From 7f80671be8b14fe617ec596c5be7166745da772d Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Mon, 25 Mar 2024 18:58:48 +0100 Subject: [PATCH 001/131] adding pre compiled header --- Makefile | 55 +++++++------- example/main.c | 6 +- src/core/UUID.cpp | 4 +- src/core/UUID.h | 4 +- src/core/application.cpp | 3 +- src/core/application.h | 8 +-- src/core/bridge.cpp | 6 +- src/core/errors.cpp | 7 +- src/core/errors.h | 5 +- src/core/fps.cpp | 5 +- src/core/fps.h | 4 +- src/core/graphics.h | 9 +-- src/core/memory.cpp | 3 +- src/core/profiler.h | 13 +--- src/platform/inputs.h | 14 ++-- src/platform/window.h | 6 +- src/pre_compiled.h | 72 +++++++++++++++++++ src/renderer/buffers/vk_buffer.cpp | 4 +- src/renderer/buffers/vk_buffer.h | 6 +- src/renderer/buffers/vk_ibo.h | 6 +- src/renderer/buffers/vk_ubo.cpp | 5 +- src/renderer/buffers/vk_ubo.h | 7 +- src/renderer/buffers/vk_vbo.cpp | 3 +- src/renderer/command/cmd_manager.h | 6 +- .../command/single_time_cmd_manager.cpp | 3 +- .../command/single_time_cmd_manager.h | 5 +- src/renderer/command/vk_cmd_buffer.h | 7 +- src/renderer/command/vk_cmd_pool.h | 5 +- src/renderer/core/cmd_resource.h | 5 +- src/renderer/core/drawable_resource.h | 6 +- src/renderer/core/memory.cpp | 14 ++-- src/renderer/core/memory.h | 6 +- src/renderer/core/render_core.cpp | 5 +- src/renderer/core/render_core.h | 6 +- src/renderer/core/vk_device.cpp | 8 +-- src/renderer/core/vk_device.h | 5 +- src/renderer/core/vk_fence.h | 5 +- src/renderer/core/vk_instance.cpp | 3 +- src/renderer/core/vk_instance.h | 6 +- src/renderer/core/vk_queues.cpp | 4 +- src/renderer/core/vk_queues.h | 8 +-- src/renderer/core/vk_semaphore.h | 5 +- src/renderer/core/vk_surface.cpp | 5 +- src/renderer/core/vk_surface.h | 6 +- src/renderer/core/vk_validation_layers.cpp | 4 +- src/renderer/core/vk_validation_layers.h | 5 +- .../descriptors/descriptor_pool_manager.h | 5 +- src/renderer/descriptors/vk_descriptor_pool.h | 6 +- src/renderer/descriptors/vk_descriptor_set.h | 7 +- .../descriptors/vk_descriptor_set_layout.h | 6 +- src/renderer/images/texture.cpp | 3 +- src/renderer/images/texture.h | 10 +-- src/renderer/images/texture_atlas.h | 6 +- src/renderer/images/texture_manager.h | 6 +- src/renderer/images/vk_image.h | 7 +- src/renderer/pipeline/pipeline.h | 6 +- src/renderer/pixel_put.cpp | 3 +- src/renderer/pixel_put.h | 5 +- src/renderer/renderer.h | 5 +- src/renderer/renderpass/vk_framebuffer.cpp | 3 +- src/renderer/renderpass/vk_framebuffer.h | 5 +- src/renderer/renderpass/vk_render_pass.h | 5 +- src/renderer/swapchain/vk_swapchain.cpp | 4 +- src/renderer/swapchain/vk_swapchain.h | 7 +- src/renderer/texts/font.cpp | 3 +- src/renderer/texts/font.h | 7 +- src/renderer/texts/font_library.cpp | 3 +- src/renderer/texts/font_library.h | 9 +-- src/renderer/texts/text.h | 6 +- src/renderer/texts/text_descriptor.h | 8 +-- src/renderer/texts/text_library.cpp | 3 +- src/renderer/texts/text_library.h | 8 +-- src/renderer/texts/text_manager.h | 9 +-- src/utils/combine_hash.h | 5 +- src/utils/dogica_ttf.h | 4 +- src/utils/icon_mlx.h | 4 +- xmake.lua | 2 + 77 files changed, 266 insertions(+), 291 deletions(-) create mode 100644 src/pre_compiled.h diff --git a/Makefile b/Makefile index c621183..cf1ebdd 100644 --- a/Makefile +++ b/Makefile @@ -6,42 +6,42 @@ # By: maldavid +#+ +:+ +#+ # # +#+#+#+#+#+ +#+ # # Created: 2022/10/04 16:43:41 by maldavid #+# #+# # -# Updated: 2024/01/10 14:20:30 by maldavid ### ########.fr # +# Updated: 2024/03/25 18:57:44 by maldavid ### ########.fr # # # # 
**************************************************************************** # -NAME = libmlx.so +NAME = libmlx.so -SRCS = $(wildcard $(addsuffix /*.cpp, ./src/core)) -SRCS += $(wildcard $(addsuffix /*.cpp, ./src/platform)) -SRCS += $(wildcard $(addsuffix /*.cpp, ./src/renderer)) -SRCS += $(wildcard $(addsuffix /*.cpp, ./src/renderer/**)) +SRCS = $(wildcard $(addsuffix /*.cpp, ./src/core)) +SRCS += $(wildcard $(addsuffix /*.cpp, ./src/platform)) +SRCS += $(wildcard $(addsuffix /*.cpp, ./src/renderer)) +SRCS += $(wildcard $(addsuffix /*.cpp, ./src/renderer/**)) -OBJ_DIR = objs/makefile -OBJS = $(addprefix $(OBJ_DIR)/, $(SRCS:.cpp=.o)) +OBJ_DIR = objs/makefile +OBJS = $(addprefix $(OBJ_DIR)/, $(SRCS:.cpp=.o)) + +PCH = ./src/pre_compiled.h +GCH = ./src/pre_compiled.h.gch OS = $(shell uname -s) -DEBUG ?= false -TOOLCHAIN ?= clang -IMAGES_OPTIMIZED ?= true +DEBUG ?= false +TOOLCHAIN ?= clang +IMAGES_OPTIMIZED ?= true FORCE_INTEGRATED_GPU ?= false GRAPHICS_MEMORY_DUMP ?= false PROFILER ?= false -MODE = "release" +MODE = "release" -CXX = clang++ +CXX = clang++ -CXXFLAGS = -std=c++17 -O3 -fPIC -Wall -Wextra -Werror -DSDL_MAIN_HANDLED -INCLUDES = -I./includes -I./src -I./third_party +CXXFLAGS = -std=c++17 -O3 -fPIC -Wall -Wextra -Wno-deprecated -DSDL_MAIN_HANDLED +INCLUDES = -I./includes -I./src -I./third_party LDLIBS = ifeq ($(TOOLCHAIN), gcc) CXX = g++ - CXXFLAGS += -Wno-error=cpp -else - CXXFLAGS += -Wno-error=#warning endif ifeq ($(OS), Darwin) @@ -73,13 +73,17 @@ endif RM = rm -rf -$(OBJ_DIR)/%.o: %.cpp +$(OBJ_DIR)/%.o: %.cpp $(GCH) @printf "\033[1;32m[compiling... "$(MODE)" "$(CXX)"]\033[1;00m "$<"\n" @$(CXX) $(CXXFLAGS) $(INCLUDES) -c $< -o $@ -all: $(NAME) +all: $(NAME) -$(NAME): $(OBJ_DIR) $(OBJS) +$(GCH): + @printf "\033[1;32m[compiling "$(MODE)" "$(CXX)"]\033[1;00m PreCompiled header\n" + @$(CXX) $(CXXFLAGS) $(INCLUDES) -c $(PCH) -o $(GCH) + +$(NAME): $(GCH) $(OBJ_DIR) $(OBJS) @printf "\033[1;32m[linking ... 
"$(MODE)"]\033[1;00m "$@"\n" @$(CXX) -shared -o $(NAME) $(OBJS) $(LDLIBS) @printf "\033[1;32m[build finished]\033[1;00m\n" @@ -92,10 +96,11 @@ clean: @$(RM) $(OBJ_DIR) @printf "\033[1;32m[object files removed]\033[1;00m\n" -fclean: clean +fclean: clean @$(RM) $(NAME) - @printf "\033[1;32m["$(NAME)" removed]\033[1;00m\n" + @$(RM) $(GCH) + @printf "\033[1;32m["$(NAME)" and gch removed]\033[1;00m\n" -re: fclean all +re: fclean all -.PHONY: all clean fclean re +.PHONY: all clean fclean re pch diff --git a/example/main.c b/example/main.c index baa9faf..23c5b8e 100644 --- a/example/main.c +++ b/example/main.c @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/04 17:55:21 by maldavid #+# #+# */ -/* Updated: 2024/03/25 16:16:07 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 18:10:41 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -158,10 +158,6 @@ int main(void) mlx.img = create_image(&mlx); - - mlx_string_put(mlx.mlx, mlx.win, 0, 10, 0xFFFFFF00, "fps:"); - mlx_string_put(mlx.mlx, mlx.win, 0, 20, 0xFFFFFFFF, "fps:"); - mlx_set_font_scale(mlx.mlx, mlx.win, "font.ttf", 16.f); mlx_string_put(mlx.mlx, mlx.win, 20, 20, 0xFF0020FF, "that text will disappear"); diff --git a/src/core/UUID.cpp b/src/core/UUID.cpp index c7f2bb2..6b62b4d 100644 --- a/src/core/UUID.cpp +++ b/src/core/UUID.cpp @@ -6,13 +6,11 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2024/01/06 11:26:37 by maldavid #+# #+# */ -/* Updated: 2024/01/06 11:28:15 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 17:44:06 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #include -#include -#include namespace mlx { diff --git a/src/core/UUID.h b/src/core/UUID.h index b26df3b..ac94c1a 100644 --- a/src/core/UUID.h +++ b/src/core/UUID.h @@ -6,14 +6,14 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2024/01/06 11:13:23 by maldavid #+# #+# */ -/* Updated: 2024/01/07 01:44:21 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 17:43:58 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __MLX_UUID__ #define __MLX_UUID__ -#include +#include namespace mlx { diff --git a/src/core/application.cpp b/src/core/application.cpp index 237ab2c..bfb39a7 100644 --- a/src/core/application.cpp +++ b/src/core/application.cpp @@ -6,14 +6,13 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/04 22:10:52 by maldavid #+# #+# */ -/* Updated: 2024/03/24 14:39:23 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 17:44:37 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #include "application.h" #include #include -#include #include #include #include diff --git a/src/core/application.h b/src/core/application.h index 48717dc..7532e05 100644 --- a/src/core/application.h +++ b/src/core/application.h @@ -6,23 +6,19 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/04 21:49:46 by maldavid #+# #+# */ -/* Updated: 2024/01/26 11:26:54 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 17:45:03 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __MLX_APPLICATION__ #define __MLX_APPLICATION__ -#include -#include -#include -#include +#include #include #include #include -#include 
#include #include diff --git a/src/core/bridge.cpp b/src/core/bridge.cpp index 9fb6b5a..ad3b8e2 100644 --- a/src/core/bridge.cpp +++ b/src/core/bridge.cpp @@ -6,18 +6,16 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/04 17:35:20 by maldavid #+# #+# */ -/* Updated: 2024/02/23 22:37:24 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 17:44:56 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ -#include +#include #include "errors.h" #include "application.h" #include -#include #include #include -#include static void* __mlx_ptr = nullptr; diff --git a/src/core/errors.cpp b/src/core/errors.cpp index 63aa607..098abc8 100644 --- a/src/core/errors.cpp +++ b/src/core/errors.cpp @@ -6,14 +6,11 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/04 17:48:06 by maldavid #+# #+# */ -/* Updated: 2024/01/05 20:41:17 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 17:45:12 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ -#include -#include -#include -#include +#include #include "errors.h" diff --git a/src/core/errors.h b/src/core/errors.h index 9bfde94..40334e9 100644 --- a/src/core/errors.h +++ b/src/core/errors.h @@ -6,15 +6,14 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/04 17:42:32 by maldavid #+# #+# */ -/* Updated: 2023/12/27 17:21:07 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 17:45:21 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __MLX_ERRORS__ #define __MLX_ERRORS__ -#include -#include +#include enum class e_kind { diff --git a/src/core/fps.cpp b/src/core/fps.cpp index eae6a8b..8da10a2 100644 --- a/src/core/fps.cpp +++ b/src/core/fps.cpp @@ -6,14 +6,11 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2024/01/18 14:56:17 by maldavid #+# #+# */ -/* Updated: 2024/03/25 16:44:15 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 17:45:33 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #include -#include -#include -#include namespace mlx { diff --git a/src/core/fps.h b/src/core/fps.h index 4433f07..332e812 100644 --- a/src/core/fps.h +++ b/src/core/fps.h @@ -6,14 +6,14 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2024/01/18 14:53:30 by maldavid #+# #+# */ -/* Updated: 2024/01/18 15:16:06 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 17:45:27 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __MLX_FPS__ #define __MLX_FPS__ -#include +#include namespace mlx { diff --git a/src/core/graphics.h b/src/core/graphics.h index 0966527..4ea3116 100644 --- a/src/core/graphics.h +++ b/src/core/graphics.h @@ -6,18 +6,14 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/04/02 14:49:49 by maldavid #+# #+# */ -/* Updated: 2024/03/24 14:43:09 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 17:45:49 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __MLX_GRAPHICS__ #define __MLX_GRAPHICS__ -#include -#include - -#include -#include +#include #include #include @@ -27,7 +23,6 @@ #include #include #include -#include #include namespace mlx diff --git a/src/core/memory.cpp b/src/core/memory.cpp index 
2e2ee51..708ea73 100644 --- a/src/core/memory.cpp +++ b/src/core/memory.cpp @@ -6,13 +6,12 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/12/07 16:32:01 by kbz_8 #+# #+# */ -/* Updated: 2023/12/11 15:25:02 by kbz_8 ### ########.fr */ +/* Updated: 2024/03/25 17:46:03 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #include #include -#include namespace mlx { diff --git a/src/core/profiler.h b/src/core/profiler.h index 4424238..dc75705 100644 --- a/src/core/profiler.h +++ b/src/core/profiler.h @@ -6,23 +6,16 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2024/01/10 13:35:45 by maldavid #+# #+# */ -/* Updated: 2024/03/24 14:41:27 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 17:46:21 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __MLX_PROFILER__ #define __MLX_PROFILER__ +#include + #include -#include -#include -#include -#include -#include -#include -#include -#include -#include namespace mlx { diff --git a/src/platform/inputs.h b/src/platform/inputs.h index 6a575bb..fc12254 100644 --- a/src/platform/inputs.h +++ b/src/platform/inputs.h @@ -6,18 +6,14 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/05 16:27:35 by maldavid #+# #+# */ -/* Updated: 2024/02/25 07:51:55 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 17:47:03 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ -#include -#include -#include -#include -#include -#include +#ifndef __MLX_INPUTS__ +#define __MLX_INPUTS__ -#include +#include #include "window.h" @@ -74,3 +70,5 @@ namespace mlx bool _end = false; }; } + +#endif diff --git a/src/platform/window.h b/src/platform/window.h index dfe0924..94f4f00 100644 --- a/src/platform/window.h +++ b/src/platform/window.h @@ -6,16 +6,14 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/04 21:53:12 by maldavid #+# #+# */ -/* Updated: 2023/12/21 00:24:26 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 17:47:10 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __MLX_WINDOW__ #define __MLX_WINDOW__ -#include -#include -#include +#include namespace mlx { diff --git a/src/pre_compiled.h b/src/pre_compiled.h new file mode 100644 index 0000000..f795a4b --- /dev/null +++ b/src/pre_compiled.h @@ -0,0 +1,72 @@ +/* ************************************************************************** */ +/* */ +/* ::: :::::::: */ +/* pre_compiled.h :+: :+: :+: */ +/* +:+ +:+ +:+ */ +/* By: maldavid +#+ +:+ +#+ */ +/* +#+#+#+#+#+ +#+ */ +/* Created: 2024/03/25 17:37:23 by maldavid #+# #+# */ +/* Updated: 2024/03/25 18:06:01 by maldavid ### ########.fr */ +/* */ +/* ************************************************************************** */ + +#ifndef __MLX_PRE_COMPILED_HEADER__ +#define __MLX_PRE_COMPILED_HEADER__ + +#define VK_NO_PROTOTYPES + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#ifdef MLX_COMPILER_CLANG + #pragma clang diagnostic push + #pragma clang diagnostic ignored "-Weverything" + #include 
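// Note: the diagnostic push/ignored/pop guard above (mirrored for GCC just below, and
// matching the one already used in src/renderer/core/memory.cpp) exists because the
// library builds with -Wall -Wextra -Werror; the vendored third-party header pulled in
// inside the guard would otherwise fail the build on its own warnings. MLX_COMPILER_CLANG
// and MLX_COMPILER_GCC are assumed here to be defined elsewhere by the build configuration.
// VK_NO_PROTOTYPES at the top of this precompiled header keeps the Vulkan headers from
// declaring linker-resolved entry points, since the functions are loaded at runtime through
// volk (src/renderer/core/render_core.cpp defines VOLK_IMPLEMENTATION before its include).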
+ #pragma clang diagnostic pop +#elif defined(MLX_COMPILER_GCC) + #pragma GCC diagnostic push + #pragma GCC diagnostic ignored "-Wimplicit-fallthrough" + #pragma GCC diagnostic ignored "-Wmissing-field-initializers" + #pragma GCC diagnostic ignored "-Wunused-parameter" + #pragma GCC diagnostic ignored "-Wunused-variable" + #pragma GCC diagnostic ignored "-Wparentheses" + #include + #pragma GCC diagnostic pop +#else + #include +#endif + +#endif diff --git a/src/renderer/buffers/vk_buffer.cpp b/src/renderer/buffers/vk_buffer.cpp index 71693cc..421543a 100644 --- a/src/renderer/buffers/vk_buffer.cpp +++ b/src/renderer/buffers/vk_buffer.cpp @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/08 18:55:57 by maldavid #+# #+# */ -/* Updated: 2024/03/14 17:28:35 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 17:47:35 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -15,8 +15,6 @@ #include #include #include -#include -#include namespace mlx { diff --git a/src/renderer/buffers/vk_buffer.h b/src/renderer/buffers/vk_buffer.h index f36b27f..12d9533 100644 --- a/src/renderer/buffers/vk_buffer.h +++ b/src/renderer/buffers/vk_buffer.h @@ -6,15 +6,15 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/06 23:18:52 by maldavid #+# #+# */ -/* Updated: 2024/01/11 05:16:58 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 17:47:28 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __MLX_VK_BUFFER__ #define __MLX_VK_BUFFER__ -#include -#include +#include + #include #include diff --git a/src/renderer/buffers/vk_ibo.h b/src/renderer/buffers/vk_ibo.h index 7acc988..e317b4e 100644 --- a/src/renderer/buffers/vk_ibo.h +++ b/src/renderer/buffers/vk_ibo.h @@ -6,15 +6,15 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/01/25 15:05:05 by maldavid #+# #+# */ -/* Updated: 2024/01/10 23:05:15 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 17:47:55 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __VK_IBO__ #define __VK_IBO__ -#include -#include +#include + #include "vk_buffer.h" #include diff --git a/src/renderer/buffers/vk_ubo.cpp b/src/renderer/buffers/vk_ubo.cpp index 996516c..52cb3df 100644 --- a/src/renderer/buffers/vk_ubo.cpp +++ b/src/renderer/buffers/vk_ubo.cpp @@ -6,12 +6,13 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/06 18:45:52 by maldavid #+# #+# */ -/* Updated: 2024/01/10 18:30:57 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 17:48:07 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ +#include + #include "vk_ubo.h" -#include #include #include diff --git a/src/renderer/buffers/vk_ubo.h b/src/renderer/buffers/vk_ubo.h index 74a7fd9..0b8b53f 100644 --- a/src/renderer/buffers/vk_ubo.h +++ b/src/renderer/buffers/vk_ubo.h @@ -6,17 +6,16 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/06 18:45:29 by maldavid #+# #+# */ -/* Updated: 2023/12/08 19:06:28 by kbz_8 ### ########.fr */ +/* Updated: 2024/03/25 17:48:14 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __MLX_VK_UBO__ #define __MLX_VK_UBO__ +#include + #include "vk_buffer.h" -#include -#include -#include namespace mlx { diff 
--git a/src/renderer/buffers/vk_vbo.cpp b/src/renderer/buffers/vk_vbo.cpp index ec17a88..5dc857d 100644 --- a/src/renderer/buffers/vk_vbo.cpp +++ b/src/renderer/buffers/vk_vbo.cpp @@ -6,12 +6,11 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/06 18:28:08 by maldavid #+# #+# */ -/* Updated: 2023/12/12 22:17:14 by kbz_8 ### ########.fr */ +/* Updated: 2024/03/25 17:48:20 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #include "vk_vbo.h" -#include namespace mlx { diff --git a/src/renderer/command/cmd_manager.h b/src/renderer/command/cmd_manager.h index ac69876..a623aa2 100644 --- a/src/renderer/command/cmd_manager.h +++ b/src/renderer/command/cmd_manager.h @@ -6,17 +6,15 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/04/02 17:48:52 by maldavid #+# #+# */ -/* Updated: 2024/01/03 15:27:35 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 17:48:39 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __MLX_COMMAND_MANAGER__ #define __MLX_COMMAND_MANAGER__ -#include +#include -#include -#include #include #include #include diff --git a/src/renderer/command/single_time_cmd_manager.cpp b/src/renderer/command/single_time_cmd_manager.cpp index b4dcf7b..1ba8ad6 100644 --- a/src/renderer/command/single_time_cmd_manager.cpp +++ b/src/renderer/command/single_time_cmd_manager.cpp @@ -6,11 +6,10 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/12/15 19:57:49 by maldavid #+# #+# */ -/* Updated: 2024/01/11 03:13:21 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 17:48:44 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ -#include #include #include diff --git a/src/renderer/command/single_time_cmd_manager.h b/src/renderer/command/single_time_cmd_manager.h index 271fdf9..f95c8f8 100644 --- a/src/renderer/command/single_time_cmd_manager.h +++ b/src/renderer/command/single_time_cmd_manager.h @@ -6,14 +6,15 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/12/15 18:25:57 by maldavid #+# #+# */ -/* Updated: 2024/01/07 01:30:19 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 17:48:51 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __MLX_SINGLE_TIME_CMD_MANAGER__ #define __MLX_SINGLE_TIME_CMD_MANAGER__ -#include +#include + #include #include diff --git a/src/renderer/command/vk_cmd_buffer.h b/src/renderer/command/vk_cmd_buffer.h index d8fd7aa..10755fc 100644 --- a/src/renderer/command/vk_cmd_buffer.h +++ b/src/renderer/command/vk_cmd_buffer.h @@ -6,17 +6,16 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/06 18:25:42 by maldavid #+# #+# */ -/* Updated: 2024/01/07 01:25:50 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 17:49:10 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __MLX_VK_CMD_BUFFER__ #define __MLX_VK_CMD_BUFFER__ -#include -#include +#include + #include -#include namespace mlx { diff --git a/src/renderer/command/vk_cmd_pool.h b/src/renderer/command/vk_cmd_pool.h index 8d97157..04b4a93 100644 --- a/src/renderer/command/vk_cmd_pool.h +++ b/src/renderer/command/vk_cmd_pool.h @@ -6,15 +6,14 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/06 18:24:12 by maldavid #+# #+# 
*/ -/* Updated: 2024/01/03 15:27:26 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 17:49:19 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __MLX_VK_CMD_POOL__ #define __MLX_VK_CMD_POOL__ -#include -#include +#include namespace mlx { diff --git a/src/renderer/core/cmd_resource.h b/src/renderer/core/cmd_resource.h index 0010333..8f0d4f7 100644 --- a/src/renderer/core/cmd_resource.h +++ b/src/renderer/core/cmd_resource.h @@ -6,14 +6,15 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/12/16 20:44:29 by maldavid #+# #+# */ -/* Updated: 2024/03/14 17:28:08 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 17:49:25 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __MLX_COMMAND_RESOURCE__ #define __MLX_COMMAND_RESOURCE__ -#include +#include + #include namespace mlx diff --git a/src/renderer/core/drawable_resource.h b/src/renderer/core/drawable_resource.h index 8df2b98..a6ab54f 100644 --- a/src/renderer/core/drawable_resource.h +++ b/src/renderer/core/drawable_resource.h @@ -6,16 +6,14 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2024/01/10 21:00:37 by maldavid #+# #+# */ -/* Updated: 2024/01/11 01:21:15 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 17:49:33 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __MLX_DRAWABLE_RESOURCE__ #define __MLX_DRAWABLE_RESOURCE__ -#include -#include -#include +#include namespace mlx { diff --git a/src/renderer/core/memory.cpp b/src/renderer/core/memory.cpp index e3cbe7c..c59d1f4 100644 --- a/src/renderer/core/memory.cpp +++ b/src/renderer/core/memory.cpp @@ -6,14 +6,13 @@ /* By: kbz_8 +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/10/20 22:02:37 by kbz_8 #+# #+# */ -/* Updated: 2024/03/14 16:34:53 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 18:10:10 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #include -#include -#include +#define VK_NO_PROTOTYPES #define VMA_STATIC_VULKAN_FUNCTIONS 0 #define VMA_DYNAMIC_VULKAN_FUNCTIONS 0 #define VMA_VULKAN_VERSION 1002000 @@ -23,7 +22,7 @@ #ifdef MLX_COMPILER_CLANG #pragma clang diagnostic push #pragma clang diagnostic ignored "-Weverything" - #include + #include #pragma clang diagnostic pop #elif defined(MLX_COMPILER_GCC) #pragma GCC diagnostic push @@ -32,14 +31,15 @@ #pragma GCC diagnostic ignored "-Wunused-parameter" #pragma GCC diagnostic ignored "-Wunused-variable" #pragma GCC diagnostic ignored "-Wparentheses" - #include + #include #pragma GCC diagnostic pop #else - #include + #include #endif +#include +#include #include -#include namespace mlx { diff --git a/src/renderer/core/memory.h b/src/renderer/core/memory.h index 46cd13f..b82063a 100644 --- a/src/renderer/core/memory.h +++ b/src/renderer/core/memory.h @@ -6,16 +6,14 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/10/20 02:13:03 by maldavid #+# #+# */ -/* Updated: 2024/03/25 16:01:06 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 17:49:47 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __MLX_VK_MEMORY__ #define __MLX_VK_MEMORY__ -#include -#include -#include +#include namespace mlx { diff --git a/src/renderer/core/render_core.cpp b/src/renderer/core/render_core.cpp index 
1de3720..4162cda 100644 --- a/src/renderer/core/render_core.cpp +++ b/src/renderer/core/render_core.cpp @@ -6,13 +6,14 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/12/17 23:33:34 by maldavid #+# #+# */ -/* Updated: 2024/01/20 08:20:07 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 18:04:28 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ +#define VK_NO_PROTOTYPES #define VOLK_IMPLEMENTATION +#include -#include #include #include diff --git a/src/renderer/core/render_core.h b/src/renderer/core/render_core.h index cf4aae5..88a319e 100644 --- a/src/renderer/core/render_core.h +++ b/src/renderer/core/render_core.h @@ -6,16 +6,14 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/08 19:16:32 by maldavid #+# #+# */ -/* Updated: 2024/01/20 08:17:58 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 17:49:59 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __MLX_RENDER_CORE__ #define __MLX_RENDER_CORE__ -#include -#include -#include +#include #include #include diff --git a/src/renderer/core/vk_device.cpp b/src/renderer/core/vk_device.cpp index f862772..1a7b602 100644 --- a/src/renderer/core/vk_device.cpp +++ b/src/renderer/core/vk_device.cpp @@ -6,17 +6,11 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/08 19:14:29 by maldavid #+# #+# */ -/* Updated: 2024/03/14 17:23:45 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 17:50:26 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #include "render_core.h" -#include -#include -#include -#include -#include -#include namespace mlx { diff --git a/src/renderer/core/vk_device.h b/src/renderer/core/vk_device.h index ad7b5ca..7fb4525 100644 --- a/src/renderer/core/vk_device.h +++ b/src/renderer/core/vk_device.h @@ -6,15 +6,14 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/08 19:13:42 by maldavid #+# #+# */ -/* Updated: 2024/03/14 16:59:54 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 17:50:32 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __MLX_VK_DEVICE__ #define __MLX_VK_DEVICE__ -#include -#include +#include namespace mlx { diff --git a/src/renderer/core/vk_fence.h b/src/renderer/core/vk_fence.h index d8bd364..0237010 100644 --- a/src/renderer/core/vk_fence.h +++ b/src/renderer/core/vk_fence.h @@ -6,15 +6,14 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/04/02 17:52:09 by maldavid #+# #+# */ -/* Updated: 2024/01/03 15:26:21 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 17:50:52 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __MLX_VK_FENCE__ #define __MLX_VK_FENCE__ -#include -#include +#include namespace mlx { diff --git a/src/renderer/core/vk_instance.cpp b/src/renderer/core/vk_instance.cpp index 040f9d8..8b5b18a 100644 --- a/src/renderer/core/vk_instance.cpp +++ b/src/renderer/core/vk_instance.cpp @@ -6,14 +6,13 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/08 19:04:21 by maldavid #+# #+# */ -/* Updated: 2024/02/24 21:10:32 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 17:50:59 by maldavid ### ########.fr */ /* */ /* 
************************************************************************** */ #include "vk_instance.h" #include "render_core.h" #include -#include namespace mlx { diff --git a/src/renderer/core/vk_instance.h b/src/renderer/core/vk_instance.h index e827665..e85e165 100644 --- a/src/renderer/core/vk_instance.h +++ b/src/renderer/core/vk_instance.h @@ -6,16 +6,14 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/08 19:03:04 by maldavid #+# #+# */ -/* Updated: 2024/01/03 15:26:26 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 17:51:09 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __MLX_VK_INSTANCE__ #define __MLX_VK_INSTANCE__ -#include -#include -#include +#include namespace mlx { diff --git a/src/renderer/core/vk_queues.cpp b/src/renderer/core/vk_queues.cpp index b4f4ba3..8d712d5 100644 --- a/src/renderer/core/vk_queues.cpp +++ b/src/renderer/core/vk_queues.cpp @@ -6,13 +6,11 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/08 19:02:42 by maldavid #+# #+# */ -/* Updated: 2024/03/14 17:01:10 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 17:51:35 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #include "render_core.h" -#include -#include namespace mlx { diff --git a/src/renderer/core/vk_queues.h b/src/renderer/core/vk_queues.h index 2a4a1ba..5dda2fd 100644 --- a/src/renderer/core/vk_queues.h +++ b/src/renderer/core/vk_queues.h @@ -6,17 +6,15 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/08 19:01:49 by maldavid #+# #+# */ -/* Updated: 2024/03/14 17:00:48 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 17:51:31 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __MLX_VK_QUEUES__ #define __MLX_VK_QUEUES__ -#include -#include -#include -#include +#include + #include namespace mlx diff --git a/src/renderer/core/vk_semaphore.h b/src/renderer/core/vk_semaphore.h index f17ff9e..8e568ba 100644 --- a/src/renderer/core/vk_semaphore.h +++ b/src/renderer/core/vk_semaphore.h @@ -6,15 +6,14 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/08 18:59:38 by maldavid #+# #+# */ -/* Updated: 2024/03/14 17:01:57 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 17:51:47 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __MLX_VK_SEMAPHORE__ #define __MLX_VK_SEMAPHORE__ -#include -#include +#include namespace mlx { diff --git a/src/renderer/core/vk_surface.cpp b/src/renderer/core/vk_surface.cpp index e2e2ac4..ebe2ed2 100644 --- a/src/renderer/core/vk_surface.cpp +++ b/src/renderer/core/vk_surface.cpp @@ -6,16 +6,13 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/08 18:58:49 by maldavid #+# #+# */ -/* Updated: 2024/01/10 21:55:21 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 17:51:54 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #include "render_core.h" #include #include -#include -#include -#include namespace mlx { diff --git a/src/renderer/core/vk_surface.h b/src/renderer/core/vk_surface.h index c700f8f..4f8cb9f 100644 --- a/src/renderer/core/vk_surface.h +++ b/src/renderer/core/vk_surface.h @@ -6,16 +6,14 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 
2022/10/08 18:57:55 by maldavid #+# #+# */ -/* Updated: 2024/01/03 15:26:43 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 17:52:02 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __MLX_VK_SURFACE__ #define __MLX_VK_SURFACE__ -#include -#include -#include +#include namespace mlx { diff --git a/src/renderer/core/vk_validation_layers.cpp b/src/renderer/core/vk_validation_layers.cpp index a63b7fb..9c119fd 100644 --- a/src/renderer/core/vk_validation_layers.cpp +++ b/src/renderer/core/vk_validation_layers.cpp @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/12/19 14:05:25 by maldavid #+# #+# */ -/* Updated: 2024/03/14 17:03:24 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 17:52:08 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -14,8 +14,6 @@ #include "vulkan/vulkan_core.h" #include -#include -#include namespace mlx { diff --git a/src/renderer/core/vk_validation_layers.h b/src/renderer/core/vk_validation_layers.h index 0758669..2d0ff13 100644 --- a/src/renderer/core/vk_validation_layers.h +++ b/src/renderer/core/vk_validation_layers.h @@ -6,15 +6,14 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/12/19 14:04:25 by maldavid #+# #+# */ -/* Updated: 2024/03/14 17:02:55 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 17:52:19 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __VK_VALIDATION_LAYERS__ #define __VK_VALIDATION_LAYERS__ -#include -#include +#include namespace mlx { diff --git a/src/renderer/descriptors/descriptor_pool_manager.h b/src/renderer/descriptors/descriptor_pool_manager.h index 228b0f3..d4ff428 100644 --- a/src/renderer/descriptors/descriptor_pool_manager.h +++ b/src/renderer/descriptors/descriptor_pool_manager.h @@ -6,15 +6,16 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2024/01/20 06:26:26 by maldavid #+# #+# */ -/* Updated: 2024/01/20 08:23:04 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 17:52:32 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __MLX_DESCRIPTOR_POOL_MANAGER__ #define __MLX_DESCRIPTOR_POOL_MANAGER__ +#include + #include -#include namespace mlx { diff --git a/src/renderer/descriptors/vk_descriptor_pool.h b/src/renderer/descriptors/vk_descriptor_pool.h index 67acdb3..b303b1c 100644 --- a/src/renderer/descriptors/vk_descriptor_pool.h +++ b/src/renderer/descriptors/vk_descriptor_pool.h @@ -6,16 +6,14 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/01/23 18:32:43 by maldavid #+# #+# */ -/* Updated: 2024/01/20 07:38:32 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 17:52:39 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __VK_DESCRIPTOR_POOL__ #define __VK_DESCRIPTOR_POOL__ -#include -#include -#include +#include namespace mlx { diff --git a/src/renderer/descriptors/vk_descriptor_set.h b/src/renderer/descriptors/vk_descriptor_set.h index 4eb7372..61fd7a4 100644 --- a/src/renderer/descriptors/vk_descriptor_set.h +++ b/src/renderer/descriptors/vk_descriptor_set.h @@ -6,16 +6,15 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/01/23 18:39:36 by maldavid #+# #+# */ -/* Updated: 2024/01/20 07:17:39 by maldavid ### 
########.fr */ +/* Updated: 2024/03/25 17:52:57 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __VK_DESCRIPTOR_SET__ #define __VK_DESCRIPTOR_SET__ -#include -#include -#include +#include + #include namespace mlx diff --git a/src/renderer/descriptors/vk_descriptor_set_layout.h b/src/renderer/descriptors/vk_descriptor_set_layout.h index b3a5c97..13cd5a0 100644 --- a/src/renderer/descriptors/vk_descriptor_set_layout.h +++ b/src/renderer/descriptors/vk_descriptor_set_layout.h @@ -6,16 +6,14 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/01/23 18:36:22 by maldavid #+# #+# */ -/* Updated: 2024/01/20 06:25:54 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 17:53:05 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __VK_DESCRIPTOR_SET_LAYOUT__ #define __VK_DESCRIPTOR_SET_LAYOUT__ -#include -#include -#include +#include namespace mlx { diff --git a/src/renderer/images/texture.cpp b/src/renderer/images/texture.cpp index 9a48307..caa5118 100644 --- a/src/renderer/images/texture.cpp +++ b/src/renderer/images/texture.cpp @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/03/31 18:03:35 by maldavid #+# #+# */ -/* Updated: 2024/03/14 19:07:01 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 17:53:37 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -15,7 +15,6 @@ #include #include #include -#include #define STB_IMAGE_IMPLEMENTATION #include diff --git a/src/renderer/images/texture.h b/src/renderer/images/texture.h index 77282cf..aaa91ff 100644 --- a/src/renderer/images/texture.h +++ b/src/renderer/images/texture.h @@ -6,23 +6,19 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/03/08 02:24:58 by maldavid #+# #+# */ -/* Updated: 2024/03/14 19:06:07 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 17:53:30 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __MLX_TEXTURE__ #define __MLX_TEXTURE__ -#include -#include +#include + #include #include #include #include -#include -#ifdef DEBUG - #include -#endif namespace mlx { diff --git a/src/renderer/images/texture_atlas.h b/src/renderer/images/texture_atlas.h index 4a33b1a..7717fdd 100644 --- a/src/renderer/images/texture_atlas.h +++ b/src/renderer/images/texture_atlas.h @@ -6,16 +6,16 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/04/07 16:36:33 by maldavid #+# #+# */ -/* Updated: 2024/03/14 19:57:55 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 17:53:50 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __MLX_TEXTURE_ATLAS__ #define __MLX_TEXTURE_ATLAS__ +#include + #include -#include -#include namespace mlx { diff --git a/src/renderer/images/texture_manager.h b/src/renderer/images/texture_manager.h index a0b37d5..098f5f4 100644 --- a/src/renderer/images/texture_manager.h +++ b/src/renderer/images/texture_manager.h @@ -6,17 +6,17 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2024/01/11 00:56:15 by maldavid #+# #+# */ -/* Updated: 2024/03/25 13:53:59 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 17:54:02 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ 
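/* Note: the same substitution recurs across the renderer headers in this patch — the
 * per-file standard-library and Vulkan includes are removed and replaced by a single
 * include of the new precompiled header, so each translation unit loads one pre-parsed
 * copy of the heavy common headers instead of re-parsing them file by file. */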
#ifndef __MLX_TEXTURE_MANAGER__ #define __MLX_TEXTURE_MANAGER__ -#include +#include + #include #include -#include namespace mlx { diff --git a/src/renderer/images/vk_image.h b/src/renderer/images/vk_image.h index 017ec41..d103173 100644 --- a/src/renderer/images/vk_image.h +++ b/src/renderer/images/vk_image.h @@ -6,16 +6,15 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/01/25 11:54:21 by maldavid #+# #+# */ -/* Updated: 2024/01/19 06:10:15 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 17:54:12 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __MLX_VK_IMAGE__ #define __MLX_VK_IMAGE__ -#include -#include -#include +#include + #include #include #include diff --git a/src/renderer/pipeline/pipeline.h b/src/renderer/pipeline/pipeline.h index 9700dc7..36155d0 100644 --- a/src/renderer/pipeline/pipeline.h +++ b/src/renderer/pipeline/pipeline.h @@ -6,15 +6,15 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/12/18 21:23:52 by maldavid #+# #+# */ -/* Updated: 2024/03/14 17:04:28 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 17:54:23 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __PIPELINE__ #define __PIPELINE__ -#include -#include +#include + #include namespace mlx diff --git a/src/renderer/pixel_put.cpp b/src/renderer/pixel_put.cpp index ef2b911..e70b321 100644 --- a/src/renderer/pixel_put.cpp +++ b/src/renderer/pixel_put.cpp @@ -6,12 +6,11 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/03/31 15:14:50 by maldavid #+# #+# */ -/* Updated: 2024/01/11 00:06:01 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 17:57:56 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #include -#include #include namespace mlx diff --git a/src/renderer/pixel_put.h b/src/renderer/pixel_put.h index 1de58cd..58876b4 100644 --- a/src/renderer/pixel_put.h +++ b/src/renderer/pixel_put.h @@ -6,14 +6,15 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/03/31 13:18:50 by maldavid #+# #+# */ -/* Updated: 2024/01/11 00:06:05 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 17:57:52 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __MLX_PIXEL_PUT__ #define __MLX_PIXEL_PUT__ -#include +#include + #include #include diff --git a/src/renderer/renderer.h b/src/renderer/renderer.h index a0fc309..65d8277 100644 --- a/src/renderer/renderer.h +++ b/src/renderer/renderer.h @@ -6,15 +6,14 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/12/18 17:14:45 by maldavid #+# #+# */ -/* Updated: 2024/03/14 16:34:20 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 17:58:04 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __RENDERER__ #define __RENDERER__ -#include -#include +#include #include #include diff --git a/src/renderer/renderpass/vk_framebuffer.cpp b/src/renderer/renderpass/vk_framebuffer.cpp index 20ac661..585e8e0 100644 --- a/src/renderer/renderpass/vk_framebuffer.cpp +++ b/src/renderer/renderpass/vk_framebuffer.cpp @@ -6,11 +6,10 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/06 18:18:06 by maldavid #+# #+# */ -/* Updated: 2024/01/10 21:52:51 by maldavid ### ########.fr */ +/* Updated: 
2024/03/25 17:54:45 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ -#include #include #include #include diff --git a/src/renderer/renderpass/vk_framebuffer.h b/src/renderer/renderpass/vk_framebuffer.h index 275c3e6..0ddd24e 100644 --- a/src/renderer/renderpass/vk_framebuffer.h +++ b/src/renderer/renderpass/vk_framebuffer.h @@ -6,15 +6,14 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/06 18:19:44 by maldavid #+# #+# */ -/* Updated: 2024/01/03 15:28:19 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 17:54:39 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __MLX_VK_FRAMEBUFFER__ #define __MLX_VK_FRAMEBUFFER__ -#include -#include +#include namespace mlx { diff --git a/src/renderer/renderpass/vk_render_pass.h b/src/renderer/renderpass/vk_render_pass.h index a0a7c23..58ecbfc 100644 --- a/src/renderer/renderpass/vk_render_pass.h +++ b/src/renderer/renderpass/vk_render_pass.h @@ -6,15 +6,14 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/06 18:22:00 by maldavid #+# #+# */ -/* Updated: 2024/03/14 17:05:40 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 17:55:01 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __MLX_VK_RENDER_PASS__ #define __MLX_VK_RENDER_PASS__ -#include -#include +#include namespace mlx { diff --git a/src/renderer/swapchain/vk_swapchain.cpp b/src/renderer/swapchain/vk_swapchain.cpp index 0ce2967..6ab48aa 100644 --- a/src/renderer/swapchain/vk_swapchain.cpp +++ b/src/renderer/swapchain/vk_swapchain.cpp @@ -6,15 +6,13 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/06 18:22:28 by maldavid #+# #+# */ -/* Updated: 2024/03/14 17:08:19 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 17:55:30 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #include #include #include -#include -#include namespace mlx { diff --git a/src/renderer/swapchain/vk_swapchain.h b/src/renderer/swapchain/vk_swapchain.h index 81ff0f4..b31fa24 100644 --- a/src/renderer/swapchain/vk_swapchain.h +++ b/src/renderer/swapchain/vk_swapchain.h @@ -6,16 +6,15 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/06 18:23:27 by maldavid #+# #+# */ -/* Updated: 2024/03/14 17:06:41 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 17:55:25 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __MLX_VK_SWAPCHAIN__ #define __MLX_VK_SWAPCHAIN__ -#include -#include -#include +#include + #include namespace mlx diff --git a/src/renderer/texts/font.cpp b/src/renderer/texts/font.cpp index 37f2d0a..4d5397a 100644 --- a/src/renderer/texts/font.cpp +++ b/src/renderer/texts/font.cpp @@ -6,14 +6,13 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/12/11 22:06:09 by kbz_8 #+# #+# */ -/* Updated: 2024/01/18 13:16:18 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 17:55:59 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #include #include #include -#include constexpr const int RANGE = 1024; diff --git a/src/renderer/texts/font.h b/src/renderer/texts/font.h index 0c898a8..a64bf7a 100644 --- a/src/renderer/texts/font.h +++ b/src/renderer/texts/font.h @@ 
-6,18 +6,17 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/12/11 21:17:04 by kbz_8 #+# #+# */ -/* Updated: 2024/01/18 13:15:55 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 17:55:52 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __MLX_FONT__ #define __MLX_FONT__ -#include -#include +#include + #include #include -#include namespace mlx { diff --git a/src/renderer/texts/font_library.cpp b/src/renderer/texts/font_library.cpp index e3a04c0..87b4f12 100644 --- a/src/renderer/texts/font_library.cpp +++ b/src/renderer/texts/font_library.cpp @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2024/01/18 09:28:14 by maldavid #+# #+# */ -/* Updated: 2024/01/18 13:07:48 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 17:56:09 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -14,7 +14,6 @@ #include #include #include -#include #include namespace mlx diff --git a/src/renderer/texts/font_library.h b/src/renderer/texts/font_library.h index 3019ea1..3de3819 100644 --- a/src/renderer/texts/font_library.h +++ b/src/renderer/texts/font_library.h @@ -6,18 +6,15 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2024/01/18 09:26:03 by maldavid #+# #+# */ -/* Updated: 2024/01/18 09:33:30 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 17:56:19 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __MLX_FONT_LIBRARY__ #define __MLX_FONT_LIBRARY__ -#include -#include -#include -#include -#include +#include + #include #include #include diff --git a/src/renderer/texts/text.h b/src/renderer/texts/text.h index fd244fd..00d446b 100644 --- a/src/renderer/texts/text.h +++ b/src/renderer/texts/text.h @@ -6,15 +6,15 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2024/01/11 00:09:04 by maldavid #+# #+# */ -/* Updated: 2024/03/25 16:16:48 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 17:56:34 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __MLX_TEXT__ #define __MLX_TEXT__ -#include -#include +#include + #include #include #include diff --git a/src/renderer/texts/text_descriptor.h b/src/renderer/texts/text_descriptor.h index 27a89cd..cc1374b 100644 --- a/src/renderer/texts/text_descriptor.h +++ b/src/renderer/texts/text_descriptor.h @@ -6,21 +6,19 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2024/01/11 00:13:34 by maldavid #+# #+# */ -/* Updated: 2024/02/25 07:58:13 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 17:56:49 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __MLX_TEXT_DESCRIPTOR__ #define __MLX_TEXT_DESCRIPTOR__ -#include -#include -#include +#include + #include #include #include #include -#include namespace mlx { diff --git a/src/renderer/texts/text_library.cpp b/src/renderer/texts/text_library.cpp index 652356c..74f7f85 100644 --- a/src/renderer/texts/text_library.cpp +++ b/src/renderer/texts/text_library.cpp @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/04/10 11:59:57 by maldavid #+# #+# */ -/* Updated: 2024/03/25 16:17:06 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 17:57:06 by maldavid ### ########.fr */ /* */ /* 
************************************************************************** */ @@ -14,7 +14,6 @@ #include #include #include -#include #include namespace mlx diff --git a/src/renderer/texts/text_library.h b/src/renderer/texts/text_library.h index 2b9345b..9d8d2f8 100644 --- a/src/renderer/texts/text_library.h +++ b/src/renderer/texts/text_library.h @@ -6,19 +6,17 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/04/10 11:52:30 by maldavid #+# #+# */ -/* Updated: 2024/03/25 16:04:47 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 17:57:00 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __MLX_TEXT_LIBRARY__ #define __MLX_TEXT_LIBRARY__ +#include + #include #include -#include -#include -#include -#include #include #include #include diff --git a/src/renderer/texts/text_manager.h b/src/renderer/texts/text_manager.h index 140e87d..1bef272 100644 --- a/src/renderer/texts/text_manager.h +++ b/src/renderer/texts/text_manager.h @@ -6,20 +6,17 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/04/06 16:24:11 by maldavid #+# #+# */ -/* Updated: 2024/03/14 17:08:43 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 17:57:35 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __MLX_TEXT_MANAGER__ #define __MLX_TEXT_MANAGER__ +#include + #include #include -#include -#include -#include -#include -#include #include #include #include diff --git a/src/utils/combine_hash.h b/src/utils/combine_hash.h index 48a33a3..d8fc484 100644 --- a/src/utils/combine_hash.h +++ b/src/utils/combine_hash.h @@ -6,15 +6,14 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/12/14 16:16:06 by maldavid #+# #+# */ -/* Updated: 2023/12/14 16:47:39 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 17:58:16 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __MLX_HASH__ #define __MLX_HASH__ -#include -#include +#include namespace mlx { diff --git a/src/utils/dogica_ttf.h b/src/utils/dogica_ttf.h index e890ab3..68176a2 100644 --- a/src/utils/dogica_ttf.h +++ b/src/utils/dogica_ttf.h @@ -6,14 +6,14 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/04/11 16:20:25 by maldavid #+# #+# */ -/* Updated: 2023/12/14 16:54:12 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 17:43:52 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __MLX_DOGICA_TTF__ #define __MLX_DOGICA_TTF__ -#include +#include constexpr const unsigned int dogica_ttf_len = 33860; diff --git a/src/utils/icon_mlx.h b/src/utils/icon_mlx.h index 04f0029..13f4df3 100644 --- a/src/utils/icon_mlx.h +++ b/src/utils/icon_mlx.h @@ -6,14 +6,14 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/11/25 11:23:16 by maldavid #+# #+# */ -/* Updated: 2023/11/25 11:55:51 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 17:58:24 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __ICON_MLX__ #define __ICON_MLX__ -#include +#include constexpr const int logo_mlx_height = 125; constexpr const int logo_mlx_width = 125; diff --git a/xmake.lua b/xmake.lua index 93f72d0..d2d835d 100644 --- a/xmake.lua +++ b/xmake.lua @@ -55,6 +55,8 @@ target("mlx") add_options("graphics_memory_dump") 
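-- Note: the set_pcxxheader("src/pre_compiled.h") call added just below is xmake's
-- built-in C++ precompiled-header support; it plays the same role as the hand-written
-- $(GCH) rule in the Makefile, compiling the header once and reusing the result for
-- every C++ file of the target.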
add_includedirs("includes", "src", "third_party") + set_pcxxheader("src/pre_compiled.h") + add_defines("MLX_BUILD", "SDL_MAIN_HANDLED") add_files("src/**.cpp") From e2ea602372e4ebd0b64389ca8c39c0ea5d37e885 Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Mon, 25 Mar 2024 19:21:14 +0100 Subject: [PATCH 002/131] fixing pre compiled header usage, improved compilation time --- .gitignore | 1 + src/core/UUID.cpp | 4 +++- src/core/UUID.h | 4 +--- src/core/application.cpp | 4 +++- src/core/application.h | 5 +---- src/core/bridge.cpp | 4 +++- src/core/errors.h | 4 +--- src/core/fps.cpp | 4 +++- src/core/fps.h | 4 +--- src/core/graphics.cpp | 4 +++- src/core/graphics.h | 4 +--- src/core/memory.cpp | 4 +++- src/core/memory.h | 4 +--- src/core/profiler.cpp | 4 +++- src/core/profiler.h | 4 +--- src/platform/inputs.cpp | 4 +++- src/platform/inputs.h | 4 +--- src/platform/window.cpp | 4 +++- src/platform/window.h | 4 +--- src/renderer/buffers/vk_buffer.cpp | 4 +++- src/renderer/buffers/vk_buffer.h | 4 +--- src/renderer/buffers/vk_ibo.h | 4 +--- src/renderer/buffers/vk_ubo.h | 4 +--- src/renderer/buffers/vk_vbo.cpp | 4 +++- src/renderer/command/cmd_manager.cpp | 4 +++- src/renderer/command/cmd_manager.h | 4 +--- src/renderer/command/single_time_cmd_manager.cpp | 4 +++- src/renderer/command/single_time_cmd_manager.h | 4 +--- src/renderer/command/vk_cmd_buffer.cpp | 4 +++- src/renderer/command/vk_cmd_buffer.h | 4 +--- src/renderer/command/vk_cmd_pool.cpp | 4 +++- src/renderer/command/vk_cmd_pool.h | 4 +--- src/renderer/core/cmd_resource.h | 4 +--- src/renderer/core/drawable_resource.h | 4 +--- src/renderer/core/memory.cpp | 4 +++- src/renderer/core/memory.h | 4 +--- src/renderer/core/render_core.cpp | 4 +++- src/renderer/core/render_core.h | 4 +--- src/renderer/core/vk_device.cpp | 4 +++- src/renderer/core/vk_device.h | 4 +--- src/renderer/core/vk_fence.cpp | 4 +++- src/renderer/core/vk_fence.h | 4 +--- src/renderer/core/vk_instance.cpp | 4 +++- src/renderer/core/vk_instance.h | 4 +--- src/renderer/core/vk_queues.cpp | 4 +++- src/renderer/core/vk_queues.h | 4 +--- src/renderer/core/vk_semaphore.cpp | 4 +++- src/renderer/core/vk_semaphore.h | 4 +--- src/renderer/core/vk_surface.cpp | 3 ++- src/renderer/core/vk_surface.h | 4 +--- src/renderer/core/vk_validation_layers.cpp | 3 ++- src/renderer/core/vk_validation_layers.h | 4 +--- src/renderer/descriptors/descriptor_pool_manager.cpp | 4 +++- src/renderer/descriptors/descriptor_pool_manager.h | 4 +--- src/renderer/descriptors/vk_descriptor_pool.cpp | 4 +++- src/renderer/descriptors/vk_descriptor_pool.h | 4 +--- src/renderer/descriptors/vk_descriptor_set.cpp | 4 +++- src/renderer/descriptors/vk_descriptor_set.h | 4 +--- src/renderer/descriptors/vk_descriptor_set_layout.cpp | 4 +++- src/renderer/descriptors/vk_descriptor_set_layout.h | 4 +--- src/renderer/images/texture.cpp | 10 ++++++---- src/renderer/images/texture.h | 4 +--- src/renderer/images/texture_atlas.cpp | 4 +++- src/renderer/images/texture_atlas.h | 4 +--- src/renderer/images/texture_manager.h | 4 +--- src/renderer/images/vk_image.cpp | 4 +++- src/renderer/images/vk_image.h | 4 +--- src/renderer/pipeline/pipeline.cpp | 4 +++- src/renderer/pipeline/pipeline.h | 4 +--- src/renderer/pixel_put.cpp | 4 +++- src/renderer/pixel_put.h | 4 +--- src/renderer/renderer.cpp | 4 +++- src/renderer/renderer.h | 4 +--- src/renderer/renderpass/vk_framebuffer.cpp | 4 +++- src/renderer/renderpass/vk_framebuffer.h | 4 +--- src/renderer/renderpass/vk_render_pass.cpp | 4 +++- src/renderer/renderpass/vk_render_pass.h | 4 +--- 
src/renderer/swapchain/vk_swapchain.cpp | 4 +++- src/renderer/swapchain/vk_swapchain.h | 4 +--- src/renderer/texts/font.cpp | 4 +++- src/renderer/texts/font.h | 4 +--- src/renderer/texts/font_library.cpp | 4 +++- src/renderer/texts/font_library.h | 4 +--- src/renderer/texts/text.cpp | 4 +++- src/renderer/texts/text.h | 4 +--- src/renderer/texts/text_descriptor.cpp | 4 +++- src/renderer/texts/text_descriptor.h | 4 +--- src/renderer/texts/text_library.cpp | 4 +++- src/renderer/texts/text_library.h | 4 +--- src/renderer/texts/text_manager.cpp | 4 +++- src/renderer/texts/text_manager.h | 4 +--- src/utils/combine_hash.h | 5 +++-- src/utils/dogica_ttf.h | 4 ++-- src/utils/icon_mlx.h | 4 ++-- 94 files changed, 185 insertions(+), 194 deletions(-) diff --git a/.gitignore b/.gitignore index 173af69..5227471 100644 --- a/.gitignore +++ b/.gitignore @@ -11,6 +11,7 @@ *.tmp *.ilk *.pdb +*.gch *.exe *vgcore *.gdb_history diff --git a/src/core/UUID.cpp b/src/core/UUID.cpp index 6b62b4d..744b4c9 100644 --- a/src/core/UUID.cpp +++ b/src/core/UUID.cpp @@ -6,10 +6,12 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2024/01/06 11:26:37 by maldavid #+# #+# */ -/* Updated: 2024/03/25 17:44:06 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 19:00:36 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ +#include + #include namespace mlx diff --git a/src/core/UUID.h b/src/core/UUID.h index ac94c1a..ad0507e 100644 --- a/src/core/UUID.h +++ b/src/core/UUID.h @@ -6,15 +6,13 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2024/01/06 11:13:23 by maldavid #+# #+# */ -/* Updated: 2024/03/25 17:43:58 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 19:12:53 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __MLX_UUID__ #define __MLX_UUID__ -#include - namespace mlx { class UUID diff --git a/src/core/application.cpp b/src/core/application.cpp index bfb39a7..ec9a4bd 100644 --- a/src/core/application.cpp +++ b/src/core/application.cpp @@ -6,10 +6,12 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/04 22:10:52 by maldavid #+# #+# */ -/* Updated: 2024/03/25 17:44:37 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 19:00:40 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ +#include + #include "application.h" #include #include diff --git a/src/core/application.h b/src/core/application.h index 7532e05..ff66a0f 100644 --- a/src/core/application.h +++ b/src/core/application.h @@ -6,17 +6,14 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/04 21:49:46 by maldavid #+# #+# */ -/* Updated: 2024/03/25 17:45:03 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 19:13:27 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __MLX_APPLICATION__ #define __MLX_APPLICATION__ -#include - #include - #include #include #include diff --git a/src/core/bridge.cpp b/src/core/bridge.cpp index ad3b8e2..43fbfa7 100644 --- a/src/core/bridge.cpp +++ b/src/core/bridge.cpp @@ -6,10 +6,12 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/04 17:35:20 by maldavid #+# #+# */ -/* Updated: 2024/03/25 17:44:56 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 19:00:45 by maldavid ### ########.fr */ /* */ /* 
************************************************************************** */ +#include + #include #include "errors.h" #include "application.h" diff --git a/src/core/errors.h b/src/core/errors.h index 40334e9..b740865 100644 --- a/src/core/errors.h +++ b/src/core/errors.h @@ -6,15 +6,13 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/04 17:42:32 by maldavid #+# #+# */ -/* Updated: 2024/03/25 17:45:21 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 19:13:19 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __MLX_ERRORS__ #define __MLX_ERRORS__ -#include - enum class e_kind { message, diff --git a/src/core/fps.cpp b/src/core/fps.cpp index 8da10a2..2a585e3 100644 --- a/src/core/fps.cpp +++ b/src/core/fps.cpp @@ -6,10 +6,12 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2024/01/18 14:56:17 by maldavid #+# #+# */ -/* Updated: 2024/03/25 17:45:33 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 19:00:54 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ +#include + #include namespace mlx diff --git a/src/core/fps.h b/src/core/fps.h index 332e812..6dacc77 100644 --- a/src/core/fps.h +++ b/src/core/fps.h @@ -6,15 +6,13 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2024/01/18 14:53:30 by maldavid #+# #+# */ -/* Updated: 2024/03/25 17:45:27 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 19:13:16 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __MLX_FPS__ #define __MLX_FPS__ -#include - namespace mlx { class FpsManager diff --git a/src/core/graphics.cpp b/src/core/graphics.cpp index 063a363..66e5fd3 100644 --- a/src/core/graphics.cpp +++ b/src/core/graphics.cpp @@ -6,10 +6,12 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/04/02 15:13:55 by maldavid #+# #+# */ -/* Updated: 2024/01/11 04:38:53 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 19:00:58 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ +#include + #include namespace mlx diff --git a/src/core/graphics.h b/src/core/graphics.h index 4ea3116..a23a96d 100644 --- a/src/core/graphics.h +++ b/src/core/graphics.h @@ -6,15 +6,13 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/04/02 14:49:49 by maldavid #+# #+# */ -/* Updated: 2024/03/25 17:45:49 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 19:13:11 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __MLX_GRAPHICS__ #define __MLX_GRAPHICS__ -#include - #include #include #include diff --git a/src/core/memory.cpp b/src/core/memory.cpp index 708ea73..93ada08 100644 --- a/src/core/memory.cpp +++ b/src/core/memory.cpp @@ -6,10 +6,12 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/12/07 16:32:01 by kbz_8 #+# #+# */ -/* Updated: 2024/03/25 17:46:03 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 19:01:02 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ +#include + #include #include diff --git a/src/core/memory.h b/src/core/memory.h index 6d61b88..1e08e1b 100644 --- a/src/core/memory.h +++ b/src/core/memory.h @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 
2023/12/07 16:31:51 by kbz_8 #+# #+# */ -/* Updated: 2023/12/11 19:47:13 by kbz_8 ### ########.fr */ +/* Updated: 2024/03/25 19:13:05 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -14,8 +14,6 @@ #define __MLX_MEMORY__ #include -#include -#include namespace mlx { diff --git a/src/core/profiler.cpp b/src/core/profiler.cpp index 43bc96d..5ef15cd 100644 --- a/src/core/profiler.cpp +++ b/src/core/profiler.cpp @@ -6,10 +6,12 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2024/01/10 13:56:21 by maldavid #+# #+# */ -/* Updated: 2024/01/10 18:17:35 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 19:01:06 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ +#include + #include #include #include diff --git a/src/core/profiler.h b/src/core/profiler.h index dc75705..e5f5c41 100644 --- a/src/core/profiler.h +++ b/src/core/profiler.h @@ -6,15 +6,13 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2024/01/10 13:35:45 by maldavid #+# #+# */ -/* Updated: 2024/03/25 17:46:21 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 19:12:57 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __MLX_PROFILER__ #define __MLX_PROFILER__ -#include - #include namespace mlx diff --git a/src/platform/inputs.cpp b/src/platform/inputs.cpp index 40adb8f..fab695a 100644 --- a/src/platform/inputs.cpp +++ b/src/platform/inputs.cpp @@ -6,10 +6,12 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/05 16:30:19 by maldavid #+# #+# */ -/* Updated: 2024/02/23 22:27:30 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 19:01:09 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ +#include + #include "inputs.h" #include #include diff --git a/src/platform/inputs.h b/src/platform/inputs.h index fc12254..39d4e0c 100644 --- a/src/platform/inputs.h +++ b/src/platform/inputs.h @@ -6,15 +6,13 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/05 16:27:35 by maldavid #+# #+# */ -/* Updated: 2024/03/25 17:47:03 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 19:12:44 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __MLX_INPUTS__ #define __MLX_INPUTS__ -#include - #include "window.h" namespace mlx diff --git a/src/platform/window.cpp b/src/platform/window.cpp index 65e94d9..5ab88b3 100644 --- a/src/platform/window.cpp +++ b/src/platform/window.cpp @@ -6,10 +6,12 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/04 17:36:44 by maldavid #+# #+# */ -/* Updated: 2024/01/16 07:59:21 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 19:01:14 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ +#include + #include #include #include diff --git a/src/platform/window.h b/src/platform/window.h index 94f4f00..754151b 100644 --- a/src/platform/window.h +++ b/src/platform/window.h @@ -6,15 +6,13 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/04 21:53:12 by maldavid #+# #+# */ -/* Updated: 2024/03/25 17:47:10 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 19:12:46 by maldavid ### ########.fr */ /* */ /* 
************************************************************************** */ #ifndef __MLX_WINDOW__ #define __MLX_WINDOW__ -#include - namespace mlx { class MLX_Window diff --git a/src/renderer/buffers/vk_buffer.cpp b/src/renderer/buffers/vk_buffer.cpp index 421543a..1b704fe 100644 --- a/src/renderer/buffers/vk_buffer.cpp +++ b/src/renderer/buffers/vk_buffer.cpp @@ -6,10 +6,12 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/08 18:55:57 by maldavid #+# #+# */ -/* Updated: 2024/03/25 17:47:35 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 19:01:18 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ +#include + #include "vk_buffer.h" #include #include diff --git a/src/renderer/buffers/vk_buffer.h b/src/renderer/buffers/vk_buffer.h index 12d9533..3d7d149 100644 --- a/src/renderer/buffers/vk_buffer.h +++ b/src/renderer/buffers/vk_buffer.h @@ -6,15 +6,13 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/06 23:18:52 by maldavid #+# #+# */ -/* Updated: 2024/03/25 17:47:28 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 19:12:39 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __MLX_VK_BUFFER__ #define __MLX_VK_BUFFER__ -#include - #include #include diff --git a/src/renderer/buffers/vk_ibo.h b/src/renderer/buffers/vk_ibo.h index e317b4e..580fa2e 100644 --- a/src/renderer/buffers/vk_ibo.h +++ b/src/renderer/buffers/vk_ibo.h @@ -6,15 +6,13 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/01/25 15:05:05 by maldavid #+# #+# */ -/* Updated: 2024/03/25 17:47:55 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 19:12:35 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __VK_IBO__ #define __VK_IBO__ -#include - #include "vk_buffer.h" #include diff --git a/src/renderer/buffers/vk_ubo.h b/src/renderer/buffers/vk_ubo.h index 0b8b53f..0c2c091 100644 --- a/src/renderer/buffers/vk_ubo.h +++ b/src/renderer/buffers/vk_ubo.h @@ -6,15 +6,13 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/06 18:45:29 by maldavid #+# #+# */ -/* Updated: 2024/03/25 17:48:14 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 19:12:32 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __MLX_VK_UBO__ #define __MLX_VK_UBO__ -#include - #include "vk_buffer.h" namespace mlx diff --git a/src/renderer/buffers/vk_vbo.cpp b/src/renderer/buffers/vk_vbo.cpp index 5dc857d..418ffeb 100644 --- a/src/renderer/buffers/vk_vbo.cpp +++ b/src/renderer/buffers/vk_vbo.cpp @@ -6,10 +6,12 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/06 18:28:08 by maldavid #+# #+# */ -/* Updated: 2024/03/25 17:48:20 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 19:01:25 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ +#include + #include "vk_vbo.h" namespace mlx diff --git a/src/renderer/command/cmd_manager.cpp b/src/renderer/command/cmd_manager.cpp index 1e6118e..9de7d82 100644 --- a/src/renderer/command/cmd_manager.cpp +++ b/src/renderer/command/cmd_manager.cpp @@ -6,10 +6,12 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/04/02 17:50:52 by maldavid #+# #+# */ -/* Updated: 2023/12/17 20:10:45 by maldavid ### ########.fr */ 
+/* Updated: 2024/03/25 19:01:30 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ +#include + #include namespace mlx diff --git a/src/renderer/command/cmd_manager.h b/src/renderer/command/cmd_manager.h index a623aa2..abac258 100644 --- a/src/renderer/command/cmd_manager.h +++ b/src/renderer/command/cmd_manager.h @@ -6,15 +6,13 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/04/02 17:48:52 by maldavid #+# #+# */ -/* Updated: 2024/03/25 17:48:39 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 19:12:23 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __MLX_COMMAND_MANAGER__ #define __MLX_COMMAND_MANAGER__ -#include - #include #include #include diff --git a/src/renderer/command/single_time_cmd_manager.cpp b/src/renderer/command/single_time_cmd_manager.cpp index 1ba8ad6..7bc96f1 100644 --- a/src/renderer/command/single_time_cmd_manager.cpp +++ b/src/renderer/command/single_time_cmd_manager.cpp @@ -6,10 +6,12 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/12/15 19:57:49 by maldavid #+# #+# */ -/* Updated: 2024/03/25 17:48:44 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 19:01:33 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ +#include + #include #include diff --git a/src/renderer/command/single_time_cmd_manager.h b/src/renderer/command/single_time_cmd_manager.h index f95c8f8..1432ce7 100644 --- a/src/renderer/command/single_time_cmd_manager.h +++ b/src/renderer/command/single_time_cmd_manager.h @@ -6,15 +6,13 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/12/15 18:25:57 by maldavid #+# #+# */ -/* Updated: 2024/03/25 17:48:51 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 19:12:20 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __MLX_SINGLE_TIME_CMD_MANAGER__ #define __MLX_SINGLE_TIME_CMD_MANAGER__ -#include - #include #include diff --git a/src/renderer/command/vk_cmd_buffer.cpp b/src/renderer/command/vk_cmd_buffer.cpp index d1b208b..a2d0a25 100644 --- a/src/renderer/command/vk_cmd_buffer.cpp +++ b/src/renderer/command/vk_cmd_buffer.cpp @@ -6,10 +6,12 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/06 18:26:06 by maldavid #+# #+# */ -/* Updated: 2024/02/25 08:02:26 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 19:01:37 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ +#include + #include "vk_cmd_buffer.h" #include #include diff --git a/src/renderer/command/vk_cmd_buffer.h b/src/renderer/command/vk_cmd_buffer.h index 10755fc..8af1f2e 100644 --- a/src/renderer/command/vk_cmd_buffer.h +++ b/src/renderer/command/vk_cmd_buffer.h @@ -6,15 +6,13 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/06 18:25:42 by maldavid #+# #+# */ -/* Updated: 2024/03/25 17:49:10 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 19:12:17 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __MLX_VK_CMD_BUFFER__ #define __MLX_VK_CMD_BUFFER__ -#include - #include namespace mlx diff --git a/src/renderer/command/vk_cmd_pool.cpp b/src/renderer/command/vk_cmd_pool.cpp index 441d45f..fbce122 100644 --- 
a/src/renderer/command/vk_cmd_pool.cpp +++ b/src/renderer/command/vk_cmd_pool.cpp @@ -6,10 +6,12 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/06 18:24:33 by maldavid #+# #+# */ -/* Updated: 2024/03/14 17:23:20 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 19:01:41 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ +#include + #include "vk_cmd_pool.h" #include diff --git a/src/renderer/command/vk_cmd_pool.h b/src/renderer/command/vk_cmd_pool.h index 04b4a93..3958739 100644 --- a/src/renderer/command/vk_cmd_pool.h +++ b/src/renderer/command/vk_cmd_pool.h @@ -6,15 +6,13 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/06 18:24:12 by maldavid #+# #+# */ -/* Updated: 2024/03/25 17:49:19 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 19:12:14 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __MLX_VK_CMD_POOL__ #define __MLX_VK_CMD_POOL__ -#include - namespace mlx { class CmdPool diff --git a/src/renderer/core/cmd_resource.h b/src/renderer/core/cmd_resource.h index 8f0d4f7..47ec17e 100644 --- a/src/renderer/core/cmd_resource.h +++ b/src/renderer/core/cmd_resource.h @@ -6,15 +6,13 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/12/16 20:44:29 by maldavid #+# #+# */ -/* Updated: 2024/03/25 17:49:25 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 19:12:08 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __MLX_COMMAND_RESOURCE__ #define __MLX_COMMAND_RESOURCE__ -#include - #include namespace mlx diff --git a/src/renderer/core/drawable_resource.h b/src/renderer/core/drawable_resource.h index a6ab54f..6dbf67c 100644 --- a/src/renderer/core/drawable_resource.h +++ b/src/renderer/core/drawable_resource.h @@ -6,15 +6,13 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2024/01/10 21:00:37 by maldavid #+# #+# */ -/* Updated: 2024/03/25 17:49:33 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 19:12:05 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __MLX_DRAWABLE_RESOURCE__ #define __MLX_DRAWABLE_RESOURCE__ -#include - namespace mlx { class DrawableResource diff --git a/src/renderer/core/memory.cpp b/src/renderer/core/memory.cpp index c59d1f4..e9942ee 100644 --- a/src/renderer/core/memory.cpp +++ b/src/renderer/core/memory.cpp @@ -6,7 +6,7 @@ /* By: kbz_8 +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/10/20 22:02:37 by kbz_8 #+# #+# */ -/* Updated: 2024/03/25 18:10:10 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 19:01:57 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -37,6 +37,8 @@ #include #endif +#include + #include #include #include diff --git a/src/renderer/core/memory.h b/src/renderer/core/memory.h index b82063a..9703213 100644 --- a/src/renderer/core/memory.h +++ b/src/renderer/core/memory.h @@ -6,15 +6,13 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/10/20 02:13:03 by maldavid #+# #+# */ -/* Updated: 2024/03/25 17:49:47 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 19:12:02 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __MLX_VK_MEMORY__ #define __MLX_VK_MEMORY__ -#include - 
namespace mlx { class GPUallocator diff --git a/src/renderer/core/render_core.cpp b/src/renderer/core/render_core.cpp index 4162cda..6c14b53 100644 --- a/src/renderer/core/render_core.cpp +++ b/src/renderer/core/render_core.cpp @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/12/17 23:33:34 by maldavid #+# #+# */ -/* Updated: 2024/03/25 18:04:28 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 19:02:06 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -14,6 +14,8 @@ #define VOLK_IMPLEMENTATION #include +#include + #include #include diff --git a/src/renderer/core/render_core.h b/src/renderer/core/render_core.h index 88a319e..f2154c8 100644 --- a/src/renderer/core/render_core.h +++ b/src/renderer/core/render_core.h @@ -6,15 +6,13 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/08 19:16:32 by maldavid #+# #+# */ -/* Updated: 2024/03/25 17:49:59 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 19:11:59 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __MLX_RENDER_CORE__ #define __MLX_RENDER_CORE__ -#include - #include #include #include diff --git a/src/renderer/core/vk_device.cpp b/src/renderer/core/vk_device.cpp index 1a7b602..910b904 100644 --- a/src/renderer/core/vk_device.cpp +++ b/src/renderer/core/vk_device.cpp @@ -6,10 +6,12 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/08 19:14:29 by maldavid #+# #+# */ -/* Updated: 2024/03/25 17:50:26 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 19:02:11 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ +#include + #include "render_core.h" namespace mlx diff --git a/src/renderer/core/vk_device.h b/src/renderer/core/vk_device.h index 7fb4525..2352f03 100644 --- a/src/renderer/core/vk_device.h +++ b/src/renderer/core/vk_device.h @@ -6,15 +6,13 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/08 19:13:42 by maldavid #+# #+# */ -/* Updated: 2024/03/25 17:50:32 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 19:11:56 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __MLX_VK_DEVICE__ #define __MLX_VK_DEVICE__ -#include - namespace mlx { class Device diff --git a/src/renderer/core/vk_fence.cpp b/src/renderer/core/vk_fence.cpp index f63df12..db3d8e2 100644 --- a/src/renderer/core/vk_fence.cpp +++ b/src/renderer/core/vk_fence.cpp @@ -6,10 +6,12 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/04/02 17:53:06 by maldavid #+# #+# */ -/* Updated: 2024/02/25 08:02:45 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 19:02:14 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ +#include + #include #include diff --git a/src/renderer/core/vk_fence.h b/src/renderer/core/vk_fence.h index 0237010..dd4a5d0 100644 --- a/src/renderer/core/vk_fence.h +++ b/src/renderer/core/vk_fence.h @@ -6,15 +6,13 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/04/02 17:52:09 by maldavid #+# #+# */ -/* Updated: 2024/03/25 17:50:52 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 19:11:53 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __MLX_VK_FENCE__ 
#define __MLX_VK_FENCE__ -#include - namespace mlx { class Fence diff --git a/src/renderer/core/vk_instance.cpp b/src/renderer/core/vk_instance.cpp index 8b5b18a..92b6b36 100644 --- a/src/renderer/core/vk_instance.cpp +++ b/src/renderer/core/vk_instance.cpp @@ -6,10 +6,12 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/08 19:04:21 by maldavid #+# #+# */ -/* Updated: 2024/03/25 17:50:59 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 19:02:18 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ +#include + #include "vk_instance.h" #include "render_core.h" #include diff --git a/src/renderer/core/vk_instance.h b/src/renderer/core/vk_instance.h index e85e165..da52679 100644 --- a/src/renderer/core/vk_instance.h +++ b/src/renderer/core/vk_instance.h @@ -6,15 +6,13 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/08 19:03:04 by maldavid #+# #+# */ -/* Updated: 2024/03/25 17:51:09 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 19:11:50 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __MLX_VK_INSTANCE__ #define __MLX_VK_INSTANCE__ -#include - namespace mlx { class Instance diff --git a/src/renderer/core/vk_queues.cpp b/src/renderer/core/vk_queues.cpp index 8d712d5..6f89926 100644 --- a/src/renderer/core/vk_queues.cpp +++ b/src/renderer/core/vk_queues.cpp @@ -6,10 +6,12 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/08 19:02:42 by maldavid #+# #+# */ -/* Updated: 2024/03/25 17:51:35 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 19:02:20 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ +#include + #include "render_core.h" namespace mlx diff --git a/src/renderer/core/vk_queues.h b/src/renderer/core/vk_queues.h index 5dda2fd..832bc50 100644 --- a/src/renderer/core/vk_queues.h +++ b/src/renderer/core/vk_queues.h @@ -6,15 +6,13 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/08 19:01:49 by maldavid #+# #+# */ -/* Updated: 2024/03/25 17:51:31 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 19:11:46 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __MLX_VK_QUEUES__ #define __MLX_VK_QUEUES__ -#include - #include namespace mlx diff --git a/src/renderer/core/vk_semaphore.cpp b/src/renderer/core/vk_semaphore.cpp index fb8d2ab..d8d8dd3 100644 --- a/src/renderer/core/vk_semaphore.cpp +++ b/src/renderer/core/vk_semaphore.cpp @@ -6,10 +6,12 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/08 19:01:08 by maldavid #+# #+# */ -/* Updated: 2024/03/14 17:02:36 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 19:02:25 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ +#include + #include "vk_semaphore.h" #include "render_core.h" #include diff --git a/src/renderer/core/vk_semaphore.h b/src/renderer/core/vk_semaphore.h index 8e568ba..c022e8b 100644 --- a/src/renderer/core/vk_semaphore.h +++ b/src/renderer/core/vk_semaphore.h @@ -6,15 +6,13 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/08 18:59:38 by maldavid #+# #+# */ -/* Updated: 2024/03/25 17:51:47 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 19:11:43 by maldavid ### ########.fr */ /* */ /* 
************************************************************************** */ #ifndef __MLX_VK_SEMAPHORE__ #define __MLX_VK_SEMAPHORE__ -#include - namespace mlx { class Semaphore diff --git a/src/renderer/core/vk_surface.cpp b/src/renderer/core/vk_surface.cpp index ebe2ed2..e1708a6 100644 --- a/src/renderer/core/vk_surface.cpp +++ b/src/renderer/core/vk_surface.cpp @@ -6,10 +6,11 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/08 18:58:49 by maldavid #+# #+# */ -/* Updated: 2024/03/25 17:51:54 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 19:00:11 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ +#include #include "render_core.h" #include #include diff --git a/src/renderer/core/vk_surface.h b/src/renderer/core/vk_surface.h index 4f8cb9f..d5ff3de 100644 --- a/src/renderer/core/vk_surface.h +++ b/src/renderer/core/vk_surface.h @@ -6,15 +6,13 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/08 18:57:55 by maldavid #+# #+# */ -/* Updated: 2024/03/25 17:52:02 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 19:10:59 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __MLX_VK_SURFACE__ #define __MLX_VK_SURFACE__ -#include - namespace mlx { class Surface diff --git a/src/renderer/core/vk_validation_layers.cpp b/src/renderer/core/vk_validation_layers.cpp index 9c119fd..ffecd3d 100644 --- a/src/renderer/core/vk_validation_layers.cpp +++ b/src/renderer/core/vk_validation_layers.cpp @@ -6,10 +6,11 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/12/19 14:05:25 by maldavid #+# #+# */ -/* Updated: 2024/03/25 17:52:08 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 19:00:06 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ +#include #include "render_core.h" #include "vulkan/vulkan_core.h" diff --git a/src/renderer/core/vk_validation_layers.h b/src/renderer/core/vk_validation_layers.h index 2d0ff13..971542d 100644 --- a/src/renderer/core/vk_validation_layers.h +++ b/src/renderer/core/vk_validation_layers.h @@ -6,15 +6,13 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/12/19 14:04:25 by maldavid #+# #+# */ -/* Updated: 2024/03/25 17:52:19 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 19:00:00 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __VK_VALIDATION_LAYERS__ #define __VK_VALIDATION_LAYERS__ -#include - namespace mlx { class ValidationLayers diff --git a/src/renderer/descriptors/descriptor_pool_manager.cpp b/src/renderer/descriptors/descriptor_pool_manager.cpp index 4c727b8..4279527 100644 --- a/src/renderer/descriptors/descriptor_pool_manager.cpp +++ b/src/renderer/descriptors/descriptor_pool_manager.cpp @@ -6,10 +6,12 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2024/01/20 06:51:47 by maldavid #+# #+# */ -/* Updated: 2024/01/20 08:18:27 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 19:02:29 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ +#include + #include #include diff --git a/src/renderer/descriptors/descriptor_pool_manager.h b/src/renderer/descriptors/descriptor_pool_manager.h index d4ff428..aea5b0d 100644 --- a/src/renderer/descriptors/descriptor_pool_manager.h +++ 
b/src/renderer/descriptors/descriptor_pool_manager.h @@ -6,15 +6,13 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2024/01/20 06:26:26 by maldavid #+# #+# */ -/* Updated: 2024/03/25 17:52:32 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 19:10:12 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __MLX_DESCRIPTOR_POOL_MANAGER__ #define __MLX_DESCRIPTOR_POOL_MANAGER__ -#include - #include namespace mlx diff --git a/src/renderer/descriptors/vk_descriptor_pool.cpp b/src/renderer/descriptors/vk_descriptor_pool.cpp index 04f4865..87f44ce 100644 --- a/src/renderer/descriptors/vk_descriptor_pool.cpp +++ b/src/renderer/descriptors/vk_descriptor_pool.cpp @@ -6,10 +6,12 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/01/23 18:34:23 by maldavid #+# #+# */ -/* Updated: 2024/01/20 07:40:40 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 19:02:37 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ +#include + #include "vk_descriptor_pool.h" #include #include diff --git a/src/renderer/descriptors/vk_descriptor_pool.h b/src/renderer/descriptors/vk_descriptor_pool.h index b303b1c..512668a 100644 --- a/src/renderer/descriptors/vk_descriptor_pool.h +++ b/src/renderer/descriptors/vk_descriptor_pool.h @@ -6,15 +6,13 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/01/23 18:32:43 by maldavid #+# #+# */ -/* Updated: 2024/03/25 17:52:39 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 19:10:09 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __VK_DESCRIPTOR_POOL__ #define __VK_DESCRIPTOR_POOL__ -#include - namespace mlx { class DescriptorPool diff --git a/src/renderer/descriptors/vk_descriptor_set.cpp b/src/renderer/descriptors/vk_descriptor_set.cpp index 819ba25..07a5841 100644 --- a/src/renderer/descriptors/vk_descriptor_set.cpp +++ b/src/renderer/descriptors/vk_descriptor_set.cpp @@ -6,10 +6,12 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/01/23 18:40:44 by maldavid #+# #+# */ -/* Updated: 2024/01/20 08:18:07 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 19:02:43 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ +#include + #include "vk_descriptor_set.h" #include "renderer/core/render_core.h" #include "vk_descriptor_pool.h" diff --git a/src/renderer/descriptors/vk_descriptor_set.h b/src/renderer/descriptors/vk_descriptor_set.h index 61fd7a4..2ad52c4 100644 --- a/src/renderer/descriptors/vk_descriptor_set.h +++ b/src/renderer/descriptors/vk_descriptor_set.h @@ -6,15 +6,13 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/01/23 18:39:36 by maldavid #+# #+# */ -/* Updated: 2024/03/25 17:52:57 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 19:10:06 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __VK_DESCRIPTOR_SET__ #define __VK_DESCRIPTOR_SET__ -#include - #include namespace mlx diff --git a/src/renderer/descriptors/vk_descriptor_set_layout.cpp b/src/renderer/descriptors/vk_descriptor_set_layout.cpp index 12b98d8..4ceba14 100644 --- a/src/renderer/descriptors/vk_descriptor_set_layout.cpp +++ b/src/renderer/descriptors/vk_descriptor_set_layout.cpp @@ -6,10 +6,12 @@ /* By: maldavid +#+ +:+ +#+ */ /* 
+#+#+#+#+#+ +#+ */ /* Created: 2023/01/23 18:37:28 by maldavid #+# #+# */ -/* Updated: 2024/01/03 13:14:58 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 19:02:47 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ +#include + #include "vk_descriptor_set_layout.h" #include diff --git a/src/renderer/descriptors/vk_descriptor_set_layout.h b/src/renderer/descriptors/vk_descriptor_set_layout.h index 13cd5a0..c7d2d45 100644 --- a/src/renderer/descriptors/vk_descriptor_set_layout.h +++ b/src/renderer/descriptors/vk_descriptor_set_layout.h @@ -6,15 +6,13 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/01/23 18:36:22 by maldavid #+# #+# */ -/* Updated: 2024/03/25 17:53:05 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 19:10:03 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __VK_DESCRIPTOR_SET_LAYOUT__ #define __VK_DESCRIPTOR_SET_LAYOUT__ -#include - namespace mlx { class DescriptorSetLayout diff --git a/src/renderer/images/texture.cpp b/src/renderer/images/texture.cpp index caa5118..5037ac2 100644 --- a/src/renderer/images/texture.cpp +++ b/src/renderer/images/texture.cpp @@ -6,19 +6,21 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/03/31 18:03:35 by maldavid #+# #+# */ -/* Updated: 2024/03/25 17:53:37 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 19:03:04 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ +#define STB_IMAGE_IMPLEMENTATION +#include + +#include + #include #include #include #include #include -#define STB_IMAGE_IMPLEMENTATION -#include - #ifdef IMAGE_OPTIMIZED #define TILING VK_IMAGE_TILING_OPTIMAL #else diff --git a/src/renderer/images/texture.h b/src/renderer/images/texture.h index aaa91ff..8d497db 100644 --- a/src/renderer/images/texture.h +++ b/src/renderer/images/texture.h @@ -6,15 +6,13 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/03/08 02:24:58 by maldavid #+# #+# */ -/* Updated: 2024/03/25 17:53:30 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 19:09:56 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __MLX_TEXTURE__ #define __MLX_TEXTURE__ -#include - #include #include #include diff --git a/src/renderer/images/texture_atlas.cpp b/src/renderer/images/texture_atlas.cpp index fc06315..cace1c1 100644 --- a/src/renderer/images/texture_atlas.cpp +++ b/src/renderer/images/texture_atlas.cpp @@ -6,10 +6,12 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/04/07 16:40:09 by maldavid #+# #+# */ -/* Updated: 2024/01/18 10:18:08 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 19:03:13 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ +#include + #include #ifdef IMAGE_OPTIMIZED diff --git a/src/renderer/images/texture_atlas.h b/src/renderer/images/texture_atlas.h index 7717fdd..e2e310b 100644 --- a/src/renderer/images/texture_atlas.h +++ b/src/renderer/images/texture_atlas.h @@ -6,15 +6,13 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/04/07 16:36:33 by maldavid #+# #+# */ -/* Updated: 2024/03/25 17:53:50 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 19:09:50 by maldavid ### ########.fr */ /* */ /* 
************************************************************************** */ #ifndef __MLX_TEXTURE_ATLAS__ #define __MLX_TEXTURE_ATLAS__ -#include - #include namespace mlx diff --git a/src/renderer/images/texture_manager.h b/src/renderer/images/texture_manager.h index 098f5f4..c667cbd 100644 --- a/src/renderer/images/texture_manager.h +++ b/src/renderer/images/texture_manager.h @@ -6,15 +6,13 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2024/01/11 00:56:15 by maldavid #+# #+# */ -/* Updated: 2024/03/25 17:54:02 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 19:09:45 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __MLX_TEXTURE_MANAGER__ #define __MLX_TEXTURE_MANAGER__ -#include - #include #include diff --git a/src/renderer/images/vk_image.cpp b/src/renderer/images/vk_image.cpp index ed2549b..28f9054 100644 --- a/src/renderer/images/vk_image.cpp +++ b/src/renderer/images/vk_image.cpp @@ -6,10 +6,12 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/01/25 11:59:07 by maldavid #+# #+# */ -/* Updated: 2024/03/14 17:28:25 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 19:03:27 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ +#include + #include "vk_image.h" #include #include diff --git a/src/renderer/images/vk_image.h b/src/renderer/images/vk_image.h index d103173..7319917 100644 --- a/src/renderer/images/vk_image.h +++ b/src/renderer/images/vk_image.h @@ -6,15 +6,13 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/01/25 11:54:21 by maldavid #+# #+# */ -/* Updated: 2024/03/25 17:54:12 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 19:09:40 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __MLX_VK_IMAGE__ #define __MLX_VK_IMAGE__ -#include - #include #include #include diff --git a/src/renderer/pipeline/pipeline.cpp b/src/renderer/pipeline/pipeline.cpp index 311d6b2..1722c21 100644 --- a/src/renderer/pipeline/pipeline.cpp +++ b/src/renderer/pipeline/pipeline.cpp @@ -6,10 +6,12 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/12/18 21:27:38 by maldavid #+# #+# */ -/* Updated: 2024/03/14 17:05:21 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 19:03:31 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ +#include + #include "pipeline.h" #include #include diff --git a/src/renderer/pipeline/pipeline.h b/src/renderer/pipeline/pipeline.h index 36155d0..304a58d 100644 --- a/src/renderer/pipeline/pipeline.h +++ b/src/renderer/pipeline/pipeline.h @@ -6,15 +6,13 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/12/18 21:23:52 by maldavid #+# #+# */ -/* Updated: 2024/03/25 17:54:23 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 19:09:01 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __PIPELINE__ #define __PIPELINE__ -#include - #include namespace mlx diff --git a/src/renderer/pixel_put.cpp b/src/renderer/pixel_put.cpp index e70b321..32c0ae3 100644 --- a/src/renderer/pixel_put.cpp +++ b/src/renderer/pixel_put.cpp @@ -6,10 +6,12 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/03/31 15:14:50 by maldavid #+# #+# */ -/* Updated: 2024/03/25 17:57:56 by maldavid ### 
########.fr */ +/* Updated: 2024/03/25 19:03:45 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ +#include + #include #include diff --git a/src/renderer/pixel_put.h b/src/renderer/pixel_put.h index 58876b4..b084d82 100644 --- a/src/renderer/pixel_put.h +++ b/src/renderer/pixel_put.h @@ -6,15 +6,13 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/03/31 13:18:50 by maldavid #+# #+# */ -/* Updated: 2024/03/25 17:57:52 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 19:07:56 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __MLX_PIXEL_PUT__ #define __MLX_PIXEL_PUT__ -#include - #include #include diff --git a/src/renderer/renderer.cpp b/src/renderer/renderer.cpp index c1c2cfe..85102ef 100644 --- a/src/renderer/renderer.cpp +++ b/src/renderer/renderer.cpp @@ -6,10 +6,12 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/12/18 17:25:16 by maldavid #+# #+# */ -/* Updated: 2024/03/14 16:34:43 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 19:03:49 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ +#include + #include #include #include diff --git a/src/renderer/renderer.h b/src/renderer/renderer.h index 65d8277..c935a1c 100644 --- a/src/renderer/renderer.h +++ b/src/renderer/renderer.h @@ -6,15 +6,13 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/12/18 17:14:45 by maldavid #+# #+# */ -/* Updated: 2024/03/25 17:58:04 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 19:07:52 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __RENDERER__ #define __RENDERER__ -#include - #include #include #include diff --git a/src/renderer/renderpass/vk_framebuffer.cpp b/src/renderer/renderpass/vk_framebuffer.cpp index 585e8e0..c4f100b 100644 --- a/src/renderer/renderpass/vk_framebuffer.cpp +++ b/src/renderer/renderpass/vk_framebuffer.cpp @@ -6,10 +6,12 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/06 18:18:06 by maldavid #+# #+# */ -/* Updated: 2024/03/25 17:54:45 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 19:03:35 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ +#include + #include #include #include diff --git a/src/renderer/renderpass/vk_framebuffer.h b/src/renderer/renderpass/vk_framebuffer.h index 0ddd24e..e12be0d 100644 --- a/src/renderer/renderpass/vk_framebuffer.h +++ b/src/renderer/renderpass/vk_framebuffer.h @@ -6,15 +6,13 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/06 18:19:44 by maldavid #+# #+# */ -/* Updated: 2024/03/25 17:54:39 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 19:08:37 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __MLX_VK_FRAMEBUFFER__ #define __MLX_VK_FRAMEBUFFER__ -#include - namespace mlx { class FrameBuffer diff --git a/src/renderer/renderpass/vk_render_pass.cpp b/src/renderer/renderpass/vk_render_pass.cpp index e5c8470..352f34f 100644 --- a/src/renderer/renderpass/vk_render_pass.cpp +++ b/src/renderer/renderpass/vk_render_pass.cpp @@ -6,10 +6,12 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/06 18:21:36 by maldavid #+# #+# */ -/* Updated: 2024/03/14 
17:06:01 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 19:03:37 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ +#include + #include "vk_render_pass.h" #include #include diff --git a/src/renderer/renderpass/vk_render_pass.h b/src/renderer/renderpass/vk_render_pass.h index 58ecbfc..724c920 100644 --- a/src/renderer/renderpass/vk_render_pass.h +++ b/src/renderer/renderpass/vk_render_pass.h @@ -6,15 +6,13 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/06 18:22:00 by maldavid #+# #+# */ -/* Updated: 2024/03/25 17:55:01 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 19:08:30 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __MLX_VK_RENDER_PASS__ #define __MLX_VK_RENDER_PASS__ -#include - namespace mlx { class RenderPass diff --git a/src/renderer/swapchain/vk_swapchain.cpp b/src/renderer/swapchain/vk_swapchain.cpp index 6ab48aa..223f8bc 100644 --- a/src/renderer/swapchain/vk_swapchain.cpp +++ b/src/renderer/swapchain/vk_swapchain.cpp @@ -6,10 +6,12 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/06 18:22:28 by maldavid #+# #+# */ -/* Updated: 2024/03/25 17:55:30 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 19:03:41 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ +#include + #include #include #include diff --git a/src/renderer/swapchain/vk_swapchain.h b/src/renderer/swapchain/vk_swapchain.h index b31fa24..d68072e 100644 --- a/src/renderer/swapchain/vk_swapchain.h +++ b/src/renderer/swapchain/vk_swapchain.h @@ -6,15 +6,13 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/06 18:23:27 by maldavid #+# #+# */ -/* Updated: 2024/03/25 17:55:25 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 19:08:26 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __MLX_VK_SWAPCHAIN__ #define __MLX_VK_SWAPCHAIN__ -#include - #include namespace mlx diff --git a/src/renderer/texts/font.cpp b/src/renderer/texts/font.cpp index 4d5397a..9ebec8a 100644 --- a/src/renderer/texts/font.cpp +++ b/src/renderer/texts/font.cpp @@ -6,10 +6,12 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/12/11 22:06:09 by kbz_8 #+# #+# */ -/* Updated: 2024/03/25 17:55:59 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 19:03:54 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ +#include + #include #include #include diff --git a/src/renderer/texts/font.h b/src/renderer/texts/font.h index a64bf7a..c1c48df 100644 --- a/src/renderer/texts/font.h +++ b/src/renderer/texts/font.h @@ -6,15 +6,13 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/12/11 21:17:04 by kbz_8 #+# #+# */ -/* Updated: 2024/03/25 17:55:52 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 19:08:21 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __MLX_FONT__ #define __MLX_FONT__ -#include - #include #include diff --git a/src/renderer/texts/font_library.cpp b/src/renderer/texts/font_library.cpp index 87b4f12..3265bbc 100644 --- a/src/renderer/texts/font_library.cpp +++ b/src/renderer/texts/font_library.cpp @@ -6,10 +6,12 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* 
Created: 2024/01/18 09:28:14 by maldavid #+# #+# */ -/* Updated: 2024/03/25 17:56:09 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 19:03:57 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ +#include + #include #include #include diff --git a/src/renderer/texts/font_library.h b/src/renderer/texts/font_library.h index 3de3819..ddd15ad 100644 --- a/src/renderer/texts/font_library.h +++ b/src/renderer/texts/font_library.h @@ -6,15 +6,13 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2024/01/18 09:26:03 by maldavid #+# #+# */ -/* Updated: 2024/03/25 17:56:19 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 19:08:18 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __MLX_FONT_LIBRARY__ #define __MLX_FONT_LIBRARY__ -#include - #include #include #include diff --git a/src/renderer/texts/text.cpp b/src/renderer/texts/text.cpp index a7696dd..f5e3687 100644 --- a/src/renderer/texts/text.cpp +++ b/src/renderer/texts/text.cpp @@ -6,10 +6,12 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2024/01/11 00:11:56 by maldavid #+# #+# */ -/* Updated: 2024/03/25 16:13:08 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 19:04:01 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ +#include + #include #include #include diff --git a/src/renderer/texts/text.h b/src/renderer/texts/text.h index 00d446b..c30bbb9 100644 --- a/src/renderer/texts/text.h +++ b/src/renderer/texts/text.h @@ -6,15 +6,13 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2024/01/11 00:09:04 by maldavid #+# #+# */ -/* Updated: 2024/03/25 17:56:34 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 19:08:15 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __MLX_TEXT__ #define __MLX_TEXT__ -#include - #include #include #include diff --git a/src/renderer/texts/text_descriptor.cpp b/src/renderer/texts/text_descriptor.cpp index f5d7f14..4637bd7 100644 --- a/src/renderer/texts/text_descriptor.cpp +++ b/src/renderer/texts/text_descriptor.cpp @@ -6,10 +6,12 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2024/01/11 00:23:11 by maldavid #+# #+# */ -/* Updated: 2024/03/25 16:13:48 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 19:04:52 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ +#include + #include #include #include diff --git a/src/renderer/texts/text_descriptor.h b/src/renderer/texts/text_descriptor.h index cc1374b..25a4335 100644 --- a/src/renderer/texts/text_descriptor.h +++ b/src/renderer/texts/text_descriptor.h @@ -6,15 +6,13 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2024/01/11 00:13:34 by maldavid #+# #+# */ -/* Updated: 2024/03/25 17:56:49 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 19:08:11 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __MLX_TEXT_DESCRIPTOR__ #define __MLX_TEXT_DESCRIPTOR__ -#include - #include #include #include diff --git a/src/renderer/texts/text_library.cpp b/src/renderer/texts/text_library.cpp index 74f7f85..9ed7d4c 100644 --- a/src/renderer/texts/text_library.cpp +++ b/src/renderer/texts/text_library.cpp @@ -6,10 +6,12 @@ /* By: maldavid +#+ +:+ 
+#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/04/10 11:59:57 by maldavid #+# #+# */ -/* Updated: 2024/03/25 17:57:06 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 19:05:09 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ +#include + #include #include #include diff --git a/src/renderer/texts/text_library.h b/src/renderer/texts/text_library.h index 9d8d2f8..598cef9 100644 --- a/src/renderer/texts/text_library.h +++ b/src/renderer/texts/text_library.h @@ -6,15 +6,13 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/04/10 11:52:30 by maldavid #+# #+# */ -/* Updated: 2024/03/25 17:57:00 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 19:08:03 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __MLX_TEXT_LIBRARY__ #define __MLX_TEXT_LIBRARY__ -#include - #include #include #include diff --git a/src/renderer/texts/text_manager.cpp b/src/renderer/texts/text_manager.cpp index 8c1fbac..5430e2e 100644 --- a/src/renderer/texts/text_manager.cpp +++ b/src/renderer/texts/text_manager.cpp @@ -6,10 +6,12 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/04/06 16:41:13 by maldavid #+# #+# */ -/* Updated: 2024/02/25 09:29:36 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 19:05:13 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ +#include + #include #include #include diff --git a/src/renderer/texts/text_manager.h b/src/renderer/texts/text_manager.h index 1bef272..1c4cec0 100644 --- a/src/renderer/texts/text_manager.h +++ b/src/renderer/texts/text_manager.h @@ -6,15 +6,13 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/04/06 16:24:11 by maldavid #+# #+# */ -/* Updated: 2024/03/25 17:57:35 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 19:08:00 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __MLX_TEXT_MANAGER__ #define __MLX_TEXT_MANAGER__ -#include - #include #include #include diff --git a/src/utils/combine_hash.h b/src/utils/combine_hash.h index d8fc484..d86ec79 100644 --- a/src/utils/combine_hash.h +++ b/src/utils/combine_hash.h @@ -6,14 +6,15 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/12/14 16:16:06 by maldavid #+# #+# */ -/* Updated: 2024/03/25 17:58:16 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 19:06:37 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __MLX_HASH__ #define __MLX_HASH__ -#include +#include +#include namespace mlx { diff --git a/src/utils/dogica_ttf.h b/src/utils/dogica_ttf.h index 68176a2..30c48e2 100644 --- a/src/utils/dogica_ttf.h +++ b/src/utils/dogica_ttf.h @@ -6,14 +6,14 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/04/11 16:20:25 by maldavid #+# #+# */ -/* Updated: 2024/03/25 17:43:52 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 19:05:36 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __MLX_DOGICA_TTF__ #define __MLX_DOGICA_TTF__ -#include +#include constexpr const unsigned int dogica_ttf_len = 33860; diff --git a/src/utils/icon_mlx.h b/src/utils/icon_mlx.h index 13f4df3..ca05561 100644 --- a/src/utils/icon_mlx.h +++ b/src/utils/icon_mlx.h @@ -6,14 +6,14 @@ /* By: 
maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/11/25 11:23:16 by maldavid #+# #+# */ -/* Updated: 2024/03/25 17:58:24 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 19:05:28 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __ICON_MLX__ #define __ICON_MLX__ -#include +#include constexpr const int logo_mlx_height = 125; constexpr const int logo_mlx_width = 125; From 982e3e682725b408f113e7510d23483b43253536 Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Mon, 25 Mar 2024 19:24:37 +0100 Subject: [PATCH 003/131] fixing pch compilation error --- src/pre_compiled.h | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/pre_compiled.h b/src/pre_compiled.h index f795a4b..1554c4b 100644 --- a/src/pre_compiled.h +++ b/src/pre_compiled.h @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2024/03/25 17:37:23 by maldavid #+# #+# */ -/* Updated: 2024/03/25 18:06:01 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 19:24:26 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -16,6 +16,7 @@ #define VK_NO_PROTOTYPES #include +#include #include #include #include From 86671b8919f36d8e97a83bc657cfa0dfe53d8a7b Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Mon, 25 Mar 2024 19:27:48 +0100 Subject: [PATCH 004/131] fixing pch compilation error --- src/renderer/core/memory.cpp | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/renderer/core/memory.cpp b/src/renderer/core/memory.cpp index e9942ee..7c1568e 100644 --- a/src/renderer/core/memory.cpp +++ b/src/renderer/core/memory.cpp @@ -6,12 +6,14 @@ /* By: kbz_8 +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/10/20 22:02:37 by kbz_8 #+# #+# */ -/* Updated: 2024/03/25 19:01:57 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 19:27:44 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #include +#include + #define VK_NO_PROTOTYPES #define VMA_STATIC_VULKAN_FUNCTIONS 0 #define VMA_DYNAMIC_VULKAN_FUNCTIONS 0 From abc213c80840f0663a60742d1cd0a76419455010 Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Tue, 26 Mar 2024 00:11:38 +0100 Subject: [PATCH 005/131] working glfw support --- example/build.sh | 4 +- src/core/application.cpp | 23 ++--- src/core/application.inl | 8 +- src/core/bridge.cpp | 4 +- src/core/fps.cpp | 12 +-- src/core/fps.h | 8 +- src/core/graphics.cpp | 9 +- src/core/graphics.h | 10 +- src/platform/inputs.cpp | 132 ++---------------------- src/platform/inputs.h | 3 +- src/platform/window.cpp | 32 ++---- src/platform/window.h | 8 +- src/pre_compiled.h | 5 +- src/renderer/core/vk_device.cpp | 26 ++--- src/renderer/core/vk_device.h | 4 +- src/renderer/core/vk_instance.cpp | 29 ++---- src/renderer/core/vk_queues.cpp | 24 +---- src/renderer/core/vk_queues.h | 4 +- src/renderer/core/vk_surface.cpp | 6 +- src/renderer/swapchain/vk_swapchain.cpp | 6 +- 20 files changed, 81 insertions(+), 276 deletions(-) diff --git a/example/build.sh b/example/build.sh index b332c18..1b95a5d 100755 --- a/example/build.sh +++ b/example/build.sh @@ -5,8 +5,8 @@ if [ -e a.out ]; then fi if [ $(uname -s) = 'Darwin' ]; then - clang main.c ../libmlx.dylib -L /opt/homebrew/lib -lSDL2 -g; + clang main.c ../libmlx.dylib -L /opt/homebrew/lib -lglfw -g; else - clang main.c ../libmlx.so -lSDL2 -g -Wall -Wextra -Werror; + clang main.c ../libmlx.so -lglfw -g -Wall -Wextra -Werror; fi diff 
--git a/src/core/application.cpp b/src/core/application.cpp index ec9a4bd..e14379d 100644 --- a/src/core/application.cpp +++ b/src/core/application.cpp @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/04 22:10:52 by maldavid #+# #+# */ -/* Updated: 2024/03/25 19:00:40 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 22:16:24 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -23,20 +23,14 @@ namespace mlx::core { - static bool __drop_sdl_responsability = false; Application::Application() : _fps(), _in(std::make_unique()) { _fps.init(); - __drop_sdl_responsability = SDL_WasInit(SDL_INIT_VIDEO); - if(__drop_sdl_responsability) // is case the mlx is running in a sandbox like MacroUnitTester where SDL is already init - return; - SDL_SetMemoryFunctions(MemManager::malloc, MemManager::calloc, MemManager::realloc, MemManager::free); - - /* Remove this comment if you want to prioritise Wayland over X11/XWayland, at your own risks */ - //SDL_SetHint(SDL_HINT_VIDEODRIVER, "wayland,x11"); - - if(SDL_Init(SDL_INIT_VIDEO | SDL_INIT_EVENTS | SDL_INIT_TIMER) != 0) - error::report(e_kind::fatal_error, "SDL error : unable to init all subsystems : %s", SDL_GetError()); + glfwSetErrorCallback([]([[maybe_unused]] int code, const char* desc) + { + error::report(e_kind::fatal_error, "GLFW error : %s", desc); + }); + glfwInit(); } void Application::run() noexcept @@ -111,9 +105,6 @@ namespace mlx::core { TextLibrary::get().clearLibrary(); FontLibrary::get().clearLibrary(); - if(__drop_sdl_responsability) - return; - SDL_QuitSubSystem(SDL_INIT_VIDEO | SDL_INIT_TIMER | SDL_INIT_EVENTS); - SDL_Quit(); + glfwTerminate(); } } diff --git a/src/core/application.inl b/src/core/application.inl index a3ccec1..f98d510 100644 --- a/src/core/application.inl +++ b/src/core/application.inl @@ -55,8 +55,6 @@ namespace mlx::core error::report(e_kind::warning, "trying to move the mouse relative to a window that is targeting an image and not a real window, this is not allowed (move ignored)"); return; } - SDL_WarpMouseInWindow(_graphics[*static_cast(win)]->getWindow()->getNativeWindow(), x, y); - SDL_PumpEvents(); } void Application::onEvent(void* win, int event, int (*funct_ptr)(int, void*), void* param) noexcept @@ -73,10 +71,8 @@ namespace mlx::core void Application::getScreenSize(void* win, int* w, int* h) noexcept { CHECK_WINDOW_PTR(win); - SDL_DisplayMode DM; - SDL_GetDesktopDisplayMode(SDL_GetWindowDisplayIndex(_graphics[*static_cast(win)]->getWindow()->getNativeWindow()), &DM); - *w = DM.w; - *h = DM.h; + *w = 0; + *h = 0; } void Application::setFPSCap(std::uint32_t fps) noexcept diff --git a/src/core/bridge.cpp b/src/core/bridge.cpp index 43fbfa7..f81b738 100644 --- a/src/core/bridge.cpp +++ b/src/core/bridge.cpp @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/04 17:35:20 by maldavid #+# #+# */ -/* Updated: 2024/03/25 19:00:45 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 23:05:46 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -78,13 +78,11 @@ extern "C" int mlx_mouse_show() { - SDL_ShowCursor(SDL_ENABLE); return 0; } int mlx_mouse_hide() { - SDL_ShowCursor(SDL_DISABLE); return 0; } diff --git a/src/core/fps.cpp b/src/core/fps.cpp index 2a585e3..e88b489 100644 --- a/src/core/fps.cpp +++ b/src/core/fps.cpp @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 
2024/01/18 14:56:17 by maldavid #+# #+# */ -/* Updated: 2024/03/25 19:00:54 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 22:59:13 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -18,9 +18,9 @@ namespace mlx { void FpsManager::init() { - _timer = SDL_GetTicks64(); - _fps_before = static_cast(std::chrono::duration_cast(std::chrono::high_resolution_clock::now().time_since_epoch()).count()); - _fps_now = static_cast(std::chrono::duration_cast(std::chrono::high_resolution_clock::now().time_since_epoch()).count()); + _timer = static_cast(std::chrono::duration_cast(std::chrono::high_resolution_clock::now().time_since_epoch()).count()); + _fps_before = _timer; + _fps_now = _timer; } bool FpsManager::update() @@ -28,8 +28,8 @@ namespace mlx using namespace std::chrono_literals; _fps_now = static_cast(std::chrono::duration_cast(std::chrono::high_resolution_clock::now().time_since_epoch()).count()); - if(SDL_GetTicks64() - _timer > 1000) - _timer += 1000; + if(std::chrono::duration{_fps_now - _timer} >= 1s) + _timer += _fps_now; _fps_elapsed_time = _fps_now - _fps_before; if(_fps_elapsed_time >= _ns) diff --git a/src/core/fps.h b/src/core/fps.h index 6dacc77..f0d6fbb 100644 --- a/src/core/fps.h +++ b/src/core/fps.h @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2024/01/18 14:53:30 by maldavid #+# #+# */ -/* Updated: 2024/03/25 19:13:16 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 22:58:32 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -28,9 +28,9 @@ namespace mlx private: double _ns = 1000000000.0 / 1'337'000.0; - std::uint64_t _timer = 0; - std::uint64_t _fps_before = 0; - std::uint64_t _fps_now = 0; + std::int64_t _fps_before = 0; + std::int64_t _fps_now = 0; + std::int64_t _timer = 0; std::uint32_t _max_fps = 1'337'000; std::uint32_t _fps_elapsed_time = 0; }; diff --git a/src/core/graphics.cpp b/src/core/graphics.cpp index 66e5fd3..3b8604d 100644 --- a/src/core/graphics.cpp +++ b/src/core/graphics.cpp @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/04/02 15:13:55 by maldavid #+# #+# */ -/* Updated: 2024/03/25 19:00:58 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 23:02:43 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -71,11 +71,12 @@ namespace mlx #ifdef GRAPHICS_MEMORY_DUMP // dump memory to file every two seconds - static std::uint64_t timer = SDL_GetTicks64(); - if(SDL_GetTicks64() - timer > 2000) + using namespace std::chrono_literals; + static std::int64_t timer = static_cast(std::chrono::duration_cast(std::chrono::high_resolution_clock::now().time_since_epoch()).count()); + if(std::chrono::duration{static_cast(std::chrono::duration_cast(std::chrono::high_resolution_clock::now().time_since_epoch()).count()) - timer} >= 1s) { Render_Core::get().getAllocator().dumpMemoryToJson(); - timer += 2000; + timer = static_cast(std::chrono::duration_cast(std::chrono::high_resolution_clock::now().time_since_epoch()).count()); } #endif } diff --git a/src/core/graphics.h b/src/core/graphics.h index a23a96d..25b0aa6 100644 --- a/src/core/graphics.h +++ b/src/core/graphics.h @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/04/02 14:49:49 by maldavid #+# #+# */ -/* Updated: 2024/03/25 19:13:11 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 23:00:43 
by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -53,18 +53,18 @@ namespace mlx PixelPutPipeline _pixel_put_pipeline; std::vector _drawlist; - + TextManager _text_manager; TextureManager _texture_manager; - + glm::mat4 _proj = glm::mat4(1.0); - + std::shared_ptr _window; std::unique_ptr _renderer; std::size_t _width = 0; std::size_t _height = 0; - + int _id; bool _has_window; diff --git a/src/platform/inputs.cpp b/src/platform/inputs.cpp index fab695a..790b59b 100644 --- a/src/platform/inputs.cpp +++ b/src/platform/inputs.cpp @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/05 16:30:19 by maldavid #+# #+# */ -/* Updated: 2024/03/25 19:01:09 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 23:13:16 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -24,130 +24,14 @@ namespace mlx _xRel = 0; _yRel = 0; - while(SDL_PollEvent(&_event)) + static int i = 0; + i++; + if(i >= 150) { - if(_event.type == SDL_MOUSEMOTION) - { - _x = _event.motion.x; - _y = _event.motion.y; - - _xRel = _event.motion.xrel; - _yRel = _event.motion.yrel; - } - - std::uint32_t id = _event.window.windowID; - if(_events_hooks.find(id) == _events_hooks.end()) - continue; - auto& hooks = _events_hooks[id]; - - switch(_event.type) - { - case SDL_KEYDOWN: - { - if(hooks[MLX_KEYDOWN].hook) - hooks[MLX_KEYDOWN].hook(_event.key.keysym.scancode, hooks[MLX_KEYDOWN].param); - break; - } - - case SDL_KEYUP: - { - if(hooks[MLX_KEYUP].hook) - hooks[MLX_KEYUP].hook(_event.key.keysym.scancode, hooks[MLX_KEYUP].param); - break; - } - - case SDL_MOUSEBUTTONDOWN: - { - if(hooks[MLX_MOUSEDOWN].hook) - hooks[MLX_MOUSEDOWN].hook(_event.button.button, hooks[MLX_MOUSEDOWN].param); - break; - } - - case SDL_MOUSEBUTTONUP: - { - if(hooks[MLX_MOUSEUP].hook) - hooks[MLX_MOUSEUP].hook(_event.button.button, hooks[MLX_MOUSEUP].param); - break; - } - - case SDL_MOUSEWHEEL: - { - if(hooks[MLX_MOUSEWHEEL].hook) - { - if(_event.wheel.y > 0) // scroll up - hooks[MLX_MOUSEWHEEL].hook(1, hooks[MLX_MOUSEWHEEL].param); - else if(_event.wheel.y < 0) // scroll down - hooks[MLX_MOUSEWHEEL].hook(2, hooks[MLX_MOUSEWHEEL].param); - - if(_event.wheel.x > 0) // scroll right - hooks[MLX_MOUSEWHEEL].hook(3, hooks[MLX_MOUSEWHEEL].param); - else if(_event.wheel.x < 0) // scroll left - hooks[MLX_MOUSEWHEEL].hook(4, hooks[MLX_MOUSEWHEEL].param); - } - break; - } - - case SDL_WINDOWEVENT: - { - auto& win_hook = hooks[MLX_WINDOW_EVENT]; - switch(_event.window.event) - { - case SDL_WINDOWEVENT_CLOSE: - { - if(win_hook.hook) - win_hook.hook(0, win_hook.param); - break; - } - case SDL_WINDOWEVENT_MOVED: - { - if(win_hook.hook) - win_hook.hook(1, win_hook.param); - break; - } - case SDL_WINDOWEVENT_MINIMIZED: - { - if(win_hook.hook) - win_hook.hook(2, win_hook.param); - break; - } - case SDL_WINDOWEVENT_MAXIMIZED: - { - if(win_hook.hook) - win_hook.hook(3, win_hook.param); - break; - } - case SDL_WINDOWEVENT_ENTER: - { - if(win_hook.hook) - win_hook.hook(4, win_hook.param); - break; - } - case SDL_WINDOWEVENT_FOCUS_GAINED: - { - if(win_hook.hook) - win_hook.hook(5, win_hook.param); - break; - } - case SDL_WINDOWEVENT_LEAVE: - { - if(win_hook.hook) - win_hook.hook(6, win_hook.param); - break; - } - case SDL_WINDOWEVENT_FOCUS_LOST: - { - if(win_hook.hook) - win_hook.hook(7, win_hook.param); - break; - } - - default : break; - } - break; - } - - default: break; - } + auto& hooks = _events_hooks[0]; + 
auto& win_hook = hooks[MLX_WINDOW_EVENT]; + if(win_hook.hook) + win_hook.hook(0, win_hook.param); } } } diff --git a/src/platform/inputs.h b/src/platform/inputs.h index 39d4e0c..2359dce 100644 --- a/src/platform/inputs.h +++ b/src/platform/inputs.h @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/05 16:27:35 by maldavid #+# #+# */ -/* Updated: 2024/03/25 19:12:44 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 23:03:39 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -58,7 +58,6 @@ namespace mlx private: std::unordered_map> _windows; std::unordered_map> _events_hooks; - SDL_Event _event; int _x = 0; int _y = 0; diff --git a/src/platform/window.cpp b/src/platform/window.cpp index 5ab88b3..01f031d 100644 --- a/src/platform/window.cpp +++ b/src/platform/window.cpp @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/04 17:36:44 by maldavid #+# #+# */ -/* Updated: 2024/03/25 19:01:14 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 22:17:34 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -18,41 +18,23 @@ namespace mlx { - #if SDL_BYTEORDER == SDL_BIG_ENDIAN - constexpr const std::uint32_t rmask = 0xff000000; - constexpr const std::uint32_t gmask = 0x00ff0000; - constexpr const std::uint32_t bmask = 0x0000ff00; - constexpr const std::uint32_t amask = 0x000000ff; - #else - constexpr const std::uint32_t rmask = 0x000000ff; - constexpr const std::uint32_t gmask = 0x0000ff00; - constexpr const std::uint32_t bmask = 0x00ff0000; - constexpr const std::uint32_t amask = 0xff000000; - #endif - MLX_Window::MLX_Window(std::size_t w, std::size_t h, const std::string& title) : _width(w), _height(h) { + static std::uint64_t ids = 0; + if(title.find("vvaas") != std::string::npos) core::error::report(e_kind::message, "vvaas est mauvais"); - _win = SDL_CreateWindow(title.c_str(), SDL_WINDOWPOS_CENTERED, SDL_WINDOWPOS_CENTERED, w, h, SDL_WINDOW_VULKAN | SDL_WINDOW_SHOWN); - if(!_win) - core::error::report(e_kind::fatal_error, std::string("unable to open a new window, ") + SDL_GetError()); - _id = SDL_GetWindowID(_win); - _icon = SDL_CreateRGBSurfaceFrom(static_cast(logo_mlx), logo_mlx_width, logo_mlx_height, 32, 4 * logo_mlx_width, rmask, gmask, bmask, amask); - SDL_SetWindowIcon(_win, _icon); + glfwWindowHint(GLFW_CLIENT_API, GLFW_NO_API); + _win = glfwCreateWindow(_width, _height, title.c_str(), NULL, NULL);; + _id = ids++; } void MLX_Window::destroy() noexcept { if(_win != nullptr) { - SDL_DestroyWindow(_win); + glfwDestroyWindow(_win); _win = nullptr; } - if(_icon != nullptr) - { - SDL_FreeSurface(_icon); - _icon = nullptr; - } } } diff --git a/src/platform/window.h b/src/platform/window.h index 754151b..07c4bb2 100644 --- a/src/platform/window.h +++ b/src/platform/window.h @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/04 21:53:12 by maldavid #+# #+# */ -/* Updated: 2024/03/25 19:12:46 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 22:11:21 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -20,7 +20,7 @@ namespace mlx public: MLX_Window(std::size_t w, std::size_t h, const std::string& title); - inline SDL_Window* getNativeWindow() const noexcept { return _win; } + inline GLFWwindow* getNativeWindow() const noexcept { return _win; } inline int getWidth() const 
noexcept { return _width; } inline int getHeight() const noexcept { return _height; } inline std::uint32_t getID() const noexcept { return _id; } @@ -30,8 +30,8 @@ namespace mlx ~MLX_Window() = default; private: - SDL_Surface* _icon = nullptr; - SDL_Window* _win = nullptr; + GLFWimage _icon; + GLFWwindow* _win = nullptr; int _width = 0; int _height = 0; std::uint32_t _id = -1; diff --git a/src/pre_compiled.h b/src/pre_compiled.h index 1554c4b..e27194e 100644 --- a/src/pre_compiled.h +++ b/src/pre_compiled.h @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2024/03/25 17:37:23 by maldavid #+# #+# */ -/* Updated: 2024/03/25 19:24:26 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 22:03:44 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -18,8 +18,8 @@ #include #include #include -#include #include +#include #include #include #include @@ -41,7 +41,6 @@ #include #include #include -#include #include #include #include diff --git a/src/renderer/core/vk_device.cpp b/src/renderer/core/vk_device.cpp index 910b904..7ed8850 100644 --- a/src/renderer/core/vk_device.cpp +++ b/src/renderer/core/vk_device.cpp @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/08 19:14:29 by maldavid #+# #+# */ -/* Updated: 2024/03/25 19:02:11 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 22:31:54 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -71,19 +71,11 @@ namespace mlx std::vector devices(deviceCount); vkEnumeratePhysicalDevices(Render_Core::get().getInstance().get(), &deviceCount, devices.data()); - SDL_Window* window = SDL_CreateWindow("", 0, 0, 1, 1, SDL_WINDOW_VULKAN | SDL_WINDOW_HIDDEN); - if(!window) - core::error::report(e_kind::fatal_error, "Vulkan : failed to create a window to pick physical device"); - - VkSurfaceKHR surface = VK_NULL_HANDLE; - if(SDL_Vulkan_CreateSurface(window, Render_Core::get().getInstance().get(), &surface) != SDL_TRUE) - core::error::report(e_kind::fatal_error, "Vulkan : failed to create a surface to pick physical device"); - std::multimap devices_score; for(const auto& device : devices) { - int score = deviceScore(device, surface); + int score = deviceScore(device); devices_score.insert(std::make_pair(score, device)); } @@ -97,23 +89,17 @@ namespace mlx vkGetPhysicalDeviceProperties(_physical_device, &props); core::error::report(e_kind::message, "Vulkan : picked a physical device, %s", props.deviceName); #endif - Render_Core::get().getQueue().findQueueFamilies(_physical_device, surface); // update queue indicies to current physical device - vkDestroySurfaceKHR(Render_Core::get().getInstance().get(), surface, nullptr); - SDL_DestroyWindow(window); + Render_Core::get().getQueue().findQueueFamilies(_physical_device); // update queue indicies to current physical device } - int Device::deviceScore(VkPhysicalDevice device, VkSurfaceKHR surface) + int Device::deviceScore(VkPhysicalDevice device) { - Queues::QueueFamilyIndices indices = Render_Core::get().getQueue().findQueueFamilies(device, surface); + Queues::QueueFamilyIndices indices = Render_Core::get().getQueue().findQueueFamilies(device); bool extensionsSupported = checkDeviceExtensionSupport(device); - std::uint32_t formatCount = 0; - if(extensionsSupported) - vkGetPhysicalDeviceSurfaceFormatsKHR(device, surface, &formatCount, nullptr); - VkPhysicalDeviceProperties props; vkGetPhysicalDeviceProperties(device, 
&props); - if(!indices.isComplete() || !extensionsSupported || formatCount == 0) + if(!indices.isComplete() || !extensionsSupported) return -1; VkPhysicalDeviceFeatures features; diff --git a/src/renderer/core/vk_device.h b/src/renderer/core/vk_device.h index 2352f03..564fba1 100644 --- a/src/renderer/core/vk_device.h +++ b/src/renderer/core/vk_device.h @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/08 19:13:42 by maldavid #+# #+# */ -/* Updated: 2024/03/25 19:11:56 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 22:31:46 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -29,7 +29,7 @@ namespace mlx private: void pickPhysicalDevice(); bool checkDeviceExtensionSupport(VkPhysicalDevice device); - int deviceScore(VkPhysicalDevice device, VkSurfaceKHR surface); + int deviceScore(VkPhysicalDevice device); private: VkPhysicalDevice _physical_device = VK_NULL_HANDLE; diff --git a/src/renderer/core/vk_instance.cpp b/src/renderer/core/vk_instance.cpp index 92b6b36..ad758ae 100644 --- a/src/renderer/core/vk_instance.cpp +++ b/src/renderer/core/vk_instance.cpp @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/08 19:04:21 by maldavid #+# #+# */ -/* Updated: 2024/03/25 19:02:18 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 23:10:37 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -59,34 +59,19 @@ namespace mlx std::vector Instance::getRequiredExtensions() { - std::vector extensions; + std::uint32_t glfw_extension_count = 0; + const char** glfw_extensions = glfwGetRequiredInstanceExtensions(&glfw_extension_count); + + std::vector extensions(glfw_extensions, glfw_extensions + glfw_extension_count); + extensions.push_back(VK_KHR_SURFACE_EXTENSION_NAME); - - #ifdef VK_USE_PLATFORM_XCB_KHR - extensions.push_back(VK_KHR_XCB_SURFACE_EXTENSION_NAME); - #endif - - #ifdef VK_USE_PLATFORM_XLIB_KHR - extensions.push_back(VK_KHR_XLIB_SURFACE_EXTENSION_NAME); - #endif - - #ifdef VK_USE_PLATFORM_WAYLAND_KHR - extensions.push_back(VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME); - #endif - - #ifdef VK_USE_PLATFORM_WIN32_KHR - extensions.push_back(VK_KHR_WIN32_SURFACE_EXTENSION_NAME); - #endif - - #ifdef VK_USE_PLATFORM_METAL_EXT - extensions.push_back(VK_EXT_METAL_SURFACE_EXTENSION_NAME); - #endif if constexpr(enableValidationLayers) { extensions.push_back(VK_EXT_DEBUG_REPORT_EXTENSION_NAME); extensions.push_back(VK_EXT_DEBUG_UTILS_EXTENSION_NAME); } + return extensions; } diff --git a/src/renderer/core/vk_queues.cpp b/src/renderer/core/vk_queues.cpp index 6f89926..b47fcf0 100644 --- a/src/renderer/core/vk_queues.cpp +++ b/src/renderer/core/vk_queues.cpp @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/08 19:02:42 by maldavid #+# #+# */ -/* Updated: 2024/03/25 19:02:20 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 22:29:19 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -16,7 +16,7 @@ namespace mlx { - Queues::QueueFamilyIndices Queues::findQueueFamilies(VkPhysicalDevice device, VkSurfaceKHR surface) + Queues::QueueFamilyIndices Queues::findQueueFamilies(VkPhysicalDevice device) { std::uint32_t queueFamilyCount = 0; vkGetPhysicalDeviceQueueFamilyProperties(device, &queueFamilyCount, nullptr); @@ -31,10 +31,7 @@ namespace mlx if(queueFamily.queueFlags & 
VK_QUEUE_GRAPHICS_BIT) _families->graphics_family = i; - VkBool32 presentSupport = false; - vkGetPhysicalDeviceSurfaceSupportKHR(device, i, surface, &presentSupport); - - if(presentSupport) + if(glfwGetPhysicalDevicePresentationSupport(Render_Core::get().getInstance().get(), device, i)) _families->present_family = i; if(_families->isComplete()) @@ -48,20 +45,7 @@ namespace mlx void Queues::init() { if(!_families.has_value()) - { - SDL_Window* window = SDL_CreateWindow("", 0, 0, 1, 1, SDL_WINDOW_VULKAN | SDL_WINDOW_HIDDEN); - if(!window) - core::error::report(e_kind::fatal_error, "Vulkan : failed to create a window to init queues"); - - VkSurfaceKHR surface = VK_NULL_HANDLE; - if(SDL_Vulkan_CreateSurface(window, Render_Core::get().getInstance().get(), &surface) != SDL_TRUE) - core::error::report(e_kind::fatal_error, "Vulkan : failed to create a surface to init queues"); - - findQueueFamilies(Render_Core::get().getDevice().getPhysicalDevice(), surface); - - vkDestroySurfaceKHR(Render_Core::get().getInstance().get(), surface, nullptr); - SDL_DestroyWindow(window); - } + findQueueFamilies(Render_Core::get().getDevice().getPhysicalDevice()); vkGetDeviceQueue(Render_Core::get().getDevice().get(), _families->graphics_family.value(), 0, &_graphics_queue); vkGetDeviceQueue(Render_Core::get().getDevice().get(), _families->present_family.value(), 0, &_present_queue); #ifdef DEBUG diff --git a/src/renderer/core/vk_queues.h b/src/renderer/core/vk_queues.h index 832bc50..4895c9c 100644 --- a/src/renderer/core/vk_queues.h +++ b/src/renderer/core/vk_queues.h @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/08 19:01:49 by maldavid #+# #+# */ -/* Updated: 2024/03/25 19:11:46 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 22:29:26 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -28,7 +28,7 @@ namespace mlx inline bool isComplete() { return graphics_family.has_value() && present_family.has_value(); } }; - QueueFamilyIndices findQueueFamilies(VkPhysicalDevice device, VkSurfaceKHR surface); + QueueFamilyIndices findQueueFamilies(VkPhysicalDevice device); void init(); diff --git a/src/renderer/core/vk_surface.cpp b/src/renderer/core/vk_surface.cpp index e1708a6..b50a5f2 100644 --- a/src/renderer/core/vk_surface.cpp +++ b/src/renderer/core/vk_surface.cpp @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/08 18:58:49 by maldavid #+# #+# */ -/* Updated: 2024/03/25 19:00:11 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 22:25:55 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -19,8 +19,8 @@ namespace mlx { void Surface::create(Renderer& renderer) { - if(SDL_Vulkan_CreateSurface(renderer.getWindow()->getNativeWindow(), Render_Core::get().getInstance().get(), &_surface) != SDL_TRUE) - core::error::report(e_kind::fatal_error, "Vulkan : failed to create a surface : %s", SDL_GetError()); + if(glfwCreateWindowSurface(Render_Core::get().getInstance().get(), renderer.getWindow()->getNativeWindow(), NULL, &_surface) != VK_SUCCESS) + core::error::report(e_kind::fatal_error, "Vulkan : failed to create a surface"); #ifdef DEBUG core::error::report(e_kind::message, "Vulkan : created new surface"); #endif diff --git a/src/renderer/swapchain/vk_swapchain.cpp b/src/renderer/swapchain/vk_swapchain.cpp index 223f8bc..485d413 100644 --- a/src/renderer/swapchain/vk_swapchain.cpp +++ 
b/src/renderer/swapchain/vk_swapchain.cpp @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/06 18:22:28 by maldavid #+# #+# */ -/* Updated: 2024/03/25 19:03:41 by maldavid ### ########.fr */ +/* Updated: 2024/03/25 23:09:33 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -33,7 +33,7 @@ namespace mlx if(_swapchain_support.capabilities.maxImageCount > 0 && imageCount > _swapchain_support.capabilities.maxImageCount) imageCount = _swapchain_support.capabilities.maxImageCount; - Queues::QueueFamilyIndices indices = Render_Core::get().getQueue().findQueueFamilies(Render_Core::get().getDevice().getPhysicalDevice(), renderer->getSurface().get()); + Queues::QueueFamilyIndices indices = Render_Core::get().getQueue().findQueueFamilies(Render_Core::get().getDevice().getPhysicalDevice()); std::uint32_t queueFamilyIndices[] = { indices.graphics_family.value(), indices.present_family.value() }; VkSwapchainCreateInfoKHR createInfo{}; @@ -123,7 +123,7 @@ namespace mlx return capabilities.currentExtent; int width, height; - SDL_Vulkan_GetDrawableSize(_renderer->getWindow()->getNativeWindow(), &width, &height); + glfwGetFramebufferSize(_renderer->getWindow()->getNativeWindow(), &width, &height); VkExtent2D actualExtent = { static_cast(width), static_cast(height) }; From b5983ac24b7beb76540dbc0dddf9387b9fc92fcd Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Wed, 27 Mar 2024 13:19:28 +0100 Subject: [PATCH 006/131] renaming MLX_Window to Window --- src/core/graphics.cpp | 4 ++-- src/core/graphics.h | 6 +++--- src/core/graphics.inl | 2 +- src/platform/inputs.h | 6 +++--- src/platform/window.cpp | 6 +++--- src/platform/window.h | 8 ++++---- src/pre_compiled.h | 7 ++++++- src/renderer/renderer.h | 8 ++++---- 8 files changed, 26 insertions(+), 21 deletions(-) diff --git a/src/core/graphics.cpp b/src/core/graphics.cpp index 3b8604d..6be4395 100644 --- a/src/core/graphics.cpp +++ b/src/core/graphics.cpp @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/04/02 15:13:55 by maldavid #+# #+# */ -/* Updated: 2024/03/25 23:02:43 by maldavid ### ########.fr */ +/* Updated: 2024/03/27 00:32:34 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -32,7 +32,7 @@ namespace mlx } GraphicsSupport::GraphicsSupport(std::size_t w, std::size_t h, std::string title, int id) : - _window(std::make_shared(w, h, title)), + _window(std::make_shared(w, h, title)), _renderer(std::make_unique()), _width(w), _height(h), diff --git a/src/core/graphics.h b/src/core/graphics.h index 25b0aa6..bb9935d 100644 --- a/src/core/graphics.h +++ b/src/core/graphics.h @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/04/02 14:49:49 by maldavid #+# #+# */ -/* Updated: 2024/03/25 23:00:43 by maldavid ### ########.fr */ +/* Updated: 2024/03/27 00:32:28 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -32,7 +32,7 @@ namespace mlx GraphicsSupport(std::size_t w, std::size_t h, std::string title, int id); inline int& getID() noexcept; - inline std::shared_ptr getWindow(); + inline std::shared_ptr getWindow(); void render() noexcept; @@ -59,7 +59,7 @@ namespace mlx glm::mat4 _proj = glm::mat4(1.0); - std::shared_ptr _window; + std::shared_ptr _window; std::unique_ptr _renderer; std::size_t _width = 0; diff --git a/src/core/graphics.inl 
b/src/core/graphics.inl index 97958e0..c6f60f6 100644 --- a/src/core/graphics.inl +++ b/src/core/graphics.inl @@ -15,7 +15,7 @@ namespace mlx { int& GraphicsSupport::getID() noexcept { return _id; } - std::shared_ptr GraphicsSupport::getWindow() { return _window; } + std::shared_ptr GraphicsSupport::getWindow() { return _window; } void GraphicsSupport::clearRenderData() noexcept { diff --git a/src/platform/inputs.h b/src/platform/inputs.h index 2359dce..7cd9078 100644 --- a/src/platform/inputs.h +++ b/src/platform/inputs.h @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/05 16:27:35 by maldavid #+# #+# */ -/* Updated: 2024/03/25 23:03:39 by maldavid ### ########.fr */ +/* Updated: 2024/03/27 00:31:56 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -41,7 +41,7 @@ namespace mlx inline bool isRunning() const noexcept { return !_end; } inline constexpr void finish() noexcept { _end = true; } - inline void addWindow(std::shared_ptr window) + inline void addWindow(std::shared_ptr window) { _windows[window->getID()] = window; _events_hooks[window->getID()] = {}; @@ -56,7 +56,7 @@ namespace mlx ~Input() = default; private: - std::unordered_map> _windows; + std::unordered_map> _windows; std::unordered_map> _events_hooks; int _x = 0; diff --git a/src/platform/window.cpp b/src/platform/window.cpp index 01f031d..182f18f 100644 --- a/src/platform/window.cpp +++ b/src/platform/window.cpp @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/04 17:36:44 by maldavid #+# #+# */ -/* Updated: 2024/03/25 22:17:34 by maldavid ### ########.fr */ +/* Updated: 2024/03/26 23:03:59 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -18,7 +18,7 @@ namespace mlx { - MLX_Window::MLX_Window(std::size_t w, std::size_t h, const std::string& title) : _width(w), _height(h) + Window::Window(std::size_t w, std::size_t h, const std::string& title) : _width(w), _height(h) { static std::uint64_t ids = 0; @@ -29,7 +29,7 @@ namespace mlx _id = ids++; } - void MLX_Window::destroy() noexcept + void Window::destroy() noexcept { if(_win != nullptr) { diff --git a/src/platform/window.h b/src/platform/window.h index 07c4bb2..09202a0 100644 --- a/src/platform/window.h +++ b/src/platform/window.h @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/04 21:53:12 by maldavid #+# #+# */ -/* Updated: 2024/03/25 22:11:21 by maldavid ### ########.fr */ +/* Updated: 2024/03/26 23:03:50 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -15,10 +15,10 @@ namespace mlx { - class MLX_Window + class Window { public: - MLX_Window(std::size_t w, std::size_t h, const std::string& title); + Window(std::size_t w, std::size_t h, const std::string& title); inline GLFWwindow* getNativeWindow() const noexcept { return _win; } inline int getWidth() const noexcept { return _width; } @@ -27,7 +27,7 @@ namespace mlx void destroy() noexcept; - ~MLX_Window() = default; + ~Window() = default; private: GLFWimage _icon; diff --git a/src/pre_compiled.h b/src/pre_compiled.h index e27194e..313cd6f 100644 --- a/src/pre_compiled.h +++ b/src/pre_compiled.h @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2024/03/25 17:37:23 by maldavid #+# #+# */ -/* Updated: 2024/03/25 22:03:44 by maldavid ### ########.fr */ +/* Updated: 
2024/03/27 00:39:13 by maldavid ### ########.fr */
 /* */
 /* ************************************************************************** */
@@ -15,8 +15,11 @@
 #define VK_NO_PROTOTYPES
+#define Window X11Window // fuck X11
+
 #include
 #include
+#include
 #include
 #include
 #include
@@ -69,4 +72,6 @@
 #include
 #endif
+#undef Window
+
 #endif
diff --git a/src/renderer/renderer.h b/src/renderer/renderer.h
index c935a1c..bd347d2 100644
--- a/src/renderer/renderer.h
+++ b/src/renderer/renderer.h
@@ -6,7 +6,7 @@
 /* By: maldavid +#+ +:+ +#+ */
 /* +#+#+#+#+#+ +#+ */
 /* Created: 2022/12/18 17:14:45 by maldavid #+# #+# */
-/* Updated: 2024/03/25 19:07:52 by maldavid ### ########.fr */
+/* Updated: 2024/03/27 00:31:28 by maldavid ### ########.fr */
 /* */
 /* ************************************************************************** */
@@ -86,8 +86,8 @@ namespace mlx
 void destroy();
- inline class MLX_Window* getWindow() { return _window; }
- inline void setWindow(class MLX_Window* window) { _window = window; }
+ inline class Window* getWindow() { return _window; }
+ inline void setWindow(class Window* window) { _window = window; }
 inline Surface& getSurface() noexcept { return _surface; }
 inline CmdPool& getCmdPool() noexcept { return _cmd.getCmdPool(); }
@@ -130,7 +130,7 @@ namespace mlx
 std::unique_ptr _uniform_buffer;
- class MLX_Window* _window = nullptr;
+ class Window* _window = nullptr;
 class Texture* _render_target = nullptr;
 std::uint32_t _current_frame_index = 0;
From 0e04356ea77dc4b68eba1e37febc6e7b1ebc55aa Mon Sep 17 00:00:00 2001
From: Kbz-8
Date: Wed, 27 Mar 2024 23:03:54 +0100
Subject: [PATCH 007/131] beginning the refactor
---
 Makefile | 22 +-
 includes/mlx_profile.h | 12 +-
 runtime/Includes/Core/Application.h | 71 +++++++
 runtime/Includes/Core/Application.inl | 196 ++++++++++++++++++
 runtime/Includes/Core/Enums.h | 33 +++
 .../Includes/Core/EventBase.h | 27 +--
 runtime/Includes/Core/EventBus.h | 35 ++++
 runtime/Includes/Core/EventListener.h | 37 ++++
 runtime/Includes/Core/Format.h | 33 +++
 runtime/Includes/Core/Format.inl | 146 +++++++++++++
 src/core/fps.h => runtime/Includes/Core/Fps.h | 22 +-
 runtime/Includes/Core/Graphics.h | 75 +++++++
 runtime/Includes/Core/Graphics.inl | 79 +++++++
 runtime/Includes/Core/Logs.h | 84 ++++++++
 runtime/Includes/Core/Logs.inl | 135 ++++++++++++
 .../Includes/Core/Memory.h | 16 +-
 .../Includes/Core/Profiler.h | 64 +++---
 {src/core => runtime/Includes/Core}/UUID.h | 6 +-
 runtime/Includes/Drivers/GLFW/GLFWInputs.h | 44 ++++
 runtime/Includes/Platform/Inputs.h | 62 ++++++
 .../Includes/Platform/Window.h | 28 +--
 .../Includes/PreCompiled.h | 18 +-
 runtime/Includes/Renderer/Buffers/Buffer.h | 66 ++++++
 .../Includes/Renderer/Buffers/IndexBuffer.h | 12 +-
 .../Includes/Renderer/Buffers/UniformBuffer.h | 50 +++++
 .../Includes/Renderer/Buffers/VertexBuffer.h | 46 ++++
 .../Includes/Renderer/Command/CommandBuffer.h | 70 +++++++
 .../Renderer/Command/CommandManager.h | 32 +--
 .../Includes/Renderer/Command/CommandPool.h | 20 +-
 .../Renderer/Command/CommandResource.h | 29 ++-
 .../Renderer/Command/SingleTimeCmdManager.h | 28 +--
 .../Includes/Renderer/Core/Device.h | 24 +--
 .../Includes/Renderer/Core/DrawableResource.h | 8 +-
 .../Includes/Renderer/Core/Fence.h | 20 +-
 .../Includes/Renderer/Core/Instance.h | 18 +-
 .../Includes/Renderer/Core/Memory.h | 30 +--
 .../Includes/Renderer/Core/Queues.h | 29 ++-
 runtime/Includes/Renderer/Core/RenderCore.h | 78 +++++++
 .../Includes/Renderer/Core/Semaphore.h | 14 +-
 .../Includes/Renderer/Core/Surface.h | 16 +-
.../Includes/Renderer/Core/ValidationLayers.h | 24 +-- .../Renderer/Descriptors/DescriptorPool.h | 22 +- .../Descriptors/DescriptorPoolManager.h | 12 +- .../Renderer/Descriptors/DescriptorSet.h | 30 +-- .../Descriptors/DescriptorSetLayout.h | 18 +- runtime/Includes/Renderer/Enums.h | 59 ++++++ .../Includes/Renderer/Images/Image.h | 0 .../Includes/Renderer/Images/Texture.h | 0 .../Includes/Renderer/Images/TextureAtlas.h | 0 .../Renderer/Images/TextureDescriptor.h | 0 .../Includes/Renderer/Images/TextureManager.h | 0 .../Includes/Renderer/Pipelines/Pipeline.h | 0 .../Includes/Renderer/PixelPut.h | 0 .../Includes/Renderer/Renderer.h | 0 .../Renderer/Renderpass/FrameBuffer.h | 0 .../Includes/Renderer/Renderpass/RenderPass.h | 0 .../Includes/Renderer/Renderpass/Swapchain.h | 0 .../Includes/Renderer/Texts/Font.h | 0 .../Includes/Renderer/Texts/FontLibrary.h | 0 .../Includes/Renderer/Texts/Text.h | 0 .../Includes/Renderer/Texts/TextDescriptor.h | 0 .../Includes/Renderer/Texts/TextLibrary.h | 0 .../Includes/Renderer/Texts/TextManager.h | 0 runtime/Includes/Utils/Ansi.h | 56 +++++ .../Includes/Utils/CombineHash.h | 13 +- runtime/Includes/Utils/ConstMap.h | 69 ++++++ .../Includes/Utils/DogicaTTF.h | 6 +- .../Includes/Utils/IconMlx.h | 6 +- .../Includes/Utils/NonCopyable.h | 0 runtime/Includes/Utils/NonOwningPtr.h | 45 ++++ runtime/Includes/Utils/NonOwningPtr.inl | 62 ++++++ .../Includes/Utils/Singleton.h | 8 +- .../Sources/Core/Application.cpp | 14 +- .../Sources/Core/Bridge.cpp | 0 runtime/Sources/Core/EventBus.cpp | 37 ++++ runtime/Sources/Core/EventListener.cpp | 21 ++ .../fps.cpp => runtime/Sources/Core/Fps.cpp | 33 ++- .../Sources/Core/Graphics.cpp | 0 runtime/Sources/Core/Logs.cpp | 68 ++++++ .../Sources/Core/Memory.cpp | 0 .../Sources/Core/Profiler.cpp | 0 {src/core => runtime/Sources/Core}/UUID.cpp | 0 runtime/Sources/Drivers/GLFW/GLFWInputs.cpp | 22 ++ .../Sources/Platform/Inputs.cpp | 6 +- .../Sources/Platform/Window.cpp | 0 .../Sources/Renderer/Buffers/Buffer.cpp | 0 .../Renderer/Buffers/UniformBuffer.cpp | 0 .../Sources/Renderer/Buffers/VertexBuffer.cpp | 0 .../Renderer/Command/CommandBuffer.cpp | 0 .../Renderer/Command/CommandManager.cpp | 0 .../Sources/Renderer/Command/CommandPool.cpp | 0 .../Command/SingleTimeCommandManager.cpp | 0 .../Sources/Renderer/Core/Device.cpp | 0 .../Sources/Renderer/Core/Fence.cpp | 0 .../Sources/Renderer/Core/Instance.cpp | 0 .../Sources/Renderer/Core/Memory.cpp | 0 .../Sources/Renderer/Core/Queues.cpp | 0 .../Sources/Renderer/Core/RenderCore.cpp | 0 .../Sources/Renderer/Core/Semaphore.cpp | 0 .../Sources/Renderer/Core/Surface.cpp | 0 .../Renderer/Core/ValidationLayers.cpp | 0 .../Descriptors}/descriptor_pool_manager.cpp | 0 .../Descriptors}/vk_descriptor_pool.cpp | 0 .../Descriptors}/vk_descriptor_set.cpp | 0 .../Descriptors}/vk_descriptor_set_layout.cpp | 0 .../Sources/Renderer/Images}/texture.cpp | 0 .../Renderer/Images}/texture_atlas.cpp | 0 .../Sources/Renderer/Images}/vk_image.cpp | 0 .../Sources/Renderer/Pipelines}/pipeline.cpp | 0 .../Sources/Renderer/PixelPut.cpp | 0 .../Sources/Renderer/Renderer.cpp | 0 .../Renderer/Renderpass/Framebuffer.cpp | 0 .../Renderer/Renderpass/Renderpass.cpp | 0 .../Sources/Renderer/Renderpass/Swapchain.cpp | 0 .../Sources/Renderer/Texts/Font.cpp | 0 .../Sources/Renderer/Texts/FontLibrary.cpp | 0 .../Sources/Renderer/Texts/Text.cpp | 0 .../Sources/Renderer/Texts/TextDescriptor.cpp | 0 .../Sources/Renderer/Texts/TextLibrary.cpp | 0 .../Sources/Renderer/Texts/TextManager.cpp | 0 src/core/application.h | 73 ------- 
src/core/application.inl | 196 ------------------ src/core/errors.cpp | 41 ---- src/core/graphics.h | 76 ------- src/core/graphics.inl | 79 ------- src/platform/inputs.h | 71 ------- src/renderer/buffers/vk_buffer.h | 63 ------ src/renderer/buffers/vk_ubo.h | 48 ----- src/renderer/buffers/vk_vbo.h | 46 ---- src/renderer/command/vk_cmd_buffer.h | 85 -------- src/renderer/core/render_core.h | 83 -------- 131 files changed, 2135 insertions(+), 1192 deletions(-) create mode 100644 runtime/Includes/Core/Application.h create mode 100644 runtime/Includes/Core/Application.inl create mode 100644 runtime/Includes/Core/Enums.h rename src/core/errors.h => runtime/Includes/Core/EventBase.h (63%) create mode 100644 runtime/Includes/Core/EventBus.h create mode 100644 runtime/Includes/Core/EventListener.h create mode 100644 runtime/Includes/Core/Format.h create mode 100644 runtime/Includes/Core/Format.inl rename src/core/fps.h => runtime/Includes/Core/Fps.h (65%) create mode 100644 runtime/Includes/Core/Graphics.h create mode 100644 runtime/Includes/Core/Graphics.inl create mode 100644 runtime/Includes/Core/Logs.h create mode 100644 runtime/Includes/Core/Logs.inl rename src/core/memory.h => runtime/Includes/Core/Memory.h (72%) rename src/core/profiler.h => runtime/Includes/Core/Profiler.h (64%) rename {src/core => runtime/Includes/Core}/UUID.h (86%) create mode 100644 runtime/Includes/Drivers/GLFW/GLFWInputs.h create mode 100644 runtime/Includes/Platform/Inputs.h rename src/platform/window.h => runtime/Includes/Platform/Window.h (62%) rename src/pre_compiled.h => runtime/Includes/PreCompiled.h (86%) create mode 100644 runtime/Includes/Renderer/Buffers/Buffer.h rename src/renderer/buffers/vk_ibo.h => runtime/Includes/Renderer/Buffers/IndexBuffer.h (64%) create mode 100644 runtime/Includes/Renderer/Buffers/UniformBuffer.h create mode 100644 runtime/Includes/Renderer/Buffers/VertexBuffer.h create mode 100644 runtime/Includes/Renderer/Command/CommandBuffer.h rename src/renderer/command/cmd_manager.h => runtime/Includes/Renderer/Command/CommandManager.h (55%) rename src/renderer/command/vk_cmd_pool.h => runtime/Includes/Renderer/Command/CommandPool.h (67%) rename src/renderer/core/cmd_resource.h => runtime/Includes/Renderer/Command/CommandResource.h (60%) rename src/renderer/command/single_time_cmd_manager.h => runtime/Includes/Renderer/Command/SingleTimeCmdManager.h (64%) rename src/renderer/core/vk_device.h => runtime/Includes/Renderer/Core/Device.h (60%) rename src/renderer/core/drawable_resource.h => runtime/Includes/Renderer/Core/DrawableResource.h (81%) rename src/renderer/core/vk_fence.h => runtime/Includes/Renderer/Core/Fence.h (69%) rename src/renderer/core/vk_instance.h => runtime/Includes/Renderer/Core/Instance.h (69%) rename src/renderer/core/memory.h => runtime/Includes/Renderer/Core/Memory.h (60%) rename src/renderer/core/vk_queues.h => runtime/Includes/Renderer/Core/Queues.h (64%) create mode 100644 runtime/Includes/Renderer/Core/RenderCore.h rename src/renderer/core/vk_semaphore.h => runtime/Includes/Renderer/Core/Semaphore.h (65%) rename src/renderer/core/vk_surface.h => runtime/Includes/Renderer/Core/Surface.h (65%) rename src/renderer/core/vk_validation_layers.h => runtime/Includes/Renderer/Core/ValidationLayers.h (57%) rename src/renderer/descriptors/vk_descriptor_pool.h => runtime/Includes/Renderer/Descriptors/DescriptorPool.h (59%) rename src/renderer/descriptors/descriptor_pool_manager.h => runtime/Includes/Renderer/Descriptors/DescriptorPoolManager.h (77%) rename 
src/renderer/descriptors/vk_descriptor_set.h => runtime/Includes/Renderer/Descriptors/DescriptorSet.h (57%) rename src/renderer/descriptors/vk_descriptor_set_layout.h => runtime/Includes/Renderer/Descriptors/DescriptorSetLayout.h (67%) create mode 100644 runtime/Includes/Renderer/Enums.h rename src/renderer/images/vk_image.h => runtime/Includes/Renderer/Images/Image.h (100%) rename src/renderer/images/texture.h => runtime/Includes/Renderer/Images/Texture.h (100%) rename src/renderer/images/texture_atlas.h => runtime/Includes/Renderer/Images/TextureAtlas.h (100%) rename src/renderer/images/texture_descriptor.h => runtime/Includes/Renderer/Images/TextureDescriptor.h (100%) rename src/renderer/images/texture_manager.h => runtime/Includes/Renderer/Images/TextureManager.h (100%) rename src/renderer/pipeline/pipeline.h => runtime/Includes/Renderer/Pipelines/Pipeline.h (100%) rename src/renderer/pixel_put.h => runtime/Includes/Renderer/PixelPut.h (100%) rename src/renderer/renderer.h => runtime/Includes/Renderer/Renderer.h (100%) rename src/renderer/renderpass/vk_framebuffer.h => runtime/Includes/Renderer/Renderpass/FrameBuffer.h (100%) rename src/renderer/renderpass/vk_render_pass.h => runtime/Includes/Renderer/Renderpass/RenderPass.h (100%) rename src/renderer/swapchain/vk_swapchain.h => runtime/Includes/Renderer/Renderpass/Swapchain.h (100%) rename src/renderer/texts/font.h => runtime/Includes/Renderer/Texts/Font.h (100%) rename src/renderer/texts/font_library.h => runtime/Includes/Renderer/Texts/FontLibrary.h (100%) rename src/renderer/texts/text.h => runtime/Includes/Renderer/Texts/Text.h (100%) rename src/renderer/texts/text_descriptor.h => runtime/Includes/Renderer/Texts/TextDescriptor.h (100%) rename src/renderer/texts/text_library.h => runtime/Includes/Renderer/Texts/TextLibrary.h (100%) rename src/renderer/texts/text_manager.h => runtime/Includes/Renderer/Texts/TextManager.h (100%) create mode 100644 runtime/Includes/Utils/Ansi.h rename src/utils/combine_hash.h => runtime/Includes/Utils/CombineHash.h (74%) create mode 100644 runtime/Includes/Utils/ConstMap.h rename src/utils/dogica_ttf.h => runtime/Includes/Utils/DogicaTTF.h (99%) rename src/utils/icon_mlx.h => runtime/Includes/Utils/IconMlx.h (99%) rename src/utils/non_copyable.h => runtime/Includes/Utils/NonCopyable.h (100%) create mode 100644 runtime/Includes/Utils/NonOwningPtr.h create mode 100644 runtime/Includes/Utils/NonOwningPtr.inl rename src/utils/singleton.h => runtime/Includes/Utils/Singleton.h (84%) rename src/core/application.cpp => runtime/Sources/Core/Application.cpp (91%) rename src/core/bridge.cpp => runtime/Sources/Core/Bridge.cpp (100%) create mode 100644 runtime/Sources/Core/EventBus.cpp create mode 100644 runtime/Sources/Core/EventListener.cpp rename src/core/fps.cpp => runtime/Sources/Core/Fps.cpp (53%) rename src/core/graphics.cpp => runtime/Sources/Core/Graphics.cpp (100%) create mode 100644 runtime/Sources/Core/Logs.cpp rename src/core/memory.cpp => runtime/Sources/Core/Memory.cpp (100%) rename src/core/profiler.cpp => runtime/Sources/Core/Profiler.cpp (100%) rename {src/core => runtime/Sources/Core}/UUID.cpp (100%) create mode 100644 runtime/Sources/Drivers/GLFW/GLFWInputs.cpp rename src/platform/inputs.cpp => runtime/Sources/Platform/Inputs.cpp (92%) rename src/platform/window.cpp => runtime/Sources/Platform/Window.cpp (100%) rename src/renderer/buffers/vk_buffer.cpp => runtime/Sources/Renderer/Buffers/Buffer.cpp (100%) rename src/renderer/buffers/vk_ubo.cpp => 
runtime/Sources/Renderer/Buffers/UniformBuffer.cpp (100%) rename src/renderer/buffers/vk_vbo.cpp => runtime/Sources/Renderer/Buffers/VertexBuffer.cpp (100%) rename src/renderer/command/vk_cmd_buffer.cpp => runtime/Sources/Renderer/Command/CommandBuffer.cpp (100%) rename src/renderer/command/cmd_manager.cpp => runtime/Sources/Renderer/Command/CommandManager.cpp (100%) rename src/renderer/command/vk_cmd_pool.cpp => runtime/Sources/Renderer/Command/CommandPool.cpp (100%) rename src/renderer/command/single_time_cmd_manager.cpp => runtime/Sources/Renderer/Command/SingleTimeCommandManager.cpp (100%) rename src/renderer/core/vk_device.cpp => runtime/Sources/Renderer/Core/Device.cpp (100%) rename src/renderer/core/vk_fence.cpp => runtime/Sources/Renderer/Core/Fence.cpp (100%) rename src/renderer/core/vk_instance.cpp => runtime/Sources/Renderer/Core/Instance.cpp (100%) rename src/renderer/core/memory.cpp => runtime/Sources/Renderer/Core/Memory.cpp (100%) rename src/renderer/core/vk_queues.cpp => runtime/Sources/Renderer/Core/Queues.cpp (100%) rename src/renderer/core/render_core.cpp => runtime/Sources/Renderer/Core/RenderCore.cpp (100%) rename src/renderer/core/vk_semaphore.cpp => runtime/Sources/Renderer/Core/Semaphore.cpp (100%) rename src/renderer/core/vk_surface.cpp => runtime/Sources/Renderer/Core/Surface.cpp (100%) rename src/renderer/core/vk_validation_layers.cpp => runtime/Sources/Renderer/Core/ValidationLayers.cpp (100%) rename {src/renderer/descriptors => runtime/Sources/Renderer/Descriptors}/descriptor_pool_manager.cpp (100%) rename {src/renderer/descriptors => runtime/Sources/Renderer/Descriptors}/vk_descriptor_pool.cpp (100%) rename {src/renderer/descriptors => runtime/Sources/Renderer/Descriptors}/vk_descriptor_set.cpp (100%) rename {src/renderer/descriptors => runtime/Sources/Renderer/Descriptors}/vk_descriptor_set_layout.cpp (100%) rename {src/renderer/images => runtime/Sources/Renderer/Images}/texture.cpp (100%) rename {src/renderer/images => runtime/Sources/Renderer/Images}/texture_atlas.cpp (100%) rename {src/renderer/images => runtime/Sources/Renderer/Images}/vk_image.cpp (100%) rename {src/renderer/pipeline => runtime/Sources/Renderer/Pipelines}/pipeline.cpp (100%) rename src/renderer/pixel_put.cpp => runtime/Sources/Renderer/PixelPut.cpp (100%) rename src/renderer/renderer.cpp => runtime/Sources/Renderer/Renderer.cpp (100%) rename src/renderer/renderpass/vk_framebuffer.cpp => runtime/Sources/Renderer/Renderpass/Framebuffer.cpp (100%) rename src/renderer/renderpass/vk_render_pass.cpp => runtime/Sources/Renderer/Renderpass/Renderpass.cpp (100%) rename src/renderer/swapchain/vk_swapchain.cpp => runtime/Sources/Renderer/Renderpass/Swapchain.cpp (100%) rename src/renderer/texts/font.cpp => runtime/Sources/Renderer/Texts/Font.cpp (100%) rename src/renderer/texts/font_library.cpp => runtime/Sources/Renderer/Texts/FontLibrary.cpp (100%) rename src/renderer/texts/text.cpp => runtime/Sources/Renderer/Texts/Text.cpp (100%) rename src/renderer/texts/text_descriptor.cpp => runtime/Sources/Renderer/Texts/TextDescriptor.cpp (100%) rename src/renderer/texts/text_library.cpp => runtime/Sources/Renderer/Texts/TextLibrary.cpp (100%) rename src/renderer/texts/text_manager.cpp => runtime/Sources/Renderer/Texts/TextManager.cpp (100%) delete mode 100644 src/core/application.h delete mode 100644 src/core/application.inl delete mode 100644 src/core/errors.cpp delete mode 100644 src/core/graphics.h delete mode 100644 src/core/graphics.inl delete mode 100644 src/platform/inputs.h delete mode 100644 
src/renderer/buffers/vk_buffer.h delete mode 100644 src/renderer/buffers/vk_ubo.h delete mode 100644 src/renderer/buffers/vk_vbo.h delete mode 100644 src/renderer/command/vk_cmd_buffer.h delete mode 100644 src/renderer/core/render_core.h diff --git a/Makefile b/Makefile index cf1ebdd..e488de3 100644 --- a/Makefile +++ b/Makefile @@ -6,22 +6,23 @@ # By: maldavid +#+ +:+ +#+ # # +#+#+#+#+#+ +#+ # # Created: 2022/10/04 16:43:41 by maldavid #+# #+# # -# Updated: 2024/03/25 18:57:44 by maldavid ### ########.fr # +# Updated: 2024/03/27 21:30:44 by maldavid ### ########.fr # # # # **************************************************************************** # NAME = libmlx.so -SRCS = $(wildcard $(addsuffix /*.cpp, ./src/core)) -SRCS += $(wildcard $(addsuffix /*.cpp, ./src/platform)) -SRCS += $(wildcard $(addsuffix /*.cpp, ./src/renderer)) -SRCS += $(wildcard $(addsuffix /*.cpp, ./src/renderer/**)) +SRCS = $(wildcard $(addsuffix /*.cpp, ./runtime/Sources/Core)) +SRCS += $(wildcard $(addsuffix /*.cpp, ./runtime/Sources/Platform)) +SRCS += $(wildcard $(addsuffix /*.cpp, ./runtime/Sources/Renderer)) +SRCS += $(wildcard $(addsuffix /*.cpp, ./runtime/Sources/Renderer/**)) +SRCS += $(wildcard $(addsuffix /*.cpp, ./runtime/Sources/Drivers/**)) OBJ_DIR = objs/makefile OBJS = $(addprefix $(OBJ_DIR)/, $(SRCS:.cpp=.o)) -PCH = ./src/pre_compiled.h -GCH = ./src/pre_compiled.h.gch +PCH = ./runtime/Includes/PreCompiled.h +GCH = ./runtime/Includes/PreCompiled.h.gch OS = $(shell uname -s) DEBUG ?= false @@ -30,13 +31,14 @@ IMAGES_OPTIMIZED ?= true FORCE_INTEGRATED_GPU ?= false GRAPHICS_MEMORY_DUMP ?= false PROFILER ?= false +LEGACY ?= false MODE = "release" CXX = clang++ CXXFLAGS = -std=c++17 -O3 -fPIC -Wall -Wextra -Wno-deprecated -DSDL_MAIN_HANDLED -INCLUDES = -I./includes -I./src -I./third_party +INCLUDES = -I./includes -I./runtime/Includes -I./third_party LDLIBS = @@ -71,6 +73,10 @@ ifeq ($(PROFILER), true) CXXFLAGS += -D PROFILER endif +ifeq ($(LEGACY), true) + CXXFLAGS += -D LEGACY +endif + RM = rm -rf $(OBJ_DIR)/%.o: %.cpp $(GCH) diff --git a/includes/mlx_profile.h b/includes/mlx_profile.h index 52cecfb..9dd8577 100644 --- a/includes/mlx_profile.h +++ b/includes/mlx_profile.h @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/11/10 08:49:17 by maldavid #+# #+# */ -/* Updated: 2024/01/03 15:33:35 by maldavid ### ########.fr */ +/* Updated: 2024/03/27 18:25:59 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -138,6 +138,16 @@ #endif #endif +#if !defined(MLX_FORCEINLINE) + #if defined(MLX_COMPILER_CLANG) || defined(MLX_COMPILER_GCC) + #define MLX_FORCEINLINE __attribute__((always_inline)) inline + #elif defined(MLX_COMPILER_MSVC) + #define MLX_FORCEINLINE __forceinline + #else + #define MLX_FORCEINLINE inline + #endif +#endif + // Checking common assumptions #ifdef __cplusplus #include diff --git a/runtime/Includes/Core/Application.h b/runtime/Includes/Core/Application.h new file mode 100644 index 0000000..cceb2db --- /dev/null +++ b/runtime/Includes/Core/Application.h @@ -0,0 +1,71 @@ +/* ************************************************************************** */ +/* */ +/* ::: :::::::: */ +/* Application.h :+: :+: :+: */ +/* +:+ +:+ +:+ */ +/* By: maldavid +#+ +:+ +#+ */ +/* +#+#+#+#+#+ +#+ */ +/* Created: 2022/10/04 21:49:46 by maldavid #+# #+# */ +/* Updated: 2024/03/27 21:00:53 by maldavid ### ########.fr */ +/* */ +/* ************************************************************************** */ + 
+#ifndef __MLX_APPLICATION__
+#define __MLX_APPLICATION__
+
+#include
+#include
+#include
+
+namespace mlx
+{
+ class Application
+ {
+ public:
+ Application();
+
+ inline void GetMousePos(int* x, int* y) noexcept;
+ inline void MouseMove(void* win, int x, int y) noexcept;
+
+ inline void OnEvent(void* win, int event, int (*funct_ptr)(int, void*), void* param) noexcept;
+
+ inline void GetScreenSize(void* win, int* w, int* h) noexcept;
+
+ inline void SetFPSCap(std::uint32_t fps) noexcept;
+
+ inline void* NewGraphicsSuport(std::size_t w, std::size_t h, const char* title);
+ inline void ClearGraphicsSupport(void* win);
+ inline void DestroyGraphicsSupport(void* win);
+
+ inline void PixelPut(void* win, int x, int y, std::uint32_t color) const noexcept;
+ inline void StringPut(void* win, int x, int y, std::uint32_t color, char* str);
+
+ void* NewTexture(int w, int h);
+ void* NewStbTexture(char* file, int* w, int* h); // stb textures are image files (png, jpg, bmp, ...)
+ inline void TexturePut(void* win, void* img, int x, int y);
+ inline int GetTexturePixel(void* img, int x, int y);
+ inline void SetTexturePixel(void* img, int x, int y, std::uint32_t color);
+ void DestroyTexture(void* ptr);
+
+ inline void LoopHook(int (*f)(void*), void* param);
+ inline void LoopEnd() noexcept;
+
+ inline void LoadFont(void* win, const std::filesystem::path& filepath, float scale);
+
+ void Run() noexcept;
+
+ ~Application();
+
+ private:
+ FpsManager m_fps;
+ std::list m_textures;
+ std::vector> m_graphics;
+ std::function f_loop_hook;
+ std::unique_ptr p_in;
+ void* p_param = nullptr;
+ };
+}
+
+#include
+
+#endif // __MLX_APPLICATION__
diff --git a/runtime/Includes/Core/Application.inl b/runtime/Includes/Core/Application.inl
new file mode 100644
index 0000000..2457c5b
--- /dev/null
+++ b/runtime/Includes/Core/Application.inl
@@ -0,0 +1,196 @@
+/* ************************************************************************** */
+/* */
+/* ::: :::::::: */
+/* application.inl :+: :+: :+: */
+/* +:+ +:+ +:+ */
+/* By: maldavid +#+ +:+ +#+ */
+/* +#+#+#+#+#+ +#+ */
+/* Created: 2022/10/04 21:49:46 by maldavid #+# #+# */
+/* Updated: 2023/04/02 14:56:27 by maldavid ### ########.fr */
+/* */
+/* ************************************************************************** */
+
+#include
+
+#define CHECK_WINDOW_PTR(win) \
+ if(win == nullptr) \
+ { \
+ Error("invalid window ptr (NULL)"); \
+ return; \
+ } \
+ else if(*static_cast(win) < 0 || *static_cast(win) > static_cast(m_graphics.size()))\
+ { \
+ Error("invalid window ptr"); \
+ return; \
+ } else {}
+
+#define CHECK_IMAGE_PTR(img, retval) \
+ if(img == nullptr) \
+ { \
+ Error("invalid image ptr (NULL)"); \
+ retval; \
+ } \
+ else if(std::find_if(m_textures.begin(), m_textures.end(), [=](const Texture& texture) \
+ { \
+ return &texture == img; \
+ }) == m_textures.end()) \
+ { \
+ Error("invalid image ptr"); \
+ retval; \
+ } else {}
+
+namespace mlx
+{
+ void Application::GetMousePos(int* x, int* y) noexcept
+ {
+ *x = p_in->GetX();
+ *y = p_in->GetY();
+ }
+
+ void Application::MouseMove(void* win, int x, int y) noexcept
+ {
+ CHECK_WINDOW_PTR(win);
+ if(!m_graphics[*static_cast(win)]->HasWindow())
+ {
+ Warning("trying to move the mouse relative to a window that is targeting an image and not a real window, this is not allowed (move ignored)");
+ return;
+ }
+ }
+
+ void Application::OnEvent(void* win, int event, int (*funct_ptr)(int, void*), void* param) noexcept
+ {
+ CHECK_WINDOW_PTR(win);
+ if(!m_graphics[*static_cast(win)]->HasWindow())
{ + Warning("trying to add event hook for a window that is targeting an image and not a real window, this is not allowed (hook ignored)"); + return; + } + p_in->OnEvent(m_graphics[*static_cast(win)]->GetWindow()->GetID(), event, funct_ptr, param); + } + + void Application::GetScreenSize(void* win, int* w, int* h) noexcept + { + CHECK_WINDOW_PTR(win); + *w = 0; + *h = 0; + } + + void Application::SetFPSCap(std::uint32_t fps) noexcept + { + m_fps.SetMaxFPS(fps); + } + + void* Application::NewGraphicsSuport(std::size_t w, std::size_t h, const char* title) + { + MLX_PROFILE_FUNCTION(); + auto it = std::find_if(m_textures.begin(), m_textures.end(), [=](const Texture& texture) + { + return &texture == reinterpret_cast(const_cast(title)); + }); + if(it != _textures.end()) + m_graphics.emplace_back(std::make_unique(w, h, reinterpret_cast(const_cast(title)), m_graphics.size())); + else + { + if(title == NULL) + { + FatalError("invalid window title (NULL)"); + return nullptr; + } + m_graphics.emplace_back(std::make_unique(w, h, title, m_graphics.size())); + p_in->RegisterWindow(m_graphics.back()->GetWindow()); + } + return static_cast(&m_graphics.back()->GetID()); + } + + void Application::ClearGraphicsSupport(void* win) + { + MLX_PROFILE_FUNCTION(); + CHECK_WINDOW_PTR(win); + m_graphics[*static_cast(win)]->ClearRenderData(); + } + + void Application::DestroyGraphicsSupport(void* win) + { + MLX_PROFILE_FUNCTION(); + CHECK_WINDOW_PTR(win); + m_graphics[*static_cast(win)].reset(); + } + + void Application::PixelPut(void* win, int x, int y, std::uint32_t color) const noexcept + { + MLX_PROFILE_FUNCTION(); + CHECK_WINDOW_PTR(win); + m_graphics[*static_cast(win)]->PixelPut(x, y, color); + } + + void Application::StringPut(void* win, int x, int y, std::uint32_t color, char* str) + { + MLX_PROFILE_FUNCTION(); + CHECK_WINDOW_PTR(win); + if(str == nullptr) + { + Error("wrong text (NULL)"); + return; + } + if(std::strlen(str) == 0) + { + Warning("trying to put an empty text"); + return; + } + m_graphics[*static_cast(win)]->StringPut(x, y, color, str); + } + + void Application::LoadFont(void* win, const std::filesystem::path& filepath, float scale) + { + MLX_PROFILE_FUNCTION(); + CHECK_WINDOW_PTR(win); + m_graphics[*static_cast(win)]->LoadFont(filepath, scale); + } + + void Application::TexturePut(void* win, void* img, int x, int y) + { + MLX_PROFILE_FUNCTION(); + CHECK_WINDOW_PTR(win); + CHECK_IMAGE_PTR(img, return); + NonOwningPtr texture = static_cast(img); + if(!texture->IsInit()) + Error("trying to put a texture that has been destroyed"); + else + m_graphics[*static_cast(win)]->TexturePut(texture, x, y); + } + + int Application::GetTexturePixel(void* img, int x, int y) + { + MLX_PROFILE_FUNCTION(); + CHECK_IMAGE_PTR(img, return 0); + NonOwningPtr texture = static_cast(img); + if(!texture->IsInit()) + { + Error("trying to get a pixel from texture that has been destroyed"); + return 0; + } + return texture->GetPixel(x, y); + } + + void Application::setTexturePixel(void* img, int x, int y, std::uint32_t color) + { + MLX_PROFILE_FUNCTION(); + CHECK_IMAGE_PTR(img, return); + NonOwningPtr texture = static_cast(img); + if(!texture->IsInit()) + Error("trying to set a pixel on texture that has been destroyed"); + else + texture->SetPixel(x, y, color); + } + + void Application::LoopHook(int (*f)(void*), void* param) + { + f_loop_hook = f; + p_param = param; + } + + void Application::LoopEnd() noexcept + { + p_in->Finish(); + } +} diff --git a/runtime/Includes/Core/Enums.h b/runtime/Includes/Core/Enums.h new file 
mode 100644 index 0000000..80ef797 --- /dev/null +++ b/runtime/Includes/Core/Enums.h @@ -0,0 +1,33 @@ +/* ************************************************************************** */ +/* */ +/* ::: :::::::: */ +/* Enums.h :+: :+: :+: */ +/* +:+ +:+ +:+ */ +/* By: maldavid +#+ +:+ +#+ */ +/* +#+#+#+#+#+ +#+ */ +/* Created: 2024/03/27 17:15:24 by maldavid #+# #+# */ +/* Updated: 2024/03/27 17:16:03 by maldavid ### ########.fr */ +/* */ +/* ************************************************************************** */ + +#ifndef __MLX_CORE_ENUMS__ +#define __MLX_CORE_ENUMS__ + +#include + +namespace mlx +{ + enum class LogType + { + Message = 0, + Warning, + Error, + FatalError, + + EndEnum + }; + + constexpr std::size_t LogTypeCount = static_cast(LogType::EndEnum) + 1; +} + +#endif diff --git a/src/core/errors.h b/runtime/Includes/Core/EventBase.h similarity index 63% rename from src/core/errors.h rename to runtime/Includes/Core/EventBase.h index b740865..37b5944 100644 --- a/src/core/errors.h +++ b/runtime/Includes/Core/EventBase.h @@ -1,29 +1,24 @@ /* ************************************************************************** */ /* */ /* ::: :::::::: */ -/* errors.h :+: :+: :+: */ +/* EventBase.h :+: :+: :+: */ /* +:+ +:+ +:+ */ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ -/* Created: 2022/10/04 17:42:32 by maldavid #+# #+# */ -/* Updated: 2024/03/25 19:13:19 by maldavid ### ########.fr */ +/* Created: 2024/03/27 17:27:22 by maldavid #+# #+# */ +/* Updated: 2024/03/27 17:31:16 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ -#ifndef __MLX_ERRORS__ -#define __MLX_ERRORS__ +#ifndef __MLX_BASE_EVENT__ +#define __MLX_BASE_EVENT__ -enum class e_kind +namespace mlx { - message, - warning, - error, - fatal_error -}; - -namespace mlx::core::error -{ - void report(e_kind kind, std::string msg, ...); + struct EventBase + { + virtual std::uint32_t What() const = 0; + }; } -#endif // __MLX_ERRORS__ +#endif diff --git a/runtime/Includes/Core/EventBus.h b/runtime/Includes/Core/EventBus.h new file mode 100644 index 0000000..db110d3 --- /dev/null +++ b/runtime/Includes/Core/EventBus.h @@ -0,0 +1,35 @@ +/* ************************************************************************** */ +/* */ +/* ::: :::::::: */ +/* EventBus.h :+: :+: :+: */ +/* +:+ +:+ +:+ */ +/* By: maldavid +#+ +:+ +#+ */ +/* +#+#+#+#+#+ +#+ */ +/* Created: 2024/03/27 17:30:36 by maldavid #+# #+# */ +/* Updated: 2024/03/27 17:31:41 by maldavid ### ########.fr */ +/* */ +/* ************************************************************************** */ + +#ifndef __MLX_EVENT_BUS__ +#define __MLX_EVENT_BUS__ + +#include +#include + +namespace mlx +{ + class EventBus + { + public: + EventBus() = delete; + static void Send(const std::string& listener_name, const EventBase& event); + static void SendBroadcast(const EventBase& event); + inline static void RegisterListener(const EventListener& listener) { s_listeners.push_back(listener); } + ~EventBus() = delete; + + private: + inline static std::vector s_listeners; + }; +} + +#endif diff --git a/runtime/Includes/Core/EventListener.h b/runtime/Includes/Core/EventListener.h new file mode 100644 index 0000000..d6c002c --- /dev/null +++ b/runtime/Includes/Core/EventListener.h @@ -0,0 +1,37 @@ +/* ************************************************************************** */ +/* */ +/* ::: :::::::: */ +/* EventListener.h :+: :+: :+: */ +/* +:+ +:+ +:+ */ +/* By: maldavid +#+ +:+ +#+ */ +/* +#+#+#+#+#+ +#+ */ +/* Created: 
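// Editor's note: the EndEnum sentinel above is the usual trick for deriving an
// enumerator count at compile time (the patch also counts the sentinel itself, hence
// its "+ 1"; the plain variant below treats the sentinel value as the number of real
// entries). Sketch only, names invented.
#include <array>
#include <cstddef>
#include <string_view>

enum class LogLevelSketch { Message = 0, Warning, Error, FatalError, EndEnum };
constexpr std::size_t LogLevelCountSketch = static_cast<std::size_t>(LogLevelSketch::EndEnum);

// Tables sized from the sentinel stop compiling if an enumerator is added
// without extending them.
constexpr std::array<std::string_view, LogLevelCountSketch> log_level_names{
	"message", "warning", "error", "fatal error"
};
static_assert(log_level_names.size() == LogLevelCountSketch);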
2024/03/27 17:28:17 by maldavid #+# #+# */ +/* Updated: 2024/03/27 17:37:53 by maldavid ### ########.fr */ +/* */ +/* ************************************************************************** */ + +#ifndef __MLX_EVENT_LISTENER__ +#define __MLX_EVENT_LISTENER__ + +#include + +namespace mlx +{ + class EventListener + { + public: + EventListener() = delete; + EventListener(func::function functor, std::string name); + + inline const std::string& GetName() const { return m_name; } + inline void Call(const EventBase& event) const noexcept { m_listen_functor(event); } + + ~EventListener() = default; + + private: + func::function m_listen_functor; + std::string m_name; + }; +} + +#endif diff --git a/runtime/Includes/Core/Format.h b/runtime/Includes/Core/Format.h new file mode 100644 index 0000000..d55b824 --- /dev/null +++ b/runtime/Includes/Core/Format.h @@ -0,0 +1,33 @@ +/* ************************************************************************** */ +/* */ +/* ::: :::::::: */ +/* Format.h :+: :+: :+: */ +/* +:+ +:+ +:+ */ +/* By: maldavid +#+ +:+ +#+ */ +/* +#+#+#+#+#+ +#+ */ +/* Created: 2024/03/27 17:11:09 by maldavid #+# #+# */ +/* Updated: 2024/03/27 17:12:03 by maldavid ### ########.fr */ +/* */ +/* ************************************************************************** */ + +#ifndef __MLX_FORMAT__ +#define __MLX_FORMAT__ + +#include +#include + +namespace mlx +{ + template + struct IsOstreamable : std::false_type {}; + + template + struct IsOstreamable() << std::declval())>> : std::true_type {}; + + template...>, int> = 0> + auto Format(std::string_view format, const Args&... args); +} + +#include + +#endif diff --git a/runtime/Includes/Core/Format.inl b/runtime/Includes/Core/Format.inl new file mode 100644 index 0000000..e6d94d2 --- /dev/null +++ b/runtime/Includes/Core/Format.inl @@ -0,0 +1,146 @@ +/* ************************************************************************** */ +/* */ +/* ::: :::::::: */ +/* Format.inl :+: :+: :+: */ +/* +:+ +:+ +:+ */ +/* By: maldavid +#+ +:+ +#+ */ +/* +#+#+#+#+#+ +#+ */ +/* Created: 2024/03/27 17:11:09 by maldavid #+# #+# */ +/* Updated: 2024/03/27 17:12:03 by maldavid ### ########.fr */ +/* */ +/* ************************************************************************** */ + +#include +#include +#include + +namespace mlx +{ + namespace Internal + { + template + void Format(std::stringstream& ss, It first, It last) + { + for(auto it = first; it != last; ++it) + { + switch(*it) + { + case '%': + throw std::invalid_argument{"too few arguments"}; + case '/': + ++it; + if(it == last) + throw std::invalid_argument{"stray '/'"}; + [[fallthrough]]; + + default: ss << *it; + } + } + } + + template + void Format(std::stringstream& ss, It first, It last, const T& arg, const Args&... args) + { + for(auto it = first; it != last; ++it) + { + switch(*it) + { + case '%': + ss << arg; + return Format(ss, ++it, last, args...); + case '/': + ++it; + if(it == last) + throw std::invalid_argument{"stray '/'"}; + [[fallthrough]]; + + default: ss << *it; + } + } + throw std::invalid_argument{"too many arguments"}; + } + + template + void Format(std::ostream& os, It first, It last) + { + for(auto it = first; it != last; ++it) + { + switch(*it) + { + case '%': + throw std::invalid_argument{"too few arguments"}; + case '/': + ++it; + if(it == last) + throw std::invalid_argument{"stray '/'"}; + [[fallthrough]]; + + default: os << *it; + } + } + } + + template + void Format(std::ostream& os, It first, It last, const T& arg, const Args&... 
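// Editor's note: a usage sketch for the EventBus/EventListener pair declared above.
// ResizeEventSketch, the event code and the listener name are invented; the functor
// type is assumed to accept a lambda taking a const EventBase&.
#include <cstdint>

struct ResizeEventSketch : public mlx::EventBase
{
	std::uint32_t What() const override { return 1; } // arbitrary event code
	int width = 0;
	int height = 0;
};

inline void RegisterRendererListenerSketch()
{
	mlx::EventListener listener([](const mlx::EventBase& event)
	{
		if(event.What() == 1)
		{
			// e.g. mark the swapchain as needing recreation
		}
	}, "renderer");
	mlx::EventBus::RegisterListener(listener);
}

// From the windowing code, once a resize is detected:
//   ResizeEventSketch event;
//   event.width = w; event.height = h;
//   mlx::EventBus::Send("renderer", event);  // targeted delivery
//   mlx::EventBus::SendBroadcast(event);     // or notify every registered listener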
args) + { + for(auto it = first; it != last; ++it) + { + switch(*it) + { + case '%': + os << arg; + return Format(os, ++it, last, args...); + case '/': + ++it; + if(it == last) + throw std::invalid_argument{"stray '/'"}; + [[fallthrough]]; + + default: os << *it; + } + } + throw std::invalid_argument{"too many arguments"}; + } + + template + struct Formatter + { + std::string_view format; + std::tuple args; + }; + + template + void FormatHelper(std::stringstream& ss, const Formatter& formatter, std::index_sequence) + { + Format(ss, formatter.format.begin(), formatter.format.end(), + std::get(formatter.args)...); + } + + template + std::stringstream& operator<<(std::stringstream& ss, const Formatter& printer) + { + FormatHelper(ss, printer, std::index_sequence_for{}); + return ss; + } + + template + void FormatHelper(std::ostream& os, const Formatter& formatter, std::index_sequence) + { + Format(os, formatter.format.begin(), formatter.format.end(), + std::get(formatter.args)...); + } + + template + std::ostream& operator<<(std::ostream& os, const Formatter& printer) + { + FormatHelper(os, printer, std::index_sequence_for{}); + return os; + } + } + + template...>, int>> + auto Format(std::string_view format, const Args&... args) + { + return Internal::Formatter{format, std::forward_as_tuple(args...)}; + } +} diff --git a/src/core/fps.h b/runtime/Includes/Core/Fps.h similarity index 65% rename from src/core/fps.h rename to runtime/Includes/Core/Fps.h index f0d6fbb..0803bed 100644 --- a/src/core/fps.h +++ b/runtime/Includes/Core/Fps.h @@ -1,12 +1,12 @@ /* ************************************************************************** */ /* */ /* ::: :::::::: */ -/* fps.h :+: :+: :+: */ +/* Fps.h :+: :+: :+: */ /* +:+ +:+ +:+ */ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2024/01/18 14:53:30 by maldavid #+# #+# */ -/* Updated: 2024/03/25 22:58:32 by maldavid ### ########.fr */ +/* Updated: 2024/03/27 20:52:06 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -20,19 +20,19 @@ namespace mlx public: FpsManager() = default; - void init(); - bool update(); - inline void setMaxFPS(std::uint32_t fps) noexcept { _max_fps = fps; _ns = 1000000000.0 / fps; } + void Init(); + bool Update(); + inline void SetMaxFPS(std::uint32_t fps) noexcept { m_max_fps = fps; m_ns = 1000000000.0 / fps; } ~FpsManager() = default; private: - double _ns = 1000000000.0 / 1'337'000.0; - std::int64_t _fps_before = 0; - std::int64_t _fps_now = 0; - std::int64_t _timer = 0; - std::uint32_t _max_fps = 1'337'000; - std::uint32_t _fps_elapsed_time = 0; + double m_ns = 1000000000.0 / 1'337'000.0; + std::int64_t m_fps_before = 0; + std::int64_t m_fps_now = 0; + std::int64_t m_timer = 0; + std::uint32_t m_max_fps = 1'337'000; + std::uint32_t m_fps_elapsed_time = 0; }; } diff --git a/runtime/Includes/Core/Graphics.h b/runtime/Includes/Core/Graphics.h new file mode 100644 index 0000000..9c60f82 --- /dev/null +++ b/runtime/Includes/Core/Graphics.h @@ -0,0 +1,75 @@ +/* ************************************************************************** */ +/* */ +/* ::: :::::::: */ +/* Graphics.h :+: :+: :+: */ +/* +:+ +:+ +:+ */ +/* By: maldavid +#+ +:+ +#+ */ +/* +#+#+#+#+#+ +#+ */ +/* Created: 2023/04/02 14:49:49 by maldavid #+# #+# */ +/* Updated: 2024/03/27 21:16:11 by maldavid ### ########.fr */ +/* */ +/* ************************************************************************** */ + +#ifndef __MLX_GRAPHICS__ +#define __MLX_GRAPHICS__ + +#include +#include 
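// Editor's note: usage sketch for mlx::Format as implemented above: '%' is replaced by
// the next argument (any ostreamable type), '/' escapes the character that follows it,
// and a mismatched argument count throws std::invalid_argument.
#include <iostream>
#include <sstream>

inline void FormatUsageSketch()
{
	std::cout << mlx::Format("loaded % textures in % ms", 3, 4.2) << '\n';
	// -> "loaded 3 textures in 4.2 ms"

	std::cout << mlx::Format("escaped: /% stays a percent sign") << '\n';
	// -> "escaped: % stays a percent sign"

	std::stringstream ss;
	ss << mlx::Format("window % (%x%)", 0, 400, 400); // stringstreams have their own overload
}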
+#include +#include +#include +#include +#include +#include + +namespace mlx +{ + class GraphicsSupport : public NonCopyable + { + public: + GraphicsSupport(std::size_t w, std::size_t h, NonOwningPtr render_target, int id); + GraphicsSupport(std::size_t w, std::size_t h, std::string title, int id); + + inline int& GetID() noexcept; + inline std::shared_ptr GetWindow(); + + void Render() noexcept; + + inline void ClearRenderData() noexcept; + inline void PixelPut(int x, int y, std::uint32_t color) noexcept; + inline void StringPut(int x, int y, std::uint32_t color, std::string str); + inline void TexturePut(NonOwningPtr texture, int x, int y); + inline void LoadFont(const std::filesystem::path& filepath, float scale); + inline void TryEraseTextureFromManager(NonOwningPtr texture) noexcept; + + inline bool HasWindow() const noexcept { return _has_window; } + + inline Renderer& GetRenderer() { return *_renderer; } + + ~GraphicsSupport(); + + private: + PixelPutPipeline m_pixel_put_pipeline; + + std::vector> m_drawlist; + + TextManager m_text_manager; + TextureManager m_texture_manager; + + glm::mat4 m_proj = glm::mat4(1.0); + + std::shared_ptr p_window; + std::unique_ptr p_renderer; + + std::size_t m_width = 0; + std::size_t m_height = 0; + + int m_id; + + bool m_has_window; + }; +} + +#include + +#endif diff --git a/runtime/Includes/Core/Graphics.inl b/runtime/Includes/Core/Graphics.inl new file mode 100644 index 0000000..6e9771e --- /dev/null +++ b/runtime/Includes/Core/Graphics.inl @@ -0,0 +1,79 @@ +/* ************************************************************************** */ +/* */ +/* ::: :::::::: */ +/* graphics.inl :+: :+: :+: */ +/* +:+ +:+ +:+ */ +/* By: maldavid +#+ +:+ +#+ */ +/* +#+#+#+#+#+ +#+ */ +/* Created: 2023/04/02 15:13:55 by maldavid #+# #+# */ +/* Updated: 2023/04/02 15:26:16 by maldavid ### ########.fr */ +/* */ +/* ************************************************************************** */ + +#include + +namespace mlx +{ + int& GraphicsSupport::GetID() noexcept { return m_id; } + std::shared_ptr GraphicsSupport::GetWindow() { return p_window; } + + void GraphicsSupport::ClearRenderData() noexcept + { + MLX_PROFILE_FUNCTION(); + m_drawlist.clear(); + m_pixel_put_pipeline.Clear(); + m_text_manager.Clear(); + m_texture_manager.Clear(); + } + + void GraphicsSupport::PixelPut(int x, int y, std::uint32_t color) noexcept + { + MLX_PROFILE_FUNCTION(); + m_pixel_put_pipeline.SetPixel(x, y, color); + } + + void GraphicsSupport::StringPut(int x, int y, std::uint32_t color, std::string str) + { + MLX_PROFILE_FUNCTION(); + std::pair, bool> res = m_text_manager.RegisterText(x, y, color, str); + if(!res.second) // if this is not a completly new text draw + { + auto it = std::find(m_drawlist.begin(), m_drawlist.end(), res.first); + if(it != m_drawlist.end()) + m_drawlist.erase(it); + } + m_drawlist.push_back(res.first); + } + + void GraphicsSupport::TexturePut(NonOwningPtr texture, int x, int y) + { + MLX_PROFILE_FUNCTION(); + auto res = m_texture_manager.RegisterTexture(texture, x, y); + if(!res.second) // if this is not a completly new texture draw + { + auto it = std::find(m_drawlist.begin(), m_drawlist.end(), res.first); + if(it != m_drawlist.end()) + m_drawlist.erase(it); + } + m_drawlist.push_back(res.first); + } + + void GraphicsSupport::LoadFont(const std::filesystem::path& filepath, float scale) + { + MLX_PROFILE_FUNCTION(); + m_text_manager.LoadFont(*_renderer, filepath, scale); + } + + void GraphicsSupport::TryEraseTextureFromManager(NonOwningPtr texture) noexcept + { + 
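// Editor's note: RegisterText()/RegisterTexture() above return a {resource, is_new}
// pair; when the resource is already known it is erased and re-pushed so the most
// recent put is drawn last. A small generic sketch of that idea (the container element
// type is a placeholder, not the patch's exact DrawableResource list).
#include <algorithm>
#include <memory>
#include <utility>
#include <vector>

template<typename Resource>
void PushOrMoveToBack(std::vector<std::shared_ptr<Resource>>& drawlist,
                      std::pair<std::shared_ptr<Resource>, bool> res)
{
	if(!res.second) // not a brand new draw: drop the old position first
	{
		auto it = std::find(drawlist.begin(), drawlist.end(), res.first);
		if(it != drawlist.end())
			drawlist.erase(it);
	}
	drawlist.push_back(res.first); // newest put ends up on top
}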
MLX_PROFILE_FUNCTION(); + for(auto it = m_drawlist.begin(); it != m_drawlist.end();) + { + if(m_texture_manager.IsTextureKnown(texture)) + it = m_drawlist.erase(it); + else + ++it; + } + m_texture_manager.EraseTextures(texture); + } +} diff --git a/runtime/Includes/Core/Logs.h b/runtime/Includes/Core/Logs.h new file mode 100644 index 0000000..edd816b --- /dev/null +++ b/runtime/Includes/Core/Logs.h @@ -0,0 +1,84 @@ +/* ************************************************************************** */ +/* */ +/* ::: :::::::: */ +/* Logs.h :+: :+: :+: */ +/* +:+ +:+ +:+ */ +/* By: maldavid +#+ +:+ +#+ */ +/* +#+#+#+#+#+ +#+ */ +/* Created: 2024/03/27 17:14:10 by maldavid #+# #+# */ +/* Updated: 2024/03/27 17:19:23 by maldavid ### ########.fr */ +/* */ +/* ************************************************************************** */ + +#ifndef __MLX_LOGS__ +#define __MLX_LOGS__ + +#include + +namespace mlx +{ + template + void DebugLog(unsigned int line, std::string_view file, std::string_view function, std::string message, const Args&... args); + + template + void Error(unsigned int line, std::string_view file, std::string_view function, std::string message, const Args&... args); + + template + void Warning(unsigned int line, std::string_view file, std::string_view function, std::string message, const Args&... args); + + template + void Message(unsigned int line, std::string_view file, std::string_view function, std::string message, const Args&... args); + + template + void FatalError(unsigned int line, std::string_view file, std::string_view function, std::string message, const Args&... args); + + template + void Verify(bool cond, unsigned int line, std::string_view file, std::string_view function, std::string message, const Args&... args); + + class Logs + { + public: + Logs() = delete; + + static void Report(LogType type, std::string message); + static void Report(LogType type, unsigned int line, std::string_view file, std::string_view function, std::string message); + + ~Logs() = delete; + }; + + #if defined(DEBUG) + template + void Assert(bool cond, unsigned int line, std::string_view file, std::string_view function, std::string message, const Args&... args); + #else + template + void Assert(bool cond, unsigned int line, std::string_view file, std::string_view function, std::string message, const Args&... args) {} + #endif +} + +#include + +namespace mlx +{ + #undef DebugLog + #define DebugLog(...) DebugLog(__LINE__, __FILE__, AK_FUNC_SIG, __VA_ARGS__) + + #undef Message + #define Message(...) Message(__LINE__, __FILE__, __func__, __VA_ARGS__) + + #undef Warning + #define Warning(...) Warning(__LINE__, __FILE__, __func__, __VA_ARGS__) + + #undef Error + #define Error(...) Error(__LINE__, __FILE__, __func__, __VA_ARGS__) + + #undef FatalError + #define FatalError(...) FatalError(__LINE__, __FILE__, __func__, __VA_ARGS__) + + #undef Verify + #define Verify(cond, ...) Verify(cond, __LINE__, __FILE__, __func__, __VA_ARGS__) + + #undef Assert + #define Assert(cond, ...) 
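// Editor's note: a standalone illustration of the #undef/#define trick used above.
// A macro layered on top of the real function injects the call site's location, so
// user code keeps writing Error("message") while the sink still receives
// line/file/function. Names below are invented.
#include <cstdio>

namespace logs_sketch
{
	inline void Error(unsigned int line, const char* file, const char* function, const char* message)
	{
		std::printf("[error] %s:%u (%s): %s\n", file, line, function, message);
	}
}

// From here on, logs_sketch::Error("msg") expands to
// logs_sketch::Error(__LINE__, __FILE__, __func__, "msg") at every call site.
#define Error(...) Error(__LINE__, __FILE__, __func__, __VA_ARGS__)

inline void FailSomewhere()
{
	logs_sketch::Error("something went wrong");
}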
Assert(cond, __LINE__, __FILE__, __func__, __VA_ARGS__) +} + +#endif diff --git a/runtime/Includes/Core/Logs.inl b/runtime/Includes/Core/Logs.inl new file mode 100644 index 0000000..2c33f55 --- /dev/null +++ b/runtime/Includes/Core/Logs.inl @@ -0,0 +1,135 @@ +/* **************************************************************************** */ +/* */ +/* ::: :::::::: */ +/* Logs.inl :+: :+: :+: */ +/* +:+ +:+ +:+ */ +/* By: maldavid +#+ +:+ +#+ */ +/* +#+#+#+#+#+ +#+ */ +/* Created: 2024/03/27 17:19:47 by maldavid #+# #+# */ +/* Updated: 2024/03/27 17:19:47 by maldavid ### ########.fr */ +/* */ +/* **************************************************************************** */ + +#include +#include + +namespace mlx +{ + template + void DebugLog(unsigned int line, std::string_view file, std::string_view function, std::string message, const Args&... args) + { + using namespace std::literals; + try + { + std::stringstream ss; + ss << Format(message, args...); + Logs::Report(LogType::Debug, line, file, function, ss.str()); + } + catch(const std::exception& e) + { + Logs::Report(LogType::Error, "formatter exception catched in the log printer : "s + e.what()); + } + } + + template + void Error(unsigned int line, std::string_view file, std::string_view function, std::string message, const Args&... args) + { + using namespace std::literals; + try + { + std::stringstream ss; + ss << Format(message, args...); + Logs::Report(LogType::Error, line, file, function, ss.str()); + } + catch(const std::exception& e) + { + Logs::Report(LogType::Error, "formatter exception catched in the log printer : "s + e.what()); + } + } + + template + void Warning(unsigned int line, std::string_view file, std::string_view function, std::string message, const Args&... args) + { + using namespace std::literals; + try + { + std::stringstream ss; + ss << Format(message, args...); + Logs::Report(LogType::Warning, line, file, function, ss.str()); + } + catch(const std::exception& e) + { + Logs::Report(LogType::Error, "formatter exception catched in the log printer : "s + e.what()); + } + } + + template + void Message(unsigned int line, std::string_view file, std::string_view function, std::string message, const Args&... args) + { + using namespace std::literals; + try + { + std::stringstream ss; + ss << Format(message, args...); + Logs::Report(LogType::Message, line, file, function, ss.str()); + } + catch(const std::exception& e) + { + Logs::Report(LogType::Error, "formatter exception catched in the log printer : "s + e.what()); + } + } + + template + void FatalError(unsigned int line, std::string_view file, std::string_view function, std::string message, const Args&... args) + { + using namespace std::literals; + try + { + std::stringstream ss; + ss << Format(message, args...); + Logs::Report(LogType::FatalError, line, file, function, ss.str()); + } + catch(const std::exception& e) + { + Logs::Report(LogType::Error, "formatter exception catched in the log printer : "s + e.what()); + } + } + + template + void Verify(bool cond, unsigned int line, std::string_view file, std::string_view function, std::string message, const Args&... 
args) + { + using namespace std::literals; + if(cond) + return; + try + { + std::stringstream ss; + ss << Format("Verification failed : %", message, args...); + Logs::Report(LogType::FatalError, line, file, function, ss.str()); + } + catch(const std::exception& e) + { + Logs::Report(LogType::Error, "formatter exception catched in the log printer : "s + e.what()); + } + } + + #if defined(DEBUG) + template + void Assert(bool cond, unsigned int line, std::string_view file, std::string_view function, std::string message, const Args&... args) + { + using namespace std::literals; + if(cond) + return; + try + { + std::stringstream ss; + ss << Format("Assertion failed : %", message, args...); + Logs::Report(LogType::FatalError, line, file, function, ss.str()); + } + catch(const std::exception& e) + { + Logs::Report(LogType::Error, "formatter exception catched in the log printer : "s + e.what()); + } + } + #endif +} diff --git a/src/core/memory.h b/runtime/Includes/Core/Memory.h similarity index 72% rename from src/core/memory.h rename to runtime/Includes/Core/Memory.h index 1e08e1b..db11105 100644 --- a/src/core/memory.h +++ b/runtime/Includes/Core/Memory.h @@ -1,19 +1,19 @@ /* ************************************************************************** */ /* */ /* ::: :::::::: */ -/* memory.h :+: :+: :+: */ +/* Memory.h :+: :+: :+: */ /* +:+ +:+ +:+ */ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/12/07 16:31:51 by kbz_8 #+# #+# */ -/* Updated: 2024/03/25 19:13:05 by maldavid ### ########.fr */ +/* Updated: 2024/03/27 21:16:44 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __MLX_MEMORY__ #define __MLX_MEMORY__ -#include +#include namespace mlx { @@ -22,17 +22,17 @@ namespace mlx friend class Singleton; public: - static void* malloc(std::size_t size); - static void* calloc(std::size_t n, std::size_t size); - static void* realloc(void* ptr, std::size_t size); - static void free(void* ptr); + static void* Malloc(std::size_t size); + static void* Calloc(std::size_t n, std::size_t size); + static void* Realloc(void* ptr, std::size_t size); + static void Free(void* ptr); private: MemManager() = default; ~MemManager(); private: - inline static std::list _blocks; + inline static std::list s_blocks; }; } diff --git a/src/core/profiler.h b/runtime/Includes/Core/Profiler.h similarity index 64% rename from src/core/profiler.h rename to runtime/Includes/Core/Profiler.h index e5f5c41..dbd6b1f 100644 --- a/src/core/profiler.h +++ b/runtime/Includes/Core/Profiler.h @@ -1,19 +1,19 @@ /* ************************************************************************** */ /* */ /* ::: :::::::: */ -/* profiler.h :+: :+: :+: */ +/* Profiler.h :+: :+: :+: */ /* +:+ +:+ +:+ */ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2024/01/10 13:35:45 by maldavid #+# #+# */ -/* Updated: 2024/03/25 19:12:57 by maldavid ### ########.fr */ +/* Updated: 2024/03/27 21:19:01 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __MLX_PROFILER__ #define __MLX_PROFILER__ -#include +#include namespace mlx { @@ -34,63 +34,63 @@ namespace mlx Profiler(const Profiler&) = delete; Profiler(Profiler&&) = delete; - void appendProfileData(ProfileResult&& result); + void AppendProfileData(ProfileResult&& result); private: - Profiler() { beginRuntimeSession(); } + Profiler() { BeginRuntimeSession(); } ~Profiler(); - void beginRuntimeSession(); - void 
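// Editor's note: MemManager above keeps every pointer it hands out in a static list,
// presumably so leftover blocks can be released when the singleton dies. A reduced,
// self-contained sketch of that bookkeeping, under that assumption.
#include <cstdlib>
#include <list>

class MemManagerSketch
{
public:
	static void* Malloc(std::size_t size)
	{
		void* ptr = std::malloc(size);
		if(ptr != nullptr)
			s_blocks.push_back(ptr); // remember the block so it can be reclaimed later
		return ptr;
	}

	static void Free(void* ptr)
	{
		s_blocks.remove(ptr);
		std::free(ptr);
	}

	~MemManagerSketch()
	{
		// Anything still tracked here leaked; release it on shutdown.
		for(void* ptr : s_blocks)
			std::free(ptr);
		s_blocks.clear();
	}

private:
	inline static std::list<void*> s_blocks;
};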
writeProfile(const ProfileResult& result); - void endRuntimeSession(); - inline void writeHeader() + void BeginRuntimeSession(); + void WriteProfile(const ProfileResult& result); + void EndRuntimeSession(); + inline void WriteHeader() { - _output_stream << "{\"profileData\":[{}"; - _output_stream.flush(); + m_output_stream << "{\"profileData\":[{}"; + m_output_stream.flush(); } - inline void writeFooter() + inline void WriteFooter() { - _output_stream << "]}"; - _output_stream.flush(); + m_output_stream << "]}"; + m_output_stream.flush(); } private: - std::unordered_map> _profile_data; - std::ofstream _output_stream; - std::mutex _mutex; - bool _runtime_session_began = false; + std::unordered_map> m_profile_data; + std::ofstream m_output_stream; + std::mutex m_mutex; + bool m_runtime_session_began = false; }; class ProfilerTimer { public: - ProfilerTimer(const char* name) : _name(name) + ProfilerTimer(const char* name) : m_name(name) { - _start_timepoint = std::chrono::steady_clock::now(); + m_start_timepoint = std::chrono::steady_clock::now(); } - inline void stop() + inline void Stop() { auto end_timepoint = std::chrono::steady_clock::now(); - auto high_res_start = FloatingPointMilliseconds{ _start_timepoint.time_since_epoch() }; - auto elapsed_time = std::chrono::time_point_cast(end_timepoint).time_since_epoch() - std::chrono::time_point_cast(_start_timepoint).time_since_epoch(); + auto high_res_start = FloatingPointMilliseconds{ m_start_timepoint.time_since_epoch() }; + auto elapsed_time = std::chrono::time_point_cast(end_timepoint).time_since_epoch() - std::chrono::time_point_cast(m_start_timepoint).time_since_epoch(); - Profiler::get().appendProfileData({ _name, elapsed_time, std::this_thread::get_id() }); + Profiler::get().appendProfileData({ m_name, elapsed_time, std::this_thread::get_id() }); - _stopped = true; + m_stopped = true; } ~ProfilerTimer() { - if(!_stopped) + if(!m_stopped) stop(); } private: - std::chrono::time_point _start_timepoint; - const char* _name; - bool _stopped = false; + std::chrono::time_point m_start_timepoint; + const char* m_name; + bool m_stopped = false; }; namespace ProfilerUtils @@ -102,7 +102,7 @@ namespace mlx }; template - constexpr auto cleanupOutputString(const char(&expr)[N], const char(&remove)[K]) + constexpr auto CleanupOutputString(const char(&expr)[N], const char(&remove)[K]) { ChangeResult result = {}; @@ -124,8 +124,8 @@ namespace mlx } #ifdef PROFILER - #define MLX_PROFILE_SCOPE_LINE2(name, line) constexpr auto fixedName##line = ::mlx::ProfilerUtils::cleanupOutputString(name, "__cdecl ");\ - ::mlx::ProfilerTimer timer##line(fixedName##line.data) + #define MLX_PROFILE_SCOPE_LINE2(name, line) constexpr auto fixed_name_##line = ::mlx::ProfilerUtils::CleanupOutputString(name, "__cdecl ");\ + ::mlx::ProfilerTimer timer##line(fixed_name_##line.data) #define MLX_PROFILE_SCOPE_LINE(name, line) MLX_PROFILE_SCOPE_LINE2(name, line) #define MLX_PROFILE_SCOPE(name) MLX_PROFILE_SCOPE_LINE(name, __LINE__) #define MLX_PROFILE_FUNCTION() MLX_PROFILE_SCOPE(MLX_FUNC_SIG) diff --git a/src/core/UUID.h b/runtime/Includes/Core/UUID.h similarity index 86% rename from src/core/UUID.h rename to runtime/Includes/Core/UUID.h index ad0507e..0b4075f 100644 --- a/src/core/UUID.h +++ b/runtime/Includes/Core/UUID.h @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2024/01/06 11:13:23 by maldavid #+# #+# */ -/* Updated: 2024/03/25 19:12:53 by maldavid ### ########.fr */ +/* Updated: 2024/03/27 21:19:18 by maldavid ### ########.fr */ /* */ 
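// Editor's note: with PROFILER enabled, MLX_PROFILE_FUNCTION() expands to a
// constexpr-cleaned name plus a ProfilerTimer whose destructor records the elapsed
// time. A trimmed, self-contained illustration of that RAII pattern; the patch routes
// the measurement through the Profiler singleton instead of printing.
#include <chrono>
#include <cstdio>

class ScopeTimerSketch
{
public:
	explicit ScopeTimerSketch(const char* name)
		: m_name(name), m_start(std::chrono::steady_clock::now()) {}

	~ScopeTimerSketch()
	{
		auto elapsed = std::chrono::duration_cast<std::chrono::microseconds>(
			std::chrono::steady_clock::now() - m_start);
		std::printf("%s took %lld us\n", m_name, static_cast<long long>(elapsed.count()));
	}

private:
	const char* m_name;
	std::chrono::steady_clock::time_point m_start;
};

inline void LoadAssetsSketch()
{
	ScopeTimerSketch timer(__func__); // mirrors what MLX_PROFILE_FUNCTION() sets up
	// ... expensive work measured until the end of the scope ...
}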
/* ************************************************************************** */ @@ -21,10 +21,10 @@ namespace mlx UUID(); UUID(std::uint64_t uuid); - inline operator std::uint64_t() const { return _uuid; } + inline operator std::uint64_t() const { return m_uuid; } private: - std::uint64_t _uuid; + std::uint64_t m_uuid; }; } diff --git a/runtime/Includes/Drivers/GLFW/GLFWInputs.h b/runtime/Includes/Drivers/GLFW/GLFWInputs.h new file mode 100644 index 0000000..0901ec9 --- /dev/null +++ b/runtime/Includes/Drivers/GLFW/GLFWInputs.h @@ -0,0 +1,44 @@ +/* ************************************************************************** */ +/* */ +/* ::: :::::::: */ +/* GLFWInputs.h :+: :+: :+: */ +/* +:+ +:+ +:+ */ +/* By: maldavid +#+ +:+ +#+ */ +/* +#+#+#+#+#+ +#+ */ +/* Created: 2024/03/27 18:32:29 by maldavid #+# #+# */ +/* Updated: 2024/03/27 18:37:58 by maldavid ### ########.fr */ +/* */ +/* ************************************************************************** */ + +#ifndef __MLX_GLFW_INPUTS__ +#define __MLX_GLFW_INPUTS__ + +#include + +namespace mlx +{ + class GLFWInputs : public Inputs + { + public: + GLFWInputs() = default; + + void Update() override noexcept; + + void RegisterWindow(std::shared_ptr window) override; + + inline std::int32_t GetX() override const noexcept { return m_x; } + inline std::int32_t GetY() override const noexcept { return m_y; } + inline std::int32_t GetXRel() override const noexcept { return m_x_rel; } + inline std::int32_t GetYRel() override const noexcept { return m_y_rel; } + + ~GLFWInputs() override = default; + + private: + std::int32_t m_x = 0; + std::int32_t m_y = 0; + std::int32_t m_x_rel = 0; + std::int32_t m_y_rel = 0; + }; +} + +#endif diff --git a/runtime/Includes/Platform/Inputs.h b/runtime/Includes/Platform/Inputs.h new file mode 100644 index 0000000..aa8ad0f --- /dev/null +++ b/runtime/Includes/Platform/Inputs.h @@ -0,0 +1,62 @@ +/* ************************************************************************** */ +/* */ +/* ::: :::::::: */ +/* Inputs.h :+: :+: :+: */ +/* +:+ +:+ +:+ */ +/* By: maldavid +#+ +:+ +#+ */ +/* +#+#+#+#+#+ +#+ */ +/* Created: 2022/10/05 16:27:35 by maldavid #+# #+# */ +/* Updated: 2024/03/27 18:36:21 by maldavid ### ########.fr */ +/* */ +/* ************************************************************************** */ + +#ifndef __MLX_INPUTS__ +#define __MLX_INPUTS__ + +#include +#include + +namespace mlx +{ + class Inputs + { + public: + struct Hook + { + func::function hook; + void* param = nullptr; + }; + + public: + Inputs() = default; + + virtual void Update() noexcept = 0; + + virtual void RegisterWindow(std::shared_ptr window) = 0; + + virtual std::int32_t GetX() const noexcept = 0; + virtual std::int32_t GetY() const noexcept = 0; + virtual std::int32_t GetXRel() const noexcept = 0; + virtual std::int32_t GetYRel() const noexcept = 0; + + inline bool IsMouseMoving() const noexcept { return GetXRel() || GetYRel(); } + MLX_FORCEINLINE bool IsRunning() const noexcept { return m_run; } + MLX_FORCEINLINE constexpr void Finish() noexcept { m_run = false; } + MLX_FORCEINLINE constexpr void Run() noexcept { m_run = true; } + + inline void OnEvent(std::uint32_t id, int event, int (*funct_ptr)(int, void*), void* param) noexcept + { + m_events_hooks[id][event].hook = funct_ptr; + m_events_hooks[id][event].param = param; + } + + virtual ~Inputs() = default; + + protected: + std::unordered_map> m_windows; + std::unordered_map> m_events_hooks; + bool m_run = false; + }; +} + +#endif diff --git a/src/platform/window.h 
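// Editor's note: sketch of how a backend would fire a hook registered through
// Inputs::OnEvent() above; the event indices and the exact functor type are
// assumptions, not shown in this hunk.
inline void FireHookSketch(const mlx::Inputs::Hook& hook, int value)
{
	if(hook.hook)                     // empty until OnEvent() stored a callback
		hook.hook(value, hook.param); // matches the int (*)(int, void*) the API takes
}
// e.g. FireHookSketch(m_events_hooks[window_id][MLX_KEYDOWN], keycode);
// where MLX_KEYDOWN is an assumed event index.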
b/runtime/Includes/Platform/Window.h similarity index 62% rename from src/platform/window.h rename to runtime/Includes/Platform/Window.h index 09202a0..d9e15a1 100644 --- a/src/platform/window.h +++ b/runtime/Includes/Platform/Window.h @@ -1,12 +1,12 @@ /* ************************************************************************** */ /* */ /* ::: :::::::: */ -/* window.h :+: :+: :+: */ +/* Window.h :+: :+: :+: */ /* +:+ +:+ +:+ */ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/04 21:53:12 by maldavid #+# #+# */ -/* Updated: 2024/03/26 23:03:50 by maldavid ### ########.fr */ +/* Updated: 2024/03/27 21:58:18 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -15,26 +15,30 @@ namespace mlx { + #ifdef LEGACY + using WindowHandle = SDL_Window; + #else + using WindowHandle = GLFWwindow; + #endif + class Window { public: Window(std::size_t w, std::size_t h, const std::string& title); - inline GLFWwindow* getNativeWindow() const noexcept { return _win; } - inline int getWidth() const noexcept { return _width; } - inline int getHeight() const noexcept { return _height; } - inline std::uint32_t getID() const noexcept { return _id; } + inline NonOwningPtr GetWindowHandle() const noexcept = 0; + inline int GetWidth() const noexcept { return m_width; } + inline int GetHeight() const noexcept { return m_height; } + inline std::uint32_t GetID() const noexcept { return m_id; } - void destroy() noexcept; + void Destroy() noexcept; ~Window() = default; private: - GLFWimage _icon; - GLFWwindow* _win = nullptr; - int _width = 0; - int _height = 0; - std::uint32_t _id = -1; + std::uint32_t m_id = -1; + int m_width = 0; + int m_height = 0; }; } diff --git a/src/pre_compiled.h b/runtime/Includes/PreCompiled.h similarity index 86% rename from src/pre_compiled.h rename to runtime/Includes/PreCompiled.h index 313cd6f..dd0f386 100644 --- a/src/pre_compiled.h +++ b/runtime/Includes/PreCompiled.h @@ -1,12 +1,12 @@ /* ************************************************************************** */ /* */ /* ::: :::::::: */ -/* pre_compiled.h :+: :+: :+: */ +/* PreCompiled.h :+: :+: :+: */ /* +:+ +:+ +:+ */ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2024/03/25 17:37:23 by maldavid #+# #+# */ -/* Updated: 2024/03/27 00:39:13 by maldavid ### ########.fr */ +/* Updated: 2024/03/27 21:33:47 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -22,7 +22,14 @@ #include #include #include -#include + +#ifdef LEGACY + #include + #include +#else + #include +#endif + #include #include #include @@ -74,4 +81,9 @@ #undef Window +#include +#include +#include +#include + #endif diff --git a/runtime/Includes/Renderer/Buffers/Buffer.h b/runtime/Includes/Renderer/Buffers/Buffer.h new file mode 100644 index 0000000..5acf9de --- /dev/null +++ b/runtime/Includes/Renderer/Buffers/Buffer.h @@ -0,0 +1,66 @@ +/* ************************************************************************** */ +/* */ +/* ::: :::::::: */ +/* Buffer.h :+: :+: :+: */ +/* +:+ +:+ +:+ */ +/* By: maldavid +#+ +:+ +#+ */ +/* +#+#+#+#+#+ +#+ */ +/* Created: 2022/10/06 23:18:52 by maldavid #+# #+# */ +/* Updated: 2024/03/27 22:09:07 by maldavid ### ########.fr */ +/* */ +/* ************************************************************************** */ + +#ifndef __MLX_VK_BUFFER__ +#define __MLX_VK_BUFFER__ + +#include +#include +#include + +namespace mlx +{ + class Buffer : public CommandResource + 
{ + public: + Buffer() = default; + + void Create(BufferType type, VkDeviceSize size, VkBufferUsageFlags usage, const char* name, const void* data = nullptr); + void Destroy() noexcept; + + inline void MapMem(void** data) noexcept { Render_Core::get().getAllocator().mapMemory(m_allocation, data); m_is_mapped = true; } + inline bool IsMapped() const noexcept { return m_is_mapped; } + inline void UnmapMem() noexcept { Render_Core::get().getAllocator().unmapMemory(m_allocation); m_is_mapped = false; } + + void Flush(VkDeviceSize size = VK_WHOLE_SIZE, VkDeviceSize offset = 0); + bool CopyFromBuffer(const Buffer& buffer) noexcept; + + inline VkBuffer& operator()() noexcept { return m_buffer; } + inline VkBuffer& Get() noexcept { return m_buffer; } + inline VkDeviceSize GetSize() const noexcept { return m_size; } + inline VkDeviceSize GetOffset() const noexcept { return m_offset; } + + ~Buffer() = default; + + protected: + void PushToGPU() noexcept; + void Swap(Buffer& buffer) noexcept; + + protected: + VmaAllocation m_allocation; + VkBuffer m_buffer = VK_NULL_HANDLE; + VkDeviceSize m_offset = 0; + VkDeviceSize m_size = 0; + + private: + void CreateBuffer(VkBufferUsageFlags usage, VmaAllocationCreateInfo info, VkDeviceSize size, const char* name); + + private: + #ifdef DEBUG + std::string m_name; + #endif + VkBufferUsageFlags m_usage = 0; + bool m_is_mapped = false; + }; +} + +#endif diff --git a/src/renderer/buffers/vk_ibo.h b/runtime/Includes/Renderer/Buffers/IndexBuffer.h similarity index 64% rename from src/renderer/buffers/vk_ibo.h rename to runtime/Includes/Renderer/Buffers/IndexBuffer.h index 580fa2e..20b6132 100644 --- a/src/renderer/buffers/vk_ibo.h +++ b/runtime/Includes/Renderer/Buffers/IndexBuffer.h @@ -1,28 +1,28 @@ /* ************************************************************************** */ /* */ /* ::: :::::::: */ -/* vk_ibo.h :+: :+: :+: */ +/* IndexBuffer.h :+: :+: :+: */ /* +:+ +:+ +:+ */ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/01/25 15:05:05 by maldavid #+# #+# */ -/* Updated: 2024/03/25 19:12:35 by maldavid ### ########.fr */ +/* Updated: 2024/03/27 22:11:57 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __VK_IBO__ #define __VK_IBO__ -#include "vk_buffer.h" -#include +#include +#include namespace mlx { class C_IBO : public Buffer { public: - inline void create(std::uint32_t size, const std::uint16_t* data, const char* name) { Buffer::create(Buffer::kind::constant, size, VK_BUFFER_USAGE_INDEX_BUFFER_BIT, name, data); } - inline void bind(Renderer& renderer) noexcept { renderer.getActiveCmdBuffer().bindIndexBuffer(*this); } + inline void Create(std::uint32_t size, const std::uint16_t* data, const char* name) { Buffer::Create(BufferType::Constant, size, VK_BUFFER_USAGE_INDEX_BUFFER_BIT, name, data); } + inline void Bind(Renderer& renderer) noexcept { renderer.GetActiveCmdBuffer().BindIndexBuffer(*this); } }; } diff --git a/runtime/Includes/Renderer/Buffers/UniformBuffer.h b/runtime/Includes/Renderer/Buffers/UniformBuffer.h new file mode 100644 index 0000000..77b1f3a --- /dev/null +++ b/runtime/Includes/Renderer/Buffers/UniformBuffer.h @@ -0,0 +1,50 @@ +/* ************************************************************************** */ +/* */ +/* ::: :::::::: */ +/* UniformBuffer.h :+: :+: :+: */ +/* +:+ +:+ +:+ */ +/* By: maldavid +#+ +:+ +#+ */ +/* +#+#+#+#+#+ +#+ */ +/* Created: 2022/10/06 18:45:29 by maldavid #+# #+# */ +/* Updated: 2024/03/27 22:15:23 by maldavid ### 
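// Editor's note: a usage sketch for the Buffer API above; the BufferType values and
// the calling context are taken from the surrounding headers, the rest is assumed.
#include <cstdint>
#include <cstring>
#include <vulkan/vulkan.h>

inline void UploadQuadIndicesSketch(mlx::Buffer& buffer)
{
	const std::uint16_t indices[] = { 0, 1, 2, 2, 3, 0 };

	// Constant data can be handed to Create() directly and pushed to the GPU once.
	buffer.Create(mlx::BufferType::Constant, sizeof(indices),
			VK_BUFFER_USAGE_INDEX_BUFFER_BIT, "quad_indices", indices);
}

inline void WriteDynamicDataSketch(mlx::Buffer& buffer, const void* data, VkDeviceSize size)
{
	// Host-visible buffers are mapped, written, flushed, then unmapped.
	void* mapped = nullptr;
	buffer.MapMem(&mapped);
	std::memcpy(mapped, data, static_cast<std::size_t>(size));
	buffer.Flush(size, 0);
	if(buffer.IsMapped())
		buffer.UnmapMem();
}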
########.fr */ +/* */ +/* ************************************************************************** */ + +#ifndef __MLX_VK_UBO__ +#define __MLX_VK_UBO__ + +#include + +namespace mlx +{ + class UniformBuffer + { + public: + UniformBuffer() = default; + + void Create(NonOwningPtr renderer, std::uint32_t size, const char* name); + void Destroy() noexcept; + + void SetData(std::uint32_t size, const void* data); + + VkDeviceSize GetSize() noexcept; + VkDeviceSize GetOffset() noexcept; + VkDeviceMemory GetDeviceMemory() noexcept; + VkBuffer& operator()() noexcept; + VkBuffer& Get() noexcept; + + inline VkDeviceSize GetSize(int i) noexcept { return m_buffers[i].getSize(); } + inline VkDeviceSize GetOffset(int i) noexcept { return m_buffers[i].getOffset(); } + inline VkBuffer& operator()(int i) noexcept { return m_buffers[i].get(); } + inline VkBuffer& Get(int i) noexcept { return m_buffers[i].get(); } + + ~UniformBuffer() = default; + + private: + std::array m_buffers; + std::array m_maps; + NonOwningPtr m_renderer; + }; +} + +#endif // __MLX_VK_UBO__ diff --git a/runtime/Includes/Renderer/Buffers/VertexBuffer.h b/runtime/Includes/Renderer/Buffers/VertexBuffer.h new file mode 100644 index 0000000..10d7c07 --- /dev/null +++ b/runtime/Includes/Renderer/Buffers/VertexBuffer.h @@ -0,0 +1,46 @@ +/* ************************************************************************** */ +/* */ +/* ::: :::::::: */ +/* VertexBuffer.h :+: :+: :+: */ +/* +:+ +:+ +:+ */ +/* By: maldavid +#+ +:+ +#+ */ +/* +#+#+#+#+#+ +#+ */ +/* Created: 2022/10/06 18:27:38 by maldavid #+# #+# */ +/* Updated: 2024/03/27 22:18:23 by maldavid ### ########.fr */ +/* */ +/* ************************************************************************** */ + +#ifndef __MLX_VK_VBO__ +#define __MLX_VK_VBO__ + +#include +#include +#include + +namespace mlx +{ + class VertexBuffer : public Buffer + { + public: + inline void Create(std::uint32_t size, const void* data, const char* name) { Buffer::Create(BufferType::HighDynamic, size, VK_BUFFER_USAGE_VERTEX_BUFFER_BIT, name, data); } + void SetData(std::uint32_t size, const void* data); + inline void Bind(Renderer& renderer) noexcept { renderer.GetActiveCmdBuffer().BindVertexBuffer(*this); } + }; + + class DeviceVertexBuffer : public Buffer + { + public: + inline void create(std::uint32_t size, const void* data, const char* name) { Buffer::Create(BufferType::LowDynamic, size, VK_BUFFER_USAGE_VERTEX_BUFFER_BIT, name, data); } + void SetData(std::uint32_t size, const void* data); + inline void Bind(Renderer& renderer) noexcept { renderer.GetActiveCmdBuffer().BindVertexBuffer(*this); } + }; + + class ConstantVertexBuffer : public Buffer + { + public: + inline void Create(std::uint32_t size, const void* data, const char* name) { Buffer::Create(BufferType::Constant, size, VK_BUFFER_USAGE_VERTEX_BUFFER_BIT, name, data); } + inline void Bind(Renderer& renderer) noexcept { renderer.GetActiveCmdBuffer().BindVertexBuffer(*this); } + }; +} + +#endif // __MLX_VK_VBO__ diff --git a/runtime/Includes/Renderer/Command/CommandBuffer.h b/runtime/Includes/Renderer/Command/CommandBuffer.h new file mode 100644 index 0000000..68c068f --- /dev/null +++ b/runtime/Includes/Renderer/Command/CommandBuffer.h @@ -0,0 +1,70 @@ +/* ************************************************************************** */ +/* */ +/* ::: :::::::: */ +/* CommandBuffer.h :+: :+: :+: */ +/* +:+ +:+ +:+ */ +/* By: maldavid +#+ +:+ +#+ */ +/* +#+#+#+#+#+ +#+ */ +/* Created: 2022/10/06 18:25:42 by maldavid #+# #+# */ +/* Updated: 2024/03/27 22:44:58 
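// Editor's note: UniformBuffer above keeps one Buffer (and one persistent mapping) per
// frame in flight so the CPU never writes a copy the GPU is still reading. A sketch of
// the per-frame usage; the matrices struct and the frame index source are assumed.
#include <vulkan/vulkan.h>

struct MatricesUBOSketch
{
	float proj[16];
};

inline void UpdatePerFrameDataSketch(mlx::UniformBuffer& ubo, const MatricesUBOSketch& data)
{
	// Internally only the copy of the frame being recorded is touched.
	ubo.SetData(sizeof(MatricesUBOSketch), &data);
}

inline VkDescriptorBufferInfo MakeBufferInfoSketch(mlx::UniformBuffer& ubo, int frame_index)
{
	VkDescriptorBufferInfo info{};
	info.buffer = ubo.Get(frame_index);       // one VkBuffer per frame in flight
	info.offset = ubo.GetOffset(frame_index);
	info.range  = ubo.GetSize(frame_index);
	return info;
}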
by maldavid ### ########.fr */ +/* */ +/* ************************************************************************** */ + +#ifndef __MLX_VK_CMD_BUFFER__ +#define __MLX_VK_CMD_BUFFER__ + +#include +#include + +namespace mlx +{ + class Buffer; + class Image; + + class CommandBuffer + { + public: + void Init(CommandBufferType type, NonOwningPtr manager); + void Init(CommandBufferType type, NonOwningPtr pool); + void Destroy() noexcept; + + void BeginRecord(VkCommandBufferUsageFlags usage = 0); + void Submit(class Semaphore* semaphores) noexcept; + void SubmitIdle(bool shouldWaitForExecution = true) noexcept; // TODO : handle `shouldWaitForExecution` as false by default (needs to modify CmdResources lifetimes to do so) + void UpdateSubmitState() noexcept; + inline void WaitForExecution() noexcept { m_fence.wait(); UpdateSubmitState(); m_state = CommandBufferState::Ready; } + inline void Reset() noexcept { vkResetCommandBuffer(m_cmd_buffer, 0); } + void EndRecord(); + + void BindVertexBuffer(Buffer& buffer) noexcept; + void BindIndexBuffer(Buffer& buffer) noexcept; + void CopyBuffer(Buffer& dst, Buffer& src) noexcept; + void CopyBufferToImage(Buffer& buffer, Image& image) noexcept; + void CopyImagetoBuffer(Image& image, Buffer& buffer) noexcept; + void TransitionImageLayout(Image& image, VkImageLayout new_layout) noexcept; + + inline bool IsInit() const noexcept { return m_state != CommandBufferState::Uninit; } + inline bool IsReadyToBeUsed() const noexcept { return m_state == CommandBufferState::Ready; } + inline bool IsRecording() const noexcept { return m_state == CommandBufferState::Recording; } + inline bool HasBeenSubmitted() const noexcept { return m_state == CommandBufferState::Submitted; } + inline CommandBufferState GetCurrentState() const noexcept { return m_state; } + + inline VkCommandBuffer& operator()() noexcept { return m_cmd_buffer; } + inline VkCommandBuffer& Get() noexcept { return m_cmd_buffer; } + inline Fence& GetFence() noexcept { return m_fence; } + + private: + void PreTransferBarrier() noexcept; + void PostTransferBarrier() noexcept; + + private: + std::vector m_cmd_resources; + Fence m_fence; + VkCommandBuffer m_cmd_buffer = VK_NULL_HANDLE; + NonOwningPtr m_pool; + CommandBufferState m_state = CommandBufferState::Uninit; + CommandBufferType m_type; + }; +} + +#endif // __MLX_VK_CMD_BUFFER__ diff --git a/src/renderer/command/cmd_manager.h b/runtime/Includes/Renderer/Command/CommandManager.h similarity index 55% rename from src/renderer/command/cmd_manager.h rename to runtime/Includes/Renderer/Command/CommandManager.h index abac258..c0dcedc 100644 --- a/src/renderer/command/cmd_manager.h +++ b/runtime/Includes/Renderer/Command/CommandManager.h @@ -1,42 +1,42 @@ /* ************************************************************************** */ /* */ /* ::: :::::::: */ -/* cmd_manager.h :+: :+: :+: */ +/* CommandManager.h :+: :+: :+: */ /* +:+ +:+ +:+ */ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/04/02 17:48:52 by maldavid #+# #+# */ -/* Updated: 2024/03/25 19:12:23 by maldavid ### ########.fr */ +/* Updated: 2024/03/27 22:20:53 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __MLX_COMMAND_MANAGER__ #define __MLX_COMMAND_MANAGER__ -#include -#include -#include +#include +#include +#include namespace mlx { - class CmdManager + class CommandManager { public: - CmdManager() = default; + CommandManager() = default; - void init() noexcept; - void beginRecord(int 
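// Editor's note: a sketch of the intended single-use flow for CommandBuffer above;
// the copy source/destination and the surrounding transfer context are assumed.
#include <vulkan/vulkan.h>

inline void CopyStagingToDeviceSketch(mlx::CommandBuffer& cmd, mlx::Buffer& dst, mlx::Buffer& src)
{
	if(!cmd.IsReadyToBeUsed())
		cmd.WaitForExecution(); // recycle a buffer that was submitted earlier

	cmd.BeginRecord(VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT);
	cmd.CopyBuffer(dst, src);   // records the transfer
	cmd.EndRecord();
	cmd.SubmitIdle();           // submits and, by default, waits on the fence
}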
active_image_index); - void endRecord(int active_image_index); - void destroy() noexcept; + void Init() noexcept; + void BeginRecord(int active_image_index); + void EndRecord(int active_image_index); + void Destroy() noexcept; - inline CmdPool& getCmdPool() noexcept { return _cmd_pool; } - inline CmdBuffer& getCmdBuffer(int i) noexcept { return _cmd_buffers[i]; } + inline CommandPool& GetCmdPool() noexcept { return m_cmd_pool; } + inline CommandBuffer& GetCmdBuffer(int i) noexcept { return m_cmd_buffers[i]; } - ~CmdManager() = default; + ~CommandManager() = default; private: - std::array _cmd_buffers; - CmdPool _cmd_pool; + std::array m_cmd_buffers; + CommandPool m_cmd_pool; }; } diff --git a/src/renderer/command/vk_cmd_pool.h b/runtime/Includes/Renderer/Command/CommandPool.h similarity index 67% rename from src/renderer/command/vk_cmd_pool.h rename to runtime/Includes/Renderer/Command/CommandPool.h index 3958739..edb8f22 100644 --- a/src/renderer/command/vk_cmd_pool.h +++ b/runtime/Includes/Renderer/Command/CommandPool.h @@ -1,12 +1,12 @@ /* ************************************************************************** */ /* */ /* ::: :::::::: */ -/* vk_cmd_pool.h :+: :+: :+: */ +/* CommandPool.h :+: :+: :+: */ /* +:+ +:+ +:+ */ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/06 18:24:12 by maldavid #+# #+# */ -/* Updated: 2024/03/25 19:12:14 by maldavid ### ########.fr */ +/* Updated: 2024/03/27 22:33:15 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -15,17 +15,21 @@ namespace mlx { - class CmdPool + class CommandPool { public: - void init(); - void destroy() noexcept; + CommandPool() = default; - inline VkCommandPool& operator()() noexcept { return _cmd_pool; } - inline VkCommandPool& get() noexcept { return _cmd_pool; } + void Init(); + void Destroy() noexcept; + + inline VkCommandPool& operator()() noexcept { return m_cmd_pool; } + inline VkCommandPool& Get() noexcept { return m_cmd_pool; } + + ~CommandPool() = default; private: - VkCommandPool _cmd_pool = VK_NULL_HANDLE; + VkCommandPool m_cmd_pool = VK_NULL_HANDLE; }; } diff --git a/src/renderer/core/cmd_resource.h b/runtime/Includes/Renderer/Command/CommandResource.h similarity index 60% rename from src/renderer/core/cmd_resource.h rename to runtime/Includes/Renderer/Command/CommandResource.h index 47ec17e..e9c367e 100644 --- a/src/renderer/core/cmd_resource.h +++ b/runtime/Includes/Renderer/Command/CommandResource.h @@ -1,42 +1,37 @@ /* ************************************************************************** */ /* */ /* ::: :::::::: */ -/* cmd_resource.h :+: :+: :+: */ +/* CommandResource.h :+: :+: :+: */ /* +:+ +:+ +:+ */ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/12/16 20:44:29 by maldavid #+# #+# */ -/* Updated: 2024/03/25 19:12:08 by maldavid ### ########.fr */ +/* Updated: 2024/03/27 22:37:06 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __MLX_COMMAND_RESOURCE__ #define __MLX_COMMAND_RESOURCE__ -#include +#include +#include namespace mlx { - class CmdResource + class CommandResource { friend class SingleTimeCmdManager; - public: - enum class state - { - in_cmd_buffer = 0, - out_cmd_buffer, - }; public: - CmdResource() : _uuid() {} - inline void recordedInCmdBuffer() noexcept { _state = state::in_cmd_buffer; } - inline void removedFromCmdBuffer() noexcept { _state = state::out_cmd_buffer; } - inline UUID getUUID() const 
noexcept { return _uuid; } - virtual ~CmdResource() = default; + CommandResource() : m_uuid() {} + inline void RecordedInCmdBuffer() noexcept { m_state = CommandResourceState::Held; } + inline void RemovedFromCmdBuffer() noexcept { m_state = CommandResourceState::Free; } + inline UUID GetUUID() const noexcept { return m_uuid; } + virtual ~CommandResource() = default; private: - UUID _uuid; - state _state = state::out_cmd_buffer; + UUID m_uuid; + CommandResourceState m_state = CommandResourceState::Free; }; } diff --git a/src/renderer/command/single_time_cmd_manager.h b/runtime/Includes/Renderer/Command/SingleTimeCmdManager.h similarity index 64% rename from src/renderer/command/single_time_cmd_manager.h rename to runtime/Includes/Renderer/Command/SingleTimeCmdManager.h index 1432ce7..be7f896 100644 --- a/src/renderer/command/single_time_cmd_manager.h +++ b/runtime/Includes/Renderer/Command/SingleTimeCmdManager.h @@ -1,48 +1,48 @@ /* ************************************************************************** */ /* */ /* ::: :::::::: */ -/* single_time_cmd_manager.h :+: :+: :+: */ +/* SingleTimeCmdManager.h :+: :+: :+: */ /* +:+ +:+ +:+ */ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/12/15 18:25:57 by maldavid #+# #+# */ -/* Updated: 2024/03/25 19:12:20 by maldavid ### ########.fr */ +/* Updated: 2024/03/27 22:46:48 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __MLX_SINGLE_TIME_CMD_MANAGER__ #define __MLX_SINGLE_TIME_CMD_MANAGER__ -#include -#include +#include +#include namespace mlx { - class CmdBuffer; + class CommandBuffer; class SingleTimeCmdManager { - friend class Render_Core; + friend class RenderCore; public: SingleTimeCmdManager() = default; - void init() noexcept; - void destroy() noexcept; + void Init() noexcept; + void Destroy() noexcept; - void updateSingleTimesCmdBuffersSubmitState() noexcept; - void waitForAllExecutions() noexcept; + void UpdateSingleTimesCmdBuffersSubmitState() noexcept; + void WaitForAllExecutions() noexcept; - inline CmdPool& getCmdPool() noexcept { return _pool; } - CmdBuffer& getCmdBuffer() noexcept; + inline CommandPool& GetCmdPool() noexcept { return m_pool; } + CommanddBuffer& GetCmdBuffer() noexcept; ~SingleTimeCmdManager() = default; inline static constexpr const std::uint8_t BASE_POOL_SIZE = 16; private: - std::vector _buffers; - CmdPool _pool; + std::vector m_buffers; + CommanddPool m_pool; }; } diff --git a/src/renderer/core/vk_device.h b/runtime/Includes/Renderer/Core/Device.h similarity index 60% rename from src/renderer/core/vk_device.h rename to runtime/Includes/Renderer/Core/Device.h index 564fba1..c46aaf6 100644 --- a/src/renderer/core/vk_device.h +++ b/runtime/Includes/Renderer/Core/Device.h @@ -1,12 +1,12 @@ /* ************************************************************************** */ /* */ /* ::: :::::::: */ -/* vk_device.h :+: :+: :+: */ +/* Device.h :+: :+: :+: */ /* +:+ +:+ +:+ */ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/08 19:13:42 by maldavid #+# #+# */ -/* Updated: 2024/03/25 22:31:46 by maldavid ### ########.fr */ +/* Updated: 2024/03/27 22:47:21 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -18,22 +18,22 @@ namespace mlx class Device { public: - void init(); - void destroy() noexcept; + void Init(); + void Destroy() noexcept; - inline VkDevice& operator()() noexcept { return _device; } - inline VkDevice& get() noexcept 
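// Editor's note: CommandResource above tags GPU-side objects with a UUID and a
// Held/Free state so a command buffer can keep them alive until its fence signals.
// A reduced sketch of that bookkeeping, under those assumptions; the holder type is
// invented for illustration.
#include <vector>

class RecordedResourcesSketch
{
public:
	void Hold(mlx::CommandResource& resource)
	{
		resource.RecordedInCmdBuffer(); // mark as referenced by pending GPU work
		m_resources.push_back(&resource);
	}

	void ReleaseAll() // called once the fence for this submission has signaled
	{
		for(mlx::CommandResource* resource : m_resources)
			resource->RemovedFromCmdBuffer(); // now safe to destroy or reuse
		m_resources.clear();
	}

private:
	std::vector<mlx::CommandResource*> m_resources;
};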
{ return _device; } + inline VkDevice& operator()() noexcept { return m_device; } + inline VkDevice& Get() noexcept { return m_device; } - inline VkPhysicalDevice& getPhysicalDevice() noexcept { return _physical_device; } + inline VkPhysicalDevice& GetPhysicalDevice() noexcept { return m_physical_device; } private: - void pickPhysicalDevice(); - bool checkDeviceExtensionSupport(VkPhysicalDevice device); - int deviceScore(VkPhysicalDevice device); + void PickPhysicalDevice(); + bool CheckDeviceExtensionSupport(VkPhysicalDevice device); + int DeviceScore(VkPhysicalDevice device); private: - VkPhysicalDevice _physical_device = VK_NULL_HANDLE; - VkDevice _device = VK_NULL_HANDLE; + VkPhysicalDevice m_physical_device = VK_NULL_HANDLE; + VkDevice m_device = VK_NULL_HANDLE; }; } diff --git a/src/renderer/core/drawable_resource.h b/runtime/Includes/Renderer/Core/DrawableResource.h similarity index 81% rename from src/renderer/core/drawable_resource.h rename to runtime/Includes/Renderer/Core/DrawableResource.h index 6dbf67c..fb60df9 100644 --- a/src/renderer/core/drawable_resource.h +++ b/runtime/Includes/Renderer/Core/DrawableResource.h @@ -1,12 +1,12 @@ /* ************************************************************************** */ /* */ /* ::: :::::::: */ -/* drawable_resource.h :+: :+: :+: */ +/* DrawableResource.h :+: :+: :+: */ /* +:+ +:+ +:+ */ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2024/01/10 21:00:37 by maldavid #+# #+# */ -/* Updated: 2024/03/25 19:12:05 by maldavid ### ########.fr */ +/* Updated: 2024/03/27 22:47:32 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -19,8 +19,8 @@ namespace mlx { public: DrawableResource() = default; - virtual void render(std::array& sets, class Renderer& renderer) = 0; - virtual void resetUpdate() {} + virtual void Render(std::array& sets, class Renderer& renderer) = 0; + virtual void ResetUpdate() {} virtual ~DrawableResource() = default; }; } diff --git a/src/renderer/core/vk_fence.h b/runtime/Includes/Renderer/Core/Fence.h similarity index 69% rename from src/renderer/core/vk_fence.h rename to runtime/Includes/Renderer/Core/Fence.h index dd4a5d0..ea46aae 100644 --- a/src/renderer/core/vk_fence.h +++ b/runtime/Includes/Renderer/Core/Fence.h @@ -1,12 +1,12 @@ /* ************************************************************************** */ /* */ /* ::: :::::::: */ -/* vk_fence.h :+: :+: :+: */ +/* Fence.h :+: :+: :+: */ /* +:+ +:+ +:+ */ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/04/02 17:52:09 by maldavid #+# #+# */ -/* Updated: 2024/03/25 19:11:53 by maldavid ### ########.fr */ +/* Updated: 2024/03/27 22:48:31 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -20,20 +20,20 @@ namespace mlx public: Fence() = default; - void init(); + void Init(); - inline VkFence& get() noexcept { return _fence; } - void wait() noexcept; - void reset() noexcept; - bool isReady() const noexcept; - inline void waitAndReset() noexcept { wait(); reset(); } + inline VkFence& Get() noexcept { return m_fence; } + void Wait() noexcept; + void Reset() noexcept; + bool IsReady() const noexcept; + MLX_FORCEINLINE void WaitAndReset() noexcept { Wait(); Reset(); } - void destroy() noexcept; + void Destroy() noexcept; ~Fence() = default; private: - VkFence _fence = VK_NULL_HANDLE; + VkFence m_fence = VK_NULL_HANDLE; }; } diff --git a/src/renderer/core/vk_instance.h 
b/runtime/Includes/Renderer/Core/Instance.h similarity index 69% rename from src/renderer/core/vk_instance.h rename to runtime/Includes/Renderer/Core/Instance.h index da52679..9b33f3e 100644 --- a/src/renderer/core/vk_instance.h +++ b/runtime/Includes/Renderer/Core/Instance.h @@ -1,12 +1,12 @@ /* ************************************************************************** */ /* */ /* ::: :::::::: */ -/* vk_instance.h :+: :+: :+: */ +/* Instance.h :+: :+: :+: */ /* +:+ +:+ +:+ */ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/08 19:03:04 by maldavid #+# #+# */ -/* Updated: 2024/03/25 19:11:50 by maldavid ### ########.fr */ +/* Updated: 2024/03/27 22:48:55 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -18,15 +18,17 @@ namespace mlx class Instance { public: - void init(); - void destroy() noexcept; + void Init(); + void Destroy() noexcept; - inline VkInstance& operator()() noexcept { return _instance; } - inline VkInstance& get() noexcept { return _instance; } + inline VkInstance& operator()() noexcept { return m_instance; } + inline VkInstance& Get() noexcept { return m_instance; } private: - std::vector getRequiredExtensions(); - VkInstance _instance = VK_NULL_HANDLE; + std::vector GetRequiredExtensions(); + + private: + VkInstance m_instance = VK_NULL_HANDLE; }; } diff --git a/src/renderer/core/memory.h b/runtime/Includes/Renderer/Core/Memory.h similarity index 60% rename from src/renderer/core/memory.h rename to runtime/Includes/Renderer/Core/Memory.h index 9703213..a29be0b 100644 --- a/src/renderer/core/memory.h +++ b/runtime/Includes/Renderer/Core/Memory.h @@ -1,12 +1,12 @@ /* ************************************************************************** */ /* */ /* ::: :::::::: */ -/* memory.h :+: :+: :+: */ +/* Memory.h :+: :+: :+: */ /* +:+ +:+ +:+ */ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/10/20 02:13:03 by maldavid #+# #+# */ -/* Updated: 2024/03/25 19:12:02 by maldavid ### ########.fr */ +/* Updated: 2024/03/27 22:49:57 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -20,28 +20,28 @@ namespace mlx public: GPUallocator() = default; - void init() noexcept; - void destroy() noexcept; + void Init() noexcept; + void Destroy() noexcept; - VmaAllocation createBuffer(const VkBufferCreateInfo* binfo, const VmaAllocationCreateInfo* vinfo, VkBuffer& buffer, const char* name = nullptr) noexcept; - void destroyBuffer(VmaAllocation allocation, VkBuffer buffer) noexcept; + VmaAllocation CreateBuffer(const VkBufferCreateInfo* binfo, const VmaAllocationCreateInfo* vinfo, VkBuffer& buffer, const char* name = nullptr) noexcept; + void DestroyBuffer(VmaAllocation allocation, VkBuffer buffer) noexcept; - VmaAllocation createImage(const VkImageCreateInfo* iminfo, const VmaAllocationCreateInfo* vinfo, VkImage& image, const char* name = nullptr) noexcept; - void destroyImage(VmaAllocation allocation, VkImage image) noexcept; + VmaAllocation CreateImage(const VkImageCreateInfo* iminfo, const VmaAllocationCreateInfo* vinfo, VkImage& image, const char* name = nullptr) noexcept; + void DestroyImage(VmaAllocation allocation, VkImage image) noexcept; - void mapMemory(VmaAllocation allocation, void** data) noexcept; - void unmapMemory(VmaAllocation allocation) noexcept; + void MapMemory(VmaAllocation allocation, void** data) noexcept; + void UnmapMemory(VmaAllocation allocation) noexcept; - void dumpMemoryToJson(); + 
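// Editor's note: the GPU allocator above wraps VMA and keeps running counters of live
// buffer/image allocations, presumably so a non-zero count at shutdown flags a leak.
// A reduced sketch of that counting, under that assumption.
#include <vk_mem_alloc.h>

struct AllocationCountersSketch
{
	std::int32_t active_buffers = 0;
	std::int32_t active_images = 0;

	void OnBufferCreated() noexcept { ++active_buffers; }
	void OnBufferDestroyed() noexcept { --active_buffers; }

	bool Clean() const noexcept
	{
		// Both counters should be back to zero before vmaDestroyAllocator().
		return active_buffers == 0 && active_images == 0;
	}
};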
void DumpMemoryToJson(); - void flush(VmaAllocation allocation, VkDeviceSize size, VkDeviceSize offset) noexcept; + void Flush(VmaAllocation allocation, VkDeviceSize size, VkDeviceSize offset) noexcept; ~GPUallocator() = default; private: - VmaAllocator _allocator; - std::int32_t _active_buffers_allocations = 0; - std::int32_t _active_images_allocations = 0; + VmaAllocator m_allocator; + std::int32_t m_active_buffers_allocations = 0; + std::int32_t m_active_images_allocations = 0; }; } diff --git a/src/renderer/core/vk_queues.h b/runtime/Includes/Renderer/Core/Queues.h similarity index 64% rename from src/renderer/core/vk_queues.h rename to runtime/Includes/Renderer/Core/Queues.h index 4895c9c..4230e25 100644 --- a/src/renderer/core/vk_queues.h +++ b/runtime/Includes/Renderer/Core/Queues.h @@ -1,20 +1,18 @@ /* ************************************************************************** */ /* */ /* ::: :::::::: */ -/* vk_queues.h :+: :+: :+: */ +/* Queues.h :+: :+: :+: */ /* +:+ +:+ +:+ */ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/08 19:01:49 by maldavid #+# #+# */ -/* Updated: 2024/03/25 22:29:26 by maldavid ### ########.fr */ +/* Updated: 2024/03/27 22:50:52 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __MLX_VK_QUEUES__ #define __MLX_VK_QUEUES__ -#include - namespace mlx { class Queues @@ -28,24 +26,25 @@ namespace mlx inline bool isComplete() { return graphics_family.has_value() && present_family.has_value(); } }; - QueueFamilyIndices findQueueFamilies(VkPhysicalDevice device); + public: + QueueFamilyIndices FindQueueFamilies(VkPhysicalDevice device); - void init(); + void Init(); - inline VkQueue& getGraphic() noexcept { return _graphics_queue; } - inline VkQueue& getPresent() noexcept { return _present_queue; } - inline QueueFamilyIndices getFamilies() noexcept + inline VkQueue& GetGraphic() noexcept { return _graphics_queue; } + inline VkQueue& GetPresent() noexcept { return _present_queue; } + inline QueueFamilyIndices GetFamilies() noexcept { - if(_families.has_value()) - return *_families; - core::error::report(e_kind::fatal_error, "Vulkan : cannot get queue families, not init"); + if(m_families.has_value()) + return *m_families; + FatalError("Vulkan : cannot get queue families, not init"); return {}; // just to avoid warnings } private: - VkQueue _graphics_queue; - VkQueue _present_queue; - std::optional _families; + VkQueue m_graphics_queue; + VkQueue m_present_queue; + std::optional m_families; }; } diff --git a/runtime/Includes/Renderer/Core/RenderCore.h b/runtime/Includes/Renderer/Core/RenderCore.h new file mode 100644 index 0000000..a976b92 --- /dev/null +++ b/runtime/Includes/Renderer/Core/RenderCore.h @@ -0,0 +1,78 @@ +/* ************************************************************************** */ +/* */ +/* ::: :::::::: */ +/* RenderCore.h :+: :+: :+: */ +/* +:+ +:+ +:+ */ +/* By: maldavid +#+ +:+ +#+ */ +/* +#+#+#+#+#+ +#+ */ +/* Created: 2022/10/08 19:16:32 by maldavid #+# #+# */ +/* Updated: 2024/03/27 22:55:43 by maldavid ### ########.fr */ +/* */ +/* ************************************************************************** */ + +#ifndef __MLX_RENDER_CORE__ +#define __MLX_RENDER_CORE__ + +#include +#include +#include +#include +#include +#include +#include +#include + +#include + +namespace mlx +{ + const char* VerbaliseVkResult(VkResult result); + VkPipelineStageFlags AccessFlagsToPipelineStage(VkAccessFlags access_flags, VkPipelineStageFlags stage_flags); + 
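For reference, one conventional way the FindQueueFamilies helper declared above is implemented; the actual body lives in Queues.cpp, which this patch only moves, so the surface parameter, the std::uint32_t optionals and the public visibility of QueueFamilyIndices are assumptions here.

	#include <cstdint>
	#include <vector>
	#include <Renderer/Core/Queues.h> // assumed include path under runtime/Includes

	// Typical queue-family discovery: one family with graphics support and one able to
	// present to the window surface (often the same family index).
	mlx::Queues::QueueFamilyIndices FindQueueFamiliesSketch(VkPhysicalDevice device, VkSurfaceKHR surface)
	{
		mlx::Queues::QueueFamilyIndices indices;
		std::uint32_t count = 0;
		vkGetPhysicalDeviceQueueFamilyProperties(device, &count, nullptr);
		std::vector<VkQueueFamilyProperties> families(count);
		vkGetPhysicalDeviceQueueFamilyProperties(device, &count, families.data());

		for(std::uint32_t i = 0; i < count; i++)
		{
			if(families[i].queueFlags & VK_QUEUE_GRAPHICS_BIT)
				indices.graphics_family = i;
			VkBool32 present_support = VK_FALSE;
			vkGetPhysicalDeviceSurfaceSupportKHR(device, i, surface, &present_support);
			if(present_support)
				indices.present_family = i;
			if(indices.isComplete())
				break;
		}
		return indices;
	}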
+ #ifdef DEBUG + constexpr const bool enable_validation_layers = true; + #else + constexpr const bool enable_validation_layers = false; + #endif + + const std::vector validation_layers = { "VK_LAYER_KHRONOS_validation" }; + + constexpr const int MAX_FRAMES_IN_FLIGHT = 3; + constexpr const int MAX_SETS_PER_POOL = 512; + constexpr const int NUMBER_OF_UNIFORM_BUFFERS = 1; // change this if for wathever reason more than one uniform buffer is needed + + class RenderCore : public Singleton + { + friend class Singleton; + + public: + void Init(); + void Destroy(); + + inline bool IsInit() const noexcept { return m_is_init; } + inline Instance& GetInstance() noexcept { return m_instance; } + inline Device& GetDevice() noexcept { return m_device; } + inline Queues& GetQueue() noexcept { return m_queues; } + inline GPUallocator& GetAllocator() noexcept { return m_allocator; } + inline ValidationLayers& GetLayers() noexcept { return m_layers; } + inline CommandBuffer& GetSingleTimeCmdBuffer() noexcept { return m_cmd_manager.GetCmdBuffer(); } + inline SingleTimeCmdManager& GetSingleTimeCmdManager() noexcept { return m_cmd_manager; } + inline DescriptorPool& GetDescriptorPool() { return m_pool_manager.GetAvailablePool(); } + + private: + RenderCore() = default; + ~RenderCore() = default; + + private: + ValidationLayers m_layers; + SingleTimeCmdManager m_cmd_manager; + Queues m_queues; + DescriptorPoolManager m_pool_manager; + Device m_device; + Instance m_instance; + GPUallocator m_allocator; + bool m_is_init = false; + }; +} + +#endif // __MLX_RENDER_CORE__ diff --git a/src/renderer/core/vk_semaphore.h b/runtime/Includes/Renderer/Core/Semaphore.h similarity index 65% rename from src/renderer/core/vk_semaphore.h rename to runtime/Includes/Renderer/Core/Semaphore.h index c022e8b..8e071b8 100644 --- a/src/renderer/core/vk_semaphore.h +++ b/runtime/Includes/Renderer/Core/Semaphore.h @@ -1,12 +1,12 @@ /* ************************************************************************** */ /* */ /* ::: :::::::: */ -/* vk_semaphore.h :+: :+: :+: */ +/* Semaphore.h :+: :+: :+: */ /* +:+ +:+ +:+ */ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/08 18:59:38 by maldavid #+# #+# */ -/* Updated: 2024/03/25 19:11:43 by maldavid ### ########.fr */ +/* Updated: 2024/03/27 22:56:51 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -18,15 +18,13 @@ namespace mlx class Semaphore { public: - void init(); - void destroy() noexcept; + void Init(); + void Destroy() noexcept; - inline VkSemaphore& getImageSemaphore() noexcept { return _image_available_semaphore; } - inline VkSemaphore& getRenderImageSemaphore() noexcept { return _render_finished_semaphore; } + inline VkSemaphore& Get() noexcept { return m_semaphore; } private: - VkSemaphore _image_available_semaphore = VK_NULL_HANDLE; - VkSemaphore _render_finished_semaphore = VK_NULL_HANDLE; + VkSemaphore m_semaphore = VK_NULL_HANDLE; }; } diff --git a/src/renderer/core/vk_surface.h b/runtime/Includes/Renderer/Core/Surface.h similarity index 65% rename from src/renderer/core/vk_surface.h rename to runtime/Includes/Renderer/Core/Surface.h index d5ff3de..01a2785 100644 --- a/src/renderer/core/vk_surface.h +++ b/runtime/Includes/Renderer/Core/Surface.h @@ -1,12 +1,12 @@ /* ************************************************************************** */ /* */ /* ::: :::::::: */ -/* vk_surface.h :+: :+: :+: */ +/* Surface.h :+: :+: :+: */ /* +:+ +:+ +:+ */ /* By: maldavid +#+ +:+ +#+ */ /* 
+#+#+#+#+#+ +#+ */ /* Created: 2022/10/08 18:57:55 by maldavid #+# #+# */ -/* Updated: 2024/03/25 19:10:59 by maldavid ### ########.fr */ +/* Updated: 2024/03/27 22:58:15 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -18,16 +18,16 @@ namespace mlx class Surface { public: - void create(class Renderer& renderer); - void destroy() noexcept; + void Create(class Renderer& renderer); + void Destroy() noexcept; - VkSurfaceFormatKHR chooseSwapSurfaceFormat(const std::vector& availableFormats); + VkSurfaceFormatKHR ChooseSwapSurfaceFormat(const std::vector& available_formats); - inline VkSurfaceKHR& operator()() noexcept { return _surface; } - inline VkSurfaceKHR& get() noexcept { return _surface; } + inline VkSurfaceKHR& operator()() noexcept { return m_surface; } + inline VkSurfaceKHR& Get() noexcept { return m_surface; } private: - VkSurfaceKHR _surface = VK_NULL_HANDLE; + VkSurfaceKHR m_surface = VK_NULL_HANDLE; }; } diff --git a/src/renderer/core/vk_validation_layers.h b/runtime/Includes/Renderer/Core/ValidationLayers.h similarity index 57% rename from src/renderer/core/vk_validation_layers.h rename to runtime/Includes/Renderer/Core/ValidationLayers.h index 971542d..cee5999 100644 --- a/src/renderer/core/vk_validation_layers.h +++ b/runtime/Includes/Renderer/Core/ValidationLayers.h @@ -1,12 +1,12 @@ /* ************************************************************************** */ /* */ /* ::: :::::::: */ -/* vk_validation_layers.h :+: :+: :+: */ +/* ValidationLayers.h :+: :+: :+: */ /* +:+ +:+ +:+ */ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/12/19 14:04:25 by maldavid #+# #+# */ -/* Updated: 2024/03/25 19:00:00 by maldavid ### ########.fr */ +/* Updated: 2024/03/27 22:59:00 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -20,24 +20,24 @@ namespace mlx public: ValidationLayers() = default; - void init(); - void destroy(); + void Init(); + void Destroy(); - bool checkValidationLayerSupport(); - void populateDebugMessengerCreateInfo(VkDebugUtilsMessengerCreateInfoEXT& createInfo); + bool CheckValidationLayerSupport(); + void PopulateDebugMessengerCreateInfo(VkDebugUtilsMessengerCreateInfoEXT& create_info); - VkResult setDebugUtilsObjectNameEXT(VkObjectType object_type, std::uint64_t object_handle, const char* object_name); + VkResult SetDebugUtilsObjectNameEXT(VkObjectType object_type, std::uint64_t object_handle, const char* object_name); ~ValidationLayers() = default; private: - VkResult createDebugUtilsMessengerEXT(const VkDebugUtilsMessengerCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator); - static VKAPI_ATTR VkBool32 VKAPI_CALL debugCallback(VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VkDebugUtilsMessageTypeFlagsEXT messageType, const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData, void* pUserData); - void destroyDebugUtilsMessengerEXT(const VkAllocationCallbacks* pAllocator); + VkResult CreateDebugUtilsMessengerEXT(const VkDebugUtilsMessengerCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator); + static VKAPI_ATTR VkBool32 VKAPI_CALL DebugCallback(VkDebugUtilsMessageSeverityFlagBitsEXT message_severity, VkDebugUtilsMessageTypeFlagsEXT message_type, const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData, void* pUserData); + void DestroyDebugUtilsMessengerEXT(const VkAllocationCallbacks* pAllocator); private: - VkDebugUtilsMessengerEXT _debug_messenger; - 
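The ChooseSwapSurfaceFormat declaration above usually resolves to the standard preference for a BGRA8 sRGB format with a non-linear sRGB color space; a sketch under that assumption (the real body is in the renamed Surface.cpp and is not shown in this hunk).

	#include <vector>
	#include <vulkan/vulkan.h> // or the project's chosen Vulkan header

	// Prefer VK_FORMAT_B8G8R8A8_SRGB + VK_COLOR_SPACE_SRGB_NONLINEAR_KHR, else take the first entry.
	VkSurfaceFormatKHR ChooseSwapSurfaceFormatSketch(const std::vector<VkSurfaceFormatKHR>& available_formats)
	{
		for(const VkSurfaceFormatKHR& format : available_formats)
		{
			if(format.format == VK_FORMAT_B8G8R8A8_SRGB && format.colorSpace == VK_COLOR_SPACE_SRGB_NONLINEAR_KHR)
				return format;
		}
		return available_formats[0]; // assumes the driver reported at least one format
	}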
PFN_vkSetDebugUtilsObjectNameEXT _vkSetDebugUtilsObjectNameEXT = nullptr; + VkDebugUtilsMessengerEXT m_debug_messenger; + PFN_vkSetDebugUtilsObjectNameEXT f_vkSetDebugUtilsObjectNameEXT = nullptr; }; } diff --git a/src/renderer/descriptors/vk_descriptor_pool.h b/runtime/Includes/Renderer/Descriptors/DescriptorPool.h similarity index 59% rename from src/renderer/descriptors/vk_descriptor_pool.h rename to runtime/Includes/Renderer/Descriptors/DescriptorPool.h index 512668a..97cabe9 100644 --- a/src/renderer/descriptors/vk_descriptor_pool.h +++ b/runtime/Includes/Renderer/Descriptors/DescriptorPool.h @@ -1,12 +1,12 @@ /* ************************************************************************** */ /* */ /* ::: :::::::: */ -/* vk_descriptor_pool.h :+: :+: :+: */ +/* DescriptorPool.h :+: :+: :+: */ /* +:+ +:+ +:+ */ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/01/23 18:32:43 by maldavid #+# #+# */ -/* Updated: 2024/03/25 19:10:09 by maldavid ### ########.fr */ +/* Updated: 2024/03/27 23:00:29 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -20,21 +20,21 @@ namespace mlx public: DescriptorPool() = default; - void init(std::size_t n, VkDescriptorPoolSize* size); - void freeDescriptor(const class DescriptorSet& set); - void destroy() noexcept; + void Init(std::size_t n, NonOwningPtr size); + void FreeDescriptor(const class DescriptorSet& set); + void Destroy() noexcept; - inline VkDescriptorPool& operator()() noexcept { return _pool; } - inline VkDescriptorPool& get() noexcept { return _pool; } - inline std::size_t getNumberOfSetsAllocated() const noexcept { return _allocated_sets; } + inline VkDescriptorPool& operator()() noexcept { return m_pool; } + inline VkDescriptorPool& Get() noexcept { return m_pool; } + inline std::size_t GetNumberOfSetsAllocated() const noexcept { return m_allocated_sets; } - inline bool isInit() const noexcept { return _pool != VK_NULL_HANDLE; } + inline bool IsInit() const noexcept { return m_pool != VK_NULL_HANDLE; } ~DescriptorPool() = default; private: - VkDescriptorPool _pool = VK_NULL_HANDLE; - std::size_t _allocated_sets = 0; + VkDescriptorPool m_pool = VK_NULL_HANDLE; + std::size_t m_allocated_sets = 0; }; } diff --git a/src/renderer/descriptors/descriptor_pool_manager.h b/runtime/Includes/Renderer/Descriptors/DescriptorPoolManager.h similarity index 77% rename from src/renderer/descriptors/descriptor_pool_manager.h rename to runtime/Includes/Renderer/Descriptors/DescriptorPoolManager.h index aea5b0d..8f4cb01 100644 --- a/src/renderer/descriptors/descriptor_pool_manager.h +++ b/runtime/Includes/Renderer/Descriptors/DescriptorPoolManager.h @@ -1,19 +1,19 @@ /* ************************************************************************** */ /* */ /* ::: :::::::: */ -/* descriptor_pool_manager.h :+: :+: :+: */ +/* DescriptorPoolManager.h :+: :+: :+: */ /* +:+ +:+ +:+ */ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2024/01/20 06:26:26 by maldavid #+# #+# */ -/* Updated: 2024/03/25 19:10:12 by maldavid ### ########.fr */ +/* Updated: 2024/03/27 23:00:56 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __MLX_DESCRIPTOR_POOL_MANAGER__ #define __MLX_DESCRIPTOR_POOL_MANAGER__ -#include +#include namespace mlx { @@ -22,13 +22,13 @@ namespace mlx public: DescriptorPoolManager() = default; - DescriptorPool& getAvailablePool(); // assumes the pool is for only one set allocation, may 
cause some issues if this is for more than one - void destroyAllPools(); + DescriptorPool& GetAvailablePool(); // assumes the pool is for only one set allocation, may cause some issues if this is for more than one + void DestroyAllPools(); ~DescriptorPoolManager() = default; private: - std::list _pools; + std::list m_pools; }; } diff --git a/src/renderer/descriptors/vk_descriptor_set.h b/runtime/Includes/Renderer/Descriptors/DescriptorSet.h similarity index 57% rename from src/renderer/descriptors/vk_descriptor_set.h rename to runtime/Includes/Renderer/Descriptors/DescriptorSet.h index 2ad52c4..56c8ec4 100644 --- a/src/renderer/descriptors/vk_descriptor_set.h +++ b/runtime/Includes/Renderer/Descriptors/DescriptorSet.h @@ -1,19 +1,19 @@ /* ************************************************************************** */ /* */ /* ::: :::::::: */ -/* vk_descriptor_set.h :+: :+: :+: */ +/* DescriptorSet.h :+: :+: :+: */ /* +:+ +:+ +:+ */ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/01/23 18:39:36 by maldavid #+# #+# */ -/* Updated: 2024/03/25 19:10:06 by maldavid ### ########.fr */ +/* Updated: 2024/03/27 23:02:38 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __VK_DESCRIPTOR_SET__ #define __VK_DESCRIPTOR_SET__ -#include +#include namespace mlx { @@ -22,29 +22,29 @@ namespace mlx public: DescriptorSet() = default; - void init(class Renderer* renderer, class DescriptorPool* pool, class DescriptorSetLayout* layout); + void Init(class Renderer* renderer, class DescriptorPool* pool, class DescriptorSetLayout* layout); - void writeDescriptor(int binding, class UBO* ubo) const noexcept; - void writeDescriptor(int binding, const class Image& image) const noexcept; + void WriteDescriptor(int binding, NonOwningPtr ubo) const noexcept; + void WriteDescriptor(int binding, const class Image& image) const noexcept; - inline bool isInit() const noexcept { return _pool != nullptr && _renderer != nullptr; } + inline bool IsInit() const noexcept { return m_pool != nullptr && m_renderer != nullptr; } - DescriptorSet duplicate(); + DescriptorSet Duplicate(); VkDescriptorSet& operator()() noexcept; - VkDescriptorSet& get() noexcept; + VkDescriptorSet& Get() noexcept; - inline const std::array& getAllFramesDescriptorSets() const { return _desc_set; } + inline const std::array& GetAllFramesDescriptorSets() const { return m_desc_set; } - void destroy() noexcept; + void Destroy() noexcept; ~DescriptorSet() = default; private: - std::array _desc_set; - class DescriptorPool* _pool = nullptr; - class DescriptorSetLayout* _layout = nullptr; - class Renderer* _renderer = nullptr; + std::array m_desc_set; + NonOwningPtr p_pool; + NonOwningPtr p_layout; + NonOwningPtr p_renderer; }; } diff --git a/src/renderer/descriptors/vk_descriptor_set_layout.h b/runtime/Includes/Renderer/Descriptors/DescriptorSetLayout.h similarity index 67% rename from src/renderer/descriptors/vk_descriptor_set_layout.h rename to runtime/Includes/Renderer/Descriptors/DescriptorSetLayout.h index c7d2d45..e06bd54 100644 --- a/src/renderer/descriptors/vk_descriptor_set_layout.h +++ b/runtime/Includes/Renderer/Descriptors/DescriptorSetLayout.h @@ -1,12 +1,12 @@ /* ************************************************************************** */ /* */ /* ::: :::::::: */ -/* vk_descriptor_set_layout.h :+: :+: :+: */ +/* DescriptorSetLayout.h :+: :+: :+: */ /* +:+ +:+ +:+ */ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/01/23 18:36:22 by 
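For the WriteDescriptor(binding, ubo) overload declared above, a sketch of the conventional Vulkan descriptor update it would issue for one frame's set; the device, set and buffer handles are hypothetical parameters, and the real body lives in the descriptor set source file.

	#include <vulkan/vulkan.h>

	// Conventional shape of a uniform-buffer descriptor write for a single binding.
	void WriteUboDescriptorSketch(VkDevice device, VkDescriptorSet set, int binding, VkBuffer ubo, VkDeviceSize size)
	{
		VkDescriptorBufferInfo buffer_info{};
		buffer_info.buffer = ubo;
		buffer_info.offset = 0;
		buffer_info.range = size;

		VkWriteDescriptorSet write{};
		write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
		write.dstSet = set;
		write.dstBinding = binding;
		write.dstArrayElement = 0;
		write.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
		write.descriptorCount = 1;
		write.pBufferInfo = &buffer_info;

		vkUpdateDescriptorSets(device, 1, &write, 0, nullptr);
	}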
maldavid #+# #+# */ -/* Updated: 2024/03/25 19:10:03 by maldavid ### ########.fr */ +/* Updated: 2024/03/27 23:03:04 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -20,18 +20,18 @@ namespace mlx public: DescriptorSetLayout() = default; - void init(std::vector> binds, VkShaderStageFlagBits stage); - void destroy() noexcept; + void Init(std::vector> binds, VkShaderStageFlagBits stage); + void Destroy() noexcept; - inline VkDescriptorSetLayout& operator()() noexcept { return _layout; } - inline VkDescriptorSetLayout& get() noexcept { return _layout; } - inline const std::vector>& getBindings() const noexcept { return _bindings; } + inline VkDescriptorSetLayout& operator()() noexcept { return m_layout; } + inline VkDescriptorSetLayout& Get() noexcept { return m_layout; } + inline const std::vector>& GetBindings() const noexcept { return m_bindings; } ~DescriptorSetLayout() = default; private: - VkDescriptorSetLayout _layout = VK_NULL_HANDLE; - std::vector> _bindings; + std::vector> m_bindings; + VkDescriptorSetLayout m_layout = VK_NULL_HANDLE; }; } diff --git a/runtime/Includes/Renderer/Enums.h b/runtime/Includes/Renderer/Enums.h new file mode 100644 index 0000000..30083b4 --- /dev/null +++ b/runtime/Includes/Renderer/Enums.h @@ -0,0 +1,59 @@ +/* ************************************************************************** */ +/* */ +/* ::: :::::::: */ +/* Enums.h :+: :+: :+: */ +/* +:+ +:+ +:+ */ +/* By: maldavid +#+ +:+ +#+ */ +/* +#+#+#+#+#+ +#+ */ +/* Created: 2024/03/27 22:02:58 by maldavid #+# #+# */ +/* Updated: 2024/03/27 22:39:31 by maldavid ### ########.fr */ +/* */ +/* ************************************************************************** */ + +#ifndef __MLX_RENDERER_ENUMS__ +#define __MLX_RENDERER_ENUMS__ + +namespace mlx +{ + enum class BufferType + { + Constant = 0, + HighDynamic, // typically stored in RAM + LowDynamic, // typically stored in VRAM + + EndEnum + }; + constexpr std::size_t BufferTypeCount = static_cast(BufferType::EndEnum); + + enum class CommandResourceState + { + Held = 0, + Free, + + EndEnum + }; + constexpr std::size_t CommandResourceStateCount = static_cast(CommandResourceState::EndEnum); + + enum class CommandBufferState + { + Uninit = 0, // buffer not initialized or destroyed + Ready, // buffer ready to be used after having been submitted + Idle, // buffer has recorded informations but has not been submitted + Recording, // buffer is currently recording + Submitted, // buffer has been submitted + + EndEnum + }; + constexpr std::size_t CommandBufferStateCount = static_cast(CommandBufferState::EndEnum); + + enum class CommandBufferType + { + SingleTime = 0, + LongTime, + + EndEnum + }; + constexpr std::size_t CommandBufferTypeCount = static_cast(CommandBufferType::EndEnum); +} + +#endif diff --git a/src/renderer/images/vk_image.h b/runtime/Includes/Renderer/Images/Image.h similarity index 100% rename from src/renderer/images/vk_image.h rename to runtime/Includes/Renderer/Images/Image.h diff --git a/src/renderer/images/texture.h b/runtime/Includes/Renderer/Images/Texture.h similarity index 100% rename from src/renderer/images/texture.h rename to runtime/Includes/Renderer/Images/Texture.h diff --git a/src/renderer/images/texture_atlas.h b/runtime/Includes/Renderer/Images/TextureAtlas.h similarity index 100% rename from src/renderer/images/texture_atlas.h rename to runtime/Includes/Renderer/Images/TextureAtlas.h diff --git a/src/renderer/images/texture_descriptor.h 
b/runtime/Includes/Renderer/Images/TextureDescriptor.h similarity index 100% rename from src/renderer/images/texture_descriptor.h rename to runtime/Includes/Renderer/Images/TextureDescriptor.h diff --git a/src/renderer/images/texture_manager.h b/runtime/Includes/Renderer/Images/TextureManager.h similarity index 100% rename from src/renderer/images/texture_manager.h rename to runtime/Includes/Renderer/Images/TextureManager.h diff --git a/src/renderer/pipeline/pipeline.h b/runtime/Includes/Renderer/Pipelines/Pipeline.h similarity index 100% rename from src/renderer/pipeline/pipeline.h rename to runtime/Includes/Renderer/Pipelines/Pipeline.h diff --git a/src/renderer/pixel_put.h b/runtime/Includes/Renderer/PixelPut.h similarity index 100% rename from src/renderer/pixel_put.h rename to runtime/Includes/Renderer/PixelPut.h diff --git a/src/renderer/renderer.h b/runtime/Includes/Renderer/Renderer.h similarity index 100% rename from src/renderer/renderer.h rename to runtime/Includes/Renderer/Renderer.h diff --git a/src/renderer/renderpass/vk_framebuffer.h b/runtime/Includes/Renderer/Renderpass/FrameBuffer.h similarity index 100% rename from src/renderer/renderpass/vk_framebuffer.h rename to runtime/Includes/Renderer/Renderpass/FrameBuffer.h diff --git a/src/renderer/renderpass/vk_render_pass.h b/runtime/Includes/Renderer/Renderpass/RenderPass.h similarity index 100% rename from src/renderer/renderpass/vk_render_pass.h rename to runtime/Includes/Renderer/Renderpass/RenderPass.h diff --git a/src/renderer/swapchain/vk_swapchain.h b/runtime/Includes/Renderer/Renderpass/Swapchain.h similarity index 100% rename from src/renderer/swapchain/vk_swapchain.h rename to runtime/Includes/Renderer/Renderpass/Swapchain.h diff --git a/src/renderer/texts/font.h b/runtime/Includes/Renderer/Texts/Font.h similarity index 100% rename from src/renderer/texts/font.h rename to runtime/Includes/Renderer/Texts/Font.h diff --git a/src/renderer/texts/font_library.h b/runtime/Includes/Renderer/Texts/FontLibrary.h similarity index 100% rename from src/renderer/texts/font_library.h rename to runtime/Includes/Renderer/Texts/FontLibrary.h diff --git a/src/renderer/texts/text.h b/runtime/Includes/Renderer/Texts/Text.h similarity index 100% rename from src/renderer/texts/text.h rename to runtime/Includes/Renderer/Texts/Text.h diff --git a/src/renderer/texts/text_descriptor.h b/runtime/Includes/Renderer/Texts/TextDescriptor.h similarity index 100% rename from src/renderer/texts/text_descriptor.h rename to runtime/Includes/Renderer/Texts/TextDescriptor.h diff --git a/src/renderer/texts/text_library.h b/runtime/Includes/Renderer/Texts/TextLibrary.h similarity index 100% rename from src/renderer/texts/text_library.h rename to runtime/Includes/Renderer/Texts/TextLibrary.h diff --git a/src/renderer/texts/text_manager.h b/runtime/Includes/Renderer/Texts/TextManager.h similarity index 100% rename from src/renderer/texts/text_manager.h rename to runtime/Includes/Renderer/Texts/TextManager.h diff --git a/runtime/Includes/Utils/Ansi.h b/runtime/Includes/Utils/Ansi.h new file mode 100644 index 0000000..42eedeb --- /dev/null +++ b/runtime/Includes/Utils/Ansi.h @@ -0,0 +1,56 @@ +/* ************************************************************************** */ +/* */ +/* ::: :::::::: */ +/* Ansi.h :+: :+: :+: */ +/* +:+ +:+ +:+ */ +/* By: maldavid +#+ +:+ +#+ */ +/* +#+#+#+#+#+ +#+ */ +/* Created: 2024/03/27 17:22:41 by maldavid #+# #+# */ +/* Updated: 2024/03/27 17:23:34 by maldavid ### ########.fr */ +/* */ +/* 
************************************************************************** */ + +#ifndef __MLX_ANSI__ +#define __MLX_ANSI__ + +#include +#include + +namespace mlx +{ + enum class Ansi : std::uint32_t + { + red = 31, + green = 32, + blue = 34, + def = 0, + black = 30, + yellow = 33, + magenta = 35, + cyan = 36, + white = 37, + bg_red = 41, + bg_green = 42, + bg_blue = 44, + bg_def = 0, + bg_black = 40, + bg_yellow = 43, + bg_magenta = 45, + bg_cyan = 46, + bg_white = 47, + reset = 0, + bold = 1, + underline = 4, + inverse = 7, + bold_off = 21, + underline_off = 24, + inverse_off = 27 + }; + + inline std::ostream& operator<<(std::ostream& os, Ansi ansi) + { + return os << "\033[1;" << std::to_string(static_cast(ansi)) << "m"; + } +} + +#endif diff --git a/src/utils/combine_hash.h b/runtime/Includes/Utils/CombineHash.h similarity index 74% rename from src/utils/combine_hash.h rename to runtime/Includes/Utils/CombineHash.h index d86ec79..a11a35c 100644 --- a/src/utils/combine_hash.h +++ b/runtime/Includes/Utils/CombineHash.h @@ -1,31 +1,28 @@ /* ************************************************************************** */ /* */ /* ::: :::::::: */ -/* combine_hash.h :+: :+: :+: */ +/* CombineHash.h :+: :+: :+: */ /* +:+ +:+ +:+ */ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/12/14 16:16:06 by maldavid #+# #+# */ -/* Updated: 2024/03/25 19:06:37 by maldavid ### ########.fr */ +/* Updated: 2024/03/27 21:59:30 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __MLX_HASH__ #define __MLX_HASH__ -#include -#include - namespace mlx { - inline void hashCombine([[maybe_unused]] std::size_t& seed) noexcept {} + inline void HashCombine([[maybe_unused]] std::size_t& seed) noexcept {} template - inline void hashCombine(std::size_t& seed, const T& v, Rest... rest) + inline void HashCombine(std::size_t& seed, const T& v, Rest... 
rest) { std::hash hasher; seed ^= hasher(v) + 0x9e3779b9 + (seed << 6) + (seed >> 2); - hashCombine(seed, rest...); + HashCombine(seed, rest...); } } diff --git a/runtime/Includes/Utils/ConstMap.h b/runtime/Includes/Utils/ConstMap.h new file mode 100644 index 0000000..a4e8c40 --- /dev/null +++ b/runtime/Includes/Utils/ConstMap.h @@ -0,0 +1,69 @@ +/* ************************************************************************** */ +/* */ +/* ::: :::::::: */ +/* ConstMap.h :+: :+: :+: */ +/* +:+ +:+ +:+ */ +/* By: maldavid +#+ +:+ +#+ */ +/* +#+#+#+#+#+ +#+ */ +/* Created: 2024/03/27 15:26:39 by maldavid #+# #+# */ +/* Updated: 2024/03/27 21:59:35 by maldavid ### ########.fr */ +/* */ +/* ************************************************************************** */ + +#ifndef __MLX_CONST_MAP__ +#define __MLX_CONST_MAP__ + +namespace mlx +{ + template + class ConstMap + { + public: + using ValueType = std::pair; + using ContainerType = std::vector; + + using iterator = typename ContainerType::const_iterator; + using const_iterator = iterator; + + public: + ConstMap(std::initializer_list init) : m_container(init) + { + std::sort(m_container.begin(), m_container.end()); + } + + ConstMap(ContainerType container) : m_container(std::move(container)) + { + std::sort(m_container.begin(), m_container.end()); + } + + inline const_iterator begin() const { return m_container.begin(); } + inline const_iterator end() const { return m_container.end(); } + + template + inline const_iterator Find(const K& key) const + { + const_iterator it = std::lower_bound(begin(), end(), key, + [](const ValueType& p, const K& key) + { + return p.first < key; + } + ); + return it != end() && it->first == key ? it : end(); + } + + template + inline bool Has(const K& key) const + { + return Find(key) != end(); + } + + inline std::size_t Size() const { return m_container.size(); } + + ~ConstMap() = default; + + private: + ContainerType m_container; + }; +} + +#endif diff --git a/src/utils/dogica_ttf.h b/runtime/Includes/Utils/DogicaTTF.h similarity index 99% rename from src/utils/dogica_ttf.h rename to runtime/Includes/Utils/DogicaTTF.h index 30c48e2..3d24ce9 100644 --- a/src/utils/dogica_ttf.h +++ b/runtime/Includes/Utils/DogicaTTF.h @@ -1,20 +1,18 @@ /* ************************************************************************** */ /* */ /* ::: :::::::: */ -/* dogica_ttf.h :+: :+: :+: */ +/* DogicaTTF.h :+: :+: :+: */ /* +:+ +:+ +:+ */ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/04/11 16:20:25 by maldavid #+# #+# */ -/* Updated: 2024/03/25 19:05:36 by maldavid ### ########.fr */ +/* Updated: 2024/03/27 21:59:40 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __MLX_DOGICA_TTF__ #define __MLX_DOGICA_TTF__ -#include - constexpr const unsigned int dogica_ttf_len = 33860; static const std::vector dogica_ttf = { diff --git a/src/utils/icon_mlx.h b/runtime/Includes/Utils/IconMlx.h similarity index 99% rename from src/utils/icon_mlx.h rename to runtime/Includes/Utils/IconMlx.h index ca05561..4a8e5a3 100644 --- a/src/utils/icon_mlx.h +++ b/runtime/Includes/Utils/IconMlx.h @@ -1,20 +1,18 @@ /* ************************************************************************** */ /* */ /* ::: :::::::: */ -/* icon_mlx.h :+: :+: :+: */ +/* IconMlx.h :+: :+: :+: */ /* +:+ +:+ +:+ */ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/11/25 11:23:16 by maldavid #+# #+# */ -/* Updated: 2024/03/25 19:05:28 by maldavid ### ########.fr 
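A small usage sketch of the ConstMap and HashCombine utilities above, with made-up keys and values and assumed include paths; lookups rely on the sorted vector plus std::lower_bound shown in the header.

	#include <cstddef>
	#include <iostream>
	#include <string_view>
	#include <Utils/ConstMap.h>     // assumed include paths under runtime/Includes
	#include <Utils/CombineHash.h>

	int main()
	{
		// ConstMap keeps its pairs in a sorted std::vector and answers lookups with std::lower_bound.
		const mlx::ConstMap<std::string_view, int> extensions = {
			{ "VK_KHR_swapchain", 1 },
			{ "VK_KHR_surface", 25 },
		};
		std::string_view key = "VK_KHR_surface";
		if(extensions.Has(key))
			std::cout << "surface revision: " << extensions.Find(key)->second << '\n';

		// HashCombine folds any number of hashable values into one seed.
		std::size_t seed = 0;
		mlx::HashCombine(seed, extensions.Size(), key);
		std::cout << "combined hash: " << seed << '\n';
		return 0;
	}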
*/ +/* Updated: 2024/03/27 21:59:45 by maldavid ### ########.fr */ +/* */ +/* ************************************************************************** */ + +#ifndef __ICON_MLX__ +#define __ICON_MLX__ + -#include - constexpr const int logo_mlx_height = 125; constexpr const int logo_mlx_width = 125; constexpr const int logo_mlx_size = logo_mlx_height * logo_mlx_width * 4; diff --git a/src/utils/non_copyable.h b/runtime/Includes/Utils/NonCopyable.h similarity index 100% rename from src/utils/non_copyable.h rename to runtime/Includes/Utils/NonCopyable.h diff --git a/runtime/Includes/Utils/NonOwningPtr.h b/runtime/Includes/Utils/NonOwningPtr.h new file mode 100644 index 0000000..baef061 --- /dev/null +++ b/runtime/Includes/Utils/NonOwningPtr.h @@ -0,0 +1,45 @@ +/* ************************************************************************** */ +/* */ +/* ::: :::::::: */ +/* NonOwningPtr.h :+: :+: :+: */ +/* +:+ +:+ +:+ */ +/* By: maldavid +#+ +:+ +#+ */ +/* +#+#+#+#+#+ +#+ */ +/* Created: 2024/03/27 21:03:37 by maldavid #+# #+# */ +/* Updated: 2024/03/27 21:05:05 by maldavid ### ########.fr */ +/* */ +/* ************************************************************************** */ + +#ifndef __MLX_NON_OWNING_PTR__ +#define __MLX_NON_OWNING_PTR__ + +namespace mlx +{ + template <typename T> + class NonOwningPtr + { + public: + NonOwningPtr(T* ptr = nullptr); + NonOwningPtr(const NonOwningPtr&) = default; + NonOwningPtr(NonOwningPtr&& ptr) noexcept; + + NonOwningPtr& operator=(T* ptr); + NonOwningPtr& operator=(const NonOwningPtr&) = default; + NonOwningPtr& operator=(NonOwningPtr&& ptr) noexcept; + + inline operator bool() const noexcept; + + inline T* Get() const noexcept; + inline T* operator->() const noexcept; + inline T& operator*() const noexcept; + + ~NonOwningPtr() = default; + + private: + T* p_ptr = nullptr; + }; +} + +#include + +#endif diff --git a/runtime/Includes/Utils/NonOwningPtr.inl b/runtime/Includes/Utils/NonOwningPtr.inl new file mode 100644 index 0000000..0247092 --- /dev/null +++ b/runtime/Includes/Utils/NonOwningPtr.inl @@ -0,0 +1,62 @@ +/* **************************************************************************** */ +/* */ +/* ::: :::::::: */ +/* NonOwningPtr.inl :+: :+: :+: */ +/* +:+ +:+ +:+ */ +/* By: maldavid +#+ +:+ +#+ */ +/* +#+#+#+#+#+ +#+ */ +/* Created: 2024/03/27 21:04:00 by maldavid #+# #+# */ +/* Updated: 2024/03/27 21:04:00 by maldavid ### ########.fr */ +/* */ +/* **************************************************************************** */ + +#include + +namespace mlx +{ + template <typename T> + NonOwningPtr<T>::NonOwningPtr(T* ptr) : p_ptr(ptr) {} + + template <typename T> + NonOwningPtr<T>::NonOwningPtr(NonOwningPtr<T>&& ptr) noexcept : p_ptr(ptr.p_ptr) + { + ptr.p_ptr = nullptr; + } + + template <typename T> + NonOwningPtr<T>& NonOwningPtr<T>::operator=(T* ptr) + { + p_ptr = ptr; + return *this; + } + + template <typename T> + NonOwningPtr<T>& NonOwningPtr<T>::operator=(NonOwningPtr<T>&& ptr) noexcept + { + p_ptr = ptr.p_ptr; + ptr.p_ptr = nullptr; + return *this; + } + + template <typename T> + NonOwningPtr<T>::operator bool() const noexcept + { + return p_ptr != nullptr; + } + + template <typename T> + T* NonOwningPtr<T>::Get() const noexcept + { + return p_ptr; + } + + template <typename T> + T* NonOwningPtr<T>::operator->() const noexcept + { + return p_ptr; + } + + template <typename T> + T& NonOwningPtr<T>::operator*() const noexcept + { + return *p_ptr; + } +} diff --git a/src/utils/singleton.h b/runtime/Includes/Utils/Singleton.h similarity index 84% rename from src/utils/singleton.h rename to runtime/Includes/Utils/Singleton.h index ae8246e..33cc136 100644 --- a/src/utils/singleton.h +++ b/runtime/Includes/Utils/Singleton.h @@ 
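A short sketch of the intended NonOwningPtr semantics: it behaves like a raw pointer that documents non-ownership, so copies are cheap and a moved-from pointer is simply nulled; the Dummy payload and include path are illustrative only.

	#include <cassert>
	#include <utility>
	#include <Utils/NonOwningPtr.h> // assumed include path under runtime/Includes

	struct Dummy { int value = 42; }; // hypothetical payload, lifetime managed elsewhere

	int main()
	{
		Dummy owned;
		mlx::NonOwningPtr<Dummy> ptr = &owned;
		assert(ptr && ptr->value == 42);        // operator bool and operator->

		mlx::NonOwningPtr<Dummy> moved = std::move(ptr);
		assert(!ptr && moved.Get() == &owned);  // moving nulls the source, nothing is freed
		return 0;
	}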
-1,19 +1,19 @@ /* ************************************************************************** */ /* */ /* ::: :::::::: */ -/* singleton.h :+: :+: :+: */ +/* Singleton.h :+: :+: :+: */ /* +:+ +:+ +:+ */ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/08 19:18:46 by maldavid #+# #+# */ -/* Updated: 2024/03/24 14:42:56 by maldavid ### ########.fr */ +/* Updated: 2024/03/27 18:20:11 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __MLX_SINGLETON__ #define __MLX_SINGLETON__ -#include "non_copyable.h" +#include namespace mlx { @@ -21,7 +21,7 @@ namespace mlx class Singleton : public NonCopyable { public: - inline static T& get() + inline static T& Get() { static T instance; return instance; diff --git a/src/core/application.cpp b/runtime/Sources/Core/Application.cpp similarity index 91% rename from src/core/application.cpp rename to runtime/Sources/Core/Application.cpp index e14379d..8e22407 100644 --- a/src/core/application.cpp +++ b/runtime/Sources/Core/Application.cpp @@ -1,18 +1,18 @@ /* ************************************************************************** */ /* */ /* ::: :::::::: */ -/* application.cpp :+: :+: :+: */ +/* Application.cpp :+: :+: :+: */ /* +:+ +:+ +:+ */ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/04 22:10:52 by maldavid #+# #+# */ -/* Updated: 2024/03/25 22:16:24 by maldavid ### ########.fr */ +/* Updated: 2024/03/27 17:43:18 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ -#include +#include -#include "application.h" +#include #include #include #include @@ -20,11 +20,17 @@ #include #include #include +#include namespace mlx::core { Application::Application() : _fps(), _in(std::make_unique()) { + EventBus::RegisterListener({[](const EventBase& event) + { + + }, "__internal_application" }); + _fps.init(); glfwSetErrorCallback([]([[maybe_unused]] int code, const char* desc) { diff --git a/src/core/bridge.cpp b/runtime/Sources/Core/Bridge.cpp similarity index 100% rename from src/core/bridge.cpp rename to runtime/Sources/Core/Bridge.cpp diff --git a/runtime/Sources/Core/EventBus.cpp b/runtime/Sources/Core/EventBus.cpp new file mode 100644 index 0000000..550882b --- /dev/null +++ b/runtime/Sources/Core/EventBus.cpp @@ -0,0 +1,37 @@ +/* ************************************************************************** */ +/* */ +/* ::: :::::::: */ +/* EventBus.cpp :+: :+: :+: */ +/* +:+ +:+ +:+ */ +/* By: maldavid +#+ +:+ +#+ */ +/* +#+#+#+#+#+ +#+ */ +/* Created: 2024/03/27 17:36:05 by maldavid #+# #+# */ +/* Updated: 2024/03/27 17:37:01 by maldavid ### ########.fr */ +/* */ +/* ************************************************************************** */ + +#include +#include +#include + +namespace mlx +{ + void EventBus::Send(const std::string& listener_name, const EventBase& event) + { + for(const EventListener& listener : s_listeners) + { + if(listener.GetName() == listener_name) + { + listener.Call(event); + return; + } + } + Warning("Event Bus : listener not found, '%'", listener_name); + } + + void EventBus::SendBroadcast(const EventBase& event) + { + for(const EventListener& listener : s_listeners) + listener.Call(event); + } +} diff --git a/runtime/Sources/Core/EventListener.cpp b/runtime/Sources/Core/EventListener.cpp new file mode 100644 index 0000000..cb979e1 --- /dev/null +++ b/runtime/Sources/Core/EventListener.cpp @@ -0,0 +1,21 @@ +/* 
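To illustrate the event plumbing that Application's constructor now relies on, a sketch of registering a listener and broadcasting an event; the ResizeEvent type is made up, and the exact EventBase interface and the signature wrapped by func::function are assumed from the usages visible in this patch.

	#include <cstdint>
	#include <iostream>
	#include <Core/EventBus.h> // assumed include path under runtime/Includes

	// Hypothetical event; EventBase is assumed to expose the virtual What() seen in Logs.cpp below.
	struct ResizeEvent : public mlx::EventBase
	{
		std::uint32_t What() const override { return 1; }
	};

	void RegisterExampleListener()
	{
		// Same registration pattern as Application's "__internal_application" listener above.
		mlx::EventBus::RegisterListener({ [](const mlx::EventBase& event)
		{
			if(event.What() == 1)
				std::cout << "resize event received\n";
		}, "example_listener" });

		mlx::EventBus::SendBroadcast(ResizeEvent{});
	}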
************************************************************************** */ +/* */ +/* ::: :::::::: */ +/* EventListener.cpp :+: :+: :+: */ +/* +:+ +:+ +:+ */ +/* By: maldavid +#+ +:+ +#+ */ +/* +#+#+#+#+#+ +#+ */ +/* Created: 2024/03/27 17:37:09 by maldavid #+# #+# */ +/* Updated: 2024/03/27 17:37:38 by maldavid ### ########.fr */ +/* */ +/* ************************************************************************** */ + +#include +#include + +namespace mlx +{ + EventListener::EventListener(func::function functor, std::string name) + : m_listen_functor(std::move(functor)), m_name(std::move(name)) + {} +} diff --git a/src/core/fps.cpp b/runtime/Sources/Core/Fps.cpp similarity index 53% rename from src/core/fps.cpp rename to runtime/Sources/Core/Fps.cpp index e88b489..59a0926 100644 --- a/src/core/fps.cpp +++ b/runtime/Sources/Core/Fps.cpp @@ -1,43 +1,42 @@ /* ************************************************************************** */ /* */ /* ::: :::::::: */ -/* fps.cpp :+: :+: :+: */ +/* Fps.cpp :+: :+: :+: */ /* +:+ +:+ +:+ */ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2024/01/18 14:56:17 by maldavid #+# #+# */ -/* Updated: 2024/03/25 22:59:13 by maldavid ### ########.fr */ +/* Updated: 2024/03/27 20:53:11 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ -#include - -#include +#include +#include namespace mlx { - void FpsManager::init() + void FpsManager::Init() { - _timer = static_cast(std::chrono::duration_cast(std::chrono::high_resolution_clock::now().time_since_epoch()).count()); - _fps_before = _timer; - _fps_now = _timer; + m_timer = static_cast(std::chrono::duration_cast(std::chrono::high_resolution_clock::now().time_since_epoch()).count()); + m_fps_before = m_timer; + m_fps_now = m_timer; } - bool FpsManager::update() + bool FpsManager::Update() { using namespace std::chrono_literals; - _fps_now = static_cast(std::chrono::duration_cast(std::chrono::high_resolution_clock::now().time_since_epoch()).count()); + m_fps_now = static_cast(std::chrono::duration_cast(std::chrono::high_resolution_clock::now().time_since_epoch()).count()); - if(std::chrono::duration{_fps_now - _timer} >= 1s) - _timer += _fps_now; + if(std::chrono::duration{m_fps_now - m_timer} >= 1s) + m_timer += m_fps_now; - _fps_elapsed_time = _fps_now - _fps_before; - if(_fps_elapsed_time >= _ns) + m_fps_elapsed_time = m_fps_now - m_fps_before; + if(m_fps_elapsed_time >= m_ns) { - _fps_before += _ns; + m_fps_before += m_ns; return true; } - std::this_thread::sleep_for(std::chrono::duration(_ns - 1)); + std::this_thread::sleep_for(std::chrono::duration(m_ns - 1)); return false; } } diff --git a/src/core/graphics.cpp b/runtime/Sources/Core/Graphics.cpp similarity index 100% rename from src/core/graphics.cpp rename to runtime/Sources/Core/Graphics.cpp diff --git a/runtime/Sources/Core/Logs.cpp b/runtime/Sources/Core/Logs.cpp new file mode 100644 index 0000000..adc9a0c --- /dev/null +++ b/runtime/Sources/Core/Logs.cpp @@ -0,0 +1,68 @@ +/* ************************************************************************** */ +/* */ +/* ::: :::::::: */ +/* Logs.cpp :+: :+: :+: */ +/* +:+ +:+ +:+ */ +/* By: maldavid +#+ +:+ +#+ */ +/* +#+#+#+#+#+ +#+ */ +/* Created: 2024/03/27 17:20:55 by maldavid #+# #+# */ +/* Updated: 2024/03/27 17:26:59 by maldavid ### ########.fr */ +/* */ +/* ************************************************************************** */ + +#include +#include +#include +#include + +namespace mlx +{ + namespace Internal + { 
+ struct FatalErrorEvent : public EventBase + { + std::uint32_t What() const override { return 167; } + }; + } + + void Logs::Report(LogType type, std::string message) + { + Report(type, 0, {}, {}, std::move(message)); + } + + void Logs::Report(LogType type, unsigned int line, std::string_view file, std::string_view function, std::string message) + { + using namespace std::literals; + + #ifndef DEBUG + if(type == LogType::Debug) + return; + #endif + + std::string code_infos; + if((type == LogType::Error || type == LogType::FatalError) && !file.empty() && !function.empty()) + { + code_infos += "{in file '"s; + code_infos += file; + code_infos += "', line "s + std::to_string(line) + ", in function '"s; + code_infos += function; + code_infos += "'} "s; + } + + switch(type) + { + case LogType::Debug: std::cout << Ansi::blue << "[Akel Debug] " << Ansi::def << code_infos << message << '\n'; break; + case LogType::Message: std::cout << Ansi::blue << "[Akel Message] " << Ansi::def << code_infos << message << '\n'; break; + case LogType::Warning: std::cout << Ansi::magenta << "[Akel Warning] " << Ansi::def << code_infos << message << '\n'; break; + case LogType::Error: std::cerr << Ansi::red << "[Akel Error] " << Ansi::def << code_infos << message << '\n'; break; + case LogType::FatalError: std::cerr << Ansi::red << "[Akel Fatal Error] " << Ansi::def << code_infos << message << '\n'; break; + + default: break; + } + if(type == LogType::FatalError) + { + std::cout << Ansi::bg_red << "Fatal Error: emergency exit" << Ansi::bg_def << std::endl; + EventBus::Send("__internal_application", Internal::FatalErrorEvent{}); + } + } +} diff --git a/src/core/memory.cpp b/runtime/Sources/Core/Memory.cpp similarity index 100% rename from src/core/memory.cpp rename to runtime/Sources/Core/Memory.cpp diff --git a/src/core/profiler.cpp b/runtime/Sources/Core/Profiler.cpp similarity index 100% rename from src/core/profiler.cpp rename to runtime/Sources/Core/Profiler.cpp diff --git a/src/core/UUID.cpp b/runtime/Sources/Core/UUID.cpp similarity index 100% rename from src/core/UUID.cpp rename to runtime/Sources/Core/UUID.cpp diff --git a/runtime/Sources/Drivers/GLFW/GLFWInputs.cpp b/runtime/Sources/Drivers/GLFW/GLFWInputs.cpp new file mode 100644 index 0000000..9379cf1 --- /dev/null +++ b/runtime/Sources/Drivers/GLFW/GLFWInputs.cpp @@ -0,0 +1,22 @@ +/* ************************************************************************** */ +/* */ +/* ::: :::::::: */ +/* GLFWInputs.cpp :+: :+: :+: */ +/* +:+ +:+ +:+ */ +/* By: maldavid +#+ +:+ +#+ */ +/* +#+#+#+#+#+ +#+ */ +/* Created: 2024/03/27 18:39:32 by maldavid #+# #+# */ +/* Updated: 2024/03/27 18:42:18 by maldavid ### ########.fr */ +/* */ +/* ************************************************************************** */ + +#include +#include + +namespace mlx +{ + void GLFWInputs::Update() noexcept + { + glfwPollEvents(); + } +} diff --git a/src/platform/inputs.cpp b/runtime/Sources/Platform/Inputs.cpp similarity index 92% rename from src/platform/inputs.cpp rename to runtime/Sources/Platform/Inputs.cpp index 790b59b..2cc6ca6 100644 --- a/src/platform/inputs.cpp +++ b/runtime/Sources/Platform/Inputs.cpp @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/05 16:30:19 by maldavid #+# #+# */ -/* Updated: 2024/03/25 23:13:16 by maldavid ### ########.fr */ +/* Updated: 2024/03/27 15:50:07 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -24,9 +24,11 @@ namespace mlx _xRel = 
0; _yRel = 0; + glfwPollEvents(); + static int i = 0; i++; - if(i >= 150) + if(i >= 500) { auto& hooks = _events_hooks[0]; auto& win_hook = hooks[MLX_WINDOW_EVENT]; diff --git a/src/platform/window.cpp b/runtime/Sources/Platform/Window.cpp similarity index 100% rename from src/platform/window.cpp rename to runtime/Sources/Platform/Window.cpp diff --git a/src/renderer/buffers/vk_buffer.cpp b/runtime/Sources/Renderer/Buffers/Buffer.cpp similarity index 100% rename from src/renderer/buffers/vk_buffer.cpp rename to runtime/Sources/Renderer/Buffers/Buffer.cpp diff --git a/src/renderer/buffers/vk_ubo.cpp b/runtime/Sources/Renderer/Buffers/UniformBuffer.cpp similarity index 100% rename from src/renderer/buffers/vk_ubo.cpp rename to runtime/Sources/Renderer/Buffers/UniformBuffer.cpp diff --git a/src/renderer/buffers/vk_vbo.cpp b/runtime/Sources/Renderer/Buffers/VertexBuffer.cpp similarity index 100% rename from src/renderer/buffers/vk_vbo.cpp rename to runtime/Sources/Renderer/Buffers/VertexBuffer.cpp diff --git a/src/renderer/command/vk_cmd_buffer.cpp b/runtime/Sources/Renderer/Command/CommandBuffer.cpp similarity index 100% rename from src/renderer/command/vk_cmd_buffer.cpp rename to runtime/Sources/Renderer/Command/CommandBuffer.cpp diff --git a/src/renderer/command/cmd_manager.cpp b/runtime/Sources/Renderer/Command/CommandManager.cpp similarity index 100% rename from src/renderer/command/cmd_manager.cpp rename to runtime/Sources/Renderer/Command/CommandManager.cpp diff --git a/src/renderer/command/vk_cmd_pool.cpp b/runtime/Sources/Renderer/Command/CommandPool.cpp similarity index 100% rename from src/renderer/command/vk_cmd_pool.cpp rename to runtime/Sources/Renderer/Command/CommandPool.cpp diff --git a/src/renderer/command/single_time_cmd_manager.cpp b/runtime/Sources/Renderer/Command/SingleTimeCommandManager.cpp similarity index 100% rename from src/renderer/command/single_time_cmd_manager.cpp rename to runtime/Sources/Renderer/Command/SingleTimeCommandManager.cpp diff --git a/src/renderer/core/vk_device.cpp b/runtime/Sources/Renderer/Core/Device.cpp similarity index 100% rename from src/renderer/core/vk_device.cpp rename to runtime/Sources/Renderer/Core/Device.cpp diff --git a/src/renderer/core/vk_fence.cpp b/runtime/Sources/Renderer/Core/Fence.cpp similarity index 100% rename from src/renderer/core/vk_fence.cpp rename to runtime/Sources/Renderer/Core/Fence.cpp diff --git a/src/renderer/core/vk_instance.cpp b/runtime/Sources/Renderer/Core/Instance.cpp similarity index 100% rename from src/renderer/core/vk_instance.cpp rename to runtime/Sources/Renderer/Core/Instance.cpp diff --git a/src/renderer/core/memory.cpp b/runtime/Sources/Renderer/Core/Memory.cpp similarity index 100% rename from src/renderer/core/memory.cpp rename to runtime/Sources/Renderer/Core/Memory.cpp diff --git a/src/renderer/core/vk_queues.cpp b/runtime/Sources/Renderer/Core/Queues.cpp similarity index 100% rename from src/renderer/core/vk_queues.cpp rename to runtime/Sources/Renderer/Core/Queues.cpp diff --git a/src/renderer/core/render_core.cpp b/runtime/Sources/Renderer/Core/RenderCore.cpp similarity index 100% rename from src/renderer/core/render_core.cpp rename to runtime/Sources/Renderer/Core/RenderCore.cpp diff --git a/src/renderer/core/vk_semaphore.cpp b/runtime/Sources/Renderer/Core/Semaphore.cpp similarity index 100% rename from src/renderer/core/vk_semaphore.cpp rename to runtime/Sources/Renderer/Core/Semaphore.cpp diff --git a/src/renderer/core/vk_surface.cpp b/runtime/Sources/Renderer/Core/Surface.cpp similarity 
index 100% rename from src/renderer/core/vk_surface.cpp rename to runtime/Sources/Renderer/Core/Surface.cpp diff --git a/src/renderer/core/vk_validation_layers.cpp b/runtime/Sources/Renderer/Core/ValidationLayers.cpp similarity index 100% rename from src/renderer/core/vk_validation_layers.cpp rename to runtime/Sources/Renderer/Core/ValidationLayers.cpp diff --git a/src/renderer/descriptors/descriptor_pool_manager.cpp b/runtime/Sources/Renderer/Descriptors/descriptor_pool_manager.cpp similarity index 100% rename from src/renderer/descriptors/descriptor_pool_manager.cpp rename to runtime/Sources/Renderer/Descriptors/descriptor_pool_manager.cpp diff --git a/src/renderer/descriptors/vk_descriptor_pool.cpp b/runtime/Sources/Renderer/Descriptors/vk_descriptor_pool.cpp similarity index 100% rename from src/renderer/descriptors/vk_descriptor_pool.cpp rename to runtime/Sources/Renderer/Descriptors/vk_descriptor_pool.cpp diff --git a/src/renderer/descriptors/vk_descriptor_set.cpp b/runtime/Sources/Renderer/Descriptors/vk_descriptor_set.cpp similarity index 100% rename from src/renderer/descriptors/vk_descriptor_set.cpp rename to runtime/Sources/Renderer/Descriptors/vk_descriptor_set.cpp diff --git a/src/renderer/descriptors/vk_descriptor_set_layout.cpp b/runtime/Sources/Renderer/Descriptors/vk_descriptor_set_layout.cpp similarity index 100% rename from src/renderer/descriptors/vk_descriptor_set_layout.cpp rename to runtime/Sources/Renderer/Descriptors/vk_descriptor_set_layout.cpp diff --git a/src/renderer/images/texture.cpp b/runtime/Sources/Renderer/Images/texture.cpp similarity index 100% rename from src/renderer/images/texture.cpp rename to runtime/Sources/Renderer/Images/texture.cpp diff --git a/src/renderer/images/texture_atlas.cpp b/runtime/Sources/Renderer/Images/texture_atlas.cpp similarity index 100% rename from src/renderer/images/texture_atlas.cpp rename to runtime/Sources/Renderer/Images/texture_atlas.cpp diff --git a/src/renderer/images/vk_image.cpp b/runtime/Sources/Renderer/Images/vk_image.cpp similarity index 100% rename from src/renderer/images/vk_image.cpp rename to runtime/Sources/Renderer/Images/vk_image.cpp diff --git a/src/renderer/pipeline/pipeline.cpp b/runtime/Sources/Renderer/Pipelines/pipeline.cpp similarity index 100% rename from src/renderer/pipeline/pipeline.cpp rename to runtime/Sources/Renderer/Pipelines/pipeline.cpp diff --git a/src/renderer/pixel_put.cpp b/runtime/Sources/Renderer/PixelPut.cpp similarity index 100% rename from src/renderer/pixel_put.cpp rename to runtime/Sources/Renderer/PixelPut.cpp diff --git a/src/renderer/renderer.cpp b/runtime/Sources/Renderer/Renderer.cpp similarity index 100% rename from src/renderer/renderer.cpp rename to runtime/Sources/Renderer/Renderer.cpp diff --git a/src/renderer/renderpass/vk_framebuffer.cpp b/runtime/Sources/Renderer/Renderpass/Framebuffer.cpp similarity index 100% rename from src/renderer/renderpass/vk_framebuffer.cpp rename to runtime/Sources/Renderer/Renderpass/Framebuffer.cpp diff --git a/src/renderer/renderpass/vk_render_pass.cpp b/runtime/Sources/Renderer/Renderpass/Renderpass.cpp similarity index 100% rename from src/renderer/renderpass/vk_render_pass.cpp rename to runtime/Sources/Renderer/Renderpass/Renderpass.cpp diff --git a/src/renderer/swapchain/vk_swapchain.cpp b/runtime/Sources/Renderer/Renderpass/Swapchain.cpp similarity index 100% rename from src/renderer/swapchain/vk_swapchain.cpp rename to runtime/Sources/Renderer/Renderpass/Swapchain.cpp diff --git a/src/renderer/texts/font.cpp 
b/runtime/Sources/Renderer/Texts/Font.cpp similarity index 100% rename from src/renderer/texts/font.cpp rename to runtime/Sources/Renderer/Texts/Font.cpp diff --git a/src/renderer/texts/font_library.cpp b/runtime/Sources/Renderer/Texts/FontLibrary.cpp similarity index 100% rename from src/renderer/texts/font_library.cpp rename to runtime/Sources/Renderer/Texts/FontLibrary.cpp diff --git a/src/renderer/texts/text.cpp b/runtime/Sources/Renderer/Texts/Text.cpp similarity index 100% rename from src/renderer/texts/text.cpp rename to runtime/Sources/Renderer/Texts/Text.cpp diff --git a/src/renderer/texts/text_descriptor.cpp b/runtime/Sources/Renderer/Texts/TextDescriptor.cpp similarity index 100% rename from src/renderer/texts/text_descriptor.cpp rename to runtime/Sources/Renderer/Texts/TextDescriptor.cpp diff --git a/src/renderer/texts/text_library.cpp b/runtime/Sources/Renderer/Texts/TextLibrary.cpp similarity index 100% rename from src/renderer/texts/text_library.cpp rename to runtime/Sources/Renderer/Texts/TextLibrary.cpp diff --git a/src/renderer/texts/text_manager.cpp b/runtime/Sources/Renderer/Texts/TextManager.cpp similarity index 100% rename from src/renderer/texts/text_manager.cpp rename to runtime/Sources/Renderer/Texts/TextManager.cpp diff --git a/src/core/application.h b/src/core/application.h deleted file mode 100644 index ff66a0f..0000000 --- a/src/core/application.h +++ /dev/null @@ -1,73 +0,0 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* application.h :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2022/10/04 21:49:46 by maldavid #+# #+# */ -/* Updated: 2024/03/25 19:13:27 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - -#ifndef __MLX_APPLICATION__ -#define __MLX_APPLICATION__ - -#include -#include -#include -#include -#include - -namespace mlx::core -{ - class Application - { - public: - Application(); - - inline void getMousePos(int* x, int* y) noexcept; - inline void mouseMove(void* win, int x, int y) noexcept; - - inline void onEvent(void* win, int event, int (*funct_ptr)(int, void*), void* param) noexcept; - - inline void getScreenSize(void* win, int* w, int* h) noexcept; - - inline void setFPSCap(std::uint32_t fps) noexcept; - - inline void* newGraphicsSuport(std::size_t w, std::size_t h, const char* title); - inline void clearGraphicsSupport(void* win); - inline void destroyGraphicsSupport(void* win); - - inline void pixelPut(void* win, int x, int y, std::uint32_t color) const noexcept; - inline void stringPut(void* win, int x, int y, std::uint32_t color, char* str); - - void* newTexture(int w, int h); - void* newStbTexture(char* file, int* w, int* h); // stb textures are format managed by stb image (png, jpg, bpm, ...) 
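For context on that comment, newStbTexture defers decoding to stb_image, whose canonical loading call looks like this sketch (placeholder path handling, error reporting reduced to a message); the project's actual implementation lives in the renamed texture sources.

	#include <cstdio>
	#include <stb_image.h> // third_party dependency already used by the project

	// Decode any stb_image-supported file (png, jpg, bmp, ...) into RGBA8 pixels.
	unsigned char* LoadRgbaPixels(const char* path, int* w, int* h)
	{
		int channels = 0;
		unsigned char* pixels = stbi_load(path, w, h, &channels, STBI_rgb_alpha); // force 4 channels
		if(pixels == nullptr)
			std::fprintf(stderr, "stb_image failed: %s\n", stbi_failure_reason());
		return pixels; // caller releases with stbi_image_free()
	}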
- inline void texturePut(void* win, void* img, int x, int y); - inline int getTexturePixel(void* img, int x, int y); - inline void setTexturePixel(void* img, int x, int y, std::uint32_t color); - void destroyTexture(void* ptr); - - inline void loopHook(int (*f)(void*), void* param); - inline void loopEnd() noexcept; - - inline void loadFont(void* win, const std::filesystem::path& filepath, float scale); - - void run() noexcept; - - ~Application(); - - private: - FpsManager _fps; - std::list _textures; - std::vector> _graphics; - std::function _loop_hook; - std::unique_ptr _in; - void* _param = nullptr; - }; -} - -#include - -#endif // __MLX_APPLICATION__ diff --git a/src/core/application.inl b/src/core/application.inl deleted file mode 100644 index f98d510..0000000 --- a/src/core/application.inl +++ /dev/null @@ -1,196 +0,0 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* application.inl :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2022/10/04 21:49:46 by maldavid #+# #+# */ -/* Updated: 2023/04/02 14:56:27 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - -#include - -#define CHECK_WINDOW_PTR(win) \ - if(win == nullptr) \ - { \ - core::error::report(e_kind::error, "invalid window ptr (NULL)"); \ - return; \ - } \ - else if(*static_cast(win) < 0 || *static_cast(win) > static_cast(_graphics.size()))\ - { \ - core::error::report(e_kind::error, "invalid window ptr"); \ - return; \ - } else {} - -#define CHECK_IMAGE_PTR(img, retval) \ - if(img == nullptr) \ - { \ - core::error::report(e_kind::error, "invalid image ptr (NULL)"); \ - retval; \ - } \ - else if(std::find_if(_textures.begin(), _textures.end(), [=](const Texture& texture) \ - { \ - return &texture == img; \ - }) == _textures.end()) \ - { \ - core::error::report(e_kind::error, "invalid image ptr"); \ - retval; \ - } else {} - -namespace mlx::core -{ - void Application::getMousePos(int* x, int* y) noexcept - { - *x = _in->getX(); - *y = _in->getY(); - } - - void Application::mouseMove(void* win, int x, int y) noexcept - { - CHECK_WINDOW_PTR(win); - if(!_graphics[*static_cast(win)]->hasWindow()) - { - error::report(e_kind::warning, "trying to move the mouse relative to a window that is targeting an image and not a real window, this is not allowed (move ignored)"); - return; - } - } - - void Application::onEvent(void* win, int event, int (*funct_ptr)(int, void*), void* param) noexcept - { - CHECK_WINDOW_PTR(win); - if(!_graphics[*static_cast(win)]->hasWindow()) - { - error::report(e_kind::warning, "trying to add event hook for a window that is targeting an image and not a real window, this is not allowed (hook ignored)"); - return; - } - _in->onEvent(_graphics[*static_cast(win)]->getWindow()->getID(), event, funct_ptr, param); - } - - void Application::getScreenSize(void* win, int* w, int* h) noexcept - { - CHECK_WINDOW_PTR(win); - *w = 0; - *h = 0; - } - - void Application::setFPSCap(std::uint32_t fps) noexcept - { - _fps.setMaxFPS(fps); - } - - void* Application::newGraphicsSuport(std::size_t w, std::size_t h, const char* title) - { - MLX_PROFILE_FUNCTION(); - auto it = std::find_if(_textures.begin(), _textures.end(), [=](const Texture& texture) - { - return &texture == reinterpret_cast(const_cast(title)); - }); - if(it != _textures.end()) - _graphics.emplace_back(std::make_unique(w, h, reinterpret_cast(const_cast(title)), _graphics.size())); 
- else - { - if(title == NULL) - { - core::error::report(e_kind::fatal_error, "invalid window title (NULL)"); - return nullptr; - } - _graphics.emplace_back(std::make_unique(w, h, title, _graphics.size())); - _in->addWindow(_graphics.back()->getWindow()); - } - return static_cast(&_graphics.back()->getID()); - } - - void Application::clearGraphicsSupport(void* win) - { - MLX_PROFILE_FUNCTION(); - CHECK_WINDOW_PTR(win); - _graphics[*static_cast(win)]->clearRenderData(); - } - - void Application::destroyGraphicsSupport(void* win) - { - MLX_PROFILE_FUNCTION(); - CHECK_WINDOW_PTR(win); - _graphics[*static_cast(win)].reset(); - } - - void Application::pixelPut(void* win, int x, int y, std::uint32_t color) const noexcept - { - MLX_PROFILE_FUNCTION(); - CHECK_WINDOW_PTR(win); - _graphics[*static_cast(win)]->pixelPut(x, y, color); - } - - void Application::stringPut(void* win, int x, int y, std::uint32_t color, char* str) - { - MLX_PROFILE_FUNCTION(); - CHECK_WINDOW_PTR(win); - if(str == nullptr) - { - core::error::report(e_kind::error, "wrong text (NULL)"); - return; - } - if(std::strlen(str) == 0) - { - core::error::report(e_kind::warning, "trying to put an empty text"); - return; - } - _graphics[*static_cast(win)]->stringPut(x, y, color, str); - } - - void Application::loadFont(void* win, const std::filesystem::path& filepath, float scale) - { - MLX_PROFILE_FUNCTION(); - CHECK_WINDOW_PTR(win); - _graphics[*static_cast(win)]->loadFont(filepath, scale); - } - - void Application::texturePut(void* win, void* img, int x, int y) - { - MLX_PROFILE_FUNCTION(); - CHECK_WINDOW_PTR(win); - CHECK_IMAGE_PTR(img, return); - Texture* texture = static_cast(img); - if(!texture->isInit()) - core::error::report(e_kind::error, "trying to put a texture that has been destroyed"); - else - _graphics[*static_cast(win)]->texturePut(texture, x, y); - } - - int Application::getTexturePixel(void* img, int x, int y) - { - MLX_PROFILE_FUNCTION(); - CHECK_IMAGE_PTR(img, return 0); - Texture* texture = static_cast(img); - if(!texture->isInit()) - { - core::error::report(e_kind::error, "trying to get a pixel from texture that has been destroyed"); - return 0; - } - return texture->getPixel(x, y); - } - - void Application::setTexturePixel(void* img, int x, int y, std::uint32_t color) - { - MLX_PROFILE_FUNCTION(); - CHECK_IMAGE_PTR(img, return); - Texture* texture = static_cast(img); - if(!texture->isInit()) - core::error::report(e_kind::error, "trying to set a pixel on texture that has been destroyed"); - else - texture->setPixel(x, y, color); - } - - void Application::loopHook(int (*f)(void*), void* param) - { - _loop_hook = f; - _param = param; - } - - void Application::loopEnd() noexcept - { - _in->finish(); - } -} diff --git a/src/core/errors.cpp b/src/core/errors.cpp deleted file mode 100644 index 098abc8..0000000 --- a/src/core/errors.cpp +++ /dev/null @@ -1,41 +0,0 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* errors.cpp :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2022/10/04 17:48:06 by maldavid #+# #+# */ -/* Updated: 2024/03/25 17:45:12 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - -#include - -#include "errors.h" - -constexpr const int BUFFER_SIZE = 4096; - -namespace mlx::core::error -{ - void report(e_kind kind, std::string msg, ...) 
- { - char buffer[BUFFER_SIZE]; - - va_list al; - va_start(al, msg); - std::vsnprintf(buffer, BUFFER_SIZE, msg.c_str(), al); - va_end(al); - - switch(kind) - { - case e_kind::message: std::cout << "\033[1;34m[MacroLibX] Message : \033[1;0m" << buffer << std::endl; break; - case e_kind::warning: std::cout << "\033[1;35m[MacroLibX] Warning : \033[1;0m" << buffer << std::endl; break; - case e_kind::error: std::cerr << "\033[1;31m[MacroLibX] Error : \033[1;0m" << buffer << std::endl; break; - case e_kind::fatal_error: - std::cerr << "\033[1;31m[MacroLibX] Fatal Error : \033[1;0m" << buffer << std::endl; - std::exit(EXIT_FAILURE); - break; - } - } -} diff --git a/src/core/graphics.h b/src/core/graphics.h deleted file mode 100644 index bb9935d..0000000 --- a/src/core/graphics.h +++ /dev/null @@ -1,76 +0,0 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* graphics.h :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2023/04/02 14:49:49 by maldavid #+# #+# */ -/* Updated: 2024/03/27 00:32:28 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - -#ifndef __MLX_GRAPHICS__ -#define __MLX_GRAPHICS__ - -#include -#include -#include -#include -#include -#include -#include -#include -#include - -namespace mlx -{ - class GraphicsSupport : public NonCopyable - { - public: - GraphicsSupport(std::size_t w, std::size_t h, Texture* render_target, int id); - GraphicsSupport(std::size_t w, std::size_t h, std::string title, int id); - - inline int& getID() noexcept; - inline std::shared_ptr getWindow(); - - void render() noexcept; - - inline void clearRenderData() noexcept; - inline void pixelPut(int x, int y, std::uint32_t color) noexcept; - inline void stringPut(int x, int y, std::uint32_t color, std::string str); - inline void texturePut(Texture* texture, int x, int y); - inline void loadFont(const std::filesystem::path& filepath, float scale); - inline void tryEraseTextureFromManager(Texture* texture) noexcept; - - inline bool hasWindow() const noexcept { return _has_window; } - - inline Renderer& getRenderer() { return *_renderer; } - - ~GraphicsSupport(); - - private: - PixelPutPipeline _pixel_put_pipeline; - - std::vector _drawlist; - - TextManager _text_manager; - TextureManager _texture_manager; - - glm::mat4 _proj = glm::mat4(1.0); - - std::shared_ptr _window; - std::unique_ptr _renderer; - - std::size_t _width = 0; - std::size_t _height = 0; - - int _id; - - bool _has_window; - }; -} - -#include - -#endif diff --git a/src/core/graphics.inl b/src/core/graphics.inl deleted file mode 100644 index c6f60f6..0000000 --- a/src/core/graphics.inl +++ /dev/null @@ -1,79 +0,0 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* graphics.inl :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2023/04/02 15:13:55 by maldavid #+# #+# */ -/* Updated: 2023/04/02 15:26:16 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - -#include - -namespace mlx -{ - int& GraphicsSupport::getID() noexcept { return _id; } - std::shared_ptr GraphicsSupport::getWindow() { return _window; } - - void GraphicsSupport::clearRenderData() noexcept - { - MLX_PROFILE_FUNCTION(); - _drawlist.clear(); - _pixel_put_pipeline.clear(); - _text_manager.clear(); - _texture_manager.clear(); 
- } - - void GraphicsSupport::pixelPut(int x, int y, std::uint32_t color) noexcept - { - MLX_PROFILE_FUNCTION(); - _pixel_put_pipeline.setPixel(x, y, color); - } - - void GraphicsSupport::stringPut(int x, int y, std::uint32_t color, std::string str) - { - MLX_PROFILE_FUNCTION(); - std::pair res = _text_manager.registerText(x, y, color, str); - if(!res.second) // if this is not a completly new text draw - { - auto it = std::find(_drawlist.begin(), _drawlist.end(), res.first); - if(it != _drawlist.end()) - _drawlist.erase(it); - } - _drawlist.push_back(res.first); - } - - void GraphicsSupport::texturePut(Texture* texture, int x, int y) - { - MLX_PROFILE_FUNCTION(); - auto res = _texture_manager.registerTexture(texture, x, y); - if(!res.second) // if this is not a completly new texture draw - { - auto it = std::find(_drawlist.begin(), _drawlist.end(), res.first); - if(it != _drawlist.end()) - _drawlist.erase(it); - } - _drawlist.push_back(res.first); - } - - void GraphicsSupport::loadFont(const std::filesystem::path& filepath, float scale) - { - MLX_PROFILE_FUNCTION(); - _text_manager.loadFont(*_renderer, filepath, scale); - } - - void GraphicsSupport::tryEraseTextureFromManager(Texture* texture) noexcept - { - MLX_PROFILE_FUNCTION(); - for(auto it = _drawlist.begin(); it != _drawlist.end();) - { - if(_texture_manager.isTextureKnown(texture)) - it = _drawlist.erase(it); - else - ++it; - } - _texture_manager.eraseTextures(texture); - } -} diff --git a/src/platform/inputs.h b/src/platform/inputs.h deleted file mode 100644 index 7cd9078..0000000 --- a/src/platform/inputs.h +++ /dev/null @@ -1,71 +0,0 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* inputs.h :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2022/10/05 16:27:35 by maldavid #+# #+# */ -/* Updated: 2024/03/27 00:31:56 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - -#ifndef __MLX_INPUTS__ -#define __MLX_INPUTS__ - -#include "window.h" - -namespace mlx -{ - struct Hook - { - func::function hook; - void* param = nullptr; - }; - - class Input - { - public: - Input() = default; - - void update(); - - inline bool isMouseMoving() const noexcept { return _xRel || _yRel; } - - inline int getX() const noexcept { return _x; } - inline int getY() const noexcept { return _y; } - - inline int getXRel() const noexcept { return _xRel; } - inline int getYRel() const noexcept { return _yRel; } - - inline bool isRunning() const noexcept { return !_end; } - inline constexpr void finish() noexcept { _end = true; } - - inline void addWindow(std::shared_ptr window) - { - _windows[window->getID()] = window; - _events_hooks[window->getID()] = {}; - } - - inline void onEvent(std::uint32_t id, int event, int (*funct_ptr)(int, void*), void* param) noexcept - { - _events_hooks[id][event].hook = funct_ptr; - _events_hooks[id][event].param = param; - } - - ~Input() = default; - - private: - std::unordered_map> _windows; - std::unordered_map> _events_hooks; - - int _x = 0; - int _y = 0; - int _xRel = 0; - int _yRel = 0; - - bool _end = false; - }; -} - -#endif diff --git a/src/renderer/buffers/vk_buffer.h b/src/renderer/buffers/vk_buffer.h deleted file mode 100644 index 3d7d149..0000000 --- a/src/renderer/buffers/vk_buffer.h +++ /dev/null @@ -1,63 +0,0 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* 
vk_buffer.h :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2022/10/06 23:18:52 by maldavid #+# #+# */ -/* Updated: 2024/03/25 19:12:39 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - -#ifndef __MLX_VK_BUFFER__ -#define __MLX_VK_BUFFER__ - -#include -#include - -namespace mlx -{ - class Buffer : public CmdResource - { - public: - enum class kind { dynamic, dynamic_device_local, uniform, constant }; - - void create(kind type, VkDeviceSize size, VkBufferUsageFlags usage, const char* name, const void* data = nullptr); - void destroy() noexcept; - - inline void mapMem(void** data) noexcept { Render_Core::get().getAllocator().mapMemory(_allocation, data); _is_mapped = true; } - inline bool isMapped() const noexcept { return _is_mapped; } - inline void unmapMem() noexcept { Render_Core::get().getAllocator().unmapMemory(_allocation); _is_mapped = false; } - - void flush(VkDeviceSize size = VK_WHOLE_SIZE, VkDeviceSize offset = 0); - bool copyFromBuffer(const Buffer& buffer) noexcept; - - inline VkBuffer& operator()() noexcept { return _buffer; } - inline VkBuffer& get() noexcept { return _buffer; } - inline VkDeviceSize getSize() const noexcept { return _size; } - inline VkDeviceSize getOffset() const noexcept { return _offset; } - - protected: - void pushToGPU() noexcept; - void swap(Buffer& buffer) noexcept; - - protected: - VmaAllocation _allocation; - VkBuffer _buffer = VK_NULL_HANDLE; - VkDeviceSize _offset = 0; - VkDeviceSize _size = 0; - - private: - void createBuffer(VkBufferUsageFlags usage, VmaAllocationCreateInfo info, VkDeviceSize size, const char* name); - - private: - #ifdef DEBUG - std::string _name; - #endif - VkBufferUsageFlags _usage = 0; - bool _is_mapped = false; - }; -} - -#endif diff --git a/src/renderer/buffers/vk_ubo.h b/src/renderer/buffers/vk_ubo.h deleted file mode 100644 index 0c2c091..0000000 --- a/src/renderer/buffers/vk_ubo.h +++ /dev/null @@ -1,48 +0,0 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* vk_ubo.h :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2022/10/06 18:45:29 by maldavid #+# #+# */ -/* Updated: 2024/03/25 19:12:32 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - -#ifndef __MLX_VK_UBO__ -#define __MLX_VK_UBO__ - -#include "vk_buffer.h" - -namespace mlx -{ - class UBO - { - public: - void create(class Renderer* renderer, std::uint32_t size, const char* name); - - void setData(std::uint32_t size, const void* data); - void setDynamicData(std::uint32_t size, const void* data); - - void destroy() noexcept; - - unsigned int getSize() noexcept; - unsigned int getOffset() noexcept; - VkDeviceMemory getDeviceMemory() noexcept; - VkBuffer& operator()() noexcept; - VkBuffer& get() noexcept; - - inline unsigned int getSize(int i) noexcept { return _buffers[i].getSize(); } - inline unsigned int getOffset(int i) noexcept { return _buffers[i].getOffset(); } - inline VkBuffer& operator()(int i) noexcept { return _buffers[i].get(); } - inline VkBuffer& get(int i) noexcept { return _buffers[i].get(); } - - private: - std::array _buffers; - std::array _maps; - class Renderer* _renderer = nullptr; - }; -} - -#endif // __MLX_VK_UBO__ diff --git a/src/renderer/buffers/vk_vbo.h b/src/renderer/buffers/vk_vbo.h deleted file mode 100644 index 88d0023..0000000 
--- a/src/renderer/buffers/vk_vbo.h +++ /dev/null @@ -1,46 +0,0 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* vk_vbo.h :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2022/10/06 18:27:38 by maldavid #+# #+# */ -/* Updated: 2024/01/10 23:04:40 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - -#ifndef __MLX_VK_VBO__ -#define __MLX_VK_VBO__ - -#include "vk_buffer.h" -#include -#include - -namespace mlx -{ - class VBO : public Buffer - { - public: - inline void create(std::uint32_t size, const void* data, const char* name) { Buffer::create(Buffer::kind::dynamic, size, VK_BUFFER_USAGE_VERTEX_BUFFER_BIT, name, data); } - void setData(std::uint32_t size, const void* data); - inline void bind(Renderer& renderer) noexcept { renderer.getActiveCmdBuffer().bindVertexBuffer(*this); } - }; - - class D_VBO : public Buffer - { - public: - inline void create(std::uint32_t size, const void* data, const char* name) { Buffer::create(Buffer::kind::dynamic_device_local, size, VK_BUFFER_USAGE_VERTEX_BUFFER_BIT, name, data); } - void setData(std::uint32_t size, const void* data); - inline void bind(Renderer& renderer) noexcept { renderer.getActiveCmdBuffer().bindVertexBuffer(*this); } - }; - - class C_VBO : public Buffer - { - public: - inline void create(std::uint32_t size, const void* data, const char* name) { Buffer::create(Buffer::kind::constant, size, VK_BUFFER_USAGE_VERTEX_BUFFER_BIT, name, data); } - inline void bind(Renderer& renderer) noexcept { renderer.getActiveCmdBuffer().bindVertexBuffer(*this); } - }; -} - -#endif // __MLX_VK_VBO__ diff --git a/src/renderer/command/vk_cmd_buffer.h b/src/renderer/command/vk_cmd_buffer.h deleted file mode 100644 index 8af1f2e..0000000 --- a/src/renderer/command/vk_cmd_buffer.h +++ /dev/null @@ -1,85 +0,0 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* vk_cmd_buffer.h :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2022/10/06 18:25:42 by maldavid #+# #+# */ -/* Updated: 2024/03/25 19:12:17 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - -#ifndef __MLX_VK_CMD_BUFFER__ -#define __MLX_VK_CMD_BUFFER__ - -#include - -namespace mlx -{ - class Buffer; - class Image; - - class CmdBuffer - { - public: - enum class state - { - uninit = 0, // buffer not initialized or destroyed - ready, // buffer ready to be used after having been submitted - idle, // buffer has recorded informations but has not been submitted - recording, // buffer is currently recording - submitted, // buffer has been submitted - }; - - enum class kind - { - single_time = 0, - long_time - }; - - public: - void init(kind type, class CmdManager* manager); - void init(kind type, class CmdPool* pool); - void destroy() noexcept; - - void beginRecord(VkCommandBufferUsageFlags usage = 0); - void submit(class Semaphore* semaphores) noexcept; - void submitIdle(bool shouldWaitForExecution = true) noexcept; // TODO : handle `shouldWaitForExecution` as false by default (needs to modify CmdResources lifetimes to do so) - void updateSubmitState() noexcept; - inline void waitForExecution() noexcept { _fence.wait(); updateSubmitState(); _state = state::ready; } - inline void reset() noexcept { vkResetCommandBuffer(_cmd_buffer, 0); } - void 
endRecord(); - - void bindVertexBuffer(Buffer& buffer) noexcept; - void bindIndexBuffer(Buffer& buffer) noexcept; - void copyBuffer(Buffer& dst, Buffer& src) noexcept; - void copyBufferToImage(Buffer& buffer, Image& image) noexcept; - void copyImagetoBuffer(Image& image, Buffer& buffer) noexcept; - void transitionImageLayout(Image& image, VkImageLayout new_layout) noexcept; - - inline bool isInit() const noexcept { return _state != state::uninit; } - inline bool isReadyToBeUsed() const noexcept { return _state == state::ready; } - inline bool isRecording() const noexcept { return _state == state::recording; } - inline bool hasBeenSubmitted() const noexcept { return _state == state::submitted; } - inline state getCurrentState() const noexcept { return _state; } - - inline VkCommandBuffer& operator()() noexcept { return _cmd_buffer; } - inline VkCommandBuffer& get() noexcept { return _cmd_buffer; } - inline Fence& getFence() noexcept { return _fence; } - - private: - void preTransferBarrier() noexcept; - void postTransferBarrier() noexcept; - - private: - std::vector _cmd_resources; - Fence _fence; - VkCommandBuffer _cmd_buffer = VK_NULL_HANDLE; - class CmdPool* _pool = nullptr; - state _state = state::uninit; - kind _type; - }; -} - -#endif // __MLX_VK_CMD_BUFFER__ diff --git a/src/renderer/core/render_core.h b/src/renderer/core/render_core.h deleted file mode 100644 index f2154c8..0000000 --- a/src/renderer/core/render_core.h +++ /dev/null @@ -1,83 +0,0 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* render_core.h :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2022/10/08 19:16:32 by maldavid #+# #+# */ -/* Updated: 2024/03/25 19:11:59 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - -#ifndef __MLX_RENDER_CORE__ -#define __MLX_RENDER_CORE__ - -#include -#include -#include -#include "vk_queues.h" -#include "vk_device.h" -#include "vk_instance.h" -#include "vk_validation_layers.h" -#include "memory.h" - -#include -#include - -namespace mlx -{ - namespace RCore - { - std::optional findMemoryType(std::uint32_t typeFilter, VkMemoryPropertyFlags properties, bool error = true); - const char* verbaliseResultVk(VkResult result); - VkPipelineStageFlags accessFlagsToPipelineStage(VkAccessFlags accessFlags, VkPipelineStageFlags stageFlags); - } - - #ifdef DEBUG - constexpr const bool enableValidationLayers = true; - #else - constexpr const bool enableValidationLayers = false; - #endif - - const std::vector validationLayers = { "VK_LAYER_KHRONOS_validation" }; - - constexpr const int MAX_FRAMES_IN_FLIGHT = 3; - constexpr const int MAX_SETS_PER_POOL = 512; - constexpr const int NUMBER_OF_UNIFORM_BUFFERS = 1; // change this if for wathever reason more than one uniform buffer is needed - - class Render_Core : public Singleton - { - friend class Singleton; - - public: - void init(); - void destroy(); - - inline bool isInit() const noexcept { return _is_init; } - inline Instance& getInstance() noexcept { return _instance; } - inline Device& getDevice() noexcept { return _device; } - inline Queues& getQueue() noexcept { return _queues; } - inline GPUallocator& getAllocator() noexcept { return _allocator; } - inline ValidationLayers& getLayers() noexcept { return _layers; } - inline CmdBuffer& getSingleTimeCmdBuffer() noexcept { return _cmd_manager.getCmdBuffer(); } - inline SingleTimeCmdManager& 
getSingleTimeCmdManager() noexcept { return _cmd_manager; } - inline DescriptorPool& getDescriptorPool() { return _pool_manager.getAvailablePool(); } - - private: - Render_Core() = default; - ~Render_Core() = default; - - private: - ValidationLayers _layers; - SingleTimeCmdManager _cmd_manager; - Queues _queues; - DescriptorPoolManager _pool_manager; - Device _device; - Instance _instance; - GPUallocator _allocator; - bool _is_init = false; - }; -} - -#endif // __MLX_RENDER_CORE__ From 810417a251b942c5b722932bed69ef29d95a4322 Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Sun, 21 Apr 2024 18:10:36 +0200 Subject: [PATCH 008/131] working --- runtime/Includes/Core/Application.h | 4 +- runtime/Includes/Core/DriverLoader.h | 39 +++++ runtime/Includes/Core/DriverLoader.inl | 30 ++++ runtime/Includes/Core/ImagesManager.h | 27 +++ runtime/Includes/Drivers/DriverInstance.h | 35 ++++ .../Drivers/GLFW/GLFWDriverInstance.h | 34 ++++ .../Drivers/GLFW/GLFWDriverInstance.inl | 32 ++++ .../Includes/Renderer/Buffers/IndexBuffer.h | 4 +- .../Includes/Renderer/Buffers/VertexBuffer.h | 4 +- runtime/Includes/Renderer/Core/Queues.h | 4 +- runtime/Includes/Renderer/Images/Image.h | 98 ++++++----- runtime/Includes/Renderer/Images/Texture.h | 54 +++--- .../Includes/Renderer/Images/TextureAtlas.h | 28 ++-- .../Renderer/Images/TextureDescriptor.h | 28 ++-- .../Includes/Renderer/Images/TextureManager.h | 25 ++- .../Includes/Renderer/Pipelines/Pipeline.h | 18 +- runtime/Includes/Renderer/PixelPut.h | 32 ++-- runtime/Includes/Renderer/Renderer.h | 157 +++++++++--------- .../Renderer/Renderpass/FrameBuffer.h | 22 +-- .../Includes/Renderer/Renderpass/RenderPass.h | 20 +-- .../Includes/Renderer/Renderpass/Swapchain.h | 44 ++--- runtime/Includes/Renderer/Texts/Font.h | 40 ++--- runtime/Includes/Renderer/Texts/FontLibrary.h | 24 +-- runtime/Includes/Renderer/Texts/Text.h | 40 ++--- .../Includes/Renderer/Texts/TextDescriptor.h | 24 +-- runtime/Includes/Renderer/Texts/TextLibrary.h | 26 +-- runtime/Includes/Renderer/Texts/TextManager.h | 28 ++-- runtime/Sources/Core/Application.cpp | 12 +- 28 files changed, 558 insertions(+), 375 deletions(-) create mode 100644 runtime/Includes/Core/DriverLoader.h create mode 100644 runtime/Includes/Core/DriverLoader.inl create mode 100644 runtime/Includes/Core/ImagesManager.h create mode 100644 runtime/Includes/Drivers/DriverInstance.h create mode 100644 runtime/Includes/Drivers/GLFW/GLFWDriverInstance.h create mode 100644 runtime/Includes/Drivers/GLFW/GLFWDriverInstance.inl diff --git a/runtime/Includes/Core/Application.h b/runtime/Includes/Core/Application.h index cceb2db..25d369e 100644 --- a/runtime/Includes/Core/Application.h +++ b/runtime/Includes/Core/Application.h @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/04 21:49:46 by maldavid #+# #+# */ -/* Updated: 2024/03/27 21:00:53 by maldavid ### ########.fr */ +/* Updated: 2024/04/03 15:05:24 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -15,6 +15,7 @@ #include #include +#include #include namespace mlx @@ -58,6 +59,7 @@ namespace mlx private: FpsManager m_fps; + DriverLoader m_driver_loader; std::list m_textures; std::vector> m_graphics; std::function f_loop_hook; diff --git a/runtime/Includes/Core/DriverLoader.h b/runtime/Includes/Core/DriverLoader.h new file mode 100644 index 0000000..18e5695 --- /dev/null +++ b/runtime/Includes/Core/DriverLoader.h @@ -0,0 +1,39 @@ +/* 
************************************************************************** */ +/* */ +/* ::: :::::::: */ +/* DriverLoader.h :+: :+: :+: */ +/* +:+ +:+ +:+ */ +/* By: maldavid +#+ +:+ +#+ */ +/* +#+#+#+#+#+ +#+ */ +/* Created: 2024/04/02 16:56:10 by maldavid #+# #+# */ +/* Updated: 2024/04/03 15:02:44 by maldavid ### ########.fr */ +/* */ +/* ************************************************************************** */ + +#ifndef __MLX_CORE_DRIVER_LOADER__ +#define __MLX_CORE_DRIVER_LOADER__ + +#include + +namespace mlx +{ + class DriverLoader + { + public: + DriverLoader() = default; + + template + inline bool LoadDriver(); + + inline void ShutdownAllDrivers(); + + ~DriverLoader() = default; + + private: + std::vector > m_instances; + }; +} + +#include + +#endif diff --git a/runtime/Includes/Core/DriverLoader.inl b/runtime/Includes/Core/DriverLoader.inl new file mode 100644 index 0000000..6104d76 --- /dev/null +++ b/runtime/Includes/Core/DriverLoader.inl @@ -0,0 +1,30 @@ +/* **************************************************************************** */ +/* */ +/* ::: :::::::: */ +/* DriverLoader.inl :+: :+: :+: */ +/* +:+ +:+ +:+ */ +/* By: maldavid +#+ +:+ +#+ */ +/* +#+#+#+#+#+ +#+ */ +/* Created: 2024/04/03 14:55:01 by maldavid #+# #+# */ +/* Updated: 2024/04/03 14:55:01 by maldavid ### ########.fr */ +/* */ +/* **************************************************************************** */ + +#pragma once +#include + +namespace mlx +{ + template + bool DriverLoader::LoadDriver() + { + m_instances.emplace_back(new T)->InitDriver(); + } + + void DriverLoader::ShutdownAllDrivers() + { + for(auto& driver : m_instances) + driver->ShutdownDriver(); + m_instances.clear(); + } +} diff --git a/runtime/Includes/Core/ImagesManager.h b/runtime/Includes/Core/ImagesManager.h new file mode 100644 index 0000000..46e223d --- /dev/null +++ b/runtime/Includes/Core/ImagesManager.h @@ -0,0 +1,27 @@ +/* ************************************************************************** */ +/* */ +/* ::: :::::::: */ +/* ImagesManager.h :+: :+: :+: */ +/* +:+ +:+ +:+ */ +/* By: maldavid +#+ +:+ +#+ */ +/* +#+#+#+#+#+ +#+ */ +/* Created: 2024/04/03 15:11:47 by maldavid #+# #+# */ +/* Updated: 2024/04/21 15:13:43 by maldavid ### ########.fr */ +/* */ +/* ************************************************************************** */ + +#ifndef __MLX_CORE_IMAGES_MANAGER__ +#define __MLX_CORE_IMAGES_MANAGER__ + +namespace mlx +{ + class ImagesManager + { + public: + + private: + std::unordered_set m_textures_registry; + }; +} + +#endif diff --git a/runtime/Includes/Drivers/DriverInstance.h b/runtime/Includes/Drivers/DriverInstance.h new file mode 100644 index 0000000..c72cf97 --- /dev/null +++ b/runtime/Includes/Drivers/DriverInstance.h @@ -0,0 +1,35 @@ +/* ************************************************************************** */ +/* */ +/* ::: :::::::: */ +/* DriverInstance.h :+: :+: :+: */ +/* +:+ +:+ +:+ */ +/* By: maldavid +#+ +:+ +#+ */ +/* +#+#+#+#+#+ +#+ */ +/* Created: 2024/04/02 16:57:20 by maldavid #+# #+# */ +/* Updated: 2024/04/02 17:01:03 by maldavid ### ########.fr */ +/* */ +/* ************************************************************************** */ + +#ifndef __MLX_DRIVER_INSTANCE__ +#define __MLX_DRIVER_INSTANCE__ + +namespace mlx +{ + class DriverInstance + { + public: + DriverInstance() = default; + + virtual bool InitDriver() { m_is_up = true; return true; } + virtual void ShutdownDriver() { m_is_up = false; } + + inline bool IsRunning() const noexcept { return m_is_up; } + + virtual 
~DriverInstance() = default; + + private: + bool m_is_up = false; + }; +} + +#endif diff --git a/runtime/Includes/Drivers/GLFW/GLFWDriverInstance.h b/runtime/Includes/Drivers/GLFW/GLFWDriverInstance.h new file mode 100644 index 0000000..fe1d124 --- /dev/null +++ b/runtime/Includes/Drivers/GLFW/GLFWDriverInstance.h @@ -0,0 +1,34 @@ +/* ************************************************************************** */ +/* */ +/* ::: :::::::: */ +/* GLFWDriverInstance.h :+: :+: :+: */ +/* +:+ +:+ +:+ */ +/* By: maldavid +#+ +:+ +#+ */ +/* +#+#+#+#+#+ +#+ */ +/* Created: 2024/04/02 17:01:51 by maldavid #+# #+# */ +/* Updated: 2024/04/02 17:04:12 by maldavid ### ########.fr */ +/* */ +/* ************************************************************************** */ + +#ifndef __MLX_GLFW_DRIVER_INSTANCE__ +#define __MLX_GLFW_DRIVER_INSTANCE__ + +#include + +namespace mlx +{ + class GLFWDriverInstance : public DriverInstance + { + public: + GLFWDriverInstance() = default; + + inline bool InitDriver() override; + inline void ShutdownDriver() override; + + ~GLFWDriverInstance() override = default; + }; +} + +#include + +#endif diff --git a/runtime/Includes/Drivers/GLFW/GLFWDriverInstance.inl b/runtime/Includes/Drivers/GLFW/GLFWDriverInstance.inl new file mode 100644 index 0000000..b96b66d --- /dev/null +++ b/runtime/Includes/Drivers/GLFW/GLFWDriverInstance.inl @@ -0,0 +1,32 @@ +/* **************************************************************************** */ +/* */ +/* ::: :::::::: */ +/* GLFWDriverInstance.inl :+: :+: :+: */ +/* +:+ +:+ +:+ */ +/* By: maldavid +#+ +:+ +#+ */ +/* +#+#+#+#+#+ +#+ */ +/* Created: 2024/04/02 17:04:23 by maldavid #+# #+# */ +/* Updated: 2024/04/02 17:04:23 by maldavid ### ########.fr */ +/* */ +/* **************************************************************************** */ + +#include + +namespace mlx +{ + bool GLFWDriverInstance::InitDriver() + { + glfwSetErrorCallback([]([[maybe_unused]] int code, const char* desc) + { + FatalError("GLFW Driver Error : %", desc); + }); + glfwInit(); + DebugLog("GLFW Driver loaded"); + } + + void GLFWDriverInstance::ShutdownDriver() + { + glfwTerminate(); + DebugLog("GLFW Driver shutted down"); + } +} diff --git a/runtime/Includes/Renderer/Buffers/IndexBuffer.h b/runtime/Includes/Renderer/Buffers/IndexBuffer.h index 20b6132..4a8f4d8 100644 --- a/runtime/Includes/Renderer/Buffers/IndexBuffer.h +++ b/runtime/Includes/Renderer/Buffers/IndexBuffer.h @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/01/25 15:05:05 by maldavid #+# #+# */ -/* Updated: 2024/03/27 22:11:57 by maldavid ### ########.fr */ +/* Updated: 2024/03/28 22:11:05 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -18,7 +18,7 @@ namespace mlx { - class C_IBO : public Buffer + class ConstantIndexBuffer : public Buffer { public: inline void Create(std::uint32_t size, const std::uint16_t* data, const char* name) { Buffer::Create(BufferType::Constant, size, VK_BUFFER_USAGE_INDEX_BUFFER_BIT, name, data); } diff --git a/runtime/Includes/Renderer/Buffers/VertexBuffer.h b/runtime/Includes/Renderer/Buffers/VertexBuffer.h index 10d7c07..ec193b7 100644 --- a/runtime/Includes/Renderer/Buffers/VertexBuffer.h +++ b/runtime/Includes/Renderer/Buffers/VertexBuffer.h @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/06 18:27:38 by maldavid #+# #+# */ -/* Updated: 2024/03/27 22:18:23 by maldavid ### ########.fr */ +/* Updated: 2024/03/28 22:23:32 by 
maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -19,7 +19,7 @@ namespace mlx { - class VertexBuffer : public Buffer + class RAMVertexBuffer : public Buffer { public: inline void Create(std::uint32_t size, const void* data, const char* name) { Buffer::Create(BufferType::HighDynamic, size, VK_BUFFER_USAGE_VERTEX_BUFFER_BIT, name, data); } diff --git a/runtime/Includes/Renderer/Core/Queues.h b/runtime/Includes/Renderer/Core/Queues.h index 4230e25..3986e17 100644 --- a/runtime/Includes/Renderer/Core/Queues.h +++ b/runtime/Includes/Renderer/Core/Queues.h @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/08 19:01:49 by maldavid #+# #+# */ -/* Updated: 2024/03/27 22:50:52 by maldavid ### ########.fr */ +/* Updated: 2024/03/28 22:05:15 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -23,7 +23,7 @@ namespace mlx std::optional graphics_family; std::optional present_family; - inline bool isComplete() { return graphics_family.has_value() && present_family.has_value(); } + inline bool IsComplete() { return graphics_family.has_value() && present_family.has_value(); } }; public: diff --git a/runtime/Includes/Renderer/Images/Image.h b/runtime/Includes/Renderer/Images/Image.h index 7319917..6f93bed 100644 --- a/runtime/Includes/Renderer/Images/Image.h +++ b/runtime/Includes/Renderer/Images/Image.h @@ -1,87 +1,83 @@ /* ************************************************************************** */ /* */ /* ::: :::::::: */ -/* vk_image.h :+: :+: :+: */ +/* Image.h :+: :+: :+: */ /* +:+ +:+ +:+ */ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/01/25 11:54:21 by maldavid #+# #+# */ -/* Updated: 2024/03/25 19:09:40 by maldavid ### ########.fr */ +/* Updated: 2024/03/28 22:08:35 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __MLX_VK_IMAGE__ #define __MLX_VK_IMAGE__ -#include -#include -#include - -#ifdef DEBUG - #include -#endif +#include +#include +#include namespace mlx { - std::uint32_t formatSize(VkFormat format); - bool isStencilFormat(VkFormat format); - bool isDepthFormat(VkFormat format); - VkFormat bitsToFormat(std::uint32_t bits); - VkPipelineStageFlags layoutToAccessMask(VkImageLayout layout, bool isDestination); + std::uint32_t FormatSize(VkFormat format); + bool IsStencilFormat(VkFormat format); + bool IsDepthFormat(VkFormat format); + VkFormat BitsToFormat(std::uint32_t bits); + VkPipelineStageFlags LayoutToAccessMask(VkImageLayout layout, bool is_destination); - class Image : public CmdResource + class Image : public CommandResource { - friend class SwapChain; + friend class Swapchain; public: Image() = default; - inline void create(VkImage image, VkFormat format, std::uint32_t width, std::uint32_t height, VkImageLayout layout = VK_IMAGE_LAYOUT_UNDEFINED) noexcept + inline void Create(VkImage image, VkFormat format, std::uint32_t width, std::uint32_t height, VkImageLayout layout = VK_IMAGE_LAYOUT_UNDEFINED) noexcept { - _image = image; - _format = format; - _width = width; - _height = height; - _layout = layout; + m_image = image; + m_format = format; + m_width = width; + m_height = height; + m_layout = layout; } - void create(std::uint32_t width, std::uint32_t height, VkFormat format, VkImageTiling tiling, VkImageUsageFlags usage, const char* name, bool decated_memory = false); - void createImageView(VkImageViewType 
type, VkImageAspectFlags aspectFlags) noexcept; - void createSampler() noexcept; - void copyFromBuffer(class Buffer& buffer); - void copyToBuffer(class Buffer& buffer); - void transitionLayout(VkImageLayout new_layout, CmdBuffer* cmd = nullptr); - virtual void destroy() noexcept; + void Create(std::uint32_t width, std::uint32_t height, VkFormat format, VkImageTiling tiling, VkImageUsageFlags usage, const char* name, bool decated_memory = false); + void CreateImageView(VkImageViewType type, VkImageAspectFlags aspect_flags) noexcept; + void CreateSampler() noexcept; + void CopyFromBuffer(class Buffer& buffer); + void CopyToBuffer(class Buffer& buffer); + void TransitionLayout(VkImageLayout new_layout, CmdBuffer* cmd = nullptr); + virtual void Destroy() noexcept; - inline VkImage get() noexcept { return _image; } - inline VkImage operator()() noexcept { return _image; } - inline VkImageView getImageView() const noexcept { return _image_view; } - inline VkFormat getFormat() const noexcept { return _format; } - inline VkImageTiling getTiling() const noexcept { return _tiling; } - inline VkImageLayout getLayout() const noexcept { return _layout; } - inline VkSampler getSampler() const noexcept { return _sampler; } - inline std::uint32_t getWidth() const noexcept { return _width; } - inline std::uint32_t getHeight() const noexcept { return _height; } - inline bool isInit() const noexcept { return _image != VK_NULL_HANDLE; } + inline VkImage Get() noexcept { return m_image; } + inline VkImage operator()() noexcept { return m_image; } + inline VkImageView GetImageView() const noexcept { return m_image_view; } + inline VkFormat GetFormat() const noexcept { return m_format; } + inline VkImageTiling GetTiling() const noexcept { return m_tiling; } + inline VkImageLayout GetLayout() const noexcept { return m_layout; } + inline VkSampler GetSampler() const noexcept { return m_sampler; } + inline std::uint32_t GetWidth() const noexcept { return m_width; } + inline std::uint32_t GetHeight() const noexcept { return m_height; } + inline bool IsInit() const noexcept { return m_image != VK_NULL_HANDLE; } virtual ~Image() = default; private: - void destroySampler() noexcept; - void destroyImageView() noexcept; + void DestroySampler() noexcept; + void DestroyImageView() noexcept; private: - VmaAllocation _allocation; - VkImage _image = VK_NULL_HANDLE; - VkImageView _image_view = VK_NULL_HANDLE; - VkSampler _sampler = VK_NULL_HANDLE; + VmaAllocation m_allocation; + VkImage m_image = VK_NULL_HANDLE; + VkImageView m_image_view = VK_NULL_HANDLE; + VkSampler m_sampler = VK_NULL_HANDLE; #ifdef DEBUG - std::string _name; + std::string m_name; #endif - VkFormat _format; - VkImageTiling _tiling; - VkImageLayout _layout = VK_IMAGE_LAYOUT_UNDEFINED; - std::uint32_t _width = 0; - std::uint32_t _height = 0; + VkFormat m_format; + VkImageTiling m_tiling; + VkImageLayout m_layout = VK_IMAGE_LAYOUT_UNDEFINED; + std::uint32_t m_width = 0; + std::uint32_t m_height = 0; }; } diff --git a/runtime/Includes/Renderer/Images/Texture.h b/runtime/Includes/Renderer/Images/Texture.h index 8d497db..7c88e17 100644 --- a/runtime/Includes/Renderer/Images/Texture.h +++ b/runtime/Includes/Renderer/Images/Texture.h @@ -1,22 +1,22 @@ /* ************************************************************************** */ /* */ /* ::: :::::::: */ -/* texture.h :+: :+: :+: */ +/* Texture.h :+: :+: :+: */ /* +:+ +:+ +:+ */ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/03/08 02:24:58 by maldavid #+# #+# */ -/* Updated: 2024/03/25 
19:09:56 by maldavid ### ########.fr */ +/* Updated: 2024/03/28 22:11:21 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __MLX_TEXTURE__ #define __MLX_TEXTURE__ -#include -#include -#include -#include +#include +#include +#include +#include namespace mlx { @@ -25,39 +25,39 @@ namespace mlx public: Texture() = default; - void create(std::uint8_t* pixels, std::uint32_t width, std::uint32_t height, VkFormat format, const char* name, bool dedicated_memory = false); - void render(std::array& sets, class Renderer& renderer, int x, int y); - void destroy() noexcept override; + void Create(std::uint8_t* pixels, std::uint32_t width, std::uint32_t height, VkFormat format, const char* name, bool dedicated_memory = false); + void Render(std::array& sets, class Renderer& renderer, int x, int y); + void Destroy() noexcept override; - void setPixel(int x, int y, std::uint32_t color) noexcept; - int getPixel(int x, int y) noexcept; + void SetPixel(int x, int y, std::uint32_t color) noexcept; + int GetPixel(int x, int y) noexcept; - inline void setDescriptor(DescriptorSet&& set) noexcept { _set = set; } - inline VkDescriptorSet getSet() noexcept { return _set.isInit() ? _set.get() : VK_NULL_HANDLE; } - inline void updateSet(int binding) noexcept { _set.writeDescriptor(binding, *this); _has_set_been_updated = true; } - inline bool hasBeenUpdated() const noexcept { return _has_set_been_updated; } - inline constexpr void resetUpdate() noexcept { _has_set_been_updated = false; } + inline void SetDescriptor(DescriptorSet&& set) noexcept { m_set = set; } + inline VkDescriptorSet GetSet() noexcept { return m_set.isInit() ? m_set.get() : VK_NULL_HANDLE; } + inline void UpdateSet(int binding) noexcept { m_set.writeDescriptor(binding, *this); m_has_set_been_updated = true; } + inline bool HasBeenUpdated() const noexcept { return m_has_set_been_updated; } + inline constexpr void ResetUpdate() noexcept { m_has_set_been_updated = false; } ~Texture() = default; private: - void openCPUmap(); + void OpenCPUmap(); private: - C_VBO _vbo; - C_IBO _ibo; + ConstantVertexBuffer m_vbo; + ConstantIndexBuffer m_ibo; #ifdef DEBUG - std::string _name; + std::string m_name; #endif - DescriptorSet _set; - std::vector _cpu_map; - std::optional _buf_map = std::nullopt; - void* _map = nullptr; - bool _has_been_modified = false; - bool _has_set_been_updated = false; + DescriptorSet m_set; + std::vector m_cpu_map; + std::optional m_buf_map = std::nullopt; + void* m_map = nullptr; + bool m_has_been_modified = false; + bool m_has_set_been_updated = false; }; - Texture stbTextureLoad(std::filesystem::path file, int* w, int* h); + Texture StbTextureLoad(std::filesystem::path file, int* w, int* h); } #endif diff --git a/runtime/Includes/Renderer/Images/TextureAtlas.h b/runtime/Includes/Renderer/Images/TextureAtlas.h index e2e310b..dc53e3c 100644 --- a/runtime/Includes/Renderer/Images/TextureAtlas.h +++ b/runtime/Includes/Renderer/Images/TextureAtlas.h @@ -1,19 +1,19 @@ /* ************************************************************************** */ /* */ /* ::: :::::::: */ -/* texture_atlas.h :+: :+: :+: */ +/* TextureAtlas.h :+: :+: :+: */ /* +:+ +:+ +:+ */ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/04/07 16:36:33 by maldavid #+# #+# */ -/* Updated: 2024/03/25 19:09:50 by maldavid ### ########.fr */ +/* Updated: 2024/03/28 22:12:13 by maldavid ### ########.fr */ /* */ /* 
************************************************************************** */ #ifndef __MLX_TEXTURE_ATLAS__ #define __MLX_TEXTURE_ATLAS__ -#include +#include namespace mlx { @@ -22,22 +22,22 @@ namespace mlx public: TextureAtlas() = default; - void create(std::uint8_t* pixels, std::uint32_t width, std::uint32_t height, VkFormat format, const char* name, bool dedicated_memory = false); - void render(class Renderer& renderer, int x, int y, std::uint32_t ibo_size) const; - void destroy() noexcept override; + void Create(std::uint8_t* pixels, std::uint32_t width, std::uint32_t height, VkFormat format, const char* name, bool dedicated_memory = false); + void Render(class Renderer& renderer, int x, int y, std::uint32_t ibo_size) const; + void Destroy() noexcept override; - inline void setDescriptor(DescriptorSet&& set) noexcept { _set = set; } - inline VkDescriptorSet getVkSet() noexcept { return _set.isInit() ? _set.get() : VK_NULL_HANDLE; } - inline DescriptorSet getSet() noexcept { return _set; } - inline void updateSet(int binding) noexcept { _set.writeDescriptor(binding, *this); _has_been_updated = true; } - inline bool hasBeenUpdated() const noexcept { return _has_been_updated; } - inline constexpr void resetUpdate() noexcept { _has_been_updated = false; } + inline void SetDescriptor(DescriptorSet&& set) noexcept { m_set = set; } + inline VkDescriptorSet GetVkSet() noexcept { return m_set.isInit() ? m_set.get() : VK_NULL_HANDLE; } + inline DescriptorSet GetSet() noexcept { return m_set; } + inline void UpdateSet(int binding) noexcept { m_set.writeDescriptor(binding, *this); m_has_been_updated = true; } + inline bool HasBeenUpdated() const noexcept { return m_has_been_updated; } + inline constexpr void ResetUpdate() noexcept { m_has_been_updated = false; } ~TextureAtlas() = default; private: - DescriptorSet _set; - bool _has_been_updated = false; + DescriptorSet m_set; + bool m_has_been_updated = false; }; } diff --git a/runtime/Includes/Renderer/Images/TextureDescriptor.h b/runtime/Includes/Renderer/Images/TextureDescriptor.h index f3a2663..56683a9 100644 --- a/runtime/Includes/Renderer/Images/TextureDescriptor.h +++ b/runtime/Includes/Renderer/Images/TextureDescriptor.h @@ -1,43 +1,43 @@ /* ************************************************************************** */ /* */ /* ::: :::::::: */ -/* texture_descriptor.h :+: :+: :+: */ +/* TextureDescriptor.h :+: :+: :+: */ /* +:+ +:+ +:+ */ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2024/01/11 01:00:13 by maldavid #+# #+# */ -/* Updated: 2024/01/11 01:21:52 by maldavid ### ########.fr */ +/* Updated: 2024/03/28 22:13:23 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __MLX_TEXTURE_DESCRIPTOR__ #define __MLX_TEXTURE_DESCRIPTOR__ -#include -#include -#include +#include +#include +#include namespace mlx { struct TextureRenderDescriptor : public DrawableResource { - Texture* texture; + NonOwningPtr texture; int x; int y; - TextureRenderDescriptor(Texture* _texture, int _x, int _y) : texture(_texture), x(_x), y(_y) {} + TextureRenderDescriptor(NonOwningPtr _texture, int _x, int _y) : texture(_texture), x(_x), y(_y) {} inline bool operator==(const TextureRenderDescriptor& rhs) const { return texture == rhs.texture && x == rhs.x && y == rhs.y; } - inline void render(std::array& sets, class Renderer& renderer) override + inline void Render(std::array& sets, class Renderer& renderer) override { - if(!texture->isInit()) + if(!texture->IsInit()) return; - 
texture->render(sets, renderer, x, y); + texture->Render(sets, renderer, x, y); } - inline void resetUpdate() override + inline void ResetUpdate() override { - if(!texture->isInit()) + if(!texture->IsInit()) return; - texture->resetUpdate(); + texture->ResetUpdate(); } }; } @@ -50,7 +50,7 @@ namespace std std::size_t operator()(const mlx::TextureRenderDescriptor& d) const noexcept { std::size_t hash = 0; - mlx::hashCombine(hash, d.texture, d.x, d.y); + mlx::HashCombine(hash, d.texture, d.x, d.y); return hash; } }; diff --git a/runtime/Includes/Renderer/Images/TextureManager.h b/runtime/Includes/Renderer/Images/TextureManager.h index c667cbd..e9f655d 100644 --- a/runtime/Includes/Renderer/Images/TextureManager.h +++ b/runtime/Includes/Renderer/Images/TextureManager.h @@ -1,20 +1,19 @@ /* ************************************************************************** */ /* */ /* ::: :::::::: */ -/* texture_manager.h :+: :+: :+: */ +/* TextureManager.h :+: :+: :+: */ /* +:+ +:+ +:+ */ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2024/01/11 00:56:15 by maldavid #+# #+# */ -/* Updated: 2024/03/25 19:09:45 by maldavid ### ########.fr */ +/* Updated: 2024/04/03 16:24:51 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __MLX_TEXTURE_MANAGER__ #define __MLX_TEXTURE_MANAGER__ -#include -#include +#include namespace mlx { @@ -23,19 +22,19 @@ namespace mlx public: TextureManager() = default; - inline void clear() { _texture_descriptors.clear(); } + inline void Clear() { m_texture_descriptors.clear(); } - inline std::pair registerTexture(Texture* texture, int x, int y) + inline std::pair, bool> RegisterTexture(NonOwningPtr texture, int x, int y) { MLX_PROFILE_FUNCTION(); - auto res = _texture_descriptors.emplace(texture, x, y); + auto res = m_texture_descriptors.emplace(texture, x, y); return std::make_pair(static_cast(&const_cast(*res.first)), res.second); } - inline bool isTextureKnown(Texture* texture) noexcept + inline bool IsTextureKnown(NonOwningPtr texture) noexcept { MLX_PROFILE_FUNCTION(); - for(const auto& desc : _texture_descriptors) + for(const auto& desc : m_texture_descriptors) { if(desc.texture == texture) return true; @@ -43,13 +42,13 @@ namespace mlx return false; } - inline void eraseTextures(Texture* texture) + inline void EraseTextures(NonOwningPtr texture) { MLX_PROFILE_FUNCTION(); - for(auto it = _texture_descriptors.begin(); it != _texture_descriptors.end();) + for(auto it = m_texture_descriptors.begin(); it != m_texture_descriptors.end();) { if(it->texture == texture) - it = _texture_descriptors.erase(it); + it = m_texture_descriptors.erase(it); else ++it; } @@ -58,7 +57,7 @@ namespace mlx ~TextureManager() = default; private: - std::unordered_set _texture_descriptors; + std::unordered_set m_texture_descriptors; }; } diff --git a/runtime/Includes/Renderer/Pipelines/Pipeline.h b/runtime/Includes/Renderer/Pipelines/Pipeline.h index 304a58d..7cd8ffd 100644 --- a/runtime/Includes/Renderer/Pipelines/Pipeline.h +++ b/runtime/Includes/Renderer/Pipelines/Pipeline.h @@ -1,19 +1,19 @@ /* ************************************************************************** */ /* */ /* ::: :::::::: */ -/* pipeline.h :+: :+: :+: */ +/* Pipeline.h :+: :+: :+: */ /* +:+ +:+ +:+ */ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/12/18 21:23:52 by maldavid #+# #+# */ -/* Updated: 2024/03/25 19:09:01 by maldavid ### ########.fr */ +/* Updated: 2024/03/28 22:15:38 by maldavid ### ########.fr */ 
/* */ /* ************************************************************************** */ #ifndef __PIPELINE__ #define __PIPELINE__ -#include +#include namespace mlx { @@ -21,16 +21,16 @@ namespace mlx { public: void init(class Renderer& renderer); - void destroy() noexcept; + void Destroy() noexcept; - inline void bindPipeline(CmdBuffer& command_buffer) noexcept { vkCmdBindPipeline(command_buffer.get(), VK_PIPELINE_BIND_POINT_GRAPHICS, _graphics_pipeline); } + inline void BindPipeline(CommandBuffer& command_buffer) noexcept { vkCmdBindPipeline(command_buffer.Get(), VK_PIPELINE_BIND_POINT_GRAPHICS, m_graphics_pipeline); } - inline const VkPipeline& getPipeline() const noexcept { return _graphics_pipeline; } - inline const VkPipelineLayout& getPipelineLayout() const noexcept { return _pipeline_layout; } + inline const VkPipeline& GetPipeline() const noexcept { return m_graphics_pipeline; } + inline const VkPipelineLayout& GetPipelineLayout() const noexcept { return m_pipeline_layout; } private: - VkPipeline _graphics_pipeline = VK_NULL_HANDLE; - VkPipelineLayout _pipeline_layout = VK_NULL_HANDLE; + VkPipeline m_graphics_pipeline = VK_NULL_HANDLE; + VkPipelineLayout m_pipeline_layout = VK_NULL_HANDLE; }; } diff --git a/runtime/Includes/Renderer/PixelPut.h b/runtime/Includes/Renderer/PixelPut.h index b084d82..9101643 100644 --- a/runtime/Includes/Renderer/PixelPut.h +++ b/runtime/Includes/Renderer/PixelPut.h @@ -1,20 +1,20 @@ /* ************************************************************************** */ /* */ /* ::: :::::::: */ -/* pixel_put.h :+: :+: :+: */ +/* PixelPut.h :+: :+: :+: */ /* +:+ +:+ +:+ */ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/03/31 13:18:50 by maldavid #+# #+# */ -/* Updated: 2024/03/25 19:07:56 by maldavid ### ########.fr */ +/* Updated: 2024/03/28 22:28:46 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __MLX_PIXEL_PUT__ #define __MLX_PIXEL_PUT__ -#include -#include +#include +#include namespace mlx { @@ -23,25 +23,25 @@ namespace mlx public: PixelPutPipeline() = default; - void init(std::uint32_t width, std::uint32_t height, class Renderer& renderer) noexcept; + void Init(std::uint32_t width, std::uint32_t height, class Renderer& renderer) noexcept; - void setPixel(int x, int y, std::uint32_t color) noexcept; - void render(std::array& sets, class Renderer& renderer) noexcept; + void SetPixel(int x, int y, std::uint32_t color) noexcept; + void Render(std::array& sets, class Renderer& renderer) noexcept; - void clear(); - void destroy() noexcept; + void Clear(); + void Destroy() noexcept; ~PixelPutPipeline(); private: - Texture _texture; - Buffer _buffer; + Texture m_texture; + Buffer m_buffer; // using vector as CPU map and not directly writting to mapped buffer to improve performances - std::vector _cpu_map; - void* _buffer_map = nullptr; - std::uint32_t _width = 0; - std::uint32_t _height = 0; - bool _has_been_modified = true; + std::vector m_cpu_map; + void* m_buffer_map = nullptr; + std::uint32_t m_width = 0; + std::uint32_t m_height = 0; + bool m_has_been_modified = true; }; } diff --git a/runtime/Includes/Renderer/Renderer.h b/runtime/Includes/Renderer/Renderer.h index bd347d2..4003a3e 100644 --- a/runtime/Includes/Renderer/Renderer.h +++ b/runtime/Includes/Renderer/Renderer.h @@ -1,35 +1,30 @@ /* ************************************************************************** */ /* */ /* ::: :::::::: */ -/* renderer.h :+: :+: :+: */ +/* Renderer.h :+: :+: :+: */ /* 
+:+ +:+ +:+ */ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/12/18 17:14:45 by maldavid #+# #+# */ -/* Updated: 2024/03/27 00:31:28 by maldavid ### ########.fr */ +/* Updated: 2024/03/28 22:36:05 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __RENDERER__ #define __RENDERER__ -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include - -#include -#include - -#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include namespace mlx { @@ -41,36 +36,36 @@ namespace mlx Vertex(glm::vec2 _pos, glm::vec4 _color, glm::vec2 _uv) : pos(std::move(_pos)), color(std::move(_color)), uv(std::move(_uv)) {} - static VkVertexInputBindingDescription getBindingDescription() + static VkVertexInputBindingDescription GetBindingDescription() { - VkVertexInputBindingDescription bindingDescription{}; - bindingDescription.binding = 0; - bindingDescription.stride = sizeof(Vertex); - bindingDescription.inputRate = VK_VERTEX_INPUT_RATE_VERTEX; + VkVertexInputBindingDescription binding_description{}; + binding_description.binding = 0; + binding_description.stride = sizeof(Vertex); + binding_description.inputRate = VK_VERTEX_INPUT_RATE_VERTEX; - return bindingDescription; + return binding_description; } - static std::array getAttributeDescriptions() + static std::array GetAttributeDescriptions() { - std::array attributeDescriptions; + std::array attribute_descriptions; - attributeDescriptions[0].binding = 0; - attributeDescriptions[0].location = 0; - attributeDescriptions[0].format = VK_FORMAT_R32G32_SFLOAT; - attributeDescriptions[0].offset = offsetof(Vertex, pos); + attribute_descriptions[0].binding = 0; + attribute_descriptions[0].location = 0; + attribute_descriptions[0].format = VK_FORMAT_R32G32_SFLOAT; + attribute_descriptions[0].offset = offsetof(Vertex, pos); - attributeDescriptions[1].binding = 0; - attributeDescriptions[1].location = 1; - attributeDescriptions[1].format = VK_FORMAT_R32G32B32A32_SFLOAT; - attributeDescriptions[1].offset = offsetof(Vertex, color); + attribute_descriptions[1].binding = 0; + attribute_descriptions[1].location = 1; + attribute_descriptions[1].format = VK_FORMAT_R32G32B32A32_SFLOAT; + attribute_descriptions[1].offset = offsetof(Vertex, color); - attributeDescriptions[2].binding = 0; - attributeDescriptions[2].location = 2; - attributeDescriptions[2].format = VK_FORMAT_R32G32_SFLOAT; - attributeDescriptions[2].offset = offsetof(Vertex, uv); + attribute_descriptions[2].binding = 0; + attribute_descriptions[2].location = 2; + attribute_descriptions[2].format = VK_FORMAT_R32G32_SFLOAT; + attribute_descriptions[2].offset = offsetof(Vertex, uv); - return attributeDescriptions; + return attribute_descriptions; } }; @@ -79,63 +74,63 @@ namespace mlx public: Renderer() = default; - void init(class Texture* render_target); + void Init(NonOwningPtr render_target); - bool beginFrame(); - void endFrame(); + bool BeginFrame(); + void EndFrame(); - void destroy(); + void Destroy(); - inline class Window* getWindow() { return _window; } - inline void setWindow(class Window* window) { _window = window; } + inline NonOwningPtr GetWindow() { return m_window; } + inline void SetWindow(NonOwningPtr window) { m_window = window; } - inline Surface& getSurface() noexcept { return _surface; } - inline CmdPool& getCmdPool() noexcept { return _cmd.getCmdPool(); } - inline UBO* 
getUniformBuffer() noexcept { return _uniform_buffer.get(); } - inline SwapChain& getSwapChain() noexcept { return _swapchain; } - inline Semaphore& getSemaphore(int i) noexcept { return _semaphores[i]; } - inline RenderPass& getRenderPass() noexcept { return _pass; } - inline GraphicPipeline& getPipeline() noexcept { return _pipeline; } - inline CmdBuffer& getCmdBuffer(int i) noexcept { return _cmd.getCmdBuffer(i); } - inline CmdBuffer& getActiveCmdBuffer() noexcept { return _cmd.getCmdBuffer(_current_frame_index); } - inline FrameBuffer& getFrameBuffer(int i) noexcept { return _framebuffers[i]; } - inline DescriptorSet& getVertDescriptorSet() noexcept { return _vert_set; } - inline DescriptorSet& getFragDescriptorSet() noexcept { return _frag_set; } - inline DescriptorSetLayout& getVertDescriptorSetLayout() noexcept { return _vert_layout; } - inline DescriptorSetLayout& getFragDescriptorSetLayout() noexcept { return _frag_layout; } - inline std::uint32_t getActiveImageIndex() noexcept { return _current_frame_index; } - inline std::uint32_t getImageIndex() noexcept { return _image_index; } + inline Surface& GetSurface() noexcept { return m_surface; } + inline CmdPool& GetCmdPool() noexcept { return m_cmd.GetCmdPool(); } + inline NonOwningPtr GetUniformBuffer() noexcept { return m_uniform_buffer.get(); } + inline SwapChain& GetSwapChain() noexcept { return m_swapchain; } + inline Semaphore& GetSemaphore(int i) noexcept { return m_semaphores[i]; } + inline RenderPass& GetRenderPass() noexcept { return m_pass; } + inline GraphicPipeline& GetPipeline() noexcept { return m_pipeline; } + inline CmdBuffer& GetCmdBuffer(int i) noexcept { return m_cmd.GetCmdBuffer(i); } + inline CmdBuffer& GetActiveCmdBuffer() noexcept { return m_cmd.GetCmdBuffer(m_current_frame_index); } + inline FrameBuffer& GetFrameBuffer(int i) noexcept { return m_framebuffers[i]; } + inline DescriptorSet& GetVertDescriptorSet() noexcept { return m_vert_set; } + inline DescriptorSet& GetFragDescriptorSet() noexcept { return m_frag_set; } + inline DescriptorSetLayout& GetVertDescriptorSetLayout() noexcept { return m_vert_layout; } + inline DescriptorSetLayout& GetFragDescriptorSetLayout() noexcept { return m_frag_layout; } + inline std::uint32_t GetActiveImageIndex() noexcept { return m_current_frame_index; } + inline std::uint32_t GetImageIndex() noexcept { return m_image_index; } - constexpr inline void requireFrameBufferResize() noexcept { _framebuffer_resized = true; } + constexpr inline void RequireFrameBufferResize() noexcept { m_framebuffer_resized = true; } ~Renderer() = default; private: - void recreateRenderData(); + void RecreateRenderData(); private: - GraphicPipeline _pipeline; - CmdManager _cmd; - RenderPass _pass; - Surface _surface; - SwapChain _swapchain; - std::array _semaphores; - std::vector _framebuffers; + GraphicPipeline m_pipeline; + CmdManager m_cmd; + RenderPass m_pass; + Surface m_surface; + SwapChain m_swapchain; + std::array m_semaphores; + std::vector m_framebuffers; - DescriptorSetLayout _vert_layout; - DescriptorSetLayout _frag_layout; + DescriptorSetLayout m_vert_layout; + DescriptorSetLayout m_frag_layout; - DescriptorSet _vert_set; - DescriptorSet _frag_set; + DescriptorSet m_vert_set; + DescriptorSet m_frag_set; - std::unique_ptr _uniform_buffer; + std::unique_ptr m_uniform_buffer; - class Window* _window = nullptr; - class Texture* _render_target = nullptr; + NonOwningPtr m_window; + NonOwningPtr m_render_target; - std::uint32_t _current_frame_index = 0; - std::uint32_t _image_index = 0; - 
bool _framebuffer_resized = false; + std::uint32_t m_current_frame_index = 0; + std::uint32_t m_image_index = 0; + bool m_framebuffer_resized = false; }; } diff --git a/runtime/Includes/Renderer/Renderpass/FrameBuffer.h b/runtime/Includes/Renderer/Renderpass/FrameBuffer.h index e12be0d..ec6abe5 100644 --- a/runtime/Includes/Renderer/Renderpass/FrameBuffer.h +++ b/runtime/Includes/Renderer/Renderpass/FrameBuffer.h @@ -1,12 +1,12 @@ /* ************************************************************************** */ /* */ /* ::: :::::::: */ -/* vk_framebuffer.h :+: :+: :+: */ +/* FrameBuffer.h :+: :+: :+: */ /* +:+ +:+ +:+ */ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/06 18:19:44 by maldavid #+# #+# */ -/* Updated: 2024/03/25 19:08:37 by maldavid ### ########.fr */ +/* Updated: 2024/03/28 22:16:02 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -18,18 +18,18 @@ namespace mlx class FrameBuffer { public: - void init(class RenderPass& renderpass, class Image& image); - void destroy() noexcept; + void Init(class RenderPass& renderpass, class Image& image); + void Destroy() noexcept; - inline VkFramebuffer& operator()() noexcept { return _framebuffer; } - inline VkFramebuffer& get() noexcept { return _framebuffer; } - inline std::uint32_t getWidth() const noexcept { return _width; } - inline std::uint32_t getHeight() const noexcept { return _height; } + inline VkFramebuffer& operator()() noexcept { return m_framebuffer; } + inline VkFramebuffer& Get() noexcept { return m_framebuffer; } + inline std::uint32_t GetWidth() const noexcept { return m_width; } + inline std::uint32_t GetHeight() const noexcept { return m_height; } private: - VkFramebuffer _framebuffer = VK_NULL_HANDLE; - std::uint32_t _width = 0; - std::uint32_t _height = 0; + VkFramebuffer m_framebuffer = VK_NULL_HANDLE; + std::uint32_t m_width = 0; + std::uint32_t m_height = 0; }; } diff --git a/runtime/Includes/Renderer/Renderpass/RenderPass.h b/runtime/Includes/Renderer/Renderpass/RenderPass.h index 724c920..18c6c03 100644 --- a/runtime/Includes/Renderer/Renderpass/RenderPass.h +++ b/runtime/Includes/Renderer/Renderpass/RenderPass.h @@ -1,12 +1,12 @@ /* ************************************************************************** */ /* */ /* ::: :::::::: */ -/* vk_render_pass.h :+: :+: :+: */ +/* RenderPass.h :+: :+: :+: */ /* +:+ +:+ +:+ */ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/06 18:22:00 by maldavid #+# #+# */ -/* Updated: 2024/03/25 19:08:30 by maldavid ### ########.fr */ +/* Updated: 2024/03/28 22:16:44 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -18,18 +18,18 @@ namespace mlx class RenderPass { public: - void init(VkFormat attachement_format, VkImageLayout layout); - void destroy() noexcept; + void Init(VkFormat attachement_format, VkImageLayout layout); + void Destroy() noexcept; - void begin(class CmdBuffer& cmd, class FrameBuffer& fb); - void end(class CmdBuffer& cmd); + void Begin(class CmommandBuffer& cmd, class FrameBuffer& fb); + void End(class CommandBuffer& cmd); - inline VkRenderPass& operator()() noexcept { return _render_pass; } - inline VkRenderPass& get() noexcept { return _render_pass; } + inline VkRenderPass& operator()() noexcept { return m_render_pass; } + inline VkRenderPass& Get() noexcept { return m_render_pass; } private: - VkRenderPass _render_pass = VK_NULL_HANDLE; - bool _is_running = false; + 
VkRenderPass m_render_pass = VK_NULL_HANDLE; + bool m_is_running = false; }; } diff --git a/runtime/Includes/Renderer/Renderpass/Swapchain.h b/runtime/Includes/Renderer/Renderpass/Swapchain.h index d68072e..f9c8054 100644 --- a/runtime/Includes/Renderer/Renderpass/Swapchain.h +++ b/runtime/Includes/Renderer/Renderpass/Swapchain.h @@ -1,19 +1,19 @@ /* ************************************************************************** */ /* */ /* ::: :::::::: */ -/* vk_swapchain.h :+: :+: :+: */ +/* Swapchain.h :+: :+: :+: */ /* +:+ +:+ +:+ */ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/06 18:23:27 by maldavid #+# #+# */ -/* Updated: 2024/03/25 19:08:26 by maldavid ### ########.fr */ +/* Updated: 2024/03/28 22:18:15 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __MLX_VK_SWAPCHAIN__ #define __MLX_VK_SWAPCHAIN__ -#include +#include namespace mlx { @@ -34,31 +34,31 @@ namespace mlx public: SwapChain() = default; - void init(class Renderer* renderer); - void recreate(); - void destroy() noexcept; + void Init(NonOwningPtr renderer); + void Recreate(); + void Destroy() noexcept; - SwapChainSupportDetails querySwapChainSupport(VkPhysicalDevice device); - VkExtent2D chooseSwapExtent(const VkSurfaceCapabilitiesKHR& capabilities); - VkPresentModeKHR chooseSwapPresentMode([[maybe_unused]] const std::vector &availablePresentModes); + SwapChainSupportDetails QuerySwapChainSupport(VkPhysicalDevice device); + VkExtent2D ChooseSwapExtent(const VkSurfaceCapabilitiesKHR& capabilities); + VkPresentModeKHR ChooseSwapPresentMode([[maybe_unused]] const std::vector &available_present_modes); - inline VkSwapchainKHR get() noexcept { return _swapchain; } - inline VkSwapchainKHR operator()() noexcept { return _swapchain; } - inline std::size_t getImagesNumber() const noexcept { return _images.size(); } - inline Image& getImage(std::size_t i) noexcept { return _images[i]; } - inline SwapChainSupportDetails getSupport() noexcept { return _swapchain_support; } - inline VkExtent2D getExtent() noexcept { return _extent; } - inline VkFormat getImagesFormat() const noexcept { return _swapchain_image_format; } + inline VkSwapchainKHR Get() noexcept { return m_swapchain; } + inline VkSwapchainKHR operator()() noexcept { return m_swapchain; } + inline std::size_t GetImagesNumber() const noexcept { return m_images.size(); } + inline Image& GetImage(std::size_t i) noexcept { return m_images[i]; } + inline SwapChainSupportDetails GetSupport() noexcept { return m_swapchain_support; } + inline VkExtent2D GetExtent() noexcept { return m_extent; } + inline VkFormat GetImagesFormat() const noexcept { return m_swapchain_image_format; } ~SwapChain() = default; private: - SwapChainSupportDetails _swapchain_support; - VkSwapchainKHR _swapchain; - std::vector _images; - VkFormat _swapchain_image_format; - VkExtent2D _extent; - class Renderer* _renderer = nullptr; + SwapChainSupportDetails m_swapchain_support; + VkSwapchainKHR m_swapchain; + std::vector m_images; + VkFormat m_swapchain_image_format; + VkExtent2D m_extent; + NonOwningPtr m_renderer; }; } diff --git a/runtime/Includes/Renderer/Texts/Font.h b/runtime/Includes/Renderer/Texts/Font.h index c1c48df..ebf873f 100644 --- a/runtime/Includes/Renderer/Texts/Font.h +++ b/runtime/Includes/Renderer/Texts/Font.h @@ -1,52 +1,54 @@ /* ************************************************************************** */ /* */ /* ::: :::::::: */ -/* font.h :+: :+: :+: */ +/* Font.h :+: :+: :+: */ /* +:+ 
+:+ +:+ */ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/12/11 21:17:04 by kbz_8 #+# #+# */ -/* Updated: 2024/03/25 19:08:21 by maldavid ### ########.fr */ +/* Updated: 2024/03/28 22:19:39 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __MLX_FONT__ #define __MLX_FONT__ -#include -#include +#include +#include namespace mlx { class Font { friend class FontLibrary; + public: Font() = delete; Font(class Renderer& renderer, const std::filesystem::path& path, float scale); Font(class Renderer& renderer, const std::string& name, const std::vector& ttf_data, float scale); - inline const std::string& getName() const { return _name; } - inline float getScale() const noexcept { return _scale; } - inline const std::array& getCharData() const { return _cdata; } - inline const TextureAtlas& getAtlas() const noexcept { return _atlas; } - inline bool operator==(const Font& rhs) const { return rhs._name == _name && rhs._scale == _scale; } - inline bool operator!=(const Font& rhs) const { return rhs._name != _name || rhs._scale != _scale; } - void destroy(); + inline const std::string& GetName() const { return m_name; } + inline float GetScale() const noexcept { return m_scale; } + inline const std::array& GetCharData() const { return m_cdata; } + inline const TextureAtlas& GetAtlas() const noexcept { return m_atlas; } + inline bool operator==(const Font& rhs) const { return rhs._name == m_name && rhs._scale == m_scale; } + inline bool operator!=(const Font& rhs) const { return rhs._name != m_name || rhs._scale != m_scale; } + + void Destroy(); ~Font(); private: - void buildFont(); + void BuildFont(); private: - std::array _cdata; - TextureAtlas _atlas; - std::variant> _build_data; - std::string _name; - class Renderer& _renderer; - float _scale = 0; - bool _is_init = false; + std::array m_cdata; + TextureAtlas m_atlas; + std::variant> m_build_data; + std::string m_name; + class Renderer& m_renderer; + float m_scale = 0; + bool m_is_init = false; }; } diff --git a/runtime/Includes/Renderer/Texts/FontLibrary.h b/runtime/Includes/Renderer/Texts/FontLibrary.h index ddd15ad..433c5f5 100644 --- a/runtime/Includes/Renderer/Texts/FontLibrary.h +++ b/runtime/Includes/Renderer/Texts/FontLibrary.h @@ -1,21 +1,21 @@ /* ************************************************************************** */ /* */ /* ::: :::::::: */ -/* font_library.h :+: :+: :+: */ +/* FontLibrary.h :+: :+: :+: */ /* +:+ +:+ +:+ */ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2024/01/18 09:26:03 by maldavid #+# #+# */ -/* Updated: 2024/03/25 19:08:18 by maldavid ### ########.fr */ +/* Updated: 2024/03/28 22:21:53 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __MLX_FONT_LIBRARY__ #define __MLX_FONT_LIBRARY__ -#include -#include -#include +#include +#include +#include namespace mlx { @@ -27,20 +27,20 @@ namespace mlx friend class Singleton; public: - std::shared_ptr getFontData(FontID id); - FontID addFontToLibrary(std::shared_ptr font); - void removeFontFromLibrary(FontID id); + std::shared_ptr GetFontData(FontID id); + FontID AddFontToLibrary(std::shared_ptr font); + void RemoveFontFromLibrary(FontID id); - void clearLibrary(); + void ClearLibrary(); private: FontLibrary() = default; ~FontLibrary() = default; private: - std::unordered_map> _cache; - std::vector _invalid_ids; - FontID _current_id = 1; + std::unordered_map> m_cache; + std::vector 
m_invalid_ids; + FontID m_current_id = 1; }; } diff --git a/runtime/Includes/Renderer/Texts/Text.h b/runtime/Includes/Renderer/Texts/Text.h index c30bbb9..07b6d10 100644 --- a/runtime/Includes/Renderer/Texts/Text.h +++ b/runtime/Includes/Renderer/Texts/Text.h @@ -1,22 +1,22 @@ /* ************************************************************************** */ /* */ /* ::: :::::::: */ -/* text.h :+: :+: :+: */ +/* Text.h :+: :+: :+: */ /* +:+ +:+ +:+ */ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2024/01/11 00:09:04 by maldavid #+# #+# */ -/* Updated: 2024/03/25 19:08:15 by maldavid ### ########.fr */ +/* Updated: 2024/03/28 22:23:50 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __MLX_TEXT__ #define __MLX_TEXT__ -#include -#include -#include -#include +#include +#include +#include +#include namespace mlx { @@ -25,24 +25,24 @@ namespace mlx public: Text() = default; - void init(std::string text, FontID font, std::uint32_t color, std::vector vbo_data, std::vector ibo_data); - void bind(class Renderer& renderer) noexcept; - inline FontID getFontInUse() const noexcept { return _font; } - void updateVertexData(int frame, std::vector vbo_data); - inline std::uint32_t getIBOsize() noexcept { return _ibo.getSize(); } - inline const std::string& getText() const { return _text; } - inline std::uint32_t getColor() const noexcept { return _color; } - void destroy() noexcept; + void Init(std::string text, FontID font, std::uint32_t color, std::vector vbo_data, std::vector ibo_data); + void Bind(class Renderer& renderer) noexcept; + inline FontID GetFontInUse() const noexcept { return m_font; } + void UpdateVertexData(int frame, std::vector vbo_data); + inline std::uint32_t GetIBOsize() noexcept { return m_ibo.GetSize(); } + inline const std::string& GetText() const { return m_text; } + inline std::uint32_t GetColor() const noexcept { return m_color; } + void Destroy() noexcept; ~Text(); private: - std::array _vbo; - C_IBO _ibo; - std::string _text; - std::uint32_t _color; - FontID _font = nullfont; - bool _is_init = false; + std::array m_vbo; + ConstantIndexBuffer m_ibo; + std::string m_text; + std::uint32_t m_color; + FontID m_font = nullfont; + bool m_is_init = false; }; } diff --git a/runtime/Includes/Renderer/Texts/TextDescriptor.h b/runtime/Includes/Renderer/Texts/TextDescriptor.h index 25a4335..87496ec 100644 --- a/runtime/Includes/Renderer/Texts/TextDescriptor.h +++ b/runtime/Includes/Renderer/Texts/TextDescriptor.h @@ -1,22 +1,22 @@ /* ************************************************************************** */ /* */ /* ::: :::::::: */ -/* text_descriptor.h :+: :+: :+: */ +/* TextDescriptor.h :+: :+: :+: */ /* +:+ +:+ +:+ */ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2024/01/11 00:13:34 by maldavid #+# #+# */ -/* Updated: 2024/03/25 19:08:11 by maldavid ### ########.fr */ +/* Updated: 2024/03/28 22:25:09 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __MLX_TEXT_DESCRIPTOR__ #define __MLX_TEXT_DESCRIPTOR__ -#include -#include -#include -#include +#include +#include +#include +#include namespace mlx { @@ -33,15 +33,15 @@ namespace mlx public: TextDrawDescriptor(std::string text, std::uint32_t _color, int _x, int _y); - void init(FontID font) noexcept; - bool operator==(const TextDrawDescriptor& rhs) const { return _text == rhs._text && x == rhs.x && y == rhs.y && color == rhs.color; } - void 
render(std::array& sets, Renderer& renderer) override; - void resetUpdate() override; + void Init(FontID font) noexcept; + bool operator==(const TextDrawDescriptor& rhs) const { return m_text == rhs.m_text && x == rhs.x && y == rhs.y && color == rhs.color; } + void Render(std::array& sets, Renderer& renderer) override; + void ResetUpdate() override; TextDrawDescriptor() = default; private: - std::string _text; + std::string m_text; }; } @@ -53,7 +53,7 @@ namespace std std::size_t operator()(const mlx::TextDrawDescriptor& d) const noexcept { std::size_t hash = 0; - mlx::hashCombine(hash, d.x, d.y, d.color, d._text); + mlx::HashCombine(hash, d.x, d.y, d.color, d.m_text); return hash; } }; diff --git a/runtime/Includes/Renderer/Texts/TextLibrary.h b/runtime/Includes/Renderer/Texts/TextLibrary.h index 598cef9..63385ea 100644 --- a/runtime/Includes/Renderer/Texts/TextLibrary.h +++ b/runtime/Includes/Renderer/Texts/TextLibrary.h @@ -1,23 +1,23 @@ /* ************************************************************************** */ /* */ /* ::: :::::::: */ -/* text_library.h :+: :+: :+: */ +/* TextLibrary.h :+: :+: :+: */ /* +:+ +:+ +:+ */ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/04/10 11:52:30 by maldavid #+# #+# */ -/* Updated: 2024/03/25 19:08:03 by maldavid ### ########.fr */ +/* Updated: 2024/03/28 22:26:10 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __MLX_TEXT_LIBRARY__ #define __MLX_TEXT_LIBRARY__ -#include -#include -#include -#include -#include +#include +#include +#include +#include +#include namespace mlx { @@ -29,19 +29,19 @@ namespace mlx friend class Singleton; public: - std::shared_ptr getTextData(TextID id); - TextID addTextToLibrary(std::shared_ptr text); - void removeTextFromLibrary(TextID id); + std::shared_ptr GetTextData(TextID id); + TextID AddTextToLibrary(std::shared_ptr text); + void RemoveTextFromLibrary(TextID id); - void clearLibrary(); + void ClearLibrary(); private: TextLibrary() = default; ~TextLibrary() = default; private: - std::unordered_map> _cache; - TextID _current_id = 1; + std::unordered_map> m_cache; + TextID m_current_id = 1; }; } diff --git a/runtime/Includes/Renderer/Texts/TextManager.h b/runtime/Includes/Renderer/Texts/TextManager.h index 1c4cec0..a2d57db 100644 --- a/runtime/Includes/Renderer/Texts/TextManager.h +++ b/runtime/Includes/Renderer/Texts/TextManager.h @@ -1,23 +1,23 @@ /* ************************************************************************** */ /* */ /* ::: :::::::: */ -/* text_manager.h :+: :+: :+: */ +/* TextManager.h :+: :+: :+: */ /* +:+ +:+ +:+ */ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/04/06 16:24:11 by maldavid #+# #+# */ -/* Updated: 2024/03/25 19:08:00 by maldavid ### ########.fr */ +/* Updated: 2024/03/28 22:27:32 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __MLX_TEXT_MANAGER__ #define __MLX_TEXT_MANAGER__ -#include -#include -#include -#include -#include +#include +#include +#include +#include +#include namespace mlx { @@ -26,17 +26,17 @@ namespace mlx public: TextManager() = default; - void init(Renderer& renderer) noexcept; - std::pair registerText(int x, int y, std::uint32_t color, std::string str); - inline void clear() { _text_descriptors.clear(); } - void loadFont(Renderer& renderer, const std::filesystem::path& filepath, float scale); - void destroy() noexcept; + void Init(Renderer& renderer) noexcept; + 
std::pair, bool> RegisterText(int x, int y, std::uint32_t color, std::string str); + inline void Clear() { m_text_descriptors.clear(); } + void LoadFont(Renderer& renderer, const std::filesystem::path& filepath, float scale); + void Destroy() noexcept; ~TextManager() = default; private: - std::unordered_set _text_descriptors; - FontID _font_in_use = nullfont; + std::unordered_set m_text_descriptors; + FontID m_font_in_use = nullfont; }; } diff --git a/runtime/Sources/Core/Application.cpp b/runtime/Sources/Core/Application.cpp index 8e22407..ab2a4aa 100644 --- a/runtime/Sources/Core/Application.cpp +++ b/runtime/Sources/Core/Application.cpp @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/04 22:10:52 by maldavid #+# #+# */ -/* Updated: 2024/03/27 17:43:18 by maldavid ### ########.fr */ +/* Updated: 2024/04/02 17:06:34 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -17,9 +17,7 @@ #include #include #include -#include -#include -#include +#include #include namespace mlx::core @@ -32,11 +30,6 @@ namespace mlx::core }, "__internal_application" }); _fps.init(); - glfwSetErrorCallback([]([[maybe_unused]] int code, const char* desc) - { - error::report(e_kind::fatal_error, "GLFW error : %s", desc); - }); - glfwInit(); } void Application::run() noexcept @@ -111,6 +104,5 @@ namespace mlx::core { TextLibrary::get().clearLibrary(); FontLibrary::get().clearLibrary(); - glfwTerminate(); } } From f8a856db1cfcc1ad8a53a6886ebe6228be0d015b Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Tue, 23 Apr 2024 20:59:50 +0200 Subject: [PATCH 009/131] working on code refactor --- includes/mlx_profile.h | 16 +- runtime/Includes/Core/Application.h | 7 +- runtime/Includes/Core/Application.inl | 15 +- runtime/Includes/Core/Graphics.h | 8 +- .../{ImagesManager.h => ImagesRegistry.h} | 21 +- runtime/Includes/Core/ImagesRegistry.inl | 25 ++ runtime/Includes/PreCompiled.h | 6 +- runtime/Includes/Renderer/Buffers/Buffer.h | 6 +- .../Includes/Renderer/Buffers/UniformBuffer.h | 4 +- .../Includes/Renderer/Command/CommandBuffer.h | 6 +- .../Includes/Renderer/Core/DrawableResource.h | 4 +- runtime/Includes/Renderer/Core/Instance.h | 5 +- .../Includes/Renderer/Core/ValidationLayers.h | 6 +- .../Renderer/Descriptors/DescriptorPool.h | 7 +- .../Descriptors/DescriptorPoolManager.h | 4 +- .../Renderer/Descriptors/DescriptorSet.h | 7 +- .../Descriptors/DescriptorSetLayout.h | 2 +- runtime/Includes/Renderer/Images/Image.h | 4 +- runtime/Includes/Renderer/Images/Texture.h | 10 +- runtime/Includes/Utils/NonOwningPtr.h | 14 +- runtime/Sources/Core/Application.cpp | 97 ++--- runtime/Sources/Core/Bridge.cpp | 122 +++--- runtime/Sources/Core/Graphics.cpp | 91 +++-- runtime/Sources/Core/Memory.cpp | 39 +- runtime/Sources/Core/Profiler.cpp | 64 ++-- runtime/Sources/Core/UUID.cpp | 6 +- runtime/Sources/Renderer/Buffers/Buffer.cpp | 135 +++---- .../Renderer/Buffers/UniformBuffer.cpp | 55 ++- .../Sources/Renderer/Buffers/VertexBuffer.cpp | 36 +- .../Renderer/Command/CommandBuffer.cpp | 353 +++++++++--------- .../Renderer/Command/CommandManager.cpp | 28 +- .../Sources/Renderer/Command/CommandPool.cpp | 30 +- .../Command/SingleTimeCommandManager.cpp | 48 +-- runtime/Sources/Renderer/Core/Device.cpp | 127 +++---- runtime/Sources/Renderer/Core/Fence.cpp | 48 ++- runtime/Sources/Renderer/Core/Instance.cpp | 82 ++-- runtime/Sources/Renderer/Core/Memory.cpp | 150 ++++---- runtime/Sources/Renderer/Core/Queues.cpp | 48 ++- 
runtime/Sources/Renderer/Core/RenderCore.cpp | 198 +++++----- runtime/Sources/Renderer/Core/Semaphore.cpp | 39 +- runtime/Sources/Renderer/Core/Surface.cpp | 30 +- .../Renderer/Core/ValidationLayers.cpp | 105 +++--- .../Renderer/Descriptors/DescriptorPool.cpp | 69 ++++ ..._manager.cpp => DescriptorPoolManager.cpp} | 30 +- .../Renderer/Descriptors/DescriptorSet.cpp | 111 ++++++ ...set_layout.cpp => DescriptorSetLayout.cpp} | 32 +- .../Descriptors/vk_descriptor_pool.cpp | 57 --- .../Descriptors/vk_descriptor_set.cpp | 131 ------- .../Images/{vk_image.cpp => Image.cpp} | 225 ++++++----- .../Images/{texture.cpp => Texture.cpp} | 14 +- .../{texture_atlas.cpp => TextureAtlas.cpp} | 0 .../Pipelines/{pipeline.cpp => Pipeline.cpp} | 0 xmake.lua | 4 +- 53 files changed, 1378 insertions(+), 1403 deletions(-) rename runtime/Includes/Core/{ImagesManager.h => ImagesRegistry.h} (57%) create mode 100644 runtime/Includes/Core/ImagesRegistry.inl create mode 100644 runtime/Sources/Renderer/Descriptors/DescriptorPool.cpp rename runtime/Sources/Renderer/Descriptors/{descriptor_pool_manager.cpp => DescriptorPoolManager.cpp} (60%) create mode 100644 runtime/Sources/Renderer/Descriptors/DescriptorSet.cpp rename runtime/Sources/Renderer/Descriptors/{vk_descriptor_set_layout.cpp => DescriptorSetLayout.cpp} (56%) delete mode 100644 runtime/Sources/Renderer/Descriptors/vk_descriptor_pool.cpp delete mode 100644 runtime/Sources/Renderer/Descriptors/vk_descriptor_set.cpp rename runtime/Sources/Renderer/Images/{vk_image.cpp => Image.cpp} (60%) rename runtime/Sources/Renderer/Images/{texture.cpp => Texture.cpp} (95%) rename runtime/Sources/Renderer/Images/{texture_atlas.cpp => TextureAtlas.cpp} (100%) rename runtime/Sources/Renderer/Pipelines/{pipeline.cpp => Pipeline.cpp} (100%) diff --git a/includes/mlx_profile.h b/includes/mlx_profile.h index 9dd8577..18ea4c1 100644 --- a/includes/mlx_profile.h +++ b/includes/mlx_profile.h @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/11/10 08:49:17 by maldavid #+# #+# */ -/* Updated: 2024/03/27 18:25:59 by maldavid ### ########.fr */ +/* Updated: 2024/04/23 18:28:12 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -148,10 +148,20 @@ #endif #endif +#include + +#define MLX_MAKE_VERSION(major, minor, patch) ((((uint32_t)(major)) << 22U) | (((uint32_t)(minor)) << 12U) | ((uint32_t)(patch))) + +#define MLX_VERSION_MAJOR(version) (((uint32_t)(version) >> 22U) & 0x7FU) +#define MLX_VERSION_MINOR(version) (((uint32_t)(version) >> 12U) & 0x3FFU) +#define MLX_VERSION_PATCH(version) ((uint32_t)(version) & 0xFFFU) + +#define MLX_VERSION MLX_MAKE_VERSION(2, 0, 0) +#define MLX_TARGET_VULKAN_API_VERSION MLX_MAKE_VERSION(1, 2, 0) + // Checking common assumptions #ifdef __cplusplus #include - #include static_assert(CHAR_BIT == 8, "CHAR_BIT is expected to be 8"); @@ -169,7 +179,6 @@ #include #endif #include - #include static_assert(CHAR_BIT == 8, "CHAR_BIT is expected to be 8"); @@ -186,7 +195,6 @@ #define STATIC_ASSERT(COND, MSG) typedef char static_assertion___##MSG[(COND)?1:-1] #include - #include STATIC_ASSERT(CHAR_BIT == 8, CHAR_BIT_is_expected_to_be_8); diff --git a/runtime/Includes/Core/Application.h b/runtime/Includes/Core/Application.h index 25d369e..316dc68 100644 --- a/runtime/Includes/Core/Application.h +++ b/runtime/Includes/Core/Application.h @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/04 21:49:46 by maldavid #+# #+# */ -/* Updated: 
2024/04/03 15:05:24 by maldavid ### ########.fr */ +/* Updated: 2024/04/21 20:39:33 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -16,6 +16,7 @@ #include #include #include +#include #include namespace mlx @@ -59,11 +60,11 @@ namespace mlx private: FpsManager m_fps; + Input m_in; DriverLoader m_driver_loader; - std::list m_textures; + ImageRegistry m_image_registry; std::vector> m_graphics; std::function f_loop_hook; - std::unique_ptr p_in; void* p_param = nullptr; }; } diff --git a/runtime/Includes/Core/Application.inl b/runtime/Includes/Core/Application.inl index 2457c5b..cd95318 100644 --- a/runtime/Includes/Core/Application.inl +++ b/runtime/Includes/Core/Application.inl @@ -1,12 +1,12 @@ /* ************************************************************************** */ /* */ /* ::: :::::::: */ -/* application.inl :+: :+: :+: */ +/* Application.inl :+: :+: :+: */ /* +:+ +:+ +:+ */ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/04 21:49:46 by maldavid #+# #+# */ -/* Updated: 2023/04/02 14:56:27 by maldavid ### ########.fr */ +/* Updated: 2024/04/23 14:45:07 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -30,12 +30,9 @@ Error("invalid image ptr (NULL)"); \ retval; \ } \ - else if(std::find_if(_textures.begin(), _textures.end(), [=](const Texture& texture) \ - { \ - return &texture == img; \ - }) == _textures.end()) \ + else if(m_image_registry.Find(img)) \ { \ - Error(e_kind::error, "invalid image ptr"); \ + Error("invalid image ptr"); \ retval; \ } else {} @@ -65,7 +62,7 @@ namespace mlx Warning("trying to add event hook for a window that is targeting an image and not a real window, this is not allowed (hook ignored)"); return; } - p_in->OnEvent(m_graphics[*static_cast(win)]->GetWindow()->GetID(), event, funct_ptr, param); + m_in.OnEvent(m_graphics[*static_cast(win)]->GetWindow()->GetID(), event, funct_ptr, param); } void Application::GetScreenSize(void* win, int* w, int* h) noexcept @@ -97,7 +94,7 @@ namespace mlx return nullptr; } m_graphics.emplace_back(std::make_unique(w, h, title, m_graphics.size())); - p_in->RegisterWindow(m_graphics.back()->GetWindow()); + m_in.RegisterWindow(m_graphics.back()->GetWindow()); } return static_cast(&m_graphics.back()->GetID()); } diff --git a/runtime/Includes/Core/Graphics.h b/runtime/Includes/Core/Graphics.h index 9c60f82..e29ce56 100644 --- a/runtime/Includes/Core/Graphics.h +++ b/runtime/Includes/Core/Graphics.h @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/04/02 14:49:49 by maldavid #+# #+# */ -/* Updated: 2024/03/27 21:16:11 by maldavid ### ########.fr */ +/* Updated: 2024/04/23 14:02:48 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -42,13 +42,14 @@ namespace mlx inline void LoadFont(const std::filesystem::path& filepath, float scale); inline void TryEraseTextureFromManager(NonOwningPtr texture) noexcept; - inline bool HasWindow() const noexcept { return _has_window; } + inline bool HasWindow() const noexcept { return m_has_window; } - inline Renderer& GetRenderer() { return *_renderer; } + inline Renderer& GetRenderer() { return m_renderer; } ~GraphicsSupport(); private: + Renderer m_renderer; PixelPutPipeline m_pixel_put_pipeline; std::vector> m_drawlist; @@ -59,7 +60,6 @@ namespace mlx glm::mat4 m_proj = glm::mat4(1.0); std::shared_ptr p_window; - 
std::unique_ptr p_renderer; std::size_t m_width = 0; std::size_t m_height = 0; diff --git a/runtime/Includes/Core/ImagesManager.h b/runtime/Includes/Core/ImagesRegistry.h similarity index 57% rename from runtime/Includes/Core/ImagesManager.h rename to runtime/Includes/Core/ImagesRegistry.h index 46e223d..e2bd494 100644 --- a/runtime/Includes/Core/ImagesManager.h +++ b/runtime/Includes/Core/ImagesRegistry.h @@ -1,27 +1,36 @@ /* ************************************************************************** */ /* */ /* ::: :::::::: */ -/* ImagesManager.h :+: :+: :+: */ +/* ImagesRegistry.h :+: :+: :+: */ /* +:+ +:+ +:+ */ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2024/04/03 15:11:47 by maldavid #+# #+# */ -/* Updated: 2024/04/21 15:13:43 by maldavid ### ########.fr */ +/* Updated: 2024/04/21 20:31:00 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ -#ifndef __MLX_CORE_IMAGES_MANAGER__ -#define __MLX_CORE_IMAGES_MANAGER__ +#ifndef __MLX_CORE_IMAGES_REGISTRY__ +#define __MLX_CORE_IMAGES_REGISTRY__ namespace mlx { - class ImagesManager + class ImageRegistry { public: + ImageRegistry() = default; + + inline void RegisterTexture(NonOwningPtr texture); + inline void UnregisterTexture(NonOwningPtr texture); + inline bool IsTextureKnown(NonOwningPtr texture); + + ~ImageRegistry() = default; private: - std::unordered_set m_textures_registry; + std::unordered_set> m_textures_registry; }; } +#include + #endif diff --git a/runtime/Includes/Core/ImagesRegistry.inl b/runtime/Includes/Core/ImagesRegistry.inl new file mode 100644 index 0000000..2a69334 --- /dev/null +++ b/runtime/Includes/Core/ImagesRegistry.inl @@ -0,0 +1,25 @@ +// This file is a part of Akel +// Authors : @kbz_8 +// Created : 21/04/2024 +// Updated : 21/04/2024 + +#pragma once +#include + +namespace mlx +{ + void ImageRegistry::RegisterTexture(NonOwningPtr texture) + { + m_textures_registry.insert(texture); + } + + void ImageRegistry::UnregisterTexture(NonOwningPtr texture) + { + m_textures_registry.erase(texture); + } + + bool ImageRegistry::IsTextureKnown(NonOwningPtr texture) + { + return m_textures_registry.find(texture) != m_textures_registry.end(); + } +} diff --git a/runtime/Includes/PreCompiled.h b/runtime/Includes/PreCompiled.h index dd0f386..07647ff 100644 --- a/runtime/Includes/PreCompiled.h +++ b/runtime/Includes/PreCompiled.h @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2024/03/25 17:37:23 by maldavid #+# #+# */ -/* Updated: 2024/03/27 21:33:47 by maldavid ### ########.fr */ +/* Updated: 2024/04/23 13:49:52 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -23,7 +23,7 @@ #include #include -#ifdef LEGACY +#ifdef MLX_LEGACY #include #include #else @@ -84,6 +84,6 @@ #include #include #include -#include +#include #endif diff --git a/runtime/Includes/Renderer/Buffers/Buffer.h b/runtime/Includes/Renderer/Buffers/Buffer.h index 5acf9de..f8532ab 100644 --- a/runtime/Includes/Renderer/Buffers/Buffer.h +++ b/runtime/Includes/Renderer/Buffers/Buffer.h @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/06 23:18:52 by maldavid #+# #+# */ -/* Updated: 2024/03/27 22:09:07 by maldavid ### ########.fr */ +/* Updated: 2024/04/23 14:20:49 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -27,9 +27,9 @@ namespace mlx void Create(BufferType type, 
VkDeviceSize size, VkBufferUsageFlags usage, const char* name, const void* data = nullptr); void Destroy() noexcept; - inline void MapMem(void** data) noexcept { Render_Core::get().getAllocator().mapMemory(m_allocation, data); m_is_mapped = true; } + inline void MapMem(void** data) noexcept { RenderCore::Get().GetAllocator().MapMemory(m_allocation, data); m_is_mapped = true; } inline bool IsMapped() const noexcept { return m_is_mapped; } - inline void UnmapMem() noexcept { Render_Core::get().getAllocator().unmapMemory(m_allocation); m_is_mapped = false; } + inline void UnmapMem() noexcept { RenderCore::Get().GetAllocator().UnmapMemory(m_allocation); m_is_mapped = false; } void Flush(VkDeviceSize size = VK_WHOLE_SIZE, VkDeviceSize offset = 0); bool CopyFromBuffer(const Buffer& buffer) noexcept; diff --git a/runtime/Includes/Renderer/Buffers/UniformBuffer.h b/runtime/Includes/Renderer/Buffers/UniformBuffer.h index 77b1f3a..6b84cd5 100644 --- a/runtime/Includes/Renderer/Buffers/UniformBuffer.h +++ b/runtime/Includes/Renderer/Buffers/UniformBuffer.h @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/06 18:45:29 by maldavid #+# #+# */ -/* Updated: 2024/03/27 22:15:23 by maldavid ### ########.fr */ +/* Updated: 2024/04/23 14:23:56 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -43,7 +43,7 @@ namespace mlx private: std::array m_buffers; std::array m_maps; - NonOwningPtr m_renderer; + NonOwningPtr p_renderer; }; } diff --git a/runtime/Includes/Renderer/Command/CommandBuffer.h b/runtime/Includes/Renderer/Command/CommandBuffer.h index 68c068f..b5be0ad 100644 --- a/runtime/Includes/Renderer/Command/CommandBuffer.h +++ b/runtime/Includes/Renderer/Command/CommandBuffer.h @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/06 18:25:42 by maldavid #+# #+# */ -/* Updated: 2024/03/27 22:44:58 by maldavid ### ########.fr */ +/* Updated: 2024/04/23 17:59:50 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -29,7 +29,7 @@ namespace mlx void Destroy() noexcept; void BeginRecord(VkCommandBufferUsageFlags usage = 0); - void Submit(class Semaphore* semaphores) noexcept; + void Submit(NonOwningPtr signal, NonOwningPtr wait) noexcept; void SubmitIdle(bool shouldWaitForExecution = true) noexcept; // TODO : handle `shouldWaitForExecution` as false by default (needs to modify CmdResources lifetimes to do so) void UpdateSubmitState() noexcept; inline void WaitForExecution() noexcept { m_fence.wait(); UpdateSubmitState(); m_state = CommandBufferState::Ready; } @@ -58,7 +58,7 @@ namespace mlx void PostTransferBarrier() noexcept; private: - std::vector m_cmd_resources; + std::vector> m_cmd_resources; Fence m_fence; VkCommandBuffer m_cmd_buffer = VK_NULL_HANDLE; NonOwningPtr m_pool; diff --git a/runtime/Includes/Renderer/Core/DrawableResource.h b/runtime/Includes/Renderer/Core/DrawableResource.h index fb60df9..a043d3a 100644 --- a/runtime/Includes/Renderer/Core/DrawableResource.h +++ b/runtime/Includes/Renderer/Core/DrawableResource.h @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2024/01/10 21:00:37 by maldavid #+# #+# */ -/* Updated: 2024/03/27 22:47:32 by maldavid ### ########.fr */ +/* Updated: 2024/04/23 18:10:56 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -19,7 +19,7 @@ namespace mlx { 
public: DrawableResource() = default; - virtual void Render(std::array& sets, class Renderer& renderer) = 0; + virtual void Render(class Renderer& renderer) = 0; virtual void ResetUpdate() {} virtual ~DrawableResource() = default; }; diff --git a/runtime/Includes/Renderer/Core/Instance.h b/runtime/Includes/Renderer/Core/Instance.h index 9b33f3e..cecb73d 100644 --- a/runtime/Includes/Renderer/Core/Instance.h +++ b/runtime/Includes/Renderer/Core/Instance.h @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/08 19:03:04 by maldavid #+# #+# */ -/* Updated: 2024/03/27 22:48:55 by maldavid ### ########.fr */ +/* Updated: 2024/04/23 18:44:02 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -21,6 +21,8 @@ namespace mlx void Init(); void Destroy() noexcept; + inline std::uint32_t GetInstanceVersion() const noexcept { return m_instance_version; } + inline VkInstance& operator()() noexcept { return m_instance; } inline VkInstance& Get() noexcept { return m_instance; } @@ -29,6 +31,7 @@ namespace mlx private: VkInstance m_instance = VK_NULL_HANDLE; + std::uint32_t m_instance_version = 0; }; } diff --git a/runtime/Includes/Renderer/Core/ValidationLayers.h b/runtime/Includes/Renderer/Core/ValidationLayers.h index cee5999..0dc3e45 100644 --- a/runtime/Includes/Renderer/Core/ValidationLayers.h +++ b/runtime/Includes/Renderer/Core/ValidationLayers.h @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/12/19 14:04:25 by maldavid #+# #+# */ -/* Updated: 2024/03/27 22:59:00 by maldavid ### ########.fr */ +/* Updated: 2024/04/23 19:16:25 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -22,10 +22,10 @@ namespace mlx void Init(); void Destroy(); - + bool CheckValidationLayerSupport(); void PopulateDebugMessengerCreateInfo(VkDebugUtilsMessengerCreateInfoEXT& create_info); - + VkResult SetDebugUtilsObjectNameEXT(VkObjectType object_type, std::uint64_t object_handle, const char* object_name); ~ValidationLayers() = default; diff --git a/runtime/Includes/Renderer/Descriptors/DescriptorPool.h b/runtime/Includes/Renderer/Descriptors/DescriptorPool.h index 97cabe9..1b09905 100644 --- a/runtime/Includes/Renderer/Descriptors/DescriptorPool.h +++ b/runtime/Includes/Renderer/Descriptors/DescriptorPool.h @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/01/23 18:32:43 by maldavid #+# #+# */ -/* Updated: 2024/03/27 23:00:29 by maldavid ### ########.fr */ +/* Updated: 2024/04/23 19:36:03 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -20,8 +20,9 @@ namespace mlx public: DescriptorPool() = default; - void Init(std::size_t n, NonOwningPtr size); - void FreeDescriptor(const class DescriptorSet& set); + void Init(std::vector sizes); + VkDescriptorSet AllocateDescriptorSet(class DescriptorSetLayout& layout); + void FreeDescriptor(VkDescriptorSet set); void Destroy() noexcept; inline VkDescriptorPool& operator()() noexcept { return m_pool; } diff --git a/runtime/Includes/Renderer/Descriptors/DescriptorPoolManager.h b/runtime/Includes/Renderer/Descriptors/DescriptorPoolManager.h index 8f4cb01..5c032aa 100644 --- a/runtime/Includes/Renderer/Descriptors/DescriptorPoolManager.h +++ b/runtime/Includes/Renderer/Descriptors/DescriptorPoolManager.h @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* 
Created: 2024/01/20 06:26:26 by maldavid #+# #+# */ -/* Updated: 2024/03/27 23:00:56 by maldavid ### ########.fr */ +/* Updated: 2024/04/23 19:40:22 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -22,7 +22,7 @@ namespace mlx public: DescriptorPoolManager() = default; - DescriptorPool& GetAvailablePool(); // assumes the pool is for only one set allocation, may cause some issues if this is for more than one + DescriptorPool& GetAvailablePool(); void DestroyAllPools(); ~DescriptorPoolManager() = default; diff --git a/runtime/Includes/Renderer/Descriptors/DescriptorSet.h b/runtime/Includes/Renderer/Descriptors/DescriptorSet.h index 56c8ec4..11b41a0 100644 --- a/runtime/Includes/Renderer/Descriptors/DescriptorSet.h +++ b/runtime/Includes/Renderer/Descriptors/DescriptorSet.h @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/01/23 18:39:36 by maldavid #+# #+# */ -/* Updated: 2024/03/27 23:02:38 by maldavid ### ########.fr */ +/* Updated: 2024/04/23 19:49:02 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -14,6 +14,7 @@ #define __VK_DESCRIPTOR_SET__ #include +#include namespace mlx { @@ -22,7 +23,7 @@ namespace mlx public: DescriptorSet() = default; - void Init(class Renderer* renderer, class DescriptorPool* pool, class DescriptorSetLayout* layout); + void Init(NonOwningPtr renderer, NonOwningPtr pool, DescriptorSetLayout layout); void WriteDescriptor(int binding, NonOwningPtr ubo) const noexcept; void WriteDescriptor(int binding, const class Image& image) const noexcept; @@ -41,9 +42,9 @@ namespace mlx ~DescriptorSet() = default; private: + DescriptorSetLayout p_layout; std::array m_desc_set; NonOwningPtr p_pool; - NonOwningPtr p_layout; NonOwningPtr p_renderer; }; } diff --git a/runtime/Includes/Renderer/Descriptors/DescriptorSetLayout.h b/runtime/Includes/Renderer/Descriptors/DescriptorSetLayout.h index e06bd54..c53cfdf 100644 --- a/runtime/Includes/Renderer/Descriptors/DescriptorSetLayout.h +++ b/runtime/Includes/Renderer/Descriptors/DescriptorSetLayout.h @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/01/23 18:36:22 by maldavid #+# #+# */ -/* Updated: 2024/03/27 23:03:04 by maldavid ### ########.fr */ +/* Updated: 2024/04/23 19:50:50 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ diff --git a/runtime/Includes/Renderer/Images/Image.h b/runtime/Includes/Renderer/Images/Image.h index 6f93bed..2f71614 100644 --- a/runtime/Includes/Renderer/Images/Image.h +++ b/runtime/Includes/Renderer/Images/Image.h @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/01/25 11:54:21 by maldavid #+# #+# */ -/* Updated: 2024/03/28 22:08:35 by maldavid ### ########.fr */ +/* Updated: 2024/04/23 20:00:53 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -45,7 +45,7 @@ namespace mlx void CreateSampler() noexcept; void CopyFromBuffer(class Buffer& buffer); void CopyToBuffer(class Buffer& buffer); - void TransitionLayout(VkImageLayout new_layout, CmdBuffer* cmd = nullptr); + void TransitionLayout(VkImageLayout new_layout, NonOwningPtr cmd = nullptr); virtual void Destroy() noexcept; inline VkImage Get() noexcept { return m_image; } diff --git a/runtime/Includes/Renderer/Images/Texture.h b/runtime/Includes/Renderer/Images/Texture.h 
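[editor's note] The DescriptorPool interface in the hunk above replaces the old per-set Init() with AllocateDescriptorSet()/FreeDescriptor(); only the declarations appear in this excerpt, the definitions live in the newly created DescriptorPool.cpp. A minimal sketch of what such an allocation helper could look like, assuming the pool keeps its VkDescriptorPool in an m_pool member and that DescriptorSetLayout exposes a Get() accessor returning the VkDescriptorSetLayout handle (both assumptions, not taken from this patch):

	VkDescriptorSet DescriptorPool::AllocateDescriptorSet(DescriptorSetLayout& layout)
	{
		VkDescriptorSetAllocateInfo alloc_info{};
		alloc_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
		alloc_info.descriptorPool = m_pool;      // assumed member holding the VkDescriptorPool handle
		alloc_info.descriptorSetCount = 1;
		alloc_info.pSetLayouts = &layout.Get();  // assumes Get() returns a VkDescriptorSetLayout lvalue

		VkDescriptorSet set = VK_NULL_HANDLE;
		if(vkAllocateDescriptorSets(RenderCore::Get().GetDevice().Get(), &alloc_info, &set) != VK_SUCCESS)
			FatalError("failed to allocate a descriptor set");
		return set;
	}

FreeDescriptor(set) would then reduce to a vkFreeDescriptorSets() call on the same pool, which only succeeds if the pool was created with VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT.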
index 7c88e17..16091a5 100644 --- a/runtime/Includes/Renderer/Images/Texture.h +++ b/runtime/Includes/Renderer/Images/Texture.h @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/03/08 02:24:58 by maldavid #+# #+# */ -/* Updated: 2024/03/28 22:11:21 by maldavid ### ########.fr */ +/* Updated: 2024/04/23 20:03:59 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -26,15 +26,15 @@ namespace mlx Texture() = default; void Create(std::uint8_t* pixels, std::uint32_t width, std::uint32_t height, VkFormat format, const char* name, bool dedicated_memory = false); - void Render(std::array& sets, class Renderer& renderer, int x, int y); + void Render(class Renderer& renderer, int x, int y); void Destroy() noexcept override; void SetPixel(int x, int y, std::uint32_t color) noexcept; int GetPixel(int x, int y) noexcept; inline void SetDescriptor(DescriptorSet&& set) noexcept { m_set = set; } - inline VkDescriptorSet GetSet() noexcept { return m_set.isInit() ? m_set.get() : VK_NULL_HANDLE; } - inline void UpdateSet(int binding) noexcept { m_set.writeDescriptor(binding, *this); m_has_set_been_updated = true; } + inline VkDescriptorSet GetSet() noexcept { return m_set.IsInit() ? m_set.get() : VK_NULL_HANDLE; } + inline void UpdateSet(int binding) noexcept { m_set.WriteDescriptor(binding, *this); m_has_set_been_updated = true; } inline bool HasBeenUpdated() const noexcept { return m_has_set_been_updated; } inline constexpr void ResetUpdate() noexcept { m_has_set_been_updated = false; } @@ -57,7 +57,7 @@ namespace mlx bool m_has_set_been_updated = false; }; - Texture StbTextureLoad(std::filesystem::path file, int* w, int* h); + Texture* StbTextureLoad(std::filesystem::path file, int* w, int* h); } #endif diff --git a/runtime/Includes/Utils/NonOwningPtr.h b/runtime/Includes/Utils/NonOwningPtr.h index baef061..2acc906 100644 --- a/runtime/Includes/Utils/NonOwningPtr.h +++ b/runtime/Includes/Utils/NonOwningPtr.h @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2024/03/27 21:03:37 by maldavid #+# #+# */ -/* Updated: 2024/03/27 21:05:05 by maldavid ### ########.fr */ +/* Updated: 2024/04/21 20:21:56 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -42,4 +42,16 @@ namespace mlx #include +namespace std +{ + template + struct hash> + { + std::size_t operator()(const mlx::NonOwningPtr& ptr) const noexcept + { + return std::hash{}(ptr.Get()); + } + }; +} + #endif diff --git a/runtime/Sources/Core/Application.cpp b/runtime/Sources/Core/Application.cpp index ab2a4aa..d0c754b 100644 --- a/runtime/Sources/Core/Application.cpp +++ b/runtime/Sources/Core/Application.cpp @@ -6,103 +6,110 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/04 22:10:52 by maldavid #+# #+# */ -/* Updated: 2024/04/02 17:06:34 by maldavid ### ########.fr */ +/* Updated: 2024/04/23 15:06:26 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #include #include -#include -#include -#include -#include -#include +#include +#include +#include +#include +#include #include -namespace mlx::core +namespace mlx { - Application::Application() : _fps(), _in(std::make_unique()) + Application::Application() : m_fps(), m_in() { EventBus::RegisterListener({[](const EventBase& event) { - }, "__internal_application" }); - _fps.init(); + m_fps.init(); } - void 
Application::run() noexcept + void Application::Run() noexcept { - while(_in->isRunning()) + m_in.Run(); + + while(m_in.IsRunning()) { - if(!_fps.update()) + if(!m_fps.Update()) continue; - _in->update(); + m_in.Update(); - if(_loop_hook) - _loop_hook(_param); + if(f_loop_hook) + f_loop_hook(p_param); - for(auto& gs : _graphics) - gs->render(); + for(auto& gs : m_graphics) + { + if(gs) + gs->Render(); + } } - Render_Core::get().getSingleTimeCmdManager().updateSingleTimesCmdBuffersSubmitState(); + RenderCore::Get().GetSingleTimeCmdManager().UpdateSingleTimesCmdBuffersSubmitState(); - for(auto& gs : _graphics) + for(auto& gs : m_graphics) { + if(!gs) + continue; for(int i = 0; i < MAX_FRAMES_IN_FLIGHT; i++) - gs->getRenderer().getCmdBuffer(i).waitForExecution(); + gs->GetRenderer().GetCmdBuffer(i).WaitForExecution(); } } - void* Application::newTexture(int w, int h) + void* Application::NewTexture(int w, int h) { MLX_PROFILE_FUNCTION(); + Texture* texture = new Texture; #ifdef DEBUG - _textures.emplace_front().create(nullptr, w, h, VK_FORMAT_R8G8B8A8_UNORM, "__mlx_unamed_user_texture"); + texture->Create(nullptr, w, h, VK_FORMAT_R8G8B8A8_UNORM, "__mlx_unamed_user_texture"); #else - _textures.emplace_front().create(nullptr, w, h, VK_FORMAT_R8G8B8A8_UNORM, nullptr); + texture->Create(nullptr, w, h, VK_FORMAT_R8G8B8A8_UNORM, nullptr); #endif - return &_textures.front(); + m_image_registry.RegisterTexture(texture); + return texture; } - void* Application::newStbTexture(char* file, int* w, int* h) + void* Application::NewStbTexture(char* file, int* w, int* h) { MLX_PROFILE_FUNCTION(); - _textures.emplace_front(stbTextureLoad(file, w, h)); - return &_textures.front(); + Texture* texture = StbTextureLoad(file, w, h); + m_image_registry.RegisterTexture(texture); + return texture; } - void Application::destroyTexture(void* ptr) + void Application::DestroyTexture(void* ptr) { MLX_PROFILE_FUNCTION(); - vkDeviceWaitIdle(Render_Core::get().getDevice().get()); // TODO : synchronize with another method than waiting for GPU to be idle - if(ptr == nullptr) + vkDeviceWaitIdle(RenderCore::Get().GetDevice().Get()); // TODO : synchronize with another method than waiting for GPU to be idle + if(!m_image_registry.Find(ptr)) { - core::error::report(e_kind::error, "invalid image ptr (NULL)"); + Error("invalid image ptr"); return; } - auto it = std::find_if(_textures.begin(), _textures.end(), [=](const Texture& texture) { return &texture == ptr; }); - if(it == _textures.end()) - { - core::error::report(e_kind::error, "invalid image ptr"); - return; - } Texture* texture = static_cast(ptr); - if(!texture->isInit()) - core::error::report(e_kind::error, "trying to destroy a texture that has already been destroyed"); + if(!texture->IsInit()) + Error("trying to destroy a texture that has already been destroyed"); else - texture->destroy(); + texture->Destroy(); for(auto& gs : _graphics) - gs->tryEraseTextureFromManager(texture); - _textures.erase(it); + { + if(gs) + gs->TryEraseTextureFromManager(texture); + } + m_image_registry.UnregisterTexture(texture); + delete texture; } Application::~Application() { - TextLibrary::get().clearLibrary(); - FontLibrary::get().clearLibrary(); + TextLibrary::Get().ClearLibrary(); + FontLibrary::Get().ClearLibrary(); } } diff --git a/runtime/Sources/Core/Bridge.cpp b/runtime/Sources/Core/Bridge.cpp index f81b738..6a550bc 100644 --- a/runtime/Sources/Core/Bridge.cpp +++ b/runtime/Sources/Core/Bridge.cpp @@ -1,29 +1,27 @@ /* 
************************************************************************** */ /* */ /* ::: :::::::: */ -/* bridge.cpp :+: :+: :+: */ +/* Bridge.cpp :+: :+: :+: */ /* +:+ +:+ +:+ */ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/04 17:35:20 by maldavid #+# #+# */ -/* Updated: 2024/03/25 23:05:46 by maldavid ### ########.fr */ +/* Updated: 2024/04/23 14:44:27 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ -#include +#include -#include -#include "errors.h" -#include "application.h" -#include +#include +#include #include -#include +#include static void* __mlx_ptr = nullptr; #define MLX_CHECK_APPLICATION_POINTER(ptr) \ if(ptr != __mlx_ptr || ptr == NULL) \ - mlx::core::error::report(e_kind::fatal_error, "invalid mlx pointer passed to '%s'", MLX_FUNC_SIG); \ + mlx::FatalError("invalid mlx pointer passed to '%'", MLX_FUNC_SIG); \ else {} // just to avoid issues with possible if-else statements outside this macro extern "C" @@ -32,14 +30,14 @@ extern "C" { if(__mlx_ptr != nullptr) { - mlx::core::error::report(e_kind::error, "MLX cannot be initialized multiple times"); - return NULL; // not nullptr for the C compatibility + Error("MLX cannot be initialized multiple times"); + return nullptr; } - mlx::MemManager::get(); // just to initialize the C garbage collector - mlx::core::Application* app = new mlx::core::Application; - mlx::Render_Core::get().init(); + mlx::MemManager::Get(); // just to initialize the C garbage collector + mlx::Application* app = new mlx::Application; if(app == nullptr) - mlx::core::error::report(e_kind::fatal_error, "Tout a pété"); + mlx::FatalError("Tout a pété"); + mlx::RenderCore::Get().Init(); __mlx_ptr = static_cast(app); return __mlx_ptr; } @@ -49,30 +47,30 @@ extern "C" MLX_CHECK_APPLICATION_POINTER(mlx); if(w <= 0 || h <= 0) { - mlx::core::error::report(e_kind::fatal_error, "invalid window size (%d x %d)", w, h); + mlx::FatalError("invalid window size (%d x %d)", w, h); return NULL; // not nullptr for the C compatibility } - return static_cast(mlx)->newGraphicsSuport(w, h, title); + return static_cast(mlx)->NewGraphicsSuport(w, h, title); } int mlx_loop_hook(void* mlx, int (*f)(void*), void* param) { MLX_CHECK_APPLICATION_POINTER(mlx); - static_cast(mlx)->loopHook(f, param); + static_cast(mlx)->LoopHook(f, param); return 0; } int mlx_loop(void* mlx) { MLX_CHECK_APPLICATION_POINTER(mlx); - static_cast(mlx)->run(); + static_cast(mlx)->Run(); return 0; } int mlx_loop_end(void* mlx) { MLX_CHECK_APPLICATION_POINTER(mlx); - static_cast(mlx)->loopEnd(); + static_cast(mlx)->LoopEnd(); return 0; } @@ -89,21 +87,21 @@ extern "C" int mlx_mouse_move(void* mlx, void* win, int x, int y) { MLX_CHECK_APPLICATION_POINTER(mlx); - static_cast(mlx)->mouseMove(win, x, y); + static_cast(mlx)->MouseMove(win, x, y); return 0; } int mlx_mouse_get_pos(void* mlx, int* x, int* y) { MLX_CHECK_APPLICATION_POINTER(mlx); - static_cast(mlx)->getMousePos(x, y); + static_cast(mlx)->GetMousePos(x, y); return 0; } int mlx_on_event(void* mlx, void* win, mlx_event_type event, int (*funct_ptr)(int, void*), void* param) { MLX_CHECK_APPLICATION_POINTER(mlx); - static_cast(mlx)->onEvent(win, static_cast(event), funct_ptr, param); + static_cast(mlx)->OnEvent(win, static_cast(event), funct_ptr, param); return 0; } @@ -111,14 +109,17 @@ extern "C" { MLX_CHECK_APPLICATION_POINTER(mlx); if (width <= 0 || height <= 0) - mlx::core::error::report(e_kind::fatal_error, "invalid image size (%d x %d)", width, height); - 
return static_cast(mlx)->newTexture(width, height); + { + mlx::Error("invalid image size (% x %)", width, height); + return nullptr; + } + return static_cast(mlx)->NewTexture(width, height); } int mlx_get_image_pixel(void* mlx, void* img, int x, int y) { MLX_CHECK_APPLICATION_POINTER(mlx); - int color = static_cast(mlx)->getTexturePixel(img, x, y); + int color = static_cast(mlx)->GetTexturePixel(img, x, y); unsigned char color_bits[4]; color_bits[0] = (color & 0x000000FF); color_bits[1] = (color & 0x0000FF00) >> 8; @@ -135,20 +136,20 @@ extern "C" color_bits[1] = (color & 0x0000FF00) >> 8; color_bits[2] = (color & 0x000000FF); color_bits[3] = (color & 0xFF000000) >> 24; - static_cast(mlx)->setTexturePixel(img, x, y, *reinterpret_cast(color_bits)); + static_cast(mlx)->SetTexturePixel(img, x, y, *reinterpret_cast(color_bits)); } int mlx_put_image_to_window(void* mlx, void* win, void* img, int x, int y) { MLX_CHECK_APPLICATION_POINTER(mlx); - static_cast(mlx)->texturePut(win, img, x, y); + static_cast(mlx)->TexturePut(win, img, x, y); return 0; } int mlx_destroy_image(void* mlx, void* img) { MLX_CHECK_APPLICATION_POINTER(mlx); - static_cast(mlx)->destroyTexture(img); + static_cast(mlx)->DestroyTexture(img); return 0; } @@ -156,42 +157,51 @@ extern "C" { MLX_CHECK_APPLICATION_POINTER(mlx); if (filename == nullptr) - mlx::core::error::report(e_kind::fatal_error, "PNG loader : filename is NULL"); + { + mlx::Error("PNG loader : filename is NULL"); + return nullptr; + } std::filesystem::path file(filename); if(file.extension() != ".png") { - mlx::core::error::report(e_kind::error, "PNG loader : not a png file '%s'", filename); + mlx::Error("PNG loader : not a png file '%'", filename); return nullptr; } - return static_cast(mlx)->newStbTexture(filename, width, height); + return static_cast(mlx)->NewStbTexture(filename, width, height); } void* mlx_jpg_file_to_image(void* mlx, char* filename, int* width, int* height) { MLX_CHECK_APPLICATION_POINTER(mlx); if (filename == nullptr) - mlx::core::error::report(e_kind::fatal_error, "JPG loader : filename is NULL"); + { + mlx::Error("JPG loader : filename is NULL"); + return nullptr; + } std::filesystem::path file(filename); if(file.extension() != ".jpg" && file.extension() != ".jpeg") { - mlx::core::error::report(e_kind::error, "JPG loader : not a jpg file '%s'", filename); + mlx::Error("JPG loader : not a jpg file '%'", filename); return nullptr; } - return static_cast(mlx)->newStbTexture(filename, width, height); + return static_cast(mlx)->NewStbTexture(filename, width, height); } void* mlx_bmp_file_to_image(void* mlx, char* filename, int* width, int* height) { MLX_CHECK_APPLICATION_POINTER(mlx); if (filename == nullptr) - mlx::core::error::report(e_kind::fatal_error, "BMP loader : filename is NULL"); + { + mlx::Error("BMP loader : filename is NULL"); + return nullptr; + } std::filesystem::path file(filename); if(file.extension() != ".bmp" && file.extension() != ".dib") { - mlx::core::error::report(e_kind::error, "BMP loader : not a bmp file '%s'", filename); + mlx::Error("BMP loader : not a bmp file '%'", filename); return nullptr; } - return static_cast(mlx)->newStbTexture(filename, width, height); + return static_cast(mlx)->NewStbTexture(filename, width, height); } int mlx_pixel_put(void* mlx, void* win, int x, int y, int color) @@ -202,7 +212,7 @@ extern "C" color_bits[1] = (color & 0x0000FF00) >> 8; color_bits[2] = (color & 0x000000FF); color_bits[3] = (color & 0xFF000000) >> 24; - static_cast(mlx)->pixelPut(win, x, y, 
*reinterpret_cast(color_bits)); + static_cast(mlx)->PixelPut(win, x, y, *reinterpret_cast(color_bits)); return 0; } @@ -214,7 +224,7 @@ extern "C" color_bits[1] = (color & 0x0000FF00) >> 8; color_bits[2] = (color & 0x000000FF); color_bits[3] = (color & 0xFF000000) >> 24; - static_cast(mlx)->stringPut(win, x, y, *reinterpret_cast(color_bits), str); + static_cast(mlx)->StringPut(win, x, y, *reinterpret_cast(color_bits), str); return 0; } @@ -223,19 +233,19 @@ extern "C" MLX_CHECK_APPLICATION_POINTER(mlx); if (filepath == nullptr) { - mlx::core::error::report(e_kind::error, "Font loader : filepath is NULL"); + mlx::Error("Font loader : filepath is NULL"); return; } std::filesystem::path file(filepath); if(std::strcmp(filepath, "default") != 0 && file.extension() != ".ttf" && file.extension() != ".tte") { - mlx::core::error::report(e_kind::error, "TTF loader : not a truetype font file '%s'", filepath); + mlx::Error("TTF loader : not a truetype font file '%'", filepath); return; } if(std::strcmp(filepath, "default") == 0) - static_cast(mlx)->loadFont(win, file, 6.f); + static_cast(mlx)->LoadFont(win, file, 6.f); else - static_cast(mlx)->loadFont(win, file, 16.f); + static_cast(mlx)->LoadFont(win, file, 16.f); } void mlx_set_font_scale(void* mlx, void* win, char* filepath, float scale) @@ -243,37 +253,37 @@ extern "C" MLX_CHECK_APPLICATION_POINTER(mlx); if (filepath == nullptr) { - mlx::core::error::report(e_kind::error, "Font loader : filepath is NULL"); + mlx::Error("Font loader : filepath is NULL"); return; } std::filesystem::path file(filepath); if(std::strcmp(filepath, "default") != 0 && file.extension() != ".ttf" && file.extension() != ".tte") { - mlx::core::error::report(e_kind::error, "TTF loader : not a truetype font file '%s'", filepath); + mlx::Error("TTF loader : not a truetype font file '%'", filepath); return; } - static_cast(mlx)->loadFont(win, file, scale); + static_cast(mlx)->LoadFont(win, file, scale); } int mlx_clear_window(void* mlx, void* win) { MLX_CHECK_APPLICATION_POINTER(mlx); - static_cast(mlx)->clearGraphicsSupport(win); + static_cast(mlx)->ClearGraphicsSupport(win); return 0; } int mlx_destroy_window(void* mlx, void* win) { MLX_CHECK_APPLICATION_POINTER(mlx); - static_cast(mlx)->destroyGraphicsSupport(win); + static_cast(mlx)->DestroyGraphicsSupport(win); return 0; } int mlx_destroy_display(void* mlx) { MLX_CHECK_APPLICATION_POINTER(mlx); - delete static_cast(mlx); - mlx::Render_Core::get().destroy(); + delete static_cast(mlx); + mlx::RenderCore::Get().Destroy(); __mlx_ptr = nullptr; return 0; } @@ -281,7 +291,7 @@ extern "C" int mlx_get_screens_size(void* mlx, void* win, int* w, int* h) { MLX_CHECK_APPLICATION_POINTER(mlx); - static_cast(mlx)->getScreenSize(win, w, h); + static_cast(mlx)->GetScreenSize(win, w, h); return 0; } @@ -290,15 +300,15 @@ extern "C" MLX_CHECK_APPLICATION_POINTER(mlx); if(fps < 0) { - mlx::core::error::report(e_kind::error, "You cannot set a negative FPS cap (nice try)"); - fps = -fps; + mlx::Error("You cannot set a negative FPS cap (nice try)"); + return 0; } if(fps == 0) { - mlx::core::error::report(e_kind::error, "You cannot set a FPS cap to 0 (nice try)"); + mlx::Error("You cannot set a FPS cap to 0 (nice try)"); return 0; } - static_cast(mlx)->setFPSCap(static_cast(fps)); + static_cast(mlx)->SetFPSCap(static_cast(fps)); return 0; } } diff --git a/runtime/Sources/Core/Graphics.cpp b/runtime/Sources/Core/Graphics.cpp index 6be4395..b9ea623 100644 --- a/runtime/Sources/Core/Graphics.cpp +++ b/runtime/Sources/Core/Graphics.cpp @@ -1,73 +1,68 
@@ /* ************************************************************************** */ /* */ /* ::: :::::::: */ -/* graphics.cpp :+: :+: :+: */ +/* Graphics.cpp :+: :+: :+: */ /* +:+ +:+ +:+ */ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/04/02 15:13:55 by maldavid #+# #+# */ -/* Updated: 2024/03/27 00:32:34 by maldavid ### ########.fr */ +/* Updated: 2024/04/23 14:03:51 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ -#include +#include -#include +#include namespace mlx { - GraphicsSupport::GraphicsSupport(std::size_t w, std::size_t h, Texture* render_target, int id) : - _window(nullptr), - _renderer(std::make_unique()), - _width(w), - _height(h), - _id(id), - _has_window(false) + GraphicsSupport::GraphicsSupport(std::size_t w, std::size_t h, NonOwningPtr render_target, int id) : + p_window(nullptr), + m_width(w), + m_height(h), + m_id(id), + m_has_window(false) { MLX_PROFILE_FUNCTION(); - _renderer->setWindow(nullptr); - _renderer->init(render_target); - _pixel_put_pipeline.init(w, h, *_renderer); - _text_manager.init(*_renderer); + m_renderer.SetWindow(nullptr); + m_renderer.Init(render_target); + m_pixel_put_pipeline.Init(w, h, m_renderer); + m_text_manager.Init(m_renderer); } GraphicsSupport::GraphicsSupport(std::size_t w, std::size_t h, std::string title, int id) : - _window(std::make_shared(w, h, title)), - _renderer(std::make_unique()), - _width(w), - _height(h), - _id(id), - _has_window(true) + p_window(std::make_shared(w, h, title)), + m_width(w), + m_height(h), + m_id(id), + m_has_window(true) { MLX_PROFILE_FUNCTION(); - _renderer->setWindow(_window.get()); - _renderer->init(nullptr); - _pixel_put_pipeline.init(w, h, *_renderer); - _text_manager.init(*_renderer); + m_renderer.SetWindow(p_window.get()); + m_renderer.Init(nullptr); + m_pixel_put_pipeline.Init(w, h, m_renderer); + m_text_manager.Init(m_renderer); } - void GraphicsSupport::render() noexcept + void GraphicsSupport::Render() noexcept { MLX_PROFILE_FUNCTION(); - if(!_renderer->beginFrame()) + if(!m_renderer.BeginFrame()) return; - _proj = glm::ortho(0, _width, 0, _height); - _renderer->getUniformBuffer()->setData(sizeof(_proj), &_proj); + m_proj = glm::ortho(0, m_width, 0, m_height); + m_renderer.GetUniformBuffer()->SetData(sizeof(m_proj), &m_proj); - static std::array sets = { - _renderer->getVertDescriptorSet().get(), - VK_NULL_HANDLE - }; + m_renderer.getVertDescriptorSet().Bind(); + + for(auto& data : m_drawlist) + data->Render(m_renderer); + + m_pixel_put_pipeline.Render(m_renderer); + + m_renderer.EndFrame(); for(auto& data : _drawlist) - data->render(sets, *_renderer); - - _pixel_put_pipeline.render(sets, *_renderer); - - _renderer->endFrame(); - - for(auto& data : _drawlist) - data->resetUpdate(); + data->ResetUpdate(); #ifdef GRAPHICS_MEMORY_DUMP // dump memory to file every two seconds @@ -75,7 +70,7 @@ namespace mlx static std::int64_t timer = static_cast(std::chrono::duration_cast(std::chrono::high_resolution_clock::now().time_since_epoch()).count()); if(std::chrono::duration{static_cast(std::chrono::duration_cast(std::chrono::high_resolution_clock::now().time_since_epoch()).count()) - timer} >= 1s) { - Render_Core::get().getAllocator().dumpMemoryToJson(); + RenderCore::Get().GetAllocator().DumpMemoryToJson(); timer = static_cast(std::chrono::duration_cast(std::chrono::high_resolution_clock::now().time_since_epoch()).count()); } #endif @@ -84,11 +79,11 @@ namespace mlx GraphicsSupport::~GraphicsSupport() { 
MLX_PROFILE_FUNCTION(); - vkDeviceWaitIdle(Render_Core::get().getDevice().get()); - _text_manager.destroy(); - _pixel_put_pipeline.destroy(); - _renderer->destroy(); - if(_window) - _window->destroy(); + vkDeviceWaitIdle(RenderCore::Get().GetDevice().Get()); + m_text_manager.Destroy(); + m_pixel_put_pipeline.Destroy(); + m_renderer->Destroy(); + if(p_window) + p_window->Destroy(); } } diff --git a/runtime/Sources/Core/Memory.cpp b/runtime/Sources/Core/Memory.cpp index 93ada08..670061b 100644 --- a/runtime/Sources/Core/Memory.cpp +++ b/runtime/Sources/Core/Memory.cpp @@ -1,64 +1,63 @@ /* ************************************************************************** */ /* */ /* ::: :::::::: */ -/* memory.cpp :+: :+: :+: */ +/* Memory.cpp :+: :+: :+: */ /* +:+ +:+ +:+ */ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/12/07 16:32:01 by kbz_8 #+# #+# */ -/* Updated: 2024/03/25 19:01:02 by maldavid ### ########.fr */ +/* Updated: 2024/04/23 14:05:52 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ -#include +#include -#include -#include +#include namespace mlx { - void* MemManager::malloc(std::size_t size) + void* MemManager::Malloc(std::size_t size) { void* ptr = std::malloc(size); if(ptr != nullptr) - _blocks.push_back(ptr); + s_blocks.push_back(ptr); return ptr; } - void* MemManager::calloc(std::size_t n, std::size_t size) + void* MemManager::Calloc(std::size_t n, std::size_t size) { void* ptr = std::calloc(n, size); if(ptr != nullptr) - _blocks.push_back(ptr); + s_blocks.push_back(ptr); return ptr; } - void* MemManager::realloc(void* ptr, std::size_t size) + void* MemManager::Realloc(void* ptr, std::size_t size) { void* ptr2 = std::realloc(ptr, size); if(ptr2 != nullptr) - _blocks.push_back(ptr2); - auto it = std::find(_blocks.begin(), _blocks.end(), ptr); - if(it != _blocks.end()) - _blocks.erase(it); + s_blocks.push_back(ptr2); + auto it = std::find(s_blocks.begin(), s_blocks.end(), ptr); + if(it != s_blocks.end()) + s_blocks.erase(it); return ptr2; } - void MemManager::free(void* ptr) + void MemManager::Free(void* ptr) { - auto it = std::find(_blocks.begin(), _blocks.end(), ptr); - if(it == _blocks.end()) + auto it = std::find(s_blocks.begin(), s_blocks.end(), ptr); + if(it == s_blocks.end()) { - core::error::report(e_kind::error, "Memory Manager : trying to free a pointer not allocated by the memory manager"); + Error("Memory Manager : trying to free a pointer not allocated by the memory manager"); return; } std::free(*it); - _blocks.erase(it); + s_blocks.erase(it); } MemManager::~MemManager() { - std::for_each(_blocks.begin(), _blocks.end(), [](void* ptr) + std::for_each(s_blocks.begin(), s_blocks.end(), [](void* ptr) { std::free(ptr); }); diff --git a/runtime/Sources/Core/Profiler.cpp b/runtime/Sources/Core/Profiler.cpp index 5ef15cd..e811bc5 100644 --- a/runtime/Sources/Core/Profiler.cpp +++ b/runtime/Sources/Core/Profiler.cpp @@ -1,52 +1,50 @@ /* ************************************************************************** */ /* */ /* ::: :::::::: */ -/* profiler.cpp :+: :+: :+: */ +/* Profiler.cpp :+: :+: :+: */ /* +:+ +:+ +:+ */ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2024/01/10 13:56:21 by maldavid #+# #+# */ -/* Updated: 2024/03/25 19:01:06 by maldavid ### ########.fr */ +/* Updated: 2024/04/23 14:08:51 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ -#include +#include -#include -#include -#include 
+#include namespace mlx { - void Profiler::beginRuntimeSession() + void Profiler::BeginRuntimeSession() { - std::lock_guard lock(_mutex); - if(_runtime_session_began) + std::lock_guard lock(m_mutex); + if(m_runtime_session_began) return; - _output_stream.open("./runtime_profile.mlx.json", std::ofstream::out | std::ofstream::trunc); + m_output_stream.open("./runtime_profile.mlx.json", std::ofstream::out | std::ofstream::trunc); - if(_output_stream.is_open()) - writeHeader(); + if(m_output_stream.is_open()) + WriteHeader(); else - core::error::report(e_kind::error, "Profiler : cannot open runtime profile file"); - _runtime_session_began = true; + Error("Profiler : cannot open runtime profile file"); + m_runtime_session_began = true; } - void Profiler::appendProfileData(ProfileResult&& result) + void Profiler::AppendProfileData(ProfileResult&& result) { std::lock_guard lock(_mutex); - auto it = _profile_data.find(result.name); - if(it != _profile_data.end()) + auto it = m_profile_data.find(result.name); + if(it != m_profile_data.end()) { result.elapsed_time = (result.elapsed_time + it->second.second.elapsed_time) / it->second.first; - _profile_data[result.name].first++; - _profile_data[result.name].second = result; + m_profile_data[result.name].first++; + m_profile_data[result.name].second = result; } else - _profile_data[result.name] = std::make_pair(1, result); + m_profile_data[result.name] = std::make_pair(1, result); } - void Profiler::writeProfile(const ProfileResult& result) + void Profiler::WriteProfile(const ProfileResult& result) { std::stringstream json; json << std::setprecision(9) << std::fixed; @@ -56,26 +54,26 @@ namespace mlx json << "\t\"thread id\" : " << result.thread_id << "," << '\n'; json << "\t\"average duration\" : \"" << result.elapsed_time.count() << "ms\"\n"; json << "}"; - _output_stream << json.str(); + m_output_stream << json.str(); } - void Profiler::endRuntimeSession() + void Profiler::EndRuntimeSession() { - std::lock_guard lock(_mutex); - if(!_runtime_session_began) + std::lock_guard lock(m_mutex); + if(!m_runtime_session_began) return; - for(auto& [_, pair] : _profile_data) - writeProfile(pair.second); - writeFooter(); - _output_stream.close(); - _profile_data.clear(); - _runtime_session_began = false; + for(auto& [_, pair] : m_profile_data) + WriteProfile(pair.second); + WriteFooter(); + m_output_stream.close(); + m_profile_data.clear(); + m_runtime_session_began = false; } Profiler::~Profiler() { - if(!_runtime_session_began) + if(!m_runtime_session_began) return; - endRuntimeSession(); + EndRuntimeSession(); } } diff --git a/runtime/Sources/Core/UUID.cpp b/runtime/Sources/Core/UUID.cpp index 744b4c9..96ae62f 100644 --- a/runtime/Sources/Core/UUID.cpp +++ b/runtime/Sources/Core/UUID.cpp @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2024/01/06 11:26:37 by maldavid #+# #+# */ -/* Updated: 2024/03/25 19:00:36 by maldavid ### ########.fr */ +/* Updated: 2024/04/23 14:09:35 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -20,6 +20,6 @@ namespace mlx static std::mt19937_64 engine(random_device()); static std::uniform_int_distribution uniform_distribution; - UUID::UUID() : _uuid(uniform_distribution(engine)) {} - UUID::UUID(std::uint64_t uuid) : _uuid(uuid) {} + UUID::UUID() : m_uuid(uniform_distribution(engine)) {} + UUID::UUID(std::uint64_t uuid) : m_uuid(uuid) {} } diff --git a/runtime/Sources/Renderer/Buffers/Buffer.cpp 
b/runtime/Sources/Renderer/Buffers/Buffer.cpp index 1b704fe..6f6455f 100644 --- a/runtime/Sources/Renderer/Buffers/Buffer.cpp +++ b/runtime/Sources/Renderer/Buffers/Buffer.cpp @@ -1,67 +1,66 @@ /* ************************************************************************** */ /* */ /* ::: :::::::: */ -/* vk_buffer.cpp :+: :+: :+: */ +/* Buffer.cpp :+: :+: :+: */ /* +:+ +:+ +:+ */ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/08 18:55:57 by maldavid #+# #+# */ -/* Updated: 2024/03/25 19:01:18 by maldavid ### ########.fr */ +/* Updated: 2024/04/23 14:20:13 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ -#include +#include -#include "vk_buffer.h" -#include -#include -#include -#include +#include +#include +#include +#include namespace mlx { - void Buffer::create(Buffer::kind type, VkDeviceSize size, VkBufferUsageFlags usage, const char* name, const void* data) + void Buffer::Create(BufferType type, VkDeviceSize size, VkBufferUsageFlags usage, const char* name, const void* data) { MLX_PROFILE_FUNCTION(); - _usage = usage; - if(type == Buffer::kind::constant || type == Buffer::kind::dynamic_device_local) + m_usage = usage; + if(type == BufferType::Constant || type == BufferType::LowDynamic) { - if(data == nullptr && type == Buffer::kind::constant) + if(data == nullptr && type == BufferType::Constant) { - core::error::report(e_kind::warning, "Vulkan : trying to create constant buffer without data (constant buffers cannot be modified after creation)"); + Warning("Vulkan : trying to create constant buffer without data (constant buffers cannot be modified after creation)"); return; } - _usage |= VK_BUFFER_USAGE_TRANSFER_SRC_BIT; + m_usage |= VK_BUFFER_USAGE_TRANSFER_SRC_BIT; } VmaAllocationCreateInfo alloc_info{}; alloc_info.flags = VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT; alloc_info.usage = VMA_MEMORY_USAGE_AUTO; - createBuffer(_usage, alloc_info, size, name); + CreateBuffer(m_usage, alloc_info, size, name); if(data != nullptr) { void* mapped = nullptr; - mapMem(&mapped); + MapMem(&mapped); std::memcpy(mapped, data, size); - unmapMem(); - if(type == Buffer::kind::constant || type == Buffer::kind::dynamic_device_local) - pushToGPU(); + UnmapMem(); + if(type == BufferType::constant || type == BufferType::LowDynamic) + PushToGPU(); } } - void Buffer::destroy() noexcept + void Buffer::Destroy() noexcept { MLX_PROFILE_FUNCTION(); - if(_is_mapped) - unmapMem(); - if(_buffer != VK_NULL_HANDLE) - Render_Core::get().getAllocator().destroyBuffer(_allocation, _buffer); - _buffer = VK_NULL_HANDLE; + if(m_is_mapped) + UnmapMem(); + if(m_buffer != VK_NULL_HANDLE) + RenderCore::Get().GetAllocator().DestroyBuffer(m_allocation, m_buffer); + m_buffer = VK_NULL_HANDLE; } - void Buffer::createBuffer(VkBufferUsageFlags usage, VmaAllocationCreateInfo info, VkDeviceSize size, [[maybe_unused]] const char* name) + void Buffer::CreateBuffer(VkBufferUsageFlags usage, VmaAllocationCreateInfo info, VkDeviceSize size, [[maybe_unused]] const char* name) { MLX_PROFILE_FUNCTION(); VkBufferCreateInfo bufferInfo{}; @@ -71,97 +70,81 @@ namespace mlx bufferInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE; #ifdef DEBUG - _name = name; - std::string alloc_name = _name; + m_name = name; + std::string alloc_name = m_name; if(usage & VK_BUFFER_USAGE_INDEX_BUFFER_BIT) alloc_name.append("_index_buffer"); else if(usage & VK_BUFFER_USAGE_VERTEX_BUFFER_BIT) alloc_name.append("_vertex_buffer"); else if(!(usage & 
VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT)) alloc_name.append("_buffer"); - _allocation = Render_Core::get().getAllocator().createBuffer(&bufferInfo, &info, _buffer, alloc_name.c_str()); + m_allocation = RenderCore::Get().GetAllocator().CreateBuffer(&bufferInfo, &info, m_buffer, alloc_name.c_str()); #else - _allocation = Render_Core::get().getAllocator().createBuffer(&bufferInfo, &info, _buffer, nullptr); + m_allocation = RenderCore::Get().GetAllocator().CreateBuffer(&bufferInfo, &info, m_buffer, nullptr); #endif - _size = size; + m_size = size; } - bool Buffer::copyFromBuffer(const Buffer& buffer) noexcept + bool Buffer::CopyFromBuffer(const Buffer& buffer) noexcept { MLX_PROFILE_FUNCTION(); - if(!(_usage & VK_BUFFER_USAGE_TRANSFER_DST_BIT)) + if(!(m_usage & VK_BUFFER_USAGE_TRANSFER_DST_BIT)) { - core::error::report(e_kind::error, "Vulkan : buffer cannot be the destination of a copy because it does not have the correct usage flag"); + Error("Vulkan : buffer cannot be the destination of a copy because it does not have the correct usage flag"); return false; } - if(!(buffer._usage & VK_BUFFER_USAGE_TRANSFER_SRC_BIT)) + if(!(buffer.m_usage & VK_BUFFER_USAGE_TRANSFER_SRC_BIT)) { - core::error::report(e_kind::error, "Vulkan : buffer cannot be the source of a copy because it does not have the correct usage flag"); + Error("Vulkan : buffer cannot be the source of a copy because it does not have the correct usage flag"); return false; } - CmdBuffer& cmd = Render_Core::get().getSingleTimeCmdBuffer(); - cmd.beginRecord(); + CmdBuffer& cmd = RenderCore::Get().GetSingleTimeCmdBuffer(); + cmd.BeginRecord(); - cmd.copyBuffer(*this, const_cast(buffer)); + cmd.CopyBuffer(*this, const_cast(buffer)); - cmd.endRecord(); - cmd.submitIdle(); + cmd.EndRecord(); + cmd.SubmitIdle(); return true; } - void Buffer::pushToGPU() noexcept + void Buffer::PushToGPU() noexcept { MLX_PROFILE_FUNCTION(); VmaAllocationCreateInfo alloc_info{}; alloc_info.usage = VMA_MEMORY_USAGE_AUTO_PREFER_DEVICE; - Buffer newBuffer; - newBuffer._usage = (_usage & 0xFFFFFFFC) | VK_BUFFER_USAGE_TRANSFER_DST_BIT; + Buffer new_buffer; + new_buffer.m_usage = (m_usage & 0xFFFFFFFC) | VK_BUFFER_USAGE_TRANSFER_DST_BIT; #ifdef DEBUG - std::string new_name = _name + "_GPU"; - newBuffer.createBuffer(newBuffer._usage, alloc_info, _size, new_name.c_str()); + std::string new_name = m_name + "_GPU"; + new_buffer.CreateBuffer(new_buffer.m_usage, alloc_info, m_size, new_name.c_str()); #else - newBuffer.createBuffer(newBuffer._usage, alloc_info, _size, nullptr); + new_buffer.CreateBuffer(new_buffer.m_usage, alloc_info, m_size, nullptr); #endif - if(newBuffer.copyFromBuffer(*this)) // if the copy succeded we swap the buffers, otherwise the new one is deleted - this->swap(newBuffer); - newBuffer.destroy(); + if(new_buffer.CopyFromBuffer(*this)) // if the copy succeded we swap the buffers, otherwise the new one is deleted + this->Swap(new_buffer); + new_buffer.Destroy(); // destroying the old buffer as they have been swapped } - void Buffer::swap(Buffer& buffer) noexcept + void Buffer::Swap(Buffer& buffer) noexcept { - VkBuffer temp_b = _buffer; - _buffer = buffer._buffer; - buffer._buffer = temp_b; - - VmaAllocation temp_a = buffer._allocation; - buffer._allocation = _allocation; - _allocation = temp_a; - - VkDeviceSize temp_size = buffer._size; - buffer._size = _size; - _size = temp_size; - - VkDeviceSize temp_offset = buffer._offset; - buffer._offset = _offset; - _offset = temp_offset; - - VkBufferUsageFlags temp_u = _usage; - _usage = buffer._usage; - 
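The PushToGPU/Swap pair above implements a buffer promotion: a second, device-local buffer is created, the current contents are copied into it, the two objects exchange their handles, and the swapped-out storage is destroyed. A standalone, Vulkan-free sketch of that idiom (ToyBuffer and every name in it are illustrative, not the engine's API):

#include <cstdio>
#include <string>
#include <utility>
#include <vector>

struct ToyBuffer
{
	std::vector<unsigned char> storage;
	std::string location = "host";

	bool CopyFrom(const ToyBuffer& src) { storage = src.storage; return true; } // stands in for the GPU-side copy
	void Swap(ToyBuffer& other) noexcept
	{
		std::swap(storage, other.storage);   // same idea as swapping the VkBuffer/VmaAllocation handles above
		std::swap(location, other.location);
	}
	void PushToDevice()
	{
		ToyBuffer new_buffer;
		new_buffer.location = "device";
		if(new_buffer.CopyFrom(*this)) // only swap if the copy succeeded
			Swap(new_buffer);
		// new_buffer now holds the old storage and dies here, like Destroy() on the swapped-out buffer above
	}
};

int main()
{
	ToyBuffer buf;
	buf.storage = { 1, 2, 3, 4 };
	buf.PushToDevice();
	std::printf("buffer now lives on: %s\n", buf.location.c_str());
	return 0;
}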
buffer._usage = temp_u; - + std::swap(m_buffer, buffer.m_buffer); + std::swap(m_allocation, buffer.m_allocation); + std::swap(m_size, buffer.m_size); + std::swap(m_offset, buffer.m_offset); #ifdef DEBUG - std::string temp_n = _name; - _name = buffer._name; - buffer._name = temp_n; + std::swap(m_name, buffer.m_name); #endif + std::swap(m_usage, buffer.m_usage); + std::swap(m_is_mapped, buffer.m_is_mapped); } - void Buffer::flush(VkDeviceSize size, VkDeviceSize offset) + void Buffer::Flush(VkDeviceSize size, VkDeviceSize offset) { - Render_Core::get().getAllocator().flush(_allocation, size, offset); + RenderCore::Get().GetAllocator().Flush(m_allocation, size, offset); } } diff --git a/runtime/Sources/Renderer/Buffers/UniformBuffer.cpp b/runtime/Sources/Renderer/Buffers/UniformBuffer.cpp index 52cb3df..0359c03 100644 --- a/runtime/Sources/Renderer/Buffers/UniformBuffer.cpp +++ b/runtime/Sources/Renderer/Buffers/UniformBuffer.cpp @@ -1,79 +1,78 @@ /* ************************************************************************** */ /* */ /* ::: :::::::: */ -/* vk_ubo.cpp :+: :+: :+: */ +/* UniformBuffer.cpp :+: :+: :+: */ /* +:+ +:+ +:+ */ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/06 18:45:52 by maldavid #+# #+# */ -/* Updated: 2024/03/25 17:48:07 by maldavid ### ########.fr */ +/* Updated: 2024/04/23 14:25:17 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ -#include +#include -#include "vk_ubo.h" -#include -#include +#include +#include namespace mlx { - void UBO::create(Renderer* renderer, std::uint32_t size, [[maybe_unused]] const char* name) + void UniformBuffer::create(NonOwningPtr renderer, std::uint32_t size, [[maybe_unused]] const char* name) { MLX_PROFILE_FUNCTION(); - _renderer = renderer; + p_renderer = renderer; for(int i = 0; i < MAX_FRAMES_IN_FLIGHT; i++) { #ifdef DEBUG std::string name_frame = name; name_frame.append(std::to_string(i)); - _buffers[i].create(Buffer::kind::uniform, size, VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT, name_frame.c_str()); + m_buffers[i].create(BufferType::HighDynamic, size, VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT, name_frame.c_str()); #else - _buffers[i].create(Buffer::kind::uniform, size, VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT, nullptr); + _buffers[i].Create(BufferType::HighDynamic, size, VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT, nullptr); #endif - _buffers[i].mapMem(&_maps[i]); - if(_maps[i] == nullptr) - core::error::report(e_kind::fatal_error, "Vulkan : unable to map a uniform buffer"); + m_buffers[i].MapMem(&_maps[i]); + if(m_maps[i] == nullptr) + FatalError("Vulkan : unable to map a uniform buffer"); } } - void UBO::setData(std::uint32_t size, const void* data) + void UniformBuffer::SetData(std::uint32_t size, const void* data) { MLX_PROFILE_FUNCTION(); - std::memcpy(_maps[_renderer->getActiveImageIndex()], data, static_cast(size)); + std::memcpy(m_maps[p_renderer->GetActiveImageIndex()], data, static_cast(size)); } - void UBO::setDynamicData(std::uint32_t size, const void* data) + void UniformBuffer::SetDynamicData(std::uint32_t size, const void* data) { MLX_PROFILE_FUNCTION(); - std::memcpy(_maps[_renderer->getActiveImageIndex()], data, static_cast(size)); - _buffers[_renderer->getActiveImageIndex()].flush(); + std::memcpy(m_maps[p_renderer->GetActiveImageIndex()], data, static_cast(size)); + m_buffers[p_renderer->GetActiveImageIndex()].Flush(); } - unsigned int UBO::getSize() noexcept + unsigned int UniformBuffer::GetSize() noexcept { - return 
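The uniform buffer above keeps one buffer and one persistent mapping per frame in flight and only ever writes into the slot of the frame currently being recorded, so a frame the GPU may still be reading is never touched. A plain C++ sketch of that layout, assuming two frames in flight; the names are illustrative, not the engine's:

#include <array>
#include <cstddef>
#include <cstdio>
#include <cstring>

constexpr int MAX_FRAMES_IN_FLIGHT = 2;

struct ToyUniformBuffer
{
	std::array<std::array<unsigned char, 64>, MAX_FRAMES_IN_FLIGHT> buffers{}; // stand-ins for the per-frame VkBuffers
	std::array<void*, MAX_FRAMES_IN_FLIGHT> maps{};                            // stand-ins for the persistent mappings

	void Init()
	{
		for(int i = 0; i < MAX_FRAMES_IN_FLIGHT; i++)
			maps[i] = buffers[i].data(); // a real implementation maps each buffer once and keeps the pointer
	}
	void SetData(int active_frame, const void* data, std::size_t size)
	{
		std::memcpy(maps[active_frame], data, size); // only the active frame's slot is written
	}
};

int main()
{
	ToyUniformBuffer ubo;
	ubo.Init();
	float proj[16] = { 1.0f };
	ubo.SetData(0, proj, sizeof(proj)); // e.g. the projection matrix uploaded every frame
	std::printf("wrote %zu bytes into frame 0's slot\n", sizeof(proj));
	return 0;
}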
_buffers[_renderer->getActiveImageIndex()].getSize(); + return m_buffers[p_renderer->GetActiveImageIndex()].GetSize(); } - unsigned int UBO::getOffset() noexcept + unsigned int UniformBuffer::GetOffset() noexcept { - return _buffers[_renderer->getActiveImageIndex()].getOffset(); + return m_buffers[p_renderer->GetActiveImageIndex()].GetOffset(); } - VkBuffer& UBO::operator()() noexcept + VkBuffer& UniformBuffer::operator()() noexcept { - return _buffers[_renderer->getActiveImageIndex()].get(); + return m_buffers[p_renderer->GetActiveImageIndex()].Get(); } - VkBuffer& UBO::get() noexcept + VkBuffer& UniformBuffer::Get() noexcept { - return _buffers[_renderer->getActiveImageIndex()].get(); + return m_buffers[p_renderer->GetActiveImageIndex()].Get(); } - void UBO::destroy() noexcept + void UniformBuffer::Destroy() noexcept { for(int i = 0; i < MAX_FRAMES_IN_FLIGHT; i++) - _buffers[i].destroy(); + m_buffers[i].Destroy(); } } diff --git a/runtime/Sources/Renderer/Buffers/VertexBuffer.cpp b/runtime/Sources/Renderer/Buffers/VertexBuffer.cpp index 418ffeb..41afa98 100644 --- a/runtime/Sources/Renderer/Buffers/VertexBuffer.cpp +++ b/runtime/Sources/Renderer/Buffers/VertexBuffer.cpp @@ -1,56 +1,56 @@ /* ************************************************************************** */ /* */ /* ::: :::::::: */ -/* vk_vbo.cpp :+: :+: :+: */ +/* VertexBuffer.cpp :+: :+: :+: */ /* +:+ +:+ +:+ */ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/06 18:28:08 by maldavid #+# #+# */ -/* Updated: 2024/03/25 19:01:25 by maldavid ### ########.fr */ +/* Updated: 2024/04/23 14:48:15 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ -#include +#include -#include "vk_vbo.h" +#include namespace mlx { - void VBO::setData(std::uint32_t size, const void* data) + void RAMVertexBuffer::SetData(std::uint32_t size, const void* data) { - if(size > getSize()) + if(size > GetSize()) { - core::error::report(e_kind::error, "Vulkan : trying to store to much data in a vertex buffer (%d bytes in %d bytes)", size, getSize()); + Error("Vulkan : trying to store to much data in a vertex buffer (% bytes in % bytes)", size, GetSize()); return; } if(data == nullptr) - core::error::report(e_kind::warning, "Vulkan : mapping null data in a vertex buffer"); + Warning("Vulkan : mapping null data in a vertex buffer"); void* temp = nullptr; - mapMem(&temp); + MapMem(&temp); std::memcpy(temp, data, static_cast(size)); - unmapMem(); + UnmapMem(); } - void D_VBO::setData(std::uint32_t size, const void* data) + void DeviceVertexBuffer::SetData(std::uint32_t size, const void* data) { - if(size > getSize()) + if(size > GetSize()) { - core::error::report(e_kind::error, "Vulkan : trying to store to much data in a vertex buffer (%d bytes in %d bytes)", size, getSize()); + Error("Vulkan : trying to store to much data in a vertex buffer (% bytes in % bytes)", size, GetSize()); return; } if(data == nullptr) - core::error::report(e_kind::warning, "Vulkan : mapping null data in a vertex buffer"); + Warning("Vulkan : mapping null data in a vertex buffer"); Buffer tmp_buf; #ifdef DEBUG - tmp_buf.create(Buffer::kind::dynamic, size, VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_SRC_BIT, "tmp_buffer", data); + tmp_buf.Create(BufferType::HighDynamic, size, VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_SRC_BIT, "tmp_buffer", data); #else - tmp_buf.create(Buffer::kind::dynamic, size, VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_SRC_BIT, 
nullptr, data); + tmp_buf.Create(BufferType::HighDynamic, size, VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_SRC_BIT, nullptr, data); #endif - copyFromBuffer(tmp_buf); - tmp_buf.destroy(); + CopyFromBuffer(tmp_buf); + tmp_buf.Destroy(); } } diff --git a/runtime/Sources/Renderer/Command/CommandBuffer.cpp b/runtime/Sources/Renderer/Command/CommandBuffer.cpp index a2d0a25..6574682 100644 --- a/runtime/Sources/Renderer/Command/CommandBuffer.cpp +++ b/runtime/Sources/Renderer/Command/CommandBuffer.cpp @@ -1,33 +1,32 @@ /* ************************************************************************** */ /* */ /* ::: :::::::: */ -/* vk_cmd_buffer.cpp :+: :+: :+: */ +/* CommandBuffer.cpp :+: :+: :+: */ /* +:+ +:+ +:+ */ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/06 18:26:06 by maldavid #+# #+# */ -/* Updated: 2024/03/25 19:01:37 by maldavid ### ########.fr */ +/* Updated: 2024/04/23 18:02:20 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ -#include +#include -#include "vk_cmd_buffer.h" -#include -#include -#include -#include -#include -#include -#include +#include +#include +#include +#include +#include +#include +#include namespace mlx { - bool vector_push_back_if_not_found(std::vector& vector, CmdResource* res) + bool VectorPushBackIfNotFound(std::vector>& vector, NonOwningPtr res) { - auto it = std::find_if(vector.begin(), vector.end(), [=](const CmdResource* vres) + auto it = std::find_if(vector.begin(), vector.end(), [=](const NonOwningPtr vres) { - return vres->getUUID() == res->getUUID(); + return vres->GetUUID() == res->GetUUID(); }); if(it == vector.end()) @@ -38,113 +37,113 @@ namespace mlx return false; } - void CmdBuffer::init(kind type, CmdManager* manager) + void CmommanBuffer::Init(CommandBufferType type, NonOwningPtr manager) { - init(type, &manager->getCmdPool()); + Init(type, &manager->GetCmdPool()); } - void CmdBuffer::init(kind type, CmdPool* pool) + void CommandBuffer::Init(CommandBufferType type, NonOwningPtr pool) { MLX_PROFILE_FUNCTION(); - _type = type; - _pool = pool; + m_type = type; + m_pool = pool; - VkCommandBufferAllocateInfo allocInfo{}; - allocInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO; - allocInfo.commandPool = pool->get(); - allocInfo.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY; - allocInfo.commandBufferCount = 1; + VkCommandBufferAllocateInfo alloc_info{}; + alloc_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO; + alloc_info.commandPool = pool->get(); + alloc_info.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY; + alloc_info.commandBufferCount = 1; - VkResult res = vkAllocateCommandBuffers(Render_Core::get().getDevice().get(), &allocInfo, &_cmd_buffer); + VkResult res = vkAllocateCommandBuffers(RenderCore::Get().getDevice().get(), &allocInfo, &_cmd_buffer); if(res != VK_SUCCESS) - core::error::report(e_kind::fatal_error, "Vulkan : failed to allocate command buffer, %s", RCore::verbaliseResultVk(res)); + FatalError("Vulkan : failed to allocate command buffer, %s", RCore::verbaliseResultVk(res)); #ifdef DEBUG - core::error::report(e_kind::message, "Vulkan : created new command buffer"); + Message("Vulkan : created new command buffer"); #endif - _fence.init(); - _state = state::idle; + m_fence.init(); + state = CommandBufferState::Idle; } - void CmdBuffer::beginRecord(VkCommandBufferUsageFlags usage) + void CommandBuffer::BeginRecord(VkCommandBufferUsageFlags usage) { MLX_PROFILE_FUNCTION(); - if(!isInit()) - 
core::error::report(e_kind::fatal_error, "Vulkan : begenning record on un uninit command buffer"); - if(_state == state::recording) + if(!IsInit()) + FatalError("Vulkan : beginning record on an uninitialized command buffer"); + if(m_state == CommandBufferState::Recording) return; - VkCommandBufferBeginInfo beginInfo{}; - beginInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO; - beginInfo.flags = usage; - if(vkBeginCommandBuffer(_cmd_buffer, &beginInfo) != VK_SUCCESS) - core::error::report(e_kind::fatal_error, "Vulkan : failed to begin recording command buffer"); + VkCommandBufferBeginInfo begin_info{}; + begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO; + begin_info.flags = usage; + if(vkBeginCommandBuffer(m_cmd_buffer, &begin_info) != VK_SUCCESS) + FatalError("Vulkan : failed to begin recording command buffer"); - _state = state::recording; + m_state = CommandBufferState::Recording; } - void CmdBuffer::bindVertexBuffer(Buffer& buffer) noexcept + void CommandBuffer::BindVertexBuffer(Buffer& buffer) noexcept { MLX_PROFILE_FUNCTION(); - if(!isRecording()) + if(!IsRecording()) { - core::error::report(e_kind::warning, "Vulkan : trying to bind a vertex buffer to a non recording command buffer"); + Warning("Vulkan : trying to bind a vertex buffer to a non recording command buffer"); return; } - VkDeviceSize offset[] = { buffer.getOffset() }; - vkCmdBindVertexBuffers(_cmd_buffer, 0, 1, &buffer.get(), offset); + VkDeviceSize offset[] = { buffer.GetOffset() }; + vkCmdBindVertexBuffers(m_cmd_buffer, 0, 1, &buffer.Get(), offset); - buffer.recordedInCmdBuffer(); - vector_push_back_if_not_found(_cmd_resources, &buffer); + buffer.RecordedInCommandBuffer(); + VectorPushBackIfNotFound(m_cmd_resources, &buffer); } - void CmdBuffer::bindIndexBuffer(Buffer& buffer) noexcept + void CommandBuffer::BindIndexBuffer(Buffer& buffer) noexcept { MLX_PROFILE_FUNCTION(); - if(!isRecording()) + if(!IsRecording()) { - core::error::report(e_kind::warning, "Vulkan : trying to bind a index buffer to a non recording command buffer"); + Warning("Vulkan : trying to bind an index buffer to a non recording command buffer"); return; } - vkCmdBindIndexBuffer(_cmd_buffer, buffer.get(), buffer.getOffset(), VK_INDEX_TYPE_UINT16); + vkCmdBindIndexBuffer(m_cmd_buffer, buffer.Get(), buffer.GetOffset(), VK_INDEX_TYPE_UINT16); - buffer.recordedInCmdBuffer(); - vector_push_back_if_not_found(_cmd_resources, &buffer); + buffer.RecordedInCommandBuffer(); + VectorPushBackIfNotFound(m_cmd_resources, &buffer); } - void CmdBuffer::copyBuffer(Buffer& dst, Buffer& src) noexcept + void CommandBuffer::CopyBuffer(Buffer& dst, Buffer& src) noexcept { MLX_PROFILE_FUNCTION(); - if(!isRecording()) + if(!IsRecording()) { - core::error::report(e_kind::warning, "Vulkan : trying to do a buffer to buffer copy in a non recording command buffer"); + Warning("Vulkan : trying to do a buffer to buffer copy in a non recording command buffer"); return; } - preTransferBarrier(); + PreTransferBarrier(); - VkBufferCopy copyRegion{}; - copyRegion.size = src.getSize(); - vkCmdCopyBuffer(_cmd_buffer, src.get(), dst.get(), 1, &copyRegion); + VkBufferCopy copy_region{}; + copy_region.size = src.GetSize(); + vkCmdCopyBuffer(m_cmd_buffer, src.Get(), dst.Get(), 1, &copy_region); - postTransferBarrier(); + PostTransferBarrier(); - dst.recordedInCmdBuffer(); - src.recordedInCmdBuffer(); - vector_push_back_if_not_found(_cmd_resources, &dst); - vector_push_back_if_not_found(_cmd_resources, &src); + dst.RecordedInCommandBuffer(); + src.RecordedInCommandBuffer(); +
VectorPushBackIfNotFound(m_cmd_resources, &dst); + VectorPushBackIfNotFound(m_cmd_resources, &src); } - void CmdBuffer::copyBufferToImage(Buffer& buffer, Image& image) noexcept + void CommandBuffer::CopyBufferToImage(Buffer& buffer, Image& image) noexcept { MLX_PROFILE_FUNCTION(); - if(!isRecording()) + if(!IsRecording()) { - core::error::report(e_kind::warning, "Vulkan : trying to do a buffer to image copy in a non recording command buffer"); + Warning("Vulkan : trying to do a buffer to image copy in a non recording command buffer"); return; } - - preTransferBarrier(); + + PreTransferBarrier(); VkBufferImageCopy region{}; region.bufferOffset = 0; @@ -155,28 +154,28 @@ namespace mlx region.imageSubresource.baseArrayLayer = 0; region.imageSubresource.layerCount = 1; region.imageOffset = { 0, 0, 0 }; - region.imageExtent = { image.getWidth(), image.getHeight(), 1 }; + region.imageExtent = { image.GetWidth(), image.GetHeight(), 1 }; - vkCmdCopyBufferToImage(_cmd_buffer, buffer.get(), image.get(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &region); + vkCmdCopyBufferToImage(m_cmd_buffer, buffer.Get(), image.Get(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &region); - postTransferBarrier(); + PostTransferBarrier(); - image.recordedInCmdBuffer(); - buffer.recordedInCmdBuffer(); - vector_push_back_if_not_found(_cmd_resources, &image); - vector_push_back_if_not_found(_cmd_resources, &buffer); + image.RecordedInCommandBuffer(); + buffer.RecordedInCommandBuffer(); + VectorPushBackIfNotFound(m_cmd_resources, &image); + VectorPushBackIfNotFound(m_cmd_resources, &buffer); } - void CmdBuffer::copyImagetoBuffer(Image& image, Buffer& buffer) noexcept + void CommandBuffer::CopyImagetoBuffer(Image& image, Buffer& buffer) noexcept { MLX_PROFILE_FUNCTION(); - if(!isRecording()) + if(!IsRecording()) { - core::error::report(e_kind::warning, "Vulkan : trying to do an image to buffer copy in a non recording command buffer"); + Warning("Vulkan : trying to do an image to buffer copy in a non recording command buffer"); return; } - preTransferBarrier(); + PreTransferBarrier(); VkBufferImageCopy region{}; region.bufferOffset = 0; @@ -187,184 +186,180 @@ namespace mlx region.imageSubresource.baseArrayLayer = 0; region.imageSubresource.layerCount = 1; region.imageOffset = { 0, 0, 0 }; - region.imageExtent = { image.getWidth(), image.getHeight(), 1 }; + region.imageExtent = { image.GetWidth(), image.GetHeight(), 1 }; - vkCmdCopyImageToBuffer(_cmd_buffer, image.get(), VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, buffer.get(), 1, &region); + vkCmdCopyImageToBuffer(m_cmd_buffer, image.Get(), VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, buffer.Get(), 1, &region); - postTransferBarrier(); + PostTransferBarrier(); - image.recordedInCmdBuffer(); - buffer.recordedInCmdBuffer(); - vector_push_back_if_not_found(_cmd_resources, &buffer); - vector_push_back_if_not_found(_cmd_resources, &image); + image.RecordedInCommandBuffer(); + buffer.RecordedInCommandBuffer(); + VectorPushBackIfNotFound(m_cmd_resources, &buffer); + VectorPushBackIfNotFound(m_cmd_resources, &image); } - void CmdBuffer::transitionImageLayout(Image& image, VkImageLayout new_layout) noexcept + void CommandBuffer::TransitionImageLayout(Image& image, VkImageLayout new_layout) noexcept { MLX_PROFILE_FUNCTION(); - if(!isRecording()) + if(!IsRecording()) { - core::error::report(e_kind::warning, "Vulkan : trying to do an image layout transition in a non recording command buffer"); + Warning("Vulkan : trying to do an image layout transition in a non recording command buffer"); return; }
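The barrier built below takes its source and destination access masks from LayoutToAccessMask, which is referenced here but defined elsewhere in the engine. A small sketch of the kind of mapping such a helper usually performs; this table is an assumption for illustration and may differ from the engine's own:

#include <cstdio>
#include <vulkan/vulkan_core.h>

VkAccessFlags GuessAccessMask(VkImageLayout layout, bool is_destination)
{
	switch(layout)
	{
		case VK_IMAGE_LAYOUT_UNDEFINED:                return 0;
		case VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL:     return VK_ACCESS_TRANSFER_READ_BIT;
		case VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL:     return VK_ACCESS_TRANSFER_WRITE_BIT;
		case VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL: return VK_ACCESS_SHADER_READ_BIT;
		case VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL: return VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
		case VK_IMAGE_LAYOUT_PRESENT_SRC_KHR:          return is_destination ? VK_ACCESS_MEMORY_READ_BIT : 0;
		default:                                       return 0;
	}
}

int main()
{
	// e.g. transitioning a texture so shaders can sample it: the destination access is a shader read
	std::printf("dst access mask: 0x%x\n", GuessAccessMask(VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL, true));
	return 0;
}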
VkImageMemoryBarrier barrier{}; barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER; - barrier.oldLayout = image.getLayout(); + barrier.oldLayout = image.GetLayout(); barrier.newLayout = new_layout; barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED; barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED; - barrier.image = image.get(); - barrier.subresourceRange.aspectMask = isDepthFormat(image.getFormat()) ? VK_IMAGE_ASPECT_DEPTH_BIT : VK_IMAGE_ASPECT_COLOR_BIT; + barrier.image = image.Get(); + barrier.subresourceRange.aspectMask = IsDepthFormat(image.GetFormat()) ? VK_IMAGE_ASPECT_DEPTH_BIT : VK_IMAGE_ASPECT_COLOR_BIT; barrier.subresourceRange.baseMipLevel = 0; barrier.subresourceRange.levelCount = 1; barrier.subresourceRange.baseArrayLayer = 0; barrier.subresourceRange.layerCount = 1; - barrier.srcAccessMask = layoutToAccessMask(image.getLayout(), false); - barrier.dstAccessMask = layoutToAccessMask(new_layout, true); - if(isStencilFormat(image.getFormat())) + barrier.srcAccessMask = LayoutToAccessMask(image.GetLayout(), false); + barrier.dstAccessMask = LayoutToAccessMask(new_layout, true); + if(IsStencilFormat(image.GetFormat())) barrier.subresourceRange.aspectMask |= VK_IMAGE_ASPECT_STENCIL_BIT; - VkPipelineStageFlags sourceStage = 0; + VkPipelineStageFlags source_stage = 0; if(barrier.oldLayout == VK_IMAGE_LAYOUT_PRESENT_SRC_KHR) - sourceStage = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT; + source_stage = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT; else if(barrier.srcAccessMask != 0) - sourceStage = RCore::accessFlagsToPipelineStage(barrier.srcAccessMask, VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT); + source_stage = AccessFlagsToPipelineStage(barrier.srcAccessMask, VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT); else - sourceStage = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT; + source_stage = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT; - VkPipelineStageFlags destinationStage = 0; + VkPipelineStageFlags destination_stage = 0; if(barrier.newLayout == VK_IMAGE_LAYOUT_PRESENT_SRC_KHR) - destinationStage = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT; + destination_stage = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT; else if(barrier.dstAccessMask != 0) - destinationStage = RCore::accessFlagsToPipelineStage(barrier.dstAccessMask, VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT); + destination_stage = AccessFlagsToPipelineStage(barrier.dstAccessMask, VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT); else - destinationStage = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT; + destination_stage = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT; - vkCmdPipelineBarrier(_cmd_buffer, sourceStage, destinationStage, 0, 0, nullptr, 0, nullptr, 1, &barrier); + vkCmdPipelineBarrier(m_cmd_buffer, source_stage, destination_stage, 0, 0, nullptr, 0, nullptr, 1, &barrier); - image.recordedInCmdBuffer(); - vector_push_back_if_not_found(_cmd_resources, &image); + image.RecordedInCommandBuffer(); + VectorPushBackIfNotFound(m_cmd_resources, &image); } - void CmdBuffer::endRecord() + void CommandBuffer::EndRecord() { MLX_PROFILE_FUNCTION(); - if(!isInit()) - core::error::report(e_kind::fatal_error, "Vulkan : ending record on un uninit command buffer"); - if(_state != state::recording) + if(!IsInit()) + FatalError("Vulkan : ending record on un uninit command buffer"); + if(m_state != 
CommandBufferState::Recording) return; - if(vkEndCommandBuffer(_cmd_buffer) != VK_SUCCESS) - core::error::report(e_kind::fatal_error, "Vulkan : failed to end recording command buffer"); + if(vkEndCommandBuffer(m_cmd_buffer) != VK_SUCCESS) + FatalError("Vulkan : failed to end recording command buffer"); - _state = state::idle; + m_state = CommandBufferState::Idle; } - void CmdBuffer::submitIdle(bool shouldWaitForExecution) noexcept + void CommandBuffer::SubmitIdle(bool should_wait_for_execution) noexcept { MLX_PROFILE_FUNCTION(); - if(_type != kind::single_time) + if(m_type != CommandBufferType::SingleTime) { - core::error::report(e_kind::error, "Vulkan : try to perform an idle submit on a command buffer that is not single-time, this is not allowed"); + Error("Vulkan : try to perform an idle submit on a command buffer that is not single-time, this is not allowed"); return; } - _fence.reset(); + m_fence.Reset(); - VkSubmitInfo submitInfo{}; - submitInfo.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO; - submitInfo.commandBufferCount = 1; - submitInfo.pCommandBuffers = &_cmd_buffer; + VkSubmitInfo submit_info{}; + submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO; + submit_info.commandBufferCount = 1; + submit_info.pCommandBuffers = &m_cmd_buffer; - VkResult res = vkQueueSubmit(Render_Core::get().getQueue().getGraphic(), 1, &submitInfo, _fence.get()); + VkResult res = vkQueueSubmit(RenderCore::Get().GetQueue().GetGraphic(), 1, &submit_info, m_fence.Get()); if(res != VK_SUCCESS) - core::error::report(e_kind::fatal_error, "Vulkan error : failed to submit a single time command buffer, %s", RCore::verbaliseResultVk(res)); - _state = state::submitted; + FatalError("Vulkan error : failed to submit a single time command buffer, %", VerbaliseVkResult(res)); + m_state = CommandBufferState::Submitted; - if(shouldWaitForExecution) - waitForExecution(); + if(should_wait_for_execution) + WaitForExecution(); } - void CmdBuffer::submit(Semaphore* semaphores) noexcept + void CommandBuffer::Submit(NonOwningPtr signal, NonOwningPtr wait) noexcept { MLX_PROFILE_FUNCTION(); - std::array signalSemaphores; - std::array waitSemaphores; + std::array signal_semaphores; + std::array wait_semaphores; - if(semaphores != nullptr) - { - signalSemaphores[0] = semaphores->getRenderImageSemaphore(); - waitSemaphores[0] = semaphores->getImageSemaphore(); - } - else - { - signalSemaphores[0] = VK_NULL_HANDLE; - waitSemaphores[0] = VK_NULL_HANDLE; - } - VkPipelineStageFlags waitStages[] = { VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT }; + signal_semaphores[0] = (signal ? signal->Get() : VK_NULL_HANDLE); - _fence.reset(); + wait_semaphores[0] = (wait ? wait->Get() : VK_NULL_HANDLE); + VkPipelineStageFlags wait_stages[] = { VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT }; - VkSubmitInfo submitInfo{}; - submitInfo.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO; - submitInfo.waitSemaphoreCount = (semaphores == nullptr ? 0 : waitSemaphores.size()); - submitInfo.pWaitSemaphores = waitSemaphores.data(); - submitInfo.pWaitDstStageMask = waitStages; - submitInfo.commandBufferCount = 1; - submitInfo.pCommandBuffers = &_cmd_buffer; - submitInfo.signalSemaphoreCount = (semaphores == nullptr ? 0 : signalSemaphores.size()); - submitInfo.pSignalSemaphores = signalSemaphores.data(); + m_fence.Reset(); - VkResult res = vkQueueSubmit(Render_Core::get().getQueue().getGraphic(), 1, &submitInfo, _fence.get()); + VkSubmitInfo submit_info{}; + submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO; + submit_info.waitSemaphoreCount = (!wait ? 
0 : wait_semaphores.size()); + submit_info.pWaitSemaphores = wait_semaphores.data(); + submit_info.pWaitDstStageMask = wait_stages; + submit_info.commandBufferCount = 1; + submit_info.pCommandBuffers = &m_cmd_buffer; + submit_info.signalSemaphoreCount = (!signal ? 0 : signal_semaphores.size()); + submit_info.pSignalSemaphores = signal_semaphores.data(); + + VkResult res = vkQueueSubmit(RenderCore::Get().GetQueue().GetGraphic(), 1, &submit_info, m_fence.get()); if(res != VK_SUCCESS) - core::error::report(e_kind::fatal_error, "Vulkan error : failed to submit draw command buffer, %s", RCore::verbaliseResultVk(res)); - _state = state::submitted; + FatalError("Vulkan error : failed to submit draw command buffer, %", VerbaliseVkResult(res)); + m_state = CommandBufferState::Submitted; } - void CmdBuffer::updateSubmitState() noexcept + void CommandBuffer::UpdateSubmitState() noexcept { MLX_PROFILE_FUNCTION(); - if(!_fence.isReady()) + if(!m_fence.IsReady()) return; - for(CmdResource* res : _cmd_resources) - res->removedFromCmdBuffer(); - _cmd_resources.clear(); - _state = state::ready; + for(NonOwningPtr res : m_cmd_resources) + { + if(res) + res->RemovedFromCommandBuffer(); + } + m_cmd_resources.clear(); + m_state = CommandBufferState::Ready; } - void CmdBuffer::preTransferBarrier() noexcept + void CommandBuffer::PreTransferBarrier() noexcept { MLX_PROFILE_FUNCTION(); - VkMemoryBarrier memoryBarrier{}; - memoryBarrier.sType = VK_STRUCTURE_TYPE_MEMORY_BARRIER; - memoryBarrier.pNext = nullptr; - memoryBarrier.srcAccessMask = 0U; - memoryBarrier.dstAccessMask = VK_ACCESS_TRANSFER_READ_BIT; + VkMemoryBarrier memory_barrier{}; + memory_barrier.sType = VK_STRUCTURE_TYPE_MEMORY_BARRIER; + memory_barrier.pNext = nullptr; + memory_barrier.srcAccessMask = 0U; + memory_barrier.dstAccessMask = VK_ACCESS_TRANSFER_READ_BIT; - vkCmdPipelineBarrier(_cmd_buffer, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 1, &memoryBarrier, 0, nullptr, 0, nullptr); + vkCmdPipelineBarrier(m_cmd_buffer, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 1, &memory_barrier, 0, nullptr, 0, nullptr); } - void CmdBuffer::postTransferBarrier() noexcept + void CommandBuffer::PostTransferBarrier() noexcept { MLX_PROFILE_FUNCTION(); - VkMemoryBarrier memoryBarrier{}; - memoryBarrier.sType = VK_STRUCTURE_TYPE_MEMORY_BARRIER; - memoryBarrier.pNext = nullptr; - memoryBarrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT; - memoryBarrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_UNIFORM_READ_BIT; + VkMemoryBarrier memory_barrier{}; + memory_barrier.sType = VK_STRUCTURE_TYPE_MEMORY_BARRIER; + memory_barrier.pNext = nullptr; + memory_barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT; + memory_barrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_UNIFORM_READ_BIT; - vkCmdPipelineBarrier(_cmd_buffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, 0, 1, &memoryBarrier, 0, nullptr, 0, nullptr); + vkCmdPipelineBarrier(m_cmd_buffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, 0, 1, &memory_barrier, 0, nullptr, 0, nullptr); } - void CmdBuffer::destroy() noexcept + void CommandBuffer::Destroy() noexcept { MLX_PROFILE_FUNCTION(); - _fence.destroy(); - _cmd_buffer = VK_NULL_HANDLE; - _state = state::uninit; + m_fence.Destroy(); + m_cmd_buffer = VK_NULL_HANDLE; + m_state = 
CommandBufferState::Uninit; #ifdef DEBUG - core::error::report(e_kind::message, "Vulkan : destroyed command buffer"); + Message("Vulkan : destroyed command buffer"); #endif } } diff --git a/runtime/Sources/Renderer/Command/CommandManager.cpp b/runtime/Sources/Renderer/Command/CommandManager.cpp index 9de7d82..df6fab9 100644 --- a/runtime/Sources/Renderer/Command/CommandManager.cpp +++ b/runtime/Sources/Renderer/Command/CommandManager.cpp @@ -1,42 +1,42 @@ /* ************************************************************************** */ /* */ /* ::: :::::::: */ -/* cmd_manager.cpp :+: :+: :+: */ +/* CommandManager.cpp :+: :+: :+: */ /* +:+ +:+ +:+ */ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/04/02 17:50:52 by maldavid #+# #+# */ -/* Updated: 2024/03/25 19:01:30 by maldavid ### ########.fr */ +/* Updated: 2024/04/23 14:55:04 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ -#include +#include -#include +#include namespace mlx { - void CmdManager::init() noexcept + void CommandManager::Init() noexcept { - _cmd_pool.init(); + m_cmd_pool.Init(); for(int i = 0; i < MAX_FRAMES_IN_FLIGHT; i++) - _cmd_buffers[i].init(CmdBuffer::kind::long_time, this); + m_cmd_buffers[i].Init(CommandBufferType::LongTime, this); } - void CmdManager::beginRecord(int active_image_index) + void CommandManager::BeginRecord(int active_image_index) { - _cmd_buffers[active_image_index].beginRecord(); + m_cmd_buffers[active_image_index].BeginRecord(); } - void CmdManager::endRecord(int active_image_index) + void CommandManager::EndRecord(int active_image_index) { - _cmd_buffers[active_image_index].endRecord(); + m_cmd_buffers[active_image_index].EndRecord(); } - void CmdManager::destroy() noexcept + void CommandManager::Destroy() noexcept { for(int i = 0; i < MAX_FRAMES_IN_FLIGHT; i++) - _cmd_buffers[i].destroy(); - _cmd_pool.destroy(); + m_cmd_buffers[i].Destroy(); + m_cmd_pool.Destroy(); } } diff --git a/runtime/Sources/Renderer/Command/CommandPool.cpp b/runtime/Sources/Renderer/Command/CommandPool.cpp index fbce122..af576a7 100644 --- a/runtime/Sources/Renderer/Command/CommandPool.cpp +++ b/runtime/Sources/Renderer/Command/CommandPool.cpp @@ -1,37 +1,37 @@ /* ************************************************************************** */ /* */ /* ::: :::::::: */ -/* vk_cmd_pool.cpp :+: :+: :+: */ +/* CommandPool.cpp :+: :+: :+: */ /* +:+ +:+ +:+ */ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/06 18:24:33 by maldavid #+# #+# */ -/* Updated: 2024/03/25 19:01:41 by maldavid ### ########.fr */ +/* Updated: 2024/04/23 14:57:15 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ -#include +#include -#include "vk_cmd_pool.h" -#include +#include +#include namespace mlx { - void CmdPool::init() + void CommandPool::Init() { - VkCommandPoolCreateInfo poolInfo{}; - poolInfo.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO; - poolInfo.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT; - poolInfo.queueFamilyIndex = Render_Core::get().getQueue().getFamilies().graphics_family.value(); + VkCommandPoolCreateInfo pool_info{}; + pool_info.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO; + pool_info.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT; + pool_info.queueFamilyIndex = RenderCore::Get().GetQueue().GetFamilies().graphics_family.value(); - VkResult res = vkCreateCommandPool(Render_Core::get().getDevice().get(), &poolInfo, nullptr, 
&_cmd_pool); + VkResult res = vkCreateCommandPool(RenderCore::Get().GetDevice().Get(), &pool_info, nullptr, &m_cmd_pool); if(res != VK_SUCCESS) - core::error::report(e_kind::fatal_error, "Vulkan : failed to create command pool, %s", RCore::verbaliseResultVk(res)); + FatalError("Vulkan : failed to create command pool, %", VerbaliseVkResult(res)); } - void CmdPool::destroy() noexcept + void CommandPool::Destroy() noexcept { - vkDestroyCommandPool(Render_Core::get().getDevice().get(), _cmd_pool, nullptr); - _cmd_pool = VK_NULL_HANDLE; + vkDestroyCommandPool(RenderCore::Get().GetDevice().Get(), m_cmd_pool, nullptr); + m_cmd_pool = VK_NULL_HANDLE; } } diff --git a/runtime/Sources/Renderer/Command/SingleTimeCommandManager.cpp b/runtime/Sources/Renderer/Command/SingleTimeCommandManager.cpp index 7bc96f1..b9fdae4 100644 --- a/runtime/Sources/Renderer/Command/SingleTimeCommandManager.cpp +++ b/runtime/Sources/Renderer/Command/SingleTimeCommandManager.cpp @@ -1,64 +1,64 @@ /* ************************************************************************** */ /* */ /* ::: :::::::: */ -/* single_time_cmd_manager.cpp :+: :+: :+: */ +/* SingleTimeCommandManager.cpp :+: :+: :+: */ /* +:+ +:+ +:+ */ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/12/15 19:57:49 by maldavid #+# #+# */ -/* Updated: 2024/03/25 19:01:33 by maldavid ### ########.fr */ +/* Updated: 2024/04/23 15:05:19 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ -#include +#include -#include -#include +#include +#include namespace mlx { - void SingleTimeCmdManager::init() noexcept + void SingleTimeCmdManager::Init() noexcept { - _pool.init(); + m_pool.init(); for(int i = 0; i < BASE_POOL_SIZE; i++) { - _buffers.emplace_back(); - _buffers.back().init(CmdBuffer::kind::single_time, &_pool); + m_buffers.emplace_back(); + m_buffers.back().Init(CommandBufferType::SingleTime, &m_pool); } } - CmdBuffer& SingleTimeCmdManager::getCmdBuffer() noexcept + CommandBuffer& SingleTimeCmdManager::GetCmdBuffer() noexcept { - for(CmdBuffer& buf : _buffers) + for(CmdBuffer& buf : m_buffers) { - if(buf.isReadyToBeUsed()) + if(buf.IsReadyToBeUsed()) { buf.reset(); return buf; } } - _buffers.emplace_back().init(CmdBuffer::kind::single_time, &_pool); - return _buffers.back(); + m_buffers.emplace_back().Init(CommandBufferType::SingleTime, &m_pool); + return m_buffers.back(); } - void SingleTimeCmdManager::updateSingleTimesCmdBuffersSubmitState() noexcept + void SingleTimeCmdManager::UpdateSingleTimesCmdBuffersSubmitState() noexcept { - for(CmdBuffer& cmd : _buffers) - cmd.updateSubmitState(); + for(CmdBuffer& cmd : m_buffers) + cmd.UpdateSubmitState(); } - void SingleTimeCmdManager::waitForAllExecutions() noexcept + void SingleTimeCmdManager::WaitForAllExecutions() noexcept { - for(CmdBuffer& cmd : _buffers) - cmd.waitForExecution(); + for(CmdBuffer& cmd : m_buffers) + cmd.WaitForExecution(); } - void SingleTimeCmdManager::destroy() noexcept + void SingleTimeCmdManager::Destroy() noexcept { - std::for_each(_buffers.begin(), _buffers.end(), [](CmdBuffer& buf) + std::for_each(m_buffers.begin(), m_buffers.end(), [](CommandBuffer& buf) { - buf.destroy(); + buf.Destroy(); }); - _pool.destroy(); + m_pool.Destroy(); } } diff --git a/runtime/Sources/Renderer/Core/Device.cpp b/runtime/Sources/Renderer/Core/Device.cpp index 7ed8850..d26e966 100644 --- a/runtime/Sources/Renderer/Core/Device.cpp +++ b/runtime/Sources/Renderer/Core/Device.cpp @@ -1,105 +1,100 @@ /* 
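SingleTimeCmdManager::GetCmdBuffer above hands back the first command buffer whose previous submission has finished and grows the pool when every buffer is still busy. A Vulkan-free sketch of that reuse strategy, with a plain ready flag standing in for the fence check and all names illustrative:

#include <cstdio>
#include <vector>

struct ToyCmdBuffer
{
	bool ready = true;
	void Reset() { /* a real implementation would reset the underlying VkCommandBuffer here */ }
};

struct ToySingleTimeManager
{
	std::vector<ToyCmdBuffer> buffers = std::vector<ToyCmdBuffer>(4); // plays the role of the BASE_POOL_SIZE preallocated buffers

	ToyCmdBuffer& GetCmdBuffer()
	{
		for(ToyCmdBuffer& buf : buffers)
		{
			if(buf.ready)
			{
				buf.Reset();
				buf.ready = false; // in the engine this flips back once the buffer's fence signals
				return buf;
			}
		}
		ToyCmdBuffer& fresh = buffers.emplace_back(); // every buffer is busy: grow the pool
		fresh.ready = false;
		return fresh;
	}
};

int main()
{
	ToySingleTimeManager manager;
	for(int i = 0; i < 6; i++)
		manager.GetCmdBuffer();
	std::printf("pool grew to %zu buffers\n", manager.buffers.size());
	return 0;
}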
************************************************************************** */ /* */ /* ::: :::::::: */ -/* vk_device.cpp :+: :+: :+: */ +/* Device.cpp :+: :+: :+: */ /* +:+ +:+ +:+ */ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/08 19:14:29 by maldavid #+# #+# */ -/* Updated: 2024/03/25 22:31:54 by maldavid ### ########.fr */ +/* Updated: 2024/04/23 18:10:08 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ -#include +#include -#include "render_core.h" +#include namespace mlx { - const std::vector deviceExtensions = {VK_KHR_SWAPCHAIN_EXTENSION_NAME}; + const std::vector device_extensions = { VK_KHR_SWAPCHAIN_EXTENSION_NAME }; - void Device::init() + void Device::Init() { - pickPhysicalDevice(); + PickPhysicalDevice(); - Queues::QueueFamilyIndices indices = Render_Core::get().getQueue().getFamilies(); + Queues::QueueFamilyIndices indices = RenderCore::Get().GetQueue().GetFamilies(); - std::vector queueCreateInfos; - std::set uniqueQueueFamilies = { indices.graphics_family.value(), indices.present_family.value() }; + std::vector queue_create_infos; + std::set unique_queue_families = { indices.graphics_family.value(), indices.present_family.value() }; - float queuePriority = 1.0f; - for(std::uint32_t queueFamily : uniqueQueueFamilies) + float queue_priority = 1.0f; + for(std::uint32_t queue_family : unique_queue_families) { - VkDeviceQueueCreateInfo queueCreateInfo{}; - queueCreateInfo.sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO; - queueCreateInfo.queueFamilyIndex = queueFamily; - queueCreateInfo.queueCount = 1; - queueCreateInfo.pQueuePriorities = &queuePriority; - queueCreateInfos.push_back(queueCreateInfo); + VkDeviceQueueCreateInfo queue_create_info{}; + queue_create_info.sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO; + queue_create_info.queueFamilyIndex = queue_family; + queue_create_info.queueCount = 1; + queue_create_info.pQueuePriorities = &queue_priority; + queue_create_infos.push_back(queue_create_info); } - VkPhysicalDeviceFeatures deviceFeatures{}; + VkPhysicalDeviceFeatures device_features{}; - VkDeviceCreateInfo createInfo{}; - createInfo.sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO; - - createInfo.queueCreateInfoCount = static_cast(queueCreateInfos.size()); - createInfo.pQueueCreateInfos = queueCreateInfos.data(); - - createInfo.pEnabledFeatures = &deviceFeatures; - - createInfo.enabledExtensionCount = static_cast(deviceExtensions.size()); - createInfo.ppEnabledExtensionNames = deviceExtensions.data(); - createInfo.enabledLayerCount = 0; + VkDeviceCreateInfo create_info{}; + create_info.sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO; + create_info.queueCreateInfoCount = static_cast(queue_create_infos.size()); + create_info.pQueueCreateInfos = queue_create_infos.data(); + create_info.pEnabledFeatures = &device_features; + create_info.enabledExtensionCount = static_cast(device_extensions.size()); + create_info.ppEnabledExtensionNames = device_extensions.data(); + create_info.enabledLayerCount = 0; VkResult res; - if((res = vkCreateDevice(_physical_device, &createInfo, nullptr, &_device)) != VK_SUCCESS) - core::error::report(e_kind::fatal_error, "Vulkan : failed to create logcal device, %s", RCore::verbaliseResultVk(res)); - #ifdef DEBUG - core::error::report(e_kind::message, "Vulkan : created new logical device"); - #endif + if((res = vkCreateDevice(m_physical_device, &create_info, nullptr, &m_device)) != VK_SUCCESS) + FatalError("Vulkan : failed to create logcal device, %", 
VerbaliseVkResult(res)); + DebugLog("Vulkan : created new logical device"); } - void Device::pickPhysicalDevice() + void Device::PickPhysicalDevice() { - std::uint32_t deviceCount = 0; - vkEnumeratePhysicalDevices(Render_Core::get().getInstance().get(), &deviceCount, nullptr); + std::uint32_t device_count = 0; + vkEnumeratePhysicalDevices(RenderCore::Get().GetInstance().Get(), &device_count, nullptr); - if(deviceCount == 0) - core::error::report(e_kind::fatal_error, "Vulkan : failed to find GPUs with Vulkan support"); + if(device_count == 0) + FatalError("Vulkan : failed to find GPUs with Vulkan support"); - std::vector devices(deviceCount); - vkEnumeratePhysicalDevices(Render_Core::get().getInstance().get(), &deviceCount, devices.data()); + std::vector devices(device_count); + vkEnumeratePhysicalDevices(RenderCore::Get().GetInstance().Get(), &device_count, devices.data()); std::multimap devices_score; for(const auto& device : devices) { - int score = deviceScore(device); + int score = DeviceScore(device); devices_score.insert(std::make_pair(score, device)); } if(devices_score.rbegin()->first > 0) - _physical_device = devices_score.rbegin()->second; + m_physical_device = devices_score.rbegin()->second; else - core::error::report(e_kind::fatal_error, "Vulkan : failed to find a suitable GPU"); + FatalError("Vulkan : failed to find a suitable GPU"); #ifdef DEBUG VkPhysicalDeviceProperties props; - vkGetPhysicalDeviceProperties(_physical_device, &props); - core::error::report(e_kind::message, "Vulkan : picked a physical device, %s", props.deviceName); + vkGetPhysicalDeviceProperties(m_physical_device, &props); + DebugLog("Vulkan : picked a physical device, %s", props.deviceName); #endif - Render_Core::get().getQueue().findQueueFamilies(_physical_device); // update queue indicies to current physical device + RenderCore::Get().GetQueue().FindQueueFamilies(m_physical_device); // update queue indices to current physical device } - int Device::deviceScore(VkPhysicalDevice device) + int Device::DeviceScore(VkPhysicalDevice device) { - Queues::QueueFamilyIndices indices = Render_Core::get().getQueue().findQueueFamilies(device); - bool extensionsSupported = checkDeviceExtensionSupport(device); + Queues::QueueFamilyIndices indices = RenderCore::Get().GetQueue().FindQueueFamilies(device); + bool extensions_supported = CheckDeviceExtensionSupport(device); VkPhysicalDeviceProperties props; vkGetPhysicalDeviceProperties(device, &props); - if(!indices.isComplete() || !extensionsSupported) + if(!indices.IsComplete() || !extensions_supported) return -1; VkPhysicalDeviceFeatures features; @@ -122,28 +117,26 @@ return score; } - bool Device::checkDeviceExtensionSupport(VkPhysicalDevice device) + bool Device::CheckDeviceExtensionSupport(VkPhysicalDevice device) { - std::uint32_t extensionCount; - vkEnumerateDeviceExtensionProperties(device, nullptr, &extensionCount, nullptr); + std::uint32_t extension_count; + vkEnumerateDeviceExtensionProperties(device, nullptr, &extension_count, nullptr); - std::vector availableExtensions(extensionCount); - vkEnumerateDeviceExtensionProperties(device, nullptr, &extensionCount, availableExtensions.data()); + std::vector available_extensions(extension_count); + vkEnumerateDeviceExtensionProperties(device, nullptr, &extension_count, available_extensions.data()); - std::set requiredExtensions(deviceExtensions.begin(), deviceExtensions.end()); + std::set required_extensions(device_extensions.begin(), device_extensions.end()); - for(const auto& extension :
availableExtensions) - requiredExtensions.erase(extension.extensionName); + for(const auto& extension : available_extensions) + required_extensions.erase(extension.extensionName); - return requiredExtensions.empty(); + return required_extensions.empty(); } - void Device::destroy() noexcept + void Device::Destroy() noexcept { - vkDestroyDevice(_device, nullptr); - _device = VK_NULL_HANDLE; - #ifdef DEBUG - core::error::report(e_kind::message, "Vulkan : destroyed a logical device"); - #endif + vkDestroyDevice(m_device, nullptr); + m_device = VK_NULL_HANDLE; + DebugLog("Vulkan : destroyed a logical device"); } } diff --git a/runtime/Sources/Renderer/Core/Fence.cpp b/runtime/Sources/Renderer/Core/Fence.cpp index db3d8e2..7db2b3d 100644 --- a/runtime/Sources/Renderer/Core/Fence.cpp +++ b/runtime/Sources/Renderer/Core/Fence.cpp @@ -1,58 +1,54 @@ /* ************************************************************************** */ /* */ /* ::: :::::::: */ -/* vk_fence.cpp :+: :+: :+: */ +/* Fence.cpp :+: :+: :+: */ /* +:+ +:+ +:+ */ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/04/02 17:53:06 by maldavid #+# #+# */ -/* Updated: 2024/03/25 19:02:14 by maldavid ### ########.fr */ +/* Updated: 2024/04/23 18:13:09 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ -#include +#include -#include -#include +#include +#include namespace mlx { - void Fence::init() + void Fence::Init() { - VkFenceCreateInfo fenceInfo{}; - fenceInfo.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO; - fenceInfo.flags = VK_FENCE_CREATE_SIGNALED_BIT; + VkFenceCreateInfo fence_info{}; + fence_info.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO; + fence_info.flags = VK_FENCE_CREATE_SIGNALED_BIT; VkResult res; - if((res = vkCreateFence(Render_Core::get().getDevice().get(), &fenceInfo, nullptr, &_fence)) != VK_SUCCESS) - core::error::report(e_kind::fatal_error, "Vulkan : failed to create a synchronization object (fence), %s", RCore::verbaliseResultVk(res)); - #ifdef DEBUG - core::error::report(e_kind::message, "Vulkan : created new fence"); - #endif + if((res = vkCreateFence(RenderCore::Get().GetDevice().Get(), &fence_info, nullptr, &m_fence)) != VK_SUCCESS) + FatalError("Vulkan : failed to create a synchronization object (fence), %", VerbaliseVkResult(res)); + DebugLog("Vulkan : created new fence"); } - void Fence::wait() noexcept + void Fence::Wait() noexcept { - vkWaitForFences(Render_Core::get().getDevice().get(), 1, &_fence, VK_TRUE, UINT64_MAX); + vkWaitForFences(RenderCore::Get().GetDevice().Get(), 1, &m_fence, VK_TRUE, UINT64_MAX); } - void Fence::reset() noexcept + void Fence::Reset() noexcept { - vkResetFences(Render_Core::get().getDevice().get(), 1, &_fence); + vkResetFences(RenderCore::Get().GetDevice().Get(), 1, &m_fence); } - bool Fence::isReady() const noexcept + bool Fence::IsReady() const noexcept { - return vkGetFenceStatus(Render_Core::get().getDevice().get(), _fence) == VK_SUCCESS; + return vkGetFenceStatus(RenderCore::Get().GetDevice().Get(), m_fence) == VK_SUCCESS; } void Fence::destroy() noexcept { - if(_fence != VK_NULL_HANDLE) - vkDestroyFence(Render_Core::get().getDevice().get(), _fence, nullptr); - _fence = VK_NULL_HANDLE; - #ifdef DEBUG - core::error::report(e_kind::message, "Vulkan : destroyed fence"); - #endif + if(m_fence != VK_NULL_HANDLE) + vkDestroyFence(RenderCore::Get().GetDevice().Get(), m_fence, nullptr); + m_fence = VK_NULL_HANDLE; + DebugLog("Vulkan : destroyed fence"); } } diff --git 
a/runtime/Sources/Renderer/Core/Instance.cpp b/runtime/Sources/Renderer/Core/Instance.cpp index ad758ae..694e3b2 100644 --- a/runtime/Sources/Renderer/Core/Instance.cpp +++ b/runtime/Sources/Renderer/Core/Instance.cpp @@ -1,63 +1,67 @@ /* ************************************************************************** */ /* */ /* ::: :::::::: */ -/* vk_instance.cpp :+: :+: :+: */ +/* Instance.cpp :+: :+: :+: */ /* +:+ +:+ +:+ */ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/08 19:04:21 by maldavid #+# #+# */ -/* Updated: 2024/03/25 23:10:37 by maldavid ### ########.fr */ +/* Updated: 2024/04/23 18:43:47 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ -#include +#include -#include "vk_instance.h" -#include "render_core.h" -#include +#include +#include namespace mlx { - void Instance::init() + void Instance::Init() { - VkApplicationInfo appInfo{}; - appInfo.sType = VK_STRUCTURE_TYPE_APPLICATION_INFO; - appInfo.pEngineName = "MacroLibX"; - appInfo.engineVersion = VK_MAKE_VERSION(1, 3, 1); - appInfo.apiVersion = VK_API_VERSION_1_2; + std::uint32_t api_version = std::min(volkGetInstanceVersion(), MLX_TARGET_VULKAN_API_VERSION); - auto extensions = getRequiredExtensions(); + if(api_version == 0) + FatalError("Vulkan API is not supported by this driver"); - VkInstanceCreateInfo createInfo{}; - createInfo.sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO; - createInfo.pApplicationInfo = &appInfo; - createInfo.enabledExtensionCount = static_cast(extensions.size()); - createInfo.ppEnabledExtensionNames = extensions.data(); - createInfo.enabledLayerCount = 0; // will be replaced if validation layers are enabled - createInfo.pNext = nullptr; + m_instance_version = api_version; - VkDebugUtilsMessengerCreateInfoEXT debugCreateInfo; - if constexpr(enableValidationLayers) + VkApplicationInfo app_info{}; + app_info.sType = VK_STRUCTURE_TYPE_APPLICATION_INFO; + app_info.pEngineName = "MacroLibX"; + app_info.engineVersion = MLX_VERSION; + app_info.apiVersion = api_version; + + auto extensions = GetRequiredExtensions(); + + VkInstanceCreateInfo create_info{}; + create_info.sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO; + create_info.pApplicationInfo = &app_info; + create_info.enabledExtensionCount = static_cast(extensions.size()); + create_info.ppEnabledExtensionNames = extensions.data(); + create_info.enabledLayerCount = 0; // will be replaced if validation layers are enabled + create_info.pNext = nullptr; + + VkDebugUtilsMessengerCreateInfoEXT debug_create_info; + if constexpr(enable_validation_layers) { - if(Render_Core::get().getLayers().checkValidationLayerSupport()) + if(RenderCore::Get().GetLayers().CheckValidationLayerSupport()) { - createInfo.enabledLayerCount = static_cast(validationLayers.size()); - createInfo.ppEnabledLayerNames = validationLayers.data(); - Render_Core::get().getLayers().populateDebugMessengerCreateInfo(debugCreateInfo); - createInfo.pNext = static_cast(&debugCreateInfo); + create_info.enabledLayerCount = static_cast(validation_layers.size()); + create_info.ppEnabledLayerNames = validation_layers.data(); + RenderCore::Get().GetLayers().PopulateDebugMessengerCreateInfo(debug_create_info); + create_info.pNext = static_cast(&debug_create_info); } } VkResult res; - if((res = vkCreateInstance(&createInfo, nullptr, &_instance)) != VK_SUCCESS) - core::error::report(e_kind::fatal_error, "Vulkan : failed to create Vulkan instance, %s", RCore::verbaliseResultVk(res)); - volkLoadInstance(_instance); - 
#ifdef DEBUG - core::error::report(e_kind::message, "Vulkan : created new instance"); - #endif + if((res = vkCreateInstance(&create_info, nullptr, &m_instance)) != VK_SUCCESS) + FatalError("Vulkan : failed to create Vulkan instance, %", VerbaliseVkResult(res)); + volkLoadInstance(m_instance); + DebugLog("Vulkan : created new instance"); } - std::vector Instance::getRequiredExtensions() + std::vector Instance::GetRequiredExtensions() { std::uint32_t glfw_extension_count = 0; const char** glfw_extensions = glfwGetRequiredInstanceExtensions(&glfw_extension_count); @@ -75,12 +79,10 @@ namespace mlx return extensions; } - void Instance::destroy() noexcept + void Instance::Destroy() noexcept { - vkDestroyInstance(_instance, nullptr); - _instance = VK_NULL_HANDLE; - #ifdef DEBUG - core::error::report(e_kind::message, "Vulkan : destroyed an instance"); - #endif + vkDestroyInstance(m_instance, nullptr); + m_instance = VK_NULL_HANDLE; + DebugLog("Vulkan : destroyed an instance"); } } diff --git a/runtime/Sources/Renderer/Core/Memory.cpp b/runtime/Sources/Renderer/Core/Memory.cpp index 7c1568e..80ce793 100644 --- a/runtime/Sources/Renderer/Core/Memory.cpp +++ b/runtime/Sources/Renderer/Core/Memory.cpp @@ -1,12 +1,12 @@ /* ************************************************************************** */ /* */ /* ::: :::::::: */ -/* memory.cpp :+: :+: :+: */ +/* Memory.cpp :+: :+: :+: */ /* +:+ +:+ +:+ */ /* By: kbz_8 +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/10/20 22:02:37 by kbz_8 #+# #+# */ -/* Updated: 2024/03/25 19:27:44 by maldavid ### ########.fr */ +/* Updated: 2024/04/23 18:49:10 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -17,7 +17,6 @@ #define VK_NO_PROTOTYPES #define VMA_STATIC_VULKAN_FUNCTIONS 0 #define VMA_DYNAMIC_VULKAN_FUNCTIONS 0 -#define VMA_VULKAN_VERSION 1002000 #define VMA_ASSERT(expr) ((void)0) #define VMA_IMPLEMENTATION @@ -39,11 +38,8 @@ #include #endif -#include - -#include -#include -#include +#include +#include namespace mlx { @@ -67,99 +63,99 @@ namespace mlx vma_vulkan_func.vkMapMemory = vkMapMemory; vma_vulkan_func.vkUnmapMemory = vkUnmapMemory; vma_vulkan_func.vkCmdCopyBuffer = vkCmdCopyBuffer; - vma_vulkan_func.vkGetBufferMemoryRequirements2KHR = vkGetBufferMemoryRequirements2; - vma_vulkan_func.vkGetImageMemoryRequirements2KHR = vkGetImageMemoryRequirements2; - vma_vulkan_func.vkBindBufferMemory2KHR = vkBindBufferMemory2; - vma_vulkan_func.vkBindImageMemory2KHR = vkBindImageMemory2; - vma_vulkan_func.vkGetPhysicalDeviceMemoryProperties2KHR = vkGetPhysicalDeviceMemoryProperties2; +#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000 + vma_vulkan_func.vkGetBufferMemoryRequirements2KHR = vkGetBufferMemoryRequirements2, + vma_vulkan_func.vkGetImageMemoryRequirements2KHR = vkGetImageMemoryRequirements2, +#endif +#if VMA_BIND_MEMORY2 || VMA_VULKAN_VERSION >= 1001000 + vma_vulkan_func.vkBindBufferMemory2KHR = vkBindBufferMemory2, + vma_vulkan_func.vkBindImageMemory2KHR = vkBindImageMemory2, +#endif +#if VMA_MEMORY_BUDGET || VMA_VULKAN_VERSION >= 1001000 + vma_vulkan_func.vkGetPhysicalDeviceMemoryProperties2KHR = vkGetPhysicalDeviceMemoryProperties2, +#endif +#if VMA_VULKAN_VERSION >= 1003000 + vma_vulkan_func.vkGetDeviceBufferMemoryRequirements = vkGetDeviceBufferMemoryRequirements, + vma_vulkan_func.vkGetDeviceImageMemoryRequirements = vkGetDeviceImageMemoryRequirements, +#endif - VmaAllocatorCreateInfo allocatorCreateInfo{}; - allocatorCreateInfo.vulkanApiVersion = 
VK_API_VERSION_1_2; - allocatorCreateInfo.physicalDevice = Render_Core::get().getDevice().getPhysicalDevice(); - allocatorCreateInfo.device = Render_Core::get().getDevice().get(); - allocatorCreateInfo.instance = Render_Core::get().getInstance().get(); - allocatorCreateInfo.pVulkanFunctions = &vma_vulkan_func; + VmaAllocatorCreateInfo allocator_create_info{}; + allocator_create_info.vulkanApiVersion = RenderCore::Get().GetInstance().GetInstanceVersion(); + allocator_create_info.physicalDevice = RenderCore::Get().GetDevice().GetPhysicalDevice(); + allocator_create_info.device = RenderCore::Get().GetDevice().Get(); + allocator_create_info.instance = RenderCore::Get().GetInstance().Get(); + allocator_create_info.pVulkanFunctions = &vma_vulkan_func; - VkResult res = vmaCreateAllocator(&allocatorCreateInfo, &_allocator); + VkResult res = vmaCreateAllocator(&allocator_create_info, &m_allocator); if(res != VK_SUCCESS) - core::error::report(e_kind::fatal_error, "Graphics allocator : failed to create graphics memory allocator, %s", RCore::verbaliseResultVk(res)); - #ifdef DEBUG - core::error::report(e_kind::message, "Graphics allocator : created new allocator"); - #endif + FatalError("Graphics allocator : failed to create graphics memory allocator, %", VerbaliseVkResult(res)); + DebugLog("Graphics allocator : created new allocator"); } - VmaAllocation GPUallocator::createBuffer(const VkBufferCreateInfo* binfo, const VmaAllocationCreateInfo* vinfo, VkBuffer& buffer, const char* name) noexcept + VmaAllocation GPUallocator::CreateBuffer(const VkBufferCreateInfo* binfo, const VmaAllocationCreateInfo* vinfo, VkBuffer& buffer, const char* name) noexcept { MLX_PROFILE_FUNCTION(); VmaAllocation allocation; - VkResult res = vmaCreateBuffer(_allocator, binfo, vinfo, &buffer, &allocation, nullptr); + VkResult res = vmaCreateBuffer(m_allocator, binfo, vinfo, &buffer, &allocation, nullptr); if(res != VK_SUCCESS) - core::error::report(e_kind::fatal_error, "Graphics allocator : failed to allocate a buffer, %s", RCore::verbaliseResultVk(res)); + FatalError("Graphics allocator : failed to allocate a buffer, %", VerbaliseVkResult(res)); if(name != nullptr) { - Render_Core::get().getLayers().setDebugUtilsObjectNameEXT(VK_OBJECT_TYPE_BUFFER, (std::uint64_t)buffer, name); - vmaSetAllocationName(_allocator, allocation, name); + RenderCore::Get().GetLayers().SetDebugUtilsObjectNameEXT(VK_OBJECT_TYPE_BUFFER, (std::uint64_t)buffer, name); + vmaSetAllocationName(m_allocator, allocation, name); } - #ifdef DEBUG - core::error::report(e_kind::message, "Graphics Allocator : created new buffer '%s'", name); - #endif - _active_buffers_allocations++; + DebugLog("Graphics Allocator : created new buffer '%s'", name); + m_active_buffers_allocations++; return allocation; } - void GPUallocator::destroyBuffer(VmaAllocation allocation, VkBuffer buffer) noexcept + void GPUallocator::DestroyBuffer(VmaAllocation allocation, VkBuffer buffer) noexcept { MLX_PROFILE_FUNCTION(); - vkDeviceWaitIdle(Render_Core::get().getDevice().get()); - vmaDestroyBuffer(_allocator, buffer, allocation); - #ifdef DEBUG - core::error::report(e_kind::message, "Graphics Allocator : destroyed buffer"); - #endif - _active_buffers_allocations--; + vkDeviceWaitIdle(RenderCore::Get().GetDevice().Get()); + vmaDestroyBuffer(m_allocator, buffer, allocation); + DebugLog("Graphics Allocator : destroyed buffer"); + m_active_buffers_allocations--; } - VmaAllocation GPUallocator::createImage(const VkImageCreateInfo* iminfo, const VmaAllocationCreateInfo* vinfo, VkImage& 
image, const char* name) noexcept + VmaAllocation GPUallocator::CreateImage(const VkImageCreateInfo* iminfo, const VmaAllocationCreateInfo* vinfo, VkImage& image, const char* name) noexcept { MLX_PROFILE_FUNCTION(); VmaAllocation allocation; - VkResult res = vmaCreateImage(_allocator, iminfo, vinfo, &image, &allocation, nullptr); + VkResult res = vmaCreateImage(m_allocator, iminfo, vinfo, &image, &allocation, nullptr); if(res != VK_SUCCESS) - core::error::report(e_kind::fatal_error, "Graphics allocator : failed to allocate an image, %s", RCore::verbaliseResultVk(res)); + FatalError("Graphics allocator : failed to allocate an image, %", VerbaliseVkResult(res)); if(name != nullptr) { - Render_Core::get().getLayers().setDebugUtilsObjectNameEXT(VK_OBJECT_TYPE_IMAGE, (std::uint64_t)image, name); - vmaSetAllocationName(_allocator, allocation, name); + RenderCore::Get().GetLayers().SetDebugUtilsObjectNameEXT(VK_OBJECT_TYPE_IMAGE, (std::uint64_t)image, name); + vmaSetAllocationName(m_allocator, allocation, name); } - #ifdef DEBUG - core::error::report(e_kind::message, "Graphics Allocator : created new image '%s'", name); - #endif - _active_images_allocations++; + DebugLog("Graphics Allocator : created new image '%s'", name); + m_active_images_allocations++; return allocation; } - void GPUallocator::destroyImage(VmaAllocation allocation, VkImage image) noexcept + void GPUallocator::DestroyImage(VmaAllocation allocation, VkImage image) noexcept { MLX_PROFILE_FUNCTION(); - vkDeviceWaitIdle(Render_Core::get().getDevice().get()); - vmaDestroyImage(_allocator, image, allocation); - #ifdef DEBUG - core::error::report(e_kind::message, "Graphics Allocator : destroyed image"); - #endif - _active_images_allocations--; + vkDeviceWaitIdle(RenderCore::Get().GetDevice().Get()); + vmaDestroyImage(m_allocator, image, allocation); + DebugLog("Graphics Allocator : destroyed image"); + m_active_images_allocations--; } - void GPUallocator::mapMemory(VmaAllocation allocation, void** data) noexcept + void GPUallocator::MapMemory(VmaAllocation allocation, void** data) noexcept { MLX_PROFILE_FUNCTION(); - VkResult res = vmaMapMemory(_allocator, allocation, data); + VkResult res = vmaMapMemory(m_allocator, allocation, data); if(res != VK_SUCCESS) - core::error::report(e_kind::fatal_error, "Graphics allocator : unable to map GPU memory to CPU memory, %s", RCore::verbaliseResultVk(res)); + FatalError("Graphics allocator : unable to map GPU memory to CPU memory, %", VerbaliseVkResult(res)); } void GPUallocator::unmapMemory(VmaAllocation allocation) noexcept { MLX_PROFILE_FUNCTION(); - vmaUnmapMemory(_allocator, allocation); + vmaUnmapMemory(m_allocator, allocation); } void GPUallocator::dumpMemoryToJson() @@ -170,36 +166,34 @@ namespace mlx std::ofstream file(name); if(!file.is_open()) { - core::error::report(e_kind::error, "Graphics allocator : unable to dump memory to a json file"); + Error("Graphics allocator : unable to dump memory to a json file"); return; } char* str = nullptr; - vmaBuildStatsString(_allocator, &str, true); + vmaBuildStatsString(m_allocator, &str, true); file << str; - vmaFreeStatsString(_allocator, str); + vmaFreeStatsString(m_allocator, str); file.close(); id++; } - void GPUallocator::flush(VmaAllocation allocation, VkDeviceSize size, VkDeviceSize offset) noexcept + void GPUallocator::Flush(VmaAllocation allocation, VkDeviceSize size, VkDeviceSize offset) noexcept { MLX_PROFILE_FUNCTION(); - vmaFlushAllocation(_allocator, allocation, offset, size); + vmaFlushAllocation(m_allocator, allocation, offset, 
size); } - void GPUallocator::destroy() noexcept + void GPUallocator::Destroy() noexcept { - if(_active_images_allocations != 0) - core::error::report(e_kind::error, "Graphics allocator : some user-dependant allocations were not freed before destroying the display (%d active allocations). You may have not destroyed all the MLX resources you've created", _active_images_allocations); - else if(_active_buffers_allocations != 0) - core::error::report(e_kind::error, "Graphics allocator : some MLX-dependant allocations were not freed before destroying the display (%d active allocations). This is an error in the MLX, please report this should not happen", _active_buffers_allocations); - if(_active_images_allocations < 0 || _active_buffers_allocations < 0) - core::error::report(e_kind::warning, "Graphics allocator : the impossible happened, the MLX has freed more allocations than it has made (wtf)"); - vmaDestroyAllocator(_allocator); - _active_buffers_allocations = 0; - _active_images_allocations = 0; - #ifdef DEBUG - core::error::report(e_kind::message, "Vulkan : destroyed a graphics allocator"); - #endif + if(m_active_images_allocations != 0) + Error("Graphics allocator : some user-dependent allocations were not freed before destroying the display (% active allocations). You may not have destroyed all the MLX resources you've created", m_active_images_allocations); + else if(m_active_buffers_allocations != 0) + Error("Graphics allocator : some MLX-dependent allocations were not freed before destroying the display (% active allocations). This is an error in the MLX, please report it, this should not happen", m_active_buffers_allocations); + if(m_active_images_allocations < 0 || m_active_buffers_allocations < 0) + Warning("Graphics allocator : the impossible happened, the MLX has freed more allocations than it has made (wtf)"); + vmaDestroyAllocator(m_allocator); + m_active_buffers_allocations = 0; + m_active_images_allocations = 0; + DebugLog("Vulkan : destroyed a graphics allocator"); } } diff --git a/runtime/Sources/Renderer/Core/Queues.cpp b/runtime/Sources/Renderer/Core/Queues.cpp index b47fcf0..b1b7ae8 100644 --- a/runtime/Sources/Renderer/Core/Queues.cpp +++ b/runtime/Sources/Renderer/Core/Queues.cpp @@ -1,55 +1,53 @@ /* ************************************************************************** */ /* */ /* ::: :::::::: */ -/* vk_queues.cpp :+: :+: :+: */ +/* Queues.cpp :+: :+: :+: */ /* +:+ +:+ +:+ */ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/08 19:02:42 by maldavid #+# #+# */ -/* Updated: 2024/03/25 22:29:19 by maldavid ### ########.fr */ +/* Updated: 2024/04/23 18:51:21 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ -#include +#include -#include "render_core.h" +#include namespace mlx { Queues::QueueFamilyIndices Queues::findQueueFamilies(VkPhysicalDevice device) { - std::uint32_t queueFamilyCount = 0; - vkGetPhysicalDeviceQueueFamilyProperties(device, &queueFamilyCount, nullptr); + std::uint32_t queue_family_count = 0; + vkGetPhysicalDeviceQueueFamilyProperties(device, &queue_family_count, nullptr); - std::vector queueFamilies(queueFamilyCount); - vkGetPhysicalDeviceQueueFamilyProperties(device, &queueFamilyCount, queueFamilies.data()); + std::vector queue_families(queue_family_count); + vkGetPhysicalDeviceQueueFamilyProperties(device, &queue_family_count, queue_families.data()); - _families = Queues::QueueFamilyIndices{}; + m_families = Queues::QueueFamilyIndices{}; int i = 0; - for(const 
auto& queueFamily : queueFamilies) + for(const auto& queue_family : queue_families) { - if(queueFamily.queueFlags & VK_QUEUE_GRAPHICS_BIT) - _families->graphics_family = i; + if(queue_family.queueFlags & VK_QUEUE_GRAPHICS_BIT) + m_families->graphics_family = i; - if(glfwGetPhysicalDevicePresentationSupport(Render_Core::get().getInstance().get(), device, i)) - _families->present_family = i; + if(glfwGetPhysicalDevicePresentationSupport(RenderCore::Get().GetInstance().Get(), device, i)) + m_families->present_family = i; - if(_families->isComplete()) - return *_families; + if(m_families->IsComplete()) + return *m_families; i++; } - return *_families; + return *m_families; } - void Queues::init() + void Queues::Init() { - if(!_families.has_value()) - findQueueFamilies(Render_Core::get().getDevice().getPhysicalDevice()); - vkGetDeviceQueue(Render_Core::get().getDevice().get(), _families->graphics_family.value(), 0, &_graphics_queue); - vkGetDeviceQueue(Render_Core::get().getDevice().get(), _families->present_family.value(), 0, &_present_queue); - #ifdef DEBUG - core::error::report(e_kind::message, "Vulkan : got graphics and present queues"); - #endif + if(!m_families.has_value()) + FindQueueFamilies(RenderCore::Get().GetDevice().GetPhysicalDevice()); + vkGetDeviceQueue(RenderCore::Get().GetDevice().Get(), m_families->graphics_family.value(), 0, &m_graphics_queue); + vkGetDeviceQueue(RenderCore::Get().GetDevice().Get(), m_families->present_family.value(), 0, &m_present_queue); + DebugLog("Vulkan : got graphics and present queues"); } } diff --git a/runtime/Sources/Renderer/Core/RenderCore.cpp b/runtime/Sources/Renderer/Core/RenderCore.cpp index 6c14b53..44ced4a 100644 --- a/runtime/Sources/Renderer/Core/RenderCore.cpp +++ b/runtime/Sources/Renderer/Core/RenderCore.cpp @@ -1,12 +1,12 @@ /* ************************************************************************** */ /* */ /* ::: :::::::: */ -/* render_core.cpp :+: :+: :+: */ +/* RenderCore.cpp :+: :+: :+: */ /* +:+ +:+ +:+ */ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/12/17 23:33:34 by maldavid #+# #+# */ -/* Updated: 2024/03/25 19:02:06 by maldavid ### ########.fr */ +/* Updated: 2024/04/23 18:54:26 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -14,10 +14,10 @@ #define VOLK_IMPLEMENTATION #include -#include +#include -#include -#include +#include +#include #ifdef DEBUG #ifdef MLX_COMPILER_MSVC @@ -29,124 +29,106 @@ namespace mlx { - namespace RCore + const char* VerbaliseVkResult(VkResult result) { - std::optional findMemoryType(std::uint32_t typeFilter, VkMemoryPropertyFlags properties, bool error) + switch(result) { - VkPhysicalDeviceMemoryProperties memProperties; - vkGetPhysicalDeviceMemoryProperties(Render_Core::get().getDevice().getPhysicalDevice(), &memProperties); + case VK_SUCCESS: return "Success"; + case VK_NOT_READY: return "A fence or query has not yet completed"; + case VK_TIMEOUT: return "A wait operation has not completed in the specified time"; + case VK_EVENT_SET: return "An event is signaled"; + case VK_EVENT_RESET: return "An event is unsignaled"; + case VK_INCOMPLETE: return "A return array was too small for the result"; + case VK_ERROR_OUT_OF_HOST_MEMORY: return "A host memory allocation has failed"; + case VK_ERROR_OUT_OF_DEVICE_MEMORY: return "A device memory allocation has failed"; + case VK_ERROR_INITIALIZATION_FAILED: return "Initialization of an object could not be completed for implementation-specific reasons"; + 
case VK_ERROR_DEVICE_LOST: return "The logical or physical device has been lost"; + case VK_ERROR_MEMORY_MAP_FAILED: return "Mapping of a memory object has failed"; + case VK_ERROR_LAYER_NOT_PRESENT: return "A requested layer is not present or could not be loaded"; + case VK_ERROR_EXTENSION_NOT_PRESENT: return "A requested extension is not supported"; + case VK_ERROR_FEATURE_NOT_PRESENT: return "A requested feature is not supported"; + case VK_ERROR_INCOMPATIBLE_DRIVER: return "The requested version of Vulkan is not supported by the driver or is otherwise incompatible"; + case VK_ERROR_TOO_MANY_OBJECTS: return "Too many objects of the type have already been created"; + case VK_ERROR_FORMAT_NOT_SUPPORTED: return "A requested format is not supported on this device"; + case VK_ERROR_SURFACE_LOST_KHR: return "A surface is no longer available"; + case VK_SUBOPTIMAL_KHR: return "A swapchain no longer matches the surface properties exactly, but can still be used"; + case VK_ERROR_OUT_OF_DATE_KHR: return "A surface has changed in such a way that it is no longer compatible with the swapchain"; + case VK_ERROR_INCOMPATIBLE_DISPLAY_KHR: return "The display used by a swapchain does not use the same presentable image layout"; + case VK_ERROR_NATIVE_WINDOW_IN_USE_KHR: return "The requested window is already connected to a VkSurfaceKHR, or to some other non-Vulkan API"; + case VK_ERROR_VALIDATION_FAILED_EXT: return "A validation layer found an error"; - for(std::uint32_t i = 0; i < memProperties.memoryTypeCount; i++) - { - if((typeFilter & (1 << i)) && (memProperties.memoryTypes[i].propertyFlags & properties) == properties) - return i; - } - if(error) - core::error::report(e_kind::fatal_error, "Vulkan : failed to find suitable memory type"); - return std::nullopt; - } - - const char* verbaliseResultVk(VkResult result) - { - switch(result) - { - case VK_SUCCESS: return "Success"; - case VK_NOT_READY: return "A fence or query has not yet completed"; - case VK_TIMEOUT: return "A wait operation has not completed in the specified time"; - case VK_EVENT_SET: return "An event is signaled"; - case VK_EVENT_RESET: return "An event is unsignaled"; - case VK_INCOMPLETE: return "A return array was too small for the result"; - case VK_ERROR_OUT_OF_HOST_MEMORY: return "A host memory allocation has failed"; - case VK_ERROR_OUT_OF_DEVICE_MEMORY: return "A device memory allocation has failed"; - case VK_ERROR_INITIALIZATION_FAILED: return "Initialization of an object could not be completed for implementation-specific reasons"; - case VK_ERROR_DEVICE_LOST: return "The logical or physical device has been lost"; - case VK_ERROR_MEMORY_MAP_FAILED: return "Mapping of a memory object has failed"; - case VK_ERROR_LAYER_NOT_PRESENT: return "A requested layer is not present or could not be loaded"; - case VK_ERROR_EXTENSION_NOT_PRESENT: return "A requested extension is not supported"; - case VK_ERROR_FEATURE_NOT_PRESENT: return "A requested feature is not supported"; - case VK_ERROR_INCOMPATIBLE_DRIVER: return "The requested version of Vulkan is not supported by the driver or is otherwise incompatible"; - case VK_ERROR_TOO_MANY_OBJECTS: return "Too many objects of the type have already been created"; - case VK_ERROR_FORMAT_NOT_SUPPORTED: return "A requested format is not supported on this device"; - case VK_ERROR_SURFACE_LOST_KHR: return "A surface is no longer available"; - case VK_SUBOPTIMAL_KHR: return "A swapchain no longer matches the surface properties exactly, but can still be used"; - case VK_ERROR_OUT_OF_DATE_KHR: return 
"A surface has changed in such a way that it is no longer compatible with the swapchain"; - case VK_ERROR_INCOMPATIBLE_DISPLAY_KHR: return "The display used by a swapchain does not use the same presentable image layout"; - case VK_ERROR_NATIVE_WINDOW_IN_USE_KHR: return "The requested window is already connected to a VkSurfaceKHR, or to some other non-Vulkan API"; - case VK_ERROR_VALIDATION_FAILED_EXT: return "A validation layer found an error"; - - default: return "Unknown Vulkan error"; - } - return nullptr; - } - - VkPipelineStageFlags accessFlagsToPipelineStage(VkAccessFlags accessFlags, VkPipelineStageFlags stageFlags) - { - VkPipelineStageFlags stages = 0; - - while(accessFlags != 0) - { - VkAccessFlagBits AccessFlag = static_cast(accessFlags & (~(accessFlags - 1))); - if(AccessFlag == 0 || (AccessFlag & (AccessFlag - 1)) != 0) - core::error::report(e_kind::fatal_error, "Vulkan : an error has been caught during access flag to pipeline stage operation"); - accessFlags &= ~AccessFlag; - - switch(AccessFlag) - { - case VK_ACCESS_INDIRECT_COMMAND_READ_BIT: stages |= VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT; break; - case VK_ACCESS_INDEX_READ_BIT: stages |= VK_PIPELINE_STAGE_VERTEX_INPUT_BIT; break; - case VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT: stages |= VK_PIPELINE_STAGE_VERTEX_INPUT_BIT; break; - case VK_ACCESS_UNIFORM_READ_BIT: stages |= stageFlags | VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT; break; - case VK_ACCESS_INPUT_ATTACHMENT_READ_BIT: stages |= VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT; break; - case VK_ACCESS_SHADER_READ_BIT: stages |= stageFlags | VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT; break; - case VK_ACCESS_SHADER_WRITE_BIT: stages |= stageFlags | VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT; break; - case VK_ACCESS_COLOR_ATTACHMENT_READ_BIT: stages |= VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT; break; - case VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT: stages |= VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT; break; - case VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT: stages |= VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT | VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT; break; - case VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT: stages |= VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT | VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT; break; - case VK_ACCESS_TRANSFER_READ_BIT: stages |= VK_PIPELINE_STAGE_TRANSFER_BIT; break; - case VK_ACCESS_TRANSFER_WRITE_BIT: stages |= VK_PIPELINE_STAGE_TRANSFER_BIT; break; - case VK_ACCESS_HOST_READ_BIT: stages |= VK_PIPELINE_STAGE_HOST_BIT; break; - case VK_ACCESS_HOST_WRITE_BIT: stages |= VK_PIPELINE_STAGE_HOST_BIT; break; - case VK_ACCESS_MEMORY_READ_BIT: break; - case VK_ACCESS_MEMORY_WRITE_BIT: break; - - default: core::error::report(e_kind::error, "Vulkan : unknown access flag"); break; - } - } - return stages; + default: return "Unknown Vulkan error"; } + return nullptr; } - void Render_Core::init() + VkPipelineStageFlags AccessFlagsToPipelineStage(VkAccessFlags access_flags, VkPipelineStageFlags stage_flags) + { + VkPipelineStageFlags stages = 0; + + while(access_flags != 0) + { + VkAccessFlagBits Access_flag = static_cast(access_flags & (~(access_flags - 1))); + if(Access_flag == 0 || (Access_flag & (Access_flag - 1)) != 0) + FatalError("Vulkan : an error has been caught during access flag to pipeline stage operation"); + access_flags &= ~Access_flag; + + switch(Access_flag) + { + case VK_ACCESS_INDIRECT_COMMAND_READ_BIT: stages |= VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT; break; + case VK_ACCESS_INDEX_READ_BIT: stages |= VK_PIPELINE_STAGE_VERTEX_INPUT_BIT; break; + case 
VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT: stages |= VK_PIPELINE_STAGE_VERTEX_INPUT_BIT; break; + case VK_ACCESS_UNIFORM_READ_BIT: stages |= stage_flags | VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT; break; + case VK_ACCESS_INPUT_ATTACHMENT_READ_BIT: stages |= VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT; break; + case VK_ACCESS_SHADER_READ_BIT: stages |= stage_flags | VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT; break; + case VK_ACCESS_SHADER_WRITE_BIT: stages |= stage_flags | VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT; break; + case VK_ACCESS_COLOR_ATTACHMENT_READ_BIT: stages |= VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT; break; + case VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT: stages |= VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT; break; + case VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT: stages |= VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT | VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT; break; + case VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT: stages |= VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT | VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT; break; + case VK_ACCESS_TRANSFER_READ_BIT: stages |= VK_PIPELINE_STAGE_TRANSFER_BIT; break; + case VK_ACCESS_TRANSFER_WRITE_BIT: stages |= VK_PIPELINE_STAGE_TRANSFER_BIT; break; + case VK_ACCESS_HOST_READ_BIT: stages |= VK_PIPELINE_STAGE_HOST_BIT; break; + case VK_ACCESS_HOST_WRITE_BIT: stages |= VK_PIPELINE_STAGE_HOST_BIT; break; + case VK_ACCESS_MEMORY_READ_BIT: break; + case VK_ACCESS_MEMORY_WRITE_BIT: break; + + default: Error("Vulkan : unknown access flag"); break; + } + } + return stages; + } + + void RenderCore::Init() { if(volkInitialize() != VK_SUCCESS) - core::error::report(e_kind::fatal_error, "Vulkan loader : cannot load %s, are you sure Vulkan is installed on your system ?", VULKAN_LIB_NAME); + FatalError("Vulkan loader : cannot load %, are you sure Vulkan is installed on your system ?", VULKAN_LIB_NAME); - _instance.init(); - volkLoadInstance(_instance.get()); - _layers.init(); - _device.init(); - volkLoadDevice(_device.get()); - _queues.init(); - _allocator.init(); - _cmd_manager.init(); - _is_init = true; + m_instance.Init(); + volkLoadInstance(m_instance.Get()); + m_layers.Init(); + m_device.Init(); + volkLoadDevice(m_device.Get()); + m_queues.Init(); + m_allocator.Init(); + m_cmd_manager.Init(); + m_is_init = true; } - void Render_Core::destroy() + void RenderCore::Destroy() { - if(!_is_init) + if(!m_is_init) return; - vkDeviceWaitIdle(_device()); + vkDeviceWaitIdle(m_device.Get()); - _pool_manager.destroyAllPools(); - _cmd_manager.destroy(); - _allocator.destroy(); - _device.destroy(); - _layers.destroy(); - _instance.destroy(); + m_pool_manager.DestroyAllPools(); + m_cmd_manager.Destroy(); + m_allocator.Destroy(); + m_device.Destroy(); + m_layers.Destroy(); + m_instance.Destroy(); - _is_init = false; + m_is_init = false; } } diff --git a/runtime/Sources/Renderer/Core/Semaphore.cpp b/runtime/Sources/Renderer/Core/Semaphore.cpp index d8d8dd3..8a9d72a 100644 --- a/runtime/Sources/Renderer/Core/Semaphore.cpp +++ b/runtime/Sources/Renderer/Core/Semaphore.cpp @@ -1,45 +1,36 @@ /* ************************************************************************** */ /* */ /* ::: :::::::: */ -/* vk_semaphore.cpp :+: :+: :+: */ +/* Semaphore.cpp :+: :+: :+: */ /* +:+ +:+ +:+ */ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/08 19:01:08 by maldavid #+# #+# */ -/* Updated: 2024/03/25 19:02:25 by maldavid ### ########.fr */ +/* Updated: 2024/04/23 18:55:42 by maldavid ### ########.fr */ /* */ /* 
************************************************************************** */ -#include - -#include "vk_semaphore.h" -#include "render_core.h" -#include +#include +#include +#include namespace mlx { - void Semaphore::init() + void Semaphore::Init() { - VkSemaphoreCreateInfo semaphoreInfo{}; - semaphoreInfo.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO; + VkSemaphoreCreateInfo semaphore_info{}; + semaphore_info.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO; VkResult res; - if( (res = vkCreateSemaphore(Render_Core::get().getDevice().get(), &semaphoreInfo, nullptr, &_image_available_semaphore)) != VK_SUCCESS || - (res = vkCreateSemaphore(Render_Core::get().getDevice().get(), &semaphoreInfo, nullptr, &_render_finished_semaphore)) != VK_SUCCESS) - core::error::report(e_kind::fatal_error, "Vulkan : failed to create a synchronization object (semaphore), %s", RCore::verbaliseResultVk(res)); - #ifdef DEBUG - core::error::report(e_kind::message, "Vulkan : created new semaphores"); - #endif + if((res = vkCreateSemaphore(RenderCore::Get().GetDevice().Get(), &semaphore_info, nullptr, &m_semaphore)) != VK_SUCCESS) + FatalError("Vulkan : failed to create a synchronization object (semaphore), %", VerbaliseVkResult(res)); + DebugLog("Vulkan : created new semaphores"); } - void Semaphore::destroy() noexcept + void Semaphore::Destroy() noexcept { - vkDestroySemaphore(Render_Core::get().getDevice().get(), _render_finished_semaphore, nullptr); - _render_finished_semaphore = VK_NULL_HANDLE; - vkDestroySemaphore(Render_Core::get().getDevice().get(), _image_available_semaphore, nullptr); - _image_available_semaphore = VK_NULL_HANDLE; - #ifdef DEBUG - core::error::report(e_kind::message, "Vulkan : destroyed semaphores"); - #endif + vkDestroySemaphore(RenderCore::Get().GetDevice().Get(), m_semaphore, nullptr); + m_semaphore = VK_NULL_HANDLE; + DebugLog("Vulkan : destroyed semaphore"); } } diff --git a/runtime/Sources/Renderer/Core/Surface.cpp b/runtime/Sources/Renderer/Core/Surface.cpp index b50a5f2..4715afe 100644 --- a/runtime/Sources/Renderer/Core/Surface.cpp +++ b/runtime/Sources/Renderer/Core/Surface.cpp @@ -1,12 +1,12 @@ /* ************************************************************************** */ /* */ /* ::: :::::::: */ -/* vk_surface.cpp :+: :+: :+: */ +/* Surface.cpp :+: :+: :+: */ /* +:+ +:+ +:+ */ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/08 18:58:49 by maldavid #+# #+# */ -/* Updated: 2024/03/25 22:25:55 by maldavid ### ########.fr */ +/* Updated: 2024/04/23 18:56:56 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -17,31 +17,27 @@ namespace mlx { - void Surface::create(Renderer& renderer) + void Surface::Create(Renderer& renderer) { - if(glfwCreateWindowSurface(Render_Core::get().getInstance().get(), renderer.getWindow()->getNativeWindow(), NULL, &_surface) != VK_SUCCESS) - core::error::report(e_kind::fatal_error, "Vulkan : failed to create a surface"); - #ifdef DEBUG - core::error::report(e_kind::message, "Vulkan : created new surface"); - #endif + if(glfwCreateWindowSurface(RenderCore::Get().GetInstance().Get(), renderer.GetWindow()->GetNativeWindow(), NULL, &m_surface) != VK_SUCCESS) + FatalError("Vulkan : failed to create a surface"); + DebugLog("Vulkan : created new surface"); } - VkSurfaceFormatKHR Surface::chooseSwapSurfaceFormat(const std::vector& availableFormats) + VkSurfaceFormatKHR Surface::ChooseSwapSurfaceFormat(const std::vector& available_formats) { - auto it = 
std::find_if(availableFormats.begin(), availableFormats.end(), [](VkSurfaceFormatKHR format) + auto it = std::find_if(available_formats.begin(), available_formats.end(), [](VkSurfaceFormatKHR format) { return format.format == VK_FORMAT_R8G8B8A8_SRGB && format.colorSpace == VK_COLOR_SPACE_SRGB_NONLINEAR_KHR; }); - return (it == availableFormats.end() ? availableFormats[0] : *it); + return (it == available_formats.end() ? available_formats[0] : *it); } - void Surface::destroy() noexcept + void Surface::Destroy() noexcept { - vkDestroySurfaceKHR(Render_Core::get().getInstance().get(), _surface, nullptr); - _surface = VK_NULL_HANDLE; - #ifdef DEBUG - core::error::report(e_kind::message, "Vulkan : destroyed a surface"); - #endif + vkDestroySurfaceKHR(RenderCore::Get().GetInstance().Get(), m_surface, nullptr); + m_surface = VK_NULL_HANDLE; + DebugLog("Vulkan : destroyed a surface"); } } diff --git a/runtime/Sources/Renderer/Core/ValidationLayers.cpp b/runtime/Sources/Renderer/Core/ValidationLayers.cpp index ffecd3d..d64a033 100644 --- a/runtime/Sources/Renderer/Core/ValidationLayers.cpp +++ b/runtime/Sources/Renderer/Core/ValidationLayers.cpp @@ -1,79 +1,72 @@ /* ************************************************************************** */ /* */ /* ::: :::::::: */ -/* vk_validation_layers.cpp :+: :+: :+: */ +/* ValidationLayers.cpp :+: :+: :+: */ /* +:+ +:+ +:+ */ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/12/19 14:05:25 by maldavid #+# #+# */ -/* Updated: 2024/03/25 19:00:06 by maldavid ### ########.fr */ +/* Updated: 2024/04/23 19:20:21 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ -#include -#include "render_core.h" -#include "vulkan/vulkan_core.h" - -#include +#include +#include namespace mlx { - void ValidationLayers::init() + void ValidationLayers::Init() { - if constexpr(!enableValidationLayers) + if constexpr(!enable_validation_layers) return; - std::uint32_t extensionCount; - vkEnumerateInstanceExtensionProperties(nullptr, &extensionCount, nullptr); - std::vector extensions(extensionCount); - vkEnumerateInstanceExtensionProperties(nullptr, &extensionCount, extensions.data()); + std::uint32_t extension_count; + vkEnumerateInstanceExtensionProperties(nullptr, &extension_count, nullptr); + std::vector extensions(extension_count); + vkEnumerateInstanceExtensionProperties(nullptr, &extension_count, extensions.data()); if(!std::any_of(extensions.begin(), extensions.end(), [=](VkExtensionProperties ext) { return std::strcmp(ext.extensionName, VK_EXT_DEBUG_UTILS_EXTENSION_NAME) == 0; })) { - core::error::report(e_kind::warning , "Vulkan : %s not present, debug utils are disabled", VK_EXT_DEBUG_UTILS_EXTENSION_NAME); + Warning("Vulkan : %s not present, debug utils are disabled", VK_EXT_DEBUG_UTILS_EXTENSION_NAME); return; } - VkDebugUtilsMessengerCreateInfoEXT createInfo{}; - populateDebugMessengerCreateInfo(createInfo); - VkResult res = createDebugUtilsMessengerEXT(&createInfo, nullptr); + VkDebugUtilsMessengerCreateInfoEXT create_info{}; + PopulateDebugMessengerCreateInfo(create_info); + VkResult res = CreateDebugUtilsMessengerEXT(&create_info, nullptr); if(res != VK_SUCCESS) - core::error::report(e_kind::warning, "Vulkan : failed to set up debug messenger, %s", RCore::verbaliseResultVk(res)); - #ifdef DEBUG + Warning("Vulkan : failed to set up debug messenger, %", VerbaliseVkResult(res)); else - core::error::report(e_kind::message, "Vulkan : enabled validation layers"); - #endif + 
DebugLog("Vulkan : enabled validation layers"); - _vkSetDebugUtilsObjectNameEXT = (PFN_vkSetDebugUtilsObjectNameEXT)vkGetInstanceProcAddr(Render_Core::get().getInstance().get(), "vkSetDebugUtilsObjectNameEXT"); - if(!_vkSetDebugUtilsObjectNameEXT) - core::error::report(e_kind::warning, "Vulkan : failed to set up debug object names, %s", RCore::verbaliseResultVk(VK_ERROR_EXTENSION_NOT_PRESENT)); - #ifdef DEBUG + f_vkSetDebugUtilsObjectNameEXT = (PFN_vkSetDebugUtilsObjectNameEXT)vkGetInstanceProcAddr(RenderCore::Get().GetInstance().Get(), "vkSetDebugUtilsObjectNameEXT"); + if(!f_vkSetDebugUtilsObjectNameEXT) + Warning("Vulkan : failed to set up debug object names, %", VerbaliseVkResult(VK_ERROR_EXTENSION_NOT_PRESENT)); else - core::error::report(e_kind::message, "Vulkan : enabled debug object names"); - #endif + DebugLog("Vulkan : enabled debug object names"); } - bool ValidationLayers::checkValidationLayerSupport() + bool ValidationLayers::CheckValidationLayerSupport() { - std::uint32_t layerCount; - vkEnumerateInstanceLayerProperties(&layerCount, nullptr); + std::uint32_t layer_count; + vkEnumerateInstanceLayerProperties(&layer_count, nullptr); - std::vector availableLayers(layerCount); - vkEnumerateInstanceLayerProperties(&layerCount, availableLayers.data()); + std::vector available_layers(layer_count); + vkEnumerateInstanceLayerProperties(&layer_count, available_layers.data()); - return std::all_of(validationLayers.begin(), validationLayers.end(), [&](const char* layerName) + return std::all_of(validation_layers.begin(), validation_layers.end(), [&](const char* layer_name) { - if(!std::any_of(availableLayers.begin(), availableLayers.end(), [=](VkLayerProperties props) { return std::strcmp(layerName, props.layerName) == 0; })) + if(!std::any_of(available_layers.begin(), available_layers.end(), [=](VkLayerProperties props) { return std::strcmp(layer_name, props.layer_name) == 0; })) { - core::error::report(e_kind::error, "Vulkan : a validation layer was requested but was not found ('%s')", layerName); + Error("Vulkan : a validation layer was requested but was not found ('%')", layer_name); return false; } return true; }); } - VkResult ValidationLayers::setDebugUtilsObjectNameEXT(VkObjectType object_type, std::uint64_t object_handle, const char* object_name) + VkResult ValidationLayers::SetDebugUtilsObjectNameEXT(VkObjectType object_type, std::uint64_t object_handle, const char* object_name) { - if(!_vkSetDebugUtilsObjectNameEXT) + if(!f_vkSetDebugUtilsObjectNameEXT) return VK_ERROR_EXTENSION_NOT_PRESENT; VkDebugUtilsObjectNameInfoEXT name_info{}; @@ -81,49 +74,49 @@ namespace mlx name_info.objectType = object_type; name_info.objectHandle = object_handle; name_info.pObjectName = object_name; - return _vkSetDebugUtilsObjectNameEXT(Render_Core::get().getDevice().get(), &name_info); + return f_vkSetDebugUtilsObjectNameEXT(RenderCore::Get().GetDevice().Get(), &name_info); } - void ValidationLayers::populateDebugMessengerCreateInfo(VkDebugUtilsMessengerCreateInfoEXT& createInfo) + void ValidationLayers::PopulateDebugMessengerCreateInfo(VkDebugUtilsMessengerCreateInfoEXT& create_info) { - createInfo = {}; - createInfo.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT; - createInfo.messageSeverity = VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT | VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT | VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT; - createInfo.messageType = VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT | VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT | 
VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT; - createInfo.pfnUserCallback = ValidationLayers::debugCallback; + create_info = {}; + create_info.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT; + create_info.messageSeverity = VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT | VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT | VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT; + create_info.messageType = VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT | VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT | VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT; + create_info.pfnUserCallback = ValidationLayers::DebugCallback; } - void ValidationLayers::destroy() + void ValidationLayers::Destroy() { - if constexpr(enableValidationLayers) + if constexpr(enable_validation_layers) { - destroyDebugUtilsMessengerEXT(nullptr); + DestroyDebugUtilsMessengerEXT(nullptr); #ifdef DEBUG - core::error::report(e_kind::message, "Vulkan : destroyed validation layers"); + DebugLog("Vulkan : destroyed validation layers"); #endif } } - VkResult ValidationLayers::createDebugUtilsMessengerEXT(const VkDebugUtilsMessengerCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator) + VkResult ValidationLayers::CreateDebugUtilsMessengerEXT(const VkDebugUtilsMessengerCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator) { - auto func = (PFN_vkCreateDebugUtilsMessengerEXT)vkGetInstanceProcAddr(Render_Core::get().getInstance().get(), "vkCreateDebugUtilsMessengerEXT"); - return func != nullptr ? func(Render_Core::get().getInstance().get(), pCreateInfo, pAllocator, &_debug_messenger) : VK_ERROR_EXTENSION_NOT_PRESENT; + auto func = (PFN_vkCreateDebugUtilsMessengerEXT)vkGetInstanceProcAddr(RenderCore::Get().GetInstance().Get(), "vkCreateDebugUtilsMessengerEXT"); + return func != nullptr ? 
func(RenderCore::Get().GetInstance().Get(), pCreateInfo, pAllocator, &m_debug_messenger) : VK_ERROR_EXTENSION_NOT_PRESENT; } - VKAPI_ATTR VkBool32 VKAPI_CALL ValidationLayers::debugCallback(VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, [[maybe_unused]] VkDebugUtilsMessageTypeFlagsEXT messageType, const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData, [[maybe_unused]] void* pUserData) + VKAPI_ATTR VkBool32 VKAPI_CALL ValidationLayers::DebugCallback(VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, [[maybe_unused]] VkDebugUtilsMessageTypeFlagsEXT messageType, const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData, [[maybe_unused]] void* pUserData) { if(messageSeverity == VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT) - core::error::report(e_kind::error, pCallbackData->pMessage); + Error(pCallbackData->pMessage); else if(messageSeverity == VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT) - core::error::report(e_kind::warning, pCallbackData->pMessage); + Warning(pCallbackData->pMessage); return VK_FALSE; } void ValidationLayers::destroyDebugUtilsMessengerEXT(const VkAllocationCallbacks* pAllocator) { - auto func = (PFN_vkDestroyDebugUtilsMessengerEXT)vkGetInstanceProcAddr(Render_Core::get().getInstance().get(), "vkDestroyDebugUtilsMessengerEXT"); + auto func = (PFN_vkDestroyDebugUtilsMessengerEXT)vkGetInstanceProcAddr(RenderCore::Get().GetInstance().Get(), "vkDestroyDebugUtilsMessengerEXT"); if(func != nullptr) - func(Render_Core::get().getInstance().get(), _debug_messenger, pAllocator); + func(RenderCore::Get().GetInstance().Get(), m_debug_messenger, pAllocator); } } diff --git a/runtime/Sources/Renderer/Descriptors/DescriptorPool.cpp b/runtime/Sources/Renderer/Descriptors/DescriptorPool.cpp new file mode 100644 index 0000000..2cd8085 --- /dev/null +++ b/runtime/Sources/Renderer/Descriptors/DescriptorPool.cpp @@ -0,0 +1,69 @@ +/* ************************************************************************** */ +/* */ +/* ::: :::::::: */ +/* DescriptorPool.cpp :+: :+: :+: */ +/* +:+ +:+ +:+ */ +/* By: maldavid +#+ +:+ +#+ */ +/* +#+#+#+#+#+ +#+ */ +/* Created: 2023/01/23 18:34:23 by maldavid #+# #+# */ +/* Updated: 2024/04/23 19:39:39 by maldavid ### ########.fr */ +/* */ +/* ************************************************************************** */ + +#include + +#include +#include +#include + +namespace mlx +{ + void DescriptorPool::Init(std::vector sizes) + { + VkDescriptorPoolCreateInfo pool_info{}; + pool_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO; + pool_info.poolSizeCount = sizes.size(); + pool_info.pPoolSizes = sizes.data(); + pool_info.maxSets = MAX_SETS_PER_POOL; + pool_info.flags = VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT; + + VkResult res = vkCreateDescriptorPool(RenderCore::Get().GetDevice().Get(), &pool_info, nullptr, &m_pool); + if(res != VK_SUCCESS) + FatalError("Vulkan : failed to create descriptor pool, %", VerbaliseVkResult(res)); + DebugLog("Vulkan : created new descriptor pool"); + } + + VkDescriptorSet DescriptorPool::AllocateDescriptorSet(class DescriptorSetLayout& layout) + { + VkDescriptorSet set; + + VkDescriptorSetAllocateInfo alloc_info{}; + alloc_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO; + alloc_info.descriptorPool = m_pool; + alloc_info.descriptorSetCount = 1; + alloc_info.pSetLayouts = layouts.Get(); + + VkResult res = vkAllocateDescriptorSets(RenderCore::Get().GetDevice().Get(), &alloc_info, &set); + if(res != VK_SUCCESS) + FatalError("Vulkan : failed to allocate descriptor set, %", 
VerbaliseVkResult(res)); + m_allocated_sets++; + DebugLog("Vulkan : created new descriptor set"); + return set; + } + + void DescriptorPool::FreeDescriptor(VkDescriptorSet set) + { + if(!IsInit()) + return; + vkFreeDescriptorSets(RenderCore::Get().GetDevice().Get(), m_pool, 1, &set); + m_allocated_sets--; // if this goes into underflow I quit + } + + void DescriptorPool::Destroy() noexcept + { + if(m_pool != VK_NULL_HANDLE) + vkDestroyDescriptorPool(RenderCore::Get().GetDevice().Get(), m_pool, nullptr); + m_pool = VK_NULL_HANDLE; + DebugLog("Vulkan : destroyed a descriptor pool"); + } +} diff --git a/runtime/Sources/Renderer/Descriptors/descriptor_pool_manager.cpp b/runtime/Sources/Renderer/Descriptors/DescriptorPoolManager.cpp similarity index 60% rename from runtime/Sources/Renderer/Descriptors/descriptor_pool_manager.cpp rename to runtime/Sources/Renderer/Descriptors/DescriptorPoolManager.cpp index 4279527..081f887 100644 --- a/runtime/Sources/Renderer/Descriptors/descriptor_pool_manager.cpp +++ b/runtime/Sources/Renderer/Descriptors/DescriptorPoolManager.cpp @@ -1,41 +1,41 @@ /* ************************************************************************** */ /* */ /* ::: :::::::: */ -/* descriptor_pool_manager.cpp :+: :+: :+: */ +/* DescriptorPoolManager.cpp :+: :+: :+: */ /* +:+ +:+ +:+ */ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2024/01/20 06:51:47 by maldavid #+# #+# */ -/* Updated: 2024/03/25 19:02:29 by maldavid ### ########.fr */ +/* Updated: 2024/04/23 19:41:38 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ -#include +#include -#include -#include +#include +#include namespace mlx { - DescriptorPool& DescriptorPoolManager::getAvailablePool() + DescriptorPool& DescriptorPoolManager::GetAvailablePool() { - for(auto& pool : _pools) + for(auto& pool : m_pools) { - if(pool.getNumberOfSetsAllocated() < MAX_SETS_PER_POOL) + if(pool.GetNumberOfSetsAllocated() < MAX_SETS_PER_POOL) return pool; } - VkDescriptorPoolSize pool_sizes[] = { + std::vector pool_sizes = { { VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, (MAX_FRAMES_IN_FLIGHT * NUMBER_OF_UNIFORM_BUFFERS) }, { VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, MAX_SETS_PER_POOL - (MAX_FRAMES_IN_FLIGHT * NUMBER_OF_UNIFORM_BUFFERS) } }; - _pools.emplace_front().init((sizeof(pool_sizes) / sizeof(VkDescriptorPoolSize)), pool_sizes); - return _pools.front(); + m_pools.emplace_front().Init(std::move(pool_sizes)); + return m_pools.front(); } - void DescriptorPoolManager::destroyAllPools() + void DescriptorPoolManager::DestroyAllPools() { - for(auto& pool : _pools) - pool.destroy(); - _pools.clear(); + for(auto& pool : m_pools) + pool.Destroy(); + m_pools.clear(); } } diff --git a/runtime/Sources/Renderer/Descriptors/DescriptorSet.cpp b/runtime/Sources/Renderer/Descriptors/DescriptorSet.cpp new file mode 100644 index 0000000..b28ba9b --- /dev/null +++ b/runtime/Sources/Renderer/Descriptors/DescriptorSet.cpp @@ -0,0 +1,111 @@ +/* ************************************************************************** */ +/* */ +/* ::: :::::::: */ +/* DescriptorSet.cpp :+: :+: :+: */ +/* +:+ +:+ +:+ */ +/* By: maldavid +#+ +:+ +#+ */ +/* +#+#+#+#+#+ +#+ */ +/* Created: 2023/01/23 18:40:44 by maldavid #+# #+# */ +/* Updated: 2024/04/23 19:50:06 by maldavid ### ########.fr */ +/* */ +/* ************************************************************************** */ + +#include + +#include +#include +#include +#include +#include +#include + +namespace mlx +{ + void 
DescriptorSet::Init(NonOwningPtr renderer, NonOwningPtr pool, DescriptorSetLayout layout) + { + MLX_PROFILE_FUNCTION(); + m_renderer = renderer; + m_layout = layout; + m_pool = pool; + + for(int i = 0; i < MAX_FRAMES_IN_FLIGHT; i++) + m_desc_set[i] = pool->AllocateDescriptorSet(layout); + } + + void DescriptorSet::WriteDescriptor(int binding, NonOwningPtr ubo) const noexcept + { + MLX_PROFILE_FUNCTION(); + auto device = RenderCore::Get().GetDevice().Get(); + + for(int i = 0; i < MAX_FRAMES_IN_FLIGHT; i++) + { + VkDescriptorBufferInfo buffer_info{}; + buffer_info.buffer = ubo->Get(i); + buffer_info.offset = ubo->GetOffset(i); + buffer_info.range = ubo->GetSize(i); + + VkWriteDescriptorSet descriptor_write{}; + descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET; + descriptor_write.dstSet = m_desc_set[i]; + descriptor_write.dstBinding = binding; + descriptor_write.dstArrayElement = 0; + descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER; + descriptor_write.descriptorCount = 1; + descriptor_write.pBufferInfo = &buffer_info; + + vkUpdateDescriptorSets(device, 1, &descriptor_write, 0, nullptr); + } + } + + void DescriptorSet::WriteDescriptor(int binding, const Image& image) const noexcept + { + MLX_PROFILE_FUNCTION(); + auto device = RenderCore::Get().GetDevice().Get(); + + VkDescriptorImageInfo image_info{}; + image_info.imageLayout = image.GetLayout(); + image_info.imageView = image.GetImageView(); + image_info.sampler = image.GetSampler(); + + VkWriteDescriptorSet descriptor_write{}; + descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET; + descriptor_write.dstSet = m_desc_set[m_renderer->GetActiveImageIndex()]; + descriptor_write.dstBinding = binding; + descriptor_write.dstArrayElement = 0; + descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER; + descriptor_write.descriptorCount = 1; + descriptor_write.pImageInfo = &image_info; + + vkUpdateDescriptorSets(device, 1, &descriptor_write, 0, nullptr); + } + + DescriptorSet DescriptorSet::Duplicate() + { + MLX_PROFILE_FUNCTION(); + DescriptorSet set; + set.Init(m_renderer, &RenderCore::Get().GetDescriptorPool(), m_layout); + return set; + } + + VkDescriptorSet& DescriptorSet::operator()() noexcept + { + return m_desc_set[m_renderer->GetActiveImageIndex()]; + } + + VkDescriptorSet& DescriptorSet::Get() noexcept + { + return m_desc_set[m_renderer->GetActiveImageIndex()]; + } + + void DescriptorSet::Destroy() noexcept + { + MLX_PROFILE_FUNCTION(); + if(m_pool != nullptr && RenderCore::Get().IsInit()) // checks if the render core is still init (it should always be init but just in case) + m_pool->FreeDescriptor(*this); + for(auto& set : m_desc_set) + { + if(set != VK_NULL_HANDLE) + set = VK_NULL_HANDLE; + } + } +} diff --git a/runtime/Sources/Renderer/Descriptors/vk_descriptor_set_layout.cpp b/runtime/Sources/Renderer/Descriptors/DescriptorSetLayout.cpp similarity index 56% rename from runtime/Sources/Renderer/Descriptors/vk_descriptor_set_layout.cpp rename to runtime/Sources/Renderer/Descriptors/DescriptorSetLayout.cpp index 4ceba14..cc61a34 100644 --- a/runtime/Sources/Renderer/Descriptors/vk_descriptor_set_layout.cpp +++ b/runtime/Sources/Renderer/Descriptors/DescriptorSetLayout.cpp @@ -1,23 +1,23 @@ /* ************************************************************************** */ /* */ /* ::: :::::::: */ -/* vk_descriptor_set_layout.cpp :+: :+: :+: */ +/* DescriptorSetLayout.cpp :+: :+: :+: */ /* +:+ +:+ +:+ */ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 
2023/01/23 18:37:28 by maldavid #+# #+# */ -/* Updated: 2024/03/25 19:02:47 by maldavid ### ########.fr */ +/* Updated: 2024/04/23 19:52:41 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ -#include +#include -#include "vk_descriptor_set_layout.h" -#include +#include +#include namespace mlx { - void DescriptorSetLayout::init(std::vector> binds, VkShaderStageFlagBits stage) + void DescriptorSetLayout::Init(std::vector> binds, VkShaderStageFlagBits stage) { std::vector bindings(binds.size()); for(std::size_t i = 0; i < binds.size(); i++) @@ -29,21 +29,21 @@ namespace mlx bindings[i].stageFlags = stage; } - _bindings = std::move(binds); + m_bindings = std::move(binds); - VkDescriptorSetLayoutCreateInfo layoutInfo{}; - layoutInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO; - layoutInfo.bindingCount = _bindings.size(); - layoutInfo.pBindings = bindings.data(); + VkDescriptorSetLayoutCreateInfo layout_info{}; + layout_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO; + layout_info.bindingCount = m_bindings.size(); + layout_info.pBindings = m_bindings.data(); - VkResult res = vkCreateDescriptorSetLayout(Render_Core::get().getDevice().get(), &layoutInfo, nullptr, &_layout); + VkResult res = vkCreateDescriptorSetLayout(RenderCore::Get().GetDevice().Get(), &layout_info, nullptr, &m_layout); if(res != VK_SUCCESS) - core::error::report(e_kind::fatal_error, "Vulkan : failed to create descriptor set layout, %s", RCore::verbaliseResultVk(res)); + FatalError("Vulkan : failed to create descriptor set layout, %", VerbaliseVkResult(res)); } - void DescriptorSetLayout::destroy() noexcept + void DescriptorSetLayout::Destroy() noexcept { - vkDestroyDescriptorSetLayout(Render_Core::get().getDevice().get(), _layout, nullptr); - _layout = VK_NULL_HANDLE; + vkDestroyDescriptorSetLayout(RenderCore::Get().GetDevice().Get(), m_layout, nullptr); + m_layout = VK_NULL_HANDLE; } } diff --git a/runtime/Sources/Renderer/Descriptors/vk_descriptor_pool.cpp b/runtime/Sources/Renderer/Descriptors/vk_descriptor_pool.cpp deleted file mode 100644 index 87f44ce..0000000 --- a/runtime/Sources/Renderer/Descriptors/vk_descriptor_pool.cpp +++ /dev/null @@ -1,57 +0,0 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* vk_descriptor_pool.cpp :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2023/01/23 18:34:23 by maldavid #+# #+# */ -/* Updated: 2024/03/25 19:02:37 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - -#include - -#include "vk_descriptor_pool.h" -#include -#include - -namespace mlx -{ - void DescriptorPool::init(std::size_t n, VkDescriptorPoolSize* size) - { - VkDescriptorPoolCreateInfo poolInfo{}; - poolInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO; - poolInfo.poolSizeCount = n; - poolInfo.pPoolSizes = size; - poolInfo.maxSets = MAX_SETS_PER_POOL; - poolInfo.flags = VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT; - - VkResult res = vkCreateDescriptorPool(Render_Core::get().getDevice().get(), &poolInfo, nullptr, &_pool); - if(res != VK_SUCCESS) - core::error::report(e_kind::fatal_error, "Vulkan : failed to create descriptor pool, %s", RCore::verbaliseResultVk(res)); - _allocated_sets++; - #ifdef DEBUG - core::error::report(e_kind::message, "Vulkan : created new descriptor pool"); - #endif - } - - void 
DescriptorPool::freeDescriptor(const DescriptorSet& set) - { - if(!isInit()) - return; - const auto& sets = set.getAllFramesDescriptorSets(); - vkFreeDescriptorSets(Render_Core::get().getDevice().get(), _pool, sets.size(), sets.data()); - _allocated_sets--; // if this goes in underflow I quit - } - - void DescriptorPool::destroy() noexcept - { - if(_pool != VK_NULL_HANDLE) - vkDestroyDescriptorPool(Render_Core::get().getDevice().get(), _pool, nullptr); - _pool = VK_NULL_HANDLE; - #ifdef DEBUG - core::error::report(e_kind::message, "Vulkan : destroyed a descriptor pool"); - #endif - } -} diff --git a/runtime/Sources/Renderer/Descriptors/vk_descriptor_set.cpp b/runtime/Sources/Renderer/Descriptors/vk_descriptor_set.cpp deleted file mode 100644 index 07a5841..0000000 --- a/runtime/Sources/Renderer/Descriptors/vk_descriptor_set.cpp +++ /dev/null @@ -1,131 +0,0 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* vk_descriptor_set.cpp :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2023/01/23 18:40:44 by maldavid #+# #+# */ -/* Updated: 2024/03/25 19:02:43 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - -#include - -#include "vk_descriptor_set.h" -#include "renderer/core/render_core.h" -#include "vk_descriptor_pool.h" -#include "vk_descriptor_set_layout.h" -#include -#include -#include -#include - -namespace mlx -{ - void DescriptorSet::init(Renderer* renderer, DescriptorPool* pool, DescriptorSetLayout* layout) - { - MLX_PROFILE_FUNCTION(); - _renderer = renderer; - _layout = layout; - _pool = pool; - - auto device = Render_Core::get().getDevice().get(); - - std::array layouts; - layouts.fill(layout->get()); - - VkDescriptorSetAllocateInfo allocInfo{}; - allocInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO; - allocInfo.descriptorPool = _pool->get(); - allocInfo.descriptorSetCount = static_cast(MAX_FRAMES_IN_FLIGHT); - allocInfo.pSetLayouts = layouts.data(); - - VkResult res = vkAllocateDescriptorSets(device, &allocInfo, _desc_set.data()); - if(res != VK_SUCCESS) - core::error::report(e_kind::fatal_error, "Vulkan : failed to allocate descriptor set, %s", RCore::verbaliseResultVk(res)); - #ifdef DEBUG - core::error::report(e_kind::message, "Vulkan : created new descriptor set"); - #endif - } - - void DescriptorSet::writeDescriptor(int binding, UBO* ubo) const noexcept - { - MLX_PROFILE_FUNCTION(); - auto device = Render_Core::get().getDevice().get(); - - for(int i = 0; i < MAX_FRAMES_IN_FLIGHT; i++) - { - VkDescriptorBufferInfo bufferInfo{}; - bufferInfo.buffer = ubo->get(i); - bufferInfo.offset = ubo->getOffset(i); - bufferInfo.range = ubo->getSize(i); - - VkWriteDescriptorSet descriptorWrite{}; - descriptorWrite.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET; - descriptorWrite.dstSet = _desc_set[i]; - descriptorWrite.dstBinding = binding; - descriptorWrite.dstArrayElement = 0; - descriptorWrite.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER; - descriptorWrite.descriptorCount = 1; - descriptorWrite.pBufferInfo = &bufferInfo; - - vkUpdateDescriptorSets(device, 1, &descriptorWrite, 0, nullptr); - } - } - - void DescriptorSet::writeDescriptor(int binding, const Image& image) const noexcept - { - MLX_PROFILE_FUNCTION(); - auto device = Render_Core::get().getDevice().get(); - - VkDescriptorImageInfo imageInfo{}; - imageInfo.imageLayout = image.getLayout(); - imageInfo.imageView = 
image.getImageView(); - imageInfo.sampler = image.getSampler(); - - VkWriteDescriptorSet descriptorWrite{}; - descriptorWrite.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET; - descriptorWrite.dstSet = _desc_set[_renderer->getActiveImageIndex()]; - descriptorWrite.dstBinding = binding; - descriptorWrite.dstArrayElement = 0; - descriptorWrite.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER; - descriptorWrite.descriptorCount = 1; - descriptorWrite.pImageInfo = &imageInfo; - - vkUpdateDescriptorSets(device, 1, &descriptorWrite, 0, nullptr); - } - - DescriptorSet DescriptorSet::duplicate() - { - MLX_PROFILE_FUNCTION(); - DescriptorSet set; - set.init(_renderer, &Render_Core::get().getDescriptorPool(), _layout); - return set; - } - - VkDescriptorSet& DescriptorSet::operator()() noexcept - { - return _desc_set[_renderer->getActiveImageIndex()]; - } - - VkDescriptorSet& DescriptorSet::get() noexcept - { - return _desc_set[_renderer->getActiveImageIndex()]; - } - - void DescriptorSet::destroy() noexcept - { - MLX_PROFILE_FUNCTION(); - if(_pool != nullptr && Render_Core::get().isInit()) // checks if the render core is still init (it should always be init but just in case) - _pool->freeDescriptor(*this); - for(auto& set : _desc_set) - { - if(set != VK_NULL_HANDLE) - set = VK_NULL_HANDLE; - } - #ifdef DEBUG - core::error::report(e_kind::message, "Vulkan : destroyed descriptor set"); - #endif - } -} diff --git a/runtime/Sources/Renderer/Images/vk_image.cpp b/runtime/Sources/Renderer/Images/Image.cpp similarity index 60% rename from runtime/Sources/Renderer/Images/vk_image.cpp rename to runtime/Sources/Renderer/Images/Image.cpp index 28f9054..d6e62ba 100644 --- a/runtime/Sources/Renderer/Images/vk_image.cpp +++ b/runtime/Sources/Renderer/Images/Image.cpp @@ -1,26 +1,23 @@ /* ************************************************************************** */ /* */ /* ::: :::::::: */ -/* vk_image.cpp :+: :+: :+: */ +/* Image.cpp :+: :+: :+: */ /* +:+ +:+ +:+ */ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/01/25 11:59:07 by maldavid #+# #+# */ -/* Updated: 2024/03/25 19:03:27 by maldavid ### ########.fr */ +/* Updated: 2024/04/23 20:02:25 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ -#include +#include -#include "vk_image.h" -#include -#include -#include -#include +#include +#include namespace mlx { - bool isStencilFormat(VkFormat format) + bool IsStencilFormat(VkFormat format) { switch(format) { @@ -32,7 +29,7 @@ namespace mlx } } - bool isDepthFormat(VkFormat format) + bool IsDepthFormat(VkFormat format) { switch(format) { @@ -47,7 +44,7 @@ namespace mlx } } - VkFormat bitsToFormat(std::uint32_t bits) + VkFormat BitsToFormat(std::uint32_t bits) { switch(bits) { @@ -61,67 +58,67 @@ namespace mlx case 128: return VK_FORMAT_R32G32B32A32_SFLOAT; default: - core::error::report(e_kind::fatal_error, "Vulkan : unsupported image bit-depth"); + FatalError("Vulkan : unsupported image bit-depth"); return VK_FORMAT_R8G8B8A8_UNORM; } } - VkPipelineStageFlags layoutToAccessMask(VkImageLayout layout, bool isDestination) + VkPipelineStageFlags LayoutToAccessMask(VkImageLayout layout, bool is_destination) { - VkPipelineStageFlags accessMask = 0; + VkPipelineStageFlags access_mask = 0; switch(layout) { case VK_IMAGE_LAYOUT_UNDEFINED: - if(isDestination) - core::error::report(e_kind::error, "Vulkan : the new layout used in a transition must not be VK_IMAGE_LAYOUT_UNDEFINED"); + if(is_destination) + Error("Vulkan : the new 
layout used in a transition must not be VK_IMAGE_LAYOUT_UNDEFINED"); break; - case VK_IMAGE_LAYOUT_GENERAL: accessMask = VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT; break; - case VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL: accessMask = VK_ACCESS_COLOR_ATTACHMENT_READ_BIT | VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT; break; - case VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL: accessMask = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT | VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT; break; + case VK_IMAGE_LAYOUT_GENERAL: access_mask = VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT; break; + case VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL: access_mask = VK_ACCESS_COLOR_ATTACHMENT_READ_BIT | VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT; break; + case VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL: access_mask = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT | VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT; break; case VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL: - accessMask = VK_ACCESS_SHADER_READ_BIT; // VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT; + access_mask = VK_ACCESS_SHADER_READ_BIT; // VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT; break; - case VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL: accessMask = VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_INPUT_ATTACHMENT_READ_BIT; break; - case VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL: accessMask = VK_ACCESS_TRANSFER_READ_BIT; break; - case VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL: accessMask = VK_ACCESS_TRANSFER_WRITE_BIT; break; + case VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL: access_mask = VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_INPUT_ATTACHMENT_READ_BIT; break; + case VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL: access_mask = VK_ACCESS_TRANSFER_READ_BIT; break; + case VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL: access_mask = VK_ACCESS_TRANSFER_WRITE_BIT; break; case VK_IMAGE_LAYOUT_PREINITIALIZED: - if(!isDestination) - accessMask = VK_ACCESS_HOST_WRITE_BIT; + if(!is_destination) + access_mask = VK_ACCESS_HOST_WRITE_BIT; else - core::error::report(e_kind::error, "Vulkan : the new layout used in a transition must not be VK_IMAGE_LAYOUT_PREINITIALIZED"); + Error("Vulkan : the new layout used in a transition must not be VK_IMAGE_LAYOUT_PREINITIALIZED"); break; - case VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL: accessMask = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT; break; - case VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL: accessMask = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT; break; - case VK_IMAGE_LAYOUT_PRESENT_SRC_KHR: accessMask = VK_ACCESS_MEMORY_READ_BIT; break; + case VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL: access_mask = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT; break; + case VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL: access_mask = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT; break; + case VK_IMAGE_LAYOUT_PRESENT_SRC_KHR: access_mask = VK_ACCESS_MEMORY_READ_BIT; break; - default: core::error::report(e_kind::error, "Vulkan : unexpected image layout"); break; + default: Error("Vulkan : unexpected image layout"); break; } - return accessMask; + return access_mask; } - void Image::create(std::uint32_t width, std::uint32_t height, VkFormat format, VkImageTiling tiling, VkImageUsageFlags usage, const char* name, bool dedicated_memory) + void Image::Create(std::uint32_t width, std::uint32_t height, VkFormat format, VkImageTiling tiling, VkImageUsageFlags usage, const char* name, bool dedicated_memory) { - _width = width; - _height = height; - _format = format; - _tiling = tiling; + m_width = 
width; + m_height = height; + m_format = format; + m_tiling = tiling; - VkImageCreateInfo imageInfo{}; - imageInfo.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO; - imageInfo.imageType = VK_IMAGE_TYPE_2D; - imageInfo.extent.width = width; - imageInfo.extent.height = height; - imageInfo.extent.depth = 1; - imageInfo.mipLevels = 1; - imageInfo.arrayLayers = 1; - imageInfo.format = format; - imageInfo.tiling = tiling; - imageInfo.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED; - imageInfo.usage = usage; - imageInfo.samples = VK_SAMPLE_COUNT_1_BIT; - imageInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE; + VkImageCreateInfo image_info{}; + image_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO; + image_info.imageType = VK_IMAGE_TYPE_2D; + image_info.extent.width = width; + image_info.extent.height = height; + image_info.extent.depth = 1; + image_info.mipLevels = 1; + image_info.arrayLayers = 1; + image_info.format = format; + image_info.tiling = tiling; + image_info.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED; + image_info.usage = usage; + image_info.samples = VK_SAMPLE_COUNT_1_BIT; + image_info.sharingMode = VK_SHARING_MODE_EXCLUSIVE; VmaAllocationCreateInfo alloc_info{}; alloc_info.usage = VMA_MEMORY_USAGE_AUTO_PREFER_DEVICE; @@ -131,35 +128,35 @@ namespace mlx alloc_info.priority = 1.0f; } - _allocation = Render_Core::get().getAllocator().createImage(&imageInfo, &alloc_info, _image, name); + m_allocation = RenderCore::Get().GetAllocator().CreateImage(&image_info, &alloc_info, m_image, name); #ifdef DEBUG - _name = name; + m_name = name; #endif } - void Image::createImageView(VkImageViewType type, VkImageAspectFlags aspectFlags) noexcept + void Image::CreateImageView(VkImageViewType type, VkImageAspectFlags aspect_flags) noexcept { - VkImageViewCreateInfo viewInfo{}; - viewInfo.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO; - viewInfo.image = _image; - viewInfo.viewType = type; - viewInfo.format = _format; - viewInfo.subresourceRange.aspectMask = aspectFlags; - viewInfo.subresourceRange.baseMipLevel = 0; - viewInfo.subresourceRange.levelCount = 1; - viewInfo.subresourceRange.baseArrayLayer = 0; - viewInfo.subresourceRange.layerCount = 1; + VkImageViewCreateInfo view_info{}; + view_info.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO; + view_info.image = m_image; + view_info.viewType = type; + view_info.format = m_format; + view_info.subresourceRange.aspectMask = aspect_flags; + view_info.subresourceRange.baseMipLevel = 0; + view_info.subresourceRange.levelCount = 1; + view_info.subresourceRange.baseArrayLayer = 0; + view_info.subresourceRange.layerCount = 1; - VkResult res = vkCreateImageView(Render_Core::get().getDevice().get(), &viewInfo, nullptr, &_image_view); + VkResult res = vkCreateImageView(RenderCore::Get().GetDevice().Get(), &view_info, nullptr, &m_image_view); if(res != VK_SUCCESS) - core::error::report(e_kind::fatal_error, "Vulkan : failed to create an image view, %s", RCore::verbaliseResultVk(res)); + FatalError("Vulkan : failed to create an image view, %s", VerbaliseVkResult(res)); #ifdef DEBUG else - Render_Core::get().getLayers().setDebugUtilsObjectNameEXT(VK_OBJECT_TYPE_IMAGE_VIEW, (std::uint64_t)_image_view, _name.c_str()); + RenderCore::Get().GetLayers().SetDebugUtilsObjectNameEXT(VK_OBJECT_TYPE_IMAGE_VIEW, (std::uint64_t)m_image_view, m_name.c_str()); #endif } - void Image::createSampler() noexcept + void Image::CreateSampler() noexcept { VkSamplerCreateInfo info{}; info.sType = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO; @@ -174,94 +171,94 @@ namespace mlx info.anisotropyEnable = 
VK_FALSE; info.maxAnisotropy = 1.0f; - VkResult res = vkCreateSampler(Render_Core::get().getDevice().get(), &info, nullptr, &_sampler); + VkResult res = vkCreateSampler(RenderCore::Get().GetDevice().Get(), &info, nullptr, &m_sampler); if(res != VK_SUCCESS) - core::error::report(e_kind::fatal_error, "Vulkan : failed to create an image sampler, %s", RCore::verbaliseResultVk(res)); + FatalError("Vulkan : failed to create an image sampler, %", VerbaliseVkResult(res)); #ifdef DEBUG else - Render_Core::get().getLayers().setDebugUtilsObjectNameEXT(VK_OBJECT_TYPE_SAMPLER, (std::uint64_t)_sampler, _name.c_str()); + RenderCore::Get().GetLayers().SetDebugUtilsObjectNameEXT(VK_OBJECT_TYPE_SAMPLER, (std::uint64_t)m_sampler, m_name.c_str()); #endif } - void Image::copyFromBuffer(Buffer& buffer) + void Image::CopyFromBuffer(Buffer& buffer) { - CmdBuffer& cmd = Render_Core::get().getSingleTimeCmdBuffer(); - cmd.beginRecord(); + CommandBuffer& cmd = RenderCore::Get().GetSingleTimeCmdBuffer(); + cmd.BeginRecord(); - VkImageLayout layout_save = _layout; - transitionLayout(VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, &cmd); + VkImageLayout layout_save = m_layout; + TransitionLayout(VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, &cmd); - cmd.copyBufferToImage(buffer, *this); + cmd.CopyBufferToImage(buffer, *this); - transitionLayout(layout_save, &cmd); + TransitionLayout(layout_save, &cmd); - cmd.endRecord(); - cmd.submitIdle(); + cmd.EndRecord(); + cmd.SubmitIdle(); } - void Image::copyToBuffer(Buffer& buffer) + void Image::CopyToBuffer(Buffer& buffer) { - CmdBuffer& cmd = Render_Core::get().getSingleTimeCmdBuffer(); - cmd.beginRecord(); + CommandBuffer& cmd = RenderCore::Get().GetSingleTimeCmdBuffer(); + cmd.BeginRecord(); - VkImageLayout layout_save = _layout; - transitionLayout(VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, &cmd); + VkImageLayout layout_save = m_layout; + TransitionLayout(VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, &cmd); - cmd.copyImagetoBuffer(*this, buffer); + cmd.CopyImagetoBuffer(*this, buffer); - transitionLayout(layout_save, &cmd); + TransitionLayout(layout_save, &cmd); - cmd.endRecord(); - cmd.submitIdle(); + cmd.EndRecord(); + cmd.SubmitIdle(); } - void Image::transitionLayout(VkImageLayout new_layout, CmdBuffer* cmd) + void Image::TransitionLayout(VkImageLayout new_layout, NonOwningPtr cmd) { - if(new_layout == _layout) + if(new_layout == m_layout) return; - bool singleTime = (cmd == nullptr); - if(singleTime) + bool single_time = (cmd == nullptr); + if(single_time) { - cmd = &Render_Core::get().getSingleTimeCmdBuffer(); - cmd->beginRecord(); + cmd = &RenderCore::Get().GetSingleTimeCmdBuffer(); + cmd->BeginRecord(); } - cmd->transitionImageLayout(*this, new_layout); + cmd->TransitionImageLayout(*this, new_layout); - if(singleTime) + if(single_time) { - cmd->endRecord(); - cmd->submitIdle(); + cmd->EndRecord(); + cmd->SubmitIdle(); } - _layout = new_layout; + m_layout = new_layout; } - void Image::destroySampler() noexcept + void Image::DestroySampler() noexcept { - if(_sampler != VK_NULL_HANDLE) - vkDestroySampler(Render_Core::get().getDevice().get(), _sampler, nullptr); - _sampler = VK_NULL_HANDLE; + if(m_sampler != VK_NULL_HANDLE) + vkDestroySampler(RenderCore::Get().GetDevice().Get(), m_sampler, nullptr); + m_sampler = VK_NULL_HANDLE; } - void Image::destroyImageView() noexcept + void Image::DestroyImageView() noexcept { - if(_image_view != VK_NULL_HANDLE) - vkDestroyImageView(Render_Core::get().getDevice().get(), _image_view, nullptr); - _image_view = VK_NULL_HANDLE; + if(m_image_view != VK_NULL_HANDLE) + 
vkDestroyImageView(RenderCore::Get().GetDevice().Get(), m_image_view, nullptr); + m_image_view = VK_NULL_HANDLE; } - void Image::destroy() noexcept + void Image::Destroy() noexcept { - destroySampler(); - destroyImageView(); + DestroySampler(); + DestroyImageView(); - if(_image != VK_NULL_HANDLE) - Render_Core::get().getAllocator().destroyImage(_allocation, _image); - _image = VK_NULL_HANDLE; + if(m_image != VK_NULL_HANDLE) + RenderCore::Get().GetAllocator().DestroyImage(m_allocation, m_image); + m_image = VK_NULL_HANDLE; } - std::uint32_t formatSize(VkFormat format) + std::uint32_t FormatSize(VkFormat format) { switch(format) { diff --git a/runtime/Sources/Renderer/Images/texture.cpp b/runtime/Sources/Renderer/Images/Texture.cpp similarity index 95% rename from runtime/Sources/Renderer/Images/texture.cpp rename to runtime/Sources/Renderer/Images/Texture.cpp index 5037ac2..420200f 100644 --- a/runtime/Sources/Renderer/Images/texture.cpp +++ b/runtime/Sources/Renderer/Images/Texture.cpp @@ -1,25 +1,23 @@ /* ************************************************************************** */ /* */ /* ::: :::::::: */ -/* texture.cpp :+: :+: :+: */ +/* Texture.cpp :+: :+: :+: */ /* +:+ +:+ +:+ */ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/03/31 18:03:35 by maldavid #+# #+# */ -/* Updated: 2024/03/25 19:03:04 by maldavid ### ########.fr */ +/* Updated: 2024/04/23 20:59:20 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #define STB_IMAGE_IMPLEMENTATION #include -#include +#include -#include -#include -#include -#include -#include +#include +#include +#include #ifdef IMAGE_OPTIMIZED #define TILING VK_IMAGE_TILING_OPTIMAL diff --git a/runtime/Sources/Renderer/Images/texture_atlas.cpp b/runtime/Sources/Renderer/Images/TextureAtlas.cpp similarity index 100% rename from runtime/Sources/Renderer/Images/texture_atlas.cpp rename to runtime/Sources/Renderer/Images/TextureAtlas.cpp diff --git a/runtime/Sources/Renderer/Pipelines/pipeline.cpp b/runtime/Sources/Renderer/Pipelines/Pipeline.cpp similarity index 100% rename from runtime/Sources/Renderer/Pipelines/pipeline.cpp rename to runtime/Sources/Renderer/Pipelines/Pipeline.cpp diff --git a/xmake.lua b/xmake.lua index d2d835d..449cb19 100644 --- a/xmake.lua +++ b/xmake.lua @@ -53,9 +53,9 @@ target("mlx") add_options("images_optimized") add_options("force_integrated_gpu") add_options("graphics_memory_dump") - add_includedirs("includes", "src", "third_party") + add_includedirs("runtime/Includes", "runtime/Sources", "third_party") - set_pcxxheader("src/pre_compiled.h") + set_pcxxheader("runtime/Sources/PreCompiled.h") add_defines("MLX_BUILD", "SDL_MAIN_HANDLED") From be19b71c555a34c7250d76c5f2ecd4269d230c36 Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Tue, 23 Apr 2024 22:59:33 +0200 Subject: [PATCH 010/131] still working on code refactoring --- .../Renderer/Descriptors/DescriptorSet.h | 8 +- .../Descriptors/DescriptorSetLayout.h | 6 +- .../Renderer/Images/TextureDescriptor.h | 6 +- .../{TextureManager.h => TextureRegistry.h} | 47 +--- .../Renderer/Images/TextureRegistry.inl | 52 ++++ runtime/Includes/Renderer/Renderer.h | 49 +--- runtime/Includes/Renderer/Vertex.h | 60 +++++ .../Renderer/Descriptors/DescriptorSet.cpp | 21 +- runtime/Sources/Renderer/Images/Texture.cpp | 153 +++++------ .../Sources/Renderer/Images/TextureAtlas.cpp | 40 +-- .../Sources/Renderer/Pipelines/Pipeline.cpp | 239 +++++++++--------- .../Renderer/Renderpass/Framebuffer.cpp | 54 ++-- 
.../Renderer/Renderpass/Renderpass.cpp | 128 +++++----- .../Sources/Renderer/Renderpass/Swapchain.cpp | 158 ++++++------ runtime/Sources/Renderer/Texts/Font.cpp | 51 ++-- .../Sources/Renderer/Texts/FontLibrary.cpp | 6 +- 16 files changed, 560 insertions(+), 518 deletions(-) rename runtime/Includes/Renderer/Images/{TextureManager.h => TextureRegistry.h} (52%) create mode 100644 runtime/Includes/Renderer/Images/TextureRegistry.inl create mode 100644 runtime/Includes/Renderer/Vertex.h diff --git a/runtime/Includes/Renderer/Descriptors/DescriptorSet.h b/runtime/Includes/Renderer/Descriptors/DescriptorSet.h index 11b41a0..c50aec9 100644 --- a/runtime/Includes/Renderer/Descriptors/DescriptorSet.h +++ b/runtime/Includes/Renderer/Descriptors/DescriptorSet.h @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/01/23 18:39:36 by maldavid #+# #+# */ -/* Updated: 2024/04/23 19:49:02 by maldavid ### ########.fr */ +/* Updated: 2024/04/23 22:14:48 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -30,11 +30,15 @@ namespace mlx inline bool IsInit() const noexcept { return m_pool != nullptr && m_renderer != nullptr; } + void Bind() noexcept; + DescriptorSet Duplicate(); VkDescriptorSet& operator()() noexcept; VkDescriptorSet& Get() noexcept; + inline const DescriptorSetLayout& GetLayout() const noexcept { return m_layout; } + inline const std::array& GetAllFramesDescriptorSets() const { return m_desc_set; } void Destroy() noexcept; @@ -42,7 +46,7 @@ namespace mlx ~DescriptorSet() = default; private: - DescriptorSetLayout p_layout; + DescriptorSetLayout m_layout; std::array m_desc_set; NonOwningPtr p_pool; NonOwningPtr p_renderer; diff --git a/runtime/Includes/Renderer/Descriptors/DescriptorSetLayout.h b/runtime/Includes/Renderer/Descriptors/DescriptorSetLayout.h index c53cfdf..ce13a5f 100644 --- a/runtime/Includes/Renderer/Descriptors/DescriptorSetLayout.h +++ b/runtime/Includes/Renderer/Descriptors/DescriptorSetLayout.h @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/01/23 18:36:22 by maldavid #+# #+# */ -/* Updated: 2024/04/23 19:50:50 by maldavid ### ########.fr */ +/* Updated: 2024/04/23 22:15:01 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -23,8 +23,8 @@ namespace mlx void Init(std::vector> binds, VkShaderStageFlagBits stage); void Destroy() noexcept; - inline VkDescriptorSetLayout& operator()() noexcept { return m_layout; } - inline VkDescriptorSetLayout& Get() noexcept { return m_layout; } + inline VkDescriptorSetLayout operator()() const noexcept { return m_layout; } + inline VkDescriptorSetLayout Get() const noexcept { return m_layout; } inline const std::vector>& GetBindings() const noexcept { return m_bindings; } ~DescriptorSetLayout() = default; diff --git a/runtime/Includes/Renderer/Images/TextureDescriptor.h b/runtime/Includes/Renderer/Images/TextureDescriptor.h index 56683a9..7a9706b 100644 --- a/runtime/Includes/Renderer/Images/TextureDescriptor.h +++ b/runtime/Includes/Renderer/Images/TextureDescriptor.h @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2024/01/11 01:00:13 by maldavid #+# #+# */ -/* Updated: 2024/03/28 22:13:23 by maldavid ### ########.fr */ +/* Updated: 2024/04/23 22:08:02 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -27,11 +27,11 @@ namespace mlx 
TextureRenderDescriptor(NonOwningPtr _texture, int _x, int _y) : texture(_texture), x(_x), y(_y) {} inline bool operator==(const TextureRenderDescriptor& rhs) const { return texture == rhs.texture && x == rhs.x && y == rhs.y; } - inline void Render(std::array& sets, class Renderer& renderer) override + inline void Render(class Renderer& renderer) override { if(!texture->IsInit()) return; - texture->Render(sets, renderer, x, y); + texture->Render(renderer, x, y); } inline void ResetUpdate() override { diff --git a/runtime/Includes/Renderer/Images/TextureManager.h b/runtime/Includes/Renderer/Images/TextureRegistry.h similarity index 52% rename from runtime/Includes/Renderer/Images/TextureManager.h rename to runtime/Includes/Renderer/Images/TextureRegistry.h index e9f655d..432ab21 100644 --- a/runtime/Includes/Renderer/Images/TextureManager.h +++ b/runtime/Includes/Renderer/Images/TextureRegistry.h @@ -1,12 +1,12 @@ /* ************************************************************************** */ /* */ /* ::: :::::::: */ -/* TextureManager.h :+: :+: :+: */ +/* TextureRegistry.h :+: :+: :+: */ /* +:+ +:+ +:+ */ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2024/01/11 00:56:15 by maldavid #+# #+# */ -/* Updated: 2024/04/03 16:24:51 by maldavid ### ########.fr */ +/* Updated: 2024/04/23 22:10:08 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -17,48 +17,23 @@ namespace mlx { - class TextureManager + class TextureRegistry { public: - TextureManager() = default; + TextureRegistry() = default; - inline void Clear() { m_texture_descriptors.clear(); } + inline void Clear(); + inline std::pair, bool> RegisterTexture(NonOwningPtr texture, int x, int y); + inline bool IsTextureKnown(NonOwningPtr texture) noexcept; + inline void EraseTextures(NonOwningPtr texture); - inline std::pair, bool> RegisterTexture(NonOwningPtr texture, int x, int y) - { - MLX_PROFILE_FUNCTION(); - auto res = m_texture_descriptors.emplace(texture, x, y); - return std::make_pair(static_cast(&const_cast(*res.first)), res.second); - } - - inline bool IsTextureKnown(NonOwningPtr texture) noexcept - { - MLX_PROFILE_FUNCTION(); - for(const auto& desc : m_texture_descriptors) - { - if(desc.texture == texture) - return true; - } - return false; - } - - inline void EraseTextures(NonOwningPtr texture) - { - MLX_PROFILE_FUNCTION(); - for(auto it = m_texture_descriptors.begin(); it != m_texture_descriptors.end();) - { - if(it->texture == texture) - it = m_texture_descriptors.erase(it); - else - ++it; - } - } - - ~TextureManager() = default; + ~TextureRegistry() = default; private: std::unordered_set m_texture_descriptors; }; } +#include + #endif diff --git a/runtime/Includes/Renderer/Images/TextureRegistry.inl b/runtime/Includes/Renderer/Images/TextureRegistry.inl new file mode 100644 index 0000000..5a8986b --- /dev/null +++ b/runtime/Includes/Renderer/Images/TextureRegistry.inl @@ -0,0 +1,52 @@ +/* ************************************************************************** */ +/* */ +/* ::: :::::::: */ +/* TextureRegistry.inl :+: :+: :+: */ +/* +:+ +:+ +:+ */ +/* By: maldavid +#+ +:+ +#+ */ +/* +#+#+#+#+#+ +#+ */ +/* Created: 2024/04/23 22:08:46 by maldavid #+# #+# */ +/* Updated: 2024/04/23 22:11:09 by maldavid ### ########.fr */ +/* */ +/* ************************************************************************** */ + +#pragma once +#include + +namespace mlx +{ + void TextureRegistry::Clear() + { + m_texture_descriptors.clear(); + } + + std::pair, 
bool> TextureRegistry::RegisterTexture(NonOwningPtr texture, int x, int y) + { + MLX_PROFILE_FUNCTION(); + auto res = m_texture_descriptors.emplace(texture, x, y); + return std::make_pair(static_cast(&const_cast(*res.first)), res.second); + } + + bool TextureRegistry::IsTextureKnown(NonOwningPtr texture) noexcept + { + MLX_PROFILE_FUNCTION(); + for(const auto& desc : m_texture_descriptors) + { + if(desc.texture == texture) + return true; + } + return false; + } + + void TextureRegistry::EraseTextures(NonOwningPtr texture) + { + MLX_PROFILE_FUNCTION(); + for(auto it = m_texture_descriptors.begin(); it != m_texture_descriptors.end();) + { + if(it->texture == texture) + it = m_texture_descriptors.erase(it); + else + ++it; + } + } +} diff --git a/runtime/Includes/Renderer/Renderer.h b/runtime/Includes/Renderer/Renderer.h index 4003a3e..ac6e58e 100644 --- a/runtime/Includes/Renderer/Renderer.h +++ b/runtime/Includes/Renderer/Renderer.h @@ -6,13 +6,14 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/12/18 17:14:45 by maldavid #+# #+# */ -/* Updated: 2024/03/28 22:36:05 by maldavid ### ########.fr */ +/* Updated: 2024/04/23 22:25:13 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __RENDERER__ #define __RENDERER__ +#include #include #include #include @@ -28,47 +29,6 @@ namespace mlx { - struct Vertex - { - glm::vec2 pos; - glm::vec4 color; - glm::vec2 uv; - - Vertex(glm::vec2 _pos, glm::vec4 _color, glm::vec2 _uv) : pos(std::move(_pos)), color(std::move(_color)), uv(std::move(_uv)) {} - - static VkVertexInputBindingDescription GetBindingDescription() - { - VkVertexInputBindingDescription binding_description{}; - binding_description.binding = 0; - binding_description.stride = sizeof(Vertex); - binding_description.inputRate = VK_VERTEX_INPUT_RATE_VERTEX; - - return binding_description; - } - - static std::array GetAttributeDescriptions() - { - std::array attribute_descriptions; - - attribute_descriptions[0].binding = 0; - attribute_descriptions[0].location = 0; - attribute_descriptions[0].format = VK_FORMAT_R32G32_SFLOAT; - attribute_descriptions[0].offset = offsetof(Vertex, pos); - - attribute_descriptions[1].binding = 0; - attribute_descriptions[1].location = 1; - attribute_descriptions[1].format = VK_FORMAT_R32G32B32A32_SFLOAT; - attribute_descriptions[1].offset = offsetof(Vertex, color); - - attribute_descriptions[2].binding = 0; - attribute_descriptions[2].location = 2; - attribute_descriptions[2].format = VK_FORMAT_R32G32_SFLOAT; - attribute_descriptions[2].offset = offsetof(Vertex, uv); - - return attribute_descriptions; - } - }; - class Renderer { public: @@ -96,8 +56,6 @@ namespace mlx inline FrameBuffer& GetFrameBuffer(int i) noexcept { return m_framebuffers[i]; } inline DescriptorSet& GetVertDescriptorSet() noexcept { return m_vert_set; } inline DescriptorSet& GetFragDescriptorSet() noexcept { return m_frag_set; } - inline DescriptorSetLayout& GetVertDescriptorSetLayout() noexcept { return m_vert_layout; } - inline DescriptorSetLayout& GetFragDescriptorSetLayout() noexcept { return m_frag_layout; } inline std::uint32_t GetActiveImageIndex() noexcept { return m_current_frame_index; } inline std::uint32_t GetImageIndex() noexcept { return m_image_index; } @@ -117,9 +75,6 @@ namespace mlx std::array m_semaphores; std::vector m_framebuffers; - DescriptorSetLayout m_vert_layout; - DescriptorSetLayout m_frag_layout; - DescriptorSet m_vert_set; DescriptorSet m_frag_set; diff --git 
a/runtime/Includes/Renderer/Vertex.h b/runtime/Includes/Renderer/Vertex.h new file mode 100644 index 0000000..285825e --- /dev/null +++ b/runtime/Includes/Renderer/Vertex.h @@ -0,0 +1,60 @@ +/* ************************************************************************** */ +/* */ +/* ::: :::::::: */ +/* Vertex.h :+: :+: :+: */ +/* +:+ +:+ +:+ */ +/* By: maldavid +#+ +:+ +#+ */ +/* +#+#+#+#+#+ +#+ */ +/* Created: 2024/04/23 22:24:33 by maldavid #+# #+# */ +/* Updated: 2024/04/23 22:25:01 by maldavid ### ########.fr */ +/* */ +/* ************************************************************************** */ + +#ifndef __MLX_RENDERER_VERTEX__ +#define __MLX_RENDERER_VERTEX__ + +namespace mlx +{ + struct Vertex + { + glm::vec2 pos; + glm::vec4 color; + glm::vec2 uv; + + Vertex(glm::vec2 _pos, glm::vec4 _color, glm::vec2 _uv) : pos(std::move(_pos)), color(std::move(_color)), uv(std::move(_uv)) {} + + static VkVertexInputBindingDescription GetBindingDescription() + { + VkVertexInputBindingDescription binding_description{}; + binding_description.binding = 0; + binding_description.stride = sizeof(Vertex); + binding_description.inputRate = VK_VERTEX_INPUT_RATE_VERTEX; + + return binding_description; + } + + static std::array GetAttributeDescriptions() + { + std::array attribute_descriptions; + + attribute_descriptions[0].binding = 0; + attribute_descriptions[0].location = 0; + attribute_descriptions[0].format = VK_FORMAT_R32G32_SFLOAT; + attribute_descriptions[0].offset = offsetof(Vertex, pos); + + attribute_descriptions[1].binding = 0; + attribute_descriptions[1].location = 1; + attribute_descriptions[1].format = VK_FORMAT_R32G32B32A32_SFLOAT; + attribute_descriptions[1].offset = offsetof(Vertex, color); + + attribute_descriptions[2].binding = 0; + attribute_descriptions[2].location = 2; + attribute_descriptions[2].format = VK_FORMAT_R32G32_SFLOAT; + attribute_descriptions[2].offset = offsetof(Vertex, uv); + + return attribute_descriptions; + } + }; +} + +#endif diff --git a/runtime/Sources/Renderer/Descriptors/DescriptorSet.cpp b/runtime/Sources/Renderer/Descriptors/DescriptorSet.cpp index b28ba9b..75d6427 100644 --- a/runtime/Sources/Renderer/Descriptors/DescriptorSet.cpp +++ b/runtime/Sources/Renderer/Descriptors/DescriptorSet.cpp @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/01/23 18:40:44 by maldavid #+# #+# */ -/* Updated: 2024/04/23 19:50:06 by maldavid ### ########.fr */ +/* Updated: 2024/04/23 21:17:39 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -24,9 +24,9 @@ namespace mlx void DescriptorSet::Init(NonOwningPtr renderer, NonOwningPtr pool, DescriptorSetLayout layout) { MLX_PROFILE_FUNCTION(); - m_renderer = renderer; + p_renderer = renderer; m_layout = layout; - m_pool = pool; + p_pool = pool; for(int i = 0; i < MAX_FRAMES_IN_FLIGHT; i++) m_desc_set[i] = pool->AllocateDescriptorSet(layout); @@ -79,29 +79,34 @@ namespace mlx vkUpdateDescriptorSets(device, 1, &descriptor_write, 0, nullptr); } + void DescriptorSet::Bind() noexcept + { + vkCmdBindDescriptorSets(p_renderer->GetActiveCmdBuffer().Get(), VK_PIPELINE_BIND_POINT_GRAPHICS, p_renderer->GetPipeline().GetPipelineLayout(), 0, 1, m_desc_set[p_renderer->GetActiveImageIndex()], 0, nullptr); + } + DescriptorSet DescriptorSet::Duplicate() { MLX_PROFILE_FUNCTION(); DescriptorSet set; - set.Init(m_renderer, &RenderCore::Get().GetDescriptorPool(), m_layout); + set.Init(p_renderer, &RenderCore::Get().GetDescriptorPool(), m_layout); 
return set; } VkDescriptorSet& DescriptorSet::operator()() noexcept { - return m_desc_set[m_renderer->GetActiveImageIndex()]; + return m_desc_set[p_renderer->GetActiveImageIndex()]; } VkDescriptorSet& DescriptorSet::Get() noexcept { - return m_desc_set[m_renderer->GetActiveImageIndex()]; + return m_desc_set[p_renderer->GetActiveImageIndex()]; } void DescriptorSet::Destroy() noexcept { MLX_PROFILE_FUNCTION(); - if(m_pool != nullptr && RenderCore::Get().IsInit()) // checks if the render core is still init (it should always be init but just in case) - m_pool->FreeDescriptor(*this); + if(p_pool != nullptr && RenderCore::Get().IsInit()) // checks if the render core is still init (it should always be init but just in case) + p_pool->FreeDescriptor(*this); for(auto& set : m_desc_set) { if(set != VK_NULL_HANDLE) diff --git a/runtime/Sources/Renderer/Images/Texture.cpp b/runtime/Sources/Renderer/Images/Texture.cpp index 420200f..18090f8 100644 --- a/runtime/Sources/Renderer/Images/Texture.cpp +++ b/runtime/Sources/Renderer/Images/Texture.cpp @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/03/31 18:03:35 by maldavid #+# #+# */ -/* Updated: 2024/04/23 20:59:20 by maldavid ### ########.fr */ +/* Updated: 2024/04/23 21:52:23 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -27,30 +27,30 @@ namespace mlx { - void Texture::create(std::uint8_t* pixels, std::uint32_t width, std::uint32_t height, VkFormat format, const char* name, bool dedicated_memory) + void Texture::Create(std::uint8_t* pixels, std::uint32_t width, std::uint32_t height, VkFormat format, const char* name, bool dedicated_memory) { MLX_PROFILE_FUNCTION(); - Image::create(width, height, format, TILING, VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_SAMPLED_BIT, name, dedicated_memory); - Image::createImageView(VK_IMAGE_VIEW_TYPE_2D, VK_IMAGE_ASPECT_COLOR_BIT); - Image::createSampler(); - transitionLayout(VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL); + Image::Create(width, height, format, TILING, VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_SAMPLED_BIT, name, dedicated_memory); + Image::CreateImageView(VK_IMAGE_VIEW_TYPE_2D, VK_IMAGE_ASPECT_COLOR_BIT); + Image::CreateSampler(); + TransitionLayout(VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL); - std::vector vertexData = { + std::vector vertex_data = { {{0, 0}, {1.f, 1.f, 1.f, 1.f}, {0.0f, 0.0f}}, {{width, 0}, {1.f, 1.f, 1.f, 1.f}, {1.0f, 0.0f}}, {{width, height}, {1.f, 1.f, 1.f, 1.f}, {1.0f, 1.0f}}, {{0, height}, {1.f, 1.f, 1.f, 1.f}, {0.0f, 1.0f}} }; - std::vector indexData = { 0, 1, 2, 2, 3, 0 }; + std::vector index_data = { 0, 1, 2, 2, 3, 0 }; #ifdef DEBUG - _vbo.create(sizeof(Vertex) * vertexData.size(), vertexData.data(), name); - _ibo.create(sizeof(std::uint16_t) * indexData.size(), indexData.data(), name); - _name = name; + m_vbo.Create(sizeof(Vertex) * vertex_data.size(), vertex_data.data(), name); + m_ibo.Create(sizeof(std::uint16_t) * index_data.size(), index_data.data(), name); + m_name = name; #else - _vbo.create(sizeof(Vertex) * vertexData.size(), vertexData.data(), nullptr); - _ibo.create(sizeof(std::uint16_t) * indexData.size(), indexData.data(), nullptr); + m_vbo.Create(sizeof(Vertex) * vertex_data.size(), vertex_data.data(), nullptr); + m_ibo.Create(sizeof(std::uint16_t) * index_data.size(), index_data.data(), nullptr); #endif 
Buffer staging_buffer; @@ -58,43 +58,43 @@ namespace mlx if(pixels != nullptr) { #ifdef DEBUG - staging_buffer.create(Buffer::kind::dynamic, size, VK_BUFFER_USAGE_TRANSFER_SRC_BIT, name, pixels); + staging_buffer.Create(BufferType::HighDynamic, size, VK_BUFFER_USAGE_TRANSFER_SRC_BIT, name, pixels); #else - staging_buffer.create(Buffer::kind::dynamic, size, VK_BUFFER_USAGE_TRANSFER_SRC_BIT, nullptr, pixels); + staging_buffer.Create(BufferType::HighDynamic, size, VK_BUFFER_USAGE_TRANSFER_SRC_BIT, nullptr, pixels); #endif } else { std::vector default_pixels(width * height, 0x00000000); #ifdef DEBUG - staging_buffer.create(Buffer::kind::dynamic, size, VK_BUFFER_USAGE_TRANSFER_SRC_BIT, name, default_pixels.data()); + staging_buffer.Create(BufferType::HighDynamic, size, VK_BUFFER_USAGE_TRANSFER_SRC_BIT, name, default_pixels.data()); #else - staging_buffer.create(Buffer::kind::dynamic, size, VK_BUFFER_USAGE_TRANSFER_SRC_BIT, nullptr, default_pixels.data()); + staging_buffer.Create(BufferType::HighDynamic, size, VK_BUFFER_USAGE_TRANSFER_SRC_BIT, nullptr, default_pixels.data()); #endif } - Image::copyFromBuffer(staging_buffer); - staging_buffer.destroy(); + Image::CopyFromBuffer(staging_buffer); + staging_buffer.Destroy(); } - void Texture::setPixel(int x, int y, std::uint32_t color) noexcept + void Texture::SetPixel(int x, int y, std::uint32_t color) noexcept { MLX_PROFILE_FUNCTION(); - if(x < 0 || y < 0 || static_cast(x) > getWidth() || static_cast(y) > getHeight()) + if(x < 0 || y < 0 || static_cast(x) > GetWidth() || static_cast(y) > GetHeight()) return; - if(_map == nullptr) - openCPUmap(); - _cpu_map[(y * getWidth()) + x] = color; - _has_been_modified = true; + if(m_map == nullptr) + PpenCPUmap(); + m_cpu_map[(y * GetWidth()) + x] = color; + m_has_been_modified = true; } - int Texture::getPixel(int x, int y) noexcept + int Texture::GetPixel(int x, int y) noexcept { MLX_PROFILE_FUNCTION(); - if(x < 0 || y < 0 || static_cast(x) > getWidth() || static_cast(y) > getHeight()) + if(x < 0 || y < 0 || static_cast(x) > GetWidth() || static_cast(y) > GetHeight()) return 0; - if(_map == nullptr) - openCPUmap(); - std::uint32_t color = _cpu_map[(y * getWidth()) + x]; + if(m_map == nullptr) + OpenCPUmap(); + std::uint32_t color = m_cpu_map[(y * GetWidth()) + x]; std::uint8_t* bytes = reinterpret_cast(&color); std::uint8_t tmp = bytes[0]; bytes[0] = bytes[2]; @@ -102,86 +102,87 @@ namespace mlx return *reinterpret_cast(bytes); } - void Texture::openCPUmap() + void Texture::OpenCPUmap() { MLX_PROFILE_FUNCTION(); - if(_map != nullptr) + if(m_map != nullptr) return; + DebugLog("Texture : enabling CPU mapping"); + std::size_t size = GetWidth() * GetHeight() * FormatSize(GetFormat()); + m_buf_map.emplace(); #ifdef DEBUG - core::error::report(e_kind::message, "Texture : enabling CPU mapping"); - #endif - std::size_t size = getWidth() * getHeight() * formatSize(getFormat()); - _buf_map.emplace(); - #ifdef DEBUG - _buf_map->create(Buffer::kind::dynamic, size, VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT, _name.c_str()); + m_buf_map->Create(BufferType::HighDynamic, size, VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT, m_name.c_str()); #else - _buf_map->create(Buffer::kind::dynamic, size, VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT, nullptr); - #endif - Image::copyToBuffer(*_buf_map); - _buf_map->mapMem(&_map); - _cpu_map = std::vector(getWidth() * getHeight(), 0); - std::memcpy(_cpu_map.data(), _map, size); - #ifdef DEBUG - 
core::error::report(e_kind::message, "Texture : mapped CPU memory using staging buffer"); + m_buf_map->Create(BufferType::HighDynamic, size, VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT, nullptr); #endif + Image::CopyToBuffer(*m_buf_map); + m_buf_map->MapMem(&_map); + m_cpu_map = std::vector(GetWidth() * GetHeight(), 0); + std::memcpy(m_cpu_map.data(), m_map, size); + DebugLog("Texture : mapped CPU memory using staging buffer"); } - void Texture::render(std::array& sets, Renderer& renderer, int x, int y) + void Texture::Render(Renderer& renderer, int x, int y) { MLX_PROFILE_FUNCTION(); - if(_has_been_modified) + if(m_has_been_modified) { - std::memcpy(_map, _cpu_map.data(), _cpu_map.size() * formatSize(getFormat())); - Image::copyFromBuffer(*_buf_map); - _has_been_modified = false; + std::memcpy(m_map, m_cpu_map.data(), m_cpu_map.size() * FormatSize(GetFormat())); + Image::copyFromBuffer(*m_buf_map); + m_has_been_modified = false; } - if(!_set.isInit()) - _set = renderer.getFragDescriptorSet().duplicate(); - if(getLayout() != VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL) - transitionLayout(VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL); - if(!_has_set_been_updated) - updateSet(0); - auto cmd = renderer.getActiveCmdBuffer(); - _vbo.bind(renderer); - _ibo.bind(renderer); + if(!m_set.IsInit()) + m_set = renderer.GetFragDescriptorSet().Duplicate(); + if(GetLayout() != VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL) + TransitionLayout(VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL); + if(!m_has_set_been_updated) + UpdateSet(0); + auto cmd = renderer.GetActiveCmdBuffer(); + m_vbo.bind(renderer); + m_ibo.bind(renderer); glm::vec2 translate(x, y); - vkCmdPushConstants(cmd.get(), renderer.getPipeline().getPipelineLayout(), VK_SHADER_STAGE_VERTEX_BIT, 0, sizeof(translate), &translate); - sets[1] = _set.get(); - vkCmdBindDescriptorSets(renderer.getActiveCmdBuffer().get(), VK_PIPELINE_BIND_POINT_GRAPHICS, renderer.getPipeline().getPipelineLayout(), 0, sets.size(), sets.data(), 0, nullptr); - vkCmdDrawIndexed(cmd.get(), static_cast(_ibo.getSize() / sizeof(std::uint16_t)), 1, 0, 0, 0); + vkCmdPushConstants(cmd.Get(), renderer.GetPipeline().GetPipelineLayout(), VK_SHADER_STAGE_VERTEX_BIT, 0, sizeof(translate), &translate); + m_set.Bind(); + vkCmdDrawIndexed(cmd.Get(), static_cast(m_ibo.GetSize() / sizeof(std::uint16_t)), 1, 0, 0, 0); } - void Texture::destroy() noexcept + void Texture::Destroy() noexcept { MLX_PROFILE_FUNCTION(); - Image::destroy(); - _set.destroy(); - if(_buf_map.has_value()) - _buf_map->destroy(); - _vbo.destroy(); - _ibo.destroy(); + Image::Destroy(); + m_set.Destroy(); + if(m_buf_map.has_value()) + m_buf_map->Destroy(); + m_vbo.destroy(); + m_ibo.destroy(); } Texture stbTextureLoad(std::filesystem::path file, int* w, int* h) { MLX_PROFILE_FUNCTION(); - Texture texture; + Texture* texture = new Texture; int channels; std::uint8_t* data = nullptr; std::string filename = file.string(); if(!std::filesystem::exists(std::move(file))) - core::error::report(e_kind::fatal_error, "Image : file not found '%s'", filename.c_str()); + { + Error("Image : file not found '%s'", filename.c_str()); + return nullptr; + } if(stbi_is_hdr(filename.c_str())) - core::error::report(e_kind::fatal_error, "Texture : unsupported image format '%s'", filename.c_str()); + { + Error("Texture : unsupported image format '%s'", filename.c_str()); + return nullptr; + } int dummy_w; int dummy_h; data = stbi_load(filename.c_str(), (w == nullptr ? &dummy_w : w), (h == nullptr ? 
&dummy_h : h), &channels, 4); #ifdef DEBUG - texture.create(data, (w == nullptr ? dummy_w : *w), (h == nullptr ? dummy_h : *h), VK_FORMAT_R8G8B8A8_UNORM, filename.c_str()); + texture->Create(data, (w == nullptr ? dummy_w : *w), (h == nullptr ? dummy_h : *h), VK_FORMAT_R8G8B8A8_UNORM, filename.c_str()); #else - texture.create(data, (w == nullptr ? dummy_w : *w), (h == nullptr ? dummy_h : *h), VK_FORMAT_R8G8B8A8_UNORM, nullptr); + texture->Create(data, (w == nullptr ? dummy_w : *w), (h == nullptr ? dummy_h : *h), VK_FORMAT_R8G8B8A8_UNORM, nullptr); #endif stbi_image_free(data); return texture; diff --git a/runtime/Sources/Renderer/Images/TextureAtlas.cpp b/runtime/Sources/Renderer/Images/TextureAtlas.cpp index cace1c1..04bf46f 100644 --- a/runtime/Sources/Renderer/Images/TextureAtlas.cpp +++ b/runtime/Sources/Renderer/Images/TextureAtlas.cpp @@ -1,18 +1,18 @@ /* ************************************************************************** */ /* */ /* ::: :::::::: */ -/* texture_atlas.cpp :+: :+: :+: */ +/* TextureAtlas.cpp :+: :+: :+: */ /* +:+ +:+ +:+ */ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/04/07 16:40:09 by maldavid #+# #+# */ -/* Updated: 2024/03/25 19:03:13 by maldavid ### ########.fr */ +/* Updated: 2024/04/23 21:54:05 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ -#include +#include -#include +#include #ifdef IMAGE_OPTIMIZED #define TILING VK_IMAGE_TILING_OPTIMAL @@ -22,37 +22,37 @@ namespace mlx { - void TextureAtlas::create(std::uint8_t* pixels, std::uint32_t width, std::uint32_t height, VkFormat format, const char* name, bool dedicated_memory) + void TextureAtlas::Create(std::uint8_t* pixels, std::uint32_t width, std::uint32_t height, VkFormat format, const char* name, bool dedicated_memory) { - Image::create(width, height, format, TILING, VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_SAMPLED_BIT, name, dedicated_memory); - Image::createImageView(VK_IMAGE_VIEW_TYPE_2D, VK_IMAGE_ASPECT_COLOR_BIT); - Image::createSampler(); - transitionLayout(VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL); + Image::Create(width, height, format, TILING, VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_SAMPLED_BIT, name, dedicated_memory); + Image::CreateImageView(VK_IMAGE_VIEW_TYPE_2D, VK_IMAGE_ASPECT_COLOR_BIT); + Image::CreateSampler(); + TransitionLayout(VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL); if(pixels == nullptr) { - core::error::report(e_kind::warning, "Renderer : creating an empty texture atlas. They cannot be updated after creation, this might be a mistake or a bug, please report"); + Warning("Renderer : creating an empty texture atlas. 
They cannot be updated after creation, this might be a mistake or a bug, please report"); return; } Buffer staging_buffer; - std::size_t size = width * height * formatSize(format); - staging_buffer.create(Buffer::kind::dynamic, size, VK_BUFFER_USAGE_TRANSFER_SRC_BIT, name, pixels); - Image::copyFromBuffer(staging_buffer); - staging_buffer.destroy(); + std::size_t size = width * height * FormatSize(format); + staging_buffer.Create(BufferType::HighDynamic, size, VK_BUFFER_USAGE_TRANSFER_SRC_BIT, name, pixels); + Image::CopyFromBuffer(staging_buffer); + staging_buffer.Destroy(); } - void TextureAtlas::render(Renderer& renderer, int x, int y, std::uint32_t ibo_size) const + void TextureAtlas::Render(Renderer& renderer, int x, int y, std::uint32_t ibo_size) const { - auto cmd = renderer.getActiveCmdBuffer().get(); + auto cmd = renderer.GetActiveCmdBuffer().Get(); glm::vec2 translate(x, y); - vkCmdPushConstants(cmd, renderer.getPipeline().getPipelineLayout(), VK_SHADER_STAGE_VERTEX_BIT, 0, sizeof(translate), &translate); + vkCmdPushConstants(cmd, renderer.GetPipeline().GetPipelineLayout(), VK_SHADER_STAGE_VERTEX_BIT, 0, sizeof(translate), &translate); vkCmdDrawIndexed(cmd, ibo_size / sizeof(std::uint16_t), 1, 0, 0, 0); } - void TextureAtlas::destroy() noexcept + void TextureAtlas::Destroy() noexcept { - Image::destroy(); - _set.destroy(); + Image::Destroy(); + m_set.Destroy(); } } diff --git a/runtime/Sources/Renderer/Pipelines/Pipeline.cpp b/runtime/Sources/Renderer/Pipelines/Pipeline.cpp index 1722c21..cd194fe 100644 --- a/runtime/Sources/Renderer/Pipelines/Pipeline.cpp +++ b/runtime/Sources/Renderer/Pipelines/Pipeline.cpp @@ -1,21 +1,21 @@ /* ************************************************************************** */ /* */ /* ::: :::::::: */ -/* pipeline.cpp :+: :+: :+: */ +/* Pipeline.cpp :+: :+: :+: */ /* +:+ +:+ +:+ */ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/12/18 21:27:38 by maldavid #+# #+# */ -/* Updated: 2024/03/25 19:03:31 by maldavid ### ########.fr */ +/* Updated: 2024/04/23 22:24:13 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ -#include +#include -#include "pipeline.h" -#include -#include -#include +#include +#include +#include +#include namespace mlx { @@ -26,19 +26,23 @@ namespace mlx layout(location = 1) in vec4 aColor; layout(location = 2) in vec2 aUV; - layout(set = 0, binding = 0) uniform uProjection { + layout(set = 0, binding = 0) uniform uProjection + { mat4 mat; } uProj; - layout(push_constant) uniform uModelPushConstant { + layout(push_constant) uniform uModelPushConstant + { vec2 vec; } uTranslate; - out gl_PerVertex { + out gl_PerVertex + { vec4 gl_Position; }; - layout(location = 0) out struct { + layout(location = 0) out struct + { vec4 Color; vec2 UV; } Out; @@ -113,7 +117,8 @@ namespace mlx layout(set = 1, binding = 0) uniform sampler2D sTexture; - layout(location = 0) in struct { + layout(location = 0) in struct + { vec4 Color; vec2 UV; } In; @@ -162,83 +167,83 @@ namespace mlx 0x000100fd,0x00010038 }; - void GraphicPipeline::init(Renderer& renderer) + void GraphicPipeline::Init(Renderer& renderer) { - VkShaderModuleCreateInfo createInfo{}; - createInfo.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO; - createInfo.codeSize = vertex_shader.size() * sizeof(std::uint32_t); - createInfo.pCode = vertex_shader.data(); + VkShaderModuleCreateInfo create_info{}; + create_info.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO; + create_info.codeSize = 
vertex_shader.size() * sizeof(std::uint32_t); + create_info.pCode = vertex_shader.data(); VkShaderModule vshader; - if(vkCreateShaderModule(Render_Core::get().getDevice().get(), &createInfo, nullptr, &vshader) != VK_SUCCESS) - core::error::report(e_kind::fatal_error, "Vulkan : failed to create a vertex shader module"); + if(vkCreateShaderModule(RenderCore::Get().GetDevice().Get(), &create_info, nullptr, &vshader) != VK_SUCCESS) + FatalError("Vulkan : failed to create a vertex shader module"); VkPushConstantRange push_constant; push_constant.offset = 0; push_constant.size = sizeof(glm::vec2); push_constant.stageFlags = VK_SHADER_STAGE_VERTEX_BIT; - createInfo.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO; - createInfo.codeSize = fragment_shader.size() * sizeof(std::uint32_t); - createInfo.pCode = fragment_shader.data(); + create_info.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO; + create_info.codeSize = fragment_shader.size() * sizeof(std::uint32_t); + create_info.pCode = fragment_shader.data(); VkShaderModule fshader; - if(vkCreateShaderModule(Render_Core::get().getDevice().get(), &createInfo, nullptr, &fshader) != VK_SUCCESS) - core::error::report(e_kind::fatal_error, "Vulkan : failed to create a fragment shader module"); + if(vkCreateShaderModule(RenderCore::Get().GetDevice().Get(), &create_info, nullptr, &fshader) != VK_SUCCESS) + FatalError("Vulkan : failed to create a fragment shader module"); - VkPipelineShaderStageCreateInfo vertShaderStageInfo{}; - vertShaderStageInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO; - vertShaderStageInfo.stage = VK_SHADER_STAGE_VERTEX_BIT; - vertShaderStageInfo.module = vshader; - vertShaderStageInfo.pName = "main"; + VkPipelineShaderStageCreateInfo vert_shader_stage_info{}; + vert_shader_stage_info.sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO; + vert_shader_stage_info.stage = VK_SHADER_STAGE_VERTEX_BIT; + vert_shader_stage_info.module = vshader; + vert_shader_stage_info.pName = "main"; - VkPipelineShaderStageCreateInfo fragShaderStageInfo{}; - fragShaderStageInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO; - fragShaderStageInfo.stage = VK_SHADER_STAGE_FRAGMENT_BIT; - fragShaderStageInfo.module = fshader; - fragShaderStageInfo.pName = "main"; + VkPipelineShaderStageCreateInfo frag_shader_stage_info{}; + frag_shader_stage_info.sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO; + frag_shader_stage_info.stage = VK_SHADER_STAGE_FRAGMENT_BIT; + frag_shader_stage_info.module = fshader; + frag_shader_stage_info.pName = "main"; - std::array stages = {vertShaderStageInfo, fragShaderStageInfo}; + std::array stages = { vert_shader_stage_info, frag_shader_stage_info }; - auto bindingDescription = Vertex::getBindingDescription(); - auto attributeDescriptions = Vertex::getAttributeDescriptions(); + auto binding_description = Vertex::GetBindingDescription(); + auto attribute_descriptions = Vertex::GetAttributeDescriptions(); - VkPipelineVertexInputStateCreateInfo vertexInputStateCreateInfo{}; - vertexInputStateCreateInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO; - vertexInputStateCreateInfo.vertexBindingDescriptionCount = 1; - vertexInputStateCreateInfo.pVertexBindingDescriptions = &bindingDescription; - vertexInputStateCreateInfo.vertexAttributeDescriptionCount = static_cast(attributeDescriptions.size()); - vertexInputStateCreateInfo.pVertexAttributeDescriptions = attributeDescriptions.data(); + VkPipelineVertexInputStateCreateInfo vertex_input_state_create_info{}; + 
vertex_input_state_create_info.sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO; + vertex_input_state_create_info.vertexBindingDescriptionCount = 1; + vertex_input_state_create_info.pVertexBindingDescriptions = &binding_description; + vertex_input_state_create_info.vertexAttributeDescriptionCount = static_cast(attribute_descriptions.size()); + vertex_input_state_create_info.pVertexAttributeDescriptions = attribute_descriptions.data(); - VkPipelineInputAssemblyStateCreateInfo inputAssembly{}; - inputAssembly.sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO; - inputAssembly.topology = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST; - inputAssembly.primitiveRestartEnable = VK_FALSE; + VkPipelineInputAssemblyStateCreateInfo input_assembly{}; + input_assembly.sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO; + input_assembly.topology = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST; + input_assembly.primitiveRestartEnable = VK_FALSE; VkDynamicState states[] = { VK_DYNAMIC_STATE_VIEWPORT, VK_DYNAMIC_STATE_SCISSOR }; - constexpr std::size_t statesCount = sizeof(states) / sizeof(VkDynamicState); - VkPipelineDynamicStateCreateInfo dynamicStates{}; - dynamicStates.sType = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO; - dynamicStates.dynamicStateCount = statesCount; - dynamicStates.pDynamicStates = states; + constexpr std::size_t states_count = sizeof(states) / sizeof(VkDynamicState); + VkPipelineDynamicStateCreateInfo dynamic_states{}; + dynamic_states.sType = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO; + dynamic_states.dynamicStateCount = states_count; + dynamic_states.pDynamicStates = states; VkViewport viewport{}; viewport.x = 0.0f; viewport.y = 0.0f; - viewport.width = (float)renderer.getFrameBuffer(0).getWidth(); - viewport.height = (float)renderer.getFrameBuffer(0).getHeight(); + viewport.width = (float)renderer.GetFrameBuffer(0).GetWidth(); + viewport.height = (float)renderer.GetFrameBuffer(0).GetHeight(); viewport.minDepth = 0.0f; viewport.maxDepth = 1.0f; VkRect2D scissor{}; scissor.offset = { 0, 0 }; - scissor.extent = { renderer.getFrameBuffer(0).getWidth(), renderer.getFrameBuffer(0).getHeight()}; + scissor.extent = { renderer.GetFrameBuffer(0).GetWidth(), renderer.GetFrameBuffer(0).GetHeight()}; - VkPipelineViewportStateCreateInfo viewportState{}; - viewportState.sType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO; - viewportState.viewportCount = 1; - viewportState.pViewports = &viewport; - viewportState.scissorCount = 1; - viewportState.pScissors = &scissor; + VkPipelineViewportStateCreateInfo viewport_state{}; + viewport_state.sType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO; + viewport_state.viewportCount = 1; + viewport_state.pViewports = &viewport; + viewport_state.scissorCount = 1; + viewport_state.pScissors = &scissor; VkPipelineRasterizationStateCreateInfo rasterizer{}; rasterizer.sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO; @@ -255,76 +260,72 @@ namespace mlx multisampling.sampleShadingEnable = VK_FALSE; multisampling.rasterizationSamples = VK_SAMPLE_COUNT_1_BIT; - VkPipelineColorBlendAttachmentState colorBlendAttachment{}; - colorBlendAttachment.colorWriteMask = VK_COLOR_COMPONENT_R_BIT | VK_COLOR_COMPONENT_G_BIT | VK_COLOR_COMPONENT_B_BIT | VK_COLOR_COMPONENT_A_BIT; - colorBlendAttachment.blendEnable = VK_TRUE; - colorBlendAttachment.srcColorBlendFactor = VK_BLEND_FACTOR_SRC_ALPHA; - colorBlendAttachment.dstColorBlendFactor = VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA; - 
colorBlendAttachment.colorBlendOp = VK_BLEND_OP_ADD; - colorBlendAttachment.srcAlphaBlendFactor = VK_BLEND_FACTOR_SRC_ALPHA; - colorBlendAttachment.dstAlphaBlendFactor = VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA; - colorBlendAttachment.alphaBlendOp = VK_BLEND_OP_ADD; + VkPipelineColorBlendAttachmentState color_blend_attachment{}; + color_blend_attachment.colorWriteMask = VK_COLOR_COMPONENT_R_BIT | VK_COLOR_COMPONENT_G_BIT | VK_COLOR_COMPONENT_B_BIT | VK_COLOR_COMPONENT_A_BIT; + color_blend_attachment.blendEnable = VK_TRUE; + color_blend_attachment.srcColorBlendFactor = VK_BLEND_FACTOR_SRC_ALPHA; + color_blend_attachment.dstColorBlendFactor = VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA; + color_blend_attachment.colorBlendOp = VK_BLEND_OP_ADD; + color_blend_attachment.srcAlphaBlendFactor = VK_BLEND_FACTOR_SRC_ALPHA; + color_blend_attachment.dstAlphaBlendFactor = VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA; + color_blend_attachment.alphaBlendOp = VK_BLEND_OP_ADD; - VkPipelineColorBlendStateCreateInfo colorBlending{}; - colorBlending.sType = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO; - colorBlending.logicOpEnable = VK_FALSE; - colorBlending.logicOp = VK_LOGIC_OP_COPY; - colorBlending.attachmentCount = 1; - colorBlending.pAttachments = &colorBlendAttachment; - colorBlending.blendConstants[0] = 1.0f; - colorBlending.blendConstants[1] = 1.0f; - colorBlending.blendConstants[2] = 1.0f; - colorBlending.blendConstants[3] = 1.0f; + VkPipelineColorBlendStateCreateInfo color_blending{}; + color_blending.sType = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO; + color_blending.logicOpEnable = VK_FALSE; + color_blending.logicOp = VK_LOGIC_OP_COPY; + color_blending.attachmentCount = 1; + color_blending.pAttachments = &color_blend_attachment; + color_blending.blendConstants[0] = 1.0f; + color_blending.blendConstants[1] = 1.0f; + color_blending.blendConstants[2] = 1.0f; + color_blending.blendConstants[3] = 1.0f; VkDescriptorSetLayout layouts[] = { - renderer.getVertDescriptorSetLayout().get(), - renderer.getFragDescriptorSetLayout().get() + renderer.GetVertDescriptorSet().GetLayout(), + renderer.GetFragDescriptorSet().GetLayout() }; - VkPipelineLayoutCreateInfo pipelineLayoutInfo{}; - pipelineLayoutInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO; - pipelineLayoutInfo.setLayoutCount = 2; - pipelineLayoutInfo.pSetLayouts = layouts; - pipelineLayoutInfo.pushConstantRangeCount = 1; - pipelineLayoutInfo.pPushConstantRanges = &push_constant; + VkPipelineLayoutCreateInfo pipeline_layout_info{}; + pipeline_layout_info.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO; + pipeline_layout_info.setLayoutCount = 2; + pipeline_layout_info.pSetLayouts = layouts; + pipeline_layout_info.pushConstantRangeCount = 1; + pipeline_layout_info.pPushConstantRanges = &push_constant; - if(vkCreatePipelineLayout(Render_Core::get().getDevice().get(), &pipelineLayoutInfo, nullptr, &_pipeline_layout) != VK_SUCCESS) - core::error::report(e_kind::fatal_error, "Vulkan : failed to create a graphics pipeline layout"); + if(vkCreatePipelineLayout(RenderCore::Get().GetDevice().Get(), &pipeline_layout_info, nullptr, &m_pipeline_layout) != VK_SUCCESS) + FatalError("Vulkan : failed to create a graphics pipeline layout"); - VkGraphicsPipelineCreateInfo pipelineInfo{}; - pipelineInfo.sType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO; - pipelineInfo.stageCount = stages.size(); - pipelineInfo.pStages = stages.data(); - pipelineInfo.pVertexInputState = &vertexInputStateCreateInfo; - pipelineInfo.pInputAssemblyState = 
&inputAssembly; - pipelineInfo.pViewportState = &viewportState; - pipelineInfo.pRasterizationState = &rasterizer; - pipelineInfo.pMultisampleState = &multisampling; - pipelineInfo.pColorBlendState = &colorBlending; - pipelineInfo.pDynamicState = &dynamicStates; - pipelineInfo.layout = _pipeline_layout; - pipelineInfo.renderPass = renderer.getRenderPass().get(); - pipelineInfo.subpass = 0; - pipelineInfo.basePipelineHandle = VK_NULL_HANDLE; + VkGraphicsPipelineCreateInfo pipeline_info{}; + pipeline_info.sType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO; + pipeline_info.stageCount = stages.size(); + pipeline_info.pStages = stages.data(); + pipeline_info.pVertexInputState = &vertex_input_state_create_info; + pipeline_info.pInputAssemblyState = &input_assembly; + pipeline_info.pViewportState = &viewport_state; + pipeline_info.pRasterizationState = &rasterizer; + pipeline_info.pMultisampleState = &multisampling; + pipeline_info.pColorBlendState = &color_blending; + pipeline_info.pDynamicState = &dynamic_states; + pipeline_info.layout = m_pipeline_layout; + pipeline_info.renderPass = renderer.GetRenderPass().Get(); + pipeline_info.subpass = 0; + pipeline_info.basePipelineHandle = VK_NULL_HANDLE; - VkResult res = vkCreateGraphicsPipelines(Render_Core::get().getDevice().get(), VK_NULL_HANDLE, 1, &pipelineInfo, nullptr, &_graphics_pipeline); + VkResult res = vkCreateGraphicsPipelines(RenderCore::Get().GetDevice().Get(), VK_NULL_HANDLE, 1, &pipeline_info, nullptr, &m_graphics_pipeline); if(res != VK_SUCCESS) - core::error::report(e_kind::fatal_error, "Vulkan : failed to create a graphics pipeline, %s", RCore::verbaliseResultVk(res)); -#ifdef DEBUG - core::error::report(e_kind::message, "Vulkan : created new graphic pipeline"); -#endif + FatalError("Vulkan : failed to create a graphics pipeline, %", VerbaliseVkResult(res)); + DebugLog("Vulkan : created new graphic pipeline"); - vkDestroyShaderModule(Render_Core::get().getDevice().get(), fshader, nullptr); - vkDestroyShaderModule(Render_Core::get().getDevice().get(), vshader, nullptr); + vkDestroyShaderModule(RenderCore::Get().GetDevice().Get(), fshader, nullptr); + vkDestroyShaderModule(RenderCore::Get().GetDevice().Get(), vshader, nullptr); } - void GraphicPipeline::destroy() noexcept + void GraphicPipeline::Destroy() noexcept { - vkDestroyPipeline(Render_Core::get().getDevice().get(), _graphics_pipeline, nullptr); - vkDestroyPipelineLayout(Render_Core::get().getDevice().get(), _pipeline_layout, nullptr); - _graphics_pipeline = VK_NULL_HANDLE; - #ifdef DEBUG - core::error::report(e_kind::message, "Vulkan : destroyed a graphics pipeline"); - #endif + vkDestroyPipeline(RenderCore::Get().GetDevice().Get(), m_graphics_pipeline, nullptr); + vkDestroyPipelineLayout(RenderCore::Get().GetDevice().Get(), m_pipeline_layout, nullptr); + m_graphics_pipeline = VK_NULL_HANDLE; + DebugLog("Vulkan : destroyed a graphics pipeline"); } } diff --git a/runtime/Sources/Renderer/Renderpass/Framebuffer.cpp b/runtime/Sources/Renderer/Renderpass/Framebuffer.cpp index c4f100b..a84b4a3 100644 --- a/runtime/Sources/Renderer/Renderpass/Framebuffer.cpp +++ b/runtime/Sources/Renderer/Renderpass/Framebuffer.cpp @@ -1,53 +1,49 @@ /* ************************************************************************** */ /* */ /* ::: :::::::: */ -/* vk_framebuffer.cpp :+: :+: :+: */ +/* Framebuffer.cpp :+: :+: :+: */ /* +:+ +:+ +:+ */ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/06 18:18:06 by maldavid #+# #+# */ -/* Updated: 2024/03/25 19:03:35 by maldavid 
### ########.fr */
+/* Updated: 2024/04/23 22:28:07 by maldavid ### ########.fr */
 /* */
 /* ************************************************************************** */

-#include 

-#include 
-#include 
-#include 
+#include 
+#include 
+#include 

 namespace mlx
 {
-	void FrameBuffer::init(RenderPass& renderpass, Image& image)
+	void FrameBuffer::Init(RenderPass& renderpass, Image& image)
 	{
-		VkImageView attachments[] = { image.getImageView() };
+		VkImageView attachments[] = { image.GetImageView() };

-		_width = image.getWidth();
-		_height = image.getHeight();
+		m_width = image.GetWidth();
+		m_height = image.GetHeight();

-		VkFramebufferCreateInfo framebufferInfo{};
-		framebufferInfo.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO;
-		framebufferInfo.renderPass = renderpass.get();
-		framebufferInfo.attachmentCount = 1;
-		framebufferInfo.pAttachments = attachments;
-		framebufferInfo.width = _width;
-		framebufferInfo.height = _height;
-		framebufferInfo.layers = 1;
+		VkFramebufferCreateInfo framebuffer_info{};
+		framebuffer_info.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO;
+		framebuffer_info.renderPass = renderpass.Get();
+		framebuffer_info.attachmentCount = 1;
+		framebuffer_info.pAttachments = attachments;
+		framebuffer_info.width = m_width;
+		framebuffer_info.height = m_height;
+		framebuffer_info.layers = 1;

-		VkResult res = vkCreateFramebuffer(Render_Core::get().getDevice().get(), &framebufferInfo, nullptr, &_framebuffer);
+		VkResult res = vkCreateFramebuffer(RenderCore::Get().GetDevice().Get(), &framebuffer_info, nullptr, &m_framebuffer);
 		if(res != VK_SUCCESS)
-			core::error::report(e_kind::fatal_error, "Vulkan : failed to create a framebuffer, %s", RCore::verbaliseResultVk(res));
-		#ifdef DEBUG
-			core::error::report(e_kind::message, "Vulkan : created new framebuffer");
-		#endif
+			FatalError("Vulkan : failed to create a framebuffer, %", VerbaliseVkResult(res));
+		DebugLog("Vulkan : created new framebuffer");
 	}

-	void FrameBuffer::destroy() noexcept
+	void FrameBuffer::Destroy() noexcept
 	{
-		vkDestroyFramebuffer(Render_Core::get().getDevice().get(), _framebuffer, nullptr);
-		_framebuffer = VK_NULL_HANDLE;
-		#ifdef DEBUG
-			core::error::report(e_kind::message, "Vulkan : destroyed a framebuffer");
-		#endif
+		vkDestroyFramebuffer(RenderCore::Get().GetDevice().Get(), m_framebuffer, nullptr);
+		m_framebuffer = VK_NULL_HANDLE;
+		DebugLog("Vulkan : destroyed a framebuffer");
 	}
 }
diff --git a/runtime/Sources/Renderer/Renderpass/Renderpass.cpp b/runtime/Sources/Renderer/Renderpass/Renderpass.cpp
index 352f34f..b5d25e9 100644
--- a/runtime/Sources/Renderer/Renderpass/Renderpass.cpp
+++ b/runtime/Sources/Renderer/Renderpass/Renderpass.cpp
@@ -1,12 +1,12 @@
 /* ************************************************************************** */
 /* */
 /* ::: :::::::: */
-/* vk_render_pass.cpp :+: :+: :+: */
+/* Renderpass.cpp :+: :+: :+: */
 /* +:+ +:+ +:+ */
 /* By: maldavid +#+ +:+ +#+ */
 /* +#+#+#+#+#+ +#+ */
 /* Created: 2022/10/06 18:21:36 by maldavid #+# #+# */
-/* Updated: 2024/03/25 19:03:37 by maldavid ### ########.fr */
+/* Updated: 2024/04/23 22:31:09 by maldavid ### ########.fr */
 /* */
 /* ************************************************************************** */

@@ -20,21 +20,21 @@
 namespace mlx
 {
-	static const VkClearValue clearColor = {{{ 0.f, 0.f, 0.f, 1.0f }}}; // wtf, this mess to satisfy a warning
+	static const VkClearValue clear_color = {{{ 0.f, 0.f, 0.f, 1.0f }}}; // wtf, this mess to satisfy a warning

-	void RenderPass::init(VkFormat attachement_format, VkImageLayout layout)
+	
void RenderPass::Init(VkFormat attachement_format, VkImageLayout layout) { - VkAttachmentDescription colorAttachment{}; - colorAttachment.format = attachement_format; - colorAttachment.samples = VK_SAMPLE_COUNT_1_BIT; - colorAttachment.loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR; - colorAttachment.storeOp = VK_ATTACHMENT_STORE_OP_STORE; - colorAttachment.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE; - colorAttachment.stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE; - colorAttachment.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED; - colorAttachment.finalLayout = layout; + VkAttachmentDescription color_attachment{}; + color_attachment.format = attachement_format; + color_attachment.samples = VK_SAMPLE_COUNT_1_BIT; + color_attachment.loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR; + color_attachment.storeOp = VK_ATTACHMENT_STORE_OP_STORE; + color_attachment.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE; + color_attachment.stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE; + color_attachment.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED; + color_attachment.finalLayout = layout; - VkAttachmentReference colorAttachmentRef{}; + VkAttachmentReference color_attachment_ref{}; colorAttachmentRef.attachment = 0; colorAttachmentRef.layout = (layout == VK_IMAGE_LAYOUT_PRESENT_SRC_KHR ? VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL : layout); @@ -45,77 +45,73 @@ namespace mlx VkSubpassDescription subpasses[] = { subpass1 }; - std::vector subpassesDeps; - subpassesDeps.emplace_back(); - subpassesDeps.back().srcSubpass = VK_SUBPASS_EXTERNAL; - subpassesDeps.back().dstSubpass = 0; - subpassesDeps.back().srcStageMask = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT; - subpassesDeps.back().dstStageMask = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT; - subpassesDeps.back().srcAccessMask = VK_ACCESS_MEMORY_READ_BIT; - subpassesDeps.back().dstAccessMask = VK_ACCESS_COLOR_ATTACHMENT_READ_BIT | VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT; - subpassesDeps.back().dependencyFlags = VK_DEPENDENCY_BY_REGION_BIT; + std::vector subpasses_deps; + subpasses_deps.emplace_back(); + subpasses_deps.back().srcSubpass = VK_SUBPASS_EXTERNAL; + subpasses_deps.back().dstSubpass = 0; + subpasses_deps.back().srcStageMask = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT; + subpasses_deps.back().dstStageMask = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT; + subpasses_deps.back().srcAccessMask = VK_ACCESS_MEMORY_READ_BIT; + subpasses_deps.back().dstAccessMask = VK_ACCESS_COLOR_ATTACHMENT_READ_BIT | VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT; + subpasses_deps.back().dependencyFlags = VK_DEPENDENCY_BY_REGION_BIT; - subpassesDeps.emplace_back(); - subpassesDeps.back().srcSubpass = 0; - subpassesDeps.back().dstSubpass = VK_SUBPASS_EXTERNAL; - subpassesDeps.back().srcStageMask = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT; - subpassesDeps.back().dstStageMask = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT; - subpassesDeps.back().srcAccessMask = VK_ACCESS_COLOR_ATTACHMENT_READ_BIT | VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT; - subpassesDeps.back().dstAccessMask = VK_ACCESS_MEMORY_READ_BIT; - subpassesDeps.back().dependencyFlags = VK_DEPENDENCY_BY_REGION_BIT; + subpasses_deps.emplace_back(); + subpasses_deps.back().srcSubpass = 0; + subpasses_deps.back().dstSubpass = VK_SUBPASS_EXTERNAL; + subpasses_deps.back().srcStageMask = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT; + subpasses_deps.back().dstStageMask = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT; + subpasses_deps.back().srcAccessMask = VK_ACCESS_COLOR_ATTACHMENT_READ_BIT | VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT; + subpasses_deps.back().dstAccessMask = 
VK_ACCESS_MEMORY_READ_BIT;
+		subpasses_deps.back().dependencyFlags = VK_DEPENDENCY_BY_REGION_BIT;

-		VkRenderPassCreateInfo renderPassInfo{};
-		renderPassInfo.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
-		renderPassInfo.attachmentCount = 1;
-		renderPassInfo.pAttachments = &colorAttachment;
-		renderPassInfo.subpassCount = sizeof(subpasses) / sizeof(VkSubpassDescription);
-		renderPassInfo.pSubpasses = subpasses;
-		renderPassInfo.dependencyCount = static_cast(subpassesDeps.size());
-		renderPassInfo.pDependencies = subpassesDeps.data();
+		VkRenderPassCreateInfo render_pass_info{};
+		render_pass_info.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
+		render_pass_info.attachmentCount = 1;
+		render_pass_info.pAttachments = &color_attachment;
+		render_pass_info.subpassCount = sizeof(subpasses) / sizeof(VkSubpassDescription);
+		render_pass_info.pSubpasses = subpasses;
+		render_pass_info.dependencyCount = static_cast(subpasses_deps.size());
+		render_pass_info.pDependencies = subpasses_deps.data();

-		VkResult res = vkCreateRenderPass(Render_Core::get().getDevice().get(), &renderPassInfo, nullptr, &_render_pass);
+		VkResult res = vkCreateRenderPass(RenderCore::Get().GetDevice().Get(), &render_pass_info, nullptr, &m_render_pass);
 		if(res != VK_SUCCESS)
-			core::error::report(e_kind::fatal_error, "Vulkan : failed to create render pass, %s", RCore::verbaliseResultVk(res));
-		#ifdef DEBUG
-		core::error::report(e_kind::message, "Vulkan : created new render pass");
-		#endif
+			FatalError("Vulkan : failed to create render pass, %", VerbaliseVkResult(res));
+		DebugLog("Vulkan : created new render pass");
 	}

-	void RenderPass::begin(class CmdBuffer& cmd, class FrameBuffer& fb)
+	void RenderPass::Begin(class CommandBuffer& cmd, class FrameBuffer& fb)
 	{
 		MLX_PROFILE_FUNCTION();
-		if(_is_running)
+		if(m_is_running)
 			return;

-		VkRenderPassBeginInfo renderPassInfo{};
-		renderPassInfo.sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO;
-		renderPassInfo.renderPass = _render_pass;
-		renderPassInfo.framebuffer = fb.get();
-		renderPassInfo.renderArea.offset = { 0, 0 };
-		renderPassInfo.renderArea.extent = { fb.getWidth(), fb.getHeight() };
-		renderPassInfo.clearValueCount = 1;
-		renderPassInfo.pClearValues = &clearColor;
+		VkRenderPassBeginInfo render_pass_info{};
+		render_pass_info.sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO;
+		render_pass_info.renderPass = m_render_pass;
+		render_pass_info.framebuffer = fb.Get();
+		render_pass_info.renderArea.offset = { 0, 0 };
+		render_pass_info.renderArea.extent = { fb.GetWidth(), fb.GetHeight() };
+		render_pass_info.clearValueCount = 1;
+		render_pass_info.pClearValues = &clear_color;

-		vkCmdBeginRenderPass(cmd.get(), &renderPassInfo, VK_SUBPASS_CONTENTS_INLINE);
+		vkCmdBeginRenderPass(cmd.Get(), &render_pass_info, VK_SUBPASS_CONTENTS_INLINE);

-		_is_running = true;
+		m_is_running = true;
 	}

-	void RenderPass::end(class CmdBuffer& cmd)
+	void RenderPass::End(class CommandBuffer& cmd)
 	{
 		MLX_PROFILE_FUNCTION();
-		if(!_is_running)
+		if(!m_is_running)
 			return;
-		vkCmdEndRenderPass(cmd.get());
-		_is_running = false;
+		vkCmdEndRenderPass(cmd.Get());
+		m_is_running = false;
 	}

-	void RenderPass::destroy() noexcept
+	void RenderPass::Destroy() noexcept
 	{
-		vkDestroyRenderPass(Render_Core::get().getDevice().get(), _render_pass, nullptr);
-		_render_pass = VK_NULL_HANDLE;
-		#ifdef DEBUG
-		core::error::report(e_kind::message, "Vulkan : destroyed a renderpass");
-		#endif
+		vkDestroyRenderPass(RenderCore::Get().GetDevice().Get(), m_render_pass, nullptr);
+		m_render_pass = VK_NULL_HANDLE;
+		
DebugLog("Vulkan : destroyed a renderpass"); } } diff --git a/runtime/Sources/Renderer/Renderpass/Swapchain.cpp b/runtime/Sources/Renderer/Renderpass/Swapchain.cpp index 485d413..ffc5df9 100644 --- a/runtime/Sources/Renderer/Renderpass/Swapchain.cpp +++ b/runtime/Sources/Renderer/Renderpass/Swapchain.cpp @@ -1,152 +1,150 @@ /* ************************************************************************** */ /* */ /* ::: :::::::: */ -/* vk_swapchain.cpp :+: :+: :+: */ +/* Swapchain.cpp :+: :+: :+: */ /* +:+ +:+ +:+ */ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/06 18:22:28 by maldavid #+# #+# */ -/* Updated: 2024/03/25 23:09:33 by maldavid ### ########.fr */ +/* Updated: 2024/04/23 22:43:10 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ -#include +#include -#include -#include -#include +#include +#include +#include namespace mlx { - void SwapChain::init(Renderer* renderer) + void SwapChain::Init(NonOwningPtr renderer) { - VkDevice device = Render_Core::get().getDevice().get(); + VkDevice device = RenderCore::get().GetDevice().Get(); - _renderer = renderer; - _swapchain_support = querySwapChainSupport(Render_Core::get().getDevice().getPhysicalDevice()); + m_renderer = renderer; + m_swapchain_support = QuerySwapChainSupport(RenderCore::Get().GetDevice().GetPhysicalDevice()); - VkSurfaceFormatKHR surfaceFormat = renderer->getSurface().chooseSwapSurfaceFormat(_swapchain_support.formats); - VkPresentModeKHR presentMode = chooseSwapPresentMode(_swapchain_support.present_modes); - _extent = chooseSwapExtent(_swapchain_support.capabilities); + VkSurfaceFormatKHR surface_format = renderer->GetSurface().ChooseSwapSurfaceFormat(m_swapchain_support.formats); + VkPresentModeKHR present_mode = ChooseSwapPresentMode(m_swapchain_support.present_modes); + m_extent = ChooseSwapExtent(m_swapchain_support.capabilities); - std::uint32_t imageCount = _swapchain_support.capabilities.minImageCount + 1; - if(_swapchain_support.capabilities.maxImageCount > 0 && imageCount > _swapchain_support.capabilities.maxImageCount) - imageCount = _swapchain_support.capabilities.maxImageCount; + std::uint32_t image_count = m_swapchain_support.capabilities.minImageCount + 1; + if(m_swapchain_support.capabilities.maxImageCount > 0 && image_count > m_swapchain_support.capabilities.maxImageCount) + image_count = m_swapchain_support.capabilities.maxImageCount; - Queues::QueueFamilyIndices indices = Render_Core::get().getQueue().findQueueFamilies(Render_Core::get().getDevice().getPhysicalDevice()); - std::uint32_t queueFamilyIndices[] = { indices.graphics_family.value(), indices.present_family.value() }; + Queues::QueueFamilyIndices indices = RenderCore::Get().GetQueue().FindQueueFamilies(RenderCore::Get().GetDevice().GetPhysicalDevice()); + std::uint32_t queue_family_indices[] = { indices.graphics_family.value(), indices.present_family.value() }; - VkSwapchainCreateInfoKHR createInfo{}; - createInfo.sType = VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR; - createInfo.surface = renderer->getSurface().get(); - createInfo.minImageCount = imageCount; - createInfo.imageFormat = surfaceFormat.format; - createInfo.imageColorSpace = surfaceFormat.colorSpace; - createInfo.imageExtent = _extent; - createInfo.imageArrayLayers = 1; - createInfo.imageUsage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT; - createInfo.preTransform = _swapchain_support.capabilities.currentTransform; - createInfo.compositeAlpha = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR; - 
createInfo.presentMode = presentMode; - createInfo.clipped = VK_TRUE; - createInfo.oldSwapchain = VK_NULL_HANDLE; + VkSwapchainCreateInfoKHR create_info{}; + create_info.sType = VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR; + create_info.surface = renderer->GetSurface().Get(); + create_info.minImageCount = image_count; + create_info.imageFormat = surface_format.format; + create_info.imageColorSpace = surface_format.colorSpace; + create_info.imageExtent = m_extent; + create_info.imageArrayLayers = 1; + create_info.imageUsage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT; + create_info.preTransform = m_swapchain_support.capabilities.currentTransform; + create_info.compositeAlpha = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR; + create_info.presentMode = present_mode; + create_info.clipped = VK_TRUE; + create_info.oldSwapchain = VK_NULL_HANDLE; if(indices.graphics_family != indices.present_family) { - createInfo.imageSharingMode = VK_SHARING_MODE_CONCURRENT; - createInfo.queueFamilyIndexCount = 2; - createInfo.pQueueFamilyIndices = queueFamilyIndices; + create_info.imageSharingMode = VK_SHARING_MODE_CONCURRENT; + create_info.queueFamilyIndexCount = 2; + create_info.pQueueFamilyIndices = queue_family_indices; } else - createInfo.imageSharingMode = VK_SHARING_MODE_EXCLUSIVE; + create_info.imageSharingMode = VK_SHARING_MODE_EXCLUSIVE; - VkResult res = vkCreateSwapchainKHR(device, &createInfo, nullptr, &_swapchain); + VkResult res = vkCreateSwapchainKHR(device, &create_info, nullptr, &m_swapchain); if(res != VK_SUCCESS) - core::error::report(e_kind::fatal_error, "Vulkan : failed to create the swapchain, %s", RCore::verbaliseResultVk(res)); + FatalError("Vulkan : failed to create the swapchain, %", VerbaliseVkResult(res)); std::vector tmp; - vkGetSwapchainImagesKHR(device, _swapchain, &imageCount, nullptr); - _images.resize(imageCount); - tmp.resize(imageCount); - vkGetSwapchainImagesKHR(device, _swapchain, &imageCount, tmp.data()); + vkGetSwapchainImagesKHR(device, m_swapchain, &image_count, nullptr); + m_images.resize(image_count); + tmp.resize(image_count); + vkGetSwapchainImagesKHR(device, m_swapchain, &image_count, tmp.data()); - for(std::size_t i = 0; i < imageCount; i++) + for(std::size_t i = 0; i < image_count; i++) { - _images[i].create(tmp[i], surfaceFormat.format, _extent.width, _extent.height); - _images[i].transitionLayout(VK_IMAGE_LAYOUT_PRESENT_SRC_KHR); - _images[i].createImageView(VK_IMAGE_VIEW_TYPE_2D, VK_IMAGE_ASPECT_COLOR_BIT); + m_images[i].Create(tmp[i], surface_format.format, m_extent.width, m_extent.height); + m_images[i].TransitionLayout(VK_IMAGE_LAYOUT_PRESENT_SRC_KHR); + m_images[i].CreateImageView(VK_IMAGE_VIEW_TYPE_2D, VK_IMAGE_ASPECT_COLOR_BIT); } - _swapchain_image_format = surfaceFormat.format; - #ifdef DEBUG - core::error::report(e_kind::message, "Vulkan : created new swapchain"); - #endif + m_swapchain_image_format = surface_format.format; + DebugLog("Vulkan : created new swapchain"); } - SwapChain::SwapChainSupportDetails SwapChain::querySwapChainSupport(VkPhysicalDevice device) + SwapChain::SwapChainSupportDetails SwapChain::QuerySwapChainSupport(VkPhysicalDevice device) { SwapChain::SwapChainSupportDetails details; - VkSurfaceKHR surface = _renderer->getSurface().get(); + VkSurfaceKHR surface = m_renderer->GetSurface().Get(); if(vkGetPhysicalDeviceSurfaceCapabilitiesKHR(device, surface, &details.capabilities) != VK_SUCCESS) - core::error::report(e_kind::fatal_error, "Vulkan : unable to retrieve surface capabilities"); + FatalError("Vulkan : unable to retrieve surface 
capabilities"); - std::uint32_t formatCount = 0; - vkGetPhysicalDeviceSurfaceFormatsKHR(device, surface, &formatCount, nullptr); + std::uint32_t format_count = 0; + vkGetPhysicalDeviceSurfaceFormatsKHR(device, surface, &format_count, nullptr); - if(formatCount != 0) + if(format_count != 0) { - details.formats.resize(formatCount); - vkGetPhysicalDeviceSurfaceFormatsKHR(device, surface, &formatCount, details.formats.data()); + details.formats.resize(format_count); + vkGetPhysicalDeviceSurfaceFormatsKHR(device, surface, &format_count, details.formats.data()); } - std::uint32_t presentModeCount; - vkGetPhysicalDeviceSurfacePresentModesKHR(device, surface, &presentModeCount, nullptr); + std::uint32_t present_mode_count; + vkGetPhysicalDeviceSurfacePresentModesKHR(device, surface, &present_mode_count, nullptr); - if(presentModeCount != 0) + if(present_mode_count != 0) { - details.present_modes.resize(presentModeCount); - vkGetPhysicalDeviceSurfacePresentModesKHR(device, surface, &presentModeCount, details.present_modes.data()); + details.present_modes.resize(present_mode_count); + vkGetPhysicalDeviceSurfacePresentModesKHR(device, surface, &present_mode_count, details.present_modes.data()); } return details; } - VkPresentModeKHR SwapChain::chooseSwapPresentMode([[maybe_unused]] const std::vector& availablePresentModes) + VkPresentModeKHR SwapChain::chooseSwapPresentMode([[maybe_unused]] const std::vector& available_present_modes) { // in the future, you may choose to activate vsync or not return VK_PRESENT_MODE_IMMEDIATE_KHR; } - VkExtent2D SwapChain::chooseSwapExtent(const VkSurfaceCapabilitiesKHR& capabilities) + VkExtent2D SwapChain::ChooseSwapExtent(const VkSurfaceCapabilitiesKHR& capabilities) { if(capabilities.currentExtent.width != std::numeric_limits::max()) return capabilities.currentExtent; int width, height; - glfwGetFramebufferSize(_renderer->getWindow()->getNativeWindow(), &width, &height); + glfwGetFramebufferSize(m_renderer->GetWindow()->GetNativeWindow(), &width, &height); - VkExtent2D actualExtent = { static_cast(width), static_cast(height) }; + VkExtent2D actual_extent = { static_cast(width), static_cast(height) }; - actualExtent.width = std::clamp(actualExtent.width, capabilities.minImageExtent.width, capabilities.maxImageExtent.width); - actualExtent.height = std::clamp(actualExtent.height, capabilities.minImageExtent.height, capabilities.maxImageExtent.height); + actual_extent.width = std::clamp(actual_extent.width, capabilities.minImageExtent.width, capabilities.maxImageExtent.width); + actual_extent.height = std::clamp(actual_extent.height, capabilities.minImageExtent.height, capabilities.maxImageExtent.height); - return actualExtent; + return actual_extent; } - void SwapChain::recreate() + void SwapChain::Recreate() { - destroy(); - init(_renderer); + Destroy(); + Init(m_renderer); } - void SwapChain::destroy() noexcept + void SwapChain::Destroy() noexcept { - if(_swapchain == VK_NULL_HANDLE) + if(m_swapchain == VK_NULL_HANDLE) return; - vkDeviceWaitIdle(Render_Core::get().getDevice().get()); - vkDestroySwapchainKHR(Render_Core::get().getDevice().get(), _swapchain, nullptr); - _swapchain = VK_NULL_HANDLE; - for(Image& img : _images) - img.destroyImageView(); + vkDeviceWaitIdle(RenderCore::Get().GetDevice().Get()); + vkDestroySwapchainKHR(RenderCore::Get().GetDevice().Get(), m_swapchain, nullptr); + m_swapchain = VK_NULL_HANDLE; + for(Image& img : m_images) + img.DestroyImageView(); } } diff --git a/runtime/Sources/Renderer/Texts/Font.cpp 
b/runtime/Sources/Renderer/Texts/Font.cpp index 9ebec8a..6c93646 100644 --- a/runtime/Sources/Renderer/Texts/Font.cpp +++ b/runtime/Sources/Renderer/Texts/Font.cpp @@ -1,48 +1,47 @@ /* ************************************************************************** */ /* */ /* ::: :::::::: */ -/* font.cpp :+: :+: :+: */ +/* Font.cpp :+: :+: :+: */ /* +:+ +:+ +:+ */ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/12/11 22:06:09 by kbz_8 #+# #+# */ -/* Updated: 2024/03/25 19:03:54 by maldavid ### ########.fr */ +/* Updated: 2024/04/23 22:48:30 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ -#include +#include -#include -#include -#include +#include +#include constexpr const int RANGE = 1024; namespace mlx { - Font::Font(Renderer& renderer, const std::filesystem::path& path, float scale) : _name(path.string()), _renderer(renderer), _scale(scale) + Font::Font(Renderer& renderer, const std::filesystem::path& path, float scale) : m_name(path.string()), m_renderer(renderer), m_scale(scale) { - _build_data = path; + m_build_data = path; } - Font::Font(class Renderer& renderer, const std::string& name, const std::vector& ttf_data, float scale) : _name(name), _renderer(renderer), _scale(scale) + Font::Font(class Renderer& renderer, const std::string& name, const std::vector& ttf_data, float scale) : m_name(name), m_renderer(renderer), m_scale(scale) { - _build_data = ttf_data; + m_build_data = ttf_data; } - void Font::buildFont() + void Font::BuildFont() { MLX_PROFILE_FUNCTION(); std::vector file_bytes; - if(std::holds_alternative(_build_data)) + if(std::holds_alternative(m_build_data)) { - std::ifstream file(std::get(_build_data), std::ios::binary); + std::ifstream file(std::get(m_build_data), std::ios::binary); if(!file.is_open()) { - core::error::report(e_kind::error, "Font load : cannot open font file, %s", _name.c_str()); + Error("Font load : cannot open font file, %", m_name.c_str()); return; } - std::ifstream::pos_type fileSize = std::filesystem::file_size(std::get(_build_data)); + std::ifstream::pos_type fileSize = std::filesystem::file_size(std::get(m_build_data)); file.seekg(0, std::ios::beg); file_bytes.resize(fileSize); file.read(reinterpret_cast(file_bytes.data()), fileSize); @@ -53,10 +52,10 @@ namespace mlx std::vector vulkan_bitmap(RANGE * RANGE * 4); stbtt_pack_context pc; stbtt_PackBegin(&pc, tmp_bitmap.data(), RANGE, RANGE, RANGE, 1, nullptr); - if(std::holds_alternative(_build_data)) - stbtt_PackFontRange(&pc, file_bytes.data(), 0, _scale, 32, 96, _cdata.data()); + if(std::holds_alternative(m_build_data)) + stbtt_PackFontRange(&pc, file_bytes.data(), 0, m_scale, 32, 96, m_cdata.data()); else - stbtt_PackFontRange(&pc, std::get>(_build_data).data(), 0, _scale, 32, 96, _cdata.data()); + stbtt_PackFontRange(&pc, std::get>(m_build_data).data(), 0, m_scale, 32, 96, m_cdata.data()); stbtt_PackEnd(&pc); for(int i = 0, j = 0; i < RANGE * RANGE; i++, j += 4) { @@ -66,24 +65,24 @@ namespace mlx vulkan_bitmap[j + 3] = tmp_bitmap[i]; } #ifdef DEBUG - _atlas.create(vulkan_bitmap.data(), RANGE, RANGE, VK_FORMAT_R8G8B8A8_UNORM, std::string(_name + "_font_altas").c_str(), true); + m_atlas.Create(vulkan_bitmap.data(), RANGE, RANGE, VK_FORMAT_R8G8B8A8_UNORM, std::string(m_name + "_font_altas").c_str(), true); #else - _atlas.create(vulkan_bitmap.data(), RANGE, RANGE, VK_FORMAT_R8G8B8A8_UNORM, nullptr, true); + m_atlas.Create(vulkan_bitmap.data(), RANGE, RANGE, VK_FORMAT_R8G8B8A8_UNORM, nullptr, true); #endif - 
_atlas.setDescriptor(_renderer.getFragDescriptorSet().duplicate()); - _is_init = true; + m_atlas.SetDescriptor(m_renderer.GetFragDescriptorSet().Duplicate()); + m_is_init = true; } - void Font::destroy() + void Font::Destroy() { MLX_PROFILE_FUNCTION(); - _atlas.destroy(); - _is_init = false; + m_atlas.Destroy(); + m_is_init = false; } Font::~Font() { - if(_is_init) + if(m_is_init) destroy(); } } diff --git a/runtime/Sources/Renderer/Texts/FontLibrary.cpp b/runtime/Sources/Renderer/Texts/FontLibrary.cpp index 3265bbc..ec712b2 100644 --- a/runtime/Sources/Renderer/Texts/FontLibrary.cpp +++ b/runtime/Sources/Renderer/Texts/FontLibrary.cpp @@ -1,16 +1,16 @@ /* ************************************************************************** */ /* */ /* ::: :::::::: */ -/* font_library.cpp :+: :+: :+: */ +/* FontLibrary.cpp :+: :+: :+: */ /* +:+ +:+ +:+ */ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2024/01/18 09:28:14 by maldavid #+# #+# */ -/* Updated: 2024/03/25 19:03:57 by maldavid ### ########.fr */ +/* Updated: 2024/04/23 22:59:16 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ -#include +#include #include #include From edb44070a98e5a870a94b115223782d3e05a584a Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Wed, 24 Apr 2024 13:47:58 +0200 Subject: [PATCH 011/131] still refactor --- .../Includes/Renderer/Images/TextureAtlas.h | 3 +- runtime/Includes/Renderer/PixelPut.h | 6 +- runtime/Includes/Renderer/Renderer.h | 13 +- runtime/Sources/Renderer/PixelPut.cpp | 61 +++--- runtime/Sources/Renderer/Renderer.cpp | 199 +++++++++--------- .../Sources/Renderer/Texts/FontLibrary.cpp | 56 +++-- runtime/Sources/Renderer/Texts/Text.cpp | 55 +++-- .../Sources/Renderer/Texts/TextDescriptor.cpp | 99 +++++---- .../Sources/Renderer/Texts/TextLibrary.cpp | 54 +++-- .../Sources/Renderer/Texts/TextManager.cpp | 45 ++-- 10 files changed, 288 insertions(+), 303 deletions(-) diff --git a/runtime/Includes/Renderer/Images/TextureAtlas.h b/runtime/Includes/Renderer/Images/TextureAtlas.h index dc53e3c..34802fa 100644 --- a/runtime/Includes/Renderer/Images/TextureAtlas.h +++ b/runtime/Includes/Renderer/Images/TextureAtlas.h @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/04/07 16:36:33 by maldavid #+# #+# */ -/* Updated: 2024/03/28 22:12:13 by maldavid ### ########.fr */ +/* Updated: 2024/04/24 01:37:54 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -27,7 +27,6 @@ namespace mlx void Destroy() noexcept override; inline void SetDescriptor(DescriptorSet&& set) noexcept { m_set = set; } - inline VkDescriptorSet GetVkSet() noexcept { return m_set.isInit() ? 
m_set.get() : VK_NULL_HANDLE; } inline DescriptorSet GetSet() noexcept { return m_set; } inline void UpdateSet(int binding) noexcept { m_set.writeDescriptor(binding, *this); m_has_been_updated = true; } inline bool HasBeenUpdated() const noexcept { return m_has_been_updated; } diff --git a/runtime/Includes/Renderer/PixelPut.h b/runtime/Includes/Renderer/PixelPut.h index 9101643..c16d7c9 100644 --- a/runtime/Includes/Renderer/PixelPut.h +++ b/runtime/Includes/Renderer/PixelPut.h @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/03/31 13:18:50 by maldavid #+# #+# */ -/* Updated: 2024/03/28 22:28:46 by maldavid ### ########.fr */ +/* Updated: 2024/04/24 01:46:11 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -26,12 +26,12 @@ namespace mlx void Init(std::uint32_t width, std::uint32_t height, class Renderer& renderer) noexcept; void SetPixel(int x, int y, std::uint32_t color) noexcept; - void Render(std::array& sets, class Renderer& renderer) noexcept; + void Render(class Renderer& renderer) noexcept; void Clear(); void Destroy() noexcept; - ~PixelPutPipeline(); + ~PixelPutPipeline() = default; private: Texture m_texture; diff --git a/runtime/Includes/Renderer/Renderer.h b/runtime/Includes/Renderer/Renderer.h index ac6e58e..9a0767f 100644 --- a/runtime/Includes/Renderer/Renderer.h +++ b/runtime/Includes/Renderer/Renderer.h @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/12/18 17:14:45 by maldavid #+# #+# */ -/* Updated: 2024/04/23 22:25:13 by maldavid ### ########.fr */ +/* Updated: 2024/04/24 01:53:20 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -45,14 +45,12 @@ namespace mlx inline void SetWindow(NonOwningPtr window) { m_window = window; } inline Surface& GetSurface() noexcept { return m_surface; } - inline CmdPool& GetCmdPool() noexcept { return m_cmd.GetCmdPool(); } inline NonOwningPtr GetUniformBuffer() noexcept { return m_uniform_buffer.get(); } inline SwapChain& GetSwapChain() noexcept { return m_swapchain; } - inline Semaphore& GetSemaphore(int i) noexcept { return m_semaphores[i]; } inline RenderPass& GetRenderPass() noexcept { return m_pass; } inline GraphicPipeline& GetPipeline() noexcept { return m_pipeline; } - inline CmdBuffer& GetCmdBuffer(int i) noexcept { return m_cmd.GetCmdBuffer(i); } - inline CmdBuffer& GetActiveCmdBuffer() noexcept { return m_cmd.GetCmdBuffer(m_current_frame_index); } + inline CommandBuffer& GetCmdBuffer(int i) noexcept { return m_cmd.GetCmdBuffer(i); } + inline CommandBuffer& GetActiveCmdBuffer() noexcept { return m_cmd.GetCmdBuffer(m_current_frame_index); } inline FrameBuffer& GetFrameBuffer(int i) noexcept { return m_framebuffers[i]; } inline DescriptorSet& GetVertDescriptorSet() noexcept { return m_vert_set; } inline DescriptorSet& GetFragDescriptorSet() noexcept { return m_frag_set; } @@ -68,11 +66,12 @@ namespace mlx private: GraphicPipeline m_pipeline; - CmdManager m_cmd; + CommandManager m_cmd; RenderPass m_pass; Surface m_surface; SwapChain m_swapchain; - std::array m_semaphores; + std::array m_render_finished_semaphores; + std::array m_image_available_semaphores; std::vector m_framebuffers; DescriptorSet m_vert_set; diff --git a/runtime/Sources/Renderer/PixelPut.cpp b/runtime/Sources/Renderer/PixelPut.cpp index 32c0ae3..f9743a6 100644 --- a/runtime/Sources/Renderer/PixelPut.cpp +++ b/runtime/Sources/Renderer/PixelPut.cpp @@ 
-1,70 +1,67 @@
 /* ************************************************************************** */
 /* */
 /* ::: :::::::: */
-/* pixel_put.cpp :+: :+: :+: */
+/* PixelPut.cpp :+: :+: :+: */
 /* +:+ +:+ +:+ */
 /* By: maldavid +#+ +:+ +#+ */
 /* +#+#+#+#+#+ +#+ */
 /* Created: 2023/03/31 15:14:50 by maldavid #+# #+# */
-/* Updated: 2024/03/25 19:03:45 by maldavid ### ########.fr */
+/* Updated: 2024/04/24 01:46:06 by maldavid ### ########.fr */
 /* */
 /* ************************************************************************** */

-#include 
+#include 

-#include 
-#include 
+#include 

 namespace mlx
 {
-	void PixelPutPipeline::init(std::uint32_t width, std::uint32_t height, Renderer& renderer) noexcept
+	void PixelPutPipeline::Init(std::uint32_t width, std::uint32_t height, Renderer& renderer) noexcept
 	{
 		MLX_PROFILE_FUNCTION();
-		_texture.create(nullptr, width, height, VK_FORMAT_R8G8B8A8_UNORM, "__mlx_pixel_put_pipeline_texture", true);
-		_texture.setDescriptor(renderer.getFragDescriptorSet().duplicate());
+		m_texture.Create(nullptr, width, height, VK_FORMAT_R8G8B8A8_UNORM, "__mlx_pixel_put_pipeline_texture", true);
+		m_texture.SetDescriptor(renderer.GetFragDescriptorSet().Duplicate());

-		_buffer.create(Buffer::kind::dynamic, sizeof(std::uint32_t) * (width * height), VK_BUFFER_USAGE_TRANSFER_SRC_BIT, "__mlx_pixel_put_pipeline_texture");
-		_buffer.mapMem(&_buffer_map);
-		_cpu_map = std::vector(height * width + 1, 0);
-		_width = width;
-		_height = height;
+		m_buffer.Create(BufferType::HighDynamic, sizeof(std::uint32_t) * (width * height), VK_BUFFER_USAGE_TRANSFER_SRC_BIT, "__mlx_pixel_put_pipeline_texture");
+		m_buffer.MapMem(&m_buffer_map);
+		m_cpu_map = std::vector(height * width + 1, 0);
+		m_width = width;
+		m_height = height;
 	}

-	void PixelPutPipeline::setPixel(int x, int y, std::uint32_t color) noexcept
+	void PixelPutPipeline::SetPixel(int x, int y, std::uint32_t color) noexcept
 	{
 		MLX_PROFILE_FUNCTION();
-		if(x < 0 || y < 0 || x > static_cast(_width) || y > static_cast(_height))
+		if(x < 0 || y < 0 || x > static_cast(m_width) || y > static_cast(m_height))
 			return;
-		_cpu_map[(y * _width) + x] = color;
-		_has_been_modified = true;
+		m_cpu_map[(y * m_width) + x] = color;
+		m_has_been_modified = true;
 	}

-	void PixelPutPipeline::clear()
+	void PixelPutPipeline::Clear()
 	{
 		MLX_PROFILE_FUNCTION();
-		_cpu_map.assign(_width * _height, 0);
-		_has_been_modified = true;
+		m_cpu_map.assign(m_width * m_height, 0);
+		m_has_been_modified = true;
 	}

-	void PixelPutPipeline::render(std::array& sets, Renderer& renderer) noexcept
+	void PixelPutPipeline::Render(Renderer& renderer) noexcept
 	{
 		MLX_PROFILE_FUNCTION();
-		if(_has_been_modified)
+		if(m_has_been_modified)
 		{
-			std::memcpy(_buffer_map, _cpu_map.data(), sizeof(std::uint32_t) * _cpu_map.size());
-			_texture.copyFromBuffer(_buffer);
-			_has_been_modified = false;
+			std::memcpy(m_buffer_map, m_cpu_map.data(), sizeof(std::uint32_t) * m_cpu_map.size());
+			m_texture.CopyFromBuffer(m_buffer);
+			m_has_been_modified = false;
 		}
-		_texture.updateSet(0);
-		_texture.render(sets, renderer, 0, 0);
+		m_texture.UpdateSet(0);
+		m_texture.Render(renderer, 0, 0);
 	}

-	void PixelPutPipeline::destroy() noexcept
+	void PixelPutPipeline::Destroy() noexcept
 	{
 		MLX_PROFILE_FUNCTION();
-		_buffer.destroy();
-		_texture.destroy();
+		m_buffer.Destroy();
+		m_texture.Destroy();
 	}
-
-	PixelPutPipeline::~PixelPutPipeline() {}
 }
diff --git a/runtime/Sources/Renderer/Renderer.cpp b/runtime/Sources/Renderer/Renderer.cpp
index 85102ef..0c131d0 100644
--- a/runtime/Sources/Renderer/Renderer.cpp
+++ 
b/runtime/Sources/Renderer/Renderer.cpp @@ -1,192 +1,195 @@ /* ************************************************************************** */ /* */ /* ::: :::::::: */ -/* renderer.cpp :+: :+: :+: */ +/* Renderer.cpp :+: :+: :+: */ /* +:+ +:+ +:+ */ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/12/18 17:25:16 by maldavid #+# #+# */ -/* Updated: 2024/03/25 19:03:49 by maldavid ### ########.fr */ +/* Updated: 2024/04/24 01:58:51 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ -#include +#include -#include -#include -#include -#include +#include +#include +#include namespace mlx { - void Renderer::init(Texture* render_target) + void Renderer::Init(NonOwningPtr render_target) { MLX_PROFILE_FUNCTION(); - if(render_target == nullptr) + if(!render_target) { - _surface.create(*this); - _swapchain.init(this); - _pass.init(_swapchain.getImagesFormat(), VK_IMAGE_LAYOUT_PRESENT_SRC_KHR); - for(std::size_t i = 0; i < _swapchain.getImagesNumber(); i++) - _framebuffers.emplace_back().init(_pass, _swapchain.getImage(i)); + m_surface.Create(*this); + m_swapchain.Init(this); + m_pass.Init(m_swapchain.GetImagesFormat(), VK_IMAGE_LAYOUT_PRESENT_SRC_KHR); + for(std::size_t i = 0; i < m_swapchain.GetImagesNumber(); i++) + m_framebuffers.emplace_back().Init(m_pass, m_swapchain.GetImage(i)); } else { - _render_target = render_target; - _render_target->transitionLayout(VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL); - _pass.init(_render_target->getFormat(), _render_target->getLayout()); - _framebuffers.emplace_back().init(_pass, *static_cast(_render_target)); + m_render_target = render_target; + m_render_target->TransitionLayout(VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL); + m_pass.Init(m_render_target->GetFormat(), m_render_target->GetLayout()); + m_framebuffers.emplace_back().Init(m_pass, *static_cast(m_render_target)); } - _cmd.init(); + m_cmd.Init(); for(std::size_t i = 0; i < MAX_FRAMES_IN_FLIGHT; i++) - _semaphores[i].init(); + { + m_render_finished_semaphores[i].Init(); + m_image_available_semaphores[i].Init(); + } - _uniform_buffer.reset(new UBO); + m_uniform_buffer.reset(new UniformBuffer); #ifdef DEBUG - _uniform_buffer->create(this, sizeof(glm::mat4), "__mlx_matrices_uniform_buffer_"); + m_uniform_buffer->Create(this, sizeof(glm::mat4), "__mlx_matrices_uniform_buffer_"); #else - _uniform_buffer->create(this, sizeof(glm::mat4), nullptr); + m_uniform_buffer->Create(this, sizeof(glm::mat4), nullptr); #endif - _vert_layout.init({ + DescriptorSetLayout vert_layout; + DescriptorSetLayout frag_layout; + + vert_layout.Init({ {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER} }, VK_SHADER_STAGE_VERTEX_BIT); - _frag_layout.init({ + frag_layout.Init({ {0, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER} }, VK_SHADER_STAGE_FRAGMENT_BIT); - _vert_set.init(this, &Render_Core::get().getDescriptorPool(), &_vert_layout); - _frag_set.init(this, &Render_Core::get().getDescriptorPool(), &_frag_layout); + m_vert_set.Init(this, &RenderCore::Get().GetDescriptorPool(), std::move(vert_layout)); + m_frag_set.Init(this, &RenderCore::Get().GetDescriptorPool(), std::move(frag_layout)); - _vert_set.writeDescriptor(0, _uniform_buffer.get()); + m_vert_set.WriteDescriptor(0, m_uniform_buffer.Get()); - _pipeline.init(*this); + m_pipeline.Init(*this); - _framebuffer_resized = false; + m_framebuffer_resized = false; } - bool Renderer::beginFrame() + bool Renderer::BeginFrame() { MLX_PROFILE_FUNCTION(); - auto device = Render_Core::get().getDevice().get(); + auto device = 
RenderCore::Get().GetDevice().Get(); - if(_render_target == nullptr) + if(!m_render_target) { - _cmd.getCmdBuffer(_current_frame_index).waitForExecution(); - VkResult result = vkAcquireNextImageKHR(device, _swapchain(), UINT64_MAX, _semaphores[_current_frame_index].getImageSemaphore(), VK_NULL_HANDLE, &_image_index); + m_cmd.GetCmdBuffer(m_current_frame_index).WaitForExecution(); + VkResult result = vkAcquireNextImageKHR(device, m_swapchain.Get(), UINT64_MAX, m_image_available_semaphores[m_current_frame_index].Get(), VK_NULL_HANDLE, &m_image_index); if(result == VK_ERROR_OUT_OF_DATE_KHR) { - recreateRenderData(); + RecreateRenderData(); return false; } else if(result != VK_SUCCESS && result != VK_SUBOPTIMAL_KHR) - core::error::report(e_kind::fatal_error, "Vulkan error : failed to acquire swapchain image"); + FatalError("Vulkan error : failed to acquire swapchain image"); } else { - _image_index = 0; - if(_render_target->getLayout() != VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL) - _render_target->transitionLayout(VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL); + m_image_index = 0; + if(m_render_target->GetLayout() != VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL) + m_render_target->TransitionLayout(VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL); } - _cmd.getCmdBuffer(_current_frame_index).reset(); - _cmd.getCmdBuffer(_current_frame_index).beginRecord(); + m_cmd.GetCmdBuffer(m_current_frame_index).Reset(); + m_cmd.GetCmdBuffer(m_current_frame_index).BeginRecord(); auto& fb = _framebuffers[_image_index]; - _pass.begin(getActiveCmdBuffer(), fb); + m_pass.Begin(GetActiveCmdBuffer(), fb); - _pipeline.bindPipeline(_cmd.getCmdBuffer(_current_frame_index)); + m_pipeline.BindPipeline(m_cmd.GetCmdBuffer(m_current_frame_index)); VkViewport viewport{}; viewport.x = 0.0f; viewport.y = 0.0f; - viewport.width = static_cast(fb.getWidth()); - viewport.height = static_cast(fb.getHeight()); + viewport.width = static_cast(fb.GetWidth()); + viewport.height = static_cast(fb.GetHeight()); viewport.minDepth = 0.0f; viewport.maxDepth = 1.0f; - vkCmdSetViewport(_cmd.getCmdBuffer(_current_frame_index).get(), 0, 1, &viewport); + vkCmdSetViewport(m_cmd.GetCmdBuffer(m_current_frame_index).Get(), 0, 1, &viewport); VkRect2D scissor{}; scissor.offset = { 0, 0 }; - scissor.extent = { fb.getWidth(), fb.getHeight()}; - vkCmdSetScissor(_cmd.getCmdBuffer(_current_frame_index).get(), 0, 1, &scissor); + scissor.extent = { fb.GetWidth(), fb.GetHeight()}; + vkCmdSetScissor(m_cmd.GetCmdBuffer(m_current_frame_index).Get(), 0, 1, &scissor); return true; } - void Renderer::endFrame() + void Renderer::EndFrame() { MLX_PROFILE_FUNCTION(); - _pass.end(getActiveCmdBuffer()); - _cmd.getCmdBuffer(_current_frame_index).endRecord(); + m_pass.End(GetActiveCmdBuffer()); + m_cmd.GetCmdBuffer(m_current_frame_index).EndRecord(); - if(_render_target == nullptr) + if(!m_render_target) { - _cmd.getCmdBuffer(_current_frame_index).submit(&_semaphores[_current_frame_index]); + m_cmd.GetCmdBuffer(m_current_frame_index).Submit(&m_render_finished_semaphores[m_current_frame_index]); - VkSwapchainKHR swapchain = _swapchain(); - VkSemaphore signalSemaphores[] = { _semaphores[_current_frame_index].getRenderImageSemaphore() }; + VkSwapchainKHR swapchain = m_swapchain.Get(); + VkSemaphore signal_semaphores[] = { m_render_finished_semaphores[m_current_frame_index].Get() }; - VkPresentInfoKHR presentInfo{}; - presentInfo.sType = VK_STRUCTURE_TYPE_PRESENT_INFO_KHR; - presentInfo.waitSemaphoreCount = 1; - presentInfo.pWaitSemaphores = signalSemaphores; - presentInfo.swapchainCount = 
1;
-			presentInfo.pSwapchains = &swapchain;
-			presentInfo.pImageIndices = &_image_index;
-
-			VkResult result = vkQueuePresentKHR(Render_Core::get().getQueue().getPresent(), &presentInfo);
-
-			if(result == VK_ERROR_OUT_OF_DATE_KHR || result == VK_SUBOPTIMAL_KHR || _framebuffer_resized)
+			VkPresentInfoKHR present_info{};
+			present_info.sType = VK_STRUCTURE_TYPE_PRESENT_INFO_KHR;
+			present_info.waitSemaphoreCount = 1;
+			present_info.pWaitSemaphores = signal_semaphores;
+			present_info.swapchainCount = 1;
+			present_info.pSwapchains = &swapchain;
+			present_info.pImageIndices = &m_image_index;
+			VkResult result = vkQueuePresentKHR(RenderCore::Get().GetQueue().GetPresent(), &present_info);
+			if(result == VK_ERROR_OUT_OF_DATE_KHR || result == VK_SUBOPTIMAL_KHR || m_framebuffer_resized)
 			{
-				_framebuffer_resized = false;
-				recreateRenderData();
+				m_framebuffer_resized = false;
+				RecreateRenderData();
 			}
 			else if(result != VK_SUCCESS)
-				core::error::report(e_kind::fatal_error, "Vulkan error : failed to present swap chain image");
-			_current_frame_index = (_current_frame_index + 1) % MAX_FRAMES_IN_FLIGHT;
+				FatalError("Vulkan error : failed to present swap chain image");
+			m_current_frame_index = (m_current_frame_index + 1) % MAX_FRAMES_IN_FLIGHT;
 		}
 		else
 		{
-			_cmd.getCmdBuffer(_current_frame_index).submitIdle(true);
-			_current_frame_index = 0;
+			m_cmd.GetCmdBuffer(m_current_frame_index).SubmitIdle(true);
+			m_current_frame_index = 0;
 		}
 	}

-	void Renderer::recreateRenderData()
+	void Renderer::RecreateRenderData()
 	{
-		_swapchain.recreate();
-		_pass.destroy();
-		_pass.init(_swapchain.getImagesFormat(), VK_IMAGE_LAYOUT_PRESENT_SRC_KHR);
-		for(auto& fb : _framebuffers)
-			fb.destroy();
-		_framebuffers.clear();
-		for(std::size_t i = 0; i < _swapchain.getImagesNumber(); i++)
-			_framebuffers.emplace_back().init(_pass, _swapchain.getImage(i));
+		m_swapchain.Recreate();
+		m_pass.Destroy();
+		m_pass.Init(m_swapchain.GetImagesFormat(), VK_IMAGE_LAYOUT_PRESENT_SRC_KHR);
+		for(auto& fb : m_framebuffers)
+			fb.Destroy();
+		m_framebuffers.clear();
+		for(std::size_t i = 0; i < m_swapchain.GetImagesNumber(); i++)
+			m_framebuffers.emplace_back().Init(m_pass, m_swapchain.GetImage(i));
 	}

-	void Renderer::destroy()
+	void Renderer::Destroy()
 	{
 		MLX_PROFILE_FUNCTION();
-		vkDeviceWaitIdle(Render_Core::get().getDevice().get());
+		vkDeviceWaitIdle(RenderCore::Get().GetDevice().Get());

-		_pipeline.destroy();
-		_uniform_buffer->destroy();
-		_vert_layout.destroy();
-		_frag_layout.destroy();
-		_frag_set.destroy();
-		_vert_set.destroy();
-		_cmd.destroy();
-		_pass.destroy();
-		if(_render_target == nullptr)
+		m_pipeline.Destroy();
+		m_uniform_buffer->Destroy();
+		m_vert_layout.Destroy();
+		m_frag_layout.Destroy();
+		m_frag_set.Destroy();
+		m_vert_set.Destroy();
+		m_cmd.Destroy();
+		m_pass.Destroy();
+		if(!m_render_target)
 		{
-			_swapchain.destroy();
-			_surface.destroy();
+			m_swapchain.Destroy();
+			m_surface.Destroy();
 		}
-		for(auto& fb : _framebuffers)
-			fb.destroy();
+		for(auto& fb : m_framebuffers)
+			fb.Destroy();
 		for(int i = 0; i < MAX_FRAMES_IN_FLIGHT; i++)
-			_semaphores[i].destroy();
+		{
+			m_render_finished_semaphores[i].Destroy();
+			m_image_available_semaphores[i].Destroy();
+		}
 	}
 }
diff --git a/runtime/Sources/Renderer/Texts/FontLibrary.cpp b/runtime/Sources/Renderer/Texts/FontLibrary.cpp
index ec712b2..eed3b60 100644
--- a/runtime/Sources/Renderer/Texts/FontLibrary.cpp
+++ b/runtime/Sources/Renderer/Texts/FontLibrary.cpp
@@ -6,64 +6,62 @@
 /* By: maldavid +#+ +:+ +#+ */
 /* +#+#+#+#+#+ +#+ */
 /* Created: 2024/01/18 09:28:14 by maldavid #+# #+# */
-/* Updated: 2024/04/23 22:59:16 by maldavid ### ########.fr */
+/* 
Updated: 2024/04/24 01:28:40 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #include -#include -#include -#include -#include -#include +#include +#include +#include namespace mlx { - std::shared_ptr FontLibrary::getFontData(FontID id) + std::shared_ptr FontLibrary::GetFontData(FontID id) { MLX_PROFILE_FUNCTION(); - if(!_cache.count(id) || std::find(_invalid_ids.begin(), _invalid_ids.end(), id) != _invalid_ids.end()) - core::error::report(e_kind::fatal_error, "Font Library : wrong font ID '%d'", id); - return _cache[id]; + if(!m_cache.count(id) || std::find(m_invalid_ids.begin(), m_invalid_ids.end(), id) != m_invalid_ids.end()) + FatalError("Font Library : wrong font ID '%'", id); + return m_cache[id]; } - FontID FontLibrary::addFontToLibrary(std::shared_ptr font) + FontID FontLibrary::AddFontToLibrary(std::shared_ptr font) { MLX_PROFILE_FUNCTION(); - auto it = std::find_if(_cache.begin(), _cache.end(), [&](const std::pair>& v) + auto it = std::find_if(m_cache.begin(), m_cache.end(), [&](const std::pair>& v) { - return v.second->getScale() == font->getScale() && - v.second->getName() == font->getName() && - std::find(_invalid_ids.begin(), _invalid_ids.end(), v.first) == _invalid_ids.end(); + return v.second->GetScale() == font->GetScale() && + v.second->GetName() == font->GetName() && + std::find(m_invalid_ids.begin(), m_invalid_ids.end(), v.first) == m_invalid_ids.end(); }); - if(it != _cache.end()) + if(it != m_cache.end()) return it->first; - font->buildFont(); - _cache[_current_id] = font; - _current_id++; - return _current_id - 1; + font->BuildFont(); + m_cache[m_current_id] = font; + m_current_id++; + return m_current_id - 1; } - void FontLibrary::removeFontFromLibrary(FontID id) + void FontLibrary::RemoveFontFromLibrary(FontID id) { MLX_PROFILE_FUNCTION(); - if(!_cache.count(id) || std::find(_invalid_ids.begin(), _invalid_ids.end(), id) != _invalid_ids.end()) + if(!m_cache.count(id) || std::find(m_invalid_ids.begin(), m_invalid_ids.end(), id) != m_invalid_ids.end()) { - core::error::report(e_kind::warning, "Font Library : trying to remove a font with an unkown or invalid ID '%d'", id); + Warning("Font Library : trying to remove a font with an unkown or invalid ID '%'", id); return; } - _cache[id]->destroy(); - _invalid_ids.push_back(id); + m_cache[id]->Destroy(); + m_invalid_ids.push_back(id); } - void FontLibrary::clearLibrary() + void FontLibrary::ClearLibrary() { MLX_PROFILE_FUNCTION(); - for(auto& [id, font] : _cache) + for(auto& [id, font] : m_cache) { - font->destroy(); - _invalid_ids.push_back(id); + font->Destroy(); + m_invalid_ids.push_back(id); } // do not `_cache.clear();` as it releases the fonts and may not destroy the texture atlas that is in use by command buffers } diff --git a/runtime/Sources/Renderer/Texts/Text.cpp b/runtime/Sources/Renderer/Texts/Text.cpp index f5e3687..162307a 100644 --- a/runtime/Sources/Renderer/Texts/Text.cpp +++ b/runtime/Sources/Renderer/Texts/Text.cpp @@ -1,79 +1,78 @@ /* ************************************************************************** */ /* */ /* ::: :::::::: */ -/* text.cpp :+: :+: :+: */ +/* Text.cpp :+: :+: :+: */ /* +:+ +:+ +:+ */ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2024/01/11 00:11:56 by maldavid #+# #+# */ -/* Updated: 2024/03/25 19:04:01 by maldavid ### ########.fr */ +/* Updated: 2024/04/24 01:33:58 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ -#include 
+#include -#include -#include -#include +#include +#include namespace mlx { - void Text::init(std::string text, FontID font, std::uint32_t color, std::vector vbo_data, std::vector ibo_data) + void Text::Init(std::string text, FontID font, std::uint32_t color, std::vector vbo_data, std::vector ibo_data) { MLX_PROFILE_FUNCTION(); - if(_is_init) + if(m_is_init) return; - _text = std::move(text); - _color = color; - _font = font; + m_text = std::move(text); + m_color = color; + m_font = font; #ifdef DEBUG - std::string debug_name = _text; + std::string debug_name = m_text; for(char& c : debug_name) { if(c == ' ' || c == '"' || c == '\'') c = '_'; } for(int i = 0; i < MAX_FRAMES_IN_FLIGHT; i++) - _vbo[i].create(sizeof(Vertex) * vbo_data.size(), static_cast(vbo_data.data()), debug_name.c_str()); - _ibo.create(sizeof(std::uint16_t) * ibo_data.size(), ibo_data.data(), debug_name.c_str()); + m_vbo[i].Create(sizeof(Vertex) * vbo_data.size(), static_cast(vbo_data.data()), debug_name.c_str()); + m_ibo.Create(sizeof(std::uint16_t) * ibo_data.size(), ibo_data.data(), debug_name.c_str()); #else for(int i = 0; i < MAX_FRAMES_IN_FLIGHT; i++) - _vbo[i].create(sizeof(Vertex) * vbo_data.size(), static_cast(vbo_data.data()), nullptr); - _ibo.create(sizeof(std::uint16_t) * ibo_data.size(), ibo_data.data(), nullptr); + m_vbo[i].Create(sizeof(Vertex) * vbo_data.size(), static_cast(vbo_data.data()), nullptr); + m_ibo.Create(sizeof(std::uint16_t) * ibo_data.size(), ibo_data.data(), nullptr); #endif - _is_init = true; + m_is_init = true; } - void Text::bind(Renderer& renderer) noexcept + void Text::Bind(Renderer& renderer) noexcept { MLX_PROFILE_FUNCTION(); - if(!_is_init) + if(!m_is_init) return; - _vbo[renderer.getActiveImageIndex()].bind(renderer); - _ibo.bind(renderer); + m_vbo[renderer.GetActiveImageIndex()].Bind(renderer); + m_ibo.Bind(renderer); } void Text::updateVertexData(int frame, std::vector vbo_data) { MLX_PROFILE_FUNCTION(); - if(!_is_init) + if(!m_is_init) return; - _vbo[frame].setData(sizeof(Vertex) * vbo_data.size(), static_cast(vbo_data.data())); + m_vbo[frame].SetData(sizeof(Vertex) * vbo_data.size(), static_cast(vbo_data.data())); } void Text::destroy() noexcept { MLX_PROFILE_FUNCTION(); - if(!_is_init) + if(!m_is_init) return; for(int i = 0; i < MAX_FRAMES_IN_FLIGHT; i++) - _vbo[i].destroy(); - _ibo.destroy(); - _is_init = false; + m_vbo[i].Destroy(); + m_ibo.Destroy(); + m_is_init = false; } Text::~Text() { - destroy(); + Destroy(); } } diff --git a/runtime/Sources/Renderer/Texts/TextDescriptor.cpp b/runtime/Sources/Renderer/Texts/TextDescriptor.cpp index 4637bd7..b4a3208 100644 --- a/runtime/Sources/Renderer/Texts/TextDescriptor.cpp +++ b/runtime/Sources/Renderer/Texts/TextDescriptor.cpp @@ -1,61 +1,60 @@ /* ************************************************************************** */ /* */ /* ::: :::::::: */ -/* text_descriptor.cpp :+: :+: :+: */ +/* TextDescriptor.cpp :+: :+: :+: */ /* +:+ +:+ +:+ */ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2024/01/11 00:23:11 by maldavid #+# #+# */ -/* Updated: 2024/03/25 19:04:52 by maldavid ### ########.fr */ +/* Updated: 2024/04/24 01:38:40 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ -#include +#include -#include -#include -#include -#include -#include +#include +#include +#include +#include #define STB_RECT_PACK_IMPLEMENTATION #include -#include +#include #define STB_TRUETYPE_IMPLEMENTATION -#define STB_malloc(x, u) ((void)(u), MemManager::malloc(x)) 
-#define STB_free(x, u) ((void)(u), MemManager::free(x)) +#define STB_malloc(x, u) ((void)(u), MemManager::Malloc(x)) +#define STB_free(x, u) ((void)(u), MemManager::Free(x)) #include constexpr const int RANGE = 1024; namespace mlx { - TextDrawDescriptor::TextDrawDescriptor(std::string text, std::uint32_t _color, int _x, int _y) : color(_color), x(_x), y(_y), _text(std::move(text)) + TextDrawDescriptor::TextDrawDescriptor(std::string text, std::uint32_t _color, int _x, int _y) : color(_color), x(_x), y(_y), m_text(std::move(text)) {} - void TextDrawDescriptor::init(FontID font) noexcept + void TextDrawDescriptor::Init(FontID font) noexcept { MLX_PROFILE_FUNCTION(); - std::vector vertexData; - std::vector indexData; + std::vector vertex_data; + std::vector index_data; float stb_x = 0.0f; float stb_y = 0.0f; { - std::shared_ptr font_data = FontLibrary::get().getFontData(font); + std::shared_ptr font_data = FontLibrary::Get().GetFontData(font); - for(char c : _text) + for(char c : m_text) { if(c < 32) continue; stbtt_aligned_quad q; - stbtt_GetPackedQuad(font_data->getCharData().data(), RANGE, RANGE, c - 32, &stb_x, &stb_y, &q, 1); + stbtt_GetPackedQuad(font_data->GetCharData().data(), RANGE, RANGE, c - 32, &stb_x, &stb_y, &q, 1); - std::size_t index = vertexData.size(); + std::size_t index = vertex_data.size(); glm::vec4 vertex_color = { static_cast((color & 0x000000FF)) / 255.f, @@ -64,49 +63,45 @@ namespace mlx static_cast((color & 0xFF000000) >> 24) / 255.f }; - vertexData.emplace_back(glm::vec2{q.x0, q.y0}, vertex_color, glm::vec2{q.s0, q.t0}); - vertexData.emplace_back(glm::vec2{q.x1, q.y0}, vertex_color, glm::vec2{q.s1, q.t0}); - vertexData.emplace_back(glm::vec2{q.x1, q.y1}, vertex_color, glm::vec2{q.s1, q.t1}); - vertexData.emplace_back(glm::vec2{q.x0, q.y1}, vertex_color, glm::vec2{q.s0, q.t1}); + vertex_data.emplace_back(glm::vec2{q.x0, q.y0}, vertex_color, glm::vec2{q.s0, q.t0}); + vertex_data.emplace_back(glm::vec2{q.x1, q.y0}, vertex_color, glm::vec2{q.s1, q.t0}); + vertex_data.emplace_back(glm::vec2{q.x1, q.y1}, vertex_color, glm::vec2{q.s1, q.t1}); + vertex_data.emplace_back(glm::vec2{q.x0, q.y1}, vertex_color, glm::vec2{q.s0, q.t1}); - indexData.emplace_back(index + 0); - indexData.emplace_back(index + 1); - indexData.emplace_back(index + 2); - indexData.emplace_back(index + 2); - indexData.emplace_back(index + 3); - indexData.emplace_back(index + 0); + index_data.emplace_back(index + 0); + index_data.emplace_back(index + 1); + index_data.emplace_back(index + 2); + index_data.emplace_back(index + 2); + index_data.emplace_back(index + 3); + index_data.emplace_back(index + 0); } } std::shared_ptr text_data = std::make_shared(); - text_data->init(_text, font, color, std::move(vertexData), std::move(indexData)); - id = TextLibrary::get().addTextToLibrary(text_data); - - #ifdef DEBUG - core::error::report(e_kind::message, "Text put : registered new text to render"); - #endif + text_data->Init(m_text, font, color, std::move(vertex_data), std::move(index_data)); + id = TextLibrary::Get().AddTextToLibrary(text_data); + DebugLog("Text put : registered new text to render"); } - void TextDrawDescriptor::render(std::array& sets, Renderer& renderer) + void TextDrawDescriptor::Render(Renderer& renderer) { MLX_PROFILE_FUNCTION(); - std::shared_ptr draw_data = TextLibrary::get().getTextData(id); - std::shared_ptr font_data = FontLibrary::get().getFontData(draw_data->getFontInUse()); - TextureAtlas& atlas = const_cast(font_data->getAtlas()); - draw_data->bind(renderer); - 
if(!atlas.getSet().isInit()) - atlas.setDescriptor(renderer.getFragDescriptorSet().duplicate()); - if(!atlas.hasBeenUpdated()) - atlas.updateSet(0); - sets[1] = const_cast(atlas).getVkSet(); - vkCmdBindDescriptorSets(renderer.getActiveCmdBuffer().get(), VK_PIPELINE_BIND_POINT_GRAPHICS, renderer.getPipeline().getPipelineLayout(), 0, sets.size(), sets.data(), 0, nullptr); - atlas.render(renderer, x, y, draw_data->getIBOsize()); + std::shared_ptr draw_data = TextLibrary::Get().GetTextData(id); + std::shared_ptr font_data = FontLibrary::Get().GetFontData(draw_data->GetFontInUse()); + TextureAtlas& atlas = const_cast(font_data->GetAtlas()); + draw_data->Bind(renderer); + if(!atlas.GetSet().IsInit()) + atlas.SetDescriptor(renderer.GetFragDescriptorSet().Duplicate()); + if(!atlas.HasBeenUpdated()) + atlas.UpdateSet(0); + atlas.GetSet().Bind(); + atlas.Render(renderer, x, y, draw_data->GetIBOsize()); } - void TextDrawDescriptor::resetUpdate() + void TextDrawDescriptor::ResetUpdate() { - std::shared_ptr draw_data = TextLibrary::get().getTextData(id); - std::shared_ptr font_data = FontLibrary::get().getFontData(draw_data->getFontInUse()); - TextureAtlas& atlas = const_cast(font_data->getAtlas()); - atlas.resetUpdate(); + std::shared_ptr draw_data = TextLibrary::Get().GetTextData(id); + std::shared_ptr font_data = FontLibrary::Get().GetFontData(draw_data->GetFontInUse()); + TextureAtlas& atlas = const_cast(font_data->GetAtlas()); + atlas.ResetUpdate(); } } diff --git a/runtime/Sources/Renderer/Texts/TextLibrary.cpp b/runtime/Sources/Renderer/Texts/TextLibrary.cpp index 9ed7d4c..5f995d9 100644 --- a/runtime/Sources/Renderer/Texts/TextLibrary.cpp +++ b/runtime/Sources/Renderer/Texts/TextLibrary.cpp @@ -1,64 +1,62 @@ /* ************************************************************************** */ /* */ /* ::: :::::::: */ -/* text_library.cpp :+: :+: :+: */ +/* TextLibrary.cpp :+: :+: :+: */ /* +:+ +:+ +:+ */ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/04/10 11:59:57 by maldavid #+# #+# */ -/* Updated: 2024/03/25 19:05:09 by maldavid ### ########.fr */ +/* Updated: 2024/04/24 01:40:28 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ -#include +#include -#include -#include -#include -#include -#include +#include +#include +#include namespace mlx { - std::shared_ptr TextLibrary::getTextData(TextID id) + std::shared_ptr TextLibrary::GetTextData(TextID id) { MLX_PROFILE_FUNCTION(); - if(!_cache.count(id)) - core::error::report(e_kind::fatal_error, "Text Library : wrong text ID '%d'", id); - return _cache[id]; + if(!m_cache.count(id)) + FatalError("Text Library : wrong text ID '%d'", id); + return m_cache[id]; } - TextID TextLibrary::addTextToLibrary(std::shared_ptr text) + TextID TextLibrary::AddTextToLibrary(std::shared_ptr text) { MLX_PROFILE_FUNCTION(); - auto it = std::find_if(_cache.begin(), _cache.end(), [&](const std::pair>& v) + auto it = std::find_if(m_cache.begin(), m_cache.end(), [&](const std::pair>& v) { - return v.second->getText() == text->getText() && v.second->getColor() == text->getColor(); + return v.second->GetText() == text->GetText() && v.second->GetColor() == text->GetColor(); }); - if(it != _cache.end()) + if(it != m_cache.end()) return it->first; - _cache[_current_id] = text; - _current_id++; - return _current_id - 1; + m_cache[m_current_id] = text; + m_current_id++; + return m_current_id - 1; } - void TextLibrary::removeTextFromLibrary(TextID id) + void 
TextLibrary::RemoveTextFromLibrary(TextID id) { MLX_PROFILE_FUNCTION(); - if(!_cache.count(id)) + if(!m_cache.count(id)) { - core::error::report(e_kind::warning, "Text Library : trying to remove a text with an unkown or invalid ID '%d'", id); + Warning("Text Library : trying to remove a text with an unkown or invalid ID '%d'", id); return; } - _cache[id]->destroy(); - _cache.erase(id); + m_cache[id]->Destroy(); + m_cache.erase(id); } - void TextLibrary::clearLibrary() + void TextLibrary::ClearLibrary() { MLX_PROFILE_FUNCTION(); - for(auto& [id, text] : _cache) - text->destroy(); - _cache.clear(); + for(auto& [id, text] : m_cache) + text->Destroy(); + m_cache.clear(); } } diff --git a/runtime/Sources/Renderer/Texts/TextManager.cpp b/runtime/Sources/Renderer/Texts/TextManager.cpp index 5430e2e..b660ea2 100644 --- a/runtime/Sources/Renderer/Texts/TextManager.cpp +++ b/runtime/Sources/Renderer/Texts/TextManager.cpp @@ -1,34 +1,32 @@ /* ************************************************************************** */ /* */ /* ::: :::::::: */ -/* text_manager.cpp :+: :+: :+: */ +/* TextManager.cpp :+: :+: :+: */ /* +:+ +:+ +:+ */ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/04/06 16:41:13 by maldavid #+# #+# */ -/* Updated: 2024/03/25 19:05:13 by maldavid ### ########.fr */ +/* Updated: 2024/04/24 01:42:19 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ -#include +#include -#include -#include -#include -#include -#include - -#include +#include +#include +#include +#include +#include namespace mlx { - void TextManager::init(Renderer& renderer) noexcept + void TextManager::Init(Renderer& renderer) noexcept { MLX_PROFILE_FUNCTION(); - loadFont(renderer, "default", 6.f); + LoadFont(renderer, "default", 6.f); } - void TextManager::loadFont(Renderer& renderer, const std::filesystem::path& filepath, float scale) + void TextManager::LoadFont(Renderer& renderer, const std::filesystem::path& filepath, float scale) { MLX_PROFILE_FUNCTION(); std::shared_ptr font; @@ -36,33 +34,32 @@ namespace mlx font = std::make_shared(renderer, "default", dogica_ttf, scale); else font = std::make_shared(renderer, filepath, scale); - - _font_in_use = FontLibrary::get().addFontToLibrary(font); + m_font_in_use = FontLibrary::Get().AddFontToLibrary(font); } - std::pair TextManager::registerText(int x, int y, std::uint32_t color, std::string str) + std::pair TextManager::RegisterText(int x, int y, std::uint32_t color, std::string str) { MLX_PROFILE_FUNCTION(); - auto res = _text_descriptors.emplace(std::move(str), color, x, y); + auto res = m_text_descriptors.emplace(std::move(str), color, x, y); if(res.second) { - const_cast(*res.first).init(_font_in_use); + const_cast(*res.first).Init(m_font_in_use); return std::make_pair(static_cast(&const_cast(*res.first)), true); } - auto text_ptr = TextLibrary::get().getTextData(res.first->id); - if(_font_in_use != text_ptr->getFontInUse()) + auto text_ptr = TextLibrary::Get().GetTextData(res.first->id); + if(_font_in_use != text_ptr->GetFontInUse()) { // TODO : update text vertex buffers rather than destroying it and recreating it - TextLibrary::get().removeTextFromLibrary(res.first->id); - const_cast(*res.first).init(_font_in_use); + TextLibrary::Get().RemoveTextFromLibrary(res.first->id); + const_cast(*res.first).Init(_font_in_use); } return std::make_pair(static_cast(&const_cast(*res.first)), false); } - void TextManager::destroy() noexcept + void TextManager::Destroy() noexcept { 
MLX_PROFILE_FUNCTION(); - _text_descriptors.clear(); + m_text_descriptors.clear(); } } From 37e9410d12d9bd52cbcfba9f1213ca4811395dcb Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Sat, 25 May 2024 16:47:28 +0200 Subject: [PATCH 012/131] removing GLFW support --- Makefile | 9 +- runtime/Includes/Core/Application.h | 6 +- runtime/Includes/Core/DriverLoader.inl | 30 ------ runtime/Includes/Core/Graphics.h | 4 +- .../Core/{DriverLoader.h => SDLManager.h} | 36 +++---- runtime/Includes/Drivers/DriverInstance.h | 35 ------- .../Drivers/GLFW/GLFWDriverInstance.h | 34 ------- .../Drivers/GLFW/GLFWDriverInstance.inl | 32 ------- runtime/Includes/Drivers/GLFW/GLFWInputs.h | 44 --------- runtime/Includes/Platform/Window.h | 11 +-- runtime/Includes/PreCompiled.h | 10 +- runtime/Includes/Utils/NonOwningPtr.inl | 8 +- runtime/Sources/Core/Application.cpp | 5 +- runtime/Sources/Core/SDLManager.cpp | 94 +++++++++++++++++++ runtime/Sources/Drivers/GLFW/GLFWInputs.cpp | 22 ----- runtime/Sources/Platform/Window.cpp | 28 +++--- xmake.lua | 5 + 17 files changed, 153 insertions(+), 260 deletions(-) delete mode 100644 runtime/Includes/Core/DriverLoader.inl rename runtime/Includes/Core/{DriverLoader.h => SDLManager.h} (52%) delete mode 100644 runtime/Includes/Drivers/DriverInstance.h delete mode 100644 runtime/Includes/Drivers/GLFW/GLFWDriverInstance.h delete mode 100644 runtime/Includes/Drivers/GLFW/GLFWDriverInstance.inl delete mode 100644 runtime/Includes/Drivers/GLFW/GLFWInputs.h create mode 100644 runtime/Sources/Core/SDLManager.cpp delete mode 100644 runtime/Sources/Drivers/GLFW/GLFWInputs.cpp diff --git a/Makefile b/Makefile index e488de3..e93dcfa 100644 --- a/Makefile +++ b/Makefile @@ -6,7 +6,7 @@ # By: maldavid +#+ +:+ +#+ # # +#+#+#+#+#+ +#+ # # Created: 2022/10/04 16:43:41 by maldavid #+# #+# # -# Updated: 2024/03/27 21:30:44 by maldavid ### ########.fr # +# Updated: 2024/05/25 16:08:57 by maldavid ### ########.fr # # # # **************************************************************************** # @@ -16,7 +16,6 @@ SRCS = $(wildcard $(addsuffix /*.cpp, ./runtime/Sources/Core)) SRCS += $(wildcard $(addsuffix /*.cpp, ./runtime/Sources/Platform)) SRCS += $(wildcard $(addsuffix /*.cpp, ./runtime/Sources/Renderer)) SRCS += $(wildcard $(addsuffix /*.cpp, ./runtime/Sources/Renderer/**)) -SRCS += $(wildcard $(addsuffix /*.cpp, ./runtime/Sources/Drivers/**)) OBJ_DIR = objs/makefile OBJS = $(addprefix $(OBJ_DIR)/, $(SRCS:.cpp=.o)) @@ -31,7 +30,7 @@ IMAGES_OPTIMIZED ?= true FORCE_INTEGRATED_GPU ?= false GRAPHICS_MEMORY_DUMP ?= false PROFILER ?= false -LEGACY ?= false +FORCE_WAYLAND ?= false MODE = "release" @@ -73,8 +72,8 @@ ifeq ($(PROFILER), true) CXXFLAGS += -D PROFILER endif -ifeq ($(LEGACY), true) - CXXFLAGS += -D LEGACY +ifeq ($(FORCE_WAYLAND), true) + CXXFLAGS += -D FORCE_WAYLAND endif RM = rm -rf diff --git a/runtime/Includes/Core/Application.h b/runtime/Includes/Core/Application.h index 316dc68..27de075 100644 --- a/runtime/Includes/Core/Application.h +++ b/runtime/Includes/Core/Application.h @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/04 21:49:46 by maldavid #+# #+# */ -/* Updated: 2024/04/21 20:39:33 by maldavid ### ########.fr */ +/* Updated: 2024/05/25 15:26:36 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -15,7 +15,6 @@ #include #include -#include #include #include @@ -61,7 +60,6 @@ namespace mlx private: FpsManager m_fps; Input m_in; - DriverLoader m_driver_loader; ImageRegistry 
m_image_registry; std::vector> m_graphics; std::function f_loop_hook; @@ -69,6 +67,6 @@ namespace mlx }; } -#include +#include #endif // __MLX_APPLICATION__ diff --git a/runtime/Includes/Core/DriverLoader.inl b/runtime/Includes/Core/DriverLoader.inl deleted file mode 100644 index 6104d76..0000000 --- a/runtime/Includes/Core/DriverLoader.inl +++ /dev/null @@ -1,30 +0,0 @@ -/* **************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* DriverLoader.inl :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2024/04/03 14:55:01 by maldavid #+# #+# */ -/* Updated: 2024/04/03 14:55:01 by maldavid ### ########.fr */ -/* */ -/* **************************************************************************** */ - -#pragma once -#include - -namespace mlx -{ - template - bool DriverLoader::LoadDriver() - { - m_instances.emplace_back(new T)->InitDriver(); - } - - void DriverLoader::ShutdownAllDrivers() - { - for(auto& driver : m_instances) - driver->ShutdownDriver(); - m_instances.clear(); - } -} diff --git a/runtime/Includes/Core/Graphics.h b/runtime/Includes/Core/Graphics.h index e29ce56..448e518 100644 --- a/runtime/Includes/Core/Graphics.h +++ b/runtime/Includes/Core/Graphics.h @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/04/02 14:49:49 by maldavid #+# #+# */ -/* Updated: 2024/04/23 14:02:48 by maldavid ### ########.fr */ +/* Updated: 2024/05/25 01:00:10 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -70,6 +70,6 @@ namespace mlx }; } -#include +#include #endif diff --git a/runtime/Includes/Core/DriverLoader.h b/runtime/Includes/Core/SDLManager.h similarity index 52% rename from runtime/Includes/Core/DriverLoader.h rename to runtime/Includes/Core/SDLManager.h index 18e5695..b32bf6f 100644 --- a/runtime/Includes/Core/DriverLoader.h +++ b/runtime/Includes/Core/SDLManager.h @@ -1,39 +1,41 @@ /* ************************************************************************** */ /* */ /* ::: :::::::: */ -/* DriverLoader.h :+: :+: :+: */ +/* SDLManager.h :+: :+: :+: */ /* +:+ +:+ +:+ */ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ -/* Created: 2024/04/02 16:56:10 by maldavid #+# #+# */ -/* Updated: 2024/04/03 15:02:44 by maldavid ### ########.fr */ +/* Created: 2024/05/25 15:28:59 by maldavid #+# #+# */ +/* Updated: 2024/05/25 16:11:50 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ -#ifndef __MLX_CORE_DRIVER_LOADER__ -#define __MLX_CORE_DRIVER_LOADER__ +#ifndef __MLX_SDL_MANAGER__ +#define __MLX_SDL_MANAGER__ -#include +#include namespace mlx { - class DriverLoader + class SDLManager : public Singleton { + friend class Singleton; + public: - DriverLoader() = default; + void Init() noexcept; + void Shutdown() noexcept; - template - inline bool LoadDriver(); - - inline void ShutdownAllDrivers(); - - ~DriverLoader() = default; + void* CreateWindow(const std::string& title, std::size_t w, std::size_t h); + void DestroyWindow(void* window) noexcept; private: - std::vector > m_instances; + SDLManager() = default; + ~SDLManager() = default; + + private: + std::unordered_set m_windows_registry; + bool m_drop_sdl_responsability = false; }; } -#include - #endif diff --git a/runtime/Includes/Drivers/DriverInstance.h b/runtime/Includes/Drivers/DriverInstance.h deleted file mode 100644 index c72cf97..0000000 --- 
a/runtime/Includes/Drivers/DriverInstance.h +++ /dev/null @@ -1,35 +0,0 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* DriverInstance.h :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2024/04/02 16:57:20 by maldavid #+# #+# */ -/* Updated: 2024/04/02 17:01:03 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - -#ifndef __MLX_DRIVER_INSTANCE__ -#define __MLX_DRIVER_INSTANCE__ - -namespace mlx -{ - class DriverInstance - { - public: - DriverInstance() = default; - - virtual bool InitDriver() { m_is_up = true; return true; } - virtual void ShutdownDriver() { m_is_up = false; } - - inline bool IsRunning() const noexcept { return m_is_up; } - - virtual ~DriverInstance() = default; - - private: - bool m_is_up = false; - }; -} - -#endif diff --git a/runtime/Includes/Drivers/GLFW/GLFWDriverInstance.h b/runtime/Includes/Drivers/GLFW/GLFWDriverInstance.h deleted file mode 100644 index fe1d124..0000000 --- a/runtime/Includes/Drivers/GLFW/GLFWDriverInstance.h +++ /dev/null @@ -1,34 +0,0 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* GLFWDriverInstance.h :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2024/04/02 17:01:51 by maldavid #+# #+# */ -/* Updated: 2024/04/02 17:04:12 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - -#ifndef __MLX_GLFW_DRIVER_INSTANCE__ -#define __MLX_GLFW_DRIVER_INSTANCE__ - -#include - -namespace mlx -{ - class GLFWDriverInstance : public DriverInstance - { - public: - GLFWDriverInstance() = default; - - inline bool InitDriver() override; - inline void ShutdownDriver() override; - - ~GLFWDriverInstance() override = default; - }; -} - -#include - -#endif diff --git a/runtime/Includes/Drivers/GLFW/GLFWDriverInstance.inl b/runtime/Includes/Drivers/GLFW/GLFWDriverInstance.inl deleted file mode 100644 index b96b66d..0000000 --- a/runtime/Includes/Drivers/GLFW/GLFWDriverInstance.inl +++ /dev/null @@ -1,32 +0,0 @@ -/* **************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* GLFWDriverInstance.inl :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2024/04/02 17:04:23 by maldavid #+# #+# */ -/* Updated: 2024/04/02 17:04:23 by maldavid ### ########.fr */ -/* */ -/* **************************************************************************** */ - -#include - -namespace mlx -{ - bool GLFWDriverInstance::InitDriver() - { - glfwSetErrorCallback([]([[maybe_unused]] int code, const char* desc) - { - FatalError("GLFW Driver Error : %", desc); - }); - glfwInit(); - DebugLog("GLFW Driver loaded"); - } - - void GLFWDriverInstance::ShutdownDriver() - { - glfwTerminate(); - DebugLog("GLFW Driver shutted down"); - } -} diff --git a/runtime/Includes/Drivers/GLFW/GLFWInputs.h b/runtime/Includes/Drivers/GLFW/GLFWInputs.h deleted file mode 100644 index 0901ec9..0000000 --- a/runtime/Includes/Drivers/GLFW/GLFWInputs.h +++ /dev/null @@ -1,44 +0,0 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* GLFWInputs.h :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2024/03/27 18:32:29 by maldavid #+# #+# */ -/* 
Updated: 2024/03/27 18:37:58 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - -#ifndef __MLX_GLFW_INPUTS__ -#define __MLX_GLFW_INPUTS__ - -#include - -namespace mlx -{ - class GLFWInputs : public Inputs - { - public: - GLFWInputs() = default; - - void Update() override noexcept; - - void RegisterWindow(std::shared_ptr window) override; - - inline std::int32_t GetX() override const noexcept { return m_x; } - inline std::int32_t GetY() override const noexcept { return m_y; } - inline std::int32_t GetXRel() override const noexcept { return m_x_rel; } - inline std::int32_t GetYRel() override const noexcept { return m_y_rel; } - - ~GLFWInputs() override = default; - - private: - std::int32_t m_x = 0; - std::int32_t m_y = 0; - std::int32_t m_x_rel = 0; - std::int32_t m_y_rel = 0; - }; -} - -#endif diff --git a/runtime/Includes/Platform/Window.h b/runtime/Includes/Platform/Window.h index d9e15a1..30c3945 100644 --- a/runtime/Includes/Platform/Window.h +++ b/runtime/Includes/Platform/Window.h @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/04 21:53:12 by maldavid #+# #+# */ -/* Updated: 2024/03/27 21:58:18 by maldavid ### ########.fr */ +/* Updated: 2024/05/25 16:11:00 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -15,18 +15,12 @@ namespace mlx { - #ifdef LEGACY - using WindowHandle = SDL_Window; - #else - using WindowHandle = GLFWwindow; - #endif - class Window { public: Window(std::size_t w, std::size_t h, const std::string& title); - inline NonOwningPtr GetWindowHandle() const noexcept = 0; + inline void* GetWindowHandle() const noexcept { return p_window; } inline int GetWidth() const noexcept { return m_width; } inline int GetHeight() const noexcept { return m_height; } inline std::uint32_t GetID() const noexcept { return m_id; } @@ -36,6 +30,7 @@ namespace mlx ~Window() = default; private: + void* p_window = nullptr; std::uint32_t m_id = -1; int m_width = 0; int m_height = 0; diff --git a/runtime/Includes/PreCompiled.h b/runtime/Includes/PreCompiled.h index 07647ff..d5f3ace 100644 --- a/runtime/Includes/PreCompiled.h +++ b/runtime/Includes/PreCompiled.h @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2024/03/25 17:37:23 by maldavid #+# #+# */ -/* Updated: 2024/04/23 13:49:52 by maldavid ### ########.fr */ +/* Updated: 2024/05/25 15:46:37 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -23,12 +23,8 @@ #include #include -#ifdef MLX_LEGACY - #include - #include -#else - #include -#endif +#include +#include #include #include diff --git a/runtime/Includes/Utils/NonOwningPtr.inl b/runtime/Includes/Utils/NonOwningPtr.inl index 0247092..b36a337 100644 --- a/runtime/Includes/Utils/NonOwningPtr.inl +++ b/runtime/Includes/Utils/NonOwningPtr.inl @@ -16,7 +16,7 @@ namespace mlx { template NonOwningPtr::NonOwningPtr(T* ptr) : p_ptr(ptr) {} - + template NonOwningPtr::NonOwningPtr(NonOwningPtr&& ptr) noexcept : p_ptr(ptr.p_ptr) { @@ -28,7 +28,7 @@ namespace mlx { p_ptr = ptr; } - + template NonOwningPtr& NonOwningPtr::operator=(NonOwningPtr&& ptr) noexcept { @@ -47,13 +47,13 @@ namespace mlx { return p_ptr; } - + template T* NonOwningPtr::operator->() const noexcept { return p_ptr; } - + template T& NonOwningPtr::operator*() const noexcept { diff --git a/runtime/Sources/Core/Application.cpp 
b/runtime/Sources/Core/Application.cpp index d0c754b..a097b9c 100644 --- a/runtime/Sources/Core/Application.cpp +++ b/runtime/Sources/Core/Application.cpp @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/04 22:10:52 by maldavid #+# #+# */ -/* Updated: 2024/04/23 15:06:26 by maldavid ### ########.fr */ +/* Updated: 2024/05/25 16:06:57 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -19,6 +19,7 @@ #include #include #include +#include namespace mlx { @@ -29,6 +30,7 @@ namespace mlx }, "__internal_application" }); m_fps.init(); + SDLManager::Get().Init(); } void Application::Run() noexcept @@ -111,5 +113,6 @@ namespace mlx { TextLibrary::Get().ClearLibrary(); FontLibrary::Get().ClearLibrary(); + SDLManager::Get().Shutdown(); } } diff --git a/runtime/Sources/Core/SDLManager.cpp b/runtime/Sources/Core/SDLManager.cpp new file mode 100644 index 0000000..c0f27b9 --- /dev/null +++ b/runtime/Sources/Core/SDLManager.cpp @@ -0,0 +1,94 @@ +/* ************************************************************************** */ +/* */ +/* ::: :::::::: */ +/* SDLManager.cpp :+: :+: :+: */ +/* +:+ +:+ +:+ */ +/* By: maldavid +#+ +:+ +#+ */ +/* +#+#+#+#+#+ +#+ */ +/* Created: 2024/05/25 15:44:03 by maldavid #+# #+# */ +/* Updated: 2024/05/25 16:46:48 by maldavid ### ########.fr */ +/* */ +/* ************************************************************************** */ + +#include +#include +#include +#include + +namespace mlx +{ + #if SDL_BYTEORDER == SDL_BIG_ENDIAN + constexpr const std::uint32_t rmask = 0xff000000; + constexpr const std::uint32_t gmask = 0x00ff0000; + constexpr const std::uint32_t bmask = 0x0000ff00; + constexpr const std::uint32_t amask = 0x000000ff; + #else + constexpr const std::uint32_t rmask = 0x000000ff; + constexpr const std::uint32_t gmask = 0x0000ff00; + constexpr const std::uint32_t bmask = 0x00ff0000; + constexpr const std::uint32_t amask = 0xff000000; + #endif + + namespace details + { + struct WindowInfos + { + SDL_Window* window; + SDL_Surface* icon; + }; + } + + void SDLManager::Init() noexcept + { + MLX_PROFILE_FUNCTION(); + m_drop_sdl_responsability = SDL_WasInit(SDL_INIT_VIDEO); + if(m_drop_sdl_responsability) // is case the mlx is running in a sandbox like MacroUnitTester where SDL is already init + return; + SDL_SetMemoryFunctions(MemManager::malloc, MemManager::calloc, MemManager::realloc, MemManager::free); + + #ifdef FORCE_WAYLAND + SDL_SetHint(SDL_HINT_VIDEODRIVER, "wayland,x11"); + #endif + + if(SDL_Init(SDL_INIT_VIDEO | SDL_INIT_EVENTS | SDL_INIT_TIMER) != 0) + FatalError("SDL : unable to init all subsystems; %", SDL_GetError()); + } + + void* SDLManager::CreateWindow(const std::string& title, std::size_t w, std::size_t h) + { + details::WindowInfos* infos = new details::WindowInfos; + Verify(infos != nullptr, "SDL : window allocation failed"); + + infos->window = SDL_CreateWindow(title.c_str(), SDL_WINDOWPOS_CENTERED, SDL_WINDOWPOS_CENTERED, w, h, SDL_WINDOW_VULKAN | SDL_WINDOW_SHOWN); + if(!infos->window) + FatalError("SDL : unable to open a new window; %", SDL_GetError()); + infos->icon = SDL_CreateRGBSurfaceFrom(static_cast(logo_mlx), logo_mlx_width, logo_mlx_height, 32, 4 * logo_mlx_width, rmask, gmask, bmask, amask); + SDL_SetWindowIcon(infos->window, infos->icon); + + m_windows_registry.insert(infos); + + return infos; + } + + void SDLManager::DestroyWindow(void* window) noexcept + { + Verify(m_windows_registry.find(window) != m_windows_registry.end(), 
"SDL : cannot destroy window; unknown window pointer"); + + details::WindowInfos* infos = static_cast(window); + if(infos->window != nullptr) + SDL_DestroyWindow(infos->window); + if(infos->icon != nullptr) + SDL_FreeSurface(infos->icon); + + m_windows_registry.erase(infos); + delete infos; + } + + void SDLManager::Shutdown() noexcept + { + if(m_drop_sdl_responsability) + return; + SDL_QuitSubSystem(SDL_INIT_VIDEO | SDL_INIT_TIMER | SDL_INIT_EVENTS); + SDL_Quit(); + } +} diff --git a/runtime/Sources/Drivers/GLFW/GLFWInputs.cpp b/runtime/Sources/Drivers/GLFW/GLFWInputs.cpp deleted file mode 100644 index 9379cf1..0000000 --- a/runtime/Sources/Drivers/GLFW/GLFWInputs.cpp +++ /dev/null @@ -1,22 +0,0 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* GLFWInputs.cpp :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2024/03/27 18:39:32 by maldavid #+# #+# */ -/* Updated: 2024/03/27 18:42:18 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - -#include -#include - -namespace mlx -{ - void GLFWInputs::Update() noexcept - { - glfwPollEvents(); - } -} diff --git a/runtime/Sources/Platform/Window.cpp b/runtime/Sources/Platform/Window.cpp index 182f18f..1b48a77 100644 --- a/runtime/Sources/Platform/Window.cpp +++ b/runtime/Sources/Platform/Window.cpp @@ -1,40 +1,38 @@ /* ************************************************************************** */ /* */ /* ::: :::::::: */ -/* window.cpp :+: :+: :+: */ +/* Window.cpp :+: :+: :+: */ /* +:+ +:+ +:+ */ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/04 17:36:44 by maldavid #+# #+# */ -/* Updated: 2024/03/26 23:03:59 by maldavid ### ########.fr */ +/* Updated: 2024/05/25 16:13:31 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ -#include +#include -#include -#include -#include +#include +#include namespace mlx { - Window::Window(std::size_t w, std::size_t h, const std::string& title) : _width(w), _height(h) + Window::Window(std::size_t w, std::size_t h, const std::string& title) : m_width(w), m_height(h) { static std::uint64_t ids = 0; if(title.find("vvaas") != std::string::npos) - core::error::report(e_kind::message, "vvaas est mauvais"); - glfwWindowHint(GLFW_CLIENT_API, GLFW_NO_API); - _win = glfwCreateWindow(_width, _height, title.c_str(), NULL, NULL);; - _id = ids++; + Message("vvaas est mauvais"); + p_window = SDLManager::Get().CreateWindow(title, w, h); + m_id = ids++; } - void Window::destroy() noexcept + void Window::Destroy() noexcept { - if(_win != nullptr) + if(p_window != nullptr) { - glfwDestroyWindow(_win); - _win = nullptr; + SDLManager::Get().DestroyWindow(p_window); + p_window = nullptr; } } } diff --git a/xmake.lua b/xmake.lua index 449cb19..d9f1b9b 100644 --- a/xmake.lua +++ b/xmake.lua @@ -44,6 +44,11 @@ option("profiler") add_defines("PROFILER") option_end() +option("force_wayland") + set_default(false) + add_defines("FORCE_WAYLAND") +option_end() + -- Targets target("mlx") From 7ecee717f8048e5d6e723aa0f1df23e84b85a527 Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Fri, 5 Jul 2024 22:15:36 +0200 Subject: [PATCH 013/131] fixing compoilation issues --- Makefile | 6 +++--- runtime/Includes/Core/Application.h | 4 ++-- runtime/Includes/Core/Enums.h | 5 +++-- runtime/Includes/Core/Graphics.h | 6 +++--- runtime/Includes/Core/Graphics.inl | 10 +++++----- 
runtime/Includes/Core/Profiler.h | 6 +++--- runtime/Includes/Core/SDLManager.h | 4 +++- runtime/Includes/Platform/Inputs.h | 16 +++++++--------- runtime/Includes/PreCompiled.h | 5 +++-- runtime/Includes/Renderer/Buffers/IndexBuffer.h | 4 ++-- .../Includes/Renderer/Buffers/UniformBuffer.h | 12 ++++++------ runtime/Includes/Renderer/Buffers/VertexBuffer.h | 4 ++-- .../Includes/Renderer/Command/CommandBuffer.h | 4 ++-- .../Renderer/Command/SingleTimeCmdManager.h | 8 ++++---- runtime/Includes/Renderer/Core/Queues.h | 6 +++--- runtime/Includes/Renderer/Core/RenderCore.h | 4 ++-- .../Renderer/Descriptors/DescriptorSet.h | 4 ++-- runtime/Includes/Renderer/Images/Image.h | 4 ++-- runtime/Includes/Renderer/Images/Texture.h | 4 ++-- runtime/Includes/Renderer/Images/TextureAtlas.h | 4 ++-- .../Includes/Renderer/Images/TextureRegistry.h | 4 ++-- runtime/Includes/Renderer/Renderer.h | 6 +++--- runtime/Includes/Renderer/Texts/Font.h | 6 +++--- runtime/Includes/Renderer/Texts/TextDescriptor.h | 4 ++-- runtime/Sources/Core/Logs.cpp | 6 ++---- runtime/Sources/Platform/Window.cpp | 5 +---- 26 files changed, 74 insertions(+), 77 deletions(-) diff --git a/Makefile b/Makefile index e93dcfa..b0b4275 100644 --- a/Makefile +++ b/Makefile @@ -6,7 +6,7 @@ # By: maldavid +#+ +:+ +#+ # # +#+#+#+#+#+ +#+ # # Created: 2022/10/04 16:43:41 by maldavid #+# #+# # -# Updated: 2024/05/25 16:08:57 by maldavid ### ########.fr # +# Updated: 2024/07/05 13:34:03 by maldavid ### ########.fr # # # # **************************************************************************** # @@ -85,10 +85,10 @@ $(OBJ_DIR)/%.o: %.cpp $(GCH) all: $(NAME) $(GCH): - @printf "\033[1;32m[compiling "$(MODE)" "$(CXX)"]\033[1;00m PreCompiled header\n" + @printf "\033[1;32m[compiling... "$(MODE)" "$(CXX)"]\033[1;00m PreCompiled header\n" @$(CXX) $(CXXFLAGS) $(INCLUDES) -c $(PCH) -o $(GCH) -$(NAME): $(GCH) $(OBJ_DIR) $(OBJS) +$(NAME): $(OBJ_DIR) $(GCH) $(OBJS) @printf "\033[1;32m[linking ... 
"$(MODE)"]\033[1;00m "$@"\n" @$(CXX) -shared -o $(NAME) $(OBJS) $(LDLIBS) @printf "\033[1;32m[build finished]\033[1;00m\n" diff --git a/runtime/Includes/Core/Application.h b/runtime/Includes/Core/Application.h index 27de075..fed04e2 100644 --- a/runtime/Includes/Core/Application.h +++ b/runtime/Includes/Core/Application.h @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/04 21:49:46 by maldavid #+# #+# */ -/* Updated: 2024/05/25 15:26:36 by maldavid ### ########.fr */ +/* Updated: 2024/07/05 14:04:19 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -59,7 +59,7 @@ namespace mlx private: FpsManager m_fps; - Input m_in; + Inputs m_in; ImageRegistry m_image_registry; std::vector> m_graphics; std::function f_loop_hook; diff --git a/runtime/Includes/Core/Enums.h b/runtime/Includes/Core/Enums.h index 80ef797..68f036b 100644 --- a/runtime/Includes/Core/Enums.h +++ b/runtime/Includes/Core/Enums.h @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2024/03/27 17:15:24 by maldavid #+# #+# */ -/* Updated: 2024/03/27 17:16:03 by maldavid ### ########.fr */ +/* Updated: 2024/07/05 13:23:10 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -23,11 +23,12 @@ namespace mlx Warning, Error, FatalError, + Debug, EndEnum }; - constexpr std::size_t LogTypeCount = static_cast(LogType::EndEnum) + 1; + constexpr std::size_t LogTypeCount = static_cast(LogType::EndEnum); } #endif diff --git a/runtime/Includes/Core/Graphics.h b/runtime/Includes/Core/Graphics.h index 448e518..8ab5bbf 100644 --- a/runtime/Includes/Core/Graphics.h +++ b/runtime/Includes/Core/Graphics.h @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/04/02 14:49:49 by maldavid #+# #+# */ -/* Updated: 2024/05/25 01:00:10 by maldavid ### ########.fr */ +/* Updated: 2024/07/05 13:46:58 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -17,7 +17,7 @@ #include #include #include -#include +#include #include #include #include @@ -55,7 +55,7 @@ namespace mlx std::vector> m_drawlist; TextManager m_text_manager; - TextureManager m_texture_manager; + TextureRegistry m_texture_registry; glm::mat4 m_proj = glm::mat4(1.0); diff --git a/runtime/Includes/Core/Graphics.inl b/runtime/Includes/Core/Graphics.inl index 6e9771e..50573a1 100644 --- a/runtime/Includes/Core/Graphics.inl +++ b/runtime/Includes/Core/Graphics.inl @@ -23,7 +23,7 @@ namespace mlx m_drawlist.clear(); m_pixel_put_pipeline.Clear(); m_text_manager.Clear(); - m_texture_manager.Clear(); + m_texture_registry.Clear(); } void GraphicsSupport::PixelPut(int x, int y, std::uint32_t color) noexcept @@ -48,7 +48,7 @@ namespace mlx void GraphicsSupport::TexturePut(NonOwningPtr texture, int x, int y) { MLX_PROFILE_FUNCTION(); - auto res = m_texture_manager.RegisterTexture(texture, x, y); + auto res = m_texture_registry.RegisterTexture(texture, x, y); if(!res.second) // if this is not a completly new texture draw { auto it = std::find(m_drawlist.begin(), m_drawlist.end(), res.first); @@ -61,7 +61,7 @@ namespace mlx void GraphicsSupport::LoadFont(const std::filesystem::path& filepath, float scale) { MLX_PROFILE_FUNCTION(); - m_text_manager.LoadFont(*_renderer, filepath, scale); + m_text_manager.LoadFont(m_renderer, filepath, scale); } void GraphicsSupport::TryEraseTextureFromManager(NonOwningPtr texture) 
noexcept @@ -69,11 +69,11 @@ namespace mlx MLX_PROFILE_FUNCTION(); for(auto it = m_drawlist.begin(); it != m_drawlist.end();) { - if(m_texture_manager.IsTextureKnown(texture)) + if(m_texture_registry.IsTextureKnown(texture)) it = m_drawlist.erase(it); else ++it; } - m_texture_manager.EraseTextures(texture); + m_texture_registry.EraseTextures(texture); } } diff --git a/runtime/Includes/Core/Profiler.h b/runtime/Includes/Core/Profiler.h index dbd6b1f..9a24f42 100644 --- a/runtime/Includes/Core/Profiler.h +++ b/runtime/Includes/Core/Profiler.h @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2024/01/10 13:35:45 by maldavid #+# #+# */ -/* Updated: 2024/03/27 21:19:01 by maldavid ### ########.fr */ +/* Updated: 2024/07/05 13:24:17 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -76,7 +76,7 @@ namespace mlx auto high_res_start = FloatingPointMilliseconds{ m_start_timepoint.time_since_epoch() }; auto elapsed_time = std::chrono::time_point_cast(end_timepoint).time_since_epoch() - std::chrono::time_point_cast(m_start_timepoint).time_since_epoch(); - Profiler::get().appendProfileData({ m_name, elapsed_time, std::this_thread::get_id() }); + Profiler::Get().AppendProfileData({ m_name, elapsed_time, std::this_thread::get_id() }); m_stopped = true; } @@ -84,7 +84,7 @@ namespace mlx ~ProfilerTimer() { if(!m_stopped) - stop(); + Stop(); } private: diff --git a/runtime/Includes/Core/SDLManager.h b/runtime/Includes/Core/SDLManager.h index b32bf6f..8ad7ea7 100644 --- a/runtime/Includes/Core/SDLManager.h +++ b/runtime/Includes/Core/SDLManager.h @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2024/05/25 15:28:59 by maldavid #+# #+# */ -/* Updated: 2024/05/25 16:11:50 by maldavid ### ########.fr */ +/* Updated: 2024/07/05 22:15:22 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -28,6 +28,8 @@ namespace mlx void* CreateWindow(const std::string& title, std::size_t w, std::size_t h); void DestroyWindow(void* window) noexcept; + void SetEventCallback(); + private: SDLManager() = default; ~SDLManager() = default; diff --git a/runtime/Includes/Platform/Inputs.h b/runtime/Includes/Platform/Inputs.h index aa8ad0f..ca3d485 100644 --- a/runtime/Includes/Platform/Inputs.h +++ b/runtime/Includes/Platform/Inputs.h @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/05 16:27:35 by maldavid #+# #+# */ -/* Updated: 2024/03/27 18:36:21 by maldavid ### ########.fr */ +/* Updated: 2024/07/05 20:35:09 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -30,14 +30,12 @@ namespace mlx public: Inputs() = default; - virtual void Update() noexcept = 0; + void RegisterWindow(std::shared_ptr window); - virtual void RegisterWindow(std::shared_ptr window) = 0; - - virtual std::int32_t GetX() const noexcept = 0; - virtual std::int32_t GetY() const noexcept = 0; - virtual std::int32_t GetXRel() const noexcept = 0; - virtual std::int32_t GetYRel() const noexcept = 0; + std::int32_t GetX() const noexcept; + std::int32_t GetY() const noexcept; + std::int32_t GetXRel() const noexcept; + std::int32_t GetYRel() const noexcept; inline bool IsMouseMoving() const noexcept { return GetXRel() || GetYRel(); } MLX_FORCEINLINE bool IsRunning() const noexcept { return m_run; } @@ -50,7 +48,7 @@ namespace mlx m_events_hooks[id][event].param = 
param; } - virtual ~Inputs() = default; + ~Inputs() = default; protected: std::unordered_map> m_windows; diff --git a/runtime/Includes/PreCompiled.h b/runtime/Includes/PreCompiled.h index d5f3ace..6601fe9 100644 --- a/runtime/Includes/PreCompiled.h +++ b/runtime/Includes/PreCompiled.h @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2024/03/25 17:37:23 by maldavid #+# #+# */ -/* Updated: 2024/05/25 15:46:37 by maldavid ### ########.fr */ +/* Updated: 2024/07/05 13:25:07 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -37,6 +37,7 @@ #include #include #include +#include #include #include #include @@ -78,7 +79,7 @@ #undef Window #include -#include +#include #include #include diff --git a/runtime/Includes/Renderer/Buffers/IndexBuffer.h b/runtime/Includes/Renderer/Buffers/IndexBuffer.h index 4a8f4d8..877d518 100644 --- a/runtime/Includes/Renderer/Buffers/IndexBuffer.h +++ b/runtime/Includes/Renderer/Buffers/IndexBuffer.h @@ -6,14 +6,14 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/01/25 15:05:05 by maldavid #+# #+# */ -/* Updated: 2024/03/28 22:11:05 by maldavid ### ########.fr */ +/* Updated: 2024/07/05 13:24:46 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __VK_IBO__ #define __VK_IBO__ -#include +#include #include namespace mlx diff --git a/runtime/Includes/Renderer/Buffers/UniformBuffer.h b/runtime/Includes/Renderer/Buffers/UniformBuffer.h index 6b84cd5..d7f6584 100644 --- a/runtime/Includes/Renderer/Buffers/UniformBuffer.h +++ b/runtime/Includes/Renderer/Buffers/UniformBuffer.h @@ -6,14 +6,14 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/06 18:45:29 by maldavid #+# #+# */ -/* Updated: 2024/04/23 14:23:56 by maldavid ### ########.fr */ +/* Updated: 2024/07/05 13:39:32 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __MLX_VK_UBO__ #define __MLX_VK_UBO__ -#include +#include namespace mlx { @@ -33,10 +33,10 @@ namespace mlx VkBuffer& operator()() noexcept; VkBuffer& Get() noexcept; - inline VkDeviceSize GetSize(int i) noexcept { return m_buffers[i].getSize(); } - inline VkDeviceSize GetOffset(int i) noexcept { return m_buffers[i].getOffset(); } - inline VkBuffer& operator()(int i) noexcept { return m_buffers[i].get(); } - inline VkBuffer& Get(int i) noexcept { return m_buffers[i].get(); } + inline VkDeviceSize GetSize(int i) noexcept { return m_buffers[i].GetSize(); } + inline VkDeviceSize GetOffset(int i) noexcept { return m_buffers[i].GetOffset(); } + inline VkBuffer& operator()(int i) noexcept { return m_buffers[i].Get(); } + inline VkBuffer& Get(int i) noexcept { return m_buffers[i].Get(); } ~UniformBuffer() = default; diff --git a/runtime/Includes/Renderer/Buffers/VertexBuffer.h b/runtime/Includes/Renderer/Buffers/VertexBuffer.h index ec193b7..8feb7c0 100644 --- a/runtime/Includes/Renderer/Buffers/VertexBuffer.h +++ b/runtime/Includes/Renderer/Buffers/VertexBuffer.h @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/06 18:27:38 by maldavid #+# #+# */ -/* Updated: 2024/03/28 22:23:32 by maldavid ### ########.fr */ +/* Updated: 2024/07/05 13:24:41 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -14,7 +14,7 @@ #define __MLX_VK_VBO__ #include -#include +#include #include 
namespace mlx diff --git a/runtime/Includes/Renderer/Command/CommandBuffer.h b/runtime/Includes/Renderer/Command/CommandBuffer.h index b5be0ad..90a0a48 100644 --- a/runtime/Includes/Renderer/Command/CommandBuffer.h +++ b/runtime/Includes/Renderer/Command/CommandBuffer.h @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/06 18:25:42 by maldavid #+# #+# */ -/* Updated: 2024/04/23 17:59:50 by maldavid ### ########.fr */ +/* Updated: 2024/07/05 13:37:54 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -32,7 +32,7 @@ namespace mlx void Submit(NonOwningPtr signal, NonOwningPtr wait) noexcept; void SubmitIdle(bool shouldWaitForExecution = true) noexcept; // TODO : handle `shouldWaitForExecution` as false by default (needs to modify CmdResources lifetimes to do so) void UpdateSubmitState() noexcept; - inline void WaitForExecution() noexcept { m_fence.wait(); UpdateSubmitState(); m_state = CommandBufferState::Ready; } + inline void WaitForExecution() noexcept { m_fence.Wait(); UpdateSubmitState(); m_state = CommandBufferState::Ready; } inline void Reset() noexcept { vkResetCommandBuffer(m_cmd_buffer, 0); } void EndRecord(); diff --git a/runtime/Includes/Renderer/Command/SingleTimeCmdManager.h b/runtime/Includes/Renderer/Command/SingleTimeCmdManager.h index be7f896..1ac2ec6 100644 --- a/runtime/Includes/Renderer/Command/SingleTimeCmdManager.h +++ b/runtime/Includes/Renderer/Command/SingleTimeCmdManager.h @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/12/15 18:25:57 by maldavid #+# #+# */ -/* Updated: 2024/03/27 22:46:48 by maldavid ### ########.fr */ +/* Updated: 2024/07/05 13:38:11 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -34,15 +34,15 @@ namespace mlx void WaitForAllExecutions() noexcept; inline CommandPool& GetCmdPool() noexcept { return m_pool; } - CommanddBuffer& GetCmdBuffer() noexcept; + CommandBuffer& GetCmdBuffer() noexcept; ~SingleTimeCmdManager() = default; inline static constexpr const std::uint8_t BASE_POOL_SIZE = 16; private: - std::vector m_buffers; - CommanddPool m_pool; + std::vector m_buffers; + CommandPool m_pool; }; } diff --git a/runtime/Includes/Renderer/Core/Queues.h b/runtime/Includes/Renderer/Core/Queues.h index 3986e17..2fdc7ec 100644 --- a/runtime/Includes/Renderer/Core/Queues.h +++ b/runtime/Includes/Renderer/Core/Queues.h @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/08 19:01:49 by maldavid #+# #+# */ -/* Updated: 2024/03/28 22:05:15 by maldavid ### ########.fr */ +/* Updated: 2024/07/05 13:38:50 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -31,8 +31,8 @@ namespace mlx void Init(); - inline VkQueue& GetGraphic() noexcept { return _graphics_queue; } - inline VkQueue& GetPresent() noexcept { return _present_queue; } + inline VkQueue& GetGraphic() noexcept { return m_graphics_queue; } + inline VkQueue& GetPresent() noexcept { return m_present_queue; } inline QueueFamilyIndices GetFamilies() noexcept { if(m_families.has_value()) diff --git a/runtime/Includes/Renderer/Core/RenderCore.h b/runtime/Includes/Renderer/Core/RenderCore.h index a976b92..b0939e7 100644 --- a/runtime/Includes/Renderer/Core/RenderCore.h +++ b/runtime/Includes/Renderer/Core/RenderCore.h @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* 
Created: 2022/10/08 19:16:32 by maldavid #+# #+# */ -/* Updated: 2024/03/27 22:55:43 by maldavid ### ########.fr */ +/* Updated: 2024/07/05 13:39:11 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -20,7 +20,7 @@ #include #include #include -#include +#include #include diff --git a/runtime/Includes/Renderer/Descriptors/DescriptorSet.h b/runtime/Includes/Renderer/Descriptors/DescriptorSet.h index c50aec9..f687c8c 100644 --- a/runtime/Includes/Renderer/Descriptors/DescriptorSet.h +++ b/runtime/Includes/Renderer/Descriptors/DescriptorSet.h @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/01/23 18:39:36 by maldavid #+# #+# */ -/* Updated: 2024/04/23 22:14:48 by maldavid ### ########.fr */ +/* Updated: 2024/07/05 13:42:22 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -28,7 +28,7 @@ namespace mlx void WriteDescriptor(int binding, NonOwningPtr ubo) const noexcept; void WriteDescriptor(int binding, const class Image& image) const noexcept; - inline bool IsInit() const noexcept { return m_pool != nullptr && m_renderer != nullptr; } + inline bool IsInit() const noexcept { return p_pool && p_renderer; } void Bind() noexcept; diff --git a/runtime/Includes/Renderer/Images/Image.h b/runtime/Includes/Renderer/Images/Image.h index 2f71614..e86b12d 100644 --- a/runtime/Includes/Renderer/Images/Image.h +++ b/runtime/Includes/Renderer/Images/Image.h @@ -6,14 +6,14 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/01/25 11:54:21 by maldavid #+# #+# */ -/* Updated: 2024/04/23 20:00:53 by maldavid ### ########.fr */ +/* Updated: 2024/07/05 13:40:51 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #ifndef __MLX_VK_IMAGE__ #define __MLX_VK_IMAGE__ -#include +#include #include #include diff --git a/runtime/Includes/Renderer/Images/Texture.h b/runtime/Includes/Renderer/Images/Texture.h index 16091a5..5cc468d 100644 --- a/runtime/Includes/Renderer/Images/Texture.h +++ b/runtime/Includes/Renderer/Images/Texture.h @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/03/08 02:24:58 by maldavid #+# #+# */ -/* Updated: 2024/04/23 20:03:59 by maldavid ### ########.fr */ +/* Updated: 2024/07/05 13:42:41 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -33,7 +33,7 @@ namespace mlx int GetPixel(int x, int y) noexcept; inline void SetDescriptor(DescriptorSet&& set) noexcept { m_set = set; } - inline VkDescriptorSet GetSet() noexcept { return m_set.IsInit() ? m_set.get() : VK_NULL_HANDLE; } + inline VkDescriptorSet GetSet() noexcept { return m_set.IsInit() ? 
m_set.Get() : VK_NULL_HANDLE; } inline void UpdateSet(int binding) noexcept { m_set.WriteDescriptor(binding, *this); m_has_set_been_updated = true; } inline bool HasBeenUpdated() const noexcept { return m_has_set_been_updated; } inline constexpr void ResetUpdate() noexcept { m_has_set_been_updated = false; } diff --git a/runtime/Includes/Renderer/Images/TextureAtlas.h b/runtime/Includes/Renderer/Images/TextureAtlas.h index 34802fa..669097f 100644 --- a/runtime/Includes/Renderer/Images/TextureAtlas.h +++ b/runtime/Includes/Renderer/Images/TextureAtlas.h @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/04/07 16:36:33 by maldavid #+# #+# */ -/* Updated: 2024/04/24 01:37:54 by maldavid ### ########.fr */ +/* Updated: 2024/07/05 13:52:40 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -28,7 +28,7 @@ namespace mlx inline void SetDescriptor(DescriptorSet&& set) noexcept { m_set = set; } inline DescriptorSet GetSet() noexcept { return m_set; } - inline void UpdateSet(int binding) noexcept { m_set.writeDescriptor(binding, *this); m_has_been_updated = true; } + inline void UpdateSet(int binding) noexcept { m_set.WriteDescriptor(binding, *this); m_has_been_updated = true; } inline bool HasBeenUpdated() const noexcept { return m_has_been_updated; } inline constexpr void ResetUpdate() noexcept { m_has_been_updated = false; } diff --git a/runtime/Includes/Renderer/Images/TextureRegistry.h b/runtime/Includes/Renderer/Images/TextureRegistry.h index 432ab21..c1248e9 100644 --- a/runtime/Includes/Renderer/Images/TextureRegistry.h +++ b/runtime/Includes/Renderer/Images/TextureRegistry.h @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2024/01/11 00:56:15 by maldavid #+# #+# */ -/* Updated: 2024/04/23 22:10:08 by maldavid ### ########.fr */ +/* Updated: 2024/07/05 13:43:48 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -34,6 +34,6 @@ namespace mlx }; } -#include +#include #endif diff --git a/runtime/Includes/Renderer/Renderer.h b/runtime/Includes/Renderer/Renderer.h index 9a0767f..a7ff20c 100644 --- a/runtime/Includes/Renderer/Renderer.h +++ b/runtime/Includes/Renderer/Renderer.h @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/12/18 17:14:45 by maldavid #+# #+# */ -/* Updated: 2024/04/24 01:53:20 by maldavid ### ########.fr */ +/* Updated: 2024/07/05 13:40:20 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -18,9 +18,9 @@ #include #include #include -#include +#include #include -#include +#include #include #include #include diff --git a/runtime/Includes/Renderer/Texts/Font.h b/runtime/Includes/Renderer/Texts/Font.h index ebf873f..91b6c4c 100644 --- a/runtime/Includes/Renderer/Texts/Font.h +++ b/runtime/Includes/Renderer/Texts/Font.h @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/12/11 21:17:04 by kbz_8 #+# #+# */ -/* Updated: 2024/03/28 22:19:39 by maldavid ### ########.fr */ +/* Updated: 2024/07/05 13:53:11 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -31,8 +31,8 @@ namespace mlx inline float GetScale() const noexcept { return m_scale; } inline const std::array& GetCharData() const { return m_cdata; } inline const TextureAtlas& GetAtlas() const noexcept { return m_atlas; } - 
inline bool operator==(const Font& rhs) const { return rhs._name == m_name && rhs._scale == m_scale; } - inline bool operator!=(const Font& rhs) const { return rhs._name != m_name || rhs._scale != m_scale; } + inline bool operator==(const Font& rhs) const { return rhs.m_name == m_name && rhs.m_scale == m_scale; } + inline bool operator!=(const Font& rhs) const { return rhs.m_name != m_name || rhs.m_scale != m_scale; } void Destroy(); diff --git a/runtime/Includes/Renderer/Texts/TextDescriptor.h b/runtime/Includes/Renderer/Texts/TextDescriptor.h index 87496ec..4b5f72a 100644 --- a/runtime/Includes/Renderer/Texts/TextDescriptor.h +++ b/runtime/Includes/Renderer/Texts/TextDescriptor.h @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2024/01/11 00:13:34 by maldavid #+# #+# */ -/* Updated: 2024/03/28 22:25:09 by maldavid ### ########.fr */ +/* Updated: 2024/07/05 14:03:43 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -35,7 +35,7 @@ namespace mlx void Init(FontID font) noexcept; bool operator==(const TextDrawDescriptor& rhs) const { return m_text == rhs.m_text && x == rhs.x && y == rhs.y && color == rhs.color; } - void Render(std::array& sets, Renderer& renderer) override; + void Render(Renderer& renderer) override; void ResetUpdate() override; TextDrawDescriptor() = default; diff --git a/runtime/Sources/Core/Logs.cpp b/runtime/Sources/Core/Logs.cpp index adc9a0c..b70ca49 100644 --- a/runtime/Sources/Core/Logs.cpp +++ b/runtime/Sources/Core/Logs.cpp @@ -6,14 +6,12 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2024/03/27 17:20:55 by maldavid #+# #+# */ -/* Updated: 2024/03/27 17:26:59 by maldavid ### ########.fr */ +/* Updated: 2024/07/05 13:31:02 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ -#include +#include #include -#include -#include namespace mlx { diff --git a/runtime/Sources/Platform/Window.cpp b/runtime/Sources/Platform/Window.cpp index 1b48a77..66ae725 100644 --- a/runtime/Sources/Platform/Window.cpp +++ b/runtime/Sources/Platform/Window.cpp @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/04 17:36:44 by maldavid #+# #+# */ -/* Updated: 2024/05/25 16:13:31 by maldavid ### ########.fr */ +/* Updated: 2024/07/05 13:12:51 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -20,9 +20,6 @@ namespace mlx Window::Window(std::size_t w, std::size_t h, const std::string& title) : m_width(w), m_height(h) { static std::uint64_t ids = 0; - - if(title.find("vvaas") != std::string::npos) - Message("vvaas est mauvais"); p_window = SDLManager::Get().CreateWindow(title, w, h); m_id = ids++; } From d5eeef9559bced05af4f65209af0084af1231138 Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Mon, 2 Sep 2024 09:44:42 +0200 Subject: [PATCH 014/131] big refactoring ! 
ci skip --- Makefile | 19 +- runtime/Includes/Core/Application.h | 46 +- runtime/Includes/Core/Application.inl | 13 +- runtime/Includes/Core/Enums.h | 24 +- runtime/Includes/Core/EventBase.h | 12 - runtime/Includes/Core/EventBus.h | 12 - runtime/Includes/Core/EventListener.h | 12 - runtime/Includes/Core/Format.h | 12 - runtime/Includes/Core/Format.inl | 13 +- runtime/Includes/Core/Fps.h | 12 - runtime/Includes/Core/Graphics.h | 12 - runtime/Includes/Core/Graphics.inl | 13 +- runtime/Includes/Core/ImagesRegistry.h | 12 - runtime/Includes/Core/ImagesRegistry.inl | 5 - runtime/Includes/Core/Logs.h | 12 - runtime/Includes/Core/Logs.inl | 13 +- runtime/Includes/Core/Memory.h | 12 - runtime/Includes/Core/Profiler.h | 12 - runtime/Includes/Core/SDLManager.h | 32 +- runtime/Includes/Core/UUID.h | 12 - runtime/Includes/Embedded/2DFragment.nzsl | 28 + runtime/Includes/Embedded/2DFragment.spv.h | 44 + runtime/Includes/Embedded/2DVertex.nzsl | 45 + runtime/Includes/Embedded/2DVertex.spv.h | 80 + .../Includes/{Utils => Embedded}/DogicaTTF.h | 12 - .../Includes/{Utils => Embedded}/IconMlx.h | 12 - runtime/Includes/Embedded/ScreenFragment.nzsl | 46 + .../Includes/Embedded/ScreenFragment.spv.h | 49 + runtime/Includes/Embedded/ScreenVertex.nzsl | 31 + runtime/Includes/Embedded/ScreenVertex.spv.h | 48 + runtime/Includes/Graphics/Mesh.h | 53 + runtime/Includes/Graphics/Scene.h | 32 + runtime/Includes/Graphics/Sprite.h | 53 + runtime/Includes/Maths/Angles.h | 108 + runtime/Includes/Maths/Angles.inl | 488 + runtime/Includes/Maths/Constants.h | 87 + runtime/Includes/Maths/Enums.h | 20 + runtime/Includes/Maths/EulerAngles.h | 57 + runtime/Includes/Maths/EulerAngles.inl | 169 + runtime/Includes/Maths/Mat4.h | 122 + runtime/Includes/Maths/Mat4.inl | 879 + runtime/Includes/Maths/MathsUtils.h | 26 + runtime/Includes/Maths/MathsUtils.inl | 47 + runtime/Includes/Maths/Quaternions.h | 91 + runtime/Includes/Maths/Quaternions.inl | 508 + runtime/Includes/Maths/Vec2.h | 116 + runtime/Includes/Maths/Vec2.inl | 388 + runtime/Includes/Maths/Vec3.h | 133 + runtime/Includes/Maths/Vec3.inl | 509 + runtime/Includes/Maths/Vec4.h | 115 + runtime/Includes/Maths/Vec4.inl | 424 + runtime/Includes/Platform/Inputs.h | 13 +- runtime/Includes/Platform/Window.h | 24 +- runtime/Includes/PreCompiled.h | 22 +- runtime/Includes/Renderer/Buffer.h | 83 + runtime/Includes/Renderer/Buffers/Buffer.h | 66 - .../Includes/Renderer/Buffers/IndexBuffer.h | 29 - .../Includes/Renderer/Buffers/UniformBuffer.h | 50 - .../Includes/Renderer/Buffers/VertexBuffer.h | 46 - .../Includes/Renderer/Command/CommandBuffer.h | 70 - .../Renderer/Command/CommandManager.h | 43 - .../Includes/Renderer/Command/CommandPool.h | 36 - .../Renderer/Command/CommandResource.h | 38 - .../Renderer/Command/SingleTimeCmdManager.h | 49 - runtime/Includes/Renderer/Core/Device.h | 40 - .../Includes/Renderer/Core/DrawableResource.h | 28 - runtime/Includes/Renderer/Core/Fence.h | 40 - runtime/Includes/Renderer/Core/Instance.h | 38 - runtime/Includes/Renderer/Core/Queues.h | 51 - runtime/Includes/Renderer/Core/RenderCore.h | 78 - runtime/Includes/Renderer/Core/Semaphore.h | 31 - runtime/Includes/Renderer/Core/Surface.h | 34 - .../Includes/Renderer/Core/ValidationLayers.h | 44 - runtime/Includes/Renderer/Descriptor.h | 48 + .../Renderer/Descriptors/DescriptorPool.h | 42 - .../Descriptors/DescriptorPoolManager.h | 35 - .../Renderer/Descriptors/DescriptorSet.h | 56 - .../Descriptors/DescriptorSetLayout.h | 38 - runtime/Includes/Renderer/Enums.h | 41 +- runtime/Includes/Renderer/Image.h | 102 
+ runtime/Includes/Renderer/Images/Image.h | 84 - runtime/Includes/Renderer/Images/Texture.h | 63 - .../Includes/Renderer/Images/TextureAtlas.h | 43 - .../Renderer/Images/TextureDescriptor.h | 59 - .../Renderer/Images/TextureRegistry.h | 39 - .../Renderer/Images/TextureRegistry.inl | 52 - runtime/Includes/Renderer/{Core => }/Memory.h | 18 +- .../Includes/Renderer/Pipelines/Graphics.h | 57 + .../Includes/Renderer/Pipelines/Pipeline.h | 35 +- runtime/Includes/Renderer/Pipelines/Shader.h | 68 + runtime/Includes/Renderer/PixelPut.h | 48 - runtime/Includes/Renderer/RenderCore.h | 39 + .../Includes/Renderer/RenderPasses/2DPass.h | 29 + .../Renderer/RenderPasses/FinalPass.h | 27 + .../Includes/Renderer/RenderPasses/Passes.h | 26 + runtime/Includes/Renderer/Renderer.h | 101 +- .../Renderer/Renderpass/FrameBuffer.h | 36 - .../Includes/Renderer/Renderpass/RenderPass.h | 36 - .../Includes/Renderer/Renderpass/Swapchain.h | 65 - runtime/Includes/Renderer/ScenesRenderer.h | 22 + runtime/Includes/Renderer/Texts/Font.h | 55 - runtime/Includes/Renderer/Texts/FontLibrary.h | 47 - runtime/Includes/Renderer/Texts/Text.h | 49 - .../Includes/Renderer/Texts/TextDescriptor.h | 62 - runtime/Includes/Renderer/Texts/TextLibrary.h | 48 - runtime/Includes/Renderer/Texts/TextManager.h | 43 - runtime/Includes/Renderer/Vertex.h | 61 +- runtime/Includes/Renderer/Vertex.inl | 36 + runtime/Includes/Renderer/ViewerData.h | 14 + .../Renderer/Vulkan/VulkanPrototypes.h | 170 + runtime/Includes/Utils/Ansi.h | 12 - runtime/Includes/Utils/Buffer.h | 41 + runtime/Includes/Utils/CombineHash.h | 12 - runtime/Includes/Utils/ConstMap.h | 12 - runtime/Includes/Utils/NonCopyable.h | 12 - runtime/Includes/Utils/NonOwningPtr.h | 12 - runtime/Includes/Utils/NonOwningPtr.inl | 13 +- runtime/Includes/Utils/Singleton.h | 12 - runtime/Sources/Core/Application.cpp | 12 - runtime/Sources/Core/Bridge.cpp | 12 - runtime/Sources/Core/EventBus.cpp | 12 - runtime/Sources/Core/EventListener.cpp | 12 - runtime/Sources/Core/Fps.cpp | 12 - runtime/Sources/Core/Graphics.cpp | 12 - runtime/Sources/Core/Logs.cpp | 12 - runtime/Sources/Core/Memory.cpp | 12 - runtime/Sources/Core/Profiler.cpp | 12 - runtime/Sources/Core/SDLManager.cpp | 106 +- runtime/Sources/Core/UUID.cpp | 14 +- runtime/Sources/Graphics/Mesh.cpp | 31 + runtime/Sources/Graphics/Scene.cpp | 19 + runtime/Sources/Graphics/Sprite.cpp | 44 + runtime/Sources/Platform/Inputs.cpp | 18 +- runtime/Sources/Platform/Window.cpp | 16 +- runtime/Sources/Renderer/Buffer.cpp | 175 + runtime/Sources/Renderer/Buffers/Buffer.cpp | 150 - .../Renderer/Buffers/UniformBuffer.cpp | 78 - .../Sources/Renderer/Buffers/VertexBuffer.cpp | 56 - .../Renderer/Command/CommandBuffer.cpp | 365 - .../Renderer/Command/CommandManager.cpp | 42 - .../Sources/Renderer/Command/CommandPool.cpp | 37 - .../Command/SingleTimeCommandManager.cpp | 64 - runtime/Sources/Renderer/Core/Device.cpp | 142 - runtime/Sources/Renderer/Core/Fence.cpp | 54 - runtime/Sources/Renderer/Core/Instance.cpp | 88 - runtime/Sources/Renderer/Core/Memory.cpp | 199 - runtime/Sources/Renderer/Core/Queues.cpp | 53 - runtime/Sources/Renderer/Core/RenderCore.cpp | 134 - runtime/Sources/Renderer/Core/Semaphore.cpp | 36 - runtime/Sources/Renderer/Core/Surface.cpp | 43 - .../Renderer/Core/ValidationLayers.cpp | 122 - runtime/Sources/Renderer/Descriptor.cpp | 141 + .../Renderer/Descriptors/DescriptorPool.cpp | 69 - .../Descriptors/DescriptorPoolManager.cpp | 41 - .../Renderer/Descriptors/DescriptorSet.cpp | 116 - .../Descriptors/DescriptorSetLayout.cpp | 49 - 
runtime/Sources/Renderer/Image.cpp | 113 + runtime/Sources/Renderer/Images/Image.cpp | 393 - runtime/Sources/Renderer/Images/Texture.cpp | 190 - .../Sources/Renderer/Images/TextureAtlas.cpp | 58 - runtime/Sources/Renderer/Memory.cpp | 159 + .../Sources/Renderer/Pipelines/Graphics.cpp | 164 + .../Sources/Renderer/Pipelines/Pipeline.cpp | 331 - runtime/Sources/Renderer/Pipelines/Shader.cpp | 83 + runtime/Sources/Renderer/PixelPut.cpp | 67 - runtime/Sources/Renderer/RenderCore.cpp | 78 + .../Sources/Renderer/RenderPasses/2DPass.cpp | 112 + .../Renderer/RenderPasses/FinalPass.cpp | 76 + .../Sources/Renderer/RenderPasses/Passes.cpp | 45 + runtime/Sources/Renderer/Renderer.cpp | 260 +- .../Renderer/Renderpass/Framebuffer.cpp | 49 - .../Renderer/Renderpass/Renderpass.cpp | 117 - .../Sources/Renderer/Renderpass/Swapchain.cpp | 150 - runtime/Sources/Renderer/SceneRenderer.cpp | 23 + runtime/Sources/Renderer/Texts/Font.cpp | 88 - .../Sources/Renderer/Texts/FontLibrary.cpp | 68 - runtime/Sources/Renderer/Texts/Text.cpp | 78 - .../Sources/Renderer/Texts/TextDescriptor.cpp | 107 - .../Sources/Renderer/Texts/TextLibrary.cpp | 62 - .../Sources/Renderer/Texts/TextManager.cpp | 65 - .../Sources/Renderer/Vulkan/VulkanLoader.cpp | 416 + .../Sources/Renderer/Vulkan/VulkanLoader.h | 42 + scripts/fetch_dependencies.sh | 17 +- third_party/glm/common.hpp | 539 - third_party/glm/detail/_features.hpp | 394 - third_party/glm/detail/_fixes.hpp | 27 - third_party/glm/detail/_noise.hpp | 81 - third_party/glm/detail/_swizzle.hpp | 804 - third_party/glm/detail/_swizzle_func.hpp | 682 - third_party/glm/detail/_vectorize.hpp | 162 - third_party/glm/detail/compute_common.hpp | 50 - .../glm/detail/compute_vector_relational.hpp | 30 - third_party/glm/detail/func_common.inl | 792 - third_party/glm/detail/func_common_simd.inl | 231 - third_party/glm/detail/func_exponential.inl | 152 - .../glm/detail/func_exponential_simd.inl | 37 - third_party/glm/detail/func_geometric.inl | 243 - .../glm/detail/func_geometric_simd.inl | 165 - third_party/glm/detail/func_integer.inl | 372 - third_party/glm/detail/func_integer_simd.inl | 65 - third_party/glm/detail/func_matrix.inl | 398 - third_party/glm/detail/func_matrix_simd.inl | 249 - third_party/glm/detail/func_packing.inl | 189 - third_party/glm/detail/func_packing_simd.inl | 6 - third_party/glm/detail/func_trigonometric.inl | 197 - .../glm/detail/func_trigonometric_simd.inl | 0 .../glm/detail/func_vector_relational.inl | 87 - .../detail/func_vector_relational_simd.inl | 6 - third_party/glm/detail/glm.cpp | 263 - third_party/glm/detail/qualifier.hpp | 230 - third_party/glm/detail/setup.hpp | 1135 - third_party/glm/detail/type_float.hpp | 68 - third_party/glm/detail/type_half.hpp | 16 - third_party/glm/detail/type_half.inl | 241 - third_party/glm/detail/type_mat2x2.hpp | 177 - third_party/glm/detail/type_mat2x2.inl | 536 - third_party/glm/detail/type_mat2x3.hpp | 159 - third_party/glm/detail/type_mat2x3.inl | 510 - third_party/glm/detail/type_mat2x4.hpp | 161 - third_party/glm/detail/type_mat2x4.inl | 520 - third_party/glm/detail/type_mat3x2.hpp | 167 - third_party/glm/detail/type_mat3x2.inl | 532 - third_party/glm/detail/type_mat3x3.hpp | 184 - third_party/glm/detail/type_mat3x3.inl | 601 - third_party/glm/detail/type_mat3x4.hpp | 166 - third_party/glm/detail/type_mat3x4.inl | 578 - third_party/glm/detail/type_mat4x2.hpp | 171 - third_party/glm/detail/type_mat4x2.inl | 574 - third_party/glm/detail/type_mat4x3.hpp | 171 - third_party/glm/detail/type_mat4x3.inl | 598 - 
third_party/glm/detail/type_mat4x4.hpp | 189 - third_party/glm/detail/type_mat4x4.inl | 706 - third_party/glm/detail/type_mat4x4_simd.inl | 6 - third_party/glm/detail/type_quat.hpp | 186 - third_party/glm/detail/type_quat.inl | 408 - third_party/glm/detail/type_quat_simd.inl | 188 - third_party/glm/detail/type_vec1.hpp | 308 - third_party/glm/detail/type_vec1.inl | 551 - third_party/glm/detail/type_vec2.hpp | 399 - third_party/glm/detail/type_vec2.inl | 913 - third_party/glm/detail/type_vec3.hpp | 432 - third_party/glm/detail/type_vec3.inl | 1068 - third_party/glm/detail/type_vec4.hpp | 505 - third_party/glm/detail/type_vec4.inl | 1140 - third_party/glm/detail/type_vec4_simd.inl | 775 - third_party/glm/exponential.hpp | 110 - third_party/glm/ext.hpp | 196 - third_party/glm/ext/matrix_clip_space.hpp | 522 - third_party/glm/ext/matrix_clip_space.inl | 555 - third_party/glm/ext/matrix_common.hpp | 36 - third_party/glm/ext/matrix_common.inl | 16 - third_party/glm/ext/matrix_double2x2.hpp | 23 - .../glm/ext/matrix_double2x2_precision.hpp | 49 - third_party/glm/ext/matrix_double2x3.hpp | 18 - .../glm/ext/matrix_double2x3_precision.hpp | 31 - third_party/glm/ext/matrix_double2x4.hpp | 18 - .../glm/ext/matrix_double2x4_precision.hpp | 31 - third_party/glm/ext/matrix_double3x2.hpp | 18 - .../glm/ext/matrix_double3x2_precision.hpp | 31 - third_party/glm/ext/matrix_double3x3.hpp | 23 - .../glm/ext/matrix_double3x3_precision.hpp | 49 - third_party/glm/ext/matrix_double3x4.hpp | 18 - .../glm/ext/matrix_double3x4_precision.hpp | 31 - third_party/glm/ext/matrix_double4x2.hpp | 18 - .../glm/ext/matrix_double4x2_precision.hpp | 31 - third_party/glm/ext/matrix_double4x3.hpp | 18 - .../glm/ext/matrix_double4x3_precision.hpp | 31 - third_party/glm/ext/matrix_double4x4.hpp | 23 - .../glm/ext/matrix_double4x4_precision.hpp | 49 - third_party/glm/ext/matrix_float2x2.hpp | 23 - .../glm/ext/matrix_float2x2_precision.hpp | 49 - third_party/glm/ext/matrix_float2x3.hpp | 18 - .../glm/ext/matrix_float2x3_precision.hpp | 31 - third_party/glm/ext/matrix_float2x4.hpp | 18 - .../glm/ext/matrix_float2x4_precision.hpp | 31 - third_party/glm/ext/matrix_float3x2.hpp | 18 - .../glm/ext/matrix_float3x2_precision.hpp | 31 - third_party/glm/ext/matrix_float3x3.hpp | 23 - .../glm/ext/matrix_float3x3_precision.hpp | 49 - third_party/glm/ext/matrix_float3x4.hpp | 18 - .../glm/ext/matrix_float3x4_precision.hpp | 31 - third_party/glm/ext/matrix_float4x2.hpp | 18 - .../glm/ext/matrix_float4x2_precision.hpp | 31 - third_party/glm/ext/matrix_float4x3.hpp | 18 - .../glm/ext/matrix_float4x3_precision.hpp | 31 - third_party/glm/ext/matrix_float4x4.hpp | 23 - .../glm/ext/matrix_float4x4_precision.hpp | 49 - third_party/glm/ext/matrix_projection.hpp | 149 - third_party/glm/ext/matrix_projection.inl | 104 - third_party/glm/ext/matrix_relational.hpp | 132 - third_party/glm/ext/matrix_relational.inl | 82 - third_party/glm/ext/matrix_transform.hpp | 144 - third_party/glm/ext/matrix_transform.inl | 152 - third_party/glm/ext/quaternion_common.hpp | 120 - third_party/glm/ext/quaternion_common.inl | 107 - .../glm/ext/quaternion_common_simd.inl | 18 - third_party/glm/ext/quaternion_double.hpp | 39 - .../glm/ext/quaternion_double_precision.hpp | 42 - .../glm/ext/quaternion_exponential.hpp | 63 - .../glm/ext/quaternion_exponential.inl | 85 - third_party/glm/ext/quaternion_float.hpp | 39 - .../glm/ext/quaternion_float_precision.hpp | 36 - third_party/glm/ext/quaternion_geometric.hpp | 70 - third_party/glm/ext/quaternion_geometric.inl | 36 - 
third_party/glm/ext/quaternion_relational.hpp | 62 - third_party/glm/ext/quaternion_relational.inl | 35 - third_party/glm/ext/quaternion_transform.hpp | 47 - third_party/glm/ext/quaternion_transform.inl | 24 - .../glm/ext/quaternion_trigonometric.hpp | 63 - .../glm/ext/quaternion_trigonometric.inl | 34 - third_party/glm/ext/scalar_common.hpp | 103 - third_party/glm/ext/scalar_common.inl | 115 - third_party/glm/ext/scalar_constants.hpp | 40 - third_party/glm/ext/scalar_constants.inl | 24 - third_party/glm/ext/scalar_int_sized.hpp | 70 - third_party/glm/ext/scalar_integer.hpp | 92 - third_party/glm/ext/scalar_integer.inl | 243 - third_party/glm/ext/scalar_relational.hpp | 65 - third_party/glm/ext/scalar_relational.inl | 40 - third_party/glm/ext/scalar_uint_sized.hpp | 70 - third_party/glm/ext/scalar_ulp.hpp | 74 - third_party/glm/ext/scalar_ulp.inl | 284 - third_party/glm/ext/vector_bool1.hpp | 30 - .../glm/ext/vector_bool1_precision.hpp | 34 - third_party/glm/ext/vector_bool2.hpp | 18 - .../glm/ext/vector_bool2_precision.hpp | 31 - third_party/glm/ext/vector_bool3.hpp | 18 - .../glm/ext/vector_bool3_precision.hpp | 31 - third_party/glm/ext/vector_bool4.hpp | 18 - .../glm/ext/vector_bool4_precision.hpp | 31 - third_party/glm/ext/vector_common.hpp | 144 - third_party/glm/ext/vector_common.inl | 88 - third_party/glm/ext/vector_double1.hpp | 31 - .../glm/ext/vector_double1_precision.hpp | 36 - third_party/glm/ext/vector_double2.hpp | 18 - .../glm/ext/vector_double2_precision.hpp | 31 - third_party/glm/ext/vector_double3.hpp | 18 - .../glm/ext/vector_double3_precision.hpp | 34 - third_party/glm/ext/vector_double4.hpp | 18 - .../glm/ext/vector_double4_precision.hpp | 35 - third_party/glm/ext/vector_float1.hpp | 31 - .../glm/ext/vector_float1_precision.hpp | 36 - third_party/glm/ext/vector_float2.hpp | 18 - .../glm/ext/vector_float2_precision.hpp | 31 - third_party/glm/ext/vector_float3.hpp | 18 - .../glm/ext/vector_float3_precision.hpp | 31 - third_party/glm/ext/vector_float4.hpp | 18 - .../glm/ext/vector_float4_precision.hpp | 31 - third_party/glm/ext/vector_int1.hpp | 32 - third_party/glm/ext/vector_int1_precision.hpp | 34 - third_party/glm/ext/vector_int2.hpp | 18 - third_party/glm/ext/vector_int2_precision.hpp | 31 - third_party/glm/ext/vector_int3.hpp | 18 - third_party/glm/ext/vector_int3_precision.hpp | 31 - third_party/glm/ext/vector_int4.hpp | 18 - third_party/glm/ext/vector_int4_precision.hpp | 31 - third_party/glm/ext/vector_integer.hpp | 149 - third_party/glm/ext/vector_integer.inl | 85 - third_party/glm/ext/vector_relational.hpp | 107 - third_party/glm/ext/vector_relational.inl | 75 - third_party/glm/ext/vector_uint1.hpp | 32 - .../glm/ext/vector_uint1_precision.hpp | 40 - third_party/glm/ext/vector_uint2.hpp | 18 - .../glm/ext/vector_uint2_precision.hpp | 31 - third_party/glm/ext/vector_uint3.hpp | 18 - .../glm/ext/vector_uint3_precision.hpp | 31 - third_party/glm/ext/vector_uint4.hpp | 18 - .../glm/ext/vector_uint4_precision.hpp | 31 - third_party/glm/ext/vector_ulp.hpp | 109 - third_party/glm/ext/vector_ulp.inl | 74 - third_party/glm/fwd.hpp | 818 - third_party/glm/geometric.hpp | 116 - third_party/glm/glm.hpp | 136 - third_party/glm/gtc/bitfield.hpp | 266 - third_party/glm/gtc/bitfield.inl | 626 - third_party/glm/gtc/color_space.hpp | 56 - third_party/glm/gtc/color_space.inl | 84 - third_party/glm/gtc/constants.hpp | 165 - third_party/glm/gtc/constants.inl | 167 - third_party/glm/gtc/epsilon.hpp | 60 - third_party/glm/gtc/epsilon.inl | 80 - third_party/glm/gtc/integer.hpp | 65 - 
third_party/glm/gtc/integer.inl | 68 - third_party/glm/gtc/matrix_access.hpp | 60 - third_party/glm/gtc/matrix_access.inl | 62 - third_party/glm/gtc/matrix_integer.hpp | 487 - third_party/glm/gtc/matrix_inverse.hpp | 50 - third_party/glm/gtc/matrix_inverse.inl | 118 - third_party/glm/gtc/matrix_transform.hpp | 36 - third_party/glm/gtc/matrix_transform.inl | 3 - third_party/glm/gtc/noise.hpp | 61 - third_party/glm/gtc/noise.inl | 807 - third_party/glm/gtc/packing.hpp | 728 - third_party/glm/gtc/packing.inl | 938 - third_party/glm/gtc/quaternion.hpp | 173 - third_party/glm/gtc/quaternion.inl | 200 - third_party/glm/gtc/quaternion_simd.inl | 0 third_party/glm/gtc/random.hpp | 82 - third_party/glm/gtc/random.inl | 303 - third_party/glm/gtc/reciprocal.hpp | 135 - third_party/glm/gtc/reciprocal.inl | 191 - third_party/glm/gtc/round.hpp | 160 - third_party/glm/gtc/round.inl | 155 - third_party/glm/gtc/type_aligned.hpp | 1315 - third_party/glm/gtc/type_precision.hpp | 2138 - third_party/glm/gtc/type_precision.inl | 6 - third_party/glm/gtc/type_ptr.hpp | 230 - third_party/glm/gtc/type_ptr.inl | 386 - third_party/glm/gtc/ulp.hpp | 152 - third_party/glm/gtc/ulp.inl | 173 - third_party/glm/gtc/vec1.hpp | 30 - third_party/glm/gtx/associated_min_max.hpp | 207 - third_party/glm/gtx/associated_min_max.inl | 354 - third_party/glm/gtx/bit.hpp | 98 - third_party/glm/gtx/bit.inl | 92 - third_party/glm/gtx/closest_point.hpp | 49 - third_party/glm/gtx/closest_point.inl | 45 - third_party/glm/gtx/color_encoding.hpp | 54 - third_party/glm/gtx/color_encoding.inl | 45 - third_party/glm/gtx/color_space.hpp | 72 - third_party/glm/gtx/color_space.inl | 141 - third_party/glm/gtx/color_space_YCoCg.hpp | 60 - third_party/glm/gtx/color_space_YCoCg.inl | 107 - third_party/glm/gtx/common.hpp | 76 - third_party/glm/gtx/common.inl | 125 - third_party/glm/gtx/compatibility.hpp | 133 - third_party/glm/gtx/compatibility.inl | 62 - third_party/glm/gtx/component_wise.hpp | 69 - third_party/glm/gtx/component_wise.inl | 127 - third_party/glm/gtx/dual_quaternion.hpp | 274 - third_party/glm/gtx/dual_quaternion.inl | 352 - third_party/glm/gtx/easing.hpp | 219 - third_party/glm/gtx/easing.inl | 436 - third_party/glm/gtx/euler_angles.hpp | 335 - third_party/glm/gtx/euler_angles.inl | 899 - third_party/glm/gtx/extend.hpp | 42 - third_party/glm/gtx/extend.inl | 48 - third_party/glm/gtx/extended_min_max.hpp | 182 - third_party/glm/gtx/extended_min_max.inl | 218 - third_party/glm/gtx/exterior_product.hpp | 45 - third_party/glm/gtx/exterior_product.inl | 26 - third_party/glm/gtx/fast_exponential.hpp | 95 - third_party/glm/gtx/fast_exponential.inl | 136 - third_party/glm/gtx/fast_square_root.hpp | 92 - third_party/glm/gtx/fast_square_root.inl | 75 - third_party/glm/gtx/fast_trigonometry.hpp | 79 - third_party/glm/gtx/fast_trigonometry.inl | 142 - third_party/glm/gtx/float_notmalize.inl | 13 - third_party/glm/gtx/functions.hpp | 56 - third_party/glm/gtx/functions.inl | 30 - third_party/glm/gtx/gradient_paint.hpp | 53 - third_party/glm/gtx/gradient_paint.inl | 36 - .../glm/gtx/handed_coordinate_space.hpp | 50 - .../glm/gtx/handed_coordinate_space.inl | 26 - third_party/glm/gtx/hash.hpp | 142 - third_party/glm/gtx/hash.inl | 184 - third_party/glm/gtx/integer.hpp | 76 - third_party/glm/gtx/integer.inl | 185 - third_party/glm/gtx/intersect.hpp | 92 - third_party/glm/gtx/intersect.inl | 200 - third_party/glm/gtx/io.hpp | 201 - third_party/glm/gtx/io.inl | 440 - third_party/glm/gtx/log_base.hpp | 48 - third_party/glm/gtx/log_base.inl | 16 - 
third_party/glm/gtx/matrix_cross_product.hpp | 47 - third_party/glm/gtx/matrix_cross_product.inl | 37 - third_party/glm/gtx/matrix_decompose.hpp | 46 - third_party/glm/gtx/matrix_decompose.inl | 186 - third_party/glm/gtx/matrix_factorisation.hpp | 69 - third_party/glm/gtx/matrix_factorisation.inl | 84 - third_party/glm/gtx/matrix_interpolation.hpp | 60 - third_party/glm/gtx/matrix_interpolation.inl | 129 - third_party/glm/gtx/matrix_major_storage.hpp | 119 - third_party/glm/gtx/matrix_major_storage.inl | 166 - third_party/glm/gtx/matrix_operation.hpp | 103 - third_party/glm/gtx/matrix_operation.inl | 176 - third_party/glm/gtx/matrix_query.hpp | 77 - third_party/glm/gtx/matrix_query.inl | 113 - third_party/glm/gtx/matrix_transform_2d.hpp | 81 - third_party/glm/gtx/matrix_transform_2d.inl | 68 - third_party/glm/gtx/mixed_product.hpp | 41 - third_party/glm/gtx/mixed_product.inl | 15 - third_party/glm/gtx/norm.hpp | 88 - third_party/glm/gtx/norm.inl | 95 - third_party/glm/gtx/normal.hpp | 41 - third_party/glm/gtx/normal.inl | 15 - third_party/glm/gtx/normalize_dot.hpp | 49 - third_party/glm/gtx/normalize_dot.inl | 16 - third_party/glm/gtx/number_precision.hpp | 61 - third_party/glm/gtx/number_precision.inl | 6 - third_party/glm/gtx/optimum_pow.hpp | 54 - third_party/glm/gtx/optimum_pow.inl | 22 - third_party/glm/gtx/orthonormalize.hpp | 49 - third_party/glm/gtx/orthonormalize.inl | 29 - third_party/glm/gtx/perpendicular.hpp | 41 - third_party/glm/gtx/perpendicular.inl | 10 - third_party/glm/gtx/polar_coordinates.hpp | 48 - third_party/glm/gtx/polar_coordinates.inl | 36 - third_party/glm/gtx/projection.hpp | 43 - third_party/glm/gtx/projection.inl | 10 - third_party/glm/gtx/quaternion.hpp | 174 - third_party/glm/gtx/quaternion.inl | 159 - third_party/glm/gtx/range.hpp | 98 - third_party/glm/gtx/raw_data.hpp | 51 - third_party/glm/gtx/raw_data.inl | 2 - .../glm/gtx/rotate_normalized_axis.hpp | 68 - .../glm/gtx/rotate_normalized_axis.inl | 58 - third_party/glm/gtx/rotate_vector.hpp | 123 - third_party/glm/gtx/rotate_vector.inl | 187 - third_party/glm/gtx/scalar_multiplication.hpp | 75 - third_party/glm/gtx/scalar_relational.hpp | 36 - third_party/glm/gtx/scalar_relational.inl | 88 - third_party/glm/gtx/spline.hpp | 65 - third_party/glm/gtx/spline.inl | 60 - third_party/glm/gtx/std_based_type.hpp | 68 - third_party/glm/gtx/std_based_type.inl | 6 - third_party/glm/gtx/string_cast.hpp | 52 - third_party/glm/gtx/string_cast.inl | 492 - third_party/glm/gtx/texture.hpp | 46 - third_party/glm/gtx/texture.inl | 17 - third_party/glm/gtx/transform.hpp | 60 - third_party/glm/gtx/transform.inl | 23 - third_party/glm/gtx/transform2.hpp | 89 - third_party/glm/gtx/transform2.inl | 125 - third_party/glm/gtx/type_aligned.hpp | 982 - third_party/glm/gtx/type_aligned.inl | 6 - third_party/glm/gtx/type_trait.hpp | 85 - third_party/glm/gtx/type_trait.inl | 61 - third_party/glm/gtx/vec_swizzle.hpp | 2782 -- third_party/glm/gtx/vector_angle.hpp | 57 - third_party/glm/gtx/vector_angle.inl | 44 - third_party/glm/gtx/vector_query.hpp | 66 - third_party/glm/gtx/vector_query.inl | 154 - third_party/glm/gtx/wrap.hpp | 55 - third_party/glm/gtx/wrap.inl | 57 - third_party/glm/integer.hpp | 212 - third_party/glm/mat2x2.hpp | 9 - third_party/glm/mat2x3.hpp | 9 - third_party/glm/mat2x4.hpp | 9 - third_party/glm/mat3x2.hpp | 9 - third_party/glm/mat3x3.hpp | 8 - third_party/glm/mat3x4.hpp | 8 - third_party/glm/mat4x2.hpp | 9 - third_party/glm/mat4x3.hpp | 8 - third_party/glm/mat4x4.hpp | 9 - third_party/glm/matrix.hpp | 161 - 
third_party/glm/packing.hpp | 173 - third_party/glm/simd/common.h | 240 - third_party/glm/simd/exponential.h | 20 - third_party/glm/simd/geometric.h | 124 - third_party/glm/simd/integer.h | 115 - third_party/glm/simd/matrix.h | 1028 - third_party/glm/simd/neon.h | 155 - third_party/glm/simd/packing.h | 8 - third_party/glm/simd/platform.h | 398 - third_party/glm/simd/trigonometric.h | 9 - third_party/glm/simd/vector_relational.h | 8 - third_party/glm/trigonometric.hpp | 210 - third_party/glm/vec2.hpp | 14 - third_party/glm/vec3.hpp | 14 - third_party/glm/vec4.hpp | 15 - third_party/glm/vector_relational.hpp | 121 - third_party/kvf.h | 2334 + third_party/vma.h | 38234 ++++++++-------- third_party/volk.c | 3041 -- third_party/volk.h | 1985 - third_party/vulkan/vulkan.cppm | 148 +- third_party/vulkan/vulkan.hpp | 1128 +- third_party/vulkan/vulkan_core.h | 436 +- third_party/vulkan/vulkan_enums.hpp | 622 +- .../vulkan/vulkan_extension_inspection.hpp | 88 +- third_party/vulkan/vulkan_format_traits.hpp | 18 +- third_party/vulkan/vulkan_funcs.hpp | 2472 +- third_party/vulkan/vulkan_handles.hpp | 1326 +- third_party/vulkan/vulkan_hash.hpp | 571 +- third_party/vulkan/vulkan_metal.h | 12 +- third_party/vulkan/vulkan_raii.hpp | 1405 +- third_party/vulkan/vulkan_shared.hpp | 50 +- .../vulkan/vulkan_static_assertions.hpp | 301 +- third_party/vulkan/vulkan_structs.hpp | 13556 +++--- third_party/vulkan/vulkan_to_string.hpp | 184 +- 581 files changed, 42971 insertions(+), 99170 deletions(-) create mode 100644 runtime/Includes/Embedded/2DFragment.nzsl create mode 100644 runtime/Includes/Embedded/2DFragment.spv.h create mode 100644 runtime/Includes/Embedded/2DVertex.nzsl create mode 100644 runtime/Includes/Embedded/2DVertex.spv.h rename runtime/Includes/{Utils => Embedded}/DogicaTTF.h (99%) rename runtime/Includes/{Utils => Embedded}/IconMlx.h (99%) create mode 100644 runtime/Includes/Embedded/ScreenFragment.nzsl create mode 100644 runtime/Includes/Embedded/ScreenFragment.spv.h create mode 100644 runtime/Includes/Embedded/ScreenVertex.nzsl create mode 100644 runtime/Includes/Embedded/ScreenVertex.spv.h create mode 100644 runtime/Includes/Graphics/Mesh.h create mode 100644 runtime/Includes/Graphics/Scene.h create mode 100644 runtime/Includes/Graphics/Sprite.h create mode 100644 runtime/Includes/Maths/Angles.h create mode 100644 runtime/Includes/Maths/Angles.inl create mode 100644 runtime/Includes/Maths/Constants.h create mode 100644 runtime/Includes/Maths/Enums.h create mode 100644 runtime/Includes/Maths/EulerAngles.h create mode 100644 runtime/Includes/Maths/EulerAngles.inl create mode 100644 runtime/Includes/Maths/Mat4.h create mode 100644 runtime/Includes/Maths/Mat4.inl create mode 100644 runtime/Includes/Maths/MathsUtils.h create mode 100644 runtime/Includes/Maths/MathsUtils.inl create mode 100644 runtime/Includes/Maths/Quaternions.h create mode 100644 runtime/Includes/Maths/Quaternions.inl create mode 100755 runtime/Includes/Maths/Vec2.h create mode 100755 runtime/Includes/Maths/Vec2.inl create mode 100755 runtime/Includes/Maths/Vec3.h create mode 100755 runtime/Includes/Maths/Vec3.inl create mode 100755 runtime/Includes/Maths/Vec4.h create mode 100755 runtime/Includes/Maths/Vec4.inl create mode 100644 runtime/Includes/Renderer/Buffer.h delete mode 100644 runtime/Includes/Renderer/Buffers/Buffer.h delete mode 100644 runtime/Includes/Renderer/Buffers/IndexBuffer.h delete mode 100644 runtime/Includes/Renderer/Buffers/UniformBuffer.h delete mode 100644 runtime/Includes/Renderer/Buffers/VertexBuffer.h delete 
mode 100644 runtime/Includes/Renderer/Command/CommandBuffer.h delete mode 100644 runtime/Includes/Renderer/Command/CommandManager.h delete mode 100644 runtime/Includes/Renderer/Command/CommandPool.h delete mode 100644 runtime/Includes/Renderer/Command/CommandResource.h delete mode 100644 runtime/Includes/Renderer/Command/SingleTimeCmdManager.h delete mode 100644 runtime/Includes/Renderer/Core/Device.h delete mode 100644 runtime/Includes/Renderer/Core/DrawableResource.h delete mode 100644 runtime/Includes/Renderer/Core/Fence.h delete mode 100644 runtime/Includes/Renderer/Core/Instance.h delete mode 100644 runtime/Includes/Renderer/Core/Queues.h delete mode 100644 runtime/Includes/Renderer/Core/RenderCore.h delete mode 100644 runtime/Includes/Renderer/Core/Semaphore.h delete mode 100644 runtime/Includes/Renderer/Core/Surface.h delete mode 100644 runtime/Includes/Renderer/Core/ValidationLayers.h create mode 100644 runtime/Includes/Renderer/Descriptor.h delete mode 100644 runtime/Includes/Renderer/Descriptors/DescriptorPool.h delete mode 100644 runtime/Includes/Renderer/Descriptors/DescriptorPoolManager.h delete mode 100644 runtime/Includes/Renderer/Descriptors/DescriptorSet.h delete mode 100644 runtime/Includes/Renderer/Descriptors/DescriptorSetLayout.h create mode 100644 runtime/Includes/Renderer/Image.h delete mode 100644 runtime/Includes/Renderer/Images/Image.h delete mode 100644 runtime/Includes/Renderer/Images/Texture.h delete mode 100644 runtime/Includes/Renderer/Images/TextureAtlas.h delete mode 100644 runtime/Includes/Renderer/Images/TextureDescriptor.h delete mode 100644 runtime/Includes/Renderer/Images/TextureRegistry.h delete mode 100644 runtime/Includes/Renderer/Images/TextureRegistry.inl rename runtime/Includes/Renderer/{Core => }/Memory.h (50%) create mode 100644 runtime/Includes/Renderer/Pipelines/Graphics.h create mode 100644 runtime/Includes/Renderer/Pipelines/Shader.h delete mode 100644 runtime/Includes/Renderer/PixelPut.h create mode 100644 runtime/Includes/Renderer/RenderCore.h create mode 100644 runtime/Includes/Renderer/RenderPasses/2DPass.h create mode 100644 runtime/Includes/Renderer/RenderPasses/FinalPass.h create mode 100644 runtime/Includes/Renderer/RenderPasses/Passes.h delete mode 100644 runtime/Includes/Renderer/Renderpass/FrameBuffer.h delete mode 100644 runtime/Includes/Renderer/Renderpass/RenderPass.h delete mode 100644 runtime/Includes/Renderer/Renderpass/Swapchain.h create mode 100644 runtime/Includes/Renderer/ScenesRenderer.h delete mode 100644 runtime/Includes/Renderer/Texts/Font.h delete mode 100644 runtime/Includes/Renderer/Texts/FontLibrary.h delete mode 100644 runtime/Includes/Renderer/Texts/Text.h delete mode 100644 runtime/Includes/Renderer/Texts/TextDescriptor.h delete mode 100644 runtime/Includes/Renderer/Texts/TextLibrary.h delete mode 100644 runtime/Includes/Renderer/Texts/TextManager.h create mode 100644 runtime/Includes/Renderer/Vertex.inl create mode 100644 runtime/Includes/Renderer/ViewerData.h create mode 100644 runtime/Includes/Renderer/Vulkan/VulkanPrototypes.h create mode 100644 runtime/Includes/Utils/Buffer.h create mode 100644 runtime/Sources/Graphics/Mesh.cpp create mode 100644 runtime/Sources/Graphics/Scene.cpp create mode 100644 runtime/Sources/Graphics/Sprite.cpp create mode 100644 runtime/Sources/Renderer/Buffer.cpp delete mode 100644 runtime/Sources/Renderer/Buffers/Buffer.cpp delete mode 100644 runtime/Sources/Renderer/Buffers/UniformBuffer.cpp delete mode 100644 runtime/Sources/Renderer/Buffers/VertexBuffer.cpp delete mode 
100644 runtime/Sources/Renderer/Command/CommandBuffer.cpp delete mode 100644 runtime/Sources/Renderer/Command/CommandManager.cpp delete mode 100644 runtime/Sources/Renderer/Command/CommandPool.cpp delete mode 100644 runtime/Sources/Renderer/Command/SingleTimeCommandManager.cpp delete mode 100644 runtime/Sources/Renderer/Core/Device.cpp delete mode 100644 runtime/Sources/Renderer/Core/Fence.cpp delete mode 100644 runtime/Sources/Renderer/Core/Instance.cpp delete mode 100644 runtime/Sources/Renderer/Core/Memory.cpp delete mode 100644 runtime/Sources/Renderer/Core/Queues.cpp delete mode 100644 runtime/Sources/Renderer/Core/RenderCore.cpp delete mode 100644 runtime/Sources/Renderer/Core/Semaphore.cpp delete mode 100644 runtime/Sources/Renderer/Core/Surface.cpp delete mode 100644 runtime/Sources/Renderer/Core/ValidationLayers.cpp create mode 100644 runtime/Sources/Renderer/Descriptor.cpp delete mode 100644 runtime/Sources/Renderer/Descriptors/DescriptorPool.cpp delete mode 100644 runtime/Sources/Renderer/Descriptors/DescriptorPoolManager.cpp delete mode 100644 runtime/Sources/Renderer/Descriptors/DescriptorSet.cpp delete mode 100644 runtime/Sources/Renderer/Descriptors/DescriptorSetLayout.cpp create mode 100644 runtime/Sources/Renderer/Image.cpp delete mode 100644 runtime/Sources/Renderer/Images/Image.cpp delete mode 100644 runtime/Sources/Renderer/Images/Texture.cpp delete mode 100644 runtime/Sources/Renderer/Images/TextureAtlas.cpp create mode 100644 runtime/Sources/Renderer/Memory.cpp create mode 100644 runtime/Sources/Renderer/Pipelines/Graphics.cpp delete mode 100644 runtime/Sources/Renderer/Pipelines/Pipeline.cpp create mode 100644 runtime/Sources/Renderer/Pipelines/Shader.cpp delete mode 100644 runtime/Sources/Renderer/PixelPut.cpp create mode 100644 runtime/Sources/Renderer/RenderCore.cpp create mode 100644 runtime/Sources/Renderer/RenderPasses/2DPass.cpp create mode 100644 runtime/Sources/Renderer/RenderPasses/FinalPass.cpp create mode 100644 runtime/Sources/Renderer/RenderPasses/Passes.cpp delete mode 100644 runtime/Sources/Renderer/Renderpass/Framebuffer.cpp delete mode 100644 runtime/Sources/Renderer/Renderpass/Renderpass.cpp delete mode 100644 runtime/Sources/Renderer/Renderpass/Swapchain.cpp create mode 100644 runtime/Sources/Renderer/SceneRenderer.cpp delete mode 100644 runtime/Sources/Renderer/Texts/Font.cpp delete mode 100644 runtime/Sources/Renderer/Texts/FontLibrary.cpp delete mode 100644 runtime/Sources/Renderer/Texts/Text.cpp delete mode 100644 runtime/Sources/Renderer/Texts/TextDescriptor.cpp delete mode 100644 runtime/Sources/Renderer/Texts/TextLibrary.cpp delete mode 100644 runtime/Sources/Renderer/Texts/TextManager.cpp create mode 100644 runtime/Sources/Renderer/Vulkan/VulkanLoader.cpp create mode 100644 runtime/Sources/Renderer/Vulkan/VulkanLoader.h delete mode 100755 third_party/glm/common.hpp delete mode 100755 third_party/glm/detail/_features.hpp delete mode 100755 third_party/glm/detail/_fixes.hpp delete mode 100755 third_party/glm/detail/_noise.hpp delete mode 100755 third_party/glm/detail/_swizzle.hpp delete mode 100755 third_party/glm/detail/_swizzle_func.hpp delete mode 100755 third_party/glm/detail/_vectorize.hpp delete mode 100755 third_party/glm/detail/compute_common.hpp delete mode 100755 third_party/glm/detail/compute_vector_relational.hpp delete mode 100755 third_party/glm/detail/func_common.inl delete mode 100755 third_party/glm/detail/func_common_simd.inl delete mode 100755 third_party/glm/detail/func_exponential.inl delete mode 100755 
third_party/glm/detail/func_exponential_simd.inl delete mode 100755 third_party/glm/detail/func_geometric.inl delete mode 100755 third_party/glm/detail/func_geometric_simd.inl delete mode 100755 third_party/glm/detail/func_integer.inl delete mode 100755 third_party/glm/detail/func_integer_simd.inl delete mode 100755 third_party/glm/detail/func_matrix.inl delete mode 100755 third_party/glm/detail/func_matrix_simd.inl delete mode 100755 third_party/glm/detail/func_packing.inl delete mode 100755 third_party/glm/detail/func_packing_simd.inl delete mode 100755 third_party/glm/detail/func_trigonometric.inl delete mode 100755 third_party/glm/detail/func_trigonometric_simd.inl delete mode 100755 third_party/glm/detail/func_vector_relational.inl delete mode 100755 third_party/glm/detail/func_vector_relational_simd.inl delete mode 100755 third_party/glm/detail/glm.cpp delete mode 100755 third_party/glm/detail/qualifier.hpp delete mode 100755 third_party/glm/detail/setup.hpp delete mode 100755 third_party/glm/detail/type_float.hpp delete mode 100755 third_party/glm/detail/type_half.hpp delete mode 100755 third_party/glm/detail/type_half.inl delete mode 100755 third_party/glm/detail/type_mat2x2.hpp delete mode 100755 third_party/glm/detail/type_mat2x2.inl delete mode 100755 third_party/glm/detail/type_mat2x3.hpp delete mode 100755 third_party/glm/detail/type_mat2x3.inl delete mode 100755 third_party/glm/detail/type_mat2x4.hpp delete mode 100755 third_party/glm/detail/type_mat2x4.inl delete mode 100755 third_party/glm/detail/type_mat3x2.hpp delete mode 100755 third_party/glm/detail/type_mat3x2.inl delete mode 100755 third_party/glm/detail/type_mat3x3.hpp delete mode 100755 third_party/glm/detail/type_mat3x3.inl delete mode 100755 third_party/glm/detail/type_mat3x4.hpp delete mode 100755 third_party/glm/detail/type_mat3x4.inl delete mode 100755 third_party/glm/detail/type_mat4x2.hpp delete mode 100755 third_party/glm/detail/type_mat4x2.inl delete mode 100755 third_party/glm/detail/type_mat4x3.hpp delete mode 100755 third_party/glm/detail/type_mat4x3.inl delete mode 100755 third_party/glm/detail/type_mat4x4.hpp delete mode 100755 third_party/glm/detail/type_mat4x4.inl delete mode 100755 third_party/glm/detail/type_mat4x4_simd.inl delete mode 100755 third_party/glm/detail/type_quat.hpp delete mode 100755 third_party/glm/detail/type_quat.inl delete mode 100755 third_party/glm/detail/type_quat_simd.inl delete mode 100755 third_party/glm/detail/type_vec1.hpp delete mode 100755 third_party/glm/detail/type_vec1.inl delete mode 100755 third_party/glm/detail/type_vec2.hpp delete mode 100755 third_party/glm/detail/type_vec2.inl delete mode 100755 third_party/glm/detail/type_vec3.hpp delete mode 100755 third_party/glm/detail/type_vec3.inl delete mode 100755 third_party/glm/detail/type_vec4.hpp delete mode 100755 third_party/glm/detail/type_vec4.inl delete mode 100755 third_party/glm/detail/type_vec4_simd.inl delete mode 100755 third_party/glm/exponential.hpp delete mode 100755 third_party/glm/ext.hpp delete mode 100755 third_party/glm/ext/matrix_clip_space.hpp delete mode 100755 third_party/glm/ext/matrix_clip_space.inl delete mode 100755 third_party/glm/ext/matrix_common.hpp delete mode 100755 third_party/glm/ext/matrix_common.inl delete mode 100755 third_party/glm/ext/matrix_double2x2.hpp delete mode 100755 third_party/glm/ext/matrix_double2x2_precision.hpp delete mode 100755 third_party/glm/ext/matrix_double2x3.hpp delete mode 100755 third_party/glm/ext/matrix_double2x3_precision.hpp delete mode 100755 
third_party/glm/ext/matrix_double2x4.hpp delete mode 100755 third_party/glm/ext/matrix_double2x4_precision.hpp delete mode 100755 third_party/glm/ext/matrix_double3x2.hpp delete mode 100755 third_party/glm/ext/matrix_double3x2_precision.hpp delete mode 100755 third_party/glm/ext/matrix_double3x3.hpp delete mode 100755 third_party/glm/ext/matrix_double3x3_precision.hpp delete mode 100755 third_party/glm/ext/matrix_double3x4.hpp delete mode 100755 third_party/glm/ext/matrix_double3x4_precision.hpp delete mode 100755 third_party/glm/ext/matrix_double4x2.hpp delete mode 100755 third_party/glm/ext/matrix_double4x2_precision.hpp delete mode 100755 third_party/glm/ext/matrix_double4x3.hpp delete mode 100755 third_party/glm/ext/matrix_double4x3_precision.hpp delete mode 100755 third_party/glm/ext/matrix_double4x4.hpp delete mode 100755 third_party/glm/ext/matrix_double4x4_precision.hpp delete mode 100755 third_party/glm/ext/matrix_float2x2.hpp delete mode 100755 third_party/glm/ext/matrix_float2x2_precision.hpp delete mode 100755 third_party/glm/ext/matrix_float2x3.hpp delete mode 100755 third_party/glm/ext/matrix_float2x3_precision.hpp delete mode 100755 third_party/glm/ext/matrix_float2x4.hpp delete mode 100755 third_party/glm/ext/matrix_float2x4_precision.hpp delete mode 100755 third_party/glm/ext/matrix_float3x2.hpp delete mode 100755 third_party/glm/ext/matrix_float3x2_precision.hpp delete mode 100755 third_party/glm/ext/matrix_float3x3.hpp delete mode 100755 third_party/glm/ext/matrix_float3x3_precision.hpp delete mode 100755 third_party/glm/ext/matrix_float3x4.hpp delete mode 100755 third_party/glm/ext/matrix_float3x4_precision.hpp delete mode 100755 third_party/glm/ext/matrix_float4x2.hpp delete mode 100755 third_party/glm/ext/matrix_float4x2_precision.hpp delete mode 100755 third_party/glm/ext/matrix_float4x3.hpp delete mode 100755 third_party/glm/ext/matrix_float4x3_precision.hpp delete mode 100755 third_party/glm/ext/matrix_float4x4.hpp delete mode 100755 third_party/glm/ext/matrix_float4x4_precision.hpp delete mode 100755 third_party/glm/ext/matrix_projection.hpp delete mode 100755 third_party/glm/ext/matrix_projection.inl delete mode 100755 third_party/glm/ext/matrix_relational.hpp delete mode 100755 third_party/glm/ext/matrix_relational.inl delete mode 100755 third_party/glm/ext/matrix_transform.hpp delete mode 100755 third_party/glm/ext/matrix_transform.inl delete mode 100755 third_party/glm/ext/quaternion_common.hpp delete mode 100755 third_party/glm/ext/quaternion_common.inl delete mode 100755 third_party/glm/ext/quaternion_common_simd.inl delete mode 100755 third_party/glm/ext/quaternion_double.hpp delete mode 100755 third_party/glm/ext/quaternion_double_precision.hpp delete mode 100755 third_party/glm/ext/quaternion_exponential.hpp delete mode 100755 third_party/glm/ext/quaternion_exponential.inl delete mode 100755 third_party/glm/ext/quaternion_float.hpp delete mode 100755 third_party/glm/ext/quaternion_float_precision.hpp delete mode 100755 third_party/glm/ext/quaternion_geometric.hpp delete mode 100755 third_party/glm/ext/quaternion_geometric.inl delete mode 100755 third_party/glm/ext/quaternion_relational.hpp delete mode 100755 third_party/glm/ext/quaternion_relational.inl delete mode 100755 third_party/glm/ext/quaternion_transform.hpp delete mode 100755 third_party/glm/ext/quaternion_transform.inl delete mode 100755 third_party/glm/ext/quaternion_trigonometric.hpp delete mode 100755 third_party/glm/ext/quaternion_trigonometric.inl delete mode 100755 
third_party/glm/ext/scalar_common.hpp delete mode 100755 third_party/glm/ext/scalar_common.inl delete mode 100755 third_party/glm/ext/scalar_constants.hpp delete mode 100755 third_party/glm/ext/scalar_constants.inl delete mode 100755 third_party/glm/ext/scalar_int_sized.hpp delete mode 100755 third_party/glm/ext/scalar_integer.hpp delete mode 100755 third_party/glm/ext/scalar_integer.inl delete mode 100755 third_party/glm/ext/scalar_relational.hpp delete mode 100755 third_party/glm/ext/scalar_relational.inl delete mode 100755 third_party/glm/ext/scalar_uint_sized.hpp delete mode 100755 third_party/glm/ext/scalar_ulp.hpp delete mode 100755 third_party/glm/ext/scalar_ulp.inl delete mode 100755 third_party/glm/ext/vector_bool1.hpp delete mode 100755 third_party/glm/ext/vector_bool1_precision.hpp delete mode 100755 third_party/glm/ext/vector_bool2.hpp delete mode 100755 third_party/glm/ext/vector_bool2_precision.hpp delete mode 100755 third_party/glm/ext/vector_bool3.hpp delete mode 100755 third_party/glm/ext/vector_bool3_precision.hpp delete mode 100755 third_party/glm/ext/vector_bool4.hpp delete mode 100755 third_party/glm/ext/vector_bool4_precision.hpp delete mode 100755 third_party/glm/ext/vector_common.hpp delete mode 100755 third_party/glm/ext/vector_common.inl delete mode 100755 third_party/glm/ext/vector_double1.hpp delete mode 100755 third_party/glm/ext/vector_double1_precision.hpp delete mode 100755 third_party/glm/ext/vector_double2.hpp delete mode 100755 third_party/glm/ext/vector_double2_precision.hpp delete mode 100755 third_party/glm/ext/vector_double3.hpp delete mode 100755 third_party/glm/ext/vector_double3_precision.hpp delete mode 100755 third_party/glm/ext/vector_double4.hpp delete mode 100755 third_party/glm/ext/vector_double4_precision.hpp delete mode 100755 third_party/glm/ext/vector_float1.hpp delete mode 100755 third_party/glm/ext/vector_float1_precision.hpp delete mode 100755 third_party/glm/ext/vector_float2.hpp delete mode 100755 third_party/glm/ext/vector_float2_precision.hpp delete mode 100755 third_party/glm/ext/vector_float3.hpp delete mode 100755 third_party/glm/ext/vector_float3_precision.hpp delete mode 100755 third_party/glm/ext/vector_float4.hpp delete mode 100755 third_party/glm/ext/vector_float4_precision.hpp delete mode 100755 third_party/glm/ext/vector_int1.hpp delete mode 100755 third_party/glm/ext/vector_int1_precision.hpp delete mode 100755 third_party/glm/ext/vector_int2.hpp delete mode 100755 third_party/glm/ext/vector_int2_precision.hpp delete mode 100755 third_party/glm/ext/vector_int3.hpp delete mode 100755 third_party/glm/ext/vector_int3_precision.hpp delete mode 100755 third_party/glm/ext/vector_int4.hpp delete mode 100755 third_party/glm/ext/vector_int4_precision.hpp delete mode 100755 third_party/glm/ext/vector_integer.hpp delete mode 100755 third_party/glm/ext/vector_integer.inl delete mode 100755 third_party/glm/ext/vector_relational.hpp delete mode 100755 third_party/glm/ext/vector_relational.inl delete mode 100755 third_party/glm/ext/vector_uint1.hpp delete mode 100755 third_party/glm/ext/vector_uint1_precision.hpp delete mode 100755 third_party/glm/ext/vector_uint2.hpp delete mode 100755 third_party/glm/ext/vector_uint2_precision.hpp delete mode 100755 third_party/glm/ext/vector_uint3.hpp delete mode 100755 third_party/glm/ext/vector_uint3_precision.hpp delete mode 100755 third_party/glm/ext/vector_uint4.hpp delete mode 100755 third_party/glm/ext/vector_uint4_precision.hpp delete mode 100755 third_party/glm/ext/vector_ulp.hpp delete 
mode 100755 third_party/glm/ext/vector_ulp.inl delete mode 100755 third_party/glm/fwd.hpp delete mode 100755 third_party/glm/geometric.hpp delete mode 100755 third_party/glm/glm.hpp delete mode 100755 third_party/glm/gtc/bitfield.hpp delete mode 100755 third_party/glm/gtc/bitfield.inl delete mode 100755 third_party/glm/gtc/color_space.hpp delete mode 100755 third_party/glm/gtc/color_space.inl delete mode 100755 third_party/glm/gtc/constants.hpp delete mode 100755 third_party/glm/gtc/constants.inl delete mode 100755 third_party/glm/gtc/epsilon.hpp delete mode 100755 third_party/glm/gtc/epsilon.inl delete mode 100755 third_party/glm/gtc/integer.hpp delete mode 100755 third_party/glm/gtc/integer.inl delete mode 100755 third_party/glm/gtc/matrix_access.hpp delete mode 100755 third_party/glm/gtc/matrix_access.inl delete mode 100755 third_party/glm/gtc/matrix_integer.hpp delete mode 100755 third_party/glm/gtc/matrix_inverse.hpp delete mode 100755 third_party/glm/gtc/matrix_inverse.inl delete mode 100755 third_party/glm/gtc/matrix_transform.hpp delete mode 100755 third_party/glm/gtc/matrix_transform.inl delete mode 100755 third_party/glm/gtc/noise.hpp delete mode 100755 third_party/glm/gtc/noise.inl delete mode 100755 third_party/glm/gtc/packing.hpp delete mode 100755 third_party/glm/gtc/packing.inl delete mode 100755 third_party/glm/gtc/quaternion.hpp delete mode 100755 third_party/glm/gtc/quaternion.inl delete mode 100755 third_party/glm/gtc/quaternion_simd.inl delete mode 100755 third_party/glm/gtc/random.hpp delete mode 100755 third_party/glm/gtc/random.inl delete mode 100755 third_party/glm/gtc/reciprocal.hpp delete mode 100755 third_party/glm/gtc/reciprocal.inl delete mode 100755 third_party/glm/gtc/round.hpp delete mode 100755 third_party/glm/gtc/round.inl delete mode 100755 third_party/glm/gtc/type_aligned.hpp delete mode 100755 third_party/glm/gtc/type_precision.hpp delete mode 100755 third_party/glm/gtc/type_precision.inl delete mode 100755 third_party/glm/gtc/type_ptr.hpp delete mode 100755 third_party/glm/gtc/type_ptr.inl delete mode 100755 third_party/glm/gtc/ulp.hpp delete mode 100755 third_party/glm/gtc/ulp.inl delete mode 100755 third_party/glm/gtc/vec1.hpp delete mode 100755 third_party/glm/gtx/associated_min_max.hpp delete mode 100755 third_party/glm/gtx/associated_min_max.inl delete mode 100755 third_party/glm/gtx/bit.hpp delete mode 100755 third_party/glm/gtx/bit.inl delete mode 100755 third_party/glm/gtx/closest_point.hpp delete mode 100755 third_party/glm/gtx/closest_point.inl delete mode 100755 third_party/glm/gtx/color_encoding.hpp delete mode 100755 third_party/glm/gtx/color_encoding.inl delete mode 100755 third_party/glm/gtx/color_space.hpp delete mode 100755 third_party/glm/gtx/color_space.inl delete mode 100755 third_party/glm/gtx/color_space_YCoCg.hpp delete mode 100755 third_party/glm/gtx/color_space_YCoCg.inl delete mode 100755 third_party/glm/gtx/common.hpp delete mode 100755 third_party/glm/gtx/common.inl delete mode 100755 third_party/glm/gtx/compatibility.hpp delete mode 100755 third_party/glm/gtx/compatibility.inl delete mode 100755 third_party/glm/gtx/component_wise.hpp delete mode 100755 third_party/glm/gtx/component_wise.inl delete mode 100755 third_party/glm/gtx/dual_quaternion.hpp delete mode 100755 third_party/glm/gtx/dual_quaternion.inl delete mode 100755 third_party/glm/gtx/easing.hpp delete mode 100755 third_party/glm/gtx/easing.inl delete mode 100755 third_party/glm/gtx/euler_angles.hpp delete mode 100755 third_party/glm/gtx/euler_angles.inl delete 
mode 100755 third_party/glm/gtx/extend.hpp delete mode 100755 third_party/glm/gtx/extend.inl delete mode 100755 third_party/glm/gtx/extended_min_max.hpp delete mode 100755 third_party/glm/gtx/extended_min_max.inl delete mode 100755 third_party/glm/gtx/exterior_product.hpp delete mode 100755 third_party/glm/gtx/exterior_product.inl delete mode 100755 third_party/glm/gtx/fast_exponential.hpp delete mode 100755 third_party/glm/gtx/fast_exponential.inl delete mode 100755 third_party/glm/gtx/fast_square_root.hpp delete mode 100755 third_party/glm/gtx/fast_square_root.inl delete mode 100755 third_party/glm/gtx/fast_trigonometry.hpp delete mode 100755 third_party/glm/gtx/fast_trigonometry.inl delete mode 100755 third_party/glm/gtx/float_notmalize.inl delete mode 100755 third_party/glm/gtx/functions.hpp delete mode 100755 third_party/glm/gtx/functions.inl delete mode 100755 third_party/glm/gtx/gradient_paint.hpp delete mode 100755 third_party/glm/gtx/gradient_paint.inl delete mode 100755 third_party/glm/gtx/handed_coordinate_space.hpp delete mode 100755 third_party/glm/gtx/handed_coordinate_space.inl delete mode 100755 third_party/glm/gtx/hash.hpp delete mode 100755 third_party/glm/gtx/hash.inl delete mode 100755 third_party/glm/gtx/integer.hpp delete mode 100755 third_party/glm/gtx/integer.inl delete mode 100755 third_party/glm/gtx/intersect.hpp delete mode 100755 third_party/glm/gtx/intersect.inl delete mode 100755 third_party/glm/gtx/io.hpp delete mode 100755 third_party/glm/gtx/io.inl delete mode 100755 third_party/glm/gtx/log_base.hpp delete mode 100755 third_party/glm/gtx/log_base.inl delete mode 100755 third_party/glm/gtx/matrix_cross_product.hpp delete mode 100755 third_party/glm/gtx/matrix_cross_product.inl delete mode 100755 third_party/glm/gtx/matrix_decompose.hpp delete mode 100755 third_party/glm/gtx/matrix_decompose.inl delete mode 100755 third_party/glm/gtx/matrix_factorisation.hpp delete mode 100755 third_party/glm/gtx/matrix_factorisation.inl delete mode 100755 third_party/glm/gtx/matrix_interpolation.hpp delete mode 100755 third_party/glm/gtx/matrix_interpolation.inl delete mode 100755 third_party/glm/gtx/matrix_major_storage.hpp delete mode 100755 third_party/glm/gtx/matrix_major_storage.inl delete mode 100755 third_party/glm/gtx/matrix_operation.hpp delete mode 100755 third_party/glm/gtx/matrix_operation.inl delete mode 100755 third_party/glm/gtx/matrix_query.hpp delete mode 100755 third_party/glm/gtx/matrix_query.inl delete mode 100755 third_party/glm/gtx/matrix_transform_2d.hpp delete mode 100755 third_party/glm/gtx/matrix_transform_2d.inl delete mode 100755 third_party/glm/gtx/mixed_product.hpp delete mode 100755 third_party/glm/gtx/mixed_product.inl delete mode 100755 third_party/glm/gtx/norm.hpp delete mode 100755 third_party/glm/gtx/norm.inl delete mode 100755 third_party/glm/gtx/normal.hpp delete mode 100755 third_party/glm/gtx/normal.inl delete mode 100755 third_party/glm/gtx/normalize_dot.hpp delete mode 100755 third_party/glm/gtx/normalize_dot.inl delete mode 100755 third_party/glm/gtx/number_precision.hpp delete mode 100755 third_party/glm/gtx/number_precision.inl delete mode 100755 third_party/glm/gtx/optimum_pow.hpp delete mode 100755 third_party/glm/gtx/optimum_pow.inl delete mode 100755 third_party/glm/gtx/orthonormalize.hpp delete mode 100755 third_party/glm/gtx/orthonormalize.inl delete mode 100755 third_party/glm/gtx/perpendicular.hpp delete mode 100755 third_party/glm/gtx/perpendicular.inl delete mode 100755 third_party/glm/gtx/polar_coordinates.hpp delete 
mode 100755 third_party/glm/gtx/polar_coordinates.inl delete mode 100755 third_party/glm/gtx/projection.hpp delete mode 100755 third_party/glm/gtx/projection.inl delete mode 100755 third_party/glm/gtx/quaternion.hpp delete mode 100755 third_party/glm/gtx/quaternion.inl delete mode 100755 third_party/glm/gtx/range.hpp delete mode 100755 third_party/glm/gtx/raw_data.hpp delete mode 100755 third_party/glm/gtx/raw_data.inl delete mode 100755 third_party/glm/gtx/rotate_normalized_axis.hpp delete mode 100755 third_party/glm/gtx/rotate_normalized_axis.inl delete mode 100755 third_party/glm/gtx/rotate_vector.hpp delete mode 100755 third_party/glm/gtx/rotate_vector.inl delete mode 100755 third_party/glm/gtx/scalar_multiplication.hpp delete mode 100755 third_party/glm/gtx/scalar_relational.hpp delete mode 100755 third_party/glm/gtx/scalar_relational.inl delete mode 100755 third_party/glm/gtx/spline.hpp delete mode 100755 third_party/glm/gtx/spline.inl delete mode 100755 third_party/glm/gtx/std_based_type.hpp delete mode 100755 third_party/glm/gtx/std_based_type.inl delete mode 100755 third_party/glm/gtx/string_cast.hpp delete mode 100755 third_party/glm/gtx/string_cast.inl delete mode 100755 third_party/glm/gtx/texture.hpp delete mode 100755 third_party/glm/gtx/texture.inl delete mode 100755 third_party/glm/gtx/transform.hpp delete mode 100755 third_party/glm/gtx/transform.inl delete mode 100755 third_party/glm/gtx/transform2.hpp delete mode 100755 third_party/glm/gtx/transform2.inl delete mode 100755 third_party/glm/gtx/type_aligned.hpp delete mode 100755 third_party/glm/gtx/type_aligned.inl delete mode 100755 third_party/glm/gtx/type_trait.hpp delete mode 100755 third_party/glm/gtx/type_trait.inl delete mode 100755 third_party/glm/gtx/vec_swizzle.hpp delete mode 100755 third_party/glm/gtx/vector_angle.hpp delete mode 100755 third_party/glm/gtx/vector_angle.inl delete mode 100755 third_party/glm/gtx/vector_query.hpp delete mode 100755 third_party/glm/gtx/vector_query.inl delete mode 100755 third_party/glm/gtx/wrap.hpp delete mode 100755 third_party/glm/gtx/wrap.inl delete mode 100755 third_party/glm/integer.hpp delete mode 100755 third_party/glm/mat2x2.hpp delete mode 100755 third_party/glm/mat2x3.hpp delete mode 100755 third_party/glm/mat2x4.hpp delete mode 100755 third_party/glm/mat3x2.hpp delete mode 100755 third_party/glm/mat3x3.hpp delete mode 100755 third_party/glm/mat3x4.hpp delete mode 100755 third_party/glm/mat4x2.hpp delete mode 100755 third_party/glm/mat4x3.hpp delete mode 100755 third_party/glm/mat4x4.hpp delete mode 100755 third_party/glm/matrix.hpp delete mode 100755 third_party/glm/packing.hpp delete mode 100755 third_party/glm/simd/common.h delete mode 100755 third_party/glm/simd/exponential.h delete mode 100755 third_party/glm/simd/geometric.h delete mode 100755 third_party/glm/simd/integer.h delete mode 100755 third_party/glm/simd/matrix.h delete mode 100755 third_party/glm/simd/neon.h delete mode 100755 third_party/glm/simd/packing.h delete mode 100755 third_party/glm/simd/platform.h delete mode 100755 third_party/glm/simd/trigonometric.h delete mode 100755 third_party/glm/simd/vector_relational.h delete mode 100755 third_party/glm/trigonometric.hpp delete mode 100755 third_party/glm/vec2.hpp delete mode 100755 third_party/glm/vec3.hpp delete mode 100755 third_party/glm/vec4.hpp delete mode 100755 third_party/glm/vector_relational.hpp create mode 100755 third_party/kvf.h delete mode 100644 third_party/volk.c delete mode 100644 third_party/volk.h diff --git a/Makefile b/Makefile 
index b0b4275..cb43f27 100644 --- a/Makefile +++ b/Makefile @@ -1,21 +1,12 @@ -# **************************************************************************** # -# # -# ::: :::::::: # -# Makefile :+: :+: :+: # -# +:+ +:+ +:+ # -# By: maldavid +#+ +:+ +#+ # -# +#+#+#+#+#+ +#+ # -# Created: 2022/10/04 16:43:41 by maldavid #+# #+# # -# Updated: 2024/07/05 13:34:03 by maldavid ### ########.fr # -# # -# **************************************************************************** # - NAME = libmlx.so SRCS = $(wildcard $(addsuffix /*.cpp, ./runtime/Sources/Core)) SRCS += $(wildcard $(addsuffix /*.cpp, ./runtime/Sources/Platform)) +SRCS += $(wildcard $(addsuffix /*.cpp, ./runtime/Sources/Graphics)) SRCS += $(wildcard $(addsuffix /*.cpp, ./runtime/Sources/Renderer)) -SRCS += $(wildcard $(addsuffix /*.cpp, ./runtime/Sources/Renderer/**)) +SRCS += $(wildcard $(addsuffix /*.cpp, ./runtime/Sources/Renderer/Vulkan)) +SRCS += $(wildcard $(addsuffix /*.cpp, ./runtime/Sources/Renderer/Pipelines)) +SRCS += $(wildcard $(addsuffix /*.cpp, ./runtime/Sources/Renderer/RenderPasses)) OBJ_DIR = objs/makefile OBJS = $(addprefix $(OBJ_DIR)/, $(SRCS:.cpp=.o)) @@ -37,7 +28,7 @@ MODE = "release" CXX = clang++ CXXFLAGS = -std=c++17 -O3 -fPIC -Wall -Wextra -Wno-deprecated -DSDL_MAIN_HANDLED -INCLUDES = -I./includes -I./runtime/Includes -I./third_party +INCLUDES = -I./includes -I./runtime/Includes -I./runtime/Sources -I./third_party LDLIBS = diff --git a/runtime/Includes/Core/Application.h b/runtime/Includes/Core/Application.h index fed04e2..ec74102 100644 --- a/runtime/Includes/Core/Application.h +++ b/runtime/Includes/Core/Application.h @@ -1,15 +1,3 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* Application.h :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2022/10/04 21:49:46 by maldavid #+# #+# */ -/* Updated: 2024/07/05 14:04:19 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - #ifndef __MLX_APPLICATION__ #define __MLX_APPLICATION__ @@ -26,32 +14,32 @@ namespace mlx Application(); inline void GetMousePos(int* x, int* y) noexcept; - inline void MouseMove(void* win, int x, int y) noexcept; + inline void MouseMove(Handle win, int x, int y) noexcept; - inline void OnEvent(void* win, int event, int (*funct_ptr)(int, void*), void* param) noexcept; + inline void OnEvent(Handle win, int event, int (*funct_ptr)(int, void*), void* param) noexcept; - inline void GetScreenSize(void* win, int* w, int* h) noexcept; + inline void GetScreenSize(Handle win, int* w, int* h) noexcept; inline void SetFPSCap(std::uint32_t fps) noexcept; - inline void* NewGraphicsSuport(std::size_t w, std::size_t h, const char* title); - inline void ClearGraphicsSupport(void* win); - inline void DestroyGraphicsSupport(void* win); + inline Handle NewGraphicsSuport(std::size_t w, std::size_t h, const char* title); + inline void ClearGraphicsSupport(Handle win); + inline void DestroyGraphicsSupport(Handle win); - inline void PixelPut(void* win, int x, int y, std::uint32_t color) const noexcept; - inline void StringPut(void* win, int x, int y, std::uint32_t color, char* str); + inline void PixelPut(Handle win, int x, int y, std::uint32_t color) const noexcept; + inline void StringPut(Handle win, int x, int y, std::uint32_t color, char* str); - void* NewTexture(int w, int h); - void* NewStbTexture(char* file, int* w, int* h); // stb textures are image files (png, jpg, 
bpm, ...) - inline void TexturePut(void* win, void* img, int x, int y); - inline int GetTexturePixel(void* img, int x, int y); - inline void SetTexturePixel(void* img, int x, int y, std::uint32_t color); - void DestroyTexture(void* ptr); + Handle NewTexture(int w, int h); + Handle NewStbTexture(char* file, int* w, int* h); // stb textures are image files (png, jpg, bpm, ...) + inline void TexturePut(Handle win, Handle img, int x, int y); + inline int GetTexturePixel(Handle img, int x, int y); + inline void SetTexturePixel(Handle img, int x, int y, std::uint32_t color); + void DestroyTexture(Handle ptr); inline void LoopHook(int (*f)(void*), void* param); inline void LoopEnd() noexcept; - inline void LoadFont(void* win, const std::filesystem::path& filepath, float scale); + inline void LoadFont(Handle win, const std::filesystem::path& filepath, float scale); void Run() noexcept; @@ -62,8 +50,8 @@ namespace mlx Inputs m_in; ImageRegistry m_image_registry; std::vector> m_graphics; - std::function f_loop_hook; - void* p_param = nullptr; + std::function f_loop_hook; + Handle p_param = nullptr; }; } diff --git a/runtime/Includes/Core/Application.inl b/runtime/Includes/Core/Application.inl index cd95318..3193aee 100644 --- a/runtime/Includes/Core/Application.inl +++ b/runtime/Includes/Core/Application.inl @@ -1,15 +1,4 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* Application.inl :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2022/10/04 21:49:46 by maldavid #+# #+# */ -/* Updated: 2024/04/23 14:45:07 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - +#pragma once #include #define CHECK_WINDOW_PTR(win) \ diff --git a/runtime/Includes/Core/Enums.h b/runtime/Includes/Core/Enums.h index 68f036b..082373e 100644 --- a/runtime/Includes/Core/Enums.h +++ b/runtime/Includes/Core/Enums.h @@ -1,15 +1,3 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* Enums.h :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2024/03/27 17:15:24 by maldavid #+# #+# */ -/* Updated: 2024/07/05 13:23:10 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - #ifndef __MLX_CORE_ENUMS__ #define __MLX_CORE_ENUMS__ @@ -27,8 +15,18 @@ namespace mlx EndEnum }; - constexpr std::size_t LogTypeCount = static_cast(LogType::EndEnum); + + enum class Event + { + ResizeEventCode = 56, + FrameBeginEventCode = 57, + FatalErrorEventCode = 168, + QuitEventCode = 168, + + EndEnum + }; + constexpr std::size_t EventCount = static_cast(Event::EndEnum); } #endif diff --git a/runtime/Includes/Core/EventBase.h b/runtime/Includes/Core/EventBase.h index 37b5944..7f4464a 100644 --- a/runtime/Includes/Core/EventBase.h +++ b/runtime/Includes/Core/EventBase.h @@ -1,15 +1,3 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* EventBase.h :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2024/03/27 17:27:22 by maldavid #+# #+# */ -/* Updated: 2024/03/27 17:31:16 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - #ifndef __MLX_BASE_EVENT__ #define __MLX_BASE_EVENT__ diff --git 
a/runtime/Includes/Core/EventBus.h b/runtime/Includes/Core/EventBus.h index db110d3..4b51f15 100644 --- a/runtime/Includes/Core/EventBus.h +++ b/runtime/Includes/Core/EventBus.h @@ -1,15 +1,3 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* EventBus.h :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2024/03/27 17:30:36 by maldavid #+# #+# */ -/* Updated: 2024/03/27 17:31:41 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - #ifndef __MLX_EVENT_BUS__ #define __MLX_EVENT_BUS__ diff --git a/runtime/Includes/Core/EventListener.h b/runtime/Includes/Core/EventListener.h index d6c002c..4907aa4 100644 --- a/runtime/Includes/Core/EventListener.h +++ b/runtime/Includes/Core/EventListener.h @@ -1,15 +1,3 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* EventListener.h :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2024/03/27 17:28:17 by maldavid #+# #+# */ -/* Updated: 2024/03/27 17:37:53 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - #ifndef __MLX_EVENT_LISTENER__ #define __MLX_EVENT_LISTENER__ diff --git a/runtime/Includes/Core/Format.h b/runtime/Includes/Core/Format.h index d55b824..e948709 100644 --- a/runtime/Includes/Core/Format.h +++ b/runtime/Includes/Core/Format.h @@ -1,15 +1,3 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* Format.h :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2024/03/27 17:11:09 by maldavid #+# #+# */ -/* Updated: 2024/03/27 17:12:03 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - #ifndef __MLX_FORMAT__ #define __MLX_FORMAT__ diff --git a/runtime/Includes/Core/Format.inl b/runtime/Includes/Core/Format.inl index e6d94d2..3bc490a 100644 --- a/runtime/Includes/Core/Format.inl +++ b/runtime/Includes/Core/Format.inl @@ -1,15 +1,4 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* Format.inl :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2024/03/27 17:11:09 by maldavid #+# #+# */ -/* Updated: 2024/03/27 17:12:03 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - +#pragma once #include #include #include diff --git a/runtime/Includes/Core/Fps.h b/runtime/Includes/Core/Fps.h index 0803bed..6cebc77 100644 --- a/runtime/Includes/Core/Fps.h +++ b/runtime/Includes/Core/Fps.h @@ -1,15 +1,3 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* Fps.h :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2024/01/18 14:53:30 by maldavid #+# #+# */ -/* Updated: 2024/03/27 20:52:06 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - #ifndef __MLX_FPS__ #define __MLX_FPS__ diff --git a/runtime/Includes/Core/Graphics.h b/runtime/Includes/Core/Graphics.h index 8ab5bbf..842c2c9 100644 --- a/runtime/Includes/Core/Graphics.h +++ 
b/runtime/Includes/Core/Graphics.h @@ -1,15 +1,3 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* Graphics.h :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2023/04/02 14:49:49 by maldavid #+# #+# */ -/* Updated: 2024/07/05 13:46:58 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - #ifndef __MLX_GRAPHICS__ #define __MLX_GRAPHICS__ diff --git a/runtime/Includes/Core/Graphics.inl b/runtime/Includes/Core/Graphics.inl index 50573a1..10286cc 100644 --- a/runtime/Includes/Core/Graphics.inl +++ b/runtime/Includes/Core/Graphics.inl @@ -1,15 +1,4 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* graphics.inl :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2023/04/02 15:13:55 by maldavid #+# #+# */ -/* Updated: 2023/04/02 15:26:16 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - +#pragma once #include namespace mlx diff --git a/runtime/Includes/Core/ImagesRegistry.h b/runtime/Includes/Core/ImagesRegistry.h index e2bd494..01c471b 100644 --- a/runtime/Includes/Core/ImagesRegistry.h +++ b/runtime/Includes/Core/ImagesRegistry.h @@ -1,15 +1,3 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* ImagesRegistry.h :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2024/04/03 15:11:47 by maldavid #+# #+# */ -/* Updated: 2024/04/21 20:31:00 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - #ifndef __MLX_CORE_IMAGES_REGISTRY__ #define __MLX_CORE_IMAGES_REGISTRY__ diff --git a/runtime/Includes/Core/ImagesRegistry.inl b/runtime/Includes/Core/ImagesRegistry.inl index 2a69334..c7a2909 100644 --- a/runtime/Includes/Core/ImagesRegistry.inl +++ b/runtime/Includes/Core/ImagesRegistry.inl @@ -1,8 +1,3 @@ -// This file is a part of Akel -// Authors : @kbz_8 -// Created : 21/04/2024 -// Updated : 21/04/2024 - #pragma once #include diff --git a/runtime/Includes/Core/Logs.h b/runtime/Includes/Core/Logs.h index edd816b..906f969 100644 --- a/runtime/Includes/Core/Logs.h +++ b/runtime/Includes/Core/Logs.h @@ -1,15 +1,3 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* Logs.h :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2024/03/27 17:14:10 by maldavid #+# #+# */ -/* Updated: 2024/03/27 17:19:23 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - #ifndef __MLX_LOGS__ #define __MLX_LOGS__ diff --git a/runtime/Includes/Core/Logs.inl b/runtime/Includes/Core/Logs.inl index 2c33f55..744fbaa 100644 --- a/runtime/Includes/Core/Logs.inl +++ b/runtime/Includes/Core/Logs.inl @@ -1,15 +1,4 @@ -/* **************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* Logs.inl :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2024/03/27 17:19:47 by maldavid #+# #+# */ -/* Updated: 2024/03/27 17:19:47 by maldavid ### ########.fr */ -/* */ -/* 
**************************************************************************** */ - +#pragma once #include #include diff --git a/runtime/Includes/Core/Memory.h b/runtime/Includes/Core/Memory.h index db11105..4a51eb5 100644 --- a/runtime/Includes/Core/Memory.h +++ b/runtime/Includes/Core/Memory.h @@ -1,15 +1,3 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* Memory.h :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2023/12/07 16:31:51 by kbz_8 #+# #+# */ -/* Updated: 2024/03/27 21:16:44 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - #ifndef __MLX_MEMORY__ #define __MLX_MEMORY__ diff --git a/runtime/Includes/Core/Profiler.h b/runtime/Includes/Core/Profiler.h index 9a24f42..ec977bc 100644 --- a/runtime/Includes/Core/Profiler.h +++ b/runtime/Includes/Core/Profiler.h @@ -1,15 +1,3 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* Profiler.h :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2024/01/10 13:35:45 by maldavid #+# #+# */ -/* Updated: 2024/07/05 13:24:17 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - #ifndef __MLX_PROFILER__ #define __MLX_PROFILER__ diff --git a/runtime/Includes/Core/SDLManager.h b/runtime/Includes/Core/SDLManager.h index 8ad7ea7..5fcac81 100644 --- a/runtime/Includes/Core/SDLManager.h +++ b/runtime/Includes/Core/SDLManager.h @@ -1,19 +1,7 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* SDLManager.h :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2024/05/25 15:28:59 by maldavid #+# #+# */ -/* Updated: 2024/07/05 22:15:22 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - #ifndef __MLX_SDL_MANAGER__ #define __MLX_SDL_MANAGER__ -#include +#include namespace mlx { @@ -25,17 +13,27 @@ namespace mlx void Init() noexcept; void Shutdown() noexcept; - void* CreateWindow(const std::string& title, std::size_t w, std::size_t h); - void DestroyWindow(void* window) noexcept; + Handle CreateWindow(const std::string& title, std::size_t w, std::size_t h, bool hidden); + void DestroyWindow(Handle window) noexcept; - void SetEventCallback(); + VkSurfaceKHR CreateVulkanSurface(Handle window, VkInstance instance) const noexcept; + std::vector GetRequiredVulkanInstanceExtentions(Handle window) const noexcept; + Vec2ui GetVulkanDrawableSize(Handle window) const noexcept; + + inline void SetEventCallback(func::function functor, void* userdata) { f_callback = std::move(functor); p_callback_data = userdata; } private: SDLManager() = default; ~SDLManager() = default; private: - std::unordered_set m_windows_registry; + std::unordered_set m_windows_registry; + func::function f_callback; + void* p_callback_data = nullptr; + std::int32_t m_x; + std::int32_t m_y; + std::int32_t m_rel_x; + std::int32_t m_rel_y; bool m_drop_sdl_responsability = false; }; } diff --git a/runtime/Includes/Core/UUID.h b/runtime/Includes/Core/UUID.h index 0b4075f..9911302 100644 --- a/runtime/Includes/Core/UUID.h +++ b/runtime/Includes/Core/UUID.h @@ -1,15 +1,3 @@ -/* ************************************************************************** */ -/* */ 
-/* ::: :::::::: */ -/* UUID.h :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2024/01/06 11:13:23 by maldavid #+# #+# */ -/* Updated: 2024/03/27 21:19:18 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - #ifndef __MLX_UUID__ #define __MLX_UUID__ diff --git a/runtime/Includes/Embedded/2DFragment.nzsl b/runtime/Includes/Embedded/2DFragment.nzsl new file mode 100644 index 0000000..a972887 --- /dev/null +++ b/runtime/Includes/Embedded/2DFragment.nzsl @@ -0,0 +1,28 @@ +[nzsl_version("1.0")] +module; + +struct VertOut +{ + [location(0)] color: vec4[f32], + [location(1)] uv: vec2[f32] +} + +struct FragOut +{ + [location(0)] color: vec4[f32] +} + +external +{ + [set(1), binding(0)] u_texture: sampler2D[f32] +} + +[entry(frag)] +fn main(input: VertOut) -> FragOut +{ + let output: FragOut; + output.color = input.color * u_texture.Sample(input.uv); + if(output.color.w == 0.0) + discard; + return output; +} diff --git a/runtime/Includes/Embedded/2DFragment.spv.h b/runtime/Includes/Embedded/2DFragment.spv.h new file mode 100644 index 0000000..5792158 --- /dev/null +++ b/runtime/Includes/Embedded/2DFragment.spv.h @@ -0,0 +1,44 @@ +3,2,35,7,0,0,1,0,39,0,0,0,51,0,0,0,0,0,0,0,17,0,2,0,1,0,0,0,14,0, +3,0,0,0,0,0,1,0,0,0,15,0,8,0,4,0,0,0,28,0,0,0,109,97,105,110,0,0,0,0, +10,0,0,0,16,0,0,0,22,0,0,0,16,0,3,0,28,0,0,0,7,0,0,0,3,0,3,0,0,0, +0,0,100,0,0,0,5,0,4,0,19,0,0,0,86,101,114,116,79,117,116,0,6,0,5,0,19,0,0,0, +0,0,0,0,99,111,108,111,114,0,0,0,6,0,4,0,19,0,0,0,1,0,0,0,117,118,0,0,5,0, +4,0,23,0,0,0,70,114,97,103,79,117,116,0,6,0,5,0,23,0,0,0,0,0,0,0,99,111,108,111, +114,0,0,0,5,0,5,0,5,0,0,0,117,95,116,101,120,116,117,114,101,0,0,0,5,0,4,0,10,0, +0,0,99,111,108,111,114,0,0,0,5,0,3,0,16,0,0,0,117,118,0,0,5,0,4,0,22,0,0,0, +99,111,108,111,114,0,0,0,5,0,4,0,28,0,0,0,109,97,105,110,0,0,0,0,71,0,4,0,5,0, +0,0,33,0,0,0,0,0,0,0,71,0,4,0,5,0,0,0,34,0,0,0,1,0,0,0,71,0,4,0, +10,0,0,0,30,0,0,0,0,0,0,0,71,0,4,0,16,0,0,0,30,0,0,0,1,0,0,0,71,0, +4,0,22,0,0,0,30,0,0,0,0,0,0,0,72,0,5,0,19,0,0,0,0,0,0,0,35,0,0,0, +0,0,0,0,72,0,5,0,19,0,0,0,1,0,0,0,35,0,0,0,16,0,0,0,72,0,5,0,23,0, +0,0,0,0,0,0,35,0,0,0,0,0,0,0,22,0,3,0,1,0,0,0,32,0,0,0,25,0,9,0, +2,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0, +0,0,27,0,3,0,3,0,0,0,2,0,0,0,32,0,4,0,4,0,0,0,0,0,0,0,3,0,0,0, +19,0,2,0,6,0,0,0,33,0,3,0,7,0,0,0,6,0,0,0,23,0,4,0,8,0,0,0,1,0, +0,0,4,0,0,0,32,0,4,0,9,0,0,0,1,0,0,0,8,0,0,0,21,0,4,0,11,0,0,0, +32,0,0,0,1,0,0,0,43,0,4,0,11,0,0,0,12,0,0,0,0,0,0,0,32,0,4,0,13,0, +0,0,7,0,0,0,8,0,0,0,23,0,4,0,14,0,0,0,1,0,0,0,2,0,0,0,32,0,4,0, +15,0,0,0,1,0,0,0,14,0,0,0,43,0,4,0,11,0,0,0,17,0,0,0,1,0,0,0,32,0, +4,0,18,0,0,0,7,0,0,0,14,0,0,0,30,0,4,0,19,0,0,0,8,0,0,0,14,0,0,0, +32,0,4,0,20,0,0,0,7,0,0,0,19,0,0,0,32,0,4,0,21,0,0,0,3,0,0,0,8,0, +0,0,30,0,3,0,23,0,0,0,8,0,0,0,32,0,4,0,24,0,0,0,7,0,0,0,23,0,0,0, +43,0,4,0,11,0,0,0,25,0,0,0,3,0,0,0,43,0,4,0,1,0,0,0,26,0,0,0,0,0, +0,0,20,0,2,0,27,0,0,0,59,0,4,0,4,0,0,0,5,0,0,0,0,0,0,0,59,0,4,0, +9,0,0,0,10,0,0,0,1,0,0,0,59,0,4,0,15,0,0,0,16,0,0,0,1,0,0,0,59,0, +4,0,21,0,0,0,22,0,0,0,3,0,0,0,54,0,5,0,6,0,0,0,28,0,0,0,0,0,0,0, +7,0,0,0,248,0,2,0,29,0,0,0,59,0,4,0,24,0,0,0,30,0,0,0,7,0,0,0,59,0, +4,0,20,0,0,0,31,0,0,0,7,0,0,0,65,0,5,0,13,0,0,0,32,0,0,0,31,0,0,0, +12,0,0,0,63,0,3,0,32,0,0,0,10,0,0,0,65,0,5,0,18,0,0,0,33,0,0,0,31,0, +0,0,17,0,0,0,63,0,3,0,33,0,0,0,16,0,0,0,65,0,5,0,13,0,0,0,34,0,0,0, +31,0,0,0,12,0,0,0,61,0,4,0,8,0,0,0,35,0,0,0,34,0,0,0,61,0,4,0,3,0, 
+0,0,36,0,0,0,5,0,0,0,65,0,5,0,18,0,0,0,37,0,0,0,31,0,0,0,17,0,0,0, +61,0,4,0,14,0,0,0,38,0,0,0,37,0,0,0,87,0,5,0,8,0,0,0,39,0,0,0,36,0, +0,0,38,0,0,0,133,0,5,0,8,0,0,0,40,0,0,0,35,0,0,0,39,0,0,0,65,0,5,0, +13,0,0,0,41,0,0,0,30,0,0,0,12,0,0,0,62,0,3,0,41,0,0,0,40,0,0,0,65,0, +5,0,13,0,0,0,45,0,0,0,30,0,0,0,12,0,0,0,61,0,4,0,8,0,0,0,46,0,0,0, +45,0,0,0,81,0,5,0,1,0,0,0,47,0,0,0,46,0,0,0,3,0,0,0,180,0,5,0,27,0, +0,0,48,0,0,0,47,0,0,0,26,0,0,0,247,0,3,0,42,0,0,0,0,0,0,0,250,0,4,0, +48,0,0,0,43,0,0,0,44,0,0,0,248,0,2,0,43,0,0,0,252,0,1,0,248,0,2,0,44,0, +0,0,249,0,2,0,42,0,0,0,248,0,2,0,42,0,0,0,61,0,4,0,23,0,0,0,49,0,0,0, +30,0,0,0,81,0,5,0,8,0,0,0,50,0,0,0,49,0,0,0,0,0,0,0,62,0,3,0,22,0, +0,0,50,0,0,0,253,0,1,0,56,0,1,0 diff --git a/runtime/Includes/Embedded/2DVertex.nzsl b/runtime/Includes/Embedded/2DVertex.nzsl new file mode 100644 index 0000000..ec65918 --- /dev/null +++ b/runtime/Includes/Embedded/2DVertex.nzsl @@ -0,0 +1,45 @@ +[nzsl_version("1.0")] +module; + +struct VertIn +{ + [location(0)] pos: vec4[f32], + [location(1)] color: vec4[f32], // unused + [location(2)] normal: vec4[f32], // unused + [location(3)] uv: vec2[f32] +} + +struct VertOut +{ + [location(0)] color: vec4[f32], + [location(1)] uv: vec2[f32], + [builtin(position)] pos: vec4[f32] +} + +struct ViewerData +{ + projection_matrix: mat4[f32] +} + +struct SpriteData +{ + color: vec4[f32], + position: vec2[f32] +} + +external +{ + [set(0), binding(0)] viewer_data: uniform[ViewerData], + model : push_constant[SpriteData] +} + +[entry(vert)] +fn main(input: VertIn) -> VertOut +{ + input.uv.x *= -1.0; + let output: VertOut; + output.uv = input.uv; + output.color = model.color; + output.pos = viewer_data.projection_matrix * vec4[f32](input.pos.xy + model.position, 0.0, 1.0); + return output; +} diff --git a/runtime/Includes/Embedded/2DVertex.spv.h b/runtime/Includes/Embedded/2DVertex.spv.h new file mode 100644 index 0000000..94cf250 --- /dev/null +++ b/runtime/Includes/Embedded/2DVertex.spv.h @@ -0,0 +1,80 @@ +3,2,35,7,0,0,1,0,39,0,0,0,77,0,0,0,0,0,0,0,17,0,2,0,1,0,0,0,14,0, +3,0,0,0,0,0,1,0,0,0,15,0,12,0,0,0,0,0,37,0,0,0,109,97,105,110,0,0,0,0, +14,0,0,0,18,0,0,0,20,0,0,0,23,0,0,0,29,0,0,0,31,0,0,0,32,0,0,0,3,0, +3,0,0,0,0,0,100,0,0,0,5,0,5,0,4,0,0,0,86,105,101,119,101,114,68,97,116,97,0,0, +6,0,8,0,4,0,0,0,0,0,0,0,112,114,111,106,101,99,116,105,111,110,95,109,97,116,114,105,120,0, +0,0,5,0,5,0,8,0,0,0,83,112,114,105,116,101,68,97,116,97,0,0,6,0,5,0,8,0,0,0, +0,0,0,0,99,111,108,111,114,0,0,0,6,0,6,0,8,0,0,0,1,0,0,0,112,111,115,105,116,105, +111,110,0,0,0,0,5,0,4,0,26,0,0,0,86,101,114,116,73,110,0,0,6,0,4,0,26,0,0,0, +0,0,0,0,112,111,115,0,6,0,5,0,26,0,0,0,1,0,0,0,99,111,108,111,114,0,0,0,6,0, +5,0,26,0,0,0,2,0,0,0,110,111,114,109,97,108,0,0,6,0,4,0,26,0,0,0,3,0,0,0, +117,118,0,0,5,0,4,0,33,0,0,0,86,101,114,116,79,117,116,0,6,0,5,0,33,0,0,0,0,0, +0,0,99,111,108,111,114,0,0,0,6,0,4,0,33,0,0,0,1,0,0,0,117,118,0,0,6,0,4,0, +33,0,0,0,2,0,0,0,112,111,115,0,5,0,5,0,6,0,0,0,118,105,101,119,101,114,95,100,97,116, +97,0,5,0,4,0,10,0,0,0,109,111,100,101,108,0,0,0,5,0,3,0,14,0,0,0,112,111,115,0, +5,0,4,0,18,0,0,0,99,111,108,111,114,0,0,0,5,0,4,0,20,0,0,0,110,111,114,109,97,108, +0,0,5,0,3,0,23,0,0,0,117,118,0,0,5,0,4,0,29,0,0,0,99,111,108,111,114,0,0,0, +5,0,3,0,31,0,0,0,117,118,0,0,5,0,5,0,32,0,0,0,112,111,115,105,116,105,111,110,0,0, +0,0,5,0,4,0,37,0,0,0,109,97,105,110,0,0,0,0,71,0,4,0,6,0,0,0,33,0,0,0, +0,0,0,0,71,0,4,0,6,0,0,0,34,0,0,0,0,0,0,0,71,0,4,0,32,0,0,0,11,0, +0,0,0,0,0,0,71,0,4,0,14,0,0,0,30,0,0,0,0,0,0,0,71,0,4,0,18,0,0,0, 
+30,0,0,0,1,0,0,0,71,0,4,0,20,0,0,0,30,0,0,0,2,0,0,0,71,0,4,0,23,0, +0,0,30,0,0,0,3,0,0,0,71,0,4,0,29,0,0,0,30,0,0,0,0,0,0,0,71,0,4,0, +31,0,0,0,30,0,0,0,1,0,0,0,71,0,3,0,4,0,0,0,2,0,0,0,72,0,4,0,4,0, +0,0,0,0,0,0,5,0,0,0,72,0,5,0,4,0,0,0,0,0,0,0,7,0,0,0,16,0,0,0, +72,0,5,0,4,0,0,0,0,0,0,0,35,0,0,0,0,0,0,0,71,0,3,0,8,0,0,0,2,0, +0,0,72,0,5,0,8,0,0,0,0,0,0,0,35,0,0,0,0,0,0,0,72,0,5,0,8,0,0,0, +1,0,0,0,35,0,0,0,16,0,0,0,72,0,5,0,26,0,0,0,0,0,0,0,35,0,0,0,0,0, +0,0,72,0,5,0,26,0,0,0,1,0,0,0,35,0,0,0,16,0,0,0,72,0,5,0,26,0,0,0, +2,0,0,0,35,0,0,0,32,0,0,0,72,0,5,0,26,0,0,0,3,0,0,0,35,0,0,0,48,0, +0,0,72,0,5,0,33,0,0,0,0,0,0,0,35,0,0,0,0,0,0,0,72,0,5,0,33,0,0,0, +1,0,0,0,35,0,0,0,16,0,0,0,72,0,5,0,33,0,0,0,2,0,0,0,35,0,0,0,32,0, +0,0,22,0,3,0,1,0,0,0,32,0,0,0,23,0,4,0,2,0,0,0,1,0,0,0,4,0,0,0, +24,0,4,0,3,0,0,0,2,0,0,0,4,0,0,0,30,0,3,0,4,0,0,0,3,0,0,0,32,0, +4,0,5,0,0,0,2,0,0,0,4,0,0,0,23,0,4,0,7,0,0,0,1,0,0,0,2,0,0,0, +30,0,4,0,8,0,0,0,2,0,0,0,7,0,0,0,32,0,4,0,9,0,0,0,9,0,0,0,8,0, +0,0,19,0,2,0,11,0,0,0,33,0,3,0,12,0,0,0,11,0,0,0,32,0,4,0,13,0,0,0, +1,0,0,0,2,0,0,0,21,0,4,0,15,0,0,0,32,0,0,0,1,0,0,0,43,0,4,0,15,0, +0,0,16,0,0,0,0,0,0,0,32,0,4,0,17,0,0,0,7,0,0,0,2,0,0,0,43,0,4,0, +15,0,0,0,19,0,0,0,1,0,0,0,43,0,4,0,15,0,0,0,21,0,0,0,2,0,0,0,32,0, +4,0,22,0,0,0,1,0,0,0,7,0,0,0,43,0,4,0,15,0,0,0,24,0,0,0,3,0,0,0, +32,0,4,0,25,0,0,0,7,0,0,0,7,0,0,0,30,0,6,0,26,0,0,0,2,0,0,0,2,0, +0,0,2,0,0,0,7,0,0,0,32,0,4,0,27,0,0,0,7,0,0,0,26,0,0,0,32,0,4,0, +28,0,0,0,3,0,0,0,2,0,0,0,32,0,4,0,30,0,0,0,3,0,0,0,7,0,0,0,30,0, +5,0,33,0,0,0,2,0,0,0,7,0,0,0,2,0,0,0,43,0,4,0,1,0,0,0,34,0,0,0, +0,0,128,63,32,0,4,0,35,0,0,0,7,0,0,0,33,0,0,0,43,0,4,0,1,0,0,0,36,0, +0,0,0,0,0,0,32,0,4,0,51,0,0,0,7,0,0,0,1,0,0,0,32,0,4,0,56,0,0,0, +9,0,0,0,2,0,0,0,32,0,4,0,60,0,0,0,2,0,0,0,3,0,0,0,32,0,4,0,66,0, +0,0,9,0,0,0,7,0,0,0,59,0,4,0,5,0,0,0,6,0,0,0,2,0,0,0,59,0,4,0, +9,0,0,0,10,0,0,0,9,0,0,0,59,0,4,0,13,0,0,0,14,0,0,0,1,0,0,0,59,0, +4,0,13,0,0,0,18,0,0,0,1,0,0,0,59,0,4,0,13,0,0,0,20,0,0,0,1,0,0,0, +59,0,4,0,22,0,0,0,23,0,0,0,1,0,0,0,59,0,4,0,28,0,0,0,29,0,0,0,3,0, +0,0,59,0,4,0,30,0,0,0,31,0,0,0,3,0,0,0,59,0,4,0,28,0,0,0,32,0,0,0, +3,0,0,0,54,0,5,0,11,0,0,0,37,0,0,0,0,0,0,0,12,0,0,0,248,0,2,0,38,0, +0,0,59,0,4,0,35,0,0,0,39,0,0,0,7,0,0,0,59,0,4,0,27,0,0,0,40,0,0,0, +7,0,0,0,65,0,5,0,17,0,0,0,41,0,0,0,40,0,0,0,16,0,0,0,63,0,3,0,41,0, +0,0,14,0,0,0,65,0,5,0,17,0,0,0,42,0,0,0,40,0,0,0,19,0,0,0,63,0,3,0, +42,0,0,0,18,0,0,0,65,0,5,0,17,0,0,0,43,0,0,0,40,0,0,0,21,0,0,0,63,0, +3,0,43,0,0,0,20,0,0,0,65,0,5,0,25,0,0,0,44,0,0,0,40,0,0,0,24,0,0,0, +63,0,3,0,44,0,0,0,23,0,0,0,65,0,5,0,25,0,0,0,45,0,0,0,40,0,0,0,24,0, +0,0,61,0,4,0,7,0,0,0,46,0,0,0,45,0,0,0,81,0,5,0,1,0,0,0,47,0,0,0, +46,0,0,0,0,0,0,0,127,0,4,0,1,0,0,0,48,0,0,0,34,0,0,0,133,0,5,0,1,0, +0,0,49,0,0,0,47,0,0,0,48,0,0,0,65,0,5,0,25,0,0,0,50,0,0,0,40,0,0,0, +24,0,0,0,65,0,5,0,51,0,0,0,52,0,0,0,50,0,0,0,16,0,0,0,62,0,3,0,52,0, +0,0,49,0,0,0,65,0,5,0,25,0,0,0,53,0,0,0,40,0,0,0,24,0,0,0,61,0,4,0, +7,0,0,0,54,0,0,0,53,0,0,0,65,0,5,0,25,0,0,0,55,0,0,0,39,0,0,0,19,0, +0,0,62,0,3,0,55,0,0,0,54,0,0,0,65,0,5,0,56,0,0,0,57,0,0,0,10,0,0,0, +16,0,0,0,61,0,4,0,2,0,0,0,58,0,0,0,57,0,0,0,65,0,5,0,17,0,0,0,59,0, +0,0,39,0,0,0,16,0,0,0,62,0,3,0,59,0,0,0,58,0,0,0,65,0,5,0,60,0,0,0, +61,0,0,0,6,0,0,0,16,0,0,0,61,0,4,0,3,0,0,0,62,0,0,0,61,0,0,0,65,0, +5,0,17,0,0,0,63,0,0,0,40,0,0,0,16,0,0,0,61,0,4,0,2,0,0,0,64,0,0,0, +63,0,0,0,79,0,7,0,7,0,0,0,65,0,0,0,64,0,0,0,64,0,0,0,0,0,0,0,1,0, +0,0,65,0,5,0,66,0,0,0,67,0,0,0,10,0,0,0,19,0,0,0,61,0,4,0,7,0,0,0, 
+68,0,0,0,67,0,0,0,129,0,5,0,7,0,0,0,69,0,0,0,65,0,0,0,68,0,0,0,80,0, +6,0,2,0,0,0,70,0,0,0,69,0,0,0,36,0,0,0,34,0,0,0,145,0,5,0,2,0,0,0, +71,0,0,0,62,0,0,0,70,0,0,0,65,0,5,0,17,0,0,0,72,0,0,0,39,0,0,0,21,0, +0,0,62,0,3,0,72,0,0,0,71,0,0,0,61,0,4,0,33,0,0,0,73,0,0,0,39,0,0,0, +81,0,5,0,2,0,0,0,74,0,0,0,73,0,0,0,0,0,0,0,62,0,3,0,29,0,0,0,74,0, +0,0,81,0,5,0,7,0,0,0,75,0,0,0,73,0,0,0,1,0,0,0,62,0,3,0,31,0,0,0, +75,0,0,0,81,0,5,0,2,0,0,0,76,0,0,0,73,0,0,0,2,0,0,0,62,0,3,0,32,0, +0,0,76,0,0,0,253,0,1,0,56,0,1,0 diff --git a/runtime/Includes/Utils/DogicaTTF.h b/runtime/Includes/Embedded/DogicaTTF.h similarity index 99% rename from runtime/Includes/Utils/DogicaTTF.h rename to runtime/Includes/Embedded/DogicaTTF.h index 3d24ce9..f624ff2 100644 --- a/runtime/Includes/Utils/DogicaTTF.h +++ b/runtime/Includes/Embedded/DogicaTTF.h @@ -1,15 +1,3 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* DogicaTTF.h :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2023/04/11 16:20:25 by maldavid #+# #+# */ -/* Updated: 2024/03/27 21:59:40 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - #ifndef __MLX_DOGICA_TTF__ #define __MLX_DOGICA_TTF__ diff --git a/runtime/Includes/Utils/IconMlx.h b/runtime/Includes/Embedded/IconMlx.h similarity index 99% rename from runtime/Includes/Utils/IconMlx.h rename to runtime/Includes/Embedded/IconMlx.h index 4a8e5a3..b74d1ea 100644 --- a/runtime/Includes/Utils/IconMlx.h +++ b/runtime/Includes/Embedded/IconMlx.h @@ -1,15 +1,3 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* IconMlx.h :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2023/11/25 11:23:16 by maldavid #+# #+# */ -/* Updated: 2024/03/27 21:59:45 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - #ifndef __ICON_MLX__ #define __ICON_MLX__ diff --git a/runtime/Includes/Embedded/ScreenFragment.nzsl b/runtime/Includes/Embedded/ScreenFragment.nzsl new file mode 100644 index 0000000..562aca0 --- /dev/null +++ b/runtime/Includes/Embedded/ScreenFragment.nzsl @@ -0,0 +1,46 @@ +[nzsl_version("1.0")] +module; + +struct VertOut +{ + [location(0)] uv : vec2[f32] +} + +struct FragOut +{ + [location(0)] color: vec4[f32] +} + +external +{ + [set(0), binding(0)] u_texture: sampler2D[f32] +} + +option approximates_rgb: bool = false; + +fn LinearTosRGB(color: vec3[f32]) -> vec3[f32] +{ + const if(!approximates_rgb) + { + return select( + color > (0.0031308).rrr, + 1.055 * pow(color, (1.0 / 2.4).rrr) - (0.055).rrr, + 12.92 * color + ); + } + else + return pow(color, (1.0 / 2.2).rrr); +} + +option gamma_correction: bool = false; + +[entry(frag)] +fn main(input: VertOut) -> FragOut +{ + let output: FragOut; + const if(gamma_correction) + output.color = vec4[f32](LinearTosRGB(u_texture.Sample(input.uv).xyz), 1.0); + else + output.color = u_texture.Sample(input.uv); + return output; +} diff --git a/runtime/Includes/Embedded/ScreenFragment.spv.h b/runtime/Includes/Embedded/ScreenFragment.spv.h new file mode 100644 index 0000000..ed5f334 --- /dev/null +++ b/runtime/Includes/Embedded/ScreenFragment.spv.h @@ -0,0 +1,49 @@ +3,2,35,7,0,0,1,0,39,0,0,0,62,0,0,0,0,0,0,0,17,0,2,0,1,0,0,0,11,0, +6,0,32,0,0,0,71,76,83,76,46,115,116,100,46,52,53,48,0,0,0,0,14,0,3,0,0,0,0,0, 
+1,0,0,0,15,0,7,0,4,0,0,0,34,0,0,0,109,97,105,110,0,0,0,0,23,0,0,0,29,0, +0,0,16,0,3,0,34,0,0,0,7,0,0,0,3,0,3,0,0,0,0,0,100,0,0,0,5,0,4,0, +25,0,0,0,86,101,114,116,79,117,116,0,6,0,4,0,25,0,0,0,0,0,0,0,117,118,0,0,5,0, +4,0,30,0,0,0,70,114,97,103,79,117,116,0,6,0,5,0,30,0,0,0,0,0,0,0,99,111,108,111, +114,0,0,0,5,0,5,0,5,0,0,0,117,95,116,101,120,116,117,114,101,0,0,0,5,0,3,0,23,0, +0,0,117,118,0,0,5,0,4,0,29,0,0,0,99,111,108,111,114,0,0,0,5,0,6,0,33,0,0,0, +76,105,110,101,97,114,84,111,115,82,71,66,0,0,0,0,5,0,4,0,34,0,0,0,109,97,105,110,0,0, +0,0,71,0,4,0,5,0,0,0,33,0,0,0,0,0,0,0,71,0,4,0,5,0,0,0,34,0,0,0, +0,0,0,0,71,0,4,0,23,0,0,0,30,0,0,0,0,0,0,0,71,0,4,0,29,0,0,0,30,0, +0,0,0,0,0,0,72,0,5,0,25,0,0,0,0,0,0,0,35,0,0,0,0,0,0,0,72,0,5,0, +30,0,0,0,0,0,0,0,35,0,0,0,0,0,0,0,22,0,3,0,1,0,0,0,32,0,0,0,25,0, +9,0,2,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0, +0,0,0,0,27,0,3,0,3,0,0,0,2,0,0,0,32,0,4,0,4,0,0,0,0,0,0,0,3,0, +0,0,23,0,4,0,6,0,0,0,1,0,0,0,3,0,0,0,32,0,4,0,7,0,0,0,7,0,0,0, +6,0,0,0,33,0,4,0,8,0,0,0,6,0,0,0,7,0,0,0,43,0,4,0,1,0,0,0,9,0, +0,0,28,46,77,59,21,0,4,0,10,0,0,0,32,0,0,0,1,0,0,0,43,0,4,0,10,0,0,0, +11,0,0,0,0,0,0,0,20,0,2,0,12,0,0,0,23,0,4,0,13,0,0,0,12,0,0,0,3,0, +0,0,43,0,4,0,1,0,0,0,14,0,0,0,61,10,135,63,43,0,4,0,1,0,0,0,15,0,0,0, +0,0,128,63,43,0,4,0,1,0,0,0,16,0,0,0,154,153,25,64,43,0,4,0,1,0,0,0,17,0, +0,0,174,71,97,61,43,0,4,0,1,0,0,0,18,0,0,0,82,184,78,65,19,0,2,0,19,0,0,0, +33,0,3,0,20,0,0,0,19,0,0,0,23,0,4,0,21,0,0,0,1,0,0,0,2,0,0,0,32,0, +4,0,22,0,0,0,1,0,0,0,21,0,0,0,32,0,4,0,24,0,0,0,7,0,0,0,21,0,0,0, +30,0,3,0,25,0,0,0,21,0,0,0,32,0,4,0,26,0,0,0,7,0,0,0,25,0,0,0,23,0, +4,0,27,0,0,0,1,0,0,0,4,0,0,0,32,0,4,0,28,0,0,0,3,0,0,0,27,0,0,0, +30,0,3,0,30,0,0,0,27,0,0,0,32,0,4,0,31,0,0,0,7,0,0,0,30,0,0,0,32,0, +4,0,59,0,0,0,7,0,0,0,27,0,0,0,59,0,4,0,4,0,0,0,5,0,0,0,0,0,0,0, +59,0,4,0,22,0,0,0,23,0,0,0,1,0,0,0,59,0,4,0,28,0,0,0,29,0,0,0,3,0, +0,0,54,0,5,0,6,0,0,0,33,0,0,0,0,0,0,0,8,0,0,0,55,0,3,0,7,0,0,0, +35,0,0,0,248,0,2,0,36,0,0,0,61,0,4,0,6,0,0,0,37,0,0,0,35,0,0,0,80,0, +6,0,6,0,0,0,38,0,0,0,9,0,0,0,9,0,0,0,9,0,0,0,186,0,5,0,13,0,0,0, +39,0,0,0,37,0,0,0,38,0,0,0,61,0,4,0,6,0,0,0,40,0,0,0,35,0,0,0,136,0, +5,0,1,0,0,0,41,0,0,0,15,0,0,0,16,0,0,0,80,0,6,0,6,0,0,0,42,0,0,0, +41,0,0,0,41,0,0,0,41,0,0,0,12,0,7,0,6,0,0,0,43,0,0,0,32,0,0,0,26,0, +0,0,40,0,0,0,42,0,0,0,142,0,5,0,6,0,0,0,44,0,0,0,43,0,0,0,14,0,0,0, +80,0,6,0,6,0,0,0,45,0,0,0,17,0,0,0,17,0,0,0,17,0,0,0,131,0,5,0,6,0, +0,0,46,0,0,0,44,0,0,0,45,0,0,0,61,0,4,0,6,0,0,0,47,0,0,0,35,0,0,0, +142,0,5,0,6,0,0,0,48,0,0,0,47,0,0,0,18,0,0,0,169,0,6,0,6,0,0,0,49,0, +0,0,39,0,0,0,46,0,0,0,48,0,0,0,254,0,2,0,49,0,0,0,56,0,1,0,54,0,5,0, +19,0,0,0,34,0,0,0,0,0,0,0,20,0,0,0,248,0,2,0,50,0,0,0,59,0,4,0,31,0, +0,0,51,0,0,0,7,0,0,0,59,0,4,0,26,0,0,0,52,0,0,0,7,0,0,0,65,0,5,0, +24,0,0,0,53,0,0,0,52,0,0,0,11,0,0,0,63,0,3,0,53,0,0,0,23,0,0,0,61,0, +4,0,3,0,0,0,54,0,0,0,5,0,0,0,65,0,5,0,24,0,0,0,55,0,0,0,52,0,0,0, +11,0,0,0,61,0,4,0,21,0,0,0,56,0,0,0,55,0,0,0,87,0,5,0,27,0,0,0,57,0, +0,0,54,0,0,0,56,0,0,0,65,0,5,0,59,0,0,0,58,0,0,0,51,0,0,0,11,0,0,0, +62,0,3,0,58,0,0,0,57,0,0,0,61,0,4,0,30,0,0,0,60,0,0,0,51,0,0,0,81,0, +5,0,27,0,0,0,61,0,0,0,60,0,0,0,0,0,0,0,62,0,3,0,29,0,0,0,61,0,0,0, +253,0,1,0,56,0,1,0 diff --git a/runtime/Includes/Embedded/ScreenVertex.nzsl b/runtime/Includes/Embedded/ScreenVertex.nzsl new file mode 100644 index 0000000..94a4440 --- /dev/null +++ b/runtime/Includes/Embedded/ScreenVertex.nzsl @@ -0,0 +1,31 @@ +[nzsl_version("1.0")] +module; + +struct VertIn +{ + [builtin(vertex_index)] vert_index: i32 +} + 
+struct VertOut +{ + [location(0)] uv: vec2[f32], + [builtin(position)] position: vec4[f32] +} + +const vertices = array[vec2[f32]]( + vec2[f32](-1.0, -3.0), + vec2[f32](-1.0, 1.0), + vec2[f32]( 3.0, 1.0) +); + +[entry(vert)] +fn main(input: VertIn) -> VertOut +{ + let position = vertices[input.vert_index]; + + let output: VertOut; + output.position = vec4[f32](position, 0.0, 1.0); + output.uv = position * 0.5 + vec2[f32](0.5, 0.5); + + return output; +} diff --git a/runtime/Includes/Embedded/ScreenVertex.spv.h b/runtime/Includes/Embedded/ScreenVertex.spv.h new file mode 100644 index 0000000..3ba7900 --- /dev/null +++ b/runtime/Includes/Embedded/ScreenVertex.spv.h @@ -0,0 +1,48 @@ +3,2,35,7,0,0,1,0,39,0,0,0,59,0,0,0,0,0,0,0,17,0,2,0,1,0,0,0,14,0, +3,0,0,0,0,0,1,0,0,0,15,0,8,0,0,0,0,0,36,0,0,0,109,97,105,110,0,0,0,0, +20,0,0,0,26,0,0,0,29,0,0,0,3,0,3,0,0,0,0,0,100,0,0,0,5,0,4,0,23,0, +0,0,86,101,114,116,73,110,0,0,6,0,6,0,23,0,0,0,0,0,0,0,118,101,114,116,95,105,110,100, +101,120,0,0,5,0,4,0,30,0,0,0,86,101,114,116,79,117,116,0,6,0,4,0,30,0,0,0,0,0, +0,0,117,118,0,0,6,0,6,0,30,0,0,0,1,0,0,0,112,111,115,105,116,105,111,110,0,0,0,0, +5,0,5,0,15,0,0,0,118,101,114,116,105,99,101,115,0,0,0,0,5,0,6,0,20,0,0,0,118,101, +114,116,101,120,95,105,110,100,101,120,0,0,0,0,5,0,3,0,26,0,0,0,117,118,0,0,5,0,5,0, +29,0,0,0,112,111,115,105,116,105,111,110,0,0,0,0,5,0,4,0,36,0,0,0,109,97,105,110,0,0, +0,0,71,0,4,0,20,0,0,0,11,0,0,0,42,0,0,0,71,0,4,0,29,0,0,0,11,0,0,0, +0,0,0,0,71,0,4,0,26,0,0,0,30,0,0,0,0,0,0,0,72,0,5,0,23,0,0,0,0,0, +0,0,35,0,0,0,0,0,0,0,72,0,5,0,30,0,0,0,0,0,0,0,35,0,0,0,0,0,0,0, +72,0,5,0,30,0,0,0,1,0,0,0,35,0,0,0,16,0,0,0,22,0,3,0,1,0,0,0,32,0, +0,0,23,0,4,0,2,0,0,0,1,0,0,0,2,0,0,0,21,0,4,0,3,0,0,0,32,0,0,0, +0,0,0,0,43,0,4,0,3,0,0,0,4,0,0,0,3,0,0,0,28,0,4,0,5,0,0,0,2,0, +0,0,4,0,0,0,32,0,4,0,6,0,0,0,6,0,0,0,5,0,0,0,43,0,4,0,1,0,0,0, +7,0,0,0,0,0,128,191,43,0,4,0,1,0,0,0,8,0,0,0,0,0,64,192,44,0,5,0,2,0, +0,0,9,0,0,0,7,0,0,0,8,0,0,0,43,0,4,0,1,0,0,0,10,0,0,0,0,0,128,63, +44,0,5,0,2,0,0,0,11,0,0,0,7,0,0,0,10,0,0,0,43,0,4,0,1,0,0,0,12,0, +0,0,0,0,64,64,44,0,5,0,2,0,0,0,13,0,0,0,12,0,0,0,10,0,0,0,44,0,6,0, +5,0,0,0,14,0,0,0,9,0,0,0,11,0,0,0,13,0,0,0,19,0,2,0,16,0,0,0,33,0, +3,0,17,0,0,0,16,0,0,0,21,0,4,0,18,0,0,0,32,0,0,0,1,0,0,0,32,0,4,0, +19,0,0,0,1,0,0,0,18,0,0,0,43,0,4,0,18,0,0,0,21,0,0,0,0,0,0,0,32,0, +4,0,22,0,0,0,7,0,0,0,18,0,0,0,30,0,3,0,23,0,0,0,18,0,0,0,32,0,4,0, +24,0,0,0,7,0,0,0,23,0,0,0,32,0,4,0,25,0,0,0,3,0,0,0,2,0,0,0,23,0, +4,0,27,0,0,0,1,0,0,0,4,0,0,0,32,0,4,0,28,0,0,0,3,0,0,0,27,0,0,0, +30,0,4,0,30,0,0,0,2,0,0,0,27,0,0,0,32,0,4,0,31,0,0,0,7,0,0,0,2,0, +0,0,32,0,4,0,32,0,0,0,7,0,0,0,30,0,0,0,43,0,4,0,18,0,0,0,33,0,0,0, +1,0,0,0,43,0,4,0,1,0,0,0,34,0,0,0,0,0,0,0,43,0,4,0,1,0,0,0,35,0, +0,0,0,0,0,63,32,0,4,0,44,0,0,0,6,0,0,0,2,0,0,0,32,0,4,0,50,0,0,0, +7,0,0,0,27,0,0,0,59,0,5,0,6,0,0,0,15,0,0,0,6,0,0,0,14,0,0,0,59,0, +4,0,19,0,0,0,20,0,0,0,1,0,0,0,59,0,4,0,25,0,0,0,26,0,0,0,3,0,0,0, +59,0,4,0,28,0,0,0,29,0,0,0,3,0,0,0,54,0,5,0,16,0,0,0,36,0,0,0,0,0, +0,0,17,0,0,0,248,0,2,0,37,0,0,0,59,0,4,0,31,0,0,0,38,0,0,0,7,0,0,0, +59,0,4,0,32,0,0,0,39,0,0,0,7,0,0,0,59,0,4,0,24,0,0,0,40,0,0,0,7,0, +0,0,65,0,5,0,22,0,0,0,41,0,0,0,40,0,0,0,21,0,0,0,63,0,3,0,41,0,0,0, +20,0,0,0,65,0,5,0,22,0,0,0,42,0,0,0,40,0,0,0,21,0,0,0,61,0,4,0,18,0, +0,0,43,0,0,0,42,0,0,0,65,0,5,0,44,0,0,0,45,0,0,0,15,0,0,0,43,0,0,0, +61,0,4,0,2,0,0,0,46,0,0,0,45,0,0,0,62,0,3,0,38,0,0,0,46,0,0,0,61,0, +4,0,2,0,0,0,47,0,0,0,38,0,0,0,80,0,6,0,27,0,0,0,48,0,0,0,47,0,0,0, 
+34,0,0,0,10,0,0,0,65,0,5,0,50,0,0,0,49,0,0,0,39,0,0,0,33,0,0,0,62,0, +3,0,49,0,0,0,48,0,0,0,61,0,4,0,2,0,0,0,51,0,0,0,38,0,0,0,142,0,5,0, +2,0,0,0,52,0,0,0,51,0,0,0,35,0,0,0,80,0,5,0,2,0,0,0,53,0,0,0,35,0, +0,0,35,0,0,0,129,0,5,0,2,0,0,0,54,0,0,0,52,0,0,0,53,0,0,0,65,0,5,0, +31,0,0,0,55,0,0,0,39,0,0,0,21,0,0,0,62,0,3,0,55,0,0,0,54,0,0,0,61,0, +4,0,30,0,0,0,56,0,0,0,39,0,0,0,81,0,5,0,2,0,0,0,57,0,0,0,56,0,0,0, +0,0,0,0,62,0,3,0,26,0,0,0,57,0,0,0,81,0,5,0,27,0,0,0,58,0,0,0,56,0, +0,0,1,0,0,0,62,0,3,0,29,0,0,0,58,0,0,0,253,0,1,0,56,0,1,0 diff --git a/runtime/Includes/Graphics/Mesh.h b/runtime/Includes/Graphics/Mesh.h new file mode 100644 index 0000000..999213f --- /dev/null +++ b/runtime/Includes/Graphics/Mesh.h @@ -0,0 +1,53 @@ +#ifndef __MLX_RENDERER_MESH__ +#define __MLX_RENDERER_MESH__ + +#include +#include +#include + +namespace mlx +{ + class Mesh + { + public: + struct SubMesh + { + VertexBuffer vbo; + IndexBuffer ibo; + std::size_t triangle_count = 0; + + inline SubMesh(const std::vector& vertices, const std::vector& indices) + { + CPUBuffer vb(vertices.size() * sizeof(Vertex)); + std::memcpy(vb.GetData(), vertices.data(), vb.GetSize()); + vbo.Init(vb.GetSize()); + vbo.SetData(std::move(vb)); + + CPUBuffer ib(indices.size() * sizeof(std::uint32_t)); + std::memcpy(ib.GetData(), indices.data(), ib.GetSize()); + ibo.Init(ib.GetSize()); + ibo.SetData(std::move(ib)); + + triangle_count = vertices.size() / 3; + } + }; + + public: + Mesh() = default; + + void Draw(VkCommandBuffer cmd, std::size_t& drawcalls, std::size_t& polygondrawn) const noexcept; + void Draw(VkCommandBuffer cmd, std::size_t& drawcalls, std::size_t& polygondrawn, std::size_t submesh_index) const noexcept; + + inline std::size_t GetSubMeshCount() const { return m_sub_meshes.size(); } + + inline void AddSubMesh(SubMesh mesh) { m_sub_meshes.emplace_back(std::move(mesh)); } + [[nodiscard]] inline SubMesh& GetSubMesh(std::size_t index) { return m_sub_meshes.at(index); } + + ~Mesh(); + + private: + std::vector m_sub_meshes; + }; +} + +#endif diff --git a/runtime/Includes/Graphics/Scene.h b/runtime/Includes/Graphics/Scene.h new file mode 100644 index 0000000..32f06bf --- /dev/null +++ b/runtime/Includes/Graphics/Scene.h @@ -0,0 +1,32 @@ +#ifndef __MLX_SCENE__ +#define __MLX_SCENE__ + +#include + +namespace mlx +{ + struct SceneDescriptor + { + NonOwningPtr renderer; + // More description may come in future + }; + + class Scene + { + public: + Scene(SceneDescriptor desc); + + Sprite& CreateSprite(std::shared_ptr texture) noexcept; + + [[nodiscard]] inline const std::vector>& GetSprites() const noexcept { return m_sprites; } + [[nodiscard]] inline const SceneDescriptor& GetDescription() const noexcept { return m_descriptor; } + + ~Scene() = default; + + private: + SceneDescriptor m_descriptor; + std::vector> m_sprites; + }; +} + +#endif diff --git a/runtime/Includes/Graphics/Sprite.h b/runtime/Includes/Graphics/Sprite.h new file mode 100644 index 0000000..1720f6a --- /dev/null +++ b/runtime/Includes/Graphics/Sprite.h @@ -0,0 +1,53 @@ +#ifndef __MLX_SPRITE__ +#define __MLX_SPRITE__ + +#include +#include +#include +#include +#include + +namespace mlx +{ + class Sprite + { + friend class Render2DPass; + + public: + Sprite(std::shared_ptr texture); + + inline void SetColor(Vec4f color) noexcept { m_color = color; } + inline void SetPosition(Vec2ui position) noexcept { m_position = position; } + + [[nodiscard]] inline const Vec4f& GetColor() const noexcept { return m_color; } + [[nodiscard]] inline const Vec2ui& GetPosition() const 
noexcept { return m_position; } + [[nodiscard]] inline std::shared_ptr GetMesh() const { return p_mesh; } + [[nodiscard]] inline std::shared_ptr GetTexture() const { return p_texture; } + + ~Sprite() = default; + + private: + [[nodiscard]] inline bool IsSetInit() const noexcept { return m_set.IsInit(); } + [[nodiscard]] inline VkDescriptorSet GetSet(std::size_t frame_index) const noexcept { return m_set.GetSet(frame_index); } + + inline void UpdateDescriptorSet(const DescriptorSet& set) + { + m_set = set.Duplicate(); + } + + inline void Bind(std::size_t frame_index, VkCommandBuffer cmd) + { + m_set.SetImage(frame_index, 0, *p_texture); + m_set.Update(frame_index, cmd); + } + + private: + DescriptorSet m_set; + std::shared_ptr p_texture; + std::shared_ptr p_mesh; + Vec4f m_color = Vec4f{ 1.0f, 1.0f, 1.0f, 1.0f }; + Vec2ui m_position = Vec2ui{ 0, 0 }; + }; +} + +#endif diff --git a/runtime/Includes/Maths/Angles.h b/runtime/Includes/Maths/Angles.h new file mode 100644 index 0000000..801d71e --- /dev/null +++ b/runtime/Includes/Maths/Angles.h @@ -0,0 +1,108 @@ +#ifndef __SCOP_ANGLES__ +#define __SCOP_ANGLES__ + +#include +#include +#include + +namespace Scop +{ + template struct EulerAngles; + template struct Quat; + + template + struct Angle + { + T value; + + constexpr Angle() = default; + constexpr Angle(T angle); + template constexpr explicit Angle(const Angle& Angle); + template constexpr Angle(const Angle& angle); + constexpr Angle(const Angle&) = default; + constexpr Angle(Angle&&) noexcept = default; + ~Angle() = default; + + constexpr bool ApproxEqual(const Angle& angle) const; + constexpr bool ApproxEqual(const Angle& angle, T max_difference) const; + + T GetCos() const; + T GetSin() const; + std::pair GetSinCos() const; + T GetTan() const; + + constexpr Angle& Normalize(); + + template T To() const; + template Angle ToAngle() const; + constexpr T ToDegrees() const; + constexpr Angle ToDegreeAngle() const; + EulerAngles ToEulerAngles() const; + Quat ToQuat() const; + constexpr T ToRadians() const; + constexpr Angle ToRadianAngle() const; + std::string ToString() const; + constexpr T ToTurns() const; + constexpr Angle ToTurnAngle() const; + + constexpr Angle& operator=(const Angle&) = default; + constexpr Angle& operator=(Angle&&) noexcept = default; + + constexpr Angle operator+() const; + constexpr Angle operator-() const; + + constexpr Angle operator+(Angle other) const; + constexpr Angle operator-(Angle other) const; + constexpr Angle operator*(T scalar) const; + constexpr Angle operator/(T divider) const; + + constexpr Angle& operator+=(Angle other); + constexpr Angle& operator-=(Angle other); + constexpr Angle& operator*=(T scalar); + constexpr Angle& operator/=(T divider); + + constexpr bool operator==(Angle other) const; + constexpr bool operator!=(Angle other) const; + constexpr bool operator<(Angle other) const; + constexpr bool operator<=(Angle other) const; + constexpr bool operator>(Angle other) const; + constexpr bool operator>=(Angle other) const; + + static constexpr bool ApproxEqual(const Angle& lhs, const Angle& rhs); + static constexpr bool ApproxEqual(const Angle& lhs, const Angle& rhs, T max_difference); + static constexpr Angle Clamp(Angle angle, Angle min, Angle max); + template static constexpr Angle From(T value); + static constexpr Angle FromDegrees(T degrees); + static constexpr Angle FromRadians(T radians); + static constexpr Angle FromTurns(T turn); + static constexpr Angle Zero(); + }; + + template + using DegreeAngle = Angle; + + using DegreeAngled = 
DegreeAngle; + using DegreeAnglef = DegreeAngle; + + template + using RadianAngle = Angle; + + using RadianAngled = RadianAngle; + using RadianAnglef = RadianAngle; + + template + using TurnAngle = Angle; + + using TurnAngled = TurnAngle; + using TurnAnglef = TurnAngle; + + template Angle operator*(T scale, Angle angle); + + template Angle operator/(T divider, Angle angle); + + template std::ostream& operator<<(std::ostream& out, Angle angle); +} + +#include + +#endif diff --git a/runtime/Includes/Maths/Angles.inl b/runtime/Includes/Maths/Angles.inl new file mode 100644 index 0000000..5c2631e --- /dev/null +++ b/runtime/Includes/Maths/Angles.inl @@ -0,0 +1,488 @@ +#pragma once +#include + +#include +#include + +#include +#include + +namespace Scop +{ + namespace Internal + { + template struct AngleConversion; + + template + struct AngleConversion + { + template + static constexpr T Convert(T angle) + { + return angle; + } + }; + + template<> + struct AngleConversion + { + template + static constexpr T Convert(T angle) + { + return DegreeToRadian(angle); + } + }; + + template<> + struct AngleConversion + { + template + static constexpr T Convert(T angle) + { + return angle / T(360); + } + }; + + template<> + struct AngleConversion + { + template + static constexpr T Convert(T angle) + { + return RadianToDegree(angle); + } + }; + + template<> + struct AngleConversion + { + template + static constexpr T Convert(T angle) + { + return angle / Tau(); + } + }; + + template<> + struct AngleConversion + { + template + static constexpr T Convert(T angle) + { + return angle * T(360); + } + }; + + template<> + struct AngleConversion + { + template + static constexpr T Convert(T angle) + { + return angle * Tau(); + } + }; + + template struct AngleUtils; + + template<> + struct AngleUtils + { + template + static constexpr T GetEpsilon() + { + return T(1e-4); + } + + template + static constexpr T GetLimit() + { + return 360; + } + + template static std::ostream& ToString(std::ostream& out, T value) + { + return out << "Angle(" << value << "deg)"; + } + }; + + template<> + struct AngleUtils + { + template + static constexpr T GetEpsilon() + { + return T(1e-5); + } + + template + static constexpr T GetLimit() + { + return Tau(); + } + + template + static std::ostream& ToString(std::ostream& out, T value) + { + return out << "Angle(" << value << "rad)"; + } + }; + + template<> + struct AngleUtils + { + template + static constexpr T GetEpsilon() + { + return T(1e-5); + } + + template + static constexpr T GetLimit() + { + return 1; + } + + template + static std::ostream& ToString(std::ostream& out, T value) + { + return out << "Angle(" << value << "turn)"; + } + }; + + template + void SinCos(T x, T* sin, T* cos) + { + double s, c; + ::sincos(x, &s, &c); + + *sin = static_cast(s); + *cos = static_cast(c); + } + + template<> + inline void SinCos(float x, float* s, float* c) + { + ::sincosf(x, s, c); + } + + template<> + inline void SinCos(long double x, long double* s, long double* c) + { + ::sincosl(x, s, c); + } + } + + template + constexpr Angle::Angle(T angle) : + value(angle) + { + } + + template + template + constexpr Angle::Angle(const Angle& angle) : + value(static_cast(angle.value)) + { + } + + template + template + constexpr Angle::Angle(const Angle& angle) : + value(Internal::AngleConversion::Convert(angle.value)) + { + } + + template + constexpr bool Angle::ApproxEqual(const Angle& angle) const + { + return ApproxEqual(angle, Internal::AngleUtils::template GetEpsilon()); + } + + template + constexpr 
bool Angle::ApproxEqual(const Angle& angle, T maxDifference) const + { + return NumberEquals(value, angle.value, maxDifference); + } + + template + T Angle::GetCos() const + { + return std::cos(ToRadians()); + } + + template + T Angle::GetSin() const + { + return std::sin(ToRadians()); + } + + template + std::pair Angle::GetSinCos() const + { + T sin, cos; + Internal::SinCos(ToRadians(), &sin, &cos); + + return std::make_pair(sin, cos); + } + + template + T Angle::GetTan() const + { + return std::tan(ToRadians()); + } + + template + constexpr Angle& Angle::Normalize() + { + constexpr T limit = Internal::AngleUtils::template GetLimit(); + constexpr T halfLimit = limit / T(2); + + value = Mod(value + halfLimit, limit); + if (value < T(0)) + value += limit; + + value -= halfLimit; + return *this; + } + + template + template + T Angle::To() const + { + return Internal::AngleConversion::Convert(value); + } + + template + template + Angle Angle::ToAngle() const + { + return Angle(To()); + } + + template + constexpr T Angle::ToDegrees() const + { + return To(); + } + + template + constexpr Angle Angle::ToDegreeAngle() const + { + return ToAngle(); + } + + template + EulerAngles Angle::ToEulerAngles() const + { + return EulerAngles(0, 0, ToDegrees()); + } + + template + Quat Angle::ToQuat() const + { + auto halfAngle = Angle(*this) / 2.f; + auto sincos = halfAngle.GetSinCos(); + return Quat(sincos.second, 0, 0, sincos.first); + } + + template + constexpr T Angle::ToRadians() const + { + return To(); + } + + template + constexpr Angle Angle::ToRadianAngle() const + { + return ToAngle(); + } + + template + std::string Angle::ToString() const + { + std::ostringstream oss; + Internal::AngleUtils::ToString(oss, value); + + return oss.str(); + } + + template + constexpr T Angle::ToTurns() const + { + return To(value); + } + + template + constexpr Angle Angle::ToTurnAngle() const + { + return ToAngle(); + } + + template + constexpr Angle Angle::operator+() const + { + return *this; + } + + template + constexpr Angle Angle::operator-() const + { + return Angle(-value); + } + + template + constexpr Angle Angle::operator+(Angle other) const + { + return Angle(value + other.value); + } + + template + constexpr Angle Angle::operator-(Angle other) const + { + return Angle(value - other.value); + } + + template + constexpr Angle Angle::operator*(T scalar) const + { + return Angle(value * scalar); + } + + template + constexpr Angle Angle::operator/(T divider) const + { + return Angle(value / divider); + } + + template + constexpr Angle& Angle::operator+=(Angle other) + { + value += other.value; + return *this; + } + + template + constexpr Angle& Angle::operator-=(Angle other) + { + value -= other.value; + return *this; + } + + template + constexpr Angle& Angle::operator*=(T scalar) + { + value *= scalar; + return *this; + } + + template + constexpr Angle& Angle::operator/=(T divider) + { + value /= divider; + return *this; + } + + template + constexpr bool Angle::operator==(Angle other) const + { + return value == other.value; + } + + template + constexpr bool Angle::operator!=(Angle other) const + { + return value != other.value; + } + + template + constexpr bool Angle::operator<(Angle other) const + { + return value < other.value; + } + + template + constexpr bool Angle::operator<=(Angle other) const + { + return value <= other.value; + } + + template + constexpr bool Angle::operator>(Angle other) const + { + return value > other.value; + } + + template + constexpr bool Angle::operator>=(Angle other) const + { 
+ return value >= other.value; + } + + template + constexpr bool Angle::ApproxEqual(const Angle& lhs, const Angle& rhs) + { + return lhs.ApproxEqual(rhs); + } + + template + constexpr bool Angle::ApproxEqual(const Angle& lhs, const Angle& rhs, T maxDifference) + { + return lhs.ApproxEqual(rhs, maxDifference); + } + + template + constexpr Angle Angle::Clamp(Angle angle, Angle min, Angle max) + { + return Angle(std::clamp(angle.value, min.value, max.value)); + } + + template + template + constexpr Angle Angle::From(T value) + { + return Angle(Internal::AngleConversion::Convert(value)); + } + + template + constexpr Angle Angle::FromDegrees(T degrees) + { + return From(degrees); + } + + template + constexpr Angle Angle::FromRadians(T radians) + { + return From(radians); + } + + template + constexpr Angle Angle::FromTurns(T turns) + { + return From(turns); + } + + template + constexpr Angle Angle::Zero() + { + return Angle(0); + } + + template + Angle operator/(T scale, Angle angle) + { + return Angle(scale / angle.value); + } + + template + std::ostream& operator<<(std::ostream& out, Angle angle) + { + return Internal::AngleUtils::ToString(out, angle.value); + } + + template + constexpr Angle Clamp(Angle value, T min, T max) + { + return std::max(std::min(value.value, max), min); + } +} diff --git a/runtime/Includes/Maths/Constants.h b/runtime/Includes/Maths/Constants.h new file mode 100644 index 0000000..015346f --- /dev/null +++ b/runtime/Includes/Maths/Constants.h @@ -0,0 +1,87 @@ +#ifndef __SCOP_MATHS_CONSTANTS__ +#define __SCOP_MATHS_CONSTANTS__ + +#include +#include +#include + +namespace Scop +{ + template constexpr std::size_t BitCount = CHAR_BIT * sizeof(T); + + template + struct MathConstants + { + static constexpr T Infinity() + { + static_assert(std::numeric_limits::has_infinity); + return std::numeric_limits::infinity(); + } + + static constexpr T Max() + { + return std::numeric_limits::max(); + } + + static constexpr T Min() + { + return std::numeric_limits::min(); + } + + static constexpr T NaN() + { + static_assert(std::numeric_limits::has_signaling_NaN); + return std::numeric_limits::quiet_NaN(); + } + + // Math constants + static constexpr T HalfPi() + { + static_assert(std::is_floating_point_v); + return T(1.5707963267948966192313216916398); + } + + static constexpr T Pi() + { + static_assert(std::is_floating_point_v); + return T(3.1415926535897932384626433832795); + } + + static constexpr T Sqrt2() + { + static_assert(std::is_floating_point_v); + return T(1.4142135623730950488016887242097); + } + + static constexpr T Sqrt3() + { + static_assert(std::is_floating_point_v); + return T(1.7320508075688772935274463415059); + } + + static constexpr T Sqrt5() + { + static_assert(std::is_floating_point_v); + return T(2.2360679774997896964091736687313); + } + + static constexpr T Tau() + { + static_assert(std::is_floating_point_v); + return T(6.2831853071795864769252867665590); + } + }; + + template constexpr auto Infinity() { return MathConstants::Infinity(); } + template constexpr auto MaxValue() { return MathConstants::Max(); } + template constexpr auto MinValue() { return MathConstants::Min(); } + template constexpr auto NaN() { return MathConstants::NaN(); } + template constexpr auto HalfPi() { return MathConstants::HalfPi(); } + template constexpr auto Pi() { return MathConstants::Pi(); } + template constexpr auto Sqrt2() { return MathConstants::Sqrt2(); } + template constexpr auto Sqrt3() { return MathConstants::Sqrt3(); } + template constexpr auto Sqrt5() { return 
MathConstants::Sqrt5(); } + template constexpr auto Tau() { return MathConstants::Tau(); } +} + +#endif diff --git a/runtime/Includes/Maths/Enums.h b/runtime/Includes/Maths/Enums.h new file mode 100644 index 0000000..df12fbd --- /dev/null +++ b/runtime/Includes/Maths/Enums.h @@ -0,0 +1,20 @@ +#ifndef __SCOPE_MATHS_ENUMS__ +#define __SCOPE_MATHS_ENUMS__ + +#include + +namespace Scop +{ + enum class AngleUnit + { + Degree = 0, + Radian, + Turn, + + EndEnum + }; + + constexpr std::size_t AngleUnitCount = static_cast(AngleUnit::EndEnum); +} + +#endif diff --git a/runtime/Includes/Maths/EulerAngles.h b/runtime/Includes/Maths/EulerAngles.h new file mode 100644 index 0000000..8a873cb --- /dev/null +++ b/runtime/Includes/Maths/EulerAngles.h @@ -0,0 +1,57 @@ +#ifndef __SCOP_EULER_ANGLES__ +#define __SCOP_EULER_ANGLES__ + +#include + +#include + +namespace Scop +{ + template + struct EulerAngles + { + constexpr EulerAngles() = default; + constexpr EulerAngles(DegreeAngle P, DegreeAngle Y, DegreeAngle R); + constexpr EulerAngles(const DegreeAngle angles[3]); + template constexpr EulerAngles(const Angle& angle); + constexpr EulerAngles(const Quat& quat); + template constexpr explicit EulerAngles(const EulerAngles& angles); + constexpr EulerAngles(const EulerAngles&) = default; + constexpr EulerAngles(EulerAngles&&) = default; + ~EulerAngles() = default; + + constexpr bool ApproxEqual(const EulerAngles& angles, T maxDifference = std::numeric_limits::epsilon()) const; + + constexpr EulerAngles& Normalize(); + + Quat ToQuat() const; + std::string ToString() const; + + constexpr EulerAngles operator+(const EulerAngles& angles) const; + constexpr EulerAngles operator-(const EulerAngles& angles) const; + + constexpr EulerAngles& operator=(const EulerAngles&) = default; + constexpr EulerAngles& operator=(EulerAngles&&) = default; + + constexpr EulerAngles& operator+=(const EulerAngles& angles); + constexpr EulerAngles& operator-=(const EulerAngles& angles); + + constexpr bool operator==(const EulerAngles& angles) const; + constexpr bool operator!=(const EulerAngles& angles) const; + constexpr bool operator<(const EulerAngles& angles) const; + constexpr bool operator<=(const EulerAngles& angles) const; + constexpr bool operator>(const EulerAngles& angles) const; + constexpr bool operator>=(const EulerAngles& angles) const; + + static constexpr bool ApproxEqual(const EulerAngles& lhs, const EulerAngles& rhs, T maxDifference = std::numeric_limits::epsilon()); + static constexpr EulerAngles Zero(); + + DegreeAngle pitch, yaw, roll; + }; + + using EulerAnglesf = EulerAngles; +} + +#include + +#endif diff --git a/runtime/Includes/Maths/EulerAngles.inl b/runtime/Includes/Maths/EulerAngles.inl new file mode 100644 index 0000000..97fab12 --- /dev/null +++ b/runtime/Includes/Maths/EulerAngles.inl @@ -0,0 +1,169 @@ +#pragma once +#include + +namespace Scop +{ + template + constexpr EulerAngles::EulerAngles(DegreeAngle P, DegreeAngle Y, DegreeAngle R) : + pitch(P), yaw(Y), roll(R) + {} + + template + constexpr EulerAngles::EulerAngles(const DegreeAngle angles[3]) : + EulerAngles(angles[0], angles[1], angles[2]) + {} + + template + template + constexpr EulerAngles::EulerAngles(const Angle& angle) : + EulerAngles(angle.ToEulerAngles()) + {} + + template + constexpr EulerAngles::EulerAngles(const Quat& quat) : + EulerAngles(quat.ToEulerAngles()) + {} + + template + template + constexpr EulerAngles::EulerAngles(const EulerAngles& angles) : + pitch(DegreeAngle(angles.pitch)), yaw(DegreeAngle(angles.yaw)), 
roll(DegreeAngle(angles.roll)) + {} + + template + constexpr bool EulerAngles::ApproxEqual(const EulerAngles& angles, T maxDifference) const + { + return pitch.ApproxEqual(angles.pitch, maxDifference) && yaw.ApproxEqual(angles.yaw, maxDifference) && roll.ApproxEqual(angles.roll, maxDifference); + } + + template + constexpr EulerAngles& EulerAngles::Normalize() + { + pitch.Normalize(); + yaw.Normalize(); + roll.Normalize(); + return *this; + } + + template + Quat EulerAngles::ToQuat() const + { + // XYZ + auto [s1, c1] = (yaw / T(2.0)).GetSinCos(); + auto [s2, c2] = (roll / T(2.0)).GetSinCos(); + auto [s3, c3] = (pitch / T(2.0)).GetSinCos(); + + return Quat(c1 * c2 * c3 - s1 * s2 * s3, + s1 * s2 * c3 + c1 * c2 * s3, + s1 * c2 * c3 + c1 * s2 * s3, + c1 * s2 * c3 - s1 * c2 * s3); + } + + template + std::string EulerAngles::ToString() const + { + std::ostringstream ss; + ss << *this; + return ss.str(); + } + + template + constexpr EulerAngles EulerAngles::operator+(const EulerAngles& angles) const + { + return EulerAngles(pitch + angles.pitch, yaw + angles.yaw, roll + angles.roll); + } + + template + constexpr EulerAngles EulerAngles::operator-(const EulerAngles& angles) const + { + return EulerAngles(pitch - angles.pitch, yaw - angles.yaw, roll - angles.roll); + } + + template + constexpr EulerAngles& EulerAngles::operator+=(const EulerAngles& angles) + { + pitch += angles.pitch; + yaw += angles.yaw; + roll += angles.roll; + return *this; + } + + template + constexpr EulerAngles& EulerAngles::operator-=(const EulerAngles& angles) + { + pitch -= angles.pitch; + yaw -= angles.yaw; + roll -= angles.roll; + return *this; + } + + template + constexpr bool EulerAngles::operator==(const EulerAngles& angles) const + { + return pitch == angles.pitch && yaw == angles.yaw && roll == angles.roll; + } + + template + constexpr bool EulerAngles::operator!=(const EulerAngles& angles) const + { + return !operator==(angles); + } + + template + constexpr bool EulerAngles::operator<(const EulerAngles& angles) const + { + if (pitch != angles.pitch) + return pitch < angles.pitch; + if (yaw != angles.yaw) + return yaw < angles.yaw; + return roll < angles.roll; + } + + template + constexpr bool EulerAngles::operator<=(const EulerAngles& angles) const + { + if (pitch != angles.pitch) + return pitch < angles.pitch; + if (yaw != angles.yaw) + return yaw < angles.yaw; + return roll <= angles.roll; + } + + template + constexpr bool EulerAngles::operator>(const EulerAngles& angles) const + { + if (pitch != angles.pitch) + return pitch > angles.pitch; + if (yaw != angles.yaw) + return yaw > angles.yaw; + return roll > angles.roll; + } + + template + constexpr bool EulerAngles::operator>=(const EulerAngles& angles) const + { + if (pitch != angles.pitch) + return pitch > angles.pitch; + if (yaw != angles.yaw) + return yaw > angles.yaw; + return roll >= angles.roll; + } + + template + constexpr bool EulerAngles::ApproxEqual(const EulerAngles& lhs, const EulerAngles& rhs, T maxDifference) + { + return lhs.ApproxEqual(rhs, maxDifference); + } + + template + constexpr EulerAngles EulerAngles::Zero() + { + return EulerAngles(0, 0, 0); + } + + template + std::ostream& operator<<(std::ostream& out, const EulerAngles& angles) + { + return out << "EulerAngles(" << angles.pitch << ", " << angles.yaw << ", " << angles.roll << ')'; + } +} diff --git a/runtime/Includes/Maths/Mat4.h b/runtime/Includes/Maths/Mat4.h new file mode 100644 index 0000000..4151ba9 --- /dev/null +++ b/runtime/Includes/Maths/Mat4.h @@ -0,0 +1,122 @@ +#ifndef 
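EulerAngles::ToQuat above composes the quaternion from the half-angle sines and cosines of yaw, roll, and pitch (the XYZ ordering noted in its comment). The same composition on plain floats, as a self-contained sketch with my own names:

    #include <cassert>
    #include <cmath>

    struct QuatF { float w, x, y, z; };

    // Half-angle composition matching EulerAngles::ToQuat (1 = yaw, 2 = roll, 3 = pitch).
    QuatF EulerToQuat(float pitchRad, float yawRad, float rollRad)
    {
        const float s1 = std::sin(yawRad * 0.5f),   c1 = std::cos(yawRad * 0.5f);
        const float s2 = std::sin(rollRad * 0.5f),  c2 = std::cos(rollRad * 0.5f);
        const float s3 = std::sin(pitchRad * 0.5f), c3 = std::cos(pitchRad * 0.5f);
        return { c1 * c2 * c3 - s1 * s2 * s3,
                 s1 * s2 * c3 + c1 * c2 * s3,
                 s1 * c2 * c3 + c1 * s2 * s3,
                 c1 * s2 * c3 - s1 * c2 * s3 };
    }

    int main()
    {
        const QuatF identity = EulerToQuat(0.f, 0.f, 0.f);
        assert(identity.w == 1.f && identity.x == 0.f && identity.y == 0.f && identity.z == 0.f);

        const QuatF yawOnly = EulerToQuat(0.f, 1.5707963f, 0.f);               // pure yaw
        assert(std::abs(yawOnly.x) < 1e-6f && std::abs(yawOnly.z) < 1e-6f);    // rotation about Y only
    }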
__SCOP_MAT4__ +#define __SCOP_MAT4__ + +#include +#include +#include + +#include + +namespace Scop +{ + template struct Vec2; + template struct Vec3; + template struct Vec4; + template struct Quat; + + template + struct Mat4 + { + T m11, m12, m13, m14; + T m21, m22, m23, m24; + T m31, m32, m33, m34; + T m41, m42, m43, m44; + + constexpr Mat4() = default; + constexpr Mat4(T r11, T r12, T r13, T r14, + T r21, T r22, T r23, T r24, + T r31, T r32, T r33, T r34, + T r41, T r42, T r43, T r44); + constexpr Mat4(const T matrix[16]); + constexpr Mat4(const Mat4&) = default; + constexpr Mat4(Mat4&&) = default; + + constexpr Mat4& ApplyRotation(const Quat& rotation); + constexpr Mat4& ApplyScale(const Vec3& scale); + constexpr Mat4& ApplyTranslation(const Vec3& translation); + + constexpr bool ApproxEqual(const Mat4& vec, T max_difference = std::numeric_limits::epsilon()) const; + + constexpr Mat4& Concatenate(const Mat4& matrix); + constexpr Mat4& ConcatenateTransform(const Mat4& matrix); + + constexpr Vec4 GetColumn(std::size_t column) const; + constexpr T GetDeterminant() const; + constexpr T GetDeterminantTransform() const; + constexpr bool GetInverse(Mat4* dest) const; + constexpr bool GetInverseTransform(Mat4* dest) const; + Quat GetRotation() const; + constexpr Vec4 GetRow(std::size_t row) const; + constexpr Vec3 GetScale() const; + constexpr Vec3 GetSquaredScale() const; + constexpr Vec3 GetTranslation() const; + constexpr void GetTransposed(Mat4* dest) const; + + constexpr bool HasNegativeScale() const; + constexpr bool HasScale() const; + + constexpr Mat4& Inverse(bool* succeeded = nullptr); + constexpr Mat4& InverseTransform(bool* succeeded = nullptr); + + constexpr bool IsTransformMatrix() const; + constexpr bool IsIdentity() const; + + constexpr Mat4& SetRotation(const Quat& rotation); + constexpr Mat4& SetScale(const Vec3& scale); + constexpr Mat4& SetTranslation(const Vec3& translation); + + std::string ToString() const; + + constexpr Vec2 Transform(const Vec2& vector, T z = 0.0, T w = 1.0) const; + constexpr Vec3 Transform(const Vec3& vector, T w = 1.0) const; + constexpr Vec4 Transform(const Vec4& vector) const; + + constexpr Mat4& Transpose(); + + constexpr T& operator()(std::size_t x, std::size_t y); + constexpr const T& operator()(std::size_t x, std::size_t y) const; + + constexpr T& operator[](std::size_t i); + constexpr const T& operator[](std::size_t i) const; + + constexpr Mat4& operator=(const Mat4&) = default; + constexpr Mat4& operator=(Mat4&&) = default; + + constexpr Mat4 operator*(const Mat4& matrix) const; + constexpr Vec2 operator*(const Vec2& vector) const; + constexpr Vec3 operator*(const Vec3& vector) const; + constexpr Vec4 operator*(const Vec4& vector) const; + constexpr Mat4 operator*(T scalar) const; + + constexpr Mat4& operator*=(const Mat4& matrix); + constexpr Mat4& operator*=(T scalar); + + constexpr bool operator==(const Mat4& mat) const; + constexpr bool operator!=(const Mat4& mat) const; + + static constexpr bool ApproxEqual(const Mat4& lhs, const Mat4& rhs, T max_difference = std::numeric_limits::epsilon()); + static constexpr Mat4 Concatenate(const Mat4& left, const Mat4& right); + static constexpr Mat4 ConcatenateTransform(const Mat4& left, const Mat4& right); + static constexpr Mat4 Identity(); + static constexpr Mat4 LookAt(const Vec3& eye, const Vec3& target, const Vec3& up = Vec3::Up()); + static constexpr Mat4 Ortho(T left, T right, T top, T bottom, T zNear = -1.0, T zFar = 1.0); + static Mat4 Perspective(RadianAngle angle, T ratio, T zNear, T 
zFar); + static constexpr Mat4 Rotate(const Quat& rotation); + static constexpr Mat4 Scale(const Vec3& scale); + static constexpr Mat4 Translate(const Vec3& translation); + static constexpr Mat4 Transform(const Vec3& translation, const Quat& rotation); + static constexpr Mat4 Transform(const Vec3& translation, const Quat& rotation, const Vec3& scale); + static constexpr Mat4 TransformInverse(const Vec3& translation, const Quat& rotation); + static constexpr Mat4 TransformInverse(const Vec3& translation, const Quat& rotation, const Vec3& scale); + static constexpr Mat4 Zero(); + + ~Mat4() = default; + }; + + using Mat4d = Mat4; + using Mat4f = Mat4; +} + +#include + +#endif diff --git a/runtime/Includes/Maths/Mat4.inl b/runtime/Includes/Maths/Mat4.inl new file mode 100644 index 0000000..7f67930 --- /dev/null +++ b/runtime/Includes/Maths/Mat4.inl @@ -0,0 +1,879 @@ +#pragma once +#include + +#include +#include +#include +#include +#include +#include +#include + +#include +#include + +namespace Scop +{ + template + constexpr Mat4::Mat4(T r11, T r12, T r13, T r14, + T r21, T r22, T r23, T r24, + T r31, T r32, T r33, T r34, + T r41, T r42, T r43, T r44) : + m11(r11), m12(r12), m13(r13), m14(r14), + m21(r21), m22(r22), m23(r23), m24(r24), + m31(r31), m32(r32), m33(r33), m34(r34), + m41(r41), m42(r42), m43(r43), m44(r44) + {} + + template + constexpr Mat4::Mat4(const T matrix[16]) : + Mat4(matrix[ 0], matrix[ 1], matrix[ 2], matrix[ 3], + matrix[ 4], matrix[ 5], matrix[ 6], matrix[ 7], + matrix[ 8], matrix[ 9], matrix[10], matrix[11], + matrix[12], matrix[13], matrix[14], matrix[15]) + {} + + template + constexpr Mat4& Mat4::ApplyRotation(const Quat& rotation) + { + return Concatenate(Mat4::Rotate(rotation)); + } + + template + constexpr Mat4& Mat4::ApplyScale(const Vec3& scale) + { + m11 *= scale.x; + m12 *= scale.x; + m13 *= scale.x; + + m21 *= scale.y; + m22 *= scale.y; + m23 *= scale.y; + + m31 *= scale.z; + m32 *= scale.z; + m33 *= scale.z; + + return *this; + } + + template + constexpr Mat4& Mat4::ApplyTranslation(const Vec3& translation) + { + m41 += translation.x; + m42 += translation.y; + m43 += translation.z; + + return *this; + } + + template + constexpr bool Mat4::ApproxEqual(const Mat4& mat, T maxDifference) const + { + for(unsigned int i = 0; i < 16; ++i) + if(!NumberEquals((&m11)[i], (&mat.m11)[i], maxDifference)) + return false; + + return true; + } + + template + constexpr Mat4& Mat4::Concatenate(const Mat4& matrix) + { + return operator=(Mat4( + m11 * matrix.m11 + m12 * matrix.m21 + m13 * matrix.m31 + m14 * matrix.m41, + m11 * matrix.m12 + m12 * matrix.m22 + m13 * matrix.m32 + m14 * matrix.m42, + m11 * matrix.m13 + m12 * matrix.m23 + m13 * matrix.m33 + m14 * matrix.m43, + m11 * matrix.m14 + m12 * matrix.m24 + m13 * matrix.m34 + m14 * matrix.m44, + + m21 * matrix.m11 + m22 * matrix.m21 + m23 * matrix.m31 + m24 * matrix.m41, + m21 * matrix.m12 + m22 * matrix.m22 + m23 * matrix.m32 + m24 * matrix.m42, + m21 * matrix.m13 + m22 * matrix.m23 + m23 * matrix.m33 + m24 * matrix.m43, + m21 * matrix.m14 + m22 * matrix.m24 + m23 * matrix.m34 + m24 * matrix.m44, + + m31 * matrix.m11 + m32 * matrix.m21 + m33 * matrix.m31 + m34 * matrix.m41, + m31 * matrix.m12 + m32 * matrix.m22 + m33 * matrix.m32 + m34 * matrix.m42, + m31 * matrix.m13 + m32 * matrix.m23 + m33 * matrix.m33 + m34 * matrix.m43, + m31 * matrix.m14 + m32 * matrix.m24 + m33 * matrix.m34 + m34 * matrix.m44, + + m41 * matrix.m11 + m42 * matrix.m21 + m43 * matrix.m31 + m44 * matrix.m41, + m41 * matrix.m12 + m42 * matrix.m22 + m43 * 
matrix.m32 + m44 * matrix.m42, + m41 * matrix.m13 + m42 * matrix.m23 + m43 * matrix.m33 + m44 * matrix.m43, + m41 * matrix.m14 + m42 * matrix.m24 + m43 * matrix.m34 + m44 * matrix.m44 + )); + } + + template + constexpr Mat4& Mat4::ConcatenateTransform(const Mat4& matrix) + { + return operator=(Mat4( + m11*matrix.m11 + m12*matrix.m21 + m13*matrix.m31, + m11*matrix.m12 + m12*matrix.m22 + m13*matrix.m32, + m11*matrix.m13 + m12*matrix.m23 + m13*matrix.m33, + T(0.0), + + m21*matrix.m11 + m22*matrix.m21 + m23*matrix.m31, + m21*matrix.m12 + m22*matrix.m22 + m23*matrix.m32, + m21*matrix.m13 + m22*matrix.m23 + m23*matrix.m33, + T(0.0), + + m31*matrix.m11 + m32*matrix.m21 + m33*matrix.m31, + m31*matrix.m12 + m32*matrix.m22 + m33*matrix.m32, + m31*matrix.m13 + m32*matrix.m23 + m33*matrix.m33, + T(0.0), + + m41*matrix.m11 + m42*matrix.m21 + m43*matrix.m31 + matrix.m41, + m41*matrix.m12 + m42*matrix.m22 + m43*matrix.m32 + matrix.m42, + m41*matrix.m13 + m42*matrix.m23 + m43*matrix.m33 + matrix.m43, + T(1.0) + )); + } + + template + constexpr Vec4 Mat4::GetColumn(std::size_t column) const + { + Assert(column < 4, "column index out of range"); + const T* ptr = &m11 + column * 4; + return Vec4(ptr[0], ptr[1], ptr[2], ptr[3]); + } + + template + constexpr T Mat4::GetDeterminant() const + { + T A = m22*(m33*m44 - m43*m34) - m32*(m23*m44 - m43*m24) + m42*(m23*m34 - m33*m24); + T B = m12*(m33*m44 - m43*m34) - m32*(m13*m44 - m43*m14) + m42*(m13*m34 - m33*m14); + T C = m12*(m23*m44 - m43*m24) - m22*(m13*m44 - m43*m14) + m42*(m13*m24 - m23*m14); + T D = m12*(m23*m34 - m33*m24) - m22*(m13*m34 - m33*m14) + m32*(m13*m24 - m23*m14); + + return m11*A - m21*B + m31*C - m41*D; + } + + template + constexpr T Mat4::GetDeterminantTransform() const + { + T A = m22*m33 - m32*m23; + T B = m12*m33 - m32*m13; + T C = m12*m23 - m22*m13; + + return m11*A - m21*B + m31*C; + } + + template + constexpr bool Mat4::GetInverse(Mat4* dest) const + { + Assert(dest, "destination matrix must be valid"); + + T det = GetDeterminant(); + if(det == T(0.0)) + return false; + + // http://stackoverflow.com/questions/1148309/inverting-a-4x4-matrix + T inv[16]; + inv[0] = m22 * m33 * m44 - + m22 * m34 * m43 - + m32 * m23 * m44 + + m32 * m24 * m43 + + m42 * m23 * m34 - + m42 * m24 * m33; + + inv[1] = -m12 * m33 * m44 + + m12 * m34 * m43 + + m32 * m13 * m44 - + m32 * m14 * m43 - + m42 * m13 * m34 + + m42 * m14 * m33; + + inv[2] = m12 * m23 * m44 - + m12 * m24 * m43 - + m22 * m13 * m44 + + m22 * m14 * m43 + + m42 * m13 * m24 - + m42 * m14 * m23; + + inv[3] = -m12 * m23 * m34 + + m12 * m24 * m33 + + m22 * m13 * m34 - + m22 * m14 * m33 - + m32 * m13 * m24 + + m32 * m14 * m23; + + inv[4] = -m21 * m33 * m44 + + m21 * m34 * m43 + + m31 * m23 * m44 - + m31 * m24 * m43 - + m41 * m23 * m34 + + m41 * m24 * m33; + + inv[5] = m11 * m33 * m44 - + m11 * m34 * m43 - + m31 * m13 * m44 + + m31 * m14 * m43 + + m41 * m13 * m34 - + m41 * m14 * m33; + + inv[6] = -m11 * m23 * m44 + + m11 * m24 * m43 + + m21 * m13 * m44 - + m21 * m14 * m43 - + m41 * m13 * m24 + + m41 * m14 * m23; + + inv[7] = m11 * m23 * m34 - + m11 * m24 * m33 - + m21 * m13 * m34 + + m21 * m14 * m33 + + m31 * m13 * m24 - + m31 * m14 * m23; + + inv[8] = m21 * m32 * m44 - + m21 * m34 * m42 - + m31 * m22 * m44 + + m31 * m24 * m42 + + m41 * m22 * m34 - + m41 * m24 * m32; + + inv[9] = -m11 * m32 * m44 + + m11 * m34 * m42 + + m31 * m12 * m44 - + m31 * m14 * m42 - + m41 * m12 * m34 + + m41 * m14 * m32; + + inv[10] = m11 * m22 * m44 - + m11 * m24 * m42 - + m21 * m12 * m44 + + m21 * m14 * m42 + + m41 * m12 * m24 - 
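Concatenate above is the fully unrolled 4x4 product with *this as the left operand and row-major storage. The loop form below computes the same thing on plain arrays and can help when cross-checking the unrolled terms; it is a sketch, not the patch's code:

    #include <cassert>
    #include <cstddef>

    // result = lhs * rhs for row-major 4x4 matrices; result must not alias the inputs.
    void Multiply4x4(const float lhs[16], const float rhs[16], float result[16])
    {
        for (std::size_t row = 0; row < 4; ++row)
            for (std::size_t col = 0; col < 4; ++col)
            {
                float sum = 0.f;
                for (std::size_t k = 0; k < 4; ++k)
                    sum += lhs[row * 4 + k] * rhs[k * 4 + col];
                result[row * 4 + col] = sum;
            }
    }

    int main()
    {
        const float translation[16] = { 1,0,0,0,  0,1,0,0,  0,0,1,0,  2,3,4,1 };
        const float identity[16]    = { 1,0,0,0,  0,1,0,0,  0,0,1,0,  0,0,0,1 };
        float out[16];
        Multiply4x4(translation, identity, out);
        assert(out[12] == 2.f && out[13] == 3.f && out[14] == 4.f); // unchanged by identity
    }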
+ m41 * m14 * m22; + + inv[11] = -m11 * m22 * m34 + + m11 * m24 * m32 + + m21 * m12 * m34 - + m21 * m14 * m32 - + m31 * m12 * m24 + + m31 * m14 * m22; + + inv[12] = -m21 * m32 * m43 + + m21 * m33 * m42 + + m31 * m22 * m43 - + m31 * m23 * m42 - + m41 * m22 * m33 + + m41 * m23 * m32; + + inv[13] = m11 * m32 * m43 - + m11 * m33 * m42 - + m31 * m12 * m43 + + m31 * m13 * m42 + + m41 * m12 * m33 - + m41 * m13 * m32; + + inv[14] = -m11 * m22 * m43 + + m11 * m23 * m42 + + m21 * m12 * m43 - + m21 * m13 * m42 - + m41 * m12 * m23 + + m41 * m13 * m22; + + inv[15] = m11 * m22 * m33 - + m11 * m23 * m32 - + m21 * m12 * m33 + + m21 * m13 * m32 + + m31 * m12 * m23 - + m31 * m13 * m22; + + T invDet = T(1.0) / det; + for(unsigned int i = 0; i < 16; ++i) + inv[i] *= invDet; + + *dest = inv; + return true; + } + + template + constexpr bool Mat4::GetInverseTransform(Mat4* dest) const + { + Assert(dest, "destination matrix must be valid"); + + T det = GetDeterminantTransform(); + if(det == T(0.0)) + return false; + + + // http://stackoverflow.com/questions/1148309/inverting-a-4x4-matrix + T inv[16]; + inv[0] = m22 * m33 - + m32 * m23; + + inv[1] = -m12 * m33 + + m32 * m13; + + inv[2] = m12 * m23 - + m22 * m13; + + inv[3] = T(0.0); + + inv[4] = -m21 * m33 + + m31 * m23; + + inv[5] = m11 * m33 - + m31 * m13; + + inv[6] = -m11 * m23 + + m21 * m13; + + inv[7] = T(0.0); + + inv[8] = m21 * m32 - + m31 * m22; + + inv[9] = -m11 * m32 + + m31 * m12; + + inv[10] = m11 * m22 - + m21 * m12; + + inv[11] = T(0.0); + + inv[12] = -m21 * m32 * m43 + + m21 * m33 * m42 + + m31 * m22 * m43 - + m31 * m23 * m42 - + m41 * m22 * m33 + + m41 * m23 * m32; + + inv[13] = m11 * m32 * m43 - + m11 * m33 * m42 - + m31 * m12 * m43 + + m31 * m13 * m42 + + m41 * m12 * m33 - + m41 * m13 * m32; + + inv[14] = -m11 * m22 * m43 + + m11 * m23 * m42 + + m21 * m12 * m43 - + m21 * m13 * m42 - + m41 * m12 * m23 + + m41 * m13 * m22; + + T invDet = T(1.0) / det; + for(unsigned int i = 0; i < 16; ++i) + inv[i] *= invDet; + + inv[15] = T(1.0); + + *dest = inv; + return true; + } + + template + Quat Mat4::GetRotation() const + { + // http://www.euclideanspace.com/maths/geometry/rotations/conversions/matrixToQuat/ + Quat quat; + + T trace = m11 + m22 + m33; + if(trace > T(0.0)) + { + T s = T(0.5) / std::sqrt(trace + T(1.0)); + quat.w = T(0.25) / s; + quat.x = (m23 - m32) * s; + quat.y = (m31 - m13) * s; + quat.z = (m12 - m21) * s; + } + else + { + if(m11 > m22 && m11 > m33) + { + T s = T(2.0) * std::sqrt(T(1.0) + m11 - m22 - m33); + + quat.w = (m23 - m32) / s; + quat.x = T(0.25) * s; + quat.y = (m21 + m12) / s; + quat.z = (m31 + m13) / s; + } + else if(m22 > m33) + { + T s = T(2.0) * std::sqrt(T(1.0) + m22 - m11 - m33); + + quat.w = (m31 - m13) / s; + quat.x = (m21 + m12) / s; + quat.y = T(0.25) * s; + quat.z = (m32 + m23) / s; + } + else + { + T s = T(2.0) * std::sqrt(T(1.0) + m33 - m11 - m22); + + quat.w = (m12 - m21) / s; + quat.x = (m31 + m13) / s; + quat.y = (m32 + m23) / s; + quat.z = T(0.25) * s; + } + } + + return quat; + } + + template + constexpr Vec4 Mat4::GetRow(std::size_t row) const + { + Assert(row < 4, "row index out of range"); + + const T* ptr = &m11; + return Vec4(ptr[row], ptr[row+4], ptr[row+8], ptr[row+12]); + } + + template + constexpr Vec3 Mat4::GetScale() const + { + Vec3 squaredScale = GetSquaredScale(); + return Vec3(std::sqrt(squaredScale.x), std::sqrt(squaredScale.y), std::sqrt(squaredScale.z)); + } + + template + constexpr Vec3 Mat4::GetSquaredScale() const + { + return Vec3(m11 * m11 + m12 * m12 + m13 * m13, + m21 * m21 + m22 * m22 
+ m23 * m23, + m31 * m31 + m32 * m32 + m33 * m33); + } + + template + constexpr Vec3 Mat4::GetTranslation() const + { + return Vec3(m41, m42, m43); + } + + template + constexpr void Mat4::GetTransposed(Mat4* dest) const + { + (*dest) = Mat4f( + m11, m21, m31, m41, + m12, m22, m32, m42, + m13, m23, m33, m43, + m14, m24, m34, m44 + ); + } + + template + constexpr bool Mat4::HasNegativeScale() const + { + return GetDeterminant() < T(0.0); + } + + template + constexpr bool Mat4::HasScale() const + { + T t = m11*m11 + m21*m21 + m31*m31; + if(!NumberEquals(t, T(1.0))) + return true; + + t = m12*m12 + m22*m22 + m32*m32; + if(!NumberEquals(t, T(1.0))) + return true; + + t = m13*m13 + m23*m23 + m33*m33; + if(!NumberEquals(t, T(1.0))) + return true; + + return false; + } + + template + constexpr Mat4& Mat4::Inverse(bool* succeeded) + { + bool result = GetInverse(this); + if(succeeded) + *succeeded = result; + + return *this; + } + + template + constexpr Mat4& Mat4::InverseTransform(bool* succeeded) + { + bool result = GetInverseTransform(this); + if(succeeded) + *succeeded = result; + + return *this; + } + + template + constexpr bool Mat4::IsTransformMatrix() const + { + return NumberEquals(m14, T(0.0)) && NumberEquals(m24, T(0.0)) && NumberEquals(m34, T(0.0)) && NumberEquals(m44, T(1.0)); + } + + template + constexpr bool Mat4::IsIdentity() const + { + return (NumberEquals(m11, T(1.0)) && NumberEquals(m12, T(0.0)) && NumberEquals(m13, T(0.0)) && NumberEquals(m14, T(0.0)) && + NumberEquals(m21, T(0.0)) && NumberEquals(m22, T(1.0)) && NumberEquals(m23, T(0.0)) && NumberEquals(m24, T(0.0)) && + NumberEquals(m31, T(0.0)) && NumberEquals(m32, T(0.0)) && NumberEquals(m33, T(1.0)) && NumberEquals(m34, T(0.0)) && + NumberEquals(m41, T(0.0)) && NumberEquals(m42, T(0.0)) && NumberEquals(m43, T(0.0)) && NumberEquals(m44, T(1.0))); + } + + template + constexpr Mat4& Mat4::SetRotation(const Quat& rotation) + { + T qw = rotation.w; + T qx = rotation.x; + T qy = rotation.y; + T qz = rotation.z; + + T qx2 = qx * qx; + T qy2 = qy * qy; + T qz2 = qz * qz; + + m11 = T(1.0) - T(2.0) * qy2 - T(2.0) * qz2; + m21 = T(2.0) * qx * qy - T(2.0) * qz * qw; + m31 = T(2.0) * qx * qz + T(2.0) * qy * qw; + + m12 = T(2.0) * qx * qy + T(2.0) * qz * qw; + m22 = T(1.0) - T(2.0) * qx2 - T(2.0) * qz2; + m32 = T(2.0) * qy * qz - T(2.0) * qx * qw; + + m13 = T(2.0) * qx * qz - T(2.0) * qy * qw; + m23 = T(2.0) * qy * qz + T(2.0) * qx * qw; + m33 = T(1.0) - T(2.0) * qx2 - T(2.0) * qy2; + + return *this; + } + + template + constexpr Mat4& Mat4::SetScale(const Vec3& scale) + { + m11 = scale.x; + m22 = scale.y; + m33 = scale.z; + + return *this; + } + + template + constexpr Mat4& Mat4::SetTranslation(const Vec3& translation) + { + m41 = translation.x; + m42 = translation.y; + m43 = translation.z; + + return *this; + } + + template + std::string Mat4::ToString() const + { + std::ostringstream ss; + ss << *this; + + return ss.str(); + } + + template + constexpr Vec2 Mat4::Transform(const Vec2& vector, T z, T w) const + { + return Vec2(m11 * vector.x + m21 * vector.y + m31 * z + m41 * w, + m12 * vector.x + m22 * vector.y + m32 * z + m42 * w); + } + + template + constexpr Vec3 Mat4::Transform(const Vec3& vector, T w) const + { + return Vec3(m11 * vector.x + m21 * vector.y + m31 * vector.z + m41 * w, + m12 * vector.x + m22 * vector.y + m32 * vector.z + m42 * w, + m13 * vector.x + m23 * vector.y + m33 * vector.z + m43 * w); + } + + template + constexpr Vec4 Mat4::Transform(const Vec4& vector) const + { + return Vec4(m11 * vector.x + m21 * vector.y + 
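The Transform overloads above multiply the vector as a row on the left of the matrix (m11 * x + m21 * y + ...), which is why Translate and ApplyTranslation keep the offset in the fourth row (m41..m43). A self-contained illustration of that convention on plain floats (my names, not the patch's):

    #include <cassert>

    struct Vec3F { float x, y, z; };

    // out = v * M for a row-major matrix and a point (implicit w = 1).
    Vec3F TransformPoint(const float m[16], Vec3F v)
    {
        return { m[0] * v.x + m[4] * v.y + m[ 8] * v.z + m[12],
                 m[1] * v.x + m[5] * v.y + m[ 9] * v.z + m[13],
                 m[2] * v.x + m[6] * v.y + m[10] * v.z + m[14] };
    }

    int main()
    {
        const float translate[16] = { 1,0,0,0,  0,1,0,0,  0,0,1,0,  2,3,4,1 }; // Mat4::Translate layout
        const Vec3F p = TransformPoint(translate, { 1.f, 1.f, 1.f });
        assert(p.x == 3.f && p.y == 4.f && p.z == 5.f);
    }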
m31 * vector.z + m41 * vector.w, + m12 * vector.x + m22 * vector.y + m32 * vector.z + m42 * vector.w, + m13 * vector.x + m23 * vector.y + m33 * vector.z + m43 * vector.w, + m14 * vector.x + m24 * vector.y + m34 * vector.z + m44 * vector.w); + } + + template + constexpr Mat4& Mat4::Transpose() + { + std::swap(m12, m21); + std::swap(m13, m31); + std::swap(m14, m41); + std::swap(m23, m32); + std::swap(m24, m42); + std::swap(m34, m43); + + return *this; + } + + template + constexpr T& Mat4::operator()(std::size_t x, std::size_t y) + { + Assert(x <= 3, "index out of range"); + Assert(y <= 3, "index out of range"); + + return (&m11)[y*4 + x]; + } + + template + constexpr const T& Mat4::operator()(std::size_t x, std::size_t y) const + { + Assert(x <= 3, "index out of range"); + Assert(y <= 3, "index out of range"); + + return (&m11)[y*4+x]; + } + + template + constexpr T& Mat4::operator[](std::size_t i) + { + Assert(i <= 16, "index out of range"); + + return (&m11)[i]; + } + + template + constexpr const T& Mat4::operator[](std::size_t i) const + { + Assert(i <= 16, "index out of range"); + + return (&m11)[i]; + } + + template + constexpr Mat4 Mat4::operator*(const Mat4& matrix) const + { + Mat4 result(*this); + return result.Concatenate(matrix); + } + + template + constexpr Vec2 Mat4::operator*(const Vec2& vector) const + { + return Transform(vector); + } + + template + constexpr Vec3 Mat4::operator*(const Vec3& vector) const + { + return Transform(vector); + } + + template + constexpr Vec4 Mat4::operator*(const Vec4& vector) const + { + return Transform(vector); + } + + template + constexpr Mat4 Mat4::operator*(T scalar) const + { + Mat4 mat; + for(unsigned int i = 0; i < 16; ++i) + mat[i] = (&m11)[i] * scalar; + + return mat; + } + + template + constexpr Mat4& Mat4::operator*=(const Mat4& matrix) + { + Concatenate(matrix); + + return *this; + } + + template + constexpr Mat4& Mat4::operator*=(T scalar) + { + for(unsigned int i = 0; i < 16; ++i) + (&m11)[i] *= scalar; + + return *this; + } + + template + constexpr bool Mat4::operator==(const Mat4& mat) const + { + for(unsigned int i = 0; i < 16; ++i) + if((&m11)[i] != (&mat.m11)[i]) + return false; + + return true; + } + + template + constexpr bool Mat4::operator!=(const Mat4& mat) const + { + return !operator==(mat); + } + + template + constexpr bool Mat4::ApproxEqual(const Mat4& lhs, const Mat4& rhs, T maxDifference) + { + return lhs.ApproxEqual(rhs, maxDifference); + } + + template + constexpr Mat4 Mat4::Concatenate(const Mat4& left, const Mat4& right) + { + Mat4 matrix(left); // Copy of left-hand side matrix + matrix.Concatenate(right); // Concatenation with right-hand side + + return matrix; + } + + template + constexpr Mat4 Mat4::ConcatenateTransform(const Mat4& left, const Mat4& right) + { + Mat4 matrix(left); // Copy of left-hand side matrix + matrix.ConcatenateTransform(right); // Affine concatenation with right-hand side + + return matrix; + } + + template + constexpr Mat4 Mat4::Identity() + { + return Mat4( + T(1.0), T(0.0), T(0.0), T(0.0), + T(0.0), T(1.0), T(0.0), T(0.0), + T(0.0), T(0.0), T(1.0), T(0.0), + T(0.0), T(0.0), T(0.0), T(1.0) + ); + } + + template + constexpr Mat4 Mat4::LookAt(const Vec3& eye, const Vec3& target, const Vec3& up) + { + Vec3 f = Vec3::Normalize(target - eye); + Vec3 s = Vec3::Normalize(f.CrossProduct(up)); + Vec3 u = s.CrossProduct(f); + + return Mat4( + s.x, u.x, -f.x, T(0.0), + s.y, u.y, -f.y, T(0.0), + s.z, u.z, -f.z, T(0.0), + -s.DotProduct(eye), -u.DotProduct(eye), f.DotProduct(eye), T(1.0) + ); + } + 
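Because of that row-vector convention, a model-view-projection chain composes left to right. A hedged usage sketch: it assumes this patch's Mat4f/Vec3f aliases and an include layout rooted at runtime/Includes (the actual include targets were stripped from this copy), so treat the paths as illustrative:

    #include <Maths/Mat4.h> // assumed path
    #include <Maths/Vec3.h> // assumed path

    using namespace Scop;

    Mat4f BuildViewProj(const Vec3f& eye, const Vec3f& target, const Mat4f& proj)
    {
        // LookAt already returns the inverse camera transform, i.e. a view matrix.
        const Mat4f view = Mat4f::LookAt(eye, target); // up defaults to Vec3::Up()
        return view * proj;                            // row vectors: v_clip = v_world * view * proj
    }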
+ template + constexpr Mat4 Mat4::Ortho(T left, T right, T top, T bottom, T zNear, T zFar) + { + // http://msdn.microsoft.com/en-us/library/windows/desktop/bb204942(v=vs.85).aspx + return Mat4( + T(2.0) / (right - left), T(0.0), T(0.0), T(0.0), + T(0.0), T(2.0) / (top - bottom), T(0.0), T(0.0), + T(0.0), T(0.0), T(1.0) / (zNear - zFar), T(0.0), + (left + right) / (left - right), (top + bottom) / (bottom - top), zNear / (zNear - zFar), T(1.0) + ); + } + + template + Mat4 Mat4::Perspective(RadianAngle angle, T ratio, T zNear, T zFar) + { + angle /= T(2.0); + + T yScale = angle.GetTan(); + + return Mat4( + T(1.0) / (ratio * yScale), T(0.0), T(0.0), T(0.0), + T(0.0), T(-1.0) / (yScale), T(0.0), T(0.0), + T(0.0), T(0.0), zFar / (zNear - zFar), T(-1.0), + T(0.0), T(0.0), -(zNear * zFar) / (zFar - zNear), T(0.0) + ); + } + + template + constexpr Mat4 Mat4::Rotate(const Quat& rotation) + { + Mat4 matrix = Mat4::Identity(); + matrix.SetRotation(rotation); + + return matrix; + } + + template + constexpr Mat4 Mat4::Scale(const Vec3& scale) + { + return Mat4( + scale.x, T(0.0), T(0.0), T(0.0), + T(0.0), scale.y, T(0.0), T(0.0), + T(0.0), T(0.0), scale.z, T(0.0), + T(0.0), T(0.0), T(0.0), T(1.0) + ); + } + + template + constexpr Mat4 Mat4::Translate(const Vec3& translation) + { + return Mat4( + T(1.0), T(0.0), T(0.0), T(0.0), + T(0.0), T(1.0), T(0.0), T(0.0), + T(0.0), T(0.0), T(1.0), T(0.0), + translation.x, translation.y, translation.z, T(1.0) + ); + } + + template + constexpr Mat4 Mat4::Transform(const Vec3& translation, const Quat& rotation) + { + Mat4 mat = Mat4f::Identity(); + mat.SetRotation(rotation); + mat.SetTranslation(translation); + + return mat; + } + + template + constexpr Mat4 Mat4::Transform(const Vec3& translation, const Quat& rotation, const Vec3& scale) + { + Mat4 mat = Transform(translation, rotation); + mat.ApplyScale(scale); + + return mat; + } + + template + constexpr Mat4 Mat4::TransformInverse(const Vec3& translation, const Quat& rotation) + { + // A view matrix must apply an inverse transformation of the 'world' matrix + Quat invRot = rotation.GetConjugate(); // Inverse of the rotation + + return Transform(-(invRot * translation), invRot); + } + + template + constexpr Mat4 Mat4::TransformInverse(const Vec3& translation, const Quat& rotation, const Vec3& scale) + { + return TransformInverse(translation, rotation).ApplyScale(T(1.0) / scale); + } + + template + constexpr Mat4 Mat4::Zero() + { + return Mat4( + T(0.0), T(0.0), T(0.0), T(0.0), + T(0.0), T(0.0), T(0.0), T(0.0), + T(0.0), T(0.0), T(0.0), T(0.0), + T(0.0), T(0.0), T(0.0), T(0.0) + ); + } + + template + std::ostream& operator<<(std::ostream& out, const Mat4& matrix) + { + return out << "Mat4(" << matrix.m11 << ", " << matrix.m12 << ", " << matrix.m13 << ", " << matrix.m14 << ",\n" + << " " << matrix.m21 << ", " << matrix.m22 << ", " << matrix.m23 << ", " << matrix.m24 << ",\n" + << " " << matrix.m31 << ", " << matrix.m32 << ", " << matrix.m33 << ", " << matrix.m34 << ",\n" + << " " << matrix.m41 << ", " << matrix.m42 << ", " << matrix.m43 << ", " << matrix.m44 << ')'; + } + + template + constexpr Mat4 operator*(T scale, const Mat4& matrix) + { + return matrix * scale; + } +} diff --git a/runtime/Includes/Maths/MathsUtils.h b/runtime/Includes/Maths/MathsUtils.h new file mode 100644 index 0000000..c17d0da --- /dev/null +++ b/runtime/Includes/Maths/MathsUtils.h @@ -0,0 +1,26 @@ +#ifndef __SCOP_MATHS_UTILS__ +#define __SCOP_MATHS_UTILS__ + +#include + +namespace Scop +{ + template + [[nodiscard]] constexpr T Mod(T x, T y) 
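The Perspective matrix above targets a right-handed view space (camera looking down -Z), maps depth into [0, 1], and flips Y, which matches Vulkan's clip-space conventions. A self-contained check of the depth mapping using the same third-column terms (plain floats, my names):

    #include <cassert>
    #include <cmath>

    int main()
    {
        const float zNear = 0.1f, zFar = 100.f;

        // Third column of Mat4::Perspective (row-vector convention): out.z = z * m33 + m43, out.w = -z.
        const float m33 = zFar / (zNear - zFar);
        const float m43 = -(zNear * zFar) / (zFar - zNear);
        const auto depth = [&](float zView) { return (zView * m33 + m43) / (-zView); };

        assert(std::abs(depth(-zNear)) < 1e-5f);       // near plane maps to 0
        assert(std::abs(depth(-zFar) - 1.f) < 1e-5f);  // far plane maps to 1
    }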
noexcept; + + template + [[nodiscard]] constexpr T DegreeToRadian(T degrees) noexcept; + + template + [[nodiscard]] constexpr T RadianToDegree(T radians) noexcept; + + template + [[nodiscard]] constexpr T Clamp(T value, T min, T max) noexcept; + + template + [[nodiscard]] constexpr T Lerp(const T& from, const T& to, const T2& interpolation) noexcept; +} + +#include + +#endif diff --git a/runtime/Includes/Maths/MathsUtils.inl b/runtime/Includes/Maths/MathsUtils.inl new file mode 100644 index 0000000..d47a421 --- /dev/null +++ b/runtime/Includes/Maths/MathsUtils.inl @@ -0,0 +1,47 @@ +#pragma once +#include + +#include +#include + +#include + +namespace Scop +{ + template + [[nodiscard]] constexpr T Mod(T x, T y) noexcept + { + if constexpr(std::is_floating_point_v) + { + if(!std::is_constant_evaluated()) + return x - static_cast(x / y) * y; + else + return std::fmod(x, y); + } + return x % y; + } + + template + [[nodiscard]] constexpr T DegreeToRadian(T degrees) noexcept + { + return degrees * (Pi() / T(180.0)); + } + + template + [[nodiscard]] constexpr T RadianToDegree(T radians) noexcept + { + return radians * (T(180.0) / Pi()); + } + + template + [[nodiscard]] constexpr T Clamp(T value, T min, T max) noexcept + { + return std::max(std::min(value, max), min); + } + + template + [[nodiscard]] constexpr T Lerp(const T& from, const T& to, const T2& interpolation) noexcept + { + return static_cast(from + interpolation * (to - from)); + } +} diff --git a/runtime/Includes/Maths/Quaternions.h b/runtime/Includes/Maths/Quaternions.h new file mode 100644 index 0000000..5937268 --- /dev/null +++ b/runtime/Includes/Maths/Quaternions.h @@ -0,0 +1,91 @@ +#ifndef __SCOP_QUATERNIONS__ +#define __SCOP_QUATERNIONS__ + +#include +#include + +namespace Scop +{ + template + struct Quat + { + T w, x, y, z; + + constexpr Quat() = default; + constexpr Quat(T W, T X, T Y, T Z); + template Quat(const Angle& angle); + Quat(const EulerAngles& angles); + constexpr Quat(RadianAngle angle, const Vec3& axis); + constexpr Quat(const T quat[4]); + template constexpr explicit Quat(const Quat& quat); + constexpr Quat(const Quat&) = default; + constexpr Quat(Quat&&) = default; + ~Quat() = default; + + RadianAngle AngleBetween(const Quat& vec) const; + constexpr bool ApproxEqual(const Quat& quat, T maxDifference = std::numeric_limits::epsilon()) const; + + Quat& ComputeW(); + constexpr Quat& Conjugate(); + + constexpr T DotProduct(const Quat& vec) const; + + constexpr Quat GetConjugate() const; + Quat GetInverse() const; + Quat GetNormal(T* length = nullptr) const; + + Quat& Inverse(); + + T Magnitude() const; + + Quat& Normalize(T* length = nullptr); + + constexpr T SquaredMagnitude() const; + + RadianAngle To2DAngle() const; + EulerAngles ToEulerAngles() const; + std::string ToString() const; + + constexpr Quat& operator=(const Quat& quat) = default; + constexpr Quat& operator=(Quat&&) = default; + + constexpr Quat operator+(const Quat& quat) const; + constexpr Quat operator*(const Quat& quat) const; + constexpr Vec3 operator*(const Vec3& vec) const; + constexpr Quat operator*(T scale) const; + constexpr Quat operator/(const Quat& quat) const; + + constexpr Quat& operator+=(const Quat& quat); + constexpr Quat& operator*=(const Quat& quat); + constexpr Quat& operator*=(T scale); + constexpr Quat& operator/=(const Quat& quat); + + constexpr bool operator==(const Quat& quat) const; + constexpr bool operator!=(const Quat& quat) const; + constexpr bool operator<(const Quat& quat) const; + constexpr bool operator<=(const Quat& 
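Lerp above is the unclamped form from + t * (to - from), so interpolation factors outside [0, 1] extrapolate; pair it with Clamp when that is not wanted. A quick, self-contained illustration (a sketch, not the patch's code):

    #include <algorithm>
    #include <cassert>

    constexpr float Lerp(float from, float to, float t) { return from + t * (to - from); }

    int main()
    {
        static_assert(Lerp(0.f, 10.f, 0.25f) == 2.5f);
        static_assert(Lerp(0.f, 10.f, 1.5f) == 15.f);                  // extrapolates past 'to'
        assert(std::clamp(Lerp(0.f, 10.f, 1.5f), 0.f, 10.f) == 10.f);  // clamp when needed
    }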
quat) const; + constexpr bool operator>(const Quat& quat) const; + constexpr bool operator>=(const Quat& quat) const; + + static RadianAngle AngleBetween(const Quat& lhs, const Quat& rhs); + static constexpr bool ApproxEqual(const Quat& lhs, const Quat& rhs, T maxDifference = std::numeric_limits::epsilon()); + static constexpr Quat Identity(); + static constexpr Quat Lerp(const Quat& from, const Quat& to, T interpolation); + static Quat LookAt(const Vec3& forward, const Vec3& up); + static Quat Normalize(const Quat& quat, T* length = nullptr); + static Quat RotationBetween(const Vec3& from, const Vec3& to); + static Quat RotateTowards(const Quat& from, const Quat& to, RadianAngle maxRotation); + static Quat Mirror(Quat quat, const Vec3& axis); + static Quat Slerp(const Quat& from, const Quat& to, T interpolation); + static constexpr Quat Zero(); + }; + + using Quatd = Quat; + using Quatf = Quat; + + template std::ostream& operator<<(std::ostream& out, const Quat& quat); +} + +#include + +#endif diff --git a/runtime/Includes/Maths/Quaternions.inl b/runtime/Includes/Maths/Quaternions.inl new file mode 100644 index 0000000..bea9a95 --- /dev/null +++ b/runtime/Includes/Maths/Quaternions.inl @@ -0,0 +1,508 @@ +#pragma once +#include + +namespace Scop +{ + template + constexpr Quat::Quat(T W, T X, T Y, T Z) : w(W), x(X), y(Y), z(Z) + {} + + template + template + Quat::Quat(const Angle& angle) : Quat(angle.ToQuat()) + {} + + template + Quat::Quat(const EulerAngles& angles) : Quat(angles.ToQuat()) + {} + + template + constexpr Quat::Quat(RadianAngle angle, const Vec3& axis) + { + angle /= T(2.0); + + Vec3 normalizedAxis = axis.GetNormal(); + + auto sincos = angle.GetSinCos(); + + w = sincos.second; + x = normalizedAxis.x * sincos.first; + y = normalizedAxis.y * sincos.first; + z = normalizedAxis.z * sincos.first; + + Normalize(); + } + + template + constexpr Quat::Quat(const T quat[4]) : w(quat[0]), x(quat[1]), y(quat[2]), z(quat[3]) + {} + + template + template + constexpr Quat::Quat(const Quat& quat) : w(static_cast(quat.w)), x(static_cast(quat.x)), y(static_cast(quat.y)), z(static_cast(quat.z)) + {} + + template + RadianAngle Quat::AngleBetween(const Quat& quat) const + { + T alpha = Vec3::DotProduct(Vec3(x, y, z), Vec3(quat.x, quat.y, quat.z)); + return std::acos(Scop::Clamp(alpha, T(-1.0), T(1.0))); + } + + template + constexpr bool Quat::ApproxEqual(const Quat& quat, T maxDifference) const + { + return NumberEquals(w, quat.w, maxDifference) && + NumberEquals(x, quat.x, maxDifference) && + NumberEquals(y, quat.y, maxDifference) && + NumberEquals(z, quat.z, maxDifference); + } + + template + Quat& Quat::ComputeW() + { + T t = T(1.0) - SquaredMagnitude(); + + if(t < T(0.0)) + w = T(0.0); + else + w = -std::sqrt(t); + + return *this; + } + + template + constexpr Quat& Quat::Conjugate() + { + x = -x; + y = -y; + z = -z; + return *this; + } + + template + constexpr T Quat::DotProduct(const Quat& quat) const + { + return w * quat.w + x * quat.x + y * quat.y + z * quat.z; + } + + template + constexpr Quat Quat::GetConjugate() const + { + Quat quat(*this); + quat.Conjugate(); + return quat; + } + + template + Quat Quat::GetInverse() const + { + Quat quat(*this); + quat.Inverse(); + return quat; + } + + template + Quat Quat::GetNormal(T* length) const + { + Quat quat(*this); + quat.Normalize(length); + return quat; + } + + template + Quat& Quat::Inverse() + { + T norm = SquaredMagnitude(); + if(norm > T(0.0)) + { + T invNorm = T(1.0) / std::sqrt(norm); + + w *= invNorm; + x *= -invNorm; + y *= 
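The angle/axis constructor above halves the angle and builds w = cos(theta / 2) with (x, y, z) = axis * sin(theta / 2). The same construction on plain floats, assuming the axis is already normalized (my names, not the patch's):

    #include <cassert>
    #include <cmath>

    struct QuatF { float w, x, y, z; };

    QuatF FromAxisAngle(float angleRad, float ax, float ay, float az)
    {
        const float half = angleRad * 0.5f;
        const float s = std::sin(half);
        return { std::cos(half), ax * s, ay * s, az * s };
    }

    int main()
    {
        // 180 degrees around Z gives w close to 0 and z close to 1.
        const QuatF q = FromAxisAngle(3.14159265f, 0.f, 0.f, 1.f);
        assert(std::abs(q.w) < 1e-6f && std::abs(q.z - 1.f) < 1e-6f);
    }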
-invNorm; + z *= -invNorm; + } + + return *this; + } + + template + T Quat::Magnitude() const + { + return std::sqrt(SquaredMagnitude()); + } + + template + Quat& Quat::Normalize(T* length) + { + T norm = std::sqrt(SquaredMagnitude()); + if(norm > T(0.0)) + { + T invNorm = T(1.0) / norm; + w *= invNorm; + x *= invNorm; + y *= invNorm; + z *= invNorm; + } + + if(length) + *length = norm; + + return *this; + } + + template + constexpr T Quat::SquaredMagnitude() const + { + return w * w + x * x + y * y + z * z; + } + + template + RadianAngle Quat::To2DAngle() const + { + T siny_cosp = T(2.0) * (w * z + x * y); + T cosy_cosp = T(1.0) - T(2.0) * (y * y + z * z); + + return std::atan2(siny_cosp, cosy_cosp); + } + + template + EulerAngles Quat::ToEulerAngles() const + { + T test = x * y + z * w; + if(test > T(0.499)) + // singularity at north pole + return EulerAngles(DegreeAngle(T(0.0)), RadianAngle(T(2.0) * std::atan2(x, w)), DegreeAngle(T(90.0))); + + if(test < T(-0.499)) + // singularity at south pole + return EulerAngles(DegreeAngle(T(0.0)), RadianAngle(T(-2.0) * std::atan2(x, w)), DegreeAngle(T(-90.0))); + + return EulerAngles(RadianAngle(std::atan2(T(2.0) * x * w - T(2.0) * y * z, T(1.0) - T(2.0) * x * x - T(2.0) * z * z)), + RadianAngle(std::atan2(T(2.0) * y * w - T(2.0) * x * z, T(1.0) - T(2.0) * y * y - T(2.0) * z * z)), + RadianAngle(std::asin(T(2.0) * test))); + } + + template + std::string Quat::ToString() const + { + std::ostringstream ss; + ss << *this; + + return ss.str(); + } + + template + constexpr Quat Quat::operator+(const Quat& quat) const + { + Quat result; + result.w = w + quat.w; + result.x = x + quat.x; + result.y = y + quat.y; + result.z = z + quat.z; + + return result; + } + + template + constexpr Quat Quat::operator*(const Quat& quat) const + { + Quat result; + result.w = w * quat.w - x * quat.x - y * quat.y - z * quat.z; + result.x = w * quat.x + x * quat.w + y * quat.z - z * quat.y; + result.y = w * quat.y + y * quat.w + z * quat.x - x * quat.z; + result.z = w * quat.z + z * quat.w + x * quat.y - y * quat.x; + + return result; + } + + template + constexpr Vec3 Quat::operator*(const Vec3& vec) const + { + Vec3 quatVec(x, y, z); + Vec3 uv = quatVec.CrossProduct(vec); + Vec3 uuv = quatVec.CrossProduct(uv); + uv *= T(2.0) * w; + uuv *= T(2.0); + + return vec + uv + uuv; + } + + template + constexpr Quat Quat::operator*(T scale) const + { + return Quat(w * scale, + x * scale, + y * scale, + z * scale); + } + + template + constexpr Quat Quat::operator/(const Quat& quat) const + { + return quat.GetConjugate() * (*this); + } + + template + constexpr Quat& Quat::operator+=(const Quat& quat) + { + return operator=(operator+(quat)); + } + + template + constexpr Quat& Quat::operator*=(const Quat& quat) + { + return operator=(operator*(quat)); + } + + template + constexpr Quat& Quat::operator*=(T scale) + { + return operator=(operator*(scale)); + } + + template + constexpr Quat& Quat::operator/=(const Quat& quat) + { + return operator=(operator/(quat)); + } + + template + constexpr bool Quat::operator==(const Quat& quat) const + { + return w == quat.w && x == quat.x && y == quat.y && z == quat.z; + } + + template + constexpr bool Quat::operator!=(const Quat& quat) const + { + return !operator==(quat); + } + + template + constexpr bool Quat::operator<(const Quat& quat) const + { + if(w != quat.w) + return w < quat.w; + + if(x != quat.x) + return x < quat.x; + + if(y != quat.y) + return y < quat.y; + + if(z != quat.z) + return z < quat.z; + } + + template + constexpr bool 
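operator*(const Vec3&) above rotates a vector with the two-cross-product shortcut v' = v + 2w (qv x v) + 2 (qv x (qv x v)). Replicated on plain floats with a 90-degree sanity check, as a sketch rather than the patch's code:

    #include <cassert>
    #include <cmath>

    struct V3 { float x, y, z; };
    static V3 Cross(V3 a, V3 b) { return { a.y * b.z - a.z * b.y, a.z * b.x - a.x * b.z, a.x * b.y - a.y * b.x }; }

    // Rotate v by the unit quaternion (w, qx, qy, qz).
    static V3 Rotate(float w, float qx, float qy, float qz, V3 v)
    {
        const V3 qv { qx, qy, qz };
        const V3 uv  = Cross(qv, v);
        const V3 uuv = Cross(qv, uv);
        return { v.x + 2.f * w * uv.x + 2.f * uuv.x,
                 v.y + 2.f * w * uv.y + 2.f * uuv.y,
                 v.z + 2.f * w * uv.z + 2.f * uuv.z };
    }

    int main()
    {
        const float s = std::sin(0.78539816f), c = std::cos(0.78539816f); // half of 90 degrees
        const V3 r = Rotate(c, 0.f, 0.f, s, { 1.f, 0.f, 0.f });           // 90 degrees about Z
        assert(std::abs(r.x) < 1e-6f && std::abs(r.y - 1.f) < 1e-6f && std::abs(r.z) < 1e-6f); // +X -> +Y
    }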
Quat::operator<=(const Quat& quat) const + { + if(w != quat.w) + return w < quat.w; + + if(x != quat.x) + return x < quat.x; + + if(y != quat.y) + return y < quat.y; + + if(z != quat.z) + return z <= quat.z; + } + + template + constexpr bool Quat::operator>(const Quat& quat) const + { + if(w != quat.w) + return w > quat.w; + + if(x != quat.x) + return x > quat.x; + + if(y != quat.y) + return y > quat.y; + + if(z != quat.z) + return z > quat.z; + } + + template + constexpr bool Quat::operator>=(const Quat& quat) const + { + if(w != quat.w) + return w > quat.w; + + if(x != quat.x) + return x > quat.x; + + if(y != quat.y) + return y > quat.y; + + if(z != quat.z) + return z >= quat.z; + } + + template + RadianAngle Quat::AngleBetween(const Quat& lhs, const Quat& rhs) + { + return lhs.AngleBetween(rhs); + } + + template + constexpr bool Quat::ApproxEqual(const Quat& lhs, const Quat& rhs, T maxDifference) + { + return lhs.ApproxEqual(rhs, maxDifference); + } + + template + constexpr Quat Quat::Identity() + { + return Quat(1, 0, 0, 0); + } + + template + constexpr Quat Quat::Lerp(const Quat& from, const Quat& to, T interpolation) + { + Quat interpolated; + interpolated.w = Scop::Lerp(from.w, to.w, interpolation); + interpolated.x = Scop::Lerp(from.x, to.x, interpolation); + interpolated.y = Scop::Lerp(from.y, to.y, interpolation); + interpolated.z = Scop::Lerp(from.z, to.z, interpolation); + + return interpolated; + } + + template + Quat Quat::LookAt(const Vec3& forward, const Vec3& up) + { + // From https://gamedev.stackexchange.com/questions/53129/quaternion-look-at-with-up-vector + Vec3 forward_w = Vec3::Forward(); + Vec3 axis = Vec3::CrossProduct(forward, forward_w); + RadianAngle angle = std::acos(Vec3::DotProduct(forward, forward_w)); + + Vec3 third = Vec3::CrossProduct(axis, forward_w); + if(Vec3::DotProduct(third, forward) < 0) + angle = -angle; + + Quat q1 = Quat(angle, axis); + + Vec3 up_l = q1 * up; + Vec3 right = Vec3::Normalize(Vec3::CrossProduct(forward, up)); + Vec3 up_w = Vec3::Normalize(Vec3::CrossProduct(right, forward)); + + Vec3 axis2 = Vec3::CrossProduct(up_l, up_w); + RadianAngle angle2 = std::acos(Vec3::DotProduct(forward, forward_w)); + + Quat q2 = Quat(angle2, axis2); + + return q2 * q1; + } + + template + Quat Quat::Normalize(const Quat& quat, T* length) + { + return quat.GetNormal(length); + } + + template + Quat Quat::RotationBetween(const Vec3& from, const Vec3& to) + { + T dot = from.DotProduct(to); + if(dot < T(-0.999999)) + { + Vec3 crossProduct; + if(from.DotProduct(Vec3::UnitX()) < T(0.999999)) + crossProduct = Vec3::UnitX().CrossProduct(from); + else + crossProduct = Vec3::UnitY().CrossProduct(from); + + crossProduct.Normalize(); + return Quat(Pi(), crossProduct); + } + else if(dot > T(0.999999)) + return Quat::Identity(); + else + { + T norm = std::sqrt(from.GetSquaredLength() * to.GetSquaredLength()); + Vec3 crossProduct = from.CrossProduct(to); + + return Quat(norm + dot, crossProduct.x, crossProduct.y, crossProduct.z).GetNormal(); + } + } + + template + Quat Quat::RotateTowards(const Quat& from, const Quat& to, RadianAngle maxRotation) + { + RadianAngle rotationBetween = AngleBetween(from, to); + if(rotationBetween < maxRotation) + return to; + + return Slerp(from, to, std::min(maxRotation.value / rotationBetween.value), 1.f); + } + + template + Quat Quat::Mirror(Quat quat, const Vec3& axis) + { + T x = std::copysign(T(1.0), axis.x); + T y = std::copysign(T(1.0), axis.y); + T z = std::copysign(T(1.0), axis.z); + + quat.x = y * z * quat.x; + quat.y = x * z * 
quat.y; + quat.z = x * y * quat.z; + + return quat; + } + + template + Quat Quat::Slerp(const Quat& from, const Quat& to, T interpolation) + { + Quat q; + + T cosOmega = from.DotProduct(to); + if(cosOmega < T(0.0)) + { + // We invert everything + q = Quat(-to.w, -to.x, -to.y, -to.z); + cosOmega = -cosOmega; + } + else + q = Quat(to); + + T k0, k1; + if(cosOmega > T(0.9999)) + { + // Linear interpolation to avoid division by zero + k0 = T(1.0) - interpolation; + k1 = interpolation; + } + else + { + T sinOmega = std::sqrt(T(1.0) - cosOmega*cosOmega); + T omega = std::atan2(sinOmega, cosOmega); + + // To avoid two divisions + sinOmega = T(1.0)/sinOmega; + + k0 = std::sin((T(1.0) - interpolation) * omega) * sinOmega; + k1 = std::sin(interpolation*omega) * sinOmega; + } + + Quat result(k0 * from.w, k0 * from.x, k0 * from.y, k0 * from.z); + return result += q * k1; + } + + template + constexpr Quat Quat::Zero() + { + return Quat(0, 0, 0, 0); + } + + template + std::ostream& operator<<(std::ostream& out, const Quat& quat) + { + return out << "Quat(" << quat.w << " | " << quat.x << ", " << quat.y << ", " << quat.z << ')'; + } +} diff --git a/runtime/Includes/Maths/Vec2.h b/runtime/Includes/Maths/Vec2.h new file mode 100755 index 0000000..ec6f7a5 --- /dev/null +++ b/runtime/Includes/Maths/Vec2.h @@ -0,0 +1,116 @@ +#ifndef __SCOP_VEC2__ +#define __SCOP_VEC2__ + +#include +#include +#include +#include + +#include + +namespace Scop +{ + template class Vec3; + template class Vec4; + + template + struct Vec2 + { + union { T x, r, s; }; + union { T y, g, t; }; + + constexpr Vec2() = default; + constexpr Vec2(T X, T Y); + constexpr explicit Vec2(T scale); + template constexpr explicit Vec2(const Vec2& vec); + constexpr Vec2(const Vec2&) = default; + constexpr Vec2(Vec2&&) = default; + constexpr explicit Vec2(const Vec3& vec); + constexpr explicit Vec2(const Vec4& vec); + + T AbsDotProduct(const Vec2& vec) const; + constexpr bool ApproxEqual(const Vec2& vec, T max_difference = std::numeric_limits::epsilon()) const; + + template U Distance(const Vec2& vec) const; + constexpr T DotProduct(const Vec2& vec) const; + + template T GetLength() const; + Vec2 GetNormal(T* length = nullptr) const; + constexpr T GetSquaredLength() const; + + constexpr Vec2& Maximize(const Vec2& vec); + constexpr Vec2& Minimize(const Vec2& vec); + + Vec2& Normalize(T* length = nullptr); + + constexpr T SquaredDistance(const Vec2& vec) const; + + std::string ToString() const; + + constexpr T& operator[](std::size_t i); + constexpr T operator[](std::size_t i) const; + + constexpr const Vec2& operator+() const; + constexpr Vec2 operator-() const; + + constexpr Vec2 operator+(const Vec2& vec) const; + constexpr Vec2 operator-(const Vec2& vec) const; + constexpr Vec2 operator*(const Vec2& vec) const; + constexpr Vec2 operator*(T scale) const; + constexpr Vec2 operator/(const Vec2& vec) const; + constexpr Vec2 operator/(T scale) const; + constexpr Vec2 operator%(const Vec2& vec) const; + constexpr Vec2 operator%(T mod) const; + + constexpr Vec2& operator=(const Vec2&) = default; + constexpr Vec2& operator=(Vec2&&) = default; + + constexpr Vec2& operator+=(const Vec2& vec); + constexpr Vec2& operator-=(const Vec2& vec); + constexpr Vec2& operator*=(const Vec2& vec); + constexpr Vec2& operator*=(T scale); + constexpr Vec2& operator/=(const Vec2& vec); + constexpr Vec2& operator/=(T scale); + constexpr Vec2& operator%=(const Vec2& vec); + constexpr Vec2& operator%=(T mod); + + constexpr bool operator==(const Vec2& vec) const; + constexpr bool 
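Slerp above flips the target when the dot product is negative (short arc), falls back to a plain lerp once the inputs are nearly aligned, and otherwise weights the endpoints by sin((1 - t) * omega) / sin(omega) and sin(t * omega) / sin(omega). The same steps on a plain struct, as a self-contained sketch:

    #include <cassert>
    #include <cmath>

    struct QuatF { float w, x, y, z; };

    QuatF Slerp(QuatF from, QuatF to, float t)
    {
        float cosOmega = from.w * to.w + from.x * to.x + from.y * to.y + from.z * to.z;
        if (cosOmega < 0.f) // take the shorter arc
        {
            to = { -to.w, -to.x, -to.y, -to.z };
            cosOmega = -cosOmega;
        }

        float k0, k1;
        if (cosOmega > 0.9999f) // nearly identical: lerp avoids dividing by sin(0)
        {
            k0 = 1.f - t;
            k1 = t;
        }
        else
        {
            const float sinOmega = std::sqrt(1.f - cosOmega * cosOmega);
            const float omega = std::atan2(sinOmega, cosOmega);
            k0 = std::sin((1.f - t) * omega) / sinOmega;
            k1 = std::sin(t * omega) / sinOmega;
        }

        return { k0 * from.w + k1 * to.w, k0 * from.x + k1 * to.x,
                 k0 * from.y + k1 * to.y, k0 * from.z + k1 * to.z };
    }

    int main()
    {
        const QuatF identity { 1.f, 0.f, 0.f, 0.f };
        const QuatF aboutZ90 { 0.70710678f, 0.f, 0.f, 0.70710678f };
        const QuatF half = Slerp(identity, aboutZ90, 0.5f);          // expect 45 degrees about Z
        assert(std::abs(half.w - std::cos(0.39269908f)) < 1e-5f);
        assert(std::abs(half.z - std::sin(0.39269908f)) < 1e-5f);
    }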
operator!=(const Vec2& vec) const; + constexpr bool operator<(const Vec2& vec) const; + constexpr bool operator<=(const Vec2& vec) const; + constexpr bool operator>(const Vec2& vec) const; + constexpr bool operator>=(const Vec2& vec) const; + + static constexpr Vec2 Apply(T(*func)(T), const Vec2& vec); + static constexpr bool ApproxEqual(const Vec2& lhs, const Vec2& rhs, T max_difference = std::numeric_limits::epsilon()); + template static U Distance(const Vec2& vec1, const Vec2& vec2); + static constexpr T DotProduct(const Vec2& vec1, const Vec2& vec2); + static constexpr Vec2 Lerp(const Vec2& from, const Vec2& to, T interpolation); + static Vec2 Normalize(const Vec2& vec); + static constexpr Vec2 Unit(); + static constexpr Vec2 UnitX(); + static constexpr Vec2 UnitY(); + static constexpr Vec2 Zero(); + + ~Vec2() = default; + }; + + using Vec2d = Vec2; + using Vec2f = Vec2; + using Vec2i = Vec2; + using Vec2ui = Vec2; + using Vec2i32 = Vec2; + using Vec2i64 = Vec2; + using Vec2ui32 = Vec2; + using Vec2ui64 = Vec2; + + template std::ostream& operator<<(std::ostream& out, const Vec2& vec); + + template constexpr Vec2 operator*(T scale, const Vec2& vec); + template constexpr Vec2 operator/(T scale, const Vec2& vec); + template constexpr Vec2 operator%(T mod, const Vec2& vec); +} + +#include + +#endif // __AK_VEC2__ diff --git a/runtime/Includes/Maths/Vec2.inl b/runtime/Includes/Maths/Vec2.inl new file mode 100755 index 0000000..1671da8 --- /dev/null +++ b/runtime/Includes/Maths/Vec2.inl @@ -0,0 +1,388 @@ +#pragma once + +#include + +namespace Scop +{ + template + constexpr Vec2::Vec2(T X, T Y) : x(X), y(Y) {} + + template + constexpr Vec2::Vec2(T scale) : x(scale), y(scale) {} + + template + template + constexpr Vec2::Vec2(const Vec2& vec) : x(static_cast(vec.x)), y(static_cast(vec.y)) {} + + template + constexpr Vec2::Vec2(const Vec3& vec) : x(vec.x), y(vec.y) {} + + template + constexpr Vec2::Vec2(const Vec4& vec) : x(vec.x), y(vec.y) {} + + template + T Vec2::AbsDotProduct(const Vec2& vec) const + { + return std::abs(x * vec.x) + std::abs(y * vec.y); + } + + template + constexpr bool Vec2::ApproxEqual(const Vec2& vec, T maxDifference) const + { + return NumberEquals(x, vec.x, maxDifference) && NumberEquals(y, vec.y, maxDifference); + } + + template + template + U Vec2::Distance(const Vec2& vec) const + { + return static_cast(std::sqrt(SquaredDistance(vec))); + } + + template + constexpr T Vec2::DotProduct(const Vec2& vec) const + { + return x * vec.x + y * vec.y; + } + + template + template + T Vec2::GetLength() const + { + return static_cast(std::sqrt(static_cast(GetSquaredLength()))); + } + + template + Vec2 Vec2::GetNormal(T* length) const + { + Vec2 vec(*this); + vec.Normalize(length); + return vec; + } + + template + constexpr T Vec2::GetSquaredLength() const + { + return x * x + y * y; + } + + template + constexpr Vec2& Vec2::Maximize(const Vec2& vec) + { + if(vec.x > x) + x = vec.x; + if(vec.y > y) + y = vec.y; + return *this; + } + + template + constexpr Vec2& Vec2::Minimize(const Vec2& vec) + { + if(vec.x < x) + x = vec.x; + if(vec.y < y) + y = vec.y; + return *this; + } + + template + Vec2& Vec2::Normalize(T* length) + { + T norm = GetLength(); + if(norm > T(0.0)) + { + T invNorm = T(1.0) / norm; + x *= invNorm; + y *= invNorm; + } + if(length) + *length = norm; + return *this; + } + + template + constexpr T Vec2::SquaredDistance(const Vec2& vec) const + { + return (*this - vec).GetSquaredLength(); + } + + template + std::string Vec2::ToString() const + { + return "Vec2(" + 
std::to_string(x) + ", " + std::to_string(y) + ')'; + } + + template + constexpr T& Vec2::operator[](std::size_t i) + { + Scop::Assert(i < 2, "index out of range"); + return *(&x + i); + } + + template + constexpr T Vec2::operator[](std::size_t i) const + { + Scop::Assert(i < 2, "index out of range"); + return *(&x + i); + } + + template + constexpr const Vec2& Vec2::operator+() const + { + return *this; + } + + template + constexpr Vec2 Vec2::operator-() const + { + return Vec2(-x, -y); + } + + template + constexpr Vec2 Vec2::operator+(const Vec2& vec) const + { + return Vec2(x + vec.x, y + vec.y); + } + + template + constexpr Vec2 Vec2::operator-(const Vec2& vec) const + { + return Vec2(x - vec.x, y - vec.y); + } + + template + constexpr Vec2 Vec2::operator*(const Vec2& vec) const + { + return Vec2(x * vec.x, y * vec.y); + } + + template + constexpr Vec2 Vec2::operator*(T scale) const + { + return Vec2(x * scale, y * scale); + } + + template + constexpr Vec2 Vec2::operator/(const Vec2& vec) const + { + return Vec2(x / vec.x, y / vec.y); + } + + template + constexpr Vec2 Vec2::operator/(T scale) const + { + return Vec2(x / scale, y / scale); + } + + template + constexpr Vec2 Vec2::operator%(const Vec2& vec) const + { + return Vec2(Mod(x, vec.x), Mod(y, vec.y)); + } + + template + constexpr Vec2 Vec2::operator%(T mod) const + { + return Vec2(Mod(x, mod), Mod(y, mod)); + } + + template + constexpr Vec2& Vec2::operator+=(const Vec2& vec) + { + x += vec.x; + y += vec.y; + + return *this; + } + + template + constexpr Vec2& Vec2::operator-=(const Vec2& vec) + { + x -= vec.x; + y -= vec.y; + + return *this; + } + + template + constexpr Vec2& Vec2::operator*=(const Vec2& vec) + { + x *= vec.x; + y *= vec.y; + + return *this; + } + + template + constexpr Vec2& Vec2::operator*=(T scale) + { + x *= scale; + y *= scale; + + return *this; + } + + template + constexpr Vec2& Vec2::operator/=(const Vec2& vec) + { + x /= vec.x; + y /= vec.y; + + return *this; + } + + template + constexpr Vec2& Vec2::operator/=(T scale) + { + x /= scale; + y /= scale; + + return *this; + } + + template + constexpr Vec2& Vec2::operator%=(const Vec2& vec) + { + x = Mod(x, vec.x); + y = Mod(y, vec.y); + + return *this; + } + + template + constexpr Vec2& Vec2::operator%=(T value) + { + x = Mod(x, value); + y = Mod(y, value); + + return *this; + } + + template + constexpr bool Vec2::operator==(const Vec2& vec) const + { + return x == vec.x && y == vec.y; + } + + template + constexpr bool Vec2::operator!=(const Vec2& vec) const + { + return !operator==(vec); + } + + template + constexpr bool Vec2::operator<(const Vec2& vec) const + { + if (x != vec.x) + return x < vec.x; + + return y < vec.y; + } + + template + constexpr bool Vec2::operator<=(const Vec2& vec) const + { + if (x != vec.x) + return x < vec.x; + + return y <= vec.y; + } + + template + constexpr bool Vec2::operator>(const Vec2& vec) const + { + if (x != vec.x) + return x > vec.x; + + return y > vec.y; + } + + template + constexpr bool Vec2::operator>=(const Vec2& vec) const + { + if (x != vec.x) + return x > vec.x; + + return y >= vec.y; + } + + template + constexpr Vec2 Vec2::Apply(T(*func)(T), const Vec2& vec) + { + return Vec2(func(vec.x), func(vec.y)); + } + + template + constexpr bool Vec2::ApproxEqual(const Vec2& lhs, const Vec2& rhs, T maxDifference) + { + return lhs.ApproxEqual(rhs, maxDifference); + } + + template + template + U Vec2::Distance(const Vec2& vec1, const Vec2& vec2) + { + return vec1.Distance(vec2); + } + + template + constexpr T 
Vec2::DotProduct(const Vec2& vec1, const Vec2& vec2) + { + return vec1.DotProduct(vec2); + } + + template + Vec2 Vec2::Normalize(const Vec2& vec) + { + return vec.GetNormal(); + } + + template + constexpr Vec2 Vec2::Unit() + { + return Vec2(1, 1); + } + + template + constexpr Vec2 Vec2::UnitX() + { + return Vec2(1, 0); + } + + template + constexpr Vec2 Vec2::UnitY() + { + return Vec2(0, 1); + } + + template + constexpr Vec2 Vec2::Zero() + { + return Vec2(0, 0); + } + + template + std::ostream& operator<<(std::ostream& out, const Vec2& vec) + { + return out << "Vec2(" << vec.x << ", " << vec.y << ')'; + } + + template + constexpr Vec2 operator*(T scale, const Vec2& vec) + { + return Vec2(scale * vec.x, scale * vec.y); + } + + template + constexpr Vec2 operator/(T scale, const Vec2& vec) + { + return Vec2(scale / vec.x, scale / vec.y); + } + + template + constexpr Vec2 operator%(T mod, const Vec2& vec) + { + return Vec2(Mod(mod, vec.x), Mod(mod, vec.y)); + } +} diff --git a/runtime/Includes/Maths/Vec3.h b/runtime/Includes/Maths/Vec3.h new file mode 100755 index 0000000..775431f --- /dev/null +++ b/runtime/Includes/Maths/Vec3.h @@ -0,0 +1,133 @@ +#ifndef __SCOP_VEC3__ +#define __SCOP_VEC3__ + +#include +#include +#include +#include + +#include + +namespace Scop +{ + template class Vec2; + template class Vec4; + + template + struct Vec3 + { + union { T x, r, s; }; + union { T y, g, t; }; + union { T z, b, p; }; + + constexpr Vec3() = default; + constexpr Vec3(T X, T Y, T Z); + constexpr Vec3(T X, const Vec2& vec); + constexpr explicit Vec3(T scale); + constexpr Vec3(const Vec2& vec, T Z = 0.0); + template constexpr explicit Vec3(const Vec3& vec); + constexpr Vec3(const Vec3&) = default; + constexpr Vec3(Vec3&&) = default; + constexpr explicit Vec3(const Vec4& vec); + + T AbsDotProduct(const Vec3& vec) const; + constexpr bool ApproxEqual(const Vec3& vec, T max_difference = std::numeric_limits::epsilon()) const; + + constexpr Vec3 CrossProduct(const Vec3& vec) const; + + template U Distance(const Vec3& vec) const; + constexpr T DotProduct(const Vec3& vec) const; + + Vec3 GetAbs() const; + template U GetLength() const; + Vec3 GetNormal(T* length = nullptr) const; + constexpr T GetSquaredLength() const; + + constexpr Vec3& Maximize(const Vec3& vec); + constexpr Vec3& Minimize(const Vec3& vec); + + Vec3& Normalize(T* length = nullptr); + + constexpr T SquaredDistance(const Vec3& vec) const; + + std::string ToString() const; + + constexpr T& operator[](std::size_t i); + constexpr const T& operator[](std::size_t i) const; + + constexpr const Vec3& operator+() const; + constexpr Vec3 operator-() const; + + constexpr Vec3 operator+(const Vec3& vec) const; + constexpr Vec3 operator-(const Vec3& vec) const; + constexpr Vec3 operator*(const Vec3& vec) const; + constexpr Vec3 operator*(T scale) const; + constexpr Vec3 operator/(const Vec3& vec) const; + constexpr Vec3 operator/(T scale) const; + constexpr Vec3 operator%(const Vec3& vec) const; + constexpr Vec3 operator%(T mod) const; + + constexpr Vec3& operator=(const Vec3&) = default; + constexpr Vec3& operator=(Vec3&&) = default; + + constexpr Vec3& operator+=(const Vec3& vec); + constexpr Vec3& operator-=(const Vec3& vec); + constexpr Vec3& operator*=(const Vec3& vec); + constexpr Vec3& operator*=(T scale); + constexpr Vec3& operator/=(const Vec3& vec); + constexpr Vec3& operator/=(T scale); + constexpr Vec3& operator%=(const Vec3& vec); + constexpr Vec3& operator%=(T mod); + + constexpr bool operator==(const Vec3& vec) const; + constexpr bool 
operator!=(const Vec3& vec) const; + constexpr bool operator<(const Vec3& vec) const; + constexpr bool operator<=(const Vec3& vec) const; + constexpr bool operator>(const Vec3& vec) const; + constexpr bool operator>=(const Vec3& vec) const; + + static constexpr Vec3 Apply(T(*func)(T), const Vec3& vec); + static constexpr bool ApproxEqual(const Vec3& lhs, const Vec3& rhs, T max_difference = std::numeric_limits::epsilon()); + static constexpr Vec3 Backward(); + static constexpr Vec3 Clamp(const Vec3& vec, const Vec3& min, const Vec3& max); + static constexpr Vec3 CrossProduct(const Vec3& vec1, const Vec3& vec2); + template static U Distance(const Vec3& vec1, const Vec3& vec2); + static constexpr T DotProduct(const Vec3& vec1, const Vec3& vec2); + static constexpr Vec3 Down(); + static constexpr Vec3 Forward(); + static constexpr Vec3 Left(); + static constexpr Vec3 Max(const Vec3& lhs, const Vec3& rhs); + static constexpr Vec3 Min(const Vec3& lhs, const Vec3& rhs); + static Vec3 Normalize(const Vec3& vec); + static constexpr Vec3 Right(); + static constexpr T SquaredDistance(const Vec3& vec1, const Vec3& vec2); + static constexpr Vec3 Unit(); + static constexpr Vec3 UnitX(); + static constexpr Vec3 UnitY(); + static constexpr Vec3 UnitZ(); + static constexpr Vec3 Up(); + static constexpr Vec3 Zero(); + + ~Vec3() = default; + }; + + using Vec3b = Vec3; + using Vec3d = Vec3; + using Vec3f = Vec3; + using Vec3i = Vec3; + using Vec3ui = Vec3; + using Vec3i32 = Vec3; + using Vec3i64 = Vec3; + using Vec3ui32 = Vec3; + using Vec3ui64 = Vec3; + + template std::ostream& operator<<(std::ostream& out, const Vec3& vec); + + template constexpr Vec3 operator*(T scale, const Vec3& vec); + template constexpr Vec3 operator/(T scale, const Vec3& vec); + template constexpr Vec3 operator%(T scale, const Vec3& vec); +} + +#include + +#endif // __AK_VEC3__ diff --git a/runtime/Includes/Maths/Vec3.inl b/runtime/Includes/Maths/Vec3.inl new file mode 100755 index 0000000..73c73ff --- /dev/null +++ b/runtime/Includes/Maths/Vec3.inl @@ -0,0 +1,509 @@ +#pragma once + +#include + +namespace Scop +{ + template + constexpr Vec3::Vec3(T X, T Y, T Z) : x(X), y(Y), z(Z) {} + + template + constexpr Vec3::Vec3(T X, const Vec2& vec) : x(X), y(vec.x), z(vec.y) {} + + template + constexpr Vec3::Vec3(T scale) : x(scale), y(scale), z(scale) {} + + template + constexpr Vec3::Vec3(const Vec2& vec, T Z) : x(vec.x), y(vec.y), z(Z) {} + + template + template + constexpr Vec3::Vec3(const Vec3& vec) : x(static_cast(vec.x)), y(static_cast(vec.y)), z(static_cast(vec.z)) {} + + template + constexpr Vec3::Vec3(const Vec4& vec) : x(vec.x), y(vec.y), z(vec.z) {} + + template + T Vec3::AbsDotProduct(const Vec3& vec) const + { + return std::abs(x * vec.x) + std::abs(y * vec.y) + std::abs(z * vec.z); + } + + template + constexpr bool Vec3::ApproxEqual(const Vec3& vec, T maxDifference) const + { + return NumberEquals(x, vec.x, maxDifference) && NumberEquals(y, vec.y, maxDifference) && NumberEquals(z, vec.z, maxDifference); + } + + template + constexpr Vec3 Vec3::CrossProduct(const Vec3& vec) const + { + return Vec3(y * vec.z - z * vec.y, z * vec.x - x * vec.z, x * vec.y - y * vec.x); + } + + template + template + U Vec3::Distance(const Vec3& vec) const + { + return static_cast(std::sqrt(static_cast(SquaredDistance(vec)))); + } + + template + constexpr T Vec3::DotProduct(const Vec3& vec) const + { + return x * vec.x + y * vec.y + z * vec.z; + } + + template + Vec3 Vec3::GetAbs() const + { + return Vec3(std::abs(x), std::abs(y), std::abs(z)); + } + + 
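Vec3 (like Vec2 above) declares each component as an anonymous union, so x/r/s, y/g/t and z/b/p are alternative names for the same storage and one struct serves as position, colour, or texture coordinate. A minimal illustration; all aliases share one type, so this stays within what the major compilers guarantee, even though formally only one union member is active at a time:

    #include <cassert>

    struct V3
    {
        union { float x, r, s; };
        union { float y, g, t; };
        union { float z, b, p; };
    };

    int main()
    {
        V3 colour{};
        colour.r = 0.25f; // written through the colour-style names...
        colour.g = 0.5f;
        colour.b = 1.f;
        assert(colour.x == 0.25f && colour.y == 0.5f && colour.z == 1.f); // ...read back as coordinates
    }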
template + template + U Vec3::GetLength() const + { + return static_cast(std::sqrt(static_cast(GetSquaredLength()))); + } + + template + Vec3 Vec3::GetNormal(T* length) const + { + Vec3 vec(*this); + vec.Normalize(length); + + return vec; + } + + template + constexpr T Vec3::GetSquaredLength() const + { + return x*x + y*y + z*z; + } + + template + constexpr Vec3& Vec3::Maximize(const Vec3& vec) + { + if (vec.x > x) + x = vec.x; + + if (vec.y > y) + y = vec.y; + + if (vec.z > z) + z = vec.z; + + return *this; + } + + template + constexpr Vec3& Vec3::Minimize(const Vec3& vec) + { + if (vec.x < x) + x = vec.x; + + if (vec.y < y) + y = vec.y; + + if (vec.z < z) + z = vec.z; + + return *this; + } + + template + Vec3& Vec3::Normalize(T* length) + { + T norm = GetLength(); + if (norm > T(0.0)) + { + T invNorm = T(1.0) / norm; + x *= invNorm; + y *= invNorm; + z *= invNorm; + } + + if (length) + *length = norm; + + return *this; + } + + template + constexpr T Vec3::SquaredDistance(const Vec3& vec) const + { + return (*this - vec).GetSquaredLength(); + } + + template + std::string Vec3::ToString() const + { + return "Vec3(" + std::to_string(x) + ", " + std::to_string(y) + ", " + std::to_string(z) + ')'; + } + + template + constexpr T& Vec3::operator[](std::size_t i) + { + Scop::Assert(i < 3, "index out of range"); + return *(&x + i); + } + + template + constexpr const T& Vec3::operator[](std::size_t i) const + { + Scop::Assert(i < 3, "index out of range"); + return *(&x + i); + } + + template + constexpr const Vec3& Vec3::operator+() const + { + return *this; + } + + template + constexpr Vec3 Vec3::operator-() const + { + return Vec3(-x, -y, -z); + } + template + constexpr Vec3 Vec3::operator+(const Vec3& vec) const + { + return Vec3(x + vec.x, y + vec.y, z + vec.z); + } + + template + constexpr Vec3 Vec3::operator-(const Vec3& vec) const + { + return Vec3(x - vec.x, y - vec.y, z - vec.z); + } + + template + constexpr Vec3 Vec3::operator*(const Vec3& vec) const + { + return Vec3(x * vec.x, y * vec.y, z * vec.z); + } + + template + constexpr Vec3 Vec3::operator*(T scale) const + { + return Vec3(x * scale, y * scale, z * scale); + } + + template + constexpr Vec3 Vec3::operator/(const Vec3& vec) const + { + return Vec3(x / vec.x, y / vec.y, z / vec.z); + } + + template + constexpr Vec3 Vec3::operator/(T scale) const + { + return Vec3(x / scale, y / scale, z / scale); + } + + template + constexpr Vec3 Vec3::operator%(const Vec3& vec) const + { + return Vec3(Mod(x, vec.x), Mod(y, vec.y), Mod(z, vec.z)); + } + + template + constexpr Vec3 Vec3::operator%(T mod) const + { + return Vec3(Mod(x, mod), Mod(y, mod), Mod(z, mod)); + } + + template + constexpr Vec3& Vec3::operator+=(const Vec3& vec) + { + x += vec.x; + y += vec.y; + z += vec.z; + + return *this; + } + + template + constexpr Vec3& Vec3::operator-=(const Vec3& vec) + { + x -= vec.x; + y -= vec.y; + z -= vec.z; + + return *this; + } + + template + constexpr Vec3& Vec3::operator*=(const Vec3& vec) + { + x *= vec.x; + y *= vec.y; + z *= vec.z; + + return *this; + } + + template + constexpr Vec3& Vec3::operator*=(T scale) + { + x *= scale; + y *= scale; + z *= scale; + + return *this; + } + + template + constexpr Vec3& Vec3::operator/=(const Vec3& vec) + { + x /= vec.x; + y /= vec.y; + z /= vec.z; + + return *this; + } + + template + constexpr Vec3& Vec3::operator/=(T scale) + { + x /= scale; + y /= scale; + z /= scale; + + return *this; + } + + template + constexpr Vec3& Vec3::operator%=(const Vec3& vec) + { + x = Mod(x, vec.x); + y = Mod(y, vec.y); + 
z = Mod(z, vec.z); + + return *this; + } + + template + constexpr Vec3& Vec3::operator%=(T mod) + { + x = Mod(x, mod); + y = Mod(y, mod); + z = Mod(z, mod); + + return *this; + } + + template + constexpr bool Vec3::operator==(const Vec3& vec) const + { + return x == vec.x && y == vec.y && z == vec.z; + } + + template + constexpr bool Vec3::operator!=(const Vec3& vec) const + { + return !operator==(vec); + } + + template + constexpr bool Vec3::operator<(const Vec3& vec) const + { + if (x != vec.x) + return x < vec.x; + + if (y != vec.y) + return y < vec.y; + + return z < vec.z; + } + + template + constexpr bool Vec3::operator<=(const Vec3& vec) const + { + if (x != vec.x) + return x < vec.x; + + if (y != vec.y) + return y < vec.y; + + return z <= vec.z; + } + + template + constexpr bool Vec3::operator>(const Vec3& vec) const + { + if (x != vec.x) + return x > vec.x; + + if (y != vec.y) + return y > vec.y; + + return z > vec.z; + } + + template + constexpr bool Vec3::operator>=(const Vec3& vec) const + { + if (x != vec.x) + return x > vec.x; + + if (y != vec.y) + return y > vec.y; + + return z >= vec.z; + } + + template + constexpr Vec3 Vec3::Apply(T(*func)(T), const Vec3& vec) + { + return Vec3(func(vec.x), func(vec.y), func(vec.z)); + } + + template + constexpr bool Vec3::ApproxEqual(const Vec3& lhs, const Vec3& rhs, T maxDifference) + { + return lhs.ApproxEqual(rhs, maxDifference); + } + + template + constexpr Vec3 Vec3::CrossProduct(const Vec3& vec1, const Vec3& vec2) + { + return vec1.CrossProduct(vec2); + } + + template + constexpr T Vec3::DotProduct(const Vec3& vec1, const Vec3& vec2) + { + return vec1.DotProduct(vec2); + } + + template + constexpr Vec3 Vec3::Backward() + { + return Vec3(0, 0, 1); + } + + template + template + U Vec3::Distance(const Vec3& vec1, const Vec3& vec2) + { + return vec1.Distance(vec2); + } + + template + constexpr Vec3 Vec3::Down() + { + return Vec3(0, -1, 0); + } + + template + constexpr Vec3 Vec3::Forward() + { + return Vec3(0, 0, -1); + } + + template + constexpr Vec3 Vec3::Left() + { + return Vec3(-1, 0, 0); + } + + template + constexpr Vec3 Vec3::Max(const Vec3& lhs, const Vec3& rhs) + { + Vec3 max = lhs; + max.Maximize(rhs); + + return max; + } + + template + constexpr Vec3 Vec3::Min(const Vec3& lhs, const Vec3& rhs) + { + Vec3 min = lhs; + min.Minimize(rhs); + + return min; + } + + template + Vec3 Vec3::Normalize(const Vec3& vec) + { + return vec.GetNormal(); + } + + template + constexpr Vec3 Vec3::Right() + { + return Vec3(1, 0, 0); + } + + template + constexpr T Vec3::SquaredDistance(const Vec3& vec1, const Vec3& vec2) + { + return vec1.SquaredDistance(vec2); + } + + template + constexpr Vec3 Vec3::Unit() + { + return Vec3(1); + } + + template + constexpr Vec3 Vec3::UnitX() + { + return Vec3(1, 0, 0); + } + + template + constexpr Vec3 Vec3::UnitY() + { + return Vec3(0, 1, 0); + } + + template + constexpr Vec3 Vec3::UnitZ() + { + return Vec3(0, 0, 1); + } + + template + constexpr Vec3 Vec3::Up() + { + return Vec3(0, 1, 0); + } + + template + constexpr Vec3 Vec3::Zero() + { + return Vec3(0, 0, 0); + } + + template + std::ostream& operator<<(std::ostream& out, const Vec3& vec) + { + return out << "Vec3(" << vec.x << ", " << vec.y << ", " << vec.z << ')'; + } + + template + constexpr Vec3 operator*(T scale, const Vec3& vec) + { + return Vec3(scale * vec.x, scale * vec.y, scale * vec.z); + } + + template + constexpr Vec3 operator/(T scale, const Vec3& vec) + { + return Vec3(scale / vec.x, scale / vec.y, scale / vec.z); + } + + template + constexpr Vec3 
operator%(T mod, const Vec3& vec) + { + return Vec3(Mod(mod, vec.x), Mod(mod, vec.y), Mod(mod, vec.z)); + } +} + diff --git a/runtime/Includes/Maths/Vec4.h b/runtime/Includes/Maths/Vec4.h new file mode 100755 index 0000000..e8bff3a --- /dev/null +++ b/runtime/Includes/Maths/Vec4.h @@ -0,0 +1,115 @@ +#ifndef __SCOP_VEC4__ +#define __SCOP_VEC4__ + +#include +#include +#include +#include + +#include + +namespace Scop +{ + template class Vec2; + template class Vec3; + + template + struct Vec4 + { + union { T x, r, s; }; + union { T y, g, t; }; + union { T z, b, p; }; + union { T w, a, q; }; + + constexpr Vec4() = default; + constexpr Vec4(T X, T Y, T Z, T W = 1.0); + constexpr Vec4(T X, T Y, const Vec2& vec); + constexpr Vec4(T X, const Vec2& vec, T W); + constexpr Vec4(T X, const Vec3& vec); + constexpr explicit Vec4(T scale); + constexpr Vec4(const Vec2& vec, T Z = 0.0, T W = 1.0); + constexpr Vec4(const Vec3& vec, T W = 1.0); + template constexpr explicit Vec4(const Vec4& vec); + constexpr Vec4(const Vec4&) = default; + constexpr Vec4(Vec4&&) = default; + + T AbsDotProduct(const Vec4& vec) const; + constexpr bool ApproxEqual(const Vec4& vec, T max_difference = std::numeric_limits::epsilon()) const; + + constexpr T DotProduct(const Vec4& vec) const; + + Vec4 GetNormal(T* length = nullptr) const; + + constexpr Vec4& Maximize(const Vec4& vec); + constexpr Vec4& Minimize(const Vec4& vec); + + Vec4& Normalize(T* length = nullptr); + + std::string ToString() const; + + constexpr Vec4& operator=(const Vec4&) = default; + constexpr Vec4& operator=(Vec4&&) = default; + + constexpr T& operator[](std::size_t i); + constexpr const T& operator[](std::size_t i) const; + + constexpr const Vec4& operator+() const; + constexpr Vec4 operator-() const; + + constexpr Vec4 operator+(const Vec4& vec) const; + constexpr Vec4 operator-(const Vec4& vec) const; + constexpr Vec4 operator*(const Vec4& vec) const; + constexpr Vec4 operator*(T scale) const; + constexpr Vec4 operator/(const Vec4& vec) const; + constexpr Vec4 operator/(T scale) const; + constexpr Vec4 operator%(const Vec4& vec) const; + constexpr Vec4 operator%(T mod) const; + + constexpr Vec4& operator+=(const Vec4& vec); + constexpr Vec4& operator-=(const Vec4& vec); + constexpr Vec4& operator*=(const Vec4& vec); + constexpr Vec4& operator*=(T scale); + constexpr Vec4& operator/=(const Vec4& vec); + constexpr Vec4& operator/=(T scale); + constexpr Vec4& operator%=(const Vec4& vec); + constexpr Vec4& operator%=(T mod); + + constexpr bool operator==(const Vec4& vec) const; + constexpr bool operator!=(const Vec4& vec) const; + constexpr bool operator<(const Vec4& vec) const; + constexpr bool operator<=(const Vec4& vec) const; + constexpr bool operator>(const Vec4& vec) const; + constexpr bool operator>=(const Vec4& vec) const; + + static constexpr Vec4 Apply(T(*func)(T), const Vec4& vec); + static constexpr bool ApproxEqual(const Vec4& lhs, const Vec4& rhs, T max_difference = std::numeric_limits::epsilon()); + static constexpr T DotProduct(const Vec4& vec1, const Vec4& vec2); + static Vec4 Normalize(const Vec4& vec); + static constexpr Vec4 UnitX(); + static constexpr Vec4 UnitY(); + static constexpr Vec4 UnitZ(); + static constexpr Vec4 Zero(); + + ~Vec4() = default; + }; + + using Vec4d = Vec4; + using Vec4f = Vec4; + using Vec4i = Vec4; + using Vec4ui = Vec4; + using Vec4i32 = Vec4; + using Vec4i64 = Vec4; + using Vec4ui32 = Vec4; + using Vec4ui64 = Vec4; + + template std::ostream& operator<<(std::ostream& out, const Vec4& vec); + + template constexpr 
Vec4<T> operator*(T scale, const Vec4<T>& vec);
+	template<typename T> constexpr Vec4<T> operator/(T scale, const Vec4<T>& vec);
+	template<typename T> constexpr Vec4<T> operator%(T mod, const Vec4<T>& vec);
+}
+
+#include 
+
+#endif // __AK_VEC4__
+
diff --git a/runtime/Includes/Maths/Vec4.inl b/runtime/Includes/Maths/Vec4.inl
new file mode 100755
index 0000000..7f016d7
--- /dev/null
+++ b/runtime/Includes/Maths/Vec4.inl
@@ -0,0 +1,424 @@
+#pragma once
+
+#include 
+
+namespace Scop
+{
+	template<typename T>
+	constexpr Vec4<T>::Vec4(T X, T Y, T Z, T W) : x(X), y(Y), z(Z), w(W) {}
+
+	template<typename T>
+	constexpr Vec4<T>::Vec4(T X, T Y, const Vec2<T>& vec) : x(X), y(Y), z(vec.x), w(vec.y) {}
+
+	template<typename T>
+	constexpr Vec4<T>::Vec4(T X, const Vec2<T>& vec, T W) : x(X), y(vec.x), z(vec.y), w(W) {}
+
+	template<typename T>
+	constexpr Vec4<T>::Vec4(T X, const Vec3<T>& vec) : x(X), y(vec.x), z(vec.y), w(vec.z) {}
+
+	template<typename T>
+	constexpr Vec4<T>::Vec4(T scale) : x(scale), y(scale), z(scale), w(scale) {}
+
+	template<typename T>
+	constexpr Vec4<T>::Vec4(const Vec2<T>& vec, T Z, T W) : x(vec.x), y(vec.y), z(Z), w(W) {}
+
+	template<typename T>
+	constexpr Vec4<T>::Vec4(const Vec3<T>& vec, T W) : x(vec.x), y(vec.y), z(vec.z), w(W) {}
+
+	template<typename T>
+	template<typename U>
+	constexpr Vec4<T>::Vec4(const Vec4<U>& vec) : x(static_cast<T>(vec.x)), y(static_cast<T>(vec.y)), z(static_cast<T>(vec.z)), w(static_cast<T>(vec.w)) {}
+
+	template<typename T>
+	T Vec4<T>::AbsDotProduct(const Vec4& vec) const
+	{
+		return std::abs(x * vec.x) + std::abs(y * vec.y) + std::abs(z * vec.z) + std::abs(w * vec.w);
+	}
+
+	template<typename T>
+	constexpr bool Vec4<T>::ApproxEqual(const Vec4& vec, T maxDifference) const
+	{
+		return NumberEquals(x, vec.x, maxDifference) && NumberEquals(y, vec.y, maxDifference) && NumberEquals(z, vec.z, maxDifference) && NumberEquals(w, vec.w, maxDifference);
+	}
+
+	template<typename T>
+	constexpr T Vec4<T>::DotProduct(const Vec4& vec) const
+	{
+		return x*vec.x + y*vec.y + z*vec.z + w*vec.w;
+	}
+
+	template<typename T>
+	Vec4<T> Vec4<T>::GetNormal(T* length) const
+	{
+		Vec4 vec(*this);
+		vec.Normalize(length);
+
+		return vec;
+	}
+
+	template<typename T>
+	constexpr Vec4<T>& Vec4<T>::Maximize(const Vec4& vec)
+	{
+		if (vec.x > x)
+			x = vec.x;
+
+		if (vec.y > y)
+			y = vec.y;
+
+		if (vec.z > z)
+			z = vec.z;
+
+		if (vec.w > w)
+			w = vec.w;
+
+		return *this;
+	}
+
+	template<typename T>
+	constexpr Vec4<T>& Vec4<T>::Minimize(const Vec4& vec)
+	{
+		if (vec.x < x)
+			x = vec.x;
+
+		if (vec.y < y)
+			y = vec.y;
+
+		if (vec.z < z)
+			z = vec.z;
+
+		if (vec.w < w)
+			w = vec.w;
+
+		return *this;
+	}
+
+	template<typename T>
+	Vec4<T>& Vec4<T>::Normalize(T* length)
+	{
+		T invLength = T(1.0) / w;
+		x *= invLength;
+		y *= invLength;
+		z *= invLength;
+
+		if (length)
+			*length = w;
+
+		w = T(1.0);
+
+		return *this;
+	}
+
+	template<typename T>
+	std::string Vec4<T>::ToString() const
+	{
+		std::ostringstream ss;
+		ss << *this;
+
+		return ss.str();
+	}
+
+	template<typename T>
+	constexpr T& Vec4<T>::operator[](std::size_t i)
+	{
+		Scop::Assert(i < 4, "index out of range");
+		return *(&x + i);
+	}
+
+	template<typename T>
+	constexpr const T& Vec4<T>::operator[](std::size_t i) const
+	{
+		Scop::Assert(i < 4, "index out of range");
+		return *(&x + i);
+	}
+
+	template<typename T>
+	constexpr const Vec4<T>& Vec4<T>::operator+() const
+	{
+		return *this;
+	}
+
+	template<typename T>
+	constexpr Vec4<T> Vec4<T>::operator-() const
+	{
+		return Vec4(-x, -y, -z, -w);
+	}
+	template<typename T>
+	constexpr Vec4<T> Vec4<T>::operator+(const Vec4& vec) const
+	{
+		return Vec4(x + vec.x, y + vec.y, z + vec.z, w + vec.w);
+	}
+	template<typename T>
+	constexpr Vec4<T> Vec4<T>::operator-(const Vec4& vec) const
+	{
+		return Vec4(x - vec.x, y - vec.y, z - vec.z, w - vec.w);
+	}
+
+	template<typename T>
+	constexpr Vec4<T> Vec4<T>::operator*(const Vec4& vec) const
+	{
+		return Vec4(x * vec.x, y * vec.y, z * vec.z, w * vec.w);
+	}
+
+	template<typename T>
+	constexpr Vec4<T> Vec4<T>::operator*(T scale) const
+	{
+		return Vec4(x * scale, y * scale, z * scale, w * scale);
+	}
+
+	template<typename T>
+	constexpr Vec4<T> Vec4<T>::operator/(const Vec4& vec) const
+	{
+		return Vec4(x / vec.x, y / vec.y, z / vec.z, w / vec.w);
+	}
+
+	template<typename T>
+	constexpr Vec4<T> Vec4<T>::operator/(T scale) const
+	{
+		return Vec4(x / scale, y / scale, z / scale, w / scale);
+	}
+
+	template<typename T>
+	constexpr Vec4<T> Vec4<T>::operator%(const Vec4& vec) const
+	{
+		return Vec4(Mod(x, vec.x), Mod(y, vec.y), Mod(z, vec.z), Mod(w, vec.w));
+	}
+
+	template<typename T>
+	constexpr Vec4<T> Vec4<T>::operator%(T mod) const
+	{
+		return Vec4(Mod(x, mod), Mod(y, mod), Mod(z, mod), Mod(w, mod));
+	}
+
+	template<typename T>
+	constexpr Vec4<T>& Vec4<T>::operator+=(const Vec4& vec)
+	{
+		x += vec.x;
+		y += vec.y;
+		z += vec.z;
+		w += vec.w;
+
+		return *this;
+	}
+
+	template<typename T>
+	constexpr Vec4<T>& Vec4<T>::operator-=(const Vec4& vec)
+	{
+		x -= vec.x;
+		y -= vec.y;
+		z -= vec.z;
+		w -= vec.w;
+
+		return *this;
+	}
+
+	template<typename T>
+	constexpr Vec4<T>& Vec4<T>::operator*=(const Vec4& vec)
+	{
+		x *= vec.x;
+		y *= vec.y;
+		z *= vec.z;
+		w *= vec.w;
+
+		return *this;
+	}
+
+	template<typename T>
+	constexpr Vec4<T>& Vec4<T>::operator*=(T scale)
+	{
+		x *= scale;
+		y *= scale;
+		z *= scale;
+		w *= scale;
+
+		return *this;
+	}
+
+	template<typename T>
+	constexpr Vec4<T>& Vec4<T>::operator/=(const Vec4& vec)
+	{
+		x /= vec.x;
+		y /= vec.y;
+		z /= vec.z;
+		w /= vec.w;
+
+		return *this;
+	}
+
+	template<typename T>
+	constexpr Vec4<T>& Vec4<T>::operator/=(T scale)
+	{
+		x /= scale;
+		y /= scale;
+		z /= scale;
+		w /= scale;
+
+		return *this;
+	}
+
+	template<typename T>
+	constexpr Vec4<T>& Vec4<T>::operator%=(const Vec4& vec)
+	{
+		x = Mod(x, vec.x);
+		y = Mod(y, vec.y);
+		z = Mod(z, vec.z);
+		w = Mod(w, vec.w);
+
+		return *this;
+	}
+
+	template<typename T>
+	constexpr Vec4<T>& Vec4<T>::operator%=(T mod)
+	{
+		x = Mod(x, mod);
+		y = Mod(y, mod);
+		z = Mod(z, mod);
+		w = Mod(w, mod);
+
+		return *this;
+	}
+
+	template<typename T>
+	constexpr bool Vec4<T>::operator==(const Vec4& vec) const
+	{
+		return x == vec.x && y == vec.y && z == vec.z && w == vec.w;
+	}
+
+	template<typename T>
+	constexpr bool Vec4<T>::operator!=(const Vec4& vec) const
+	{
+		return !operator==(vec);
+	}
+
+	template<typename T>
+	constexpr bool Vec4<T>::operator<(const Vec4& vec) const
+	{
+		if (x != vec.x)
+			return x < vec.x;
+
+		if (y != vec.y)
+			return y < vec.y;
+
+		if (z != vec.z)
+			return z < vec.z;
+
+		return w < vec.w;
+	}
+
+	template<typename T>
+	constexpr bool Vec4<T>::operator<=(const Vec4& vec) const
+	{
+		if (x != vec.x)
+			return x < vec.x;
+
+		if (y != vec.y)
+			return y < vec.y;
+
+		if (z != vec.z)
+			return z < vec.z;
+
+		return w <= vec.w;
+	}
+
+	template<typename T>
+	constexpr bool Vec4<T>::operator>(const Vec4& vec) const
+	{
+		if (x != vec.x)
+			return x > vec.x;
+
+		if (y != vec.y)
+			return y > vec.y;
+
+		if (z != vec.z)
+			return z > vec.z;
+
+		return w > vec.w;
+	}
+
+	template<typename T>
+	constexpr bool Vec4<T>::operator>=(const Vec4& vec) const
+	{
+		if (x != vec.x)
+			return x > vec.x;
+
+		if (y != vec.y)
+			return y > vec.y;
+
+		if (z != vec.z)
+			return z > vec.z;
+
+		return w >= vec.w;
+	}
+
+	template<typename T>
+	constexpr Vec4<T> Vec4<T>::Apply(T(*func)(T), const Vec4& vec)
+	{
+		return Vec4(func(vec.x), func(vec.y), func(vec.z), func(vec.w));
+	}
+
+	template<typename T>
+	constexpr bool Vec4<T>::ApproxEqual(const Vec4& lhs, const Vec4& rhs, T maxDifference)
+	{
+		return lhs.ApproxEqual(rhs, maxDifference);
+	}
+
+	template<typename T>
+	constexpr T Vec4<T>::DotProduct(const Vec4& vec1, const Vec4& vec2)
+	{
+		return vec1.DotProduct(vec2);
+	}
+
+	template<typename T>
+	Vec4<T> Vec4<T>::Normalize(const Vec4& vec)
+	{
+		return vec.GetNormal();
+	}
+
+	template<typename T>
+	constexpr Vec4<T> Vec4<T>::UnitX()
+	{
+		return Vec4(1, 0, 0, 1);
+	}
+
+	template<typename T>
+	constexpr Vec4<T> Vec4<T>::UnitY()
+	{
+		return Vec4(0, 1, 0, 1);
+	}
+
+	template<typename T>
+	constexpr Vec4<T> Vec4<T>::UnitZ()
+	{
+		return Vec4(0, 0, 1, 1);
+	}
+
+	template<typename T>
+	constexpr Vec4<T> Vec4<T>::Zero()
+	{
+		return Vec4(0, 0, 0, 1);
+	}
+
+	template<typename T>
+	std::ostream& operator<<(std::ostream& out, const Vec4<T>& vec)
+	{
+		return out << "Vec4(" << vec.x << ", " << vec.y << ", " << vec.z << ", " << vec.w << ')';
+	}
+
+	template<typename T>
+	constexpr Vec4<T> operator*(T scale, const Vec4<T>& vec)
+	{
+		return Vec4(scale * vec.x, scale * vec.y, scale * vec.z, scale * vec.w);
+	}
+
+	template<typename T>
+	constexpr Vec4<T> operator/(T scale, const Vec4<T>& vec)
+	{
+		return Vec4(scale / vec.x, scale / vec.y, scale / vec.z, scale / vec.w);
+	}
+
+	template<typename T>
+	constexpr Vec4<T> operator%(T mod, const Vec4<T>& vec)
+	{
+		return Vec4(Mod(mod, vec.x), Mod(mod, vec.y), Mod(mod, vec.z), Mod(mod, vec.w));
+	}
+}
+
diff --git a/runtime/Includes/Platform/Inputs.h b/runtime/Includes/Platform/Inputs.h
index ca3d485..6e1929a 100644
--- a/runtime/Includes/Platform/Inputs.h
+++ b/runtime/Includes/Platform/Inputs.h
@@ -1,18 +1,7 @@
-/* ************************************************************************** */
-/*                                                                            */
-/*                                                        :::      ::::::::   */
-/*   Inputs.h                                           :+:      :+:    :+:   */
-/*                                                    +:+ +:+         +:+     */
-/*   By: maldavid                                    +#+  +:+       +#+       */
-/*                                                +#+#+#+#+#+   +#+           */
-/*   Created: 2022/10/05 16:27:35 by maldavid          #+#    #+#             */
-/*   Updated: 2024/07/05 20:35:09 by maldavid         ###   ########.fr       */
-/*                                                                            */
-/* ************************************************************************** */
-
 #ifndef __MLX_INPUTS__
 #define __MLX_INPUTS__
 
+#include 
 #include 
 #include 
 
diff --git a/runtime/Includes/Platform/Window.h b/runtime/Includes/Platform/Window.h
index 30c3945..04ef60e 100644
--- a/runtime/Includes/Platform/Window.h
+++ b/runtime/Includes/Platform/Window.h
@@ -1,36 +1,30 @@
-/* ************************************************************************** */
-/*                                                                            */
-/*                                                        :::      ::::::::   */
-/*   Window.h                                           :+:      :+:    :+:   */
-/*                                                    +:+ +:+         +:+     */
-/*   By: maldavid                                    +#+  +:+       +#+       */
-/*                                                +#+#+#+#+#+   +#+           */
-/*   Created: 2022/10/04 21:53:12 by maldavid          #+#    #+#             */
-/*   Updated: 2024/05/25 16:11:00 by maldavid         ###   ########.fr       */
-/*                                                                            */
-/* ************************************************************************** */
-
 #ifndef __MLX_WINDOW__
 #define __MLX_WINDOW__
 
+#include 
+
 namespace mlx
 {
 	class Window
 	{
 		public:
-			Window(std::size_t w, std::size_t h, const std::string& title);
+			Window(std::size_t w, std::size_t h, const std::string& title, bool hidden = false);
 
-			inline void* GetWindowHandle() const noexcept { return p_window; }
+			inline Handle GetWindowHandle() const noexcept { return p_window; }
 			inline int GetWidth() const noexcept { return m_width; }
 			inline int GetHeight() const noexcept { return m_height; }
 			inline std::uint32_t GetID() const noexcept { return m_id; }
 
+			inline VkSurfaceKHR CreateVulkanSurface(VkInstance instance) const noexcept { return SDLManager::Get().CreateVulkanSurface(p_window, instance); }
+			inline std::vector GetRequiredVulkanInstanceExtentions() const noexcept { return SDLManager::Get().GetRequiredVulkanInstanceExtentions(p_window); }
+			inline Vec2ui GetVulkanDrawableSize() const noexcept { return SDLManager::Get().GetVulkanDrawableSize(p_window); }
+
 			void Destroy() noexcept;
 
 			~Window() = default;
 
 		private:
-			void* p_window = nullptr;
+			Handle p_window = nullptr;
 			std::uint32_t m_id = -1;
 			int m_width = 0;
 			int m_height = 0;
diff --git a/runtime/Includes/PreCompiled.h b/runtime/Includes/PreCompiled.h
index 6601fe9..0284a51 100644
--- a/runtime/Includes/PreCompiled.h
+++ 
b/runtime/Includes/PreCompiled.h @@ -1,31 +1,20 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* PreCompiled.h :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2024/03/25 17:37:23 by maldavid #+# #+# */ -/* Updated: 2024/07/05 13:25:07 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - #ifndef __MLX_PRE_COMPILED_HEADER__ #define __MLX_PRE_COMPILED_HEADER__ #define VK_NO_PROTOTYPES -#define Window X11Window // fuck X11 +#define Window X11Window // f*ck X11 #include #include #include #include -#include #include #include +#include + #include #include #include @@ -40,8 +29,6 @@ #include #include #include -#include -#include #include #include #include @@ -82,5 +69,8 @@ #include #include #include +#include + +using Handle = void*; #endif diff --git a/runtime/Includes/Renderer/Buffer.h b/runtime/Includes/Renderer/Buffer.h new file mode 100644 index 0000000..24d7f31 --- /dev/null +++ b/runtime/Includes/Renderer/Buffer.h @@ -0,0 +1,83 @@ +#ifndef __MLX_GPU_BUFFER__ +#define __MLX_GPU_BUFFER__ + +#include +#include +#include + +namespace mlx +{ + class GPUBuffer + { + public: + GPUBuffer() = default; + + void Init(BufferType type, VkDeviceSize size, VkBufferUsageFlags usage, CPUBuffer data); + void Destroy() noexcept; + + bool CopyFrom(const GPUBuffer& buffer) noexcept; + + void Swap(GPUBuffer& buffer) noexcept; + + [[nodiscard]] MLX_FORCEINLINE void* GetMap() const noexcept { return m_memory.map; } + [[nodiscard]] MLX_FORCEINLINE VkBuffer Get() const noexcept { return m_buffer; } + [[nodiscard]] MLX_FORCEINLINE VkDeviceMemory GetMemory() const noexcept { return m_memory.memory; } + [[nodiscard]] MLX_FORCEINLINE VkDeviceSize GetSize() const noexcept { return m_memory.size; } + [[nodiscard]] MLX_FORCEINLINE VkDeviceSize GetOffset() const noexcept { return 0; } + + [[nodiscard]] inline bool IsInit() const noexcept { return m_buffer != VK_NULL_HANDLE; } + + ~GPUBuffer() = default; + + protected: + void PushToGPU() noexcept; + + protected: + VkBuffer m_buffer = VK_NULL_HANDLE; + VmaAllocation m_allocation; + VkDeviceSize m_offset = 0; + VkDeviceSize m_size = 0; + void* p_map = nullptr; + + private: + void CreateBuffer(VkDeviceSize size, VkBufferUsageFlags usage, VmaAllocationCreateInfo alloc_info); + + private: + VkBufferUsageFlags m_usage = 0; + }; + + class VertexBuffer : public GPUBuffer + { + public: + inline void Init(std::uint32_t size, VkBufferUsageFlags additional_flags = 0) { GPUBuffer::Init(BufferType::LowDynamic, size, VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | additional_flags, {}); } + void SetData(CPUBuffer data); + inline void Bind(VkCommandBuffer cmd) const noexcept { VkDeviceSize offset = 0; vkCmdBindVertexBuffers(cmd, 0, 1, &m_buffer, &offset); } + }; + + class IndexBuffer : public GPUBuffer + { + public: + inline void Init(std::uint32_t size, VkBufferUsageFlags additional_flags = 0) { GPUBuffer::Init(BufferType::LowDynamic, size, VK_BUFFER_USAGE_INDEX_BUFFER_BIT | additional_flags, {}); } + void SetData(CPUBuffer data); + inline void Bind(VkCommandBuffer cmd) const noexcept { vkCmdBindIndexBuffer(cmd, m_buffer, 0, VK_INDEX_TYPE_UINT32); } + }; + + class UniformBuffer + { + public: + void Init(std::uint32_t size); + void SetData(CPUBuffer data, std::size_t frame_index); + void Destroy() noexcept; + + inline VkDeviceSize GetSize(int i) const noexcept { return m_buffers[i].GetSize(); } + inline VkDeviceSize 
GetOffset(int i) const noexcept { return m_buffers[i].GetOffset(); } + inline VkBuffer GetVk(int i) const noexcept { return m_buffers[i].Get(); } + inline GPUBuffer& Get(int i) noexcept { return m_buffers[i]; } + + private: + std::array m_buffers; + std::array m_maps; + }; +} + +#endif diff --git a/runtime/Includes/Renderer/Buffers/Buffer.h b/runtime/Includes/Renderer/Buffers/Buffer.h deleted file mode 100644 index f8532ab..0000000 --- a/runtime/Includes/Renderer/Buffers/Buffer.h +++ /dev/null @@ -1,66 +0,0 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* Buffer.h :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2022/10/06 23:18:52 by maldavid #+# #+# */ -/* Updated: 2024/04/23 14:20:49 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - -#ifndef __MLX_VK_BUFFER__ -#define __MLX_VK_BUFFER__ - -#include -#include -#include - -namespace mlx -{ - class Buffer : public CommandResource - { - public: - Buffer() = default; - - void Create(BufferType type, VkDeviceSize size, VkBufferUsageFlags usage, const char* name, const void* data = nullptr); - void Destroy() noexcept; - - inline void MapMem(void** data) noexcept { RenderCore::Get().GetAllocator().MapMemory(m_allocation, data); m_is_mapped = true; } - inline bool IsMapped() const noexcept { return m_is_mapped; } - inline void UnmapMem() noexcept { RenderCore::Get().GetAllocator().UnmapMemory(m_allocation); m_is_mapped = false; } - - void Flush(VkDeviceSize size = VK_WHOLE_SIZE, VkDeviceSize offset = 0); - bool CopyFromBuffer(const Buffer& buffer) noexcept; - - inline VkBuffer& operator()() noexcept { return m_buffer; } - inline VkBuffer& Get() noexcept { return m_buffer; } - inline VkDeviceSize GetSize() const noexcept { return m_size; } - inline VkDeviceSize GetOffset() const noexcept { return m_offset; } - - ~Buffer() = default; - - protected: - void PushToGPU() noexcept; - void Swap(Buffer& buffer) noexcept; - - protected: - VmaAllocation m_allocation; - VkBuffer m_buffer = VK_NULL_HANDLE; - VkDeviceSize m_offset = 0; - VkDeviceSize m_size = 0; - - private: - void CreateBuffer(VkBufferUsageFlags usage, VmaAllocationCreateInfo info, VkDeviceSize size, const char* name); - - private: - #ifdef DEBUG - std::string m_name; - #endif - VkBufferUsageFlags m_usage = 0; - bool m_is_mapped = false; - }; -} - -#endif diff --git a/runtime/Includes/Renderer/Buffers/IndexBuffer.h b/runtime/Includes/Renderer/Buffers/IndexBuffer.h deleted file mode 100644 index 877d518..0000000 --- a/runtime/Includes/Renderer/Buffers/IndexBuffer.h +++ /dev/null @@ -1,29 +0,0 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* IndexBuffer.h :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2023/01/25 15:05:05 by maldavid #+# #+# */ -/* Updated: 2024/07/05 13:24:46 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - -#ifndef __VK_IBO__ -#define __VK_IBO__ - -#include -#include - -namespace mlx -{ - class ConstantIndexBuffer : public Buffer - { - public: - inline void Create(std::uint32_t size, const std::uint16_t* data, const char* name) { Buffer::Create(BufferType::Constant, size, VK_BUFFER_USAGE_INDEX_BUFFER_BIT, name, data); } - inline void Bind(Renderer& renderer) noexcept { 
renderer.GetActiveCmdBuffer().BindIndexBuffer(*this); } - }; -} - -#endif diff --git a/runtime/Includes/Renderer/Buffers/UniformBuffer.h b/runtime/Includes/Renderer/Buffers/UniformBuffer.h deleted file mode 100644 index d7f6584..0000000 --- a/runtime/Includes/Renderer/Buffers/UniformBuffer.h +++ /dev/null @@ -1,50 +0,0 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* UniformBuffer.h :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2022/10/06 18:45:29 by maldavid #+# #+# */ -/* Updated: 2024/07/05 13:39:32 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - -#ifndef __MLX_VK_UBO__ -#define __MLX_VK_UBO__ - -#include - -namespace mlx -{ - class UniformBuffer - { - public: - UniformBuffer() = default; - - void Create(NonOwningPtr renderer, std::uint32_t size, const char* name); - void Destroy() noexcept; - - void SetData(std::uint32_t size, const void* data); - - VkDeviceSize GetSize() noexcept; - VkDeviceSize GetOffset() noexcept; - VkDeviceMemory GetDeviceMemory() noexcept; - VkBuffer& operator()() noexcept; - VkBuffer& Get() noexcept; - - inline VkDeviceSize GetSize(int i) noexcept { return m_buffers[i].GetSize(); } - inline VkDeviceSize GetOffset(int i) noexcept { return m_buffers[i].GetOffset(); } - inline VkBuffer& operator()(int i) noexcept { return m_buffers[i].Get(); } - inline VkBuffer& Get(int i) noexcept { return m_buffers[i].Get(); } - - ~UniformBuffer() = default; - - private: - std::array m_buffers; - std::array m_maps; - NonOwningPtr p_renderer; - }; -} - -#endif // __MLX_VK_UBO__ diff --git a/runtime/Includes/Renderer/Buffers/VertexBuffer.h b/runtime/Includes/Renderer/Buffers/VertexBuffer.h deleted file mode 100644 index 8feb7c0..0000000 --- a/runtime/Includes/Renderer/Buffers/VertexBuffer.h +++ /dev/null @@ -1,46 +0,0 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* VertexBuffer.h :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2022/10/06 18:27:38 by maldavid #+# #+# */ -/* Updated: 2024/07/05 13:24:41 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - -#ifndef __MLX_VK_VBO__ -#define __MLX_VK_VBO__ - -#include -#include -#include - -namespace mlx -{ - class RAMVertexBuffer : public Buffer - { - public: - inline void Create(std::uint32_t size, const void* data, const char* name) { Buffer::Create(BufferType::HighDynamic, size, VK_BUFFER_USAGE_VERTEX_BUFFER_BIT, name, data); } - void SetData(std::uint32_t size, const void* data); - inline void Bind(Renderer& renderer) noexcept { renderer.GetActiveCmdBuffer().BindVertexBuffer(*this); } - }; - - class DeviceVertexBuffer : public Buffer - { - public: - inline void create(std::uint32_t size, const void* data, const char* name) { Buffer::Create(BufferType::LowDynamic, size, VK_BUFFER_USAGE_VERTEX_BUFFER_BIT, name, data); } - void SetData(std::uint32_t size, const void* data); - inline void Bind(Renderer& renderer) noexcept { renderer.GetActiveCmdBuffer().BindVertexBuffer(*this); } - }; - - class ConstantVertexBuffer : public Buffer - { - public: - inline void Create(std::uint32_t size, const void* data, const char* name) { Buffer::Create(BufferType::Constant, size, VK_BUFFER_USAGE_VERTEX_BUFFER_BIT, name, data); } - inline void Bind(Renderer& renderer) 
noexcept { renderer.GetActiveCmdBuffer().BindVertexBuffer(*this); } - }; -} - -#endif // __MLX_VK_VBO__ diff --git a/runtime/Includes/Renderer/Command/CommandBuffer.h b/runtime/Includes/Renderer/Command/CommandBuffer.h deleted file mode 100644 index 90a0a48..0000000 --- a/runtime/Includes/Renderer/Command/CommandBuffer.h +++ /dev/null @@ -1,70 +0,0 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* CommandBuffer.h :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2022/10/06 18:25:42 by maldavid #+# #+# */ -/* Updated: 2024/07/05 13:37:54 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - -#ifndef __MLX_VK_CMD_BUFFER__ -#define __MLX_VK_CMD_BUFFER__ - -#include -#include - -namespace mlx -{ - class Buffer; - class Image; - - class CommandBuffer - { - public: - void Init(CommandBufferType type, NonOwningPtr manager); - void Init(CommandBufferType type, NonOwningPtr pool); - void Destroy() noexcept; - - void BeginRecord(VkCommandBufferUsageFlags usage = 0); - void Submit(NonOwningPtr signal, NonOwningPtr wait) noexcept; - void SubmitIdle(bool shouldWaitForExecution = true) noexcept; // TODO : handle `shouldWaitForExecution` as false by default (needs to modify CmdResources lifetimes to do so) - void UpdateSubmitState() noexcept; - inline void WaitForExecution() noexcept { m_fence.Wait(); UpdateSubmitState(); m_state = CommandBufferState::Ready; } - inline void Reset() noexcept { vkResetCommandBuffer(m_cmd_buffer, 0); } - void EndRecord(); - - void BindVertexBuffer(Buffer& buffer) noexcept; - void BindIndexBuffer(Buffer& buffer) noexcept; - void CopyBuffer(Buffer& dst, Buffer& src) noexcept; - void CopyBufferToImage(Buffer& buffer, Image& image) noexcept; - void CopyImagetoBuffer(Image& image, Buffer& buffer) noexcept; - void TransitionImageLayout(Image& image, VkImageLayout new_layout) noexcept; - - inline bool IsInit() const noexcept { return m_state != CommandBufferState::Uninit; } - inline bool IsReadyToBeUsed() const noexcept { return m_state == CommandBufferState::Ready; } - inline bool IsRecording() const noexcept { return m_state == CommandBufferState::Recording; } - inline bool HasBeenSubmitted() const noexcept { return m_state == CommandBufferState::Submitted; } - inline CommandBufferState GetCurrentState() const noexcept { return m_state; } - - inline VkCommandBuffer& operator()() noexcept { return m_cmd_buffer; } - inline VkCommandBuffer& Get() noexcept { return m_cmd_buffer; } - inline Fence& GetFence() noexcept { return m_fence; } - - private: - void PreTransferBarrier() noexcept; - void PostTransferBarrier() noexcept; - - private: - std::vector> m_cmd_resources; - Fence m_fence; - VkCommandBuffer m_cmd_buffer = VK_NULL_HANDLE; - NonOwningPtr m_pool; - CommandBufferState m_state = CommandBufferState::Uninit; - CommandBufferType m_type; - }; -} - -#endif // __MLX_VK_CMD_BUFFER__ diff --git a/runtime/Includes/Renderer/Command/CommandManager.h b/runtime/Includes/Renderer/Command/CommandManager.h deleted file mode 100644 index c0dcedc..0000000 --- a/runtime/Includes/Renderer/Command/CommandManager.h +++ /dev/null @@ -1,43 +0,0 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* CommandManager.h :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2023/04/02 17:48:52 by maldavid #+# #+# */ 
-/* Updated: 2024/03/27 22:20:53 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - -#ifndef __MLX_COMMAND_MANAGER__ -#define __MLX_COMMAND_MANAGER__ - -#include -#include -#include - -namespace mlx -{ - class CommandManager - { - public: - CommandManager() = default; - - void Init() noexcept; - void BeginRecord(int active_image_index); - void EndRecord(int active_image_index); - void Destroy() noexcept; - - inline CommandPool& GetCmdPool() noexcept { return m_cmd_pool; } - inline CommandBuffer& GetCmdBuffer(int i) noexcept { return m_cmd_buffers[i]; } - - ~CommandManager() = default; - - private: - std::array m_cmd_buffers; - CommandPool m_cmd_pool; - }; -} - -#endif diff --git a/runtime/Includes/Renderer/Command/CommandPool.h b/runtime/Includes/Renderer/Command/CommandPool.h deleted file mode 100644 index edb8f22..0000000 --- a/runtime/Includes/Renderer/Command/CommandPool.h +++ /dev/null @@ -1,36 +0,0 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* CommandPool.h :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2022/10/06 18:24:12 by maldavid #+# #+# */ -/* Updated: 2024/03/27 22:33:15 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - -#ifndef __MLX_VK_CMD_POOL__ -#define __MLX_VK_CMD_POOL__ - -namespace mlx -{ - class CommandPool - { - public: - CommandPool() = default; - - void Init(); - void Destroy() noexcept; - - inline VkCommandPool& operator()() noexcept { return m_cmd_pool; } - inline VkCommandPool& Get() noexcept { return m_cmd_pool; } - - ~CommandPool() = default; - - private: - VkCommandPool m_cmd_pool = VK_NULL_HANDLE; - }; -} - -#endif // __MLX_VK_CMD_POOL__ diff --git a/runtime/Includes/Renderer/Command/CommandResource.h b/runtime/Includes/Renderer/Command/CommandResource.h deleted file mode 100644 index e9c367e..0000000 --- a/runtime/Includes/Renderer/Command/CommandResource.h +++ /dev/null @@ -1,38 +0,0 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* CommandResource.h :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2023/12/16 20:44:29 by maldavid #+# #+# */ -/* Updated: 2024/03/27 22:37:06 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - -#ifndef __MLX_COMMAND_RESOURCE__ -#define __MLX_COMMAND_RESOURCE__ - -#include -#include - -namespace mlx -{ - class CommandResource - { - friend class SingleTimeCmdManager; - - public: - CommandResource() : m_uuid() {} - inline void RecordedInCmdBuffer() noexcept { m_state = CommandResourceState::Held; } - inline void RemovedFromCmdBuffer() noexcept { m_state = CommandResourceState::Free; } - inline UUID GetUUID() const noexcept { return m_uuid; } - virtual ~CommandResource() = default; - - private: - UUID m_uuid; - CommandResourceState m_state = CommandResourceState::Free; - }; -} - -#endif diff --git a/runtime/Includes/Renderer/Command/SingleTimeCmdManager.h b/runtime/Includes/Renderer/Command/SingleTimeCmdManager.h deleted file mode 100644 index 1ac2ec6..0000000 --- a/runtime/Includes/Renderer/Command/SingleTimeCmdManager.h +++ /dev/null @@ -1,49 +0,0 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* 
SingleTimeCmdManager.h :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2023/12/15 18:25:57 by maldavid #+# #+# */ -/* Updated: 2024/07/05 13:38:11 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - -#ifndef __MLX_SINGLE_TIME_CMD_MANAGER__ -#define __MLX_SINGLE_TIME_CMD_MANAGER__ - -#include -#include - -namespace mlx -{ - class CommandBuffer; - - class SingleTimeCmdManager - { - friend class RenderCore; - - public: - SingleTimeCmdManager() = default; - - void Init() noexcept; - void Destroy() noexcept; - - void UpdateSingleTimesCmdBuffersSubmitState() noexcept; - void WaitForAllExecutions() noexcept; - - inline CommandPool& GetCmdPool() noexcept { return m_pool; } - CommandBuffer& GetCmdBuffer() noexcept; - - ~SingleTimeCmdManager() = default; - - inline static constexpr const std::uint8_t BASE_POOL_SIZE = 16; - - private: - std::vector m_buffers; - CommandPool m_pool; - }; -} - -#endif diff --git a/runtime/Includes/Renderer/Core/Device.h b/runtime/Includes/Renderer/Core/Device.h deleted file mode 100644 index c46aaf6..0000000 --- a/runtime/Includes/Renderer/Core/Device.h +++ /dev/null @@ -1,40 +0,0 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* Device.h :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2022/10/08 19:13:42 by maldavid #+# #+# */ -/* Updated: 2024/03/27 22:47:21 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - -#ifndef __MLX_VK_DEVICE__ -#define __MLX_VK_DEVICE__ - -namespace mlx -{ - class Device - { - public: - void Init(); - void Destroy() noexcept; - - inline VkDevice& operator()() noexcept { return m_device; } - inline VkDevice& Get() noexcept { return m_device; } - - inline VkPhysicalDevice& GetPhysicalDevice() noexcept { return m_physical_device; } - - private: - void PickPhysicalDevice(); - bool CheckDeviceExtensionSupport(VkPhysicalDevice device); - int DeviceScore(VkPhysicalDevice device); - - private: - VkPhysicalDevice m_physical_device = VK_NULL_HANDLE; - VkDevice m_device = VK_NULL_HANDLE; - }; -} - -#endif // __MLX_VK_DEVICE__ diff --git a/runtime/Includes/Renderer/Core/DrawableResource.h b/runtime/Includes/Renderer/Core/DrawableResource.h deleted file mode 100644 index a043d3a..0000000 --- a/runtime/Includes/Renderer/Core/DrawableResource.h +++ /dev/null @@ -1,28 +0,0 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* DrawableResource.h :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2024/01/10 21:00:37 by maldavid #+# #+# */ -/* Updated: 2024/04/23 18:10:56 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - -#ifndef __MLX_DRAWABLE_RESOURCE__ -#define __MLX_DRAWABLE_RESOURCE__ - -namespace mlx -{ - class DrawableResource - { - public: - DrawableResource() = default; - virtual void Render(class Renderer& renderer) = 0; - virtual void ResetUpdate() {} - virtual ~DrawableResource() = default; - }; -} - -#endif diff --git a/runtime/Includes/Renderer/Core/Fence.h b/runtime/Includes/Renderer/Core/Fence.h deleted file mode 100644 index ea46aae..0000000 --- a/runtime/Includes/Renderer/Core/Fence.h +++ /dev/null @@ -1,40 +0,0 @@ -/* 
************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* Fence.h :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2023/04/02 17:52:09 by maldavid #+# #+# */ -/* Updated: 2024/03/27 22:48:31 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - -#ifndef __MLX_VK_FENCE__ -#define __MLX_VK_FENCE__ - -namespace mlx -{ - class Fence - { - public: - Fence() = default; - - void Init(); - - inline VkFence& Get() noexcept { return m_fence; } - void Wait() noexcept; - void Reset() noexcept; - bool IsReady() const noexcept; - MLX_FORCEINLINE void WaitAndReset() noexcept { Wait(); Reset(); } - - void Destroy() noexcept; - - ~Fence() = default; - - private: - VkFence m_fence = VK_NULL_HANDLE; - }; -} - -#endif diff --git a/runtime/Includes/Renderer/Core/Instance.h b/runtime/Includes/Renderer/Core/Instance.h deleted file mode 100644 index cecb73d..0000000 --- a/runtime/Includes/Renderer/Core/Instance.h +++ /dev/null @@ -1,38 +0,0 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* Instance.h :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2022/10/08 19:03:04 by maldavid #+# #+# */ -/* Updated: 2024/04/23 18:44:02 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - -#ifndef __MLX_VK_INSTANCE__ -#define __MLX_VK_INSTANCE__ - -namespace mlx -{ - class Instance - { - public: - void Init(); - void Destroy() noexcept; - - inline std::uint32_t GetInstanceVersion() const noexcept { return m_instance_version; } - - inline VkInstance& operator()() noexcept { return m_instance; } - inline VkInstance& Get() noexcept { return m_instance; } - - private: - std::vector GetRequiredExtensions(); - - private: - VkInstance m_instance = VK_NULL_HANDLE; - std::uint32_t m_instance_version = 0; - }; -} - -#endif // __MLX_VK_INSTANCE__ diff --git a/runtime/Includes/Renderer/Core/Queues.h b/runtime/Includes/Renderer/Core/Queues.h deleted file mode 100644 index 2fdc7ec..0000000 --- a/runtime/Includes/Renderer/Core/Queues.h +++ /dev/null @@ -1,51 +0,0 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* Queues.h :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2022/10/08 19:01:49 by maldavid #+# #+# */ -/* Updated: 2024/07/05 13:38:50 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - -#ifndef __MLX_VK_QUEUES__ -#define __MLX_VK_QUEUES__ - -namespace mlx -{ - class Queues - { - public: - struct QueueFamilyIndices - { - std::optional graphics_family; - std::optional present_family; - - inline bool IsComplete() { return graphics_family.has_value() && present_family.has_value(); } - }; - - public: - QueueFamilyIndices FindQueueFamilies(VkPhysicalDevice device); - - void Init(); - - inline VkQueue& GetGraphic() noexcept { return m_graphics_queue; } - inline VkQueue& GetPresent() noexcept { return m_present_queue; } - inline QueueFamilyIndices GetFamilies() noexcept - { - if(m_families.has_value()) - return *m_families; - FatalError("Vulkan : cannot get queue families, not init"); - return {}; // just to avoid warnings - } - - private: - VkQueue m_graphics_queue; - VkQueue m_present_queue; - 
std::optional m_families; - }; -} - -#endif // __MLX_VK_QUEUES__ diff --git a/runtime/Includes/Renderer/Core/RenderCore.h b/runtime/Includes/Renderer/Core/RenderCore.h deleted file mode 100644 index b0939e7..0000000 --- a/runtime/Includes/Renderer/Core/RenderCore.h +++ /dev/null @@ -1,78 +0,0 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* RenderCore.h :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2022/10/08 19:16:32 by maldavid #+# #+# */ -/* Updated: 2024/07/05 13:39:11 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - -#ifndef __MLX_RENDER_CORE__ -#define __MLX_RENDER_CORE__ - -#include -#include -#include -#include -#include -#include -#include -#include - -#include - -namespace mlx -{ - const char* VerbaliseVkResult(VkResult result); - VkPipelineStageFlags AccessFlagsToPipelineStage(VkAccessFlags access_flags, VkPipelineStageFlags stage_flags); - - #ifdef DEBUG - constexpr const bool enable_validation_layers = true; - #else - constexpr const bool enable_validation_layers = false; - #endif - - const std::vector validation_layers = { "VK_LAYER_KHRONOS_validation" }; - - constexpr const int MAX_FRAMES_IN_FLIGHT = 3; - constexpr const int MAX_SETS_PER_POOL = 512; - constexpr const int NUMBER_OF_UNIFORM_BUFFERS = 1; // change this if for wathever reason more than one uniform buffer is needed - - class RenderCore : public Singleton - { - friend class Singleton; - - public: - void Init(); - void Destroy(); - - inline bool IsInit() const noexcept { return m_is_init; } - inline Instance& GetInstance() noexcept { return m_instance; } - inline Device& GetDevice() noexcept { return m_device; } - inline Queues& GetQueue() noexcept { return m_queues; } - inline GPUallocator& GetAllocator() noexcept { return m_allocator; } - inline ValidationLayers& GetLayers() noexcept { return m_layers; } - inline CommandBuffer& GetSingleTimeCmdBuffer() noexcept { return m_cmd_manager.GetCmdBuffer(); } - inline SingleTimeCmdManager& GetSingleTimeCmdManager() noexcept { return m_cmd_manager; } - inline DescriptorPool& GetDescriptorPool() { return m_pool_manager.GetAvailablePool(); } - - private: - RenderCore() = default; - ~RenderCore() = default; - - private: - ValidationLayers m_layers; - SingleTimeCmdManager m_cmd_manager; - Queues m_queues; - DescriptorPoolManager m_pool_manager; - Device m_device; - Instance m_instance; - GPUallocator m_allocator; - bool m_is_init = false; - }; -} - -#endif // __MLX_RENDER_CORE__ diff --git a/runtime/Includes/Renderer/Core/Semaphore.h b/runtime/Includes/Renderer/Core/Semaphore.h deleted file mode 100644 index 8e071b8..0000000 --- a/runtime/Includes/Renderer/Core/Semaphore.h +++ /dev/null @@ -1,31 +0,0 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* Semaphore.h :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2022/10/08 18:59:38 by maldavid #+# #+# */ -/* Updated: 2024/03/27 22:56:51 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - -#ifndef __MLX_VK_SEMAPHORE__ -#define __MLX_VK_SEMAPHORE__ - -namespace mlx -{ - class Semaphore - { - public: - void Init(); - void Destroy() noexcept; - - inline VkSemaphore& Get() noexcept { return m_semaphore; } - - private: - VkSemaphore m_semaphore = 
VK_NULL_HANDLE; - }; -} - -#endif // __MLX_VK_SEMAPHORE__ diff --git a/runtime/Includes/Renderer/Core/Surface.h b/runtime/Includes/Renderer/Core/Surface.h deleted file mode 100644 index 01a2785..0000000 --- a/runtime/Includes/Renderer/Core/Surface.h +++ /dev/null @@ -1,34 +0,0 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* Surface.h :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2022/10/08 18:57:55 by maldavid #+# #+# */ -/* Updated: 2024/03/27 22:58:15 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - -#ifndef __MLX_VK_SURFACE__ -#define __MLX_VK_SURFACE__ - -namespace mlx -{ - class Surface - { - public: - void Create(class Renderer& renderer); - void Destroy() noexcept; - - VkSurfaceFormatKHR ChooseSwapSurfaceFormat(const std::vector& available_formats); - - inline VkSurfaceKHR& operator()() noexcept { return m_surface; } - inline VkSurfaceKHR& Get() noexcept { return m_surface; } - - private: - VkSurfaceKHR m_surface = VK_NULL_HANDLE; - }; -} - -#endif // __MLX_VK_SURFACE__ diff --git a/runtime/Includes/Renderer/Core/ValidationLayers.h b/runtime/Includes/Renderer/Core/ValidationLayers.h deleted file mode 100644 index 0dc3e45..0000000 --- a/runtime/Includes/Renderer/Core/ValidationLayers.h +++ /dev/null @@ -1,44 +0,0 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* ValidationLayers.h :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2022/12/19 14:04:25 by maldavid #+# #+# */ -/* Updated: 2024/04/23 19:16:25 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - -#ifndef __VK_VALIDATION_LAYERS__ -#define __VK_VALIDATION_LAYERS__ - -namespace mlx -{ - class ValidationLayers - { - public: - ValidationLayers() = default; - - void Init(); - void Destroy(); - - bool CheckValidationLayerSupport(); - void PopulateDebugMessengerCreateInfo(VkDebugUtilsMessengerCreateInfoEXT& create_info); - - VkResult SetDebugUtilsObjectNameEXT(VkObjectType object_type, std::uint64_t object_handle, const char* object_name); - - ~ValidationLayers() = default; - - private: - VkResult CreateDebugUtilsMessengerEXT(const VkDebugUtilsMessengerCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator); - static VKAPI_ATTR VkBool32 VKAPI_CALL DebugCallback(VkDebugUtilsMessageSeverityFlagBitsEXT message_severity, VkDebugUtilsMessageTypeFlagsEXT message_type, const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData, void* pUserData); - void DestroyDebugUtilsMessengerEXT(const VkAllocationCallbacks* pAllocator); - - private: - VkDebugUtilsMessengerEXT m_debug_messenger; - PFN_vkSetDebugUtilsObjectNameEXT f_vkSetDebugUtilsObjectNameEXT = nullptr; - }; -} - -#endif diff --git a/runtime/Includes/Renderer/Descriptor.h b/runtime/Includes/Renderer/Descriptor.h new file mode 100644 index 0000000..9a28ee3 --- /dev/null +++ b/runtime/Includes/Renderer/Descriptor.h @@ -0,0 +1,48 @@ +#ifndef __MLX_DESCRIPTOR_SET__ +#define __MLX_DESCRIPTOR_SET__ + +#include +#include +#include +#include + +namespace mlx +{ + struct Descriptor + { + NonOwningPtr storage_buffer_ptr; + NonOwningPtr uniform_buffer_ptr; + NonOwningPtr image_ptr; + VkDescriptorType type; + ShaderType shader_type; + std::uint32_t binding; + }; + + class DescriptorSet + { + public: + 
DescriptorSet() { m_set.fill(VK_NULL_HANDLE); } + DescriptorSet(const ShaderSetLayout& layout, VkDescriptorSetLayout vklayout, ShaderType shader_type); + + void SetImage(std::size_t i, std::uint32_t binding, class Image& image); + void SetStorageBuffer(std::size_t i, std::uint32_t binding, class GPUBuffer& buffer); + void SetUniformBuffer(std::size_t i, std::uint32_t binding, class GPUBuffer& buffer); + void Update(std::size_t i, VkCommandBuffer cmd = VK_NULL_HANDLE) noexcept; + + [[nodiscard]] inline VkDescriptorSet GetSet(std::size_t i) const noexcept { return m_set[i]; } + [[nodiscard]] inline DescriptorSet Duplicate() const { return DescriptorSet{ m_set_layout, m_descriptors }; } + [[nodiscard]] inline bool IsInit() const noexcept { return m_set[0] != VK_NULL_HANDLE; } + + ~DescriptorSet() = default; + + private: + DescriptorSet(VkDescriptorSetLayout layout, const std::vector& descriptors); + + private: + std::vector m_descriptors; + std::array m_set; + VkDescriptorSetLayout m_set_layout; + }; +} + +#endif diff --git a/runtime/Includes/Renderer/Descriptors/DescriptorPool.h b/runtime/Includes/Renderer/Descriptors/DescriptorPool.h deleted file mode 100644 index 1b09905..0000000 --- a/runtime/Includes/Renderer/Descriptors/DescriptorPool.h +++ /dev/null @@ -1,42 +0,0 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* DescriptorPool.h :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2023/01/23 18:32:43 by maldavid #+# #+# */ -/* Updated: 2024/04/23 19:36:03 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - -#ifndef __VK_DESCRIPTOR_POOL__ -#define __VK_DESCRIPTOR_POOL__ - -namespace mlx -{ - class DescriptorPool - { - public: - DescriptorPool() = default; - - void Init(std::vector sizes); - VkDescriptorSet AllocateDescriptorSet(class DescriptorSetLayout& layout); - void FreeDescriptor(VkDescriptorSet set); - void Destroy() noexcept; - - inline VkDescriptorPool& operator()() noexcept { return m_pool; } - inline VkDescriptorPool& Get() noexcept { return m_pool; } - inline std::size_t GetNumberOfSetsAllocated() const noexcept { return m_allocated_sets; } - - inline bool IsInit() const noexcept { return m_pool != VK_NULL_HANDLE; } - - ~DescriptorPool() = default; - - private: - VkDescriptorPool m_pool = VK_NULL_HANDLE; - std::size_t m_allocated_sets = 0; - }; -} - -#endif diff --git a/runtime/Includes/Renderer/Descriptors/DescriptorPoolManager.h b/runtime/Includes/Renderer/Descriptors/DescriptorPoolManager.h deleted file mode 100644 index 5c032aa..0000000 --- a/runtime/Includes/Renderer/Descriptors/DescriptorPoolManager.h +++ /dev/null @@ -1,35 +0,0 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* DescriptorPoolManager.h :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2024/01/20 06:26:26 by maldavid #+# #+# */ -/* Updated: 2024/04/23 19:40:22 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - -#ifndef __MLX_DESCRIPTOR_POOL_MANAGER__ -#define __MLX_DESCRIPTOR_POOL_MANAGER__ - -#include - -namespace mlx -{ - class DescriptorPoolManager - { - public: - DescriptorPoolManager() = default; - - DescriptorPool& GetAvailablePool(); - void DestroyAllPools(); - - ~DescriptorPoolManager() = default; - - private: - std::list m_pools; 
- }; -} - -#endif diff --git a/runtime/Includes/Renderer/Descriptors/DescriptorSet.h b/runtime/Includes/Renderer/Descriptors/DescriptorSet.h deleted file mode 100644 index f687c8c..0000000 --- a/runtime/Includes/Renderer/Descriptors/DescriptorSet.h +++ /dev/null @@ -1,56 +0,0 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* DescriptorSet.h :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2023/01/23 18:39:36 by maldavid #+# #+# */ -/* Updated: 2024/07/05 13:42:22 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - -#ifndef __VK_DESCRIPTOR_SET__ -#define __VK_DESCRIPTOR_SET__ - -#include -#include - -namespace mlx -{ - class DescriptorSet - { - public: - DescriptorSet() = default; - - void Init(NonOwningPtr renderer, NonOwningPtr pool, DescriptorSetLayout layout); - - void WriteDescriptor(int binding, NonOwningPtr ubo) const noexcept; - void WriteDescriptor(int binding, const class Image& image) const noexcept; - - inline bool IsInit() const noexcept { return p_pool && p_renderer; } - - void Bind() noexcept; - - DescriptorSet Duplicate(); - - VkDescriptorSet& operator()() noexcept; - VkDescriptorSet& Get() noexcept; - - inline const DescriptorSetLayout& GetLayout() const noexcept { return m_layout; } - - inline const std::array& GetAllFramesDescriptorSets() const { return m_desc_set; } - - void Destroy() noexcept; - - ~DescriptorSet() = default; - - private: - DescriptorSetLayout m_layout; - std::array m_desc_set; - NonOwningPtr p_pool; - NonOwningPtr p_renderer; - }; -} - -#endif diff --git a/runtime/Includes/Renderer/Descriptors/DescriptorSetLayout.h b/runtime/Includes/Renderer/Descriptors/DescriptorSetLayout.h deleted file mode 100644 index ce13a5f..0000000 --- a/runtime/Includes/Renderer/Descriptors/DescriptorSetLayout.h +++ /dev/null @@ -1,38 +0,0 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* DescriptorSetLayout.h :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2023/01/23 18:36:22 by maldavid #+# #+# */ -/* Updated: 2024/04/23 22:15:01 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - -#ifndef __VK_DESCRIPTOR_SET_LAYOUT__ -#define __VK_DESCRIPTOR_SET_LAYOUT__ - -namespace mlx -{ - class DescriptorSetLayout - { - public: - DescriptorSetLayout() = default; - - void Init(std::vector> binds, VkShaderStageFlagBits stage); - void Destroy() noexcept; - - inline VkDescriptorSetLayout operator()() const noexcept { return m_layout; } - inline VkDescriptorSetLayout Get() const noexcept { return m_layout; } - inline const std::vector>& GetBindings() const noexcept { return m_bindings; } - - ~DescriptorSetLayout() = default; - - private: - std::vector> m_bindings; - VkDescriptorSetLayout m_layout = VK_NULL_HANDLE; - }; -} - -#endif diff --git a/runtime/Includes/Renderer/Enums.h b/runtime/Includes/Renderer/Enums.h index 30083b4..684f4e0 100644 --- a/runtime/Includes/Renderer/Enums.h +++ b/runtime/Includes/Renderer/Enums.h @@ -1,15 +1,3 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* Enums.h :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2024/03/27 22:02:58 by maldavid #+# #+# */ -/* Updated: 
2024/03/27 22:39:31 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - #ifndef __MLX_RENDERER_ENUMS__ #define __MLX_RENDERER_ENUMS__ @@ -18,6 +6,7 @@ namespace mlx enum class BufferType { Constant = 0, + Staging, HighDynamic, // typically stored in RAM LowDynamic, // typically stored in VRAM @@ -25,35 +14,13 @@ namespace mlx }; constexpr std::size_t BufferTypeCount = static_cast(BufferType::EndEnum); - enum class CommandResourceState + enum class ImageType { - Held = 0, - Free, + Color = 0, EndEnum }; - constexpr std::size_t CommandResourceStateCount = static_cast(CommandResourceState::EndEnum); - - enum class CommandBufferState - { - Uninit = 0, // buffer not initialized or destroyed - Ready, // buffer ready to be used after having been submitted - Idle, // buffer has recorded informations but has not been submitted - Recording, // buffer is currently recording - Submitted, // buffer has been submitted - - EndEnum - }; - constexpr std::size_t CommandBufferStateCount = static_cast(CommandBufferState::EndEnum); - - enum class CommandBufferType - { - SingleTime = 0, - LongTime, - - EndEnum - }; - constexpr std::size_t CommandBufferTypeCount = static_cast(CommandBufferType::EndEnum); + constexpr std::size_t ImageTypeCount = static_cast(ImageType::EndEnum); } #endif diff --git a/runtime/Includes/Renderer/Image.h b/runtime/Includes/Renderer/Image.h new file mode 100644 index 0000000..2c3809e --- /dev/null +++ b/runtime/Includes/Renderer/Image.h @@ -0,0 +1,102 @@ +#ifndef __MLX_IMAGE__ +#define __MLX_IMAGE__ + +#include +#include +#include +#include +#include + +namespace mlx +{ + class Image + { + public: + Image() = default; + + inline void Init(VkImage image, VkFormat format, std::uint32_t width, std::uint32_t height, VkImageLayout layout = VK_IMAGE_LAYOUT_UNDEFINED) noexcept + { + m_image = image; + m_format = format; + m_width = width; + m_height = height; + m_layout = layout; + } + + void Init(ImageType type, std::uint32_t width, std::uint32_t height, VkFormat format, VkImageTiling tiling, VkImageUsageFlags usage, VkMemoryPropertyFlags properties, bool is_multisampled = false); + void CreateImageView(VkImageViewType type, VkImageAspectFlags aspectFlags, int layer_count = 1) noexcept; + void CreateSampler() noexcept; + void TransitionLayout(VkImageLayout new_layout, VkCommandBuffer cmd = VK_NULL_HANDLE); + void Clear(VkCommandBuffer cmd, Vec4f color); + + void DestroySampler() noexcept; + void DestroyImageView() noexcept; + virtual void Destroy() noexcept; + + [[nodiscard]] MLX_FORCEINLINE VkImage Get() const noexcept { return m_image; } + [[nodiscard]] MLX_FORCEINLINE VkImage operator()() const noexcept { return m_image; } + [[nodiscard]] MLX_FORCEINLINE VkDeviceMemory GetDeviceMemory() const noexcept { return m_memory.memory; } + [[nodiscard]] MLX_FORCEINLINE VkImageView GetImageView() const noexcept { return m_image_view; } + [[nodiscard]] MLX_FORCEINLINE VkFormat GetFormat() const noexcept { return m_format; } + [[nodiscard]] MLX_FORCEINLINE VkImageTiling GetTiling() const noexcept { return m_tiling; } + [[nodiscard]] MLX_FORCEINLINE VkImageLayout GetLayout() const noexcept { return m_layout; } + [[nodiscard]] MLX_FORCEINLINE VkSampler GetSampler() const noexcept { return m_sampler; } + [[nodiscard]] MLX_FORCEINLINE std::uint32_t GetWidth() const noexcept { return m_width; } + [[nodiscard]] MLX_FORCEINLINE std::uint32_t GetHeight() const noexcept { return m_height; } + [[nodiscard]] MLX_FORCEINLINE bool IsInit() 
const noexcept { return m_image != VK_NULL_HANDLE; } + [[nodiscard]] MLX_FORCEINLINE ImageType GetType() const noexcept { return m_type; } + + virtual ~Image() = default; + + private: + VmaAllocation m_allocation; + VkImage m_image = VK_NULL_HANDLE; + VkImageView m_image_view = VK_NULL_HANDLE; + VkSampler m_sampler = VK_NULL_HANDLE; + VkFormat m_format; + VkImageTiling m_tiling; + VkImageLayout m_layout = VK_IMAGE_LAYOUT_UNDEFINED; + ImageType m_type; + std::uint32_t m_width = 0; + std::uint32_t m_height = 0; + bool m_is_multisampled = false; + }; + + class Texture : public Image + { + public: + Texture() = default; + Texture(CPUBuffer pixels, std::uint32_t width, std::uint32_t height, VkFormat format = VK_FORMAT_R8G8B8A8_SRGB, bool is_multisampled = false) + { + Init(std::move(pixels), width, height, format, is_multisampled); + } + inline void Init(CPUBuffer pixels, std::uint32_t width, std::uint32_t height, VkFormat format = VK_FORMAT_R8G8B8A8_SRGB, bool is_multisampled = false) + { + Image::Init(ImageType::Color, width, height, format, VK_IMAGE_TILING_OPTIMAL, VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT, is_multisampled); + Image::CreateImageView(VK_IMAGE_VIEW_TYPE_2D, VK_IMAGE_ASPECT_COLOR_BIT); + Image::CreateSampler(); + if(pixels) + { + TransitionLayout(VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL); + GPUBuffer staging_buffer; + std::size_t size = width * height * kvfFormatSize(format); + staging_buffer.Init(BufferType::Staging, size, VK_BUFFER_USAGE_TRANSFER_SRC_BIT, pixels); + VkCommandBuffer cmd = kvfCreateCommandBuffer(RenderCore::Get().GetDevice()); + kvfBeginCommandBuffer(cmd, VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT); + kvfCopyBufferToImage(cmd, Image::Get(), staging_buffer.Get(), staging_buffer.GetOffset(), VK_IMAGE_ASPECT_COLOR_BIT, { width, height, 1 }); + vkEndCommandBuffer(cmd); + VkFence fence = kvfCreateFence(RenderCore::Get().GetDevice()); + kvfSubmitSingleTimeCommandBuffer(RenderCore::Get().GetDevice(), cmd, KVF_GRAPHICS_QUEUE, fence); + kvfDestroyFence(RenderCore::Get().GetDevice(), fence); + staging_buffer.Destroy(); + } + if(!pixels) + TransitionLayout(VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL); + else + TransitionLayout(VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL); + } + ~Texture() override { Destroy(); } + }; +} + +#endif diff --git a/runtime/Includes/Renderer/Images/Image.h b/runtime/Includes/Renderer/Images/Image.h deleted file mode 100644 index e86b12d..0000000 --- a/runtime/Includes/Renderer/Images/Image.h +++ /dev/null @@ -1,84 +0,0 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* Image.h :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2023/01/25 11:54:21 by maldavid #+# #+# */ -/* Updated: 2024/07/05 13:40:51 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - -#ifndef __MLX_VK_IMAGE__ -#define __MLX_VK_IMAGE__ - -#include -#include -#include - -namespace mlx -{ - std::uint32_t FormatSize(VkFormat format); - bool IsStencilFormat(VkFormat format); - bool IsDepthFormat(VkFormat format); - VkFormat BitsToFormat(std::uint32_t bits); - VkPipelineStageFlags LayoutToAccessMask(VkImageLayout layout, bool is_destination); - - class Image : public CommandResource - { - friend class Swapchain; - - public: - Image() = default; - - inline void Create(VkImage image, VkFormat format, std::uint32_t 
width, std::uint32_t height, VkImageLayout layout = VK_IMAGE_LAYOUT_UNDEFINED) noexcept - { - m_image = image; - m_format = format; - m_width = width; - m_height = height; - m_layout = layout; - } - void Create(std::uint32_t width, std::uint32_t height, VkFormat format, VkImageTiling tiling, VkImageUsageFlags usage, const char* name, bool decated_memory = false); - void CreateImageView(VkImageViewType type, VkImageAspectFlags aspect_flags) noexcept; - void CreateSampler() noexcept; - void CopyFromBuffer(class Buffer& buffer); - void CopyToBuffer(class Buffer& buffer); - void TransitionLayout(VkImageLayout new_layout, NonOwningPtr cmd = nullptr); - virtual void Destroy() noexcept; - - inline VkImage Get() noexcept { return m_image; } - inline VkImage operator()() noexcept { return m_image; } - inline VkImageView GetImageView() const noexcept { return m_image_view; } - inline VkFormat GetFormat() const noexcept { return m_format; } - inline VkImageTiling GetTiling() const noexcept { return m_tiling; } - inline VkImageLayout GetLayout() const noexcept { return m_layout; } - inline VkSampler GetSampler() const noexcept { return m_sampler; } - inline std::uint32_t GetWidth() const noexcept { return m_width; } - inline std::uint32_t GetHeight() const noexcept { return m_height; } - inline bool IsInit() const noexcept { return m_image != VK_NULL_HANDLE; } - - virtual ~Image() = default; - - private: - void DestroySampler() noexcept; - void DestroyImageView() noexcept; - - private: - VmaAllocation m_allocation; - VkImage m_image = VK_NULL_HANDLE; - VkImageView m_image_view = VK_NULL_HANDLE; - VkSampler m_sampler = VK_NULL_HANDLE; - #ifdef DEBUG - std::string m_name; - #endif - VkFormat m_format; - VkImageTiling m_tiling; - VkImageLayout m_layout = VK_IMAGE_LAYOUT_UNDEFINED; - std::uint32_t m_width = 0; - std::uint32_t m_height = 0; - }; -} - -#endif diff --git a/runtime/Includes/Renderer/Images/Texture.h b/runtime/Includes/Renderer/Images/Texture.h deleted file mode 100644 index 5cc468d..0000000 --- a/runtime/Includes/Renderer/Images/Texture.h +++ /dev/null @@ -1,63 +0,0 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* Texture.h :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2023/03/08 02:24:58 by maldavid #+# #+# */ -/* Updated: 2024/07/05 13:42:41 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - -#ifndef __MLX_TEXTURE__ -#define __MLX_TEXTURE__ - -#include -#include -#include -#include - -namespace mlx -{ - class Texture : public Image - { - public: - Texture() = default; - - void Create(std::uint8_t* pixels, std::uint32_t width, std::uint32_t height, VkFormat format, const char* name, bool dedicated_memory = false); - void Render(class Renderer& renderer, int x, int y); - void Destroy() noexcept override; - - void SetPixel(int x, int y, std::uint32_t color) noexcept; - int GetPixel(int x, int y) noexcept; - - inline void SetDescriptor(DescriptorSet&& set) noexcept { m_set = set; } - inline VkDescriptorSet GetSet() noexcept { return m_set.IsInit() ? 
m_set.Get() : VK_NULL_HANDLE; } - inline void UpdateSet(int binding) noexcept { m_set.WriteDescriptor(binding, *this); m_has_set_been_updated = true; } - inline bool HasBeenUpdated() const noexcept { return m_has_set_been_updated; } - inline constexpr void ResetUpdate() noexcept { m_has_set_been_updated = false; } - - ~Texture() = default; - - private: - void OpenCPUmap(); - - private: - ConstantVertexBuffer m_vbo; - ConstantIndexBuffer m_ibo; - #ifdef DEBUG - std::string m_name; - #endif - DescriptorSet m_set; - std::vector m_cpu_map; - std::optional m_buf_map = std::nullopt; - void* m_map = nullptr; - bool m_has_been_modified = false; - bool m_has_set_been_updated = false; - }; - - Texture* StbTextureLoad(std::filesystem::path file, int* w, int* h); -} - -#endif diff --git a/runtime/Includes/Renderer/Images/TextureAtlas.h b/runtime/Includes/Renderer/Images/TextureAtlas.h deleted file mode 100644 index 669097f..0000000 --- a/runtime/Includes/Renderer/Images/TextureAtlas.h +++ /dev/null @@ -1,43 +0,0 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* TextureAtlas.h :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2023/04/07 16:36:33 by maldavid #+# #+# */ -/* Updated: 2024/07/05 13:52:40 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - -#ifndef __MLX_TEXTURE_ATLAS__ -#define __MLX_TEXTURE_ATLAS__ - -#include - -namespace mlx -{ - class TextureAtlas : public Image - { - public: - TextureAtlas() = default; - - void Create(std::uint8_t* pixels, std::uint32_t width, std::uint32_t height, VkFormat format, const char* name, bool dedicated_memory = false); - void Render(class Renderer& renderer, int x, int y, std::uint32_t ibo_size) const; - void Destroy() noexcept override; - - inline void SetDescriptor(DescriptorSet&& set) noexcept { m_set = set; } - inline DescriptorSet GetSet() noexcept { return m_set; } - inline void UpdateSet(int binding) noexcept { m_set.WriteDescriptor(binding, *this); m_has_been_updated = true; } - inline bool HasBeenUpdated() const noexcept { return m_has_been_updated; } - inline constexpr void ResetUpdate() noexcept { m_has_been_updated = false; } - - ~TextureAtlas() = default; - - private: - DescriptorSet m_set; - bool m_has_been_updated = false; - }; -} - -#endif diff --git a/runtime/Includes/Renderer/Images/TextureDescriptor.h b/runtime/Includes/Renderer/Images/TextureDescriptor.h deleted file mode 100644 index 7a9706b..0000000 --- a/runtime/Includes/Renderer/Images/TextureDescriptor.h +++ /dev/null @@ -1,59 +0,0 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* TextureDescriptor.h :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2024/01/11 01:00:13 by maldavid #+# #+# */ -/* Updated: 2024/04/23 22:08:02 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - -#ifndef __MLX_TEXTURE_DESCRIPTOR__ -#define __MLX_TEXTURE_DESCRIPTOR__ - -#include -#include -#include - -namespace mlx -{ - struct TextureRenderDescriptor : public DrawableResource - { - NonOwningPtr texture; - int x; - int y; - - TextureRenderDescriptor(NonOwningPtr _texture, int _x, int _y) : texture(_texture), x(_x), y(_y) {} - inline bool operator==(const TextureRenderDescriptor& rhs) const { return texture == rhs.texture && x == rhs.x 
&& y == rhs.y; } - inline void Render(class Renderer& renderer) override - { - if(!texture->IsInit()) - return; - texture->Render(renderer, x, y); - } - inline void ResetUpdate() override - { - if(!texture->IsInit()) - return; - texture->ResetUpdate(); - } - }; -} - -namespace std -{ - template <> - struct hash - { - std::size_t operator()(const mlx::TextureRenderDescriptor& d) const noexcept - { - std::size_t hash = 0; - mlx::HashCombine(hash, d.texture, d.x, d.y); - return hash; - } - }; -} - -#endif diff --git a/runtime/Includes/Renderer/Images/TextureRegistry.h b/runtime/Includes/Renderer/Images/TextureRegistry.h deleted file mode 100644 index c1248e9..0000000 --- a/runtime/Includes/Renderer/Images/TextureRegistry.h +++ /dev/null @@ -1,39 +0,0 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* TextureRegistry.h :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2024/01/11 00:56:15 by maldavid #+# #+# */ -/* Updated: 2024/07/05 13:43:48 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - -#ifndef __MLX_TEXTURE_MANAGER__ -#define __MLX_TEXTURE_MANAGER__ - -#include - -namespace mlx -{ - class TextureRegistry - { - public: - TextureRegistry() = default; - - inline void Clear(); - inline std::pair, bool> RegisterTexture(NonOwningPtr texture, int x, int y); - inline bool IsTextureKnown(NonOwningPtr texture) noexcept; - inline void EraseTextures(NonOwningPtr texture); - - ~TextureRegistry() = default; - - private: - std::unordered_set m_texture_descriptors; - }; -} - -#include - -#endif diff --git a/runtime/Includes/Renderer/Images/TextureRegistry.inl b/runtime/Includes/Renderer/Images/TextureRegistry.inl deleted file mode 100644 index 5a8986b..0000000 --- a/runtime/Includes/Renderer/Images/TextureRegistry.inl +++ /dev/null @@ -1,52 +0,0 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* TextureRegistry.inl :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2024/04/23 22:08:46 by maldavid #+# #+# */ -/* Updated: 2024/04/23 22:11:09 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - -#pragma once -#include - -namespace mlx -{ - void TextureRegistry::Clear() - { - m_texture_descriptors.clear(); - } - - std::pair, bool> TextureRegistry::RegisterTexture(NonOwningPtr texture, int x, int y) - { - MLX_PROFILE_FUNCTION(); - auto res = m_texture_descriptors.emplace(texture, x, y); - return std::make_pair(static_cast(&const_cast(*res.first)), res.second); - } - - bool TextureRegistry::IsTextureKnown(NonOwningPtr texture) noexcept - { - MLX_PROFILE_FUNCTION(); - for(const auto& desc : m_texture_descriptors) - { - if(desc.texture == texture) - return true; - } - return false; - } - - void TextureRegistry::EraseTextures(NonOwningPtr texture) - { - MLX_PROFILE_FUNCTION(); - for(auto it = m_texture_descriptors.begin(); it != m_texture_descriptors.end();) - { - if(it->texture == texture) - it = m_texture_descriptors.erase(it); - else - ++it; - } - } -} diff --git a/runtime/Includes/Renderer/Core/Memory.h b/runtime/Includes/Renderer/Memory.h similarity index 50% rename from runtime/Includes/Renderer/Core/Memory.h rename to runtime/Includes/Renderer/Memory.h index a29be0b..2470630 100644 --- a/runtime/Includes/Renderer/Core/Memory.h +++ 
b/runtime/Includes/Renderer/Memory.h @@ -1,24 +1,12 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* Memory.h :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2023/10/20 02:13:03 by maldavid #+# #+# */ -/* Updated: 2024/03/27 22:49:57 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - #ifndef __MLX_VK_MEMORY__ #define __MLX_VK_MEMORY__ namespace mlx { - class GPUallocator + class GPUAllocator { public: - GPUallocator() = default; + GPUAllocator() = default; void Init() noexcept; void Destroy() noexcept; @@ -36,7 +24,7 @@ namespace mlx void Flush(VmaAllocation allocation, VkDeviceSize size, VkDeviceSize offset) noexcept; - ~GPUallocator() = default; + ~GPUAllocator() = default; private: VmaAllocator m_allocator; diff --git a/runtime/Includes/Renderer/Pipelines/Graphics.h b/runtime/Includes/Renderer/Pipelines/Graphics.h new file mode 100644 index 0000000..027aaf4 --- /dev/null +++ b/runtime/Includes/Renderer/Pipelines/Graphics.h @@ -0,0 +1,57 @@ +#ifndef __MLX_GRAPHICS_PIPELINE__ +#define __MLX_GRAPHICS_PIPELINE__ + +#include +#include +#include +#include + +namespace mlx +{ + struct GraphicPipelineDescriptor + { + std::shared_ptr vertex_shader; + std::shared_ptr fragment_shader; + std::vector> color_attachments; + NonOwningPtr renderer = nullptr; + bool clear_color_attachments = true; + bool no_vertex_inputs = false; + }; + + class GraphicPipeline : public Pipeline + { + public: + GraphicPipeline() = default; + + void Init(const GraphicPipelineDescriptor& descriptor); + bool BindPipeline(VkCommandBuffer command_buffer, std::size_t framebuffer_index, std::array clear) noexcept; + void EndPipeline(VkCommandBuffer command_buffer) noexcept override; + void Destroy() noexcept; + + [[nodiscard]] inline VkPipeline GetPipeline() const override { return m_pipeline; } + [[nodiscard]] inline VkPipelineLayout GetPipelineLayout() const override { return m_pipeline_layout; } + [[nodiscard]] inline VkPipelineBindPoint GetPipelineBindPoint() const override { return VK_PIPELINE_BIND_POINT_GRAPHICS; } + + ~GraphicPipeline() = default; + + private: + void CreateFramebuffers(const std::vector>& render_targets, bool clear_attachments); + void TransitionAttachments(VkCommandBuffer cmd = VK_NULL_HANDLE); + + // Private override to remove access + bool BindPipeline(VkCommandBuffer) noexcept override { return false; }; + + private: + std::vector> m_attachments; + std::vector m_framebuffers; + std::vector m_clears; + std::shared_ptr p_vertex_shader; + std::shared_ptr p_fragment_shader; + VkRenderPass m_renderpass = VK_NULL_HANDLE; + VkPipeline m_pipeline = VK_NULL_HANDLE; + VkPipelineLayout m_pipeline_layout = VK_NULL_HANDLE; + NonOwningPtr p_renderer; + }; +} + +#endif diff --git a/runtime/Includes/Renderer/Pipelines/Pipeline.h b/runtime/Includes/Renderer/Pipelines/Pipeline.h index 7cd8ffd..126fb34 100644 --- a/runtime/Includes/Renderer/Pipelines/Pipeline.h +++ b/runtime/Includes/Renderer/Pipelines/Pipeline.h @@ -1,36 +1,23 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* Pipeline.h :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2022/12/18 21:23:52 by maldavid #+# #+# */ -/* Updated: 2024/03/28 22:15:38 by maldavid ### ########.fr */ -/* */ -/* 
************************************************************************** */ +#ifndef __MLX_PIPELINE__ +#define __MLX_PIPELINE__ -#ifndef __PIPELINE__ -#define __PIPELINE__ - -#include +#include namespace mlx { - class GraphicPipeline + class Pipeline { public: - void init(class Renderer& renderer); - void Destroy() noexcept; + Pipeline() = default; - inline void BindPipeline(CommandBuffer& command_buffer) noexcept { vkCmdBindPipeline(command_buffer.Get(), VK_PIPELINE_BIND_POINT_GRAPHICS, m_graphics_pipeline); } + inline virtual bool BindPipeline(VkCommandBuffer command_buffer) noexcept { vkCmdBindPipeline(command_buffer, GetPipelineBindPoint(), GetPipeline()); return true; } + inline virtual void EndPipeline([[maybe_unused]] VkCommandBuffer command_buffer) noexcept {} - inline const VkPipeline& GetPipeline() const noexcept { return m_graphics_pipeline; } - inline const VkPipelineLayout& GetPipelineLayout() const noexcept { return m_pipeline_layout; } + virtual VkPipeline GetPipeline() const = 0; + virtual VkPipelineLayout GetPipelineLayout() const = 0; + virtual VkPipelineBindPoint GetPipelineBindPoint() const = 0; - private: - VkPipeline m_graphics_pipeline = VK_NULL_HANDLE; - VkPipelineLayout m_pipeline_layout = VK_NULL_HANDLE; + virtual ~Pipeline() = default; }; } diff --git a/runtime/Includes/Renderer/Pipelines/Shader.h b/runtime/Includes/Renderer/Pipelines/Shader.h new file mode 100644 index 0000000..d1c59db --- /dev/null +++ b/runtime/Includes/Renderer/Pipelines/Shader.h @@ -0,0 +1,68 @@ +#ifndef __MLX_SHADER__ +#define __MLX_SHADER__ + +namespace mlx +{ + struct ShaderSetLayout + { + std::vector > binds; + + ShaderSetLayout(std::vector > b) : binds(std::move(b)) {} + }; + + struct ShaderPushConstantLayout + { + std::size_t offset; + std::size_t size; + + ShaderPushConstantLayout(std::size_t o, std::size_t s) : offset(o), size(s) {} + }; + + struct ShaderLayout + { + std::vector > set_layouts; + std::vector push_constants; + + ShaderLayout(std::vector > s, std::vector pc) : set_layouts(std::move(s)), push_constants(std::move(pc)) {} + }; + + enum class ShaderType + { + Vertex, + Fragment, + Compute + }; + + struct ShaderPipelineLayoutPart + { + std::vector push_constants; + std::vector set_layouts; + }; + + class Shader + { + public: + Shader(const std::vector& bytecode, ShaderType type, ShaderLayout layout); + + [[nodiscard]] inline const ShaderLayout& GetShaderLayout() const { return m_layout; } + [[nodiscard]] inline const std::vector& GetByteCode() const noexcept { return m_bytecode; } + [[nodiscard]] inline const ShaderPipelineLayoutPart& GetPipelineLayout() const noexcept { return m_pipeline_layout_part; } + [[nodiscard]] inline VkShaderModule GetShaderModule() const noexcept { return m_module; } + [[nodiscard]] inline VkShaderStageFlagBits GetShaderStage() const noexcept { return m_stage; } + + ~Shader(); + + private: + void GeneratePipelineLayout(ShaderLayout layout); + + private: + ShaderLayout m_layout; + ShaderPipelineLayoutPart m_pipeline_layout_part; + std::vector m_bytecode; + std::vector m_set_layouts; + VkShaderStageFlagBits m_stage; + VkShaderModule m_module = VK_NULL_HANDLE; + }; +} + +#endif diff --git a/runtime/Includes/Renderer/PixelPut.h b/runtime/Includes/Renderer/PixelPut.h deleted file mode 100644 index c16d7c9..0000000 --- a/runtime/Includes/Renderer/PixelPut.h +++ /dev/null @@ -1,48 +0,0 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* PixelPut.h :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: 
maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2023/03/31 13:18:50 by maldavid #+# #+# */ -/* Updated: 2024/04/24 01:46:11 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - -#ifndef __MLX_PIXEL_PUT__ -#define __MLX_PIXEL_PUT__ - -#include -#include - -namespace mlx -{ - class PixelPutPipeline - { - public: - PixelPutPipeline() = default; - - void Init(std::uint32_t width, std::uint32_t height, class Renderer& renderer) noexcept; - - void SetPixel(int x, int y, std::uint32_t color) noexcept; - void Render(class Renderer& renderer) noexcept; - - void Clear(); - void Destroy() noexcept; - - ~PixelPutPipeline() = default; - - private: - Texture m_texture; - Buffer m_buffer; - // using vector as CPU map and not directly writting to mapped buffer to improve performances - std::vector m_cpu_map; - void* m_buffer_map = nullptr; - std::uint32_t m_width = 0; - std::uint32_t m_height = 0; - bool m_has_been_modified = true; - }; -} - -#endif diff --git a/runtime/Includes/Renderer/RenderCore.h b/runtime/Includes/Renderer/RenderCore.h new file mode 100644 index 0000000..01218a3 --- /dev/null +++ b/runtime/Includes/Renderer/RenderCore.h @@ -0,0 +1,39 @@ +#ifndef __MLX_RENDER_CORE__ +#define __MLX_RENDER_CORE__ + +#include + +namespace mlx +{ + constexpr const int MAX_FRAMES_IN_FLIGHT = 3; + + class RenderCore : public Singleton + { + friend class Singleton; + + public: + void Init() noexcept; + void Destroy() noexcept; + + [[nodiscard]] MLX_FORCEINLINE VkInstance GetInstance() const noexcept { return m_instance; } + [[nodiscard]] MLX_FORCEINLINE VkInstance& GetInstanceRef() noexcept { return m_instance; } + [[nodiscard]] MLX_FORCEINLINE VkDevice GetDevice() const noexcept { return m_device; } + [[nodiscard]] MLX_FORCEINLINE VkPhysicalDevice GetPhysicalDevice() const noexcept { return m_physical_device; } + [[nodiscard]] MLX_FORCEINLINE GPUAllocator& GetAllocator() noexcept { return m_allocator; } + + + inline void WaitDeviceIdle() const noexcept { vkDeviceWaitIdle(m_device); } + + private: + RenderCore() = default; + ~RenderCore() = default; + + private: + GPUAllocator m_allocator; + VkInstance m_instance = VK_NULL_HANDLE; + VkDevice m_device = VK_NULL_HANDLE; + VkPhysicalDevice m_physical_device = VK_NULL_HANDLE; + }; +} + +#endif diff --git a/runtime/Includes/Renderer/RenderPasses/2DPass.h b/runtime/Includes/Renderer/RenderPasses/2DPass.h new file mode 100644 index 0000000..eef1282 --- /dev/null +++ b/runtime/Includes/Renderer/RenderPasses/2DPass.h @@ -0,0 +1,29 @@ +#ifndef __MLX_2D_PASS__ +#define __MLX_2D_PASS__ + +#include +#include +#include + +namespace mlx +{ + class Render2DPass + { + public: + Render2DPass() = default; + void Init(); + void Pass(class Scene& scene, class Renderer& renderer, class Texture& render_target); + void Destroy(); + ~Render2DPass() = default; + + private: + GraphicPipeline m_pipeline; + std::shared_ptr p_viewer_data_set; + std::shared_ptr p_viewer_data_buffer; + std::shared_ptr p_texture_set; + std::shared_ptr p_vertex_shader; + std::shared_ptr p_fragment_shader; + }; +} + +#endif diff --git a/runtime/Includes/Renderer/RenderPasses/FinalPass.h b/runtime/Includes/Renderer/RenderPasses/FinalPass.h new file mode 100644 index 0000000..93a71cf --- /dev/null +++ b/runtime/Includes/Renderer/RenderPasses/FinalPass.h @@ -0,0 +1,27 @@ +#ifndef __MLX_FINAL_PASS__ +#define __MLX_FINAL_PASS__ + +#include +#include +#include + +namespace mlx +{ + class FinalPass + { + public: + FinalPass() = default; 
+ void Init(); + void Pass(class Scene& scene, class Renderer& renderer, class Texture& render_target); + void Destroy(); + ~FinalPass() = default; + + private: + GraphicPipeline m_pipeline; + std::shared_ptr p_set; + std::shared_ptr p_vertex_shader; + std::shared_ptr p_fragment_shader; + }; +} + +#endif diff --git a/runtime/Includes/Renderer/RenderPasses/Passes.h b/runtime/Includes/Renderer/RenderPasses/Passes.h new file mode 100644 index 0000000..7800912 --- /dev/null +++ b/runtime/Includes/Renderer/RenderPasses/Passes.h @@ -0,0 +1,26 @@ +#ifndef __MLX_PASSES__ +#define __MLX_PASSES__ + +#include +#include +#include + +namespace mlx +{ + class RenderPasses + { + public: + RenderPasses() = default; + void Init(); + void Pass(class Scene& scene, class Renderer& renderer); + void Destroy(); + ~RenderPasses() = default; + + private: + Render2DPass m_2Dpass; + FinalPass m_final; + Texture m_main_render_texture; + }; +} + +#endif diff --git a/runtime/Includes/Renderer/Renderer.h b/runtime/Includes/Renderer/Renderer.h index a7ff20c..9502fbe 100644 --- a/runtime/Includes/Renderer/Renderer.h +++ b/runtime/Includes/Renderer/Renderer.h @@ -1,31 +1,11 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* Renderer.h :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2022/12/18 17:14:45 by maldavid #+# #+# */ -/* Updated: 2024/07/05 13:40:20 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ +#ifndef __MLX_RENDERER__ +#define __MLX_RENDERER__ -#ifndef __RENDERER__ -#define __RENDERER__ - -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include +#include +#include +#include +#include +#include namespace mlx { @@ -34,57 +14,48 @@ namespace mlx public: Renderer() = default; - void Init(NonOwningPtr render_target); + void Init(NonOwningPtr window); bool BeginFrame(); void EndFrame(); - void Destroy(); + [[nodiscard]] inline VkSwapchainKHR GetSwapchain() const noexcept { return m_swapchain; } + [[nodiscard]] inline VkSurfaceKHR GetSurface() const noexcept { return m_surface; } + [[nodiscard]] inline VkSemaphore GetImageAvailableSemaphore(int index) const noexcept { return m_image_available_semaphores[index]; } + [[nodiscard]] inline VkSemaphore GetRenderFinishedSemaphore(int index) const noexcept { return m_render_finished_semaphores[index]; } + [[nodiscard]] inline VkCommandBuffer GetCommandBuffer(int index) const noexcept { return m_cmd_buffers[index]; } + [[nodiscard]] inline VkCommandBuffer GetActiveCommandBuffer() const noexcept { return m_cmd_buffers[m_current_frame_index]; } + [[nodiscard]] inline const std::vector& GetSwapchainImages() const { return m_swapchain_images; } + [[nodiscard]] inline std::size_t& GetDrawCallsCounterRef() noexcept { return m_drawcalls; } + [[nodiscard]] inline std::size_t& GetPolygonDrawnCounterRef() noexcept { return m_polygons_drawn; } + [[nodiscard]] inline std::size_t GetSwapchainImageIndex() const noexcept { return m_swapchain_image_index; } + [[nodiscard]] inline std::size_t GetCurrentFrameIndex() const noexcept { return m_current_frame_index; } + [[nodiscard]] inline NonOwningPtr GetWindow() const noexcept { return m_window_ptr; } - inline NonOwningPtr GetWindow() { return m_window; } - inline void SetWindow(NonOwningPtr window) { m_window = window; } + MLX_FORCEINLINE constexpr void RequireFramebufferResize() 
noexcept { m_framebuffers_resize = true; } - inline Surface& GetSurface() noexcept { return m_surface; } - inline NonOwningPtr GetUniformBuffer() noexcept { return m_uniform_buffer.get(); } - inline SwapChain& GetSwapChain() noexcept { return m_swapchain; } - inline RenderPass& GetRenderPass() noexcept { return m_pass; } - inline GraphicPipeline& GetPipeline() noexcept { return m_pipeline; } - inline CommandBuffer& GetCmdBuffer(int i) noexcept { return m_cmd.GetCmdBuffer(i); } - inline CommandBuffer& GetActiveCmdBuffer() noexcept { return m_cmd.GetCmdBuffer(m_current_frame_index); } - inline FrameBuffer& GetFrameBuffer(int i) noexcept { return m_framebuffers[i]; } - inline DescriptorSet& GetVertDescriptorSet() noexcept { return m_vert_set; } - inline DescriptorSet& GetFragDescriptorSet() noexcept { return m_frag_set; } - inline std::uint32_t GetActiveImageIndex() noexcept { return m_current_frame_index; } - inline std::uint32_t GetImageIndex() noexcept { return m_image_index; } - - constexpr inline void RequireFrameBufferResize() noexcept { m_framebuffer_resized = true; } + void Destroy() noexcept; ~Renderer() = default; private: - void RecreateRenderData(); + void CreateSwapchain(); + void DestroySwapchain(); private: - GraphicPipeline m_pipeline; - CommandManager m_cmd; - RenderPass m_pass; - Surface m_surface; - SwapChain m_swapchain; - std::array m_render_finished_semaphores; - std::array m_image_available_semaphores; - std::vector m_framebuffers; - - DescriptorSet m_vert_set; - DescriptorSet m_frag_set; - - std::unique_ptr m_uniform_buffer; - - NonOwningPtr m_window; - NonOwningPtr m_render_target; - + std::array m_image_available_semaphores; + std::array m_render_finished_semaphores; + std::array m_cmd_buffers; + std::array m_cmd_fences; + std::vector m_swapchain_images; + NonOwningPtr p_window; + VkSurfaceKHR m_surface = VK_NULL_HANDLE; + VkSwapchainKHR m_swapchain = VK_NULL_HANDLE; std::uint32_t m_current_frame_index = 0; - std::uint32_t m_image_index = 0; - bool m_framebuffer_resized = false; + std::uint32_t m_swapchain_image_index = 0; + std::size_t m_drawcalls = 0; + std::size_t m_polygons_drawn = 0; + bool m_framebuffers_resize = false; }; } diff --git a/runtime/Includes/Renderer/Renderpass/FrameBuffer.h b/runtime/Includes/Renderer/Renderpass/FrameBuffer.h deleted file mode 100644 index ec6abe5..0000000 --- a/runtime/Includes/Renderer/Renderpass/FrameBuffer.h +++ /dev/null @@ -1,36 +0,0 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* FrameBuffer.h :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2022/10/06 18:19:44 by maldavid #+# #+# */ -/* Updated: 2024/03/28 22:16:02 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - -#ifndef __MLX_VK_FRAMEBUFFER__ -#define __MLX_VK_FRAMEBUFFER__ - -namespace mlx -{ - class FrameBuffer - { - public: - void Init(class RenderPass& renderpass, class Image& image); - void Destroy() noexcept; - - inline VkFramebuffer& operator()() noexcept { return m_framebuffer; } - inline VkFramebuffer& Get() noexcept { return m_framebuffer; } - inline std::uint32_t GetWidth() const noexcept { return m_width; } - inline std::uint32_t GetHeight() const noexcept { return m_height; } - - private: - VkFramebuffer m_framebuffer = VK_NULL_HANDLE; - std::uint32_t m_width = 0; - std::uint32_t m_height = 0; - }; -} - -#endif // __MLX_VK_FRAMEBUFFER__ diff --git 
a/runtime/Includes/Renderer/Renderpass/RenderPass.h b/runtime/Includes/Renderer/Renderpass/RenderPass.h deleted file mode 100644 index 18c6c03..0000000 --- a/runtime/Includes/Renderer/Renderpass/RenderPass.h +++ /dev/null @@ -1,36 +0,0 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* RenderPass.h :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2022/10/06 18:22:00 by maldavid #+# #+# */ -/* Updated: 2024/03/28 22:16:44 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - -#ifndef __MLX_VK_RENDER_PASS__ -#define __MLX_VK_RENDER_PASS__ - -namespace mlx -{ - class RenderPass - { - public: - void Init(VkFormat attachement_format, VkImageLayout layout); - void Destroy() noexcept; - - void Begin(class CmommandBuffer& cmd, class FrameBuffer& fb); - void End(class CommandBuffer& cmd); - - inline VkRenderPass& operator()() noexcept { return m_render_pass; } - inline VkRenderPass& Get() noexcept { return m_render_pass; } - - private: - VkRenderPass m_render_pass = VK_NULL_HANDLE; - bool m_is_running = false; - }; -} - -#endif // __MLX_VK_RENDER_PASS__ diff --git a/runtime/Includes/Renderer/Renderpass/Swapchain.h b/runtime/Includes/Renderer/Renderpass/Swapchain.h deleted file mode 100644 index f9c8054..0000000 --- a/runtime/Includes/Renderer/Renderpass/Swapchain.h +++ /dev/null @@ -1,65 +0,0 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* Swapchain.h :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2022/10/06 18:23:27 by maldavid #+# #+# */ -/* Updated: 2024/03/28 22:18:15 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - -#ifndef __MLX_VK_SWAPCHAIN__ -#define __MLX_VK_SWAPCHAIN__ - -#include - -namespace mlx -{ - class SwapChain - { - friend class GraphicPipeline; - friend class RenderPass; - friend class Renderer; - - public: - struct SwapChainSupportDetails - { - VkSurfaceCapabilitiesKHR capabilities; - std::vector formats; - std::vector present_modes; - }; - - public: - SwapChain() = default; - - void Init(NonOwningPtr renderer); - void Recreate(); - void Destroy() noexcept; - - SwapChainSupportDetails QuerySwapChainSupport(VkPhysicalDevice device); - VkExtent2D ChooseSwapExtent(const VkSurfaceCapabilitiesKHR& capabilities); - VkPresentModeKHR ChooseSwapPresentMode([[maybe_unused]] const std::vector &available_present_modes); - - inline VkSwapchainKHR Get() noexcept { return m_swapchain; } - inline VkSwapchainKHR operator()() noexcept { return m_swapchain; } - inline std::size_t GetImagesNumber() const noexcept { return m_images.size(); } - inline Image& GetImage(std::size_t i) noexcept { return m_images[i]; } - inline SwapChainSupportDetails GetSupport() noexcept { return m_swapchain_support; } - inline VkExtent2D GetExtent() noexcept { return m_extent; } - inline VkFormat GetImagesFormat() const noexcept { return m_swapchain_image_format; } - - ~SwapChain() = default; - - private: - SwapChainSupportDetails m_swapchain_support; - VkSwapchainKHR m_swapchain; - std::vector m_images; - VkFormat m_swapchain_image_format; - VkExtent2D m_extent; - NonOwningPtr m_renderer; - }; -} - -#endif // __MLX_VK_SWAPCHAIN__ diff --git a/runtime/Includes/Renderer/ScenesRenderer.h b/runtime/Includes/Renderer/ScenesRenderer.h new file mode 
100644 index 0000000..03c391e --- /dev/null +++ b/runtime/Includes/Renderer/ScenesRenderer.h @@ -0,0 +1,22 @@ +#ifndef __MLX_SCENES_RENDERER__ +#define __MLX_SCENES_RENDERER__ + +#include + +namespace mlx +{ + class SceneRenderer + { + public: + SceneRenderer() = default; + void Init(); + void Render(class Scene& scene, class Renderer& renderer); // TODO : add RTT support + void Destroy(); + ~SceneRenderer() = default; + + private: + RenderPasses m_passes; + }; +} + +#endif diff --git a/runtime/Includes/Renderer/Texts/Font.h b/runtime/Includes/Renderer/Texts/Font.h deleted file mode 100644 index 91b6c4c..0000000 --- a/runtime/Includes/Renderer/Texts/Font.h +++ /dev/null @@ -1,55 +0,0 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* Font.h :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2023/12/11 21:17:04 by kbz_8 #+# #+# */ -/* Updated: 2024/07/05 13:53:11 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - -#ifndef __MLX_FONT__ -#define __MLX_FONT__ - -#include -#include - -namespace mlx -{ - class Font - { - friend class FontLibrary; - - public: - Font() = delete; - Font(class Renderer& renderer, const std::filesystem::path& path, float scale); - Font(class Renderer& renderer, const std::string& name, const std::vector& ttf_data, float scale); - - inline const std::string& GetName() const { return m_name; } - inline float GetScale() const noexcept { return m_scale; } - inline const std::array& GetCharData() const { return m_cdata; } - inline const TextureAtlas& GetAtlas() const noexcept { return m_atlas; } - inline bool operator==(const Font& rhs) const { return rhs.m_name == m_name && rhs.m_scale == m_scale; } - inline bool operator!=(const Font& rhs) const { return rhs.m_name != m_name || rhs.m_scale != m_scale; } - - void Destroy(); - - ~Font(); - - private: - void BuildFont(); - - private: - std::array m_cdata; - TextureAtlas m_atlas; - std::variant> m_build_data; - std::string m_name; - class Renderer& m_renderer; - float m_scale = 0; - bool m_is_init = false; - }; -} - -#endif diff --git a/runtime/Includes/Renderer/Texts/FontLibrary.h b/runtime/Includes/Renderer/Texts/FontLibrary.h deleted file mode 100644 index 433c5f5..0000000 --- a/runtime/Includes/Renderer/Texts/FontLibrary.h +++ /dev/null @@ -1,47 +0,0 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* FontLibrary.h :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2024/01/18 09:26:03 by maldavid #+# #+# */ -/* Updated: 2024/03/28 22:21:53 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - -#ifndef __MLX_FONT_LIBRARY__ -#define __MLX_FONT_LIBRARY__ - -#include -#include -#include - -namespace mlx -{ - using FontID = std::uint32_t; - constexpr FontID nullfont = 0; - - class FontLibrary : public Singleton - { - friend class Singleton; - - public: - std::shared_ptr GetFontData(FontID id); - FontID AddFontToLibrary(std::shared_ptr font); - void RemoveFontFromLibrary(FontID id); - - void ClearLibrary(); - - private: - FontLibrary() = default; - ~FontLibrary() = default; - - private: - std::unordered_map> m_cache; - std::vector m_invalid_ids; - FontID m_current_id = 1; - }; -} - -#endif diff --git a/runtime/Includes/Renderer/Texts/Text.h 
b/runtime/Includes/Renderer/Texts/Text.h deleted file mode 100644 index 07b6d10..0000000 --- a/runtime/Includes/Renderer/Texts/Text.h +++ /dev/null @@ -1,49 +0,0 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* Text.h :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2024/01/11 00:09:04 by maldavid #+# #+# */ -/* Updated: 2024/03/28 22:23:50 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - -#ifndef __MLX_TEXT__ -#define __MLX_TEXT__ - -#include -#include -#include -#include - -namespace mlx -{ - class Text - { - public: - Text() = default; - - void Init(std::string text, FontID font, std::uint32_t color, std::vector vbo_data, std::vector ibo_data); - void Bind(class Renderer& renderer) noexcept; - inline FontID GetFontInUse() const noexcept { return m_font; } - void UpdateVertexData(int frame, std::vector vbo_data); - inline std::uint32_t GetIBOsize() noexcept { return m_ibo.GetSize(); } - inline const std::string& GetText() const { return m_text; } - inline std::uint32_t GetColor() const noexcept { return m_color; } - void Destroy() noexcept; - - ~Text(); - - private: - std::array m_vbo; - ConstantIndexBuffer m_ibo; - std::string m_text; - std::uint32_t m_color; - FontID m_font = nullfont; - bool m_is_init = false; - }; -} - -#endif diff --git a/runtime/Includes/Renderer/Texts/TextDescriptor.h b/runtime/Includes/Renderer/Texts/TextDescriptor.h deleted file mode 100644 index 4b5f72a..0000000 --- a/runtime/Includes/Renderer/Texts/TextDescriptor.h +++ /dev/null @@ -1,62 +0,0 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* TextDescriptor.h :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2024/01/11 00:13:34 by maldavid #+# #+# */ -/* Updated: 2024/07/05 14:03:43 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - -#ifndef __MLX_TEXT_DESCRIPTOR__ -#define __MLX_TEXT_DESCRIPTOR__ - -#include -#include -#include -#include - -namespace mlx -{ - class TextDrawDescriptor : public DrawableResource - { - friend class std::hash; - - public: - TextID id; - std::uint32_t color; - int x; - int y; - - public: - TextDrawDescriptor(std::string text, std::uint32_t _color, int _x, int _y); - - void Init(FontID font) noexcept; - bool operator==(const TextDrawDescriptor& rhs) const { return m_text == rhs.m_text && x == rhs.x && y == rhs.y && color == rhs.color; } - void Render(Renderer& renderer) override; - void ResetUpdate() override; - - TextDrawDescriptor() = default; - - private: - std::string m_text; - }; -} - -namespace std -{ - template <> - struct hash - { - std::size_t operator()(const mlx::TextDrawDescriptor& d) const noexcept - { - std::size_t hash = 0; - mlx::HashCombine(hash, d.x, d.y, d.color, d.m_text); - return hash; - } - }; -} - -#endif diff --git a/runtime/Includes/Renderer/Texts/TextLibrary.h b/runtime/Includes/Renderer/Texts/TextLibrary.h deleted file mode 100644 index 63385ea..0000000 --- a/runtime/Includes/Renderer/Texts/TextLibrary.h +++ /dev/null @@ -1,48 +0,0 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* TextLibrary.h :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2023/04/10 11:52:30 by 
maldavid #+# #+# */ -/* Updated: 2024/03/28 22:26:10 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - -#ifndef __MLX_TEXT_LIBRARY__ -#define __MLX_TEXT_LIBRARY__ - -#include -#include -#include -#include -#include - -namespace mlx -{ - using TextID = std::uint32_t; - constexpr TextID nulltext = 0; - - class TextLibrary : public Singleton - { - friend class Singleton; - - public: - std::shared_ptr GetTextData(TextID id); - TextID AddTextToLibrary(std::shared_ptr text); - void RemoveTextFromLibrary(TextID id); - - void ClearLibrary(); - - private: - TextLibrary() = default; - ~TextLibrary() = default; - - private: - std::unordered_map> m_cache; - TextID m_current_id = 1; - }; -} - -#endif diff --git a/runtime/Includes/Renderer/Texts/TextManager.h b/runtime/Includes/Renderer/Texts/TextManager.h deleted file mode 100644 index a2d57db..0000000 --- a/runtime/Includes/Renderer/Texts/TextManager.h +++ /dev/null @@ -1,43 +0,0 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* TextManager.h :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2023/04/06 16:24:11 by maldavid #+# #+# */ -/* Updated: 2024/03/28 22:27:32 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - -#ifndef __MLX_TEXT_MANAGER__ -#define __MLX_TEXT_MANAGER__ - -#include -#include -#include -#include -#include - -namespace mlx -{ - class TextManager - { - public: - TextManager() = default; - - void Init(Renderer& renderer) noexcept; - std::pair, bool> RegisterText(int x, int y, std::uint32_t color, std::string str); - inline void Clear() { m_text_descriptors.clear(); } - void LoadFont(Renderer& renderer, const std::filesystem::path& filepath, float scale); - void Destroy() noexcept; - - ~TextManager() = default; - - private: - std::unordered_set m_text_descriptors; - FontID m_font_in_use = nullfont; - }; -} - -#endif diff --git a/runtime/Includes/Renderer/Vertex.h b/runtime/Includes/Renderer/Vertex.h index 285825e..bcc501e 100644 --- a/runtime/Includes/Renderer/Vertex.h +++ b/runtime/Includes/Renderer/Vertex.h @@ -1,60 +1,25 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* Vertex.h :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2024/04/23 22:24:33 by maldavid #+# #+# */ -/* Updated: 2024/04/23 22:25:01 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ +#ifndef __MLX_VERTEX__ +#define __MLX_VERTEX__ -#ifndef __MLX_RENDERER_VERTEX__ -#define __MLX_RENDERER_VERTEX__ +#include +#include namespace mlx { struct Vertex { - glm::vec2 pos; - glm::vec4 color; - glm::vec2 uv; + alignas(16) Vec2f position = Vec4f{ 0.0f, 0.0f }; + alignas(16) Vec4f color = Vec4f{ 1.0f, 1.0f, 1.0f, 1.0f }; + alignas(16) Vec2f uv = Vec2f{ 0.0f, 0.0f }; - Vertex(glm::vec2 _pos, glm::vec4 _color, glm::vec2 _uv) : pos(std::move(_pos)), color(std::move(_color)), uv(std::move(_uv)) {} + Vertex() = default; + Vertex(Vec2f p, Vec4f c, Vec2f u) : position(std::move(p)), color(std::move(c)), uv(std::move(u)) {} - static VkVertexInputBindingDescription GetBindingDescription() - { - VkVertexInputBindingDescription binding_description{}; - binding_description.binding = 0; - binding_description.stride = sizeof(Vertex); - 
binding_description.inputRate = VK_VERTEX_INPUT_RATE_VERTEX; - - return binding_description; - } - - static std::array GetAttributeDescriptions() - { - std::array attribute_descriptions; - - attribute_descriptions[0].binding = 0; - attribute_descriptions[0].location = 0; - attribute_descriptions[0].format = VK_FORMAT_R32G32_SFLOAT; - attribute_descriptions[0].offset = offsetof(Vertex, pos); - - attribute_descriptions[1].binding = 0; - attribute_descriptions[1].location = 1; - attribute_descriptions[1].format = VK_FORMAT_R32G32B32A32_SFLOAT; - attribute_descriptions[1].offset = offsetof(Vertex, color); - - attribute_descriptions[2].binding = 0; - attribute_descriptions[2].location = 2; - attribute_descriptions[2].format = VK_FORMAT_R32G32_SFLOAT; - attribute_descriptions[2].offset = offsetof(Vertex, uv); - - return attribute_descriptions; - } + [[nodiscard]] inline static VkVertexInputBindingDescription GetBindingDescription(); + [[nodiscard]] inline static std::array GetAttributeDescriptions(); }; } +#include + #endif diff --git a/runtime/Includes/Renderer/Vertex.inl b/runtime/Includes/Renderer/Vertex.inl new file mode 100644 index 0000000..0c6b9ea --- /dev/null +++ b/runtime/Includes/Renderer/Vertex.inl @@ -0,0 +1,36 @@ +#pragma once +#include + +namespace mlx +{ + VkVertexInputBindingDescription Vertex::GetBindingDescription() + { + VkVertexInputBindingDescription binding_description{}; + binding_description.binding = 0; + binding_description.stride = sizeof(Vertex); + binding_description.inputRate = VK_VERTEX_INPUT_RATE_VERTEX; + return binding_description; + } + + std::array Vertex::GetAttributeDescriptions() + { + std::array attribute_descriptions; + + attribute_descriptions[0].binding = 0; + attribute_descriptions[0].location = 0; + attribute_descriptions[0].format = VK_FORMAT_R32G32_SFLOAT; + attribute_descriptions[0].offset = offsetof(Vertex, position); + + attribute_descriptions[1].binding = 0; + attribute_descriptions[1].location = 1; + attribute_descriptions[1].format = VK_FORMAT_R32G32B32A32_SFLOAT; + attribute_descriptions[1].offset = offsetof(Vertex, color); + + attribute_descriptions[2].binding = 0; + attribute_descriptions[2].location = 2; + attribute_descriptions[2].format = VK_FORMAT_R32G32_SFLOAT; + attribute_descriptions[2].offset = offsetof(Vertex, uv); + + return attribute_descriptions; + } +} diff --git a/runtime/Includes/Renderer/ViewerData.h b/runtime/Includes/Renderer/ViewerData.h new file mode 100644 index 0000000..e9685b6 --- /dev/null +++ b/runtime/Includes/Renderer/ViewerData.h @@ -0,0 +1,14 @@ +#ifndef __MLX_VIEWER_DATA__ +#define __MLX_VIEWER_DATA__ + +#include + +namespace mlx +{ + struct ViewerData + { + Mat4f projection_matrix; + }; +} + +#endif diff --git a/runtime/Includes/Renderer/Vulkan/VulkanPrototypes.h b/runtime/Includes/Renderer/Vulkan/VulkanPrototypes.h new file mode 100644 index 0000000..fd96c65 --- /dev/null +++ b/runtime/Includes/Renderer/Vulkan/VulkanPrototypes.h @@ -0,0 +1,170 @@ +#ifndef __SCOP_VK_PROTOTYPES__ +#define __SCOP_VK_PROTOTYPES__ + +#if defined(VULKAN_H_) && !defined(VK_NO_PROTOTYPES) + #error "define VK_NO_PROTOTYPES needed" +#endif + +#ifndef VK_NO_PROTOTYPES + #define VK_NO_PROTOTYPES +#endif + +#ifndef VULKAN_H_ + #include +#endif + +#if defined(VK_VERSION_1_0) + extern PFN_vkAllocateCommandBuffers vkAllocateCommandBuffers; + extern PFN_vkAllocateDescriptorSets vkAllocateDescriptorSets; + extern PFN_vkAllocateMemory vkAllocateMemory; + extern PFN_vkBeginCommandBuffer vkBeginCommandBuffer; + extern PFN_vkBindBufferMemory 
vkBindBufferMemory; + extern PFN_vkBindImageMemory vkBindImageMemory; + extern PFN_vkCmdBeginQuery vkCmdBeginQuery; + extern PFN_vkCmdBeginRenderPass vkCmdBeginRenderPass; + extern PFN_vkCmdBindDescriptorSets vkCmdBindDescriptorSets; + extern PFN_vkCmdBindIndexBuffer vkCmdBindIndexBuffer; + extern PFN_vkCmdBindPipeline vkCmdBindPipeline; + extern PFN_vkCmdBindVertexBuffers vkCmdBindVertexBuffers; + extern PFN_vkCmdBlitImage vkCmdBlitImage; + extern PFN_vkCmdClearAttachments vkCmdClearAttachments; + extern PFN_vkCmdClearColorImage vkCmdClearColorImage; + extern PFN_vkCmdClearDepthStencilImage vkCmdClearDepthStencilImage; + extern PFN_vkCmdCopyBuffer vkCmdCopyBuffer; + extern PFN_vkCmdCopyBufferToImage vkCmdCopyBufferToImage; + extern PFN_vkCmdCopyImage vkCmdCopyImage; + extern PFN_vkCmdCopyImageToBuffer vkCmdCopyImageToBuffer; + extern PFN_vkCmdCopyQueryPoolResults vkCmdCopyQueryPoolResults; + extern PFN_vkCmdDispatch vkCmdDispatch; + extern PFN_vkCmdDispatchIndirect vkCmdDispatchIndirect; + extern PFN_vkCmdDraw vkCmdDraw; + extern PFN_vkCmdDrawIndexed vkCmdDrawIndexed; + extern PFN_vkCmdDrawIndexedIndirect vkCmdDrawIndexedIndirect; + extern PFN_vkCmdDrawIndirect vkCmdDrawIndirect; + extern PFN_vkCmdEndQuery vkCmdEndQuery; + extern PFN_vkCmdEndRenderPass vkCmdEndRenderPass; + extern PFN_vkCmdExecuteCommands vkCmdExecuteCommands; + extern PFN_vkCmdFillBuffer vkCmdFillBuffer; + extern PFN_vkCmdNextSubpass vkCmdNextSubpass; + extern PFN_vkCmdPipelineBarrier vkCmdPipelineBarrier; + extern PFN_vkCmdPushConstants vkCmdPushConstants; + extern PFN_vkCmdResetEvent vkCmdResetEvent; + extern PFN_vkCmdResetQueryPool vkCmdResetQueryPool; + extern PFN_vkCmdResolveImage vkCmdResolveImage; + extern PFN_vkCmdSetBlendConstants vkCmdSetBlendConstants; + extern PFN_vkCmdSetDepthBias vkCmdSetDepthBias; + extern PFN_vkCmdSetDepthBounds vkCmdSetDepthBounds; + extern PFN_vkCmdSetEvent vkCmdSetEvent; + extern PFN_vkCmdSetLineWidth vkCmdSetLineWidth; + extern PFN_vkCmdSetScissor vkCmdSetScissor; + extern PFN_vkCmdSetStencilCompareMask vkCmdSetStencilCompareMask; + extern PFN_vkCmdSetStencilReference vkCmdSetStencilReference; + extern PFN_vkCmdSetStencilWriteMask vkCmdSetStencilWriteMask; + extern PFN_vkCmdSetViewport vkCmdSetViewport; + extern PFN_vkCmdUpdateBuffer vkCmdUpdateBuffer; + extern PFN_vkCmdWaitEvents vkCmdWaitEvents; + extern PFN_vkCmdWriteTimestamp vkCmdWriteTimestamp; + extern PFN_vkCreateBuffer vkCreateBuffer; + extern PFN_vkCreateBufferView vkCreateBufferView; + extern PFN_vkCreateCommandPool vkCreateCommandPool; + extern PFN_vkCreateComputePipelines vkCreateComputePipelines; + extern PFN_vkCreateDescriptorPool vkCreateDescriptorPool; + extern PFN_vkCreateDescriptorSetLayout vkCreateDescriptorSetLayout; + extern PFN_vkCreateDevice vkCreateDevice; + extern PFN_vkCreateEvent vkCreateEvent; + extern PFN_vkCreateFence vkCreateFence; + extern PFN_vkCreateFramebuffer vkCreateFramebuffer; + extern PFN_vkCreateGraphicsPipelines vkCreateGraphicsPipelines; + extern PFN_vkCreateImage vkCreateImage; + extern PFN_vkCreateImageView vkCreateImageView; + extern PFN_vkCreateInstance vkCreateInstance; + extern PFN_vkCreatePipelineCache vkCreatePipelineCache; + extern PFN_vkCreatePipelineLayout vkCreatePipelineLayout; + extern PFN_vkCreateQueryPool vkCreateQueryPool; + extern PFN_vkCreateRenderPass vkCreateRenderPass; + extern PFN_vkCreateSampler vkCreateSampler; + extern PFN_vkCreateSemaphore vkCreateSemaphore; + extern PFN_vkCreateShaderModule vkCreateShaderModule; + extern PFN_vkDestroyBuffer vkDestroyBuffer; + 
extern PFN_vkDestroyBufferView vkDestroyBufferView; + extern PFN_vkDestroyCommandPool vkDestroyCommandPool; + extern PFN_vkDestroyDescriptorPool vkDestroyDescriptorPool; + extern PFN_vkDestroyDescriptorSetLayout vkDestroyDescriptorSetLayout; + extern PFN_vkDestroyDevice vkDestroyDevice; + extern PFN_vkDestroyEvent vkDestroyEvent; + extern PFN_vkDestroyFence vkDestroyFence; + extern PFN_vkDestroyFramebuffer vkDestroyFramebuffer; + extern PFN_vkDestroyImage vkDestroyImage; + extern PFN_vkDestroyImageView vkDestroyImageView; + extern PFN_vkDestroyInstance vkDestroyInstance; + extern PFN_vkDestroyPipeline vkDestroyPipeline; + extern PFN_vkDestroyPipelineCache vkDestroyPipelineCache; + extern PFN_vkDestroyPipelineLayout vkDestroyPipelineLayout; + extern PFN_vkDestroyQueryPool vkDestroyQueryPool; + extern PFN_vkDestroyRenderPass vkDestroyRenderPass; + extern PFN_vkDestroySampler vkDestroySampler; + extern PFN_vkDestroySemaphore vkDestroySemaphore; + extern PFN_vkDestroyShaderModule vkDestroyShaderModule; + extern PFN_vkDeviceWaitIdle vkDeviceWaitIdle; + extern PFN_vkEndCommandBuffer vkEndCommandBuffer; + extern PFN_vkEnumerateDeviceExtensionProperties vkEnumerateDeviceExtensionProperties; + extern PFN_vkEnumerateDeviceLayerProperties vkEnumerateDeviceLayerProperties; + extern PFN_vkEnumerateInstanceExtensionProperties vkEnumerateInstanceExtensionProperties; + extern PFN_vkEnumerateInstanceLayerProperties vkEnumerateInstanceLayerProperties; + extern PFN_vkEnumeratePhysicalDevices vkEnumeratePhysicalDevices; + extern PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges; + extern PFN_vkFreeCommandBuffers vkFreeCommandBuffers; + extern PFN_vkFreeDescriptorSets vkFreeDescriptorSets; + extern PFN_vkFreeMemory vkFreeMemory; + extern PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements; + extern PFN_vkGetDeviceMemoryCommitment vkGetDeviceMemoryCommitment; + extern PFN_vkGetDeviceProcAddr vkGetDeviceProcAddr; + extern PFN_vkGetDeviceQueue vkGetDeviceQueue; + extern PFN_vkGetEventStatus vkGetEventStatus; + extern PFN_vkGetFenceStatus vkGetFenceStatus; + extern PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements; + extern PFN_vkGetImageSparseMemoryRequirements vkGetImageSparseMemoryRequirements; + extern PFN_vkGetImageSubresourceLayout vkGetImageSubresourceLayout; + extern PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr; + extern PFN_vkGetPhysicalDeviceFeatures vkGetPhysicalDeviceFeatures; + extern PFN_vkGetPhysicalDeviceFormatProperties vkGetPhysicalDeviceFormatProperties; + extern PFN_vkGetPhysicalDeviceImageFormatProperties vkGetPhysicalDeviceImageFormatProperties; + extern PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties; + extern PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties; + extern PFN_vkGetPhysicalDeviceQueueFamilyProperties vkGetPhysicalDeviceQueueFamilyProperties; + extern PFN_vkGetPhysicalDeviceSparseImageFormatProperties vkGetPhysicalDeviceSparseImageFormatProperties; + extern PFN_vkGetPipelineCacheData vkGetPipelineCacheData; + extern PFN_vkGetQueryPoolResults vkGetQueryPoolResults; + extern PFN_vkGetRenderAreaGranularity vkGetRenderAreaGranularity; + extern PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges; + extern PFN_vkMapMemory vkMapMemory; + extern PFN_vkMergePipelineCaches vkMergePipelineCaches; + extern PFN_vkQueueBindSparse vkQueueBindSparse; + extern PFN_vkQueueSubmit vkQueueSubmit; + extern PFN_vkQueueWaitIdle vkQueueWaitIdle; + extern PFN_vkResetCommandBuffer vkResetCommandBuffer; + extern 
PFN_vkResetCommandPool vkResetCommandPool; + extern PFN_vkResetDescriptorPool vkResetDescriptorPool; + extern PFN_vkResetEvent vkResetEvent; + extern PFN_vkResetFences vkResetFences; + extern PFN_vkSetEvent vkSetEvent; + extern PFN_vkUnmapMemory vkUnmapMemory; + extern PFN_vkUpdateDescriptorSets vkUpdateDescriptorSets; + extern PFN_vkWaitForFences vkWaitForFences; +#endif /* defined(VK_VERSION_1_0) */ +#if defined(VK_KHR_swapchain) + extern PFN_vkAcquireNextImageKHR vkAcquireNextImageKHR; + extern PFN_vkCreateSwapchainKHR vkCreateSwapchainKHR; + extern PFN_vkDestroySwapchainKHR vkDestroySwapchainKHR; + extern PFN_vkGetSwapchainImagesKHR vkGetSwapchainImagesKHR; + extern PFN_vkQueuePresentKHR vkQueuePresentKHR; +#endif /* defined(VK_KHR_swapchain) */ +#if defined(VK_KHR_surface) + extern PFN_vkDestroySurfaceKHR vkDestroySurfaceKHR; + extern PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR vkGetPhysicalDeviceSurfaceCapabilitiesKHR; + extern PFN_vkGetPhysicalDeviceSurfaceFormatsKHR vkGetPhysicalDeviceSurfaceFormatsKHR; + extern PFN_vkGetPhysicalDeviceSurfacePresentModesKHR vkGetPhysicalDeviceSurfacePresentModesKHR; + extern PFN_vkGetPhysicalDeviceSurfaceSupportKHR vkGetPhysicalDeviceSurfaceSupportKHR; +#endif /* defined(VK_KHR_surface) */ + +#endif diff --git a/runtime/Includes/Utils/Ansi.h b/runtime/Includes/Utils/Ansi.h index 42eedeb..a299d74 100644 --- a/runtime/Includes/Utils/Ansi.h +++ b/runtime/Includes/Utils/Ansi.h @@ -1,15 +1,3 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* Ansi.h :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2024/03/27 17:22:41 by maldavid #+# #+# */ -/* Updated: 2024/03/27 17:23:34 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - #ifndef __MLX_ANSI__ #define __MLX_ANSI__ diff --git a/runtime/Includes/Utils/Buffer.h b/runtime/Includes/Utils/Buffer.h new file mode 100644 index 0000000..fe95d25 --- /dev/null +++ b/runtime/Includes/Utils/Buffer.h @@ -0,0 +1,41 @@ +#ifndef __MLX_CPU_BUFFER__ +#define __MLX_CPU_BUFFER__ + +namespace mlx +{ + class CPUBuffer + { + public: + CPUBuffer() {} + CPUBuffer(std::size_t size) try : m_data(new std::uint8_t[size]), m_size(size) + {} + catch(...) 
+ { + FatalError("memory allocation for a CPU buffer failed"); + } + + [[nodiscard]] inline CPUBuffer Duplicate() const + { + CPUBuffer buffer(m_size); + std::memcpy(buffer.GetData(), m_data.get(), m_size); + return buffer; + } + + inline bool Empty() const { return m_size == 0; } + + [[nodiscard]] inline std::size_t GetSize() const noexcept { return m_size; } + + template + [[nodiscard]] inline T* GetDataAs() const { return reinterpret_cast(m_data.get()); } + [[nodiscard]] inline std::uint8_t* GetData() const { return m_data.get(); } + inline operator bool() const { return (bool)m_data; } + + ~CPUBuffer() = default; + + private: + std::shared_ptr m_data; + std::size_t m_size = 0; + }; +} + +#endif diff --git a/runtime/Includes/Utils/CombineHash.h b/runtime/Includes/Utils/CombineHash.h index a11a35c..b334300 100644 --- a/runtime/Includes/Utils/CombineHash.h +++ b/runtime/Includes/Utils/CombineHash.h @@ -1,15 +1,3 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* CombineHash.h :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2023/12/14 16:16:06 by maldavid #+# #+# */ -/* Updated: 2024/03/27 21:59:30 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - #ifndef __MLX_HASH__ #define __MLX_HASH__ diff --git a/runtime/Includes/Utils/ConstMap.h b/runtime/Includes/Utils/ConstMap.h index a4e8c40..74666ae 100644 --- a/runtime/Includes/Utils/ConstMap.h +++ b/runtime/Includes/Utils/ConstMap.h @@ -1,15 +1,3 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* ConstMap.h :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2024/03/27 15:26:39 by maldavid #+# #+# */ -/* Updated: 2024/03/27 21:59:35 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - #ifndef __MLX_CONST_MAP__ #define __MLX_CONST_MAP__ diff --git a/runtime/Includes/Utils/NonCopyable.h b/runtime/Includes/Utils/NonCopyable.h index abc402a..aa9c806 100644 --- a/runtime/Includes/Utils/NonCopyable.h +++ b/runtime/Includes/Utils/NonCopyable.h @@ -1,15 +1,3 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* non_copyable.h :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2022/10/08 19:20:13 by maldavid #+# #+# */ -/* Updated: 2024/03/24 14:42:48 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - #ifndef __MLX_NON_COPYABLE__ #define __MLX_NON_COPYABLE__ diff --git a/runtime/Includes/Utils/NonOwningPtr.h b/runtime/Includes/Utils/NonOwningPtr.h index 2acc906..8ece694 100644 --- a/runtime/Includes/Utils/NonOwningPtr.h +++ b/runtime/Includes/Utils/NonOwningPtr.h @@ -1,15 +1,3 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* NonOwningPtr.h :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2024/03/27 21:03:37 by maldavid #+# #+# */ -/* Updated: 2024/04/21 20:21:56 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - #ifndef __MLX_NON_OWNING_PTR__ #define __MLX_NON_OWNING_PTR__ diff --git 
a/runtime/Includes/Utils/NonOwningPtr.inl b/runtime/Includes/Utils/NonOwningPtr.inl index b36a337..b35bee5 100644 --- a/runtime/Includes/Utils/NonOwningPtr.inl +++ b/runtime/Includes/Utils/NonOwningPtr.inl @@ -1,15 +1,4 @@ -/* **************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* NonOwningPtr.inl :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2024/03/27 21:04:00 by maldavid #+# #+# */ -/* Updated: 2024/03/27 21:04:00 by maldavid ### ########.fr */ -/* */ -/* **************************************************************************** */ - +#pragma once #include namespace mlx diff --git a/runtime/Includes/Utils/Singleton.h b/runtime/Includes/Utils/Singleton.h index 33cc136..e5e76ad 100644 --- a/runtime/Includes/Utils/Singleton.h +++ b/runtime/Includes/Utils/Singleton.h @@ -1,15 +1,3 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* Singleton.h :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2022/10/08 19:18:46 by maldavid #+# #+# */ -/* Updated: 2024/03/27 18:20:11 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - #ifndef __MLX_SINGLETON__ #define __MLX_SINGLETON__ diff --git a/runtime/Sources/Core/Application.cpp b/runtime/Sources/Core/Application.cpp index a097b9c..3becaf4 100644 --- a/runtime/Sources/Core/Application.cpp +++ b/runtime/Sources/Core/Application.cpp @@ -1,15 +1,3 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* Application.cpp :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2022/10/04 22:10:52 by maldavid #+# #+# */ -/* Updated: 2024/05/25 16:06:57 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - #include #include diff --git a/runtime/Sources/Core/Bridge.cpp b/runtime/Sources/Core/Bridge.cpp index 6a550bc..ca0ecef 100644 --- a/runtime/Sources/Core/Bridge.cpp +++ b/runtime/Sources/Core/Bridge.cpp @@ -1,15 +1,3 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* Bridge.cpp :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2022/10/04 17:35:20 by maldavid #+# #+# */ -/* Updated: 2024/04/23 14:44:27 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - #include #include diff --git a/runtime/Sources/Core/EventBus.cpp b/runtime/Sources/Core/EventBus.cpp index 550882b..721796a 100644 --- a/runtime/Sources/Core/EventBus.cpp +++ b/runtime/Sources/Core/EventBus.cpp @@ -1,15 +1,3 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* EventBus.cpp :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2024/03/27 17:36:05 by maldavid #+# #+# */ -/* Updated: 2024/03/27 17:37:01 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - #include #include #include diff --git a/runtime/Sources/Core/EventListener.cpp b/runtime/Sources/Core/EventListener.cpp index cb979e1..21540f5 100644 --- a/runtime/Sources/Core/EventListener.cpp +++ 
b/runtime/Sources/Core/EventListener.cpp @@ -1,15 +1,3 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* EventListener.cpp :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2024/03/27 17:37:09 by maldavid #+# #+# */ -/* Updated: 2024/03/27 17:37:38 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - #include #include diff --git a/runtime/Sources/Core/Fps.cpp b/runtime/Sources/Core/Fps.cpp index 59a0926..a464f67 100644 --- a/runtime/Sources/Core/Fps.cpp +++ b/runtime/Sources/Core/Fps.cpp @@ -1,15 +1,3 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* Fps.cpp :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2024/01/18 14:56:17 by maldavid #+# #+# */ -/* Updated: 2024/03/27 20:53:11 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - #include #include diff --git a/runtime/Sources/Core/Graphics.cpp b/runtime/Sources/Core/Graphics.cpp index b9ea623..52b4419 100644 --- a/runtime/Sources/Core/Graphics.cpp +++ b/runtime/Sources/Core/Graphics.cpp @@ -1,15 +1,3 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* Graphics.cpp :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2023/04/02 15:13:55 by maldavid #+# #+# */ -/* Updated: 2024/04/23 14:03:51 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - #include #include diff --git a/runtime/Sources/Core/Logs.cpp b/runtime/Sources/Core/Logs.cpp index b70ca49..ed15367 100644 --- a/runtime/Sources/Core/Logs.cpp +++ b/runtime/Sources/Core/Logs.cpp @@ -1,15 +1,3 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* Logs.cpp :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2024/03/27 17:20:55 by maldavid #+# #+# */ -/* Updated: 2024/07/05 13:31:02 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - #include #include diff --git a/runtime/Sources/Core/Memory.cpp b/runtime/Sources/Core/Memory.cpp index 670061b..36f7689 100644 --- a/runtime/Sources/Core/Memory.cpp +++ b/runtime/Sources/Core/Memory.cpp @@ -1,15 +1,3 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* Memory.cpp :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2023/12/07 16:32:01 by kbz_8 #+# #+# */ -/* Updated: 2024/04/23 14:05:52 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - #include #include diff --git a/runtime/Sources/Core/Profiler.cpp b/runtime/Sources/Core/Profiler.cpp index e811bc5..886668a 100644 --- a/runtime/Sources/Core/Profiler.cpp +++ b/runtime/Sources/Core/Profiler.cpp @@ -1,15 +1,3 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* Profiler.cpp :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2024/01/10 13:56:21 by maldavid #+# #+# */ -/* 
Updated: 2024/04/23 14:08:51 by maldavid ### ########.fr */
-/* */
-/* ************************************************************************** */
-
 #include 
 #include 
diff --git a/runtime/Sources/Core/SDLManager.cpp b/runtime/Sources/Core/SDLManager.cpp
index c0f27b9..c4ce300 100644
--- a/runtime/Sources/Core/SDLManager.cpp
+++ b/runtime/Sources/Core/SDLManager.cpp
@@ -1,15 +1,3 @@
-/* ************************************************************************** */
-/* */
-/* ::: :::::::: */
-/* SDLManager.cpp :+: :+: :+: */
-/* +:+ +:+ +:+ */
-/* By: maldavid +#+ +:+ +#+ */
-/* +#+#+#+#+#+ +#+ */
-/* Created: 2024/05/25 15:44:03 by maldavid #+# #+# */
-/* Updated: 2024/05/25 16:46:48 by maldavid ### ########.fr */
-/* */
-/* ************************************************************************** */
-
 #include 
 #include 
 #include 
@@ -52,14 +40,76 @@ namespace mlx
 		if(SDL_Init(SDL_INIT_VIDEO | SDL_INIT_EVENTS | SDL_INIT_TIMER) != 0)
 			FatalError("SDL : unable to init all subsystems; %", SDL_GetError());
+
+		struct WatcherData
+		{
+			func::function callback;
+			NonOwningPtr manager;
+			void* userdata;
+		};
+
+		// NOTE: the event watch keeps a pointer to this data after Init() returns, so it must
+		// outlive this function; making it static here is an assumption of this edit.
+		static WatcherData watcher_data;
+		watcher_data.callback = f_callback;
+		watcher_data.userdata = p_callback_data;
+
+		SDL_AddEventWatch([](void* userdata, SDL_Event* event) -> int
+		{
+			WatcherData* data = static_cast<WatcherData*>(userdata);
+
+			if(event->type == SDL_MOUSEMOTION)
+			{
+			}
+
+			std::uint32_t id = event->window.windowID;
+			if(events_hooks.find(id) == events_hooks.end())
+				return 0;
+			switch(event->type)
+			{
+				case SDL_KEYUP: data->callback(MLX_KEYUP, event->key.keysym.scancode, data->userdata); break;
+				case SDL_KEYDOWN: data->callback(MLX_KEYDOWN, event->key.keysym.scancode, data->userdata); break;
+				case SDL_MOUSEBUTTONUP: data->callback(MLX_MOUSEUP, event->button.button, data->userdata); break;
+				case SDL_MOUSEBUTTONDOWN: data->callback(MLX_MOUSEDOWN, event->button.button, data->userdata); break;
+				case SDL_MOUSEWHEEL:
+				{
+					if(event->wheel.y > 0) // scroll up
+						data->callback(MLX_MOUSEWHEEL, 1, data->userdata);
+					else if(event->wheel.y < 0) // scroll down
+						data->callback(MLX_MOUSEWHEEL, 2, data->userdata);
+					if(event->wheel.x > 0) // scroll right
+						data->callback(MLX_MOUSEWHEEL, 3, data->userdata);
+					else if(event->wheel.x < 0) // scroll left
+						data->callback(MLX_MOUSEWHEEL, 4, data->userdata);
+					break;
+				}
+				case SDL_WINDOWEVENT:
+				{
+					switch(event->window.event)
+					{
+						case SDL_WINDOWEVENT_CLOSE: data->callback(MLX_WINDOW_EVENT, 0, data->userdata); break;
+						case SDL_WINDOWEVENT_MOVED: data->callback(MLX_WINDOW_EVENT, 1, data->userdata); break;
+						case SDL_WINDOWEVENT_MINIMIZED: data->callback(MLX_WINDOW_EVENT, 2, data->userdata); break;
+						case SDL_WINDOWEVENT_MAXIMIZED: data->callback(MLX_WINDOW_EVENT, 3, data->userdata); break;
+						case SDL_WINDOWEVENT_ENTER: data->callback(MLX_WINDOW_EVENT, 4, data->userdata); break;
+						case SDL_WINDOWEVENT_FOCUS_GAINED: data->callback(MLX_WINDOW_EVENT, 5, data->userdata); break;
+						case SDL_WINDOWEVENT_LEAVE: data->callback(MLX_WINDOW_EVENT, 6, data->userdata); break;
+						case SDL_WINDOWEVENT_FOCUS_LOST: data->callback(MLX_WINDOW_EVENT, 7, data->userdata); break;
+
+						default : break;
+					}
+					break;
+				}
+
+				default: break;
+			}
+			return 0; // the return value of an event watch callback is ignored by SDL
+		}, &watcher_data);
 	}
 
-	void* SDLManager::CreateWindow(const std::string& title, std::size_t w, std::size_t h)
+	void* SDLManager::CreateWindow(const std::string& title, std::size_t w, std::size_t h, bool hidden)
 	{
 		details::WindowInfos* infos = new details::WindowInfos;
 		Verify(infos != nullptr, "SDL : window allocation failed");
-		infos->window = SDL_CreateWindow(title.c_str(), SDL_WINDOWPOS_CENTERED, SDL_WINDOWPOS_CENTERED, w, h, SDL_WINDOW_VULKAN | SDL_WINDOW_SHOWN);
+		infos->window = SDL_CreateWindow(title.c_str(), SDL_WINDOWPOS_CENTERED, SDL_WINDOWPOS_CENTERED, w, h, SDL_WINDOW_VULKAN | (hidden ? SDL_WINDOW_HIDDEN : SDL_WINDOW_SHOWN));
 		if(!infos->window)
 			FatalError("SDL : unable to open a new window; %", SDL_GetError());
 		infos->icon = SDL_CreateRGBSurfaceFrom(static_cast(logo_mlx), logo_mlx_width, logo_mlx_height, 32, 4 * logo_mlx_width, rmask, gmask, bmask, amask);
@@ -84,6 +134,34 @@ namespace mlx
 		delete infos;
 	}
 
+	VkSurfaceKHR SDLManager::CreateVulkanSurface(Handle window, VkInstance instance) const noexcept
+	{
+		VkSurfaceKHR surface;
+		if(!SDL_Vulkan_CreateSurface(static_cast<SDL_Window*>(window), instance, &surface))
+			FatalError("SDL : could not create a Vulkan surface; %", SDL_GetError());
+		return surface;
+	}
+
+	std::vector<const char*> SDLManager::GetRequiredVulkanInstanceExtentions(Handle window) const noexcept
+	{
+		std::uint32_t count;
+		if(!SDL_Vulkan_GetInstanceExtensions(static_cast<SDL_Window*>(window), &count, nullptr))
+			FatalError("Vulkan : cannot get instance extensions from window : %", SDL_GetError());
+
+		std::vector<const char*> extensions(count);
+
+		if(!SDL_Vulkan_GetInstanceExtensions(static_cast<SDL_Window*>(window), &count, extensions.data()))
+			FatalError("Vulkan : cannot get instance extensions from window : %", SDL_GetError());
+		return extensions;
+	}
+
+	Vec2ui SDLManager::GetVulkanDrawableSize(Handle window) const noexcept
+	{
+		Vec2ui extent;
+		SDL_Vulkan_GetDrawableSize(static_cast<SDL_Window*>(window), &extent.x, &extent.y);
+		return extent;
+	}
+
 	void SDLManager::Shutdown() noexcept
 	{
 		if(m_drop_sdl_responsability)
diff --git a/runtime/Sources/Core/UUID.cpp b/runtime/Sources/Core/UUID.cpp
index 96ae62f..f2da9c2 100644
--- a/runtime/Sources/Core/UUID.cpp
+++ b/runtime/Sources/Core/UUID.cpp
@@ -1,18 +1,6 @@
-/* ************************************************************************** */
-/* */
-/* ::: :::::::: */
-/* UUID.cpp :+: :+: :+: */
-/* +:+ +:+ +:+ */
-/* By: maldavid +#+ +:+ +#+ */
-/* +#+#+#+#+#+ +#+ */
-/* Created: 2024/01/06 11:26:37 by maldavid #+# #+# */
-/* Updated: 2024/04/23 14:09:35 by maldavid ### ########.fr */
-/* */
-/* ************************************************************************** */
-
 #include 
-#include 
+#include 
 
 namespace mlx
 {
diff --git a/runtime/Sources/Graphics/Mesh.cpp b/runtime/Sources/Graphics/Mesh.cpp
new file mode 100644
index 0000000..3f8221f
--- /dev/null
+++ b/runtime/Sources/Graphics/Mesh.cpp
@@ -0,0 +1,31 @@
+#include 
+#include 
+#include 
+
+namespace mlx
+{
+	void Mesh::Draw(VkCommandBuffer cmd, std::size_t& drawcalls, std::size_t& polygondrawn) const noexcept
+	{
+		for(std::size_t i = 0; i < m_sub_meshes.size(); i++)
+			Draw(cmd, drawcalls, polygondrawn, i);
+	}
+
+	void Mesh::Draw(VkCommandBuffer cmd, std::size_t& drawcalls, std::size_t& polygondrawn, std::size_t submesh_index) const noexcept
+	{
+		Verify(submesh_index < m_sub_meshes.size(), "invalid submesh index");
+		m_sub_meshes[submesh_index].vbo.Bind(cmd);
+		m_sub_meshes[submesh_index].ibo.Bind(cmd);
+		vkCmdDrawIndexed(cmd, static_cast<std::uint32_t>(m_sub_meshes[submesh_index].ibo.GetSize() / sizeof(std::uint32_t)), 1, 0, 0, 0);
+		polygondrawn += m_sub_meshes[submesh_index].triangle_count;
+		drawcalls++;
+	}
+
+	Mesh::~Mesh()
+	{
+		for(auto& mesh : m_sub_meshes)
+		{
+			mesh.vbo.Destroy();
+			mesh.ibo.Destroy();
+		}
+	}
+}
diff --git a/runtime/Sources/Graphics/Scene.cpp b/runtime/Sources/Graphics/Scene.cpp
new file mode 100644
index 0000000..547e641
--- /dev/null
+++ b/runtime/Sources/Graphics/Scene.cpp
@@ -0,0 +1,19 @@
+#include 
+#include 
+#include 
+#include 
+
+namespace mlx
+{
+	Scene::Scene(SceneDescriptor desc)
+	: m_descriptor(std::move(desc))
+	{
+	}
+
+	Sprite& Scene::CreateSprite(std::shared_ptr<Texture> texture) noexcept
+	{
+		std::shared_ptr<Sprite> sprite = std::make_shared<Sprite>(texture);
+		m_sprites.push_back(sprite);
+		return *sprite;
+	}
+}
diff --git a/runtime/Sources/Graphics/Sprite.cpp b/runtime/Sources/Graphics/Sprite.cpp
new file mode 100644
index 0000000..8a65924
--- /dev/null
+++ b/runtime/Sources/Graphics/Sprite.cpp
@@ -0,0 +1,44 @@
+#include 
+#include 
+#include 
+#include 
+
+namespace mlx
+{
+	std::shared_ptr<Mesh> CreateQuad(float x, float y, float width, float height)
+	{
+		std::vector data(4);
+
+		data[0].position = Vec4f(x, y, 0.0f, 1.0f);
+		data[0].uv = Vec2f(1.0f, 1.0f);
+
+		data[1].position = Vec4f(x + width, y, 0.0f, 1.0f);
+		data[1].uv = Vec2f(0.0f, 1.0f);
+
+		data[2].position = Vec4f(x + width, y + height, 0.0f, 1.0f);
+		data[2].uv = Vec2f(0.0f, 0.0f);
+
+		data[3].position = Vec4f(x, y + height, 0.0f, 1.0f);
+		data[3].uv = Vec2f(1.0f, 0.0f);
+
+		std::vector<std::uint32_t> indices = {
+			0,
+			1,
+			2,
+			2,
+			3,
+			0,
+		};
+
+		std::shared_ptr<Mesh> mesh = std::make_shared<Mesh>();
+		mesh->AddSubMesh({ std::move(data), std::move(indices) });
+		return mesh;
+	}
+
+	Sprite::Sprite(std::shared_ptr<Texture> texture)
+	{
+		Verify((bool)texture, "Sprite: invalid texture");
+		p_mesh = CreateQuad(0, 0, texture->GetWidth(), texture->GetHeight());
+		p_texture = texture;
+	}
+}
diff --git a/runtime/Sources/Platform/Inputs.cpp b/runtime/Sources/Platform/Inputs.cpp
index 2cc6ca6..770c59b 100644
--- a/runtime/Sources/Platform/Inputs.cpp
+++ b/runtime/Sources/Platform/Inputs.cpp
@@ -1,20 +1,6 @@
-/* ************************************************************************** */
-/* */
-/* ::: :::::::: */
-/* inputs.cpp :+: :+: :+: */
-/* +:+ +:+ +:+ */
-/* By: maldavid +#+ +:+ +#+ */
-/* +#+#+#+#+#+ +#+ */
-/* Created: 2022/10/05 16:30:19 by maldavid #+# #+# */
-/* Updated: 2024/03/27 15:50:07 by maldavid ### ########.fr */
-/* */
-/* ************************************************************************** */
+#include 
-#include 
-
-#include "inputs.h"
-#include 
-#include 
+#include 
 
 namespace mlx
 {
diff --git a/runtime/Sources/Platform/Window.cpp b/runtime/Sources/Platform/Window.cpp
index 66ae725..bc15aec 100644
--- a/runtime/Sources/Platform/Window.cpp
+++ b/runtime/Sources/Platform/Window.cpp
@@ -1,15 +1,3 @@
-/* ************************************************************************** */
-/* */
-/* ::: :::::::: */
-/* Window.cpp :+: :+: :+: */
-/* +:+ +:+ +:+ */
-/* By: maldavid +#+ +:+ +#+ */
-/* +#+#+#+#+#+ +#+ */
-/* Created: 2022/10/04 17:36:44 by maldavid #+# #+# */
-/* Updated: 2024/07/05 13:12:51 by maldavid ### ########.fr */
-/* */
-/* ************************************************************************** */
-
 #include 
 #include 
@@ -17,10 +5,10 @@ namespace mlx
 {
-	Window::Window(std::size_t w, std::size_t h, const std::string& title) : m_width(w), m_height(h)
+	Window::Window(std::size_t w, std::size_t h, const std::string& title, bool hidden) : m_width(w), m_height(h)
 	{
 		static std::uint64_t ids = 0;
-		p_window = SDLManager::Get().CreateWindow(title, w, h);
+		p_window = SDLManager::Get().CreateWindow(title, w, h, hidden);
 		m_id = ids++;
 	}
diff --git a/runtime/Sources/Renderer/Buffer.cpp b/runtime/Sources/Renderer/Buffer.cpp
new file mode 100644
index 0000000..f6feb3a
--- /dev/null
+++ b/runtime/Sources/Renderer/Buffer.cpp
@@ -0,0 +1,175 @@
+#include 
+#include 
+#include 
+
+namespace mlx
+{
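+	// Overview (inferred from the code below): BufferType selects how the buffer is allocated and used.
+	// Constant and LowDynamic buffers are written once through a host-visible mapping and then moved to
+	// GPU-only memory by PushToGPU(); HighDynamic buffers stay host-visible and mapped for frequent
+	// updates; Staging buffers only exist to feed a copy into another buffer.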
+	void GPUBuffer::Init(BufferType type, VkDeviceSize size, VkBufferUsageFlags usage, CPUBuffer data)
+	{
+		VmaAllocationCreateInfo alloc_info{};
+		alloc_info.flags = VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT;
+		alloc_info.usage = VMA_MEMORY_USAGE_AUTO;
+
+		if(type == BufferType::Constant)
+		{
+			if(data.Empty())
+			{
+				Warning("Vulkan : trying to create constant buffer without data (constant buffers cannot be modified after creation)");
+				return;
+			}
+			m_usage = usage | VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
+		}
+		else if(type == BufferType::HighDynamic)
+			m_usage = usage;
+		else // LowDynamic or Staging
+			m_usage = usage | VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
+
+		if(type == BufferType::Staging && data.Empty())
+			Warning("Vulkan : trying to create staging buffer without data (wtf?)");
+
+		CreateBuffer(size, m_usage, alloc_info);
+
+		if(!data.Empty())
+		{
+			if(p_map != nullptr)
+				std::memcpy(p_map, data.GetData(), data.GetSize());
+		}
+		if(type == BufferType::Constant || type == BufferType::LowDynamic)
+			PushToGPU();
+	}
+
+	void GPUBuffer::CreateBuffer(VkDeviceSize size, VkBufferUsageFlags usage, VmaAllocationCreateInfo alloc_info)
+	{
+		VkBufferCreateInfo bufferInfo{};
+		bufferInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
+		bufferInfo.size = size;
+		bufferInfo.usage = usage;
+		bufferInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
+
+		m_allocation = RenderCore::Get().GetAllocator().CreateBuffer(&bufferInfo, &alloc_info, m_buffer, nullptr);
+		if(alloc_info.flags != 0)
+			RenderCore::Get().GetAllocator().MapMemory(m_allocation, &p_map);
+	}
+
+	bool GPUBuffer::CopyFrom(const GPUBuffer& buffer) noexcept
+	{
+		if(!(m_usage & VK_BUFFER_USAGE_TRANSFER_DST_BIT))
+		{
+			Error("Vulkan : buffer cannot be the destination of a copy because it does not have the correct usage flag");
+			return false;
+		}
+		if(!(buffer.m_usage & VK_BUFFER_USAGE_TRANSFER_SRC_BIT))
+		{
+			Error("Vulkan : buffer cannot be the source of a copy because it does not have the correct usage flag");
+			return false;
+		}
+
+		VkCommandBuffer cmd = kvfCreateCommandBuffer(RenderCore::Get().GetDevice());
+		kvfBeginCommandBuffer(cmd, VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT);
+		kvfCopyBufferToBuffer(cmd, m_buffer, buffer.Get(), m_memory.size);
+		kvfEndCommandBuffer(cmd);
+		VkFence fence = kvfCreateFence(RenderCore::Get().GetDevice());
+		kvfSubmitSingleTimeCommandBuffer(RenderCore::Get().GetDevice(), cmd, KVF_GRAPHICS_QUEUE, fence);
+		kvfWaitForFence(RenderCore::Get().GetDevice(), fence);
+		kvfDestroyFence(RenderCore::Get().GetDevice(), fence);
+		return true;
+	}
+
+	void GPUBuffer::PushToGPU() noexcept
+	{
+		VmaAllocationCreateInfo alloc_info{};
+		alloc_info.usage = VMA_MEMORY_USAGE_AUTO_PREFER_DEVICE;
+
+		GPUBuffer new_buffer;
+		new_buffer.m_usage = (this->m_usage & 0xFFFFFFFC) | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
+		new_buffer.CreateBuffer(m_memory.size, new_buffer.m_usage, alloc_info);
+
+		if(new_buffer.CopyFrom(*this))
+			Swap(new_buffer);
+		new_buffer.Destroy();
+		DebugLog("Vulkan : pushed buffer to GPU memory");
+	}
+
+	void GPUBuffer::Destroy() noexcept
+	{
+		if(m_buffer == VK_NULL_HANDLE)
+			return;
+		RenderCore::Get().GetAllocator().UnmapMemory(m_allocation);
+		RenderCore::Get().GetAllocator().DestroyBuffer(m_allocation, m_buffer);
+		m_buffer = VK_NULL_HANDLE;
+	}
+
+	void GPUBuffer::Swap(GPUBuffer& buffer) noexcept
+	{
+		std::swap(m_buffer, buffer.m_buffer);
+		std::swap(m_allocation, buffer.m_allocation);
+		std::swap(m_size, buffer.m_size);
+		std::swap(m_offset, buffer.m_offset);
+		std::swap(p_map, buffer.p_map);
std::swap(m_usage, buffer.m_usage); + } + + void VertexBuffer::SetData(CPUBuffer data) + { + if(data.GetSize() > m_memory.size) + { + Error("Vulkan : trying to store to much data in a vertex buffer (% bytes in % bytes)", data.GetSize(), m_memory.size); + return; + } + if(data.Empty()) + { + Warning("Vulkan : cannot set empty data in a vertex buffer"); + return; + } + GPUBuffer staging; + staging.Init(BufferType::Staging, data.GetSize(), VK_BUFFER_USAGE_VERTEX_BUFFER_BIT, data); + CopyFrom(staging); + staging.Destroy(); + } + + void IndexBuffer::SetData(CPUBuffer data) + { + if(data.GetSize() > m_memory.size) + { + Error("Vulkan : trying to store to much data in an index buffer (% bytes in % bytes)", data.GetSize(), m_memory.size); + return; + } + if(data.Empty()) + { + Warning("Vulkan : cannot set empty data in an index buffer"); + return; + } + GPUBuffer staging; + staging.Init(BufferType::Staging, data.GetSize(), VK_BUFFER_USAGE_INDEX_BUFFER_BIT, data); + CopyFrom(staging); + staging.Destroy(); + } + + void UniformBuffer::Init(std::uint32_t size) + { + for(int i = 0; i < MAX_FRAMES_IN_FLIGHT; i++) + { + m_buffers[i].Init(BufferType::HighDynamic, size, VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT, {}); + m_maps[i] = m_buffers[i].GetMap(); + if(m_maps[i] == nullptr) + FatalError("Vulkan : unable to map a uniform buffer"); + } + } + + void UniformBuffer::SetData(CPUBuffer data, std::size_t frame_index) + { + if(data.GetSize() != m_buffers[frame_index].GetSize()) + { + Error("Vulkan : invalid data size to update to a uniform buffer, % != %", data.GetSize(), m_buffers[frame_index].GetSize()); + return; + } + if(m_maps[frame_index] != nullptr) + std::memcpy(m_maps[frame_index], data.GetData(), data.GetSize()); + } + + void UniformBuffer::Destroy() noexcept + { + for(int i = 0; i < MAX_FRAMES_IN_FLIGHT; i++) + m_buffers[i].Destroy(); + } +} diff --git a/runtime/Sources/Renderer/Buffers/Buffer.cpp b/runtime/Sources/Renderer/Buffers/Buffer.cpp deleted file mode 100644 index 6f6455f..0000000 --- a/runtime/Sources/Renderer/Buffers/Buffer.cpp +++ /dev/null @@ -1,150 +0,0 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* Buffer.cpp :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2022/10/08 18:55:57 by maldavid #+# #+# */ -/* Updated: 2024/04/23 14:20:13 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - -#include - -#include -#include -#include -#include - -namespace mlx -{ - void Buffer::Create(BufferType type, VkDeviceSize size, VkBufferUsageFlags usage, const char* name, const void* data) - { - MLX_PROFILE_FUNCTION(); - m_usage = usage; - if(type == BufferType::Constant || type == BufferType::LowDynamic) - { - if(data == nullptr && type == BufferType::Constant) - { - Warning("Vulkan : trying to create constant buffer without data (constant buffers cannot be modified after creation)"); - return; - } - m_usage |= VK_BUFFER_USAGE_TRANSFER_SRC_BIT; - } - - VmaAllocationCreateInfo alloc_info{}; - alloc_info.flags = VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT; - alloc_info.usage = VMA_MEMORY_USAGE_AUTO; - - CreateBuffer(m_usage, alloc_info, size, name); - - if(data != nullptr) - { - void* mapped = nullptr; - MapMem(&mapped); - std::memcpy(mapped, data, size); - UnmapMem(); - if(type == BufferType::constant || type == BufferType::LowDynamic) - PushToGPU(); - } - } - - void Buffer::Destroy() noexcept - { - 
MLX_PROFILE_FUNCTION(); - if(m_is_mapped) - UnmapMem(); - if(m_buffer != VK_NULL_HANDLE) - RenderCore::Get().GetAllocator().DestroyBuffer(m_allocation, m_buffer); - m_buffer = VK_NULL_HANDLE; - } - - void Buffer::CreateBuffer(VkBufferUsageFlags usage, VmaAllocationCreateInfo info, VkDeviceSize size, [[maybe_unused]] const char* name) - { - MLX_PROFILE_FUNCTION(); - VkBufferCreateInfo bufferInfo{}; - bufferInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO; - bufferInfo.size = size; - bufferInfo.usage = usage; - bufferInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE; - - #ifdef DEBUG - m_name = name; - std::string alloc_name = m_name; - if(usage & VK_BUFFER_USAGE_INDEX_BUFFER_BIT) - alloc_name.append("_index_buffer"); - else if(usage & VK_BUFFER_USAGE_VERTEX_BUFFER_BIT) - alloc_name.append("_vertex_buffer"); - else if(!(usage & VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT)) - alloc_name.append("_buffer"); - m_allocation = RenderCore::Get().GetAllocator().CreateBuffer(&bufferInfo, &info, m_buffer, alloc_name.c_str()); - #else - m_allocation = RenderCore::Get().GetAllocator().CreateBuffer(&bufferInfo, &info, m_buffer, nullptr); - #endif - m_size = size; - } - - bool Buffer::CopyFromBuffer(const Buffer& buffer) noexcept - { - MLX_PROFILE_FUNCTION(); - if(!(m_usage & VK_BUFFER_USAGE_TRANSFER_DST_BIT)) - { - Error("Vulkan : buffer cannot be the destination of a copy because it does not have the correct usage flag"); - return false; - } - if(!(buffer.m_usage & VK_BUFFER_USAGE_TRANSFER_SRC_BIT)) - { - Error("Vulkan : buffer cannot be the source of a copy because it does not have the correct usage flag"); - return false; - } - - CmdBuffer& cmd = RenderCore::Get().GetSingleTimeCmdBuffer(); - cmd.BeginRecord(); - - cmd.CopyBuffer(*this, const_cast(buffer)); - - cmd.EndRecord(); - cmd.SubmitIdle(); - - return true; - } - - void Buffer::PushToGPU() noexcept - { - MLX_PROFILE_FUNCTION(); - VmaAllocationCreateInfo alloc_info{}; - alloc_info.usage = VMA_MEMORY_USAGE_AUTO_PREFER_DEVICE; - - Buffer new_buffer; - new_buffer.m_usage = (m_usage & 0xFFFFFFFC) | VK_BUFFER_USAGE_TRANSFER_DST_BIT; - #ifdef DEBUG - std::string new_name = m_name + "_GPU"; - new_buffer.CreateBuffer(new_buffer.m_usage, alloc_info, m_size, new_name.c_str()); - #else - new_buffer.CreateBuffer(new_buffer.m_usage, alloc_info, m_size, nullptr); - #endif - - if(new_buffer.CopyFromBuffer(*this)) // if the copy succeded we swap the buffers, otherwise the new one is deleted - this->Swap(new_buffer); - new_buffer.Destroy(); // destroying the old buffer as they have been swapped - } - - void Buffer::Swap(Buffer& buffer) noexcept - { - std::swap(m_buffer, buffer.m_buffer); - std::swap(m_allocation, buffer.m_allocation); - std::swap(m_size, buffer.m_size); - std::swap(m_offset, buffer.m_offset); - #ifdef DEBUG - std::swap(m_name, buffer.m_name); - #endif - std::swap(m_usage, buffer.m_usage); - std::swap(m_is_mapped, buffer.m_is_mapped); - } - - void Buffer::Flush(VkDeviceSize size, VkDeviceSize offset) - { - RenderCore::Get().GetAllocator().Flush(m_allocation, size, offset); - } -} diff --git a/runtime/Sources/Renderer/Buffers/UniformBuffer.cpp b/runtime/Sources/Renderer/Buffers/UniformBuffer.cpp deleted file mode 100644 index 0359c03..0000000 --- a/runtime/Sources/Renderer/Buffers/UniformBuffer.cpp +++ /dev/null @@ -1,78 +0,0 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* UniformBuffer.cpp :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 
2022/10/06 18:45:52 by maldavid #+# #+# */ -/* Updated: 2024/04/23 14:25:17 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - -#include - -#include -#include - -namespace mlx -{ - void UniformBuffer::create(NonOwningPtr renderer, std::uint32_t size, [[maybe_unused]] const char* name) - { - MLX_PROFILE_FUNCTION(); - p_renderer = renderer; - - for(int i = 0; i < MAX_FRAMES_IN_FLIGHT; i++) - { - #ifdef DEBUG - std::string name_frame = name; - name_frame.append(std::to_string(i)); - m_buffers[i].create(BufferType::HighDynamic, size, VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT, name_frame.c_str()); - #else - _buffers[i].Create(BufferType::HighDynamic, size, VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT, nullptr); - #endif - m_buffers[i].MapMem(&_maps[i]); - if(m_maps[i] == nullptr) - FatalError("Vulkan : unable to map a uniform buffer"); - } - } - - void UniformBuffer::SetData(std::uint32_t size, const void* data) - { - MLX_PROFILE_FUNCTION(); - std::memcpy(m_maps[p_renderer->GetActiveImageIndex()], data, static_cast(size)); - } - - void UniformBuffer::SetDynamicData(std::uint32_t size, const void* data) - { - MLX_PROFILE_FUNCTION(); - std::memcpy(m_maps[p_renderer->GetActiveImageIndex()], data, static_cast(size)); - m_buffers[p_renderer->GetActiveImageIndex()].Flush(); - } - - unsigned int UniformBuffer::GetSize() noexcept - { - return m_buffers[p_renderer->GetActiveImageIndex()].GetSize(); - } - - unsigned int UniformBuffer::GetOffset() noexcept - { - return m_buffers[p_renderer->GetActiveImageIndex()].GetOffset(); - } - - VkBuffer& UniformBuffer::operator()() noexcept - { - return m_buffers[p_renderer->GetActiveImageIndex()].Get(); - } - - VkBuffer& UniformBuffer::Get() noexcept - { - return m_buffers[p_renderer->GetActiveImageIndex()].Get(); - } - - void UniformBuffer::Destroy() noexcept - { - for(int i = 0; i < MAX_FRAMES_IN_FLIGHT; i++) - m_buffers[i].Destroy(); - } -} diff --git a/runtime/Sources/Renderer/Buffers/VertexBuffer.cpp b/runtime/Sources/Renderer/Buffers/VertexBuffer.cpp deleted file mode 100644 index 41afa98..0000000 --- a/runtime/Sources/Renderer/Buffers/VertexBuffer.cpp +++ /dev/null @@ -1,56 +0,0 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* VertexBuffer.cpp :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2022/10/06 18:28:08 by maldavid #+# #+# */ -/* Updated: 2024/04/23 14:48:15 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - -#include - -#include - -namespace mlx -{ - void RAMVertexBuffer::SetData(std::uint32_t size, const void* data) - { - if(size > GetSize()) - { - Error("Vulkan : trying to store to much data in a vertex buffer (% bytes in % bytes)", size, GetSize()); - return; - } - - if(data == nullptr) - Warning("Vulkan : mapping null data in a vertex buffer"); - - void* temp = nullptr; - MapMem(&temp); - std::memcpy(temp, data, static_cast(size)); - UnmapMem(); - } - - void DeviceVertexBuffer::SetData(std::uint32_t size, const void* data) - { - if(size > GetSize()) - { - Error("Vulkan : trying to store to much data in a vertex buffer (% bytes in % bytes)", size, GetSize()); - return; - } - - if(data == nullptr) - Warning("Vulkan : mapping null data in a vertex buffer"); - - Buffer tmp_buf; - #ifdef DEBUG - tmp_buf.Create(BufferType::HighDynamic, size, VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_SRC_BIT, 
"tmp_buffer", data); - #else - tmp_buf.Create(BufferType::HighDynamic, size, VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_SRC_BIT, nullptr, data); - #endif - CopyFromBuffer(tmp_buf); - tmp_buf.Destroy(); - } -} diff --git a/runtime/Sources/Renderer/Command/CommandBuffer.cpp b/runtime/Sources/Renderer/Command/CommandBuffer.cpp deleted file mode 100644 index 6574682..0000000 --- a/runtime/Sources/Renderer/Command/CommandBuffer.cpp +++ /dev/null @@ -1,365 +0,0 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* CommandBuffer.cpp :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2022/10/06 18:26:06 by maldavid #+# #+# */ -/* Updated: 2024/04/23 18:02:20 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - -#include - -#include -#include -#include -#include -#include -#include -#include - -namespace mlx -{ - bool VectorPushBackIfNotFound(std::vector>& vector, NonOwningPtr res) - { - auto it = std::find_if(vector.begin(), vector.end(), [=](const NonOwningPtr vres) - { - return vres->GetUUID() == res->GetUUID(); - }); - - if(it == vector.end()) - { - vector.push_back(res); - return true; - } - return false; - } - - void CmommanBuffer::Init(CommandBufferType type, NonOwningPtr manager) - { - Init(type, &manager->GetCmdPool()); - } - - void CommandBuffer::Init(CommandBufferType type, NonOwningPtr pool) - { - MLX_PROFILE_FUNCTION(); - m_type = type; - m_pool = pool; - - VkCommandBufferAllocateInfo alloc_info{}; - alloc_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO; - alloc_info.commandPool = pool->get(); - alloc_info.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY; - alloc_info.commandBufferCount = 1; - - VkResult res = vkAllocateCommandBuffers(RenderCore::Get().getDevice().get(), &allocInfo, &_cmd_buffer); - if(res != VK_SUCCESS) - FatalError("Vulkan : failed to allocate command buffer, %s", RCore::verbaliseResultVk(res)); - #ifdef DEBUG - Message("Vulkan : created new command buffer"); - #endif - - m_fence.init(); - state = CommandBufferState::Idle; - } - - void CommandBuffer::BeginRecord(VkCommandBufferUsageFlags usage) - { - MLX_PROFILE_FUNCTION(); - if(!IsInit()) - FatalError("Vulkan : begenning record on un uninit command buffer"); - if(m_state == CommandBufferState::Recording) - return; - - VkCommandBufferBeginInfo begin_info{}; - begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO; - begin_info.flags = usage; - if(vkBeginCommandBuffer(m_cmd_buffer, &begin_info) != VK_SUCCESS) - FatalError("Vulkan : failed to begin recording command buffer"); - - m_state = CommandBufferState::Recording; - } - - void CommandBuffer::BindVertexBuffer(Buffer& buffer) noexcept - { - MLX_PROFILE_FUNCTION(); - if(!IsRecording()) - { - Warning("Vulkan : trying to bind a vertex buffer to a non recording command buffer"); - return; - } - VkDeviceSize offset[] = { buffer.GetOffset() }; - vkCmdBindVertexBuffers(m_cmd_buffer, 0, 1, &buffer.Get(), offset); - - buffer.RecordedInCommandBuffer(); - VectorPushBackIfNotFound(m_cmd_resources, &buffer); - } - - void CommandBuffer::NindIndexBuffer(Buffer& buffer) noexcept - { - MLX_PROFILE_FUNCTION(); - if(!IsRecording()) - { - Warning("Vulkan : trying to bind a index buffer to a non recording command buffer"); - return; - } - vkCmdBindIndexBuffer(m_cmd_buffer, buffer.Get(), buffer.GetOffset(), VK_INDEX_TYPE_UINT16); - - buffer.RecordedInCommandBuffer(); - 
VectorPushBackIfNotFound(m_cmd_resources, &buffer); - } - - void CommandBuffer::CopyBuffer(Buffer& dst, Buffer& src) noexcept - { - MLX_PROFILE_FUNCTION(); - if(!IsRecording()) - { - Warning("Vulkan : trying to do a buffer to buffer copy in a non recording command buffer"); - return; - } - - PreTransferBarrier(); - - VkBufferCopy copy_region{}; - copy_region.size = src.GetSize(); - vkCmdCopyBuffer(m_cmd_buffer, src.Get(), dst.Get(), 1, ©_region); - - PostTransferBarrier(); - - dst.RecordedInCommandBuffer(); - src.RecordedInCommandBuffer(); - VectorPushBackIfNotFound(m_cmd_resources, &dst); - VectorPushBackIfNotFound(m_cmd_resources, &src); - } - - void CommandBuffer::CopyBufferToImage(Buffer& buffer, Image& image) noexcept - { - MLX_PROFILE_FUNCTION(); - if(!IsRecording()) - { - Warning("Vulkan : trying to do a buffer to image copy in a non recording command buffer"); - return; - } - - PreTransferBarrier(); - - VkBufferImageCopy region{}; - region.bufferOffset = 0; - region.bufferRowLength = 0; - region.bufferImageHeight = 0; - region.imageSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT; - region.imageSubresource.mipLevel = 0; - region.imageSubresource.baseArrayLayer = 0; - region.imageSubresource.layerCount = 1; - region.imageOffset = { 0, 0, 0 }; - region.imageExtent = { image.GetWidth(), image.GetHeight(), 1 }; - - vkCmdCopyBufferToImage(m_cmd_buffer, buffer.Get(), image.Get(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, ®ion); - - PostTransferBarrier(); - - image.RecordedInCommandBuffer(); - buffer.RecordedInCommandBuffer(); - VectorPushBackIfNotFound(m_cmd_resources, &image); - VectorPushBackIfNotFound(m_cmd_resources, &buffer); - } - - void CommandBuffer::CopyImagetoBuffer(Image& image, Buffer& buffer) noexcept - { - MLX_PROFILE_FUNCTION(); - if(!IsRecording()) - { - Warning("Vulkan : trying to do an image to buffer copy in a non recording command buffer"); - return; - } - - PreTransferBarrier(); - - VkBufferImageCopy region{}; - region.bufferOffset = 0; - region.bufferRowLength = 0; - region.bufferImageHeight = 0; - region.imageSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT; - region.imageSubresource.mipLevel = 0; - region.imageSubresource.baseArrayLayer = 0; - region.imageSubresource.layerCount = 1; - region.imageOffset = { 0, 0, 0 }; - region.imageExtent = { image.GetWidth(), image.GetHeight(), 1 }; - - vkCmdCopyImageToBuffer(m_cmd_buffer, image.Get(), VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, buffer.Get(), 1, ®ion); - - PostTransferBarrier(); - - image.RecordedInCommandBuffer(); - buffer.RecordedInCommandBuffer(); - VectorPushBackIfNotFound(m_cmd_resources, &buffer); - VectorPushBackIfNotFound(m_cmd_resources, &image); - } - - void CommandBuffer::TransitionImageLayout(Image& image, VkImageLayout new_layout) noexcept - { - MLX_PROFILE_FUNCTION(); - if(!IsRecording()) - { - Warning("Vulkan : trying to do an image layout transition in a non recording command buffer"); - return; - } - - VkImageMemoryBarrier barrier{}; - barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER; - barrier.oldLayout = image.GetLayout(); - barrier.newLayout = new_layout; - barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED; - barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED; - barrier.image = image.Get(); - barrier.subresourceRange.aspectMask = IsDepthFormat(image.GetFormat()) ? 
VK_IMAGE_ASPECT_DEPTH_BIT : VK_IMAGE_ASPECT_COLOR_BIT; - barrier.subresourceRange.baseMipLevel = 0; - barrier.subresourceRange.levelCount = 1; - barrier.subresourceRange.baseArrayLayer = 0; - barrier.subresourceRange.layerCount = 1; - barrier.srcAccessMask = LayoutToAccessMask(image.GetLayout(), false); - barrier.dstAccessMask = LayoutToAccessMask(new_layout, true); - if(IsStencilFormat(image.GetFormat())) - barrier.subresourceRange.aspectMask |= VK_IMAGE_ASPECT_STENCIL_BIT; - - VkPipelineStageFlags source_stage = 0; - if(barrier.oldLayout == VK_IMAGE_LAYOUT_PRESENT_SRC_KHR) - source_stage = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT; - else if(barrier.srcAccessMask != 0) - source_stage = AccessFlagsToPipelineStage(barrier.srcAccessMask, VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT); - else - source_stage = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT; - - VkPipelineStageFlags destination_stage = 0; - if(barrier.newLayout == VK_IMAGE_LAYOUT_PRESENT_SRC_KHR) - destination_stage = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT; - else if(barrier.dstAccessMask != 0) - destination_stage = AccessFlagsToPipelineStage(barrier.dstAccessMask, VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT); - else - destination_stage = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT; - - vkCmdPipelineBarrier(m_cmd_buffer, source_stage, destination_stage, 0, 0, nullptr, 0, nullptr, 1, &barrier); - - image.RecordedInCommandBuffer(); - VectorPushBackIfNotFound(m_cmd_resources, &image); - } - - void CommandBuffer::EndRecord() - { - MLX_PROFILE_FUNCTION(); - if(!IsInit()) - FatalError("Vulkan : ending record on un uninit command buffer"); - if(m_state != CommandBufferState::Recording) - return; - if(vkEndCommandBuffer(m_cmd_buffer) != VK_SUCCESS) - FatalError("Vulkan : failed to end recording command buffer"); - - m_state = CommandBufferState::Idle; - } - - void CommandBuffer::SubmitIdle(bool should_wait_for_execution) noexcept - { - MLX_PROFILE_FUNCTION(); - if(m_type != CommandBufferType::SingleTime) - { - Error("Vulkan : try to perform an idle submit on a command buffer that is not single-time, this is not allowed"); - return; - } - - m_fence.Reset(); - - VkSubmitInfo submit_info{}; - submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO; - submit_info.commandBufferCount = 1; - submit_info.pCommandBuffers = &m_cmd_buffer; - - VkResult res = vkQueueSubmit(RenderCore::Get().GetQueue().GetGraphic(), 1, &submit_info, m_fence.Get()); - if(res != VK_SUCCESS) - FatalError("Vulkan error : failed to submit a single time command buffer, %", VerbaliseVkResult(res)); - m_state = CommandBufferState::Submitted; - - if(should_wait_for_execution) - WaitForExecution(); - } - - void CommandBuffer::Submit(NonOwningPtr signal, NonOwningPtr wait) noexcept - { - MLX_PROFILE_FUNCTION(); - std::array signal_semaphores; - std::array wait_semaphores; - - signal_semaphores[0] = (signal ? signal->Get() : VK_NULL_HANDLE); - - wait_semaphores[0] = (wait ? wait->Get() : VK_NULL_HANDLE); - VkPipelineStageFlags wait_stages[] = { VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT }; - - m_fence.Reset(); - - VkSubmitInfo submit_info{}; - submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO; - submit_info.waitSemaphoreCount = (!wait ? 
0 : wait_semaphores.size()); - submit_info.pWaitSemaphores = wait_semaphores.data(); - submit_info.pWaitDstStageMask = wait_stages; - submit_info.commandBufferCount = 1; - submit_info.pCommandBuffers = &m_cmd_buffer; - submit_info.signalSemaphoreCount = (!signal ? 0 : signal_semaphores.size()); - submit_info.pSignalSemaphores = signal_semaphores.data(); - - VkResult res = vkQueueSubmit(RenderCore::Get().GetQueue().GetGraphic(), 1, &submit_info, m_fence.get()); - if(res != VK_SUCCESS) - FatalError("Vulkan error : failed to submit draw command buffer, %", VerbaliseVkResult(res)); - m_state = CommandBufferState::Submitted; - } - - void CommandBuffer::UpdateSubmitState() noexcept - { - MLX_PROFILE_FUNCTION(); - if(!m_fence.IsReady()) - return; - - for(NonOwningPtr res : m_cmd_resources) - { - if(res) - res->RemovedFromCommandBuffer(); - } - m_cmd_resources.clear(); - m_state = CommandBufferState::Ready; - } - - void CommandBuffer::PreTransferBarrier() noexcept - { - MLX_PROFILE_FUNCTION(); - VkMemoryBarrier memory_barrier{}; - memory_barrier.sType = VK_STRUCTURE_TYPE_MEMORY_BARRIER; - memory_barrier.pNext = nullptr; - memory_barrier.srcAccessMask = 0U; - memory_barrier.dstAccessMask = VK_ACCESS_TRANSFER_READ_BIT; - - vkCmdPipelineBarrier(m_cmd_buffer, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 1, &memory_barrier, 0, nullptr, 0, nullptr); - } - - void CommandBuffer::PostTransferBarrier() noexcept - { - MLX_PROFILE_FUNCTION(); - VkMemoryBarrier memory_barrier{}; - memory_barrier.sType = VK_STRUCTURE_TYPE_MEMORY_BARRIER; - memory_barrier.pNext = nullptr; - memory_barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT; - memory_barrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_UNIFORM_READ_BIT; - - vkCmdPipelineBarrier(m_cmd_buffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, 0, 1, &memory_barrier, 0, nullptr, 0, nullptr); - } - - void CommandBuffer::Destroy() noexcept - { - MLX_PROFILE_FUNCTION(); - m_fence.Destroy(); - m_cmd_buffer = VK_NULL_HANDLE; - m_state = CommandBufferState::Uninit; - #ifdef DEBUG - Message("Vulkan : destroyed command buffer"); - #endif - } -} diff --git a/runtime/Sources/Renderer/Command/CommandManager.cpp b/runtime/Sources/Renderer/Command/CommandManager.cpp deleted file mode 100644 index df6fab9..0000000 --- a/runtime/Sources/Renderer/Command/CommandManager.cpp +++ /dev/null @@ -1,42 +0,0 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* CommandManager.cpp :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2023/04/02 17:50:52 by maldavid #+# #+# */ -/* Updated: 2024/04/23 14:55:04 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - -#include - -#include - -namespace mlx -{ - void CommandManager::Init() noexcept - { - m_cmd_pool.Init(); - for(int i = 0; i < MAX_FRAMES_IN_FLIGHT; i++) - m_cmd_buffers[i].Init(CommandBufferType::LongTime, this); - } - - void CommandManager::BeginRecord(int active_image_index) - { - m_cmd_buffers[active_image_index].BeginRecord(); - } - - void CommandManager::EndRecord(int active_image_index) - { - m_cmd_buffers[active_image_index].EndRecord(); - } - - void CommandManager::Destroy() noexcept - { - for(int i = 0; i < MAX_FRAMES_IN_FLIGHT; i++) - m_cmd_buffers[i].Destroy(); - m_cmd_pool.Destroy(); - } -} diff --git 
a/runtime/Sources/Renderer/Command/CommandPool.cpp b/runtime/Sources/Renderer/Command/CommandPool.cpp deleted file mode 100644 index af576a7..0000000 --- a/runtime/Sources/Renderer/Command/CommandPool.cpp +++ /dev/null @@ -1,37 +0,0 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* CommandPool.cpp :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2022/10/06 18:24:33 by maldavid #+# #+# */ -/* Updated: 2024/04/23 14:57:15 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - -#include - -#include -#include - -namespace mlx -{ - void CommandPool::Init() - { - VkCommandPoolCreateInfo pool_info{}; - pool_info.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO; - pool_info.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT; - pool_info.queueFamilyIndex = RenderCore::Get().GetQueue().GetFamilies().graphics_family.value(); - - VkResult res = vkCreateCommandPool(RenderCore::Get().GetDevice().Get(), &pool_info, nullptr, &m_cmd_pool); - if(res != VK_SUCCESS) - FatalError("Vulkan : failed to create command pool, %", VerbaliseVkResult(res)); - } - - void CommandPool::Destroy() noexcept - { - vkDestroyCommandPool(RenderCore::Get().GetDevice().Get(), m_cmd_pool, nullptr); - m_cmd_pool = VK_NULL_HANDLE; - } -} diff --git a/runtime/Sources/Renderer/Command/SingleTimeCommandManager.cpp b/runtime/Sources/Renderer/Command/SingleTimeCommandManager.cpp deleted file mode 100644 index b9fdae4..0000000 --- a/runtime/Sources/Renderer/Command/SingleTimeCommandManager.cpp +++ /dev/null @@ -1,64 +0,0 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* SingleTimeCommandManager.cpp :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2023/12/15 19:57:49 by maldavid #+# #+# */ -/* Updated: 2024/04/23 15:05:19 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - -#include - -#include -#include - -namespace mlx -{ - void SingleTimeCmdManager::Init() noexcept - { - m_pool.init(); - for(int i = 0; i < BASE_POOL_SIZE; i++) - { - m_buffers.emplace_back(); - m_buffers.back().Init(CommandBufferType::SingleTime, &m_pool); - } - } - - CommandBuffer& SingleTimeCmdManager::GetCmdBuffer() noexcept - { - for(CmdBuffer& buf : m_buffers) - { - if(buf.IsReadyToBeUsed()) - { - buf.reset(); - return buf; - } - } - m_buffers.emplace_back().Init(CommandBufferType::SingleTime, &m_pool); - return m_buffers.back(); - } - - void SingleTimeCmdManager::UpdateSingleTimesCmdBuffersSubmitState() noexcept - { - for(CmdBuffer& cmd : m_buffers) - cmd.UpdateSubmitState(); - } - - void SingleTimeCmdManager::WaitForAllExecutions() noexcept - { - for(CmdBuffer& cmd : m_buffers) - cmd.WaitForExecution(); - } - - void SingleTimeCmdManager::Destroy() noexcept - { - std::for_each(m_buffers.begin(), m_buffers.end(), [](CommandBuffer& buf) - { - buf.Destroy(); - }); - m_pool.Destroy(); - } -} diff --git a/runtime/Sources/Renderer/Core/Device.cpp b/runtime/Sources/Renderer/Core/Device.cpp deleted file mode 100644 index d26e966..0000000 --- a/runtime/Sources/Renderer/Core/Device.cpp +++ /dev/null @@ -1,142 +0,0 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* Device.cpp :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ 
*/ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2022/10/08 19:14:29 by maldavid #+# #+# */ -/* Updated: 2024/04/23 18:10:08 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - -#include - -#include - -namespace mlx -{ - const std::vector device_extensions = { VK_KHR_SWAPCHAIN_EXTENSION_NAME }; - - void Device::Init() - { - PickPhysicalDevice(); - - Queues::QueueFamilyIndices indices = RenderCore::Get().GetQueue().GetFamilies(); - - std::vector queue_create_infos; - std::set unique_queue_families = { indices.graphics_family.value(), indices.present_family.value() }; - - float queue_priority = 1.0f; - for(std::uint32_t queue_family : unique_queue_families) - { - VkDeviceQueueCreateInfo queue_create_info{}; - queue_create_info.sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO; - queue_create_info.queueFamilyIndex = queue_family; - queue_create_info.queueCount = 1; - queue_create_info.pQueuePriorities = &queue_priority; - queue_create_infos.push_back(queue_create_info); - } - - VkPhysicalDeviceFeatures device_features{}; - - VkDeviceCreateInfo create_info{}; - create_info.sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO; - create_info.queueCreateInfoCount = static_cast(queue_create_infos.size()); - create_info.pQueueCreateInfos = queue_create_infos.data(); - create_info.pEnabledFeatures = &device_features; - create_info.enabledExtensionCount = static_cast(device_extensions.size()); - create_info.ppEnabledExtensionNames = device_extensions.data(); - create_info.enabledLayerCount = 0; - - VkResult res; - if((res = vkCreateDevice(m_physical_device, &create_info, nullptr, &m_device)) != VK_SUCCESS) - FatalError("Vulkan : failed to create logcal device, %", VerbaliseVkResult(res)); - DebugLog("Vulkan : created new logical device"); - } - - void Device::PickPhysicalDevice() - { - std::uint32_t device_count = 0; - vkEnumeratePhysicalDevices(RenderCore::Get().GetInstance().Get(), &device_count, nullptr); - - if(device_count == 0) - FatalError("Vulkan : failed to find GPUs with Vulkan support"); - - std::vector devices(device_count); - vkEnumeratePhysicalDevices(RenderCore::Get().GetInstance().Get(), &device_count, devices.data()); - - std::multimap devices_score; - - for(const auto& device : devices) - { - int score = DeviceScore(device); - devices_score.insert(std::make_pair(score, device)); - } - - if(devices_score.rbegin()->first > 0) - m_physical_device = devices_score.rbegin()->second; - else - FatalError("Vulkan : failed to find a suitable GPU"); - - #ifdef DEBUG - VkPhysicalDeviceProperties props; - vkGetPhysicalDeviceProperties(m_physical_device, &props); - DebugLog("Vulkan : picked a physical device, %s", props.deviceName); - #endif - RenderCore::Get().GetQueue().FindQueueFamilies(m_physical_device); // update queue indicies to current physical device - } - - int Device::DeviceScore(VkPhysicalDevice device) - { - Queues::QueueFamilyIndices indices = RenderCore::Get().GetQueue().FindQueueFamilies(device); - bool extensions_supported = CheckDeviceExtensionSupport(device); - - VkPhysicalDeviceProperties props; - vkGetPhysicalDeviceProperties(device, &props); - if(!indices.IsComplete() || !extensions_supported) - return -1; - - VkPhysicalDeviceFeatures features; - vkGetPhysicalDeviceFeatures(device, &features); - - int score = 0; - #ifndef FORCE_INTEGRATED_GPU - if(props.deviceType == VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU) - score += 1000; - #else - if(props.deviceType != VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU) - return -1; - #endif - - 
if(!features.geometryShader) - return -1; - - score += props.limits.maxImageDimension2D; - score += props.limits.maxBoundDescriptorSets; - return score; - } - - bool Device::CheckDeviceExtensionSupport(VkPhysicalDevice device) - { - std::uint32_t extension_count; - vkEnumerateDeviceExtensionProperties(device, nullptr, &extension_count, nullptr); - - std::vector available_extensions(extensionCount); - vkEnumerateDeviceExtensionProperties(device, nullptr, &extension_count, available_extensions.data()); - - std::set required_extensions(device_extensions.begin(), device_extensions.end()); - - for(const auto& extension : available_extensions) - required_extensions.erase(extension.extensionName); - - return required_extensions.empty(); - } - - void Device::Destroy() noexcept - { - vkDestroyDevice(m_device, nullptr); - m_device = VK_NULL_HANDLE; - DebugLog("Vulkan : destroyed a logical device"); - } -} diff --git a/runtime/Sources/Renderer/Core/Fence.cpp b/runtime/Sources/Renderer/Core/Fence.cpp deleted file mode 100644 index 7db2b3d..0000000 --- a/runtime/Sources/Renderer/Core/Fence.cpp +++ /dev/null @@ -1,54 +0,0 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* Fence.cpp :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2023/04/02 17:53:06 by maldavid #+# #+# */ -/* Updated: 2024/04/23 18:13:09 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - -#include - -#include -#include - -namespace mlx -{ - void Fence::Init() - { - VkFenceCreateInfo fence_info{}; - fence_info.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO; - fence_info.flags = VK_FENCE_CREATE_SIGNALED_BIT; - - VkResult res; - if((res = vkCreateFence(RenderCore::Get().GetDevice().Get(), &fence_info, nullptr, &m_fence)) != VK_SUCCESS) - FatalError("Vulkan : failed to create a synchronization object (fence), %", VerbaliseVkResult(res)); - DebugLog("Vulkan : created new fence"); - } - - void Fence::Wait() noexcept - { - vkWaitForFences(RenderCore::Get().GetDevice().Get(), 1, &m_fence, VK_TRUE, UINT64_MAX); - } - - void Fence::Reset() noexcept - { - vkResetFences(RenderCore::Get().GetDevice().Get(), 1, &m_fence); - } - - bool Fence::IsReady() const noexcept - { - return vkGetFenceStatus(RenderCore::Get().GetDevice().Get(), m_fence) == VK_SUCCESS; - } - - void Fence::destroy() noexcept - { - if(m_fence != VK_NULL_HANDLE) - vkDestroyFence(RenderCore::Get().GetDevice().Get(), m_fence, nullptr); - m_fence = VK_NULL_HANDLE; - DebugLog("Vulkan : destroyed fence"); - } -} diff --git a/runtime/Sources/Renderer/Core/Instance.cpp b/runtime/Sources/Renderer/Core/Instance.cpp deleted file mode 100644 index 694e3b2..0000000 --- a/runtime/Sources/Renderer/Core/Instance.cpp +++ /dev/null @@ -1,88 +0,0 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* Instance.cpp :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2022/10/08 19:04:21 by maldavid #+# #+# */ -/* Updated: 2024/04/23 18:43:47 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - -#include - -#include -#include - -namespace mlx -{ - void Instance::Init() - { - std::uint32_t api_version = std::min(volkGetInstanceVersion(), MLX_TARGET_VULKAN_API_VERSION); - - if(api_version == 0) - FatalError("Vulkan API is not supported by this 
driver"); - - m_instance_version = api_version; - - VkApplicationInfo app_info{}; - app_info.sType = VK_STRUCTURE_TYPE_APPLICATION_INFO; - app_info.pEngineName = "MacroLibX"; - app_info.engineVersion = MLX_VERSION; - app_info.apiVersion = api_version; - - auto extensions = GetRequiredExtensions(); - - VkInstanceCreateInfo create_info{}; - create_info.sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO; - create_info.pApplicationInfo = &app_info; - create_info.enabledExtensionCount = static_cast(extensions.size()); - create_info.ppEnabledExtensionNames = extensions.data(); - create_info.enabledLayerCount = 0; // will be replaced if validation layers are enabled - create_info.pNext = nullptr; - - VkDebugUtilsMessengerCreateInfoEXT debug_create_info; - if constexpr(enable_validation_layers) - { - if(RenderCore::Get().GetLayers().CheckValidationLayerSupport()) - { - create_info.enabledLayerCount = static_cast(validation_layers.size()); - create_info.ppEnabledLayerNames = validation_layers.data(); - RenderCore::Get().GetLayers().PopulateDebugMessengerCreateInfo(debug_create_info); - create_info.pNext = static_cast(&debug_create_info); - } - } - - VkResult res; - if((res = vkCreateInstance(&create_info, nullptr, &m_instance)) != VK_SUCCESS) - FatalError("Vulkan : failed to create Vulkan instance, %", VerbaliseVkResult(res)); - volkLoadInstance(m_instance); - DebugLog("Vulkan : created new instance"); - } - - std::vector Instance::GetRequiredExtensions() - { - std::uint32_t glfw_extension_count = 0; - const char** glfw_extensions = glfwGetRequiredInstanceExtensions(&glfw_extension_count); - - std::vector extensions(glfw_extensions, glfw_extensions + glfw_extension_count); - - extensions.push_back(VK_KHR_SURFACE_EXTENSION_NAME); - - if constexpr(enableValidationLayers) - { - extensions.push_back(VK_EXT_DEBUG_REPORT_EXTENSION_NAME); - extensions.push_back(VK_EXT_DEBUG_UTILS_EXTENSION_NAME); - } - - return extensions; - } - - void Instance::Destroy() noexcept - { - vkDestroyInstance(m_instance, nullptr); - m_instance = VK_NULL_HANDLE; - DebugLog("Vulkan : destroyed an instance"); - } -} diff --git a/runtime/Sources/Renderer/Core/Memory.cpp b/runtime/Sources/Renderer/Core/Memory.cpp deleted file mode 100644 index 80ce793..0000000 --- a/runtime/Sources/Renderer/Core/Memory.cpp +++ /dev/null @@ -1,199 +0,0 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* Memory.cpp :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: kbz_8 +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2023/10/20 22:02:37 by kbz_8 #+# #+# */ -/* Updated: 2024/04/23 18:49:10 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - -#include - -#include - -#define VK_NO_PROTOTYPES -#define VMA_STATIC_VULKAN_FUNCTIONS 0 -#define VMA_DYNAMIC_VULKAN_FUNCTIONS 0 -#define VMA_ASSERT(expr) ((void)0) -#define VMA_IMPLEMENTATION - -#ifdef MLX_COMPILER_CLANG - #pragma clang diagnostic push - #pragma clang diagnostic ignored "-Weverything" - #include - #pragma clang diagnostic pop -#elif defined(MLX_COMPILER_GCC) - #pragma GCC diagnostic push - #pragma GCC diagnostic ignored "-Wimplicit-fallthrough" - #pragma GCC diagnostic ignored "-Wmissing-field-initializers" - #pragma GCC diagnostic ignored "-Wunused-parameter" - #pragma GCC diagnostic ignored "-Wunused-variable" - #pragma GCC diagnostic ignored "-Wparentheses" - #include - #pragma GCC diagnostic pop -#else - #include -#endif - -#include -#include - -namespace mlx -{ - void 
GPUallocator::init() noexcept - { - VmaVulkanFunctions vma_vulkan_func{}; - vma_vulkan_func.vkAllocateMemory = vkAllocateMemory; - vma_vulkan_func.vkBindBufferMemory = vkBindBufferMemory; - vma_vulkan_func.vkBindImageMemory = vkBindImageMemory; - vma_vulkan_func.vkCreateBuffer = vkCreateBuffer; - vma_vulkan_func.vkCreateImage = vkCreateImage; - vma_vulkan_func.vkDestroyBuffer = vkDestroyBuffer; - vma_vulkan_func.vkDestroyImage = vkDestroyImage; - vma_vulkan_func.vkFlushMappedMemoryRanges = vkFlushMappedMemoryRanges; - vma_vulkan_func.vkFreeMemory = vkFreeMemory; - vma_vulkan_func.vkGetBufferMemoryRequirements = vkGetBufferMemoryRequirements; - vma_vulkan_func.vkGetImageMemoryRequirements = vkGetImageMemoryRequirements; - vma_vulkan_func.vkGetPhysicalDeviceMemoryProperties = vkGetPhysicalDeviceMemoryProperties; - vma_vulkan_func.vkGetPhysicalDeviceProperties = vkGetPhysicalDeviceProperties; - vma_vulkan_func.vkInvalidateMappedMemoryRanges = vkInvalidateMappedMemoryRanges; - vma_vulkan_func.vkMapMemory = vkMapMemory; - vma_vulkan_func.vkUnmapMemory = vkUnmapMemory; - vma_vulkan_func.vkCmdCopyBuffer = vkCmdCopyBuffer; -#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000 - vma_vulkan_func.vkGetBufferMemoryRequirements2KHR = vkGetBufferMemoryRequirements2, - vma_vulkan_func.vkGetImageMemoryRequirements2KHR = vkGetImageMemoryRequirements2, -#endif -#if VMA_BIND_MEMORY2 || VMA_VULKAN_VERSION >= 1001000 - vma_vulkan_func.vkBindBufferMemory2KHR = vkBindBufferMemory2, - vma_vulkan_func.vkBindImageMemory2KHR = vkBindImageMemory2, -#endif -#if VMA_MEMORY_BUDGET || VMA_VULKAN_VERSION >= 1001000 - vma_vulkan_func.vkGetPhysicalDeviceMemoryProperties2KHR = vkGetPhysicalDeviceMemoryProperties2, -#endif -#if VMA_VULKAN_VERSION >= 1003000 - vma_vulkan_func.vkGetDeviceBufferMemoryRequirements = vkGetDeviceBufferMemoryRequirements, - vma_vulkan_func.vkGetDeviceImageMemoryRequirements = vkGetDeviceImageMemoryRequirements, -#endif - - VmaAllocatorCreateInfo allocator_create_info{}; - allocator_create_info.vulkanApiVersion = RenderCore::Get().GetInstance().GetInstanceVersion(); - allocator_create_info.physicalDevice = RenderCore::Get().GetDevice().GetPhysicalDevice(); - allocator_create_info.device = RenderCore::Get().GetDevice().Get(); - allocator_create_info.instance = RenderCore::Get().GetInstance().Get(); - allocator_create_info.pVulkanFunctions = &vma_vulkan_func; - - VkResult res = vmaCreateAllocator(&allocator_create_info, &m_allocator); - if(res != VK_SUCCESS) - FatalError("Graphics allocator : failed to create graphics memory allocator, %", VerbaliseVkResult(res)); - DebugLog("Graphics allocator : created new allocator"); - } - - VmaAllocation GPUallocator::CreateBuffer(const VkBufferCreateInfo* binfo, const VmaAllocationCreateInfo* vinfo, VkBuffer& buffer, const char* name) noexcept - { - MLX_PROFILE_FUNCTION(); - VmaAllocation allocation; - VkResult res = vmaCreateBuffer(m_allocator, binfo, vinfo, &buffer, &allocation, nullptr); - if(res != VK_SUCCESS) - FatalError("Graphics allocator : failed to allocate a buffer, %s", RCore::verbaliseResultVk(res)); - if(name != nullptr) - { - RenderCore::Get().GetLayers().SetDebugUtilsObjectNameEXT(VK_OBJECT_TYPE_BUFFER, (std::uint64_t)buffer, name); - vmaSetAllocationName(m_allocator, allocation, name); - } - DebugLog("Graphics Allocator : created new buffer '%s'", name); - m_active_buffers_allocations++; - return allocation; - } - - void GPUallocator::DestroyBuffer(VmaAllocation allocation, VkBuffer buffer) noexcept - { - MLX_PROFILE_FUNCTION(); - 
vkDeviceWaitIdle(RenderCore::Get().GetDevice().Get()); - vmaDestroyBuffer(m_allocator, buffer, allocation); - DebugLog("Graphics Allocator : destroyed buffer"); - m_active_buffers_allocations--; - } - - VmaAllocation GPUallocator::CreateImage(const VkImageCreateInfo* iminfo, const VmaAllocationCreateInfo* vinfo, VkImage& image, const char* name) noexcept - { - MLX_PROFILE_FUNCTION(); - VmaAllocation allocation; - VkResult res = vmaCreateImage(m_allocator, iminfo, vinfo, &image, &allocation, nullptr); - if(res != VK_SUCCESS) - FatalError("Graphics allocator : failed to allocate an image, %", VerbaliseVkResult(res)); - if(name != nullptr) - { - RenderCore::Get().GetLayers().SetDebugUtilsObjectNameEXT(VK_OBJECT_TYPE_IMAGE, (std::uint64_t)image, name); - vmaSetAllocationName(m_allocator, allocation, name); - } - DebugLog("Graphics Allocator : created new image '%s'", name); - m_active_images_allocations++; - return allocation; - } - - void GPUallocator::DestroyImage(VmaAllocation allocation, VkImage image) noexcept - { - MLX_PROFILE_FUNCTION(); - vkDeviceWaitIdle(RenderCore::Get().GetDevice().Get()); - vmaDestroyImage(m_allocator, image, allocation); - DebugLog("Graphics Allocator : destroyed image"); - m_active_images_allocations--; - } - - void GPUallocator::MapMemory(VmaAllocation allocation, void** data) noexcept - { - MLX_PROFILE_FUNCTION(); - VkResult res = vmaMapMemory(m_allocator, allocation, data); - if(res != VK_SUCCESS) - FatalError("Graphics allocator : unable to map GPU memory to CPU memory, %", VerbaliseVkResult(res)); - } - - void GPUallocator::unmapMemory(VmaAllocation allocation) noexcept - { - MLX_PROFILE_FUNCTION(); - vmaUnmapMemory(m_allocator, allocation); - } - - void GPUallocator::dumpMemoryToJson() - { - static std::uint32_t id = 0; - std::string name("memory_dump"); - name.append(std::to_string(id) + ".json"); - std::ofstream file(name); - if(!file.is_open()) - { - Error("Graphics allocator : unable to dump memory to a json file"); - return; - } - char* str = nullptr; - vmaBuildStatsString(m_allocator, &str, true); - file << str; - vmaFreeStatsString(m_allocator, str); - file.close(); - id++; - } - - void GPUallocator::Flush(VmaAllocation allocation, VkDeviceSize size, VkDeviceSize offset) noexcept - { - MLX_PROFILE_FUNCTION(); - vmaFlushAllocation(m_allocator, allocation, offset, size); - } - - void GPUallocator::Destroy() noexcept - { - if(m_active_images_allocations != 0) - Error("Graphics allocator : some user-dependant allocations were not freed before destroying the display (% active allocations). You may have not destroyed all the MLX resources you've created", m_active_images_allocations); - else if(m_active_buffers_allocations != 0) - Error("Graphics allocator : some MLX-dependant allocations were not freed before destroying the display (% active allocations). 
This is an error in the MLX, please report this should not happen", m_active_buffers_allocations); - if(m_active_images_allocations < 0 || m_active_buffers_allocations < 0) - Warning("Graphics allocator : the impossible happened, the MLX has freed more allocations than it has made (wtf)"); - vmaDestroyAllocator(m_allocator); - m_active_buffers_allocations = 0; - m_active_images_allocations = 0; - DebugLog("Vulkan : destroyed a graphics allocator"); - } -} diff --git a/runtime/Sources/Renderer/Core/Queues.cpp b/runtime/Sources/Renderer/Core/Queues.cpp deleted file mode 100644 index b1b7ae8..0000000 --- a/runtime/Sources/Renderer/Core/Queues.cpp +++ /dev/null @@ -1,53 +0,0 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* Queues.cpp :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2022/10/08 19:02:42 by maldavid #+# #+# */ -/* Updated: 2024/04/23 18:51:21 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - -#include - -#include - -namespace mlx -{ - Queues::QueueFamilyIndices Queues::findQueueFamilies(VkPhysicalDevice device) - { - std::uint32_t queue_family_count = 0; - vkGetPhysicalDeviceQueueFamilyProperties(device, &queue_family_count, nullptr); - - std::vector queue_families(queueFamilyCount); - vkGetPhysicalDeviceQueueFamilyProperties(device, &queue_family_count, queue_families.data()); - - n_families = Queues::QueueFamilyIndices{}; - int i = 0; - for(const auto& queue_family : queue_families) - { - if(queue_family.queueFlags & VK_QUEUE_GRAPHICS_BIT) - m_families->graphics_family = i; - - if(glfwGetPhysicalDevicePresentationSupport(RenderCore::Get().GetInstance().Get(), device, i)) - m_families->present_family = i; - - if(m_families->IsComplete()) - return *m_families; - i++; - } - - return *m_families; - } - - void Queues::Init() - { - if(!m_families.has_value()) - FindQueueFamilies(RenderCore::Get().GetDevice().GetPhysicalDevice()); - vkGetDeviceQueue(RenderCore::Get().GetDevice().Get(), m_families->graphics_family.value(), 0, &m_graphics_queue); - vkGetDeviceQueue(RenderCore::Get().GetDevice().Get(), m_families->present_family.value(), 0, &m_present_queue); - DebugLog("Vulkan : got graphics and present queues"); - } -} diff --git a/runtime/Sources/Renderer/Core/RenderCore.cpp b/runtime/Sources/Renderer/Core/RenderCore.cpp deleted file mode 100644 index 44ced4a..0000000 --- a/runtime/Sources/Renderer/Core/RenderCore.cpp +++ /dev/null @@ -1,134 +0,0 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* RenderCore.cpp :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2022/12/17 23:33:34 by maldavid #+# #+# */ -/* Updated: 2024/04/23 18:54:26 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - -#define VK_NO_PROTOTYPES -#define VOLK_IMPLEMENTATION -#include - -#include - -#include -#include - -#ifdef DEBUG - #ifdef MLX_COMPILER_MSVC - #pragma NOTE("MLX is being compiled in debug mode, this activates Vulkan's validation layers and debug messages which may impact rendering performances") - #else - #warning "MLX is being compiled in debug mode, this activates Vulkan's validation layers and debug messages which may impact rendering performances" - #endif -#endif - -namespace mlx -{ - const char* VerbaliseVkResult(VkResult 
result) - { - switch(result) - { - case VK_SUCCESS: return "Success"; - case VK_NOT_READY: return "A fence or query has not yet completed"; - case VK_TIMEOUT: return "A wait operation has not completed in the specified time"; - case VK_EVENT_SET: return "An event is signaled"; - case VK_EVENT_RESET: return "An event is unsignaled"; - case VK_INCOMPLETE: return "A return array was too small for the result"; - case VK_ERROR_OUT_OF_HOST_MEMORY: return "A host memory allocation has failed"; - case VK_ERROR_OUT_OF_DEVICE_MEMORY: return "A device memory allocation has failed"; - case VK_ERROR_INITIALIZATION_FAILED: return "Initialization of an object could not be completed for implementation-specific reasons"; - case VK_ERROR_DEVICE_LOST: return "The logical or physical device has been lost"; - case VK_ERROR_MEMORY_MAP_FAILED: return "Mapping of a memory object has failed"; - case VK_ERROR_LAYER_NOT_PRESENT: return "A requested layer is not present or could not be loaded"; - case VK_ERROR_EXTENSION_NOT_PRESENT: return "A requested extension is not supported"; - case VK_ERROR_FEATURE_NOT_PRESENT: return "A requested feature is not supported"; - case VK_ERROR_INCOMPATIBLE_DRIVER: return "The requested version of Vulkan is not supported by the driver or is otherwise incompatible"; - case VK_ERROR_TOO_MANY_OBJECTS: return "Too many objects of the type have already been created"; - case VK_ERROR_FORMAT_NOT_SUPPORTED: return "A requested format is not supported on this device"; - case VK_ERROR_SURFACE_LOST_KHR: return "A surface is no longer available"; - case VK_SUBOPTIMAL_KHR: return "A swapchain no longer matches the surface properties exactly, but can still be used"; - case VK_ERROR_OUT_OF_DATE_KHR: return "A surface has changed in such a way that it is no longer compatible with the swapchain"; - case VK_ERROR_INCOMPATIBLE_DISPLAY_KHR: return "The display used by a swapchain does not use the same presentable image layout"; - case VK_ERROR_NATIVE_WINDOW_IN_USE_KHR: return "The requested window is already connected to a VkSurfaceKHR, or to some other non-Vulkan API"; - case VK_ERROR_VALIDATION_FAILED_EXT: return "A validation layer found an error"; - - default: return "Unknown Vulkan error"; - } - return nullptr; - } - - VkPipelineStageFlags AccessFlagsToPipelineStage(VkAccessFlags access_flags, VkPipelineStageFlags stage_flags) - { - VkPipelineStageFlags stages = 0; - - while(access_flags != 0) - { - VkAccessFlagBits Access_flag = static_cast(access_flags & (~(access_flags - 1))); - if(Access_flag == 0 || (Access_flag & (Access_flag - 1)) != 0) - FatalError("Vulkan : an error has been caught during access flag to pipeline stage operation"); - access_flags &= ~Access_flag; - - switch(Access_flag) - { - case VK_ACCESS_INDIRECT_COMMAND_READ_BIT: stages |= VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT; break; - case VK_ACCESS_INDEX_READ_BIT: stages |= VK_PIPELINE_STAGE_VERTEX_INPUT_BIT; break; - case VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT: stages |= VK_PIPELINE_STAGE_VERTEX_INPUT_BIT; break; - case VK_ACCESS_UNIFORM_READ_BIT: stages |= stage_flags | VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT; break; - case VK_ACCESS_INPUT_ATTACHMENT_READ_BIT: stages |= VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT; break; - case VK_ACCESS_SHADER_READ_BIT: stages |= stage_flags | VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT; break; - case VK_ACCESS_SHADER_WRITE_BIT: stages |= stage_flags | VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT; break; - case VK_ACCESS_COLOR_ATTACHMENT_READ_BIT: stages |= VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT; break; - case 
VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT: stages |= VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT; break; - case VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT: stages |= VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT | VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT; break; - case VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT: stages |= VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT | VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT; break; - case VK_ACCESS_TRANSFER_READ_BIT: stages |= VK_PIPELINE_STAGE_TRANSFER_BIT; break; - case VK_ACCESS_TRANSFER_WRITE_BIT: stages |= VK_PIPELINE_STAGE_TRANSFER_BIT; break; - case VK_ACCESS_HOST_READ_BIT: stages |= VK_PIPELINE_STAGE_HOST_BIT; break; - case VK_ACCESS_HOST_WRITE_BIT: stages |= VK_PIPELINE_STAGE_HOST_BIT; break; - case VK_ACCESS_MEMORY_READ_BIT: break; - case VK_ACCESS_MEMORY_WRITE_BIT: break; - - default: Error("Vulkan : unknown access flag"); break; - } - } - return stages; - } - - void RenderCore::Init() - { - if(volkInitialize() != VK_SUCCESS) - FatalError("Vulkan loader : cannot load %, are you sure Vulkan is installed on your system ?", VULKAN_LIB_NAME); - - m_instance.Init(); - volkLoadInstance(m_instance.Get()); - m_layers.Init(); - m_device.Init(); - volkLoadDevice(m_device.Get()); - m_queues.Init(); - m_allocator.Init(); - m_cmd_manager.Init(); - m_is_init = true; - } - - void RenderCore::Destroy() - { - if(!m_is_init) - return; - - vkDeviceWaitIdle(m_device.Get()); - - m_pool_manager.DestroyAllPools(); - m_cmd_manager.Destroy(); - m_allocator.Destroy(); - m_device.Destroy(); - m_layers.Destroy(); - m_instance.Destroy(); - - m_is_init = false; - } -} diff --git a/runtime/Sources/Renderer/Core/Semaphore.cpp b/runtime/Sources/Renderer/Core/Semaphore.cpp deleted file mode 100644 index 8a9d72a..0000000 --- a/runtime/Sources/Renderer/Core/Semaphore.cpp +++ /dev/null @@ -1,36 +0,0 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* Semaphore.cpp :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2022/10/08 19:01:08 by maldavid #+# #+# */ -/* Updated: 2024/04/23 18:55:42 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - -#include -#include -#include - -namespace mlx -{ - void Semaphore::Init() - { - VkSemaphoreCreateInfo semaphore_info{}; - semaphore_info.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO; - - VkResult res; - if((res = vkCreateSemaphore(RenderCore::Get().GetDevice().Get(), &semaphore_info, nullptr, &m_semaphore)) != VK_SUCCESS) - FatalError("Vulkan : failed to create a synchronization object (semaphore), %", VerbaliseVkResult(res)); - DebugLog("Vulkan : created new semaphores"); - } - - void Semaphore::Destroy() noexcept - { - vkDestroySemaphore(RenderCore::Get().GetDevice().Get(), m_semaphore, nullptr); - m_semaphore = VK_NULL_HANDLE; - DebugLog("Vulkan : destroyed semaphore"); - } -} diff --git a/runtime/Sources/Renderer/Core/Surface.cpp b/runtime/Sources/Renderer/Core/Surface.cpp deleted file mode 100644 index 4715afe..0000000 --- a/runtime/Sources/Renderer/Core/Surface.cpp +++ /dev/null @@ -1,43 +0,0 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* Surface.cpp :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2022/10/08 18:58:49 by maldavid #+# #+# */ -/* Updated: 2024/04/23 18:56:56 by maldavid ### ########.fr */ -/* */ -/* 
************************************************************************** */ - -#include -#include "render_core.h" -#include -#include - -namespace mlx -{ - void Surface::Create(Renderer& renderer) - { - if(glfwCreateWindowSurface(RenderCore::Get().GetInstance().Get(), renderer.GetWindow()->GetNativeWindow(), NULL, &m_surface) != VK_SUCCESS) - FatalError("Vulkan : failed to create a surface"); - DebugLog("Vulkan : created new surface"); - } - - VkSurfaceFormatKHR Surface::ChooseSwapSurfaceFormat(const std::vector& available_formats) - { - auto it = std::find_if(available_formats.begin(), available_formats.end(), [](VkSurfaceFormatKHR format) - { - return format.format == VK_FORMAT_R8G8B8A8_SRGB && format.colorSpace == VK_COLOR_SPACE_SRGB_NONLINEAR_KHR; - }); - - return (it == available_formats.end() ? available_formats[0] : *it); - } - - void Surface::Destroy() noexcept - { - vkDestroySurfaceKHR(RenderCore::Get().GetInstance().Get(), m_surface, nullptr); - m_surface = VK_NULL_HANDLE; - DebugLog("Vulkan : destroyed a surface"); - } -} diff --git a/runtime/Sources/Renderer/Core/ValidationLayers.cpp b/runtime/Sources/Renderer/Core/ValidationLayers.cpp deleted file mode 100644 index d64a033..0000000 --- a/runtime/Sources/Renderer/Core/ValidationLayers.cpp +++ /dev/null @@ -1,122 +0,0 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* ValidationLayers.cpp :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2022/12/19 14:05:25 by maldavid #+# #+# */ -/* Updated: 2024/04/23 19:20:21 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - -#include -#include - -namespace mlx -{ - void ValidationLayers::Init() - { - if constexpr(!enable_validation_layers) - return; - - std::uint32_t extension_count; - vkEnumerateInstanceExtensionProperties(nullptr, &extension_count, nullptr); - std::vector extensions(extension_count); - vkEnumerateInstanceExtensionProperties(nullptr, &extension_count, extensions.data()); - if(!std::any_of(extensions.begin(), extensions.end(), [=](VkExtensionProperties ext) { return std::strcmp(ext.extensionName, VK_EXT_DEBUG_UTILS_EXTENSION_NAME) == 0; })) - { - Warning("Vulkan : %s not present, debug utils are disabled", VK_EXT_DEBUG_UTILS_EXTENSION_NAME); - return; - } - - VkDebugUtilsMessengerCreateInfoEXT create_info{}; - populateDebugMessengerCreateInfo(create_info); - VkResult res = createDebugUtilsMessengerEXT(&create_info, nullptr); - if(res != VK_SUCCESS) - Warning("Vulkan : failed to set up debug messenger, %", VerbaliseVkResult(res)); - else - DebugLog("Vulkan : enabled validation layers"); - - f_vkSetDebugUtilsObjectNameEXT = (PFN_vkSetDebugUtilsObjectNameEXT)vkGetInstanceProcAddr(RenderCore::Get().GetInstance().Get(), "vkSetDebugUtilsObjectNameEXT"); - if(!f_vkSetDebugUtilsObjectNameEXT) - Warning("Vulkan : failed to set up debug object names, %", VerbaliseVkResult(VK_ERROR_EXTENSION_NOT_PRESENT)); - else - DebugLog("Vulkan : enabled debug object names"); - } - - bool ValidationLayers::CheckValidationLayerSupport() - { - std::uint32_t layer_count; - vkEnumerateInstanceLayerProperties(&layer_count, nullptr); - - std::vector available_layers(layer_count); - vkEnumerateInstanceLayerProperties(&layer_count, available_layers.data()); - - return std::all_of(validation_layers.begin(), validation_layers.end(), [&](const char* layer_name) - { - if(!std::any_of(available_layers.begin(), 
available_layers.end(), [=](VkLayerProperties props) { return std::strcmp(layer_name, props.layer_name) == 0; })) - { - Error("Vulkan : a validation layer was requested but was not found ('%')", layer_name); - return false; - } - return true; - }); - } - - VkResult ValidationLayers::SetDebugUtilsObjectNameEXT(VkObjectType object_type, std::uint64_t object_handle, const char* object_name) - { - if(!f_vkSetDebugUtilsObjectNameEXT) - return VK_ERROR_EXTENSION_NOT_PRESENT; - - VkDebugUtilsObjectNameInfoEXT name_info{}; - name_info.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT; - name_info.objectType = object_type; - name_info.objectHandle = object_handle; - name_info.pObjectName = object_name; - return f_vkSetDebugUtilsObjectNameEXT(RenderCore::Get().GetDevice().Get(), &name_info); - } - - void ValidationLayers::PopulateDebugMessengerCreateInfo(VkDebugUtilsMessengerCreateInfoEXT& create_info) - { - create_info = {}; - create_info.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT; - create_info.messageSeverity = VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT | VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT | VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT; - create_info.messageType = VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT | VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT | VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT; - create_info.pfnUserCallback = ValidationLayers::DebugCallback; - } - - void ValidationLayers::Destroy() - { - if constexpr(enable_validation_layers) - { - DestroyDebugUtilsMessengerEXT(nullptr); - #ifdef DEBUG - DebugLog("Vulkan : destroyed validation layers"); - #endif - } - } - - VkResult ValidationLayers::CreateDebugUtilsMessengerEXT(const VkDebugUtilsMessengerCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator) - { - auto func = (PFN_vkCreateDebugUtilsMessengerEXT)vkGetInstanceProcAddr(RenderCore::Get().GetInstance().Get(), "vkCreateDebugUtilsMessengerEXT"); - return func != nullptr ? 
func(RenderCore::Get().GetInstance().Get(), pCreateInfo, pAllocator, &m_debug_messenger) : VK_ERROR_EXTENSION_NOT_PRESENT; - } - - VKAPI_ATTR VkBool32 VKAPI_CALL ValidationLayers::DebugCallback(VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, [[maybe_unused]] VkDebugUtilsMessageTypeFlagsEXT messageType, const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData, [[maybe_unused]] void* pUserData) - { - if(messageSeverity == VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT) - Error(pCallbackData->pMessage); - else if(messageSeverity == VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT) - Warning(pCallbackData->pMessage); - return VK_FALSE; - } - - void ValidationLayers::destroyDebugUtilsMessengerEXT(const VkAllocationCallbacks* pAllocator) - { - auto func = (PFN_vkDestroyDebugUtilsMessengerEXT)vkGetInstanceProcAddr(RenderCore::Get().GetInstance().Get(), "vkDestroyDebugUtilsMessengerEXT"); - if(func != nullptr) - func(RenderCore::Get().GetInstance().Get(), m_debug_messenger, pAllocator); - } - -} diff --git a/runtime/Sources/Renderer/Descriptor.cpp b/runtime/Sources/Renderer/Descriptor.cpp new file mode 100644 index 0000000..8d4ee93 --- /dev/null +++ b/runtime/Sources/Renderer/Descriptor.cpp @@ -0,0 +1,141 @@ +#include + +#include +#include +#include +#include +#include + +namespace mlx +{ + void TransitionImageToCorrectLayout(Image& image, VkCommandBuffer cmd) + { + if(!image.IsInit()) + return; + if(image.GetType() == ImageType::Color) + image.TransitionLayout(VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL, cmd); + else + Error("Vulkan : cannot transition descriptor image layout, unkown image type"); + } + + DescriptorSet::DescriptorSet(const ShaderSetLayout& layout, VkDescriptorSetLayout vklayout, ShaderType shader_type) + : m_set_layout(vklayout) + { + for(auto& [binding, type] : layout.binds) + { + m_descriptors.emplace_back(); + m_descriptors.back().type = type; + m_descriptors.back().shader_type = shader_type; + m_descriptors.back().binding = binding; + } + for(std::size_t i = 0; i < MAX_FRAMES_IN_FLIGHT; i++) + m_set[i] = kvfAllocateDescriptorSet(RenderCore::Get().GetDevice(), vklayout); + } + + DescriptorSet::DescriptorSet(VkDescriptorSetLayout layout, const std::vector& descriptors) + : m_set_layout(layout), m_descriptors(descriptors) + { + for(std::size_t i = 0; i < MAX_FRAMES_IN_FLIGHT; i++) + m_set[i] = kvfAllocateDescriptorSet(RenderCore::Get().GetDevice(), layout); + } + + void DescriptorSet::SetImage(std::size_t i, std::uint32_t binding, class Image& image) + { + Verify(m_set[i] != VK_NULL_HANDLE, "invalid descriptor"); + auto it = std::find_if(m_descriptors.begin(), m_descriptors.end(), [=](Descriptor descriptor) + { + return binding == descriptor.binding; + }); + if(it == m_descriptors.end()) + { + Warning("Vulkan : cannot update descriptor set image; invalid binding"); + return; + } + if(it->type != VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER) + { + Error("Vulkan : trying to bind an image to the wrong descriptor"); + return; + } + it->image_ptr = ℑ + } + + void DescriptorSet::SetStorageBuffer(std::size_t i, std::uint32_t binding, class GPUBuffer& buffer) + { + Verify(m_set[i] != VK_NULL_HANDLE, "invalid descriptor"); + auto it = std::find_if(m_descriptors.begin(), m_descriptors.end(), [=](Descriptor descriptor) + { + return binding == descriptor.binding; + }); + if(it == m_descriptors.end()) + { + Warning("Vulkan : cannot update descriptor set buffer; invalid binding"); + return; + } + if(it->type != VK_DESCRIPTOR_TYPE_STORAGE_BUFFER) + { + Error("Vulkan : trying to bind a buffer 
to the wrong descriptor"); + return; + } + it->storage_buffer_ptr = &buffer; + } + + void DescriptorSet::SetUniformBuffer(std::size_t i, std::uint32_t binding, class GPUBuffer& buffer) + { + Verify(m_set[i] != VK_NULL_HANDLE, "invalid descriptor"); + auto it = std::find_if(m_descriptors.begin(), m_descriptors.end(), [=](Descriptor descriptor) + { + return binding == descriptor.binding; + }); + if(it == m_descriptors.end()) + { + Warning("Vulkan : cannot update descriptor set buffer; invalid binding"); + return; + } + if(it->type != VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER) + { + Error("Vulkan : trying to bind a buffer to the wrong descriptor"); + return; + } + it->uniform_buffer_ptr = &buffer; + } + + void DescriptorSet::Update(std::size_t i, VkCommandBuffer cmd) noexcept + { + Verify(m_set[i] != VK_NULL_HANDLE, "invalid descriptor"); + std::vector writes; + std::vector buffer_infos; + std::vector image_infos; + for(auto& descriptor : m_descriptors) + { + if(descriptor.image_ptr) + { + TransitionImageToCorrectLayout(*descriptor.image_ptr, cmd); + VkDescriptorImageInfo info{}; + info.sampler = descriptor.image_ptr->GetSampler(); + info.imageLayout = descriptor.image_ptr->GetLayout(); + info.imageView = descriptor.image_ptr->GetImageView(); + image_infos.push_back(info); + writes.push_back(kvfWriteImageToDescriptorSet(RenderCore::Get().GetDevice(), m_set[i], &image_infos.back(), descriptor.binding)); + } + else if(descriptor.uniform_buffer_ptr) + { + VkDescriptorBufferInfo info{}; + info.buffer = descriptor.uniform_buffer_ptr->Get(); + info.offset = descriptor.uniform_buffer_ptr->GetOffset(); + info.range = VK_WHOLE_SIZE; + buffer_infos.push_back(info); + writes.push_back(kvfWriteUniformBufferToDescriptorSet(RenderCore::Get().GetDevice(), m_set[i], &buffer_infos.back(), descriptor.binding)); + } + else if(descriptor.storage_buffer_ptr) + { + VkDescriptorBufferInfo info{}; + info.buffer = descriptor.storage_buffer_ptr->Get(); + info.offset = descriptor.storage_buffer_ptr->GetOffset(); + info.range = VK_WHOLE_SIZE; + buffer_infos.push_back(info); + writes.push_back(kvfWriteStorageBufferToDescriptorSet(RenderCore::Get().GetDevice(), m_set[i], &buffer_infos.back(), descriptor.binding)); + } + } + vkUpdateDescriptorSets(RenderCore::Get().GetDevice(), writes.size(), writes.data(), 0, nullptr); + } +} diff --git a/runtime/Sources/Renderer/Descriptors/DescriptorPool.cpp b/runtime/Sources/Renderer/Descriptors/DescriptorPool.cpp deleted file mode 100644 index 2cd8085..0000000 --- a/runtime/Sources/Renderer/Descriptors/DescriptorPool.cpp +++ /dev/null @@ -1,69 +0,0 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* DescriptorPool.cpp :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2023/01/23 18:34:23 by maldavid #+# #+# */ -/* Updated: 2024/04/23 19:39:39 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - -#include - -#include -#include -#include - -namespace mlx -{ - void DescriptorPool::Init(std::vector sizes) - { - VkDescriptorPoolCreateInfo pool_info{}; - pool_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO; - pool_info.poolSizeCount = sizes.size(); - pool_info.pPoolSizes = sizes.data(); - pool_info.maxSets = MAX_SETS_PER_POOL; - pool_info.flags = VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT; - - VkResult res = vkCreateDescriptorPool(RenderCore::Get().GetDevice().Get(), &pool_info, nullptr, &m_pool); - 
if(res != VK_SUCCESS) - FatalError("Vulkan : failed to create descriptor pool, %", VerbaliseVkResult(res)); - DebugLog("Vulkan : created new descriptor pool"); - } - - VkDescriptorSet DescriptorPool::AllocateDescriptorSet(class DescriptorSetLayout& layout) - { - VkDescriptorSet set; - - VkDescriptorSetAllocateInfo alloc_info{}; - alloc_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO; - alloc_info.descriptorPool = m_pool; - alloc_info.descriptorSetCount = 1; - alloc_info.pSetLayouts = layouts.Get(); - - VkResult res = vkAllocateDescriptorSets(RenderCore::Get().GetDevice().Get(), &alloc_info, &set); - if(res != VK_SUCCESS) - FatalError("Vulkan : failed to allocate descriptor set, %", VerbaliseVkResult(res)); - m_allocated_sets++; - DebugLog("Vulkan : created new descriptor set"); - return set; - } - - void DescriptorPool::FreeDescriptor(VkDescriptorSet set) - { - if(!IsInit()) - return; - vkFreeDescriptorSets(RenderCore::Get().GetDevice().Get(), m_pool, 1, set); - m_allocated_sets--; // if this goes underflow I quit - } - - void DescriptorPool::Destroy() noexcept - { - if(m_pool != VK_NULL_HANDLE) - vkDestroyDescriptorPool(RenderCore::Get().GetDevice().Get(), m_pool, nullptr); - m_pool = VK_NULL_HANDLE; - DebugLog("Vulkan : destroyed a descriptor pool"); - } -} diff --git a/runtime/Sources/Renderer/Descriptors/DescriptorPoolManager.cpp b/runtime/Sources/Renderer/Descriptors/DescriptorPoolManager.cpp deleted file mode 100644 index 081f887..0000000 --- a/runtime/Sources/Renderer/Descriptors/DescriptorPoolManager.cpp +++ /dev/null @@ -1,41 +0,0 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* DescriptorPoolManager.cpp :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2024/01/20 06:51:47 by maldavid #+# #+# */ -/* Updated: 2024/04/23 19:41:38 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - -#include - -#include -#include - -namespace mlx -{ - DescriptorPool& DescriptorPoolManager::GetAvailablePool() - { - for(auto& pool : m_pools) - { - if(pool.GetNumberOfSetsAllocated() < MAX_SETS_PER_POOL) - return pool; - } - std::vector pool_sizes = { - { VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, (MAX_FRAMES_IN_FLIGHT * NUMBER_OF_UNIFORM_BUFFERS) }, - { VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, MAX_SETS_PER_POOL - (MAX_FRAMES_IN_FLIGHT * NUMBER_OF_UNIFORM_BUFFERS) } - }; - m_pools.emplace_front().Init(std::move(pool_sizes)); - return m_pools.front(); - } - - void DescriptorPoolManager::DestroyAllPools() - { - for(auto& pool : m_pools) - pool.Destroy(); - m_pools.clear(); - } -} diff --git a/runtime/Sources/Renderer/Descriptors/DescriptorSet.cpp b/runtime/Sources/Renderer/Descriptors/DescriptorSet.cpp deleted file mode 100644 index 75d6427..0000000 --- a/runtime/Sources/Renderer/Descriptors/DescriptorSet.cpp +++ /dev/null @@ -1,116 +0,0 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* DescriptorSet.cpp :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2023/01/23 18:40:44 by maldavid #+# #+# */ -/* Updated: 2024/04/23 21:17:39 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - -#include - -#include -#include -#include -#include -#include -#include - -namespace mlx -{ - void DescriptorSet::Init(NonOwningPtr renderer, NonOwningPtr 
pool, DescriptorSetLayout layout) - { - MLX_PROFILE_FUNCTION(); - p_renderer = renderer; - m_layout = layout; - p_pool = pool; - - for(int i = 0; i < MAX_FRAMES_IN_FLIGHT; i++) - m_desc_set[i] = pool->AllocateDescriptorSet(layout); - } - - void DescriptorSet::WriteDescriptor(int binding, NonOwningPtr ubo) const noexcept - { - MLX_PROFILE_FUNCTION(); - auto device = RenderCore::Get().GetDevice().Get(); - - for(int i = 0; i < MAX_FRAMES_IN_FLIGHT; i++) - { - VkDescriptorBufferInfo buffer_info{}; - buffer_info.buffer = ubo->Get(i); - buffer_info.offset = ubo->GetOffset(i); - buffer_info.range = ubo->GetSize(i); - - VkWriteDescriptorSet descriptor_write{}; - descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET; - descriptor_write.dstSet = m_desc_set[i]; - descriptor_write.dstBinding = binding; - descriptor_write.dstArrayElement = 0; - descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER; - descriptor_write.descriptorCount = 1; - descriptor_write.pBufferInfo = &buffer_info; - - vkUpdateDescriptorSets(device, 1, &descriptor_write, 0, nullptr); - } - } - - void DescriptorSet::WriteDescriptor(int binding, const Image& image) const noexcept - { - MLX_PROFILE_FUNCTION(); - auto device = RenderCore::Get().GetDevice().Get(); - - VkDescriptorImageInfo image_info{}; - image_info.imageLayout = image.GetLayout(); - image_info.imageView = image.GetImageView(); - image_info.sampler = image.GetSampler(); - - VkWriteDescriptorSet descriptor_write{}; - descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET; - descriptor_write.dstSet = m_desc_set[m_renderer->GetActiveImageIndex()]; - descriptor_write.dstBinding = binding; - descriptor_write.dstArrayElement = 0; - descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER; - descriptor_write.descriptorCount = 1; - descriptor_write.pImageInfo = &image_info; - - vkUpdateDescriptorSets(device, 1, &descriptor_write, 0, nullptr); - } - - void DescriptorSet::Bind() noexcept - { - vkCmdBindDescriptorSets(p_renderer->GetActiveCmdBuffer().Get(), VK_PIPELINE_BIND_POINT_GRAPHICS, p_renderer->GetPipeline().GetPipelineLayout(), 0, 1, m_desc_set[p_renderer->GetActiveImageIndex()], 0, nullptr); - } - - DescriptorSet DescriptorSet::Duplicate() - { - MLX_PROFILE_FUNCTION(); - DescriptorSet set; - set.Init(p_renderer, &RenderCore::Get().GetDescriptorPool(), m_layout); - return set; - } - - VkDescriptorSet& DescriptorSet::operator()() noexcept - { - return m_desc_set[p_renderer->GetActiveImageIndex()]; - } - - VkDescriptorSet& DescriptorSet::Get() noexcept - { - return m_desc_set[p_renderer->GetActiveImageIndex()]; - } - - void DescriptorSet::Destroy() noexcept - { - MLX_PROFILE_FUNCTION(); - if(p_pool != nullptr && RenderCore::Get().IsInit()) // checks if the render core is still init (it should always be init but just in case) - p_pool->FreeDescriptor(*this); - for(auto& set : m_desc_set) - { - if(set != VK_NULL_HANDLE) - set = VK_NULL_HANDLE; - } - } -} diff --git a/runtime/Sources/Renderer/Descriptors/DescriptorSetLayout.cpp b/runtime/Sources/Renderer/Descriptors/DescriptorSetLayout.cpp deleted file mode 100644 index cc61a34..0000000 --- a/runtime/Sources/Renderer/Descriptors/DescriptorSetLayout.cpp +++ /dev/null @@ -1,49 +0,0 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* DescriptorSetLayout.cpp :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2023/01/23 18:37:28 by maldavid #+# #+# */ -/* Updated: 
2024/04/23 19:52:41 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - -#include - -#include -#include - -namespace mlx -{ - void DescriptorSetLayout::Init(std::vector> binds, VkShaderStageFlagBits stage) - { - std::vector bindings(binds.size()); - for(std::size_t i = 0; i < binds.size(); i++) - { - bindings[i].binding = binds[i].first; - bindings[i].descriptorCount = 1; - bindings[i].descriptorType = binds[i].second; - bindings[i].pImmutableSamplers = nullptr; - bindings[i].stageFlags = stage; - } - - m_bindings = std::move(binds); - - VkDescriptorSetLayoutCreateInfo layout_info{}; - layout_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO; - layout_info.bindingCount = m_bindings.size(); - layout_info.pBindings = m_bindings.data(); - - VkResult res = vkCreateDescriptorSetLayout(RenderCore::Get().GetDevice().Get(), &layout_info, nullptr, &m_layout); - if(res != VK_SUCCESS) - FatalError("Vulkan : failed to create descriptor set layout, %", VerbaliseVkResult(res)); - } - - void DescriptorSetLayout::Destroy() noexcept - { - vkDestroyDescriptorSetLayout(RenderCore::Get().GetDevice().Get(), m_layout, nullptr); - m_layout = VK_NULL_HANDLE; - } -} diff --git a/runtime/Sources/Renderer/Image.cpp b/runtime/Sources/Renderer/Image.cpp new file mode 100644 index 0000000..5ee95d0 --- /dev/null +++ b/runtime/Sources/Renderer/Image.cpp @@ -0,0 +1,113 @@ +#include +#include +#include +#include + +namespace mlx +{ + void Image::Init(ImageType type, std::uint32_t width, std::uint32_t height, VkFormat format, VkImageTiling tiling, VkImageUsageFlags usage, VkMemoryPropertyFlags properties, bool is_multisampled) + { + m_type = type; + m_width = width; + m_height = height; + m_format = format; + m_tiling = tiling; + m_is_multisampled = is_multisampled; + + VmaAllocationCreateInfo alloc_info{}; + alloc_info.usage = VMA_MEMORY_USAGE_AUTO_PREFER_DEVICE; + + VkImageCreateInfo image_info{}; + image_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO; + image_info.imageType = VK_IMAGE_TYPE_2D; + image_info.extent.width = width; + image_info.extent.height = height; + image_info.extent.depth = 1; + image_info.mipLevels = 1; + image_info.arrayLayers = 1; + image_info.format = format; + image_info.tiling = tiling; + image_info.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED; + image_info.usage = usage; + image_info.samples = (m_is_multisampled ? 
VK_SAMPLE_COUNT_4_BIT : VK_SAMPLE_COUNT_1_BIT); + image_info.sharingMode = VK_SHARING_MODE_EXCLUSIVE; + m_allocation = RenderCore::Get().GetAllocator().CreateImage(&image_info, alloc_info, &m_image); + } + + void Image::CreateImageView(VkImageViewType type, VkImageAspectFlags aspect_flags, int layer_count) noexcept + { + m_image_view = kvfCreateImageView(RenderCore::Get().GetDevice(), m_image, m_format, type, aspect_flags, layer_count); + } + + void Image::CreateSampler() noexcept + { + m_sampler = kvfCreateSampler(RenderCore::Get().GetDevice(), VK_FILTER_NEAREST, VK_SAMPLER_ADDRESS_MODE_REPEAT, VK_SAMPLER_MIPMAP_MODE_NEAREST); + } + + void Image::TransitionLayout(VkImageLayout new_layout, VkCommandBuffer cmd) + { + if(new_layout == m_layout) + return; + bool is_single_time_cmd_buffer = (cmd == VK_NULL_HANDLE); + if(is_single_time_cmd_buffer) + cmd = kvfCreateCommandBuffer(RenderCore::Get().GetDevice()); + KvfImageType kvf_type = KVF_IMAGE_OTHER; + switch(m_type) + { + case ImageType::Color: kvf_type = KVF_IMAGE_COLOR; break; + default: break; + } + kvfTransitionImageLayout(RenderCore::Get().GetDevice(), m_image, kvf_type, cmd, m_format, m_layout, new_layout, is_single_time_cmd_buffer); + m_layout = new_layout; + } + + void Image::Clear(VkCommandBuffer cmd, Vec4f color) + { + VkImageSubresourceRange subresource_range{}; + subresource_range.baseMipLevel = 0; + subresource_range.layerCount = (m_type == ImageType::Cube ? 6 : 1); + subresource_range.levelCount = 1; + subresource_range.baseArrayLayer = 0; + + if(m_type == ImageType::Color || m_type == ImageType::Cube) + { + VkImageLayout old_layout = m_layout; + TransitionLayout(VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, cmd); + subresource_range.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT; + VkClearColorValue clear_color = VkClearColorValue({ { color.x, color.y, color.z, color.w } }); + vkCmdClearColorImage(cmd, m_image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, &clear_color, 1, &subresource_range); + TransitionLayout(old_layout, cmd); + } + else if(m_type == ImageType::Depth) + { + VkClearDepthStencilValue clear_depth_stencil = { 1.0f, 1 }; + subresource_range.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT; + TransitionLayout(VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, cmd); + vkCmdClearDepthStencilImage(cmd, m_image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, &clear_depth_stencil, 1, &subresource_range); + } + } + + void Image::DestroySampler() noexcept + { + if(m_sampler != VK_NULL_HANDLE) + kvfDestroySampler(RenderCore::Get().GetDevice(), m_sampler); + m_sampler = VK_NULL_HANDLE; + } + + void Image::DestroyImageView() noexcept + { + if(m_image_view != VK_NULL_HANDLE) + kvfDestroyImageView(RenderCore::Get().GetDevice(), m_image_view); + m_image_view = VK_NULL_HANDLE; + } + + void Image::Destroy() noexcept + { + DestroySampler(); + DestroyImageView(); + + if(m_image != VK_NULL_HANDLE) + RenderCore::Get().GetAllocator().DestroyImage(m_allocation, m_image); + m_image = VK_NULL_HANDLE; + } +} diff --git a/runtime/Sources/Renderer/Images/Image.cpp b/runtime/Sources/Renderer/Images/Image.cpp deleted file mode 100644 index d6e62ba..0000000 --- a/runtime/Sources/Renderer/Images/Image.cpp +++ /dev/null @@ -1,393 +0,0 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* Image.cpp :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2023/01/25 11:59:07 by maldavid #+# #+# */ -/* Updated: 2024/04/23 20:02:25 by maldavid ### ########.fr */ -/* */ -/* 
************************************************************************** */ - -#include - -#include -#include - -namespace mlx -{ - bool IsStencilFormat(VkFormat format) - { - switch(format) - { - case VK_FORMAT_D32_SFLOAT_S8_UINT: - case VK_FORMAT_D24_UNORM_S8_UINT: - return true; - - default: return false; - } - } - - bool IsDepthFormat(VkFormat format) - { - switch(format) - { - case VK_FORMAT_D16_UNORM: - case VK_FORMAT_D32_SFLOAT: - case VK_FORMAT_D32_SFLOAT_S8_UINT: - case VK_FORMAT_D24_UNORM_S8_UINT: - case VK_FORMAT_D16_UNORM_S8_UINT: - return true; - - default: return false; - } - } - - VkFormat BitsToFormat(std::uint32_t bits) - { - switch(bits) - { - case 8: return VK_FORMAT_R8_UNORM; - case 16: return VK_FORMAT_R8G8_UNORM; - case 24: return VK_FORMAT_R8G8B8_UNORM; - case 32: return VK_FORMAT_R8G8B8A8_UNORM; - case 48: return VK_FORMAT_R16G16B16_SFLOAT; - case 64: return VK_FORMAT_R16G16B16A16_SFLOAT; - case 96: return VK_FORMAT_R32G32B32_SFLOAT; - case 128: return VK_FORMAT_R32G32B32A32_SFLOAT; - - default: - FatalError("Vulkan : unsupported image bit-depth"); - return VK_FORMAT_R8G8B8A8_UNORM; - } - } - - VkPipelineStageFlags LayoutToAccessMask(VkImageLayout layout, bool is_destination) - { - VkPipelineStageFlags access_mask = 0; - - switch(layout) - { - case VK_IMAGE_LAYOUT_UNDEFINED: - if(is_destination) - Error("Vulkan : the new layout used in a transition must not be VK_IMAGE_LAYOUT_UNDEFINED"); - break; - case VK_IMAGE_LAYOUT_GENERAL: access_mask = VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT; break; - case VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL: access_mask = VK_ACCESS_COLOR_ATTACHMENT_READ_BIT | VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT; break; - case VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL: access_mask = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT | VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT; break; - case VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL: - access_mask = VK_ACCESS_SHADER_READ_BIT; // VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT; - break; - case VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL: access_mask = VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_INPUT_ATTACHMENT_READ_BIT; break; - case VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL: access_mask = VK_ACCESS_TRANSFER_READ_BIT; break; - case VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL: access_mask = VK_ACCESS_TRANSFER_WRITE_BIT; break; - case VK_IMAGE_LAYOUT_PREINITIALIZED: - if(!is_destination) - access_mask = VK_ACCESS_HOST_WRITE_BIT; - else - Error("Vulkan : the new layout used in a transition must not be VK_IMAGE_LAYOUT_PREINITIALIZED"); - break; - case VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL: access_mask = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT; break; - case VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL: access_mask = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT; break; - case VK_IMAGE_LAYOUT_PRESENT_SRC_KHR: access_mask = VK_ACCESS_MEMORY_READ_BIT; break; - - default: Error("Vulkan : unexpected image layout"); break; - } - - return access_mask; - } - - void Image::Create(std::uint32_t width, std::uint32_t height, VkFormat format, VkImageTiling tiling, VkImageUsageFlags usage, const char* name, bool dedicated_memory) - { - m_width = width; - m_height = height; - m_format = format; - m_tiling = tiling; - - VkImageCreateInfo image_info{}; - image_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO; - image_info.imageType = VK_IMAGE_TYPE_2D; - image_info.extent.width = width; - image_info.extent.height = height; - image_info.extent.depth = 1; - image_info.mipLevels = 1; - 
image_info.arrayLayers = 1; - image_info.format = format; - image_info.tiling = tiling; - image_info.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED; - image_info.usage = usage; - image_info.samples = VK_SAMPLE_COUNT_1_BIT; - image_info.sharingMode = VK_SHARING_MODE_EXCLUSIVE; - - VmaAllocationCreateInfo alloc_info{}; - alloc_info.usage = VMA_MEMORY_USAGE_AUTO_PREFER_DEVICE; - if(dedicated_memory) - { - alloc_info.flags = VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT; - alloc_info.priority = 1.0f; - } - - m_allocation = RenderCore::Get().GetAllocator().CreateImage(&image_info, &alloc_info, m_image, name); - #ifdef DEBUG - m_name = name; - #endif - } - - void Image::CreateImageView(VkImageViewType type, VkImageAspectFlags aspect_flags) noexcept - { - VkImageViewCreateInfo view_info{}; - view_info.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO; - view_info.image = m_image; - view_info.viewType = type; - view_info.format = m_format; - view_info.subresourceRange.aspectMask = aspect_flags; - view_info.subresourceRange.baseMipLevel = 0; - view_info.subresourceRange.levelCount = 1; - view_info.subresourceRange.baseArrayLayer = 0; - view_info.subresourceRange.layerCount = 1; - - VkResult res = vkCreateImageView(RenderCore::Get().GetDevice().Get(), &view_info, nullptr, &m_image_view); - if(res != VK_SUCCESS) - FatalError("Vulkan : failed to create an image view, %s", VerbaliseVkResult(res)); - #ifdef DEBUG - else - RenderCore::Get().GetLayers().SetDebugUtilsObjectNameEXT(VK_OBJECT_TYPE_IMAGE_VIEW, (std::uint64_t)m_image_view, m_name.c_str()); - #endif - } - - void Image::CreateSampler() noexcept - { - VkSamplerCreateInfo info{}; - info.sType = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO; - info.magFilter = VK_FILTER_NEAREST; - info.minFilter = VK_FILTER_NEAREST; - info.mipmapMode = VK_SAMPLER_MIPMAP_MODE_NEAREST; - info.addressModeU = VK_SAMPLER_ADDRESS_MODE_REPEAT; - info.addressModeV = VK_SAMPLER_ADDRESS_MODE_REPEAT; - info.addressModeW = VK_SAMPLER_ADDRESS_MODE_REPEAT; - info.minLod = -1000; - info.maxLod = 1000; - info.anisotropyEnable = VK_FALSE; - info.maxAnisotropy = 1.0f; - - VkResult res = vkCreateSampler(RenderCore::Get().GetDevice().Get(), &info, nullptr, &m_sampler); - if(res != VK_SUCCESS) - FatalError("Vulkan : failed to create an image sampler, %", VerbaliseVkResult(res)); - #ifdef DEBUG - else - RenderCore::Get().GetLayers().SetDebugUtilsObjectNameEXT(VK_OBJECT_TYPE_SAMPLER, (std::uint64_t)m_sampler, m_name.c_str()); - #endif - } - - void Image::CopyFromBuffer(Buffer& buffer) - { - CommandBuffer& cmd = RenderCore::Get().GetSingleTimeCmdBuffer(); - cmd.BeginRecord(); - - VkImageLayout layout_save = m_layout; - TransitionLayout(VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, &cmd); - - cmd.CopyBufferToImage(buffer, *this); - - TransitionLayout(layout_save, &cmd); - - cmd.EndRecord(); - cmd.SubmitIdle(); - } - - void Image::CopyToBuffer(Buffer& buffer) - { - CommandBuffer& cmd = RenderCore::Get().GetSingleTimeCmdBuffer(); - cmd.BeginRecord(); - - VkImageLayout layout_save = m_layout; - TransitionLayout(VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, &cmd); - - cmd.CopyImagetoBuffer(*this, buffer); - - TransitionLayout(layout_save, &cmd); - - cmd.EndRecord(); - cmd.SubmitIdle(); - } - - void Image::TransitionLayout(VkImageLayout new_layout, NonOwningPtr cmd) - { - if(new_layout == m_layout) - return; - - bool single_time = (cmd == nullptr); - if(single_time) - { - cmd = &RenderCore::Get().GetSingleTimeCmdBuffer(); - cmd->BeginRecord(); - } - - cmd->TransitionImageLayout(*this, new_layout); - - if(single_time) - { - 
cmd->EndRecord(); - cmd->SubmitIdle(); - } - m_layout = new_layout; - } - - void Image::DestroySampler() noexcept - { - if(m_sampler != VK_NULL_HANDLE) - vkDestroySampler(RenderCore::Get().GetDevice().Get(), m_sampler, nullptr); - m_sampler = VK_NULL_HANDLE; - } - - void Image::DestroyImageView() noexcept - { - if(m_image_view != VK_NULL_HANDLE) - vkDestroyImageView(RenderCore::Get().GetDevice().Get(), m_image_view, nullptr); - m_image_view = VK_NULL_HANDLE; - } - - void Image::Destroy() noexcept - { - DestroySampler(); - DestroyImageView(); - - if(m_image != VK_NULL_HANDLE) - RenderCore::Get().GetAllocator().DestroyImage(m_allocation, m_image); - m_image = VK_NULL_HANDLE; - } - - std::uint32_t FormatSize(VkFormat format) - { - switch(format) - { - case VK_FORMAT_UNDEFINED: return 0; - case VK_FORMAT_R4G4_UNORM_PACK8: return 1; - case VK_FORMAT_R4G4B4A4_UNORM_PACK16: return 2; - case VK_FORMAT_B4G4R4A4_UNORM_PACK16: return 2; - case VK_FORMAT_R5G6B5_UNORM_PACK16: return 2; - case VK_FORMAT_B5G6R5_UNORM_PACK16: return 2; - case VK_FORMAT_R5G5B5A1_UNORM_PACK16: return 2; - case VK_FORMAT_B5G5R5A1_UNORM_PACK16: return 2; - case VK_FORMAT_A1R5G5B5_UNORM_PACK16: return 2; - case VK_FORMAT_R8_UNORM: return 1; - case VK_FORMAT_R8_SNORM: return 1; - case VK_FORMAT_R8_USCALED: return 1; - case VK_FORMAT_R8_SSCALED: return 1; - case VK_FORMAT_R8_UINT: return 1; - case VK_FORMAT_R8_SINT: return 1; - case VK_FORMAT_R8_SRGB: return 1; - case VK_FORMAT_R8G8_UNORM: return 2; - case VK_FORMAT_R8G8_SNORM: return 2; - case VK_FORMAT_R8G8_USCALED: return 2; - case VK_FORMAT_R8G8_SSCALED: return 2; - case VK_FORMAT_R8G8_UINT: return 2; - case VK_FORMAT_R8G8_SINT: return 2; - case VK_FORMAT_R8G8_SRGB: return 2; - case VK_FORMAT_R8G8B8_UNORM: return 3; - case VK_FORMAT_R8G8B8_SNORM: return 3; - case VK_FORMAT_R8G8B8_USCALED: return 3; - case VK_FORMAT_R8G8B8_SSCALED: return 3; - case VK_FORMAT_R8G8B8_UINT: return 3; - case VK_FORMAT_R8G8B8_SINT: return 3; - case VK_FORMAT_R8G8B8_SRGB: return 3; - case VK_FORMAT_B8G8R8_UNORM: return 3; - case VK_FORMAT_B8G8R8_SNORM: return 3; - case VK_FORMAT_B8G8R8_USCALED: return 3; - case VK_FORMAT_B8G8R8_SSCALED: return 3; - case VK_FORMAT_B8G8R8_UINT: return 3; - case VK_FORMAT_B8G8R8_SINT: return 3; - case VK_FORMAT_B8G8R8_SRGB: return 3; - case VK_FORMAT_R8G8B8A8_UNORM: return 4; - case VK_FORMAT_R8G8B8A8_SNORM: return 4; - case VK_FORMAT_R8G8B8A8_USCALED: return 4; - case VK_FORMAT_R8G8B8A8_SSCALED: return 4; - case VK_FORMAT_R8G8B8A8_UINT: return 4; - case VK_FORMAT_R8G8B8A8_SINT: return 4; - case VK_FORMAT_R8G8B8A8_SRGB: return 4; - case VK_FORMAT_B8G8R8A8_UNORM: return 4; - case VK_FORMAT_B8G8R8A8_SNORM: return 4; - case VK_FORMAT_B8G8R8A8_USCALED: return 4; - case VK_FORMAT_B8G8R8A8_SSCALED: return 4; - case VK_FORMAT_B8G8R8A8_UINT: return 4; - case VK_FORMAT_B8G8R8A8_SINT: return 4; - case VK_FORMAT_B8G8R8A8_SRGB: return 4; - case VK_FORMAT_A8B8G8R8_UNORM_PACK32: return 4; - case VK_FORMAT_A8B8G8R8_SNORM_PACK32: return 4; - case VK_FORMAT_A8B8G8R8_USCALED_PACK32: return 4; - case VK_FORMAT_A8B8G8R8_SSCALED_PACK32: return 4; - case VK_FORMAT_A8B8G8R8_UINT_PACK32: return 4; - case VK_FORMAT_A8B8G8R8_SINT_PACK32: return 4; - case VK_FORMAT_A8B8G8R8_SRGB_PACK32: return 4; - case VK_FORMAT_A2R10G10B10_UNORM_PACK32: return 4; - case VK_FORMAT_A2R10G10B10_SNORM_PACK32: return 4; - case VK_FORMAT_A2R10G10B10_USCALED_PACK32: return 4; - case VK_FORMAT_A2R10G10B10_SSCALED_PACK32: return 4; - case VK_FORMAT_A2R10G10B10_UINT_PACK32: return 4; - case 
VK_FORMAT_A2R10G10B10_SINT_PACK32: return 4; - case VK_FORMAT_A2B10G10R10_UNORM_PACK32: return 4; - case VK_FORMAT_A2B10G10R10_SNORM_PACK32: return 4; - case VK_FORMAT_A2B10G10R10_USCALED_PACK32: return 4; - case VK_FORMAT_A2B10G10R10_SSCALED_PACK32: return 4; - case VK_FORMAT_A2B10G10R10_UINT_PACK32: return 4; - case VK_FORMAT_A2B10G10R10_SINT_PACK32: return 4; - case VK_FORMAT_R16_UNORM: return 2; - case VK_FORMAT_R16_SNORM: return 2; - case VK_FORMAT_R16_USCALED: return 2; - case VK_FORMAT_R16_SSCALED: return 2; - case VK_FORMAT_R16_UINT: return 2; - case VK_FORMAT_R16_SINT: return 2; - case VK_FORMAT_R16_SFLOAT: return 2; - case VK_FORMAT_R16G16_UNORM: return 4; - case VK_FORMAT_R16G16_SNORM: return 4; - case VK_FORMAT_R16G16_USCALED: return 4; - case VK_FORMAT_R16G16_SSCALED: return 4; - case VK_FORMAT_R16G16_UINT: return 4; - case VK_FORMAT_R16G16_SINT: return 4; - case VK_FORMAT_R16G16_SFLOAT: return 4; - case VK_FORMAT_R16G16B16_UNORM: return 6; - case VK_FORMAT_R16G16B16_SNORM: return 6; - case VK_FORMAT_R16G16B16_USCALED: return 6; - case VK_FORMAT_R16G16B16_SSCALED: return 6; - case VK_FORMAT_R16G16B16_UINT: return 6; - case VK_FORMAT_R16G16B16_SINT: return 6; - case VK_FORMAT_R16G16B16_SFLOAT: return 6; - case VK_FORMAT_R16G16B16A16_UNORM: return 8; - case VK_FORMAT_R16G16B16A16_SNORM: return 8; - case VK_FORMAT_R16G16B16A16_USCALED: return 8; - case VK_FORMAT_R16G16B16A16_SSCALED: return 8; - case VK_FORMAT_R16G16B16A16_UINT: return 8; - case VK_FORMAT_R16G16B16A16_SINT: return 8; - case VK_FORMAT_R16G16B16A16_SFLOAT: return 8; - case VK_FORMAT_R32_UINT: return 4; - case VK_FORMAT_R32_SINT: return 4; - case VK_FORMAT_R32_SFLOAT: return 4; - case VK_FORMAT_R32G32_UINT: return 8; - case VK_FORMAT_R32G32_SINT: return 8; - case VK_FORMAT_R32G32_SFLOAT: return 8; - case VK_FORMAT_R32G32B32_UINT: return 12; - case VK_FORMAT_R32G32B32_SINT: return 12; - case VK_FORMAT_R32G32B32_SFLOAT: return 12; - case VK_FORMAT_R32G32B32A32_UINT: return 16; - case VK_FORMAT_R32G32B32A32_SINT: return 16; - case VK_FORMAT_R32G32B32A32_SFLOAT: return 16; - case VK_FORMAT_R64_UINT: return 8; - case VK_FORMAT_R64_SINT: return 8; - case VK_FORMAT_R64_SFLOAT: return 8; - case VK_FORMAT_R64G64_UINT: return 16; - case VK_FORMAT_R64G64_SINT: return 16; - case VK_FORMAT_R64G64_SFLOAT: return 16; - case VK_FORMAT_R64G64B64_UINT: return 24; - case VK_FORMAT_R64G64B64_SINT: return 24; - case VK_FORMAT_R64G64B64_SFLOAT: return 24; - case VK_FORMAT_R64G64B64A64_UINT: return 32; - case VK_FORMAT_R64G64B64A64_SINT: return 32; - case VK_FORMAT_R64G64B64A64_SFLOAT: return 32; - case VK_FORMAT_B10G11R11_UFLOAT_PACK32: return 4; - case VK_FORMAT_E5B9G9R9_UFLOAT_PACK32: return 4; - - default: return 0; - } - } -} diff --git a/runtime/Sources/Renderer/Images/Texture.cpp b/runtime/Sources/Renderer/Images/Texture.cpp deleted file mode 100644 index 18090f8..0000000 --- a/runtime/Sources/Renderer/Images/Texture.cpp +++ /dev/null @@ -1,190 +0,0 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* Texture.cpp :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2023/03/31 18:03:35 by maldavid #+# #+# */ -/* Updated: 2024/04/23 21:52:23 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - -#define STB_IMAGE_IMPLEMENTATION -#include - -#include - -#include -#include -#include - -#ifdef IMAGE_OPTIMIZED - #define TILING VK_IMAGE_TILING_OPTIMAL -#else - 
#define TILING VK_IMAGE_TILING_LINEAR -#endif - -namespace mlx -{ - void Texture::Create(std::uint8_t* pixels, std::uint32_t width, std::uint32_t height, VkFormat format, const char* name, bool dedicated_memory) - { - MLX_PROFILE_FUNCTION(); - Image::Create(width, height, format, TILING, VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_SAMPLED_BIT, name, dedicated_memory); - Image::CreateImageView(VK_IMAGE_VIEW_TYPE_2D, VK_IMAGE_ASPECT_COLOR_BIT); - Image::CreateSampler(); - TransitionLayout(VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL); - - std::vector vertex_data = { - {{0, 0}, {1.f, 1.f, 1.f, 1.f}, {0.0f, 0.0f}}, - {{width, 0}, {1.f, 1.f, 1.f, 1.f}, {1.0f, 0.0f}}, - {{width, height}, {1.f, 1.f, 1.f, 1.f}, {1.0f, 1.0f}}, - {{0, height}, {1.f, 1.f, 1.f, 1.f}, {0.0f, 1.0f}} - }; - - std::vector index_data = { 0, 1, 2, 2, 3, 0 }; - - #ifdef DEBUG - m_vbo.Create(sizeof(Vertex) * vertex_data.size(), vertex_data.data(), name); - m_ibo.Create(sizeof(std::uint16_t) * index_data.size(), index_data.data(), name); - m_name = name; - #else - m_vbo.Create(sizeof(Vertex) * vertex_data.size(), vertex_data.data(), nullptr); - m_ibo.Create(sizeof(std::uint16_t) * index_data.size(), index_data.data(), nullptr); - #endif - - Buffer staging_buffer; - std::size_t size = width * height * formatSize(format); - if(pixels != nullptr) - { - #ifdef DEBUG - staging_buffer.Create(BufferType::HighDynamic, size, VK_BUFFER_USAGE_TRANSFER_SRC_BIT, name, pixels); - #else - staging_buffer.Create(BufferType::HighDynamic, size, VK_BUFFER_USAGE_TRANSFER_SRC_BIT, nullptr, pixels); - #endif - } - else - { - std::vector default_pixels(width * height, 0x00000000); - #ifdef DEBUG - staging_buffer.Create(BufferType::HighDynamic, size, VK_BUFFER_USAGE_TRANSFER_SRC_BIT, name, default_pixels.data()); - #else - staging_buffer.Create(BufferType::HighDynamic, size, VK_BUFFER_USAGE_TRANSFER_SRC_BIT, nullptr, default_pixels.data()); - #endif - } - Image::CopyFromBuffer(staging_buffer); - staging_buffer.Destroy(); - } - - void Texture::SetPixel(int x, int y, std::uint32_t color) noexcept - { - MLX_PROFILE_FUNCTION(); - if(x < 0 || y < 0 || static_cast(x) > GetWidth() || static_cast(y) > GetHeight()) - return; - if(m_map == nullptr) - PpenCPUmap(); - m_cpu_map[(y * GetWidth()) + x] = color; - m_has_been_modified = true; - } - - int Texture::GetPixel(int x, int y) noexcept - { - MLX_PROFILE_FUNCTION(); - if(x < 0 || y < 0 || static_cast(x) > GetWidth() || static_cast(y) > GetHeight()) - return 0; - if(m_map == nullptr) - OpenCPUmap(); - std::uint32_t color = m_cpu_map[(y * GetWidth()) + x]; - std::uint8_t* bytes = reinterpret_cast(&color); - std::uint8_t tmp = bytes[0]; - bytes[0] = bytes[2]; - bytes[2] = tmp; - return *reinterpret_cast(bytes); - } - - void Texture::OpenCPUmap() - { - MLX_PROFILE_FUNCTION(); - if(m_map != nullptr) - return; - - DebugLog("Texture : enabling CPU mapping"); - std::size_t size = GetWidth() * GetHeight() * FormatSize(GetFormat()); - m_buf_map.emplace(); - #ifdef DEBUG - m_buf_map->Create(BufferType::HighDynamic, size, VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT, m_name.c_str()); - #else - m_buf_map->Create(BufferType::HighDynamic, size, VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT, nullptr); - #endif - Image::CopyToBuffer(*m_buf_map); - m_buf_map->MapMem(&_map); - m_cpu_map = std::vector(GetWidth() * GetHeight(), 0); - std::memcpy(m_cpu_map.data(), m_map, size); - DebugLog("Texture : mapped CPU 
memory using staging buffer"); - } - - void Texture::Render(Renderer& renderer, int x, int y) - { - MLX_PROFILE_FUNCTION(); - if(m_has_been_modified) - { - std::memcpy(m_map, m_cpu_map.data(), m_cpu_map.size() * FormatSize(GetFormat())); - Image::copyFromBuffer(*m_buf_map); - m_has_been_modified = false; - } - if(!m_set.IsInit()) - m_set = renderer.GetFragDescriptorSet().Duplicate(); - if(GetLayout() != VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL) - TransitionLayout(VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL); - if(!m_has_set_been_updated) - UpdateSet(0); - auto cmd = renderer.GetActiveCmdBuffer(); - m_vbo.bind(renderer); - m_ibo.bind(renderer); - glm::vec2 translate(x, y); - vkCmdPushConstants(cmd.Get(), renderer.GetPipeline().GetPipelineLayout(), VK_SHADER_STAGE_VERTEX_BIT, 0, sizeof(translate), &translate); - m_set.Bind(); - vkCmdDrawIndexed(cmd.Get(), static_cast(m_ibo.GetSize() / sizeof(std::uint16_t)), 1, 0, 0, 0); - } - - void Texture::Destroy() noexcept - { - MLX_PROFILE_FUNCTION(); - Image::Destroy(); - m_set.Destroy(); - if(m_buf_map.has_value()) - m_buf_map->Destroy(); - m_vbo.destroy(); - m_ibo.destroy(); - } - - Texture stbTextureLoad(std::filesystem::path file, int* w, int* h) - { - MLX_PROFILE_FUNCTION(); - Texture* texture = new Texture; - int channels; - std::uint8_t* data = nullptr; - std::string filename = file.string(); - - if(!std::filesystem::exists(std::move(file))) - { - Error("Image : file not found '%s'", filename.c_str()); - return nullptr; - } - if(stbi_is_hdr(filename.c_str())) - { - Error("Texture : unsupported image format '%s'", filename.c_str()); - return nullptr; - } - int dummy_w; - int dummy_h; - data = stbi_load(filename.c_str(), (w == nullptr ? &dummy_w : w), (h == nullptr ? &dummy_h : h), &channels, 4); - #ifdef DEBUG - texture->Create(data, (w == nullptr ? dummy_w : *w), (h == nullptr ? dummy_h : *h), VK_FORMAT_R8G8B8A8_UNORM, filename.c_str()); - #else - texture->Create(data, (w == nullptr ? dummy_w : *w), (h == nullptr ? dummy_h : *h), VK_FORMAT_R8G8B8A8_UNORM, nullptr); - #endif - stbi_image_free(data); - return texture; - } -} diff --git a/runtime/Sources/Renderer/Images/TextureAtlas.cpp b/runtime/Sources/Renderer/Images/TextureAtlas.cpp deleted file mode 100644 index 04bf46f..0000000 --- a/runtime/Sources/Renderer/Images/TextureAtlas.cpp +++ /dev/null @@ -1,58 +0,0 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* TextureAtlas.cpp :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2023/04/07 16:40:09 by maldavid #+# #+# */ -/* Updated: 2024/04/23 21:54:05 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - -#include - -#include - -#ifdef IMAGE_OPTIMIZED - #define TILING VK_IMAGE_TILING_OPTIMAL -#else - #define TILING VK_IMAGE_TILING_LINEAR -#endif - -namespace mlx -{ - void TextureAtlas::Create(std::uint8_t* pixels, std::uint32_t width, std::uint32_t height, VkFormat format, const char* name, bool dedicated_memory) - { - Image::Create(width, height, format, TILING, VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_SAMPLED_BIT, name, dedicated_memory); - Image::CreateImageView(VK_IMAGE_VIEW_TYPE_2D, VK_IMAGE_ASPECT_COLOR_BIT); - Image::CreateSampler(); - TransitionLayout(VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL); - - if(pixels == nullptr) - { - Warning("Renderer : creating an empty texture atlas. 
They cannot be updated after creation, this might be a mistake or a bug, please report"); - return; - } - Buffer staging_buffer; - std::size_t size = width * height * FormatSize(format); - staging_buffer.Create(BufferType::HighDynamic, size, VK_BUFFER_USAGE_TRANSFER_SRC_BIT, name, pixels); - Image::CopyFromBuffer(staging_buffer); - staging_buffer.Destroy(); - } - - void TextureAtlas::Render(Renderer& renderer, int x, int y, std::uint32_t ibo_size) const - { - auto cmd = renderer.GetActiveCmdBuffer().Get(); - - glm::vec2 translate(x, y); - vkCmdPushConstants(cmd, renderer.GetPipeline().GetPipelineLayout(), VK_SHADER_STAGE_VERTEX_BIT, 0, sizeof(translate), &translate); - vkCmdDrawIndexed(cmd, ibo_size / sizeof(std::uint16_t), 1, 0, 0, 0); - } - - void TextureAtlas::Destroy() noexcept - { - Image::Destroy(); - m_set.Destroy(); - } -} diff --git a/runtime/Sources/Renderer/Memory.cpp b/runtime/Sources/Renderer/Memory.cpp new file mode 100644 index 0000000..4b22069 --- /dev/null +++ b/runtime/Sources/Renderer/Memory.cpp @@ -0,0 +1,159 @@ +#include + +#define VMA_STATIC_VULKAN_FUNCTIONS 0 +#define VMA_DYNAMIC_VULKAN_FUNCTIONS 0 +#define VMA_VULKAN_VERSION 1000000 +#define VMA_ASSERT(expr) ((void)0) +#define VMA_IMPLEMENTATION + +#ifdef MLX_COMPILER_CLANG + #pragma clang diagnostic push + #pragma clang diagnostic ignored "-Weverything" + #include + #pragma clang diagnostic pop +#elif defined(MLX_COMPILER_GCC) + #pragma GCC diagnostic push + #pragma GCC diagnostic ignored "-Wimplicit-fallthrough" + #pragma GCC diagnostic ignored "-Wmissing-field-initializers" + #pragma GCC diagnostic ignored "-Wunused-parameter" + #pragma GCC diagnostic ignored "-Wunused-variable" + #pragma GCC diagnostic ignored "-Wparentheses" + #include + #pragma GCC diagnostic pop +#else + #include +#endif + +#include + +namespace mlx +{ + void GPUAllocator::Init() noexcept + { + VmaVulkanFunctions vma_vulkan_func{}; + vma_vulkan_func.vkAllocateMemory = vkAllocateMemory; + vma_vulkan_func.vkBindBufferMemory = vkBindBufferMemory; + vma_vulkan_func.vkBindImageMemory = vkBindImageMemory; + vma_vulkan_func.vkCreateBuffer = vkCreateBuffer; + vma_vulkan_func.vkCreateImage = vkCreateImage; + vma_vulkan_func.vkDestroyBuffer = vkDestroyBuffer; + vma_vulkan_func.vkDestroyImage = vkDestroyImage; + vma_vulkan_func.vkFlushMappedMemoryRanges = vkFlushMappedMemoryRanges; + vma_vulkan_func.vkFreeMemory = vkFreeMemory; + vma_vulkan_func.vkGetBufferMemoryRequirements = vkGetBufferMemoryRequirements; + vma_vulkan_func.vkGetImageMemoryRequirements = vkGetImageMemoryRequirements; + vma_vulkan_func.vkGetPhysicalDeviceMemoryProperties = vkGetPhysicalDeviceMemoryProperties; + vma_vulkan_func.vkGetPhysicalDeviceProperties = vkGetPhysicalDeviceProperties; + vma_vulkan_func.vkInvalidateMappedMemoryRanges = vkInvalidateMappedMemoryRanges; + vma_vulkan_func.vkMapMemory = vkMapMemory; + vma_vulkan_func.vkUnmapMemory = vkUnmapMemory; + vma_vulkan_func.vkCmdCopyBuffer = vkCmdCopyBuffer; + + VmaAllocatorCreateInfo allocator_create_info{}; + allocator_create_info.vulkanApiVersion = VK_API_VERSION_1_0; + allocator_create_info.physicalDevice = RenderCore::Get().GetPhysicalDevice(); + allocator_create_info.device = RenderCore::Get().GetDevice(); + allocator_create_info.instance = RenderCore::Get().GetInstance(); + allocator_create_info.pVulkanFunctions = &vma_vulkan_func; + + kvfCheckVk(vmaCreateAllocator(&allocator_create_info, &m_allocator)); + DebugLog("Graphics allocator : created new allocator"); + } + + VmaAllocation GPUAllocator::CreateBuffer(const 
VkBufferCreateInfo* binfo, const VmaAllocationCreateInfo* vinfo, VkBuffer& buffer, const char* name) noexcept + { + MLX_PROFILE_FUNCTION(); + VmaAllocation allocation; + kvfCheckVk(vmaCreateBuffer(m_allocator, binfo, vinfo, &buffer, &allocation, nullptr)); + if(name != nullptr) + { + vmaSetAllocationName(m_allocator, allocation, name); + } + DebugLog("Graphics Allocator : created new buffer '%'", name); + m_active_buffers_allocations++; + return allocation; + } + + void GPUAllocator::DestroyBuffer(VmaAllocation allocation, VkBuffer buffer) noexcept + { + MLX_PROFILE_FUNCTION(); + RenderCore::Get().WaitDeviceIdle(); + vmaDestroyBuffer(m_allocator, buffer, allocation); + DebugLog("Graphics Allocator : destroyed buffer"); + m_active_buffers_allocations--; + } + + VmaAllocation GPUAllocator::CreateImage(const VkImageCreateInfo* iminfo, const VmaAllocationCreateInfo* vinfo, VkImage& image, const char* name) noexcept + { + MLX_PROFILE_FUNCTION(); + VmaAllocation allocation; + kvfCheckVk(vmaCreateImage(m_allocator, iminfo, vinfo, &image, &allocation, nullptr)); + if(name != nullptr) + { + vmaSetAllocationName(m_allocator, allocation, name); + } + DebugLog("Graphics Allocator : created new image '%'", name); + m_active_images_allocations++; + return allocation; + } + + void GPUAllocator::DestroyImage(VmaAllocation allocation, VkImage image) noexcept + { + MLX_PROFILE_FUNCTION(); + RenderCore::Get().WaitDeviceIdle(); + vmaDestroyImage(m_allocator, image, allocation); + DebugLog("Graphics Allocator : destroyed image"); + m_active_images_allocations--; + } + + void GPUAllocator::MapMemory(VmaAllocation allocation, void** data) noexcept + { + MLX_PROFILE_FUNCTION(); + kvfCheckVk(vmaMapMemory(m_allocator, allocation, data)); + } + + void GPUAllocator::UnmapMemory(VmaAllocation allocation) noexcept + { + MLX_PROFILE_FUNCTION(); + vmaUnmapMemory(m_allocator, allocation); + } + + void GPUAllocator::DumpMemoryToJson() + { + static std::uint32_t id = 0; + std::string name("memory_dump"); + name.append(std::to_string(id) + ".json"); + std::ofstream file(name); + if(!file.is_open()) + { + Error("Graphics allocator : unable to dump memory to a json file"); + return; + } + char* str = nullptr; + vmaBuildStatsString(m_allocator, &str, true); + file << str; + vmaFreeStatsString(m_allocator, str); + file.close(); + id++; + } + + void GPUAllocator::Flush(VmaAllocation allocation, VkDeviceSize size, VkDeviceSize offset) noexcept + { + MLX_PROFILE_FUNCTION(); + vmaFlushAllocation(m_allocator, allocation, offset, size); + } + + void GPUAllocator::Destroy() noexcept + { + if(m_active_images_allocations != 0) + Error("Graphics allocator : some user-dependant allocations were not freed before destroying the display (% active allocations). You may have not destroyed all the MLX resources you've created", m_active_images_allocations); + else if(m_active_buffers_allocations != 0) + Error("Graphics allocator : some MLX-dependant allocations were not freed before destroying the display (% active allocations). 
This is an error in the MLX, please report this should not happen", m_active_buffers_allocations); + if(m_active_images_allocations < 0 || m_active_buffers_allocations < 0) + Warning("Graphics allocator : the impossible happened, the MLX has freed more allocations than it has made (wtf)"); + vmaDestroyAllocator(m_allocator); + m_active_buffers_allocations = 0; + m_active_images_allocations = 0; + DebugLog("Vulkan : destroyed a graphics allocator"); + } +} diff --git a/runtime/Sources/Renderer/Pipelines/Graphics.cpp b/runtime/Sources/Renderer/Pipelines/Graphics.cpp new file mode 100644 index 0000000..81bee86 --- /dev/null +++ b/runtime/Sources/Renderer/Pipelines/Graphics.cpp @@ -0,0 +1,164 @@ +#include +#include +#include +#include +#include +#include + +namespace Scop +{ + void GraphicPipeline::Init(const GraphicPipelineDescriptor& descriptor) + { + if(!descriptor.vertex_shader || !descriptor.fragment_shader) + FatalError("Vulkan : invalid shaders"); + + m_attachments = descriptor.color_attachments; + p_vertex_shader = descriptor.vertex_shader; + p_fragment_shader = descriptor.fragment_shader; + p_renderer = descriptor.renderer; + + std::vector push_constants; + std::vector set_layouts; + push_constants.insert(push_constants.end(), p_vertex_shader->GetPipelineLayout().push_constants.begin(), p_vertex_shader->GetPipelineLayout().push_constants.end()); + push_constants.insert(push_constants.end(), p_fragment_shader->GetPipelineLayout().push_constants.begin(), p_fragment_shader->GetPipelineLayout().push_constants.end()); + set_layouts.insert(set_layouts.end(), p_vertex_shader->GetPipelineLayout().set_layouts.begin(), p_vertex_shader->GetPipelineLayout().set_layouts.end()); + set_layouts.insert(set_layouts.end(), p_fragment_shader->GetPipelineLayout().set_layouts.begin(), p_fragment_shader->GetPipelineLayout().set_layouts.end()); + m_pipeline_layout = kvfCreatePipelineLayout(RenderCore::Get().GetDevice(), set_layouts.data(), set_layouts.size(), push_constants.data(), push_constants.size()); + + TransitionAttachments(); + CreateFramebuffers(m_attachments, descriptor.clear_color_attachments); + + VkPhysicalDeviceFeatures features{}; + vkGetPhysicalDeviceFeatures(RenderCore::Get().GetPhysicalDevice(), &features); + + KvfGraphicsPipelineBuilder* builder = kvfCreateGPipelineBuilder(); + kvfGPipelineBuilderAddShaderStage(builder, p_vertex_shader->GetShaderStage(), p_vertex_shader->GetShaderModule(), "main"); + kvfGPipelineBuilderAddShaderStage(builder, p_fragment_shader->GetShaderStage(), p_fragment_shader->GetShaderModule(), "main"); + kvfGPipelineBuilderSetInputTopology(builder, VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST); + kvfGPipelineBuilderSetCullMode(builder, VK_CULL_MODE_NONE, VK_FRONT_FACE_CLOCKWISE); + kvfGPipelineBuilderEnableAlphaBlending(builder); + kvfGPipelineBuilderDisableDepthTest(builder); + kvfGPipelineBuilderSetPolygonMode(builder, VK_POLYGON_MODE_FILL, 1.0f); + if(features.sampleRateShading) + kvfGPipelineBuilderSetMultisamplingShading(builder, VK_SAMPLE_COUNT_1_BIT, 0.25f); + else + kvfGPipelineBuilderSetMultisampling(builder, VK_SAMPLE_COUNT_1_BIT); + + if(!descriptor.no_vertex_inputs) + { + VkVertexInputBindingDescription binding_description = Vertex::GetBindingDescription(); + auto attributes_description = Vertex::GetAttributeDescriptions(); + kvfGPipelineBuilderSetVertexInputs(builder, binding_description, attributes_description.data(), attributes_description.size()); + } + + m_pipeline = kvfCreateGraphicsPipeline(RenderCore::Get().GetDevice(), m_pipeline_layout, builder, 
m_renderpass); + DebugLog("Vulkan : graphics pipeline created"); + kvfDestroyGPipelineBuilder(builder); + } + + bool GraphicPipeline::BindPipeline(VkCommandBuffer command_buffer, std::size_t framebuffer_index, std::array clear) noexcept + { + TransitionAttachments(command_buffer); + VkFramebuffer fb = m_framebuffers[framebuffer_index]; + VkExtent2D fb_extent = kvfGetFramebufferSize(fb); + + VkViewport viewport{}; + viewport.x = 0.0f; + viewport.y = 0.0f; + viewport.width = fb_extent.width; + viewport.height = fb_extent.height; + viewport.minDepth = 0.0f; + viewport.maxDepth = 1.0f; + vkCmdSetViewport(command_buffer, 0, 1, &viewport); + + VkRect2D scissor{}; + scissor.offset = { 0, 0 }; + scissor.extent = fb_extent; + vkCmdSetScissor(command_buffer, 0, 1, &scissor); + + for(int i = 0; i < m_clears.size(); i++) + { + m_clears[i].color.float32[0] = clear[0]; + m_clears[i].color.float32[1] = clear[1]; + m_clears[i].color.float32[2] = clear[2]; + m_clears[i].color.float32[3] = clear[3]; + } + + kvfBeginRenderPass(m_renderpass, command_buffer, fb, fb_extent, m_clears.data(), m_clears.size()); + vkCmdBindPipeline(command_buffer, GetPipelineBindPoint(), GetPipeline()); + return true; + } + + void GraphicPipeline::EndPipeline(VkCommandBuffer command_buffer) noexcept + { + vkCmdEndRenderPass(command_buffer); + } + + void GraphicPipeline::Destroy() noexcept + { + p_vertex_shader.reset(); + p_fragment_shader.reset(); + for(auto& fb : m_framebuffers) + { + kvfDestroyFramebuffer(RenderCore::Get().GetDevice(), fb); + DebugLog("Vulkan : framebuffer destroyed"); + } + m_framebuffers.clear(); + kvfDestroyPipelineLayout(RenderCore::Get().GetDevice(), m_pipeline_layout); + m_pipeline_layout = VK_NULL_HANDLE; + DebugLog("Vulkan : graphics pipeline layout destroyed"); + kvfDestroyRenderPass(RenderCore::Get().GetDevice(), m_renderpass); + m_renderpass = VK_NULL_HANDLE; + DebugLog("Vulkan : renderpass destroyed"); + kvfDestroyPipeline(RenderCore::Get().GetDevice(), m_pipeline); + m_pipeline = VK_NULL_HANDLE; + DebugLog("Vulkan : graphics pipeline destroyed"); + } + + void GraphicPipeline::CreateFramebuffers(const std::vector>& render_targets, bool clear_attachments) + { + std::vector attachments; + std::vector attachment_views; + if(p_renderer) + { + attachments.push_back(kvfBuildSwapchainAttachmentDescription(p_renderer->GetSwapchain(), clear_attachments)); + attachment_views.push_back(p_renderer->GetSwapchainImages()[0].GetImageView()); + } + + for(NonOwningPtr image : render_targets) + { + attachments.push_back(kvfBuildAttachmentDescription(KVF_IMAGE_COLOR, image->GetFormat(), image->GetLayout(), image->GetLayout(), clear_attachments, VK_SAMPLE_COUNT_1_BIT)); + attachment_views.push_back(image->GetImageView()); + } + + m_renderpass = kvfCreateRenderPass(RenderCore::Get().GetDevice(), attachments.data(), attachments.size(), GetPipelineBindPoint()); + m_clears.clear(); + m_clears.resize(attachments.size()); + DebugLog("Vulkan : renderpass created"); + + if(p_renderer) + { + for(const Image& image : p_renderer->GetSwapchainImages()) + { + attachment_views[0] = image.GetImageView(); + m_framebuffers.push_back(kvfCreateFramebuffer(RenderCore::Get().GetDevice(), m_renderpass, attachment_views.data(), attachment_views.size(), { .width = image.GetWidth(), .height = image.GetHeight() })); + DebugLog("Vulkan : framebuffer created"); + } + } + for(NonOwningPtr image : render_targets) + { + m_framebuffers.push_back(kvfCreateFramebuffer(RenderCore::Get().GetDevice(), m_renderpass, attachment_views.data(), 
attachment_views.size(), { .width = image->GetWidth(), .height = image->GetHeight() })); + DebugLog("Vulkan : framebuffer created"); + } + } + + void GraphicPipeline::TransitionAttachments(VkCommandBuffer cmd) + { + for(NonOwningPtr image : m_attachments) + { + if(!image->IsInit()) + continue; + image->TransitionLayout(VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, cmd); + } + } +} diff --git a/runtime/Sources/Renderer/Pipelines/Pipeline.cpp b/runtime/Sources/Renderer/Pipelines/Pipeline.cpp deleted file mode 100644 index cd194fe..0000000 --- a/runtime/Sources/Renderer/Pipelines/Pipeline.cpp +++ /dev/null @@ -1,331 +0,0 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* Pipeline.cpp :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2022/12/18 21:27:38 by maldavid #+# #+# */ -/* Updated: 2024/04/23 22:24:13 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - -#include - -#include -#include -#include -#include - -namespace mlx -{ - /** - #version 450 core - - layout(location = 0) in vec2 aPos; - layout(location = 1) in vec4 aColor; - layout(location = 2) in vec2 aUV; - - layout(set = 0, binding = 0) uniform uProjection - { - mat4 mat; - } uProj; - - layout(push_constant) uniform uModelPushConstant - { - vec2 vec; - } uTranslate; - - out gl_PerVertex - { - vec4 gl_Position; - }; - - layout(location = 0) out struct - { - vec4 Color; - vec2 UV; - } Out; - - void main() - { - Out.Color = aColor; - Out.UV = aUV; - vec2 pos = aPos + uTranslate.vec; - gl_Position = uProj.mat * vec4(pos.x, pos.y, 0.0, 1.0); - } - */ - const std::vector vertex_shader = { // precompiled vertex shader - 0x07230203,0x00010000,0x0008000b,0x0000003b,0x00000000,0x00020011,0x00000001,0x0006000b, - 0x00000001,0x4c534c47,0x6474732e,0x3035342e,0x00000000,0x0003000e,0x00000000,0x00000001, - 0x000a000f,0x00000000,0x00000004,0x6e69616d,0x00000000,0x0000000b,0x0000000f,0x00000015, - 0x0000001b,0x00000026,0x00030003,0x00000002,0x000001c2,0x00040005,0x00000004,0x6e69616d, - 0x00000000,0x00030005,0x00000009,0x00000000,0x00050006,0x00000009,0x00000000,0x6f6c6f43, - 0x00000072,0x00040006,0x00000009,0x00000001,0x00005655,0x00030005,0x0000000b,0x0074754f, - 0x00040005,0x0000000f,0x6c6f4361,0x0000726f,0x00030005,0x00000015,0x00565561,0x00030005, - 0x0000001a,0x00736f70,0x00040005,0x0000001b,0x736f5061,0x00000000,0x00070005,0x0000001d, - 0x646f4d75,0x75506c65,0x6f436873,0x6174736e,0x0000746e,0x00040006,0x0000001d,0x00000000, - 0x00636576,0x00050005,0x0000001f,0x61725475,0x616c736e,0x00006574,0x00060005,0x00000024, - 0x505f6c67,0x65567265,0x78657472,0x00000000,0x00060006,0x00000024,0x00000000,0x505f6c67, - 0x7469736f,0x006e6f69,0x00030005,0x00000026,0x00000000,0x00050005,0x00000028,0x6f725075, - 0x7463656a,0x006e6f69,0x00040006,0x00000028,0x00000000,0x0074616d,0x00040005,0x0000002a, - 0x6f725075,0x0000006a,0x00040047,0x0000000b,0x0000001e,0x00000000,0x00040047,0x0000000f, - 0x0000001e,0x00000001,0x00040047,0x00000015,0x0000001e,0x00000002,0x00040047,0x0000001b, - 0x0000001e,0x00000000,0x00050048,0x0000001d,0x00000000,0x00000023,0x00000000,0x00030047, - 0x0000001d,0x00000002,0x00050048,0x00000024,0x00000000,0x0000000b,0x00000000,0x00030047, - 0x00000024,0x00000002,0x00040048,0x00000028,0x00000000,0x00000005,0x00050048,0x00000028, - 0x00000000,0x00000023,0x00000000,0x00050048,0x00000028,0x00000000,0x00000007,0x00000010, - 
0x00030047,0x00000028,0x00000002,0x00040047,0x0000002a,0x00000022,0x00000000,0x00040047, - 0x0000002a,0x00000021,0x00000000,0x00020013,0x00000002,0x00030021,0x00000003,0x00000002, - 0x00030016,0x00000006,0x00000020,0x00040017,0x00000007,0x00000006,0x00000004,0x00040017, - 0x00000008,0x00000006,0x00000002,0x0004001e,0x00000009,0x00000007,0x00000008,0x00040020, - 0x0000000a,0x00000003,0x00000009,0x0004003b,0x0000000a,0x0000000b,0x00000003,0x00040015, - 0x0000000c,0x00000020,0x00000001,0x0004002b,0x0000000c,0x0000000d,0x00000000,0x00040020, - 0x0000000e,0x00000001,0x00000007,0x0004003b,0x0000000e,0x0000000f,0x00000001,0x00040020, - 0x00000011,0x00000003,0x00000007,0x0004002b,0x0000000c,0x00000013,0x00000001,0x00040020, - 0x00000014,0x00000001,0x00000008,0x0004003b,0x00000014,0x00000015,0x00000001,0x00040020, - 0x00000017,0x00000003,0x00000008,0x00040020,0x00000019,0x00000007,0x00000008,0x0004003b, - 0x00000014,0x0000001b,0x00000001,0x0003001e,0x0000001d,0x00000008,0x00040020,0x0000001e, - 0x00000009,0x0000001d,0x0004003b,0x0000001e,0x0000001f,0x00000009,0x00040020,0x00000020, - 0x00000009,0x00000008,0x0003001e,0x00000024,0x00000007,0x00040020,0x00000025,0x00000003, - 0x00000024,0x0004003b,0x00000025,0x00000026,0x00000003,0x00040018,0x00000027,0x00000007, - 0x00000004,0x0003001e,0x00000028,0x00000027,0x00040020,0x00000029,0x00000002,0x00000028, - 0x0004003b,0x00000029,0x0000002a,0x00000002,0x00040020,0x0000002b,0x00000002,0x00000027, - 0x00040015,0x0000002e,0x00000020,0x00000000,0x0004002b,0x0000002e,0x0000002f,0x00000000, - 0x00040020,0x00000030,0x00000007,0x00000006,0x0004002b,0x0000002e,0x00000033,0x00000001, - 0x0004002b,0x00000006,0x00000036,0x00000000,0x0004002b,0x00000006,0x00000037,0x3f800000, - 0x00050036,0x00000002,0x00000004,0x00000000,0x00000003,0x000200f8,0x00000005,0x0004003b, - 0x00000019,0x0000001a,0x00000007,0x0004003d,0x00000007,0x00000010,0x0000000f,0x00050041, - 0x00000011,0x00000012,0x0000000b,0x0000000d,0x0003003e,0x00000012,0x00000010,0x0004003d, - 0x00000008,0x00000016,0x00000015,0x00050041,0x00000017,0x00000018,0x0000000b,0x00000013, - 0x0003003e,0x00000018,0x00000016,0x0004003d,0x00000008,0x0000001c,0x0000001b,0x00050041, - 0x00000020,0x00000021,0x0000001f,0x0000000d,0x0004003d,0x00000008,0x00000022,0x00000021, - 0x00050081,0x00000008,0x00000023,0x0000001c,0x00000022,0x0003003e,0x0000001a,0x00000023, - 0x00050041,0x0000002b,0x0000002c,0x0000002a,0x0000000d,0x0004003d,0x00000027,0x0000002d, - 0x0000002c,0x00050041,0x00000030,0x00000031,0x0000001a,0x0000002f,0x0004003d,0x00000006, - 0x00000032,0x00000031,0x00050041,0x00000030,0x00000034,0x0000001a,0x00000033,0x0004003d, - 0x00000006,0x00000035,0x00000034,0x00070050,0x00000007,0x00000038,0x00000032,0x00000035, - 0x00000036,0x00000037,0x00050091,0x00000007,0x00000039,0x0000002d,0x00000038,0x00050041, - 0x00000011,0x0000003a,0x00000026,0x0000000d,0x0003003e,0x0000003a,0x00000039,0x000100fd, - 0x00010038 - }; - - /** - #version 450 core - - layout(location = 0) out vec4 fColor; - - layout(set = 1, binding = 0) uniform sampler2D sTexture; - - layout(location = 0) in struct - { - vec4 Color; - vec2 UV; - } In; - - void main() - { - vec4 process_color = In.Color * texture(sTexture, In.UV.st); - if(process_color.w == 0) - discard; - fColor = process_color; - } - */ - const std::vector fragment_shader = { // pre compiled fragment shader - 0x07230203,0x00010000,0x0008000b,0x0000002c,0x00000000,0x00020011,0x00000001,0x0006000b, - 0x00000001,0x4c534c47,0x6474732e,0x3035342e,0x00000000,0x0003000e,0x00000000,0x00000001, - 
0x0007000f,0x00000004,0x00000004,0x6e69616d,0x00000000,0x0000000d,0x0000002a,0x00030010, - 0x00000004,0x00000007,0x00030003,0x00000002,0x000001c2,0x00040005,0x00000004,0x6e69616d, - 0x00000000,0x00060005,0x00000009,0x636f7270,0x5f737365,0x6f6c6f63,0x00000072,0x00030005, - 0x0000000b,0x00000000,0x00050006,0x0000000b,0x00000000,0x6f6c6f43,0x00000072,0x00040006, - 0x0000000b,0x00000001,0x00005655,0x00030005,0x0000000d,0x00006e49,0x00050005,0x00000016, - 0x78655473,0x65727574,0x00000000,0x00040005,0x0000002a,0x6c6f4366,0x0000726f,0x00040047, - 0x0000000d,0x0000001e,0x00000000,0x00040047,0x00000016,0x00000022,0x00000001,0x00040047, - 0x00000016,0x00000021,0x00000000,0x00040047,0x0000002a,0x0000001e,0x00000000,0x00020013, - 0x00000002,0x00030021,0x00000003,0x00000002,0x00030016,0x00000006,0x00000020,0x00040017, - 0x00000007,0x00000006,0x00000004,0x00040020,0x00000008,0x00000007,0x00000007,0x00040017, - 0x0000000a,0x00000006,0x00000002,0x0004001e,0x0000000b,0x00000007,0x0000000a,0x00040020, - 0x0000000c,0x00000001,0x0000000b,0x0004003b,0x0000000c,0x0000000d,0x00000001,0x00040015, - 0x0000000e,0x00000020,0x00000001,0x0004002b,0x0000000e,0x0000000f,0x00000000,0x00040020, - 0x00000010,0x00000001,0x00000007,0x00090019,0x00000013,0x00000006,0x00000001,0x00000000, - 0x00000000,0x00000000,0x00000001,0x00000000,0x0003001b,0x00000014,0x00000013,0x00040020, - 0x00000015,0x00000000,0x00000014,0x0004003b,0x00000015,0x00000016,0x00000000,0x0004002b, - 0x0000000e,0x00000018,0x00000001,0x00040020,0x00000019,0x00000001,0x0000000a,0x00040015, - 0x0000001e,0x00000020,0x00000000,0x0004002b,0x0000001e,0x0000001f,0x00000003,0x00040020, - 0x00000020,0x00000007,0x00000006,0x0004002b,0x00000006,0x00000023,0x00000000,0x00020014, - 0x00000024,0x00040020,0x00000029,0x00000003,0x00000007,0x0004003b,0x00000029,0x0000002a, - 0x00000003,0x00050036,0x00000002,0x00000004,0x00000000,0x00000003,0x000200f8,0x00000005, - 0x0004003b,0x00000008,0x00000009,0x00000007,0x00050041,0x00000010,0x00000011,0x0000000d, - 0x0000000f,0x0004003d,0x00000007,0x00000012,0x00000011,0x0004003d,0x00000014,0x00000017, - 0x00000016,0x00050041,0x00000019,0x0000001a,0x0000000d,0x00000018,0x0004003d,0x0000000a, - 0x0000001b,0x0000001a,0x00050057,0x00000007,0x0000001c,0x00000017,0x0000001b,0x00050085, - 0x00000007,0x0000001d,0x00000012,0x0000001c,0x0003003e,0x00000009,0x0000001d,0x00050041, - 0x00000020,0x00000021,0x00000009,0x0000001f,0x0004003d,0x00000006,0x00000022,0x00000021, - 0x000500b4,0x00000024,0x00000025,0x00000022,0x00000023,0x000300f7,0x00000027,0x00000000, - 0x000400fa,0x00000025,0x00000026,0x00000027,0x000200f8,0x00000026,0x000100fc,0x000200f8, - 0x00000027,0x0004003d,0x00000007,0x0000002b,0x00000009,0x0003003e,0x0000002a,0x0000002b, - 0x000100fd,0x00010038 - }; - - void GraphicPipeline::Init(Renderer& renderer) - { - VkShaderModuleCreateInfo create_info{}; - create_info.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO; - create_info.codeSize = vertex_shader.size() * sizeof(std::uint32_t); - create_info.pCode = vertex_shader.data(); - VkShaderModule vshader; - if(vkCreateShaderModule(RenderCore::Get().GetDevice().Get(), &create_info, nullptr, &vshader) != VK_SUCCESS) - FatalError("Vulkan : failed to create a vertex shader module"); - - VkPushConstantRange push_constant; - push_constant.offset = 0; - push_constant.size = sizeof(glm::vec2); - push_constant.stageFlags = VK_SHADER_STAGE_VERTEX_BIT; - - create_info.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO; - create_info.codeSize = fragment_shader.size() * sizeof(std::uint32_t); - 
create_info.pCode = fragment_shader.data(); - VkShaderModule fshader; - if(vkCreateShaderModule(RenderCore::Get().GetDevice().Get(), &create_info, nullptr, &fshader) != VK_SUCCESS) - FatalError("Vulkan : failed to create a fragment shader module"); - - VkPipelineShaderStageCreateInfo vert_shader_stage_info{}; - vert_shader_stage_info.sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO; - vert_shader_stage_info.stage = VK_SHADER_STAGE_VERTEX_BIT; - vert_shader_stage_info.module = vshader; - vert_shader_stage_info.pName = "main"; - - VkPipelineShaderStageCreateInfo frag_shader_stage_info{}; - frag_shader_stage_info.sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO; - frag_shader_stage_info.stage = VK_SHADER_STAGE_FRAGMENT_BIT; - frag_shader_stage_info.module = fshader; - frag_shader_stage_info.pName = "main"; - - std::array stages = { vert_shader_stage_info, frag_shader_stage_info }; - - auto binding_description = Vertex::GetBindingDescription(); - auto attribute_descriptions = Vertex::GetAttributeDescriptions(); - - VkPipelineVertexInputStateCreateInfo vertex_input_state_create_info{}; - vertex_input_state_create_info.sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO; - vertex_input_state_create_info.vertexBindingDescriptionCount = 1; - vertex_input_state_create_info.pVertexBindingDescriptions = &binding_description; - vertex_input_state_create_info.vertexAttributeDescriptionCount = static_cast(attribute_descriptions.size()); - vertex_input_state_create_info.pVertexAttributeDescriptions = attribute_descriptions.data(); - - VkPipelineInputAssemblyStateCreateInfo input_assembly{}; - input_assembly.sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO; - input_assembly.topology = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST; - input_assembly.primitiveRestartEnable = VK_FALSE; - - VkDynamicState states[] = { VK_DYNAMIC_STATE_VIEWPORT, VK_DYNAMIC_STATE_SCISSOR }; - - constexpr std::size_t states_count = sizeof(states) / sizeof(VkDynamicState); - VkPipelineDynamicStateCreateInfo dynamic_states{}; - dynamic_states.sType = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO; - dynamic_states.dynamicStateCount = states_count; - dynamic_states.pDynamicStates = states; - - VkViewport viewport{}; - viewport.x = 0.0f; - viewport.y = 0.0f; - viewport.width = (float)renderer.GetFrameBuffer(0).GetWidth(); - viewport.height = (float)renderer.GetFrameBuffer(0).GetHeight(); - viewport.minDepth = 0.0f; - viewport.maxDepth = 1.0f; - - VkRect2D scissor{}; - scissor.offset = { 0, 0 }; - scissor.extent = { renderer.GetFrameBuffer(0).GetWidth(), renderer.GetFrameBuffer(0).GetHeight()}; - - VkPipelineViewportStateCreateInfo viewport_state{}; - viewport_state.sType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO; - viewport_state.viewportCount = 1; - viewport_state.pViewports = &viewport; - viewport_state.scissorCount = 1; - viewport_state.pScissors = &scissor; - - VkPipelineRasterizationStateCreateInfo rasterizer{}; - rasterizer.sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO; - rasterizer.depthClampEnable = VK_FALSE; - rasterizer.rasterizerDiscardEnable = VK_FALSE; - rasterizer.polygonMode = VK_POLYGON_MODE_FILL; - rasterizer.lineWidth = 1.0f; - rasterizer.cullMode = VK_CULL_MODE_NONE; - rasterizer.frontFace = VK_FRONT_FACE_CLOCKWISE; - rasterizer.depthBiasEnable = VK_FALSE; - - VkPipelineMultisampleStateCreateInfo multisampling{}; - multisampling.sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO; - multisampling.sampleShadingEnable = 
VK_FALSE; - multisampling.rasterizationSamples = VK_SAMPLE_COUNT_1_BIT; - - VkPipelineColorBlendAttachmentState color_blend_attachment{}; - color_blend_attachment.colorWriteMask = VK_COLOR_COMPONENT_R_BIT | VK_COLOR_COMPONENT_G_BIT | VK_COLOR_COMPONENT_B_BIT | VK_COLOR_COMPONENT_A_BIT; - color_blend_attachment.blendEnable = VK_TRUE; - color_blend_attachment.srcColorBlendFactor = VK_BLEND_FACTOR_SRC_ALPHA; - color_blend_attachment.dstColorBlendFactor = VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA; - color_blend_attachment.colorBlendOp = VK_BLEND_OP_ADD; - color_blend_attachment.srcAlphaBlendFactor = VK_BLEND_FACTOR_SRC_ALPHA; - color_blend_attachment.dstAlphaBlendFactor = VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA; - color_blend_attachment.alphaBlendOp = VK_BLEND_OP_ADD; - - VkPipelineColorBlendStateCreateInfo color_blending{}; - color_blending.sType = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO; - color_blending.logicOpEnable = VK_FALSE; - color_blending.logicOp = VK_LOGIC_OP_COPY; - color_blending.attachmentCount = 1; - color_blending.pAttachments = &color_blend_attachment; - color_blending.blendConstants[0] = 1.0f; - color_blending.blendConstants[1] = 1.0f; - color_blending.blendConstants[2] = 1.0f; - color_blending.blendConstants[3] = 1.0f; - - VkDescriptorSetLayout layouts[] = { - renderer.GetVertDescriptorSet().GetLayout(), - renderer.GetFragDescriptorSet().GetLayout() - }; - - VkPipelineLayoutCreateInfo pipeline_layout_info{}; - pipeline_layout_info.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO; - pipeline_layout_info.setLayoutCount = 2; - pipeline_layout_info.pSetLayouts = layouts; - pipeline_layout_info.pushConstantRangeCount = 1; - pipeline_layout_info.pPushConstantRanges = &push_constant; - - if(vkCreatePipelineLayout(RenderCore::Get().GetDevice().Get(), &pipeline_layout_info, nullptr, &m_pipeline_layout) != VK_SUCCESS) - FatalError("Vulkan : failed to create a graphics pipeline layout"); - - VkGraphicsPipelineCreateInfo pipeline_info{}; - pipeline_info.sType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO; - pipeline_info.stageCount = stages.size(); - pipeline_info.pStages = stages.data(); - pipeline_info.pVertexInputState = &vertex_input_state_create_info; - pipeline_info.pInputAssemblyState = &input_assembly; - pipeline_info.pViewportState = &viewport_state; - pipeline_info.pRasterizationState = &rasterizer; - pipeline_info.pMultisampleState = &multisampling; - pipeline_info.pColorBlendState = &color_blending; - pipeline_info.pDynamicState = &dynamic_states; - pipeline_info.layout = m_pipeline_layout; - pipeline_info.renderPass = renderer.GetRenderPass().Get(); - pipeline_info.subpass = 0; - pipeline_info.basePipelineHandle = VK_NULL_HANDLE; - - VkResult res = vkCreateGraphicsPipelines(RenderCore::Get().GetDevice().Get(), VK_NULL_HANDLE, 1, &pipeline_info, nullptr, &m_graphics_pipeline); - if(res != VK_SUCCESS) - FatalError("Vulkan : failed to create a graphics pipeline, %", VerbaliseVkResult(res)); - DebugLog("Vulkan : created new graphic pipeline"); - - vkDestroyShaderModule(RenderCore::Get().GetDevice().Get(), fshader, nullptr); - vkDestroyShaderModule(RenderCore::Get().GetDevice().Get(), vshader, nullptr); - } - - void GraphicPipeline::Destroy() noexcept - { - vkDestroyPipeline(RenderCore::Get().GetDevice().Get(), m_graphics_pipeline, nullptr); - vkDestroyPipelineLayout(RenderCore::Get().GetDevice().Get(), m_pipeline_layout, nullptr); - m_graphics_pipeline = VK_NULL_HANDLE; - DebugLog("Vulkan : destroyed a graphics pipeline"); - } -} diff --git 
a/runtime/Sources/Renderer/Pipelines/Shader.cpp b/runtime/Sources/Renderer/Pipelines/Shader.cpp new file mode 100644 index 0000000..63d8873 --- /dev/null +++ b/runtime/Sources/Renderer/Pipelines/Shader.cpp @@ -0,0 +1,83 @@ +#include +#include +#include + +namespace Scop +{ + Shader::Shader(const std::vector& bytecode, ShaderType type, ShaderLayout layout) : m_bytecode(bytecode), m_layout(std::move(layout)) + { + switch(type) + { + case ShaderType::Vertex : m_stage = VK_SHADER_STAGE_VERTEX_BIT; break; + case ShaderType::Fragment : m_stage = VK_SHADER_STAGE_FRAGMENT_BIT; break; + case ShaderType::Compute : m_stage = VK_SHADER_STAGE_COMPUTE_BIT; break; + + default : FatalError("wtf"); break; + } + m_module = kvfCreateShaderModule(RenderCore::Get().GetDevice(), m_bytecode.data(), m_bytecode.size() * 4); + DebugLog("Vulkan : shader module created"); + + GeneratePipelineLayout(m_layout); + } + + void Shader::GeneratePipelineLayout(ShaderLayout layout) + { + for(auto& [n, set] : layout.set_layouts) + { + std::vector bindings(set.binds.size()); + for(std::size_t i = 0; i < set.binds.size(); i++) + { + bindings[i].binding = set.binds[i].first; + bindings[i].descriptorCount = 1; + bindings[i].descriptorType = set.binds[i].second; + bindings[i].pImmutableSamplers = nullptr; + bindings[i].stageFlags = m_stage; + } + m_set_layouts.emplace_back(kvfCreateDescriptorSetLayout(RenderCore::Get().GetDevice(), bindings.data(), bindings.size())); + DebugLog("Vulkan : descriptor set layout created"); + m_pipeline_layout_part.set_layouts.push_back(m_set_layouts.back()); + } + + std::size_t i = 0; + std::vector push_constants(layout.push_constants.size()); + m_pipeline_layout_part.push_constants.resize(layout.push_constants.size()); + for(auto& pc : layout.push_constants) + { + VkPushConstantRange push_constant_range = {}; + push_constant_range.offset = pc.offset; + push_constant_range.size = pc.size; + push_constant_range.stageFlags = m_stage; + push_constants[i] = push_constant_range; + m_pipeline_layout_part.push_constants[i] = push_constant_range; + i++; + } + } + + Shader::~Shader() + { + kvfDestroyShaderModule(RenderCore::Get().GetDevice(), m_module); + DebugLog("Vulkan : shader module destroyed"); + for(auto& layout : m_set_layouts) + { + kvfDestroyDescriptorSetLayout(RenderCore::Get().GetDevice(), layout); + DebugLog("Vulkan : descriptor set layout destroyed"); + } + } + + std::shared_ptr LoadShaderFromFile(const std::filesystem::path& filepath, ShaderType type, ShaderLayout layout) + { + std::ifstream stream(filepath, std::ios::binary); + if(!stream.is_open()) + FatalError("Renderer : unable to open a spirv shader file, %", filepath); + std::vector data; + stream.seekg(0); + std::uint32_t part = 0; + while(stream.read(reinterpret_cast(&part), sizeof(part))) + data.push_back(part); + stream.close(); + + std::shared_ptr shader = std::make_shared(data, type, layout); + DebugLog("Vulkan : shader loaded %", filepath); + return shader; + } +} diff --git a/runtime/Sources/Renderer/PixelPut.cpp b/runtime/Sources/Renderer/PixelPut.cpp deleted file mode 100644 index f9743a6..0000000 --- a/runtime/Sources/Renderer/PixelPut.cpp +++ /dev/null @@ -1,67 +0,0 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* PixelPut.cpp :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2023/03/31 15:14:50 by maldavid #+# #+# */ -/* Updated: 2024/04/24 01:46:06 by maldavid ### ########.fr */ -/* */ -/* 
************************************************************************** */ - -#include - -#include - -namespace mlx -{ - void PixelPutPipeline::Init(std::uint32_t width, std::uint32_t height, Renderer& renderer) noexcept - { - MLX_PROFILE_FUNCTION(); - m_texture.Create(nullptr, width, height, VK_FORMAT_R8G8B8A8_UNORM, "__mlx_pixel_put_pipeline_texture", true); - m_texture.SetDescriptor(renderer.GetFragDescriptorSet().Duplicate()); - - m_buffer.Create(BufferType::HighDynamic, sizeof(std::uint32_t) * (width * height), VK_BUFFER_USAGE_TRANSFER_SRC_BIT, "__mlx_pixel_put_pipeline_texture"); - m_buffer.MapMem(&_buffer_map); - m_cpu_map = std::vector(height * width + 1, 0); - m_width = width; - m_height = height; - } - - void PixelPutPipeline::SetPixel(int x, int y, std::uint32_t color) noexcept - { - MLX_PROFILE_FUNCTION(); - if(x < 0 || y < 0 || x > static_cast(m_width) || y > static_cast(m_height)) - return; - m_cpu_map[(y * m_width) + x] = color; - m_has_been_modified = true; - } - - void PixelPutPipeline::Clear() - { - MLX_PROFILE_FUNCTION(); - m_cpu_map.assign(m_width * m_height, 0); - m_has_been_modified = true; - } - - void PixelPutPipeline::Render(Renderer& renderer) noexcept - { - MLX_PROFILE_FUNCTION(); - if(m_has_been_modified) - { - std::memcpy(m_buffer_map, m_cpu_map.data(), sizeof(std::uint32_t) * m_cpu_map.size()); - m_texture.CopyFromBuffer(m_buffer); - m_has_been_modified = false; - } - m_texture.UpdateSet(0); - m_texture.Render(renderer, 0, 0); - } - - void PixelPutPipeline::Destroy() noexcept - { - MLX_PROFILE_FUNCTION(); - m_buffer.Destroy(); - m_texture.Destroy(); - } -} diff --git a/runtime/Sources/Renderer/RenderCore.cpp b/runtime/Sources/Renderer/RenderCore.cpp new file mode 100644 index 0000000..6623ae3 --- /dev/null +++ b/runtime/Sources/Renderer/RenderCore.cpp @@ -0,0 +1,78 @@ +#define KVF_IMPLEMENTATION +#ifdef DEBUG + #define KVF_ENABLE_VALIDATION_LAYERS +#endif +#include + +#include +#include +#include +#include +#include + +namespace mlx +{ + static VulkanLoader loader; + + void ErrorCallback(const char* message) noexcept + { + FatalError(message); + std::cout << std::endl; + } + + void ValidationErrorCallback(const char* message) noexcept + { + Error(message); + std::cout << std::endl; + } + + void ValidationWarningCallback(const char* message) noexcept + { + Warning(message); + std::cout << std::endl; + } + + void RenderCore::Init() noexcept + { + kvfSetErrorCallback(&ErrorCallback); + kvfSetValidationErrorCallback(&ValidationErrorCallback); + kvfSetValidationWarningCallback(&ValidationWarningCallback); + + //kvfAddLayer("VK_LAYER_MESA_overlay"); + + Window window(1, 1, "", true); + std::vector instance_extentions = window.GetRequiredVulkanInstanceExtentions(); + + m_instance = kvfCreateInstance(instance_extensions.data(), instance_extensions.size()); + DebugLog("Vulkan : instance created"); + + loader.LoadInstance(m_instance); + + VkSurfaceKHR surface = window.CreateVulkanSurface(m_instance); + + m_physical_device = kvfPickGoodDefaultPhysicalDevice(m_instance, surface); + + // just for style + VkPhysicalDeviceProperties props; + vkGetPhysicalDeviceProperties(m_physical_device, &props); + DebugLog("Vulkan : physical device picked '%'", props.deviceName); + + const char* device_extensions[] = { VK_KHR_SWAPCHAIN_EXTENSION_NAME }; + VkPhysicalDeviceFeatures features{}; + vkGetPhysicalDeviceFeatures(m_physical_device, &features); + m_device = kvfCreateDevice(m_physical_device, device_extensions, sizeof(device_extensions) / sizeof(device_extensions[0]), &features); 
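
As a rough sketch of what a helper like kvfCreateDevice() has to cover with the raw Vulkan API (the real implementation lives in KVF and may differ): logical-device creation boils down to picking a queue family and filling a VkDeviceCreateInfo. The helper names PickGraphicsQueueFamily and CreateLogicalDevice below are illustrative only and are not part of this patch; present-queue selection against the surface is omitted for brevity.

#include <vulkan/vulkan.h>
#include <vector>
#include <cstdint>

// Pick the first queue family that supports graphics work (falls back to family 0).
static std::uint32_t PickGraphicsQueueFamily(VkPhysicalDevice gpu)
{
	std::uint32_t count = 0;
	vkGetPhysicalDeviceQueueFamilyProperties(gpu, &count, nullptr);
	std::vector<VkQueueFamilyProperties> families(count);
	vkGetPhysicalDeviceQueueFamilyProperties(gpu, &count, families.data());
	for(std::uint32_t i = 0; i < count; i++)
	{
		if(families[i].queueFlags & VK_QUEUE_GRAPHICS_BIT)
			return i;
	}
	return 0;
}

// Create one logical device with a single graphics queue and the swapchain extension.
static VkDevice CreateLogicalDevice(VkPhysicalDevice gpu, const VkPhysicalDeviceFeatures* features)
{
	const char* extensions[] = { VK_KHR_SWAPCHAIN_EXTENSION_NAME };
	float priority = 1.0f;

	VkDeviceQueueCreateInfo queue_info{};
	queue_info.sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
	queue_info.queueFamilyIndex = PickGraphicsQueueFamily(gpu);
	queue_info.queueCount = 1;
	queue_info.pQueuePriorities = &priority;

	VkDeviceCreateInfo device_info{};
	device_info.sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO;
	device_info.queueCreateInfoCount = 1;
	device_info.pQueueCreateInfos = &queue_info;
	device_info.enabledExtensionCount = 1;
	device_info.ppEnabledExtensionNames = extensions;
	device_info.pEnabledFeatures = features;

	VkDevice device = VK_NULL_HANDLE;
	if(vkCreateDevice(gpu, &device_info, nullptr, &device) != VK_SUCCESS)
		return VK_NULL_HANDLE;
	return device;
}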
+ DebugLog("Vulkan : logical device created"); + + vkDestroySurfaceKHR(m_instance, surface, nullptr); + window.Destroy(); + } + + void RenderCore::Destroy() noexcept + { + WaitDeviceIdle(); + kvfDestroyDevice(m_device); + DebugLog("Vulkan : logical device destroyed"); + kvfDestroyInstance(m_instance); + DebugLog("Vulkan : instance destroyed"); + } +} diff --git a/runtime/Sources/Renderer/RenderPasses/2DPass.cpp b/runtime/Sources/Renderer/RenderPasses/2DPass.cpp new file mode 100644 index 0000000..28f199e --- /dev/null +++ b/runtime/Sources/Renderer/RenderPasses/2DPass.cpp @@ -0,0 +1,112 @@ +#include +#include +#include +#include +#include +#include +#include + +namespace mlx +{ + struct SpriteData + { + Vec4f color; + Vec2f position; + }; + + void Render2DPass::Init() + { + ShaderLayout vertex_shader_layout( + { + { 0, + ShaderSetLayout({ + { 0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER } + }) + } + }, { ShaderPushConstantLayout({ 0, sizeof(SpriteData) }) } + ); + std::vector vertex_shader_code = { + #include + }; + p_vertex_shader = std::make_shared(vertex_shader_code, ShaderType::Vertex, std::move(vertex_shader_layout)); + ShaderLayout fragment_shader_layout( + { + { 1, + ShaderSetLayout({ + { 0, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER } + }) + } + }, {} + ); + std::vector fragment_shader_code = { + #include + }; + p_fragment_shader = std::make_shared(fragment_shader, ShaderType::Fragment, std::move(fragment_shader_layout)); + + std::function functor = [this](const EventBase& event) + { + if(event.What() == Event::ResizeEventCode) + m_pipeline.Destroy(); + }; + EventBus::RegisterListener({ functor, "__ScopRender2DPass" }); + + p_viewer_data_set = std::make_shared(p_vertex_shader->GetShaderLayout().set_layouts[0].second, p_vertex_shader->GetPipelineLayout().set_layouts[0], ShaderType::Vertex); + p_texture_set = std::make_shared(p_fragment_shader->GetShaderLayout().set_layouts[0].second, p_fragment_shader->GetPipelineLayout().set_layouts[0], ShaderType::Fragment); + + p_viewer_data_buffer = std::make_shared(); + p_viewer_data_buffer->Init(sizeof(ViewerData2D)); + for(std::size_t i = 0; i < MAX_FRAMES_IN_FLIGHT; i++) + { + p_viewer_data_set->SetUniformBuffer(i, 0, p_viewer_data_buffer->Get(i)); + p_viewer_data_set->Update(i); + } + } + + void Render2DPass::Pass(Scene& scene, Renderer& renderer, Texture& render_target) + { + if(m_pipeline.GetPipeline() == VK_NULL_HANDLE) + { + GraphicPipelineDescriptor pipeline_descriptor; + pipeline_descriptor.vertex_shader = p_vertex_shader; + pipeline_descriptor.fragment_shader = p_fragment_shader; + pipeline_descriptor.color_attachments = { &render_target }; + pipeline_descriptor.clear_color_attachments = false; + m_pipeline.Init(pipeline_descriptor); + } + + std::uint32_t frame_index = renderer.GetCurrentFrameIndex(); + + ViewerData viewer_data; + viewer_data.projection = Mat4f::Ortho(0.0f, render_target.GetWidth(), render_target.GetHeight(), 0.0f); + static CPUBuffer buffer(sizeof(ViewerData2D)); + std::memcpy(buffer.GetData(), &viewer_data, buffer.GetSize()); + p_viewer_data_buffer->SetData(buffer, frame_index); + + VkCommandBuffer cmd = renderer.GetActiveCommandBuffer(); + m_pipeline.BindPipeline(cmd, 0, {}); + for(auto sprite : scene.GetSprites()) + { + SpriteData sprite_data; + sprite_data.position = Vec2f{ static_cast(sprite->GetPosition().x), static_cast(sprite->GetPosition().y) }; + sprite_data.color = sprite->GetColor(); + if(!sprite->IsSetInit()) + sprite->UpdateDescriptorSet(*p_texture_set); + sprite->Bind(frame_index, cmd); + std::array sets = { 
p_viewer_data_set->GetSet(frame_index), sprite->GetSet(frame_index) }; + vkCmdBindDescriptorSets(cmd, m_pipeline.GetPipelineBindPoint(), m_pipeline.GetPipelineLayout(), 0, sets.size(), sets.data(), 0, nullptr); + vkCmdPushConstants(cmd, m_pipeline.GetPipelineLayout(), VK_SHADER_STAGE_VERTEX_BIT, 0, sizeof(SpriteData), &sprite_data); + sprite->GetMesh()->Draw(cmd, renderer.GetDrawCallsCounterRef(), renderer.GetPolygonDrawnCounterRef()); + } + m_pipeline.EndPipeline(cmd); + } + + void Render2DPass::Destroy() + { + m_pipeline.Destroy(); + p_vertex_shader.reset(); + p_fragment_shader.reset(); + p_viewer_data_set.reset(); + p_viewer_data_buffer->Destroy(); + p_texture_set.reset(); + } +} diff --git a/runtime/Sources/Renderer/RenderPasses/FinalPass.cpp b/runtime/Sources/Renderer/RenderPasses/FinalPass.cpp new file mode 100644 index 0000000..d3b5cbe --- /dev/null +++ b/runtime/Sources/Renderer/RenderPasses/FinalPass.cpp @@ -0,0 +1,76 @@ +#include +#include +#include +#include +#include +#include + +namespace mlx +{ + void FinalPass::Init() + { + ShaderLayout vertex_shader_layout( + {}, {} + ); + std::vector vertex_shader_code = { + #include + }; + p_vertex_shader = std::make_shared(vertex_shader_code, ShaderType::Vertex, std::move(vertex_shader_layout)); + ShaderLayout fragment_shader_layout( + { + { 0, + ShaderSetLayout({ + { 0, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER } + }) + } + }, {} + ); + std::vector fragment_shader_code = { + #include + }; + p_fragment_shader = std::make_shared(fragment_shader_code, ShaderType::Fragment, std::move(fragment_shader_layout)); + + std::function functor = [this](const EventBase& event) + { + if(event.What() == Event::ResizeEventCode) + m_pipeline.Destroy(); + }; + EventBus::RegisterListener({ functor, "__ScopFinalPass" }); + + p_set = std::make_shared(p_fragment_shader->GetShaderLayout().set_layouts[0].second, p_fragment_shader->GetPipelineLayout().set_layouts[0], ShaderType::Fragment); + } + + void FinalPass::Pass(Scene& scene, Renderer& renderer, Texture& render_target) + { + if(m_pipeline.GetPipeline() == VK_NULL_HANDLE) + { + GraphicPipelineDescriptor pipeline_descriptor; + pipeline_descriptor.vertex_shader = p_vertex_shader; + pipeline_descriptor.fragment_shader = p_fragment_shader; + pipeline_descriptor.renderer = &renderer; + pipeline_descriptor.no_vertex_inputs = true; + m_pipeline.Init(pipeline_descriptor); + } + + VkCommandBuffer cmd = renderer.GetActiveCommandBuffer(); + + p_set->SetImage(renderer.GetCurrentFrameIndex(), 0, render_target); + p_set->Update(renderer.GetCurrentFrameIndex(), cmd); + + m_pipeline.BindPipeline(cmd, renderer.GetSwapchainImageIndex(), { 0.0f, 0.0f, 0.0f, 1.0f }); + VkDescriptorSet set = p_set->GetSet(renderer.GetCurrentFrameIndex()); + vkCmdBindDescriptorSets(cmd, m_pipeline.GetPipelineBindPoint(), m_pipeline.GetPipelineLayout(), 0, 1, &set, 0, nullptr); + vkCmdDraw(cmd, 3, 1, 0, 0); + renderer.GetDrawCallsCounterRef()++; + renderer.GetPolygonDrawnCounterRef()++; + m_pipeline.EndPipeline(cmd); + } + + void FinalPass::Destroy() + { + m_pipeline.Destroy(); + p_vertex_shader.reset(); + p_fragment_shader.reset(); + p_set.reset(); + } +} diff --git a/runtime/Sources/Renderer/RenderPasses/Passes.cpp b/runtime/Sources/Renderer/RenderPasses/Passes.cpp new file mode 100644 index 0000000..a2a3e45 --- /dev/null +++ b/runtime/Sources/Renderer/RenderPasses/Passes.cpp @@ -0,0 +1,45 @@ +#include +#include +#include +#include + +namespace mlx +{ + void RenderPasses::Init() + { + m_2Dpass.Init(); + m_final.Init(); + } + + void 
RenderPasses::Pass(Scene& scene, Renderer& renderer) + { + if(!m_main_render_texture.IsInit()) + { + std::function functor = [this, renderer](const EventBase& event) + { + if(event.What() == Event::ResizeEventCode) + { + m_main_render_texture.Destroy(); + auto extent = kvfGetSwapchainImagesSize(renderer.GetSwapchain()); + m_main_render_texture.Init({}, extent.width, extent.height); + } + }; + EventBus::RegisterListener({ functor, "__ScopRenderPasses" }); + auto extent = kvfGetSwapchainImagesSize(renderer.GetSwapchain()); + + m_main_render_texture.Init({}, extent.width, extent.height); + } + + m_main_render_texture.Clear(renderer.GetActiveCommandBuffer(), Vec4f{ 0.0f, 0.0f, 0.0f, 1.0f }); + + m_2Dpass.Pass(scene, renderer, m_main_render_texture); + m_final.Pass(scene, renderer, m_main_render_texture); + } + + void RenderPasses::Destroy() + { + m_2Dpass.Destroy(); + m_final.Destroy(); + m_main_render_texture.Destroy(); + } +} diff --git a/runtime/Sources/Renderer/Renderer.cpp b/runtime/Sources/Renderer/Renderer.cpp index 0c131d0..00f09b2 100644 --- a/runtime/Sources/Renderer/Renderer.cpp +++ b/runtime/Sources/Renderer/Renderer.cpp @@ -1,195 +1,139 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* Renderer.cpp :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2022/12/18 17:25:16 by maldavid #+# #+# */ -/* Updated: 2024/04/24 01:58:51 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - #include - #include -#include -#include +#include +#include +#include +#include namespace mlx { - void Renderer::Init(NonOwningPtr render_target) + namespace Internal { - MLX_PROFILE_FUNCTION(); - if(!render_target) + struct ResizeEventBroadcast : public EventBase { - m_surface.Create(*this); - m_swapchain.Init(this); - m_pass.Init(m_swapchain.GetImagesFormat(), VK_IMAGE_LAYOUT_PRESENT_SRC_KHR); - for(std::size_t i = 0; i < m_swapchain.GetImagesNumber(); i++) - m_framebuffers.emplace_back().Init(m_pass, m_swapchain.GetImage(i)); - } - else + Event What() const override { return Event::ResizeEventCode; } + }; + + struct FrameBeginEventBroadcast : public EventBase { - m_render_target = render_target; - m_render_target->TransitionLayout(VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL); - m_pass.Init(m_render_target->GetFormat(), m_render_target->GetLayout()); - m_framebuffers.emplace_back().Init(m_pass, *static_cast(m_render_target)); - } - m_cmd.Init(); + Event What() const override { return Event::FrameBeginEventCode; } + }; + } + + void Renderer::Init(NonOwningPtr window) + { + std::function functor = [this](const EventBase& event) + { + if(event.What() == Event::ResizeEventCode) + this->RequireFramebufferResize(); + }; + EventBus::RegisterListener({ functor, "__ScopRenderer" }); + + p_window = window; + + auto& render_core = RenderCore::Get(); + m_surface = p_window->CreateVulkanSurface(render_core.GetInstance()); + DebugLog("Vulkan : surface created"); + + CreateSwapchain(); for(std::size_t i = 0; i < MAX_FRAMES_IN_FLIGHT; i++) { - m_render_finished_semaphores[i].Init(); - m_image_available_semaphores[i].Init(); + m_image_available_semaphores[i] = kvfCreateSemaphore(render_core.GetDevice()); + DebugLog("Vulkan : image available semaphore created"); + m_render_finished_semaphores[i] = kvfCreateSemaphore(render_core.GetDevice()); + DebugLog("Vulkan : render finished semaphore created"); + m_cmd_buffers[i] = 
kvfCreateCommandBuffer(render_core.GetDevice()); + DebugLog("Vulkan : command buffer created"); + m_cmd_fences[i] = kvfCreateFence(render_core.GetDevice()); + DebugLog("Vulkan : fence created"); } - - m_uniform_buffer.reset(new UniformBuffer); - #ifdef DEBUG - m_uniform_buffer->Create(this, sizeof(glm::mat4), "__mlx_matrices_uniform_buffer_"); - #else - m_uniform_buffer->Create(this, sizeof(glm::mat4), nullptr); - #endif - - DescriptorSetLayout vert_layout; - DescriptorSetLayout frag_layout; - - vert_layout.Init({ - {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER} - }, VK_SHADER_STAGE_VERTEX_BIT); - frag_layout.Init({ - {0, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER} - }, VK_SHADER_STAGE_FRAGMENT_BIT); - - m_vert_set.Init(this, &RenderCore::Get().GetDescriptorPool(), std::move(vert_layout)); - m_frag_set.Init(this, &RenderCore::Get().GetDescriptorPool(), std::move(frag_layout)); - - m_vert_set.WriteDescriptor(0, m_uniform_buffer.Get()); - - m_pipeline.Init(*this); - - m_framebuffer_resized = false; } bool Renderer::BeginFrame() { - MLX_PROFILE_FUNCTION(); - auto device = RenderCore::Get().GetDevice().Get(); - - if(!m_render_target) + kvfWaitForFence(RenderCore::Get().GetDevice(), m_cmd_fences[m_current_frame_index]); + VkResult result = vkAcquireNextImageKHR(RenderCore::Get().GetDevice(), m_swapchain, UINT64_MAX, m_image_available_semaphores[m_current_frame_index], VK_NULL_HANDLE, &m_swapchain_image_index); + if(result == VK_ERROR_OUT_OF_DATE_KHR) { - m_cmd.GetCmdBuffer(m_current_frame_index).WaitForExecution(); - VkResult result = vkAcquireNextImageKHR(device, m_swapchain.Get(), UINT64_MAX, m_image_available_semaphores[m_current_frame_index].Get(), VK_NULL_HANDLE, &m_image_index); - - if(result == VK_ERROR_OUT_OF_DATE_KHR) - { - RecreateRenderData(); - return false; - } - else if(result != VK_SUCCESS && result != VK_SUBOPTIMAL_KHR) - FatalError("Vulkan error : failed to acquire swapchain image"); - } - else - { - m_image_index = 0; - if(m_render_target->GetLayout() != VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL) - m_render_target->TransitionLayout(VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL); + DestroySwapchain(); + CreateSwapchain(); + EventBus::SendBroadcast(Internal::ResizeEventBroadcast{}); + return false; } + else if(result != VK_SUCCESS && result != VK_SUBOPTIMAL_KHR) + FatalError("Vulkan error : failed to acquire swapchain image, %", kvfVerbaliseVkResult(result)); - m_cmd.GetCmdBuffer(m_current_frame_index).Reset(); - m_cmd.GetCmdBuffer(m_current_frame_index).BeginRecord(); - auto& fb = _framebuffers[_image_index]; - m_pass.Begin(GetActiveCmdBuffer(), fb); - - m_pipeline.BindPipeline(m_cmd.GetCmdBuffer(m_current_frame_index)); - - VkViewport viewport{}; - viewport.x = 0.0f; - viewport.y = 0.0f; - viewport.width = static_cast(fb.GetWidth()); - viewport.height = static_cast(fb.GetHeight()); - viewport.minDepth = 0.0f; - viewport.maxDepth = 1.0f; - vkCmdSetViewport(m_cmd.GetCmdBuffer(m_current_frame_index).Get(), 0, 1, &viewport); - - VkRect2D scissor{}; - scissor.offset = { 0, 0 }; - scissor.extent = { fb.GetWidth(), fb.GetHeight()}; - vkCmdSetScissor(m_cmd.GetCmdBuffer(m_current_frame_index).Get(), 0, 1, &scissor); - + vkResetCommandBuffer(m_cmd_buffers[m_current_frame_index], 0); + kvfBeginCommandBuffer(m_cmd_buffers[m_current_frame_index], 0); + m_drawcalls = 0; + m_polygons_drawn = 0; + EventBus::SendBroadcast(Internal::FrameBeginEventBroadcast{}); return true; } void Renderer::EndFrame() { - MLX_PROFILE_FUNCTION(); - m_pass.End(GetActiveCmdBuffer()); - 
m_cmd.GetCmdBuffer(m_current_frame_index).EndRecord(); - - if(!m_render_target) + VkPipelineStageFlags wait_stages[] = { VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT }; + kvfEndCommandBuffer(m_cmd_buffers[m_current_frame_index]); + kvfSubmitCommandBuffer(RenderCore::Get().GetDevice(), m_cmd_buffers[m_current_frame_index], KVF_GRAPHICS_QUEUE, m_render_finished_semaphores[m_current_frame_index], m_image_available_semaphores[m_current_frame_index], m_cmd_fences[m_current_frame_index], wait_stages); + if(!kvfQueuePresentKHR(RenderCore::Get().GetDevice(), m_render_finished_semaphores[m_current_frame_index], m_swapchain, m_swapchain_image_index) || m_framebuffers_resize) { - m_cmd.GetCmdBuffer(m_current_frame_index).Submit(&m_render_finished_semaphores[m_current_frame_index]); - - VkSwapchainKHR swapchain = m_swapchain.Get(); - VkSemaphore signal_semaphores[] = { m_render_finished_semaphores[m_current_frame_index].Get() }; - - VkPresentInfoKHR present_info{}; - present_info.sType = VK_STRUCTURE_TYPE_PRESENT_INFO_KHR; - present_info.waitSemaphoreCount = 1; - present_info.pWaitSemaphores = signal_semaphores; - present_info.swapchainCount = 1; - present_info.pSwapchains = &swapchain; - present_info.pImageIndices = &m_image_index; - VkResult result = vkQueuePresentKHR(RenderCore::Get().GetQueue().GetPresent(), &present_info); - if(result == VK_ERROR_OUT_OF_DATE_KHR || result == VK_SUBOPTIMAL_KHR || m_framebuffer_resized) - { - m_framebuffer_resized = false; - RecreateRenderData(); - } - else if(result != VK_SUCCESS) - FatalError("Vulkan error : failed to present swap chain image"); - m_current_frame_index = (m_current_frame_index + 1) % MAX_FRAMES_IN_FLIGHT; - } - else - { - m_cmd.GetCmdBuffer(m_current_frame_index).SubmitIdle(true); - m_current_frame_index = 0; + m_framebuffers_resize = false; + DestroySwapchain(); + CreateSwapchain(); + EventBus::SendBroadcast(Internal::ResizeEventBroadcast{}); } + m_current_frame_index = (m_current_frame_index + 1) % MAX_FRAMES_IN_FLIGHT; + kvfResetDeviceDescriptorPools(RenderCore::Get().GetDevice()); } - void Renderer::RecreateRenderData() + void Renderer::CreateSwapchain() { - m_swapchain.Recreate(); - m_pass.Destroy(); - m_pass.Init(m_swapchain.GetImagesFormat(), VK_IMAGE_LAYOUT_PRESENT_SRC_KHR); - for(auto& fb : m_framebuffers) - fb.Destroy(); - m_framebuffers.clear(); - for(std::size_t i = 0; i < m_swapchain.GetImagesNumber(); i++) - m_framebuffers.emplace_back().Init(m_pass, m_swapchain.GetImage(i)); + Vec2ui drawable_size = p_window->GetVulkanDrawableSize(); + VkExtent2D extent = { drawable_size.x, drawable_size.y }; + m_swapchain = kvfCreateSwapchainKHR(RenderCore::Get().GetDevice(), RenderCore::Get().GetPhysicalDevice(), m_surface, extent, false); + + std::uint32_t images_count = kvfGetSwapchainImagesCount(m_swapchain); + std::vector tmp(images_count); + m_swapchain_images.resize(images_count); + vkGetSwapchainImagesKHR(RenderCore::Get().GetDevice(), m_swapchain, &images_count, tmp.data()); + for(std::size_t i = 0; i < images_count; i++) + { + m_swapchain_images[i].Init(tmp[i], kvfGetSwapchainImagesFormat(m_swapchain), extent.width, extent.height); + m_swapchain_images[i].TransitionLayout(VK_IMAGE_LAYOUT_PRESENT_SRC_KHR); + m_swapchain_images[i].CreateImageView(VK_IMAGE_VIEW_TYPE_2D, VK_IMAGE_ASPECT_COLOR_BIT); + } + DebugLog("Vulkan : swapchain created"); } - void Renderer::Destroy() + void Renderer::DestroySwapchain() { - MLX_PROFILE_FUNCTION(); - vkDeviceWaitIdle(RenderCore::Get().GetDevice().Get()); + RenderCore::Get().WaitDeviceIdle(); + for(Image& 
img : m_swapchain_images) + img.DestroyImageView(); + kvfDestroySwapchainKHR(RenderCore::Get().GetDevice(), m_swapchain); + DebugLog("Vulkan : swapchain destroyed"); + } - m_pipeline.Destroy(); - m_uniform_buffer->Destroy(); - m_vert_layout.Destroy(); - m_frag_layout.Destroy(); - m_frag_set.Destroy(); - m_vert_set.Destroy(); - m_cmd.Destroy(); - m_pass.Destroy(); - if(!m_render_target) + void Renderer::Destroy() noexcept + { + auto& render_core = RenderCore::Get(); + render_core.WaitDeviceIdle(); + + for(std::size_t i = 0; i < MAX_FRAMES_IN_FLIGHT; i++) { - m_swapchain.Destroy(); - m_surface.Destroy(); + kvfDestroySemaphore(render_core.GetDevice(), m_image_available_semaphores[i]); + DebugLog("Vulkan : image available semaphore destroyed"); + kvfDestroySemaphore(render_core.GetDevice(), m_render_finished_semaphores[i]); + DebugLog("Vulkan : render finished semaphore destroyed"); + kvfDestroyFence(render_core.GetDevice(), m_cmd_fences[i]); + DebugLog("Vulkan : fence destroyed"); } - for(auto& fb : m_framebuffers) - fb.Destroy(); - for(int i = 0; i < MAX_FRAMES_IN_FLIGHT; i++) - m_semaphores[i].Destroy(); + + DestroySwapchain(); + vkDestroySurfaceKHR(render_core.GetInstance(), m_surface, nullptr); + DebugLog("Vulkan : surface destroyed"); + m_surface = VK_NULL_HANDLE; } } diff --git a/runtime/Sources/Renderer/Renderpass/Framebuffer.cpp b/runtime/Sources/Renderer/Renderpass/Framebuffer.cpp deleted file mode 100644 index a84b4a3..0000000 --- a/runtime/Sources/Renderer/Renderpass/Framebuffer.cpp +++ /dev/null @@ -1,49 +0,0 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* Framebuffer.cpp :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2022/10/06 18:18:06 by maldavid #+# #+# */ -/* Updated: 2024/04/23 22:28:07 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - -#include - -#include -#include -#include - -namespace mlx -{ - void FrameBuffer::Init(RenderPass& renderpass, Image& image) - { - VkImageView attachments[] = { image.GetImageView() }; - - m_width = image.GetWidth(); - m_height = image.GetHeight(); - - VkFramebufferCreateInfo framebuffer_info{}; - framebuffer_info.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO; - framebuffer_info.renderPass = renderpass.get(); - framebuffer_info.attachmentCount = 1; - framebuffer_info.pAttachments = attachments; - framebuffer_info.width = m_width; - framebuffer_info.height = m_height; - framebuffer_info.layers = 1; - - VkResult res = vkCreateFramebuffer(RenderCore::Get().GetDevice().Get(), &framebuffer_info, nullptr, &m_framebuffer); - if(res != VK_SUCCESS) - FatalError("Vulkan : failed to create a framebuffer, %s", RCore::verbaliseResultVk(res)); - DebugLog("Vulkan : created new framebuffer"); - } - - void FrameBuffer::Destroy() noexcept - { - vkDestroyFramebuffer(RenderCore::Get().GetDevice().Get(), m_framebuffer, nullptr); - m_framebuffer = VK_NULL_HANDLE; - DebugLog("Vulkan : destroyed a framebuffer"); - } -} diff --git a/runtime/Sources/Renderer/Renderpass/Renderpass.cpp b/runtime/Sources/Renderer/Renderpass/Renderpass.cpp deleted file mode 100644 index b5d25e9..0000000 --- a/runtime/Sources/Renderer/Renderpass/Renderpass.cpp +++ /dev/null @@ -1,117 +0,0 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* Renderpass.cpp :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ 
-/* Created: 2022/10/06 18:21:36 by maldavid #+# #+# */ -/* Updated: 2024/04/23 22:31:09 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - -#include - -#include "vk_render_pass.h" -#include -#include -#include -#include - -namespace mlx -{ - static const VkClearValue clear_color = {{{ 0.f, 0.f, 0.f, 1.0f }}}; // wtf, this mess to satisfy a warning - - void RenderPass::Init(VkFormat attachement_format, VkImageLayout layout) - { - VkAttachmentDescription color_attachment{}; - color_attachment.format = attachement_format; - color_attachment.samples = VK_SAMPLE_COUNT_1_BIT; - color_attachment.loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR; - color_attachment.storeOp = VK_ATTACHMENT_STORE_OP_STORE; - color_attachment.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE; - color_attachment.stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE; - color_attachment.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED; - color_attachment.finalLayout = layout; - - VkAttachmentReference color_attachment_ref{}; - colorAttachmentRef.attachment = 0; - colorAttachmentRef.layout = (layout == VK_IMAGE_LAYOUT_PRESENT_SRC_KHR ? VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL : layout); - - VkSubpassDescription subpass1{}; - subpass1.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS; - subpass1.colorAttachmentCount = 1; - subpass1.pColorAttachments = &colorAttachmentRef; - - VkSubpassDescription subpasses[] = { subpass1 }; - - std::vector subpasses_deps; - subpasses_deps.emplace_back(); - subpasses_deps.back().srcSubpass = VK_SUBPASS_EXTERNAL; - subpasses_deps.back().dstSubpass = 0; - subpasses_deps.back().srcStageMask = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT; - subpasses_deps.back().dstStageMask = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT; - subpasses_deps.back().srcAccessMask = VK_ACCESS_MEMORY_READ_BIT; - subpasses_deps.back().dstAccessMask = VK_ACCESS_COLOR_ATTACHMENT_READ_BIT | VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT; - subpasses_deps.back().dependencyFlags = VK_DEPENDENCY_BY_REGION_BIT; - - subpasses_deps.emplace_back(); - subpasses_deps.back().srcSubpass = 0; - subpasses_deps.back().dstSubpass = VK_SUBPASS_EXTERNAL; - subpasses_deps.back().srcStageMask = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT; - subpasses_deps.back().dstStageMask = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT; - subpasses_deps.back().srcAccessMask = VK_ACCESS_COLOR_ATTACHMENT_READ_BIT | VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT; - subpasses_deps.back().dstAccessMask = VK_ACCESS_MEMORY_READ_BIT; - subpasses_deps.back().dependencyFlags = VK_DEPENDENCY_BY_REGION_BIT; - - VkRenderPassCreateInfo render_pass_info{}; - render_pass_info.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO; - render_pass_info.attachmentCount = 1; - render_pass_info.pAttachments = &color_attachment; - render_pass_info.subpassCount = sizeof(subpasses) / sizeof(VkSubpassDescription); - render_pass_info.pSubpasses = subpasses; - render_pass_info.dependencyCount = static_cast(subpasses_deps.size()); - render_pass_info.pDependencies = subpasses_deps.data(); - - VkResult res = vkCreateRenderPass(RenderCore::Get().GetDevice().Get(), &render_pass_info, nullptr, &m_render_pass); - if(res != VK_SUCCESS) - FatalError("Vulkan : failed to create render pass, %", VerbaliseVkResult(res)); - DebugLog("Vulkan : created new render pass"); - } - - void RenderPass::Begin(class CommandBuffer& cmd, class FrameBuffer& fb) - { - MLX_PROFILE_FUNCTION(); - if(m_is_running) - return; - - VkRenderPassBeginInfo render_pass_info{}; - render_pass_info.sType = 
VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO; - render_pass_info.renderPass = m_render_pass; - render_pass_info.framebuffer = fb.Get(); - render_pass_info.renderArea.offset = { 0, 0 }; - render_pass_info.renderArea.extent = { fb.GetWidth(), fb.GetHeight() }; - render_pass_info.clearValueCount = 1; - render_pass_info.pClearValues = &clear_color; - - vkCmdBeginRenderPass(cmd.Get(), &render_pass_info, VK_SUBPASS_CONTENTS_INLINE); - - m_is_running = true; - } - - void RenderPass::End(class CommandBuffer& cmd) - { - MLX_PROFILE_FUNCTION(); - if(!m_is_running) - return; - vkCmdEndRenderPass(cmd.Get()); - m_is_running = false; - } - - void RenderPass::Destroy() noexcept - { - vkDestroyRenderPass(RenderCore::Get().GetDevice().Get(), m_render_pass, nullptr); - m_render_pass = VK_NULL_HANDLE; - DebugLog("Vulkan : destroyed a renderpass"); - } -} diff --git a/runtime/Sources/Renderer/Renderpass/Swapchain.cpp b/runtime/Sources/Renderer/Renderpass/Swapchain.cpp deleted file mode 100644 index ffc5df9..0000000 --- a/runtime/Sources/Renderer/Renderpass/Swapchain.cpp +++ /dev/null @@ -1,150 +0,0 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* Swapchain.cpp :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2022/10/06 18:22:28 by maldavid #+# #+# */ -/* Updated: 2024/04/23 22:43:10 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - -#include - -#include -#include -#include - -namespace mlx -{ - void SwapChain::Init(NonOwningPtr renderer) - { - VkDevice device = RenderCore::Get().GetDevice().Get(); - - m_renderer = renderer; - m_swapchain_support = QuerySwapChainSupport(RenderCore::Get().GetDevice().GetPhysicalDevice()); - - VkSurfaceFormatKHR surface_format = renderer->GetSurface().ChooseSwapSurfaceFormat(m_swapchain_support.formats); - VkPresentModeKHR present_mode = ChooseSwapPresentMode(m_swapchain_support.present_modes); - m_extent = ChooseSwapExtent(m_swapchain_support.capabilities); - - std::uint32_t image_count = m_swapchain_support.capabilities.minImageCount + 1; - if(m_swapchain_support.capabilities.maxImageCount > 0 && image_count > m_swapchain_support.capabilities.maxImageCount) - image_count = m_swapchain_support.capabilities.maxImageCount; - - Queues::QueueFamilyIndices indices = RenderCore::Get().GetQueue().FindQueueFamilies(RenderCore::Get().GetDevice().GetPhysicalDevice()); - std::uint32_t queue_family_indices[] = { indices.graphics_family.value(), indices.present_family.value() }; - - VkSwapchainCreateInfoKHR create_info{}; - create_info.sType = VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR; - create_info.surface = renderer->GetSurface().Get(); - create_info.minImageCount = image_count; - create_info.imageFormat = surface_format.format; - create_info.imageColorSpace = surface_format.colorSpace; - create_info.imageExtent = m_extent; - create_info.imageArrayLayers = 1; - create_info.imageUsage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT; - create_info.preTransform = m_swapchain_support.capabilities.currentTransform; - create_info.compositeAlpha = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR; - create_info.presentMode = present_mode; - create_info.clipped = VK_TRUE; - create_info.oldSwapchain = VK_NULL_HANDLE; - if(indices.graphics_family != indices.present_family) - { - create_info.imageSharingMode = VK_SHARING_MODE_CONCURRENT; - create_info.queueFamilyIndexCount = 2; - create_info.pQueueFamilyIndices = queue_family_indices; - 
} - else - create_info.imageSharingMode = VK_SHARING_MODE_EXCLUSIVE; - - VkResult res = vkCreateSwapchainKHR(device, &create_info, nullptr, &m_swapchain); - if(res != VK_SUCCESS) - FatalError("Vulkan : failed to create the swapchain, %", VerbaliseVkResult(res)); - - std::vector tmp; - vkGetSwapchainImagesKHR(device, m_swapchain, &image_count, nullptr); - m_images.resize(image_count); - tmp.resize(image_count); - vkGetSwapchainImagesKHR(device, m_swapchain, &image_count, tmp.data()); - - for(std::size_t i = 0; i < image_count; i++) - { - m_images[i].Create(tmp[i], surface_format.format, m_extent.width, m_extent.height); - m_images[i].TransitionLayout(VK_IMAGE_LAYOUT_PRESENT_SRC_KHR); - m_images[i].CreateImageView(VK_IMAGE_VIEW_TYPE_2D, VK_IMAGE_ASPECT_COLOR_BIT); - } - - m_swapchain_image_format = surface_format.format; - DebugLog("Vulkan : created new swapchain"); - } - - SwapChain::SwapChainSupportDetails SwapChain::QuerySwapChainSupport(VkPhysicalDevice device) - { - SwapChain::SwapChainSupportDetails details; - VkSurfaceKHR surface = m_renderer->GetSurface().Get(); - - if(vkGetPhysicalDeviceSurfaceCapabilitiesKHR(device, surface, &details.capabilities) != VK_SUCCESS) - FatalError("Vulkan : unable to retrieve surface capabilities"); - - std::uint32_t format_count = 0; - vkGetPhysicalDeviceSurfaceFormatsKHR(device, surface, &format_count, nullptr); - - if(format_count != 0) - { - details.formats.resize(format_count); - vkGetPhysicalDeviceSurfaceFormatsKHR(device, surface, &format_count, details.formats.data()); - } - - std::uint32_t present_mode_count; - vkGetPhysicalDeviceSurfacePresentModesKHR(device, surface, &present_mode_count, nullptr); - - if(present_mode_count != 0) - { - details.present_modes.resize(present_mode_count); - vkGetPhysicalDeviceSurfacePresentModesKHR(device, surface, &present_mode_count, details.present_modes.data()); - } - - return details; - } - - VkPresentModeKHR SwapChain::ChooseSwapPresentMode([[maybe_unused]] const std::vector& available_present_modes) - { - // in the future, you may choose to activate vsync or not - return VK_PRESENT_MODE_IMMEDIATE_KHR; - } - - VkExtent2D SwapChain::ChooseSwapExtent(const VkSurfaceCapabilitiesKHR& capabilities) - { - if(capabilities.currentExtent.width != std::numeric_limits::max()) - return capabilities.currentExtent; - - int width, height; - glfwGetFramebufferSize(m_renderer->GetWindow()->GetNativeWindow(), &width, &height); - - VkExtent2D actual_extent = { static_cast(width), static_cast(height) }; - - actual_extent.width = std::clamp(actual_extent.width, capabilities.minImageExtent.width, capabilities.maxImageExtent.width); - actual_extent.height = std::clamp(actual_extent.height, capabilities.minImageExtent.height, capabilities.maxImageExtent.height); - - return actual_extent; - } - - void SwapChain::Recreate() - { - Destroy(); - Init(m_renderer); - } - - void SwapChain::Destroy() noexcept - { - if(m_swapchain == VK_NULL_HANDLE) - return; - vkDeviceWaitIdle(RenderCore::Get().GetDevice().Get()); - vkDestroySwapchainKHR(RenderCore::Get().GetDevice().Get(), m_swapchain, nullptr); - m_swapchain = VK_NULL_HANDLE; - for(Image& img : m_images) - img.DestroyImageView(); - } -} diff --git a/runtime/Sources/Renderer/SceneRenderer.cpp b/runtime/Sources/Renderer/SceneRenderer.cpp new file mode 100644 index 0000000..b6f7860 --- /dev/null +++ b/runtime/Sources/Renderer/SceneRenderer.cpp @@ -0,0 +1,23 @@ +#include +#include +#include +#include +#include + +namespace mlx +{ + void SceneRenderer::Init() + { + m_passes.Init(); + } + + void 
SceneRenderer::Render(Scene& scene, Renderer& renderer) + { + m_passes.Pass(scene, renderer); + } + + void SceneRenderer::Destroy() + { + m_passes.Destroy(); + } +} diff --git a/runtime/Sources/Renderer/Texts/Font.cpp b/runtime/Sources/Renderer/Texts/Font.cpp deleted file mode 100644 index 6c93646..0000000 --- a/runtime/Sources/Renderer/Texts/Font.cpp +++ /dev/null @@ -1,88 +0,0 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* Font.cpp :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2023/12/11 22:06:09 by kbz_8 #+# #+# */ -/* Updated: 2024/04/23 22:48:30 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - -#include - -#include -#include - -constexpr const int RANGE = 1024; - -namespace mlx -{ - Font::Font(Renderer& renderer, const std::filesystem::path& path, float scale) : m_name(path.string()), m_renderer(renderer), m_scale(scale) - { - m_build_data = path; - } - - Font::Font(class Renderer& renderer, const std::string& name, const std::vector& ttf_data, float scale) : m_name(name), m_renderer(renderer), m_scale(scale) - { - m_build_data = ttf_data; - } - - void Font::BuildFont() - { - MLX_PROFILE_FUNCTION(); - std::vector file_bytes; - if(std::holds_alternative(m_build_data)) - { - std::ifstream file(std::get(m_build_data), std::ios::binary); - if(!file.is_open()) - { - Error("Font load : cannot open font file, %", m_name.c_str()); - return; - } - std::ifstream::pos_type fileSize = std::filesystem::file_size(std::get(m_build_data)); - file.seekg(0, std::ios::beg); - file_bytes.resize(fileSize); - file.read(reinterpret_cast(file_bytes.data()), fileSize); - file.close(); - } - - std::vector tmp_bitmap(RANGE * RANGE); - std::vector vulkan_bitmap(RANGE * RANGE * 4); - stbtt_pack_context pc; - stbtt_PackBegin(&pc, tmp_bitmap.data(), RANGE, RANGE, RANGE, 1, nullptr); - if(std::holds_alternative(m_build_data)) - stbtt_PackFontRange(&pc, file_bytes.data(), 0, m_scale, 32, 96, m_cdata.data()); - else - stbtt_PackFontRange(&pc, std::get>(m_build_data).data(), 0, m_scale, 32, 96, m_cdata.data()); - stbtt_PackEnd(&pc); - for(int i = 0, j = 0; i < RANGE * RANGE; i++, j += 4) - { - vulkan_bitmap[j + 0] = tmp_bitmap[i]; - vulkan_bitmap[j + 1] = tmp_bitmap[i]; - vulkan_bitmap[j + 2] = tmp_bitmap[i]; - vulkan_bitmap[j + 3] = tmp_bitmap[i]; - } - #ifdef DEBUG - m_atlas.Create(vulkan_bitmap.data(), RANGE, RANGE, VK_FORMAT_R8G8B8A8_UNORM, std::string(m_name + "_font_altas").c_str(), true); - #else - m_atlas.Create(vulkan_bitmap.data(), RANGE, RANGE, VK_FORMAT_R8G8B8A8_UNORM, nullptr, true); - #endif - m_atlas.SetDescriptor(m_renderer.GetFragDescriptorSet().Duplicate()); - m_is_init = true; - } - - void Font::Destroy() - { - MLX_PROFILE_FUNCTION(); - m_atlas.Destroy(); - m_is_init = false; - } - - Font::~Font() - { - if(m_is_init) - destroy(); - } -} diff --git a/runtime/Sources/Renderer/Texts/FontLibrary.cpp b/runtime/Sources/Renderer/Texts/FontLibrary.cpp deleted file mode 100644 index eed3b60..0000000 --- a/runtime/Sources/Renderer/Texts/FontLibrary.cpp +++ /dev/null @@ -1,68 +0,0 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* FontLibrary.cpp :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2024/01/18 09:28:14 by maldavid #+# #+# */ -/* Updated: 2024/04/24 01:28:40 by maldavid ### ########.fr */ 
-/* */ -/* ************************************************************************** */ - -#include - -#include -#include -#include - -namespace mlx -{ - std::shared_ptr FontLibrary::GetFontData(FontID id) - { - MLX_PROFILE_FUNCTION(); - if(!m_cache.count(id) || std::find(m_invalid_ids.begin(), m_invalid_ids.end(), id) != m_invalid_ids.end()) - FatalError("Font Library : wrong font ID '%'", id); - return m_cache[id]; - } - - FontID FontLibrary::AddFontToLibrary(std::shared_ptr font) - { - MLX_PROFILE_FUNCTION(); - auto it = std::find_if(m_cache.begin(), m_cache.end(), [&](const std::pair>& v) - { - return v.second->GetScale() == font->GetScale() && - v.second->GetName() == font->GetName() && - std::find(m_invalid_ids.begin(), m_invalid_ids.end(), v.first) == m_invalid_ids.end(); - }); - if(it != m_cache.end()) - return it->first; - font->BuildFont(); - m_cache[m_current_id] = font; - m_current_id++; - return m_current_id - 1; - } - - void FontLibrary::RemoveFontFromLibrary(FontID id) - { - MLX_PROFILE_FUNCTION(); - if(!m_cache.count(id) || std::find(m_invalid_ids.begin(), m_invalid_ids.end(), id) != m_invalid_ids.end()) - { - Warning("Font Library : trying to remove a font with an unkown or invalid ID '%'", id); - return; - } - m_cache[id]->Destroy(); - m_invalid_ids.push_back(id); - } - - void FontLibrary::ClearLibrary() - { - MLX_PROFILE_FUNCTION(); - for(auto& [id, font] : m_cache) - { - font->Destroy(); - m_invalid_ids.push_back(id); - } - // do not `_cache.clear();` as it releases the fonts and may not destroy the texture atlas that is in use by command buffers - } -} diff --git a/runtime/Sources/Renderer/Texts/Text.cpp b/runtime/Sources/Renderer/Texts/Text.cpp deleted file mode 100644 index 162307a..0000000 --- a/runtime/Sources/Renderer/Texts/Text.cpp +++ /dev/null @@ -1,78 +0,0 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* Text.cpp :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2024/01/11 00:11:56 by maldavid #+# #+# */ -/* Updated: 2024/04/24 01:33:58 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - -#include - -#include -#include - -namespace mlx -{ - void Text::Init(std::string text, FontID font, std::uint32_t color, std::vector vbo_data, std::vector ibo_data) - { - MLX_PROFILE_FUNCTION(); - if(m_is_init) - return; - m_text = std::move(text); - m_color = color; - m_font = font; - #ifdef DEBUG - std::string debug_name = m_text; - for(char& c : debug_name) - { - if(c == ' ' || c == '"' || c == '\'') - c = '_'; - } - for(int i = 0; i < MAX_FRAMES_IN_FLIGHT; i++) - m_vbo[i].Create(sizeof(Vertex) * vbo_data.size(), static_cast(vbo_data.data()), debug_name.c_str()); - m_ibo.Create(sizeof(std::uint16_t) * ibo_data.size(), ibo_data.data(), debug_name.c_str()); - #else - for(int i = 0; i < MAX_FRAMES_IN_FLIGHT; i++) - m_vbo[i].Create(sizeof(Vertex) * vbo_data.size(), static_cast(vbo_data.data()), nullptr); - m_ibo.Create(sizeof(std::uint16_t) * ibo_data.size(), ibo_data.data(), nullptr); - #endif - m_is_init = true; - } - - void Text::Bind(Renderer& renderer) noexcept - { - MLX_PROFILE_FUNCTION(); - if(!m_is_init) - return; - m_vbo[renderer.GetActiveImageIndex()].Bind(renderer); - m_ibo.Bind(renderer); - } - - void Text::updateVertexData(int frame, std::vector vbo_data) - { - MLX_PROFILE_FUNCTION(); - if(!m_is_init) - return; - m_vbo[frame].SetData(sizeof(Vertex) * vbo_data.size(), 
static_cast(vbo_data.data())); - } - - void Text::destroy() noexcept - { - MLX_PROFILE_FUNCTION(); - if(!m_is_init) - return; - for(int i = 0; i < MAX_FRAMES_IN_FLIGHT; i++) - m_vbo[i].Destroy(); - m_ibo.Destroy(); - m_is_init = false; - } - - Text::~Text() - { - Destroy(); - } -} diff --git a/runtime/Sources/Renderer/Texts/TextDescriptor.cpp b/runtime/Sources/Renderer/Texts/TextDescriptor.cpp deleted file mode 100644 index b4a3208..0000000 --- a/runtime/Sources/Renderer/Texts/TextDescriptor.cpp +++ /dev/null @@ -1,107 +0,0 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* TextDescriptor.cpp :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2024/01/11 00:23:11 by maldavid #+# #+# */ -/* Updated: 2024/04/24 01:38:40 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - -#include - -#include -#include -#include -#include - -#define STB_RECT_PACK_IMPLEMENTATION -#include - -#include - -#define STB_TRUETYPE_IMPLEMENTATION -#define STB_malloc(x, u) ((void)(u), MemManager::Malloc(x)) -#define STB_free(x, u) ((void)(u), MemManager::Free(x)) -#include - -constexpr const int RANGE = 1024; - -namespace mlx -{ - TextDrawDescriptor::TextDrawDescriptor(std::string text, std::uint32_t _color, int _x, int _y) : color(_color), x(_x), y(_y), m_text(std::move(text)) - {} - - void TextDrawDescriptor::Init(FontID font) noexcept - { - MLX_PROFILE_FUNCTION(); - std::vector vertex_data; - std::vector index_data; - - float stb_x = 0.0f; - float stb_y = 0.0f; - - { - std::shared_ptr font_data = FontLibrary::Get().GetFontData(font); - - for(char c : m_text) - { - if(c < 32) - continue; - - stbtt_aligned_quad q; - stbtt_GetPackedQuad(font_data->GetCharData().data(), RANGE, RANGE, c - 32, &stb_x, &stb_y, &q, 1); - - std::size_t index = vertex_data.size(); - - glm::vec4 vertex_color = { - static_cast((color & 0x000000FF)) / 255.f, - static_cast((color & 0x0000FF00) >> 8) / 255.f, - static_cast((color & 0x00FF0000) >> 16) / 255.f, - static_cast((color & 0xFF000000) >> 24) / 255.f - }; - - vertex_data.emplace_back(glm::vec2{q.x0, q.y0}, vertex_color, glm::vec2{q.s0, q.t0}); - vertex_data.emplace_back(glm::vec2{q.x1, q.y0}, vertex_color, glm::vec2{q.s1, q.t0}); - vertex_data.emplace_back(glm::vec2{q.x1, q.y1}, vertex_color, glm::vec2{q.s1, q.t1}); - vertex_data.emplace_back(glm::vec2{q.x0, q.y1}, vertex_color, glm::vec2{q.s0, q.t1}); - - index_data.emplace_back(index + 0); - index_data.emplace_back(index + 1); - index_data.emplace_back(index + 2); - index_data.emplace_back(index + 2); - index_data.emplace_back(index + 3); - index_data.emplace_back(index + 0); - } - } - std::shared_ptr text_data = std::make_shared(); - text_data->Init(m_text, font, color, std::move(vertex_data), std::move(index_data)); - id = TextLibrary::Get().AddTextToLibrary(text_data); - DebugLog("Text put : registered new text to render"); - } - - void TextDrawDescriptor::Render(Renderer& renderer) - { - MLX_PROFILE_FUNCTION(); - std::shared_ptr draw_data = TextLibrary::Get().GetTextData(id); - std::shared_ptr font_data = FontLibrary::Get().GetFontData(draw_data->GetFontInUse()); - TextureAtlas& atlas = const_cast(font_data->GetAtlas()); - draw_data->Bind(renderer); - if(!atlas.GetSet().IsInit()) - atlas.SetDescriptor(renderer.GetFragDescriptorSet().Duplicate()); - if(!atlas.HasBeenUpdated()) - atlas.UpdateSet(0); - atlas.GetSet().Bind(); - atlas.Render(renderer, x, 
y, draw_data->GetIBOsize()); - } - - void TextDrawDescriptor::ResetUpdate() - { - std::shared_ptr draw_data = TextLibrary::Get().GetTextData(id); - std::shared_ptr font_data = FontLibrary::Get().GetFontData(draw_data->GetFontInUse()); - TextureAtlas& atlas = const_cast(font_data->GetAtlas()); - atlas.ResetUpdate(); - } -} diff --git a/runtime/Sources/Renderer/Texts/TextLibrary.cpp b/runtime/Sources/Renderer/Texts/TextLibrary.cpp deleted file mode 100644 index 5f995d9..0000000 --- a/runtime/Sources/Renderer/Texts/TextLibrary.cpp +++ /dev/null @@ -1,62 +0,0 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* TextLibrary.cpp :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2023/04/10 11:59:57 by maldavid #+# #+# */ -/* Updated: 2024/04/24 01:40:28 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - -#include - -#include -#include -#include - -namespace mlx -{ - std::shared_ptr TextLibrary::GetTextData(TextID id) - { - MLX_PROFILE_FUNCTION(); - if(!m_cache.count(id)) - FatalError("Text Library : wrong text ID '%d'", id); - return m_cache[id]; - } - - TextID TextLibrary::AddTextToLibrary(std::shared_ptr text) - { - MLX_PROFILE_FUNCTION(); - auto it = std::find_if(m_cache.begin(), m_cache.end(), [&](const std::pair>& v) - { - return v.second->GetText() == text->GetText() && v.second->GetColor() == text->GetColor(); - }); - if(it != m_cache.end()) - return it->first; - m_cache[m_current_id] = text; - m_current_id++; - return m_current_id - 1; - } - - void TextLibrary::RemoveTextFromLibrary(TextID id) - { - MLX_PROFILE_FUNCTION(); - if(!m_cache.count(id)) - { - Warning("Text Library : trying to remove a text with an unkown or invalid ID '%d'", id); - return; - } - m_cache[id]->Destroy(); - m_cache.erase(id); - } - - void TextLibrary::ClearLibrary() - { - MLX_PROFILE_FUNCTION(); - for(auto& [id, text] : m_cache) - text->Destroy(); - m_cache.clear(); - } -} diff --git a/runtime/Sources/Renderer/Texts/TextManager.cpp b/runtime/Sources/Renderer/Texts/TextManager.cpp deleted file mode 100644 index b660ea2..0000000 --- a/runtime/Sources/Renderer/Texts/TextManager.cpp +++ /dev/null @@ -1,65 +0,0 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* TextManager.cpp :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2023/04/06 16:41:13 by maldavid #+# #+# */ -/* Updated: 2024/04/24 01:42:19 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - -#include - -#include -#include -#include -#include -#include - -namespace mlx -{ - void TextManager::Init(Renderer& renderer) noexcept - { - MLX_PROFILE_FUNCTION(); - LoadFont(renderer, "default", 6.f); - } - - void TextManager::LoadFont(Renderer& renderer, const std::filesystem::path& filepath, float scale) - { - MLX_PROFILE_FUNCTION(); - std::shared_ptr font; - if(filepath.string() == "default") - font = std::make_shared(renderer, "default", dogica_ttf, scale); - else - font = std::make_shared(renderer, filepath, scale); - m_font_in_use = FontLibrary::Get().AddFontToLibrary(font); - } - - std::pair TextManager::RegisterText(int x, int y, std::uint32_t color, std::string str) - { - MLX_PROFILE_FUNCTION(); - auto res = m_text_descriptors.emplace(std::move(str), color, x, y); - if(res.second) - { - 
const_cast(*res.first).Init(m_font_in_use); - return std::make_pair(static_cast(&const_cast(*res.first)), true); - } - - auto text_ptr = TextLibrary::Get().GetTextData(res.first->id); - if(_font_in_use != text_ptr->GetFontInUse()) - { - // TODO : update text vertex buffers rather than destroying it and recreating it - TextLibrary::Get().RemoveTextFromLibrary(res.first->id); - const_cast(*res.first).Init(_font_in_use); - } - return std::make_pair(static_cast(&const_cast(*res.first)), false); - } - - void TextManager::Destroy() noexcept - { - MLX_PROFILE_FUNCTION(); - m_text_descriptors.clear(); - } -} diff --git a/runtime/Sources/Renderer/Vulkan/VulkanLoader.cpp b/runtime/Sources/Renderer/Vulkan/VulkanLoader.cpp new file mode 100644 index 0000000..df743d1 --- /dev/null +++ b/runtime/Sources/Renderer/Vulkan/VulkanLoader.cpp @@ -0,0 +1,416 @@ +#include +#include + +#ifdef _WIN32 + __declspec(dllimport) HMODULE __stdcall LoadLibraryA(LPCSTR); + __declspec(dllimport) FARPROC __stdcall GetProcAddress(HMODULE, LPCSTR); + __declspec(dllimport) int __stdcall FreeLibrary(HMODULE); +#endif + +#if defined(MLX_COMPILER_GCC) + #define DISABLE_GCC_PEDANTIC_WARNINGS \ + _Pragma("GCC diagnostic push") \ + _Pragma("GCC diagnostic ignored \"-Wpedantic\"") + #define RESTORE_GCC_PEDANTIC_WARNINGS \ + _Pragma("GCC diagnostic pop") +#else + #define DISABLE_GCC_PEDANTIC_WARNINGS + #define RESTORE_GCC_PEDANTIC_WARNINGS +#endif + +namespace mlx +{ + namespace Internal + { + static PFN_vkVoidFunction vkGetInstanceProcAddrStub(Handle context, const char* name) + { + return vkGetInstanceProcAddr((VkInstance)context, name); + } + } + + VulkanLoader::VulkanLoader() + { + #if defined(_WIN32) + p_module = LoadLibraryA("vulkan-1.dll"); + if(!p_module) + FatalError("Vulkan loader : failed to load libvulkan"); + vkGetInstanceProcAddr = (PFN_vkGetInstanceProcAddr)(void(*)(void))GetProcAddress(p_module, "vkGetInstanceProcAddr"); + #elif defined(__APPLE__) + p_module = dlopen("libvulkan.dylib", RTLD_NOW | RTLD_LOCAL); + if(!p_module) + p_module = dlopen("libvulkan.1.dylib", RTLD_NOW | RTLD_LOCAL); + if(!p_module) + p_module = dlopen("libMoltenVK.dylib", RTLD_NOW | RTLD_LOCAL); + + // Add support for using Vulkan and MoltenVK in a Framework. App store rules for iOS + // strictly enforce no .dylib's. 
If they aren't found it just falls through + if(!p_module) + p_module = dlopen("vulkan.framework/vulkan", RTLD_NOW | RTLD_LOCAL); + if(!p_module) + p_module = dlopen("MoltenVK.framework/MoltenVK", RTLD_NOW | RTLD_LOCAL); + + // modern versions of macOS don't search /usr/local/lib automatically contrary to what man dlopen says + // Vulkan SDK uses this as the system-wide installation location, so we're going to fallback to this if all else fails + if(!p_module && getenv("DYLD_FALLBACK_LIBRARY_PATH") == NULL) + p_module = dlopen("/usr/local/lib/libvulkan.dylib", RTLD_NOW | RTLD_LOCAL); + if(!p_module) + FatalError("Vulkan loader : failed to load libvulkan"); + + vkGetInstanceProcAddr = (PFN_vkGetInstanceProcAddr)dlsym(p_module, "vkGetInstanceProcAddr"); + #else + p_module = dlopen("libvulkan.so.1", RTLD_NOW | RTLD_LOCAL); + if(!p_module) + p_module = dlopen("libvulkan.so", RTLD_NOW | RTLD_LOCAL); + if(!p_module) + FatalError("Vulkan loader : failed to load libvulkan"); + DISABLE_GCC_PEDANTIC_WARNINGS + vkGetInstanceProcAddr = (PFN_vkGetInstanceProcAddr)dlsym(p_module, "vkGetInstanceProcAddr"); + RESTORE_GCC_PEDANTIC_WARNINGS + #endif + DebugLog("Vulkan loader : libvulkan loaded"); + LoadGlobalFunctions(nullptr, Internal::vkGetInstanceProcAddrStub); + } + + void VulkanLoader::LoadInstance(VkInstance instance) + { + LoadInstanceFunctions(instance, Internal::vkGetInstanceProcAddrStub); + LoadDeviceFunctions(instance, Internal::vkGetInstanceProcAddrStub); + } + + void VulkanLoader::LoadGlobalFunctions(void* context, PFN_vkVoidFunction (*load)(void*, const char*)) noexcept + { + #if defined(VK_VERSION_1_0) + vkCreateInstance = (PFN_vkCreateInstance)load(context, "vkCreateInstance"); + vkEnumerateInstanceExtensionProperties = (PFN_vkEnumerateInstanceExtensionProperties)load(context, "vkEnumerateInstanceExtensionProperties"); + vkEnumerateInstanceLayerProperties = (PFN_vkEnumerateInstanceLayerProperties)load(context, "vkEnumerateInstanceLayerProperties"); + #endif /* defined(VK_VERSION_1_0) */ + DebugLog("Vulkan loader : global functions loaded"); + } + + void VulkanLoader::LoadInstanceFunctions(void* context, PFN_vkVoidFunction (*load)(void*, const char*)) noexcept + { + #if defined(VK_VERSION_1_0) + vkCreateDevice = (PFN_vkCreateDevice)load(context, "vkCreateDevice"); + vkDestroyInstance = (PFN_vkDestroyInstance)load(context, "vkDestroyInstance"); + vkEnumerateDeviceExtensionProperties = (PFN_vkEnumerateDeviceExtensionProperties)load(context, "vkEnumerateDeviceExtensionProperties"); + vkEnumerateDeviceLayerProperties = (PFN_vkEnumerateDeviceLayerProperties)load(context, "vkEnumerateDeviceLayerProperties"); + vkEnumeratePhysicalDevices = (PFN_vkEnumeratePhysicalDevices)load(context, "vkEnumeratePhysicalDevices"); + vkGetDeviceProcAddr = (PFN_vkGetDeviceProcAddr)load(context, "vkGetDeviceProcAddr"); + vkGetPhysicalDeviceFeatures = (PFN_vkGetPhysicalDeviceFeatures)load(context, "vkGetPhysicalDeviceFeatures"); + vkGetPhysicalDeviceFormatProperties = (PFN_vkGetPhysicalDeviceFormatProperties)load(context, "vkGetPhysicalDeviceFormatProperties"); + vkGetPhysicalDeviceImageFormatProperties = (PFN_vkGetPhysicalDeviceImageFormatProperties)load(context, "vkGetPhysicalDeviceImageFormatProperties"); + vkGetPhysicalDeviceMemoryProperties = (PFN_vkGetPhysicalDeviceMemoryProperties)load(context, "vkGetPhysicalDeviceMemoryProperties"); + vkGetPhysicalDeviceProperties = (PFN_vkGetPhysicalDeviceProperties)load(context, "vkGetPhysicalDeviceProperties"); + vkGetPhysicalDeviceQueueFamilyProperties = 
(PFN_vkGetPhysicalDeviceQueueFamilyProperties)load(context, "vkGetPhysicalDeviceQueueFamilyProperties"); + vkGetPhysicalDeviceSparseImageFormatProperties = (PFN_vkGetPhysicalDeviceSparseImageFormatProperties)load(context, "vkGetPhysicalDeviceSparseImageFormatProperties"); + #endif /* defined(VK_VERSION_1_0) */ + #if defined(VK_KHR_surface) + vkDestroySurfaceKHR = (PFN_vkDestroySurfaceKHR)load(context, "vkDestroySurfaceKHR"); + vkGetPhysicalDeviceSurfaceCapabilitiesKHR = (PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR)load(context, "vkGetPhysicalDeviceSurfaceCapabilitiesKHR"); + vkGetPhysicalDeviceSurfaceFormatsKHR = (PFN_vkGetPhysicalDeviceSurfaceFormatsKHR)load(context, "vkGetPhysicalDeviceSurfaceFormatsKHR"); + vkGetPhysicalDeviceSurfacePresentModesKHR = (PFN_vkGetPhysicalDeviceSurfacePresentModesKHR)load(context, "vkGetPhysicalDeviceSurfacePresentModesKHR"); + vkGetPhysicalDeviceSurfaceSupportKHR = (PFN_vkGetPhysicalDeviceSurfaceSupportKHR)load(context, "vkGetPhysicalDeviceSurfaceSupportKHR"); + #endif /* defined(VK_KHR_surface) */ + DebugLog("Vulkan loader : instance functions loaded"); + } + + void VulkanLoader::LoadDeviceFunctions(void* context, PFN_vkVoidFunction (*load)(void*, const char*)) noexcept + { + #if defined(VK_VERSION_1_0) + vkAllocateCommandBuffers = (PFN_vkAllocateCommandBuffers)load(context, "vkAllocateCommandBuffers"); + vkAllocateDescriptorSets = (PFN_vkAllocateDescriptorSets)load(context, "vkAllocateDescriptorSets"); + vkAllocateMemory = (PFN_vkAllocateMemory)load(context, "vkAllocateMemory"); + vkBeginCommandBuffer = (PFN_vkBeginCommandBuffer)load(context, "vkBeginCommandBuffer"); + vkBindBufferMemory = (PFN_vkBindBufferMemory)load(context, "vkBindBufferMemory"); + vkBindImageMemory = (PFN_vkBindImageMemory)load(context, "vkBindImageMemory"); + vkCmdBeginQuery = (PFN_vkCmdBeginQuery)load(context, "vkCmdBeginQuery"); + vkCmdBeginRenderPass = (PFN_vkCmdBeginRenderPass)load(context, "vkCmdBeginRenderPass"); + vkCmdBindDescriptorSets = (PFN_vkCmdBindDescriptorSets)load(context, "vkCmdBindDescriptorSets"); + vkCmdBindIndexBuffer = (PFN_vkCmdBindIndexBuffer)load(context, "vkCmdBindIndexBuffer"); + vkCmdBindPipeline = (PFN_vkCmdBindPipeline)load(context, "vkCmdBindPipeline"); + vkCmdBindVertexBuffers = (PFN_vkCmdBindVertexBuffers)load(context, "vkCmdBindVertexBuffers"); + vkCmdBlitImage = (PFN_vkCmdBlitImage)load(context, "vkCmdBlitImage"); + vkCmdClearAttachments = (PFN_vkCmdClearAttachments)load(context, "vkCmdClearAttachments"); + vkCmdClearColorImage = (PFN_vkCmdClearColorImage)load(context, "vkCmdClearColorImage"); + vkCmdClearDepthStencilImage = (PFN_vkCmdClearDepthStencilImage)load(context, "vkCmdClearDepthStencilImage"); + vkCmdCopyBuffer = (PFN_vkCmdCopyBuffer)load(context, "vkCmdCopyBuffer"); + vkCmdCopyBufferToImage = (PFN_vkCmdCopyBufferToImage)load(context, "vkCmdCopyBufferToImage"); + vkCmdCopyImage = (PFN_vkCmdCopyImage)load(context, "vkCmdCopyImage"); + vkCmdCopyImageToBuffer = (PFN_vkCmdCopyImageToBuffer)load(context, "vkCmdCopyImageToBuffer"); + vkCmdCopyQueryPoolResults = (PFN_vkCmdCopyQueryPoolResults)load(context, "vkCmdCopyQueryPoolResults"); + vkCmdDispatch = (PFN_vkCmdDispatch)load(context, "vkCmdDispatch"); + vkCmdDispatchIndirect = (PFN_vkCmdDispatchIndirect)load(context, "vkCmdDispatchIndirect"); + vkCmdDraw = (PFN_vkCmdDraw)load(context, "vkCmdDraw"); + vkCmdDrawIndexed = (PFN_vkCmdDrawIndexed)load(context, "vkCmdDrawIndexed"); + vkCmdDrawIndexedIndirect = (PFN_vkCmdDrawIndexedIndirect)load(context, "vkCmdDrawIndexedIndirect"); + 
vkCmdDrawIndirect = (PFN_vkCmdDrawIndirect)load(context, "vkCmdDrawIndirect"); + vkCmdEndQuery = (PFN_vkCmdEndQuery)load(context, "vkCmdEndQuery"); + vkCmdEndRenderPass = (PFN_vkCmdEndRenderPass)load(context, "vkCmdEndRenderPass"); + vkCmdExecuteCommands = (PFN_vkCmdExecuteCommands)load(context, "vkCmdExecuteCommands"); + vkCmdFillBuffer = (PFN_vkCmdFillBuffer)load(context, "vkCmdFillBuffer"); + vkCmdNextSubpass = (PFN_vkCmdNextSubpass)load(context, "vkCmdNextSubpass"); + vkCmdPipelineBarrier = (PFN_vkCmdPipelineBarrier)load(context, "vkCmdPipelineBarrier"); + vkCmdPushConstants = (PFN_vkCmdPushConstants)load(context, "vkCmdPushConstants"); + vkCmdResetEvent = (PFN_vkCmdResetEvent)load(context, "vkCmdResetEvent"); + vkCmdResetQueryPool = (PFN_vkCmdResetQueryPool)load(context, "vkCmdResetQueryPool"); + vkCmdResolveImage = (PFN_vkCmdResolveImage)load(context, "vkCmdResolveImage"); + vkCmdSetBlendConstants = (PFN_vkCmdSetBlendConstants)load(context, "vkCmdSetBlendConstants"); + vkCmdSetDepthBias = (PFN_vkCmdSetDepthBias)load(context, "vkCmdSetDepthBias"); + vkCmdSetDepthBounds = (PFN_vkCmdSetDepthBounds)load(context, "vkCmdSetDepthBounds"); + vkCmdSetEvent = (PFN_vkCmdSetEvent)load(context, "vkCmdSetEvent"); + vkCmdSetLineWidth = (PFN_vkCmdSetLineWidth)load(context, "vkCmdSetLineWidth"); + vkCmdSetScissor = (PFN_vkCmdSetScissor)load(context, "vkCmdSetScissor"); + vkCmdSetStencilCompareMask = (PFN_vkCmdSetStencilCompareMask)load(context, "vkCmdSetStencilCompareMask"); + vkCmdSetStencilReference = (PFN_vkCmdSetStencilReference)load(context, "vkCmdSetStencilReference"); + vkCmdSetStencilWriteMask = (PFN_vkCmdSetStencilWriteMask)load(context, "vkCmdSetStencilWriteMask"); + vkCmdSetViewport = (PFN_vkCmdSetViewport)load(context, "vkCmdSetViewport"); + vkCmdUpdateBuffer = (PFN_vkCmdUpdateBuffer)load(context, "vkCmdUpdateBuffer"); + vkCmdWaitEvents = (PFN_vkCmdWaitEvents)load(context, "vkCmdWaitEvents"); + vkCmdWriteTimestamp = (PFN_vkCmdWriteTimestamp)load(context, "vkCmdWriteTimestamp"); + vkCreateBuffer = (PFN_vkCreateBuffer)load(context, "vkCreateBuffer"); + vkCreateBufferView = (PFN_vkCreateBufferView)load(context, "vkCreateBufferView"); + vkCreateCommandPool = (PFN_vkCreateCommandPool)load(context, "vkCreateCommandPool"); + vkCreateComputePipelines = (PFN_vkCreateComputePipelines)load(context, "vkCreateComputePipelines"); + vkCreateDescriptorPool = (PFN_vkCreateDescriptorPool)load(context, "vkCreateDescriptorPool"); + vkCreateDescriptorSetLayout = (PFN_vkCreateDescriptorSetLayout)load(context, "vkCreateDescriptorSetLayout"); + vkCreateEvent = (PFN_vkCreateEvent)load(context, "vkCreateEvent"); + vkCreateFence = (PFN_vkCreateFence)load(context, "vkCreateFence"); + vkCreateFramebuffer = (PFN_vkCreateFramebuffer)load(context, "vkCreateFramebuffer"); + vkCreateGraphicsPipelines = (PFN_vkCreateGraphicsPipelines)load(context, "vkCreateGraphicsPipelines"); + vkCreateImage = (PFN_vkCreateImage)load(context, "vkCreateImage"); + vkCreateImageView = (PFN_vkCreateImageView)load(context, "vkCreateImageView"); + vkCreatePipelineCache = (PFN_vkCreatePipelineCache)load(context, "vkCreatePipelineCache"); + vkCreatePipelineLayout = (PFN_vkCreatePipelineLayout)load(context, "vkCreatePipelineLayout"); + vkCreateQueryPool = (PFN_vkCreateQueryPool)load(context, "vkCreateQueryPool"); + vkCreateRenderPass = (PFN_vkCreateRenderPass)load(context, "vkCreateRenderPass"); + vkCreateSampler = (PFN_vkCreateSampler)load(context, "vkCreateSampler"); + vkCreateSemaphore = (PFN_vkCreateSemaphore)load(context, 
"vkCreateSemaphore"); + vkCreateShaderModule = (PFN_vkCreateShaderModule)load(context, "vkCreateShaderModule"); + vkDestroyBuffer = (PFN_vkDestroyBuffer)load(context, "vkDestroyBuffer"); + vkDestroyBufferView = (PFN_vkDestroyBufferView)load(context, "vkDestroyBufferView"); + vkDestroyCommandPool = (PFN_vkDestroyCommandPool)load(context, "vkDestroyCommandPool"); + vkDestroyDescriptorPool = (PFN_vkDestroyDescriptorPool)load(context, "vkDestroyDescriptorPool"); + vkDestroyDescriptorSetLayout = (PFN_vkDestroyDescriptorSetLayout)load(context, "vkDestroyDescriptorSetLayout"); + vkDestroyDevice = (PFN_vkDestroyDevice)load(context, "vkDestroyDevice"); + vkDestroyEvent = (PFN_vkDestroyEvent)load(context, "vkDestroyEvent"); + vkDestroyFence = (PFN_vkDestroyFence)load(context, "vkDestroyFence"); + vkDestroyFramebuffer = (PFN_vkDestroyFramebuffer)load(context, "vkDestroyFramebuffer"); + vkDestroyImage = (PFN_vkDestroyImage)load(context, "vkDestroyImage"); + vkDestroyImageView = (PFN_vkDestroyImageView)load(context, "vkDestroyImageView"); + vkDestroyPipeline = (PFN_vkDestroyPipeline)load(context, "vkDestroyPipeline"); + vkDestroyPipelineCache = (PFN_vkDestroyPipelineCache)load(context, "vkDestroyPipelineCache"); + vkDestroyPipelineLayout = (PFN_vkDestroyPipelineLayout)load(context, "vkDestroyPipelineLayout"); + vkDestroyQueryPool = (PFN_vkDestroyQueryPool)load(context, "vkDestroyQueryPool"); + vkDestroyRenderPass = (PFN_vkDestroyRenderPass)load(context, "vkDestroyRenderPass"); + vkDestroySampler = (PFN_vkDestroySampler)load(context, "vkDestroySampler"); + vkDestroySemaphore = (PFN_vkDestroySemaphore)load(context, "vkDestroySemaphore"); + vkDestroyShaderModule = (PFN_vkDestroyShaderModule)load(context, "vkDestroyShaderModule"); + vkDeviceWaitIdle = (PFN_vkDeviceWaitIdle)load(context, "vkDeviceWaitIdle"); + vkEndCommandBuffer = (PFN_vkEndCommandBuffer)load(context, "vkEndCommandBuffer"); + vkFlushMappedMemoryRanges = (PFN_vkFlushMappedMemoryRanges)load(context, "vkFlushMappedMemoryRanges"); + vkFreeCommandBuffers = (PFN_vkFreeCommandBuffers)load(context, "vkFreeCommandBuffers"); + vkFreeDescriptorSets = (PFN_vkFreeDescriptorSets)load(context, "vkFreeDescriptorSets"); + vkFreeMemory = (PFN_vkFreeMemory)load(context, "vkFreeMemory"); + vkGetBufferMemoryRequirements = (PFN_vkGetBufferMemoryRequirements)load(context, "vkGetBufferMemoryRequirements"); + vkGetDeviceMemoryCommitment = (PFN_vkGetDeviceMemoryCommitment)load(context, "vkGetDeviceMemoryCommitment"); + vkGetDeviceQueue = (PFN_vkGetDeviceQueue)load(context, "vkGetDeviceQueue"); + vkGetEventStatus = (PFN_vkGetEventStatus)load(context, "vkGetEventStatus"); + vkGetFenceStatus = (PFN_vkGetFenceStatus)load(context, "vkGetFenceStatus"); + vkGetImageMemoryRequirements = (PFN_vkGetImageMemoryRequirements)load(context, "vkGetImageMemoryRequirements"); + vkGetImageSparseMemoryRequirements = (PFN_vkGetImageSparseMemoryRequirements)load(context, "vkGetImageSparseMemoryRequirements"); + vkGetImageSubresourceLayout = (PFN_vkGetImageSubresourceLayout)load(context, "vkGetImageSubresourceLayout"); + vkGetPipelineCacheData = (PFN_vkGetPipelineCacheData)load(context, "vkGetPipelineCacheData"); + vkGetQueryPoolResults = (PFN_vkGetQueryPoolResults)load(context, "vkGetQueryPoolResults"); + vkGetRenderAreaGranularity = (PFN_vkGetRenderAreaGranularity)load(context, "vkGetRenderAreaGranularity"); + vkInvalidateMappedMemoryRanges = (PFN_vkInvalidateMappedMemoryRanges)load(context, "vkInvalidateMappedMemoryRanges"); + vkMapMemory = (PFN_vkMapMemory)load(context, 
"vkMapMemory"); + vkMergePipelineCaches = (PFN_vkMergePipelineCaches)load(context, "vkMergePipelineCaches"); + vkQueueBindSparse = (PFN_vkQueueBindSparse)load(context, "vkQueueBindSparse"); + vkQueueSubmit = (PFN_vkQueueSubmit)load(context, "vkQueueSubmit"); + vkQueueWaitIdle = (PFN_vkQueueWaitIdle)load(context, "vkQueueWaitIdle"); + vkResetCommandBuffer = (PFN_vkResetCommandBuffer)load(context, "vkResetCommandBuffer"); + vkResetCommandPool = (PFN_vkResetCommandPool)load(context, "vkResetCommandPool"); + vkResetDescriptorPool = (PFN_vkResetDescriptorPool)load(context, "vkResetDescriptorPool"); + vkResetEvent = (PFN_vkResetEvent)load(context, "vkResetEvent"); + vkResetFences = (PFN_vkResetFences)load(context, "vkResetFences"); + vkSetEvent = (PFN_vkSetEvent)load(context, "vkSetEvent"); + vkUnmapMemory = (PFN_vkUnmapMemory)load(context, "vkUnmapMemory"); + vkUpdateDescriptorSets = (PFN_vkUpdateDescriptorSets)load(context, "vkUpdateDescriptorSets"); + vkWaitForFences = (PFN_vkWaitForFences)load(context, "vkWaitForFences"); + #endif /* defined(VK_VERSION_1_0) */ + #if defined(VK_KHR_swapchain) + vkAcquireNextImageKHR = (PFN_vkAcquireNextImageKHR)load(context, "vkAcquireNextImageKHR"); + vkCreateSwapchainKHR = (PFN_vkCreateSwapchainKHR)load(context, "vkCreateSwapchainKHR"); + vkDestroySwapchainKHR = (PFN_vkDestroySwapchainKHR)load(context, "vkDestroySwapchainKHR"); + vkGetSwapchainImagesKHR = (PFN_vkGetSwapchainImagesKHR)load(context, "vkGetSwapchainImagesKHR"); + vkQueuePresentKHR = (PFN_vkQueuePresentKHR)load(context, "vkQueuePresentKHR"); + #endif /* defined(VK_KHR_swapchain) */ + + DebugLog("Vulkan loader : device functions loaded"); + } + + VulkanLoader::~VulkanLoader() + { + #if defined(_WIN32) + FreeLibrary((HMODULE)p_module); + #else + dlclose(p_module); + #endif + p_module = nullptr; + DebugLog("Vulkan loader : libvulkan unloaded"); + } +} + +#if defined(VK_VERSION_1_0) + PFN_vkAllocateCommandBuffers vkAllocateCommandBuffers; + PFN_vkAllocateDescriptorSets vkAllocateDescriptorSets; + PFN_vkAllocateMemory vkAllocateMemory; + PFN_vkBeginCommandBuffer vkBeginCommandBuffer; + PFN_vkBindBufferMemory vkBindBufferMemory; + PFN_vkBindImageMemory vkBindImageMemory; + PFN_vkCmdBeginQuery vkCmdBeginQuery; + PFN_vkCmdBeginRenderPass vkCmdBeginRenderPass; + PFN_vkCmdBindDescriptorSets vkCmdBindDescriptorSets; + PFN_vkCmdBindIndexBuffer vkCmdBindIndexBuffer; + PFN_vkCmdBindPipeline vkCmdBindPipeline; + PFN_vkCmdBindVertexBuffers vkCmdBindVertexBuffers; + PFN_vkCmdBlitImage vkCmdBlitImage; + PFN_vkCmdClearAttachments vkCmdClearAttachments; + PFN_vkCmdClearColorImage vkCmdClearColorImage; + PFN_vkCmdClearDepthStencilImage vkCmdClearDepthStencilImage; + PFN_vkCmdCopyBuffer vkCmdCopyBuffer; + PFN_vkCmdCopyBufferToImage vkCmdCopyBufferToImage; + PFN_vkCmdCopyImage vkCmdCopyImage; + PFN_vkCmdCopyImageToBuffer vkCmdCopyImageToBuffer; + PFN_vkCmdCopyQueryPoolResults vkCmdCopyQueryPoolResults; + PFN_vkCmdDispatch vkCmdDispatch; + PFN_vkCmdDispatchIndirect vkCmdDispatchIndirect; + PFN_vkCmdDraw vkCmdDraw; + PFN_vkCmdDrawIndexed vkCmdDrawIndexed; + PFN_vkCmdDrawIndexedIndirect vkCmdDrawIndexedIndirect; + PFN_vkCmdDrawIndirect vkCmdDrawIndirect; + PFN_vkCmdEndQuery vkCmdEndQuery; + PFN_vkCmdEndRenderPass vkCmdEndRenderPass; + PFN_vkCmdExecuteCommands vkCmdExecuteCommands; + PFN_vkCmdFillBuffer vkCmdFillBuffer; + PFN_vkCmdNextSubpass vkCmdNextSubpass; + PFN_vkCmdPipelineBarrier vkCmdPipelineBarrier; + PFN_vkCmdPushConstants vkCmdPushConstants; + PFN_vkCmdResetEvent vkCmdResetEvent; + PFN_vkCmdResetQueryPool 
vkCmdResetQueryPool; + PFN_vkCmdResolveImage vkCmdResolveImage; + PFN_vkCmdSetBlendConstants vkCmdSetBlendConstants; + PFN_vkCmdSetDepthBias vkCmdSetDepthBias; + PFN_vkCmdSetDepthBounds vkCmdSetDepthBounds; + PFN_vkCmdSetEvent vkCmdSetEvent; + PFN_vkCmdSetLineWidth vkCmdSetLineWidth; + PFN_vkCmdSetScissor vkCmdSetScissor; + PFN_vkCmdSetStencilCompareMask vkCmdSetStencilCompareMask; + PFN_vkCmdSetStencilReference vkCmdSetStencilReference; + PFN_vkCmdSetStencilWriteMask vkCmdSetStencilWriteMask; + PFN_vkCmdSetViewport vkCmdSetViewport; + PFN_vkCmdUpdateBuffer vkCmdUpdateBuffer; + PFN_vkCmdWaitEvents vkCmdWaitEvents; + PFN_vkCmdWriteTimestamp vkCmdWriteTimestamp; + PFN_vkCreateBuffer vkCreateBuffer; + PFN_vkCreateBufferView vkCreateBufferView; + PFN_vkCreateCommandPool vkCreateCommandPool; + PFN_vkCreateComputePipelines vkCreateComputePipelines; + PFN_vkCreateDescriptorPool vkCreateDescriptorPool; + PFN_vkCreateDescriptorSetLayout vkCreateDescriptorSetLayout; + PFN_vkCreateDevice vkCreateDevice; + PFN_vkCreateEvent vkCreateEvent; + PFN_vkCreateFence vkCreateFence; + PFN_vkCreateFramebuffer vkCreateFramebuffer; + PFN_vkCreateGraphicsPipelines vkCreateGraphicsPipelines; + PFN_vkCreateImage vkCreateImage; + PFN_vkCreateImageView vkCreateImageView; + PFN_vkCreateInstance vkCreateInstance; + PFN_vkCreatePipelineCache vkCreatePipelineCache; + PFN_vkCreatePipelineLayout vkCreatePipelineLayout; + PFN_vkCreateQueryPool vkCreateQueryPool; + PFN_vkCreateRenderPass vkCreateRenderPass; + PFN_vkCreateSampler vkCreateSampler; + PFN_vkCreateSemaphore vkCreateSemaphore; + PFN_vkCreateShaderModule vkCreateShaderModule; + PFN_vkDestroyBuffer vkDestroyBuffer; + PFN_vkDestroyBufferView vkDestroyBufferView; + PFN_vkDestroyCommandPool vkDestroyCommandPool; + PFN_vkDestroyDescriptorPool vkDestroyDescriptorPool; + PFN_vkDestroyDescriptorSetLayout vkDestroyDescriptorSetLayout; + PFN_vkDestroyDevice vkDestroyDevice; + PFN_vkDestroyEvent vkDestroyEvent; + PFN_vkDestroyFence vkDestroyFence; + PFN_vkDestroyFramebuffer vkDestroyFramebuffer; + PFN_vkDestroyImage vkDestroyImage; + PFN_vkDestroyImageView vkDestroyImageView; + PFN_vkDestroyInstance vkDestroyInstance; + PFN_vkDestroyPipeline vkDestroyPipeline; + PFN_vkDestroyPipelineCache vkDestroyPipelineCache; + PFN_vkDestroyPipelineLayout vkDestroyPipelineLayout; + PFN_vkDestroyQueryPool vkDestroyQueryPool; + PFN_vkDestroyRenderPass vkDestroyRenderPass; + PFN_vkDestroySampler vkDestroySampler; + PFN_vkDestroySemaphore vkDestroySemaphore; + PFN_vkDestroyShaderModule vkDestroyShaderModule; + PFN_vkDeviceWaitIdle vkDeviceWaitIdle; + PFN_vkEndCommandBuffer vkEndCommandBuffer; + PFN_vkEnumerateDeviceExtensionProperties vkEnumerateDeviceExtensionProperties; + PFN_vkEnumerateDeviceLayerProperties vkEnumerateDeviceLayerProperties; + PFN_vkEnumerateInstanceExtensionProperties vkEnumerateInstanceExtensionProperties; + PFN_vkEnumerateInstanceLayerProperties vkEnumerateInstanceLayerProperties; + PFN_vkEnumeratePhysicalDevices vkEnumeratePhysicalDevices; + PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges; + PFN_vkFreeCommandBuffers vkFreeCommandBuffers; + PFN_vkFreeDescriptorSets vkFreeDescriptorSets; + PFN_vkFreeMemory vkFreeMemory; + PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements; + PFN_vkGetDeviceMemoryCommitment vkGetDeviceMemoryCommitment; + PFN_vkGetDeviceProcAddr vkGetDeviceProcAddr; + PFN_vkGetDeviceQueue vkGetDeviceQueue; + PFN_vkGetEventStatus vkGetEventStatus; + PFN_vkGetFenceStatus vkGetFenceStatus; + PFN_vkGetImageMemoryRequirements 
vkGetImageMemoryRequirements; + PFN_vkGetImageSparseMemoryRequirements vkGetImageSparseMemoryRequirements; + PFN_vkGetImageSubresourceLayout vkGetImageSubresourceLayout; + PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr; + PFN_vkGetPhysicalDeviceFeatures vkGetPhysicalDeviceFeatures; + PFN_vkGetPhysicalDeviceFormatProperties vkGetPhysicalDeviceFormatProperties; + PFN_vkGetPhysicalDeviceImageFormatProperties vkGetPhysicalDeviceImageFormatProperties; + PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties; + PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties; + PFN_vkGetPhysicalDeviceQueueFamilyProperties vkGetPhysicalDeviceQueueFamilyProperties; + PFN_vkGetPhysicalDeviceSparseImageFormatProperties vkGetPhysicalDeviceSparseImageFormatProperties; + PFN_vkGetPipelineCacheData vkGetPipelineCacheData; + PFN_vkGetQueryPoolResults vkGetQueryPoolResults; + PFN_vkGetRenderAreaGranularity vkGetRenderAreaGranularity; + PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges; + PFN_vkMapMemory vkMapMemory; + PFN_vkMergePipelineCaches vkMergePipelineCaches; + PFN_vkQueueBindSparse vkQueueBindSparse; + PFN_vkQueueSubmit vkQueueSubmit; + PFN_vkQueueWaitIdle vkQueueWaitIdle; + PFN_vkResetCommandBuffer vkResetCommandBuffer; + PFN_vkResetCommandPool vkResetCommandPool; + PFN_vkResetDescriptorPool vkResetDescriptorPool; + PFN_vkResetEvent vkResetEvent; + PFN_vkResetFences vkResetFences; + PFN_vkSetEvent vkSetEvent; + PFN_vkUnmapMemory vkUnmapMemory; + PFN_vkUpdateDescriptorSets vkUpdateDescriptorSets; + PFN_vkWaitForFences vkWaitForFences; +#endif /* defined(VK_VERSION_1_0) */ +#if defined(VK_KHR_swapchain) + PFN_vkAcquireNextImageKHR vkAcquireNextImageKHR; + PFN_vkCreateSwapchainKHR vkCreateSwapchainKHR; + PFN_vkDestroySwapchainKHR vkDestroySwapchainKHR; + PFN_vkGetSwapchainImagesKHR vkGetSwapchainImagesKHR; + PFN_vkQueuePresentKHR vkQueuePresentKHR; +#endif /* defined(VK_KHR_swapchain) */ +#if defined(VK_KHR_surface) + PFN_vkDestroySurfaceKHR vkDestroySurfaceKHR; + PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR vkGetPhysicalDeviceSurfaceCapabilitiesKHR; + PFN_vkGetPhysicalDeviceSurfaceFormatsKHR vkGetPhysicalDeviceSurfaceFormatsKHR; + PFN_vkGetPhysicalDeviceSurfacePresentModesKHR vkGetPhysicalDeviceSurfacePresentModesKHR; + PFN_vkGetPhysicalDeviceSurfaceSupportKHR vkGetPhysicalDeviceSurfaceSupportKHR; +#endif /* defined(VK_KHR_surface) */ diff --git a/runtime/Sources/Renderer/Vulkan/VulkanLoader.h b/runtime/Sources/Renderer/Vulkan/VulkanLoader.h new file mode 100644 index 0000000..c6a7274 --- /dev/null +++ b/runtime/Sources/Renderer/Vulkan/VulkanLoader.h @@ -0,0 +1,42 @@ +#ifndef __MLX_VULKAN_LOADER__ +#define __MLX_VULKAN_LOADER__ + +#ifdef _WIN32 + typedef const char* LPCSTR; + typedef struct HINSTANCE__* HINSTANCE; + typedef HINSTANCE HMODULE; + #if defined(_MINWINDEF_) + /* minwindef.h defines FARPROC, and attempting to redefine it may conflict with -Wstrict-prototypes */ + #elif defined(_WIN64) + typedef __int64 (__stdcall* FARPROC)(void); + #else + typedef int (__stdcall* FARPROC)(void); + #endif +#else + #include +#endif + +namespace mlx +{ + class VulkanLoader + { + public: + VulkanLoader(); + void LoadInstance(VkInstance instance); + ~VulkanLoader(); + + private: + void LoadGlobalFunctions(void* context, PFN_vkVoidFunction (*load)(void*, const char*)) noexcept; + void LoadInstanceFunctions(void* context, PFN_vkVoidFunction (*load)(void*, const char*)) noexcept; + void LoadDeviceFunctions(void* context, PFN_vkVoidFunction (*load)(void*, const char*)) 
noexcept; + + private: + #ifdef _WIN32 + HMODULE p_module = nullptr; + #else + Handle p_module = nullptr; + #endif + }; +} + +#endif diff --git a/scripts/fetch_dependencies.sh b/scripts/fetch_dependencies.sh index d40d1b2..ba8a0aa 100755 --- a/scripts/fetch_dependencies.sh +++ b/scripts/fetch_dependencies.sh @@ -1,16 +1,5 @@ #!/bin/bash -# Update volk -rm -f ../third_party/volk.c -rm -f ../third_party/volk.h -tag_name=$(curl -sL https://api.github.com/repos/zeux/Volk/releases/latest | jq -r '.tag_name') -wget https://api.github.com/repos/zeux/volk/zipball/$tag_name -O volk.zip -unzip -o volk.zip -d ../third_party/ -mv ../third_party/zeux-volk*/volk.h ../third_party -mv ../third_party/zeux-volk*/volk.c ../third_party -rm -rf ../third_party/zeux-volk* -rm volk.zip - # Update VMA rm -f ../third_party/vma.h tag_name=$(curl -sL https://api.github.com/repos/GPUOpen-LibrariesAndSDKs/VulkanMemoryAllocator/releases/latest | jq -r '.tag_name') @@ -29,3 +18,9 @@ mv ../third_party/Vulkan-Headers-main/include/vulkan ../third_party/ mv ../third_party/Vulkan-Headers-main/include/vk_video ../third_party/ rm -rf ../third_party/Vulkan-Headers-main rm vulkan-headers.zip + +# Update KVF +rm -f ../third_party/kvf.h +git clone https://github.com/Kbz-8/KVF.git ../third_party/KVF/ +mv ../third_party/KVF/kvf.h ../third_party/kvf.h +rm -rf ../third_party/KVF diff --git a/third_party/glm/common.hpp b/third_party/glm/common.hpp deleted file mode 100755 index 0328dc9..0000000 --- a/third_party/glm/common.hpp +++ /dev/null @@ -1,539 +0,0 @@ -/// @ref core -/// @file glm/common.hpp -/// -/// @see GLSL 4.20.8 specification, section 8.3 Common Functions -/// -/// @defgroup core_func_common Common functions -/// @ingroup core -/// -/// Provides GLSL common functions -/// -/// These all operate component-wise. The description is per component. -/// -/// Include to use these core features. - -#pragma once - -#include "detail/qualifier.hpp" -#include "detail/_fixes.hpp" - -namespace glm -{ - /// @addtogroup core_func_common - /// @{ - - /// Returns x if x >= 0; otherwise, it returns -x. - /// - /// @tparam genType floating-point or signed integer; scalar or vector types. - /// - /// @see GLSL abs man page - /// @see GLSL 4.20.8 specification, section 8.3 Common Functions - template - GLM_FUNC_DECL GLM_CONSTEXPR genType abs(genType x); - - /// Returns x if x >= 0; otherwise, it returns -x. - /// - /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector - /// @tparam T Floating-point or signed integer scalar types - /// @tparam Q Value from qualifier enum - /// - /// @see GLSL abs man page - /// @see GLSL 4.20.8 specification, section 8.3 Common Functions - template - GLM_FUNC_DECL GLM_CONSTEXPR vec abs(vec const& x); - - /// Returns 1.0 if x > 0, 0.0 if x == 0, or -1.0 if x < 0. - /// - /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector - /// @tparam T Floating-point scalar types - /// @tparam Q Value from qualifier enum - /// - /// @see GLSL sign man page - /// @see GLSL 4.20.8 specification, section 8.3 Common Functions - template - GLM_FUNC_DECL vec sign(vec const& x); - - /// Returns a value equal to the nearest integer that is less then or equal to x. 
- /// - /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector - /// @tparam T Floating-point scalar types - /// @tparam Q Value from qualifier enum - /// - /// @see GLSL floor man page - /// @see GLSL 4.20.8 specification, section 8.3 Common Functions - template - GLM_FUNC_DECL vec floor(vec const& x); - - /// Returns a value equal to the nearest integer to x - /// whose absolute value is not larger than the absolute value of x. - /// - /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector - /// @tparam T Floating-point scalar types - /// @tparam Q Value from qualifier enum - /// - /// @see GLSL trunc man page - /// @see GLSL 4.20.8 specification, section 8.3 Common Functions - template - GLM_FUNC_DECL vec trunc(vec const& x); - - /// Returns a value equal to the nearest integer to x. - /// The fraction 0.5 will round in a direction chosen by the - /// implementation, presumably the direction that is fastest. - /// This includes the possibility that round(x) returns the - /// same value as roundEven(x) for all values of x. - /// - /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector - /// @tparam T Floating-point scalar types - /// @tparam Q Value from qualifier enum - /// - /// @see GLSL round man page - /// @see GLSL 4.20.8 specification, section 8.3 Common Functions - template - GLM_FUNC_DECL vec round(vec const& x); - - /// Returns a value equal to the nearest integer to x. - /// A fractional part of 0.5 will round toward the nearest even - /// integer. (Both 3.5 and 4.5 for x will return 4.0.) - /// - /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector - /// @tparam T Floating-point scalar types - /// @tparam Q Value from qualifier enum - /// - /// @see GLSL roundEven man page - /// @see GLSL 4.20.8 specification, section 8.3 Common Functions - /// @see New round to even technique - template - GLM_FUNC_DECL vec roundEven(vec const& x); - - /// Returns a value equal to the nearest integer - /// that is greater than or equal to x. - /// - /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector - /// @tparam T Floating-point scalar types - /// @tparam Q Value from qualifier enum - /// - /// @see GLSL ceil man page - /// @see GLSL 4.20.8 specification, section 8.3 Common Functions - template - GLM_FUNC_DECL vec ceil(vec const& x); - - /// Return x - floor(x). - /// - /// @tparam genType Floating-point scalar or vector types. - /// - /// @see GLSL fract man page - /// @see GLSL 4.20.8 specification, section 8.3 Common Functions - template - GLM_FUNC_DECL genType fract(genType x); - - /// Return x - floor(x). - /// - /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector - /// @tparam T Floating-point scalar types - /// @tparam Q Value from qualifier enum - /// - /// @see GLSL fract man page - /// @see GLSL 4.20.8 specification, section 8.3 Common Functions - template - GLM_FUNC_DECL vec fract(vec const& x); - - template - GLM_FUNC_DECL genType mod(genType x, genType y); - - template - GLM_FUNC_DECL vec mod(vec const& x, T y); - - /// Modulus. Returns x - y * floor(x / y) - /// for each component in x using the floating point value y. 
- /// - /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector - /// @tparam T Floating-point scalar types, include glm/gtc/integer for integer scalar types support - /// @tparam Q Value from qualifier enum - /// - /// @see GLSL mod man page - /// @see GLSL 4.20.8 specification, section 8.3 Common Functions - template - GLM_FUNC_DECL vec mod(vec const& x, vec const& y); - - /// Returns the fractional part of x and sets i to the integer - /// part (as a whole number floating point value). Both the - /// return value and the output parameter will have the same - /// sign as x. - /// - /// @tparam genType Floating-point scalar or vector types. - /// - /// @see GLSL modf man page - /// @see GLSL 4.20.8 specification, section 8.3 Common Functions - template - GLM_FUNC_DECL genType modf(genType x, genType& i); - - /// Returns y if y < x; otherwise, it returns x. - /// - /// @tparam genType Floating-point or integer; scalar or vector types. - /// - /// @see GLSL min man page - /// @see GLSL 4.20.8 specification, section 8.3 Common Functions - template - GLM_FUNC_DECL GLM_CONSTEXPR genType min(genType x, genType y); - - /// Returns y if y < x; otherwise, it returns x. - /// - /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector - /// @tparam T Floating-point or integer scalar types - /// @tparam Q Value from qualifier enum - /// - /// @see GLSL min man page - /// @see GLSL 4.20.8 specification, section 8.3 Common Functions - template - GLM_FUNC_DECL GLM_CONSTEXPR vec min(vec const& x, T y); - - /// Returns y if y < x; otherwise, it returns x. - /// - /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector - /// @tparam T Floating-point or integer scalar types - /// @tparam Q Value from qualifier enum - /// - /// @see GLSL min man page - /// @see GLSL 4.20.8 specification, section 8.3 Common Functions - template - GLM_FUNC_DECL GLM_CONSTEXPR vec min(vec const& x, vec const& y); - - /// Returns y if x < y; otherwise, it returns x. - /// - /// @tparam genType Floating-point or integer; scalar or vector types. - /// - /// @see GLSL max man page - /// @see GLSL 4.20.8 specification, section 8.3 Common Functions - template - GLM_FUNC_DECL GLM_CONSTEXPR genType max(genType x, genType y); - - /// Returns y if x < y; otherwise, it returns x. - /// - /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector - /// @tparam T Floating-point or integer scalar types - /// @tparam Q Value from qualifier enum - /// - /// @see GLSL max man page - /// @see GLSL 4.20.8 specification, section 8.3 Common Functions - template - GLM_FUNC_DECL GLM_CONSTEXPR vec max(vec const& x, T y); - - /// Returns y if x < y; otherwise, it returns x. - /// - /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector - /// @tparam T Floating-point or integer scalar types - /// @tparam Q Value from qualifier enum - /// - /// @see GLSL max man page - /// @see GLSL 4.20.8 specification, section 8.3 Common Functions - template - GLM_FUNC_DECL GLM_CONSTEXPR vec max(vec const& x, vec const& y); - - /// Returns min(max(x, minVal), maxVal) for each component in x - /// using the floating-point values minVal and maxVal. - /// - /// @tparam genType Floating-point or integer; scalar or vector types. 
- /// - /// @see GLSL clamp man page - /// @see GLSL 4.20.8 specification, section 8.3 Common Functions - template - GLM_FUNC_DECL GLM_CONSTEXPR genType clamp(genType x, genType minVal, genType maxVal); - - /// Returns min(max(x, minVal), maxVal) for each component in x - /// using the floating-point values minVal and maxVal. - /// - /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector - /// @tparam T Floating-point or integer scalar types - /// @tparam Q Value from qualifier enum - /// - /// @see GLSL clamp man page - /// @see GLSL 4.20.8 specification, section 8.3 Common Functions - template - GLM_FUNC_DECL GLM_CONSTEXPR vec clamp(vec const& x, T minVal, T maxVal); - - /// Returns min(max(x, minVal), maxVal) for each component in x - /// using the floating-point values minVal and maxVal. - /// - /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector - /// @tparam T Floating-point or integer scalar types - /// @tparam Q Value from qualifier enum - /// - /// @see GLSL clamp man page - /// @see GLSL 4.20.8 specification, section 8.3 Common Functions - template - GLM_FUNC_DECL GLM_CONSTEXPR vec clamp(vec const& x, vec const& minVal, vec const& maxVal); - - /// If genTypeU is a floating scalar or vector: - /// Returns x * (1.0 - a) + y * a, i.e., the linear blend of - /// x and y using the floating-point value a. - /// The value for a is not restricted to the range [0, 1]. - /// - /// If genTypeU is a boolean scalar or vector: - /// Selects which vector each returned component comes - /// from. For a component of 'a' that is false, the - /// corresponding component of 'x' is returned. For a - /// component of 'a' that is true, the corresponding - /// component of 'y' is returned. Components of 'x' and 'y' that - /// are not selected are allowed to be invalid floating point - /// values and will have no effect on the results. Thus, this - /// provides different functionality than - /// genType mix(genType x, genType y, genType(a)) - /// where a is a Boolean vector. - /// - /// @see GLSL mix man page - /// @see GLSL 4.20.8 specification, section 8.3 Common Functions - /// - /// @param[in] x Value to interpolate. - /// @param[in] y Value to interpolate. - /// @param[in] a Interpolant. - /// - /// @tparam genTypeT Floating point scalar or vector. - /// @tparam genTypeU Floating point or boolean scalar or vector. It can't be a vector if it is the length of genTypeT. - /// - /// @code - /// #include - /// ... - /// float a; - /// bool b; - /// glm::dvec3 e; - /// glm::dvec3 f; - /// glm::vec4 g; - /// glm::vec4 h; - /// ... - /// glm::vec4 r = glm::mix(g, h, a); // Interpolate with a floating-point scalar two vectors. - /// glm::vec4 s = glm::mix(g, h, b); // Returns g or h; - /// glm::dvec3 t = glm::mix(e, f, a); // Types of the third parameter is not required to match with the first and the second. - /// glm::vec4 u = glm::mix(g, h, r); // Interpolations can be perform per component with a vector for the last parameter. - /// @endcode - template - GLM_FUNC_DECL genTypeT mix(genTypeT x, genTypeT y, genTypeU a); - - template - GLM_FUNC_DECL vec mix(vec const& x, vec const& y, vec const& a); - - template - GLM_FUNC_DECL vec mix(vec const& x, vec const& y, U a); - - /// Returns 0.0 if x < edge, otherwise it returns 1.0 for each component of a genType. 
- /// - /// @see GLSL step man page - /// @see GLSL 4.20.8 specification, section 8.3 Common Functions - template - GLM_FUNC_DECL genType step(genType edge, genType x); - - /// Returns 0.0 if x < edge, otherwise it returns 1.0. - /// - /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector - /// @tparam T Floating-point scalar types - /// @tparam Q Value from qualifier enum - /// - /// @see GLSL step man page - /// @see GLSL 4.20.8 specification, section 8.3 Common Functions - template - GLM_FUNC_DECL vec step(T edge, vec const& x); - - /// Returns 0.0 if x < edge, otherwise it returns 1.0. - /// - /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector - /// @tparam T Floating-point scalar types - /// @tparam Q Value from qualifier enum - /// - /// @see GLSL step man page - /// @see GLSL 4.20.8 specification, section 8.3 Common Functions - template - GLM_FUNC_DECL vec step(vec const& edge, vec const& x); - - /// Returns 0.0 if x <= edge0 and 1.0 if x >= edge1 and - /// performs smooth Hermite interpolation between 0 and 1 - /// when edge0 < x < edge1. This is useful in cases where - /// you would want a threshold function with a smooth - /// transition. This is equivalent to: - /// genType t; - /// t = clamp ((x - edge0) / (edge1 - edge0), 0, 1); - /// return t * t * (3 - 2 * t); - /// Results are undefined if edge0 >= edge1. - /// - /// @tparam genType Floating-point scalar or vector types. - /// - /// @see GLSL smoothstep man page - /// @see GLSL 4.20.8 specification, section 8.3 Common Functions - template - GLM_FUNC_DECL genType smoothstep(genType edge0, genType edge1, genType x); - - template - GLM_FUNC_DECL vec smoothstep(T edge0, T edge1, vec const& x); - - template - GLM_FUNC_DECL vec smoothstep(vec const& edge0, vec const& edge1, vec const& x); - - /// Returns true if x holds a NaN (not a number) - /// representation in the underlying implementation's set of - /// floating point representations. Returns false otherwise, - /// including for implementations with no NaN - /// representations. - /// - /// /!\ When using compiler fast math, this function may fail. - /// - /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector - /// @tparam T Floating-point scalar types - /// @tparam Q Value from qualifier enum - /// - /// @see GLSL isnan man page - /// @see GLSL 4.20.8 specification, section 8.3 Common Functions - template - GLM_FUNC_DECL vec isnan(vec const& x); - - /// Returns true if x holds a positive infinity or negative - /// infinity representation in the underlying implementation's - /// set of floating point representations. Returns false - /// otherwise, including for implementations with no infinity - /// representations. - /// - /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector - /// @tparam T Floating-point scalar types - /// @tparam Q Value from qualifier enum - /// - /// @see GLSL isinf man page - /// @see GLSL 4.20.8 specification, section 8.3 Common Functions - template - GLM_FUNC_DECL vec isinf(vec const& x); - - /// Returns a signed integer value representing - /// the encoding of a floating-point value. The floating-point - /// value's bit-level representation is preserved. - /// - /// @see GLSL floatBitsToInt man page - /// @see GLSL 4.20.8 specification, section 8.3 Common Functions - GLM_FUNC_DECL int floatBitsToInt(float const& v); - - /// Returns a signed integer value representing - /// the encoding of a floating-point value. 
The floatingpoint - /// value's bit-level representation is preserved. - /// - /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector - /// @tparam Q Value from qualifier enum - /// - /// @see GLSL floatBitsToInt man page - /// @see GLSL 4.20.8 specification, section 8.3 Common Functions - template - GLM_FUNC_DECL vec floatBitsToInt(vec const& v); - - /// Returns a unsigned integer value representing - /// the encoding of a floating-point value. The floatingpoint - /// value's bit-level representation is preserved. - /// - /// @see GLSL floatBitsToUint man page - /// @see GLSL 4.20.8 specification, section 8.3 Common Functions - GLM_FUNC_DECL uint floatBitsToUint(float const& v); - - /// Returns a unsigned integer value representing - /// the encoding of a floating-point value. The floatingpoint - /// value's bit-level representation is preserved. - /// - /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector - /// @tparam Q Value from qualifier enum - /// - /// @see GLSL floatBitsToUint man page - /// @see GLSL 4.20.8 specification, section 8.3 Common Functions - template - GLM_FUNC_DECL vec floatBitsToUint(vec const& v); - - /// Returns a floating-point value corresponding to a signed - /// integer encoding of a floating-point value. - /// If an inf or NaN is passed in, it will not signal, and the - /// resulting floating point value is unspecified. Otherwise, - /// the bit-level representation is preserved. - /// - /// @see GLSL intBitsToFloat man page - /// @see GLSL 4.20.8 specification, section 8.3 Common Functions - GLM_FUNC_DECL float intBitsToFloat(int const& v); - - /// Returns a floating-point value corresponding to a signed - /// integer encoding of a floating-point value. - /// If an inf or NaN is passed in, it will not signal, and the - /// resulting floating point value is unspecified. Otherwise, - /// the bit-level representation is preserved. - /// - /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector - /// @tparam Q Value from qualifier enum - /// - /// @see GLSL intBitsToFloat man page - /// @see GLSL 4.20.8 specification, section 8.3 Common Functions - template - GLM_FUNC_DECL vec intBitsToFloat(vec const& v); - - /// Returns a floating-point value corresponding to a - /// unsigned integer encoding of a floating-point value. - /// If an inf or NaN is passed in, it will not signal, and the - /// resulting floating point value is unspecified. Otherwise, - /// the bit-level representation is preserved. - /// - /// @see GLSL uintBitsToFloat man page - /// @see GLSL 4.20.8 specification, section 8.3 Common Functions - GLM_FUNC_DECL float uintBitsToFloat(uint const& v); - - /// Returns a floating-point value corresponding to a - /// unsigned integer encoding of a floating-point value. - /// If an inf or NaN is passed in, it will not signal, and the - /// resulting floating point value is unspecified. Otherwise, - /// the bit-level representation is preserved. - /// - /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector - /// @tparam Q Value from qualifier enum - /// - /// @see GLSL uintBitsToFloat man page - /// @see GLSL 4.20.8 specification, section 8.3 Common Functions - template - GLM_FUNC_DECL vec uintBitsToFloat(vec const& v); - - /// Computes and returns a * b + c. - /// - /// @tparam genType Floating-point scalar or vector types. 
- /// - /// @see GLSL fma man page - /// @see GLSL 4.20.8 specification, section 8.3 Common Functions - template - GLM_FUNC_DECL genType fma(genType const& a, genType const& b, genType const& c); - - /// Splits x into a floating-point significand in the range - /// [0.5, 1.0) and an integral exponent of two, such that: - /// x = significand * exp(2, exponent) - /// - /// The significand is returned by the function and the - /// exponent is returned in the parameter exp. For a - /// floating-point value of zero, the significant and exponent - /// are both zero. For a floating-point value that is an - /// infinity or is not a number, the results are undefined. - /// - /// @tparam genType Floating-point scalar or vector types. - /// - /// @see GLSL frexp man page - /// @see GLSL 4.20.8 specification, section 8.3 Common Functions - template - GLM_FUNC_DECL genType frexp(genType x, int& exp); - - template - GLM_FUNC_DECL vec frexp(vec const& v, vec& exp); - - /// Builds a floating-point number from x and the - /// corresponding integral exponent of two in exp, returning: - /// significand * exp(2, exponent) - /// - /// If this product is too large to be represented in the - /// floating-point type, the result is undefined. - /// - /// @tparam genType Floating-point scalar or vector types. - /// - /// @see GLSL ldexp man page; - /// @see GLSL 4.20.8 specification, section 8.3 Common Functions - template - GLM_FUNC_DECL genType ldexp(genType const& x, int const& exp); - - template - GLM_FUNC_DECL vec ldexp(vec const& v, vec const& exp); - - /// @} -}//namespace glm - -#include "detail/func_common.inl" - diff --git a/third_party/glm/detail/_features.hpp b/third_party/glm/detail/_features.hpp deleted file mode 100755 index b0cbe9f..0000000 --- a/third_party/glm/detail/_features.hpp +++ /dev/null @@ -1,394 +0,0 @@ -#pragma once - -// #define GLM_CXX98_EXCEPTIONS -// #define GLM_CXX98_RTTI - -// #define GLM_CXX11_RVALUE_REFERENCES -// Rvalue references - GCC 4.3 -// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2006/n2118.html - -// GLM_CXX11_TRAILING_RETURN -// Rvalue references for *this - GCC not supported -// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2439.htm - -// GLM_CXX11_NONSTATIC_MEMBER_INIT -// Initialization of class objects by rvalues - GCC any -// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2004/n1610.html - -// GLM_CXX11_NONSTATIC_MEMBER_INIT -// Non-static data member initializers - GCC 4.7 -// http://www.open-std.org/JTC1/SC22/WG21/docs/papers/2008/n2756.htm - -// #define GLM_CXX11_VARIADIC_TEMPLATE -// Variadic templates - GCC 4.3 -// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2242.pdf - -// -// Extending variadic template template parameters - GCC 4.4 -// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2008/n2555.pdf - -// #define GLM_CXX11_GENERALIZED_INITIALIZERS -// Initializer lists - GCC 4.4 -// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2008/n2672.htm - -// #define GLM_CXX11_STATIC_ASSERT -// Static assertions - GCC 4.3 -// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2004/n1720.html - -// #define GLM_CXX11_AUTO_TYPE -// auto-typed variables - GCC 4.4 -// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2006/n1984.pdf - -// #define GLM_CXX11_AUTO_TYPE -// Multi-declarator auto - GCC 4.4 -// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2004/n1737.pdf - -// #define GLM_CXX11_AUTO_TYPE -// Removal of auto as a storage-class specifier - GCC 4.4 -// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2008/n2546.htm - 
-// #define GLM_CXX11_AUTO_TYPE -// New function declarator syntax - GCC 4.4 -// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2008/n2541.htm - -// #define GLM_CXX11_LAMBDAS -// New wording for C++0x lambdas - GCC 4.5 -// http://www.open-std.org/JTC1/SC22/WG21/docs/papers/2009/n2927.pdf - -// #define GLM_CXX11_DECLTYPE -// Declared type of an expression - GCC 4.3 -// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2343.pdf - -// -// Right angle brackets - GCC 4.3 -// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2005/n1757.html - -// -// Default template arguments for function templates DR226 GCC 4.3 -// http://www.open-std.org/jtc1/sc22/wg21/docs/cwg_defects.html#226 - -// -// Solving the SFINAE problem for expressions DR339 GCC 4.4 -// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2008/n2634.html - -// #define GLM_CXX11_ALIAS_TEMPLATE -// Template aliases N2258 GCC 4.7 -// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2258.pdf - -// -// Extern templates N1987 Yes -// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2006/n1987.htm - -// #define GLM_CXX11_NULLPTR -// Null pointer constant N2431 GCC 4.6 -// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2431.pdf - -// #define GLM_CXX11_STRONG_ENUMS -// Strongly-typed enums N2347 GCC 4.4 -// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2347.pdf - -// -// Forward declarations for enums N2764 GCC 4.6 -// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2008/n2764.pdf - -// -// Generalized attributes N2761 GCC 4.8 -// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2008/n2761.pdf - -// -// Generalized constant expressions N2235 GCC 4.6 -// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2235.pdf - -// -// Alignment support N2341 GCC 4.8 -// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2341.pdf - -// #define GLM_CXX11_DELEGATING_CONSTRUCTORS -// Delegating constructors N1986 GCC 4.7 -// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2006/n1986.pdf - -// -// Inheriting constructors N2540 GCC 4.8 -// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2008/n2540.htm - -// #define GLM_CXX11_EXPLICIT_CONVERSIONS -// Explicit conversion operators N2437 GCC 4.5 -// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2437.pdf - -// -// New character types N2249 GCC 4.4 -// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2249.html - -// -// Unicode string literals N2442 GCC 4.5 -// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2442.htm - -// -// Raw string literals N2442 GCC 4.5 -// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2442.htm - -// -// Universal character name literals N2170 GCC 4.5 -// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2170.html - -// #define GLM_CXX11_USER_LITERALS -// User-defined literals N2765 GCC 4.7 -// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2008/n2765.pdf - -// -// Standard Layout Types N2342 GCC 4.5 -// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2342.htm - -// #define GLM_CXX11_DEFAULTED_FUNCTIONS -// #define GLM_CXX11_DELETED_FUNCTIONS -// Defaulted and deleted functions N2346 GCC 4.4 -// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2346.htm - -// -// Extended friend declarations N1791 GCC 4.7 -// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2005/n1791.pdf - -// -// Extending sizeof N2253 GCC 4.4 -// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2253.html - -// #define GLM_CXX11_INLINE_NAMESPACES -// Inline namespaces N2535 GCC 4.4 
-// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2008/n2535.htm - -// #define GLM_CXX11_UNRESTRICTED_UNIONS -// Unrestricted unions N2544 GCC 4.6 -// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2008/n2544.pdf - -// #define GLM_CXX11_LOCAL_TYPE_TEMPLATE_ARGS -// Local and unnamed types as template arguments N2657 GCC 4.5 -// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2008/n2657.htm - -// #define GLM_CXX11_RANGE_FOR -// Range-based for N2930 GCC 4.6 -// http://www.open-std.org/JTC1/SC22/WG21/docs/papers/2009/n2930.html - -// #define GLM_CXX11_OVERRIDE_CONTROL -// Explicit virtual overrides N2928 N3206 N3272 GCC 4.7 -// http://www.open-std.org/JTC1/SC22/WG21/docs/papers/2009/n2928.htm -// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2010/n3206.htm -// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2011/n3272.htm - -// -// Minimal support for garbage collection and reachability-based leak detection N2670 No -// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2008/n2670.htm - -// #define GLM_CXX11_NOEXCEPT -// Allowing move constructors to throw [noexcept] N3050 GCC 4.6 (core language only) -// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2010/n3050.html - -// -// Defining move special member functions N3053 GCC 4.6 -// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2010/n3053.html - -// -// Sequence points N2239 Yes -// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2239.html - -// -// Atomic operations N2427 GCC 4.4 -// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2239.html - -// -// Strong Compare and Exchange N2748 GCC 4.5 -// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2427.html - -// -// Bidirectional Fences N2752 GCC 4.8 -// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2008/n2752.htm - -// -// Memory model N2429 GCC 4.8 -// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2429.htm - -// -// Data-dependency ordering: atomics and memory model N2664 GCC 4.4 -// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2008/n2664.htm - -// -// Propagating exceptions N2179 GCC 4.4 -// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2179.html - -// -// Abandoning a process and at_quick_exit N2440 GCC 4.8 -// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2440.htm - -// -// Allow atomics use in signal handlers N2547 Yes -// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2008/n2547.htm - -// -// Thread-local storage N2659 GCC 4.8 -// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2008/n2659.htm - -// -// Dynamic initialization and destruction with concurrency N2660 GCC 4.3 -// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2008/n2660.htm - -// -// __func__ predefined identifier N2340 GCC 4.3 -// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2340.htm - -// -// C99 preprocessor N1653 GCC 4.3 -// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2004/n1653.htm - -// -// long long N1811 GCC 4.3 -// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2005/n1811.pdf - -// -// Extended integral types N1988 Yes -// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2006/n1988.pdf - -#if(GLM_COMPILER & GLM_COMPILER_GCC) - -# define GLM_CXX11_STATIC_ASSERT - -#elif(GLM_COMPILER & GLM_COMPILER_CLANG) -# if(__has_feature(cxx_exceptions)) -# define GLM_CXX98_EXCEPTIONS -# endif - -# if(__has_feature(cxx_rtti)) -# define GLM_CXX98_RTTI -# endif - -# if(__has_feature(cxx_access_control_sfinae)) -# define GLM_CXX11_ACCESS_CONTROL_SFINAE -# endif - -# 
if(__has_feature(cxx_alias_templates)) -# define GLM_CXX11_ALIAS_TEMPLATE -# endif - -# if(__has_feature(cxx_alignas)) -# define GLM_CXX11_ALIGNAS -# endif - -# if(__has_feature(cxx_attributes)) -# define GLM_CXX11_ATTRIBUTES -# endif - -# if(__has_feature(cxx_constexpr)) -# define GLM_CXX11_CONSTEXPR -# endif - -# if(__has_feature(cxx_decltype)) -# define GLM_CXX11_DECLTYPE -# endif - -# if(__has_feature(cxx_default_function_template_args)) -# define GLM_CXX11_DEFAULT_FUNCTION_TEMPLATE_ARGS -# endif - -# if(__has_feature(cxx_defaulted_functions)) -# define GLM_CXX11_DEFAULTED_FUNCTIONS -# endif - -# if(__has_feature(cxx_delegating_constructors)) -# define GLM_CXX11_DELEGATING_CONSTRUCTORS -# endif - -# if(__has_feature(cxx_deleted_functions)) -# define GLM_CXX11_DELETED_FUNCTIONS -# endif - -# if(__has_feature(cxx_explicit_conversions)) -# define GLM_CXX11_EXPLICIT_CONVERSIONS -# endif - -# if(__has_feature(cxx_generalized_initializers)) -# define GLM_CXX11_GENERALIZED_INITIALIZERS -# endif - -# if(__has_feature(cxx_implicit_moves)) -# define GLM_CXX11_IMPLICIT_MOVES -# endif - -# if(__has_feature(cxx_inheriting_constructors)) -# define GLM_CXX11_INHERITING_CONSTRUCTORS -# endif - -# if(__has_feature(cxx_inline_namespaces)) -# define GLM_CXX11_INLINE_NAMESPACES -# endif - -# if(__has_feature(cxx_lambdas)) -# define GLM_CXX11_LAMBDAS -# endif - -# if(__has_feature(cxx_local_type_template_args)) -# define GLM_CXX11_LOCAL_TYPE_TEMPLATE_ARGS -# endif - -# if(__has_feature(cxx_noexcept)) -# define GLM_CXX11_NOEXCEPT -# endif - -# if(__has_feature(cxx_nonstatic_member_init)) -# define GLM_CXX11_NONSTATIC_MEMBER_INIT -# endif - -# if(__has_feature(cxx_nullptr)) -# define GLM_CXX11_NULLPTR -# endif - -# if(__has_feature(cxx_override_control)) -# define GLM_CXX11_OVERRIDE_CONTROL -# endif - -# if(__has_feature(cxx_reference_qualified_functions)) -# define GLM_CXX11_REFERENCE_QUALIFIED_FUNCTIONS -# endif - -# if(__has_feature(cxx_range_for)) -# define GLM_CXX11_RANGE_FOR -# endif - -# if(__has_feature(cxx_raw_string_literals)) -# define GLM_CXX11_RAW_STRING_LITERALS -# endif - -# if(__has_feature(cxx_rvalue_references)) -# define GLM_CXX11_RVALUE_REFERENCES -# endif - -# if(__has_feature(cxx_static_assert)) -# define GLM_CXX11_STATIC_ASSERT -# endif - -# if(__has_feature(cxx_auto_type)) -# define GLM_CXX11_AUTO_TYPE -# endif - -# if(__has_feature(cxx_strong_enums)) -# define GLM_CXX11_STRONG_ENUMS -# endif - -# if(__has_feature(cxx_trailing_return)) -# define GLM_CXX11_TRAILING_RETURN -# endif - -# if(__has_feature(cxx_unicode_literals)) -# define GLM_CXX11_UNICODE_LITERALS -# endif - -# if(__has_feature(cxx_unrestricted_unions)) -# define GLM_CXX11_UNRESTRICTED_UNIONS -# endif - -# if(__has_feature(cxx_user_literals)) -# define GLM_CXX11_USER_LITERALS -# endif - -# if(__has_feature(cxx_variadic_templates)) -# define GLM_CXX11_VARIADIC_TEMPLATES -# endif - -#endif//(GLM_COMPILER & GLM_COMPILER_CLANG) diff --git a/third_party/glm/detail/_fixes.hpp b/third_party/glm/detail/_fixes.hpp deleted file mode 100755 index a503c7c..0000000 --- a/third_party/glm/detail/_fixes.hpp +++ /dev/null @@ -1,27 +0,0 @@ -#include - -//! Workaround for compatibility with other libraries -#ifdef max -#undef max -#endif - -//! Workaround for compatibility with other libraries -#ifdef min -#undef min -#endif - -//! Workaround for Android -#ifdef isnan -#undef isnan -#endif - -//! Workaround for Android -#ifdef isinf -#undef isinf -#endif - -//! 
Workaround for Chrone Native Client -#ifdef log2 -#undef log2 -#endif - diff --git a/third_party/glm/detail/_noise.hpp b/third_party/glm/detail/_noise.hpp deleted file mode 100755 index 5a874a0..0000000 --- a/third_party/glm/detail/_noise.hpp +++ /dev/null @@ -1,81 +0,0 @@ -#pragma once - -#include "../common.hpp" - -namespace glm{ -namespace detail -{ - template - GLM_FUNC_QUALIFIER T mod289(T const& x) - { - return x - floor(x * (static_cast(1.0) / static_cast(289.0))) * static_cast(289.0); - } - - template - GLM_FUNC_QUALIFIER T permute(T const& x) - { - return mod289(((x * static_cast(34)) + static_cast(1)) * x); - } - - template - GLM_FUNC_QUALIFIER vec<2, T, Q> permute(vec<2, T, Q> const& x) - { - return mod289(((x * static_cast(34)) + static_cast(1)) * x); - } - - template - GLM_FUNC_QUALIFIER vec<3, T, Q> permute(vec<3, T, Q> const& x) - { - return mod289(((x * static_cast(34)) + static_cast(1)) * x); - } - - template - GLM_FUNC_QUALIFIER vec<4, T, Q> permute(vec<4, T, Q> const& x) - { - return mod289(((x * static_cast(34)) + static_cast(1)) * x); - } - - template - GLM_FUNC_QUALIFIER T taylorInvSqrt(T const& r) - { - return static_cast(1.79284291400159) - static_cast(0.85373472095314) * r; - } - - template - GLM_FUNC_QUALIFIER vec<2, T, Q> taylorInvSqrt(vec<2, T, Q> const& r) - { - return static_cast(1.79284291400159) - static_cast(0.85373472095314) * r; - } - - template - GLM_FUNC_QUALIFIER vec<3, T, Q> taylorInvSqrt(vec<3, T, Q> const& r) - { - return static_cast(1.79284291400159) - static_cast(0.85373472095314) * r; - } - - template - GLM_FUNC_QUALIFIER vec<4, T, Q> taylorInvSqrt(vec<4, T, Q> const& r) - { - return static_cast(1.79284291400159) - static_cast(0.85373472095314) * r; - } - - template - GLM_FUNC_QUALIFIER vec<2, T, Q> fade(vec<2, T, Q> const& t) - { - return (t * t * t) * (t * (t * static_cast(6) - static_cast(15)) + static_cast(10)); - } - - template - GLM_FUNC_QUALIFIER vec<3, T, Q> fade(vec<3, T, Q> const& t) - { - return (t * t * t) * (t * (t * static_cast(6) - static_cast(15)) + static_cast(10)); - } - - template - GLM_FUNC_QUALIFIER vec<4, T, Q> fade(vec<4, T, Q> const& t) - { - return (t * t * t) * (t * (t * static_cast(6) - static_cast(15)) + static_cast(10)); - } -}//namespace detail -}//namespace glm - diff --git a/third_party/glm/detail/_swizzle.hpp b/third_party/glm/detail/_swizzle.hpp deleted file mode 100755 index 87896ef..0000000 --- a/third_party/glm/detail/_swizzle.hpp +++ /dev/null @@ -1,804 +0,0 @@ -#pragma once - -namespace glm{ -namespace detail -{ - // Internal class for implementing swizzle operators - template - struct _swizzle_base0 - { - protected: - GLM_FUNC_QUALIFIER T& elem(size_t i){ return (reinterpret_cast(_buffer))[i]; } - GLM_FUNC_QUALIFIER T const& elem(size_t i) const{ return (reinterpret_cast(_buffer))[i]; } - - // Use an opaque buffer to *ensure* the compiler doesn't call a constructor. 
- // The size 1 buffer is assumed to aligned to the actual members so that the - // elem() - char _buffer[1]; - }; - - template - struct _swizzle_base1 : public _swizzle_base0 - { - }; - - template - struct _swizzle_base1<2, T, Q, E0,E1,-1,-2, Aligned> : public _swizzle_base0 - { - GLM_FUNC_QUALIFIER vec<2, T, Q> operator ()() const { return vec<2, T, Q>(this->elem(E0), this->elem(E1)); } - }; - - template - struct _swizzle_base1<3, T, Q, E0,E1,E2,-1, Aligned> : public _swizzle_base0 - { - GLM_FUNC_QUALIFIER vec<3, T, Q> operator ()() const { return vec<3, T, Q>(this->elem(E0), this->elem(E1), this->elem(E2)); } - }; - - template - struct _swizzle_base1<4, T, Q, E0,E1,E2,E3, Aligned> : public _swizzle_base0 - { - GLM_FUNC_QUALIFIER vec<4, T, Q> operator ()() const { return vec<4, T, Q>(this->elem(E0), this->elem(E1), this->elem(E2), this->elem(E3)); } - }; - - // Internal class for implementing swizzle operators - /* - Template parameters: - - T = type of scalar values (e.g. float, double) - N = number of components in the vector (e.g. 3) - E0...3 = what index the n-th element of this swizzle refers to in the unswizzled vec - - DUPLICATE_ELEMENTS = 1 if there is a repeated element, 0 otherwise (used to specialize swizzles - containing duplicate elements so that they cannot be used as r-values). - */ - template - struct _swizzle_base2 : public _swizzle_base1::value> - { - struct op_equal - { - GLM_FUNC_QUALIFIER void operator() (T& e, T& t) const{ e = t; } - }; - - struct op_minus - { - GLM_FUNC_QUALIFIER void operator() (T& e, T& t) const{ e -= t; } - }; - - struct op_plus - { - GLM_FUNC_QUALIFIER void operator() (T& e, T& t) const{ e += t; } - }; - - struct op_mul - { - GLM_FUNC_QUALIFIER void operator() (T& e, T& t) const{ e *= t; } - }; - - struct op_div - { - GLM_FUNC_QUALIFIER void operator() (T& e, T& t) const{ e /= t; } - }; - - public: - GLM_FUNC_QUALIFIER _swizzle_base2& operator= (const T& t) - { - for (int i = 0; i < N; ++i) - (*this)[i] = t; - return *this; - } - - GLM_FUNC_QUALIFIER _swizzle_base2& operator= (vec const& that) - { - _apply_op(that, op_equal()); - return *this; - } - - GLM_FUNC_QUALIFIER void operator -= (vec const& that) - { - _apply_op(that, op_minus()); - } - - GLM_FUNC_QUALIFIER void operator += (vec const& that) - { - _apply_op(that, op_plus()); - } - - GLM_FUNC_QUALIFIER void operator *= (vec const& that) - { - _apply_op(that, op_mul()); - } - - GLM_FUNC_QUALIFIER void operator /= (vec const& that) - { - _apply_op(that, op_div()); - } - - GLM_FUNC_QUALIFIER T& operator[](size_t i) - { - const int offset_dst[4] = { E0, E1, E2, E3 }; - return this->elem(offset_dst[i]); - } - GLM_FUNC_QUALIFIER T operator[](size_t i) const - { - const int offset_dst[4] = { E0, E1, E2, E3 }; - return this->elem(offset_dst[i]); - } - - protected: - template - GLM_FUNC_QUALIFIER void _apply_op(vec const& that, const U& op) - { - // Make a copy of the data in this == &that. - // The copier should optimize out the copy in cases where the function is - // properly inlined and the copy is not necessary. - T t[N]; - for (int i = 0; i < N; ++i) - t[i] = that[i]; - for (int i = 0; i < N; ++i) - op( (*this)[i], t[i] ); - } - }; - - // Specialization for swizzles containing duplicate elements. These cannot be modified. 
- template - struct _swizzle_base2 : public _swizzle_base1::value> - { - struct Stub {}; - - GLM_FUNC_QUALIFIER _swizzle_base2& operator= (Stub const&) { return *this; } - - GLM_FUNC_QUALIFIER T operator[] (size_t i) const - { - const int offset_dst[4] = { E0, E1, E2, E3 }; - return this->elem(offset_dst[i]); - } - }; - - template - struct _swizzle : public _swizzle_base2 - { - typedef _swizzle_base2 base_type; - - using base_type::operator=; - - GLM_FUNC_QUALIFIER operator vec () const { return (*this)(); } - }; - -// -// To prevent the C++ syntax from getting entirely overwhelming, define some alias macros -// -#define GLM_SWIZZLE_TEMPLATE1 template -#define GLM_SWIZZLE_TEMPLATE2 template -#define GLM_SWIZZLE_TYPE1 _swizzle -#define GLM_SWIZZLE_TYPE2 _swizzle - -// -// Wrapper for a binary operator (e.g. u.yy + v.zy) -// -#define GLM_SWIZZLE_VECTOR_BINARY_OPERATOR_IMPLEMENTATION(OPERAND) \ - GLM_SWIZZLE_TEMPLATE2 \ - GLM_FUNC_QUALIFIER vec operator OPERAND ( const GLM_SWIZZLE_TYPE1& a, const GLM_SWIZZLE_TYPE2& b) \ - { \ - return a() OPERAND b(); \ - } \ - GLM_SWIZZLE_TEMPLATE1 \ - GLM_FUNC_QUALIFIER vec operator OPERAND ( const GLM_SWIZZLE_TYPE1& a, const vec& b) \ - { \ - return a() OPERAND b; \ - } \ - GLM_SWIZZLE_TEMPLATE1 \ - GLM_FUNC_QUALIFIER vec operator OPERAND ( const vec& a, const GLM_SWIZZLE_TYPE1& b) \ - { \ - return a OPERAND b(); \ - } - -// -// Wrapper for a operand between a swizzle and a binary (e.g. 1.0f - u.xyz) -// -#define GLM_SWIZZLE_SCALAR_BINARY_OPERATOR_IMPLEMENTATION(OPERAND) \ - GLM_SWIZZLE_TEMPLATE1 \ - GLM_FUNC_QUALIFIER vec operator OPERAND ( const GLM_SWIZZLE_TYPE1& a, const T& b) \ - { \ - return a() OPERAND b; \ - } \ - GLM_SWIZZLE_TEMPLATE1 \ - GLM_FUNC_QUALIFIER vec operator OPERAND ( const T& a, const GLM_SWIZZLE_TYPE1& b) \ - { \ - return a OPERAND b(); \ - } - -// -// Macro for wrapping a function taking one argument (e.g. abs()) -// -#define GLM_SWIZZLE_FUNCTION_1_ARGS(RETURN_TYPE,FUNCTION) \ - GLM_SWIZZLE_TEMPLATE1 \ - GLM_FUNC_QUALIFIER typename GLM_SWIZZLE_TYPE1::RETURN_TYPE FUNCTION(const GLM_SWIZZLE_TYPE1& a) \ - { \ - return FUNCTION(a()); \ - } - -// -// Macro for wrapping a function taking two vector arguments (e.g. dot()). -// -#define GLM_SWIZZLE_FUNCTION_2_ARGS(RETURN_TYPE,FUNCTION) \ - GLM_SWIZZLE_TEMPLATE2 \ - GLM_FUNC_QUALIFIER typename GLM_SWIZZLE_TYPE1::RETURN_TYPE FUNCTION(const GLM_SWIZZLE_TYPE1& a, const GLM_SWIZZLE_TYPE2& b) \ - { \ - return FUNCTION(a(), b()); \ - } \ - GLM_SWIZZLE_TEMPLATE1 \ - GLM_FUNC_QUALIFIER typename GLM_SWIZZLE_TYPE1::RETURN_TYPE FUNCTION(const GLM_SWIZZLE_TYPE1& a, const GLM_SWIZZLE_TYPE1& b) \ - { \ - return FUNCTION(a(), b()); \ - } \ - GLM_SWIZZLE_TEMPLATE1 \ - GLM_FUNC_QUALIFIER typename GLM_SWIZZLE_TYPE1::RETURN_TYPE FUNCTION(const GLM_SWIZZLE_TYPE1& a, const typename V& b) \ - { \ - return FUNCTION(a(), b); \ - } \ - GLM_SWIZZLE_TEMPLATE1 \ - GLM_FUNC_QUALIFIER typename GLM_SWIZZLE_TYPE1::RETURN_TYPE FUNCTION(const V& a, const GLM_SWIZZLE_TYPE1& b) \ - { \ - return FUNCTION(a, b()); \ - } - -// -// Macro for wrapping a function take 2 vec arguments followed by a scalar (e.g. mix()). 
-// -#define GLM_SWIZZLE_FUNCTION_2_ARGS_SCALAR(RETURN_TYPE,FUNCTION) \ - GLM_SWIZZLE_TEMPLATE2 \ - GLM_FUNC_QUALIFIER typename GLM_SWIZZLE_TYPE1::RETURN_TYPE FUNCTION(const GLM_SWIZZLE_TYPE1& a, const GLM_SWIZZLE_TYPE2& b, const T& c) \ - { \ - return FUNCTION(a(), b(), c); \ - } \ - GLM_SWIZZLE_TEMPLATE1 \ - GLM_FUNC_QUALIFIER typename GLM_SWIZZLE_TYPE1::RETURN_TYPE FUNCTION(const GLM_SWIZZLE_TYPE1& a, const GLM_SWIZZLE_TYPE1& b, const T& c) \ - { \ - return FUNCTION(a(), b(), c); \ - } \ - GLM_SWIZZLE_TEMPLATE1 \ - GLM_FUNC_QUALIFIER typename GLM_SWIZZLE_TYPE1::RETURN_TYPE FUNCTION(const GLM_SWIZZLE_TYPE1& a, const typename S0::vec_type& b, const T& c)\ - { \ - return FUNCTION(a(), b, c); \ - } \ - GLM_SWIZZLE_TEMPLATE1 \ - GLM_FUNC_QUALIFIER typename GLM_SWIZZLE_TYPE1::RETURN_TYPE FUNCTION(const typename V& a, const GLM_SWIZZLE_TYPE1& b, const T& c) \ - { \ - return FUNCTION(a, b(), c); \ - } - -}//namespace detail -}//namespace glm - -namespace glm -{ - namespace detail - { - GLM_SWIZZLE_SCALAR_BINARY_OPERATOR_IMPLEMENTATION(-) - GLM_SWIZZLE_SCALAR_BINARY_OPERATOR_IMPLEMENTATION(*) - GLM_SWIZZLE_VECTOR_BINARY_OPERATOR_IMPLEMENTATION(+) - GLM_SWIZZLE_VECTOR_BINARY_OPERATOR_IMPLEMENTATION(-) - GLM_SWIZZLE_VECTOR_BINARY_OPERATOR_IMPLEMENTATION(*) - GLM_SWIZZLE_VECTOR_BINARY_OPERATOR_IMPLEMENTATION(/) - } - - // - // Swizzles are distinct types from the unswizzled type. The below macros will - // provide template specializations for the swizzle types for the given functions - // so that the compiler does not have any ambiguity to choosing how to handle - // the function. - // - // The alternative is to use the operator()() when calling the function in order - // to explicitly convert the swizzled type to the unswizzled type. - // - - //GLM_SWIZZLE_FUNCTION_1_ARGS(vec_type, abs); - //GLM_SWIZZLE_FUNCTION_1_ARGS(vec_type, acos); - //GLM_SWIZZLE_FUNCTION_1_ARGS(vec_type, acosh); - //GLM_SWIZZLE_FUNCTION_1_ARGS(vec_type, all); - //GLM_SWIZZLE_FUNCTION_1_ARGS(vec_type, any); - - //GLM_SWIZZLE_FUNCTION_2_ARGS(value_type, dot); - //GLM_SWIZZLE_FUNCTION_2_ARGS(vec_type, cross); - //GLM_SWIZZLE_FUNCTION_2_ARGS(vec_type, step); - //GLM_SWIZZLE_FUNCTION_2_ARGS_SCALAR(vec_type, mix); -} - -#define GLM_SWIZZLE2_2_MEMBERS(T, Q, E0,E1) \ - struct { detail::_swizzle<2, T, Q, 0,0,-1,-2> E0 ## E0; }; \ - struct { detail::_swizzle<2, T, Q, 0,1,-1,-2> E0 ## E1; }; \ - struct { detail::_swizzle<2, T, Q, 1,0,-1,-2> E1 ## E0; }; \ - struct { detail::_swizzle<2, T, Q, 1,1,-1,-2> E1 ## E1; }; - -#define GLM_SWIZZLE2_3_MEMBERS(T, Q, E0,E1) \ - struct { detail::_swizzle<3,T, Q, 0,0,0,-1> E0 ## E0 ## E0; }; \ - struct { detail::_swizzle<3,T, Q, 0,0,1,-1> E0 ## E0 ## E1; }; \ - struct { detail::_swizzle<3,T, Q, 0,1,0,-1> E0 ## E1 ## E0; }; \ - struct { detail::_swizzle<3,T, Q, 0,1,1,-1> E0 ## E1 ## E1; }; \ - struct { detail::_swizzle<3,T, Q, 1,0,0,-1> E1 ## E0 ## E0; }; \ - struct { detail::_swizzle<3,T, Q, 1,0,1,-1> E1 ## E0 ## E1; }; \ - struct { detail::_swizzle<3,T, Q, 1,1,0,-1> E1 ## E1 ## E0; }; \ - struct { detail::_swizzle<3,T, Q, 1,1,1,-1> E1 ## E1 ## E1; }; - -#define GLM_SWIZZLE2_4_MEMBERS(T, Q, E0,E1) \ - struct { detail::_swizzle<4,T, Q, 0,0,0,0> E0 ## E0 ## E0 ## E0; }; \ - struct { detail::_swizzle<4,T, Q, 0,0,0,1> E0 ## E0 ## E0 ## E1; }; \ - struct { detail::_swizzle<4,T, Q, 0,0,1,0> E0 ## E0 ## E1 ## E0; }; \ - struct { detail::_swizzle<4,T, Q, 0,0,1,1> E0 ## E0 ## E1 ## E1; }; \ - struct { detail::_swizzle<4,T, Q, 0,1,0,0> E0 ## E1 ## E0 ## E0; }; \ - struct { detail::_swizzle<4,T, Q, 0,1,0,1> E0 
## E1 ## E0 ## E1; }; \ - struct { detail::_swizzle<4,T, Q, 0,1,1,0> E0 ## E1 ## E1 ## E0; }; \ - struct { detail::_swizzle<4,T, Q, 0,1,1,1> E0 ## E1 ## E1 ## E1; }; \ - struct { detail::_swizzle<4,T, Q, 1,0,0,0> E1 ## E0 ## E0 ## E0; }; \ - struct { detail::_swizzle<4,T, Q, 1,0,0,1> E1 ## E0 ## E0 ## E1; }; \ - struct { detail::_swizzle<4,T, Q, 1,0,1,0> E1 ## E0 ## E1 ## E0; }; \ - struct { detail::_swizzle<4,T, Q, 1,0,1,1> E1 ## E0 ## E1 ## E1; }; \ - struct { detail::_swizzle<4,T, Q, 1,1,0,0> E1 ## E1 ## E0 ## E0; }; \ - struct { detail::_swizzle<4,T, Q, 1,1,0,1> E1 ## E1 ## E0 ## E1; }; \ - struct { detail::_swizzle<4,T, Q, 1,1,1,0> E1 ## E1 ## E1 ## E0; }; \ - struct { detail::_swizzle<4,T, Q, 1,1,1,1> E1 ## E1 ## E1 ## E1; }; - -#define GLM_SWIZZLE3_2_MEMBERS(T, Q, E0,E1,E2) \ - struct { detail::_swizzle<2,T, Q, 0,0,-1,-2> E0 ## E0; }; \ - struct { detail::_swizzle<2,T, Q, 0,1,-1,-2> E0 ## E1; }; \ - struct { detail::_swizzle<2,T, Q, 0,2,-1,-2> E0 ## E2; }; \ - struct { detail::_swizzle<2,T, Q, 1,0,-1,-2> E1 ## E0; }; \ - struct { detail::_swizzle<2,T, Q, 1,1,-1,-2> E1 ## E1; }; \ - struct { detail::_swizzle<2,T, Q, 1,2,-1,-2> E1 ## E2; }; \ - struct { detail::_swizzle<2,T, Q, 2,0,-1,-2> E2 ## E0; }; \ - struct { detail::_swizzle<2,T, Q, 2,1,-1,-2> E2 ## E1; }; \ - struct { detail::_swizzle<2,T, Q, 2,2,-1,-2> E2 ## E2; }; - -#define GLM_SWIZZLE3_3_MEMBERS(T, Q ,E0,E1,E2) \ - struct { detail::_swizzle<3, T, Q, 0,0,0,-1> E0 ## E0 ## E0; }; \ - struct { detail::_swizzle<3, T, Q, 0,0,1,-1> E0 ## E0 ## E1; }; \ - struct { detail::_swizzle<3, T, Q, 0,0,2,-1> E0 ## E0 ## E2; }; \ - struct { detail::_swizzle<3, T, Q, 0,1,0,-1> E0 ## E1 ## E0; }; \ - struct { detail::_swizzle<3, T, Q, 0,1,1,-1> E0 ## E1 ## E1; }; \ - struct { detail::_swizzle<3, T, Q, 0,1,2,-1> E0 ## E1 ## E2; }; \ - struct { detail::_swizzle<3, T, Q, 0,2,0,-1> E0 ## E2 ## E0; }; \ - struct { detail::_swizzle<3, T, Q, 0,2,1,-1> E0 ## E2 ## E1; }; \ - struct { detail::_swizzle<3, T, Q, 0,2,2,-1> E0 ## E2 ## E2; }; \ - struct { detail::_swizzle<3, T, Q, 1,0,0,-1> E1 ## E0 ## E0; }; \ - struct { detail::_swizzle<3, T, Q, 1,0,1,-1> E1 ## E0 ## E1; }; \ - struct { detail::_swizzle<3, T, Q, 1,0,2,-1> E1 ## E0 ## E2; }; \ - struct { detail::_swizzle<3, T, Q, 1,1,0,-1> E1 ## E1 ## E0; }; \ - struct { detail::_swizzle<3, T, Q, 1,1,1,-1> E1 ## E1 ## E1; }; \ - struct { detail::_swizzle<3, T, Q, 1,1,2,-1> E1 ## E1 ## E2; }; \ - struct { detail::_swizzle<3, T, Q, 1,2,0,-1> E1 ## E2 ## E0; }; \ - struct { detail::_swizzle<3, T, Q, 1,2,1,-1> E1 ## E2 ## E1; }; \ - struct { detail::_swizzle<3, T, Q, 1,2,2,-1> E1 ## E2 ## E2; }; \ - struct { detail::_swizzle<3, T, Q, 2,0,0,-1> E2 ## E0 ## E0; }; \ - struct { detail::_swizzle<3, T, Q, 2,0,1,-1> E2 ## E0 ## E1; }; \ - struct { detail::_swizzle<3, T, Q, 2,0,2,-1> E2 ## E0 ## E2; }; \ - struct { detail::_swizzle<3, T, Q, 2,1,0,-1> E2 ## E1 ## E0; }; \ - struct { detail::_swizzle<3, T, Q, 2,1,1,-1> E2 ## E1 ## E1; }; \ - struct { detail::_swizzle<3, T, Q, 2,1,2,-1> E2 ## E1 ## E2; }; \ - struct { detail::_swizzle<3, T, Q, 2,2,0,-1> E2 ## E2 ## E0; }; \ - struct { detail::_swizzle<3, T, Q, 2,2,1,-1> E2 ## E2 ## E1; }; \ - struct { detail::_swizzle<3, T, Q, 2,2,2,-1> E2 ## E2 ## E2; }; - -#define GLM_SWIZZLE3_4_MEMBERS(T, Q, E0,E1,E2) \ - struct { detail::_swizzle<4,T, Q, 0,0,0,0> E0 ## E0 ## E0 ## E0; }; \ - struct { detail::_swizzle<4,T, Q, 0,0,0,1> E0 ## E0 ## E0 ## E1; }; \ - struct { detail::_swizzle<4,T, Q, 0,0,0,2> E0 ## E0 ## E0 ## E2; }; \ - struct { detail::_swizzle<4,T, Q, 0,0,1,0> E0 ## 
E0 ## E1 ## E0; }; \ - struct { detail::_swizzle<4,T, Q, 0,0,1,1> E0 ## E0 ## E1 ## E1; }; \ - struct { detail::_swizzle<4,T, Q, 0,0,1,2> E0 ## E0 ## E1 ## E2; }; \ - struct { detail::_swizzle<4,T, Q, 0,0,2,0> E0 ## E0 ## E2 ## E0; }; \ - struct { detail::_swizzle<4,T, Q, 0,0,2,1> E0 ## E0 ## E2 ## E1; }; \ - struct { detail::_swizzle<4,T, Q, 0,0,2,2> E0 ## E0 ## E2 ## E2; }; \ - struct { detail::_swizzle<4,T, Q, 0,1,0,0> E0 ## E1 ## E0 ## E0; }; \ - struct { detail::_swizzle<4,T, Q, 0,1,0,1> E0 ## E1 ## E0 ## E1; }; \ - struct { detail::_swizzle<4,T, Q, 0,1,0,2> E0 ## E1 ## E0 ## E2; }; \ - struct { detail::_swizzle<4,T, Q, 0,1,1,0> E0 ## E1 ## E1 ## E0; }; \ - struct { detail::_swizzle<4,T, Q, 0,1,1,1> E0 ## E1 ## E1 ## E1; }; \ - struct { detail::_swizzle<4,T, Q, 0,1,1,2> E0 ## E1 ## E1 ## E2; }; \ - struct { detail::_swizzle<4,T, Q, 0,1,2,0> E0 ## E1 ## E2 ## E0; }; \ - struct { detail::_swizzle<4,T, Q, 0,1,2,1> E0 ## E1 ## E2 ## E1; }; \ - struct { detail::_swizzle<4,T, Q, 0,1,2,2> E0 ## E1 ## E2 ## E2; }; \ - struct { detail::_swizzle<4,T, Q, 0,2,0,0> E0 ## E2 ## E0 ## E0; }; \ - struct { detail::_swizzle<4,T, Q, 0,2,0,1> E0 ## E2 ## E0 ## E1; }; \ - struct { detail::_swizzle<4,T, Q, 0,2,0,2> E0 ## E2 ## E0 ## E2; }; \ - struct { detail::_swizzle<4,T, Q, 0,2,1,0> E0 ## E2 ## E1 ## E0; }; \ - struct { detail::_swizzle<4,T, Q, 0,2,1,1> E0 ## E2 ## E1 ## E1; }; \ - struct { detail::_swizzle<4,T, Q, 0,2,1,2> E0 ## E2 ## E1 ## E2; }; \ - struct { detail::_swizzle<4,T, Q, 0,2,2,0> E0 ## E2 ## E2 ## E0; }; \ - struct { detail::_swizzle<4,T, Q, 0,2,2,1> E0 ## E2 ## E2 ## E1; }; \ - struct { detail::_swizzle<4,T, Q, 0,2,2,2> E0 ## E2 ## E2 ## E2; }; \ - struct { detail::_swizzle<4,T, Q, 1,0,0,0> E1 ## E0 ## E0 ## E0; }; \ - struct { detail::_swizzle<4,T, Q, 1,0,0,1> E1 ## E0 ## E0 ## E1; }; \ - struct { detail::_swizzle<4,T, Q, 1,0,0,2> E1 ## E0 ## E0 ## E2; }; \ - struct { detail::_swizzle<4,T, Q, 1,0,1,0> E1 ## E0 ## E1 ## E0; }; \ - struct { detail::_swizzle<4,T, Q, 1,0,1,1> E1 ## E0 ## E1 ## E1; }; \ - struct { detail::_swizzle<4,T, Q, 1,0,1,2> E1 ## E0 ## E1 ## E2; }; \ - struct { detail::_swizzle<4,T, Q, 1,0,2,0> E1 ## E0 ## E2 ## E0; }; \ - struct { detail::_swizzle<4,T, Q, 1,0,2,1> E1 ## E0 ## E2 ## E1; }; \ - struct { detail::_swizzle<4,T, Q, 1,0,2,2> E1 ## E0 ## E2 ## E2; }; \ - struct { detail::_swizzle<4,T, Q, 1,1,0,0> E1 ## E1 ## E0 ## E0; }; \ - struct { detail::_swizzle<4,T, Q, 1,1,0,1> E1 ## E1 ## E0 ## E1; }; \ - struct { detail::_swizzle<4,T, Q, 1,1,0,2> E1 ## E1 ## E0 ## E2; }; \ - struct { detail::_swizzle<4,T, Q, 1,1,1,0> E1 ## E1 ## E1 ## E0; }; \ - struct { detail::_swizzle<4,T, Q, 1,1,1,1> E1 ## E1 ## E1 ## E1; }; \ - struct { detail::_swizzle<4,T, Q, 1,1,1,2> E1 ## E1 ## E1 ## E2; }; \ - struct { detail::_swizzle<4,T, Q, 1,1,2,0> E1 ## E1 ## E2 ## E0; }; \ - struct { detail::_swizzle<4,T, Q, 1,1,2,1> E1 ## E1 ## E2 ## E1; }; \ - struct { detail::_swizzle<4,T, Q, 1,1,2,2> E1 ## E1 ## E2 ## E2; }; \ - struct { detail::_swizzle<4,T, Q, 1,2,0,0> E1 ## E2 ## E0 ## E0; }; \ - struct { detail::_swizzle<4,T, Q, 1,2,0,1> E1 ## E2 ## E0 ## E1; }; \ - struct { detail::_swizzle<4,T, Q, 1,2,0,2> E1 ## E2 ## E0 ## E2; }; \ - struct { detail::_swizzle<4,T, Q, 1,2,1,0> E1 ## E2 ## E1 ## E0; }; \ - struct { detail::_swizzle<4,T, Q, 1,2,1,1> E1 ## E2 ## E1 ## E1; }; \ - struct { detail::_swizzle<4,T, Q, 1,2,1,2> E1 ## E2 ## E1 ## E2; }; \ - struct { detail::_swizzle<4,T, Q, 1,2,2,0> E1 ## E2 ## E2 ## E0; }; \ - struct { detail::_swizzle<4,T, Q, 1,2,2,1> E1 ## E2 ## E2 ## E1; }; \ - 
struct { detail::_swizzle<4,T, Q, 1,2,2,2> E1 ## E2 ## E2 ## E2; }; \ - struct { detail::_swizzle<4,T, Q, 2,0,0,0> E2 ## E0 ## E0 ## E0; }; \ - struct { detail::_swizzle<4,T, Q, 2,0,0,1> E2 ## E0 ## E0 ## E1; }; \ - struct { detail::_swizzle<4,T, Q, 2,0,0,2> E2 ## E0 ## E0 ## E2; }; \ - struct { detail::_swizzle<4,T, Q, 2,0,1,0> E2 ## E0 ## E1 ## E0; }; \ - struct { detail::_swizzle<4,T, Q, 2,0,1,1> E2 ## E0 ## E1 ## E1; }; \ - struct { detail::_swizzle<4,T, Q, 2,0,1,2> E2 ## E0 ## E1 ## E2; }; \ - struct { detail::_swizzle<4,T, Q, 2,0,2,0> E2 ## E0 ## E2 ## E0; }; \ - struct { detail::_swizzle<4,T, Q, 2,0,2,1> E2 ## E0 ## E2 ## E1; }; \ - struct { detail::_swizzle<4,T, Q, 2,0,2,2> E2 ## E0 ## E2 ## E2; }; \ - struct { detail::_swizzle<4,T, Q, 2,1,0,0> E2 ## E1 ## E0 ## E0; }; \ - struct { detail::_swizzle<4,T, Q, 2,1,0,1> E2 ## E1 ## E0 ## E1; }; \ - struct { detail::_swizzle<4,T, Q, 2,1,0,2> E2 ## E1 ## E0 ## E2; }; \ - struct { detail::_swizzle<4,T, Q, 2,1,1,0> E2 ## E1 ## E1 ## E0; }; \ - struct { detail::_swizzle<4,T, Q, 2,1,1,1> E2 ## E1 ## E1 ## E1; }; \ - struct { detail::_swizzle<4,T, Q, 2,1,1,2> E2 ## E1 ## E1 ## E2; }; \ - struct { detail::_swizzle<4,T, Q, 2,1,2,0> E2 ## E1 ## E2 ## E0; }; \ - struct { detail::_swizzle<4,T, Q, 2,1,2,1> E2 ## E1 ## E2 ## E1; }; \ - struct { detail::_swizzle<4,T, Q, 2,1,2,2> E2 ## E1 ## E2 ## E2; }; \ - struct { detail::_swizzle<4,T, Q, 2,2,0,0> E2 ## E2 ## E0 ## E0; }; \ - struct { detail::_swizzle<4,T, Q, 2,2,0,1> E2 ## E2 ## E0 ## E1; }; \ - struct { detail::_swizzle<4,T, Q, 2,2,0,2> E2 ## E2 ## E0 ## E2; }; \ - struct { detail::_swizzle<4,T, Q, 2,2,1,0> E2 ## E2 ## E1 ## E0; }; \ - struct { detail::_swizzle<4,T, Q, 2,2,1,1> E2 ## E2 ## E1 ## E1; }; \ - struct { detail::_swizzle<4,T, Q, 2,2,1,2> E2 ## E2 ## E1 ## E2; }; \ - struct { detail::_swizzle<4,T, Q, 2,2,2,0> E2 ## E2 ## E2 ## E0; }; \ - struct { detail::_swizzle<4,T, Q, 2,2,2,1> E2 ## E2 ## E2 ## E1; }; \ - struct { detail::_swizzle<4,T, Q, 2,2,2,2> E2 ## E2 ## E2 ## E2; }; - -#define GLM_SWIZZLE4_2_MEMBERS(T, Q, E0,E1,E2,E3) \ - struct { detail::_swizzle<2,T, Q, 0,0,-1,-2> E0 ## E0; }; \ - struct { detail::_swizzle<2,T, Q, 0,1,-1,-2> E0 ## E1; }; \ - struct { detail::_swizzle<2,T, Q, 0,2,-1,-2> E0 ## E2; }; \ - struct { detail::_swizzle<2,T, Q, 0,3,-1,-2> E0 ## E3; }; \ - struct { detail::_swizzle<2,T, Q, 1,0,-1,-2> E1 ## E0; }; \ - struct { detail::_swizzle<2,T, Q, 1,1,-1,-2> E1 ## E1; }; \ - struct { detail::_swizzle<2,T, Q, 1,2,-1,-2> E1 ## E2; }; \ - struct { detail::_swizzle<2,T, Q, 1,3,-1,-2> E1 ## E3; }; \ - struct { detail::_swizzle<2,T, Q, 2,0,-1,-2> E2 ## E0; }; \ - struct { detail::_swizzle<2,T, Q, 2,1,-1,-2> E2 ## E1; }; \ - struct { detail::_swizzle<2,T, Q, 2,2,-1,-2> E2 ## E2; }; \ - struct { detail::_swizzle<2,T, Q, 2,3,-1,-2> E2 ## E3; }; \ - struct { detail::_swizzle<2,T, Q, 3,0,-1,-2> E3 ## E0; }; \ - struct { detail::_swizzle<2,T, Q, 3,1,-1,-2> E3 ## E1; }; \ - struct { detail::_swizzle<2,T, Q, 3,2,-1,-2> E3 ## E2; }; \ - struct { detail::_swizzle<2,T, Q, 3,3,-1,-2> E3 ## E3; }; - -#define GLM_SWIZZLE4_3_MEMBERS(T, Q, E0,E1,E2,E3) \ - struct { detail::_swizzle<3, T, Q, 0,0,0,-1> E0 ## E0 ## E0; }; \ - struct { detail::_swizzle<3, T, Q, 0,0,1,-1> E0 ## E0 ## E1; }; \ - struct { detail::_swizzle<3, T, Q, 0,0,2,-1> E0 ## E0 ## E2; }; \ - struct { detail::_swizzle<3, T, Q, 0,0,3,-1> E0 ## E0 ## E3; }; \ - struct { detail::_swizzle<3, T, Q, 0,1,0,-1> E0 ## E1 ## E0; }; \ - struct { detail::_swizzle<3, T, Q, 0,1,1,-1> E0 ## E1 ## E1; }; \ - struct { detail::_swizzle<3, T, 
Q, 0,1,2,-1> E0 ## E1 ## E2; }; \ - struct { detail::_swizzle<3, T, Q, 0,1,3,-1> E0 ## E1 ## E3; }; \ - struct { detail::_swizzle<3, T, Q, 0,2,0,-1> E0 ## E2 ## E0; }; \ - struct { detail::_swizzle<3, T, Q, 0,2,1,-1> E0 ## E2 ## E1; }; \ - struct { detail::_swizzle<3, T, Q, 0,2,2,-1> E0 ## E2 ## E2; }; \ - struct { detail::_swizzle<3, T, Q, 0,2,3,-1> E0 ## E2 ## E3; }; \ - struct { detail::_swizzle<3, T, Q, 0,3,0,-1> E0 ## E3 ## E0; }; \ - struct { detail::_swizzle<3, T, Q, 0,3,1,-1> E0 ## E3 ## E1; }; \ - struct { detail::_swizzle<3, T, Q, 0,3,2,-1> E0 ## E3 ## E2; }; \ - struct { detail::_swizzle<3, T, Q, 0,3,3,-1> E0 ## E3 ## E3; }; \ - struct { detail::_swizzle<3, T, Q, 1,0,0,-1> E1 ## E0 ## E0; }; \ - struct { detail::_swizzle<3, T, Q, 1,0,1,-1> E1 ## E0 ## E1; }; \ - struct { detail::_swizzle<3, T, Q, 1,0,2,-1> E1 ## E0 ## E2; }; \ - struct { detail::_swizzle<3, T, Q, 1,0,3,-1> E1 ## E0 ## E3; }; \ - struct { detail::_swizzle<3, T, Q, 1,1,0,-1> E1 ## E1 ## E0; }; \ - struct { detail::_swizzle<3, T, Q, 1,1,1,-1> E1 ## E1 ## E1; }; \ - struct { detail::_swizzle<3, T, Q, 1,1,2,-1> E1 ## E1 ## E2; }; \ - struct { detail::_swizzle<3, T, Q, 1,1,3,-1> E1 ## E1 ## E3; }; \ - struct { detail::_swizzle<3, T, Q, 1,2,0,-1> E1 ## E2 ## E0; }; \ - struct { detail::_swizzle<3, T, Q, 1,2,1,-1> E1 ## E2 ## E1; }; \ - struct { detail::_swizzle<3, T, Q, 1,2,2,-1> E1 ## E2 ## E2; }; \ - struct { detail::_swizzle<3, T, Q, 1,2,3,-1> E1 ## E2 ## E3; }; \ - struct { detail::_swizzle<3, T, Q, 1,3,0,-1> E1 ## E3 ## E0; }; \ - struct { detail::_swizzle<3, T, Q, 1,3,1,-1> E1 ## E3 ## E1; }; \ - struct { detail::_swizzle<3, T, Q, 1,3,2,-1> E1 ## E3 ## E2; }; \ - struct { detail::_swizzle<3, T, Q, 1,3,3,-1> E1 ## E3 ## E3; }; \ - struct { detail::_swizzle<3, T, Q, 2,0,0,-1> E2 ## E0 ## E0; }; \ - struct { detail::_swizzle<3, T, Q, 2,0,1,-1> E2 ## E0 ## E1; }; \ - struct { detail::_swizzle<3, T, Q, 2,0,2,-1> E2 ## E0 ## E2; }; \ - struct { detail::_swizzle<3, T, Q, 2,0,3,-1> E2 ## E0 ## E3; }; \ - struct { detail::_swizzle<3, T, Q, 2,1,0,-1> E2 ## E1 ## E0; }; \ - struct { detail::_swizzle<3, T, Q, 2,1,1,-1> E2 ## E1 ## E1; }; \ - struct { detail::_swizzle<3, T, Q, 2,1,2,-1> E2 ## E1 ## E2; }; \ - struct { detail::_swizzle<3, T, Q, 2,1,3,-1> E2 ## E1 ## E3; }; \ - struct { detail::_swizzle<3, T, Q, 2,2,0,-1> E2 ## E2 ## E0; }; \ - struct { detail::_swizzle<3, T, Q, 2,2,1,-1> E2 ## E2 ## E1; }; \ - struct { detail::_swizzle<3, T, Q, 2,2,2,-1> E2 ## E2 ## E2; }; \ - struct { detail::_swizzle<3, T, Q, 2,2,3,-1> E2 ## E2 ## E3; }; \ - struct { detail::_swizzle<3, T, Q, 2,3,0,-1> E2 ## E3 ## E0; }; \ - struct { detail::_swizzle<3, T, Q, 2,3,1,-1> E2 ## E3 ## E1; }; \ - struct { detail::_swizzle<3, T, Q, 2,3,2,-1> E2 ## E3 ## E2; }; \ - struct { detail::_swizzle<3, T, Q, 2,3,3,-1> E2 ## E3 ## E3; }; \ - struct { detail::_swizzle<3, T, Q, 3,0,0,-1> E3 ## E0 ## E0; }; \ - struct { detail::_swizzle<3, T, Q, 3,0,1,-1> E3 ## E0 ## E1; }; \ - struct { detail::_swizzle<3, T, Q, 3,0,2,-1> E3 ## E0 ## E2; }; \ - struct { detail::_swizzle<3, T, Q, 3,0,3,-1> E3 ## E0 ## E3; }; \ - struct { detail::_swizzle<3, T, Q, 3,1,0,-1> E3 ## E1 ## E0; }; \ - struct { detail::_swizzle<3, T, Q, 3,1,1,-1> E3 ## E1 ## E1; }; \ - struct { detail::_swizzle<3, T, Q, 3,1,2,-1> E3 ## E1 ## E2; }; \ - struct { detail::_swizzle<3, T, Q, 3,1,3,-1> E3 ## E1 ## E3; }; \ - struct { detail::_swizzle<3, T, Q, 3,2,0,-1> E3 ## E2 ## E0; }; \ - struct { detail::_swizzle<3, T, Q, 3,2,1,-1> E3 ## E2 ## E1; }; \ - struct { detail::_swizzle<3, T, Q, 3,2,2,-1> E3 ## 
E2 ## E2; }; \ - struct { detail::_swizzle<3, T, Q, 3,2,3,-1> E3 ## E2 ## E3; }; \ - struct { detail::_swizzle<3, T, Q, 3,3,0,-1> E3 ## E3 ## E0; }; \ - struct { detail::_swizzle<3, T, Q, 3,3,1,-1> E3 ## E3 ## E1; }; \ - struct { detail::_swizzle<3, T, Q, 3,3,2,-1> E3 ## E3 ## E2; }; \ - struct { detail::_swizzle<3, T, Q, 3,3,3,-1> E3 ## E3 ## E3; }; - -#define GLM_SWIZZLE4_4_MEMBERS(T, Q, E0,E1,E2,E3) \ - struct { detail::_swizzle<4, T, Q, 0,0,0,0> E0 ## E0 ## E0 ## E0; }; \ - struct { detail::_swizzle<4, T, Q, 0,0,0,1> E0 ## E0 ## E0 ## E1; }; \ - struct { detail::_swizzle<4, T, Q, 0,0,0,2> E0 ## E0 ## E0 ## E2; }; \ - struct { detail::_swizzle<4, T, Q, 0,0,0,3> E0 ## E0 ## E0 ## E3; }; \ - struct { detail::_swizzle<4, T, Q, 0,0,1,0> E0 ## E0 ## E1 ## E0; }; \ - struct { detail::_swizzle<4, T, Q, 0,0,1,1> E0 ## E0 ## E1 ## E1; }; \ - struct { detail::_swizzle<4, T, Q, 0,0,1,2> E0 ## E0 ## E1 ## E2; }; \ - struct { detail::_swizzle<4, T, Q, 0,0,1,3> E0 ## E0 ## E1 ## E3; }; \ - struct { detail::_swizzle<4, T, Q, 0,0,2,0> E0 ## E0 ## E2 ## E0; }; \ - struct { detail::_swizzle<4, T, Q, 0,0,2,1> E0 ## E0 ## E2 ## E1; }; \ - struct { detail::_swizzle<4, T, Q, 0,0,2,2> E0 ## E0 ## E2 ## E2; }; \ - struct { detail::_swizzle<4, T, Q, 0,0,2,3> E0 ## E0 ## E2 ## E3; }; \ - struct { detail::_swizzle<4, T, Q, 0,0,3,0> E0 ## E0 ## E3 ## E0; }; \ - struct { detail::_swizzle<4, T, Q, 0,0,3,1> E0 ## E0 ## E3 ## E1; }; \ - struct { detail::_swizzle<4, T, Q, 0,0,3,2> E0 ## E0 ## E3 ## E2; }; \ - struct { detail::_swizzle<4, T, Q, 0,0,3,3> E0 ## E0 ## E3 ## E3; }; \ - struct { detail::_swizzle<4, T, Q, 0,1,0,0> E0 ## E1 ## E0 ## E0; }; \ - struct { detail::_swizzle<4, T, Q, 0,1,0,1> E0 ## E1 ## E0 ## E1; }; \ - struct { detail::_swizzle<4, T, Q, 0,1,0,2> E0 ## E1 ## E0 ## E2; }; \ - struct { detail::_swizzle<4, T, Q, 0,1,0,3> E0 ## E1 ## E0 ## E3; }; \ - struct { detail::_swizzle<4, T, Q, 0,1,1,0> E0 ## E1 ## E1 ## E0; }; \ - struct { detail::_swizzle<4, T, Q, 0,1,1,1> E0 ## E1 ## E1 ## E1; }; \ - struct { detail::_swizzle<4, T, Q, 0,1,1,2> E0 ## E1 ## E1 ## E2; }; \ - struct { detail::_swizzle<4, T, Q, 0,1,1,3> E0 ## E1 ## E1 ## E3; }; \ - struct { detail::_swizzle<4, T, Q, 0,1,2,0> E0 ## E1 ## E2 ## E0; }; \ - struct { detail::_swizzle<4, T, Q, 0,1,2,1> E0 ## E1 ## E2 ## E1; }; \ - struct { detail::_swizzle<4, T, Q, 0,1,2,2> E0 ## E1 ## E2 ## E2; }; \ - struct { detail::_swizzle<4, T, Q, 0,1,2,3> E0 ## E1 ## E2 ## E3; }; \ - struct { detail::_swizzle<4, T, Q, 0,1,3,0> E0 ## E1 ## E3 ## E0; }; \ - struct { detail::_swizzle<4, T, Q, 0,1,3,1> E0 ## E1 ## E3 ## E1; }; \ - struct { detail::_swizzle<4, T, Q, 0,1,3,2> E0 ## E1 ## E3 ## E2; }; \ - struct { detail::_swizzle<4, T, Q, 0,1,3,3> E0 ## E1 ## E3 ## E3; }; \ - struct { detail::_swizzle<4, T, Q, 0,2,0,0> E0 ## E2 ## E0 ## E0; }; \ - struct { detail::_swizzle<4, T, Q, 0,2,0,1> E0 ## E2 ## E0 ## E1; }; \ - struct { detail::_swizzle<4, T, Q, 0,2,0,2> E0 ## E2 ## E0 ## E2; }; \ - struct { detail::_swizzle<4, T, Q, 0,2,0,3> E0 ## E2 ## E0 ## E3; }; \ - struct { detail::_swizzle<4, T, Q, 0,2,1,0> E0 ## E2 ## E1 ## E0; }; \ - struct { detail::_swizzle<4, T, Q, 0,2,1,1> E0 ## E2 ## E1 ## E1; }; \ - struct { detail::_swizzle<4, T, Q, 0,2,1,2> E0 ## E2 ## E1 ## E2; }; \ - struct { detail::_swizzle<4, T, Q, 0,2,1,3> E0 ## E2 ## E1 ## E3; }; \ - struct { detail::_swizzle<4, T, Q, 0,2,2,0> E0 ## E2 ## E2 ## E0; }; \ - struct { detail::_swizzle<4, T, Q, 0,2,2,1> E0 ## E2 ## E2 ## E1; }; \ - struct { detail::_swizzle<4, T, Q, 0,2,2,2> E0 ## E2 ## E2 ## E2; }; \ - 
struct { detail::_swizzle<4, T, Q, 0,2,2,3> E0 ## E2 ## E2 ## E3; }; \ - struct { detail::_swizzle<4, T, Q, 0,2,3,0> E0 ## E2 ## E3 ## E0; }; \ - struct { detail::_swizzle<4, T, Q, 0,2,3,1> E0 ## E2 ## E3 ## E1; }; \ - struct { detail::_swizzle<4, T, Q, 0,2,3,2> E0 ## E2 ## E3 ## E2; }; \ - struct { detail::_swizzle<4, T, Q, 0,2,3,3> E0 ## E2 ## E3 ## E3; }; \ - struct { detail::_swizzle<4, T, Q, 0,3,0,0> E0 ## E3 ## E0 ## E0; }; \ - struct { detail::_swizzle<4, T, Q, 0,3,0,1> E0 ## E3 ## E0 ## E1; }; \ - struct { detail::_swizzle<4, T, Q, 0,3,0,2> E0 ## E3 ## E0 ## E2; }; \ - struct { detail::_swizzle<4, T, Q, 0,3,0,3> E0 ## E3 ## E0 ## E3; }; \ - struct { detail::_swizzle<4, T, Q, 0,3,1,0> E0 ## E3 ## E1 ## E0; }; \ - struct { detail::_swizzle<4, T, Q, 0,3,1,1> E0 ## E3 ## E1 ## E1; }; \ - struct { detail::_swizzle<4, T, Q, 0,3,1,2> E0 ## E3 ## E1 ## E2; }; \ - struct { detail::_swizzle<4, T, Q, 0,3,1,3> E0 ## E3 ## E1 ## E3; }; \ - struct { detail::_swizzle<4, T, Q, 0,3,2,0> E0 ## E3 ## E2 ## E0; }; \ - struct { detail::_swizzle<4, T, Q, 0,3,2,1> E0 ## E3 ## E2 ## E1; }; \ - struct { detail::_swizzle<4, T, Q, 0,3,2,2> E0 ## E3 ## E2 ## E2; }; \ - struct { detail::_swizzle<4, T, Q, 0,3,2,3> E0 ## E3 ## E2 ## E3; }; \ - struct { detail::_swizzle<4, T, Q, 0,3,3,0> E0 ## E3 ## E3 ## E0; }; \ - struct { detail::_swizzle<4, T, Q, 0,3,3,1> E0 ## E3 ## E3 ## E1; }; \ - struct { detail::_swizzle<4, T, Q, 0,3,3,2> E0 ## E3 ## E3 ## E2; }; \ - struct { detail::_swizzle<4, T, Q, 0,3,3,3> E0 ## E3 ## E3 ## E3; }; \ - struct { detail::_swizzle<4, T, Q, 1,0,0,0> E1 ## E0 ## E0 ## E0; }; \ - struct { detail::_swizzle<4, T, Q, 1,0,0,1> E1 ## E0 ## E0 ## E1; }; \ - struct { detail::_swizzle<4, T, Q, 1,0,0,2> E1 ## E0 ## E0 ## E2; }; \ - struct { detail::_swizzle<4, T, Q, 1,0,0,3> E1 ## E0 ## E0 ## E3; }; \ - struct { detail::_swizzle<4, T, Q, 1,0,1,0> E1 ## E0 ## E1 ## E0; }; \ - struct { detail::_swizzle<4, T, Q, 1,0,1,1> E1 ## E0 ## E1 ## E1; }; \ - struct { detail::_swizzle<4, T, Q, 1,0,1,2> E1 ## E0 ## E1 ## E2; }; \ - struct { detail::_swizzle<4, T, Q, 1,0,1,3> E1 ## E0 ## E1 ## E3; }; \ - struct { detail::_swizzle<4, T, Q, 1,0,2,0> E1 ## E0 ## E2 ## E0; }; \ - struct { detail::_swizzle<4, T, Q, 1,0,2,1> E1 ## E0 ## E2 ## E1; }; \ - struct { detail::_swizzle<4, T, Q, 1,0,2,2> E1 ## E0 ## E2 ## E2; }; \ - struct { detail::_swizzle<4, T, Q, 1,0,2,3> E1 ## E0 ## E2 ## E3; }; \ - struct { detail::_swizzle<4, T, Q, 1,0,3,0> E1 ## E0 ## E3 ## E0; }; \ - struct { detail::_swizzle<4, T, Q, 1,0,3,1> E1 ## E0 ## E3 ## E1; }; \ - struct { detail::_swizzle<4, T, Q, 1,0,3,2> E1 ## E0 ## E3 ## E2; }; \ - struct { detail::_swizzle<4, T, Q, 1,0,3,3> E1 ## E0 ## E3 ## E3; }; \ - struct { detail::_swizzle<4, T, Q, 1,1,0,0> E1 ## E1 ## E0 ## E0; }; \ - struct { detail::_swizzle<4, T, Q, 1,1,0,1> E1 ## E1 ## E0 ## E1; }; \ - struct { detail::_swizzle<4, T, Q, 1,1,0,2> E1 ## E1 ## E0 ## E2; }; \ - struct { detail::_swizzle<4, T, Q, 1,1,0,3> E1 ## E1 ## E0 ## E3; }; \ - struct { detail::_swizzle<4, T, Q, 1,1,1,0> E1 ## E1 ## E1 ## E0; }; \ - struct { detail::_swizzle<4, T, Q, 1,1,1,1> E1 ## E1 ## E1 ## E1; }; \ - struct { detail::_swizzle<4, T, Q, 1,1,1,2> E1 ## E1 ## E1 ## E2; }; \ - struct { detail::_swizzle<4, T, Q, 1,1,1,3> E1 ## E1 ## E1 ## E3; }; \ - struct { detail::_swizzle<4, T, Q, 1,1,2,0> E1 ## E1 ## E2 ## E0; }; \ - struct { detail::_swizzle<4, T, Q, 1,1,2,1> E1 ## E1 ## E2 ## E1; }; \ - struct { detail::_swizzle<4, T, Q, 1,1,2,2> E1 ## E1 ## E2 ## E2; }; \ - struct { detail::_swizzle<4, T, Q, 1,1,2,3> E1 ## 
E1 ## E2 ## E3; }; \ - struct { detail::_swizzle<4, T, Q, 1,1,3,0> E1 ## E1 ## E3 ## E0; }; \ - struct { detail::_swizzle<4, T, Q, 1,1,3,1> E1 ## E1 ## E3 ## E1; }; \ - struct { detail::_swizzle<4, T, Q, 1,1,3,2> E1 ## E1 ## E3 ## E2; }; \ - struct { detail::_swizzle<4, T, Q, 1,1,3,3> E1 ## E1 ## E3 ## E3; }; \ - struct { detail::_swizzle<4, T, Q, 1,2,0,0> E1 ## E2 ## E0 ## E0; }; \ - struct { detail::_swizzle<4, T, Q, 1,2,0,1> E1 ## E2 ## E0 ## E1; }; \ - struct { detail::_swizzle<4, T, Q, 1,2,0,2> E1 ## E2 ## E0 ## E2; }; \ - struct { detail::_swizzle<4, T, Q, 1,2,0,3> E1 ## E2 ## E0 ## E3; }; \ - struct { detail::_swizzle<4, T, Q, 1,2,1,0> E1 ## E2 ## E1 ## E0; }; \ - struct { detail::_swizzle<4, T, Q, 1,2,1,1> E1 ## E2 ## E1 ## E1; }; \ - struct { detail::_swizzle<4, T, Q, 1,2,1,2> E1 ## E2 ## E1 ## E2; }; \ - struct { detail::_swizzle<4, T, Q, 1,2,1,3> E1 ## E2 ## E1 ## E3; }; \ - struct { detail::_swizzle<4, T, Q, 1,2,2,0> E1 ## E2 ## E2 ## E0; }; \ - struct { detail::_swizzle<4, T, Q, 1,2,2,1> E1 ## E2 ## E2 ## E1; }; \ - struct { detail::_swizzle<4, T, Q, 1,2,2,2> E1 ## E2 ## E2 ## E2; }; \ - struct { detail::_swizzle<4, T, Q, 1,2,2,3> E1 ## E2 ## E2 ## E3; }; \ - struct { detail::_swizzle<4, T, Q, 1,2,3,0> E1 ## E2 ## E3 ## E0; }; \ - struct { detail::_swizzle<4, T, Q, 1,2,3,1> E1 ## E2 ## E3 ## E1; }; \ - struct { detail::_swizzle<4, T, Q, 1,2,3,2> E1 ## E2 ## E3 ## E2; }; \ - struct { detail::_swizzle<4, T, Q, 1,2,3,3> E1 ## E2 ## E3 ## E3; }; \ - struct { detail::_swizzle<4, T, Q, 1,3,0,0> E1 ## E3 ## E0 ## E0; }; \ - struct { detail::_swizzle<4, T, Q, 1,3,0,1> E1 ## E3 ## E0 ## E1; }; \ - struct { detail::_swizzle<4, T, Q, 1,3,0,2> E1 ## E3 ## E0 ## E2; }; \ - struct { detail::_swizzle<4, T, Q, 1,3,0,3> E1 ## E3 ## E0 ## E3; }; \ - struct { detail::_swizzle<4, T, Q, 1,3,1,0> E1 ## E3 ## E1 ## E0; }; \ - struct { detail::_swizzle<4, T, Q, 1,3,1,1> E1 ## E3 ## E1 ## E1; }; \ - struct { detail::_swizzle<4, T, Q, 1,3,1,2> E1 ## E3 ## E1 ## E2; }; \ - struct { detail::_swizzle<4, T, Q, 1,3,1,3> E1 ## E3 ## E1 ## E3; }; \ - struct { detail::_swizzle<4, T, Q, 1,3,2,0> E1 ## E3 ## E2 ## E0; }; \ - struct { detail::_swizzle<4, T, Q, 1,3,2,1> E1 ## E3 ## E2 ## E1; }; \ - struct { detail::_swizzle<4, T, Q, 1,3,2,2> E1 ## E3 ## E2 ## E2; }; \ - struct { detail::_swizzle<4, T, Q, 1,3,2,3> E1 ## E3 ## E2 ## E3; }; \ - struct { detail::_swizzle<4, T, Q, 1,3,3,0> E1 ## E3 ## E3 ## E0; }; \ - struct { detail::_swizzle<4, T, Q, 1,3,3,1> E1 ## E3 ## E3 ## E1; }; \ - struct { detail::_swizzle<4, T, Q, 1,3,3,2> E1 ## E3 ## E3 ## E2; }; \ - struct { detail::_swizzle<4, T, Q, 1,3,3,3> E1 ## E3 ## E3 ## E3; }; \ - struct { detail::_swizzle<4, T, Q, 2,0,0,0> E2 ## E0 ## E0 ## E0; }; \ - struct { detail::_swizzle<4, T, Q, 2,0,0,1> E2 ## E0 ## E0 ## E1; }; \ - struct { detail::_swizzle<4, T, Q, 2,0,0,2> E2 ## E0 ## E0 ## E2; }; \ - struct { detail::_swizzle<4, T, Q, 2,0,0,3> E2 ## E0 ## E0 ## E3; }; \ - struct { detail::_swizzle<4, T, Q, 2,0,1,0> E2 ## E0 ## E1 ## E0; }; \ - struct { detail::_swizzle<4, T, Q, 2,0,1,1> E2 ## E0 ## E1 ## E1; }; \ - struct { detail::_swizzle<4, T, Q, 2,0,1,2> E2 ## E0 ## E1 ## E2; }; \ - struct { detail::_swizzle<4, T, Q, 2,0,1,3> E2 ## E0 ## E1 ## E3; }; \ - struct { detail::_swizzle<4, T, Q, 2,0,2,0> E2 ## E0 ## E2 ## E0; }; \ - struct { detail::_swizzle<4, T, Q, 2,0,2,1> E2 ## E0 ## E2 ## E1; }; \ - struct { detail::_swizzle<4, T, Q, 2,0,2,2> E2 ## E0 ## E2 ## E2; }; \ - struct { detail::_swizzle<4, T, Q, 2,0,2,3> E2 ## E0 ## E2 ## E3; }; \ - struct { 
detail::_swizzle<4, T, Q, 2,0,3,0> E2 ## E0 ## E3 ## E0; }; \ - struct { detail::_swizzle<4, T, Q, 2,0,3,1> E2 ## E0 ## E3 ## E1; }; \ - struct { detail::_swizzle<4, T, Q, 2,0,3,2> E2 ## E0 ## E3 ## E2; }; \ - struct { detail::_swizzle<4, T, Q, 2,0,3,3> E2 ## E0 ## E3 ## E3; }; \ - struct { detail::_swizzle<4, T, Q, 2,1,0,0> E2 ## E1 ## E0 ## E0; }; \ - struct { detail::_swizzle<4, T, Q, 2,1,0,1> E2 ## E1 ## E0 ## E1; }; \ - struct { detail::_swizzle<4, T, Q, 2,1,0,2> E2 ## E1 ## E0 ## E2; }; \ - struct { detail::_swizzle<4, T, Q, 2,1,0,3> E2 ## E1 ## E0 ## E3; }; \ - struct { detail::_swizzle<4, T, Q, 2,1,1,0> E2 ## E1 ## E1 ## E0; }; \ - struct { detail::_swizzle<4, T, Q, 2,1,1,1> E2 ## E1 ## E1 ## E1; }; \ - struct { detail::_swizzle<4, T, Q, 2,1,1,2> E2 ## E1 ## E1 ## E2; }; \ - struct { detail::_swizzle<4, T, Q, 2,1,1,3> E2 ## E1 ## E1 ## E3; }; \ - struct { detail::_swizzle<4, T, Q, 2,1,2,0> E2 ## E1 ## E2 ## E0; }; \ - struct { detail::_swizzle<4, T, Q, 2,1,2,1> E2 ## E1 ## E2 ## E1; }; \ - struct { detail::_swizzle<4, T, Q, 2,1,2,2> E2 ## E1 ## E2 ## E2; }; \ - struct { detail::_swizzle<4, T, Q, 2,1,2,3> E2 ## E1 ## E2 ## E3; }; \ - struct { detail::_swizzle<4, T, Q, 2,1,3,0> E2 ## E1 ## E3 ## E0; }; \ - struct { detail::_swizzle<4, T, Q, 2,1,3,1> E2 ## E1 ## E3 ## E1; }; \ - struct { detail::_swizzle<4, T, Q, 2,1,3,2> E2 ## E1 ## E3 ## E2; }; \ - struct { detail::_swizzle<4, T, Q, 2,1,3,3> E2 ## E1 ## E3 ## E3; }; \ - struct { detail::_swizzle<4, T, Q, 2,2,0,0> E2 ## E2 ## E0 ## E0; }; \ - struct { detail::_swizzle<4, T, Q, 2,2,0,1> E2 ## E2 ## E0 ## E1; }; \ - struct { detail::_swizzle<4, T, Q, 2,2,0,2> E2 ## E2 ## E0 ## E2; }; \ - struct { detail::_swizzle<4, T, Q, 2,2,0,3> E2 ## E2 ## E0 ## E3; }; \ - struct { detail::_swizzle<4, T, Q, 2,2,1,0> E2 ## E2 ## E1 ## E0; }; \ - struct { detail::_swizzle<4, T, Q, 2,2,1,1> E2 ## E2 ## E1 ## E1; }; \ - struct { detail::_swizzle<4, T, Q, 2,2,1,2> E2 ## E2 ## E1 ## E2; }; \ - struct { detail::_swizzle<4, T, Q, 2,2,1,3> E2 ## E2 ## E1 ## E3; }; \ - struct { detail::_swizzle<4, T, Q, 2,2,2,0> E2 ## E2 ## E2 ## E0; }; \ - struct { detail::_swizzle<4, T, Q, 2,2,2,1> E2 ## E2 ## E2 ## E1; }; \ - struct { detail::_swizzle<4, T, Q, 2,2,2,2> E2 ## E2 ## E2 ## E2; }; \ - struct { detail::_swizzle<4, T, Q, 2,2,2,3> E2 ## E2 ## E2 ## E3; }; \ - struct { detail::_swizzle<4, T, Q, 2,2,3,0> E2 ## E2 ## E3 ## E0; }; \ - struct { detail::_swizzle<4, T, Q, 2,2,3,1> E2 ## E2 ## E3 ## E1; }; \ - struct { detail::_swizzle<4, T, Q, 2,2,3,2> E2 ## E2 ## E3 ## E2; }; \ - struct { detail::_swizzle<4, T, Q, 2,2,3,3> E2 ## E2 ## E3 ## E3; }; \ - struct { detail::_swizzle<4, T, Q, 2,3,0,0> E2 ## E3 ## E0 ## E0; }; \ - struct { detail::_swizzle<4, T, Q, 2,3,0,1> E2 ## E3 ## E0 ## E1; }; \ - struct { detail::_swizzle<4, T, Q, 2,3,0,2> E2 ## E3 ## E0 ## E2; }; \ - struct { detail::_swizzle<4, T, Q, 2,3,0,3> E2 ## E3 ## E0 ## E3; }; \ - struct { detail::_swizzle<4, T, Q, 2,3,1,0> E2 ## E3 ## E1 ## E0; }; \ - struct { detail::_swizzle<4, T, Q, 2,3,1,1> E2 ## E3 ## E1 ## E1; }; \ - struct { detail::_swizzle<4, T, Q, 2,3,1,2> E2 ## E3 ## E1 ## E2; }; \ - struct { detail::_swizzle<4, T, Q, 2,3,1,3> E2 ## E3 ## E1 ## E3; }; \ - struct { detail::_swizzle<4, T, Q, 2,3,2,0> E2 ## E3 ## E2 ## E0; }; \ - struct { detail::_swizzle<4, T, Q, 2,3,2,1> E2 ## E3 ## E2 ## E1; }; \ - struct { detail::_swizzle<4, T, Q, 2,3,2,2> E2 ## E3 ## E2 ## E2; }; \ - struct { detail::_swizzle<4, T, Q, 2,3,2,3> E2 ## E3 ## E2 ## E3; }; \ - struct { detail::_swizzle<4, T, Q, 2,3,3,0> E2 ## E3 ## E3 
## E0; }; \ - struct { detail::_swizzle<4, T, Q, 2,3,3,1> E2 ## E3 ## E3 ## E1; }; \ - struct { detail::_swizzle<4, T, Q, 2,3,3,2> E2 ## E3 ## E3 ## E2; }; \ - struct { detail::_swizzle<4, T, Q, 2,3,3,3> E2 ## E3 ## E3 ## E3; }; \ - struct { detail::_swizzle<4, T, Q, 3,0,0,0> E3 ## E0 ## E0 ## E0; }; \ - struct { detail::_swizzle<4, T, Q, 3,0,0,1> E3 ## E0 ## E0 ## E1; }; \ - struct { detail::_swizzle<4, T, Q, 3,0,0,2> E3 ## E0 ## E0 ## E2; }; \ - struct { detail::_swizzle<4, T, Q, 3,0,0,3> E3 ## E0 ## E0 ## E3; }; \ - struct { detail::_swizzle<4, T, Q, 3,0,1,0> E3 ## E0 ## E1 ## E0; }; \ - struct { detail::_swizzle<4, T, Q, 3,0,1,1> E3 ## E0 ## E1 ## E1; }; \ - struct { detail::_swizzle<4, T, Q, 3,0,1,2> E3 ## E0 ## E1 ## E2; }; \ - struct { detail::_swizzle<4, T, Q, 3,0,1,3> E3 ## E0 ## E1 ## E3; }; \ - struct { detail::_swizzle<4, T, Q, 3,0,2,0> E3 ## E0 ## E2 ## E0; }; \ - struct { detail::_swizzle<4, T, Q, 3,0,2,1> E3 ## E0 ## E2 ## E1; }; \ - struct { detail::_swizzle<4, T, Q, 3,0,2,2> E3 ## E0 ## E2 ## E2; }; \ - struct { detail::_swizzle<4, T, Q, 3,0,2,3> E3 ## E0 ## E2 ## E3; }; \ - struct { detail::_swizzle<4, T, Q, 3,0,3,0> E3 ## E0 ## E3 ## E0; }; \ - struct { detail::_swizzle<4, T, Q, 3,0,3,1> E3 ## E0 ## E3 ## E1; }; \ - struct { detail::_swizzle<4, T, Q, 3,0,3,2> E3 ## E0 ## E3 ## E2; }; \ - struct { detail::_swizzle<4, T, Q, 3,0,3,3> E3 ## E0 ## E3 ## E3; }; \ - struct { detail::_swizzle<4, T, Q, 3,1,0,0> E3 ## E1 ## E0 ## E0; }; \ - struct { detail::_swizzle<4, T, Q, 3,1,0,1> E3 ## E1 ## E0 ## E1; }; \ - struct { detail::_swizzle<4, T, Q, 3,1,0,2> E3 ## E1 ## E0 ## E2; }; \ - struct { detail::_swizzle<4, T, Q, 3,1,0,3> E3 ## E1 ## E0 ## E3; }; \ - struct { detail::_swizzle<4, T, Q, 3,1,1,0> E3 ## E1 ## E1 ## E0; }; \ - struct { detail::_swizzle<4, T, Q, 3,1,1,1> E3 ## E1 ## E1 ## E1; }; \ - struct { detail::_swizzle<4, T, Q, 3,1,1,2> E3 ## E1 ## E1 ## E2; }; \ - struct { detail::_swizzle<4, T, Q, 3,1,1,3> E3 ## E1 ## E1 ## E3; }; \ - struct { detail::_swizzle<4, T, Q, 3,1,2,0> E3 ## E1 ## E2 ## E0; }; \ - struct { detail::_swizzle<4, T, Q, 3,1,2,1> E3 ## E1 ## E2 ## E1; }; \ - struct { detail::_swizzle<4, T, Q, 3,1,2,2> E3 ## E1 ## E2 ## E2; }; \ - struct { detail::_swizzle<4, T, Q, 3,1,2,3> E3 ## E1 ## E2 ## E3; }; \ - struct { detail::_swizzle<4, T, Q, 3,1,3,0> E3 ## E1 ## E3 ## E0; }; \ - struct { detail::_swizzle<4, T, Q, 3,1,3,1> E3 ## E1 ## E3 ## E1; }; \ - struct { detail::_swizzle<4, T, Q, 3,1,3,2> E3 ## E1 ## E3 ## E2; }; \ - struct { detail::_swizzle<4, T, Q, 3,1,3,3> E3 ## E1 ## E3 ## E3; }; \ - struct { detail::_swizzle<4, T, Q, 3,2,0,0> E3 ## E2 ## E0 ## E0; }; \ - struct { detail::_swizzle<4, T, Q, 3,2,0,1> E3 ## E2 ## E0 ## E1; }; \ - struct { detail::_swizzle<4, T, Q, 3,2,0,2> E3 ## E2 ## E0 ## E2; }; \ - struct { detail::_swizzle<4, T, Q, 3,2,0,3> E3 ## E2 ## E0 ## E3; }; \ - struct { detail::_swizzle<4, T, Q, 3,2,1,0> E3 ## E2 ## E1 ## E0; }; \ - struct { detail::_swizzle<4, T, Q, 3,2,1,1> E3 ## E2 ## E1 ## E1; }; \ - struct { detail::_swizzle<4, T, Q, 3,2,1,2> E3 ## E2 ## E1 ## E2; }; \ - struct { detail::_swizzle<4, T, Q, 3,2,1,3> E3 ## E2 ## E1 ## E3; }; \ - struct { detail::_swizzle<4, T, Q, 3,2,2,0> E3 ## E2 ## E2 ## E0; }; \ - struct { detail::_swizzle<4, T, Q, 3,2,2,1> E3 ## E2 ## E2 ## E1; }; \ - struct { detail::_swizzle<4, T, Q, 3,2,2,2> E3 ## E2 ## E2 ## E2; }; \ - struct { detail::_swizzle<4, T, Q, 3,2,2,3> E3 ## E2 ## E2 ## E3; }; \ - struct { detail::_swizzle<4, T, Q, 3,2,3,0> E3 ## E2 ## E3 ## E0; }; \ - struct { detail::_swizzle<4, T, Q, 
3,2,3,1> E3 ## E2 ## E3 ## E1; }; \ - struct { detail::_swizzle<4, T, Q, 3,2,3,2> E3 ## E2 ## E3 ## E2; }; \ - struct { detail::_swizzle<4, T, Q, 3,2,3,3> E3 ## E2 ## E3 ## E3; }; \ - struct { detail::_swizzle<4, T, Q, 3,3,0,0> E3 ## E3 ## E0 ## E0; }; \ - struct { detail::_swizzle<4, T, Q, 3,3,0,1> E3 ## E3 ## E0 ## E1; }; \ - struct { detail::_swizzle<4, T, Q, 3,3,0,2> E3 ## E3 ## E0 ## E2; }; \ - struct { detail::_swizzle<4, T, Q, 3,3,0,3> E3 ## E3 ## E0 ## E3; }; \ - struct { detail::_swizzle<4, T, Q, 3,3,1,0> E3 ## E3 ## E1 ## E0; }; \ - struct { detail::_swizzle<4, T, Q, 3,3,1,1> E3 ## E3 ## E1 ## E1; }; \ - struct { detail::_swizzle<4, T, Q, 3,3,1,2> E3 ## E3 ## E1 ## E2; }; \ - struct { detail::_swizzle<4, T, Q, 3,3,1,3> E3 ## E3 ## E1 ## E3; }; \ - struct { detail::_swizzle<4, T, Q, 3,3,2,0> E3 ## E3 ## E2 ## E0; }; \ - struct { detail::_swizzle<4, T, Q, 3,3,2,1> E3 ## E3 ## E2 ## E1; }; \ - struct { detail::_swizzle<4, T, Q, 3,3,2,2> E3 ## E3 ## E2 ## E2; }; \ - struct { detail::_swizzle<4, T, Q, 3,3,2,3> E3 ## E3 ## E2 ## E3; }; \ - struct { detail::_swizzle<4, T, Q, 3,3,3,0> E3 ## E3 ## E3 ## E0; }; \ - struct { detail::_swizzle<4, T, Q, 3,3,3,1> E3 ## E3 ## E3 ## E1; }; \ - struct { detail::_swizzle<4, T, Q, 3,3,3,2> E3 ## E3 ## E3 ## E2; }; \ - struct { detail::_swizzle<4, T, Q, 3,3,3,3> E3 ## E3 ## E3 ## E3; }; diff --git a/third_party/glm/detail/_swizzle_func.hpp b/third_party/glm/detail/_swizzle_func.hpp deleted file mode 100755 index d93c6af..0000000 --- a/third_party/glm/detail/_swizzle_func.hpp +++ /dev/null @@ -1,682 +0,0 @@ -#pragma once - -#define GLM_SWIZZLE_GEN_VEC2_ENTRY(T, P, CONST, A, B) \ - vec<2, T, Q> A ## B() CONST \ - { \ - return vec<2, T, Q>(this->A, this->B); \ - } - -#define GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, CONST, A, B, C) \ - vec<3, T, Q> A ## B ## C() CONST \ - { \ - return vec<3, T, Q>(this->A, this->B, this->C); \ - } - -#define GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, CONST, A, B, C, D) \ - vec<4, T, Q> A ## B ## C ## D() CONST \ - { \ - return vec<4, T, Q>(this->A, this->B, this->C, this->D); \ - } - -#define GLM_SWIZZLE_GEN_VEC2_ENTRY_DEF(T, P, L, CONST, A, B) \ - template \ - vec vec::A ## B() CONST \ - { \ - return vec<2, T, Q>(this->A, this->B); \ - } - -#define GLM_SWIZZLE_GEN_VEC3_ENTRY_DEF(T, P, L, CONST, A, B, C) \ - template \ - vec<3, T, Q> vec::A ## B ## C() CONST \ - { \ - return vec<3, T, Q>(this->A, this->B, this->C); \ - } - -#define GLM_SWIZZLE_GEN_VEC4_ENTRY_DEF(T, P, L, CONST, A, B, C, D) \ - template \ - vec<4, T, Q> vec::A ## B ## C ## D() CONST \ - { \ - return vec<4, T, Q>(this->A, this->B, this->C, this->D); \ - } - -#define GLM_MUTABLE - -#define GLM_SWIZZLE_GEN_REF2_FROM_VEC2_SWIZZLE(T, P, A, B) \ - GLM_SWIZZLE_GEN_VEC2_ENTRY(T, P, 2, GLM_MUTABLE, A, B) \ - GLM_SWIZZLE_GEN_VEC2_ENTRY(T, P, 2, GLM_MUTABLE, B, A) - -#define GLM_SWIZZLE_GEN_REF_FROM_VEC2(T, P) \ - GLM_SWIZZLE_GEN_REF2_FROM_VEC2_SWIZZLE(T, P, x, y) \ - GLM_SWIZZLE_GEN_REF2_FROM_VEC2_SWIZZLE(T, P, r, g) \ - GLM_SWIZZLE_GEN_REF2_FROM_VEC2_SWIZZLE(T, P, s, t) - -#define GLM_SWIZZLE_GEN_REF2_FROM_VEC3_SWIZZLE(T, P, A, B, C) \ - GLM_SWIZZLE_GEN_VEC2_ENTRY(T, P, GLM_MUTABLE, A, B) \ - GLM_SWIZZLE_GEN_VEC2_ENTRY(T, P, GLM_MUTABLE, A, C) \ - GLM_SWIZZLE_GEN_VEC2_ENTRY(T, P, GLM_MUTABLE, B, A) \ - GLM_SWIZZLE_GEN_VEC2_ENTRY(T, P, GLM_MUTABLE, B, C) \ - GLM_SWIZZLE_GEN_VEC2_ENTRY(T, P, GLM_MUTABLE, C, A) \ - GLM_SWIZZLE_GEN_VEC2_ENTRY(T, P, GLM_MUTABLE, C, B) - -#define GLM_SWIZZLE_GEN_REF3_FROM_VEC3_SWIZZLE(T, P, A, B, C) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, GLM_MUTABLE, A, B, 
C) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, GLM_MUTABLE, A, C, B) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, GLM_MUTABLE, B, A, C) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, GLM_MUTABLE, B, C, A) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, GLM_MUTABLE, C, A, B) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, GLM_MUTABLE, C, B, A) - -#define GLM_SWIZZLE_GEN_REF_FROM_VEC3_COMP(T, P, A, B, C) \ - GLM_SWIZZLE_GEN_REF3_FROM_VEC3_SWIZZLE(T, P, A, B, C) \ - GLM_SWIZZLE_GEN_REF2_FROM_VEC3_SWIZZLE(T, P, A, B, C) - -#define GLM_SWIZZLE_GEN_REF_FROM_VEC3(T, P) \ - GLM_SWIZZLE_GEN_REF_FROM_VEC3_COMP(T, P, x, y, z) \ - GLM_SWIZZLE_GEN_REF_FROM_VEC3_COMP(T, P, r, g, b) \ - GLM_SWIZZLE_GEN_REF_FROM_VEC3_COMP(T, P, s, t, p) - -#define GLM_SWIZZLE_GEN_REF2_FROM_VEC4_SWIZZLE(T, P, A, B, C, D) \ - GLM_SWIZZLE_GEN_VEC2_ENTRY(T, P, GLM_MUTABLE, A, B) \ - GLM_SWIZZLE_GEN_VEC2_ENTRY(T, P, GLM_MUTABLE, A, C) \ - GLM_SWIZZLE_GEN_VEC2_ENTRY(T, P, GLM_MUTABLE, A, D) \ - GLM_SWIZZLE_GEN_VEC2_ENTRY(T, P, GLM_MUTABLE, B, A) \ - GLM_SWIZZLE_GEN_VEC2_ENTRY(T, P, GLM_MUTABLE, B, C) \ - GLM_SWIZZLE_GEN_VEC2_ENTRY(T, P, GLM_MUTABLE, B, D) \ - GLM_SWIZZLE_GEN_VEC2_ENTRY(T, P, GLM_MUTABLE, C, A) \ - GLM_SWIZZLE_GEN_VEC2_ENTRY(T, P, GLM_MUTABLE, C, B) \ - GLM_SWIZZLE_GEN_VEC2_ENTRY(T, P, GLM_MUTABLE, C, D) \ - GLM_SWIZZLE_GEN_VEC2_ENTRY(T, P, GLM_MUTABLE, D, A) \ - GLM_SWIZZLE_GEN_VEC2_ENTRY(T, P, GLM_MUTABLE, D, B) \ - GLM_SWIZZLE_GEN_VEC2_ENTRY(T, P, GLM_MUTABLE, D, C) - -#define GLM_SWIZZLE_GEN_REF3_FROM_VEC4_SWIZZLE(T, P, A, B, C, D) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, , A, B, C) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, , A, B, D) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, , A, C, B) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, , A, C, D) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, , A, D, B) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, , A, D, C) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, , B, A, C) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, , B, A, D) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, , B, C, A) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, , B, C, D) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, , B, D, A) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, , B, D, C) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, , C, A, B) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, , C, A, D) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, , C, B, A) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, , C, B, D) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, , C, D, A) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, , C, D, B) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, , D, A, B) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, , D, A, C) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, , D, B, A) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, , D, B, C) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, , D, C, A) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, , D, C, B) - -#define GLM_SWIZZLE_GEN_REF4_FROM_VEC4_SWIZZLE(T, P, A, B, C, D) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, , A, C, B, D) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, , A, C, D, B) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, , A, D, B, C) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, , A, D, C, B) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, , A, B, D, C) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, , A, B, C, D) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, , B, C, A, D) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, , B, C, D, A) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, , B, D, A, C) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, , B, D, C, A) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, , B, A, D, C) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, , B, A, C, D) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, , C, B, A, D) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, , C, B, D, A) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, , C, D, A, B) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, , C, D, B, A) \ - 
GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, , C, A, D, B) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, , C, A, B, D) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, , D, C, B, A) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, , D, C, A, B) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, , D, A, B, C) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, , D, A, C, B) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, , D, B, A, C) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, , D, B, C, A) - -#define GLM_SWIZZLE_GEN_REF_FROM_VEC4_COMP(T, P, A, B, C, D) \ - GLM_SWIZZLE_GEN_REF2_FROM_VEC4_SWIZZLE(T, P, A, B, C, D) \ - GLM_SWIZZLE_GEN_REF3_FROM_VEC4_SWIZZLE(T, P, A, B, C, D) \ - GLM_SWIZZLE_GEN_REF4_FROM_VEC4_SWIZZLE(T, P, A, B, C, D) - -#define GLM_SWIZZLE_GEN_REF_FROM_VEC4(T, P) \ - GLM_SWIZZLE_GEN_REF_FROM_VEC4_COMP(T, P, x, y, z, w) \ - GLM_SWIZZLE_GEN_REF_FROM_VEC4_COMP(T, P, r, g, b, a) \ - GLM_SWIZZLE_GEN_REF_FROM_VEC4_COMP(T, P, s, t, p, q) - -#define GLM_SWIZZLE_GEN_VEC2_FROM_VEC2_SWIZZLE(T, P, A, B) \ - GLM_SWIZZLE_GEN_VEC2_ENTRY(T, P, const, A, A) \ - GLM_SWIZZLE_GEN_VEC2_ENTRY(T, P, const, A, B) \ - GLM_SWIZZLE_GEN_VEC2_ENTRY(T, P, const, B, A) \ - GLM_SWIZZLE_GEN_VEC2_ENTRY(T, P, const, B, B) - -#define GLM_SWIZZLE_GEN_VEC3_FROM_VEC2_SWIZZLE(T, P, A, B) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, A, A, A) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, A, A, B) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, A, B, A) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, A, B, B) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, B, A, A) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, B, A, B) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, B, B, A) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, B, B, B) - -#define GLM_SWIZZLE_GEN_VEC4_FROM_VEC2_SWIZZLE(T, P, A, B) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, A, A, A) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, A, A, B) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, A, B, A) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, A, B, B) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, B, A, A) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, B, A, B) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, B, B, A) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, B, B, B) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, A, A, A) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, A, A, B) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, A, B, A) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, A, B, B) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, B, A, A) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, B, A, B) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, B, B, A) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, B, B, B) - -#define GLM_SWIZZLE_GEN_VEC_FROM_VEC2_COMP(T, P, A, B) \ - GLM_SWIZZLE_GEN_VEC2_FROM_VEC2_SWIZZLE(T, P, A, B) \ - GLM_SWIZZLE_GEN_VEC3_FROM_VEC2_SWIZZLE(T, P, A, B) \ - GLM_SWIZZLE_GEN_VEC4_FROM_VEC2_SWIZZLE(T, P, A, B) - -#define GLM_SWIZZLE_GEN_VEC_FROM_VEC2(T, P) \ - GLM_SWIZZLE_GEN_VEC_FROM_VEC2_COMP(T, P, x, y) \ - GLM_SWIZZLE_GEN_VEC_FROM_VEC2_COMP(T, P, r, g) \ - GLM_SWIZZLE_GEN_VEC_FROM_VEC2_COMP(T, P, s, t) - -#define GLM_SWIZZLE_GEN_VEC2_FROM_VEC3_SWIZZLE(T, P, A, B, C) \ - GLM_SWIZZLE_GEN_VEC2_ENTRY(T, P, const, A, A) \ - GLM_SWIZZLE_GEN_VEC2_ENTRY(T, P, const, A, B) \ - GLM_SWIZZLE_GEN_VEC2_ENTRY(T, P, const, A, C) \ - GLM_SWIZZLE_GEN_VEC2_ENTRY(T, P, const, B, A) \ - GLM_SWIZZLE_GEN_VEC2_ENTRY(T, P, const, B, B) \ - GLM_SWIZZLE_GEN_VEC2_ENTRY(T, P, const, B, C) \ - GLM_SWIZZLE_GEN_VEC2_ENTRY(T, P, const, C, A) \ - GLM_SWIZZLE_GEN_VEC2_ENTRY(T, P, const, C, B) \ - GLM_SWIZZLE_GEN_VEC2_ENTRY(T, P, const, C, C) - -#define 
GLM_SWIZZLE_GEN_VEC3_FROM_VEC3_SWIZZLE(T, P, A, B, C) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, A, A, A) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, A, A, B) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, A, A, C) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, A, B, A) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, A, B, B) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, A, B, C) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, A, C, A) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, A, C, B) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, A, C, C) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, B, A, A) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, B, A, B) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, B, A, C) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, B, B, A) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, B, B, B) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, B, B, C) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, B, C, A) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, B, C, B) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, B, C, C) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, C, A, A) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, C, A, B) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, C, A, C) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, C, B, A) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, C, B, B) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, C, B, C) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, C, C, A) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, C, C, B) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, C, C, C) - -#define GLM_SWIZZLE_GEN_VEC4_FROM_VEC3_SWIZZLE(T, P, A, B, C) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, A, A, A) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, A, A, B) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, A, A, C) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, A, B, A) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, A, B, B) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, A, B, C) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, A, C, A) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, A, C, B) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, A, C, C) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, B, A, A) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, B, A, B) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, B, A, C) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, B, B, A) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, B, B, B) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, B, B, C) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, B, C, A) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, B, C, B) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, B, C, C) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, C, A, A) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, C, A, B) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, C, A, C) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, C, B, A) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, C, B, B) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, C, B, C) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, C, C, A) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, C, C, B) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, C, C, C) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, A, A, A) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, A, A, B) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, A, A, C) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, A, B, A) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, A, B, B) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, A, B, C) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, A, C, A) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, A, C, B) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, 
B, A, C, C) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, B, A, A) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, B, A, B) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, B, A, C) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, B, B, A) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, B, B, B) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, B, B, C) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, B, C, A) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, B, C, B) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, B, C, C) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, C, A, A) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, C, A, B) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, C, A, C) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, C, B, A) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, C, B, B) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, C, B, C) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, C, C, A) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, C, C, B) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, C, C, C) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, A, A, A) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, A, A, B) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, A, A, C) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, A, B, A) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, A, B, B) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, A, B, C) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, A, C, A) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, A, C, B) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, A, C, C) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, B, A, A) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, B, A, B) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, B, A, C) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, B, B, A) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, B, B, B) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, B, B, C) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, B, C, A) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, B, C, B) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, B, C, C) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, C, A, A) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, C, A, B) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, C, A, C) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, C, B, A) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, C, B, B) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, C, B, C) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, C, C, A) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, C, C, B) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, C, C, C) - -#define GLM_SWIZZLE_GEN_VEC_FROM_VEC3_COMP(T, P, A, B, C) \ - GLM_SWIZZLE_GEN_VEC2_FROM_VEC3_SWIZZLE(T, P, A, B, C) \ - GLM_SWIZZLE_GEN_VEC3_FROM_VEC3_SWIZZLE(T, P, A, B, C) \ - GLM_SWIZZLE_GEN_VEC4_FROM_VEC3_SWIZZLE(T, P, A, B, C) - -#define GLM_SWIZZLE_GEN_VEC_FROM_VEC3(T, P) \ - GLM_SWIZZLE_GEN_VEC_FROM_VEC3_COMP(T, P, x, y, z) \ - GLM_SWIZZLE_GEN_VEC_FROM_VEC3_COMP(T, P, r, g, b) \ - GLM_SWIZZLE_GEN_VEC_FROM_VEC3_COMP(T, P, s, t, p) - -#define GLM_SWIZZLE_GEN_VEC2_FROM_VEC4_SWIZZLE(T, P, A, B, C, D) \ - GLM_SWIZZLE_GEN_VEC2_ENTRY(T, P, const, A, A) \ - GLM_SWIZZLE_GEN_VEC2_ENTRY(T, P, const, A, B) \ - GLM_SWIZZLE_GEN_VEC2_ENTRY(T, P, const, A, C) \ - GLM_SWIZZLE_GEN_VEC2_ENTRY(T, P, const, A, D) \ - GLM_SWIZZLE_GEN_VEC2_ENTRY(T, P, const, B, A) \ - GLM_SWIZZLE_GEN_VEC2_ENTRY(T, P, const, B, B) \ - GLM_SWIZZLE_GEN_VEC2_ENTRY(T, P, const, B, C) \ - GLM_SWIZZLE_GEN_VEC2_ENTRY(T, P, const, B, D) \ - GLM_SWIZZLE_GEN_VEC2_ENTRY(T, P, const, C, A) \ - GLM_SWIZZLE_GEN_VEC2_ENTRY(T, P, const, C, B) \ - 
GLM_SWIZZLE_GEN_VEC2_ENTRY(T, P, const, C, C) \ - GLM_SWIZZLE_GEN_VEC2_ENTRY(T, P, const, C, D) \ - GLM_SWIZZLE_GEN_VEC2_ENTRY(T, P, const, D, A) \ - GLM_SWIZZLE_GEN_VEC2_ENTRY(T, P, const, D, B) \ - GLM_SWIZZLE_GEN_VEC2_ENTRY(T, P, const, D, C) \ - GLM_SWIZZLE_GEN_VEC2_ENTRY(T, P, const, D, D) - -#define GLM_SWIZZLE_GEN_VEC3_FROM_VEC4_SWIZZLE(T, P, A, B, C, D) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, A, A, A) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, A, A, B) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, A, A, C) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, A, A, D) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, A, B, A) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, A, B, B) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, A, B, C) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, A, B, D) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, A, C, A) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, A, C, B) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, A, C, C) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, A, C, D) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, A, D, A) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, A, D, B) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, A, D, C) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, A, D, D) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, B, A, A) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, B, A, B) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, B, A, C) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, B, A, D) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, B, B, A) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, B, B, B) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, B, B, C) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, B, B, D) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, B, C, A) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, B, C, B) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, B, C, C) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, B, C, D) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, B, D, A) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, B, D, B) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, B, D, C) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, B, D, D) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, C, A, A) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, C, A, B) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, C, A, C) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, C, A, D) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, C, B, A) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, C, B, B) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, C, B, C) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, C, B, D) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, C, C, A) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, C, C, B) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, C, C, C) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, C, C, D) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, C, D, A) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, C, D, B) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, C, D, C) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, C, D, D) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, D, A, A) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, D, A, B) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, D, A, C) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, D, A, D) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, D, B, A) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, D, B, B) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, D, B, C) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, D, B, D) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, D, C, A) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, D, C, B) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, D, C, C) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, D, C, D) \ - 
GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, D, D, A) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, D, D, B) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, D, D, C) \ - GLM_SWIZZLE_GEN_VEC3_ENTRY(T, P, const, D, D, D) - -#define GLM_SWIZZLE_GEN_VEC4_FROM_VEC4_SWIZZLE(T, P, A, B, C, D) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, A, A, A) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, A, A, B) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, A, A, C) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, A, A, D) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, A, B, A) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, A, B, B) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, A, B, C) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, A, B, D) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, A, C, A) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, A, C, B) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, A, C, C) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, A, C, D) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, A, D, A) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, A, D, B) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, A, D, C) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, A, D, D) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, B, A, A) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, B, A, B) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, B, A, C) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, B, A, D) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, B, B, A) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, B, B, B) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, B, B, C) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, B, B, D) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, B, C, A) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, B, C, B) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, B, C, C) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, B, C, D) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, B, D, A) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, B, D, B) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, B, D, C) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, B, D, D) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, C, A, A) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, C, A, B) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, C, A, C) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, C, A, D) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, C, B, A) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, C, B, B) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, C, B, C) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, C, B, D) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, C, C, A) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, C, C, B) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, C, C, C) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, C, C, D) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, C, D, A) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, C, D, B) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, C, D, C) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, C, D, D) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, D, A, A) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, D, A, B) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, D, A, C) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, D, A, D) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, D, B, A) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, D, B, B) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, D, B, C) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, D, B, D) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, D, C, A) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, D, C, B) \ - 
GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, D, C, C) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, D, C, D) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, D, D, A) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, D, D, B) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, D, D, C) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, A, D, D, D) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, A, A, A) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, A, A, B) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, A, A, C) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, A, A, D) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, A, B, A) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, A, B, B) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, A, B, C) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, A, B, D) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, A, C, A) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, A, C, B) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, A, C, C) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, A, C, D) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, A, D, A) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, A, D, B) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, A, D, C) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, A, D, D) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, B, A, A) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, B, A, B) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, B, A, C) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, B, A, D) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, B, B, A) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, B, B, B) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, B, B, C) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, B, B, D) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, B, C, A) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, B, C, B) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, B, C, C) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, B, C, D) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, B, D, A) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, B, D, B) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, B, D, C) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, B, D, D) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, C, A, A) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, C, A, B) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, C, A, C) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, C, A, D) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, C, B, A) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, C, B, B) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, C, B, C) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, C, B, D) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, C, C, A) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, C, C, B) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, C, C, C) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, C, C, D) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, C, D, A) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, C, D, B) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, C, D, C) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, C, D, D) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, D, A, A) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, D, A, B) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, D, A, C) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, D, A, D) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, D, B, A) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, D, B, B) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, D, B, C) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, D, B, D) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, D, C, A) \ - 
GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, D, C, B) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, D, C, C) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, D, C, D) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, D, D, A) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, D, D, B) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, D, D, C) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, B, D, D, D) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, A, A, A) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, A, A, B) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, A, A, C) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, A, A, D) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, A, B, A) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, A, B, B) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, A, B, C) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, A, B, D) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, A, C, A) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, A, C, B) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, A, C, C) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, A, C, D) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, A, D, A) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, A, D, B) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, A, D, C) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, A, D, D) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, B, A, A) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, B, A, B) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, B, A, C) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, B, A, D) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, B, B, A) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, B, B, B) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, B, B, C) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, B, B, D) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, B, C, A) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, B, C, B) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, B, C, C) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, B, C, D) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, B, D, A) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, B, D, B) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, B, D, C) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, B, D, D) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, C, A, A) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, C, A, B) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, C, A, C) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, C, A, D) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, C, B, A) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, C, B, B) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, C, B, C) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, C, B, D) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, C, C, A) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, C, C, B) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, C, C, C) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, C, C, D) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, C, D, A) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, C, D, B) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, C, D, C) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, C, D, D) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, D, A, A) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, D, A, B) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, D, A, C) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, D, A, D) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, D, B, A) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, D, B, B) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, D, B, C) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, D, B, D) \ - 
GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, D, C, A) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, D, C, B) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, D, C, C) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, D, C, D) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, D, D, A) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, D, D, B) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, D, D, C) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, C, D, D, D) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, A, A, A) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, A, A, B) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, A, A, C) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, A, A, D) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, A, B, A) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, A, B, B) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, A, B, C) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, A, B, D) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, A, C, A) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, A, C, B) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, A, C, C) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, A, C, D) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, A, D, A) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, A, D, B) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, A, D, C) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, A, D, D) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, B, A, A) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, B, A, B) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, B, A, C) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, B, A, D) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, B, B, A) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, B, B, B) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, B, B, C) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, B, B, D) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, B, C, A) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, B, C, B) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, B, C, C) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, B, C, D) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, B, D, A) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, B, D, B) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, B, D, C) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, B, D, D) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, C, A, A) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, C, A, B) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, C, A, C) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, C, A, D) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, C, B, A) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, C, B, B) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, C, B, C) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, C, B, D) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, C, C, A) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, C, C, B) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, C, C, C) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, C, C, D) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, C, D, A) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, C, D, B) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, C, D, C) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, C, D, D) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, D, A, A) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, D, A, B) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, D, A, C) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, D, A, D) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, D, B, A) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, D, B, B) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, D, B, C) \ - 
GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, D, B, D) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, D, C, A) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, D, C, B) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, D, C, C) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, D, C, D) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, D, D, A) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, D, D, B) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, D, D, C) \ - GLM_SWIZZLE_GEN_VEC4_ENTRY(T, P, const, D, D, D, D) - -#define GLM_SWIZZLE_GEN_VEC_FROM_VEC4_COMP(T, P, A, B, C, D) \ - GLM_SWIZZLE_GEN_VEC2_FROM_VEC4_SWIZZLE(T, P, A, B, C, D) \ - GLM_SWIZZLE_GEN_VEC3_FROM_VEC4_SWIZZLE(T, P, A, B, C, D) \ - GLM_SWIZZLE_GEN_VEC4_FROM_VEC4_SWIZZLE(T, P, A, B, C, D) - -#define GLM_SWIZZLE_GEN_VEC_FROM_VEC4(T, P) \ - GLM_SWIZZLE_GEN_VEC_FROM_VEC4_COMP(T, P, x, y, z, w) \ - GLM_SWIZZLE_GEN_VEC_FROM_VEC4_COMP(T, P, r, g, b, a) \ - GLM_SWIZZLE_GEN_VEC_FROM_VEC4_COMP(T, P, s, t, p, q) - diff --git a/third_party/glm/detail/_vectorize.hpp b/third_party/glm/detail/_vectorize.hpp deleted file mode 100755 index 1fcaec3..0000000 --- a/third_party/glm/detail/_vectorize.hpp +++ /dev/null @@ -1,162 +0,0 @@ -#pragma once - -namespace glm{ -namespace detail -{ - template class vec, length_t L, typename R, typename T, qualifier Q> - struct functor1{}; - - template class vec, typename R, typename T, qualifier Q> - struct functor1 - { - GLM_FUNC_QUALIFIER GLM_CONSTEXPR static vec<1, R, Q> call(R (*Func) (T x), vec<1, T, Q> const& v) - { - return vec<1, R, Q>(Func(v.x)); - } - }; - - template class vec, typename R, typename T, qualifier Q> - struct functor1 - { - GLM_FUNC_QUALIFIER GLM_CONSTEXPR static vec<2, R, Q> call(R (*Func) (T x), vec<2, T, Q> const& v) - { - return vec<2, R, Q>(Func(v.x), Func(v.y)); - } - }; - - template class vec, typename R, typename T, qualifier Q> - struct functor1 - { - GLM_FUNC_QUALIFIER GLM_CONSTEXPR static vec<3, R, Q> call(R (*Func) (T x), vec<3, T, Q> const& v) - { - return vec<3, R, Q>(Func(v.x), Func(v.y), Func(v.z)); - } - }; - - template class vec, typename R, typename T, qualifier Q> - struct functor1 - { - GLM_FUNC_QUALIFIER GLM_CONSTEXPR static vec<4, R, Q> call(R (*Func) (T x), vec<4, T, Q> const& v) - { - return vec<4, R, Q>(Func(v.x), Func(v.y), Func(v.z), Func(v.w)); - } - }; - - template class vec, length_t L, typename T, qualifier Q> - struct functor2{}; - - template class vec, typename T, qualifier Q> - struct functor2 - { - GLM_FUNC_QUALIFIER static vec<1, T, Q> call(T (*Func) (T x, T y), vec<1, T, Q> const& a, vec<1, T, Q> const& b) - { - return vec<1, T, Q>(Func(a.x, b.x)); - } - }; - - template class vec, typename T, qualifier Q> - struct functor2 - { - GLM_FUNC_QUALIFIER static vec<2, T, Q> call(T (*Func) (T x, T y), vec<2, T, Q> const& a, vec<2, T, Q> const& b) - { - return vec<2, T, Q>(Func(a.x, b.x), Func(a.y, b.y)); - } - }; - - template class vec, typename T, qualifier Q> - struct functor2 - { - GLM_FUNC_QUALIFIER static vec<3, T, Q> call(T (*Func) (T x, T y), vec<3, T, Q> const& a, vec<3, T, Q> const& b) - { - return vec<3, T, Q>(Func(a.x, b.x), Func(a.y, b.y), Func(a.z, b.z)); - } - }; - - template class vec, typename T, qualifier Q> - struct functor2 - { - GLM_FUNC_QUALIFIER static vec<4, T, Q> call(T (*Func) (T x, T y), vec<4, T, Q> const& a, vec<4, T, Q> const& b) - { - return vec<4, T, Q>(Func(a.x, b.x), Func(a.y, b.y), Func(a.z, b.z), Func(a.w, b.w)); - } - }; - - template class vec, length_t L, typename T, qualifier Q> - struct functor2_vec_sca{}; - - template class vec, 
typename T, qualifier Q> - struct functor2_vec_sca - { - GLM_FUNC_QUALIFIER static vec<1, T, Q> call(T (*Func) (T x, T y), vec<1, T, Q> const& a, T b) - { - return vec<1, T, Q>(Func(a.x, b)); - } - }; - - template class vec, typename T, qualifier Q> - struct functor2_vec_sca - { - GLM_FUNC_QUALIFIER static vec<2, T, Q> call(T (*Func) (T x, T y), vec<2, T, Q> const& a, T b) - { - return vec<2, T, Q>(Func(a.x, b), Func(a.y, b)); - } - }; - - template class vec, typename T, qualifier Q> - struct functor2_vec_sca - { - GLM_FUNC_QUALIFIER static vec<3, T, Q> call(T (*Func) (T x, T y), vec<3, T, Q> const& a, T b) - { - return vec<3, T, Q>(Func(a.x, b), Func(a.y, b), Func(a.z, b)); - } - }; - - template class vec, typename T, qualifier Q> - struct functor2_vec_sca - { - GLM_FUNC_QUALIFIER static vec<4, T, Q> call(T (*Func) (T x, T y), vec<4, T, Q> const& a, T b) - { - return vec<4, T, Q>(Func(a.x, b), Func(a.y, b), Func(a.z, b), Func(a.w, b)); - } - }; - - template - struct functor2_vec_int {}; - - template - struct functor2_vec_int<1, T, Q> - { - GLM_FUNC_QUALIFIER static vec<1, int, Q> call(int (*Func) (T x, int y), vec<1, T, Q> const& a, vec<1, int, Q> const& b) - { - return vec<1, int, Q>(Func(a.x, b.x)); - } - }; - - template - struct functor2_vec_int<2, T, Q> - { - GLM_FUNC_QUALIFIER static vec<2, int, Q> call(int (*Func) (T x, int y), vec<2, T, Q> const& a, vec<2, int, Q> const& b) - { - return vec<2, int, Q>(Func(a.x, b.x), Func(a.y, b.y)); - } - }; - - template - struct functor2_vec_int<3, T, Q> - { - GLM_FUNC_QUALIFIER static vec<3, int, Q> call(int (*Func) (T x, int y), vec<3, T, Q> const& a, vec<3, int, Q> const& b) - { - return vec<3, int, Q>(Func(a.x, b.x), Func(a.y, b.y), Func(a.z, b.z)); - } - }; - - template - struct functor2_vec_int<4, T, Q> - { - GLM_FUNC_QUALIFIER static vec<4, int, Q> call(int (*Func) (T x, int y), vec<4, T, Q> const& a, vec<4, int, Q> const& b) - { - return vec<4, int, Q>(Func(a.x, b.x), Func(a.y, b.y), Func(a.z, b.z), Func(a.w, b.w)); - } - }; -}//namespace detail -}//namespace glm diff --git a/third_party/glm/detail/compute_common.hpp b/third_party/glm/detail/compute_common.hpp deleted file mode 100755 index cc24b9e..0000000 --- a/third_party/glm/detail/compute_common.hpp +++ /dev/null @@ -1,50 +0,0 @@ -#pragma once - -#include "setup.hpp" -#include - -namespace glm{ -namespace detail -{ - template - struct compute_abs - {}; - - template - struct compute_abs - { - GLM_FUNC_QUALIFIER GLM_CONSTEXPR static genFIType call(genFIType x) - { - GLM_STATIC_ASSERT( - std::numeric_limits::is_iec559 || std::numeric_limits::is_signed, - "'abs' only accept floating-point and integer scalar or vector inputs"); - - return x >= genFIType(0) ? 
x : -x;
-			// TODO, perf comp with: *(((int *) &x) + 1) &= 0x7fffffff;
-		}
-	};
-
-#if GLM_COMPILER & GLM_COMPILER_CUDA
-	template<>
-	struct compute_abs<float, true>
-	{
-		GLM_FUNC_QUALIFIER GLM_CONSTEXPR static float call(float x)
-		{
-			return fabsf(x);
-		}
-	};
-#endif
-
-	template<typename genFIType>
-	struct compute_abs<genFIType, false>
-	{
-		GLM_FUNC_QUALIFIER GLM_CONSTEXPR static genFIType call(genFIType x)
-		{
-			GLM_STATIC_ASSERT(
-				(!std::numeric_limits<genFIType>::is_signed && std::numeric_limits<genFIType>::is_integer),
-				"'abs' only accept floating-point and integer scalar or vector inputs");
-			return x;
-		}
-	};
-}//namespace detail
-}//namespace glm
diff --git a/third_party/glm/detail/compute_vector_relational.hpp b/third_party/glm/detail/compute_vector_relational.hpp
deleted file mode 100755
index 167b634..0000000
--- a/third_party/glm/detail/compute_vector_relational.hpp
+++ /dev/null
@@ -1,30 +0,0 @@
-#pragma once
-
-//#include "compute_common.hpp"
-#include "setup.hpp"
-#include <limits>
-
-namespace glm{
-namespace detail
-{
-	template <typename T, bool isFloat>
-	struct compute_equal
-	{
-		GLM_FUNC_QUALIFIER GLM_CONSTEXPR static bool call(T a, T b)
-		{
-			return a == b;
-		}
-	};
-/*
-	template <typename T>
-	struct compute_equal<T, true>
-	{
-		GLM_FUNC_QUALIFIER GLM_CONSTEXPR static bool call(T a, T b)
-		{
-			return detail::compute_abs<T, std::numeric_limits<T>::is_signed>::call(b - a) <= static_cast<T>(0);
-			//return std::memcmp(&a, &b, sizeof(T)) == 0;
-		}
-	};
-*/
-}//namespace detail
-}//namespace glm
diff --git a/third_party/glm/detail/func_common.inl b/third_party/glm/detail/func_common.inl
deleted file mode 100755
index 4b5f144..0000000
--- a/third_party/glm/detail/func_common.inl
+++ /dev/null
@@ -1,792 +0,0 @@
-/// @ref core
-/// @file glm/detail/func_common.inl
-
-#include "../vector_relational.hpp"
-#include "compute_common.hpp"
-#include "type_vec1.hpp"
-#include "type_vec2.hpp"
-#include "type_vec3.hpp"
-#include "type_vec4.hpp"
-#include "_vectorize.hpp"
-#include <limits>
-
-namespace glm
-{
-	// min
-	template<typename genType>
-	GLM_FUNC_QUALIFIER GLM_CONSTEXPR genType min(genType x, genType y)
-	{
-		GLM_STATIC_ASSERT(std::numeric_limits<genType>::is_iec559 || std::numeric_limits<genType>::is_integer, "'min' only accept floating-point or integer inputs");
-		return (y < x) ? y : x;
-	}
-
-	// max
-	template<typename genType>
-	GLM_FUNC_QUALIFIER GLM_CONSTEXPR genType max(genType x, genType y)
-	{
-		GLM_STATIC_ASSERT(std::numeric_limits<genType>::is_iec559 || std::numeric_limits<genType>::is_integer, "'max' only accept floating-point or integer inputs");
-
-		return (x < y) ? y : x;
-	}
-
-	// abs
-	template<>
-	GLM_FUNC_QUALIFIER GLM_CONSTEXPR int abs(int x)
-	{
-		int const y = x >> (sizeof(int) * 8 - 1);
-		return (x ^ y) - y;
-	}
-
-	// round
-#	if GLM_HAS_CXX11_STL
-		using ::std::round;
-#	else
-		template<typename genType>
-		GLM_FUNC_QUALIFIER genType round(genType x)
-		{
-			GLM_STATIC_ASSERT(std::numeric_limits<genType>::is_iec559, "'round' only accept floating-point inputs");
-
-			return x < static_cast<genType>(0) ? static_cast<genType>(int(x - static_cast<genType>(0.5))) : static_cast<genType>(int(x + static_cast<genType>(0.5)));
-		}
-#	endif
-
-	// trunc
-#	if GLM_HAS_CXX11_STL
-		using ::std::trunc;
-#	else
-		template<typename genType>
-		GLM_FUNC_QUALIFIER genType trunc(genType x)
-		{
-			GLM_STATIC_ASSERT(std::numeric_limits<genType>::is_iec559, "'trunc' only accept floating-point inputs");
-
-			return x < static_cast<genType>(0) ?
-std::floor(-x) : std::floor(x); - } -# endif - -}//namespace glm - -namespace glm{ -namespace detail -{ - template - struct compute_abs_vector - { - GLM_FUNC_QUALIFIER GLM_CONSTEXPR static vec call(vec const& x) - { - return detail::functor1::call(abs, x); - } - }; - - template - struct compute_mix_vector - { - GLM_FUNC_QUALIFIER static vec call(vec const& x, vec const& y, vec const& a) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559 || GLM_CONFIG_UNRESTRICTED_GENTYPE, "'mix' only accept floating-point inputs for the interpolator a"); - - return vec(vec(x) * (static_cast(1) - a) + vec(y) * a); - } - }; - - template - struct compute_mix_vector - { - GLM_FUNC_QUALIFIER static vec call(vec const& x, vec const& y, vec const& a) - { - vec Result; - for(length_t i = 0; i < x.length(); ++i) - Result[i] = a[i] ? y[i] : x[i]; - return Result; - } - }; - - template - struct compute_mix_scalar - { - GLM_FUNC_QUALIFIER static vec call(vec const& x, vec const& y, U const& a) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559 || GLM_CONFIG_UNRESTRICTED_GENTYPE, "'mix' only accept floating-point inputs for the interpolator a"); - - return vec(vec(x) * (static_cast(1) - a) + vec(y) * a); - } - }; - - template - struct compute_mix_scalar - { - GLM_FUNC_QUALIFIER static vec call(vec const& x, vec const& y, bool const& a) - { - return a ? y : x; - } - }; - - template - struct compute_mix - { - GLM_FUNC_QUALIFIER static T call(T const& x, T const& y, U const& a) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559 || GLM_CONFIG_UNRESTRICTED_GENTYPE, "'mix' only accept floating-point inputs for the interpolator a"); - - return static_cast(static_cast(x) * (static_cast(1) - a) + static_cast(y) * a); - } - }; - - template - struct compute_mix - { - GLM_FUNC_QUALIFIER static T call(T const& x, T const& y, bool const& a) - { - return a ? y : x; - } - }; - - template - struct compute_sign - { - GLM_FUNC_QUALIFIER static vec call(vec const& x) - { - return vec(glm::lessThan(vec(0), x)) - vec(glm::lessThan(x, vec(0))); - } - }; - -# if GLM_ARCH == GLM_ARCH_X86 - template - struct compute_sign - { - GLM_FUNC_QUALIFIER static vec call(vec const& x) - { - T const Shift(static_cast(sizeof(T) * 8 - 1)); - vec const y(vec::type, Q>(-x) >> typename detail::make_unsigned::type(Shift)); - - return (x >> Shift) | y; - } - }; -# endif - - template - struct compute_floor - { - GLM_FUNC_QUALIFIER static vec call(vec const& x) - { - return detail::functor1::call(std::floor, x); - } - }; - - template - struct compute_ceil - { - GLM_FUNC_QUALIFIER static vec call(vec const& x) - { - return detail::functor1::call(std::ceil, x); - } - }; - - template - struct compute_fract - { - GLM_FUNC_QUALIFIER static vec call(vec const& x) - { - return x - floor(x); - } - }; - - template - struct compute_trunc - { - GLM_FUNC_QUALIFIER static vec call(vec const& x) - { - return detail::functor1::call(trunc, x); - } - }; - - template - struct compute_round - { - GLM_FUNC_QUALIFIER static vec call(vec const& x) - { - return detail::functor1::call(round, x); - } - }; - - template - struct compute_mod - { - GLM_FUNC_QUALIFIER static vec call(vec const& a, vec const& b) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'mod' only accept floating-point inputs. 
Include for integer inputs."); - return a - b * floor(a / b); - } - }; - - template - struct compute_min_vector - { - GLM_FUNC_QUALIFIER static vec call(vec const& x, vec const& y) - { - return detail::functor2::call(min, x, y); - } - }; - - template - struct compute_max_vector - { - GLM_FUNC_QUALIFIER static vec call(vec const& x, vec const& y) - { - return detail::functor2::call(max, x, y); - } - }; - - template - struct compute_clamp_vector - { - GLM_FUNC_QUALIFIER static vec call(vec const& x, vec const& minVal, vec const& maxVal) - { - return min(max(x, minVal), maxVal); - } - }; - - template - struct compute_step_vector - { - GLM_FUNC_QUALIFIER static vec call(vec const& edge, vec const& x) - { - return mix(vec(1), vec(0), glm::lessThan(x, edge)); - } - }; - - template - struct compute_smoothstep_vector - { - GLM_FUNC_QUALIFIER static vec call(vec const& edge0, vec const& edge1, vec const& x) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559 || GLM_CONFIG_UNRESTRICTED_GENTYPE, "'smoothstep' only accept floating-point inputs"); - vec const tmp(clamp((x - edge0) / (edge1 - edge0), static_cast(0), static_cast(1))); - return tmp * tmp * (static_cast(3) - static_cast(2) * tmp); - } - }; -}//namespace detail - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR genFIType abs(genFIType x) - { - return detail::compute_abs::is_signed>::call(x); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec abs(vec const& x) - { - return detail::compute_abs_vector::value>::call(x); - } - - // sign - // fast and works for any type - template - GLM_FUNC_QUALIFIER genFIType sign(genFIType x) - { - GLM_STATIC_ASSERT( - std::numeric_limits::is_iec559 || (std::numeric_limits::is_signed && std::numeric_limits::is_integer), - "'sign' only accept signed inputs"); - - return detail::compute_sign<1, genFIType, defaultp, - std::numeric_limits::is_iec559, detail::is_aligned::value>::call(vec<1, genFIType>(x)).x; - } - - template - GLM_FUNC_QUALIFIER vec sign(vec const& x) - { - GLM_STATIC_ASSERT( - std::numeric_limits::is_iec559 || (std::numeric_limits::is_signed && std::numeric_limits::is_integer), - "'sign' only accept signed inputs"); - - return detail::compute_sign::is_iec559, detail::is_aligned::value>::call(x); - } - - // floor - using ::std::floor; - template - GLM_FUNC_QUALIFIER vec floor(vec const& x) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'floor' only accept floating-point inputs."); - return detail::compute_floor::value>::call(x); - } - - template - GLM_FUNC_QUALIFIER vec trunc(vec const& x) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'trunc' only accept floating-point inputs"); - return detail::compute_trunc::value>::call(x); - } - - template - GLM_FUNC_QUALIFIER vec round(vec const& x) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'round' only accept floating-point inputs"); - return detail::compute_round::value>::call(x); - } - -/* - // roundEven - template - GLM_FUNC_QUALIFIER genType roundEven(genType const& x) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'roundEven' only accept floating-point inputs"); - - return genType(int(x + genType(int(x) % 2))); - } -*/ - - // roundEven - template - GLM_FUNC_QUALIFIER genType roundEven(genType x) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'roundEven' only accept floating-point inputs"); - - int Integer = static_cast(x); - genType IntegerPart = static_cast(Integer); - genType FractionalPart = fract(x); - - if(FractionalPart > static_cast(0.5) || FractionalPart < static_cast(0.5)) - { - return 
round(x); - } - else if((Integer % 2) == 0) - { - return IntegerPart; - } - else if(x <= static_cast(0)) // Work around... - { - return IntegerPart - static_cast(1); - } - else - { - return IntegerPart + static_cast(1); - } - //else // Bug on MinGW 4.5.2 - //{ - // return mix(IntegerPart + genType(-1), IntegerPart + genType(1), x <= genType(0)); - //} - } - - template - GLM_FUNC_QUALIFIER vec roundEven(vec const& x) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'roundEven' only accept floating-point inputs"); - return detail::functor1::call(roundEven, x); - } - - // ceil - using ::std::ceil; - template - GLM_FUNC_QUALIFIER vec ceil(vec const& x) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'ceil' only accept floating-point inputs"); - return detail::compute_ceil::value>::call(x); - } - - // fract - template - GLM_FUNC_QUALIFIER genType fract(genType x) - { - return fract(vec<1, genType>(x)).x; - } - - template - GLM_FUNC_QUALIFIER vec fract(vec const& x) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'fract' only accept floating-point inputs"); - return detail::compute_fract::value>::call(x); - } - - // mod - template - GLM_FUNC_QUALIFIER genType mod(genType x, genType y) - { -# if GLM_COMPILER & GLM_COMPILER_CUDA - // Another Cuda compiler bug https://github.com/g-truc/glm/issues/530 - vec<1, genType, defaultp> Result(mod(vec<1, genType, defaultp>(x), y)); - return Result.x; -# else - return mod(vec<1, genType, defaultp>(x), y).x; -# endif - } - - template - GLM_FUNC_QUALIFIER vec mod(vec const& x, T y) - { - return detail::compute_mod::value>::call(x, vec(y)); - } - - template - GLM_FUNC_QUALIFIER vec mod(vec const& x, vec const& y) - { - return detail::compute_mod::value>::call(x, y); - } - - // modf - template - GLM_FUNC_QUALIFIER genType modf(genType x, genType & i) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'modf' only accept floating-point inputs"); - return std::modf(x, &i); - } - - template - GLM_FUNC_QUALIFIER vec<1, T, Q> modf(vec<1, T, Q> const& x, vec<1, T, Q> & i) - { - return vec<1, T, Q>( - modf(x.x, i.x)); - } - - template - GLM_FUNC_QUALIFIER vec<2, T, Q> modf(vec<2, T, Q> const& x, vec<2, T, Q> & i) - { - return vec<2, T, Q>( - modf(x.x, i.x), - modf(x.y, i.y)); - } - - template - GLM_FUNC_QUALIFIER vec<3, T, Q> modf(vec<3, T, Q> const& x, vec<3, T, Q> & i) - { - return vec<3, T, Q>( - modf(x.x, i.x), - modf(x.y, i.y), - modf(x.z, i.z)); - } - - template - GLM_FUNC_QUALIFIER vec<4, T, Q> modf(vec<4, T, Q> const& x, vec<4, T, Q> & i) - { - return vec<4, T, Q>( - modf(x.x, i.x), - modf(x.y, i.y), - modf(x.z, i.z), - modf(x.w, i.w)); - } - - //// Only valid if (INT_MIN <= x-y <= INT_MAX) - //// min(x,y) - //r = y + ((x - y) & ((x - y) >> (sizeof(int) * - //CHAR_BIT - 1))); - //// max(x,y) - //r = x - ((x - y) & ((x - y) >> (sizeof(int) * - //CHAR_BIT - 1))); - - // min - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec min(vec const& a, T b) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559 || std::numeric_limits::is_integer, "'min' only accept floating-point or integer inputs"); - return detail::compute_min_vector::value>::call(a, vec(b)); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec min(vec const& a, vec const& b) - { - return detail::compute_min_vector::value>::call(a, b); - } - - // max - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec max(vec const& a, T b) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559 || std::numeric_limits::is_integer, "'max' only accept floating-point or integer inputs"); - 
return detail::compute_max_vector::value>::call(a, vec(b)); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec max(vec const& a, vec const& b) - { - return detail::compute_max_vector::value>::call(a, b); - } - - // clamp - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR genType clamp(genType x, genType minVal, genType maxVal) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559 || std::numeric_limits::is_integer, "'clamp' only accept floating-point or integer inputs"); - return min(max(x, minVal), maxVal); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec clamp(vec const& x, T minVal, T maxVal) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559 || std::numeric_limits::is_integer, "'clamp' only accept floating-point or integer inputs"); - return detail::compute_clamp_vector::value>::call(x, vec(minVal), vec(maxVal)); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec clamp(vec const& x, vec const& minVal, vec const& maxVal) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559 || std::numeric_limits::is_integer, "'clamp' only accept floating-point or integer inputs"); - return detail::compute_clamp_vector::value>::call(x, minVal, maxVal); - } - - template - GLM_FUNC_QUALIFIER genTypeT mix(genTypeT x, genTypeT y, genTypeU a) - { - return detail::compute_mix::call(x, y, a); - } - - template - GLM_FUNC_QUALIFIER vec mix(vec const& x, vec const& y, U a) - { - return detail::compute_mix_scalar::value>::call(x, y, a); - } - - template - GLM_FUNC_QUALIFIER vec mix(vec const& x, vec const& y, vec const& a) - { - return detail::compute_mix_vector::value>::call(x, y, a); - } - - // step - template - GLM_FUNC_QUALIFIER genType step(genType edge, genType x) - { - return mix(static_cast(1), static_cast(0), x < edge); - } - - template - GLM_FUNC_QUALIFIER vec step(T edge, vec const& x) - { - return detail::compute_step_vector::value>::call(vec(edge), x); - } - - template - GLM_FUNC_QUALIFIER vec step(vec const& edge, vec const& x) - { - return detail::compute_step_vector::value>::call(edge, x); - } - - // smoothstep - template - GLM_FUNC_QUALIFIER genType smoothstep(genType edge0, genType edge1, genType x) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559 || GLM_CONFIG_UNRESTRICTED_GENTYPE, "'smoothstep' only accept floating-point inputs"); - - genType const tmp(clamp((x - edge0) / (edge1 - edge0), genType(0), genType(1))); - return tmp * tmp * (genType(3) - genType(2) * tmp); - } - - template - GLM_FUNC_QUALIFIER vec smoothstep(T edge0, T edge1, vec const& x) - { - return detail::compute_smoothstep_vector::value>::call(vec(edge0), vec(edge1), x); - } - - template - GLM_FUNC_QUALIFIER vec smoothstep(vec const& edge0, vec const& edge1, vec const& x) - { - return detail::compute_smoothstep_vector::value>::call(edge0, edge1, x); - } - -# if GLM_HAS_CXX11_STL - using std::isnan; -# else - template - GLM_FUNC_QUALIFIER bool isnan(genType x) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'isnan' only accept floating-point inputs"); - -# if GLM_HAS_CXX11_STL - return std::isnan(x); -# elif GLM_COMPILER & GLM_COMPILER_VC - return _isnan(x) != 0; -# elif GLM_COMPILER & GLM_COMPILER_INTEL -# if GLM_PLATFORM & GLM_PLATFORM_WINDOWS - return _isnan(x) != 0; -# else - return ::isnan(x) != 0; -# endif -# elif (GLM_COMPILER & (GLM_COMPILER_GCC | GLM_COMPILER_CLANG)) && (GLM_PLATFORM & GLM_PLATFORM_ANDROID) && __cplusplus < 201103L - return _isnan(x) != 0; -# elif GLM_COMPILER & GLM_COMPILER_CUDA - return ::isnan(x) != 0; -# else - return std::isnan(x); -# endif - } -# endif - - template 
- GLM_FUNC_QUALIFIER vec isnan(vec const& v) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'isnan' only accept floating-point inputs"); - - vec Result; - for (length_t l = 0; l < v.length(); ++l) - Result[l] = glm::isnan(v[l]); - return Result; - } - -# if GLM_HAS_CXX11_STL - using std::isinf; -# else - template - GLM_FUNC_QUALIFIER bool isinf(genType x) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'isinf' only accept floating-point inputs"); - -# if GLM_HAS_CXX11_STL - return std::isinf(x); -# elif GLM_COMPILER & (GLM_COMPILER_INTEL | GLM_COMPILER_VC) -# if(GLM_PLATFORM & GLM_PLATFORM_WINDOWS) - return _fpclass(x) == _FPCLASS_NINF || _fpclass(x) == _FPCLASS_PINF; -# else - return ::isinf(x); -# endif -# elif GLM_COMPILER & (GLM_COMPILER_GCC | GLM_COMPILER_CLANG) -# if(GLM_PLATFORM & GLM_PLATFORM_ANDROID && __cplusplus < 201103L) - return _isinf(x) != 0; -# else - return std::isinf(x); -# endif -# elif GLM_COMPILER & GLM_COMPILER_CUDA - // http://developer.download.nvidia.com/compute/cuda/4_2/rel/toolkit/docs/online/group__CUDA__MATH__DOUBLE_g13431dd2b40b51f9139cbb7f50c18fab.html#g13431dd2b40b51f9139cbb7f50c18fab - return ::isinf(double(x)) != 0; -# else - return std::isinf(x); -# endif - } -# endif - - template - GLM_FUNC_QUALIFIER vec isinf(vec const& v) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'isinf' only accept floating-point inputs"); - - vec Result; - for (length_t l = 0; l < v.length(); ++l) - Result[l] = glm::isinf(v[l]); - return Result; - } - - GLM_FUNC_QUALIFIER int floatBitsToInt(float const& v) - { - union - { - float in; - int out; - } u; - - u.in = v; - - return u.out; - } - - template - GLM_FUNC_QUALIFIER vec floatBitsToInt(vec const& v) - { - return reinterpret_cast&>(const_cast&>(v)); - } - - GLM_FUNC_QUALIFIER uint floatBitsToUint(float const& v) - { - union - { - float in; - uint out; - } u; - - u.in = v; - - return u.out; - } - - template - GLM_FUNC_QUALIFIER vec floatBitsToUint(vec const& v) - { - return reinterpret_cast&>(const_cast&>(v)); - } - - GLM_FUNC_QUALIFIER float intBitsToFloat(int const& v) - { - union - { - int in; - float out; - } u; - - u.in = v; - - return u.out; - } - - template - GLM_FUNC_QUALIFIER vec intBitsToFloat(vec const& v) - { - return reinterpret_cast&>(const_cast&>(v)); - } - - GLM_FUNC_QUALIFIER float uintBitsToFloat(uint const& v) - { - union - { - uint in; - float out; - } u; - - u.in = v; - - return u.out; - } - - template - GLM_FUNC_QUALIFIER vec uintBitsToFloat(vec const& v) - { - return reinterpret_cast&>(const_cast&>(v)); - } - -# if GLM_HAS_CXX11_STL - using std::fma; -# else - template - GLM_FUNC_QUALIFIER genType fma(genType const& a, genType const& b, genType const& c) - { - return a * b + c; - } -# endif - - template - GLM_FUNC_QUALIFIER genType frexp(genType x, int& exp) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'frexp' only accept floating-point inputs"); - - return std::frexp(x, &exp); - } - - template - GLM_FUNC_QUALIFIER vec frexp(vec const& v, vec& exp) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'frexp' only accept floating-point inputs"); - - vec Result; - for (length_t l = 0; l < v.length(); ++l) - Result[l] = std::frexp(v[l], &exp[l]); - return Result; - } - - template - GLM_FUNC_QUALIFIER genType ldexp(genType const& x, int const& exp) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'ldexp' only accept floating-point inputs"); - - return std::ldexp(x, exp); - } - - template - GLM_FUNC_QUALIFIER vec ldexp(vec const& v, vec const& exp) - { - 
GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'ldexp' only accept floating-point inputs"); - - vec Result; - for (length_t l = 0; l < v.length(); ++l) - Result[l] = std::ldexp(v[l], exp[l]); - return Result; - } -}//namespace glm - -#if GLM_CONFIG_SIMD == GLM_ENABLE -# include "func_common_simd.inl" -#endif diff --git a/third_party/glm/detail/func_common_simd.inl b/third_party/glm/detail/func_common_simd.inl deleted file mode 100755 index ce0032d..0000000 --- a/third_party/glm/detail/func_common_simd.inl +++ /dev/null @@ -1,231 +0,0 @@ -/// @ref core -/// @file glm/detail/func_common_simd.inl - -#if GLM_ARCH & GLM_ARCH_SSE2_BIT - -#include "../simd/common.h" - -#include - -namespace glm{ -namespace detail -{ - template - struct compute_abs_vector<4, float, Q, true> - { - GLM_FUNC_QUALIFIER static vec<4, float, Q> call(vec<4, float, Q> const& v) - { - vec<4, float, Q> result; - result.data = glm_vec4_abs(v.data); - return result; - } - }; - - template - struct compute_abs_vector<4, int, Q, true> - { - GLM_FUNC_QUALIFIER static vec<4, int, Q> call(vec<4, int, Q> const& v) - { - vec<4, int, Q> result; - result.data = glm_ivec4_abs(v.data); - return result; - } - }; - - template - struct compute_floor<4, float, Q, true> - { - GLM_FUNC_QUALIFIER static vec<4, float, Q> call(vec<4, float, Q> const& v) - { - vec<4, float, Q> result; - result.data = glm_vec4_floor(v.data); - return result; - } - }; - - template - struct compute_ceil<4, float, Q, true> - { - GLM_FUNC_QUALIFIER static vec<4, float, Q> call(vec<4, float, Q> const& v) - { - vec<4, float, Q> result; - result.data = glm_vec4_ceil(v.data); - return result; - } - }; - - template - struct compute_fract<4, float, Q, true> - { - GLM_FUNC_QUALIFIER static vec<4, float, Q> call(vec<4, float, Q> const& v) - { - vec<4, float, Q> result; - result.data = glm_vec4_fract(v.data); - return result; - } - }; - - template - struct compute_round<4, float, Q, true> - { - GLM_FUNC_QUALIFIER static vec<4, float, Q> call(vec<4, float, Q> const& v) - { - vec<4, float, Q> result; - result.data = glm_vec4_round(v.data); - return result; - } - }; - - template - struct compute_mod<4, float, Q, true> - { - GLM_FUNC_QUALIFIER static vec<4, float, Q> call(vec<4, float, Q> const& x, vec<4, float, Q> const& y) - { - vec<4, float, Q> result; - result.data = glm_vec4_mod(x.data, y.data); - return result; - } - }; - - template - struct compute_min_vector<4, float, Q, true> - { - GLM_FUNC_QUALIFIER static vec<4, float, Q> call(vec<4, float, Q> const& v1, vec<4, float, Q> const& v2) - { - vec<4, float, Q> result; - result.data = _mm_min_ps(v1.data, v2.data); - return result; - } - }; - - template - struct compute_min_vector<4, int, Q, true> - { - GLM_FUNC_QUALIFIER static vec<4, int, Q> call(vec<4, int, Q> const& v1, vec<4, int, Q> const& v2) - { - vec<4, int, Q> result; - result.data = _mm_min_epi32(v1.data, v2.data); - return result; - } - }; - - template - struct compute_min_vector<4, uint, Q, true> - { - GLM_FUNC_QUALIFIER static vec<4, uint, Q> call(vec<4, uint, Q> const& v1, vec<4, uint, Q> const& v2) - { - vec<4, uint, Q> result; - result.data = _mm_min_epu32(v1.data, v2.data); - return result; - } - }; - - template - struct compute_max_vector<4, float, Q, true> - { - GLM_FUNC_QUALIFIER static vec<4, float, Q> call(vec<4, float, Q> const& v1, vec<4, float, Q> const& v2) - { - vec<4, float, Q> result; - result.data = _mm_max_ps(v1.data, v2.data); - return result; - } - }; - - template - struct compute_max_vector<4, int, Q, true> - { - GLM_FUNC_QUALIFIER static 
vec<4, int, Q> call(vec<4, int, Q> const& v1, vec<4, int, Q> const& v2) - { - vec<4, int, Q> result; - result.data = _mm_max_epi32(v1.data, v2.data); - return result; - } - }; - - template - struct compute_max_vector<4, uint, Q, true> - { - GLM_FUNC_QUALIFIER static vec<4, uint, Q> call(vec<4, uint, Q> const& v1, vec<4, uint, Q> const& v2) - { - vec<4, uint, Q> result; - result.data = _mm_max_epu32(v1.data, v2.data); - return result; - } - }; - - template - struct compute_clamp_vector<4, float, Q, true> - { - GLM_FUNC_QUALIFIER static vec<4, float, Q> call(vec<4, float, Q> const& x, vec<4, float, Q> const& minVal, vec<4, float, Q> const& maxVal) - { - vec<4, float, Q> result; - result.data = _mm_min_ps(_mm_max_ps(x.data, minVal.data), maxVal.data); - return result; - } - }; - - template - struct compute_clamp_vector<4, int, Q, true> - { - GLM_FUNC_QUALIFIER static vec<4, int, Q> call(vec<4, int, Q> const& x, vec<4, int, Q> const& minVal, vec<4, int, Q> const& maxVal) - { - vec<4, int, Q> result; - result.data = _mm_min_epi32(_mm_max_epi32(x.data, minVal.data), maxVal.data); - return result; - } - }; - - template - struct compute_clamp_vector<4, uint, Q, true> - { - GLM_FUNC_QUALIFIER static vec<4, uint, Q> call(vec<4, uint, Q> const& x, vec<4, uint, Q> const& minVal, vec<4, uint, Q> const& maxVal) - { - vec<4, uint, Q> result; - result.data = _mm_min_epu32(_mm_max_epu32(x.data, minVal.data), maxVal.data); - return result; - } - }; - - template - struct compute_mix_vector<4, float, bool, Q, true> - { - GLM_FUNC_QUALIFIER static vec<4, float, Q> call(vec<4, float, Q> const& x, vec<4, float, Q> const& y, vec<4, bool, Q> const& a) - { - __m128i const Load = _mm_set_epi32(-static_cast(a.w), -static_cast(a.z), -static_cast(a.y), -static_cast(a.x)); - __m128 const Mask = _mm_castsi128_ps(Load); - - vec<4, float, Q> Result; -# if 0 && GLM_ARCH & GLM_ARCH_AVX - Result.data = _mm_blendv_ps(x.data, y.data, Mask); -# else - Result.data = _mm_or_ps(_mm_and_ps(Mask, y.data), _mm_andnot_ps(Mask, x.data)); -# endif - return Result; - } - }; -/* FIXME - template - struct compute_step_vector - { - GLM_FUNC_QUALIFIER static vec<4, float, Q> call(vec<4, float, Q> const& edge, vec<4, float, Q> const& x) - { - vec<4, float, Q> Result; - result.data = glm_vec4_step(edge.data, x.data); - return result; - } - }; -*/ - template - struct compute_smoothstep_vector<4, float, Q, true> - { - GLM_FUNC_QUALIFIER static vec<4, float, Q> call(vec<4, float, Q> const& edge0, vec<4, float, Q> const& edge1, vec<4, float, Q> const& x) - { - vec<4, float, Q> Result; - Result.data = glm_vec4_smoothstep(edge0.data, edge1.data, x.data); - return Result; - } - }; -}//namespace detail -}//namespace glm - -#endif//GLM_ARCH & GLM_ARCH_SSE2_BIT diff --git a/third_party/glm/detail/func_exponential.inl b/third_party/glm/detail/func_exponential.inl deleted file mode 100755 index 2040d41..0000000 --- a/third_party/glm/detail/func_exponential.inl +++ /dev/null @@ -1,152 +0,0 @@ -/// @ref core -/// @file glm/detail/func_exponential.inl - -#include "../vector_relational.hpp" -#include "_vectorize.hpp" -#include -#include -#include - -namespace glm{ -namespace detail -{ -# if GLM_HAS_CXX11_STL - using std::log2; -# else - template - genType log2(genType Value) - { - return std::log(Value) * static_cast(1.4426950408889634073599246810019); - } -# endif - - template - struct compute_log2 - { - GLM_FUNC_QUALIFIER static vec call(vec const& v) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'log2' only accept floating-point inputs. 
Include for integer inputs."); - - return detail::functor1::call(log2, v); - } - }; - - template - struct compute_sqrt - { - GLM_FUNC_QUALIFIER static vec call(vec const& x) - { - return detail::functor1::call(std::sqrt, x); - } - }; - - template - struct compute_inversesqrt - { - GLM_FUNC_QUALIFIER static vec call(vec const& x) - { - return static_cast(1) / sqrt(x); - } - }; - - template - struct compute_inversesqrt - { - GLM_FUNC_QUALIFIER static vec call(vec const& x) - { - vec tmp(x); - vec xhalf(tmp * 0.5f); - vec* p = reinterpret_cast*>(const_cast*>(&x)); - vec i = vec(0x5f375a86) - (*p >> vec(1)); - vec* ptmp = reinterpret_cast*>(&i); - tmp = *ptmp; - tmp = tmp * (1.5f - xhalf * tmp * tmp); - return tmp; - } - }; -}//namespace detail - - // pow - using std::pow; - template - GLM_FUNC_QUALIFIER vec pow(vec const& base, vec const& exponent) - { - return detail::functor2::call(pow, base, exponent); - } - - // exp - using std::exp; - template - GLM_FUNC_QUALIFIER vec exp(vec const& x) - { - return detail::functor1::call(exp, x); - } - - // log - using std::log; - template - GLM_FUNC_QUALIFIER vec log(vec const& x) - { - return detail::functor1::call(log, x); - } - -# if GLM_HAS_CXX11_STL - using std::exp2; -# else - //exp2, ln2 = 0.69314718055994530941723212145818f - template - GLM_FUNC_QUALIFIER genType exp2(genType x) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'exp2' only accept floating-point inputs"); - - return std::exp(static_cast(0.69314718055994530941723212145818) * x); - } -# endif - - template - GLM_FUNC_QUALIFIER vec exp2(vec const& x) - { - return detail::functor1::call(exp2, x); - } - - // log2, ln2 = 0.69314718055994530941723212145818f - template - GLM_FUNC_QUALIFIER genType log2(genType x) - { - return log2(vec<1, genType>(x)).x; - } - - template - GLM_FUNC_QUALIFIER vec log2(vec const& x) - { - return detail::compute_log2::is_iec559, detail::is_aligned::value>::call(x); - } - - // sqrt - using std::sqrt; - template - GLM_FUNC_QUALIFIER vec sqrt(vec const& x) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'sqrt' only accept floating-point inputs"); - return detail::compute_sqrt::value>::call(x); - } - - // inversesqrt - template - GLM_FUNC_QUALIFIER genType inversesqrt(genType x) - { - return static_cast(1) / sqrt(x); - } - - template - GLM_FUNC_QUALIFIER vec inversesqrt(vec const& x) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'inversesqrt' only accept floating-point inputs"); - return detail::compute_inversesqrt::value>::call(x); - } -}//namespace glm - -#if GLM_CONFIG_SIMD == GLM_ENABLE -# include "func_exponential_simd.inl" -#endif - diff --git a/third_party/glm/detail/func_exponential_simd.inl b/third_party/glm/detail/func_exponential_simd.inl deleted file mode 100755 index fb78951..0000000 --- a/third_party/glm/detail/func_exponential_simd.inl +++ /dev/null @@ -1,37 +0,0 @@ -/// @ref core -/// @file glm/detail/func_exponential_simd.inl - -#include "../simd/exponential.h" - -#if GLM_ARCH & GLM_ARCH_SSE2_BIT - -namespace glm{ -namespace detail -{ - template - struct compute_sqrt<4, float, Q, true> - { - GLM_FUNC_QUALIFIER static vec<4, float, Q> call(vec<4, float, Q> const& v) - { - vec<4, float, Q> Result; - Result.data = _mm_sqrt_ps(v.data); - return Result; - } - }; - -# if GLM_CONFIG_ALIGNED_GENTYPES == GLM_ENABLE - template<> - struct compute_sqrt<4, float, aligned_lowp, true> - { - GLM_FUNC_QUALIFIER static vec<4, float, aligned_lowp> call(vec<4, float, aligned_lowp> const& v) - { - vec<4, float, aligned_lowp> Result; - 
Result.data = glm_vec4_sqrt_lowp(v.data); - return Result; - } - }; -# endif -}//namespace detail -}//namespace glm - -#endif//GLM_ARCH & GLM_ARCH_SSE2_BIT diff --git a/third_party/glm/detail/func_geometric.inl b/third_party/glm/detail/func_geometric.inl deleted file mode 100755 index 9cde28f..0000000 --- a/third_party/glm/detail/func_geometric.inl +++ /dev/null @@ -1,243 +0,0 @@ -#include "../exponential.hpp" -#include "../common.hpp" - -namespace glm{ -namespace detail -{ - template - struct compute_length - { - GLM_FUNC_QUALIFIER static T call(vec const& v) - { - return sqrt(dot(v, v)); - } - }; - - template - struct compute_distance - { - GLM_FUNC_QUALIFIER static T call(vec const& p0, vec const& p1) - { - return length(p1 - p0); - } - }; - - template - struct compute_dot{}; - - template - struct compute_dot, T, Aligned> - { - GLM_FUNC_QUALIFIER static T call(vec<1, T, Q> const& a, vec<1, T, Q> const& b) - { - return a.x * b.x; - } - }; - - template - struct compute_dot, T, Aligned> - { - GLM_FUNC_QUALIFIER static T call(vec<2, T, Q> const& a, vec<2, T, Q> const& b) - { - vec<2, T, Q> tmp(a * b); - return tmp.x + tmp.y; - } - }; - - template - struct compute_dot, T, Aligned> - { - GLM_FUNC_QUALIFIER static T call(vec<3, T, Q> const& a, vec<3, T, Q> const& b) - { - vec<3, T, Q> tmp(a * b); - return tmp.x + tmp.y + tmp.z; - } - }; - - template - struct compute_dot, T, Aligned> - { - GLM_FUNC_QUALIFIER static T call(vec<4, T, Q> const& a, vec<4, T, Q> const& b) - { - vec<4, T, Q> tmp(a * b); - return (tmp.x + tmp.y) + (tmp.z + tmp.w); - } - }; - - template - struct compute_cross - { - GLM_FUNC_QUALIFIER static vec<3, T, Q> call(vec<3, T, Q> const& x, vec<3, T, Q> const& y) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'cross' accepts only floating-point inputs"); - - return vec<3, T, Q>( - x.y * y.z - y.y * x.z, - x.z * y.x - y.z * x.x, - x.x * y.y - y.x * x.y); - } - }; - - template - struct compute_normalize - { - GLM_FUNC_QUALIFIER static vec call(vec const& v) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'normalize' accepts only floating-point inputs"); - - return v * inversesqrt(dot(v, v)); - } - }; - - template - struct compute_faceforward - { - GLM_FUNC_QUALIFIER static vec call(vec const& N, vec const& I, vec const& Nref) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'normalize' accepts only floating-point inputs"); - - return dot(Nref, I) < static_cast(0) ? N : -N; - } - }; - - template - struct compute_reflect - { - GLM_FUNC_QUALIFIER static vec call(vec const& I, vec const& N) - { - return I - N * dot(N, I) * static_cast(2); - } - }; - - template - struct compute_refract - { - GLM_FUNC_QUALIFIER static vec call(vec const& I, vec const& N, T eta) - { - T const dotValue(dot(N, I)); - T const k(static_cast(1) - eta * eta * (static_cast(1) - dotValue * dotValue)); - vec const Result = - (k >= static_cast(0)) ? 
(eta * I - (eta * dotValue + std::sqrt(k)) * N) : vec(0); - return Result; - } - }; -}//namespace detail - - // length - template - GLM_FUNC_QUALIFIER genType length(genType x) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'length' accepts only floating-point inputs"); - - return abs(x); - } - - template - GLM_FUNC_QUALIFIER T length(vec const& v) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'length' accepts only floating-point inputs"); - - return detail::compute_length::value>::call(v); - } - - // distance - template - GLM_FUNC_QUALIFIER genType distance(genType const& p0, genType const& p1) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'distance' accepts only floating-point inputs"); - - return length(p1 - p0); - } - - template - GLM_FUNC_QUALIFIER T distance(vec const& p0, vec const& p1) - { - return detail::compute_distance::value>::call(p0, p1); - } - - // dot - template - GLM_FUNC_QUALIFIER T dot(T x, T y) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'dot' accepts only floating-point inputs"); - return x * y; - } - - template - GLM_FUNC_QUALIFIER T dot(vec const& x, vec const& y) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'dot' accepts only floating-point inputs"); - return detail::compute_dot, T, detail::is_aligned::value>::call(x, y); - } - - // cross - template - GLM_FUNC_QUALIFIER vec<3, T, Q> cross(vec<3, T, Q> const& x, vec<3, T, Q> const& y) - { - return detail::compute_cross::value>::call(x, y); - } -/* - // normalize - template - GLM_FUNC_QUALIFIER genType normalize(genType const& x) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'normalize' accepts only floating-point inputs"); - - return x < genType(0) ? genType(-1) : genType(1); - } -*/ - template - GLM_FUNC_QUALIFIER vec normalize(vec const& x) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'normalize' accepts only floating-point inputs"); - - return detail::compute_normalize::value>::call(x); - } - - // faceforward - template - GLM_FUNC_QUALIFIER genType faceforward(genType const& N, genType const& I, genType const& Nref) - { - return dot(Nref, I) < static_cast(0) ? 
N : -N; - } - - template - GLM_FUNC_QUALIFIER vec faceforward(vec const& N, vec const& I, vec const& Nref) - { - return detail::compute_faceforward::value>::call(N, I, Nref); - } - - // reflect - template - GLM_FUNC_QUALIFIER genType reflect(genType const& I, genType const& N) - { - return I - N * dot(N, I) * genType(2); - } - - template - GLM_FUNC_QUALIFIER vec reflect(vec const& I, vec const& N) - { - return detail::compute_reflect::value>::call(I, N); - } - - // refract - template - GLM_FUNC_QUALIFIER genType refract(genType const& I, genType const& N, genType eta) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'refract' accepts only floating-point inputs"); - genType const dotValue(dot(N, I)); - genType const k(static_cast(1) - eta * eta * (static_cast(1) - dotValue * dotValue)); - return (eta * I - (eta * dotValue + sqrt(k)) * N) * static_cast(k >= static_cast(0)); - } - - template - GLM_FUNC_QUALIFIER vec refract(vec const& I, vec const& N, T eta) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'refract' accepts only floating-point inputs"); - return detail::compute_refract::value>::call(I, N, eta); - } -}//namespace glm - -#if GLM_CONFIG_SIMD == GLM_ENABLE -# include "func_geometric_simd.inl" -#endif diff --git a/third_party/glm/detail/func_geometric_simd.inl b/third_party/glm/detail/func_geometric_simd.inl deleted file mode 100755 index dfe3f4c..0000000 --- a/third_party/glm/detail/func_geometric_simd.inl +++ /dev/null @@ -1,165 +0,0 @@ -/// @ref core -/// @file glm/detail/func_geometric_simd.inl - -#include "../simd/geometric.h" - -#if GLM_ARCH & GLM_ARCH_SSE2_BIT - -namespace glm{ -namespace detail -{ - template - struct compute_length<4, float, Q, true> - { - GLM_FUNC_QUALIFIER static float call(vec<4, float, Q> const& v) - { - return _mm_cvtss_f32(glm_vec4_length(v.data)); - } - }; - - template - struct compute_distance<4, float, Q, true> - { - GLM_FUNC_QUALIFIER static float call(vec<4, float, Q> const& p0, vec<4, float, Q> const& p1) - { - return _mm_cvtss_f32(glm_vec4_distance(p0.data, p1.data)); - } - }; - - template - struct compute_dot, float, true> - { - GLM_FUNC_QUALIFIER static float call(vec<4, float, Q> const& x, vec<4, float, Q> const& y) - { - return _mm_cvtss_f32(glm_vec1_dot(x.data, y.data)); - } - }; - - template - struct compute_cross - { - GLM_FUNC_QUALIFIER static vec<3, float, Q> call(vec<3, float, Q> const& a, vec<3, float, Q> const& b) - { - __m128 const set0 = _mm_set_ps(0.0f, a.z, a.y, a.x); - __m128 const set1 = _mm_set_ps(0.0f, b.z, b.y, b.x); - __m128 const xpd0 = glm_vec4_cross(set0, set1); - - vec<4, float, Q> Result; - Result.data = xpd0; - return vec<3, float, Q>(Result); - } - }; - - template - struct compute_normalize<4, float, Q, true> - { - GLM_FUNC_QUALIFIER static vec<4, float, Q> call(vec<4, float, Q> const& v) - { - vec<4, float, Q> Result; - Result.data = glm_vec4_normalize(v.data); - return Result; - } - }; - - template - struct compute_faceforward<4, float, Q, true> - { - GLM_FUNC_QUALIFIER static vec<4, float, Q> call(vec<4, float, Q> const& N, vec<4, float, Q> const& I, vec<4, float, Q> const& Nref) - { - vec<4, float, Q> Result; - Result.data = glm_vec4_faceforward(N.data, I.data, Nref.data); - return Result; - } - }; - - template - struct compute_reflect<4, float, Q, true> - { - GLM_FUNC_QUALIFIER static vec<4, float, Q> call(vec<4, float, Q> const& I, vec<4, float, Q> const& N) - { - vec<4, float, Q> Result; - Result.data = glm_vec4_reflect(I.data, N.data); - return Result; - } - }; - - template - struct 
compute_refract<4, float, Q, true> - { - GLM_FUNC_QUALIFIER static vec<4, float, Q> call(vec<4, float, Q> const& I, vec<4, float, Q> const& N, float eta) - { - vec<4, float, Q> Result; - Result.data = glm_vec4_refract(I.data, N.data, _mm_set1_ps(eta)); - return Result; - } - }; -}//namespace detail -}//namespace glm - -#elif GLM_ARCH & GLM_ARCH_NEON_BIT -namespace glm{ -namespace detail -{ - template - struct compute_length<4, float, Q, true> - { - GLM_FUNC_QUALIFIER static float call(vec<4, float, Q> const& v) - { - return compute_dot, float, true>::call(v, v); - } - }; - - template - struct compute_distance<4, float, Q, true> - { - GLM_FUNC_QUALIFIER static float call(vec<4, float, Q> const& p0, vec<4, float, Q> const& p1) - { - return compute_length<4, float, Q, true>::call(p1 - p0); - } - }; - - - template - struct compute_dot, float, true> - { - GLM_FUNC_QUALIFIER static float call(vec<4, float, Q> const& x, vec<4, float, Q> const& y) - { -#if GLM_ARCH & GLM_ARCH_ARMV8_BIT - float32x4_t v = vmulq_f32(x.data, y.data); - v = vpaddq_f32(v, v); - v = vpaddq_f32(v, v); - return vgetq_lane_f32(v, 0); -#else // Armv7a with Neon - float32x4_t p = vmulq_f32(x.data, y.data); - float32x2_t v = vpadd_f32(vget_low_f32(p), vget_high_f32(p)); - v = vpadd_f32(v, v); - return vget_lane_f32(v, 0); -#endif - } - }; - - template - struct compute_normalize<4, float, Q, true> - { - GLM_FUNC_QUALIFIER static vec<4, float, Q> call(vec<4, float, Q> const& v) - { - float32x4_t p = vmulq_f32(v.data, v.data); -#if GLM_ARCH & GLM_ARCH_ARMV8_BIT - p = vpaddq_f32(p, p); - p = vpaddq_f32(p, p); -#else - float32x2_t t = vpadd_f32(vget_low_f32(p), vget_high_f32(p)); - t = vpadd_f32(t, t); - p = vcombine_f32(t, t); -#endif - - float32x4_t vd = vrsqrteq_f32(p); - vec<4, float, Q> Result; - Result.data = vmulq_f32(v.data, vd); - return Result; - } - }; -}//namespace detail -}//namespace glm - -#endif//GLM_ARCH & GLM_ARCH_SSE2_BIT diff --git a/third_party/glm/detail/func_integer.inl b/third_party/glm/detail/func_integer.inl deleted file mode 100755 index 091e1e0..0000000 --- a/third_party/glm/detail/func_integer.inl +++ /dev/null @@ -1,372 +0,0 @@ -/// @ref core - -#include "_vectorize.hpp" -#if(GLM_ARCH & GLM_ARCH_X86 && GLM_COMPILER & GLM_COMPILER_VC) -# include -# pragma intrinsic(_BitScanReverse) -#endif//(GLM_ARCH & GLM_ARCH_X86 && GLM_COMPILER & GLM_COMPILER_VC) -#include - -#if !GLM_HAS_EXTENDED_INTEGER_TYPE -# if GLM_COMPILER & GLM_COMPILER_GCC -# pragma GCC diagnostic ignored "-Wlong-long" -# endif -# if (GLM_COMPILER & GLM_COMPILER_CLANG) -# pragma clang diagnostic ignored "-Wc++11-long-long" -# endif -#endif - -namespace glm{ -namespace detail -{ - template - GLM_FUNC_QUALIFIER T mask(T Bits) - { - return Bits >= static_cast(sizeof(T) * 8) ? 
~static_cast(0) : (static_cast(1) << Bits) - static_cast(1); - } - - template - struct compute_bitfieldReverseStep - { - GLM_FUNC_QUALIFIER static vec call(vec const& v, T, T) - { - return v; - } - }; - - template - struct compute_bitfieldReverseStep - { - GLM_FUNC_QUALIFIER static vec call(vec const& v, T Mask, T Shift) - { - return (v & Mask) << Shift | (v & (~Mask)) >> Shift; - } - }; - - template - struct compute_bitfieldBitCountStep - { - GLM_FUNC_QUALIFIER static vec call(vec const& v, T, T) - { - return v; - } - }; - - template - struct compute_bitfieldBitCountStep - { - GLM_FUNC_QUALIFIER static vec call(vec const& v, T Mask, T Shift) - { - return (v & Mask) + ((v >> Shift) & Mask); - } - }; - - template - struct compute_findLSB - { - GLM_FUNC_QUALIFIER static int call(genIUType Value) - { - if(Value == 0) - return -1; - - return glm::bitCount(~Value & (Value - static_cast(1))); - } - }; - -# if GLM_HAS_BITSCAN_WINDOWS - template - struct compute_findLSB - { - GLM_FUNC_QUALIFIER static int call(genIUType Value) - { - unsigned long Result(0); - unsigned char IsNotNull = _BitScanForward(&Result, *reinterpret_cast(&Value)); - return IsNotNull ? int(Result) : -1; - } - }; - -# if !((GLM_COMPILER & GLM_COMPILER_VC) && (GLM_MODEL == GLM_MODEL_32)) - template - struct compute_findLSB - { - GLM_FUNC_QUALIFIER static int call(genIUType Value) - { - unsigned long Result(0); - unsigned char IsNotNull = _BitScanForward64(&Result, *reinterpret_cast(&Value)); - return IsNotNull ? int(Result) : -1; - } - }; -# endif -# endif//GLM_HAS_BITSCAN_WINDOWS - - template - struct compute_findMSB_step_vec - { - GLM_FUNC_QUALIFIER static vec call(vec const& x, T Shift) - { - return x | (x >> Shift); - } - }; - - template - struct compute_findMSB_step_vec - { - GLM_FUNC_QUALIFIER static vec call(vec const& x, T) - { - return x; - } - }; - - template - struct compute_findMSB_vec - { - GLM_FUNC_QUALIFIER static vec call(vec const& v) - { - vec x(v); - x = compute_findMSB_step_vec= 8>::call(x, static_cast( 1)); - x = compute_findMSB_step_vec= 8>::call(x, static_cast( 2)); - x = compute_findMSB_step_vec= 8>::call(x, static_cast( 4)); - x = compute_findMSB_step_vec= 16>::call(x, static_cast( 8)); - x = compute_findMSB_step_vec= 32>::call(x, static_cast(16)); - x = compute_findMSB_step_vec= 64>::call(x, static_cast(32)); - return vec(sizeof(T) * 8 - 1) - glm::bitCount(~x); - } - }; - -# if GLM_HAS_BITSCAN_WINDOWS - template - GLM_FUNC_QUALIFIER int compute_findMSB_32(genIUType Value) - { - unsigned long Result(0); - unsigned char IsNotNull = _BitScanReverse(&Result, *reinterpret_cast(&Value)); - return IsNotNull ? int(Result) : -1; - } - - template - struct compute_findMSB_vec - { - GLM_FUNC_QUALIFIER static vec call(vec const& x) - { - return detail::functor1::call(compute_findMSB_32, x); - } - }; - -# if !((GLM_COMPILER & GLM_COMPILER_VC) && (GLM_MODEL == GLM_MODEL_32)) - template - GLM_FUNC_QUALIFIER int compute_findMSB_64(genIUType Value) - { - unsigned long Result(0); - unsigned char IsNotNull = _BitScanReverse64(&Result, *reinterpret_cast(&Value)); - return IsNotNull ? 
int(Result) : -1; - } - - template - struct compute_findMSB_vec - { - GLM_FUNC_QUALIFIER static vec call(vec const& x) - { - return detail::functor1::call(compute_findMSB_64, x); - } - }; -# endif -# endif//GLM_HAS_BITSCAN_WINDOWS -}//namespace detail - - // uaddCarry - GLM_FUNC_QUALIFIER uint uaddCarry(uint const& x, uint const& y, uint & Carry) - { - detail::uint64 const Value64(static_cast(x) + static_cast(y)); - detail::uint64 const Max32((static_cast(1) << static_cast(32)) - static_cast(1)); - Carry = Value64 > Max32 ? 1u : 0u; - return static_cast(Value64 % (Max32 + static_cast(1))); - } - - template - GLM_FUNC_QUALIFIER vec uaddCarry(vec const& x, vec const& y, vec& Carry) - { - vec Value64(vec(x) + vec(y)); - vec Max32((static_cast(1) << static_cast(32)) - static_cast(1)); - Carry = mix(vec(0), vec(1), greaterThan(Value64, Max32)); - return vec(Value64 % (Max32 + static_cast(1))); - } - - // usubBorrow - GLM_FUNC_QUALIFIER uint usubBorrow(uint const& x, uint const& y, uint & Borrow) - { - Borrow = x >= y ? static_cast(0) : static_cast(1); - if(y >= x) - return y - x; - else - return static_cast((static_cast(1) << static_cast(32)) + (static_cast(y) - static_cast(x))); - } - - template - GLM_FUNC_QUALIFIER vec usubBorrow(vec const& x, vec const& y, vec& Borrow) - { - Borrow = mix(vec(1), vec(0), greaterThanEqual(x, y)); - vec const YgeX(y - x); - vec const XgeY(vec((static_cast(1) << static_cast(32)) + (vec(y) - vec(x)))); - return mix(XgeY, YgeX, greaterThanEqual(y, x)); - } - - // umulExtended - GLM_FUNC_QUALIFIER void umulExtended(uint const& x, uint const& y, uint & msb, uint & lsb) - { - detail::uint64 Value64 = static_cast(x) * static_cast(y); - msb = static_cast(Value64 >> static_cast(32)); - lsb = static_cast(Value64); - } - - template - GLM_FUNC_QUALIFIER void umulExtended(vec const& x, vec const& y, vec& msb, vec& lsb) - { - vec Value64(vec(x) * vec(y)); - msb = vec(Value64 >> static_cast(32)); - lsb = vec(Value64); - } - - // imulExtended - GLM_FUNC_QUALIFIER void imulExtended(int x, int y, int& msb, int& lsb) - { - detail::int64 Value64 = static_cast(x) * static_cast(y); - msb = static_cast(Value64 >> static_cast(32)); - lsb = static_cast(Value64); - } - - template - GLM_FUNC_QUALIFIER void imulExtended(vec const& x, vec const& y, vec& msb, vec& lsb) - { - vec Value64(vec(x) * vec(y)); - lsb = vec(Value64 & static_cast(0xFFFFFFFF)); - msb = vec((Value64 >> static_cast(32)) & static_cast(0xFFFFFFFF)); - } - - // bitfieldExtract - template - GLM_FUNC_QUALIFIER genIUType bitfieldExtract(genIUType Value, int Offset, int Bits) - { - return bitfieldExtract(vec<1, genIUType>(Value), Offset, Bits).x; - } - - template - GLM_FUNC_QUALIFIER vec bitfieldExtract(vec const& Value, int Offset, int Bits) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_integer, "'bitfieldExtract' only accept integer inputs"); - - return (Value >> static_cast(Offset)) & static_cast(detail::mask(Bits)); - } - - // bitfieldInsert - template - GLM_FUNC_QUALIFIER genIUType bitfieldInsert(genIUType const& Base, genIUType const& Insert, int Offset, int Bits) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_integer, "'bitfieldInsert' only accept integer values"); - - return bitfieldInsert(vec<1, genIUType>(Base), vec<1, genIUType>(Insert), Offset, Bits).x; - } - - template - GLM_FUNC_QUALIFIER vec bitfieldInsert(vec const& Base, vec const& Insert, int Offset, int Bits) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_integer, "'bitfieldInsert' only accept integer values"); - - T const Mask = 
static_cast(detail::mask(Bits) << Offset); - return (Base & ~Mask) | ((Insert << static_cast(Offset)) & Mask); - } - - // bitfieldReverse - template - GLM_FUNC_QUALIFIER genIUType bitfieldReverse(genIUType x) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_integer, "'bitfieldReverse' only accept integer values"); - - return bitfieldReverse(glm::vec<1, genIUType, glm::defaultp>(x)).x; - } - - template - GLM_FUNC_QUALIFIER vec bitfieldReverse(vec const& v) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_integer, "'bitfieldReverse' only accept integer values"); - - vec x(v); - x = detail::compute_bitfieldReverseStep::value, sizeof(T) * 8>= 2>::call(x, static_cast(0x5555555555555555ull), static_cast( 1)); - x = detail::compute_bitfieldReverseStep::value, sizeof(T) * 8>= 4>::call(x, static_cast(0x3333333333333333ull), static_cast( 2)); - x = detail::compute_bitfieldReverseStep::value, sizeof(T) * 8>= 8>::call(x, static_cast(0x0F0F0F0F0F0F0F0Full), static_cast( 4)); - x = detail::compute_bitfieldReverseStep::value, sizeof(T) * 8>= 16>::call(x, static_cast(0x00FF00FF00FF00FFull), static_cast( 8)); - x = detail::compute_bitfieldReverseStep::value, sizeof(T) * 8>= 32>::call(x, static_cast(0x0000FFFF0000FFFFull), static_cast(16)); - x = detail::compute_bitfieldReverseStep::value, sizeof(T) * 8>= 64>::call(x, static_cast(0x00000000FFFFFFFFull), static_cast(32)); - return x; - } - - // bitCount - template - GLM_FUNC_QUALIFIER int bitCount(genIUType x) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_integer, "'bitCount' only accept integer values"); - - return bitCount(glm::vec<1, genIUType, glm::defaultp>(x)).x; - } - - template - GLM_FUNC_QUALIFIER vec bitCount(vec const& v) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_integer, "'bitCount' only accept integer values"); - -# if GLM_COMPILER & GLM_COMPILER_VC -# pragma warning(push) -# pragma warning(disable : 4310) //cast truncates constant value -# endif - - vec::type, Q> x(*reinterpret_cast::type, Q> const *>(&v)); - x = detail::compute_bitfieldBitCountStep::type, Q, detail::is_aligned::value, sizeof(T) * 8>= 2>::call(x, typename detail::make_unsigned::type(0x5555555555555555ull), typename detail::make_unsigned::type( 1)); - x = detail::compute_bitfieldBitCountStep::type, Q, detail::is_aligned::value, sizeof(T) * 8>= 4>::call(x, typename detail::make_unsigned::type(0x3333333333333333ull), typename detail::make_unsigned::type( 2)); - x = detail::compute_bitfieldBitCountStep::type, Q, detail::is_aligned::value, sizeof(T) * 8>= 8>::call(x, typename detail::make_unsigned::type(0x0F0F0F0F0F0F0F0Full), typename detail::make_unsigned::type( 4)); - x = detail::compute_bitfieldBitCountStep::type, Q, detail::is_aligned::value, sizeof(T) * 8>= 16>::call(x, typename detail::make_unsigned::type(0x00FF00FF00FF00FFull), typename detail::make_unsigned::type( 8)); - x = detail::compute_bitfieldBitCountStep::type, Q, detail::is_aligned::value, sizeof(T) * 8>= 32>::call(x, typename detail::make_unsigned::type(0x0000FFFF0000FFFFull), typename detail::make_unsigned::type(16)); - x = detail::compute_bitfieldBitCountStep::type, Q, detail::is_aligned::value, sizeof(T) * 8>= 64>::call(x, typename detail::make_unsigned::type(0x00000000FFFFFFFFull), typename detail::make_unsigned::type(32)); - return vec(x); - -# if GLM_COMPILER & GLM_COMPILER_VC -# pragma warning(pop) -# endif - } - - // findLSB - template - GLM_FUNC_QUALIFIER int findLSB(genIUType Value) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_integer, "'findLSB' only accept integer values"); - - return 
detail::compute_findLSB::call(Value); - } - - template - GLM_FUNC_QUALIFIER vec findLSB(vec const& x) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_integer, "'findLSB' only accept integer values"); - - return detail::functor1::call(findLSB, x); - } - - // findMSB - template - GLM_FUNC_QUALIFIER int findMSB(genIUType v) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_integer, "'findMSB' only accept integer values"); - - return findMSB(vec<1, genIUType>(v)).x; - } - - template - GLM_FUNC_QUALIFIER vec findMSB(vec const& v) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_integer, "'findMSB' only accept integer values"); - - return detail::compute_findMSB_vec::call(v); - } -}//namespace glm - -#if GLM_CONFIG_SIMD == GLM_ENABLE -# include "func_integer_simd.inl" -#endif - diff --git a/third_party/glm/detail/func_integer_simd.inl b/third_party/glm/detail/func_integer_simd.inl deleted file mode 100755 index 8be6c9c..0000000 --- a/third_party/glm/detail/func_integer_simd.inl +++ /dev/null @@ -1,65 +0,0 @@ -#include "../simd/integer.h" - -#if GLM_ARCH & GLM_ARCH_SSE2_BIT - -namespace glm{ -namespace detail -{ - template - struct compute_bitfieldReverseStep<4, uint, Q, true, true> - { - GLM_FUNC_QUALIFIER static vec<4, uint, Q> call(vec<4, uint, Q> const& v, uint Mask, uint Shift) - { - __m128i const set0 = v.data; - - __m128i const set1 = _mm_set1_epi32(static_cast(Mask)); - __m128i const and1 = _mm_and_si128(set0, set1); - __m128i const sft1 = _mm_slli_epi32(and1, Shift); - - __m128i const set2 = _mm_andnot_si128(set0, _mm_set1_epi32(-1)); - __m128i const and2 = _mm_and_si128(set0, set2); - __m128i const sft2 = _mm_srai_epi32(and2, Shift); - - __m128i const or0 = _mm_or_si128(sft1, sft2); - - return or0; - } - }; - - template - struct compute_bitfieldBitCountStep<4, uint, Q, true, true> - { - GLM_FUNC_QUALIFIER static vec<4, uint, Q> call(vec<4, uint, Q> const& v, uint Mask, uint Shift) - { - __m128i const set0 = v.data; - - __m128i const set1 = _mm_set1_epi32(static_cast(Mask)); - __m128i const and0 = _mm_and_si128(set0, set1); - __m128i const sft0 = _mm_slli_epi32(set0, Shift); - __m128i const and1 = _mm_and_si128(sft0, set1); - __m128i const add0 = _mm_add_epi32(and0, and1); - - return add0; - } - }; -}//namespace detail - -# if GLM_ARCH & GLM_ARCH_AVX_BIT - template<> - GLM_FUNC_QUALIFIER int bitCount(uint x) - { - return _mm_popcnt_u32(x); - } - -# if(GLM_MODEL == GLM_MODEL_64) - template<> - GLM_FUNC_QUALIFIER int bitCount(detail::uint64 x) - { - return static_cast(_mm_popcnt_u64(x)); - } -# endif//GLM_MODEL -# endif//GLM_ARCH - -}//namespace glm - -#endif//GLM_ARCH & GLM_ARCH_SSE2_BIT diff --git a/third_party/glm/detail/func_matrix.inl b/third_party/glm/detail/func_matrix.inl deleted file mode 100755 index d980c6d..0000000 --- a/third_party/glm/detail/func_matrix.inl +++ /dev/null @@ -1,398 +0,0 @@ -#include "../geometric.hpp" -#include - -namespace glm{ -namespace detail -{ - template - struct compute_matrixCompMult - { - GLM_FUNC_QUALIFIER static mat call(mat const& x, mat const& y) - { - mat Result; - for(length_t i = 0; i < Result.length(); ++i) - Result[i] = x[i] * y[i]; - return Result; - } - }; - - template - struct compute_transpose{}; - - template - struct compute_transpose<2, 2, T, Q, Aligned> - { - GLM_FUNC_QUALIFIER static mat<2, 2, T, Q> call(mat<2, 2, T, Q> const& m) - { - mat<2, 2, T, Q> Result; - Result[0][0] = m[0][0]; - Result[0][1] = m[1][0]; - Result[1][0] = m[0][1]; - Result[1][1] = m[1][1]; - return Result; - } - }; - - template - struct compute_transpose<2, 3, T, Q, 
Aligned> - { - GLM_FUNC_QUALIFIER static mat<3, 2, T, Q> call(mat<2, 3, T, Q> const& m) - { - mat<3,2, T, Q> Result; - Result[0][0] = m[0][0]; - Result[0][1] = m[1][0]; - Result[1][0] = m[0][1]; - Result[1][1] = m[1][1]; - Result[2][0] = m[0][2]; - Result[2][1] = m[1][2]; - return Result; - } - }; - - template - struct compute_transpose<2, 4, T, Q, Aligned> - { - GLM_FUNC_QUALIFIER static mat<4, 2, T, Q> call(mat<2, 4, T, Q> const& m) - { - mat<4, 2, T, Q> Result; - Result[0][0] = m[0][0]; - Result[0][1] = m[1][0]; - Result[1][0] = m[0][1]; - Result[1][1] = m[1][1]; - Result[2][0] = m[0][2]; - Result[2][1] = m[1][2]; - Result[3][0] = m[0][3]; - Result[3][1] = m[1][3]; - return Result; - } - }; - - template - struct compute_transpose<3, 2, T, Q, Aligned> - { - GLM_FUNC_QUALIFIER static mat<2, 3, T, Q> call(mat<3, 2, T, Q> const& m) - { - mat<2, 3, T, Q> Result; - Result[0][0] = m[0][0]; - Result[0][1] = m[1][0]; - Result[0][2] = m[2][0]; - Result[1][0] = m[0][1]; - Result[1][1] = m[1][1]; - Result[1][2] = m[2][1]; - return Result; - } - }; - - template - struct compute_transpose<3, 3, T, Q, Aligned> - { - GLM_FUNC_QUALIFIER static mat<3, 3, T, Q> call(mat<3, 3, T, Q> const& m) - { - mat<3, 3, T, Q> Result; - Result[0][0] = m[0][0]; - Result[0][1] = m[1][0]; - Result[0][2] = m[2][0]; - - Result[1][0] = m[0][1]; - Result[1][1] = m[1][1]; - Result[1][2] = m[2][1]; - - Result[2][0] = m[0][2]; - Result[2][1] = m[1][2]; - Result[2][2] = m[2][2]; - return Result; - } - }; - - template - struct compute_transpose<3, 4, T, Q, Aligned> - { - GLM_FUNC_QUALIFIER static mat<4, 3, T, Q> call(mat<3, 4, T, Q> const& m) - { - mat<4, 3, T, Q> Result; - Result[0][0] = m[0][0]; - Result[0][1] = m[1][0]; - Result[0][2] = m[2][0]; - Result[1][0] = m[0][1]; - Result[1][1] = m[1][1]; - Result[1][2] = m[2][1]; - Result[2][0] = m[0][2]; - Result[2][1] = m[1][2]; - Result[2][2] = m[2][2]; - Result[3][0] = m[0][3]; - Result[3][1] = m[1][3]; - Result[3][2] = m[2][3]; - return Result; - } - }; - - template - struct compute_transpose<4, 2, T, Q, Aligned> - { - GLM_FUNC_QUALIFIER static mat<2, 4, T, Q> call(mat<4, 2, T, Q> const& m) - { - mat<2, 4, T, Q> Result; - Result[0][0] = m[0][0]; - Result[0][1] = m[1][0]; - Result[0][2] = m[2][0]; - Result[0][3] = m[3][0]; - Result[1][0] = m[0][1]; - Result[1][1] = m[1][1]; - Result[1][2] = m[2][1]; - Result[1][3] = m[3][1]; - return Result; - } - }; - - template - struct compute_transpose<4, 3, T, Q, Aligned> - { - GLM_FUNC_QUALIFIER static mat<3, 4, T, Q> call(mat<4, 3, T, Q> const& m) - { - mat<3, 4, T, Q> Result; - Result[0][0] = m[0][0]; - Result[0][1] = m[1][0]; - Result[0][2] = m[2][0]; - Result[0][3] = m[3][0]; - Result[1][0] = m[0][1]; - Result[1][1] = m[1][1]; - Result[1][2] = m[2][1]; - Result[1][3] = m[3][1]; - Result[2][0] = m[0][2]; - Result[2][1] = m[1][2]; - Result[2][2] = m[2][2]; - Result[2][3] = m[3][2]; - return Result; - } - }; - - template - struct compute_transpose<4, 4, T, Q, Aligned> - { - GLM_FUNC_QUALIFIER static mat<4, 4, T, Q> call(mat<4, 4, T, Q> const& m) - { - mat<4, 4, T, Q> Result; - Result[0][0] = m[0][0]; - Result[0][1] = m[1][0]; - Result[0][2] = m[2][0]; - Result[0][3] = m[3][0]; - - Result[1][0] = m[0][1]; - Result[1][1] = m[1][1]; - Result[1][2] = m[2][1]; - Result[1][3] = m[3][1]; - - Result[2][0] = m[0][2]; - Result[2][1] = m[1][2]; - Result[2][2] = m[2][2]; - Result[2][3] = m[3][2]; - - Result[3][0] = m[0][3]; - Result[3][1] = m[1][3]; - Result[3][2] = m[2][3]; - Result[3][3] = m[3][3]; - return Result; - } - }; - - template - struct 
compute_determinant{}; - - template - struct compute_determinant<2, 2, T, Q, Aligned> - { - GLM_FUNC_QUALIFIER static T call(mat<2, 2, T, Q> const& m) - { - return m[0][0] * m[1][1] - m[1][0] * m[0][1]; - } - }; - - template - struct compute_determinant<3, 3, T, Q, Aligned> - { - GLM_FUNC_QUALIFIER static T call(mat<3, 3, T, Q> const& m) - { - return - + m[0][0] * (m[1][1] * m[2][2] - m[2][1] * m[1][2]) - - m[1][0] * (m[0][1] * m[2][2] - m[2][1] * m[0][2]) - + m[2][0] * (m[0][1] * m[1][2] - m[1][1] * m[0][2]); - } - }; - - template - struct compute_determinant<4, 4, T, Q, Aligned> - { - GLM_FUNC_QUALIFIER static T call(mat<4, 4, T, Q> const& m) - { - T SubFactor00 = m[2][2] * m[3][3] - m[3][2] * m[2][3]; - T SubFactor01 = m[2][1] * m[3][3] - m[3][1] * m[2][3]; - T SubFactor02 = m[2][1] * m[3][2] - m[3][1] * m[2][2]; - T SubFactor03 = m[2][0] * m[3][3] - m[3][0] * m[2][3]; - T SubFactor04 = m[2][0] * m[3][2] - m[3][0] * m[2][2]; - T SubFactor05 = m[2][0] * m[3][1] - m[3][0] * m[2][1]; - - vec<4, T, Q> DetCof( - + (m[1][1] * SubFactor00 - m[1][2] * SubFactor01 + m[1][3] * SubFactor02), - - (m[1][0] * SubFactor00 - m[1][2] * SubFactor03 + m[1][3] * SubFactor04), - + (m[1][0] * SubFactor01 - m[1][1] * SubFactor03 + m[1][3] * SubFactor05), - - (m[1][0] * SubFactor02 - m[1][1] * SubFactor04 + m[1][2] * SubFactor05)); - - return - m[0][0] * DetCof[0] + m[0][1] * DetCof[1] + - m[0][2] * DetCof[2] + m[0][3] * DetCof[3]; - } - }; - - template - struct compute_inverse{}; - - template - struct compute_inverse<2, 2, T, Q, Aligned> - { - GLM_FUNC_QUALIFIER static mat<2, 2, T, Q> call(mat<2, 2, T, Q> const& m) - { - T OneOverDeterminant = static_cast(1) / ( - + m[0][0] * m[1][1] - - m[1][0] * m[0][1]); - - mat<2, 2, T, Q> Inverse( - + m[1][1] * OneOverDeterminant, - - m[0][1] * OneOverDeterminant, - - m[1][0] * OneOverDeterminant, - + m[0][0] * OneOverDeterminant); - - return Inverse; - } - }; - - template - struct compute_inverse<3, 3, T, Q, Aligned> - { - GLM_FUNC_QUALIFIER static mat<3, 3, T, Q> call(mat<3, 3, T, Q> const& m) - { - T OneOverDeterminant = static_cast(1) / ( - + m[0][0] * (m[1][1] * m[2][2] - m[2][1] * m[1][2]) - - m[1][0] * (m[0][1] * m[2][2] - m[2][1] * m[0][2]) - + m[2][0] * (m[0][1] * m[1][2] - m[1][1] * m[0][2])); - - mat<3, 3, T, Q> Inverse; - Inverse[0][0] = + (m[1][1] * m[2][2] - m[2][1] * m[1][2]) * OneOverDeterminant; - Inverse[1][0] = - (m[1][0] * m[2][2] - m[2][0] * m[1][2]) * OneOverDeterminant; - Inverse[2][0] = + (m[1][0] * m[2][1] - m[2][0] * m[1][1]) * OneOverDeterminant; - Inverse[0][1] = - (m[0][1] * m[2][2] - m[2][1] * m[0][2]) * OneOverDeterminant; - Inverse[1][1] = + (m[0][0] * m[2][2] - m[2][0] * m[0][2]) * OneOverDeterminant; - Inverse[2][1] = - (m[0][0] * m[2][1] - m[2][0] * m[0][1]) * OneOverDeterminant; - Inverse[0][2] = + (m[0][1] * m[1][2] - m[1][1] * m[0][2]) * OneOverDeterminant; - Inverse[1][2] = - (m[0][0] * m[1][2] - m[1][0] * m[0][2]) * OneOverDeterminant; - Inverse[2][2] = + (m[0][0] * m[1][1] - m[1][0] * m[0][1]) * OneOverDeterminant; - - return Inverse; - } - }; - - template - struct compute_inverse<4, 4, T, Q, Aligned> - { - GLM_FUNC_QUALIFIER static mat<4, 4, T, Q> call(mat<4, 4, T, Q> const& m) - { - T Coef00 = m[2][2] * m[3][3] - m[3][2] * m[2][3]; - T Coef02 = m[1][2] * m[3][3] - m[3][2] * m[1][3]; - T Coef03 = m[1][2] * m[2][3] - m[2][2] * m[1][3]; - - T Coef04 = m[2][1] * m[3][3] - m[3][1] * m[2][3]; - T Coef06 = m[1][1] * m[3][3] - m[3][1] * m[1][3]; - T Coef07 = m[1][1] * m[2][3] - m[2][1] * m[1][3]; - - T Coef08 = m[2][1] * m[3][2] - 
m[3][1] * m[2][2]; - T Coef10 = m[1][1] * m[3][2] - m[3][1] * m[1][2]; - T Coef11 = m[1][1] * m[2][2] - m[2][1] * m[1][2]; - - T Coef12 = m[2][0] * m[3][3] - m[3][0] * m[2][3]; - T Coef14 = m[1][0] * m[3][3] - m[3][0] * m[1][3]; - T Coef15 = m[1][0] * m[2][3] - m[2][0] * m[1][3]; - - T Coef16 = m[2][0] * m[3][2] - m[3][0] * m[2][2]; - T Coef18 = m[1][0] * m[3][2] - m[3][0] * m[1][2]; - T Coef19 = m[1][0] * m[2][2] - m[2][0] * m[1][2]; - - T Coef20 = m[2][0] * m[3][1] - m[3][0] * m[2][1]; - T Coef22 = m[1][0] * m[3][1] - m[3][0] * m[1][1]; - T Coef23 = m[1][0] * m[2][1] - m[2][0] * m[1][1]; - - vec<4, T, Q> Fac0(Coef00, Coef00, Coef02, Coef03); - vec<4, T, Q> Fac1(Coef04, Coef04, Coef06, Coef07); - vec<4, T, Q> Fac2(Coef08, Coef08, Coef10, Coef11); - vec<4, T, Q> Fac3(Coef12, Coef12, Coef14, Coef15); - vec<4, T, Q> Fac4(Coef16, Coef16, Coef18, Coef19); - vec<4, T, Q> Fac5(Coef20, Coef20, Coef22, Coef23); - - vec<4, T, Q> Vec0(m[1][0], m[0][0], m[0][0], m[0][0]); - vec<4, T, Q> Vec1(m[1][1], m[0][1], m[0][1], m[0][1]); - vec<4, T, Q> Vec2(m[1][2], m[0][2], m[0][2], m[0][2]); - vec<4, T, Q> Vec3(m[1][3], m[0][3], m[0][3], m[0][3]); - - vec<4, T, Q> Inv0(Vec1 * Fac0 - Vec2 * Fac1 + Vec3 * Fac2); - vec<4, T, Q> Inv1(Vec0 * Fac0 - Vec2 * Fac3 + Vec3 * Fac4); - vec<4, T, Q> Inv2(Vec0 * Fac1 - Vec1 * Fac3 + Vec3 * Fac5); - vec<4, T, Q> Inv3(Vec0 * Fac2 - Vec1 * Fac4 + Vec2 * Fac5); - - vec<4, T, Q> SignA(+1, -1, +1, -1); - vec<4, T, Q> SignB(-1, +1, -1, +1); - mat<4, 4, T, Q> Inverse(Inv0 * SignA, Inv1 * SignB, Inv2 * SignA, Inv3 * SignB); - - vec<4, T, Q> Row0(Inverse[0][0], Inverse[1][0], Inverse[2][0], Inverse[3][0]); - - vec<4, T, Q> Dot0(m[0] * Row0); - T Dot1 = (Dot0.x + Dot0.y) + (Dot0.z + Dot0.w); - - T OneOverDeterminant = static_cast(1) / Dot1; - - return Inverse * OneOverDeterminant; - } - }; -}//namespace detail - - template - GLM_FUNC_QUALIFIER mat matrixCompMult(mat const& x, mat const& y) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559 || GLM_CONFIG_UNRESTRICTED_GENTYPE, "'matrixCompMult' only accept floating-point inputs"); - return detail::compute_matrixCompMult::value>::call(x, y); - } - - template - GLM_FUNC_QUALIFIER typename detail::outerProduct_trait::type outerProduct(vec const& c, vec const& r) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559 || GLM_CONFIG_UNRESTRICTED_GENTYPE, "'outerProduct' only accept floating-point inputs"); - - typename detail::outerProduct_trait::type m; - for(length_t i = 0; i < m.length(); ++i) - m[i] = c * r[i]; - return m; - } - - template - GLM_FUNC_QUALIFIER typename mat::transpose_type transpose(mat const& m) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559 || GLM_CONFIG_UNRESTRICTED_GENTYPE, "'transpose' only accept floating-point inputs"); - return detail::compute_transpose::value>::call(m); - } - - template - GLM_FUNC_QUALIFIER T determinant(mat const& m) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559 || GLM_CONFIG_UNRESTRICTED_GENTYPE, "'determinant' only accept floating-point inputs"); - return detail::compute_determinant::value>::call(m); - } - - template - GLM_FUNC_QUALIFIER mat inverse(mat const& m) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559 || GLM_CONFIG_UNRESTRICTED_GENTYPE, "'inverse' only accept floating-point inputs"); - return detail::compute_inverse::value>::call(m); - } -}//namespace glm - -#if GLM_CONFIG_SIMD == GLM_ENABLE -# include "func_matrix_simd.inl" -#endif - diff --git a/third_party/glm/detail/func_matrix_simd.inl b/third_party/glm/detail/func_matrix_simd.inl deleted file mode 
100755 index f67ac66..0000000 --- a/third_party/glm/detail/func_matrix_simd.inl +++ /dev/null @@ -1,249 +0,0 @@ -#if GLM_ARCH & GLM_ARCH_SSE2_BIT - -#include "type_mat4x4.hpp" -#include "../geometric.hpp" -#include "../simd/matrix.h" -#include - -namespace glm{ -namespace detail -{ -# if GLM_CONFIG_ALIGNED_GENTYPES == GLM_ENABLE - template - struct compute_matrixCompMult<4, 4, float, Q, true> - { - GLM_STATIC_ASSERT(detail::is_aligned::value, "Specialization requires aligned"); - - GLM_FUNC_QUALIFIER static mat<4, 4, float, Q> call(mat<4, 4, float, Q> const& x, mat<4, 4, float, Q> const& y) - { - mat<4, 4, float, Q> Result; - glm_mat4_matrixCompMult( - *static_cast(&x[0].data), - *static_cast(&y[0].data), - *static_cast(&Result[0].data)); - return Result; - } - }; -# endif - - template - struct compute_transpose<4, 4, float, Q, true> - { - GLM_FUNC_QUALIFIER static mat<4, 4, float, Q> call(mat<4, 4, float, Q> const& m) - { - mat<4, 4, float, Q> Result; - glm_mat4_transpose(&m[0].data, &Result[0].data); - return Result; - } - }; - - template - struct compute_determinant<4, 4, float, Q, true> - { - GLM_FUNC_QUALIFIER static float call(mat<4, 4, float, Q> const& m) - { - return _mm_cvtss_f32(glm_mat4_determinant(&m[0].data)); - } - }; - - template - struct compute_inverse<4, 4, float, Q, true> - { - GLM_FUNC_QUALIFIER static mat<4, 4, float, Q> call(mat<4, 4, float, Q> const& m) - { - mat<4, 4, float, Q> Result; - glm_mat4_inverse(&m[0].data, &Result[0].data); - return Result; - } - }; -}//namespace detail - -# if GLM_CONFIG_ALIGNED_GENTYPES == GLM_ENABLE - template<> - GLM_FUNC_QUALIFIER mat<4, 4, float, aligned_lowp> outerProduct<4, 4, float, aligned_lowp>(vec<4, float, aligned_lowp> const& c, vec<4, float, aligned_lowp> const& r) - { - __m128 NativeResult[4]; - glm_mat4_outerProduct(c.data, r.data, NativeResult); - mat<4, 4, float, aligned_lowp> Result; - std::memcpy(&Result[0], &NativeResult[0], sizeof(Result)); - return Result; - } - - template<> - GLM_FUNC_QUALIFIER mat<4, 4, float, aligned_mediump> outerProduct<4, 4, float, aligned_mediump>(vec<4, float, aligned_mediump> const& c, vec<4, float, aligned_mediump> const& r) - { - __m128 NativeResult[4]; - glm_mat4_outerProduct(c.data, r.data, NativeResult); - mat<4, 4, float, aligned_mediump> Result; - std::memcpy(&Result[0], &NativeResult[0], sizeof(Result)); - return Result; - } - - template<> - GLM_FUNC_QUALIFIER mat<4, 4, float, aligned_highp> outerProduct<4, 4, float, aligned_highp>(vec<4, float, aligned_highp> const& c, vec<4, float, aligned_highp> const& r) - { - __m128 NativeResult[4]; - glm_mat4_outerProduct(c.data, r.data, NativeResult); - mat<4, 4, float, aligned_highp> Result; - std::memcpy(&Result[0], &NativeResult[0], sizeof(Result)); - return Result; - } -# endif -}//namespace glm - -#elif GLM_ARCH & GLM_ARCH_NEON_BIT - -namespace glm { -#if GLM_LANG & GLM_LANG_CXX11_FLAG - template - GLM_FUNC_QUALIFIER - typename std::enable_if::value, mat<4, 4, float, Q>>::type - operator*(mat<4, 4, float, Q> const & m1, mat<4, 4, float, Q> const & m2) - { - auto MulRow = [&](int l) { - float32x4_t const SrcA = m2[l].data; - - float32x4_t r = neon::mul_lane(m1[0].data, SrcA, 0); - r = neon::madd_lane(r, m1[1].data, SrcA, 1); - r = neon::madd_lane(r, m1[2].data, SrcA, 2); - r = neon::madd_lane(r, m1[3].data, SrcA, 3); - - return r; - }; - - mat<4, 4, float, aligned_highp> Result; - Result[0].data = MulRow(0); - Result[1].data = MulRow(1); - Result[2].data = MulRow(2); - Result[3].data = MulRow(3); - - return Result; - } -#endif // CXX11 - 
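[Editor's note: the SSE2 and NEON specializations removed in this hunk are reached only through the public glm entry points; user code never names detail::compute_* directly. A minimal usage sketch of those entry points, assuming the vendored GLM headers remain on the include path (illustrative only, not part of this patch):]

// Sketch: glm::transpose / glm::determinant / glm::inverse forward to the
// detail::compute_* helpers deleted above; with an aligned qualifier and
// GLM_CONFIG_SIMD enabled they resolve to the SSE2/NEON specializations instead.
#include <glm/glm.hpp>
#include <cmath>

int main()
{
    glm::mat4 m(1.0f);                        // identity
    m[3] = glm::vec4(1.0f, 2.0f, 3.0f, 1.0f); // translation in the last column

    glm::mat4 const inv = glm::inverse(m);     // undoes the translation
    glm::mat4 const id  = m * inv;             // numerically the identity again
    float const     det = glm::determinant(m); // 1 for a pure translation

    bool const ok = std::fabs(det - 1.0f) < 1e-6f && std::fabs(id[3][0]) < 1e-6f;
    return ok ? 0 : 1;
}
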
- template - struct detail::compute_inverse<4, 4, float, Q, true> - { - GLM_FUNC_QUALIFIER static mat<4, 4, float, Q> call(mat<4, 4, float, Q> const& m) - { - float32x4_t const& m0 = m[0].data; - float32x4_t const& m1 = m[1].data; - float32x4_t const& m2 = m[2].data; - float32x4_t const& m3 = m[3].data; - - // m[2][2] * m[3][3] - m[3][2] * m[2][3]; - // m[2][2] * m[3][3] - m[3][2] * m[2][3]; - // m[1][2] * m[3][3] - m[3][2] * m[1][3]; - // m[1][2] * m[2][3] - m[2][2] * m[1][3]; - - float32x4_t Fac0; - { - float32x4_t w0 = vcombine_f32(neon::dup_lane(m2, 2), neon::dup_lane(m1, 2)); - float32x4_t w1 = neon::copy_lane(neon::dupq_lane(m3, 3), 3, m2, 3); - float32x4_t w2 = neon::copy_lane(neon::dupq_lane(m3, 2), 3, m2, 2); - float32x4_t w3 = vcombine_f32(neon::dup_lane(m2, 3), neon::dup_lane(m1, 3)); - Fac0 = w0 * w1 - w2 * w3; - } - - // m[2][1] * m[3][3] - m[3][1] * m[2][3]; - // m[2][1] * m[3][3] - m[3][1] * m[2][3]; - // m[1][1] * m[3][3] - m[3][1] * m[1][3]; - // m[1][1] * m[2][3] - m[2][1] * m[1][3]; - - float32x4_t Fac1; - { - float32x4_t w0 = vcombine_f32(neon::dup_lane(m2, 1), neon::dup_lane(m1, 1)); - float32x4_t w1 = neon::copy_lane(neon::dupq_lane(m3, 3), 3, m2, 3); - float32x4_t w2 = neon::copy_lane(neon::dupq_lane(m3, 1), 3, m2, 1); - float32x4_t w3 = vcombine_f32(neon::dup_lane(m2, 3), neon::dup_lane(m1, 3)); - Fac1 = w0 * w1 - w2 * w3; - } - - // m[2][1] * m[3][2] - m[3][1] * m[2][2]; - // m[2][1] * m[3][2] - m[3][1] * m[2][2]; - // m[1][1] * m[3][2] - m[3][1] * m[1][2]; - // m[1][1] * m[2][2] - m[2][1] * m[1][2]; - - float32x4_t Fac2; - { - float32x4_t w0 = vcombine_f32(neon::dup_lane(m2, 1), neon::dup_lane(m1, 1)); - float32x4_t w1 = neon::copy_lane(neon::dupq_lane(m3, 2), 3, m2, 2); - float32x4_t w2 = neon::copy_lane(neon::dupq_lane(m3, 1), 3, m2, 1); - float32x4_t w3 = vcombine_f32(neon::dup_lane(m2, 2), neon::dup_lane(m1, 2)); - Fac2 = w0 * w1 - w2 * w3; - } - - // m[2][0] * m[3][3] - m[3][0] * m[2][3]; - // m[2][0] * m[3][3] - m[3][0] * m[2][3]; - // m[1][0] * m[3][3] - m[3][0] * m[1][3]; - // m[1][0] * m[2][3] - m[2][0] * m[1][3]; - - float32x4_t Fac3; - { - float32x4_t w0 = vcombine_f32(neon::dup_lane(m2, 0), neon::dup_lane(m1, 0)); - float32x4_t w1 = neon::copy_lane(neon::dupq_lane(m3, 3), 3, m2, 3); - float32x4_t w2 = neon::copy_lane(neon::dupq_lane(m3, 0), 3, m2, 0); - float32x4_t w3 = vcombine_f32(neon::dup_lane(m2, 3), neon::dup_lane(m1, 3)); - Fac3 = w0 * w1 - w2 * w3; - } - - // m[2][0] * m[3][2] - m[3][0] * m[2][2]; - // m[2][0] * m[3][2] - m[3][0] * m[2][2]; - // m[1][0] * m[3][2] - m[3][0] * m[1][2]; - // m[1][0] * m[2][2] - m[2][0] * m[1][2]; - - float32x4_t Fac4; - { - float32x4_t w0 = vcombine_f32(neon::dup_lane(m2, 0), neon::dup_lane(m1, 0)); - float32x4_t w1 = neon::copy_lane(neon::dupq_lane(m3, 2), 3, m2, 2); - float32x4_t w2 = neon::copy_lane(neon::dupq_lane(m3, 0), 3, m2, 0); - float32x4_t w3 = vcombine_f32(neon::dup_lane(m2, 2), neon::dup_lane(m1, 2)); - Fac4 = w0 * w1 - w2 * w3; - } - - // m[2][0] * m[3][1] - m[3][0] * m[2][1]; - // m[2][0] * m[3][1] - m[3][0] * m[2][1]; - // m[1][0] * m[3][1] - m[3][0] * m[1][1]; - // m[1][0] * m[2][1] - m[2][0] * m[1][1]; - - float32x4_t Fac5; - { - float32x4_t w0 = vcombine_f32(neon::dup_lane(m2, 0), neon::dup_lane(m1, 0)); - float32x4_t w1 = neon::copy_lane(neon::dupq_lane(m3, 1), 3, m2, 1); - float32x4_t w2 = neon::copy_lane(neon::dupq_lane(m3, 0), 3, m2, 0); - float32x4_t w3 = vcombine_f32(neon::dup_lane(m2, 1), neon::dup_lane(m1, 1)); - Fac5 = w0 * w1 - w2 * w3; - } - - float32x4_t Vec0 = 
neon::copy_lane(neon::dupq_lane(m0, 0), 0, m1, 0); // (m[1][0], m[0][0], m[0][0], m[0][0]); - float32x4_t Vec1 = neon::copy_lane(neon::dupq_lane(m0, 1), 0, m1, 1); // (m[1][1], m[0][1], m[0][1], m[0][1]); - float32x4_t Vec2 = neon::copy_lane(neon::dupq_lane(m0, 2), 0, m1, 2); // (m[1][2], m[0][2], m[0][2], m[0][2]); - float32x4_t Vec3 = neon::copy_lane(neon::dupq_lane(m0, 3), 0, m1, 3); // (m[1][3], m[0][3], m[0][3], m[0][3]); - - float32x4_t Inv0 = Vec1 * Fac0 - Vec2 * Fac1 + Vec3 * Fac2; - float32x4_t Inv1 = Vec0 * Fac0 - Vec2 * Fac3 + Vec3 * Fac4; - float32x4_t Inv2 = Vec0 * Fac1 - Vec1 * Fac3 + Vec3 * Fac5; - float32x4_t Inv3 = Vec0 * Fac2 - Vec1 * Fac4 + Vec2 * Fac5; - - float32x4_t r0 = float32x4_t{-1, +1, -1, +1} * Inv0; - float32x4_t r1 = float32x4_t{+1, -1, +1, -1} * Inv1; - float32x4_t r2 = float32x4_t{-1, +1, -1, +1} * Inv2; - float32x4_t r3 = float32x4_t{+1, -1, +1, -1} * Inv3; - - float32x4_t det = neon::mul_lane(r0, m0, 0); - det = neon::madd_lane(det, r1, m0, 1); - det = neon::madd_lane(det, r2, m0, 2); - det = neon::madd_lane(det, r3, m0, 3); - - float32x4_t rdet = vdupq_n_f32(1 / vgetq_lane_f32(det, 0)); - - mat<4, 4, float, Q> r; - r[0].data = vmulq_f32(r0, rdet); - r[1].data = vmulq_f32(r1, rdet); - r[2].data = vmulq_f32(r2, rdet); - r[3].data = vmulq_f32(r3, rdet); - return r; - } - }; -}//namespace glm -#endif diff --git a/third_party/glm/detail/func_packing.inl b/third_party/glm/detail/func_packing.inl deleted file mode 100755 index 234b093..0000000 --- a/third_party/glm/detail/func_packing.inl +++ /dev/null @@ -1,189 +0,0 @@ -/// @ref core -/// @file glm/detail/func_packing.inl - -#include "../common.hpp" -#include "type_half.hpp" - -namespace glm -{ - GLM_FUNC_QUALIFIER uint packUnorm2x16(vec2 const& v) - { - union - { - unsigned short in[2]; - uint out; - } u; - - vec<2, unsigned short, defaultp> result(round(clamp(v, 0.0f, 1.0f) * 65535.0f)); - - u.in[0] = result[0]; - u.in[1] = result[1]; - - return u.out; - } - - GLM_FUNC_QUALIFIER vec2 unpackUnorm2x16(uint p) - { - union - { - uint in; - unsigned short out[2]; - } u; - - u.in = p; - - return vec2(u.out[0], u.out[1]) * 1.5259021896696421759365224689097e-5f; - } - - GLM_FUNC_QUALIFIER uint packSnorm2x16(vec2 const& v) - { - union - { - signed short in[2]; - uint out; - } u; - - vec<2, short, defaultp> result(round(clamp(v, -1.0f, 1.0f) * 32767.0f)); - - u.in[0] = result[0]; - u.in[1] = result[1]; - - return u.out; - } - - GLM_FUNC_QUALIFIER vec2 unpackSnorm2x16(uint p) - { - union - { - uint in; - signed short out[2]; - } u; - - u.in = p; - - return clamp(vec2(u.out[0], u.out[1]) * 3.0518509475997192297128208258309e-5f, -1.0f, 1.0f); - } - - GLM_FUNC_QUALIFIER uint packUnorm4x8(vec4 const& v) - { - union - { - unsigned char in[4]; - uint out; - } u; - - vec<4, unsigned char, defaultp> result(round(clamp(v, 0.0f, 1.0f) * 255.0f)); - - u.in[0] = result[0]; - u.in[1] = result[1]; - u.in[2] = result[2]; - u.in[3] = result[3]; - - return u.out; - } - - GLM_FUNC_QUALIFIER vec4 unpackUnorm4x8(uint p) - { - union - { - uint in; - unsigned char out[4]; - } u; - - u.in = p; - - return vec4(u.out[0], u.out[1], u.out[2], u.out[3]) * 0.0039215686274509803921568627451f; - } - - GLM_FUNC_QUALIFIER uint packSnorm4x8(vec4 const& v) - { - union - { - signed char in[4]; - uint out; - } u; - - vec<4, signed char, defaultp> result(round(clamp(v, -1.0f, 1.0f) * 127.0f)); - - u.in[0] = result[0]; - u.in[1] = result[1]; - u.in[2] = result[2]; - u.in[3] = result[3]; - - return u.out; - } - - GLM_FUNC_QUALIFIER glm::vec4 
unpackSnorm4x8(uint p) - { - union - { - uint in; - signed char out[4]; - } u; - - u.in = p; - - return clamp(vec4(u.out[0], u.out[1], u.out[2], u.out[3]) * 0.0078740157480315f, -1.0f, 1.0f); - } - - GLM_FUNC_QUALIFIER double packDouble2x32(uvec2 const& v) - { - union - { - uint in[2]; - double out; - } u; - - u.in[0] = v[0]; - u.in[1] = v[1]; - - return u.out; - } - - GLM_FUNC_QUALIFIER uvec2 unpackDouble2x32(double v) - { - union - { - double in; - uint out[2]; - } u; - - u.in = v; - - return uvec2(u.out[0], u.out[1]); - } - - GLM_FUNC_QUALIFIER uint packHalf2x16(vec2 const& v) - { - union - { - signed short in[2]; - uint out; - } u; - - u.in[0] = detail::toFloat16(v.x); - u.in[1] = detail::toFloat16(v.y); - - return u.out; - } - - GLM_FUNC_QUALIFIER vec2 unpackHalf2x16(uint v) - { - union - { - uint in; - signed short out[2]; - } u; - - u.in = v; - - return vec2( - detail::toFloat32(u.out[0]), - detail::toFloat32(u.out[1])); - } -}//namespace glm - -#if GLM_CONFIG_SIMD == GLM_ENABLE -# include "func_packing_simd.inl" -#endif - diff --git a/third_party/glm/detail/func_packing_simd.inl b/third_party/glm/detail/func_packing_simd.inl deleted file mode 100755 index fd0fe8b..0000000 --- a/third_party/glm/detail/func_packing_simd.inl +++ /dev/null @@ -1,6 +0,0 @@ -namespace glm{ -namespace detail -{ - -}//namespace detail -}//namespace glm diff --git a/third_party/glm/detail/func_trigonometric.inl b/third_party/glm/detail/func_trigonometric.inl deleted file mode 100755 index e129dce..0000000 --- a/third_party/glm/detail/func_trigonometric.inl +++ /dev/null @@ -1,197 +0,0 @@ -#include "_vectorize.hpp" -#include -#include - -namespace glm -{ - // radians - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR genType radians(genType degrees) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'radians' only accept floating-point input"); - - return degrees * static_cast(0.01745329251994329576923690768489); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec radians(vec const& v) - { - return detail::functor1::call(radians, v); - } - - // degrees - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR genType degrees(genType radians) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'degrees' only accept floating-point input"); - - return radians * static_cast(57.295779513082320876798154814105); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec degrees(vec const& v) - { - return detail::functor1::call(degrees, v); - } - - // sin - using ::std::sin; - - template - GLM_FUNC_QUALIFIER vec sin(vec const& v) - { - return detail::functor1::call(sin, v); - } - - // cos - using std::cos; - - template - GLM_FUNC_QUALIFIER vec cos(vec const& v) - { - return detail::functor1::call(cos, v); - } - - // tan - using std::tan; - - template - GLM_FUNC_QUALIFIER vec tan(vec const& v) - { - return detail::functor1::call(tan, v); - } - - // asin - using std::asin; - - template - GLM_FUNC_QUALIFIER vec asin(vec const& v) - { - return detail::functor1::call(asin, v); - } - - // acos - using std::acos; - - template - GLM_FUNC_QUALIFIER vec acos(vec const& v) - { - return detail::functor1::call(acos, v); - } - - // atan - template - GLM_FUNC_QUALIFIER genType atan(genType y, genType x) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'atan' only accept floating-point input"); - - return ::std::atan2(y, x); - } - - template - GLM_FUNC_QUALIFIER vec atan(vec const& a, vec const& b) - { - return detail::functor2::call(::std::atan2, a, b); - } - - using std::atan; - - template - GLM_FUNC_QUALIFIER vec atan(vec 
const& v) - { - return detail::functor1::call(atan, v); - } - - // sinh - using std::sinh; - - template - GLM_FUNC_QUALIFIER vec sinh(vec const& v) - { - return detail::functor1::call(sinh, v); - } - - // cosh - using std::cosh; - - template - GLM_FUNC_QUALIFIER vec cosh(vec const& v) - { - return detail::functor1::call(cosh, v); - } - - // tanh - using std::tanh; - - template - GLM_FUNC_QUALIFIER vec tanh(vec const& v) - { - return detail::functor1::call(tanh, v); - } - - // asinh -# if GLM_HAS_CXX11_STL - using std::asinh; -# else - template - GLM_FUNC_QUALIFIER genType asinh(genType x) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'asinh' only accept floating-point input"); - - return (x < static_cast(0) ? static_cast(-1) : (x > static_cast(0) ? static_cast(1) : static_cast(0))) * log(std::abs(x) + sqrt(static_cast(1) + x * x)); - } -# endif - - template - GLM_FUNC_QUALIFIER vec asinh(vec const& v) - { - return detail::functor1::call(asinh, v); - } - - // acosh -# if GLM_HAS_CXX11_STL - using std::acosh; -# else - template - GLM_FUNC_QUALIFIER genType acosh(genType x) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'acosh' only accept floating-point input"); - - if(x < static_cast(1)) - return static_cast(0); - return log(x + sqrt(x * x - static_cast(1))); - } -# endif - - template - GLM_FUNC_QUALIFIER vec acosh(vec const& v) - { - return detail::functor1::call(acosh, v); - } - - // atanh -# if GLM_HAS_CXX11_STL - using std::atanh; -# else - template - GLM_FUNC_QUALIFIER genType atanh(genType x) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'atanh' only accept floating-point input"); - - if(std::abs(x) >= static_cast(1)) - return 0; - return static_cast(0.5) * log((static_cast(1) + x) / (static_cast(1) - x)); - } -# endif - - template - GLM_FUNC_QUALIFIER vec atanh(vec const& v) - { - return detail::functor1::call(atanh, v); - } -}//namespace glm - -#if GLM_CONFIG_SIMD == GLM_ENABLE -# include "func_trigonometric_simd.inl" -#endif - diff --git a/third_party/glm/detail/func_trigonometric_simd.inl b/third_party/glm/detail/func_trigonometric_simd.inl deleted file mode 100755 index e69de29..0000000 diff --git a/third_party/glm/detail/func_vector_relational.inl b/third_party/glm/detail/func_vector_relational.inl deleted file mode 100755 index 80c9e87..0000000 --- a/third_party/glm/detail/func_vector_relational.inl +++ /dev/null @@ -1,87 +0,0 @@ -namespace glm -{ - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec lessThan(vec const& x, vec const& y) - { - vec Result(true); - for(length_t i = 0; i < L; ++i) - Result[i] = x[i] < y[i]; - return Result; - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec lessThanEqual(vec const& x, vec const& y) - { - vec Result(true); - for(length_t i = 0; i < L; ++i) - Result[i] = x[i] <= y[i]; - return Result; - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec greaterThan(vec const& x, vec const& y) - { - vec Result(true); - for(length_t i = 0; i < L; ++i) - Result[i] = x[i] > y[i]; - return Result; - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec greaterThanEqual(vec const& x, vec const& y) - { - vec Result(true); - for(length_t i = 0; i < L; ++i) - Result[i] = x[i] >= y[i]; - return Result; - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec equal(vec const& x, vec const& y) - { - vec Result(true); - for(length_t i = 0; i < L; ++i) - Result[i] = x[i] == y[i]; - return Result; - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec notEqual(vec const& x, vec const& y) - { - vec Result(true); - for(length_t i = 
0; i < L; ++i) - Result[i] = x[i] != y[i]; - return Result; - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR bool any(vec const& v) - { - bool Result = false; - for(length_t i = 0; i < L; ++i) - Result = Result || v[i]; - return Result; - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR bool all(vec const& v) - { - bool Result = true; - for(length_t i = 0; i < L; ++i) - Result = Result && v[i]; - return Result; - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec not_(vec const& v) - { - vec Result(true); - for(length_t i = 0; i < L; ++i) - Result[i] = !v[i]; - return Result; - } -}//namespace glm - -#if GLM_CONFIG_SIMD == GLM_ENABLE -# include "func_vector_relational_simd.inl" -#endif diff --git a/third_party/glm/detail/func_vector_relational_simd.inl b/third_party/glm/detail/func_vector_relational_simd.inl deleted file mode 100755 index fd0fe8b..0000000 --- a/third_party/glm/detail/func_vector_relational_simd.inl +++ /dev/null @@ -1,6 +0,0 @@ -namespace glm{ -namespace detail -{ - -}//namespace detail -}//namespace glm diff --git a/third_party/glm/detail/glm.cpp b/third_party/glm/detail/glm.cpp deleted file mode 100755 index e0755bd..0000000 --- a/third_party/glm/detail/glm.cpp +++ /dev/null @@ -1,263 +0,0 @@ -/// @ref core -/// @file glm/glm.cpp - -#ifndef GLM_ENABLE_EXPERIMENTAL -#define GLM_ENABLE_EXPERIMENTAL -#endif -#include -#include -#include -#include -#include -#include - -namespace glm -{ -// tvec1 type explicit instantiation -template struct vec<1, uint8, lowp>; -template struct vec<1, uint16, lowp>; -template struct vec<1, uint32, lowp>; -template struct vec<1, uint64, lowp>; -template struct vec<1, int8, lowp>; -template struct vec<1, int16, lowp>; -template struct vec<1, int32, lowp>; -template struct vec<1, int64, lowp>; -template struct vec<1, float32, lowp>; -template struct vec<1, float64, lowp>; - -template struct vec<1, uint8, mediump>; -template struct vec<1, uint16, mediump>; -template struct vec<1, uint32, mediump>; -template struct vec<1, uint64, mediump>; -template struct vec<1, int8, mediump>; -template struct vec<1, int16, mediump>; -template struct vec<1, int32, mediump>; -template struct vec<1, int64, mediump>; -template struct vec<1, float32, mediump>; -template struct vec<1, float64, mediump>; - -template struct vec<1, uint8, highp>; -template struct vec<1, uint16, highp>; -template struct vec<1, uint32, highp>; -template struct vec<1, uint64, highp>; -template struct vec<1, int8, highp>; -template struct vec<1, int16, highp>; -template struct vec<1, int32, highp>; -template struct vec<1, int64, highp>; -template struct vec<1, float32, highp>; -template struct vec<1, float64, highp>; - -// tvec2 type explicit instantiation -template struct vec<2, uint8, lowp>; -template struct vec<2, uint16, lowp>; -template struct vec<2, uint32, lowp>; -template struct vec<2, uint64, lowp>; -template struct vec<2, int8, lowp>; -template struct vec<2, int16, lowp>; -template struct vec<2, int32, lowp>; -template struct vec<2, int64, lowp>; -template struct vec<2, float32, lowp>; -template struct vec<2, float64, lowp>; - -template struct vec<2, uint8, mediump>; -template struct vec<2, uint16, mediump>; -template struct vec<2, uint32, mediump>; -template struct vec<2, uint64, mediump>; -template struct vec<2, int8, mediump>; -template struct vec<2, int16, mediump>; -template struct vec<2, int32, mediump>; -template struct vec<2, int64, mediump>; -template struct vec<2, float32, mediump>; -template struct vec<2, float64, mediump>; - -template struct vec<2, uint8, 
highp>; -template struct vec<2, uint16, highp>; -template struct vec<2, uint32, highp>; -template struct vec<2, uint64, highp>; -template struct vec<2, int8, highp>; -template struct vec<2, int16, highp>; -template struct vec<2, int32, highp>; -template struct vec<2, int64, highp>; -template struct vec<2, float32, highp>; -template struct vec<2, float64, highp>; - -// tvec3 type explicit instantiation -template struct vec<3, uint8, lowp>; -template struct vec<3, uint16, lowp>; -template struct vec<3, uint32, lowp>; -template struct vec<3, uint64, lowp>; -template struct vec<3, int8, lowp>; -template struct vec<3, int16, lowp>; -template struct vec<3, int32, lowp>; -template struct vec<3, int64, lowp>; -template struct vec<3, float32, lowp>; -template struct vec<3, float64, lowp>; - -template struct vec<3, uint8, mediump>; -template struct vec<3, uint16, mediump>; -template struct vec<3, uint32, mediump>; -template struct vec<3, uint64, mediump>; -template struct vec<3, int8, mediump>; -template struct vec<3, int16, mediump>; -template struct vec<3, int32, mediump>; -template struct vec<3, int64, mediump>; -template struct vec<3, float32, mediump>; -template struct vec<3, float64, mediump>; - -template struct vec<3, uint8, highp>; -template struct vec<3, uint16, highp>; -template struct vec<3, uint32, highp>; -template struct vec<3, uint64, highp>; -template struct vec<3, int8, highp>; -template struct vec<3, int16, highp>; -template struct vec<3, int32, highp>; -template struct vec<3, int64, highp>; -template struct vec<3, float32, highp>; -template struct vec<3, float64, highp>; - -// tvec4 type explicit instantiation -template struct vec<4, uint8, lowp>; -template struct vec<4, uint16, lowp>; -template struct vec<4, uint32, lowp>; -template struct vec<4, uint64, lowp>; -template struct vec<4, int8, lowp>; -template struct vec<4, int16, lowp>; -template struct vec<4, int32, lowp>; -template struct vec<4, int64, lowp>; -template struct vec<4, float32, lowp>; -template struct vec<4, float64, lowp>; - -template struct vec<4, uint8, mediump>; -template struct vec<4, uint16, mediump>; -template struct vec<4, uint32, mediump>; -template struct vec<4, uint64, mediump>; -template struct vec<4, int8, mediump>; -template struct vec<4, int16, mediump>; -template struct vec<4, int32, mediump>; -template struct vec<4, int64, mediump>; -template struct vec<4, float32, mediump>; -template struct vec<4, float64, mediump>; - -template struct vec<4, uint8, highp>; -template struct vec<4, uint16, highp>; -template struct vec<4, uint32, highp>; -template struct vec<4, uint64, highp>; -template struct vec<4, int8, highp>; -template struct vec<4, int16, highp>; -template struct vec<4, int32, highp>; -template struct vec<4, int64, highp>; -template struct vec<4, float32, highp>; -template struct vec<4, float64, highp>; - -// tmat2x2 type explicit instantiation -template struct mat<2, 2, float32, lowp>; -template struct mat<2, 2, float64, lowp>; - -template struct mat<2, 2, float32, mediump>; -template struct mat<2, 2, float64, mediump>; - -template struct mat<2, 2, float32, highp>; -template struct mat<2, 2, float64, highp>; - -// tmat2x3 type explicit instantiation -template struct mat<2, 3, float32, lowp>; -template struct mat<2, 3, float64, lowp>; - -template struct mat<2, 3, float32, mediump>; -template struct mat<2, 3, float64, mediump>; - -template struct mat<2, 3, float32, highp>; -template struct mat<2, 3, float64, highp>; - -// tmat2x4 type explicit instantiation -template struct mat<2, 4, float32, 
lowp>; -template struct mat<2, 4, float64, lowp>; - -template struct mat<2, 4, float32, mediump>; -template struct mat<2, 4, float64, mediump>; - -template struct mat<2, 4, float32, highp>; -template struct mat<2, 4, float64, highp>; - -// tmat3x2 type explicit instantiation -template struct mat<3, 2, float32, lowp>; -template struct mat<3, 2, float64, lowp>; - -template struct mat<3, 2, float32, mediump>; -template struct mat<3, 2, float64, mediump>; - -template struct mat<3, 2, float32, highp>; -template struct mat<3, 2, float64, highp>; - -// tmat3x3 type explicit instantiation -template struct mat<3, 3, float32, lowp>; -template struct mat<3, 3, float64, lowp>; - -template struct mat<3, 3, float32, mediump>; -template struct mat<3, 3, float64, mediump>; - -template struct mat<3, 3, float32, highp>; -template struct mat<3, 3, float64, highp>; - -// tmat3x4 type explicit instantiation -template struct mat<3, 4, float32, lowp>; -template struct mat<3, 4, float64, lowp>; - -template struct mat<3, 4, float32, mediump>; -template struct mat<3, 4, float64, mediump>; - -template struct mat<3, 4, float32, highp>; -template struct mat<3, 4, float64, highp>; - -// tmat4x2 type explicit instantiation -template struct mat<4, 2, float32, lowp>; -template struct mat<4, 2, float64, lowp>; - -template struct mat<4, 2, float32, mediump>; -template struct mat<4, 2, float64, mediump>; - -template struct mat<4, 2, float32, highp>; -template struct mat<4, 2, float64, highp>; - -// tmat4x3 type explicit instantiation -template struct mat<4, 3, float32, lowp>; -template struct mat<4, 3, float64, lowp>; - -template struct mat<4, 3, float32, mediump>; -template struct mat<4, 3, float64, mediump>; - -template struct mat<4, 3, float32, highp>; -template struct mat<4, 3, float64, highp>; - -// tmat4x4 type explicit instantiation -template struct mat<4, 4, float32, lowp>; -template struct mat<4, 4, float64, lowp>; - -template struct mat<4, 4, float32, mediump>; -template struct mat<4, 4, float64, mediump>; - -template struct mat<4, 4, float32, highp>; -template struct mat<4, 4, float64, highp>; - -// tquat type explicit instantiation -template struct qua; -template struct qua; - -template struct qua; -template struct qua; - -template struct qua; -template struct qua; - -//tdualquat type explicit instantiation -template struct tdualquat; -template struct tdualquat; - -template struct tdualquat; -template struct tdualquat; - -template struct tdualquat; -template struct tdualquat; - -}//namespace glm - diff --git a/third_party/glm/detail/qualifier.hpp b/third_party/glm/detail/qualifier.hpp deleted file mode 100755 index b6c9df0..0000000 --- a/third_party/glm/detail/qualifier.hpp +++ /dev/null @@ -1,230 +0,0 @@ -#pragma once - -#include "setup.hpp" - -namespace glm -{ - /// Qualify GLM types in term of alignment (packed, aligned) and precision in term of ULPs (lowp, mediump, highp) - enum qualifier - { - packed_highp, ///< Typed data is tightly packed in memory and operations are executed with high precision in term of ULPs - packed_mediump, ///< Typed data is tightly packed in memory and operations are executed with medium precision in term of ULPs for higher performance - packed_lowp, ///< Typed data is tightly packed in memory and operations are executed with low precision in term of ULPs to maximize performance - -# if GLM_CONFIG_ALIGNED_GENTYPES == GLM_ENABLE - aligned_highp, ///< Typed data is aligned in memory allowing SIMD optimizations and operations are executed with high precision in term of ULPs - 
aligned_mediump, ///< Typed data is aligned in memory allowing SIMD optimizations and operations are executed with high precision in term of ULPs for higher performance - aligned_lowp, // ///< Typed data is aligned in memory allowing SIMD optimizations and operations are executed with high precision in term of ULPs to maximize performance - aligned = aligned_highp, ///< By default aligned qualifier is also high precision -# endif - - highp = packed_highp, ///< By default highp qualifier is also packed - mediump = packed_mediump, ///< By default mediump qualifier is also packed - lowp = packed_lowp, ///< By default lowp qualifier is also packed - packed = packed_highp, ///< By default packed qualifier is also high precision - -# if GLM_CONFIG_ALIGNED_GENTYPES == GLM_ENABLE && defined(GLM_FORCE_DEFAULT_ALIGNED_GENTYPES) - defaultp = aligned_highp -# else - defaultp = highp -# endif - }; - - typedef qualifier precision; - - template struct vec; - template struct mat; - template struct qua; - -# if GLM_HAS_TEMPLATE_ALIASES - template using tvec1 = vec<1, T, Q>; - template using tvec2 = vec<2, T, Q>; - template using tvec3 = vec<3, T, Q>; - template using tvec4 = vec<4, T, Q>; - template using tmat2x2 = mat<2, 2, T, Q>; - template using tmat2x3 = mat<2, 3, T, Q>; - template using tmat2x4 = mat<2, 4, T, Q>; - template using tmat3x2 = mat<3, 2, T, Q>; - template using tmat3x3 = mat<3, 3, T, Q>; - template using tmat3x4 = mat<3, 4, T, Q>; - template using tmat4x2 = mat<4, 2, T, Q>; - template using tmat4x3 = mat<4, 3, T, Q>; - template using tmat4x4 = mat<4, 4, T, Q>; - template using tquat = qua; -# endif - -namespace detail -{ - template - struct is_aligned - { - static const bool value = false; - }; - -# if GLM_CONFIG_ALIGNED_GENTYPES == GLM_ENABLE - template<> - struct is_aligned - { - static const bool value = true; - }; - - template<> - struct is_aligned - { - static const bool value = true; - }; - - template<> - struct is_aligned - { - static const bool value = true; - }; -# endif - - template - struct storage - { - typedef struct type { - T data[L]; - } type; - }; - -# if GLM_HAS_ALIGNOF - template - struct storage - { - typedef struct alignas(L * sizeof(T)) type { - T data[L]; - } type; - }; - - template - struct storage<3, T, true> - { - typedef struct alignas(4 * sizeof(T)) type { - T data[4]; - } type; - }; -# endif - -# if GLM_ARCH & GLM_ARCH_SSE2_BIT - template<> - struct storage<4, float, true> - { - typedef glm_f32vec4 type; - }; - - template<> - struct storage<4, int, true> - { - typedef glm_i32vec4 type; - }; - - template<> - struct storage<4, unsigned int, true> - { - typedef glm_u32vec4 type; - }; - - template<> - struct storage<2, double, true> - { - typedef glm_f64vec2 type; - }; - - template<> - struct storage<2, detail::int64, true> - { - typedef glm_i64vec2 type; - }; - - template<> - struct storage<2, detail::uint64, true> - { - typedef glm_u64vec2 type; - }; -# endif - -# if (GLM_ARCH & GLM_ARCH_AVX_BIT) - template<> - struct storage<4, double, true> - { - typedef glm_f64vec4 type; - }; -# endif - -# if (GLM_ARCH & GLM_ARCH_AVX2_BIT) - template<> - struct storage<4, detail::int64, true> - { - typedef glm_i64vec4 type; - }; - - template<> - struct storage<4, detail::uint64, true> - { - typedef glm_u64vec4 type; - }; -# endif - -# if GLM_ARCH & GLM_ARCH_NEON_BIT - template<> - struct storage<4, float, true> - { - typedef glm_f32vec4 type; - }; - - template<> - struct storage<4, int, true> - { - typedef glm_i32vec4 type; - }; - - template<> - struct storage<4, unsigned int, 
true> - { - typedef glm_u32vec4 type; - }; -# endif - - enum genTypeEnum - { - GENTYPE_VEC, - GENTYPE_MAT, - GENTYPE_QUAT - }; - - template - struct genTypeTrait - {}; - - template - struct genTypeTrait > - { - static const genTypeEnum GENTYPE = GENTYPE_MAT; - }; - - template - struct init_gentype - { - }; - - template - struct init_gentype - { - GLM_FUNC_QUALIFIER GLM_CONSTEXPR static genType identity() - { - return genType(1, 0, 0, 0); - } - }; - - template - struct init_gentype - { - GLM_FUNC_QUALIFIER GLM_CONSTEXPR static genType identity() - { - return genType(1); - } - }; -}//namespace detail -}//namespace glm diff --git a/third_party/glm/detail/setup.hpp b/third_party/glm/detail/setup.hpp deleted file mode 100755 index 07db656..0000000 --- a/third_party/glm/detail/setup.hpp +++ /dev/null @@ -1,1135 +0,0 @@ -#ifndef GLM_SETUP_INCLUDED - -#include -#include - -#define GLM_VERSION_MAJOR 0 -#define GLM_VERSION_MINOR 9 -#define GLM_VERSION_PATCH 9 -#define GLM_VERSION_REVISION 7 -#define GLM_VERSION 997 -#define GLM_VERSION_MESSAGE "GLM: version 0.9.9.7" - -#define GLM_SETUP_INCLUDED GLM_VERSION - -/////////////////////////////////////////////////////////////////////////////////// -// Active states - -#define GLM_DISABLE 0 -#define GLM_ENABLE 1 - -/////////////////////////////////////////////////////////////////////////////////// -// Messages - -#if defined(GLM_FORCE_MESSAGES) -# define GLM_MESSAGES GLM_ENABLE -#else -# define GLM_MESSAGES GLM_DISABLE -#endif - -/////////////////////////////////////////////////////////////////////////////////// -// Detect the platform - -#include "../simd/platform.h" - -/////////////////////////////////////////////////////////////////////////////////// -// Build model - -#if defined(_M_ARM64) || defined(__LP64__) || defined(_M_X64) || defined(__ppc64__) || defined(__x86_64__) -# define GLM_MODEL GLM_MODEL_64 -#elif defined(__i386__) || defined(__ppc__) || defined(__ILP32__) || defined(_M_ARM) -# define GLM_MODEL GLM_MODEL_32 -#else -# define GLM_MODEL GLM_MODEL_32 -#endif// - -#if !defined(GLM_MODEL) && GLM_COMPILER != 0 -# error "GLM_MODEL undefined, your compiler may not be supported by GLM. Add #define GLM_MODEL 0 to ignore this message." 
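[Editor's note: the qualifier and storage<> machinery removed in the hunk above is what lets the same vector type use either tightly packed or SIMD-aligned storage. A short sketch of selecting a qualifier explicitly (illustrative only, not part of this patch; the aligned qualifiers exist only when GLM_CONFIG_ALIGNED_GENTYPES is enabled for the target):]

#include <glm/glm.hpp>

// Same component type, different storage: packed_highp is the portable default,
// aligned_highp opts in to the SIMD-friendly storage<> specializations.
using packed_vec4 = glm::vec<4, float, glm::packed_highp>;

#if GLM_CONFIG_ALIGNED_GENTYPES == GLM_ENABLE
using aligned_vec4 = glm::vec<4, float, glm::aligned_highp>;
static_assert(alignof(aligned_vec4) >= alignof(packed_vec4), "aligned storage is at least as strict");
#endif
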
-#endif//GLM_MODEL - -/////////////////////////////////////////////////////////////////////////////////// -// C++ Version - -// User defines: GLM_FORCE_CXX98, GLM_FORCE_CXX03, GLM_FORCE_CXX11, GLM_FORCE_CXX14, GLM_FORCE_CXX17, GLM_FORCE_CXX2A - -#define GLM_LANG_CXX98_FLAG (1 << 1) -#define GLM_LANG_CXX03_FLAG (1 << 2) -#define GLM_LANG_CXX0X_FLAG (1 << 3) -#define GLM_LANG_CXX11_FLAG (1 << 4) -#define GLM_LANG_CXX14_FLAG (1 << 5) -#define GLM_LANG_CXX17_FLAG (1 << 6) -#define GLM_LANG_CXX2A_FLAG (1 << 7) -#define GLM_LANG_CXXMS_FLAG (1 << 8) -#define GLM_LANG_CXXGNU_FLAG (1 << 9) - -#define GLM_LANG_CXX98 GLM_LANG_CXX98_FLAG -#define GLM_LANG_CXX03 (GLM_LANG_CXX98 | GLM_LANG_CXX03_FLAG) -#define GLM_LANG_CXX0X (GLM_LANG_CXX03 | GLM_LANG_CXX0X_FLAG) -#define GLM_LANG_CXX11 (GLM_LANG_CXX0X | GLM_LANG_CXX11_FLAG) -#define GLM_LANG_CXX14 (GLM_LANG_CXX11 | GLM_LANG_CXX14_FLAG) -#define GLM_LANG_CXX17 (GLM_LANG_CXX14 | GLM_LANG_CXX17_FLAG) -#define GLM_LANG_CXX2A (GLM_LANG_CXX17 | GLM_LANG_CXX2A_FLAG) -#define GLM_LANG_CXXMS GLM_LANG_CXXMS_FLAG -#define GLM_LANG_CXXGNU GLM_LANG_CXXGNU_FLAG - -#if (defined(_MSC_EXTENSIONS)) -# define GLM_LANG_EXT GLM_LANG_CXXMS_FLAG -#elif ((GLM_COMPILER & (GLM_COMPILER_CLANG | GLM_COMPILER_GCC)) && (GLM_ARCH & GLM_ARCH_SIMD_BIT)) -# define GLM_LANG_EXT GLM_LANG_CXXMS_FLAG -#else -# define GLM_LANG_EXT 0 -#endif - -#if (defined(GLM_FORCE_CXX_UNKNOWN)) -# define GLM_LANG 0 -#elif defined(GLM_FORCE_CXX2A) -# define GLM_LANG (GLM_LANG_CXX2A | GLM_LANG_EXT) -# define GLM_LANG_STL11_FORCED -#elif defined(GLM_FORCE_CXX17) -# define GLM_LANG (GLM_LANG_CXX17 | GLM_LANG_EXT) -# define GLM_LANG_STL11_FORCED -#elif defined(GLM_FORCE_CXX14) -# define GLM_LANG (GLM_LANG_CXX14 | GLM_LANG_EXT) -# define GLM_LANG_STL11_FORCED -#elif defined(GLM_FORCE_CXX11) -# define GLM_LANG (GLM_LANG_CXX11 | GLM_LANG_EXT) -# define GLM_LANG_STL11_FORCED -#elif defined(GLM_FORCE_CXX03) -# define GLM_LANG (GLM_LANG_CXX03 | GLM_LANG_EXT) -#elif defined(GLM_FORCE_CXX98) -# define GLM_LANG (GLM_LANG_CXX98 | GLM_LANG_EXT) -#else -# if GLM_COMPILER & GLM_COMPILER_VC && defined(_MSVC_LANG) -# if GLM_COMPILER >= GLM_COMPILER_VC15_7 -# define GLM_LANG_PLATFORM _MSVC_LANG -# elif GLM_COMPILER >= GLM_COMPILER_VC15 -# if _MSVC_LANG > 201402L -# define GLM_LANG_PLATFORM 201402L -# else -# define GLM_LANG_PLATFORM _MSVC_LANG -# endif -# else -# define GLM_LANG_PLATFORM 0 -# endif -# else -# define GLM_LANG_PLATFORM 0 -# endif - -# if __cplusplus > 201703L || GLM_LANG_PLATFORM > 201703L -# define GLM_LANG (GLM_LANG_CXX2A | GLM_LANG_EXT) -# elif __cplusplus == 201703L || GLM_LANG_PLATFORM == 201703L -# define GLM_LANG (GLM_LANG_CXX17 | GLM_LANG_EXT) -# elif __cplusplus == 201402L || __cplusplus == 201500L || GLM_LANG_PLATFORM == 201402L -# define GLM_LANG (GLM_LANG_CXX14 | GLM_LANG_EXT) -# elif __cplusplus == 201103L || GLM_LANG_PLATFORM == 201103L -# define GLM_LANG (GLM_LANG_CXX11 | GLM_LANG_EXT) -# elif defined(__INTEL_CXX11_MODE__) || defined(_MSC_VER) || defined(__GXX_EXPERIMENTAL_CXX0X__) -# define GLM_LANG (GLM_LANG_CXX0X | GLM_LANG_EXT) -# elif __cplusplus == 199711L -# define GLM_LANG (GLM_LANG_CXX98 | GLM_LANG_EXT) -# else -# define GLM_LANG (0 | GLM_LANG_EXT) -# endif -#endif - -/////////////////////////////////////////////////////////////////////////////////// -// Has of C++ features - -// http://clang.llvm.org/cxx_status.html -// http://gcc.gnu.org/projects/cxx0x.html -// http://msdn.microsoft.com/en-us/library/vstudio/hh567368(v=vs.120).aspx - -// Android has multiple STLs but C++11 STL 
detection doesn't always work #284 #564 -#if GLM_PLATFORM == GLM_PLATFORM_ANDROID && !defined(GLM_LANG_STL11_FORCED) -# define GLM_HAS_CXX11_STL 0 -#elif GLM_COMPILER & GLM_COMPILER_CLANG -# if (defined(_LIBCPP_VERSION) || (GLM_LANG & GLM_LANG_CXX11_FLAG) || defined(GLM_LANG_STL11_FORCED)) -# define GLM_HAS_CXX11_STL 1 -# else -# define GLM_HAS_CXX11_STL 0 -# endif -#elif GLM_LANG & GLM_LANG_CXX11_FLAG -# define GLM_HAS_CXX11_STL 1 -#else -# define GLM_HAS_CXX11_STL ((GLM_LANG & GLM_LANG_CXX0X_FLAG) && (\ - ((GLM_COMPILER & GLM_COMPILER_GCC) && (GLM_COMPILER >= GLM_COMPILER_GCC48)) || \ - ((GLM_COMPILER & GLM_COMPILER_VC) && (GLM_COMPILER >= GLM_COMPILER_VC12)) || \ - ((GLM_PLATFORM != GLM_PLATFORM_WINDOWS) && (GLM_COMPILER & GLM_COMPILER_INTEL) && (GLM_COMPILER >= GLM_COMPILER_INTEL15)))) -#endif - -// N1720 -#if GLM_COMPILER & GLM_COMPILER_CLANG -# define GLM_HAS_STATIC_ASSERT __has_feature(cxx_static_assert) -#elif GLM_LANG & GLM_LANG_CXX11_FLAG -# define GLM_HAS_STATIC_ASSERT 1 -#else -# define GLM_HAS_STATIC_ASSERT ((GLM_LANG & GLM_LANG_CXX0X_FLAG) && (\ - ((GLM_COMPILER & GLM_COMPILER_CUDA)) || \ - ((GLM_COMPILER & GLM_COMPILER_VC)))) -#endif - -// N1988 -#if GLM_LANG & GLM_LANG_CXX11_FLAG -# define GLM_HAS_EXTENDED_INTEGER_TYPE 1 -#else -# define GLM_HAS_EXTENDED_INTEGER_TYPE (\ - ((GLM_LANG & GLM_LANG_CXX0X_FLAG) && (GLM_COMPILER & GLM_COMPILER_VC)) || \ - ((GLM_LANG & GLM_LANG_CXX0X_FLAG) && (GLM_COMPILER & GLM_COMPILER_CUDA)) || \ - ((GLM_LANG & GLM_LANG_CXX0X_FLAG) && (GLM_COMPILER & GLM_COMPILER_CLANG))) -#endif - -// N2672 Initializer lists http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2008/n2672.htm -#if GLM_COMPILER & GLM_COMPILER_CLANG -# define GLM_HAS_INITIALIZER_LISTS __has_feature(cxx_generalized_initializers) -#elif GLM_LANG & GLM_LANG_CXX11_FLAG -# define GLM_HAS_INITIALIZER_LISTS 1 -#else -# define GLM_HAS_INITIALIZER_LISTS ((GLM_LANG & GLM_LANG_CXX0X_FLAG) && (\ - ((GLM_COMPILER & GLM_COMPILER_VC) && (GLM_COMPILER >= GLM_COMPILER_VC15)) || \ - ((GLM_COMPILER & GLM_COMPILER_INTEL) && (GLM_COMPILER >= GLM_COMPILER_INTEL14)) || \ - ((GLM_COMPILER & GLM_COMPILER_CUDA)))) -#endif - -// N2544 Unrestricted unions http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2008/n2544.pdf -#if GLM_COMPILER & GLM_COMPILER_CLANG -# define GLM_HAS_UNRESTRICTED_UNIONS __has_feature(cxx_unrestricted_unions) -#elif GLM_LANG & GLM_LANG_CXX11_FLAG -# define GLM_HAS_UNRESTRICTED_UNIONS 1 -#else -# define GLM_HAS_UNRESTRICTED_UNIONS (GLM_LANG & GLM_LANG_CXX0X_FLAG) && (\ - (GLM_COMPILER & GLM_COMPILER_VC) || \ - ((GLM_COMPILER & GLM_COMPILER_CUDA))) -#endif - -// N2346 -#if GLM_COMPILER & GLM_COMPILER_CLANG -# define GLM_HAS_DEFAULTED_FUNCTIONS __has_feature(cxx_defaulted_functions) -#elif GLM_LANG & GLM_LANG_CXX11_FLAG -# define GLM_HAS_DEFAULTED_FUNCTIONS 1 -#else -# define GLM_HAS_DEFAULTED_FUNCTIONS ((GLM_LANG & GLM_LANG_CXX0X_FLAG) && (\ - ((GLM_COMPILER & GLM_COMPILER_VC) && (GLM_COMPILER >= GLM_COMPILER_VC12)) || \ - ((GLM_COMPILER & GLM_COMPILER_INTEL)) || \ - (GLM_COMPILER & GLM_COMPILER_CUDA))) -#endif - -// N2118 -#if GLM_COMPILER & GLM_COMPILER_CLANG -# define GLM_HAS_RVALUE_REFERENCES __has_feature(cxx_rvalue_references) -#elif GLM_LANG & GLM_LANG_CXX11_FLAG -# define GLM_HAS_RVALUE_REFERENCES 1 -#else -# define GLM_HAS_RVALUE_REFERENCES ((GLM_LANG & GLM_LANG_CXX0X_FLAG) && (\ - ((GLM_COMPILER & GLM_COMPILER_VC)) || \ - ((GLM_COMPILER & GLM_COMPILER_CUDA)))) -#endif - -// N2437 http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2437.pdf -#if GLM_COMPILER & 
GLM_COMPILER_CLANG -# define GLM_HAS_EXPLICIT_CONVERSION_OPERATORS __has_feature(cxx_explicit_conversions) -#elif GLM_LANG & GLM_LANG_CXX11_FLAG -# define GLM_HAS_EXPLICIT_CONVERSION_OPERATORS 1 -#else -# define GLM_HAS_EXPLICIT_CONVERSION_OPERATORS ((GLM_LANG & GLM_LANG_CXX0X_FLAG) && (\ - ((GLM_COMPILER & GLM_COMPILER_INTEL) && (GLM_COMPILER >= GLM_COMPILER_INTEL14)) || \ - ((GLM_COMPILER & GLM_COMPILER_VC) && (GLM_COMPILER >= GLM_COMPILER_VC12)) || \ - ((GLM_COMPILER & GLM_COMPILER_CUDA)))) -#endif - -// N2258 http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2258.pdf -#if GLM_COMPILER & GLM_COMPILER_CLANG -# define GLM_HAS_TEMPLATE_ALIASES __has_feature(cxx_alias_templates) -#elif GLM_LANG & GLM_LANG_CXX11_FLAG -# define GLM_HAS_TEMPLATE_ALIASES 1 -#else -# define GLM_HAS_TEMPLATE_ALIASES ((GLM_LANG & GLM_LANG_CXX0X_FLAG) && (\ - ((GLM_COMPILER & GLM_COMPILER_INTEL)) || \ - ((GLM_COMPILER & GLM_COMPILER_VC) && (GLM_COMPILER >= GLM_COMPILER_VC12)) || \ - ((GLM_COMPILER & GLM_COMPILER_CUDA)))) -#endif - -// N2930 http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2009/n2930.html -#if GLM_COMPILER & GLM_COMPILER_CLANG -# define GLM_HAS_RANGE_FOR __has_feature(cxx_range_for) -#elif GLM_LANG & GLM_LANG_CXX11_FLAG -# define GLM_HAS_RANGE_FOR 1 -#else -# define GLM_HAS_RANGE_FOR ((GLM_LANG & GLM_LANG_CXX0X_FLAG) && (\ - ((GLM_COMPILER & GLM_COMPILER_INTEL)) || \ - ((GLM_COMPILER & GLM_COMPILER_VC)) || \ - ((GLM_COMPILER & GLM_COMPILER_CUDA)))) -#endif - -// N2341 http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2341.pdf -#if GLM_COMPILER & GLM_COMPILER_CLANG -# define GLM_HAS_ALIGNOF __has_feature(cxx_alignas) -#elif GLM_LANG & GLM_LANG_CXX11_FLAG -# define GLM_HAS_ALIGNOF 1 -#else -# define GLM_HAS_ALIGNOF ((GLM_LANG & GLM_LANG_CXX0X_FLAG) && (\ - ((GLM_COMPILER & GLM_COMPILER_INTEL) && (GLM_COMPILER >= GLM_COMPILER_INTEL15)) || \ - ((GLM_COMPILER & GLM_COMPILER_VC) && (GLM_COMPILER >= GLM_COMPILER_VC14)) || \ - ((GLM_COMPILER & GLM_COMPILER_CUDA)))) -#endif - -// N2235 Generalized Constant Expressions http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2235.pdf -// N3652 Extended Constant Expressions http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2013/n3652.html -#if (GLM_ARCH & GLM_ARCH_SIMD_BIT) // Compiler SIMD intrinsics don't support constexpr... 
-# define GLM_HAS_CONSTEXPR 0 -#elif (GLM_COMPILER & GLM_COMPILER_CLANG) -# define GLM_HAS_CONSTEXPR __has_feature(cxx_relaxed_constexpr) -#elif (GLM_LANG & GLM_LANG_CXX14_FLAG) -# define GLM_HAS_CONSTEXPR 1 -#else -# define GLM_HAS_CONSTEXPR ((GLM_LANG & GLM_LANG_CXX0X_FLAG) && GLM_HAS_INITIALIZER_LISTS && (\ - ((GLM_COMPILER & GLM_COMPILER_INTEL) && (GLM_COMPILER >= GLM_COMPILER_INTEL17)) || \ - ((GLM_COMPILER & GLM_COMPILER_VC) && (GLM_COMPILER >= GLM_COMPILER_VC15)))) -#endif - -#if GLM_HAS_CONSTEXPR -# define GLM_CONSTEXPR constexpr -#else -# define GLM_CONSTEXPR -#endif - -// -#if GLM_HAS_CONSTEXPR -# if (GLM_COMPILER & GLM_COMPILER_CLANG) -# if __has_feature(cxx_if_constexpr) -# define GLM_HAS_IF_CONSTEXPR 1 -# else -# define GLM_HAS_IF_CONSTEXPR 0 -# endif -# elif (GLM_LANG & GLM_LANG_CXX17_FLAG) -# define GLM_HAS_IF_CONSTEXPR 1 -# else -# define GLM_HAS_IF_CONSTEXPR 0 -# endif -#else -# define GLM_HAS_IF_CONSTEXPR 0 -#endif - -#if GLM_HAS_IF_CONSTEXPR -# define GLM_IF_CONSTEXPR if constexpr -#else -# define GLM_IF_CONSTEXPR if -#endif - -// -#if GLM_LANG & GLM_LANG_CXX11_FLAG -# define GLM_HAS_ASSIGNABLE 1 -#else -# define GLM_HAS_ASSIGNABLE ((GLM_LANG & GLM_LANG_CXX0X_FLAG) && (\ - ((GLM_COMPILER & GLM_COMPILER_VC) && (GLM_COMPILER >= GLM_COMPILER_VC15)) || \ - ((GLM_COMPILER & GLM_COMPILER_GCC) && (GLM_COMPILER >= GLM_COMPILER_GCC49)))) -#endif - -// -#define GLM_HAS_TRIVIAL_QUERIES 0 - -// -#if GLM_LANG & GLM_LANG_CXX11_FLAG -# define GLM_HAS_MAKE_SIGNED 1 -#else -# define GLM_HAS_MAKE_SIGNED ((GLM_LANG & GLM_LANG_CXX0X_FLAG) && (\ - ((GLM_COMPILER & GLM_COMPILER_VC) && (GLM_COMPILER >= GLM_COMPILER_VC12)) || \ - ((GLM_COMPILER & GLM_COMPILER_CUDA)))) -#endif - -// -#if defined(GLM_FORCE_INTRINSICS) -# define GLM_HAS_BITSCAN_WINDOWS ((GLM_PLATFORM & GLM_PLATFORM_WINDOWS) && (\ - ((GLM_COMPILER & GLM_COMPILER_INTEL)) || \ - ((GLM_COMPILER & GLM_COMPILER_VC) && (GLM_COMPILER >= GLM_COMPILER_VC14) && (GLM_ARCH & GLM_ARCH_X86_BIT)))) -#else -# define GLM_HAS_BITSCAN_WINDOWS 0 -#endif - -/////////////////////////////////////////////////////////////////////////////////// -// OpenMP -#ifdef _OPENMP -# if GLM_COMPILER & GLM_COMPILER_GCC -# if GLM_COMPILER >= GLM_COMPILER_GCC61 -# define GLM_HAS_OPENMP 45 -# elif GLM_COMPILER >= GLM_COMPILER_GCC49 -# define GLM_HAS_OPENMP 40 -# elif GLM_COMPILER >= GLM_COMPILER_GCC47 -# define GLM_HAS_OPENMP 31 -# else -# define GLM_HAS_OPENMP 0 -# endif -# elif GLM_COMPILER & GLM_COMPILER_CLANG -# if GLM_COMPILER >= GLM_COMPILER_CLANG38 -# define GLM_HAS_OPENMP 31 -# else -# define GLM_HAS_OPENMP 0 -# endif -# elif GLM_COMPILER & GLM_COMPILER_VC -# define GLM_HAS_OPENMP 20 -# elif GLM_COMPILER & GLM_COMPILER_INTEL -# if GLM_COMPILER >= GLM_COMPILER_INTEL16 -# define GLM_HAS_OPENMP 40 -# else -# define GLM_HAS_OPENMP 0 -# endif -# else -# define GLM_HAS_OPENMP 0 -# endif -#else -# define GLM_HAS_OPENMP 0 -#endif - -/////////////////////////////////////////////////////////////////////////////////// -// nullptr - -#if GLM_LANG & GLM_LANG_CXX0X_FLAG -# define GLM_CONFIG_NULLPTR GLM_ENABLE -#else -# define GLM_CONFIG_NULLPTR GLM_DISABLE -#endif - -#if GLM_CONFIG_NULLPTR == GLM_ENABLE -# define GLM_NULLPTR nullptr -#else -# define GLM_NULLPTR 0 -#endif - -/////////////////////////////////////////////////////////////////////////////////// -// Static assert - -#if GLM_HAS_STATIC_ASSERT -# define GLM_STATIC_ASSERT(x, message) static_assert(x, message) -#elif GLM_COMPILER & GLM_COMPILER_VC -# define GLM_STATIC_ASSERT(x, message) typedef char 
__CASSERT__##__LINE__[(x) ? 1 : -1] -#else -# define GLM_STATIC_ASSERT(x, message) assert(x) -#endif//GLM_LANG - -/////////////////////////////////////////////////////////////////////////////////// -// Qualifiers - -#if GLM_COMPILER & GLM_COMPILER_CUDA -# define GLM_CUDA_FUNC_DEF __device__ __host__ -# define GLM_CUDA_FUNC_DECL __device__ __host__ -#else -# define GLM_CUDA_FUNC_DEF -# define GLM_CUDA_FUNC_DECL -#endif - -#if defined(GLM_FORCE_INLINE) -# if GLM_COMPILER & GLM_COMPILER_VC -# define GLM_INLINE __forceinline -# define GLM_NEVER_INLINE __declspec((noinline)) -# elif GLM_COMPILER & (GLM_COMPILER_GCC | GLM_COMPILER_CLANG) -# define GLM_INLINE inline __attribute__((__always_inline__)) -# define GLM_NEVER_INLINE __attribute__((__noinline__)) -# elif GLM_COMPILER & GLM_COMPILER_CUDA -# define GLM_INLINE __forceinline__ -# define GLM_NEVER_INLINE __noinline__ -# else -# define GLM_INLINE inline -# define GLM_NEVER_INLINE -# endif//GLM_COMPILER -#else -# define GLM_INLINE inline -# define GLM_NEVER_INLINE -#endif//defined(GLM_FORCE_INLINE) - -#define GLM_FUNC_DECL GLM_CUDA_FUNC_DECL -#define GLM_FUNC_QUALIFIER GLM_CUDA_FUNC_DEF GLM_INLINE - -/////////////////////////////////////////////////////////////////////////////////// -// Swizzle operators - -// User defines: GLM_FORCE_SWIZZLE - -#define GLM_SWIZZLE_DISABLED 0 -#define GLM_SWIZZLE_OPERATOR 1 -#define GLM_SWIZZLE_FUNCTION 2 - -#if defined(GLM_FORCE_XYZW_ONLY) -# undef GLM_FORCE_SWIZZLE -#endif - -#if defined(GLM_SWIZZLE) -# pragma message("GLM: GLM_SWIZZLE is deprecated, use GLM_FORCE_SWIZZLE instead.") -# define GLM_FORCE_SWIZZLE -#endif - -#if defined(GLM_FORCE_SWIZZLE) && (GLM_LANG & GLM_LANG_CXXMS_FLAG) -# define GLM_CONFIG_SWIZZLE GLM_SWIZZLE_OPERATOR -#elif defined(GLM_FORCE_SWIZZLE) -# define GLM_CONFIG_SWIZZLE GLM_SWIZZLE_FUNCTION -#else -# define GLM_CONFIG_SWIZZLE GLM_SWIZZLE_DISABLED -#endif - -/////////////////////////////////////////////////////////////////////////////////// -// Allows using not basic types as genType - -// #define GLM_FORCE_UNRESTRICTED_GENTYPE - -#ifdef GLM_FORCE_UNRESTRICTED_GENTYPE -# define GLM_CONFIG_UNRESTRICTED_GENTYPE GLM_ENABLE -#else -# define GLM_CONFIG_UNRESTRICTED_GENTYPE GLM_DISABLE -#endif - -/////////////////////////////////////////////////////////////////////////////////// -// Clip control, define GLM_FORCE_DEPTH_ZERO_TO_ONE before including GLM -// to use a clip space between 0 to 1. -// Coordinate system, define GLM_FORCE_LEFT_HANDED before including GLM -// to use left handed coordinate system by default. 
- -#define GLM_CLIP_CONTROL_ZO_BIT (1 << 0) // ZERO_TO_ONE -#define GLM_CLIP_CONTROL_NO_BIT (1 << 1) // NEGATIVE_ONE_TO_ONE -#define GLM_CLIP_CONTROL_LH_BIT (1 << 2) // LEFT_HANDED, For DirectX, Metal, Vulkan -#define GLM_CLIP_CONTROL_RH_BIT (1 << 3) // RIGHT_HANDED, For OpenGL, default in GLM - -#define GLM_CLIP_CONTROL_LH_ZO (GLM_CLIP_CONTROL_LH_BIT | GLM_CLIP_CONTROL_ZO_BIT) -#define GLM_CLIP_CONTROL_LH_NO (GLM_CLIP_CONTROL_LH_BIT | GLM_CLIP_CONTROL_NO_BIT) -#define GLM_CLIP_CONTROL_RH_ZO (GLM_CLIP_CONTROL_RH_BIT | GLM_CLIP_CONTROL_ZO_BIT) -#define GLM_CLIP_CONTROL_RH_NO (GLM_CLIP_CONTROL_RH_BIT | GLM_CLIP_CONTROL_NO_BIT) - -#ifdef GLM_FORCE_DEPTH_ZERO_TO_ONE -# ifdef GLM_FORCE_LEFT_HANDED -# define GLM_CONFIG_CLIP_CONTROL GLM_CLIP_CONTROL_LH_ZO -# else -# define GLM_CONFIG_CLIP_CONTROL GLM_CLIP_CONTROL_RH_ZO -# endif -#else -# ifdef GLM_FORCE_LEFT_HANDED -# define GLM_CONFIG_CLIP_CONTROL GLM_CLIP_CONTROL_LH_NO -# else -# define GLM_CONFIG_CLIP_CONTROL GLM_CLIP_CONTROL_RH_NO -# endif -#endif - -/////////////////////////////////////////////////////////////////////////////////// -// Qualifiers - -#if (GLM_COMPILER & GLM_COMPILER_VC) || ((GLM_COMPILER & GLM_COMPILER_INTEL) && (GLM_PLATFORM & GLM_PLATFORM_WINDOWS)) -# define GLM_DEPRECATED __declspec(deprecated) -# define GLM_ALIGNED_TYPEDEF(type, name, alignment) typedef __declspec(align(alignment)) type name -#elif GLM_COMPILER & (GLM_COMPILER_GCC | GLM_COMPILER_CLANG | GLM_COMPILER_INTEL) -# define GLM_DEPRECATED __attribute__((__deprecated__)) -# define GLM_ALIGNED_TYPEDEF(type, name, alignment) typedef type name __attribute__((aligned(alignment))) -#elif GLM_COMPILER & GLM_COMPILER_CUDA -# define GLM_DEPRECATED -# define GLM_ALIGNED_TYPEDEF(type, name, alignment) typedef type name __align__(x) -#else -# define GLM_DEPRECATED -# define GLM_ALIGNED_TYPEDEF(type, name, alignment) typedef type name -#endif - -/////////////////////////////////////////////////////////////////////////////////// - -#ifdef GLM_FORCE_EXPLICIT_CTOR -# define GLM_EXPLICIT explicit -#else -# define GLM_EXPLICIT -#endif - -/////////////////////////////////////////////////////////////////////////////////// -// SYCL - -#if GLM_COMPILER==GLM_COMPILER_SYCL - -#include -#include - -namespace glm { -namespace std { - // Import SYCL's functions into the namespace glm::std to force their usages. - // It's important to use the math built-in function (sin, exp, ...) - // of SYCL instead the std ones. - using namespace cl::sycl; - - /////////////////////////////////////////////////////////////////////////////// - // Import some "harmless" std's stuffs used by glm into - // the new glm::std namespace. - template - using numeric_limits = ::std::numeric_limits; - - using ::std::size_t; - - using ::std::uint8_t; - using ::std::uint16_t; - using ::std::uint32_t; - using ::std::uint64_t; - - using ::std::int8_t; - using ::std::int16_t; - using ::std::int32_t; - using ::std::int64_t; - - using ::std::make_unsigned; - /////////////////////////////////////////////////////////////////////////////// -} //namespace std -} //namespace glm - -#endif - -/////////////////////////////////////////////////////////////////////////////////// - -/////////////////////////////////////////////////////////////////////////////////// -// Length type: all length functions returns a length_t type. -// When GLM_FORCE_SIZE_T_LENGTH is defined, length_t is a typedef of size_t otherwise -// length_t is a typedef of int like GLSL defines it. 
-
-#define GLM_LENGTH_INT 1
-#define GLM_LENGTH_SIZE_T 2
-
-#ifdef GLM_FORCE_SIZE_T_LENGTH
-# define GLM_CONFIG_LENGTH_TYPE GLM_LENGTH_SIZE_T
-#else
-# define GLM_CONFIG_LENGTH_TYPE GLM_LENGTH_INT
-#endif
-
-namespace glm
-{
- using std::size_t;
-# if GLM_CONFIG_LENGTH_TYPE == GLM_LENGTH_SIZE_T
- typedef size_t length_t;
-# else
- typedef int length_t;
-# endif
-}//namespace glm
-
-///////////////////////////////////////////////////////////////////////////////////
-// constexpr
-
-#if GLM_HAS_CONSTEXPR
-# define GLM_CONFIG_CONSTEXP GLM_ENABLE
-
- namespace glm
- {
- template<typename T, std::size_t N>
- constexpr std::size_t countof(T const (&)[N])
- {
- return N;
- }
- }//namespace glm
-# define GLM_COUNTOF(arr) glm::countof(arr)
-#elif defined(_MSC_VER)
-# define GLM_CONFIG_CONSTEXP GLM_DISABLE
-
-# define GLM_COUNTOF(arr) _countof(arr)
-#else
-# define GLM_CONFIG_CONSTEXP GLM_DISABLE
-
-# define GLM_COUNTOF(arr) sizeof(arr) / sizeof(arr[0])
-#endif
-
-///////////////////////////////////////////////////////////////////////////////////
-// uint
-
-namespace glm{
-namespace detail
-{
- template<typename T>
- struct is_int
- {
- enum test {value = 0};
- };
-
- template<>
- struct is_int<unsigned int>
- {
- enum test {value = ~0};
- };
-
- template<>
- struct is_int<signed int>
- {
- enum test {value = ~0};
- };
-}//namespace detail
-
- typedef unsigned int uint;
-}//namespace glm
-
-///////////////////////////////////////////////////////////////////////////////////
-// 64-bit int
-
-#if GLM_HAS_EXTENDED_INTEGER_TYPE
-# include <cstdint>
-#endif
-
-namespace glm{
-namespace detail
-{
-# if GLM_HAS_EXTENDED_INTEGER_TYPE
- typedef std::uint64_t uint64;
- typedef std::int64_t int64;
-# elif (defined(__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L)) // C99 detected, 64 bit types available
- typedef uint64_t uint64;
- typedef int64_t int64;
-# elif GLM_COMPILER & GLM_COMPILER_VC
- typedef unsigned __int64 uint64;
- typedef signed __int64 int64;
-# elif GLM_COMPILER & GLM_COMPILER_GCC
-# pragma GCC diagnostic ignored "-Wlong-long"
- __extension__ typedef unsigned long long uint64;
- __extension__ typedef signed long long int64;
-# elif (GLM_COMPILER & GLM_COMPILER_CLANG)
-# pragma clang diagnostic ignored "-Wc++11-long-long"
- typedef unsigned long long uint64;
- typedef signed long long int64;
-# else//unknown compiler
- typedef unsigned long long uint64;
- typedef signed long long int64;
-# endif
-}//namespace detail
-}//namespace glm
-
-///////////////////////////////////////////////////////////////////////////////////
-// make_unsigned
-
-#if GLM_HAS_MAKE_SIGNED
-# include <type_traits>
-
-namespace glm{
-namespace detail
-{
- using std::make_unsigned;
-}//namespace detail
-}//namespace glm
-
-#else
-
-namespace glm{
-namespace detail
-{
- template<typename genType>
- struct make_unsigned
- {};
-
- template<>
- struct make_unsigned<char>
- {
- typedef unsigned char type;
- };
-
- template<>
- struct make_unsigned<signed char>
- {
- typedef unsigned char type;
- };
-
- template<>
- struct make_unsigned<short>
- {
- typedef unsigned short type;
- };
-
- template<>
- struct make_unsigned<int>
- {
- typedef unsigned int type;
- };
-
- template<>
- struct make_unsigned<long>
- {
- typedef unsigned long type;
- };
-
- template<>
- struct make_unsigned<int64>
- {
- typedef uint64 type;
- };
-
- template<>
- struct make_unsigned<unsigned char>
- {
- typedef unsigned char type;
- };
-
- template<>
- struct make_unsigned<unsigned short>
- {
- typedef unsigned short type;
- };
-
- template<>
- struct make_unsigned<unsigned int>
- {
- typedef unsigned int type;
- };
-
- template<>
- struct make_unsigned<unsigned long>
- {
- typedef unsigned long type;
- };
-
- template<>
- struct make_unsigned<uint64>
- {
- typedef
uint64 type; - }; -}//namespace detail -}//namespace glm -#endif - -/////////////////////////////////////////////////////////////////////////////////// -// Only use x, y, z, w as vector type components - -#ifdef GLM_FORCE_XYZW_ONLY -# define GLM_CONFIG_XYZW_ONLY GLM_ENABLE -#else -# define GLM_CONFIG_XYZW_ONLY GLM_DISABLE -#endif - -/////////////////////////////////////////////////////////////////////////////////// -// Configure the use of defaulted initialized types - -#define GLM_CTOR_INIT_DISABLE 0 -#define GLM_CTOR_INITIALIZER_LIST 1 -#define GLM_CTOR_INITIALISATION 2 - -#if defined(GLM_FORCE_CTOR_INIT) && GLM_HAS_INITIALIZER_LISTS -# define GLM_CONFIG_CTOR_INIT GLM_CTOR_INITIALIZER_LIST -#elif defined(GLM_FORCE_CTOR_INIT) && !GLM_HAS_INITIALIZER_LISTS -# define GLM_CONFIG_CTOR_INIT GLM_CTOR_INITIALISATION -#else -# define GLM_CONFIG_CTOR_INIT GLM_CTOR_INIT_DISABLE -#endif - -/////////////////////////////////////////////////////////////////////////////////// -// Use SIMD instruction sets - -#if GLM_HAS_ALIGNOF && (GLM_LANG & GLM_LANG_CXXMS_FLAG) && (GLM_ARCH & GLM_ARCH_SIMD_BIT) -# define GLM_CONFIG_SIMD GLM_ENABLE -#else -# define GLM_CONFIG_SIMD GLM_DISABLE -#endif - -/////////////////////////////////////////////////////////////////////////////////// -// Configure the use of defaulted function - -#if GLM_HAS_DEFAULTED_FUNCTIONS && GLM_CONFIG_CTOR_INIT == GLM_CTOR_INIT_DISABLE -# define GLM_CONFIG_DEFAULTED_FUNCTIONS GLM_ENABLE -# define GLM_DEFAULT = default -#else -# define GLM_CONFIG_DEFAULTED_FUNCTIONS GLM_DISABLE -# define GLM_DEFAULT -#endif - -/////////////////////////////////////////////////////////////////////////////////// -// Configure the use of aligned gentypes - -#ifdef GLM_FORCE_ALIGNED // Legacy define -# define GLM_FORCE_DEFAULT_ALIGNED_GENTYPES -#endif - -#ifdef GLM_FORCE_DEFAULT_ALIGNED_GENTYPES -# define GLM_FORCE_ALIGNED_GENTYPES -#endif - -#if GLM_HAS_ALIGNOF && (GLM_LANG & GLM_LANG_CXXMS_FLAG) && (defined(GLM_FORCE_ALIGNED_GENTYPES) || (GLM_CONFIG_SIMD == GLM_ENABLE)) -# define GLM_CONFIG_ALIGNED_GENTYPES GLM_ENABLE -#else -# define GLM_CONFIG_ALIGNED_GENTYPES GLM_DISABLE -#endif - -/////////////////////////////////////////////////////////////////////////////////// -// Configure the use of anonymous structure as implementation detail - -#if ((GLM_CONFIG_SIMD == GLM_ENABLE) || (GLM_CONFIG_SWIZZLE == GLM_SWIZZLE_OPERATOR) || (GLM_CONFIG_ALIGNED_GENTYPES == GLM_ENABLE)) -# define GLM_CONFIG_ANONYMOUS_STRUCT GLM_ENABLE -#else -# define GLM_CONFIG_ANONYMOUS_STRUCT GLM_DISABLE -#endif - -/////////////////////////////////////////////////////////////////////////////////// -// Silent warnings - -#ifdef GLM_FORCE_SILENT_WARNINGS -# define GLM_SILENT_WARNINGS GLM_ENABLE -#else -# define GLM_SILENT_WARNINGS GLM_DISABLE -#endif - -/////////////////////////////////////////////////////////////////////////////////// -// Precision - -#define GLM_HIGHP 1 -#define GLM_MEDIUMP 2 -#define GLM_LOWP 3 - -#if defined(GLM_FORCE_PRECISION_HIGHP_BOOL) || defined(GLM_PRECISION_HIGHP_BOOL) -# define GLM_CONFIG_PRECISION_BOOL GLM_HIGHP -#elif defined(GLM_FORCE_PRECISION_MEDIUMP_BOOL) || defined(GLM_PRECISION_MEDIUMP_BOOL) -# define GLM_CONFIG_PRECISION_BOOL GLM_MEDIUMP -#elif defined(GLM_FORCE_PRECISION_LOWP_BOOL) || defined(GLM_PRECISION_LOWP_BOOL) -# define GLM_CONFIG_PRECISION_BOOL GLM_LOWP -#else -# define GLM_CONFIG_PRECISION_BOOL GLM_HIGHP -#endif - -#if defined(GLM_FORCE_PRECISION_HIGHP_INT) || defined(GLM_PRECISION_HIGHP_INT) -# define GLM_CONFIG_PRECISION_INT GLM_HIGHP -#elif 
defined(GLM_FORCE_PRECISION_MEDIUMP_INT) || defined(GLM_PRECISION_MEDIUMP_INT) -# define GLM_CONFIG_PRECISION_INT GLM_MEDIUMP -#elif defined(GLM_FORCE_PRECISION_LOWP_INT) || defined(GLM_PRECISION_LOWP_INT) -# define GLM_CONFIG_PRECISION_INT GLM_LOWP -#else -# define GLM_CONFIG_PRECISION_INT GLM_HIGHP -#endif - -#if defined(GLM_FORCE_PRECISION_HIGHP_UINT) || defined(GLM_PRECISION_HIGHP_UINT) -# define GLM_CONFIG_PRECISION_UINT GLM_HIGHP -#elif defined(GLM_FORCE_PRECISION_MEDIUMP_UINT) || defined(GLM_PRECISION_MEDIUMP_UINT) -# define GLM_CONFIG_PRECISION_UINT GLM_MEDIUMP -#elif defined(GLM_FORCE_PRECISION_LOWP_UINT) || defined(GLM_PRECISION_LOWP_UINT) -# define GLM_CONFIG_PRECISION_UINT GLM_LOWP -#else -# define GLM_CONFIG_PRECISION_UINT GLM_HIGHP -#endif - -#if defined(GLM_FORCE_PRECISION_HIGHP_FLOAT) || defined(GLM_PRECISION_HIGHP_FLOAT) -# define GLM_CONFIG_PRECISION_FLOAT GLM_HIGHP -#elif defined(GLM_FORCE_PRECISION_MEDIUMP_FLOAT) || defined(GLM_PRECISION_MEDIUMP_FLOAT) -# define GLM_CONFIG_PRECISION_FLOAT GLM_MEDIUMP -#elif defined(GLM_FORCE_PRECISION_LOWP_FLOAT) || defined(GLM_PRECISION_LOWP_FLOAT) -# define GLM_CONFIG_PRECISION_FLOAT GLM_LOWP -#else -# define GLM_CONFIG_PRECISION_FLOAT GLM_HIGHP -#endif - -#if defined(GLM_FORCE_PRECISION_HIGHP_DOUBLE) || defined(GLM_PRECISION_HIGHP_DOUBLE) -# define GLM_CONFIG_PRECISION_DOUBLE GLM_HIGHP -#elif defined(GLM_FORCE_PRECISION_MEDIUMP_DOUBLE) || defined(GLM_PRECISION_MEDIUMP_DOUBLE) -# define GLM_CONFIG_PRECISION_DOUBLE GLM_MEDIUMP -#elif defined(GLM_FORCE_PRECISION_LOWP_DOUBLE) || defined(GLM_PRECISION_LOWP_DOUBLE) -# define GLM_CONFIG_PRECISION_DOUBLE GLM_LOWP -#else -# define GLM_CONFIG_PRECISION_DOUBLE GLM_HIGHP -#endif - -/////////////////////////////////////////////////////////////////////////////////// -// Check inclusions of different versions of GLM - -#elif ((GLM_SETUP_INCLUDED != GLM_VERSION) && !defined(GLM_FORCE_IGNORE_VERSION)) -# error "GLM error: A different version of GLM is already included. Define GLM_FORCE_IGNORE_VERSION before including GLM headers to ignore this error." 
-#elif GLM_SETUP_INCLUDED == GLM_VERSION - -/////////////////////////////////////////////////////////////////////////////////// -// Messages - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_MESSAGE_DISPLAYED) -# define GLM_MESSAGE_DISPLAYED -# define GLM_STR_HELPER(x) #x -# define GLM_STR(x) GLM_STR_HELPER(x) - - // Report GLM version -# pragma message (GLM_STR(GLM_VERSION_MESSAGE)) - - // Report C++ language -# if (GLM_LANG & GLM_LANG_CXX2A_FLAG) && (GLM_LANG & GLM_LANG_EXT) -# pragma message("GLM: C++ 2A with extensions") -# elif (GLM_LANG & GLM_LANG_CXX2A_FLAG) -# pragma message("GLM: C++ 2A") -# elif (GLM_LANG & GLM_LANG_CXX17_FLAG) && (GLM_LANG & GLM_LANG_EXT) -# pragma message("GLM: C++ 17 with extensions") -# elif (GLM_LANG & GLM_LANG_CXX17_FLAG) -# pragma message("GLM: C++ 17") -# elif (GLM_LANG & GLM_LANG_CXX14_FLAG) && (GLM_LANG & GLM_LANG_EXT) -# pragma message("GLM: C++ 14 with extensions") -# elif (GLM_LANG & GLM_LANG_CXX14_FLAG) -# pragma message("GLM: C++ 14") -# elif (GLM_LANG & GLM_LANG_CXX11_FLAG) && (GLM_LANG & GLM_LANG_EXT) -# pragma message("GLM: C++ 11 with extensions") -# elif (GLM_LANG & GLM_LANG_CXX11_FLAG) -# pragma message("GLM: C++ 11") -# elif (GLM_LANG & GLM_LANG_CXX0X_FLAG) && (GLM_LANG & GLM_LANG_EXT) -# pragma message("GLM: C++ 0x with extensions") -# elif (GLM_LANG & GLM_LANG_CXX0X_FLAG) -# pragma message("GLM: C++ 0x") -# elif (GLM_LANG & GLM_LANG_CXX03_FLAG) && (GLM_LANG & GLM_LANG_EXT) -# pragma message("GLM: C++ 03 with extensions") -# elif (GLM_LANG & GLM_LANG_CXX03_FLAG) -# pragma message("GLM: C++ 03") -# elif (GLM_LANG & GLM_LANG_CXX98_FLAG) && (GLM_LANG & GLM_LANG_EXT) -# pragma message("GLM: C++ 98 with extensions") -# elif (GLM_LANG & GLM_LANG_CXX98_FLAG) -# pragma message("GLM: C++ 98") -# else -# pragma message("GLM: C++ language undetected") -# endif//GLM_LANG - - // Report compiler detection -# if GLM_COMPILER & GLM_COMPILER_CUDA -# pragma message("GLM: CUDA compiler detected") -# elif GLM_COMPILER & GLM_COMPILER_VC -# pragma message("GLM: Visual C++ compiler detected") -# elif GLM_COMPILER & GLM_COMPILER_CLANG -# pragma message("GLM: Clang compiler detected") -# elif GLM_COMPILER & GLM_COMPILER_INTEL -# pragma message("GLM: Intel Compiler detected") -# elif GLM_COMPILER & GLM_COMPILER_GCC -# pragma message("GLM: GCC compiler detected") -# else -# pragma message("GLM: Compiler not detected") -# endif - - // Report build target -# if (GLM_ARCH & GLM_ARCH_AVX2_BIT) && (GLM_MODEL == GLM_MODEL_64) -# pragma message("GLM: x86 64 bits with AVX2 instruction set build target") -# elif (GLM_ARCH & GLM_ARCH_AVX2_BIT) && (GLM_MODEL == GLM_MODEL_32) -# pragma message("GLM: x86 32 bits with AVX2 instruction set build target") - -# elif (GLM_ARCH & GLM_ARCH_AVX_BIT) && (GLM_MODEL == GLM_MODEL_64) -# pragma message("GLM: x86 64 bits with AVX instruction set build target") -# elif (GLM_ARCH & GLM_ARCH_AVX_BIT) && (GLM_MODEL == GLM_MODEL_32) -# pragma message("GLM: x86 32 bits with AVX instruction set build target") - -# elif (GLM_ARCH & GLM_ARCH_SSE42_BIT) && (GLM_MODEL == GLM_MODEL_64) -# pragma message("GLM: x86 64 bits with SSE4.2 instruction set build target") -# elif (GLM_ARCH & GLM_ARCH_SSE42_BIT) && (GLM_MODEL == GLM_MODEL_32) -# pragma message("GLM: x86 32 bits with SSE4.2 instruction set build target") - -# elif (GLM_ARCH & GLM_ARCH_SSE41_BIT) && (GLM_MODEL == GLM_MODEL_64) -# pragma message("GLM: x86 64 bits with SSE4.1 instruction set build target") -# elif (GLM_ARCH & GLM_ARCH_SSE41_BIT) && (GLM_MODEL == GLM_MODEL_32) -# pragma message("GLM: 
x86 32 bits with SSE4.1 instruction set build target") - -# elif (GLM_ARCH & GLM_ARCH_SSSE3_BIT) && (GLM_MODEL == GLM_MODEL_64) -# pragma message("GLM: x86 64 bits with SSSE3 instruction set build target") -# elif (GLM_ARCH & GLM_ARCH_SSSE3_BIT) && (GLM_MODEL == GLM_MODEL_32) -# pragma message("GLM: x86 32 bits with SSSE3 instruction set build target") - -# elif (GLM_ARCH & GLM_ARCH_SSE3_BIT) && (GLM_MODEL == GLM_MODEL_64) -# pragma message("GLM: x86 64 bits with SSE3 instruction set build target") -# elif (GLM_ARCH & GLM_ARCH_SSE3_BIT) && (GLM_MODEL == GLM_MODEL_32) -# pragma message("GLM: x86 32 bits with SSE3 instruction set build target") - -# elif (GLM_ARCH & GLM_ARCH_SSE2_BIT) && (GLM_MODEL == GLM_MODEL_64) -# pragma message("GLM: x86 64 bits with SSE2 instruction set build target") -# elif (GLM_ARCH & GLM_ARCH_SSE2_BIT) && (GLM_MODEL == GLM_MODEL_32) -# pragma message("GLM: x86 32 bits with SSE2 instruction set build target") - -# elif (GLM_ARCH & GLM_ARCH_X86_BIT) && (GLM_MODEL == GLM_MODEL_64) -# pragma message("GLM: x86 64 bits build target") -# elif (GLM_ARCH & GLM_ARCH_X86_BIT) && (GLM_MODEL == GLM_MODEL_32) -# pragma message("GLM: x86 32 bits build target") - -# elif (GLM_ARCH & GLM_ARCH_NEON_BIT) && (GLM_MODEL == GLM_MODEL_64) -# pragma message("GLM: ARM 64 bits with Neon instruction set build target") -# elif (GLM_ARCH & GLM_ARCH_NEON_BIT) && (GLM_MODEL == GLM_MODEL_32) -# pragma message("GLM: ARM 32 bits with Neon instruction set build target") - -# elif (GLM_ARCH & GLM_ARCH_ARM_BIT) && (GLM_MODEL == GLM_MODEL_64) -# pragma message("GLM: ARM 64 bits build target") -# elif (GLM_ARCH & GLM_ARCH_ARM_BIT) && (GLM_MODEL == GLM_MODEL_32) -# pragma message("GLM: ARM 32 bits build target") - -# elif (GLM_ARCH & GLM_ARCH_MIPS_BIT) && (GLM_MODEL == GLM_MODEL_64) -# pragma message("GLM: MIPS 64 bits build target") -# elif (GLM_ARCH & GLM_ARCH_MIPS_BIT) && (GLM_MODEL == GLM_MODEL_32) -# pragma message("GLM: MIPS 32 bits build target") - -# elif (GLM_ARCH & GLM_ARCH_PPC_BIT) && (GLM_MODEL == GLM_MODEL_64) -# pragma message("GLM: PowerPC 64 bits build target") -# elif (GLM_ARCH & GLM_ARCH_PPC_BIT) && (GLM_MODEL == GLM_MODEL_32) -# pragma message("GLM: PowerPC 32 bits build target") -# else -# pragma message("GLM: Unknown build target") -# endif//GLM_ARCH - - // Report platform name -# if(GLM_PLATFORM & GLM_PLATFORM_QNXNTO) -# pragma message("GLM: QNX platform detected") -//# elif(GLM_PLATFORM & GLM_PLATFORM_IOS) -//# pragma message("GLM: iOS platform detected") -# elif(GLM_PLATFORM & GLM_PLATFORM_APPLE) -# pragma message("GLM: Apple platform detected") -# elif(GLM_PLATFORM & GLM_PLATFORM_WINCE) -# pragma message("GLM: WinCE platform detected") -# elif(GLM_PLATFORM & GLM_PLATFORM_WINDOWS) -# pragma message("GLM: Windows platform detected") -# elif(GLM_PLATFORM & GLM_PLATFORM_CHROME_NACL) -# pragma message("GLM: Native Client detected") -# elif(GLM_PLATFORM & GLM_PLATFORM_ANDROID) -# pragma message("GLM: Android platform detected") -# elif(GLM_PLATFORM & GLM_PLATFORM_LINUX) -# pragma message("GLM: Linux platform detected") -# elif(GLM_PLATFORM & GLM_PLATFORM_UNIX) -# pragma message("GLM: UNIX platform detected") -# elif(GLM_PLATFORM & GLM_PLATFORM_UNKNOWN) -# pragma message("GLM: platform unknown") -# else -# pragma message("GLM: platform not detected") -# endif - - // Report whether only xyzw component are used -# if defined GLM_FORCE_XYZW_ONLY -# pragma message("GLM: GLM_FORCE_XYZW_ONLY is defined. Only x, y, z and w component are available in vector type. 
This define disables swizzle operators and SIMD instruction sets.") -# endif - - // Report swizzle operator support -# if GLM_CONFIG_SWIZZLE == GLM_SWIZZLE_OPERATOR -# pragma message("GLM: GLM_FORCE_SWIZZLE is defined, swizzling operators enabled.") -# elif GLM_CONFIG_SWIZZLE == GLM_SWIZZLE_FUNCTION -# pragma message("GLM: GLM_FORCE_SWIZZLE is defined, swizzling functions enabled. Enable compiler C++ language extensions to enable swizzle operators.") -# else -# pragma message("GLM: GLM_FORCE_SWIZZLE is undefined. swizzling functions or operators are disabled.") -# endif - - // Report .length() type -# if GLM_CONFIG_LENGTH_TYPE == GLM_LENGTH_SIZE_T -# pragma message("GLM: GLM_FORCE_SIZE_T_LENGTH is defined. .length() returns a glm::length_t, a typedef of std::size_t.") -# else -# pragma message("GLM: GLM_FORCE_SIZE_T_LENGTH is undefined. .length() returns a glm::length_t, a typedef of int following GLSL.") -# endif - -# if GLM_CONFIG_UNRESTRICTED_GENTYPE == GLM_ENABLE -# pragma message("GLM: GLM_FORCE_UNRESTRICTED_GENTYPE is defined. Removes GLSL restrictions on valid function genTypes.") -# else -# pragma message("GLM: GLM_FORCE_UNRESTRICTED_GENTYPE is undefined. Follows strictly GLSL on valid function genTypes.") -# endif - -# if GLM_SILENT_WARNINGS == GLM_ENABLE -# pragma message("GLM: GLM_FORCE_SILENT_WARNINGS is defined. Ignores C++ warnings from using C++ language extensions.") -# else -# pragma message("GLM: GLM_FORCE_SILENT_WARNINGS is undefined. Shows C++ warnings from using C++ language extensions.") -# endif - -# ifdef GLM_FORCE_SINGLE_ONLY -# pragma message("GLM: GLM_FORCE_SINGLE_ONLY is defined. Using only single precision floating-point types.") -# endif - -# if defined(GLM_FORCE_ALIGNED_GENTYPES) && (GLM_CONFIG_ALIGNED_GENTYPES == GLM_ENABLE) -# undef GLM_FORCE_ALIGNED_GENTYPES -# pragma message("GLM: GLM_FORCE_ALIGNED_GENTYPES is defined, allowing aligned types. This prevents the use of C++ constexpr.") -# elif defined(GLM_FORCE_ALIGNED_GENTYPES) && (GLM_CONFIG_ALIGNED_GENTYPES == GLM_DISABLE) -# undef GLM_FORCE_ALIGNED_GENTYPES -# pragma message("GLM: GLM_FORCE_ALIGNED_GENTYPES is defined but is disabled. It requires C++11 and language extensions.") -# endif - -# if defined(GLM_FORCE_DEFAULT_ALIGNED_GENTYPES) -# if GLM_CONFIG_ALIGNED_GENTYPES == GLM_DISABLE -# undef GLM_FORCE_DEFAULT_ALIGNED_GENTYPES -# pragma message("GLM: GLM_FORCE_DEFAULT_ALIGNED_GENTYPES is defined but is disabled. It requires C++11 and language extensions.") -# elif GLM_CONFIG_ALIGNED_GENTYPES == GLM_ENABLE -# pragma message("GLM: GLM_FORCE_DEFAULT_ALIGNED_GENTYPES is defined. All gentypes (e.g. vec3) will be aligned and padded by default.") -# endif -# endif - -# if GLM_CONFIG_CLIP_CONTROL & GLM_CLIP_CONTROL_ZO_BIT -# pragma message("GLM: GLM_FORCE_DEPTH_ZERO_TO_ONE is defined. Using zero to one depth clip space.") -# else -# pragma message("GLM: GLM_FORCE_DEPTH_ZERO_TO_ONE is undefined. Using negative one to one depth clip space.") -# endif - -# if GLM_CONFIG_CLIP_CONTROL & GLM_CLIP_CONTROL_LH_BIT -# pragma message("GLM: GLM_FORCE_LEFT_HANDED is defined. Using left handed coordinate system.") -# else -# pragma message("GLM: GLM_FORCE_LEFT_HANDED is undefined. 
Using right handed coordinate system.")
-# endif
-#endif//GLM_MESSAGES
-
-#endif//GLM_SETUP_INCLUDED
diff --git a/third_party/glm/detail/type_float.hpp b/third_party/glm/detail/type_float.hpp
deleted file mode 100755
index c8037eb..0000000
--- a/third_party/glm/detail/type_float.hpp
+++ /dev/null
@@ -1,68 +0,0 @@
-#pragma once
-
-#include "setup.hpp"
-
-#if GLM_COMPILER == GLM_COMPILER_VC12
-# pragma warning(push)
-# pragma warning(disable: 4512) // assignment operator could not be generated
-#endif
-
-namespace glm{
-namespace detail
-{
- template <typename T>
- union float_t
- {};
-
- // https://randomascii.wordpress.com/2012/02/25/comparing-floating-point-numbers-2012-edition/
- template <>
- union float_t<float>
- {
- typedef int int_type;
- typedef float float_type;
-
- GLM_CONSTEXPR float_t(float_type Num = 0.0f) : f(Num) {}
-
- GLM_CONSTEXPR float_t& operator=(float_t const& x)
- {
- f = x.f;
- return *this;
- }
-
- // Portable extraction of components.
- GLM_CONSTEXPR bool negative() const { return i < 0; }
- GLM_CONSTEXPR int_type mantissa() const { return i & ((1 << 23) - 1); }
- GLM_CONSTEXPR int_type exponent() const { return (i >> 23) & ((1 << 8) - 1); }
-
- int_type i;
- float_type f;
- };
-
- template <>
- union float_t<double>
- {
- typedef detail::int64 int_type;
- typedef double float_type;
-
- GLM_CONSTEXPR float_t(float_type Num = static_cast<float_type>(0)) : f(Num) {}
-
- GLM_CONSTEXPR float_t& operator=(float_t const& x)
- {
- f = x.f;
- return *this;
- }
-
- // Portable extraction of components.
- GLM_CONSTEXPR bool negative() const { return i < 0; }
- GLM_CONSTEXPR int_type mantissa() const { return i & ((int_type(1) << 52) - 1); }
- GLM_CONSTEXPR int_type exponent() const { return (i >> 52) & ((int_type(1) << 11) - 1); }
-
- int_type i;
- float_type f;
- };
-}//namespace detail
-}//namespace glm
-
-#if GLM_COMPILER == GLM_COMPILER_VC12
-# pragma warning(pop)
-#endif
diff --git a/third_party/glm/detail/type_half.hpp b/third_party/glm/detail/type_half.hpp
deleted file mode 100755
index 40b8bec..0000000
--- a/third_party/glm/detail/type_half.hpp
+++ /dev/null
@@ -1,16 +0,0 @@
-#pragma once
-
-#include "setup.hpp"
-
-namespace glm{
-namespace detail
-{
- typedef short hdata;
-
- GLM_FUNC_DECL float toFloat32(hdata value);
- GLM_FUNC_DECL hdata toFloat16(float const& value);
-
-}//namespace detail
-}//namespace glm
-
-#include "type_half.inl"
diff --git a/third_party/glm/detail/type_half.inl b/third_party/glm/detail/type_half.inl
deleted file mode 100755
index b0723e3..0000000
--- a/third_party/glm/detail/type_half.inl
+++ /dev/null
@@ -1,241 +0,0 @@
-namespace glm{
-namespace detail
-{
- GLM_FUNC_QUALIFIER float overflow()
- {
- volatile float f = 1e10;
-
- for(int i = 0; i < 10; ++i)
- f *= f; // this will overflow before the for loop terminates
- return f;
- }
-
- union uif32
- {
- GLM_FUNC_QUALIFIER uif32() :
- i(0)
- {}
-
- GLM_FUNC_QUALIFIER uif32(float f_) :
- f(f_)
- {}
-
- GLM_FUNC_QUALIFIER uif32(unsigned int i_) :
- i(i_)
- {}
-
- float f;
- unsigned int i;
- };
-
- GLM_FUNC_QUALIFIER float toFloat32(hdata value)
- {
- int s = (value >> 15) & 0x00000001;
- int e = (value >> 10) & 0x0000001f;
- int m = value & 0x000003ff;
-
- if(e == 0)
- {
- if(m == 0)
- {
- //
- // Plus or minus zero
- //
-
- detail::uif32 result;
- result.i = static_cast<unsigned int>(s << 31);
- return result.f;
- }
- else
- {
- //
- // Denormalized number -- renormalize it
- //
-
- while(!(m & 0x00000400))
- {
- m <<= 1;
- e -= 1;
- }
-
- e += 1;
- m &= ~0x00000400;
- }
- }
- else if(e == 31)
- {
- if(m == 0)
- {
- //
- // Positive or
negative infinity - // - - uif32 result; - result.i = static_cast((s << 31) | 0x7f800000); - return result.f; - } - else - { - // - // Nan -- preserve sign and significand bits - // - - uif32 result; - result.i = static_cast((s << 31) | 0x7f800000 | (m << 13)); - return result.f; - } - } - - // - // Normalized number - // - - e = e + (127 - 15); - m = m << 13; - - // - // Assemble s, e and m. - // - - uif32 Result; - Result.i = static_cast((s << 31) | (e << 23) | m); - return Result.f; - } - - GLM_FUNC_QUALIFIER hdata toFloat16(float const& f) - { - uif32 Entry; - Entry.f = f; - int i = static_cast(Entry.i); - - // - // Our floating point number, f, is represented by the bit - // pattern in integer i. Disassemble that bit pattern into - // the sign, s, the exponent, e, and the significand, m. - // Shift s into the position where it will go in the - // resulting half number. - // Adjust e, accounting for the different exponent bias - // of float and half (127 versus 15). - // - - int s = (i >> 16) & 0x00008000; - int e = ((i >> 23) & 0x000000ff) - (127 - 15); - int m = i & 0x007fffff; - - // - // Now reassemble s, e and m into a half: - // - - if(e <= 0) - { - if(e < -10) - { - // - // E is less than -10. The absolute value of f is - // less than half_MIN (f may be a small normalized - // float, a denormalized float or a zero). - // - // We convert f to a half zero. - // - - return hdata(s); - } - - // - // E is between -10 and 0. F is a normalized float, - // whose magnitude is less than __half_NRM_MIN. - // - // We convert f to a denormalized half. - // - - m = (m | 0x00800000) >> (1 - e); - - // - // Round to nearest, round "0.5" up. - // - // Rounding may cause the significand to overflow and make - // our number normalized. Because of the way a half's bits - // are laid out, we don't have to treat this case separately; - // the code below will handle it correctly. - // - - if(m & 0x00001000) - m += 0x00002000; - - // - // Assemble the half from s, e (zero) and m. - // - - return hdata(s | (m >> 13)); - } - else if(e == 0xff - (127 - 15)) - { - if(m == 0) - { - // - // F is an infinity; convert f to a half - // infinity with the same sign as f. - // - - return hdata(s | 0x7c00); - } - else - { - // - // F is a NAN; we produce a half NAN that preserves - // the sign bit and the 10 leftmost bits of the - // significand of f, with one exception: If the 10 - // leftmost bits are all zero, the NAN would turn - // into an infinity, so we have to set at least one - // bit in the significand. - // - - m >>= 13; - - return hdata(s | 0x7c00 | m | (m == 0)); - } - } - else - { - // - // E is greater than zero. F is a normalized float. - // We try to convert f to a normalized half. - // - - // - // Round to nearest, round "0.5" up - // - - if(m & 0x00001000) - { - m += 0x00002000; - - if(m & 0x00800000) - { - m = 0; // overflow in significand, - e += 1; // adjust exponent - } - } - - // - // Handle exponent overflow - // - - if (e > 30) - { - overflow(); // Cause a hardware floating point overflow; - - return hdata(s | 0x7c00); - // if this returns, the half becomes an - } // infinity with the same sign as f. - - // - // Assemble the half from s, e and m. 
- // - - return hdata(s | (e << 10) | (m >> 13)); - } - } - -}//namespace detail -}//namespace glm diff --git a/third_party/glm/detail/type_mat2x2.hpp b/third_party/glm/detail/type_mat2x2.hpp deleted file mode 100755 index 033908f..0000000 --- a/third_party/glm/detail/type_mat2x2.hpp +++ /dev/null @@ -1,177 +0,0 @@ -/// @ref core -/// @file glm/detail/type_mat2x2.hpp - -#pragma once - -#include "type_vec2.hpp" -#include -#include - -namespace glm -{ - template - struct mat<2, 2, T, Q> - { - typedef vec<2, T, Q> col_type; - typedef vec<2, T, Q> row_type; - typedef mat<2, 2, T, Q> type; - typedef mat<2, 2, T, Q> transpose_type; - typedef T value_type; - - private: - col_type value[2]; - - public: - // -- Accesses -- - - typedef length_t length_type; - GLM_FUNC_DECL static GLM_CONSTEXPR length_type length() { return 2; } - - GLM_FUNC_DECL col_type & operator[](length_type i); - GLM_FUNC_DECL GLM_CONSTEXPR col_type const& operator[](length_type i) const; - - // -- Constructors -- - - GLM_FUNC_DECL GLM_CONSTEXPR mat() GLM_DEFAULT; - template - GLM_FUNC_DECL GLM_CONSTEXPR mat(mat<2, 2, T, P> const& m); - - GLM_FUNC_DECL explicit GLM_CONSTEXPR mat(T scalar); - GLM_FUNC_DECL GLM_CONSTEXPR mat( - T const& x1, T const& y1, - T const& x2, T const& y2); - GLM_FUNC_DECL GLM_CONSTEXPR mat( - col_type const& v1, - col_type const& v2); - - // -- Conversions -- - - template - GLM_FUNC_DECL GLM_CONSTEXPR mat( - U const& x1, V const& y1, - M const& x2, N const& y2); - - template - GLM_FUNC_DECL GLM_CONSTEXPR mat( - vec<2, U, Q> const& v1, - vec<2, V, Q> const& v2); - - // -- Matrix conversions -- - - template - GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<2, 2, U, P> const& m); - - GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<3, 3, T, Q> const& x); - GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<4, 4, T, Q> const& x); - GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<2, 3, T, Q> const& x); - GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<3, 2, T, Q> const& x); - GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<2, 4, T, Q> const& x); - GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<4, 2, T, Q> const& x); - GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<3, 4, T, Q> const& x); - GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<4, 3, T, Q> const& x); - - // -- Unary arithmetic operators -- - - template - GLM_FUNC_DECL mat<2, 2, T, Q> & operator=(mat<2, 2, U, Q> const& m); - template - GLM_FUNC_DECL mat<2, 2, T, Q> & operator+=(U s); - template - GLM_FUNC_DECL mat<2, 2, T, Q> & operator+=(mat<2, 2, U, Q> const& m); - template - GLM_FUNC_DECL mat<2, 2, T, Q> & operator-=(U s); - template - GLM_FUNC_DECL mat<2, 2, T, Q> & operator-=(mat<2, 2, U, Q> const& m); - template - GLM_FUNC_DECL mat<2, 2, T, Q> & operator*=(U s); - template - GLM_FUNC_DECL mat<2, 2, T, Q> & operator*=(mat<2, 2, U, Q> const& m); - template - GLM_FUNC_DECL mat<2, 2, T, Q> & operator/=(U s); - template - GLM_FUNC_DECL mat<2, 2, T, Q> & operator/=(mat<2, 2, U, Q> const& m); - - // -- Increment and decrement operators -- - - GLM_FUNC_DECL mat<2, 2, T, Q> & operator++ (); - GLM_FUNC_DECL mat<2, 2, T, Q> & operator-- (); - GLM_FUNC_DECL mat<2, 2, T, Q> operator++(int); - GLM_FUNC_DECL mat<2, 2, T, Q> operator--(int); - }; - - // -- Unary operators -- - - template - GLM_FUNC_DECL mat<2, 2, T, Q> operator+(mat<2, 2, T, Q> const& m); - - template - GLM_FUNC_DECL mat<2, 2, T, Q> operator-(mat<2, 2, T, Q> const& m); - - // -- Binary operators -- - - template - GLM_FUNC_DECL mat<2, 2, T, Q> operator+(mat<2, 2, T, Q> const& m, T scalar); 
- - template - GLM_FUNC_DECL mat<2, 2, T, Q> operator+(T scalar, mat<2, 2, T, Q> const& m); - - template - GLM_FUNC_DECL mat<2, 2, T, Q> operator+(mat<2, 2, T, Q> const& m1, mat<2, 2, T, Q> const& m2); - - template - GLM_FUNC_DECL mat<2, 2, T, Q> operator-(mat<2, 2, T, Q> const& m, T scalar); - - template - GLM_FUNC_DECL mat<2, 2, T, Q> operator-(T scalar, mat<2, 2, T, Q> const& m); - - template - GLM_FUNC_DECL mat<2, 2, T, Q> operator-(mat<2, 2, T, Q> const& m1, mat<2, 2, T, Q> const& m2); - - template - GLM_FUNC_DECL mat<2, 2, T, Q> operator*(mat<2, 2, T, Q> const& m, T scalar); - - template - GLM_FUNC_DECL mat<2, 2, T, Q> operator*(T scalar, mat<2, 2, T, Q> const& m); - - template - GLM_FUNC_DECL typename mat<2, 2, T, Q>::col_type operator*(mat<2, 2, T, Q> const& m, typename mat<2, 2, T, Q>::row_type const& v); - - template - GLM_FUNC_DECL typename mat<2, 2, T, Q>::row_type operator*(typename mat<2, 2, T, Q>::col_type const& v, mat<2, 2, T, Q> const& m); - - template - GLM_FUNC_DECL mat<2, 2, T, Q> operator*(mat<2, 2, T, Q> const& m1, mat<2, 2, T, Q> const& m2); - - template - GLM_FUNC_DECL mat<3, 2, T, Q> operator*(mat<2, 2, T, Q> const& m1, mat<3, 2, T, Q> const& m2); - - template - GLM_FUNC_DECL mat<4, 2, T, Q> operator*(mat<2, 2, T, Q> const& m1, mat<4, 2, T, Q> const& m2); - - template - GLM_FUNC_DECL mat<2, 2, T, Q> operator/(mat<2, 2, T, Q> const& m, T scalar); - - template - GLM_FUNC_DECL mat<2, 2, T, Q> operator/(T scalar, mat<2, 2, T, Q> const& m); - - template - GLM_FUNC_DECL typename mat<2, 2, T, Q>::col_type operator/(mat<2, 2, T, Q> const& m, typename mat<2, 2, T, Q>::row_type const& v); - - template - GLM_FUNC_DECL typename mat<2, 2, T, Q>::row_type operator/(typename mat<2, 2, T, Q>::col_type const& v, mat<2, 2, T, Q> const& m); - - template - GLM_FUNC_DECL mat<2, 2, T, Q> operator/(mat<2, 2, T, Q> const& m1, mat<2, 2, T, Q> const& m2); - - // -- Boolean operators -- - - template - GLM_FUNC_DECL bool operator==(mat<2, 2, T, Q> const& m1, mat<2, 2, T, Q> const& m2); - - template - GLM_FUNC_DECL bool operator!=(mat<2, 2, T, Q> const& m1, mat<2, 2, T, Q> const& m2); -} //namespace glm - -#ifndef GLM_EXTERNAL_TEMPLATE -#include "type_mat2x2.inl" -#endif diff --git a/third_party/glm/detail/type_mat2x2.inl b/third_party/glm/detail/type_mat2x2.inl deleted file mode 100755 index fe5d1aa..0000000 --- a/third_party/glm/detail/type_mat2x2.inl +++ /dev/null @@ -1,536 +0,0 @@ -#include "../matrix.hpp" - -namespace glm -{ - // -- Constructors -- - -# if GLM_CONFIG_DEFAULTED_FUNCTIONS == GLM_DISABLE - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<2, 2, T, Q>::mat() -# if GLM_CONFIG_CTOR_INIT == GLM_CTOR_INITIALIZER_LIST - : value{col_type(1, 0), col_type(0, 1)} -# endif - { -# if GLM_CONFIG_CTOR_INIT == GLM_CTOR_INITIALISATION - this->value[0] = col_type(1, 0); - this->value[1] = col_type(0, 1); -# endif - } -# endif - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<2, 2, T, Q>::mat(mat<2, 2, T, P> const& m) -# if GLM_HAS_INITIALIZER_LISTS - : value{m[0], m[1]} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = m[0]; - this->value[1] = m[1]; -# endif - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<2, 2, T, Q>::mat(T scalar) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(scalar, 0), col_type(0, scalar)} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(scalar, 0); - this->value[1] = col_type(0, scalar); -# endif - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<2, 2, T, Q>::mat - ( - T const& x0, T const& y0, - T 
const& x1, T const& y1 - ) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(x0, y0), col_type(x1, y1)} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(x0, y0); - this->value[1] = col_type(x1, y1); -# endif - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<2, 2, T, Q>::mat(col_type const& v0, col_type const& v1) -# if GLM_HAS_INITIALIZER_LISTS - : value{v0, v1} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = v0; - this->value[1] = v1; -# endif - } - - // -- Conversion constructors -- - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<2, 2, T, Q>::mat - ( - X1 const& x1, Y1 const& y1, - X2 const& x2, Y2 const& y2 - ) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(static_cast(x1), value_type(y1)), col_type(static_cast(x2), value_type(y2)) } -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(static_cast(x1), value_type(y1)); - this->value[1] = col_type(static_cast(x2), value_type(y2)); -# endif - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<2, 2, T, Q>::mat(vec<2, V1, Q> const& v1, vec<2, V2, Q> const& v2) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(v1), col_type(v2)} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(v1); - this->value[1] = col_type(v2); -# endif - } - - // -- mat2x2 matrix conversions -- - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<2, 2, T, Q>::mat(mat<2, 2, U, P> const& m) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(m[0]), col_type(m[1])} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(m[0]); - this->value[1] = col_type(m[1]); -# endif - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<2, 2, T, Q>::mat(mat<3, 3, T, Q> const& m) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(m[0]), col_type(m[1])} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(m[0]); - this->value[1] = col_type(m[1]); -# endif - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<2, 2, T, Q>::mat(mat<4, 4, T, Q> const& m) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(m[0]), col_type(m[1])} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(m[0]); - this->value[1] = col_type(m[1]); -# endif - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<2, 2, T, Q>::mat(mat<2, 3, T, Q> const& m) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(m[0]), col_type(m[1])} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(m[0]); - this->value[1] = col_type(m[1]); -# endif - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<2, 2, T, Q>::mat(mat<3, 2, T, Q> const& m) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(m[0]), col_type(m[1])} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(m[0]); - this->value[1] = col_type(m[1]); -# endif - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<2, 2, T, Q>::mat(mat<2, 4, T, Q> const& m) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(m[0]), col_type(m[1])} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(m[0]); - this->value[1] = col_type(m[1]); -# endif - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<2, 2, T, Q>::mat(mat<4, 2, T, Q> const& m) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(m[0]), col_type(m[1])} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(m[0]); - this->value[1] = col_type(m[1]); -# endif - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<2, 2, T, Q>::mat(mat<3, 4, T, Q> const& m) 
-# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(m[0]), col_type(m[1])} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(m[0]); - this->value[1] = col_type(m[1]); -# endif - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<2, 2, T, Q>::mat(mat<4, 3, T, Q> const& m) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(m[0]), col_type(m[1])} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(m[0]); - this->value[1] = col_type(m[1]); -# endif - } - - // -- Accesses -- - - template - GLM_FUNC_QUALIFIER typename mat<2, 2, T, Q>::col_type& mat<2, 2, T, Q>::operator[](typename mat<2, 2, T, Q>::length_type i) - { - assert(i < this->length()); - return this->value[i]; - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR typename mat<2, 2, T, Q>::col_type const& mat<2, 2, T, Q>::operator[](typename mat<2, 2, T, Q>::length_type i) const - { - assert(i < this->length()); - return this->value[i]; - } - - // -- Unary updatable operators -- - - template - template - GLM_FUNC_QUALIFIER mat<2, 2, T, Q>& mat<2, 2, T, Q>::operator=(mat<2, 2, U, Q> const& m) - { - this->value[0] = m[0]; - this->value[1] = m[1]; - return *this; - } - - template - template - GLM_FUNC_QUALIFIER mat<2, 2, T, Q>& mat<2, 2, T, Q>::operator+=(U scalar) - { - this->value[0] += scalar; - this->value[1] += scalar; - return *this; - } - - template - template - GLM_FUNC_QUALIFIER mat<2, 2, T, Q>& mat<2, 2, T, Q>::operator+=(mat<2, 2, U, Q> const& m) - { - this->value[0] += m[0]; - this->value[1] += m[1]; - return *this; - } - - template - template - GLM_FUNC_QUALIFIER mat<2, 2, T, Q>& mat<2, 2, T, Q>::operator-=(U scalar) - { - this->value[0] -= scalar; - this->value[1] -= scalar; - return *this; - } - - template - template - GLM_FUNC_QUALIFIER mat<2, 2, T, Q>& mat<2, 2, T, Q>::operator-=(mat<2, 2, U, Q> const& m) - { - this->value[0] -= m[0]; - this->value[1] -= m[1]; - return *this; - } - - template - template - GLM_FUNC_QUALIFIER mat<2, 2, T, Q>& mat<2, 2, T, Q>::operator*=(U scalar) - { - this->value[0] *= scalar; - this->value[1] *= scalar; - return *this; - } - - template - template - GLM_FUNC_QUALIFIER mat<2, 2, T, Q>& mat<2, 2, T, Q>::operator*=(mat<2, 2, U, Q> const& m) - { - return (*this = *this * m); - } - - template - template - GLM_FUNC_QUALIFIER mat<2, 2, T, Q>& mat<2, 2, T, Q>::operator/=(U scalar) - { - this->value[0] /= scalar; - this->value[1] /= scalar; - return *this; - } - - template - template - GLM_FUNC_QUALIFIER mat<2, 2, T, Q>& mat<2, 2, T, Q>::operator/=(mat<2, 2, U, Q> const& m) - { - return *this *= inverse(m); - } - - // -- Increment and decrement operators -- - - template - GLM_FUNC_QUALIFIER mat<2, 2, T, Q>& mat<2, 2, T, Q>::operator++() - { - ++this->value[0]; - ++this->value[1]; - return *this; - } - - template - GLM_FUNC_QUALIFIER mat<2, 2, T, Q>& mat<2, 2, T, Q>::operator--() - { - --this->value[0]; - --this->value[1]; - return *this; - } - - template - GLM_FUNC_QUALIFIER mat<2, 2, T, Q> mat<2, 2, T, Q>::operator++(int) - { - mat<2, 2, T, Q> Result(*this); - ++*this; - return Result; - } - - template - GLM_FUNC_QUALIFIER mat<2, 2, T, Q> mat<2, 2, T, Q>::operator--(int) - { - mat<2, 2, T, Q> Result(*this); - --*this; - return Result; - } - - // -- Unary arithmetic operators -- - - template - GLM_FUNC_QUALIFIER mat<2, 2, T, Q> operator+(mat<2, 2, T, Q> const& m) - { - return m; - } - - template - GLM_FUNC_QUALIFIER mat<2, 2, T, Q> operator-(mat<2, 2, T, Q> const& m) - { - return mat<2, 2, T, Q>( - -m[0], - -m[1]); - } - - // -- Binary arithmetic 
operators -- - - template - GLM_FUNC_QUALIFIER mat<2, 2, T, Q> operator+(mat<2, 2, T, Q> const& m, T scalar) - { - return mat<2, 2, T, Q>( - m[0] + scalar, - m[1] + scalar); - } - - template - GLM_FUNC_QUALIFIER mat<2, 2, T, Q> operator+(T scalar, mat<2, 2, T, Q> const& m) - { - return mat<2, 2, T, Q>( - m[0] + scalar, - m[1] + scalar); - } - - template - GLM_FUNC_QUALIFIER mat<2, 2, T, Q> operator+(mat<2, 2, T, Q> const& m1, mat<2, 2, T, Q> const& m2) - { - return mat<2, 2, T, Q>( - m1[0] + m2[0], - m1[1] + m2[1]); - } - - template - GLM_FUNC_QUALIFIER mat<2, 2, T, Q> operator-(mat<2, 2, T, Q> const& m, T scalar) - { - return mat<2, 2, T, Q>( - m[0] - scalar, - m[1] - scalar); - } - - template - GLM_FUNC_QUALIFIER mat<2, 2, T, Q> operator-(T scalar, mat<2, 2, T, Q> const& m) - { - return mat<2, 2, T, Q>( - scalar - m[0], - scalar - m[1]); - } - - template - GLM_FUNC_QUALIFIER mat<2, 2, T, Q> operator-(mat<2, 2, T, Q> const& m1, mat<2, 2, T, Q> const& m2) - { - return mat<2, 2, T, Q>( - m1[0] - m2[0], - m1[1] - m2[1]); - } - - template - GLM_FUNC_QUALIFIER mat<2, 2, T, Q> operator*(mat<2, 2, T, Q> const& m, T scalar) - { - return mat<2, 2, T, Q>( - m[0] * scalar, - m[1] * scalar); - } - - template - GLM_FUNC_QUALIFIER mat<2, 2, T, Q> operator*(T scalar, mat<2, 2, T, Q> const& m) - { - return mat<2, 2, T, Q>( - m[0] * scalar, - m[1] * scalar); - } - - template - GLM_FUNC_QUALIFIER typename mat<2, 2, T, Q>::col_type operator* - ( - mat<2, 2, T, Q> const& m, - typename mat<2, 2, T, Q>::row_type const& v - ) - { - return vec<2, T, Q>( - m[0][0] * v.x + m[1][0] * v.y, - m[0][1] * v.x + m[1][1] * v.y); - } - - template - GLM_FUNC_QUALIFIER typename mat<2, 2, T, Q>::row_type operator* - ( - typename mat<2, 2, T, Q>::col_type const& v, - mat<2, 2, T, Q> const& m - ) - { - return vec<2, T, Q>( - v.x * m[0][0] + v.y * m[0][1], - v.x * m[1][0] + v.y * m[1][1]); - } - - template - GLM_FUNC_QUALIFIER mat<2, 2, T, Q> operator*(mat<2, 2, T, Q> const& m1, mat<2, 2, T, Q> const& m2) - { - return mat<2, 2, T, Q>( - m1[0][0] * m2[0][0] + m1[1][0] * m2[0][1], - m1[0][1] * m2[0][0] + m1[1][1] * m2[0][1], - m1[0][0] * m2[1][0] + m1[1][0] * m2[1][1], - m1[0][1] * m2[1][0] + m1[1][1] * m2[1][1]); - } - - template - GLM_FUNC_QUALIFIER mat<3, 2, T, Q> operator*(mat<2, 2, T, Q> const& m1, mat<3, 2, T, Q> const& m2) - { - return mat<3, 2, T, Q>( - m1[0][0] * m2[0][0] + m1[1][0] * m2[0][1], - m1[0][1] * m2[0][0] + m1[1][1] * m2[0][1], - m1[0][0] * m2[1][0] + m1[1][0] * m2[1][1], - m1[0][1] * m2[1][0] + m1[1][1] * m2[1][1], - m1[0][0] * m2[2][0] + m1[1][0] * m2[2][1], - m1[0][1] * m2[2][0] + m1[1][1] * m2[2][1]); - } - - template - GLM_FUNC_QUALIFIER mat<4, 2, T, Q> operator*(mat<2, 2, T, Q> const& m1, mat<4, 2, T, Q> const& m2) - { - return mat<4, 2, T, Q>( - m1[0][0] * m2[0][0] + m1[1][0] * m2[0][1], - m1[0][1] * m2[0][0] + m1[1][1] * m2[0][1], - m1[0][0] * m2[1][0] + m1[1][0] * m2[1][1], - m1[0][1] * m2[1][0] + m1[1][1] * m2[1][1], - m1[0][0] * m2[2][0] + m1[1][0] * m2[2][1], - m1[0][1] * m2[2][0] + m1[1][1] * m2[2][1], - m1[0][0] * m2[3][0] + m1[1][0] * m2[3][1], - m1[0][1] * m2[3][0] + m1[1][1] * m2[3][1]); - } - - template - GLM_FUNC_QUALIFIER mat<2, 2, T, Q> operator/(mat<2, 2, T, Q> const& m, T scalar) - { - return mat<2, 2, T, Q>( - m[0] / scalar, - m[1] / scalar); - } - - template - GLM_FUNC_QUALIFIER mat<2, 2, T, Q> operator/(T scalar, mat<2, 2, T, Q> const& m) - { - return mat<2, 2, T, Q>( - scalar / m[0], - scalar / m[1]); - } - - template - GLM_FUNC_QUALIFIER typename mat<2, 2, T, Q>::col_type 
operator/(mat<2, 2, T, Q> const& m, typename mat<2, 2, T, Q>::row_type const& v) - { - return inverse(m) * v; - } - - template - GLM_FUNC_QUALIFIER typename mat<2, 2, T, Q>::row_type operator/(typename mat<2, 2, T, Q>::col_type const& v, mat<2, 2, T, Q> const& m) - { - return v * inverse(m); - } - - template - GLM_FUNC_QUALIFIER mat<2, 2, T, Q> operator/(mat<2, 2, T, Q> const& m1, mat<2, 2, T, Q> const& m2) - { - mat<2, 2, T, Q> m1_copy(m1); - return m1_copy /= m2; - } - - // -- Boolean operators -- - - template - GLM_FUNC_QUALIFIER bool operator==(mat<2, 2, T, Q> const& m1, mat<2, 2, T, Q> const& m2) - { - return (m1[0] == m2[0]) && (m1[1] == m2[1]); - } - - template - GLM_FUNC_QUALIFIER bool operator!=(mat<2, 2, T, Q> const& m1, mat<2, 2, T, Q> const& m2) - { - return (m1[0] != m2[0]) || (m1[1] != m2[1]); - } -} //namespace glm diff --git a/third_party/glm/detail/type_mat2x3.hpp b/third_party/glm/detail/type_mat2x3.hpp deleted file mode 100755 index d6596e4..0000000 --- a/third_party/glm/detail/type_mat2x3.hpp +++ /dev/null @@ -1,159 +0,0 @@ -/// @ref core -/// @file glm/detail/type_mat2x3.hpp - -#pragma once - -#include "type_vec2.hpp" -#include "type_vec3.hpp" -#include -#include - -namespace glm -{ - template - struct mat<2, 3, T, Q> - { - typedef vec<3, T, Q> col_type; - typedef vec<2, T, Q> row_type; - typedef mat<2, 3, T, Q> type; - typedef mat<3, 2, T, Q> transpose_type; - typedef T value_type; - - private: - col_type value[2]; - - public: - // -- Accesses -- - - typedef length_t length_type; - GLM_FUNC_DECL static GLM_CONSTEXPR length_type length() { return 2; } - - GLM_FUNC_DECL col_type & operator[](length_type i); - GLM_FUNC_DECL GLM_CONSTEXPR col_type const& operator[](length_type i) const; - - // -- Constructors -- - - GLM_FUNC_DECL GLM_CONSTEXPR mat() GLM_DEFAULT; - template - GLM_FUNC_DECL GLM_CONSTEXPR mat(mat<2, 3, T, P> const& m); - - GLM_FUNC_DECL explicit GLM_CONSTEXPR mat(T scalar); - GLM_FUNC_DECL GLM_CONSTEXPR mat( - T x0, T y0, T z0, - T x1, T y1, T z1); - GLM_FUNC_DECL GLM_CONSTEXPR mat( - col_type const& v0, - col_type const& v1); - - // -- Conversions -- - - template - GLM_FUNC_DECL GLM_CONSTEXPR mat( - X1 x1, Y1 y1, Z1 z1, - X2 x2, Y2 y2, Z2 z2); - - template - GLM_FUNC_DECL GLM_CONSTEXPR mat( - vec<3, U, Q> const& v1, - vec<3, V, Q> const& v2); - - // -- Matrix conversions -- - - template - GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<2, 3, U, P> const& m); - - GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<2, 2, T, Q> const& x); - GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<3, 3, T, Q> const& x); - GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<4, 4, T, Q> const& x); - GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<2, 4, T, Q> const& x); - GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<3, 2, T, Q> const& x); - GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<3, 4, T, Q> const& x); - GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<4, 2, T, Q> const& x); - GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<4, 3, T, Q> const& x); - - // -- Unary arithmetic operators -- - - template - GLM_FUNC_DECL mat<2, 3, T, Q> & operator=(mat<2, 3, U, Q> const& m); - template - GLM_FUNC_DECL mat<2, 3, T, Q> & operator+=(U s); - template - GLM_FUNC_DECL mat<2, 3, T, Q> & operator+=(mat<2, 3, U, Q> const& m); - template - GLM_FUNC_DECL mat<2, 3, T, Q> & operator-=(U s); - template - GLM_FUNC_DECL mat<2, 3, T, Q> & operator-=(mat<2, 3, U, Q> const& m); - template - GLM_FUNC_DECL mat<2, 3, T, Q> & operator*=(U s); - template - GLM_FUNC_DECL mat<2, 3, T, Q> & 
operator/=(U s); - - // -- Increment and decrement operators -- - - GLM_FUNC_DECL mat<2, 3, T, Q> & operator++ (); - GLM_FUNC_DECL mat<2, 3, T, Q> & operator-- (); - GLM_FUNC_DECL mat<2, 3, T, Q> operator++(int); - GLM_FUNC_DECL mat<2, 3, T, Q> operator--(int); - }; - - // -- Unary operators -- - - template - GLM_FUNC_DECL mat<2, 3, T, Q> operator+(mat<2, 3, T, Q> const& m); - - template - GLM_FUNC_DECL mat<2, 3, T, Q> operator-(mat<2, 3, T, Q> const& m); - - // -- Binary operators -- - - template - GLM_FUNC_DECL mat<2, 3, T, Q> operator+(mat<2, 3, T, Q> const& m, T scalar); - - template - GLM_FUNC_DECL mat<2, 3, T, Q> operator+(mat<2, 3, T, Q> const& m1, mat<2, 3, T, Q> const& m2); - - template - GLM_FUNC_DECL mat<2, 3, T, Q> operator-(mat<2, 3, T, Q> const& m, T scalar); - - template - GLM_FUNC_DECL mat<2, 3, T, Q> operator-(mat<2, 3, T, Q> const& m1, mat<2, 3, T, Q> const& m2); - - template - GLM_FUNC_DECL mat<2, 3, T, Q> operator*(mat<2, 3, T, Q> const& m, T scalar); - - template - GLM_FUNC_DECL mat<2, 3, T, Q> operator*(T scalar, mat<2, 3, T, Q> const& m); - - template - GLM_FUNC_DECL typename mat<2, 3, T, Q>::col_type operator*(mat<2, 3, T, Q> const& m, typename mat<2, 3, T, Q>::row_type const& v); - - template - GLM_FUNC_DECL typename mat<2, 3, T, Q>::row_type operator*(typename mat<2, 3, T, Q>::col_type const& v, mat<2, 3, T, Q> const& m); - - template - GLM_FUNC_DECL mat<2, 3, T, Q> operator*(mat<2, 3, T, Q> const& m1, mat<2, 2, T, Q> const& m2); - - template - GLM_FUNC_DECL mat<3, 3, T, Q> operator*(mat<2, 3, T, Q> const& m1, mat<3, 2, T, Q> const& m2); - - template - GLM_FUNC_DECL mat<4, 3, T, Q> operator*(mat<2, 3, T, Q> const& m1, mat<4, 2, T, Q> const& m2); - - template - GLM_FUNC_DECL mat<2, 3, T, Q> operator/(mat<2, 3, T, Q> const& m, T scalar); - - template - GLM_FUNC_DECL mat<2, 3, T, Q> operator/(T scalar, mat<2, 3, T, Q> const& m); - - // -- Boolean operators -- - - template - GLM_FUNC_DECL bool operator==(mat<2, 3, T, Q> const& m1, mat<2, 3, T, Q> const& m2); - - template - GLM_FUNC_DECL bool operator!=(mat<2, 3, T, Q> const& m1, mat<2, 3, T, Q> const& m2); -}//namespace glm - -#ifndef GLM_EXTERNAL_TEMPLATE -#include "type_mat2x3.inl" -#endif diff --git a/third_party/glm/detail/type_mat2x3.inl b/third_party/glm/detail/type_mat2x3.inl deleted file mode 100755 index 5fec17e..0000000 --- a/third_party/glm/detail/type_mat2x3.inl +++ /dev/null @@ -1,510 +0,0 @@ -namespace glm -{ - // -- Constructors -- - -# if GLM_CONFIG_DEFAULTED_FUNCTIONS == GLM_DISABLE - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<2, 3, T, Q>::mat() -# if GLM_CONFIG_CTOR_INIT == GLM_CTOR_INITIALIZER_LIST - : value{col_type(1, 0, 0), col_type(0, 1, 0)} -# endif - { -# if GLM_CONFIG_CTOR_INIT == GLM_CTOR_INITIALISATION - this->value[0] = col_type(1, 0, 0); - this->value[1] = col_type(0, 1, 0); -# endif - } -# endif - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<2, 3, T, Q>::mat(mat<2, 3, T, P> const& m) -# if GLM_HAS_INITIALIZER_LISTS - : value{m.value[0], m.value[1]} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = m.value[0]; - this->value[1] = m.value[1]; -# endif - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<2, 3, T, Q>::mat(T scalar) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(scalar, 0, 0), col_type(0, scalar, 0)} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(scalar, 0, 0); - this->value[1] = col_type(0, scalar, 0); -# endif - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<2, 3, T, Q>::mat - ( - T x0, T y0, T z0, 
- T x1, T y1, T z1 - ) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(x0, y0, z0), col_type(x1, y1, z1)} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(x0, y0, z0); - this->value[1] = col_type(x1, y1, z1); -# endif - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<2, 3, T, Q>::mat(col_type const& v0, col_type const& v1) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(v0), col_type(v1)} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(v0); - this->value[1] = col_type(v1); -# endif - } - - // -- Conversion constructors -- - - template - template< - typename X1, typename Y1, typename Z1, - typename X2, typename Y2, typename Z2> - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<2, 3, T, Q>::mat - ( - X1 x1, Y1 y1, Z1 z1, - X2 x2, Y2 y2, Z2 z2 - ) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(x1, y1, z1), col_type(x2, y2, z2)} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(x1, y1, z1); - this->value[1] = col_type(x2, y2, z2); -# endif - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<2, 3, T, Q>::mat(vec<3, V1, Q> const& v1, vec<3, V2, Q> const& v2) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(v1), col_type(v2)} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(v1); - this->value[1] = col_type(v2); -# endif - } - - // -- Matrix conversions -- - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<2, 3, T, Q>::mat(mat<2, 3, U, P> const& m) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(m[0]), col_type(m[1])} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(m[0]); - this->value[1] = col_type(m[1]); -# endif - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<2, 3, T, Q>::mat(mat<2, 2, T, Q> const& m) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(m[0], 0), col_type(m[1], 0)} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(m[0], 0); - this->value[1] = col_type(m[1], 0); -# endif - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<2, 3, T, Q>::mat(mat<3, 3, T, Q> const& m) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(m[0]), col_type(m[1])} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(m[0]); - this->value[1] = col_type(m[1]); -# endif - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<2, 3, T, Q>::mat(mat<4, 4, T, Q> const& m) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(m[0]), col_type(m[1])} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(m[0]); - this->value[1] = col_type(m[1]); -# endif - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<2, 3, T, Q>::mat(mat<2, 4, T, Q> const& m) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(m[0]), col_type(m[1])} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(m[0]); - this->value[1] = col_type(m[1]); -# endif - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<2, 3, T, Q>::mat(mat<3, 2, T, Q> const& m) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(m[0], 0), col_type(m[1], 0)} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(m[0], 0); - this->value[1] = col_type(m[1], 0); -# endif - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<2, 3, T, Q>::mat(mat<3, 4, T, Q> const& m) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(m[0]), col_type(m[1])} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(m[0]); - this->value[1] = col_type(m[1]); -# endif - } - - template - GLM_FUNC_QUALIFIER 
GLM_CONSTEXPR mat<2, 3, T, Q>::mat(mat<4, 2, T, Q> const& m) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(m[0], 0), col_type(m[1], 0)} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(m[0], 0); - this->value[1] = col_type(m[1], 0); -# endif - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<2, 3, T, Q>::mat(mat<4, 3, T, Q> const& m) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(m[0]), col_type(m[1])} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(m[0]); - this->value[1] = col_type(m[1]); -# endif - } - - // -- Accesses -- - - template - GLM_FUNC_QUALIFIER typename mat<2, 3, T, Q>::col_type & mat<2, 3, T, Q>::operator[](typename mat<2, 3, T, Q>::length_type i) - { - assert(i < this->length()); - return this->value[i]; - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR typename mat<2, 3, T, Q>::col_type const& mat<2, 3, T, Q>::operator[](typename mat<2, 3, T, Q>::length_type i) const - { - assert(i < this->length()); - return this->value[i]; - } - - // -- Unary updatable operators -- - - template - template - GLM_FUNC_QUALIFIER mat<2, 3, T, Q>& mat<2, 3, T, Q>::operator=(mat<2, 3, U, Q> const& m) - { - this->value[0] = m[0]; - this->value[1] = m[1]; - return *this; - } - - template - template - GLM_FUNC_QUALIFIER mat<2, 3, T, Q> & mat<2, 3, T, Q>::operator+=(U s) - { - this->value[0] += s; - this->value[1] += s; - return *this; - } - - template - template - GLM_FUNC_QUALIFIER mat<2, 3, T, Q>& mat<2, 3, T, Q>::operator+=(mat<2, 3, U, Q> const& m) - { - this->value[0] += m[0]; - this->value[1] += m[1]; - return *this; - } - - template - template - GLM_FUNC_QUALIFIER mat<2, 3, T, Q>& mat<2, 3, T, Q>::operator-=(U s) - { - this->value[0] -= s; - this->value[1] -= s; - return *this; - } - - template - template - GLM_FUNC_QUALIFIER mat<2, 3, T, Q>& mat<2, 3, T, Q>::operator-=(mat<2, 3, U, Q> const& m) - { - this->value[0] -= m[0]; - this->value[1] -= m[1]; - return *this; - } - - template - template - GLM_FUNC_QUALIFIER mat<2, 3, T, Q>& mat<2, 3, T, Q>::operator*=(U s) - { - this->value[0] *= s; - this->value[1] *= s; - return *this; - } - - template - template - GLM_FUNC_QUALIFIER mat<2, 3, T, Q> & mat<2, 3, T, Q>::operator/=(U s) - { - this->value[0] /= s; - this->value[1] /= s; - return *this; - } - - // -- Increment and decrement operators -- - - template - GLM_FUNC_QUALIFIER mat<2, 3, T, Q> & mat<2, 3, T, Q>::operator++() - { - ++this->value[0]; - ++this->value[1]; - return *this; - } - - template - GLM_FUNC_QUALIFIER mat<2, 3, T, Q> & mat<2, 3, T, Q>::operator--() - { - --this->value[0]; - --this->value[1]; - return *this; - } - - template - GLM_FUNC_QUALIFIER mat<2, 3, T, Q> mat<2, 3, T, Q>::operator++(int) - { - mat<2, 3, T, Q> Result(*this); - ++*this; - return Result; - } - - template - GLM_FUNC_QUALIFIER mat<2, 3, T, Q> mat<2, 3, T, Q>::operator--(int) - { - mat<2, 3, T, Q> Result(*this); - --*this; - return Result; - } - - // -- Unary arithmetic operators -- - - template - GLM_FUNC_QUALIFIER mat<2, 3, T, Q> operator+(mat<2, 3, T, Q> const& m) - { - return m; - } - - template - GLM_FUNC_QUALIFIER mat<2, 3, T, Q> operator-(mat<2, 3, T, Q> const& m) - { - return mat<2, 3, T, Q>( - -m[0], - -m[1]); - } - - // -- Binary arithmetic operators -- - - template - GLM_FUNC_QUALIFIER mat<2, 3, T, Q> operator+(mat<2, 3, T, Q> const& m, T scalar) - { - return mat<2, 3, T, Q>( - m[0] + scalar, - m[1] + scalar); - } - - template - GLM_FUNC_QUALIFIER mat<2, 3, T, Q> operator+(mat<2, 3, T, Q> const& m1, mat<2, 3, T, Q> const& m2) - { - 
return mat<2, 3, T, Q>( - m1[0] + m2[0], - m1[1] + m2[1]); - } - - template - GLM_FUNC_QUALIFIER mat<2, 3, T, Q> operator-(mat<2, 3, T, Q> const& m, T scalar) - { - return mat<2, 3, T, Q>( - m[0] - scalar, - m[1] - scalar); - } - - template - GLM_FUNC_QUALIFIER mat<2, 3, T, Q> operator-(mat<2, 3, T, Q> const& m1, mat<2, 3, T, Q> const& m2) - { - return mat<2, 3, T, Q>( - m1[0] - m2[0], - m1[1] - m2[1]); - } - - template - GLM_FUNC_QUALIFIER mat<2, 3, T, Q> operator*(mat<2, 3, T, Q> const& m, T scalar) - { - return mat<2, 3, T, Q>( - m[0] * scalar, - m[1] * scalar); - } - - template - GLM_FUNC_QUALIFIER mat<2, 3, T, Q> operator*(T scalar, mat<2, 3, T, Q> const& m) - { - return mat<2, 3, T, Q>( - m[0] * scalar, - m[1] * scalar); - } - - template - GLM_FUNC_QUALIFIER typename mat<2, 3, T, Q>::col_type operator* - ( - mat<2, 3, T, Q> const& m, - typename mat<2, 3, T, Q>::row_type const& v) - { - return typename mat<2, 3, T, Q>::col_type( - m[0][0] * v.x + m[1][0] * v.y, - m[0][1] * v.x + m[1][1] * v.y, - m[0][2] * v.x + m[1][2] * v.y); - } - - template - GLM_FUNC_QUALIFIER typename mat<2, 3, T, Q>::row_type operator* - ( - typename mat<2, 3, T, Q>::col_type const& v, - mat<2, 3, T, Q> const& m) - { - return typename mat<2, 3, T, Q>::row_type( - v.x * m[0][0] + v.y * m[0][1] + v.z * m[0][2], - v.x * m[1][0] + v.y * m[1][1] + v.z * m[1][2]); - } - - template - GLM_FUNC_QUALIFIER mat<2, 3, T, Q> operator*(mat<2, 3, T, Q> const& m1, mat<2, 2, T, Q> const& m2) - { - return mat<2, 3, T, Q>( - m1[0][0] * m2[0][0] + m1[1][0] * m2[0][1], - m1[0][1] * m2[0][0] + m1[1][1] * m2[0][1], - m1[0][2] * m2[0][0] + m1[1][2] * m2[0][1], - m1[0][0] * m2[1][0] + m1[1][0] * m2[1][1], - m1[0][1] * m2[1][0] + m1[1][1] * m2[1][1], - m1[0][2] * m2[1][0] + m1[1][2] * m2[1][1]); - } - - template - GLM_FUNC_QUALIFIER mat<3, 3, T, Q> operator*(mat<2, 3, T, Q> const& m1, mat<3, 2, T, Q> const& m2) - { - T SrcA00 = m1[0][0]; - T SrcA01 = m1[0][1]; - T SrcA02 = m1[0][2]; - T SrcA10 = m1[1][0]; - T SrcA11 = m1[1][1]; - T SrcA12 = m1[1][2]; - - T SrcB00 = m2[0][0]; - T SrcB01 = m2[0][1]; - T SrcB10 = m2[1][0]; - T SrcB11 = m2[1][1]; - T SrcB20 = m2[2][0]; - T SrcB21 = m2[2][1]; - - mat<3, 3, T, Q> Result; - Result[0][0] = SrcA00 * SrcB00 + SrcA10 * SrcB01; - Result[0][1] = SrcA01 * SrcB00 + SrcA11 * SrcB01; - Result[0][2] = SrcA02 * SrcB00 + SrcA12 * SrcB01; - Result[1][0] = SrcA00 * SrcB10 + SrcA10 * SrcB11; - Result[1][1] = SrcA01 * SrcB10 + SrcA11 * SrcB11; - Result[1][2] = SrcA02 * SrcB10 + SrcA12 * SrcB11; - Result[2][0] = SrcA00 * SrcB20 + SrcA10 * SrcB21; - Result[2][1] = SrcA01 * SrcB20 + SrcA11 * SrcB21; - Result[2][2] = SrcA02 * SrcB20 + SrcA12 * SrcB21; - return Result; - } - - template - GLM_FUNC_QUALIFIER mat<4, 3, T, Q> operator*(mat<2, 3, T, Q> const& m1, mat<4, 2, T, Q> const& m2) - { - return mat<4, 3, T, Q>( - m1[0][0] * m2[0][0] + m1[1][0] * m2[0][1], - m1[0][1] * m2[0][0] + m1[1][1] * m2[0][1], - m1[0][2] * m2[0][0] + m1[1][2] * m2[0][1], - m1[0][0] * m2[1][0] + m1[1][0] * m2[1][1], - m1[0][1] * m2[1][0] + m1[1][1] * m2[1][1], - m1[0][2] * m2[1][0] + m1[1][2] * m2[1][1], - m1[0][0] * m2[2][0] + m1[1][0] * m2[2][1], - m1[0][1] * m2[2][0] + m1[1][1] * m2[2][1], - m1[0][2] * m2[2][0] + m1[1][2] * m2[2][1], - m1[0][0] * m2[3][0] + m1[1][0] * m2[3][1], - m1[0][1] * m2[3][0] + m1[1][1] * m2[3][1], - m1[0][2] * m2[3][0] + m1[1][2] * m2[3][1]); - } - - template - GLM_FUNC_QUALIFIER mat<2, 3, T, Q> operator/(mat<2, 3, T, Q> const& m, T scalar) - { - return mat<2, 3, T, Q>( - m[0] / scalar, - m[1] / scalar); - } - - 
template - GLM_FUNC_QUALIFIER mat<2, 3, T, Q> operator/(T scalar, mat<2, 3, T, Q> const& m) - { - return mat<2, 3, T, Q>( - scalar / m[0], - scalar / m[1]); - } - - // -- Boolean operators -- - - template - GLM_FUNC_QUALIFIER bool operator==(mat<2, 3, T, Q> const& m1, mat<2, 3, T, Q> const& m2) - { - return (m1[0] == m2[0]) && (m1[1] == m2[1]); - } - - template - GLM_FUNC_QUALIFIER bool operator!=(mat<2, 3, T, Q> const& m1, mat<2, 3, T, Q> const& m2) - { - return (m1[0] != m2[0]) || (m1[1] != m2[1]); - } -} //namespace glm diff --git a/third_party/glm/detail/type_mat2x4.hpp b/third_party/glm/detail/type_mat2x4.hpp deleted file mode 100755 index ff03e21..0000000 --- a/third_party/glm/detail/type_mat2x4.hpp +++ /dev/null @@ -1,161 +0,0 @@ -/// @ref core -/// @file glm/detail/type_mat2x4.hpp - -#pragma once - -#include "type_vec2.hpp" -#include "type_vec4.hpp" -#include -#include - -namespace glm -{ - template - struct mat<2, 4, T, Q> - { - typedef vec<4, T, Q> col_type; - typedef vec<2, T, Q> row_type; - typedef mat<2, 4, T, Q> type; - typedef mat<4, 2, T, Q> transpose_type; - typedef T value_type; - - private: - col_type value[2]; - - public: - // -- Accesses -- - - typedef length_t length_type; - GLM_FUNC_DECL static GLM_CONSTEXPR length_type length() { return 2; } - - GLM_FUNC_DECL col_type & operator[](length_type i); - GLM_FUNC_DECL GLM_CONSTEXPR col_type const& operator[](length_type i) const; - - // -- Constructors -- - - GLM_FUNC_DECL GLM_CONSTEXPR mat() GLM_DEFAULT; - template - GLM_FUNC_DECL GLM_CONSTEXPR mat(mat<2, 4, T, P> const& m); - - GLM_FUNC_DECL explicit GLM_CONSTEXPR mat(T scalar); - GLM_FUNC_DECL GLM_CONSTEXPR mat( - T x0, T y0, T z0, T w0, - T x1, T y1, T z1, T w1); - GLM_FUNC_DECL GLM_CONSTEXPR mat( - col_type const& v0, - col_type const& v1); - - // -- Conversions -- - - template< - typename X1, typename Y1, typename Z1, typename W1, - typename X2, typename Y2, typename Z2, typename W2> - GLM_FUNC_DECL GLM_CONSTEXPR mat( - X1 x1, Y1 y1, Z1 z1, W1 w1, - X2 x2, Y2 y2, Z2 z2, W2 w2); - - template - GLM_FUNC_DECL GLM_CONSTEXPR mat( - vec<4, U, Q> const& v1, - vec<4, V, Q> const& v2); - - // -- Matrix conversions -- - - template - GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<2, 4, U, P> const& m); - - GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<2, 2, T, Q> const& x); - GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<3, 3, T, Q> const& x); - GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<4, 4, T, Q> const& x); - GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<2, 3, T, Q> const& x); - GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<3, 2, T, Q> const& x); - GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<3, 4, T, Q> const& x); - GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<4, 2, T, Q> const& x); - GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<4, 3, T, Q> const& x); - - // -- Unary arithmetic operators -- - - template - GLM_FUNC_DECL mat<2, 4, T, Q> & operator=(mat<2, 4, U, Q> const& m); - template - GLM_FUNC_DECL mat<2, 4, T, Q> & operator+=(U s); - template - GLM_FUNC_DECL mat<2, 4, T, Q> & operator+=(mat<2, 4, U, Q> const& m); - template - GLM_FUNC_DECL mat<2, 4, T, Q> & operator-=(U s); - template - GLM_FUNC_DECL mat<2, 4, T, Q> & operator-=(mat<2, 4, U, Q> const& m); - template - GLM_FUNC_DECL mat<2, 4, T, Q> & operator*=(U s); - template - GLM_FUNC_DECL mat<2, 4, T, Q> & operator/=(U s); - - // -- Increment and decrement operators -- - - GLM_FUNC_DECL mat<2, 4, T, Q> & operator++ (); - GLM_FUNC_DECL mat<2, 4, T, Q> & operator-- (); - GLM_FUNC_DECL 
mat<2, 4, T, Q> operator++(int); - GLM_FUNC_DECL mat<2, 4, T, Q> operator--(int); - }; - - // -- Unary operators -- - - template - GLM_FUNC_DECL mat<2, 4, T, Q> operator+(mat<2, 4, T, Q> const& m); - - template - GLM_FUNC_DECL mat<2, 4, T, Q> operator-(mat<2, 4, T, Q> const& m); - - // -- Binary operators -- - - template - GLM_FUNC_DECL mat<2, 4, T, Q> operator+(mat<2, 4, T, Q> const& m, T scalar); - - template - GLM_FUNC_DECL mat<2, 4, T, Q> operator+(mat<2, 4, T, Q> const& m1, mat<2, 4, T, Q> const& m2); - - template - GLM_FUNC_DECL mat<2, 4, T, Q> operator-(mat<2, 4, T, Q> const& m, T scalar); - - template - GLM_FUNC_DECL mat<2, 4, T, Q> operator-(mat<2, 4, T, Q> const& m1, mat<2, 4, T, Q> const& m2); - - template - GLM_FUNC_DECL mat<2, 4, T, Q> operator*(mat<2, 4, T, Q> const& m, T scalar); - - template - GLM_FUNC_DECL mat<2, 4, T, Q> operator*(T scalar, mat<2, 4, T, Q> const& m); - - template - GLM_FUNC_DECL typename mat<2, 4, T, Q>::col_type operator*(mat<2, 4, T, Q> const& m, typename mat<2, 4, T, Q>::row_type const& v); - - template - GLM_FUNC_DECL typename mat<2, 4, T, Q>::row_type operator*(typename mat<2, 4, T, Q>::col_type const& v, mat<2, 4, T, Q> const& m); - - template - GLM_FUNC_DECL mat<4, 4, T, Q> operator*(mat<2, 4, T, Q> const& m1, mat<4, 2, T, Q> const& m2); - - template - GLM_FUNC_DECL mat<2, 4, T, Q> operator*(mat<2, 4, T, Q> const& m1, mat<2, 2, T, Q> const& m2); - - template - GLM_FUNC_DECL mat<3, 4, T, Q> operator*(mat<2, 4, T, Q> const& m1, mat<3, 2, T, Q> const& m2); - - template - GLM_FUNC_DECL mat<2, 4, T, Q> operator/(mat<2, 4, T, Q> const& m, T scalar); - - template - GLM_FUNC_DECL mat<2, 4, T, Q> operator/(T scalar, mat<2, 4, T, Q> const& m); - - // -- Boolean operators -- - - template - GLM_FUNC_DECL bool operator==(mat<2, 4, T, Q> const& m1, mat<2, 4, T, Q> const& m2); - - template - GLM_FUNC_DECL bool operator!=(mat<2, 4, T, Q> const& m1, mat<2, 4, T, Q> const& m2); -}//namespace glm - -#ifndef GLM_EXTERNAL_TEMPLATE -#include "type_mat2x4.inl" -#endif diff --git a/third_party/glm/detail/type_mat2x4.inl b/third_party/glm/detail/type_mat2x4.inl deleted file mode 100755 index b6d2b9d..0000000 --- a/third_party/glm/detail/type_mat2x4.inl +++ /dev/null @@ -1,520 +0,0 @@ -namespace glm -{ - // -- Constructors -- - -# if GLM_CONFIG_DEFAULTED_FUNCTIONS == GLM_DISABLE - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<2, 4, T, Q>::mat() -# if GLM_CONFIG_CTOR_INIT == GLM_CTOR_INITIALIZER_LIST - : value{col_type(1, 0, 0, 0), col_type(0, 1, 0, 0)} -# endif - { -# if GLM_CONFIG_CTOR_INIT == GLM_CTOR_INITIALISATION - this->value[0] = col_type(1, 0, 0, 0); - this->value[1] = col_type(0, 1, 0, 0); -# endif - } -# endif - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<2, 4, T, Q>::mat(mat<2, 4, T, P> const& m) -# if GLM_HAS_INITIALIZER_LISTS - : value{m[0], m[1]} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = m[0]; - this->value[1] = m[1]; -# endif - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<2, 4, T, Q>::mat(T s) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(s, 0, 0, 0), col_type(0, s, 0, 0)} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(s, 0, 0, 0); - this->value[1] = col_type(0, s, 0, 0); -# endif - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<2, 4, T, Q>::mat - ( - T x0, T y0, T z0, T w0, - T x1, T y1, T z1, T w1 - ) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(x0, y0, z0, w0), col_type(x1, y1, z1, w1)} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = 
col_type(x0, y0, z0, w0); - this->value[1] = col_type(x1, y1, z1, w1); -# endif - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<2, 4, T, Q>::mat(col_type const& v0, col_type const& v1) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(v0), col_type(v1)} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = v0; - this->value[1] = v1; -# endif - } - - // -- Conversion constructors -- - - template - template< - typename X1, typename Y1, typename Z1, typename W1, - typename X2, typename Y2, typename Z2, typename W2> - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<2, 4, T, Q>::mat - ( - X1 x1, Y1 y1, Z1 z1, W1 w1, - X2 x2, Y2 y2, Z2 z2, W2 w2 - ) -# if GLM_HAS_INITIALIZER_LISTS - : value{ - col_type(x1, y1, z1, w1), - col_type(x2, y2, z2, w2)} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(x1, y1, z1, w1); - this->value[1] = col_type(x2, y2, z2, w2); -# endif - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<2, 4, T, Q>::mat(vec<4, V1, Q> const& v1, vec<4, V2, Q> const& v2) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(v1), col_type(v2)} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(v1); - this->value[1] = col_type(v2); -# endif - } - - // -- Matrix conversions -- - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<2, 4, T, Q>::mat(mat<2, 4, U, P> const& m) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(m[0]), col_type(m[1])} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(m[0]); - this->value[1] = col_type(m[1]); -# endif - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<2, 4, T, Q>::mat(mat<2, 2, T, Q> const& m) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(m[0], 0, 0), col_type(m[1], 0, 0)} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(m[0], 0, 0); - this->value[1] = col_type(m[1], 0, 0); -# endif - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<2, 4, T, Q>::mat(mat<3, 3, T, Q> const& m) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(m[0], 0), col_type(m[1], 0)} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(m[0], 0); - this->value[1] = col_type(m[1], 0); -# endif - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<2, 4, T, Q>::mat(mat<4, 4, T, Q> const& m) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(m[0]), col_type(m[1])} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(m[0]); - this->value[1] = col_type(m[1]); -# endif - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<2, 4, T, Q>::mat(mat<2, 3, T, Q> const& m) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(m[0], 0), col_type(m[1], 0)} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(m[0], 0); - this->value[1] = col_type(m[1], 0); -# endif - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<2, 4, T, Q>::mat(mat<3, 2, T, Q> const& m) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(m[0], 0, 0), col_type(m[1], 0, 0)} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(m[0], 0, 0); - this->value[1] = col_type(m[1], 0, 0); -# endif - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<2, 4, T, Q>::mat(mat<3, 4, T, Q> const& m) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(m[0]), col_type(m[1])} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(m[0]); - this->value[1] = col_type(m[1]); -# endif - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<2, 4, T, Q>::mat(mat<4, 2, T, Q> const& m) -# if 
GLM_HAS_INITIALIZER_LISTS - : value{col_type(m[0], 0, 0), col_type(m[1], 0, 0)} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(m[0], 0, 0); - this->value[1] = col_type(m[1], 0, 0); -# endif - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<2, 4, T, Q>::mat(mat<4, 3, T, Q> const& m) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(m[0], 0), col_type(m[1], 0)} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(m[0], 0); - this->value[1] = col_type(m[1], 0); -# endif - } - - // -- Accesses -- - - template - GLM_FUNC_QUALIFIER typename mat<2, 4, T, Q>::col_type & mat<2, 4, T, Q>::operator[](typename mat<2, 4, T, Q>::length_type i) - { - assert(i < this->length()); - return this->value[i]; - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR typename mat<2, 4, T, Q>::col_type const& mat<2, 4, T, Q>::operator[](typename mat<2, 4, T, Q>::length_type i) const - { - assert(i < this->length()); - return this->value[i]; - } - - // -- Unary updatable operators -- - - template - template - GLM_FUNC_QUALIFIER mat<2, 4, T, Q>& mat<2, 4, T, Q>::operator=(mat<2, 4, U, Q> const& m) - { - this->value[0] = m[0]; - this->value[1] = m[1]; - return *this; - } - - template - template - GLM_FUNC_QUALIFIER mat<2, 4, T, Q>& mat<2, 4, T, Q>::operator+=(U s) - { - this->value[0] += s; - this->value[1] += s; - return *this; - } - - template - template - GLM_FUNC_QUALIFIER mat<2, 4, T, Q>& mat<2, 4, T, Q>::operator+=(mat<2, 4, U, Q> const& m) - { - this->value[0] += m[0]; - this->value[1] += m[1]; - return *this; - } - - template - template - GLM_FUNC_QUALIFIER mat<2, 4, T, Q>& mat<2, 4, T, Q>::operator-=(U s) - { - this->value[0] -= s; - this->value[1] -= s; - return *this; - } - - template - template - GLM_FUNC_QUALIFIER mat<2, 4, T, Q>& mat<2, 4, T, Q>::operator-=(mat<2, 4, U, Q> const& m) - { - this->value[0] -= m[0]; - this->value[1] -= m[1]; - return *this; - } - - template - template - GLM_FUNC_QUALIFIER mat<2, 4, T, Q>& mat<2, 4, T, Q>::operator*=(U s) - { - this->value[0] *= s; - this->value[1] *= s; - return *this; - } - - template - template - GLM_FUNC_QUALIFIER mat<2, 4, T, Q> & mat<2, 4, T, Q>::operator/=(U s) - { - this->value[0] /= s; - this->value[1] /= s; - return *this; - } - - // -- Increment and decrement operators -- - - template - GLM_FUNC_QUALIFIER mat<2, 4, T, Q>& mat<2, 4, T, Q>::operator++() - { - ++this->value[0]; - ++this->value[1]; - return *this; - } - - template - GLM_FUNC_QUALIFIER mat<2, 4, T, Q>& mat<2, 4, T, Q>::operator--() - { - --this->value[0]; - --this->value[1]; - return *this; - } - - template - GLM_FUNC_QUALIFIER mat<2, 4, T, Q> mat<2, 4, T, Q>::operator++(int) - { - mat<2, 4, T, Q> Result(*this); - ++*this; - return Result; - } - - template - GLM_FUNC_QUALIFIER mat<2, 4, T, Q> mat<2, 4, T, Q>::operator--(int) - { - mat<2, 4, T, Q> Result(*this); - --*this; - return Result; - } - - // -- Unary arithmetic operators -- - - template - GLM_FUNC_QUALIFIER mat<2, 4, T, Q> operator+(mat<2, 4, T, Q> const& m) - { - return m; - } - - template - GLM_FUNC_QUALIFIER mat<2, 4, T, Q> operator-(mat<2, 4, T, Q> const& m) - { - return mat<2, 4, T, Q>( - -m[0], - -m[1]); - } - - // -- Binary arithmetic operators -- - - template - GLM_FUNC_QUALIFIER mat<2, 4, T, Q> operator+(mat<2, 4, T, Q> const& m, T scalar) - { - return mat<2, 4, T, Q>( - m[0] + scalar, - m[1] + scalar); - } - - template - GLM_FUNC_QUALIFIER mat<2, 4, T, Q> operator+(mat<2, 4, T, Q> const& m1, mat<2, 4, T, Q> const& m2) - { - return mat<2, 4, T, Q>( - m1[0] + m2[0], - 
m1[1] + m2[1]); - } - - template - GLM_FUNC_QUALIFIER mat<2, 4, T, Q> operator-(mat<2, 4, T, Q> const& m, T scalar) - { - return mat<2, 4, T, Q>( - m[0] - scalar, - m[1] - scalar); - } - - template - GLM_FUNC_QUALIFIER mat<2, 4, T, Q> operator-(mat<2, 4, T, Q> const& m1, mat<2, 4, T, Q> const& m2) - { - return mat<2, 4, T, Q>( - m1[0] - m2[0], - m1[1] - m2[1]); - } - - template - GLM_FUNC_QUALIFIER mat<2, 4, T, Q> operator*(mat<2, 4, T, Q> const& m, T scalar) - { - return mat<2, 4, T, Q>( - m[0] * scalar, - m[1] * scalar); - } - - template - GLM_FUNC_QUALIFIER mat<2, 4, T, Q> operator*(T scalar, mat<2, 4, T, Q> const& m) - { - return mat<2, 4, T, Q>( - m[0] * scalar, - m[1] * scalar); - } - - template - GLM_FUNC_QUALIFIER typename mat<2, 4, T, Q>::col_type operator*(mat<2, 4, T, Q> const& m, typename mat<2, 4, T, Q>::row_type const& v) - { - return typename mat<2, 4, T, Q>::col_type( - m[0][0] * v.x + m[1][0] * v.y, - m[0][1] * v.x + m[1][1] * v.y, - m[0][2] * v.x + m[1][2] * v.y, - m[0][3] * v.x + m[1][3] * v.y); - } - - template - GLM_FUNC_QUALIFIER typename mat<2, 4, T, Q>::row_type operator*(typename mat<2, 4, T, Q>::col_type const& v, mat<2, 4, T, Q> const& m) - { - return typename mat<2, 4, T, Q>::row_type( - v.x * m[0][0] + v.y * m[0][1] + v.z * m[0][2] + v.w * m[0][3], - v.x * m[1][0] + v.y * m[1][1] + v.z * m[1][2] + v.w * m[1][3]); - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, Q> operator*(mat<2, 4, T, Q> const& m1, mat<4, 2, T, Q> const& m2) - { - T SrcA00 = m1[0][0]; - T SrcA01 = m1[0][1]; - T SrcA02 = m1[0][2]; - T SrcA03 = m1[0][3]; - T SrcA10 = m1[1][0]; - T SrcA11 = m1[1][1]; - T SrcA12 = m1[1][2]; - T SrcA13 = m1[1][3]; - - T SrcB00 = m2[0][0]; - T SrcB01 = m2[0][1]; - T SrcB10 = m2[1][0]; - T SrcB11 = m2[1][1]; - T SrcB20 = m2[2][0]; - T SrcB21 = m2[2][1]; - T SrcB30 = m2[3][0]; - T SrcB31 = m2[3][1]; - - mat<4, 4, T, Q> Result; - Result[0][0] = SrcA00 * SrcB00 + SrcA10 * SrcB01; - Result[0][1] = SrcA01 * SrcB00 + SrcA11 * SrcB01; - Result[0][2] = SrcA02 * SrcB00 + SrcA12 * SrcB01; - Result[0][3] = SrcA03 * SrcB00 + SrcA13 * SrcB01; - Result[1][0] = SrcA00 * SrcB10 + SrcA10 * SrcB11; - Result[1][1] = SrcA01 * SrcB10 + SrcA11 * SrcB11; - Result[1][2] = SrcA02 * SrcB10 + SrcA12 * SrcB11; - Result[1][3] = SrcA03 * SrcB10 + SrcA13 * SrcB11; - Result[2][0] = SrcA00 * SrcB20 + SrcA10 * SrcB21; - Result[2][1] = SrcA01 * SrcB20 + SrcA11 * SrcB21; - Result[2][2] = SrcA02 * SrcB20 + SrcA12 * SrcB21; - Result[2][3] = SrcA03 * SrcB20 + SrcA13 * SrcB21; - Result[3][0] = SrcA00 * SrcB30 + SrcA10 * SrcB31; - Result[3][1] = SrcA01 * SrcB30 + SrcA11 * SrcB31; - Result[3][2] = SrcA02 * SrcB30 + SrcA12 * SrcB31; - Result[3][3] = SrcA03 * SrcB30 + SrcA13 * SrcB31; - return Result; - } - - template - GLM_FUNC_QUALIFIER mat<2, 4, T, Q> operator*(mat<2, 4, T, Q> const& m1, mat<2, 2, T, Q> const& m2) - { - return mat<2, 4, T, Q>( - m1[0][0] * m2[0][0] + m1[1][0] * m2[0][1], - m1[0][1] * m2[0][0] + m1[1][1] * m2[0][1], - m1[0][2] * m2[0][0] + m1[1][2] * m2[0][1], - m1[0][3] * m2[0][0] + m1[1][3] * m2[0][1], - m1[0][0] * m2[1][0] + m1[1][0] * m2[1][1], - m1[0][1] * m2[1][0] + m1[1][1] * m2[1][1], - m1[0][2] * m2[1][0] + m1[1][2] * m2[1][1], - m1[0][3] * m2[1][0] + m1[1][3] * m2[1][1]); - } - - template - GLM_FUNC_QUALIFIER mat<3, 4, T, Q> operator*(mat<2, 4, T, Q> const& m1, mat<3, 2, T, Q> const& m2) - { - return mat<3, 4, T, Q>( - m1[0][0] * m2[0][0] + m1[1][0] * m2[0][1], - m1[0][1] * m2[0][0] + m1[1][1] * m2[0][1], - m1[0][2] * m2[0][0] + m1[1][2] * m2[0][1], - m1[0][3] * m2[0][0] + 
m1[1][3] * m2[0][1], - m1[0][0] * m2[1][0] + m1[1][0] * m2[1][1], - m1[0][1] * m2[1][0] + m1[1][1] * m2[1][1], - m1[0][2] * m2[1][0] + m1[1][2] * m2[1][1], - m1[0][3] * m2[1][0] + m1[1][3] * m2[1][1], - m1[0][0] * m2[2][0] + m1[1][0] * m2[2][1], - m1[0][1] * m2[2][0] + m1[1][1] * m2[2][1], - m1[0][2] * m2[2][0] + m1[1][2] * m2[2][1], - m1[0][3] * m2[2][0] + m1[1][3] * m2[2][1]); - } - - template - GLM_FUNC_QUALIFIER mat<2, 4, T, Q> operator/(mat<2, 4, T, Q> const& m, T scalar) - { - return mat<2, 4, T, Q>( - m[0] / scalar, - m[1] / scalar); - } - - template - GLM_FUNC_QUALIFIER mat<2, 4, T, Q> operator/(T scalar, mat<2, 4, T, Q> const& m) - { - return mat<2, 4, T, Q>( - scalar / m[0], - scalar / m[1]); - } - - // -- Boolean operators -- - - template - GLM_FUNC_QUALIFIER bool operator==(mat<2, 4, T, Q> const& m1, mat<2, 4, T, Q> const& m2) - { - return (m1[0] == m2[0]) && (m1[1] == m2[1]); - } - - template - GLM_FUNC_QUALIFIER bool operator!=(mat<2, 4, T, Q> const& m1, mat<2, 4, T, Q> const& m2) - { - return (m1[0] != m2[0]) || (m1[1] != m2[1]); - } -} //namespace glm diff --git a/third_party/glm/detail/type_mat3x2.hpp b/third_party/glm/detail/type_mat3x2.hpp deleted file mode 100755 index e166581..0000000 --- a/third_party/glm/detail/type_mat3x2.hpp +++ /dev/null @@ -1,167 +0,0 @@ -/// @ref core -/// @file glm/detail/type_mat3x2.hpp - -#pragma once - -#include "type_vec2.hpp" -#include "type_vec3.hpp" -#include -#include - -namespace glm -{ - template - struct mat<3, 2, T, Q> - { - typedef vec<2, T, Q> col_type; - typedef vec<3, T, Q> row_type; - typedef mat<3, 2, T, Q> type; - typedef mat<2, 3, T, Q> transpose_type; - typedef T value_type; - - private: - col_type value[3]; - - public: - // -- Accesses -- - - typedef length_t length_type; - GLM_FUNC_DECL static GLM_CONSTEXPR length_type length() { return 3; } - - GLM_FUNC_DECL col_type & operator[](length_type i); - GLM_FUNC_DECL GLM_CONSTEXPR col_type const& operator[](length_type i) const; - - // -- Constructors -- - - GLM_FUNC_DECL GLM_CONSTEXPR mat() GLM_DEFAULT; - template - GLM_FUNC_DECL GLM_CONSTEXPR mat(mat<3, 2, T, P> const& m); - - GLM_FUNC_DECL explicit GLM_CONSTEXPR mat(T scalar); - GLM_FUNC_DECL GLM_CONSTEXPR mat( - T x0, T y0, - T x1, T y1, - T x2, T y2); - GLM_FUNC_DECL GLM_CONSTEXPR mat( - col_type const& v0, - col_type const& v1, - col_type const& v2); - - // -- Conversions -- - - template< - typename X1, typename Y1, - typename X2, typename Y2, - typename X3, typename Y3> - GLM_FUNC_DECL GLM_CONSTEXPR mat( - X1 x1, Y1 y1, - X2 x2, Y2 y2, - X3 x3, Y3 y3); - - template - GLM_FUNC_DECL GLM_CONSTEXPR mat( - vec<2, V1, Q> const& v1, - vec<2, V2, Q> const& v2, - vec<2, V3, Q> const& v3); - - // -- Matrix conversions -- - - template - GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<3, 2, U, P> const& m); - - GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<2, 2, T, Q> const& x); - GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<3, 3, T, Q> const& x); - GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<4, 4, T, Q> const& x); - GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<2, 3, T, Q> const& x); - GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<2, 4, T, Q> const& x); - GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<3, 4, T, Q> const& x); - GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<4, 2, T, Q> const& x); - GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<4, 3, T, Q> const& x); - - // -- Unary arithmetic operators -- - - template - GLM_FUNC_DECL mat<3, 2, T, Q> & operator=(mat<3, 2, U, Q> const& m); - template - 
GLM_FUNC_DECL mat<3, 2, T, Q> & operator+=(U s); - template - GLM_FUNC_DECL mat<3, 2, T, Q> & operator+=(mat<3, 2, U, Q> const& m); - template - GLM_FUNC_DECL mat<3, 2, T, Q> & operator-=(U s); - template - GLM_FUNC_DECL mat<3, 2, T, Q> & operator-=(mat<3, 2, U, Q> const& m); - template - GLM_FUNC_DECL mat<3, 2, T, Q> & operator*=(U s); - template - GLM_FUNC_DECL mat<3, 2, T, Q> & operator/=(U s); - - // -- Increment and decrement operators -- - - GLM_FUNC_DECL mat<3, 2, T, Q> & operator++ (); - GLM_FUNC_DECL mat<3, 2, T, Q> & operator-- (); - GLM_FUNC_DECL mat<3, 2, T, Q> operator++(int); - GLM_FUNC_DECL mat<3, 2, T, Q> operator--(int); - }; - - // -- Unary operators -- - - template - GLM_FUNC_DECL mat<3, 2, T, Q> operator+(mat<3, 2, T, Q> const& m); - - template - GLM_FUNC_DECL mat<3, 2, T, Q> operator-(mat<3, 2, T, Q> const& m); - - // -- Binary operators -- - - template - GLM_FUNC_DECL mat<3, 2, T, Q> operator+(mat<3, 2, T, Q> const& m, T scalar); - - template - GLM_FUNC_DECL mat<3, 2, T, Q> operator+(mat<3, 2, T, Q> const& m1, mat<3, 2, T, Q> const& m2); - - template - GLM_FUNC_DECL mat<3, 2, T, Q> operator-(mat<3, 2, T, Q> const& m, T scalar); - - template - GLM_FUNC_DECL mat<3, 2, T, Q> operator-(mat<3, 2, T, Q> const& m1, mat<3, 2, T, Q> const& m2); - - template - GLM_FUNC_DECL mat<3, 2, T, Q> operator*(mat<3, 2, T, Q> const& m, T scalar); - - template - GLM_FUNC_DECL mat<3, 2, T, Q> operator*(T scalar, mat<3, 2, T, Q> const& m); - - template - GLM_FUNC_DECL typename mat<3, 2, T, Q>::col_type operator*(mat<3, 2, T, Q> const& m, typename mat<3, 2, T, Q>::row_type const& v); - - template - GLM_FUNC_DECL typename mat<3, 2, T, Q>::row_type operator*(typename mat<3, 2, T, Q>::col_type const& v, mat<3, 2, T, Q> const& m); - - template - GLM_FUNC_DECL mat<2, 2, T, Q> operator*(mat<3, 2, T, Q> const& m1, mat<2, 3, T, Q> const& m2); - - template - GLM_FUNC_DECL mat<3, 2, T, Q> operator*(mat<3, 2, T, Q> const& m1, mat<3, 3, T, Q> const& m2); - - template - GLM_FUNC_DECL mat<4, 2, T, Q> operator*(mat<3, 2, T, Q> const& m1, mat<4, 3, T, Q> const& m2); - - template - GLM_FUNC_DECL mat<3, 2, T, Q> operator/(mat<3, 2, T, Q> const& m, T scalar); - - template - GLM_FUNC_DECL mat<3, 2, T, Q> operator/(T scalar, mat<3, 2, T, Q> const& m); - - // -- Boolean operators -- - - template - GLM_FUNC_DECL bool operator==(mat<3, 2, T, Q> const& m1, mat<3, 2, T, Q> const& m2); - - template - GLM_FUNC_DECL bool operator!=(mat<3, 2, T, Q> const& m1, mat<3, 2, T, Q> const& m2); - -}//namespace glm - -#ifndef GLM_EXTERNAL_TEMPLATE -#include "type_mat3x2.inl" -#endif diff --git a/third_party/glm/detail/type_mat3x2.inl b/third_party/glm/detail/type_mat3x2.inl deleted file mode 100755 index b4b948b..0000000 --- a/third_party/glm/detail/type_mat3x2.inl +++ /dev/null @@ -1,532 +0,0 @@ -namespace glm -{ - // -- Constructors -- - -# if GLM_CONFIG_DEFAULTED_FUNCTIONS == GLM_DISABLE - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<3, 2, T, Q>::mat() -# if GLM_CONFIG_CTOR_INIT == GLM_CTOR_INITIALIZER_LIST - : value{col_type(1, 0), col_type(0, 1), col_type(0, 0)} -# endif - { -# if GLM_CONFIG_CTOR_INIT == GLM_CTOR_INITIALISATION - this->value[0] = col_type(1, 0); - this->value[1] = col_type(0, 1); - this->value[2] = col_type(0, 0); -# endif - } -# endif - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<3, 2, T, Q>::mat(mat<3, 2, T, P> const& m) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(m[0]), col_type(m[1]), col_type(m[2])} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = m[0]; - 
this->value[1] = m[1]; - this->value[2] = m[2]; -# endif - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<3, 2, T, Q>::mat(T s) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(s, 0), col_type(0, s), col_type(0, 0)} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(s, 0); - this->value[1] = col_type(0, s); - this->value[2] = col_type(0, 0); -# endif - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<3, 2, T, Q>::mat - ( - T x0, T y0, - T x1, T y1, - T x2, T y2 - ) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(x0, y0), col_type(x1, y1), col_type(x2, y2)} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(x0, y0); - this->value[1] = col_type(x1, y1); - this->value[2] = col_type(x2, y2); -# endif - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<3, 2, T, Q>::mat(col_type const& v0, col_type const& v1, col_type const& v2) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(v0), col_type(v1), col_type(v2)} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = v0; - this->value[1] = v1; - this->value[2] = v2; -# endif - } - - // -- Conversion constructors -- - - template - template< - typename X0, typename Y0, - typename X1, typename Y1, - typename X2, typename Y2> - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<3, 2, T, Q>::mat - ( - X0 x0, Y0 y0, - X1 x1, Y1 y1, - X2 x2, Y2 y2 - ) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(x0, y0), col_type(x1, y1), col_type(x2, y2)} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(x0, y0); - this->value[1] = col_type(x1, y1); - this->value[2] = col_type(x2, y2); -# endif - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<3, 2, T, Q>::mat(vec<2, V0, Q> const& v0, vec<2, V1, Q> const& v1, vec<2, V2, Q> const& v2) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(v0), col_type(v1), col_type(v2)} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(v0); - this->value[1] = col_type(v1); - this->value[2] = col_type(v2); -# endif - } - - // -- Matrix conversions -- - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<3, 2, T, Q>::mat(mat<3, 2, U, P> const& m) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(m[0]), col_type(m[1]), col_type(m[2])} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(m[0]); - this->value[1] = col_type(m[1]); - this->value[2] = col_type(m[2]); -# endif - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<3, 2, T, Q>::mat(mat<2, 2, T, Q> const& m) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(m[0]), col_type(m[1]), col_type(0)} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = m[0]; - this->value[1] = m[1]; - this->value[2] = col_type(0); -# endif - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<3, 2, T, Q>::mat(mat<3, 3, T, Q> const& m) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(m[0]), col_type(m[1]), col_type(m[2])} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(m[0]); - this->value[1] = col_type(m[1]); - this->value[2] = col_type(m[2]); -# endif - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<3, 2, T, Q>::mat(mat<4, 4, T, Q> const& m) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(m[0]), col_type(m[1]), col_type(m[2])} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(m[0]); - this->value[1] = col_type(m[1]); - this->value[2] = col_type(m[2]); -# endif - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<3, 2, T, Q>::mat(mat<2, 3, T, Q> const& m) -# if 
GLM_HAS_INITIALIZER_LISTS - : value{col_type(m[0]), col_type(m[1]), col_type(0)} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(m[0]); - this->value[1] = col_type(m[1]); - this->value[2] = col_type(0); -# endif - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<3, 2, T, Q>::mat(mat<2, 4, T, Q> const& m) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(m[0]), col_type(m[1]), col_type(0)} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(m[0]); - this->value[1] = col_type(m[1]); - this->value[2] = col_type(0); -# endif - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<3, 2, T, Q>::mat(mat<3, 4, T, Q> const& m) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(m[0]), col_type(m[1]), col_type(m[2])} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(m[0]); - this->value[1] = col_type(m[1]); - this->value[2] = col_type(m[2]); -# endif - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<3, 2, T, Q>::mat(mat<4, 2, T, Q> const& m) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(m[0]), col_type(m[1]), col_type(m[2])} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = m[0]; - this->value[1] = m[1]; - this->value[2] = m[2]; -# endif - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<3, 2, T, Q>::mat(mat<4, 3, T, Q> const& m) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(m[0]), col_type(m[1]), col_type(m[2])} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(m[0]); - this->value[1] = col_type(m[1]); - this->value[2] = col_type(m[2]); -# endif - } - - // -- Accesses -- - - template - GLM_FUNC_QUALIFIER typename mat<3, 2, T, Q>::col_type & mat<3, 2, T, Q>::operator[](typename mat<3, 2, T, Q>::length_type i) - { - assert(i < this->length()); - return this->value[i]; - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR typename mat<3, 2, T, Q>::col_type const& mat<3, 2, T, Q>::operator[](typename mat<3, 2, T, Q>::length_type i) const - { - assert(i < this->length()); - return this->value[i]; - } - - // -- Unary updatable operators -- - - template - template - GLM_FUNC_QUALIFIER mat<3, 2, T, Q>& mat<3, 2, T, Q>::operator=(mat<3, 2, U, Q> const& m) - { - this->value[0] = m[0]; - this->value[1] = m[1]; - this->value[2] = m[2]; - return *this; - } - - template - template - GLM_FUNC_QUALIFIER mat<3, 2, T, Q>& mat<3, 2, T, Q>::operator+=(U s) - { - this->value[0] += s; - this->value[1] += s; - this->value[2] += s; - return *this; - } - - template - template - GLM_FUNC_QUALIFIER mat<3, 2, T, Q>& mat<3, 2, T, Q>::operator+=(mat<3, 2, U, Q> const& m) - { - this->value[0] += m[0]; - this->value[1] += m[1]; - this->value[2] += m[2]; - return *this; - } - - template - template - GLM_FUNC_QUALIFIER mat<3, 2, T, Q>& mat<3, 2, T, Q>::operator-=(U s) - { - this->value[0] -= s; - this->value[1] -= s; - this->value[2] -= s; - return *this; - } - - template - template - GLM_FUNC_QUALIFIER mat<3, 2, T, Q>& mat<3, 2, T, Q>::operator-=(mat<3, 2, U, Q> const& m) - { - this->value[0] -= m[0]; - this->value[1] -= m[1]; - this->value[2] -= m[2]; - return *this; - } - - template - template - GLM_FUNC_QUALIFIER mat<3, 2, T, Q>& mat<3, 2, T, Q>::operator*=(U s) - { - this->value[0] *= s; - this->value[1] *= s; - this->value[2] *= s; - return *this; - } - - template - template - GLM_FUNC_QUALIFIER mat<3, 2, T, Q> & mat<3, 2, T, Q>::operator/=(U s) - { - this->value[0] /= s; - this->value[1] /= s; - this->value[2] /= s; - return *this; - } - - // -- Increment and decrement operators -- - - 
template - GLM_FUNC_QUALIFIER mat<3, 2, T, Q>& mat<3, 2, T, Q>::operator++() - { - ++this->value[0]; - ++this->value[1]; - ++this->value[2]; - return *this; - } - - template - GLM_FUNC_QUALIFIER mat<3, 2, T, Q>& mat<3, 2, T, Q>::operator--() - { - --this->value[0]; - --this->value[1]; - --this->value[2]; - return *this; - } - - template - GLM_FUNC_QUALIFIER mat<3, 2, T, Q> mat<3, 2, T, Q>::operator++(int) - { - mat<3, 2, T, Q> Result(*this); - ++*this; - return Result; - } - - template - GLM_FUNC_QUALIFIER mat<3, 2, T, Q> mat<3, 2, T, Q>::operator--(int) - { - mat<3, 2, T, Q> Result(*this); - --*this; - return Result; - } - - // -- Unary arithmetic operators -- - - template - GLM_FUNC_QUALIFIER mat<3, 2, T, Q> operator+(mat<3, 2, T, Q> const& m) - { - return m; - } - - template - GLM_FUNC_QUALIFIER mat<3, 2, T, Q> operator-(mat<3, 2, T, Q> const& m) - { - return mat<3, 2, T, Q>( - -m[0], - -m[1], - -m[2]); - } - - // -- Binary arithmetic operators -- - - template - GLM_FUNC_QUALIFIER mat<3, 2, T, Q> operator+(mat<3, 2, T, Q> const& m, T scalar) - { - return mat<3, 2, T, Q>( - m[0] + scalar, - m[1] + scalar, - m[2] + scalar); - } - - template - GLM_FUNC_QUALIFIER mat<3, 2, T, Q> operator+(mat<3, 2, T, Q> const& m1, mat<3, 2, T, Q> const& m2) - { - return mat<3, 2, T, Q>( - m1[0] + m2[0], - m1[1] + m2[1], - m1[2] + m2[2]); - } - - template - GLM_FUNC_QUALIFIER mat<3, 2, T, Q> operator-(mat<3, 2, T, Q> const& m, T scalar) - { - return mat<3, 2, T, Q>( - m[0] - scalar, - m[1] - scalar, - m[2] - scalar); - } - - template - GLM_FUNC_QUALIFIER mat<3, 2, T, Q> operator-(mat<3, 2, T, Q> const& m1, mat<3, 2, T, Q> const& m2) - { - return mat<3, 2, T, Q>( - m1[0] - m2[0], - m1[1] - m2[1], - m1[2] - m2[2]); - } - - template - GLM_FUNC_QUALIFIER mat<3, 2, T, Q> operator*(mat<3, 2, T, Q> const& m, T scalar) - { - return mat<3, 2, T, Q>( - m[0] * scalar, - m[1] * scalar, - m[2] * scalar); - } - - template - GLM_FUNC_QUALIFIER mat<3, 2, T, Q> operator*(T scalar, mat<3, 2, T, Q> const& m) - { - return mat<3, 2, T, Q>( - m[0] * scalar, - m[1] * scalar, - m[2] * scalar); - } - - template - GLM_FUNC_QUALIFIER typename mat<3, 2, T, Q>::col_type operator*(mat<3, 2, T, Q> const& m, typename mat<3, 2, T, Q>::row_type const& v) - { - return typename mat<3, 2, T, Q>::col_type( - m[0][0] * v.x + m[1][0] * v.y + m[2][0] * v.z, - m[0][1] * v.x + m[1][1] * v.y + m[2][1] * v.z); - } - - template - GLM_FUNC_QUALIFIER typename mat<3, 2, T, Q>::row_type operator*(typename mat<3, 2, T, Q>::col_type const& v, mat<3, 2, T, Q> const& m) - { - return typename mat<3, 2, T, Q>::row_type( - v.x * m[0][0] + v.y * m[0][1], - v.x * m[1][0] + v.y * m[1][1], - v.x * m[2][0] + v.y * m[2][1]); - } - - template - GLM_FUNC_QUALIFIER mat<2, 2, T, Q> operator*(mat<3, 2, T, Q> const& m1, mat<2, 3, T, Q> const& m2) - { - const T SrcA00 = m1[0][0]; - const T SrcA01 = m1[0][1]; - const T SrcA10 = m1[1][0]; - const T SrcA11 = m1[1][1]; - const T SrcA20 = m1[2][0]; - const T SrcA21 = m1[2][1]; - - const T SrcB00 = m2[0][0]; - const T SrcB01 = m2[0][1]; - const T SrcB02 = m2[0][2]; - const T SrcB10 = m2[1][0]; - const T SrcB11 = m2[1][1]; - const T SrcB12 = m2[1][2]; - - mat<2, 2, T, Q> Result; - Result[0][0] = SrcA00 * SrcB00 + SrcA10 * SrcB01 + SrcA20 * SrcB02; - Result[0][1] = SrcA01 * SrcB00 + SrcA11 * SrcB01 + SrcA21 * SrcB02; - Result[1][0] = SrcA00 * SrcB10 + SrcA10 * SrcB11 + SrcA20 * SrcB12; - Result[1][1] = SrcA01 * SrcB10 + SrcA11 * SrcB11 + SrcA21 * SrcB12; - return Result; - } - - template - GLM_FUNC_QUALIFIER mat<3, 2, T, Q> 
operator*(mat<3, 2, T, Q> const& m1, mat<3, 3, T, Q> const& m2) - { - return mat<3, 2, T, Q>( - m1[0][0] * m2[0][0] + m1[1][0] * m2[0][1] + m1[2][0] * m2[0][2], - m1[0][1] * m2[0][0] + m1[1][1] * m2[0][1] + m1[2][1] * m2[0][2], - m1[0][0] * m2[1][0] + m1[1][0] * m2[1][1] + m1[2][0] * m2[1][2], - m1[0][1] * m2[1][0] + m1[1][1] * m2[1][1] + m1[2][1] * m2[1][2], - m1[0][0] * m2[2][0] + m1[1][0] * m2[2][1] + m1[2][0] * m2[2][2], - m1[0][1] * m2[2][0] + m1[1][1] * m2[2][1] + m1[2][1] * m2[2][2]); - } - - template - GLM_FUNC_QUALIFIER mat<4, 2, T, Q> operator*(mat<3, 2, T, Q> const& m1, mat<4, 3, T, Q> const& m2) - { - return mat<4, 2, T, Q>( - m1[0][0] * m2[0][0] + m1[1][0] * m2[0][1] + m1[2][0] * m2[0][2], - m1[0][1] * m2[0][0] + m1[1][1] * m2[0][1] + m1[2][1] * m2[0][2], - m1[0][0] * m2[1][0] + m1[1][0] * m2[1][1] + m1[2][0] * m2[1][2], - m1[0][1] * m2[1][0] + m1[1][1] * m2[1][1] + m1[2][1] * m2[1][2], - m1[0][0] * m2[2][0] + m1[1][0] * m2[2][1] + m1[2][0] * m2[2][2], - m1[0][1] * m2[2][0] + m1[1][1] * m2[2][1] + m1[2][1] * m2[2][2], - m1[0][0] * m2[3][0] + m1[1][0] * m2[3][1] + m1[2][0] * m2[3][2], - m1[0][1] * m2[3][0] + m1[1][1] * m2[3][1] + m1[2][1] * m2[3][2]); - } - - template - GLM_FUNC_QUALIFIER mat<3, 2, T, Q> operator/(mat<3, 2, T, Q> const& m, T scalar) - { - return mat<3, 2, T, Q>( - m[0] / scalar, - m[1] / scalar, - m[2] / scalar); - } - - template - GLM_FUNC_QUALIFIER mat<3, 2, T, Q> operator/(T scalar, mat<3, 2, T, Q> const& m) - { - return mat<3, 2, T, Q>( - scalar / m[0], - scalar / m[1], - scalar / m[2]); - } - - // -- Boolean operators -- - - template - GLM_FUNC_QUALIFIER bool operator==(mat<3, 2, T, Q> const& m1, mat<3, 2, T, Q> const& m2) - { - return (m1[0] == m2[0]) && (m1[1] == m2[1]) && (m1[2] == m2[2]); - } - - template - GLM_FUNC_QUALIFIER bool operator!=(mat<3, 2, T, Q> const& m1, mat<3, 2, T, Q> const& m2) - { - return (m1[0] != m2[0]) || (m1[1] != m2[1]) || (m1[2] != m2[2]); - } -} //namespace glm diff --git a/third_party/glm/detail/type_mat3x3.hpp b/third_party/glm/detail/type_mat3x3.hpp deleted file mode 100755 index 3174872..0000000 --- a/third_party/glm/detail/type_mat3x3.hpp +++ /dev/null @@ -1,184 +0,0 @@ -/// @ref core -/// @file glm/detail/type_mat3x3.hpp - -#pragma once - -#include "type_vec3.hpp" -#include -#include - -namespace glm -{ - template - struct mat<3, 3, T, Q> - { - typedef vec<3, T, Q> col_type; - typedef vec<3, T, Q> row_type; - typedef mat<3, 3, T, Q> type; - typedef mat<3, 3, T, Q> transpose_type; - typedef T value_type; - - private: - col_type value[3]; - - public: - // -- Accesses -- - - typedef length_t length_type; - GLM_FUNC_DECL static GLM_CONSTEXPR length_type length() { return 3; } - - GLM_FUNC_DECL col_type & operator[](length_type i); - GLM_FUNC_DECL GLM_CONSTEXPR col_type const& operator[](length_type i) const; - - // -- Constructors -- - - GLM_FUNC_DECL GLM_CONSTEXPR mat() GLM_DEFAULT; - template - GLM_FUNC_DECL GLM_CONSTEXPR mat(mat<3, 3, T, P> const& m); - - GLM_FUNC_DECL explicit GLM_CONSTEXPR mat(T scalar); - GLM_FUNC_DECL GLM_CONSTEXPR mat( - T x0, T y0, T z0, - T x1, T y1, T z1, - T x2, T y2, T z2); - GLM_FUNC_DECL GLM_CONSTEXPR mat( - col_type const& v0, - col_type const& v1, - col_type const& v2); - - // -- Conversions -- - - template< - typename X1, typename Y1, typename Z1, - typename X2, typename Y2, typename Z2, - typename X3, typename Y3, typename Z3> - GLM_FUNC_DECL GLM_CONSTEXPR mat( - X1 x1, Y1 y1, Z1 z1, - X2 x2, Y2 y2, Z2 z2, - X3 x3, Y3 y3, Z3 z3); - - template - GLM_FUNC_DECL GLM_CONSTEXPR mat( - vec<3, V1, 
Q> const& v1, - vec<3, V2, Q> const& v2, - vec<3, V3, Q> const& v3); - - // -- Matrix conversions -- - - template - GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<3, 3, U, P> const& m); - - GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<2, 2, T, Q> const& x); - GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<4, 4, T, Q> const& x); - GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<2, 3, T, Q> const& x); - GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<3, 2, T, Q> const& x); - GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<2, 4, T, Q> const& x); - GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<4, 2, T, Q> const& x); - GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<3, 4, T, Q> const& x); - GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<4, 3, T, Q> const& x); - - // -- Unary arithmetic operators -- - - template - GLM_FUNC_DECL mat<3, 3, T, Q> & operator=(mat<3, 3, U, Q> const& m); - template - GLM_FUNC_DECL mat<3, 3, T, Q> & operator+=(U s); - template - GLM_FUNC_DECL mat<3, 3, T, Q> & operator+=(mat<3, 3, U, Q> const& m); - template - GLM_FUNC_DECL mat<3, 3, T, Q> & operator-=(U s); - template - GLM_FUNC_DECL mat<3, 3, T, Q> & operator-=(mat<3, 3, U, Q> const& m); - template - GLM_FUNC_DECL mat<3, 3, T, Q> & operator*=(U s); - template - GLM_FUNC_DECL mat<3, 3, T, Q> & operator*=(mat<3, 3, U, Q> const& m); - template - GLM_FUNC_DECL mat<3, 3, T, Q> & operator/=(U s); - template - GLM_FUNC_DECL mat<3, 3, T, Q> & operator/=(mat<3, 3, U, Q> const& m); - - // -- Increment and decrement operators -- - - GLM_FUNC_DECL mat<3, 3, T, Q> & operator++(); - GLM_FUNC_DECL mat<3, 3, T, Q> & operator--(); - GLM_FUNC_DECL mat<3, 3, T, Q> operator++(int); - GLM_FUNC_DECL mat<3, 3, T, Q> operator--(int); - }; - - // -- Unary operators -- - - template - GLM_FUNC_DECL mat<3, 3, T, Q> operator+(mat<3, 3, T, Q> const& m); - - template - GLM_FUNC_DECL mat<3, 3, T, Q> operator-(mat<3, 3, T, Q> const& m); - - // -- Binary operators -- - - template - GLM_FUNC_DECL mat<3, 3, T, Q> operator+(mat<3, 3, T, Q> const& m, T scalar); - - template - GLM_FUNC_DECL mat<3, 3, T, Q> operator+(T scalar, mat<3, 3, T, Q> const& m); - - template - GLM_FUNC_DECL mat<3, 3, T, Q> operator+(mat<3, 3, T, Q> const& m1, mat<3, 3, T, Q> const& m2); - - template - GLM_FUNC_DECL mat<3, 3, T, Q> operator-(mat<3, 3, T, Q> const& m, T scalar); - - template - GLM_FUNC_DECL mat<3, 3, T, Q> operator-(T scalar, mat<3, 3, T, Q> const& m); - - template - GLM_FUNC_DECL mat<3, 3, T, Q> operator-(mat<3, 3, T, Q> const& m1, mat<3, 3, T, Q> const& m2); - - template - GLM_FUNC_DECL mat<3, 3, T, Q> operator*(mat<3, 3, T, Q> const& m, T scalar); - - template - GLM_FUNC_DECL mat<3, 3, T, Q> operator*(T scalar, mat<3, 3, T, Q> const& m); - - template - GLM_FUNC_DECL typename mat<3, 3, T, Q>::col_type operator*(mat<3, 3, T, Q> const& m, typename mat<3, 3, T, Q>::row_type const& v); - - template - GLM_FUNC_DECL typename mat<3, 3, T, Q>::row_type operator*(typename mat<3, 3, T, Q>::col_type const& v, mat<3, 3, T, Q> const& m); - - template - GLM_FUNC_DECL mat<3, 3, T, Q> operator*(mat<3, 3, T, Q> const& m1, mat<3, 3, T, Q> const& m2); - - template - GLM_FUNC_DECL mat<2, 3, T, Q> operator*(mat<3, 3, T, Q> const& m1, mat<2, 3, T, Q> const& m2); - - template - GLM_FUNC_DECL mat<4, 3, T, Q> operator*(mat<3, 3, T, Q> const& m1, mat<4, 3, T, Q> const& m2); - - template - GLM_FUNC_DECL mat<3, 3, T, Q> operator/(mat<3, 3, T, Q> const& m, T scalar); - - template - GLM_FUNC_DECL mat<3, 3, T, Q> operator/(T scalar, mat<3, 3, T, Q> const& m); - - template - 
GLM_FUNC_DECL typename mat<3, 3, T, Q>::col_type operator/(mat<3, 3, T, Q> const& m, typename mat<3, 3, T, Q>::row_type const& v); - - template - GLM_FUNC_DECL typename mat<3, 3, T, Q>::row_type operator/(typename mat<3, 3, T, Q>::col_type const& v, mat<3, 3, T, Q> const& m); - - template - GLM_FUNC_DECL mat<3, 3, T, Q> operator/(mat<3, 3, T, Q> const& m1, mat<3, 3, T, Q> const& m2); - - // -- Boolean operators -- - - template - GLM_FUNC_DECL GLM_CONSTEXPR bool operator==(mat<3, 3, T, Q> const& m1, mat<3, 3, T, Q> const& m2); - - template - GLM_FUNC_DECL bool operator!=(mat<3, 3, T, Q> const& m1, mat<3, 3, T, Q> const& m2); -}//namespace glm - -#ifndef GLM_EXTERNAL_TEMPLATE -#include "type_mat3x3.inl" -#endif diff --git a/third_party/glm/detail/type_mat3x3.inl b/third_party/glm/detail/type_mat3x3.inl deleted file mode 100755 index 1ddaf99..0000000 --- a/third_party/glm/detail/type_mat3x3.inl +++ /dev/null @@ -1,601 +0,0 @@ -#include "../matrix.hpp" - -namespace glm -{ - // -- Constructors -- - -# if GLM_CONFIG_DEFAULTED_FUNCTIONS == GLM_DISABLE - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<3, 3, T, Q>::mat() -# if GLM_CONFIG_CTOR_INIT == GLM_CTOR_INITIALIZER_LIST - : value{col_type(1, 0, 0), col_type(0, 1, 0), col_type(0, 0, 1)} -# endif - { -# if GLM_CONFIG_CTOR_INIT == GLM_CTOR_INITIALISATION - this->value[0] = col_type(1, 0, 0); - this->value[1] = col_type(0, 1, 0); - this->value[2] = col_type(0, 0, 1); -# endif - } -# endif - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<3, 3, T, Q>::mat(mat<3, 3, T, P> const& m) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(m[0]), col_type(m[1]), col_type(m[2])} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(m[0]); - this->value[1] = col_type(m[1]); - this->value[2] = col_type(m[2]); -# endif - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<3, 3, T, Q>::mat(T s) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(s, 0, 0), col_type(0, s, 0), col_type(0, 0, s)} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(s, 0, 0); - this->value[1] = col_type(0, s, 0); - this->value[2] = col_type(0, 0, s); -# endif - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<3, 3, T, Q>::mat - ( - T x0, T y0, T z0, - T x1, T y1, T z1, - T x2, T y2, T z2 - ) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(x0, y0, z0), col_type(x1, y1, z1), col_type(x2, y2, z2)} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(x0, y0, z0); - this->value[1] = col_type(x1, y1, z1); - this->value[2] = col_type(x2, y2, z2); -# endif - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<3, 3, T, Q>::mat(col_type const& v0, col_type const& v1, col_type const& v2) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(v0), col_type(v1), col_type(v2)} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(v0); - this->value[1] = col_type(v1); - this->value[2] = col_type(v2); -# endif - } - - // -- Conversion constructors -- - - template - template< - typename X1, typename Y1, typename Z1, - typename X2, typename Y2, typename Z2, - typename X3, typename Y3, typename Z3> - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<3, 3, T, Q>::mat - ( - X1 x1, Y1 y1, Z1 z1, - X2 x2, Y2 y2, Z2 z2, - X3 x3, Y3 y3, Z3 z3 - ) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(x1, y1, z1), col_type(x2, y2, z2), col_type(x3, y3, z3)} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(x1, y1, z1); - this->value[1] = col_type(x2, y2, z2); - this->value[2] = col_type(x3, y3, z3); 
-# endif - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<3, 3, T, Q>::mat(vec<3, V1, Q> const& v1, vec<3, V2, Q> const& v2, vec<3, V3, Q> const& v3) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(v1), col_type(v2), col_type(v3)} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(v1); - this->value[1] = col_type(v2); - this->value[2] = col_type(v3); -# endif - } - - // -- Matrix conversions -- - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<3, 3, T, Q>::mat(mat<3, 3, U, P> const& m) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(m[0]), col_type(m[1]), col_type(m[2])} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(m[0]); - this->value[1] = col_type(m[1]); - this->value[2] = col_type(m[2]); -# endif - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<3, 3, T, Q>::mat(mat<2, 2, T, Q> const& m) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(m[0], 0), col_type(m[1], 0), col_type(0, 0, 1)} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(m[0], 0); - this->value[1] = col_type(m[1], 0); - this->value[2] = col_type(0, 0, 1); -# endif - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<3, 3, T, Q>::mat(mat<4, 4, T, Q> const& m) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(m[0]), col_type(m[1]), col_type(m[2])} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(m[0]); - this->value[1] = col_type(m[1]); - this->value[2] = col_type(m[2]); -# endif - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<3, 3, T, Q>::mat(mat<2, 3, T, Q> const& m) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(m[0]), col_type(m[1]), col_type(0, 0, 1)} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(m[0]); - this->value[1] = col_type(m[1]); - this->value[2] = col_type(0, 0, 1); -# endif - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<3, 3, T, Q>::mat(mat<3, 2, T, Q> const& m) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(m[0], 0), col_type(m[1], 0), col_type(m[2], 1)} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(m[0], 0); - this->value[1] = col_type(m[1], 0); - this->value[2] = col_type(m[2], 1); -# endif - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<3, 3, T, Q>::mat(mat<2, 4, T, Q> const& m) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(m[0]), col_type(m[1]), col_type(0, 0, 1)} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(m[0]); - this->value[1] = col_type(m[1]); - this->value[2] = col_type(0, 0, 1); -# endif - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<3, 3, T, Q>::mat(mat<4, 2, T, Q> const& m) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(m[0], 0), col_type(m[1], 0), col_type(m[2], 1)} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(m[0], 0); - this->value[1] = col_type(m[1], 0); - this->value[2] = col_type(m[2], 1); -# endif - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<3, 3, T, Q>::mat(mat<3, 4, T, Q> const& m) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(m[0]), col_type(m[1]), col_type(m[2])} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(m[0]); - this->value[1] = col_type(m[1]); - this->value[2] = col_type(m[2]); -# endif - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<3, 3, T, Q>::mat(mat<4, 3, T, Q> const& m) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(m[0]), col_type(m[1]), col_type(m[2])} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - 
this->value[0] = col_type(m[0]); - this->value[1] = col_type(m[1]); - this->value[2] = col_type(m[2]); -# endif - } - - // -- Accesses -- - - template - GLM_FUNC_QUALIFIER typename mat<3, 3, T, Q>::col_type & mat<3, 3, T, Q>::operator[](typename mat<3, 3, T, Q>::length_type i) - { - assert(i < this->length()); - return this->value[i]; - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR typename mat<3, 3, T, Q>::col_type const& mat<3, 3, T, Q>::operator[](typename mat<3, 3, T, Q>::length_type i) const - { - assert(i < this->length()); - return this->value[i]; - } - - // -- Unary updatable operators -- - - template - template - GLM_FUNC_QUALIFIER mat<3, 3, T, Q> & mat<3, 3, T, Q>::operator=(mat<3, 3, U, Q> const& m) - { - this->value[0] = m[0]; - this->value[1] = m[1]; - this->value[2] = m[2]; - return *this; - } - - template - template - GLM_FUNC_QUALIFIER mat<3, 3, T, Q> & mat<3, 3, T, Q>::operator+=(U s) - { - this->value[0] += s; - this->value[1] += s; - this->value[2] += s; - return *this; - } - - template - template - GLM_FUNC_QUALIFIER mat<3, 3, T, Q> & mat<3, 3, T, Q>::operator+=(mat<3, 3, U, Q> const& m) - { - this->value[0] += m[0]; - this->value[1] += m[1]; - this->value[2] += m[2]; - return *this; - } - - template - template - GLM_FUNC_QUALIFIER mat<3, 3, T, Q> & mat<3, 3, T, Q>::operator-=(U s) - { - this->value[0] -= s; - this->value[1] -= s; - this->value[2] -= s; - return *this; - } - - template - template - GLM_FUNC_QUALIFIER mat<3, 3, T, Q> & mat<3, 3, T, Q>::operator-=(mat<3, 3, U, Q> const& m) - { - this->value[0] -= m[0]; - this->value[1] -= m[1]; - this->value[2] -= m[2]; - return *this; - } - - template - template - GLM_FUNC_QUALIFIER mat<3, 3, T, Q> & mat<3, 3, T, Q>::operator*=(U s) - { - this->value[0] *= s; - this->value[1] *= s; - this->value[2] *= s; - return *this; - } - - template - template - GLM_FUNC_QUALIFIER mat<3, 3, T, Q> & mat<3, 3, T, Q>::operator*=(mat<3, 3, U, Q> const& m) - { - return (*this = *this * m); - } - - template - template - GLM_FUNC_QUALIFIER mat<3, 3, T, Q> & mat<3, 3, T, Q>::operator/=(U s) - { - this->value[0] /= s; - this->value[1] /= s; - this->value[2] /= s; - return *this; - } - - template - template - GLM_FUNC_QUALIFIER mat<3, 3, T, Q> & mat<3, 3, T, Q>::operator/=(mat<3, 3, U, Q> const& m) - { - return *this *= inverse(m); - } - - // -- Increment and decrement operators -- - - template - GLM_FUNC_QUALIFIER mat<3, 3, T, Q> & mat<3, 3, T, Q>::operator++() - { - ++this->value[0]; - ++this->value[1]; - ++this->value[2]; - return *this; - } - - template - GLM_FUNC_QUALIFIER mat<3, 3, T, Q> & mat<3, 3, T, Q>::operator--() - { - --this->value[0]; - --this->value[1]; - --this->value[2]; - return *this; - } - - template - GLM_FUNC_QUALIFIER mat<3, 3, T, Q> mat<3, 3, T, Q>::operator++(int) - { - mat<3, 3, T, Q> Result(*this); - ++*this; - return Result; - } - - template - GLM_FUNC_QUALIFIER mat<3, 3, T, Q> mat<3, 3, T, Q>::operator--(int) - { - mat<3, 3, T, Q> Result(*this); - --*this; - return Result; - } - - // -- Unary arithmetic operators -- - - template - GLM_FUNC_QUALIFIER mat<3, 3, T, Q> operator+(mat<3, 3, T, Q> const& m) - { - return m; - } - - template - GLM_FUNC_QUALIFIER mat<3, 3, T, Q> operator-(mat<3, 3, T, Q> const& m) - { - return mat<3, 3, T, Q>( - -m[0], - -m[1], - -m[2]); - } - - // -- Binary arithmetic operators -- - - template - GLM_FUNC_QUALIFIER mat<3, 3, T, Q> operator+(mat<3, 3, T, Q> const& m, T scalar) - { - return mat<3, 3, T, Q>( - m[0] + scalar, - m[1] + scalar, - m[2] + scalar); - } - - template - 
GLM_FUNC_QUALIFIER mat<3, 3, T, Q> operator+(T scalar, mat<3, 3, T, Q> const& m) - { - return mat<3, 3, T, Q>( - m[0] + scalar, - m[1] + scalar, - m[2] + scalar); - } - - template - GLM_FUNC_QUALIFIER mat<3, 3, T, Q> operator+(mat<3, 3, T, Q> const& m1, mat<3, 3, T, Q> const& m2) - { - return mat<3, 3, T, Q>( - m1[0] + m2[0], - m1[1] + m2[1], - m1[2] + m2[2]); - } - - template - GLM_FUNC_QUALIFIER mat<3, 3, T, Q> operator-(mat<3, 3, T, Q> const& m, T scalar) - { - return mat<3, 3, T, Q>( - m[0] - scalar, - m[1] - scalar, - m[2] - scalar); - } - - template - GLM_FUNC_QUALIFIER mat<3, 3, T, Q> operator-(T scalar, mat<3, 3, T, Q> const& m) - { - return mat<3, 3, T, Q>( - scalar - m[0], - scalar - m[1], - scalar - m[2]); - } - - template - GLM_FUNC_QUALIFIER mat<3, 3, T, Q> operator-(mat<3, 3, T, Q> const& m1, mat<3, 3, T, Q> const& m2) - { - return mat<3, 3, T, Q>( - m1[0] - m2[0], - m1[1] - m2[1], - m1[2] - m2[2]); - } - - template - GLM_FUNC_QUALIFIER mat<3, 3, T, Q> operator*(mat<3, 3, T, Q> const& m, T scalar) - { - return mat<3, 3, T, Q>( - m[0] * scalar, - m[1] * scalar, - m[2] * scalar); - } - - template - GLM_FUNC_QUALIFIER mat<3, 3, T, Q> operator*(T scalar, mat<3, 3, T, Q> const& m) - { - return mat<3, 3, T, Q>( - m[0] * scalar, - m[1] * scalar, - m[2] * scalar); - } - - template - GLM_FUNC_QUALIFIER typename mat<3, 3, T, Q>::col_type operator*(mat<3, 3, T, Q> const& m, typename mat<3, 3, T, Q>::row_type const& v) - { - return typename mat<3, 3, T, Q>::col_type( - m[0][0] * v.x + m[1][0] * v.y + m[2][0] * v.z, - m[0][1] * v.x + m[1][1] * v.y + m[2][1] * v.z, - m[0][2] * v.x + m[1][2] * v.y + m[2][2] * v.z); - } - - template - GLM_FUNC_QUALIFIER typename mat<3, 3, T, Q>::row_type operator*(typename mat<3, 3, T, Q>::col_type const& v, mat<3, 3, T, Q> const& m) - { - return typename mat<3, 3, T, Q>::row_type( - m[0][0] * v.x + m[0][1] * v.y + m[0][2] * v.z, - m[1][0] * v.x + m[1][1] * v.y + m[1][2] * v.z, - m[2][0] * v.x + m[2][1] * v.y + m[2][2] * v.z); - } - - template - GLM_FUNC_QUALIFIER mat<3, 3, T, Q> operator*(mat<3, 3, T, Q> const& m1, mat<3, 3, T, Q> const& m2) - { - T const SrcA00 = m1[0][0]; - T const SrcA01 = m1[0][1]; - T const SrcA02 = m1[0][2]; - T const SrcA10 = m1[1][0]; - T const SrcA11 = m1[1][1]; - T const SrcA12 = m1[1][2]; - T const SrcA20 = m1[2][0]; - T const SrcA21 = m1[2][1]; - T const SrcA22 = m1[2][2]; - - T const SrcB00 = m2[0][0]; - T const SrcB01 = m2[0][1]; - T const SrcB02 = m2[0][2]; - T const SrcB10 = m2[1][0]; - T const SrcB11 = m2[1][1]; - T const SrcB12 = m2[1][2]; - T const SrcB20 = m2[2][0]; - T const SrcB21 = m2[2][1]; - T const SrcB22 = m2[2][2]; - - mat<3, 3, T, Q> Result; - Result[0][0] = SrcA00 * SrcB00 + SrcA10 * SrcB01 + SrcA20 * SrcB02; - Result[0][1] = SrcA01 * SrcB00 + SrcA11 * SrcB01 + SrcA21 * SrcB02; - Result[0][2] = SrcA02 * SrcB00 + SrcA12 * SrcB01 + SrcA22 * SrcB02; - Result[1][0] = SrcA00 * SrcB10 + SrcA10 * SrcB11 + SrcA20 * SrcB12; - Result[1][1] = SrcA01 * SrcB10 + SrcA11 * SrcB11 + SrcA21 * SrcB12; - Result[1][2] = SrcA02 * SrcB10 + SrcA12 * SrcB11 + SrcA22 * SrcB12; - Result[2][0] = SrcA00 * SrcB20 + SrcA10 * SrcB21 + SrcA20 * SrcB22; - Result[2][1] = SrcA01 * SrcB20 + SrcA11 * SrcB21 + SrcA21 * SrcB22; - Result[2][2] = SrcA02 * SrcB20 + SrcA12 * SrcB21 + SrcA22 * SrcB22; - return Result; - } - - template - GLM_FUNC_QUALIFIER mat<2, 3, T, Q> operator*(mat<3, 3, T, Q> const& m1, mat<2, 3, T, Q> const& m2) - { - return mat<2, 3, T, Q>( - m1[0][0] * m2[0][0] + m1[1][0] * m2[0][1] + m1[2][0] * m2[0][2], - m1[0][1] * m2[0][0] + 
m1[1][1] * m2[0][1] + m1[2][1] * m2[0][2], - m1[0][2] * m2[0][0] + m1[1][2] * m2[0][1] + m1[2][2] * m2[0][2], - m1[0][0] * m2[1][0] + m1[1][0] * m2[1][1] + m1[2][0] * m2[1][2], - m1[0][1] * m2[1][0] + m1[1][1] * m2[1][1] + m1[2][1] * m2[1][2], - m1[0][2] * m2[1][0] + m1[1][2] * m2[1][1] + m1[2][2] * m2[1][2]); - } - - template - GLM_FUNC_QUALIFIER mat<4, 3, T, Q> operator*(mat<3, 3, T, Q> const& m1, mat<4, 3, T, Q> const& m2) - { - return mat<4, 3, T, Q>( - m1[0][0] * m2[0][0] + m1[1][0] * m2[0][1] + m1[2][0] * m2[0][2], - m1[0][1] * m2[0][0] + m1[1][1] * m2[0][1] + m1[2][1] * m2[0][2], - m1[0][2] * m2[0][0] + m1[1][2] * m2[0][1] + m1[2][2] * m2[0][2], - m1[0][0] * m2[1][0] + m1[1][0] * m2[1][1] + m1[2][0] * m2[1][2], - m1[0][1] * m2[1][0] + m1[1][1] * m2[1][1] + m1[2][1] * m2[1][2], - m1[0][2] * m2[1][0] + m1[1][2] * m2[1][1] + m1[2][2] * m2[1][2], - m1[0][0] * m2[2][0] + m1[1][0] * m2[2][1] + m1[2][0] * m2[2][2], - m1[0][1] * m2[2][0] + m1[1][1] * m2[2][1] + m1[2][1] * m2[2][2], - m1[0][2] * m2[2][0] + m1[1][2] * m2[2][1] + m1[2][2] * m2[2][2], - m1[0][0] * m2[3][0] + m1[1][0] * m2[3][1] + m1[2][0] * m2[3][2], - m1[0][1] * m2[3][0] + m1[1][1] * m2[3][1] + m1[2][1] * m2[3][2], - m1[0][2] * m2[3][0] + m1[1][2] * m2[3][1] + m1[2][2] * m2[3][2]); - } - - template - GLM_FUNC_QUALIFIER mat<3, 3, T, Q> operator/(mat<3, 3, T, Q> const& m, T scalar) - { - return mat<3, 3, T, Q>( - m[0] / scalar, - m[1] / scalar, - m[2] / scalar); - } - - template - GLM_FUNC_QUALIFIER mat<3, 3, T, Q> operator/(T scalar, mat<3, 3, T, Q> const& m) - { - return mat<3, 3, T, Q>( - scalar / m[0], - scalar / m[1], - scalar / m[2]); - } - - template - GLM_FUNC_QUALIFIER typename mat<3, 3, T, Q>::col_type operator/(mat<3, 3, T, Q> const& m, typename mat<3, 3, T, Q>::row_type const& v) - { - return inverse(m) * v; - } - - template - GLM_FUNC_QUALIFIER typename mat<3, 3, T, Q>::row_type operator/(typename mat<3, 3, T, Q>::col_type const& v, mat<3, 3, T, Q> const& m) - { - return v * inverse(m); - } - - template - GLM_FUNC_QUALIFIER mat<3, 3, T, Q> operator/(mat<3, 3, T, Q> const& m1, mat<3, 3, T, Q> const& m2) - { - mat<3, 3, T, Q> m1_copy(m1); - return m1_copy /= m2; - } - - // -- Boolean operators -- - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR bool operator==(mat<3, 3, T, Q> const& m1, mat<3, 3, T, Q> const& m2) - { - return (m1[0] == m2[0]) && (m1[1] == m2[1]) && (m1[2] == m2[2]); - } - - template - GLM_FUNC_QUALIFIER bool operator!=(mat<3, 3, T, Q> const& m1, mat<3, 3, T, Q> const& m2) - { - return (m1[0] != m2[0]) || (m1[1] != m2[1]) || (m1[2] != m2[2]); - } -} //namespace glm diff --git a/third_party/glm/detail/type_mat3x4.hpp b/third_party/glm/detail/type_mat3x4.hpp deleted file mode 100755 index 6e40b90..0000000 --- a/third_party/glm/detail/type_mat3x4.hpp +++ /dev/null @@ -1,166 +0,0 @@ -/// @ref core -/// @file glm/detail/type_mat3x4.hpp - -#pragma once - -#include "type_vec3.hpp" -#include "type_vec4.hpp" -#include -#include - -namespace glm -{ - template - struct mat<3, 4, T, Q> - { - typedef vec<4, T, Q> col_type; - typedef vec<3, T, Q> row_type; - typedef mat<3, 4, T, Q> type; - typedef mat<4, 3, T, Q> transpose_type; - typedef T value_type; - - private: - col_type value[3]; - - public: - // -- Accesses -- - - typedef length_t length_type; - GLM_FUNC_DECL static GLM_CONSTEXPR length_type length() { return 3; } - - GLM_FUNC_DECL col_type & operator[](length_type i); - GLM_FUNC_DECL GLM_CONSTEXPR col_type const& operator[](length_type i) const; - - // -- Constructors -- - - GLM_FUNC_DECL GLM_CONSTEXPR mat() 
GLM_DEFAULT; - template - GLM_FUNC_DECL GLM_CONSTEXPR mat(mat<3, 4, T, P> const& m); - - GLM_FUNC_DECL explicit GLM_CONSTEXPR mat(T scalar); - GLM_FUNC_DECL GLM_CONSTEXPR mat( - T x0, T y0, T z0, T w0, - T x1, T y1, T z1, T w1, - T x2, T y2, T z2, T w2); - GLM_FUNC_DECL GLM_CONSTEXPR mat( - col_type const& v0, - col_type const& v1, - col_type const& v2); - - // -- Conversions -- - - template< - typename X1, typename Y1, typename Z1, typename W1, - typename X2, typename Y2, typename Z2, typename W2, - typename X3, typename Y3, typename Z3, typename W3> - GLM_FUNC_DECL GLM_CONSTEXPR mat( - X1 x1, Y1 y1, Z1 z1, W1 w1, - X2 x2, Y2 y2, Z2 z2, W2 w2, - X3 x3, Y3 y3, Z3 z3, W3 w3); - - template - GLM_FUNC_DECL GLM_CONSTEXPR mat( - vec<4, V1, Q> const& v1, - vec<4, V2, Q> const& v2, - vec<4, V3, Q> const& v3); - - // -- Matrix conversions -- - - template - GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<3, 4, U, P> const& m); - - GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<2, 2, T, Q> const& x); - GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<3, 3, T, Q> const& x); - GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<4, 4, T, Q> const& x); - GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<2, 3, T, Q> const& x); - GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<3, 2, T, Q> const& x); - GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<2, 4, T, Q> const& x); - GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<4, 2, T, Q> const& x); - GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<4, 3, T, Q> const& x); - - // -- Unary arithmetic operators -- - - template - GLM_FUNC_DECL mat<3, 4, T, Q> & operator=(mat<3, 4, U, Q> const& m); - template - GLM_FUNC_DECL mat<3, 4, T, Q> & operator+=(U s); - template - GLM_FUNC_DECL mat<3, 4, T, Q> & operator+=(mat<3, 4, U, Q> const& m); - template - GLM_FUNC_DECL mat<3, 4, T, Q> & operator-=(U s); - template - GLM_FUNC_DECL mat<3, 4, T, Q> & operator-=(mat<3, 4, U, Q> const& m); - template - GLM_FUNC_DECL mat<3, 4, T, Q> & operator*=(U s); - template - GLM_FUNC_DECL mat<3, 4, T, Q> & operator/=(U s); - - // -- Increment and decrement operators -- - - GLM_FUNC_DECL mat<3, 4, T, Q> & operator++(); - GLM_FUNC_DECL mat<3, 4, T, Q> & operator--(); - GLM_FUNC_DECL mat<3, 4, T, Q> operator++(int); - GLM_FUNC_DECL mat<3, 4, T, Q> operator--(int); - }; - - // -- Unary operators -- - - template - GLM_FUNC_DECL mat<3, 4, T, Q> operator+(mat<3, 4, T, Q> const& m); - - template - GLM_FUNC_DECL mat<3, 4, T, Q> operator-(mat<3, 4, T, Q> const& m); - - // -- Binary operators -- - - template - GLM_FUNC_DECL mat<3, 4, T, Q> operator+(mat<3, 4, T, Q> const& m, T scalar); - - template - GLM_FUNC_DECL mat<3, 4, T, Q> operator+(mat<3, 4, T, Q> const& m1, mat<3, 4, T, Q> const& m2); - - template - GLM_FUNC_DECL mat<3, 4, T, Q> operator-(mat<3, 4, T, Q> const& m, T scalar); - - template - GLM_FUNC_DECL mat<3, 4, T, Q> operator-(mat<3, 4, T, Q> const& m1, mat<3, 4, T, Q> const& m2); - - template - GLM_FUNC_DECL mat<3, 4, T, Q> operator*(mat<3, 4, T, Q> const& m, T scalar); - - template - GLM_FUNC_DECL mat<3, 4, T, Q> operator*(T scalar, mat<3, 4, T, Q> const& m); - - template - GLM_FUNC_DECL typename mat<3, 4, T, Q>::col_type operator*(mat<3, 4, T, Q> const& m, typename mat<3, 4, T, Q>::row_type const& v); - - template - GLM_FUNC_DECL typename mat<3, 4, T, Q>::row_type operator*(typename mat<3, 4, T, Q>::col_type const& v, mat<3, 4, T, Q> const& m); - - template - GLM_FUNC_DECL mat<4, 4, T, Q> operator*(mat<3, 4, T, Q> const& m1, mat<4, 3, T, Q> const& m2); - - template - GLM_FUNC_DECL 
mat<2, 4, T, Q> operator*(mat<3, 4, T, Q> const& m1, mat<2, 3, T, Q> const& m2); - - template - GLM_FUNC_DECL mat<3, 4, T, Q> operator*(mat<3, 4, T, Q> const& m1, mat<3, 3, T, Q> const& m2); - - template - GLM_FUNC_DECL mat<3, 4, T, Q> operator/(mat<3, 4, T, Q> const& m, T scalar); - - template - GLM_FUNC_DECL mat<3, 4, T, Q> operator/(T scalar, mat<3, 4, T, Q> const& m); - - // -- Boolean operators -- - - template - GLM_FUNC_DECL bool operator==(mat<3, 4, T, Q> const& m1, mat<3, 4, T, Q> const& m2); - - template - GLM_FUNC_DECL bool operator!=(mat<3, 4, T, Q> const& m1, mat<3, 4, T, Q> const& m2); -}//namespace glm - -#ifndef GLM_EXTERNAL_TEMPLATE -#include "type_mat3x4.inl" -#endif diff --git a/third_party/glm/detail/type_mat3x4.inl b/third_party/glm/detail/type_mat3x4.inl deleted file mode 100755 index 6ee416c..0000000 --- a/third_party/glm/detail/type_mat3x4.inl +++ /dev/null @@ -1,578 +0,0 @@ -namespace glm -{ - // -- Constructors -- - -# if GLM_CONFIG_DEFAULTED_FUNCTIONS == GLM_DISABLE - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<3, 4, T, Q>::mat() -# if GLM_CONFIG_CTOR_INIT == GLM_CTOR_INITIALIZER_LIST - : value{col_type(1, 0, 0, 0), col_type(0, 1, 0, 0), col_type(0, 0, 1, 0)} -# endif - { -# if GLM_CONFIG_CTOR_INIT == GLM_CTOR_INITIALISATION - this->value[0] = col_type(1, 0, 0, 0); - this->value[1] = col_type(0, 1, 0, 0); - this->value[2] = col_type(0, 0, 1, 0); -# endif - } -# endif - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<3, 4, T, Q>::mat(mat<3, 4, T, P> const& m) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(m[0]), col_type(m[1]), col_type(m[2])} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = m[0]; - this->value[1] = m[1]; - this->value[2] = m[2]; -# endif - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<3, 4, T, Q>::mat(T s) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(s, 0, 0, 0), col_type(0, s, 0, 0), col_type(0, 0, s, 0)} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(s, 0, 0, 0); - this->value[1] = col_type(0, s, 0, 0); - this->value[2] = col_type(0, 0, s, 0); -# endif - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<3, 4, T, Q>::mat - ( - T x0, T y0, T z0, T w0, - T x1, T y1, T z1, T w1, - T x2, T y2, T z2, T w2 - ) -# if GLM_HAS_INITIALIZER_LISTS - : value{ - col_type(x0, y0, z0, w0), - col_type(x1, y1, z1, w1), - col_type(x2, y2, z2, w2)} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(x0, y0, z0, w0); - this->value[1] = col_type(x1, y1, z1, w1); - this->value[2] = col_type(x2, y2, z2, w2); -# endif - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<3, 4, T, Q>::mat(col_type const& v0, col_type const& v1, col_type const& v2) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(v0), col_type(v1), col_type(v2)} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = v0; - this->value[1] = v1; - this->value[2] = v2; -# endif - } - - // -- Conversion constructors -- - - template - template< - typename X0, typename Y0, typename Z0, typename W0, - typename X1, typename Y1, typename Z1, typename W1, - typename X2, typename Y2, typename Z2, typename W2> - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<3, 4, T, Q>::mat - ( - X0 x0, Y0 y0, Z0 z0, W0 w0, - X1 x1, Y1 y1, Z1 z1, W1 w1, - X2 x2, Y2 y2, Z2 z2, W2 w2 - ) -# if GLM_HAS_INITIALIZER_LISTS - : value{ - col_type(x0, y0, z0, w0), - col_type(x1, y1, z1, w1), - col_type(x2, y2, z2, w2)} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(x0, y0, z0, w0); - this->value[1] = 
col_type(x1, y1, z1, w1); - this->value[2] = col_type(x2, y2, z2, w2); -# endif - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<3, 4, T, Q>::mat(vec<4, V1, Q> const& v0, vec<4, V2, Q> const& v1, vec<4, V3, Q> const& v2) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(v0), col_type(v1), col_type(v2)} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(v0); - this->value[1] = col_type(v1); - this->value[2] = col_type(v2); -# endif - } - - // -- Matrix conversions -- - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<3, 4, T, Q>::mat(mat<3, 4, U, P> const& m) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(m[0]), col_type(m[1]), col_type(m[2])} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(m[0]); - this->value[1] = col_type(m[1]); - this->value[2] = col_type(m[2]); -# endif - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<3, 4, T, Q>::mat(mat<2, 2, T, Q> const& m) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(m[0], 0, 0), col_type(m[1], 0, 0), col_type(0, 0, 1, 0)} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(m[0], 0, 0); - this->value[1] = col_type(m[1], 0, 0); - this->value[2] = col_type(0, 0, 1, 0); -# endif - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<3, 4, T, Q>::mat(mat<3, 3, T, Q> const& m) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(m[0], 0), col_type(m[1], 0), col_type(m[2], 0)} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(m[0], 0); - this->value[1] = col_type(m[1], 0); - this->value[2] = col_type(m[2], 0); -# endif - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<3, 4, T, Q>::mat(mat<4, 4, T, Q> const& m) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(m[0]), col_type(m[1]), col_type(m[2])} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(m[0]); - this->value[1] = col_type(m[1]); - this->value[2] = col_type(m[2]); -# endif - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<3, 4, T, Q>::mat(mat<2, 3, T, Q> const& m) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(m[0], 0), col_type(m[1], 0), col_type(0, 0, 1, 0)} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(m[0], 0); - this->value[1] = col_type(m[1], 0); - this->value[2] = col_type(0, 0, 1, 0); -# endif - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<3, 4, T, Q>::mat(mat<3, 2, T, Q> const& m) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(m[0], 0, 0), col_type(m[1], 0, 0), col_type(m[2], 1, 0)} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(m[0], 0, 0); - this->value[1] = col_type(m[1], 0, 0); - this->value[2] = col_type(m[2], 1, 0); -# endif - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<3, 4, T, Q>::mat(mat<2, 4, T, Q> const& m) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(m[0]), col_type(m[1]), col_type(0, 0, 1, 0)} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(m[0]); - this->value[1] = col_type(m[1]); - this->value[2] = col_type(0, 0, 1, 0); -# endif - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<3, 4, T, Q>::mat(mat<4, 2, T, Q> const& m) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(m[0], 0, 0), col_type(m[1], 0, 0), col_type(m[2], 1, 0)} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(m[0], 0, 0); - this->value[1] = col_type(m[1], 0, 0); - this->value[2] = col_type(m[2], 1, 0); -# endif - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<3, 4, T, Q>::mat(mat<4, 
3, T, Q> const& m) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(m[0], 0), col_type(m[1], 0), col_type(m[2], 0)} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(m[0], 0); - this->value[1] = col_type(m[1], 0); - this->value[2] = col_type(m[2], 0); -# endif - } - - // -- Accesses -- - - template - GLM_FUNC_QUALIFIER typename mat<3, 4, T, Q>::col_type & mat<3, 4, T, Q>::operator[](typename mat<3, 4, T, Q>::length_type i) - { - assert(i < this->length()); - return this->value[i]; - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR typename mat<3, 4, T, Q>::col_type const& mat<3, 4, T, Q>::operator[](typename mat<3, 4, T, Q>::length_type i) const - { - assert(i < this->length()); - return this->value[i]; - } - - // -- Unary updatable operators -- - - template - template - GLM_FUNC_QUALIFIER mat<3, 4, T, Q>& mat<3, 4, T, Q>::operator=(mat<3, 4, U, Q> const& m) - { - this->value[0] = m[0]; - this->value[1] = m[1]; - this->value[2] = m[2]; - return *this; - } - - template - template - GLM_FUNC_QUALIFIER mat<3, 4, T, Q>& mat<3, 4, T, Q>::operator+=(U s) - { - this->value[0] += s; - this->value[1] += s; - this->value[2] += s; - return *this; - } - - template - template - GLM_FUNC_QUALIFIER mat<3, 4, T, Q>& mat<3, 4, T, Q>::operator+=(mat<3, 4, U, Q> const& m) - { - this->value[0] += m[0]; - this->value[1] += m[1]; - this->value[2] += m[2]; - return *this; - } - - template - template - GLM_FUNC_QUALIFIER mat<3, 4, T, Q>& mat<3, 4, T, Q>::operator-=(U s) - { - this->value[0] -= s; - this->value[1] -= s; - this->value[2] -= s; - return *this; - } - - template - template - GLM_FUNC_QUALIFIER mat<3, 4, T, Q>& mat<3, 4, T, Q>::operator-=(mat<3, 4, U, Q> const& m) - { - this->value[0] -= m[0]; - this->value[1] -= m[1]; - this->value[2] -= m[2]; - return *this; - } - - template - template - GLM_FUNC_QUALIFIER mat<3, 4, T, Q>& mat<3, 4, T, Q>::operator*=(U s) - { - this->value[0] *= s; - this->value[1] *= s; - this->value[2] *= s; - return *this; - } - - template - template - GLM_FUNC_QUALIFIER mat<3, 4, T, Q> & mat<3, 4, T, Q>::operator/=(U s) - { - this->value[0] /= s; - this->value[1] /= s; - this->value[2] /= s; - return *this; - } - - // -- Increment and decrement operators -- - - template - GLM_FUNC_QUALIFIER mat<3, 4, T, Q>& mat<3, 4, T, Q>::operator++() - { - ++this->value[0]; - ++this->value[1]; - ++this->value[2]; - return *this; - } - - template - GLM_FUNC_QUALIFIER mat<3, 4, T, Q>& mat<3, 4, T, Q>::operator--() - { - --this->value[0]; - --this->value[1]; - --this->value[2]; - return *this; - } - - template - GLM_FUNC_QUALIFIER mat<3, 4, T, Q> mat<3, 4, T, Q>::operator++(int) - { - mat<3, 4, T, Q> Result(*this); - ++*this; - return Result; - } - - template - GLM_FUNC_QUALIFIER mat<3, 4, T, Q> mat<3, 4, T, Q>::operator--(int) - { - mat<3, 4, T, Q> Result(*this); - --*this; - return Result; - } - - // -- Unary arithmetic operators -- - - template - GLM_FUNC_QUALIFIER mat<3, 4, T, Q> operator+(mat<3, 4, T, Q> const& m) - { - return m; - } - - template - GLM_FUNC_QUALIFIER mat<3, 4, T, Q> operator-(mat<3, 4, T, Q> const& m) - { - return mat<3, 4, T, Q>( - -m[0], - -m[1], - -m[2]); - } - - // -- Binary arithmetic operators -- - - template - GLM_FUNC_QUALIFIER mat<3, 4, T, Q> operator+(mat<3, 4, T, Q> const& m, T scalar) - { - return mat<3, 4, T, Q>( - m[0] + scalar, - m[1] + scalar, - m[2] + scalar); - } - - template - GLM_FUNC_QUALIFIER mat<3, 4, T, Q> operator+(mat<3, 4, T, Q> const& m1, mat<3, 4, T, Q> const& m2) - { - return mat<3, 4, T, Q>( - m1[0] + m2[0], - m1[1] + 
m2[1], - m1[2] + m2[2]); - } - - template - GLM_FUNC_QUALIFIER mat<3, 4, T, Q> operator-(mat<3, 4, T, Q> const& m, T scalar) - { - return mat<3, 4, T, Q>( - m[0] - scalar, - m[1] - scalar, - m[2] - scalar); - } - - template - GLM_FUNC_QUALIFIER mat<3, 4, T, Q> operator-(mat<3, 4, T, Q> const& m1, mat<3, 4, T, Q> const& m2) - { - return mat<3, 4, T, Q>( - m1[0] - m2[0], - m1[1] - m2[1], - m1[2] - m2[2]); - } - - template - GLM_FUNC_QUALIFIER mat<3, 4, T, Q> operator*(mat<3, 4, T, Q> const& m, T scalar) - { - return mat<3, 4, T, Q>( - m[0] * scalar, - m[1] * scalar, - m[2] * scalar); - } - - template - GLM_FUNC_QUALIFIER mat<3, 4, T, Q> operator*(T scalar, mat<3, 4, T, Q> const& m) - { - return mat<3, 4, T, Q>( - m[0] * scalar, - m[1] * scalar, - m[2] * scalar); - } - - template - GLM_FUNC_QUALIFIER typename mat<3, 4, T, Q>::col_type operator* - ( - mat<3, 4, T, Q> const& m, - typename mat<3, 4, T, Q>::row_type const& v - ) - { - return typename mat<3, 4, T, Q>::col_type( - m[0][0] * v.x + m[1][0] * v.y + m[2][0] * v.z, - m[0][1] * v.x + m[1][1] * v.y + m[2][1] * v.z, - m[0][2] * v.x + m[1][2] * v.y + m[2][2] * v.z, - m[0][3] * v.x + m[1][3] * v.y + m[2][3] * v.z); - } - - template - GLM_FUNC_QUALIFIER typename mat<3, 4, T, Q>::row_type operator* - ( - typename mat<3, 4, T, Q>::col_type const& v, - mat<3, 4, T, Q> const& m - ) - { - return typename mat<3, 4, T, Q>::row_type( - v.x * m[0][0] + v.y * m[0][1] + v.z * m[0][2] + v.w * m[0][3], - v.x * m[1][0] + v.y * m[1][1] + v.z * m[1][2] + v.w * m[1][3], - v.x * m[2][0] + v.y * m[2][1] + v.z * m[2][2] + v.w * m[2][3]); - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, Q> operator*(mat<3, 4, T, Q> const& m1, mat<4, 3, T, Q> const& m2) - { - const T SrcA00 = m1[0][0]; - const T SrcA01 = m1[0][1]; - const T SrcA02 = m1[0][2]; - const T SrcA03 = m1[0][3]; - const T SrcA10 = m1[1][0]; - const T SrcA11 = m1[1][1]; - const T SrcA12 = m1[1][2]; - const T SrcA13 = m1[1][3]; - const T SrcA20 = m1[2][0]; - const T SrcA21 = m1[2][1]; - const T SrcA22 = m1[2][2]; - const T SrcA23 = m1[2][3]; - - const T SrcB00 = m2[0][0]; - const T SrcB01 = m2[0][1]; - const T SrcB02 = m2[0][2]; - const T SrcB10 = m2[1][0]; - const T SrcB11 = m2[1][1]; - const T SrcB12 = m2[1][2]; - const T SrcB20 = m2[2][0]; - const T SrcB21 = m2[2][1]; - const T SrcB22 = m2[2][2]; - const T SrcB30 = m2[3][0]; - const T SrcB31 = m2[3][1]; - const T SrcB32 = m2[3][2]; - - mat<4, 4, T, Q> Result; - Result[0][0] = SrcA00 * SrcB00 + SrcA10 * SrcB01 + SrcA20 * SrcB02; - Result[0][1] = SrcA01 * SrcB00 + SrcA11 * SrcB01 + SrcA21 * SrcB02; - Result[0][2] = SrcA02 * SrcB00 + SrcA12 * SrcB01 + SrcA22 * SrcB02; - Result[0][3] = SrcA03 * SrcB00 + SrcA13 * SrcB01 + SrcA23 * SrcB02; - Result[1][0] = SrcA00 * SrcB10 + SrcA10 * SrcB11 + SrcA20 * SrcB12; - Result[1][1] = SrcA01 * SrcB10 + SrcA11 * SrcB11 + SrcA21 * SrcB12; - Result[1][2] = SrcA02 * SrcB10 + SrcA12 * SrcB11 + SrcA22 * SrcB12; - Result[1][3] = SrcA03 * SrcB10 + SrcA13 * SrcB11 + SrcA23 * SrcB12; - Result[2][0] = SrcA00 * SrcB20 + SrcA10 * SrcB21 + SrcA20 * SrcB22; - Result[2][1] = SrcA01 * SrcB20 + SrcA11 * SrcB21 + SrcA21 * SrcB22; - Result[2][2] = SrcA02 * SrcB20 + SrcA12 * SrcB21 + SrcA22 * SrcB22; - Result[2][3] = SrcA03 * SrcB20 + SrcA13 * SrcB21 + SrcA23 * SrcB22; - Result[3][0] = SrcA00 * SrcB30 + SrcA10 * SrcB31 + SrcA20 * SrcB32; - Result[3][1] = SrcA01 * SrcB30 + SrcA11 * SrcB31 + SrcA21 * SrcB32; - Result[3][2] = SrcA02 * SrcB30 + SrcA12 * SrcB31 + SrcA22 * SrcB32; - Result[3][3] = SrcA03 * SrcB30 + SrcA13 * SrcB31 + SrcA23 * 
SrcB32; - return Result; - } - - template - GLM_FUNC_QUALIFIER mat<2, 4, T, Q> operator*(mat<3, 4, T, Q> const& m1, mat<2, 3, T, Q> const& m2) - { - return mat<2, 4, T, Q>( - m1[0][0] * m2[0][0] + m1[1][0] * m2[0][1] + m1[2][0] * m2[0][2], - m1[0][1] * m2[0][0] + m1[1][1] * m2[0][1] + m1[2][1] * m2[0][2], - m1[0][2] * m2[0][0] + m1[1][2] * m2[0][1] + m1[2][2] * m2[0][2], - m1[0][3] * m2[0][0] + m1[1][3] * m2[0][1] + m1[2][3] * m2[0][2], - m1[0][0] * m2[1][0] + m1[1][0] * m2[1][1] + m1[2][0] * m2[1][2], - m1[0][1] * m2[1][0] + m1[1][1] * m2[1][1] + m1[2][1] * m2[1][2], - m1[0][2] * m2[1][0] + m1[1][2] * m2[1][1] + m1[2][2] * m2[1][2], - m1[0][3] * m2[1][0] + m1[1][3] * m2[1][1] + m1[2][3] * m2[1][2]); - } - - template - GLM_FUNC_QUALIFIER mat<3, 4, T, Q> operator*(mat<3, 4, T, Q> const& m1, mat<3, 3, T, Q> const& m2) - { - return mat<3, 4, T, Q>( - m1[0][0] * m2[0][0] + m1[1][0] * m2[0][1] + m1[2][0] * m2[0][2], - m1[0][1] * m2[0][0] + m1[1][1] * m2[0][1] + m1[2][1] * m2[0][2], - m1[0][2] * m2[0][0] + m1[1][2] * m2[0][1] + m1[2][2] * m2[0][2], - m1[0][3] * m2[0][0] + m1[1][3] * m2[0][1] + m1[2][3] * m2[0][2], - m1[0][0] * m2[1][0] + m1[1][0] * m2[1][1] + m1[2][0] * m2[1][2], - m1[0][1] * m2[1][0] + m1[1][1] * m2[1][1] + m1[2][1] * m2[1][2], - m1[0][2] * m2[1][0] + m1[1][2] * m2[1][1] + m1[2][2] * m2[1][2], - m1[0][3] * m2[1][0] + m1[1][3] * m2[1][1] + m1[2][3] * m2[1][2], - m1[0][0] * m2[2][0] + m1[1][0] * m2[2][1] + m1[2][0] * m2[2][2], - m1[0][1] * m2[2][0] + m1[1][1] * m2[2][1] + m1[2][1] * m2[2][2], - m1[0][2] * m2[2][0] + m1[1][2] * m2[2][1] + m1[2][2] * m2[2][2], - m1[0][3] * m2[2][0] + m1[1][3] * m2[2][1] + m1[2][3] * m2[2][2]); - } - - template - GLM_FUNC_QUALIFIER mat<3, 4, T, Q> operator/(mat<3, 4, T, Q> const& m, T scalar) - { - return mat<3, 4, T, Q>( - m[0] / scalar, - m[1] / scalar, - m[2] / scalar); - } - - template - GLM_FUNC_QUALIFIER mat<3, 4, T, Q> operator/(T scalar, mat<3, 4, T, Q> const& m) - { - return mat<3, 4, T, Q>( - scalar / m[0], - scalar / m[1], - scalar / m[2]); - } - - // -- Boolean operators -- - - template - GLM_FUNC_QUALIFIER bool operator==(mat<3, 4, T, Q> const& m1, mat<3, 4, T, Q> const& m2) - { - return (m1[0] == m2[0]) && (m1[1] == m2[1]) && (m1[2] == m2[2]); - } - - template - GLM_FUNC_QUALIFIER bool operator!=(mat<3, 4, T, Q> const& m1, mat<3, 4, T, Q> const& m2) - { - return (m1[0] != m2[0]) || (m1[1] != m2[1]) || (m1[2] != m2[2]); - } -} //namespace glm diff --git a/third_party/glm/detail/type_mat4x2.hpp b/third_party/glm/detail/type_mat4x2.hpp deleted file mode 100755 index 8d34352..0000000 --- a/third_party/glm/detail/type_mat4x2.hpp +++ /dev/null @@ -1,171 +0,0 @@ -/// @ref core -/// @file glm/detail/type_mat4x2.hpp - -#pragma once - -#include "type_vec2.hpp" -#include "type_vec4.hpp" -#include -#include - -namespace glm -{ - template - struct mat<4, 2, T, Q> - { - typedef vec<2, T, Q> col_type; - typedef vec<4, T, Q> row_type; - typedef mat<4, 2, T, Q> type; - typedef mat<2, 4, T, Q> transpose_type; - typedef T value_type; - - private: - col_type value[4]; - - public: - // -- Accesses -- - - typedef length_t length_type; - GLM_FUNC_DECL static GLM_CONSTEXPR length_type length() { return 4; } - - GLM_FUNC_DECL col_type & operator[](length_type i); - GLM_FUNC_DECL GLM_CONSTEXPR col_type const& operator[](length_type i) const; - - // -- Constructors -- - - GLM_FUNC_DECL GLM_CONSTEXPR mat() GLM_DEFAULT; - template - GLM_FUNC_DECL GLM_CONSTEXPR mat(mat<4, 2, T, P> const& m); - - GLM_FUNC_DECL explicit GLM_CONSTEXPR mat(T scalar); - GLM_FUNC_DECL 
GLM_CONSTEXPR mat( - T x0, T y0, - T x1, T y1, - T x2, T y2, - T x3, T y3); - GLM_FUNC_DECL GLM_CONSTEXPR mat( - col_type const& v0, - col_type const& v1, - col_type const& v2, - col_type const& v3); - - // -- Conversions -- - - template< - typename X0, typename Y0, - typename X1, typename Y1, - typename X2, typename Y2, - typename X3, typename Y3> - GLM_FUNC_DECL GLM_CONSTEXPR mat( - X0 x0, Y0 y0, - X1 x1, Y1 y1, - X2 x2, Y2 y2, - X3 x3, Y3 y3); - - template - GLM_FUNC_DECL GLM_CONSTEXPR mat( - vec<2, V1, Q> const& v1, - vec<2, V2, Q> const& v2, - vec<2, V3, Q> const& v3, - vec<2, V4, Q> const& v4); - - // -- Matrix conversions -- - - template - GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<4, 2, U, P> const& m); - - GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<2, 2, T, Q> const& x); - GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<3, 3, T, Q> const& x); - GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<4, 4, T, Q> const& x); - GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<2, 3, T, Q> const& x); - GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<3, 2, T, Q> const& x); - GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<2, 4, T, Q> const& x); - GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<4, 3, T, Q> const& x); - GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<3, 4, T, Q> const& x); - - // -- Unary arithmetic operators -- - - template - GLM_FUNC_DECL mat<4, 2, T, Q> & operator=(mat<4, 2, U, Q> const& m); - template - GLM_FUNC_DECL mat<4, 2, T, Q> & operator+=(U s); - template - GLM_FUNC_DECL mat<4, 2, T, Q> & operator+=(mat<4, 2, U, Q> const& m); - template - GLM_FUNC_DECL mat<4, 2, T, Q> & operator-=(U s); - template - GLM_FUNC_DECL mat<4, 2, T, Q> & operator-=(mat<4, 2, U, Q> const& m); - template - GLM_FUNC_DECL mat<4, 2, T, Q> & operator*=(U s); - template - GLM_FUNC_DECL mat<4, 2, T, Q> & operator/=(U s); - - // -- Increment and decrement operators -- - - GLM_FUNC_DECL mat<4, 2, T, Q> & operator++ (); - GLM_FUNC_DECL mat<4, 2, T, Q> & operator-- (); - GLM_FUNC_DECL mat<4, 2, T, Q> operator++(int); - GLM_FUNC_DECL mat<4, 2, T, Q> operator--(int); - }; - - // -- Unary operators -- - - template - GLM_FUNC_DECL mat<4, 2, T, Q> operator+(mat<4, 2, T, Q> const& m); - - template - GLM_FUNC_DECL mat<4, 2, T, Q> operator-(mat<4, 2, T, Q> const& m); - - // -- Binary operators -- - - template - GLM_FUNC_DECL mat<4, 2, T, Q> operator+(mat<4, 2, T, Q> const& m, T scalar); - - template - GLM_FUNC_DECL mat<4, 2, T, Q> operator+(mat<4, 2, T, Q> const& m1, mat<4, 2, T, Q> const& m2); - - template - GLM_FUNC_DECL mat<4, 2, T, Q> operator-(mat<4, 2, T, Q> const& m, T scalar); - - template - GLM_FUNC_DECL mat<4, 2, T, Q> operator-(mat<4, 2, T, Q> const& m1, mat<4, 2, T, Q> const& m2); - - template - GLM_FUNC_DECL mat<4, 2, T, Q> operator*(mat<4, 2, T, Q> const& m, T scalar); - - template - GLM_FUNC_DECL mat<4, 2, T, Q> operator*(T scalar, mat<4, 2, T, Q> const& m); - - template - GLM_FUNC_DECL typename mat<4, 2, T, Q>::col_type operator*(mat<4, 2, T, Q> const& m, typename mat<4, 2, T, Q>::row_type const& v); - - template - GLM_FUNC_DECL typename mat<4, 2, T, Q>::row_type operator*(typename mat<4, 2, T, Q>::col_type const& v, mat<4, 2, T, Q> const& m); - - template - GLM_FUNC_DECL mat<2, 2, T, Q> operator*(mat<4, 2, T, Q> const& m1, mat<2, 4, T, Q> const& m2); - - template - GLM_FUNC_DECL mat<3, 2, T, Q> operator*(mat<4, 2, T, Q> const& m1, mat<3, 4, T, Q> const& m2); - - template - GLM_FUNC_DECL mat<4, 2, T, Q> operator*(mat<4, 2, T, Q> const& m1, mat<4, 4, T, Q> const& m2); - - template 
- GLM_FUNC_DECL mat<4, 2, T, Q> operator/(mat<4, 2, T, Q> const& m, T scalar); - - template - GLM_FUNC_DECL mat<4, 2, T, Q> operator/(T scalar, mat<4, 2, T, Q> const& m); - - // -- Boolean operators -- - - template - GLM_FUNC_DECL bool operator==(mat<4, 2, T, Q> const& m1, mat<4, 2, T, Q> const& m2); - - template - GLM_FUNC_DECL bool operator!=(mat<4, 2, T, Q> const& m1, mat<4, 2, T, Q> const& m2); -}//namespace glm - -#ifndef GLM_EXTERNAL_TEMPLATE -#include "type_mat4x2.inl" -#endif diff --git a/third_party/glm/detail/type_mat4x2.inl b/third_party/glm/detail/type_mat4x2.inl deleted file mode 100755 index 419c80c..0000000 --- a/third_party/glm/detail/type_mat4x2.inl +++ /dev/null @@ -1,574 +0,0 @@ -namespace glm -{ - // -- Constructors -- - -# if GLM_CONFIG_DEFAULTED_FUNCTIONS == GLM_DISABLE - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<4, 2, T, Q>::mat() -# if GLM_CONFIG_CTOR_INIT == GLM_CTOR_INITIALIZER_LIST - : value{col_type(1, 0), col_type(0, 1), col_type(0, 0), col_type(0, 0)} -# endif - { -# if GLM_CONFIG_CTOR_INIT == GLM_CTOR_INITIALISATION - this->value[0] = col_type(1, 0); - this->value[1] = col_type(0, 1); - this->value[2] = col_type(0, 0); - this->value[3] = col_type(0, 0); -# endif - } -# endif - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<4, 2, T, Q>::mat(mat<4, 2, T, P> const& m) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(m[0]), col_type(m[1]), col_type(m[2]), col_type(m[3])} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = m[0]; - this->value[1] = m[1]; - this->value[2] = m[2]; - this->value[3] = m[3]; -# endif - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<4, 2, T, Q>::mat(T s) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(s, 0), col_type(0, s), col_type(0, 0), col_type(0, 0)} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(s, 0); - this->value[1] = col_type(0, s); - this->value[2] = col_type(0, 0); - this->value[3] = col_type(0, 0); -# endif - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<4, 2, T, Q>::mat - ( - T x0, T y0, - T x1, T y1, - T x2, T y2, - T x3, T y3 - ) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(x0, y0), col_type(x1, y1), col_type(x2, y2), col_type(x3, y3)} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(x0, y0); - this->value[1] = col_type(x1, y1); - this->value[2] = col_type(x2, y2); - this->value[3] = col_type(x3, y3); -# endif - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<4, 2, T, Q>::mat(col_type const& v0, col_type const& v1, col_type const& v2, col_type const& v3) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(v0), col_type(v1), col_type(v2), col_type(v3)} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = v0; - this->value[1] = v1; - this->value[2] = v2; - this->value[3] = v3; -# endif - } - - // -- Conversion constructors -- - - template - template< - typename X0, typename Y0, - typename X1, typename Y1, - typename X2, typename Y2, - typename X3, typename Y3> - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<4, 2, T, Q>::mat - ( - X0 x0, Y0 y0, - X1 x1, Y1 y1, - X2 x2, Y2 y2, - X3 x3, Y3 y3 - ) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(x0, y0), col_type(x1, y1), col_type(x2, y2), col_type(x3, y3)} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(x0, y0); - this->value[1] = col_type(x1, y1); - this->value[2] = col_type(x2, y2); - this->value[3] = col_type(x3, y3); -# endif - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<4, 2, T, Q>::mat(vec<2, V0, Q> const& 
v0, vec<2, V1, Q> const& v1, vec<2, V2, Q> const& v2, vec<2, V3, Q> const& v3) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(v0), col_type(v1), col_type(v2), col_type(v3)} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(v0); - this->value[1] = col_type(v1); - this->value[2] = col_type(v2); - this->value[3] = col_type(v3); -# endif - } - - // -- Conversion -- - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<4, 2, T, Q>::mat(mat<4, 2, U, P> const& m) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(m[0]), col_type(m[1]), col_type(m[2]), col_type(m[3])} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(m[0]); - this->value[1] = col_type(m[1]); - this->value[2] = col_type(m[2]); - this->value[3] = col_type(m[3]); -# endif - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<4, 2, T, Q>::mat(mat<2, 2, T, Q> const& m) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(m[0]), col_type(m[1]), col_type(0), col_type(0)} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(m[0]); - this->value[1] = col_type(m[1]); - this->value[2] = col_type(0); - this->value[3] = col_type(0); -# endif - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<4, 2, T, Q>::mat(mat<3, 3, T, Q> const& m) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(m[0]), col_type(m[1]), col_type(m[2]), col_type(0)} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(m[0]); - this->value[1] = col_type(m[1]); - this->value[2] = col_type(m[2]); - this->value[3] = col_type(0); -# endif - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<4, 2, T, Q>::mat(mat<4, 4, T, Q> const& m) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(m[0]), col_type(m[1]), col_type(m[2]), col_type(m[3])} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(m[0]); - this->value[1] = col_type(m[1]); - this->value[2] = col_type(m[2]); - this->value[3] = col_type(m[3]); -# endif - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<4, 2, T, Q>::mat(mat<2, 3, T, Q> const& m) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(m[0]), col_type(m[1]), col_type(0), col_type(0)} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(m[0]); - this->value[1] = col_type(m[1]); - this->value[2] = col_type(0); - this->value[3] = col_type(0); -# endif - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<4, 2, T, Q>::mat(mat<3, 2, T, Q> const& m) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(m[0]), col_type(m[1]), col_type(m[2]), col_type(0)} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(m[0]); - this->value[1] = col_type(m[1]); - this->value[2] = col_type(m[2]); - this->value[3] = col_type(0); -# endif - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<4, 2, T, Q>::mat(mat<2, 4, T, Q> const& m) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(m[0]), col_type(m[1]), col_type(0), col_type(0)} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(m[0]); - this->value[1] = col_type(m[1]); - this->value[2] = col_type(0); - this->value[3] = col_type(0); -# endif - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<4, 2, T, Q>::mat(mat<4, 3, T, Q> const& m) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(m[0]), col_type(m[1]), col_type(m[2]), col_type(m[3])} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(m[0]); - this->value[1] = col_type(m[1]); - this->value[2] = col_type(m[2]); - this->value[3] = col_type(m[3]); -# endif - } 
- - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<4, 2, T, Q>::mat(mat<3, 4, T, Q> const& m) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(m[0]), col_type(m[1]), col_type(m[2]), col_type(0)} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(m[0]); - this->value[1] = col_type(m[1]); - this->value[2] = col_type(m[2]); - this->value[3] = col_type(0); -# endif - } - - // -- Accesses -- - - template - GLM_FUNC_QUALIFIER typename mat<4, 2, T, Q>::col_type & mat<4, 2, T, Q>::operator[](typename mat<4, 2, T, Q>::length_type i) - { - assert(i < this->length()); - return this->value[i]; - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR typename mat<4, 2, T, Q>::col_type const& mat<4, 2, T, Q>::operator[](typename mat<4, 2, T, Q>::length_type i) const - { - assert(i < this->length()); - return this->value[i]; - } - - // -- Unary updatable operators -- - - template - template - GLM_FUNC_QUALIFIER mat<4, 2, T, Q>& mat<4, 2, T, Q>::operator=(mat<4, 2, U, Q> const& m) - { - this->value[0] = m[0]; - this->value[1] = m[1]; - this->value[2] = m[2]; - this->value[3] = m[3]; - return *this; - } - - template - template - GLM_FUNC_QUALIFIER mat<4, 2, T, Q> & mat<4, 2, T, Q>::operator+=(U s) - { - this->value[0] += s; - this->value[1] += s; - this->value[2] += s; - this->value[3] += s; - return *this; - } - - template - template - GLM_FUNC_QUALIFIER mat<4, 2, T, Q> & mat<4, 2, T, Q>::operator+=(mat<4, 2, U, Q> const& m) - { - this->value[0] += m[0]; - this->value[1] += m[1]; - this->value[2] += m[2]; - this->value[3] += m[3]; - return *this; - } - - template - template - GLM_FUNC_QUALIFIER mat<4, 2, T, Q> & mat<4, 2, T, Q>::operator-=(U s) - { - this->value[0] -= s; - this->value[1] -= s; - this->value[2] -= s; - this->value[3] -= s; - return *this; - } - - template - template - GLM_FUNC_QUALIFIER mat<4, 2, T, Q> & mat<4, 2, T, Q>::operator-=(mat<4, 2, U, Q> const& m) - { - this->value[0] -= m[0]; - this->value[1] -= m[1]; - this->value[2] -= m[2]; - this->value[3] -= m[3]; - return *this; - } - - template - template - GLM_FUNC_QUALIFIER mat<4, 2, T, Q> & mat<4, 2, T, Q>::operator*=(U s) - { - this->value[0] *= s; - this->value[1] *= s; - this->value[2] *= s; - this->value[3] *= s; - return *this; - } - - template - template - GLM_FUNC_QUALIFIER mat<4, 2, T, Q> & mat<4, 2, T, Q>::operator/=(U s) - { - this->value[0] /= s; - this->value[1] /= s; - this->value[2] /= s; - this->value[3] /= s; - return *this; - } - - // -- Increment and decrement operators -- - - template - GLM_FUNC_QUALIFIER mat<4, 2, T, Q> & mat<4, 2, T, Q>::operator++() - { - ++this->value[0]; - ++this->value[1]; - ++this->value[2]; - ++this->value[3]; - return *this; - } - - template - GLM_FUNC_QUALIFIER mat<4, 2, T, Q> & mat<4, 2, T, Q>::operator--() - { - --this->value[0]; - --this->value[1]; - --this->value[2]; - --this->value[3]; - return *this; - } - - template - GLM_FUNC_QUALIFIER mat<4, 2, T, Q> mat<4, 2, T, Q>::operator++(int) - { - mat<4, 2, T, Q> Result(*this); - ++*this; - return Result; - } - - template - GLM_FUNC_QUALIFIER mat<4, 2, T, Q> mat<4, 2, T, Q>::operator--(int) - { - mat<4, 2, T, Q> Result(*this); - --*this; - return Result; - } - - // -- Unary arithmetic operators -- - - template - GLM_FUNC_QUALIFIER mat<4, 2, T, Q> operator+(mat<4, 2, T, Q> const& m) - { - return m; - } - - template - GLM_FUNC_QUALIFIER mat<4, 2, T, Q> operator-(mat<4, 2, T, Q> const& m) - { - return mat<4, 2, T, Q>( - -m[0], - -m[1], - -m[2], - -m[3]); - } - - // -- Binary arithmetic operators -- - - template - 
GLM_FUNC_QUALIFIER mat<4, 2, T, Q> operator+(mat<4, 2, T, Q> const& m, T scalar) - { - return mat<4, 2, T, Q>( - m[0] + scalar, - m[1] + scalar, - m[2] + scalar, - m[3] + scalar); - } - - template - GLM_FUNC_QUALIFIER mat<4, 2, T, Q> operator+(mat<4, 2, T, Q> const& m1, mat<4, 2, T, Q> const& m2) - { - return mat<4, 2, T, Q>( - m1[0] + m2[0], - m1[1] + m2[1], - m1[2] + m2[2], - m1[3] + m2[3]); - } - - template - GLM_FUNC_QUALIFIER mat<4, 2, T, Q> operator-(mat<4, 2, T, Q> const& m, T scalar) - { - return mat<4, 2, T, Q>( - m[0] - scalar, - m[1] - scalar, - m[2] - scalar, - m[3] - scalar); - } - - template - GLM_FUNC_QUALIFIER mat<4, 2, T, Q> operator-(mat<4, 2, T, Q> const& m1, mat<4, 2, T, Q> const& m2) - { - return mat<4, 2, T, Q>( - m1[0] - m2[0], - m1[1] - m2[1], - m1[2] - m2[2], - m1[3] - m2[3]); - } - - template - GLM_FUNC_QUALIFIER mat<4, 2, T, Q> operator*(mat<4, 2, T, Q> const& m, T scalar) - { - return mat<4, 2, T, Q>( - m[0] * scalar, - m[1] * scalar, - m[2] * scalar, - m[3] * scalar); - } - - template - GLM_FUNC_QUALIFIER mat<4, 2, T, Q> operator*(T scalar, mat<4, 2, T, Q> const& m) - { - return mat<4, 2, T, Q>( - m[0] * scalar, - m[1] * scalar, - m[2] * scalar, - m[3] * scalar); - } - - template - GLM_FUNC_QUALIFIER typename mat<4, 2, T, Q>::col_type operator*(mat<4, 2, T, Q> const& m, typename mat<4, 2, T, Q>::row_type const& v) - { - return typename mat<4, 2, T, Q>::col_type( - m[0][0] * v.x + m[1][0] * v.y + m[2][0] * v.z + m[3][0] * v.w, - m[0][1] * v.x + m[1][1] * v.y + m[2][1] * v.z + m[3][1] * v.w); - } - - template - GLM_FUNC_QUALIFIER typename mat<4, 2, T, Q>::row_type operator*(typename mat<4, 2, T, Q>::col_type const& v, mat<4, 2, T, Q> const& m) - { - return typename mat<4, 2, T, Q>::row_type( - v.x * m[0][0] + v.y * m[0][1], - v.x * m[1][0] + v.y * m[1][1], - v.x * m[2][0] + v.y * m[2][1], - v.x * m[3][0] + v.y * m[3][1]); - } - - template - GLM_FUNC_QUALIFIER mat<2, 2, T, Q> operator*(mat<4, 2, T, Q> const& m1, mat<2, 4, T, Q> const& m2) - { - T const SrcA00 = m1[0][0]; - T const SrcA01 = m1[0][1]; - T const SrcA10 = m1[1][0]; - T const SrcA11 = m1[1][1]; - T const SrcA20 = m1[2][0]; - T const SrcA21 = m1[2][1]; - T const SrcA30 = m1[3][0]; - T const SrcA31 = m1[3][1]; - - T const SrcB00 = m2[0][0]; - T const SrcB01 = m2[0][1]; - T const SrcB02 = m2[0][2]; - T const SrcB03 = m2[0][3]; - T const SrcB10 = m2[1][0]; - T const SrcB11 = m2[1][1]; - T const SrcB12 = m2[1][2]; - T const SrcB13 = m2[1][3]; - - mat<2, 2, T, Q> Result; - Result[0][0] = SrcA00 * SrcB00 + SrcA10 * SrcB01 + SrcA20 * SrcB02 + SrcA30 * SrcB03; - Result[0][1] = SrcA01 * SrcB00 + SrcA11 * SrcB01 + SrcA21 * SrcB02 + SrcA31 * SrcB03; - Result[1][0] = SrcA00 * SrcB10 + SrcA10 * SrcB11 + SrcA20 * SrcB12 + SrcA30 * SrcB13; - Result[1][1] = SrcA01 * SrcB10 + SrcA11 * SrcB11 + SrcA21 * SrcB12 + SrcA31 * SrcB13; - return Result; - } - - template - GLM_FUNC_QUALIFIER mat<3, 2, T, Q> operator*(mat<4, 2, T, Q> const& m1, mat<3, 4, T, Q> const& m2) - { - return mat<3, 2, T, Q>( - m1[0][0] * m2[0][0] + m1[1][0] * m2[0][1] + m1[2][0] * m2[0][2] + m1[3][0] * m2[0][3], - m1[0][1] * m2[0][0] + m1[1][1] * m2[0][1] + m1[2][1] * m2[0][2] + m1[3][1] * m2[0][3], - m1[0][0] * m2[1][0] + m1[1][0] * m2[1][1] + m1[2][0] * m2[1][2] + m1[3][0] * m2[1][3], - m1[0][1] * m2[1][0] + m1[1][1] * m2[1][1] + m1[2][1] * m2[1][2] + m1[3][1] * m2[1][3], - m1[0][0] * m2[2][0] + m1[1][0] * m2[2][1] + m1[2][0] * m2[2][2] + m1[3][0] * m2[2][3], - m1[0][1] * m2[2][0] + m1[1][1] * m2[2][1] + m1[2][1] * m2[2][2] + m1[3][1] * m2[2][3]); - } - 
- template - GLM_FUNC_QUALIFIER mat<4, 2, T, Q> operator*(mat<4, 2, T, Q> const& m1, mat<4, 4, T, Q> const& m2) - { - return mat<4, 2, T, Q>( - m1[0][0] * m2[0][0] + m1[1][0] * m2[0][1] + m1[2][0] * m2[0][2] + m1[3][0] * m2[0][3], - m1[0][1] * m2[0][0] + m1[1][1] * m2[0][1] + m1[2][1] * m2[0][2] + m1[3][1] * m2[0][3], - m1[0][0] * m2[1][0] + m1[1][0] * m2[1][1] + m1[2][0] * m2[1][2] + m1[3][0] * m2[1][3], - m1[0][1] * m2[1][0] + m1[1][1] * m2[1][1] + m1[2][1] * m2[1][2] + m1[3][1] * m2[1][3], - m1[0][0] * m2[2][0] + m1[1][0] * m2[2][1] + m1[2][0] * m2[2][2] + m1[3][0] * m2[2][3], - m1[0][1] * m2[2][0] + m1[1][1] * m2[2][1] + m1[2][1] * m2[2][2] + m1[3][1] * m2[2][3], - m1[0][0] * m2[3][0] + m1[1][0] * m2[3][1] + m1[2][0] * m2[3][2] + m1[3][0] * m2[3][3], - m1[0][1] * m2[3][0] + m1[1][1] * m2[3][1] + m1[2][1] * m2[3][2] + m1[3][1] * m2[3][3]); - } - - template - GLM_FUNC_QUALIFIER mat<4, 2, T, Q> operator/(mat<4, 2, T, Q> const& m, T scalar) - { - return mat<4, 2, T, Q>( - m[0] / scalar, - m[1] / scalar, - m[2] / scalar, - m[3] / scalar); - } - - template - GLM_FUNC_QUALIFIER mat<4, 2, T, Q> operator/(T scalar, mat<4, 2, T, Q> const& m) - { - return mat<4, 2, T, Q>( - scalar / m[0], - scalar / m[1], - scalar / m[2], - scalar / m[3]); - } - - // -- Boolean operators -- - - template - GLM_FUNC_QUALIFIER bool operator==(mat<4, 2, T, Q> const& m1, mat<4, 2, T, Q> const& m2) - { - return (m1[0] == m2[0]) && (m1[1] == m2[1]) && (m1[2] == m2[2]) && (m1[3] == m2[3]); - } - - template - GLM_FUNC_QUALIFIER bool operator!=(mat<4, 2, T, Q> const& m1, mat<4, 2, T, Q> const& m2) - { - return (m1[0] != m2[0]) || (m1[1] != m2[1]) || (m1[2] != m2[2]) || (m1[3] != m2[3]); - } -} //namespace glm diff --git a/third_party/glm/detail/type_mat4x3.hpp b/third_party/glm/detail/type_mat4x3.hpp deleted file mode 100755 index 16e4270..0000000 --- a/third_party/glm/detail/type_mat4x3.hpp +++ /dev/null @@ -1,171 +0,0 @@ -/// @ref core -/// @file glm/detail/type_mat4x3.hpp - -#pragma once - -#include "type_vec3.hpp" -#include "type_vec4.hpp" -#include -#include - -namespace glm -{ - template - struct mat<4, 3, T, Q> - { - typedef vec<3, T, Q> col_type; - typedef vec<4, T, Q> row_type; - typedef mat<4, 3, T, Q> type; - typedef mat<3, 4, T, Q> transpose_type; - typedef T value_type; - - private: - col_type value[4]; - - public: - // -- Accesses -- - - typedef length_t length_type; - GLM_FUNC_DECL static GLM_CONSTEXPR length_type length() { return 4; } - - GLM_FUNC_DECL col_type & operator[](length_type i); - GLM_FUNC_DECL GLM_CONSTEXPR col_type const& operator[](length_type i) const; - - // -- Constructors -- - - GLM_FUNC_DECL GLM_CONSTEXPR mat() GLM_DEFAULT; - template - GLM_FUNC_DECL GLM_CONSTEXPR mat(mat<4, 3, T, P> const& m); - - GLM_FUNC_DECL explicit GLM_CONSTEXPR mat(T const& x); - GLM_FUNC_DECL GLM_CONSTEXPR mat( - T const& x0, T const& y0, T const& z0, - T const& x1, T const& y1, T const& z1, - T const& x2, T const& y2, T const& z2, - T const& x3, T const& y3, T const& z3); - GLM_FUNC_DECL GLM_CONSTEXPR mat( - col_type const& v0, - col_type const& v1, - col_type const& v2, - col_type const& v3); - - // -- Conversions -- - - template< - typename X1, typename Y1, typename Z1, - typename X2, typename Y2, typename Z2, - typename X3, typename Y3, typename Z3, - typename X4, typename Y4, typename Z4> - GLM_FUNC_DECL GLM_CONSTEXPR mat( - X1 const& x1, Y1 const& y1, Z1 const& z1, - X2 const& x2, Y2 const& y2, Z2 const& z2, - X3 const& x3, Y3 const& y3, Z3 const& z3, - X4 const& x4, Y4 const& y4, Z4 const& z4); - - 
template - GLM_FUNC_DECL GLM_CONSTEXPR mat( - vec<3, V1, Q> const& v1, - vec<3, V2, Q> const& v2, - vec<3, V3, Q> const& v3, - vec<3, V4, Q> const& v4); - - // -- Matrix conversions -- - - template - GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<4, 3, U, P> const& m); - - GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<2, 2, T, Q> const& x); - GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<3, 3, T, Q> const& x); - GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<4, 4, T, Q> const& x); - GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<2, 3, T, Q> const& x); - GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<3, 2, T, Q> const& x); - GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<2, 4, T, Q> const& x); - GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<4, 2, T, Q> const& x); - GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<3, 4, T, Q> const& x); - - // -- Unary arithmetic operators -- - - template - GLM_FUNC_DECL mat<4, 3, T, Q> & operator=(mat<4, 3, U, Q> const& m); - template - GLM_FUNC_DECL mat<4, 3, T, Q> & operator+=(U s); - template - GLM_FUNC_DECL mat<4, 3, T, Q> & operator+=(mat<4, 3, U, Q> const& m); - template - GLM_FUNC_DECL mat<4, 3, T, Q> & operator-=(U s); - template - GLM_FUNC_DECL mat<4, 3, T, Q> & operator-=(mat<4, 3, U, Q> const& m); - template - GLM_FUNC_DECL mat<4, 3, T, Q> & operator*=(U s); - template - GLM_FUNC_DECL mat<4, 3, T, Q> & operator/=(U s); - - // -- Increment and decrement operators -- - - GLM_FUNC_DECL mat<4, 3, T, Q>& operator++(); - GLM_FUNC_DECL mat<4, 3, T, Q>& operator--(); - GLM_FUNC_DECL mat<4, 3, T, Q> operator++(int); - GLM_FUNC_DECL mat<4, 3, T, Q> operator--(int); - }; - - // -- Unary operators -- - - template - GLM_FUNC_DECL mat<4, 3, T, Q> operator+(mat<4, 3, T, Q> const& m); - - template - GLM_FUNC_DECL mat<4, 3, T, Q> operator-(mat<4, 3, T, Q> const& m); - - // -- Binary operators -- - - template - GLM_FUNC_DECL mat<4, 3, T, Q> operator+(mat<4, 3, T, Q> const& m, T const& s); - - template - GLM_FUNC_DECL mat<4, 3, T, Q> operator+(mat<4, 3, T, Q> const& m1, mat<4, 3, T, Q> const& m2); - - template - GLM_FUNC_DECL mat<4, 3, T, Q> operator-(mat<4, 3, T, Q> const& m, T const& s); - - template - GLM_FUNC_DECL mat<4, 3, T, Q> operator-(mat<4, 3, T, Q> const& m1, mat<4, 3, T, Q> const& m2); - - template - GLM_FUNC_DECL mat<4, 3, T, Q> operator*(mat<4, 3, T, Q> const& m, T const& s); - - template - GLM_FUNC_DECL mat<4, 3, T, Q> operator*(T const& s, mat<4, 3, T, Q> const& m); - - template - GLM_FUNC_DECL typename mat<4, 3, T, Q>::col_type operator*(mat<4, 3, T, Q> const& m, typename mat<4, 3, T, Q>::row_type const& v); - - template - GLM_FUNC_DECL typename mat<4, 3, T, Q>::row_type operator*(typename mat<4, 3, T, Q>::col_type const& v, mat<4, 3, T, Q> const& m); - - template - GLM_FUNC_DECL mat<2, 3, T, Q> operator*(mat<4, 3, T, Q> const& m1, mat<2, 4, T, Q> const& m2); - - template - GLM_FUNC_DECL mat<3, 3, T, Q> operator*(mat<4, 3, T, Q> const& m1, mat<3, 4, T, Q> const& m2); - - template - GLM_FUNC_DECL mat<4, 3, T, Q> operator*(mat<4, 3, T, Q> const& m1, mat<4, 4, T, Q> const& m2); - - template - GLM_FUNC_DECL mat<4, 3, T, Q> operator/(mat<4, 3, T, Q> const& m, T const& s); - - template - GLM_FUNC_DECL mat<4, 3, T, Q> operator/(T const& s, mat<4, 3, T, Q> const& m); - - // -- Boolean operators -- - - template - GLM_FUNC_DECL bool operator==(mat<4, 3, T, Q> const& m1, mat<4, 3, T, Q> const& m2); - - template - GLM_FUNC_DECL bool operator!=(mat<4, 3, T, Q> const& m1, mat<4, 3, T, Q> const& m2); -}//namespace glm - -#ifndef 
GLM_EXTERNAL_TEMPLATE -#include "type_mat4x3.inl" -#endif //GLM_EXTERNAL_TEMPLATE diff --git a/third_party/glm/detail/type_mat4x3.inl b/third_party/glm/detail/type_mat4x3.inl deleted file mode 100755 index 11b1ee3..0000000 --- a/third_party/glm/detail/type_mat4x3.inl +++ /dev/null @@ -1,598 +0,0 @@ -namespace glm -{ - // -- Constructors -- - -# if GLM_CONFIG_DEFAULTED_FUNCTIONS == GLM_DISABLE - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<4, 3, T, Q>::mat() -# if GLM_CONFIG_CTOR_INIT == GLM_CTOR_INITIALIZER_LIST - : value{col_type(1, 0, 0), col_type(0, 1, 0), col_type(0, 0, 1), col_type(0, 0, 0)} -# endif - { -# if GLM_CONFIG_CTOR_INIT == GLM_CTOR_INITIALISATION - this->value[0] = col_type(1, 0, 0); - this->value[1] = col_type(0, 1, 0); - this->value[2] = col_type(0, 0, 1); - this->value[3] = col_type(0, 0, 0); -# endif - } -# endif - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<4, 3, T, Q>::mat(mat<4, 3, T, P> const& m) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(m[0]), col_type(m[1]), col_type(m[2]), col_type(m[3])} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = m[0]; - this->value[1] = m[1]; - this->value[2] = m[2]; - this->value[3] = m[3]; -# endif - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<4, 3, T, Q>::mat(T const& s) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(s, 0, 0), col_type(0, s, 0), col_type(0, 0, s), col_type(0, 0, 0)} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(s, 0, 0); - this->value[1] = col_type(0, s, 0); - this->value[2] = col_type(0, 0, s); - this->value[3] = col_type(0, 0, 0); -# endif - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<4, 3, T, Q>::mat - ( - T const& x0, T const& y0, T const& z0, - T const& x1, T const& y1, T const& z1, - T const& x2, T const& y2, T const& z2, - T const& x3, T const& y3, T const& z3 - ) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(x0, y0, z0), col_type(x1, y1, z1), col_type(x2, y2, z2), col_type(x3, y3, z3)} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(x0, y0, z0); - this->value[1] = col_type(x1, y1, z1); - this->value[2] = col_type(x2, y2, z2); - this->value[3] = col_type(x3, y3, z3); -# endif - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<4, 3, T, Q>::mat(col_type const& v0, col_type const& v1, col_type const& v2, col_type const& v3) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(v0), col_type(v1), col_type(v2), col_type(v3)} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = v0; - this->value[1] = v1; - this->value[2] = v2; - this->value[3] = v3; -# endif - } - - // -- Conversion constructors -- - - template - template< - typename X0, typename Y0, typename Z0, - typename X1, typename Y1, typename Z1, - typename X2, typename Y2, typename Z2, - typename X3, typename Y3, typename Z3> - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<4, 3, T, Q>::mat - ( - X0 const& x0, Y0 const& y0, Z0 const& z0, - X1 const& x1, Y1 const& y1, Z1 const& z1, - X2 const& x2, Y2 const& y2, Z2 const& z2, - X3 const& x3, Y3 const& y3, Z3 const& z3 - ) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(x0, y0, z0), col_type(x1, y1, z1), col_type(x2, y2, z2), col_type(x3, y3, z3)} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(x0, y0, z0); - this->value[1] = col_type(x1, y1, z1); - this->value[2] = col_type(x2, y2, z2); - this->value[3] = col_type(x3, y3, z3); -# endif - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<4, 3, T, Q>::mat(vec<3, V1, Q> const& v1, vec<3, V2, 
Q> const& v2, vec<3, V3, Q> const& v3, vec<3, V4, Q> const& v4) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(v1), col_type(v2), col_type(v3), col_type(v4)} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(v1); - this->value[1] = col_type(v2); - this->value[2] = col_type(v3); - this->value[3] = col_type(v4); -# endif - } - - // -- Matrix conversions -- - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<4, 3, T, Q>::mat(mat<4, 3, U, P> const& m) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(m[0]), col_type(m[1]), col_type(m[2]), col_type(m[3])} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(m[0]); - this->value[1] = col_type(m[1]); - this->value[2] = col_type(m[2]); - this->value[3] = col_type(m[3]); -# endif - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<4, 3, T, Q>::mat(mat<2, 2, T, Q> const& m) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(m[0], 0), col_type(m[1], 0), col_type(0, 0, 1), col_type(0)} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(m[0], 0); - this->value[1] = col_type(m[1], 0); - this->value[2] = col_type(0, 0, 1); - this->value[3] = col_type(0); -# endif - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<4, 3, T, Q>::mat(mat<3, 3, T, Q> const& m) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(m[0]), col_type(m[1]), col_type(m[2]), col_type(0)} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(m[0]); - this->value[1] = col_type(m[1]); - this->value[2] = col_type(m[2]); - this->value[3] = col_type(0); -# endif - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<4, 3, T, Q>::mat(mat<4, 4, T, Q> const& m) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(m[0]), col_type(m[1]), col_type(m[2]), col_type(m[3])} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(m[0]); - this->value[1] = col_type(m[1]); - this->value[2] = col_type(m[2]); - this->value[3] = col_type(m[3]); -# endif - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<4, 3, T, Q>::mat(mat<2, 3, T, Q> const& m) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(m[0]), col_type(m[1]), col_type(0, 0, 1), col_type(0)} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(m[0]); - this->value[1] = col_type(m[1]); - this->value[2] = col_type(0, 0, 1); - this->value[3] = col_type(0); -# endif - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<4, 3, T, Q>::mat(mat<3, 2, T, Q> const& m) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(m[0], 0), col_type(m[1], 0), col_type(m[2], 1), col_type(0)} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(m[0], 0); - this->value[1] = col_type(m[1], 0); - this->value[2] = col_type(m[2], 1); - this->value[3] = col_type(0); -# endif - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<4, 3, T, Q>::mat(mat<2, 4, T, Q> const& m) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(m[0]), col_type(m[1]), col_type(0, 0, 1), col_type(0)} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(m[0]); - this->value[1] = col_type(m[1]); - this->value[2] = col_type(0, 0, 1); - this->value[3] = col_type(0); -# endif - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<4, 3, T, Q>::mat(mat<4, 2, T, Q> const& m) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(m[0], 0), col_type(m[1], 0), col_type(m[2], 1), col_type(m[3], 0)} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(m[0], 0); - this->value[1] = col_type(m[1], 0); - 
this->value[2] = col_type(m[2], 1); - this->value[3] = col_type(m[3], 0); -# endif - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<4, 3, T, Q>::mat(mat<3, 4, T, Q> const& m) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(m[0]), col_type(m[1]), col_type(m[2]), col_type(0)} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(m[0]); - this->value[1] = col_type(m[1]); - this->value[2] = col_type(m[2]); - this->value[3] = col_type(0); -# endif - } - - // -- Accesses -- - - template - GLM_FUNC_QUALIFIER typename mat<4, 3, T, Q>::col_type & mat<4, 3, T, Q>::operator[](typename mat<4, 3, T, Q>::length_type i) - { - assert(i < this->length()); - return this->value[i]; - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR typename mat<4, 3, T, Q>::col_type const& mat<4, 3, T, Q>::operator[](typename mat<4, 3, T, Q>::length_type i) const - { - assert(i < this->length()); - return this->value[i]; - } - - // -- Unary updatable operators -- - - template - template - GLM_FUNC_QUALIFIER mat<4, 3, T, Q>& mat<4, 3, T, Q>::operator=(mat<4, 3, U, Q> const& m) - { - this->value[0] = m[0]; - this->value[1] = m[1]; - this->value[2] = m[2]; - this->value[3] = m[3]; - return *this; - } - - template - template - GLM_FUNC_QUALIFIER mat<4, 3, T, Q> & mat<4, 3, T, Q>::operator+=(U s) - { - this->value[0] += s; - this->value[1] += s; - this->value[2] += s; - this->value[3] += s; - return *this; - } - - template - template - GLM_FUNC_QUALIFIER mat<4, 3, T, Q> & mat<4, 3, T, Q>::operator+=(mat<4, 3, U, Q> const& m) - { - this->value[0] += m[0]; - this->value[1] += m[1]; - this->value[2] += m[2]; - this->value[3] += m[3]; - return *this; - } - - template - template - GLM_FUNC_QUALIFIER mat<4, 3, T, Q> & mat<4, 3, T, Q>::operator-=(U s) - { - this->value[0] -= s; - this->value[1] -= s; - this->value[2] -= s; - this->value[3] -= s; - return *this; - } - - template - template - GLM_FUNC_QUALIFIER mat<4, 3, T, Q> & mat<4, 3, T, Q>::operator-=(mat<4, 3, U, Q> const& m) - { - this->value[0] -= m[0]; - this->value[1] -= m[1]; - this->value[2] -= m[2]; - this->value[3] -= m[3]; - return *this; - } - - template - template - GLM_FUNC_QUALIFIER mat<4, 3, T, Q> & mat<4, 3, T, Q>::operator*=(U s) - { - this->value[0] *= s; - this->value[1] *= s; - this->value[2] *= s; - this->value[3] *= s; - return *this; - } - - template - template - GLM_FUNC_QUALIFIER mat<4, 3, T, Q> & mat<4, 3, T, Q>::operator/=(U s) - { - this->value[0] /= s; - this->value[1] /= s; - this->value[2] /= s; - this->value[3] /= s; - return *this; - } - - // -- Increment and decrement operators -- - - template - GLM_FUNC_QUALIFIER mat<4, 3, T, Q> & mat<4, 3, T, Q>::operator++() - { - ++this->value[0]; - ++this->value[1]; - ++this->value[2]; - ++this->value[3]; - return *this; - } - - template - GLM_FUNC_QUALIFIER mat<4, 3, T, Q> & mat<4, 3, T, Q>::operator--() - { - --this->value[0]; - --this->value[1]; - --this->value[2]; - --this->value[3]; - return *this; - } - - template - GLM_FUNC_QUALIFIER mat<4, 3, T, Q> mat<4, 3, T, Q>::operator++(int) - { - mat<4, 3, T, Q> Result(*this); - ++*this; - return Result; - } - - template - GLM_FUNC_QUALIFIER mat<4, 3, T, Q> mat<4, 3, T, Q>::operator--(int) - { - mat<4, 3, T, Q> Result(*this); - --*this; - return Result; - } - - // -- Unary arithmetic operators -- - - template - GLM_FUNC_QUALIFIER mat<4, 3, T, Q> operator+(mat<4, 3, T, Q> const& m) - { - return m; - } - - template - GLM_FUNC_QUALIFIER mat<4, 3, T, Q> operator-(mat<4, 3, T, Q> const& m) - { - return mat<4, 3, T, Q>( - -m[0], - -m[1], - 
-m[2], - -m[3]); - } - - // -- Binary arithmetic operators -- - - template - GLM_FUNC_QUALIFIER mat<4, 3, T, Q> operator+(mat<4, 3, T, Q> const& m, T const& s) - { - return mat<4, 3, T, Q>( - m[0] + s, - m[1] + s, - m[2] + s, - m[3] + s); - } - - template - GLM_FUNC_QUALIFIER mat<4, 3, T, Q> operator+(mat<4, 3, T, Q> const& m1, mat<4, 3, T, Q> const& m2) - { - return mat<4, 3, T, Q>( - m1[0] + m2[0], - m1[1] + m2[1], - m1[2] + m2[2], - m1[3] + m2[3]); - } - - template - GLM_FUNC_QUALIFIER mat<4, 3, T, Q> operator-(mat<4, 3, T, Q> const& m, T const& s) - { - return mat<4, 3, T, Q>( - m[0] - s, - m[1] - s, - m[2] - s, - m[3] - s); - } - - template - GLM_FUNC_QUALIFIER mat<4, 3, T, Q> operator-(mat<4, 3, T, Q> const& m1, mat<4, 3, T, Q> const& m2) - { - return mat<4, 3, T, Q>( - m1[0] - m2[0], - m1[1] - m2[1], - m1[2] - m2[2], - m1[3] - m2[3]); - } - - template - GLM_FUNC_QUALIFIER mat<4, 3, T, Q> operator*(mat<4, 3, T, Q> const& m, T const& s) - { - return mat<4, 3, T, Q>( - m[0] * s, - m[1] * s, - m[2] * s, - m[3] * s); - } - - template - GLM_FUNC_QUALIFIER mat<4, 3, T, Q> operator*(T const& s, mat<4, 3, T, Q> const& m) - { - return mat<4, 3, T, Q>( - m[0] * s, - m[1] * s, - m[2] * s, - m[3] * s); - } - - template - GLM_FUNC_QUALIFIER typename mat<4, 3, T, Q>::col_type operator* - ( - mat<4, 3, T, Q> const& m, - typename mat<4, 3, T, Q>::row_type const& v) - { - return typename mat<4, 3, T, Q>::col_type( - m[0][0] * v.x + m[1][0] * v.y + m[2][0] * v.z + m[3][0] * v.w, - m[0][1] * v.x + m[1][1] * v.y + m[2][1] * v.z + m[3][1] * v.w, - m[0][2] * v.x + m[1][2] * v.y + m[2][2] * v.z + m[3][2] * v.w); - } - - template - GLM_FUNC_QUALIFIER typename mat<4, 3, T, Q>::row_type operator* - ( - typename mat<4, 3, T, Q>::col_type const& v, - mat<4, 3, T, Q> const& m) - { - return typename mat<4, 3, T, Q>::row_type( - v.x * m[0][0] + v.y * m[0][1] + v.z * m[0][2], - v.x * m[1][0] + v.y * m[1][1] + v.z * m[1][2], - v.x * m[2][0] + v.y * m[2][1] + v.z * m[2][2], - v.x * m[3][0] + v.y * m[3][1] + v.z * m[3][2]); - } - - template - GLM_FUNC_QUALIFIER mat<2, 3, T, Q> operator*(mat<4, 3, T, Q> const& m1, mat<2, 4, T, Q> const& m2) - { - return mat<2, 3, T, Q>( - m1[0][0] * m2[0][0] + m1[1][0] * m2[0][1] + m1[2][0] * m2[0][2] + m1[3][0] * m2[0][3], - m1[0][1] * m2[0][0] + m1[1][1] * m2[0][1] + m1[2][1] * m2[0][2] + m1[3][1] * m2[0][3], - m1[0][2] * m2[0][0] + m1[1][2] * m2[0][1] + m1[2][2] * m2[0][2] + m1[3][2] * m2[0][3], - m1[0][0] * m2[1][0] + m1[1][0] * m2[1][1] + m1[2][0] * m2[1][2] + m1[3][0] * m2[1][3], - m1[0][1] * m2[1][0] + m1[1][1] * m2[1][1] + m1[2][1] * m2[1][2] + m1[3][1] * m2[1][3], - m1[0][2] * m2[1][0] + m1[1][2] * m2[1][1] + m1[2][2] * m2[1][2] + m1[3][2] * m2[1][3]); - } - - template - GLM_FUNC_QUALIFIER mat<3, 3, T, Q> operator*(mat<4, 3, T, Q> const& m1, mat<3, 4, T, Q> const& m2) - { - T const SrcA00 = m1[0][0]; - T const SrcA01 = m1[0][1]; - T const SrcA02 = m1[0][2]; - T const SrcA10 = m1[1][0]; - T const SrcA11 = m1[1][1]; - T const SrcA12 = m1[1][2]; - T const SrcA20 = m1[2][0]; - T const SrcA21 = m1[2][1]; - T const SrcA22 = m1[2][2]; - T const SrcA30 = m1[3][0]; - T const SrcA31 = m1[3][1]; - T const SrcA32 = m1[3][2]; - - T const SrcB00 = m2[0][0]; - T const SrcB01 = m2[0][1]; - T const SrcB02 = m2[0][2]; - T const SrcB03 = m2[0][3]; - T const SrcB10 = m2[1][0]; - T const SrcB11 = m2[1][1]; - T const SrcB12 = m2[1][2]; - T const SrcB13 = m2[1][3]; - T const SrcB20 = m2[2][0]; - T const SrcB21 = m2[2][1]; - T const SrcB22 = m2[2][2]; - T const SrcB23 = m2[2][3]; - - mat<3, 3, T, Q> 
Result; - Result[0][0] = SrcA00 * SrcB00 + SrcA10 * SrcB01 + SrcA20 * SrcB02 + SrcA30 * SrcB03; - Result[0][1] = SrcA01 * SrcB00 + SrcA11 * SrcB01 + SrcA21 * SrcB02 + SrcA31 * SrcB03; - Result[0][2] = SrcA02 * SrcB00 + SrcA12 * SrcB01 + SrcA22 * SrcB02 + SrcA32 * SrcB03; - Result[1][0] = SrcA00 * SrcB10 + SrcA10 * SrcB11 + SrcA20 * SrcB12 + SrcA30 * SrcB13; - Result[1][1] = SrcA01 * SrcB10 + SrcA11 * SrcB11 + SrcA21 * SrcB12 + SrcA31 * SrcB13; - Result[1][2] = SrcA02 * SrcB10 + SrcA12 * SrcB11 + SrcA22 * SrcB12 + SrcA32 * SrcB13; - Result[2][0] = SrcA00 * SrcB20 + SrcA10 * SrcB21 + SrcA20 * SrcB22 + SrcA30 * SrcB23; - Result[2][1] = SrcA01 * SrcB20 + SrcA11 * SrcB21 + SrcA21 * SrcB22 + SrcA31 * SrcB23; - Result[2][2] = SrcA02 * SrcB20 + SrcA12 * SrcB21 + SrcA22 * SrcB22 + SrcA32 * SrcB23; - return Result; - } - - template - GLM_FUNC_QUALIFIER mat<4, 3, T, Q> operator*(mat<4, 3, T, Q> const& m1, mat<4, 4, T, Q> const& m2) - { - return mat<4, 3, T, Q>( - m1[0][0] * m2[0][0] + m1[1][0] * m2[0][1] + m1[2][0] * m2[0][2] + m1[3][0] * m2[0][3], - m1[0][1] * m2[0][0] + m1[1][1] * m2[0][1] + m1[2][1] * m2[0][2] + m1[3][1] * m2[0][3], - m1[0][2] * m2[0][0] + m1[1][2] * m2[0][1] + m1[2][2] * m2[0][2] + m1[3][2] * m2[0][3], - m1[0][0] * m2[1][0] + m1[1][0] * m2[1][1] + m1[2][0] * m2[1][2] + m1[3][0] * m2[1][3], - m1[0][1] * m2[1][0] + m1[1][1] * m2[1][1] + m1[2][1] * m2[1][2] + m1[3][1] * m2[1][3], - m1[0][2] * m2[1][0] + m1[1][2] * m2[1][1] + m1[2][2] * m2[1][2] + m1[3][2] * m2[1][3], - m1[0][0] * m2[2][0] + m1[1][0] * m2[2][1] + m1[2][0] * m2[2][2] + m1[3][0] * m2[2][3], - m1[0][1] * m2[2][0] + m1[1][1] * m2[2][1] + m1[2][1] * m2[2][2] + m1[3][1] * m2[2][3], - m1[0][2] * m2[2][0] + m1[1][2] * m2[2][1] + m1[2][2] * m2[2][2] + m1[3][2] * m2[2][3], - m1[0][0] * m2[3][0] + m1[1][0] * m2[3][1] + m1[2][0] * m2[3][2] + m1[3][0] * m2[3][3], - m1[0][1] * m2[3][0] + m1[1][1] * m2[3][1] + m1[2][1] * m2[3][2] + m1[3][1] * m2[3][3], - m1[0][2] * m2[3][0] + m1[1][2] * m2[3][1] + m1[2][2] * m2[3][2] + m1[3][2] * m2[3][3]); - } - - template - GLM_FUNC_QUALIFIER mat<4, 3, T, Q> operator/(mat<4, 3, T, Q> const& m, T const& s) - { - return mat<4, 3, T, Q>( - m[0] / s, - m[1] / s, - m[2] / s, - m[3] / s); - } - - template - GLM_FUNC_QUALIFIER mat<4, 3, T, Q> operator/(T const& s, mat<4, 3, T, Q> const& m) - { - return mat<4, 3, T, Q>( - s / m[0], - s / m[1], - s / m[2], - s / m[3]); - } - - // -- Boolean operators -- - - template - GLM_FUNC_QUALIFIER bool operator==(mat<4, 3, T, Q> const& m1, mat<4, 3, T, Q> const& m2) - { - return (m1[0] == m2[0]) && (m1[1] == m2[1]) && (m1[2] == m2[2]) && (m1[3] == m2[3]); - } - - template - GLM_FUNC_QUALIFIER bool operator!=(mat<4, 3, T, Q> const& m1, mat<4, 3, T, Q> const& m2) - { - return (m1[0] != m2[0]) || (m1[1] != m2[1]) || (m1[2] != m2[2]) || (m1[3] != m2[3]); - } -} //namespace glm diff --git a/third_party/glm/detail/type_mat4x4.hpp b/third_party/glm/detail/type_mat4x4.hpp deleted file mode 100755 index 3517f9f..0000000 --- a/third_party/glm/detail/type_mat4x4.hpp +++ /dev/null @@ -1,189 +0,0 @@ -/// @ref core -/// @file glm/detail/type_mat4x4.hpp - -#pragma once - -#include "type_vec4.hpp" -#include -#include - -namespace glm -{ - template - struct mat<4, 4, T, Q> - { - typedef vec<4, T, Q> col_type; - typedef vec<4, T, Q> row_type; - typedef mat<4, 4, T, Q> type; - typedef mat<4, 4, T, Q> transpose_type; - typedef T value_type; - - private: - col_type value[4]; - - public: - // -- Accesses -- - - typedef length_t length_type; - GLM_FUNC_DECL static GLM_CONSTEXPR 
length_type length(){return 4;} - - GLM_FUNC_DECL col_type & operator[](length_type i); - GLM_FUNC_DECL GLM_CONSTEXPR col_type const& operator[](length_type i) const; - - // -- Constructors -- - - GLM_FUNC_DECL GLM_CONSTEXPR mat() GLM_DEFAULT; - template - GLM_FUNC_DECL GLM_CONSTEXPR mat(mat<4, 4, T, P> const& m); - - GLM_FUNC_DECL explicit GLM_CONSTEXPR mat(T const& x); - GLM_FUNC_DECL GLM_CONSTEXPR mat( - T const& x0, T const& y0, T const& z0, T const& w0, - T const& x1, T const& y1, T const& z1, T const& w1, - T const& x2, T const& y2, T const& z2, T const& w2, - T const& x3, T const& y3, T const& z3, T const& w3); - GLM_FUNC_DECL GLM_CONSTEXPR mat( - col_type const& v0, - col_type const& v1, - col_type const& v2, - col_type const& v3); - - // -- Conversions -- - - template< - typename X1, typename Y1, typename Z1, typename W1, - typename X2, typename Y2, typename Z2, typename W2, - typename X3, typename Y3, typename Z3, typename W3, - typename X4, typename Y4, typename Z4, typename W4> - GLM_FUNC_DECL GLM_CONSTEXPR mat( - X1 const& x1, Y1 const& y1, Z1 const& z1, W1 const& w1, - X2 const& x2, Y2 const& y2, Z2 const& z2, W2 const& w2, - X3 const& x3, Y3 const& y3, Z3 const& z3, W3 const& w3, - X4 const& x4, Y4 const& y4, Z4 const& z4, W4 const& w4); - - template - GLM_FUNC_DECL GLM_CONSTEXPR mat( - vec<4, V1, Q> const& v1, - vec<4, V2, Q> const& v2, - vec<4, V3, Q> const& v3, - vec<4, V4, Q> const& v4); - - // -- Matrix conversions -- - - template - GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<4, 4, U, P> const& m); - - GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<2, 2, T, Q> const& x); - GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<3, 3, T, Q> const& x); - GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<2, 3, T, Q> const& x); - GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<3, 2, T, Q> const& x); - GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<2, 4, T, Q> const& x); - GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<4, 2, T, Q> const& x); - GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<3, 4, T, Q> const& x); - GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR mat(mat<4, 3, T, Q> const& x); - - // -- Unary arithmetic operators -- - - template - GLM_FUNC_DECL mat<4, 4, T, Q> & operator=(mat<4, 4, U, Q> const& m); - template - GLM_FUNC_DECL mat<4, 4, T, Q> & operator+=(U s); - template - GLM_FUNC_DECL mat<4, 4, T, Q> & operator+=(mat<4, 4, U, Q> const& m); - template - GLM_FUNC_DECL mat<4, 4, T, Q> & operator-=(U s); - template - GLM_FUNC_DECL mat<4, 4, T, Q> & operator-=(mat<4, 4, U, Q> const& m); - template - GLM_FUNC_DECL mat<4, 4, T, Q> & operator*=(U s); - template - GLM_FUNC_DECL mat<4, 4, T, Q> & operator*=(mat<4, 4, U, Q> const& m); - template - GLM_FUNC_DECL mat<4, 4, T, Q> & operator/=(U s); - template - GLM_FUNC_DECL mat<4, 4, T, Q> & operator/=(mat<4, 4, U, Q> const& m); - - // -- Increment and decrement operators -- - - GLM_FUNC_DECL mat<4, 4, T, Q> & operator++(); - GLM_FUNC_DECL mat<4, 4, T, Q> & operator--(); - GLM_FUNC_DECL mat<4, 4, T, Q> operator++(int); - GLM_FUNC_DECL mat<4, 4, T, Q> operator--(int); - }; - - // -- Unary operators -- - - template - GLM_FUNC_DECL mat<4, 4, T, Q> operator+(mat<4, 4, T, Q> const& m); - - template - GLM_FUNC_DECL mat<4, 4, T, Q> operator-(mat<4, 4, T, Q> const& m); - - // -- Binary operators -- - - template - GLM_FUNC_DECL mat<4, 4, T, Q> operator+(mat<4, 4, T, Q> const& m, T const& s); - - template - GLM_FUNC_DECL mat<4, 4, T, Q> operator+(T const& s, mat<4, 4, T, Q> const& m); - - template - GLM_FUNC_DECL mat<4, 4, T, Q> 
operator+(mat<4, 4, T, Q> const& m1, mat<4, 4, T, Q> const& m2); - - template - GLM_FUNC_DECL mat<4, 4, T, Q> operator-(mat<4, 4, T, Q> const& m, T const& s); - - template - GLM_FUNC_DECL mat<4, 4, T, Q> operator-(T const& s, mat<4, 4, T, Q> const& m); - - template - GLM_FUNC_DECL mat<4, 4, T, Q> operator-(mat<4, 4, T, Q> const& m1, mat<4, 4, T, Q> const& m2); - - template - GLM_FUNC_DECL mat<4, 4, T, Q> operator*(mat<4, 4, T, Q> const& m, T const& s); - - template - GLM_FUNC_DECL mat<4, 4, T, Q> operator*(T const& s, mat<4, 4, T, Q> const& m); - - template - GLM_FUNC_DECL typename mat<4, 4, T, Q>::col_type operator*(mat<4, 4, T, Q> const& m, typename mat<4, 4, T, Q>::row_type const& v); - - template - GLM_FUNC_DECL typename mat<4, 4, T, Q>::row_type operator*(typename mat<4, 4, T, Q>::col_type const& v, mat<4, 4, T, Q> const& m); - - template - GLM_FUNC_DECL mat<2, 4, T, Q> operator*(mat<4, 4, T, Q> const& m1, mat<2, 4, T, Q> const& m2); - - template - GLM_FUNC_DECL mat<3, 4, T, Q> operator*(mat<4, 4, T, Q> const& m1, mat<3, 4, T, Q> const& m2); - - template - GLM_FUNC_DECL mat<4, 4, T, Q> operator*(mat<4, 4, T, Q> const& m1, mat<4, 4, T, Q> const& m2); - - template - GLM_FUNC_DECL mat<4, 4, T, Q> operator/(mat<4, 4, T, Q> const& m, T const& s); - - template - GLM_FUNC_DECL mat<4, 4, T, Q> operator/(T const& s, mat<4, 4, T, Q> const& m); - - template - GLM_FUNC_DECL typename mat<4, 4, T, Q>::col_type operator/(mat<4, 4, T, Q> const& m, typename mat<4, 4, T, Q>::row_type const& v); - - template - GLM_FUNC_DECL typename mat<4, 4, T, Q>::row_type operator/(typename mat<4, 4, T, Q>::col_type const& v, mat<4, 4, T, Q> const& m); - - template - GLM_FUNC_DECL mat<4, 4, T, Q> operator/(mat<4, 4, T, Q> const& m1, mat<4, 4, T, Q> const& m2); - - // -- Boolean operators -- - - template - GLM_FUNC_DECL bool operator==(mat<4, 4, T, Q> const& m1, mat<4, 4, T, Q> const& m2); - - template - GLM_FUNC_DECL bool operator!=(mat<4, 4, T, Q> const& m1, mat<4, 4, T, Q> const& m2); -}//namespace glm - -#ifndef GLM_EXTERNAL_TEMPLATE -#include "type_mat4x4.inl" -#endif//GLM_EXTERNAL_TEMPLATE diff --git a/third_party/glm/detail/type_mat4x4.inl b/third_party/glm/detail/type_mat4x4.inl deleted file mode 100755 index e38b87f..0000000 --- a/third_party/glm/detail/type_mat4x4.inl +++ /dev/null @@ -1,706 +0,0 @@ -#include "../matrix.hpp" - -namespace glm -{ - // -- Constructors -- - -# if GLM_CONFIG_DEFAULTED_FUNCTIONS == GLM_DISABLE - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<4, 4, T, Q>::mat() -# if GLM_CONFIG_CTOR_INIT == GLM_CTOR_INITIALIZER_LIST - : value{col_type(1, 0, 0, 0), col_type(0, 1, 0, 0), col_type(0, 0, 1, 0), col_type(0, 0, 0, 1)} -# endif - { -# if GLM_CONFIG_CTOR_INIT == GLM_CTOR_INITIALISATION - this->value[0] = col_type(1, 0, 0, 0); - this->value[1] = col_type(0, 1, 0, 0); - this->value[2] = col_type(0, 0, 1, 0); - this->value[3] = col_type(0, 0, 0, 1); -# endif - } -# endif - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<4, 4, T, Q>::mat(mat<4, 4, T, P> const& m) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(m[0]), col_type(m[1]), col_type(m[2]), col_type(m[3])} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = m[0]; - this->value[1] = m[1]; - this->value[2] = m[2]; - this->value[3] = m[3]; -# endif - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<4, 4, T, Q>::mat(T const& s) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(s, 0, 0, 0), col_type(0, s, 0, 0), col_type(0, 0, s, 0), col_type(0, 0, 0, s)} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - 
this->value[0] = col_type(s, 0, 0, 0); - this->value[1] = col_type(0, s, 0, 0); - this->value[2] = col_type(0, 0, s, 0); - this->value[3] = col_type(0, 0, 0, s); -# endif - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<4, 4, T, Q>::mat - ( - T const& x0, T const& y0, T const& z0, T const& w0, - T const& x1, T const& y1, T const& z1, T const& w1, - T const& x2, T const& y2, T const& z2, T const& w2, - T const& x3, T const& y3, T const& z3, T const& w3 - ) -# if GLM_HAS_INITIALIZER_LISTS - : value{ - col_type(x0, y0, z0, w0), - col_type(x1, y1, z1, w1), - col_type(x2, y2, z2, w2), - col_type(x3, y3, z3, w3)} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(x0, y0, z0, w0); - this->value[1] = col_type(x1, y1, z1, w1); - this->value[2] = col_type(x2, y2, z2, w2); - this->value[3] = col_type(x3, y3, z3, w3); -# endif - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<4, 4, T, Q>::mat(col_type const& v0, col_type const& v1, col_type const& v2, col_type const& v3) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(v0), col_type(v1), col_type(v2), col_type(v3)} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = v0; - this->value[1] = v1; - this->value[2] = v2; - this->value[3] = v3; -# endif - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<4, 4, T, Q>::mat(mat<4, 4, U, P> const& m) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(m[0]), col_type(m[1]), col_type(m[2]), col_type(m[3])} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(m[0]); - this->value[1] = col_type(m[1]); - this->value[2] = col_type(m[2]); - this->value[3] = col_type(m[3]); -# endif - } - - // -- Conversions -- - - template - template< - typename X1, typename Y1, typename Z1, typename W1, - typename X2, typename Y2, typename Z2, typename W2, - typename X3, typename Y3, typename Z3, typename W3, - typename X4, typename Y4, typename Z4, typename W4> - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<4, 4, T, Q>::mat - ( - X1 const& x1, Y1 const& y1, Z1 const& z1, W1 const& w1, - X2 const& x2, Y2 const& y2, Z2 const& z2, W2 const& w2, - X3 const& x3, Y3 const& y3, Z3 const& z3, W3 const& w3, - X4 const& x4, Y4 const& y4, Z4 const& z4, W4 const& w4 - ) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(x1, y1, z1, w1), col_type(x2, y2, z2, w2), col_type(x3, y3, z3, w3), col_type(x4, y4, z4, w4)} -# endif - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559 || std::numeric_limits::is_integer || GLM_CONFIG_UNRESTRICTED_GENTYPE, "*mat4x4 constructor only takes float and integer types, 1st parameter type invalid."); - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559 || std::numeric_limits::is_integer || GLM_CONFIG_UNRESTRICTED_GENTYPE, "*mat4x4 constructor only takes float and integer types, 2nd parameter type invalid."); - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559 || std::numeric_limits::is_integer || GLM_CONFIG_UNRESTRICTED_GENTYPE, "*mat4x4 constructor only takes float and integer types, 3rd parameter type invalid."); - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559 || std::numeric_limits::is_integer || GLM_CONFIG_UNRESTRICTED_GENTYPE, "*mat4x4 constructor only takes float and integer types, 4th parameter type invalid."); - - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559 || std::numeric_limits::is_integer || GLM_CONFIG_UNRESTRICTED_GENTYPE, "*mat4x4 constructor only takes float and integer types, 5th parameter type invalid."); - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559 || std::numeric_limits::is_integer || 
GLM_CONFIG_UNRESTRICTED_GENTYPE, "*mat4x4 constructor only takes float and integer types, 6th parameter type invalid."); - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559 || std::numeric_limits::is_integer || GLM_CONFIG_UNRESTRICTED_GENTYPE, "*mat4x4 constructor only takes float and integer types, 7th parameter type invalid."); - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559 || std::numeric_limits::is_integer || GLM_CONFIG_UNRESTRICTED_GENTYPE, "*mat4x4 constructor only takes float and integer types, 8th parameter type invalid."); - - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559 || std::numeric_limits::is_integer || GLM_CONFIG_UNRESTRICTED_GENTYPE, "*mat4x4 constructor only takes float and integer types, 9th parameter type invalid."); - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559 || std::numeric_limits::is_integer || GLM_CONFIG_UNRESTRICTED_GENTYPE, "*mat4x4 constructor only takes float and integer types, 10th parameter type invalid."); - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559 || std::numeric_limits::is_integer || GLM_CONFIG_UNRESTRICTED_GENTYPE, "*mat4x4 constructor only takes float and integer types, 11th parameter type invalid."); - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559 || std::numeric_limits::is_integer || GLM_CONFIG_UNRESTRICTED_GENTYPE, "*mat4x4 constructor only takes float and integer types, 12th parameter type invalid."); - - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559 || std::numeric_limits::is_integer || GLM_CONFIG_UNRESTRICTED_GENTYPE, "*mat4x4 constructor only takes float and integer types, 13th parameter type invalid."); - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559 || std::numeric_limits::is_integer || GLM_CONFIG_UNRESTRICTED_GENTYPE, "*mat4x4 constructor only takes float and integer types, 14th parameter type invalid."); - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559 || std::numeric_limits::is_integer || GLM_CONFIG_UNRESTRICTED_GENTYPE, "*mat4x4 constructor only takes float and integer types, 15th parameter type invalid."); - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559 || std::numeric_limits::is_integer || GLM_CONFIG_UNRESTRICTED_GENTYPE, "*mat4x4 constructor only takes float and integer types, 16th parameter type invalid."); - -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(x1, y1, z1, w1); - this->value[1] = col_type(x2, y2, z2, w2); - this->value[2] = col_type(x3, y3, z3, w3); - this->value[3] = col_type(x4, y4, z4, w4); -# endif - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<4, 4, T, Q>::mat(vec<4, V1, Q> const& v1, vec<4, V2, Q> const& v2, vec<4, V3, Q> const& v3, vec<4, V4, Q> const& v4) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(v1), col_type(v2), col_type(v3), col_type(v4)} -# endif - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559 || std::numeric_limits::is_integer || GLM_CONFIG_UNRESTRICTED_GENTYPE, "*mat4x4 constructor only takes float and integer types, 1st parameter type invalid."); - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559 || std::numeric_limits::is_integer || GLM_CONFIG_UNRESTRICTED_GENTYPE, "*mat4x4 constructor only takes float and integer types, 2nd parameter type invalid."); - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559 || std::numeric_limits::is_integer || GLM_CONFIG_UNRESTRICTED_GENTYPE, "*mat4x4 constructor only takes float and integer types, 3rd parameter type invalid."); - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559 || std::numeric_limits::is_integer || GLM_CONFIG_UNRESTRICTED_GENTYPE, "*mat4x4 constructor only takes float and integer 
types, 4th parameter type invalid."); - -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(v1); - this->value[1] = col_type(v2); - this->value[2] = col_type(v3); - this->value[3] = col_type(v4); -# endif - } - - // -- Matrix conversions -- - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<4, 4, T, Q>::mat(mat<2, 2, T, Q> const& m) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(m[0], 0, 0), col_type(m[1], 0, 0), col_type(0, 0, 1, 0), col_type(0, 0, 0, 1)} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(m[0], 0, 0); - this->value[1] = col_type(m[1], 0, 0); - this->value[2] = col_type(0, 0, 1, 0); - this->value[3] = col_type(0, 0, 0, 1); -# endif - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<4, 4, T, Q>::mat(mat<3, 3, T, Q> const& m) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(m[0], 0), col_type(m[1], 0), col_type(m[2], 0), col_type(0, 0, 0, 1)} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(m[0], 0); - this->value[1] = col_type(m[1], 0); - this->value[2] = col_type(m[2], 0); - this->value[3] = col_type(0, 0, 0, 1); -# endif - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<4, 4, T, Q>::mat(mat<2, 3, T, Q> const& m) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(m[0], 0), col_type(m[1], 0), col_type(0, 0, 1, 0), col_type(0, 0, 0, 1)} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(m[0], 0); - this->value[1] = col_type(m[1], 0); - this->value[2] = col_type(0, 0, 1, 0); - this->value[3] = col_type(0, 0, 0, 1); -# endif - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<4, 4, T, Q>::mat(mat<3, 2, T, Q> const& m) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(m[0], 0, 0), col_type(m[1], 0, 0), col_type(m[2], 1, 0), col_type(0, 0, 0, 1)} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(m[0], 0, 0); - this->value[1] = col_type(m[1], 0, 0); - this->value[2] = col_type(m[2], 1, 0); - this->value[3] = col_type(0, 0, 0, 1); -# endif - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<4, 4, T, Q>::mat(mat<2, 4, T, Q> const& m) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(m[0]), col_type(m[1]), col_type(0, 0, 1, 0), col_type(0, 0, 0, 1)} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = m[0]; - this->value[1] = m[1]; - this->value[2] = col_type(0, 0, 1, 0); - this->value[3] = col_type(0, 0, 0, 1); -# endif - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<4, 4, T, Q>::mat(mat<4, 2, T, Q> const& m) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(m[0], 0, 0), col_type(m[1], 0, 0), col_type(0, 0, 1, 0), col_type(0, 0, 0, 1)} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(m[0], 0, 0); - this->value[1] = col_type(m[1], 0, 0); - this->value[2] = col_type(0, 0, 1, 0); - this->value[3] = col_type(0, 0, 0, 1); -# endif - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<4, 4, T, Q>::mat(mat<3, 4, T, Q> const& m) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(m[0]), col_type(m[1]), col_type(m[2]), col_type(0, 0, 0, 1)} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = m[0]; - this->value[1] = m[1]; - this->value[2] = m[2]; - this->value[3] = col_type(0, 0, 0, 1); -# endif - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR mat<4, 4, T, Q>::mat(mat<4, 3, T, Q> const& m) -# if GLM_HAS_INITIALIZER_LISTS - : value{col_type(m[0], 0), col_type(m[1], 0), col_type(m[2], 0), col_type(m[3], 1)} -# endif - { -# if !GLM_HAS_INITIALIZER_LISTS - this->value[0] = col_type(m[0], 0); - 
this->value[1] = col_type(m[1], 0); - this->value[2] = col_type(m[2], 0); - this->value[3] = col_type(m[3], 1); -# endif - } - - // -- Accesses -- - - template - GLM_FUNC_QUALIFIER typename mat<4, 4, T, Q>::col_type & mat<4, 4, T, Q>::operator[](typename mat<4, 4, T, Q>::length_type i) - { - assert(i < this->length()); - return this->value[i]; - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR typename mat<4, 4, T, Q>::col_type const& mat<4, 4, T, Q>::operator[](typename mat<4, 4, T, Q>::length_type i) const - { - assert(i < this->length()); - return this->value[i]; - } - - // -- Unary arithmetic operators -- - - template - template - GLM_FUNC_QUALIFIER mat<4, 4, T, Q>& mat<4, 4, T, Q>::operator=(mat<4, 4, U, Q> const& m) - { - //memcpy could be faster - //memcpy(&this->value, &m.value, 16 * sizeof(valType)); - this->value[0] = m[0]; - this->value[1] = m[1]; - this->value[2] = m[2]; - this->value[3] = m[3]; - return *this; - } - - template - template - GLM_FUNC_QUALIFIER mat<4, 4, T, Q>& mat<4, 4, T, Q>::operator+=(U s) - { - this->value[0] += s; - this->value[1] += s; - this->value[2] += s; - this->value[3] += s; - return *this; - } - - template - template - GLM_FUNC_QUALIFIER mat<4, 4, T, Q>& mat<4, 4, T, Q>::operator+=(mat<4, 4, U, Q> const& m) - { - this->value[0] += m[0]; - this->value[1] += m[1]; - this->value[2] += m[2]; - this->value[3] += m[3]; - return *this; - } - - template - template - GLM_FUNC_QUALIFIER mat<4, 4, T, Q> & mat<4, 4, T, Q>::operator-=(U s) - { - this->value[0] -= s; - this->value[1] -= s; - this->value[2] -= s; - this->value[3] -= s; - return *this; - } - - template - template - GLM_FUNC_QUALIFIER mat<4, 4, T, Q> & mat<4, 4, T, Q>::operator-=(mat<4, 4, U, Q> const& m) - { - this->value[0] -= m[0]; - this->value[1] -= m[1]; - this->value[2] -= m[2]; - this->value[3] -= m[3]; - return *this; - } - - template - template - GLM_FUNC_QUALIFIER mat<4, 4, T, Q> & mat<4, 4, T, Q>::operator*=(U s) - { - this->value[0] *= s; - this->value[1] *= s; - this->value[2] *= s; - this->value[3] *= s; - return *this; - } - - template - template - GLM_FUNC_QUALIFIER mat<4, 4, T, Q> & mat<4, 4, T, Q>::operator*=(mat<4, 4, U, Q> const& m) - { - return (*this = *this * m); - } - - template - template - GLM_FUNC_QUALIFIER mat<4, 4, T, Q> & mat<4, 4, T, Q>::operator/=(U s) - { - this->value[0] /= s; - this->value[1] /= s; - this->value[2] /= s; - this->value[3] /= s; - return *this; - } - - template - template - GLM_FUNC_QUALIFIER mat<4, 4, T, Q> & mat<4, 4, T, Q>::operator/=(mat<4, 4, U, Q> const& m) - { - return *this *= inverse(m); - } - - // -- Increment and decrement operators -- - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, Q> & mat<4, 4, T, Q>::operator++() - { - ++this->value[0]; - ++this->value[1]; - ++this->value[2]; - ++this->value[3]; - return *this; - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, Q> & mat<4, 4, T, Q>::operator--() - { - --this->value[0]; - --this->value[1]; - --this->value[2]; - --this->value[3]; - return *this; - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, Q> mat<4, 4, T, Q>::operator++(int) - { - mat<4, 4, T, Q> Result(*this); - ++*this; - return Result; - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, Q> mat<4, 4, T, Q>::operator--(int) - { - mat<4, 4, T, Q> Result(*this); - --*this; - return Result; - } - - // -- Unary constant operators -- - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, Q> operator+(mat<4, 4, T, Q> const& m) - { - return m; - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, Q> operator-(mat<4, 4, T, Q> const& m) - { - return 
mat<4, 4, T, Q>( - -m[0], - -m[1], - -m[2], - -m[3]); - } - - // -- Binary arithmetic operators -- - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, Q> operator+(mat<4, 4, T, Q> const& m, T const& s) - { - return mat<4, 4, T, Q>( - m[0] + s, - m[1] + s, - m[2] + s, - m[3] + s); - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, Q> operator+(T const& s, mat<4, 4, T, Q> const& m) - { - return mat<4, 4, T, Q>( - m[0] + s, - m[1] + s, - m[2] + s, - m[3] + s); - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, Q> operator+(mat<4, 4, T, Q> const& m1, mat<4, 4, T, Q> const& m2) - { - return mat<4, 4, T, Q>( - m1[0] + m2[0], - m1[1] + m2[1], - m1[2] + m2[2], - m1[3] + m2[3]); - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, Q> operator-(mat<4, 4, T, Q> const& m, T const& s) - { - return mat<4, 4, T, Q>( - m[0] - s, - m[1] - s, - m[2] - s, - m[3] - s); - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, Q> operator-(T const& s, mat<4, 4, T, Q> const& m) - { - return mat<4, 4, T, Q>( - s - m[0], - s - m[1], - s - m[2], - s - m[3]); - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, Q> operator-(mat<4, 4, T, Q> const& m1, mat<4, 4, T, Q> const& m2) - { - return mat<4, 4, T, Q>( - m1[0] - m2[0], - m1[1] - m2[1], - m1[2] - m2[2], - m1[3] - m2[3]); - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, Q> operator*(mat<4, 4, T, Q> const& m, T const & s) - { - return mat<4, 4, T, Q>( - m[0] * s, - m[1] * s, - m[2] * s, - m[3] * s); - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, Q> operator*(T const& s, mat<4, 4, T, Q> const& m) - { - return mat<4, 4, T, Q>( - m[0] * s, - m[1] * s, - m[2] * s, - m[3] * s); - } - - template - GLM_FUNC_QUALIFIER typename mat<4, 4, T, Q>::col_type operator* - ( - mat<4, 4, T, Q> const& m, - typename mat<4, 4, T, Q>::row_type const& v - ) - { -/* - __m128 v0 = _mm_shuffle_ps(v.data, v.data, _MM_SHUFFLE(0, 0, 0, 0)); - __m128 v1 = _mm_shuffle_ps(v.data, v.data, _MM_SHUFFLE(1, 1, 1, 1)); - __m128 v2 = _mm_shuffle_ps(v.data, v.data, _MM_SHUFFLE(2, 2, 2, 2)); - __m128 v3 = _mm_shuffle_ps(v.data, v.data, _MM_SHUFFLE(3, 3, 3, 3)); - - __m128 m0 = _mm_mul_ps(m[0].data, v0); - __m128 m1 = _mm_mul_ps(m[1].data, v1); - __m128 a0 = _mm_add_ps(m0, m1); - - __m128 m2 = _mm_mul_ps(m[2].data, v2); - __m128 m3 = _mm_mul_ps(m[3].data, v3); - __m128 a1 = _mm_add_ps(m2, m3); - - __m128 a2 = _mm_add_ps(a0, a1); - - return typename mat<4, 4, T, Q>::col_type(a2); -*/ - - typename mat<4, 4, T, Q>::col_type const Mov0(v[0]); - typename mat<4, 4, T, Q>::col_type const Mov1(v[1]); - typename mat<4, 4, T, Q>::col_type const Mul0 = m[0] * Mov0; - typename mat<4, 4, T, Q>::col_type const Mul1 = m[1] * Mov1; - typename mat<4, 4, T, Q>::col_type const Add0 = Mul0 + Mul1; - typename mat<4, 4, T, Q>::col_type const Mov2(v[2]); - typename mat<4, 4, T, Q>::col_type const Mov3(v[3]); - typename mat<4, 4, T, Q>::col_type const Mul2 = m[2] * Mov2; - typename mat<4, 4, T, Q>::col_type const Mul3 = m[3] * Mov3; - typename mat<4, 4, T, Q>::col_type const Add1 = Mul2 + Mul3; - typename mat<4, 4, T, Q>::col_type const Add2 = Add0 + Add1; - return Add2; - -/* - return typename mat<4, 4, T, Q>::col_type( - m[0][0] * v[0] + m[1][0] * v[1] + m[2][0] * v[2] + m[3][0] * v[3], - m[0][1] * v[0] + m[1][1] * v[1] + m[2][1] * v[2] + m[3][1] * v[3], - m[0][2] * v[0] + m[1][2] * v[1] + m[2][2] * v[2] + m[3][2] * v[3], - m[0][3] * v[0] + m[1][3] * v[1] + m[2][3] * v[2] + m[3][3] * v[3]); -*/ - } - - template - GLM_FUNC_QUALIFIER typename mat<4, 4, T, Q>::row_type operator* - ( - typename mat<4, 4, T, Q>::col_type const& v, - mat<4, 
4, T, Q> const& m - ) - { - return typename mat<4, 4, T, Q>::row_type( - m[0][0] * v[0] + m[0][1] * v[1] + m[0][2] * v[2] + m[0][3] * v[3], - m[1][0] * v[0] + m[1][1] * v[1] + m[1][2] * v[2] + m[1][3] * v[3], - m[2][0] * v[0] + m[2][1] * v[1] + m[2][2] * v[2] + m[2][3] * v[3], - m[3][0] * v[0] + m[3][1] * v[1] + m[3][2] * v[2] + m[3][3] * v[3]); - } - - template - GLM_FUNC_QUALIFIER mat<2, 4, T, Q> operator*(mat<4, 4, T, Q> const& m1, mat<2, 4, T, Q> const& m2) - { - return mat<2, 4, T, Q>( - m1[0][0] * m2[0][0] + m1[1][0] * m2[0][1] + m1[2][0] * m2[0][2] + m1[3][0] * m2[0][3], - m1[0][1] * m2[0][0] + m1[1][1] * m2[0][1] + m1[2][1] * m2[0][2] + m1[3][1] * m2[0][3], - m1[0][2] * m2[0][0] + m1[1][2] * m2[0][1] + m1[2][2] * m2[0][2] + m1[3][2] * m2[0][3], - m1[0][3] * m2[0][0] + m1[1][3] * m2[0][1] + m1[2][3] * m2[0][2] + m1[3][3] * m2[0][3], - m1[0][0] * m2[1][0] + m1[1][0] * m2[1][1] + m1[2][0] * m2[1][2] + m1[3][0] * m2[1][3], - m1[0][1] * m2[1][0] + m1[1][1] * m2[1][1] + m1[2][1] * m2[1][2] + m1[3][1] * m2[1][3], - m1[0][2] * m2[1][0] + m1[1][2] * m2[1][1] + m1[2][2] * m2[1][2] + m1[3][2] * m2[1][3], - m1[0][3] * m2[1][0] + m1[1][3] * m2[1][1] + m1[2][3] * m2[1][2] + m1[3][3] * m2[1][3]); - } - - template - GLM_FUNC_QUALIFIER mat<3, 4, T, Q> operator*(mat<4, 4, T, Q> const& m1, mat<3, 4, T, Q> const& m2) - { - return mat<3, 4, T, Q>( - m1[0][0] * m2[0][0] + m1[1][0] * m2[0][1] + m1[2][0] * m2[0][2] + m1[3][0] * m2[0][3], - m1[0][1] * m2[0][0] + m1[1][1] * m2[0][1] + m1[2][1] * m2[0][2] + m1[3][1] * m2[0][3], - m1[0][2] * m2[0][0] + m1[1][2] * m2[0][1] + m1[2][2] * m2[0][2] + m1[3][2] * m2[0][3], - m1[0][3] * m2[0][0] + m1[1][3] * m2[0][1] + m1[2][3] * m2[0][2] + m1[3][3] * m2[0][3], - m1[0][0] * m2[1][0] + m1[1][0] * m2[1][1] + m1[2][0] * m2[1][2] + m1[3][0] * m2[1][3], - m1[0][1] * m2[1][0] + m1[1][1] * m2[1][1] + m1[2][1] * m2[1][2] + m1[3][1] * m2[1][3], - m1[0][2] * m2[1][0] + m1[1][2] * m2[1][1] + m1[2][2] * m2[1][2] + m1[3][2] * m2[1][3], - m1[0][3] * m2[1][0] + m1[1][3] * m2[1][1] + m1[2][3] * m2[1][2] + m1[3][3] * m2[1][3], - m1[0][0] * m2[2][0] + m1[1][0] * m2[2][1] + m1[2][0] * m2[2][2] + m1[3][0] * m2[2][3], - m1[0][1] * m2[2][0] + m1[1][1] * m2[2][1] + m1[2][1] * m2[2][2] + m1[3][1] * m2[2][3], - m1[0][2] * m2[2][0] + m1[1][2] * m2[2][1] + m1[2][2] * m2[2][2] + m1[3][2] * m2[2][3], - m1[0][3] * m2[2][0] + m1[1][3] * m2[2][1] + m1[2][3] * m2[2][2] + m1[3][3] * m2[2][3]); - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, Q> operator*(mat<4, 4, T, Q> const& m1, mat<4, 4, T, Q> const& m2) - { - typename mat<4, 4, T, Q>::col_type const SrcA0 = m1[0]; - typename mat<4, 4, T, Q>::col_type const SrcA1 = m1[1]; - typename mat<4, 4, T, Q>::col_type const SrcA2 = m1[2]; - typename mat<4, 4, T, Q>::col_type const SrcA3 = m1[3]; - - typename mat<4, 4, T, Q>::col_type const SrcB0 = m2[0]; - typename mat<4, 4, T, Q>::col_type const SrcB1 = m2[1]; - typename mat<4, 4, T, Q>::col_type const SrcB2 = m2[2]; - typename mat<4, 4, T, Q>::col_type const SrcB3 = m2[3]; - - mat<4, 4, T, Q> Result; - Result[0] = SrcA0 * SrcB0[0] + SrcA1 * SrcB0[1] + SrcA2 * SrcB0[2] + SrcA3 * SrcB0[3]; - Result[1] = SrcA0 * SrcB1[0] + SrcA1 * SrcB1[1] + SrcA2 * SrcB1[2] + SrcA3 * SrcB1[3]; - Result[2] = SrcA0 * SrcB2[0] + SrcA1 * SrcB2[1] + SrcA2 * SrcB2[2] + SrcA3 * SrcB2[3]; - Result[3] = SrcA0 * SrcB3[0] + SrcA1 * SrcB3[1] + SrcA2 * SrcB3[2] + SrcA3 * SrcB3[3]; - return Result; - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, Q> operator/(mat<4, 4, T, Q> const& m, T const& s) - { - return mat<4, 4, T, Q>( - m[0] 
/ s, - m[1] / s, - m[2] / s, - m[3] / s); - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, Q> operator/(T const& s, mat<4, 4, T, Q> const& m) - { - return mat<4, 4, T, Q>( - s / m[0], - s / m[1], - s / m[2], - s / m[3]); - } - - template - GLM_FUNC_QUALIFIER typename mat<4, 4, T, Q>::col_type operator/(mat<4, 4, T, Q> const& m, typename mat<4, 4, T, Q>::row_type const& v) - { - return inverse(m) * v; - } - - template - GLM_FUNC_QUALIFIER typename mat<4, 4, T, Q>::row_type operator/(typename mat<4, 4, T, Q>::col_type const& v, mat<4, 4, T, Q> const& m) - { - return v * inverse(m); - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, Q> operator/(mat<4, 4, T, Q> const& m1, mat<4, 4, T, Q> const& m2) - { - mat<4, 4, T, Q> m1_copy(m1); - return m1_copy /= m2; - } - - // -- Boolean operators -- - - template - GLM_FUNC_QUALIFIER bool operator==(mat<4, 4, T, Q> const& m1, mat<4, 4, T, Q> const& m2) - { - return (m1[0] == m2[0]) && (m1[1] == m2[1]) && (m1[2] == m2[2]) && (m1[3] == m2[3]); - } - - template - GLM_FUNC_QUALIFIER bool operator!=(mat<4, 4, T, Q> const& m1, mat<4, 4, T, Q> const& m2) - { - return (m1[0] != m2[0]) || (m1[1] != m2[1]) || (m1[2] != m2[2]) || (m1[3] != m2[3]); - } -}//namespace glm - -#if GLM_CONFIG_SIMD == GLM_ENABLE -# include "type_mat4x4_simd.inl" -#endif diff --git a/third_party/glm/detail/type_mat4x4_simd.inl b/third_party/glm/detail/type_mat4x4_simd.inl deleted file mode 100755 index fb3a16f..0000000 --- a/third_party/glm/detail/type_mat4x4_simd.inl +++ /dev/null @@ -1,6 +0,0 @@ -/// @ref core - -namespace glm -{ - -}//namespace glm diff --git a/third_party/glm/detail/type_quat.hpp b/third_party/glm/detail/type_quat.hpp deleted file mode 100755 index 0e60bc3..0000000 --- a/third_party/glm/detail/type_quat.hpp +++ /dev/null @@ -1,186 +0,0 @@ -/// @ref core -/// @file glm/detail/type_quat.hpp - -#pragma once - -// Dependency: -#include "../detail/type_mat3x3.hpp" -#include "../detail/type_mat4x4.hpp" -#include "../detail/type_vec3.hpp" -#include "../detail/type_vec4.hpp" -#include "../ext/vector_relational.hpp" -#include "../ext/quaternion_relational.hpp" -#include "../gtc/constants.hpp" -#include "../gtc/matrix_transform.hpp" - -namespace glm -{ - template - struct qua - { - // -- Implementation detail -- - - typedef qua type; - typedef T value_type; - - // -- Data -- - -# if GLM_SILENT_WARNINGS == GLM_ENABLE -# if GLM_COMPILER & GLM_COMPILER_GCC -# pragma GCC diagnostic push -# pragma GCC diagnostic ignored "-Wpedantic" -# elif GLM_COMPILER & GLM_COMPILER_CLANG -# pragma clang diagnostic push -# pragma clang diagnostic ignored "-Wgnu-anonymous-struct" -# pragma clang diagnostic ignored "-Wnested-anon-types" -# elif GLM_COMPILER & GLM_COMPILER_VC -# pragma warning(push) -# pragma warning(disable: 4201) // nonstandard extension used : nameless struct/union -# endif -# endif - -# if GLM_LANG & GLM_LANG_CXXMS_FLAG - union - { -# ifdef GLM_FORCE_QUAT_DATA_WXYZ - struct { T w, x, y, z; }; -# else - struct { T x, y, z, w; }; -# endif - - typename detail::storage<4, T, detail::is_aligned::value>::type data; - }; -# else -# ifdef GLM_FORCE_QUAT_DATA_WXYZ - T w, x, y, z; -# else - T x, y, z, w; -# endif -# endif - -# if GLM_SILENT_WARNINGS == GLM_ENABLE -# if GLM_COMPILER & GLM_COMPILER_CLANG -# pragma clang diagnostic pop -# elif GLM_COMPILER & GLM_COMPILER_GCC -# pragma GCC diagnostic pop -# elif GLM_COMPILER & GLM_COMPILER_VC -# pragma warning(pop) -# endif -# endif - - // -- Component accesses -- - - typedef length_t length_type; - - /// Return the count of components of a 
quaternion - GLM_FUNC_DECL static GLM_CONSTEXPR length_type length(){return 4;} - - GLM_FUNC_DECL GLM_CONSTEXPR T & operator[](length_type i); - GLM_FUNC_DECL GLM_CONSTEXPR T const& operator[](length_type i) const; - - // -- Implicit basic constructors -- - - GLM_FUNC_DECL GLM_CONSTEXPR qua() GLM_DEFAULT; - GLM_FUNC_DECL GLM_CONSTEXPR qua(qua const& q) GLM_DEFAULT; - template - GLM_FUNC_DECL GLM_CONSTEXPR qua(qua const& q); - - // -- Explicit basic constructors -- - - GLM_FUNC_DECL GLM_CONSTEXPR qua(T s, vec<3, T, Q> const& v); - GLM_FUNC_DECL GLM_CONSTEXPR qua(T w, T x, T y, T z); - - // -- Conversion constructors -- - - template - GLM_FUNC_DECL GLM_CONSTEXPR GLM_EXPLICIT qua(qua const& q); - - /// Explicit conversion operators -# if GLM_HAS_EXPLICIT_CONVERSION_OPERATORS - GLM_FUNC_DECL explicit operator mat<3, 3, T, Q>() const; - GLM_FUNC_DECL explicit operator mat<4, 4, T, Q>() const; -# endif - - /// Create a quaternion from two normalized axis - /// - /// @param u A first normalized axis - /// @param v A second normalized axis - /// @see gtc_quaternion - /// @see http://lolengine.net/blog/2013/09/18/beautiful-maths-quaternion-from-vectors - GLM_FUNC_DECL qua(vec<3, T, Q> const& u, vec<3, T, Q> const& v); - - /// Build a quaternion from euler angles (pitch, yaw, roll), in radians. - GLM_FUNC_DECL GLM_CONSTEXPR GLM_EXPLICIT qua(vec<3, T, Q> const& eulerAngles); - GLM_FUNC_DECL GLM_EXPLICIT qua(mat<3, 3, T, Q> const& q); - GLM_FUNC_DECL GLM_EXPLICIT qua(mat<4, 4, T, Q> const& q); - - // -- Unary arithmetic operators -- - - GLM_FUNC_DECL GLM_CONSTEXPR qua& operator=(qua const& q) GLM_DEFAULT; - - template - GLM_FUNC_DECL GLM_CONSTEXPR qua& operator=(qua const& q); - template - GLM_FUNC_DECL GLM_CONSTEXPR qua& operator+=(qua const& q); - template - GLM_FUNC_DECL GLM_CONSTEXPR qua& operator-=(qua const& q); - template - GLM_FUNC_DECL GLM_CONSTEXPR qua& operator*=(qua const& q); - template - GLM_FUNC_DECL GLM_CONSTEXPR qua& operator*=(U s); - template - GLM_FUNC_DECL GLM_CONSTEXPR qua& operator/=(U s); - }; - - // -- Unary bit operators -- - - template - GLM_FUNC_DECL GLM_CONSTEXPR qua operator+(qua const& q); - - template - GLM_FUNC_DECL GLM_CONSTEXPR qua operator-(qua const& q); - - // -- Binary operators -- - - template - GLM_FUNC_DECL GLM_CONSTEXPR qua operator+(qua const& q, qua const& p); - - template - GLM_FUNC_DECL GLM_CONSTEXPR qua operator-(qua const& q, qua const& p); - - template - GLM_FUNC_DECL GLM_CONSTEXPR qua operator*(qua const& q, qua const& p); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> operator*(qua const& q, vec<3, T, Q> const& v); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> operator*(vec<3, T, Q> const& v, qua const& q); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator*(qua const& q, vec<4, T, Q> const& v); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator*(vec<4, T, Q> const& v, qua const& q); - - template - GLM_FUNC_DECL GLM_CONSTEXPR qua operator*(qua const& q, T const& s); - - template - GLM_FUNC_DECL GLM_CONSTEXPR qua operator*(T const& s, qua const& q); - - template - GLM_FUNC_DECL GLM_CONSTEXPR qua operator/(qua const& q, T const& s); - - // -- Boolean operators -- - - template - GLM_FUNC_DECL GLM_CONSTEXPR bool operator==(qua const& q1, qua const& q2); - - template - GLM_FUNC_DECL GLM_CONSTEXPR bool operator!=(qua const& q1, qua const& q2); -} //namespace glm - -#ifndef GLM_EXTERNAL_TEMPLATE -#include "type_quat.inl" -#endif//GLM_EXTERNAL_TEMPLATE diff --git a/third_party/glm/detail/type_quat.inl 
b/third_party/glm/detail/type_quat.inl deleted file mode 100755 index 67b9310..0000000 --- a/third_party/glm/detail/type_quat.inl +++ /dev/null @@ -1,408 +0,0 @@ -#include "../trigonometric.hpp" -#include "../exponential.hpp" -#include "../ext/quaternion_geometric.hpp" -#include - -namespace glm{ -namespace detail -{ - template - struct genTypeTrait > - { - static const genTypeEnum GENTYPE = GENTYPE_QUAT; - }; - - template - struct compute_dot, T, Aligned> - { - GLM_FUNC_QUALIFIER GLM_CONSTEXPR static T call(qua const& a, qua const& b) - { - vec<4, T, Q> tmp(a.w * b.w, a.x * b.x, a.y * b.y, a.z * b.z); - return (tmp.x + tmp.y) + (tmp.z + tmp.w); - } - }; - - template - struct compute_quat_add - { - GLM_FUNC_QUALIFIER GLM_CONSTEXPR static qua call(qua const& q, qua const& p) - { - return qua(q.w + p.w, q.x + p.x, q.y + p.y, q.z + p.z); - } - }; - - template - struct compute_quat_sub - { - GLM_FUNC_QUALIFIER GLM_CONSTEXPR static qua call(qua const& q, qua const& p) - { - return qua(q.w - p.w, q.x - p.x, q.y - p.y, q.z - p.z); - } - }; - - template - struct compute_quat_mul_scalar - { - GLM_FUNC_QUALIFIER GLM_CONSTEXPR static qua call(qua const& q, T s) - { - return qua(q.w * s, q.x * s, q.y * s, q.z * s); - } - }; - - template - struct compute_quat_div_scalar - { - GLM_FUNC_QUALIFIER GLM_CONSTEXPR static qua call(qua const& q, T s) - { - return qua(q.w / s, q.x / s, q.y / s, q.z / s); - } - }; - - template - struct compute_quat_mul_vec4 - { - GLM_FUNC_QUALIFIER GLM_CONSTEXPR static vec<4, T, Q> call(qua const& q, vec<4, T, Q> const& v) - { - return vec<4, T, Q>(q * vec<3, T, Q>(v), v.w); - } - }; -}//namespace detail - - // -- Component accesses -- - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR T & qua::operator[](typename qua::length_type i) - { - assert(i >= 0 && i < this->length()); -# ifdef GLM_FORCE_QUAT_DATA_WXYZ - return (&w)[i]; -# else - return (&x)[i]; -# endif - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR T const& qua::operator[](typename qua::length_type i) const - { - assert(i >= 0 && i < this->length()); -# ifdef GLM_FORCE_QUAT_DATA_WXYZ - return (&w)[i]; -# else - return (&x)[i]; -# endif - } - - // -- Implicit basic constructors -- - -# if GLM_CONFIG_DEFAULTED_FUNCTIONS == GLM_DISABLE - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR qua::qua() -# if GLM_CONFIG_CTOR_INIT != GLM_CTOR_INIT_DISABLE -# ifdef GLM_FORCE_QUAT_DATA_WXYZ - : w(1), x(0), y(0), z(0) -# else - : x(0), y(0), z(0), w(1) -# endif -# endif - {} - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR qua::qua(qua const& q) -# ifdef GLM_FORCE_QUAT_DATA_WXYZ - : w(q.w), x(q.x), y(q.y), z(q.z) -# else - : x(q.x), y(q.y), z(q.z), w(q.w) -# endif - {} -# endif - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR qua::qua(qua const& q) -# ifdef GLM_FORCE_QUAT_DATA_WXYZ - : w(q.w), x(q.x), y(q.y), z(q.z) -# else - : x(q.x), y(q.y), z(q.z), w(q.w) -# endif - {} - - // -- Explicit basic constructors -- - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR qua::qua(T s, vec<3, T, Q> const& v) -# ifdef GLM_FORCE_QUAT_DATA_WXYZ - : w(s), x(v.x), y(v.y), z(v.z) -# else - : x(v.x), y(v.y), z(v.z), w(s) -# endif - {} - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR qua::qua(T _w, T _x, T _y, T _z) -# ifdef GLM_FORCE_QUAT_DATA_WXYZ - : w(_w), x(_x), y(_y), z(_z) -# else - : x(_x), y(_y), z(_z), w(_w) -# endif - {} - - // -- Conversion constructors -- - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR qua::qua(qua const& q) -# ifdef GLM_FORCE_QUAT_DATA_WXYZ - : w(static_cast(q.w)), x(static_cast(q.x)), y(static_cast(q.y)), 
z(static_cast(q.z)) -# else - : x(static_cast(q.x)), y(static_cast(q.y)), z(static_cast(q.z)), w(static_cast(q.w)) -# endif - {} - - //template - //GLM_FUNC_QUALIFIER qua::qua - //( - // valType const& pitch, - // valType const& yaw, - // valType const& roll - //) - //{ - // vec<3, valType> eulerAngle(pitch * valType(0.5), yaw * valType(0.5), roll * valType(0.5)); - // vec<3, valType> c = glm::cos(eulerAngle * valType(0.5)); - // vec<3, valType> s = glm::sin(eulerAngle * valType(0.5)); - // - // this->w = c.x * c.y * c.z + s.x * s.y * s.z; - // this->x = s.x * c.y * c.z - c.x * s.y * s.z; - // this->y = c.x * s.y * c.z + s.x * c.y * s.z; - // this->z = c.x * c.y * s.z - s.x * s.y * c.z; - //} - - template - GLM_FUNC_QUALIFIER qua::qua(vec<3, T, Q> const& u, vec<3, T, Q> const& v) - { - T norm_u_norm_v = sqrt(dot(u, u) * dot(v, v)); - T real_part = norm_u_norm_v + dot(u, v); - vec<3, T, Q> t; - - if(real_part < static_cast(1.e-6f) * norm_u_norm_v) - { - // If u and v are exactly opposite, rotate 180 degrees - // around an arbitrary orthogonal axis. Axis normalisation - // can happen later, when we normalise the quaternion. - real_part = static_cast(0); - t = abs(u.x) > abs(u.z) ? vec<3, T, Q>(-u.y, u.x, static_cast(0)) : vec<3, T, Q>(static_cast(0), -u.z, u.y); - } - else - { - // Otherwise, build quaternion the standard way. - t = cross(u, v); - } - - *this = normalize(qua(real_part, t.x, t.y, t.z)); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR qua::qua(vec<3, T, Q> const& eulerAngle) - { - vec<3, T, Q> c = glm::cos(eulerAngle * T(0.5)); - vec<3, T, Q> s = glm::sin(eulerAngle * T(0.5)); - - this->w = c.x * c.y * c.z + s.x * s.y * s.z; - this->x = s.x * c.y * c.z - c.x * s.y * s.z; - this->y = c.x * s.y * c.z + s.x * c.y * s.z; - this->z = c.x * c.y * s.z - s.x * s.y * c.z; - } - - template - GLM_FUNC_QUALIFIER qua::qua(mat<3, 3, T, Q> const& m) - { - *this = quat_cast(m); - } - - template - GLM_FUNC_QUALIFIER qua::qua(mat<4, 4, T, Q> const& m) - { - *this = quat_cast(m); - } - -# if GLM_HAS_EXPLICIT_CONVERSION_OPERATORS - template - GLM_FUNC_QUALIFIER qua::operator mat<3, 3, T, Q>() const - { - return mat3_cast(*this); - } - - template - GLM_FUNC_QUALIFIER qua::operator mat<4, 4, T, Q>() const - { - return mat4_cast(*this); - } -# endif//GLM_HAS_EXPLICIT_CONVERSION_OPERATORS - - // -- Unary arithmetic operators -- - -# if GLM_CONFIG_DEFAULTED_FUNCTIONS == GLM_DISABLE - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR qua & qua::operator=(qua const& q) - { - this->w = q.w; - this->x = q.x; - this->y = q.y; - this->z = q.z; - return *this; - } -# endif - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR qua & qua::operator=(qua const& q) - { - this->w = static_cast(q.w); - this->x = static_cast(q.x); - this->y = static_cast(q.y); - this->z = static_cast(q.z); - return *this; - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR qua & qua::operator+=(qua const& q) - { - return (*this = detail::compute_quat_add::value>::call(*this, qua(q))); - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR qua & qua::operator-=(qua const& q) - { - return (*this = detail::compute_quat_sub::value>::call(*this, qua(q))); - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR qua & qua::operator*=(qua const& r) - { - qua const p(*this); - qua const q(r); - - this->w = p.w * q.w - p.x * q.x - p.y * q.y - p.z * q.z; - this->x = p.w * q.x + p.x * q.w + p.y * q.z - p.z * q.y; - this->y = p.w * q.y + p.y * q.w + p.z * q.x - p.x * q.z; - this->z = p.w * q.z + p.z * q.w + p.x * q.y 
- p.y * q.x; - return *this; - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR qua & qua::operator*=(U s) - { - return (*this = detail::compute_quat_mul_scalar::value>::call(*this, static_cast(s))); - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR qua & qua::operator/=(U s) - { - return (*this = detail::compute_quat_div_scalar::value>::call(*this, static_cast(s))); - } - - // -- Unary bit operators -- - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR qua operator+(qua const& q) - { - return q; - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR qua operator-(qua const& q) - { - return qua(-q.w, -q.x, -q.y, -q.z); - } - - // -- Binary operators -- - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR qua operator+(qua const& q, qua const& p) - { - return qua(q) += p; - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR qua operator-(qua const& q, qua const& p) - { - return qua(q) -= p; - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR qua operator*(qua const& q, qua const& p) - { - return qua(q) *= p; - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> operator*(qua const& q, vec<3, T, Q> const& v) - { - vec<3, T, Q> const QuatVector(q.x, q.y, q.z); - vec<3, T, Q> const uv(glm::cross(QuatVector, v)); - vec<3, T, Q> const uuv(glm::cross(QuatVector, uv)); - - return v + ((uv * q.w) + uuv) * static_cast(2); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> operator*(vec<3, T, Q> const& v, qua const& q) - { - return glm::inverse(q) * v; - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> operator*(qua const& q, vec<4, T, Q> const& v) - { - return detail::compute_quat_mul_vec4::value>::call(q, v); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> operator*(vec<4, T, Q> const& v, qua const& q) - { - return glm::inverse(q) * v; - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR qua operator*(qua const& q, T const& s) - { - return qua( - q.w * s, q.x * s, q.y * s, q.z * s); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR qua operator*(T const& s, qua const& q) - { - return q * s; - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR qua operator/(qua const& q, T const& s) - { - return qua( - q.w / s, q.x / s, q.y / s, q.z / s); - } - - // -- Boolean operators -- - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR bool operator==(qua const& q1, qua const& q2) - { - return q1.x == q2.x && q1.y == q2.y && q1.z == q2.z && q1.w == q2.w; - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR bool operator!=(qua const& q1, qua const& q2) - { - return q1.x != q2.x || q1.y != q2.y || q1.z != q2.z || q1.w != q2.w; - } -}//namespace glm - -#if GLM_CONFIG_SIMD == GLM_ENABLE -# include "type_quat_simd.inl" -#endif - diff --git a/third_party/glm/detail/type_quat_simd.inl b/third_party/glm/detail/type_quat_simd.inl deleted file mode 100755 index 3333e59..0000000 --- a/third_party/glm/detail/type_quat_simd.inl +++ /dev/null @@ -1,188 +0,0 @@ -/// @ref core - -#if GLM_ARCH & GLM_ARCH_SSE2_BIT - -namespace glm{ -namespace detail -{ -/* - template - struct compute_quat_mul - { - static qua call(qua const& q1, qua const& q2) - { - // SSE2 STATS: 11 shuffle, 8 mul, 8 add - // SSE4 STATS: 3 shuffle, 4 mul, 4 dpps - - __m128 const mul0 = _mm_mul_ps(q1.Data, _mm_shuffle_ps(q2.Data, q2.Data, _MM_SHUFFLE(0, 1, 2, 3))); - __m128 const mul1 = _mm_mul_ps(q1.Data, _mm_shuffle_ps(q2.Data, q2.Data, _MM_SHUFFLE(1, 0, 3, 2))); - __m128 const mul2 = _mm_mul_ps(q1.Data, _mm_shuffle_ps(q2.Data, q2.Data, _MM_SHUFFLE(2, 3, 0, 1))); - __m128 const mul3 = 
_mm_mul_ps(q1.Data, q2.Data); - -# if GLM_ARCH & GLM_ARCH_SSE41_BIT - __m128 const add0 = _mm_dp_ps(mul0, _mm_set_ps(1.0f, -1.0f, 1.0f, 1.0f), 0xff); - __m128 const add1 = _mm_dp_ps(mul1, _mm_set_ps(1.0f, 1.0f, 1.0f, -1.0f), 0xff); - __m128 const add2 = _mm_dp_ps(mul2, _mm_set_ps(1.0f, 1.0f, -1.0f, 1.0f), 0xff); - __m128 const add3 = _mm_dp_ps(mul3, _mm_set_ps(1.0f, -1.0f, -1.0f, -1.0f), 0xff); -# else - __m128 const mul4 = _mm_mul_ps(mul0, _mm_set_ps(1.0f, -1.0f, 1.0f, 1.0f)); - __m128 const add0 = _mm_add_ps(mul0, _mm_movehl_ps(mul4, mul4)); - __m128 const add4 = _mm_add_ss(add0, _mm_shuffle_ps(add0, add0, 1)); - - __m128 const mul5 = _mm_mul_ps(mul1, _mm_set_ps(1.0f, 1.0f, 1.0f, -1.0f)); - __m128 const add1 = _mm_add_ps(mul1, _mm_movehl_ps(mul5, mul5)); - __m128 const add5 = _mm_add_ss(add1, _mm_shuffle_ps(add1, add1, 1)); - - __m128 const mul6 = _mm_mul_ps(mul2, _mm_set_ps(1.0f, 1.0f, -1.0f, 1.0f)); - __m128 const add2 = _mm_add_ps(mul6, _mm_movehl_ps(mul6, mul6)); - __m128 const add6 = _mm_add_ss(add2, _mm_shuffle_ps(add2, add2, 1)); - - __m128 const mul7 = _mm_mul_ps(mul3, _mm_set_ps(1.0f, -1.0f, -1.0f, -1.0f)); - __m128 const add3 = _mm_add_ps(mul3, _mm_movehl_ps(mul7, mul7)); - __m128 const add7 = _mm_add_ss(add3, _mm_shuffle_ps(add3, add3, 1)); - #endif - - // This SIMD code is a politically correct way of doing this, but in every test I've tried it has been slower than - // the final code below. I'll keep this here for reference - maybe somebody else can do something better... - // - //__m128 xxyy = _mm_shuffle_ps(add4, add5, _MM_SHUFFLE(0, 0, 0, 0)); - //__m128 zzww = _mm_shuffle_ps(add6, add7, _MM_SHUFFLE(0, 0, 0, 0)); - // - //return _mm_shuffle_ps(xxyy, zzww, _MM_SHUFFLE(2, 0, 2, 0)); - - qua Result; - _mm_store_ss(&Result.x, add4); - _mm_store_ss(&Result.y, add5); - _mm_store_ss(&Result.z, add6); - _mm_store_ss(&Result.w, add7); - return Result; - } - }; -*/ - - template - struct compute_quat_add - { - static qua call(qua const& q, qua const& p) - { - qua Result; - Result.data = _mm_add_ps(q.data, p.data); - return Result; - } - }; - -# if GLM_ARCH & GLM_ARCH_AVX_BIT - template - struct compute_quat_add - { - static qua call(qua const& a, qua const& b) - { - qua Result; - Result.data = _mm256_add_pd(a.data, b.data); - return Result; - } - }; -# endif - - template - struct compute_quat_sub - { - static qua call(qua const& q, qua const& p) - { - vec<4, float, Q> Result; - Result.data = _mm_sub_ps(q.data, p.data); - return Result; - } - }; - -# if GLM_ARCH & GLM_ARCH_AVX_BIT - template - struct compute_quat_sub - { - static qua call(qua const& a, qua const& b) - { - qua Result; - Result.data = _mm256_sub_pd(a.data, b.data); - return Result; - } - }; -# endif - - template - struct compute_quat_mul_scalar - { - static qua call(qua const& q, float s) - { - vec<4, float, Q> Result; - Result.data = _mm_mul_ps(q.data, _mm_set_ps1(s)); - return Result; - } - }; - -# if GLM_ARCH & GLM_ARCH_AVX_BIT - template - struct compute_quat_mul_scalar - { - static qua call(qua const& q, double s) - { - qua Result; - Result.data = _mm256_mul_pd(q.data, _mm_set_ps1(s)); - return Result; - } - }; -# endif - - template - struct compute_quat_div_scalar - { - static qua call(qua const& q, float s) - { - vec<4, float, Q> Result; - Result.data = _mm_div_ps(q.data, _mm_set_ps1(s)); - return Result; - } - }; - -# if GLM_ARCH & GLM_ARCH_AVX_BIT - template - struct compute_quat_div_scalar - { - static qua call(qua const& q, double s) - { - qua Result; - Result.data = _mm256_div_pd(q.data, _mm_set_ps1(s)); - 
return Result; - } - }; -# endif - - template - struct compute_quat_mul_vec4 - { - static vec<4, float, Q> call(qua const& q, vec<4, float, Q> const& v) - { - __m128 const q_wwww = _mm_shuffle_ps(q.data, q.data, _MM_SHUFFLE(3, 3, 3, 3)); - __m128 const q_swp0 = _mm_shuffle_ps(q.data, q.data, _MM_SHUFFLE(3, 0, 2, 1)); - __m128 const q_swp1 = _mm_shuffle_ps(q.data, q.data, _MM_SHUFFLE(3, 1, 0, 2)); - __m128 const v_swp0 = _mm_shuffle_ps(v.data, v.data, _MM_SHUFFLE(3, 0, 2, 1)); - __m128 const v_swp1 = _mm_shuffle_ps(v.data, v.data, _MM_SHUFFLE(3, 1, 0, 2)); - - __m128 uv = _mm_sub_ps(_mm_mul_ps(q_swp0, v_swp1), _mm_mul_ps(q_swp1, v_swp0)); - __m128 uv_swp0 = _mm_shuffle_ps(uv, uv, _MM_SHUFFLE(3, 0, 2, 1)); - __m128 uv_swp1 = _mm_shuffle_ps(uv, uv, _MM_SHUFFLE(3, 1, 0, 2)); - __m128 uuv = _mm_sub_ps(_mm_mul_ps(q_swp0, uv_swp1), _mm_mul_ps(q_swp1, uv_swp0)); - - __m128 const two = _mm_set1_ps(2.0f); - uv = _mm_mul_ps(uv, _mm_mul_ps(q_wwww, two)); - uuv = _mm_mul_ps(uuv, two); - - vec<4, float, Q> Result; - Result.data = _mm_add_ps(v.Data, _mm_add_ps(uv, uuv)); - return Result; - } - }; -}//namespace detail -}//namespace glm - -#endif//GLM_ARCH & GLM_ARCH_SSE2_BIT - diff --git a/third_party/glm/detail/type_vec1.hpp b/third_party/glm/detail/type_vec1.hpp deleted file mode 100755 index 51163f1..0000000 --- a/third_party/glm/detail/type_vec1.hpp +++ /dev/null @@ -1,308 +0,0 @@ -/// @ref core -/// @file glm/detail/type_vec1.hpp - -#pragma once - -#include "qualifier.hpp" -#if GLM_CONFIG_SWIZZLE == GLM_SWIZZLE_OPERATOR -# include "_swizzle.hpp" -#elif GLM_CONFIG_SWIZZLE == GLM_SWIZZLE_FUNCTION -# include "_swizzle_func.hpp" -#endif -#include - -namespace glm -{ - template - struct vec<1, T, Q> - { - // -- Implementation detail -- - - typedef T value_type; - typedef vec<1, T, Q> type; - typedef vec<1, bool, Q> bool_type; - - // -- Data -- - -# if GLM_SILENT_WARNINGS == GLM_ENABLE -# if GLM_COMPILER & GLM_COMPILER_GCC -# pragma GCC diagnostic push -# pragma GCC diagnostic ignored "-Wpedantic" -# elif GLM_COMPILER & GLM_COMPILER_CLANG -# pragma clang diagnostic push -# pragma clang diagnostic ignored "-Wgnu-anonymous-struct" -# pragma clang diagnostic ignored "-Wnested-anon-types" -# elif GLM_COMPILER & GLM_COMPILER_VC -# pragma warning(push) -# pragma warning(disable: 4201) // nonstandard extension used : nameless struct/union -# endif -# endif - -# if GLM_CONFIG_XYZW_ONLY - T x; -# elif GLM_CONFIG_ANONYMOUS_STRUCT == GLM_ENABLE - union - { - T x; - T r; - T s; - - typename detail::storage<1, T, detail::is_aligned::value>::type data; -/* -# if GLM_CONFIG_SWIZZLE == GLM_SWIZZLE_OPERATOR - _GLM_SWIZZLE1_2_MEMBERS(T, Q, x) - _GLM_SWIZZLE1_2_MEMBERS(T, Q, r) - _GLM_SWIZZLE1_2_MEMBERS(T, Q, s) - _GLM_SWIZZLE1_3_MEMBERS(T, Q, x) - _GLM_SWIZZLE1_3_MEMBERS(T, Q, r) - _GLM_SWIZZLE1_3_MEMBERS(T, Q, s) - _GLM_SWIZZLE1_4_MEMBERS(T, Q, x) - _GLM_SWIZZLE1_4_MEMBERS(T, Q, r) - _GLM_SWIZZLE1_4_MEMBERS(T, Q, s) -# endif -*/ - }; -# else - union {T x, r, s;}; -/* -# if GLM_CONFIG_SWIZZLE == GLM_SWIZZLE_FUNCTION - GLM_SWIZZLE_GEN_VEC_FROM_VEC1(T, Q) -# endif -*/ -# endif - -# if GLM_SILENT_WARNINGS == GLM_ENABLE -# if GLM_COMPILER & GLM_COMPILER_CLANG -# pragma clang diagnostic pop -# elif GLM_COMPILER & GLM_COMPILER_GCC -# pragma GCC diagnostic pop -# elif GLM_COMPILER & GLM_COMPILER_VC -# pragma warning(pop) -# endif -# endif - - // -- Component accesses -- - - /// Return the count of components of the vector - typedef length_t length_type; - GLM_FUNC_DECL static GLM_CONSTEXPR length_type length(){return 1;} - - 
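// [Illustrative sketch, not part of the original patch] Minimal usage example
// of the single-component vec<1, T, Q> type declared above, as it behaved
// before these vendored GLM headers were removed. Assumes the glm::vec1 alias
// and the <glm/gtc/vec1.hpp> extension header shipped by GLM releases of this
// era; treat the exact include path as an assumption.
#include <glm/glm.hpp>
#include <glm/gtc/vec1.hpp>
#include <cassert>

int main()
{
	glm::vec1 a(2.0f);                        // explicit scalar constructor
	glm::vec1 b(glm::vec3(5.0f, 1.0f, 1.0f)); // truncating conversion keeps only .x
	assert(a.length() == 1);                  // a vec<1> always reports one component
	assert(a[0] == 2.0f);                     // operator[] always refers to x
	a += b;                                   // component-wise compound assignment
	assert(a.x == 7.0f);
	return 0;
}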
GLM_FUNC_DECL GLM_CONSTEXPR T & operator[](length_type i); - GLM_FUNC_DECL GLM_CONSTEXPR T const& operator[](length_type i) const; - - // -- Implicit basic constructors -- - - GLM_FUNC_DECL GLM_CONSTEXPR vec() GLM_DEFAULT; - GLM_FUNC_DECL GLM_CONSTEXPR vec(vec const& v) GLM_DEFAULT; - template - GLM_FUNC_DECL GLM_CONSTEXPR vec(vec<1, T, P> const& v); - - // -- Explicit basic constructors -- - - GLM_FUNC_DECL GLM_CONSTEXPR explicit vec(T scalar); - - // -- Conversion vector constructors -- - - /// Explicit conversions (From section 5.4.1 Conversion and scalar constructors of GLSL 1.30.08 specification) - template - GLM_FUNC_DECL GLM_CONSTEXPR GLM_EXPLICIT vec(vec<2, U, P> const& v); - /// Explicit conversions (From section 5.4.1 Conversion and scalar constructors of GLSL 1.30.08 specification) - template - GLM_FUNC_DECL GLM_CONSTEXPR GLM_EXPLICIT vec(vec<3, U, P> const& v); - /// Explicit conversions (From section 5.4.1 Conversion and scalar constructors of GLSL 1.30.08 specification) - template - GLM_FUNC_DECL GLM_CONSTEXPR GLM_EXPLICIT vec(vec<4, U, P> const& v); - - /// Explicit conversions (From section 5.4.1 Conversion and scalar constructors of GLSL 1.30.08 specification) - template - GLM_FUNC_DECL GLM_CONSTEXPR GLM_EXPLICIT vec(vec<1, U, P> const& v); - - // -- Swizzle constructors -- -/* -# if GLM_CONFIG_SWIZZLE == GLM_SWIZZLE_OPERATOR - template - GLM_FUNC_DECL GLM_CONSTEXPR vec(detail::_swizzle<1, T, Q, E0, -1,-2,-3> const& that) - { - *this = that(); - } -# endif//GLM_CONFIG_SWIZZLE == GLM_SWIZZLE_OPERATOR -*/ - // -- Unary arithmetic operators -- - - GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> & operator=(vec const& v) GLM_DEFAULT; - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> & operator=(vec<1, U, Q> const& v); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> & operator+=(U scalar); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> & operator+=(vec<1, U, Q> const& v); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> & operator-=(U scalar); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> & operator-=(vec<1, U, Q> const& v); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> & operator*=(U scalar); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> & operator*=(vec<1, U, Q> const& v); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> & operator/=(U scalar); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> & operator/=(vec<1, U, Q> const& v); - - // -- Increment and decrement operators -- - - GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> & operator++(); - GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> & operator--(); - GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> operator++(int); - GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> operator--(int); - - // -- Unary bit operators -- - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> & operator%=(U scalar); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> & operator%=(vec<1, U, Q> const& v); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> & operator&=(U scalar); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> & operator&=(vec<1, U, Q> const& v); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> & operator|=(U scalar); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> & operator|=(vec<1, U, Q> const& v); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> & operator^=(U scalar); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> & operator^=(vec<1, U, Q> const& v); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> & operator<<=(U scalar); - template - GLM_FUNC_DECL GLM_CONSTEXPR 
vec<1, T, Q> & operator<<=(vec<1, U, Q> const& v); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> & operator>>=(U scalar); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> & operator>>=(vec<1, U, Q> const& v); - }; - - // -- Unary operators -- - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> operator+(vec<1, T, Q> const& v); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> operator-(vec<1, T, Q> const& v); - - // -- Binary operators -- - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> operator+(vec<1, T, Q> const& v, T scalar); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> operator+(T scalar, vec<1, T, Q> const& v); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> operator+(vec<1, T, Q> const& v1, vec<1, T, Q> const& v2); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> operator-(vec<1, T, Q> const& v, T scalar); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> operator-(T scalar, vec<1, T, Q> const& v); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> operator-(vec<1, T, Q> const& v1, vec<1, T, Q> const& v2); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> operator*(vec<1, T, Q> const& v, T scalar); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> operator*(T scalar, vec<1, T, Q> const& v); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> operator*(vec<1, T, Q> const& v1, vec<1, T, Q> const& v2); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> operator/(vec<1, T, Q> const& v, T scalar); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> operator/(T scalar, vec<1, T, Q> const& v); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> operator/(vec<1, T, Q> const& v1, vec<1, T, Q> const& v2); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> operator%(vec<1, T, Q> const& v, T scalar); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> operator%(T scalar, vec<1, T, Q> const& v); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> operator%(vec<1, T, Q> const& v1, vec<1, T, Q> const& v2); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> operator&(vec<1, T, Q> const& v, T scalar); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> operator&(T scalar, vec<1, T, Q> const& v); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> operator&(vec<1, T, Q> const& v1, vec<1, T, Q> const& v2); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> operator|(vec<1, T, Q> const& v, T scalar); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> operator|(T scalar, vec<1, T, Q> const& v); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> operator|(vec<1, T, Q> const& v1, vec<1, T, Q> const& v2); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> operator^(vec<1, T, Q> const& v, T scalar); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> operator^(T scalar, vec<1, T, Q> const& v); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> operator^(vec<1, T, Q> const& v1, vec<1, T, Q> const& v2); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> operator<<(vec<1, T, Q> const& v, T scalar); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> operator<<(T scalar, vec<1, T, Q> const& v); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> operator<<(vec<1, T, Q> const& v1, vec<1, T, Q> const& v2); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> operator>>(vec<1, T, Q> const& v, T scalar); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> operator>>(T scalar, vec<1, T, Q> const& v); - - template - GLM_FUNC_DECL GLM_CONSTEXPR 
vec<1, T, Q> operator>>(vec<1, T, Q> const& v1, vec<1, T, Q> const& v2); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<1, T, Q> operator~(vec<1, T, Q> const& v); - - // -- Boolean operators -- - - template - GLM_FUNC_DECL GLM_CONSTEXPR bool operator==(vec<1, T, Q> const& v1, vec<1, T, Q> const& v2); - - template - GLM_FUNC_DECL GLM_CONSTEXPR bool operator!=(vec<1, T, Q> const& v1, vec<1, T, Q> const& v2); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<1, bool, Q> operator&&(vec<1, bool, Q> const& v1, vec<1, bool, Q> const& v2); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<1, bool, Q> operator||(vec<1, bool, Q> const& v1, vec<1, bool, Q> const& v2); -}//namespace glm - -#ifndef GLM_EXTERNAL_TEMPLATE -#include "type_vec1.inl" -#endif//GLM_EXTERNAL_TEMPLATE diff --git a/third_party/glm/detail/type_vec1.inl b/third_party/glm/detail/type_vec1.inl deleted file mode 100755 index d0f49fd..0000000 --- a/third_party/glm/detail/type_vec1.inl +++ /dev/null @@ -1,551 +0,0 @@ -/// @ref core - -#include "./compute_vector_relational.hpp" - -namespace glm -{ - // -- Implicit basic constructors -- - -# if GLM_CONFIG_DEFAULTED_FUNCTIONS == GLM_DISABLE - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q>::vec() -# if GLM_CONFIG_CTOR_INIT != GLM_CTOR_INIT_DISABLE - : x(0) -# endif - {} - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q>::vec(vec<1, T, Q> const& v) - : x(v.x) - {} -# endif - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q>::vec(vec<1, T, P> const& v) - : x(v.x) - {} - - // -- Explicit basic constructors -- - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q>::vec(T scalar) - : x(scalar) - {} - - // -- Conversion vector constructors -- - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q>::vec(vec<1, U, P> const& v) - : x(static_cast(v.x)) - {} - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q>::vec(vec<2, U, P> const& v) - : x(static_cast(v.x)) - {} - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q>::vec(vec<3, U, P> const& v) - : x(static_cast(v.x)) - {} - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q>::vec(vec<4, U, P> const& v) - : x(static_cast(v.x)) - {} - - // -- Component accesses -- - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR T & vec<1, T, Q>::operator[](typename vec<1, T, Q>::length_type) - { - return x; - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR T const& vec<1, T, Q>::operator[](typename vec<1, T, Q>::length_type) const - { - return x; - } - - // -- Unary arithmetic operators -- - -# if GLM_CONFIG_DEFAULTED_FUNCTIONS == GLM_DISABLE - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> & vec<1, T, Q>::operator=(vec<1, T, Q> const& v) - { - this->x = v.x; - return *this; - } -# endif - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> & vec<1, T, Q>::operator=(vec<1, U, Q> const& v) - { - this->x = static_cast(v.x); - return *this; - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> & vec<1, T, Q>::operator+=(U scalar) - { - this->x += static_cast(scalar); - return *this; - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> & vec<1, T, Q>::operator+=(vec<1, U, Q> const& v) - { - this->x += static_cast(v.x); - return *this; - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> & vec<1, T, Q>::operator-=(U scalar) - { - this->x -= static_cast(scalar); - return *this; - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> & vec<1, T, 
Q>::operator-=(vec<1, U, Q> const& v) - { - this->x -= static_cast(v.x); - return *this; - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> & vec<1, T, Q>::operator*=(U scalar) - { - this->x *= static_cast(scalar); - return *this; - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> & vec<1, T, Q>::operator*=(vec<1, U, Q> const& v) - { - this->x *= static_cast(v.x); - return *this; - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> & vec<1, T, Q>::operator/=(U scalar) - { - this->x /= static_cast(scalar); - return *this; - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> & vec<1, T, Q>::operator/=(vec<1, U, Q> const& v) - { - this->x /= static_cast(v.x); - return *this; - } - - // -- Increment and decrement operators -- - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> & vec<1, T, Q>::operator++() - { - ++this->x; - return *this; - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> & vec<1, T, Q>::operator--() - { - --this->x; - return *this; - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> vec<1, T, Q>::operator++(int) - { - vec<1, T, Q> Result(*this); - ++*this; - return Result; - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> vec<1, T, Q>::operator--(int) - { - vec<1, T, Q> Result(*this); - --*this; - return Result; - } - - // -- Unary bit operators -- - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> & vec<1, T, Q>::operator%=(U scalar) - { - this->x %= static_cast(scalar); - return *this; - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> & vec<1, T, Q>::operator%=(vec<1, U, Q> const& v) - { - this->x %= static_cast(v.x); - return *this; - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> & vec<1, T, Q>::operator&=(U scalar) - { - this->x &= static_cast(scalar); - return *this; - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> & vec<1, T, Q>::operator&=(vec<1, U, Q> const& v) - { - this->x &= static_cast(v.x); - return *this; - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> & vec<1, T, Q>::operator|=(U scalar) - { - this->x |= static_cast(scalar); - return *this; - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> & vec<1, T, Q>::operator|=(vec<1, U, Q> const& v) - { - this->x |= U(v.x); - return *this; - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> & vec<1, T, Q>::operator^=(U scalar) - { - this->x ^= static_cast(scalar); - return *this; - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> & vec<1, T, Q>::operator^=(vec<1, U, Q> const& v) - { - this->x ^= static_cast(v.x); - return *this; - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> & vec<1, T, Q>::operator<<=(U scalar) - { - this->x <<= static_cast(scalar); - return *this; - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> & vec<1, T, Q>::operator<<=(vec<1, U, Q> const& v) - { - this->x <<= static_cast(v.x); - return *this; - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> & vec<1, T, Q>::operator>>=(U scalar) - { - this->x >>= static_cast(scalar); - return *this; - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> & vec<1, T, Q>::operator>>=(vec<1, U, Q> const& v) - { - this->x >>= static_cast(v.x); - return *this; - } - - // -- Unary constant operators -- - - template - 
GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> operator+(vec<1, T, Q> const& v) - { - return v; - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> operator-(vec<1, T, Q> const& v) - { - return vec<1, T, Q>( - -v.x); - } - - // -- Binary arithmetic operators -- - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> operator+(vec<1, T, Q> const& v, T scalar) - { - return vec<1, T, Q>( - v.x + scalar); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> operator+(T scalar, vec<1, T, Q> const& v) - { - return vec<1, T, Q>( - scalar + v.x); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> operator+(vec<1, T, Q> const& v1, vec<1, T, Q> const& v2) - { - return vec<1, T, Q>( - v1.x + v2.x); - } - - //operator- - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> operator-(vec<1, T, Q> const& v, T scalar) - { - return vec<1, T, Q>( - v.x - scalar); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> operator-(T scalar, vec<1, T, Q> const& v) - { - return vec<1, T, Q>( - scalar - v.x); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> operator-(vec<1, T, Q> const& v1, vec<1, T, Q> const& v2) - { - return vec<1, T, Q>( - v1.x - v2.x); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> operator*(vec<1, T, Q> const& v, T scalar) - { - return vec<1, T, Q>( - v.x * scalar); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> operator*(T scalar, vec<1, T, Q> const& v) - { - return vec<1, T, Q>( - scalar * v.x); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> operator*(vec<1, T, Q> const& v1, vec<1, T, Q> const& v2) - { - return vec<1, T, Q>( - v1.x * v2.x); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> operator/(vec<1, T, Q> const& v, T scalar) - { - return vec<1, T, Q>( - v.x / scalar); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> operator/(T scalar, vec<1, T, Q> const& v) - { - return vec<1, T, Q>( - scalar / v.x); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> operator/(vec<1, T, Q> const& v1, vec<1, T, Q> const& v2) - { - return vec<1, T, Q>( - v1.x / v2.x); - } - - // -- Binary bit operators -- - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> operator%(vec<1, T, Q> const& v, T scalar) - { - return vec<1, T, Q>( - v.x % scalar); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> operator%(T scalar, vec<1, T, Q> const& v) - { - return vec<1, T, Q>( - scalar % v.x); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> operator%(vec<1, T, Q> const& v1, vec<1, T, Q> const& v2) - { - return vec<1, T, Q>( - v1.x % v2.x); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> operator&(vec<1, T, Q> const& v, T scalar) - { - return vec<1, T, Q>( - v.x & scalar); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> operator&(T scalar, vec<1, T, Q> const& v) - { - return vec<1, T, Q>( - scalar & v.x); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> operator&(vec<1, T, Q> const& v1, vec<1, T, Q> const& v2) - { - return vec<1, T, Q>( - v1.x & v2.x); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> operator|(vec<1, T, Q> const& v, T scalar) - { - return vec<1, T, Q>( - v.x | scalar); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> operator|(T scalar, vec<1, T, Q> const& v) - { - return vec<1, T, Q>( - scalar | v.x); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> operator|(vec<1, T, Q> const& v1, vec<1, T, Q> 
const& v2) - { - return vec<1, T, Q>( - v1.x | v2.x); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> operator^(vec<1, T, Q> const& v, T scalar) - { - return vec<1, T, Q>( - v.x ^ scalar); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> operator^(T scalar, vec<1, T, Q> const& v) - { - return vec<1, T, Q>( - scalar ^ v.x); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> operator^(vec<1, T, Q> const& v1, vec<1, T, Q> const& v2) - { - return vec<1, T, Q>( - v1.x ^ v2.x); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> operator<<(vec<1, T, Q> const& v, T scalar) - { - return vec<1, T, Q>( - static_cast(v.x << scalar)); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> operator<<(T scalar, vec<1, T, Q> const& v) - { - return vec<1, T, Q>( - scalar << v.x); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> operator<<(vec<1, T, Q> const& v1, vec<1, T, Q> const& v2) - { - return vec<1, T, Q>( - v1.x << v2.x); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> operator>>(vec<1, T, Q> const& v, T scalar) - { - return vec<1, T, Q>( - v.x >> scalar); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> operator>>(T scalar, vec<1, T, Q> const& v) - { - return vec<1, T, Q>( - scalar >> v.x); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> operator>>(vec<1, T, Q> const& v1, vec<1, T, Q> const& v2) - { - return vec<1, T, Q>( - v1.x >> v2.x); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, T, Q> operator~(vec<1, T, Q> const& v) - { - return vec<1, T, Q>( - ~v.x); - } - - // -- Boolean operators -- - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR bool operator==(vec<1, T, Q> const& v1, vec<1, T, Q> const& v2) - { - return detail::compute_equal::is_iec559>::call(v1.x, v2.x); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR bool operator!=(vec<1, T, Q> const& v1, vec<1, T, Q> const& v2) - { - return !(v1 == v2); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, bool, Q> operator&&(vec<1, bool, Q> const& v1, vec<1, bool, Q> const& v2) - { - return vec<1, bool, Q>(v1.x && v2.x); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<1, bool, Q> operator||(vec<1, bool, Q> const& v1, vec<1, bool, Q> const& v2) - { - return vec<1, bool, Q>(v1.x || v2.x); - } -}//namespace glm diff --git a/third_party/glm/detail/type_vec2.hpp b/third_party/glm/detail/type_vec2.hpp deleted file mode 100755 index 52ef408..0000000 --- a/third_party/glm/detail/type_vec2.hpp +++ /dev/null @@ -1,399 +0,0 @@ -/// @ref core -/// @file glm/detail/type_vec2.hpp - -#pragma once - -#include "qualifier.hpp" -#if GLM_CONFIG_SWIZZLE == GLM_SWIZZLE_OPERATOR -# include "_swizzle.hpp" -#elif GLM_CONFIG_SWIZZLE == GLM_SWIZZLE_FUNCTION -# include "_swizzle_func.hpp" -#endif -#include - -namespace glm -{ - template - struct vec<2, T, Q> - { - // -- Implementation detail -- - - typedef T value_type; - typedef vec<2, T, Q> type; - typedef vec<2, bool, Q> bool_type; - - // -- Data -- - -# if GLM_SILENT_WARNINGS == GLM_ENABLE -# if GLM_COMPILER & GLM_COMPILER_GCC -# pragma GCC diagnostic push -# pragma GCC diagnostic ignored "-Wpedantic" -# elif GLM_COMPILER & GLM_COMPILER_CLANG -# pragma clang diagnostic push -# pragma clang diagnostic ignored "-Wgnu-anonymous-struct" -# pragma clang diagnostic ignored "-Wnested-anon-types" -# elif GLM_COMPILER & GLM_COMPILER_VC -# pragma warning(push) -# pragma warning(disable: 4201) // nonstandard extension used : nameless struct/union -# endif -# endif - -# 
if GLM_CONFIG_XYZW_ONLY - T x, y; -# elif GLM_CONFIG_ANONYMOUS_STRUCT == GLM_ENABLE - union - { - struct{ T x, y; }; - struct{ T r, g; }; - struct{ T s, t; }; - - typename detail::storage<2, T, detail::is_aligned::value>::type data; - -# if GLM_CONFIG_SWIZZLE == GLM_SWIZZLE_OPERATOR - GLM_SWIZZLE2_2_MEMBERS(T, Q, x, y) - GLM_SWIZZLE2_2_MEMBERS(T, Q, r, g) - GLM_SWIZZLE2_2_MEMBERS(T, Q, s, t) - GLM_SWIZZLE2_3_MEMBERS(T, Q, x, y) - GLM_SWIZZLE2_3_MEMBERS(T, Q, r, g) - GLM_SWIZZLE2_3_MEMBERS(T, Q, s, t) - GLM_SWIZZLE2_4_MEMBERS(T, Q, x, y) - GLM_SWIZZLE2_4_MEMBERS(T, Q, r, g) - GLM_SWIZZLE2_4_MEMBERS(T, Q, s, t) -# endif - }; -# else - union {T x, r, s;}; - union {T y, g, t;}; - -# if GLM_CONFIG_SWIZZLE == GLM_SWIZZLE_FUNCTION - GLM_SWIZZLE_GEN_VEC_FROM_VEC2(T, Q) -# endif//GLM_CONFIG_SWIZZLE -# endif - -# if GLM_SILENT_WARNINGS == GLM_ENABLE -# if GLM_COMPILER & GLM_COMPILER_CLANG -# pragma clang diagnostic pop -# elif GLM_COMPILER & GLM_COMPILER_GCC -# pragma GCC diagnostic pop -# elif GLM_COMPILER & GLM_COMPILER_VC -# pragma warning(pop) -# endif -# endif - - // -- Component accesses -- - - /// Return the count of components of the vector - typedef length_t length_type; - GLM_FUNC_DECL static GLM_CONSTEXPR length_type length(){return 2;} - - GLM_FUNC_DECL GLM_CONSTEXPR T& operator[](length_type i); - GLM_FUNC_DECL GLM_CONSTEXPR T const& operator[](length_type i) const; - - // -- Implicit basic constructors -- - - GLM_FUNC_DECL GLM_CONSTEXPR vec() GLM_DEFAULT; - GLM_FUNC_DECL GLM_CONSTEXPR vec(vec const& v) GLM_DEFAULT; - template - GLM_FUNC_DECL GLM_CONSTEXPR vec(vec<2, T, P> const& v); - - // -- Explicit basic constructors -- - - GLM_FUNC_DECL GLM_CONSTEXPR explicit vec(T scalar); - GLM_FUNC_DECL GLM_CONSTEXPR vec(T x, T y); - - // -- Conversion constructors -- - - template - GLM_FUNC_DECL GLM_CONSTEXPR explicit vec(vec<1, U, P> const& v); - - /// Explicit conversions (From section 5.4.1 Conversion and scalar constructors of GLSL 1.30.08 specification) - template - GLM_FUNC_DECL GLM_CONSTEXPR vec(A x, B y); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec(vec<1, A, Q> const& x, B y); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec(A x, vec<1, B, Q> const& y); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec(vec<1, A, Q> const& x, vec<1, B, Q> const& y); - - // -- Conversion vector constructors -- - - /// Explicit conversions (From section 5.4.1 Conversion and scalar constructors of GLSL 1.30.08 specification) - template - GLM_FUNC_DECL GLM_CONSTEXPR GLM_EXPLICIT vec(vec<3, U, P> const& v); - /// Explicit conversions (From section 5.4.1 Conversion and scalar constructors of GLSL 1.30.08 specification) - template - GLM_FUNC_DECL GLM_CONSTEXPR GLM_EXPLICIT vec(vec<4, U, P> const& v); - - /// Explicit conversions (From section 5.4.1 Conversion and scalar constructors of GLSL 1.30.08 specification) - template - GLM_FUNC_DECL GLM_CONSTEXPR GLM_EXPLICIT vec(vec<2, U, P> const& v); - - // -- Swizzle constructors -- -# if GLM_CONFIG_SWIZZLE == GLM_SWIZZLE_OPERATOR - template - GLM_FUNC_DECL GLM_CONSTEXPR vec(detail::_swizzle<2, T, Q, E0, E1,-1,-2> const& that) - { - *this = that(); - } -# endif//GLM_CONFIG_SWIZZLE == GLM_SWIZZLE_OPERATOR - - // -- Unary arithmetic operators -- - - GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> & operator=(vec const& v) GLM_DEFAULT; - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> & operator=(vec<2, U, Q> const& v); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> & operator+=(U scalar); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> & operator+=(vec<1, U, Q> 
const& v); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> & operator+=(vec<2, U, Q> const& v); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> & operator-=(U scalar); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> & operator-=(vec<1, U, Q> const& v); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> & operator-=(vec<2, U, Q> const& v); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> & operator*=(U scalar); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> & operator*=(vec<1, U, Q> const& v); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> & operator*=(vec<2, U, Q> const& v); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> & operator/=(U scalar); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> & operator/=(vec<1, U, Q> const& v); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> & operator/=(vec<2, U, Q> const& v); - - // -- Increment and decrement operators -- - - GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> & operator++(); - GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> & operator--(); - GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> operator++(int); - GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> operator--(int); - - // -- Unary bit operators -- - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> & operator%=(U scalar); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> & operator%=(vec<1, U, Q> const& v); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> & operator%=(vec<2, U, Q> const& v); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> & operator&=(U scalar); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> & operator&=(vec<1, U, Q> const& v); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> & operator&=(vec<2, U, Q> const& v); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> & operator|=(U scalar); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> & operator|=(vec<1, U, Q> const& v); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> & operator|=(vec<2, U, Q> const& v); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> & operator^=(U scalar); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> & operator^=(vec<1, U, Q> const& v); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> & operator^=(vec<2, U, Q> const& v); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> & operator<<=(U scalar); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> & operator<<=(vec<1, U, Q> const& v); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> & operator<<=(vec<2, U, Q> const& v); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> & operator>>=(U scalar); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> & operator>>=(vec<1, U, Q> const& v); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> & operator>>=(vec<2, U, Q> const& v); - }; - - // -- Unary operators -- - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> operator+(vec<2, T, Q> const& v); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> operator-(vec<2, T, Q> const& v); - - // -- Binary operators -- - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> operator+(vec<2, T, Q> const& v, T scalar); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> operator+(vec<2, T, Q> const& v1, vec<1, T, Q> const& v2); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> operator+(T scalar, vec<2, T, Q> const& v); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> operator+(vec<1, T, Q> const& v1, vec<2, T, Q> const& v2); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> operator+(vec<2, T, Q> const& v1, vec<2, T, Q> const& 
v2); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> operator-(vec<2, T, Q> const& v, T scalar); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> operator-(vec<2, T, Q> const& v1, vec<1, T, Q> const& v2); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> operator-(T scalar, vec<2, T, Q> const& v); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> operator-(vec<1, T, Q> const& v1, vec<2, T, Q> const& v2); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> operator-(vec<2, T, Q> const& v1, vec<2, T, Q> const& v2); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> operator*(vec<2, T, Q> const& v, T scalar); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> operator*(vec<2, T, Q> const& v1, vec<1, T, Q> const& v2); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> operator*(T scalar, vec<2, T, Q> const& v); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> operator*(vec<1, T, Q> const& v1, vec<2, T, Q> const& v2); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> operator*(vec<2, T, Q> const& v1, vec<2, T, Q> const& v2); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> operator/(vec<2, T, Q> const& v, T scalar); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> operator/(vec<2, T, Q> const& v1, vec<1, T, Q> const& v2); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> operator/(T scalar, vec<2, T, Q> const& v); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> operator/(vec<1, T, Q> const& v1, vec<2, T, Q> const& v2); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> operator/(vec<2, T, Q> const& v1, vec<2, T, Q> const& v2); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> operator%(vec<2, T, Q> const& v, T scalar); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> operator%(vec<2, T, Q> const& v1, vec<1, T, Q> const& v2); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> operator%(T scalar, vec<2, T, Q> const& v); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> operator%(vec<1, T, Q> const& v1, vec<2, T, Q> const& v2); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> operator%(vec<2, T, Q> const& v1, vec<2, T, Q> const& v2); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> operator&(vec<2, T, Q> const& v, T scalar); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> operator&(vec<2, T, Q> const& v1, vec<1, T, Q> const& v2); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> operator&(T scalar, vec<2, T, Q> const& v); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> operator&(vec<1, T, Q> const& v1, vec<2, T, Q> const& v2); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> operator&(vec<2, T, Q> const& v1, vec<2, T, Q> const& v2); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> operator|(vec<2, T, Q> const& v, T scalar); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> operator|(vec<2, T, Q> const& v1, vec<1, T, Q> const& v2); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> operator|(T scalar, vec<2, T, Q> const& v); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> operator|(vec<1, T, Q> const& v1, vec<2, T, Q> const& v2); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> operator|(vec<2, T, Q> const& v1, vec<2, T, Q> const& v2); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> operator^(vec<2, T, Q> const& v, T scalar); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> operator^(vec<2, T, Q> const& v1, vec<1, T, Q> const& v2); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> 
operator^(T scalar, vec<2, T, Q> const& v); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> operator^(vec<1, T, Q> const& v1, vec<2, T, Q> const& v2); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> operator^(vec<2, T, Q> const& v1, vec<2, T, Q> const& v2); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> operator<<(vec<2, T, Q> const& v, T scalar); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> operator<<(vec<2, T, Q> const& v1, vec<1, T, Q> const& v2); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> operator<<(T scalar, vec<2, T, Q> const& v); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> operator<<(vec<1, T, Q> const& v1, vec<2, T, Q> const& v2); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> operator<<(vec<2, T, Q> const& v1, vec<2, T, Q> const& v2); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> operator>>(vec<2, T, Q> const& v, T scalar); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> operator>>(vec<2, T, Q> const& v1, vec<1, T, Q> const& v2); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> operator>>(T scalar, vec<2, T, Q> const& v); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> operator>>(vec<1, T, Q> const& v1, vec<2, T, Q> const& v2); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> operator>>(vec<2, T, Q> const& v1, vec<2, T, Q> const& v2); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<2, T, Q> operator~(vec<2, T, Q> const& v); - - // -- Boolean operators -- - - template - GLM_FUNC_DECL GLM_CONSTEXPR bool operator==(vec<2, T, Q> const& v1, vec<2, T, Q> const& v2); - - template - GLM_FUNC_DECL GLM_CONSTEXPR bool operator!=(vec<2, T, Q> const& v1, vec<2, T, Q> const& v2); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<2, bool, Q> operator&&(vec<2, bool, Q> const& v1, vec<2, bool, Q> const& v2); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<2, bool, Q> operator||(vec<2, bool, Q> const& v1, vec<2, bool, Q> const& v2); -}//namespace glm - -#ifndef GLM_EXTERNAL_TEMPLATE -#include "type_vec2.inl" -#endif//GLM_EXTERNAL_TEMPLATE diff --git a/third_party/glm/detail/type_vec2.inl b/third_party/glm/detail/type_vec2.inl deleted file mode 100755 index 8e65d6b..0000000 --- a/third_party/glm/detail/type_vec2.inl +++ /dev/null @@ -1,913 +0,0 @@ -/// @ref core - -#include "./compute_vector_relational.hpp" - -namespace glm -{ - // -- Implicit basic constructors -- - -# if GLM_CONFIG_DEFAULTED_FUNCTIONS == GLM_DISABLE - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q>::vec() -# if GLM_CONFIG_CTOR_INIT != GLM_CTOR_INIT_DISABLE - : x(0), y(0) -# endif - {} - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q>::vec(vec<2, T, Q> const& v) - : x(v.x), y(v.y) - {} -# endif - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q>::vec(vec<2, T, P> const& v) - : x(v.x), y(v.y) - {} - - // -- Explicit basic constructors -- - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q>::vec(T scalar) - : x(scalar), y(scalar) - {} - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q>::vec(T _x, T _y) - : x(_x), y(_y) - {} - - // -- Conversion scalar constructors -- - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q>::vec(vec<1, U, P> const& v) - : x(static_cast(v.x)) - , y(static_cast(v.x)) - {} - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q>::vec(A _x, B _y) - : x(static_cast(_x)) - , y(static_cast(_y)) - {} - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q>::vec(vec<1, A, Q> const& _x, B _y) - : 
x(static_cast(_x.x)) - , y(static_cast(_y)) - {} - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q>::vec(A _x, vec<1, B, Q> const& _y) - : x(static_cast(_x)) - , y(static_cast(_y.x)) - {} - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q>::vec(vec<1, A, Q> const& _x, vec<1, B, Q> const& _y) - : x(static_cast(_x.x)) - , y(static_cast(_y.x)) - {} - - // -- Conversion vector constructors -- - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q>::vec(vec<2, U, P> const& v) - : x(static_cast(v.x)) - , y(static_cast(v.y)) - {} - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q>::vec(vec<3, U, P> const& v) - : x(static_cast(v.x)) - , y(static_cast(v.y)) - {} - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q>::vec(vec<4, U, P> const& v) - : x(static_cast(v.x)) - , y(static_cast(v.y)) - {} - - // -- Component accesses -- - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR T & vec<2, T, Q>::operator[](typename vec<2, T, Q>::length_type i) - { - assert(i >= 0 && i < this->length()); - switch(i) - { - default: - case 0: - return x; - case 1: - return y; - } - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR T const& vec<2, T, Q>::operator[](typename vec<2, T, Q>::length_type i) const - { - assert(i >= 0 && i < this->length()); - switch(i) - { - default: - case 0: - return x; - case 1: - return y; - } - } - - // -- Unary arithmetic operators -- - -# if GLM_CONFIG_DEFAULTED_FUNCTIONS == GLM_DISABLE - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> & vec<2, T, Q>::operator=(vec<2, T, Q> const& v) - { - this->x = v.x; - this->y = v.y; - return *this; - } -# endif - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> & vec<2, T, Q>::operator=(vec<2, U, Q> const& v) - { - this->x = static_cast(v.x); - this->y = static_cast(v.y); - return *this; - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> & vec<2, T, Q>::operator+=(U scalar) - { - this->x += static_cast(scalar); - this->y += static_cast(scalar); - return *this; - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> & vec<2, T, Q>::operator+=(vec<1, U, Q> const& v) - { - this->x += static_cast(v.x); - this->y += static_cast(v.x); - return *this; - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> & vec<2, T, Q>::operator+=(vec<2, U, Q> const& v) - { - this->x += static_cast(v.x); - this->y += static_cast(v.y); - return *this; - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> & vec<2, T, Q>::operator-=(U scalar) - { - this->x -= static_cast(scalar); - this->y -= static_cast(scalar); - return *this; - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> & vec<2, T, Q>::operator-=(vec<1, U, Q> const& v) - { - this->x -= static_cast(v.x); - this->y -= static_cast(v.x); - return *this; - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> & vec<2, T, Q>::operator-=(vec<2, U, Q> const& v) - { - this->x -= static_cast(v.x); - this->y -= static_cast(v.y); - return *this; - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> & vec<2, T, Q>::operator*=(U scalar) - { - this->x *= static_cast(scalar); - this->y *= static_cast(scalar); - return *this; - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> & vec<2, T, Q>::operator*=(vec<1, U, Q> const& v) - { - this->x *= static_cast(v.x); - this->y *= static_cast(v.x); - return *this; - } - - template - template - 
GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> & vec<2, T, Q>::operator*=(vec<2, U, Q> const& v) - { - this->x *= static_cast(v.x); - this->y *= static_cast(v.y); - return *this; - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> & vec<2, T, Q>::operator/=(U scalar) - { - this->x /= static_cast(scalar); - this->y /= static_cast(scalar); - return *this; - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> & vec<2, T, Q>::operator/=(vec<1, U, Q> const& v) - { - this->x /= static_cast(v.x); - this->y /= static_cast(v.x); - return *this; - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> & vec<2, T, Q>::operator/=(vec<2, U, Q> const& v) - { - this->x /= static_cast(v.x); - this->y /= static_cast(v.y); - return *this; - } - - // -- Increment and decrement operators -- - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> & vec<2, T, Q>::operator++() - { - ++this->x; - ++this->y; - return *this; - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> & vec<2, T, Q>::operator--() - { - --this->x; - --this->y; - return *this; - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> vec<2, T, Q>::operator++(int) - { - vec<2, T, Q> Result(*this); - ++*this; - return Result; - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> vec<2, T, Q>::operator--(int) - { - vec<2, T, Q> Result(*this); - --*this; - return Result; - } - - // -- Unary bit operators -- - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> & vec<2, T, Q>::operator%=(U scalar) - { - this->x %= static_cast(scalar); - this->y %= static_cast(scalar); - return *this; - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> & vec<2, T, Q>::operator%=(vec<1, U, Q> const& v) - { - this->x %= static_cast(v.x); - this->y %= static_cast(v.x); - return *this; - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> & vec<2, T, Q>::operator%=(vec<2, U, Q> const& v) - { - this->x %= static_cast(v.x); - this->y %= static_cast(v.y); - return *this; - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> & vec<2, T, Q>::operator&=(U scalar) - { - this->x &= static_cast(scalar); - this->y &= static_cast(scalar); - return *this; - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> & vec<2, T, Q>::operator&=(vec<1, U, Q> const& v) - { - this->x &= static_cast(v.x); - this->y &= static_cast(v.x); - return *this; - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> & vec<2, T, Q>::operator&=(vec<2, U, Q> const& v) - { - this->x &= static_cast(v.x); - this->y &= static_cast(v.y); - return *this; - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> & vec<2, T, Q>::operator|=(U scalar) - { - this->x |= static_cast(scalar); - this->y |= static_cast(scalar); - return *this; - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> & vec<2, T, Q>::operator|=(vec<1, U, Q> const& v) - { - this->x |= static_cast(v.x); - this->y |= static_cast(v.x); - return *this; - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> & vec<2, T, Q>::operator|=(vec<2, U, Q> const& v) - { - this->x |= static_cast(v.x); - this->y |= static_cast(v.y); - return *this; - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> & vec<2, T, Q>::operator^=(U scalar) - { - this->x ^= static_cast(scalar); - this->y ^= static_cast(scalar); - return *this; - } - - template - template - 
GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> & vec<2, T, Q>::operator^=(vec<1, U, Q> const& v) - { - this->x ^= static_cast(v.x); - this->y ^= static_cast(v.x); - return *this; - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> & vec<2, T, Q>::operator^=(vec<2, U, Q> const& v) - { - this->x ^= static_cast(v.x); - this->y ^= static_cast(v.y); - return *this; - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> & vec<2, T, Q>::operator<<=(U scalar) - { - this->x <<= static_cast(scalar); - this->y <<= static_cast(scalar); - return *this; - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> & vec<2, T, Q>::operator<<=(vec<1, U, Q> const& v) - { - this->x <<= static_cast(v.x); - this->y <<= static_cast(v.x); - return *this; - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> & vec<2, T, Q>::operator<<=(vec<2, U, Q> const& v) - { - this->x <<= static_cast(v.x); - this->y <<= static_cast(v.y); - return *this; - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> & vec<2, T, Q>::operator>>=(U scalar) - { - this->x >>= static_cast(scalar); - this->y >>= static_cast(scalar); - return *this; - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> & vec<2, T, Q>::operator>>=(vec<1, U, Q> const& v) - { - this->x >>= static_cast(v.x); - this->y >>= static_cast(v.x); - return *this; - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> & vec<2, T, Q>::operator>>=(vec<2, U, Q> const& v) - { - this->x >>= static_cast(v.x); - this->y >>= static_cast(v.y); - return *this; - } - - // -- Unary arithmetic operators -- - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> operator+(vec<2, T, Q> const& v) - { - return v; - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> operator-(vec<2, T, Q> const& v) - { - return vec<2, T, Q>( - -v.x, - -v.y); - } - - // -- Binary arithmetic operators -- - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> operator+(vec<2, T, Q> const& v, T scalar) - { - return vec<2, T, Q>( - v.x + scalar, - v.y + scalar); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> operator+(vec<2, T, Q> const& v1, vec<1, T, Q> const& v2) - { - return vec<2, T, Q>( - v1.x + v2.x, - v1.y + v2.x); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> operator+(T scalar, vec<2, T, Q> const& v) - { - return vec<2, T, Q>( - scalar + v.x, - scalar + v.y); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> operator+(vec<1, T, Q> const& v1, vec<2, T, Q> const& v2) - { - return vec<2, T, Q>( - v1.x + v2.x, - v1.x + v2.y); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> operator+(vec<2, T, Q> const& v1, vec<2, T, Q> const& v2) - { - return vec<2, T, Q>( - v1.x + v2.x, - v1.y + v2.y); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> operator-(vec<2, T, Q> const& v, T scalar) - { - return vec<2, T, Q>( - v.x - scalar, - v.y - scalar); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> operator-(vec<2, T, Q> const& v1, vec<1, T, Q> const& v2) - { - return vec<2, T, Q>( - v1.x - v2.x, - v1.y - v2.x); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> operator-(T scalar, vec<2, T, Q> const& v) - { - return vec<2, T, Q>( - scalar - v.x, - scalar - v.y); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> operator-(vec<1, T, Q> const& v1, vec<2, T, Q> const& v2) - { - return vec<2, T, Q>( - v1.x - v2.x, - v1.x - v2.y); - 
} - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> operator-(vec<2, T, Q> const& v1, vec<2, T, Q> const& v2) - { - return vec<2, T, Q>( - v1.x - v2.x, - v1.y - v2.y); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> operator*(vec<2, T, Q> const& v, T scalar) - { - return vec<2, T, Q>( - v.x * scalar, - v.y * scalar); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> operator*(vec<2, T, Q> const& v1, vec<1, T, Q> const& v2) - { - return vec<2, T, Q>( - v1.x * v2.x, - v1.y * v2.x); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> operator*(T scalar, vec<2, T, Q> const& v) - { - return vec<2, T, Q>( - scalar * v.x, - scalar * v.y); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> operator*(vec<1, T, Q> const& v1, vec<2, T, Q> const& v2) - { - return vec<2, T, Q>( - v1.x * v2.x, - v1.x * v2.y); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> operator*(vec<2, T, Q> const& v1, vec<2, T, Q> const& v2) - { - return vec<2, T, Q>( - v1.x * v2.x, - v1.y * v2.y); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> operator/(vec<2, T, Q> const& v, T scalar) - { - return vec<2, T, Q>( - v.x / scalar, - v.y / scalar); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> operator/(vec<2, T, Q> const& v1, vec<1, T, Q> const& v2) - { - return vec<2, T, Q>( - v1.x / v2.x, - v1.y / v2.x); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> operator/(T scalar, vec<2, T, Q> const& v) - { - return vec<2, T, Q>( - scalar / v.x, - scalar / v.y); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> operator/(vec<1, T, Q> const& v1, vec<2, T, Q> const& v2) - { - return vec<2, T, Q>( - v1.x / v2.x, - v1.x / v2.y); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> operator/(vec<2, T, Q> const& v1, vec<2, T, Q> const& v2) - { - return vec<2, T, Q>( - v1.x / v2.x, - v1.y / v2.y); - } - - // -- Binary bit operators -- - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> operator%(vec<2, T, Q> const& v, T scalar) - { - return vec<2, T, Q>( - v.x % scalar, - v.y % scalar); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> operator%(vec<2, T, Q> const& v1, vec<1, T, Q> const& v2) - { - return vec<2, T, Q>( - v1.x % v2.x, - v1.y % v2.x); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> operator%(T scalar, vec<2, T, Q> const& v) - { - return vec<2, T, Q>( - scalar % v.x, - scalar % v.y); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> operator%(vec<1, T, Q> const& v1, vec<2, T, Q> const& v2) - { - return vec<2, T, Q>( - v1.x % v2.x, - v1.x % v2.y); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> operator%(vec<2, T, Q> const& v1, vec<2, T, Q> const& v2) - { - return vec<2, T, Q>( - v1.x % v2.x, - v1.y % v2.y); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> operator&(vec<2, T, Q> const& v, T scalar) - { - return vec<2, T, Q>( - v.x & scalar, - v.y & scalar); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> operator&(vec<2, T, Q> const& v1, vec<1, T, Q> const& v2) - { - return vec<2, T, Q>( - v1.x & v2.x, - v1.y & v2.x); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> operator&(T scalar, vec<2, T, Q> const& v) - { - return vec<2, T, Q>( - scalar & v.x, - scalar & v.y); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> operator&(vec<1, T, Q> const& v1, vec<2, T, Q> const& v2) - { - return vec<2, T, Q>( - v1.x & v2.x, - v1.x & v2.y); - } - 
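// [Illustrative sketch, not part of the original patch] The removed vec<2>
// operator overloads above accept scalar and vec<1> operands on either side
// and broadcast the single value across both components (note how v1.x is
// reused for both .x and .y in the deleted bodies). A short demonstration,
// assuming the glm::vec1/glm::vec2 aliases and the <glm/gtc/vec1.hpp>
// extension header from the GLM sources being deleted here:
#include <glm/glm.hpp>
#include <glm/gtc/vec1.hpp>
#include <cassert>

int main()
{
	glm::vec2 v(6.0f, 8.0f);
	glm::vec1 s(2.0f);

	glm::vec2 a = v + 1.0f; // scalar broadcast  -> (7, 9)
	glm::vec2 b = v / s;    // vec<1> broadcast  -> (3, 4), both components divided by s.x
	glm::vec2 c = v - v;    // component-wise    -> (0, 0)

	assert(a == glm::vec2(7.0f, 9.0f));
	assert(b == glm::vec2(3.0f, 4.0f));
	assert(c == glm::vec2(0.0f));
	return 0;
}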
- template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> operator&(vec<2, T, Q> const& v1, vec<2, T, Q> const& v2) - { - return vec<2, T, Q>( - v1.x & v2.x, - v1.y & v2.y); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> operator|(vec<2, T, Q> const& v, T scalar) - { - return vec<2, T, Q>( - v.x | scalar, - v.y | scalar); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> operator|(vec<2, T, Q> const& v1, vec<1, T, Q> const& v2) - { - return vec<2, T, Q>( - v1.x | v2.x, - v1.y | v2.x); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> operator|(T scalar, vec<2, T, Q> const& v) - { - return vec<2, T, Q>( - scalar | v.x, - scalar | v.y); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> operator|(vec<1, T, Q> const& v1, vec<2, T, Q> const& v2) - { - return vec<2, T, Q>( - v1.x | v2.x, - v1.x | v2.y); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> operator|(vec<2, T, Q> const& v1, vec<2, T, Q> const& v2) - { - return vec<2, T, Q>( - v1.x | v2.x, - v1.y | v2.y); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> operator^(vec<2, T, Q> const& v, T scalar) - { - return vec<2, T, Q>( - v.x ^ scalar, - v.y ^ scalar); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> operator^(vec<2, T, Q> const& v1, vec<1, T, Q> const& v2) - { - return vec<2, T, Q>( - v1.x ^ v2.x, - v1.y ^ v2.x); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> operator^(T scalar, vec<2, T, Q> const& v) - { - return vec<2, T, Q>( - scalar ^ v.x, - scalar ^ v.y); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> operator^(vec<1, T, Q> const& v1, vec<2, T, Q> const& v2) - { - return vec<2, T, Q>( - v1.x ^ v2.x, - v1.x ^ v2.y); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> operator^(vec<2, T, Q> const& v1, vec<2, T, Q> const& v2) - { - return vec<2, T, Q>( - v1.x ^ v2.x, - v1.y ^ v2.y); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> operator<<(vec<2, T, Q> const& v, T scalar) - { - return vec<2, T, Q>( - v.x << scalar, - v.y << scalar); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> operator<<(vec<2, T, Q> const& v1, vec<1, T, Q> const& v2) - { - return vec<2, T, Q>( - v1.x << v2.x, - v1.y << v2.x); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> operator<<(T scalar, vec<2, T, Q> const& v) - { - return vec<2, T, Q>( - scalar << v.x, - scalar << v.y); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> operator<<(vec<1, T, Q> const& v1, vec<2, T, Q> const& v2) - { - return vec<2, T, Q>( - v1.x << v2.x, - v1.x << v2.y); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> operator<<(vec<2, T, Q> const& v1, vec<2, T, Q> const& v2) - { - return vec<2, T, Q>( - v1.x << v2.x, - v1.y << v2.y); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> operator>>(vec<2, T, Q> const& v, T scalar) - { - return vec<2, T, Q>( - v.x >> scalar, - v.y >> scalar); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> operator>>(vec<2, T, Q> const& v1, vec<1, T, Q> const& v2) - { - return vec<2, T, Q>( - v1.x >> v2.x, - v1.y >> v2.x); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> operator>>(T scalar, vec<2, T, Q> const& v) - { - return vec<2, T, Q>( - scalar >> v.x, - scalar >> v.y); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> operator>>(vec<1, T, Q> const& v1, vec<2, T, Q> const& v2) - { - return vec<2, T, Q>( - v1.x >> v2.x, - v1.x >> v2.y); - } - - template 
- GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> operator>>(vec<2, T, Q> const& v1, vec<2, T, Q> const& v2) - { - return vec<2, T, Q>( - v1.x >> v2.x, - v1.y >> v2.y); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, T, Q> operator~(vec<2, T, Q> const& v) - { - return vec<2, T, Q>( - ~v.x, - ~v.y); - } - - // -- Boolean operators -- - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR bool operator==(vec<2, T, Q> const& v1, vec<2, T, Q> const& v2) - { - return - detail::compute_equal::is_iec559>::call(v1.x, v2.x) && - detail::compute_equal::is_iec559>::call(v1.y, v2.y); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR bool operator!=(vec<2, T, Q> const& v1, vec<2, T, Q> const& v2) - { - return !(v1 == v2); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, bool, Q> operator&&(vec<2, bool, Q> const& v1, vec<2, bool, Q> const& v2) - { - return vec<2, bool, Q>(v1.x && v2.x, v1.y && v2.y); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<2, bool, Q> operator||(vec<2, bool, Q> const& v1, vec<2, bool, Q> const& v2) - { - return vec<2, bool, Q>(v1.x || v2.x, v1.y || v2.y); - } -}//namespace glm diff --git a/third_party/glm/detail/type_vec3.hpp b/third_party/glm/detail/type_vec3.hpp deleted file mode 100755 index d83cde6..0000000 --- a/third_party/glm/detail/type_vec3.hpp +++ /dev/null @@ -1,432 +0,0 @@ -/// @ref core -/// @file glm/detail/type_vec3.hpp - -#pragma once - -#include "qualifier.hpp" -#if GLM_CONFIG_SWIZZLE == GLM_SWIZZLE_OPERATOR -# include "_swizzle.hpp" -#elif GLM_CONFIG_SWIZZLE == GLM_SWIZZLE_FUNCTION -# include "_swizzle_func.hpp" -#endif -#include - -namespace glm -{ - template - struct vec<3, T, Q> - { - // -- Implementation detail -- - - typedef T value_type; - typedef vec<3, T, Q> type; - typedef vec<3, bool, Q> bool_type; - - // -- Data -- - -# if GLM_SILENT_WARNINGS == GLM_ENABLE -# if GLM_COMPILER & GLM_COMPILER_GCC -# pragma GCC diagnostic push -# pragma GCC diagnostic ignored "-Wpedantic" -# elif GLM_COMPILER & GLM_COMPILER_CLANG -# pragma clang diagnostic push -# pragma clang diagnostic ignored "-Wgnu-anonymous-struct" -# pragma clang diagnostic ignored "-Wnested-anon-types" -# elif GLM_COMPILER & GLM_COMPILER_VC -# pragma warning(push) -# pragma warning(disable: 4201) // nonstandard extension used : nameless struct/union -# if GLM_CONFIG_ALIGNED_GENTYPES == GLM_ENABLE -# pragma warning(disable: 4324) // structure was padded due to alignment specifier -# endif -# endif -# endif - -# if GLM_CONFIG_XYZW_ONLY - T x, y, z; -# elif GLM_CONFIG_ANONYMOUS_STRUCT == GLM_ENABLE - union - { - struct{ T x, y, z; }; - struct{ T r, g, b; }; - struct{ T s, t, p; }; - - typename detail::storage<3, T, detail::is_aligned::value>::type data; - -# if GLM_CONFIG_SWIZZLE == GLM_SWIZZLE_OPERATOR - GLM_SWIZZLE3_2_MEMBERS(T, Q, x, y, z) - GLM_SWIZZLE3_2_MEMBERS(T, Q, r, g, b) - GLM_SWIZZLE3_2_MEMBERS(T, Q, s, t, p) - GLM_SWIZZLE3_3_MEMBERS(T, Q, x, y, z) - GLM_SWIZZLE3_3_MEMBERS(T, Q, r, g, b) - GLM_SWIZZLE3_3_MEMBERS(T, Q, s, t, p) - GLM_SWIZZLE3_4_MEMBERS(T, Q, x, y, z) - GLM_SWIZZLE3_4_MEMBERS(T, Q, r, g, b) - GLM_SWIZZLE3_4_MEMBERS(T, Q, s, t, p) -# endif - }; -# else - union { T x, r, s; }; - union { T y, g, t; }; - union { T z, b, p; }; - -# if GLM_CONFIG_SWIZZLE == GLM_SWIZZLE_FUNCTION - GLM_SWIZZLE_GEN_VEC_FROM_VEC3(T, Q) -# endif//GLM_CONFIG_SWIZZLE -# endif//GLM_LANG - -# if GLM_SILENT_WARNINGS == GLM_ENABLE -# if GLM_COMPILER & GLM_COMPILER_CLANG -# pragma clang diagnostic pop -# elif GLM_COMPILER & GLM_COMPILER_GCC -# pragma GCC diagnostic pop -# elif 
GLM_COMPILER & GLM_COMPILER_VC -# pragma warning(pop) -# endif -# endif - - // -- Component accesses -- - - /// Return the count of components of the vector - typedef length_t length_type; - GLM_FUNC_DECL static GLM_CONSTEXPR length_type length(){return 3;} - - GLM_FUNC_DECL GLM_CONSTEXPR T & operator[](length_type i); - GLM_FUNC_DECL GLM_CONSTEXPR T const& operator[](length_type i) const; - - // -- Implicit basic constructors -- - - GLM_FUNC_DECL GLM_CONSTEXPR vec() GLM_DEFAULT; - GLM_FUNC_DECL GLM_CONSTEXPR vec(vec const& v) GLM_DEFAULT; - template - GLM_FUNC_DECL GLM_CONSTEXPR vec(vec<3, T, P> const& v); - - // -- Explicit basic constructors -- - - GLM_FUNC_DECL GLM_CONSTEXPR explicit vec(T scalar); - GLM_FUNC_DECL GLM_CONSTEXPR vec(T a, T b, T c); - - // -- Conversion scalar constructors -- - - template - GLM_FUNC_DECL GLM_CONSTEXPR explicit vec(vec<1, U, P> const& v); - - /// Explicit conversions (From section 5.4.1 Conversion and scalar constructors of GLSL 1.30.08 specification) - template - GLM_FUNC_DECL GLM_CONSTEXPR vec(X x, Y y, Z z); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec(vec<1, X, Q> const& _x, Y _y, Z _z); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec(X _x, vec<1, Y, Q> const& _y, Z _z); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec(vec<1, X, Q> const& _x, vec<1, Y, Q> const& _y, Z _z); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec(X _x, Y _y, vec<1, Z, Q> const& _z); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec(vec<1, X, Q> const& _x, Y _y, vec<1, Z, Q> const& _z); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec(X _x, vec<1, Y, Q> const& _y, vec<1, Z, Q> const& _z); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec(vec<1, X, Q> const& _x, vec<1, Y, Q> const& _y, vec<1, Z, Q> const& _z); - - // -- Conversion vector constructors -- - - /// Explicit conversions (From section 5.4.1 Conversion and scalar constructors of GLSL 1.30.08 specification) - template - GLM_FUNC_DECL GLM_CONSTEXPR vec(vec<2, A, P> const& _xy, B _z); - /// Explicit conversions (From section 5.4.1 Conversion and scalar constructors of GLSL 1.30.08 specification) - template - GLM_FUNC_DECL GLM_CONSTEXPR vec(vec<2, A, P> const& _xy, vec<1, B, P> const& _z); - /// Explicit conversions (From section 5.4.1 Conversion and scalar constructors of GLSL 1.30.08 specification) - template - GLM_FUNC_DECL GLM_CONSTEXPR vec(A _x, vec<2, B, P> const& _yz); - /// Explicit conversions (From section 5.4.1 Conversion and scalar constructors of GLSL 1.30.08 specification) - template - GLM_FUNC_DECL GLM_CONSTEXPR vec(vec<1, A, P> const& _x, vec<2, B, P> const& _yz); - /// Explicit conversions (From section 5.4.1 Conversion and scalar constructors of GLSL 1.30.08 specification) - template - GLM_FUNC_DECL GLM_CONSTEXPR GLM_EXPLICIT vec(vec<4, U, P> const& v); - - /// Explicit conversions (From section 5.4.1 Conversion and scalar constructors of GLSL 1.30.08 specification) - template - GLM_FUNC_DECL GLM_CONSTEXPR GLM_EXPLICIT vec(vec<3, U, P> const& v); - - // -- Swizzle constructors -- -# if GLM_CONFIG_SWIZZLE == GLM_SWIZZLE_OPERATOR - template - GLM_FUNC_DECL GLM_CONSTEXPR vec(detail::_swizzle<3, T, Q, E0, E1, E2, -1> const& that) - { - *this = that(); - } - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec(detail::_swizzle<2, T, Q, E0, E1, -1, -2> const& v, T const& scalar) - { - *this = vec(v(), scalar); - } - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec(T const& scalar, detail::_swizzle<2, T, Q, E0, E1, -1, -2> const& v) - { - *this = vec(scalar, v()); - } -# endif//GLM_CONFIG_SWIZZLE == GLM_SWIZZLE_OPERATOR - - // -- Unary arithmetic 
operators -- - - GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q>& operator=(vec<3, T, Q> const& v) GLM_DEFAULT; - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> & operator=(vec<3, U, Q> const& v); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> & operator+=(U scalar); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> & operator+=(vec<1, U, Q> const& v); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> & operator+=(vec<3, U, Q> const& v); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> & operator-=(U scalar); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> & operator-=(vec<1, U, Q> const& v); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> & operator-=(vec<3, U, Q> const& v); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> & operator*=(U scalar); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> & operator*=(vec<1, U, Q> const& v); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> & operator*=(vec<3, U, Q> const& v); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> & operator/=(U scalar); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> & operator/=(vec<1, U, Q> const& v); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> & operator/=(vec<3, U, Q> const& v); - - // -- Increment and decrement operators -- - - GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> & operator++(); - GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> & operator--(); - GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> operator++(int); - GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> operator--(int); - - // -- Unary bit operators -- - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> & operator%=(U scalar); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> & operator%=(vec<1, U, Q> const& v); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> & operator%=(vec<3, U, Q> const& v); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> & operator&=(U scalar); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> & operator&=(vec<1, U, Q> const& v); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> & operator&=(vec<3, U, Q> const& v); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> & operator|=(U scalar); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> & operator|=(vec<1, U, Q> const& v); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> & operator|=(vec<3, U, Q> const& v); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> & operator^=(U scalar); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> & operator^=(vec<1, U, Q> const& v); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> & operator^=(vec<3, U, Q> const& v); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> & operator<<=(U scalar); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> & operator<<=(vec<1, U, Q> const& v); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> & operator<<=(vec<3, U, Q> const& v); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> & operator>>=(U scalar); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> & operator>>=(vec<1, U, Q> const& v); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> & operator>>=(vec<3, U, Q> const& v); - }; - - // -- Unary operators -- - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> operator+(vec<3, T, Q> const& v); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> operator-(vec<3, T, Q> const& v); - - // -- Binary operators -- - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> operator+(vec<3, T, Q> const& v, T scalar); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> operator+(vec<3, T, Q> const& 
v, vec<1, T, Q> const& scalar); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> operator+(T scalar, vec<3, T, Q> const& v); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> operator+(vec<1, T, Q> const& v1, vec<3, T, Q> const& v2); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> operator+(vec<3, T, Q> const& v1, vec<3, T, Q> const& v2); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> operator-(vec<3, T, Q> const& v, T scalar); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> operator-(vec<3, T, Q> const& v1, vec<1, T, Q> const& v2); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> operator-(T scalar, vec<3, T, Q> const& v); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> operator-(vec<1, T, Q> const& v1, vec<3, T, Q> const& v2); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> operator-(vec<3, T, Q> const& v1, vec<3, T, Q> const& v2); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> operator*(vec<3, T, Q> const& v, T scalar); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> operator*(vec<3, T, Q> const& v1, vec<1, T, Q> const& v2); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> operator*(T scalar, vec<3, T, Q> const& v); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> operator*(vec<1, T, Q> const& v1, vec<3, T, Q> const& v2); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> operator*(vec<3, T, Q> const& v1, vec<3, T, Q> const& v2); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> operator/(vec<3, T, Q> const& v, T scalar); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> operator/(vec<3, T, Q> const& v1, vec<1, T, Q> const& v2); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> operator/(T scalar, vec<3, T, Q> const& v); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> operator/(vec<1, T, Q> const& v1, vec<3, T, Q> const& v2); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> operator/(vec<3, T, Q> const& v1, vec<3, T, Q> const& v2); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> operator%(vec<3, T, Q> const& v, T scalar); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> operator%(vec<3, T, Q> const& v1, vec<1, T, Q> const& v2); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> operator%(T scalar, vec<3, T, Q> const& v); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> operator%(vec<1, T, Q> const& v1, vec<3, T, Q> const& v2); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> operator%(vec<3, T, Q> const& v1, vec<3, T, Q> const& v2); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> operator&(vec<3, T, Q> const& v1, T scalar); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> operator&(vec<3, T, Q> const& v1, vec<1, T, Q> const& v2); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> operator&(T scalar, vec<3, T, Q> const& v); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> operator&(vec<1, T, Q> const& v1, vec<3, T, Q> const& v2); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> operator&(vec<3, T, Q> const& v1, vec<3, T, Q> const& v2); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> operator|(vec<3, T, Q> const& v, T scalar); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> operator|(vec<3, T, Q> const& v1, vec<1, T, Q> const& v2); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> operator|(T scalar, vec<3, T, Q> const& v); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> operator|(vec<1, T, Q> const& v1, vec<3, T, Q> const& v2); - - template - GLM_FUNC_DECL 
GLM_CONSTEXPR vec<3, T, Q> operator|(vec<3, T, Q> const& v1, vec<3, T, Q> const& v2); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> operator^(vec<3, T, Q> const& v, T scalar); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> operator^(vec<3, T, Q> const& v1, vec<1, T, Q> const& v2); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> operator^(T scalar, vec<3, T, Q> const& v); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> operator^(vec<1, T, Q> const& v1, vec<3, T, Q> const& v2); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> operator^(vec<3, T, Q> const& v1, vec<3, T, Q> const& v2); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> operator<<(vec<3, T, Q> const& v, T scalar); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> operator<<(vec<3, T, Q> const& v1, vec<1, T, Q> const& v2); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> operator<<(T scalar, vec<3, T, Q> const& v); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> operator<<(vec<1, T, Q> const& v1, vec<3, T, Q> const& v2); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> operator<<(vec<3, T, Q> const& v1, vec<3, T, Q> const& v2); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> operator>>(vec<3, T, Q> const& v, T scalar); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> operator>>(vec<3, T, Q> const& v1, vec<1, T, Q> const& v2); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> operator>>(T scalar, vec<3, T, Q> const& v); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> operator>>(vec<1, T, Q> const& v1, vec<3, T, Q> const& v2); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> operator>>(vec<3, T, Q> const& v1, vec<3, T, Q> const& v2); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<3, T, Q> operator~(vec<3, T, Q> const& v); - - // -- Boolean operators -- - - template - GLM_FUNC_DECL GLM_CONSTEXPR bool operator==(vec<3, T, Q> const& v1, vec<3, T, Q> const& v2); - - template - GLM_FUNC_DECL GLM_CONSTEXPR bool operator!=(vec<3, T, Q> const& v1, vec<3, T, Q> const& v2); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<3, bool, Q> operator&&(vec<3, bool, Q> const& v1, vec<3, bool, Q> const& v2); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<3, bool, Q> operator||(vec<3, bool, Q> const& v1, vec<3, bool, Q> const& v2); -}//namespace glm - -#ifndef GLM_EXTERNAL_TEMPLATE -#include "type_vec3.inl" -#endif//GLM_EXTERNAL_TEMPLATE diff --git a/third_party/glm/detail/type_vec3.inl b/third_party/glm/detail/type_vec3.inl deleted file mode 100755 index 6532c9e..0000000 --- a/third_party/glm/detail/type_vec3.inl +++ /dev/null @@ -1,1068 +0,0 @@ -/// @ref core - -#include "compute_vector_relational.hpp" - -namespace glm -{ - // -- Implicit basic constructors -- - -# if GLM_CONFIG_DEFAULTED_FUNCTIONS == GLM_DISABLE - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q>::vec() -# if GLM_CONFIG_CTOR_INIT != GLM_CTOR_INIT_DISABLE - : x(0), y(0), z(0) -# endif - {} - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q>::vec(vec<3, T, Q> const& v) - : x(v.x), y(v.y), z(v.z) - {} -# endif - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q>::vec(vec<3, T, P> const& v) - : x(v.x), y(v.y), z(v.z) - {} - - // -- Explicit basic constructors -- - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q>::vec(T scalar) - : x(scalar), y(scalar), z(scalar) - {} - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q>::vec(T _x, T _y, T _z) - : x(_x), y(_y), z(_z) - {} - - // -- Conversion scalar constructors -- - - template 
- template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q>::vec(vec<1, U, P> const& v) - : x(static_cast(v.x)) - , y(static_cast(v.x)) - , z(static_cast(v.x)) - {} - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q>::vec(X _x, Y _y, Z _z) - : x(static_cast(_x)) - , y(static_cast(_y)) - , z(static_cast(_z)) - {} - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q>::vec(vec<1, X, Q> const& _x, Y _y, Z _z) - : x(static_cast(_x.x)) - , y(static_cast(_y)) - , z(static_cast(_z)) - {} - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q>::vec(X _x, vec<1, Y, Q> const& _y, Z _z) - : x(static_cast(_x)) - , y(static_cast(_y.x)) - , z(static_cast(_z)) - {} - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q>::vec(vec<1, X, Q> const& _x, vec<1, Y, Q> const& _y, Z _z) - : x(static_cast(_x.x)) - , y(static_cast(_y.x)) - , z(static_cast(_z)) - {} - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q>::vec(X _x, Y _y, vec<1, Z, Q> const& _z) - : x(static_cast(_x)) - , y(static_cast(_y)) - , z(static_cast(_z.x)) - {} - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q>::vec(vec<1, X, Q> const& _x, Y _y, vec<1, Z, Q> const& _z) - : x(static_cast(_x.x)) - , y(static_cast(_y)) - , z(static_cast(_z.x)) - {} - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q>::vec(X _x, vec<1, Y, Q> const& _y, vec<1, Z, Q> const& _z) - : x(static_cast(_x)) - , y(static_cast(_y.x)) - , z(static_cast(_z.x)) - {} - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q>::vec(vec<1, X, Q> const& _x, vec<1, Y, Q> const& _y, vec<1, Z, Q> const& _z) - : x(static_cast(_x.x)) - , y(static_cast(_y.x)) - , z(static_cast(_z.x)) - {} - - // -- Conversion vector constructors -- - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q>::vec(vec<2, A, P> const& _xy, B _z) - : x(static_cast(_xy.x)) - , y(static_cast(_xy.y)) - , z(static_cast(_z)) - {} - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q>::vec(vec<2, A, P> const& _xy, vec<1, B, P> const& _z) - : x(static_cast(_xy.x)) - , y(static_cast(_xy.y)) - , z(static_cast(_z.x)) - {} - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q>::vec(A _x, vec<2, B, P> const& _yz) - : x(static_cast(_x)) - , y(static_cast(_yz.x)) - , z(static_cast(_yz.y)) - {} - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q>::vec(vec<1, A, P> const& _x, vec<2, B, P> const& _yz) - : x(static_cast(_x.x)) - , y(static_cast(_yz.x)) - , z(static_cast(_yz.y)) - {} - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q>::vec(vec<3, U, P> const& v) - : x(static_cast(v.x)) - , y(static_cast(v.y)) - , z(static_cast(v.z)) - {} - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q>::vec(vec<4, U, P> const& v) - : x(static_cast(v.x)) - , y(static_cast(v.y)) - , z(static_cast(v.z)) - {} - - // -- Component accesses -- - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR T & vec<3, T, Q>::operator[](typename vec<3, T, Q>::length_type i) - { - assert(i >= 0 && i < this->length()); - switch(i) - { - default: - case 0: - return x; - case 1: - return y; - case 2: - return z; - } - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR T const& vec<3, T, Q>::operator[](typename vec<3, T, Q>::length_type i) const - { - assert(i >= 0 && i < this->length()); - switch(i) - { - default: - case 0: - return x; - case 1: - return y; - case 2: - return z; - } - } - - // -- Unary arithmetic operators -- - -# 
if GLM_CONFIG_DEFAULTED_FUNCTIONS == GLM_DISABLE - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q>& vec<3, T, Q>::operator=(vec<3, T, Q> const& v) - { - this->x = v.x; - this->y = v.y; - this->z = v.z; - return *this; - } -# endif - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q>& vec<3, T, Q>::operator=(vec<3, U, Q> const& v) - { - this->x = static_cast(v.x); - this->y = static_cast(v.y); - this->z = static_cast(v.z); - return *this; - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> & vec<3, T, Q>::operator+=(U scalar) - { - this->x += static_cast(scalar); - this->y += static_cast(scalar); - this->z += static_cast(scalar); - return *this; - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> & vec<3, T, Q>::operator+=(vec<1, U, Q> const& v) - { - this->x += static_cast(v.x); - this->y += static_cast(v.x); - this->z += static_cast(v.x); - return *this; - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> & vec<3, T, Q>::operator+=(vec<3, U, Q> const& v) - { - this->x += static_cast(v.x); - this->y += static_cast(v.y); - this->z += static_cast(v.z); - return *this; - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> & vec<3, T, Q>::operator-=(U scalar) - { - this->x -= static_cast(scalar); - this->y -= static_cast(scalar); - this->z -= static_cast(scalar); - return *this; - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> & vec<3, T, Q>::operator-=(vec<1, U, Q> const& v) - { - this->x -= static_cast(v.x); - this->y -= static_cast(v.x); - this->z -= static_cast(v.x); - return *this; - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> & vec<3, T, Q>::operator-=(vec<3, U, Q> const& v) - { - this->x -= static_cast(v.x); - this->y -= static_cast(v.y); - this->z -= static_cast(v.z); - return *this; - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> & vec<3, T, Q>::operator*=(U scalar) - { - this->x *= static_cast(scalar); - this->y *= static_cast(scalar); - this->z *= static_cast(scalar); - return *this; - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> & vec<3, T, Q>::operator*=(vec<1, U, Q> const& v) - { - this->x *= static_cast(v.x); - this->y *= static_cast(v.x); - this->z *= static_cast(v.x); - return *this; - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> & vec<3, T, Q>::operator*=(vec<3, U, Q> const& v) - { - this->x *= static_cast(v.x); - this->y *= static_cast(v.y); - this->z *= static_cast(v.z); - return *this; - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> & vec<3, T, Q>::operator/=(U v) - { - this->x /= static_cast(v); - this->y /= static_cast(v); - this->z /= static_cast(v); - return *this; - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> & vec<3, T, Q>::operator/=(vec<1, U, Q> const& v) - { - this->x /= static_cast(v.x); - this->y /= static_cast(v.x); - this->z /= static_cast(v.x); - return *this; - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> & vec<3, T, Q>::operator/=(vec<3, U, Q> const& v) - { - this->x /= static_cast(v.x); - this->y /= static_cast(v.y); - this->z /= static_cast(v.z); - return *this; - } - - // -- Increment and decrement operators -- - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> & vec<3, T, Q>::operator++() - { - ++this->x; - ++this->y; - ++this->z; - return *this; - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR 
vec<3, T, Q> & vec<3, T, Q>::operator--() - { - --this->x; - --this->y; - --this->z; - return *this; - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> vec<3, T, Q>::operator++(int) - { - vec<3, T, Q> Result(*this); - ++*this; - return Result; - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> vec<3, T, Q>::operator--(int) - { - vec<3, T, Q> Result(*this); - --*this; - return Result; - } - - // -- Unary bit operators -- - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> & vec<3, T, Q>::operator%=(U scalar) - { - this->x %= scalar; - this->y %= scalar; - this->z %= scalar; - return *this; - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> & vec<3, T, Q>::operator%=(vec<1, U, Q> const& v) - { - this->x %= v.x; - this->y %= v.x; - this->z %= v.x; - return *this; - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> & vec<3, T, Q>::operator%=(vec<3, U, Q> const& v) - { - this->x %= v.x; - this->y %= v.y; - this->z %= v.z; - return *this; - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> & vec<3, T, Q>::operator&=(U scalar) - { - this->x &= scalar; - this->y &= scalar; - this->z &= scalar; - return *this; - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> & vec<3, T, Q>::operator&=(vec<1, U, Q> const& v) - { - this->x &= v.x; - this->y &= v.x; - this->z &= v.x; - return *this; - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> & vec<3, T, Q>::operator&=(vec<3, U, Q> const& v) - { - this->x &= v.x; - this->y &= v.y; - this->z &= v.z; - return *this; - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> & vec<3, T, Q>::operator|=(U scalar) - { - this->x |= scalar; - this->y |= scalar; - this->z |= scalar; - return *this; - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> & vec<3, T, Q>::operator|=(vec<1, U, Q> const& v) - { - this->x |= v.x; - this->y |= v.x; - this->z |= v.x; - return *this; - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> & vec<3, T, Q>::operator|=(vec<3, U, Q> const& v) - { - this->x |= v.x; - this->y |= v.y; - this->z |= v.z; - return *this; - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> & vec<3, T, Q>::operator^=(U scalar) - { - this->x ^= scalar; - this->y ^= scalar; - this->z ^= scalar; - return *this; - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> & vec<3, T, Q>::operator^=(vec<1, U, Q> const& v) - { - this->x ^= v.x; - this->y ^= v.x; - this->z ^= v.x; - return *this; - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> & vec<3, T, Q>::operator^=(vec<3, U, Q> const& v) - { - this->x ^= v.x; - this->y ^= v.y; - this->z ^= v.z; - return *this; - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> & vec<3, T, Q>::operator<<=(U scalar) - { - this->x <<= scalar; - this->y <<= scalar; - this->z <<= scalar; - return *this; - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> & vec<3, T, Q>::operator<<=(vec<1, U, Q> const& v) - { - this->x <<= static_cast(v.x); - this->y <<= static_cast(v.x); - this->z <<= static_cast(v.x); - return *this; - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> & vec<3, T, Q>::operator<<=(vec<3, U, Q> const& v) - { - this->x <<= static_cast(v.x); - this->y <<= static_cast(v.y); - this->z <<= static_cast(v.z); - return *this; - } - - template - template - 
GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> & vec<3, T, Q>::operator>>=(U scalar) - { - this->x >>= static_cast(scalar); - this->y >>= static_cast(scalar); - this->z >>= static_cast(scalar); - return *this; - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> & vec<3, T, Q>::operator>>=(vec<1, U, Q> const& v) - { - this->x >>= static_cast(v.x); - this->y >>= static_cast(v.x); - this->z >>= static_cast(v.x); - return *this; - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> & vec<3, T, Q>::operator>>=(vec<3, U, Q> const& v) - { - this->x >>= static_cast(v.x); - this->y >>= static_cast(v.y); - this->z >>= static_cast(v.z); - return *this; - } - - // -- Unary arithmetic operators -- - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> operator+(vec<3, T, Q> const& v) - { - return v; - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> operator-(vec<3, T, Q> const& v) - { - return vec<3, T, Q>( - -v.x, - -v.y, - -v.z); - } - - // -- Binary arithmetic operators -- - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> operator+(vec<3, T, Q> const& v, T scalar) - { - return vec<3, T, Q>( - v.x + scalar, - v.y + scalar, - v.z + scalar); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> operator+(vec<3, T, Q> const& v, vec<1, T, Q> const& scalar) - { - return vec<3, T, Q>( - v.x + scalar.x, - v.y + scalar.x, - v.z + scalar.x); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> operator+(T scalar, vec<3, T, Q> const& v) - { - return vec<3, T, Q>( - scalar + v.x, - scalar + v.y, - scalar + v.z); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> operator+(vec<1, T, Q> const& scalar, vec<3, T, Q> const& v) - { - return vec<3, T, Q>( - scalar.x + v.x, - scalar.x + v.y, - scalar.x + v.z); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> operator+(vec<3, T, Q> const& v1, vec<3, T, Q> const& v2) - { - return vec<3, T, Q>( - v1.x + v2.x, - v1.y + v2.y, - v1.z + v2.z); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> operator-(vec<3, T, Q> const& v, T scalar) - { - return vec<3, T, Q>( - v.x - scalar, - v.y - scalar, - v.z - scalar); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> operator-(vec<3, T, Q> const& v, vec<1, T, Q> const& scalar) - { - return vec<3, T, Q>( - v.x - scalar.x, - v.y - scalar.x, - v.z - scalar.x); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> operator-(T scalar, vec<3, T, Q> const& v) - { - return vec<3, T, Q>( - scalar - v.x, - scalar - v.y, - scalar - v.z); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> operator-(vec<1, T, Q> const& scalar, vec<3, T, Q> const& v) - { - return vec<3, T, Q>( - scalar.x - v.x, - scalar.x - v.y, - scalar.x - v.z); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> operator-(vec<3, T, Q> const& v1, vec<3, T, Q> const& v2) - { - return vec<3, T, Q>( - v1.x - v2.x, - v1.y - v2.y, - v1.z - v2.z); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> operator*(vec<3, T, Q> const& v, T scalar) - { - return vec<3, T, Q>( - v.x * scalar, - v.y * scalar, - v.z * scalar); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> operator*(vec<3, T, Q> const& v, vec<1, T, Q> const& scalar) - { - return vec<3, T, Q>( - v.x * scalar.x, - v.y * scalar.x, - v.z * scalar.x); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> operator*(T scalar, vec<3, T, Q> const& v) - { - return vec<3, T, Q>( - scalar * v.x, - scalar * 
v.y, - scalar * v.z); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> operator*(vec<1, T, Q> const& scalar, vec<3, T, Q> const& v) - { - return vec<3, T, Q>( - scalar.x * v.x, - scalar.x * v.y, - scalar.x * v.z); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> operator*(vec<3, T, Q> const& v1, vec<3, T, Q> const& v2) - { - return vec<3, T, Q>( - v1.x * v2.x, - v1.y * v2.y, - v1.z * v2.z); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> operator/(vec<3, T, Q> const& v, T scalar) - { - return vec<3, T, Q>( - v.x / scalar, - v.y / scalar, - v.z / scalar); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> operator/(vec<3, T, Q> const& v, vec<1, T, Q> const& scalar) - { - return vec<3, T, Q>( - v.x / scalar.x, - v.y / scalar.x, - v.z / scalar.x); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> operator/(T scalar, vec<3, T, Q> const& v) - { - return vec<3, T, Q>( - scalar / v.x, - scalar / v.y, - scalar / v.z); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> operator/(vec<1, T, Q> const& scalar, vec<3, T, Q> const& v) - { - return vec<3, T, Q>( - scalar.x / v.x, - scalar.x / v.y, - scalar.x / v.z); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> operator/(vec<3, T, Q> const& v1, vec<3, T, Q> const& v2) - { - return vec<3, T, Q>( - v1.x / v2.x, - v1.y / v2.y, - v1.z / v2.z); - } - - // -- Binary bit operators -- - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> operator%(vec<3, T, Q> const& v, T scalar) - { - return vec<3, T, Q>( - v.x % scalar, - v.y % scalar, - v.z % scalar); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> operator%(vec<3, T, Q> const& v, vec<1, T, Q> const& scalar) - { - return vec<3, T, Q>( - v.x % scalar.x, - v.y % scalar.x, - v.z % scalar.x); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> operator%(T scalar, vec<3, T, Q> const& v) - { - return vec<3, T, Q>( - scalar % v.x, - scalar % v.y, - scalar % v.z); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> operator%(vec<1, T, Q> const& scalar, vec<3, T, Q> const& v) - { - return vec<3, T, Q>( - scalar.x % v.x, - scalar.x % v.y, - scalar.x % v.z); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> operator%(vec<3, T, Q> const& v1, vec<3, T, Q> const& v2) - { - return vec<3, T, Q>( - v1.x % v2.x, - v1.y % v2.y, - v1.z % v2.z); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> operator&(vec<3, T, Q> const& v, T scalar) - { - return vec<3, T, Q>( - v.x & scalar, - v.y & scalar, - v.z & scalar); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> operator&(vec<3, T, Q> const& v, vec<1, T, Q> const& scalar) - { - return vec<3, T, Q>( - v.x & scalar.x, - v.y & scalar.x, - v.z & scalar.x); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> operator&(T scalar, vec<3, T, Q> const& v) - { - return vec<3, T, Q>( - scalar & v.x, - scalar & v.y, - scalar & v.z); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> operator&(vec<1, T, Q> const& scalar, vec<3, T, Q> const& v) - { - return vec<3, T, Q>( - scalar.x & v.x, - scalar.x & v.y, - scalar.x & v.z); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> operator&(vec<3, T, Q> const& v1, vec<3, T, Q> const& v2) - { - return vec<3, T, Q>( - v1.x & v2.x, - v1.y & v2.y, - v1.z & v2.z); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> operator|(vec<3, T, Q> const& v, T scalar) - { - return vec<3, T, Q>( - v.x | scalar, - 
v.y | scalar, - v.z | scalar); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> operator|(vec<3, T, Q> const& v, vec<1, T, Q> const& scalar) - { - return vec<3, T, Q>( - v.x | scalar.x, - v.y | scalar.x, - v.z | scalar.x); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> operator|(T scalar, vec<3, T, Q> const& v) - { - return vec<3, T, Q>( - scalar | v.x, - scalar | v.y, - scalar | v.z); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> operator|(vec<1, T, Q> const& scalar, vec<3, T, Q> const& v) - { - return vec<3, T, Q>( - scalar.x | v.x, - scalar.x | v.y, - scalar.x | v.z); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> operator|(vec<3, T, Q> const& v1, vec<3, T, Q> const& v2) - { - return vec<3, T, Q>( - v1.x | v2.x, - v1.y | v2.y, - v1.z | v2.z); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> operator^(vec<3, T, Q> const& v, T scalar) - { - return vec<3, T, Q>( - v.x ^ scalar, - v.y ^ scalar, - v.z ^ scalar); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> operator^(vec<3, T, Q> const& v, vec<1, T, Q> const& scalar) - { - return vec<3, T, Q>( - v.x ^ scalar.x, - v.y ^ scalar.x, - v.z ^ scalar.x); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> operator^(T scalar, vec<3, T, Q> const& v) - { - return vec<3, T, Q>( - scalar ^ v.x, - scalar ^ v.y, - scalar ^ v.z); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> operator^(vec<1, T, Q> const& scalar, vec<3, T, Q> const& v) - { - return vec<3, T, Q>( - scalar.x ^ v.x, - scalar.x ^ v.y, - scalar.x ^ v.z); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> operator^(vec<3, T, Q> const& v1, vec<3, T, Q> const& v2) - { - return vec<3, T, Q>( - v1.x ^ v2.x, - v1.y ^ v2.y, - v1.z ^ v2.z); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> operator<<(vec<3, T, Q> const& v, T scalar) - { - return vec<3, T, Q>( - v.x << scalar, - v.y << scalar, - v.z << scalar); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> operator<<(vec<3, T, Q> const& v, vec<1, T, Q> const& scalar) - { - return vec<3, T, Q>( - v.x << scalar.x, - v.y << scalar.x, - v.z << scalar.x); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> operator<<(T scalar, vec<3, T, Q> const& v) - { - return vec<3, T, Q>( - scalar << v.x, - scalar << v.y, - scalar << v.z); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> operator<<(vec<1, T, Q> const& scalar, vec<3, T, Q> const& v) - { - return vec<3, T, Q>( - scalar.x << v.x, - scalar.x << v.y, - scalar.x << v.z); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> operator<<(vec<3, T, Q> const& v1, vec<3, T, Q> const& v2) - { - return vec<3, T, Q>( - v1.x << v2.x, - v1.y << v2.y, - v1.z << v2.z); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> operator>>(vec<3, T, Q> const& v, T scalar) - { - return vec<3, T, Q>( - v.x >> scalar, - v.y >> scalar, - v.z >> scalar); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> operator>>(vec<3, T, Q> const& v, vec<1, T, Q> const& scalar) - { - return vec<3, T, Q>( - v.x >> scalar.x, - v.y >> scalar.x, - v.z >> scalar.x); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> operator>>(T scalar, vec<3, T, Q> const& v) - { - return vec<3, T, Q>( - scalar >> v.x, - scalar >> v.y, - scalar >> v.z); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> operator>>(vec<1, T, Q> const& scalar, vec<3, T, Q> const& v) - { - return vec<3, T, Q>( - 
scalar.x >> v.x, - scalar.x >> v.y, - scalar.x >> v.z); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> operator>>(vec<3, T, Q> const& v1, vec<3, T, Q> const& v2) - { - return vec<3, T, Q>( - v1.x >> v2.x, - v1.y >> v2.y, - v1.z >> v2.z); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, T, Q> operator~(vec<3, T, Q> const& v) - { - return vec<3, T, Q>( - ~v.x, - ~v.y, - ~v.z); - } - - // -- Boolean operators -- - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR bool operator==(vec<3, T, Q> const& v1, vec<3, T, Q> const& v2) - { - return - detail::compute_equal::is_iec559>::call(v1.x, v2.x) && - detail::compute_equal::is_iec559>::call(v1.y, v2.y) && - detail::compute_equal::is_iec559>::call(v1.z, v2.z); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR bool operator!=(vec<3, T, Q> const& v1, vec<3, T, Q> const& v2) - { - return !(v1 == v2); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, bool, Q> operator&&(vec<3, bool, Q> const& v1, vec<3, bool, Q> const& v2) - { - return vec<3, bool, Q>(v1.x && v2.x, v1.y && v2.y, v1.z && v2.z); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<3, bool, Q> operator||(vec<3, bool, Q> const& v1, vec<3, bool, Q> const& v2) - { - return vec<3, bool, Q>(v1.x || v2.x, v1.y || v2.y, v1.z || v2.z); - } -}//namespace glm diff --git a/third_party/glm/detail/type_vec4.hpp b/third_party/glm/detail/type_vec4.hpp deleted file mode 100755 index 4a36434..0000000 --- a/third_party/glm/detail/type_vec4.hpp +++ /dev/null @@ -1,505 +0,0 @@ -/// @ref core -/// @file glm/detail/type_vec4.hpp - -#pragma once - -#include "qualifier.hpp" -#if GLM_CONFIG_SWIZZLE == GLM_SWIZZLE_OPERATOR -# include "_swizzle.hpp" -#elif GLM_CONFIG_SWIZZLE == GLM_SWIZZLE_FUNCTION -# include "_swizzle_func.hpp" -#endif -#include - -namespace glm -{ - template - struct vec<4, T, Q> - { - // -- Implementation detail -- - - typedef T value_type; - typedef vec<4, T, Q> type; - typedef vec<4, bool, Q> bool_type; - - // -- Data -- - -# if GLM_SILENT_WARNINGS == GLM_ENABLE -# if GLM_COMPILER & GLM_COMPILER_GCC -# pragma GCC diagnostic push -# pragma GCC diagnostic ignored "-Wpedantic" -# elif GLM_COMPILER & GLM_COMPILER_CLANG -# pragma clang diagnostic push -# pragma clang diagnostic ignored "-Wgnu-anonymous-struct" -# pragma clang diagnostic ignored "-Wnested-anon-types" -# elif GLM_COMPILER & GLM_COMPILER_VC -# pragma warning(push) -# pragma warning(disable: 4201) // nonstandard extension used : nameless struct/union -# endif -# endif - -# if GLM_CONFIG_XYZW_ONLY - T x, y, z, w; -# elif GLM_CONFIG_ANONYMOUS_STRUCT == GLM_ENABLE - union - { - struct { T x, y, z, w; }; - struct { T r, g, b, a; }; - struct { T s, t, p, q; }; - - typename detail::storage<4, T, detail::is_aligned::value>::type data; - -# if GLM_CONFIG_SWIZZLE == GLM_SWIZZLE_OPERATOR - GLM_SWIZZLE4_2_MEMBERS(T, Q, x, y, z, w) - GLM_SWIZZLE4_2_MEMBERS(T, Q, r, g, b, a) - GLM_SWIZZLE4_2_MEMBERS(T, Q, s, t, p, q) - GLM_SWIZZLE4_3_MEMBERS(T, Q, x, y, z, w) - GLM_SWIZZLE4_3_MEMBERS(T, Q, r, g, b, a) - GLM_SWIZZLE4_3_MEMBERS(T, Q, s, t, p, q) - GLM_SWIZZLE4_4_MEMBERS(T, Q, x, y, z, w) - GLM_SWIZZLE4_4_MEMBERS(T, Q, r, g, b, a) - GLM_SWIZZLE4_4_MEMBERS(T, Q, s, t, p, q) -# endif - }; -# else - union { T x, r, s; }; - union { T y, g, t; }; - union { T z, b, p; }; - union { T w, a, q; }; - -# if GLM_CONFIG_SWIZZLE == GLM_SWIZZLE_FUNCTION - GLM_SWIZZLE_GEN_VEC_FROM_VEC4(T, Q) -# endif -# endif - -# if GLM_SILENT_WARNINGS == GLM_ENABLE -# if GLM_COMPILER & GLM_COMPILER_CLANG -# pragma clang diagnostic pop -# elif 
GLM_COMPILER & GLM_COMPILER_GCC -# pragma GCC diagnostic pop -# elif GLM_COMPILER & GLM_COMPILER_VC -# pragma warning(pop) -# endif -# endif - - // -- Component accesses -- - - typedef length_t length_type; - - /// Return the count of components of the vector - GLM_FUNC_DECL static GLM_CONSTEXPR length_type length(){return 4;} - - GLM_FUNC_DECL GLM_CONSTEXPR T & operator[](length_type i); - GLM_FUNC_DECL GLM_CONSTEXPR T const& operator[](length_type i) const; - - // -- Implicit basic constructors -- - - GLM_FUNC_DECL GLM_CONSTEXPR vec() GLM_DEFAULT; - GLM_FUNC_DECL GLM_CONSTEXPR vec(vec<4, T, Q> const& v) GLM_DEFAULT; - template - GLM_FUNC_DECL GLM_CONSTEXPR vec(vec<4, T, P> const& v); - - // -- Explicit basic constructors -- - - GLM_FUNC_DECL GLM_CONSTEXPR explicit vec(T scalar); - GLM_FUNC_DECL GLM_CONSTEXPR vec(T x, T y, T z, T w); - - // -- Conversion scalar constructors -- - - template - GLM_FUNC_DECL GLM_CONSTEXPR explicit vec(vec<1, U, P> const& v); - - /// Explicit conversions (From section 5.4.1 Conversion and scalar constructors of GLSL 1.30.08 specification) - template - GLM_FUNC_DECL GLM_CONSTEXPR vec(X _x, Y _y, Z _z, W _w); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec(vec<1, X, Q> const& _x, Y _y, Z _z, W _w); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec(X _x, vec<1, Y, Q> const& _y, Z _z, W _w); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec(vec<1, X, Q> const& _x, vec<1, Y, Q> const& _y, Z _z, W _w); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec(X _x, Y _y, vec<1, Z, Q> const& _z, W _w); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec(vec<1, X, Q> const& _x, Y _y, vec<1, Z, Q> const& _z, W _w); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec(X _x, vec<1, Y, Q> const& _y, vec<1, Z, Q> const& _z, W _w); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec(vec<1, X, Q> const& _x, vec<1, Y, Q> const& _y, vec<1, Z, Q> const& _z, W _w); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec(vec<1, X, Q> const& _x, Y _y, Z _z, vec<1, W, Q> const& _w); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec(X _x, vec<1, Y, Q> const& _y, Z _z, vec<1, W, Q> const& _w); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec(vec<1, X, Q> const& _x, vec<1, Y, Q> const& _y, Z _z, vec<1, W, Q> const& _w); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec(X _x, Y _y, vec<1, Z, Q> const& _z, vec<1, W, Q> const& _w); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec(vec<1, X, Q> const& _x, Y _y, vec<1, Z, Q> const& _z, vec<1, W, Q> const& _w); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec(X _x, vec<1, Y, Q> const& _y, vec<1, Z, Q> const& _z, vec<1, W, Q> const& _w); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec(vec<1, X, Q> const& _x, vec<1, Y, Q> const& _Y, vec<1, Z, Q> const& _z, vec<1, W, Q> const& _w); - - // -- Conversion vector constructors -- - - /// Explicit conversions (From section 5.4.1 Conversion and scalar constructors of GLSL 1.30.08 specification) - template - GLM_FUNC_DECL GLM_CONSTEXPR vec(vec<2, A, P> const& _xy, B _z, C _w); - /// Explicit conversions (From section 5.4.1 Conversion and scalar constructors of GLSL 1.30.08 specification) - template - GLM_FUNC_DECL GLM_CONSTEXPR vec(vec<2, A, P> const& _xy, vec<1, B, P> const& _z, C _w); - /// Explicit conversions (From section 5.4.1 Conversion and scalar constructors of GLSL 1.30.08 specification) - template - GLM_FUNC_DECL GLM_CONSTEXPR vec(vec<2, A, P> const& _xy, B _z, vec<1, C, P> const& _w); - /// Explicit conversions (From section 5.4.1 Conversion and scalar constructors of GLSL 1.30.08 specification) - template - GLM_FUNC_DECL GLM_CONSTEXPR vec(vec<2, A, P> const& _xy, vec<1, 
B, P> const& _z, vec<1, C, P> const& _w); - /// Explicit conversions (From section 5.4.1 Conversion and scalar constructors of GLSL 1.30.08 specification) - template - GLM_FUNC_DECL GLM_CONSTEXPR vec(A _x, vec<2, B, P> const& _yz, C _w); - /// Explicit conversions (From section 5.4.1 Conversion and scalar constructors of GLSL 1.30.08 specification) - template - GLM_FUNC_DECL GLM_CONSTEXPR vec(vec<1, A, P> const& _x, vec<2, B, P> const& _yz, C _w); - /// Explicit conversions (From section 5.4.1 Conversion and scalar constructors of GLSL 1.30.08 specification) - template - GLM_FUNC_DECL GLM_CONSTEXPR vec(A _x, vec<2, B, P> const& _yz, vec<1, C, P> const& _w); - /// Explicit conversions (From section 5.4.1 Conversion and scalar constructors of GLSL 1.30.08 specification) - template - GLM_FUNC_DECL GLM_CONSTEXPR vec(vec<1, A, P> const& _x, vec<2, B, P> const& _yz, vec<1, C, P> const& _w); - /// Explicit conversions (From section 5.4.1 Conversion and scalar constructors of GLSL 1.30.08 specification) - template - GLM_FUNC_DECL GLM_CONSTEXPR vec(A _x, B _y, vec<2, C, P> const& _zw); - /// Explicit conversions (From section 5.4.1 Conversion and scalar constructors of GLSL 1.30.08 specification) - template - GLM_FUNC_DECL GLM_CONSTEXPR vec(vec<1, A, P> const& _x, B _y, vec<2, C, P> const& _zw); - /// Explicit conversions (From section 5.4.1 Conversion and scalar constructors of GLSL 1.30.08 specification) - template - GLM_FUNC_DECL GLM_CONSTEXPR vec(A _x, vec<1, B, P> const& _y, vec<2, C, P> const& _zw); - /// Explicit conversions (From section 5.4.1 Conversion and scalar constructors of GLSL 1.30.08 specification) - template - GLM_FUNC_DECL GLM_CONSTEXPR vec(vec<1, A, P> const& _x, vec<1, B, P> const& _y, vec<2, C, P> const& _zw); - /// Explicit conversions (From section 5.4.1 Conversion and scalar constructors of GLSL 1.30.08 specification) - template - GLM_FUNC_DECL GLM_CONSTEXPR vec(vec<3, A, P> const& _xyz, B _w); - /// Explicit conversions (From section 5.4.1 Conversion and scalar constructors of GLSL 1.30.08 specification) - template - GLM_FUNC_DECL GLM_CONSTEXPR vec(vec<3, A, P> const& _xyz, vec<1, B, P> const& _w); - /// Explicit conversions (From section 5.4.1 Conversion and scalar constructors of GLSL 1.30.08 specification) - template - GLM_FUNC_DECL GLM_CONSTEXPR vec(A _x, vec<3, B, P> const& _yzw); - /// Explicit conversions (From section 5.4.1 Conversion and scalar constructors of GLSL 1.30.08 specification) - template - GLM_FUNC_DECL GLM_CONSTEXPR vec(vec<1, A, P> const& _x, vec<3, B, P> const& _yzw); - /// Explicit conversions (From section 5.4.1 Conversion and scalar constructors of GLSL 1.30.08 specification) - template - GLM_FUNC_DECL GLM_CONSTEXPR vec(vec<2, A, P> const& _xy, vec<2, B, P> const& _zw); - - /// Explicit conversions (From section 5.4.1 Conversion and scalar constructors of GLSL 1.30.08 specification) - template - GLM_FUNC_DECL GLM_CONSTEXPR GLM_EXPLICIT vec(vec<4, U, P> const& v); - - // -- Swizzle constructors -- -# if GLM_CONFIG_SWIZZLE == GLM_SWIZZLE_OPERATOR - template - GLM_FUNC_DECL GLM_CONSTEXPR vec(detail::_swizzle<4, T, Q, E0, E1, E2, E3> const& that) - { - *this = that(); - } - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec(detail::_swizzle<2, T, Q, E0, E1, -1, -2> const& v, detail::_swizzle<2, T, Q, F0, F1, -1, -2> const& u) - { - *this = vec<4, T, Q>(v(), u()); - } - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec(T const& x, T const& y, detail::_swizzle<2, T, Q, E0, E1, -1, -2> const& v) - { - *this = vec<4, T, Q>(x, y, v()); - } - - template - 
GLM_FUNC_DECL GLM_CONSTEXPR vec(T const& x, detail::_swizzle<2, T, Q, E0, E1, -1, -2> const& v, T const& w) - { - *this = vec<4, T, Q>(x, v(), w); - } - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec(detail::_swizzle<2, T, Q, E0, E1, -1, -2> const& v, T const& z, T const& w) - { - *this = vec<4, T, Q>(v(), z, w); - } - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec(detail::_swizzle<3, T, Q, E0, E1, E2, -1> const& v, T const& w) - { - *this = vec<4, T, Q>(v(), w); - } - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec(T const& x, detail::_swizzle<3, T, Q, E0, E1, E2, -1> const& v) - { - *this = vec<4, T, Q>(x, v()); - } -# endif//GLM_CONFIG_SWIZZLE == GLM_SWIZZLE_OPERATOR - - // -- Unary arithmetic operators -- - - GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q>& operator=(vec<4, T, Q> const& v) GLM_DEFAULT; - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q>& operator=(vec<4, U, Q> const& v); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q>& operator+=(U scalar); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q>& operator+=(vec<1, U, Q> const& v); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q>& operator+=(vec<4, U, Q> const& v); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q>& operator-=(U scalar); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q>& operator-=(vec<1, U, Q> const& v); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q>& operator-=(vec<4, U, Q> const& v); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q>& operator*=(U scalar); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q>& operator*=(vec<1, U, Q> const& v); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q>& operator*=(vec<4, U, Q> const& v); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q>& operator/=(U scalar); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q>& operator/=(vec<1, U, Q> const& v); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q>& operator/=(vec<4, U, Q> const& v); - - // -- Increment and decrement operators -- - - GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> & operator++(); - GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> & operator--(); - GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator++(int); - GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator--(int); - - // -- Unary bit operators -- - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> & operator%=(U scalar); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> & operator%=(vec<1, U, Q> const& v); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> & operator%=(vec<4, U, Q> const& v); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> & operator&=(U scalar); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> & operator&=(vec<1, U, Q> const& v); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> & operator&=(vec<4, U, Q> const& v); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> & operator|=(U scalar); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> & operator|=(vec<1, U, Q> const& v); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> & operator|=(vec<4, U, Q> const& v); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> & operator^=(U scalar); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> & operator^=(vec<1, U, Q> const& v); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> & operator^=(vec<4, U, Q> const& v); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> & operator<<=(U scalar); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> & operator<<=(vec<1, U, Q> const& v); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> & operator<<=(vec<4, U, Q> const& v); - 
template - GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> & operator>>=(U scalar); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> & operator>>=(vec<1, U, Q> const& v); - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> & operator>>=(vec<4, U, Q> const& v); - }; - - // -- Unary operators -- - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator+(vec<4, T, Q> const& v); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator-(vec<4, T, Q> const& v); - - // -- Binary operators -- - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator+(vec<4, T, Q> const& v, T const & scalar); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator+(vec<4, T, Q> const& v1, vec<1, T, Q> const& v2); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator+(T scalar, vec<4, T, Q> const& v); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator+(vec<1, T, Q> const& v1, vec<4, T, Q> const& v2); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator+(vec<4, T, Q> const& v1, vec<4, T, Q> const& v2); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator-(vec<4, T, Q> const& v, T const & scalar); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator-(vec<4, T, Q> const& v1, vec<1, T, Q> const& v2); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator-(T scalar, vec<4, T, Q> const& v); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator-(vec<1, T, Q> const& v1, vec<4, T, Q> const& v2); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator-(vec<4, T, Q> const& v1, vec<4, T, Q> const& v2); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator*(vec<4, T, Q> const& v, T const & scalar); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator*(vec<4, T, Q> const& v1, vec<1, T, Q> const& v2); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator*(T scalar, vec<4, T, Q> const& v); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator*(vec<1, T, Q> const& v1, vec<4, T, Q> const& v2); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator*(vec<4, T, Q> const& v1, vec<4, T, Q> const& v2); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator/(vec<4, T, Q> const& v, T const & scalar); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator/(vec<4, T, Q> const& v1, vec<1, T, Q> const& v2); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator/(T scalar, vec<4, T, Q> const& v); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator/(vec<1, T, Q> const& v1, vec<4, T, Q> const& v2); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator/(vec<4, T, Q> const& v1, vec<4, T, Q> const& v2); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator%(vec<4, T, Q> const& v, T scalar); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator%(vec<4, T, Q> const& v, vec<1, T, Q> const& scalar); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator%(T scalar, vec<4, T, Q> const& v); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator%(vec<1, T, Q> const& scalar, vec<4, T, Q> const& v); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator%(vec<4, T, Q> const& v1, vec<4, T, Q> const& v2); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator&(vec<4, T, Q> const& v, T scalar); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator&(vec<4, T, Q> const& v, vec<1, T, Q> const& scalar); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator&(T 
scalar, vec<4, T, Q> const& v); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator&(vec<1, T, Q> const& scalar, vec<4, T, Q> const& v); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator&(vec<4, T, Q> const& v1, vec<4, T, Q> const& v2); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator|(vec<4, T, Q> const& v, T scalar); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator|(vec<4, T, Q> const& v, vec<1, T, Q> const& scalar); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator|(T scalar, vec<4, T, Q> const& v); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator|(vec<1, T, Q> const& scalar, vec<4, T, Q> const& v); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator|(vec<4, T, Q> const& v1, vec<4, T, Q> const& v2); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator^(vec<4, T, Q> const& v, T scalar); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator^(vec<4, T, Q> const& v, vec<1, T, Q> const& scalar); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator^(T scalar, vec<4, T, Q> const& v); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator^(vec<1, T, Q> const& scalar, vec<4, T, Q> const& v); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator^(vec<4, T, Q> const& v1, vec<4, T, Q> const& v2); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator<<(vec<4, T, Q> const& v, T scalar); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator<<(vec<4, T, Q> const& v, vec<1, T, Q> const& scalar); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator<<(T scalar, vec<4, T, Q> const& v); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator<<(vec<1, T, Q> const& scalar, vec<4, T, Q> const& v); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator<<(vec<4, T, Q> const& v1, vec<4, T, Q> const& v2); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator>>(vec<4, T, Q> const& v, T scalar); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator>>(vec<4, T, Q> const& v, vec<1, T, Q> const& scalar); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator>>(T scalar, vec<4, T, Q> const& v); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator>>(vec<1, T, Q> const& scalar, vec<4, T, Q> const& v); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator>>(vec<4, T, Q> const& v1, vec<4, T, Q> const& v2); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<4, T, Q> operator~(vec<4, T, Q> const& v); - - // -- Boolean operators -- - - template - GLM_FUNC_DECL GLM_CONSTEXPR bool operator==(vec<4, T, Q> const& v1, vec<4, T, Q> const& v2); - - template - GLM_FUNC_DECL GLM_CONSTEXPR bool operator!=(vec<4, T, Q> const& v1, vec<4, T, Q> const& v2); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<4, bool, Q> operator&&(vec<4, bool, Q> const& v1, vec<4, bool, Q> const& v2); - - template - GLM_FUNC_DECL GLM_CONSTEXPR vec<4, bool, Q> operator||(vec<4, bool, Q> const& v1, vec<4, bool, Q> const& v2); -}//namespace glm - -#ifndef GLM_EXTERNAL_TEMPLATE -#include "type_vec4.inl" -#endif//GLM_EXTERNAL_TEMPLATE diff --git a/third_party/glm/detail/type_vec4.inl b/third_party/glm/detail/type_vec4.inl deleted file mode 100755 index 3c212d9..0000000 --- a/third_party/glm/detail/type_vec4.inl +++ /dev/null @@ -1,1140 +0,0 @@ -/// @ref core - -#include "compute_vector_relational.hpp" - -namespace glm{ -namespace detail -{ - template - struct compute_vec4_add - { - GLM_FUNC_QUALIFIER 
GLM_CONSTEXPR static vec<4, T, Q> call(vec<4, T, Q> const& a, vec<4, T, Q> const& b) - { - return vec<4, T, Q>(a.x + b.x, a.y + b.y, a.z + b.z, a.w + b.w); - } - }; - - template - struct compute_vec4_sub - { - GLM_FUNC_QUALIFIER GLM_CONSTEXPR static vec<4, T, Q> call(vec<4, T, Q> const& a, vec<4, T, Q> const& b) - { - return vec<4, T, Q>(a.x - b.x, a.y - b.y, a.z - b.z, a.w - b.w); - } - }; - - template - struct compute_vec4_mul - { - GLM_FUNC_QUALIFIER GLM_CONSTEXPR static vec<4, T, Q> call(vec<4, T, Q> const& a, vec<4, T, Q> const& b) - { - return vec<4, T, Q>(a.x * b.x, a.y * b.y, a.z * b.z, a.w * b.w); - } - }; - - template - struct compute_vec4_div - { - GLM_FUNC_QUALIFIER GLM_CONSTEXPR static vec<4, T, Q> call(vec<4, T, Q> const& a, vec<4, T, Q> const& b) - { - return vec<4, T, Q>(a.x / b.x, a.y / b.y, a.z / b.z, a.w / b.w); - } - }; - - template - struct compute_vec4_mod - { - GLM_FUNC_QUALIFIER GLM_CONSTEXPR static vec<4, T, Q> call(vec<4, T, Q> const& a, vec<4, T, Q> const& b) - { - return vec<4, T, Q>(a.x % b.x, a.y % b.y, a.z % b.z, a.w % b.w); - } - }; - - template - struct compute_vec4_and - { - GLM_FUNC_QUALIFIER GLM_CONSTEXPR static vec<4, T, Q> call(vec<4, T, Q> const& a, vec<4, T, Q> const& b) - { - return vec<4, T, Q>(a.x & b.x, a.y & b.y, a.z & b.z, a.w & b.w); - } - }; - - template - struct compute_vec4_or - { - GLM_FUNC_QUALIFIER GLM_CONSTEXPR static vec<4, T, Q> call(vec<4, T, Q> const& a, vec<4, T, Q> const& b) - { - return vec<4, T, Q>(a.x | b.x, a.y | b.y, a.z | b.z, a.w | b.w); - } - }; - - template - struct compute_vec4_xor - { - GLM_FUNC_QUALIFIER GLM_CONSTEXPR static vec<4, T, Q> call(vec<4, T, Q> const& a, vec<4, T, Q> const& b) - { - return vec<4, T, Q>(a.x ^ b.x, a.y ^ b.y, a.z ^ b.z, a.w ^ b.w); - } - }; - - template - struct compute_vec4_shift_left - { - GLM_FUNC_QUALIFIER GLM_CONSTEXPR static vec<4, T, Q> call(vec<4, T, Q> const& a, vec<4, T, Q> const& b) - { - return vec<4, T, Q>(a.x << b.x, a.y << b.y, a.z << b.z, a.w << b.w); - } - }; - - template - struct compute_vec4_shift_right - { - GLM_FUNC_QUALIFIER GLM_CONSTEXPR static vec<4, T, Q> call(vec<4, T, Q> const& a, vec<4, T, Q> const& b) - { - return vec<4, T, Q>(a.x >> b.x, a.y >> b.y, a.z >> b.z, a.w >> b.w); - } - }; - - template - struct compute_vec4_equal - { - GLM_FUNC_QUALIFIER GLM_CONSTEXPR static bool call(vec<4, T, Q> const& v1, vec<4, T, Q> const& v2) - { - return - detail::compute_equal::is_iec559>::call(v1.x, v2.x) && - detail::compute_equal::is_iec559>::call(v1.y, v2.y) && - detail::compute_equal::is_iec559>::call(v1.z, v2.z) && - detail::compute_equal::is_iec559>::call(v1.w, v2.w); - } - }; - - template - struct compute_vec4_nequal - { - GLM_FUNC_QUALIFIER GLM_CONSTEXPR static bool call(vec<4, T, Q> const& v1, vec<4, T, Q> const& v2) - { - return !compute_vec4_equal::value, sizeof(T) * 8, detail::is_aligned::value>::call(v1, v2); - } - }; - - template - struct compute_vec4_bitwise_not - { - GLM_FUNC_QUALIFIER GLM_CONSTEXPR static vec<4, T, Q> call(vec<4, T, Q> const& v) - { - return vec<4, T, Q>(~v.x, ~v.y, ~v.z, ~v.w); - } - }; -}//namespace detail - - // -- Implicit basic constructors -- - -# if GLM_CONFIG_DEFAULTED_FUNCTIONS == GLM_DISABLE - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q>::vec() -# if GLM_CONFIG_CTOR_INIT != GLM_CTOR_INIT_DISABLE - : x(0), y(0), z(0), w(0) -# endif - {} - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q>::vec(vec<4, T, Q> const& v) - : x(v.x), y(v.y), z(v.z), w(v.w) - {} -# endif - - template - template - GLM_FUNC_QUALIFIER 
GLM_CONSTEXPR vec<4, T, Q>::vec(vec<4, T, P> const& v) - : x(v.x), y(v.y), z(v.z), w(v.w) - {} - - // -- Explicit basic constructors -- - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q>::vec(T scalar) - : x(scalar), y(scalar), z(scalar), w(scalar) - {} - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q>::vec(T _x, T _y, T _z, T _w) - : x(_x), y(_y), z(_z), w(_w) - {} - - // -- Conversion scalar constructors -- - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q>::vec(vec<1, U, P> const& v) - : x(static_cast(v.x)) - , y(static_cast(v.x)) - , z(static_cast(v.x)) - , w(static_cast(v.x)) - {} - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q>::vec(X _x, Y _y, Z _z, W _w) - : x(static_cast(_x)) - , y(static_cast(_y)) - , z(static_cast(_z)) - , w(static_cast(_w)) - {} - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q>::vec(vec<1, X, Q> const& _x, Y _y, Z _z, W _w) - : x(static_cast(_x.x)) - , y(static_cast(_y)) - , z(static_cast(_z)) - , w(static_cast(_w)) - {} - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q>::vec(X _x, vec<1, Y, Q> const& _y, Z _z, W _w) - : x(static_cast(_x)) - , y(static_cast(_y.x)) - , z(static_cast(_z)) - , w(static_cast(_w)) - {} - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q>::vec(vec<1, X, Q> const& _x, vec<1, Y, Q> const& _y, Z _z, W _w) - : x(static_cast(_x.x)) - , y(static_cast(_y.x)) - , z(static_cast(_z)) - , w(static_cast(_w)) - {} - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q>::vec(X _x, Y _y, vec<1, Z, Q> const& _z, W _w) - : x(static_cast(_x)) - , y(static_cast(_y)) - , z(static_cast(_z.x)) - , w(static_cast(_w)) - {} - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q>::vec(vec<1, X, Q> const& _x, Y _y, vec<1, Z, Q> const& _z, W _w) - : x(static_cast(_x.x)) - , y(static_cast(_y)) - , z(static_cast(_z.x)) - , w(static_cast(_w)) - {} - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q>::vec(X _x, vec<1, Y, Q> const& _y, vec<1, Z, Q> const& _z, W _w) - : x(static_cast(_x)) - , y(static_cast(_y.x)) - , z(static_cast(_z.x)) - , w(static_cast(_w)) - {} - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q>::vec(vec<1, X, Q> const& _x, vec<1, Y, Q> const& _y, vec<1, Z, Q> const& _z, W _w) - : x(static_cast(_x.x)) - , y(static_cast(_y.x)) - , z(static_cast(_z.x)) - , w(static_cast(_w)) - {} - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q>::vec(vec<1, X, Q> const& _x, Y _y, Z _z, vec<1, W, Q> const& _w) - : x(static_cast(_x.x)) - , y(static_cast(_y)) - , z(static_cast(_z)) - , w(static_cast(_w.x)) - {} - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q>::vec(X _x, vec<1, Y, Q> const& _y, Z _z, vec<1, W, Q> const& _w) - : x(static_cast(_x)) - , y(static_cast(_y.x)) - , z(static_cast(_z)) - , w(static_cast(_w.x)) - {} - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q>::vec(vec<1, X, Q> const& _x, vec<1, Y, Q> const& _y, Z _z, vec<1, W, Q> const& _w) - : x(static_cast(_x.x)) - , y(static_cast(_y.x)) - , z(static_cast(_z)) - , w(static_cast(_w.x)) - {} - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q>::vec(X _x, Y _y, vec<1, Z, Q> const& _z, vec<1, W, Q> const& _w) - : x(static_cast(_x)) - , y(static_cast(_y)) - , z(static_cast(_z.x)) - , w(static_cast(_w.x)) - {} - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q>::vec(vec<1, X, Q> const& _x, Y _y, vec<1, Z, Q> 
const& _z, vec<1, W, Q> const& _w) - : x(static_cast(_x.x)) - , y(static_cast(_y)) - , z(static_cast(_z.x)) - , w(static_cast(_w.x)) - {} - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q>::vec(X _x, vec<1, Y, Q> const& _y, vec<1, Z, Q> const& _z, vec<1, W, Q> const& _w) - : x(static_cast(_x)) - , y(static_cast(_y.x)) - , z(static_cast(_z.x)) - , w(static_cast(_w.x)) - {} - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q>::vec(vec<1, X, Q> const& _x, vec<1, Y, Q> const& _y, vec<1, Z, Q> const& _z, vec<1, W, Q> const& _w) - : x(static_cast(_x.x)) - , y(static_cast(_y.x)) - , z(static_cast(_z.x)) - , w(static_cast(_w.x)) - {} - - // -- Conversion vector constructors -- - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q>::vec(vec<2, A, P> const& _xy, B _z, C _w) - : x(static_cast(_xy.x)) - , y(static_cast(_xy.y)) - , z(static_cast(_z)) - , w(static_cast(_w)) - {} - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q>::vec(vec<2, A, P> const& _xy, vec<1, B, P> const& _z, C _w) - : x(static_cast(_xy.x)) - , y(static_cast(_xy.y)) - , z(static_cast(_z.x)) - , w(static_cast(_w)) - {} - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q>::vec(vec<2, A, P> const& _xy, B _z, vec<1, C, P> const& _w) - : x(static_cast(_xy.x)) - , y(static_cast(_xy.y)) - , z(static_cast(_z)) - , w(static_cast(_w.x)) - {} - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q>::vec(vec<2, A, P> const& _xy, vec<1, B, P> const& _z, vec<1, C, P> const& _w) - : x(static_cast(_xy.x)) - , y(static_cast(_xy.y)) - , z(static_cast(_z.x)) - , w(static_cast(_w.x)) - {} - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q>::vec(A _x, vec<2, B, P> const& _yz, C _w) - : x(static_cast(_x)) - , y(static_cast(_yz.x)) - , z(static_cast(_yz.y)) - , w(static_cast(_w)) - {} - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q>::vec(vec<1, A, P> const& _x, vec<2, B, P> const& _yz, C _w) - : x(static_cast(_x.x)) - , y(static_cast(_yz.x)) - , z(static_cast(_yz.y)) - , w(static_cast(_w)) - {} - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q>::vec(A _x, vec<2, B, P> const& _yz, vec<1, C, P> const& _w) - : x(static_cast(_x)) - , y(static_cast(_yz.x)) - , z(static_cast(_yz.y)) - , w(static_cast(_w.x)) - {} - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q>::vec(vec<1, A, P> const& _x, vec<2, B, P> const& _yz, vec<1, C, P> const& _w) - : x(static_cast(_x.x)) - , y(static_cast(_yz.x)) - , z(static_cast(_yz.y)) - , w(static_cast(_w.x)) - {} - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q>::vec(A _x, B _y, vec<2, C, P> const& _zw) - : x(static_cast(_x)) - , y(static_cast(_y)) - , z(static_cast(_zw.x)) - , w(static_cast(_zw.y)) - {} - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q>::vec(vec<1, A, P> const& _x, B _y, vec<2, C, P> const& _zw) - : x(static_cast(_x.x)) - , y(static_cast(_y)) - , z(static_cast(_zw.x)) - , w(static_cast(_zw.y)) - {} - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q>::vec(A _x, vec<1, B, P> const& _y, vec<2, C, P> const& _zw) - : x(static_cast(_x)) - , y(static_cast(_y.x)) - , z(static_cast(_zw.x)) - , w(static_cast(_zw.y)) - {} - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q>::vec(vec<1, A, P> const& _x, vec<1, B, P> const& _y, vec<2, C, P> const& _zw) - : x(static_cast(_x.x)) - , y(static_cast(_y.x)) - , z(static_cast(_zw.x)) - , 
w(static_cast(_zw.y)) - {} - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q>::vec(vec<3, A, P> const& _xyz, B _w) - : x(static_cast(_xyz.x)) - , y(static_cast(_xyz.y)) - , z(static_cast(_xyz.z)) - , w(static_cast(_w)) - {} - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q>::vec(vec<3, A, P> const& _xyz, vec<1, B, P> const& _w) - : x(static_cast(_xyz.x)) - , y(static_cast(_xyz.y)) - , z(static_cast(_xyz.z)) - , w(static_cast(_w.x)) - {} - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q>::vec(A _x, vec<3, B, P> const& _yzw) - : x(static_cast(_x)) - , y(static_cast(_yzw.x)) - , z(static_cast(_yzw.y)) - , w(static_cast(_yzw.z)) - {} - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q>::vec(vec<1, A, P> const& _x, vec<3, B, P> const& _yzw) - : x(static_cast(_x.x)) - , y(static_cast(_yzw.x)) - , z(static_cast(_yzw.y)) - , w(static_cast(_yzw.z)) - {} - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q>::vec(vec<2, A, P> const& _xy, vec<2, B, P> const& _zw) - : x(static_cast(_xy.x)) - , y(static_cast(_xy.y)) - , z(static_cast(_zw.x)) - , w(static_cast(_zw.y)) - {} - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q>::vec(vec<4, U, P> const& v) - : x(static_cast(v.x)) - , y(static_cast(v.y)) - , z(static_cast(v.z)) - , w(static_cast(v.w)) - {} - - // -- Component accesses -- - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR T& vec<4, T, Q>::operator[](typename vec<4, T, Q>::length_type i) - { - assert(i >= 0 && i < this->length()); - switch(i) - { - default: - case 0: - return x; - case 1: - return y; - case 2: - return z; - case 3: - return w; - } - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR T const& vec<4, T, Q>::operator[](typename vec<4, T, Q>::length_type i) const - { - assert(i >= 0 && i < this->length()); - switch(i) - { - default: - case 0: - return x; - case 1: - return y; - case 2: - return z; - case 3: - return w; - } - } - - // -- Unary arithmetic operators -- - -# if GLM_CONFIG_DEFAULTED_FUNCTIONS == GLM_DISABLE - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q>& vec<4, T, Q>::operator=(vec<4, T, Q> const& v) - { - this->x = v.x; - this->y = v.y; - this->z = v.z; - this->w = v.w; - return *this; - } -# endif - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q>& vec<4, T, Q>::operator=(vec<4, U, Q> const& v) - { - this->x = static_cast(v.x); - this->y = static_cast(v.y); - this->z = static_cast(v.z); - this->w = static_cast(v.w); - return *this; - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> & vec<4, T, Q>::operator+=(U scalar) - { - return (*this = detail::compute_vec4_add::value>::call(*this, vec<4, T, Q>(scalar))); - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> & vec<4, T, Q>::operator+=(vec<1, U, Q> const& v) - { - return (*this = detail::compute_vec4_add::value>::call(*this, vec<4, T, Q>(v.x))); - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> & vec<4, T, Q>::operator+=(vec<4, U, Q> const& v) - { - return (*this = detail::compute_vec4_add::value>::call(*this, vec<4, T, Q>(v))); - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> & vec<4, T, Q>::operator-=(U scalar) - { - return (*this = detail::compute_vec4_sub::value>::call(*this, vec<4, T, Q>(scalar))); - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> & vec<4, T, Q>::operator-=(vec<1, U, Q> const& v) - { - return (*this = 
detail::compute_vec4_sub::value>::call(*this, vec<4, T, Q>(v.x))); - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> & vec<4, T, Q>::operator-=(vec<4, U, Q> const& v) - { - return (*this = detail::compute_vec4_sub::value>::call(*this, vec<4, T, Q>(v))); - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> & vec<4, T, Q>::operator*=(U scalar) - { - return (*this = detail::compute_vec4_mul::value>::call(*this, vec<4, T, Q>(scalar))); - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> & vec<4, T, Q>::operator*=(vec<1, U, Q> const& v) - { - return (*this = detail::compute_vec4_mul::value>::call(*this, vec<4, T, Q>(v.x))); - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> & vec<4, T, Q>::operator*=(vec<4, U, Q> const& v) - { - return (*this = detail::compute_vec4_mul::value>::call(*this, vec<4, T, Q>(v))); - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> & vec<4, T, Q>::operator/=(U scalar) - { - return (*this = detail::compute_vec4_div::value>::call(*this, vec<4, T, Q>(scalar))); - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> & vec<4, T, Q>::operator/=(vec<1, U, Q> const& v) - { - return (*this = detail::compute_vec4_div::value>::call(*this, vec<4, T, Q>(v.x))); - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> & vec<4, T, Q>::operator/=(vec<4, U, Q> const& v) - { - return (*this = detail::compute_vec4_div::value>::call(*this, vec<4, T, Q>(v))); - } - - // -- Increment and decrement operators -- - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> & vec<4, T, Q>::operator++() - { - ++this->x; - ++this->y; - ++this->z; - ++this->w; - return *this; - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> & vec<4, T, Q>::operator--() - { - --this->x; - --this->y; - --this->z; - --this->w; - return *this; - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> vec<4, T, Q>::operator++(int) - { - vec<4, T, Q> Result(*this); - ++*this; - return Result; - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> vec<4, T, Q>::operator--(int) - { - vec<4, T, Q> Result(*this); - --*this; - return Result; - } - - // -- Unary bit operators -- - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> & vec<4, T, Q>::operator%=(U scalar) - { - return (*this = detail::compute_vec4_mod::value>::call(*this, vec<4, T, Q>(scalar))); - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> & vec<4, T, Q>::operator%=(vec<1, U, Q> const& v) - { - return (*this = detail::compute_vec4_mod::value>::call(*this, vec<4, T, Q>(v))); - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> & vec<4, T, Q>::operator%=(vec<4, U, Q> const& v) - { - return (*this = detail::compute_vec4_mod::value>::call(*this, vec<4, T, Q>(v))); - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> & vec<4, T, Q>::operator&=(U scalar) - { - return (*this = detail::compute_vec4_and::value, sizeof(T) * 8, detail::is_aligned::value>::call(*this, vec<4, T, Q>(scalar))); - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> & vec<4, T, Q>::operator&=(vec<1, U, Q> const& v) - { - return (*this = detail::compute_vec4_and::value, sizeof(T) * 8, detail::is_aligned::value>::call(*this, vec<4, T, Q>(v))); - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> & vec<4, T, Q>::operator&=(vec<4, U, Q> const& v) - { - return (*this = 
detail::compute_vec4_and::value, sizeof(T) * 8, detail::is_aligned::value>::call(*this, vec<4, T, Q>(v))); - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> & vec<4, T, Q>::operator|=(U scalar) - { - return (*this = detail::compute_vec4_or::value, sizeof(T) * 8, detail::is_aligned::value>::call(*this, vec<4, T, Q>(scalar))); - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> & vec<4, T, Q>::operator|=(vec<1, U, Q> const& v) - { - return (*this = detail::compute_vec4_or::value, sizeof(T) * 8, detail::is_aligned::value>::call(*this, vec<4, T, Q>(v))); - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> & vec<4, T, Q>::operator|=(vec<4, U, Q> const& v) - { - return (*this = detail::compute_vec4_or::value, sizeof(T) * 8, detail::is_aligned::value>::call(*this, vec<4, T, Q>(v))); - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> & vec<4, T, Q>::operator^=(U scalar) - { - return (*this = detail::compute_vec4_xor::value, sizeof(T) * 8, detail::is_aligned::value>::call(*this, vec<4, T, Q>(scalar))); - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> & vec<4, T, Q>::operator^=(vec<1, U, Q> const& v) - { - return (*this = detail::compute_vec4_xor::value, sizeof(T) * 8, detail::is_aligned::value>::call(*this, vec<4, T, Q>(v))); - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> & vec<4, T, Q>::operator^=(vec<4, U, Q> const& v) - { - return (*this = detail::compute_vec4_xor::value, sizeof(T) * 8, detail::is_aligned::value>::call(*this, vec<4, T, Q>(v))); - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> & vec<4, T, Q>::operator<<=(U scalar) - { - return (*this = detail::compute_vec4_shift_left::value, sizeof(T) * 8, detail::is_aligned::value>::call(*this, vec<4, T, Q>(scalar))); - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> & vec<4, T, Q>::operator<<=(vec<1, U, Q> const& v) - { - return (*this = detail::compute_vec4_shift_left::value, sizeof(T) * 8, detail::is_aligned::value>::call(*this, vec<4, T, Q>(v))); - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> & vec<4, T, Q>::operator<<=(vec<4, U, Q> const& v) - { - return (*this = detail::compute_vec4_shift_left::value, sizeof(T) * 8, detail::is_aligned::value>::call(*this, vec<4, T, Q>(v))); - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> & vec<4, T, Q>::operator>>=(U scalar) - { - return (*this = detail::compute_vec4_shift_right::value, sizeof(T) * 8, detail::is_aligned::value>::call(*this, vec<4, T, Q>(scalar))); - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> & vec<4, T, Q>::operator>>=(vec<1, U, Q> const& v) - { - return (*this = detail::compute_vec4_shift_right::value, sizeof(T) * 8, detail::is_aligned::value>::call(*this, vec<4, T, Q>(v))); - } - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> & vec<4, T, Q>::operator>>=(vec<4, U, Q> const& v) - { - return (*this = detail::compute_vec4_shift_right::value, sizeof(T) * 8, detail::is_aligned::value>::call(*this, vec<4, T, Q>(v))); - } - - // -- Unary constant operators -- - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> operator+(vec<4, T, Q> const& v) - { - return v; - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> operator-(vec<4, T, Q> const& v) - { - return vec<4, T, Q>(0) -= v; - } - - // -- Binary arithmetic operators -- - - template - GLM_FUNC_QUALIFIER 
GLM_CONSTEXPR vec<4, T, Q> operator+(vec<4, T, Q> const& v, T const & scalar) - { - return vec<4, T, Q>(v) += scalar; - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> operator+(vec<4, T, Q> const& v1, vec<1, T, Q> const& v2) - { - return vec<4, T, Q>(v1) += v2; - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> operator+(T scalar, vec<4, T, Q> const& v) - { - return vec<4, T, Q>(v) += scalar; - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> operator+(vec<1, T, Q> const& v1, vec<4, T, Q> const& v2) - { - return vec<4, T, Q>(v2) += v1; - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> operator+(vec<4, T, Q> const& v1, vec<4, T, Q> const& v2) - { - return vec<4, T, Q>(v1) += v2; - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> operator-(vec<4, T, Q> const& v, T const & scalar) - { - return vec<4, T, Q>(v) -= scalar; - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> operator-(vec<4, T, Q> const& v1, vec<1, T, Q> const& v2) - { - return vec<4, T, Q>(v1) -= v2; - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> operator-(T scalar, vec<4, T, Q> const& v) - { - return vec<4, T, Q>(scalar) -= v; - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> operator-(vec<1, T, Q> const& v1, vec<4, T, Q> const& v2) - { - return vec<4, T, Q>(v1.x) -= v2; - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> operator-(vec<4, T, Q> const& v1, vec<4, T, Q> const& v2) - { - return vec<4, T, Q>(v1) -= v2; - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> operator*(vec<4, T, Q> const& v, T const & scalar) - { - return vec<4, T, Q>(v) *= scalar; - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> operator*(vec<4, T, Q> const& v1, vec<1, T, Q> const& v2) - { - return vec<4, T, Q>(v1) *= v2; - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> operator*(T scalar, vec<4, T, Q> const& v) - { - return vec<4, T, Q>(v) *= scalar; - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> operator*(vec<1, T, Q> const& v1, vec<4, T, Q> const& v2) - { - return vec<4, T, Q>(v2) *= v1; - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> operator*(vec<4, T, Q> const& v1, vec<4, T, Q> const& v2) - { - return vec<4, T, Q>(v1) *= v2; - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> operator/(vec<4, T, Q> const& v, T const & scalar) - { - return vec<4, T, Q>(v) /= scalar; - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> operator/(vec<4, T, Q> const& v1, vec<1, T, Q> const& v2) - { - return vec<4, T, Q>(v1) /= v2; - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> operator/(T scalar, vec<4, T, Q> const& v) - { - return vec<4, T, Q>(scalar) /= v; - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> operator/(vec<1, T, Q> const& v1, vec<4, T, Q> const& v2) - { - return vec<4, T, Q>(v1.x) /= v2; - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> operator/(vec<4, T, Q> const& v1, vec<4, T, Q> const& v2) - { - return vec<4, T, Q>(v1) /= v2; - } - - // -- Binary bit operators -- - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> operator%(vec<4, T, Q> const& v, T scalar) - { - return vec<4, T, Q>(v) %= scalar; - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> operator%(vec<4, T, Q> const& v1, vec<1, T, Q> const& v2) - { - return vec<4, T, Q>(v1) %= v2.x; - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> operator%(T scalar, vec<4, T, Q> const& v) 
- { - return vec<4, T, Q>(scalar) %= v; - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> operator%(vec<1, T, Q> const& scalar, vec<4, T, Q> const& v) - { - return vec<4, T, Q>(scalar.x) %= v; - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> operator%(vec<4, T, Q> const& v1, vec<4, T, Q> const& v2) - { - return vec<4, T, Q>(v1) %= v2; - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> operator&(vec<4, T, Q> const& v, T scalar) - { - return vec<4, T, Q>(v) &= scalar; - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> operator&(vec<4, T, Q> const& v, vec<1, T, Q> const& scalar) - { - return vec<4, T, Q>(v) &= scalar; - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> operator&(T scalar, vec<4, T, Q> const& v) - { - return vec<4, T, Q>(scalar) &= v; - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> operator&(vec<1, T, Q> const& v1, vec<4, T, Q> const& v2) - { - return vec<4, T, Q>(v1.x) &= v2; - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> operator&(vec<4, T, Q> const& v1, vec<4, T, Q> const& v2) - { - return vec<4, T, Q>(v1) &= v2; - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> operator|(vec<4, T, Q> const& v, T scalar) - { - return vec<4, T, Q>(v) |= scalar; - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> operator|(vec<4, T, Q> const& v1, vec<1, T, Q> const& v2) - { - return vec<4, T, Q>(v1) |= v2.x; - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> operator|(T scalar, vec<4, T, Q> const& v) - { - return vec<4, T, Q>(scalar) |= v; - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> operator|(vec<1, T, Q> const& v1, vec<4, T, Q> const& v2) - { - return vec<4, T, Q>(v1.x) |= v2; - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> operator|(vec<4, T, Q> const& v1, vec<4, T, Q> const& v2) - { - return vec<4, T, Q>(v1) |= v2; - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> operator^(vec<4, T, Q> const& v, T scalar) - { - return vec<4, T, Q>(v) ^= scalar; - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> operator^(vec<4, T, Q> const& v1, vec<1, T, Q> const& v2) - { - return vec<4, T, Q>(v1) ^= v2.x; - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> operator^(T scalar, vec<4, T, Q> const& v) - { - return vec<4, T, Q>(scalar) ^= v; - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> operator^(vec<1, T, Q> const& v1, vec<4, T, Q> const& v2) - { - return vec<4, T, Q>(v1.x) ^= v2; - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> operator^(vec<4, T, Q> const& v1, vec<4, T, Q> const& v2) - { - return vec<4, T, Q>(v1) ^= v2; - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> operator<<(vec<4, T, Q> const& v, T scalar) - { - return vec<4, T, Q>(v) <<= scalar; - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> operator<<(vec<4, T, Q> const& v1, vec<1, T, Q> const& v2) - { - return vec<4, T, Q>(v1) <<= v2.x; - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> operator<<(T scalar, vec<4, T, Q> const& v) - { - return vec<4, T, Q>(scalar) <<= v; - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> operator<<(vec<1, T, Q> const& v1, vec<4, T, Q> const& v2) - { - return vec<4, T, Q>(v1.x) <<= v2; - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> operator<<(vec<4, T, Q> const& v1, vec<4, T, Q> const& v2) - { - return vec<4, T, Q>(v1) <<= v2; - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR 
vec<4, T, Q> operator>>(vec<4, T, Q> const& v, T scalar) - { - return vec<4, T, Q>(v) >>= scalar; - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> operator>>(vec<4, T, Q> const& v1, vec<1, T, Q> const& v2) - { - return vec<4, T, Q>(v1) >>= v2.x; - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> operator>>(T scalar, vec<4, T, Q> const& v) - { - return vec<4, T, Q>(scalar) >>= v; - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> operator>>(vec<1, T, Q> const& v1, vec<4, T, Q> const& v2) - { - return vec<4, T, Q>(v1.x) >>= v2; - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> operator>>(vec<4, T, Q> const& v1, vec<4, T, Q> const& v2) - { - return vec<4, T, Q>(v1) >>= v2; - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, T, Q> operator~(vec<4, T, Q> const& v) - { - return detail::compute_vec4_bitwise_not::value, sizeof(T) * 8, detail::is_aligned::value>::call(v); - } - - // -- Boolean operators -- - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR bool operator==(vec<4, T, Q> const& v1, vec<4, T, Q> const& v2) - { - return detail::compute_vec4_equal::value, sizeof(T) * 8, detail::is_aligned::value>::call(v1, v2); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR bool operator!=(vec<4, T, Q> const& v1, vec<4, T, Q> const& v2) - { - return detail::compute_vec4_nequal::value, sizeof(T) * 8, detail::is_aligned::value>::call(v1, v2); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, bool, Q> operator&&(vec<4, bool, Q> const& v1, vec<4, bool, Q> const& v2) - { - return vec<4, bool, Q>(v1.x && v2.x, v1.y && v2.y, v1.z && v2.z, v1.w && v2.w); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, bool, Q> operator||(vec<4, bool, Q> const& v1, vec<4, bool, Q> const& v2) - { - return vec<4, bool, Q>(v1.x || v2.x, v1.y || v2.y, v1.z || v2.z, v1.w || v2.w); - } -}//namespace glm - -#if GLM_CONFIG_SIMD == GLM_ENABLE -# include "type_vec4_simd.inl" -#endif diff --git a/third_party/glm/detail/type_vec4_simd.inl b/third_party/glm/detail/type_vec4_simd.inl deleted file mode 100755 index 29559b5..0000000 --- a/third_party/glm/detail/type_vec4_simd.inl +++ /dev/null @@ -1,775 +0,0 @@ -#if GLM_ARCH & GLM_ARCH_SSE2_BIT - -namespace glm{ -namespace detail -{ -# if GLM_CONFIG_SWIZZLE == GLM_SWIZZLE_OPERATOR - template - struct _swizzle_base1<4, float, Q, E0,E1,E2,E3, true> : public _swizzle_base0 - { - GLM_FUNC_QUALIFIER vec<4, float, Q> operator ()() const - { - __m128 data = *reinterpret_cast<__m128 const*>(&this->_buffer); - - vec<4, float, Q> Result; -# if GLM_ARCH & GLM_ARCH_AVX_BIT - Result.data = _mm_permute_ps(data, _MM_SHUFFLE(E3, E2, E1, E0)); -# else - Result.data = _mm_shuffle_ps(data, data, _MM_SHUFFLE(E3, E2, E1, E0)); -# endif - return Result; - } - }; - - template - struct _swizzle_base1<4, int, Q, E0,E1,E2,E3, true> : public _swizzle_base0 - { - GLM_FUNC_QUALIFIER vec<4, int, Q> operator ()() const - { - __m128i data = *reinterpret_cast<__m128i const*>(&this->_buffer); - - vec<4, int, Q> Result; - Result.data = _mm_shuffle_epi32(data, _MM_SHUFFLE(E3, E2, E1, E0)); - return Result; - } - }; - - template - struct _swizzle_base1<4, uint, Q, E0,E1,E2,E3, true> : public _swizzle_base0 - { - GLM_FUNC_QUALIFIER vec<4, uint, Q> operator ()() const - { - __m128i data = *reinterpret_cast<__m128i const*>(&this->_buffer); - - vec<4, uint, Q> Result; - Result.data = _mm_shuffle_epi32(data, _MM_SHUFFLE(E3, E2, E1, E0)); - return Result; - } - }; -# endif// GLM_CONFIG_SWIZZLE == GLM_SWIZZLE_OPERATOR - - template - struct 
compute_vec4_add - { - static vec<4, float, Q> call(vec<4, float, Q> const& a, vec<4, float, Q> const& b) - { - vec<4, float, Q> Result; - Result.data = _mm_add_ps(a.data, b.data); - return Result; - } - }; - -# if GLM_ARCH & GLM_ARCH_AVX_BIT - template - struct compute_vec4_add - { - static vec<4, double, Q> call(vec<4, double, Q> const& a, vec<4, double, Q> const& b) - { - vec<4, double, Q> Result; - Result.data = _mm256_add_pd(a.data, b.data); - return Result; - } - }; -# endif - - template - struct compute_vec4_sub - { - static vec<4, float, Q> call(vec<4, float, Q> const& a, vec<4, float, Q> const& b) - { - vec<4, float, Q> Result; - Result.data = _mm_sub_ps(a.data, b.data); - return Result; - } - }; - -# if GLM_ARCH & GLM_ARCH_AVX_BIT - template - struct compute_vec4_sub - { - static vec<4, double, Q> call(vec<4, double, Q> const& a, vec<4, double, Q> const& b) - { - vec<4, double, Q> Result; - Result.data = _mm256_sub_pd(a.data, b.data); - return Result; - } - }; -# endif - - template - struct compute_vec4_mul - { - static vec<4, float, Q> call(vec<4, float, Q> const& a, vec<4, float, Q> const& b) - { - vec<4, float, Q> Result; - Result.data = _mm_mul_ps(a.data, b.data); - return Result; - } - }; - -# if GLM_ARCH & GLM_ARCH_AVX_BIT - template - struct compute_vec4_mul - { - static vec<4, double, Q> call(vec<4, double, Q> const& a, vec<4, double, Q> const& b) - { - vec<4, double, Q> Result; - Result.data = _mm256_mul_pd(a.data, b.data); - return Result; - } - }; -# endif - - template - struct compute_vec4_div - { - static vec<4, float, Q> call(vec<4, float, Q> const& a, vec<4, float, Q> const& b) - { - vec<4, float, Q> Result; - Result.data = _mm_div_ps(a.data, b.data); - return Result; - } - }; - - # if GLM_ARCH & GLM_ARCH_AVX_BIT - template - struct compute_vec4_div - { - static vec<4, double, Q> call(vec<4, double, Q> const& a, vec<4, double, Q> const& b) - { - vec<4, double, Q> Result; - Result.data = _mm256_div_pd(a.data, b.data); - return Result; - } - }; -# endif - - template<> - struct compute_vec4_div - { - static vec<4, float, aligned_lowp> call(vec<4, float, aligned_lowp> const& a, vec<4, float, aligned_lowp> const& b) - { - vec<4, float, aligned_lowp> Result; - Result.data = _mm_mul_ps(a.data, _mm_rcp_ps(b.data)); - return Result; - } - }; - - template - struct compute_vec4_and - { - static vec<4, T, Q> call(vec<4, T, Q> const& a, vec<4, T, Q> const& b) - { - vec<4, T, Q> Result; - Result.data = _mm_and_si128(a.data, b.data); - return Result; - } - }; - -# if GLM_ARCH & GLM_ARCH_AVX2_BIT - template - struct compute_vec4_and - { - static vec<4, T, Q> call(vec<4, T, Q> const& a, vec<4, T, Q> const& b) - { - vec<4, T, Q> Result; - Result.data = _mm256_and_si256(a.data, b.data); - return Result; - } - }; -# endif - - template - struct compute_vec4_or - { - static vec<4, T, Q> call(vec<4, T, Q> const& a, vec<4, T, Q> const& b) - { - vec<4, T, Q> Result; - Result.data = _mm_or_si128(a.data, b.data); - return Result; - } - }; - -# if GLM_ARCH & GLM_ARCH_AVX2_BIT - template - struct compute_vec4_or - { - static vec<4, T, Q> call(vec<4, T, Q> const& a, vec<4, T, Q> const& b) - { - vec<4, T, Q> Result; - Result.data = _mm256_or_si256(a.data, b.data); - return Result; - } - }; -# endif - - template - struct compute_vec4_xor - { - static vec<4, T, Q> call(vec<4, T, Q> const& a, vec<4, T, Q> const& b) - { - vec<4, T, Q> Result; - Result.data = _mm_xor_si128(a.data, b.data); - return Result; - } - }; - -# if GLM_ARCH & GLM_ARCH_AVX2_BIT - template - struct compute_vec4_xor - { - static 
vec<4, T, Q> call(vec<4, T, Q> const& a, vec<4, T, Q> const& b) - { - vec<4, T, Q> Result; - Result.data = _mm256_xor_si256(a.data, b.data); - return Result; - } - }; -# endif - - template - struct compute_vec4_shift_left - { - static vec<4, T, Q> call(vec<4, T, Q> const& a, vec<4, T, Q> const& b) - { - vec<4, T, Q> Result; - Result.data = _mm_sll_epi32(a.data, b.data); - return Result; - } - }; - -# if GLM_ARCH & GLM_ARCH_AVX2_BIT - template - struct compute_vec4_shift_left - { - static vec<4, T, Q> call(vec<4, T, Q> const& a, vec<4, T, Q> const& b) - { - vec<4, T, Q> Result; - Result.data = _mm256_sll_epi64(a.data, b.data); - return Result; - } - }; -# endif - - template - struct compute_vec4_shift_right - { - static vec<4, T, Q> call(vec<4, T, Q> const& a, vec<4, T, Q> const& b) - { - vec<4, T, Q> Result; - Result.data = _mm_srl_epi32(a.data, b.data); - return Result; - } - }; - -# if GLM_ARCH & GLM_ARCH_AVX2_BIT - template - struct compute_vec4_shift_right - { - static vec<4, T, Q> call(vec<4, T, Q> const& a, vec<4, T, Q> const& b) - { - vec<4, T, Q> Result; - Result.data = _mm256_srl_epi64(a.data, b.data); - return Result; - } - }; -# endif - - template - struct compute_vec4_bitwise_not - { - static vec<4, T, Q> call(vec<4, T, Q> const& v) - { - vec<4, T, Q> Result; - Result.data = _mm_xor_si128(v.data, _mm_set1_epi32(-1)); - return Result; - } - }; - -# if GLM_ARCH & GLM_ARCH_AVX2_BIT - template - struct compute_vec4_bitwise_not - { - static vec<4, T, Q> call(vec<4, T, Q> const& v) - { - vec<4, T, Q> Result; - Result.data = _mm256_xor_si256(v.data, _mm_set1_epi32(-1)); - return Result; - } - }; -# endif - - template - struct compute_vec4_equal - { - static bool call(vec<4, float, Q> const& v1, vec<4, float, Q> const& v2) - { - return _mm_movemask_ps(_mm_cmpeq_ps(v1.data, v2.data)) != 0; - } - }; - -# if GLM_ARCH & GLM_ARCH_SSE41_BIT - template - struct compute_vec4_equal - { - static bool call(vec<4, int, Q> const& v1, vec<4, int, Q> const& v2) - { - //return _mm_movemask_epi8(_mm_cmpeq_epi32(v1.data, v2.data)) != 0; - __m128i neq = _mm_xor_si128(v1.data, v2.data); - return _mm_test_all_zeros(neq, neq) == 0; - } - }; -# endif - - template - struct compute_vec4_nequal - { - static bool call(vec<4, float, Q> const& v1, vec<4, float, Q> const& v2) - { - return _mm_movemask_ps(_mm_cmpneq_ps(v1.data, v2.data)) != 0; - } - }; - -# if GLM_ARCH & GLM_ARCH_SSE41_BIT - template - struct compute_vec4_nequal - { - static bool call(vec<4, int, Q> const& v1, vec<4, int, Q> const& v2) - { - //return _mm_movemask_epi8(_mm_cmpneq_epi32(v1.data, v2.data)) != 0; - __m128i neq = _mm_xor_si128(v1.data, v2.data); - return _mm_test_all_zeros(neq, neq) != 0; - } - }; -# endif -}//namespace detail - - template<> - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, float, aligned_lowp>::vec(float _s) : - data(_mm_set1_ps(_s)) - {} - - template<> - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, float, aligned_mediump>::vec(float _s) : - data(_mm_set1_ps(_s)) - {} - - template<> - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, float, aligned_highp>::vec(float _s) : - data(_mm_set1_ps(_s)) - {} - -# if GLM_ARCH & GLM_ARCH_AVX_BIT - template<> - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, double, aligned_lowp>::vec(double _s) : - data(_mm256_set1_pd(_s)) - {} - - template<> - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, double, aligned_mediump>::vec(double _s) : - data(_mm256_set1_pd(_s)) - {} - - template<> - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, double, aligned_highp>::vec(double _s) : - data(_mm256_set1_pd(_s)) - {} -# endif - - template<> - 
GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, int, aligned_lowp>::vec(int _s) : - data(_mm_set1_epi32(_s)) - {} - - template<> - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, int, aligned_mediump>::vec(int _s) : - data(_mm_set1_epi32(_s)) - {} - - template<> - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, int, aligned_highp>::vec(int _s) : - data(_mm_set1_epi32(_s)) - {} - -# if GLM_ARCH & GLM_ARCH_AVX2_BIT - template<> - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, detail::int64, aligned_lowp>::vec(detail::int64 _s) : - data(_mm256_set1_epi64x(_s)) - {} - - template<> - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, detail::int64, aligned_mediump>::vec(detail::int64 _s) : - data(_mm256_set1_epi64x(_s)) - {} - - template<> - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, detail::int64, aligned_highp>::vec(detail::int64 _s) : - data(_mm256_set1_epi64x(_s)) - {} -# endif - - template<> - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, float, aligned_lowp>::vec(float _x, float _y, float _z, float _w) : - data(_mm_set_ps(_w, _z, _y, _x)) - {} - - template<> - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, float, aligned_mediump>::vec(float _x, float _y, float _z, float _w) : - data(_mm_set_ps(_w, _z, _y, _x)) - {} - - template<> - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, float, aligned_highp>::vec(float _x, float _y, float _z, float _w) : - data(_mm_set_ps(_w, _z, _y, _x)) - {} - - template<> - template<> - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, int, aligned_lowp>::vec(int _x, int _y, int _z, int _w) : - data(_mm_set_epi32(_w, _z, _y, _x)) - {} - - template<> - template<> - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, int, aligned_mediump>::vec(int _x, int _y, int _z, int _w) : - data(_mm_set_epi32(_w, _z, _y, _x)) - {} - - template<> - template<> - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, int, aligned_highp>::vec(int _x, int _y, int _z, int _w) : - data(_mm_set_epi32(_w, _z, _y, _x)) - {} - - template<> - template<> - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, float, aligned_lowp>::vec(int _x, int _y, int _z, int _w) : - data(_mm_cvtepi32_ps(_mm_set_epi32(_w, _z, _y, _x))) - {} - - template<> - template<> - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, float, aligned_mediump>::vec(int _x, int _y, int _z, int _w) : - data(_mm_cvtepi32_ps(_mm_set_epi32(_w, _z, _y, _x))) - {} - - template<> - template<> - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, float, aligned_highp>::vec(int _x, int _y, int _z, int _w) : - data(_mm_cvtepi32_ps(_mm_set_epi32(_w, _z, _y, _x))) - {} -}//namespace glm - -#endif//GLM_ARCH & GLM_ARCH_SSE2_BIT - -#if GLM_ARCH & GLM_ARCH_NEON_BIT -namespace glm { -namespace detail { - - template - struct compute_vec4_add - { - static - vec<4, float, Q> - call(vec<4, float, Q> const& a, vec<4, float, Q> const& b) - { - vec<4, float, Q> Result; - Result.data = vaddq_f32(a.data, b.data); - return Result; - } - }; - - template - struct compute_vec4_add - { - static - vec<4, uint, Q> - call(vec<4, uint, Q> const& a, vec<4, uint, Q> const& b) - { - vec<4, uint, Q> Result; - Result.data = vaddq_u32(a.data, b.data); - return Result; - } - }; - - template - struct compute_vec4_add - { - static - vec<4, int, Q> - call(vec<4, int, Q> const& a, vec<4, int, Q> const& b) - { - vec<4, uint, Q> Result; - Result.data = vaddq_s32(a.data, b.data); - return Result; - } - }; - - template - struct compute_vec4_sub - { - static vec<4, float, Q> call(vec<4, float, Q> const& a, vec<4, float, Q> const& b) - { - vec<4, float, Q> Result; - Result.data = vsubq_f32(a.data, b.data); - return Result; - } - }; - - template - struct compute_vec4_sub - { - static vec<4, uint, Q> 
call(vec<4, uint, Q> const& a, vec<4, uint, Q> const& b) - { - vec<4, uint, Q> Result; - Result.data = vsubq_u32(a.data, b.data); - return Result; - } - }; - - template - struct compute_vec4_sub - { - static vec<4, int, Q> call(vec<4, int, Q> const& a, vec<4, int, Q> const& b) - { - vec<4, int, Q> Result; - Result.data = vsubq_s32(a.data, b.data); - return Result; - } - }; - - template - struct compute_vec4_mul - { - static vec<4, float, Q> call(vec<4, float, Q> const& a, vec<4, float, Q> const& b) - { - vec<4, float, Q> Result; - Result.data = vmulq_f32(a.data, b.data); - return Result; - } - }; - - template - struct compute_vec4_mul - { - static vec<4, uint, Q> call(vec<4, uint, Q> const& a, vec<4, uint, Q> const& b) - { - vec<4, uint, Q> Result; - Result.data = vmulq_u32(a.data, b.data); - return Result; - } - }; - - template - struct compute_vec4_mul - { - static vec<4, int, Q> call(vec<4, int, Q> const& a, vec<4, int, Q> const& b) - { - vec<4, int, Q> Result; - Result.data = vmulq_s32(a.data, b.data); - return Result; - } - }; - - template - struct compute_vec4_div - { - static vec<4, float, Q> call(vec<4, float, Q> const& a, vec<4, float, Q> const& b) - { - vec<4, float, Q> Result; - Result.data = vdivq_f32(a.data, b.data); - return Result; - } - }; - - template - struct compute_vec4_equal - { - static bool call(vec<4, float, Q> const& v1, vec<4, float, Q> const& v2) - { - uint32x4_t cmp = vceqq_f32(v1.data, v2.data); -#if GLM_ARCH & GLM_ARCH_ARMV8_BIT - cmp = vpminq_u32(cmp, cmp); - cmp = vpminq_u32(cmp, cmp); - uint32_t r = cmp[0]; -#else - uint32x2_t cmpx2 = vpmin_u32(vget_low_f32(cmp), vget_high_f32(cmp)); - cmpx2 = vpmin_u32(cmpx2, cmpx2); - uint32_t r = cmpx2[0]; -#endif - return r == ~0u; - } - }; - - template - struct compute_vec4_equal - { - static bool call(vec<4, uint, Q> const& v1, vec<4, uint, Q> const& v2) - { - uint32x4_t cmp = vceqq_u32(v1.data, v2.data); -#if GLM_ARCH & GLM_ARCH_ARMV8_BIT - cmp = vpminq_u32(cmp, cmp); - cmp = vpminq_u32(cmp, cmp); - uint32_t r = cmp[0]; -#else - uint32x2_t cmpx2 = vpmin_u32(vget_low_f32(cmp), vget_high_f32(cmp)); - cmpx2 = vpmin_u32(cmpx2, cmpx2); - uint32_t r = cmpx2[0]; -#endif - return r == ~0u; - } - }; - - template - struct compute_vec4_equal - { - static bool call(vec<4, int, Q> const& v1, vec<4, int, Q> const& v2) - { - uint32x4_t cmp = vceqq_s32(v1.data, v2.data); -#if GLM_ARCH & GLM_ARCH_ARMV8_BIT - cmp = vpminq_u32(cmp, cmp); - cmp = vpminq_u32(cmp, cmp); - uint32_t r = cmp[0]; -#else - uint32x2_t cmpx2 = vpmin_u32(vget_low_f32(cmp), vget_high_f32(cmp)); - cmpx2 = vpmin_u32(cmpx2, cmpx2); - uint32_t r = cmpx2[0]; -#endif - return r == ~0u; - } - }; - - template - struct compute_vec4_nequal - { - static bool call(vec<4, float, Q> const& v1, vec<4, float, Q> const& v2) - { - return !compute_vec4_equal::call(v1, v2); - } - }; - - template - struct compute_vec4_nequal - { - static bool call(vec<4, uint, Q> const& v1, vec<4, uint, Q> const& v2) - { - return !compute_vec4_equal::call(v1, v2); - } - }; - - template - struct compute_vec4_nequal - { - static bool call(vec<4, int, Q> const& v1, vec<4, int, Q> const& v2) - { - return !compute_vec4_equal::call(v1, v2); - } - }; - -}//namespace detail - -#if !GLM_CONFIG_XYZW_ONLY - template<> - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, float, aligned_lowp>::vec(float _s) : - data(vdupq_n_f32(_s)) - {} - - template<> - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, float, aligned_mediump>::vec(float _s) : - data(vdupq_n_f32(_s)) - {} - - template<> - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, float, 
aligned_highp>::vec(float _s) : - data(vdupq_n_f32(_s)) - {} - - template<> - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, int, aligned_lowp>::vec(int _s) : - data(vdupq_n_s32(_s)) - {} - - template<> - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, int, aligned_mediump>::vec(int _s) : - data(vdupq_n_s32(_s)) - {} - - template<> - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, int, aligned_highp>::vec(int _s) : - data(vdupq_n_s32(_s)) - {} - - template<> - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, uint, aligned_lowp>::vec(uint _s) : - data(vdupq_n_u32(_s)) - {} - - template<> - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, uint, aligned_mediump>::vec(uint _s) : - data(vdupq_n_u32(_s)) - {} - - template<> - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, uint, aligned_highp>::vec(uint _s) : - data(vdupq_n_u32(_s)) - {} - - template<> - template<> - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, float, aligned_highp>::vec(const vec<4, float, aligned_highp>& rhs) : - data(rhs.data) - {} - - template<> - template<> - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, float, aligned_highp>::vec(const vec<4, int, aligned_highp>& rhs) : - data(vcvtq_f32_s32(rhs.data)) - {} - - template<> - template<> - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, float, aligned_highp>::vec(const vec<4, uint, aligned_highp>& rhs) : - data(vcvtq_f32_u32(rhs.data)) - {} - - template<> - template<> - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, float, aligned_lowp>::vec(int _x, int _y, int _z, int _w) : - data(vcvtq_f32_s32(vec<4, int, aligned_lowp>(_x, _y, _z, _w).data)) - {} - - template<> - template<> - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, float, aligned_mediump>::vec(int _x, int _y, int _z, int _w) : - data(vcvtq_f32_s32(vec<4, int, aligned_mediump>(_x, _y, _z, _w).data)) - {} - - template<> - template<> - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, float, aligned_highp>::vec(int _x, int _y, int _z, int _w) : - data(vcvtq_f32_s32(vec<4, int, aligned_highp>(_x, _y, _z, _w).data)) - {} - - template<> - template<> - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, float, aligned_lowp>::vec(uint _x, uint _y, uint _z, uint _w) : - data(vcvtq_f32_u32(vec<4, uint, aligned_lowp>(_x, _y, _z, _w).data)) - {} - - template<> - template<> - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, float, aligned_mediump>::vec(uint _x, uint _y, uint _z, uint _w) : - data(vcvtq_f32_u32(vec<4, uint, aligned_mediump>(_x, _y, _z, _w).data)) - {} - - - template<> - template<> - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec<4, float, aligned_highp>::vec(uint _x, uint _y, uint _z, uint _w) : - data(vcvtq_f32_u32(vec<4, uint, aligned_highp>(_x, _y, _z, _w).data)) - {} - -#endif -}//namespace glm - -#endif diff --git a/third_party/glm/exponential.hpp b/third_party/glm/exponential.hpp deleted file mode 100755 index f8fb886..0000000 --- a/third_party/glm/exponential.hpp +++ /dev/null @@ -1,110 +0,0 @@ -/// @ref core -/// @file glm/exponential.hpp -/// -/// @see GLSL 4.20.8 specification, section 8.2 Exponential Functions -/// -/// @defgroup core_func_exponential Exponential functions -/// @ingroup core -/// -/// Provides GLSL exponential functions -/// -/// These all operate component-wise. The description is per component. -/// -/// Include to use these core features. - -#pragma once - -#include "detail/type_vec1.hpp" -#include "detail/type_vec2.hpp" -#include "detail/type_vec3.hpp" -#include "detail/type_vec4.hpp" -#include - -namespace glm -{ - /// @addtogroup core_func_exponential - /// @{ - - /// Returns 'base' raised to the power 'exponent'. - /// - /// @param base Floating point value. 
pow function is defined for input values of 'base' defined in the range (inf-, inf+) in the limit of the type qualifier. - /// @param exponent Floating point value representing the 'exponent'. - /// - /// @see GLSL pow man page - /// @see GLSL 4.20.8 specification, section 8.2 Exponential Functions - template - GLM_FUNC_DECL vec pow(vec const& base, vec const& exponent); - - /// Returns the natural exponentiation of x, i.e., e^x. - /// - /// @param v exp function is defined for input values of v defined in the range (inf-, inf+) in the limit of the type qualifier. - /// @tparam L An integer between 1 and 4 included that qualify the dimension of the vector. - /// @tparam T Floating-point scalar types. - /// - /// @see GLSL exp man page - /// @see GLSL 4.20.8 specification, section 8.2 Exponential Functions - template - GLM_FUNC_DECL vec exp(vec const& v); - - /// Returns the natural logarithm of v, i.e., - /// returns the value y which satisfies the equation x = e^y. - /// Results are undefined if v <= 0. - /// - /// @param v log function is defined for input values of v defined in the range (0, inf+) in the limit of the type qualifier. - /// @tparam L An integer between 1 and 4 included that qualify the dimension of the vector. - /// @tparam T Floating-point scalar types. - /// - /// @see GLSL log man page - /// @see GLSL 4.20.8 specification, section 8.2 Exponential Functions - template - GLM_FUNC_DECL vec log(vec const& v); - - /// Returns 2 raised to the v power. - /// - /// @param v exp2 function is defined for input values of v defined in the range (inf-, inf+) in the limit of the type qualifier. - /// @tparam L An integer between 1 and 4 included that qualify the dimension of the vector. - /// @tparam T Floating-point scalar types. - /// - /// @see GLSL exp2 man page - /// @see GLSL 4.20.8 specification, section 8.2 Exponential Functions - template - GLM_FUNC_DECL vec exp2(vec const& v); - - /// Returns the base 2 log of x, i.e., returns the value y, - /// which satisfies the equation x = 2 ^ y. - /// - /// @param v log2 function is defined for input values of v defined in the range (0, inf+) in the limit of the type qualifier. - /// @tparam L An integer between 1 and 4 included that qualify the dimension of the vector. - /// @tparam T Floating-point scalar types. - /// - /// @see GLSL log2 man page - /// @see GLSL 4.20.8 specification, section 8.2 Exponential Functions - template - GLM_FUNC_DECL vec log2(vec const& v); - - /// Returns the positive square root of v. - /// - /// @param v sqrt function is defined for input values of v defined in the range [0, inf+) in the limit of the type qualifier. - /// @tparam L An integer between 1 and 4 included that qualify the dimension of the vector. - /// @tparam T Floating-point scalar types. - /// - /// @see GLSL sqrt man page - /// @see GLSL 4.20.8 specification, section 8.2 Exponential Functions - template - GLM_FUNC_DECL vec sqrt(vec const& v); - - /// Returns the reciprocal of the positive square root of v. - /// - /// @param v inversesqrt function is defined for input values of v defined in the range [0, inf+) in the limit of the type qualifier. - /// @tparam L An integer between 1 and 4 included that qualify the dimension of the vector. - /// @tparam T Floating-point scalar types. 
- /// - /// @see GLSL inversesqrt man page - /// @see GLSL 4.20.8 specification, section 8.2 Exponential Functions - template - GLM_FUNC_DECL vec inversesqrt(vec const& v); - - /// @} -}//namespace glm - -#include "detail/func_exponential.inl" diff --git a/third_party/glm/ext.hpp b/third_party/glm/ext.hpp deleted file mode 100755 index 3bc8db2..0000000 --- a/third_party/glm/ext.hpp +++ /dev/null @@ -1,196 +0,0 @@ -/// @file glm/ext.hpp -/// -/// @ref core (Dependence) - -#include "detail/setup.hpp" - -#pragma once - -#include "glm.hpp" - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_MESSAGE_EXT_INCLUDED_DISPLAYED) -# define GLM_MESSAGE_EXT_INCLUDED_DISPLAYED -# pragma message("GLM: All extensions included (not recommended)") -#endif//GLM_MESSAGES - -#include "./ext/matrix_double2x2.hpp" -#include "./ext/matrix_double2x2_precision.hpp" -#include "./ext/matrix_double2x3.hpp" -#include "./ext/matrix_double2x3_precision.hpp" -#include "./ext/matrix_double2x4.hpp" -#include "./ext/matrix_double2x4_precision.hpp" -#include "./ext/matrix_double3x2.hpp" -#include "./ext/matrix_double3x2_precision.hpp" -#include "./ext/matrix_double3x3.hpp" -#include "./ext/matrix_double3x3_precision.hpp" -#include "./ext/matrix_double3x4.hpp" -#include "./ext/matrix_double3x4_precision.hpp" -#include "./ext/matrix_double4x2.hpp" -#include "./ext/matrix_double4x2_precision.hpp" -#include "./ext/matrix_double4x3.hpp" -#include "./ext/matrix_double4x3_precision.hpp" -#include "./ext/matrix_double4x4.hpp" -#include "./ext/matrix_double4x4_precision.hpp" - -#include "./ext/matrix_float2x2.hpp" -#include "./ext/matrix_float2x2_precision.hpp" -#include "./ext/matrix_float2x3.hpp" -#include "./ext/matrix_float2x3_precision.hpp" -#include "./ext/matrix_float2x4.hpp" -#include "./ext/matrix_float2x4_precision.hpp" -#include "./ext/matrix_float3x2.hpp" -#include "./ext/matrix_float3x2_precision.hpp" -#include "./ext/matrix_float3x3.hpp" -#include "./ext/matrix_float3x3_precision.hpp" -#include "./ext/matrix_float3x4.hpp" -#include "./ext/matrix_float3x4_precision.hpp" -#include "./ext/matrix_float4x2.hpp" -#include "./ext/matrix_float4x2_precision.hpp" -#include "./ext/matrix_float4x3.hpp" -#include "./ext/matrix_float4x3_precision.hpp" -#include "./ext/matrix_float4x4.hpp" -#include "./ext/matrix_float4x4_precision.hpp" - -#include "./ext/matrix_relational.hpp" - -#include "./ext/quaternion_double.hpp" -#include "./ext/quaternion_double_precision.hpp" -#include "./ext/quaternion_float.hpp" -#include "./ext/quaternion_float_precision.hpp" -#include "./ext/quaternion_geometric.hpp" -#include "./ext/quaternion_relational.hpp" - -#include "./ext/scalar_constants.hpp" -#include "./ext/scalar_int_sized.hpp" -#include "./ext/scalar_relational.hpp" - -#include "./ext/vector_bool1.hpp" -#include "./ext/vector_bool1_precision.hpp" -#include "./ext/vector_bool2.hpp" -#include "./ext/vector_bool2_precision.hpp" -#include "./ext/vector_bool3.hpp" -#include "./ext/vector_bool3_precision.hpp" -#include "./ext/vector_bool4.hpp" -#include "./ext/vector_bool4_precision.hpp" - -#include "./ext/vector_double1.hpp" -#include "./ext/vector_double1_precision.hpp" -#include "./ext/vector_double2.hpp" -#include "./ext/vector_double2_precision.hpp" -#include "./ext/vector_double3.hpp" -#include "./ext/vector_double3_precision.hpp" -#include "./ext/vector_double4.hpp" -#include "./ext/vector_double4_precision.hpp" - -#include "./ext/vector_float1.hpp" -#include "./ext/vector_float1_precision.hpp" -#include "./ext/vector_float2.hpp" -#include 
"./ext/vector_float2_precision.hpp" -#include "./ext/vector_float3.hpp" -#include "./ext/vector_float3_precision.hpp" -#include "./ext/vector_float4.hpp" -#include "./ext/vector_float4_precision.hpp" - -#include "./ext/vector_int1.hpp" -#include "./ext/vector_int1_precision.hpp" -#include "./ext/vector_int2.hpp" -#include "./ext/vector_int2_precision.hpp" -#include "./ext/vector_int3.hpp" -#include "./ext/vector_int3_precision.hpp" -#include "./ext/vector_int4.hpp" -#include "./ext/vector_int4_precision.hpp" - -#include "./ext/vector_relational.hpp" - -#include "./ext/vector_uint1.hpp" -#include "./ext/vector_uint1_precision.hpp" -#include "./ext/vector_uint2.hpp" -#include "./ext/vector_uint2_precision.hpp" -#include "./ext/vector_uint3.hpp" -#include "./ext/vector_uint3_precision.hpp" -#include "./ext/vector_uint4.hpp" -#include "./ext/vector_uint4_precision.hpp" - -#include "./gtc/bitfield.hpp" -#include "./gtc/color_space.hpp" -#include "./gtc/constants.hpp" -#include "./gtc/epsilon.hpp" -#include "./gtc/integer.hpp" -#include "./gtc/matrix_access.hpp" -#include "./gtc/matrix_integer.hpp" -#include "./gtc/matrix_inverse.hpp" -#include "./gtc/matrix_transform.hpp" -#include "./gtc/noise.hpp" -#include "./gtc/packing.hpp" -#include "./gtc/quaternion.hpp" -#include "./gtc/random.hpp" -#include "./gtc/reciprocal.hpp" -#include "./gtc/round.hpp" -#include "./gtc/type_precision.hpp" -#include "./gtc/type_ptr.hpp" -#include "./gtc/ulp.hpp" -#include "./gtc/vec1.hpp" -#if GLM_CONFIG_ALIGNED_GENTYPES == GLM_ENABLE -# include "./gtc/type_aligned.hpp" -#endif - -#ifdef GLM_ENABLE_EXPERIMENTAL -#include "./gtx/associated_min_max.hpp" -#include "./gtx/bit.hpp" -#include "./gtx/closest_point.hpp" -#include "./gtx/color_encoding.hpp" -#include "./gtx/color_space.hpp" -#include "./gtx/color_space_YCoCg.hpp" -#include "./gtx/compatibility.hpp" -#include "./gtx/component_wise.hpp" -#include "./gtx/dual_quaternion.hpp" -#include "./gtx/euler_angles.hpp" -#include "./gtx/extend.hpp" -#include "./gtx/extended_min_max.hpp" -#include "./gtx/fast_exponential.hpp" -#include "./gtx/fast_square_root.hpp" -#include "./gtx/fast_trigonometry.hpp" -#include "./gtx/functions.hpp" -#include "./gtx/gradient_paint.hpp" -#include "./gtx/handed_coordinate_space.hpp" -#include "./gtx/integer.hpp" -#include "./gtx/intersect.hpp" -#include "./gtx/log_base.hpp" -#include "./gtx/matrix_cross_product.hpp" -#include "./gtx/matrix_interpolation.hpp" -#include "./gtx/matrix_major_storage.hpp" -#include "./gtx/matrix_operation.hpp" -#include "./gtx/matrix_query.hpp" -#include "./gtx/mixed_product.hpp" -#include "./gtx/norm.hpp" -#include "./gtx/normal.hpp" -#include "./gtx/normalize_dot.hpp" -#include "./gtx/number_precision.hpp" -#include "./gtx/optimum_pow.hpp" -#include "./gtx/orthonormalize.hpp" -#include "./gtx/perpendicular.hpp" -#include "./gtx/polar_coordinates.hpp" -#include "./gtx/projection.hpp" -#include "./gtx/quaternion.hpp" -#include "./gtx/raw_data.hpp" -#include "./gtx/rotate_vector.hpp" -#include "./gtx/spline.hpp" -#include "./gtx/std_based_type.hpp" -#if !(GLM_COMPILER & GLM_COMPILER_CUDA) -# include "./gtx/string_cast.hpp" -#endif -#include "./gtx/transform.hpp" -#include "./gtx/transform2.hpp" -#include "./gtx/vec_swizzle.hpp" -#include "./gtx/vector_angle.hpp" -#include "./gtx/vector_query.hpp" -#include "./gtx/wrap.hpp" - -#if GLM_HAS_TEMPLATE_ALIASES -# include "./gtx/scalar_multiplication.hpp" -#endif - -#if GLM_HAS_RANGE_FOR -# include "./gtx/range.hpp" -#endif -#endif//GLM_ENABLE_EXPERIMENTAL diff --git 
a/third_party/glm/ext/matrix_clip_space.hpp b/third_party/glm/ext/matrix_clip_space.hpp deleted file mode 100755 index c3874f2..0000000 --- a/third_party/glm/ext/matrix_clip_space.hpp +++ /dev/null @@ -1,522 +0,0 @@ -/// @ref ext_matrix_clip_space -/// @file glm/ext/matrix_clip_space.hpp -/// -/// @defgroup ext_matrix_clip_space GLM_EXT_matrix_clip_space -/// @ingroup ext -/// -/// Defines functions that generate clip space transformation matrices. -/// -/// The matrices generated by this extension use standard OpenGL fixed-function -/// conventions. For example, the lookAt function generates a transform from world -/// space into the specific eye space that the projective matrix functions -/// (perspective, ortho, etc) are designed to expect. The OpenGL compatibility -/// specifications defines the particular layout of this eye space. -/// -/// Include to use the features of this extension. -/// -/// @see ext_matrix_transform -/// @see ext_matrix_projection - -#pragma once - -// Dependencies -#include "../ext/scalar_constants.hpp" -#include "../geometric.hpp" -#include "../trigonometric.hpp" - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# pragma message("GLM: GLM_EXT_matrix_clip_space extension included") -#endif - -namespace glm -{ - /// @addtogroup ext_matrix_clip_space - /// @{ - - /// Creates a matrix for projecting two-dimensional coordinates onto the screen. - /// - /// @tparam T A floating-point scalar type - /// - /// @see - glm::ortho(T const& left, T const& right, T const& bottom, T const& top, T const& zNear, T const& zFar) - /// @see gluOrtho2D man page - template - GLM_FUNC_DECL mat<4, 4, T, defaultp> ortho( - T left, T right, T bottom, T top); - - /// Creates a matrix for an orthographic parallel viewing volume, using left-handed coordinates. - /// The near and far clip planes correspond to z normalized device coordinates of 0 and +1 respectively. (Direct3D clip volume definition) - /// - /// @tparam T A floating-point scalar type - /// - /// @see - glm::ortho(T const& left, T const& right, T const& bottom, T const& top) - template - GLM_FUNC_DECL mat<4, 4, T, defaultp> orthoLH_ZO( - T left, T right, T bottom, T top, T zNear, T zFar); - - /// Creates a matrix for an orthographic parallel viewing volume using right-handed coordinates. - /// The near and far clip planes correspond to z normalized device coordinates of -1 and +1 respectively. (OpenGL clip volume definition) - /// - /// @tparam T A floating-point scalar type - /// - /// @see - glm::ortho(T const& left, T const& right, T const& bottom, T const& top) - template - GLM_FUNC_DECL mat<4, 4, T, defaultp> orthoLH_NO( - T left, T right, T bottom, T top, T zNear, T zFar); - - /// Creates a matrix for an orthographic parallel viewing volume, using left-handed coordinates. - /// The near and far clip planes correspond to z normalized device coordinates of 0 and +1 respectively. (Direct3D clip volume definition) - /// - /// @tparam T A floating-point scalar type - /// - /// @see - glm::ortho(T const& left, T const& right, T const& bottom, T const& top) - template - GLM_FUNC_DECL mat<4, 4, T, defaultp> orthoRH_ZO( - T left, T right, T bottom, T top, T zNear, T zFar); - - /// Creates a matrix for an orthographic parallel viewing volume, using right-handed coordinates. - /// The near and far clip planes correspond to z normalized device coordinates of -1 and +1 respectively. 
(OpenGL clip volume definition) - /// - /// @tparam T A floating-point scalar type - /// - /// @see - glm::ortho(T const& left, T const& right, T const& bottom, T const& top) - template - GLM_FUNC_DECL mat<4, 4, T, defaultp> orthoRH_NO( - T left, T right, T bottom, T top, T zNear, T zFar); - - /// Creates a matrix for an orthographic parallel viewing volume, using left-handed coordinates. - /// The near and far clip planes correspond to z normalized device coordinates of 0 and +1 respectively. (Direct3D clip volume definition) - /// - /// @tparam T A floating-point scalar type - /// - /// @see - glm::ortho(T const& left, T const& right, T const& bottom, T const& top) - template - GLM_FUNC_DECL mat<4, 4, T, defaultp> orthoZO( - T left, T right, T bottom, T top, T zNear, T zFar); - - /// Creates a matrix for an orthographic parallel viewing volume, using left-handed coordinates if GLM_FORCE_LEFT_HANDED if defined or right-handed coordinates otherwise. - /// The near and far clip planes correspond to z normalized device coordinates of -1 and +1 respectively. (OpenGL clip volume definition) - /// - /// @tparam T A floating-point scalar type - /// - /// @see - glm::ortho(T const& left, T const& right, T const& bottom, T const& top) - template - GLM_FUNC_DECL mat<4, 4, T, defaultp> orthoNO( - T left, T right, T bottom, T top, T zNear, T zFar); - - /// Creates a matrix for an orthographic parallel viewing volume, using left-handed coordinates. - /// If GLM_FORCE_DEPTH_ZERO_TO_ONE is defined, the near and far clip planes correspond to z normalized device coordinates of 0 and +1 respectively. (Direct3D clip volume definition) - /// Otherwise, the near and far clip planes correspond to z normalized device coordinates of -1 and +1 respectively. (OpenGL clip volume definition) - /// - /// @tparam T A floating-point scalar type - /// - /// @see - glm::ortho(T const& left, T const& right, T const& bottom, T const& top) - template - GLM_FUNC_DECL mat<4, 4, T, defaultp> orthoLH( - T left, T right, T bottom, T top, T zNear, T zFar); - - /// Creates a matrix for an orthographic parallel viewing volume, using right-handed coordinates. - /// If GLM_FORCE_DEPTH_ZERO_TO_ONE is defined, the near and far clip planes correspond to z normalized device coordinates of 0 and +1 respectively. (Direct3D clip volume definition) - /// Otherwise, the near and far clip planes correspond to z normalized device coordinates of -1 and +1 respectively. (OpenGL clip volume definition) - /// - /// @tparam T A floating-point scalar type - /// - /// @see - glm::ortho(T const& left, T const& right, T const& bottom, T const& top) - template - GLM_FUNC_DECL mat<4, 4, T, defaultp> orthoRH( - T left, T right, T bottom, T top, T zNear, T zFar); - - /// Creates a matrix for an orthographic parallel viewing volume, using the default handedness and default near and far clip planes definition. - /// To change default handedness use GLM_FORCE_LEFT_HANDED. To change default near and far clip planes definition use GLM_FORCE_DEPTH_ZERO_TO_ONE. - /// - /// @tparam T A floating-point scalar type - /// - /// @see - glm::ortho(T const& left, T const& right, T const& bottom, T const& top) - /// @see glOrtho man page - template - GLM_FUNC_DECL mat<4, 4, T, defaultp> ortho( - T left, T right, T bottom, T top, T zNear, T zFar); - - /// Creates a left handed frustum matrix. - /// The near and far clip planes correspond to z normalized device coordinates of 0 and +1 respectively. 
(Direct3D clip volume definition) - /// - /// @tparam T A floating-point scalar type - template - GLM_FUNC_DECL mat<4, 4, T, defaultp> frustumLH_ZO( - T left, T right, T bottom, T top, T near, T far); - - /// Creates a left handed frustum matrix. - /// The near and far clip planes correspond to z normalized device coordinates of -1 and +1 respectively. (OpenGL clip volume definition) - /// - /// @tparam T A floating-point scalar type - template - GLM_FUNC_DECL mat<4, 4, T, defaultp> frustumLH_NO( - T left, T right, T bottom, T top, T near, T far); - - /// Creates a right handed frustum matrix. - /// The near and far clip planes correspond to z normalized device coordinates of 0 and +1 respectively. (Direct3D clip volume definition) - /// - /// @tparam T A floating-point scalar type - template - GLM_FUNC_DECL mat<4, 4, T, defaultp> frustumRH_ZO( - T left, T right, T bottom, T top, T near, T far); - - /// Creates a right handed frustum matrix. - /// The near and far clip planes correspond to z normalized device coordinates of -1 and +1 respectively. (OpenGL clip volume definition) - /// - /// @tparam T A floating-point scalar type - template - GLM_FUNC_DECL mat<4, 4, T, defaultp> frustumRH_NO( - T left, T right, T bottom, T top, T near, T far); - - /// Creates a frustum matrix using left-handed coordinates if GLM_FORCE_LEFT_HANDED if defined or right-handed coordinates otherwise. - /// The near and far clip planes correspond to z normalized device coordinates of 0 and +1 respectively. (Direct3D clip volume definition) - /// - /// @tparam T A floating-point scalar type - template - GLM_FUNC_DECL mat<4, 4, T, defaultp> frustumZO( - T left, T right, T bottom, T top, T near, T far); - - /// Creates a frustum matrix using left-handed coordinates if GLM_FORCE_LEFT_HANDED if defined or right-handed coordinates otherwise. - /// The near and far clip planes correspond to z normalized device coordinates of -1 and +1 respectively. (OpenGL clip volume definition) - /// - /// @tparam T A floating-point scalar type - template - GLM_FUNC_DECL mat<4, 4, T, defaultp> frustumNO( - T left, T right, T bottom, T top, T near, T far); - - /// Creates a left handed frustum matrix. - /// If GLM_FORCE_DEPTH_ZERO_TO_ONE is defined, the near and far clip planes correspond to z normalized device coordinates of 0 and +1 respectively. (Direct3D clip volume definition) - /// Otherwise, the near and far clip planes correspond to z normalized device coordinates of -1 and +1 respectively. (OpenGL clip volume definition) - /// - /// @tparam T A floating-point scalar type - template - GLM_FUNC_DECL mat<4, 4, T, defaultp> frustumLH( - T left, T right, T bottom, T top, T near, T far); - - /// Creates a right handed frustum matrix. - /// If GLM_FORCE_DEPTH_ZERO_TO_ONE is defined, the near and far clip planes correspond to z normalized device coordinates of 0 and +1 respectively. (Direct3D clip volume definition) - /// Otherwise, the near and far clip planes correspond to z normalized device coordinates of -1 and +1 respectively. (OpenGL clip volume definition) - /// - /// @tparam T A floating-point scalar type - template - GLM_FUNC_DECL mat<4, 4, T, defaultp> frustumRH( - T left, T right, T bottom, T top, T near, T far); - - /// Creates a frustum matrix with default handedness, using the default handedness and default near and far clip planes definition. - /// To change default handedness use GLM_FORCE_LEFT_HANDED. To change default near and far clip planes definition use GLM_FORCE_DEPTH_ZERO_TO_ONE. 
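The ortho*/frustum* variants above differ only in handedness (LH/RH) and depth range (ZO maps to [0, 1], NO maps to [-1, 1]); the unsuffixed ortho() and frustum() pick a variant from the GLM_FORCE_* configuration macros described in these comments. A minimal usage sketch, with illustrative viewport and plane values that are not taken from this patch:

    #define GLM_FORCE_DEPTH_ZERO_TO_ONE   // select the [0, 1] (ZO) depth range, e.g. for Vulkan/Direct3D
    #include <glm/glm.hpp>
    #include <glm/ext/matrix_clip_space.hpp>

    // With the macro above, ortho() dispatches to orthoRH_ZO (right-handed stays the default
    // unless GLM_FORCE_LEFT_HANDED is also defined).
    glm::mat4 proj = glm::ortho(0.0f, 800.0f, 0.0f, 600.0f, 0.1f, 10.0f);

    // The explicit variants bypass the configuration macros entirely:
    glm::mat4 d3d  = glm::orthoLH_ZO(0.0f, 800.0f, 0.0f, 600.0f, 0.1f, 10.0f);
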
- /// - /// @tparam T A floating-point scalar type - /// @see glFrustum man page - template - GLM_FUNC_DECL mat<4, 4, T, defaultp> frustum( - T left, T right, T bottom, T top, T near, T far); - - - /// Creates a matrix for a right handed, symetric perspective-view frustum. - /// The near and far clip planes correspond to z normalized device coordinates of 0 and +1 respectively. (Direct3D clip volume definition) - /// - /// @param fovy Specifies the field of view angle, in degrees, in the y direction. Expressed in radians. - /// @param aspect Specifies the aspect ratio that determines the field of view in the x direction. The aspect ratio is the ratio of x (width) to y (height). - /// @param near Specifies the distance from the viewer to the near clipping plane (always positive). - /// @param far Specifies the distance from the viewer to the far clipping plane (always positive). - /// - /// @tparam T A floating-point scalar type - template - GLM_FUNC_DECL mat<4, 4, T, defaultp> perspectiveRH_ZO( - T fovy, T aspect, T near, T far); - - /// Creates a matrix for a right handed, symetric perspective-view frustum. - /// The near and far clip planes correspond to z normalized device coordinates of -1 and +1 respectively. (OpenGL clip volume definition) - /// - /// @param fovy Specifies the field of view angle, in degrees, in the y direction. Expressed in radians. - /// @param aspect Specifies the aspect ratio that determines the field of view in the x direction. The aspect ratio is the ratio of x (width) to y (height). - /// @param near Specifies the distance from the viewer to the near clipping plane (always positive). - /// @param far Specifies the distance from the viewer to the far clipping plane (always positive). - /// - /// @tparam T A floating-point scalar type - template - GLM_FUNC_DECL mat<4, 4, T, defaultp> perspectiveRH_NO( - T fovy, T aspect, T near, T far); - - /// Creates a matrix for a left handed, symetric perspective-view frustum. - /// The near and far clip planes correspond to z normalized device coordinates of 0 and +1 respectively. (Direct3D clip volume definition) - /// - /// @param fovy Specifies the field of view angle, in degrees, in the y direction. Expressed in radians. - /// @param aspect Specifies the aspect ratio that determines the field of view in the x direction. The aspect ratio is the ratio of x (width) to y (height). - /// @param near Specifies the distance from the viewer to the near clipping plane (always positive). - /// @param far Specifies the distance from the viewer to the far clipping plane (always positive). - /// - /// @tparam T A floating-point scalar type - template - GLM_FUNC_DECL mat<4, 4, T, defaultp> perspectiveLH_ZO( - T fovy, T aspect, T near, T far); - - /// Creates a matrix for a left handed, symetric perspective-view frustum. - /// The near and far clip planes correspond to z normalized device coordinates of -1 and +1 respectively. (OpenGL clip volume definition) - /// - /// @param fovy Specifies the field of view angle, in degrees, in the y direction. Expressed in radians. - /// @param aspect Specifies the aspect ratio that determines the field of view in the x direction. The aspect ratio is the ratio of x (width) to y (height). - /// @param near Specifies the distance from the viewer to the near clipping plane (always positive). - /// @param far Specifies the distance from the viewer to the far clipping plane (always positive). 
- /// - /// @tparam T A floating-point scalar type - template - GLM_FUNC_DECL mat<4, 4, T, defaultp> perspectiveLH_NO( - T fovy, T aspect, T near, T far); - - /// Creates a matrix for a symetric perspective-view frustum using left-handed coordinates if GLM_FORCE_LEFT_HANDED if defined or right-handed coordinates otherwise. - /// The near and far clip planes correspond to z normalized device coordinates of 0 and +1 respectively. (Direct3D clip volume definition) - /// - /// @param fovy Specifies the field of view angle, in degrees, in the y direction. Expressed in radians. - /// @param aspect Specifies the aspect ratio that determines the field of view in the x direction. The aspect ratio is the ratio of x (width) to y (height). - /// @param near Specifies the distance from the viewer to the near clipping plane (always positive). - /// @param far Specifies the distance from the viewer to the far clipping plane (always positive). - /// - /// @tparam T A floating-point scalar type - template - GLM_FUNC_DECL mat<4, 4, T, defaultp> perspectiveZO( - T fovy, T aspect, T near, T far); - - /// Creates a matrix for a symetric perspective-view frustum using left-handed coordinates if GLM_FORCE_LEFT_HANDED if defined or right-handed coordinates otherwise. - /// The near and far clip planes correspond to z normalized device coordinates of -1 and +1 respectively. (OpenGL clip volume definition) - /// - /// @param fovy Specifies the field of view angle, in degrees, in the y direction. Expressed in radians. - /// @param aspect Specifies the aspect ratio that determines the field of view in the x direction. The aspect ratio is the ratio of x (width) to y (height). - /// @param near Specifies the distance from the viewer to the near clipping plane (always positive). - /// @param far Specifies the distance from the viewer to the far clipping plane (always positive). - /// - /// @tparam T A floating-point scalar type - template - GLM_FUNC_DECL mat<4, 4, T, defaultp> perspectiveNO( - T fovy, T aspect, T near, T far); - - /// Creates a matrix for a right handed, symetric perspective-view frustum. - /// If GLM_FORCE_DEPTH_ZERO_TO_ONE is defined, the near and far clip planes correspond to z normalized device coordinates of 0 and +1 respectively. (Direct3D clip volume definition) - /// Otherwise, the near and far clip planes correspond to z normalized device coordinates of -1 and +1 respectively. (OpenGL clip volume definition) - /// - /// @param fovy Specifies the field of view angle, in degrees, in the y direction. Expressed in radians. - /// @param aspect Specifies the aspect ratio that determines the field of view in the x direction. The aspect ratio is the ratio of x (width) to y (height). - /// @param near Specifies the distance from the viewer to the near clipping plane (always positive). - /// @param far Specifies the distance from the viewer to the far clipping plane (always positive). - /// - /// @tparam T A floating-point scalar type - template - GLM_FUNC_DECL mat<4, 4, T, defaultp> perspectiveRH( - T fovy, T aspect, T near, T far); - - /// Creates a matrix for a left handed, symetric perspective-view frustum. - /// If GLM_FORCE_DEPTH_ZERO_TO_ONE is defined, the near and far clip planes correspond to z normalized device coordinates of 0 and +1 respectively. (Direct3D clip volume definition) - /// Otherwise, the near and far clip planes correspond to z normalized device coordinates of -1 and +1 respectively. 
(OpenGL clip volume definition) - /// - /// @param fovy Specifies the field of view angle, in degrees, in the y direction. Expressed in radians. - /// @param aspect Specifies the aspect ratio that determines the field of view in the x direction. The aspect ratio is the ratio of x (width) to y (height). - /// @param near Specifies the distance from the viewer to the near clipping plane (always positive). - /// @param far Specifies the distance from the viewer to the far clipping plane (always positive). - /// - /// @tparam T A floating-point scalar type - template - GLM_FUNC_DECL mat<4, 4, T, defaultp> perspectiveLH( - T fovy, T aspect, T near, T far); - - /// Creates a matrix for a symetric perspective-view frustum based on the default handedness and default near and far clip planes definition. - /// To change default handedness use GLM_FORCE_LEFT_HANDED. To change default near and far clip planes definition use GLM_FORCE_DEPTH_ZERO_TO_ONE. - /// - /// @param fovy Specifies the field of view angle in the y direction. Expressed in radians. - /// @param aspect Specifies the aspect ratio that determines the field of view in the x direction. The aspect ratio is the ratio of x (width) to y (height). - /// @param near Specifies the distance from the viewer to the near clipping plane (always positive). - /// @param far Specifies the distance from the viewer to the far clipping plane (always positive). - /// - /// @tparam T A floating-point scalar type - /// @see gluPerspective man page - template - GLM_FUNC_DECL mat<4, 4, T, defaultp> perspective( - T fovy, T aspect, T near, T far); - - /// Builds a perspective projection matrix based on a field of view using right-handed coordinates. - /// The near and far clip planes correspond to z normalized device coordinates of 0 and +1 respectively. (Direct3D clip volume definition) - /// - /// @param fov Expressed in radians. - /// @param width Width of the viewport - /// @param height Height of the viewport - /// @param near Specifies the distance from the viewer to the near clipping plane (always positive). - /// @param far Specifies the distance from the viewer to the far clipping plane (always positive). - /// - /// @tparam T A floating-point scalar type - template - GLM_FUNC_DECL mat<4, 4, T, defaultp> perspectiveFovRH_ZO( - T fov, T width, T height, T near, T far); - - /// Builds a perspective projection matrix based on a field of view using right-handed coordinates. - /// The near and far clip planes correspond to z normalized device coordinates of -1 and +1 respectively. (OpenGL clip volume definition) - /// - /// @param fov Expressed in radians. - /// @param width Width of the viewport - /// @param height Height of the viewport - /// @param near Specifies the distance from the viewer to the near clipping plane (always positive). - /// @param far Specifies the distance from the viewer to the far clipping plane (always positive). - /// - /// @tparam T A floating-point scalar type - template - GLM_FUNC_DECL mat<4, 4, T, defaultp> perspectiveFovRH_NO( - T fov, T width, T height, T near, T far); - - /// Builds a perspective projection matrix based on a field of view using left-handed coordinates. - /// The near and far clip planes correspond to z normalized device coordinates of 0 and +1 respectively. (Direct3D clip volume definition) - /// - /// @param fov Expressed in radians. 
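The perspective*() family follows the same LH/RH and ZO/NO naming scheme as the orthographic and frustum builders. A short sketch of a typical call, with assumed field-of-view, aspect, and plane values; the final Y-axis flip is a common adjustment when the matrix is fed to a Vulkan pipeline (Vulkan's clip space has Y pointing down) and is not part of GLM itself:

    #include <glm/glm.hpp>
    #include <glm/ext/matrix_clip_space.hpp>   // perspective* builders
    #include <glm/trigonometric.hpp>           // glm::radians

    // 45 degree vertical field of view, 16:9 aspect, near/far planes at 0.1 and 100.
    glm::mat4 proj = glm::perspectiveRH_ZO(glm::radians(45.0f), 16.0f / 9.0f, 0.1f, 100.0f);
    proj[1][1] *= -1.0f;   // flip Y for Vulkan's clip-space convention (assumption for this example)
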
- /// @param width Width of the viewport - /// @param height Height of the viewport - /// @param near Specifies the distance from the viewer to the near clipping plane (always positive). - /// @param far Specifies the distance from the viewer to the far clipping plane (always positive). - /// - /// @tparam T A floating-point scalar type - template - GLM_FUNC_DECL mat<4, 4, T, defaultp> perspectiveFovLH_ZO( - T fov, T width, T height, T near, T far); - - /// Builds a perspective projection matrix based on a field of view using left-handed coordinates. - /// The near and far clip planes correspond to z normalized device coordinates of -1 and +1 respectively. (OpenGL clip volume definition) - /// - /// @param fov Expressed in radians. - /// @param width Width of the viewport - /// @param height Height of the viewport - /// @param near Specifies the distance from the viewer to the near clipping plane (always positive). - /// @param far Specifies the distance from the viewer to the far clipping plane (always positive). - /// - /// @tparam T A floating-point scalar type - template - GLM_FUNC_DECL mat<4, 4, T, defaultp> perspectiveFovLH_NO( - T fov, T width, T height, T near, T far); - - /// Builds a perspective projection matrix based on a field of view using left-handed coordinates if GLM_FORCE_LEFT_HANDED if defined or right-handed coordinates otherwise. - /// The near and far clip planes correspond to z normalized device coordinates of 0 and +1 respectively. (Direct3D clip volume definition) - /// - /// @param fov Expressed in radians. - /// @param width Width of the viewport - /// @param height Height of the viewport - /// @param near Specifies the distance from the viewer to the near clipping plane (always positive). - /// @param far Specifies the distance from the viewer to the far clipping plane (always positive). - /// - /// @tparam T A floating-point scalar type - template - GLM_FUNC_DECL mat<4, 4, T, defaultp> perspectiveFovZO( - T fov, T width, T height, T near, T far); - - /// Builds a perspective projection matrix based on a field of view using left-handed coordinates if GLM_FORCE_LEFT_HANDED if defined or right-handed coordinates otherwise. - /// The near and far clip planes correspond to z normalized device coordinates of -1 and +1 respectively. (OpenGL clip volume definition) - /// - /// @param fov Expressed in radians. - /// @param width Width of the viewport - /// @param height Height of the viewport - /// @param near Specifies the distance from the viewer to the near clipping plane (always positive). - /// @param far Specifies the distance from the viewer to the far clipping plane (always positive). - /// - /// @tparam T A floating-point scalar type - template - GLM_FUNC_DECL mat<4, 4, T, defaultp> perspectiveFovNO( - T fov, T width, T height, T near, T far); - - /// Builds a right handed perspective projection matrix based on a field of view. - /// If GLM_FORCE_DEPTH_ZERO_TO_ONE is defined, the near and far clip planes correspond to z normalized device coordinates of 0 and +1 respectively. (Direct3D clip volume definition) - /// Otherwise, the near and far clip planes correspond to z normalized device coordinates of -1 and +1 respectively. (OpenGL clip volume definition) - /// - /// @param fov Expressed in radians. - /// @param width Width of the viewport - /// @param height Height of the viewport - /// @param near Specifies the distance from the viewer to the near clipping plane (always positive). 
- /// @param far Specifies the distance from the viewer to the far clipping plane (always positive). - /// - /// @tparam T A floating-point scalar type - template - GLM_FUNC_DECL mat<4, 4, T, defaultp> perspectiveFovRH( - T fov, T width, T height, T near, T far); - - /// Builds a left handed perspective projection matrix based on a field of view. - /// If GLM_FORCE_DEPTH_ZERO_TO_ONE is defined, the near and far clip planes correspond to z normalized device coordinates of 0 and +1 respectively. (Direct3D clip volume definition) - /// Otherwise, the near and far clip planes correspond to z normalized device coordinates of -1 and +1 respectively. (OpenGL clip volume definition) - /// - /// @param fov Expressed in radians. - /// @param width Width of the viewport - /// @param height Height of the viewport - /// @param near Specifies the distance from the viewer to the near clipping plane (always positive). - /// @param far Specifies the distance from the viewer to the far clipping plane (always positive). - /// - /// @tparam T A floating-point scalar type - template - GLM_FUNC_DECL mat<4, 4, T, defaultp> perspectiveFovLH( - T fov, T width, T height, T near, T far); - - /// Builds a perspective projection matrix based on a field of view and the default handedness and default near and far clip planes definition. - /// To change default handedness use GLM_FORCE_LEFT_HANDED. To change default near and far clip planes definition use GLM_FORCE_DEPTH_ZERO_TO_ONE. - /// - /// @param fov Expressed in radians. - /// @param width Width of the viewport - /// @param height Height of the viewport - /// @param near Specifies the distance from the viewer to the near clipping plane (always positive). - /// @param far Specifies the distance from the viewer to the far clipping plane (always positive). - /// - /// @tparam T A floating-point scalar type - template - GLM_FUNC_DECL mat<4, 4, T, defaultp> perspectiveFov( - T fov, T width, T height, T near, T far); - - /// Creates a matrix for a left handed, symmetric perspective-view frustum with far plane at infinite. - /// - /// @param fovy Specifies the field of view angle, in degrees, in the y direction. Expressed in radians. - /// @param aspect Specifies the aspect ratio that determines the field of view in the x direction. The aspect ratio is the ratio of x (width) to y (height). - /// @param near Specifies the distance from the viewer to the near clipping plane (always positive). - /// - /// @tparam T A floating-point scalar type - template - GLM_FUNC_DECL mat<4, 4, T, defaultp> infinitePerspectiveLH( - T fovy, T aspect, T near); - - /// Creates a matrix for a right handed, symmetric perspective-view frustum with far plane at infinite. - /// - /// @param fovy Specifies the field of view angle, in degrees, in the y direction. Expressed in radians. - /// @param aspect Specifies the aspect ratio that determines the field of view in the x direction. The aspect ratio is the ratio of x (width) to y (height). - /// @param near Specifies the distance from the viewer to the near clipping plane (always positive). - /// - /// @tparam T A floating-point scalar type - template - GLM_FUNC_DECL mat<4, 4, T, defaultp> infinitePerspectiveRH( - T fovy, T aspect, T near); - - /// Creates a matrix for a symmetric perspective-view frustum with far plane at infinite with default handedness. - /// - /// @param fovy Specifies the field of view angle, in degrees, in the y direction. Expressed in radians. 
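The infinitePerspective*() builders documented here take the same parameters as perspective() but omit the far plane, pushing it to infinity; tweakedInfinitePerspective() additionally biases the depth terms for hardware without depth clamping. A small sketch with assumed values, not taken from this patch:

    #include <glm/glm.hpp>
    #include <glm/ext/matrix_clip_space.hpp>

    // Same call shape as perspective(), minus the far plane.
    glm::mat4 proj = glm::infinitePerspective(glm::radians(60.0f), 4.0f / 3.0f, 0.1f);
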
- /// @param aspect Specifies the aspect ratio that determines the field of view in the x direction. The aspect ratio is the ratio of x (width) to y (height). - /// @param near Specifies the distance from the viewer to the near clipping plane (always positive). - /// - /// @tparam T A floating-point scalar type - template - GLM_FUNC_DECL mat<4, 4, T, defaultp> infinitePerspective( - T fovy, T aspect, T near); - - /// Creates a matrix for a symmetric perspective-view frustum with far plane at infinite for graphics hardware that doesn't support depth clamping. - /// - /// @param fovy Specifies the field of view angle, in degrees, in the y direction. Expressed in radians. - /// @param aspect Specifies the aspect ratio that determines the field of view in the x direction. The aspect ratio is the ratio of x (width) to y (height). - /// @param near Specifies the distance from the viewer to the near clipping plane (always positive). - /// - /// @tparam T A floating-point scalar type - template - GLM_FUNC_DECL mat<4, 4, T, defaultp> tweakedInfinitePerspective( - T fovy, T aspect, T near); - - /// Creates a matrix for a symmetric perspective-view frustum with far plane at infinite for graphics hardware that doesn't support depth clamping. - /// - /// @param fovy Specifies the field of view angle, in degrees, in the y direction. Expressed in radians. - /// @param aspect Specifies the aspect ratio that determines the field of view in the x direction. The aspect ratio is the ratio of x (width) to y (height). - /// @param near Specifies the distance from the viewer to the near clipping plane (always positive). - /// @param ep Epsilon - /// - /// @tparam T A floating-point scalar type - template - GLM_FUNC_DECL mat<4, 4, T, defaultp> tweakedInfinitePerspective( - T fovy, T aspect, T near, T ep); - - /// @} -}//namespace glm - -#include "matrix_clip_space.inl" diff --git a/third_party/glm/ext/matrix_clip_space.inl b/third_party/glm/ext/matrix_clip_space.inl deleted file mode 100755 index 7e4df33..0000000 --- a/third_party/glm/ext/matrix_clip_space.inl +++ /dev/null @@ -1,555 +0,0 @@ -namespace glm -{ - template - GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> ortho(T left, T right, T bottom, T top) - { - mat<4, 4, T, defaultp> Result(static_cast(1)); - Result[0][0] = static_cast(2) / (right - left); - Result[1][1] = static_cast(2) / (top - bottom); - Result[2][2] = - static_cast(1); - Result[3][0] = - (right + left) / (right - left); - Result[3][1] = - (top + bottom) / (top - bottom); - return Result; - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> orthoLH_ZO(T left, T right, T bottom, T top, T zNear, T zFar) - { - mat<4, 4, T, defaultp> Result(1); - Result[0][0] = static_cast(2) / (right - left); - Result[1][1] = static_cast(2) / (top - bottom); - Result[2][2] = static_cast(1) / (zFar - zNear); - Result[3][0] = - (right + left) / (right - left); - Result[3][1] = - (top + bottom) / (top - bottom); - Result[3][2] = - zNear / (zFar - zNear); - return Result; - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> orthoLH_NO(T left, T right, T bottom, T top, T zNear, T zFar) - { - mat<4, 4, T, defaultp> Result(1); - Result[0][0] = static_cast(2) / (right - left); - Result[1][1] = static_cast(2) / (top - bottom); - Result[2][2] = static_cast(2) / (zFar - zNear); - Result[3][0] = - (right + left) / (right - left); - Result[3][1] = - (top + bottom) / (top - bottom); - Result[3][2] = - (zFar + zNear) / (zFar - zNear); - return Result; - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> 
orthoRH_ZO(T left, T right, T bottom, T top, T zNear, T zFar) - { - mat<4, 4, T, defaultp> Result(1); - Result[0][0] = static_cast(2) / (right - left); - Result[1][1] = static_cast(2) / (top - bottom); - Result[2][2] = - static_cast(1) / (zFar - zNear); - Result[3][0] = - (right + left) / (right - left); - Result[3][1] = - (top + bottom) / (top - bottom); - Result[3][2] = - zNear / (zFar - zNear); - return Result; - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> orthoRH_NO(T left, T right, T bottom, T top, T zNear, T zFar) - { - mat<4, 4, T, defaultp> Result(1); - Result[0][0] = static_cast(2) / (right - left); - Result[1][1] = static_cast(2) / (top - bottom); - Result[2][2] = - static_cast(2) / (zFar - zNear); - Result[3][0] = - (right + left) / (right - left); - Result[3][1] = - (top + bottom) / (top - bottom); - Result[3][2] = - (zFar + zNear) / (zFar - zNear); - return Result; - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> orthoZO(T left, T right, T bottom, T top, T zNear, T zFar) - { -# if GLM_CONFIG_CLIP_CONTROL & GLM_CLIP_CONTROL_LH_BIT - return orthoLH_ZO(left, right, bottom, top, zNear, zFar); -# else - return orthoRH_ZO(left, right, bottom, top, zNear, zFar); -# endif - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> orthoNO(T left, T right, T bottom, T top, T zNear, T zFar) - { -# if GLM_CONFIG_CLIP_CONTROL & GLM_CLIP_CONTROL_LH_BIT - return orthoLH_NO(left, right, bottom, top, zNear, zFar); -# else - return orthoRH_NO(left, right, bottom, top, zNear, zFar); -# endif - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> orthoLH(T left, T right, T bottom, T top, T zNear, T zFar) - { -# if GLM_CONFIG_CLIP_CONTROL & GLM_CLIP_CONTROL_ZO_BIT - return orthoLH_ZO(left, right, bottom, top, zNear, zFar); -# else - return orthoLH_NO(left, right, bottom, top, zNear, zFar); -# endif - - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> orthoRH(T left, T right, T bottom, T top, T zNear, T zFar) - { -# if GLM_CONFIG_CLIP_CONTROL & GLM_CLIP_CONTROL_ZO_BIT - return orthoRH_ZO(left, right, bottom, top, zNear, zFar); -# else - return orthoRH_NO(left, right, bottom, top, zNear, zFar); -# endif - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> ortho(T left, T right, T bottom, T top, T zNear, T zFar) - { -# if GLM_CONFIG_CLIP_CONTROL == GLM_CLIP_CONTROL_LH_ZO - return orthoLH_ZO(left, right, bottom, top, zNear, zFar); -# elif GLM_CONFIG_CLIP_CONTROL == GLM_CLIP_CONTROL_LH_NO - return orthoLH_NO(left, right, bottom, top, zNear, zFar); -# elif GLM_CONFIG_CLIP_CONTROL == GLM_CLIP_CONTROL_RH_ZO - return orthoRH_ZO(left, right, bottom, top, zNear, zFar); -# elif GLM_CONFIG_CLIP_CONTROL == GLM_CLIP_CONTROL_RH_NO - return orthoRH_NO(left, right, bottom, top, zNear, zFar); -# endif - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> frustumLH_ZO(T left, T right, T bottom, T top, T nearVal, T farVal) - { - mat<4, 4, T, defaultp> Result(0); - Result[0][0] = (static_cast(2) * nearVal) / (right - left); - Result[1][1] = (static_cast(2) * nearVal) / (top - bottom); - Result[2][0] = (right + left) / (right - left); - Result[2][1] = (top + bottom) / (top - bottom); - Result[2][2] = farVal / (farVal - nearVal); - Result[2][3] = static_cast(1); - Result[3][2] = -(farVal * nearVal) / (farVal - nearVal); - return Result; - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> frustumLH_NO(T left, T right, T bottom, T top, T nearVal, T farVal) - { - mat<4, 4, T, defaultp> Result(0); - Result[0][0] = (static_cast(2) * nearVal) / (right - 
left); - Result[1][1] = (static_cast(2) * nearVal) / (top - bottom); - Result[2][0] = (right + left) / (right - left); - Result[2][1] = (top + bottom) / (top - bottom); - Result[2][2] = (farVal + nearVal) / (farVal - nearVal); - Result[2][3] = static_cast(1); - Result[3][2] = - (static_cast(2) * farVal * nearVal) / (farVal - nearVal); - return Result; - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> frustumRH_ZO(T left, T right, T bottom, T top, T nearVal, T farVal) - { - mat<4, 4, T, defaultp> Result(0); - Result[0][0] = (static_cast(2) * nearVal) / (right - left); - Result[1][1] = (static_cast(2) * nearVal) / (top - bottom); - Result[2][0] = (right + left) / (right - left); - Result[2][1] = (top + bottom) / (top - bottom); - Result[2][2] = farVal / (nearVal - farVal); - Result[2][3] = static_cast(-1); - Result[3][2] = -(farVal * nearVal) / (farVal - nearVal); - return Result; - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> frustumRH_NO(T left, T right, T bottom, T top, T nearVal, T farVal) - { - mat<4, 4, T, defaultp> Result(0); - Result[0][0] = (static_cast(2) * nearVal) / (right - left); - Result[1][1] = (static_cast(2) * nearVal) / (top - bottom); - Result[2][0] = (right + left) / (right - left); - Result[2][1] = (top + bottom) / (top - bottom); - Result[2][2] = - (farVal + nearVal) / (farVal - nearVal); - Result[2][3] = static_cast(-1); - Result[3][2] = - (static_cast(2) * farVal * nearVal) / (farVal - nearVal); - return Result; - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> frustumZO(T left, T right, T bottom, T top, T nearVal, T farVal) - { -# if GLM_CONFIG_CLIP_CONTROL & GLM_CLIP_CONTROL_LH_BIT - return frustumLH_ZO(left, right, bottom, top, nearVal, farVal); -# else - return frustumRH_ZO(left, right, bottom, top, nearVal, farVal); -# endif - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> frustumNO(T left, T right, T bottom, T top, T nearVal, T farVal) - { -# if GLM_CONFIG_CLIP_CONTROL & GLM_CLIP_CONTROL_LH_BIT - return frustumLH_NO(left, right, bottom, top, nearVal, farVal); -# else - return frustumRH_NO(left, right, bottom, top, nearVal, farVal); -# endif - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> frustumLH(T left, T right, T bottom, T top, T nearVal, T farVal) - { -# if GLM_CONFIG_CLIP_CONTROL & GLM_CLIP_CONTROL_ZO_BIT - return frustumLH_ZO(left, right, bottom, top, nearVal, farVal); -# else - return frustumLH_NO(left, right, bottom, top, nearVal, farVal); -# endif - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> frustumRH(T left, T right, T bottom, T top, T nearVal, T farVal) - { -# if GLM_CONFIG_CLIP_CONTROL & GLM_CLIP_CONTROL_ZO_BIT - return frustumRH_ZO(left, right, bottom, top, nearVal, farVal); -# else - return frustumRH_NO(left, right, bottom, top, nearVal, farVal); -# endif - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> frustum(T left, T right, T bottom, T top, T nearVal, T farVal) - { -# if GLM_CONFIG_CLIP_CONTROL == GLM_CLIP_CONTROL_LH_ZO - return frustumLH_ZO(left, right, bottom, top, nearVal, farVal); -# elif GLM_CONFIG_CLIP_CONTROL == GLM_CLIP_CONTROL_LH_NO - return frustumLH_NO(left, right, bottom, top, nearVal, farVal); -# elif GLM_CONFIG_CLIP_CONTROL == GLM_CLIP_CONTROL_RH_ZO - return frustumRH_ZO(left, right, bottom, top, nearVal, farVal); -# elif GLM_CONFIG_CLIP_CONTROL == GLM_CLIP_CONTROL_RH_NO - return frustumRH_NO(left, right, bottom, top, nearVal, farVal); -# endif - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> perspectiveRH_ZO(T fovy, T aspect, T 
zNear, T zFar) - { - assert(abs(aspect - std::numeric_limits::epsilon()) > static_cast(0)); - - T const tanHalfFovy = tan(fovy / static_cast(2)); - - mat<4, 4, T, defaultp> Result(static_cast(0)); - Result[0][0] = static_cast(1) / (aspect * tanHalfFovy); - Result[1][1] = static_cast(1) / (tanHalfFovy); - Result[2][2] = zFar / (zNear - zFar); - Result[2][3] = - static_cast(1); - Result[3][2] = -(zFar * zNear) / (zFar - zNear); - return Result; - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> perspectiveRH_NO(T fovy, T aspect, T zNear, T zFar) - { - assert(abs(aspect - std::numeric_limits::epsilon()) > static_cast(0)); - - T const tanHalfFovy = tan(fovy / static_cast(2)); - - mat<4, 4, T, defaultp> Result(static_cast(0)); - Result[0][0] = static_cast(1) / (aspect * tanHalfFovy); - Result[1][1] = static_cast(1) / (tanHalfFovy); - Result[2][2] = - (zFar + zNear) / (zFar - zNear); - Result[2][3] = - static_cast(1); - Result[3][2] = - (static_cast(2) * zFar * zNear) / (zFar - zNear); - return Result; - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> perspectiveLH_ZO(T fovy, T aspect, T zNear, T zFar) - { - assert(abs(aspect - std::numeric_limits::epsilon()) > static_cast(0)); - - T const tanHalfFovy = tan(fovy / static_cast(2)); - - mat<4, 4, T, defaultp> Result(static_cast(0)); - Result[0][0] = static_cast(1) / (aspect * tanHalfFovy); - Result[1][1] = static_cast(1) / (tanHalfFovy); - Result[2][2] = zFar / (zFar - zNear); - Result[2][3] = static_cast(1); - Result[3][2] = -(zFar * zNear) / (zFar - zNear); - return Result; - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> perspectiveLH_NO(T fovy, T aspect, T zNear, T zFar) - { - assert(abs(aspect - std::numeric_limits::epsilon()) > static_cast(0)); - - T const tanHalfFovy = tan(fovy / static_cast(2)); - - mat<4, 4, T, defaultp> Result(static_cast(0)); - Result[0][0] = static_cast(1) / (aspect * tanHalfFovy); - Result[1][1] = static_cast(1) / (tanHalfFovy); - Result[2][2] = (zFar + zNear) / (zFar - zNear); - Result[2][3] = static_cast(1); - Result[3][2] = - (static_cast(2) * zFar * zNear) / (zFar - zNear); - return Result; - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> perspectiveZO(T fovy, T aspect, T zNear, T zFar) - { -# if GLM_CONFIG_CLIP_CONTROL & GLM_CLIP_CONTROL_LH_BIT - return perspectiveLH_ZO(fovy, aspect, zNear, zFar); -# else - return perspectiveRH_ZO(fovy, aspect, zNear, zFar); -# endif - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> perspectiveNO(T fovy, T aspect, T zNear, T zFar) - { -# if GLM_CONFIG_CLIP_CONTROL & GLM_CLIP_CONTROL_LH_BIT - return perspectiveLH_NO(fovy, aspect, zNear, zFar); -# else - return perspectiveRH_NO(fovy, aspect, zNear, zFar); -# endif - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> perspectiveLH(T fovy, T aspect, T zNear, T zFar) - { -# if GLM_CONFIG_CLIP_CONTROL & GLM_CLIP_CONTROL_ZO_BIT - return perspectiveLH_ZO(fovy, aspect, zNear, zFar); -# else - return perspectiveLH_NO(fovy, aspect, zNear, zFar); -# endif - - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> perspectiveRH(T fovy, T aspect, T zNear, T zFar) - { -# if GLM_CONFIG_CLIP_CONTROL & GLM_CLIP_CONTROL_ZO_BIT - return perspectiveRH_ZO(fovy, aspect, zNear, zFar); -# else - return perspectiveRH_NO(fovy, aspect, zNear, zFar); -# endif - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> perspective(T fovy, T aspect, T zNear, T zFar) - { -# if GLM_CONFIG_CLIP_CONTROL == GLM_CLIP_CONTROL_LH_ZO - return perspectiveLH_ZO(fovy, aspect, zNear, zFar); -# elif 
GLM_CONFIG_CLIP_CONTROL == GLM_CLIP_CONTROL_LH_NO - return perspectiveLH_NO(fovy, aspect, zNear, zFar); -# elif GLM_CONFIG_CLIP_CONTROL == GLM_CLIP_CONTROL_RH_ZO - return perspectiveRH_ZO(fovy, aspect, zNear, zFar); -# elif GLM_CONFIG_CLIP_CONTROL == GLM_CLIP_CONTROL_RH_NO - return perspectiveRH_NO(fovy, aspect, zNear, zFar); -# endif - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> perspectiveFovRH_ZO(T fov, T width, T height, T zNear, T zFar) - { - assert(width > static_cast(0)); - assert(height > static_cast(0)); - assert(fov > static_cast(0)); - - T const rad = fov; - T const h = glm::cos(static_cast(0.5) * rad) / glm::sin(static_cast(0.5) * rad); - T const w = h * height / width; ///todo max(width , Height) / min(width , Height)? - - mat<4, 4, T, defaultp> Result(static_cast(0)); - Result[0][0] = w; - Result[1][1] = h; - Result[2][2] = zFar / (zNear - zFar); - Result[2][3] = - static_cast(1); - Result[3][2] = -(zFar * zNear) / (zFar - zNear); - return Result; - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> perspectiveFovRH_NO(T fov, T width, T height, T zNear, T zFar) - { - assert(width > static_cast(0)); - assert(height > static_cast(0)); - assert(fov > static_cast(0)); - - T const rad = fov; - T const h = glm::cos(static_cast(0.5) * rad) / glm::sin(static_cast(0.5) * rad); - T const w = h * height / width; ///todo max(width , Height) / min(width , Height)? - - mat<4, 4, T, defaultp> Result(static_cast(0)); - Result[0][0] = w; - Result[1][1] = h; - Result[2][2] = - (zFar + zNear) / (zFar - zNear); - Result[2][3] = - static_cast(1); - Result[3][2] = - (static_cast(2) * zFar * zNear) / (zFar - zNear); - return Result; - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> perspectiveFovLH_ZO(T fov, T width, T height, T zNear, T zFar) - { - assert(width > static_cast(0)); - assert(height > static_cast(0)); - assert(fov > static_cast(0)); - - T const rad = fov; - T const h = glm::cos(static_cast(0.5) * rad) / glm::sin(static_cast(0.5) * rad); - T const w = h * height / width; ///todo max(width , Height) / min(width , Height)? - - mat<4, 4, T, defaultp> Result(static_cast(0)); - Result[0][0] = w; - Result[1][1] = h; - Result[2][2] = zFar / (zFar - zNear); - Result[2][3] = static_cast(1); - Result[3][2] = -(zFar * zNear) / (zFar - zNear); - return Result; - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> perspectiveFovLH_NO(T fov, T width, T height, T zNear, T zFar) - { - assert(width > static_cast(0)); - assert(height > static_cast(0)); - assert(fov > static_cast(0)); - - T const rad = fov; - T const h = glm::cos(static_cast(0.5) * rad) / glm::sin(static_cast(0.5) * rad); - T const w = h * height / width; ///todo max(width , Height) / min(width , Height)? 
- - mat<4, 4, T, defaultp> Result(static_cast(0)); - Result[0][0] = w; - Result[1][1] = h; - Result[2][2] = (zFar + zNear) / (zFar - zNear); - Result[2][3] = static_cast(1); - Result[3][2] = - (static_cast(2) * zFar * zNear) / (zFar - zNear); - return Result; - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> perspectiveFovZO(T fov, T width, T height, T zNear, T zFar) - { -# if GLM_CONFIG_CLIP_CONTROL & GLM_CLIP_CONTROL_LH_BIT - return perspectiveFovLH_ZO(fov, width, height, zNear, zFar); -# else - return perspectiveFovRH_ZO(fov, width, height, zNear, zFar); -# endif - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> perspectiveFovNO(T fov, T width, T height, T zNear, T zFar) - { -# if GLM_CONFIG_CLIP_CONTROL & GLM_CLIP_CONTROL_LH_BIT - return perspectiveFovLH_NO(fov, width, height, zNear, zFar); -# else - return perspectiveFovRH_NO(fov, width, height, zNear, zFar); -# endif - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> perspectiveFovLH(T fov, T width, T height, T zNear, T zFar) - { -# if GLM_CONFIG_CLIP_CONTROL & GLM_CLIP_CONTROL_ZO_BIT - return perspectiveFovLH_ZO(fov, width, height, zNear, zFar); -# else - return perspectiveFovLH_NO(fov, width, height, zNear, zFar); -# endif - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> perspectiveFovRH(T fov, T width, T height, T zNear, T zFar) - { -# if GLM_CONFIG_CLIP_CONTROL & GLM_CLIP_CONTROL_ZO_BIT - return perspectiveFovRH_ZO(fov, width, height, zNear, zFar); -# else - return perspectiveFovRH_NO(fov, width, height, zNear, zFar); -# endif - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> perspectiveFov(T fov, T width, T height, T zNear, T zFar) - { -# if GLM_CONFIG_CLIP_CONTROL == GLM_CLIP_CONTROL_LH_ZO - return perspectiveFovLH_ZO(fov, width, height, zNear, zFar); -# elif GLM_CONFIG_CLIP_CONTROL == GLM_CLIP_CONTROL_LH_NO - return perspectiveFovLH_NO(fov, width, height, zNear, zFar); -# elif GLM_CONFIG_CLIP_CONTROL == GLM_CLIP_CONTROL_RH_ZO - return perspectiveFovRH_ZO(fov, width, height, zNear, zFar); -# elif GLM_CONFIG_CLIP_CONTROL == GLM_CLIP_CONTROL_RH_NO - return perspectiveFovRH_NO(fov, width, height, zNear, zFar); -# endif - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> infinitePerspectiveRH(T fovy, T aspect, T zNear) - { - T const range = tan(fovy / static_cast(2)) * zNear; - T const left = -range * aspect; - T const right = range * aspect; - T const bottom = -range; - T const top = range; - - mat<4, 4, T, defaultp> Result(static_cast(0)); - Result[0][0] = (static_cast(2) * zNear) / (right - left); - Result[1][1] = (static_cast(2) * zNear) / (top - bottom); - Result[2][2] = - static_cast(1); - Result[2][3] = - static_cast(1); - Result[3][2] = - static_cast(2) * zNear; - return Result; - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> infinitePerspectiveLH(T fovy, T aspect, T zNear) - { - T const range = tan(fovy / static_cast(2)) * zNear; - T const left = -range * aspect; - T const right = range * aspect; - T const bottom = -range; - T const top = range; - - mat<4, 4, T, defaultp> Result(T(0)); - Result[0][0] = (static_cast(2) * zNear) / (right - left); - Result[1][1] = (static_cast(2) * zNear) / (top - bottom); - Result[2][2] = static_cast(1); - Result[2][3] = static_cast(1); - Result[3][2] = - static_cast(2) * zNear; - return Result; - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> infinitePerspective(T fovy, T aspect, T zNear) - { -# if GLM_CONFIG_CLIP_CONTROL & GLM_CLIP_CONTROL_LH_BIT - return infinitePerspectiveLH(fovy, aspect, zNear); -# 
else - return infinitePerspectiveRH(fovy, aspect, zNear); -# endif - } - - // Infinite projection matrix: http://www.terathon.com/gdc07_lengyel.pdf - template - GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> tweakedInfinitePerspective(T fovy, T aspect, T zNear, T ep) - { - T const range = tan(fovy / static_cast(2)) * zNear; - T const left = -range * aspect; - T const right = range * aspect; - T const bottom = -range; - T const top = range; - - mat<4, 4, T, defaultp> Result(static_cast(0)); - Result[0][0] = (static_cast(2) * zNear) / (right - left); - Result[1][1] = (static_cast(2) * zNear) / (top - bottom); - Result[2][2] = ep - static_cast(1); - Result[2][3] = static_cast(-1); - Result[3][2] = (ep - static_cast(2)) * zNear; - return Result; - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> tweakedInfinitePerspective(T fovy, T aspect, T zNear) - { - return tweakedInfinitePerspective(fovy, aspect, zNear, epsilon()); - } -}//namespace glm diff --git a/third_party/glm/ext/matrix_common.hpp b/third_party/glm/ext/matrix_common.hpp deleted file mode 100755 index 05c3799..0000000 --- a/third_party/glm/ext/matrix_common.hpp +++ /dev/null @@ -1,36 +0,0 @@ -/// @ref ext_matrix_common -/// @file glm/ext/matrix_common.hpp -/// -/// @defgroup ext_matrix_common GLM_EXT_matrix_common -/// @ingroup ext -/// -/// Defines functions for common matrix operations. -/// -/// Include to use the features of this extension. -/// -/// @see ext_matrix_common - -#pragma once - -#include "../detail/qualifier.hpp" -#include "../detail/_fixes.hpp" - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# pragma message("GLM: GLM_EXT_matrix_transform extension included") -#endif - -namespace glm -{ - /// @addtogroup ext_matrix_common - /// @{ - - template - GLM_FUNC_DECL mat mix(mat const& x, mat const& y, mat const& a); - - template - GLM_FUNC_DECL mat mix(mat const& x, mat const& y, U a); - - /// @} -}//namespace glm - -#include "matrix_common.inl" diff --git a/third_party/glm/ext/matrix_common.inl b/third_party/glm/ext/matrix_common.inl deleted file mode 100755 index 9d50848..0000000 --- a/third_party/glm/ext/matrix_common.inl +++ /dev/null @@ -1,16 +0,0 @@ -#include "../matrix.hpp" - -namespace glm -{ - template - GLM_FUNC_QUALIFIER mat mix(mat const& x, mat const& y, U a) - { - return mat(x) * (static_cast(1) - a) + mat(y) * a; - } - - template - GLM_FUNC_QUALIFIER mat mix(mat const& x, mat const& y, mat const& a) - { - return matrixCompMult(mat(x), static_cast(1) - a) + matrixCompMult(mat(y), a); - } -}//namespace glm diff --git a/third_party/glm/ext/matrix_double2x2.hpp b/third_party/glm/ext/matrix_double2x2.hpp deleted file mode 100755 index 94dca54..0000000 --- a/third_party/glm/ext/matrix_double2x2.hpp +++ /dev/null @@ -1,23 +0,0 @@ -/// @ref core -/// @file glm/ext/matrix_double2x2.hpp - -#pragma once -#include "../detail/type_mat2x2.hpp" - -namespace glm -{ - /// @addtogroup core_matrix - /// @{ - - /// 2 columns of 2 components matrix of double-precision floating-point numbers. - /// - /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices - typedef mat<2, 2, double, defaultp> dmat2x2; - - /// 2 columns of 2 components matrix of double-precision floating-point numbers. 
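matrix_common.hpp/.inl above declare a componentwise mix() for matrices, once with a scalar blend factor and once with a per-component blend matrix. A short sketch of the scalar form, using assumed matrices:

    #include <glm/glm.hpp>
    #include <glm/ext/matrix_common.hpp>   // mix() overloads for matrices

    glm::mat4 a(1.0f);                          // identity
    glm::mat4 b = glm::mat4(1.0f) * 3.0f;       // identity scaled by 3
    glm::mat4 halfway = glm::mix(a, b, 0.5f);   // componentwise lerp: diagonal becomes 2
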
- /// - /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices - typedef mat<2, 2, double, defaultp> dmat2; - - /// @} -}//namespace glm diff --git a/third_party/glm/ext/matrix_double2x2_precision.hpp b/third_party/glm/ext/matrix_double2x2_precision.hpp deleted file mode 100755 index 9e2c174..0000000 --- a/third_party/glm/ext/matrix_double2x2_precision.hpp +++ /dev/null @@ -1,49 +0,0 @@ -/// @ref core -/// @file glm/ext/matrix_double2x2_precision.hpp - -#pragma once -#include "../detail/type_mat2x2.hpp" - -namespace glm -{ - /// @addtogroup core_matrix_precision - /// @{ - - /// 2 columns of 2 components matrix of double-precision floating-point numbers using low precision arithmetic in term of ULPs. - /// - /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef mat<2, 2, double, lowp> lowp_dmat2; - - /// 2 columns of 2 components matrix of double-precision floating-point numbers using medium precision arithmetic in term of ULPs. - /// - /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef mat<2, 2, double, mediump> mediump_dmat2; - - /// 2 columns of 2 components matrix of double-precision floating-point numbers using medium precision arithmetic in term of ULPs. - /// - /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef mat<2, 2, double, highp> highp_dmat2; - - /// 2 columns of 2 components matrix of double-precision floating-point numbers using low precision arithmetic in term of ULPs. - /// - /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef mat<2, 2, double, lowp> lowp_dmat2x2; - - /// 2 columns of 2 components matrix of double-precision floating-point numbers using medium precision arithmetic in term of ULPs. - /// - /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef mat<2, 2, double, mediump> mediump_dmat2x2; - - /// 2 columns of 2 components matrix of double-precision floating-point numbers using medium precision arithmetic in term of ULPs. - /// - /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef mat<2, 2, double, highp> highp_dmat2x2; - - /// @} -}//namespace glm diff --git a/third_party/glm/ext/matrix_double2x3.hpp b/third_party/glm/ext/matrix_double2x3.hpp deleted file mode 100755 index bfef87a..0000000 --- a/third_party/glm/ext/matrix_double2x3.hpp +++ /dev/null @@ -1,18 +0,0 @@ -/// @ref core -/// @file glm/ext/matrix_double2x3.hpp - -#pragma once -#include "../detail/type_mat2x3.hpp" - -namespace glm -{ - /// @addtogroup core_matrix - /// @{ - - /// 2 columns of 3 components matrix of double-precision floating-point numbers. 
- /// - /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices - typedef mat<2, 3, double, defaultp> dmat2x3; - - /// @} -}//namespace glm diff --git a/third_party/glm/ext/matrix_double2x3_precision.hpp b/third_party/glm/ext/matrix_double2x3_precision.hpp deleted file mode 100755 index 098fb60..0000000 --- a/third_party/glm/ext/matrix_double2x3_precision.hpp +++ /dev/null @@ -1,31 +0,0 @@ -/// @ref core -/// @file glm/ext/matrix_double2x3_precision.hpp - -#pragma once -#include "../detail/type_mat2x3.hpp" - -namespace glm -{ - /// @addtogroup core_matrix_precision - /// @{ - - /// 2 columns of 3 components matrix of double-precision floating-point numbers using low precision arithmetic in term of ULPs. - /// - /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef mat<2, 3, double, lowp> lowp_dmat2x3; - - /// 2 columns of 3 components matrix of double-precision floating-point numbers using medium precision arithmetic in term of ULPs. - /// - /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef mat<2, 3, double, mediump> mediump_dmat2x3; - - /// 2 columns of 3 components matrix of double-precision floating-point numbers using medium precision arithmetic in term of ULPs. - /// - /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef mat<2, 3, double, highp> highp_dmat2x3; - - /// @} -}//namespace glm diff --git a/third_party/glm/ext/matrix_double2x4.hpp b/third_party/glm/ext/matrix_double2x4.hpp deleted file mode 100755 index 499284b..0000000 --- a/third_party/glm/ext/matrix_double2x4.hpp +++ /dev/null @@ -1,18 +0,0 @@ -/// @ref core -/// @file glm/ext/matrix_double2x4.hpp - -#pragma once -#include "../detail/type_mat2x4.hpp" - -namespace glm -{ - /// @addtogroup core_matrix - /// @{ - - /// 2 columns of 4 components matrix of double-precision floating-point numbers. - /// - /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices - typedef mat<2, 4, double, defaultp> dmat2x4; - - /// @} -}//namespace glm diff --git a/third_party/glm/ext/matrix_double2x4_precision.hpp b/third_party/glm/ext/matrix_double2x4_precision.hpp deleted file mode 100755 index 9b61ebc..0000000 --- a/third_party/glm/ext/matrix_double2x4_precision.hpp +++ /dev/null @@ -1,31 +0,0 @@ -/// @ref core -/// @file glm/ext/matrix_double2x4_precision.hpp - -#pragma once -#include "../detail/type_mat2x4.hpp" - -namespace glm -{ - /// @addtogroup core_matrix_precision - /// @{ - - /// 2 columns of 4 components matrix of double-precision floating-point numbers using low precision arithmetic in term of ULPs. - /// - /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef mat<2, 4, double, lowp> lowp_dmat2x4; - - /// 2 columns of 4 components matrix of double-precision floating-point numbers using medium precision arithmetic in term of ULPs. - /// - /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef mat<2, 4, double, mediump> mediump_dmat2x4; - - /// 2 columns of 4 components matrix of double-precision floating-point numbers using medium precision arithmetic in term of ULPs. 
- /// - /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef mat<2, 4, double, highp> highp_dmat2x4; - - /// @} -}//namespace glm diff --git a/third_party/glm/ext/matrix_double3x2.hpp b/third_party/glm/ext/matrix_double3x2.hpp deleted file mode 100755 index dd23f36..0000000 --- a/third_party/glm/ext/matrix_double3x2.hpp +++ /dev/null @@ -1,18 +0,0 @@ -/// @ref core -/// @file glm/ext/matrix_double3x2.hpp - -#pragma once -#include "../detail/type_mat3x2.hpp" - -namespace glm -{ - /// @addtogroup core_matrix - /// @{ - - /// 3 columns of 2 components matrix of double-precision floating-point numbers. - /// - /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices - typedef mat<3, 2, double, defaultp> dmat3x2; - - /// @} -}//namespace glm diff --git a/third_party/glm/ext/matrix_double3x2_precision.hpp b/third_party/glm/ext/matrix_double3x2_precision.hpp deleted file mode 100755 index 068d9e9..0000000 --- a/third_party/glm/ext/matrix_double3x2_precision.hpp +++ /dev/null @@ -1,31 +0,0 @@ -/// @ref core -/// @file glm/ext/matrix_double3x2_precision.hpp - -#pragma once -#include "../detail/type_mat3x2.hpp" - -namespace glm -{ - /// @addtogroup core_matrix_precision - /// @{ - - /// 3 columns of 2 components matrix of double-precision floating-point numbers using low precision arithmetic in term of ULPs. - /// - /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef mat<3, 2, double, lowp> lowp_dmat3x2; - - /// 3 columns of 2 components matrix of double-precision floating-point numbers using medium precision arithmetic in term of ULPs. - /// - /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef mat<3, 2, double, mediump> mediump_dmat3x2; - - /// 3 columns of 2 components matrix of double-precision floating-point numbers using medium precision arithmetic in term of ULPs. - /// - /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef mat<3, 2, double, highp> highp_dmat3x2; - - /// @} -}//namespace glm diff --git a/third_party/glm/ext/matrix_double3x3.hpp b/third_party/glm/ext/matrix_double3x3.hpp deleted file mode 100755 index 53572b7..0000000 --- a/third_party/glm/ext/matrix_double3x3.hpp +++ /dev/null @@ -1,23 +0,0 @@ -/// @ref core -/// @file glm/ext/matrix_double3x3.hpp - -#pragma once -#include "../detail/type_mat3x3.hpp" - -namespace glm -{ - /// @addtogroup core_matrix - /// @{ - - /// 3 columns of 3 components matrix of double-precision floating-point numbers. - /// - /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices - typedef mat<3, 3, double, defaultp> dmat3x3; - - /// 3 columns of 3 components matrix of double-precision floating-point numbers. 
- /// - /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices - typedef mat<3, 3, double, defaultp> dmat3; - - /// @} -}//namespace glm diff --git a/third_party/glm/ext/matrix_double3x3_precision.hpp b/third_party/glm/ext/matrix_double3x3_precision.hpp deleted file mode 100755 index 8691e78..0000000 --- a/third_party/glm/ext/matrix_double3x3_precision.hpp +++ /dev/null @@ -1,49 +0,0 @@ -/// @ref core -/// @file glm/ext/matrix_double3x3_precision.hpp - -#pragma once -#include "../detail/type_mat3x3.hpp" - -namespace glm -{ - /// @addtogroup core_matrix_precision - /// @{ - - /// 3 columns of 3 components matrix of double-precision floating-point numbers using low precision arithmetic in term of ULPs. - /// - /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef mat<3, 3, double, lowp> lowp_dmat3; - - /// 3 columns of 3 components matrix of double-precision floating-point numbers using medium precision arithmetic in term of ULPs. - /// - /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef mat<3, 3, double, mediump> mediump_dmat3; - - /// 3 columns of 3 components matrix of double-precision floating-point numbers using medium precision arithmetic in term of ULPs. - /// - /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef mat<3, 3, double, highp> highp_dmat3; - - /// 3 columns of 3 components matrix of double-precision floating-point numbers using low precision arithmetic in term of ULPs. - /// - /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef mat<3, 3, double, lowp> lowp_dmat3x3; - - /// 3 columns of 3 components matrix of double-precision floating-point numbers using medium precision arithmetic in term of ULPs. - /// - /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef mat<3, 3, double, mediump> mediump_dmat3x3; - - /// 3 columns of 3 components matrix of double-precision floating-point numbers using medium precision arithmetic in term of ULPs. - /// - /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef mat<3, 3, double, highp> highp_dmat3x3; - - /// @} -}//namespace glm diff --git a/third_party/glm/ext/matrix_double3x4.hpp b/third_party/glm/ext/matrix_double3x4.hpp deleted file mode 100755 index c572d63..0000000 --- a/third_party/glm/ext/matrix_double3x4.hpp +++ /dev/null @@ -1,18 +0,0 @@ -/// @ref core -/// @file glm/ext/matrix_double3x4.hpp - -#pragma once -#include "../detail/type_mat3x4.hpp" - -namespace glm -{ - /// @addtogroup core_matrix - /// @{ - - /// 3 columns of 4 components matrix of double-precision floating-point numbers. 
- /// - /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices - typedef mat<3, 4, double, defaultp> dmat3x4; - - /// @} -}//namespace glm diff --git a/third_party/glm/ext/matrix_double3x4_precision.hpp b/third_party/glm/ext/matrix_double3x4_precision.hpp deleted file mode 100755 index f040217..0000000 --- a/third_party/glm/ext/matrix_double3x4_precision.hpp +++ /dev/null @@ -1,31 +0,0 @@ -/// @ref core -/// @file glm/ext/matrix_double3x4_precision.hpp - -#pragma once -#include "../detail/type_mat3x4.hpp" - -namespace glm -{ - /// @addtogroup core_matrix_precision - /// @{ - - /// 3 columns of 4 components matrix of double-precision floating-point numbers using low precision arithmetic in term of ULPs. - /// - /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef mat<3, 4, double, lowp> lowp_dmat3x4; - - /// 3 columns of 4 components matrix of double-precision floating-point numbers using medium precision arithmetic in term of ULPs. - /// - /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef mat<3, 4, double, mediump> mediump_dmat3x4; - - /// 3 columns of 4 components matrix of double-precision floating-point numbers using medium precision arithmetic in term of ULPs. - /// - /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef mat<3, 4, double, highp> highp_dmat3x4; - - /// @} -}//namespace glm diff --git a/third_party/glm/ext/matrix_double4x2.hpp b/third_party/glm/ext/matrix_double4x2.hpp deleted file mode 100755 index 9b229f4..0000000 --- a/third_party/glm/ext/matrix_double4x2.hpp +++ /dev/null @@ -1,18 +0,0 @@ -/// @ref core -/// @file glm/ext/matrix_double4x2.hpp - -#pragma once -#include "../detail/type_mat4x2.hpp" - -namespace glm -{ - /// @addtogroup core_matrix - /// @{ - - /// 4 columns of 2 components matrix of double-precision floating-point numbers. - /// - /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices - typedef mat<4, 2, double, defaultp> dmat4x2; - - /// @} -}//namespace glm diff --git a/third_party/glm/ext/matrix_double4x2_precision.hpp b/third_party/glm/ext/matrix_double4x2_precision.hpp deleted file mode 100755 index 6ad18ba..0000000 --- a/third_party/glm/ext/matrix_double4x2_precision.hpp +++ /dev/null @@ -1,31 +0,0 @@ -/// @ref core -/// @file glm/ext/matrix_double4x2_precision.hpp - -#pragma once -#include "../detail/type_mat4x2.hpp" - -namespace glm -{ - /// @addtogroup core_matrix_precision - /// @{ - - /// 4 columns of 2 components matrix of double-precision floating-point numbers using low precision arithmetic in term of ULPs. - /// - /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef mat<4, 2, double, lowp> lowp_dmat4x2; - - /// 4 columns of 2 components matrix of double-precision floating-point numbers using medium precision arithmetic in term of ULPs. - /// - /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef mat<4, 2, double, mediump> mediump_dmat4x2; - - /// 4 columns of 2 components matrix of double-precision floating-point numbers using medium precision arithmetic in term of ULPs. 
- /// - /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef mat<4, 2, double, highp> highp_dmat4x2; - - /// @} -}//namespace glm diff --git a/third_party/glm/ext/matrix_double4x3.hpp b/third_party/glm/ext/matrix_double4x3.hpp deleted file mode 100755 index dca4cf9..0000000 --- a/third_party/glm/ext/matrix_double4x3.hpp +++ /dev/null @@ -1,18 +0,0 @@ -/// @ref core -/// @file glm/ext/matrix_double4x3.hpp - -#pragma once -#include "../detail/type_mat4x3.hpp" - -namespace glm -{ - /// @addtogroup core_matrix - /// @{ - - /// 4 columns of 3 components matrix of double-precision floating-point numbers. - /// - /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices - typedef mat<4, 3, double, defaultp> dmat4x3; - - /// @} -}//namespace glm diff --git a/third_party/glm/ext/matrix_double4x3_precision.hpp b/third_party/glm/ext/matrix_double4x3_precision.hpp deleted file mode 100755 index f7371de..0000000 --- a/third_party/glm/ext/matrix_double4x3_precision.hpp +++ /dev/null @@ -1,31 +0,0 @@ -/// @ref core -/// @file glm/ext/matrix_double4x3_precision.hpp - -#pragma once -#include "../detail/type_mat4x3.hpp" - -namespace glm -{ - /// @addtogroup core_matrix_precision - /// @{ - - /// 4 columns of 3 components matrix of double-precision floating-point numbers using low precision arithmetic in term of ULPs. - /// - /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef mat<4, 3, double, lowp> lowp_dmat4x3; - - /// 4 columns of 3 components matrix of double-precision floating-point numbers using medium precision arithmetic in term of ULPs. - /// - /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef mat<4, 3, double, mediump> mediump_dmat4x3; - - /// 4 columns of 3 components matrix of double-precision floating-point numbers using medium precision arithmetic in term of ULPs. - /// - /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef mat<4, 3, double, highp> highp_dmat4x3; - - /// @} -}//namespace glm diff --git a/third_party/glm/ext/matrix_double4x4.hpp b/third_party/glm/ext/matrix_double4x4.hpp deleted file mode 100755 index 81e1bf6..0000000 --- a/third_party/glm/ext/matrix_double4x4.hpp +++ /dev/null @@ -1,23 +0,0 @@ -/// @ref core -/// @file glm/ext/matrix_double4x4.hpp - -#pragma once -#include "../detail/type_mat4x4.hpp" - -namespace glm -{ - /// @addtogroup core_matrix - /// @{ - - /// 4 columns of 4 components matrix of double-precision floating-point numbers. - /// - /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices - typedef mat<4, 4, double, defaultp> dmat4x4; - - /// 4 columns of 4 components matrix of double-precision floating-point numbers. 
- /// - /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices - typedef mat<4, 4, double, defaultp> dmat4; - - /// @} -}//namespace glm diff --git a/third_party/glm/ext/matrix_double4x4_precision.hpp b/third_party/glm/ext/matrix_double4x4_precision.hpp deleted file mode 100755 index 4c36a84..0000000 --- a/third_party/glm/ext/matrix_double4x4_precision.hpp +++ /dev/null @@ -1,49 +0,0 @@ -/// @ref core -/// @file glm/ext/matrix_double4x4_precision.hpp - -#pragma once -#include "../detail/type_mat4x4.hpp" - -namespace glm -{ - /// @addtogroup core_matrix_precision - /// @{ - - /// 4 columns of 4 components matrix of double-precision floating-point numbers using low precision arithmetic in term of ULPs. - /// - /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef mat<4, 4, double, lowp> lowp_dmat4; - - /// 4 columns of 4 components matrix of double-precision floating-point numbers using medium precision arithmetic in term of ULPs. - /// - /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef mat<4, 4, double, mediump> mediump_dmat4; - - /// 4 columns of 4 components matrix of double-precision floating-point numbers using medium precision arithmetic in term of ULPs. - /// - /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef mat<4, 4, double, highp> highp_dmat4; - - /// 4 columns of 4 components matrix of double-precision floating-point numbers using low precision arithmetic in term of ULPs. - /// - /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef mat<4, 4, double, lowp> lowp_dmat4x4; - - /// 4 columns of 4 components matrix of double-precision floating-point numbers using medium precision arithmetic in term of ULPs. - /// - /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef mat<4, 4, double, mediump> mediump_dmat4x4; - - /// 4 columns of 4 components matrix of double-precision floating-point numbers using medium precision arithmetic in term of ULPs. - /// - /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef mat<4, 4, double, highp> highp_dmat4x4; - - /// @} -}//namespace glm diff --git a/third_party/glm/ext/matrix_float2x2.hpp b/third_party/glm/ext/matrix_float2x2.hpp deleted file mode 100755 index 53df921..0000000 --- a/third_party/glm/ext/matrix_float2x2.hpp +++ /dev/null @@ -1,23 +0,0 @@ -/// @ref core -/// @file glm/ext/matrix_float2x2.hpp - -#pragma once -#include "../detail/type_mat2x2.hpp" - -namespace glm -{ - /// @addtogroup core_matrix - /// @{ - - /// 2 columns of 2 components matrix of single-precision floating-point numbers. - /// - /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices - typedef mat<2, 2, float, defaultp> mat2x2; - - /// 2 columns of 2 components matrix of single-precision floating-point numbers. 
- /// - /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices - typedef mat<2, 2, float, defaultp> mat2; - - /// @} -}//namespace glm diff --git a/third_party/glm/ext/matrix_float2x2_precision.hpp b/third_party/glm/ext/matrix_float2x2_precision.hpp deleted file mode 100755 index 898b6db..0000000 --- a/third_party/glm/ext/matrix_float2x2_precision.hpp +++ /dev/null @@ -1,49 +0,0 @@ -/// @ref core -/// @file glm/ext/matrix_float2x2_precision.hpp - -#pragma once -#include "../detail/type_mat2x2.hpp" - -namespace glm -{ - /// @addtogroup core_matrix_precision - /// @{ - - /// 2 columns of 2 components matrix of single-precision floating-point numbers using low precision arithmetic in term of ULPs. - /// - /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef mat<2, 2, float, lowp> lowp_mat2; - - /// 2 columns of 2 components matrix of single-precision floating-point numbers using medium precision arithmetic in term of ULPs. - /// - /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef mat<2, 2, float, mediump> mediump_mat2; - - /// 2 columns of 2 components matrix of single-precision floating-point numbers using high precision arithmetic in term of ULPs. - /// - /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef mat<2, 2, float, highp> highp_mat2; - - /// 2 columns of 2 components matrix of single-precision floating-point numbers using low precision arithmetic in term of ULPs. - /// - /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef mat<2, 2, float, lowp> lowp_mat2x2; - - /// 2 columns of 2 components matrix of single-precision floating-point numbers using medium precision arithmetic in term of ULPs. - /// - /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef mat<2, 2, float, mediump> mediump_mat2x2; - - /// 2 columns of 2 components matrix of single-precision floating-point numbers using high precision arithmetic in term of ULPs. - /// - /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef mat<2, 2, float, highp> highp_mat2x2; - - /// @} -}//namespace glm diff --git a/third_party/glm/ext/matrix_float2x3.hpp b/third_party/glm/ext/matrix_float2x3.hpp deleted file mode 100755 index 6f68822..0000000 --- a/third_party/glm/ext/matrix_float2x3.hpp +++ /dev/null @@ -1,18 +0,0 @@ -/// @ref core -/// @file glm/ext/matrix_float2x3.hpp - -#pragma once -#include "../detail/type_mat2x3.hpp" - -namespace glm -{ - /// @addtogroup core_matrix - /// @{ - - /// 2 columns of 3 components matrix of single-precision floating-point numbers. 
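The matrix_float*/matrix_double* headers removed in this range declare nothing but type aliases: mat2, dmat2 and the lowp_/mediump_/highp_ variants are typedefs of the generic mat<C, R, T, Q> template, exactly as the deleted files state. A small compile-time check, assuming <glm/glm.hpp> pulls in the core typedefs as usual, makes the relationship explicit:

    #include <glm/glm.hpp>
    #include <type_traits>

    // The core typedefs are plain aliases of the mat template with a fixed
    // component type and precision qualifier.
    static_assert(std::is_same<glm::mat2,      glm::mat<2, 2, float,  glm::defaultp>>::value, "mat2 alias");
    static_assert(std::is_same<glm::dmat2,     glm::mat<2, 2, double, glm::defaultp>>::value, "dmat2 alias");
    static_assert(std::is_same<glm::lowp_mat2, glm::mat<2, 2, float,  glm::lowp>>::value,     "lowp_mat2 alias");

    int main() {}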
- /// - /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices - typedef mat<2, 3, float, defaultp> mat2x3; - - /// @} -}//namespace glm diff --git a/third_party/glm/ext/matrix_float2x3_precision.hpp b/third_party/glm/ext/matrix_float2x3_precision.hpp deleted file mode 100755 index 50c1032..0000000 --- a/third_party/glm/ext/matrix_float2x3_precision.hpp +++ /dev/null @@ -1,31 +0,0 @@ -/// @ref core -/// @file glm/ext/matrix_float2x3_precision.hpp - -#pragma once -#include "../detail/type_mat2x3.hpp" - -namespace glm -{ - /// @addtogroup core_matrix_precision - /// @{ - - /// 2 columns of 3 components matrix of single-precision floating-point numbers using low precision arithmetic in term of ULPs. - /// - /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef mat<2, 3, float, lowp> lowp_mat2x3; - - /// 2 columns of 3 components matrix of single-precision floating-point numbers using medium precision arithmetic in term of ULPs. - /// - /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef mat<2, 3, float, mediump> mediump_mat2x3; - - /// 2 columns of 3 components matrix of single-precision floating-point numbers using high precision arithmetic in term of ULPs. - /// - /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef mat<2, 3, float, highp> highp_mat2x3; - - /// @} -}//namespace glm diff --git a/third_party/glm/ext/matrix_float2x4.hpp b/third_party/glm/ext/matrix_float2x4.hpp deleted file mode 100755 index 30f30de..0000000 --- a/third_party/glm/ext/matrix_float2x4.hpp +++ /dev/null @@ -1,18 +0,0 @@ -/// @ref core -/// @file glm/ext/matrix_float2x4.hpp - -#pragma once -#include "../detail/type_mat2x4.hpp" - -namespace glm -{ - /// @addtogroup core_matrix - /// @{ - - /// 2 columns of 4 components matrix of single-precision floating-point numbers. - /// - /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices - typedef mat<2, 4, float, defaultp> mat2x4; - - /// @} -}//namespace glm diff --git a/third_party/glm/ext/matrix_float2x4_precision.hpp b/third_party/glm/ext/matrix_float2x4_precision.hpp deleted file mode 100755 index 079d638..0000000 --- a/third_party/glm/ext/matrix_float2x4_precision.hpp +++ /dev/null @@ -1,31 +0,0 @@ -/// @ref core -/// @file glm/ext/matrix_float2x4_precision.hpp - -#pragma once -#include "../detail/type_mat2x4.hpp" - -namespace glm -{ - /// @addtogroup core_matrix_precision - /// @{ - - /// 2 columns of 4 components matrix of single-precision floating-point numbers using low precision arithmetic in term of ULPs. - /// - /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef mat<2, 4, float, lowp> lowp_mat2x4; - - /// 2 columns of 4 components matrix of single-precision floating-point numbers using medium precision arithmetic in term of ULPs. - /// - /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef mat<2, 4, float, mediump> mediump_mat2x4; - - /// 2 columns of 4 components matrix of single-precision floating-point numbers using high precision arithmetic in term of ULPs. 
- /// - /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef mat<2, 4, float, highp> highp_mat2x4; - - /// @} -}//namespace glm diff --git a/third_party/glm/ext/matrix_float3x2.hpp b/third_party/glm/ext/matrix_float3x2.hpp deleted file mode 100755 index d39dd2f..0000000 --- a/third_party/glm/ext/matrix_float3x2.hpp +++ /dev/null @@ -1,18 +0,0 @@ -/// @ref core -/// @file glm/ext/matrix_float3x2.hpp - -#pragma once -#include "../detail/type_mat3x2.hpp" - -namespace glm -{ - /// @addtogroup core - /// @{ - - /// 3 columns of 2 components matrix of single-precision floating-point numbers. - /// - /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices - typedef mat<3, 2, float, defaultp> mat3x2; - - /// @} -}//namespace glm diff --git a/third_party/glm/ext/matrix_float3x2_precision.hpp b/third_party/glm/ext/matrix_float3x2_precision.hpp deleted file mode 100755 index 8572c2a..0000000 --- a/third_party/glm/ext/matrix_float3x2_precision.hpp +++ /dev/null @@ -1,31 +0,0 @@ -/// @ref core -/// @file glm/ext/matrix_float3x2_precision.hpp - -#pragma once -#include "../detail/type_mat3x2.hpp" - -namespace glm -{ - /// @addtogroup core_matrix_precision - /// @{ - - /// 3 columns of 2 components matrix of single-precision floating-point numbers using low precision arithmetic in term of ULPs. - /// - /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef mat<3, 2, float, lowp> lowp_mat3x2; - - /// 3 columns of 2 components matrix of single-precision floating-point numbers using medium precision arithmetic in term of ULPs. - /// - /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef mat<3, 2, float, mediump> mediump_mat3x2; - - /// 3 columns of 2 components matrix of single-precision floating-point numbers using high precision arithmetic in term of ULPs. - /// - /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef mat<3, 2, float, highp> highp_mat3x2; - - /// @} -}//namespace glm diff --git a/third_party/glm/ext/matrix_float3x3.hpp b/third_party/glm/ext/matrix_float3x3.hpp deleted file mode 100755 index 177d809..0000000 --- a/third_party/glm/ext/matrix_float3x3.hpp +++ /dev/null @@ -1,23 +0,0 @@ -/// @ref core -/// @file glm/ext/matrix_float3x3.hpp - -#pragma once -#include "../detail/type_mat3x3.hpp" - -namespace glm -{ - /// @addtogroup core_matrix - /// @{ - - /// 3 columns of 3 components matrix of single-precision floating-point numbers. - /// - /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices - typedef mat<3, 3, float, defaultp> mat3x3; - - /// 3 columns of 3 components matrix of single-precision floating-point numbers. 
- /// - /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices - typedef mat<3, 3, float, defaultp> mat3; - - /// @} -}//namespace glm diff --git a/third_party/glm/ext/matrix_float3x3_precision.hpp b/third_party/glm/ext/matrix_float3x3_precision.hpp deleted file mode 100755 index 8a900c1..0000000 --- a/third_party/glm/ext/matrix_float3x3_precision.hpp +++ /dev/null @@ -1,49 +0,0 @@ -/// @ref core -/// @file glm/ext/matrix_float3x3_precision.hpp - -#pragma once -#include "../detail/type_mat3x3.hpp" - -namespace glm -{ - /// @addtogroup core_matrix_precision - /// @{ - - /// 3 columns of 3 components matrix of single-precision floating-point numbers using low precision arithmetic in term of ULPs. - /// - /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef mat<3, 3, float, lowp> lowp_mat3; - - /// 3 columns of 3 components matrix of single-precision floating-point numbers using medium precision arithmetic in term of ULPs. - /// - /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef mat<3, 3, float, mediump> mediump_mat3; - - /// 3 columns of 3 components matrix of single-precision floating-point numbers using high precision arithmetic in term of ULPs. - /// - /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef mat<3, 3, float, highp> highp_mat3; - - /// 3 columns of 3 components matrix of single-precision floating-point numbers using low precision arithmetic in term of ULPs. - /// - /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef mat<3, 3, float, lowp> lowp_mat3x3; - - /// 3 columns of 3 components matrix of single-precision floating-point numbers using medium precision arithmetic in term of ULPs. - /// - /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef mat<3, 3, float, mediump> mediump_mat3x3; - - /// 3 columns of 3 components matrix of single-precision floating-point numbers using high precision arithmetic in term of ULPs. - /// - /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef mat<3, 3, float, highp> highp_mat3x3; - - /// @} -}//namespace glm diff --git a/third_party/glm/ext/matrix_float3x4.hpp b/third_party/glm/ext/matrix_float3x4.hpp deleted file mode 100755 index 64b8459..0000000 --- a/third_party/glm/ext/matrix_float3x4.hpp +++ /dev/null @@ -1,18 +0,0 @@ -/// @ref core -/// @file glm/ext/matrix_float3x4.hpp - -#pragma once -#include "../detail/type_mat3x4.hpp" - -namespace glm -{ - /// @addtogroup core_matrix - /// @{ - - /// 3 columns of 4 components matrix of single-precision floating-point numbers. 
- /// - /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices - typedef mat<3, 4, float, defaultp> mat3x4; - - /// @} -}//namespace glm diff --git a/third_party/glm/ext/matrix_float3x4_precision.hpp b/third_party/glm/ext/matrix_float3x4_precision.hpp deleted file mode 100755 index bc36bf1..0000000 --- a/third_party/glm/ext/matrix_float3x4_precision.hpp +++ /dev/null @@ -1,31 +0,0 @@ -/// @ref core -/// @file glm/ext/matrix_float3x4_precision.hpp - -#pragma once -#include "../detail/type_mat3x4.hpp" - -namespace glm -{ - /// @addtogroup core_matrix_precision - /// @{ - - /// 3 columns of 4 components matrix of single-precision floating-point numbers using low precision arithmetic in term of ULPs. - /// - /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef mat<3, 4, float, lowp> lowp_mat3x4; - - /// 3 columns of 4 components matrix of single-precision floating-point numbers using medium precision arithmetic in term of ULPs. - /// - /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef mat<3, 4, float, mediump> mediump_mat3x4; - - /// 3 columns of 4 components matrix of single-precision floating-point numbers using high precision arithmetic in term of ULPs. - /// - /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef mat<3, 4, float, highp> highp_mat3x4; - - /// @} -}//namespace glm diff --git a/third_party/glm/ext/matrix_float4x2.hpp b/third_party/glm/ext/matrix_float4x2.hpp deleted file mode 100755 index 1ed5227..0000000 --- a/third_party/glm/ext/matrix_float4x2.hpp +++ /dev/null @@ -1,18 +0,0 @@ -/// @ref core -/// @file glm/ext/matrix_float4x2.hpp - -#pragma once -#include "../detail/type_mat4x2.hpp" - -namespace glm -{ - /// @addtogroup core_matrix - /// @{ - - /// 4 columns of 2 components matrix of single-precision floating-point numbers. - /// - /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices - typedef mat<4, 2, float, defaultp> mat4x2; - - /// @} -}//namespace glm diff --git a/third_party/glm/ext/matrix_float4x2_precision.hpp b/third_party/glm/ext/matrix_float4x2_precision.hpp deleted file mode 100755 index 88fd069..0000000 --- a/third_party/glm/ext/matrix_float4x2_precision.hpp +++ /dev/null @@ -1,31 +0,0 @@ -/// @ref core -/// @file glm/ext/matrix_float2x2_precision.hpp - -#pragma once -#include "../detail/type_mat2x2.hpp" - -namespace glm -{ - /// @addtogroup core_matrix_precision - /// @{ - - /// 4 columns of 2 components matrix of single-precision floating-point numbers using low precision arithmetic in term of ULPs. - /// - /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef mat<4, 2, float, lowp> lowp_mat4x2; - - /// 4 columns of 2 components matrix of single-precision floating-point numbers using medium precision arithmetic in term of ULPs. - /// - /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef mat<4, 2, float, mediump> mediump_mat4x2; - - /// 4 columns of 2 components matrix of single-precision floating-point numbers using high precision arithmetic in term of ULPs. 
- /// - /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef mat<4, 2, float, highp> highp_mat4x2; - - /// @} -}//namespace glm diff --git a/third_party/glm/ext/matrix_float4x3.hpp b/third_party/glm/ext/matrix_float4x3.hpp deleted file mode 100755 index 5dbe765..0000000 --- a/third_party/glm/ext/matrix_float4x3.hpp +++ /dev/null @@ -1,18 +0,0 @@ -/// @ref core -/// @file glm/ext/matrix_float4x3.hpp - -#pragma once -#include "../detail/type_mat4x3.hpp" - -namespace glm -{ - /// @addtogroup core_matrix - /// @{ - - /// 4 columns of 3 components matrix of single-precision floating-point numbers. - /// - /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices - typedef mat<4, 3, float, defaultp> mat4x3; - - /// @} -}//namespace glm diff --git a/third_party/glm/ext/matrix_float4x3_precision.hpp b/third_party/glm/ext/matrix_float4x3_precision.hpp deleted file mode 100755 index 846ed4f..0000000 --- a/third_party/glm/ext/matrix_float4x3_precision.hpp +++ /dev/null @@ -1,31 +0,0 @@ -/// @ref core -/// @file glm/ext/matrix_float4x3_precision.hpp - -#pragma once -#include "../detail/type_mat4x3.hpp" - -namespace glm -{ - /// @addtogroup core_matrix_precision - /// @{ - - /// 4 columns of 3 components matrix of single-precision floating-point numbers using low precision arithmetic in term of ULPs. - /// - /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef mat<4, 3, float, lowp> lowp_mat4x3; - - /// 4 columns of 3 components matrix of single-precision floating-point numbers using medium precision arithmetic in term of ULPs. - /// - /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef mat<4, 3, float, mediump> mediump_mat4x3; - - /// 4 columns of 3 components matrix of single-precision floating-point numbers using high precision arithmetic in term of ULPs. - /// - /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef mat<4, 3, float, highp> highp_mat4x3; - - /// @} -}//namespace glm diff --git a/third_party/glm/ext/matrix_float4x4.hpp b/third_party/glm/ext/matrix_float4x4.hpp deleted file mode 100755 index 5ba111d..0000000 --- a/third_party/glm/ext/matrix_float4x4.hpp +++ /dev/null @@ -1,23 +0,0 @@ -/// @ref core -/// @file glm/ext/matrix_float4x4.hpp - -#pragma once -#include "../detail/type_mat4x4.hpp" - -namespace glm -{ - /// @ingroup core_matrix - /// @{ - - /// 4 columns of 4 components matrix of single-precision floating-point numbers. - /// - /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices - typedef mat<4, 4, float, defaultp> mat4x4; - - /// 4 columns of 4 components matrix of single-precision floating-point numbers. 
- /// - /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices - typedef mat<4, 4, float, defaultp> mat4; - - /// @} -}//namespace glm diff --git a/third_party/glm/ext/matrix_float4x4_precision.hpp b/third_party/glm/ext/matrix_float4x4_precision.hpp deleted file mode 100755 index 597149b..0000000 --- a/third_party/glm/ext/matrix_float4x4_precision.hpp +++ /dev/null @@ -1,49 +0,0 @@ -/// @ref core -/// @file glm/ext/matrix_float4x4_precision.hpp - -#pragma once -#include "../detail/type_mat4x4.hpp" - -namespace glm -{ - /// @addtogroup core_matrix_precision - /// @{ - - /// 4 columns of 4 components matrix of single-precision floating-point numbers using low precision arithmetic in term of ULPs. - /// - /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef mat<4, 4, float, lowp> lowp_mat4; - - /// 4 columns of 4 components matrix of single-precision floating-point numbers using medium precision arithmetic in term of ULPs. - /// - /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef mat<4, 4, float, mediump> mediump_mat4; - - /// 4 columns of 4 components matrix of single-precision floating-point numbers using high precision arithmetic in term of ULPs. - /// - /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef mat<4, 4, float, highp> highp_mat4; - - /// 4 columns of 4 components matrix of single-precision floating-point numbers using low precision arithmetic in term of ULPs. - /// - /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef mat<4, 4, float, lowp> lowp_mat4x4; - - /// 4 columns of 4 components matrix of single-precision floating-point numbers using medium precision arithmetic in term of ULPs. - /// - /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef mat<4, 4, float, mediump> mediump_mat4x4; - - /// 4 columns of 4 components matrix of single-precision floating-point numbers using high precision arithmetic in term of ULPs. - /// - /// @see GLSL 4.20.8 specification, section 4.1.6 Matrices - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef mat<4, 4, float, highp> highp_mat4x4; - - /// @} -}//namespace glm diff --git a/third_party/glm/ext/matrix_projection.hpp b/third_party/glm/ext/matrix_projection.hpp deleted file mode 100755 index 51fd01b..0000000 --- a/third_party/glm/ext/matrix_projection.hpp +++ /dev/null @@ -1,149 +0,0 @@ -/// @ref ext_matrix_projection -/// @file glm/ext/matrix_projection.hpp -/// -/// @defgroup ext_matrix_projection GLM_EXT_matrix_projection -/// @ingroup ext -/// -/// Functions that generate common projection transformation matrices. -/// -/// The matrices generated by this extension use standard OpenGL fixed-function -/// conventions. For example, the lookAt function generates a transform from world -/// space into the specific eye space that the projective matrix functions -/// (perspective, ortho, etc) are designed to expect. The OpenGL compatibility -/// specifications defines the particular layout of this eye space. -/// -/// Include to use the features of this extension. 
-/// -/// @see ext_matrix_transform -/// @see ext_matrix_clip_space - -#pragma once - -// Dependencies -#include "../gtc/constants.hpp" -#include "../geometric.hpp" -#include "../trigonometric.hpp" -#include "../matrix.hpp" - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# pragma message("GLM: GLM_EXT_matrix_projection extension included") -#endif - -namespace glm -{ - /// @addtogroup ext_matrix_projection - /// @{ - - /// Map the specified object coordinates (obj.x, obj.y, obj.z) into window coordinates. - /// The near and far clip planes correspond to z normalized device coordinates of 0 and +1 respectively. (Direct3D clip volume definition) - /// - /// @param obj Specify the object coordinates. - /// @param model Specifies the current modelview matrix - /// @param proj Specifies the current projection matrix - /// @param viewport Specifies the current viewport - /// @return Return the computed window coordinates. - /// @tparam T Native type used for the computation. Currently supported: half (not recommended), float or double. - /// @tparam U Currently supported: Floating-point types and integer types. - /// - /// @see gluProject man page - template - GLM_FUNC_DECL vec<3, T, Q> projectZO( - vec<3, T, Q> const& obj, mat<4, 4, T, Q> const& model, mat<4, 4, T, Q> const& proj, vec<4, U, Q> const& viewport); - - /// Map the specified object coordinates (obj.x, obj.y, obj.z) into window coordinates. - /// The near and far clip planes correspond to z normalized device coordinates of -1 and +1 respectively. (OpenGL clip volume definition) - /// - /// @param obj Specify the object coordinates. - /// @param model Specifies the current modelview matrix - /// @param proj Specifies the current projection matrix - /// @param viewport Specifies the current viewport - /// @return Return the computed window coordinates. - /// @tparam T Native type used for the computation. Currently supported: half (not recommended), float or double. - /// @tparam U Currently supported: Floating-point types and integer types. - /// - /// @see gluProject man page - template - GLM_FUNC_DECL vec<3, T, Q> projectNO( - vec<3, T, Q> const& obj, mat<4, 4, T, Q> const& model, mat<4, 4, T, Q> const& proj, vec<4, U, Q> const& viewport); - - /// Map the specified object coordinates (obj.x, obj.y, obj.z) into window coordinates using default near and far clip planes definition. - /// To change default near and far clip planes definition use GLM_FORCE_DEPTH_ZERO_TO_ONE. - /// - /// @param obj Specify the object coordinates. - /// @param model Specifies the current modelview matrix - /// @param proj Specifies the current projection matrix - /// @param viewport Specifies the current viewport - /// @return Return the computed window coordinates. - /// @tparam T Native type used for the computation. Currently supported: half (not recommended), float or double. - /// @tparam U Currently supported: Floating-point types and integer types. - /// - /// @see gluProject man page - template - GLM_FUNC_DECL vec<3, T, Q> project( - vec<3, T, Q> const& obj, mat<4, 4, T, Q> const& model, mat<4, 4, T, Q> const& proj, vec<4, U, Q> const& viewport); - - /// Map the specified window coordinates (win.x, win.y, win.z) into object coordinates. - /// The near and far clip planes correspond to z normalized device coordinates of 0 and +1 respectively. (Direct3D clip volume definition) - /// - /// @param win Specify the window coordinates to be mapped. 
- /// @param model Specifies the modelview matrix - /// @param proj Specifies the projection matrix - /// @param viewport Specifies the viewport - /// @return Returns the computed object coordinates. - /// @tparam T Native type used for the computation. Currently supported: half (not recommended), float or double. - /// @tparam U Currently supported: Floating-point types and integer types. - /// - /// @see gluUnProject man page - template - GLM_FUNC_DECL vec<3, T, Q> unProjectZO( - vec<3, T, Q> const& win, mat<4, 4, T, Q> const& model, mat<4, 4, T, Q> const& proj, vec<4, U, Q> const& viewport); - - /// Map the specified window coordinates (win.x, win.y, win.z) into object coordinates. - /// The near and far clip planes correspond to z normalized device coordinates of -1 and +1 respectively. (OpenGL clip volume definition) - /// - /// @param win Specify the window coordinates to be mapped. - /// @param model Specifies the modelview matrix - /// @param proj Specifies the projection matrix - /// @param viewport Specifies the viewport - /// @return Returns the computed object coordinates. - /// @tparam T Native type used for the computation. Currently supported: half (not recommended), float or double. - /// @tparam U Currently supported: Floating-point types and integer types. - /// - /// @see gluUnProject man page - template - GLM_FUNC_DECL vec<3, T, Q> unProjectNO( - vec<3, T, Q> const& win, mat<4, 4, T, Q> const& model, mat<4, 4, T, Q> const& proj, vec<4, U, Q> const& viewport); - - /// Map the specified window coordinates (win.x, win.y, win.z) into object coordinates using default near and far clip planes definition. - /// To change default near and far clip planes definition use GLM_FORCE_DEPTH_ZERO_TO_ONE. - /// - /// @param win Specify the window coordinates to be mapped. - /// @param model Specifies the modelview matrix - /// @param proj Specifies the projection matrix - /// @param viewport Specifies the viewport - /// @return Returns the computed object coordinates. - /// @tparam T Native type used for the computation. Currently supported: half (not recommended), float or double. - /// @tparam U Currently supported: Floating-point types and integer types. - /// - /// @see gluUnProject man page - template - GLM_FUNC_DECL vec<3, T, Q> unProject( - vec<3, T, Q> const& win, mat<4, 4, T, Q> const& model, mat<4, 4, T, Q> const& proj, vec<4, U, Q> const& viewport); - - /// Define a picking region - /// - /// @param center Specify the center of a picking region in window coordinates. - /// @param delta Specify the width and height, respectively, of the picking region in window coordinates. - /// @param viewport Rendering viewport - /// @tparam T Native type used for the computation. Currently supported: half (not recommended), float or double. - /// @tparam U Currently supported: Floating-point types and integer types. 
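The project/projectZO/projectNO and unProject* functions declared above map between object and window coordinates, with the default clip-volume convention selected by GLM_FORCE_DEPTH_ZERO_TO_ONE as the deleted header documents. A round-trip sketch, assuming perspective() and lookAt() from GLM's matrix_clip_space and matrix_transform extensions (not shown in this hunk) and an illustrative 800x600 viewport:

    #include <glm/glm.hpp>
    #include <glm/ext/matrix_clip_space.hpp> // perspective
    #include <glm/ext/matrix_transform.hpp>  // lookAt
    #include <glm/ext/matrix_projection.hpp> // project, unProject

    #include <iostream>

    int main()
    {
        glm::vec4 viewport(0.0f, 0.0f, 800.0f, 600.0f);            // x, y, width, height
        glm::mat4 view = glm::lookAt(glm::vec3(0.0f, 0.0f, 5.0f),  // eye
                                     glm::vec3(0.0f),              // center
                                     glm::vec3(0.0f, 1.0f, 0.0f)); // up
        glm::mat4 proj = glm::perspective(glm::radians(45.0f), 800.0f / 600.0f, 0.1f, 100.0f);

        // World space -> window coordinates; the depth convention follows the
        // clip-control setting described in the deleted header.
        glm::vec3 win = glm::project(glm::vec3(1.0f, 0.0f, 0.0f), view, proj, viewport);

        // Window coordinates -> back to world space; recovers roughly (1, 0, 0).
        glm::vec3 world = glm::unProject(win, view, proj, viewport);

        std::cout << world.x << ' ' << world.y << ' ' << world.z << '\n';
    }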
- /// - /// @see gluPickMatrix man page - template - GLM_FUNC_DECL mat<4, 4, T, Q> pickMatrix( - vec<2, T, Q> const& center, vec<2, T, Q> const& delta, vec<4, U, Q> const& viewport); - - /// @} -}//namespace glm - -#include "matrix_projection.inl" diff --git a/third_party/glm/ext/matrix_projection.inl b/third_party/glm/ext/matrix_projection.inl deleted file mode 100755 index 8b4eea9..0000000 --- a/third_party/glm/ext/matrix_projection.inl +++ /dev/null @@ -1,104 +0,0 @@ -namespace glm -{ - template - GLM_FUNC_QUALIFIER vec<3, T, Q> projectZO(vec<3, T, Q> const& obj, mat<4, 4, T, Q> const& model, mat<4, 4, T, Q> const& proj, vec<4, U, Q> const& viewport) - { - vec<4, T, Q> tmp = vec<4, T, Q>(obj, static_cast(1)); - tmp = model * tmp; - tmp = proj * tmp; - - tmp /= tmp.w; - tmp.x = tmp.x * static_cast(0.5) + static_cast(0.5); - tmp.y = tmp.y * static_cast(0.5) + static_cast(0.5); - - tmp[0] = tmp[0] * T(viewport[2]) + T(viewport[0]); - tmp[1] = tmp[1] * T(viewport[3]) + T(viewport[1]); - - return vec<3, T, Q>(tmp); - } - - template - GLM_FUNC_QUALIFIER vec<3, T, Q> projectNO(vec<3, T, Q> const& obj, mat<4, 4, T, Q> const& model, mat<4, 4, T, Q> const& proj, vec<4, U, Q> const& viewport) - { - vec<4, T, Q> tmp = vec<4, T, Q>(obj, static_cast(1)); - tmp = model * tmp; - tmp = proj * tmp; - - tmp /= tmp.w; - tmp = tmp * static_cast(0.5) + static_cast(0.5); - tmp[0] = tmp[0] * T(viewport[2]) + T(viewport[0]); - tmp[1] = tmp[1] * T(viewport[3]) + T(viewport[1]); - - return vec<3, T, Q>(tmp); - } - - template - GLM_FUNC_QUALIFIER vec<3, T, Q> project(vec<3, T, Q> const& obj, mat<4, 4, T, Q> const& model, mat<4, 4, T, Q> const& proj, vec<4, U, Q> const& viewport) - { - if(GLM_CONFIG_CLIP_CONTROL & GLM_CLIP_CONTROL_ZO_BIT) - return projectZO(obj, model, proj, viewport); - else - return projectNO(obj, model, proj, viewport); - } - - template - GLM_FUNC_QUALIFIER vec<3, T, Q> unProjectZO(vec<3, T, Q> const& win, mat<4, 4, T, Q> const& model, mat<4, 4, T, Q> const& proj, vec<4, U, Q> const& viewport) - { - mat<4, 4, T, Q> Inverse = inverse(proj * model); - - vec<4, T, Q> tmp = vec<4, T, Q>(win, T(1)); - tmp.x = (tmp.x - T(viewport[0])) / T(viewport[2]); - tmp.y = (tmp.y - T(viewport[1])) / T(viewport[3]); - tmp.x = tmp.x * static_cast(2) - static_cast(1); - tmp.y = tmp.y * static_cast(2) - static_cast(1); - - vec<4, T, Q> obj = Inverse * tmp; - obj /= obj.w; - - return vec<3, T, Q>(obj); - } - - template - GLM_FUNC_QUALIFIER vec<3, T, Q> unProjectNO(vec<3, T, Q> const& win, mat<4, 4, T, Q> const& model, mat<4, 4, T, Q> const& proj, vec<4, U, Q> const& viewport) - { - mat<4, 4, T, Q> Inverse = inverse(proj * model); - - vec<4, T, Q> tmp = vec<4, T, Q>(win, T(1)); - tmp.x = (tmp.x - T(viewport[0])) / T(viewport[2]); - tmp.y = (tmp.y - T(viewport[1])) / T(viewport[3]); - tmp = tmp * static_cast(2) - static_cast(1); - - vec<4, T, Q> obj = Inverse * tmp; - obj /= obj.w; - - return vec<3, T, Q>(obj); - } - - template - GLM_FUNC_QUALIFIER vec<3, T, Q> unProject(vec<3, T, Q> const& win, mat<4, 4, T, Q> const& model, mat<4, 4, T, Q> const& proj, vec<4, U, Q> const& viewport) - { - if(GLM_CONFIG_CLIP_CONTROL & GLM_CLIP_CONTROL_ZO_BIT) - return unProjectZO(win, model, proj, viewport); - else - return unProjectNO(win, model, proj, viewport); - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, Q> pickMatrix(vec<2, T, Q> const& center, vec<2, T, Q> const& delta, vec<4, U, Q> const& viewport) - { - assert(delta.x > static_cast(0) && delta.y > static_cast(0)); - mat<4, 4, T, Q> Result(static_cast(1)); - - if(!(delta.x 
> static_cast(0) && delta.y > static_cast(0))) - return Result; // Error - - vec<3, T, Q> Temp( - (static_cast(viewport[2]) - static_cast(2) * (center.x - static_cast(viewport[0]))) / delta.x, - (static_cast(viewport[3]) - static_cast(2) * (center.y - static_cast(viewport[1]))) / delta.y, - static_cast(0)); - - // Translate and scale the picked region to the entire window - Result = translate(Result, Temp); - return scale(Result, vec<3, T, Q>(static_cast(viewport[2]) / delta.x, static_cast(viewport[3]) / delta.y, static_cast(1))); - } -}//namespace glm diff --git a/third_party/glm/ext/matrix_relational.hpp b/third_party/glm/ext/matrix_relational.hpp deleted file mode 100755 index 20023ad..0000000 --- a/third_party/glm/ext/matrix_relational.hpp +++ /dev/null @@ -1,132 +0,0 @@ -/// @ref ext_matrix_relational -/// @file glm/ext/matrix_relational.hpp -/// -/// @defgroup ext_matrix_relational GLM_EXT_matrix_relational -/// @ingroup ext -/// -/// Exposes comparison functions for matrix types that take a user defined epsilon values. -/// -/// Include to use the features of this extension. -/// -/// @see ext_vector_relational -/// @see ext_scalar_relational -/// @see ext_quaternion_relational - -#pragma once - -// Dependencies -#include "../detail/qualifier.hpp" - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# pragma message("GLM: GLM_EXT_matrix_relational extension included") -#endif - -namespace glm -{ - /// @addtogroup ext_matrix_relational - /// @{ - - /// Perform a component-wise equal-to comparison of two matrices. - /// Return a boolean vector which components value is True if this expression is satisfied per column of the matrices. - /// - /// @tparam C Integer between 1 and 4 included that qualify the number of columns of the matrix - /// @tparam R Integer between 1 and 4 included that qualify the number of rows of the matrix - /// @tparam T Floating-point or integer scalar types - /// @tparam Q Value from qualifier enum - template - GLM_FUNC_DECL GLM_CONSTEXPR vec equal(mat const& x, mat const& y); - - /// Perform a component-wise not-equal-to comparison of two matrices. - /// Return a boolean vector which components value is True if this expression is satisfied per column of the matrices. - /// - /// @tparam C Integer between 1 and 4 included that qualify the number of columns of the matrix - /// @tparam R Integer between 1 and 4 included that qualify the number of rows of the matrix - /// @tparam T Floating-point or integer scalar types - /// @tparam Q Value from qualifier enum - template - GLM_FUNC_DECL GLM_CONSTEXPR vec notEqual(mat const& x, mat const& y); - - /// Returns the component-wise comparison of |x - y| < epsilon. - /// True if this expression is satisfied. - /// - /// @tparam C Integer between 1 and 4 included that qualify the number of columns of the matrix - /// @tparam R Integer between 1 and 4 included that qualify the number of rows of the matrix - /// @tparam T Floating-point or integer scalar types - /// @tparam Q Value from qualifier enum - template - GLM_FUNC_DECL GLM_CONSTEXPR vec equal(mat const& x, mat const& y, T epsilon); - - /// Returns the component-wise comparison of |x - y| < epsilon. - /// True if this expression is satisfied. 
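pickMatrix(), whose implementation ends just above, builds a matrix that restricts rendering to a small window region (the gluPickMatrix equivalent) and is typically multiplied onto the projection matrix for selection passes. A sketch with an assumed 800x600 viewport and a hypothetical 5x5 pixel region around the cursor; perspective() comes from the matrix_clip_space extension, not this hunk:

    #include <glm/glm.hpp>
    #include <glm/ext/matrix_clip_space.hpp> // perspective
    #include <glm/ext/matrix_projection.hpp> // pickMatrix

    int main()
    {
        glm::vec4 viewport(0.0f, 0.0f, 800.0f, 600.0f);

        // 5x5 pixel picking region centred on the cursor; only geometry that
        // falls inside it survives clipping when this matrix is applied.
        glm::vec2 cursor(400.0f, 300.0f);
        glm::mat4 pick = glm::pickMatrix(cursor, glm::vec2(5.0f), viewport);

        // Prepend to the normal projection while rendering for selection.
        glm::mat4 pickingProj = pick * glm::perspective(glm::radians(45.0f), 800.0f / 600.0f, 0.1f, 100.0f);
        (void)pickingProj;
    }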
- /// - /// @tparam C Integer between 1 and 4 included that qualify the number of columns of the matrix - /// @tparam R Integer between 1 and 4 included that qualify the number of rows of the matrix - /// @tparam T Floating-point or integer scalar types - /// @tparam Q Value from qualifier enum - template - GLM_FUNC_DECL GLM_CONSTEXPR vec equal(mat const& x, mat const& y, vec const& epsilon); - - /// Returns the component-wise comparison of |x - y| < epsilon. - /// True if this expression is not satisfied. - /// - /// @tparam C Integer between 1 and 4 included that qualify the number of columns of the matrix - /// @tparam R Integer between 1 and 4 included that qualify the number of rows of the matrix - /// @tparam T Floating-point or integer scalar types - /// @tparam Q Value from qualifier enum - template - GLM_FUNC_DECL GLM_CONSTEXPR vec notEqual(mat const& x, mat const& y, T epsilon); - - /// Returns the component-wise comparison of |x - y| >= epsilon. - /// True if this expression is not satisfied. - /// - /// @tparam C Integer between 1 and 4 included that qualify the number of columns of the matrix - /// @tparam R Integer between 1 and 4 included that qualify the number of rows of the matrix - /// @tparam T Floating-point or integer scalar types - /// @tparam Q Value from qualifier enum - template - GLM_FUNC_DECL GLM_CONSTEXPR vec notEqual(mat const& x, mat const& y, vec const& epsilon); - - /// Returns the component-wise comparison between two vectors in term of ULPs. - /// True if this expression is satisfied. - /// - /// @tparam C Integer between 1 and 4 included that qualify the number of columns of the matrix - /// @tparam R Integer between 1 and 4 included that qualify the number of rows of the matrix - /// @tparam T Floating-point - /// @tparam Q Value from qualifier enum - template - GLM_FUNC_DECL GLM_CONSTEXPR vec equal(mat const& x, mat const& y, int ULPs); - - /// Returns the component-wise comparison between two vectors in term of ULPs. - /// True if this expression is satisfied. - /// - /// @tparam C Integer between 1 and 4 included that qualify the number of columns of the matrix - /// @tparam R Integer between 1 and 4 included that qualify the number of rows of the matrix - /// @tparam T Floating-point - /// @tparam Q Value from qualifier enum - template - GLM_FUNC_DECL GLM_CONSTEXPR vec equal(mat const& x, mat const& y, vec const& ULPs); - - /// Returns the component-wise comparison between two vectors in term of ULPs. - /// True if this expression is not satisfied. - /// - /// @tparam C Integer between 1 and 4 included that qualify the number of columns of the matrix - /// @tparam R Integer between 1 and 4 included that qualify the number of rows of the matrix - /// @tparam T Floating-point - /// @tparam Q Value from qualifier enum - template - GLM_FUNC_DECL GLM_CONSTEXPR vec notEqual(mat const& x, mat const& y, int ULPs); - - /// Returns the component-wise comparison between two vectors in term of ULPs. - /// True if this expression is not satisfied. 
- /// - /// @tparam C Integer between 1 and 4 included that qualify the number of columns of the matrix - /// @tparam R Integer between 1 and 4 included that qualify the number of rows of the matrix - /// @tparam T Floating-point - /// @tparam Q Value from qualifier enum - template - GLM_FUNC_DECL GLM_CONSTEXPR vec notEqual(mat const& x, mat const& y, vec const& ULPs); - - /// @} -}//namespace glm - -#include "matrix_relational.inl" diff --git a/third_party/glm/ext/matrix_relational.inl b/third_party/glm/ext/matrix_relational.inl deleted file mode 100755 index b2b8753..0000000 --- a/third_party/glm/ext/matrix_relational.inl +++ /dev/null @@ -1,82 +0,0 @@ -/// @ref ext_vector_relational -/// @file glm/ext/vector_relational.inl - -// Dependency: -#include "../ext/vector_relational.hpp" -#include "../common.hpp" - -namespace glm -{ - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec equal(mat const& a, mat const& b) - { - return equal(a, b, static_cast(0)); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec equal(mat const& a, mat const& b, T Epsilon) - { - return equal(a, b, vec(Epsilon)); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec equal(mat const& a, mat const& b, vec const& Epsilon) - { - vec Result(true); - for(length_t i = 0; i < C; ++i) - Result[i] = all(equal(a[i], b[i], Epsilon[i])); - return Result; - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec notEqual(mat const& x, mat const& y) - { - return notEqual(x, y, static_cast(0)); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec notEqual(mat const& x, mat const& y, T Epsilon) - { - return notEqual(x, y, vec(Epsilon)); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec notEqual(mat const& a, mat const& b, vec const& Epsilon) - { - vec Result(true); - for(length_t i = 0; i < C; ++i) - Result[i] = any(notEqual(a[i], b[i], Epsilon[i])); - return Result; - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec equal(mat const& a, mat const& b, int MaxULPs) - { - return equal(a, b, vec(MaxULPs)); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec equal(mat const& a, mat const& b, vec const& MaxULPs) - { - vec Result(true); - for(length_t i = 0; i < C; ++i) - Result[i] = all(equal(a[i], b[i], MaxULPs[i])); - return Result; - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec notEqual(mat const& x, mat const& y, int MaxULPs) - { - return notEqual(x, y, vec(MaxULPs)); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec notEqual(mat const& a, mat const& b, vec const& MaxULPs) - { - vec Result(true); - for(length_t i = 0; i < C; ++i) - Result[i] = any(notEqual(a[i], b[i], MaxULPs[i])); - return Result; - } - -}//namespace glm diff --git a/third_party/glm/ext/matrix_transform.hpp b/third_party/glm/ext/matrix_transform.hpp deleted file mode 100755 index cbd187e..0000000 --- a/third_party/glm/ext/matrix_transform.hpp +++ /dev/null @@ -1,144 +0,0 @@ -/// @ref ext_matrix_transform -/// @file glm/ext/matrix_transform.hpp -/// -/// @defgroup ext_matrix_transform GLM_EXT_matrix_transform -/// @ingroup ext -/// -/// Defines functions that generate common transformation matrices. -/// -/// The matrices generated by this extension use standard OpenGL fixed-function -/// conventions. For example, the lookAt function generates a transform from world -/// space into the specific eye space that the projective matrix functions -/// (perspective, ortho, etc) are designed to expect. The OpenGL compatibility -/// specifications defines the particular layout of this eye space. 
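For reference, a minimal usage sketch of the GLM_EXT_matrix_relational comparison helpers removed above. This is illustrative only: it assumes GLM remains reachable on the include path after being dropped from third_party, and the variable names and tolerances are made up for the example.

#include <cstdio>
#include <glm/glm.hpp>
#include <glm/ext/matrix_relational.hpp>

int main()
{
    glm::mat4 a(1.0f);        // identity
    glm::mat4 b = a;
    b[3][0] += 1e-7f;         // tiny perturbation of one element

    // equal()/notEqual() compare column by column and return a bvec4;
    // glm::all()/glm::any() collapse that into a single bool.
    bool almostEqual = glm::all(glm::equal(a, b, 1e-5f));
    bool differs     = glm::any(glm::notEqual(a, b, 1e-9f));

    std::printf("almostEqual=%d differs=%d\n", almostEqual, differs);
    return 0;
}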
-/// -/// Include to use the features of this extension. -/// -/// @see ext_matrix_projection -/// @see ext_matrix_clip_space - -#pragma once - -// Dependencies -#include "../gtc/constants.hpp" -#include "../geometric.hpp" -#include "../trigonometric.hpp" -#include "../matrix.hpp" - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# pragma message("GLM: GLM_EXT_matrix_transform extension included") -#endif - -namespace glm -{ - /// @addtogroup ext_matrix_transform - /// @{ - - /// Builds an identity matrix. - template - GLM_FUNC_DECL GLM_CONSTEXPR genType identity(); - - /// Builds a translation 4 * 4 matrix created from a vector of 3 components. - /// - /// @param m Input matrix multiplied by this translation matrix. - /// @param v Coordinates of a translation vector. - /// - /// @tparam T A floating-point scalar type - /// @tparam Q A value from qualifier enum - /// - /// @code - /// #include - /// #include - /// ... - /// glm::mat4 m = glm::translate(glm::mat4(1.0f), glm::vec3(1.0f)); - /// // m[0][0] == 1.0f, m[0][1] == 0.0f, m[0][2] == 0.0f, m[0][3] == 0.0f - /// // m[1][0] == 0.0f, m[1][1] == 1.0f, m[1][2] == 0.0f, m[1][3] == 0.0f - /// // m[2][0] == 0.0f, m[2][1] == 0.0f, m[2][2] == 1.0f, m[2][3] == 0.0f - /// // m[3][0] == 1.0f, m[3][1] == 1.0f, m[3][2] == 1.0f, m[3][3] == 1.0f - /// @endcode - /// - /// @see - translate(mat<4, 4, T, Q> const& m, T x, T y, T z) - /// @see - translate(vec<3, T, Q> const& v) - /// @see glTranslate man page - template - GLM_FUNC_DECL mat<4, 4, T, Q> translate( - mat<4, 4, T, Q> const& m, vec<3, T, Q> const& v); - - /// Builds a rotation 4 * 4 matrix created from an axis vector and an angle. - /// - /// @param m Input matrix multiplied by this rotation matrix. - /// @param angle Rotation angle expressed in radians. - /// @param axis Rotation axis, recommended to be normalized. - /// - /// @tparam T A floating-point scalar type - /// @tparam Q A value from qualifier enum - /// - /// @see - rotate(mat<4, 4, T, Q> const& m, T angle, T x, T y, T z) - /// @see - rotate(T angle, vec<3, T, Q> const& v) - /// @see glRotate man page - template - GLM_FUNC_DECL mat<4, 4, T, Q> rotate( - mat<4, 4, T, Q> const& m, T angle, vec<3, T, Q> const& axis); - - /// Builds a scale 4 * 4 matrix created from 3 scalars. - /// - /// @param m Input matrix multiplied by this scale matrix. - /// @param v Ratio of scaling for each axis. - /// - /// @tparam T A floating-point scalar type - /// @tparam Q A value from qualifier enum - /// - /// @see - scale(mat<4, 4, T, Q> const& m, T x, T y, T z) - /// @see - scale(vec<3, T, Q> const& v) - /// @see glScale man page - template - GLM_FUNC_DECL mat<4, 4, T, Q> scale( - mat<4, 4, T, Q> const& m, vec<3, T, Q> const& v); - - /// Build a right handed look at view matrix. - /// - /// @param eye Position of the camera - /// @param center Position where the camera is looking at - /// @param up Normalized up vector, how the camera is oriented. Typically (0, 0, 1) - /// - /// @tparam T A floating-point scalar type - /// @tparam Q A value from qualifier enum - /// - /// @see - frustum(T const& left, T const& right, T const& bottom, T const& top, T const& nearVal, T const& farVal) frustum(T const& left, T const& right, T const& bottom, T const& top, T const& nearVal, T const& farVal) - template - GLM_FUNC_DECL mat<4, 4, T, Q> lookAtRH( - vec<3, T, Q> const& eye, vec<3, T, Q> const& center, vec<3, T, Q> const& up); - - /// Build a left handed look at view matrix. 
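As a companion to the translate/rotate/scale declarations removed above, here is a small sketch of how they compose into a model matrix. The helper name and argument values are illustrative, and GLM is assumed to still be available through the include path.

#include <glm/glm.hpp>
#include <glm/ext/matrix_transform.hpp>

// Build a classic model matrix. Each call multiplies the incoming matrix by
// the new transform on the right, so a vertex is effectively scaled first,
// then rotated, then translated.
glm::mat4 makeModel(const glm::vec3& position, float angleRadians, const glm::vec3& size)
{
    glm::mat4 m(1.0f);                                        // identity
    m = glm::translate(m, position);
    m = glm::rotate(m, angleRadians, glm::vec3(0.0f, 1.0f, 0.0f));
    m = glm::scale(m, size);
    return m;
}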
- /// - /// @param eye Position of the camera - /// @param center Position where the camera is looking at - /// @param up Normalized up vector, how the camera is oriented. Typically (0, 0, 1) - /// - /// @tparam T A floating-point scalar type - /// @tparam Q A value from qualifier enum - /// - /// @see - frustum(T const& left, T const& right, T const& bottom, T const& top, T const& nearVal, T const& farVal) frustum(T const& left, T const& right, T const& bottom, T const& top, T const& nearVal, T const& farVal) - template - GLM_FUNC_DECL mat<4, 4, T, Q> lookAtLH( - vec<3, T, Q> const& eye, vec<3, T, Q> const& center, vec<3, T, Q> const& up); - - /// Build a look at view matrix based on the default handedness. - /// - /// @param eye Position of the camera - /// @param center Position where the camera is looking at - /// @param up Normalized up vector, how the camera is oriented. Typically (0, 0, 1) - /// - /// @tparam T A floating-point scalar type - /// @tparam Q A value from qualifier enum - /// - /// @see - frustum(T const& left, T const& right, T const& bottom, T const& top, T const& nearVal, T const& farVal) frustum(T const& left, T const& right, T const& bottom, T const& top, T const& nearVal, T const& farVal) - /// @see gluLookAt man page - template - GLM_FUNC_DECL mat<4, 4, T, Q> lookAt( - vec<3, T, Q> const& eye, vec<3, T, Q> const& center, vec<3, T, Q> const& up); - - /// @} -}//namespace glm - -#include "matrix_transform.inl" diff --git a/third_party/glm/ext/matrix_transform.inl b/third_party/glm/ext/matrix_transform.inl deleted file mode 100755 index a415157..0000000 --- a/third_party/glm/ext/matrix_transform.inl +++ /dev/null @@ -1,152 +0,0 @@ -namespace glm -{ - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR genType identity() - { - return detail::init_gentype::GENTYPE>::identity(); - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, Q> translate(mat<4, 4, T, Q> const& m, vec<3, T, Q> const& v) - { - mat<4, 4, T, Q> Result(m); - Result[3] = m[0] * v[0] + m[1] * v[1] + m[2] * v[2] + m[3]; - return Result; - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, Q> rotate(mat<4, 4, T, Q> const& m, T angle, vec<3, T, Q> const& v) - { - T const a = angle; - T const c = cos(a); - T const s = sin(a); - - vec<3, T, Q> axis(normalize(v)); - vec<3, T, Q> temp((T(1) - c) * axis); - - mat<4, 4, T, Q> Rotate; - Rotate[0][0] = c + temp[0] * axis[0]; - Rotate[0][1] = temp[0] * axis[1] + s * axis[2]; - Rotate[0][2] = temp[0] * axis[2] - s * axis[1]; - - Rotate[1][0] = temp[1] * axis[0] - s * axis[2]; - Rotate[1][1] = c + temp[1] * axis[1]; - Rotate[1][2] = temp[1] * axis[2] + s * axis[0]; - - Rotate[2][0] = temp[2] * axis[0] + s * axis[1]; - Rotate[2][1] = temp[2] * axis[1] - s * axis[0]; - Rotate[2][2] = c + temp[2] * axis[2]; - - mat<4, 4, T, Q> Result; - Result[0] = m[0] * Rotate[0][0] + m[1] * Rotate[0][1] + m[2] * Rotate[0][2]; - Result[1] = m[0] * Rotate[1][0] + m[1] * Rotate[1][1] + m[2] * Rotate[1][2]; - Result[2] = m[0] * Rotate[2][0] + m[1] * Rotate[2][1] + m[2] * Rotate[2][2]; - Result[3] = m[3]; - return Result; - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, Q> rotate_slow(mat<4, 4, T, Q> const& m, T angle, vec<3, T, Q> const& v) - { - T const a = angle; - T const c = cos(a); - T const s = sin(a); - mat<4, 4, T, Q> Result; - - vec<3, T, Q> axis = normalize(v); - - Result[0][0] = c + (static_cast(1) - c) * axis.x * axis.x; - Result[0][1] = (static_cast(1) - c) * axis.x * axis.y + s * axis.z; - Result[0][2] = (static_cast(1) - c) * axis.x * axis.z - s * axis.y; - Result[0][3] = 
static_cast(0); - - Result[1][0] = (static_cast(1) - c) * axis.y * axis.x - s * axis.z; - Result[1][1] = c + (static_cast(1) - c) * axis.y * axis.y; - Result[1][2] = (static_cast(1) - c) * axis.y * axis.z + s * axis.x; - Result[1][3] = static_cast(0); - - Result[2][0] = (static_cast(1) - c) * axis.z * axis.x + s * axis.y; - Result[2][1] = (static_cast(1) - c) * axis.z * axis.y - s * axis.x; - Result[2][2] = c + (static_cast(1) - c) * axis.z * axis.z; - Result[2][3] = static_cast(0); - - Result[3] = vec<4, T, Q>(0, 0, 0, 1); - return m * Result; - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, Q> scale(mat<4, 4, T, Q> const& m, vec<3, T, Q> const& v) - { - mat<4, 4, T, Q> Result; - Result[0] = m[0] * v[0]; - Result[1] = m[1] * v[1]; - Result[2] = m[2] * v[2]; - Result[3] = m[3]; - return Result; - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, Q> scale_slow(mat<4, 4, T, Q> const& m, vec<3, T, Q> const& v) - { - mat<4, 4, T, Q> Result(T(1)); - Result[0][0] = v.x; - Result[1][1] = v.y; - Result[2][2] = v.z; - return m * Result; - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, Q> lookAtRH(vec<3, T, Q> const& eye, vec<3, T, Q> const& center, vec<3, T, Q> const& up) - { - vec<3, T, Q> const f(normalize(center - eye)); - vec<3, T, Q> const s(normalize(cross(f, up))); - vec<3, T, Q> const u(cross(s, f)); - - mat<4, 4, T, Q> Result(1); - Result[0][0] = s.x; - Result[1][0] = s.y; - Result[2][0] = s.z; - Result[0][1] = u.x; - Result[1][1] = u.y; - Result[2][1] = u.z; - Result[0][2] =-f.x; - Result[1][2] =-f.y; - Result[2][2] =-f.z; - Result[3][0] =-dot(s, eye); - Result[3][1] =-dot(u, eye); - Result[3][2] = dot(f, eye); - return Result; - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, Q> lookAtLH(vec<3, T, Q> const& eye, vec<3, T, Q> const& center, vec<3, T, Q> const& up) - { - vec<3, T, Q> const f(normalize(center - eye)); - vec<3, T, Q> const s(normalize(cross(up, f))); - vec<3, T, Q> const u(cross(f, s)); - - mat<4, 4, T, Q> Result(1); - Result[0][0] = s.x; - Result[1][0] = s.y; - Result[2][0] = s.z; - Result[0][1] = u.x; - Result[1][1] = u.y; - Result[2][1] = u.z; - Result[0][2] = f.x; - Result[1][2] = f.y; - Result[2][2] = f.z; - Result[3][0] = -dot(s, eye); - Result[3][1] = -dot(u, eye); - Result[3][2] = -dot(f, eye); - return Result; - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, Q> lookAt(vec<3, T, Q> const& eye, vec<3, T, Q> const& center, vec<3, T, Q> const& up) - { - GLM_IF_CONSTEXPR(GLM_CONFIG_CLIP_CONTROL & GLM_CLIP_CONTROL_LH_BIT) - return lookAtLH(eye, center, up); - else - return lookAtRH(eye, center, up); - } -}//namespace glm diff --git a/third_party/glm/ext/quaternion_common.hpp b/third_party/glm/ext/quaternion_common.hpp deleted file mode 100755 index 2980ed4..0000000 --- a/third_party/glm/ext/quaternion_common.hpp +++ /dev/null @@ -1,120 +0,0 @@ -/// @ref ext_quaternion_common -/// @file glm/ext/quaternion_common.hpp -/// -/// @defgroup ext_quaternion_common GLM_EXT_quaternion_common -/// @ingroup ext -/// -/// Provides common functions for quaternion types -/// -/// Include to use the features of this extension. 
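A short sketch of the lookAt family removed above, building a view matrix for a camera aimed at the origin. The helper name and the eye/up values are made up for the example; glm::lookAt() itself dispatches to lookAtRH or lookAtLH depending on GLM_CONFIG_CLIP_CONTROL, as shown in the deleted implementation.

#include <glm/glm.hpp>
#include <glm/ext/matrix_transform.hpp>

glm::mat4 makeView(float orbitRadius, float height)
{
    const glm::vec3 eye(orbitRadius, height, orbitRadius);
    const glm::vec3 center(0.0f);                  // look at the origin
    const glm::vec3 up(0.0f, 1.0f, 0.0f);
    return glm::lookAt(eye, center, up);
}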
-/// -/// @see ext_scalar_common -/// @see ext_vector_common -/// @see ext_quaternion_float -/// @see ext_quaternion_double -/// @see ext_quaternion_exponential -/// @see ext_quaternion_geometric -/// @see ext_quaternion_relational -/// @see ext_quaternion_trigonometric -/// @see ext_quaternion_transform - -#pragma once - -// Dependency: -#include "../ext/scalar_constants.hpp" -#include "../ext/quaternion_geometric.hpp" -#include "../common.hpp" -#include "../trigonometric.hpp" -#include "../exponential.hpp" -#include - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# pragma message("GLM: GLM_EXT_quaternion_common extension included") -#endif - -namespace glm -{ - /// @addtogroup ext_quaternion_common - /// @{ - - /// Spherical linear interpolation of two quaternions. - /// The interpolation is oriented and the rotation is performed at constant speed. - /// For short path spherical linear interpolation, use the slerp function. - /// - /// @param x A quaternion - /// @param y A quaternion - /// @param a Interpolation factor. The interpolation is defined beyond the range [0, 1]. - /// - /// @tparam T A floating-point scalar type - /// @tparam Q A value from qualifier enum - /// - /// @see - slerp(qua const& x, qua const& y, T const& a) - template - GLM_FUNC_DECL qua mix(qua const& x, qua const& y, T a); - - /// Linear interpolation of two quaternions. - /// The interpolation is oriented. - /// - /// @param x A quaternion - /// @param y A quaternion - /// @param a Interpolation factor. The interpolation is defined in the range [0, 1]. - /// - /// @tparam T A floating-point scalar type - /// @tparam Q A value from qualifier enum - template - GLM_FUNC_DECL qua lerp(qua const& x, qua const& y, T a); - - /// Spherical linear interpolation of two quaternions. - /// The interpolation always take the short path and the rotation is performed at constant speed. - /// - /// @param x A quaternion - /// @param y A quaternion - /// @param a Interpolation factor. The interpolation is defined beyond the range [0, 1]. - /// - /// @tparam T A floating-point scalar type - /// @tparam Q A value from qualifier enum - template - GLM_FUNC_DECL qua slerp(qua const& x, qua const& y, T a); - - /// Returns the q conjugate. - /// - /// @tparam T A floating-point scalar type - /// @tparam Q A value from qualifier enum - template - GLM_FUNC_DECL qua conjugate(qua const& q); - - /// Returns the q inverse. - /// - /// @tparam T A floating-point scalar type - /// @tparam Q A value from qualifier enum - template - GLM_FUNC_DECL qua inverse(qua const& q); - - /// Returns true if x holds a NaN (not a number) - /// representation in the underlying implementation's set of - /// floating point representations. Returns false otherwise, - /// including for implementations with no NaN - /// representations. - /// - /// /!\ When using compiler fast math, this function may fail. - /// - /// @tparam T A floating-point scalar type - /// @tparam Q A value from qualifier enum - template - GLM_FUNC_DECL vec<4, bool, Q> isnan(qua const& x); - - /// Returns true if x holds a positive infinity or negative - /// infinity representation in the underlying implementation's - /// set of floating point representations. Returns false - /// otherwise, including for implementations with no infinity - /// representations. 
- /// - /// @tparam T A floating-point scalar type - /// @tparam Q A value from qualifier enum - template - GLM_FUNC_DECL vec<4, bool, Q> isinf(qua const& x); - - /// @} -} //namespace glm - -#include "quaternion_common.inl" diff --git a/third_party/glm/ext/quaternion_common.inl b/third_party/glm/ext/quaternion_common.inl deleted file mode 100755 index 3b2846f..0000000 --- a/third_party/glm/ext/quaternion_common.inl +++ /dev/null @@ -1,107 +0,0 @@ -namespace glm -{ - template - GLM_FUNC_QUALIFIER qua mix(qua const& x, qua const& y, T a) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'mix' only accept floating-point inputs"); - - T const cosTheta = dot(x, y); - - // Perform a linear interpolation when cosTheta is close to 1 to avoid side effect of sin(angle) becoming a zero denominator - if(cosTheta > static_cast(1) - epsilon()) - { - // Linear interpolation - return qua( - mix(x.w, y.w, a), - mix(x.x, y.x, a), - mix(x.y, y.y, a), - mix(x.z, y.z, a)); - } - else - { - // Essential Mathematics, page 467 - T angle = acos(cosTheta); - return (sin((static_cast(1) - a) * angle) * x + sin(a * angle) * y) / sin(angle); - } - } - - template - GLM_FUNC_QUALIFIER qua lerp(qua const& x, qua const& y, T a) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'lerp' only accept floating-point inputs"); - - // Lerp is only defined in [0, 1] - assert(a >= static_cast(0)); - assert(a <= static_cast(1)); - - return x * (static_cast(1) - a) + (y * a); - } - - template - GLM_FUNC_QUALIFIER qua slerp(qua const& x, qua const& y, T a) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'slerp' only accept floating-point inputs"); - - qua z = y; - - T cosTheta = dot(x, y); - - // If cosTheta < 0, the interpolation will take the long way around the sphere. - // To fix this, one quat must be negated. 
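To illustrate the quaternion interpolation routines deleted above: slerp() always takes the short arc at constant angular speed, while mix() keeps the orientation of the arc as given. A minimal sketch, with a hypothetical helper name and an assumed GLM include path:

#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>

// Orientation halfway between two rotations, along the shortest arc.
glm::quat halfwayOrientation(const glm::quat& from, const glm::quat& to)
{
    return glm::slerp(from, to, 0.5f);
}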
- if(cosTheta < static_cast(0)) - { - z = -y; - cosTheta = -cosTheta; - } - - // Perform a linear interpolation when cosTheta is close to 1 to avoid side effect of sin(angle) becoming a zero denominator - if(cosTheta > static_cast(1) - epsilon()) - { - // Linear interpolation - return qua( - mix(x.w, z.w, a), - mix(x.x, z.x, a), - mix(x.y, z.y, a), - mix(x.z, z.z, a)); - } - else - { - // Essential Mathematics, page 467 - T angle = acos(cosTheta); - return (sin((static_cast(1) - a) * angle) * x + sin(a * angle) * z) / sin(angle); - } - } - - template - GLM_FUNC_QUALIFIER qua conjugate(qua const& q) - { - return qua(q.w, -q.x, -q.y, -q.z); - } - - template - GLM_FUNC_QUALIFIER qua inverse(qua const& q) - { - return conjugate(q) / dot(q, q); - } - - template - GLM_FUNC_QUALIFIER vec<4, bool, Q> isnan(qua const& q) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'isnan' only accept floating-point inputs"); - - return vec<4, bool, Q>(isnan(q.x), isnan(q.y), isnan(q.z), isnan(q.w)); - } - - template - GLM_FUNC_QUALIFIER vec<4, bool, Q> isinf(qua const& q) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'isinf' only accept floating-point inputs"); - - return vec<4, bool, Q>(isinf(q.x), isinf(q.y), isinf(q.z), isinf(q.w)); - } -}//namespace glm - -#if GLM_CONFIG_SIMD == GLM_ENABLE -# include "quaternion_common_simd.inl" -#endif - diff --git a/third_party/glm/ext/quaternion_common_simd.inl b/third_party/glm/ext/quaternion_common_simd.inl deleted file mode 100755 index ddfc8a4..0000000 --- a/third_party/glm/ext/quaternion_common_simd.inl +++ /dev/null @@ -1,18 +0,0 @@ -#if GLM_ARCH & GLM_ARCH_SSE2_BIT - -namespace glm{ -namespace detail -{ - template - struct compute_dot, float, true> - { - static GLM_FUNC_QUALIFIER float call(qua const& x, qua const& y) - { - return _mm_cvtss_f32(glm_vec1_dot(x.data, y.data)); - } - }; -}//namespace detail -}//namespace glm - -#endif//GLM_ARCH & GLM_ARCH_SSE2_BIT - diff --git a/third_party/glm/ext/quaternion_double.hpp b/third_party/glm/ext/quaternion_double.hpp deleted file mode 100755 index 63b24de..0000000 --- a/third_party/glm/ext/quaternion_double.hpp +++ /dev/null @@ -1,39 +0,0 @@ -/// @ref ext_quaternion_double -/// @file glm/ext/quaternion_double.hpp -/// -/// @defgroup ext_quaternion_double GLM_EXT_quaternion_double -/// @ingroup ext -/// -/// Exposes double-precision floating point quaternion type. -/// -/// Include to use the features of this extension. -/// -/// @see ext_quaternion_float -/// @see ext_quaternion_double_precision -/// @see ext_quaternion_common -/// @see ext_quaternion_exponential -/// @see ext_quaternion_geometric -/// @see ext_quaternion_relational -/// @see ext_quaternion_transform -/// @see ext_quaternion_trigonometric - -#pragma once - -// Dependency: -#include "../detail/type_quat.hpp" - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# pragma message("GLM: GLM_EXT_quaternion_double extension included") -#endif - -namespace glm -{ - /// @addtogroup ext_quaternion_double - /// @{ - - /// Quaternion of double-precision floating-point numbers. 
- typedef qua dquat; - - /// @} -} //namespace glm - diff --git a/third_party/glm/ext/quaternion_double_precision.hpp b/third_party/glm/ext/quaternion_double_precision.hpp deleted file mode 100755 index 8aa24a1..0000000 --- a/third_party/glm/ext/quaternion_double_precision.hpp +++ /dev/null @@ -1,42 +0,0 @@ -/// @ref ext_quaternion_double_precision -/// @file glm/ext/quaternion_double_precision.hpp -/// -/// @defgroup ext_quaternion_double_precision GLM_EXT_quaternion_double_precision -/// @ingroup ext -/// -/// Exposes double-precision floating point quaternion type with various precision in term of ULPs. -/// -/// Include to use the features of this extension. - -#pragma once - -// Dependency: -#include "../detail/type_quat.hpp" - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# pragma message("GLM: GLM_EXT_quaternion_double_precision extension included") -#endif - -namespace glm -{ - /// @addtogroup ext_quaternion_double_precision - /// @{ - - /// Quaternion of double-precision floating-point numbers using high precision arithmetic in term of ULPs. - /// - /// @see ext_quaternion_double_precision - typedef qua lowp_dquat; - - /// Quaternion of medium double-qualifier floating-point numbers using high precision arithmetic in term of ULPs. - /// - /// @see ext_quaternion_double_precision - typedef qua mediump_dquat; - - /// Quaternion of high double-qualifier floating-point numbers using high precision arithmetic in term of ULPs. - /// - /// @see ext_quaternion_double_precision - typedef qua highp_dquat; - - /// @} -} //namespace glm - diff --git a/third_party/glm/ext/quaternion_exponential.hpp b/third_party/glm/ext/quaternion_exponential.hpp deleted file mode 100755 index affe297..0000000 --- a/third_party/glm/ext/quaternion_exponential.hpp +++ /dev/null @@ -1,63 +0,0 @@ -/// @ref ext_quaternion_exponential -/// @file glm/ext/quaternion_exponential.hpp -/// -/// @defgroup ext_quaternion_exponential GLM_EXT_quaternion_exponential -/// @ingroup ext -/// -/// Provides exponential functions for quaternion types -/// -/// Include to use the features of this extension. -/// -/// @see core_exponential -/// @see ext_quaternion_float -/// @see ext_quaternion_double - -#pragma once - -// Dependency: -#include "../common.hpp" -#include "../trigonometric.hpp" -#include "../geometric.hpp" -#include "../ext/scalar_constants.hpp" - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# pragma message("GLM: GLM_EXT_quaternion_exponential extension included") -#endif - -namespace glm -{ - /// @addtogroup ext_quaternion_transform - /// @{ - - /// Returns a exponential of a quaternion. - /// - /// @tparam T A floating-point scalar type - /// @tparam Q A value from qualifier enum - template - GLM_FUNC_DECL qua exp(qua const& q); - - /// Returns a logarithm of a quaternion - /// - /// @tparam T A floating-point scalar type - /// @tparam Q A value from qualifier enum - template - GLM_FUNC_DECL qua log(qua const& q); - - /// Returns a quaternion raised to a power. 
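The exponential extension declared above also allows scaling a rotation directly: for a unit quaternion, pow(q, t) covers a fraction t of the angle of q around the same axis. A hedged sketch (helper name and include choices are assumptions):

#include <glm/glm.hpp>
#include <glm/ext/quaternion_float.hpp>
#include <glm/ext/quaternion_exponential.hpp>

// pow(q, 0.5f) yields the half rotation of q.
glm::quat scaleRotation(const glm::quat& q, float t)
{
    return glm::pow(q, t);
}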
- /// - /// @tparam T A floating-point scalar type - /// @tparam Q A value from qualifier enum - template - GLM_FUNC_DECL qua pow(qua const& q, T y); - - /// Returns the square root of a quaternion - /// - /// @tparam T A floating-point scalar type - /// @tparam Q A value from qualifier enum - template - GLM_FUNC_DECL qua sqrt(qua const& q); - - /// @} -} //namespace glm - -#include "quaternion_exponential.inl" diff --git a/third_party/glm/ext/quaternion_exponential.inl b/third_party/glm/ext/quaternion_exponential.inl deleted file mode 100755 index 8456c00..0000000 --- a/third_party/glm/ext/quaternion_exponential.inl +++ /dev/null @@ -1,85 +0,0 @@ -#include "scalar_constants.hpp" - -namespace glm -{ - template - GLM_FUNC_QUALIFIER qua exp(qua const& q) - { - vec<3, T, Q> u(q.x, q.y, q.z); - T const Angle = glm::length(u); - if (Angle < epsilon()) - return qua(); - - vec<3, T, Q> const v(u / Angle); - return qua(cos(Angle), sin(Angle) * v); - } - - template - GLM_FUNC_QUALIFIER qua log(qua const& q) - { - vec<3, T, Q> u(q.x, q.y, q.z); - T Vec3Len = length(u); - - if (Vec3Len < epsilon()) - { - if(q.w > static_cast(0)) - return qua(log(q.w), static_cast(0), static_cast(0), static_cast(0)); - else if(q.w < static_cast(0)) - return qua(log(-q.w), pi(), static_cast(0), static_cast(0)); - else - return qua(std::numeric_limits::infinity(), std::numeric_limits::infinity(), std::numeric_limits::infinity(), std::numeric_limits::infinity()); - } - else - { - T t = atan(Vec3Len, T(q.w)) / Vec3Len; - T QuatLen2 = Vec3Len * Vec3Len + q.w * q.w; - return qua(static_cast(0.5) * log(QuatLen2), t * q.x, t * q.y, t * q.z); - } - } - - template - GLM_FUNC_QUALIFIER qua pow(qua const& x, T y) - { - //Raising to the power of 0 should yield 1 - //Needed to prevent a division by 0 error later on - if(y > -epsilon() && y < epsilon()) - return qua(1,0,0,0); - - //To deal with non-unit quaternions - T magnitude = sqrt(x.x * x.x + x.y * x.y + x.z * x.z + x.w *x.w); - - T Angle; - if(abs(x.w / magnitude) > cos_one_over_two()) - { - //Scalar component is close to 1; using it to recover angle would lose precision - //Instead, we use the non-scalar components since sin() is accurate around 0 - - //Prevent a division by 0 error later on - T VectorMagnitude = x.x * x.x + x.y * x.y + x.z * x.z; - if (glm::abs(VectorMagnitude - static_cast(0)) < glm::epsilon()) { - //Equivalent to raising a real number to a power - return qua(pow(x.w, y), 0, 0, 0); - } - - Angle = asin(sqrt(VectorMagnitude) / magnitude); - } - else - { - //Scalar component is small, shouldn't cause loss of precision - Angle = acos(x.w / magnitude); - } - - T NewAngle = Angle * y; - T Div = sin(NewAngle) / sin(Angle); - T Mag = pow(magnitude, y - static_cast(1)); - return qua(cos(NewAngle) * magnitude * Mag, x.x * Div * Mag, x.y * Div * Mag, x.z * Div * Mag); - } - - template - GLM_FUNC_QUALIFIER qua sqrt(qua const& x) - { - return pow(x, static_cast(0.5)); - } -}//namespace glm - - diff --git a/third_party/glm/ext/quaternion_float.hpp b/third_party/glm/ext/quaternion_float.hpp deleted file mode 100755 index ca42a60..0000000 --- a/third_party/glm/ext/quaternion_float.hpp +++ /dev/null @@ -1,39 +0,0 @@ -/// @ref ext_quaternion_float -/// @file glm/ext/quaternion_float.hpp -/// -/// @defgroup ext_quaternion_float GLM_EXT_quaternion_float -/// @ingroup ext -/// -/// Exposes single-precision floating point quaternion type. -/// -/// Include to use the features of this extension. 
-/// -/// @see ext_quaternion_double -/// @see ext_quaternion_float_precision -/// @see ext_quaternion_common -/// @see ext_quaternion_exponential -/// @see ext_quaternion_geometric -/// @see ext_quaternion_relational -/// @see ext_quaternion_transform -/// @see ext_quaternion_trigonometric - -#pragma once - -// Dependency: -#include "../detail/type_quat.hpp" - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# pragma message("GLM: GLM_EXT_quaternion_float extension included") -#endif - -namespace glm -{ - /// @addtogroup ext_quaternion_float - /// @{ - - /// Quaternion of single-precision floating-point numbers. - typedef qua quat; - - /// @} -} //namespace glm - diff --git a/third_party/glm/ext/quaternion_float_precision.hpp b/third_party/glm/ext/quaternion_float_precision.hpp deleted file mode 100755 index f9e4f5c..0000000 --- a/third_party/glm/ext/quaternion_float_precision.hpp +++ /dev/null @@ -1,36 +0,0 @@ -/// @ref ext_quaternion_float_precision -/// @file glm/ext/quaternion_float_precision.hpp -/// -/// @defgroup ext_quaternion_float_precision GLM_EXT_quaternion_float_precision -/// @ingroup ext -/// -/// Exposes single-precision floating point quaternion type with various precision in term of ULPs. -/// -/// Include to use the features of this extension. - -#pragma once - -// Dependency: -#include "../detail/type_quat.hpp" - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# pragma message("GLM: GLM_EXT_quaternion_float_precision extension included") -#endif - -namespace glm -{ - /// @addtogroup ext_quaternion_float_precision - /// @{ - - /// Quaternion of single-precision floating-point numbers using high precision arithmetic in term of ULPs. - typedef qua lowp_quat; - - /// Quaternion of single-precision floating-point numbers using high precision arithmetic in term of ULPs. - typedef qua mediump_quat; - - /// Quaternion of single-precision floating-point numbers using high precision arithmetic in term of ULPs. - typedef qua highp_quat; - - /// @} -} //namespace glm - diff --git a/third_party/glm/ext/quaternion_geometric.hpp b/third_party/glm/ext/quaternion_geometric.hpp deleted file mode 100755 index 6d98bbe..0000000 --- a/third_party/glm/ext/quaternion_geometric.hpp +++ /dev/null @@ -1,70 +0,0 @@ -/// @ref ext_quaternion_geometric -/// @file glm/ext/quaternion_geometric.hpp -/// -/// @defgroup ext_quaternion_geometric GLM_EXT_quaternion_geometric -/// @ingroup ext -/// -/// Provides geometric functions for quaternion types -/// -/// Include to use the features of this extension. -/// -/// @see core_geometric -/// @see ext_quaternion_float -/// @see ext_quaternion_double - -#pragma once - -// Dependency: -#include "../geometric.hpp" -#include "../exponential.hpp" -#include "../ext/vector_relational.hpp" - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# pragma message("GLM: GLM_EXT_quaternion_geometric extension included") -#endif - -namespace glm -{ - /// @addtogroup ext_quaternion_geometric - /// @{ - - /// Returns the norm of a quaternions - /// - /// @tparam T Floating-point scalar types - /// @tparam Q Value from qualifier enum - /// - /// @see ext_quaternion_geometric - template - GLM_FUNC_DECL T length(qua const& q); - - /// Returns the normalized quaternion. - /// - /// @tparam T Floating-point scalar types - /// @tparam Q Value from qualifier enum - /// - /// @see ext_quaternion_geometric - template - GLM_FUNC_DECL qua normalize(qua const& q); - - /// Returns dot product of q1 and q2, i.e., q1[0] * q2[0] + q1[1] * q2[1] + ... 
- /// - /// @tparam T Floating-point scalar types. - /// @tparam Q Value from qualifier enum - /// - /// @see ext_quaternion_geometric - template - GLM_FUNC_DECL T dot(qua const& x, qua const& y); - - /// Compute a cross product. - /// - /// @tparam T Floating-point scalar types - /// @tparam Q Value from qualifier enum - /// - /// @see ext_quaternion_geometric - template - GLM_FUNC_QUALIFIER qua cross(qua const& q1, qua const& q2); - - /// @} -} //namespace glm - -#include "quaternion_geometric.inl" diff --git a/third_party/glm/ext/quaternion_geometric.inl b/third_party/glm/ext/quaternion_geometric.inl deleted file mode 100755 index e155ac5..0000000 --- a/third_party/glm/ext/quaternion_geometric.inl +++ /dev/null @@ -1,36 +0,0 @@ -namespace glm -{ - template - GLM_FUNC_QUALIFIER T dot(qua const& x, qua const& y) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'dot' accepts only floating-point inputs"); - return detail::compute_dot, T, detail::is_aligned::value>::call(x, y); - } - - template - GLM_FUNC_QUALIFIER T length(qua const& q) - { - return glm::sqrt(dot(q, q)); - } - - template - GLM_FUNC_QUALIFIER qua normalize(qua const& q) - { - T len = length(q); - if(len <= static_cast(0)) // Problem - return qua(static_cast(1), static_cast(0), static_cast(0), static_cast(0)); - T oneOverLen = static_cast(1) / len; - return qua(q.w * oneOverLen, q.x * oneOverLen, q.y * oneOverLen, q.z * oneOverLen); - } - - template - GLM_FUNC_QUALIFIER qua cross(qua const& q1, qua const& q2) - { - return qua( - q1.w * q2.w - q1.x * q2.x - q1.y * q2.y - q1.z * q2.z, - q1.w * q2.x + q1.x * q2.w + q1.y * q2.z - q1.z * q2.y, - q1.w * q2.y + q1.y * q2.w + q1.z * q2.x - q1.x * q2.z, - q1.w * q2.z + q1.z * q2.w + q1.x * q2.y - q1.y * q2.x); - } -}//namespace glm - diff --git a/third_party/glm/ext/quaternion_relational.hpp b/third_party/glm/ext/quaternion_relational.hpp deleted file mode 100755 index 7aa121d..0000000 --- a/third_party/glm/ext/quaternion_relational.hpp +++ /dev/null @@ -1,62 +0,0 @@ -/// @ref ext_quaternion_relational -/// @file glm/ext/quaternion_relational.hpp -/// -/// @defgroup ext_quaternion_relational GLM_EXT_quaternion_relational -/// @ingroup ext -/// -/// Exposes comparison functions for quaternion types that take a user defined epsilon values. -/// -/// Include to use the features of this extension. -/// -/// @see core_vector_relational -/// @see ext_vector_relational -/// @see ext_matrix_relational -/// @see ext_quaternion_float -/// @see ext_quaternion_double - -#pragma once - -// Dependency: -#include "../vector_relational.hpp" - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# pragma message("GLM: GLM_EXT_quaternion_relational extension included") -#endif - -namespace glm -{ - /// @addtogroup ext_quaternion_relational - /// @{ - - /// Returns the component-wise comparison of result x == y. - /// - /// @tparam T Floating-point scalar types - /// @tparam Q Value from qualifier enum - template - GLM_FUNC_DECL vec<4, bool, Q> equal(qua const& x, qua const& y); - - /// Returns the component-wise comparison of |x - y| < epsilon. - /// - /// @tparam T Floating-point scalar types - /// @tparam Q Value from qualifier enum - template - GLM_FUNC_DECL vec<4, bool, Q> equal(qua const& x, qua const& y, T epsilon); - - /// Returns the component-wise comparison of result x != y. 
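A small sketch of the geometric helpers (length, normalize, dot) declared in the removed quaternion_geometric header. The helper below is illustrative: it re-normalizes drifting orientations and uses the absolute dot product as a similarity measure, since unit quaternions double-cover rotations.

#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>

// 1.0f means the two orientations are identical (up to sign).
float orientationSimilarity(glm::quat a, glm::quat b)
{
    a = glm::normalize(a);
    b = glm::normalize(b);
    return glm::abs(glm::dot(a, b));
}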
- /// - /// @tparam T Floating-point scalar types - /// @tparam Q Value from qualifier enum - template - GLM_FUNC_DECL vec<4, bool, Q> notEqual(qua const& x, qua const& y); - - /// Returns the component-wise comparison of |x - y| >= epsilon. - /// - /// @tparam T Floating-point scalar types - /// @tparam Q Value from qualifier enum - template - GLM_FUNC_DECL vec<4, bool, Q> notEqual(qua const& x, qua const& y, T epsilon); - - /// @} -} //namespace glm - -#include "quaternion_relational.inl" diff --git a/third_party/glm/ext/quaternion_relational.inl b/third_party/glm/ext/quaternion_relational.inl deleted file mode 100755 index b1713e9..0000000 --- a/third_party/glm/ext/quaternion_relational.inl +++ /dev/null @@ -1,35 +0,0 @@ -namespace glm -{ - template - GLM_FUNC_QUALIFIER vec<4, bool, Q> equal(qua const& x, qua const& y) - { - vec<4, bool, Q> Result; - for(length_t i = 0; i < x.length(); ++i) - Result[i] = x[i] == y[i]; - return Result; - } - - template - GLM_FUNC_QUALIFIER vec<4, bool, Q> equal(qua const& x, qua const& y, T epsilon) - { - vec<4, T, Q> v(x.x - y.x, x.y - y.y, x.z - y.z, x.w - y.w); - return lessThan(abs(v), vec<4, T, Q>(epsilon)); - } - - template - GLM_FUNC_QUALIFIER vec<4, bool, Q> notEqual(qua const& x, qua const& y) - { - vec<4, bool, Q> Result; - for(length_t i = 0; i < x.length(); ++i) - Result[i] = x[i] != y[i]; - return Result; - } - - template - GLM_FUNC_QUALIFIER vec<4, bool, Q> notEqual(qua const& x, qua const& y, T epsilon) - { - vec<4, T, Q> v(x.x - y.x, x.y - y.y, x.z - y.z, x.w - y.w); - return greaterThanEqual(abs(v), vec<4, T, Q>(epsilon)); - } -}//namespace glm - diff --git a/third_party/glm/ext/quaternion_transform.hpp b/third_party/glm/ext/quaternion_transform.hpp deleted file mode 100755 index a9cc5c2..0000000 --- a/third_party/glm/ext/quaternion_transform.hpp +++ /dev/null @@ -1,47 +0,0 @@ -/// @ref ext_quaternion_transform -/// @file glm/ext/quaternion_transform.hpp -/// -/// @defgroup ext_quaternion_transform GLM_EXT_quaternion_transform -/// @ingroup ext -/// -/// Provides transformation functions for quaternion types -/// -/// Include to use the features of this extension. -/// -/// @see ext_quaternion_float -/// @see ext_quaternion_double -/// @see ext_quaternion_exponential -/// @see ext_quaternion_geometric -/// @see ext_quaternion_relational -/// @see ext_quaternion_trigonometric - -#pragma once - -// Dependency: -#include "../common.hpp" -#include "../trigonometric.hpp" -#include "../geometric.hpp" - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# pragma message("GLM: GLM_EXT_quaternion_transform extension included") -#endif - -namespace glm -{ - /// @addtogroup ext_quaternion_transform - /// @{ - - /// Rotates a quaternion from a vector of 3 components axis and an angle. - /// - /// @param q Source orientation - /// @param angle Angle expressed in radians. 
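For the quaternion relational extension above, a tolerant component-wise equality test typically combines equal() with glm::all(). Sketch only; the function name and epsilon parameter are invented for the example.

#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>
#include <glm/ext/quaternion_relational.hpp>

bool sameOrientation(const glm::quat& a, const glm::quat& b, float eps)
{
    return glm::all(glm::equal(a, b, eps));
}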
- /// @param axis Axis of the rotation - /// - /// @tparam T Floating-point scalar types - /// @tparam Q Value from qualifier enum - template - GLM_FUNC_DECL qua rotate(qua const& q, T const& angle, vec<3, T, Q> const& axis); - /// @} -} //namespace glm - -#include "quaternion_transform.inl" diff --git a/third_party/glm/ext/quaternion_transform.inl b/third_party/glm/ext/quaternion_transform.inl deleted file mode 100755 index b87ecb6..0000000 --- a/third_party/glm/ext/quaternion_transform.inl +++ /dev/null @@ -1,24 +0,0 @@ -namespace glm -{ - template - GLM_FUNC_QUALIFIER qua rotate(qua const& q, T const& angle, vec<3, T, Q> const& v) - { - vec<3, T, Q> Tmp = v; - - // Axis of rotation must be normalised - T len = glm::length(Tmp); - if(abs(len - static_cast(1)) > static_cast(0.001)) - { - T oneOverLen = static_cast(1) / len; - Tmp.x *= oneOverLen; - Tmp.y *= oneOverLen; - Tmp.z *= oneOverLen; - } - - T const AngleRad(angle); - T const Sin = sin(AngleRad * static_cast(0.5)); - - return q * qua(cos(AngleRad * static_cast(0.5)), Tmp.x * Sin, Tmp.y * Sin, Tmp.z * Sin); - } -}//namespace glm - diff --git a/third_party/glm/ext/quaternion_trigonometric.hpp b/third_party/glm/ext/quaternion_trigonometric.hpp deleted file mode 100755 index 76cea27..0000000 --- a/third_party/glm/ext/quaternion_trigonometric.hpp +++ /dev/null @@ -1,63 +0,0 @@ -/// @ref ext_quaternion_trigonometric -/// @file glm/ext/quaternion_trigonometric.hpp -/// -/// @defgroup ext_quaternion_trigonometric GLM_EXT_quaternion_trigonometric -/// @ingroup ext -/// -/// Provides trigonometric functions for quaternion types -/// -/// Include to use the features of this extension. -/// -/// @see ext_quaternion_float -/// @see ext_quaternion_double -/// @see ext_quaternion_exponential -/// @see ext_quaternion_geometric -/// @see ext_quaternion_relational -/// @see ext_quaternion_transform - -#pragma once - -// Dependency: -#include "../trigonometric.hpp" -#include "../exponential.hpp" -#include "scalar_constants.hpp" -#include "vector_relational.hpp" -#include - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# pragma message("GLM: GLM_EXT_quaternion_trigonometric extension included") -#endif - -namespace glm -{ - /// @addtogroup ext_quaternion_trigonometric - /// @{ - - /// Returns the quaternion rotation angle. - /// - /// @tparam T A floating-point scalar type - /// @tparam Q A value from qualifier enum - template - GLM_FUNC_DECL T angle(qua const& x); - - /// Returns the q rotation axis. - /// - /// @tparam T A floating-point scalar type - /// @tparam Q A value from qualifier enum - template - GLM_FUNC_DECL vec<3, T, Q> axis(qua const& x); - - /// Build a quaternion from an angle and a normalized axis. - /// - /// @param angle Angle expressed in radians. - /// @param axis Axis of the quaternion, must be normalized. 
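A quick sketch of the quaternion rotate() overload removed above, applying an incremental yaw to an existing orientation. Per the deleted implementation, the axis is re-normalized internally if it is not already close to unit length. Helper name is illustrative.

#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>

glm::quat addYaw(const glm::quat& orientation, float radians)
{
    return glm::rotate(orientation, radians, glm::vec3(0.0f, 1.0f, 0.0f));
}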
- /// - /// @tparam T A floating-point scalar type - /// @tparam Q A value from qualifier enum - template - GLM_FUNC_DECL qua angleAxis(T const& angle, vec<3, T, Q> const& axis); - - /// @} -} //namespace glm - -#include "quaternion_trigonometric.inl" diff --git a/third_party/glm/ext/quaternion_trigonometric.inl b/third_party/glm/ext/quaternion_trigonometric.inl deleted file mode 100755 index 06b7c4c..0000000 --- a/third_party/glm/ext/quaternion_trigonometric.inl +++ /dev/null @@ -1,34 +0,0 @@ -#include "scalar_constants.hpp" - -namespace glm -{ - template - GLM_FUNC_QUALIFIER T angle(qua const& x) - { - if (abs(x.w) > cos_one_over_two()) - { - return asin(sqrt(x.x * x.x + x.y * x.y + x.z * x.z)) * static_cast(2); - } - - return acos(x.w) * static_cast(2); - } - - template - GLM_FUNC_QUALIFIER vec<3, T, Q> axis(qua const& x) - { - T const tmp1 = static_cast(1) - x.w * x.w; - if(tmp1 <= static_cast(0)) - return vec<3, T, Q>(0, 0, 1); - T const tmp2 = static_cast(1) / sqrt(tmp1); - return vec<3, T, Q>(x.x * tmp2, x.y * tmp2, x.z * tmp2); - } - - template - GLM_FUNC_QUALIFIER qua angleAxis(T const& angle, vec<3, T, Q> const& v) - { - T const a(angle); - T const s = glm::sin(a * static_cast(0.5)); - - return qua(glm::cos(a * static_cast(0.5)), v * s); - } -}//namespace glm diff --git a/third_party/glm/ext/scalar_common.hpp b/third_party/glm/ext/scalar_common.hpp deleted file mode 100755 index 4ab0f88..0000000 --- a/third_party/glm/ext/scalar_common.hpp +++ /dev/null @@ -1,103 +0,0 @@ -/// @ref ext_scalar_common -/// @file glm/ext/scalar_common.hpp -/// -/// @defgroup ext_scalar_common GLM_EXT_scalar_common -/// @ingroup ext -/// -/// Exposes min and max functions for 3 to 4 scalar parameters. -/// -/// Include to use the features of this extension. -/// -/// @see core_func_common -/// @see ext_vector_common - -#pragma once - -// Dependency: -#include "../common.hpp" - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# pragma message("GLM: GLM_EXT_scalar_common extension included") -#endif - -namespace glm -{ - /// @addtogroup ext_scalar_common - /// @{ - - /// Returns the minimum component-wise values of 3 inputs - /// - /// @tparam T A floating-point scalar type. - template - GLM_FUNC_DECL T min(T a, T b, T c); - - /// Returns the minimum component-wise values of 4 inputs - /// - /// @tparam T A floating-point scalar type. - template - GLM_FUNC_DECL T min(T a, T b, T c, T d); - - /// Returns the maximum component-wise values of 3 inputs - /// - /// @tparam T A floating-point scalar type. - template - GLM_FUNC_DECL T max(T a, T b, T c); - - /// Returns the maximum component-wise values of 4 inputs - /// - /// @tparam T A floating-point scalar type. - template - GLM_FUNC_DECL T max(T a, T b, T c, T d); - - /// Returns the minimum component-wise values of 2 inputs. If one of the two arguments is NaN, the value of the other argument is returned. - /// - /// @tparam T A floating-point scalar type. - /// - /// @see std::fmin documentation - template - GLM_FUNC_DECL T fmin(T a, T b); - - /// Returns the minimum component-wise values of 3 inputs. If one of the two arguments is NaN, the value of the other argument is returned. - /// - /// @tparam T A floating-point scalar type. - /// - /// @see std::fmin documentation - template - GLM_FUNC_DECL T fmin(T a, T b, T c); - - /// Returns the minimum component-wise values of 4 inputs. If one of the two arguments is NaN, the value of the other argument is returned. - /// - /// @tparam T A floating-point scalar type. 
- /// - /// @see std::fmin documentation - template - GLM_FUNC_DECL T fmin(T a, T b, T c, T d); - - /// Returns the maximum component-wise values of 2 inputs. If one of the two arguments is NaN, the value of the other argument is returned. - /// - /// @tparam T A floating-point scalar type. - /// - /// @see std::fmax documentation - template - GLM_FUNC_DECL T fmax(T a, T b); - - /// Returns the maximum component-wise values of 3 inputs. If one of the two arguments is NaN, the value of the other argument is returned. - /// - /// @tparam T A floating-point scalar type. - /// - /// @see std::fmax documentation - template - GLM_FUNC_DECL T fmax(T a, T b, T C); - - /// Returns the maximum component-wise values of 4 inputs. If one of the two arguments is NaN, the value of the other argument is returned. - /// - /// @tparam T A floating-point scalar type. - /// - /// @see std::fmax documentation - template - GLM_FUNC_DECL T fmax(T a, T b, T C, T D); - - /// @} -}//namespace glm - -#include "scalar_common.inl" diff --git a/third_party/glm/ext/scalar_common.inl b/third_party/glm/ext/scalar_common.inl deleted file mode 100755 index 118a670..0000000 --- a/third_party/glm/ext/scalar_common.inl +++ /dev/null @@ -1,115 +0,0 @@ -namespace glm -{ - template - GLM_FUNC_QUALIFIER T min(T a, T b, T c) - { - return glm::min(glm::min(a, b), c); - } - - template - GLM_FUNC_QUALIFIER T min(T a, T b, T c, T d) - { - return glm::min(glm::min(a, b), glm::min(c, d)); - } - - template - GLM_FUNC_QUALIFIER T max(T a, T b, T c) - { - return glm::max(glm::max(a, b), c); - } - - template - GLM_FUNC_QUALIFIER T max(T a, T b, T c, T d) - { - return glm::max(glm::max(a, b), glm::max(c, d)); - } - -# if GLM_HAS_CXX11_STL - using std::fmin; -# else - template - GLM_FUNC_QUALIFIER T fmin(T a, T b) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'fmin' only accept floating-point input"); - - if (isnan(a)) - return b; - return min(a, b); - } -# endif - - template - GLM_FUNC_QUALIFIER T fmin(T a, T b, T c) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'fmin' only accept floating-point input"); - - if (isnan(a)) - return fmin(b, c); - if (isnan(b)) - return fmin(a, c); - if (isnan(c)) - return min(a, b); - return min(a, b, c); - } - - template - GLM_FUNC_QUALIFIER T fmin(T a, T b, T c, T d) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'fmin' only accept floating-point input"); - - if (isnan(a)) - return fmin(b, c, d); - if (isnan(b)) - return min(a, fmin(c, d)); - if (isnan(c)) - return fmin(min(a, b), d); - if (isnan(d)) - return min(a, b, c); - return min(a, b, c, d); - } - - -# if GLM_HAS_CXX11_STL - using std::fmax; -# else - template - GLM_FUNC_QUALIFIER T fmax(T a, T b) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'fmax' only accept floating-point input"); - - if (isnan(a)) - return b; - return max(a, b); - } -# endif - - template - GLM_FUNC_QUALIFIER T fmax(T a, T b, T c) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'fmax' only accept floating-point input"); - - if (isnan(a)) - return fmax(b, c); - if (isnan(b)) - return fmax(a, c); - if (isnan(c)) - return max(a, b); - return max(a, b, c); - } - - template - GLM_FUNC_QUALIFIER T fmax(T a, T b, T c, T d) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'fmax' only accept floating-point input"); - - if (isnan(a)) - return fmax(b, c, d); - if (isnan(b)) - return max(a, fmax(c, d)); - if (isnan(c)) - return fmax(max(a, b), d); - if (isnan(d)) - return max(a, b, c); - return max(a, b, c, d); - } -}//namespace glm 
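The scalar_common extension above adds 3- and 4-argument min/max plus NaN-aware fmin/fmax that skip NaN arguments instead of propagating them. A minimal sketch with made-up values, assuming GLM stays on the include path:

#include <cmath>
#include <glm/ext/scalar_common.hpp>

float commonExample()
{
    float a = 3.0f, b = 7.0f, c = std::nanf("");
    float lowest  = glm::min(a, b, 5.0f);   // 3.0f
    float highest = glm::fmax(a, b, c);     // 7.0f, the NaN argument is ignored
    return highest - lowest;                // 4.0f
}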
diff --git a/third_party/glm/ext/scalar_constants.hpp b/third_party/glm/ext/scalar_constants.hpp deleted file mode 100755 index 74e210d..0000000 --- a/third_party/glm/ext/scalar_constants.hpp +++ /dev/null @@ -1,40 +0,0 @@ -/// @ref ext_scalar_constants -/// @file glm/ext/scalar_constants.hpp -/// -/// @defgroup ext_scalar_constants GLM_EXT_scalar_constants -/// @ingroup ext -/// -/// Provides a list of constants and precomputed useful values. -/// -/// Include to use the features of this extension. - -#pragma once - -// Dependencies -#include "../detail/setup.hpp" - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# pragma message("GLM: GLM_EXT_scalar_constants extension included") -#endif - -namespace glm -{ - /// @addtogroup ext_scalar_constants - /// @{ - - /// Return the epsilon constant for floating point types. - template - GLM_FUNC_DECL GLM_CONSTEXPR genType epsilon(); - - /// Return the pi constant for floating point types. - template - GLM_FUNC_DECL GLM_CONSTEXPR genType pi(); - - /// Return the value of cos(1 / 2) for floating point types. - template - GLM_FUNC_DECL GLM_CONSTEXPR genType cos_one_over_two(); - - /// @} -} //namespace glm - -#include "scalar_constants.inl" diff --git a/third_party/glm/ext/scalar_constants.inl b/third_party/glm/ext/scalar_constants.inl deleted file mode 100755 index b475adf..0000000 --- a/third_party/glm/ext/scalar_constants.inl +++ /dev/null @@ -1,24 +0,0 @@ -#include - -namespace glm -{ - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR genType epsilon() - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'epsilon' only accepts floating-point inputs"); - return std::numeric_limits::epsilon(); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR genType pi() - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'pi' only accepts floating-point inputs"); - return static_cast(3.14159265358979323846264338327950288); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR genType cos_one_over_two() - { - return genType(0.877582561890372716130286068203503191); - } -} //namespace glm diff --git a/third_party/glm/ext/scalar_int_sized.hpp b/third_party/glm/ext/scalar_int_sized.hpp deleted file mode 100755 index 8e9c511..0000000 --- a/third_party/glm/ext/scalar_int_sized.hpp +++ /dev/null @@ -1,70 +0,0 @@ -/// @ref ext_scalar_int_sized -/// @file glm/ext/scalar_int_sized.hpp -/// -/// @defgroup ext_scalar_int_sized GLM_EXT_scalar_int_sized -/// @ingroup ext -/// -/// Exposes sized signed integer scalar types. -/// -/// Include to use the features of this extension. -/// -/// @see ext_scalar_uint_sized - -#pragma once - -#include "../detail/setup.hpp" - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# pragma message("GLM: GLM_EXT_scalar_int_sized extension included") -#endif - -namespace glm{ -namespace detail -{ -# if GLM_HAS_EXTENDED_INTEGER_TYPE - typedef std::int8_t int8; - typedef std::int16_t int16; - typedef std::int32_t int32; -# else - typedef signed char int8; - typedef signed short int16; - typedef signed int int32; -#endif// - - template<> - struct is_int - { - enum test {value = ~0}; - }; - - template<> - struct is_int - { - enum test {value = ~0}; - }; - - template<> - struct is_int - { - enum test {value = ~0}; - }; -}//namespace detail - - - /// @addtogroup ext_scalar_int_sized - /// @{ - - /// 8 bit signed integer type. - typedef detail::int8 int8; - - /// 16 bit signed integer type. - typedef detail::int16 int16; - - /// 32 bit signed integer type. 
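The constants and sized-integer headers above are small but widely used; the constants are templated on the scalar type, and the typedefs give fixed-width integer scalars. A brief sketch (function names are illustrative):

#include <glm/ext/scalar_constants.hpp>
#include <glm/ext/scalar_int_sized.hpp>

float circleArea(float radius)
{
    return glm::pi<float>() * radius * radius;
}

bool nearlyZero(double x)
{
    return x < glm::epsilon<double>() && x > -glm::epsilon<double>();
}

glm::int32 counter = 0;   // fixed-width 32-bit signed scalar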
- typedef detail::int32 int32; - - /// 64 bit signed integer type. - typedef detail::int64 int64; - - /// @} -}//namespace glm diff --git a/third_party/glm/ext/scalar_integer.hpp b/third_party/glm/ext/scalar_integer.hpp deleted file mode 100755 index a2ca8a2..0000000 --- a/third_party/glm/ext/scalar_integer.hpp +++ /dev/null @@ -1,92 +0,0 @@ -/// @ref ext_scalar_integer -/// @file glm/ext/scalar_integer.hpp -/// -/// @see core (dependence) -/// -/// @defgroup ext_scalar_integer GLM_EXT_scalar_integer -/// @ingroup ext -/// -/// Include to use the features of this extension. - -#pragma once - -// Dependencies -#include "../detail/setup.hpp" -#include "../detail/qualifier.hpp" -#include "../detail/_vectorize.hpp" -#include "../detail/type_float.hpp" -#include "../vector_relational.hpp" -#include "../common.hpp" -#include - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# pragma message("GLM: GLM_EXT_scalar_integer extension included") -#endif - -namespace glm -{ - /// @addtogroup ext_scalar_integer - /// @{ - - /// Return true if the value is a power of two number. - /// - /// @see ext_scalar_integer - template - GLM_FUNC_DECL bool isPowerOfTwo(genIUType v); - - /// Return the power of two number which value is just higher the input value, - /// round up to a power of two. - /// - /// @see ext_scalar_integer - template - GLM_FUNC_DECL genIUType nextPowerOfTwo(genIUType v); - - /// Return the power of two number which value is just lower the input value, - /// round down to a power of two. - /// - /// @see ext_scalar_integer - template - GLM_FUNC_DECL genIUType prevPowerOfTwo(genIUType v); - - /// Return true if the 'Value' is a multiple of 'Multiple'. - /// - /// @see ext_scalar_integer - template - GLM_FUNC_DECL bool isMultiple(genIUType v, genIUType Multiple); - - /// Higher multiple number of Source. - /// - /// @tparam genIUType Integer scalar or vector types. - /// - /// @param v Source value to which is applied the function - /// @param Multiple Must be a null or positive value - /// - /// @see ext_scalar_integer - template - GLM_FUNC_DECL genIUType nextMultiple(genIUType v, genIUType Multiple); - - /// Lower multiple number of Source. - /// - /// @tparam genIUType Integer scalar or vector types. - /// - /// @param v Source value to which is applied the function - /// @param Multiple Must be a null or positive value - /// - /// @see ext_scalar_integer - template - GLM_FUNC_DECL genIUType prevMultiple(genIUType v, genIUType Multiple); - - /// Returns the bit number of the Nth significant bit set to - /// 1 in the binary representation of value. - /// If value bitcount is less than the Nth significant bit, -1 will be returned. - /// - /// @tparam genIUType Signed or unsigned integer scalar types. 
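The scalar_integer helpers declared above cover typical allocator-style rounding. A hedged sketch with invented names and sizes; the comments show the expected results for those sample inputs.

#include <glm/ext/scalar_integer.hpp>

// nextMultiple(300, 64) == 320, nextPowerOfTwo(300) == 512
int roundUpAllocation(int byteCount, int alignment)
{
    if (glm::isPowerOfTwo(alignment))
        return glm::nextMultiple(byteCount, alignment);
    return glm::nextPowerOfTwo(byteCount);
}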
- /// - /// @see ext_scalar_integer - template - GLM_FUNC_DECL int findNSB(genIUType x, int significantBitCount); - - /// @} -} //namespace glm - -#include "scalar_integer.inl" diff --git a/third_party/glm/ext/scalar_integer.inl b/third_party/glm/ext/scalar_integer.inl deleted file mode 100755 index efba960..0000000 --- a/third_party/glm/ext/scalar_integer.inl +++ /dev/null @@ -1,243 +0,0 @@ -#include "../integer.hpp" - -namespace glm{ -namespace detail -{ - template - struct compute_ceilShift - { - GLM_FUNC_QUALIFIER static vec call(vec const& v, T) - { - return v; - } - }; - - template - struct compute_ceilShift - { - GLM_FUNC_QUALIFIER static vec call(vec const& v, T Shift) - { - return v | (v >> Shift); - } - }; - - template - struct compute_ceilPowerOfTwo - { - GLM_FUNC_QUALIFIER static vec call(vec const& x) - { - GLM_STATIC_ASSERT(!std::numeric_limits::is_iec559, "'ceilPowerOfTwo' only accept integer scalar or vector inputs"); - - vec const Sign(sign(x)); - - vec v(abs(x)); - - v = v - static_cast(1); - v = v | (v >> static_cast(1)); - v = v | (v >> static_cast(2)); - v = v | (v >> static_cast(4)); - v = compute_ceilShift= 2>::call(v, 8); - v = compute_ceilShift= 4>::call(v, 16); - v = compute_ceilShift= 8>::call(v, 32); - return (v + static_cast(1)) * Sign; - } - }; - - template - struct compute_ceilPowerOfTwo - { - GLM_FUNC_QUALIFIER static vec call(vec const& x) - { - GLM_STATIC_ASSERT(!std::numeric_limits::is_iec559, "'ceilPowerOfTwo' only accept integer scalar or vector inputs"); - - vec v(x); - - v = v - static_cast(1); - v = v | (v >> static_cast(1)); - v = v | (v >> static_cast(2)); - v = v | (v >> static_cast(4)); - v = compute_ceilShift= 2>::call(v, 8); - v = compute_ceilShift= 4>::call(v, 16); - v = compute_ceilShift= 8>::call(v, 32); - return v + static_cast(1); - } - }; - - template - struct compute_ceilMultiple{}; - - template<> - struct compute_ceilMultiple - { - template - GLM_FUNC_QUALIFIER static genType call(genType Source, genType Multiple) - { - if(Source > genType(0)) - return Source + (Multiple - std::fmod(Source, Multiple)); - else - return Source + std::fmod(-Source, Multiple); - } - }; - - template<> - struct compute_ceilMultiple - { - template - GLM_FUNC_QUALIFIER static genType call(genType Source, genType Multiple) - { - genType Tmp = Source - genType(1); - return Tmp + (Multiple - (Tmp % Multiple)); - } - }; - - template<> - struct compute_ceilMultiple - { - template - GLM_FUNC_QUALIFIER static genType call(genType Source, genType Multiple) - { - assert(Multiple > genType(0)); - if(Source > genType(0)) - { - genType Tmp = Source - genType(1); - return Tmp + (Multiple - (Tmp % Multiple)); - } - else - return Source + (-Source % Multiple); - } - }; - - template - struct compute_floorMultiple{}; - - template<> - struct compute_floorMultiple - { - template - GLM_FUNC_QUALIFIER static genType call(genType Source, genType Multiple) - { - if(Source >= genType(0)) - return Source - std::fmod(Source, Multiple); - else - return Source - std::fmod(Source, Multiple) - Multiple; - } - }; - - template<> - struct compute_floorMultiple - { - template - GLM_FUNC_QUALIFIER static genType call(genType Source, genType Multiple) - { - if(Source >= genType(0)) - return Source - Source % Multiple; - else - { - genType Tmp = Source + genType(1); - return Tmp - Tmp % Multiple - Multiple; - } - } - }; - - template<> - struct compute_floorMultiple - { - template - GLM_FUNC_QUALIFIER static genType call(genType Source, genType Multiple) - { - if(Source >= genType(0)) - return 
Source - Source % Multiple; - else - { - genType Tmp = Source + genType(1); - return Tmp - Tmp % Multiple - Multiple; - } - } - }; -}//namespace detail - - template - GLM_FUNC_QUALIFIER bool isPowerOfTwo(genIUType Value) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_integer, "'isPowerOfTwo' only accept integer inputs"); - - genIUType const Result = glm::abs(Value); - return !(Result & (Result - 1)); - } - - template - GLM_FUNC_QUALIFIER genIUType nextPowerOfTwo(genIUType value) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_integer, "'nextPowerOfTwo' only accept integer inputs"); - - return detail::compute_ceilPowerOfTwo<1, genIUType, defaultp, std::numeric_limits::is_signed>::call(vec<1, genIUType, defaultp>(value)).x; - } - - template - GLM_FUNC_QUALIFIER genIUType prevPowerOfTwo(genIUType value) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_integer, "'prevPowerOfTwo' only accept integer inputs"); - - return isPowerOfTwo(value) ? value : static_cast(static_cast(1) << static_cast(findMSB(value))); - } - - template - GLM_FUNC_QUALIFIER bool isMultiple(genIUType Value, genIUType Multiple) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_integer, "'isMultiple' only accept integer inputs"); - - return isMultiple(vec<1, genIUType>(Value), vec<1, genIUType>(Multiple)).x; - } - - template - GLM_FUNC_QUALIFIER genIUType nextMultiple(genIUType Source, genIUType Multiple) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_integer, "'nextMultiple' only accept integer inputs"); - - return detail::compute_ceilMultiple::is_iec559, std::numeric_limits::is_signed>::call(Source, Multiple); - } - - template - GLM_FUNC_QUALIFIER genIUType prevMultiple(genIUType Source, genIUType Multiple) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_integer, "'prevMultiple' only accept integer inputs"); - - return detail::compute_floorMultiple::is_iec559, std::numeric_limits::is_signed>::call(Source, Multiple); - } - - template - GLM_FUNC_QUALIFIER int findNSB(genIUType x, int significantBitCount) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_integer, "'findNSB' only accept integer inputs"); - - if(bitCount(x) < significantBitCount) - return -1; - - genIUType const One = static_cast(1); - int bitPos = 0; - - genIUType key = x; - int nBitCount = significantBitCount; - int Step = sizeof(x) * 8 / 2; - while (key > One) - { - genIUType Mask = static_cast((One << Step) - One); - genIUType currentKey = key & Mask; - int currentBitCount = bitCount(currentKey); - if (nBitCount > currentBitCount) - { - nBitCount -= currentBitCount; - bitPos += Step; - key >>= static_cast(Step); - } - else - { - key = key & Mask; - } - - Step >>= 1; - } - - return static_cast(bitPos); - } -}//namespace glm diff --git a/third_party/glm/ext/scalar_relational.hpp b/third_party/glm/ext/scalar_relational.hpp deleted file mode 100755 index 3076a5e..0000000 --- a/third_party/glm/ext/scalar_relational.hpp +++ /dev/null @@ -1,65 +0,0 @@ -/// @ref ext_scalar_relational -/// @file glm/ext/scalar_relational.hpp -/// -/// @defgroup ext_scalar_relational GLM_EXT_scalar_relational -/// @ingroup ext -/// -/// Exposes comparison functions for scalar types that take a user defined epsilon values. -/// -/// Include to use the features of this extension. 
-/// -/// @see core_vector_relational -/// @see ext_vector_relational -/// @see ext_matrix_relational - -#pragma once - -// Dependencies -#include "../detail/qualifier.hpp" - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# pragma message("GLM: GLM_EXT_scalar_relational extension included") -#endif - -namespace glm -{ - /// Returns the component-wise comparison of |x - y| < epsilon. - /// True if this expression is satisfied. - /// - /// @tparam genType Floating-point or integer scalar types - template - GLM_FUNC_DECL GLM_CONSTEXPR bool equal(genType const& x, genType const& y, genType const& epsilon); - - /// Returns the component-wise comparison of |x - y| >= epsilon. - /// True if this expression is not satisfied. - /// - /// @tparam genType Floating-point or integer scalar types - template - GLM_FUNC_DECL GLM_CONSTEXPR bool notEqual(genType const& x, genType const& y, genType const& epsilon); - - /// Returns the component-wise comparison between two scalars in term of ULPs. - /// True if this expression is satisfied. - /// - /// @param x First operand. - /// @param y Second operand. - /// @param ULPs Maximum difference in ULPs between the two operators to consider them equal. - /// - /// @tparam genType Floating-point or integer scalar types - template - GLM_FUNC_DECL GLM_CONSTEXPR bool equal(genType const& x, genType const& y, int ULPs); - - /// Returns the component-wise comparison between two scalars in term of ULPs. - /// True if this expression is not satisfied. - /// - /// @param x First operand. - /// @param y Second operand. - /// @param ULPs Maximum difference in ULPs between the two operators to consider them not equal. - /// - /// @tparam genType Floating-point or integer scalar types - template - GLM_FUNC_DECL GLM_CONSTEXPR bool notEqual(genType const& x, genType const& y, int ULPs); - - /// @} -}//namespace glm - -#include "scalar_relational.inl" diff --git a/third_party/glm/ext/scalar_relational.inl b/third_party/glm/ext/scalar_relational.inl deleted file mode 100755 index c85583e..0000000 --- a/third_party/glm/ext/scalar_relational.inl +++ /dev/null @@ -1,40 +0,0 @@ -#include "../common.hpp" -#include "../ext/scalar_int_sized.hpp" -#include "../ext/scalar_uint_sized.hpp" -#include "../detail/type_float.hpp" - -namespace glm -{ - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR bool equal(genType const& x, genType const& y, genType const& epsilon) - { - return abs(x - y) <= epsilon; - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR bool notEqual(genType const& x, genType const& y, genType const& epsilon) - { - return abs(x - y) > epsilon; - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR bool equal(genType const& x, genType const& y, int MaxULPs) - { - detail::float_t const a(x); - detail::float_t const b(y); - - // Different signs means they do not match. - if(a.negative() != b.negative()) - return false; - - // Find the difference in ULPs. 
- typename detail::float_t::int_type const DiffULPs = abs(a.i - b.i); - return DiffULPs <= MaxULPs; - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR bool notEqual(genType const& x, genType const& y, int ULPs) - { - return !equal(x, y, ULPs); - } -}//namespace glm diff --git a/third_party/glm/ext/scalar_uint_sized.hpp b/third_party/glm/ext/scalar_uint_sized.hpp deleted file mode 100755 index fd5267f..0000000 --- a/third_party/glm/ext/scalar_uint_sized.hpp +++ /dev/null @@ -1,70 +0,0 @@ -/// @ref ext_scalar_uint_sized -/// @file glm/ext/scalar_uint_sized.hpp -/// -/// @defgroup ext_scalar_uint_sized GLM_EXT_scalar_uint_sized -/// @ingroup ext -/// -/// Exposes sized unsigned integer scalar types. -/// -/// Include to use the features of this extension. -/// -/// @see ext_scalar_int_sized - -#pragma once - -#include "../detail/setup.hpp" - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# pragma message("GLM: GLM_EXT_scalar_uint_sized extension included") -#endif - -namespace glm{ -namespace detail -{ -# if GLM_HAS_EXTENDED_INTEGER_TYPE - typedef std::uint8_t uint8; - typedef std::uint16_t uint16; - typedef std::uint32_t uint32; -# else - typedef unsigned char uint8; - typedef unsigned short uint16; - typedef unsigned int uint32; -#endif - - template<> - struct is_int - { - enum test {value = ~0}; - }; - - template<> - struct is_int - { - enum test {value = ~0}; - }; - - template<> - struct is_int - { - enum test {value = ~0}; - }; -}//namespace detail - - - /// @addtogroup ext_scalar_uint_sized - /// @{ - - /// 8 bit unsigned integer type. - typedef detail::uint8 uint8; - - /// 16 bit unsigned integer type. - typedef detail::uint16 uint16; - - /// 32 bit unsigned integer type. - typedef detail::uint32 uint32; - - /// 64 bit unsigned integer type. - typedef detail::uint64 uint64; - - /// @} -}//namespace glm diff --git a/third_party/glm/ext/scalar_ulp.hpp b/third_party/glm/ext/scalar_ulp.hpp deleted file mode 100755 index 941ada3..0000000 --- a/third_party/glm/ext/scalar_ulp.hpp +++ /dev/null @@ -1,74 +0,0 @@ -/// @ref ext_scalar_ulp -/// @file glm/ext/scalar_ulp.hpp -/// -/// @defgroup ext_scalar_ulp GLM_EXT_scalar_ulp -/// @ingroup ext -/// -/// Allow the measurement of the accuracy of a function against a reference -/// implementation. This extension works on floating-point data and provide results -/// in ULP. -/// -/// Include to use the features of this extension. -/// -/// @see ext_vector_ulp -/// @see ext_scalar_relational - -#pragma once - -// Dependencies -#include "../ext/scalar_int_sized.hpp" -#include "../common.hpp" -#include "../detail/qualifier.hpp" - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# pragma message("GLM: GLM_EXT_scalar_ulp extension included") -#endif - -namespace glm -{ - /// Return the next ULP value(s) after the input value(s). - /// - /// @tparam genType A floating-point scalar type. - /// - /// @see ext_scalar_ulp - template - GLM_FUNC_DECL genType nextFloat(genType x); - - /// Return the previous ULP value(s) before the input value(s). - /// - /// @tparam genType A floating-point scalar type. - /// - /// @see ext_scalar_ulp - template - GLM_FUNC_DECL genType prevFloat(genType x); - - /// Return the value(s) ULP distance after the input value(s). - /// - /// @tparam genType A floating-point scalar type. - /// - /// @see ext_scalar_ulp - template - GLM_FUNC_DECL genType nextFloat(genType x, int ULPs); - - /// Return the value(s) ULP distance before the input value(s). 
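The two removed extensions above give GLM its tolerant scalar comparisons: an absolute-epsilon form and a ULP-based form built on nextFloat/prevFloat. A small sketch of how they are used, illustrative only (include paths follow GLM convention):

    #include <glm/ext/scalar_relational.hpp>
    #include <glm/ext/scalar_ulp.hpp>

    float a = 0.1f * 3.0f;                    // not exactly representable, slightly off from 0.3f
    float b = 0.3f;
    bool closeAbs   = glm::equal(a, b, 1e-6f); // |a - b| <= 1e-6
    bool closeUlp   = glm::equal(a, b, 4);     // a and b differ by at most 4 ULPs
    float justAbove = glm::nextFloat(b);       // smallest representable float greater than b
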
- /// - /// @tparam genType A floating-point scalar type. - /// - /// @see ext_scalar_ulp - template - GLM_FUNC_DECL genType prevFloat(genType x, int ULPs); - - /// Return the distance in the number of ULP between 2 single-precision floating-point scalars. - /// - /// @see ext_scalar_ulp - GLM_FUNC_DECL int floatDistance(float x, float y); - - /// Return the distance in the number of ULP between 2 double-precision floating-point scalars. - /// - /// @see ext_scalar_ulp - GLM_FUNC_DECL int64 floatDistance(double x, double y); - - /// @} -}//namespace glm - -#include "scalar_ulp.inl" diff --git a/third_party/glm/ext/scalar_ulp.inl b/third_party/glm/ext/scalar_ulp.inl deleted file mode 100755 index 308df15..0000000 --- a/third_party/glm/ext/scalar_ulp.inl +++ /dev/null @@ -1,284 +0,0 @@ -/// Copyright (C) 1993 by Sun Microsystems, Inc. All rights reserved. -/// -/// Developed at SunPro, a Sun Microsystems, Inc. business. -/// Permission to use, copy, modify, and distribute this -/// software is freely granted, provided that this notice -/// is preserved. - -#include "../detail/type_float.hpp" -#include "../ext/scalar_constants.hpp" -#include -#include - -#if(GLM_COMPILER & GLM_COMPILER_VC) -# pragma warning(push) -# pragma warning(disable : 4127) -#endif - -typedef union -{ - float value; - /* FIXME: Assumes 32 bit int. */ - unsigned int word; -} ieee_float_shape_type; - -typedef union -{ - double value; - struct - { - int lsw; - int msw; - } parts; -} ieee_double_shape_type; - -#define GLM_EXTRACT_WORDS(ix0,ix1,d) \ - do { \ - ieee_double_shape_type ew_u; \ - ew_u.value = (d); \ - (ix0) = ew_u.parts.msw; \ - (ix1) = ew_u.parts.lsw; \ - } while (0) - -#define GLM_GET_FLOAT_WORD(i,d) \ - do { \ - ieee_float_shape_type gf_u; \ - gf_u.value = (d); \ - (i) = gf_u.word; \ - } while (0) - -#define GLM_SET_FLOAT_WORD(d,i) \ - do { \ - ieee_float_shape_type sf_u; \ - sf_u.word = (i); \ - (d) = sf_u.value; \ - } while (0) - -#define GLM_INSERT_WORDS(d,ix0,ix1) \ - do { \ - ieee_double_shape_type iw_u; \ - iw_u.parts.msw = (ix0); \ - iw_u.parts.lsw = (ix1); \ - (d) = iw_u.value; \ - } while (0) - -namespace glm{ -namespace detail -{ - GLM_FUNC_QUALIFIER float nextafterf(float x, float y) - { - volatile float t; - int hx, hy, ix, iy; - - GLM_GET_FLOAT_WORD(hx, x); - GLM_GET_FLOAT_WORD(hy, y); - ix = hx & 0x7fffffff; // |x| - iy = hy & 0x7fffffff; // |y| - - if((ix > 0x7f800000) || // x is nan - (iy > 0x7f800000)) // y is nan - return x + y; - if(abs(y - x) <= epsilon()) - return y; // x=y, return y - if(ix == 0) - { // x == 0 - GLM_SET_FLOAT_WORD(x, (hy & 0x80000000) | 1);// return +-minsubnormal - t = x * x; - if(abs(t - x) <= epsilon()) - return t; - else - return x; // raise underflow flag - } - if(hx >= 0) - { // x > 0 - if(hx > hy) // x > y, x -= ulp - hx -= 1; - else // x < y, x += ulp - hx += 1; - } - else - { // x < 0 - if(hy >= 0 || hx > hy) // x < y, x -= ulp - hx -= 1; - else // x > y, x += ulp - hx += 1; - } - hy = hx & 0x7f800000; - if(hy >= 0x7f800000) - return x + x; // overflow - if(hy < 0x00800000) // underflow - { - t = x * x; - if(abs(t - x) > epsilon()) - { // raise underflow flag - GLM_SET_FLOAT_WORD(y, hx); - return y; - } - } - GLM_SET_FLOAT_WORD(x, hx); - return x; - } - - GLM_FUNC_QUALIFIER double nextafter(double x, double y) - { - volatile double t; - int hx, hy, ix, iy; - unsigned int lx, ly; - - GLM_EXTRACT_WORDS(hx, lx, x); - GLM_EXTRACT_WORDS(hy, ly, y); - ix = hx & 0x7fffffff; // |x| - iy = hy & 0x7fffffff; // |y| - - if(((ix >= 0x7ff00000) && ((ix - 0x7ff00000) | lx) != 
0) || // x is nan - ((iy >= 0x7ff00000) && ((iy - 0x7ff00000) | ly) != 0)) // y is nan - return x + y; - if(abs(y - x) <= epsilon()) - return y; // x=y, return y - if((ix | lx) == 0) - { // x == 0 - GLM_INSERT_WORDS(x, hy & 0x80000000, 1); // return +-minsubnormal - t = x * x; - if(abs(t - x) <= epsilon()) - return t; - else - return x; // raise underflow flag - } - if(hx >= 0) { // x > 0 - if(hx > hy || ((hx == hy) && (lx > ly))) { // x > y, x -= ulp - if(lx == 0) hx -= 1; - lx -= 1; - } - else { // x < y, x += ulp - lx += 1; - if(lx == 0) hx += 1; - } - } - else { // x < 0 - if(hy >= 0 || hx > hy || ((hx == hy) && (lx > ly))){// x < y, x -= ulp - if(lx == 0) hx -= 1; - lx -= 1; - } - else { // x > y, x += ulp - lx += 1; - if(lx == 0) hx += 1; - } - } - hy = hx & 0x7ff00000; - if(hy >= 0x7ff00000) - return x + x; // overflow - if(hy < 0x00100000) - { // underflow - t = x * x; - if(abs(t - x) > epsilon()) - { // raise underflow flag - GLM_INSERT_WORDS(y, hx, lx); - return y; - } - } - GLM_INSERT_WORDS(x, hx, lx); - return x; - } -}//namespace detail -}//namespace glm - -#if(GLM_COMPILER & GLM_COMPILER_VC) -# pragma warning(pop) -#endif - -namespace glm -{ - template<> - GLM_FUNC_QUALIFIER float nextFloat(float x) - { -# if GLM_HAS_CXX11_STL - return std::nextafter(x, std::numeric_limits::max()); -# elif((GLM_COMPILER & GLM_COMPILER_VC) || ((GLM_COMPILER & GLM_COMPILER_INTEL) && (GLM_PLATFORM & GLM_PLATFORM_WINDOWS))) - return detail::nextafterf(x, FLT_MAX); -# elif(GLM_PLATFORM & GLM_PLATFORM_ANDROID) - return __builtin_nextafterf(x, FLT_MAX); -# else - return nextafterf(x, FLT_MAX); -# endif - } - - template<> - GLM_FUNC_QUALIFIER double nextFloat(double x) - { -# if GLM_HAS_CXX11_STL - return std::nextafter(x, std::numeric_limits::max()); -# elif((GLM_COMPILER & GLM_COMPILER_VC) || ((GLM_COMPILER & GLM_COMPILER_INTEL) && (GLM_PLATFORM & GLM_PLATFORM_WINDOWS))) - return detail::nextafter(x, std::numeric_limits::max()); -# elif(GLM_PLATFORM & GLM_PLATFORM_ANDROID) - return __builtin_nextafter(x, DBL_MAX); -# else - return nextafter(x, DBL_MAX); -# endif - } - - template - GLM_FUNC_QUALIFIER T nextFloat(T x, int ULPs) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'next_float' only accept floating-point input"); - assert(ULPs >= 0); - - T temp = x; - for(int i = 0; i < ULPs; ++i) - temp = nextFloat(temp); - return temp; - } - - GLM_FUNC_QUALIFIER float prevFloat(float x) - { -# if GLM_HAS_CXX11_STL - return std::nextafter(x, std::numeric_limits::min()); -# elif((GLM_COMPILER & GLM_COMPILER_VC) || ((GLM_COMPILER & GLM_COMPILER_INTEL) && (GLM_PLATFORM & GLM_PLATFORM_WINDOWS))) - return detail::nextafterf(x, FLT_MIN); -# elif(GLM_PLATFORM & GLM_PLATFORM_ANDROID) - return __builtin_nextafterf(x, FLT_MIN); -# else - return nextafterf(x, FLT_MIN); -# endif - } - - GLM_FUNC_QUALIFIER double prevFloat(double x) - { -# if GLM_HAS_CXX11_STL - return std::nextafter(x, std::numeric_limits::min()); -# elif((GLM_COMPILER & GLM_COMPILER_VC) || ((GLM_COMPILER & GLM_COMPILER_INTEL) && (GLM_PLATFORM & GLM_PLATFORM_WINDOWS))) - return _nextafter(x, DBL_MIN); -# elif(GLM_PLATFORM & GLM_PLATFORM_ANDROID) - return __builtin_nextafter(x, DBL_MIN); -# else - return nextafter(x, DBL_MIN); -# endif - } - - template - GLM_FUNC_QUALIFIER T prevFloat(T x, int ULPs) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'prev_float' only accept floating-point input"); - assert(ULPs >= 0); - - T temp = x; - for(int i = 0; i < ULPs; ++i) - temp = prevFloat(temp); - return temp; - } - - GLM_FUNC_QUALIFIER int 
floatDistance(float x, float y) - { - detail::float_t const a(x); - detail::float_t const b(y); - - return abs(a.i - b.i); - } - - GLM_FUNC_QUALIFIER int64 floatDistance(double x, double y) - { - detail::float_t const a(x); - detail::float_t const b(y); - - return abs(a.i - b.i); - } -}//namespace glm diff --git a/third_party/glm/ext/vector_bool1.hpp b/third_party/glm/ext/vector_bool1.hpp deleted file mode 100755 index 002c320..0000000 --- a/third_party/glm/ext/vector_bool1.hpp +++ /dev/null @@ -1,30 +0,0 @@ -/// @ref ext_vector_bool1 -/// @file glm/ext/vector_bool1.hpp -/// -/// @defgroup ext_vector_bool1 GLM_EXT_vector_bool1 -/// @ingroup ext -/// -/// Exposes bvec1 vector type. -/// -/// Include to use the features of this extension. -/// -/// @see ext_vector_bool1_precision extension. - -#pragma once - -#include "../detail/type_vec1.hpp" - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# pragma message("GLM: GLM_EXT_vector_bool1 extension included") -#endif - -namespace glm -{ - /// @addtogroup ext_vector_bool1 - /// @{ - - /// 1 components vector of boolean. - typedef vec<1, bool, defaultp> bvec1; - - /// @} -}//namespace glm diff --git a/third_party/glm/ext/vector_bool1_precision.hpp b/third_party/glm/ext/vector_bool1_precision.hpp deleted file mode 100755 index e62d3cf..0000000 --- a/third_party/glm/ext/vector_bool1_precision.hpp +++ /dev/null @@ -1,34 +0,0 @@ -/// @ref ext_vector_bool1_precision -/// @file glm/ext/vector_bool1_precision.hpp -/// -/// @defgroup ext_vector_bool1_precision GLM_EXT_vector_bool1_precision -/// @ingroup ext -/// -/// Exposes highp_bvec1, mediump_bvec1 and lowp_bvec1 types. -/// -/// Include to use the features of this extension. - -#pragma once - -#include "../detail/type_vec1.hpp" - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# pragma message("GLM: GLM_EXT_vector_bool1_precision extension included") -#endif - -namespace glm -{ - /// @addtogroup ext_vector_bool1_precision - /// @{ - - /// 1 component vector of bool values. - typedef vec<1, bool, highp> highp_bvec1; - - /// 1 component vector of bool values. - typedef vec<1, bool, mediump> mediump_bvec1; - - /// 1 component vector of bool values. - typedef vec<1, bool, lowp> lowp_bvec1; - - /// @} -}//namespace glm diff --git a/third_party/glm/ext/vector_bool2.hpp b/third_party/glm/ext/vector_bool2.hpp deleted file mode 100755 index 52288b7..0000000 --- a/third_party/glm/ext/vector_bool2.hpp +++ /dev/null @@ -1,18 +0,0 @@ -/// @ref core -/// @file glm/ext/vector_bool2.hpp - -#pragma once -#include "../detail/type_vec2.hpp" - -namespace glm -{ - /// @addtogroup core_vector - /// @{ - - /// 2 components vector of boolean. - /// - /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors - typedef vec<2, bool, defaultp> bvec2; - - /// @} -}//namespace glm diff --git a/third_party/glm/ext/vector_bool2_precision.hpp b/third_party/glm/ext/vector_bool2_precision.hpp deleted file mode 100755 index 4370933..0000000 --- a/third_party/glm/ext/vector_bool2_precision.hpp +++ /dev/null @@ -1,31 +0,0 @@ -/// @ref core -/// @file glm/ext/vector_bool2_precision.hpp - -#pragma once -#include "../detail/type_vec2.hpp" - -namespace glm -{ - /// @addtogroup core_vector_precision - /// @{ - - /// 2 components vector of high qualifier bool numbers. - /// - /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef vec<2, bool, highp> highp_bvec2; - - /// 2 components vector of medium qualifier bool numbers. 
- /// - /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef vec<2, bool, mediump> mediump_bvec2; - - /// 2 components vector of low qualifier bool numbers. - /// - /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef vec<2, bool, lowp> lowp_bvec2; - - /// @} -}//namespace glm diff --git a/third_party/glm/ext/vector_bool3.hpp b/third_party/glm/ext/vector_bool3.hpp deleted file mode 100755 index 90a0b7e..0000000 --- a/third_party/glm/ext/vector_bool3.hpp +++ /dev/null @@ -1,18 +0,0 @@ -/// @ref core -/// @file glm/ext/vector_bool3.hpp - -#pragma once -#include "../detail/type_vec3.hpp" - -namespace glm -{ - /// @addtogroup core_vector - /// @{ - - /// 3 components vector of boolean. - /// - /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors - typedef vec<3, bool, defaultp> bvec3; - - /// @} -}//namespace glm diff --git a/third_party/glm/ext/vector_bool3_precision.hpp b/third_party/glm/ext/vector_bool3_precision.hpp deleted file mode 100755 index 89cd2d3..0000000 --- a/third_party/glm/ext/vector_bool3_precision.hpp +++ /dev/null @@ -1,31 +0,0 @@ -/// @ref core -/// @file glm/ext/vector_bool3_precision.hpp - -#pragma once -#include "../detail/type_vec3.hpp" - -namespace glm -{ - /// @addtogroup core_vector_precision - /// @{ - - /// 3 components vector of high qualifier bool numbers. - /// - /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef vec<3, bool, highp> highp_bvec3; - - /// 3 components vector of medium qualifier bool numbers. - /// - /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef vec<3, bool, mediump> mediump_bvec3; - - /// 3 components vector of low qualifier bool numbers. - /// - /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef vec<3, bool, lowp> lowp_bvec3; - - /// @} -}//namespace glm diff --git a/third_party/glm/ext/vector_bool4.hpp b/third_party/glm/ext/vector_bool4.hpp deleted file mode 100755 index 18aa71b..0000000 --- a/third_party/glm/ext/vector_bool4.hpp +++ /dev/null @@ -1,18 +0,0 @@ -/// @ref core -/// @file glm/ext/vector_bool4.hpp - -#pragma once -#include "../detail/type_vec4.hpp" - -namespace glm -{ - /// @addtogroup core_vector - /// @{ - - /// 4 components vector of boolean. - /// - /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors - typedef vec<4, bool, defaultp> bvec4; - - /// @} -}//namespace glm diff --git a/third_party/glm/ext/vector_bool4_precision.hpp b/third_party/glm/ext/vector_bool4_precision.hpp deleted file mode 100755 index 79786e5..0000000 --- a/third_party/glm/ext/vector_bool4_precision.hpp +++ /dev/null @@ -1,31 +0,0 @@ -/// @ref core -/// @file glm/ext/vector_bool4_precision.hpp - -#pragma once -#include "../detail/type_vec4.hpp" - -namespace glm -{ - /// @addtogroup core_vector_precision - /// @{ - - /// 4 components vector of high qualifier bool numbers. - /// - /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef vec<4, bool, highp> highp_bvec4; - - /// 4 components vector of medium qualifier bool numbers. 
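These boolean vector types are mainly consumed as the result of GLM's component-wise comparison functions. A brief sketch, illustrative only (lessThan, any and all are the core vector_relational functions):

    #include <glm/glm.hpp>

    glm::vec3 p(0.5f, 2.0f, -0.25f);
    glm::bvec3 inside = glm::lessThan(glm::abs(p), glm::vec3(1.0f)); // per-component |p| < 1
    bool allInside = glm::all(inside); // false: the y component is out of range
    bool anyInside = glm::any(inside); // true
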
- /// - /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef vec<4, bool, mediump> mediump_bvec4; - - /// 4 components vector of low qualifier bool numbers. - /// - /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef vec<4, bool, lowp> lowp_bvec4; - - /// @} -}//namespace glm diff --git a/third_party/glm/ext/vector_common.hpp b/third_party/glm/ext/vector_common.hpp deleted file mode 100755 index 324fe1c..0000000 --- a/third_party/glm/ext/vector_common.hpp +++ /dev/null @@ -1,144 +0,0 @@ -/// @ref ext_vector_common -/// @file glm/ext/vector_common.hpp -/// -/// @defgroup ext_vector_common GLM_EXT_vector_common -/// @ingroup ext -/// -/// Exposes min and max functions for 3 to 4 vector parameters. -/// -/// Include to use the features of this extension. -/// -/// @see core_common -/// @see ext_scalar_common - -#pragma once - -// Dependency: -#include "../ext/scalar_common.hpp" -#include "../common.hpp" - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# pragma message("GLM: GLM_EXT_vector_common extension included") -#endif - -namespace glm -{ - /// @addtogroup ext_vector_common - /// @{ - - /// Return the minimum component-wise values of 3 inputs - /// - /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector - /// @tparam T Floating-point or integer scalar types - /// @tparam Q Value from qualifier enum - template - GLM_FUNC_DECL GLM_CONSTEXPR vec min(vec const& a, vec const& b, vec const& c); - - /// Return the minimum component-wise values of 4 inputs - /// - /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector - /// @tparam T Floating-point or integer scalar types - /// @tparam Q Value from qualifier enum - template - GLM_FUNC_DECL GLM_CONSTEXPR vec min(vec const& a, vec const& b, vec const& c, vec const& d); - - /// Return the maximum component-wise values of 3 inputs - /// - /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector - /// @tparam T Floating-point or integer scalar types - /// @tparam Q Value from qualifier enum - template - GLM_FUNC_DECL GLM_CONSTEXPR vec max(vec const& x, vec const& y, vec const& z); - - /// Return the maximum component-wise values of 4 inputs - /// - /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector - /// @tparam T Floating-point or integer scalar types - /// @tparam Q Value from qualifier enum - template - GLM_FUNC_DECL GLM_CONSTEXPR vec max( vec const& x, vec const& y, vec const& z, vec const& w); - - /// Returns y if y < x; otherwise, it returns x. If one of the two arguments is NaN, the value of the other argument is returned. - /// - /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector - /// @tparam T Floating-point scalar types - /// @tparam Q Value from qualifier enum - /// - /// @see std::fmin documentation - template - GLM_FUNC_DECL vec fmin(vec const& x, T y); - - /// Returns y if y < x; otherwise, it returns x. If one of the two arguments is NaN, the value of the other argument is returned. 
- /// - /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector - /// @tparam T Floating-point scalar types - /// @tparam Q Value from qualifier enum - /// - /// @see std::fmin documentation - template - GLM_FUNC_DECL vec fmin(vec const& x, vec const& y); - - /// Returns y if y < x; otherwise, it returns x. If one of the two arguments is NaN, the value of the other argument is returned. - /// - /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector - /// @tparam T Floating-point scalar types - /// @tparam Q Value from qualifier enum - /// - /// @see std::fmin documentation - template - GLM_FUNC_DECL vec fmin(vec const& a, vec const& b, vec const& c); - - /// Returns y if y < x; otherwise, it returns x. If one of the two arguments is NaN, the value of the other argument is returned. - /// - /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector - /// @tparam T Floating-point scalar types - /// @tparam Q Value from qualifier enum - /// - /// @see std::fmin documentation - template - GLM_FUNC_DECL vec fmin(vec const& a, vec const& b, vec const& c, vec const& d); - - /// Returns y if x < y; otherwise, it returns x. If one of the two arguments is NaN, the value of the other argument is returned. - /// - /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector - /// @tparam T Floating-point scalar types - /// @tparam Q Value from qualifier enum - /// - /// @see std::fmax documentation - template - GLM_FUNC_DECL vec fmax(vec const& a, T b); - - /// Returns y if x < y; otherwise, it returns x. If one of the two arguments is NaN, the value of the other argument is returned. - /// - /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector - /// @tparam T Floating-point scalar types - /// @tparam Q Value from qualifier enum - /// - /// @see std::fmax documentation - template - GLM_FUNC_DECL vec fmax(vec const& a, vec const& b); - - /// Returns y if x < y; otherwise, it returns x. If one of the two arguments is NaN, the value of the other argument is returned. - /// - /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector - /// @tparam T Floating-point scalar types - /// @tparam Q Value from qualifier enum - /// - /// @see std::fmax documentation - template - GLM_FUNC_DECL vec fmax(vec const& a, vec const& b, vec const& c); - - /// Returns y if x < y; otherwise, it returns x. If one of the two arguments is NaN, the value of the other argument is returned. 
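GLM_EXT_vector_common extends min/max to three and four arguments and adds fmin/fmax variants with std::fmin/std::fmax NaN semantics (a NaN component is replaced by the other operand). Roughly, illustrative only (include paths follow GLM convention):

    #include <cmath>
    #include <glm/glm.hpp>
    #include <glm/ext/vector_common.hpp>

    glm::vec3 a(1.0f, 5.0f, 2.0f), b(3.0f, 0.0f, 4.0f), c(2.0f, 2.0f, 2.0f);
    glm::vec3 lo = glm::min(a, b, c);              // (1, 0, 2): component-wise minimum of the three inputs
    glm::vec3 x(std::nanf(""), -1.0f, 0.5f);
    glm::vec3 positive = glm::fmax(x, 0.0f);       // (0, 0, 0.5): the NaN component takes the other operand
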
- /// - /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector - /// @tparam T Floating-point scalar types - /// @tparam Q Value from qualifier enum - /// - /// @see std::fmax documentation - template - GLM_FUNC_DECL vec fmax(vec const& a, vec const& b, vec const& c, vec const& d); - - /// @} -}//namespace glm - -#include "vector_common.inl" diff --git a/third_party/glm/ext/vector_common.inl b/third_party/glm/ext/vector_common.inl deleted file mode 100755 index 71f3809..0000000 --- a/third_party/glm/ext/vector_common.inl +++ /dev/null @@ -1,88 +0,0 @@ -#include "../detail/_vectorize.hpp" - -namespace glm -{ - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec min(vec const& x, vec const& y, vec const& z) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'min' only accept floating-point or integer inputs"); - return glm::min(glm::min(x, y), z); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec min(vec const& x, vec const& y, vec const& z, vec const& w) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'min' only accept floating-point or integer inputs"); - return glm::min(glm::min(x, y), glm::min(z, w)); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec max(vec const& x, vec const& y, vec const& z) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'max' only accept floating-point or integer inputs"); - return glm::max(glm::max(x, y), z); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec max(vec const& x, vec const& y, vec const& z, vec const& w) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'max' only accept floating-point or integer inputs"); - return glm::max(glm::max(x, y), glm::max(z, w)); - } - - template - GLM_FUNC_QUALIFIER vec fmin(vec const& a, T b) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'fmin' only accept floating-point inputs"); - return detail::functor2::call(fmin, a, vec(b)); - } - - template - GLM_FUNC_QUALIFIER vec fmin(vec const& a, vec const& b) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'fmin' only accept floating-point inputs"); - return detail::functor2::call(fmin, a, b); - } - - template - GLM_FUNC_QUALIFIER vec fmin(vec const& a, vec const& b, vec const& c) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'fmin' only accept floating-point inputs"); - return fmin(fmin(a, b), c); - } - - template - GLM_FUNC_QUALIFIER vec fmin(vec const& a, vec const& b, vec const& c, vec const& d) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'fmin' only accept floating-point inputs"); - return fmin(fmin(a, b), fmin(c, d)); - } - - template - GLM_FUNC_QUALIFIER vec fmax(vec const& a, T b) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'fmax' only accept floating-point inputs"); - return detail::functor2::call(fmax, a, vec(b)); - } - - template - GLM_FUNC_QUALIFIER vec fmax(vec const& a, vec const& b) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'fmax' only accept floating-point inputs"); - return detail::functor2::call(fmax, a, b); - } - - template - GLM_FUNC_QUALIFIER vec fmax(vec const& a, vec const& b, vec const& c) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'fmax' only accept floating-point inputs"); - return fmax(fmax(a, b), c); - } - - template - GLM_FUNC_QUALIFIER vec fmax(vec const& a, vec const& b, vec const& c, vec const& d) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'fmax' only accept floating-point inputs"); - return fmax(fmax(a, b), fmax(c, d)); - } -}//namespace glm diff --git 
a/third_party/glm/ext/vector_double1.hpp b/third_party/glm/ext/vector_double1.hpp deleted file mode 100755 index 3882667..0000000 --- a/third_party/glm/ext/vector_double1.hpp +++ /dev/null @@ -1,31 +0,0 @@ -/// @ref ext_vector_double1 -/// @file glm/ext/vector_double1.hpp -/// -/// @defgroup ext_vector_double1 GLM_EXT_vector_double1 -/// @ingroup ext -/// -/// Exposes double-precision floating point vector type with one component. -/// -/// Include to use the features of this extension. -/// -/// @see ext_vector_double1_precision extension. -/// @see ext_vector_float1 extension. - -#pragma once - -#include "../detail/type_vec1.hpp" - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# pragma message("GLM: GLM_EXT_vector_double1 extension included") -#endif - -namespace glm -{ - /// @addtogroup ext_vector_double1 - /// @{ - - /// 1 components vector of double-precision floating-point numbers. - typedef vec<1, double, defaultp> dvec1; - - /// @} -}//namespace glm diff --git a/third_party/glm/ext/vector_double1_precision.hpp b/third_party/glm/ext/vector_double1_precision.hpp deleted file mode 100755 index 1d47195..0000000 --- a/third_party/glm/ext/vector_double1_precision.hpp +++ /dev/null @@ -1,36 +0,0 @@ -/// @ref ext_vector_double1_precision -/// @file glm/ext/vector_double1_precision.hpp -/// -/// @defgroup ext_vector_double1_precision GLM_EXT_vector_double1_precision -/// @ingroup ext -/// -/// Exposes highp_dvec1, mediump_dvec1 and lowp_dvec1 types. -/// -/// Include to use the features of this extension. -/// -/// @see ext_vector_double1 - -#pragma once - -#include "../detail/type_vec1.hpp" - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# pragma message("GLM: GLM_EXT_vector_double1_precision extension included") -#endif - -namespace glm -{ - /// @addtogroup ext_vector_double1_precision - /// @{ - - /// 1 component vector of double-precision floating-point numbers using high precision arithmetic in term of ULPs. - typedef vec<1, double, highp> highp_dvec1; - - /// 1 component vector of double-precision floating-point numbers using medium precision arithmetic in term of ULPs. - typedef vec<1, double, mediump> mediump_dvec1; - - /// 1 component vector of double-precision floating-point numbers using low precision arithmetic in term of ULPs. - typedef vec<1, double, lowp> lowp_dvec1; - - /// @} -}//namespace glm diff --git a/third_party/glm/ext/vector_double2.hpp b/third_party/glm/ext/vector_double2.hpp deleted file mode 100755 index 60e3577..0000000 --- a/third_party/glm/ext/vector_double2.hpp +++ /dev/null @@ -1,18 +0,0 @@ -/// @ref core -/// @file glm/ext/vector_double2.hpp - -#pragma once -#include "../detail/type_vec2.hpp" - -namespace glm -{ - /// @addtogroup core_vector - /// @{ - - /// 2 components vector of double-precision floating-point numbers. - /// - /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors - typedef vec<2, double, defaultp> dvec2; - - /// @} -}//namespace glm diff --git a/third_party/glm/ext/vector_double2_precision.hpp b/third_party/glm/ext/vector_double2_precision.hpp deleted file mode 100755 index fa53940..0000000 --- a/third_party/glm/ext/vector_double2_precision.hpp +++ /dev/null @@ -1,31 +0,0 @@ -/// @ref core -/// @file glm/ext/vector_double2_precision.hpp - -#pragma once -#include "../detail/type_vec2.hpp" - -namespace glm -{ - /// @addtogroup core_vector_precision - /// @{ - - /// 2 components vector of high double-qualifier floating-point numbers. 
- /// - /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef vec<2, double, highp> highp_dvec2; - - /// 2 components vector of medium double-qualifier floating-point numbers. - /// - /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef vec<2, double, mediump> mediump_dvec2; - - /// 2 components vector of low double-qualifier floating-point numbers. - /// - /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef vec<2, double, lowp> lowp_dvec2; - - /// @} -}//namespace glm diff --git a/third_party/glm/ext/vector_double3.hpp b/third_party/glm/ext/vector_double3.hpp deleted file mode 100755 index 6dfe4c6..0000000 --- a/third_party/glm/ext/vector_double3.hpp +++ /dev/null @@ -1,18 +0,0 @@ -/// @ref core -/// @file glm/ext/vector_double3.hpp - -#pragma once -#include "../detail/type_vec3.hpp" - -namespace glm -{ - /// @addtogroup core_vector - /// @{ - - /// 3 components vector of double-precision floating-point numbers. - /// - /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors - typedef vec<3, double, defaultp> dvec3; - - /// @} -}//namespace glm diff --git a/third_party/glm/ext/vector_double3_precision.hpp b/third_party/glm/ext/vector_double3_precision.hpp deleted file mode 100755 index a8cfa37..0000000 --- a/third_party/glm/ext/vector_double3_precision.hpp +++ /dev/null @@ -1,34 +0,0 @@ -/// @ref core -/// @file glm/ext/vector_double3_precision.hpp - -#pragma once -#include "../detail/type_vec3.hpp" - -namespace glm -{ - /// @addtogroup core_vector_precision - /// @{ - - /// 3 components vector of high double-qualifier floating-point numbers. - /// There is no guarantee on the actual qualifier. - /// - /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef vec<3, double, highp> highp_dvec3; - - /// 3 components vector of medium double-qualifier floating-point numbers. - /// There is no guarantee on the actual qualifier. - /// - /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef vec<3, double, mediump> mediump_dvec3; - - /// 3 components vector of low double-qualifier floating-point numbers. - /// There is no guarantee on the actual qualifier. - /// - /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef vec<3, double, lowp> lowp_dvec3; - - /// @} -}//namespace glm diff --git a/third_party/glm/ext/vector_double4.hpp b/third_party/glm/ext/vector_double4.hpp deleted file mode 100755 index 87f225f..0000000 --- a/third_party/glm/ext/vector_double4.hpp +++ /dev/null @@ -1,18 +0,0 @@ -/// @ref core -/// @file glm/ext/vector_double4.hpp - -#pragma once -#include "../detail/type_vec4.hpp" - -namespace glm -{ - /// @addtogroup core_vector - /// @{ - - /// 4 components vector of double-precision floating-point numbers. 
- /// - /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors - typedef vec<4, double, defaultp> dvec4; - - /// @} -}//namespace glm diff --git a/third_party/glm/ext/vector_double4_precision.hpp b/third_party/glm/ext/vector_double4_precision.hpp deleted file mode 100755 index 09cafa1..0000000 --- a/third_party/glm/ext/vector_double4_precision.hpp +++ /dev/null @@ -1,35 +0,0 @@ -/// @ref core -/// @file glm/ext/vector_double4_precision.hpp - -#pragma once -#include "../detail/setup.hpp" -#include "../detail/type_vec4.hpp" - -namespace glm -{ - /// @addtogroup core_vector_precision - /// @{ - - /// 4 components vector of high double-qualifier floating-point numbers. - /// There is no guarantee on the actual qualifier. - /// - /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef vec<4, double, highp> highp_dvec4; - - /// 4 components vector of medium double-qualifier floating-point numbers. - /// There is no guarantee on the actual qualifier. - /// - /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef vec<4, double, mediump> mediump_dvec4; - - /// 4 components vector of low double-qualifier floating-point numbers. - /// There is no guarantee on the actual qualifier. - /// - /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef vec<4, double, lowp> lowp_dvec4; - - /// @} -}//namespace glm diff --git a/third_party/glm/ext/vector_float1.hpp b/third_party/glm/ext/vector_float1.hpp deleted file mode 100755 index 28acc2c..0000000 --- a/third_party/glm/ext/vector_float1.hpp +++ /dev/null @@ -1,31 +0,0 @@ -/// @ref ext_vector_float1 -/// @file glm/ext/vector_float1.hpp -/// -/// @defgroup ext_vector_float1 GLM_EXT_vector_float1 -/// @ingroup ext -/// -/// Exposes single-precision floating point vector type with one component. -/// -/// Include to use the features of this extension. -/// -/// @see ext_vector_float1_precision extension. -/// @see ext_vector_double1 extension. - -#pragma once - -#include "../detail/type_vec1.hpp" - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# pragma message("GLM: GLM_EXT_vector_float1 extension included") -#endif - -namespace glm -{ - /// @addtogroup ext_vector_float1 - /// @{ - - /// 1 components vector of single-precision floating-point numbers. - typedef vec<1, float, defaultp> vec1; - - /// @} -}//namespace glm diff --git a/third_party/glm/ext/vector_float1_precision.hpp b/third_party/glm/ext/vector_float1_precision.hpp deleted file mode 100755 index 6e8dad8..0000000 --- a/third_party/glm/ext/vector_float1_precision.hpp +++ /dev/null @@ -1,36 +0,0 @@ -/// @ref ext_vector_float1_precision -/// @file glm/ext/vector_float1_precision.hpp -/// -/// @defgroup ext_vector_float1_precision GLM_EXT_vector_float1_precision -/// @ingroup ext -/// -/// Exposes highp_vec1, mediump_vec1 and lowp_vec1 types. -/// -/// Include to use the features of this extension. -/// -/// @see ext_vector_float1 extension. - -#pragma once - -#include "../detail/type_vec1.hpp" - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# pragma message("GLM: GLM_EXT_vector_float1_precision extension included") -#endif - -namespace glm -{ - /// @addtogroup ext_vector_float1_precision - /// @{ - - /// 1 component vector of single-precision floating-point numbers using high precision arithmetic in term of ULPs. 
- typedef vec<1, float, highp> highp_vec1; - - /// 1 component vector of single-precision floating-point numbers using medium precision arithmetic in term of ULPs. - typedef vec<1, float, mediump> mediump_vec1; - - /// 1 component vector of single-precision floating-point numbers using low precision arithmetic in term of ULPs. - typedef vec<1, float, lowp> lowp_vec1; - - /// @} -}//namespace glm diff --git a/third_party/glm/ext/vector_float2.hpp b/third_party/glm/ext/vector_float2.hpp deleted file mode 100755 index d31545d..0000000 --- a/third_party/glm/ext/vector_float2.hpp +++ /dev/null @@ -1,18 +0,0 @@ -/// @ref core -/// @file glm/ext/vector_float2.hpp - -#pragma once -#include "../detail/type_vec2.hpp" - -namespace glm -{ - /// @addtogroup core_vector - /// @{ - - /// 2 components vector of single-precision floating-point numbers. - /// - /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors - typedef vec<2, float, defaultp> vec2; - - /// @} -}//namespace glm diff --git a/third_party/glm/ext/vector_float2_precision.hpp b/third_party/glm/ext/vector_float2_precision.hpp deleted file mode 100755 index 23c0820..0000000 --- a/third_party/glm/ext/vector_float2_precision.hpp +++ /dev/null @@ -1,31 +0,0 @@ -/// @ref core -/// @file glm/ext/vector_float2_precision.hpp - -#pragma once -#include "../detail/type_vec2.hpp" - -namespace glm -{ - /// @addtogroup core_vector_precision - /// @{ - - /// 2 components vector of high single-qualifier floating-point numbers. - /// - /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef vec<2, float, highp> highp_vec2; - - /// 2 components vector of medium single-qualifier floating-point numbers. - /// - /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef vec<2, float, mediump> mediump_vec2; - - /// 2 components vector of low single-qualifier floating-point numbers. - /// - /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef vec<2, float, lowp> lowp_vec2; - - /// @} -}//namespace glm diff --git a/third_party/glm/ext/vector_float3.hpp b/third_party/glm/ext/vector_float3.hpp deleted file mode 100755 index cd79a62..0000000 --- a/third_party/glm/ext/vector_float3.hpp +++ /dev/null @@ -1,18 +0,0 @@ -/// @ref core -/// @file glm/ext/vector_float3.hpp - -#pragma once -#include "../detail/type_vec3.hpp" - -namespace glm -{ - /// @addtogroup core_vector - /// @{ - - /// 3 components vector of single-precision floating-point numbers. - /// - /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors - typedef vec<3, float, defaultp> vec3; - - /// @} -}//namespace glm diff --git a/third_party/glm/ext/vector_float3_precision.hpp b/third_party/glm/ext/vector_float3_precision.hpp deleted file mode 100755 index be640b5..0000000 --- a/third_party/glm/ext/vector_float3_precision.hpp +++ /dev/null @@ -1,31 +0,0 @@ -/// @ref core -/// @file glm/ext/vector_float3_precision.hpp - -#pragma once -#include "../detail/type_vec3.hpp" - -namespace glm -{ - /// @addtogroup core_vector_precision - /// @{ - - /// 3 components vector of high single-qualifier floating-point numbers. 
- /// - /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef vec<3, float, highp> highp_vec3; - - /// 3 components vector of medium single-qualifier floating-point numbers. - /// - /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef vec<3, float, mediump> mediump_vec3; - - /// 3 components vector of low single-qualifier floating-point numbers. - /// - /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef vec<3, float, lowp> lowp_vec3; - - /// @} -}//namespace glm diff --git a/third_party/glm/ext/vector_float4.hpp b/third_party/glm/ext/vector_float4.hpp deleted file mode 100755 index d84adcc..0000000 --- a/third_party/glm/ext/vector_float4.hpp +++ /dev/null @@ -1,18 +0,0 @@ -/// @ref core -/// @file glm/ext/vector_float4.hpp - -#pragma once -#include "../detail/type_vec4.hpp" - -namespace glm -{ - /// @addtogroup core_vector - /// @{ - - /// 4 components vector of single-precision floating-point numbers. - /// - /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors - typedef vec<4, float, defaultp> vec4; - - /// @} -}//namespace glm diff --git a/third_party/glm/ext/vector_float4_precision.hpp b/third_party/glm/ext/vector_float4_precision.hpp deleted file mode 100755 index aede838..0000000 --- a/third_party/glm/ext/vector_float4_precision.hpp +++ /dev/null @@ -1,31 +0,0 @@ -/// @ref core -/// @file glm/ext/vector_float4_precision.hpp - -#pragma once -#include "../detail/type_vec4.hpp" - -namespace glm -{ - /// @addtogroup core_vector_precision - /// @{ - - /// 4 components vector of high single-qualifier floating-point numbers. - /// - /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef vec<4, float, highp> highp_vec4; - - /// 4 components vector of medium single-qualifier floating-point numbers. - /// - /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef vec<4, float, mediump> mediump_vec4; - - /// 4 components vector of low single-qualifier floating-point numbers. - /// - /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef vec<4, float, lowp> lowp_vec4; - - /// @} -}//namespace glm diff --git a/third_party/glm/ext/vector_int1.hpp b/third_party/glm/ext/vector_int1.hpp deleted file mode 100755 index dc86038..0000000 --- a/third_party/glm/ext/vector_int1.hpp +++ /dev/null @@ -1,32 +0,0 @@ -/// @ref ext_vector_int1 -/// @file glm/ext/vector_int1.hpp -/// -/// @defgroup ext_vector_int1 GLM_EXT_vector_int1 -/// @ingroup ext -/// -/// Exposes ivec1 vector type. -/// -/// Include to use the features of this extension. -/// -/// @see ext_vector_uint1 extension. -/// @see ext_vector_int1_precision extension. - -#pragma once - -#include "../detail/type_vec1.hpp" - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# pragma message("GLM: GLM_EXT_vector_int1 extension included") -#endif - -namespace glm -{ - /// @addtogroup ext_vector_int1 - /// @{ - - /// 1 component vector of signed integer numbers. 
- typedef vec<1, int, defaultp> ivec1; - - /// @} -}//namespace glm - diff --git a/third_party/glm/ext/vector_int1_precision.hpp b/third_party/glm/ext/vector_int1_precision.hpp deleted file mode 100755 index 3323954..0000000 --- a/third_party/glm/ext/vector_int1_precision.hpp +++ /dev/null @@ -1,34 +0,0 @@ -/// @ref ext_vector_int1_precision -/// @file glm/ext/vector_int1_precision.hpp -/// -/// @defgroup ext_vector_int1_precision GLM_EXT_vector_int1_precision -/// @ingroup ext -/// -/// Exposes highp_ivec1, mediump_ivec1 and lowp_ivec1 types. -/// -/// Include to use the features of this extension. - -#pragma once - -#include "../detail/type_vec1.hpp" - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# pragma message("GLM: GLM_EXT_vector_int1_precision extension included") -#endif - -namespace glm -{ - /// @addtogroup ext_vector_int1_precision - /// @{ - - /// 1 component vector of signed integer values. - typedef vec<1, int, highp> highp_ivec1; - - /// 1 component vector of signed integer values. - typedef vec<1, int, mediump> mediump_ivec1; - - /// 1 component vector of signed integer values. - typedef vec<1, int, lowp> lowp_ivec1; - - /// @} -}//namespace glm diff --git a/third_party/glm/ext/vector_int2.hpp b/third_party/glm/ext/vector_int2.hpp deleted file mode 100755 index aef803e..0000000 --- a/third_party/glm/ext/vector_int2.hpp +++ /dev/null @@ -1,18 +0,0 @@ -/// @ref core -/// @file glm/ext/vector_int2.hpp - -#pragma once -#include "../detail/type_vec2.hpp" - -namespace glm -{ - /// @addtogroup core_vector - /// @{ - - /// 2 components vector of signed integer numbers. - /// - /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors - typedef vec<2, int, defaultp> ivec2; - - /// @} -}//namespace glm diff --git a/third_party/glm/ext/vector_int2_precision.hpp b/third_party/glm/ext/vector_int2_precision.hpp deleted file mode 100755 index 97315fc..0000000 --- a/third_party/glm/ext/vector_int2_precision.hpp +++ /dev/null @@ -1,31 +0,0 @@ -/// @ref core -/// @file glm/ext/vector_int2_precision.hpp - -#pragma once -#include "../detail/type_vec2.hpp" - -namespace glm -{ - /// @addtogroup core_vector_precision - /// @{ - - /// 2 components vector of high qualifier signed integer numbers. - /// - /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef vec<2, int, highp> highp_ivec2; - - /// 2 components vector of medium qualifier signed integer numbers. - /// - /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef vec<2, int, mediump> mediump_ivec2; - - /// 2 components vector of low qualifier signed integer numbers. - /// - /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef vec<2, int, lowp> lowp_ivec2; - - /// @} -}//namespace glm diff --git a/third_party/glm/ext/vector_int3.hpp b/third_party/glm/ext/vector_int3.hpp deleted file mode 100755 index 4767e61..0000000 --- a/third_party/glm/ext/vector_int3.hpp +++ /dev/null @@ -1,18 +0,0 @@ -/// @ref core -/// @file glm/ext/vector_int3.hpp - -#pragma once -#include "../detail/type_vec3.hpp" - -namespace glm -{ - /// @addtogroup core_vector - /// @{ - - /// 3 components vector of signed integer numbers. 
- /// - /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors - typedef vec<3, int, defaultp> ivec3; - - /// @} -}//namespace glm diff --git a/third_party/glm/ext/vector_int3_precision.hpp b/third_party/glm/ext/vector_int3_precision.hpp deleted file mode 100755 index 2cd3f5f..0000000 --- a/third_party/glm/ext/vector_int3_precision.hpp +++ /dev/null @@ -1,31 +0,0 @@ -/// @ref core -/// @file glm/ext/vector_int3_precision.hpp - -#pragma once -#include "../detail/type_vec3.hpp" - -namespace glm -{ - /// @addtogroup core_vector_precision - /// @{ - - /// 3 components vector of high qualifier signed integer numbers. - /// - /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef vec<3, int, highp> highp_ivec3; - - /// 3 components vector of medium qualifier signed integer numbers. - /// - /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef vec<3, int, mediump> mediump_ivec3; - - /// 3 components vector of low qualifier signed integer numbers. - /// - /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef vec<3, int, lowp> lowp_ivec3; - - /// @} -}//namespace glm diff --git a/third_party/glm/ext/vector_int4.hpp b/third_party/glm/ext/vector_int4.hpp deleted file mode 100755 index bb23adf..0000000 --- a/third_party/glm/ext/vector_int4.hpp +++ /dev/null @@ -1,18 +0,0 @@ -/// @ref core -/// @file glm/ext/vector_int4.hpp - -#pragma once -#include "../detail/type_vec4.hpp" - -namespace glm -{ - /// @addtogroup core_vector - /// @{ - - /// 4 components vector of signed integer numbers. - /// - /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors - typedef vec<4, int, defaultp> ivec4; - - /// @} -}//namespace glm diff --git a/third_party/glm/ext/vector_int4_precision.hpp b/third_party/glm/ext/vector_int4_precision.hpp deleted file mode 100755 index 4fcd791..0000000 --- a/third_party/glm/ext/vector_int4_precision.hpp +++ /dev/null @@ -1,31 +0,0 @@ -/// @ref core -/// @file glm/ext/vector_int4_precision.hpp - -#pragma once -#include "../detail/type_vec4.hpp" - -namespace glm -{ - /// @addtogroup core_vector_precision - /// @{ - - /// 4 components vector of high qualifier signed integer numbers. - /// - /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef vec<4, int, highp> highp_ivec4; - - /// 4 components vector of medium qualifier signed integer numbers. - /// - /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef vec<4, int, mediump> mediump_ivec4; - - /// 4 components vector of low qualifier signed integer numbers. 
- /// - /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef vec<4, int, lowp> lowp_ivec4; - - /// @} -}//namespace glm diff --git a/third_party/glm/ext/vector_integer.hpp b/third_party/glm/ext/vector_integer.hpp deleted file mode 100755 index 1304dd8..0000000 --- a/third_party/glm/ext/vector_integer.hpp +++ /dev/null @@ -1,149 +0,0 @@ -/// @ref ext_vector_integer -/// @file glm/ext/vector_integer.hpp -/// -/// @see core (dependence) -/// @see ext_vector_integer (dependence) -/// -/// @defgroup ext_vector_integer GLM_EXT_vector_integer -/// @ingroup ext -/// -/// Include to use the features of this extension. - -#pragma once - -// Dependencies -#include "../detail/setup.hpp" -#include "../detail/qualifier.hpp" -#include "../detail/_vectorize.hpp" -#include "../vector_relational.hpp" -#include "../common.hpp" -#include - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# pragma message("GLM: GLM_EXT_vector_integer extension included") -#endif - -namespace glm -{ - /// @addtogroup ext_vector_integer - /// @{ - - /// Return true if the value is a power of two number. - /// - /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector - /// @tparam T Signed or unsigned integer scalar types. - /// @tparam Q Value from qualifier enum - /// - /// @see ext_vector_integer - template - GLM_FUNC_DECL vec isPowerOfTwo(vec const& v); - - /// Return the power of two number which value is just higher the input value, - /// round up to a power of two. - /// - /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector - /// @tparam T Signed or unsigned integer scalar types. - /// @tparam Q Value from qualifier enum - /// - /// @see ext_vector_integer - template - GLM_FUNC_DECL vec nextPowerOfTwo(vec const& v); - - /// Return the power of two number which value is just lower the input value, - /// round down to a power of two. - /// - /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector - /// @tparam T Signed or unsigned integer scalar types. - /// @tparam Q Value from qualifier enum - /// - /// @see ext_vector_integer - template - GLM_FUNC_DECL vec prevPowerOfTwo(vec const& v); - - /// Return true if the 'Value' is a multiple of 'Multiple'. - /// - /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector - /// @tparam T Signed or unsigned integer scalar types. - /// @tparam Q Value from qualifier enum - /// - /// @see ext_vector_integer - template - GLM_FUNC_DECL vec isMultiple(vec const& v, T Multiple); - - /// Return true if the 'Value' is a multiple of 'Multiple'. - /// - /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector - /// @tparam T Signed or unsigned integer scalar types. - /// @tparam Q Value from qualifier enum - /// - /// @see ext_vector_integer - template - GLM_FUNC_DECL vec isMultiple(vec const& v, vec const& Multiple); - - /// Higher multiple number of Source. - /// - /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector - /// @tparam T Signed or unsigned integer scalar types. - /// @tparam Q Value from qualifier enum - /// - /// @param v Source values to which is applied the function - /// @param Multiple Must be a null or positive value - /// - /// @see ext_vector_integer - template - GLM_FUNC_DECL vec nextMultiple(vec const& v, T Multiple); - - /// Higher multiple number of Source. 
- /// - /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector - /// @tparam T Signed or unsigned integer scalar types. - /// @tparam Q Value from qualifier enum - /// - /// @param v Source values to which is applied the function - /// @param Multiple Must be a null or positive value - /// - /// @see ext_vector_integer - template - GLM_FUNC_DECL vec nextMultiple(vec const& v, vec const& Multiple); - - /// Lower multiple number of Source. - /// - /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector - /// @tparam T Signed or unsigned integer scalar types. - /// @tparam Q Value from qualifier enum - /// - /// @param v Source values to which is applied the function - /// @param Multiple Must be a null or positive value - /// - /// @see ext_vector_integer - template - GLM_FUNC_DECL vec prevMultiple(vec const& v, T Multiple); - - /// Lower multiple number of Source. - /// - /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector - /// @tparam T Signed or unsigned integer scalar types. - /// @tparam Q Value from qualifier enum - /// - /// @param v Source values to which is applied the function - /// @param Multiple Must be a null or positive value - /// - /// @see ext_vector_integer - template - GLM_FUNC_DECL vec prevMultiple(vec const& v, vec const& Multiple); - - /// Returns the bit number of the Nth significant bit set to - /// 1 in the binary representation of value. - /// If value bitcount is less than the Nth significant bit, -1 will be returned. - /// - /// @tparam L An integer between 1 and 4 included that qualify the dimension of the vector. - /// @tparam T Signed or unsigned integer scalar types. - /// - /// @see ext_vector_integer - template - GLM_FUNC_DECL vec findNSB(vec const& Source, vec SignificantBitCount); - - /// @} -} //namespace glm - -#include "vector_integer.inl" diff --git a/third_party/glm/ext/vector_integer.inl b/third_party/glm/ext/vector_integer.inl deleted file mode 100755 index 939ff5e..0000000 --- a/third_party/glm/ext/vector_integer.inl +++ /dev/null @@ -1,85 +0,0 @@ -#include "scalar_integer.hpp" - -namespace glm -{ - template - GLM_FUNC_QUALIFIER vec isPowerOfTwo(vec const& Value) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_integer, "'isPowerOfTwo' only accept integer inputs"); - - vec const Result(abs(Value)); - return equal(Result & (Result - vec(1)), vec(0)); - } - - template - GLM_FUNC_QUALIFIER vec nextPowerOfTwo(vec const& v) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_integer, "'nextPowerOfTwo' only accept integer inputs"); - - return detail::compute_ceilPowerOfTwo::is_signed>::call(v); - } - - template - GLM_FUNC_QUALIFIER vec prevPowerOfTwo(vec const& v) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_integer, "'prevPowerOfTwo' only accept integer inputs"); - - return detail::functor1::call(prevPowerOfTwo, v); - } - - template - GLM_FUNC_QUALIFIER vec isMultiple(vec const& Value, T Multiple) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_integer, "'isMultiple' only accept integer inputs"); - - return (Value % Multiple) == vec(0); - } - - template - GLM_FUNC_QUALIFIER vec isMultiple(vec const& Value, vec const& Multiple) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_integer, "'isMultiple' only accept integer inputs"); - - return (Value % Multiple) == vec(0); - } - - template - GLM_FUNC_QUALIFIER vec nextMultiple(vec const& Source, T Multiple) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_integer, "'nextMultiple' only accept integer inputs"); - 
- return detail::functor2::call(nextMultiple, Source, vec(Multiple)); - } - - template - GLM_FUNC_QUALIFIER vec nextMultiple(vec const& Source, vec const& Multiple) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_integer, "'nextMultiple' only accept integer inputs"); - - return detail::functor2::call(nextMultiple, Source, Multiple); - } - - template - GLM_FUNC_QUALIFIER vec prevMultiple(vec const& Source, T Multiple) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_integer, "'prevMultiple' only accept integer inputs"); - - return detail::functor2::call(prevMultiple, Source, vec(Multiple)); - } - - template - GLM_FUNC_QUALIFIER vec prevMultiple(vec const& Source, vec const& Multiple) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_integer, "'prevMultiple' only accept integer inputs"); - - return detail::functor2::call(prevMultiple, Source, Multiple); - } - - template - GLM_FUNC_QUALIFIER vec findNSB(vec const& Source, vec SignificantBitCount) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_integer, "'findNSB' only accept integer inputs"); - - return detail::functor2_vec_int::call(findNSB, Source, SignificantBitCount); - } -}//namespace glm diff --git a/third_party/glm/ext/vector_relational.hpp b/third_party/glm/ext/vector_relational.hpp deleted file mode 100755 index 1c2367d..0000000 --- a/third_party/glm/ext/vector_relational.hpp +++ /dev/null @@ -1,107 +0,0 @@ -/// @ref ext_vector_relational -/// @file glm/ext/vector_relational.hpp -/// -/// @see core (dependence) -/// @see ext_scalar_integer (dependence) -/// -/// @defgroup ext_vector_relational GLM_EXT_vector_relational -/// @ingroup ext -/// -/// Exposes comparison functions for vector types that take a user defined epsilon values. -/// -/// Include to use the features of this extension. -/// -/// @see core_vector_relational -/// @see ext_scalar_relational -/// @see ext_matrix_relational - -#pragma once - -// Dependencies -#include "../detail/qualifier.hpp" - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# pragma message("GLM: GLM_EXT_vector_relational extension included") -#endif - -namespace glm -{ - /// @addtogroup ext_vector_relational - /// @{ - - /// Returns the component-wise comparison of |x - y| < epsilon. - /// True if this expression is satisfied. - /// - /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector - /// @tparam T Floating-point or integer scalar types - /// @tparam Q Value from qualifier enum - template - GLM_FUNC_DECL GLM_CONSTEXPR vec equal(vec const& x, vec const& y, T epsilon); - - /// Returns the component-wise comparison of |x - y| < epsilon. - /// True if this expression is satisfied. - /// - /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector - /// @tparam T Floating-point or integer scalar types - /// @tparam Q Value from qualifier enum - template - GLM_FUNC_DECL GLM_CONSTEXPR vec equal(vec const& x, vec const& y, vec const& epsilon); - - /// Returns the component-wise comparison of |x - y| >= epsilon. - /// True if this expression is not satisfied. - /// - /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector - /// @tparam T Floating-point or integer scalar types - /// @tparam Q Value from qualifier enum - template - GLM_FUNC_DECL GLM_CONSTEXPR vec notEqual(vec const& x, vec const& y, T epsilon); - - /// Returns the component-wise comparison of |x - y| >= epsilon. - /// True if this expression is not satisfied. 
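A small sketch of the epsilon-based comparisons declared in the vector_relational extension above: |x - y| < epsilon is evaluated per component and can be collapsed with glm::all. Tolerance and values are illustrative assumptions.

    #include <glm/glm.hpp>
    #include <glm/ext/vector_relational.hpp>
    #include <cassert>

    int main()
    {
        glm::vec3 const a(1.0f, 2.0f, 3.0f);
        glm::vec3 const b(1.0f + 1e-6f, 2.0f, 3.0f);

        // Component-wise |a - b| < epsilon, collapsed to a single bool.
        assert(glm::all(glm::equal(a, b, 1e-4f)));

        // The strict operator== still reports a mismatch for these values.
        assert(a != b);
        return 0;
    }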
- /// - /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector - /// @tparam T Floating-point or integer scalar types - /// @tparam Q Value from qualifier enum - template - GLM_FUNC_DECL GLM_CONSTEXPR vec notEqual(vec const& x, vec const& y, vec const& epsilon); - - /// Returns the component-wise comparison between two vectors in term of ULPs. - /// True if this expression is satisfied. - /// - /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector - /// @tparam T Floating-point - /// @tparam Q Value from qualifier enum - template - GLM_FUNC_DECL GLM_CONSTEXPR vec equal(vec const& x, vec const& y, int ULPs); - - /// Returns the component-wise comparison between two vectors in term of ULPs. - /// True if this expression is satisfied. - /// - /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector - /// @tparam T Floating-point - /// @tparam Q Value from qualifier enum - template - GLM_FUNC_DECL GLM_CONSTEXPR vec equal(vec const& x, vec const& y, vec const& ULPs); - - /// Returns the component-wise comparison between two vectors in term of ULPs. - /// True if this expression is not satisfied. - /// - /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector - /// @tparam T Floating-point - /// @tparam Q Value from qualifier enum - template - GLM_FUNC_DECL GLM_CONSTEXPR vec notEqual(vec const& x, vec const& y, int ULPs); - - /// Returns the component-wise comparison between two vectors in term of ULPs. - /// True if this expression is not satisfied. - /// - /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector - /// @tparam T Floating-point - /// @tparam Q Value from qualifier enum - template - GLM_FUNC_DECL GLM_CONSTEXPR vec notEqual(vec const& x, vec const& y, vec const& ULPs); - - /// @} -}//namespace glm - -#include "vector_relational.inl" diff --git a/third_party/glm/ext/vector_relational.inl b/third_party/glm/ext/vector_relational.inl deleted file mode 100755 index 7a39ab5..0000000 --- a/third_party/glm/ext/vector_relational.inl +++ /dev/null @@ -1,75 +0,0 @@ -#include "../vector_relational.hpp" -#include "../common.hpp" -#include "../detail/qualifier.hpp" -#include "../detail/type_float.hpp" - -namespace glm -{ - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec equal(vec const& x, vec const& y, T Epsilon) - { - return equal(x, y, vec(Epsilon)); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec equal(vec const& x, vec const& y, vec const& Epsilon) - { - return lessThanEqual(abs(x - y), Epsilon); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec notEqual(vec const& x, vec const& y, T Epsilon) - { - return notEqual(x, y, vec(Epsilon)); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec notEqual(vec const& x, vec const& y, vec const& Epsilon) - { - return greaterThan(abs(x - y), Epsilon); - } - - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec equal(vec const& x, vec const& y, int MaxULPs) - { - return equal(x, y, vec(MaxULPs)); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec equal(vec const& x, vec const& y, vec const& MaxULPs) - { - vec Result(false); - for(length_t i = 0; i < L; ++i) - { - detail::float_t const a(x[i]); - detail::float_t const b(y[i]); - - // Different signs means they do not match. 
- if(a.negative() != b.negative()) - { - // Check for equality to make sure +0==-0 - Result[i] = a.mantissa() == b.mantissa() && a.exponent() == b.exponent(); - } - else - { - // Find the difference in ULPs. - typename detail::float_t::int_type const DiffULPs = abs(a.i - b.i); - Result[i] = DiffULPs <= MaxULPs[i]; - } - } - return Result; - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec notEqual(vec const& x, vec const& y, int MaxULPs) - { - return notEqual(x, y, vec(MaxULPs)); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR vec notEqual(vec const& x, vec const& y, vec const& MaxULPs) - { - return not_(equal(x, y, MaxULPs)); - } -}//namespace glm diff --git a/third_party/glm/ext/vector_uint1.hpp b/third_party/glm/ext/vector_uint1.hpp deleted file mode 100755 index eb8a704..0000000 --- a/third_party/glm/ext/vector_uint1.hpp +++ /dev/null @@ -1,32 +0,0 @@ -/// @ref ext_vector_uint1 -/// @file glm/ext/vector_uint1.hpp -/// -/// @defgroup ext_vector_uint1 GLM_EXT_vector_uint1 -/// @ingroup ext -/// -/// Exposes uvec1 vector type. -/// -/// Include to use the features of this extension. -/// -/// @see ext_vector_int1 extension. -/// @see ext_vector_uint1_precision extension. - -#pragma once - -#include "../detail/type_vec1.hpp" - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# pragma message("GLM: GLM_EXT_vector_uint1 extension included") -#endif - -namespace glm -{ - /// @addtogroup ext_vector_uint1 - /// @{ - - /// 1 component vector of unsigned integer numbers. - typedef vec<1, unsigned int, defaultp> uvec1; - - /// @} -}//namespace glm - diff --git a/third_party/glm/ext/vector_uint1_precision.hpp b/third_party/glm/ext/vector_uint1_precision.hpp deleted file mode 100755 index 30daa5b..0000000 --- a/third_party/glm/ext/vector_uint1_precision.hpp +++ /dev/null @@ -1,40 +0,0 @@ -/// @ref ext_vector_uint1_precision -/// @file glm/ext/vector_uint1_precision.hpp -/// -/// @defgroup ext_vector_uint1_precision GLM_EXT_vector_uint1_precision -/// @ingroup ext -/// -/// Exposes highp_uvec1, mediump_uvec1 and lowp_uvec1 types. -/// -/// Include to use the features of this extension. - -#pragma once - -#include "../detail/type_vec1.hpp" - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# pragma message("GLM: GLM_EXT_vector_uint1_precision extension included") -#endif - -namespace glm -{ - /// @addtogroup ext_vector_uint1_precision - /// @{ - - /// 1 component vector of unsigned integer values. - /// - /// @see ext_vector_uint1_precision - typedef vec<1, unsigned int, highp> highp_uvec1; - - /// 1 component vector of unsigned integer values. - /// - /// @see ext_vector_uint1_precision - typedef vec<1, unsigned int, mediump> mediump_uvec1; - - /// 1 component vector of unsigned integer values. - /// - /// @see ext_vector_uint1_precision - typedef vec<1, unsigned int, lowp> lowp_uvec1; - - /// @} -}//namespace glm diff --git a/third_party/glm/ext/vector_uint2.hpp b/third_party/glm/ext/vector_uint2.hpp deleted file mode 100755 index 03c00f5..0000000 --- a/third_party/glm/ext/vector_uint2.hpp +++ /dev/null @@ -1,18 +0,0 @@ -/// @ref core -/// @file glm/ext/vector_uint2.hpp - -#pragma once -#include "../detail/type_vec2.hpp" - -namespace glm -{ - /// @addtogroup core_vector - /// @{ - - /// 2 components vector of unsigned integer numbers. 
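The ULP-based overloads implemented above compare the integer representations of the floats. A hedged sketch, using nextFloat from the ULP extension to build a value a known number of representable steps away; the tolerance of 4 ULPs is an arbitrary choice for illustration.

    #include <glm/glm.hpp>
    #include <glm/ext/vector_relational.hpp>
    #include <glm/ext/vector_ulp.hpp>
    #include <cassert>

    int main()
    {
        glm::vec2 const x(1.0f, 2.0f);
        glm::vec2 const y = glm::nextFloat(x, 2); // shift each component by 2 ULPs

        assert(glm::all(glm::equal(x, y, 4)));    // within a 4 ULP tolerance
        assert(glm::all(glm::notEqual(x, y, 1))); // but more than 1 ULP apart
        return 0;
    }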
- /// - /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors - typedef vec<2, unsigned int, defaultp> uvec2; - - /// @} -}//namespace glm diff --git a/third_party/glm/ext/vector_uint2_precision.hpp b/third_party/glm/ext/vector_uint2_precision.hpp deleted file mode 100755 index 2ba7b0d..0000000 --- a/third_party/glm/ext/vector_uint2_precision.hpp +++ /dev/null @@ -1,31 +0,0 @@ -/// @ref core -/// @file glm/ext/vector_uint2_precision.hpp - -#pragma once -#include "../detail/type_vec2.hpp" - -namespace glm -{ - /// @addtogroup core_vector_precision - /// @{ - - /// 2 components vector of high qualifier unsigned integer numbers. - /// - /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef vec<2, unsigned int, highp> highp_uvec2; - - /// 2 components vector of medium qualifier unsigned integer numbers. - /// - /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef vec<2, unsigned int, mediump> mediump_uvec2; - - /// 2 components vector of low qualifier unsigned integer numbers. - /// - /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef vec<2, unsigned int, lowp> lowp_uvec2; - - /// @} -}//namespace glm diff --git a/third_party/glm/ext/vector_uint3.hpp b/third_party/glm/ext/vector_uint3.hpp deleted file mode 100755 index f5b41c4..0000000 --- a/third_party/glm/ext/vector_uint3.hpp +++ /dev/null @@ -1,18 +0,0 @@ -/// @ref core -/// @file glm/ext/vector_uint3.hpp - -#pragma once -#include "../detail/type_vec3.hpp" - -namespace glm -{ - /// @addtogroup core_vector - /// @{ - - /// 3 components vector of unsigned integer numbers. - /// - /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors - typedef vec<3, unsigned int, defaultp> uvec3; - - /// @} -}//namespace glm diff --git a/third_party/glm/ext/vector_uint3_precision.hpp b/third_party/glm/ext/vector_uint3_precision.hpp deleted file mode 100755 index 125191c..0000000 --- a/third_party/glm/ext/vector_uint3_precision.hpp +++ /dev/null @@ -1,31 +0,0 @@ -/// @ref core -/// @file glm/ext/vector_uint3_precision.hpp - -#pragma once -#include "../detail/type_vec3.hpp" - -namespace glm -{ - /// @addtogroup core_vector_precision - /// @{ - - /// 3 components vector of high qualifier unsigned integer numbers. - /// - /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef vec<3, unsigned int, highp> highp_uvec3; - - /// 3 components vector of medium qualifier unsigned integer numbers. - /// - /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef vec<3, unsigned int, mediump> mediump_uvec3; - - /// 3 components vector of low qualifier unsigned integer numbers. 
- /// - /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef vec<3, unsigned int, lowp> lowp_uvec3; - - /// @} -}//namespace glm diff --git a/third_party/glm/ext/vector_uint4.hpp b/third_party/glm/ext/vector_uint4.hpp deleted file mode 100755 index 32ced58..0000000 --- a/third_party/glm/ext/vector_uint4.hpp +++ /dev/null @@ -1,18 +0,0 @@ -/// @ref core -/// @file glm/ext/vector_uint4.hpp - -#pragma once -#include "../detail/type_vec4.hpp" - -namespace glm -{ - /// @addtogroup core_vector - /// @{ - - /// 4 components vector of unsigned integer numbers. - /// - /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors - typedef vec<4, unsigned int, defaultp> uvec4; - - /// @} -}//namespace glm diff --git a/third_party/glm/ext/vector_uint4_precision.hpp b/third_party/glm/ext/vector_uint4_precision.hpp deleted file mode 100755 index cf4097c..0000000 --- a/third_party/glm/ext/vector_uint4_precision.hpp +++ /dev/null @@ -1,31 +0,0 @@ -/// @ref core -/// @file glm/ext/vector_uint4_precision.hpp - -#pragma once -#include "../detail/type_vec4.hpp" - -namespace glm -{ - /// @addtogroup core_vector_precision - /// @{ - - /// 4 components vector of high qualifier unsigned integer numbers. - /// - /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef vec<4, unsigned int, highp> highp_uvec4; - - /// 4 components vector of medium qualifier unsigned integer numbers. - /// - /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef vec<4, unsigned int, mediump> mediump_uvec4; - - /// 4 components vector of low qualifier unsigned integer numbers. - /// - /// @see GLSL 4.20.8 specification, section 4.1.5 Vectors - /// @see GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier - typedef vec<4, unsigned int, lowp> lowp_uvec4; - - /// @} -}//namespace glm diff --git a/third_party/glm/ext/vector_ulp.hpp b/third_party/glm/ext/vector_ulp.hpp deleted file mode 100755 index 6210396..0000000 --- a/third_party/glm/ext/vector_ulp.hpp +++ /dev/null @@ -1,109 +0,0 @@ -/// @ref ext_vector_ulp -/// @file glm/ext/vector_ulp.hpp -/// -/// @defgroup ext_vector_ulp GLM_EXT_vector_ulp -/// @ingroup ext -/// -/// Allow the measurement of the accuracy of a function against a reference -/// implementation. This extension works on floating-point data and provide results -/// in ULP. -/// -/// Include to use the features of this extension. -/// -/// @see ext_scalar_ulp -/// @see ext_scalar_relational -/// @see ext_vector_relational - -#pragma once - -// Dependencies -#include "../ext/scalar_ulp.hpp" - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# pragma message("GLM: GLM_EXT_vector_ulp extension included") -#endif - -namespace glm -{ - /// Return the next ULP value(s) after the input value(s). - /// - /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector - /// @tparam T Floating-point - /// @tparam Q Value from qualifier enum - /// - /// @see ext_scalar_ulp - template - GLM_FUNC_DECL vec nextFloat(vec const& x); - - /// Return the value(s) ULP distance after the input value(s). 
- /// - /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector - /// @tparam T Floating-point - /// @tparam Q Value from qualifier enum - /// - /// @see ext_scalar_ulp - template - GLM_FUNC_DECL vec nextFloat(vec const& x, int ULPs); - - /// Return the value(s) ULP distance after the input value(s). - /// - /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector - /// @tparam T Floating-point - /// @tparam Q Value from qualifier enum - /// - /// @see ext_scalar_ulp - template - GLM_FUNC_DECL vec nextFloat(vec const& x, vec const& ULPs); - - /// Return the previous ULP value(s) before the input value(s). - /// - /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector - /// @tparam T Floating-point - /// @tparam Q Value from qualifier enum - /// - /// @see ext_scalar_ulp - template - GLM_FUNC_DECL vec prevFloat(vec const& x); - - /// Return the value(s) ULP distance before the input value(s). - /// - /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector - /// @tparam T Floating-point - /// @tparam Q Value from qualifier enum - /// - /// @see ext_scalar_ulp - template - GLM_FUNC_DECL vec prevFloat(vec const& x, int ULPs); - - /// Return the value(s) ULP distance before the input value(s). - /// - /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector - /// @tparam T Floating-point - /// @tparam Q Value from qualifier enum - /// - /// @see ext_scalar_ulp - template - GLM_FUNC_DECL vec prevFloat(vec const& x, vec const& ULPs); - - /// Return the distance in the number of ULP between 2 single-precision floating-point scalars. - /// - /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector - /// @tparam Q Value from qualifier enum - /// - /// @see ext_scalar_ulp - template - GLM_FUNC_DECL vec floatDistance(vec const& x, vec const& y); - - /// Return the distance in the number of ULP between 2 double-precision floating-point scalars. 
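For context, a minimal sketch of the vector_ulp helpers declared above (nextFloat, prevFloat, floatDistance); the exact neighbouring values depend on the platform's IEEE-754 float representation, the rest is an assumption for illustration.

    #include <glm/glm.hpp>
    #include <glm/ext/vector_ulp.hpp>
    #include <cassert>

    int main()
    {
        glm::vec3 const x(1.0f, 2.0f, 4.0f);

        // Neighbouring representable values, one ULP away in each direction.
        glm::vec3 const up   = glm::nextFloat(x);
        glm::vec3 const down = glm::prevFloat(x);

        // floatDistance reports the signed ULP distance per component.
        glm::ivec3 const d = glm::floatDistance(down, up);
        assert(d == glm::ivec3(2));
        return 0;
    }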
- /// - /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector - /// @tparam Q Value from qualifier enum - /// - /// @see ext_scalar_ulp - template - GLM_FUNC_DECL vec floatDistance(vec const& x, vec const& y); - - /// @} -}//namespace glm - -#include "vector_ulp.inl" diff --git a/third_party/glm/ext/vector_ulp.inl b/third_party/glm/ext/vector_ulp.inl deleted file mode 100755 index 91565ce..0000000 --- a/third_party/glm/ext/vector_ulp.inl +++ /dev/null @@ -1,74 +0,0 @@ -namespace glm -{ - template - GLM_FUNC_QUALIFIER vec nextFloat(vec const& x) - { - vec Result; - for(length_t i = 0, n = Result.length(); i < n; ++i) - Result[i] = nextFloat(x[i]); - return Result; - } - - template - GLM_FUNC_QUALIFIER vec nextFloat(vec const& x, int ULPs) - { - vec Result; - for(length_t i = 0, n = Result.length(); i < n; ++i) - Result[i] = nextFloat(x[i], ULPs); - return Result; - } - - template - GLM_FUNC_QUALIFIER vec nextFloat(vec const& x, vec const& ULPs) - { - vec Result; - for(length_t i = 0, n = Result.length(); i < n; ++i) - Result[i] = nextFloat(x[i], ULPs[i]); - return Result; - } - - template - GLM_FUNC_QUALIFIER vec prevFloat(vec const& x) - { - vec Result; - for(length_t i = 0, n = Result.length(); i < n; ++i) - Result[i] = prevFloat(x[i]); - return Result; - } - - template - GLM_FUNC_QUALIFIER vec prevFloat(vec const& x, int ULPs) - { - vec Result; - for(length_t i = 0, n = Result.length(); i < n; ++i) - Result[i] = prevFloat(x[i], ULPs); - return Result; - } - - template - GLM_FUNC_QUALIFIER vec prevFloat(vec const& x, vec const& ULPs) - { - vec Result; - for(length_t i = 0, n = Result.length(); i < n; ++i) - Result[i] = prevFloat(x[i], ULPs[i]); - return Result; - } - - template - GLM_FUNC_QUALIFIER vec floatDistance(vec const& x, vec const& y) - { - vec Result; - for(length_t i = 0, n = Result.length(); i < n; ++i) - Result[i] = floatDistance(x[i], y[i]); - return Result; - } - - template - GLM_FUNC_QUALIFIER vec floatDistance(vec const& x, vec const& y) - { - vec Result; - for(length_t i = 0, n = Result.length(); i < n; ++i) - Result[i] = floatDistance(x[i], y[i]); - return Result; - } -}//namespace glm diff --git a/third_party/glm/fwd.hpp b/third_party/glm/fwd.hpp deleted file mode 100755 index 474d44f..0000000 --- a/third_party/glm/fwd.hpp +++ /dev/null @@ -1,818 +0,0 @@ -#pragma once - -#include "detail/qualifier.hpp" - -namespace glm -{ -#if GLM_HAS_EXTENDED_INTEGER_TYPE - typedef std::int8_t int8; - typedef std::int16_t int16; - typedef std::int32_t int32; - typedef std::int64_t int64; - - typedef std::uint8_t uint8; - typedef std::uint16_t uint16; - typedef std::uint32_t uint32; - typedef std::uint64_t uint64; -#else - typedef signed char int8; - typedef signed short int16; - typedef signed int int32; - typedef detail::int64 int64; - - typedef unsigned char uint8; - typedef unsigned short uint16; - typedef unsigned int uint32; - typedef detail::uint64 uint64; -#endif - - // Scalar int - - typedef int8 lowp_i8; - typedef int8 mediump_i8; - typedef int8 highp_i8; - typedef int8 i8; - - typedef int8 lowp_int8; - typedef int8 mediump_int8; - typedef int8 highp_int8; - - typedef int8 lowp_int8_t; - typedef int8 mediump_int8_t; - typedef int8 highp_int8_t; - typedef int8 int8_t; - - typedef int16 lowp_i16; - typedef int16 mediump_i16; - typedef int16 highp_i16; - typedef int16 i16; - - typedef int16 lowp_int16; - typedef int16 mediump_int16; - typedef int16 highp_int16; - - typedef int16 lowp_int16_t; - typedef int16 mediump_int16_t; - typedef int16 
highp_int16_t; - typedef int16 int16_t; - - typedef int32 lowp_i32; - typedef int32 mediump_i32; - typedef int32 highp_i32; - typedef int32 i32; - - typedef int32 lowp_int32; - typedef int32 mediump_int32; - typedef int32 highp_int32; - - typedef int32 lowp_int32_t; - typedef int32 mediump_int32_t; - typedef int32 highp_int32_t; - typedef int32 int32_t; - - typedef int64 lowp_i64; - typedef int64 mediump_i64; - typedef int64 highp_i64; - typedef int64 i64; - - typedef int64 lowp_int64; - typedef int64 mediump_int64; - typedef int64 highp_int64; - - typedef int64 lowp_int64_t; - typedef int64 mediump_int64_t; - typedef int64 highp_int64_t; - typedef int64 int64_t; - - // Scalar uint - - typedef uint8 lowp_u8; - typedef uint8 mediump_u8; - typedef uint8 highp_u8; - typedef uint8 u8; - - typedef uint8 lowp_uint8; - typedef uint8 mediump_uint8; - typedef uint8 highp_uint8; - - typedef uint8 lowp_uint8_t; - typedef uint8 mediump_uint8_t; - typedef uint8 highp_uint8_t; - typedef uint8 uint8_t; - - typedef uint16 lowp_u16; - typedef uint16 mediump_u16; - typedef uint16 highp_u16; - typedef uint16 u16; - - typedef uint16 lowp_uint16; - typedef uint16 mediump_uint16; - typedef uint16 highp_uint16; - - typedef uint16 lowp_uint16_t; - typedef uint16 mediump_uint16_t; - typedef uint16 highp_uint16_t; - typedef uint16 uint16_t; - - typedef uint32 lowp_u32; - typedef uint32 mediump_u32; - typedef uint32 highp_u32; - typedef uint32 u32; - - typedef uint32 lowp_uint32; - typedef uint32 mediump_uint32; - typedef uint32 highp_uint32; - - typedef uint32 lowp_uint32_t; - typedef uint32 mediump_uint32_t; - typedef uint32 highp_uint32_t; - typedef uint32 uint32_t; - - typedef uint64 lowp_u64; - typedef uint64 mediump_u64; - typedef uint64 highp_u64; - typedef uint64 u64; - - typedef uint64 lowp_uint64; - typedef uint64 mediump_uint64; - typedef uint64 highp_uint64; - - typedef uint64 lowp_uint64_t; - typedef uint64 mediump_uint64_t; - typedef uint64 highp_uint64_t; - typedef uint64 uint64_t; - - // Scalar float - - typedef float lowp_f32; - typedef float mediump_f32; - typedef float highp_f32; - typedef float f32; - - typedef float lowp_float32; - typedef float mediump_float32; - typedef float highp_float32; - typedef float float32; - - typedef float lowp_float32_t; - typedef float mediump_float32_t; - typedef float highp_float32_t; - typedef float float32_t; - - - typedef double lowp_f64; - typedef double mediump_f64; - typedef double highp_f64; - typedef double f64; - - typedef double lowp_float64; - typedef double mediump_float64; - typedef double highp_float64; - typedef double float64; - - typedef double lowp_float64_t; - typedef double mediump_float64_t; - typedef double highp_float64_t; - typedef double float64_t; - - // Vector bool - - typedef vec<1, bool, lowp> lowp_bvec1; - typedef vec<2, bool, lowp> lowp_bvec2; - typedef vec<3, bool, lowp> lowp_bvec3; - typedef vec<4, bool, lowp> lowp_bvec4; - - typedef vec<1, bool, mediump> mediump_bvec1; - typedef vec<2, bool, mediump> mediump_bvec2; - typedef vec<3, bool, mediump> mediump_bvec3; - typedef vec<4, bool, mediump> mediump_bvec4; - - typedef vec<1, bool, highp> highp_bvec1; - typedef vec<2, bool, highp> highp_bvec2; - typedef vec<3, bool, highp> highp_bvec3; - typedef vec<4, bool, highp> highp_bvec4; - - typedef vec<1, bool, defaultp> bvec1; - typedef vec<2, bool, defaultp> bvec2; - typedef vec<3, bool, defaultp> bvec3; - typedef vec<4, bool, defaultp> bvec4; - - // Vector int - - typedef vec<1, i32, lowp> lowp_ivec1; - typedef vec<2, i32, lowp> 
lowp_ivec2; - typedef vec<3, i32, lowp> lowp_ivec3; - typedef vec<4, i32, lowp> lowp_ivec4; - - typedef vec<1, i32, mediump> mediump_ivec1; - typedef vec<2, i32, mediump> mediump_ivec2; - typedef vec<3, i32, mediump> mediump_ivec3; - typedef vec<4, i32, mediump> mediump_ivec4; - - typedef vec<1, i32, highp> highp_ivec1; - typedef vec<2, i32, highp> highp_ivec2; - typedef vec<3, i32, highp> highp_ivec3; - typedef vec<4, i32, highp> highp_ivec4; - - typedef vec<1, i32, defaultp> ivec1; - typedef vec<2, i32, defaultp> ivec2; - typedef vec<3, i32, defaultp> ivec3; - typedef vec<4, i32, defaultp> ivec4; - - typedef vec<1, i8, lowp> lowp_i8vec1; - typedef vec<2, i8, lowp> lowp_i8vec2; - typedef vec<3, i8, lowp> lowp_i8vec3; - typedef vec<4, i8, lowp> lowp_i8vec4; - - typedef vec<1, i8, mediump> mediump_i8vec1; - typedef vec<2, i8, mediump> mediump_i8vec2; - typedef vec<3, i8, mediump> mediump_i8vec3; - typedef vec<4, i8, mediump> mediump_i8vec4; - - typedef vec<1, i8, highp> highp_i8vec1; - typedef vec<2, i8, highp> highp_i8vec2; - typedef vec<3, i8, highp> highp_i8vec3; - typedef vec<4, i8, highp> highp_i8vec4; - - typedef vec<1, i8, defaultp> i8vec1; - typedef vec<2, i8, defaultp> i8vec2; - typedef vec<3, i8, defaultp> i8vec3; - typedef vec<4, i8, defaultp> i8vec4; - - typedef vec<1, i16, lowp> lowp_i16vec1; - typedef vec<2, i16, lowp> lowp_i16vec2; - typedef vec<3, i16, lowp> lowp_i16vec3; - typedef vec<4, i16, lowp> lowp_i16vec4; - - typedef vec<1, i16, mediump> mediump_i16vec1; - typedef vec<2, i16, mediump> mediump_i16vec2; - typedef vec<3, i16, mediump> mediump_i16vec3; - typedef vec<4, i16, mediump> mediump_i16vec4; - - typedef vec<1, i16, highp> highp_i16vec1; - typedef vec<2, i16, highp> highp_i16vec2; - typedef vec<3, i16, highp> highp_i16vec3; - typedef vec<4, i16, highp> highp_i16vec4; - - typedef vec<1, i16, defaultp> i16vec1; - typedef vec<2, i16, defaultp> i16vec2; - typedef vec<3, i16, defaultp> i16vec3; - typedef vec<4, i16, defaultp> i16vec4; - - typedef vec<1, i32, lowp> lowp_i32vec1; - typedef vec<2, i32, lowp> lowp_i32vec2; - typedef vec<3, i32, lowp> lowp_i32vec3; - typedef vec<4, i32, lowp> lowp_i32vec4; - - typedef vec<1, i32, mediump> mediump_i32vec1; - typedef vec<2, i32, mediump> mediump_i32vec2; - typedef vec<3, i32, mediump> mediump_i32vec3; - typedef vec<4, i32, mediump> mediump_i32vec4; - - typedef vec<1, i32, highp> highp_i32vec1; - typedef vec<2, i32, highp> highp_i32vec2; - typedef vec<3, i32, highp> highp_i32vec3; - typedef vec<4, i32, highp> highp_i32vec4; - - typedef vec<1, i32, defaultp> i32vec1; - typedef vec<2, i32, defaultp> i32vec2; - typedef vec<3, i32, defaultp> i32vec3; - typedef vec<4, i32, defaultp> i32vec4; - - typedef vec<1, i64, lowp> lowp_i64vec1; - typedef vec<2, i64, lowp> lowp_i64vec2; - typedef vec<3, i64, lowp> lowp_i64vec3; - typedef vec<4, i64, lowp> lowp_i64vec4; - - typedef vec<1, i64, mediump> mediump_i64vec1; - typedef vec<2, i64, mediump> mediump_i64vec2; - typedef vec<3, i64, mediump> mediump_i64vec3; - typedef vec<4, i64, mediump> mediump_i64vec4; - - typedef vec<1, i64, highp> highp_i64vec1; - typedef vec<2, i64, highp> highp_i64vec2; - typedef vec<3, i64, highp> highp_i64vec3; - typedef vec<4, i64, highp> highp_i64vec4; - - typedef vec<1, i64, defaultp> i64vec1; - typedef vec<2, i64, defaultp> i64vec2; - typedef vec<3, i64, defaultp> i64vec3; - typedef vec<4, i64, defaultp> i64vec4; - - // Vector uint - - typedef vec<1, u32, lowp> lowp_uvec1; - typedef vec<2, u32, lowp> lowp_uvec2; - typedef vec<3, u32, lowp> lowp_uvec3; - 
typedef vec<4, u32, lowp> lowp_uvec4; - - typedef vec<1, u32, mediump> mediump_uvec1; - typedef vec<2, u32, mediump> mediump_uvec2; - typedef vec<3, u32, mediump> mediump_uvec3; - typedef vec<4, u32, mediump> mediump_uvec4; - - typedef vec<1, u32, highp> highp_uvec1; - typedef vec<2, u32, highp> highp_uvec2; - typedef vec<3, u32, highp> highp_uvec3; - typedef vec<4, u32, highp> highp_uvec4; - - typedef vec<1, u32, defaultp> uvec1; - typedef vec<2, u32, defaultp> uvec2; - typedef vec<3, u32, defaultp> uvec3; - typedef vec<4, u32, defaultp> uvec4; - - typedef vec<1, u8, lowp> lowp_u8vec1; - typedef vec<2, u8, lowp> lowp_u8vec2; - typedef vec<3, u8, lowp> lowp_u8vec3; - typedef vec<4, u8, lowp> lowp_u8vec4; - - typedef vec<1, u8, mediump> mediump_u8vec1; - typedef vec<2, u8, mediump> mediump_u8vec2; - typedef vec<3, u8, mediump> mediump_u8vec3; - typedef vec<4, u8, mediump> mediump_u8vec4; - - typedef vec<1, u8, highp> highp_u8vec1; - typedef vec<2, u8, highp> highp_u8vec2; - typedef vec<3, u8, highp> highp_u8vec3; - typedef vec<4, u8, highp> highp_u8vec4; - - typedef vec<1, u8, defaultp> u8vec1; - typedef vec<2, u8, defaultp> u8vec2; - typedef vec<3, u8, defaultp> u8vec3; - typedef vec<4, u8, defaultp> u8vec4; - - typedef vec<1, u16, lowp> lowp_u16vec1; - typedef vec<2, u16, lowp> lowp_u16vec2; - typedef vec<3, u16, lowp> lowp_u16vec3; - typedef vec<4, u16, lowp> lowp_u16vec4; - - typedef vec<1, u16, mediump> mediump_u16vec1; - typedef vec<2, u16, mediump> mediump_u16vec2; - typedef vec<3, u16, mediump> mediump_u16vec3; - typedef vec<4, u16, mediump> mediump_u16vec4; - - typedef vec<1, u16, highp> highp_u16vec1; - typedef vec<2, u16, highp> highp_u16vec2; - typedef vec<3, u16, highp> highp_u16vec3; - typedef vec<4, u16, highp> highp_u16vec4; - - typedef vec<1, u16, defaultp> u16vec1; - typedef vec<2, u16, defaultp> u16vec2; - typedef vec<3, u16, defaultp> u16vec3; - typedef vec<4, u16, defaultp> u16vec4; - - typedef vec<1, u32, lowp> lowp_u32vec1; - typedef vec<2, u32, lowp> lowp_u32vec2; - typedef vec<3, u32, lowp> lowp_u32vec3; - typedef vec<4, u32, lowp> lowp_u32vec4; - - typedef vec<1, u32, mediump> mediump_u32vec1; - typedef vec<2, u32, mediump> mediump_u32vec2; - typedef vec<3, u32, mediump> mediump_u32vec3; - typedef vec<4, u32, mediump> mediump_u32vec4; - - typedef vec<1, u32, highp> highp_u32vec1; - typedef vec<2, u32, highp> highp_u32vec2; - typedef vec<3, u32, highp> highp_u32vec3; - typedef vec<4, u32, highp> highp_u32vec4; - - typedef vec<1, u32, defaultp> u32vec1; - typedef vec<2, u32, defaultp> u32vec2; - typedef vec<3, u32, defaultp> u32vec3; - typedef vec<4, u32, defaultp> u32vec4; - - typedef vec<1, u64, lowp> lowp_u64vec1; - typedef vec<2, u64, lowp> lowp_u64vec2; - typedef vec<3, u64, lowp> lowp_u64vec3; - typedef vec<4, u64, lowp> lowp_u64vec4; - - typedef vec<1, u64, mediump> mediump_u64vec1; - typedef vec<2, u64, mediump> mediump_u64vec2; - typedef vec<3, u64, mediump> mediump_u64vec3; - typedef vec<4, u64, mediump> mediump_u64vec4; - - typedef vec<1, u64, highp> highp_u64vec1; - typedef vec<2, u64, highp> highp_u64vec2; - typedef vec<3, u64, highp> highp_u64vec3; - typedef vec<4, u64, highp> highp_u64vec4; - - typedef vec<1, u64, defaultp> u64vec1; - typedef vec<2, u64, defaultp> u64vec2; - typedef vec<3, u64, defaultp> u64vec3; - typedef vec<4, u64, defaultp> u64vec4; - - // Vector float - - typedef vec<1, float, lowp> lowp_vec1; - typedef vec<2, float, lowp> lowp_vec2; - typedef vec<3, float, lowp> lowp_vec3; - typedef vec<4, float, lowp> lowp_vec4; - - typedef 
vec<1, float, mediump> mediump_vec1; - typedef vec<2, float, mediump> mediump_vec2; - typedef vec<3, float, mediump> mediump_vec3; - typedef vec<4, float, mediump> mediump_vec4; - - typedef vec<1, float, highp> highp_vec1; - typedef vec<2, float, highp> highp_vec2; - typedef vec<3, float, highp> highp_vec3; - typedef vec<4, float, highp> highp_vec4; - - typedef vec<1, float, defaultp> vec1; - typedef vec<2, float, defaultp> vec2; - typedef vec<3, float, defaultp> vec3; - typedef vec<4, float, defaultp> vec4; - - typedef vec<1, float, lowp> lowp_fvec1; - typedef vec<2, float, lowp> lowp_fvec2; - typedef vec<3, float, lowp> lowp_fvec3; - typedef vec<4, float, lowp> lowp_fvec4; - - typedef vec<1, float, mediump> mediump_fvec1; - typedef vec<2, float, mediump> mediump_fvec2; - typedef vec<3, float, mediump> mediump_fvec3; - typedef vec<4, float, mediump> mediump_fvec4; - - typedef vec<1, float, highp> highp_fvec1; - typedef vec<2, float, highp> highp_fvec2; - typedef vec<3, float, highp> highp_fvec3; - typedef vec<4, float, highp> highp_fvec4; - - typedef vec<1, f32, defaultp> fvec1; - typedef vec<2, f32, defaultp> fvec2; - typedef vec<3, f32, defaultp> fvec3; - typedef vec<4, f32, defaultp> fvec4; - - typedef vec<1, f32, lowp> lowp_f32vec1; - typedef vec<2, f32, lowp> lowp_f32vec2; - typedef vec<3, f32, lowp> lowp_f32vec3; - typedef vec<4, f32, lowp> lowp_f32vec4; - - typedef vec<1, f32, mediump> mediump_f32vec1; - typedef vec<2, f32, mediump> mediump_f32vec2; - typedef vec<3, f32, mediump> mediump_f32vec3; - typedef vec<4, f32, mediump> mediump_f32vec4; - - typedef vec<1, f32, highp> highp_f32vec1; - typedef vec<2, f32, highp> highp_f32vec2; - typedef vec<3, f32, highp> highp_f32vec3; - typedef vec<4, f32, highp> highp_f32vec4; - - typedef vec<1, f32, defaultp> f32vec1; - typedef vec<2, f32, defaultp> f32vec2; - typedef vec<3, f32, defaultp> f32vec3; - typedef vec<4, f32, defaultp> f32vec4; - - typedef vec<1, f64, lowp> lowp_dvec1; - typedef vec<2, f64, lowp> lowp_dvec2; - typedef vec<3, f64, lowp> lowp_dvec3; - typedef vec<4, f64, lowp> lowp_dvec4; - - typedef vec<1, f64, mediump> mediump_dvec1; - typedef vec<2, f64, mediump> mediump_dvec2; - typedef vec<3, f64, mediump> mediump_dvec3; - typedef vec<4, f64, mediump> mediump_dvec4; - - typedef vec<1, f64, highp> highp_dvec1; - typedef vec<2, f64, highp> highp_dvec2; - typedef vec<3, f64, highp> highp_dvec3; - typedef vec<4, f64, highp> highp_dvec4; - - typedef vec<1, f64, defaultp> dvec1; - typedef vec<2, f64, defaultp> dvec2; - typedef vec<3, f64, defaultp> dvec3; - typedef vec<4, f64, defaultp> dvec4; - - typedef vec<1, f64, lowp> lowp_f64vec1; - typedef vec<2, f64, lowp> lowp_f64vec2; - typedef vec<3, f64, lowp> lowp_f64vec3; - typedef vec<4, f64, lowp> lowp_f64vec4; - - typedef vec<1, f64, mediump> mediump_f64vec1; - typedef vec<2, f64, mediump> mediump_f64vec2; - typedef vec<3, f64, mediump> mediump_f64vec3; - typedef vec<4, f64, mediump> mediump_f64vec4; - - typedef vec<1, f64, highp> highp_f64vec1; - typedef vec<2, f64, highp> highp_f64vec2; - typedef vec<3, f64, highp> highp_f64vec3; - typedef vec<4, f64, highp> highp_f64vec4; - - typedef vec<1, f64, defaultp> f64vec1; - typedef vec<2, f64, defaultp> f64vec2; - typedef vec<3, f64, defaultp> f64vec3; - typedef vec<4, f64, defaultp> f64vec4; - - // Matrix NxN - - typedef mat<2, 2, f32, lowp> lowp_mat2; - typedef mat<3, 3, f32, lowp> lowp_mat3; - typedef mat<4, 4, f32, lowp> lowp_mat4; - - typedef mat<2, 2, f32, mediump> mediump_mat2; - typedef mat<3, 3, f32, mediump> mediump_mat3; - 
typedef mat<4, 4, f32, mediump> mediump_mat4; - - typedef mat<2, 2, f32, highp> highp_mat2; - typedef mat<3, 3, f32, highp> highp_mat3; - typedef mat<4, 4, f32, highp> highp_mat4; - - typedef mat<2, 2, f32, defaultp> mat2; - typedef mat<3, 3, f32, defaultp> mat3; - typedef mat<4, 4, f32, defaultp> mat4; - - typedef mat<2, 2, f32, lowp> lowp_fmat2; - typedef mat<3, 3, f32, lowp> lowp_fmat3; - typedef mat<4, 4, f32, lowp> lowp_fmat4; - - typedef mat<2, 2, f32, mediump> mediump_fmat2; - typedef mat<3, 3, f32, mediump> mediump_fmat3; - typedef mat<4, 4, f32, mediump> mediump_fmat4; - - typedef mat<2, 2, f32, highp> highp_fmat2; - typedef mat<3, 3, f32, highp> highp_fmat3; - typedef mat<4, 4, f32, highp> highp_fmat4; - - typedef mat<2, 2, f32, defaultp> fmat2; - typedef mat<3, 3, f32, defaultp> fmat3; - typedef mat<4, 4, f32, defaultp> fmat4; - - typedef mat<2, 2, f32, lowp> lowp_f32mat2; - typedef mat<3, 3, f32, lowp> lowp_f32mat3; - typedef mat<4, 4, f32, lowp> lowp_f32mat4; - - typedef mat<2, 2, f32, mediump> mediump_f32mat2; - typedef mat<3, 3, f32, mediump> mediump_f32mat3; - typedef mat<4, 4, f32, mediump> mediump_f32mat4; - - typedef mat<2, 2, f32, highp> highp_f32mat2; - typedef mat<3, 3, f32, highp> highp_f32mat3; - typedef mat<4, 4, f32, highp> highp_f32mat4; - - typedef mat<2, 2, f32, defaultp> f32mat2; - typedef mat<3, 3, f32, defaultp> f32mat3; - typedef mat<4, 4, f32, defaultp> f32mat4; - - typedef mat<2, 2, f64, lowp> lowp_dmat2; - typedef mat<3, 3, f64, lowp> lowp_dmat3; - typedef mat<4, 4, f64, lowp> lowp_dmat4; - - typedef mat<2, 2, f64, mediump> mediump_dmat2; - typedef mat<3, 3, f64, mediump> mediump_dmat3; - typedef mat<4, 4, f64, mediump> mediump_dmat4; - - typedef mat<2, 2, f64, highp> highp_dmat2; - typedef mat<3, 3, f64, highp> highp_dmat3; - typedef mat<4, 4, f64, highp> highp_dmat4; - - typedef mat<2, 2, f64, defaultp> dmat2; - typedef mat<3, 3, f64, defaultp> dmat3; - typedef mat<4, 4, f64, defaultp> dmat4; - - typedef mat<2, 2, f64, lowp> lowp_f64mat2; - typedef mat<3, 3, f64, lowp> lowp_f64mat3; - typedef mat<4, 4, f64, lowp> lowp_f64mat4; - - typedef mat<2, 2, f64, mediump> mediump_f64mat2; - typedef mat<3, 3, f64, mediump> mediump_f64mat3; - typedef mat<4, 4, f64, mediump> mediump_f64mat4; - - typedef mat<2, 2, f64, highp> highp_f64mat2; - typedef mat<3, 3, f64, highp> highp_f64mat3; - typedef mat<4, 4, f64, highp> highp_f64mat4; - - typedef mat<2, 2, f64, defaultp> f64mat2; - typedef mat<3, 3, f64, defaultp> f64mat3; - typedef mat<4, 4, f64, defaultp> f64mat4; - - // Matrix MxN - - typedef mat<2, 2, f32, lowp> lowp_mat2x2; - typedef mat<2, 3, f32, lowp> lowp_mat2x3; - typedef mat<2, 4, f32, lowp> lowp_mat2x4; - typedef mat<3, 2, f32, lowp> lowp_mat3x2; - typedef mat<3, 3, f32, lowp> lowp_mat3x3; - typedef mat<3, 4, f32, lowp> lowp_mat3x4; - typedef mat<4, 2, f32, lowp> lowp_mat4x2; - typedef mat<4, 3, f32, lowp> lowp_mat4x3; - typedef mat<4, 4, f32, lowp> lowp_mat4x4; - - typedef mat<2, 2, f32, mediump> mediump_mat2x2; - typedef mat<2, 3, f32, mediump> mediump_mat2x3; - typedef mat<2, 4, f32, mediump> mediump_mat2x4; - typedef mat<3, 2, f32, mediump> mediump_mat3x2; - typedef mat<3, 3, f32, mediump> mediump_mat3x3; - typedef mat<3, 4, f32, mediump> mediump_mat3x4; - typedef mat<4, 2, f32, mediump> mediump_mat4x2; - typedef mat<4, 3, f32, mediump> mediump_mat4x3; - typedef mat<4, 4, f32, mediump> mediump_mat4x4; - - typedef mat<2, 2, f32, highp> highp_mat2x2; - typedef mat<2, 3, f32, highp> highp_mat2x3; - typedef mat<2, 4, f32, highp> highp_mat2x4; - typedef 
mat<3, 2, f32, highp> highp_mat3x2; - typedef mat<3, 3, f32, highp> highp_mat3x3; - typedef mat<3, 4, f32, highp> highp_mat3x4; - typedef mat<4, 2, f32, highp> highp_mat4x2; - typedef mat<4, 3, f32, highp> highp_mat4x3; - typedef mat<4, 4, f32, highp> highp_mat4x4; - - typedef mat<2, 2, f32, defaultp> mat2x2; - typedef mat<3, 2, f32, defaultp> mat3x2; - typedef mat<4, 2, f32, defaultp> mat4x2; - typedef mat<2, 3, f32, defaultp> mat2x3; - typedef mat<3, 3, f32, defaultp> mat3x3; - typedef mat<4, 3, f32, defaultp> mat4x3; - typedef mat<2, 4, f32, defaultp> mat2x4; - typedef mat<3, 4, f32, defaultp> mat3x4; - typedef mat<4, 4, f32, defaultp> mat4x4; - - typedef mat<2, 2, f32, lowp> lowp_fmat2x2; - typedef mat<2, 3, f32, lowp> lowp_fmat2x3; - typedef mat<2, 4, f32, lowp> lowp_fmat2x4; - typedef mat<3, 2, f32, lowp> lowp_fmat3x2; - typedef mat<3, 3, f32, lowp> lowp_fmat3x3; - typedef mat<3, 4, f32, lowp> lowp_fmat3x4; - typedef mat<4, 2, f32, lowp> lowp_fmat4x2; - typedef mat<4, 3, f32, lowp> lowp_fmat4x3; - typedef mat<4, 4, f32, lowp> lowp_fmat4x4; - - typedef mat<2, 2, f32, mediump> mediump_fmat2x2; - typedef mat<2, 3, f32, mediump> mediump_fmat2x3; - typedef mat<2, 4, f32, mediump> mediump_fmat2x4; - typedef mat<3, 2, f32, mediump> mediump_fmat3x2; - typedef mat<3, 3, f32, mediump> mediump_fmat3x3; - typedef mat<3, 4, f32, mediump> mediump_fmat3x4; - typedef mat<4, 2, f32, mediump> mediump_fmat4x2; - typedef mat<4, 3, f32, mediump> mediump_fmat4x3; - typedef mat<4, 4, f32, mediump> mediump_fmat4x4; - - typedef mat<2, 2, f32, highp> highp_fmat2x2; - typedef mat<2, 3, f32, highp> highp_fmat2x3; - typedef mat<2, 4, f32, highp> highp_fmat2x4; - typedef mat<3, 2, f32, highp> highp_fmat3x2; - typedef mat<3, 3, f32, highp> highp_fmat3x3; - typedef mat<3, 4, f32, highp> highp_fmat3x4; - typedef mat<4, 2, f32, highp> highp_fmat4x2; - typedef mat<4, 3, f32, highp> highp_fmat4x3; - typedef mat<4, 4, f32, highp> highp_fmat4x4; - - typedef mat<2, 2, f32, defaultp> fmat2x2; - typedef mat<3, 2, f32, defaultp> fmat3x2; - typedef mat<4, 2, f32, defaultp> fmat4x2; - typedef mat<2, 3, f32, defaultp> fmat2x3; - typedef mat<3, 3, f32, defaultp> fmat3x3; - typedef mat<4, 3, f32, defaultp> fmat4x3; - typedef mat<2, 4, f32, defaultp> fmat2x4; - typedef mat<3, 4, f32, defaultp> fmat3x4; - typedef mat<4, 4, f32, defaultp> fmat4x4; - - typedef mat<2, 2, f32, lowp> lowp_f32mat2x2; - typedef mat<2, 3, f32, lowp> lowp_f32mat2x3; - typedef mat<2, 4, f32, lowp> lowp_f32mat2x4; - typedef mat<3, 2, f32, lowp> lowp_f32mat3x2; - typedef mat<3, 3, f32, lowp> lowp_f32mat3x3; - typedef mat<3, 4, f32, lowp> lowp_f32mat3x4; - typedef mat<4, 2, f32, lowp> lowp_f32mat4x2; - typedef mat<4, 3, f32, lowp> lowp_f32mat4x3; - typedef mat<4, 4, f32, lowp> lowp_f32mat4x4; - - typedef mat<2, 2, f32, mediump> mediump_f32mat2x2; - typedef mat<2, 3, f32, mediump> mediump_f32mat2x3; - typedef mat<2, 4, f32, mediump> mediump_f32mat2x4; - typedef mat<3, 2, f32, mediump> mediump_f32mat3x2; - typedef mat<3, 3, f32, mediump> mediump_f32mat3x3; - typedef mat<3, 4, f32, mediump> mediump_f32mat3x4; - typedef mat<4, 2, f32, mediump> mediump_f32mat4x2; - typedef mat<4, 3, f32, mediump> mediump_f32mat4x3; - typedef mat<4, 4, f32, mediump> mediump_f32mat4x4; - - typedef mat<2, 2, f32, highp> highp_f32mat2x2; - typedef mat<2, 3, f32, highp> highp_f32mat2x3; - typedef mat<2, 4, f32, highp> highp_f32mat2x4; - typedef mat<3, 2, f32, highp> highp_f32mat3x2; - typedef mat<3, 3, f32, highp> highp_f32mat3x3; - typedef mat<3, 4, f32, highp> highp_f32mat3x4; - 
typedef mat<4, 2, f32, highp> highp_f32mat4x2; - typedef mat<4, 3, f32, highp> highp_f32mat4x3; - typedef mat<4, 4, f32, highp> highp_f32mat4x4; - - typedef mat<2, 2, f32, defaultp> f32mat2x2; - typedef mat<3, 2, f32, defaultp> f32mat3x2; - typedef mat<4, 2, f32, defaultp> f32mat4x2; - typedef mat<2, 3, f32, defaultp> f32mat2x3; - typedef mat<3, 3, f32, defaultp> f32mat3x3; - typedef mat<4, 3, f32, defaultp> f32mat4x3; - typedef mat<2, 4, f32, defaultp> f32mat2x4; - typedef mat<3, 4, f32, defaultp> f32mat3x4; - typedef mat<4, 4, f32, defaultp> f32mat4x4; - - typedef mat<2, 2, double, lowp> lowp_dmat2x2; - typedef mat<2, 3, double, lowp> lowp_dmat2x3; - typedef mat<2, 4, double, lowp> lowp_dmat2x4; - typedef mat<3, 2, double, lowp> lowp_dmat3x2; - typedef mat<3, 3, double, lowp> lowp_dmat3x3; - typedef mat<3, 4, double, lowp> lowp_dmat3x4; - typedef mat<4, 2, double, lowp> lowp_dmat4x2; - typedef mat<4, 3, double, lowp> lowp_dmat4x3; - typedef mat<4, 4, double, lowp> lowp_dmat4x4; - - typedef mat<2, 2, double, mediump> mediump_dmat2x2; - typedef mat<2, 3, double, mediump> mediump_dmat2x3; - typedef mat<2, 4, double, mediump> mediump_dmat2x4; - typedef mat<3, 2, double, mediump> mediump_dmat3x2; - typedef mat<3, 3, double, mediump> mediump_dmat3x3; - typedef mat<3, 4, double, mediump> mediump_dmat3x4; - typedef mat<4, 2, double, mediump> mediump_dmat4x2; - typedef mat<4, 3, double, mediump> mediump_dmat4x3; - typedef mat<4, 4, double, mediump> mediump_dmat4x4; - - typedef mat<2, 2, double, highp> highp_dmat2x2; - typedef mat<2, 3, double, highp> highp_dmat2x3; - typedef mat<2, 4, double, highp> highp_dmat2x4; - typedef mat<3, 2, double, highp> highp_dmat3x2; - typedef mat<3, 3, double, highp> highp_dmat3x3; - typedef mat<3, 4, double, highp> highp_dmat3x4; - typedef mat<4, 2, double, highp> highp_dmat4x2; - typedef mat<4, 3, double, highp> highp_dmat4x3; - typedef mat<4, 4, double, highp> highp_dmat4x4; - - typedef mat<2, 2, double, defaultp> dmat2x2; - typedef mat<3, 2, double, defaultp> dmat3x2; - typedef mat<4, 2, double, defaultp> dmat4x2; - typedef mat<2, 3, double, defaultp> dmat2x3; - typedef mat<3, 3, double, defaultp> dmat3x3; - typedef mat<4, 3, double, defaultp> dmat4x3; - typedef mat<2, 4, double, defaultp> dmat2x4; - typedef mat<3, 4, double, defaultp> dmat3x4; - typedef mat<4, 4, double, defaultp> dmat4x4; - - typedef mat<2, 2, f64, lowp> lowp_f64mat2x2; - typedef mat<2, 3, f64, lowp> lowp_f64mat2x3; - typedef mat<2, 4, f64, lowp> lowp_f64mat2x4; - typedef mat<3, 2, f64, lowp> lowp_f64mat3x2; - typedef mat<3, 3, f64, lowp> lowp_f64mat3x3; - typedef mat<3, 4, f64, lowp> lowp_f64mat3x4; - typedef mat<4, 2, f64, lowp> lowp_f64mat4x2; - typedef mat<4, 3, f64, lowp> lowp_f64mat4x3; - typedef mat<4, 4, f64, lowp> lowp_f64mat4x4; - - typedef mat<2, 2, f64, mediump> mediump_f64mat2x2; - typedef mat<2, 3, f64, mediump> mediump_f64mat2x3; - typedef mat<2, 4, f64, mediump> mediump_f64mat2x4; - typedef mat<3, 2, f64, mediump> mediump_f64mat3x2; - typedef mat<3, 3, f64, mediump> mediump_f64mat3x3; - typedef mat<3, 4, f64, mediump> mediump_f64mat3x4; - typedef mat<4, 2, f64, mediump> mediump_f64mat4x2; - typedef mat<4, 3, f64, mediump> mediump_f64mat4x3; - typedef mat<4, 4, f64, mediump> mediump_f64mat4x4; - - typedef mat<2, 2, f64, highp> highp_f64mat2x2; - typedef mat<2, 3, f64, highp> highp_f64mat2x3; - typedef mat<2, 4, f64, highp> highp_f64mat2x4; - typedef mat<3, 2, f64, highp> highp_f64mat3x2; - typedef mat<3, 3, f64, highp> highp_f64mat3x3; - typedef mat<3, 4, f64, highp> 
highp_f64mat3x4; - typedef mat<4, 2, f64, highp> highp_f64mat4x2; - typedef mat<4, 3, f64, highp> highp_f64mat4x3; - typedef mat<4, 4, f64, highp> highp_f64mat4x4; - - typedef mat<2, 2, f64, defaultp> f64mat2x2; - typedef mat<3, 2, f64, defaultp> f64mat3x2; - typedef mat<4, 2, f64, defaultp> f64mat4x2; - typedef mat<2, 3, f64, defaultp> f64mat2x3; - typedef mat<3, 3, f64, defaultp> f64mat3x3; - typedef mat<4, 3, f64, defaultp> f64mat4x3; - typedef mat<2, 4, f64, defaultp> f64mat2x4; - typedef mat<3, 4, f64, defaultp> f64mat3x4; - typedef mat<4, 4, f64, defaultp> f64mat4x4; - - // Quaternion - - typedef qua lowp_quat; - typedef qua mediump_quat; - typedef qua highp_quat; - typedef qua quat; - - typedef qua lowp_fquat; - typedef qua mediump_fquat; - typedef qua highp_fquat; - typedef qua fquat; - - typedef qua lowp_f32quat; - typedef qua mediump_f32quat; - typedef qua highp_f32quat; - typedef qua f32quat; - - typedef qua lowp_dquat; - typedef qua mediump_dquat; - typedef qua highp_dquat; - typedef qua dquat; - - typedef qua lowp_f64quat; - typedef qua mediump_f64quat; - typedef qua highp_f64quat; - typedef qua f64quat; -}//namespace glm - - diff --git a/third_party/glm/geometric.hpp b/third_party/glm/geometric.hpp deleted file mode 100755 index c068a3c..0000000 --- a/third_party/glm/geometric.hpp +++ /dev/null @@ -1,116 +0,0 @@ -/// @ref core -/// @file glm/geometric.hpp -/// -/// @see GLSL 4.20.8 specification, section 8.5 Geometric Functions -/// -/// @defgroup core_func_geometric Geometric functions -/// @ingroup core -/// -/// These operate on vectors as vectors, not component-wise. -/// -/// Include to use these core features. - -#pragma once - -#include "detail/type_vec3.hpp" - -namespace glm -{ - /// @addtogroup core_func_geometric - /// @{ - - /// Returns the length of x, i.e., sqrt(x * x). - /// - /// @tparam L An integer between 1 and 4 included that qualify the dimension of the vector. - /// @tparam T Floating-point scalar types. - /// - /// @see GLSL length man page - /// @see GLSL 4.20.8 specification, section 8.5 Geometric Functions - template - GLM_FUNC_DECL T length(vec const& x); - - /// Returns the distance betwwen p0 and p1, i.e., length(p0 - p1). - /// - /// @tparam L An integer between 1 and 4 included that qualify the dimension of the vector. - /// @tparam T Floating-point scalar types. - /// - /// @see GLSL distance man page - /// @see GLSL 4.20.8 specification, section 8.5 Geometric Functions - template - GLM_FUNC_DECL T distance(vec const& p0, vec const& p1); - - /// Returns the dot product of x and y, i.e., result = x * y. - /// - /// @tparam L An integer between 1 and 4 included that qualify the dimension of the vector. - /// @tparam T Floating-point scalar types. - /// - /// @see GLSL dot man page - /// @see GLSL 4.20.8 specification, section 8.5 Geometric Functions - template - GLM_FUNC_DECL T dot(vec const& x, vec const& y); - - /// Returns the cross product of x and y. - /// - /// @tparam T Floating-point scalar types. - /// - /// @see GLSL cross man page - /// @see GLSL 4.20.8 specification, section 8.5 Geometric Functions - template - GLM_FUNC_DECL vec<3, T, Q> cross(vec<3, T, Q> const& x, vec<3, T, Q> const& y); - - /// Returns a vector in the same direction as x but with length of 1. - /// According to issue 10 GLSL 1.10 specification, if length(x) == 0 then result is undefined and generate an error. - /// - /// @tparam L An integer between 1 and 4 included that qualify the dimension of the vector. - /// @tparam T Floating-point scalar types. 
- /// - /// @see GLSL normalize man page - /// @see GLSL 4.20.8 specification, section 8.5 Geometric Functions - template - GLM_FUNC_DECL vec normalize(vec const& x); - - /// If dot(Nref, I) < 0.0, return N, otherwise, return -N. - /// - /// @tparam L An integer between 1 and 4 included that qualify the dimension of the vector. - /// @tparam T Floating-point scalar types. - /// - /// @see GLSL faceforward man page - /// @see GLSL 4.20.8 specification, section 8.5 Geometric Functions - template - GLM_FUNC_DECL vec faceforward( - vec const& N, - vec const& I, - vec const& Nref); - - /// For the incident vector I and surface orientation N, - /// returns the reflection direction : result = I - 2.0 * dot(N, I) * N. - /// - /// @tparam L An integer between 1 and 4 included that qualify the dimension of the vector. - /// @tparam T Floating-point scalar types. - /// - /// @see GLSL reflect man page - /// @see GLSL 4.20.8 specification, section 8.5 Geometric Functions - template - GLM_FUNC_DECL vec reflect( - vec const& I, - vec const& N); - - /// For the incident vector I and surface normal N, - /// and the ratio of indices of refraction eta, - /// return the refraction vector. - /// - /// @tparam L An integer between 1 and 4 included that qualify the dimension of the vector. - /// @tparam T Floating-point scalar types. - /// - /// @see GLSL refract man page - /// @see GLSL 4.20.8 specification, section 8.5 Geometric Functions - template - GLM_FUNC_DECL vec refract( - vec const& I, - vec const& N, - T eta); - - /// @} -}//namespace glm - -#include "detail/func_geometric.inl" diff --git a/third_party/glm/glm.hpp b/third_party/glm/glm.hpp deleted file mode 100755 index 8b61064..0000000 --- a/third_party/glm/glm.hpp +++ /dev/null @@ -1,136 +0,0 @@ -/// @ref core -/// @file glm/glm.hpp -/// -/// @defgroup core Core features -/// -/// @brief Features that implement in C++ the GLSL specification as closely as possible. -/// -/// The GLM core consists of C++ types that mirror GLSL types and -/// C++ functions that mirror the GLSL functions. -/// -/// The best documentation for GLM Core is the current GLSL specification, -/// version 4.2 -/// (pdf file). -/// -/// GLM core functionalities require to be included to be used. -/// -/// -/// @defgroup core_vector Vector types -/// -/// Vector types of two to four components with an exhaustive set of operators. -/// -/// @ingroup core -/// -/// -/// @defgroup core_vector_precision Vector types with precision qualifiers -/// -/// @brief Vector types with precision qualifiers which may result in various precision in term of ULPs -/// -/// GLSL allows defining qualifiers for particular variables. -/// With OpenGL's GLSL, these qualifiers have no effect; they are there for compatibility, -/// with OpenGL ES's GLSL, these qualifiers do have an effect. -/// -/// C++ has no language equivalent to qualifier qualifiers. So GLM provides the next-best thing: -/// a number of typedefs that use a particular qualifier. -/// -/// None of these types make any guarantees about the actual qualifier used. -/// -/// @ingroup core -/// -/// -/// @defgroup core_matrix Matrix types -/// -/// Matrix types of with C columns and R rows where C and R are values between 2 to 4 included. -/// These types have exhaustive sets of operators. 
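A short sketch of the core geometric functions declared in the header above (normalize, dot, cross, reflect); the surface normal and incident direction are arbitrary values chosen so the results are easy to check by hand.

    #include <glm/glm.hpp>
    #include <cassert>

    int main()
    {
        glm::vec3 const n(0.0f, 1.0f, 0.0f);                              // surface normal
        glm::vec3 const i = glm::normalize(glm::vec3(1.0f, -1.0f, 0.0f)); // incident ray

        // reflect: I - 2 * dot(N, I) * N -> the ray bounces back upward.
        glm::vec3 const r = glm::reflect(i, n);
        assert(r.y > 0.0f);

        // dot, cross and length behave as in GLSL.
        assert(glm::dot(n, n) == 1.0f);
        assert(glm::length(glm::cross(n, glm::vec3(1.0f, 0.0f, 0.0f))) == 1.0f);
        return 0;
    }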
-/// -/// @ingroup core -/// -/// -/// @defgroup core_matrix_precision Matrix types with precision qualifiers -/// -/// @brief Matrix types with precision qualifiers which may result in various precision in term of ULPs -/// -/// GLSL allows defining qualifiers for particular variables. -/// With OpenGL's GLSL, these qualifiers have no effect; they are there for compatibility, -/// with OpenGL ES's GLSL, these qualifiers do have an effect. -/// -/// C++ has no language equivalent to qualifier qualifiers. So GLM provides the next-best thing: -/// a number of typedefs that use a particular qualifier. -/// -/// None of these types make any guarantees about the actual qualifier used. -/// -/// @ingroup core -/// -/// -/// @defgroup ext Stable extensions -/// -/// @brief Additional features not specified by GLSL specification. -/// -/// EXT extensions are fully tested and documented. -/// -/// Even if it's highly unrecommended, it's possible to include all the extensions at once by -/// including . Otherwise, each extension needs to be included a specific file. -/// -/// -/// @defgroup gtc Recommended extensions -/// -/// @brief Additional features not specified by GLSL specification. -/// -/// GTC extensions aim to be stable with tests and documentation. -/// -/// Even if it's highly unrecommended, it's possible to include all the extensions at once by -/// including . Otherwise, each extension needs to be included a specific file. -/// -/// -/// @defgroup gtx Experimental extensions -/// -/// @brief Experimental features not specified by GLSL specification. -/// -/// Experimental extensions are useful functions and types, but the development of -/// their API and functionality is not necessarily stable. They can change -/// substantially between versions. Backwards compatibility is not much of an issue -/// for them. -/// -/// Even if it's highly unrecommended, it's possible to include all the extensions -/// at once by including . Otherwise, each extension needs to be -/// included a specific file. -/// -/// @mainpage OpenGL Mathematics (GLM) -/// - Website: glm.g-truc.net -/// - GLM API documentation -/// - GLM Manual - -#include "detail/_fixes.hpp" - -#include "detail/setup.hpp" - -#pragma once - -#include -#include -#include -#include -#include -#include "fwd.hpp" - -#include "vec2.hpp" -#include "vec3.hpp" -#include "vec4.hpp" -#include "mat2x2.hpp" -#include "mat2x3.hpp" -#include "mat2x4.hpp" -#include "mat3x2.hpp" -#include "mat3x3.hpp" -#include "mat3x4.hpp" -#include "mat4x2.hpp" -#include "mat4x3.hpp" -#include "mat4x4.hpp" - -#include "trigonometric.hpp" -#include "exponential.hpp" -#include "common.hpp" -#include "packing.hpp" -#include "geometric.hpp" -#include "matrix.hpp" -#include "vector_relational.hpp" -#include "integer.hpp" diff --git a/third_party/glm/gtc/bitfield.hpp b/third_party/glm/gtc/bitfield.hpp deleted file mode 100755 index 084fbe7..0000000 --- a/third_party/glm/gtc/bitfield.hpp +++ /dev/null @@ -1,266 +0,0 @@ -/// @ref gtc_bitfield -/// @file glm/gtc/bitfield.hpp -/// -/// @see core (dependence) -/// @see gtc_bitfield (dependence) -/// -/// @defgroup gtc_bitfield GLM_GTC_bitfield -/// @ingroup gtc -/// -/// Include to use the features of this extension. 
-/// -/// Allow to perform bit operations on integer values - -#include "../detail/setup.hpp" - -#pragma once - -// Dependencies -#include "../ext/scalar_int_sized.hpp" -#include "../ext/scalar_uint_sized.hpp" -#include "../detail/qualifier.hpp" -#include "../detail/_vectorize.hpp" -#include "type_precision.hpp" -#include - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# pragma message("GLM: GLM_GTC_bitfield extension included") -#endif - -namespace glm -{ - /// @addtogroup gtc_bitfield - /// @{ - - /// Build a mask of 'count' bits - /// - /// @see gtc_bitfield - template - GLM_FUNC_DECL genIUType mask(genIUType Bits); - - /// Build a mask of 'count' bits - /// - /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector - /// @tparam T Signed and unsigned integer scalar types - /// @tparam Q Value from qualifier enum - /// - /// @see gtc_bitfield - template - GLM_FUNC_DECL vec mask(vec const& v); - - /// Rotate all bits to the right. All the bits dropped in the right side are inserted back on the left side. - /// - /// @see gtc_bitfield - template - GLM_FUNC_DECL genIUType bitfieldRotateRight(genIUType In, int Shift); - - /// Rotate all bits to the right. All the bits dropped in the right side are inserted back on the left side. - /// - /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector - /// @tparam T Signed and unsigned integer scalar types - /// @tparam Q Value from qualifier enum - /// - /// @see gtc_bitfield - template - GLM_FUNC_DECL vec bitfieldRotateRight(vec const& In, int Shift); - - /// Rotate all bits to the left. All the bits dropped in the left side are inserted back on the right side. - /// - /// @see gtc_bitfield - template - GLM_FUNC_DECL genIUType bitfieldRotateLeft(genIUType In, int Shift); - - /// Rotate all bits to the left. All the bits dropped in the left side are inserted back on the right side. - /// - /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector - /// @tparam T Signed and unsigned integer scalar types - /// @tparam Q Value from qualifier enum - /// - /// @see gtc_bitfield - template - GLM_FUNC_DECL vec bitfieldRotateLeft(vec const& In, int Shift); - - /// Set to 1 a range of bits. - /// - /// @see gtc_bitfield - template - GLM_FUNC_DECL genIUType bitfieldFillOne(genIUType Value, int FirstBit, int BitCount); - - /// Set to 1 a range of bits. - /// - /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector - /// @tparam T Signed and unsigned integer scalar types - /// @tparam Q Value from qualifier enum - /// - /// @see gtc_bitfield - template - GLM_FUNC_DECL vec bitfieldFillOne(vec const& Value, int FirstBit, int BitCount); - - /// Set to 0 a range of bits. - /// - /// @see gtc_bitfield - template - GLM_FUNC_DECL genIUType bitfieldFillZero(genIUType Value, int FirstBit, int BitCount); - - /// Set to 0 a range of bits. - /// - /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector - /// @tparam T Signed and unsigned integer scalar types - /// @tparam Q Value from qualifier enum - /// - /// @see gtc_bitfield - template - GLM_FUNC_DECL vec bitfieldFillZero(vec const& Value, int FirstBit, int BitCount); - - /// Interleaves the bits of x and y. - /// The first bit is the first bit of x followed by the first bit of y. - /// The other bits are interleaved following the previous sequence. 
- /// - /// @see gtc_bitfield - GLM_FUNC_DECL int16 bitfieldInterleave(int8 x, int8 y); - - /// Interleaves the bits of x and y. - /// The first bit is the first bit of x followed by the first bit of y. - /// The other bits are interleaved following the previous sequence. - /// - /// @see gtc_bitfield - GLM_FUNC_DECL uint16 bitfieldInterleave(uint8 x, uint8 y); - - /// Interleaves the bits of x and y. - /// The first bit is the first bit of v.x followed by the first bit of v.y. - /// The other bits are interleaved following the previous sequence. - /// - /// @see gtc_bitfield - GLM_FUNC_DECL uint16 bitfieldInterleave(u8vec2 const& v); - - /// Deinterleaves the bits of x. - /// - /// @see gtc_bitfield - GLM_FUNC_DECL glm::u8vec2 bitfieldDeinterleave(glm::uint16 x); - - /// Interleaves the bits of x and y. - /// The first bit is the first bit of x followed by the first bit of y. - /// The other bits are interleaved following the previous sequence. - /// - /// @see gtc_bitfield - GLM_FUNC_DECL int32 bitfieldInterleave(int16 x, int16 y); - - /// Interleaves the bits of x and y. - /// The first bit is the first bit of x followed by the first bit of y. - /// The other bits are interleaved following the previous sequence. - /// - /// @see gtc_bitfield - GLM_FUNC_DECL uint32 bitfieldInterleave(uint16 x, uint16 y); - - /// Interleaves the bits of x and y. - /// The first bit is the first bit of v.x followed by the first bit of v.y. - /// The other bits are interleaved following the previous sequence. - /// - /// @see gtc_bitfield - GLM_FUNC_DECL uint32 bitfieldInterleave(u16vec2 const& v); - - /// Deinterleaves the bits of x. - /// - /// @see gtc_bitfield - GLM_FUNC_DECL glm::u16vec2 bitfieldDeinterleave(glm::uint32 x); - - /// Interleaves the bits of x and y. - /// The first bit is the first bit of x followed by the first bit of y. - /// The other bits are interleaved following the previous sequence. - /// - /// @see gtc_bitfield - GLM_FUNC_DECL int64 bitfieldInterleave(int32 x, int32 y); - - /// Interleaves the bits of x and y. - /// The first bit is the first bit of x followed by the first bit of y. - /// The other bits are interleaved following the previous sequence. - /// - /// @see gtc_bitfield - GLM_FUNC_DECL uint64 bitfieldInterleave(uint32 x, uint32 y); - - /// Interleaves the bits of x and y. - /// The first bit is the first bit of v.x followed by the first bit of v.y. - /// The other bits are interleaved following the previous sequence. - /// - /// @see gtc_bitfield - GLM_FUNC_DECL uint64 bitfieldInterleave(u32vec2 const& v); - - /// Deinterleaves the bits of x. - /// - /// @see gtc_bitfield - GLM_FUNC_DECL glm::u32vec2 bitfieldDeinterleave(glm::uint64 x); - - /// Interleaves the bits of x, y and z. - /// The first bit is the first bit of x followed by the first bit of y and the first bit of z. - /// The other bits are interleaved following the previous sequence. - /// - /// @see gtc_bitfield - GLM_FUNC_DECL int32 bitfieldInterleave(int8 x, int8 y, int8 z); - - /// Interleaves the bits of x, y and z. - /// The first bit is the first bit of x followed by the first bit of y and the first bit of z. - /// The other bits are interleaved following the previous sequence. - /// - /// @see gtc_bitfield - GLM_FUNC_DECL uint32 bitfieldInterleave(uint8 x, uint8 y, uint8 z); - - /// Interleaves the bits of x, y and z. - /// The first bit is the first bit of x followed by the first bit of y and the first bit of z. - /// The other bits are interleaved following the previous sequence. 
- /// - /// @see gtc_bitfield - GLM_FUNC_DECL int64 bitfieldInterleave(int16 x, int16 y, int16 z); - - /// Interleaves the bits of x, y and z. - /// The first bit is the first bit of x followed by the first bit of y and the first bit of z. - /// The other bits are interleaved following the previous sequence. - /// - /// @see gtc_bitfield - GLM_FUNC_DECL uint64 bitfieldInterleave(uint16 x, uint16 y, uint16 z); - - /// Interleaves the bits of x, y and z. - /// The first bit is the first bit of x followed by the first bit of y and the first bit of z. - /// The other bits are interleaved following the previous sequence. - /// - /// @see gtc_bitfield - GLM_FUNC_DECL int64 bitfieldInterleave(int32 x, int32 y, int32 z); - - /// Interleaves the bits of x, y and z. - /// The first bit is the first bit of x followed by the first bit of y and the first bit of z. - /// The other bits are interleaved following the previous sequence. - /// - /// @see gtc_bitfield - GLM_FUNC_DECL uint64 bitfieldInterleave(uint32 x, uint32 y, uint32 z); - - /// Interleaves the bits of x, y, z and w. - /// The first bit is the first bit of x followed by the first bit of y, the first bit of z and finally the first bit of w. - /// The other bits are interleaved following the previous sequence. - /// - /// @see gtc_bitfield - GLM_FUNC_DECL int32 bitfieldInterleave(int8 x, int8 y, int8 z, int8 w); - - /// Interleaves the bits of x, y, z and w. - /// The first bit is the first bit of x followed by the first bit of y, the first bit of z and finally the first bit of w. - /// The other bits are interleaved following the previous sequence. - /// - /// @see gtc_bitfield - GLM_FUNC_DECL uint32 bitfieldInterleave(uint8 x, uint8 y, uint8 z, uint8 w); - - /// Interleaves the bits of x, y, z and w. - /// The first bit is the first bit of x followed by the first bit of y, the first bit of z and finally the first bit of w. - /// The other bits are interleaved following the previous sequence. - /// - /// @see gtc_bitfield - GLM_FUNC_DECL int64 bitfieldInterleave(int16 x, int16 y, int16 z, int16 w); - - /// Interleaves the bits of x, y, z and w. - /// The first bit is the first bit of x followed by the first bit of y, the first bit of z and finally the first bit of w. - /// The other bits are interleaved following the previous sequence. 
- /// - /// @see gtc_bitfield - GLM_FUNC_DECL uint64 bitfieldInterleave(uint16 x, uint16 y, uint16 z, uint16 w); - - /// @} -} //namespace glm - -#include "bitfield.inl" diff --git a/third_party/glm/gtc/bitfield.inl b/third_party/glm/gtc/bitfield.inl deleted file mode 100755 index 06cf188..0000000 --- a/third_party/glm/gtc/bitfield.inl +++ /dev/null @@ -1,626 +0,0 @@ -/// @ref gtc_bitfield - -#include "../simd/integer.h" - -namespace glm{ -namespace detail -{ - template - GLM_FUNC_DECL RET bitfieldInterleave(PARAM x, PARAM y); - - template - GLM_FUNC_DECL RET bitfieldInterleave(PARAM x, PARAM y, PARAM z); - - template - GLM_FUNC_DECL RET bitfieldInterleave(PARAM x, PARAM y, PARAM z, PARAM w); - - template<> - GLM_FUNC_QUALIFIER glm::uint16 bitfieldInterleave(glm::uint8 x, glm::uint8 y) - { - glm::uint16 REG1(x); - glm::uint16 REG2(y); - - REG1 = ((REG1 << 4) | REG1) & static_cast(0x0F0F); - REG2 = ((REG2 << 4) | REG2) & static_cast(0x0F0F); - - REG1 = ((REG1 << 2) | REG1) & static_cast(0x3333); - REG2 = ((REG2 << 2) | REG2) & static_cast(0x3333); - - REG1 = ((REG1 << 1) | REG1) & static_cast(0x5555); - REG2 = ((REG2 << 1) | REG2) & static_cast(0x5555); - - return REG1 | static_cast(REG2 << 1); - } - - template<> - GLM_FUNC_QUALIFIER glm::uint32 bitfieldInterleave(glm::uint16 x, glm::uint16 y) - { - glm::uint32 REG1(x); - glm::uint32 REG2(y); - - REG1 = ((REG1 << 8) | REG1) & static_cast(0x00FF00FF); - REG2 = ((REG2 << 8) | REG2) & static_cast(0x00FF00FF); - - REG1 = ((REG1 << 4) | REG1) & static_cast(0x0F0F0F0F); - REG2 = ((REG2 << 4) | REG2) & static_cast(0x0F0F0F0F); - - REG1 = ((REG1 << 2) | REG1) & static_cast(0x33333333); - REG2 = ((REG2 << 2) | REG2) & static_cast(0x33333333); - - REG1 = ((REG1 << 1) | REG1) & static_cast(0x55555555); - REG2 = ((REG2 << 1) | REG2) & static_cast(0x55555555); - - return REG1 | (REG2 << 1); - } - - template<> - GLM_FUNC_QUALIFIER glm::uint64 bitfieldInterleave(glm::uint32 x, glm::uint32 y) - { - glm::uint64 REG1(x); - glm::uint64 REG2(y); - - REG1 = ((REG1 << 16) | REG1) & static_cast(0x0000FFFF0000FFFFull); - REG2 = ((REG2 << 16) | REG2) & static_cast(0x0000FFFF0000FFFFull); - - REG1 = ((REG1 << 8) | REG1) & static_cast(0x00FF00FF00FF00FFull); - REG2 = ((REG2 << 8) | REG2) & static_cast(0x00FF00FF00FF00FFull); - - REG1 = ((REG1 << 4) | REG1) & static_cast(0x0F0F0F0F0F0F0F0Full); - REG2 = ((REG2 << 4) | REG2) & static_cast(0x0F0F0F0F0F0F0F0Full); - - REG1 = ((REG1 << 2) | REG1) & static_cast(0x3333333333333333ull); - REG2 = ((REG2 << 2) | REG2) & static_cast(0x3333333333333333ull); - - REG1 = ((REG1 << 1) | REG1) & static_cast(0x5555555555555555ull); - REG2 = ((REG2 << 1) | REG2) & static_cast(0x5555555555555555ull); - - return REG1 | (REG2 << 1); - } - - template<> - GLM_FUNC_QUALIFIER glm::uint32 bitfieldInterleave(glm::uint8 x, glm::uint8 y, glm::uint8 z) - { - glm::uint32 REG1(x); - glm::uint32 REG2(y); - glm::uint32 REG3(z); - - REG1 = ((REG1 << 16) | REG1) & static_cast(0xFF0000FFu); - REG2 = ((REG2 << 16) | REG2) & static_cast(0xFF0000FFu); - REG3 = ((REG3 << 16) | REG3) & static_cast(0xFF0000FFu); - - REG1 = ((REG1 << 8) | REG1) & static_cast(0x0F00F00Fu); - REG2 = ((REG2 << 8) | REG2) & static_cast(0x0F00F00Fu); - REG3 = ((REG3 << 8) | REG3) & static_cast(0x0F00F00Fu); - - REG1 = ((REG1 << 4) | REG1) & static_cast(0xC30C30C3u); - REG2 = ((REG2 << 4) | REG2) & static_cast(0xC30C30C3u); - REG3 = ((REG3 << 4) | REG3) & static_cast(0xC30C30C3u); - - REG1 = ((REG1 << 2) | REG1) & static_cast(0x49249249u); - REG2 = ((REG2 << 2) | REG2) & 
static_cast(0x49249249u); - REG3 = ((REG3 << 2) | REG3) & static_cast(0x49249249u); - - return REG1 | (REG2 << 1) | (REG3 << 2); - } - - template<> - GLM_FUNC_QUALIFIER glm::uint64 bitfieldInterleave(glm::uint16 x, glm::uint16 y, glm::uint16 z) - { - glm::uint64 REG1(x); - glm::uint64 REG2(y); - glm::uint64 REG3(z); - - REG1 = ((REG1 << 32) | REG1) & static_cast(0xFFFF00000000FFFFull); - REG2 = ((REG2 << 32) | REG2) & static_cast(0xFFFF00000000FFFFull); - REG3 = ((REG3 << 32) | REG3) & static_cast(0xFFFF00000000FFFFull); - - REG1 = ((REG1 << 16) | REG1) & static_cast(0x00FF0000FF0000FFull); - REG2 = ((REG2 << 16) | REG2) & static_cast(0x00FF0000FF0000FFull); - REG3 = ((REG3 << 16) | REG3) & static_cast(0x00FF0000FF0000FFull); - - REG1 = ((REG1 << 8) | REG1) & static_cast(0xF00F00F00F00F00Full); - REG2 = ((REG2 << 8) | REG2) & static_cast(0xF00F00F00F00F00Full); - REG3 = ((REG3 << 8) | REG3) & static_cast(0xF00F00F00F00F00Full); - - REG1 = ((REG1 << 4) | REG1) & static_cast(0x30C30C30C30C30C3ull); - REG2 = ((REG2 << 4) | REG2) & static_cast(0x30C30C30C30C30C3ull); - REG3 = ((REG3 << 4) | REG3) & static_cast(0x30C30C30C30C30C3ull); - - REG1 = ((REG1 << 2) | REG1) & static_cast(0x9249249249249249ull); - REG2 = ((REG2 << 2) | REG2) & static_cast(0x9249249249249249ull); - REG3 = ((REG3 << 2) | REG3) & static_cast(0x9249249249249249ull); - - return REG1 | (REG2 << 1) | (REG3 << 2); - } - - template<> - GLM_FUNC_QUALIFIER glm::uint64 bitfieldInterleave(glm::uint32 x, glm::uint32 y, glm::uint32 z) - { - glm::uint64 REG1(x); - glm::uint64 REG2(y); - glm::uint64 REG3(z); - - REG1 = ((REG1 << 32) | REG1) & static_cast(0xFFFF00000000FFFFull); - REG2 = ((REG2 << 32) | REG2) & static_cast(0xFFFF00000000FFFFull); - REG3 = ((REG3 << 32) | REG3) & static_cast(0xFFFF00000000FFFFull); - - REG1 = ((REG1 << 16) | REG1) & static_cast(0x00FF0000FF0000FFull); - REG2 = ((REG2 << 16) | REG2) & static_cast(0x00FF0000FF0000FFull); - REG3 = ((REG3 << 16) | REG3) & static_cast(0x00FF0000FF0000FFull); - - REG1 = ((REG1 << 8) | REG1) & static_cast(0xF00F00F00F00F00Full); - REG2 = ((REG2 << 8) | REG2) & static_cast(0xF00F00F00F00F00Full); - REG3 = ((REG3 << 8) | REG3) & static_cast(0xF00F00F00F00F00Full); - - REG1 = ((REG1 << 4) | REG1) & static_cast(0x30C30C30C30C30C3ull); - REG2 = ((REG2 << 4) | REG2) & static_cast(0x30C30C30C30C30C3ull); - REG3 = ((REG3 << 4) | REG3) & static_cast(0x30C30C30C30C30C3ull); - - REG1 = ((REG1 << 2) | REG1) & static_cast(0x9249249249249249ull); - REG2 = ((REG2 << 2) | REG2) & static_cast(0x9249249249249249ull); - REG3 = ((REG3 << 2) | REG3) & static_cast(0x9249249249249249ull); - - return REG1 | (REG2 << 1) | (REG3 << 2); - } - - template<> - GLM_FUNC_QUALIFIER glm::uint32 bitfieldInterleave(glm::uint8 x, glm::uint8 y, glm::uint8 z, glm::uint8 w) - { - glm::uint32 REG1(x); - glm::uint32 REG2(y); - glm::uint32 REG3(z); - glm::uint32 REG4(w); - - REG1 = ((REG1 << 12) | REG1) & static_cast(0x000F000Fu); - REG2 = ((REG2 << 12) | REG2) & static_cast(0x000F000Fu); - REG3 = ((REG3 << 12) | REG3) & static_cast(0x000F000Fu); - REG4 = ((REG4 << 12) | REG4) & static_cast(0x000F000Fu); - - REG1 = ((REG1 << 6) | REG1) & static_cast(0x03030303u); - REG2 = ((REG2 << 6) | REG2) & static_cast(0x03030303u); - REG3 = ((REG3 << 6) | REG3) & static_cast(0x03030303u); - REG4 = ((REG4 << 6) | REG4) & static_cast(0x03030303u); - - REG1 = ((REG1 << 3) | REG1) & static_cast(0x11111111u); - REG2 = ((REG2 << 3) | REG2) & static_cast(0x11111111u); - REG3 = ((REG3 << 3) | REG3) & static_cast(0x11111111u); - REG4 = 
((REG4 << 3) | REG4) & static_cast(0x11111111u); - - return REG1 | (REG2 << 1) | (REG3 << 2) | (REG4 << 3); - } - - template<> - GLM_FUNC_QUALIFIER glm::uint64 bitfieldInterleave(glm::uint16 x, glm::uint16 y, glm::uint16 z, glm::uint16 w) - { - glm::uint64 REG1(x); - glm::uint64 REG2(y); - glm::uint64 REG3(z); - glm::uint64 REG4(w); - - REG1 = ((REG1 << 24) | REG1) & static_cast(0x000000FF000000FFull); - REG2 = ((REG2 << 24) | REG2) & static_cast(0x000000FF000000FFull); - REG3 = ((REG3 << 24) | REG3) & static_cast(0x000000FF000000FFull); - REG4 = ((REG4 << 24) | REG4) & static_cast(0x000000FF000000FFull); - - REG1 = ((REG1 << 12) | REG1) & static_cast(0x000F000F000F000Full); - REG2 = ((REG2 << 12) | REG2) & static_cast(0x000F000F000F000Full); - REG3 = ((REG3 << 12) | REG3) & static_cast(0x000F000F000F000Full); - REG4 = ((REG4 << 12) | REG4) & static_cast(0x000F000F000F000Full); - - REG1 = ((REG1 << 6) | REG1) & static_cast(0x0303030303030303ull); - REG2 = ((REG2 << 6) | REG2) & static_cast(0x0303030303030303ull); - REG3 = ((REG3 << 6) | REG3) & static_cast(0x0303030303030303ull); - REG4 = ((REG4 << 6) | REG4) & static_cast(0x0303030303030303ull); - - REG1 = ((REG1 << 3) | REG1) & static_cast(0x1111111111111111ull); - REG2 = ((REG2 << 3) | REG2) & static_cast(0x1111111111111111ull); - REG3 = ((REG3 << 3) | REG3) & static_cast(0x1111111111111111ull); - REG4 = ((REG4 << 3) | REG4) & static_cast(0x1111111111111111ull); - - return REG1 | (REG2 << 1) | (REG3 << 2) | (REG4 << 3); - } -}//namespace detail - - template - GLM_FUNC_QUALIFIER genIUType mask(genIUType Bits) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_integer, "'mask' accepts only integer values"); - - return Bits >= sizeof(genIUType) * 8 ? ~static_cast(0) : (static_cast(1) << Bits) - static_cast(1); - } - - template - GLM_FUNC_QUALIFIER vec mask(vec const& v) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_integer, "'mask' accepts only integer values"); - - return detail::functor1::call(mask, v); - } - - template - GLM_FUNC_QUALIFIER genIType bitfieldRotateRight(genIType In, int Shift) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_integer, "'bitfieldRotateRight' accepts only integer values"); - - int const BitSize = static_cast(sizeof(genIType) * 8); - return (In << static_cast(Shift)) | (In >> static_cast(BitSize - Shift)); - } - - template - GLM_FUNC_QUALIFIER vec bitfieldRotateRight(vec const& In, int Shift) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_integer, "'bitfieldRotateRight' accepts only integer values"); - - int const BitSize = static_cast(sizeof(T) * 8); - return (In << static_cast(Shift)) | (In >> static_cast(BitSize - Shift)); - } - - template - GLM_FUNC_QUALIFIER genIType bitfieldRotateLeft(genIType In, int Shift) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_integer, "'bitfieldRotateLeft' accepts only integer values"); - - int const BitSize = static_cast(sizeof(genIType) * 8); - return (In >> static_cast(Shift)) | (In << static_cast(BitSize - Shift)); - } - - template - GLM_FUNC_QUALIFIER vec bitfieldRotateLeft(vec const& In, int Shift) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_integer, "'bitfieldRotateLeft' accepts only integer values"); - - int const BitSize = static_cast(sizeof(T) * 8); - return (In >> static_cast(Shift)) | (In << static_cast(BitSize - Shift)); - } - - template - GLM_FUNC_QUALIFIER genIUType bitfieldFillOne(genIUType Value, int FirstBit, int BitCount) - { - return Value | static_cast(mask(BitCount) << FirstBit); - } - - template - GLM_FUNC_QUALIFIER vec bitfieldFillOne(vec const& 
Value, int FirstBit, int BitCount) - { - return Value | static_cast(mask(BitCount) << FirstBit); - } - - template - GLM_FUNC_QUALIFIER genIUType bitfieldFillZero(genIUType Value, int FirstBit, int BitCount) - { - return Value & static_cast(~(mask(BitCount) << FirstBit)); - } - - template - GLM_FUNC_QUALIFIER vec bitfieldFillZero(vec const& Value, int FirstBit, int BitCount) - { - return Value & static_cast(~(mask(BitCount) << FirstBit)); - } - - GLM_FUNC_QUALIFIER int16 bitfieldInterleave(int8 x, int8 y) - { - union sign8 - { - int8 i; - uint8 u; - } sign_x, sign_y; - - union sign16 - { - int16 i; - uint16 u; - } result; - - sign_x.i = x; - sign_y.i = y; - result.u = bitfieldInterleave(sign_x.u, sign_y.u); - - return result.i; - } - - GLM_FUNC_QUALIFIER uint16 bitfieldInterleave(uint8 x, uint8 y) - { - return detail::bitfieldInterleave(x, y); - } - - GLM_FUNC_QUALIFIER uint16 bitfieldInterleave(u8vec2 const& v) - { - return detail::bitfieldInterleave(v.x, v.y); - } - - GLM_FUNC_QUALIFIER u8vec2 bitfieldDeinterleave(glm::uint16 x) - { - uint16 REG1(x); - uint16 REG2(x >>= 1); - - REG1 = REG1 & static_cast(0x5555); - REG2 = REG2 & static_cast(0x5555); - - REG1 = ((REG1 >> 1) | REG1) & static_cast(0x3333); - REG2 = ((REG2 >> 1) | REG2) & static_cast(0x3333); - - REG1 = ((REG1 >> 2) | REG1) & static_cast(0x0F0F); - REG2 = ((REG2 >> 2) | REG2) & static_cast(0x0F0F); - - REG1 = ((REG1 >> 4) | REG1) & static_cast(0x00FF); - REG2 = ((REG2 >> 4) | REG2) & static_cast(0x00FF); - - REG1 = ((REG1 >> 8) | REG1) & static_cast(0xFFFF); - REG2 = ((REG2 >> 8) | REG2) & static_cast(0xFFFF); - - return glm::u8vec2(REG1, REG2); - } - - GLM_FUNC_QUALIFIER int32 bitfieldInterleave(int16 x, int16 y) - { - union sign16 - { - int16 i; - uint16 u; - } sign_x, sign_y; - - union sign32 - { - int32 i; - uint32 u; - } result; - - sign_x.i = x; - sign_y.i = y; - result.u = bitfieldInterleave(sign_x.u, sign_y.u); - - return result.i; - } - - GLM_FUNC_QUALIFIER uint32 bitfieldInterleave(uint16 x, uint16 y) - { - return detail::bitfieldInterleave(x, y); - } - - GLM_FUNC_QUALIFIER glm::uint32 bitfieldInterleave(u16vec2 const& v) - { - return detail::bitfieldInterleave(v.x, v.y); - } - - GLM_FUNC_QUALIFIER glm::u16vec2 bitfieldDeinterleave(glm::uint32 x) - { - glm::uint32 REG1(x); - glm::uint32 REG2(x >>= 1); - - REG1 = REG1 & static_cast(0x55555555); - REG2 = REG2 & static_cast(0x55555555); - - REG1 = ((REG1 >> 1) | REG1) & static_cast(0x33333333); - REG2 = ((REG2 >> 1) | REG2) & static_cast(0x33333333); - - REG1 = ((REG1 >> 2) | REG1) & static_cast(0x0F0F0F0F); - REG2 = ((REG2 >> 2) | REG2) & static_cast(0x0F0F0F0F); - - REG1 = ((REG1 >> 4) | REG1) & static_cast(0x00FF00FF); - REG2 = ((REG2 >> 4) | REG2) & static_cast(0x00FF00FF); - - REG1 = ((REG1 >> 8) | REG1) & static_cast(0x0000FFFF); - REG2 = ((REG2 >> 8) | REG2) & static_cast(0x0000FFFF); - - return glm::u16vec2(REG1, REG2); - } - - GLM_FUNC_QUALIFIER int64 bitfieldInterleave(int32 x, int32 y) - { - union sign32 - { - int32 i; - uint32 u; - } sign_x, sign_y; - - union sign64 - { - int64 i; - uint64 u; - } result; - - sign_x.i = x; - sign_y.i = y; - result.u = bitfieldInterleave(sign_x.u, sign_y.u); - - return result.i; - } - - GLM_FUNC_QUALIFIER uint64 bitfieldInterleave(uint32 x, uint32 y) - { - return detail::bitfieldInterleave(x, y); - } - - GLM_FUNC_QUALIFIER glm::uint64 bitfieldInterleave(u32vec2 const& v) - { - return detail::bitfieldInterleave(v.x, v.y); - } - - GLM_FUNC_QUALIFIER glm::u32vec2 bitfieldDeinterleave(glm::uint64 x) - { - glm::uint64 REG1(x); - 
glm::uint64 REG2(x >>= 1); - - REG1 = REG1 & static_cast(0x5555555555555555ull); - REG2 = REG2 & static_cast(0x5555555555555555ull); - - REG1 = ((REG1 >> 1) | REG1) & static_cast(0x3333333333333333ull); - REG2 = ((REG2 >> 1) | REG2) & static_cast(0x3333333333333333ull); - - REG1 = ((REG1 >> 2) | REG1) & static_cast(0x0F0F0F0F0F0F0F0Full); - REG2 = ((REG2 >> 2) | REG2) & static_cast(0x0F0F0F0F0F0F0F0Full); - - REG1 = ((REG1 >> 4) | REG1) & static_cast(0x00FF00FF00FF00FFull); - REG2 = ((REG2 >> 4) | REG2) & static_cast(0x00FF00FF00FF00FFull); - - REG1 = ((REG1 >> 8) | REG1) & static_cast(0x0000FFFF0000FFFFull); - REG2 = ((REG2 >> 8) | REG2) & static_cast(0x0000FFFF0000FFFFull); - - REG1 = ((REG1 >> 16) | REG1) & static_cast(0x00000000FFFFFFFFull); - REG2 = ((REG2 >> 16) | REG2) & static_cast(0x00000000FFFFFFFFull); - - return glm::u32vec2(REG1, REG2); - } - - GLM_FUNC_QUALIFIER int32 bitfieldInterleave(int8 x, int8 y, int8 z) - { - union sign8 - { - int8 i; - uint8 u; - } sign_x, sign_y, sign_z; - - union sign32 - { - int32 i; - uint32 u; - } result; - - sign_x.i = x; - sign_y.i = y; - sign_z.i = z; - result.u = bitfieldInterleave(sign_x.u, sign_y.u, sign_z.u); - - return result.i; - } - - GLM_FUNC_QUALIFIER uint32 bitfieldInterleave(uint8 x, uint8 y, uint8 z) - { - return detail::bitfieldInterleave(x, y, z); - } - - GLM_FUNC_QUALIFIER uint32 bitfieldInterleave(u8vec3 const& v) - { - return detail::bitfieldInterleave(v.x, v.y, v.z); - } - - GLM_FUNC_QUALIFIER int64 bitfieldInterleave(int16 x, int16 y, int16 z) - { - union sign16 - { - int16 i; - uint16 u; - } sign_x, sign_y, sign_z; - - union sign64 - { - int64 i; - uint64 u; - } result; - - sign_x.i = x; - sign_y.i = y; - sign_z.i = z; - result.u = bitfieldInterleave(sign_x.u, sign_y.u, sign_z.u); - - return result.i; - } - - GLM_FUNC_QUALIFIER uint64 bitfieldInterleave(uint16 x, uint16 y, uint16 z) - { - return detail::bitfieldInterleave(x, y, z); - } - - GLM_FUNC_QUALIFIER uint64 bitfieldInterleave(u16vec3 const& v) - { - return detail::bitfieldInterleave(v.x, v.y, v.z); - } - - GLM_FUNC_QUALIFIER int64 bitfieldInterleave(int32 x, int32 y, int32 z) - { - union sign16 - { - int32 i; - uint32 u; - } sign_x, sign_y, sign_z; - - union sign64 - { - int64 i; - uint64 u; - } result; - - sign_x.i = x; - sign_y.i = y; - sign_z.i = z; - result.u = bitfieldInterleave(sign_x.u, sign_y.u, sign_z.u); - - return result.i; - } - - GLM_FUNC_QUALIFIER uint64 bitfieldInterleave(uint32 x, uint32 y, uint32 z) - { - return detail::bitfieldInterleave(x, y, z); - } - - GLM_FUNC_QUALIFIER uint64 bitfieldInterleave(u32vec3 const& v) - { - return detail::bitfieldInterleave(v.x, v.y, v.z); - } - - GLM_FUNC_QUALIFIER int32 bitfieldInterleave(int8 x, int8 y, int8 z, int8 w) - { - union sign8 - { - int8 i; - uint8 u; - } sign_x, sign_y, sign_z, sign_w; - - union sign32 - { - int32 i; - uint32 u; - } result; - - sign_x.i = x; - sign_y.i = y; - sign_z.i = z; - sign_w.i = w; - result.u = bitfieldInterleave(sign_x.u, sign_y.u, sign_z.u, sign_w.u); - - return result.i; - } - - GLM_FUNC_QUALIFIER uint32 bitfieldInterleave(uint8 x, uint8 y, uint8 z, uint8 w) - { - return detail::bitfieldInterleave(x, y, z, w); - } - - GLM_FUNC_QUALIFIER uint32 bitfieldInterleave(u8vec4 const& v) - { - return detail::bitfieldInterleave(v.x, v.y, v.z, v.w); - } - - GLM_FUNC_QUALIFIER int64 bitfieldInterleave(int16 x, int16 y, int16 z, int16 w) - { - union sign16 - { - int16 i; - uint16 u; - } sign_x, sign_y, sign_z, sign_w; - - union sign64 - { - int64 i; - uint64 u; - } result; - - sign_x.i = 
x; - sign_y.i = y; - sign_z.i = z; - sign_w.i = w; - result.u = bitfieldInterleave(sign_x.u, sign_y.u, sign_z.u, sign_w.u); - - return result.i; - } - - GLM_FUNC_QUALIFIER uint64 bitfieldInterleave(uint16 x, uint16 y, uint16 z, uint16 w) - { - return detail::bitfieldInterleave(x, y, z, w); - } - - GLM_FUNC_QUALIFIER uint64 bitfieldInterleave(u16vec4 const& v) - { - return detail::bitfieldInterleave(v.x, v.y, v.z, v.w); - } -}//namespace glm diff --git a/third_party/glm/gtc/color_space.hpp b/third_party/glm/gtc/color_space.hpp deleted file mode 100755 index cffd9f0..0000000 --- a/third_party/glm/gtc/color_space.hpp +++ /dev/null @@ -1,56 +0,0 @@ -/// @ref gtc_color_space -/// @file glm/gtc/color_space.hpp -/// -/// @see core (dependence) -/// @see gtc_color_space (dependence) -/// -/// @defgroup gtc_color_space GLM_GTC_color_space -/// @ingroup gtc -/// -/// Include to use the features of this extension. -/// -/// Allow to perform bit operations on integer values - -#pragma once - -// Dependencies -#include "../detail/setup.hpp" -#include "../detail/qualifier.hpp" -#include "../exponential.hpp" -#include "../vec3.hpp" -#include "../vec4.hpp" -#include - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# pragma message("GLM: GLM_GTC_color_space extension included") -#endif - -namespace glm -{ - /// @addtogroup gtc_color_space - /// @{ - - /// Convert a linear color to sRGB color using a standard gamma correction. - /// IEC 61966-2-1:1999 / Rec. 709 specification https://www.w3.org/Graphics/Color/srgb - template - GLM_FUNC_DECL vec convertLinearToSRGB(vec const& ColorLinear); - - /// Convert a linear color to sRGB color using a custom gamma correction. - /// IEC 61966-2-1:1999 / Rec. 709 specification https://www.w3.org/Graphics/Color/srgb - template - GLM_FUNC_DECL vec convertLinearToSRGB(vec const& ColorLinear, T Gamma); - - /// Convert a sRGB color to linear color using a standard gamma correction. - /// IEC 61966-2-1:1999 / Rec. 709 specification https://www.w3.org/Graphics/Color/srgb - template - GLM_FUNC_DECL vec convertSRGBToLinear(vec const& ColorSRGB); - - /// Convert a sRGB color to linear color using a custom gamma correction. - // IEC 61966-2-1:1999 / Rec. 
709 specification https://www.w3.org/Graphics/Color/srgb - template - GLM_FUNC_DECL vec convertSRGBToLinear(vec const& ColorSRGB, T Gamma); - - /// @} -} //namespace glm - -#include "color_space.inl" diff --git a/third_party/glm/gtc/color_space.inl b/third_party/glm/gtc/color_space.inl deleted file mode 100755 index 2a90004..0000000 --- a/third_party/glm/gtc/color_space.inl +++ /dev/null @@ -1,84 +0,0 @@ -/// @ref gtc_color_space - -namespace glm{ -namespace detail -{ - template - struct compute_rgbToSrgb - { - GLM_FUNC_QUALIFIER static vec call(vec const& ColorRGB, T GammaCorrection) - { - vec const ClampedColor(clamp(ColorRGB, static_cast(0), static_cast(1))); - - return mix( - pow(ClampedColor, vec(GammaCorrection)) * static_cast(1.055) - static_cast(0.055), - ClampedColor * static_cast(12.92), - lessThan(ClampedColor, vec(static_cast(0.0031308)))); - } - }; - - template - struct compute_rgbToSrgb<4, T, Q> - { - GLM_FUNC_QUALIFIER static vec<4, T, Q> call(vec<4, T, Q> const& ColorRGB, T GammaCorrection) - { - return vec<4, T, Q>(compute_rgbToSrgb<3, T, Q>::call(vec<3, T, Q>(ColorRGB), GammaCorrection), ColorRGB.w); - } - }; - - template - struct compute_srgbToRgb - { - GLM_FUNC_QUALIFIER static vec call(vec const& ColorSRGB, T Gamma) - { - return mix( - pow((ColorSRGB + static_cast(0.055)) * static_cast(0.94786729857819905213270142180095), vec(Gamma)), - ColorSRGB * static_cast(0.07739938080495356037151702786378), - lessThanEqual(ColorSRGB, vec(static_cast(0.04045)))); - } - }; - - template - struct compute_srgbToRgb<4, T, Q> - { - GLM_FUNC_QUALIFIER static vec<4, T, Q> call(vec<4, T, Q> const& ColorSRGB, T Gamma) - { - return vec<4, T, Q>(compute_srgbToRgb<3, T, Q>::call(vec<3, T, Q>(ColorSRGB), Gamma), ColorSRGB.w); - } - }; -}//namespace detail - - template - GLM_FUNC_QUALIFIER vec convertLinearToSRGB(vec const& ColorLinear) - { - return detail::compute_rgbToSrgb::call(ColorLinear, static_cast(0.41666)); - } - - // Based on Ian Taylor http://chilliant.blogspot.fr/2012/08/srgb-approximations-for-hlsl.html - template<> - GLM_FUNC_QUALIFIER vec<3, float, lowp> convertLinearToSRGB(vec<3, float, lowp> const& ColorLinear) - { - vec<3, float, lowp> S1 = sqrt(ColorLinear); - vec<3, float, lowp> S2 = sqrt(S1); - vec<3, float, lowp> S3 = sqrt(S2); - return 0.662002687f * S1 + 0.684122060f * S2 - 0.323583601f * S3 - 0.0225411470f * ColorLinear; - } - - template - GLM_FUNC_QUALIFIER vec convertLinearToSRGB(vec const& ColorLinear, T Gamma) - { - return detail::compute_rgbToSrgb::call(ColorLinear, static_cast(1) / Gamma); - } - - template - GLM_FUNC_QUALIFIER vec convertSRGBToLinear(vec const& ColorSRGB) - { - return detail::compute_srgbToRgb::call(ColorSRGB, static_cast(2.4)); - } - - template - GLM_FUNC_QUALIFIER vec convertSRGBToLinear(vec const& ColorSRGB, T Gamma) - { - return detail::compute_srgbToRgb::call(ColorSRGB, Gamma); - } -}//namespace glm diff --git a/third_party/glm/gtc/constants.hpp b/third_party/glm/gtc/constants.hpp deleted file mode 100755 index 99f2128..0000000 --- a/third_party/glm/gtc/constants.hpp +++ /dev/null @@ -1,165 +0,0 @@ -/// @ref gtc_constants -/// @file glm/gtc/constants.hpp -/// -/// @see core (dependence) -/// -/// @defgroup gtc_constants GLM_GTC_constants -/// @ingroup gtc -/// -/// Include to use the features of this extension. -/// -/// Provide a list of constants and precomputed useful values. 
- -#pragma once - -// Dependencies -#include "../ext/scalar_constants.hpp" - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# pragma message("GLM: GLM_GTC_constants extension included") -#endif - -namespace glm -{ - /// @addtogroup gtc_constants - /// @{ - - /// Return 0. - /// @see gtc_constants - template - GLM_FUNC_DECL GLM_CONSTEXPR genType zero(); - - /// Return 1. - /// @see gtc_constants - template - GLM_FUNC_DECL GLM_CONSTEXPR genType one(); - - /// Return pi * 2. - /// @see gtc_constants - template - GLM_FUNC_DECL GLM_CONSTEXPR genType two_pi(); - - /// Return square root of pi. - /// @see gtc_constants - template - GLM_FUNC_DECL GLM_CONSTEXPR genType root_pi(); - - /// Return pi / 2. - /// @see gtc_constants - template - GLM_FUNC_DECL GLM_CONSTEXPR genType half_pi(); - - /// Return pi / 2 * 3. - /// @see gtc_constants - template - GLM_FUNC_DECL GLM_CONSTEXPR genType three_over_two_pi(); - - /// Return pi / 4. - /// @see gtc_constants - template - GLM_FUNC_DECL GLM_CONSTEXPR genType quarter_pi(); - - /// Return 1 / pi. - /// @see gtc_constants - template - GLM_FUNC_DECL GLM_CONSTEXPR genType one_over_pi(); - - /// Return 1 / (pi * 2). - /// @see gtc_constants - template - GLM_FUNC_DECL GLM_CONSTEXPR genType one_over_two_pi(); - - /// Return 2 / pi. - /// @see gtc_constants - template - GLM_FUNC_DECL GLM_CONSTEXPR genType two_over_pi(); - - /// Return 4 / pi. - /// @see gtc_constants - template - GLM_FUNC_DECL GLM_CONSTEXPR genType four_over_pi(); - - /// Return 2 / sqrt(pi). - /// @see gtc_constants - template - GLM_FUNC_DECL GLM_CONSTEXPR genType two_over_root_pi(); - - /// Return 1 / sqrt(2). - /// @see gtc_constants - template - GLM_FUNC_DECL GLM_CONSTEXPR genType one_over_root_two(); - - /// Return sqrt(pi / 2). - /// @see gtc_constants - template - GLM_FUNC_DECL GLM_CONSTEXPR genType root_half_pi(); - - /// Return sqrt(2 * pi). - /// @see gtc_constants - template - GLM_FUNC_DECL GLM_CONSTEXPR genType root_two_pi(); - - /// Return sqrt(ln(4)). - /// @see gtc_constants - template - GLM_FUNC_DECL GLM_CONSTEXPR genType root_ln_four(); - - /// Return e constant. - /// @see gtc_constants - template - GLM_FUNC_DECL GLM_CONSTEXPR genType e(); - - /// Return Euler's constant. - /// @see gtc_constants - template - GLM_FUNC_DECL GLM_CONSTEXPR genType euler(); - - /// Return sqrt(2). - /// @see gtc_constants - template - GLM_FUNC_DECL GLM_CONSTEXPR genType root_two(); - - /// Return sqrt(3). - /// @see gtc_constants - template - GLM_FUNC_DECL GLM_CONSTEXPR genType root_three(); - - /// Return sqrt(5). - /// @see gtc_constants - template - GLM_FUNC_DECL GLM_CONSTEXPR genType root_five(); - - /// Return ln(2). - /// @see gtc_constants - template - GLM_FUNC_DECL GLM_CONSTEXPR genType ln_two(); - - /// Return ln(10). - /// @see gtc_constants - template - GLM_FUNC_DECL GLM_CONSTEXPR genType ln_ten(); - - /// Return ln(ln(2)). - /// @see gtc_constants - template - GLM_FUNC_DECL GLM_CONSTEXPR genType ln_ln_two(); - - /// Return 1 / 3. - /// @see gtc_constants - template - GLM_FUNC_DECL GLM_CONSTEXPR genType third(); - - /// Return 2 / 3. - /// @see gtc_constants - template - GLM_FUNC_DECL GLM_CONSTEXPR genType two_thirds(); - - /// Return the golden ratio constant. 
- /// @see gtc_constants - template - GLM_FUNC_DECL GLM_CONSTEXPR genType golden_ratio(); - - /// @} -} //namespace glm - -#include "constants.inl" diff --git a/third_party/glm/gtc/constants.inl b/third_party/glm/gtc/constants.inl deleted file mode 100755 index bb98c6b..0000000 --- a/third_party/glm/gtc/constants.inl +++ /dev/null @@ -1,167 +0,0 @@ -/// @ref gtc_constants - -namespace glm -{ - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR genType zero() - { - return genType(0); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR genType one() - { - return genType(1); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR genType two_pi() - { - return genType(6.28318530717958647692528676655900576); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR genType root_pi() - { - return genType(1.772453850905516027); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR genType half_pi() - { - return genType(1.57079632679489661923132169163975144); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR genType three_over_two_pi() - { - return genType(4.71238898038468985769396507491925432); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR genType quarter_pi() - { - return genType(0.785398163397448309615660845819875721); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR genType one_over_pi() - { - return genType(0.318309886183790671537767526745028724); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR genType one_over_two_pi() - { - return genType(0.159154943091895335768883763372514362); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR genType two_over_pi() - { - return genType(0.636619772367581343075535053490057448); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR genType four_over_pi() - { - return genType(1.273239544735162686151070106980114898); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR genType two_over_root_pi() - { - return genType(1.12837916709551257389615890312154517); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR genType one_over_root_two() - { - return genType(0.707106781186547524400844362104849039); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR genType root_half_pi() - { - return genType(1.253314137315500251); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR genType root_two_pi() - { - return genType(2.506628274631000502); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR genType root_ln_four() - { - return genType(1.17741002251547469); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR genType e() - { - return genType(2.71828182845904523536); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR genType euler() - { - return genType(0.577215664901532860606); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR genType root_two() - { - return genType(1.41421356237309504880168872420969808); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR genType root_three() - { - return genType(1.73205080756887729352744634150587236); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR genType root_five() - { - return genType(2.23606797749978969640917366873127623); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR genType ln_two() - { - return genType(0.693147180559945309417232121458176568); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR genType ln_ten() - { - return genType(2.30258509299404568401799145468436421); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR genType ln_ln_two() - { - return genType(-0.3665129205816643); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR genType third() - { - return genType(0.3333333333333333333333333333333333333333); - } - - 
template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR genType two_thirds() - { - return genType(0.666666666666666666666666666666666666667); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR genType golden_ratio() - { - return genType(1.61803398874989484820458683436563811); - } - -} //namespace glm diff --git a/third_party/glm/gtc/epsilon.hpp b/third_party/glm/gtc/epsilon.hpp deleted file mode 100755 index 640439b..0000000 --- a/third_party/glm/gtc/epsilon.hpp +++ /dev/null @@ -1,60 +0,0 @@ -/// @ref gtc_epsilon -/// @file glm/gtc/epsilon.hpp -/// -/// @see core (dependence) -/// @see gtc_quaternion (dependence) -/// -/// @defgroup gtc_epsilon GLM_GTC_epsilon -/// @ingroup gtc -/// -/// Include to use the features of this extension. -/// -/// Comparison functions for a user defined epsilon values. - -#pragma once - -// Dependencies -#include "../detail/setup.hpp" -#include "../detail/qualifier.hpp" - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# pragma message("GLM: GLM_GTC_epsilon extension included") -#endif - -namespace glm -{ - /// @addtogroup gtc_epsilon - /// @{ - - /// Returns the component-wise comparison of |x - y| < epsilon. - /// True if this expression is satisfied. - /// - /// @see gtc_epsilon - template - GLM_FUNC_DECL vec epsilonEqual(vec const& x, vec const& y, T const& epsilon); - - /// Returns the component-wise comparison of |x - y| < epsilon. - /// True if this expression is satisfied. - /// - /// @see gtc_epsilon - template - GLM_FUNC_DECL bool epsilonEqual(genType const& x, genType const& y, genType const& epsilon); - - /// Returns the component-wise comparison of |x - y| < epsilon. - /// True if this expression is not satisfied. - /// - /// @see gtc_epsilon - template - GLM_FUNC_DECL vec epsilonNotEqual(vec const& x, vec const& y, T const& epsilon); - - /// Returns the component-wise comparison of |x - y| >= epsilon. - /// True if this expression is not satisfied. 
- /// - /// @see gtc_epsilon - template - GLM_FUNC_DECL bool epsilonNotEqual(genType const& x, genType const& y, genType const& epsilon); - - /// @} -}//namespace glm - -#include "epsilon.inl" diff --git a/third_party/glm/gtc/epsilon.inl b/third_party/glm/gtc/epsilon.inl deleted file mode 100755 index 508b9f8..0000000 --- a/third_party/glm/gtc/epsilon.inl +++ /dev/null @@ -1,80 +0,0 @@ -/// @ref gtc_epsilon - -// Dependency: -#include "../vector_relational.hpp" -#include "../common.hpp" - -namespace glm -{ - template<> - GLM_FUNC_QUALIFIER bool epsilonEqual - ( - float const& x, - float const& y, - float const& epsilon - ) - { - return abs(x - y) < epsilon; - } - - template<> - GLM_FUNC_QUALIFIER bool epsilonEqual - ( - double const& x, - double const& y, - double const& epsilon - ) - { - return abs(x - y) < epsilon; - } - - template - GLM_FUNC_QUALIFIER vec epsilonEqual(vec const& x, vec const& y, T const& epsilon) - { - return lessThan(abs(x - y), vec(epsilon)); - } - - template - GLM_FUNC_QUALIFIER vec epsilonEqual(vec const& x, vec const& y, vec const& epsilon) - { - return lessThan(abs(x - y), vec(epsilon)); - } - - template<> - GLM_FUNC_QUALIFIER bool epsilonNotEqual(float const& x, float const& y, float const& epsilon) - { - return abs(x - y) >= epsilon; - } - - template<> - GLM_FUNC_QUALIFIER bool epsilonNotEqual(double const& x, double const& y, double const& epsilon) - { - return abs(x - y) >= epsilon; - } - - template - GLM_FUNC_QUALIFIER vec epsilonNotEqual(vec const& x, vec const& y, T const& epsilon) - { - return greaterThanEqual(abs(x - y), vec(epsilon)); - } - - template - GLM_FUNC_QUALIFIER vec epsilonNotEqual(vec const& x, vec const& y, vec const& epsilon) - { - return greaterThanEqual(abs(x - y), vec(epsilon)); - } - - template - GLM_FUNC_QUALIFIER vec<4, bool, Q> epsilonEqual(qua const& x, qua const& y, T const& epsilon) - { - vec<4, T, Q> v(x.x - y.x, x.y - y.y, x.z - y.z, x.w - y.w); - return lessThan(abs(v), vec<4, T, Q>(epsilon)); - } - - template - GLM_FUNC_QUALIFIER vec<4, bool, Q> epsilonNotEqual(qua const& x, qua const& y, T const& epsilon) - { - vec<4, T, Q> v(x.x - y.x, x.y - y.y, x.z - y.z, x.w - y.w); - return greaterThanEqual(abs(v), vec<4, T, Q>(epsilon)); - } -}//namespace glm diff --git a/third_party/glm/gtc/integer.hpp b/third_party/glm/gtc/integer.hpp deleted file mode 100755 index 64ce10b..0000000 --- a/third_party/glm/gtc/integer.hpp +++ /dev/null @@ -1,65 +0,0 @@ -/// @ref gtc_integer -/// @file glm/gtc/integer.hpp -/// -/// @see core (dependence) -/// @see gtc_integer (dependence) -/// -/// @defgroup gtc_integer GLM_GTC_integer -/// @ingroup gtc -/// -/// Include to use the features of this extension. -/// -/// @brief Allow to perform bit operations on integer values - -#pragma once - -// Dependencies -#include "../detail/setup.hpp" -#include "../detail/qualifier.hpp" -#include "../common.hpp" -#include "../integer.hpp" -#include "../exponential.hpp" -#include - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# pragma message("GLM: GLM_GTC_integer extension included") -#endif - -namespace glm -{ - /// @addtogroup gtc_integer - /// @{ - - /// Returns the log2 of x for integer values. Usefull to compute mipmap count from the texture size. - /// @see gtc_integer - template - GLM_FUNC_DECL genIUType log2(genIUType x); - - /// Returns a value equal to the nearest integer to x. - /// The fraction 0.5 will round in a direction chosen by the - /// implementation, presumably the direction that is fastest. 
- /// - /// @param x The values of the argument must be greater or equal to zero. - /// @tparam T floating point scalar types. - /// - /// @see GLSL round man page - /// @see gtc_integer - template - GLM_FUNC_DECL vec iround(vec const& x); - - /// Returns a value equal to the nearest integer to x. - /// The fraction 0.5 will round in a direction chosen by the - /// implementation, presumably the direction that is fastest. - /// - /// @param x The values of the argument must be greater or equal to zero. - /// @tparam T floating point scalar types. - /// - /// @see GLSL round man page - /// @see gtc_integer - template - GLM_FUNC_DECL vec uround(vec const& x); - - /// @} -} //namespace glm - -#include "integer.inl" diff --git a/third_party/glm/gtc/integer.inl b/third_party/glm/gtc/integer.inl deleted file mode 100755 index f0a8b4f..0000000 --- a/third_party/glm/gtc/integer.inl +++ /dev/null @@ -1,68 +0,0 @@ -/// @ref gtc_integer - -namespace glm{ -namespace detail -{ - template - struct compute_log2 - { - GLM_FUNC_QUALIFIER static vec call(vec const& v) - { - //Equivalent to return findMSB(vec); but save one function call in ASM with VC - //return findMSB(vec); - return vec(detail::compute_findMSB_vec::call(v)); - } - }; - -# if GLM_HAS_BITSCAN_WINDOWS - template - struct compute_log2<4, int, Q, false, Aligned> - { - GLM_FUNC_QUALIFIER static vec<4, int, Q> call(vec<4, int, Q> const& v) - { - vec<4, int, Q> Result; - _BitScanReverse(reinterpret_cast(&Result.x), v.x); - _BitScanReverse(reinterpret_cast(&Result.y), v.y); - _BitScanReverse(reinterpret_cast(&Result.z), v.z); - _BitScanReverse(reinterpret_cast(&Result.w), v.w); - return Result; - } - }; -# endif//GLM_HAS_BITSCAN_WINDOWS -}//namespace detail - template - GLM_FUNC_QUALIFIER int iround(genType x) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'iround' only accept floating-point inputs"); - assert(static_cast(0.0) <= x); - - return static_cast(x + static_cast(0.5)); - } - - template - GLM_FUNC_QUALIFIER vec iround(vec const& x) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'iround' only accept floating-point inputs"); - assert(all(lessThanEqual(vec(0), x))); - - return vec(x + static_cast(0.5)); - } - - template - GLM_FUNC_QUALIFIER uint uround(genType x) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'uround' only accept floating-point inputs"); - assert(static_cast(0.0) <= x); - - return static_cast(x + static_cast(0.5)); - } - - template - GLM_FUNC_QUALIFIER vec uround(vec const& x) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'uround' only accept floating-point inputs"); - assert(all(lessThanEqual(vec(0), x))); - - return vec(x + static_cast(0.5)); - } -}//namespace glm diff --git a/third_party/glm/gtc/matrix_access.hpp b/third_party/glm/gtc/matrix_access.hpp deleted file mode 100755 index 4935ba7..0000000 --- a/third_party/glm/gtc/matrix_access.hpp +++ /dev/null @@ -1,60 +0,0 @@ -/// @ref gtc_matrix_access -/// @file glm/gtc/matrix_access.hpp -/// -/// @see core (dependence) -/// -/// @defgroup gtc_matrix_access GLM_GTC_matrix_access -/// @ingroup gtc -/// -/// Include to use the features of this extension. -/// -/// Defines functions to access rows or columns of a matrix easily. - -#pragma once - -// Dependency: -#include "../detail/setup.hpp" - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# pragma message("GLM: GLM_GTC_matrix_access extension included") -#endif - -namespace glm -{ - /// @addtogroup gtc_matrix_access - /// @{ - - /// Get a specific row of a matrix. 
- /// @see gtc_matrix_access - template - GLM_FUNC_DECL typename genType::row_type row( - genType const& m, - length_t index); - - /// Set a specific row to a matrix. - /// @see gtc_matrix_access - template - GLM_FUNC_DECL genType row( - genType const& m, - length_t index, - typename genType::row_type const& x); - - /// Get a specific column of a matrix. - /// @see gtc_matrix_access - template - GLM_FUNC_DECL typename genType::col_type column( - genType const& m, - length_t index); - - /// Set a specific column to a matrix. - /// @see gtc_matrix_access - template - GLM_FUNC_DECL genType column( - genType const& m, - length_t index, - typename genType::col_type const& x); - - /// @} -}//namespace glm - -#include "matrix_access.inl" diff --git a/third_party/glm/gtc/matrix_access.inl b/third_party/glm/gtc/matrix_access.inl deleted file mode 100755 index 09fcc10..0000000 --- a/third_party/glm/gtc/matrix_access.inl +++ /dev/null @@ -1,62 +0,0 @@ -/// @ref gtc_matrix_access - -namespace glm -{ - template - GLM_FUNC_QUALIFIER genType row - ( - genType const& m, - length_t index, - typename genType::row_type const& x - ) - { - assert(index >= 0 && index < m[0].length()); - - genType Result = m; - for(length_t i = 0; i < m.length(); ++i) - Result[i][index] = x[i]; - return Result; - } - - template - GLM_FUNC_QUALIFIER typename genType::row_type row - ( - genType const& m, - length_t index - ) - { - assert(index >= 0 && index < m[0].length()); - - typename genType::row_type Result(0); - for(length_t i = 0; i < m.length(); ++i) - Result[i] = m[i][index]; - return Result; - } - - template - GLM_FUNC_QUALIFIER genType column - ( - genType const& m, - length_t index, - typename genType::col_type const& x - ) - { - assert(index >= 0 && index < m.length()); - - genType Result = m; - Result[index] = x; - return Result; - } - - template - GLM_FUNC_QUALIFIER typename genType::col_type column - ( - genType const& m, - length_t index - ) - { - assert(index >= 0 && index < m.length()); - - return m[index]; - } -}//namespace glm diff --git a/third_party/glm/gtc/matrix_integer.hpp b/third_party/glm/gtc/matrix_integer.hpp deleted file mode 100755 index 557a977..0000000 --- a/third_party/glm/gtc/matrix_integer.hpp +++ /dev/null @@ -1,487 +0,0 @@ -/// @ref gtc_matrix_integer -/// @file glm/gtc/matrix_integer.hpp -/// -/// @see core (dependence) -/// -/// @defgroup gtc_matrix_integer GLM_GTC_matrix_integer -/// @ingroup gtc -/// -/// Include to use the features of this extension. -/// -/// Defines a number of matrices with integer types. - -#pragma once - -// Dependency: -#include "../mat2x2.hpp" -#include "../mat2x3.hpp" -#include "../mat2x4.hpp" -#include "../mat3x2.hpp" -#include "../mat3x3.hpp" -#include "../mat3x4.hpp" -#include "../mat4x2.hpp" -#include "../mat4x3.hpp" -#include "../mat4x4.hpp" - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# pragma message("GLM: GLM_GTC_matrix_integer extension included") -#endif - -namespace glm -{ - /// @addtogroup gtc_matrix_integer - /// @{ - - /// High-qualifier signed integer 2x2 matrix. - /// @see gtc_matrix_integer - typedef mat<2, 2, int, highp> highp_imat2; - - /// High-qualifier signed integer 3x3 matrix. - /// @see gtc_matrix_integer - typedef mat<3, 3, int, highp> highp_imat3; - - /// High-qualifier signed integer 4x4 matrix. - /// @see gtc_matrix_integer - typedef mat<4, 4, int, highp> highp_imat4; - - /// High-qualifier signed integer 2x2 matrix. 
- /// @see gtc_matrix_integer - typedef mat<2, 2, int, highp> highp_imat2x2; - - /// High-qualifier signed integer 2x3 matrix. - /// @see gtc_matrix_integer - typedef mat<2, 3, int, highp> highp_imat2x3; - - /// High-qualifier signed integer 2x4 matrix. - /// @see gtc_matrix_integer - typedef mat<2, 4, int, highp> highp_imat2x4; - - /// High-qualifier signed integer 3x2 matrix. - /// @see gtc_matrix_integer - typedef mat<3, 2, int, highp> highp_imat3x2; - - /// High-qualifier signed integer 3x3 matrix. - /// @see gtc_matrix_integer - typedef mat<3, 3, int, highp> highp_imat3x3; - - /// High-qualifier signed integer 3x4 matrix. - /// @see gtc_matrix_integer - typedef mat<3, 4, int, highp> highp_imat3x4; - - /// High-qualifier signed integer 4x2 matrix. - /// @see gtc_matrix_integer - typedef mat<4, 2, int, highp> highp_imat4x2; - - /// High-qualifier signed integer 4x3 matrix. - /// @see gtc_matrix_integer - typedef mat<4, 3, int, highp> highp_imat4x3; - - /// High-qualifier signed integer 4x4 matrix. - /// @see gtc_matrix_integer - typedef mat<4, 4, int, highp> highp_imat4x4; - - - /// Medium-qualifier signed integer 2x2 matrix. - /// @see gtc_matrix_integer - typedef mat<2, 2, int, mediump> mediump_imat2; - - /// Medium-qualifier signed integer 3x3 matrix. - /// @see gtc_matrix_integer - typedef mat<3, 3, int, mediump> mediump_imat3; - - /// Medium-qualifier signed integer 4x4 matrix. - /// @see gtc_matrix_integer - typedef mat<4, 4, int, mediump> mediump_imat4; - - - /// Medium-qualifier signed integer 2x2 matrix. - /// @see gtc_matrix_integer - typedef mat<2, 2, int, mediump> mediump_imat2x2; - - /// Medium-qualifier signed integer 2x3 matrix. - /// @see gtc_matrix_integer - typedef mat<2, 3, int, mediump> mediump_imat2x3; - - /// Medium-qualifier signed integer 2x4 matrix. - /// @see gtc_matrix_integer - typedef mat<2, 4, int, mediump> mediump_imat2x4; - - /// Medium-qualifier signed integer 3x2 matrix. - /// @see gtc_matrix_integer - typedef mat<3, 2, int, mediump> mediump_imat3x2; - - /// Medium-qualifier signed integer 3x3 matrix. - /// @see gtc_matrix_integer - typedef mat<3, 3, int, mediump> mediump_imat3x3; - - /// Medium-qualifier signed integer 3x4 matrix. - /// @see gtc_matrix_integer - typedef mat<3, 4, int, mediump> mediump_imat3x4; - - /// Medium-qualifier signed integer 4x2 matrix. - /// @see gtc_matrix_integer - typedef mat<4, 2, int, mediump> mediump_imat4x2; - - /// Medium-qualifier signed integer 4x3 matrix. - /// @see gtc_matrix_integer - typedef mat<4, 3, int, mediump> mediump_imat4x3; - - /// Medium-qualifier signed integer 4x4 matrix. - /// @see gtc_matrix_integer - typedef mat<4, 4, int, mediump> mediump_imat4x4; - - - /// Low-qualifier signed integer 2x2 matrix. - /// @see gtc_matrix_integer - typedef mat<2, 2, int, lowp> lowp_imat2; - - /// Low-qualifier signed integer 3x3 matrix. - /// @see gtc_matrix_integer - typedef mat<3, 3, int, lowp> lowp_imat3; - - /// Low-qualifier signed integer 4x4 matrix. - /// @see gtc_matrix_integer - typedef mat<4, 4, int, lowp> lowp_imat4; - - - /// Low-qualifier signed integer 2x2 matrix. - /// @see gtc_matrix_integer - typedef mat<2, 2, int, lowp> lowp_imat2x2; - - /// Low-qualifier signed integer 2x3 matrix. - /// @see gtc_matrix_integer - typedef mat<2, 3, int, lowp> lowp_imat2x3; - - /// Low-qualifier signed integer 2x4 matrix. - /// @see gtc_matrix_integer - typedef mat<2, 4, int, lowp> lowp_imat2x4; - - /// Low-qualifier signed integer 3x2 matrix. 
- /// @see gtc_matrix_integer - typedef mat<3, 2, int, lowp> lowp_imat3x2; - - /// Low-qualifier signed integer 3x3 matrix. - /// @see gtc_matrix_integer - typedef mat<3, 3, int, lowp> lowp_imat3x3; - - /// Low-qualifier signed integer 3x4 matrix. - /// @see gtc_matrix_integer - typedef mat<3, 4, int, lowp> lowp_imat3x4; - - /// Low-qualifier signed integer 4x2 matrix. - /// @see gtc_matrix_integer - typedef mat<4, 2, int, lowp> lowp_imat4x2; - - /// Low-qualifier signed integer 4x3 matrix. - /// @see gtc_matrix_integer - typedef mat<4, 3, int, lowp> lowp_imat4x3; - - /// Low-qualifier signed integer 4x4 matrix. - /// @see gtc_matrix_integer - typedef mat<4, 4, int, lowp> lowp_imat4x4; - - - /// High-qualifier unsigned integer 2x2 matrix. - /// @see gtc_matrix_integer - typedef mat<2, 2, uint, highp> highp_umat2; - - /// High-qualifier unsigned integer 3x3 matrix. - /// @see gtc_matrix_integer - typedef mat<3, 3, uint, highp> highp_umat3; - - /// High-qualifier unsigned integer 4x4 matrix. - /// @see gtc_matrix_integer - typedef mat<4, 4, uint, highp> highp_umat4; - - /// High-qualifier unsigned integer 2x2 matrix. - /// @see gtc_matrix_integer - typedef mat<2, 2, uint, highp> highp_umat2x2; - - /// High-qualifier unsigned integer 2x3 matrix. - /// @see gtc_matrix_integer - typedef mat<2, 3, uint, highp> highp_umat2x3; - - /// High-qualifier unsigned integer 2x4 matrix. - /// @see gtc_matrix_integer - typedef mat<2, 4, uint, highp> highp_umat2x4; - - /// High-qualifier unsigned integer 3x2 matrix. - /// @see gtc_matrix_integer - typedef mat<3, 2, uint, highp> highp_umat3x2; - - /// High-qualifier unsigned integer 3x3 matrix. - /// @see gtc_matrix_integer - typedef mat<3, 3, uint, highp> highp_umat3x3; - - /// High-qualifier unsigned integer 3x4 matrix. - /// @see gtc_matrix_integer - typedef mat<3, 4, uint, highp> highp_umat3x4; - - /// High-qualifier unsigned integer 4x2 matrix. - /// @see gtc_matrix_integer - typedef mat<4, 2, uint, highp> highp_umat4x2; - - /// High-qualifier unsigned integer 4x3 matrix. - /// @see gtc_matrix_integer - typedef mat<4, 3, uint, highp> highp_umat4x3; - - /// High-qualifier unsigned integer 4x4 matrix. - /// @see gtc_matrix_integer - typedef mat<4, 4, uint, highp> highp_umat4x4; - - - /// Medium-qualifier unsigned integer 2x2 matrix. - /// @see gtc_matrix_integer - typedef mat<2, 2, uint, mediump> mediump_umat2; - - /// Medium-qualifier unsigned integer 3x3 matrix. - /// @see gtc_matrix_integer - typedef mat<3, 3, uint, mediump> mediump_umat3; - - /// Medium-qualifier unsigned integer 4x4 matrix. - /// @see gtc_matrix_integer - typedef mat<4, 4, uint, mediump> mediump_umat4; - - - /// Medium-qualifier unsigned integer 2x2 matrix. - /// @see gtc_matrix_integer - typedef mat<2, 2, uint, mediump> mediump_umat2x2; - - /// Medium-qualifier unsigned integer 2x3 matrix. - /// @see gtc_matrix_integer - typedef mat<2, 3, uint, mediump> mediump_umat2x3; - - /// Medium-qualifier unsigned integer 2x4 matrix. - /// @see gtc_matrix_integer - typedef mat<2, 4, uint, mediump> mediump_umat2x4; - - /// Medium-qualifier unsigned integer 3x2 matrix. - /// @see gtc_matrix_integer - typedef mat<3, 2, uint, mediump> mediump_umat3x2; - - /// Medium-qualifier unsigned integer 3x3 matrix. - /// @see gtc_matrix_integer - typedef mat<3, 3, uint, mediump> mediump_umat3x3; - - /// Medium-qualifier unsigned integer 3x4 matrix. - /// @see gtc_matrix_integer - typedef mat<3, 4, uint, mediump> mediump_umat3x4; - - /// Medium-qualifier unsigned integer 4x2 matrix. 
- /// @see gtc_matrix_integer - typedef mat<4, 2, uint, mediump> mediump_umat4x2; - - /// Medium-qualifier unsigned integer 4x3 matrix. - /// @see gtc_matrix_integer - typedef mat<4, 3, uint, mediump> mediump_umat4x3; - - /// Medium-qualifier unsigned integer 4x4 matrix. - /// @see gtc_matrix_integer - typedef mat<4, 4, uint, mediump> mediump_umat4x4; - - - /// Low-qualifier unsigned integer 2x2 matrix. - /// @see gtc_matrix_integer - typedef mat<2, 2, uint, lowp> lowp_umat2; - - /// Low-qualifier unsigned integer 3x3 matrix. - /// @see gtc_matrix_integer - typedef mat<3, 3, uint, lowp> lowp_umat3; - - /// Low-qualifier unsigned integer 4x4 matrix. - /// @see gtc_matrix_integer - typedef mat<4, 4, uint, lowp> lowp_umat4; - - - /// Low-qualifier unsigned integer 2x2 matrix. - /// @see gtc_matrix_integer - typedef mat<2, 2, uint, lowp> lowp_umat2x2; - - /// Low-qualifier unsigned integer 2x3 matrix. - /// @see gtc_matrix_integer - typedef mat<2, 3, uint, lowp> lowp_umat2x3; - - /// Low-qualifier unsigned integer 2x4 matrix. - /// @see gtc_matrix_integer - typedef mat<2, 4, uint, lowp> lowp_umat2x4; - - /// Low-qualifier unsigned integer 3x2 matrix. - /// @see gtc_matrix_integer - typedef mat<3, 2, uint, lowp> lowp_umat3x2; - - /// Low-qualifier unsigned integer 3x3 matrix. - /// @see gtc_matrix_integer - typedef mat<3, 3, uint, lowp> lowp_umat3x3; - - /// Low-qualifier unsigned integer 3x4 matrix. - /// @see gtc_matrix_integer - typedef mat<3, 4, uint, lowp> lowp_umat3x4; - - /// Low-qualifier unsigned integer 4x2 matrix. - /// @see gtc_matrix_integer - typedef mat<4, 2, uint, lowp> lowp_umat4x2; - - /// Low-qualifier unsigned integer 4x3 matrix. - /// @see gtc_matrix_integer - typedef mat<4, 3, uint, lowp> lowp_umat4x3; - - /// Low-qualifier unsigned integer 4x4 matrix. - /// @see gtc_matrix_integer - typedef mat<4, 4, uint, lowp> lowp_umat4x4; - -#if(defined(GLM_PRECISION_HIGHP_INT)) - typedef highp_imat2 imat2; - typedef highp_imat3 imat3; - typedef highp_imat4 imat4; - typedef highp_imat2x2 imat2x2; - typedef highp_imat2x3 imat2x3; - typedef highp_imat2x4 imat2x4; - typedef highp_imat3x2 imat3x2; - typedef highp_imat3x3 imat3x3; - typedef highp_imat3x4 imat3x4; - typedef highp_imat4x2 imat4x2; - typedef highp_imat4x3 imat4x3; - typedef highp_imat4x4 imat4x4; -#elif(defined(GLM_PRECISION_LOWP_INT)) - typedef lowp_imat2 imat2; - typedef lowp_imat3 imat3; - typedef lowp_imat4 imat4; - typedef lowp_imat2x2 imat2x2; - typedef lowp_imat2x3 imat2x3; - typedef lowp_imat2x4 imat2x4; - typedef lowp_imat3x2 imat3x2; - typedef lowp_imat3x3 imat3x3; - typedef lowp_imat3x4 imat3x4; - typedef lowp_imat4x2 imat4x2; - typedef lowp_imat4x3 imat4x3; - typedef lowp_imat4x4 imat4x4; -#else //if(defined(GLM_PRECISION_MEDIUMP_INT)) - - /// Signed integer 2x2 matrix. - /// @see gtc_matrix_integer - typedef mediump_imat2 imat2; - - /// Signed integer 3x3 matrix. - /// @see gtc_matrix_integer - typedef mediump_imat3 imat3; - - /// Signed integer 4x4 matrix. - /// @see gtc_matrix_integer - typedef mediump_imat4 imat4; - - /// Signed integer 2x2 matrix. - /// @see gtc_matrix_integer - typedef mediump_imat2x2 imat2x2; - - /// Signed integer 2x3 matrix. - /// @see gtc_matrix_integer - typedef mediump_imat2x3 imat2x3; - - /// Signed integer 2x4 matrix. - /// @see gtc_matrix_integer - typedef mediump_imat2x4 imat2x4; - - /// Signed integer 3x2 matrix. - /// @see gtc_matrix_integer - typedef mediump_imat3x2 imat3x2; - - /// Signed integer 3x3 matrix. 
- /// @see gtc_matrix_integer - typedef mediump_imat3x3 imat3x3; - - /// Signed integer 3x4 matrix. - /// @see gtc_matrix_integer - typedef mediump_imat3x4 imat3x4; - - /// Signed integer 4x2 matrix. - /// @see gtc_matrix_integer - typedef mediump_imat4x2 imat4x2; - - /// Signed integer 4x3 matrix. - /// @see gtc_matrix_integer - typedef mediump_imat4x3 imat4x3; - - /// Signed integer 4x4 matrix. - /// @see gtc_matrix_integer - typedef mediump_imat4x4 imat4x4; -#endif//GLM_PRECISION - -#if(defined(GLM_PRECISION_HIGHP_UINT)) - typedef highp_umat2 umat2; - typedef highp_umat3 umat3; - typedef highp_umat4 umat4; - typedef highp_umat2x2 umat2x2; - typedef highp_umat2x3 umat2x3; - typedef highp_umat2x4 umat2x4; - typedef highp_umat3x2 umat3x2; - typedef highp_umat3x3 umat3x3; - typedef highp_umat3x4 umat3x4; - typedef highp_umat4x2 umat4x2; - typedef highp_umat4x3 umat4x3; - typedef highp_umat4x4 umat4x4; -#elif(defined(GLM_PRECISION_LOWP_UINT)) - typedef lowp_umat2 umat2; - typedef lowp_umat3 umat3; - typedef lowp_umat4 umat4; - typedef lowp_umat2x2 umat2x2; - typedef lowp_umat2x3 umat2x3; - typedef lowp_umat2x4 umat2x4; - typedef lowp_umat3x2 umat3x2; - typedef lowp_umat3x3 umat3x3; - typedef lowp_umat3x4 umat3x4; - typedef lowp_umat4x2 umat4x2; - typedef lowp_umat4x3 umat4x3; - typedef lowp_umat4x4 umat4x4; -#else //if(defined(GLM_PRECISION_MEDIUMP_UINT)) - - /// Unsigned integer 2x2 matrix. - /// @see gtc_matrix_integer - typedef mediump_umat2 umat2; - - /// Unsigned integer 3x3 matrix. - /// @see gtc_matrix_integer - typedef mediump_umat3 umat3; - - /// Unsigned integer 4x4 matrix. - /// @see gtc_matrix_integer - typedef mediump_umat4 umat4; - - /// Unsigned integer 2x2 matrix. - /// @see gtc_matrix_integer - typedef mediump_umat2x2 umat2x2; - - /// Unsigned integer 2x3 matrix. - /// @see gtc_matrix_integer - typedef mediump_umat2x3 umat2x3; - - /// Unsigned integer 2x4 matrix. - /// @see gtc_matrix_integer - typedef mediump_umat2x4 umat2x4; - - /// Unsigned integer 3x2 matrix. - /// @see gtc_matrix_integer - typedef mediump_umat3x2 umat3x2; - - /// Unsigned integer 3x3 matrix. - /// @see gtc_matrix_integer - typedef mediump_umat3x3 umat3x3; - - /// Unsigned integer 3x4 matrix. - /// @see gtc_matrix_integer - typedef mediump_umat3x4 umat3x4; - - /// Unsigned integer 4x2 matrix. - /// @see gtc_matrix_integer - typedef mediump_umat4x2 umat4x2; - - /// Unsigned integer 4x3 matrix. - /// @see gtc_matrix_integer - typedef mediump_umat4x3 umat4x3; - - /// Unsigned integer 4x4 matrix. - /// @see gtc_matrix_integer - typedef mediump_umat4x4 umat4x4; -#endif//GLM_PRECISION - - /// @} -}//namespace glm diff --git a/third_party/glm/gtc/matrix_inverse.hpp b/third_party/glm/gtc/matrix_inverse.hpp deleted file mode 100755 index a1900ad..0000000 --- a/third_party/glm/gtc/matrix_inverse.hpp +++ /dev/null @@ -1,50 +0,0 @@ -/// @ref gtc_matrix_inverse -/// @file glm/gtc/matrix_inverse.hpp -/// -/// @see core (dependence) -/// -/// @defgroup gtc_matrix_inverse GLM_GTC_matrix_inverse -/// @ingroup gtc -/// -/// Include to use the features of this extension. -/// -/// Defines additional matrix inverting functions. 
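For context, a minimal usage sketch of the GTC matrix helpers removed in this hunk (matrix_access, the matrix_integer typedefs, and the matrix_inverse functions introduced just above). It is illustrative only: the function name normal_matrix_demo is hypothetical, and it assumes the GLM headers remain reachable through an include path.

#include <glm/glm.hpp>
#include <glm/gtc/matrix_access.hpp>   // glm::row / glm::column
#include <glm/gtc/matrix_integer.hpp>  // glm::imat* / glm::umat* typedefs
#include <glm/gtc/matrix_inverse.hpp>  // glm::affineInverse / glm::inverseTranspose

glm::mat3 normal_matrix_demo()
{
	glm::mat4 model(1.0f);
	model = glm::column(model, 3, glm::vec4(1.0f, 2.0f, 3.0f, 1.0f)); // write the translation column
	glm::vec4 translation = glm::column(model, 3);                    // read it back
	glm::vec4 second_row  = glm::row(model, 1);                       // (0, 1, 0, 2) after the write above

	glm::mat4 inverse = glm::affineInverse(model);                    // cheap inverse for rotation+translation matrices
	glm::imat2 counts(static_cast<int>(second_row.y + translation.z + inverse[3][0])); // GTC_matrix_integer typedef
	(void)counts;

	return glm::inverseTranspose(glm::mat3(model));                   // classic normal-matrix construction
}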
- -#pragma once - -// Dependencies -#include "../detail/setup.hpp" -#include "../matrix.hpp" -#include "../mat2x2.hpp" -#include "../mat3x3.hpp" -#include "../mat4x4.hpp" - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# pragma message("GLM: GLM_GTC_matrix_inverse extension included") -#endif - -namespace glm -{ - /// @addtogroup gtc_matrix_inverse - /// @{ - - /// Fast matrix inverse for affine matrix. - /// - /// @param m Input matrix to invert. - /// @tparam genType Squared floating-point matrix: half, float or double. Inverse of matrix based of half-qualifier floating point value is highly innacurate. - /// @see gtc_matrix_inverse - template - GLM_FUNC_DECL genType affineInverse(genType const& m); - - /// Compute the inverse transpose of a matrix. - /// - /// @param m Input matrix to invert transpose. - /// @tparam genType Squared floating-point matrix: half, float or double. Inverse of matrix based of half-qualifier floating point value is highly innacurate. - /// @see gtc_matrix_inverse - template - GLM_FUNC_DECL genType inverseTranspose(genType const& m); - - /// @} -}//namespace glm - -#include "matrix_inverse.inl" diff --git a/third_party/glm/gtc/matrix_inverse.inl b/third_party/glm/gtc/matrix_inverse.inl deleted file mode 100755 index c004b9e..0000000 --- a/third_party/glm/gtc/matrix_inverse.inl +++ /dev/null @@ -1,118 +0,0 @@ -/// @ref gtc_matrix_inverse - -namespace glm -{ - template - GLM_FUNC_QUALIFIER mat<3, 3, T, Q> affineInverse(mat<3, 3, T, Q> const& m) - { - mat<2, 2, T, Q> const Inv(inverse(mat<2, 2, T, Q>(m))); - - return mat<3, 3, T, Q>( - vec<3, T, Q>(Inv[0], static_cast(0)), - vec<3, T, Q>(Inv[1], static_cast(0)), - vec<3, T, Q>(-Inv * vec<2, T, Q>(m[2]), static_cast(1))); - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, Q> affineInverse(mat<4, 4, T, Q> const& m) - { - mat<3, 3, T, Q> const Inv(inverse(mat<3, 3, T, Q>(m))); - - return mat<4, 4, T, Q>( - vec<4, T, Q>(Inv[0], static_cast(0)), - vec<4, T, Q>(Inv[1], static_cast(0)), - vec<4, T, Q>(Inv[2], static_cast(0)), - vec<4, T, Q>(-Inv * vec<3, T, Q>(m[3]), static_cast(1))); - } - - template - GLM_FUNC_QUALIFIER mat<2, 2, T, Q> inverseTranspose(mat<2, 2, T, Q> const& m) - { - T Determinant = m[0][0] * m[1][1] - m[1][0] * m[0][1]; - - mat<2, 2, T, Q> Inverse( - + m[1][1] / Determinant, - - m[0][1] / Determinant, - - m[1][0] / Determinant, - + m[0][0] / Determinant); - - return Inverse; - } - - template - GLM_FUNC_QUALIFIER mat<3, 3, T, Q> inverseTranspose(mat<3, 3, T, Q> const& m) - { - T Determinant = - + m[0][0] * (m[1][1] * m[2][2] - m[1][2] * m[2][1]) - - m[0][1] * (m[1][0] * m[2][2] - m[1][2] * m[2][0]) - + m[0][2] * (m[1][0] * m[2][1] - m[1][1] * m[2][0]); - - mat<3, 3, T, Q> Inverse; - Inverse[0][0] = + (m[1][1] * m[2][2] - m[2][1] * m[1][2]); - Inverse[0][1] = - (m[1][0] * m[2][2] - m[2][0] * m[1][2]); - Inverse[0][2] = + (m[1][0] * m[2][1] - m[2][0] * m[1][1]); - Inverse[1][0] = - (m[0][1] * m[2][2] - m[2][1] * m[0][2]); - Inverse[1][1] = + (m[0][0] * m[2][2] - m[2][0] * m[0][2]); - Inverse[1][2] = - (m[0][0] * m[2][1] - m[2][0] * m[0][1]); - Inverse[2][0] = + (m[0][1] * m[1][2] - m[1][1] * m[0][2]); - Inverse[2][1] = - (m[0][0] * m[1][2] - m[1][0] * m[0][2]); - Inverse[2][2] = + (m[0][0] * m[1][1] - m[1][0] * m[0][1]); - Inverse /= Determinant; - - return Inverse; - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, Q> inverseTranspose(mat<4, 4, T, Q> const& m) - { - T SubFactor00 = m[2][2] * m[3][3] - m[3][2] * m[2][3]; - T SubFactor01 = m[2][1] * m[3][3] - m[3][1] * m[2][3]; - T 
SubFactor02 = m[2][1] * m[3][2] - m[3][1] * m[2][2]; - T SubFactor03 = m[2][0] * m[3][3] - m[3][0] * m[2][3]; - T SubFactor04 = m[2][0] * m[3][2] - m[3][0] * m[2][2]; - T SubFactor05 = m[2][0] * m[3][1] - m[3][0] * m[2][1]; - T SubFactor06 = m[1][2] * m[3][3] - m[3][2] * m[1][3]; - T SubFactor07 = m[1][1] * m[3][3] - m[3][1] * m[1][3]; - T SubFactor08 = m[1][1] * m[3][2] - m[3][1] * m[1][2]; - T SubFactor09 = m[1][0] * m[3][3] - m[3][0] * m[1][3]; - T SubFactor10 = m[1][0] * m[3][2] - m[3][0] * m[1][2]; - T SubFactor11 = m[1][0] * m[3][1] - m[3][0] * m[1][1]; - T SubFactor12 = m[1][2] * m[2][3] - m[2][2] * m[1][3]; - T SubFactor13 = m[1][1] * m[2][3] - m[2][1] * m[1][3]; - T SubFactor14 = m[1][1] * m[2][2] - m[2][1] * m[1][2]; - T SubFactor15 = m[1][0] * m[2][3] - m[2][0] * m[1][3]; - T SubFactor16 = m[1][0] * m[2][2] - m[2][0] * m[1][2]; - T SubFactor17 = m[1][0] * m[2][1] - m[2][0] * m[1][1]; - - mat<4, 4, T, Q> Inverse; - Inverse[0][0] = + (m[1][1] * SubFactor00 - m[1][2] * SubFactor01 + m[1][3] * SubFactor02); - Inverse[0][1] = - (m[1][0] * SubFactor00 - m[1][2] * SubFactor03 + m[1][3] * SubFactor04); - Inverse[0][2] = + (m[1][0] * SubFactor01 - m[1][1] * SubFactor03 + m[1][3] * SubFactor05); - Inverse[0][3] = - (m[1][0] * SubFactor02 - m[1][1] * SubFactor04 + m[1][2] * SubFactor05); - - Inverse[1][0] = - (m[0][1] * SubFactor00 - m[0][2] * SubFactor01 + m[0][3] * SubFactor02); - Inverse[1][1] = + (m[0][0] * SubFactor00 - m[0][2] * SubFactor03 + m[0][3] * SubFactor04); - Inverse[1][2] = - (m[0][0] * SubFactor01 - m[0][1] * SubFactor03 + m[0][3] * SubFactor05); - Inverse[1][3] = + (m[0][0] * SubFactor02 - m[0][1] * SubFactor04 + m[0][2] * SubFactor05); - - Inverse[2][0] = + (m[0][1] * SubFactor06 - m[0][2] * SubFactor07 + m[0][3] * SubFactor08); - Inverse[2][1] = - (m[0][0] * SubFactor06 - m[0][2] * SubFactor09 + m[0][3] * SubFactor10); - Inverse[2][2] = + (m[0][0] * SubFactor07 - m[0][1] * SubFactor09 + m[0][3] * SubFactor11); - Inverse[2][3] = - (m[0][0] * SubFactor08 - m[0][1] * SubFactor10 + m[0][2] * SubFactor11); - - Inverse[3][0] = - (m[0][1] * SubFactor12 - m[0][2] * SubFactor13 + m[0][3] * SubFactor14); - Inverse[3][1] = + (m[0][0] * SubFactor12 - m[0][2] * SubFactor15 + m[0][3] * SubFactor16); - Inverse[3][2] = - (m[0][0] * SubFactor13 - m[0][1] * SubFactor15 + m[0][3] * SubFactor17); - Inverse[3][3] = + (m[0][0] * SubFactor14 - m[0][1] * SubFactor16 + m[0][2] * SubFactor17); - - T Determinant = - + m[0][0] * Inverse[0][0] - + m[0][1] * Inverse[0][1] - + m[0][2] * Inverse[0][2] - + m[0][3] * Inverse[0][3]; - - Inverse /= Determinant; - - return Inverse; - } -}//namespace glm diff --git a/third_party/glm/gtc/matrix_transform.hpp b/third_party/glm/gtc/matrix_transform.hpp deleted file mode 100755 index 612418f..0000000 --- a/third_party/glm/gtc/matrix_transform.hpp +++ /dev/null @@ -1,36 +0,0 @@ -/// @ref gtc_matrix_transform -/// @file glm/gtc/matrix_transform.hpp -/// -/// @see core (dependence) -/// @see gtx_transform -/// @see gtx_transform2 -/// -/// @defgroup gtc_matrix_transform GLM_GTC_matrix_transform -/// @ingroup gtc -/// -/// Include to use the features of this extension. -/// -/// Defines functions that generate common transformation matrices. -/// -/// The matrices generated by this extension use standard OpenGL fixed-function -/// conventions. For example, the lookAt function generates a transform from world -/// space into the specific eye space that the projective matrix functions -/// (perspective, ortho, etc) are designed to expect. 
The OpenGL compatibility -/// specifications defines the particular layout of this eye space. - -#pragma once - -// Dependencies -#include "../mat4x4.hpp" -#include "../vec2.hpp" -#include "../vec3.hpp" -#include "../vec4.hpp" -#include "../ext/matrix_projection.hpp" -#include "../ext/matrix_clip_space.hpp" -#include "../ext/matrix_transform.hpp" - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# pragma message("GLM: GLM_GTC_matrix_transform extension included") -#endif - -#include "matrix_transform.inl" diff --git a/third_party/glm/gtc/matrix_transform.inl b/third_party/glm/gtc/matrix_transform.inl deleted file mode 100755 index 15b46bc..0000000 --- a/third_party/glm/gtc/matrix_transform.inl +++ /dev/null @@ -1,3 +0,0 @@ -#include "../geometric.hpp" -#include "../trigonometric.hpp" -#include "../matrix.hpp" diff --git a/third_party/glm/gtc/noise.hpp b/third_party/glm/gtc/noise.hpp deleted file mode 100755 index ab1772e..0000000 --- a/third_party/glm/gtc/noise.hpp +++ /dev/null @@ -1,61 +0,0 @@ -/// @ref gtc_noise -/// @file glm/gtc/noise.hpp -/// -/// @see core (dependence) -/// -/// @defgroup gtc_noise GLM_GTC_noise -/// @ingroup gtc -/// -/// Include to use the features of this extension. -/// -/// Defines 2D, 3D and 4D procedural noise functions -/// Based on the work of Stefan Gustavson and Ashima Arts on "webgl-noise": -/// https://github.com/ashima/webgl-noise -/// Following Stefan Gustavson's paper "Simplex noise demystified": -/// http://www.itn.liu.se/~stegu/simplexnoise/simplexnoise.pdf - -#pragma once - -// Dependencies -#include "../detail/setup.hpp" -#include "../detail/qualifier.hpp" -#include "../detail/_noise.hpp" -#include "../geometric.hpp" -#include "../common.hpp" -#include "../vector_relational.hpp" -#include "../vec2.hpp" -#include "../vec3.hpp" -#include "../vec4.hpp" - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# pragma message("GLM: GLM_GTC_noise extension included") -#endif - -namespace glm -{ - /// @addtogroup gtc_noise - /// @{ - - /// Classic perlin noise. - /// @see gtc_noise - template - GLM_FUNC_DECL T perlin( - vec const& p); - - /// Periodic perlin noise. - /// @see gtc_noise - template - GLM_FUNC_DECL T perlin( - vec const& p, - vec const& rep); - - /// Simplex noise. 
- /// @see gtc_noise - template - GLM_FUNC_DECL T simplex( - vec const& p); - - /// @} -}//namespace glm - -#include "noise.inl" diff --git a/third_party/glm/gtc/noise.inl b/third_party/glm/gtc/noise.inl deleted file mode 100755 index 30d0b27..0000000 --- a/third_party/glm/gtc/noise.inl +++ /dev/null @@ -1,807 +0,0 @@ -/// @ref gtc_noise -/// -// Based on the work of Stefan Gustavson and Ashima Arts on "webgl-noise": -// https://github.com/ashima/webgl-noise -// Following Stefan Gustavson's paper "Simplex noise demystified": -// http://www.itn.liu.se/~stegu/simplexnoise/simplexnoise.pdf - -namespace glm{ -namespace gtc -{ - template - GLM_FUNC_QUALIFIER vec<4, T, Q> grad4(T const& j, vec<4, T, Q> const& ip) - { - vec<3, T, Q> pXYZ = floor(fract(vec<3, T, Q>(j) * vec<3, T, Q>(ip)) * T(7)) * ip[2] - T(1); - T pW = static_cast(1.5) - dot(abs(pXYZ), vec<3, T, Q>(1)); - vec<4, T, Q> s = vec<4, T, Q>(lessThan(vec<4, T, Q>(pXYZ, pW), vec<4, T, Q>(0.0))); - pXYZ = pXYZ + (vec<3, T, Q>(s) * T(2) - T(1)) * s.w; - return vec<4, T, Q>(pXYZ, pW); - } -}//namespace gtc - - // Classic Perlin noise - template - GLM_FUNC_QUALIFIER T perlin(vec<2, T, Q> const& Position) - { - vec<4, T, Q> Pi = glm::floor(vec<4, T, Q>(Position.x, Position.y, Position.x, Position.y)) + vec<4, T, Q>(0.0, 0.0, 1.0, 1.0); - vec<4, T, Q> Pf = glm::fract(vec<4, T, Q>(Position.x, Position.y, Position.x, Position.y)) - vec<4, T, Q>(0.0, 0.0, 1.0, 1.0); - Pi = mod(Pi, vec<4, T, Q>(289)); // To avoid truncation effects in permutation - vec<4, T, Q> ix(Pi.x, Pi.z, Pi.x, Pi.z); - vec<4, T, Q> iy(Pi.y, Pi.y, Pi.w, Pi.w); - vec<4, T, Q> fx(Pf.x, Pf.z, Pf.x, Pf.z); - vec<4, T, Q> fy(Pf.y, Pf.y, Pf.w, Pf.w); - - vec<4, T, Q> i = detail::permute(detail::permute(ix) + iy); - - vec<4, T, Q> gx = static_cast(2) * glm::fract(i / T(41)) - T(1); - vec<4, T, Q> gy = glm::abs(gx) - T(0.5); - vec<4, T, Q> tx = glm::floor(gx + T(0.5)); - gx = gx - tx; - - vec<2, T, Q> g00(gx.x, gy.x); - vec<2, T, Q> g10(gx.y, gy.y); - vec<2, T, Q> g01(gx.z, gy.z); - vec<2, T, Q> g11(gx.w, gy.w); - - vec<4, T, Q> norm = detail::taylorInvSqrt(vec<4, T, Q>(dot(g00, g00), dot(g01, g01), dot(g10, g10), dot(g11, g11))); - g00 *= norm.x; - g01 *= norm.y; - g10 *= norm.z; - g11 *= norm.w; - - T n00 = dot(g00, vec<2, T, Q>(fx.x, fy.x)); - T n10 = dot(g10, vec<2, T, Q>(fx.y, fy.y)); - T n01 = dot(g01, vec<2, T, Q>(fx.z, fy.z)); - T n11 = dot(g11, vec<2, T, Q>(fx.w, fy.w)); - - vec<2, T, Q> fade_xy = detail::fade(vec<2, T, Q>(Pf.x, Pf.y)); - vec<2, T, Q> n_x = mix(vec<2, T, Q>(n00, n01), vec<2, T, Q>(n10, n11), fade_xy.x); - T n_xy = mix(n_x.x, n_x.y, fade_xy.y); - return T(2.3) * n_xy; - } - - // Classic Perlin noise - template - GLM_FUNC_QUALIFIER T perlin(vec<3, T, Q> const& Position) - { - vec<3, T, Q> Pi0 = floor(Position); // Integer part for indexing - vec<3, T, Q> Pi1 = Pi0 + T(1); // Integer part + 1 - Pi0 = detail::mod289(Pi0); - Pi1 = detail::mod289(Pi1); - vec<3, T, Q> Pf0 = fract(Position); // Fractional part for interpolation - vec<3, T, Q> Pf1 = Pf0 - T(1); // Fractional part - 1.0 - vec<4, T, Q> ix(Pi0.x, Pi1.x, Pi0.x, Pi1.x); - vec<4, T, Q> iy = vec<4, T, Q>(vec<2, T, Q>(Pi0.y), vec<2, T, Q>(Pi1.y)); - vec<4, T, Q> iz0(Pi0.z); - vec<4, T, Q> iz1(Pi1.z); - - vec<4, T, Q> ixy = detail::permute(detail::permute(ix) + iy); - vec<4, T, Q> ixy0 = detail::permute(ixy + iz0); - vec<4, T, Q> ixy1 = detail::permute(ixy + iz1); - - vec<4, T, Q> gx0 = ixy0 * T(1.0 / 7.0); - vec<4, T, Q> gy0 = fract(floor(gx0) * T(1.0 / 7.0)) - T(0.5); - gx0 = fract(gx0); - vec<4, T, Q> gz0 = 
vec<4, T, Q>(0.5) - abs(gx0) - abs(gy0); - vec<4, T, Q> sz0 = step(gz0, vec<4, T, Q>(0.0)); - gx0 -= sz0 * (step(T(0), gx0) - T(0.5)); - gy0 -= sz0 * (step(T(0), gy0) - T(0.5)); - - vec<4, T, Q> gx1 = ixy1 * T(1.0 / 7.0); - vec<4, T, Q> gy1 = fract(floor(gx1) * T(1.0 / 7.0)) - T(0.5); - gx1 = fract(gx1); - vec<4, T, Q> gz1 = vec<4, T, Q>(0.5) - abs(gx1) - abs(gy1); - vec<4, T, Q> sz1 = step(gz1, vec<4, T, Q>(0.0)); - gx1 -= sz1 * (step(T(0), gx1) - T(0.5)); - gy1 -= sz1 * (step(T(0), gy1) - T(0.5)); - - vec<3, T, Q> g000(gx0.x, gy0.x, gz0.x); - vec<3, T, Q> g100(gx0.y, gy0.y, gz0.y); - vec<3, T, Q> g010(gx0.z, gy0.z, gz0.z); - vec<3, T, Q> g110(gx0.w, gy0.w, gz0.w); - vec<3, T, Q> g001(gx1.x, gy1.x, gz1.x); - vec<3, T, Q> g101(gx1.y, gy1.y, gz1.y); - vec<3, T, Q> g011(gx1.z, gy1.z, gz1.z); - vec<3, T, Q> g111(gx1.w, gy1.w, gz1.w); - - vec<4, T, Q> norm0 = detail::taylorInvSqrt(vec<4, T, Q>(dot(g000, g000), dot(g010, g010), dot(g100, g100), dot(g110, g110))); - g000 *= norm0.x; - g010 *= norm0.y; - g100 *= norm0.z; - g110 *= norm0.w; - vec<4, T, Q> norm1 = detail::taylorInvSqrt(vec<4, T, Q>(dot(g001, g001), dot(g011, g011), dot(g101, g101), dot(g111, g111))); - g001 *= norm1.x; - g011 *= norm1.y; - g101 *= norm1.z; - g111 *= norm1.w; - - T n000 = dot(g000, Pf0); - T n100 = dot(g100, vec<3, T, Q>(Pf1.x, Pf0.y, Pf0.z)); - T n010 = dot(g010, vec<3, T, Q>(Pf0.x, Pf1.y, Pf0.z)); - T n110 = dot(g110, vec<3, T, Q>(Pf1.x, Pf1.y, Pf0.z)); - T n001 = dot(g001, vec<3, T, Q>(Pf0.x, Pf0.y, Pf1.z)); - T n101 = dot(g101, vec<3, T, Q>(Pf1.x, Pf0.y, Pf1.z)); - T n011 = dot(g011, vec<3, T, Q>(Pf0.x, Pf1.y, Pf1.z)); - T n111 = dot(g111, Pf1); - - vec<3, T, Q> fade_xyz = detail::fade(Pf0); - vec<4, T, Q> n_z = mix(vec<4, T, Q>(n000, n100, n010, n110), vec<4, T, Q>(n001, n101, n011, n111), fade_xyz.z); - vec<2, T, Q> n_yz = mix(vec<2, T, Q>(n_z.x, n_z.y), vec<2, T, Q>(n_z.z, n_z.w), fade_xyz.y); - T n_xyz = mix(n_yz.x, n_yz.y, fade_xyz.x); - return T(2.2) * n_xyz; - } - /* - // Classic Perlin noise - template - GLM_FUNC_QUALIFIER T perlin(vec<3, T, Q> const& P) - { - vec<3, T, Q> Pi0 = floor(P); // Integer part for indexing - vec<3, T, Q> Pi1 = Pi0 + T(1); // Integer part + 1 - Pi0 = mod(Pi0, T(289)); - Pi1 = mod(Pi1, T(289)); - vec<3, T, Q> Pf0 = fract(P); // Fractional part for interpolation - vec<3, T, Q> Pf1 = Pf0 - T(1); // Fractional part - 1.0 - vec<4, T, Q> ix(Pi0.x, Pi1.x, Pi0.x, Pi1.x); - vec<4, T, Q> iy(Pi0.y, Pi0.y, Pi1.y, Pi1.y); - vec<4, T, Q> iz0(Pi0.z); - vec<4, T, Q> iz1(Pi1.z); - - vec<4, T, Q> ixy = permute(permute(ix) + iy); - vec<4, T, Q> ixy0 = permute(ixy + iz0); - vec<4, T, Q> ixy1 = permute(ixy + iz1); - - vec<4, T, Q> gx0 = ixy0 / T(7); - vec<4, T, Q> gy0 = fract(floor(gx0) / T(7)) - T(0.5); - gx0 = fract(gx0); - vec<4, T, Q> gz0 = vec<4, T, Q>(0.5) - abs(gx0) - abs(gy0); - vec<4, T, Q> sz0 = step(gz0, vec<4, T, Q>(0.0)); - gx0 -= sz0 * (step(0.0, gx0) - T(0.5)); - gy0 -= sz0 * (step(0.0, gy0) - T(0.5)); - - vec<4, T, Q> gx1 = ixy1 / T(7); - vec<4, T, Q> gy1 = fract(floor(gx1) / T(7)) - T(0.5); - gx1 = fract(gx1); - vec<4, T, Q> gz1 = vec<4, T, Q>(0.5) - abs(gx1) - abs(gy1); - vec<4, T, Q> sz1 = step(gz1, vec<4, T, Q>(0.0)); - gx1 -= sz1 * (step(T(0), gx1) - T(0.5)); - gy1 -= sz1 * (step(T(0), gy1) - T(0.5)); - - vec<3, T, Q> g000(gx0.x, gy0.x, gz0.x); - vec<3, T, Q> g100(gx0.y, gy0.y, gz0.y); - vec<3, T, Q> g010(gx0.z, gy0.z, gz0.z); - vec<3, T, Q> g110(gx0.w, gy0.w, gz0.w); - vec<3, T, Q> g001(gx1.x, gy1.x, gz1.x); - vec<3, T, Q> g101(gx1.y, gy1.y, gz1.y); - vec<3, T, Q> g011(gx1.z, 
gy1.z, gz1.z); - vec<3, T, Q> g111(gx1.w, gy1.w, gz1.w); - - vec<4, T, Q> norm0 = taylorInvSqrt(vec<4, T, Q>(dot(g000, g000), dot(g010, g010), dot(g100, g100), dot(g110, g110))); - g000 *= norm0.x; - g010 *= norm0.y; - g100 *= norm0.z; - g110 *= norm0.w; - vec<4, T, Q> norm1 = taylorInvSqrt(vec<4, T, Q>(dot(g001, g001), dot(g011, g011), dot(g101, g101), dot(g111, g111))); - g001 *= norm1.x; - g011 *= norm1.y; - g101 *= norm1.z; - g111 *= norm1.w; - - T n000 = dot(g000, Pf0); - T n100 = dot(g100, vec<3, T, Q>(Pf1.x, Pf0.y, Pf0.z)); - T n010 = dot(g010, vec<3, T, Q>(Pf0.x, Pf1.y, Pf0.z)); - T n110 = dot(g110, vec<3, T, Q>(Pf1.x, Pf1.y, Pf0.z)); - T n001 = dot(g001, vec<3, T, Q>(Pf0.x, Pf0.y, Pf1.z)); - T n101 = dot(g101, vec<3, T, Q>(Pf1.x, Pf0.y, Pf1.z)); - T n011 = dot(g011, vec<3, T, Q>(Pf0.x, Pf1.y, Pf1.z)); - T n111 = dot(g111, Pf1); - - vec<3, T, Q> fade_xyz = fade(Pf0); - vec<4, T, Q> n_z = mix(vec<4, T, Q>(n000, n100, n010, n110), vec<4, T, Q>(n001, n101, n011, n111), fade_xyz.z); - vec<2, T, Q> n_yz = mix( - vec<2, T, Q>(n_z.x, n_z.y), - vec<2, T, Q>(n_z.z, n_z.w), fade_xyz.y); - T n_xyz = mix(n_yz.x, n_yz.y, fade_xyz.x); - return T(2.2) * n_xyz; - } - */ - // Classic Perlin noise - template - GLM_FUNC_QUALIFIER T perlin(vec<4, T, Q> const& Position) - { - vec<4, T, Q> Pi0 = floor(Position); // Integer part for indexing - vec<4, T, Q> Pi1 = Pi0 + T(1); // Integer part + 1 - Pi0 = mod(Pi0, vec<4, T, Q>(289)); - Pi1 = mod(Pi1, vec<4, T, Q>(289)); - vec<4, T, Q> Pf0 = fract(Position); // Fractional part for interpolation - vec<4, T, Q> Pf1 = Pf0 - T(1); // Fractional part - 1.0 - vec<4, T, Q> ix(Pi0.x, Pi1.x, Pi0.x, Pi1.x); - vec<4, T, Q> iy(Pi0.y, Pi0.y, Pi1.y, Pi1.y); - vec<4, T, Q> iz0(Pi0.z); - vec<4, T, Q> iz1(Pi1.z); - vec<4, T, Q> iw0(Pi0.w); - vec<4, T, Q> iw1(Pi1.w); - - vec<4, T, Q> ixy = detail::permute(detail::permute(ix) + iy); - vec<4, T, Q> ixy0 = detail::permute(ixy + iz0); - vec<4, T, Q> ixy1 = detail::permute(ixy + iz1); - vec<4, T, Q> ixy00 = detail::permute(ixy0 + iw0); - vec<4, T, Q> ixy01 = detail::permute(ixy0 + iw1); - vec<4, T, Q> ixy10 = detail::permute(ixy1 + iw0); - vec<4, T, Q> ixy11 = detail::permute(ixy1 + iw1); - - vec<4, T, Q> gx00 = ixy00 / T(7); - vec<4, T, Q> gy00 = floor(gx00) / T(7); - vec<4, T, Q> gz00 = floor(gy00) / T(6); - gx00 = fract(gx00) - T(0.5); - gy00 = fract(gy00) - T(0.5); - gz00 = fract(gz00) - T(0.5); - vec<4, T, Q> gw00 = vec<4, T, Q>(0.75) - abs(gx00) - abs(gy00) - abs(gz00); - vec<4, T, Q> sw00 = step(gw00, vec<4, T, Q>(0.0)); - gx00 -= sw00 * (step(T(0), gx00) - T(0.5)); - gy00 -= sw00 * (step(T(0), gy00) - T(0.5)); - - vec<4, T, Q> gx01 = ixy01 / T(7); - vec<4, T, Q> gy01 = floor(gx01) / T(7); - vec<4, T, Q> gz01 = floor(gy01) / T(6); - gx01 = fract(gx01) - T(0.5); - gy01 = fract(gy01) - T(0.5); - gz01 = fract(gz01) - T(0.5); - vec<4, T, Q> gw01 = vec<4, T, Q>(0.75) - abs(gx01) - abs(gy01) - abs(gz01); - vec<4, T, Q> sw01 = step(gw01, vec<4, T, Q>(0.0)); - gx01 -= sw01 * (step(T(0), gx01) - T(0.5)); - gy01 -= sw01 * (step(T(0), gy01) - T(0.5)); - - vec<4, T, Q> gx10 = ixy10 / T(7); - vec<4, T, Q> gy10 = floor(gx10) / T(7); - vec<4, T, Q> gz10 = floor(gy10) / T(6); - gx10 = fract(gx10) - T(0.5); - gy10 = fract(gy10) - T(0.5); - gz10 = fract(gz10) - T(0.5); - vec<4, T, Q> gw10 = vec<4, T, Q>(0.75) - abs(gx10) - abs(gy10) - abs(gz10); - vec<4, T, Q> sw10 = step(gw10, vec<4, T, Q>(0)); - gx10 -= sw10 * (step(T(0), gx10) - T(0.5)); - gy10 -= sw10 * (step(T(0), gy10) - T(0.5)); - - vec<4, T, Q> gx11 = ixy11 / T(7); - vec<4, T, Q> gy11 
= floor(gx11) / T(7); - vec<4, T, Q> gz11 = floor(gy11) / T(6); - gx11 = fract(gx11) - T(0.5); - gy11 = fract(gy11) - T(0.5); - gz11 = fract(gz11) - T(0.5); - vec<4, T, Q> gw11 = vec<4, T, Q>(0.75) - abs(gx11) - abs(gy11) - abs(gz11); - vec<4, T, Q> sw11 = step(gw11, vec<4, T, Q>(0.0)); - gx11 -= sw11 * (step(T(0), gx11) - T(0.5)); - gy11 -= sw11 * (step(T(0), gy11) - T(0.5)); - - vec<4, T, Q> g0000(gx00.x, gy00.x, gz00.x, gw00.x); - vec<4, T, Q> g1000(gx00.y, gy00.y, gz00.y, gw00.y); - vec<4, T, Q> g0100(gx00.z, gy00.z, gz00.z, gw00.z); - vec<4, T, Q> g1100(gx00.w, gy00.w, gz00.w, gw00.w); - vec<4, T, Q> g0010(gx10.x, gy10.x, gz10.x, gw10.x); - vec<4, T, Q> g1010(gx10.y, gy10.y, gz10.y, gw10.y); - vec<4, T, Q> g0110(gx10.z, gy10.z, gz10.z, gw10.z); - vec<4, T, Q> g1110(gx10.w, gy10.w, gz10.w, gw10.w); - vec<4, T, Q> g0001(gx01.x, gy01.x, gz01.x, gw01.x); - vec<4, T, Q> g1001(gx01.y, gy01.y, gz01.y, gw01.y); - vec<4, T, Q> g0101(gx01.z, gy01.z, gz01.z, gw01.z); - vec<4, T, Q> g1101(gx01.w, gy01.w, gz01.w, gw01.w); - vec<4, T, Q> g0011(gx11.x, gy11.x, gz11.x, gw11.x); - vec<4, T, Q> g1011(gx11.y, gy11.y, gz11.y, gw11.y); - vec<4, T, Q> g0111(gx11.z, gy11.z, gz11.z, gw11.z); - vec<4, T, Q> g1111(gx11.w, gy11.w, gz11.w, gw11.w); - - vec<4, T, Q> norm00 = detail::taylorInvSqrt(vec<4, T, Q>(dot(g0000, g0000), dot(g0100, g0100), dot(g1000, g1000), dot(g1100, g1100))); - g0000 *= norm00.x; - g0100 *= norm00.y; - g1000 *= norm00.z; - g1100 *= norm00.w; - - vec<4, T, Q> norm01 = detail::taylorInvSqrt(vec<4, T, Q>(dot(g0001, g0001), dot(g0101, g0101), dot(g1001, g1001), dot(g1101, g1101))); - g0001 *= norm01.x; - g0101 *= norm01.y; - g1001 *= norm01.z; - g1101 *= norm01.w; - - vec<4, T, Q> norm10 = detail::taylorInvSqrt(vec<4, T, Q>(dot(g0010, g0010), dot(g0110, g0110), dot(g1010, g1010), dot(g1110, g1110))); - g0010 *= norm10.x; - g0110 *= norm10.y; - g1010 *= norm10.z; - g1110 *= norm10.w; - - vec<4, T, Q> norm11 = detail::taylorInvSqrt(vec<4, T, Q>(dot(g0011, g0011), dot(g0111, g0111), dot(g1011, g1011), dot(g1111, g1111))); - g0011 *= norm11.x; - g0111 *= norm11.y; - g1011 *= norm11.z; - g1111 *= norm11.w; - - T n0000 = dot(g0000, Pf0); - T n1000 = dot(g1000, vec<4, T, Q>(Pf1.x, Pf0.y, Pf0.z, Pf0.w)); - T n0100 = dot(g0100, vec<4, T, Q>(Pf0.x, Pf1.y, Pf0.z, Pf0.w)); - T n1100 = dot(g1100, vec<4, T, Q>(Pf1.x, Pf1.y, Pf0.z, Pf0.w)); - T n0010 = dot(g0010, vec<4, T, Q>(Pf0.x, Pf0.y, Pf1.z, Pf0.w)); - T n1010 = dot(g1010, vec<4, T, Q>(Pf1.x, Pf0.y, Pf1.z, Pf0.w)); - T n0110 = dot(g0110, vec<4, T, Q>(Pf0.x, Pf1.y, Pf1.z, Pf0.w)); - T n1110 = dot(g1110, vec<4, T, Q>(Pf1.x, Pf1.y, Pf1.z, Pf0.w)); - T n0001 = dot(g0001, vec<4, T, Q>(Pf0.x, Pf0.y, Pf0.z, Pf1.w)); - T n1001 = dot(g1001, vec<4, T, Q>(Pf1.x, Pf0.y, Pf0.z, Pf1.w)); - T n0101 = dot(g0101, vec<4, T, Q>(Pf0.x, Pf1.y, Pf0.z, Pf1.w)); - T n1101 = dot(g1101, vec<4, T, Q>(Pf1.x, Pf1.y, Pf0.z, Pf1.w)); - T n0011 = dot(g0011, vec<4, T, Q>(Pf0.x, Pf0.y, Pf1.z, Pf1.w)); - T n1011 = dot(g1011, vec<4, T, Q>(Pf1.x, Pf0.y, Pf1.z, Pf1.w)); - T n0111 = dot(g0111, vec<4, T, Q>(Pf0.x, Pf1.y, Pf1.z, Pf1.w)); - T n1111 = dot(g1111, Pf1); - - vec<4, T, Q> fade_xyzw = detail::fade(Pf0); - vec<4, T, Q> n_0w = mix(vec<4, T, Q>(n0000, n1000, n0100, n1100), vec<4, T, Q>(n0001, n1001, n0101, n1101), fade_xyzw.w); - vec<4, T, Q> n_1w = mix(vec<4, T, Q>(n0010, n1010, n0110, n1110), vec<4, T, Q>(n0011, n1011, n0111, n1111), fade_xyzw.w); - vec<4, T, Q> n_zw = mix(n_0w, n_1w, fade_xyzw.z); - vec<2, T, Q> n_yzw = mix(vec<2, T, Q>(n_zw.x, n_zw.y), vec<2, T, Q>(n_zw.z, 
n_zw.w), fade_xyzw.y); - T n_xyzw = mix(n_yzw.x, n_yzw.y, fade_xyzw.x); - return T(2.2) * n_xyzw; - } - - // Classic Perlin noise, periodic variant - template - GLM_FUNC_QUALIFIER T perlin(vec<2, T, Q> const& Position, vec<2, T, Q> const& rep) - { - vec<4, T, Q> Pi = floor(vec<4, T, Q>(Position.x, Position.y, Position.x, Position.y)) + vec<4, T, Q>(0.0, 0.0, 1.0, 1.0); - vec<4, T, Q> Pf = fract(vec<4, T, Q>(Position.x, Position.y, Position.x, Position.y)) - vec<4, T, Q>(0.0, 0.0, 1.0, 1.0); - Pi = mod(Pi, vec<4, T, Q>(rep.x, rep.y, rep.x, rep.y)); // To create noise with explicit period - Pi = mod(Pi, vec<4, T, Q>(289)); // To avoid truncation effects in permutation - vec<4, T, Q> ix(Pi.x, Pi.z, Pi.x, Pi.z); - vec<4, T, Q> iy(Pi.y, Pi.y, Pi.w, Pi.w); - vec<4, T, Q> fx(Pf.x, Pf.z, Pf.x, Pf.z); - vec<4, T, Q> fy(Pf.y, Pf.y, Pf.w, Pf.w); - - vec<4, T, Q> i = detail::permute(detail::permute(ix) + iy); - - vec<4, T, Q> gx = static_cast(2) * fract(i / T(41)) - T(1); - vec<4, T, Q> gy = abs(gx) - T(0.5); - vec<4, T, Q> tx = floor(gx + T(0.5)); - gx = gx - tx; - - vec<2, T, Q> g00(gx.x, gy.x); - vec<2, T, Q> g10(gx.y, gy.y); - vec<2, T, Q> g01(gx.z, gy.z); - vec<2, T, Q> g11(gx.w, gy.w); - - vec<4, T, Q> norm = detail::taylorInvSqrt(vec<4, T, Q>(dot(g00, g00), dot(g01, g01), dot(g10, g10), dot(g11, g11))); - g00 *= norm.x; - g01 *= norm.y; - g10 *= norm.z; - g11 *= norm.w; - - T n00 = dot(g00, vec<2, T, Q>(fx.x, fy.x)); - T n10 = dot(g10, vec<2, T, Q>(fx.y, fy.y)); - T n01 = dot(g01, vec<2, T, Q>(fx.z, fy.z)); - T n11 = dot(g11, vec<2, T, Q>(fx.w, fy.w)); - - vec<2, T, Q> fade_xy = detail::fade(vec<2, T, Q>(Pf.x, Pf.y)); - vec<2, T, Q> n_x = mix(vec<2, T, Q>(n00, n01), vec<2, T, Q>(n10, n11), fade_xy.x); - T n_xy = mix(n_x.x, n_x.y, fade_xy.y); - return T(2.3) * n_xy; - } - - // Classic Perlin noise, periodic variant - template - GLM_FUNC_QUALIFIER T perlin(vec<3, T, Q> const& Position, vec<3, T, Q> const& rep) - { - vec<3, T, Q> Pi0 = mod(floor(Position), rep); // Integer part, modulo period - vec<3, T, Q> Pi1 = mod(Pi0 + vec<3, T, Q>(T(1)), rep); // Integer part + 1, mod period - Pi0 = mod(Pi0, vec<3, T, Q>(289)); - Pi1 = mod(Pi1, vec<3, T, Q>(289)); - vec<3, T, Q> Pf0 = fract(Position); // Fractional part for interpolation - vec<3, T, Q> Pf1 = Pf0 - vec<3, T, Q>(T(1)); // Fractional part - 1.0 - vec<4, T, Q> ix = vec<4, T, Q>(Pi0.x, Pi1.x, Pi0.x, Pi1.x); - vec<4, T, Q> iy = vec<4, T, Q>(Pi0.y, Pi0.y, Pi1.y, Pi1.y); - vec<4, T, Q> iz0(Pi0.z); - vec<4, T, Q> iz1(Pi1.z); - - vec<4, T, Q> ixy = detail::permute(detail::permute(ix) + iy); - vec<4, T, Q> ixy0 = detail::permute(ixy + iz0); - vec<4, T, Q> ixy1 = detail::permute(ixy + iz1); - - vec<4, T, Q> gx0 = ixy0 / T(7); - vec<4, T, Q> gy0 = fract(floor(gx0) / T(7)) - T(0.5); - gx0 = fract(gx0); - vec<4, T, Q> gz0 = vec<4, T, Q>(0.5) - abs(gx0) - abs(gy0); - vec<4, T, Q> sz0 = step(gz0, vec<4, T, Q>(0)); - gx0 -= sz0 * (step(T(0), gx0) - T(0.5)); - gy0 -= sz0 * (step(T(0), gy0) - T(0.5)); - - vec<4, T, Q> gx1 = ixy1 / T(7); - vec<4, T, Q> gy1 = fract(floor(gx1) / T(7)) - T(0.5); - gx1 = fract(gx1); - vec<4, T, Q> gz1 = vec<4, T, Q>(0.5) - abs(gx1) - abs(gy1); - vec<4, T, Q> sz1 = step(gz1, vec<4, T, Q>(T(0))); - gx1 -= sz1 * (step(T(0), gx1) - T(0.5)); - gy1 -= sz1 * (step(T(0), gy1) - T(0.5)); - - vec<3, T, Q> g000 = vec<3, T, Q>(gx0.x, gy0.x, gz0.x); - vec<3, T, Q> g100 = vec<3, T, Q>(gx0.y, gy0.y, gz0.y); - vec<3, T, Q> g010 = vec<3, T, Q>(gx0.z, gy0.z, gz0.z); - vec<3, T, Q> g110 = vec<3, T, Q>(gx0.w, gy0.w, gz0.w); - vec<3, T, Q> g001 = vec<3, 
T, Q>(gx1.x, gy1.x, gz1.x); - vec<3, T, Q> g101 = vec<3, T, Q>(gx1.y, gy1.y, gz1.y); - vec<3, T, Q> g011 = vec<3, T, Q>(gx1.z, gy1.z, gz1.z); - vec<3, T, Q> g111 = vec<3, T, Q>(gx1.w, gy1.w, gz1.w); - - vec<4, T, Q> norm0 = detail::taylorInvSqrt(vec<4, T, Q>(dot(g000, g000), dot(g010, g010), dot(g100, g100), dot(g110, g110))); - g000 *= norm0.x; - g010 *= norm0.y; - g100 *= norm0.z; - g110 *= norm0.w; - vec<4, T, Q> norm1 = detail::taylorInvSqrt(vec<4, T, Q>(dot(g001, g001), dot(g011, g011), dot(g101, g101), dot(g111, g111))); - g001 *= norm1.x; - g011 *= norm1.y; - g101 *= norm1.z; - g111 *= norm1.w; - - T n000 = dot(g000, Pf0); - T n100 = dot(g100, vec<3, T, Q>(Pf1.x, Pf0.y, Pf0.z)); - T n010 = dot(g010, vec<3, T, Q>(Pf0.x, Pf1.y, Pf0.z)); - T n110 = dot(g110, vec<3, T, Q>(Pf1.x, Pf1.y, Pf0.z)); - T n001 = dot(g001, vec<3, T, Q>(Pf0.x, Pf0.y, Pf1.z)); - T n101 = dot(g101, vec<3, T, Q>(Pf1.x, Pf0.y, Pf1.z)); - T n011 = dot(g011, vec<3, T, Q>(Pf0.x, Pf1.y, Pf1.z)); - T n111 = dot(g111, Pf1); - - vec<3, T, Q> fade_xyz = detail::fade(Pf0); - vec<4, T, Q> n_z = mix(vec<4, T, Q>(n000, n100, n010, n110), vec<4, T, Q>(n001, n101, n011, n111), fade_xyz.z); - vec<2, T, Q> n_yz = mix(vec<2, T, Q>(n_z.x, n_z.y), vec<2, T, Q>(n_z.z, n_z.w), fade_xyz.y); - T n_xyz = mix(n_yz.x, n_yz.y, fade_xyz.x); - return T(2.2) * n_xyz; - } - - // Classic Perlin noise, periodic version - template - GLM_FUNC_QUALIFIER T perlin(vec<4, T, Q> const& Position, vec<4, T, Q> const& rep) - { - vec<4, T, Q> Pi0 = mod(floor(Position), rep); // Integer part modulo rep - vec<4, T, Q> Pi1 = mod(Pi0 + T(1), rep); // Integer part + 1 mod rep - vec<4, T, Q> Pf0 = fract(Position); // Fractional part for interpolation - vec<4, T, Q> Pf1 = Pf0 - T(1); // Fractional part - 1.0 - vec<4, T, Q> ix = vec<4, T, Q>(Pi0.x, Pi1.x, Pi0.x, Pi1.x); - vec<4, T, Q> iy = vec<4, T, Q>(Pi0.y, Pi0.y, Pi1.y, Pi1.y); - vec<4, T, Q> iz0(Pi0.z); - vec<4, T, Q> iz1(Pi1.z); - vec<4, T, Q> iw0(Pi0.w); - vec<4, T, Q> iw1(Pi1.w); - - vec<4, T, Q> ixy = detail::permute(detail::permute(ix) + iy); - vec<4, T, Q> ixy0 = detail::permute(ixy + iz0); - vec<4, T, Q> ixy1 = detail::permute(ixy + iz1); - vec<4, T, Q> ixy00 = detail::permute(ixy0 + iw0); - vec<4, T, Q> ixy01 = detail::permute(ixy0 + iw1); - vec<4, T, Q> ixy10 = detail::permute(ixy1 + iw0); - vec<4, T, Q> ixy11 = detail::permute(ixy1 + iw1); - - vec<4, T, Q> gx00 = ixy00 / T(7); - vec<4, T, Q> gy00 = floor(gx00) / T(7); - vec<4, T, Q> gz00 = floor(gy00) / T(6); - gx00 = fract(gx00) - T(0.5); - gy00 = fract(gy00) - T(0.5); - gz00 = fract(gz00) - T(0.5); - vec<4, T, Q> gw00 = vec<4, T, Q>(0.75) - abs(gx00) - abs(gy00) - abs(gz00); - vec<4, T, Q> sw00 = step(gw00, vec<4, T, Q>(0)); - gx00 -= sw00 * (step(T(0), gx00) - T(0.5)); - gy00 -= sw00 * (step(T(0), gy00) - T(0.5)); - - vec<4, T, Q> gx01 = ixy01 / T(7); - vec<4, T, Q> gy01 = floor(gx01) / T(7); - vec<4, T, Q> gz01 = floor(gy01) / T(6); - gx01 = fract(gx01) - T(0.5); - gy01 = fract(gy01) - T(0.5); - gz01 = fract(gz01) - T(0.5); - vec<4, T, Q> gw01 = vec<4, T, Q>(0.75) - abs(gx01) - abs(gy01) - abs(gz01); - vec<4, T, Q> sw01 = step(gw01, vec<4, T, Q>(0.0)); - gx01 -= sw01 * (step(T(0), gx01) - T(0.5)); - gy01 -= sw01 * (step(T(0), gy01) - T(0.5)); - - vec<4, T, Q> gx10 = ixy10 / T(7); - vec<4, T, Q> gy10 = floor(gx10) / T(7); - vec<4, T, Q> gz10 = floor(gy10) / T(6); - gx10 = fract(gx10) - T(0.5); - gy10 = fract(gy10) - T(0.5); - gz10 = fract(gz10) - T(0.5); - vec<4, T, Q> gw10 = vec<4, T, Q>(0.75) - abs(gx10) - abs(gy10) - abs(gz10); - vec<4, T, Q> sw10 
= step(gw10, vec<4, T, Q>(0.0)); - gx10 -= sw10 * (step(T(0), gx10) - T(0.5)); - gy10 -= sw10 * (step(T(0), gy10) - T(0.5)); - - vec<4, T, Q> gx11 = ixy11 / T(7); - vec<4, T, Q> gy11 = floor(gx11) / T(7); - vec<4, T, Q> gz11 = floor(gy11) / T(6); - gx11 = fract(gx11) - T(0.5); - gy11 = fract(gy11) - T(0.5); - gz11 = fract(gz11) - T(0.5); - vec<4, T, Q> gw11 = vec<4, T, Q>(0.75) - abs(gx11) - abs(gy11) - abs(gz11); - vec<4, T, Q> sw11 = step(gw11, vec<4, T, Q>(T(0))); - gx11 -= sw11 * (step(T(0), gx11) - T(0.5)); - gy11 -= sw11 * (step(T(0), gy11) - T(0.5)); - - vec<4, T, Q> g0000(gx00.x, gy00.x, gz00.x, gw00.x); - vec<4, T, Q> g1000(gx00.y, gy00.y, gz00.y, gw00.y); - vec<4, T, Q> g0100(gx00.z, gy00.z, gz00.z, gw00.z); - vec<4, T, Q> g1100(gx00.w, gy00.w, gz00.w, gw00.w); - vec<4, T, Q> g0010(gx10.x, gy10.x, gz10.x, gw10.x); - vec<4, T, Q> g1010(gx10.y, gy10.y, gz10.y, gw10.y); - vec<4, T, Q> g0110(gx10.z, gy10.z, gz10.z, gw10.z); - vec<4, T, Q> g1110(gx10.w, gy10.w, gz10.w, gw10.w); - vec<4, T, Q> g0001(gx01.x, gy01.x, gz01.x, gw01.x); - vec<4, T, Q> g1001(gx01.y, gy01.y, gz01.y, gw01.y); - vec<4, T, Q> g0101(gx01.z, gy01.z, gz01.z, gw01.z); - vec<4, T, Q> g1101(gx01.w, gy01.w, gz01.w, gw01.w); - vec<4, T, Q> g0011(gx11.x, gy11.x, gz11.x, gw11.x); - vec<4, T, Q> g1011(gx11.y, gy11.y, gz11.y, gw11.y); - vec<4, T, Q> g0111(gx11.z, gy11.z, gz11.z, gw11.z); - vec<4, T, Q> g1111(gx11.w, gy11.w, gz11.w, gw11.w); - - vec<4, T, Q> norm00 = detail::taylorInvSqrt(vec<4, T, Q>(dot(g0000, g0000), dot(g0100, g0100), dot(g1000, g1000), dot(g1100, g1100))); - g0000 *= norm00.x; - g0100 *= norm00.y; - g1000 *= norm00.z; - g1100 *= norm00.w; - - vec<4, T, Q> norm01 = detail::taylorInvSqrt(vec<4, T, Q>(dot(g0001, g0001), dot(g0101, g0101), dot(g1001, g1001), dot(g1101, g1101))); - g0001 *= norm01.x; - g0101 *= norm01.y; - g1001 *= norm01.z; - g1101 *= norm01.w; - - vec<4, T, Q> norm10 = detail::taylorInvSqrt(vec<4, T, Q>(dot(g0010, g0010), dot(g0110, g0110), dot(g1010, g1010), dot(g1110, g1110))); - g0010 *= norm10.x; - g0110 *= norm10.y; - g1010 *= norm10.z; - g1110 *= norm10.w; - - vec<4, T, Q> norm11 = detail::taylorInvSqrt(vec<4, T, Q>(dot(g0011, g0011), dot(g0111, g0111), dot(g1011, g1011), dot(g1111, g1111))); - g0011 *= norm11.x; - g0111 *= norm11.y; - g1011 *= norm11.z; - g1111 *= norm11.w; - - T n0000 = dot(g0000, Pf0); - T n1000 = dot(g1000, vec<4, T, Q>(Pf1.x, Pf0.y, Pf0.z, Pf0.w)); - T n0100 = dot(g0100, vec<4, T, Q>(Pf0.x, Pf1.y, Pf0.z, Pf0.w)); - T n1100 = dot(g1100, vec<4, T, Q>(Pf1.x, Pf1.y, Pf0.z, Pf0.w)); - T n0010 = dot(g0010, vec<4, T, Q>(Pf0.x, Pf0.y, Pf1.z, Pf0.w)); - T n1010 = dot(g1010, vec<4, T, Q>(Pf1.x, Pf0.y, Pf1.z, Pf0.w)); - T n0110 = dot(g0110, vec<4, T, Q>(Pf0.x, Pf1.y, Pf1.z, Pf0.w)); - T n1110 = dot(g1110, vec<4, T, Q>(Pf1.x, Pf1.y, Pf1.z, Pf0.w)); - T n0001 = dot(g0001, vec<4, T, Q>(Pf0.x, Pf0.y, Pf0.z, Pf1.w)); - T n1001 = dot(g1001, vec<4, T, Q>(Pf1.x, Pf0.y, Pf0.z, Pf1.w)); - T n0101 = dot(g0101, vec<4, T, Q>(Pf0.x, Pf1.y, Pf0.z, Pf1.w)); - T n1101 = dot(g1101, vec<4, T, Q>(Pf1.x, Pf1.y, Pf0.z, Pf1.w)); - T n0011 = dot(g0011, vec<4, T, Q>(Pf0.x, Pf0.y, Pf1.z, Pf1.w)); - T n1011 = dot(g1011, vec<4, T, Q>(Pf1.x, Pf0.y, Pf1.z, Pf1.w)); - T n0111 = dot(g0111, vec<4, T, Q>(Pf0.x, Pf1.y, Pf1.z, Pf1.w)); - T n1111 = dot(g1111, Pf1); - - vec<4, T, Q> fade_xyzw = detail::fade(Pf0); - vec<4, T, Q> n_0w = mix(vec<4, T, Q>(n0000, n1000, n0100, n1100), vec<4, T, Q>(n0001, n1001, n0101, n1101), fade_xyzw.w); - vec<4, T, Q> n_1w = mix(vec<4, T, Q>(n0010, n1010, n0110, n1110), vec<4, 
T, Q>(n0011, n1011, n0111, n1111), fade_xyzw.w); - vec<4, T, Q> n_zw = mix(n_0w, n_1w, fade_xyzw.z); - vec<2, T, Q> n_yzw = mix(vec<2, T, Q>(n_zw.x, n_zw.y), vec<2, T, Q>(n_zw.z, n_zw.w), fade_xyzw.y); - T n_xyzw = mix(n_yzw.x, n_yzw.y, fade_xyzw.x); - return T(2.2) * n_xyzw; - } - - template - GLM_FUNC_QUALIFIER T simplex(glm::vec<2, T, Q> const& v) - { - vec<4, T, Q> const C = vec<4, T, Q>( - T( 0.211324865405187), // (3.0 - sqrt(3.0)) / 6.0 - T( 0.366025403784439), // 0.5 * (sqrt(3.0) - 1.0) - T(-0.577350269189626), // -1.0 + 2.0 * C.x - T( 0.024390243902439)); // 1.0 / 41.0 - - // First corner - vec<2, T, Q> i = floor(v + dot(v, vec<2, T, Q>(C[1]))); - vec<2, T, Q> x0 = v - i + dot(i, vec<2, T, Q>(C[0])); - - // Other corners - //i1.x = step( x0.y, x0.x ); // x0.x > x0.y ? 1.0 : 0.0 - //i1.y = 1.0 - i1.x; - vec<2, T, Q> i1 = (x0.x > x0.y) ? vec<2, T, Q>(1, 0) : vec<2, T, Q>(0, 1); - // x0 = x0 - 0.0 + 0.0 * C.xx ; - // x1 = x0 - i1 + 1.0 * C.xx ; - // x2 = x0 - 1.0 + 2.0 * C.xx ; - vec<4, T, Q> x12 = vec<4, T, Q>(x0.x, x0.y, x0.x, x0.y) + vec<4, T, Q>(C.x, C.x, C.z, C.z); - x12 = vec<4, T, Q>(vec<2, T, Q>(x12) - i1, x12.z, x12.w); - - // Permutations - i = mod(i, vec<2, T, Q>(289)); // Avoid truncation effects in permutation - vec<3, T, Q> p = detail::permute( - detail::permute(i.y + vec<3, T, Q>(T(0), i1.y, T(1))) - + i.x + vec<3, T, Q>(T(0), i1.x, T(1))); - - vec<3, T, Q> m = max(vec<3, T, Q>(0.5) - vec<3, T, Q>( - dot(x0, x0), - dot(vec<2, T, Q>(x12.x, x12.y), vec<2, T, Q>(x12.x, x12.y)), - dot(vec<2, T, Q>(x12.z, x12.w), vec<2, T, Q>(x12.z, x12.w))), vec<3, T, Q>(0)); - m = m * m ; - m = m * m ; - - // Gradients: 41 points uniformly over a line, mapped onto a diamond. - // The ring size 17*17 = 289 is close to a multiple of 41 (41*7 = 287) - - vec<3, T, Q> x = static_cast(2) * fract(p * C.w) - T(1); - vec<3, T, Q> h = abs(x) - T(0.5); - vec<3, T, Q> ox = floor(x + T(0.5)); - vec<3, T, Q> a0 = x - ox; - - // Normalise gradients implicitly by scaling m - // Inlined for speed: m *= taylorInvSqrt( a0*a0 + h*h ); - m *= static_cast(1.79284291400159) - T(0.85373472095314) * (a0 * a0 + h * h); - - // Compute final noise value at P - vec<3, T, Q> g; - g.x = a0.x * x0.x + h.x * x0.y; - //g.yz = a0.yz * x12.xz + h.yz * x12.yw; - g.y = a0.y * x12.x + h.y * x12.y; - g.z = a0.z * x12.z + h.z * x12.w; - return T(130) * dot(m, g); - } - - template - GLM_FUNC_QUALIFIER T simplex(vec<3, T, Q> const& v) - { - vec<2, T, Q> const C(1.0 / 6.0, 1.0 / 3.0); - vec<4, T, Q> const D(0.0, 0.5, 1.0, 2.0); - - // First corner - vec<3, T, Q> i(floor(v + dot(v, vec<3, T, Q>(C.y)))); - vec<3, T, Q> x0(v - i + dot(i, vec<3, T, Q>(C.x))); - - // Other corners - vec<3, T, Q> g(step(vec<3, T, Q>(x0.y, x0.z, x0.x), x0)); - vec<3, T, Q> l(T(1) - g); - vec<3, T, Q> i1(min(g, vec<3, T, Q>(l.z, l.x, l.y))); - vec<3, T, Q> i2(max(g, vec<3, T, Q>(l.z, l.x, l.y))); - - // x0 = x0 - 0.0 + 0.0 * C.xxx; - // x1 = x0 - i1 + 1.0 * C.xxx; - // x2 = x0 - i2 + 2.0 * C.xxx; - // x3 = x0 - 1.0 + 3.0 * C.xxx; - vec<3, T, Q> x1(x0 - i1 + C.x); - vec<3, T, Q> x2(x0 - i2 + C.y); // 2.0*C.x = 1/3 = C.y - vec<3, T, Q> x3(x0 - D.y); // -1.0+3.0*C.x = -0.5 = -D.y - - // Permutations - i = detail::mod289(i); - vec<4, T, Q> p(detail::permute(detail::permute(detail::permute( - i.z + vec<4, T, Q>(T(0), i1.z, i2.z, T(1))) + - i.y + vec<4, T, Q>(T(0), i1.y, i2.y, T(1))) + - i.x + vec<4, T, Q>(T(0), i1.x, i2.x, T(1)))); - - // Gradients: 7x7 points over a square, mapped onto an octahedron. 
- // The ring size 17*17 = 289 is close to a multiple of 49 (49*6 = 294) - T n_ = static_cast(0.142857142857); // 1.0/7.0 - vec<3, T, Q> ns(n_ * vec<3, T, Q>(D.w, D.y, D.z) - vec<3, T, Q>(D.x, D.z, D.x)); - - vec<4, T, Q> j(p - T(49) * floor(p * ns.z * ns.z)); // mod(p,7*7) - - vec<4, T, Q> x_(floor(j * ns.z)); - vec<4, T, Q> y_(floor(j - T(7) * x_)); // mod(j,N) - - vec<4, T, Q> x(x_ * ns.x + ns.y); - vec<4, T, Q> y(y_ * ns.x + ns.y); - vec<4, T, Q> h(T(1) - abs(x) - abs(y)); - - vec<4, T, Q> b0(x.x, x.y, y.x, y.y); - vec<4, T, Q> b1(x.z, x.w, y.z, y.w); - - // vec4 s0 = vec4(lessThan(b0,0.0))*2.0 - 1.0; - // vec4 s1 = vec4(lessThan(b1,0.0))*2.0 - 1.0; - vec<4, T, Q> s0(floor(b0) * T(2) + T(1)); - vec<4, T, Q> s1(floor(b1) * T(2) + T(1)); - vec<4, T, Q> sh(-step(h, vec<4, T, Q>(0.0))); - - vec<4, T, Q> a0 = vec<4, T, Q>(b0.x, b0.z, b0.y, b0.w) + vec<4, T, Q>(s0.x, s0.z, s0.y, s0.w) * vec<4, T, Q>(sh.x, sh.x, sh.y, sh.y); - vec<4, T, Q> a1 = vec<4, T, Q>(b1.x, b1.z, b1.y, b1.w) + vec<4, T, Q>(s1.x, s1.z, s1.y, s1.w) * vec<4, T, Q>(sh.z, sh.z, sh.w, sh.w); - - vec<3, T, Q> p0(a0.x, a0.y, h.x); - vec<3, T, Q> p1(a0.z, a0.w, h.y); - vec<3, T, Q> p2(a1.x, a1.y, h.z); - vec<3, T, Q> p3(a1.z, a1.w, h.w); - - // Normalise gradients - vec<4, T, Q> norm = detail::taylorInvSqrt(vec<4, T, Q>(dot(p0, p0), dot(p1, p1), dot(p2, p2), dot(p3, p3))); - p0 *= norm.x; - p1 *= norm.y; - p2 *= norm.z; - p3 *= norm.w; - - // Mix final noise value - vec<4, T, Q> m = max(T(0.6) - vec<4, T, Q>(dot(x0, x0), dot(x1, x1), dot(x2, x2), dot(x3, x3)), vec<4, T, Q>(0)); - m = m * m; - return T(42) * dot(m * m, vec<4, T, Q>(dot(p0, x0), dot(p1, x1), dot(p2, x2), dot(p3, x3))); - } - - template - GLM_FUNC_QUALIFIER T simplex(vec<4, T, Q> const& v) - { - vec<4, T, Q> const C( - 0.138196601125011, // (5 - sqrt(5))/20 G4 - 0.276393202250021, // 2 * G4 - 0.414589803375032, // 3 * G4 - -0.447213595499958); // -1 + 4 * G4 - - // (sqrt(5) - 1)/4 = F4, used once below - T const F4 = static_cast(0.309016994374947451); - - // First corner - vec<4, T, Q> i = floor(v + dot(v, vec<4, T, Q>(F4))); - vec<4, T, Q> x0 = v - i + dot(i, vec<4, T, Q>(C.x)); - - // Other corners - - // Rank sorting originally contributed by Bill Licea-Kane, AMD (formerly ATI) - vec<4, T, Q> i0; - vec<3, T, Q> isX = step(vec<3, T, Q>(x0.y, x0.z, x0.w), vec<3, T, Q>(x0.x)); - vec<3, T, Q> isYZ = step(vec<3, T, Q>(x0.z, x0.w, x0.w), vec<3, T, Q>(x0.y, x0.y, x0.z)); - // i0.x = dot(isX, vec3(1.0)); - //i0.x = isX.x + isX.y + isX.z; - //i0.yzw = static_cast(1) - isX; - i0 = vec<4, T, Q>(isX.x + isX.y + isX.z, T(1) - isX); - // i0.y += dot(isYZ.xy, vec2(1.0)); - i0.y += isYZ.x + isYZ.y; - //i0.zw += 1.0 - vec<2, T, Q>(isYZ.x, isYZ.y); - i0.z += static_cast(1) - isYZ.x; - i0.w += static_cast(1) - isYZ.y; - i0.z += isYZ.z; - i0.w += static_cast(1) - isYZ.z; - - // i0 now contains the unique values 0,1,2,3 in each channel - vec<4, T, Q> i3 = clamp(i0, T(0), T(1)); - vec<4, T, Q> i2 = clamp(i0 - T(1), T(0), T(1)); - vec<4, T, Q> i1 = clamp(i0 - T(2), T(0), T(1)); - - // x0 = x0 - 0.0 + 0.0 * C.xxxx - // x1 = x0 - i1 + 0.0 * C.xxxx - // x2 = x0 - i2 + 0.0 * C.xxxx - // x3 = x0 - i3 + 0.0 * C.xxxx - // x4 = x0 - 1.0 + 4.0 * C.xxxx - vec<4, T, Q> x1 = x0 - i1 + C.x; - vec<4, T, Q> x2 = x0 - i2 + C.y; - vec<4, T, Q> x3 = x0 - i3 + C.z; - vec<4, T, Q> x4 = x0 + C.w; - - // Permutations - i = mod(i, vec<4, T, Q>(289)); - T j0 = detail::permute(detail::permute(detail::permute(detail::permute(i.w) + i.z) + i.y) + i.x); - vec<4, T, Q> j1 = 
detail::permute(detail::permute(detail::permute(detail::permute( - i.w + vec<4, T, Q>(i1.w, i2.w, i3.w, T(1))) + - i.z + vec<4, T, Q>(i1.z, i2.z, i3.z, T(1))) + - i.y + vec<4, T, Q>(i1.y, i2.y, i3.y, T(1))) + - i.x + vec<4, T, Q>(i1.x, i2.x, i3.x, T(1))); - - // Gradients: 7x7x6 points over a cube, mapped onto a 4-cross polytope - // 7*7*6 = 294, which is close to the ring size 17*17 = 289. - vec<4, T, Q> ip = vec<4, T, Q>(T(1) / T(294), T(1) / T(49), T(1) / T(7), T(0)); - - vec<4, T, Q> p0 = gtc::grad4(j0, ip); - vec<4, T, Q> p1 = gtc::grad4(j1.x, ip); - vec<4, T, Q> p2 = gtc::grad4(j1.y, ip); - vec<4, T, Q> p3 = gtc::grad4(j1.z, ip); - vec<4, T, Q> p4 = gtc::grad4(j1.w, ip); - - // Normalise gradients - vec<4, T, Q> norm = detail::taylorInvSqrt(vec<4, T, Q>(dot(p0, p0), dot(p1, p1), dot(p2, p2), dot(p3, p3))); - p0 *= norm.x; - p1 *= norm.y; - p2 *= norm.z; - p3 *= norm.w; - p4 *= detail::taylorInvSqrt(dot(p4, p4)); - - // Mix contributions from the five corners - vec<3, T, Q> m0 = max(T(0.6) - vec<3, T, Q>(dot(x0, x0), dot(x1, x1), dot(x2, x2)), vec<3, T, Q>(0)); - vec<2, T, Q> m1 = max(T(0.6) - vec<2, T, Q>(dot(x3, x3), dot(x4, x4) ), vec<2, T, Q>(0)); - m0 = m0 * m0; - m1 = m1 * m1; - return T(49) * - (dot(m0 * m0, vec<3, T, Q>(dot(p0, x0), dot(p1, x1), dot(p2, x2))) + - dot(m1 * m1, vec<2, T, Q>(dot(p3, x3), dot(p4, x4)))); - } -}//namespace glm diff --git a/third_party/glm/gtc/packing.hpp b/third_party/glm/gtc/packing.hpp deleted file mode 100755 index 7c64aba..0000000 --- a/third_party/glm/gtc/packing.hpp +++ /dev/null @@ -1,728 +0,0 @@ -/// @ref gtc_packing -/// @file glm/gtc/packing.hpp -/// -/// @see core (dependence) -/// -/// @defgroup gtc_packing GLM_GTC_packing -/// @ingroup gtc -/// -/// Include to use the features of this extension. -/// -/// This extension provides a set of function to convert vertors to packed -/// formats. - -#pragma once - -// Dependency: -#include "type_precision.hpp" - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# pragma message("GLM: GLM_GTC_packing extension included") -#endif - -namespace glm -{ - /// @addtogroup gtc_packing - /// @{ - - /// First, converts the normalized floating-point value v into a 8-bit integer value. - /// Then, the results are packed into the returned 8-bit unsigned integer. - /// - /// The conversion for component c of v to fixed point is done as follows: - /// packUnorm1x8: round(clamp(c, 0, +1) * 255.0) - /// - /// @see gtc_packing - /// @see uint16 packUnorm2x8(vec2 const& v) - /// @see uint32 packUnorm4x8(vec4 const& v) - /// @see GLSL packUnorm4x8 man page - /// @see GLSL 4.20.8 specification, section 8.4 Floating-Point Pack and Unpack Functions - GLM_FUNC_DECL uint8 packUnorm1x8(float v); - - /// Convert a single 8-bit integer to a normalized floating-point value. - /// - /// The conversion for unpacked fixed-point value f to floating point is done as follows: - /// unpackUnorm4x8: f / 255.0 - /// - /// @see gtc_packing - /// @see vec2 unpackUnorm2x8(uint16 p) - /// @see vec4 unpackUnorm4x8(uint32 p) - /// @see GLSL unpackUnorm4x8 man page - /// @see GLSL 4.20.8 specification, section 8.4 Floating-Point Pack and Unpack Functions - GLM_FUNC_DECL float unpackUnorm1x8(uint8 p); - - /// First, converts each component of the normalized floating-point value v into 8-bit integer values. - /// Then, the results are packed into the returned 16-bit unsigned integer. 
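As a point of reference for the noise kernels removed above, a minimal sketch of how they are typically called. The helper name terrain_height and the frequency/period constants are illustrative assumptions; output ranges are approximate and assume single-precision input vectors.

#include <glm/glm.hpp>
#include <glm/gtc/noise.hpp>

float terrain_height(float x, float y)
{
	float base   = glm::perlin(glm::vec2(x, y) * 0.05f);                   // classic Perlin, roughly in [-1, 1]
	float tiled  = glm::perlin(glm::vec2(x, y) * 0.05f, glm::vec2(16.0f)); // periodic variant with an explicit repeat of 16 lattice units
	float detail = glm::simplex(glm::vec3(x, y, 0.0f) * 0.25f);            // simplex noise evaluated on a 3D point
	return 10.0f * base + 2.0f * tiled + detail;
}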
- /// - /// The conversion for component c of v to fixed point is done as follows: - /// packUnorm2x8: round(clamp(c, 0, +1) * 255.0) - /// - /// The first component of the vector will be written to the least significant bits of the output; - /// the last component will be written to the most significant bits. - /// - /// @see gtc_packing - /// @see uint8 packUnorm1x8(float const& v) - /// @see uint32 packUnorm4x8(vec4 const& v) - /// @see GLSL packUnorm4x8 man page - /// @see GLSL 4.20.8 specification, section 8.4 Floating-Point Pack and Unpack Functions - GLM_FUNC_DECL uint16 packUnorm2x8(vec2 const& v); - - /// First, unpacks a single 16-bit unsigned integer p into a pair of 8-bit unsigned integers. - /// Then, each component is converted to a normalized floating-point value to generate the returned two-component vector. - /// - /// The conversion for unpacked fixed-point value f to floating point is done as follows: - /// unpackUnorm4x8: f / 255.0 - /// - /// The first component of the returned vector will be extracted from the least significant bits of the input; - /// the last component will be extracted from the most significant bits. - /// - /// @see gtc_packing - /// @see float unpackUnorm1x8(uint8 v) - /// @see vec4 unpackUnorm4x8(uint32 p) - /// @see GLSL unpackUnorm4x8 man page - /// @see GLSL 4.20.8 specification, section 8.4 Floating-Point Pack and Unpack Functions - GLM_FUNC_DECL vec2 unpackUnorm2x8(uint16 p); - - /// First, converts the normalized floating-point value v into 8-bit integer value. - /// Then, the results are packed into the returned 8-bit unsigned integer. - /// - /// The conversion to fixed point is done as follows: - /// packSnorm1x8: round(clamp(s, -1, +1) * 127.0) - /// - /// @see gtc_packing - /// @see uint16 packSnorm2x8(vec2 const& v) - /// @see uint32 packSnorm4x8(vec4 const& v) - /// @see GLSL packSnorm4x8 man page - /// @see GLSL 4.20.8 specification, section 8.4 Floating-Point Pack and Unpack Functions - GLM_FUNC_DECL uint8 packSnorm1x8(float s); - - /// First, unpacks a single 8-bit unsigned integer p into a single 8-bit signed integers. - /// Then, the value is converted to a normalized floating-point value to generate the returned scalar. - /// - /// The conversion for unpacked fixed-point value f to floating point is done as follows: - /// unpackSnorm1x8: clamp(f / 127.0, -1, +1) - /// - /// @see gtc_packing - /// @see vec2 unpackSnorm2x8(uint16 p) - /// @see vec4 unpackSnorm4x8(uint32 p) - /// @see GLSL unpackSnorm4x8 man page - /// @see GLSL 4.20.8 specification, section 8.4 Floating-Point Pack and Unpack Functions - GLM_FUNC_DECL float unpackSnorm1x8(uint8 p); - - /// First, converts each component of the normalized floating-point value v into 8-bit integer values. - /// Then, the results are packed into the returned 16-bit unsigned integer. - /// - /// The conversion for component c of v to fixed point is done as follows: - /// packSnorm2x8: round(clamp(c, -1, +1) * 127.0) - /// - /// The first component of the vector will be written to the least significant bits of the output; - /// the last component will be written to the most significant bits. - /// - /// @see gtc_packing - /// @see uint8 packSnorm1x8(float const& v) - /// @see uint32 packSnorm4x8(vec4 const& v) - /// @see GLSL packSnorm4x8 man page - /// @see GLSL 4.20.8 specification, section 8.4 Floating-Point Pack and Unpack Functions - GLM_FUNC_DECL uint16 packSnorm2x8(vec2 const& v); - - /// First, unpacks a single 16-bit unsigned integer p into a pair of 8-bit signed integers. 
- /// Then, each component is converted to a normalized floating-point value to generate the returned two-component vector. - /// - /// The conversion for unpacked fixed-point value f to floating point is done as follows: - /// unpackSnorm2x8: clamp(f / 127.0, -1, +1) - /// - /// The first component of the returned vector will be extracted from the least significant bits of the input; - /// the last component will be extracted from the most significant bits. - /// - /// @see gtc_packing - /// @see float unpackSnorm1x8(uint8 p) - /// @see vec4 unpackSnorm4x8(uint32 p) - /// @see GLSL unpackSnorm4x8 man page - /// @see GLSL 4.20.8 specification, section 8.4 Floating-Point Pack and Unpack Functions - GLM_FUNC_DECL vec2 unpackSnorm2x8(uint16 p); - - /// First, converts the normalized floating-point value v into a 16-bit integer value. - /// Then, the results are packed into the returned 16-bit unsigned integer. - /// - /// The conversion for component c of v to fixed point is done as follows: - /// packUnorm1x16: round(clamp(c, 0, +1) * 65535.0) - /// - /// @see gtc_packing - /// @see uint16 packSnorm1x16(float const& v) - /// @see uint64 packSnorm4x16(vec4 const& v) - /// @see GLSL packUnorm4x8 man page - /// @see GLSL 4.20.8 specification, section 8.4 Floating-Point Pack and Unpack Functions - GLM_FUNC_DECL uint16 packUnorm1x16(float v); - - /// First, unpacks a single 16-bit unsigned integer p into a of 16-bit unsigned integers. - /// Then, the value is converted to a normalized floating-point value to generate the returned scalar. - /// - /// The conversion for unpacked fixed-point value f to floating point is done as follows: - /// unpackUnorm1x16: f / 65535.0 - /// - /// @see gtc_packing - /// @see vec2 unpackUnorm2x16(uint32 p) - /// @see vec4 unpackUnorm4x16(uint64 p) - /// @see GLSL unpackUnorm2x16 man page - /// @see GLSL 4.20.8 specification, section 8.4 Floating-Point Pack and Unpack Functions - GLM_FUNC_DECL float unpackUnorm1x16(uint16 p); - - /// First, converts each component of the normalized floating-point value v into 16-bit integer values. - /// Then, the results are packed into the returned 64-bit unsigned integer. - /// - /// The conversion for component c of v to fixed point is done as follows: - /// packUnorm4x16: round(clamp(c, 0, +1) * 65535.0) - /// - /// The first component of the vector will be written to the least significant bits of the output; - /// the last component will be written to the most significant bits. - /// - /// @see gtc_packing - /// @see uint16 packUnorm1x16(float const& v) - /// @see uint32 packUnorm2x16(vec2 const& v) - /// @see GLSL packUnorm4x8 man page - /// @see GLSL 4.20.8 specification, section 8.4 Floating-Point Pack and Unpack Functions - GLM_FUNC_DECL uint64 packUnorm4x16(vec4 const& v); - - /// First, unpacks a single 64-bit unsigned integer p into four 16-bit unsigned integers. - /// Then, each component is converted to a normalized floating-point value to generate the returned four-component vector. - /// - /// The conversion for unpacked fixed-point value f to floating point is done as follows: - /// unpackUnormx4x16: f / 65535.0 - /// - /// The first component of the returned vector will be extracted from the least significant bits of the input; - /// the last component will be extracted from the most significant bits. 
- /// - /// @see gtc_packing - /// @see float unpackUnorm1x16(uint16 p) - /// @see vec2 unpackUnorm2x16(uint32 p) - /// @see GLSL unpackUnorm2x16 man page - /// @see GLSL 4.20.8 specification, section 8.4 Floating-Point Pack and Unpack Functions - GLM_FUNC_DECL vec4 unpackUnorm4x16(uint64 p); - - /// First, converts the normalized floating-point value v into 16-bit integer value. - /// Then, the results are packed into the returned 16-bit unsigned integer. - /// - /// The conversion to fixed point is done as follows: - /// packSnorm1x8: round(clamp(s, -1, +1) * 32767.0) - /// - /// @see gtc_packing - /// @see uint32 packSnorm2x16(vec2 const& v) - /// @see uint64 packSnorm4x16(vec4 const& v) - /// @see GLSL packSnorm4x8 man page - /// @see GLSL 4.20.8 specification, section 8.4 Floating-Point Pack and Unpack Functions - GLM_FUNC_DECL uint16 packSnorm1x16(float v); - - /// First, unpacks a single 16-bit unsigned integer p into a single 16-bit signed integers. - /// Then, each component is converted to a normalized floating-point value to generate the returned scalar. - /// - /// The conversion for unpacked fixed-point value f to floating point is done as follows: - /// unpackSnorm1x16: clamp(f / 32767.0, -1, +1) - /// - /// @see gtc_packing - /// @see vec2 unpackSnorm2x16(uint32 p) - /// @see vec4 unpackSnorm4x16(uint64 p) - /// @see GLSL unpackSnorm4x8 man page - /// @see GLSL 4.20.8 specification, section 8.4 Floating-Point Pack and Unpack Functions - GLM_FUNC_DECL float unpackSnorm1x16(uint16 p); - - /// First, converts each component of the normalized floating-point value v into 16-bit integer values. - /// Then, the results are packed into the returned 64-bit unsigned integer. - /// - /// The conversion for component c of v to fixed point is done as follows: - /// packSnorm2x8: round(clamp(c, -1, +1) * 32767.0) - /// - /// The first component of the vector will be written to the least significant bits of the output; - /// the last component will be written to the most significant bits. - /// - /// @see gtc_packing - /// @see uint16 packSnorm1x16(float const& v) - /// @see uint32 packSnorm2x16(vec2 const& v) - /// @see GLSL packSnorm4x8 man page - /// @see GLSL 4.20.8 specification, section 8.4 Floating-Point Pack and Unpack Functions - GLM_FUNC_DECL uint64 packSnorm4x16(vec4 const& v); - - /// First, unpacks a single 64-bit unsigned integer p into four 16-bit signed integers. - /// Then, each component is converted to a normalized floating-point value to generate the returned four-component vector. - /// - /// The conversion for unpacked fixed-point value f to floating point is done as follows: - /// unpackSnorm4x16: clamp(f / 32767.0, -1, +1) - /// - /// The first component of the returned vector will be extracted from the least significant bits of the input; - /// the last component will be extracted from the most significant bits. - /// - /// @see gtc_packing - /// @see float unpackSnorm1x16(uint16 p) - /// @see vec2 unpackSnorm2x16(uint32 p) - /// @see GLSL unpackSnorm4x8 man page - /// @see GLSL 4.20.8 specification, section 8.4 Floating-Point Pack and Unpack Functions - GLM_FUNC_DECL vec4 unpackSnorm4x16(uint64 p); - - /// Returns an unsigned integer obtained by converting the components of a floating-point scalar - /// to the 16-bit floating-point representation found in the OpenGL Specification, - /// and then packing this 16-bit value into a 16-bit unsigned integer. 
- /// - /// @see gtc_packing - /// @see uint32 packHalf2x16(vec2 const& v) - /// @see uint64 packHalf4x16(vec4 const& v) - /// @see GLSL packHalf2x16 man page - /// @see GLSL 4.20.8 specification, section 8.4 Floating-Point Pack and Unpack Functions - GLM_FUNC_DECL uint16 packHalf1x16(float v); - - /// Returns a floating-point scalar with components obtained by unpacking a 16-bit unsigned integer into a 16-bit value, - /// interpreted as a 16-bit floating-point number according to the OpenGL Specification, - /// and converting it to 32-bit floating-point values. - /// - /// @see gtc_packing - /// @see vec2 unpackHalf2x16(uint32 const& v) - /// @see vec4 unpackHalf4x16(uint64 const& v) - /// @see GLSL unpackHalf2x16 man page - /// @see GLSL 4.20.8 specification, section 8.4 Floating-Point Pack and Unpack Functions - GLM_FUNC_DECL float unpackHalf1x16(uint16 v); - - /// Returns an unsigned integer obtained by converting the components of a four-component floating-point vector - /// to the 16-bit floating-point representation found in the OpenGL Specification, - /// and then packing these four 16-bit values into a 64-bit unsigned integer. - /// The first vector component specifies the 16 least-significant bits of the result; - /// the forth component specifies the 16 most-significant bits. - /// - /// @see gtc_packing - /// @see uint16 packHalf1x16(float const& v) - /// @see uint32 packHalf2x16(vec2 const& v) - /// @see GLSL packHalf2x16 man page - /// @see GLSL 4.20.8 specification, section 8.4 Floating-Point Pack and Unpack Functions - GLM_FUNC_DECL uint64 packHalf4x16(vec4 const& v); - - /// Returns a four-component floating-point vector with components obtained by unpacking a 64-bit unsigned integer into four 16-bit values, - /// interpreting those values as 16-bit floating-point numbers according to the OpenGL Specification, - /// and converting them to 32-bit floating-point values. - /// The first component of the vector is obtained from the 16 least-significant bits of v; - /// the forth component is obtained from the 16 most-significant bits of v. - /// - /// @see gtc_packing - /// @see float unpackHalf1x16(uint16 const& v) - /// @see vec2 unpackHalf2x16(uint32 const& v) - /// @see GLSL unpackHalf2x16 man page - /// @see GLSL 4.20.8 specification, section 8.4 Floating-Point Pack and Unpack Functions - GLM_FUNC_DECL vec4 unpackHalf4x16(uint64 p); - - /// Returns an unsigned integer obtained by converting the components of a four-component signed integer vector - /// to the 10-10-10-2-bit signed integer representation found in the OpenGL Specification, - /// and then packing these four values into a 32-bit unsigned integer. - /// The first vector component specifies the 10 least-significant bits of the result; - /// the forth component specifies the 2 most-significant bits. - /// - /// @see gtc_packing - /// @see uint32 packI3x10_1x2(uvec4 const& v) - /// @see uint32 packSnorm3x10_1x2(vec4 const& v) - /// @see uint32 packUnorm3x10_1x2(vec4 const& v) - /// @see ivec4 unpackI3x10_1x2(uint32 const& p) - GLM_FUNC_DECL uint32 packI3x10_1x2(ivec4 const& v); - - /// Unpacks a single 32-bit unsigned integer p into three 10-bit and one 2-bit signed integers. - /// - /// The first component of the returned vector will be extracted from the least significant bits of the input; - /// the last component will be extracted from the most significant bits. 
- /// - /// @see gtc_packing - /// @see uint32 packU3x10_1x2(uvec4 const& v) - /// @see vec4 unpackSnorm3x10_1x2(uint32 const& p); - /// @see uvec4 unpackI3x10_1x2(uint32 const& p); - GLM_FUNC_DECL ivec4 unpackI3x10_1x2(uint32 p); - - /// Returns an unsigned integer obtained by converting the components of a four-component unsigned integer vector - /// to the 10-10-10-2-bit unsigned integer representation found in the OpenGL Specification, - /// and then packing these four values into a 32-bit unsigned integer. - /// The first vector component specifies the 10 least-significant bits of the result; - /// the forth component specifies the 2 most-significant bits. - /// - /// @see gtc_packing - /// @see uint32 packI3x10_1x2(ivec4 const& v) - /// @see uint32 packSnorm3x10_1x2(vec4 const& v) - /// @see uint32 packUnorm3x10_1x2(vec4 const& v) - /// @see ivec4 unpackU3x10_1x2(uint32 const& p) - GLM_FUNC_DECL uint32 packU3x10_1x2(uvec4 const& v); - - /// Unpacks a single 32-bit unsigned integer p into three 10-bit and one 2-bit unsigned integers. - /// - /// The first component of the returned vector will be extracted from the least significant bits of the input; - /// the last component will be extracted from the most significant bits. - /// - /// @see gtc_packing - /// @see uint32 packU3x10_1x2(uvec4 const& v) - /// @see vec4 unpackSnorm3x10_1x2(uint32 const& p); - /// @see uvec4 unpackI3x10_1x2(uint32 const& p); - GLM_FUNC_DECL uvec4 unpackU3x10_1x2(uint32 p); - - /// First, converts the first three components of the normalized floating-point value v into 10-bit signed integer values. - /// Then, converts the forth component of the normalized floating-point value v into 2-bit signed integer values. - /// Then, the results are packed into the returned 32-bit unsigned integer. - /// - /// The conversion for component c of v to fixed point is done as follows: - /// packSnorm3x10_1x2(xyz): round(clamp(c, -1, +1) * 511.0) - /// packSnorm3x10_1x2(w): round(clamp(c, -1, +1) * 1.0) - /// - /// The first vector component specifies the 10 least-significant bits of the result; - /// the forth component specifies the 2 most-significant bits. - /// - /// @see gtc_packing - /// @see vec4 unpackSnorm3x10_1x2(uint32 const& p) - /// @see uint32 packUnorm3x10_1x2(vec4 const& v) - /// @see uint32 packU3x10_1x2(uvec4 const& v) - /// @see uint32 packI3x10_1x2(ivec4 const& v) - GLM_FUNC_DECL uint32 packSnorm3x10_1x2(vec4 const& v); - - /// First, unpacks a single 32-bit unsigned integer p into four 16-bit signed integers. - /// Then, each component is converted to a normalized floating-point value to generate the returned four-component vector. - /// - /// The conversion for unpacked fixed-point value f to floating point is done as follows: - /// unpackSnorm3x10_1x2(xyz): clamp(f / 511.0, -1, +1) - /// unpackSnorm3x10_1x2(w): clamp(f / 511.0, -1, +1) - /// - /// The first component of the returned vector will be extracted from the least significant bits of the input; - /// the last component will be extracted from the most significant bits. - /// - /// @see gtc_packing - /// @see uint32 packSnorm3x10_1x2(vec4 const& v) - /// @see vec4 unpackUnorm3x10_1x2(uint32 const& p)) - /// @see uvec4 unpackI3x10_1x2(uint32 const& p) - /// @see uvec4 unpackU3x10_1x2(uint32 const& p) - GLM_FUNC_DECL vec4 unpackSnorm3x10_1x2(uint32 p); - - /// First, converts the first three components of the normalized floating-point value v into 10-bit unsigned integer values. 
- /// Then, converts the forth component of the normalized floating-point value v into 2-bit signed uninteger values. - /// Then, the results are packed into the returned 32-bit unsigned integer. - /// - /// The conversion for component c of v to fixed point is done as follows: - /// packUnorm3x10_1x2(xyz): round(clamp(c, 0, +1) * 1023.0) - /// packUnorm3x10_1x2(w): round(clamp(c, 0, +1) * 3.0) - /// - /// The first vector component specifies the 10 least-significant bits of the result; - /// the forth component specifies the 2 most-significant bits. - /// - /// @see gtc_packing - /// @see vec4 unpackUnorm3x10_1x2(uint32 const& p) - /// @see uint32 packUnorm3x10_1x2(vec4 const& v) - /// @see uint32 packU3x10_1x2(uvec4 const& v) - /// @see uint32 packI3x10_1x2(ivec4 const& v) - GLM_FUNC_DECL uint32 packUnorm3x10_1x2(vec4 const& v); - - /// First, unpacks a single 32-bit unsigned integer p into four 16-bit signed integers. - /// Then, each component is converted to a normalized floating-point value to generate the returned four-component vector. - /// - /// The conversion for unpacked fixed-point value f to floating point is done as follows: - /// unpackSnorm3x10_1x2(xyz): clamp(f / 1023.0, 0, +1) - /// unpackSnorm3x10_1x2(w): clamp(f / 3.0, 0, +1) - /// - /// The first component of the returned vector will be extracted from the least significant bits of the input; - /// the last component will be extracted from the most significant bits. - /// - /// @see gtc_packing - /// @see uint32 packSnorm3x10_1x2(vec4 const& v) - /// @see vec4 unpackInorm3x10_1x2(uint32 const& p)) - /// @see uvec4 unpackI3x10_1x2(uint32 const& p) - /// @see uvec4 unpackU3x10_1x2(uint32 const& p) - GLM_FUNC_DECL vec4 unpackUnorm3x10_1x2(uint32 p); - - /// First, converts the first two components of the normalized floating-point value v into 11-bit signless floating-point values. - /// Then, converts the third component of the normalized floating-point value v into a 10-bit signless floating-point value. - /// Then, the results are packed into the returned 32-bit unsigned integer. - /// - /// The first vector component specifies the 11 least-significant bits of the result; - /// the last component specifies the 10 most-significant bits. - /// - /// @see gtc_packing - /// @see vec3 unpackF2x11_1x10(uint32 const& p) - GLM_FUNC_DECL uint32 packF2x11_1x10(vec3 const& v); - - /// First, unpacks a single 32-bit unsigned integer p into two 11-bit signless floating-point values and one 10-bit signless floating-point value . - /// Then, each component is converted to a normalized floating-point value to generate the returned three-component vector. - /// - /// The first component of the returned vector will be extracted from the least significant bits of the input; - /// the last component will be extracted from the most significant bits. - /// - /// @see gtc_packing - /// @see uint32 packF2x11_1x10(vec3 const& v) - GLM_FUNC_DECL vec3 unpackF2x11_1x10(uint32 p); - - - /// First, converts the first two components of the normalized floating-point value v into 11-bit signless floating-point values. - /// Then, converts the third component of the normalized floating-point value v into a 10-bit signless floating-point value. - /// Then, the results are packed into the returned 32-bit unsigned integer. - /// - /// The first vector component specifies the 11 least-significant bits of the result; - /// the last component specifies the 10 most-significant bits. 
- /// - /// packF3x9_E1x5 allows encoding into RGBE / RGB9E5 format - /// - /// @see gtc_packing - /// @see vec3 unpackF3x9_E1x5(uint32 const& p) - GLM_FUNC_DECL uint32 packF3x9_E1x5(vec3 const& v); - - /// First, unpacks a single 32-bit unsigned integer p into two 11-bit signless floating-point values and one 10-bit signless floating-point value . - /// Then, each component is converted to a normalized floating-point value to generate the returned three-component vector. - /// - /// The first component of the returned vector will be extracted from the least significant bits of the input; - /// the last component will be extracted from the most significant bits. - /// - /// unpackF3x9_E1x5 allows decoding RGBE / RGB9E5 data - /// - /// @see gtc_packing - /// @see uint32 packF3x9_E1x5(vec3 const& v) - GLM_FUNC_DECL vec3 unpackF3x9_E1x5(uint32 p); - - /// Returns an unsigned integer vector obtained by converting the components of a floating-point vector - /// to the 16-bit floating-point representation found in the OpenGL Specification. - /// The first vector component specifies the 16 least-significant bits of the result; - /// the forth component specifies the 16 most-significant bits. - /// - /// @see gtc_packing - /// @see vec<3, T, Q> unpackRGBM(vec<4, T, Q> const& p) - /// @see GLSL 4.20.8 specification, section 8.4 Floating-Point Pack and Unpack Functions - template - GLM_FUNC_DECL vec<4, T, Q> packRGBM(vec<3, T, Q> const& rgb); - - /// Returns a floating-point vector with components obtained by reinterpreting an integer vector as 16-bit floating-point numbers and converting them to 32-bit floating-point values. - /// The first component of the vector is obtained from the 16 least-significant bits of v; - /// the forth component is obtained from the 16 most-significant bits of v. - /// - /// @see gtc_packing - /// @see vec<4, T, Q> packRGBM(vec<3, float, Q> const& v) - /// @see GLSL 4.20.8 specification, section 8.4 Floating-Point Pack and Unpack Functions - template - GLM_FUNC_DECL vec<3, T, Q> unpackRGBM(vec<4, T, Q> const& rgbm); - - /// Returns an unsigned integer vector obtained by converting the components of a floating-point vector - /// to the 16-bit floating-point representation found in the OpenGL Specification. - /// The first vector component specifies the 16 least-significant bits of the result; - /// the forth component specifies the 16 most-significant bits. - /// - /// @see gtc_packing - /// @see vec unpackHalf(vec const& p) - /// @see GLSL 4.20.8 specification, section 8.4 Floating-Point Pack and Unpack Functions - template - GLM_FUNC_DECL vec packHalf(vec const& v); - - /// Returns a floating-point vector with components obtained by reinterpreting an integer vector as 16-bit floating-point numbers and converting them to 32-bit floating-point values. - /// The first component of the vector is obtained from the 16 least-significant bits of v; - /// the forth component is obtained from the 16 most-significant bits of v. - /// - /// @see gtc_packing - /// @see vec packHalf(vec const& v) - /// @see GLSL 4.20.8 specification, section 8.4 Floating-Point Pack and Unpack Functions - template - GLM_FUNC_DECL vec unpackHalf(vec const& p); - - /// Convert each component of the normalized floating-point vector into unsigned integer values. - /// - /// @see gtc_packing - /// @see vec unpackUnorm(vec const& p); - template - GLM_FUNC_DECL vec packUnorm(vec const& v); - - /// Convert a packed integer to a normalized floating-point vector. 
- /// - /// @see gtc_packing - /// @see vec packUnorm(vec const& v) - template - GLM_FUNC_DECL vec unpackUnorm(vec const& v); - - /// Convert each component of the normalized floating-point vector into signed integer values. - /// - /// @see gtc_packing - /// @see vec unpackSnorm(vec const& p); - template - GLM_FUNC_DECL vec packSnorm(vec const& v); - - /// Convert a packed integer to a normalized floating-point vector. - /// - /// @see gtc_packing - /// @see vec packSnorm(vec const& v) - template - GLM_FUNC_DECL vec unpackSnorm(vec const& v); - - /// Convert each component of the normalized floating-point vector into unsigned integer values. - /// - /// @see gtc_packing - /// @see vec2 unpackUnorm2x4(uint8 p) - GLM_FUNC_DECL uint8 packUnorm2x4(vec2 const& v); - - /// Convert a packed integer to a normalized floating-point vector. - /// - /// @see gtc_packing - /// @see uint8 packUnorm2x4(vec2 const& v) - GLM_FUNC_DECL vec2 unpackUnorm2x4(uint8 p); - - /// Convert each component of the normalized floating-point vector into unsigned integer values. - /// - /// @see gtc_packing - /// @see vec4 unpackUnorm4x4(uint16 p) - GLM_FUNC_DECL uint16 packUnorm4x4(vec4 const& v); - - /// Convert a packed integer to a normalized floating-point vector. - /// - /// @see gtc_packing - /// @see uint16 packUnorm4x4(vec4 const& v) - GLM_FUNC_DECL vec4 unpackUnorm4x4(uint16 p); - - /// Convert each component of the normalized floating-point vector into unsigned integer values. - /// - /// @see gtc_packing - /// @see vec3 unpackUnorm1x5_1x6_1x5(uint16 p) - GLM_FUNC_DECL uint16 packUnorm1x5_1x6_1x5(vec3 const& v); - - /// Convert a packed integer to a normalized floating-point vector. - /// - /// @see gtc_packing - /// @see uint16 packUnorm1x5_1x6_1x5(vec3 const& v) - GLM_FUNC_DECL vec3 unpackUnorm1x5_1x6_1x5(uint16 p); - - /// Convert each component of the normalized floating-point vector into unsigned integer values. - /// - /// @see gtc_packing - /// @see vec4 unpackUnorm3x5_1x1(uint16 p) - GLM_FUNC_DECL uint16 packUnorm3x5_1x1(vec4 const& v); - - /// Convert a packed integer to a normalized floating-point vector. - /// - /// @see gtc_packing - /// @see uint16 packUnorm3x5_1x1(vec4 const& v) - GLM_FUNC_DECL vec4 unpackUnorm3x5_1x1(uint16 p); - - /// Convert each component of the normalized floating-point vector into unsigned integer values. - /// - /// @see gtc_packing - /// @see vec3 unpackUnorm2x3_1x2(uint8 p) - GLM_FUNC_DECL uint8 packUnorm2x3_1x2(vec3 const& v); - - /// Convert a packed integer to a normalized floating-point vector. - /// - /// @see gtc_packing - /// @see uint8 packUnorm2x3_1x2(vec3 const& v) - GLM_FUNC_DECL vec3 unpackUnorm2x3_1x2(uint8 p); - - - - /// Convert each component from an integer vector into a packed integer. - /// - /// @see gtc_packing - /// @see i8vec2 unpackInt2x8(int16 p) - GLM_FUNC_DECL int16 packInt2x8(i8vec2 const& v); - - /// Convert a packed integer into an integer vector. - /// - /// @see gtc_packing - /// @see int16 packInt2x8(i8vec2 const& v) - GLM_FUNC_DECL i8vec2 unpackInt2x8(int16 p); - - /// Convert each component from an integer vector into a packed unsigned integer. - /// - /// @see gtc_packing - /// @see u8vec2 unpackInt2x8(uint16 p) - GLM_FUNC_DECL uint16 packUint2x8(u8vec2 const& v); - - /// Convert a packed integer into an integer vector. - /// - /// @see gtc_packing - /// @see uint16 packInt2x8(u8vec2 const& v) - GLM_FUNC_DECL u8vec2 unpackUint2x8(uint16 p); - - /// Convert each component from an integer vector into a packed integer. 
- /// - /// @see gtc_packing - /// @see i8vec4 unpackInt4x8(int32 p) - GLM_FUNC_DECL int32 packInt4x8(i8vec4 const& v); - - /// Convert a packed integer into an integer vector. - /// - /// @see gtc_packing - /// @see int32 packInt2x8(i8vec4 const& v) - GLM_FUNC_DECL i8vec4 unpackInt4x8(int32 p); - - /// Convert each component from an integer vector into a packed unsigned integer. - /// - /// @see gtc_packing - /// @see u8vec4 unpackUint4x8(uint32 p) - GLM_FUNC_DECL uint32 packUint4x8(u8vec4 const& v); - - /// Convert a packed integer into an integer vector. - /// - /// @see gtc_packing - /// @see uint32 packUint4x8(u8vec2 const& v) - GLM_FUNC_DECL u8vec4 unpackUint4x8(uint32 p); - - /// Convert each component from an integer vector into a packed integer. - /// - /// @see gtc_packing - /// @see i16vec2 unpackInt2x16(int p) - GLM_FUNC_DECL int packInt2x16(i16vec2 const& v); - - /// Convert a packed integer into an integer vector. - /// - /// @see gtc_packing - /// @see int packInt2x16(i16vec2 const& v) - GLM_FUNC_DECL i16vec2 unpackInt2x16(int p); - - /// Convert each component from an integer vector into a packed integer. - /// - /// @see gtc_packing - /// @see i16vec4 unpackInt4x16(int64 p) - GLM_FUNC_DECL int64 packInt4x16(i16vec4 const& v); - - /// Convert a packed integer into an integer vector. - /// - /// @see gtc_packing - /// @see int64 packInt4x16(i16vec4 const& v) - GLM_FUNC_DECL i16vec4 unpackInt4x16(int64 p); - - /// Convert each component from an integer vector into a packed unsigned integer. - /// - /// @see gtc_packing - /// @see u16vec2 unpackUint2x16(uint p) - GLM_FUNC_DECL uint packUint2x16(u16vec2 const& v); - - /// Convert a packed integer into an integer vector. - /// - /// @see gtc_packing - /// @see uint packUint2x16(u16vec2 const& v) - GLM_FUNC_DECL u16vec2 unpackUint2x16(uint p); - - /// Convert each component from an integer vector into a packed unsigned integer. - /// - /// @see gtc_packing - /// @see u16vec4 unpackUint4x16(uint64 p) - GLM_FUNC_DECL uint64 packUint4x16(u16vec4 const& v); - - /// Convert a packed integer into an integer vector. - /// - /// @see gtc_packing - /// @see uint64 packUint4x16(u16vec4 const& v) - GLM_FUNC_DECL u16vec4 unpackUint4x16(uint64 p); - - /// Convert each component from an integer vector into a packed integer. - /// - /// @see gtc_packing - /// @see i32vec2 unpackInt2x32(int p) - GLM_FUNC_DECL int64 packInt2x32(i32vec2 const& v); - - /// Convert a packed integer into an integer vector. - /// - /// @see gtc_packing - /// @see int packInt2x16(i32vec2 const& v) - GLM_FUNC_DECL i32vec2 unpackInt2x32(int64 p); - - /// Convert each component from an integer vector into a packed unsigned integer. - /// - /// @see gtc_packing - /// @see u32vec2 unpackUint2x32(int p) - GLM_FUNC_DECL uint64 packUint2x32(u32vec2 const& v); - - /// Convert a packed integer into an integer vector. 
- /// - /// @see gtc_packing - /// @see int packUint2x16(u32vec2 const& v) - GLM_FUNC_DECL u32vec2 unpackUint2x32(uint64 p); - - - /// @} -}// namespace glm - -#include "packing.inl" diff --git a/third_party/glm/gtc/packing.inl b/third_party/glm/gtc/packing.inl deleted file mode 100755 index 8c906e1..0000000 --- a/third_party/glm/gtc/packing.inl +++ /dev/null @@ -1,938 +0,0 @@ -/// @ref gtc_packing - -#include "../ext/scalar_relational.hpp" -#include "../ext/vector_relational.hpp" -#include "../common.hpp" -#include "../vec2.hpp" -#include "../vec3.hpp" -#include "../vec4.hpp" -#include "../detail/type_half.hpp" -#include -#include - -namespace glm{ -namespace detail -{ - GLM_FUNC_QUALIFIER glm::uint16 float2half(glm::uint32 f) - { - // 10 bits => EE EEEFFFFF - // 11 bits => EEE EEFFFFFF - // Half bits => SEEEEEFF FFFFFFFF - // Float bits => SEEEEEEE EFFFFFFF FFFFFFFF FFFFFFFF - - // 0x00007c00 => 00000000 00000000 01111100 00000000 - // 0x000003ff => 00000000 00000000 00000011 11111111 - // 0x38000000 => 00111000 00000000 00000000 00000000 - // 0x7f800000 => 01111111 10000000 00000000 00000000 - // 0x00008000 => 00000000 00000000 10000000 00000000 - return - ((f >> 16) & 0x8000) | // sign - ((((f & 0x7f800000) - 0x38000000) >> 13) & 0x7c00) | // exponential - ((f >> 13) & 0x03ff); // Mantissa - } - - GLM_FUNC_QUALIFIER glm::uint32 float2packed11(glm::uint32 f) - { - // 10 bits => EE EEEFFFFF - // 11 bits => EEE EEFFFFFF - // Half bits => SEEEEEFF FFFFFFFF - // Float bits => SEEEEEEE EFFFFFFF FFFFFFFF FFFFFFFF - - // 0x000007c0 => 00000000 00000000 00000111 11000000 - // 0x00007c00 => 00000000 00000000 01111100 00000000 - // 0x000003ff => 00000000 00000000 00000011 11111111 - // 0x38000000 => 00111000 00000000 00000000 00000000 - // 0x7f800000 => 01111111 10000000 00000000 00000000 - // 0x00008000 => 00000000 00000000 10000000 00000000 - return - ((((f & 0x7f800000) - 0x38000000) >> 17) & 0x07c0) | // exponential - ((f >> 17) & 0x003f); // Mantissa - } - - GLM_FUNC_QUALIFIER glm::uint32 packed11ToFloat(glm::uint32 p) - { - // 10 bits => EE EEEFFFFF - // 11 bits => EEE EEFFFFFF - // Half bits => SEEEEEFF FFFFFFFF - // Float bits => SEEEEEEE EFFFFFFF FFFFFFFF FFFFFFFF - - // 0x000007c0 => 00000000 00000000 00000111 11000000 - // 0x00007c00 => 00000000 00000000 01111100 00000000 - // 0x000003ff => 00000000 00000000 00000011 11111111 - // 0x38000000 => 00111000 00000000 00000000 00000000 - // 0x7f800000 => 01111111 10000000 00000000 00000000 - // 0x00008000 => 00000000 00000000 10000000 00000000 - return - ((((p & 0x07c0) << 17) + 0x38000000) & 0x7f800000) | // exponential - ((p & 0x003f) << 17); // Mantissa - } - - GLM_FUNC_QUALIFIER glm::uint32 float2packed10(glm::uint32 f) - { - // 10 bits => EE EEEFFFFF - // 11 bits => EEE EEFFFFFF - // Half bits => SEEEEEFF FFFFFFFF - // Float bits => SEEEEEEE EFFFFFFF FFFFFFFF FFFFFFFF - - // 0x0000001F => 00000000 00000000 00000000 00011111 - // 0x0000003F => 00000000 00000000 00000000 00111111 - // 0x000003E0 => 00000000 00000000 00000011 11100000 - // 0x000007C0 => 00000000 00000000 00000111 11000000 - // 0x00007C00 => 00000000 00000000 01111100 00000000 - // 0x000003FF => 00000000 00000000 00000011 11111111 - // 0x38000000 => 00111000 00000000 00000000 00000000 - // 0x7f800000 => 01111111 10000000 00000000 00000000 - // 0x00008000 => 00000000 00000000 10000000 00000000 - return - ((((f & 0x7f800000) - 0x38000000) >> 18) & 0x03E0) | // exponential - ((f >> 18) & 0x001f); // Mantissa - } - - GLM_FUNC_QUALIFIER glm::uint32 packed10ToFloat(glm::uint32 p) 
- { - // 10 bits => EE EEEFFFFF - // 11 bits => EEE EEFFFFFF - // Half bits => SEEEEEFF FFFFFFFF - // Float bits => SEEEEEEE EFFFFFFF FFFFFFFF FFFFFFFF - - // 0x0000001F => 00000000 00000000 00000000 00011111 - // 0x0000003F => 00000000 00000000 00000000 00111111 - // 0x000003E0 => 00000000 00000000 00000011 11100000 - // 0x000007C0 => 00000000 00000000 00000111 11000000 - // 0x00007C00 => 00000000 00000000 01111100 00000000 - // 0x000003FF => 00000000 00000000 00000011 11111111 - // 0x38000000 => 00111000 00000000 00000000 00000000 - // 0x7f800000 => 01111111 10000000 00000000 00000000 - // 0x00008000 => 00000000 00000000 10000000 00000000 - return - ((((p & 0x03E0) << 18) + 0x38000000) & 0x7f800000) | // exponential - ((p & 0x001f) << 18); // Mantissa - } - - GLM_FUNC_QUALIFIER glm::uint half2float(glm::uint h) - { - return ((h & 0x8000) << 16) | ((( h & 0x7c00) + 0x1C000) << 13) | ((h & 0x03FF) << 13); - } - - GLM_FUNC_QUALIFIER glm::uint floatTo11bit(float x) - { - if(x == 0.0f) - return 0u; - else if(glm::isnan(x)) - return ~0u; - else if(glm::isinf(x)) - return 0x1Fu << 6u; - - uint Pack = 0u; - memcpy(&Pack, &x, sizeof(Pack)); - return float2packed11(Pack); - } - - GLM_FUNC_QUALIFIER float packed11bitToFloat(glm::uint x) - { - if(x == 0) - return 0.0f; - else if(x == ((1 << 11) - 1)) - return ~0;//NaN - else if(x == (0x1f << 6)) - return ~0;//Inf - - uint Result = packed11ToFloat(x); - - float Temp = 0; - memcpy(&Temp, &Result, sizeof(Temp)); - return Temp; - } - - GLM_FUNC_QUALIFIER glm::uint floatTo10bit(float x) - { - if(x == 0.0f) - return 0u; - else if(glm::isnan(x)) - return ~0u; - else if(glm::isinf(x)) - return 0x1Fu << 5u; - - uint Pack = 0; - memcpy(&Pack, &x, sizeof(Pack)); - return float2packed10(Pack); - } - - GLM_FUNC_QUALIFIER float packed10bitToFloat(glm::uint x) - { - if(x == 0) - return 0.0f; - else if(x == ((1 << 10) - 1)) - return ~0;//NaN - else if(x == (0x1f << 5)) - return ~0;//Inf - - uint Result = packed10ToFloat(x); - - float Temp = 0; - memcpy(&Temp, &Result, sizeof(Temp)); - return Temp; - } - -// GLM_FUNC_QUALIFIER glm::uint f11_f11_f10(float x, float y, float z) -// { -// return ((floatTo11bit(x) & ((1 << 11) - 1)) << 0) | ((floatTo11bit(y) & ((1 << 11) - 1)) << 11) | ((floatTo10bit(z) & ((1 << 10) - 1)) << 22); -// } - - union u3u3u2 - { - struct - { - uint x : 3; - uint y : 3; - uint z : 2; - } data; - uint8 pack; - }; - - union u4u4 - { - struct - { - uint x : 4; - uint y : 4; - } data; - uint8 pack; - }; - - union u4u4u4u4 - { - struct - { - uint x : 4; - uint y : 4; - uint z : 4; - uint w : 4; - } data; - uint16 pack; - }; - - union u5u6u5 - { - struct - { - uint x : 5; - uint y : 6; - uint z : 5; - } data; - uint16 pack; - }; - - union u5u5u5u1 - { - struct - { - uint x : 5; - uint y : 5; - uint z : 5; - uint w : 1; - } data; - uint16 pack; - }; - - union u10u10u10u2 - { - struct - { - uint x : 10; - uint y : 10; - uint z : 10; - uint w : 2; - } data; - uint32 pack; - }; - - union i10i10i10i2 - { - struct - { - int x : 10; - int y : 10; - int z : 10; - int w : 2; - } data; - uint32 pack; - }; - - union u9u9u9e5 - { - struct - { - uint x : 9; - uint y : 9; - uint z : 9; - uint w : 5; - } data; - uint32 pack; - }; - - template - struct compute_half - {}; - - template - struct compute_half<1, Q> - { - GLM_FUNC_QUALIFIER static vec<1, uint16, Q> pack(vec<1, float, Q> const& v) - { - int16 const Unpack(detail::toFloat16(v.x)); - u16vec1 Packed; - memcpy(&Packed, &Unpack, sizeof(Packed)); - return Packed; - } - - GLM_FUNC_QUALIFIER static vec<1, float, 
Q> unpack(vec<1, uint16, Q> const& v) - { - i16vec1 Unpack; - memcpy(&Unpack, &v, sizeof(Unpack)); - return vec<1, float, Q>(detail::toFloat32(v.x)); - } - }; - - template - struct compute_half<2, Q> - { - GLM_FUNC_QUALIFIER static vec<2, uint16, Q> pack(vec<2, float, Q> const& v) - { - vec<2, int16, Q> const Unpack(detail::toFloat16(v.x), detail::toFloat16(v.y)); - u16vec2 Packed; - memcpy(&Packed, &Unpack, sizeof(Packed)); - return Packed; - } - - GLM_FUNC_QUALIFIER static vec<2, float, Q> unpack(vec<2, uint16, Q> const& v) - { - i16vec2 Unpack; - memcpy(&Unpack, &v, sizeof(Unpack)); - return vec<2, float, Q>(detail::toFloat32(v.x), detail::toFloat32(v.y)); - } - }; - - template - struct compute_half<3, Q> - { - GLM_FUNC_QUALIFIER static vec<3, uint16, Q> pack(vec<3, float, Q> const& v) - { - vec<3, int16, Q> const Unpack(detail::toFloat16(v.x), detail::toFloat16(v.y), detail::toFloat16(v.z)); - u16vec3 Packed; - memcpy(&Packed, &Unpack, sizeof(Packed)); - return Packed; - } - - GLM_FUNC_QUALIFIER static vec<3, float, Q> unpack(vec<3, uint16, Q> const& v) - { - i16vec3 Unpack; - memcpy(&Unpack, &v, sizeof(Unpack)); - return vec<3, float, Q>(detail::toFloat32(v.x), detail::toFloat32(v.y), detail::toFloat32(v.z)); - } - }; - - template - struct compute_half<4, Q> - { - GLM_FUNC_QUALIFIER static vec<4, uint16, Q> pack(vec<4, float, Q> const& v) - { - vec<4, int16, Q> const Unpack(detail::toFloat16(v.x), detail::toFloat16(v.y), detail::toFloat16(v.z), detail::toFloat16(v.w)); - u16vec4 Packed; - memcpy(&Packed, &Unpack, sizeof(Packed)); - return Packed; - } - - GLM_FUNC_QUALIFIER static vec<4, float, Q> unpack(vec<4, uint16, Q> const& v) - { - i16vec4 Unpack; - memcpy(&Unpack, &v, sizeof(Unpack)); - return vec<4, float, Q>(detail::toFloat32(v.x), detail::toFloat32(v.y), detail::toFloat32(v.z), detail::toFloat32(v.w)); - } - }; -}//namespace detail - - GLM_FUNC_QUALIFIER uint8 packUnorm1x8(float v) - { - return static_cast(round(clamp(v, 0.0f, 1.0f) * 255.0f)); - } - - GLM_FUNC_QUALIFIER float unpackUnorm1x8(uint8 p) - { - float const Unpack(p); - return Unpack * static_cast(0.0039215686274509803921568627451); // 1 / 255 - } - - GLM_FUNC_QUALIFIER uint16 packUnorm2x8(vec2 const& v) - { - u8vec2 const Topack(round(clamp(v, 0.0f, 1.0f) * 255.0f)); - - uint16 Unpack = 0; - memcpy(&Unpack, &Topack, sizeof(Unpack)); - return Unpack; - } - - GLM_FUNC_QUALIFIER vec2 unpackUnorm2x8(uint16 p) - { - u8vec2 Unpack; - memcpy(&Unpack, &p, sizeof(Unpack)); - return vec2(Unpack) * float(0.0039215686274509803921568627451); // 1 / 255 - } - - GLM_FUNC_QUALIFIER uint8 packSnorm1x8(float v) - { - int8 const Topack(static_cast(round(clamp(v ,-1.0f, 1.0f) * 127.0f))); - uint8 Packed = 0; - memcpy(&Packed, &Topack, sizeof(Packed)); - return Packed; - } - - GLM_FUNC_QUALIFIER float unpackSnorm1x8(uint8 p) - { - int8 Unpack = 0; - memcpy(&Unpack, &p, sizeof(Unpack)); - return clamp( - static_cast(Unpack) * 0.00787401574803149606299212598425f, // 1.0f / 127.0f - -1.0f, 1.0f); - } - - GLM_FUNC_QUALIFIER uint16 packSnorm2x8(vec2 const& v) - { - i8vec2 const Topack(round(clamp(v, -1.0f, 1.0f) * 127.0f)); - uint16 Packed = 0; - memcpy(&Packed, &Topack, sizeof(Packed)); - return Packed; - } - - GLM_FUNC_QUALIFIER vec2 unpackSnorm2x8(uint16 p) - { - i8vec2 Unpack; - memcpy(&Unpack, &p, sizeof(Unpack)); - return clamp( - vec2(Unpack) * 0.00787401574803149606299212598425f, // 1.0f / 127.0f - -1.0f, 1.0f); - } - - GLM_FUNC_QUALIFIER uint16 packUnorm1x16(float s) - { - return static_cast(round(clamp(s, 0.0f, 1.0f) * 
65535.0f)); - } - - GLM_FUNC_QUALIFIER float unpackUnorm1x16(uint16 p) - { - float const Unpack(p); - return Unpack * 1.5259021896696421759365224689097e-5f; // 1.0 / 65535.0 - } - - GLM_FUNC_QUALIFIER uint64 packUnorm4x16(vec4 const& v) - { - u16vec4 const Topack(round(clamp(v , 0.0f, 1.0f) * 65535.0f)); - uint64 Packed = 0; - memcpy(&Packed, &Topack, sizeof(Packed)); - return Packed; - } - - GLM_FUNC_QUALIFIER vec4 unpackUnorm4x16(uint64 p) - { - u16vec4 Unpack; - memcpy(&Unpack, &p, sizeof(Unpack)); - return vec4(Unpack) * 1.5259021896696421759365224689097e-5f; // 1.0 / 65535.0 - } - - GLM_FUNC_QUALIFIER uint16 packSnorm1x16(float v) - { - int16 const Topack = static_cast(round(clamp(v ,-1.0f, 1.0f) * 32767.0f)); - uint16 Packed = 0; - memcpy(&Packed, &Topack, sizeof(Packed)); - return Packed; - } - - GLM_FUNC_QUALIFIER float unpackSnorm1x16(uint16 p) - { - int16 Unpack = 0; - memcpy(&Unpack, &p, sizeof(Unpack)); - return clamp( - static_cast(Unpack) * 3.0518509475997192297128208258309e-5f, //1.0f / 32767.0f, - -1.0f, 1.0f); - } - - GLM_FUNC_QUALIFIER uint64 packSnorm4x16(vec4 const& v) - { - i16vec4 const Topack(round(clamp(v ,-1.0f, 1.0f) * 32767.0f)); - uint64 Packed = 0; - memcpy(&Packed, &Topack, sizeof(Packed)); - return Packed; - } - - GLM_FUNC_QUALIFIER vec4 unpackSnorm4x16(uint64 p) - { - i16vec4 Unpack; - memcpy(&Unpack, &p, sizeof(Unpack)); - return clamp( - vec4(Unpack) * 3.0518509475997192297128208258309e-5f, //1.0f / 32767.0f, - -1.0f, 1.0f); - } - - GLM_FUNC_QUALIFIER uint16 packHalf1x16(float v) - { - int16 const Topack(detail::toFloat16(v)); - uint16 Packed = 0; - memcpy(&Packed, &Topack, sizeof(Packed)); - return Packed; - } - - GLM_FUNC_QUALIFIER float unpackHalf1x16(uint16 v) - { - int16 Unpack = 0; - memcpy(&Unpack, &v, sizeof(Unpack)); - return detail::toFloat32(Unpack); - } - - GLM_FUNC_QUALIFIER uint64 packHalf4x16(glm::vec4 const& v) - { - i16vec4 const Unpack( - detail::toFloat16(v.x), - detail::toFloat16(v.y), - detail::toFloat16(v.z), - detail::toFloat16(v.w)); - uint64 Packed = 0; - memcpy(&Packed, &Unpack, sizeof(Packed)); - return Packed; - } - - GLM_FUNC_QUALIFIER glm::vec4 unpackHalf4x16(uint64 v) - { - i16vec4 Unpack; - memcpy(&Unpack, &v, sizeof(Unpack)); - return vec4( - detail::toFloat32(Unpack.x), - detail::toFloat32(Unpack.y), - detail::toFloat32(Unpack.z), - detail::toFloat32(Unpack.w)); - } - - GLM_FUNC_QUALIFIER uint32 packI3x10_1x2(ivec4 const& v) - { - detail::i10i10i10i2 Result; - Result.data.x = v.x; - Result.data.y = v.y; - Result.data.z = v.z; - Result.data.w = v.w; - return Result.pack; - } - - GLM_FUNC_QUALIFIER ivec4 unpackI3x10_1x2(uint32 v) - { - detail::i10i10i10i2 Unpack; - Unpack.pack = v; - return ivec4( - Unpack.data.x, - Unpack.data.y, - Unpack.data.z, - Unpack.data.w); - } - - GLM_FUNC_QUALIFIER uint32 packU3x10_1x2(uvec4 const& v) - { - detail::u10u10u10u2 Result; - Result.data.x = v.x; - Result.data.y = v.y; - Result.data.z = v.z; - Result.data.w = v.w; - return Result.pack; - } - - GLM_FUNC_QUALIFIER uvec4 unpackU3x10_1x2(uint32 v) - { - detail::u10u10u10u2 Unpack; - Unpack.pack = v; - return uvec4( - Unpack.data.x, - Unpack.data.y, - Unpack.data.z, - Unpack.data.w); - } - - GLM_FUNC_QUALIFIER uint32 packSnorm3x10_1x2(vec4 const& v) - { - ivec4 const Pack(round(clamp(v,-1.0f, 1.0f) * vec4(511.f, 511.f, 511.f, 1.f))); - - detail::i10i10i10i2 Result; - Result.data.x = Pack.x; - Result.data.y = Pack.y; - Result.data.z = Pack.z; - Result.data.w = Pack.w; - return Result.pack; - } - - GLM_FUNC_QUALIFIER vec4 
unpackSnorm3x10_1x2(uint32 v) - { - detail::i10i10i10i2 Unpack; - Unpack.pack = v; - - vec4 const Result(Unpack.data.x, Unpack.data.y, Unpack.data.z, Unpack.data.w); - - return clamp(Result * vec4(1.f / 511.f, 1.f / 511.f, 1.f / 511.f, 1.f), -1.0f, 1.0f); - } - - GLM_FUNC_QUALIFIER uint32 packUnorm3x10_1x2(vec4 const& v) - { - uvec4 const Unpack(round(clamp(v, 0.0f, 1.0f) * vec4(1023.f, 1023.f, 1023.f, 3.f))); - - detail::u10u10u10u2 Result; - Result.data.x = Unpack.x; - Result.data.y = Unpack.y; - Result.data.z = Unpack.z; - Result.data.w = Unpack.w; - return Result.pack; - } - - GLM_FUNC_QUALIFIER vec4 unpackUnorm3x10_1x2(uint32 v) - { - vec4 const ScaleFactors(1.0f / 1023.f, 1.0f / 1023.f, 1.0f / 1023.f, 1.0f / 3.f); - - detail::u10u10u10u2 Unpack; - Unpack.pack = v; - return vec4(Unpack.data.x, Unpack.data.y, Unpack.data.z, Unpack.data.w) * ScaleFactors; - } - - GLM_FUNC_QUALIFIER uint32 packF2x11_1x10(vec3 const& v) - { - return - ((detail::floatTo11bit(v.x) & ((1 << 11) - 1)) << 0) | - ((detail::floatTo11bit(v.y) & ((1 << 11) - 1)) << 11) | - ((detail::floatTo10bit(v.z) & ((1 << 10) - 1)) << 22); - } - - GLM_FUNC_QUALIFIER vec3 unpackF2x11_1x10(uint32 v) - { - return vec3( - detail::packed11bitToFloat(v >> 0), - detail::packed11bitToFloat(v >> 11), - detail::packed10bitToFloat(v >> 22)); - } - - GLM_FUNC_QUALIFIER uint32 packF3x9_E1x5(vec3 const& v) - { - float const SharedExpMax = (pow(2.0f, 9.0f - 1.0f) / pow(2.0f, 9.0f)) * pow(2.0f, 31.f - 15.f); - vec3 const Color = clamp(v, 0.0f, SharedExpMax); - float const MaxColor = max(Color.x, max(Color.y, Color.z)); - - float const ExpSharedP = max(-15.f - 1.f, floor(log2(MaxColor))) + 1.0f + 15.f; - float const MaxShared = floor(MaxColor / pow(2.0f, (ExpSharedP - 15.f - 9.f)) + 0.5f); - float const ExpShared = equal(MaxShared, pow(2.0f, 9.0f), epsilon()) ? 
ExpSharedP + 1.0f : ExpSharedP; - - uvec3 const ColorComp(floor(Color / pow(2.f, (ExpShared - 15.f - 9.f)) + 0.5f)); - - detail::u9u9u9e5 Unpack; - Unpack.data.x = ColorComp.x; - Unpack.data.y = ColorComp.y; - Unpack.data.z = ColorComp.z; - Unpack.data.w = uint(ExpShared); - return Unpack.pack; - } - - GLM_FUNC_QUALIFIER vec3 unpackF3x9_E1x5(uint32 v) - { - detail::u9u9u9e5 Unpack; - Unpack.pack = v; - - return vec3(Unpack.data.x, Unpack.data.y, Unpack.data.z) * pow(2.0f, Unpack.data.w - 15.f - 9.f); - } - - // Based on Brian Karis http://graphicrants.blogspot.fr/2009/04/rgbm-color-encoding.html - template - GLM_FUNC_QUALIFIER vec<4, T, Q> packRGBM(vec<3, T, Q> const& rgb) - { - vec<3, T, Q> const Color(rgb * static_cast(1.0 / 6.0)); - T Alpha = clamp(max(max(Color.x, Color.y), max(Color.z, static_cast(1e-6))), static_cast(0), static_cast(1)); - Alpha = ceil(Alpha * static_cast(255.0)) / static_cast(255.0); - return vec<4, T, Q>(Color / Alpha, Alpha); - } - - template - GLM_FUNC_QUALIFIER vec<3, T, Q> unpackRGBM(vec<4, T, Q> const& rgbm) - { - return vec<3, T, Q>(rgbm.x, rgbm.y, rgbm.z) * rgbm.w * static_cast(6); - } - - template - GLM_FUNC_QUALIFIER vec packHalf(vec const& v) - { - return detail::compute_half::pack(v); - } - - template - GLM_FUNC_QUALIFIER vec unpackHalf(vec const& v) - { - return detail::compute_half::unpack(v); - } - - template - GLM_FUNC_QUALIFIER vec packUnorm(vec const& v) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_integer, "uintType must be an integer type"); - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "floatType must be a floating point type"); - - return vec(round(clamp(v, static_cast(0), static_cast(1)) * static_cast(std::numeric_limits::max()))); - } - - template - GLM_FUNC_QUALIFIER vec unpackUnorm(vec const& v) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_integer, "uintType must be an integer type"); - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "floatType must be a floating point type"); - - return vec(v) * (static_cast(1) / static_cast(std::numeric_limits::max())); - } - - template - GLM_FUNC_QUALIFIER vec packSnorm(vec const& v) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_integer, "uintType must be an integer type"); - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "floatType must be a floating point type"); - - return vec(round(clamp(v , static_cast(-1), static_cast(1)) * static_cast(std::numeric_limits::max()))); - } - - template - GLM_FUNC_QUALIFIER vec unpackSnorm(vec const& v) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_integer, "uintType must be an integer type"); - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "floatType must be a floating point type"); - - return clamp(vec(v) * (static_cast(1) / static_cast(std::numeric_limits::max())), static_cast(-1), static_cast(1)); - } - - GLM_FUNC_QUALIFIER uint8 packUnorm2x4(vec2 const& v) - { - u32vec2 const Unpack(round(clamp(v, 0.0f, 1.0f) * 15.0f)); - detail::u4u4 Result; - Result.data.x = Unpack.x; - Result.data.y = Unpack.y; - return Result.pack; - } - - GLM_FUNC_QUALIFIER vec2 unpackUnorm2x4(uint8 v) - { - float const ScaleFactor(1.f / 15.f); - detail::u4u4 Unpack; - Unpack.pack = v; - return vec2(Unpack.data.x, Unpack.data.y) * ScaleFactor; - } - - GLM_FUNC_QUALIFIER uint16 packUnorm4x4(vec4 const& v) - { - u32vec4 const Unpack(round(clamp(v, 0.0f, 1.0f) * 15.0f)); - detail::u4u4u4u4 Result; - Result.data.x = Unpack.x; - Result.data.y = Unpack.y; - Result.data.z = Unpack.z; - Result.data.w = Unpack.w; - return Result.pack; - } - - GLM_FUNC_QUALIFIER vec4 
unpackUnorm4x4(uint16 v) - { - float const ScaleFactor(1.f / 15.f); - detail::u4u4u4u4 Unpack; - Unpack.pack = v; - return vec4(Unpack.data.x, Unpack.data.y, Unpack.data.z, Unpack.data.w) * ScaleFactor; - } - - GLM_FUNC_QUALIFIER uint16 packUnorm1x5_1x6_1x5(vec3 const& v) - { - u32vec3 const Unpack(round(clamp(v, 0.0f, 1.0f) * vec3(31.f, 63.f, 31.f))); - detail::u5u6u5 Result; - Result.data.x = Unpack.x; - Result.data.y = Unpack.y; - Result.data.z = Unpack.z; - return Result.pack; - } - - GLM_FUNC_QUALIFIER vec3 unpackUnorm1x5_1x6_1x5(uint16 v) - { - vec3 const ScaleFactor(1.f / 31.f, 1.f / 63.f, 1.f / 31.f); - detail::u5u6u5 Unpack; - Unpack.pack = v; - return vec3(Unpack.data.x, Unpack.data.y, Unpack.data.z) * ScaleFactor; - } - - GLM_FUNC_QUALIFIER uint16 packUnorm3x5_1x1(vec4 const& v) - { - u32vec4 const Unpack(round(clamp(v, 0.0f, 1.0f) * vec4(31.f, 31.f, 31.f, 1.f))); - detail::u5u5u5u1 Result; - Result.data.x = Unpack.x; - Result.data.y = Unpack.y; - Result.data.z = Unpack.z; - Result.data.w = Unpack.w; - return Result.pack; - } - - GLM_FUNC_QUALIFIER vec4 unpackUnorm3x5_1x1(uint16 v) - { - vec4 const ScaleFactor(1.f / 31.f, 1.f / 31.f, 1.f / 31.f, 1.f); - detail::u5u5u5u1 Unpack; - Unpack.pack = v; - return vec4(Unpack.data.x, Unpack.data.y, Unpack.data.z, Unpack.data.w) * ScaleFactor; - } - - GLM_FUNC_QUALIFIER uint8 packUnorm2x3_1x2(vec3 const& v) - { - u32vec3 const Unpack(round(clamp(v, 0.0f, 1.0f) * vec3(7.f, 7.f, 3.f))); - detail::u3u3u2 Result; - Result.data.x = Unpack.x; - Result.data.y = Unpack.y; - Result.data.z = Unpack.z; - return Result.pack; - } - - GLM_FUNC_QUALIFIER vec3 unpackUnorm2x3_1x2(uint8 v) - { - vec3 const ScaleFactor(1.f / 7.f, 1.f / 7.f, 1.f / 3.f); - detail::u3u3u2 Unpack; - Unpack.pack = v; - return vec3(Unpack.data.x, Unpack.data.y, Unpack.data.z) * ScaleFactor; - } - - GLM_FUNC_QUALIFIER int16 packInt2x8(i8vec2 const& v) - { - int16 Pack = 0; - memcpy(&Pack, &v, sizeof(Pack)); - return Pack; - } - - GLM_FUNC_QUALIFIER i8vec2 unpackInt2x8(int16 p) - { - i8vec2 Unpack; - memcpy(&Unpack, &p, sizeof(Unpack)); - return Unpack; - } - - GLM_FUNC_QUALIFIER uint16 packUint2x8(u8vec2 const& v) - { - uint16 Pack = 0; - memcpy(&Pack, &v, sizeof(Pack)); - return Pack; - } - - GLM_FUNC_QUALIFIER u8vec2 unpackUint2x8(uint16 p) - { - u8vec2 Unpack; - memcpy(&Unpack, &p, sizeof(Unpack)); - return Unpack; - } - - GLM_FUNC_QUALIFIER int32 packInt4x8(i8vec4 const& v) - { - int32 Pack = 0; - memcpy(&Pack, &v, sizeof(Pack)); - return Pack; - } - - GLM_FUNC_QUALIFIER i8vec4 unpackInt4x8(int32 p) - { - i8vec4 Unpack; - memcpy(&Unpack, &p, sizeof(Unpack)); - return Unpack; - } - - GLM_FUNC_QUALIFIER uint32 packUint4x8(u8vec4 const& v) - { - uint32 Pack = 0; - memcpy(&Pack, &v, sizeof(Pack)); - return Pack; - } - - GLM_FUNC_QUALIFIER u8vec4 unpackUint4x8(uint32 p) - { - u8vec4 Unpack; - memcpy(&Unpack, &p, sizeof(Unpack)); - return Unpack; - } - - GLM_FUNC_QUALIFIER int packInt2x16(i16vec2 const& v) - { - int Pack = 0; - memcpy(&Pack, &v, sizeof(Pack)); - return Pack; - } - - GLM_FUNC_QUALIFIER i16vec2 unpackInt2x16(int p) - { - i16vec2 Unpack; - memcpy(&Unpack, &p, sizeof(Unpack)); - return Unpack; - } - - GLM_FUNC_QUALIFIER int64 packInt4x16(i16vec4 const& v) - { - int64 Pack = 0; - memcpy(&Pack, &v, sizeof(Pack)); - return Pack; - } - - GLM_FUNC_QUALIFIER i16vec4 unpackInt4x16(int64 p) - { - i16vec4 Unpack; - memcpy(&Unpack, &p, sizeof(Unpack)); - return Unpack; - } - - GLM_FUNC_QUALIFIER uint packUint2x16(u16vec2 const& v) - { - uint Pack = 0; - memcpy(&Pack, &v, 
sizeof(Pack)); - return Pack; - } - - GLM_FUNC_QUALIFIER u16vec2 unpackUint2x16(uint p) - { - u16vec2 Unpack; - memcpy(&Unpack, &p, sizeof(Unpack)); - return Unpack; - } - - GLM_FUNC_QUALIFIER uint64 packUint4x16(u16vec4 const& v) - { - uint64 Pack = 0; - memcpy(&Pack, &v, sizeof(Pack)); - return Pack; - } - - GLM_FUNC_QUALIFIER u16vec4 unpackUint4x16(uint64 p) - { - u16vec4 Unpack; - memcpy(&Unpack, &p, sizeof(Unpack)); - return Unpack; - } - - GLM_FUNC_QUALIFIER int64 packInt2x32(i32vec2 const& v) - { - int64 Pack = 0; - memcpy(&Pack, &v, sizeof(Pack)); - return Pack; - } - - GLM_FUNC_QUALIFIER i32vec2 unpackInt2x32(int64 p) - { - i32vec2 Unpack; - memcpy(&Unpack, &p, sizeof(Unpack)); - return Unpack; - } - - GLM_FUNC_QUALIFIER uint64 packUint2x32(u32vec2 const& v) - { - uint64 Pack = 0; - memcpy(&Pack, &v, sizeof(Pack)); - return Pack; - } - - GLM_FUNC_QUALIFIER u32vec2 unpackUint2x32(uint64 p) - { - u32vec2 Unpack; - memcpy(&Unpack, &p, sizeof(Unpack)); - return Unpack; - } -}//namespace glm - diff --git a/third_party/glm/gtc/quaternion.hpp b/third_party/glm/gtc/quaternion.hpp deleted file mode 100755 index 359e072..0000000 --- a/third_party/glm/gtc/quaternion.hpp +++ /dev/null @@ -1,173 +0,0 @@ -/// @ref gtc_quaternion -/// @file glm/gtc/quaternion.hpp -/// -/// @see core (dependence) -/// @see gtc_constants (dependence) -/// -/// @defgroup gtc_quaternion GLM_GTC_quaternion -/// @ingroup gtc -/// -/// Include to use the features of this extension. -/// -/// Defines a templated quaternion type and several quaternion operations. - -#pragma once - -// Dependency: -#include "../gtc/constants.hpp" -#include "../gtc/matrix_transform.hpp" -#include "../ext/vector_relational.hpp" -#include "../ext/quaternion_common.hpp" -#include "../ext/quaternion_float.hpp" -#include "../ext/quaternion_float_precision.hpp" -#include "../ext/quaternion_double.hpp" -#include "../ext/quaternion_double_precision.hpp" -#include "../ext/quaternion_relational.hpp" -#include "../ext/quaternion_geometric.hpp" -#include "../ext/quaternion_trigonometric.hpp" -#include "../ext/quaternion_transform.hpp" -#include "../detail/type_mat3x3.hpp" -#include "../detail/type_mat4x4.hpp" -#include "../detail/type_vec3.hpp" -#include "../detail/type_vec4.hpp" - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# pragma message("GLM: GLM_GTC_quaternion extension included") -#endif - -namespace glm -{ - /// @addtogroup gtc_quaternion - /// @{ - - /// Returns euler angles, pitch as x, yaw as y, roll as z. - /// The result is expressed in radians. - /// - /// @tparam T Floating-point scalar types. - /// - /// @see gtc_quaternion - template - GLM_FUNC_DECL vec<3, T, Q> eulerAngles(qua const& x); - - /// Returns roll value of euler angles expressed in radians. - /// - /// @tparam T Floating-point scalar types. - /// - /// @see gtc_quaternion - template - GLM_FUNC_DECL T roll(qua const& x); - - /// Returns pitch value of euler angles expressed in radians. - /// - /// @tparam T Floating-point scalar types. - /// - /// @see gtc_quaternion - template - GLM_FUNC_DECL T pitch(qua const& x); - - /// Returns yaw value of euler angles expressed in radians. - /// - /// @tparam T Floating-point scalar types. - /// - /// @see gtc_quaternion - template - GLM_FUNC_DECL T yaw(qua const& x); - - /// Converts a quaternion to a 3 * 3 matrix. - /// - /// @tparam T Floating-point scalar types. - /// - /// @see gtc_quaternion - template - GLM_FUNC_DECL mat<3, 3, T, Q> mat3_cast(qua const& x); - - /// Converts a quaternion to a 4 * 4 matrix. 
- /// - /// @tparam T Floating-point scalar types. - /// - /// @see gtc_quaternion - template - GLM_FUNC_DECL mat<4, 4, T, Q> mat4_cast(qua const& x); - - /// Converts a pure rotation 3 * 3 matrix to a quaternion. - /// - /// @tparam T Floating-point scalar types. - /// - /// @see gtc_quaternion - template - GLM_FUNC_DECL qua quat_cast(mat<3, 3, T, Q> const& x); - - /// Converts a pure rotation 4 * 4 matrix to a quaternion. - /// - /// @tparam T Floating-point scalar types. - /// - /// @see gtc_quaternion - template - GLM_FUNC_DECL qua quat_cast(mat<4, 4, T, Q> const& x); - - /// Returns the component-wise comparison result of x < y. - /// - /// @tparam T Floating-point scalar types - /// @tparam Q Value from qualifier enum - /// - /// @see ext_quaternion_relational - template - GLM_FUNC_DECL vec<4, bool, Q> lessThan(qua const& x, qua const& y); - - /// Returns the component-wise comparison of result x <= y. - /// - /// @tparam T Floating-point scalar types - /// @tparam Q Value from qualifier enum - /// - /// @see ext_quaternion_relational - template - GLM_FUNC_DECL vec<4, bool, Q> lessThanEqual(qua const& x, qua const& y); - - /// Returns the component-wise comparison of result x > y. - /// - /// @tparam T Floating-point scalar types - /// @tparam Q Value from qualifier enum - /// - /// @see ext_quaternion_relational - template - GLM_FUNC_DECL vec<4, bool, Q> greaterThan(qua const& x, qua const& y); - - /// Returns the component-wise comparison of result x >= y. - /// - /// @tparam T Floating-point scalar types - /// @tparam Q Value from qualifier enum - /// - /// @see ext_quaternion_relational - template - GLM_FUNC_DECL vec<4, bool, Q> greaterThanEqual(qua const& x, qua const& y); - - /// Build a look at quaternion based on the default handedness. - /// - /// @param direction Desired forward direction. Needs to be normalized. - /// @param up Up vector, how the camera is oriented. Typically (0, 1, 0). - template - GLM_FUNC_DECL qua quatLookAt( - vec<3, T, Q> const& direction, - vec<3, T, Q> const& up); - - /// Build a right-handed look at quaternion. - /// - /// @param direction Desired forward direction onto which the -z-axis gets mapped. Needs to be normalized. - /// @param up Up vector, how the camera is oriented. Typically (0, 1, 0). - template - GLM_FUNC_DECL qua quatLookAtRH( - vec<3, T, Q> const& direction, - vec<3, T, Q> const& up); - - /// Build a left-handed look at quaternion. - /// - /// @param direction Desired forward direction onto which the +z-axis gets mapped. Needs to be normalized. - /// @param up Up vector, how the camera is oriented. Typically (0, 1, 0). 
- template - GLM_FUNC_DECL qua quatLookAtLH( - vec<3, T, Q> const& direction, - vec<3, T, Q> const& up); - /// @} -} //namespace glm - -#include "quaternion.inl" diff --git a/third_party/glm/gtc/quaternion.inl b/third_party/glm/gtc/quaternion.inl deleted file mode 100755 index 9dd037e..0000000 --- a/third_party/glm/gtc/quaternion.inl +++ /dev/null @@ -1,200 +0,0 @@ -#include "../trigonometric.hpp" -#include "../geometric.hpp" -#include "../exponential.hpp" -#include "epsilon.hpp" -#include - -namespace glm -{ - template - GLM_FUNC_QUALIFIER vec<3, T, Q> eulerAngles(qua const& x) - { - return vec<3, T, Q>(pitch(x), yaw(x), roll(x)); - } - - template - GLM_FUNC_QUALIFIER T roll(qua const& q) - { - return static_cast(atan(static_cast(2) * (q.x * q.y + q.w * q.z), q.w * q.w + q.x * q.x - q.y * q.y - q.z * q.z)); - } - - template - GLM_FUNC_QUALIFIER T pitch(qua const& q) - { - //return T(atan(T(2) * (q.y * q.z + q.w * q.x), q.w * q.w - q.x * q.x - q.y * q.y + q.z * q.z)); - T const y = static_cast(2) * (q.y * q.z + q.w * q.x); - T const x = q.w * q.w - q.x * q.x - q.y * q.y + q.z * q.z; - - if(all(equal(vec<2, T, Q>(x, y), vec<2, T, Q>(0), epsilon()))) //avoid atan2(0,0) - handle singularity - Matiis - return static_cast(static_cast(2) * atan(q.x, q.w)); - - return static_cast(atan(y, x)); - } - - template - GLM_FUNC_QUALIFIER T yaw(qua const& q) - { - return asin(clamp(static_cast(-2) * (q.x * q.z - q.w * q.y), static_cast(-1), static_cast(1))); - } - - template - GLM_FUNC_QUALIFIER mat<3, 3, T, Q> mat3_cast(qua const& q) - { - mat<3, 3, T, Q> Result(T(1)); - T qxx(q.x * q.x); - T qyy(q.y * q.y); - T qzz(q.z * q.z); - T qxz(q.x * q.z); - T qxy(q.x * q.y); - T qyz(q.y * q.z); - T qwx(q.w * q.x); - T qwy(q.w * q.y); - T qwz(q.w * q.z); - - Result[0][0] = T(1) - T(2) * (qyy + qzz); - Result[0][1] = T(2) * (qxy + qwz); - Result[0][2] = T(2) * (qxz - qwy); - - Result[1][0] = T(2) * (qxy - qwz); - Result[1][1] = T(1) - T(2) * (qxx + qzz); - Result[1][2] = T(2) * (qyz + qwx); - - Result[2][0] = T(2) * (qxz + qwy); - Result[2][1] = T(2) * (qyz - qwx); - Result[2][2] = T(1) - T(2) * (qxx + qyy); - return Result; - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, Q> mat4_cast(qua const& q) - { - return mat<4, 4, T, Q>(mat3_cast(q)); - } - - template - GLM_FUNC_QUALIFIER qua quat_cast(mat<3, 3, T, Q> const& m) - { - T fourXSquaredMinus1 = m[0][0] - m[1][1] - m[2][2]; - T fourYSquaredMinus1 = m[1][1] - m[0][0] - m[2][2]; - T fourZSquaredMinus1 = m[2][2] - m[0][0] - m[1][1]; - T fourWSquaredMinus1 = m[0][0] + m[1][1] + m[2][2]; - - int biggestIndex = 0; - T fourBiggestSquaredMinus1 = fourWSquaredMinus1; - if(fourXSquaredMinus1 > fourBiggestSquaredMinus1) - { - fourBiggestSquaredMinus1 = fourXSquaredMinus1; - biggestIndex = 1; - } - if(fourYSquaredMinus1 > fourBiggestSquaredMinus1) - { - fourBiggestSquaredMinus1 = fourYSquaredMinus1; - biggestIndex = 2; - } - if(fourZSquaredMinus1 > fourBiggestSquaredMinus1) - { - fourBiggestSquaredMinus1 = fourZSquaredMinus1; - biggestIndex = 3; - } - - T biggestVal = sqrt(fourBiggestSquaredMinus1 + static_cast(1)) * static_cast(0.5); - T mult = static_cast(0.25) / biggestVal; - - switch(biggestIndex) - { - case 0: - return qua(biggestVal, (m[1][2] - m[2][1]) * mult, (m[2][0] - m[0][2]) * mult, (m[0][1] - m[1][0]) * mult); - case 1: - return qua((m[1][2] - m[2][1]) * mult, biggestVal, (m[0][1] + m[1][0]) * mult, (m[2][0] + m[0][2]) * mult); - case 2: - return qua((m[2][0] - m[0][2]) * mult, (m[0][1] + m[1][0]) * mult, biggestVal, (m[1][2] + m[2][1]) * mult); - case 3: - 
-			return qua<T, Q>((m[0][1] - m[1][0]) * mult, (m[2][0] + m[0][2]) * mult, (m[1][2] + m[2][1]) * mult, biggestVal);
-		default: // Silence a -Wswitch-default warning in GCC. Should never actually get here. Assert is just for sanity.
-			assert(false);
-			return qua<T, Q>(1, 0, 0, 0);
-		}
-	}
-
-	template<typename T, qualifier Q>
-	GLM_FUNC_QUALIFIER qua<T, Q> quat_cast(mat<4, 4, T, Q> const& m4)
-	{
-		return quat_cast(mat<3, 3, T, Q>(m4));
-	}
-
-	template<typename T, qualifier Q>
-	GLM_FUNC_QUALIFIER vec<4, bool, Q> lessThan(qua<T, Q> const& x, qua<T, Q> const& y)
-	{
-		vec<4, bool, Q> Result;
-		for(length_t i = 0; i < x.length(); ++i)
-			Result[i] = x[i] < y[i];
-		return Result;
-	}
-
-	template<typename T, qualifier Q>
-	GLM_FUNC_QUALIFIER vec<4, bool, Q> lessThanEqual(qua<T, Q> const& x, qua<T, Q> const& y)
-	{
-		vec<4, bool, Q> Result;
-		for(length_t i = 0; i < x.length(); ++i)
-			Result[i] = x[i] <= y[i];
-		return Result;
-	}
-
-	template<typename T, qualifier Q>
-	GLM_FUNC_QUALIFIER vec<4, bool, Q> greaterThan(qua<T, Q> const& x, qua<T, Q> const& y)
-	{
-		vec<4, bool, Q> Result;
-		for(length_t i = 0; i < x.length(); ++i)
-			Result[i] = x[i] > y[i];
-		return Result;
-	}
-
-	template<typename T, qualifier Q>
-	GLM_FUNC_QUALIFIER vec<4, bool, Q> greaterThanEqual(qua<T, Q> const& x, qua<T, Q> const& y)
-	{
-		vec<4, bool, Q> Result;
-		for(length_t i = 0; i < x.length(); ++i)
-			Result[i] = x[i] >= y[i];
-		return Result;
-	}
-
-
-	template<typename T, qualifier Q>
-	GLM_FUNC_QUALIFIER qua<T, Q> quatLookAt(vec<3, T, Q> const& direction, vec<3, T, Q> const& up)
-	{
-#		if GLM_CONFIG_CLIP_CONTROL & GLM_CLIP_CONTROL_LH_BIT
-			return quatLookAtLH(direction, up);
-#		else
-			return quatLookAtRH(direction, up);
-#		endif
-	}
-
-	template<typename T, qualifier Q>
-	GLM_FUNC_QUALIFIER qua<T, Q> quatLookAtRH(vec<3, T, Q> const& direction, vec<3, T, Q> const& up)
-	{
-		mat<3, 3, T, Q> Result;
-
-		Result[2] = -direction;
-		Result[0] = normalize(cross(up, Result[2]));
-		Result[1] = cross(Result[2], Result[0]);
-
-		return quat_cast(Result);
-	}
-
-	template<typename T, qualifier Q>
-	GLM_FUNC_QUALIFIER qua<T, Q> quatLookAtLH(vec<3, T, Q> const& direction, vec<3, T, Q> const& up)
-	{
-		mat<3, 3, T, Q> Result;
-
-		Result[2] = direction;
-		Result[0] = normalize(cross(up, Result[2]));
-		Result[1] = cross(Result[2], Result[0]);
-
-		return quat_cast(Result);
-	}
-}//namespace glm
-
-#if GLM_CONFIG_SIMD == GLM_ENABLE
-#	include "quaternion_simd.inl"
-#endif
-
diff --git a/third_party/glm/gtc/quaternion_simd.inl b/third_party/glm/gtc/quaternion_simd.inl
deleted file mode 100755
index e69de29..0000000
diff --git a/third_party/glm/gtc/random.hpp b/third_party/glm/gtc/random.hpp
deleted file mode 100755
index 9a85958..0000000
--- a/third_party/glm/gtc/random.hpp
+++ /dev/null
@@ -1,82 +0,0 @@
-/// @ref gtc_random
-/// @file glm/gtc/random.hpp
-///
-/// @see core (dependence)
-/// @see gtx_random (extended)
-///
-/// @defgroup gtc_random GLM_GTC_random
-/// @ingroup gtc
-///
-/// Include <glm/gtc/random.hpp> to use the features of this extension.
-///
-/// Generate random number from various distribution methods.
-
-#pragma once
-
-// Dependency:
-#include "../ext/scalar_int_sized.hpp"
-#include "../ext/scalar_uint_sized.hpp"
-#include "../detail/qualifier.hpp"
-
-#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED)
-#	pragma message("GLM: GLM_GTC_random extension included")
-#endif
-
-namespace glm
-{
-	/// @addtogroup gtc_random
-	/// @{
-
-	/// Generate random numbers in the interval [Min, Max], according a linear distribution
-	///
-	/// @param Min Minimum value included in the sampling
-	/// @param Max Maximum value included in the sampling
-	/// @tparam genType Value type. Currently supported: float or double scalars.
- /// @see gtc_random - template - GLM_FUNC_DECL genType linearRand(genType Min, genType Max); - - /// Generate random numbers in the interval [Min, Max], according a linear distribution - /// - /// @param Min Minimum value included in the sampling - /// @param Max Maximum value included in the sampling - /// @tparam T Value type. Currently supported: float or double. - /// - /// @see gtc_random - template - GLM_FUNC_DECL vec linearRand(vec const& Min, vec const& Max); - - /// Generate random numbers in the interval [Min, Max], according a gaussian distribution - /// - /// @see gtc_random - template - GLM_FUNC_DECL genType gaussRand(genType Mean, genType Deviation); - - /// Generate a random 2D vector which coordinates are regulary distributed on a circle of a given radius - /// - /// @see gtc_random - template - GLM_FUNC_DECL vec<2, T, defaultp> circularRand(T Radius); - - /// Generate a random 3D vector which coordinates are regulary distributed on a sphere of a given radius - /// - /// @see gtc_random - template - GLM_FUNC_DECL vec<3, T, defaultp> sphericalRand(T Radius); - - /// Generate a random 2D vector which coordinates are regulary distributed within the area of a disk of a given radius - /// - /// @see gtc_random - template - GLM_FUNC_DECL vec<2, T, defaultp> diskRand(T Radius); - - /// Generate a random 3D vector which coordinates are regulary distributed within the volume of a ball of a given radius - /// - /// @see gtc_random - template - GLM_FUNC_DECL vec<3, T, defaultp> ballRand(T Radius); - - /// @} -}//namespace glm - -#include "random.inl" diff --git a/third_party/glm/gtc/random.inl b/third_party/glm/gtc/random.inl deleted file mode 100755 index 7048509..0000000 --- a/third_party/glm/gtc/random.inl +++ /dev/null @@ -1,303 +0,0 @@ -#include "../geometric.hpp" -#include "../exponential.hpp" -#include "../trigonometric.hpp" -#include "../detail/type_vec1.hpp" -#include -#include -#include -#include - -namespace glm{ -namespace detail -{ - template - struct compute_rand - { - GLM_FUNC_QUALIFIER static vec call(); - }; - - template - struct compute_rand<1, uint8, P> - { - GLM_FUNC_QUALIFIER static vec<1, uint8, P> call() - { - return vec<1, uint8, P>( - std::rand() % std::numeric_limits::max()); - } - }; - - template - struct compute_rand<2, uint8, P> - { - GLM_FUNC_QUALIFIER static vec<2, uint8, P> call() - { - return vec<2, uint8, P>( - std::rand() % std::numeric_limits::max(), - std::rand() % std::numeric_limits::max()); - } - }; - - template - struct compute_rand<3, uint8, P> - { - GLM_FUNC_QUALIFIER static vec<3, uint8, P> call() - { - return vec<3, uint8, P>( - std::rand() % std::numeric_limits::max(), - std::rand() % std::numeric_limits::max(), - std::rand() % std::numeric_limits::max()); - } - }; - - template - struct compute_rand<4, uint8, P> - { - GLM_FUNC_QUALIFIER static vec<4, uint8, P> call() - { - return vec<4, uint8, P>( - std::rand() % std::numeric_limits::max(), - std::rand() % std::numeric_limits::max(), - std::rand() % std::numeric_limits::max(), - std::rand() % std::numeric_limits::max()); - } - }; - - template - struct compute_rand - { - GLM_FUNC_QUALIFIER static vec call() - { - return - (vec(compute_rand::call()) << static_cast(8)) | - (vec(compute_rand::call()) << static_cast(0)); - } - }; - - template - struct compute_rand - { - GLM_FUNC_QUALIFIER static vec call() - { - return - (vec(compute_rand::call()) << static_cast(16)) | - (vec(compute_rand::call()) << static_cast(0)); - } - }; - - template - struct compute_rand - { - GLM_FUNC_QUALIFIER static 
vec call() - { - return - (vec(compute_rand::call()) << static_cast(32)) | - (vec(compute_rand::call()) << static_cast(0)); - } - }; - - template - struct compute_linearRand - { - GLM_FUNC_QUALIFIER static vec call(vec const& Min, vec const& Max); - }; - - template - struct compute_linearRand - { - GLM_FUNC_QUALIFIER static vec call(vec const& Min, vec const& Max) - { - return (vec(compute_rand::call() % vec(Max + static_cast(1) - Min))) + Min; - } - }; - - template - struct compute_linearRand - { - GLM_FUNC_QUALIFIER static vec call(vec const& Min, vec const& Max) - { - return (compute_rand::call() % (Max + static_cast(1) - Min)) + Min; - } - }; - - template - struct compute_linearRand - { - GLM_FUNC_QUALIFIER static vec call(vec const& Min, vec const& Max) - { - return (vec(compute_rand::call() % vec(Max + static_cast(1) - Min))) + Min; - } - }; - - template - struct compute_linearRand - { - GLM_FUNC_QUALIFIER static vec call(vec const& Min, vec const& Max) - { - return (compute_rand::call() % (Max + static_cast(1) - Min)) + Min; - } - }; - - template - struct compute_linearRand - { - GLM_FUNC_QUALIFIER static vec call(vec const& Min, vec const& Max) - { - return (vec(compute_rand::call() % vec(Max + static_cast(1) - Min))) + Min; - } - }; - - template - struct compute_linearRand - { - GLM_FUNC_QUALIFIER static vec call(vec const& Min, vec const& Max) - { - return (compute_rand::call() % (Max + static_cast(1) - Min)) + Min; - } - }; - - template - struct compute_linearRand - { - GLM_FUNC_QUALIFIER static vec call(vec const& Min, vec const& Max) - { - return (vec(compute_rand::call() % vec(Max + static_cast(1) - Min))) + Min; - } - }; - - template - struct compute_linearRand - { - GLM_FUNC_QUALIFIER static vec call(vec const& Min, vec const& Max) - { - return (compute_rand::call() % (Max + static_cast(1) - Min)) + Min; - } - }; - - template - struct compute_linearRand - { - GLM_FUNC_QUALIFIER static vec call(vec const& Min, vec const& Max) - { - return vec(compute_rand::call()) / static_cast(std::numeric_limits::max()) * (Max - Min) + Min; - } - }; - - template - struct compute_linearRand - { - GLM_FUNC_QUALIFIER static vec call(vec const& Min, vec const& Max) - { - return vec(compute_rand::call()) / static_cast(std::numeric_limits::max()) * (Max - Min) + Min; - } - }; - - template - struct compute_linearRand - { - GLM_FUNC_QUALIFIER static vec call(vec const& Min, vec const& Max) - { - return vec(compute_rand::call()) / static_cast(std::numeric_limits::max()) * (Max - Min) + Min; - } - }; -}//namespace detail - - template - GLM_FUNC_QUALIFIER genType linearRand(genType Min, genType Max) - { - return detail::compute_linearRand<1, genType, highp>::call( - vec<1, genType, highp>(Min), - vec<1, genType, highp>(Max)).x; - } - - template - GLM_FUNC_QUALIFIER vec linearRand(vec const& Min, vec const& Max) - { - return detail::compute_linearRand::call(Min, Max); - } - - template - GLM_FUNC_QUALIFIER genType gaussRand(genType Mean, genType Deviation) - { - genType w, x1, x2; - - do - { - x1 = linearRand(genType(-1), genType(1)); - x2 = linearRand(genType(-1), genType(1)); - - w = x1 * x1 + x2 * x2; - } while(w > genType(1)); - - return static_cast(x2 * Deviation * Deviation * sqrt((genType(-2) * log(w)) / w) + Mean); - } - - template - GLM_FUNC_QUALIFIER vec gaussRand(vec const& Mean, vec const& Deviation) - { - return detail::functor2::call(gaussRand, Mean, Deviation); - } - - template - GLM_FUNC_QUALIFIER vec<2, T, defaultp> diskRand(T Radius) - { - assert(Radius > static_cast(0)); - - vec<2, 
T, defaultp> Result(T(0)); - T LenRadius(T(0)); - - do - { - Result = linearRand( - vec<2, T, defaultp>(-Radius), - vec<2, T, defaultp>(Radius)); - LenRadius = length(Result); - } - while(LenRadius > Radius); - - return Result; - } - - template - GLM_FUNC_QUALIFIER vec<3, T, defaultp> ballRand(T Radius) - { - assert(Radius > static_cast(0)); - - vec<3, T, defaultp> Result(T(0)); - T LenRadius(T(0)); - - do - { - Result = linearRand( - vec<3, T, defaultp>(-Radius), - vec<3, T, defaultp>(Radius)); - LenRadius = length(Result); - } - while(LenRadius > Radius); - - return Result; - } - - template - GLM_FUNC_QUALIFIER vec<2, T, defaultp> circularRand(T Radius) - { - assert(Radius > static_cast(0)); - - T a = linearRand(T(0), static_cast(6.283185307179586476925286766559)); - return vec<2, T, defaultp>(glm::cos(a), glm::sin(a)) * Radius; - } - - template - GLM_FUNC_QUALIFIER vec<3, T, defaultp> sphericalRand(T Radius) - { - assert(Radius > static_cast(0)); - - T theta = linearRand(T(0), T(6.283185307179586476925286766559f)); - T phi = std::acos(linearRand(T(-1.0f), T(1.0f))); - - T x = std::sin(phi) * std::cos(theta); - T y = std::sin(phi) * std::sin(theta); - T z = std::cos(phi); - - return vec<3, T, defaultp>(x, y, z) * Radius; - } -}//namespace glm diff --git a/third_party/glm/gtc/reciprocal.hpp b/third_party/glm/gtc/reciprocal.hpp deleted file mode 100755 index c7d1330..0000000 --- a/third_party/glm/gtc/reciprocal.hpp +++ /dev/null @@ -1,135 +0,0 @@ -/// @ref gtc_reciprocal -/// @file glm/gtc/reciprocal.hpp -/// -/// @see core (dependence) -/// -/// @defgroup gtc_reciprocal GLM_GTC_reciprocal -/// @ingroup gtc -/// -/// Include to use the features of this extension. -/// -/// Define secant, cosecant and cotangent functions. - -#pragma once - -// Dependencies -#include "../detail/setup.hpp" - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# pragma message("GLM: GLM_GTC_reciprocal extension included") -#endif - -namespace glm -{ - /// @addtogroup gtc_reciprocal - /// @{ - - /// Secant function. - /// hypotenuse / adjacent or 1 / cos(x) - /// - /// @tparam genType Floating-point scalar or vector types. - /// - /// @see gtc_reciprocal - template - GLM_FUNC_DECL genType sec(genType angle); - - /// Cosecant function. - /// hypotenuse / opposite or 1 / sin(x) - /// - /// @tparam genType Floating-point scalar or vector types. - /// - /// @see gtc_reciprocal - template - GLM_FUNC_DECL genType csc(genType angle); - - /// Cotangent function. - /// adjacent / opposite or 1 / tan(x) - /// - /// @tparam genType Floating-point scalar or vector types. - /// - /// @see gtc_reciprocal - template - GLM_FUNC_DECL genType cot(genType angle); - - /// Inverse secant function. - /// - /// @return Return an angle expressed in radians. - /// @tparam genType Floating-point scalar or vector types. - /// - /// @see gtc_reciprocal - template - GLM_FUNC_DECL genType asec(genType x); - - /// Inverse cosecant function. - /// - /// @return Return an angle expressed in radians. - /// @tparam genType Floating-point scalar or vector types. - /// - /// @see gtc_reciprocal - template - GLM_FUNC_DECL genType acsc(genType x); - - /// Inverse cotangent function. - /// - /// @return Return an angle expressed in radians. - /// @tparam genType Floating-point scalar or vector types. - /// - /// @see gtc_reciprocal - template - GLM_FUNC_DECL genType acot(genType x); - - /// Secant hyperbolic function. - /// - /// @tparam genType Floating-point scalar or vector types. 
- /// - /// @see gtc_reciprocal - template - GLM_FUNC_DECL genType sech(genType angle); - - /// Cosecant hyperbolic function. - /// - /// @tparam genType Floating-point scalar or vector types. - /// - /// @see gtc_reciprocal - template - GLM_FUNC_DECL genType csch(genType angle); - - /// Cotangent hyperbolic function. - /// - /// @tparam genType Floating-point scalar or vector types. - /// - /// @see gtc_reciprocal - template - GLM_FUNC_DECL genType coth(genType angle); - - /// Inverse secant hyperbolic function. - /// - /// @return Return an angle expressed in radians. - /// @tparam genType Floating-point scalar or vector types. - /// - /// @see gtc_reciprocal - template - GLM_FUNC_DECL genType asech(genType x); - - /// Inverse cosecant hyperbolic function. - /// - /// @return Return an angle expressed in radians. - /// @tparam genType Floating-point scalar or vector types. - /// - /// @see gtc_reciprocal - template - GLM_FUNC_DECL genType acsch(genType x); - - /// Inverse cotangent hyperbolic function. - /// - /// @return Return an angle expressed in radians. - /// @tparam genType Floating-point scalar or vector types. - /// - /// @see gtc_reciprocal - template - GLM_FUNC_DECL genType acoth(genType x); - - /// @} -}//namespace glm - -#include "reciprocal.inl" diff --git a/third_party/glm/gtc/reciprocal.inl b/third_party/glm/gtc/reciprocal.inl deleted file mode 100755 index d88729e..0000000 --- a/third_party/glm/gtc/reciprocal.inl +++ /dev/null @@ -1,191 +0,0 @@ -/// @ref gtc_reciprocal - -#include "../trigonometric.hpp" -#include - -namespace glm -{ - // sec - template - GLM_FUNC_QUALIFIER genType sec(genType angle) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'sec' only accept floating-point values"); - return genType(1) / glm::cos(angle); - } - - template - GLM_FUNC_QUALIFIER vec sec(vec const& x) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'sec' only accept floating-point inputs"); - return detail::functor1::call(sec, x); - } - - // csc - template - GLM_FUNC_QUALIFIER genType csc(genType angle) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'csc' only accept floating-point values"); - return genType(1) / glm::sin(angle); - } - - template - GLM_FUNC_QUALIFIER vec csc(vec const& x) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'csc' only accept floating-point inputs"); - return detail::functor1::call(csc, x); - } - - // cot - template - GLM_FUNC_QUALIFIER genType cot(genType angle) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'cot' only accept floating-point values"); - - genType const pi_over_2 = genType(3.1415926535897932384626433832795 / 2.0); - return glm::tan(pi_over_2 - angle); - } - - template - GLM_FUNC_QUALIFIER vec cot(vec const& x) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'cot' only accept floating-point inputs"); - return detail::functor1::call(cot, x); - } - - // asec - template - GLM_FUNC_QUALIFIER genType asec(genType x) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'asec' only accept floating-point values"); - return acos(genType(1) / x); - } - - template - GLM_FUNC_QUALIFIER vec asec(vec const& x) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'asec' only accept floating-point inputs"); - return detail::functor1::call(asec, x); - } - - // acsc - template - GLM_FUNC_QUALIFIER genType acsc(genType x) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'acsc' only accept floating-point values"); - return asin(genType(1) / x); - } - - template - GLM_FUNC_QUALIFIER vec 
acsc(vec const& x) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'acsc' only accept floating-point inputs"); - return detail::functor1::call(acsc, x); - } - - // acot - template - GLM_FUNC_QUALIFIER genType acot(genType x) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'acot' only accept floating-point values"); - - genType const pi_over_2 = genType(3.1415926535897932384626433832795 / 2.0); - return pi_over_2 - atan(x); - } - - template - GLM_FUNC_QUALIFIER vec acot(vec const& x) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'acot' only accept floating-point inputs"); - return detail::functor1::call(acot, x); - } - - // sech - template - GLM_FUNC_QUALIFIER genType sech(genType angle) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'sech' only accept floating-point values"); - return genType(1) / glm::cosh(angle); - } - - template - GLM_FUNC_QUALIFIER vec sech(vec const& x) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'sech' only accept floating-point inputs"); - return detail::functor1::call(sech, x); - } - - // csch - template - GLM_FUNC_QUALIFIER genType csch(genType angle) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'csch' only accept floating-point values"); - return genType(1) / glm::sinh(angle); - } - - template - GLM_FUNC_QUALIFIER vec csch(vec const& x) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'csch' only accept floating-point inputs"); - return detail::functor1::call(csch, x); - } - - // coth - template - GLM_FUNC_QUALIFIER genType coth(genType angle) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'coth' only accept floating-point values"); - return glm::cosh(angle) / glm::sinh(angle); - } - - template - GLM_FUNC_QUALIFIER vec coth(vec const& x) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'coth' only accept floating-point inputs"); - return detail::functor1::call(coth, x); - } - - // asech - template - GLM_FUNC_QUALIFIER genType asech(genType x) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'asech' only accept floating-point values"); - return acosh(genType(1) / x); - } - - template - GLM_FUNC_QUALIFIER vec asech(vec const& x) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'asech' only accept floating-point inputs"); - return detail::functor1::call(asech, x); - } - - // acsch - template - GLM_FUNC_QUALIFIER genType acsch(genType x) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'acsch' only accept floating-point values"); - return asinh(genType(1) / x); - } - - template - GLM_FUNC_QUALIFIER vec acsch(vec const& x) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'acsch' only accept floating-point inputs"); - return detail::functor1::call(acsch, x); - } - - // acoth - template - GLM_FUNC_QUALIFIER genType acoth(genType x) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'acoth' only accept floating-point values"); - return atanh(genType(1) / x); - } - - template - GLM_FUNC_QUALIFIER vec acoth(vec const& x) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'acoth' only accept floating-point inputs"); - return detail::functor1::call(acoth, x); - } -}//namespace glm diff --git a/third_party/glm/gtc/round.hpp b/third_party/glm/gtc/round.hpp deleted file mode 100755 index 56edbbc..0000000 --- a/third_party/glm/gtc/round.hpp +++ /dev/null @@ -1,160 +0,0 @@ -/// @ref gtc_round -/// @file glm/gtc/round.hpp -/// -/// @see core (dependence) -/// @see gtc_round (dependence) -/// -/// @defgroup gtc_round GLM_GTC_round -/// @ingroup gtc 
-/// -/// Include to use the features of this extension. -/// -/// Rounding value to specific boundings - -#pragma once - -// Dependencies -#include "../detail/setup.hpp" -#include "../detail/qualifier.hpp" -#include "../detail/_vectorize.hpp" -#include "../vector_relational.hpp" -#include "../common.hpp" -#include - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# pragma message("GLM: GLM_GTC_round extension included") -#endif - -namespace glm -{ - /// @addtogroup gtc_round - /// @{ - - /// Return the power of two number which value is just higher the input value, - /// round up to a power of two. - /// - /// @see gtc_round - template - GLM_FUNC_DECL genIUType ceilPowerOfTwo(genIUType v); - - /// Return the power of two number which value is just higher the input value, - /// round up to a power of two. - /// - /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector - /// @tparam T Floating-point or integer scalar types - /// @tparam Q Value from qualifier enum - /// - /// @see gtc_round - template - GLM_FUNC_DECL vec ceilPowerOfTwo(vec const& v); - - /// Return the power of two number which value is just lower the input value, - /// round down to a power of two. - /// - /// @see gtc_round - template - GLM_FUNC_DECL genIUType floorPowerOfTwo(genIUType v); - - /// Return the power of two number which value is just lower the input value, - /// round down to a power of two. - /// - /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector - /// @tparam T Floating-point or integer scalar types - /// @tparam Q Value from qualifier enum - /// - /// @see gtc_round - template - GLM_FUNC_DECL vec floorPowerOfTwo(vec const& v); - - /// Return the power of two number which value is the closet to the input value. - /// - /// @see gtc_round - template - GLM_FUNC_DECL genIUType roundPowerOfTwo(genIUType v); - - /// Return the power of two number which value is the closet to the input value. - /// - /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector - /// @tparam T Floating-point or integer scalar types - /// @tparam Q Value from qualifier enum - /// - /// @see gtc_round - template - GLM_FUNC_DECL vec roundPowerOfTwo(vec const& v); - - /// Higher multiple number of Source. - /// - /// @tparam genType Floating-point or integer scalar or vector types. - /// - /// @param v Source value to which is applied the function - /// @param Multiple Must be a null or positive value - /// - /// @see gtc_round - template - GLM_FUNC_DECL genType ceilMultiple(genType v, genType Multiple); - - /// Higher multiple number of Source. - /// - /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector - /// @tparam T Floating-point or integer scalar types - /// @tparam Q Value from qualifier enum - /// - /// @param v Source values to which is applied the function - /// @param Multiple Must be a null or positive value - /// - /// @see gtc_round - template - GLM_FUNC_DECL vec ceilMultiple(vec const& v, vec const& Multiple); - - /// Lower multiple number of Source. - /// - /// @tparam genType Floating-point or integer scalar or vector types. - /// - /// @param v Source value to which is applied the function - /// @param Multiple Must be a null or positive value - /// - /// @see gtc_round - template - GLM_FUNC_DECL genType floorMultiple(genType v, genType Multiple); - - /// Lower multiple number of Source. 
- /// - /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector - /// @tparam T Floating-point or integer scalar types - /// @tparam Q Value from qualifier enum - /// - /// @param v Source values to which is applied the function - /// @param Multiple Must be a null or positive value - /// - /// @see gtc_round - template - GLM_FUNC_DECL vec floorMultiple(vec const& v, vec const& Multiple); - - /// Lower multiple number of Source. - /// - /// @tparam genType Floating-point or integer scalar or vector types. - /// - /// @param v Source value to which is applied the function - /// @param Multiple Must be a null or positive value - /// - /// @see gtc_round - template - GLM_FUNC_DECL genType roundMultiple(genType v, genType Multiple); - - /// Lower multiple number of Source. - /// - /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector - /// @tparam T Floating-point or integer scalar types - /// @tparam Q Value from qualifier enum - /// - /// @param v Source values to which is applied the function - /// @param Multiple Must be a null or positive value - /// - /// @see gtc_round - template - GLM_FUNC_DECL vec roundMultiple(vec const& v, vec const& Multiple); - - /// @} -} //namespace glm - -#include "round.inl" diff --git a/third_party/glm/gtc/round.inl b/third_party/glm/gtc/round.inl deleted file mode 100755 index 48411e4..0000000 --- a/third_party/glm/gtc/round.inl +++ /dev/null @@ -1,155 +0,0 @@ -/// @ref gtc_round - -#include "../integer.hpp" -#include "../ext/vector_integer.hpp" - -namespace glm{ -namespace detail -{ - template - struct compute_roundMultiple {}; - - template<> - struct compute_roundMultiple - { - template - GLM_FUNC_QUALIFIER static genType call(genType Source, genType Multiple) - { - if (Source >= genType(0)) - return Source - std::fmod(Source, Multiple); - else - { - genType Tmp = Source + genType(1); - return Tmp - std::fmod(Tmp, Multiple) - Multiple; - } - } - }; - - template<> - struct compute_roundMultiple - { - template - GLM_FUNC_QUALIFIER static genType call(genType Source, genType Multiple) - { - if (Source >= genType(0)) - return Source - Source % Multiple; - else - { - genType Tmp = Source + genType(1); - return Tmp - Tmp % Multiple - Multiple; - } - } - }; - - template<> - struct compute_roundMultiple - { - template - GLM_FUNC_QUALIFIER static genType call(genType Source, genType Multiple) - { - if (Source >= genType(0)) - return Source - Source % Multiple; - else - { - genType Tmp = Source + genType(1); - return Tmp - Tmp % Multiple - Multiple; - } - } - }; -}//namespace detail - - ////////////////// - // ceilPowerOfTwo - - template - GLM_FUNC_QUALIFIER genType ceilPowerOfTwo(genType value) - { - return detail::compute_ceilPowerOfTwo<1, genType, defaultp, std::numeric_limits::is_signed>::call(vec<1, genType, defaultp>(value)).x; - } - - template - GLM_FUNC_QUALIFIER vec ceilPowerOfTwo(vec const& v) - { - return detail::compute_ceilPowerOfTwo::is_signed>::call(v); - } - - /////////////////// - // floorPowerOfTwo - - template - GLM_FUNC_QUALIFIER genType floorPowerOfTwo(genType value) - { - return isPowerOfTwo(value) ? 
value : static_cast(1) << findMSB(value); - } - - template - GLM_FUNC_QUALIFIER vec floorPowerOfTwo(vec const& v) - { - return detail::functor1::call(floorPowerOfTwo, v); - } - - /////////////////// - // roundPowerOfTwo - - template - GLM_FUNC_QUALIFIER genIUType roundPowerOfTwo(genIUType value) - { - if(isPowerOfTwo(value)) - return value; - - genIUType const prev = static_cast(1) << findMSB(value); - genIUType const next = prev << static_cast(1); - return (next - value) < (value - prev) ? next : prev; - } - - template - GLM_FUNC_QUALIFIER vec roundPowerOfTwo(vec const& v) - { - return detail::functor1::call(roundPowerOfTwo, v); - } - - ////////////////////// - // ceilMultiple - - template - GLM_FUNC_QUALIFIER genType ceilMultiple(genType Source, genType Multiple) - { - return detail::compute_ceilMultiple::is_iec559, std::numeric_limits::is_signed>::call(Source, Multiple); - } - - template - GLM_FUNC_QUALIFIER vec ceilMultiple(vec const& Source, vec const& Multiple) - { - return detail::functor2::call(ceilMultiple, Source, Multiple); - } - - ////////////////////// - // floorMultiple - - template - GLM_FUNC_QUALIFIER genType floorMultiple(genType Source, genType Multiple) - { - return detail::compute_floorMultiple::is_iec559, std::numeric_limits::is_signed>::call(Source, Multiple); - } - - template - GLM_FUNC_QUALIFIER vec floorMultiple(vec const& Source, vec const& Multiple) - { - return detail::functor2::call(floorMultiple, Source, Multiple); - } - - ////////////////////// - // roundMultiple - - template - GLM_FUNC_QUALIFIER genType roundMultiple(genType Source, genType Multiple) - { - return detail::compute_roundMultiple::is_iec559, std::numeric_limits::is_signed>::call(Source, Multiple); - } - - template - GLM_FUNC_QUALIFIER vec roundMultiple(vec const& Source, vec const& Multiple) - { - return detail::functor2::call(roundMultiple, Source, Multiple); - } -}//namespace glm diff --git a/third_party/glm/gtc/type_aligned.hpp b/third_party/glm/gtc/type_aligned.hpp deleted file mode 100755 index 5403abf..0000000 --- a/third_party/glm/gtc/type_aligned.hpp +++ /dev/null @@ -1,1315 +0,0 @@ -/// @ref gtc_type_aligned -/// @file glm/gtc/type_aligned.hpp -/// -/// @see core (dependence) -/// -/// @defgroup gtc_type_aligned GLM_GTC_type_aligned -/// @ingroup gtc -/// -/// Include to use the features of this extension. -/// -/// Aligned types allowing SIMD optimizations of vectors and matrices types - -#pragma once - -#if (GLM_CONFIG_ALIGNED_GENTYPES == GLM_DISABLE) -# error "GLM: Aligned gentypes require to enable C++ language extensions. Define GLM_FORCE_ALIGNED_GENTYPES before including GLM headers to use aligned types." -#endif - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# pragma message("GLM: GLM_GTC_type_aligned extension included") -#endif - -#include "../mat4x4.hpp" -#include "../mat4x3.hpp" -#include "../mat4x2.hpp" -#include "../mat3x4.hpp" -#include "../mat3x3.hpp" -#include "../mat3x2.hpp" -#include "../mat2x4.hpp" -#include "../mat2x3.hpp" -#include "../mat2x2.hpp" -#include "../gtc/vec1.hpp" -#include "../vec2.hpp" -#include "../vec3.hpp" -#include "../vec4.hpp" - -namespace glm -{ - /// @addtogroup gtc_type_aligned - /// @{ - - // -- *vec1 -- - - /// 1 component vector aligned in memory of single-precision floating-point numbers using high precision arithmetic in term of ULPs. 
- typedef vec<1, float, aligned_highp> aligned_highp_vec1; - - /// 1 component vector aligned in memory of single-precision floating-point numbers using medium precision arithmetic in term of ULPs. - typedef vec<1, float, aligned_mediump> aligned_mediump_vec1; - - /// 1 component vector aligned in memory of single-precision floating-point numbers using low precision arithmetic in term of ULPs. - typedef vec<1, float, aligned_lowp> aligned_lowp_vec1; - - /// 1 component vector aligned in memory of double-precision floating-point numbers using high precision arithmetic in term of ULPs. - typedef vec<1, double, aligned_highp> aligned_highp_dvec1; - - /// 1 component vector aligned in memory of double-precision floating-point numbers using medium precision arithmetic in term of ULPs. - typedef vec<1, double, aligned_mediump> aligned_mediump_dvec1; - - /// 1 component vector aligned in memory of double-precision floating-point numbers using low precision arithmetic in term of ULPs. - typedef vec<1, double, aligned_lowp> aligned_lowp_dvec1; - - /// 1 component vector aligned in memory of signed integer numbers. - typedef vec<1, int, aligned_highp> aligned_highp_ivec1; - - /// 1 component vector aligned in memory of signed integer numbers. - typedef vec<1, int, aligned_mediump> aligned_mediump_ivec1; - - /// 1 component vector aligned in memory of signed integer numbers. - typedef vec<1, int, aligned_lowp> aligned_lowp_ivec1; - - /// 1 component vector aligned in memory of unsigned integer numbers. - typedef vec<1, uint, aligned_highp> aligned_highp_uvec1; - - /// 1 component vector aligned in memory of unsigned integer numbers. - typedef vec<1, uint, aligned_mediump> aligned_mediump_uvec1; - - /// 1 component vector aligned in memory of unsigned integer numbers. - typedef vec<1, uint, aligned_lowp> aligned_lowp_uvec1; - - /// 1 component vector aligned in memory of bool values. - typedef vec<1, bool, aligned_highp> aligned_highp_bvec1; - - /// 1 component vector aligned in memory of bool values. - typedef vec<1, bool, aligned_mediump> aligned_mediump_bvec1; - - /// 1 component vector aligned in memory of bool values. - typedef vec<1, bool, aligned_lowp> aligned_lowp_bvec1; - - /// 1 component vector tightly packed in memory of single-precision floating-point numbers using high precision arithmetic in term of ULPs. - typedef vec<1, float, packed_highp> packed_highp_vec1; - - /// 1 component vector tightly packed in memory of single-precision floating-point numbers using medium precision arithmetic in term of ULPs. - typedef vec<1, float, packed_mediump> packed_mediump_vec1; - - /// 1 component vector tightly packed in memory of single-precision floating-point numbers using low precision arithmetic in term of ULPs. - typedef vec<1, float, packed_lowp> packed_lowp_vec1; - - /// 1 component vector tightly packed in memory of double-precision floating-point numbers using high precision arithmetic in term of ULPs. - typedef vec<1, double, packed_highp> packed_highp_dvec1; - - /// 1 component vector tightly packed in memory of double-precision floating-point numbers using medium precision arithmetic in term of ULPs. - typedef vec<1, double, packed_mediump> packed_mediump_dvec1; - - /// 1 component vector tightly packed in memory of double-precision floating-point numbers using low precision arithmetic in term of ULPs. - typedef vec<1, double, packed_lowp> packed_lowp_dvec1; - - /// 1 component vector tightly packed in memory of signed integer numbers. 
- typedef vec<1, int, packed_highp> packed_highp_ivec1; - - /// 1 component vector tightly packed in memory of signed integer numbers. - typedef vec<1, int, packed_mediump> packed_mediump_ivec1; - - /// 1 component vector tightly packed in memory of signed integer numbers. - typedef vec<1, int, packed_lowp> packed_lowp_ivec1; - - /// 1 component vector tightly packed in memory of unsigned integer numbers. - typedef vec<1, uint, packed_highp> packed_highp_uvec1; - - /// 1 component vector tightly packed in memory of unsigned integer numbers. - typedef vec<1, uint, packed_mediump> packed_mediump_uvec1; - - /// 1 component vector tightly packed in memory of unsigned integer numbers. - typedef vec<1, uint, packed_lowp> packed_lowp_uvec1; - - /// 1 component vector tightly packed in memory of bool values. - typedef vec<1, bool, packed_highp> packed_highp_bvec1; - - /// 1 component vector tightly packed in memory of bool values. - typedef vec<1, bool, packed_mediump> packed_mediump_bvec1; - - /// 1 component vector tightly packed in memory of bool values. - typedef vec<1, bool, packed_lowp> packed_lowp_bvec1; - - // -- *vec2 -- - - /// 2 components vector aligned in memory of single-precision floating-point numbers using high precision arithmetic in term of ULPs. - typedef vec<2, float, aligned_highp> aligned_highp_vec2; - - /// 2 components vector aligned in memory of single-precision floating-point numbers using medium precision arithmetic in term of ULPs. - typedef vec<2, float, aligned_mediump> aligned_mediump_vec2; - - /// 2 components vector aligned in memory of single-precision floating-point numbers using low precision arithmetic in term of ULPs. - typedef vec<2, float, aligned_lowp> aligned_lowp_vec2; - - /// 2 components vector aligned in memory of double-precision floating-point numbers using high precision arithmetic in term of ULPs. - typedef vec<2, double, aligned_highp> aligned_highp_dvec2; - - /// 2 components vector aligned in memory of double-precision floating-point numbers using medium precision arithmetic in term of ULPs. - typedef vec<2, double, aligned_mediump> aligned_mediump_dvec2; - - /// 2 components vector aligned in memory of double-precision floating-point numbers using low precision arithmetic in term of ULPs. - typedef vec<2, double, aligned_lowp> aligned_lowp_dvec2; - - /// 2 components vector aligned in memory of signed integer numbers. - typedef vec<2, int, aligned_highp> aligned_highp_ivec2; - - /// 2 components vector aligned in memory of signed integer numbers. - typedef vec<2, int, aligned_mediump> aligned_mediump_ivec2; - - /// 2 components vector aligned in memory of signed integer numbers. - typedef vec<2, int, aligned_lowp> aligned_lowp_ivec2; - - /// 2 components vector aligned in memory of unsigned integer numbers. - typedef vec<2, uint, aligned_highp> aligned_highp_uvec2; - - /// 2 components vector aligned in memory of unsigned integer numbers. - typedef vec<2, uint, aligned_mediump> aligned_mediump_uvec2; - - /// 2 components vector aligned in memory of unsigned integer numbers. - typedef vec<2, uint, aligned_lowp> aligned_lowp_uvec2; - - /// 2 components vector aligned in memory of bool values. - typedef vec<2, bool, aligned_highp> aligned_highp_bvec2; - - /// 2 components vector aligned in memory of bool values. - typedef vec<2, bool, aligned_mediump> aligned_mediump_bvec2; - - /// 2 components vector aligned in memory of bool values. 
- typedef vec<2, bool, aligned_lowp> aligned_lowp_bvec2; - - /// 2 components vector tightly packed in memory of single-precision floating-point numbers using high precision arithmetic in term of ULPs. - typedef vec<2, float, packed_highp> packed_highp_vec2; - - /// 2 components vector tightly packed in memory of single-precision floating-point numbers using medium precision arithmetic in term of ULPs. - typedef vec<2, float, packed_mediump> packed_mediump_vec2; - - /// 2 components vector tightly packed in memory of single-precision floating-point numbers using low precision arithmetic in term of ULPs. - typedef vec<2, float, packed_lowp> packed_lowp_vec2; - - /// 2 components vector tightly packed in memory of double-precision floating-point numbers using high precision arithmetic in term of ULPs. - typedef vec<2, double, packed_highp> packed_highp_dvec2; - - /// 2 components vector tightly packed in memory of double-precision floating-point numbers using medium precision arithmetic in term of ULPs. - typedef vec<2, double, packed_mediump> packed_mediump_dvec2; - - /// 2 components vector tightly packed in memory of double-precision floating-point numbers using low precision arithmetic in term of ULPs. - typedef vec<2, double, packed_lowp> packed_lowp_dvec2; - - /// 2 components vector tightly packed in memory of signed integer numbers. - typedef vec<2, int, packed_highp> packed_highp_ivec2; - - /// 2 components vector tightly packed in memory of signed integer numbers. - typedef vec<2, int, packed_mediump> packed_mediump_ivec2; - - /// 2 components vector tightly packed in memory of signed integer numbers. - typedef vec<2, int, packed_lowp> packed_lowp_ivec2; - - /// 2 components vector tightly packed in memory of unsigned integer numbers. - typedef vec<2, uint, packed_highp> packed_highp_uvec2; - - /// 2 components vector tightly packed in memory of unsigned integer numbers. - typedef vec<2, uint, packed_mediump> packed_mediump_uvec2; - - /// 2 components vector tightly packed in memory of unsigned integer numbers. - typedef vec<2, uint, packed_lowp> packed_lowp_uvec2; - - /// 2 components vector tightly packed in memory of bool values. - typedef vec<2, bool, packed_highp> packed_highp_bvec2; - - /// 2 components vector tightly packed in memory of bool values. - typedef vec<2, bool, packed_mediump> packed_mediump_bvec2; - - /// 2 components vector tightly packed in memory of bool values. - typedef vec<2, bool, packed_lowp> packed_lowp_bvec2; - - // -- *vec3 -- - - /// 3 components vector aligned in memory of single-precision floating-point numbers using high precision arithmetic in term of ULPs. - typedef vec<3, float, aligned_highp> aligned_highp_vec3; - - /// 3 components vector aligned in memory of single-precision floating-point numbers using medium precision arithmetic in term of ULPs. - typedef vec<3, float, aligned_mediump> aligned_mediump_vec3; - - /// 3 components vector aligned in memory of single-precision floating-point numbers using low precision arithmetic in term of ULPs. - typedef vec<3, float, aligned_lowp> aligned_lowp_vec3; - - /// 3 components vector aligned in memory of double-precision floating-point numbers using high precision arithmetic in term of ULPs. - typedef vec<3, double, aligned_highp> aligned_highp_dvec3; - - /// 3 components vector aligned in memory of double-precision floating-point numbers using medium precision arithmetic in term of ULPs. 
- typedef vec<3, double, aligned_mediump> aligned_mediump_dvec3; - - /// 3 components vector aligned in memory of double-precision floating-point numbers using low precision arithmetic in term of ULPs. - typedef vec<3, double, aligned_lowp> aligned_lowp_dvec3; - - /// 3 components vector aligned in memory of signed integer numbers. - typedef vec<3, int, aligned_highp> aligned_highp_ivec3; - - /// 3 components vector aligned in memory of signed integer numbers. - typedef vec<3, int, aligned_mediump> aligned_mediump_ivec3; - - /// 3 components vector aligned in memory of signed integer numbers. - typedef vec<3, int, aligned_lowp> aligned_lowp_ivec3; - - /// 3 components vector aligned in memory of unsigned integer numbers. - typedef vec<3, uint, aligned_highp> aligned_highp_uvec3; - - /// 3 components vector aligned in memory of unsigned integer numbers. - typedef vec<3, uint, aligned_mediump> aligned_mediump_uvec3; - - /// 3 components vector aligned in memory of unsigned integer numbers. - typedef vec<3, uint, aligned_lowp> aligned_lowp_uvec3; - - /// 3 components vector aligned in memory of bool values. - typedef vec<3, bool, aligned_highp> aligned_highp_bvec3; - - /// 3 components vector aligned in memory of bool values. - typedef vec<3, bool, aligned_mediump> aligned_mediump_bvec3; - - /// 3 components vector aligned in memory of bool values. - typedef vec<3, bool, aligned_lowp> aligned_lowp_bvec3; - - /// 3 components vector tightly packed in memory of single-precision floating-point numbers using high precision arithmetic in term of ULPs. - typedef vec<3, float, packed_highp> packed_highp_vec3; - - /// 3 components vector tightly packed in memory of single-precision floating-point numbers using medium precision arithmetic in term of ULPs. - typedef vec<3, float, packed_mediump> packed_mediump_vec3; - - /// 3 components vector tightly packed in memory of single-precision floating-point numbers using low precision arithmetic in term of ULPs. - typedef vec<3, float, packed_lowp> packed_lowp_vec3; - - /// 3 components vector tightly packed in memory of double-precision floating-point numbers using high precision arithmetic in term of ULPs. - typedef vec<3, double, packed_highp> packed_highp_dvec3; - - /// 3 components vector tightly packed in memory of double-precision floating-point numbers using medium precision arithmetic in term of ULPs. - typedef vec<3, double, packed_mediump> packed_mediump_dvec3; - - /// 3 components vector tightly packed in memory of double-precision floating-point numbers using low precision arithmetic in term of ULPs. - typedef vec<3, double, packed_lowp> packed_lowp_dvec3; - - /// 3 components vector tightly packed in memory of signed integer numbers. - typedef vec<3, int, packed_highp> packed_highp_ivec3; - - /// 3 components vector tightly packed in memory of signed integer numbers. - typedef vec<3, int, packed_mediump> packed_mediump_ivec3; - - /// 3 components vector tightly packed in memory of signed integer numbers. - typedef vec<3, int, packed_lowp> packed_lowp_ivec3; - - /// 3 components vector tightly packed in memory of unsigned integer numbers. - typedef vec<3, uint, packed_highp> packed_highp_uvec3; - - /// 3 components vector tightly packed in memory of unsigned integer numbers. - typedef vec<3, uint, packed_mediump> packed_mediump_uvec3; - - /// 3 components vector tightly packed in memory of unsigned integer numbers. - typedef vec<3, uint, packed_lowp> packed_lowp_uvec3; - - /// 3 components vector tightly packed in memory of bool values. 
- typedef vec<3, bool, packed_highp> packed_highp_bvec3; - - /// 3 components vector tightly packed in memory of bool values. - typedef vec<3, bool, packed_mediump> packed_mediump_bvec3; - - /// 3 components vector tightly packed in memory of bool values. - typedef vec<3, bool, packed_lowp> packed_lowp_bvec3; - - // -- *vec4 -- - - /// 4 components vector aligned in memory of single-precision floating-point numbers using high precision arithmetic in term of ULPs. - typedef vec<4, float, aligned_highp> aligned_highp_vec4; - - /// 4 components vector aligned in memory of single-precision floating-point numbers using medium precision arithmetic in term of ULPs. - typedef vec<4, float, aligned_mediump> aligned_mediump_vec4; - - /// 4 components vector aligned in memory of single-precision floating-point numbers using low precision arithmetic in term of ULPs. - typedef vec<4, float, aligned_lowp> aligned_lowp_vec4; - - /// 4 components vector aligned in memory of double-precision floating-point numbers using high precision arithmetic in term of ULPs. - typedef vec<4, double, aligned_highp> aligned_highp_dvec4; - - /// 4 components vector aligned in memory of double-precision floating-point numbers using medium precision arithmetic in term of ULPs. - typedef vec<4, double, aligned_mediump> aligned_mediump_dvec4; - - /// 4 components vector aligned in memory of double-precision floating-point numbers using low precision arithmetic in term of ULPs. - typedef vec<4, double, aligned_lowp> aligned_lowp_dvec4; - - /// 4 components vector aligned in memory of signed integer numbers. - typedef vec<4, int, aligned_highp> aligned_highp_ivec4; - - /// 4 components vector aligned in memory of signed integer numbers. - typedef vec<4, int, aligned_mediump> aligned_mediump_ivec4; - - /// 4 components vector aligned in memory of signed integer numbers. - typedef vec<4, int, aligned_lowp> aligned_lowp_ivec4; - - /// 4 components vector aligned in memory of unsigned integer numbers. - typedef vec<4, uint, aligned_highp> aligned_highp_uvec4; - - /// 4 components vector aligned in memory of unsigned integer numbers. - typedef vec<4, uint, aligned_mediump> aligned_mediump_uvec4; - - /// 4 components vector aligned in memory of unsigned integer numbers. - typedef vec<4, uint, aligned_lowp> aligned_lowp_uvec4; - - /// 4 components vector aligned in memory of bool values. - typedef vec<4, bool, aligned_highp> aligned_highp_bvec4; - - /// 4 components vector aligned in memory of bool values. - typedef vec<4, bool, aligned_mediump> aligned_mediump_bvec4; - - /// 4 components vector aligned in memory of bool values. - typedef vec<4, bool, aligned_lowp> aligned_lowp_bvec4; - - /// 4 components vector tightly packed in memory of single-precision floating-point numbers using high precision arithmetic in term of ULPs. - typedef vec<4, float, packed_highp> packed_highp_vec4; - - /// 4 components vector tightly packed in memory of single-precision floating-point numbers using medium precision arithmetic in term of ULPs. - typedef vec<4, float, packed_mediump> packed_mediump_vec4; - - /// 4 components vector tightly packed in memory of single-precision floating-point numbers using low precision arithmetic in term of ULPs. - typedef vec<4, float, packed_lowp> packed_lowp_vec4; - - /// 4 components vector tightly packed in memory of double-precision floating-point numbers using high precision arithmetic in term of ULPs. 
- typedef vec<4, double, packed_highp> packed_highp_dvec4; - - /// 4 components vector tightly packed in memory of double-precision floating-point numbers using medium precision arithmetic in term of ULPs. - typedef vec<4, double, packed_mediump> packed_mediump_dvec4; - - /// 4 components vector tightly packed in memory of double-precision floating-point numbers using low precision arithmetic in term of ULPs. - typedef vec<4, double, packed_lowp> packed_lowp_dvec4; - - /// 4 components vector tightly packed in memory of signed integer numbers. - typedef vec<4, int, packed_highp> packed_highp_ivec4; - - /// 4 components vector tightly packed in memory of signed integer numbers. - typedef vec<4, int, packed_mediump> packed_mediump_ivec4; - - /// 4 components vector tightly packed in memory of signed integer numbers. - typedef vec<4, int, packed_lowp> packed_lowp_ivec4; - - /// 4 components vector tightly packed in memory of unsigned integer numbers. - typedef vec<4, uint, packed_highp> packed_highp_uvec4; - - /// 4 components vector tightly packed in memory of unsigned integer numbers. - typedef vec<4, uint, packed_mediump> packed_mediump_uvec4; - - /// 4 components vector tightly packed in memory of unsigned integer numbers. - typedef vec<4, uint, packed_lowp> packed_lowp_uvec4; - - /// 4 components vector tightly packed in memory of bool values. - typedef vec<4, bool, packed_highp> packed_highp_bvec4; - - /// 4 components vector tightly packed in memory of bool values. - typedef vec<4, bool, packed_mediump> packed_mediump_bvec4; - - /// 4 components vector tightly packed in memory of bool values. - typedef vec<4, bool, packed_lowp> packed_lowp_bvec4; - - // -- *mat2 -- - - /// 2 by 2 matrix aligned in memory of single-precision floating-point numbers using high precision arithmetic in term of ULPs. - typedef mat<2, 2, float, aligned_highp> aligned_highp_mat2; - - /// 2 by 2 matrix aligned in memory of single-precision floating-point numbers using medium precision arithmetic in term of ULPs. - typedef mat<2, 2, float, aligned_mediump> aligned_mediump_mat2; - - /// 2 by 2 matrix aligned in memory of single-precision floating-point numbers using low precision arithmetic in term of ULPs. - typedef mat<2, 2, float, aligned_lowp> aligned_lowp_mat2; - - /// 2 by 2 matrix aligned in memory of double-precision floating-point numbers using high precision arithmetic in term of ULPs. - typedef mat<2, 2, double, aligned_highp> aligned_highp_dmat2; - - /// 2 by 2 matrix aligned in memory of double-precision floating-point numbers using medium precision arithmetic in term of ULPs. - typedef mat<2, 2, double, aligned_mediump> aligned_mediump_dmat2; - - /// 2 by 2 matrix aligned in memory of double-precision floating-point numbers using low precision arithmetic in term of ULPs. - typedef mat<2, 2, double, aligned_lowp> aligned_lowp_dmat2; - - /// 2 by 2 matrix tightly packed in memory of single-precision floating-point numbers using high precision arithmetic in term of ULPs. - typedef mat<2, 2, float, packed_highp> packed_highp_mat2; - - /// 2 by 2 matrix tightly packed in memory of single-precision floating-point numbers using medium precision arithmetic in term of ULPs. - typedef mat<2, 2, float, packed_mediump> packed_mediump_mat2; - - /// 2 by 2 matrix tightly packed in memory of single-precision floating-point numbers using low precision arithmetic in term of ULPs. 
- typedef mat<2, 2, float, packed_lowp> packed_lowp_mat2; - - /// 2 by 2 matrix tightly packed in memory of double-precision floating-point numbers using high precision arithmetic in term of ULPs. - typedef mat<2, 2, double, packed_highp> packed_highp_dmat2; - - /// 2 by 2 matrix tightly packed in memory of double-precision floating-point numbers using medium precision arithmetic in term of ULPs. - typedef mat<2, 2, double, packed_mediump> packed_mediump_dmat2; - - /// 2 by 2 matrix tightly packed in memory of double-precision floating-point numbers using low precision arithmetic in term of ULPs. - typedef mat<2, 2, double, packed_lowp> packed_lowp_dmat2; - - // -- *mat3 -- - - /// 3 by 3 matrix aligned in memory of single-precision floating-point numbers using high precision arithmetic in term of ULPs. - typedef mat<3, 3, float, aligned_highp> aligned_highp_mat3; - - /// 3 by 3 matrix aligned in memory of single-precision floating-point numbers using medium precision arithmetic in term of ULPs. - typedef mat<3, 3, float, aligned_mediump> aligned_mediump_mat3; - - /// 3 by 3 matrix aligned in memory of single-precision floating-point numbers using low precision arithmetic in term of ULPs. - typedef mat<3, 3, float, aligned_lowp> aligned_lowp_mat3; - - /// 3 by 3 matrix aligned in memory of double-precision floating-point numbers using high precision arithmetic in term of ULPs. - typedef mat<3, 3, double, aligned_highp> aligned_highp_dmat3; - - /// 3 by 3 matrix aligned in memory of double-precision floating-point numbers using medium precision arithmetic in term of ULPs. - typedef mat<3, 3, double, aligned_mediump> aligned_mediump_dmat3; - - /// 3 by 3 matrix aligned in memory of double-precision floating-point numbers using low precision arithmetic in term of ULPs. - typedef mat<3, 3, double, aligned_lowp> aligned_lowp_dmat3; - - /// 3 by 3 matrix tightly packed in memory of single-precision floating-point numbers using high precision arithmetic in term of ULPs. - typedef mat<3, 3, float, packed_highp> packed_highp_mat3; - - /// 3 by 3 matrix tightly packed in memory of single-precision floating-point numbers using medium precision arithmetic in term of ULPs. - typedef mat<3, 3, float, packed_mediump> packed_mediump_mat3; - - /// 3 by 3 matrix tightly packed in memory of single-precision floating-point numbers using low precision arithmetic in term of ULPs. - typedef mat<3, 3, float, packed_lowp> packed_lowp_mat3; - - /// 3 by 3 matrix tightly packed in memory of double-precision floating-point numbers using high precision arithmetic in term of ULPs. - typedef mat<3, 3, double, packed_highp> packed_highp_dmat3; - - /// 3 by 3 matrix tightly packed in memory of double-precision floating-point numbers using medium precision arithmetic in term of ULPs. - typedef mat<3, 3, double, packed_mediump> packed_mediump_dmat3; - - /// 3 by 3 matrix tightly packed in memory of double-precision floating-point numbers using low precision arithmetic in term of ULPs. - typedef mat<3, 3, double, packed_lowp> packed_lowp_dmat3; - - // -- *mat4 -- - - /// 4 by 4 matrix aligned in memory of single-precision floating-point numbers using high precision arithmetic in term of ULPs. - typedef mat<4, 4, float, aligned_highp> aligned_highp_mat4; - - /// 4 by 4 matrix aligned in memory of single-precision floating-point numbers using medium precision arithmetic in term of ULPs. 
- typedef mat<4, 4, float, aligned_mediump> aligned_mediump_mat4; - - /// 4 by 4 matrix aligned in memory of single-precision floating-point numbers using low precision arithmetic in term of ULPs. - typedef mat<4, 4, float, aligned_lowp> aligned_lowp_mat4; - - /// 4 by 4 matrix aligned in memory of double-precision floating-point numbers using high precision arithmetic in term of ULPs. - typedef mat<4, 4, double, aligned_highp> aligned_highp_dmat4; - - /// 4 by 4 matrix aligned in memory of double-precision floating-point numbers using medium precision arithmetic in term of ULPs. - typedef mat<4, 4, double, aligned_mediump> aligned_mediump_dmat4; - - /// 4 by 4 matrix aligned in memory of double-precision floating-point numbers using low precision arithmetic in term of ULPs. - typedef mat<4, 4, double, aligned_lowp> aligned_lowp_dmat4; - - /// 4 by 4 matrix tightly packed in memory of single-precision floating-point numbers using high precision arithmetic in term of ULPs. - typedef mat<4, 4, float, packed_highp> packed_highp_mat4; - - /// 4 by 4 matrix tightly packed in memory of single-precision floating-point numbers using medium precision arithmetic in term of ULPs. - typedef mat<4, 4, float, packed_mediump> packed_mediump_mat4; - - /// 4 by 4 matrix tightly packed in memory of single-precision floating-point numbers using low precision arithmetic in term of ULPs. - typedef mat<4, 4, float, packed_lowp> packed_lowp_mat4; - - /// 4 by 4 matrix tightly packed in memory of double-precision floating-point numbers using high precision arithmetic in term of ULPs. - typedef mat<4, 4, double, packed_highp> packed_highp_dmat4; - - /// 4 by 4 matrix tightly packed in memory of double-precision floating-point numbers using medium precision arithmetic in term of ULPs. - typedef mat<4, 4, double, packed_mediump> packed_mediump_dmat4; - - /// 4 by 4 matrix tightly packed in memory of double-precision floating-point numbers using low precision arithmetic in term of ULPs. - typedef mat<4, 4, double, packed_lowp> packed_lowp_dmat4; - - // -- *mat2x2 -- - - /// 2 by 2 matrix aligned in memory of single-precision floating-point numbers using high precision arithmetic in term of ULPs. - typedef mat<2, 2, float, aligned_highp> aligned_highp_mat2x2; - - /// 2 by 2 matrix aligned in memory of single-precision floating-point numbers using medium precision arithmetic in term of ULPs. - typedef mat<2, 2, float, aligned_mediump> aligned_mediump_mat2x2; - - /// 2 by 2 matrix aligned in memory of single-precision floating-point numbers using low precision arithmetic in term of ULPs. - typedef mat<2, 2, float, aligned_lowp> aligned_lowp_mat2x2; - - /// 2 by 2 matrix aligned in memory of double-precision floating-point numbers using high precision arithmetic in term of ULPs. - typedef mat<2, 2, double, aligned_highp> aligned_highp_dmat2x2; - - /// 2 by 2 matrix aligned in memory of double-precision floating-point numbers using medium precision arithmetic in term of ULPs. - typedef mat<2, 2, double, aligned_mediump> aligned_mediump_dmat2x2; - - /// 2 by 2 matrix aligned in memory of double-precision floating-point numbers using low precision arithmetic in term of ULPs. - typedef mat<2, 2, double, aligned_lowp> aligned_lowp_dmat2x2; - - /// 2 by 2 matrix tightly packed in memory of single-precision floating-point numbers using high precision arithmetic in term of ULPs. 
- typedef mat<2, 2, float, packed_highp> packed_highp_mat2x2; - - /// 2 by 2 matrix tightly packed in memory of single-precision floating-point numbers using medium precision arithmetic in term of ULPs. - typedef mat<2, 2, float, packed_mediump> packed_mediump_mat2x2; - - /// 2 by 2 matrix tightly packed in memory of single-precision floating-point numbers using low precision arithmetic in term of ULPs. - typedef mat<2, 2, float, packed_lowp> packed_lowp_mat2x2; - - /// 2 by 2 matrix tightly packed in memory of double-precision floating-point numbers using high precision arithmetic in term of ULPs. - typedef mat<2, 2, double, packed_highp> packed_highp_dmat2x2; - - /// 2 by 2 matrix tightly packed in memory of double-precision floating-point numbers using medium precision arithmetic in term of ULPs. - typedef mat<2, 2, double, packed_mediump> packed_mediump_dmat2x2; - - /// 2 by 2 matrix tightly packed in memory of double-precision floating-point numbers using low precision arithmetic in term of ULPs. - typedef mat<2, 2, double, packed_lowp> packed_lowp_dmat2x2; - - // -- *mat2x3 -- - - /// 2 by 3 matrix aligned in memory of single-precision floating-point numbers using high precision arithmetic in term of ULPs. - typedef mat<2, 3, float, aligned_highp> aligned_highp_mat2x3; - - /// 2 by 3 matrix aligned in memory of single-precision floating-point numbers using medium precision arithmetic in term of ULPs. - typedef mat<2, 3, float, aligned_mediump> aligned_mediump_mat2x3; - - /// 2 by 3 matrix aligned in memory of single-precision floating-point numbers using low precision arithmetic in term of ULPs. - typedef mat<2, 3, float, aligned_lowp> aligned_lowp_mat2x3; - - /// 2 by 3 matrix aligned in memory of double-precision floating-point numbers using high precision arithmetic in term of ULPs. - typedef mat<2, 3, double, aligned_highp> aligned_highp_dmat2x3; - - /// 2 by 3 matrix aligned in memory of double-precision floating-point numbers using medium precision arithmetic in term of ULPs. - typedef mat<2, 3, double, aligned_mediump> aligned_mediump_dmat2x3; - - /// 2 by 3 matrix aligned in memory of double-precision floating-point numbers using low precision arithmetic in term of ULPs. - typedef mat<2, 3, double, aligned_lowp> aligned_lowp_dmat2x3; - - /// 2 by 3 matrix tightly packed in memory of single-precision floating-point numbers using high precision arithmetic in term of ULPs. - typedef mat<2, 3, float, packed_highp> packed_highp_mat2x3; - - /// 2 by 3 matrix tightly packed in memory of single-precision floating-point numbers using medium precision arithmetic in term of ULPs. - typedef mat<2, 3, float, packed_mediump> packed_mediump_mat2x3; - - /// 2 by 3 matrix tightly packed in memory of single-precision floating-point numbers using low precision arithmetic in term of ULPs. - typedef mat<2, 3, float, packed_lowp> packed_lowp_mat2x3; - - /// 2 by 3 matrix tightly packed in memory of double-precision floating-point numbers using high precision arithmetic in term of ULPs. - typedef mat<2, 3, double, packed_highp> packed_highp_dmat2x3; - - /// 2 by 3 matrix tightly packed in memory of double-precision floating-point numbers using medium precision arithmetic in term of ULPs. - typedef mat<2, 3, double, packed_mediump> packed_mediump_dmat2x3; - - /// 2 by 3 matrix tightly packed in memory of double-precision floating-point numbers using low precision arithmetic in term of ULPs. 
- typedef mat<2, 3, double, packed_lowp> packed_lowp_dmat2x3; - - // -- *mat2x4 -- - - /// 2 by 4 matrix aligned in memory of single-precision floating-point numbers using high precision arithmetic in term of ULPs. - typedef mat<2, 4, float, aligned_highp> aligned_highp_mat2x4; - - /// 2 by 4 matrix aligned in memory of single-precision floating-point numbers using medium precision arithmetic in term of ULPs. - typedef mat<2, 4, float, aligned_mediump> aligned_mediump_mat2x4; - - /// 2 by 4 matrix aligned in memory of single-precision floating-point numbers using low precision arithmetic in term of ULPs. - typedef mat<2, 4, float, aligned_lowp> aligned_lowp_mat2x4; - - /// 2 by 4 matrix aligned in memory of double-precision floating-point numbers using high precision arithmetic in term of ULPs. - typedef mat<2, 4, double, aligned_highp> aligned_highp_dmat2x4; - - /// 2 by 4 matrix aligned in memory of double-precision floating-point numbers using medium precision arithmetic in term of ULPs. - typedef mat<2, 4, double, aligned_mediump> aligned_mediump_dmat2x4; - - /// 2 by 4 matrix aligned in memory of double-precision floating-point numbers using low precision arithmetic in term of ULPs. - typedef mat<2, 4, double, aligned_lowp> aligned_lowp_dmat2x4; - - /// 2 by 4 matrix tightly packed in memory of single-precision floating-point numbers using high precision arithmetic in term of ULPs. - typedef mat<2, 4, float, packed_highp> packed_highp_mat2x4; - - /// 2 by 4 matrix tightly packed in memory of single-precision floating-point numbers using medium precision arithmetic in term of ULPs. - typedef mat<2, 4, float, packed_mediump> packed_mediump_mat2x4; - - /// 2 by 4 matrix tightly packed in memory of single-precision floating-point numbers using low precision arithmetic in term of ULPs. - typedef mat<2, 4, float, packed_lowp> packed_lowp_mat2x4; - - /// 2 by 4 matrix tightly packed in memory of double-precision floating-point numbers using high precision arithmetic in term of ULPs. - typedef mat<2, 4, double, packed_highp> packed_highp_dmat2x4; - - /// 2 by 4 matrix tightly packed in memory of double-precision floating-point numbers using medium precision arithmetic in term of ULPs. - typedef mat<2, 4, double, packed_mediump> packed_mediump_dmat2x4; - - /// 2 by 4 matrix tightly packed in memory of double-precision floating-point numbers using low precision arithmetic in term of ULPs. - typedef mat<2, 4, double, packed_lowp> packed_lowp_dmat2x4; - - // -- *mat3x2 -- - - /// 3 by 2 matrix aligned in memory of single-precision floating-point numbers using high precision arithmetic in term of ULPs. - typedef mat<3, 2, float, aligned_highp> aligned_highp_mat3x2; - - /// 3 by 2 matrix aligned in memory of single-precision floating-point numbers using medium precision arithmetic in term of ULPs. - typedef mat<3, 2, float, aligned_mediump> aligned_mediump_mat3x2; - - /// 3 by 2 matrix aligned in memory of single-precision floating-point numbers using low precision arithmetic in term of ULPs. - typedef mat<3, 2, float, aligned_lowp> aligned_lowp_mat3x2; - - /// 3 by 2 matrix aligned in memory of double-precision floating-point numbers using high precision arithmetic in term of ULPs. - typedef mat<3, 2, double, aligned_highp> aligned_highp_dmat3x2; - - /// 3 by 2 matrix aligned in memory of double-precision floating-point numbers using medium precision arithmetic in term of ULPs. 
- typedef mat<3, 2, double, aligned_mediump> aligned_mediump_dmat3x2; - - /// 3 by 2 matrix aligned in memory of double-precision floating-point numbers using low precision arithmetic in term of ULPs. - typedef mat<3, 2, double, aligned_lowp> aligned_lowp_dmat3x2; - - /// 3 by 2 matrix tightly packed in memory of single-precision floating-point numbers using high precision arithmetic in term of ULPs. - typedef mat<3, 2, float, packed_highp> packed_highp_mat3x2; - - /// 3 by 2 matrix tightly packed in memory of single-precision floating-point numbers using medium precision arithmetic in term of ULPs. - typedef mat<3, 2, float, packed_mediump> packed_mediump_mat3x2; - - /// 3 by 2 matrix tightly packed in memory of single-precision floating-point numbers using low precision arithmetic in term of ULPs. - typedef mat<3, 2, float, packed_lowp> packed_lowp_mat3x2; - - /// 3 by 2 matrix tightly packed in memory of double-precision floating-point numbers using high precision arithmetic in term of ULPs. - typedef mat<3, 2, double, packed_highp> packed_highp_dmat3x2; - - /// 3 by 2 matrix tightly packed in memory of double-precision floating-point numbers using medium precision arithmetic in term of ULPs. - typedef mat<3, 2, double, packed_mediump> packed_mediump_dmat3x2; - - /// 3 by 2 matrix tightly packed in memory of double-precision floating-point numbers using low precision arithmetic in term of ULPs. - typedef mat<3, 2, double, packed_lowp> packed_lowp_dmat3x2; - - // -- *mat3x3 -- - - /// 3 by 3 matrix aligned in memory of single-precision floating-point numbers using high precision arithmetic in term of ULPs. - typedef mat<3, 3, float, aligned_highp> aligned_highp_mat3x3; - - /// 3 by 3 matrix aligned in memory of single-precision floating-point numbers using medium precision arithmetic in term of ULPs. - typedef mat<3, 3, float, aligned_mediump> aligned_mediump_mat3x3; - - /// 3 by 3 matrix aligned in memory of single-precision floating-point numbers using low precision arithmetic in term of ULPs. - typedef mat<3, 3, float, aligned_lowp> aligned_lowp_mat3x3; - - /// 3 by 3 matrix aligned in memory of double-precision floating-point numbers using high precision arithmetic in term of ULPs. - typedef mat<3, 3, double, aligned_highp> aligned_highp_dmat3x3; - - /// 3 by 3 matrix aligned in memory of double-precision floating-point numbers using medium precision arithmetic in term of ULPs. - typedef mat<3, 3, double, aligned_mediump> aligned_mediump_dmat3x3; - - /// 3 by 3 matrix aligned in memory of double-precision floating-point numbers using low precision arithmetic in term of ULPs. - typedef mat<3, 3, double, aligned_lowp> aligned_lowp_dmat3x3; - - /// 3 by 3 matrix tightly packed in memory of single-precision floating-point numbers using high precision arithmetic in term of ULPs. - typedef mat<3, 3, float, packed_highp> packed_highp_mat3x3; - - /// 3 by 3 matrix tightly packed in memory of single-precision floating-point numbers using medium precision arithmetic in term of ULPs. - typedef mat<3, 3, float, packed_mediump> packed_mediump_mat3x3; - - /// 3 by 3 matrix tightly packed in memory of single-precision floating-point numbers using low precision arithmetic in term of ULPs. - typedef mat<3, 3, float, packed_lowp> packed_lowp_mat3x3; - - /// 3 by 3 matrix tightly packed in memory of double-precision floating-point numbers using high precision arithmetic in term of ULPs. 
- typedef mat<3, 3, double, packed_highp> packed_highp_dmat3x3; - - /// 3 by 3 matrix tightly packed in memory of double-precision floating-point numbers using medium precision arithmetic in term of ULPs. - typedef mat<3, 3, double, packed_mediump> packed_mediump_dmat3x3; - - /// 3 by 3 matrix tightly packed in memory of double-precision floating-point numbers using low precision arithmetic in term of ULPs. - typedef mat<3, 3, double, packed_lowp> packed_lowp_dmat3x3; - - // -- *mat3x4 -- - - /// 3 by 4 matrix aligned in memory of single-precision floating-point numbers using high precision arithmetic in term of ULPs. - typedef mat<3, 4, float, aligned_highp> aligned_highp_mat3x4; - - /// 3 by 4 matrix aligned in memory of single-precision floating-point numbers using medium precision arithmetic in term of ULPs. - typedef mat<3, 4, float, aligned_mediump> aligned_mediump_mat3x4; - - /// 3 by 4 matrix aligned in memory of single-precision floating-point numbers using low precision arithmetic in term of ULPs. - typedef mat<3, 4, float, aligned_lowp> aligned_lowp_mat3x4; - - /// 3 by 4 matrix aligned in memory of double-precision floating-point numbers using high precision arithmetic in term of ULPs. - typedef mat<3, 4, double, aligned_highp> aligned_highp_dmat3x4; - - /// 3 by 4 matrix aligned in memory of double-precision floating-point numbers using medium precision arithmetic in term of ULPs. - typedef mat<3, 4, double, aligned_mediump> aligned_mediump_dmat3x4; - - /// 3 by 4 matrix aligned in memory of double-precision floating-point numbers using low precision arithmetic in term of ULPs. - typedef mat<3, 4, double, aligned_lowp> aligned_lowp_dmat3x4; - - /// 3 by 4 matrix tightly packed in memory of single-precision floating-point numbers using high precision arithmetic in term of ULPs. - typedef mat<3, 4, float, packed_highp> packed_highp_mat3x4; - - /// 3 by 4 matrix tightly packed in memory of single-precision floating-point numbers using medium precision arithmetic in term of ULPs. - typedef mat<3, 4, float, packed_mediump> packed_mediump_mat3x4; - - /// 3 by 4 matrix tightly packed in memory of single-precision floating-point numbers using low precision arithmetic in term of ULPs. - typedef mat<3, 4, float, packed_lowp> packed_lowp_mat3x4; - - /// 3 by 4 matrix tightly packed in memory of double-precision floating-point numbers using high precision arithmetic in term of ULPs. - typedef mat<3, 4, double, packed_highp> packed_highp_dmat3x4; - - /// 3 by 4 matrix tightly packed in memory of double-precision floating-point numbers using medium precision arithmetic in term of ULPs. - typedef mat<3, 4, double, packed_mediump> packed_mediump_dmat3x4; - - /// 3 by 4 matrix tightly packed in memory of double-precision floating-point numbers using low precision arithmetic in term of ULPs. - typedef mat<3, 4, double, packed_lowp> packed_lowp_dmat3x4; - - // -- *mat4x2 -- - - /// 4 by 2 matrix aligned in memory of single-precision floating-point numbers using high precision arithmetic in term of ULPs. - typedef mat<4, 2, float, aligned_highp> aligned_highp_mat4x2; - - /// 4 by 2 matrix aligned in memory of single-precision floating-point numbers using medium precision arithmetic in term of ULPs. - typedef mat<4, 2, float, aligned_mediump> aligned_mediump_mat4x2; - - /// 4 by 2 matrix aligned in memory of single-precision floating-point numbers using low precision arithmetic in term of ULPs. 
- typedef mat<4, 2, float, aligned_lowp> aligned_lowp_mat4x2; - - /// 4 by 2 matrix aligned in memory of double-precision floating-point numbers using high precision arithmetic in term of ULPs. - typedef mat<4, 2, double, aligned_highp> aligned_highp_dmat4x2; - - /// 4 by 2 matrix aligned in memory of double-precision floating-point numbers using medium precision arithmetic in term of ULPs. - typedef mat<4, 2, double, aligned_mediump> aligned_mediump_dmat4x2; - - /// 4 by 2 matrix aligned in memory of double-precision floating-point numbers using low precision arithmetic in term of ULPs. - typedef mat<4, 2, double, aligned_lowp> aligned_lowp_dmat4x2; - - /// 4 by 2 matrix tightly packed in memory of single-precision floating-point numbers using high precision arithmetic in term of ULPs. - typedef mat<4, 2, float, packed_highp> packed_highp_mat4x2; - - /// 4 by 2 matrix tightly packed in memory of single-precision floating-point numbers using medium precision arithmetic in term of ULPs. - typedef mat<4, 2, float, packed_mediump> packed_mediump_mat4x2; - - /// 4 by 2 matrix tightly packed in memory of single-precision floating-point numbers using low precision arithmetic in term of ULPs. - typedef mat<4, 2, float, packed_lowp> packed_lowp_mat4x2; - - /// 4 by 2 matrix tightly packed in memory of double-precision floating-point numbers using high precision arithmetic in term of ULPs. - typedef mat<4, 2, double, packed_highp> packed_highp_dmat4x2; - - /// 4 by 2 matrix tightly packed in memory of double-precision floating-point numbers using medium precision arithmetic in term of ULPs. - typedef mat<4, 2, double, packed_mediump> packed_mediump_dmat4x2; - - /// 4 by 2 matrix tightly packed in memory of double-precision floating-point numbers using low precision arithmetic in term of ULPs. - typedef mat<4, 2, double, packed_lowp> packed_lowp_dmat4x2; - - // -- *mat4x3 -- - - /// 4 by 3 matrix aligned in memory of single-precision floating-point numbers using high precision arithmetic in term of ULPs. - typedef mat<4, 3, float, aligned_highp> aligned_highp_mat4x3; - - /// 4 by 3 matrix aligned in memory of single-precision floating-point numbers using medium precision arithmetic in term of ULPs. - typedef mat<4, 3, float, aligned_mediump> aligned_mediump_mat4x3; - - /// 4 by 3 matrix aligned in memory of single-precision floating-point numbers using low precision arithmetic in term of ULPs. - typedef mat<4, 3, float, aligned_lowp> aligned_lowp_mat4x3; - - /// 4 by 3 matrix aligned in memory of double-precision floating-point numbers using high precision arithmetic in term of ULPs. - typedef mat<4, 3, double, aligned_highp> aligned_highp_dmat4x3; - - /// 4 by 3 matrix aligned in memory of double-precision floating-point numbers using medium precision arithmetic in term of ULPs. - typedef mat<4, 3, double, aligned_mediump> aligned_mediump_dmat4x3; - - /// 4 by 3 matrix aligned in memory of double-precision floating-point numbers using low precision arithmetic in term of ULPs. - typedef mat<4, 3, double, aligned_lowp> aligned_lowp_dmat4x3; - - /// 4 by 3 matrix tightly packed in memory of single-precision floating-point numbers using high precision arithmetic in term of ULPs. - typedef mat<4, 3, float, packed_highp> packed_highp_mat4x3; - - /// 4 by 3 matrix tightly packed in memory of single-precision floating-point numbers using medium precision arithmetic in term of ULPs. 
- typedef mat<4, 3, float, packed_mediump> packed_mediump_mat4x3; - - /// 4 by 3 matrix tightly packed in memory of single-precision floating-point numbers using low precision arithmetic in term of ULPs. - typedef mat<4, 3, float, packed_lowp> packed_lowp_mat4x3; - - /// 4 by 3 matrix tightly packed in memory of double-precision floating-point numbers using high precision arithmetic in term of ULPs. - typedef mat<4, 3, double, packed_highp> packed_highp_dmat4x3; - - /// 4 by 3 matrix tightly packed in memory of double-precision floating-point numbers using medium precision arithmetic in term of ULPs. - typedef mat<4, 3, double, packed_mediump> packed_mediump_dmat4x3; - - /// 4 by 3 matrix tightly packed in memory of double-precision floating-point numbers using low precision arithmetic in term of ULPs. - typedef mat<4, 3, double, packed_lowp> packed_lowp_dmat4x3; - - // -- *mat4x4 -- - - /// 4 by 4 matrix aligned in memory of single-precision floating-point numbers using high precision arithmetic in term of ULPs. - typedef mat<4, 4, float, aligned_highp> aligned_highp_mat4x4; - - /// 4 by 4 matrix aligned in memory of single-precision floating-point numbers using medium precision arithmetic in term of ULPs. - typedef mat<4, 4, float, aligned_mediump> aligned_mediump_mat4x4; - - /// 4 by 4 matrix aligned in memory of single-precision floating-point numbers using low precision arithmetic in term of ULPs. - typedef mat<4, 4, float, aligned_lowp> aligned_lowp_mat4x4; - - /// 4 by 4 matrix aligned in memory of double-precision floating-point numbers using high precision arithmetic in term of ULPs. - typedef mat<4, 4, double, aligned_highp> aligned_highp_dmat4x4; - - /// 4 by 4 matrix aligned in memory of double-precision floating-point numbers using medium precision arithmetic in term of ULPs. - typedef mat<4, 4, double, aligned_mediump> aligned_mediump_dmat4x4; - - /// 4 by 4 matrix aligned in memory of double-precision floating-point numbers using low precision arithmetic in term of ULPs. - typedef mat<4, 4, double, aligned_lowp> aligned_lowp_dmat4x4; - - /// 4 by 4 matrix tightly packed in memory of single-precision floating-point numbers using high precision arithmetic in term of ULPs. - typedef mat<4, 4, float, packed_highp> packed_highp_mat4x4; - - /// 4 by 4 matrix tightly packed in memory of single-precision floating-point numbers using medium precision arithmetic in term of ULPs. - typedef mat<4, 4, float, packed_mediump> packed_mediump_mat4x4; - - /// 4 by 4 matrix tightly packed in memory of single-precision floating-point numbers using low precision arithmetic in term of ULPs. - typedef mat<4, 4, float, packed_lowp> packed_lowp_mat4x4; - - /// 4 by 4 matrix tightly packed in memory of double-precision floating-point numbers using high precision arithmetic in term of ULPs. - typedef mat<4, 4, double, packed_highp> packed_highp_dmat4x4; - - /// 4 by 4 matrix tightly packed in memory of double-precision floating-point numbers using medium precision arithmetic in term of ULPs. - typedef mat<4, 4, double, packed_mediump> packed_mediump_dmat4x4; - - /// 4 by 4 matrix tightly packed in memory of double-precision floating-point numbers using low precision arithmetic in term of ULPs. 
- typedef mat<4, 4, double, packed_lowp> packed_lowp_dmat4x4; - - // -- default -- - -#if(defined(GLM_PRECISION_LOWP_FLOAT)) - typedef aligned_lowp_vec1 aligned_vec1; - typedef aligned_lowp_vec2 aligned_vec2; - typedef aligned_lowp_vec3 aligned_vec3; - typedef aligned_lowp_vec4 aligned_vec4; - typedef packed_lowp_vec1 packed_vec1; - typedef packed_lowp_vec2 packed_vec2; - typedef packed_lowp_vec3 packed_vec3; - typedef packed_lowp_vec4 packed_vec4; - - typedef aligned_lowp_mat2 aligned_mat2; - typedef aligned_lowp_mat3 aligned_mat3; - typedef aligned_lowp_mat4 aligned_mat4; - typedef packed_lowp_mat2 packed_mat2; - typedef packed_lowp_mat3 packed_mat3; - typedef packed_lowp_mat4 packed_mat4; - - typedef aligned_lowp_mat2x2 aligned_mat2x2; - typedef aligned_lowp_mat2x3 aligned_mat2x3; - typedef aligned_lowp_mat2x4 aligned_mat2x4; - typedef aligned_lowp_mat3x2 aligned_mat3x2; - typedef aligned_lowp_mat3x3 aligned_mat3x3; - typedef aligned_lowp_mat3x4 aligned_mat3x4; - typedef aligned_lowp_mat4x2 aligned_mat4x2; - typedef aligned_lowp_mat4x3 aligned_mat4x3; - typedef aligned_lowp_mat4x4 aligned_mat4x4; - typedef packed_lowp_mat2x2 packed_mat2x2; - typedef packed_lowp_mat2x3 packed_mat2x3; - typedef packed_lowp_mat2x4 packed_mat2x4; - typedef packed_lowp_mat3x2 packed_mat3x2; - typedef packed_lowp_mat3x3 packed_mat3x3; - typedef packed_lowp_mat3x4 packed_mat3x4; - typedef packed_lowp_mat4x2 packed_mat4x2; - typedef packed_lowp_mat4x3 packed_mat4x3; - typedef packed_lowp_mat4x4 packed_mat4x4; -#elif(defined(GLM_PRECISION_MEDIUMP_FLOAT)) - typedef aligned_mediump_vec1 aligned_vec1; - typedef aligned_mediump_vec2 aligned_vec2; - typedef aligned_mediump_vec3 aligned_vec3; - typedef aligned_mediump_vec4 aligned_vec4; - typedef packed_mediump_vec1 packed_vec1; - typedef packed_mediump_vec2 packed_vec2; - typedef packed_mediump_vec3 packed_vec3; - typedef packed_mediump_vec4 packed_vec4; - - typedef aligned_mediump_mat2 aligned_mat2; - typedef aligned_mediump_mat3 aligned_mat3; - typedef aligned_mediump_mat4 aligned_mat4; - typedef packed_mediump_mat2 packed_mat2; - typedef packed_mediump_mat3 packed_mat3; - typedef packed_mediump_mat4 packed_mat4; - - typedef aligned_mediump_mat2x2 aligned_mat2x2; - typedef aligned_mediump_mat2x3 aligned_mat2x3; - typedef aligned_mediump_mat2x4 aligned_mat2x4; - typedef aligned_mediump_mat3x2 aligned_mat3x2; - typedef aligned_mediump_mat3x3 aligned_mat3x3; - typedef aligned_mediump_mat3x4 aligned_mat3x4; - typedef aligned_mediump_mat4x2 aligned_mat4x2; - typedef aligned_mediump_mat4x3 aligned_mat4x3; - typedef aligned_mediump_mat4x4 aligned_mat4x4; - typedef packed_mediump_mat2x2 packed_mat2x2; - typedef packed_mediump_mat2x3 packed_mat2x3; - typedef packed_mediump_mat2x4 packed_mat2x4; - typedef packed_mediump_mat3x2 packed_mat3x2; - typedef packed_mediump_mat3x3 packed_mat3x3; - typedef packed_mediump_mat3x4 packed_mat3x4; - typedef packed_mediump_mat4x2 packed_mat4x2; - typedef packed_mediump_mat4x3 packed_mat4x3; - typedef packed_mediump_mat4x4 packed_mat4x4; -#else //defined(GLM_PRECISION_HIGHP_FLOAT) - /// 1 component vector aligned in memory of single-precision floating-point numbers. - typedef aligned_highp_vec1 aligned_vec1; - - /// 2 components vector aligned in memory of single-precision floating-point numbers. - typedef aligned_highp_vec2 aligned_vec2; - - /// 3 components vector aligned in memory of single-precision floating-point numbers. 
- typedef aligned_highp_vec3 aligned_vec3; - - /// 4 components vector aligned in memory of single-precision floating-point numbers. - typedef aligned_highp_vec4 aligned_vec4; - - /// 1 component vector tightly packed in memory of single-precision floating-point numbers. - typedef packed_highp_vec1 packed_vec1; - - /// 2 components vector tightly packed in memory of single-precision floating-point numbers. - typedef packed_highp_vec2 packed_vec2; - - /// 3 components vector tightly packed in memory of single-precision floating-point numbers. - typedef packed_highp_vec3 packed_vec3; - - /// 4 components vector tightly packed in memory of single-precision floating-point numbers. - typedef packed_highp_vec4 packed_vec4; - - /// 2 by 2 matrix tightly aligned in memory of single-precision floating-point numbers. - typedef aligned_highp_mat2 aligned_mat2; - - /// 3 by 3 matrix tightly aligned in memory of single-precision floating-point numbers. - typedef aligned_highp_mat3 aligned_mat3; - - /// 4 by 4 matrix tightly aligned in memory of single-precision floating-point numbers. - typedef aligned_highp_mat4 aligned_mat4; - - /// 2 by 2 matrix tightly packed in memory of single-precision floating-point numbers. - typedef packed_highp_mat2 packed_mat2; - - /// 3 by 3 matrix tightly packed in memory of single-precision floating-point numbers. - typedef packed_highp_mat3 packed_mat3; - - /// 4 by 4 matrix tightly packed in memory of single-precision floating-point numbers. - typedef packed_highp_mat4 packed_mat4; - - /// 2 by 2 matrix tightly aligned in memory of single-precision floating-point numbers. - typedef aligned_highp_mat2x2 aligned_mat2x2; - - /// 2 by 3 matrix tightly aligned in memory of single-precision floating-point numbers. - typedef aligned_highp_mat2x3 aligned_mat2x3; - - /// 2 by 4 matrix tightly aligned in memory of single-precision floating-point numbers. - typedef aligned_highp_mat2x4 aligned_mat2x4; - - /// 3 by 2 matrix tightly aligned in memory of single-precision floating-point numbers. - typedef aligned_highp_mat3x2 aligned_mat3x2; - - /// 3 by 3 matrix tightly aligned in memory of single-precision floating-point numbers. - typedef aligned_highp_mat3x3 aligned_mat3x3; - - /// 3 by 4 matrix tightly aligned in memory of single-precision floating-point numbers. - typedef aligned_highp_mat3x4 aligned_mat3x4; - - /// 4 by 2 matrix tightly aligned in memory of single-precision floating-point numbers. - typedef aligned_highp_mat4x2 aligned_mat4x2; - - /// 4 by 3 matrix tightly aligned in memory of single-precision floating-point numbers. - typedef aligned_highp_mat4x3 aligned_mat4x3; - - /// 4 by 4 matrix tightly aligned in memory of single-precision floating-point numbers. - typedef aligned_highp_mat4x4 aligned_mat4x4; - - /// 2 by 2 matrix tightly packed in memory of single-precision floating-point numbers. - typedef packed_highp_mat2x2 packed_mat2x2; - - /// 2 by 3 matrix tightly packed in memory of single-precision floating-point numbers. - typedef packed_highp_mat2x3 packed_mat2x3; - - /// 2 by 4 matrix tightly packed in memory of single-precision floating-point numbers. - typedef packed_highp_mat2x4 packed_mat2x4; - - /// 3 by 2 matrix tightly packed in memory of single-precision floating-point numbers. - typedef packed_highp_mat3x2 packed_mat3x2; - - /// 3 by 3 matrix tightly packed in memory of single-precision floating-point numbers. - typedef packed_highp_mat3x3 packed_mat3x3; - - /// 3 by 4 matrix tightly packed in memory of single-precision floating-point numbers. 
- typedef packed_highp_mat3x4 packed_mat3x4; - - /// 4 by 2 matrix tightly packed in memory of single-precision floating-point numbers. - typedef packed_highp_mat4x2 packed_mat4x2; - - /// 4 by 3 matrix tightly packed in memory of single-precision floating-point numbers. - typedef packed_highp_mat4x3 packed_mat4x3; - - /// 4 by 4 matrix tightly packed in memory of single-precision floating-point numbers. - typedef packed_highp_mat4x4 packed_mat4x4; -#endif//GLM_PRECISION - -#if(defined(GLM_PRECISION_LOWP_DOUBLE)) - typedef aligned_lowp_dvec1 aligned_dvec1; - typedef aligned_lowp_dvec2 aligned_dvec2; - typedef aligned_lowp_dvec3 aligned_dvec3; - typedef aligned_lowp_dvec4 aligned_dvec4; - typedef packed_lowp_dvec1 packed_dvec1; - typedef packed_lowp_dvec2 packed_dvec2; - typedef packed_lowp_dvec3 packed_dvec3; - typedef packed_lowp_dvec4 packed_dvec4; - - typedef aligned_lowp_dmat2 aligned_dmat2; - typedef aligned_lowp_dmat3 aligned_dmat3; - typedef aligned_lowp_dmat4 aligned_dmat4; - typedef packed_lowp_dmat2 packed_dmat2; - typedef packed_lowp_dmat3 packed_dmat3; - typedef packed_lowp_dmat4 packed_dmat4; - - typedef aligned_lowp_dmat2x2 aligned_dmat2x2; - typedef aligned_lowp_dmat2x3 aligned_dmat2x3; - typedef aligned_lowp_dmat2x4 aligned_dmat2x4; - typedef aligned_lowp_dmat3x2 aligned_dmat3x2; - typedef aligned_lowp_dmat3x3 aligned_dmat3x3; - typedef aligned_lowp_dmat3x4 aligned_dmat3x4; - typedef aligned_lowp_dmat4x2 aligned_dmat4x2; - typedef aligned_lowp_dmat4x3 aligned_dmat4x3; - typedef aligned_lowp_dmat4x4 aligned_dmat4x4; - typedef packed_lowp_dmat2x2 packed_dmat2x2; - typedef packed_lowp_dmat2x3 packed_dmat2x3; - typedef packed_lowp_dmat2x4 packed_dmat2x4; - typedef packed_lowp_dmat3x2 packed_dmat3x2; - typedef packed_lowp_dmat3x3 packed_dmat3x3; - typedef packed_lowp_dmat3x4 packed_dmat3x4; - typedef packed_lowp_dmat4x2 packed_dmat4x2; - typedef packed_lowp_dmat4x3 packed_dmat4x3; - typedef packed_lowp_dmat4x4 packed_dmat4x4; -#elif(defined(GLM_PRECISION_MEDIUMP_DOUBLE)) - typedef aligned_mediump_dvec1 aligned_dvec1; - typedef aligned_mediump_dvec2 aligned_dvec2; - typedef aligned_mediump_dvec3 aligned_dvec3; - typedef aligned_mediump_dvec4 aligned_dvec4; - typedef packed_mediump_dvec1 packed_dvec1; - typedef packed_mediump_dvec2 packed_dvec2; - typedef packed_mediump_dvec3 packed_dvec3; - typedef packed_mediump_dvec4 packed_dvec4; - - typedef aligned_mediump_dmat2 aligned_dmat2; - typedef aligned_mediump_dmat3 aligned_dmat3; - typedef aligned_mediump_dmat4 aligned_dmat4; - typedef packed_mediump_dmat2 packed_dmat2; - typedef packed_mediump_dmat3 packed_dmat3; - typedef packed_mediump_dmat4 packed_dmat4; - - typedef aligned_mediump_dmat2x2 aligned_dmat2x2; - typedef aligned_mediump_dmat2x3 aligned_dmat2x3; - typedef aligned_mediump_dmat2x4 aligned_dmat2x4; - typedef aligned_mediump_dmat3x2 aligned_dmat3x2; - typedef aligned_mediump_dmat3x3 aligned_dmat3x3; - typedef aligned_mediump_dmat3x4 aligned_dmat3x4; - typedef aligned_mediump_dmat4x2 aligned_dmat4x2; - typedef aligned_mediump_dmat4x3 aligned_dmat4x3; - typedef aligned_mediump_dmat4x4 aligned_dmat4x4; - typedef packed_mediump_dmat2x2 packed_dmat2x2; - typedef packed_mediump_dmat2x3 packed_dmat2x3; - typedef packed_mediump_dmat2x4 packed_dmat2x4; - typedef packed_mediump_dmat3x2 packed_dmat3x2; - typedef packed_mediump_dmat3x3 packed_dmat3x3; - typedef packed_mediump_dmat3x4 packed_dmat3x4; - typedef packed_mediump_dmat4x2 packed_dmat4x2; - typedef packed_mediump_dmat4x3 packed_dmat4x3; - typedef packed_mediump_dmat4x4 
packed_dmat4x4; -#else //defined(GLM_PRECISION_HIGHP_DOUBLE) - /// 1 component vector aligned in memory of double-precision floating-point numbers. - typedef aligned_highp_dvec1 aligned_dvec1; - - /// 2 components vector aligned in memory of double-precision floating-point numbers. - typedef aligned_highp_dvec2 aligned_dvec2; - - /// 3 components vector aligned in memory of double-precision floating-point numbers. - typedef aligned_highp_dvec3 aligned_dvec3; - - /// 4 components vector aligned in memory of double-precision floating-point numbers. - typedef aligned_highp_dvec4 aligned_dvec4; - - /// 1 component vector tightly packed in memory of double-precision floating-point numbers. - typedef packed_highp_dvec1 packed_dvec1; - - /// 2 components vector tightly packed in memory of double-precision floating-point numbers. - typedef packed_highp_dvec2 packed_dvec2; - - /// 3 components vector tightly packed in memory of double-precision floating-point numbers. - typedef packed_highp_dvec3 packed_dvec3; - - /// 4 components vector tightly packed in memory of double-precision floating-point numbers. - typedef packed_highp_dvec4 packed_dvec4; - - /// 2 by 2 matrix tightly aligned in memory of double-precision floating-point numbers. - typedef aligned_highp_dmat2 aligned_dmat2; - - /// 3 by 3 matrix tightly aligned in memory of double-precision floating-point numbers. - typedef aligned_highp_dmat3 aligned_dmat3; - - /// 4 by 4 matrix tightly aligned in memory of double-precision floating-point numbers. - typedef aligned_highp_dmat4 aligned_dmat4; - - /// 2 by 2 matrix tightly packed in memory of double-precision floating-point numbers. - typedef packed_highp_dmat2 packed_dmat2; - - /// 3 by 3 matrix tightly packed in memory of double-precision floating-point numbers. - typedef packed_highp_dmat3 packed_dmat3; - - /// 4 by 4 matrix tightly packed in memory of double-precision floating-point numbers. - typedef packed_highp_dmat4 packed_dmat4; - - /// 2 by 2 matrix tightly aligned in memory of double-precision floating-point numbers. - typedef aligned_highp_dmat2x2 aligned_dmat2x2; - - /// 2 by 3 matrix tightly aligned in memory of double-precision floating-point numbers. - typedef aligned_highp_dmat2x3 aligned_dmat2x3; - - /// 2 by 4 matrix tightly aligned in memory of double-precision floating-point numbers. - typedef aligned_highp_dmat2x4 aligned_dmat2x4; - - /// 3 by 2 matrix tightly aligned in memory of double-precision floating-point numbers. - typedef aligned_highp_dmat3x2 aligned_dmat3x2; - - /// 3 by 3 matrix tightly aligned in memory of double-precision floating-point numbers. - typedef aligned_highp_dmat3x3 aligned_dmat3x3; - - /// 3 by 4 matrix tightly aligned in memory of double-precision floating-point numbers. - typedef aligned_highp_dmat3x4 aligned_dmat3x4; - - /// 4 by 2 matrix tightly aligned in memory of double-precision floating-point numbers. - typedef aligned_highp_dmat4x2 aligned_dmat4x2; - - /// 4 by 3 matrix tightly aligned in memory of double-precision floating-point numbers. - typedef aligned_highp_dmat4x3 aligned_dmat4x3; - - /// 4 by 4 matrix tightly aligned in memory of double-precision floating-point numbers. - typedef aligned_highp_dmat4x4 aligned_dmat4x4; - - /// 2 by 2 matrix tightly packed in memory of double-precision floating-point numbers. - typedef packed_highp_dmat2x2 packed_dmat2x2; - - /// 2 by 3 matrix tightly packed in memory of double-precision floating-point numbers. 
- typedef packed_highp_dmat2x3 packed_dmat2x3; - - /// 2 by 4 matrix tightly packed in memory of double-precision floating-point numbers. - typedef packed_highp_dmat2x4 packed_dmat2x4; - - /// 3 by 2 matrix tightly packed in memory of double-precision floating-point numbers. - typedef packed_highp_dmat3x2 packed_dmat3x2; - - /// 3 by 3 matrix tightly packed in memory of double-precision floating-point numbers. - typedef packed_highp_dmat3x3 packed_dmat3x3; - - /// 3 by 4 matrix tightly packed in memory of double-precision floating-point numbers. - typedef packed_highp_dmat3x4 packed_dmat3x4; - - /// 4 by 2 matrix tightly packed in memory of double-precision floating-point numbers. - typedef packed_highp_dmat4x2 packed_dmat4x2; - - /// 4 by 3 matrix tightly packed in memory of double-precision floating-point numbers. - typedef packed_highp_dmat4x3 packed_dmat4x3; - - /// 4 by 4 matrix tightly packed in memory of double-precision floating-point numbers. - typedef packed_highp_dmat4x4 packed_dmat4x4; -#endif//GLM_PRECISION - -#if(defined(GLM_PRECISION_LOWP_INT)) - typedef aligned_lowp_ivec1 aligned_ivec1; - typedef aligned_lowp_ivec2 aligned_ivec2; - typedef aligned_lowp_ivec3 aligned_ivec3; - typedef aligned_lowp_ivec4 aligned_ivec4; -#elif(defined(GLM_PRECISION_MEDIUMP_INT)) - typedef aligned_mediump_ivec1 aligned_ivec1; - typedef aligned_mediump_ivec2 aligned_ivec2; - typedef aligned_mediump_ivec3 aligned_ivec3; - typedef aligned_mediump_ivec4 aligned_ivec4; -#else //defined(GLM_PRECISION_HIGHP_INT) - /// 1 component vector aligned in memory of signed integer numbers. - typedef aligned_highp_ivec1 aligned_ivec1; - - /// 2 components vector aligned in memory of signed integer numbers. - typedef aligned_highp_ivec2 aligned_ivec2; - - /// 3 components vector aligned in memory of signed integer numbers. - typedef aligned_highp_ivec3 aligned_ivec3; - - /// 4 components vector aligned in memory of signed integer numbers. - typedef aligned_highp_ivec4 aligned_ivec4; - - /// 1 component vector tightly packed in memory of signed integer numbers. - typedef packed_highp_ivec1 packed_ivec1; - - /// 2 components vector tightly packed in memory of signed integer numbers. - typedef packed_highp_ivec2 packed_ivec2; - - /// 3 components vector tightly packed in memory of signed integer numbers. - typedef packed_highp_ivec3 packed_ivec3; - - /// 4 components vector tightly packed in memory of signed integer numbers. - typedef packed_highp_ivec4 packed_ivec4; -#endif//GLM_PRECISION - - // -- Unsigned integer definition -- - -#if(defined(GLM_PRECISION_LOWP_UINT)) - typedef aligned_lowp_uvec1 aligned_uvec1; - typedef aligned_lowp_uvec2 aligned_uvec2; - typedef aligned_lowp_uvec3 aligned_uvec3; - typedef aligned_lowp_uvec4 aligned_uvec4; -#elif(defined(GLM_PRECISION_MEDIUMP_UINT)) - typedef aligned_mediump_uvec1 aligned_uvec1; - typedef aligned_mediump_uvec2 aligned_uvec2; - typedef aligned_mediump_uvec3 aligned_uvec3; - typedef aligned_mediump_uvec4 aligned_uvec4; -#else //defined(GLM_PRECISION_HIGHP_UINT) - /// 1 component vector aligned in memory of unsigned integer numbers. - typedef aligned_highp_uvec1 aligned_uvec1; - - /// 2 components vector aligned in memory of unsigned integer numbers. - typedef aligned_highp_uvec2 aligned_uvec2; - - /// 3 components vector aligned in memory of unsigned integer numbers. - typedef aligned_highp_uvec3 aligned_uvec3; - - /// 4 components vector aligned in memory of unsigned integer numbers. 
- typedef aligned_highp_uvec4 aligned_uvec4; - - /// 1 component vector tightly packed in memory of unsigned integer numbers. - typedef packed_highp_uvec1 packed_uvec1; - - /// 2 components vector tightly packed in memory of unsigned integer numbers. - typedef packed_highp_uvec2 packed_uvec2; - - /// 3 components vector tightly packed in memory of unsigned integer numbers. - typedef packed_highp_uvec3 packed_uvec3; - - /// 4 components vector tightly packed in memory of unsigned integer numbers. - typedef packed_highp_uvec4 packed_uvec4; -#endif//GLM_PRECISION - -#if(defined(GLM_PRECISION_LOWP_BOOL)) - typedef aligned_lowp_bvec1 aligned_bvec1; - typedef aligned_lowp_bvec2 aligned_bvec2; - typedef aligned_lowp_bvec3 aligned_bvec3; - typedef aligned_lowp_bvec4 aligned_bvec4; -#elif(defined(GLM_PRECISION_MEDIUMP_BOOL)) - typedef aligned_mediump_bvec1 aligned_bvec1; - typedef aligned_mediump_bvec2 aligned_bvec2; - typedef aligned_mediump_bvec3 aligned_bvec3; - typedef aligned_mediump_bvec4 aligned_bvec4; -#else //defined(GLM_PRECISION_HIGHP_BOOL) - /// 1 component vector aligned in memory of bool values. - typedef aligned_highp_bvec1 aligned_bvec1; - - /// 2 components vector aligned in memory of bool values. - typedef aligned_highp_bvec2 aligned_bvec2; - - /// 3 components vector aligned in memory of bool values. - typedef aligned_highp_bvec3 aligned_bvec3; - - /// 4 components vector aligned in memory of bool values. - typedef aligned_highp_bvec4 aligned_bvec4; - - /// 1 components vector tightly packed in memory of bool values. - typedef packed_highp_bvec1 packed_bvec1; - - /// 2 components vector tightly packed in memory of bool values. - typedef packed_highp_bvec2 packed_bvec2; - - /// 3 components vector tightly packed in memory of bool values. - typedef packed_highp_bvec3 packed_bvec3; - - /// 4 components vector tightly packed in memory of bool values. - typedef packed_highp_bvec4 packed_bvec4; -#endif//GLM_PRECISION - - /// @} -}//namespace glm diff --git a/third_party/glm/gtc/type_precision.hpp b/third_party/glm/gtc/type_precision.hpp deleted file mode 100755 index 250bc4f..0000000 --- a/third_party/glm/gtc/type_precision.hpp +++ /dev/null @@ -1,2138 +0,0 @@ -/// @ref gtc_type_precision -/// @file glm/gtc/type_precision.hpp -/// -/// @see core (dependence) -/// @see gtc_quaternion (dependence) -/// -/// @defgroup gtc_type_precision GLM_GTC_type_precision -/// @ingroup gtc -/// -/// Include to use the features of this extension. -/// -/// Defines specific C++-based qualifier types. - -#pragma once - -// Dependency: -#include "../gtc/quaternion.hpp" -#include "../gtc/vec1.hpp" -#include "../ext/scalar_int_sized.hpp" -#include "../ext/scalar_uint_sized.hpp" -#include "../detail/type_vec2.hpp" -#include "../detail/type_vec3.hpp" -#include "../detail/type_vec4.hpp" -#include "../detail/type_mat2x2.hpp" -#include "../detail/type_mat2x3.hpp" -#include "../detail/type_mat2x4.hpp" -#include "../detail/type_mat3x2.hpp" -#include "../detail/type_mat3x3.hpp" -#include "../detail/type_mat3x4.hpp" -#include "../detail/type_mat4x2.hpp" -#include "../detail/type_mat4x3.hpp" -#include "../detail/type_mat4x4.hpp" - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# pragma message("GLM: GLM_GTC_type_precision extension included") -#endif - -namespace glm -{ - /////////////////////////// - // Signed int vector types - - /// @addtogroup gtc_type_precision - /// @{ - - /// Low qualifier 8 bit signed integer type. 
- /// @see gtc_type_precision - typedef detail::int8 lowp_int8; - - /// Low qualifier 16 bit signed integer type. - /// @see gtc_type_precision - typedef detail::int16 lowp_int16; - - /// Low qualifier 32 bit signed integer type. - /// @see gtc_type_precision - typedef detail::int32 lowp_int32; - - /// Low qualifier 64 bit signed integer type. - /// @see gtc_type_precision - typedef detail::int64 lowp_int64; - - /// Low qualifier 8 bit signed integer type. - /// @see gtc_type_precision - typedef detail::int8 lowp_int8_t; - - /// Low qualifier 16 bit signed integer type. - /// @see gtc_type_precision - typedef detail::int16 lowp_int16_t; - - /// Low qualifier 32 bit signed integer type. - /// @see gtc_type_precision - typedef detail::int32 lowp_int32_t; - - /// Low qualifier 64 bit signed integer type. - /// @see gtc_type_precision - typedef detail::int64 lowp_int64_t; - - /// Low qualifier 8 bit signed integer type. - /// @see gtc_type_precision - typedef detail::int8 lowp_i8; - - /// Low qualifier 16 bit signed integer type. - /// @see gtc_type_precision - typedef detail::int16 lowp_i16; - - /// Low qualifier 32 bit signed integer type. - /// @see gtc_type_precision - typedef detail::int32 lowp_i32; - - /// Low qualifier 64 bit signed integer type. - /// @see gtc_type_precision - typedef detail::int64 lowp_i64; - - /// Medium qualifier 8 bit signed integer type. - /// @see gtc_type_precision - typedef detail::int8 mediump_int8; - - /// Medium qualifier 16 bit signed integer type. - /// @see gtc_type_precision - typedef detail::int16 mediump_int16; - - /// Medium qualifier 32 bit signed integer type. - /// @see gtc_type_precision - typedef detail::int32 mediump_int32; - - /// Medium qualifier 64 bit signed integer type. - /// @see gtc_type_precision - typedef detail::int64 mediump_int64; - - /// Medium qualifier 8 bit signed integer type. - /// @see gtc_type_precision - typedef detail::int8 mediump_int8_t; - - /// Medium qualifier 16 bit signed integer type. - /// @see gtc_type_precision - typedef detail::int16 mediump_int16_t; - - /// Medium qualifier 32 bit signed integer type. - /// @see gtc_type_precision - typedef detail::int32 mediump_int32_t; - - /// Medium qualifier 64 bit signed integer type. - /// @see gtc_type_precision - typedef detail::int64 mediump_int64_t; - - /// Medium qualifier 8 bit signed integer type. - /// @see gtc_type_precision - typedef detail::int8 mediump_i8; - - /// Medium qualifier 16 bit signed integer type. - /// @see gtc_type_precision - typedef detail::int16 mediump_i16; - - /// Medium qualifier 32 bit signed integer type. - /// @see gtc_type_precision - typedef detail::int32 mediump_i32; - - /// Medium qualifier 64 bit signed integer type. - /// @see gtc_type_precision - typedef detail::int64 mediump_i64; - - /// High qualifier 8 bit signed integer type. - /// @see gtc_type_precision - typedef detail::int8 highp_int8; - - /// High qualifier 16 bit signed integer type. - /// @see gtc_type_precision - typedef detail::int16 highp_int16; - - /// High qualifier 32 bit signed integer type. - /// @see gtc_type_precision - typedef detail::int32 highp_int32; - - /// High qualifier 64 bit signed integer type. - /// @see gtc_type_precision - typedef detail::int64 highp_int64; - - /// High qualifier 8 bit signed integer type. - /// @see gtc_type_precision - typedef detail::int8 highp_int8_t; - - /// High qualifier 16 bit signed integer type. - /// @see gtc_type_precision - typedef detail::int16 highp_int16_t; - - /// 32 bit signed integer type. 
- /// @see gtc_type_precision - typedef detail::int32 highp_int32_t; - - /// High qualifier 64 bit signed integer type. - /// @see gtc_type_precision - typedef detail::int64 highp_int64_t; - - /// High qualifier 8 bit signed integer type. - /// @see gtc_type_precision - typedef detail::int8 highp_i8; - - /// High qualifier 16 bit signed integer type. - /// @see gtc_type_precision - typedef detail::int16 highp_i16; - - /// High qualifier 32 bit signed integer type. - /// @see gtc_type_precision - typedef detail::int32 highp_i32; - - /// High qualifier 64 bit signed integer type. - /// @see gtc_type_precision - typedef detail::int64 highp_i64; - - -#if GLM_HAS_EXTENDED_INTEGER_TYPE - using std::int8_t; - using std::int16_t; - using std::int32_t; - using std::int64_t; -#else - /// 8 bit signed integer type. - /// @see gtc_type_precision - typedef detail::int8 int8_t; - - /// 16 bit signed integer type. - /// @see gtc_type_precision - typedef detail::int16 int16_t; - - /// 32 bit signed integer type. - /// @see gtc_type_precision - typedef detail::int32 int32_t; - - /// 64 bit signed integer type. - /// @see gtc_type_precision - typedef detail::int64 int64_t; -#endif - - /// 8 bit signed integer type. - /// @see gtc_type_precision - typedef detail::int8 i8; - - /// 16 bit signed integer type. - /// @see gtc_type_precision - typedef detail::int16 i16; - - /// 32 bit signed integer type. - /// @see gtc_type_precision - typedef detail::int32 i32; - - /// 64 bit signed integer type. - /// @see gtc_type_precision - typedef detail::int64 i64; - - - - /// Low qualifier 8 bit signed integer scalar type. - /// @see gtc_type_precision - typedef vec<1, i8, lowp> lowp_i8vec1; - - /// Low qualifier 8 bit signed integer vector of 2 components type. - /// @see gtc_type_precision - typedef vec<2, i8, lowp> lowp_i8vec2; - - /// Low qualifier 8 bit signed integer vector of 3 components type. - /// @see gtc_type_precision - typedef vec<3, i8, lowp> lowp_i8vec3; - - /// Low qualifier 8 bit signed integer vector of 4 components type. - /// @see gtc_type_precision - typedef vec<4, i8, lowp> lowp_i8vec4; - - - /// Medium qualifier 8 bit signed integer scalar type. - /// @see gtc_type_precision - typedef vec<1, i8, mediump> mediump_i8vec1; - - /// Medium qualifier 8 bit signed integer vector of 2 components type. - /// @see gtc_type_precision - typedef vec<2, i8, mediump> mediump_i8vec2; - - /// Medium qualifier 8 bit signed integer vector of 3 components type. - /// @see gtc_type_precision - typedef vec<3, i8, mediump> mediump_i8vec3; - - /// Medium qualifier 8 bit signed integer vector of 4 components type. - /// @see gtc_type_precision - typedef vec<4, i8, mediump> mediump_i8vec4; - - - /// High qualifier 8 bit signed integer scalar type. - /// @see gtc_type_precision - typedef vec<1, i8, highp> highp_i8vec1; - - /// High qualifier 8 bit signed integer vector of 2 components type. - /// @see gtc_type_precision - typedef vec<2, i8, highp> highp_i8vec2; - - /// High qualifier 8 bit signed integer vector of 3 components type. - /// @see gtc_type_precision - typedef vec<3, i8, highp> highp_i8vec3; - - /// High qualifier 8 bit signed integer vector of 4 components type. - /// @see gtc_type_precision - typedef vec<4, i8, highp> highp_i8vec4; - - - - /// 8 bit signed integer scalar type. - /// @see gtc_type_precision - typedef vec<1, i8, defaultp> i8vec1; - - /// 8 bit signed integer vector of 2 components type. 
- /// @see gtc_type_precision - typedef vec<2, i8, defaultp> i8vec2; - - /// 8 bit signed integer vector of 3 components type. - /// @see gtc_type_precision - typedef vec<3, i8, defaultp> i8vec3; - - /// 8 bit signed integer vector of 4 components type. - /// @see gtc_type_precision - typedef vec<4, i8, defaultp> i8vec4; - - - - - - /// Low qualifier 16 bit signed integer scalar type. - /// @see gtc_type_precision - typedef vec<1, i16, lowp> lowp_i16vec1; - - /// Low qualifier 16 bit signed integer vector of 2 components type. - /// @see gtc_type_precision - typedef vec<2, i16, lowp> lowp_i16vec2; - - /// Low qualifier 16 bit signed integer vector of 3 components type. - /// @see gtc_type_precision - typedef vec<3, i16, lowp> lowp_i16vec3; - - /// Low qualifier 16 bit signed integer vector of 4 components type. - /// @see gtc_type_precision - typedef vec<4, i16, lowp> lowp_i16vec4; - - - /// Medium qualifier 16 bit signed integer scalar type. - /// @see gtc_type_precision - typedef vec<1, i16, mediump> mediump_i16vec1; - - /// Medium qualifier 16 bit signed integer vector of 2 components type. - /// @see gtc_type_precision - typedef vec<2, i16, mediump> mediump_i16vec2; - - /// Medium qualifier 16 bit signed integer vector of 3 components type. - /// @see gtc_type_precision - typedef vec<3, i16, mediump> mediump_i16vec3; - - /// Medium qualifier 16 bit signed integer vector of 4 components type. - /// @see gtc_type_precision - typedef vec<4, i16, mediump> mediump_i16vec4; - - - /// High qualifier 16 bit signed integer scalar type. - /// @see gtc_type_precision - typedef vec<1, i16, highp> highp_i16vec1; - - /// High qualifier 16 bit signed integer vector of 2 components type. - /// @see gtc_type_precision - typedef vec<2, i16, highp> highp_i16vec2; - - /// High qualifier 16 bit signed integer vector of 3 components type. - /// @see gtc_type_precision - typedef vec<3, i16, highp> highp_i16vec3; - - /// High qualifier 16 bit signed integer vector of 4 components type. - /// @see gtc_type_precision - typedef vec<4, i16, highp> highp_i16vec4; - - - - - /// 16 bit signed integer scalar type. - /// @see gtc_type_precision - typedef vec<1, i16, defaultp> i16vec1; - - /// 16 bit signed integer vector of 2 components type. - /// @see gtc_type_precision - typedef vec<2, i16, defaultp> i16vec2; - - /// 16 bit signed integer vector of 3 components type. - /// @see gtc_type_precision - typedef vec<3, i16, defaultp> i16vec3; - - /// 16 bit signed integer vector of 4 components type. - /// @see gtc_type_precision - typedef vec<4, i16, defaultp> i16vec4; - - - - /// Low qualifier 32 bit signed integer scalar type. - /// @see gtc_type_precision - typedef vec<1, i32, lowp> lowp_i32vec1; - - /// Low qualifier 32 bit signed integer vector of 2 components type. - /// @see gtc_type_precision - typedef vec<2, i32, lowp> lowp_i32vec2; - - /// Low qualifier 32 bit signed integer vector of 3 components type. - /// @see gtc_type_precision - typedef vec<3, i32, lowp> lowp_i32vec3; - - /// Low qualifier 32 bit signed integer vector of 4 components type. - /// @see gtc_type_precision - typedef vec<4, i32, lowp> lowp_i32vec4; - - - /// Medium qualifier 32 bit signed integer scalar type. - /// @see gtc_type_precision - typedef vec<1, i32, mediump> mediump_i32vec1; - - /// Medium qualifier 32 bit signed integer vector of 2 components type. - /// @see gtc_type_precision - typedef vec<2, i32, mediump> mediump_i32vec2; - - /// Medium qualifier 32 bit signed integer vector of 3 components type. 
- /// @see gtc_type_precision - typedef vec<3, i32, mediump> mediump_i32vec3; - - /// Medium qualifier 32 bit signed integer vector of 4 components type. - /// @see gtc_type_precision - typedef vec<4, i32, mediump> mediump_i32vec4; - - - /// High qualifier 32 bit signed integer scalar type. - /// @see gtc_type_precision - typedef vec<1, i32, highp> highp_i32vec1; - - /// High qualifier 32 bit signed integer vector of 2 components type. - /// @see gtc_type_precision - typedef vec<2, i32, highp> highp_i32vec2; - - /// High qualifier 32 bit signed integer vector of 3 components type. - /// @see gtc_type_precision - typedef vec<3, i32, highp> highp_i32vec3; - - /// High qualifier 32 bit signed integer vector of 4 components type. - /// @see gtc_type_precision - typedef vec<4, i32, highp> highp_i32vec4; - - - /// 32 bit signed integer scalar type. - /// @see gtc_type_precision - typedef vec<1, i32, defaultp> i32vec1; - - /// 32 bit signed integer vector of 2 components type. - /// @see gtc_type_precision - typedef vec<2, i32, defaultp> i32vec2; - - /// 32 bit signed integer vector of 3 components type. - /// @see gtc_type_precision - typedef vec<3, i32, defaultp> i32vec3; - - /// 32 bit signed integer vector of 4 components type. - /// @see gtc_type_precision - typedef vec<4, i32, defaultp> i32vec4; - - - - - /// Low qualifier 64 bit signed integer scalar type. - /// @see gtc_type_precision - typedef vec<1, i64, lowp> lowp_i64vec1; - - /// Low qualifier 64 bit signed integer vector of 2 components type. - /// @see gtc_type_precision - typedef vec<2, i64, lowp> lowp_i64vec2; - - /// Low qualifier 64 bit signed integer vector of 3 components type. - /// @see gtc_type_precision - typedef vec<3, i64, lowp> lowp_i64vec3; - - /// Low qualifier 64 bit signed integer vector of 4 components type. - /// @see gtc_type_precision - typedef vec<4, i64, lowp> lowp_i64vec4; - - - /// Medium qualifier 64 bit signed integer scalar type. - /// @see gtc_type_precision - typedef vec<1, i64, mediump> mediump_i64vec1; - - /// Medium qualifier 64 bit signed integer vector of 2 components type. - /// @see gtc_type_precision - typedef vec<2, i64, mediump> mediump_i64vec2; - - /// Medium qualifier 64 bit signed integer vector of 3 components type. - /// @see gtc_type_precision - typedef vec<3, i64, mediump> mediump_i64vec3; - - /// Medium qualifier 64 bit signed integer vector of 4 components type. - /// @see gtc_type_precision - typedef vec<4, i64, mediump> mediump_i64vec4; - - - /// High qualifier 64 bit signed integer scalar type. - /// @see gtc_type_precision - typedef vec<1, i64, highp> highp_i64vec1; - - /// High qualifier 64 bit signed integer vector of 2 components type. - /// @see gtc_type_precision - typedef vec<2, i64, highp> highp_i64vec2; - - /// High qualifier 64 bit signed integer vector of 3 components type. - /// @see gtc_type_precision - typedef vec<3, i64, highp> highp_i64vec3; - - /// High qualifier 64 bit signed integer vector of 4 components type. - /// @see gtc_type_precision - typedef vec<4, i64, highp> highp_i64vec4; - - - /// 64 bit signed integer scalar type. - /// @see gtc_type_precision - typedef vec<1, i64, defaultp> i64vec1; - - /// 64 bit signed integer vector of 2 components type. - /// @see gtc_type_precision - typedef vec<2, i64, defaultp> i64vec2; - - /// 64 bit signed integer vector of 3 components type. - /// @see gtc_type_precision - typedef vec<3, i64, defaultp> i64vec3; - - /// 64 bit signed integer vector of 4 components type. 
- /// @see gtc_type_precision - typedef vec<4, i64, defaultp> i64vec4; - - - ///////////////////////////// - // Unsigned int vector types - - /// Low qualifier 8 bit unsigned integer type. - /// @see gtc_type_precision - typedef detail::uint8 lowp_uint8; - - /// Low qualifier 16 bit unsigned integer type. - /// @see gtc_type_precision - typedef detail::uint16 lowp_uint16; - - /// Low qualifier 32 bit unsigned integer type. - /// @see gtc_type_precision - typedef detail::uint32 lowp_uint32; - - /// Low qualifier 64 bit unsigned integer type. - /// @see gtc_type_precision - typedef detail::uint64 lowp_uint64; - - /// Low qualifier 8 bit unsigned integer type. - /// @see gtc_type_precision - typedef detail::uint8 lowp_uint8_t; - - /// Low qualifier 16 bit unsigned integer type. - /// @see gtc_type_precision - typedef detail::uint16 lowp_uint16_t; - - /// Low qualifier 32 bit unsigned integer type. - /// @see gtc_type_precision - typedef detail::uint32 lowp_uint32_t; - - /// Low qualifier 64 bit unsigned integer type. - /// @see gtc_type_precision - typedef detail::uint64 lowp_uint64_t; - - /// Low qualifier 8 bit unsigned integer type. - /// @see gtc_type_precision - typedef detail::uint8 lowp_u8; - - /// Low qualifier 16 bit unsigned integer type. - /// @see gtc_type_precision - typedef detail::uint16 lowp_u16; - - /// Low qualifier 32 bit unsigned integer type. - /// @see gtc_type_precision - typedef detail::uint32 lowp_u32; - - /// Low qualifier 64 bit unsigned integer type. - /// @see gtc_type_precision - typedef detail::uint64 lowp_u64; - - /// Medium qualifier 8 bit unsigned integer type. - /// @see gtc_type_precision - typedef detail::uint8 mediump_uint8; - - /// Medium qualifier 16 bit unsigned integer type. - /// @see gtc_type_precision - typedef detail::uint16 mediump_uint16; - - /// Medium qualifier 32 bit unsigned integer type. - /// @see gtc_type_precision - typedef detail::uint32 mediump_uint32; - - /// Medium qualifier 64 bit unsigned integer type. - /// @see gtc_type_precision - typedef detail::uint64 mediump_uint64; - - /// Medium qualifier 8 bit unsigned integer type. - /// @see gtc_type_precision - typedef detail::uint8 mediump_uint8_t; - - /// Medium qualifier 16 bit unsigned integer type. - /// @see gtc_type_precision - typedef detail::uint16 mediump_uint16_t; - - /// Medium qualifier 32 bit unsigned integer type. - /// @see gtc_type_precision - typedef detail::uint32 mediump_uint32_t; - - /// Medium qualifier 64 bit unsigned integer type. - /// @see gtc_type_precision - typedef detail::uint64 mediump_uint64_t; - - /// Medium qualifier 8 bit unsigned integer type. - /// @see gtc_type_precision - typedef detail::uint8 mediump_u8; - - /// Medium qualifier 16 bit unsigned integer type. - /// @see gtc_type_precision - typedef detail::uint16 mediump_u16; - - /// Medium qualifier 32 bit unsigned integer type. - /// @see gtc_type_precision - typedef detail::uint32 mediump_u32; - - /// Medium qualifier 64 bit unsigned integer type. - /// @see gtc_type_precision - typedef detail::uint64 mediump_u64; - - /// High qualifier 8 bit unsigned integer type. - /// @see gtc_type_precision - typedef detail::uint8 highp_uint8; - - /// High qualifier 16 bit unsigned integer type. - /// @see gtc_type_precision - typedef detail::uint16 highp_uint16; - - /// High qualifier 32 bit unsigned integer type. - /// @see gtc_type_precision - typedef detail::uint32 highp_uint32; - - /// High qualifier 64 bit unsigned integer type. 
- /// @see gtc_type_precision - typedef detail::uint64 highp_uint64; - - /// High qualifier 8 bit unsigned integer type. - /// @see gtc_type_precision - typedef detail::uint8 highp_uint8_t; - - /// High qualifier 16 bit unsigned integer type. - /// @see gtc_type_precision - typedef detail::uint16 highp_uint16_t; - - /// High qualifier 32 bit unsigned integer type. - /// @see gtc_type_precision - typedef detail::uint32 highp_uint32_t; - - /// High qualifier 64 bit unsigned integer type. - /// @see gtc_type_precision - typedef detail::uint64 highp_uint64_t; - - /// High qualifier 8 bit unsigned integer type. - /// @see gtc_type_precision - typedef detail::uint8 highp_u8; - - /// High qualifier 16 bit unsigned integer type. - /// @see gtc_type_precision - typedef detail::uint16 highp_u16; - - /// High qualifier 32 bit unsigned integer type. - /// @see gtc_type_precision - typedef detail::uint32 highp_u32; - - /// High qualifier 64 bit unsigned integer type. - /// @see gtc_type_precision - typedef detail::uint64 highp_u64; - -#if GLM_HAS_EXTENDED_INTEGER_TYPE - using std::uint8_t; - using std::uint16_t; - using std::uint32_t; - using std::uint64_t; -#else - /// Default qualifier 8 bit unsigned integer type. - /// @see gtc_type_precision - typedef detail::uint8 uint8_t; - - /// Default qualifier 16 bit unsigned integer type. - /// @see gtc_type_precision - typedef detail::uint16 uint16_t; - - /// Default qualifier 32 bit unsigned integer type. - /// @see gtc_type_precision - typedef detail::uint32 uint32_t; - - /// Default qualifier 64 bit unsigned integer type. - /// @see gtc_type_precision - typedef detail::uint64 uint64_t; -#endif - - /// Default qualifier 8 bit unsigned integer type. - /// @see gtc_type_precision - typedef detail::uint8 u8; - - /// Default qualifier 16 bit unsigned integer type. - /// @see gtc_type_precision - typedef detail::uint16 u16; - - /// Default qualifier 32 bit unsigned integer type. - /// @see gtc_type_precision - typedef detail::uint32 u32; - - /// Default qualifier 64 bit unsigned integer type. - /// @see gtc_type_precision - typedef detail::uint64 u64; - - - - - - ////////////////////// - // Float vector types - - /// Single-qualifier floating-point scalar. - /// @see gtc_type_precision - typedef float float32; - - /// Double-qualifier floating-point scalar. - /// @see gtc_type_precision - typedef double float64; - - /// Low 32 bit single-qualifier floating-point scalar. - /// @see gtc_type_precision - typedef float32 lowp_float32; - - /// Low 64 bit double-qualifier floating-point scalar. - /// @see gtc_type_precision - typedef float64 lowp_float64; - - /// Low 32 bit single-qualifier floating-point scalar. - /// @see gtc_type_precision - typedef float32 lowp_float32_t; - - /// Low 64 bit double-qualifier floating-point scalar. - /// @see gtc_type_precision - typedef float64 lowp_float64_t; - - /// Low 32 bit single-qualifier floating-point scalar. - /// @see gtc_type_precision - typedef float32 lowp_f32; - - /// Low 64 bit double-qualifier floating-point scalar. - /// @see gtc_type_precision - typedef float64 lowp_f64; - - /// Low 32 bit single-qualifier floating-point scalar. - /// @see gtc_type_precision - typedef float32 lowp_float32; - - /// Low 64 bit double-qualifier floating-point scalar. - /// @see gtc_type_precision - typedef float64 lowp_float64; - - /// Low 32 bit single-qualifier floating-point scalar. - /// @see gtc_type_precision - typedef float32 lowp_float32_t; - - /// Low 64 bit double-qualifier floating-point scalar. 
- /// @see gtc_type_precision - typedef float64 lowp_float64_t; - - /// Low 32 bit single-qualifier floating-point scalar. - /// @see gtc_type_precision - typedef float32 lowp_f32; - - /// Low 64 bit double-qualifier floating-point scalar. - /// @see gtc_type_precision - typedef float64 lowp_f64; - - - /// Low 32 bit single-qualifier floating-point scalar. - /// @see gtc_type_precision - typedef float32 lowp_float32; - - /// Low 64 bit double-qualifier floating-point scalar. - /// @see gtc_type_precision - typedef float64 lowp_float64; - - /// Low 32 bit single-qualifier floating-point scalar. - /// @see gtc_type_precision - typedef float32 lowp_float32_t; - - /// Low 64 bit double-qualifier floating-point scalar. - /// @see gtc_type_precision - typedef float64 lowp_float64_t; - - /// Low 32 bit single-qualifier floating-point scalar. - /// @see gtc_type_precision - typedef float32 lowp_f32; - - /// Low 64 bit double-qualifier floating-point scalar. - /// @see gtc_type_precision - typedef float64 lowp_f64; - - - /// Medium 32 bit single-qualifier floating-point scalar. - /// @see gtc_type_precision - typedef float32 mediump_float32; - - /// Medium 64 bit double-qualifier floating-point scalar. - /// @see gtc_type_precision - typedef float64 mediump_float64; - - /// Medium 32 bit single-qualifier floating-point scalar. - /// @see gtc_type_precision - typedef float32 mediump_float32_t; - - /// Medium 64 bit double-qualifier floating-point scalar. - /// @see gtc_type_precision - typedef float64 mediump_float64_t; - - /// Medium 32 bit single-qualifier floating-point scalar. - /// @see gtc_type_precision - typedef float32 mediump_f32; - - /// Medium 64 bit double-qualifier floating-point scalar. - /// @see gtc_type_precision - typedef float64 mediump_f64; - - - /// High 32 bit single-qualifier floating-point scalar. - /// @see gtc_type_precision - typedef float32 highp_float32; - - /// High 64 bit double-qualifier floating-point scalar. - /// @see gtc_type_precision - typedef float64 highp_float64; - - /// High 32 bit single-qualifier floating-point scalar. - /// @see gtc_type_precision - typedef float32 highp_float32_t; - - /// High 64 bit double-qualifier floating-point scalar. - /// @see gtc_type_precision - typedef float64 highp_float64_t; - - /// High 32 bit single-qualifier floating-point scalar. - /// @see gtc_type_precision - typedef float32 highp_f32; - - /// High 64 bit double-qualifier floating-point scalar. - /// @see gtc_type_precision - typedef float64 highp_f64; - - -#if(defined(GLM_PRECISION_LOWP_FLOAT)) - /// Default 32 bit single-qualifier floating-point scalar. - /// @see gtc_type_precision - typedef lowp_float32_t float32_t; - - /// Default 64 bit double-qualifier floating-point scalar. - /// @see gtc_type_precision - typedef lowp_float64_t float64_t; - - /// Default 32 bit single-qualifier floating-point scalar. - /// @see gtc_type_precision - typedef lowp_f32 f32; - - /// Default 64 bit double-qualifier floating-point scalar. - /// @see gtc_type_precision - typedef lowp_f64 f64; - -#elif(defined(GLM_PRECISION_MEDIUMP_FLOAT)) - /// Default 32 bit single-qualifier floating-point scalar. - /// @see gtc_type_precision - typedef mediump_float32 float32_t; - - /// Default 64 bit double-qualifier floating-point scalar. - /// @see gtc_type_precision - typedef mediump_float64 float64_t; - - /// Default 32 bit single-qualifier floating-point scalar. - /// @see gtc_type_precision - typedef mediump_float32 f32; - - /// Default 64 bit double-qualifier floating-point scalar. 
- /// @see gtc_type_precision - typedef mediump_float64 f64; - -#else//(defined(GLM_PRECISION_HIGHP_FLOAT)) - - /// Default 32 bit single-qualifier floating-point scalar. - /// @see gtc_type_precision - typedef highp_float32_t float32_t; - - /// Default 64 bit double-qualifier floating-point scalar. - /// @see gtc_type_precision - typedef highp_float64_t float64_t; - - /// Default 32 bit single-qualifier floating-point scalar. - /// @see gtc_type_precision - typedef highp_float32_t f32; - - /// Default 64 bit double-qualifier floating-point scalar. - /// @see gtc_type_precision - typedef highp_float64_t f64; -#endif - - - /// Low single-qualifier floating-point vector of 1 component. - /// @see gtc_type_precision - typedef vec<1, float, lowp> lowp_fvec1; - - /// Low single-qualifier floating-point vector of 2 components. - /// @see gtc_type_precision - typedef vec<2, float, lowp> lowp_fvec2; - - /// Low single-qualifier floating-point vector of 3 components. - /// @see gtc_type_precision - typedef vec<3, float, lowp> lowp_fvec3; - - /// Low single-qualifier floating-point vector of 4 components. - /// @see gtc_type_precision - typedef vec<4, float, lowp> lowp_fvec4; - - - /// Medium single-qualifier floating-point vector of 1 component. - /// @see gtc_type_precision - typedef vec<1, float, mediump> mediump_fvec1; - - /// Medium Single-qualifier floating-point vector of 2 components. - /// @see gtc_type_precision - typedef vec<2, float, mediump> mediump_fvec2; - - /// Medium Single-qualifier floating-point vector of 3 components. - /// @see gtc_type_precision - typedef vec<3, float, mediump> mediump_fvec3; - - /// Medium Single-qualifier floating-point vector of 4 components. - /// @see gtc_type_precision - typedef vec<4, float, mediump> mediump_fvec4; - - - /// High single-qualifier floating-point vector of 1 component. - /// @see gtc_type_precision - typedef vec<1, float, highp> highp_fvec1; - - /// High Single-qualifier floating-point vector of 2 components. - /// @see core_precision - typedef vec<2, float, highp> highp_fvec2; - - /// High Single-qualifier floating-point vector of 3 components. - /// @see core_precision - typedef vec<3, float, highp> highp_fvec3; - - /// High Single-qualifier floating-point vector of 4 components. - /// @see core_precision - typedef vec<4, float, highp> highp_fvec4; - - - /// Low single-qualifier floating-point vector of 1 component. - /// @see gtc_type_precision - typedef vec<1, f32, lowp> lowp_f32vec1; - - /// Low single-qualifier floating-point vector of 2 components. - /// @see core_precision - typedef vec<2, f32, lowp> lowp_f32vec2; - - /// Low single-qualifier floating-point vector of 3 components. - /// @see core_precision - typedef vec<3, f32, lowp> lowp_f32vec3; - - /// Low single-qualifier floating-point vector of 4 components. - /// @see core_precision - typedef vec<4, f32, lowp> lowp_f32vec4; - - /// Medium single-qualifier floating-point vector of 1 component. - /// @see gtc_type_precision - typedef vec<1, f32, mediump> mediump_f32vec1; - - /// Medium single-qualifier floating-point vector of 2 components. - /// @see core_precision - typedef vec<2, f32, mediump> mediump_f32vec2; - - /// Medium single-qualifier floating-point vector of 3 components. - /// @see core_precision - typedef vec<3, f32, mediump> mediump_f32vec3; - - /// Medium single-qualifier floating-point vector of 4 components. - /// @see core_precision - typedef vec<4, f32, mediump> mediump_f32vec4; - - /// High single-qualifier floating-point vector of 1 component. 
- /// @see gtc_type_precision - typedef vec<1, f32, highp> highp_f32vec1; - - /// High single-qualifier floating-point vector of 2 components. - /// @see gtc_type_precision - typedef vec<2, f32, highp> highp_f32vec2; - - /// High single-qualifier floating-point vector of 3 components. - /// @see gtc_type_precision - typedef vec<3, f32, highp> highp_f32vec3; - - /// High single-qualifier floating-point vector of 4 components. - /// @see gtc_type_precision - typedef vec<4, f32, highp> highp_f32vec4; - - - /// Low double-qualifier floating-point vector of 1 component. - /// @see gtc_type_precision - typedef vec<1, f64, lowp> lowp_f64vec1; - - /// Low double-qualifier floating-point vector of 2 components. - /// @see gtc_type_precision - typedef vec<2, f64, lowp> lowp_f64vec2; - - /// Low double-qualifier floating-point vector of 3 components. - /// @see gtc_type_precision - typedef vec<3, f64, lowp> lowp_f64vec3; - - /// Low double-qualifier floating-point vector of 4 components. - /// @see gtc_type_precision - typedef vec<4, f64, lowp> lowp_f64vec4; - - /// Medium double-qualifier floating-point vector of 1 component. - /// @see gtc_type_precision - typedef vec<1, f64, mediump> mediump_f64vec1; - - /// Medium double-qualifier floating-point vector of 2 components. - /// @see gtc_type_precision - typedef vec<2, f64, mediump> mediump_f64vec2; - - /// Medium double-qualifier floating-point vector of 3 components. - /// @see gtc_type_precision - typedef vec<3, f64, mediump> mediump_f64vec3; - - /// Medium double-qualifier floating-point vector of 4 components. - /// @see gtc_type_precision - typedef vec<4, f64, mediump> mediump_f64vec4; - - /// High double-qualifier floating-point vector of 1 component. - /// @see gtc_type_precision - typedef vec<1, f64, highp> highp_f64vec1; - - /// High double-qualifier floating-point vector of 2 components. - /// @see gtc_type_precision - typedef vec<2, f64, highp> highp_f64vec2; - - /// High double-qualifier floating-point vector of 3 components. - /// @see gtc_type_precision - typedef vec<3, f64, highp> highp_f64vec3; - - /// High double-qualifier floating-point vector of 4 components. - /// @see gtc_type_precision - typedef vec<4, f64, highp> highp_f64vec4; - - - - ////////////////////// - // Float matrix types - - /// Low single-qualifier floating-point 1x1 matrix. - /// @see gtc_type_precision - //typedef lowp_f32 lowp_fmat1x1; - - /// Low single-qualifier floating-point 2x2 matrix. - /// @see gtc_type_precision - typedef mat<2, 2, f32, lowp> lowp_fmat2x2; - - /// Low single-qualifier floating-point 2x3 matrix. - /// @see gtc_type_precision - typedef mat<2, 3, f32, lowp> lowp_fmat2x3; - - /// Low single-qualifier floating-point 2x4 matrix. - /// @see gtc_type_precision - typedef mat<2, 4, f32, lowp> lowp_fmat2x4; - - /// Low single-qualifier floating-point 3x2 matrix. - /// @see gtc_type_precision - typedef mat<3, 2, f32, lowp> lowp_fmat3x2; - - /// Low single-qualifier floating-point 3x3 matrix. - /// @see gtc_type_precision - typedef mat<3, 3, f32, lowp> lowp_fmat3x3; - - /// Low single-qualifier floating-point 3x4 matrix. - /// @see gtc_type_precision - typedef mat<3, 4, f32, lowp> lowp_fmat3x4; - - /// Low single-qualifier floating-point 4x2 matrix. - /// @see gtc_type_precision - typedef mat<4, 2, f32, lowp> lowp_fmat4x2; - - /// Low single-qualifier floating-point 4x3 matrix. - /// @see gtc_type_precision - typedef mat<4, 3, f32, lowp> lowp_fmat4x3; - - /// Low single-qualifier floating-point 4x4 matrix. 
- /// @see gtc_type_precision - typedef mat<4, 4, f32, lowp> lowp_fmat4x4; - - /// Low single-qualifier floating-point 1x1 matrix. - /// @see gtc_type_precision - //typedef lowp_fmat1x1 lowp_fmat1; - - /// Low single-qualifier floating-point 2x2 matrix. - /// @see gtc_type_precision - typedef lowp_fmat2x2 lowp_fmat2; - - /// Low single-qualifier floating-point 3x3 matrix. - /// @see gtc_type_precision - typedef lowp_fmat3x3 lowp_fmat3; - - /// Low single-qualifier floating-point 4x4 matrix. - /// @see gtc_type_precision - typedef lowp_fmat4x4 lowp_fmat4; - - - /// Medium single-qualifier floating-point 1x1 matrix. - /// @see gtc_type_precision - //typedef mediump_f32 mediump_fmat1x1; - - /// Medium single-qualifier floating-point 2x2 matrix. - /// @see gtc_type_precision - typedef mat<2, 2, f32, mediump> mediump_fmat2x2; - - /// Medium single-qualifier floating-point 2x3 matrix. - /// @see gtc_type_precision - typedef mat<2, 3, f32, mediump> mediump_fmat2x3; - - /// Medium single-qualifier floating-point 2x4 matrix. - /// @see gtc_type_precision - typedef mat<2, 4, f32, mediump> mediump_fmat2x4; - - /// Medium single-qualifier floating-point 3x2 matrix. - /// @see gtc_type_precision - typedef mat<3, 2, f32, mediump> mediump_fmat3x2; - - /// Medium single-qualifier floating-point 3x3 matrix. - /// @see gtc_type_precision - typedef mat<3, 3, f32, mediump> mediump_fmat3x3; - - /// Medium single-qualifier floating-point 3x4 matrix. - /// @see gtc_type_precision - typedef mat<3, 4, f32, mediump> mediump_fmat3x4; - - /// Medium single-qualifier floating-point 4x2 matrix. - /// @see gtc_type_precision - typedef mat<4, 2, f32, mediump> mediump_fmat4x2; - - /// Medium single-qualifier floating-point 4x3 matrix. - /// @see gtc_type_precision - typedef mat<4, 3, f32, mediump> mediump_fmat4x3; - - /// Medium single-qualifier floating-point 4x4 matrix. - /// @see gtc_type_precision - typedef mat<4, 4, f32, mediump> mediump_fmat4x4; - - /// Medium single-qualifier floating-point 1x1 matrix. - /// @see gtc_type_precision - //typedef mediump_fmat1x1 mediump_fmat1; - - /// Medium single-qualifier floating-point 2x2 matrix. - /// @see gtc_type_precision - typedef mediump_fmat2x2 mediump_fmat2; - - /// Medium single-qualifier floating-point 3x3 matrix. - /// @see gtc_type_precision - typedef mediump_fmat3x3 mediump_fmat3; - - /// Medium single-qualifier floating-point 4x4 matrix. - /// @see gtc_type_precision - typedef mediump_fmat4x4 mediump_fmat4; - - - /// High single-qualifier floating-point 1x1 matrix. - /// @see gtc_type_precision - //typedef highp_f32 highp_fmat1x1; - - /// High single-qualifier floating-point 2x2 matrix. - /// @see gtc_type_precision - typedef mat<2, 2, f32, highp> highp_fmat2x2; - - /// High single-qualifier floating-point 2x3 matrix. - /// @see gtc_type_precision - typedef mat<2, 3, f32, highp> highp_fmat2x3; - - /// High single-qualifier floating-point 2x4 matrix. - /// @see gtc_type_precision - typedef mat<2, 4, f32, highp> highp_fmat2x4; - - /// High single-qualifier floating-point 3x2 matrix. - /// @see gtc_type_precision - typedef mat<3, 2, f32, highp> highp_fmat3x2; - - /// High single-qualifier floating-point 3x3 matrix. - /// @see gtc_type_precision - typedef mat<3, 3, f32, highp> highp_fmat3x3; - - /// High single-qualifier floating-point 3x4 matrix. - /// @see gtc_type_precision - typedef mat<3, 4, f32, highp> highp_fmat3x4; - - /// High single-qualifier floating-point 4x2 matrix. 
- /// @see gtc_type_precision - typedef mat<4, 2, f32, highp> highp_fmat4x2; - - /// High single-qualifier floating-point 4x3 matrix. - /// @see gtc_type_precision - typedef mat<4, 3, f32, highp> highp_fmat4x3; - - /// High single-qualifier floating-point 4x4 matrix. - /// @see gtc_type_precision - typedef mat<4, 4, f32, highp> highp_fmat4x4; - - /// High single-qualifier floating-point 1x1 matrix. - /// @see gtc_type_precision - //typedef highp_fmat1x1 highp_fmat1; - - /// High single-qualifier floating-point 2x2 matrix. - /// @see gtc_type_precision - typedef highp_fmat2x2 highp_fmat2; - - /// High single-qualifier floating-point 3x3 matrix. - /// @see gtc_type_precision - typedef highp_fmat3x3 highp_fmat3; - - /// High single-qualifier floating-point 4x4 matrix. - /// @see gtc_type_precision - typedef highp_fmat4x4 highp_fmat4; - - - /// Low single-qualifier floating-point 1x1 matrix. - /// @see gtc_type_precision - //typedef f32 lowp_f32mat1x1; - - /// Low single-qualifier floating-point 2x2 matrix. - /// @see gtc_type_precision - typedef mat<2, 2, f32, lowp> lowp_f32mat2x2; - - /// Low single-qualifier floating-point 2x3 matrix. - /// @see gtc_type_precision - typedef mat<2, 3, f32, lowp> lowp_f32mat2x3; - - /// Low single-qualifier floating-point 2x4 matrix. - /// @see gtc_type_precision - typedef mat<2, 4, f32, lowp> lowp_f32mat2x4; - - /// Low single-qualifier floating-point 3x2 matrix. - /// @see gtc_type_precision - typedef mat<3, 2, f32, lowp> lowp_f32mat3x2; - - /// Low single-qualifier floating-point 3x3 matrix. - /// @see gtc_type_precision - typedef mat<3, 3, f32, lowp> lowp_f32mat3x3; - - /// Low single-qualifier floating-point 3x4 matrix. - /// @see gtc_type_precision - typedef mat<3, 4, f32, lowp> lowp_f32mat3x4; - - /// Low single-qualifier floating-point 4x2 matrix. - /// @see gtc_type_precision - typedef mat<4, 2, f32, lowp> lowp_f32mat4x2; - - /// Low single-qualifier floating-point 4x3 matrix. - /// @see gtc_type_precision - typedef mat<4, 3, f32, lowp> lowp_f32mat4x3; - - /// Low single-qualifier floating-point 4x4 matrix. - /// @see gtc_type_precision - typedef mat<4, 4, f32, lowp> lowp_f32mat4x4; - - /// Low single-qualifier floating-point 1x1 matrix. - /// @see gtc_type_precision - //typedef detail::tmat1x1 lowp_f32mat1; - - /// Low single-qualifier floating-point 2x2 matrix. - /// @see gtc_type_precision - typedef lowp_f32mat2x2 lowp_f32mat2; - - /// Low single-qualifier floating-point 3x3 matrix. - /// @see gtc_type_precision - typedef lowp_f32mat3x3 lowp_f32mat3; - - /// Low single-qualifier floating-point 4x4 matrix. - /// @see gtc_type_precision - typedef lowp_f32mat4x4 lowp_f32mat4; - - - /// High single-qualifier floating-point 1x1 matrix. - /// @see gtc_type_precision - //typedef f32 mediump_f32mat1x1; - - /// Low single-qualifier floating-point 2x2 matrix. - /// @see gtc_type_precision - typedef mat<2, 2, f32, mediump> mediump_f32mat2x2; - - /// Medium single-qualifier floating-point 2x3 matrix. - /// @see gtc_type_precision - typedef mat<2, 3, f32, mediump> mediump_f32mat2x3; - - /// Medium single-qualifier floating-point 2x4 matrix. - /// @see gtc_type_precision - typedef mat<2, 4, f32, mediump> mediump_f32mat2x4; - - /// Medium single-qualifier floating-point 3x2 matrix. - /// @see gtc_type_precision - typedef mat<3, 2, f32, mediump> mediump_f32mat3x2; - - /// Medium single-qualifier floating-point 3x3 matrix. - /// @see gtc_type_precision - typedef mat<3, 3, f32, mediump> mediump_f32mat3x3; - - /// Medium single-qualifier floating-point 3x4 matrix. 
- /// @see gtc_type_precision - typedef mat<3, 4, f32, mediump> mediump_f32mat3x4; - - /// Medium single-qualifier floating-point 4x2 matrix. - /// @see gtc_type_precision - typedef mat<4, 2, f32, mediump> mediump_f32mat4x2; - - /// Medium single-qualifier floating-point 4x3 matrix. - /// @see gtc_type_precision - typedef mat<4, 3, f32, mediump> mediump_f32mat4x3; - - /// Medium single-qualifier floating-point 4x4 matrix. - /// @see gtc_type_precision - typedef mat<4, 4, f32, mediump> mediump_f32mat4x4; - - /// Medium single-qualifier floating-point 1x1 matrix. - /// @see gtc_type_precision - //typedef detail::tmat1x1 f32mat1; - - /// Medium single-qualifier floating-point 2x2 matrix. - /// @see gtc_type_precision - typedef mediump_f32mat2x2 mediump_f32mat2; - - /// Medium single-qualifier floating-point 3x3 matrix. - /// @see gtc_type_precision - typedef mediump_f32mat3x3 mediump_f32mat3; - - /// Medium single-qualifier floating-point 4x4 matrix. - /// @see gtc_type_precision - typedef mediump_f32mat4x4 mediump_f32mat4; - - - /// High single-qualifier floating-point 1x1 matrix. - /// @see gtc_type_precision - //typedef f32 highp_f32mat1x1; - - /// High single-qualifier floating-point 2x2 matrix. - /// @see gtc_type_precision - typedef mat<2, 2, f32, highp> highp_f32mat2x2; - - /// High single-qualifier floating-point 2x3 matrix. - /// @see gtc_type_precision - typedef mat<2, 3, f32, highp> highp_f32mat2x3; - - /// High single-qualifier floating-point 2x4 matrix. - /// @see gtc_type_precision - typedef mat<2, 4, f32, highp> highp_f32mat2x4; - - /// High single-qualifier floating-point 3x2 matrix. - /// @see gtc_type_precision - typedef mat<3, 2, f32, highp> highp_f32mat3x2; - - /// High single-qualifier floating-point 3x3 matrix. - /// @see gtc_type_precision - typedef mat<3, 3, f32, highp> highp_f32mat3x3; - - /// High single-qualifier floating-point 3x4 matrix. - /// @see gtc_type_precision - typedef mat<3, 4, f32, highp> highp_f32mat3x4; - - /// High single-qualifier floating-point 4x2 matrix. - /// @see gtc_type_precision - typedef mat<4, 2, f32, highp> highp_f32mat4x2; - - /// High single-qualifier floating-point 4x3 matrix. - /// @see gtc_type_precision - typedef mat<4, 3, f32, highp> highp_f32mat4x3; - - /// High single-qualifier floating-point 4x4 matrix. - /// @see gtc_type_precision - typedef mat<4, 4, f32, highp> highp_f32mat4x4; - - /// High single-qualifier floating-point 1x1 matrix. - /// @see gtc_type_precision - //typedef detail::tmat1x1 f32mat1; - - /// High single-qualifier floating-point 2x2 matrix. - /// @see gtc_type_precision - typedef highp_f32mat2x2 highp_f32mat2; - - /// High single-qualifier floating-point 3x3 matrix. - /// @see gtc_type_precision - typedef highp_f32mat3x3 highp_f32mat3; - - /// High single-qualifier floating-point 4x4 matrix. - /// @see gtc_type_precision - typedef highp_f32mat4x4 highp_f32mat4; - - - /// Low double-qualifier floating-point 1x1 matrix. - /// @see gtc_type_precision - //typedef f64 lowp_f64mat1x1; - - /// Low double-qualifier floating-point 2x2 matrix. - /// @see gtc_type_precision - typedef mat<2, 2, f64, lowp> lowp_f64mat2x2; - - /// Low double-qualifier floating-point 2x3 matrix. - /// @see gtc_type_precision - typedef mat<2, 3, f64, lowp> lowp_f64mat2x3; - - /// Low double-qualifier floating-point 2x4 matrix. - /// @see gtc_type_precision - typedef mat<2, 4, f64, lowp> lowp_f64mat2x4; - - /// Low double-qualifier floating-point 3x2 matrix. 
- /// @see gtc_type_precision - typedef mat<3, 2, f64, lowp> lowp_f64mat3x2; - - /// Low double-qualifier floating-point 3x3 matrix. - /// @see gtc_type_precision - typedef mat<3, 3, f64, lowp> lowp_f64mat3x3; - - /// Low double-qualifier floating-point 3x4 matrix. - /// @see gtc_type_precision - typedef mat<3, 4, f64, lowp> lowp_f64mat3x4; - - /// Low double-qualifier floating-point 4x2 matrix. - /// @see gtc_type_precision - typedef mat<4, 2, f64, lowp> lowp_f64mat4x2; - - /// Low double-qualifier floating-point 4x3 matrix. - /// @see gtc_type_precision - typedef mat<4, 3, f64, lowp> lowp_f64mat4x3; - - /// Low double-qualifier floating-point 4x4 matrix. - /// @see gtc_type_precision - typedef mat<4, 4, f64, lowp> lowp_f64mat4x4; - - /// Low double-qualifier floating-point 1x1 matrix. - /// @see gtc_type_precision - //typedef lowp_f64mat1x1 lowp_f64mat1; - - /// Low double-qualifier floating-point 2x2 matrix. - /// @see gtc_type_precision - typedef lowp_f64mat2x2 lowp_f64mat2; - - /// Low double-qualifier floating-point 3x3 matrix. - /// @see gtc_type_precision - typedef lowp_f64mat3x3 lowp_f64mat3; - - /// Low double-qualifier floating-point 4x4 matrix. - /// @see gtc_type_precision - typedef lowp_f64mat4x4 lowp_f64mat4; - - - /// Medium double-qualifier floating-point 1x1 matrix. - /// @see gtc_type_precision - //typedef f64 Highp_f64mat1x1; - - /// Medium double-qualifier floating-point 2x2 matrix. - /// @see gtc_type_precision - typedef mat<2, 2, f64, mediump> mediump_f64mat2x2; - - /// Medium double-qualifier floating-point 2x3 matrix. - /// @see gtc_type_precision - typedef mat<2, 3, f64, mediump> mediump_f64mat2x3; - - /// Medium double-qualifier floating-point 2x4 matrix. - /// @see gtc_type_precision - typedef mat<2, 4, f64, mediump> mediump_f64mat2x4; - - /// Medium double-qualifier floating-point 3x2 matrix. - /// @see gtc_type_precision - typedef mat<3, 2, f64, mediump> mediump_f64mat3x2; - - /// Medium double-qualifier floating-point 3x3 matrix. - /// @see gtc_type_precision - typedef mat<3, 3, f64, mediump> mediump_f64mat3x3; - - /// Medium double-qualifier floating-point 3x4 matrix. - /// @see gtc_type_precision - typedef mat<3, 4, f64, mediump> mediump_f64mat3x4; - - /// Medium double-qualifier floating-point 4x2 matrix. - /// @see gtc_type_precision - typedef mat<4, 2, f64, mediump> mediump_f64mat4x2; - - /// Medium double-qualifier floating-point 4x3 matrix. - /// @see gtc_type_precision - typedef mat<4, 3, f64, mediump> mediump_f64mat4x3; - - /// Medium double-qualifier floating-point 4x4 matrix. - /// @see gtc_type_precision - typedef mat<4, 4, f64, mediump> mediump_f64mat4x4; - - /// Medium double-qualifier floating-point 1x1 matrix. - /// @see gtc_type_precision - //typedef mediump_f64mat1x1 mediump_f64mat1; - - /// Medium double-qualifier floating-point 2x2 matrix. - /// @see gtc_type_precision - typedef mediump_f64mat2x2 mediump_f64mat2; - - /// Medium double-qualifier floating-point 3x3 matrix. - /// @see gtc_type_precision - typedef mediump_f64mat3x3 mediump_f64mat3; - - /// Medium double-qualifier floating-point 4x4 matrix. - /// @see gtc_type_precision - typedef mediump_f64mat4x4 mediump_f64mat4; - - /// High double-qualifier floating-point 1x1 matrix. - /// @see gtc_type_precision - //typedef f64 highp_f64mat1x1; - - /// High double-qualifier floating-point 2x2 matrix. - /// @see gtc_type_precision - typedef mat<2, 2, f64, highp> highp_f64mat2x2; - - /// High double-qualifier floating-point 2x3 matrix. 
- /// @see gtc_type_precision - typedef mat<2, 3, f64, highp> highp_f64mat2x3; - - /// High double-qualifier floating-point 2x4 matrix. - /// @see gtc_type_precision - typedef mat<2, 4, f64, highp> highp_f64mat2x4; - - /// High double-qualifier floating-point 3x2 matrix. - /// @see gtc_type_precision - typedef mat<3, 2, f64, highp> highp_f64mat3x2; - - /// High double-qualifier floating-point 3x3 matrix. - /// @see gtc_type_precision - typedef mat<3, 3, f64, highp> highp_f64mat3x3; - - /// High double-qualifier floating-point 3x4 matrix. - /// @see gtc_type_precision - typedef mat<3, 4, f64, highp> highp_f64mat3x4; - - /// High double-qualifier floating-point 4x2 matrix. - /// @see gtc_type_precision - typedef mat<4, 2, f64, highp> highp_f64mat4x2; - - /// High double-qualifier floating-point 4x3 matrix. - /// @see gtc_type_precision - typedef mat<4, 3, f64, highp> highp_f64mat4x3; - - /// High double-qualifier floating-point 4x4 matrix. - /// @see gtc_type_precision - typedef mat<4, 4, f64, highp> highp_f64mat4x4; - - /// High double-qualifier floating-point 1x1 matrix. - /// @see gtc_type_precision - //typedef highp_f64mat1x1 highp_f64mat1; - - /// High double-qualifier floating-point 2x2 matrix. - /// @see gtc_type_precision - typedef highp_f64mat2x2 highp_f64mat2; - - /// High double-qualifier floating-point 3x3 matrix. - /// @see gtc_type_precision - typedef highp_f64mat3x3 highp_f64mat3; - - /// High double-qualifier floating-point 4x4 matrix. - /// @see gtc_type_precision - typedef highp_f64mat4x4 highp_f64mat4; - - - - - /// Low qualifier 8 bit unsigned integer scalar type. - /// @see gtc_type_precision - typedef vec<1, u8, lowp> lowp_u8vec1; - - /// Low qualifier 8 bit unsigned integer vector of 2 components type. - /// @see gtc_type_precision - typedef vec<2, u8, lowp> lowp_u8vec2; - - /// Low qualifier 8 bit unsigned integer vector of 3 components type. - /// @see gtc_type_precision - typedef vec<3, u8, lowp> lowp_u8vec3; - - /// Low qualifier 8 bit unsigned integer vector of 4 components type. - /// @see gtc_type_precision - typedef vec<4, u8, lowp> lowp_u8vec4; - - - /// Medium qualifier 8 bit unsigned integer scalar type. - /// @see gtc_type_precision - typedef vec<1, u8, mediump> mediump_u8vec1; - - /// Medium qualifier 8 bit unsigned integer vector of 2 components type. - /// @see gtc_type_precision - typedef vec<2, u8, mediump> mediump_u8vec2; - - /// Medium qualifier 8 bit unsigned integer vector of 3 components type. - /// @see gtc_type_precision - typedef vec<3, u8, mediump> mediump_u8vec3; - - /// Medium qualifier 8 bit unsigned integer vector of 4 components type. - /// @see gtc_type_precision - typedef vec<4, u8, mediump> mediump_u8vec4; - - - /// High qualifier 8 bit unsigned integer scalar type. - /// @see gtc_type_precision - typedef vec<1, u8, highp> highp_u8vec1; - - /// High qualifier 8 bit unsigned integer vector of 2 components type. - /// @see gtc_type_precision - typedef vec<2, u8, highp> highp_u8vec2; - - /// High qualifier 8 bit unsigned integer vector of 3 components type. - /// @see gtc_type_precision - typedef vec<3, u8, highp> highp_u8vec3; - - /// High qualifier 8 bit unsigned integer vector of 4 components type. - /// @see gtc_type_precision - typedef vec<4, u8, highp> highp_u8vec4; - - - - /// Default qualifier 8 bit unsigned integer scalar type. - /// @see gtc_type_precision - typedef vec<1, u8, defaultp> u8vec1; - - /// Default qualifier 8 bit unsigned integer vector of 2 components type. 
- /// @see gtc_type_precision - typedef vec<2, u8, defaultp> u8vec2; - - /// Default qualifier 8 bit unsigned integer vector of 3 components type. - /// @see gtc_type_precision - typedef vec<3, u8, defaultp> u8vec3; - - /// Default qualifier 8 bit unsigned integer vector of 4 components type. - /// @see gtc_type_precision - typedef vec<4, u8, defaultp> u8vec4; - - - - - /// Low qualifier 16 bit unsigned integer scalar type. - /// @see gtc_type_precision - typedef vec<1, u16, lowp> lowp_u16vec1; - - /// Low qualifier 16 bit unsigned integer vector of 2 components type. - /// @see gtc_type_precision - typedef vec<2, u16, lowp> lowp_u16vec2; - - /// Low qualifier 16 bit unsigned integer vector of 3 components type. - /// @see gtc_type_precision - typedef vec<3, u16, lowp> lowp_u16vec3; - - /// Low qualifier 16 bit unsigned integer vector of 4 components type. - /// @see gtc_type_precision - typedef vec<4, u16, lowp> lowp_u16vec4; - - - /// Medium qualifier 16 bit unsigned integer scalar type. - /// @see gtc_type_precision - typedef vec<1, u16, mediump> mediump_u16vec1; - - /// Medium qualifier 16 bit unsigned integer vector of 2 components type. - /// @see gtc_type_precision - typedef vec<2, u16, mediump> mediump_u16vec2; - - /// Medium qualifier 16 bit unsigned integer vector of 3 components type. - /// @see gtc_type_precision - typedef vec<3, u16, mediump> mediump_u16vec3; - - /// Medium qualifier 16 bit unsigned integer vector of 4 components type. - /// @see gtc_type_precision - typedef vec<4, u16, mediump> mediump_u16vec4; - - - /// High qualifier 16 bit unsigned integer scalar type. - /// @see gtc_type_precision - typedef vec<1, u16, highp> highp_u16vec1; - - /// High qualifier 16 bit unsigned integer vector of 2 components type. - /// @see gtc_type_precision - typedef vec<2, u16, highp> highp_u16vec2; - - /// High qualifier 16 bit unsigned integer vector of 3 components type. - /// @see gtc_type_precision - typedef vec<3, u16, highp> highp_u16vec3; - - /// High qualifier 16 bit unsigned integer vector of 4 components type. - /// @see gtc_type_precision - typedef vec<4, u16, highp> highp_u16vec4; - - - - - /// Default qualifier 16 bit unsigned integer scalar type. - /// @see gtc_type_precision - typedef vec<1, u16, defaultp> u16vec1; - - /// Default qualifier 16 bit unsigned integer vector of 2 components type. - /// @see gtc_type_precision - typedef vec<2, u16, defaultp> u16vec2; - - /// Default qualifier 16 bit unsigned integer vector of 3 components type. - /// @see gtc_type_precision - typedef vec<3, u16, defaultp> u16vec3; - - /// Default qualifier 16 bit unsigned integer vector of 4 components type. - /// @see gtc_type_precision - typedef vec<4, u16, defaultp> u16vec4; - - - - /// Low qualifier 32 bit unsigned integer scalar type. - /// @see gtc_type_precision - typedef vec<1, u32, lowp> lowp_u32vec1; - - /// Low qualifier 32 bit unsigned integer vector of 2 components type. - /// @see gtc_type_precision - typedef vec<2, u32, lowp> lowp_u32vec2; - - /// Low qualifier 32 bit unsigned integer vector of 3 components type. - /// @see gtc_type_precision - typedef vec<3, u32, lowp> lowp_u32vec3; - - /// Low qualifier 32 bit unsigned integer vector of 4 components type. - /// @see gtc_type_precision - typedef vec<4, u32, lowp> lowp_u32vec4; - - - /// Medium qualifier 32 bit unsigned integer scalar type. - /// @see gtc_type_precision - typedef vec<1, u32, mediump> mediump_u32vec1; - - /// Medium qualifier 32 bit unsigned integer vector of 2 components type. 
- /// @see gtc_type_precision - typedef vec<2, u32, mediump> mediump_u32vec2; - - /// Medium qualifier 32 bit unsigned integer vector of 3 components type. - /// @see gtc_type_precision - typedef vec<3, u32, mediump> mediump_u32vec3; - - /// Medium qualifier 32 bit unsigned integer vector of 4 components type. - /// @see gtc_type_precision - typedef vec<4, u32, mediump> mediump_u32vec4; - - - /// High qualifier 32 bit unsigned integer scalar type. - /// @see gtc_type_precision - typedef vec<1, u32, highp> highp_u32vec1; - - /// High qualifier 32 bit unsigned integer vector of 2 components type. - /// @see gtc_type_precision - typedef vec<2, u32, highp> highp_u32vec2; - - /// High qualifier 32 bit unsigned integer vector of 3 components type. - /// @see gtc_type_precision - typedef vec<3, u32, highp> highp_u32vec3; - - /// High qualifier 32 bit unsigned integer vector of 4 components type. - /// @see gtc_type_precision - typedef vec<4, u32, highp> highp_u32vec4; - - - - /// Default qualifier 32 bit unsigned integer scalar type. - /// @see gtc_type_precision - typedef vec<1, u32, defaultp> u32vec1; - - /// Default qualifier 32 bit unsigned integer vector of 2 components type. - /// @see gtc_type_precision - typedef vec<2, u32, defaultp> u32vec2; - - /// Default qualifier 32 bit unsigned integer vector of 3 components type. - /// @see gtc_type_precision - typedef vec<3, u32, defaultp> u32vec3; - - /// Default qualifier 32 bit unsigned integer vector of 4 components type. - /// @see gtc_type_precision - typedef vec<4, u32, defaultp> u32vec4; - - - - - /// Low qualifier 64 bit unsigned integer scalar type. - /// @see gtc_type_precision - typedef vec<1, u64, lowp> lowp_u64vec1; - - /// Low qualifier 64 bit unsigned integer vector of 2 components type. - /// @see gtc_type_precision - typedef vec<2, u64, lowp> lowp_u64vec2; - - /// Low qualifier 64 bit unsigned integer vector of 3 components type. - /// @see gtc_type_precision - typedef vec<3, u64, lowp> lowp_u64vec3; - - /// Low qualifier 64 bit unsigned integer vector of 4 components type. - /// @see gtc_type_precision - typedef vec<4, u64, lowp> lowp_u64vec4; - - - /// Medium qualifier 64 bit unsigned integer scalar type. - /// @see gtc_type_precision - typedef vec<1, u64, mediump> mediump_u64vec1; - - /// Medium qualifier 64 bit unsigned integer vector of 2 components type. - /// @see gtc_type_precision - typedef vec<2, u64, mediump> mediump_u64vec2; - - /// Medium qualifier 64 bit unsigned integer vector of 3 components type. - /// @see gtc_type_precision - typedef vec<3, u64, mediump> mediump_u64vec3; - - /// Medium qualifier 64 bit unsigned integer vector of 4 components type. - /// @see gtc_type_precision - typedef vec<4, u64, mediump> mediump_u64vec4; - - - /// High qualifier 64 bit unsigned integer scalar type. - /// @see gtc_type_precision - typedef vec<1, u64, highp> highp_u64vec1; - - /// High qualifier 64 bit unsigned integer vector of 2 components type. - /// @see gtc_type_precision - typedef vec<2, u64, highp> highp_u64vec2; - - /// High qualifier 64 bit unsigned integer vector of 3 components type. - /// @see gtc_type_precision - typedef vec<3, u64, highp> highp_u64vec3; - - /// High qualifier 64 bit unsigned integer vector of 4 components type. - /// @see gtc_type_precision - typedef vec<4, u64, highp> highp_u64vec4; - - - - - /// Default qualifier 64 bit unsigned integer scalar type. 
- /// @see gtc_type_precision - typedef vec<1, u64, defaultp> u64vec1; - - /// Default qualifier 64 bit unsigned integer vector of 2 components type. - /// @see gtc_type_precision - typedef vec<2, u64, defaultp> u64vec2; - - /// Default qualifier 64 bit unsigned integer vector of 3 components type. - /// @see gtc_type_precision - typedef vec<3, u64, defaultp> u64vec3; - - /// Default qualifier 64 bit unsigned integer vector of 4 components type. - /// @see gtc_type_precision - typedef vec<4, u64, defaultp> u64vec4; - - - ////////////////////// - // Float vector types - - /// 32 bit single-qualifier floating-point scalar. - /// @see gtc_type_precision - typedef float32 float32_t; - - /// 32 bit single-qualifier floating-point scalar. - /// @see gtc_type_precision - typedef float32 f32; - -# ifndef GLM_FORCE_SINGLE_ONLY - - /// 64 bit double-qualifier floating-point scalar. - /// @see gtc_type_precision - typedef float64 float64_t; - - /// 64 bit double-qualifier floating-point scalar. - /// @see gtc_type_precision - typedef float64 f64; -# endif//GLM_FORCE_SINGLE_ONLY - - /// Single-qualifier floating-point vector of 1 component. - /// @see gtc_type_precision - typedef vec<1, float, defaultp> fvec1; - - /// Single-qualifier floating-point vector of 2 components. - /// @see gtc_type_precision - typedef vec<2, float, defaultp> fvec2; - - /// Single-qualifier floating-point vector of 3 components. - /// @see gtc_type_precision - typedef vec<3, float, defaultp> fvec3; - - /// Single-qualifier floating-point vector of 4 components. - /// @see gtc_type_precision - typedef vec<4, float, defaultp> fvec4; - - - /// Single-qualifier floating-point vector of 1 component. - /// @see gtc_type_precision - typedef vec<1, f32, defaultp> f32vec1; - - /// Single-qualifier floating-point vector of 2 components. - /// @see gtc_type_precision - typedef vec<2, f32, defaultp> f32vec2; - - /// Single-qualifier floating-point vector of 3 components. - /// @see gtc_type_precision - typedef vec<3, f32, defaultp> f32vec3; - - /// Single-qualifier floating-point vector of 4 components. - /// @see gtc_type_precision - typedef vec<4, f32, defaultp> f32vec4; - -# ifndef GLM_FORCE_SINGLE_ONLY - /// Double-qualifier floating-point vector of 1 component. - /// @see gtc_type_precision - typedef vec<1, f64, defaultp> f64vec1; - - /// Double-qualifier floating-point vector of 2 components. - /// @see gtc_type_precision - typedef vec<2, f64, defaultp> f64vec2; - - /// Double-qualifier floating-point vector of 3 components. - /// @see gtc_type_precision - typedef vec<3, f64, defaultp> f64vec3; - - /// Double-qualifier floating-point vector of 4 components. - /// @see gtc_type_precision - typedef vec<4, f64, defaultp> f64vec4; -# endif//GLM_FORCE_SINGLE_ONLY - - - ////////////////////// - // Float matrix types - - /// Single-qualifier floating-point 1x1 matrix. - /// @see gtc_type_precision - //typedef detail::tmat1x1 fmat1; - - /// Single-qualifier floating-point 2x2 matrix. - /// @see gtc_type_precision - typedef mat<2, 2, f32, defaultp> fmat2; - - /// Single-qualifier floating-point 3x3 matrix. - /// @see gtc_type_precision - typedef mat<3, 3, f32, defaultp> fmat3; - - /// Single-qualifier floating-point 4x4 matrix. - /// @see gtc_type_precision - typedef mat<4, 4, f32, defaultp> fmat4; - - - /// Single-qualifier floating-point 1x1 matrix. - /// @see gtc_type_precision - //typedef f32 fmat1x1; - - /// Single-qualifier floating-point 2x2 matrix. 
- /// @see gtc_type_precision - typedef mat<2, 2, f32, defaultp> fmat2x2; - - /// Single-qualifier floating-point 2x3 matrix. - /// @see gtc_type_precision - typedef mat<2, 3, f32, defaultp> fmat2x3; - - /// Single-qualifier floating-point 2x4 matrix. - /// @see gtc_type_precision - typedef mat<2, 4, f32, defaultp> fmat2x4; - - /// Single-qualifier floating-point 3x2 matrix. - /// @see gtc_type_precision - typedef mat<3, 2, f32, defaultp> fmat3x2; - - /// Single-qualifier floating-point 3x3 matrix. - /// @see gtc_type_precision - typedef mat<3, 3, f32, defaultp> fmat3x3; - - /// Single-qualifier floating-point 3x4 matrix. - /// @see gtc_type_precision - typedef mat<3, 4, f32, defaultp> fmat3x4; - - /// Single-qualifier floating-point 4x2 matrix. - /// @see gtc_type_precision - typedef mat<4, 2, f32, defaultp> fmat4x2; - - /// Single-qualifier floating-point 4x3 matrix. - /// @see gtc_type_precision - typedef mat<4, 3, f32, defaultp> fmat4x3; - - /// Single-qualifier floating-point 4x4 matrix. - /// @see gtc_type_precision - typedef mat<4, 4, f32, defaultp> fmat4x4; - - - /// Single-qualifier floating-point 1x1 matrix. - /// @see gtc_type_precision - //typedef detail::tmat1x1 f32mat1; - - /// Single-qualifier floating-point 2x2 matrix. - /// @see gtc_type_precision - typedef mat<2, 2, f32, defaultp> f32mat2; - - /// Single-qualifier floating-point 3x3 matrix. - /// @see gtc_type_precision - typedef mat<3, 3, f32, defaultp> f32mat3; - - /// Single-qualifier floating-point 4x4 matrix. - /// @see gtc_type_precision - typedef mat<4, 4, f32, defaultp> f32mat4; - - - /// Single-qualifier floating-point 1x1 matrix. - /// @see gtc_type_precision - //typedef f32 f32mat1x1; - - /// Single-qualifier floating-point 2x2 matrix. - /// @see gtc_type_precision - typedef mat<2, 2, f32, defaultp> f32mat2x2; - - /// Single-qualifier floating-point 2x3 matrix. - /// @see gtc_type_precision - typedef mat<2, 3, f32, defaultp> f32mat2x3; - - /// Single-qualifier floating-point 2x4 matrix. - /// @see gtc_type_precision - typedef mat<2, 4, f32, defaultp> f32mat2x4; - - /// Single-qualifier floating-point 3x2 matrix. - /// @see gtc_type_precision - typedef mat<3, 2, f32, defaultp> f32mat3x2; - - /// Single-qualifier floating-point 3x3 matrix. - /// @see gtc_type_precision - typedef mat<3, 3, f32, defaultp> f32mat3x3; - - /// Single-qualifier floating-point 3x4 matrix. - /// @see gtc_type_precision - typedef mat<3, 4, f32, defaultp> f32mat3x4; - - /// Single-qualifier floating-point 4x2 matrix. - /// @see gtc_type_precision - typedef mat<4, 2, f32, defaultp> f32mat4x2; - - /// Single-qualifier floating-point 4x3 matrix. - /// @see gtc_type_precision - typedef mat<4, 3, f32, defaultp> f32mat4x3; - - /// Single-qualifier floating-point 4x4 matrix. - /// @see gtc_type_precision - typedef mat<4, 4, f32, defaultp> f32mat4x4; - - -# ifndef GLM_FORCE_SINGLE_ONLY - - /// Double-qualifier floating-point 1x1 matrix. - /// @see gtc_type_precision - //typedef detail::tmat1x1 f64mat1; - - /// Double-qualifier floating-point 2x2 matrix. - /// @see gtc_type_precision - typedef mat<2, 2, f64, defaultp> f64mat2; - - /// Double-qualifier floating-point 3x3 matrix. - /// @see gtc_type_precision - typedef mat<3, 3, f64, defaultp> f64mat3; - - /// Double-qualifier floating-point 4x4 matrix. - /// @see gtc_type_precision - typedef mat<4, 4, f64, defaultp> f64mat4; - - - /// Double-qualifier floating-point 1x1 matrix. - /// @see gtc_type_precision - //typedef f64 f64mat1x1; - - /// Double-qualifier floating-point 2x2 matrix. 
- /// @see gtc_type_precision - typedef mat<2, 2, f64, defaultp> f64mat2x2; - - /// Double-qualifier floating-point 2x3 matrix. - /// @see gtc_type_precision - typedef mat<2, 3, f64, defaultp> f64mat2x3; - - /// Double-qualifier floating-point 2x4 matrix. - /// @see gtc_type_precision - typedef mat<2, 4, f64, defaultp> f64mat2x4; - - /// Double-qualifier floating-point 3x2 matrix. - /// @see gtc_type_precision - typedef mat<3, 2, f64, defaultp> f64mat3x2; - - /// Double-qualifier floating-point 3x3 matrix. - /// @see gtc_type_precision - typedef mat<3, 3, f64, defaultp> f64mat3x3; - - /// Double-qualifier floating-point 3x4 matrix. - /// @see gtc_type_precision - typedef mat<3, 4, f64, defaultp> f64mat3x4; - - /// Double-qualifier floating-point 4x2 matrix. - /// @see gtc_type_precision - typedef mat<4, 2, f64, defaultp> f64mat4x2; - - /// Double-qualifier floating-point 4x3 matrix. - /// @see gtc_type_precision - typedef mat<4, 3, f64, defaultp> f64mat4x3; - - /// Double-qualifier floating-point 4x4 matrix. - /// @see gtc_type_precision - typedef mat<4, 4, f64, defaultp> f64mat4x4; - -# endif//GLM_FORCE_SINGLE_ONLY - - ////////////////////////// - // Quaternion types - - /// Single-qualifier floating-point quaternion. - /// @see gtc_type_precision - typedef qua f32quat; - - /// Low single-qualifier floating-point quaternion. - /// @see gtc_type_precision - typedef qua lowp_f32quat; - - /// Low double-qualifier floating-point quaternion. - /// @see gtc_type_precision - typedef qua lowp_f64quat; - - /// Medium single-qualifier floating-point quaternion. - /// @see gtc_type_precision - typedef qua mediump_f32quat; - -# ifndef GLM_FORCE_SINGLE_ONLY - - /// Medium double-qualifier floating-point quaternion. - /// @see gtc_type_precision - typedef qua mediump_f64quat; - - /// High single-qualifier floating-point quaternion. - /// @see gtc_type_precision - typedef qua highp_f32quat; - - /// High double-qualifier floating-point quaternion. - /// @see gtc_type_precision - typedef qua highp_f64quat; - - /// Double-qualifier floating-point quaternion. - /// @see gtc_type_precision - typedef qua f64quat; - -# endif//GLM_FORCE_SINGLE_ONLY - - /// @} -}//namespace glm - -#include "type_precision.inl" diff --git a/third_party/glm/gtc/type_precision.inl b/third_party/glm/gtc/type_precision.inl deleted file mode 100755 index ae80912..0000000 --- a/third_party/glm/gtc/type_precision.inl +++ /dev/null @@ -1,6 +0,0 @@ -/// @ref gtc_precision - -namespace glm -{ - -} diff --git a/third_party/glm/gtc/type_ptr.hpp b/third_party/glm/gtc/type_ptr.hpp deleted file mode 100755 index d7e625a..0000000 --- a/third_party/glm/gtc/type_ptr.hpp +++ /dev/null @@ -1,230 +0,0 @@ -/// @ref gtc_type_ptr -/// @file glm/gtc/type_ptr.hpp -/// -/// @see core (dependence) -/// @see gtc_quaternion (dependence) -/// -/// @defgroup gtc_type_ptr GLM_GTC_type_ptr -/// @ingroup gtc -/// -/// Include to use the features of this extension. -/// -/// Handles the interaction between pointers and vector, matrix types. -/// -/// This extension defines an overloaded function, glm::value_ptr. It returns -/// a pointer to the memory layout of the object. Matrix types store their values -/// in column-major order. -/// -/// This is useful for uploading data to matrices or copying data to buffer objects. 
-/// -/// Example: -/// @code -/// #include -/// #include -/// -/// glm::vec3 aVector(3); -/// glm::mat4 someMatrix(1.0); -/// -/// glUniform3fv(uniformLoc, 1, glm::value_ptr(aVector)); -/// glUniformMatrix4fv(uniformMatrixLoc, 1, GL_FALSE, glm::value_ptr(someMatrix)); -/// @endcode -/// -/// need to be included to use the features of this extension. - -#pragma once - -// Dependency: -#include "../gtc/quaternion.hpp" -#include "../gtc/vec1.hpp" -#include "../vec2.hpp" -#include "../vec3.hpp" -#include "../vec4.hpp" -#include "../mat2x2.hpp" -#include "../mat2x3.hpp" -#include "../mat2x4.hpp" -#include "../mat3x2.hpp" -#include "../mat3x3.hpp" -#include "../mat3x4.hpp" -#include "../mat4x2.hpp" -#include "../mat4x3.hpp" -#include "../mat4x4.hpp" -#include - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# pragma message("GLM: GLM_GTC_type_ptr extension included") -#endif - -namespace glm -{ - /// @addtogroup gtc_type_ptr - /// @{ - - /// Return the constant address to the data of the input parameter. - /// @see gtc_type_ptr - template - GLM_FUNC_DECL typename genType::value_type const * value_ptr(genType const& v); - - /// Build a vector from a pointer. - /// @see gtc_type_ptr - template - GLM_FUNC_DECL vec<1, T, Q> make_vec1(vec<1, T, Q> const& v); - - /// Build a vector from a pointer. - /// @see gtc_type_ptr - template - GLM_FUNC_DECL vec<1, T, Q> make_vec1(vec<2, T, Q> const& v); - - /// Build a vector from a pointer. - /// @see gtc_type_ptr - template - GLM_FUNC_DECL vec<1, T, Q> make_vec1(vec<3, T, Q> const& v); - - /// Build a vector from a pointer. - /// @see gtc_type_ptr - template - GLM_FUNC_DECL vec<1, T, Q> make_vec1(vec<4, T, Q> const& v); - - /// Build a vector from a pointer. - /// @see gtc_type_ptr - template - GLM_FUNC_DECL vec<2, T, Q> make_vec2(vec<1, T, Q> const& v); - - /// Build a vector from a pointer. - /// @see gtc_type_ptr - template - GLM_FUNC_DECL vec<2, T, Q> make_vec2(vec<2, T, Q> const& v); - - /// Build a vector from a pointer. - /// @see gtc_type_ptr - template - GLM_FUNC_DECL vec<2, T, Q> make_vec2(vec<3, T, Q> const& v); - - /// Build a vector from a pointer. - /// @see gtc_type_ptr - template - GLM_FUNC_DECL vec<2, T, Q> make_vec2(vec<4, T, Q> const& v); - - /// Build a vector from a pointer. - /// @see gtc_type_ptr - template - GLM_FUNC_DECL vec<3, T, Q> make_vec3(vec<1, T, Q> const& v); - - /// Build a vector from a pointer. - /// @see gtc_type_ptr - template - GLM_FUNC_DECL vec<3, T, Q> make_vec3(vec<2, T, Q> const& v); - - /// Build a vector from a pointer. - /// @see gtc_type_ptr - template - GLM_FUNC_DECL vec<3, T, Q> make_vec3(vec<3, T, Q> const& v); - - /// Build a vector from a pointer. - /// @see gtc_type_ptr - template - GLM_FUNC_DECL vec<3, T, Q> make_vec3(vec<4, T, Q> const& v); - - /// Build a vector from a pointer. - /// @see gtc_type_ptr - template - GLM_FUNC_DECL vec<4, T, Q> make_vec4(vec<1, T, Q> const& v); - - /// Build a vector from a pointer. - /// @see gtc_type_ptr - template - GLM_FUNC_DECL vec<4, T, Q> make_vec4(vec<2, T, Q> const& v); - - /// Build a vector from a pointer. - /// @see gtc_type_ptr - template - GLM_FUNC_DECL vec<4, T, Q> make_vec4(vec<3, T, Q> const& v); - - /// Build a vector from a pointer. - /// @see gtc_type_ptr - template - GLM_FUNC_DECL vec<4, T, Q> make_vec4(vec<4, T, Q> const& v); - - /// Build a vector from a pointer. - /// @see gtc_type_ptr - template - GLM_FUNC_DECL vec<2, T, defaultp> make_vec2(T const * const ptr); - - /// Build a vector from a pointer. 
- /// @see gtc_type_ptr - template - GLM_FUNC_DECL vec<3, T, defaultp> make_vec3(T const * const ptr); - - /// Build a vector from a pointer. - /// @see gtc_type_ptr - template - GLM_FUNC_DECL vec<4, T, defaultp> make_vec4(T const * const ptr); - - /// Build a matrix from a pointer. - /// @see gtc_type_ptr - template - GLM_FUNC_DECL mat<2, 2, T, defaultp> make_mat2x2(T const * const ptr); - - /// Build a matrix from a pointer. - /// @see gtc_type_ptr - template - GLM_FUNC_DECL mat<2, 3, T, defaultp> make_mat2x3(T const * const ptr); - - /// Build a matrix from a pointer. - /// @see gtc_type_ptr - template - GLM_FUNC_DECL mat<2, 4, T, defaultp> make_mat2x4(T const * const ptr); - - /// Build a matrix from a pointer. - /// @see gtc_type_ptr - template - GLM_FUNC_DECL mat<3, 2, T, defaultp> make_mat3x2(T const * const ptr); - - /// Build a matrix from a pointer. - /// @see gtc_type_ptr - template - GLM_FUNC_DECL mat<3, 3, T, defaultp> make_mat3x3(T const * const ptr); - - /// Build a matrix from a pointer. - /// @see gtc_type_ptr - template - GLM_FUNC_DECL mat<3, 4, T, defaultp> make_mat3x4(T const * const ptr); - - /// Build a matrix from a pointer. - /// @see gtc_type_ptr - template - GLM_FUNC_DECL mat<4, 2, T, defaultp> make_mat4x2(T const * const ptr); - - /// Build a matrix from a pointer. - /// @see gtc_type_ptr - template - GLM_FUNC_DECL mat<4, 3, T, defaultp> make_mat4x3(T const * const ptr); - - /// Build a matrix from a pointer. - /// @see gtc_type_ptr - template - GLM_FUNC_DECL mat<4, 4, T, defaultp> make_mat4x4(T const * const ptr); - - /// Build a matrix from a pointer. - /// @see gtc_type_ptr - template - GLM_FUNC_DECL mat<2, 2, T, defaultp> make_mat2(T const * const ptr); - - /// Build a matrix from a pointer. - /// @see gtc_type_ptr - template - GLM_FUNC_DECL mat<3, 3, T, defaultp> make_mat3(T const * const ptr); - - /// Build a matrix from a pointer. - /// @see gtc_type_ptr - template - GLM_FUNC_DECL mat<4, 4, T, defaultp> make_mat4(T const * const ptr); - - /// Build a quaternion from a pointer. 
- /// @see gtc_type_ptr - template - GLM_FUNC_DECL qua make_quat(T const * const ptr); - - /// @} -}//namespace glm - -#include "type_ptr.inl" diff --git a/third_party/glm/gtc/type_ptr.inl b/third_party/glm/gtc/type_ptr.inl deleted file mode 100755 index 71df4d3..0000000 --- a/third_party/glm/gtc/type_ptr.inl +++ /dev/null @@ -1,386 +0,0 @@ -/// @ref gtc_type_ptr - -#include - -namespace glm -{ - /// @addtogroup gtc_type_ptr - /// @{ - - template - GLM_FUNC_QUALIFIER T const* value_ptr(vec<2, T, Q> const& v) - { - return &(v.x); - } - - template - GLM_FUNC_QUALIFIER T* value_ptr(vec<2, T, Q>& v) - { - return &(v.x); - } - - template - GLM_FUNC_QUALIFIER T const * value_ptr(vec<3, T, Q> const& v) - { - return &(v.x); - } - - template - GLM_FUNC_QUALIFIER T* value_ptr(vec<3, T, Q>& v) - { - return &(v.x); - } - - template - GLM_FUNC_QUALIFIER T const* value_ptr(vec<4, T, Q> const& v) - { - return &(v.x); - } - - template - GLM_FUNC_QUALIFIER T* value_ptr(vec<4, T, Q>& v) - { - return &(v.x); - } - - template - GLM_FUNC_QUALIFIER T const* value_ptr(mat<2, 2, T, Q> const& m) - { - return &(m[0].x); - } - - template - GLM_FUNC_QUALIFIER T* value_ptr(mat<2, 2, T, Q>& m) - { - return &(m[0].x); - } - - template - GLM_FUNC_QUALIFIER T const* value_ptr(mat<3, 3, T, Q> const& m) - { - return &(m[0].x); - } - - template - GLM_FUNC_QUALIFIER T* value_ptr(mat<3, 3, T, Q>& m) - { - return &(m[0].x); - } - - template - GLM_FUNC_QUALIFIER T const* value_ptr(mat<4, 4, T, Q> const& m) - { - return &(m[0].x); - } - - template - GLM_FUNC_QUALIFIER T* value_ptr(mat<4, 4, T, Q>& m) - { - return &(m[0].x); - } - - template - GLM_FUNC_QUALIFIER T const* value_ptr(mat<2, 3, T, Q> const& m) - { - return &(m[0].x); - } - - template - GLM_FUNC_QUALIFIER T* value_ptr(mat<2, 3, T, Q>& m) - { - return &(m[0].x); - } - - template - GLM_FUNC_QUALIFIER T const* value_ptr(mat<3, 2, T, Q> const& m) - { - return &(m[0].x); - } - - template - GLM_FUNC_QUALIFIER T* value_ptr(mat<3, 2, T, Q>& m) - { - return &(m[0].x); - } - - template - GLM_FUNC_QUALIFIER T const* value_ptr(mat<2, 4, T, Q> const& m) - { - return &(m[0].x); - } - - template - GLM_FUNC_QUALIFIER T* value_ptr(mat<2, 4, T, Q>& m) - { - return &(m[0].x); - } - - template - GLM_FUNC_QUALIFIER T const* value_ptr(mat<4, 2, T, Q> const& m) - { - return &(m[0].x); - } - - template - GLM_FUNC_QUALIFIER T* value_ptr(mat<4, 2, T, Q>& m) - { - return &(m[0].x); - } - - template - GLM_FUNC_QUALIFIER T const* value_ptr(mat<3, 4, T, Q> const& m) - { - return &(m[0].x); - } - - template - GLM_FUNC_QUALIFIER T* value_ptr(mat<3, 4, T, Q>& m) - { - return &(m[0].x); - } - - template - GLM_FUNC_QUALIFIER T const* value_ptr(mat<4, 3, T, Q> const& m) - { - return &(m[0].x); - } - - template - GLM_FUNC_QUALIFIER T * value_ptr(mat<4, 3, T, Q>& m) - { - return &(m[0].x); - } - - template - GLM_FUNC_QUALIFIER T const * value_ptr(qua const& q) - { - return &(q[0]); - } - - template - GLM_FUNC_QUALIFIER T* value_ptr(qua& q) - { - return &(q[0]); - } - - template - inline vec<1, T, Q> make_vec1(vec<1, T, Q> const& v) - { - return v; - } - - template - inline vec<1, T, Q> make_vec1(vec<2, T, Q> const& v) - { - return vec<1, T, Q>(v); - } - - template - inline vec<1, T, Q> make_vec1(vec<3, T, Q> const& v) - { - return vec<1, T, Q>(v); - } - - template - inline vec<1, T, Q> make_vec1(vec<4, T, Q> const& v) - { - return vec<1, T, Q>(v); - } - - template - inline vec<2, T, Q> make_vec2(vec<1, T, Q> const& v) - { - return vec<2, T, Q>(v.x, static_cast(0)); - } - - template - inline vec<2, T, Q> 
make_vec2(vec<2, T, Q> const& v) - { - return v; - } - - template - inline vec<2, T, Q> make_vec2(vec<3, T, Q> const& v) - { - return vec<2, T, Q>(v); - } - - template - inline vec<2, T, Q> make_vec2(vec<4, T, Q> const& v) - { - return vec<2, T, Q>(v); - } - - template - inline vec<3, T, Q> make_vec3(vec<1, T, Q> const& v) - { - return vec<3, T, Q>(v.x, static_cast(0), static_cast(0)); - } - - template - inline vec<3, T, Q> make_vec3(vec<2, T, Q> const& v) - { - return vec<3, T, Q>(v.x, v.y, static_cast(0)); - } - - template - inline vec<3, T, Q> make_vec3(vec<3, T, Q> const& v) - { - return v; - } - - template - inline vec<3, T, Q> make_vec3(vec<4, T, Q> const& v) - { - return vec<3, T, Q>(v); - } - - template - inline vec<4, T, Q> make_vec4(vec<1, T, Q> const& v) - { - return vec<4, T, Q>(v.x, static_cast(0), static_cast(0), static_cast(1)); - } - - template - inline vec<4, T, Q> make_vec4(vec<2, T, Q> const& v) - { - return vec<4, T, Q>(v.x, v.y, static_cast(0), static_cast(1)); - } - - template - inline vec<4, T, Q> make_vec4(vec<3, T, Q> const& v) - { - return vec<4, T, Q>(v.x, v.y, v.z, static_cast(1)); - } - - template - inline vec<4, T, Q> make_vec4(vec<4, T, Q> const& v) - { - return v; - } - - template - GLM_FUNC_QUALIFIER vec<2, T, defaultp> make_vec2(T const *const ptr) - { - vec<2, T, defaultp> Result; - memcpy(value_ptr(Result), ptr, sizeof(vec<2, T, defaultp>)); - return Result; - } - - template - GLM_FUNC_QUALIFIER vec<3, T, defaultp> make_vec3(T const *const ptr) - { - vec<3, T, defaultp> Result; - memcpy(value_ptr(Result), ptr, sizeof(vec<3, T, defaultp>)); - return Result; - } - - template - GLM_FUNC_QUALIFIER vec<4, T, defaultp> make_vec4(T const *const ptr) - { - vec<4, T, defaultp> Result; - memcpy(value_ptr(Result), ptr, sizeof(vec<4, T, defaultp>)); - return Result; - } - - template - GLM_FUNC_QUALIFIER mat<2, 2, T, defaultp> make_mat2x2(T const *const ptr) - { - mat<2, 2, T, defaultp> Result; - memcpy(value_ptr(Result), ptr, sizeof(mat<2, 2, T, defaultp>)); - return Result; - } - - template - GLM_FUNC_QUALIFIER mat<2, 3, T, defaultp> make_mat2x3(T const *const ptr) - { - mat<2, 3, T, defaultp> Result; - memcpy(value_ptr(Result), ptr, sizeof(mat<2, 3, T, defaultp>)); - return Result; - } - - template - GLM_FUNC_QUALIFIER mat<2, 4, T, defaultp> make_mat2x4(T const *const ptr) - { - mat<2, 4, T, defaultp> Result; - memcpy(value_ptr(Result), ptr, sizeof(mat<2, 4, T, defaultp>)); - return Result; - } - - template - GLM_FUNC_QUALIFIER mat<3, 2, T, defaultp> make_mat3x2(T const *const ptr) - { - mat<3, 2, T, defaultp> Result; - memcpy(value_ptr(Result), ptr, sizeof(mat<3, 2, T, defaultp>)); - return Result; - } - - template - GLM_FUNC_QUALIFIER mat<3, 3, T, defaultp> make_mat3x3(T const *const ptr) - { - mat<3, 3, T, defaultp> Result; - memcpy(value_ptr(Result), ptr, sizeof(mat<3, 3, T, defaultp>)); - return Result; - } - - template - GLM_FUNC_QUALIFIER mat<3, 4, T, defaultp> make_mat3x4(T const *const ptr) - { - mat<3, 4, T, defaultp> Result; - memcpy(value_ptr(Result), ptr, sizeof(mat<3, 4, T, defaultp>)); - return Result; - } - - template - GLM_FUNC_QUALIFIER mat<4, 2, T, defaultp> make_mat4x2(T const *const ptr) - { - mat<4, 2, T, defaultp> Result; - memcpy(value_ptr(Result), ptr, sizeof(mat<4, 2, T, defaultp>)); - return Result; - } - - template - GLM_FUNC_QUALIFIER mat<4, 3, T, defaultp> make_mat4x3(T const *const ptr) - { - mat<4, 3, T, defaultp> Result; - memcpy(value_ptr(Result), ptr, sizeof(mat<4, 3, T, defaultp>)); - return Result; - } - - template - 
GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> make_mat4x4(T const *const ptr) - { - mat<4, 4, T, defaultp> Result; - memcpy(value_ptr(Result), ptr, sizeof(mat<4, 4, T, defaultp>)); - return Result; - } - - template - GLM_FUNC_QUALIFIER mat<2, 2, T, defaultp> make_mat2(T const *const ptr) - { - return make_mat2x2(ptr); - } - - template - GLM_FUNC_QUALIFIER mat<3, 3, T, defaultp> make_mat3(T const *const ptr) - { - return make_mat3x3(ptr); - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> make_mat4(T const *const ptr) - { - return make_mat4x4(ptr); - } - - template - GLM_FUNC_QUALIFIER qua make_quat(T const *const ptr) - { - qua Result; - memcpy(value_ptr(Result), ptr, sizeof(qua)); - return Result; - } - - /// @} -}//namespace glm - diff --git a/third_party/glm/gtc/ulp.hpp b/third_party/glm/gtc/ulp.hpp deleted file mode 100755 index 0d80a75..0000000 --- a/third_party/glm/gtc/ulp.hpp +++ /dev/null @@ -1,152 +0,0 @@ -/// @ref gtc_ulp -/// @file glm/gtc/ulp.hpp -/// -/// @see core (dependence) -/// -/// @defgroup gtc_ulp GLM_GTC_ulp -/// @ingroup gtc -/// -/// Include to use the features of this extension. -/// -/// Allow the measurement of the accuracy of a function against a reference -/// implementation. This extension works on floating-point data and provide results -/// in ULP. - -#pragma once - -// Dependencies -#include "../detail/setup.hpp" -#include "../detail/qualifier.hpp" -#include "../detail/_vectorize.hpp" -#include "../ext/scalar_int_sized.hpp" - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# pragma message("GLM: GLM_GTC_ulp extension included") -#endif - -namespace glm -{ - /// Return the next ULP value(s) after the input value(s). - /// - /// @tparam genType A floating-point scalar type. - /// - /// @see gtc_ulp - template - GLM_FUNC_DECL genType next_float(genType x); - - /// Return the previous ULP value(s) before the input value(s). - /// - /// @tparam genType A floating-point scalar type. - /// - /// @see gtc_ulp - template - GLM_FUNC_DECL genType prev_float(genType x); - - /// Return the value(s) ULP distance after the input value(s). - /// - /// @tparam genType A floating-point scalar type. - /// - /// @see gtc_ulp - template - GLM_FUNC_DECL genType next_float(genType x, int ULPs); - - /// Return the value(s) ULP distance before the input value(s). - /// - /// @tparam genType A floating-point scalar type. - /// - /// @see gtc_ulp - template - GLM_FUNC_DECL genType prev_float(genType x, int ULPs); - - /// Return the distance in the number of ULP between 2 single-precision floating-point scalars. - /// - /// @see gtc_ulp - GLM_FUNC_DECL int float_distance(float x, float y); - - /// Return the distance in the number of ULP between 2 double-precision floating-point scalars. - /// - /// @see gtc_ulp - GLM_FUNC_DECL int64 float_distance(double x, double y); - - /// Return the next ULP value(s) after the input value(s). - /// - /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector - /// @tparam T Floating-point - /// @tparam Q Value from qualifier enum - /// - /// @see gtc_ulp - template - GLM_FUNC_DECL vec next_float(vec const& x); - - /// Return the value(s) ULP distance after the input value(s). - /// - /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector - /// @tparam T Floating-point - /// @tparam Q Value from qualifier enum - /// - /// @see gtc_ulp - template - GLM_FUNC_DECL vec next_float(vec const& x, int ULPs); - - /// Return the value(s) ULP distance after the input value(s). 
- /// - /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector - /// @tparam T Floating-point - /// @tparam Q Value from qualifier enum - /// - /// @see gtc_ulp - template - GLM_FUNC_DECL vec next_float(vec const& x, vec const& ULPs); - - /// Return the previous ULP value(s) before the input value(s). - /// - /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector - /// @tparam T Floating-point - /// @tparam Q Value from qualifier enum - /// - /// @see gtc_ulp - template - GLM_FUNC_DECL vec prev_float(vec const& x); - - /// Return the value(s) ULP distance before the input value(s). - /// - /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector - /// @tparam T Floating-point - /// @tparam Q Value from qualifier enum - /// - /// @see gtc_ulp - template - GLM_FUNC_DECL vec prev_float(vec const& x, int ULPs); - - /// Return the value(s) ULP distance before the input value(s). - /// - /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector - /// @tparam T Floating-point - /// @tparam Q Value from qualifier enum - /// - /// @see gtc_ulp - template - GLM_FUNC_DECL vec prev_float(vec const& x, vec const& ULPs); - - /// Return the distance in the number of ULP between 2 single-precision floating-point scalars. - /// - /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector - /// @tparam Q Value from qualifier enum - /// - /// @see gtc_ulp - template - GLM_FUNC_DECL vec float_distance(vec const& x, vec const& y); - - /// Return the distance in the number of ULP between 2 double-precision floating-point scalars. - /// - /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector - /// @tparam Q Value from qualifier enum - /// - /// @see gtc_ulp - template - GLM_FUNC_DECL vec float_distance(vec const& x, vec const& y); - - /// @} -}//namespace glm - -#include "ulp.inl" diff --git a/third_party/glm/gtc/ulp.inl b/third_party/glm/gtc/ulp.inl deleted file mode 100755 index 4ecbd3f..0000000 --- a/third_party/glm/gtc/ulp.inl +++ /dev/null @@ -1,173 +0,0 @@ -/// @ref gtc_ulp - -#include "../ext/scalar_ulp.hpp" - -namespace glm -{ - template<> - GLM_FUNC_QUALIFIER float next_float(float x) - { -# if GLM_HAS_CXX11_STL - return std::nextafter(x, std::numeric_limits::max()); -# elif((GLM_COMPILER & GLM_COMPILER_VC) || ((GLM_COMPILER & GLM_COMPILER_INTEL) && (GLM_PLATFORM & GLM_PLATFORM_WINDOWS))) - return detail::nextafterf(x, FLT_MAX); -# elif(GLM_PLATFORM & GLM_PLATFORM_ANDROID) - return __builtin_nextafterf(x, FLT_MAX); -# else - return nextafterf(x, FLT_MAX); -# endif - } - - template<> - GLM_FUNC_QUALIFIER double next_float(double x) - { -# if GLM_HAS_CXX11_STL - return std::nextafter(x, std::numeric_limits::max()); -# elif((GLM_COMPILER & GLM_COMPILER_VC) || ((GLM_COMPILER & GLM_COMPILER_INTEL) && (GLM_PLATFORM & GLM_PLATFORM_WINDOWS))) - return detail::nextafter(x, std::numeric_limits::max()); -# elif(GLM_PLATFORM & GLM_PLATFORM_ANDROID) - return __builtin_nextafter(x, DBL_MAX); -# else - return nextafter(x, DBL_MAX); -# endif - } - - template - GLM_FUNC_QUALIFIER T next_float(T x, int ULPs) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'next_float' only accept floating-point input"); - assert(ULPs >= 0); - - T temp = x; - for (int i = 0; i < ULPs; ++i) - temp = next_float(temp); - return temp; - } - - GLM_FUNC_QUALIFIER float prev_float(float x) - { -# if GLM_HAS_CXX11_STL - return std::nextafter(x, 
std::numeric_limits::min()); -# elif((GLM_COMPILER & GLM_COMPILER_VC) || ((GLM_COMPILER & GLM_COMPILER_INTEL) && (GLM_PLATFORM & GLM_PLATFORM_WINDOWS))) - return detail::nextafterf(x, FLT_MIN); -# elif(GLM_PLATFORM & GLM_PLATFORM_ANDROID) - return __builtin_nextafterf(x, FLT_MIN); -# else - return nextafterf(x, FLT_MIN); -# endif - } - - GLM_FUNC_QUALIFIER double prev_float(double x) - { -# if GLM_HAS_CXX11_STL - return std::nextafter(x, std::numeric_limits::min()); -# elif((GLM_COMPILER & GLM_COMPILER_VC) || ((GLM_COMPILER & GLM_COMPILER_INTEL) && (GLM_PLATFORM & GLM_PLATFORM_WINDOWS))) - return _nextafter(x, DBL_MIN); -# elif(GLM_PLATFORM & GLM_PLATFORM_ANDROID) - return __builtin_nextafter(x, DBL_MIN); -# else - return nextafter(x, DBL_MIN); -# endif - } - - template - GLM_FUNC_QUALIFIER T prev_float(T x, int ULPs) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'prev_float' only accept floating-point input"); - assert(ULPs >= 0); - - T temp = x; - for (int i = 0; i < ULPs; ++i) - temp = prev_float(temp); - return temp; - } - - GLM_FUNC_QUALIFIER int float_distance(float x, float y) - { - detail::float_t const a(x); - detail::float_t const b(y); - - return abs(a.i - b.i); - } - - GLM_FUNC_QUALIFIER int64 float_distance(double x, double y) - { - detail::float_t const a(x); - detail::float_t const b(y); - - return abs(a.i - b.i); - } - - template - GLM_FUNC_QUALIFIER vec next_float(vec const& x) - { - vec Result; - for (length_t i = 0, n = Result.length(); i < n; ++i) - Result[i] = next_float(x[i]); - return Result; - } - - template - GLM_FUNC_QUALIFIER vec next_float(vec const& x, int ULPs) - { - vec Result; - for (length_t i = 0, n = Result.length(); i < n; ++i) - Result[i] = next_float(x[i], ULPs); - return Result; - } - - template - GLM_FUNC_QUALIFIER vec next_float(vec const& x, vec const& ULPs) - { - vec Result; - for (length_t i = 0, n = Result.length(); i < n; ++i) - Result[i] = next_float(x[i], ULPs[i]); - return Result; - } - - template - GLM_FUNC_QUALIFIER vec prev_float(vec const& x) - { - vec Result; - for (length_t i = 0, n = Result.length(); i < n; ++i) - Result[i] = prev_float(x[i]); - return Result; - } - - template - GLM_FUNC_QUALIFIER vec prev_float(vec const& x, int ULPs) - { - vec Result; - for (length_t i = 0, n = Result.length(); i < n; ++i) - Result[i] = prev_float(x[i], ULPs); - return Result; - } - - template - GLM_FUNC_QUALIFIER vec prev_float(vec const& x, vec const& ULPs) - { - vec Result; - for (length_t i = 0, n = Result.length(); i < n; ++i) - Result[i] = prev_float(x[i], ULPs[i]); - return Result; - } - - template - GLM_FUNC_QUALIFIER vec float_distance(vec const& x, vec const& y) - { - vec Result; - for (length_t i = 0, n = Result.length(); i < n; ++i) - Result[i] = float_distance(x[i], y[i]); - return Result; - } - - template - GLM_FUNC_QUALIFIER vec float_distance(vec const& x, vec const& y) - { - vec Result; - for (length_t i = 0, n = Result.length(); i < n; ++i) - Result[i] = float_distance(x[i], y[i]); - return Result; - } -}//namespace glm - diff --git a/third_party/glm/gtc/vec1.hpp b/third_party/glm/gtc/vec1.hpp deleted file mode 100755 index c20be87..0000000 --- a/third_party/glm/gtc/vec1.hpp +++ /dev/null @@ -1,30 +0,0 @@ -/// @ref gtc_vec1 -/// @file glm/gtc/vec1.hpp -/// -/// @see core (dependence) -/// -/// @defgroup gtc_vec1 GLM_GTC_vec1 -/// @ingroup gtc -/// -/// Include to use the features of this extension. -/// -/// Add vec1, ivec1, uvec1 and bvec1 types. 
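The float_distance overloads implemented above count how many representable floats lie between two values by comparing their bit patterns (detail::float_t exposes the value as a signed integer i). The same idea for positive, finite single-precision values, sketched without GLM:

    #include <cstdint>
    #include <cstdlib>   // std::abs
    #include <cstring>   // std::memcpy

    // ULP distance between two positive, finite floats: reinterpret the bits
    // as integers and take the absolute difference of the representations.
    std::int32_t float_distance_ulp(float x, float y)
    {
        std::int32_t xi, yi;
        std::memcpy(&xi, &x, sizeof(float));
        std::memcpy(&yi, &y, sizeof(float));
        return std::abs(xi - yi);
    }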
- -#pragma once - -// Dependency: -#include "../ext/vector_bool1.hpp" -#include "../ext/vector_bool1_precision.hpp" -#include "../ext/vector_float1.hpp" -#include "../ext/vector_float1_precision.hpp" -#include "../ext/vector_double1.hpp" -#include "../ext/vector_double1_precision.hpp" -#include "../ext/vector_int1.hpp" -#include "../ext/vector_int1_precision.hpp" -#include "../ext/vector_uint1.hpp" -#include "../ext/vector_uint1_precision.hpp" - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# pragma message("GLM: GLM_GTC_vec1 extension included") -#endif - diff --git a/third_party/glm/gtx/associated_min_max.hpp b/third_party/glm/gtx/associated_min_max.hpp deleted file mode 100755 index d1a41c0..0000000 --- a/third_party/glm/gtx/associated_min_max.hpp +++ /dev/null @@ -1,207 +0,0 @@ -/// @ref gtx_associated_min_max -/// @file glm/gtx/associated_min_max.hpp -/// -/// @see core (dependence) -/// @see gtx_extented_min_max (dependence) -/// -/// @defgroup gtx_associated_min_max GLM_GTX_associated_min_max -/// @ingroup gtx -/// -/// Include to use the features of this extension. -/// -/// @brief Min and max functions that return associated values not the compared onces. - -#pragma once - -// Dependency: -#include "../glm.hpp" - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# ifndef GLM_ENABLE_EXPERIMENTAL -# pragma message("GLM: GLM_GTX_associated_min_max is an experimental extension and may change in the future. Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.") -# else -# pragma message("GLM: GLM_GTX_associated_min_max extension included") -# endif -#endif - -namespace glm -{ - /// @addtogroup gtx_associated_min_max - /// @{ - - /// Minimum comparison between 2 variables and returns 2 associated variable values - /// @see gtx_associated_min_max - template - GLM_FUNC_DECL U associatedMin(T x, U a, T y, U b); - - /// Minimum comparison between 2 variables and returns 2 associated variable values - /// @see gtx_associated_min_max - template - GLM_FUNC_DECL vec<2, U, Q> associatedMin( - vec const& x, vec const& a, - vec const& y, vec const& b); - - /// Minimum comparison between 2 variables and returns 2 associated variable values - /// @see gtx_associated_min_max - template - GLM_FUNC_DECL vec associatedMin( - T x, const vec& a, - T y, const vec& b); - - /// Minimum comparison between 2 variables and returns 2 associated variable values - /// @see gtx_associated_min_max - template - GLM_FUNC_DECL vec associatedMin( - vec const& x, U a, - vec const& y, U b); - - /// Minimum comparison between 3 variables and returns 3 associated variable values - /// @see gtx_associated_min_max - template - GLM_FUNC_DECL U associatedMin( - T x, U a, - T y, U b, - T z, U c); - - /// Minimum comparison between 3 variables and returns 3 associated variable values - /// @see gtx_associated_min_max - template - GLM_FUNC_DECL vec associatedMin( - vec const& x, vec const& a, - vec const& y, vec const& b, - vec const& z, vec const& c); - - /// Minimum comparison between 4 variables and returns 4 associated variable values - /// @see gtx_associated_min_max - template - GLM_FUNC_DECL U associatedMin( - T x, U a, - T y, U b, - T z, U c, - T w, U d); - - /// Minimum comparison between 4 variables and returns 4 associated variable values - /// @see gtx_associated_min_max - template - GLM_FUNC_DECL vec associatedMin( - vec const& x, vec const& a, - vec const& y, vec const& b, - vec const& z, vec const& c, - vec const& w, vec const& d); - - /// Minimum 
comparison between 4 variables and returns 4 associated variable values - /// @see gtx_associated_min_max - template - GLM_FUNC_DECL vec associatedMin( - T x, vec const& a, - T y, vec const& b, - T z, vec const& c, - T w, vec const& d); - - /// Minimum comparison between 4 variables and returns 4 associated variable values - /// @see gtx_associated_min_max - template - GLM_FUNC_DECL vec associatedMin( - vec const& x, U a, - vec const& y, U b, - vec const& z, U c, - vec const& w, U d); - - /// Maximum comparison between 2 variables and returns 2 associated variable values - /// @see gtx_associated_min_max - template - GLM_FUNC_DECL U associatedMax(T x, U a, T y, U b); - - /// Maximum comparison between 2 variables and returns 2 associated variable values - /// @see gtx_associated_min_max - template - GLM_FUNC_DECL vec<2, U, Q> associatedMax( - vec const& x, vec const& a, - vec const& y, vec const& b); - - /// Maximum comparison between 2 variables and returns 2 associated variable values - /// @see gtx_associated_min_max - template - GLM_FUNC_DECL vec associatedMax( - T x, vec const& a, - T y, vec const& b); - - /// Maximum comparison between 2 variables and returns 2 associated variable values - /// @see gtx_associated_min_max - template - GLM_FUNC_DECL vec associatedMax( - vec const& x, U a, - vec const& y, U b); - - /// Maximum comparison between 3 variables and returns 3 associated variable values - /// @see gtx_associated_min_max - template - GLM_FUNC_DECL U associatedMax( - T x, U a, - T y, U b, - T z, U c); - - /// Maximum comparison between 3 variables and returns 3 associated variable values - /// @see gtx_associated_min_max - template - GLM_FUNC_DECL vec associatedMax( - vec const& x, vec const& a, - vec const& y, vec const& b, - vec const& z, vec const& c); - - /// Maximum comparison between 3 variables and returns 3 associated variable values - /// @see gtx_associated_min_max - template - GLM_FUNC_DECL vec associatedMax( - T x, vec const& a, - T y, vec const& b, - T z, vec const& c); - - /// Maximum comparison between 3 variables and returns 3 associated variable values - /// @see gtx_associated_min_max - template - GLM_FUNC_DECL vec associatedMax( - vec const& x, U a, - vec const& y, U b, - vec const& z, U c); - - /// Maximum comparison between 4 variables and returns 4 associated variable values - /// @see gtx_associated_min_max - template - GLM_FUNC_DECL U associatedMax( - T x, U a, - T y, U b, - T z, U c, - T w, U d); - - /// Maximum comparison between 4 variables and returns 4 associated variable values - /// @see gtx_associated_min_max - template - GLM_FUNC_DECL vec associatedMax( - vec const& x, vec const& a, - vec const& y, vec const& b, - vec const& z, vec const& c, - vec const& w, vec const& d); - - /// Maximum comparison between 4 variables and returns 4 associated variable values - /// @see gtx_associated_min_max - template - GLM_FUNC_DECL vec associatedMax( - T x, vec const& a, - T y, vec const& b, - T z, vec const& c, - T w, vec const& d); - - /// Maximum comparison between 4 variables and returns 4 associated variable values - /// @see gtx_associated_min_max - template - GLM_FUNC_DECL vec associatedMax( - vec const& x, U a, - vec const& y, U b, - vec const& z, U c, - vec const& w, U d); - - /// @} -} //namespace glm - -#include "associated_min_max.inl" diff --git a/third_party/glm/gtx/associated_min_max.inl b/third_party/glm/gtx/associated_min_max.inl deleted file mode 100755 index 5186c47..0000000 --- a/third_party/glm/gtx/associated_min_max.inl +++ /dev/null @@ 
-1,354 +0,0 @@ -/// @ref gtx_associated_min_max - -namespace glm{ - -// Min comparison between 2 variables -template -GLM_FUNC_QUALIFIER U associatedMin(T x, U a, T y, U b) -{ - return x < y ? a : b; -} - -template -GLM_FUNC_QUALIFIER vec<2, U, Q> associatedMin -( - vec const& x, vec const& a, - vec const& y, vec const& b -) -{ - vec Result; - for(length_t i = 0, n = Result.length(); i < n; ++i) - Result[i] = x[i] < y[i] ? a[i] : b[i]; - return Result; -} - -template -GLM_FUNC_QUALIFIER vec associatedMin -( - T x, const vec& a, - T y, const vec& b -) -{ - vec Result; - for(length_t i = 0, n = Result.length(); i < n; ++i) - Result[i] = x < y ? a[i] : b[i]; - return Result; -} - -template -GLM_FUNC_QUALIFIER vec associatedMin -( - vec const& x, U a, - vec const& y, U b -) -{ - vec Result; - for(length_t i = 0, n = Result.length(); i < n; ++i) - Result[i] = x[i] < y[i] ? a : b; - return Result; -} - -// Min comparison between 3 variables -template -GLM_FUNC_QUALIFIER U associatedMin -( - T x, U a, - T y, U b, - T z, U c -) -{ - U Result = x < y ? (x < z ? a : c) : (y < z ? b : c); - return Result; -} - -template -GLM_FUNC_QUALIFIER vec associatedMin -( - vec const& x, vec const& a, - vec const& y, vec const& b, - vec const& z, vec const& c -) -{ - vec Result; - for(length_t i = 0, n = Result.length(); i < n; ++i) - Result[i] = x[i] < y[i] ? (x[i] < z[i] ? a[i] : c[i]) : (y[i] < z[i] ? b[i] : c[i]); - return Result; -} - -// Min comparison between 4 variables -template -GLM_FUNC_QUALIFIER U associatedMin -( - T x, U a, - T y, U b, - T z, U c, - T w, U d -) -{ - T Test1 = min(x, y); - T Test2 = min(z, w); - U Result1 = x < y ? a : b; - U Result2 = z < w ? c : d; - U Result = Test1 < Test2 ? Result1 : Result2; - return Result; -} - -// Min comparison between 4 variables -template -GLM_FUNC_QUALIFIER vec associatedMin -( - vec const& x, vec const& a, - vec const& y, vec const& b, - vec const& z, vec const& c, - vec const& w, vec const& d -) -{ - vec Result; - for(length_t i = 0, n = Result.length(); i < n; ++i) - { - T Test1 = min(x[i], y[i]); - T Test2 = min(z[i], w[i]); - U Result1 = x[i] < y[i] ? a[i] : b[i]; - U Result2 = z[i] < w[i] ? c[i] : d[i]; - Result[i] = Test1 < Test2 ? Result1 : Result2; - } - return Result; -} - -// Min comparison between 4 variables -template -GLM_FUNC_QUALIFIER vec associatedMin -( - T x, vec const& a, - T y, vec const& b, - T z, vec const& c, - T w, vec const& d -) -{ - T Test1 = min(x, y); - T Test2 = min(z, w); - - vec Result; - for(length_t i = 0, n = Result.length(); i < n; ++i) - { - U Result1 = x < y ? a[i] : b[i]; - U Result2 = z < w ? c[i] : d[i]; - Result[i] = Test1 < Test2 ? Result1 : Result2; - } - return Result; -} - -// Min comparison between 4 variables -template -GLM_FUNC_QUALIFIER vec associatedMin -( - vec const& x, U a, - vec const& y, U b, - vec const& z, U c, - vec const& w, U d -) -{ - vec Result; - for(length_t i = 0, n = Result.length(); i < n; ++i) - { - T Test1 = min(x[i], y[i]); - T Test2 = min(z[i], w[i]); - U Result1 = x[i] < y[i] ? a : b; - U Result2 = z[i] < w[i] ? c : d; - Result[i] = Test1 < Test2 ? Result1 : Result2; - } - return Result; -} - -// Max comparison between 2 variables -template -GLM_FUNC_QUALIFIER U associatedMax(T x, U a, T y, U b) -{ - return x > y ? 
a : b; -} - -// Max comparison between 2 variables -template -GLM_FUNC_QUALIFIER vec<2, U, Q> associatedMax -( - vec const& x, vec const& a, - vec const& y, vec const& b -) -{ - vec Result; - for(length_t i = 0, n = Result.length(); i < n; ++i) - Result[i] = x[i] > y[i] ? a[i] : b[i]; - return Result; -} - -// Max comparison between 2 variables -template -GLM_FUNC_QUALIFIER vec associatedMax -( - T x, vec const& a, - T y, vec const& b -) -{ - vec Result; - for(length_t i = 0, n = Result.length(); i < n; ++i) - Result[i] = x > y ? a[i] : b[i]; - return Result; -} - -// Max comparison between 2 variables -template -GLM_FUNC_QUALIFIER vec associatedMax -( - vec const& x, U a, - vec const& y, U b -) -{ - vec Result; - for(length_t i = 0, n = Result.length(); i < n; ++i) - Result[i] = x[i] > y[i] ? a : b; - return Result; -} - -// Max comparison between 3 variables -template -GLM_FUNC_QUALIFIER U associatedMax -( - T x, U a, - T y, U b, - T z, U c -) -{ - U Result = x > y ? (x > z ? a : c) : (y > z ? b : c); - return Result; -} - -// Max comparison between 3 variables -template -GLM_FUNC_QUALIFIER vec associatedMax -( - vec const& x, vec const& a, - vec const& y, vec const& b, - vec const& z, vec const& c -) -{ - vec Result; - for(length_t i = 0, n = Result.length(); i < n; ++i) - Result[i] = x[i] > y[i] ? (x[i] > z[i] ? a[i] : c[i]) : (y[i] > z[i] ? b[i] : c[i]); - return Result; -} - -// Max comparison between 3 variables -template -GLM_FUNC_QUALIFIER vec associatedMax -( - T x, vec const& a, - T y, vec const& b, - T z, vec const& c -) -{ - vec Result; - for(length_t i = 0, n = Result.length(); i < n; ++i) - Result[i] = x > y ? (x > z ? a[i] : c[i]) : (y > z ? b[i] : c[i]); - return Result; -} - -// Max comparison between 3 variables -template -GLM_FUNC_QUALIFIER vec associatedMax -( - vec const& x, U a, - vec const& y, U b, - vec const& z, U c -) -{ - vec Result; - for(length_t i = 0, n = Result.length(); i < n; ++i) - Result[i] = x[i] > y[i] ? (x[i] > z[i] ? a : c) : (y[i] > z[i] ? b : c); - return Result; -} - -// Max comparison between 4 variables -template -GLM_FUNC_QUALIFIER U associatedMax -( - T x, U a, - T y, U b, - T z, U c, - T w, U d -) -{ - T Test1 = max(x, y); - T Test2 = max(z, w); - U Result1 = x > y ? a : b; - U Result2 = z > w ? c : d; - U Result = Test1 > Test2 ? Result1 : Result2; - return Result; -} - -// Max comparison between 4 variables -template -GLM_FUNC_QUALIFIER vec associatedMax -( - vec const& x, vec const& a, - vec const& y, vec const& b, - vec const& z, vec const& c, - vec const& w, vec const& d -) -{ - vec Result; - for(length_t i = 0, n = Result.length(); i < n; ++i) - { - T Test1 = max(x[i], y[i]); - T Test2 = max(z[i], w[i]); - U Result1 = x[i] > y[i] ? a[i] : b[i]; - U Result2 = z[i] > w[i] ? c[i] : d[i]; - Result[i] = Test1 > Test2 ? Result1 : Result2; - } - return Result; -} - -// Max comparison between 4 variables -template -GLM_FUNC_QUALIFIER vec associatedMax -( - T x, vec const& a, - T y, vec const& b, - T z, vec const& c, - T w, vec const& d -) -{ - T Test1 = max(x, y); - T Test2 = max(z, w); - - vec Result; - for(length_t i = 0, n = Result.length(); i < n; ++i) - { - U Result1 = x > y ? a[i] : b[i]; - U Result2 = z > w ? c[i] : d[i]; - Result[i] = Test1 > Test2 ? 
Result1 : Result2; - } - return Result; -} - -// Max comparison between 4 variables -template -GLM_FUNC_QUALIFIER vec associatedMax -( - vec const& x, U a, - vec const& y, U b, - vec const& z, U c, - vec const& w, U d -) -{ - vec Result; - for(length_t i = 0, n = Result.length(); i < n; ++i) - { - T Test1 = max(x[i], y[i]); - T Test2 = max(z[i], w[i]); - U Result1 = x[i] > y[i] ? a : b; - U Result2 = z[i] > w[i] ? c : d; - Result[i] = Test1 > Test2 ? Result1 : Result2; - } - return Result; -} -}//namespace glm diff --git a/third_party/glm/gtx/bit.hpp b/third_party/glm/gtx/bit.hpp deleted file mode 100755 index 60a7aef..0000000 --- a/third_party/glm/gtx/bit.hpp +++ /dev/null @@ -1,98 +0,0 @@ -/// @ref gtx_bit -/// @file glm/gtx/bit.hpp -/// -/// @see core (dependence) -/// -/// @defgroup gtx_bit GLM_GTX_bit -/// @ingroup gtx -/// -/// Include to use the features of this extension. -/// -/// Allow to perform bit operations on integer values - -#pragma once - -// Dependencies -#include "../gtc/bitfield.hpp" - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# ifndef GLM_ENABLE_EXPERIMENTAL -# pragma message("GLM: GLM_GTX_bit is an experimental extension and may change in the future. Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.") -# else -# pragma message("GLM: GLM_GTX_bit extension included") -# endif -#endif - -namespace glm -{ - /// @addtogroup gtx_bit - /// @{ - - /// @see gtx_bit - template - GLM_FUNC_DECL genIUType highestBitValue(genIUType Value); - - /// @see gtx_bit - template - GLM_FUNC_DECL genIUType lowestBitValue(genIUType Value); - - /// Find the highest bit set to 1 in a integer variable and return its value. - /// - /// @see gtx_bit - template - GLM_FUNC_DECL vec highestBitValue(vec const& value); - - /// Return the power of two number which value is just higher the input value. - /// Deprecated, use ceilPowerOfTwo from GTC_round instead - /// - /// @see gtc_round - /// @see gtx_bit - template - GLM_DEPRECATED GLM_FUNC_DECL genIUType powerOfTwoAbove(genIUType Value); - - /// Return the power of two number which value is just higher the input value. - /// Deprecated, use ceilPowerOfTwo from GTC_round instead - /// - /// @see gtc_round - /// @see gtx_bit - template - GLM_DEPRECATED GLM_FUNC_DECL vec powerOfTwoAbove(vec const& value); - - /// Return the power of two number which value is just lower the input value. - /// Deprecated, use floorPowerOfTwo from GTC_round instead - /// - /// @see gtc_round - /// @see gtx_bit - template - GLM_DEPRECATED GLM_FUNC_DECL genIUType powerOfTwoBelow(genIUType Value); - - /// Return the power of two number which value is just lower the input value. - /// Deprecated, use floorPowerOfTwo from GTC_round instead - /// - /// @see gtc_round - /// @see gtx_bit - template - GLM_DEPRECATED GLM_FUNC_DECL vec powerOfTwoBelow(vec const& value); - - /// Return the power of two number which value is the closet to the input value. - /// Deprecated, use roundPowerOfTwo from GTC_round instead - /// - /// @see gtc_round - /// @see gtx_bit - template - GLM_DEPRECATED GLM_FUNC_DECL genIUType powerOfTwoNearest(genIUType Value); - - /// Return the power of two number which value is the closet to the input value. 
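The highestBitValue / lowestBitValue helpers and the deprecated powerOfTwo* functions declared here all rest on the two's-complement identity that v & (~v + 1) keeps only the least significant set bit of v. A minimal check of that identity, independent of GLM:

    #include <cassert>
    #include <cstdint>

    // v & (~v + 1) isolates the lowest set bit (two's complement: ~v + 1 == -v).
    std::uint32_t lowest_bit_value(std::uint32_t v)
    {
        return v & (~v + 1u);
    }

    int main()
    {
        assert(lowest_bit_value(0b1011000u) == 0b0001000u);
        return 0;
    }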
- /// Deprecated, use roundPowerOfTwo from GTC_round instead - /// - /// @see gtc_round - /// @see gtx_bit - template - GLM_DEPRECATED GLM_FUNC_DECL vec powerOfTwoNearest(vec const& value); - - /// @} -} //namespace glm - - -#include "bit.inl" - diff --git a/third_party/glm/gtx/bit.inl b/third_party/glm/gtx/bit.inl deleted file mode 100755 index 621b626..0000000 --- a/third_party/glm/gtx/bit.inl +++ /dev/null @@ -1,92 +0,0 @@ -/// @ref gtx_bit - -namespace glm -{ - /////////////////// - // highestBitValue - - template - GLM_FUNC_QUALIFIER genIUType highestBitValue(genIUType Value) - { - genIUType tmp = Value; - genIUType result = genIUType(0); - while(tmp) - { - result = (tmp & (~tmp + 1)); // grab lowest bit - tmp &= ~result; // clear lowest bit - } - return result; - } - - template - GLM_FUNC_QUALIFIER vec highestBitValue(vec const& v) - { - return detail::functor1::call(highestBitValue, v); - } - - /////////////////// - // lowestBitValue - - template - GLM_FUNC_QUALIFIER genIUType lowestBitValue(genIUType Value) - { - return (Value & (~Value + 1)); - } - - template - GLM_FUNC_QUALIFIER vec lowestBitValue(vec const& v) - { - return detail::functor1::call(lowestBitValue, v); - } - - /////////////////// - // powerOfTwoAbove - - template - GLM_FUNC_QUALIFIER genType powerOfTwoAbove(genType value) - { - return isPowerOfTwo(value) ? value : highestBitValue(value) << 1; - } - - template - GLM_FUNC_QUALIFIER vec powerOfTwoAbove(vec const& v) - { - return detail::functor1::call(powerOfTwoAbove, v); - } - - /////////////////// - // powerOfTwoBelow - - template - GLM_FUNC_QUALIFIER genType powerOfTwoBelow(genType value) - { - return isPowerOfTwo(value) ? value : highestBitValue(value); - } - - template - GLM_FUNC_QUALIFIER vec powerOfTwoBelow(vec const& v) - { - return detail::functor1::call(powerOfTwoBelow, v); - } - - ///////////////////// - // powerOfTwoNearest - - template - GLM_FUNC_QUALIFIER genType powerOfTwoNearest(genType value) - { - if(isPowerOfTwo(value)) - return value; - - genType const prev = highestBitValue(value); - genType const next = prev << 1; - return (next - value) < (value - prev) ? next : prev; - } - - template - GLM_FUNC_QUALIFIER vec powerOfTwoNearest(vec const& v) - { - return detail::functor1::call(powerOfTwoNearest, v); - } - -}//namespace glm diff --git a/third_party/glm/gtx/closest_point.hpp b/third_party/glm/gtx/closest_point.hpp deleted file mode 100755 index de6dbbf..0000000 --- a/third_party/glm/gtx/closest_point.hpp +++ /dev/null @@ -1,49 +0,0 @@ -/// @ref gtx_closest_point -/// @file glm/gtx/closest_point.hpp -/// -/// @see core (dependence) -/// -/// @defgroup gtx_closest_point GLM_GTX_closest_point -/// @ingroup gtx -/// -/// Include to use the features of this extension. -/// -/// Find the point on a straight line which is the closet of a point. - -#pragma once - -// Dependency: -#include "../glm.hpp" - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# ifndef GLM_ENABLE_EXPERIMENTAL -# pragma message("GLM: GLM_GTX_closest_point is an experimental extension and may change in the future. Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.") -# else -# pragma message("GLM: GLM_GTX_closest_point extension included") -# endif -#endif - -namespace glm -{ - /// @addtogroup gtx_closest_point - /// @{ - - /// Find the point on a straight line which is the closet of a point. 
- /// @see gtx_closest_point - template - GLM_FUNC_DECL vec<3, T, Q> closestPointOnLine( - vec<3, T, Q> const& point, - vec<3, T, Q> const& a, - vec<3, T, Q> const& b); - - /// 2d lines work as well - template - GLM_FUNC_DECL vec<2, T, Q> closestPointOnLine( - vec<2, T, Q> const& point, - vec<2, T, Q> const& a, - vec<2, T, Q> const& b); - - /// @} -}// namespace glm - -#include "closest_point.inl" diff --git a/third_party/glm/gtx/closest_point.inl b/third_party/glm/gtx/closest_point.inl deleted file mode 100755 index 0a39b04..0000000 --- a/third_party/glm/gtx/closest_point.inl +++ /dev/null @@ -1,45 +0,0 @@ -/// @ref gtx_closest_point - -namespace glm -{ - template - GLM_FUNC_QUALIFIER vec<3, T, Q> closestPointOnLine - ( - vec<3, T, Q> const& point, - vec<3, T, Q> const& a, - vec<3, T, Q> const& b - ) - { - T LineLength = distance(a, b); - vec<3, T, Q> Vector = point - a; - vec<3, T, Q> LineDirection = (b - a) / LineLength; - - // Project Vector to LineDirection to get the distance of point from a - T Distance = dot(Vector, LineDirection); - - if(Distance <= T(0)) return a; - if(Distance >= LineLength) return b; - return a + LineDirection * Distance; - } - - template - GLM_FUNC_QUALIFIER vec<2, T, Q> closestPointOnLine - ( - vec<2, T, Q> const& point, - vec<2, T, Q> const& a, - vec<2, T, Q> const& b - ) - { - T LineLength = distance(a, b); - vec<2, T, Q> Vector = point - a; - vec<2, T, Q> LineDirection = (b - a) / LineLength; - - // Project Vector to LineDirection to get the distance of point from a - T Distance = dot(Vector, LineDirection); - - if(Distance <= T(0)) return a; - if(Distance >= LineLength) return b; - return a + LineDirection * Distance; - } - -}//namespace glm diff --git a/third_party/glm/gtx/color_encoding.hpp b/third_party/glm/gtx/color_encoding.hpp deleted file mode 100755 index 96ded2a..0000000 --- a/third_party/glm/gtx/color_encoding.hpp +++ /dev/null @@ -1,54 +0,0 @@ -/// @ref gtx_color_encoding -/// @file glm/gtx/color_encoding.hpp -/// -/// @see core (dependence) -/// @see gtx_color_encoding (dependence) -/// -/// @defgroup gtx_color_encoding GLM_GTX_color_encoding -/// @ingroup gtx -/// -/// Include to use the features of this extension. -/// -/// @brief Allow to perform bit operations on integer values - -#pragma once - -// Dependencies -#include "../detail/setup.hpp" -#include "../detail/qualifier.hpp" -#include "../vec3.hpp" -#include - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# ifndef GLM_ENABLE_EXPERIMENTAL -# pragma message("GLM: GLM_GTC_color_encoding is an experimental extension and may change in the future. Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.") -# else -# pragma message("GLM: GLM_GTC_color_encoding extension included") -# endif -#endif - -namespace glm -{ - /// @addtogroup gtx_color_encoding - /// @{ - - /// Convert a linear sRGB color to D65 YUV. - template - GLM_FUNC_DECL vec<3, T, Q> convertLinearSRGBToD65XYZ(vec<3, T, Q> const& ColorLinearSRGB); - - /// Convert a linear sRGB color to D50 YUV. - template - GLM_FUNC_DECL vec<3, T, Q> convertLinearSRGBToD50XYZ(vec<3, T, Q> const& ColorLinearSRGB); - - /// Convert a D65 YUV color to linear sRGB. - template - GLM_FUNC_DECL vec<3, T, Q> convertD65XYZToLinearSRGB(vec<3, T, Q> const& ColorD65XYZ); - - /// Convert a D65 YUV color to D50 YUV. 
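Stripped of the GLM types, the closestPointOnLine implementation above is a scalar projection of (point - a) onto the direction (b - a), clamped to the segment's end points. A standalone sketch under the assumption that a != b (the Vec3 struct is a hypothetical stand-in):

    #include <algorithm>  // std::clamp

    struct Vec3 { float x, y, z; };   // hypothetical stand-in for vec<3, float, Q>

    // Closest point to p on the segment [a, b]; assumes a != b so len2 > 0.
    Vec3 closest_point_on_segment(Vec3 p, Vec3 a, Vec3 b)
    {
        const Vec3  ab{b.x - a.x, b.y - a.y, b.z - a.z};
        const Vec3  ap{p.x - a.x, p.y - a.y, p.z - a.z};
        const float len2 = ab.x * ab.x + ab.y * ab.y + ab.z * ab.z;
        const float dotp = ap.x * ab.x + ap.y * ab.y + ap.z * ab.z;
        const float t    = std::clamp(dotp / len2, 0.0f, 1.0f);
        return Vec3{a.x + t * ab.x, a.y + t * ab.y, a.z + t * ab.z};
    }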
- template - GLM_FUNC_DECL vec<3, T, Q> convertD65XYZToD50XYZ(vec<3, T, Q> const& ColorD65XYZ); - - /// @} -} //namespace glm - -#include "color_encoding.inl" diff --git a/third_party/glm/gtx/color_encoding.inl b/third_party/glm/gtx/color_encoding.inl deleted file mode 100755 index e50fa3e..0000000 --- a/third_party/glm/gtx/color_encoding.inl +++ /dev/null @@ -1,45 +0,0 @@ -/// @ref gtx_color_encoding - -namespace glm -{ - template - GLM_FUNC_QUALIFIER vec<3, T, Q> convertLinearSRGBToD65XYZ(vec<3, T, Q> const& ColorLinearSRGB) - { - vec<3, T, Q> const M(0.490f, 0.17697f, 0.2f); - vec<3, T, Q> const N(0.31f, 0.8124f, 0.01063f); - vec<3, T, Q> const O(0.490f, 0.01f, 0.99f); - - return (M * ColorLinearSRGB + N * ColorLinearSRGB + O * ColorLinearSRGB) * static_cast(5.650675255693055f); - } - - template - GLM_FUNC_QUALIFIER vec<3, T, Q> convertLinearSRGBToD50XYZ(vec<3, T, Q> const& ColorLinearSRGB) - { - vec<3, T, Q> const M(0.436030342570117f, 0.222438466210245f, 0.013897440074263f); - vec<3, T, Q> const N(0.385101860087134f, 0.716942745571917f, 0.097076381494207f); - vec<3, T, Q> const O(0.143067806654203f, 0.060618777416563f, 0.713926257896652f); - - return M * ColorLinearSRGB + N * ColorLinearSRGB + O * ColorLinearSRGB; - } - - template - GLM_FUNC_QUALIFIER vec<3, T, Q> convertD65XYZToLinearSRGB(vec<3, T, Q> const& ColorD65XYZ) - { - vec<3, T, Q> const M(0.41847f, -0.091169f, 0.0009209f); - vec<3, T, Q> const N(-0.15866f, 0.25243f, 0.015708f); - vec<3, T, Q> const O(0.0009209f, -0.0025498f, 0.1786f); - - return M * ColorD65XYZ + N * ColorD65XYZ + O * ColorD65XYZ; - } - - template - GLM_FUNC_QUALIFIER vec<3, T, Q> convertD65XYZToD50XYZ(vec<3, T, Q> const& ColorD65XYZ) - { - vec<3, T, Q> const M(+1.047844353856414f, +0.029549007606644f, -0.009250984365223f); - vec<3, T, Q> const N(+0.022898981050086f, +0.990508028941971f, +0.015072338237051f); - vec<3, T, Q> const O(-0.050206647741605f, -0.017074711360960f, +0.751717835079977f); - - return M * ColorD65XYZ + N * ColorD65XYZ + O * ColorD65XYZ; - } - -}//namespace glm diff --git a/third_party/glm/gtx/color_space.hpp b/third_party/glm/gtx/color_space.hpp deleted file mode 100755 index a634392..0000000 --- a/third_party/glm/gtx/color_space.hpp +++ /dev/null @@ -1,72 +0,0 @@ -/// @ref gtx_color_space -/// @file glm/gtx/color_space.hpp -/// -/// @see core (dependence) -/// -/// @defgroup gtx_color_space GLM_GTX_color_space -/// @ingroup gtx -/// -/// Include to use the features of this extension. -/// -/// Related to RGB to HSV conversions and operations. - -#pragma once - -// Dependency: -#include "../glm.hpp" - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# ifndef GLM_ENABLE_EXPERIMENTAL -# pragma message("GLM: GLM_GTX_color_space is an experimental extension and may change in the future. Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.") -# else -# pragma message("GLM: GLM_GTX_color_space extension included") -# endif -#endif - -namespace glm -{ - /// @addtogroup gtx_color_space - /// @{ - - /// Converts a color from HSV color space to its color in RGB color space. - /// @see gtx_color_space - template - GLM_FUNC_DECL vec<3, T, Q> rgbColor( - vec<3, T, Q> const& hsvValue); - - /// Converts a color from RGB color space to its color in HSV color space. - /// @see gtx_color_space - template - GLM_FUNC_DECL vec<3, T, Q> hsvColor( - vec<3, T, Q> const& rgbValue); - - /// Build a saturation matrix. 
- /// @see gtx_color_space - template - GLM_FUNC_DECL mat<4, 4, T, defaultp> saturation( - T const s); - - /// Modify the saturation of a color. - /// @see gtx_color_space - template - GLM_FUNC_DECL vec<3, T, Q> saturation( - T const s, - vec<3, T, Q> const& color); - - /// Modify the saturation of a color. - /// @see gtx_color_space - template - GLM_FUNC_DECL vec<4, T, Q> saturation( - T const s, - vec<4, T, Q> const& color); - - /// Compute color luminosity associating ratios (0.33, 0.59, 0.11) to RGB canals. - /// @see gtx_color_space - template - GLM_FUNC_DECL T luminosity( - vec<3, T, Q> const& color); - - /// @} -}//namespace glm - -#include "color_space.inl" diff --git a/third_party/glm/gtx/color_space.inl b/third_party/glm/gtx/color_space.inl deleted file mode 100755 index f698afe..0000000 --- a/third_party/glm/gtx/color_space.inl +++ /dev/null @@ -1,141 +0,0 @@ -/// @ref gtx_color_space - -namespace glm -{ - template - GLM_FUNC_QUALIFIER vec<3, T, Q> rgbColor(const vec<3, T, Q>& hsvColor) - { - vec<3, T, Q> hsv = hsvColor; - vec<3, T, Q> rgbColor; - - if(hsv.y == static_cast(0)) - // achromatic (grey) - rgbColor = vec<3, T, Q>(hsv.z); - else - { - T sector = floor(hsv.x * (T(1) / T(60))); - T frac = (hsv.x * (T(1) / T(60))) - sector; - // factorial part of h - T o = hsv.z * (T(1) - hsv.y); - T p = hsv.z * (T(1) - hsv.y * frac); - T q = hsv.z * (T(1) - hsv.y * (T(1) - frac)); - - switch(int(sector)) - { - default: - case 0: - rgbColor.r = hsv.z; - rgbColor.g = q; - rgbColor.b = o; - break; - case 1: - rgbColor.r = p; - rgbColor.g = hsv.z; - rgbColor.b = o; - break; - case 2: - rgbColor.r = o; - rgbColor.g = hsv.z; - rgbColor.b = q; - break; - case 3: - rgbColor.r = o; - rgbColor.g = p; - rgbColor.b = hsv.z; - break; - case 4: - rgbColor.r = q; - rgbColor.g = o; - rgbColor.b = hsv.z; - break; - case 5: - rgbColor.r = hsv.z; - rgbColor.g = o; - rgbColor.b = p; - break; - } - } - - return rgbColor; - } - - template - GLM_FUNC_QUALIFIER vec<3, T, Q> hsvColor(const vec<3, T, Q>& rgbColor) - { - vec<3, T, Q> hsv = rgbColor; - float Min = min(min(rgbColor.r, rgbColor.g), rgbColor.b); - float Max = max(max(rgbColor.r, rgbColor.g), rgbColor.b); - float Delta = Max - Min; - - hsv.z = Max; - - if(Max != static_cast(0)) - { - hsv.y = Delta / hsv.z; - T h = static_cast(0); - - if(rgbColor.r == Max) - // between yellow & magenta - h = static_cast(0) + T(60) * (rgbColor.g - rgbColor.b) / Delta; - else if(rgbColor.g == Max) - // between cyan & yellow - h = static_cast(120) + T(60) * (rgbColor.b - rgbColor.r) / Delta; - else - // between magenta & cyan - h = static_cast(240) + T(60) * (rgbColor.r - rgbColor.g) / Delta; - - if(h < T(0)) - hsv.x = h + T(360); - else - hsv.x = h; - } - else - { - // If r = g = b = 0 then s = 0, h is undefined - hsv.y = static_cast(0); - hsv.x = static_cast(0); - } - - return hsv; - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> saturation(T const s) - { - vec<3, T, defaultp> rgbw = vec<3, T, defaultp>(T(0.2126), T(0.7152), T(0.0722)); - - vec<3, T, defaultp> const col((T(1) - s) * rgbw); - - mat<4, 4, T, defaultp> result(T(1)); - result[0][0] = col.x + s; - result[0][1] = col.x; - result[0][2] = col.x; - result[1][0] = col.y; - result[1][1] = col.y + s; - result[1][2] = col.y; - result[2][0] = col.z; - result[2][1] = col.z; - result[2][2] = col.z + s; - - return result; - } - - template - GLM_FUNC_QUALIFIER vec<3, T, Q> saturation(const T s, const vec<3, T, Q>& color) - { - return vec<3, T, Q>(saturation(s) * vec<4, T, Q>(color, T(0))); - } - - template - 
GLM_FUNC_QUALIFIER vec<4, T, Q> saturation(const T s, const vec<4, T, Q>& color) - { - return saturation(s) * color; - } - - template - GLM_FUNC_QUALIFIER T luminosity(const vec<3, T, Q>& color) - { - const vec<3, T, Q> tmp = vec<3, T, Q>(0.33, 0.59, 0.11); - return dot(color, tmp); - } -}//namespace glm diff --git a/third_party/glm/gtx/color_space_YCoCg.hpp b/third_party/glm/gtx/color_space_YCoCg.hpp deleted file mode 100755 index dd2b771..0000000 --- a/third_party/glm/gtx/color_space_YCoCg.hpp +++ /dev/null @@ -1,60 +0,0 @@ -/// @ref gtx_color_space_YCoCg -/// @file glm/gtx/color_space_YCoCg.hpp -/// -/// @see core (dependence) -/// -/// @defgroup gtx_color_space_YCoCg GLM_GTX_color_space_YCoCg -/// @ingroup gtx -/// -/// Include to use the features of this extension. -/// -/// RGB to YCoCg conversions and operations - -#pragma once - -// Dependency: -#include "../glm.hpp" - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# ifndef GLM_ENABLE_EXPERIMENTAL -# pragma message("GLM: GLM_GTX_color_space_YCoCg is an experimental extension and may change in the future. Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.") -# else -# pragma message("GLM: GLM_GTX_color_space_YCoCg extension included") -# endif -#endif - -namespace glm -{ - /// @addtogroup gtx_color_space_YCoCg - /// @{ - - /// Convert a color from RGB color space to YCoCg color space. - /// @see gtx_color_space_YCoCg - template - GLM_FUNC_DECL vec<3, T, Q> rgb2YCoCg( - vec<3, T, Q> const& rgbColor); - - /// Convert a color from YCoCg color space to RGB color space. - /// @see gtx_color_space_YCoCg - template - GLM_FUNC_DECL vec<3, T, Q> YCoCg2rgb( - vec<3, T, Q> const& YCoCgColor); - - /// Convert a color from RGB color space to YCoCgR color space. - /// @see "YCoCg-R: A Color Space with RGB Reversibility and Low Dynamic Range" - /// @see gtx_color_space_YCoCg - template - GLM_FUNC_DECL vec<3, T, Q> rgb2YCoCgR( - vec<3, T, Q> const& rgbColor); - - /// Convert a color from YCoCgR color space to RGB color space. 
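The luminosity helper implemented above is only a dot product against the fixed (0.33, 0.59, 0.11) weights mentioned in the header; written out without GLM types:

    // Weighted-channel luminosity, in the spirit of the removed gtx_color_space helper.
    float luminosity(float r, float g, float b)
    {
        return 0.33f * r + 0.59f * g + 0.11f * b;
    }
    // e.g. luminosity(1.f, 1.f, 1.f) ~= 1.03 (the ratios do not sum exactly to 1).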
- /// @see "YCoCg-R: A Color Space with RGB Reversibility and Low Dynamic Range" - /// @see gtx_color_space_YCoCg - template - GLM_FUNC_DECL vec<3, T, Q> YCoCgR2rgb( - vec<3, T, Q> const& YCoCgColor); - - /// @} -}//namespace glm - -#include "color_space_YCoCg.inl" diff --git a/third_party/glm/gtx/color_space_YCoCg.inl b/third_party/glm/gtx/color_space_YCoCg.inl deleted file mode 100755 index 83ba857..0000000 --- a/third_party/glm/gtx/color_space_YCoCg.inl +++ /dev/null @@ -1,107 +0,0 @@ -/// @ref gtx_color_space_YCoCg - -namespace glm -{ - template - GLM_FUNC_QUALIFIER vec<3, T, Q> rgb2YCoCg - ( - vec<3, T, Q> const& rgbColor - ) - { - vec<3, T, Q> result; - result.x/*Y */ = rgbColor.r / T(4) + rgbColor.g / T(2) + rgbColor.b / T(4); - result.y/*Co*/ = rgbColor.r / T(2) + rgbColor.g * T(0) - rgbColor.b / T(2); - result.z/*Cg*/ = - rgbColor.r / T(4) + rgbColor.g / T(2) - rgbColor.b / T(4); - return result; - } - - template - GLM_FUNC_QUALIFIER vec<3, T, Q> YCoCg2rgb - ( - vec<3, T, Q> const& YCoCgColor - ) - { - vec<3, T, Q> result; - result.r = YCoCgColor.x + YCoCgColor.y - YCoCgColor.z; - result.g = YCoCgColor.x + YCoCgColor.z; - result.b = YCoCgColor.x - YCoCgColor.y - YCoCgColor.z; - return result; - } - - template - class compute_YCoCgR { - public: - static GLM_FUNC_QUALIFIER vec<3, T, Q> rgb2YCoCgR - ( - vec<3, T, Q> const& rgbColor - ) - { - vec<3, T, Q> result; - result.x/*Y */ = rgbColor.g * static_cast(0.5) + (rgbColor.r + rgbColor.b) * static_cast(0.25); - result.y/*Co*/ = rgbColor.r - rgbColor.b; - result.z/*Cg*/ = rgbColor.g - (rgbColor.r + rgbColor.b) * static_cast(0.5); - return result; - } - - static GLM_FUNC_QUALIFIER vec<3, T, Q> YCoCgR2rgb - ( - vec<3, T, Q> const& YCoCgRColor - ) - { - vec<3, T, Q> result; - T tmp = YCoCgRColor.x - (YCoCgRColor.z * static_cast(0.5)); - result.g = YCoCgRColor.z + tmp; - result.b = tmp - (YCoCgRColor.y * static_cast(0.5)); - result.r = result.b + YCoCgRColor.y; - return result; - } - }; - - template - class compute_YCoCgR { - public: - static GLM_FUNC_QUALIFIER vec<3, T, Q> rgb2YCoCgR - ( - vec<3, T, Q> const& rgbColor - ) - { - vec<3, T, Q> result; - result.y/*Co*/ = rgbColor.r - rgbColor.b; - T tmp = rgbColor.b + (result.y >> 1); - result.z/*Cg*/ = rgbColor.g - tmp; - result.x/*Y */ = tmp + (result.z >> 1); - return result; - } - - static GLM_FUNC_QUALIFIER vec<3, T, Q> YCoCgR2rgb - ( - vec<3, T, Q> const& YCoCgRColor - ) - { - vec<3, T, Q> result; - T tmp = YCoCgRColor.x - (YCoCgRColor.z >> 1); - result.g = YCoCgRColor.z + tmp; - result.b = tmp - (YCoCgRColor.y >> 1); - result.r = result.b + YCoCgRColor.y; - return result; - } - }; - - template - GLM_FUNC_QUALIFIER vec<3, T, Q> rgb2YCoCgR - ( - vec<3, T, Q> const& rgbColor - ) - { - return compute_YCoCgR::is_integer>::rgb2YCoCgR(rgbColor); - } - - template - GLM_FUNC_QUALIFIER vec<3, T, Q> YCoCgR2rgb - ( - vec<3, T, Q> const& YCoCgRColor - ) - { - return compute_YCoCgR::is_integer>::YCoCgR2rgb(YCoCgRColor); - } -}//namespace glm diff --git a/third_party/glm/gtx/common.hpp b/third_party/glm/gtx/common.hpp deleted file mode 100755 index 254ada2..0000000 --- a/third_party/glm/gtx/common.hpp +++ /dev/null @@ -1,76 +0,0 @@ -/// @ref gtx_common -/// @file glm/gtx/common.hpp -/// -/// @see core (dependence) -/// -/// @defgroup gtx_common GLM_GTX_common -/// @ingroup gtx -/// -/// Include to use the features of this extension. 
-/// -/// @brief Provide functions to increase the compatibility with Cg and HLSL languages - -#pragma once - -// Dependencies: -#include "../vec2.hpp" -#include "../vec3.hpp" -#include "../vec4.hpp" -#include "../gtc/vec1.hpp" - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# ifndef GLM_ENABLE_EXPERIMENTAL -# pragma message("GLM: GLM_GTX_common is an experimental extension and may change in the future. Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.") -# else -# pragma message("GLM: GLM_GTX_common extension included") -# endif -#endif - -namespace glm -{ - /// @addtogroup gtx_common - /// @{ - - /// Returns true if x is a denormalized number - /// Numbers whose absolute value is too small to be represented in the normal format are represented in an alternate, denormalized format. - /// This format is less precise but can represent values closer to zero. - /// - /// @tparam genType Floating-point scalar or vector types. - /// - /// @see GLSL isnan man page - /// @see GLSL 4.20.8 specification, section 8.3 Common Functions - template - GLM_FUNC_DECL typename genType::bool_type isdenormal(genType const& x); - - /// Similar to 'mod' but with a different rounding and integer support. - /// Returns 'x - y * trunc(x/y)' instead of 'x - y * floor(x/y)' - /// - /// @see GLSL mod vs HLSL fmod - /// @see GLSL mod man page - template - GLM_FUNC_DECL vec fmod(vec const& v); - - /// Returns whether vector components values are within an interval. A open interval excludes its endpoints, and is denoted with square brackets. - /// - /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector - /// @tparam T Floating-point or integer scalar types - /// @tparam Q Value from qualifier enum - /// - /// @see ext_vector_relational - template - GLM_FUNC_DECL vec openBounded(vec const& Value, vec const& Min, vec const& Max); - - /// Returns whether vector components values are within an interval. A closed interval includes its endpoints, and is denoted with square brackets. 
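The fmod described here keeps the sign of x (trunc-based division), whereas GLSL's mod keeps the sign of y (floor-based); the two only differ when the operands have opposite signs. A small illustration, independent of GLM:

    #include <cmath>

    // x - y * trunc(x / y): matches std::fmod / HLSL fmod (result follows the sign of x).
    float fmod_trunc(float x, float y) { return x - y * std::trunc(x / y); }

    // x - y * floor(x / y): matches GLSL mod (result follows the sign of y).
    float mod_floor(float x, float y)  { return x - y * std::floor(x / y); }

    // fmod_trunc(-1.0f, 3.0f) == -1.0f, while mod_floor(-1.0f, 3.0f) == 2.0f.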
- /// - /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector - /// @tparam T Floating-point or integer scalar types - /// @tparam Q Value from qualifier enum - /// - /// @see ext_vector_relational - template - GLM_FUNC_DECL vec closeBounded(vec const& Value, vec const& Min, vec const& Max); - - /// @} -}//namespace glm - -#include "common.inl" diff --git a/third_party/glm/gtx/common.inl b/third_party/glm/gtx/common.inl deleted file mode 100755 index 4ad2126..0000000 --- a/third_party/glm/gtx/common.inl +++ /dev/null @@ -1,125 +0,0 @@ -/// @ref gtx_common - -#include -#include "../gtc/epsilon.hpp" -#include "../gtc/constants.hpp" - -namespace glm{ -namespace detail -{ - template - struct compute_fmod - { - GLM_FUNC_QUALIFIER static vec call(vec const& a, vec const& b) - { - return detail::functor2::call(std::fmod, a, b); - } - }; - - template - struct compute_fmod - { - GLM_FUNC_QUALIFIER static vec call(vec const& a, vec const& b) - { - return a % b; - } - }; -}//namespace detail - - template - GLM_FUNC_QUALIFIER bool isdenormal(T const& x) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'isdenormal' only accept floating-point inputs"); - -# if GLM_HAS_CXX11_STL - return std::fpclassify(x) == FP_SUBNORMAL; -# else - return epsilonNotEqual(x, static_cast(0), epsilon()) && std::fabs(x) < std::numeric_limits::min(); -# endif - } - - template - GLM_FUNC_QUALIFIER typename vec<1, T, Q>::bool_type isdenormal - ( - vec<1, T, Q> const& x - ) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'isdenormal' only accept floating-point inputs"); - - return typename vec<1, T, Q>::bool_type( - isdenormal(x.x)); - } - - template - GLM_FUNC_QUALIFIER typename vec<2, T, Q>::bool_type isdenormal - ( - vec<2, T, Q> const& x - ) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'isdenormal' only accept floating-point inputs"); - - return typename vec<2, T, Q>::bool_type( - isdenormal(x.x), - isdenormal(x.y)); - } - - template - GLM_FUNC_QUALIFIER typename vec<3, T, Q>::bool_type isdenormal - ( - vec<3, T, Q> const& x - ) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'isdenormal' only accept floating-point inputs"); - - return typename vec<3, T, Q>::bool_type( - isdenormal(x.x), - isdenormal(x.y), - isdenormal(x.z)); - } - - template - GLM_FUNC_QUALIFIER typename vec<4, T, Q>::bool_type isdenormal - ( - vec<4, T, Q> const& x - ) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'isdenormal' only accept floating-point inputs"); - - return typename vec<4, T, Q>::bool_type( - isdenormal(x.x), - isdenormal(x.y), - isdenormal(x.z), - isdenormal(x.w)); - } - - // fmod - template - GLM_FUNC_QUALIFIER genType fmod(genType x, genType y) - { - return fmod(vec<1, genType>(x), y).x; - } - - template - GLM_FUNC_QUALIFIER vec fmod(vec const& x, T y) - { - return detail::compute_fmod::is_iec559>::call(x, vec(y)); - } - - template - GLM_FUNC_QUALIFIER vec fmod(vec const& x, vec const& y) - { - return detail::compute_fmod::is_iec559>::call(x, y); - } - - template - GLM_FUNC_QUALIFIER vec openBounded(vec const& Value, vec const& Min, vec const& Max) - { - return greaterThan(Value, Min) && lessThan(Value, Max); - } - - template - GLM_FUNC_QUALIFIER vec closeBounded(vec const& Value, vec const& Min, vec const& Max) - { - return greaterThanEqual(Value, Min) && lessThanEqual(Value, Max); - } -}//namespace glm diff --git a/third_party/glm/gtx/compatibility.hpp b/third_party/glm/gtx/compatibility.hpp deleted file mode 100755 index f1b00a6..0000000 --- 
a/third_party/glm/gtx/compatibility.hpp +++ /dev/null @@ -1,133 +0,0 @@ -/// @ref gtx_compatibility -/// @file glm/gtx/compatibility.hpp -/// -/// @see core (dependence) -/// -/// @defgroup gtx_compatibility GLM_GTX_compatibility -/// @ingroup gtx -/// -/// Include to use the features of this extension. -/// -/// Provide functions to increase the compatibility with Cg and HLSL languages - -#pragma once - -// Dependency: -#include "../glm.hpp" -#include "../gtc/quaternion.hpp" - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# ifndef GLM_ENABLE_EXPERIMENTAL -# pragma message("GLM: GLM_GTX_compatibility is an experimental extension and may change in the future. Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.") -# else -# pragma message("GLM: GLM_GTX_compatibility extension included") -# endif -#endif - -#if GLM_COMPILER & GLM_COMPILER_VC -# include -#elif GLM_COMPILER & GLM_COMPILER_GCC -# include -# if(GLM_PLATFORM & GLM_PLATFORM_ANDROID) -# undef isfinite -# endif -#endif//GLM_COMPILER - -namespace glm -{ - /// @addtogroup gtx_compatibility - /// @{ - - template GLM_FUNC_QUALIFIER T lerp(T x, T y, T a){return mix(x, y, a);} //!< \brief Returns x * (1.0 - a) + y * a, i.e., the linear blend of x and y using the floating-point value a. The value for a is not restricted to the range [0, 1]. (From GLM_GTX_compatibility) - template GLM_FUNC_QUALIFIER vec<2, T, Q> lerp(const vec<2, T, Q>& x, const vec<2, T, Q>& y, T a){return mix(x, y, a);} //!< \brief Returns x * (1.0 - a) + y * a, i.e., the linear blend of x and y using the floating-point value a. The value for a is not restricted to the range [0, 1]. (From GLM_GTX_compatibility) - - template GLM_FUNC_QUALIFIER vec<3, T, Q> lerp(const vec<3, T, Q>& x, const vec<3, T, Q>& y, T a){return mix(x, y, a);} //!< \brief Returns x * (1.0 - a) + y * a, i.e., the linear blend of x and y using the floating-point value a. The value for a is not restricted to the range [0, 1]. (From GLM_GTX_compatibility) - template GLM_FUNC_QUALIFIER vec<4, T, Q> lerp(const vec<4, T, Q>& x, const vec<4, T, Q>& y, T a){return mix(x, y, a);} //!< \brief Returns x * (1.0 - a) + y * a, i.e., the linear blend of x and y using the floating-point value a. The value for a is not restricted to the range [0, 1]. (From GLM_GTX_compatibility) - template GLM_FUNC_QUALIFIER vec<2, T, Q> lerp(const vec<2, T, Q>& x, const vec<2, T, Q>& y, const vec<2, T, Q>& a){return mix(x, y, a);} //!< \brief Returns the component-wise result of x * (1.0 - a) + y * a, i.e., the linear blend of x and y using vector a. The value for a is not restricted to the range [0, 1]. (From GLM_GTX_compatibility) - template GLM_FUNC_QUALIFIER vec<3, T, Q> lerp(const vec<3, T, Q>& x, const vec<3, T, Q>& y, const vec<3, T, Q>& a){return mix(x, y, a);} //!< \brief Returns the component-wise result of x * (1.0 - a) + y * a, i.e., the linear blend of x and y using vector a. The value for a is not restricted to the range [0, 1]. (From GLM_GTX_compatibility) - template GLM_FUNC_QUALIFIER vec<4, T, Q> lerp(const vec<4, T, Q>& x, const vec<4, T, Q>& y, const vec<4, T, Q>& a){return mix(x, y, a);} //!< \brief Returns the component-wise result of x * (1.0 - a) + y * a, i.e., the linear blend of x and y using vector a. The value for a is not restricted to the range [0, 1]. (From GLM_GTX_compatibility) - - template GLM_FUNC_QUALIFIER T saturate(T x){return clamp(x, T(0), T(1));} //!< \brief Returns clamp(x, 0, 1) for each component in x. 
(From GLM_GTX_compatibility)
-	template<typename T, qualifier Q> GLM_FUNC_QUALIFIER vec<2, T, Q> saturate(const vec<2, T, Q>& x){return clamp(x, T(0), T(1));} //!< \brief Returns clamp(x, 0, 1) for each component in x. (From GLM_GTX_compatibility)
-	template<typename T, qualifier Q> GLM_FUNC_QUALIFIER vec<3, T, Q> saturate(const vec<3, T, Q>& x){return clamp(x, T(0), T(1));} //!< \brief Returns clamp(x, 0, 1) for each component in x. (From GLM_GTX_compatibility)
-	template<typename T, qualifier Q> GLM_FUNC_QUALIFIER vec<4, T, Q> saturate(const vec<4, T, Q>& x){return clamp(x, T(0), T(1));} //!< \brief Returns clamp(x, 0, 1) for each component in x. (From GLM_GTX_compatibility)
-
-	template<typename T> GLM_FUNC_QUALIFIER T atan2(T x, T y){return atan(x, y);} //!< \brief Arc tangent. Returns an angle whose tangent is y/x. The signs of x and y are used to determine what quadrant the angle is in. The range of values returned by this function is [-PI, PI]. Results are undefined if x and y are both 0. (From GLM_GTX_compatibility)
-	template<typename T, qualifier Q> GLM_FUNC_QUALIFIER vec<2, T, Q> atan2(const vec<2, T, Q>& x, const vec<2, T, Q>& y){return atan(x, y);} //!< \brief Arc tangent. Returns an angle whose tangent is y/x. The signs of x and y are used to determine what quadrant the angle is in. The range of values returned by this function is [-PI, PI]. Results are undefined if x and y are both 0. (From GLM_GTX_compatibility)
-	template<typename T, qualifier Q> GLM_FUNC_QUALIFIER vec<3, T, Q> atan2(const vec<3, T, Q>& x, const vec<3, T, Q>& y){return atan(x, y);} //!< \brief Arc tangent. Returns an angle whose tangent is y/x. The signs of x and y are used to determine what quadrant the angle is in. The range of values returned by this function is [-PI, PI]. Results are undefined if x and y are both 0. (From GLM_GTX_compatibility)
-	template<typename T, qualifier Q> GLM_FUNC_QUALIFIER vec<4, T, Q> atan2(const vec<4, T, Q>& x, const vec<4, T, Q>& y){return atan(x, y);} //!< \brief Arc tangent. Returns an angle whose tangent is y/x. The signs of x and y are used to determine what quadrant the angle is in. The range of values returned by this function is [-PI, PI]. Results are undefined if x and y are both 0. (From GLM_GTX_compatibility)
-
-	template<typename genType> GLM_FUNC_DECL bool isfinite(genType const& x); //!< \brief Test whether or not a scalar or each vector component is a finite value. (From GLM_GTX_compatibility)
-	template<typename T, qualifier Q> GLM_FUNC_DECL vec<1, bool, Q> isfinite(const vec<1, T, Q>& x); //!< \brief Test whether or not a scalar or each vector component is a finite value. (From GLM_GTX_compatibility)
-	template<typename T, qualifier Q> GLM_FUNC_DECL vec<2, bool, Q> isfinite(const vec<2, T, Q>& x); //!< \brief Test whether or not a scalar or each vector component is a finite value. (From GLM_GTX_compatibility)
-	template<typename T, qualifier Q> GLM_FUNC_DECL vec<3, bool, Q> isfinite(const vec<3, T, Q>& x); //!< \brief Test whether or not a scalar or each vector component is a finite value. (From GLM_GTX_compatibility)
-	template<typename T, qualifier Q> GLM_FUNC_DECL vec<4, bool, Q> isfinite(const vec<4, T, Q>& x); //!< \brief Test whether or not a scalar or each vector component is a finite value. (From GLM_GTX_compatibility)
-
-	typedef bool bool1; //!< \brief boolean type with 1 component. (From GLM_GTX_compatibility extension)
-	typedef vec<2, bool, highp> bool2; //!< \brief boolean type with 2 components. (From GLM_GTX_compatibility extension)
-	typedef vec<3, bool, highp> bool3; //!< \brief boolean type with 3 components. (From GLM_GTX_compatibility extension)
-	typedef vec<4, bool, highp> bool4; //!< \brief boolean type with 4 components. 
(From GLM_GTX_compatibility extension) - - typedef bool bool1x1; //!< \brief boolean matrix with 1 x 1 component. (From GLM_GTX_compatibility extension) - typedef mat<2, 2, bool, highp> bool2x2; //!< \brief boolean matrix with 2 x 2 components. (From GLM_GTX_compatibility extension) - typedef mat<2, 3, bool, highp> bool2x3; //!< \brief boolean matrix with 2 x 3 components. (From GLM_GTX_compatibility extension) - typedef mat<2, 4, bool, highp> bool2x4; //!< \brief boolean matrix with 2 x 4 components. (From GLM_GTX_compatibility extension) - typedef mat<3, 2, bool, highp> bool3x2; //!< \brief boolean matrix with 3 x 2 components. (From GLM_GTX_compatibility extension) - typedef mat<3, 3, bool, highp> bool3x3; //!< \brief boolean matrix with 3 x 3 components. (From GLM_GTX_compatibility extension) - typedef mat<3, 4, bool, highp> bool3x4; //!< \brief boolean matrix with 3 x 4 components. (From GLM_GTX_compatibility extension) - typedef mat<4, 2, bool, highp> bool4x2; //!< \brief boolean matrix with 4 x 2 components. (From GLM_GTX_compatibility extension) - typedef mat<4, 3, bool, highp> bool4x3; //!< \brief boolean matrix with 4 x 3 components. (From GLM_GTX_compatibility extension) - typedef mat<4, 4, bool, highp> bool4x4; //!< \brief boolean matrix with 4 x 4 components. (From GLM_GTX_compatibility extension) - - typedef int int1; //!< \brief integer vector with 1 component. (From GLM_GTX_compatibility extension) - typedef vec<2, int, highp> int2; //!< \brief integer vector with 2 components. (From GLM_GTX_compatibility extension) - typedef vec<3, int, highp> int3; //!< \brief integer vector with 3 components. (From GLM_GTX_compatibility extension) - typedef vec<4, int, highp> int4; //!< \brief integer vector with 4 components. (From GLM_GTX_compatibility extension) - - typedef int int1x1; //!< \brief integer matrix with 1 component. (From GLM_GTX_compatibility extension) - typedef mat<2, 2, int, highp> int2x2; //!< \brief integer matrix with 2 x 2 components. (From GLM_GTX_compatibility extension) - typedef mat<2, 3, int, highp> int2x3; //!< \brief integer matrix with 2 x 3 components. (From GLM_GTX_compatibility extension) - typedef mat<2, 4, int, highp> int2x4; //!< \brief integer matrix with 2 x 4 components. (From GLM_GTX_compatibility extension) - typedef mat<3, 2, int, highp> int3x2; //!< \brief integer matrix with 3 x 2 components. (From GLM_GTX_compatibility extension) - typedef mat<3, 3, int, highp> int3x3; //!< \brief integer matrix with 3 x 3 components. (From GLM_GTX_compatibility extension) - typedef mat<3, 4, int, highp> int3x4; //!< \brief integer matrix with 3 x 4 components. (From GLM_GTX_compatibility extension) - typedef mat<4, 2, int, highp> int4x2; //!< \brief integer matrix with 4 x 2 components. (From GLM_GTX_compatibility extension) - typedef mat<4, 3, int, highp> int4x3; //!< \brief integer matrix with 4 x 3 components. (From GLM_GTX_compatibility extension) - typedef mat<4, 4, int, highp> int4x4; //!< \brief integer matrix with 4 x 4 components. (From GLM_GTX_compatibility extension) - - typedef float float1; //!< \brief single-qualifier floating-point vector with 1 component. (From GLM_GTX_compatibility extension) - typedef vec<2, float, highp> float2; //!< \brief single-qualifier floating-point vector with 2 components. (From GLM_GTX_compatibility extension) - typedef vec<3, float, highp> float3; //!< \brief single-qualifier floating-point vector with 3 components. 
(From GLM_GTX_compatibility extension) - typedef vec<4, float, highp> float4; //!< \brief single-qualifier floating-point vector with 4 components. (From GLM_GTX_compatibility extension) - - typedef float float1x1; //!< \brief single-qualifier floating-point matrix with 1 component. (From GLM_GTX_compatibility extension) - typedef mat<2, 2, float, highp> float2x2; //!< \brief single-qualifier floating-point matrix with 2 x 2 components. (From GLM_GTX_compatibility extension) - typedef mat<2, 3, float, highp> float2x3; //!< \brief single-qualifier floating-point matrix with 2 x 3 components. (From GLM_GTX_compatibility extension) - typedef mat<2, 4, float, highp> float2x4; //!< \brief single-qualifier floating-point matrix with 2 x 4 components. (From GLM_GTX_compatibility extension) - typedef mat<3, 2, float, highp> float3x2; //!< \brief single-qualifier floating-point matrix with 3 x 2 components. (From GLM_GTX_compatibility extension) - typedef mat<3, 3, float, highp> float3x3; //!< \brief single-qualifier floating-point matrix with 3 x 3 components. (From GLM_GTX_compatibility extension) - typedef mat<3, 4, float, highp> float3x4; //!< \brief single-qualifier floating-point matrix with 3 x 4 components. (From GLM_GTX_compatibility extension) - typedef mat<4, 2, float, highp> float4x2; //!< \brief single-qualifier floating-point matrix with 4 x 2 components. (From GLM_GTX_compatibility extension) - typedef mat<4, 3, float, highp> float4x3; //!< \brief single-qualifier floating-point matrix with 4 x 3 components. (From GLM_GTX_compatibility extension) - typedef mat<4, 4, float, highp> float4x4; //!< \brief single-qualifier floating-point matrix with 4 x 4 components. (From GLM_GTX_compatibility extension) - - typedef double double1; //!< \brief double-qualifier floating-point vector with 1 component. (From GLM_GTX_compatibility extension) - typedef vec<2, double, highp> double2; //!< \brief double-qualifier floating-point vector with 2 components. (From GLM_GTX_compatibility extension) - typedef vec<3, double, highp> double3; //!< \brief double-qualifier floating-point vector with 3 components. (From GLM_GTX_compatibility extension) - typedef vec<4, double, highp> double4; //!< \brief double-qualifier floating-point vector with 4 components. (From GLM_GTX_compatibility extension) - - typedef double double1x1; //!< \brief double-qualifier floating-point matrix with 1 component. (From GLM_GTX_compatibility extension) - typedef mat<2, 2, double, highp> double2x2; //!< \brief double-qualifier floating-point matrix with 2 x 2 components. (From GLM_GTX_compatibility extension) - typedef mat<2, 3, double, highp> double2x3; //!< \brief double-qualifier floating-point matrix with 2 x 3 components. (From GLM_GTX_compatibility extension) - typedef mat<2, 4, double, highp> double2x4; //!< \brief double-qualifier floating-point matrix with 2 x 4 components. (From GLM_GTX_compatibility extension) - typedef mat<3, 2, double, highp> double3x2; //!< \brief double-qualifier floating-point matrix with 3 x 2 components. (From GLM_GTX_compatibility extension) - typedef mat<3, 3, double, highp> double3x3; //!< \brief double-qualifier floating-point matrix with 3 x 3 components. (From GLM_GTX_compatibility extension) - typedef mat<3, 4, double, highp> double3x4; //!< \brief double-qualifier floating-point matrix with 3 x 4 components. (From GLM_GTX_compatibility extension) - typedef mat<4, 2, double, highp> double4x2; //!< \brief double-qualifier floating-point matrix with 4 x 2 components. 
(From GLM_GTX_compatibility extension) - typedef mat<4, 3, double, highp> double4x3; //!< \brief double-qualifier floating-point matrix with 4 x 3 components. (From GLM_GTX_compatibility extension) - typedef mat<4, 4, double, highp> double4x4; //!< \brief double-qualifier floating-point matrix with 4 x 4 components. (From GLM_GTX_compatibility extension) - - /// @} -}//namespace glm - -#include "compatibility.inl" diff --git a/third_party/glm/gtx/compatibility.inl b/third_party/glm/gtx/compatibility.inl deleted file mode 100755 index 1d49496..0000000 --- a/third_party/glm/gtx/compatibility.inl +++ /dev/null @@ -1,62 +0,0 @@ -#include - -namespace glm -{ - // isfinite - template - GLM_FUNC_QUALIFIER bool isfinite( - genType const& x) - { -# if GLM_HAS_CXX11_STL - return std::isfinite(x) != 0; -# elif GLM_COMPILER & GLM_COMPILER_VC - return _finite(x) != 0; -# elif GLM_COMPILER & GLM_COMPILER_GCC && GLM_PLATFORM & GLM_PLATFORM_ANDROID - return _isfinite(x) != 0; -# else - if (std::numeric_limits::is_integer || std::denorm_absent == std::numeric_limits::has_denorm) - return std::numeric_limits::min() <= x && std::numeric_limits::max() >= x; - else - return -std::numeric_limits::max() <= x && std::numeric_limits::max() >= x; -# endif - } - - template - GLM_FUNC_QUALIFIER vec<1, bool, Q> isfinite( - vec<1, T, Q> const& x) - { - return vec<1, bool, Q>( - isfinite(x.x)); - } - - template - GLM_FUNC_QUALIFIER vec<2, bool, Q> isfinite( - vec<2, T, Q> const& x) - { - return vec<2, bool, Q>( - isfinite(x.x), - isfinite(x.y)); - } - - template - GLM_FUNC_QUALIFIER vec<3, bool, Q> isfinite( - vec<3, T, Q> const& x) - { - return vec<3, bool, Q>( - isfinite(x.x), - isfinite(x.y), - isfinite(x.z)); - } - - template - GLM_FUNC_QUALIFIER vec<4, bool, Q> isfinite( - vec<4, T, Q> const& x) - { - return vec<4, bool, Q>( - isfinite(x.x), - isfinite(x.y), - isfinite(x.z), - isfinite(x.w)); - } - -}//namespace glm diff --git a/third_party/glm/gtx/component_wise.hpp b/third_party/glm/gtx/component_wise.hpp deleted file mode 100755 index 34a2b0a..0000000 --- a/third_party/glm/gtx/component_wise.hpp +++ /dev/null @@ -1,69 +0,0 @@ -/// @ref gtx_component_wise -/// @file glm/gtx/component_wise.hpp -/// @date 2007-05-21 / 2011-06-07 -/// @author Christophe Riccio -/// -/// @see core (dependence) -/// -/// @defgroup gtx_component_wise GLM_GTX_component_wise -/// @ingroup gtx -/// -/// Include to use the features of this extension. -/// -/// Operations between components of a type - -#pragma once - -// Dependencies -#include "../detail/setup.hpp" -#include "../detail/qualifier.hpp" - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# ifndef GLM_ENABLE_EXPERIMENTAL -# pragma message("GLM: GLM_GTX_component_wise is an experimental extension and may change in the future. Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.") -# else -# pragma message("GLM: GLM_GTX_component_wise extension included") -# endif -#endif - -namespace glm -{ - /// @addtogroup gtx_component_wise - /// @{ - - /// Convert an integer vector to a normalized float vector. - /// If the parameter value type is already a floating qualifier type, the value is passed through. - /// @see gtx_component_wise - template - GLM_FUNC_DECL vec compNormalize(vec const& v); - - /// Convert a normalized float vector to an integer vector. - /// If the parameter value type is already a floating qualifier type, the value is passed through. 
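// [Editor's note -- illustrative sketch, not part of the deleted GLM sources.]
// The GLM_GTX_compatibility files removed above provide Cg/HLSL-style wrappers (lerp,
// saturate, atan2, isfinite and the float2/float3/... typedefs). A minimal, hedged usage
// sketch: the helper name and values are made up, and it assumes GLM is on the include
// path with GLM_ENABLE_EXPERIMENTAL defined, as the header's own message requests.
#define GLM_ENABLE_EXPERIMENTAL
#include <glm/glm.hpp>
#include <glm/gtx/compatibility.hpp>

static glm::float3 blend_and_clamp(const glm::float3& a, const glm::float3& b)
{
	glm::float3 c = glm::lerp(a, b, 0.25f); // same result as glm::mix(a, b, 0.25f)
	return glm::saturate(c);                // clamps every component to [0, 1]
}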
- /// @see gtx_component_wise - template - GLM_FUNC_DECL vec compScale(vec const& v); - - /// Add all vector components together. - /// @see gtx_component_wise - template - GLM_FUNC_DECL typename genType::value_type compAdd(genType const& v); - - /// Multiply all vector components together. - /// @see gtx_component_wise - template - GLM_FUNC_DECL typename genType::value_type compMul(genType const& v); - - /// Find the minimum value between single vector components. - /// @see gtx_component_wise - template - GLM_FUNC_DECL typename genType::value_type compMin(genType const& v); - - /// Find the maximum value between single vector components. - /// @see gtx_component_wise - template - GLM_FUNC_DECL typename genType::value_type compMax(genType const& v); - - /// @} -}//namespace glm - -#include "component_wise.inl" diff --git a/third_party/glm/gtx/component_wise.inl b/third_party/glm/gtx/component_wise.inl deleted file mode 100755 index cbbc7d4..0000000 --- a/third_party/glm/gtx/component_wise.inl +++ /dev/null @@ -1,127 +0,0 @@ -/// @ref gtx_component_wise - -#include - -namespace glm{ -namespace detail -{ - template - struct compute_compNormalize - {}; - - template - struct compute_compNormalize - { - GLM_FUNC_QUALIFIER static vec call(vec const& v) - { - floatType const Min = static_cast(std::numeric_limits::min()); - floatType const Max = static_cast(std::numeric_limits::max()); - return (vec(v) - Min) / (Max - Min) * static_cast(2) - static_cast(1); - } - }; - - template - struct compute_compNormalize - { - GLM_FUNC_QUALIFIER static vec call(vec const& v) - { - return vec(v) / static_cast(std::numeric_limits::max()); - } - }; - - template - struct compute_compNormalize - { - GLM_FUNC_QUALIFIER static vec call(vec const& v) - { - return v; - } - }; - - template - struct compute_compScale - {}; - - template - struct compute_compScale - { - GLM_FUNC_QUALIFIER static vec call(vec const& v) - { - floatType const Max = static_cast(std::numeric_limits::max()) + static_cast(0.5); - vec const Scaled(v * Max); - vec const Result(Scaled - static_cast(0.5)); - return Result; - } - }; - - template - struct compute_compScale - { - GLM_FUNC_QUALIFIER static vec call(vec const& v) - { - return vec(vec(v) * static_cast(std::numeric_limits::max())); - } - }; - - template - struct compute_compScale - { - GLM_FUNC_QUALIFIER static vec call(vec const& v) - { - return v; - } - }; -}//namespace detail - - template - GLM_FUNC_QUALIFIER vec compNormalize(vec const& v) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'compNormalize' accepts only floating-point types for 'floatType' template parameter"); - - return detail::compute_compNormalize::is_integer, std::numeric_limits::is_signed>::call(v); - } - - template - GLM_FUNC_QUALIFIER vec compScale(vec const& v) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'compScale' accepts only floating-point types for 'floatType' template parameter"); - - return detail::compute_compScale::is_integer, std::numeric_limits::is_signed>::call(v); - } - - template - GLM_FUNC_QUALIFIER T compAdd(vec const& v) - { - T Result(0); - for(length_t i = 0, n = v.length(); i < n; ++i) - Result += v[i]; - return Result; - } - - template - GLM_FUNC_QUALIFIER T compMul(vec const& v) - { - T Result(1); - for(length_t i = 0, n = v.length(); i < n; ++i) - Result *= v[i]; - return Result; - } - - template - GLM_FUNC_QUALIFIER T compMin(vec const& v) - { - T Result(v[0]); - for(length_t i = 1, n = v.length(); i < n; ++i) - Result = min(Result, v[i]); - return Result; - } - - 
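// [Editor's note -- illustrative sketch, not part of the deleted GLM sources.]
// The GLM_GTX_component_wise reductions above fold an operation across a vector's
// components (compAdd, compMul, compMin; compMax follows just below). Hedged example with
// a made-up helper name; assumes GLM is available and GLM_ENABLE_EXPERIMENTAL is defined.
#define GLM_ENABLE_EXPERIMENTAL
#include <glm/glm.hpp>
#include <glm/gtx/component_wise.hpp>

static float average_channel(const glm::vec3& rgb)
{
	return glm::compAdd(rgb) / 3.0f; // sums x + y + z, then averages
}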
template - GLM_FUNC_QUALIFIER T compMax(vec const& v) - { - T Result(v[0]); - for(length_t i = 1, n = v.length(); i < n; ++i) - Result = max(Result, v[i]); - return Result; - } -}//namespace glm diff --git a/third_party/glm/gtx/dual_quaternion.hpp b/third_party/glm/gtx/dual_quaternion.hpp deleted file mode 100755 index 6a51ab7..0000000 --- a/third_party/glm/gtx/dual_quaternion.hpp +++ /dev/null @@ -1,274 +0,0 @@ -/// @ref gtx_dual_quaternion -/// @file glm/gtx/dual_quaternion.hpp -/// @author Maksim Vorobiev (msomeone@gmail.com) -/// -/// @see core (dependence) -/// @see gtc_constants (dependence) -/// @see gtc_quaternion (dependence) -/// -/// @defgroup gtx_dual_quaternion GLM_GTX_dual_quaternion -/// @ingroup gtx -/// -/// Include to use the features of this extension. -/// -/// Defines a templated dual-quaternion type and several dual-quaternion operations. - -#pragma once - -// Dependency: -#include "../glm.hpp" -#include "../gtc/constants.hpp" -#include "../gtc/quaternion.hpp" - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# ifndef GLM_ENABLE_EXPERIMENTAL -# pragma message("GLM: GLM_GTX_dual_quaternion is an experimental extension and may change in the future. Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.") -# else -# pragma message("GLM: GLM_GTX_dual_quaternion extension included") -# endif -#endif - -namespace glm -{ - /// @addtogroup gtx_dual_quaternion - /// @{ - - template - struct tdualquat - { - // -- Implementation detail -- - - typedef T value_type; - typedef qua part_type; - - // -- Data -- - - qua real, dual; - - // -- Component accesses -- - - typedef length_t length_type; - /// Return the count of components of a dual quaternion - GLM_FUNC_DECL static GLM_CONSTEXPR length_type length(){return 2;} - - GLM_FUNC_DECL part_type & operator[](length_type i); - GLM_FUNC_DECL part_type const& operator[](length_type i) const; - - // -- Implicit basic constructors -- - - GLM_FUNC_DECL GLM_CONSTEXPR tdualquat() GLM_DEFAULT; - GLM_FUNC_DECL GLM_CONSTEXPR tdualquat(tdualquat const& d) GLM_DEFAULT; - template - GLM_FUNC_DECL GLM_CONSTEXPR tdualquat(tdualquat const& d); - - // -- Explicit basic constructors -- - - GLM_FUNC_DECL GLM_CONSTEXPR tdualquat(qua const& real); - GLM_FUNC_DECL GLM_CONSTEXPR tdualquat(qua const& orientation, vec<3, T, Q> const& translation); - GLM_FUNC_DECL GLM_CONSTEXPR tdualquat(qua const& real, qua const& dual); - - // -- Conversion constructors -- - - template - GLM_FUNC_DECL GLM_CONSTEXPR GLM_EXPLICIT tdualquat(tdualquat const& q); - - GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR tdualquat(mat<2, 4, T, Q> const& holder_mat); - GLM_FUNC_DECL GLM_EXPLICIT GLM_CONSTEXPR tdualquat(mat<3, 4, T, Q> const& aug_mat); - - // -- Unary arithmetic operators -- - - GLM_FUNC_DECL tdualquat & operator=(tdualquat const& m) GLM_DEFAULT; - - template - GLM_FUNC_DECL tdualquat & operator=(tdualquat const& m); - template - GLM_FUNC_DECL tdualquat & operator*=(U s); - template - GLM_FUNC_DECL tdualquat & operator/=(U s); - }; - - // -- Unary bit operators -- - - template - GLM_FUNC_DECL tdualquat operator+(tdualquat const& q); - - template - GLM_FUNC_DECL tdualquat operator-(tdualquat const& q); - - // -- Binary operators -- - - template - GLM_FUNC_DECL tdualquat operator+(tdualquat const& q, tdualquat const& p); - - template - GLM_FUNC_DECL tdualquat operator*(tdualquat const& q, tdualquat const& p); - - template - GLM_FUNC_DECL vec<3, T, Q> operator*(tdualquat const& q, vec<3, T, Q> const& v); - - template - GLM_FUNC_DECL 
vec<3, T, Q> operator*(vec<3, T, Q> const& v, tdualquat const& q); - - template - GLM_FUNC_DECL vec<4, T, Q> operator*(tdualquat const& q, vec<4, T, Q> const& v); - - template - GLM_FUNC_DECL vec<4, T, Q> operator*(vec<4, T, Q> const& v, tdualquat const& q); - - template - GLM_FUNC_DECL tdualquat operator*(tdualquat const& q, T const& s); - - template - GLM_FUNC_DECL tdualquat operator*(T const& s, tdualquat const& q); - - template - GLM_FUNC_DECL tdualquat operator/(tdualquat const& q, T const& s); - - // -- Boolean operators -- - - template - GLM_FUNC_DECL bool operator==(tdualquat const& q1, tdualquat const& q2); - - template - GLM_FUNC_DECL bool operator!=(tdualquat const& q1, tdualquat const& q2); - - /// Creates an identity dual quaternion. - /// - /// @see gtx_dual_quaternion - template - GLM_FUNC_DECL tdualquat dual_quat_identity(); - - /// Returns the normalized quaternion. - /// - /// @see gtx_dual_quaternion - template - GLM_FUNC_DECL tdualquat normalize(tdualquat const& q); - - /// Returns the linear interpolation of two dual quaternion. - /// - /// @see gtc_dual_quaternion - template - GLM_FUNC_DECL tdualquat lerp(tdualquat const& x, tdualquat const& y, T const& a); - - /// Returns the q inverse. - /// - /// @see gtx_dual_quaternion - template - GLM_FUNC_DECL tdualquat inverse(tdualquat const& q); - - /// Converts a quaternion to a 2 * 4 matrix. - /// - /// @see gtx_dual_quaternion - template - GLM_FUNC_DECL mat<2, 4, T, Q> mat2x4_cast(tdualquat const& x); - - /// Converts a quaternion to a 3 * 4 matrix. - /// - /// @see gtx_dual_quaternion - template - GLM_FUNC_DECL mat<3, 4, T, Q> mat3x4_cast(tdualquat const& x); - - /// Converts a 2 * 4 matrix (matrix which holds real and dual parts) to a quaternion. - /// - /// @see gtx_dual_quaternion - template - GLM_FUNC_DECL tdualquat dualquat_cast(mat<2, 4, T, Q> const& x); - - /// Converts a 3 * 4 matrix (augmented matrix rotation + translation) to a quaternion. - /// - /// @see gtx_dual_quaternion - template - GLM_FUNC_DECL tdualquat dualquat_cast(mat<3, 4, T, Q> const& x); - - - /// Dual-quaternion of low single-qualifier floating-point numbers. - /// - /// @see gtx_dual_quaternion - typedef tdualquat lowp_dualquat; - - /// Dual-quaternion of medium single-qualifier floating-point numbers. - /// - /// @see gtx_dual_quaternion - typedef tdualquat mediump_dualquat; - - /// Dual-quaternion of high single-qualifier floating-point numbers. - /// - /// @see gtx_dual_quaternion - typedef tdualquat highp_dualquat; - - - /// Dual-quaternion of low single-qualifier floating-point numbers. - /// - /// @see gtx_dual_quaternion - typedef tdualquat lowp_fdualquat; - - /// Dual-quaternion of medium single-qualifier floating-point numbers. - /// - /// @see gtx_dual_quaternion - typedef tdualquat mediump_fdualquat; - - /// Dual-quaternion of high single-qualifier floating-point numbers. - /// - /// @see gtx_dual_quaternion - typedef tdualquat highp_fdualquat; - - - /// Dual-quaternion of low double-qualifier floating-point numbers. - /// - /// @see gtx_dual_quaternion - typedef tdualquat lowp_ddualquat; - - /// Dual-quaternion of medium double-qualifier floating-point numbers. - /// - /// @see gtx_dual_quaternion - typedef tdualquat mediump_ddualquat; - - /// Dual-quaternion of high double-qualifier floating-point numbers. 
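// [Editor's note -- illustrative sketch, not part of the deleted GLM sources.]
// GLM_GTX_dual_quaternion, deleted above, packs a rigid transform (rotation plus
// translation) into a single dual quaternion. Hedged example with made-up values and a
// made-up helper name; assumes GLM is available and GLM_ENABLE_EXPERIMENTAL is defined.
#define GLM_ENABLE_EXPERIMENTAL
#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>
#include <glm/gtx/dual_quaternion.hpp>

static glm::vec3 apply_rigid_transform(const glm::vec3& p)
{
	glm::quat r = glm::angleAxis(glm::radians(90.0f), glm::vec3(0.0f, 1.0f, 0.0f));
	glm::dualquat dq(r, glm::vec3(1.0f, 0.0f, 0.0f)); // tdualquat(orientation, translation)
	return dq * p;                                    // applies the encoded rigid transform to the point
}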
- /// - /// @see gtx_dual_quaternion - typedef tdualquat highp_ddualquat; - - -#if(!defined(GLM_PRECISION_HIGHP_FLOAT) && !defined(GLM_PRECISION_MEDIUMP_FLOAT) && !defined(GLM_PRECISION_LOWP_FLOAT)) - /// Dual-quaternion of floating-point numbers. - /// - /// @see gtx_dual_quaternion - typedef highp_fdualquat dualquat; - - /// Dual-quaternion of single-qualifier floating-point numbers. - /// - /// @see gtx_dual_quaternion - typedef highp_fdualquat fdualquat; -#elif(defined(GLM_PRECISION_HIGHP_FLOAT) && !defined(GLM_PRECISION_MEDIUMP_FLOAT) && !defined(GLM_PRECISION_LOWP_FLOAT)) - typedef highp_fdualquat dualquat; - typedef highp_fdualquat fdualquat; -#elif(!defined(GLM_PRECISION_HIGHP_FLOAT) && defined(GLM_PRECISION_MEDIUMP_FLOAT) && !defined(GLM_PRECISION_LOWP_FLOAT)) - typedef mediump_fdualquat dualquat; - typedef mediump_fdualquat fdualquat; -#elif(!defined(GLM_PRECISION_HIGHP_FLOAT) && !defined(GLM_PRECISION_MEDIUMP_FLOAT) && defined(GLM_PRECISION_LOWP_FLOAT)) - typedef lowp_fdualquat dualquat; - typedef lowp_fdualquat fdualquat; -#else -# error "GLM error: multiple default precision requested for single-precision floating-point types" -#endif - - -#if(!defined(GLM_PRECISION_HIGHP_DOUBLE) && !defined(GLM_PRECISION_MEDIUMP_DOUBLE) && !defined(GLM_PRECISION_LOWP_DOUBLE)) - /// Dual-quaternion of default double-qualifier floating-point numbers. - /// - /// @see gtx_dual_quaternion - typedef highp_ddualquat ddualquat; -#elif(defined(GLM_PRECISION_HIGHP_DOUBLE) && !defined(GLM_PRECISION_MEDIUMP_DOUBLE) && !defined(GLM_PRECISION_LOWP_DOUBLE)) - typedef highp_ddualquat ddualquat; -#elif(!defined(GLM_PRECISION_HIGHP_DOUBLE) && defined(GLM_PRECISION_MEDIUMP_DOUBLE) && !defined(GLM_PRECISION_LOWP_DOUBLE)) - typedef mediump_ddualquat ddualquat; -#elif(!defined(GLM_PRECISION_HIGHP_DOUBLE) && !defined(GLM_PRECISION_MEDIUMP_DOUBLE) && defined(GLM_PRECISION_LOWP_DOUBLE)) - typedef lowp_ddualquat ddualquat; -#else -# error "GLM error: Multiple default precision requested for double-precision floating-point types" -#endif - - /// @} -} //namespace glm - -#include "dual_quaternion.inl" diff --git a/third_party/glm/gtx/dual_quaternion.inl b/third_party/glm/gtx/dual_quaternion.inl deleted file mode 100755 index fad07ea..0000000 --- a/third_party/glm/gtx/dual_quaternion.inl +++ /dev/null @@ -1,352 +0,0 @@ -/// @ref gtx_dual_quaternion - -#include "../geometric.hpp" -#include - -namespace glm -{ - // -- Component accesses -- - - template - GLM_FUNC_QUALIFIER typename tdualquat::part_type & tdualquat::operator[](typename tdualquat::length_type i) - { - assert(i >= 0 && i < this->length()); - return (&real)[i]; - } - - template - GLM_FUNC_QUALIFIER typename tdualquat::part_type const& tdualquat::operator[](typename tdualquat::length_type i) const - { - assert(i >= 0 && i < this->length()); - return (&real)[i]; - } - - // -- Implicit basic constructors -- - -# if GLM_CONFIG_DEFAULTED_FUNCTIONS == GLM_DISABLE - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR tdualquat::tdualquat() -# if GLM_CONFIG_DEFAULTED_FUNCTIONS != GLM_DISABLE - : real(qua()) - , dual(qua(0, 0, 0, 0)) -# endif - {} - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR tdualquat::tdualquat(tdualquat const& d) - : real(d.real) - , dual(d.dual) - {} -# endif - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR tdualquat::tdualquat(tdualquat const& d) - : real(d.real) - , dual(d.dual) - {} - - // -- Explicit basic constructors -- - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR tdualquat::tdualquat(qua const& r) - : real(r), dual(qua(0, 0, 0, 0)) - 
{} - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR tdualquat::tdualquat(qua const& q, vec<3, T, Q> const& p) - : real(q), dual( - T(-0.5) * ( p.x*q.x + p.y*q.y + p.z*q.z), - T(+0.5) * ( p.x*q.w + p.y*q.z - p.z*q.y), - T(+0.5) * (-p.x*q.z + p.y*q.w + p.z*q.x), - T(+0.5) * ( p.x*q.y - p.y*q.x + p.z*q.w)) - {} - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR tdualquat::tdualquat(qua const& r, qua const& d) - : real(r), dual(d) - {} - - // -- Conversion constructors -- - - template - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR tdualquat::tdualquat(tdualquat const& q) - : real(q.real) - , dual(q.dual) - {} - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR tdualquat::tdualquat(mat<2, 4, T, Q> const& m) - { - *this = dualquat_cast(m); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR tdualquat::tdualquat(mat<3, 4, T, Q> const& m) - { - *this = dualquat_cast(m); - } - - // -- Unary arithmetic operators -- - -# if GLM_CONFIG_DEFAULTED_FUNCTIONS == GLM_DISABLE - template - GLM_FUNC_QUALIFIER tdualquat & tdualquat::operator=(tdualquat const& q) - { - this->real = q.real; - this->dual = q.dual; - return *this; - } -# endif - - template - template - GLM_FUNC_QUALIFIER tdualquat & tdualquat::operator=(tdualquat const& q) - { - this->real = q.real; - this->dual = q.dual; - return *this; - } - - template - template - GLM_FUNC_QUALIFIER tdualquat & tdualquat::operator*=(U s) - { - this->real *= static_cast(s); - this->dual *= static_cast(s); - return *this; - } - - template - template - GLM_FUNC_QUALIFIER tdualquat & tdualquat::operator/=(U s) - { - this->real /= static_cast(s); - this->dual /= static_cast(s); - return *this; - } - - // -- Unary bit operators -- - - template - GLM_FUNC_QUALIFIER tdualquat operator+(tdualquat const& q) - { - return q; - } - - template - GLM_FUNC_QUALIFIER tdualquat operator-(tdualquat const& q) - { - return tdualquat(-q.real, -q.dual); - } - - // -- Binary operators -- - - template - GLM_FUNC_QUALIFIER tdualquat operator+(tdualquat const& q, tdualquat const& p) - { - return tdualquat(q.real + p.real,q.dual + p.dual); - } - - template - GLM_FUNC_QUALIFIER tdualquat operator*(tdualquat const& p, tdualquat const& o) - { - return tdualquat(p.real * o.real,p.real * o.dual + p.dual * o.real); - } - - template - GLM_FUNC_QUALIFIER vec<3, T, Q> operator*(tdualquat const& q, vec<3, T, Q> const& v) - { - vec<3, T, Q> const real_v3(q.real.x,q.real.y,q.real.z); - vec<3, T, Q> const dual_v3(q.dual.x,q.dual.y,q.dual.z); - return (cross(real_v3, cross(real_v3,v) + v * q.real.w + dual_v3) + dual_v3 * q.real.w - real_v3 * q.dual.w) * T(2) + v; - } - - template - GLM_FUNC_QUALIFIER vec<3, T, Q> operator*(vec<3, T, Q> const& v, tdualquat const& q) - { - return glm::inverse(q) * v; - } - - template - GLM_FUNC_QUALIFIER vec<4, T, Q> operator*(tdualquat const& q, vec<4, T, Q> const& v) - { - return vec<4, T, Q>(q * vec<3, T, Q>(v), v.w); - } - - template - GLM_FUNC_QUALIFIER vec<4, T, Q> operator*(vec<4, T, Q> const& v, tdualquat const& q) - { - return glm::inverse(q) * v; - } - - template - GLM_FUNC_QUALIFIER tdualquat operator*(tdualquat const& q, T const& s) - { - return tdualquat(q.real * s, q.dual * s); - } - - template - GLM_FUNC_QUALIFIER tdualquat operator*(T const& s, tdualquat const& q) - { - return q * s; - } - - template - GLM_FUNC_QUALIFIER tdualquat operator/(tdualquat const& q, T const& s) - { - return tdualquat(q.real / s, q.dual / s); - } - - // -- Boolean operators -- - - template - GLM_FUNC_QUALIFIER bool operator==(tdualquat const& q1, tdualquat const& q2) - { - return 
(q1.real == q2.real) && (q1.dual == q2.dual); - } - - template - GLM_FUNC_QUALIFIER bool operator!=(tdualquat const& q1, tdualquat const& q2) - { - return (q1.real != q2.real) || (q1.dual != q2.dual); - } - - // -- Operations -- - - template - GLM_FUNC_QUALIFIER tdualquat dual_quat_identity() - { - return tdualquat( - qua(static_cast(1), static_cast(0), static_cast(0), static_cast(0)), - qua(static_cast(0), static_cast(0), static_cast(0), static_cast(0))); - } - - template - GLM_FUNC_QUALIFIER tdualquat normalize(tdualquat const& q) - { - return q / length(q.real); - } - - template - GLM_FUNC_QUALIFIER tdualquat lerp(tdualquat const& x, tdualquat const& y, T const& a) - { - // Dual Quaternion Linear blend aka DLB: - // Lerp is only defined in [0, 1] - assert(a >= static_cast(0)); - assert(a <= static_cast(1)); - T const k = dot(x.real,y.real) < static_cast(0) ? -a : a; - T const one(1); - return tdualquat(x * (one - a) + y * k); - } - - template - GLM_FUNC_QUALIFIER tdualquat inverse(tdualquat const& q) - { - const glm::qua real = conjugate(q.real); - const glm::qua dual = conjugate(q.dual); - return tdualquat(real, dual + (real * (-2.0f * dot(real,dual)))); - } - - template - GLM_FUNC_QUALIFIER mat<2, 4, T, Q> mat2x4_cast(tdualquat const& x) - { - return mat<2, 4, T, Q>( x[0].x, x[0].y, x[0].z, x[0].w, x[1].x, x[1].y, x[1].z, x[1].w ); - } - - template - GLM_FUNC_QUALIFIER mat<3, 4, T, Q> mat3x4_cast(tdualquat const& x) - { - qua r = x.real / length2(x.real); - - qua const rr(r.w * x.real.w, r.x * x.real.x, r.y * x.real.y, r.z * x.real.z); - r *= static_cast(2); - - T const xy = r.x * x.real.y; - T const xz = r.x * x.real.z; - T const yz = r.y * x.real.z; - T const wx = r.w * x.real.x; - T const wy = r.w * x.real.y; - T const wz = r.w * x.real.z; - - vec<4, T, Q> const a( - rr.w + rr.x - rr.y - rr.z, - xy - wz, - xz + wy, - -(x.dual.w * r.x - x.dual.x * r.w + x.dual.y * r.z - x.dual.z * r.y)); - - vec<4, T, Q> const b( - xy + wz, - rr.w + rr.y - rr.x - rr.z, - yz - wx, - -(x.dual.w * r.y - x.dual.x * r.z - x.dual.y * r.w + x.dual.z * r.x)); - - vec<4, T, Q> const c( - xz - wy, - yz + wx, - rr.w + rr.z - rr.x - rr.y, - -(x.dual.w * r.z + x.dual.x * r.y - x.dual.y * r.x - x.dual.z * r.w)); - - return mat<3, 4, T, Q>(a, b, c); - } - - template - GLM_FUNC_QUALIFIER tdualquat dualquat_cast(mat<2, 4, T, Q> const& x) - { - return tdualquat( - qua( x[0].w, x[0].x, x[0].y, x[0].z ), - qua( x[1].w, x[1].x, x[1].y, x[1].z )); - } - - template - GLM_FUNC_QUALIFIER tdualquat dualquat_cast(mat<3, 4, T, Q> const& x) - { - qua real; - - T const trace = x[0].x + x[1].y + x[2].z; - if(trace > static_cast(0)) - { - T const r = sqrt(T(1) + trace); - T const invr = static_cast(0.5) / r; - real.w = static_cast(0.5) * r; - real.x = (x[2].y - x[1].z) * invr; - real.y = (x[0].z - x[2].x) * invr; - real.z = (x[1].x - x[0].y) * invr; - } - else if(x[0].x > x[1].y && x[0].x > x[2].z) - { - T const r = sqrt(T(1) + x[0].x - x[1].y - x[2].z); - T const invr = static_cast(0.5) / r; - real.x = static_cast(0.5)*r; - real.y = (x[1].x + x[0].y) * invr; - real.z = (x[0].z + x[2].x) * invr; - real.w = (x[2].y - x[1].z) * invr; - } - else if(x[1].y > x[2].z) - { - T const r = sqrt(T(1) + x[1].y - x[0].x - x[2].z); - T const invr = static_cast(0.5) / r; - real.x = (x[1].x + x[0].y) * invr; - real.y = static_cast(0.5) * r; - real.z = (x[2].y + x[1].z) * invr; - real.w = (x[0].z - x[2].x) * invr; - } - else - { - T const r = sqrt(T(1) + x[2].z - x[0].x - x[1].y); - T const invr = static_cast(0.5) / r; - real.x = (x[0].z + x[2].x) 
* invr; - real.y = (x[2].y + x[1].z) * invr; - real.z = static_cast(0.5) * r; - real.w = (x[1].x - x[0].y) * invr; - } - - qua dual; - dual.x = static_cast(0.5) * ( x[0].w * real.w + x[1].w * real.z - x[2].w * real.y); - dual.y = static_cast(0.5) * (-x[0].w * real.z + x[1].w * real.w + x[2].w * real.x); - dual.z = static_cast(0.5) * ( x[0].w * real.y - x[1].w * real.x + x[2].w * real.w); - dual.w = -static_cast(0.5) * ( x[0].w * real.x + x[1].w * real.y + x[2].w * real.z); - return tdualquat(real, dual); - } -}//namespace glm diff --git a/third_party/glm/gtx/easing.hpp b/third_party/glm/gtx/easing.hpp deleted file mode 100755 index 57f3d61..0000000 --- a/third_party/glm/gtx/easing.hpp +++ /dev/null @@ -1,219 +0,0 @@ -/// @ref gtx_easing -/// @file glm/gtx/easing.hpp -/// @author Robert Chisholm -/// -/// @see core (dependence) -/// -/// @defgroup gtx_easing GLM_GTX_easing -/// @ingroup gtx -/// -/// Include to use the features of this extension. -/// -/// Easing functions for animations and transitons -/// All functions take a parameter x in the range [0.0,1.0] -/// -/// Based on the AHEasing project of Warren Moore (https://github.com/warrenm/AHEasing) - -#pragma once - -// Dependency: -#include "../glm.hpp" -#include "../gtc/constants.hpp" -#include "../detail/qualifier.hpp" - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# ifndef GLM_ENABLE_EXPERIMENTAL -# pragma message("GLM: GLM_GTX_easing is an experimental extension and may change in the future. Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.") -# else -# pragma message("GLM: GLM_GTX_easing extension included") -# endif -#endif - -namespace glm{ - /// @addtogroup gtx_easing - /// @{ - - /// Modelled after the line y = x - /// @see gtx_easing - template - GLM_FUNC_DECL genType linearInterpolation(genType const & a); - - /// Modelled after the parabola y = x^2 - /// @see gtx_easing - template - GLM_FUNC_DECL genType quadraticEaseIn(genType const & a); - - /// Modelled after the parabola y = -x^2 + 2x - /// @see gtx_easing - template - GLM_FUNC_DECL genType quadraticEaseOut(genType const & a); - - /// Modelled after the piecewise quadratic - /// y = (1/2)((2x)^2) ; [0, 0.5) - /// y = -(1/2)((2x-1)*(2x-3) - 1) ; [0.5, 1] - /// @see gtx_easing - template - GLM_FUNC_DECL genType quadraticEaseInOut(genType const & a); - - /// Modelled after the cubic y = x^3 - template - GLM_FUNC_DECL genType cubicEaseIn(genType const & a); - - /// Modelled after the cubic y = (x - 1)^3 + 1 - /// @see gtx_easing - template - GLM_FUNC_DECL genType cubicEaseOut(genType const & a); - - /// Modelled after the piecewise cubic - /// y = (1/2)((2x)^3) ; [0, 0.5) - /// y = (1/2)((2x-2)^3 + 2) ; [0.5, 1] - /// @see gtx_easing - template - GLM_FUNC_DECL genType cubicEaseInOut(genType const & a); - - /// Modelled after the quartic x^4 - /// @see gtx_easing - template - GLM_FUNC_DECL genType quarticEaseIn(genType const & a); - - /// Modelled after the quartic y = 1 - (x - 1)^4 - /// @see gtx_easing - template - GLM_FUNC_DECL genType quarticEaseOut(genType const & a); - - /// Modelled after the piecewise quartic - /// y = (1/2)((2x)^4) ; [0, 0.5) - /// y = -(1/2)((2x-2)^4 - 2) ; [0.5, 1] - /// @see gtx_easing - template - GLM_FUNC_DECL genType quarticEaseInOut(genType const & a); - - /// Modelled after the quintic y = x^5 - /// @see gtx_easing - template - GLM_FUNC_DECL genType quinticEaseIn(genType const & a); - - /// Modelled after the quintic y = (x - 1)^5 + 1 - /// @see gtx_easing - template - GLM_FUNC_DECL 
genType quinticEaseOut(genType const & a); - - /// Modelled after the piecewise quintic - /// y = (1/2)((2x)^5) ; [0, 0.5) - /// y = (1/2)((2x-2)^5 + 2) ; [0.5, 1] - /// @see gtx_easing - template - GLM_FUNC_DECL genType quinticEaseInOut(genType const & a); - - /// Modelled after quarter-cycle of sine wave - /// @see gtx_easing - template - GLM_FUNC_DECL genType sineEaseIn(genType const & a); - - /// Modelled after quarter-cycle of sine wave (different phase) - /// @see gtx_easing - template - GLM_FUNC_DECL genType sineEaseOut(genType const & a); - - /// Modelled after half sine wave - /// @see gtx_easing - template - GLM_FUNC_DECL genType sineEaseInOut(genType const & a); - - /// Modelled after shifted quadrant IV of unit circle - /// @see gtx_easing - template - GLM_FUNC_DECL genType circularEaseIn(genType const & a); - - /// Modelled after shifted quadrant II of unit circle - /// @see gtx_easing - template - GLM_FUNC_DECL genType circularEaseOut(genType const & a); - - /// Modelled after the piecewise circular function - /// y = (1/2)(1 - sqrt(1 - 4x^2)) ; [0, 0.5) - /// y = (1/2)(sqrt(-(2x - 3)*(2x - 1)) + 1) ; [0.5, 1] - /// @see gtx_easing - template - GLM_FUNC_DECL genType circularEaseInOut(genType const & a); - - /// Modelled after the exponential function y = 2^(10(x - 1)) - /// @see gtx_easing - template - GLM_FUNC_DECL genType exponentialEaseIn(genType const & a); - - /// Modelled after the exponential function y = -2^(-10x) + 1 - /// @see gtx_easing - template - GLM_FUNC_DECL genType exponentialEaseOut(genType const & a); - - /// Modelled after the piecewise exponential - /// y = (1/2)2^(10(2x - 1)) ; [0,0.5) - /// y = -(1/2)*2^(-10(2x - 1))) + 1 ; [0.5,1] - /// @see gtx_easing - template - GLM_FUNC_DECL genType exponentialEaseInOut(genType const & a); - - /// Modelled after the damped sine wave y = sin(13pi/2*x)*pow(2, 10 * (x - 1)) - /// @see gtx_easing - template - GLM_FUNC_DECL genType elasticEaseIn(genType const & a); - - /// Modelled after the damped sine wave y = sin(-13pi/2*(x + 1))*pow(2, -10x) + 1 - /// @see gtx_easing - template - GLM_FUNC_DECL genType elasticEaseOut(genType const & a); - - /// Modelled after the piecewise exponentially-damped sine wave: - /// y = (1/2)*sin(13pi/2*(2*x))*pow(2, 10 * ((2*x) - 1)) ; [0,0.5) - /// y = (1/2)*(sin(-13pi/2*((2x-1)+1))*pow(2,-10(2*x-1)) + 2) ; [0.5, 1] - /// @see gtx_easing - template - GLM_FUNC_DECL genType elasticEaseInOut(genType const & a); - - /// @see gtx_easing - template - GLM_FUNC_DECL genType backEaseIn(genType const& a); - - /// @see gtx_easing - template - GLM_FUNC_DECL genType backEaseOut(genType const& a); - - /// @see gtx_easing - template - GLM_FUNC_DECL genType backEaseInOut(genType const& a); - - /// @param a parameter - /// @param o Optional overshoot modifier - /// @see gtx_easing - template - GLM_FUNC_DECL genType backEaseIn(genType const& a, genType const& o); - - /// @param a parameter - /// @param o Optional overshoot modifier - /// @see gtx_easing - template - GLM_FUNC_DECL genType backEaseOut(genType const& a, genType const& o); - - /// @param a parameter - /// @param o Optional overshoot modifier - /// @see gtx_easing - template - GLM_FUNC_DECL genType backEaseInOut(genType const& a, genType const& o); - - /// @see gtx_easing - template - GLM_FUNC_DECL genType bounceEaseIn(genType const& a); - - /// @see gtx_easing - template - GLM_FUNC_DECL genType bounceEaseOut(genType const& a); - - /// @see gtx_easing - template - GLM_FUNC_DECL genType bounceEaseInOut(genType const& a); - - /// @} 
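// [Editor's note -- illustrative sketch, not part of the deleted GLM sources.]
// The GLM_GTX_easing functions declared above all map a normalized time t in [0, 1] to an
// eased blend factor. Hedged example with a made-up helper name; assumes GLM is available
// and GLM_ENABLE_EXPERIMENTAL is defined.
#define GLM_ENABLE_EXPERIMENTAL
#include <glm/glm.hpp>
#include <glm/gtx/easing.hpp>

static glm::vec2 animate(const glm::vec2& from, const glm::vec2& to, float t)
{
	float k = glm::quadraticEaseInOut(t); // any *EaseIn / *EaseOut / *EaseInOut variant fits here
	return glm::mix(from, to, k);         // eased interpolation between the two endpoints
}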
-}//namespace glm - -#include "easing.inl" diff --git a/third_party/glm/gtx/easing.inl b/third_party/glm/gtx/easing.inl deleted file mode 100755 index 4b7d05b..0000000 --- a/third_party/glm/gtx/easing.inl +++ /dev/null @@ -1,436 +0,0 @@ -/// @ref gtx_easing - -#include - -namespace glm{ - - template - GLM_FUNC_QUALIFIER genType linearInterpolation(genType const& a) - { - // Only defined in [0, 1] - assert(a >= zero()); - assert(a <= one()); - - return a; - } - - template - GLM_FUNC_QUALIFIER genType quadraticEaseIn(genType const& a) - { - // Only defined in [0, 1] - assert(a >= zero()); - assert(a <= one()); - - return a * a; - } - - template - GLM_FUNC_QUALIFIER genType quadraticEaseOut(genType const& a) - { - // Only defined in [0, 1] - assert(a >= zero()); - assert(a <= one()); - - return -(a * (a - static_cast(2))); - } - - template - GLM_FUNC_QUALIFIER genType quadraticEaseInOut(genType const& a) - { - // Only defined in [0, 1] - assert(a >= zero()); - assert(a <= one()); - - if(a < static_cast(0.5)) - { - return static_cast(2) * a * a; - } - else - { - return (-static_cast(2) * a * a) + (4 * a) - one(); - } - } - - template - GLM_FUNC_QUALIFIER genType cubicEaseIn(genType const& a) - { - // Only defined in [0, 1] - assert(a >= zero()); - assert(a <= one()); - - return a * a * a; - } - - template - GLM_FUNC_QUALIFIER genType cubicEaseOut(genType const& a) - { - // Only defined in [0, 1] - assert(a >= zero()); - assert(a <= one()); - - genType const f = a - one(); - return f * f * f + one(); - } - - template - GLM_FUNC_QUALIFIER genType cubicEaseInOut(genType const& a) - { - // Only defined in [0, 1] - assert(a >= zero()); - assert(a <= one()); - - if (a < static_cast(0.5)) - { - return static_cast(4) * a * a * a; - } - else - { - genType const f = ((static_cast(2) * a) - static_cast(2)); - return static_cast(0.5) * f * f * f + one(); - } - } - - template - GLM_FUNC_QUALIFIER genType quarticEaseIn(genType const& a) - { - // Only defined in [0, 1] - assert(a >= zero()); - assert(a <= one()); - - return a * a * a * a; - } - - template - GLM_FUNC_QUALIFIER genType quarticEaseOut(genType const& a) - { - // Only defined in [0, 1] - assert(a >= zero()); - assert(a <= one()); - - genType const f = (a - one()); - return f * f * f * (one() - a) + one(); - } - - template - GLM_FUNC_QUALIFIER genType quarticEaseInOut(genType const& a) - { - // Only defined in [0, 1] - assert(a >= zero()); - assert(a <= one()); - - if(a < static_cast(0.5)) - { - return static_cast(8) * a * a * a * a; - } - else - { - genType const f = (a - one()); - return -static_cast(8) * f * f * f * f + one(); - } - } - - template - GLM_FUNC_QUALIFIER genType quinticEaseIn(genType const& a) - { - // Only defined in [0, 1] - assert(a >= zero()); - assert(a <= one()); - - return a * a * a * a * a; - } - - template - GLM_FUNC_QUALIFIER genType quinticEaseOut(genType const& a) - { - // Only defined in [0, 1] - assert(a >= zero()); - assert(a <= one()); - - genType const f = (a - one()); - return f * f * f * f * f + one(); - } - - template - GLM_FUNC_QUALIFIER genType quinticEaseInOut(genType const& a) - { - // Only defined in [0, 1] - assert(a >= zero()); - assert(a <= one()); - - if(a < static_cast(0.5)) - { - return static_cast(16) * a * a * a * a * a; - } - else - { - genType const f = ((static_cast(2) * a) - static_cast(2)); - return static_cast(0.5) * f * f * f * f * f + one(); - } - } - - template - GLM_FUNC_QUALIFIER genType sineEaseIn(genType const& a) - { - // Only defined in [0, 1] - assert(a >= zero()); - assert(a <= 
one()); - - return sin((a - one()) * half_pi()) + one(); - } - - template - GLM_FUNC_QUALIFIER genType sineEaseOut(genType const& a) - { - // Only defined in [0, 1] - assert(a >= zero()); - assert(a <= one()); - - return sin(a * half_pi()); - } - - template - GLM_FUNC_QUALIFIER genType sineEaseInOut(genType const& a) - { - // Only defined in [0, 1] - assert(a >= zero()); - assert(a <= one()); - - return static_cast(0.5) * (one() - cos(a * pi())); - } - - template - GLM_FUNC_QUALIFIER genType circularEaseIn(genType const& a) - { - // Only defined in [0, 1] - assert(a >= zero()); - assert(a <= one()); - - return one() - sqrt(one() - (a * a)); - } - - template - GLM_FUNC_QUALIFIER genType circularEaseOut(genType const& a) - { - // Only defined in [0, 1] - assert(a >= zero()); - assert(a <= one()); - - return sqrt((static_cast(2) - a) * a); - } - - template - GLM_FUNC_QUALIFIER genType circularEaseInOut(genType const& a) - { - // Only defined in [0, 1] - assert(a >= zero()); - assert(a <= one()); - - if(a < static_cast(0.5)) - { - return static_cast(0.5) * (one() - std::sqrt(one() - static_cast(4) * (a * a))); - } - else - { - return static_cast(0.5) * (std::sqrt(-((static_cast(2) * a) - static_cast(3)) * ((static_cast(2) * a) - one())) + one()); - } - } - - template - GLM_FUNC_QUALIFIER genType exponentialEaseIn(genType const& a) - { - // Only defined in [0, 1] - assert(a >= zero()); - assert(a <= one()); - - if(a <= zero()) - return a; - else - { - genType const Complementary = a - one(); - genType const Two = static_cast(2); - - return glm::pow(Two, Complementary * static_cast(10)); - } - } - - template - GLM_FUNC_QUALIFIER genType exponentialEaseOut(genType const& a) - { - // Only defined in [0, 1] - assert(a >= zero()); - assert(a <= one()); - - if(a >= one()) - return a; - else - { - return one() - glm::pow(static_cast(2), -static_cast(10) * a); - } - } - - template - GLM_FUNC_QUALIFIER genType exponentialEaseInOut(genType const& a) - { - // Only defined in [0, 1] - assert(a >= zero()); - assert(a <= one()); - - if(a < static_cast(0.5)) - return static_cast(0.5) * glm::pow(static_cast(2), (static_cast(20) * a) - static_cast(10)); - else - return -static_cast(0.5) * glm::pow(static_cast(2), (-static_cast(20) * a) + static_cast(10)) + one(); - } - - template - GLM_FUNC_QUALIFIER genType elasticEaseIn(genType const& a) - { - // Only defined in [0, 1] - assert(a >= zero()); - assert(a <= one()); - - return std::sin(static_cast(13) * half_pi() * a) * glm::pow(static_cast(2), static_cast(10) * (a - one())); - } - - template - GLM_FUNC_QUALIFIER genType elasticEaseOut(genType const& a) - { - // Only defined in [0, 1] - assert(a >= zero()); - assert(a <= one()); - - return std::sin(-static_cast(13) * half_pi() * (a + one())) * glm::pow(static_cast(2), -static_cast(10) * a) + one(); - } - - template - GLM_FUNC_QUALIFIER genType elasticEaseInOut(genType const& a) - { - // Only defined in [0, 1] - assert(a >= zero()); - assert(a <= one()); - - if(a < static_cast(0.5)) - return static_cast(0.5) * std::sin(static_cast(13) * half_pi() * (static_cast(2) * a)) * glm::pow(static_cast(2), static_cast(10) * ((static_cast(2) * a) - one())); - else - return static_cast(0.5) * (std::sin(-static_cast(13) * half_pi() * ((static_cast(2) * a - one()) + one())) * glm::pow(static_cast(2), -static_cast(10) * (static_cast(2) * a - one())) + static_cast(2)); - } - - template - GLM_FUNC_QUALIFIER genType backEaseIn(genType const& a, genType const& o) - { - // Only defined in [0, 1] - assert(a >= zero()); - assert(a <= 
one()); - - genType z = ((o + one()) * a) - o; - return (a * a * z); - } - - template - GLM_FUNC_QUALIFIER genType backEaseOut(genType const& a, genType const& o) - { - // Only defined in [0, 1] - assert(a >= zero()); - assert(a <= one()); - - genType n = a - one(); - genType z = ((o + one()) * n) + o; - return (n * n * z) + one(); - } - - template - GLM_FUNC_QUALIFIER genType backEaseInOut(genType const& a, genType const& o) - { - // Only defined in [0, 1] - assert(a >= zero()); - assert(a <= one()); - - genType s = o * static_cast(1.525); - genType x = static_cast(0.5); - genType n = a / static_cast(0.5); - - if (n < static_cast(1)) - { - genType z = ((s + static_cast(1)) * n) - s; - genType m = n * n * z; - return x * m; - } - else - { - n -= static_cast(2); - genType z = ((s + static_cast(1)) * n) + s; - genType m = (n*n*z) + static_cast(2); - return x * m; - } - } - - template - GLM_FUNC_QUALIFIER genType backEaseIn(genType const& a) - { - return backEaseIn(a, static_cast(1.70158)); - } - - template - GLM_FUNC_QUALIFIER genType backEaseOut(genType const& a) - { - return backEaseOut(a, static_cast(1.70158)); - } - - template - GLM_FUNC_QUALIFIER genType backEaseInOut(genType const& a) - { - return backEaseInOut(a, static_cast(1.70158)); - } - - template - GLM_FUNC_QUALIFIER genType bounceEaseOut(genType const& a) - { - // Only defined in [0, 1] - assert(a >= zero()); - assert(a <= one()); - - if(a < static_cast(4.0 / 11.0)) - { - return (static_cast(121) * a * a) / static_cast(16); - } - else if(a < static_cast(8.0 / 11.0)) - { - return (static_cast(363.0 / 40.0) * a * a) - (static_cast(99.0 / 10.0) * a) + static_cast(17.0 / 5.0); - } - else if(a < static_cast(9.0 / 10.0)) - { - return (static_cast(4356.0 / 361.0) * a * a) - (static_cast(35442.0 / 1805.0) * a) + static_cast(16061.0 / 1805.0); - } - else - { - return (static_cast(54.0 / 5.0) * a * a) - (static_cast(513.0 / 25.0) * a) + static_cast(268.0 / 25.0); - } - } - - template - GLM_FUNC_QUALIFIER genType bounceEaseIn(genType const& a) - { - // Only defined in [0, 1] - assert(a >= zero()); - assert(a <= one()); - - return one() - bounceEaseOut(one() - a); - } - - template - GLM_FUNC_QUALIFIER genType bounceEaseInOut(genType const& a) - { - // Only defined in [0, 1] - assert(a >= zero()); - assert(a <= one()); - - if(a < static_cast(0.5)) - { - return static_cast(0.5) * (one() - bounceEaseOut(a * static_cast(2))); - } - else - { - return static_cast(0.5) * bounceEaseOut(a * static_cast(2) - one()) + static_cast(0.5); - } - } - -}//namespace glm diff --git a/third_party/glm/gtx/euler_angles.hpp b/third_party/glm/gtx/euler_angles.hpp deleted file mode 100755 index 2723697..0000000 --- a/third_party/glm/gtx/euler_angles.hpp +++ /dev/null @@ -1,335 +0,0 @@ -/// @ref gtx_euler_angles -/// @file glm/gtx/euler_angles.hpp -/// -/// @see core (dependence) -/// -/// @defgroup gtx_euler_angles GLM_GTX_euler_angles -/// @ingroup gtx -/// -/// Include to use the features of this extension. -/// -/// Build matrices from Euler angles. -/// -/// Extraction of Euler angles from rotation matrix. -/// Based on the original paper 2014 Mike Day - Extracting Euler Angles from a Rotation Matrix. - -#pragma once - -// Dependency: -#include "../glm.hpp" - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# ifndef GLM_ENABLE_EXPERIMENTAL -# pragma message("GLM: GLM_GTX_euler_angles is an experimental extension and may change in the future. 
Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.") -# else -# pragma message("GLM: GLM_GTX_euler_angles extension included") -# endif -#endif - -namespace glm -{ - /// @addtogroup gtx_euler_angles - /// @{ - - /// Creates a 3D 4 * 4 homogeneous rotation matrix from an euler angle X. - /// @see gtx_euler_angles - template - GLM_FUNC_DECL mat<4, 4, T, defaultp> eulerAngleX( - T const& angleX); - - /// Creates a 3D 4 * 4 homogeneous rotation matrix from an euler angle Y. - /// @see gtx_euler_angles - template - GLM_FUNC_DECL mat<4, 4, T, defaultp> eulerAngleY( - T const& angleY); - - /// Creates a 3D 4 * 4 homogeneous rotation matrix from an euler angle Z. - /// @see gtx_euler_angles - template - GLM_FUNC_DECL mat<4, 4, T, defaultp> eulerAngleZ( - T const& angleZ); - - /// Creates a 3D 4 * 4 homogeneous derived matrix from the rotation matrix about X-axis. - /// @see gtx_euler_angles - template - GLM_FUNC_DECL mat<4, 4, T, defaultp> derivedEulerAngleX( - T const & angleX, T const & angularVelocityX); - - /// Creates a 3D 4 * 4 homogeneous derived matrix from the rotation matrix about Y-axis. - /// @see gtx_euler_angles - template - GLM_FUNC_DECL mat<4, 4, T, defaultp> derivedEulerAngleY( - T const & angleY, T const & angularVelocityY); - - /// Creates a 3D 4 * 4 homogeneous derived matrix from the rotation matrix about Z-axis. - /// @see gtx_euler_angles - template - GLM_FUNC_DECL mat<4, 4, T, defaultp> derivedEulerAngleZ( - T const & angleZ, T const & angularVelocityZ); - - /// Creates a 3D 4 * 4 homogeneous rotation matrix from euler angles (X * Y). - /// @see gtx_euler_angles - template - GLM_FUNC_DECL mat<4, 4, T, defaultp> eulerAngleXY( - T const& angleX, - T const& angleY); - - /// Creates a 3D 4 * 4 homogeneous rotation matrix from euler angles (Y * X). - /// @see gtx_euler_angles - template - GLM_FUNC_DECL mat<4, 4, T, defaultp> eulerAngleYX( - T const& angleY, - T const& angleX); - - /// Creates a 3D 4 * 4 homogeneous rotation matrix from euler angles (X * Z). - /// @see gtx_euler_angles - template - GLM_FUNC_DECL mat<4, 4, T, defaultp> eulerAngleXZ( - T const& angleX, - T const& angleZ); - - /// Creates a 3D 4 * 4 homogeneous rotation matrix from euler angles (Z * X). - /// @see gtx_euler_angles - template - GLM_FUNC_DECL mat<4, 4, T, defaultp> eulerAngleZX( - T const& angle, - T const& angleX); - - /// Creates a 3D 4 * 4 homogeneous rotation matrix from euler angles (Y * Z). - /// @see gtx_euler_angles - template - GLM_FUNC_DECL mat<4, 4, T, defaultp> eulerAngleYZ( - T const& angleY, - T const& angleZ); - - /// Creates a 3D 4 * 4 homogeneous rotation matrix from euler angles (Z * Y). - /// @see gtx_euler_angles - template - GLM_FUNC_DECL mat<4, 4, T, defaultp> eulerAngleZY( - T const& angleZ, - T const& angleY); - - /// Creates a 3D 4 * 4 homogeneous rotation matrix from euler angles (X * Y * Z). - /// @see gtx_euler_angles - template - GLM_FUNC_DECL mat<4, 4, T, defaultp> eulerAngleXYZ( - T const& t1, - T const& t2, - T const& t3); - - /// Creates a 3D 4 * 4 homogeneous rotation matrix from euler angles (Y * X * Z). - /// @see gtx_euler_angles - template - GLM_FUNC_DECL mat<4, 4, T, defaultp> eulerAngleYXZ( - T const& yaw, - T const& pitch, - T const& roll); - - /// Creates a 3D 4 * 4 homogeneous rotation matrix from euler angles (X * Z * X). 
- /// @see gtx_euler_angles - template - GLM_FUNC_DECL mat<4, 4, T, defaultp> eulerAngleXZX( - T const & t1, - T const & t2, - T const & t3); - - /// Creates a 3D 4 * 4 homogeneous rotation matrix from euler angles (X * Y * X). - /// @see gtx_euler_angles - template - GLM_FUNC_DECL mat<4, 4, T, defaultp> eulerAngleXYX( - T const & t1, - T const & t2, - T const & t3); - - /// Creates a 3D 4 * 4 homogeneous rotation matrix from euler angles (Y * X * Y). - /// @see gtx_euler_angles - template - GLM_FUNC_DECL mat<4, 4, T, defaultp> eulerAngleYXY( - T const & t1, - T const & t2, - T const & t3); - - /// Creates a 3D 4 * 4 homogeneous rotation matrix from euler angles (Y * Z * Y). - /// @see gtx_euler_angles - template - GLM_FUNC_DECL mat<4, 4, T, defaultp> eulerAngleYZY( - T const & t1, - T const & t2, - T const & t3); - - /// Creates a 3D 4 * 4 homogeneous rotation matrix from euler angles (Z * Y * Z). - /// @see gtx_euler_angles - template - GLM_FUNC_DECL mat<4, 4, T, defaultp> eulerAngleZYZ( - T const & t1, - T const & t2, - T const & t3); - - /// Creates a 3D 4 * 4 homogeneous rotation matrix from euler angles (Z * X * Z). - /// @see gtx_euler_angles - template - GLM_FUNC_DECL mat<4, 4, T, defaultp> eulerAngleZXZ( - T const & t1, - T const & t2, - T const & t3); - - /// Creates a 3D 4 * 4 homogeneous rotation matrix from euler angles (X * Z * Y). - /// @see gtx_euler_angles - template - GLM_FUNC_DECL mat<4, 4, T, defaultp> eulerAngleXZY( - T const & t1, - T const & t2, - T const & t3); - - /// Creates a 3D 4 * 4 homogeneous rotation matrix from euler angles (Y * Z * X). - /// @see gtx_euler_angles - template - GLM_FUNC_DECL mat<4, 4, T, defaultp> eulerAngleYZX( - T const & t1, - T const & t2, - T const & t3); - - /// Creates a 3D 4 * 4 homogeneous rotation matrix from euler angles (Z * Y * X). - /// @see gtx_euler_angles - template - GLM_FUNC_DECL mat<4, 4, T, defaultp> eulerAngleZYX( - T const & t1, - T const & t2, - T const & t3); - - /// Creates a 3D 4 * 4 homogeneous rotation matrix from euler angles (Z * X * Y). - /// @see gtx_euler_angles - template - GLM_FUNC_DECL mat<4, 4, T, defaultp> eulerAngleZXY( - T const & t1, - T const & t2, - T const & t3); - - /// Creates a 3D 4 * 4 homogeneous rotation matrix from euler angles (Y * X * Z). - /// @see gtx_euler_angles - template - GLM_FUNC_DECL mat<4, 4, T, defaultp> yawPitchRoll( - T const& yaw, - T const& pitch, - T const& roll); - - /// Creates a 2D 2 * 2 rotation matrix from an euler angle. - /// @see gtx_euler_angles - template - GLM_FUNC_DECL mat<2, 2, T, defaultp> orientate2(T const& angle); - - /// Creates a 2D 4 * 4 homogeneous rotation matrix from an euler angle. - /// @see gtx_euler_angles - template - GLM_FUNC_DECL mat<3, 3, T, defaultp> orientate3(T const& angle); - - /// Creates a 3D 3 * 3 rotation matrix from euler angles (Y * X * Z). - /// @see gtx_euler_angles - template - GLM_FUNC_DECL mat<3, 3, T, Q> orientate3(vec<3, T, Q> const& angles); - - /// Creates a 3D 4 * 4 homogeneous rotation matrix from euler angles (Y * X * Z). 
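// [Editor's note -- illustrative sketch, not part of the deleted GLM sources.]
// The GLM_GTX_euler_angles builders above turn Euler angles into rotation matrices (the
// extractEulerAngle* helpers declared just below perform the inverse). Hedged example with
// a made-up helper name; assumes GLM is available and GLM_ENABLE_EXPERIMENTAL is defined.
#define GLM_ENABLE_EXPERIMENTAL
#include <glm/glm.hpp>
#include <glm/gtx/euler_angles.hpp>

static glm::mat4 camera_rotation(float yaw, float pitch, float roll)
{
	// yawPitchRoll composes the same Y * X * Z rotation as eulerAngleYXZ(yaw, pitch, roll)
	return glm::yawPitchRoll(yaw, pitch, roll);
}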
- /// @see gtx_euler_angles - template - GLM_FUNC_DECL mat<4, 4, T, Q> orientate4(vec<3, T, Q> const& angles); - - /// Extracts the (X * Y * Z) Euler angles from the rotation matrix M - /// @see gtx_euler_angles - template - GLM_FUNC_DECL void extractEulerAngleXYZ(mat<4, 4, T, defaultp> const& M, - T & t1, - T & t2, - T & t3); - - /// Extracts the (Y * X * Z) Euler angles from the rotation matrix M - /// @see gtx_euler_angles - template - GLM_FUNC_DECL void extractEulerAngleYXZ(mat<4, 4, T, defaultp> const & M, - T & t1, - T & t2, - T & t3); - - /// Extracts the (X * Z * X) Euler angles from the rotation matrix M - /// @see gtx_euler_angles - template - GLM_FUNC_DECL void extractEulerAngleXZX(mat<4, 4, T, defaultp> const & M, - T & t1, - T & t2, - T & t3); - - /// Extracts the (X * Y * X) Euler angles from the rotation matrix M - /// @see gtx_euler_angles - template - GLM_FUNC_DECL void extractEulerAngleXYX(mat<4, 4, T, defaultp> const & M, - T & t1, - T & t2, - T & t3); - - /// Extracts the (Y * X * Y) Euler angles from the rotation matrix M - /// @see gtx_euler_angles - template - GLM_FUNC_DECL void extractEulerAngleYXY(mat<4, 4, T, defaultp> const & M, - T & t1, - T & t2, - T & t3); - - /// Extracts the (Y * Z * Y) Euler angles from the rotation matrix M - /// @see gtx_euler_angles - template - GLM_FUNC_DECL void extractEulerAngleYZY(mat<4, 4, T, defaultp> const & M, - T & t1, - T & t2, - T & t3); - - /// Extracts the (Z * Y * Z) Euler angles from the rotation matrix M - /// @see gtx_euler_angles - template - GLM_FUNC_DECL void extractEulerAngleZYZ(mat<4, 4, T, defaultp> const & M, - T & t1, - T & t2, - T & t3); - - /// Extracts the (Z * X * Z) Euler angles from the rotation matrix M - /// @see gtx_euler_angles - template - GLM_FUNC_DECL void extractEulerAngleZXZ(mat<4, 4, T, defaultp> const & M, - T & t1, - T & t2, - T & t3); - - /// Extracts the (X * Z * Y) Euler angles from the rotation matrix M - /// @see gtx_euler_angles - template - GLM_FUNC_DECL void extractEulerAngleXZY(mat<4, 4, T, defaultp> const & M, - T & t1, - T & t2, - T & t3); - - /// Extracts the (Y * Z * X) Euler angles from the rotation matrix M - /// @see gtx_euler_angles - template - GLM_FUNC_DECL void extractEulerAngleYZX(mat<4, 4, T, defaultp> const & M, - T & t1, - T & t2, - T & t3); - - /// Extracts the (Z * Y * X) Euler angles from the rotation matrix M - /// @see gtx_euler_angles - template - GLM_FUNC_DECL void extractEulerAngleZYX(mat<4, 4, T, defaultp> const & M, - T & t1, - T & t2, - T & t3); - - /// Extracts the (Z * X * Y) Euler angles from the rotation matrix M - /// @see gtx_euler_angles - template - GLM_FUNC_DECL void extractEulerAngleZXY(mat<4, 4, T, defaultp> const & M, - T & t1, - T & t2, - T & t3); - - /// @} -}//namespace glm - -#include "euler_angles.inl" diff --git a/third_party/glm/gtx/euler_angles.inl b/third_party/glm/gtx/euler_angles.inl deleted file mode 100755 index 68c5012..0000000 --- a/third_party/glm/gtx/euler_angles.inl +++ /dev/null @@ -1,899 +0,0 @@ -/// @ref gtx_euler_angles - -#include "compatibility.hpp" // glm::atan2 - -namespace glm -{ - template - GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> eulerAngleX - ( - T const& angleX - ) - { - T cosX = glm::cos(angleX); - T sinX = glm::sin(angleX); - - return mat<4, 4, T, defaultp>( - T(1), T(0), T(0), T(0), - T(0), cosX, sinX, T(0), - T(0),-sinX, cosX, T(0), - T(0), T(0), T(0), T(1)); - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> eulerAngleY - ( - T const& angleY - ) - { - T cosY = glm::cos(angleY); - T sinY = 
glm::sin(angleY); - - return mat<4, 4, T, defaultp>( - cosY, T(0), -sinY, T(0), - T(0), T(1), T(0), T(0), - sinY, T(0), cosY, T(0), - T(0), T(0), T(0), T(1)); - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> eulerAngleZ - ( - T const& angleZ - ) - { - T cosZ = glm::cos(angleZ); - T sinZ = glm::sin(angleZ); - - return mat<4, 4, T, defaultp>( - cosZ, sinZ, T(0), T(0), - -sinZ, cosZ, T(0), T(0), - T(0), T(0), T(1), T(0), - T(0), T(0), T(0), T(1)); - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> derivedEulerAngleX - ( - T const & angleX, - T const & angularVelocityX - ) - { - T cosX = glm::cos(angleX) * angularVelocityX; - T sinX = glm::sin(angleX) * angularVelocityX; - - return mat<4, 4, T, defaultp>( - T(0), T(0), T(0), T(0), - T(0),-sinX, cosX, T(0), - T(0),-cosX,-sinX, T(0), - T(0), T(0), T(0), T(0)); - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> derivedEulerAngleY - ( - T const & angleY, - T const & angularVelocityY - ) - { - T cosY = glm::cos(angleY) * angularVelocityY; - T sinY = glm::sin(angleY) * angularVelocityY; - - return mat<4, 4, T, defaultp>( - -sinY, T(0), -cosY, T(0), - T(0), T(0), T(0), T(0), - cosY, T(0), -sinY, T(0), - T(0), T(0), T(0), T(0)); - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> derivedEulerAngleZ - ( - T const & angleZ, - T const & angularVelocityZ - ) - { - T cosZ = glm::cos(angleZ) * angularVelocityZ; - T sinZ = glm::sin(angleZ) * angularVelocityZ; - - return mat<4, 4, T, defaultp>( - -sinZ, cosZ, T(0), T(0), - -cosZ, -sinZ, T(0), T(0), - T(0), T(0), T(0), T(0), - T(0), T(0), T(0), T(0)); - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> eulerAngleXY - ( - T const& angleX, - T const& angleY - ) - { - T cosX = glm::cos(angleX); - T sinX = glm::sin(angleX); - T cosY = glm::cos(angleY); - T sinY = glm::sin(angleY); - - return mat<4, 4, T, defaultp>( - cosY, -sinX * -sinY, cosX * -sinY, T(0), - T(0), cosX, sinX, T(0), - sinY, -sinX * cosY, cosX * cosY, T(0), - T(0), T(0), T(0), T(1)); - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> eulerAngleYX - ( - T const& angleY, - T const& angleX - ) - { - T cosX = glm::cos(angleX); - T sinX = glm::sin(angleX); - T cosY = glm::cos(angleY); - T sinY = glm::sin(angleY); - - return mat<4, 4, T, defaultp>( - cosY, 0, -sinY, T(0), - sinY * sinX, cosX, cosY * sinX, T(0), - sinY * cosX, -sinX, cosY * cosX, T(0), - T(0), T(0), T(0), T(1)); - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> eulerAngleXZ - ( - T const& angleX, - T const& angleZ - ) - { - return eulerAngleX(angleX) * eulerAngleZ(angleZ); - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> eulerAngleZX - ( - T const& angleZ, - T const& angleX - ) - { - return eulerAngleZ(angleZ) * eulerAngleX(angleX); - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> eulerAngleYZ - ( - T const& angleY, - T const& angleZ - ) - { - return eulerAngleY(angleY) * eulerAngleZ(angleZ); - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> eulerAngleZY - ( - T const& angleZ, - T const& angleY - ) - { - return eulerAngleZ(angleZ) * eulerAngleY(angleY); - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> eulerAngleXYZ - ( - T const& t1, - T const& t2, - T const& t3 - ) - { - T c1 = glm::cos(-t1); - T c2 = glm::cos(-t2); - T c3 = glm::cos(-t3); - T s1 = glm::sin(-t1); - T s2 = glm::sin(-t2); - T s3 = glm::sin(-t3); - - mat<4, 4, T, defaultp> Result; - Result[0][0] = c2 * c3; - Result[0][1] =-c1 * s3 + s1 * s2 * c3; - Result[0][2] = s1 * s3 + c1 * s2 * c3; - Result[0][3] = 
static_cast(0); - Result[1][0] = c2 * s3; - Result[1][1] = c1 * c3 + s1 * s2 * s3; - Result[1][2] =-s1 * c3 + c1 * s2 * s3; - Result[1][3] = static_cast(0); - Result[2][0] =-s2; - Result[2][1] = s1 * c2; - Result[2][2] = c1 * c2; - Result[2][3] = static_cast(0); - Result[3][0] = static_cast(0); - Result[3][1] = static_cast(0); - Result[3][2] = static_cast(0); - Result[3][3] = static_cast(1); - return Result; - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> eulerAngleYXZ - ( - T const& yaw, - T const& pitch, - T const& roll - ) - { - T tmp_ch = glm::cos(yaw); - T tmp_sh = glm::sin(yaw); - T tmp_cp = glm::cos(pitch); - T tmp_sp = glm::sin(pitch); - T tmp_cb = glm::cos(roll); - T tmp_sb = glm::sin(roll); - - mat<4, 4, T, defaultp> Result; - Result[0][0] = tmp_ch * tmp_cb + tmp_sh * tmp_sp * tmp_sb; - Result[0][1] = tmp_sb * tmp_cp; - Result[0][2] = -tmp_sh * tmp_cb + tmp_ch * tmp_sp * tmp_sb; - Result[0][3] = static_cast(0); - Result[1][0] = -tmp_ch * tmp_sb + tmp_sh * tmp_sp * tmp_cb; - Result[1][1] = tmp_cb * tmp_cp; - Result[1][2] = tmp_sb * tmp_sh + tmp_ch * tmp_sp * tmp_cb; - Result[1][3] = static_cast(0); - Result[2][0] = tmp_sh * tmp_cp; - Result[2][1] = -tmp_sp; - Result[2][2] = tmp_ch * tmp_cp; - Result[2][3] = static_cast(0); - Result[3][0] = static_cast(0); - Result[3][1] = static_cast(0); - Result[3][2] = static_cast(0); - Result[3][3] = static_cast(1); - return Result; - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> eulerAngleXZX - ( - T const & t1, - T const & t2, - T const & t3 - ) - { - T c1 = glm::cos(t1); - T s1 = glm::sin(t1); - T c2 = glm::cos(t2); - T s2 = glm::sin(t2); - T c3 = glm::cos(t3); - T s3 = glm::sin(t3); - - mat<4, 4, T, defaultp> Result; - Result[0][0] = c2; - Result[0][1] = c1 * s2; - Result[0][2] = s1 * s2; - Result[0][3] = static_cast(0); - Result[1][0] =-c3 * s2; - Result[1][1] = c1 * c2 * c3 - s1 * s3; - Result[1][2] = c1 * s3 + c2 * c3 * s1; - Result[1][3] = static_cast(0); - Result[2][0] = s2 * s3; - Result[2][1] =-c3 * s1 - c1 * c2 * s3; - Result[2][2] = c1 * c3 - c2 * s1 * s3; - Result[2][3] = static_cast(0); - Result[3][0] = static_cast(0); - Result[3][1] = static_cast(0); - Result[3][2] = static_cast(0); - Result[3][3] = static_cast(1); - return Result; - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> eulerAngleXYX - ( - T const & t1, - T const & t2, - T const & t3 - ) - { - T c1 = glm::cos(t1); - T s1 = glm::sin(t1); - T c2 = glm::cos(t2); - T s2 = glm::sin(t2); - T c3 = glm::cos(t3); - T s3 = glm::sin(t3); - - mat<4, 4, T, defaultp> Result; - Result[0][0] = c2; - Result[0][1] = s1 * s2; - Result[0][2] =-c1 * s2; - Result[0][3] = static_cast(0); - Result[1][0] = s2 * s3; - Result[1][1] = c1 * c3 - c2 * s1 * s3; - Result[1][2] = c3 * s1 + c1 * c2 * s3; - Result[1][3] = static_cast(0); - Result[2][0] = c3 * s2; - Result[2][1] =-c1 * s3 - c2 * c3 * s1; - Result[2][2] = c1 * c2 * c3 - s1 * s3; - Result[2][3] = static_cast(0); - Result[3][0] = static_cast(0); - Result[3][1] = static_cast(0); - Result[3][2] = static_cast(0); - Result[3][3] = static_cast(1); - return Result; - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> eulerAngleYXY - ( - T const & t1, - T const & t2, - T const & t3 - ) - { - T c1 = glm::cos(t1); - T s1 = glm::sin(t1); - T c2 = glm::cos(t2); - T s2 = glm::sin(t2); - T c3 = glm::cos(t3); - T s3 = glm::sin(t3); - - mat<4, 4, T, defaultp> Result; - Result[0][0] = c1 * c3 - c2 * s1 * s3; - Result[0][1] = s2* s3; - Result[0][2] =-c3 * s1 - c1 * c2 * s3; - Result[0][3] = static_cast(0); - 
Result[1][0] = s1 * s2; - Result[1][1] = c2; - Result[1][2] = c1 * s2; - Result[1][3] = static_cast(0); - Result[2][0] = c1 * s3 + c2 * c3 * s1; - Result[2][1] =-c3 * s2; - Result[2][2] = c1 * c2 * c3 - s1 * s3; - Result[2][3] = static_cast(0); - Result[3][0] = static_cast(0); - Result[3][1] = static_cast(0); - Result[3][2] = static_cast(0); - Result[3][3] = static_cast(1); - return Result; - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> eulerAngleYZY - ( - T const & t1, - T const & t2, - T const & t3 - ) - { - T c1 = glm::cos(t1); - T s1 = glm::sin(t1); - T c2 = glm::cos(t2); - T s2 = glm::sin(t2); - T c3 = glm::cos(t3); - T s3 = glm::sin(t3); - - mat<4, 4, T, defaultp> Result; - Result[0][0] = c1 * c2 * c3 - s1 * s3; - Result[0][1] = c3 * s2; - Result[0][2] =-c1 * s3 - c2 * c3 * s1; - Result[0][3] = static_cast(0); - Result[1][0] =-c1 * s2; - Result[1][1] = c2; - Result[1][2] = s1 * s2; - Result[1][3] = static_cast(0); - Result[2][0] = c3 * s1 + c1 * c2 * s3; - Result[2][1] = s2 * s3; - Result[2][2] = c1 * c3 - c2 * s1 * s3; - Result[2][3] = static_cast(0); - Result[3][0] = static_cast(0); - Result[3][1] = static_cast(0); - Result[3][2] = static_cast(0); - Result[3][3] = static_cast(1); - return Result; - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> eulerAngleZYZ - ( - T const & t1, - T const & t2, - T const & t3 - ) - { - T c1 = glm::cos(t1); - T s1 = glm::sin(t1); - T c2 = glm::cos(t2); - T s2 = glm::sin(t2); - T c3 = glm::cos(t3); - T s3 = glm::sin(t3); - - mat<4, 4, T, defaultp> Result; - Result[0][0] = c1 * c2 * c3 - s1 * s3; - Result[0][1] = c1 * s3 + c2 * c3 * s1; - Result[0][2] =-c3 * s2; - Result[0][3] = static_cast(0); - Result[1][0] =-c3 * s1 - c1 * c2 * s3; - Result[1][1] = c1 * c3 - c2 * s1 * s3; - Result[1][2] = s2 * s3; - Result[1][3] = static_cast(0); - Result[2][0] = c1 * s2; - Result[2][1] = s1 * s2; - Result[2][2] = c2; - Result[2][3] = static_cast(0); - Result[3][0] = static_cast(0); - Result[3][1] = static_cast(0); - Result[3][2] = static_cast(0); - Result[3][3] = static_cast(1); - return Result; - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> eulerAngleZXZ - ( - T const & t1, - T const & t2, - T const & t3 - ) - { - T c1 = glm::cos(t1); - T s1 = glm::sin(t1); - T c2 = glm::cos(t2); - T s2 = glm::sin(t2); - T c3 = glm::cos(t3); - T s3 = glm::sin(t3); - - mat<4, 4, T, defaultp> Result; - Result[0][0] = c1 * c3 - c2 * s1 * s3; - Result[0][1] = c3 * s1 + c1 * c2 * s3; - Result[0][2] = s2 *s3; - Result[0][3] = static_cast(0); - Result[1][0] =-c1 * s3 - c2 * c3 * s1; - Result[1][1] = c1 * c2 * c3 - s1 * s3; - Result[1][2] = c3 * s2; - Result[1][3] = static_cast(0); - Result[2][0] = s1 * s2; - Result[2][1] =-c1 * s2; - Result[2][2] = c2; - Result[2][3] = static_cast(0); - Result[3][0] = static_cast(0); - Result[3][1] = static_cast(0); - Result[3][2] = static_cast(0); - Result[3][3] = static_cast(1); - return Result; - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> eulerAngleXZY - ( - T const & t1, - T const & t2, - T const & t3 - ) - { - T c1 = glm::cos(t1); - T s1 = glm::sin(t1); - T c2 = glm::cos(t2); - T s2 = glm::sin(t2); - T c3 = glm::cos(t3); - T s3 = glm::sin(t3); - - mat<4, 4, T, defaultp> Result; - Result[0][0] = c2 * c3; - Result[0][1] = s1 * s3 + c1 * c3 * s2; - Result[0][2] = c3 * s1 * s2 - c1 * s3; - Result[0][3] = static_cast(0); - Result[1][0] =-s2; - Result[1][1] = c1 * c2; - Result[1][2] = c2 * s1; - Result[1][3] = static_cast(0); - Result[2][0] = c2 * s3; - Result[2][1] = c1 * s2 * s3 - c3 * s1; - 
Result[2][2] = c1 * c3 + s1 * s2 *s3; - Result[2][3] = static_cast(0); - Result[3][0] = static_cast(0); - Result[3][1] = static_cast(0); - Result[3][2] = static_cast(0); - Result[3][3] = static_cast(1); - return Result; - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> eulerAngleYZX - ( - T const & t1, - T const & t2, - T const & t3 - ) - { - T c1 = glm::cos(t1); - T s1 = glm::sin(t1); - T c2 = glm::cos(t2); - T s2 = glm::sin(t2); - T c3 = glm::cos(t3); - T s3 = glm::sin(t3); - - mat<4, 4, T, defaultp> Result; - Result[0][0] = c1 * c2; - Result[0][1] = s2; - Result[0][2] =-c2 * s1; - Result[0][3] = static_cast(0); - Result[1][0] = s1 * s3 - c1 * c3 * s2; - Result[1][1] = c2 * c3; - Result[1][2] = c1 * s3 + c3 * s1 * s2; - Result[1][3] = static_cast(0); - Result[2][0] = c3 * s1 + c1 * s2 * s3; - Result[2][1] =-c2 * s3; - Result[2][2] = c1 * c3 - s1 * s2 * s3; - Result[2][3] = static_cast(0); - Result[3][0] = static_cast(0); - Result[3][1] = static_cast(0); - Result[3][2] = static_cast(0); - Result[3][3] = static_cast(1); - return Result; - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> eulerAngleZYX - ( - T const & t1, - T const & t2, - T const & t3 - ) - { - T c1 = glm::cos(t1); - T s1 = glm::sin(t1); - T c2 = glm::cos(t2); - T s2 = glm::sin(t2); - T c3 = glm::cos(t3); - T s3 = glm::sin(t3); - - mat<4, 4, T, defaultp> Result; - Result[0][0] = c1 * c2; - Result[0][1] = c2 * s1; - Result[0][2] =-s2; - Result[0][3] = static_cast(0); - Result[1][0] = c1 * s2 * s3 - c3 * s1; - Result[1][1] = c1 * c3 + s1 * s2 * s3; - Result[1][2] = c2 * s3; - Result[1][3] = static_cast(0); - Result[2][0] = s1 * s3 + c1 * c3 * s2; - Result[2][1] = c3 * s1 * s2 - c1 * s3; - Result[2][2] = c2 * c3; - Result[2][3] = static_cast(0); - Result[3][0] = static_cast(0); - Result[3][1] = static_cast(0); - Result[3][2] = static_cast(0); - Result[3][3] = static_cast(1); - return Result; - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> eulerAngleZXY - ( - T const & t1, - T const & t2, - T const & t3 - ) - { - T c1 = glm::cos(t1); - T s1 = glm::sin(t1); - T c2 = glm::cos(t2); - T s2 = glm::sin(t2); - T c3 = glm::cos(t3); - T s3 = glm::sin(t3); - - mat<4, 4, T, defaultp> Result; - Result[0][0] = c1 * c3 - s1 * s2 * s3; - Result[0][1] = c3 * s1 + c1 * s2 * s3; - Result[0][2] =-c2 * s3; - Result[0][3] = static_cast(0); - Result[1][0] =-c2 * s1; - Result[1][1] = c1 * c2; - Result[1][2] = s2; - Result[1][3] = static_cast(0); - Result[2][0] = c1 * s3 + c3 * s1 * s2; - Result[2][1] = s1 * s3 - c1 * c3 * s2; - Result[2][2] = c2 * c3; - Result[2][3] = static_cast(0); - Result[3][0] = static_cast(0); - Result[3][1] = static_cast(0); - Result[3][2] = static_cast(0); - Result[3][3] = static_cast(1); - return Result; - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, defaultp> yawPitchRoll - ( - T const& yaw, - T const& pitch, - T const& roll - ) - { - T tmp_ch = glm::cos(yaw); - T tmp_sh = glm::sin(yaw); - T tmp_cp = glm::cos(pitch); - T tmp_sp = glm::sin(pitch); - T tmp_cb = glm::cos(roll); - T tmp_sb = glm::sin(roll); - - mat<4, 4, T, defaultp> Result; - Result[0][0] = tmp_ch * tmp_cb + tmp_sh * tmp_sp * tmp_sb; - Result[0][1] = tmp_sb * tmp_cp; - Result[0][2] = -tmp_sh * tmp_cb + tmp_ch * tmp_sp * tmp_sb; - Result[0][3] = static_cast(0); - Result[1][0] = -tmp_ch * tmp_sb + tmp_sh * tmp_sp * tmp_cb; - Result[1][1] = tmp_cb * tmp_cp; - Result[1][2] = tmp_sb * tmp_sh + tmp_ch * tmp_sp * tmp_cb; - Result[1][3] = static_cast(0); - Result[2][0] = tmp_sh * tmp_cp; - Result[2][1] = -tmp_sp; - Result[2][2] = 
tmp_ch * tmp_cp; - Result[2][3] = static_cast(0); - Result[3][0] = static_cast(0); - Result[3][1] = static_cast(0); - Result[3][2] = static_cast(0); - Result[3][3] = static_cast(1); - return Result; - } - - template - GLM_FUNC_QUALIFIER mat<2, 2, T, defaultp> orientate2 - ( - T const& angle - ) - { - T c = glm::cos(angle); - T s = glm::sin(angle); - - mat<2, 2, T, defaultp> Result; - Result[0][0] = c; - Result[0][1] = s; - Result[1][0] = -s; - Result[1][1] = c; - return Result; - } - - template - GLM_FUNC_QUALIFIER mat<3, 3, T, defaultp> orientate3 - ( - T const& angle - ) - { - T c = glm::cos(angle); - T s = glm::sin(angle); - - mat<3, 3, T, defaultp> Result; - Result[0][0] = c; - Result[0][1] = s; - Result[0][2] = 0.0f; - Result[1][0] = -s; - Result[1][1] = c; - Result[1][2] = 0.0f; - Result[2][0] = 0.0f; - Result[2][1] = 0.0f; - Result[2][2] = 1.0f; - return Result; - } - - template - GLM_FUNC_QUALIFIER mat<3, 3, T, Q> orientate3 - ( - vec<3, T, Q> const& angles - ) - { - return mat<3, 3, T, Q>(yawPitchRoll(angles.z, angles.x, angles.y)); - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, Q> orientate4 - ( - vec<3, T, Q> const& angles - ) - { - return yawPitchRoll(angles.z, angles.x, angles.y); - } - - template - GLM_FUNC_DECL void extractEulerAngleXYZ(mat<4, 4, T, defaultp> const& M, - T & t1, - T & t2, - T & t3) - { - T T1 = glm::atan2(M[2][1], M[2][2]); - T C2 = glm::sqrt(M[0][0]*M[0][0] + M[1][0]*M[1][0]); - T T2 = glm::atan2(-M[2][0], C2); - T S1 = glm::sin(T1); - T C1 = glm::cos(T1); - T T3 = glm::atan2(S1*M[0][2] - C1*M[0][1], C1*M[1][1] - S1*M[1][2 ]); - t1 = -T1; - t2 = -T2; - t3 = -T3; - } - - template - GLM_FUNC_QUALIFIER void extractEulerAngleYXZ(mat<4, 4, T, defaultp> const & M, - T & t1, - T & t2, - T & t3) - { - T T1 = glm::atan2(M[2][0], M[2][2]); - T C2 = glm::sqrt(M[0][1]*M[0][1] + M[1][1]*M[1][1]); - T T2 = glm::atan2(-M[2][1], C2); - T S1 = glm::sin(T1); - T C1 = glm::cos(T1); - T T3 = glm::atan2(S1*M[1][2] - C1*M[1][0], C1*M[0][0] - S1*M[0][2]); - t1 = T1; - t2 = T2; - t3 = T3; - } - - template - GLM_FUNC_QUALIFIER void extractEulerAngleXZX(mat<4, 4, T, defaultp> const & M, - T & t1, - T & t2, - T & t3) - { - T T1 = glm::atan2(M[0][2], M[0][1]); - T S2 = glm::sqrt(M[1][0]*M[1][0] + M[2][0]*M[2][0]); - T T2 = glm::atan2(S2, M[0][0]); - T S1 = glm::sin(T1); - T C1 = glm::cos(T1); - T T3 = glm::atan2(C1*M[1][2] - S1*M[1][1], C1*M[2][2] - S1*M[2][1]); - t1 = T1; - t2 = T2; - t3 = T3; - } - - template - GLM_FUNC_QUALIFIER void extractEulerAngleXYX(mat<4, 4, T, defaultp> const & M, - T & t1, - T & t2, - T & t3) - { - T T1 = glm::atan2(M[0][1], -M[0][2]); - T S2 = glm::sqrt(M[1][0]*M[1][0] + M[2][0]*M[2][0]); - T T2 = glm::atan2(S2, M[0][0]); - T S1 = glm::sin(T1); - T C1 = glm::cos(T1); - T T3 = glm::atan2(-C1*M[2][1] - S1*M[2][2], C1*M[1][1] + S1*M[1][2]); - t1 = T1; - t2 = T2; - t3 = T3; - } - - template - GLM_FUNC_QUALIFIER void extractEulerAngleYXY(mat<4, 4, T, defaultp> const & M, - T & t1, - T & t2, - T & t3) - { - T T1 = glm::atan2(M[1][0], M[1][2]); - T S2 = glm::sqrt(M[0][1]*M[0][1] + M[2][1]*M[2][1]); - T T2 = glm::atan2(S2, M[1][1]); - T S1 = glm::sin(T1); - T C1 = glm::cos(T1); - T T3 = glm::atan2(C1*M[2][0] - S1*M[2][2], C1*M[0][0] - S1*M[0][2]); - t1 = T1; - t2 = T2; - t3 = T3; - } - - template - GLM_FUNC_QUALIFIER void extractEulerAngleYZY(mat<4, 4, T, defaultp> const & M, - T & t1, - T & t2, - T & t3) - { - T T1 = glm::atan2(M[1][2], -M[1][0]); - T S2 = glm::sqrt(M[0][1]*M[0][1] + M[2][1]*M[2][1]); - T T2 = glm::atan2(S2, M[1][1]); - T S1 = glm::sin(T1); - 
T C1 = glm::cos(T1); - T T3 = glm::atan2(-S1*M[0][0] - C1*M[0][2], S1*M[2][0] + C1*M[2][2]); - t1 = T1; - t2 = T2; - t3 = T3; - } - - template - GLM_FUNC_QUALIFIER void extractEulerAngleZYZ(mat<4, 4, T, defaultp> const & M, - T & t1, - T & t2, - T & t3) - { - T T1 = glm::atan2(M[2][1], M[2][0]); - T S2 = glm::sqrt(M[0][2]*M[0][2] + M[1][2]*M[1][2]); - T T2 = glm::atan2(S2, M[2][2]); - T S1 = glm::sin(T1); - T C1 = glm::cos(T1); - T T3 = glm::atan2(C1*M[0][1] - S1*M[0][0], C1*M[1][1] - S1*M[1][0]); - t1 = T1; - t2 = T2; - t3 = T3; - } - - template - GLM_FUNC_QUALIFIER void extractEulerAngleZXZ(mat<4, 4, T, defaultp> const & M, - T & t1, - T & t2, - T & t3) - { - T T1 = glm::atan2(M[2][0], -M[2][1]); - T S2 = glm::sqrt(M[0][2]*M[0][2] + M[1][2]*M[1][2]); - T T2 = glm::atan2(S2, M[2][2]); - T S1 = glm::sin(T1); - T C1 = glm::cos(T1); - T T3 = glm::atan2(-C1*M[1][0] - S1*M[1][1], C1*M[0][0] + S1*M[0][1]); - t1 = T1; - t2 = T2; - t3 = T3; - } - - template - GLM_FUNC_QUALIFIER void extractEulerAngleXZY(mat<4, 4, T, defaultp> const & M, - T & t1, - T & t2, - T & t3) - { - T T1 = glm::atan2(M[1][2], M[1][1]); - T C2 = glm::sqrt(M[0][0]*M[0][0] + M[2][0]*M[2][0]); - T T2 = glm::atan2(-M[1][0], C2); - T S1 = glm::sin(T1); - T C1 = glm::cos(T1); - T T3 = glm::atan2(S1*M[0][1] - C1*M[0][2], C1*M[2][2] - S1*M[2][1]); - t1 = T1; - t2 = T2; - t3 = T3; - } - - template - GLM_FUNC_QUALIFIER void extractEulerAngleYZX(mat<4, 4, T, defaultp> const & M, - T & t1, - T & t2, - T & t3) - { - T T1 = glm::atan2(-M[0][2], M[0][0]); - T C2 = glm::sqrt(M[1][1]*M[1][1] + M[2][1]*M[2][1]); - T T2 = glm::atan2(M[0][1], C2); - T S1 = glm::sin(T1); - T C1 = glm::cos(T1); - T T3 = glm::atan2(S1*M[1][0] + C1*M[1][2], S1*M[2][0] + C1*M[2][2]); - t1 = T1; - t2 = T2; - t3 = T3; - } - - template - GLM_FUNC_QUALIFIER void extractEulerAngleZYX(mat<4, 4, T, defaultp> const & M, - T & t1, - T & t2, - T & t3) - { - T T1 = glm::atan2(M[0][1], M[0][0]); - T C2 = glm::sqrt(M[1][2]*M[1][2] + M[2][2]*M[2][2]); - T T2 = glm::atan2(-M[0][2], C2); - T S1 = glm::sin(T1); - T C1 = glm::cos(T1); - T T3 = glm::atan2(S1*M[2][0] - C1*M[2][1], C1*M[1][1] - S1*M[1][0]); - t1 = T1; - t2 = T2; - t3 = T3; - } - - template - GLM_FUNC_QUALIFIER void extractEulerAngleZXY(mat<4, 4, T, defaultp> const & M, - T & t1, - T & t2, - T & t3) - { - T T1 = glm::atan2(-M[1][0], M[1][1]); - T C2 = glm::sqrt(M[0][2]*M[0][2] + M[2][2]*M[2][2]); - T T2 = glm::atan2(M[1][2], C2); - T S1 = glm::sin(T1); - T C1 = glm::cos(T1); - T T3 = glm::atan2(C1*M[2][0] + S1*M[2][1], C1*M[0][0] + S1*M[0][1]); - t1 = T1; - t2 = T2; - t3 = T3; - } -}//namespace glm diff --git a/third_party/glm/gtx/extend.hpp b/third_party/glm/gtx/extend.hpp deleted file mode 100755 index 28b7c5c..0000000 --- a/third_party/glm/gtx/extend.hpp +++ /dev/null @@ -1,42 +0,0 @@ -/// @ref gtx_extend -/// @file glm/gtx/extend.hpp -/// -/// @see core (dependence) -/// -/// @defgroup gtx_extend GLM_GTX_extend -/// @ingroup gtx -/// -/// Include to use the features of this extension. -/// -/// Extend a position from a source to a position at a defined length. - -#pragma once - -// Dependency: -#include "../glm.hpp" - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# ifndef GLM_ENABLE_EXPERIMENTAL -# pragma message("GLM: GLM_GTX_extend is an experimental extension and may change in the future. 
Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.") -# else -# pragma message("GLM: GLM_GTX_extend extension included") -# endif -#endif - -namespace glm -{ - /// @addtogroup gtx_extend - /// @{ - - /// Extends of Length the Origin position using the (Source - Origin) direction. - /// @see gtx_extend - template - GLM_FUNC_DECL genType extend( - genType const& Origin, - genType const& Source, - typename genType::value_type const Length); - - /// @} -}//namespace glm - -#include "extend.inl" diff --git a/third_party/glm/gtx/extend.inl b/third_party/glm/gtx/extend.inl deleted file mode 100755 index 32128eb..0000000 --- a/third_party/glm/gtx/extend.inl +++ /dev/null @@ -1,48 +0,0 @@ -/// @ref gtx_extend - -namespace glm -{ - template - GLM_FUNC_QUALIFIER genType extend - ( - genType const& Origin, - genType const& Source, - genType const& Distance - ) - { - return Origin + (Source - Origin) * Distance; - } - - template - GLM_FUNC_QUALIFIER vec<2, T, Q> extend - ( - vec<2, T, Q> const& Origin, - vec<2, T, Q> const& Source, - T const& Distance - ) - { - return Origin + (Source - Origin) * Distance; - } - - template - GLM_FUNC_QUALIFIER vec<3, T, Q> extend - ( - vec<3, T, Q> const& Origin, - vec<3, T, Q> const& Source, - T const& Distance - ) - { - return Origin + (Source - Origin) * Distance; - } - - template - GLM_FUNC_QUALIFIER vec<4, T, Q> extend - ( - vec<4, T, Q> const& Origin, - vec<4, T, Q> const& Source, - T const& Distance - ) - { - return Origin + (Source - Origin) * Distance; - } -}//namespace glm diff --git a/third_party/glm/gtx/extended_min_max.hpp b/third_party/glm/gtx/extended_min_max.hpp deleted file mode 100755 index ad23a91..0000000 --- a/third_party/glm/gtx/extended_min_max.hpp +++ /dev/null @@ -1,182 +0,0 @@ -/// @ref gtx_extended_min_max -/// @file glm/gtx/extended_min_max.hpp -/// -/// @see core (dependence) -/// -/// @defgroup gtx_extended_min_max GLM_GTX_extented_min_max -/// @ingroup gtx -/// -/// Include to use the features of this extension. -/// -/// Min and max functions for 3 to 4 parameters. - -#pragma once - -// Dependency: -#include "../glm.hpp" - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# ifndef GLM_ENABLE_EXPERIMENTAL -# pragma message("GLM: GLM_GTX_extented_min_max is an experimental extension and may change in the future. 
Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.") -# else -# pragma message("GLM: GLM_GTX_extented_min_max extension included") -# endif -#endif - -namespace glm -{ - /// @addtogroup gtx_extended_min_max - /// @{ - - /// Return the minimum component-wise values of 3 inputs - /// @see gtx_extented_min_max - template - GLM_FUNC_DECL T min( - T const& x, - T const& y, - T const& z); - - /// Return the minimum component-wise values of 3 inputs - /// @see gtx_extented_min_max - template class C> - GLM_FUNC_DECL C min( - C const& x, - typename C::T const& y, - typename C::T const& z); - - /// Return the minimum component-wise values of 3 inputs - /// @see gtx_extented_min_max - template class C> - GLM_FUNC_DECL C min( - C const& x, - C const& y, - C const& z); - - /// Return the minimum component-wise values of 4 inputs - /// @see gtx_extented_min_max - template - GLM_FUNC_DECL T min( - T const& x, - T const& y, - T const& z, - T const& w); - - /// Return the minimum component-wise values of 4 inputs - /// @see gtx_extented_min_max - template class C> - GLM_FUNC_DECL C min( - C const& x, - typename C::T const& y, - typename C::T const& z, - typename C::T const& w); - - /// Return the minimum component-wise values of 4 inputs - /// @see gtx_extented_min_max - template class C> - GLM_FUNC_DECL C min( - C const& x, - C const& y, - C const& z, - C const& w); - - /// Return the maximum component-wise values of 3 inputs - /// @see gtx_extented_min_max - template - GLM_FUNC_DECL T max( - T const& x, - T const& y, - T const& z); - - /// Return the maximum component-wise values of 3 inputs - /// @see gtx_extented_min_max - template class C> - GLM_FUNC_DECL C max( - C const& x, - typename C::T const& y, - typename C::T const& z); - - /// Return the maximum component-wise values of 3 inputs - /// @see gtx_extented_min_max - template class C> - GLM_FUNC_DECL C max( - C const& x, - C const& y, - C const& z); - - /// Return the maximum component-wise values of 4 inputs - /// @see gtx_extented_min_max - template - GLM_FUNC_DECL T max( - T const& x, - T const& y, - T const& z, - T const& w); - - /// Return the maximum component-wise values of 4 inputs - /// @see gtx_extented_min_max - template class C> - GLM_FUNC_DECL C max( - C const& x, - typename C::T const& y, - typename C::T const& z, - typename C::T const& w); - - /// Return the maximum component-wise values of 4 inputs - /// @see gtx_extented_min_max - template class C> - GLM_FUNC_DECL C max( - C const& x, - C const& y, - C const& z, - C const& w); - - /// Returns y if y < x; otherwise, it returns x. If one of the two arguments is NaN, the value of the other argument is returned. - /// - /// @tparam genType Floating-point or integer; scalar or vector types. - /// - /// @see gtx_extented_min_max - template - GLM_FUNC_DECL genType fmin(genType x, genType y); - - /// Returns y if x < y; otherwise, it returns x. If one of the two arguments is NaN, the value of the other argument is returned. - /// - /// @tparam genType Floating-point; scalar or vector types. - /// - /// @see gtx_extented_min_max - /// @see std::fmax documentation - template - GLM_FUNC_DECL genType fmax(genType x, genType y); - - /// Returns min(max(x, minVal), maxVal) for each component in x. If one of the two arguments is NaN, the value of the other argument is returned. - /// - /// @tparam genType Floating-point scalar or vector types. 
- /// - /// @see gtx_extented_min_max - template - GLM_FUNC_DECL genType fclamp(genType x, genType minVal, genType maxVal); - - /// Returns min(max(x, minVal), maxVal) for each component in x. If one of the two arguments is NaN, the value of the other argument is returned. - /// - /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector - /// @tparam T Floating-point scalar types - /// @tparam Q Value from qualifier enum - /// - /// @see gtx_extented_min_max - template - GLM_FUNC_DECL vec fclamp(vec const& x, T minVal, T maxVal); - - /// Returns min(max(x, minVal), maxVal) for each component in x. If one of the two arguments is NaN, the value of the other argument is returned. - /// - /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector - /// @tparam T Floating-point scalar types - /// @tparam Q Value from qualifier enum - /// - /// @see gtx_extented_min_max - template - GLM_FUNC_DECL vec fclamp(vec const& x, vec const& minVal, vec const& maxVal); - - - /// @} -}//namespace glm - -#include "extended_min_max.inl" diff --git a/third_party/glm/gtx/extended_min_max.inl b/third_party/glm/gtx/extended_min_max.inl deleted file mode 100755 index e72d1cc..0000000 --- a/third_party/glm/gtx/extended_min_max.inl +++ /dev/null @@ -1,218 +0,0 @@ -/// @ref gtx_extended_min_max - -namespace glm -{ - template - GLM_FUNC_QUALIFIER T min( - T const& x, - T const& y, - T const& z) - { - return glm::min(glm::min(x, y), z); - } - - template class C> - GLM_FUNC_QUALIFIER C min - ( - C const& x, - typename C::T const& y, - typename C::T const& z - ) - { - return glm::min(glm::min(x, y), z); - } - - template class C> - GLM_FUNC_QUALIFIER C min - ( - C const& x, - C const& y, - C const& z - ) - { - return glm::min(glm::min(x, y), z); - } - - template - GLM_FUNC_QUALIFIER T min - ( - T const& x, - T const& y, - T const& z, - T const& w - ) - { - return glm::min(glm::min(x, y), glm::min(z, w)); - } - - template class C> - GLM_FUNC_QUALIFIER C min - ( - C const& x, - typename C::T const& y, - typename C::T const& z, - typename C::T const& w - ) - { - return glm::min(glm::min(x, y), glm::min(z, w)); - } - - template class C> - GLM_FUNC_QUALIFIER C min - ( - C const& x, - C const& y, - C const& z, - C const& w - ) - { - return glm::min(glm::min(x, y), glm::min(z, w)); - } - - template - GLM_FUNC_QUALIFIER T max( - T const& x, - T const& y, - T const& z) - { - return glm::max(glm::max(x, y), z); - } - - template class C> - GLM_FUNC_QUALIFIER C max - ( - C const& x, - typename C::T const& y, - typename C::T const& z - ) - { - return glm::max(glm::max(x, y), z); - } - - template class C> - GLM_FUNC_QUALIFIER C max - ( - C const& x, - C const& y, - C const& z - ) - { - return glm::max(glm::max(x, y), z); - } - - template - GLM_FUNC_QUALIFIER T max - ( - T const& x, - T const& y, - T const& z, - T const& w - ) - { - return glm::max(glm::max(x, y), glm::max(z, w)); - } - - template class C> - GLM_FUNC_QUALIFIER C max - ( - C const& x, - typename C::T const& y, - typename C::T const& z, - typename C::T const& w - ) - { - return glm::max(glm::max(x, y), glm::max(z, w)); - } - - template class C> - GLM_FUNC_QUALIFIER C max - ( - C const& x, - C const& y, - C const& z, - C const& w - ) - { - return glm::max(glm::max(x, y), glm::max(z, w)); - } - - // fmin -# if GLM_HAS_CXX11_STL - using std::fmin; -# else - template - GLM_FUNC_QUALIFIER genType fmin(genType x, genType y) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'fmin' only accept floating-point 
input"); - - if (isnan(x)) - return y; - if (isnan(y)) - return x; - - return min(x, y); - } -# endif - - template - GLM_FUNC_QUALIFIER vec fmin(vec const& a, T b) - { - return detail::functor2::call(fmin, a, vec(b)); - } - - template - GLM_FUNC_QUALIFIER vec fmin(vec const& a, vec const& b) - { - return detail::functor2::call(fmin, a, b); - } - - // fmax -# if GLM_HAS_CXX11_STL - using std::fmax; -# else - template - GLM_FUNC_QUALIFIER genType fmax(genType x, genType y) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'fmax' only accept floating-point input"); - - if (isnan(x)) - return y; - if (isnan(y)) - return x; - - return max(x, y); - } -# endif - - template - GLM_FUNC_QUALIFIER vec fmax(vec const& a, T b) - { - return detail::functor2::call(fmax, a, vec(b)); - } - - template - GLM_FUNC_QUALIFIER vec fmax(vec const& a, vec const& b) - { - return detail::functor2::call(fmax, a, b); - } - - // fclamp - template - GLM_FUNC_QUALIFIER genType fclamp(genType x, genType minVal, genType maxVal) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'fclamp' only accept floating-point or integer inputs"); - return fmin(fmax(x, minVal), maxVal); - } - - template - GLM_FUNC_QUALIFIER vec fclamp(vec const& x, T minVal, T maxVal) - { - return fmin(fmax(x, vec(minVal)), vec(maxVal)); - } - - template - GLM_FUNC_QUALIFIER vec fclamp(vec const& x, vec const& minVal, vec const& maxVal) - { - return fmin(fmax(x, minVal), maxVal); - } -}//namespace glm diff --git a/third_party/glm/gtx/exterior_product.hpp b/third_party/glm/gtx/exterior_product.hpp deleted file mode 100755 index 5522df7..0000000 --- a/third_party/glm/gtx/exterior_product.hpp +++ /dev/null @@ -1,45 +0,0 @@ -/// @ref gtx_exterior_product -/// @file glm/gtx/exterior_product.hpp -/// -/// @see core (dependence) -/// @see gtx_exterior_product (dependence) -/// -/// @defgroup gtx_exterior_product GLM_GTX_exterior_product -/// @ingroup gtx -/// -/// Include to use the features of this extension. -/// -/// @brief Allow to perform bit operations on integer values - -#pragma once - -// Dependencies -#include "../detail/setup.hpp" -#include "../detail/qualifier.hpp" - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# ifndef GLM_ENABLE_EXPERIMENTAL -# pragma message("GLM: GLM_GTX_exterior_product is an experimental extension and may change in the future. Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.") -# else -# pragma message("GLM: GLM_GTX_exterior_product extension included") -# endif -#endif - -namespace glm -{ - /// @addtogroup gtx_exterior_product - /// @{ - - /// Returns the cross product of x and y. 
- /// - /// @tparam T Floating-point scalar types - /// @tparam Q Value from qualifier enum - /// - /// @see Exterior product - template - GLM_FUNC_DECL T cross(vec<2, T, Q> const& v, vec<2, T, Q> const& u); - - /// @} -} //namespace glm - -#include "exterior_product.inl" diff --git a/third_party/glm/gtx/exterior_product.inl b/third_party/glm/gtx/exterior_product.inl deleted file mode 100755 index 93661fd..0000000 --- a/third_party/glm/gtx/exterior_product.inl +++ /dev/null @@ -1,26 +0,0 @@ -/// @ref gtx_exterior_product - -#include - -namespace glm { -namespace detail -{ - template - struct compute_cross_vec2 - { - GLM_FUNC_QUALIFIER static T call(vec<2, T, Q> const& v, vec<2, T, Q> const& u) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'cross' accepts only floating-point inputs"); - - return v.x * u.y - u.x * v.y; - } - }; -}//namespace detail - - template - GLM_FUNC_QUALIFIER T cross(vec<2, T, Q> const& x, vec<2, T, Q> const& y) - { - return detail::compute_cross_vec2::value>::call(x, y); - } -}//namespace glm - diff --git a/third_party/glm/gtx/fast_exponential.hpp b/third_party/glm/gtx/fast_exponential.hpp deleted file mode 100755 index 6fb7286..0000000 --- a/third_party/glm/gtx/fast_exponential.hpp +++ /dev/null @@ -1,95 +0,0 @@ -/// @ref gtx_fast_exponential -/// @file glm/gtx/fast_exponential.hpp -/// -/// @see core (dependence) -/// @see gtx_half_float (dependence) -/// -/// @defgroup gtx_fast_exponential GLM_GTX_fast_exponential -/// @ingroup gtx -/// -/// Include to use the features of this extension. -/// -/// Fast but less accurate implementations of exponential based functions. - -#pragma once - -// Dependency: -#include "../glm.hpp" - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# ifndef GLM_ENABLE_EXPERIMENTAL -# pragma message("GLM: GLM_GTX_fast_exponential is an experimental extension and may change in the future. Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.") -# else -# pragma message("GLM: GLM_GTX_fast_exponential extension included") -# endif -#endif - -namespace glm -{ - /// @addtogroup gtx_fast_exponential - /// @{ - - /// Faster than the common pow function but less accurate. - /// @see gtx_fast_exponential - template - GLM_FUNC_DECL genType fastPow(genType x, genType y); - - /// Faster than the common pow function but less accurate. - /// @see gtx_fast_exponential - template - GLM_FUNC_DECL vec fastPow(vec const& x, vec const& y); - - /// Faster than the common pow function but less accurate. - /// @see gtx_fast_exponential - template - GLM_FUNC_DECL genTypeT fastPow(genTypeT x, genTypeU y); - - /// Faster than the common pow function but less accurate. - /// @see gtx_fast_exponential - template - GLM_FUNC_DECL vec fastPow(vec const& x); - - /// Faster than the common exp function but less accurate. - /// @see gtx_fast_exponential - template - GLM_FUNC_DECL T fastExp(T x); - - /// Faster than the common exp function but less accurate. - /// @see gtx_fast_exponential - template - GLM_FUNC_DECL vec fastExp(vec const& x); - - /// Faster than the common log function but less accurate. - /// @see gtx_fast_exponential - template - GLM_FUNC_DECL T fastLog(T x); - - /// Faster than the common exp2 function but less accurate. - /// @see gtx_fast_exponential - template - GLM_FUNC_DECL vec fastLog(vec const& x); - - /// Faster than the common exp2 function but less accurate. 
- /// @see gtx_fast_exponential - template - GLM_FUNC_DECL T fastExp2(T x); - - /// Faster than the common exp2 function but less accurate. - /// @see gtx_fast_exponential - template - GLM_FUNC_DECL vec fastExp2(vec const& x); - - /// Faster than the common log2 function but less accurate. - /// @see gtx_fast_exponential - template - GLM_FUNC_DECL T fastLog2(T x); - - /// Faster than the common log2 function but less accurate. - /// @see gtx_fast_exponential - template - GLM_FUNC_DECL vec fastLog2(vec const& x); - - /// @} -}//namespace glm - -#include "fast_exponential.inl" diff --git a/third_party/glm/gtx/fast_exponential.inl b/third_party/glm/gtx/fast_exponential.inl deleted file mode 100755 index f139e50..0000000 --- a/third_party/glm/gtx/fast_exponential.inl +++ /dev/null @@ -1,136 +0,0 @@ -/// @ref gtx_fast_exponential - -namespace glm -{ - // fastPow: - template - GLM_FUNC_QUALIFIER genType fastPow(genType x, genType y) - { - return exp(y * log(x)); - } - - template - GLM_FUNC_QUALIFIER vec fastPow(vec const& x, vec const& y) - { - return exp(y * log(x)); - } - - template - GLM_FUNC_QUALIFIER T fastPow(T x, int y) - { - T f = static_cast(1); - for(int i = 0; i < y; ++i) - f *= x; - return f; - } - - template - GLM_FUNC_QUALIFIER vec fastPow(vec const& x, vec const& y) - { - vec Result; - for(length_t i = 0, n = x.length(); i < n; ++i) - Result[i] = fastPow(x[i], y[i]); - return Result; - } - - // fastExp - // Note: This function provides accurate results only for value between -1 and 1, else avoid it. - template - GLM_FUNC_QUALIFIER T fastExp(T x) - { - // This has a better looking and same performance in release mode than the following code. However, in debug mode it's slower. - // return 1.0f + x * (1.0f + x * 0.5f * (1.0f + x * 0.3333333333f * (1.0f + x * 0.25 * (1.0f + x * 0.2f)))); - T x2 = x * x; - T x3 = x2 * x; - T x4 = x3 * x; - T x5 = x4 * x; - return T(1) + x + (x2 * T(0.5)) + (x3 * T(0.1666666667)) + (x4 * T(0.041666667)) + (x5 * T(0.008333333333)); - } - /* // Try to handle all values of float... but often shower than std::exp, glm::floor and the loop kill the performance - GLM_FUNC_QUALIFIER float fastExp(float x) - { - const float e = 2.718281828f; - const float IntegerPart = floor(x); - const float FloatPart = x - IntegerPart; - float z = 1.f; - - for(int i = 0; i < int(IntegerPart); ++i) - z *= e; - - const float x2 = FloatPart * FloatPart; - const float x3 = x2 * FloatPart; - const float x4 = x3 * FloatPart; - const float x5 = x4 * FloatPart; - return z * (1.0f + FloatPart + (x2 * 0.5f) + (x3 * 0.1666666667f) + (x4 * 0.041666667f) + (x5 * 0.008333333333f)); - } - - // Increase accuracy on number bigger that 1 and smaller than -1 but it's not enough for high and negative numbers - GLM_FUNC_QUALIFIER float fastExp(float x) - { - // This has a better looking and same performance in release mode than the following code. However, in debug mode it's slower. 
- // return 1.0f + x * (1.0f + x * 0.5f * (1.0f + x * 0.3333333333f * (1.0f + x * 0.25 * (1.0f + x * 0.2f)))); - float x2 = x * x; - float x3 = x2 * x; - float x4 = x3 * x; - float x5 = x4 * x; - float x6 = x5 * x; - float x7 = x6 * x; - float x8 = x7 * x; - return 1.0f + x + (x2 * 0.5f) + (x3 * 0.1666666667f) + (x4 * 0.041666667f) + (x5 * 0.008333333333f)+ (x6 * 0.00138888888888f) + (x7 * 0.000198412698f) + (x8 * 0.0000248015873f);; - } - */ - - template - GLM_FUNC_QUALIFIER vec fastExp(vec const& x) - { - return detail::functor1::call(fastExp, x); - } - - // fastLog - template - GLM_FUNC_QUALIFIER genType fastLog(genType x) - { - return std::log(x); - } - - /* Slower than the VC7.1 function... - GLM_FUNC_QUALIFIER float fastLog(float x) - { - float y1 = (x - 1.0f) / (x + 1.0f); - float y2 = y1 * y1; - return 2.0f * y1 * (1.0f + y2 * (0.3333333333f + y2 * (0.2f + y2 * 0.1428571429f))); - } - */ - - template - GLM_FUNC_QUALIFIER vec fastLog(vec const& x) - { - return detail::functor1::call(fastLog, x); - } - - //fastExp2, ln2 = 0.69314718055994530941723212145818f - template - GLM_FUNC_QUALIFIER genType fastExp2(genType x) - { - return fastExp(0.69314718055994530941723212145818f * x); - } - - template - GLM_FUNC_QUALIFIER vec fastExp2(vec const& x) - { - return detail::functor1::call(fastExp2, x); - } - - // fastLog2, ln2 = 0.69314718055994530941723212145818f - template - GLM_FUNC_QUALIFIER genType fastLog2(genType x) - { - return fastLog(x) / 0.69314718055994530941723212145818f; - } - - template - GLM_FUNC_QUALIFIER vec fastLog2(vec const& x) - { - return detail::functor1::call(fastLog2, x); - } -}//namespace glm diff --git a/third_party/glm/gtx/fast_square_root.hpp b/third_party/glm/gtx/fast_square_root.hpp deleted file mode 100755 index 9fb3f2f..0000000 --- a/third_party/glm/gtx/fast_square_root.hpp +++ /dev/null @@ -1,92 +0,0 @@ -/// @ref gtx_fast_square_root -/// @file glm/gtx/fast_square_root.hpp -/// -/// @see core (dependence) -/// -/// @defgroup gtx_fast_square_root GLM_GTX_fast_square_root -/// @ingroup gtx -/// -/// Include to use the features of this extension. -/// -/// Fast but less accurate implementations of square root based functions. -/// - Sqrt optimisation based on Newton's method, -/// www.gamedev.net/community/forums/topic.asp?topic id=139956 - -#pragma once - -// Dependency: -#include "../common.hpp" -#include "../exponential.hpp" -#include "../geometric.hpp" - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# ifndef GLM_ENABLE_EXPERIMENTAL -# pragma message("GLM: GLM_GTX_fast_square_root is an experimental extension and may change in the future. Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.") -# else -# pragma message("GLM: GLM_GTX_fast_square_root extension included") -# endif -#endif - -namespace glm -{ - /// @addtogroup gtx_fast_square_root - /// @{ - - /// Faster than the common sqrt function but less accurate. - /// - /// @see gtx_fast_square_root extension. - template - GLM_FUNC_DECL genType fastSqrt(genType x); - - /// Faster than the common sqrt function but less accurate. - /// - /// @see gtx_fast_square_root extension. - template - GLM_FUNC_DECL vec fastSqrt(vec const& x); - - /// Faster than the common inversesqrt function but less accurate. - /// - /// @see gtx_fast_square_root extension. - template - GLM_FUNC_DECL genType fastInverseSqrt(genType x); - - /// Faster than the common inversesqrt function but less accurate. - /// - /// @see gtx_fast_square_root extension. 
- template - GLM_FUNC_DECL vec fastInverseSqrt(vec const& x); - - /// Faster than the common length function but less accurate. - /// - /// @see gtx_fast_square_root extension. - template - GLM_FUNC_DECL genType fastLength(genType x); - - /// Faster than the common length function but less accurate. - /// - /// @see gtx_fast_square_root extension. - template - GLM_FUNC_DECL T fastLength(vec const& x); - - /// Faster than the common distance function but less accurate. - /// - /// @see gtx_fast_square_root extension. - template - GLM_FUNC_DECL genType fastDistance(genType x, genType y); - - /// Faster than the common distance function but less accurate. - /// - /// @see gtx_fast_square_root extension. - template - GLM_FUNC_DECL T fastDistance(vec const& x, vec const& y); - - /// Faster than the common normalize function but less accurate. - /// - /// @see gtx_fast_square_root extension. - template - GLM_FUNC_DECL genType fastNormalize(genType const& x); - - /// @} -}// namespace glm - -#include "fast_square_root.inl" diff --git a/third_party/glm/gtx/fast_square_root.inl b/third_party/glm/gtx/fast_square_root.inl deleted file mode 100755 index 4e6c6de..0000000 --- a/third_party/glm/gtx/fast_square_root.inl +++ /dev/null @@ -1,75 +0,0 @@ -/// @ref gtx_fast_square_root - -namespace glm -{ - // fastSqrt - template - GLM_FUNC_QUALIFIER genType fastSqrt(genType x) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'fastSqrt' only accept floating-point input"); - - return genType(1) / fastInverseSqrt(x); - } - - template - GLM_FUNC_QUALIFIER vec fastSqrt(vec const& x) - { - return detail::functor1::call(fastSqrt, x); - } - - // fastInversesqrt - template - GLM_FUNC_QUALIFIER genType fastInverseSqrt(genType x) - { - return detail::compute_inversesqrt<1, genType, lowp, detail::is_aligned::value>::call(vec<1, genType, lowp>(x)).x; - } - - template - GLM_FUNC_QUALIFIER vec fastInverseSqrt(vec const& x) - { - return detail::compute_inversesqrt::value>::call(x); - } - - // fastLength - template - GLM_FUNC_QUALIFIER genType fastLength(genType x) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'fastLength' only accept floating-point inputs"); - - return abs(x); - } - - template - GLM_FUNC_QUALIFIER T fastLength(vec const& x) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'fastLength' only accept floating-point inputs"); - - return fastSqrt(dot(x, x)); - } - - // fastDistance - template - GLM_FUNC_QUALIFIER genType fastDistance(genType x, genType y) - { - return fastLength(y - x); - } - - template - GLM_FUNC_QUALIFIER T fastDistance(vec const& x, vec const& y) - { - return fastLength(y - x); - } - - // fastNormalize - template - GLM_FUNC_QUALIFIER genType fastNormalize(genType x) - { - return x > genType(0) ? genType(1) : -genType(1); - } - - template - GLM_FUNC_QUALIFIER vec fastNormalize(vec const& x) - { - return x * fastInverseSqrt(dot(x, x)); - } -}//namespace glm diff --git a/third_party/glm/gtx/fast_trigonometry.hpp b/third_party/glm/gtx/fast_trigonometry.hpp deleted file mode 100755 index 2650d6e..0000000 --- a/third_party/glm/gtx/fast_trigonometry.hpp +++ /dev/null @@ -1,79 +0,0 @@ -/// @ref gtx_fast_trigonometry -/// @file glm/gtx/fast_trigonometry.hpp -/// -/// @see core (dependence) -/// -/// @defgroup gtx_fast_trigonometry GLM_GTX_fast_trigonometry -/// @ingroup gtx -/// -/// Include to use the features of this extension. -/// -/// Fast but less accurate implementations of trigonometric functions. 
- -#pragma once - -// Dependency: -#include "../gtc/constants.hpp" - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# ifndef GLM_ENABLE_EXPERIMENTAL -# pragma message("GLM: GLM_GTX_fast_trigonometry is an experimental extension and may change in the future. Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.") -# else -# pragma message("GLM: GLM_GTX_fast_trigonometry extension included") -# endif -#endif - -namespace glm -{ - /// @addtogroup gtx_fast_trigonometry - /// @{ - - /// Wrap an angle to [0 2pi[ - /// From GLM_GTX_fast_trigonometry extension. - template - GLM_FUNC_DECL T wrapAngle(T angle); - - /// Faster than the common sin function but less accurate. - /// From GLM_GTX_fast_trigonometry extension. - template - GLM_FUNC_DECL T fastSin(T angle); - - /// Faster than the common cos function but less accurate. - /// From GLM_GTX_fast_trigonometry extension. - template - GLM_FUNC_DECL T fastCos(T angle); - - /// Faster than the common tan function but less accurate. - /// Defined between -2pi and 2pi. - /// From GLM_GTX_fast_trigonometry extension. - template - GLM_FUNC_DECL T fastTan(T angle); - - /// Faster than the common asin function but less accurate. - /// Defined between -2pi and 2pi. - /// From GLM_GTX_fast_trigonometry extension. - template - GLM_FUNC_DECL T fastAsin(T angle); - - /// Faster than the common acos function but less accurate. - /// Defined between -2pi and 2pi. - /// From GLM_GTX_fast_trigonometry extension. - template - GLM_FUNC_DECL T fastAcos(T angle); - - /// Faster than the common atan function but less accurate. - /// Defined between -2pi and 2pi. - /// From GLM_GTX_fast_trigonometry extension. - template - GLM_FUNC_DECL T fastAtan(T y, T x); - - /// Faster than the common atan function but less accurate. - /// Defined between -2pi and 2pi. - /// From GLM_GTX_fast_trigonometry extension. 
- template - GLM_FUNC_DECL T fastAtan(T angle); - - /// @} -}//namespace glm - -#include "fast_trigonometry.inl" diff --git a/third_party/glm/gtx/fast_trigonometry.inl b/third_party/glm/gtx/fast_trigonometry.inl deleted file mode 100755 index 1a710cb..0000000 --- a/third_party/glm/gtx/fast_trigonometry.inl +++ /dev/null @@ -1,142 +0,0 @@ -/// @ref gtx_fast_trigonometry - -namespace glm{ -namespace detail -{ - template - GLM_FUNC_QUALIFIER vec taylorCos(vec const& x) - { - return static_cast(1) - - (x * x) * (1.f / 2.f) - + ((x * x) * (x * x)) * (1.f / 24.f) - - (((x * x) * (x * x)) * (x * x)) * (1.f / 720.f) - + (((x * x) * (x * x)) * ((x * x) * (x * x))) * (1.f / 40320.f); - } - - template - GLM_FUNC_QUALIFIER T cos_52s(T x) - { - T const xx(x * x); - return (T(0.9999932946) + xx * (T(-0.4999124376) + xx * (T(0.0414877472) + xx * T(-0.0012712095)))); - } - - template - GLM_FUNC_QUALIFIER vec cos_52s(vec const& x) - { - return detail::functor1::call(cos_52s, x); - } -}//namespace detail - - // wrapAngle - template - GLM_FUNC_QUALIFIER T wrapAngle(T angle) - { - return abs(mod(angle, two_pi())); - } - - template - GLM_FUNC_QUALIFIER vec wrapAngle(vec const& x) - { - return detail::functor1::call(wrapAngle, x); - } - - // cos - template - GLM_FUNC_QUALIFIER T fastCos(T x) - { - T const angle(wrapAngle(x)); - - if(angle < half_pi()) - return detail::cos_52s(angle); - if(angle < pi()) - return -detail::cos_52s(pi() - angle); - if(angle < (T(3) * half_pi())) - return -detail::cos_52s(angle - pi()); - - return detail::cos_52s(two_pi() - angle); - } - - template - GLM_FUNC_QUALIFIER vec fastCos(vec const& x) - { - return detail::functor1::call(fastCos, x); - } - - // sin - template - GLM_FUNC_QUALIFIER T fastSin(T x) - { - return fastCos(half_pi() - x); - } - - template - GLM_FUNC_QUALIFIER vec fastSin(vec const& x) - { - return detail::functor1::call(fastSin, x); - } - - // tan - template - GLM_FUNC_QUALIFIER T fastTan(T x) - { - return x + (x * x * x * T(0.3333333333)) + (x * x * x * x * x * T(0.1333333333333)) + (x * x * x * x * x * x * x * T(0.0539682539)); - } - - template - GLM_FUNC_QUALIFIER vec fastTan(vec const& x) - { - return detail::functor1::call(fastTan, x); - } - - // asin - template - GLM_FUNC_QUALIFIER T fastAsin(T x) - { - return x + (x * x * x * T(0.166666667)) + (x * x * x * x * x * T(0.075)) + (x * x * x * x * x * x * x * T(0.0446428571)) + (x * x * x * x * x * x * x * x * x * T(0.0303819444));// + (x * x * x * x * x * x * x * x * x * x * x * T(0.022372159)); - } - - template - GLM_FUNC_QUALIFIER vec fastAsin(vec const& x) - { - return detail::functor1::call(fastAsin, x); - } - - // acos - template - GLM_FUNC_QUALIFIER T fastAcos(T x) - { - return T(1.5707963267948966192313216916398) - fastAsin(x); //(PI / 2) - } - - template - GLM_FUNC_QUALIFIER vec fastAcos(vec const& x) - { - return detail::functor1::call(fastAcos, x); - } - - // atan - template - GLM_FUNC_QUALIFIER T fastAtan(T y, T x) - { - T sgn = sign(y) * sign(x); - return abs(fastAtan(y / x)) * sgn; - } - - template - GLM_FUNC_QUALIFIER vec fastAtan(vec const& y, vec const& x) - { - return detail::functor2::call(fastAtan, y, x); - } - - template - GLM_FUNC_QUALIFIER T fastAtan(T x) - { - return x - (x * x * x * T(0.333333333333)) + (x * x * x * x * x * T(0.2)) - (x * x * x * x * x * x * x * T(0.1428571429)) + (x * x * x * x * x * x * x * x * x * T(0.111111111111)) - (x * x * x * x * x * x * x * x * x * x * x * T(0.0909090909)); - } - - template - GLM_FUNC_QUALIFIER vec fastAtan(vec const& x) - { - return 
detail::functor1::call(fastAtan, x); - } -}//namespace glm diff --git a/third_party/glm/gtx/float_notmalize.inl b/third_party/glm/gtx/float_notmalize.inl deleted file mode 100755 index 8cdbc5a..0000000 --- a/third_party/glm/gtx/float_notmalize.inl +++ /dev/null @@ -1,13 +0,0 @@ -/// @ref gtx_float_normalize - -#include - -namespace glm -{ - template - GLM_FUNC_QUALIFIER vec floatNormalize(vec const& v) - { - return vec(v) / static_cast(std::numeric_limits::max()); - } - -}//namespace glm diff --git a/third_party/glm/gtx/functions.hpp b/third_party/glm/gtx/functions.hpp deleted file mode 100755 index 9f4166c..0000000 --- a/third_party/glm/gtx/functions.hpp +++ /dev/null @@ -1,56 +0,0 @@ -/// @ref gtx_functions -/// @file glm/gtx/functions.hpp -/// -/// @see core (dependence) -/// @see gtc_quaternion (dependence) -/// -/// @defgroup gtx_functions GLM_GTX_functions -/// @ingroup gtx -/// -/// Include to use the features of this extension. -/// -/// List of useful common functions. - -#pragma once - -// Dependencies -#include "../detail/setup.hpp" -#include "../detail/qualifier.hpp" -#include "../detail/type_vec2.hpp" - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# ifndef GLM_ENABLE_EXPERIMENTAL -# pragma message("GLM: GLM_GTX_functions is an experimental extension and may change in the future. Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.") -# else -# pragma message("GLM: GLM_GTX_functions extension included") -# endif -#endif - -namespace glm -{ - /// @addtogroup gtx_functions - /// @{ - - /// 1D gauss function - /// - /// @see gtc_epsilon - template - GLM_FUNC_DECL T gauss( - T x, - T ExpectedValue, - T StandardDeviation); - - /// 2D gauss function - /// - /// @see gtc_epsilon - template - GLM_FUNC_DECL T gauss( - vec<2, T, Q> const& Coord, - vec<2, T, Q> const& ExpectedValue, - vec<2, T, Q> const& StandardDeviation); - - /// @} -}//namespace glm - -#include "functions.inl" - diff --git a/third_party/glm/gtx/functions.inl b/third_party/glm/gtx/functions.inl deleted file mode 100755 index 29cbb20..0000000 --- a/third_party/glm/gtx/functions.inl +++ /dev/null @@ -1,30 +0,0 @@ -/// @ref gtx_functions - -#include "../exponential.hpp" - -namespace glm -{ - template - GLM_FUNC_QUALIFIER T gauss - ( - T x, - T ExpectedValue, - T StandardDeviation - ) - { - return exp(-((x - ExpectedValue) * (x - ExpectedValue)) / (static_cast(2) * StandardDeviation * StandardDeviation)) / (StandardDeviation * sqrt(static_cast(6.28318530717958647692528676655900576))); - } - - template - GLM_FUNC_QUALIFIER T gauss - ( - vec<2, T, Q> const& Coord, - vec<2, T, Q> const& ExpectedValue, - vec<2, T, Q> const& StandardDeviation - ) - { - vec<2, T, Q> const Squared = ((Coord - ExpectedValue) * (Coord - ExpectedValue)) / (static_cast(2) * StandardDeviation * StandardDeviation); - return exp(-(Squared.x + Squared.y)); - } -}//namespace glm - diff --git a/third_party/glm/gtx/gradient_paint.hpp b/third_party/glm/gtx/gradient_paint.hpp deleted file mode 100755 index 6f85bf4..0000000 --- a/third_party/glm/gtx/gradient_paint.hpp +++ /dev/null @@ -1,53 +0,0 @@ -/// @ref gtx_gradient_paint -/// @file glm/gtx/gradient_paint.hpp -/// -/// @see core (dependence) -/// @see gtx_optimum_pow (dependence) -/// -/// @defgroup gtx_gradient_paint GLM_GTX_gradient_paint -/// @ingroup gtx -/// -/// Include to use the features of this extension. -/// -/// Functions that return the color of procedural gradient for specific coordinates. 
- -#pragma once - -// Dependency: -#include "../glm.hpp" -#include "../gtx/optimum_pow.hpp" - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# ifndef GLM_ENABLE_EXPERIMENTAL -# pragma message("GLM: GLM_GTX_gradient_paint is an experimental extension and may change in the future. Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.") -# else -# pragma message("GLM: GLM_GTX_gradient_paint extension included") -# endif -#endif - -namespace glm -{ - /// @addtogroup gtx_gradient_paint - /// @{ - - /// Return a color from a radial gradient. - /// @see - gtx_gradient_paint - template - GLM_FUNC_DECL T radialGradient( - vec<2, T, Q> const& Center, - T const& Radius, - vec<2, T, Q> const& Focal, - vec<2, T, Q> const& Position); - - /// Return a color from a linear gradient. - /// @see - gtx_gradient_paint - template - GLM_FUNC_DECL T linearGradient( - vec<2, T, Q> const& Point0, - vec<2, T, Q> const& Point1, - vec<2, T, Q> const& Position); - - /// @} -}// namespace glm - -#include "gradient_paint.inl" diff --git a/third_party/glm/gtx/gradient_paint.inl b/third_party/glm/gtx/gradient_paint.inl deleted file mode 100755 index 4c495e6..0000000 --- a/third_party/glm/gtx/gradient_paint.inl +++ /dev/null @@ -1,36 +0,0 @@ -/// @ref gtx_gradient_paint - -namespace glm -{ - template - GLM_FUNC_QUALIFIER T radialGradient - ( - vec<2, T, Q> const& Center, - T const& Radius, - vec<2, T, Q> const& Focal, - vec<2, T, Q> const& Position - ) - { - vec<2, T, Q> F = Focal - Center; - vec<2, T, Q> D = Position - Focal; - T Radius2 = pow2(Radius); - T Fx2 = pow2(F.x); - T Fy2 = pow2(F.y); - - T Numerator = (D.x * F.x + D.y * F.y) + sqrt(Radius2 * (pow2(D.x) + pow2(D.y)) - pow2(D.x * F.y - D.y * F.x)); - T Denominator = Radius2 - (Fx2 + Fy2); - return Numerator / Denominator; - } - - template - GLM_FUNC_QUALIFIER T linearGradient - ( - vec<2, T, Q> const& Point0, - vec<2, T, Q> const& Point1, - vec<2, T, Q> const& Position - ) - { - vec<2, T, Q> Dist = Point1 - Point0; - return (Dist.x * (Position.x - Point0.x) + Dist.y * (Position.y - Point0.y)) / glm::dot(Dist, Dist); - } -}//namespace glm diff --git a/third_party/glm/gtx/handed_coordinate_space.hpp b/third_party/glm/gtx/handed_coordinate_space.hpp deleted file mode 100755 index 3c85968..0000000 --- a/third_party/glm/gtx/handed_coordinate_space.hpp +++ /dev/null @@ -1,50 +0,0 @@ -/// @ref gtx_handed_coordinate_space -/// @file glm/gtx/handed_coordinate_space.hpp -/// -/// @see core (dependence) -/// -/// @defgroup gtx_handed_coordinate_space GLM_GTX_handed_coordinate_space -/// @ingroup gtx -/// -/// Include to use the features of this extension. -/// -/// To know if a set of three basis vectors defines a right or left-handed coordinate system. - -#pragma once - -// Dependency: -#include "../glm.hpp" - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# ifndef GLM_ENABLE_EXPERIMENTAL -# pragma message("GLM: GLM_GTX_handed_coordinate_space is an experimental extension and may change in the future. Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.") -# else -# pragma message("GLM: GLM_GTX_handed_coordinate_space extension included") -# endif -#endif - -namespace glm -{ - /// @addtogroup gtx_handed_coordinate_space - /// @{ - - //! Return if a trihedron right handed or not. - //! From GLM_GTX_handed_coordinate_space extension. - template - GLM_FUNC_DECL bool rightHanded( - vec<3, T, Q> const& tangent, - vec<3, T, Q> const& binormal, - vec<3, T, Q> const& normal); - - //! 
Return if a trihedron left handed or not. - //! From GLM_GTX_handed_coordinate_space extension. - template - GLM_FUNC_DECL bool leftHanded( - vec<3, T, Q> const& tangent, - vec<3, T, Q> const& binormal, - vec<3, T, Q> const& normal); - - /// @} -}// namespace glm - -#include "handed_coordinate_space.inl" diff --git a/third_party/glm/gtx/handed_coordinate_space.inl b/third_party/glm/gtx/handed_coordinate_space.inl deleted file mode 100755 index e43c17b..0000000 --- a/third_party/glm/gtx/handed_coordinate_space.inl +++ /dev/null @@ -1,26 +0,0 @@ -/// @ref gtx_handed_coordinate_space - -namespace glm -{ - template - GLM_FUNC_QUALIFIER bool rightHanded - ( - vec<3, T, Q> const& tangent, - vec<3, T, Q> const& binormal, - vec<3, T, Q> const& normal - ) - { - return dot(cross(normal, tangent), binormal) > T(0); - } - - template - GLM_FUNC_QUALIFIER bool leftHanded - ( - vec<3, T, Q> const& tangent, - vec<3, T, Q> const& binormal, - vec<3, T, Q> const& normal - ) - { - return dot(cross(normal, tangent), binormal) < T(0); - } -}//namespace glm diff --git a/third_party/glm/gtx/hash.hpp b/third_party/glm/gtx/hash.hpp deleted file mode 100755 index 05dae9f..0000000 --- a/third_party/glm/gtx/hash.hpp +++ /dev/null @@ -1,142 +0,0 @@ -/// @ref gtx_hash -/// @file glm/gtx/hash.hpp -/// -/// @see core (dependence) -/// -/// @defgroup gtx_hash GLM_GTX_hash -/// @ingroup gtx -/// -/// Include to use the features of this extension. -/// -/// Add std::hash support for glm types - -#pragma once - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# ifndef GLM_ENABLE_EXPERIMENTAL -# pragma message("GLM: GLM_GTX_hash is an experimental extension and may change in the future. Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.") -# else -# pragma message("GLM: GLM_GTX_hash extension included") -# endif -#endif - -#include - -#include "../vec2.hpp" -#include "../vec3.hpp" -#include "../vec4.hpp" -#include "../gtc/vec1.hpp" - -#include "../gtc/quaternion.hpp" -#include "../gtx/dual_quaternion.hpp" - -#include "../mat2x2.hpp" -#include "../mat2x3.hpp" -#include "../mat2x4.hpp" - -#include "../mat3x2.hpp" -#include "../mat3x3.hpp" -#include "../mat3x4.hpp" - -#include "../mat4x2.hpp" -#include "../mat4x3.hpp" -#include "../mat4x4.hpp" - -#if !GLM_HAS_CXX11_STL -# error "GLM_GTX_hash requires C++11 standard library support" -#endif - -namespace std -{ - template - struct hash > - { - GLM_FUNC_DECL size_t operator()(glm::vec<1, T, Q> const& v) const; - }; - - template - struct hash > - { - GLM_FUNC_DECL size_t operator()(glm::vec<2, T, Q> const& v) const; - }; - - template - struct hash > - { - GLM_FUNC_DECL size_t operator()(glm::vec<3, T, Q> const& v) const; - }; - - template - struct hash > - { - GLM_FUNC_DECL size_t operator()(glm::vec<4, T, Q> const& v) const; - }; - - template - struct hash> - { - GLM_FUNC_DECL size_t operator()(glm::qua const& q) const; - }; - - template - struct hash > - { - GLM_FUNC_DECL size_t operator()(glm::tdualquat const& q) const; - }; - - template - struct hash > - { - GLM_FUNC_DECL size_t operator()(glm::mat<2, 2, T,Q> const& m) const; - }; - - template - struct hash > - { - GLM_FUNC_DECL size_t operator()(glm::mat<2, 3, T,Q> const& m) const; - }; - - template - struct hash > - { - GLM_FUNC_DECL size_t operator()(glm::mat<2, 4, T,Q> const& m) const; - }; - - template - struct hash > - { - GLM_FUNC_DECL size_t operator()(glm::mat<3, 2, T,Q> const& m) const; - }; - - template - struct hash > - { - GLM_FUNC_DECL size_t operator()(glm::mat<3, 
3, T,Q> const& m) const; - }; - - template - struct hash > - { - GLM_FUNC_DECL size_t operator()(glm::mat<3, 4, T,Q> const& m) const; - }; - - template - struct hash > - { - GLM_FUNC_DECL size_t operator()(glm::mat<4, 2, T,Q> const& m) const; - }; - - template - struct hash > - { - GLM_FUNC_DECL size_t operator()(glm::mat<4, 3, T,Q> const& m) const; - }; - - template - struct hash > - { - GLM_FUNC_DECL size_t operator()(glm::mat<4, 4, T,Q> const& m) const; - }; -} // namespace std - -#include "hash.inl" diff --git a/third_party/glm/gtx/hash.inl b/third_party/glm/gtx/hash.inl deleted file mode 100755 index ff71ca9..0000000 --- a/third_party/glm/gtx/hash.inl +++ /dev/null @@ -1,184 +0,0 @@ -/// @ref gtx_hash -/// -/// @see core (dependence) -/// -/// @defgroup gtx_hash GLM_GTX_hash -/// @ingroup gtx -/// -/// @brief Add std::hash support for glm types -/// -/// need to be included to use the features of this extension. - -namespace glm { -namespace detail -{ - GLM_INLINE void hash_combine(size_t &seed, size_t hash) - { - hash += 0x9e3779b9 + (seed << 6) + (seed >> 2); - seed ^= hash; - } -}} - -namespace std -{ - template - GLM_FUNC_QUALIFIER size_t hash>::operator()(glm::vec<1, T, Q> const& v) const - { - hash hasher; - return hasher(v.x); - } - - template - GLM_FUNC_QUALIFIER size_t hash>::operator()(glm::vec<2, T, Q> const& v) const - { - size_t seed = 0; - hash hasher; - glm::detail::hash_combine(seed, hasher(v.x)); - glm::detail::hash_combine(seed, hasher(v.y)); - return seed; - } - - template - GLM_FUNC_QUALIFIER size_t hash>::operator()(glm::vec<3, T, Q> const& v) const - { - size_t seed = 0; - hash hasher; - glm::detail::hash_combine(seed, hasher(v.x)); - glm::detail::hash_combine(seed, hasher(v.y)); - glm::detail::hash_combine(seed, hasher(v.z)); - return seed; - } - - template - GLM_FUNC_QUALIFIER size_t hash>::operator()(glm::vec<4, T, Q> const& v) const - { - size_t seed = 0; - hash hasher; - glm::detail::hash_combine(seed, hasher(v.x)); - glm::detail::hash_combine(seed, hasher(v.y)); - glm::detail::hash_combine(seed, hasher(v.z)); - glm::detail::hash_combine(seed, hasher(v.w)); - return seed; - } - - template - GLM_FUNC_QUALIFIER size_t hash>::operator()(glm::qua const& q) const - { - size_t seed = 0; - hash hasher; - glm::detail::hash_combine(seed, hasher(q.x)); - glm::detail::hash_combine(seed, hasher(q.y)); - glm::detail::hash_combine(seed, hasher(q.z)); - glm::detail::hash_combine(seed, hasher(q.w)); - return seed; - } - - template - GLM_FUNC_QUALIFIER size_t hash>::operator()(glm::tdualquat const& q) const - { - size_t seed = 0; - hash> hasher; - glm::detail::hash_combine(seed, hasher(q.real)); - glm::detail::hash_combine(seed, hasher(q.dual)); - return seed; - } - - template - GLM_FUNC_QUALIFIER size_t hash>::operator()(glm::mat<2, 2, T, Q> const& m) const - { - size_t seed = 0; - hash> hasher; - glm::detail::hash_combine(seed, hasher(m[0])); - glm::detail::hash_combine(seed, hasher(m[1])); - return seed; - } - - template - GLM_FUNC_QUALIFIER size_t hash>::operator()(glm::mat<2, 3, T, Q> const& m) const - { - size_t seed = 0; - hash> hasher; - glm::detail::hash_combine(seed, hasher(m[0])); - glm::detail::hash_combine(seed, hasher(m[1])); - return seed; - } - - template - GLM_FUNC_QUALIFIER size_t hash>::operator()(glm::mat<2, 4, T, Q> const& m) const - { - size_t seed = 0; - hash> hasher; - glm::detail::hash_combine(seed, hasher(m[0])); - glm::detail::hash_combine(seed, hasher(m[1])); - return seed; - } - - template - GLM_FUNC_QUALIFIER size_t hash>::operator()(glm::mat<3, 
2, T, Q> const& m) const - { - size_t seed = 0; - hash> hasher; - glm::detail::hash_combine(seed, hasher(m[0])); - glm::detail::hash_combine(seed, hasher(m[1])); - glm::detail::hash_combine(seed, hasher(m[2])); - return seed; - } - - template - GLM_FUNC_QUALIFIER size_t hash>::operator()(glm::mat<3, 3, T, Q> const& m) const - { - size_t seed = 0; - hash> hasher; - glm::detail::hash_combine(seed, hasher(m[0])); - glm::detail::hash_combine(seed, hasher(m[1])); - glm::detail::hash_combine(seed, hasher(m[2])); - return seed; - } - - template - GLM_FUNC_QUALIFIER size_t hash>::operator()(glm::mat<3, 4, T, Q> const& m) const - { - size_t seed = 0; - hash> hasher; - glm::detail::hash_combine(seed, hasher(m[0])); - glm::detail::hash_combine(seed, hasher(m[1])); - glm::detail::hash_combine(seed, hasher(m[2])); - return seed; - } - - template - GLM_FUNC_QUALIFIER size_t hash>::operator()(glm::mat<4, 2, T,Q> const& m) const - { - size_t seed = 0; - hash> hasher; - glm::detail::hash_combine(seed, hasher(m[0])); - glm::detail::hash_combine(seed, hasher(m[1])); - glm::detail::hash_combine(seed, hasher(m[2])); - glm::detail::hash_combine(seed, hasher(m[3])); - return seed; - } - - template - GLM_FUNC_QUALIFIER size_t hash>::operator()(glm::mat<4, 3, T,Q> const& m) const - { - size_t seed = 0; - hash> hasher; - glm::detail::hash_combine(seed, hasher(m[0])); - glm::detail::hash_combine(seed, hasher(m[1])); - glm::detail::hash_combine(seed, hasher(m[2])); - glm::detail::hash_combine(seed, hasher(m[3])); - return seed; - } - - template - GLM_FUNC_QUALIFIER size_t hash>::operator()(glm::mat<4, 4, T, Q> const& m) const - { - size_t seed = 0; - hash> hasher; - glm::detail::hash_combine(seed, hasher(m[0])); - glm::detail::hash_combine(seed, hasher(m[1])); - glm::detail::hash_combine(seed, hasher(m[2])); - glm::detail::hash_combine(seed, hasher(m[3])); - return seed; - } -} diff --git a/third_party/glm/gtx/integer.hpp b/third_party/glm/gtx/integer.hpp deleted file mode 100755 index d0b4c61..0000000 --- a/third_party/glm/gtx/integer.hpp +++ /dev/null @@ -1,76 +0,0 @@ -/// @ref gtx_integer -/// @file glm/gtx/integer.hpp -/// -/// @see core (dependence) -/// -/// @defgroup gtx_integer GLM_GTX_integer -/// @ingroup gtx -/// -/// Include to use the features of this extension. -/// -/// Add support for integer for core functions - -#pragma once - -// Dependency: -#include "../glm.hpp" -#include "../gtc/integer.hpp" - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# ifndef GLM_ENABLE_EXPERIMENTAL -# pragma message("GLM: GLM_GTX_integer is an experimental extension and may change in the future. Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.") -# else -# pragma message("GLM: GLM_GTX_integer extension included") -# endif -#endif - -namespace glm -{ - /// @addtogroup gtx_integer - /// @{ - - //! Returns x raised to the y power. - //! From GLM_GTX_integer extension. - GLM_FUNC_DECL int pow(int x, uint y); - - //! Returns the positive square root of x. - //! From GLM_GTX_integer extension. - GLM_FUNC_DECL int sqrt(int x); - - //! Returns the floor log2 of x. - //! From GLM_GTX_integer extension. - GLM_FUNC_DECL unsigned int floor_log2(unsigned int x); - - //! Modulus. Returns x - y * floor(x / y) for each component in x using the floating point value y. - //! From GLM_GTX_integer extension. - GLM_FUNC_DECL int mod(int x, int y); - - //! Return the factorial value of a number (!12 max, integer only) - //! From GLM_GTX_integer extension. 
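	// Worked values for orientation (illustrative): factorial(4) == 24 and
	// factorial(12) == 479001600; anything larger overflows 32-bit storage,
	// hence the "!12 max" note above.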
- template - GLM_FUNC_DECL genType factorial(genType const& x); - - //! 32bit signed integer. - //! From GLM_GTX_integer extension. - typedef signed int sint; - - //! Returns x raised to the y power. - //! From GLM_GTX_integer extension. - GLM_FUNC_DECL uint pow(uint x, uint y); - - //! Returns the positive square root of x. - //! From GLM_GTX_integer extension. - GLM_FUNC_DECL uint sqrt(uint x); - - //! Modulus. Returns x - y * floor(x / y) for each component in x using the floating point value y. - //! From GLM_GTX_integer extension. - GLM_FUNC_DECL uint mod(uint x, uint y); - - //! Returns the number of leading zeros. - //! From GLM_GTX_integer extension. - GLM_FUNC_DECL uint nlz(uint x); - - /// @} -}//namespace glm - -#include "integer.inl" diff --git a/third_party/glm/gtx/integer.inl b/third_party/glm/gtx/integer.inl deleted file mode 100755 index 956366b..0000000 --- a/third_party/glm/gtx/integer.inl +++ /dev/null @@ -1,185 +0,0 @@ -/// @ref gtx_integer - -namespace glm -{ - // pow - GLM_FUNC_QUALIFIER int pow(int x, uint y) - { - if(y == 0) - return x >= 0 ? 1 : -1; - - int result = x; - for(uint i = 1; i < y; ++i) - result *= x; - return result; - } - - // sqrt: From Christopher J. Musial, An integer square root, Graphics Gems, 1990, page 387 - GLM_FUNC_QUALIFIER int sqrt(int x) - { - if(x <= 1) return x; - - int NextTrial = x >> 1; - int CurrentAnswer; - - do - { - CurrentAnswer = NextTrial; - NextTrial = (NextTrial + x / NextTrial) >> 1; - } while(NextTrial < CurrentAnswer); - - return CurrentAnswer; - } - -// Henry Gordon Dietz: http://aggregate.org/MAGIC/ -namespace detail -{ - GLM_FUNC_QUALIFIER unsigned int ones32(unsigned int x) - { - /* 32-bit recursive reduction using SWAR... - but first step is mapping 2-bit values - into sum of 2 1-bit values in sneaky way - */ - x -= ((x >> 1) & 0x55555555); - x = (((x >> 2) & 0x33333333) + (x & 0x33333333)); - x = (((x >> 4) + x) & 0x0f0f0f0f); - x += (x >> 8); - x += (x >> 16); - return(x & 0x0000003f); - } -}//namespace detail - - // Henry Gordon Dietz: http://aggregate.org/MAGIC/ -/* - GLM_FUNC_QUALIFIER unsigned int floor_log2(unsigned int x) - { - x |= (x >> 1); - x |= (x >> 2); - x |= (x >> 4); - x |= (x >> 8); - x |= (x >> 16); - - return _detail::ones32(x) >> 1; - } -*/ - // mod - GLM_FUNC_QUALIFIER int mod(int x, int y) - { - return ((x % y) + y) % y; - } - - // factorial (!12 max, integer only) - template - GLM_FUNC_QUALIFIER genType factorial(genType const& x) - { - genType Temp = x; - genType Result; - for(Result = 1; Temp > 1; --Temp) - Result *= Temp; - return Result; - } - - template - GLM_FUNC_QUALIFIER vec<2, T, Q> factorial( - vec<2, T, Q> const& x) - { - return vec<2, T, Q>( - factorial(x.x), - factorial(x.y)); - } - - template - GLM_FUNC_QUALIFIER vec<3, T, Q> factorial( - vec<3, T, Q> const& x) - { - return vec<3, T, Q>( - factorial(x.x), - factorial(x.y), - factorial(x.z)); - } - - template - GLM_FUNC_QUALIFIER vec<4, T, Q> factorial( - vec<4, T, Q> const& x) - { - return vec<4, T, Q>( - factorial(x.x), - factorial(x.y), - factorial(x.z), - factorial(x.w)); - } - - GLM_FUNC_QUALIFIER uint pow(uint x, uint y) - { - if (y == 0) - return 1u; - - uint result = x; - for(uint i = 1; i < y; ++i) - result *= x; - return result; - } - - GLM_FUNC_QUALIFIER uint sqrt(uint x) - { - if(x <= 1) return x; - - uint NextTrial = x >> 1; - uint CurrentAnswer; - - do - { - CurrentAnswer = NextTrial; - NextTrial = (NextTrial + x / NextTrial) >> 1; - } while(NextTrial < CurrentAnswer); - - return CurrentAnswer; - } - - 
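	// Worked example of the Newton iteration above (illustrative): sqrt(17u)
	// tries 8, 5, 4, then 4 again; the loop stops once the trial no longer
	// decreases and returns 4, the floor of the real square root.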
GLM_FUNC_QUALIFIER uint mod(uint x, uint y) - { - return x - y * (x / y); - } - -#if(GLM_COMPILER & (GLM_COMPILER_VC | GLM_COMPILER_GCC)) - - GLM_FUNC_QUALIFIER unsigned int nlz(unsigned int x) - { - return 31u - findMSB(x); - } - -#else - - // Hackers Delight: http://www.hackersdelight.org/HDcode/nlz.c.txt - GLM_FUNC_QUALIFIER unsigned int nlz(unsigned int x) - { - int y, m, n; - - y = -int(x >> 16); // If left half of x is 0, - m = (y >> 16) & 16; // set n = 16. If left half - n = 16 - m; // is nonzero, set n = 0 and - x = x >> m; // shift x right 16. - // Now x is of the form 0000xxxx. - y = x - 0x100; // If positions 8-15 are 0, - m = (y >> 16) & 8; // add 8 to n and shift x left 8. - n = n + m; - x = x << m; - - y = x - 0x1000; // If positions 12-15 are 0, - m = (y >> 16) & 4; // add 4 to n and shift x left 4. - n = n + m; - x = x << m; - - y = x - 0x4000; // If positions 14-15 are 0, - m = (y >> 16) & 2; // add 2 to n and shift x left 2. - n = n + m; - x = x << m; - - y = x >> 14; // Set y = 0, 1, 2, or 3. - m = y & ~(y >> 1); // Set m = 0, 1, 2, or 2 resp. - return unsigned(n + 2 - m); - } - -#endif//(GLM_COMPILER) - -}//namespace glm diff --git a/third_party/glm/gtx/intersect.hpp b/third_party/glm/gtx/intersect.hpp deleted file mode 100755 index 3c78f2b..0000000 --- a/third_party/glm/gtx/intersect.hpp +++ /dev/null @@ -1,92 +0,0 @@ -/// @ref gtx_intersect -/// @file glm/gtx/intersect.hpp -/// -/// @see core (dependence) -/// @see gtx_closest_point (dependence) -/// -/// @defgroup gtx_intersect GLM_GTX_intersect -/// @ingroup gtx -/// -/// Include to use the features of this extension. -/// -/// Add intersection functions - -#pragma once - -// Dependency: -#include -#include -#include "../glm.hpp" -#include "../geometric.hpp" -#include "../gtx/closest_point.hpp" -#include "../gtx/vector_query.hpp" - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# ifndef GLM_ENABLE_EXPERIMENTAL -# pragma message("GLM: GLM_GTX_closest_point is an experimental extension and may change in the future. Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.") -# else -# pragma message("GLM: GLM_GTX_closest_point extension included") -# endif -#endif - -namespace glm -{ - /// @addtogroup gtx_intersect - /// @{ - - //! Compute the intersection of a ray and a plane. - //! Ray direction and plane normal must be unit length. - //! From GLM_GTX_intersect extension. - template - GLM_FUNC_DECL bool intersectRayPlane( - genType const& orig, genType const& dir, - genType const& planeOrig, genType const& planeNormal, - typename genType::value_type & intersectionDistance); - - //! Compute the intersection of a ray and a triangle. - /// Based om Tomas Möller implementation http://fileadmin.cs.lth.se/cs/Personal/Tomas_Akenine-Moller/raytri/ - //! From GLM_GTX_intersect extension. - template - GLM_FUNC_DECL bool intersectRayTriangle( - vec<3, T, Q> const& orig, vec<3, T, Q> const& dir, - vec<3, T, Q> const& v0, vec<3, T, Q> const& v1, vec<3, T, Q> const& v2, - vec<2, T, Q>& baryPosition, T& distance); - - //! Compute the intersection of a line and a triangle. - //! From GLM_GTX_intersect extension. - template - GLM_FUNC_DECL bool intersectLineTriangle( - genType const& orig, genType const& dir, - genType const& vert0, genType const& vert1, genType const& vert2, - genType & position); - - //! Compute the intersection distance of a ray and a sphere. - //! The ray direction vector is unit length. - //! From GLM_GTX_intersect extension. 
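	// Illustrative call (values are an assumption, not taken from this header):
	// with rayStarting at the origin, rayNormalizedDirection (0,0,1),
	// sphereCenter (0,0,5) and sphereRadiusSquered 1, the function returns true
	// and writes the nearest hit distance, 4, to intersectionDistance.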
- template - GLM_FUNC_DECL bool intersectRaySphere( - genType const& rayStarting, genType const& rayNormalizedDirection, - genType const& sphereCenter, typename genType::value_type const sphereRadiusSquered, - typename genType::value_type & intersectionDistance); - - //! Compute the intersection of a ray and a sphere. - //! From GLM_GTX_intersect extension. - template - GLM_FUNC_DECL bool intersectRaySphere( - genType const& rayStarting, genType const& rayNormalizedDirection, - genType const& sphereCenter, const typename genType::value_type sphereRadius, - genType & intersectionPosition, genType & intersectionNormal); - - //! Compute the intersection of a line and a sphere. - //! From GLM_GTX_intersect extension - template - GLM_FUNC_DECL bool intersectLineSphere( - genType const& point0, genType const& point1, - genType const& sphereCenter, typename genType::value_type sphereRadius, - genType & intersectionPosition1, genType & intersectionNormal1, - genType & intersectionPosition2 = genType(), genType & intersectionNormal2 = genType()); - - /// @} -}//namespace glm - -#include "intersect.inl" diff --git a/third_party/glm/gtx/intersect.inl b/third_party/glm/gtx/intersect.inl deleted file mode 100755 index 54ecb4d..0000000 --- a/third_party/glm/gtx/intersect.inl +++ /dev/null @@ -1,200 +0,0 @@ -/// @ref gtx_intersect - -namespace glm -{ - template - GLM_FUNC_QUALIFIER bool intersectRayPlane - ( - genType const& orig, genType const& dir, - genType const& planeOrig, genType const& planeNormal, - typename genType::value_type & intersectionDistance - ) - { - typename genType::value_type d = glm::dot(dir, planeNormal); - typename genType::value_type Epsilon = std::numeric_limits::epsilon(); - - if(glm::abs(d) > Epsilon) // if dir and planeNormal are not perpendicular - { - typename genType::value_type const tmp_intersectionDistance = glm::dot(planeOrig - orig, planeNormal) / d; - if (tmp_intersectionDistance > static_cast(0)) { // allow only intersections - intersectionDistance = tmp_intersectionDistance; - return true; - } - } - - return false; - } - - template - GLM_FUNC_QUALIFIER bool intersectRayTriangle - ( - vec<3, T, Q> const& orig, vec<3, T, Q> const& dir, - vec<3, T, Q> const& vert0, vec<3, T, Q> const& vert1, vec<3, T, Q> const& vert2, - vec<2, T, Q>& baryPosition, T& distance - ) - { - // find vectors for two edges sharing vert0 - vec<3, T, Q> const edge1 = vert1 - vert0; - vec<3, T, Q> const edge2 = vert2 - vert0; - - // begin calculating determinant - also used to calculate U parameter - vec<3, T, Q> const p = glm::cross(dir, edge2); - - // if determinant is near zero, ray lies in plane of triangle - T const det = glm::dot(edge1, p); - - vec<3, T, Q> Perpendicular(0); - - if(det > std::numeric_limits::epsilon()) - { - // calculate distance from vert0 to ray origin - vec<3, T, Q> const dist = orig - vert0; - - // calculate U parameter and test bounds - baryPosition.x = glm::dot(dist, p); - if(baryPosition.x < static_cast(0) || baryPosition.x > det) - return false; - - // prepare to test V parameter - Perpendicular = glm::cross(dist, edge1); - - // calculate V parameter and test bounds - baryPosition.y = glm::dot(dir, Perpendicular); - if((baryPosition.y < static_cast(0)) || ((baryPosition.x + baryPosition.y) > det)) - return false; - } - else if(det < -std::numeric_limits::epsilon()) - { - // calculate distance from vert0 to ray origin - vec<3, T, Q> const dist = orig - vert0; - - // calculate U parameter and test bounds - baryPosition.x = glm::dot(dist, p); - if((baryPosition.x > 
static_cast(0)) || (baryPosition.x < det)) - return false; - - // prepare to test V parameter - Perpendicular = glm::cross(dist, edge1); - - // calculate V parameter and test bounds - baryPosition.y = glm::dot(dir, Perpendicular); - if((baryPosition.y > static_cast(0)) || (baryPosition.x + baryPosition.y < det)) - return false; - } - else - return false; // ray is parallel to the plane of the triangle - - T inv_det = static_cast(1) / det; - - // calculate distance, ray intersects triangle - distance = glm::dot(edge2, Perpendicular) * inv_det; - baryPosition *= inv_det; - - return true; - } - - template - GLM_FUNC_QUALIFIER bool intersectLineTriangle - ( - genType const& orig, genType const& dir, - genType const& vert0, genType const& vert1, genType const& vert2, - genType & position - ) - { - typename genType::value_type Epsilon = std::numeric_limits::epsilon(); - - genType edge1 = vert1 - vert0; - genType edge2 = vert2 - vert0; - - genType Perpendicular = cross(dir, edge2); - - float det = dot(edge1, Perpendicular); - - if (det > -Epsilon && det < Epsilon) - return false; - typename genType::value_type inv_det = typename genType::value_type(1) / det; - - genType Tengant = orig - vert0; - - position.y = dot(Tengant, Perpendicular) * inv_det; - if (position.y < typename genType::value_type(0) || position.y > typename genType::value_type(1)) - return false; - - genType Cotengant = cross(Tengant, edge1); - - position.z = dot(dir, Cotengant) * inv_det; - if (position.z < typename genType::value_type(0) || position.y + position.z > typename genType::value_type(1)) - return false; - - position.x = dot(edge2, Cotengant) * inv_det; - - return true; - } - - template - GLM_FUNC_QUALIFIER bool intersectRaySphere - ( - genType const& rayStarting, genType const& rayNormalizedDirection, - genType const& sphereCenter, const typename genType::value_type sphereRadiusSquered, - typename genType::value_type & intersectionDistance - ) - { - typename genType::value_type Epsilon = std::numeric_limits::epsilon(); - genType diff = sphereCenter - rayStarting; - typename genType::value_type t0 = dot(diff, rayNormalizedDirection); - typename genType::value_type dSquared = dot(diff, diff) - t0 * t0; - if( dSquared > sphereRadiusSquered ) - { - return false; - } - typename genType::value_type t1 = sqrt( sphereRadiusSquered - dSquared ); - intersectionDistance = t0 > t1 + Epsilon ? 
t0 - t1 : t0 + t1; - return intersectionDistance > Epsilon; - } - - template - GLM_FUNC_QUALIFIER bool intersectRaySphere - ( - genType const& rayStarting, genType const& rayNormalizedDirection, - genType const& sphereCenter, const typename genType::value_type sphereRadius, - genType & intersectionPosition, genType & intersectionNormal - ) - { - typename genType::value_type distance; - if( intersectRaySphere( rayStarting, rayNormalizedDirection, sphereCenter, sphereRadius * sphereRadius, distance ) ) - { - intersectionPosition = rayStarting + rayNormalizedDirection * distance; - intersectionNormal = (intersectionPosition - sphereCenter) / sphereRadius; - return true; - } - return false; - } - - template - GLM_FUNC_QUALIFIER bool intersectLineSphere - ( - genType const& point0, genType const& point1, - genType const& sphereCenter, typename genType::value_type sphereRadius, - genType & intersectionPoint1, genType & intersectionNormal1, - genType & intersectionPoint2, genType & intersectionNormal2 - ) - { - typename genType::value_type Epsilon = std::numeric_limits::epsilon(); - genType dir = normalize(point1 - point0); - genType diff = sphereCenter - point0; - typename genType::value_type t0 = dot(diff, dir); - typename genType::value_type dSquared = dot(diff, diff) - t0 * t0; - if( dSquared > sphereRadius * sphereRadius ) - { - return false; - } - typename genType::value_type t1 = sqrt( sphereRadius * sphereRadius - dSquared ); - if( t0 < t1 + Epsilon ) - t1 = -t1; - intersectionPoint1 = point0 + dir * (t0 - t1); - intersectionNormal1 = (intersectionPoint1 - sphereCenter) / sphereRadius; - intersectionPoint2 = point0 + dir * (t0 + t1); - intersectionNormal2 = (intersectionPoint2 - sphereCenter) / sphereRadius; - return true; - } -}//namespace glm diff --git a/third_party/glm/gtx/io.hpp b/third_party/glm/gtx/io.hpp deleted file mode 100755 index 8d974f0..0000000 --- a/third_party/glm/gtx/io.hpp +++ /dev/null @@ -1,201 +0,0 @@ -/// @ref gtx_io -/// @file glm/gtx/io.hpp -/// @author Jan P Springer (regnirpsj@gmail.com) -/// -/// @see core (dependence) -/// @see gtc_matrix_access (dependence) -/// @see gtc_quaternion (dependence) -/// -/// @defgroup gtx_io GLM_GTX_io -/// @ingroup gtx -/// -/// Include to use the features of this extension. -/// -/// std::[w]ostream support for glm types -/// -/// std::[w]ostream support for glm types + qualifier/width/etc. manipulators -/// based on howard hinnant's std::chrono io proposal -/// [http://home.roadrunner.com/~hinnant/bloomington/chrono_io.html] - -#pragma once - -// Dependency: -#include "../glm.hpp" -#include "../gtx/quaternion.hpp" - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# ifndef GLM_ENABLE_EXPERIMENTAL -# pragma message("GLM: GLM_GTX_io is an experimental extension and may change in the future. 
Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.") -# else -# pragma message("GLM: GLM_GTX_io extension included") -# endif -#endif - -#include // std::basic_ostream<> (fwd) -#include // std::locale, std::locale::facet, std::locale::id -#include // std::pair<> - -namespace glm -{ - /// @addtogroup gtx_io - /// @{ - - namespace io - { - enum order_type { column_major, row_major}; - - template - class format_punct : public std::locale::facet - { - typedef CTy char_type; - - public: - - static std::locale::id id; - - bool formatted; - unsigned precision; - unsigned width; - char_type separator; - char_type delim_left; - char_type delim_right; - char_type space; - char_type newline; - order_type order; - - GLM_FUNC_DECL explicit format_punct(size_t a = 0); - GLM_FUNC_DECL explicit format_punct(format_punct const&); - }; - - template > - class basic_state_saver { - - public: - - GLM_FUNC_DECL explicit basic_state_saver(std::basic_ios&); - GLM_FUNC_DECL ~basic_state_saver(); - - private: - - typedef ::std::basic_ios state_type; - typedef typename state_type::char_type char_type; - typedef ::std::ios_base::fmtflags flags_type; - typedef ::std::streamsize streamsize_type; - typedef ::std::locale const locale_type; - - state_type& state_; - flags_type flags_; - streamsize_type precision_; - streamsize_type width_; - char_type fill_; - locale_type locale_; - - GLM_FUNC_DECL basic_state_saver& operator=(basic_state_saver const&); - }; - - typedef basic_state_saver state_saver; - typedef basic_state_saver wstate_saver; - - template > - class basic_format_saver - { - public: - - GLM_FUNC_DECL explicit basic_format_saver(std::basic_ios&); - GLM_FUNC_DECL ~basic_format_saver(); - - private: - - basic_state_saver const bss_; - - GLM_FUNC_DECL basic_format_saver& operator=(basic_format_saver const&); - }; - - typedef basic_format_saver format_saver; - typedef basic_format_saver wformat_saver; - - struct precision - { - unsigned value; - - GLM_FUNC_DECL explicit precision(unsigned); - }; - - struct width - { - unsigned value; - - GLM_FUNC_DECL explicit width(unsigned); - }; - - template - struct delimeter - { - CTy value[3]; - - GLM_FUNC_DECL explicit delimeter(CTy /* left */, CTy /* right */, CTy /* separator */ = ','); - }; - - struct order - { - order_type value; - - GLM_FUNC_DECL explicit order(order_type); - }; - - // functions, inlined (inline) - - template - FTy const& get_facet(std::basic_ios&); - template - std::basic_ios& formatted(std::basic_ios&); - template - std::basic_ios& unformattet(std::basic_ios&); - - template - std::basic_ostream& operator<<(std::basic_ostream&, precision const&); - template - std::basic_ostream& operator<<(std::basic_ostream&, width const&); - template - std::basic_ostream& operator<<(std::basic_ostream&, delimeter const&); - template - std::basic_ostream& operator<<(std::basic_ostream&, order const&); - }//namespace io - - template - GLM_FUNC_DECL std::basic_ostream& operator<<(std::basic_ostream&, qua const&); - template - GLM_FUNC_DECL std::basic_ostream& operator<<(std::basic_ostream&, vec<1, T, Q> const&); - template - GLM_FUNC_DECL std::basic_ostream& operator<<(std::basic_ostream&, vec<2, T, Q> const&); - template - GLM_FUNC_DECL std::basic_ostream& operator<<(std::basic_ostream&, vec<3, T, Q> const&); - template - GLM_FUNC_DECL std::basic_ostream& operator<<(std::basic_ostream&, vec<4, T, Q> const&); - template - GLM_FUNC_DECL std::basic_ostream& operator<<(std::basic_ostream&, mat<2, 2, T, Q> const&); - template - 
GLM_FUNC_DECL std::basic_ostream& operator<<(std::basic_ostream&, mat<2, 3, T, Q> const&); - template - GLM_FUNC_DECL std::basic_ostream& operator<<(std::basic_ostream&, mat<2, 4, T, Q> const&); - template - GLM_FUNC_DECL std::basic_ostream& operator<<(std::basic_ostream&, mat<3, 2, T, Q> const&); - template - GLM_FUNC_DECL std::basic_ostream& operator<<(std::basic_ostream&, mat<3, 3, T, Q> const&); - template - GLM_FUNC_DECL std::basic_ostream& operator<<(std::basic_ostream&, mat<3, 4, T, Q> const&); - template - GLM_FUNC_DECL std::basic_ostream& operator<<(std::basic_ostream&, mat<4, 2, T, Q> const&); - template - GLM_FUNC_DECL std::basic_ostream& operator<<(std::basic_ostream&, mat<4, 3, T, Q> const&); - template - GLM_FUNC_DECL std::basic_ostream& operator<<(std::basic_ostream&, mat<4, 4, T, Q> const&); - - template - GLM_FUNC_DECL std::basic_ostream & operator<<(std::basic_ostream &, - std::pair const, mat<4, 4, T, Q> const> const&); - - /// @} -}//namespace glm - -#include "io.inl" diff --git a/third_party/glm/gtx/io.inl b/third_party/glm/gtx/io.inl deleted file mode 100755 index a3a1bb6..0000000 --- a/third_party/glm/gtx/io.inl +++ /dev/null @@ -1,440 +0,0 @@ -/// @ref gtx_io -/// @author Jan P Springer (regnirpsj@gmail.com) - -#include // std::fixed, std::setfill<>, std::setprecision, std::right, std::setw -#include // std::basic_ostream<> -#include "../gtc/matrix_access.hpp" // glm::col, glm::row -#include "../gtx/type_trait.hpp" // glm::type<> - -namespace glm{ -namespace io -{ - template - GLM_FUNC_QUALIFIER format_punct::format_punct(size_t a) - : std::locale::facet(a) - , formatted(true) - , precision(3) - , width(1 + 4 + 1 + precision) - , separator(',') - , delim_left('[') - , delim_right(']') - , space(' ') - , newline('\n') - , order(column_major) - {} - - template - GLM_FUNC_QUALIFIER format_punct::format_punct(format_punct const& a) - : std::locale::facet(0) - , formatted(a.formatted) - , precision(a.precision) - , width(a.width) - , separator(a.separator) - , delim_left(a.delim_left) - , delim_right(a.delim_right) - , space(a.space) - , newline(a.newline) - , order(a.order) - {} - - template std::locale::id format_punct::id; - - template - GLM_FUNC_QUALIFIER basic_state_saver::basic_state_saver(std::basic_ios& a) - : state_(a) - , flags_(a.flags()) - , precision_(a.precision()) - , width_(a.width()) - , fill_(a.fill()) - , locale_(a.getloc()) - {} - - template - GLM_FUNC_QUALIFIER basic_state_saver::~basic_state_saver() - { - state_.imbue(locale_); - state_.fill(fill_); - state_.width(width_); - state_.precision(precision_); - state_.flags(flags_); - } - - template - GLM_FUNC_QUALIFIER basic_format_saver::basic_format_saver(std::basic_ios& a) - : bss_(a) - { - a.imbue(std::locale(a.getloc(), new format_punct(get_facet >(a)))); - } - - template - GLM_FUNC_QUALIFIER - basic_format_saver::~basic_format_saver() - {} - - GLM_FUNC_QUALIFIER precision::precision(unsigned a) - : value(a) - {} - - GLM_FUNC_QUALIFIER width::width(unsigned a) - : value(a) - {} - - template - GLM_FUNC_QUALIFIER delimeter::delimeter(CTy a, CTy b, CTy c) - : value() - { - value[0] = a; - value[1] = b; - value[2] = c; - } - - GLM_FUNC_QUALIFIER order::order(order_type a) - : value(a) - {} - - template - GLM_FUNC_QUALIFIER FTy const& get_facet(std::basic_ios& ios) - { - if(!std::has_facet(ios.getloc())) - ios.imbue(std::locale(ios.getloc(), new FTy)); - - return std::use_facet(ios.getloc()); - } - - template - GLM_FUNC_QUALIFIER std::basic_ios& formatted(std::basic_ios& ios) - { - 
const_cast&>(get_facet >(ios)).formatted = true; - return ios; - } - - template - GLM_FUNC_QUALIFIER std::basic_ios& unformatted(std::basic_ios& ios) - { - const_cast&>(get_facet >(ios)).formatted = false; - return ios; - } - - template - GLM_FUNC_QUALIFIER std::basic_ostream& operator<<(std::basic_ostream& os, precision const& a) - { - const_cast&>(get_facet >(os)).precision = a.value; - return os; - } - - template - GLM_FUNC_QUALIFIER std::basic_ostream& operator<<(std::basic_ostream& os, width const& a) - { - const_cast&>(get_facet >(os)).width = a.value; - return os; - } - - template - GLM_FUNC_QUALIFIER std::basic_ostream& operator<<(std::basic_ostream& os, delimeter const& a) - { - format_punct & fmt(const_cast&>(get_facet >(os))); - - fmt.delim_left = a.value[0]; - fmt.delim_right = a.value[1]; - fmt.separator = a.value[2]; - - return os; - } - - template - GLM_FUNC_QUALIFIER std::basic_ostream& operator<<(std::basic_ostream& os, order const& a) - { - const_cast&>(get_facet >(os)).order = a.value; - return os; - } -} // namespace io - -namespace detail -{ - template - GLM_FUNC_QUALIFIER std::basic_ostream& - print_vector_on(std::basic_ostream& os, V const& a) - { - typename std::basic_ostream::sentry const cerberus(os); - - if(cerberus) - { - io::format_punct const& fmt(io::get_facet >(os)); - - length_t const& components(type::components); - - if(fmt.formatted) - { - io::basic_state_saver const bss(os); - - os << std::fixed << std::right << std::setprecision(fmt.precision) << std::setfill(fmt.space) << fmt.delim_left; - - for(length_t i(0); i < components; ++i) - { - os << std::setw(fmt.width) << a[i]; - if(components-1 != i) - os << fmt.separator; - } - - os << fmt.delim_right; - } - else - { - for(length_t i(0); i < components; ++i) - { - os << a[i]; - - if(components-1 != i) - os << fmt.space; - } - } - } - - return os; - } -}//namespace detail - - template - GLM_FUNC_QUALIFIER std::basic_ostream& operator<<(std::basic_ostream& os, qua const& a) - { - return detail::print_vector_on(os, a); - } - - template - GLM_FUNC_QUALIFIER std::basic_ostream& operator<<(std::basic_ostream& os, vec<1, T, Q> const& a) - { - return detail::print_vector_on(os, a); - } - - template - GLM_FUNC_QUALIFIER std::basic_ostream& operator<<(std::basic_ostream& os, vec<2, T, Q> const& a) - { - return detail::print_vector_on(os, a); - } - - template - GLM_FUNC_QUALIFIER std::basic_ostream& operator<<(std::basic_ostream& os, vec<3, T, Q> const& a) - { - return detail::print_vector_on(os, a); - } - - template - GLM_FUNC_QUALIFIER std::basic_ostream& operator<<(std::basic_ostream& os, vec<4, T, Q> const& a) - { - return detail::print_vector_on(os, a); - } - -namespace detail -{ - template class M, length_t C, length_t R, typename T, qualifier Q> - GLM_FUNC_QUALIFIER std::basic_ostream& print_matrix_on(std::basic_ostream& os, M const& a) - { - typename std::basic_ostream::sentry const cerberus(os); - - if(cerberus) - { - io::format_punct const& fmt(io::get_facet >(os)); - - length_t const& cols(type >::cols); - length_t const& rows(type >::rows); - - if(fmt.formatted) - { - os << fmt.newline << fmt.delim_left; - - switch(fmt.order) - { - case io::column_major: - { - for(length_t i(0); i < rows; ++i) - { - if (0 != i) - os << fmt.space; - - os << row(a, i); - - if(rows-1 != i) - os << fmt.newline; - } - } - break; - - case io::row_major: - { - for(length_t i(0); i < cols; ++i) - { - if(0 != i) - os << fmt.space; - - os << column(a, i); - - if(cols-1 != i) - os << fmt.newline; - } - } - break; - } - - os << 
fmt.delim_right; - } - else - { - switch (fmt.order) - { - case io::column_major: - { - for(length_t i(0); i < cols; ++i) - { - os << column(a, i); - - if(cols - 1 != i) - os << fmt.space; - } - } - break; - - case io::row_major: - { - for (length_t i(0); i < rows; ++i) - { - os << row(a, i); - - if (rows-1 != i) - os << fmt.space; - } - } - break; - } - } - } - - return os; - } -}//namespace detail - - template - GLM_FUNC_QUALIFIER std::basic_ostream& operator<<(std::basic_ostream& os, mat<2, 2, T, Q> const& a) - { - return detail::print_matrix_on(os, a); - } - - template - GLM_FUNC_QUALIFIER std::basic_ostream& operator<<(std::basic_ostream& os, mat<2, 3, T, Q> const& a) - { - return detail::print_matrix_on(os, a); - } - - template - GLM_FUNC_QUALIFIER std::basic_ostream& operator<<(std::basic_ostream& os, mat<2, 4, T, Q> const& a) - { - return detail::print_matrix_on(os, a); - } - - template - GLM_FUNC_QUALIFIER std::basic_ostream& operator<<(std::basic_ostream& os, mat<3, 2, T, Q> const& a) - { - return detail::print_matrix_on(os, a); - } - - template - GLM_FUNC_QUALIFIER std::basic_ostream& operator<<(std::basic_ostream& os, mat<3, 3, T, Q> const& a) - { - return detail::print_matrix_on(os, a); - } - - template - GLM_FUNC_QUALIFIER std::basic_ostream & operator<<(std::basic_ostream& os, mat<3, 4, T, Q> const& a) - { - return detail::print_matrix_on(os, a); - } - - template - GLM_FUNC_QUALIFIER std::basic_ostream & operator<<(std::basic_ostream& os, mat<4, 2, T, Q> const& a) - { - return detail::print_matrix_on(os, a); - } - - template - GLM_FUNC_QUALIFIER std::basic_ostream & operator<<(std::basic_ostream& os, mat<4, 3, T, Q> const& a) - { - return detail::print_matrix_on(os, a); - } - - template - GLM_FUNC_QUALIFIER std::basic_ostream & operator<<(std::basic_ostream& os, mat<4, 4, T, Q> const& a) - { - return detail::print_matrix_on(os, a); - } - -namespace detail -{ - template class M, length_t C, length_t R, typename T, qualifier Q> - GLM_FUNC_QUALIFIER std::basic_ostream& print_matrix_pair_on(std::basic_ostream& os, std::pair const, M const> const& a) - { - typename std::basic_ostream::sentry const cerberus(os); - - if(cerberus) - { - io::format_punct const& fmt(io::get_facet >(os)); - M const& ml(a.first); - M const& mr(a.second); - length_t const& cols(type >::cols); - length_t const& rows(type >::rows); - - if(fmt.formatted) - { - os << fmt.newline << fmt.delim_left; - - switch(fmt.order) - { - case io::column_major: - { - for(length_t i(0); i < rows; ++i) - { - if(0 != i) - os << fmt.space; - - os << row(ml, i) << ((rows-1 != i) ? fmt.space : fmt.delim_right) << fmt.space << ((0 != i) ? fmt.space : fmt.delim_left) << row(mr, i); - - if(rows-1 != i) - os << fmt.newline; - } - } - break; - case io::row_major: - { - for(length_t i(0); i < cols; ++i) - { - if(0 != i) - os << fmt.space; - - os << column(ml, i) << ((cols-1 != i) ? fmt.space : fmt.delim_right) << fmt.space << ((0 != i) ? 
fmt.space : fmt.delim_left) << column(mr, i); - - if(cols-1 != i) - os << fmt.newline; - } - } - break; - } - - os << fmt.delim_right; - } - else - { - os << ml << fmt.space << mr; - } - } - - return os; - } -}//namespace detail - - template - GLM_FUNC_QUALIFIER std::basic_ostream& operator<<( - std::basic_ostream & os, - std::pair const, - mat<4, 4, T, Q> const> const& a) - { - return detail::print_matrix_pair_on(os, a); - } -}//namespace glm diff --git a/third_party/glm/gtx/log_base.hpp b/third_party/glm/gtx/log_base.hpp deleted file mode 100755 index ba28c9d..0000000 --- a/third_party/glm/gtx/log_base.hpp +++ /dev/null @@ -1,48 +0,0 @@ -/// @ref gtx_log_base -/// @file glm/gtx/log_base.hpp -/// -/// @see core (dependence) -/// -/// @defgroup gtx_log_base GLM_GTX_log_base -/// @ingroup gtx -/// -/// Include to use the features of this extension. -/// -/// Logarithm for any base. base can be a vector or a scalar. - -#pragma once - -// Dependency: -#include "../glm.hpp" - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# ifndef GLM_ENABLE_EXPERIMENTAL -# pragma message("GLM: GLM_GTX_log_base is an experimental extension and may change in the future. Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.") -# else -# pragma message("GLM: GLM_GTX_log_base extension included") -# endif -#endif - -namespace glm -{ - /// @addtogroup gtx_log_base - /// @{ - - /// Logarithm for any base. - /// From GLM_GTX_log_base. - template - GLM_FUNC_DECL genType log( - genType const& x, - genType const& base); - - /// Logarithm for any base. - /// From GLM_GTX_log_base. - template - GLM_FUNC_DECL vec sign( - vec const& x, - vec const& base); - - /// @} -}//namespace glm - -#include "log_base.inl" diff --git a/third_party/glm/gtx/log_base.inl b/third_party/glm/gtx/log_base.inl deleted file mode 100755 index 4bbb8e8..0000000 --- a/third_party/glm/gtx/log_base.inl +++ /dev/null @@ -1,16 +0,0 @@ -/// @ref gtx_log_base - -namespace glm -{ - template - GLM_FUNC_QUALIFIER genType log(genType const& x, genType const& base) - { - return glm::log(x) / glm::log(base); - } - - template - GLM_FUNC_QUALIFIER vec log(vec const& x, vec const& base) - { - return glm::log(x) / glm::log(base); - } -}//namespace glm diff --git a/third_party/glm/gtx/matrix_cross_product.hpp b/third_party/glm/gtx/matrix_cross_product.hpp deleted file mode 100755 index 1e585f9..0000000 --- a/third_party/glm/gtx/matrix_cross_product.hpp +++ /dev/null @@ -1,47 +0,0 @@ -/// @ref gtx_matrix_cross_product -/// @file glm/gtx/matrix_cross_product.hpp -/// -/// @see core (dependence) -/// @see gtx_extented_min_max (dependence) -/// -/// @defgroup gtx_matrix_cross_product GLM_GTX_matrix_cross_product -/// @ingroup gtx -/// -/// Include to use the features of this extension. -/// -/// Build cross product matrices - -#pragma once - -// Dependency: -#include "../glm.hpp" - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# ifndef GLM_ENABLE_EXPERIMENTAL -# pragma message("GLM: GLM_GTX_matrix_cross_product is an experimental extension and may change in the future. Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.") -# else -# pragma message("GLM: GLM_GTX_matrix_cross_product extension included") -# endif -#endif - -namespace glm -{ - /// @addtogroup gtx_matrix_cross_product - /// @{ - - //! Build a cross product matrix. - //! From GLM_GTX_matrix_cross_product extension. - template - GLM_FUNC_DECL mat<3, 3, T, Q> matrixCross3( - vec<3, T, Q> const& x); - - //! 
Build a cross product matrix. - //! From GLM_GTX_matrix_cross_product extension. - template - GLM_FUNC_DECL mat<4, 4, T, Q> matrixCross4( - vec<3, T, Q> const& x); - - /// @} -}//namespace glm - -#include "matrix_cross_product.inl" diff --git a/third_party/glm/gtx/matrix_cross_product.inl b/third_party/glm/gtx/matrix_cross_product.inl deleted file mode 100755 index 3a15397..0000000 --- a/third_party/glm/gtx/matrix_cross_product.inl +++ /dev/null @@ -1,37 +0,0 @@ -/// @ref gtx_matrix_cross_product - -namespace glm -{ - template - GLM_FUNC_QUALIFIER mat<3, 3, T, Q> matrixCross3 - ( - vec<3, T, Q> const& x - ) - { - mat<3, 3, T, Q> Result(T(0)); - Result[0][1] = x.z; - Result[1][0] = -x.z; - Result[0][2] = -x.y; - Result[2][0] = x.y; - Result[1][2] = x.x; - Result[2][1] = -x.x; - return Result; - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, Q> matrixCross4 - ( - vec<3, T, Q> const& x - ) - { - mat<4, 4, T, Q> Result(T(0)); - Result[0][1] = x.z; - Result[1][0] = -x.z; - Result[0][2] = -x.y; - Result[2][0] = x.y; - Result[1][2] = x.x; - Result[2][1] = -x.x; - return Result; - } - -}//namespace glm diff --git a/third_party/glm/gtx/matrix_decompose.hpp b/third_party/glm/gtx/matrix_decompose.hpp deleted file mode 100755 index acd7a7f..0000000 --- a/third_party/glm/gtx/matrix_decompose.hpp +++ /dev/null @@ -1,46 +0,0 @@ -/// @ref gtx_matrix_decompose -/// @file glm/gtx/matrix_decompose.hpp -/// -/// @see core (dependence) -/// -/// @defgroup gtx_matrix_decompose GLM_GTX_matrix_decompose -/// @ingroup gtx -/// -/// Include to use the features of this extension. -/// -/// Decomposes a model matrix to translations, rotation and scale components - -#pragma once - -// Dependencies -#include "../mat4x4.hpp" -#include "../vec3.hpp" -#include "../vec4.hpp" -#include "../geometric.hpp" -#include "../gtc/quaternion.hpp" -#include "../gtc/matrix_transform.hpp" - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# ifndef GLM_ENABLE_EXPERIMENTAL -# pragma message("GLM: GLM_GTX_matrix_decompose is an experimental extension and may change in the future. Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.") -# else -# pragma message("GLM: GLM_GTX_matrix_decompose extension included") -# endif -#endif - -namespace glm -{ - /// @addtogroup gtx_matrix_decompose - /// @{ - - /// Decomposes a model matrix to translations, rotation and scale components - /// @see gtx_matrix_decompose - template - GLM_FUNC_DECL bool decompose( - mat<4, 4, T, Q> const& modelMatrix, - vec<3, T, Q> & scale, qua & orientation, vec<3, T, Q> & translation, vec<3, T, Q> & skew, vec<4, T, Q> & perspective); - - /// @} -}//namespace glm - -#include "matrix_decompose.inl" diff --git a/third_party/glm/gtx/matrix_decompose.inl b/third_party/glm/gtx/matrix_decompose.inl deleted file mode 100755 index 694f5ec..0000000 --- a/third_party/glm/gtx/matrix_decompose.inl +++ /dev/null @@ -1,186 +0,0 @@ -/// @ref gtx_matrix_decompose - -#include "../gtc/constants.hpp" -#include "../gtc/epsilon.hpp" - -namespace glm{ -namespace detail -{ - /// Make a linear combination of two vectors and return the result. 
- // result = (a * ascl) + (b * bscl) - template - GLM_FUNC_QUALIFIER vec<3, T, Q> combine( - vec<3, T, Q> const& a, - vec<3, T, Q> const& b, - T ascl, T bscl) - { - return (a * ascl) + (b * bscl); - } - - template - GLM_FUNC_QUALIFIER vec<3, T, Q> scale(vec<3, T, Q> const& v, T desiredLength) - { - return v * desiredLength / length(v); - } -}//namespace detail - - // Matrix decompose - // http://www.opensource.apple.com/source/WebCore/WebCore-514/platform/graphics/transforms/TransformationMatrix.cpp - // Decomposes the mode matrix to translations,rotation scale components - - template - GLM_FUNC_QUALIFIER bool decompose(mat<4, 4, T, Q> const& ModelMatrix, vec<3, T, Q> & Scale, qua & Orientation, vec<3, T, Q> & Translation, vec<3, T, Q> & Skew, vec<4, T, Q> & Perspective) - { - mat<4, 4, T, Q> LocalMatrix(ModelMatrix); - - // Normalize the matrix. - if(epsilonEqual(LocalMatrix[3][3], static_cast(0), epsilon())) - return false; - - for(length_t i = 0; i < 4; ++i) - for(length_t j = 0; j < 4; ++j) - LocalMatrix[i][j] /= LocalMatrix[3][3]; - - // perspectiveMatrix is used to solve for perspective, but it also provides - // an easy way to test for singularity of the upper 3x3 component. - mat<4, 4, T, Q> PerspectiveMatrix(LocalMatrix); - - for(length_t i = 0; i < 3; i++) - PerspectiveMatrix[i][3] = static_cast(0); - PerspectiveMatrix[3][3] = static_cast(1); - - /// TODO: Fixme! - if(epsilonEqual(determinant(PerspectiveMatrix), static_cast(0), epsilon())) - return false; - - // First, isolate perspective. This is the messiest. - if( - epsilonNotEqual(LocalMatrix[0][3], static_cast(0), epsilon()) || - epsilonNotEqual(LocalMatrix[1][3], static_cast(0), epsilon()) || - epsilonNotEqual(LocalMatrix[2][3], static_cast(0), epsilon())) - { - // rightHandSide is the right hand side of the equation. - vec<4, T, Q> RightHandSide; - RightHandSide[0] = LocalMatrix[0][3]; - RightHandSide[1] = LocalMatrix[1][3]; - RightHandSide[2] = LocalMatrix[2][3]; - RightHandSide[3] = LocalMatrix[3][3]; - - // Solve the equation by inverting PerspectiveMatrix and multiplying - // rightHandSide by the inverse. (This is the easiest way, not - // necessarily the best.) - mat<4, 4, T, Q> InversePerspectiveMatrix = glm::inverse(PerspectiveMatrix);// inverse(PerspectiveMatrix, inversePerspectiveMatrix); - mat<4, 4, T, Q> TransposedInversePerspectiveMatrix = glm::transpose(InversePerspectiveMatrix);// transposeMatrix4(inversePerspectiveMatrix, transposedInversePerspectiveMatrix); - - Perspective = TransposedInversePerspectiveMatrix * RightHandSide; - // v4MulPointByMatrix(rightHandSide, transposedInversePerspectiveMatrix, perspectivePoint); - - // Clear the perspective partition - LocalMatrix[0][3] = LocalMatrix[1][3] = LocalMatrix[2][3] = static_cast(0); - LocalMatrix[3][3] = static_cast(1); - } - else - { - // No perspective. - Perspective = vec<4, T, Q>(0, 0, 0, 1); - } - - // Next take care of translation (easy). - Translation = vec<3, T, Q>(LocalMatrix[3]); - LocalMatrix[3] = vec<4, T, Q>(0, 0, 0, LocalMatrix[3].w); - - vec<3, T, Q> Row[3], Pdum3; - - // Now get scale and shear. - for(length_t i = 0; i < 3; ++i) - for(length_t j = 0; j < 3; ++j) - Row[i][j] = LocalMatrix[i][j]; - - // Compute X scale factor and normalize first row. - Scale.x = length(Row[0]);// v3Length(Row[0]); - - Row[0] = detail::scale(Row[0], static_cast(1)); - - // Compute XY shear factor and make 2nd row orthogonal to 1st. 
- Skew.z = dot(Row[0], Row[1]); - Row[1] = detail::combine(Row[1], Row[0], static_cast(1), -Skew.z); - - // Now, compute Y scale and normalize 2nd row. - Scale.y = length(Row[1]); - Row[1] = detail::scale(Row[1], static_cast(1)); - Skew.z /= Scale.y; - - // Compute XZ and YZ shears, orthogonalize 3rd row. - Skew.y = glm::dot(Row[0], Row[2]); - Row[2] = detail::combine(Row[2], Row[0], static_cast(1), -Skew.y); - Skew.x = glm::dot(Row[1], Row[2]); - Row[2] = detail::combine(Row[2], Row[1], static_cast(1), -Skew.x); - - // Next, get Z scale and normalize 3rd row. - Scale.z = length(Row[2]); - Row[2] = detail::scale(Row[2], static_cast(1)); - Skew.y /= Scale.z; - Skew.x /= Scale.z; - - // At this point, the matrix (in rows[]) is orthonormal. - // Check for a coordinate system flip. If the determinant - // is -1, then negate the matrix and the scaling factors. - Pdum3 = cross(Row[1], Row[2]); // v3Cross(row[1], row[2], Pdum3); - if(dot(Row[0], Pdum3) < 0) - { - for(length_t i = 0; i < 3; i++) - { - Scale[i] *= static_cast(-1); - Row[i] *= static_cast(-1); - } - } - - // Now, get the rotations out, as described in the gem. - - // FIXME - Add the ability to return either quaternions (which are - // easier to recompose with) or Euler angles (rx, ry, rz), which - // are easier for authors to deal with. The latter will only be useful - // when we fix https://bugs.webkit.org/show_bug.cgi?id=23799, so I - // will leave the Euler angle code here for now. - - // ret.rotateY = asin(-Row[0][2]); - // if (cos(ret.rotateY) != 0) { - // ret.rotateX = atan2(Row[1][2], Row[2][2]); - // ret.rotateZ = atan2(Row[0][1], Row[0][0]); - // } else { - // ret.rotateX = atan2(-Row[2][0], Row[1][1]); - // ret.rotateZ = 0; - // } - - int i, j, k = 0; - T root, trace = Row[0].x + Row[1].y + Row[2].z; - if(trace > static_cast(0)) - { - root = sqrt(trace + static_cast(1.0)); - Orientation.w = static_cast(0.5) * root; - root = static_cast(0.5) / root; - Orientation.x = root * (Row[1].z - Row[2].y); - Orientation.y = root * (Row[2].x - Row[0].z); - Orientation.z = root * (Row[0].y - Row[1].x); - } // End if > 0 - else - { - static int Next[3] = {1, 2, 0}; - i = 0; - if(Row[1].y > Row[0].x) i = 1; - if(Row[2].z > Row[i][i]) i = 2; - j = Next[i]; - k = Next[j]; - - root = sqrt(Row[i][i] - Row[j][j] - Row[k][k] + static_cast(1.0)); - - Orientation[i] = static_cast(0.5) * root; - root = static_cast(0.5) / root; - Orientation[j] = root * (Row[i][j] + Row[j][i]); - Orientation[k] = root * (Row[i][k] + Row[k][i]); - Orientation.w = root * (Row[j][k] - Row[k][j]); - } // End if <= 0 - - return true; - } -}//namespace glm diff --git a/third_party/glm/gtx/matrix_factorisation.hpp b/third_party/glm/gtx/matrix_factorisation.hpp deleted file mode 100755 index 5a975d6..0000000 --- a/third_party/glm/gtx/matrix_factorisation.hpp +++ /dev/null @@ -1,69 +0,0 @@ -/// @ref gtx_matrix_factorisation -/// @file glm/gtx/matrix_factorisation.hpp -/// -/// @see core (dependence) -/// -/// @defgroup gtx_matrix_factorisation GLM_GTX_matrix_factorisation -/// @ingroup gtx -/// -/// Include to use the features of this extension. -/// -/// Functions to factor matrices in various forms - -#pragma once - -// Dependency: -#include "../glm.hpp" - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# ifndef GLM_ENABLE_EXPERIMENTAL -# pragma message("GLM: GLM_GTX_matrix_factorisation is an experimental extension and may change in the future. 
Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.") -# else -# pragma message("GLM: GLM_GTX_matrix_factorisation extension included") -# endif -#endif - -/* -Suggestions: - - Move helper functions flipud and fliplr to another file: They may be helpful in more general circumstances. - - Implement other types of matrix factorisation, such as: QL and LQ, L(D)U, eigendecompositions, etc... -*/ - -namespace glm -{ - /// @addtogroup gtx_matrix_factorisation - /// @{ - - /// Flips the matrix rows up and down. - /// - /// From GLM_GTX_matrix_factorisation extension. - template - GLM_FUNC_DECL mat flipud(mat const& in); - - /// Flips the matrix columns right and left. - /// - /// From GLM_GTX_matrix_factorisation extension. - template - GLM_FUNC_DECL mat fliplr(mat const& in); - - /// Performs QR factorisation of a matrix. - /// Returns 2 matrices, q and r, such that the columns of q are orthonormal and span the same subspace than those of the input matrix, r is an upper triangular matrix, and q*r=in. - /// Given an n-by-m input matrix, q has dimensions min(n,m)-by-m, and r has dimensions n-by-min(n,m). - /// - /// From GLM_GTX_matrix_factorisation extension. - template - GLM_FUNC_DECL void qr_decompose(mat const& in, mat<(C < R ? C : R), R, T, Q>& q, mat& r); - - /// Performs RQ factorisation of a matrix. - /// Returns 2 matrices, r and q, such that r is an upper triangular matrix, the rows of q are orthonormal and span the same subspace than those of the input matrix, and r*q=in. - /// Note that in the context of RQ factorisation, the diagonal is seen as starting in the lower-right corner of the matrix, instead of the usual upper-left. - /// Given an n-by-m input matrix, r has dimensions min(n,m)-by-m, and q has dimensions n-by-min(n,m). - /// - /// From GLM_GTX_matrix_factorisation extension. - template - GLM_FUNC_DECL void rq_decompose(mat const& in, mat<(C < R ? C : R), R, T, Q>& r, mat& q); - - /// @} -} - -#include "matrix_factorisation.inl" diff --git a/third_party/glm/gtx/matrix_factorisation.inl b/third_party/glm/gtx/matrix_factorisation.inl deleted file mode 100755 index c479b8a..0000000 --- a/third_party/glm/gtx/matrix_factorisation.inl +++ /dev/null @@ -1,84 +0,0 @@ -/// @ref gtx_matrix_factorisation - -namespace glm -{ - template - GLM_FUNC_QUALIFIER mat flipud(mat const& in) - { - mat tin = transpose(in); - tin = fliplr(tin); - mat out = transpose(tin); - - return out; - } - - template - GLM_FUNC_QUALIFIER mat fliplr(mat const& in) - { - mat out; - for (length_t i = 0; i < C; i++) - { - out[i] = in[(C - i) - 1]; - } - - return out; - } - - template - GLM_FUNC_QUALIFIER void qr_decompose(mat const& in, mat<(C < R ? C : R), R, T, Q>& q, mat& r) - { - // Uses modified Gram-Schmidt method - // Source: https://en.wikipedia.org/wiki/Gram–Schmidt_process - // And https://en.wikipedia.org/wiki/QR_decomposition - - //For all the linearly independs columns of the input... - // (there can be no more linearly independents columns than there are rows.) - for (length_t i = 0; i < (C < R ? C : R); i++) - { - //Copy in Q the input's i-th column. - q[i] = in[i]; - - //j = [0,i[ - // Make that column orthogonal to all the previous ones by substracting to it the non-orthogonal projection of all the previous columns. - // Also: Fill the zero elements of R - for (length_t j = 0; j < i; j++) - { - q[i] -= dot(q[i], q[j])*q[j]; - r[j][i] = 0; - } - - //Now, Q i-th column is orthogonal to all the previous columns. Normalize it. 
- q[i] = normalize(q[i]); - - //j = [i,C[ - //Finally, compute the corresponding coefficients of R by computing the projection of the resulting column on the other columns of the input. - for (length_t j = i; j < C; j++) - { - r[j][i] = dot(in[j], q[i]); - } - } - } - - template - GLM_FUNC_QUALIFIER void rq_decompose(mat const& in, mat<(C < R ? C : R), R, T, Q>& r, mat& q) - { - // From https://en.wikipedia.org/wiki/QR_decomposition: - // The RQ decomposition transforms a matrix A into the product of an upper triangular matrix R (also known as right-triangular) and an orthogonal matrix Q. The only difference from QR decomposition is the order of these matrices. - // QR decomposition is Gram–Schmidt orthogonalization of columns of A, started from the first column. - // RQ decomposition is Gram–Schmidt orthogonalization of rows of A, started from the last row. - - mat tin = transpose(in); - tin = fliplr(tin); - - mat tr; - mat<(C < R ? C : R), C, T, Q> tq; - qr_decompose(tin, tq, tr); - - tr = fliplr(tr); - r = transpose(tr); - r = fliplr(r); - - tq = fliplr(tq); - q = transpose(tq); - } -} //namespace glm diff --git a/third_party/glm/gtx/matrix_interpolation.hpp b/third_party/glm/gtx/matrix_interpolation.hpp deleted file mode 100755 index 7d5ad4c..0000000 --- a/third_party/glm/gtx/matrix_interpolation.hpp +++ /dev/null @@ -1,60 +0,0 @@ -/// @ref gtx_matrix_interpolation -/// @file glm/gtx/matrix_interpolation.hpp -/// @author Ghenadii Ursachi (the.asteroth@gmail.com) -/// -/// @see core (dependence) -/// -/// @defgroup gtx_matrix_interpolation GLM_GTX_matrix_interpolation -/// @ingroup gtx -/// -/// Include to use the features of this extension. -/// -/// Allows to directly interpolate two matrices. - -#pragma once - -// Dependency: -#include "../glm.hpp" - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# ifndef GLM_ENABLE_EXPERIMENTAL -# pragma message("GLM: GLM_GTX_matrix_interpolation is an experimental extension and may change in the future. Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.") -# else -# pragma message("GLM: GLM_GTX_matrix_interpolation extension included") -# endif -#endif - -namespace glm -{ - /// @addtogroup gtx_matrix_interpolation - /// @{ - - /// Get the axis and angle of the rotation from a matrix. - /// From GLM_GTX_matrix_interpolation extension. - template - GLM_FUNC_DECL void axisAngle( - mat<4, 4, T, Q> const& Mat, vec<3, T, Q> & Axis, T & Angle); - - /// Build a matrix from axis and angle. - /// From GLM_GTX_matrix_interpolation extension. - template - GLM_FUNC_DECL mat<4, 4, T, Q> axisAngleMatrix( - vec<3, T, Q> const& Axis, T const Angle); - - /// Extracts the rotation part of a matrix. - /// From GLM_GTX_matrix_interpolation extension. - template - GLM_FUNC_DECL mat<4, 4, T, Q> extractMatrixRotation( - mat<4, 4, T, Q> const& Mat); - - /// Build a interpolation of 4 * 4 matrixes. - /// From GLM_GTX_matrix_interpolation extension. - /// Warning! works only with rotation and/or translation matrixes, scale will generate unexpected results. 
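// A minimal usage sketch for the decompose() shown further up (an illustration,
// not part of the deleted sources): it assumes the usual
// <glm/gtx/matrix_decompose.hpp> include path and GLM_ENABLE_EXPERIMENTAL,
// builds a plain translate * rotate * scale matrix and reads the parts back out.
#define GLM_ENABLE_EXPERIMENTAL
#include <glm/glm.hpp>
#include <glm/gtc/matrix_transform.hpp>
#include <glm/gtc/quaternion.hpp>
#include <glm/gtx/matrix_decompose.hpp>

inline bool decompose_demo()
{
	glm::quat const rot = glm::angleAxis(glm::radians(30.0f), glm::vec3(0.0f, 1.0f, 0.0f));
	glm::mat4 const model =
		  glm::translate(glm::mat4(1.0f), glm::vec3(1.0f, 2.0f, 3.0f))
		* glm::mat4_cast(rot)
		* glm::scale(glm::mat4(1.0f), glm::vec3(2.0f));

	glm::vec3 scale, translation, skew;
	glm::quat orientation;
	glm::vec4 perspective;
	if(!glm::decompose(model, scale, orientation, translation, skew, perspective))
		return false;

	// For this matrix the recovered parts are translation ~ (1, 2, 3),
	// scale ~ (2, 2, 2), skew ~ 0 and perspective ~ (0, 0, 0, 1).
	return true;
}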
- template - GLM_FUNC_DECL mat<4, 4, T, Q> interpolate( - mat<4, 4, T, Q> const& m1, mat<4, 4, T, Q> const& m2, T const Delta); - - /// @} -}//namespace glm - -#include "matrix_interpolation.inl" diff --git a/third_party/glm/gtx/matrix_interpolation.inl b/third_party/glm/gtx/matrix_interpolation.inl deleted file mode 100755 index de40b7d..0000000 --- a/third_party/glm/gtx/matrix_interpolation.inl +++ /dev/null @@ -1,129 +0,0 @@ -/// @ref gtx_matrix_interpolation - -#include "../gtc/constants.hpp" - -namespace glm -{ - template - GLM_FUNC_QUALIFIER void axisAngle(mat<4, 4, T, Q> const& m, vec<3, T, Q> & axis, T& angle) - { - T epsilon = static_cast(0.01); - T epsilon2 = static_cast(0.1); - - if((abs(m[1][0] - m[0][1]) < epsilon) && (abs(m[2][0] - m[0][2]) < epsilon) && (abs(m[2][1] - m[1][2]) < epsilon)) - { - if ((abs(m[1][0] + m[0][1]) < epsilon2) && (abs(m[2][0] + m[0][2]) < epsilon2) && (abs(m[2][1] + m[1][2]) < epsilon2) && (abs(m[0][0] + m[1][1] + m[2][2] - static_cast(3.0)) < epsilon2)) - { - angle = static_cast(0.0); - axis.x = static_cast(1.0); - axis.y = static_cast(0.0); - axis.z = static_cast(0.0); - return; - } - angle = static_cast(3.1415926535897932384626433832795); - T xx = (m[0][0] + static_cast(1.0)) * static_cast(0.5); - T yy = (m[1][1] + static_cast(1.0)) * static_cast(0.5); - T zz = (m[2][2] + static_cast(1.0)) * static_cast(0.5); - T xy = (m[1][0] + m[0][1]) * static_cast(0.25); - T xz = (m[2][0] + m[0][2]) * static_cast(0.25); - T yz = (m[2][1] + m[1][2]) * static_cast(0.25); - if((xx > yy) && (xx > zz)) - { - if(xx < epsilon) - { - axis.x = static_cast(0.0); - axis.y = static_cast(0.7071); - axis.z = static_cast(0.7071); - } - else - { - axis.x = sqrt(xx); - axis.y = xy / axis.x; - axis.z = xz / axis.x; - } - } - else if (yy > zz) - { - if(yy < epsilon) - { - axis.x = static_cast(0.7071); - axis.y = static_cast(0.0); - axis.z = static_cast(0.7071); - } - else - { - axis.y = sqrt(yy); - axis.x = xy / axis.y; - axis.z = yz / axis.y; - } - } - else - { - if (zz < epsilon) - { - axis.x = static_cast(0.7071); - axis.y = static_cast(0.7071); - axis.z = static_cast(0.0); - } - else - { - axis.z = sqrt(zz); - axis.x = xz / axis.z; - axis.y = yz / axis.z; - } - } - return; - } - T s = sqrt((m[2][1] - m[1][2]) * (m[2][1] - m[1][2]) + (m[2][0] - m[0][2]) * (m[2][0] - m[0][2]) + (m[1][0] - m[0][1]) * (m[1][0] - m[0][1])); - if (glm::abs(s) < T(0.001)) - s = static_cast(1); - T const angleCos = (m[0][0] + m[1][1] + m[2][2] - static_cast(1)) * static_cast(0.5); - if(angleCos - static_cast(1) < epsilon) - angle = pi() * static_cast(0.25); - else - angle = acos(angleCos); - axis.x = (m[1][2] - m[2][1]) / s; - axis.y = (m[2][0] - m[0][2]) / s; - axis.z = (m[0][1] - m[1][0]) / s; - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, Q> axisAngleMatrix(vec<3, T, Q> const& axis, T const angle) - { - T c = cos(angle); - T s = sin(angle); - T t = static_cast(1) - c; - vec<3, T, Q> n = normalize(axis); - - return mat<4, 4, T, Q>( - t * n.x * n.x + c, t * n.x * n.y + n.z * s, t * n.x * n.z - n.y * s, static_cast(0.0), - t * n.x * n.y - n.z * s, t * n.y * n.y + c, t * n.y * n.z + n.x * s, static_cast(0.0), - t * n.x * n.z + n.y * s, t * n.y * n.z - n.x * s, t * n.z * n.z + c, static_cast(0.0), - static_cast(0.0), static_cast(0.0), static_cast(0.0), static_cast(1.0)); - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, Q> extractMatrixRotation(mat<4, 4, T, Q> const& m) - { - return mat<4, 4, T, Q>( - m[0][0], m[0][1], m[0][2], static_cast(0.0), - m[1][0], m[1][1], m[1][2], static_cast(0.0), - 
m[2][0], m[2][1], m[2][2], static_cast(0.0), - static_cast(0.0), static_cast(0.0), static_cast(0.0), static_cast(1.0)); - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, Q> interpolate(mat<4, 4, T, Q> const& m1, mat<4, 4, T, Q> const& m2, T const delta) - { - mat<4, 4, T, Q> m1rot = extractMatrixRotation(m1); - mat<4, 4, T, Q> dltRotation = m2 * transpose(m1rot); - vec<3, T, Q> dltAxis; - T dltAngle; - axisAngle(dltRotation, dltAxis, dltAngle); - mat<4, 4, T, Q> out = axisAngleMatrix(dltAxis, dltAngle * delta) * m1rot; - out[3][0] = m1[3][0] + delta * (m2[3][0] - m1[3][0]); - out[3][1] = m1[3][1] + delta * (m2[3][1] - m1[3][1]); - out[3][2] = m1[3][2] + delta * (m2[3][2] - m1[3][2]); - return out; - } -}//namespace glm diff --git a/third_party/glm/gtx/matrix_major_storage.hpp b/third_party/glm/gtx/matrix_major_storage.hpp deleted file mode 100755 index 8c6bc22..0000000 --- a/third_party/glm/gtx/matrix_major_storage.hpp +++ /dev/null @@ -1,119 +0,0 @@ -/// @ref gtx_matrix_major_storage -/// @file glm/gtx/matrix_major_storage.hpp -/// -/// @see core (dependence) -/// @see gtx_extented_min_max (dependence) -/// -/// @defgroup gtx_matrix_major_storage GLM_GTX_matrix_major_storage -/// @ingroup gtx -/// -/// Include to use the features of this extension. -/// -/// Build matrices with specific matrix order, row or column - -#pragma once - -// Dependency: -#include "../glm.hpp" - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# ifndef GLM_ENABLE_EXPERIMENTAL -# pragma message("GLM: GLM_GTX_matrix_major_storage is an experimental extension and may change in the future. Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.") -# else -# pragma message("GLM: GLM_GTX_matrix_major_storage extension included") -# endif -#endif - -namespace glm -{ - /// @addtogroup gtx_matrix_major_storage - /// @{ - - //! Build a row major matrix from row vectors. - //! From GLM_GTX_matrix_major_storage extension. - template - GLM_FUNC_DECL mat<2, 2, T, Q> rowMajor2( - vec<2, T, Q> const& v1, - vec<2, T, Q> const& v2); - - //! Build a row major matrix from other matrix. - //! From GLM_GTX_matrix_major_storage extension. - template - GLM_FUNC_DECL mat<2, 2, T, Q> rowMajor2( - mat<2, 2, T, Q> const& m); - - //! Build a row major matrix from row vectors. - //! From GLM_GTX_matrix_major_storage extension. - template - GLM_FUNC_DECL mat<3, 3, T, Q> rowMajor3( - vec<3, T, Q> const& v1, - vec<3, T, Q> const& v2, - vec<3, T, Q> const& v3); - - //! Build a row major matrix from other matrix. - //! From GLM_GTX_matrix_major_storage extension. - template - GLM_FUNC_DECL mat<3, 3, T, Q> rowMajor3( - mat<3, 3, T, Q> const& m); - - //! Build a row major matrix from row vectors. - //! From GLM_GTX_matrix_major_storage extension. - template - GLM_FUNC_DECL mat<4, 4, T, Q> rowMajor4( - vec<4, T, Q> const& v1, - vec<4, T, Q> const& v2, - vec<4, T, Q> const& v3, - vec<4, T, Q> const& v4); - - //! Build a row major matrix from other matrix. - //! From GLM_GTX_matrix_major_storage extension. - template - GLM_FUNC_DECL mat<4, 4, T, Q> rowMajor4( - mat<4, 4, T, Q> const& m); - - //! Build a column major matrix from column vectors. - //! From GLM_GTX_matrix_major_storage extension. - template - GLM_FUNC_DECL mat<2, 2, T, Q> colMajor2( - vec<2, T, Q> const& v1, - vec<2, T, Q> const& v2); - - //! Build a column major matrix from other matrix. - //! From GLM_GTX_matrix_major_storage extension. - template - GLM_FUNC_DECL mat<2, 2, T, Q> colMajor2( - mat<2, 2, T, Q> const& m); - - //! 
Build a column major matrix from column vectors. - //! From GLM_GTX_matrix_major_storage extension. - template - GLM_FUNC_DECL mat<3, 3, T, Q> colMajor3( - vec<3, T, Q> const& v1, - vec<3, T, Q> const& v2, - vec<3, T, Q> const& v3); - - //! Build a column major matrix from other matrix. - //! From GLM_GTX_matrix_major_storage extension. - template - GLM_FUNC_DECL mat<3, 3, T, Q> colMajor3( - mat<3, 3, T, Q> const& m); - - //! Build a column major matrix from column vectors. - //! From GLM_GTX_matrix_major_storage extension. - template - GLM_FUNC_DECL mat<4, 4, T, Q> colMajor4( - vec<4, T, Q> const& v1, - vec<4, T, Q> const& v2, - vec<4, T, Q> const& v3, - vec<4, T, Q> const& v4); - - //! Build a column major matrix from other matrix. - //! From GLM_GTX_matrix_major_storage extension. - template - GLM_FUNC_DECL mat<4, 4, T, Q> colMajor4( - mat<4, 4, T, Q> const& m); - - /// @} -}//namespace glm - -#include "matrix_major_storage.inl" diff --git a/third_party/glm/gtx/matrix_major_storage.inl b/third_party/glm/gtx/matrix_major_storage.inl deleted file mode 100755 index 279dd34..0000000 --- a/third_party/glm/gtx/matrix_major_storage.inl +++ /dev/null @@ -1,166 +0,0 @@ -/// @ref gtx_matrix_major_storage - -namespace glm -{ - template - GLM_FUNC_QUALIFIER mat<2, 2, T, Q> rowMajor2 - ( - vec<2, T, Q> const& v1, - vec<2, T, Q> const& v2 - ) - { - mat<2, 2, T, Q> Result; - Result[0][0] = v1.x; - Result[1][0] = v1.y; - Result[0][1] = v2.x; - Result[1][1] = v2.y; - return Result; - } - - template - GLM_FUNC_QUALIFIER mat<2, 2, T, Q> rowMajor2( - const mat<2, 2, T, Q>& m) - { - mat<2, 2, T, Q> Result; - Result[0][0] = m[0][0]; - Result[0][1] = m[1][0]; - Result[1][0] = m[0][1]; - Result[1][1] = m[1][1]; - return Result; - } - - template - GLM_FUNC_QUALIFIER mat<3, 3, T, Q> rowMajor3( - const vec<3, T, Q>& v1, - const vec<3, T, Q>& v2, - const vec<3, T, Q>& v3) - { - mat<3, 3, T, Q> Result; - Result[0][0] = v1.x; - Result[1][0] = v1.y; - Result[2][0] = v1.z; - Result[0][1] = v2.x; - Result[1][1] = v2.y; - Result[2][1] = v2.z; - Result[0][2] = v3.x; - Result[1][2] = v3.y; - Result[2][2] = v3.z; - return Result; - } - - template - GLM_FUNC_QUALIFIER mat<3, 3, T, Q> rowMajor3( - const mat<3, 3, T, Q>& m) - { - mat<3, 3, T, Q> Result; - Result[0][0] = m[0][0]; - Result[0][1] = m[1][0]; - Result[0][2] = m[2][0]; - Result[1][0] = m[0][1]; - Result[1][1] = m[1][1]; - Result[1][2] = m[2][1]; - Result[2][0] = m[0][2]; - Result[2][1] = m[1][2]; - Result[2][2] = m[2][2]; - return Result; - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, Q> rowMajor4( - const vec<4, T, Q>& v1, - const vec<4, T, Q>& v2, - const vec<4, T, Q>& v3, - const vec<4, T, Q>& v4) - { - mat<4, 4, T, Q> Result; - Result[0][0] = v1.x; - Result[1][0] = v1.y; - Result[2][0] = v1.z; - Result[3][0] = v1.w; - Result[0][1] = v2.x; - Result[1][1] = v2.y; - Result[2][1] = v2.z; - Result[3][1] = v2.w; - Result[0][2] = v3.x; - Result[1][2] = v3.y; - Result[2][2] = v3.z; - Result[3][2] = v3.w; - Result[0][3] = v4.x; - Result[1][3] = v4.y; - Result[2][3] = v4.z; - Result[3][3] = v4.w; - return Result; - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, Q> rowMajor4( - const mat<4, 4, T, Q>& m) - { - mat<4, 4, T, Q> Result; - Result[0][0] = m[0][0]; - Result[0][1] = m[1][0]; - Result[0][2] = m[2][0]; - Result[0][3] = m[3][0]; - Result[1][0] = m[0][1]; - Result[1][1] = m[1][1]; - Result[1][2] = m[2][1]; - Result[1][3] = m[3][1]; - Result[2][0] = m[0][2]; - Result[2][1] = m[1][2]; - Result[2][2] = m[2][2]; - Result[2][3] = m[3][2]; - Result[3][0] = m[0][3]; - 
Result[3][1] = m[1][3]; - Result[3][2] = m[2][3]; - Result[3][3] = m[3][3]; - return Result; - } - - template - GLM_FUNC_QUALIFIER mat<2, 2, T, Q> colMajor2( - const vec<2, T, Q>& v1, - const vec<2, T, Q>& v2) - { - return mat<2, 2, T, Q>(v1, v2); - } - - template - GLM_FUNC_QUALIFIER mat<2, 2, T, Q> colMajor2( - const mat<2, 2, T, Q>& m) - { - return mat<2, 2, T, Q>(m); - } - - template - GLM_FUNC_QUALIFIER mat<3, 3, T, Q> colMajor3( - const vec<3, T, Q>& v1, - const vec<3, T, Q>& v2, - const vec<3, T, Q>& v3) - { - return mat<3, 3, T, Q>(v1, v2, v3); - } - - template - GLM_FUNC_QUALIFIER mat<3, 3, T, Q> colMajor3( - const mat<3, 3, T, Q>& m) - { - return mat<3, 3, T, Q>(m); - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, Q> colMajor4( - const vec<4, T, Q>& v1, - const vec<4, T, Q>& v2, - const vec<4, T, Q>& v3, - const vec<4, T, Q>& v4) - { - return mat<4, 4, T, Q>(v1, v2, v3, v4); - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, Q> colMajor4( - const mat<4, 4, T, Q>& m) - { - return mat<4, 4, T, Q>(m); - } -}//namespace glm diff --git a/third_party/glm/gtx/matrix_operation.hpp b/third_party/glm/gtx/matrix_operation.hpp deleted file mode 100755 index de6ff1f..0000000 --- a/third_party/glm/gtx/matrix_operation.hpp +++ /dev/null @@ -1,103 +0,0 @@ -/// @ref gtx_matrix_operation -/// @file glm/gtx/matrix_operation.hpp -/// -/// @see core (dependence) -/// -/// @defgroup gtx_matrix_operation GLM_GTX_matrix_operation -/// @ingroup gtx -/// -/// Include to use the features of this extension. -/// -/// Build diagonal matrices from vectors. - -#pragma once - -// Dependency: -#include "../glm.hpp" - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# ifndef GLM_ENABLE_EXPERIMENTAL -# pragma message("GLM: GLM_GTX_matrix_operation is an experimental extension and may change in the future. Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.") -# else -# pragma message("GLM: GLM_GTX_matrix_operation extension included") -# endif -#endif - -namespace glm -{ - /// @addtogroup gtx_matrix_operation - /// @{ - - //! Build a diagonal matrix. - //! From GLM_GTX_matrix_operation extension. - template - GLM_FUNC_DECL mat<2, 2, T, Q> diagonal2x2( - vec<2, T, Q> const& v); - - //! Build a diagonal matrix. - //! From GLM_GTX_matrix_operation extension. - template - GLM_FUNC_DECL mat<2, 3, T, Q> diagonal2x3( - vec<2, T, Q> const& v); - - //! Build a diagonal matrix. - //! From GLM_GTX_matrix_operation extension. - template - GLM_FUNC_DECL mat<2, 4, T, Q> diagonal2x4( - vec<2, T, Q> const& v); - - //! Build a diagonal matrix. - //! From GLM_GTX_matrix_operation extension. - template - GLM_FUNC_DECL mat<3, 2, T, Q> diagonal3x2( - vec<2, T, Q> const& v); - - //! Build a diagonal matrix. - //! From GLM_GTX_matrix_operation extension. - template - GLM_FUNC_DECL mat<3, 3, T, Q> diagonal3x3( - vec<3, T, Q> const& v); - - //! Build a diagonal matrix. - //! From GLM_GTX_matrix_operation extension. - template - GLM_FUNC_DECL mat<3, 4, T, Q> diagonal3x4( - vec<3, T, Q> const& v); - - //! Build a diagonal matrix. - //! From GLM_GTX_matrix_operation extension. - template - GLM_FUNC_DECL mat<4, 2, T, Q> diagonal4x2( - vec<2, T, Q> const& v); - - //! Build a diagonal matrix. - //! From GLM_GTX_matrix_operation extension. - template - GLM_FUNC_DECL mat<4, 3, T, Q> diagonal4x3( - vec<3, T, Q> const& v); - - //! Build a diagonal matrix. - //! From GLM_GTX_matrix_operation extension. 
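// Illustrative sketch for the major-storage helpers above (not part of the
// deleted sources): with GLM's column-major storage, rowMajorN() places the
// given vectors in the matrix rows while colMajorN() places them in the
// columns, so the two results are transposes of each other for the same inputs.
#define GLM_ENABLE_EXPERIMENTAL
#include <glm/glm.hpp>
#include <glm/gtx/matrix_major_storage.hpp>

inline void major_storage_demo()
{
	glm::vec2 const a(1.0f, 2.0f);
	glm::vec2 const b(3.0f, 4.0f);

	glm::mat2 const byRows = glm::rowMajor2(a, b); // rows are a and b
	glm::mat2 const byCols = glm::colMajor2(a, b); // columns are a and b

	// byRows[c][r] indexes column c, row r, so byRows[0] == (1, 3)
	// and byCols[0] == (1, 2); byRows == transpose(byCols).
	(void)byRows; (void)byCols;
}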
- template - GLM_FUNC_DECL mat<4, 4, T, Q> diagonal4x4( - vec<4, T, Q> const& v); - - /// Build an adjugate matrix. - /// From GLM_GTX_matrix_operation extension. - template - GLM_FUNC_DECL mat<2, 2, T, Q> adjugate(mat<2, 2, T, Q> const& m); - - /// Build an adjugate matrix. - /// From GLM_GTX_matrix_operation extension. - template - GLM_FUNC_DECL mat<3, 3, T, Q> adjugate(mat<3, 3, T, Q> const& m); - - /// Build an adjugate matrix. - /// From GLM_GTX_matrix_operation extension. - template - GLM_FUNC_DECL mat<4, 4, T, Q> adjugate(mat<4, 4, T, Q> const& m); - - /// @} -}//namespace glm - -#include "matrix_operation.inl" diff --git a/third_party/glm/gtx/matrix_operation.inl b/third_party/glm/gtx/matrix_operation.inl deleted file mode 100755 index 9de83f8..0000000 --- a/third_party/glm/gtx/matrix_operation.inl +++ /dev/null @@ -1,176 +0,0 @@ -/// @ref gtx_matrix_operation - -namespace glm -{ - template - GLM_FUNC_QUALIFIER mat<2, 2, T, Q> diagonal2x2 - ( - vec<2, T, Q> const& v - ) - { - mat<2, 2, T, Q> Result(static_cast(1)); - Result[0][0] = v[0]; - Result[1][1] = v[1]; - return Result; - } - - template - GLM_FUNC_QUALIFIER mat<2, 3, T, Q> diagonal2x3 - ( - vec<2, T, Q> const& v - ) - { - mat<2, 3, T, Q> Result(static_cast(1)); - Result[0][0] = v[0]; - Result[1][1] = v[1]; - return Result; - } - - template - GLM_FUNC_QUALIFIER mat<2, 4, T, Q> diagonal2x4 - ( - vec<2, T, Q> const& v - ) - { - mat<2, 4, T, Q> Result(static_cast(1)); - Result[0][0] = v[0]; - Result[1][1] = v[1]; - return Result; - } - - template - GLM_FUNC_QUALIFIER mat<3, 2, T, Q> diagonal3x2 - ( - vec<2, T, Q> const& v - ) - { - mat<3, 2, T, Q> Result(static_cast(1)); - Result[0][0] = v[0]; - Result[1][1] = v[1]; - return Result; - } - - template - GLM_FUNC_QUALIFIER mat<3, 3, T, Q> diagonal3x3 - ( - vec<3, T, Q> const& v - ) - { - mat<3, 3, T, Q> Result(static_cast(1)); - Result[0][0] = v[0]; - Result[1][1] = v[1]; - Result[2][2] = v[2]; - return Result; - } - - template - GLM_FUNC_QUALIFIER mat<3, 4, T, Q> diagonal3x4 - ( - vec<3, T, Q> const& v - ) - { - mat<3, 4, T, Q> Result(static_cast(1)); - Result[0][0] = v[0]; - Result[1][1] = v[1]; - Result[2][2] = v[2]; - return Result; - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, Q> diagonal4x4 - ( - vec<4, T, Q> const& v - ) - { - mat<4, 4, T, Q> Result(static_cast(1)); - Result[0][0] = v[0]; - Result[1][1] = v[1]; - Result[2][2] = v[2]; - Result[3][3] = v[3]; - return Result; - } - - template - GLM_FUNC_QUALIFIER mat<4, 3, T, Q> diagonal4x3 - ( - vec<3, T, Q> const& v - ) - { - mat<4, 3, T, Q> Result(static_cast(1)); - Result[0][0] = v[0]; - Result[1][1] = v[1]; - Result[2][2] = v[2]; - return Result; - } - - template - GLM_FUNC_QUALIFIER mat<4, 2, T, Q> diagonal4x2 - ( - vec<2, T, Q> const& v - ) - { - mat<4, 2, T, Q> Result(static_cast(1)); - Result[0][0] = v[0]; - Result[1][1] = v[1]; - return Result; - } - - template - GLM_FUNC_QUALIFIER mat<2, 2, T, Q> adjugate(mat<2, 2, T, Q> const& m) - { - return mat<2, 2, T, Q>( - +m[1][1], -m[1][0], - -m[0][1], +m[0][0]); - } - - template - GLM_FUNC_QUALIFIER mat<3, 3, T, Q> adjugate(mat<3, 3, T, Q> const& m) - { - T const m00 = determinant(mat<2, 2, T, Q>(m[1][1], m[2][1], m[1][2], m[2][2])); - T const m01 = determinant(mat<2, 2, T, Q>(m[0][1], m[2][1], m[0][2], m[2][2])); - T const m02 = determinant(mat<2, 2, T, Q>(m[0][1], m[1][1], m[0][2], m[1][2])); - - T const m10 = determinant(mat<2, 2, T, Q>(m[1][0], m[2][0], m[1][2], m[2][2])); - T const m11 = determinant(mat<2, 2, T, Q>(m[0][0], m[2][0], m[0][2], m[2][2])); - T const 
m12 = determinant(mat<2, 2, T, Q>(m[0][0], m[1][0], m[0][2], m[1][2])); - - T const m20 = determinant(mat<2, 2, T, Q>(m[1][0], m[2][0], m[1][1], m[2][1])); - T const m21 = determinant(mat<2, 2, T, Q>(m[0][0], m[2][0], m[0][1], m[2][1])); - T const m22 = determinant(mat<2, 2, T, Q>(m[0][0], m[1][0], m[0][1], m[1][1])); - - return mat<3, 3, T, Q>( - +m00, -m01, +m02, - -m10, +m11, -m12, - +m20, -m21, +m22); - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, Q> adjugate(mat<4, 4, T, Q> const& m) - { - T const m00 = determinant(mat<3, 3, T, Q>(m[1][1], m[1][2], m[1][3], m[2][1], m[2][2], m[2][3], m[3][1], m[3][2], m[3][3])); - T const m01 = determinant(mat<3, 3, T, Q>(m[1][0], m[1][2], m[1][3], m[2][0], m[2][2], m[2][3], m[3][0], m[3][2], m[3][3])); - T const m02 = determinant(mat<3, 3, T, Q>(m[1][0], m[1][1], m[1][3], m[2][0], m[2][2], m[2][3], m[3][0], m[3][1], m[3][3])); - T const m03 = determinant(mat<3, 3, T, Q>(m[1][0], m[1][1], m[1][2], m[2][0], m[2][1], m[2][2], m[3][0], m[3][1], m[3][2])); - - T const m10 = determinant(mat<3, 3, T, Q>(m[0][1], m[0][2], m[0][3], m[2][1], m[2][2], m[2][3], m[3][1], m[3][2], m[3][3])); - T const m11 = determinant(mat<3, 3, T, Q>(m[0][0], m[0][2], m[0][3], m[2][0], m[2][2], m[2][3], m[3][0], m[3][2], m[3][3])); - T const m12 = determinant(mat<3, 3, T, Q>(m[0][0], m[0][1], m[0][3], m[2][0], m[2][1], m[2][3], m[3][0], m[3][1], m[3][3])); - T const m13 = determinant(mat<3, 3, T, Q>(m[0][0], m[0][1], m[0][2], m[2][0], m[2][1], m[2][2], m[3][0], m[3][1], m[3][2])); - - T const m20 = determinant(mat<3, 3, T, Q>(m[0][1], m[0][2], m[0][3], m[1][1], m[1][2], m[1][3], m[3][1], m[3][2], m[3][3])); - T const m21 = determinant(mat<3, 3, T, Q>(m[0][0], m[0][2], m[0][3], m[1][0], m[1][2], m[1][3], m[3][0], m[3][2], m[3][3])); - T const m22 = determinant(mat<3, 3, T, Q>(m[0][0], m[0][1], m[0][3], m[1][0], m[1][1], m[1][3], m[3][0], m[3][1], m[3][3])); - T const m23 = determinant(mat<3, 3, T, Q>(m[0][0], m[0][1], m[0][2], m[1][0], m[1][1], m[1][2], m[3][0], m[3][1], m[3][2])); - - T const m30 = determinant(mat<3, 3, T, Q>(m[0][1], m[0][2], m[0][3], m[1][1], m[1][2], m[1][3], m[2][1], m[2][2], m[2][3])); - T const m31 = determinant(mat<3, 3, T, Q>(m[0][0], m[0][2], m[0][3], m[1][0], m[1][2], m[1][3], m[2][0], m[2][2], m[2][3])); - T const m32 = determinant(mat<3, 3, T, Q>(m[0][0], m[0][1], m[0][3], m[1][0], m[1][1], m[1][3], m[2][0], m[2][1], m[2][3])); - T const m33 = determinant(mat<3, 3, T, Q>(m[0][0], m[0][1], m[0][2], m[1][0], m[1][1], m[1][2], m[2][0], m[2][1], m[2][2])); - - return mat<4, 4, T, Q>( - +m00, -m01, +m02, -m03, - -m10, +m11, -m12, +m13, - +m20, -m21, +m22, -m23, - -m30, +m31, -m32, +m33); - } -}//namespace glm diff --git a/third_party/glm/gtx/matrix_query.hpp b/third_party/glm/gtx/matrix_query.hpp deleted file mode 100755 index 8011b2b..0000000 --- a/third_party/glm/gtx/matrix_query.hpp +++ /dev/null @@ -1,77 +0,0 @@ -/// @ref gtx_matrix_query -/// @file glm/gtx/matrix_query.hpp -/// -/// @see core (dependence) -/// @see gtx_vector_query (dependence) -/// -/// @defgroup gtx_matrix_query GLM_GTX_matrix_query -/// @ingroup gtx -/// -/// Include to use the features of this extension. -/// -/// Query to evaluate matrix properties - -#pragma once - -// Dependency: -#include "../glm.hpp" -#include "../gtx/vector_query.hpp" -#include - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# ifndef GLM_ENABLE_EXPERIMENTAL -# pragma message("GLM: GLM_GTX_matrix_query is an experimental extension and may change in the future. 
Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.") -# else -# pragma message("GLM: GLM_GTX_matrix_query extension included") -# endif -#endif - -namespace glm -{ - /// @addtogroup gtx_matrix_query - /// @{ - - /// Return whether a matrix a null matrix. - /// From GLM_GTX_matrix_query extension. - template - GLM_FUNC_DECL bool isNull(mat<2, 2, T, Q> const& m, T const& epsilon); - - /// Return whether a matrix a null matrix. - /// From GLM_GTX_matrix_query extension. - template - GLM_FUNC_DECL bool isNull(mat<3, 3, T, Q> const& m, T const& epsilon); - - /// Return whether a matrix is a null matrix. - /// From GLM_GTX_matrix_query extension. - template - GLM_FUNC_DECL bool isNull(mat<4, 4, T, Q> const& m, T const& epsilon); - - /// Return whether a matrix is an identity matrix. - /// From GLM_GTX_matrix_query extension. - template class matType> - GLM_FUNC_DECL bool isIdentity(matType const& m, T const& epsilon); - - /// Return whether a matrix is a normalized matrix. - /// From GLM_GTX_matrix_query extension. - template - GLM_FUNC_DECL bool isNormalized(mat<2, 2, T, Q> const& m, T const& epsilon); - - /// Return whether a matrix is a normalized matrix. - /// From GLM_GTX_matrix_query extension. - template - GLM_FUNC_DECL bool isNormalized(mat<3, 3, T, Q> const& m, T const& epsilon); - - /// Return whether a matrix is a normalized matrix. - /// From GLM_GTX_matrix_query extension. - template - GLM_FUNC_DECL bool isNormalized(mat<4, 4, T, Q> const& m, T const& epsilon); - - /// Return whether a matrix is an orthonormalized matrix. - /// From GLM_GTX_matrix_query extension. - template class matType> - GLM_FUNC_DECL bool isOrthogonal(matType const& m, T const& epsilon); - - /// @} -}//namespace glm - -#include "matrix_query.inl" diff --git a/third_party/glm/gtx/matrix_query.inl b/third_party/glm/gtx/matrix_query.inl deleted file mode 100755 index 77bd231..0000000 --- a/third_party/glm/gtx/matrix_query.inl +++ /dev/null @@ -1,113 +0,0 @@ -/// @ref gtx_matrix_query - -namespace glm -{ - template - GLM_FUNC_QUALIFIER bool isNull(mat<2, 2, T, Q> const& m, T const& epsilon) - { - bool result = true; - for(length_t i = 0; result && i < m.length() ; ++i) - result = isNull(m[i], epsilon); - return result; - } - - template - GLM_FUNC_QUALIFIER bool isNull(mat<3, 3, T, Q> const& m, T const& epsilon) - { - bool result = true; - for(length_t i = 0; result && i < m.length() ; ++i) - result = isNull(m[i], epsilon); - return result; - } - - template - GLM_FUNC_QUALIFIER bool isNull(mat<4, 4, T, Q> const& m, T const& epsilon) - { - bool result = true; - for(length_t i = 0; result && i < m.length() ; ++i) - result = isNull(m[i], epsilon); - return result; - } - - template - GLM_FUNC_QUALIFIER bool isIdentity(mat const& m, T const& epsilon) - { - bool result = true; - for(length_t i = 0; result && i < m[0].length() ; ++i) - { - for(length_t j = 0; result && j < i ; ++j) - result = abs(m[i][j]) <= epsilon; - if(result) - result = abs(m[i][i] - 1) <= epsilon; - for(length_t j = i + 1; result && j < m.length(); ++j) - result = abs(m[i][j]) <= epsilon; - } - return result; - } - - template - GLM_FUNC_QUALIFIER bool isNormalized(mat<2, 2, T, Q> const& m, T const& epsilon) - { - bool result(true); - for(length_t i = 0; result && i < m.length(); ++i) - result = isNormalized(m[i], epsilon); - for(length_t i = 0; result && i < m.length(); ++i) - { - typename mat<2, 2, T, Q>::col_type v; - for(length_t j = 0; j < m.length(); ++j) - v[j] = m[j][i]; - result = isNormalized(v, epsilon); - 
} - return result; - } - - template - GLM_FUNC_QUALIFIER bool isNormalized(mat<3, 3, T, Q> const& m, T const& epsilon) - { - bool result(true); - for(length_t i = 0; result && i < m.length(); ++i) - result = isNormalized(m[i], epsilon); - for(length_t i = 0; result && i < m.length(); ++i) - { - typename mat<3, 3, T, Q>::col_type v; - for(length_t j = 0; j < m.length(); ++j) - v[j] = m[j][i]; - result = isNormalized(v, epsilon); - } - return result; - } - - template - GLM_FUNC_QUALIFIER bool isNormalized(mat<4, 4, T, Q> const& m, T const& epsilon) - { - bool result(true); - for(length_t i = 0; result && i < m.length(); ++i) - result = isNormalized(m[i], epsilon); - for(length_t i = 0; result && i < m.length(); ++i) - { - typename mat<4, 4, T, Q>::col_type v; - for(length_t j = 0; j < m.length(); ++j) - v[j] = m[j][i]; - result = isNormalized(v, epsilon); - } - return result; - } - - template - GLM_FUNC_QUALIFIER bool isOrthogonal(mat const& m, T const& epsilon) - { - bool result = true; - for(length_t i(0); result && i < m.length() - 1; ++i) - for(length_t j(i + 1); result && j < m.length(); ++j) - result = areOrthogonal(m[i], m[j], epsilon); - - if(result) - { - mat tmp = transpose(m); - for(length_t i(0); result && i < m.length() - 1 ; ++i) - for(length_t j(i + 1); result && j < m.length(); ++j) - result = areOrthogonal(tmp[i], tmp[j], epsilon); - } - return result; - } -}//namespace glm diff --git a/third_party/glm/gtx/matrix_transform_2d.hpp b/third_party/glm/gtx/matrix_transform_2d.hpp deleted file mode 100755 index 5f9c540..0000000 --- a/third_party/glm/gtx/matrix_transform_2d.hpp +++ /dev/null @@ -1,81 +0,0 @@ -/// @ref gtx_matrix_transform_2d -/// @file glm/gtx/matrix_transform_2d.hpp -/// @author Miguel Ãngel Pérez Martínez -/// -/// @see core (dependence) -/// -/// @defgroup gtx_matrix_transform_2d GLM_GTX_matrix_transform_2d -/// @ingroup gtx -/// -/// Include to use the features of this extension. -/// -/// Defines functions that generate common 2d transformation matrices. - -#pragma once - -// Dependency: -#include "../mat3x3.hpp" -#include "../vec2.hpp" - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# ifndef GLM_ENABLE_EXPERIMENTAL -# pragma message("GLM: GLM_GTX_matrix_transform_2d is an experimental extension and may change in the future. Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.") -# else -# pragma message("GLM: GLM_GTX_matrix_transform_2d extension included") -# endif -#endif - -namespace glm -{ - /// @addtogroup gtx_matrix_transform_2d - /// @{ - - /// Builds a translation 3 * 3 matrix created from a vector of 2 components. - /// - /// @param m Input matrix multiplied by this translation matrix. - /// @param v Coordinates of a translation vector. - template - GLM_FUNC_QUALIFIER mat<3, 3, T, Q> translate( - mat<3, 3, T, Q> const& m, - vec<2, T, Q> const& v); - - /// Builds a rotation 3 * 3 matrix created from an angle. - /// - /// @param m Input matrix multiplied by this translation matrix. - /// @param angle Rotation angle expressed in radians. - template - GLM_FUNC_QUALIFIER mat<3, 3, T, Q> rotate( - mat<3, 3, T, Q> const& m, - T angle); - - /// Builds a scale 3 * 3 matrix created from a vector of 2 components. - /// - /// @param m Input matrix multiplied by this translation matrix. - /// @param v Coordinates of a scale vector. - template - GLM_FUNC_QUALIFIER mat<3, 3, T, Q> scale( - mat<3, 3, T, Q> const& m, - vec<2, T, Q> const& v); - - /// Builds an horizontal (parallel to the x axis) shear 3 * 3 matrix. 
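// Illustrative sketch for the 3x3 helpers declared above (not part of the
// deleted sources): they compose 2D transforms the same way the 4x4 versions
// do, by post-multiplying the input matrix, so a point is transformed by the
// scale first, then the rotation, then the translation when the calls are
// chained in that order.
#define GLM_ENABLE_EXPERIMENTAL
#include <glm/glm.hpp>
#include <glm/gtx/matrix_transform_2d.hpp>

inline glm::vec2 transform_point_2d(glm::vec2 const& p)
{
	glm::mat3 m(1.0f);
	m = glm::translate(m, glm::vec2(10.0f, 0.0f)); // applied last
	m = glm::rotate(m, glm::radians(90.0f));
	m = glm::scale(m, glm::vec2(2.0f));            // applied first
	glm::vec3 const q = m * glm::vec3(p, 1.0f);    // homogeneous 2D point
	return glm::vec2(q);
}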
- /// - /// @param m Input matrix multiplied by this translation matrix. - /// @param y Shear factor. - template - GLM_FUNC_QUALIFIER mat<3, 3, T, Q> shearX( - mat<3, 3, T, Q> const& m, - T y); - - /// Builds a vertical (parallel to the y axis) shear 3 * 3 matrix. - /// - /// @param m Input matrix multiplied by this translation matrix. - /// @param x Shear factor. - template - GLM_FUNC_QUALIFIER mat<3, 3, T, Q> shearY( - mat<3, 3, T, Q> const& m, - T x); - - /// @} -}//namespace glm - -#include "matrix_transform_2d.inl" diff --git a/third_party/glm/gtx/matrix_transform_2d.inl b/third_party/glm/gtx/matrix_transform_2d.inl deleted file mode 100755 index a68d24d..0000000 --- a/third_party/glm/gtx/matrix_transform_2d.inl +++ /dev/null @@ -1,68 +0,0 @@ -/// @ref gtx_matrix_transform_2d -/// @author Miguel Ãngel Pérez Martínez - -#include "../trigonometric.hpp" - -namespace glm -{ - - template - GLM_FUNC_QUALIFIER mat<3, 3, T, Q> translate( - mat<3, 3, T, Q> const& m, - vec<2, T, Q> const& v) - { - mat<3, 3, T, Q> Result(m); - Result[2] = m[0] * v[0] + m[1] * v[1] + m[2]; - return Result; - } - - - template - GLM_FUNC_QUALIFIER mat<3, 3, T, Q> rotate( - mat<3, 3, T, Q> const& m, - T angle) - { - T const a = angle; - T const c = cos(a); - T const s = sin(a); - - mat<3, 3, T, Q> Result; - Result[0] = m[0] * c + m[1] * s; - Result[1] = m[0] * -s + m[1] * c; - Result[2] = m[2]; - return Result; - } - - template - GLM_FUNC_QUALIFIER mat<3, 3, T, Q> scale( - mat<3, 3, T, Q> const& m, - vec<2, T, Q> const& v) - { - mat<3, 3, T, Q> Result; - Result[0] = m[0] * v[0]; - Result[1] = m[1] * v[1]; - Result[2] = m[2]; - return Result; - } - - template - GLM_FUNC_QUALIFIER mat<3, 3, T, Q> shearX( - mat<3, 3, T, Q> const& m, - T y) - { - mat<3, 3, T, Q> Result(1); - Result[0][1] = y; - return m * Result; - } - - template - GLM_FUNC_QUALIFIER mat<3, 3, T, Q> shearY( - mat<3, 3, T, Q> const& m, - T x) - { - mat<3, 3, T, Q> Result(1); - Result[1][0] = x; - return m * Result; - } - -}//namespace glm diff --git a/third_party/glm/gtx/mixed_product.hpp b/third_party/glm/gtx/mixed_product.hpp deleted file mode 100755 index b242e35..0000000 --- a/third_party/glm/gtx/mixed_product.hpp +++ /dev/null @@ -1,41 +0,0 @@ -/// @ref gtx_mixed_product -/// @file glm/gtx/mixed_product.hpp -/// -/// @see core (dependence) -/// -/// @defgroup gtx_mixed_product GLM_GTX_mixed_producte -/// @ingroup gtx -/// -/// Include to use the features of this extension. -/// -/// Mixed product of 3 vectors. - -#pragma once - -// Dependency: -#include "../glm.hpp" - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# ifndef GLM_ENABLE_EXPERIMENTAL -# pragma message("GLM: GLM_GTX_mixed_product is an experimental extension and may change in the future. 
Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.") -# else -# pragma message("GLM: GLM_GTX_mixed_product extension included") -# endif -#endif - -namespace glm -{ - /// @addtogroup gtx_mixed_product - /// @{ - - /// @brief Mixed product of 3 vectors (from GLM_GTX_mixed_product extension) - template - GLM_FUNC_DECL T mixedProduct( - vec<3, T, Q> const& v1, - vec<3, T, Q> const& v2, - vec<3, T, Q> const& v3); - - /// @} -}// namespace glm - -#include "mixed_product.inl" diff --git a/third_party/glm/gtx/mixed_product.inl b/third_party/glm/gtx/mixed_product.inl deleted file mode 100755 index e5cdbdb..0000000 --- a/third_party/glm/gtx/mixed_product.inl +++ /dev/null @@ -1,15 +0,0 @@ -/// @ref gtx_mixed_product - -namespace glm -{ - template - GLM_FUNC_QUALIFIER T mixedProduct - ( - vec<3, T, Q> const& v1, - vec<3, T, Q> const& v2, - vec<3, T, Q> const& v3 - ) - { - return dot(cross(v1, v2), v3); - } -}//namespace glm diff --git a/third_party/glm/gtx/norm.hpp b/third_party/glm/gtx/norm.hpp deleted file mode 100755 index dfaebb7..0000000 --- a/third_party/glm/gtx/norm.hpp +++ /dev/null @@ -1,88 +0,0 @@ -/// @ref gtx_norm -/// @file glm/gtx/norm.hpp -/// -/// @see core (dependence) -/// @see gtx_quaternion (dependence) -/// @see gtx_component_wise (dependence) -/// -/// @defgroup gtx_norm GLM_GTX_norm -/// @ingroup gtx -/// -/// Include to use the features of this extension. -/// -/// Various ways to compute vector norms. - -#pragma once - -// Dependency: -#include "../geometric.hpp" -#include "../gtx/quaternion.hpp" -#include "../gtx/component_wise.hpp" - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# ifndef GLM_ENABLE_EXPERIMENTAL -# pragma message("GLM: GLM_GTX_norm is an experimental extension and may change in the future. Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.") -# else -# pragma message("GLM: GLM_GTX_norm extension included") -# endif -#endif - -namespace glm -{ - /// @addtogroup gtx_norm - /// @{ - - /// Returns the squared length of x. - /// From GLM_GTX_norm extension. - template - GLM_FUNC_DECL T length2(vec const& x); - - /// Returns the squared distance between p0 and p1, i.e., length2(p0 - p1). - /// From GLM_GTX_norm extension. - template - GLM_FUNC_DECL T distance2(vec const& p0, vec const& p1); - - //! Returns the L1 norm between x and y. - //! From GLM_GTX_norm extension. - template - GLM_FUNC_DECL T l1Norm(vec<3, T, Q> const& x, vec<3, T, Q> const& y); - - //! Returns the L1 norm of v. - //! From GLM_GTX_norm extension. - template - GLM_FUNC_DECL T l1Norm(vec<3, T, Q> const& v); - - //! Returns the L2 norm between x and y. - //! From GLM_GTX_norm extension. - template - GLM_FUNC_DECL T l2Norm(vec<3, T, Q> const& x, vec<3, T, Q> const& y); - - //! Returns the L2 norm of v. - //! From GLM_GTX_norm extension. - template - GLM_FUNC_DECL T l2Norm(vec<3, T, Q> const& x); - - //! Returns the L norm between x and y. - //! From GLM_GTX_norm extension. - template - GLM_FUNC_DECL T lxNorm(vec<3, T, Q> const& x, vec<3, T, Q> const& y, unsigned int Depth); - - //! Returns the L norm of v. - //! From GLM_GTX_norm extension. - template - GLM_FUNC_DECL T lxNorm(vec<3, T, Q> const& x, unsigned int Depth); - - //! Returns the LMax norm between x and y. - //! From GLM_GTX_norm extension. - template - GLM_FUNC_DECL T lMaxNorm(vec<3, T, Q> const& x, vec<3, T, Q> const& y); - - //! Returns the LMax norm of v. - //! From GLM_GTX_norm extension. 
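// Worked example for the norms declared above (an illustration, not part of
// the deleted sources): for v = (1, -2, 2),
//   l1Norm(v)    = |1| + |-2| + |2|    = 5
//   l2Norm(v)    = sqrt(1 + 4 + 4)     = 3
//   lMaxNorm(v)  = max(|1|, |-2|, |2|) = 2
//   lxNorm(v, 1) = 5 and lxNorm(v, 2) = 3, matching the L1 and L2 cases.
#define GLM_ENABLE_EXPERIMENTAL
#include <glm/glm.hpp>
#include <glm/gtx/norm.hpp>

inline void norm_demo()
{
	glm::vec3 const v(1.0f, -2.0f, 2.0f);
	float const l1 = glm::l1Norm(v);     // 5
	float const l2 = glm::l2Norm(v);     // 3
	float const lm = glm::lMaxNorm(v);   // 2
	float const lx = glm::lxNorm(v, 2u); // 3 (general p-norm with p = 2)
	(void)l1; (void)l2; (void)lm; (void)lx;
}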
- template - GLM_FUNC_DECL T lMaxNorm(vec<3, T, Q> const& x); - - /// @} -}//namespace glm - -#include "norm.inl" diff --git a/third_party/glm/gtx/norm.inl b/third_party/glm/gtx/norm.inl deleted file mode 100755 index 6db561b..0000000 --- a/third_party/glm/gtx/norm.inl +++ /dev/null @@ -1,95 +0,0 @@ -/// @ref gtx_norm - -#include "../detail/qualifier.hpp" - -namespace glm{ -namespace detail -{ - template - struct compute_length2 - { - GLM_FUNC_QUALIFIER static T call(vec const& v) - { - return dot(v, v); - } - }; -}//namespace detail - - template - GLM_FUNC_QUALIFIER genType length2(genType x) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'length2' accepts only floating-point inputs"); - return x * x; - } - - template - GLM_FUNC_QUALIFIER T length2(vec const& v) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'length2' accepts only floating-point inputs"); - return detail::compute_length2::value>::call(v); - } - - template - GLM_FUNC_QUALIFIER T distance2(T p0, T p1) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'distance2' accepts only floating-point inputs"); - return length2(p1 - p0); - } - - template - GLM_FUNC_QUALIFIER T distance2(vec const& p0, vec const& p1) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'distance2' accepts only floating-point inputs"); - return length2(p1 - p0); - } - - template - GLM_FUNC_QUALIFIER T l1Norm(vec<3, T, Q> const& a, vec<3, T, Q> const& b) - { - return abs(b.x - a.x) + abs(b.y - a.y) + abs(b.z - a.z); - } - - template - GLM_FUNC_QUALIFIER T l1Norm(vec<3, T, Q> const& v) - { - return abs(v.x) + abs(v.y) + abs(v.z); - } - - template - GLM_FUNC_QUALIFIER T l2Norm(vec<3, T, Q> const& a, vec<3, T, Q> const& b - ) - { - return length(b - a); - } - - template - GLM_FUNC_QUALIFIER T l2Norm(vec<3, T, Q> const& v) - { - return length(v); - } - - template - GLM_FUNC_QUALIFIER T lxNorm(vec<3, T, Q> const& x, vec<3, T, Q> const& y, unsigned int Depth) - { - return pow(pow(abs(y.x - x.x), T(Depth)) + pow(abs(y.y - x.y), T(Depth)) + pow(abs(y.z - x.z), T(Depth)), T(1) / T(Depth)); - } - - template - GLM_FUNC_QUALIFIER T lxNorm(vec<3, T, Q> const& v, unsigned int Depth) - { - return pow(pow(abs(v.x), T(Depth)) + pow(abs(v.y), T(Depth)) + pow(abs(v.z), T(Depth)), T(1) / T(Depth)); - } - - template - GLM_FUNC_QUALIFIER T lMaxNorm(vec<3, T, Q> const& a, vec<3, T, Q> const& b) - { - return compMax(abs(b - a)); - } - - template - GLM_FUNC_QUALIFIER T lMaxNorm(vec<3, T, Q> const& v) - { - return compMax(abs(v)); - } - -}//namespace glm diff --git a/third_party/glm/gtx/normal.hpp b/third_party/glm/gtx/normal.hpp deleted file mode 100755 index 068682f..0000000 --- a/third_party/glm/gtx/normal.hpp +++ /dev/null @@ -1,41 +0,0 @@ -/// @ref gtx_normal -/// @file glm/gtx/normal.hpp -/// -/// @see core (dependence) -/// @see gtx_extented_min_max (dependence) -/// -/// @defgroup gtx_normal GLM_GTX_normal -/// @ingroup gtx -/// -/// Include to use the features of this extension. -/// -/// Compute the normal of a triangle. - -#pragma once - -// Dependency: -#include "../glm.hpp" - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# ifndef GLM_ENABLE_EXPERIMENTAL -# pragma message("GLM: GLM_GTX_normal is an experimental extension and may change in the future. 
Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.") -# else -# pragma message("GLM: GLM_GTX_normal extension included") -# endif -#endif - -namespace glm -{ - /// @addtogroup gtx_normal - /// @{ - - /// Computes triangle normal from triangle points. - /// - /// @see gtx_normal - template - GLM_FUNC_DECL vec<3, T, Q> triangleNormal(vec<3, T, Q> const& p1, vec<3, T, Q> const& p2, vec<3, T, Q> const& p3); - - /// @} -}//namespace glm - -#include "normal.inl" diff --git a/third_party/glm/gtx/normal.inl b/third_party/glm/gtx/normal.inl deleted file mode 100755 index 74f9fc9..0000000 --- a/third_party/glm/gtx/normal.inl +++ /dev/null @@ -1,15 +0,0 @@ -/// @ref gtx_normal - -namespace glm -{ - template - GLM_FUNC_QUALIFIER vec<3, T, Q> triangleNormal - ( - vec<3, T, Q> const& p1, - vec<3, T, Q> const& p2, - vec<3, T, Q> const& p3 - ) - { - return normalize(cross(p1 - p2, p1 - p3)); - } -}//namespace glm diff --git a/third_party/glm/gtx/normalize_dot.hpp b/third_party/glm/gtx/normalize_dot.hpp deleted file mode 100755 index 5195802..0000000 --- a/third_party/glm/gtx/normalize_dot.hpp +++ /dev/null @@ -1,49 +0,0 @@ -/// @ref gtx_normalize_dot -/// @file glm/gtx/normalize_dot.hpp -/// -/// @see core (dependence) -/// @see gtx_fast_square_root (dependence) -/// -/// @defgroup gtx_normalize_dot GLM_GTX_normalize_dot -/// @ingroup gtx -/// -/// Include to use the features of this extension. -/// -/// Dot product of vectors that need to be normalize with a single square root. - -#pragma once - -// Dependency: -#include "../gtx/fast_square_root.hpp" - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# ifndef GLM_ENABLE_EXPERIMENTAL -# pragma message("GLM: GLM_GTX_normalize_dot is an experimental extension and may change in the future. Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.") -# else -# pragma message("GLM: GLM_GTX_normalize_dot extension included") -# endif -#endif - -namespace glm -{ - /// @addtogroup gtx_normalize_dot - /// @{ - - /// Normalize parameters and returns the dot product of x and y. - /// It's faster that dot(normalize(x), normalize(y)). - /// - /// @see gtx_normalize_dot extension. - template - GLM_FUNC_DECL T normalizeDot(vec const& x, vec const& y); - - /// Normalize parameters and returns the dot product of x and y. - /// Faster that dot(fastNormalize(x), fastNormalize(y)). - /// - /// @see gtx_normalize_dot extension. 
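// Illustrative note for normalizeDot() declared above (not part of the deleted
// sources): it computes dot(x, y) / (|x| * |y|), i.e. the cosine of the angle
// between x and y, using a single inversesqrt instead of normalizing both
// inputs separately.
#define GLM_ENABLE_EXPERIMENTAL
#include <glm/glm.hpp>
#include <glm/gtx/normalize_dot.hpp>

inline float cos_angle_demo()
{
	glm::vec3 const x(3.0f, 0.0f, 0.0f);
	glm::vec3 const y(0.0f, 5.0f, 0.0f);
	return glm::normalizeDot(x, y); // 0: the vectors are orthogonal
}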
- template - GLM_FUNC_DECL T fastNormalizeDot(vec const& x, vec const& y); - - /// @} -}//namespace glm - -#include "normalize_dot.inl" diff --git a/third_party/glm/gtx/normalize_dot.inl b/third_party/glm/gtx/normalize_dot.inl deleted file mode 100755 index 7bcd9a5..0000000 --- a/third_party/glm/gtx/normalize_dot.inl +++ /dev/null @@ -1,16 +0,0 @@ -/// @ref gtx_normalize_dot - -namespace glm -{ - template - GLM_FUNC_QUALIFIER T normalizeDot(vec const& x, vec const& y) - { - return glm::dot(x, y) * glm::inversesqrt(glm::dot(x, x) * glm::dot(y, y)); - } - - template - GLM_FUNC_QUALIFIER T fastNormalizeDot(vec const& x, vec const& y) - { - return glm::dot(x, y) * glm::fastInverseSqrt(glm::dot(x, x) * glm::dot(y, y)); - } -}//namespace glm diff --git a/third_party/glm/gtx/number_precision.hpp b/third_party/glm/gtx/number_precision.hpp deleted file mode 100755 index 3a606bd..0000000 --- a/third_party/glm/gtx/number_precision.hpp +++ /dev/null @@ -1,61 +0,0 @@ -/// @ref gtx_number_precision -/// @file glm/gtx/number_precision.hpp -/// -/// @see core (dependence) -/// @see gtc_type_precision (dependence) -/// @see gtc_quaternion (dependence) -/// -/// @defgroup gtx_number_precision GLM_GTX_number_precision -/// @ingroup gtx -/// -/// Include to use the features of this extension. -/// -/// Defined size types. - -#pragma once - -// Dependency: -#include "../glm.hpp" -#include "../gtc/type_precision.hpp" - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# ifndef GLM_ENABLE_EXPERIMENTAL -# pragma message("GLM: GLM_GTX_number_precision is an experimental extension and may change in the future. Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.") -# else -# pragma message("GLM: GLM_GTX_number_precision extension included") -# endif -#endif - -namespace glm{ -namespace gtx -{ - ///////////////////////////// - // Unsigned int vector types - - /// @addtogroup gtx_number_precision - /// @{ - - typedef u8 u8vec1; //!< \brief 8bit unsigned integer scalar. (from GLM_GTX_number_precision extension) - typedef u16 u16vec1; //!< \brief 16bit unsigned integer scalar. (from GLM_GTX_number_precision extension) - typedef u32 u32vec1; //!< \brief 32bit unsigned integer scalar. (from GLM_GTX_number_precision extension) - typedef u64 u64vec1; //!< \brief 64bit unsigned integer scalar. (from GLM_GTX_number_precision extension) - - ////////////////////// - // Float vector types - - typedef f32 f32vec1; //!< \brief Single-qualifier floating-point scalar. (from GLM_GTX_number_precision extension) - typedef f64 f64vec1; //!< \brief Single-qualifier floating-point scalar. (from GLM_GTX_number_precision extension) - - ////////////////////// - // Float matrix types - - typedef f32 f32mat1; //!< \brief Single-qualifier floating-point scalar. (from GLM_GTX_number_precision extension) - typedef f32 f32mat1x1; //!< \brief Single-qualifier floating-point scalar. (from GLM_GTX_number_precision extension) - typedef f64 f64mat1; //!< \brief Double-qualifier floating-point scalar. (from GLM_GTX_number_precision extension) - typedef f64 f64mat1x1; //!< \brief Double-qualifier floating-point scalar. 
(from GLM_GTX_number_precision extension) - - /// @} -}//namespace gtx -}//namespace glm - -#include "number_precision.inl" diff --git a/third_party/glm/gtx/number_precision.inl b/third_party/glm/gtx/number_precision.inl deleted file mode 100755 index b39d71c..0000000 --- a/third_party/glm/gtx/number_precision.inl +++ /dev/null @@ -1,6 +0,0 @@ -/// @ref gtx_number_precision - -namespace glm -{ - -} diff --git a/third_party/glm/gtx/optimum_pow.hpp b/third_party/glm/gtx/optimum_pow.hpp deleted file mode 100755 index 9284a47..0000000 --- a/third_party/glm/gtx/optimum_pow.hpp +++ /dev/null @@ -1,54 +0,0 @@ -/// @ref gtx_optimum_pow -/// @file glm/gtx/optimum_pow.hpp -/// -/// @see core (dependence) -/// -/// @defgroup gtx_optimum_pow GLM_GTX_optimum_pow -/// @ingroup gtx -/// -/// Include to use the features of this extension. -/// -/// Integer exponentiation of power functions. - -#pragma once - -// Dependency: -#include "../glm.hpp" - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# ifndef GLM_ENABLE_EXPERIMENTAL -# pragma message("GLM: GLM_GTX_optimum_pow is an experimental extension and may change in the future. Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.") -# else -# pragma message("GLM: GLM_GTX_optimum_pow extension included") -# endif -#endif - -namespace glm{ -namespace gtx -{ - /// @addtogroup gtx_optimum_pow - /// @{ - - /// Returns x raised to the power of 2. - /// - /// @see gtx_optimum_pow - template - GLM_FUNC_DECL genType pow2(genType const& x); - - /// Returns x raised to the power of 3. - /// - /// @see gtx_optimum_pow - template - GLM_FUNC_DECL genType pow3(genType const& x); - - /// Returns x raised to the power of 4. - /// - /// @see gtx_optimum_pow - template - GLM_FUNC_DECL genType pow4(genType const& x); - - /// @} -}//namespace gtx -}//namespace glm - -#include "optimum_pow.inl" diff --git a/third_party/glm/gtx/optimum_pow.inl b/third_party/glm/gtx/optimum_pow.inl deleted file mode 100755 index a26c19c..0000000 --- a/third_party/glm/gtx/optimum_pow.inl +++ /dev/null @@ -1,22 +0,0 @@ -/// @ref gtx_optimum_pow - -namespace glm -{ - template - GLM_FUNC_QUALIFIER genType pow2(genType const& x) - { - return x * x; - } - - template - GLM_FUNC_QUALIFIER genType pow3(genType const& x) - { - return x * x * x; - } - - template - GLM_FUNC_QUALIFIER genType pow4(genType const& x) - { - return (x * x) * (x * x); - } -}//namespace glm diff --git a/third_party/glm/gtx/orthonormalize.hpp b/third_party/glm/gtx/orthonormalize.hpp deleted file mode 100755 index 3e004fb..0000000 --- a/third_party/glm/gtx/orthonormalize.hpp +++ /dev/null @@ -1,49 +0,0 @@ -/// @ref gtx_orthonormalize -/// @file glm/gtx/orthonormalize.hpp -/// -/// @see core (dependence) -/// @see gtx_extented_min_max (dependence) -/// -/// @defgroup gtx_orthonormalize GLM_GTX_orthonormalize -/// @ingroup gtx -/// -/// Include to use the features of this extension. -/// -/// Orthonormalize matrices. - -#pragma once - -// Dependency: -#include "../vec3.hpp" -#include "../mat3x3.hpp" -#include "../geometric.hpp" - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# ifndef GLM_ENABLE_EXPERIMENTAL -# pragma message("GLM: GLM_GTX_orthonormalize is an experimental extension and may change in the future. 
Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.") -# else -# pragma message("GLM: GLM_GTX_orthonormalize extension included") -# endif -#endif - -namespace glm -{ - /// @addtogroup gtx_orthonormalize - /// @{ - - /// Returns the orthonormalized matrix of m. - /// - /// @see gtx_orthonormalize - template - GLM_FUNC_DECL mat<3, 3, T, Q> orthonormalize(mat<3, 3, T, Q> const& m); - - /// Orthonormalizes x according y. - /// - /// @see gtx_orthonormalize - template - GLM_FUNC_DECL vec<3, T, Q> orthonormalize(vec<3, T, Q> const& x, vec<3, T, Q> const& y); - - /// @} -}//namespace glm - -#include "orthonormalize.inl" diff --git a/third_party/glm/gtx/orthonormalize.inl b/third_party/glm/gtx/orthonormalize.inl deleted file mode 100755 index cb553ba..0000000 --- a/third_party/glm/gtx/orthonormalize.inl +++ /dev/null @@ -1,29 +0,0 @@ -/// @ref gtx_orthonormalize - -namespace glm -{ - template - GLM_FUNC_QUALIFIER mat<3, 3, T, Q> orthonormalize(mat<3, 3, T, Q> const& m) - { - mat<3, 3, T, Q> r = m; - - r[0] = normalize(r[0]); - - T d0 = dot(r[0], r[1]); - r[1] -= r[0] * d0; - r[1] = normalize(r[1]); - - T d1 = dot(r[1], r[2]); - d0 = dot(r[0], r[2]); - r[2] -= r[0] * d0 + r[1] * d1; - r[2] = normalize(r[2]); - - return r; - } - - template - GLM_FUNC_QUALIFIER vec<3, T, Q> orthonormalize(vec<3, T, Q> const& x, vec<3, T, Q> const& y) - { - return normalize(x - y * dot(y, x)); - } -}//namespace glm diff --git a/third_party/glm/gtx/perpendicular.hpp b/third_party/glm/gtx/perpendicular.hpp deleted file mode 100755 index 72b77b6..0000000 --- a/third_party/glm/gtx/perpendicular.hpp +++ /dev/null @@ -1,41 +0,0 @@ -/// @ref gtx_perpendicular -/// @file glm/gtx/perpendicular.hpp -/// -/// @see core (dependence) -/// @see gtx_projection (dependence) -/// -/// @defgroup gtx_perpendicular GLM_GTX_perpendicular -/// @ingroup gtx -/// -/// Include to use the features of this extension. -/// -/// Perpendicular of a vector from other one - -#pragma once - -// Dependency: -#include "../glm.hpp" -#include "../gtx/projection.hpp" - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# ifndef GLM_ENABLE_EXPERIMENTAL -# pragma message("GLM: GLM_GTX_perpendicular is an experimental extension and may change in the future. Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.") -# else -# pragma message("GLM: GLM_GTX_perpendicular extension included") -# endif -#endif - -namespace glm -{ - /// @addtogroup gtx_perpendicular - /// @{ - - //! Projects x a perpendicular axis of Normal. - //! From GLM_GTX_perpendicular extension. 
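// Illustrative note for perp() and proj() (not part of the deleted sources):
// together they split a vector into its components parallel and orthogonal to
// Normal, so x == proj(x, n) + perp(x, n) and dot(perp(x, n), n) == 0 for any
// non-zero n, which does not need to be unit length.
#define GLM_ENABLE_EXPERIMENTAL
#include <glm/glm.hpp>
#include <glm/gtx/perpendicular.hpp>
#include <glm/gtx/projection.hpp>

inline void split_demo()
{
	glm::vec3 const n(0.0f, 2.0f, 0.0f);          // deliberately not normalized
	glm::vec3 const x(1.0f, 3.0f, -2.0f);
	glm::vec3 const parallel   = glm::proj(x, n); // (0, 3, 0)
	glm::vec3 const orthogonal = glm::perp(x, n); // (1, 0, -2)
	(void)parallel; (void)orthogonal;
}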
- template - GLM_FUNC_DECL genType perp(genType const& x, genType const& Normal); - - /// @} -}//namespace glm - -#include "perpendicular.inl" diff --git a/third_party/glm/gtx/perpendicular.inl b/third_party/glm/gtx/perpendicular.inl deleted file mode 100755 index 1e72f33..0000000 --- a/third_party/glm/gtx/perpendicular.inl +++ /dev/null @@ -1,10 +0,0 @@ -/// @ref gtx_perpendicular - -namespace glm -{ - template - GLM_FUNC_QUALIFIER genType perp(genType const& x, genType const& Normal) - { - return x - proj(x, Normal); - } -}//namespace glm diff --git a/third_party/glm/gtx/polar_coordinates.hpp b/third_party/glm/gtx/polar_coordinates.hpp deleted file mode 100755 index b399112..0000000 --- a/third_party/glm/gtx/polar_coordinates.hpp +++ /dev/null @@ -1,48 +0,0 @@ -/// @ref gtx_polar_coordinates -/// @file glm/gtx/polar_coordinates.hpp -/// -/// @see core (dependence) -/// -/// @defgroup gtx_polar_coordinates GLM_GTX_polar_coordinates -/// @ingroup gtx -/// -/// Include to use the features of this extension. -/// -/// Conversion from Euclidean space to polar space and revert. - -#pragma once - -// Dependency: -#include "../glm.hpp" - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# ifndef GLM_ENABLE_EXPERIMENTAL -# pragma message("GLM: GLM_GTX_polar_coordinates is an experimental extension and may change in the future. Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.") -# else -# pragma message("GLM: GLM_GTX_polar_coordinates extension included") -# endif -#endif - -namespace glm -{ - /// @addtogroup gtx_polar_coordinates - /// @{ - - /// Convert Euclidean to Polar coordinates, x is the xz distance, y, the latitude and z the longitude. - /// - /// @see gtx_polar_coordinates - template - GLM_FUNC_DECL vec<3, T, Q> polar( - vec<3, T, Q> const& euclidean); - - /// Convert Polar to Euclidean coordinates. - /// - /// @see gtx_polar_coordinates - template - GLM_FUNC_DECL vec<3, T, Q> euclidean( - vec<2, T, Q> const& polar); - - /// @} -}//namespace glm - -#include "polar_coordinates.inl" diff --git a/third_party/glm/gtx/polar_coordinates.inl b/third_party/glm/gtx/polar_coordinates.inl deleted file mode 100755 index 371c8dd..0000000 --- a/third_party/glm/gtx/polar_coordinates.inl +++ /dev/null @@ -1,36 +0,0 @@ -/// @ref gtx_polar_coordinates - -namespace glm -{ - template - GLM_FUNC_QUALIFIER vec<3, T, Q> polar - ( - vec<3, T, Q> const& euclidean - ) - { - T const Length(length(euclidean)); - vec<3, T, Q> const tmp(euclidean / Length); - T const xz_dist(sqrt(tmp.x * tmp.x + tmp.z * tmp.z)); - - return vec<3, T, Q>( - asin(tmp.y), // latitude - atan(tmp.x, tmp.z), // longitude - xz_dist); // xz distance - } - - template - GLM_FUNC_QUALIFIER vec<3, T, Q> euclidean - ( - vec<2, T, Q> const& polar - ) - { - T const latitude(polar.x); - T const longitude(polar.y); - - return vec<3, T, Q>( - cos(latitude) * sin(longitude), - sin(latitude), - cos(latitude) * cos(longitude)); - } - -}//namespace glm diff --git a/third_party/glm/gtx/projection.hpp b/third_party/glm/gtx/projection.hpp deleted file mode 100755 index 678f3ad..0000000 --- a/third_party/glm/gtx/projection.hpp +++ /dev/null @@ -1,43 +0,0 @@ -/// @ref gtx_projection -/// @file glm/gtx/projection.hpp -/// -/// @see core (dependence) -/// -/// @defgroup gtx_projection GLM_GTX_projection -/// @ingroup gtx -/// -/// Include to use the features of this extension. 
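// Illustrative note for the polar coordinate helpers above (not part of the
// deleted sources): euclidean() maps a (latitude, longitude) pair, in radians,
// to a unit direction, and polar() recovers the latitude in .x, the longitude
// in .y and the xz distance in .z, so the two round-trip for angles in the
// principal range.
#define GLM_ENABLE_EXPERIMENTAL
#include <glm/glm.hpp>
#include <glm/gtx/polar_coordinates.hpp>

inline void polar_round_trip()
{
	glm::vec2 const angles(glm::radians(30.0f), glm::radians(45.0f)); // latitude, longitude
	glm::vec3 const dir  = glm::euclidean(angles); // unit-length direction
	glm::vec3 const back = glm::polar(dir);        // back.x ~ 30deg, back.y ~ 45deg (in radians)
	(void)back;
}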
-/// -/// Projection of a vector to other one - -#pragma once - -// Dependency: -#include "../geometric.hpp" - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# ifndef GLM_ENABLE_EXPERIMENTAL -# pragma message("GLM: GLM_GTX_projection is an experimental extension and may change in the future. Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.") -# else -# pragma message("GLM: GLM_GTX_projection extension included") -# endif -#endif - -namespace glm -{ - /// @addtogroup gtx_projection - /// @{ - - /// Projects x on Normal. - /// - /// @param[in] x A vector to project - /// @param[in] Normal A normal that doesn't need to be of unit length. - /// - /// @see gtx_projection - template - GLM_FUNC_DECL genType proj(genType const& x, genType const& Normal); - - /// @} -}//namespace glm - -#include "projection.inl" diff --git a/third_party/glm/gtx/projection.inl b/third_party/glm/gtx/projection.inl deleted file mode 100755 index f23f884..0000000 --- a/third_party/glm/gtx/projection.inl +++ /dev/null @@ -1,10 +0,0 @@ -/// @ref gtx_projection - -namespace glm -{ - template - GLM_FUNC_QUALIFIER genType proj(genType const& x, genType const& Normal) - { - return glm::dot(x, Normal) / glm::dot(Normal, Normal) * Normal; - } -}//namespace glm diff --git a/third_party/glm/gtx/quaternion.hpp b/third_party/glm/gtx/quaternion.hpp deleted file mode 100755 index 5c2b5ad..0000000 --- a/third_party/glm/gtx/quaternion.hpp +++ /dev/null @@ -1,174 +0,0 @@ -/// @ref gtx_quaternion -/// @file glm/gtx/quaternion.hpp -/// -/// @see core (dependence) -/// @see gtx_extented_min_max (dependence) -/// -/// @defgroup gtx_quaternion GLM_GTX_quaternion -/// @ingroup gtx -/// -/// Include to use the features of this extension. -/// -/// Extented quaternion types and functions - -#pragma once - -// Dependency: -#include "../glm.hpp" -#include "../gtc/constants.hpp" -#include "../gtc/quaternion.hpp" -#include "../ext/quaternion_exponential.hpp" -#include "../gtx/norm.hpp" - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# ifndef GLM_ENABLE_EXPERIMENTAL -# pragma message("GLM: GLM_GTX_quaternion is an experimental extension and may change in the future. Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.") -# else -# pragma message("GLM: GLM_GTX_quaternion extension included") -# endif -#endif - -namespace glm -{ - /// @addtogroup gtx_quaternion - /// @{ - - /// Create an identity quaternion. - /// - /// @see gtx_quaternion - template - GLM_FUNC_DECL GLM_CONSTEXPR qua quat_identity(); - - /// Compute a cross product between a quaternion and a vector. - /// - /// @see gtx_quaternion - template - GLM_FUNC_DECL vec<3, T, Q> cross( - qua const& q, - vec<3, T, Q> const& v); - - //! Compute a cross product between a vector and a quaternion. - /// - /// @see gtx_quaternion - template - GLM_FUNC_DECL vec<3, T, Q> cross( - vec<3, T, Q> const& v, - qua const& q); - - //! Compute a point on a path according squad equation. - //! q1 and q2 are control points; s1 and s2 are intermediate control points. - /// - /// @see gtx_quaternion - template - GLM_FUNC_DECL qua squad( - qua const& q1, - qua const& q2, - qua const& s1, - qua const& s2, - T const& h); - - //! Returns an intermediate control point for squad interpolation. - /// - /// @see gtx_quaternion - template - GLM_FUNC_DECL qua intermediate( - qua const& prev, - qua const& curr, - qua const& next); - - //! Returns quarternion square root. 
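// Illustrative usage of proj() above: splitting a vector into the part along a normal and the
// remainder. A minimal sketch with invented values; assumes GLM_ENABLE_EXPERIMENTAL is defined.
#define GLM_ENABLE_EXPERIMENTAL
#include <glm/glm.hpp>
#include <glm/gtx/projection.hpp>

static void projection_example()
{
	glm::vec3 v(3.0f, -1.0f, 2.0f);
	glm::vec3 n(0.0f, 2.0f, 0.0f);          // the normal does not need to be unit length
	glm::vec3 along = glm::proj(v, n);      // (0, -1, 0): the component of v along n
	glm::vec3 inPlane = v - along;          // (3, 0, 2): what is left in the plane orthogonal to n
	(void)inPlane;
}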
- /// - /// @see gtx_quaternion - //template - //qua sqrt( - // qua const& q); - - //! Rotates a 3 components vector by a quaternion. - /// - /// @see gtx_quaternion - template - GLM_FUNC_DECL vec<3, T, Q> rotate( - qua const& q, - vec<3, T, Q> const& v); - - /// Rotates a 4 components vector by a quaternion. - /// - /// @see gtx_quaternion - template - GLM_FUNC_DECL vec<4, T, Q> rotate( - qua const& q, - vec<4, T, Q> const& v); - - /// Extract the real component of a quaternion. - /// - /// @see gtx_quaternion - template - GLM_FUNC_DECL T extractRealComponent( - qua const& q); - - /// Converts a quaternion to a 3 * 3 matrix. - /// - /// @see gtx_quaternion - template - GLM_FUNC_DECL mat<3, 3, T, Q> toMat3( - qua const& x){return mat3_cast(x);} - - /// Converts a quaternion to a 4 * 4 matrix. - /// - /// @see gtx_quaternion - template - GLM_FUNC_DECL mat<4, 4, T, Q> toMat4( - qua const& x){return mat4_cast(x);} - - /// Converts a 3 * 3 matrix to a quaternion. - /// - /// @see gtx_quaternion - template - GLM_FUNC_DECL qua toQuat( - mat<3, 3, T, Q> const& x){return quat_cast(x);} - - /// Converts a 4 * 4 matrix to a quaternion. - /// - /// @see gtx_quaternion - template - GLM_FUNC_DECL qua toQuat( - mat<4, 4, T, Q> const& x){return quat_cast(x);} - - /// Quaternion interpolation using the rotation short path. - /// - /// @see gtx_quaternion - template - GLM_FUNC_DECL qua shortMix( - qua const& x, - qua const& y, - T const& a); - - /// Quaternion normalized linear interpolation. - /// - /// @see gtx_quaternion - template - GLM_FUNC_DECL qua fastMix( - qua const& x, - qua const& y, - T const& a); - - /// Compute the rotation between two vectors. - /// @param orig vector, needs to be normalized - /// @param dest vector, needs to be normalized - /// - /// @see gtx_quaternion - template - GLM_FUNC_DECL qua rotation( - vec<3, T, Q> const& orig, - vec<3, T, Q> const& dest); - - /// Returns the squared length of x. 
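// Illustrative usage of the quaternion helpers declared above (toMat4, rotate, rotation): a minimal
// sketch with invented values, assuming GLM_ENABLE_EXPERIMENTAL is defined; angleAxis() and
// radians() come from the non-experimental core/gtc headers pulled in by this extension.
#define GLM_ENABLE_EXPERIMENTAL
#include <glm/glm.hpp>
#include <glm/gtx/quaternion.hpp>

static void quaternion_example()
{
	glm::quat q = glm::angleAxis(glm::radians(90.0f), glm::vec3(0.0f, 1.0f, 0.0f)); // quarter turn around +Y
	glm::mat4 m = glm::toMat4(q);                               // the same rotation as a 4x4 matrix
	glm::vec3 v = glm::rotate(q, glm::vec3(1.0f, 0.0f, 0.0f));  // +X rotated to roughly (0, 0, -1)
	glm::quat r = glm::rotation(glm::vec3(0.0f, 0.0f, 1.0f),    // shortest-arc rotation between two
	                            glm::vec3(0.0f, 1.0f, 0.0f));   // directions; both must be normalized
	(void)m; (void)v; (void)r;
}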
- /// - /// @see gtx_quaternion - template - GLM_FUNC_DECL GLM_CONSTEXPR T length2(qua const& q); - - /// @} -}//namespace glm - -#include "quaternion.inl" diff --git a/third_party/glm/gtx/quaternion.inl b/third_party/glm/gtx/quaternion.inl deleted file mode 100755 index d125bcc..0000000 --- a/third_party/glm/gtx/quaternion.inl +++ /dev/null @@ -1,159 +0,0 @@ -/// @ref gtx_quaternion - -#include -#include "../gtc/constants.hpp" - -namespace glm -{ - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR qua quat_identity() - { - return qua(static_cast(1), static_cast(0), static_cast(0), static_cast(0)); - } - - template - GLM_FUNC_QUALIFIER vec<3, T, Q> cross(vec<3, T, Q> const& v, qua const& q) - { - return inverse(q) * v; - } - - template - GLM_FUNC_QUALIFIER vec<3, T, Q> cross(qua const& q, vec<3, T, Q> const& v) - { - return q * v; - } - - template - GLM_FUNC_QUALIFIER qua squad - ( - qua const& q1, - qua const& q2, - qua const& s1, - qua const& s2, - T const& h) - { - return mix(mix(q1, q2, h), mix(s1, s2, h), static_cast(2) * (static_cast(1) - h) * h); - } - - template - GLM_FUNC_QUALIFIER qua intermediate - ( - qua const& prev, - qua const& curr, - qua const& next - ) - { - qua invQuat = inverse(curr); - return exp((log(next * invQuat) + log(prev * invQuat)) / static_cast(-4)) * curr; - } - - template - GLM_FUNC_QUALIFIER vec<3, T, Q> rotate(qua const& q, vec<3, T, Q> const& v) - { - return q * v; - } - - template - GLM_FUNC_QUALIFIER vec<4, T, Q> rotate(qua const& q, vec<4, T, Q> const& v) - { - return q * v; - } - - template - GLM_FUNC_QUALIFIER T extractRealComponent(qua const& q) - { - T w = static_cast(1) - q.x * q.x - q.y * q.y - q.z * q.z; - if(w < T(0)) - return T(0); - else - return -sqrt(w); - } - - template - GLM_FUNC_QUALIFIER GLM_CONSTEXPR T length2(qua const& q) - { - return q.x * q.x + q.y * q.y + q.z * q.z + q.w * q.w; - } - - template - GLM_FUNC_QUALIFIER qua shortMix(qua const& x, qua const& y, T const& a) - { - if(a <= static_cast(0)) return x; - if(a >= static_cast(1)) return y; - - T fCos = dot(x, y); - qua y2(y); //BUG!!! qua y2; - if(fCos < static_cast(0)) - { - y2 = -y; - fCos = -fCos; - } - - //if(fCos > 1.0f) // problem - T k0, k1; - if(fCos > (static_cast(1) - epsilon())) - { - k0 = static_cast(1) - a; - k1 = static_cast(0) + a; //BUG!!! 1.0f + a; - } - else - { - T fSin = sqrt(T(1) - fCos * fCos); - T fAngle = atan(fSin, fCos); - T fOneOverSin = static_cast(1) / fSin; - k0 = sin((static_cast(1) - a) * fAngle) * fOneOverSin; - k1 = sin((static_cast(0) + a) * fAngle) * fOneOverSin; - } - - return qua( - k0 * x.w + k1 * y2.w, - k0 * x.x + k1 * y2.x, - k0 * x.y + k1 * y2.y, - k0 * x.z + k1 * y2.z); - } - - template - GLM_FUNC_QUALIFIER qua fastMix(qua const& x, qua const& y, T const& a) - { - return glm::normalize(x * (static_cast(1) - a) + (y * a)); - } - - template - GLM_FUNC_QUALIFIER qua rotation(vec<3, T, Q> const& orig, vec<3, T, Q> const& dest) - { - T cosTheta = dot(orig, dest); - vec<3, T, Q> rotationAxis; - - if(cosTheta >= static_cast(1) - epsilon()) { - // orig and dest point in the same direction - return quat_identity(); - } - - if(cosTheta < static_cast(-1) + epsilon()) - { - // special case when vectors in opposite directions : - // there is no "ideal" rotation axis - // So guess one; any will do as long as it's perpendicular to start - // This implementation favors a rotation around the Up axis (Y), - // since it's often what you want to do. 
- rotationAxis = cross(vec<3, T, Q>(0, 0, 1), orig); - if(length2(rotationAxis) < epsilon()) // bad luck, they were parallel, try again! - rotationAxis = cross(vec<3, T, Q>(1, 0, 0), orig); - - rotationAxis = normalize(rotationAxis); - return angleAxis(pi(), rotationAxis); - } - - // Implementation from Stan Melax's Game Programming Gems 1 article - rotationAxis = cross(orig, dest); - - T s = sqrt((T(1) + cosTheta) * static_cast(2)); - T invs = static_cast(1) / s; - - return qua( - s * static_cast(0.5f), - rotationAxis.x * invs, - rotationAxis.y * invs, - rotationAxis.z * invs); - } -}//namespace glm diff --git a/third_party/glm/gtx/range.hpp b/third_party/glm/gtx/range.hpp deleted file mode 100755 index 93bcb9a..0000000 --- a/third_party/glm/gtx/range.hpp +++ /dev/null @@ -1,98 +0,0 @@ -/// @ref gtx_range -/// @file glm/gtx/range.hpp -/// @author Joshua Moerman -/// -/// @defgroup gtx_range GLM_GTX_range -/// @ingroup gtx -/// -/// Include to use the features of this extension. -/// -/// Defines begin and end for vectors and matrices. Useful for range-based for loop. -/// The range is defined over the elements, not over columns or rows (e.g. mat4 has 16 elements). - -#pragma once - -// Dependencies -#include "../detail/setup.hpp" - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# ifndef GLM_ENABLE_EXPERIMENTAL -# pragma message("GLM: GLM_GTX_range is an experimental extension and may change in the future. Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.") -# else -# pragma message("GLM: GLM_GTX_range extension included") -# endif -#endif - -#include "../gtc/type_ptr.hpp" -#include "../gtc/vec1.hpp" - -namespace glm -{ - /// @addtogroup gtx_range - /// @{ - -# if GLM_COMPILER & GLM_COMPILER_VC -# pragma warning(push) -# pragma warning(disable : 4100) // unreferenced formal parameter -# endif - - template - inline length_t components(vec<1, T, Q> const& v) - { - return v.length(); - } - - template - inline length_t components(vec<2, T, Q> const& v) - { - return v.length(); - } - - template - inline length_t components(vec<3, T, Q> const& v) - { - return v.length(); - } - - template - inline length_t components(vec<4, T, Q> const& v) - { - return v.length(); - } - - template - inline length_t components(genType const& m) - { - return m.length() * m[0].length(); - } - - template - inline typename genType::value_type const * begin(genType const& v) - { - return value_ptr(v); - } - - template - inline typename genType::value_type const * end(genType const& v) - { - return begin(v) + components(v); - } - - template - inline typename genType::value_type * begin(genType& v) - { - return value_ptr(v); - } - - template - inline typename genType::value_type * end(genType& v) - { - return begin(v) + components(v); - } - -# if GLM_COMPILER & GLM_COMPILER_VC -# pragma warning(pop) -# endif - - /// @} -}//namespace glm diff --git a/third_party/glm/gtx/raw_data.hpp b/third_party/glm/gtx/raw_data.hpp deleted file mode 100755 index 86cbe77..0000000 --- a/third_party/glm/gtx/raw_data.hpp +++ /dev/null @@ -1,51 +0,0 @@ -/// @ref gtx_raw_data -/// @file glm/gtx/raw_data.hpp -/// -/// @see core (dependence) -/// -/// @defgroup gtx_raw_data GLM_GTX_raw_data -/// @ingroup gtx -/// -/// Include to use the features of this extension. 
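// Illustrative usage of the begin()/end() overloads above: range-based for visits the scalar
// elements, column by column for matrices. A minimal sketch; assumes GLM_ENABLE_EXPERIMENTAL.
#define GLM_ENABLE_EXPERIMENTAL
#include <glm/glm.hpp>
#include <glm/gtx/range.hpp>

static void range_example()
{
	glm::vec4 v(1.0f, 2.0f, 3.0f, 4.0f);
	for(float& x : v)
		x *= 2.0f;                 // in-place scaling through the non-const overloads
	glm::mat4 m(1.0f);
	float sum = 0.0f;
	for(float x : m)
		sum += x;                  // 16 elements of the identity matrix, sum == 4
	(void)sum;
}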
-/// -/// Projection of a vector to other one - -#pragma once - -// Dependencies -#include "../ext/scalar_uint_sized.hpp" -#include "../detail/setup.hpp" - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# ifndef GLM_ENABLE_EXPERIMENTAL -# pragma message("GLM: GLM_GTX_raw_data is an experimental extension and may change in the future. Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.") -# else -# pragma message("GLM: GLM_GTX_raw_data extension included") -# endif -#endif - -namespace glm -{ - /// @addtogroup gtx_raw_data - /// @{ - - //! Type for byte numbers. - //! From GLM_GTX_raw_data extension. - typedef detail::uint8 byte; - - //! Type for word numbers. - //! From GLM_GTX_raw_data extension. - typedef detail::uint16 word; - - //! Type for dword numbers. - //! From GLM_GTX_raw_data extension. - typedef detail::uint32 dword; - - //! Type for qword numbers. - //! From GLM_GTX_raw_data extension. - typedef detail::uint64 qword; - - /// @} -}// namespace glm - -#include "raw_data.inl" diff --git a/third_party/glm/gtx/raw_data.inl b/third_party/glm/gtx/raw_data.inl deleted file mode 100755 index c740317..0000000 --- a/third_party/glm/gtx/raw_data.inl +++ /dev/null @@ -1,2 +0,0 @@ -/// @ref gtx_raw_data - diff --git a/third_party/glm/gtx/rotate_normalized_axis.hpp b/third_party/glm/gtx/rotate_normalized_axis.hpp deleted file mode 100755 index 2103ca0..0000000 --- a/third_party/glm/gtx/rotate_normalized_axis.hpp +++ /dev/null @@ -1,68 +0,0 @@ -/// @ref gtx_rotate_normalized_axis -/// @file glm/gtx/rotate_normalized_axis.hpp -/// -/// @see core (dependence) -/// @see gtc_matrix_transform -/// @see gtc_quaternion -/// -/// @defgroup gtx_rotate_normalized_axis GLM_GTX_rotate_normalized_axis -/// @ingroup gtx -/// -/// Include to use the features of this extension. -/// -/// Quaternions and matrices rotations around normalized axis. - -#pragma once - -// Dependency: -#include "../glm.hpp" -#include "../gtc/epsilon.hpp" -#include "../gtc/quaternion.hpp" - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# ifndef GLM_ENABLE_EXPERIMENTAL -# pragma message("GLM: GLM_GTX_rotate_normalized_axis is an experimental extension and may change in the future. Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.") -# else -# pragma message("GLM: GLM_GTX_rotate_normalized_axis extension included") -# endif -#endif - -namespace glm -{ - /// @addtogroup gtx_rotate_normalized_axis - /// @{ - - /// Builds a rotation 4 * 4 matrix created from a normalized axis and an angle. - /// - /// @param m Input matrix multiplied by this rotation matrix. - /// @param angle Rotation angle expressed in radians. - /// @param axis Rotation axis, must be normalized. - /// @tparam T Value type used to build the matrix. Currently supported: half (not recommended), float or double. - /// - /// @see gtx_rotate_normalized_axis - /// @see - rotate(T angle, T x, T y, T z) - /// @see - rotate(mat<4, 4, T, Q> const& m, T angle, T x, T y, T z) - /// @see - rotate(T angle, vec<3, T, Q> const& v) - template - GLM_FUNC_DECL mat<4, 4, T, Q> rotateNormalizedAxis( - mat<4, 4, T, Q> const& m, - T const& angle, - vec<3, T, Q> const& axis); - - /// Rotates a quaternion from a vector of 3 components normalized axis and an angle. - /// - /// @param q Source orientation - /// @param angle Angle expressed in radians. - /// @param axis Normalized axis of the rotation, must be normalized. 
- /// - /// @see gtx_rotate_normalized_axis - template - GLM_FUNC_DECL qua rotateNormalizedAxis( - qua const& q, - T const& angle, - vec<3, T, Q> const& axis); - - /// @} -}//namespace glm - -#include "rotate_normalized_axis.inl" diff --git a/third_party/glm/gtx/rotate_normalized_axis.inl b/third_party/glm/gtx/rotate_normalized_axis.inl deleted file mode 100755 index b2e9278..0000000 --- a/third_party/glm/gtx/rotate_normalized_axis.inl +++ /dev/null @@ -1,58 +0,0 @@ -/// @ref gtx_rotate_normalized_axis - -namespace glm -{ - template - GLM_FUNC_QUALIFIER mat<4, 4, T, Q> rotateNormalizedAxis - ( - mat<4, 4, T, Q> const& m, - T const& angle, - vec<3, T, Q> const& v - ) - { - T const a = angle; - T const c = cos(a); - T const s = sin(a); - - vec<3, T, Q> const axis(v); - - vec<3, T, Q> const temp((static_cast(1) - c) * axis); - - mat<4, 4, T, Q> Rotate; - Rotate[0][0] = c + temp[0] * axis[0]; - Rotate[0][1] = 0 + temp[0] * axis[1] + s * axis[2]; - Rotate[0][2] = 0 + temp[0] * axis[2] - s * axis[1]; - - Rotate[1][0] = 0 + temp[1] * axis[0] - s * axis[2]; - Rotate[1][1] = c + temp[1] * axis[1]; - Rotate[1][2] = 0 + temp[1] * axis[2] + s * axis[0]; - - Rotate[2][0] = 0 + temp[2] * axis[0] + s * axis[1]; - Rotate[2][1] = 0 + temp[2] * axis[1] - s * axis[0]; - Rotate[2][2] = c + temp[2] * axis[2]; - - mat<4, 4, T, Q> Result; - Result[0] = m[0] * Rotate[0][0] + m[1] * Rotate[0][1] + m[2] * Rotate[0][2]; - Result[1] = m[0] * Rotate[1][0] + m[1] * Rotate[1][1] + m[2] * Rotate[1][2]; - Result[2] = m[0] * Rotate[2][0] + m[1] * Rotate[2][1] + m[2] * Rotate[2][2]; - Result[3] = m[3]; - return Result; - } - - template - GLM_FUNC_QUALIFIER qua rotateNormalizedAxis - ( - qua const& q, - T const& angle, - vec<3, T, Q> const& v - ) - { - vec<3, T, Q> const Tmp(v); - - T const AngleRad(angle); - T const Sin = sin(AngleRad * T(0.5)); - - return q * qua(cos(AngleRad * static_cast(0.5)), Tmp.x * Sin, Tmp.y * Sin, Tmp.z * Sin); - //return gtc::quaternion::cross(q, tquat(cos(AngleRad * T(0.5)), Tmp.x * fSin, Tmp.y * fSin, Tmp.z * fSin)); - } -}//namespace glm diff --git a/third_party/glm/gtx/rotate_vector.hpp b/third_party/glm/gtx/rotate_vector.hpp deleted file mode 100755 index dcd5b95..0000000 --- a/third_party/glm/gtx/rotate_vector.hpp +++ /dev/null @@ -1,123 +0,0 @@ -/// @ref gtx_rotate_vector -/// @file glm/gtx/rotate_vector.hpp -/// -/// @see core (dependence) -/// @see gtx_transform (dependence) -/// -/// @defgroup gtx_rotate_vector GLM_GTX_rotate_vector -/// @ingroup gtx -/// -/// Include to use the features of this extension. -/// -/// Function to directly rotate a vector - -#pragma once - -// Dependency: -#include "../gtx/transform.hpp" -#include "../gtc/epsilon.hpp" -#include "../ext/vector_relational.hpp" -#include "../glm.hpp" - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# ifndef GLM_ENABLE_EXPERIMENTAL -# pragma message("GLM: GLM_GTX_rotate_vector is an experimental extension and may change in the future. Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.") -# else -# pragma message("GLM: GLM_GTX_rotate_vector extension included") -# endif -#endif - -namespace glm -{ - /// @addtogroup gtx_rotate_vector - /// @{ - - /// Returns Spherical interpolation between two vectors - /// - /// @param x A first vector - /// @param y A second vector - /// @param a Interpolation factor. The interpolation is defined beyond the range [0, 1]. 
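// Illustrative usage of rotateNormalizedAxis() above: it expects an axis that is already unit
// length, so the normalization performed by the regular glm::rotate can be skipped. A minimal
// sketch with invented values; assumes GLM_ENABLE_EXPERIMENTAL is defined.
#define GLM_ENABLE_EXPERIMENTAL
#include <glm/glm.hpp>
#include <glm/gtx/rotate_normalized_axis.hpp>

static void rotate_normalized_axis_example()
{
	glm::vec3 axis = glm::normalize(glm::vec3(1.0f, 1.0f, 0.0f));
	glm::mat4 m = glm::rotateNormalizedAxis(glm::mat4(1.0f), glm::radians(45.0f), axis);
	glm::quat q = glm::rotateNormalizedAxis(glm::quat(1.0f, 0.0f, 0.0f, 0.0f), // identity orientation (w, x, y, z)
	                                        glm::radians(45.0f), axis);
	(void)m; (void)q;
}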
- /// - /// @see gtx_rotate_vector - template - GLM_FUNC_DECL vec<3, T, Q> slerp( - vec<3, T, Q> const& x, - vec<3, T, Q> const& y, - T const& a); - - //! Rotate a two dimensional vector. - //! From GLM_GTX_rotate_vector extension. - template - GLM_FUNC_DECL vec<2, T, Q> rotate( - vec<2, T, Q> const& v, - T const& angle); - - //! Rotate a three dimensional vector around an axis. - //! From GLM_GTX_rotate_vector extension. - template - GLM_FUNC_DECL vec<3, T, Q> rotate( - vec<3, T, Q> const& v, - T const& angle, - vec<3, T, Q> const& normal); - - //! Rotate a four dimensional vector around an axis. - //! From GLM_GTX_rotate_vector extension. - template - GLM_FUNC_DECL vec<4, T, Q> rotate( - vec<4, T, Q> const& v, - T const& angle, - vec<3, T, Q> const& normal); - - //! Rotate a three dimensional vector around the X axis. - //! From GLM_GTX_rotate_vector extension. - template - GLM_FUNC_DECL vec<3, T, Q> rotateX( - vec<3, T, Q> const& v, - T const& angle); - - //! Rotate a three dimensional vector around the Y axis. - //! From GLM_GTX_rotate_vector extension. - template - GLM_FUNC_DECL vec<3, T, Q> rotateY( - vec<3, T, Q> const& v, - T const& angle); - - //! Rotate a three dimensional vector around the Z axis. - //! From GLM_GTX_rotate_vector extension. - template - GLM_FUNC_DECL vec<3, T, Q> rotateZ( - vec<3, T, Q> const& v, - T const& angle); - - //! Rotate a four dimensional vector around the X axis. - //! From GLM_GTX_rotate_vector extension. - template - GLM_FUNC_DECL vec<4, T, Q> rotateX( - vec<4, T, Q> const& v, - T const& angle); - - //! Rotate a four dimensional vector around the Y axis. - //! From GLM_GTX_rotate_vector extension. - template - GLM_FUNC_DECL vec<4, T, Q> rotateY( - vec<4, T, Q> const& v, - T const& angle); - - //! Rotate a four dimensional vector around the Z axis. - //! From GLM_GTX_rotate_vector extension. - template - GLM_FUNC_DECL vec<4, T, Q> rotateZ( - vec<4, T, Q> const& v, - T const& angle); - - //! Build a rotation matrix from a normal and a up vector. - //! From GLM_GTX_rotate_vector extension. - template - GLM_FUNC_DECL mat<4, 4, T, Q> orientation( - vec<3, T, Q> const& Normal, - vec<3, T, Q> const& Up); - - /// @} -}//namespace glm - -#include "rotate_vector.inl" diff --git a/third_party/glm/gtx/rotate_vector.inl b/third_party/glm/gtx/rotate_vector.inl deleted file mode 100755 index f8136e7..0000000 --- a/third_party/glm/gtx/rotate_vector.inl +++ /dev/null @@ -1,187 +0,0 @@ -/// @ref gtx_rotate_vector - -namespace glm -{ - template - GLM_FUNC_QUALIFIER vec<3, T, Q> slerp - ( - vec<3, T, Q> const& x, - vec<3, T, Q> const& y, - T const& a - ) - { - // get cosine of angle between vectors (-1 -> 1) - T CosAlpha = dot(x, y); - // get angle (0 -> pi) - T Alpha = acos(CosAlpha); - // get sine of angle between vectors (0 -> 1) - T SinAlpha = sin(Alpha); - // this breaks down when SinAlpha = 0, i.e. 
Alpha = 0 or pi - T t1 = sin((static_cast(1) - a) * Alpha) / SinAlpha; - T t2 = sin(a * Alpha) / SinAlpha; - - // interpolate src vectors - return x * t1 + y * t2; - } - - template - GLM_FUNC_QUALIFIER vec<2, T, Q> rotate - ( - vec<2, T, Q> const& v, - T const& angle - ) - { - vec<2, T, Q> Result; - T const Cos(cos(angle)); - T const Sin(sin(angle)); - - Result.x = v.x * Cos - v.y * Sin; - Result.y = v.x * Sin + v.y * Cos; - return Result; - } - - template - GLM_FUNC_QUALIFIER vec<3, T, Q> rotate - ( - vec<3, T, Q> const& v, - T const& angle, - vec<3, T, Q> const& normal - ) - { - return mat<3, 3, T, Q>(glm::rotate(angle, normal)) * v; - } - /* - template - GLM_FUNC_QUALIFIER vec<3, T, Q> rotateGTX( - const vec<3, T, Q>& x, - T angle, - const vec<3, T, Q>& normal) - { - const T Cos = cos(radians(angle)); - const T Sin = sin(radians(angle)); - return x * Cos + ((x * normal) * (T(1) - Cos)) * normal + cross(x, normal) * Sin; - } - */ - template - GLM_FUNC_QUALIFIER vec<4, T, Q> rotate - ( - vec<4, T, Q> const& v, - T const& angle, - vec<3, T, Q> const& normal - ) - { - return rotate(angle, normal) * v; - } - - template - GLM_FUNC_QUALIFIER vec<3, T, Q> rotateX - ( - vec<3, T, Q> const& v, - T const& angle - ) - { - vec<3, T, Q> Result(v); - T const Cos(cos(angle)); - T const Sin(sin(angle)); - - Result.y = v.y * Cos - v.z * Sin; - Result.z = v.y * Sin + v.z * Cos; - return Result; - } - - template - GLM_FUNC_QUALIFIER vec<3, T, Q> rotateY - ( - vec<3, T, Q> const& v, - T const& angle - ) - { - vec<3, T, Q> Result = v; - T const Cos(cos(angle)); - T const Sin(sin(angle)); - - Result.x = v.x * Cos + v.z * Sin; - Result.z = -v.x * Sin + v.z * Cos; - return Result; - } - - template - GLM_FUNC_QUALIFIER vec<3, T, Q> rotateZ - ( - vec<3, T, Q> const& v, - T const& angle - ) - { - vec<3, T, Q> Result = v; - T const Cos(cos(angle)); - T const Sin(sin(angle)); - - Result.x = v.x * Cos - v.y * Sin; - Result.y = v.x * Sin + v.y * Cos; - return Result; - } - - template - GLM_FUNC_QUALIFIER vec<4, T, Q> rotateX - ( - vec<4, T, Q> const& v, - T const& angle - ) - { - vec<4, T, Q> Result = v; - T const Cos(cos(angle)); - T const Sin(sin(angle)); - - Result.y = v.y * Cos - v.z * Sin; - Result.z = v.y * Sin + v.z * Cos; - return Result; - } - - template - GLM_FUNC_QUALIFIER vec<4, T, Q> rotateY - ( - vec<4, T, Q> const& v, - T const& angle - ) - { - vec<4, T, Q> Result = v; - T const Cos(cos(angle)); - T const Sin(sin(angle)); - - Result.x = v.x * Cos + v.z * Sin; - Result.z = -v.x * Sin + v.z * Cos; - return Result; - } - - template - GLM_FUNC_QUALIFIER vec<4, T, Q> rotateZ - ( - vec<4, T, Q> const& v, - T const& angle - ) - { - vec<4, T, Q> Result = v; - T const Cos(cos(angle)); - T const Sin(sin(angle)); - - Result.x = v.x * Cos - v.y * Sin; - Result.y = v.x * Sin + v.y * Cos; - return Result; - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, Q> orientation - ( - vec<3, T, Q> const& Normal, - vec<3, T, Q> const& Up - ) - { - if(all(equal(Normal, Up, epsilon()))) - return mat<4, 4, T, Q>(static_cast(1)); - - vec<3, T, Q> RotationAxis = cross(Up, Normal); - T Angle = acos(dot(Normal, Up)); - - return rotate(Angle, RotationAxis); - } -}//namespace glm diff --git a/third_party/glm/gtx/scalar_multiplication.hpp b/third_party/glm/gtx/scalar_multiplication.hpp deleted file mode 100755 index 496ba19..0000000 --- a/third_party/glm/gtx/scalar_multiplication.hpp +++ /dev/null @@ -1,75 +0,0 @@ -/// @ref gtx -/// @file glm/gtx/scalar_multiplication.hpp -/// @author Joshua Moerman -/// -/// Include to use the features 
of this extension. -/// -/// Enables scalar multiplication for all types -/// -/// Since GLSL is very strict about types, the following (often used) combinations do not work: -/// double * vec4 -/// int * vec4 -/// vec4 / int -/// So we'll fix that! Of course "float * vec4" should remain the same (hence the enable_if magic) - -#pragma once - -#include "../detail/setup.hpp" - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# ifndef GLM_ENABLE_EXPERIMENTAL -# pragma message("GLM: GLM_GTX_scalar_multiplication is an experimental extension and may change in the future. Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.") -# else -# pragma message("GLM: GLM_GTX_scalar_multiplication extension included") -# endif -#endif - -#include "../vec2.hpp" -#include "../vec3.hpp" -#include "../vec4.hpp" -#include "../mat2x2.hpp" -#include - -namespace glm -{ - template - using return_type_scalar_multiplication = typename std::enable_if< - !std::is_same::value // T may not be a float - && std::is_arithmetic::value, Vec // But it may be an int or double (no vec3 or mat3, ...) - >::type; - -#define GLM_IMPLEMENT_SCAL_MULT(Vec) \ - template \ - return_type_scalar_multiplication \ - operator*(T const& s, Vec rh){ \ - return rh *= static_cast(s); \ - } \ - \ - template \ - return_type_scalar_multiplication \ - operator*(Vec lh, T const& s){ \ - return lh *= static_cast(s); \ - } \ - \ - template \ - return_type_scalar_multiplication \ - operator/(Vec lh, T const& s){ \ - return lh *= 1.0f / static_cast(s); \ - } - -GLM_IMPLEMENT_SCAL_MULT(vec2) -GLM_IMPLEMENT_SCAL_MULT(vec3) -GLM_IMPLEMENT_SCAL_MULT(vec4) - -GLM_IMPLEMENT_SCAL_MULT(mat2) -GLM_IMPLEMENT_SCAL_MULT(mat2x3) -GLM_IMPLEMENT_SCAL_MULT(mat2x4) -GLM_IMPLEMENT_SCAL_MULT(mat3x2) -GLM_IMPLEMENT_SCAL_MULT(mat3) -GLM_IMPLEMENT_SCAL_MULT(mat3x4) -GLM_IMPLEMENT_SCAL_MULT(mat4x2) -GLM_IMPLEMENT_SCAL_MULT(mat4x3) -GLM_IMPLEMENT_SCAL_MULT(mat4) - -#undef GLM_IMPLEMENT_SCAL_MULT -} // namespace glm diff --git a/third_party/glm/gtx/scalar_relational.hpp b/third_party/glm/gtx/scalar_relational.hpp deleted file mode 100755 index 8be9c57..0000000 --- a/third_party/glm/gtx/scalar_relational.hpp +++ /dev/null @@ -1,36 +0,0 @@ -/// @ref gtx_scalar_relational -/// @file glm/gtx/scalar_relational.hpp -/// -/// @see core (dependence) -/// -/// @defgroup gtx_scalar_relational GLM_GTX_scalar_relational -/// @ingroup gtx -/// -/// Include to use the features of this extension. -/// -/// Extend a position from a source to a position at a defined length. - -#pragma once - -// Dependency: -#include "../glm.hpp" - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# ifndef GLM_ENABLE_EXPERIMENTAL -# pragma message("GLM: GLM_GTX_extend is an experimental extension and may change in the future. 
Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.") -# else -# pragma message("GLM: GLM_GTX_extend extension included") -# endif -#endif - -namespace glm -{ - /// @addtogroup gtx_scalar_relational - /// @{ - - - - /// @} -}//namespace glm - -#include "scalar_relational.inl" diff --git a/third_party/glm/gtx/scalar_relational.inl b/third_party/glm/gtx/scalar_relational.inl deleted file mode 100755 index c2a121c..0000000 --- a/third_party/glm/gtx/scalar_relational.inl +++ /dev/null @@ -1,88 +0,0 @@ -/// @ref gtx_scalar_relational - -namespace glm -{ - template - GLM_FUNC_QUALIFIER bool lessThan - ( - T const& x, - T const& y - ) - { - return x < y; - } - - template - GLM_FUNC_QUALIFIER bool lessThanEqual - ( - T const& x, - T const& y - ) - { - return x <= y; - } - - template - GLM_FUNC_QUALIFIER bool greaterThan - ( - T const& x, - T const& y - ) - { - return x > y; - } - - template - GLM_FUNC_QUALIFIER bool greaterThanEqual - ( - T const& x, - T const& y - ) - { - return x >= y; - } - - template - GLM_FUNC_QUALIFIER bool equal - ( - T const& x, - T const& y - ) - { - return detail::compute_equal::is_iec559>::call(x, y); - } - - template - GLM_FUNC_QUALIFIER bool notEqual - ( - T const& x, - T const& y - ) - { - return !detail::compute_equal::is_iec559>::call(x, y); - } - - GLM_FUNC_QUALIFIER bool any - ( - bool const& x - ) - { - return x; - } - - GLM_FUNC_QUALIFIER bool all - ( - bool const& x - ) - { - return x; - } - - GLM_FUNC_QUALIFIER bool not_ - ( - bool const& x - ) - { - return !x; - } -}//namespace glm diff --git a/third_party/glm/gtx/spline.hpp b/third_party/glm/gtx/spline.hpp deleted file mode 100755 index 731c979..0000000 --- a/third_party/glm/gtx/spline.hpp +++ /dev/null @@ -1,65 +0,0 @@ -/// @ref gtx_spline -/// @file glm/gtx/spline.hpp -/// -/// @see core (dependence) -/// -/// @defgroup gtx_spline GLM_GTX_spline -/// @ingroup gtx -/// -/// Include to use the features of this extension. -/// -/// Spline functions - -#pragma once - -// Dependency: -#include "../glm.hpp" -#include "../gtx/optimum_pow.hpp" - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# ifndef GLM_ENABLE_EXPERIMENTAL -# pragma message("GLM: GLM_GTX_spline is an experimental extension and may change in the future. Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.") -# else -# pragma message("GLM: GLM_GTX_spline extension included") -# endif -#endif - -namespace glm -{ - /// @addtogroup gtx_spline - /// @{ - - /// Return a point from a catmull rom curve. - /// @see gtx_spline extension. - template - GLM_FUNC_DECL genType catmullRom( - genType const& v1, - genType const& v2, - genType const& v3, - genType const& v4, - typename genType::value_type const& s); - - /// Return a point from a hermite curve. - /// @see gtx_spline extension. - template - GLM_FUNC_DECL genType hermite( - genType const& v1, - genType const& t1, - genType const& v2, - genType const& t2, - typename genType::value_type const& s); - - /// Return a point from a cubic curve. - /// @see gtx_spline extension. 
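// Illustrative usage of catmullRom() and hermite() above: a minimal sketch with invented control
// points, assuming GLM_ENABLE_EXPERIMENTAL is defined.
#define GLM_ENABLE_EXPERIMENTAL
#include <glm/glm.hpp>
#include <glm/gtx/spline.hpp>

static void spline_example()
{
	glm::vec3 p0(0, 0, 0), p1(1, 0, 0), p2(2, 1, 0), p3(3, 1, 0);
	glm::vec3 onCatmull = glm::catmullRom(p0, p1, p2, p3, 0.5f);      // halfway along the segment from p1 to p2
	glm::vec3 onHermite = glm::hermite(p1, glm::vec3(1, 0, 0),        // same idea with explicit tangents
	                                   p2, glm::vec3(1, 0, 0), 0.5f); // at the two endpoints
	(void)onCatmull; (void)onHermite;
}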
- template - GLM_FUNC_DECL genType cubic( - genType const& v1, - genType const& v2, - genType const& v3, - genType const& v4, - typename genType::value_type const& s); - - /// @} -}//namespace glm - -#include "spline.inl" diff --git a/third_party/glm/gtx/spline.inl b/third_party/glm/gtx/spline.inl deleted file mode 100755 index c3fd056..0000000 --- a/third_party/glm/gtx/spline.inl +++ /dev/null @@ -1,60 +0,0 @@ -/// @ref gtx_spline - -namespace glm -{ - template - GLM_FUNC_QUALIFIER genType catmullRom - ( - genType const& v1, - genType const& v2, - genType const& v3, - genType const& v4, - typename genType::value_type const& s - ) - { - typename genType::value_type s2 = pow2(s); - typename genType::value_type s3 = pow3(s); - - typename genType::value_type f1 = -s3 + typename genType::value_type(2) * s2 - s; - typename genType::value_type f2 = typename genType::value_type(3) * s3 - typename genType::value_type(5) * s2 + typename genType::value_type(2); - typename genType::value_type f3 = typename genType::value_type(-3) * s3 + typename genType::value_type(4) * s2 + s; - typename genType::value_type f4 = s3 - s2; - - return (f1 * v1 + f2 * v2 + f3 * v3 + f4 * v4) / typename genType::value_type(2); - - } - - template - GLM_FUNC_QUALIFIER genType hermite - ( - genType const& v1, - genType const& t1, - genType const& v2, - genType const& t2, - typename genType::value_type const& s - ) - { - typename genType::value_type s2 = pow2(s); - typename genType::value_type s3 = pow3(s); - - typename genType::value_type f1 = typename genType::value_type(2) * s3 - typename genType::value_type(3) * s2 + typename genType::value_type(1); - typename genType::value_type f2 = typename genType::value_type(-2) * s3 + typename genType::value_type(3) * s2; - typename genType::value_type f3 = s3 - typename genType::value_type(2) * s2 + s; - typename genType::value_type f4 = s3 - s2; - - return f1 * v1 + f2 * v2 + f3 * t1 + f4 * t2; - } - - template - GLM_FUNC_QUALIFIER genType cubic - ( - genType const& v1, - genType const& v2, - genType const& v3, - genType const& v4, - typename genType::value_type const& s - ) - { - return ((v1 * s + v2) * s + v3) * s + v4; - } -}//namespace glm diff --git a/third_party/glm/gtx/std_based_type.hpp b/third_party/glm/gtx/std_based_type.hpp deleted file mode 100755 index cd3be8c..0000000 --- a/third_party/glm/gtx/std_based_type.hpp +++ /dev/null @@ -1,68 +0,0 @@ -/// @ref gtx_std_based_type -/// @file glm/gtx/std_based_type.hpp -/// -/// @see core (dependence) -/// @see gtx_extented_min_max (dependence) -/// -/// @defgroup gtx_std_based_type GLM_GTX_std_based_type -/// @ingroup gtx -/// -/// Include to use the features of this extension. -/// -/// Adds vector types based on STL value types. - -#pragma once - -// Dependency: -#include "../glm.hpp" -#include - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# ifndef GLM_ENABLE_EXPERIMENTAL -# pragma message("GLM: GLM_GTX_std_based_type is an experimental extension and may change in the future. Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.") -# else -# pragma message("GLM: GLM_GTX_std_based_type extension included") -# endif -#endif - -namespace glm -{ - /// @addtogroup gtx_std_based_type - /// @{ - - /// Vector type based of one std::size_t component. - /// @see GLM_GTX_std_based_type - typedef vec<1, std::size_t, defaultp> size1; - - /// Vector type based of two std::size_t components. 
- /// @see GLM_GTX_std_based_type - typedef vec<2, std::size_t, defaultp> size2; - - /// Vector type based of three std::size_t components. - /// @see GLM_GTX_std_based_type - typedef vec<3, std::size_t, defaultp> size3; - - /// Vector type based of four std::size_t components. - /// @see GLM_GTX_std_based_type - typedef vec<4, std::size_t, defaultp> size4; - - /// Vector type based of one std::size_t component. - /// @see GLM_GTX_std_based_type - typedef vec<1, std::size_t, defaultp> size1_t; - - /// Vector type based of two std::size_t components. - /// @see GLM_GTX_std_based_type - typedef vec<2, std::size_t, defaultp> size2_t; - - /// Vector type based of three std::size_t components. - /// @see GLM_GTX_std_based_type - typedef vec<3, std::size_t, defaultp> size3_t; - - /// Vector type based of four std::size_t components. - /// @see GLM_GTX_std_based_type - typedef vec<4, std::size_t, defaultp> size4_t; - - /// @} -}//namespace glm - -#include "std_based_type.inl" diff --git a/third_party/glm/gtx/std_based_type.inl b/third_party/glm/gtx/std_based_type.inl deleted file mode 100755 index 9c34bdb..0000000 --- a/third_party/glm/gtx/std_based_type.inl +++ /dev/null @@ -1,6 +0,0 @@ -/// @ref gtx_std_based_type - -namespace glm -{ - -} diff --git a/third_party/glm/gtx/string_cast.hpp b/third_party/glm/gtx/string_cast.hpp deleted file mode 100755 index 27846bf..0000000 --- a/third_party/glm/gtx/string_cast.hpp +++ /dev/null @@ -1,52 +0,0 @@ -/// @ref gtx_string_cast -/// @file glm/gtx/string_cast.hpp -/// -/// @see core (dependence) -/// @see gtx_integer (dependence) -/// @see gtx_quaternion (dependence) -/// -/// @defgroup gtx_string_cast GLM_GTX_string_cast -/// @ingroup gtx -/// -/// Include to use the features of this extension. -/// -/// Setup strings for GLM type values -/// -/// This extension is not supported with CUDA - -#pragma once - -// Dependency: -#include "../glm.hpp" -#include "../gtc/type_precision.hpp" -#include "../gtc/quaternion.hpp" -#include "../gtx/dual_quaternion.hpp" -#include -#include - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# ifndef GLM_ENABLE_EXPERIMENTAL -# pragma message("GLM: GLM_GTX_string_cast is an experimental extension and may change in the future. Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.") -# else -# pragma message("GLM: GLM_GTX_string_cast extension included") -# endif -#endif - -#if(GLM_COMPILER & GLM_COMPILER_CUDA) -# error "GLM_GTX_string_cast is not supported on CUDA compiler" -#endif - -namespace glm -{ - /// @addtogroup gtx_string_cast - /// @{ - - /// Create a string from a GLM vector or matrix typed variable. - /// @see gtx_string_cast extension. - template - GLM_FUNC_DECL std::string to_string(genType const& x); - - /// @} -}//namespace glm - -#include "string_cast.inl" diff --git a/third_party/glm/gtx/string_cast.inl b/third_party/glm/gtx/string_cast.inl deleted file mode 100755 index f67751d..0000000 --- a/third_party/glm/gtx/string_cast.inl +++ /dev/null @@ -1,492 +0,0 @@ -/// @ref gtx_string_cast - -#include -#include - -namespace glm{ -namespace detail -{ - template - struct cast - { - typedef T value_type; - }; - - template <> - struct cast - { - typedef double value_type; - }; - - GLM_FUNC_QUALIFIER std::string format(const char* msg, ...) 
- { - std::size_t const STRING_BUFFER(4096); - char text[STRING_BUFFER]; - va_list list; - - if(msg == GLM_NULLPTR) - return std::string(); - - va_start(list, msg); -# if (GLM_COMPILER & GLM_COMPILER_VC) - vsprintf_s(text, STRING_BUFFER, msg, list); -# else// - std::vsprintf(text, msg, list); -# endif// - va_end(list); - - return std::string(text); - } - - static const char* LabelTrue = "true"; - static const char* LabelFalse = "false"; - - template - struct literal - { - GLM_FUNC_QUALIFIER static char const * value() {return "%d";} - }; - - template - struct literal - { - GLM_FUNC_QUALIFIER static char const * value() {return "%f";} - }; - -# if GLM_MODEL == GLM_MODEL_32 && GLM_COMPILER && GLM_COMPILER_VC - template<> - struct literal - { - GLM_FUNC_QUALIFIER static char const * value() {return "%lld";} - }; - - template<> - struct literal - { - GLM_FUNC_QUALIFIER static char const * value() {return "%lld";} - }; -# endif//GLM_MODEL == GLM_MODEL_32 && GLM_COMPILER && GLM_COMPILER_VC - - template - struct prefix{}; - - template<> - struct prefix - { - GLM_FUNC_QUALIFIER static char const * value() {return "";} - }; - - template<> - struct prefix - { - GLM_FUNC_QUALIFIER static char const * value() {return "d";} - }; - - template<> - struct prefix - { - GLM_FUNC_QUALIFIER static char const * value() {return "b";} - }; - - template<> - struct prefix - { - GLM_FUNC_QUALIFIER static char const * value() {return "u8";} - }; - - template<> - struct prefix - { - GLM_FUNC_QUALIFIER static char const * value() {return "i8";} - }; - - template<> - struct prefix - { - GLM_FUNC_QUALIFIER static char const * value() {return "u16";} - }; - - template<> - struct prefix - { - GLM_FUNC_QUALIFIER static char const * value() {return "i16";} - }; - - template<> - struct prefix - { - GLM_FUNC_QUALIFIER static char const * value() {return "u";} - }; - - template<> - struct prefix - { - GLM_FUNC_QUALIFIER static char const * value() {return "i";} - }; - - template<> - struct prefix - { - GLM_FUNC_QUALIFIER static char const * value() {return "u64";} - }; - - template<> - struct prefix - { - GLM_FUNC_QUALIFIER static char const * value() {return "i64";} - }; - - template - struct compute_to_string - {}; - - template - struct compute_to_string > - { - GLM_FUNC_QUALIFIER static std::string call(vec<1, bool, Q> const& x) - { - return detail::format("bvec1(%s)", - x[0] ? detail::LabelTrue : detail::LabelFalse); - } - }; - - template - struct compute_to_string > - { - GLM_FUNC_QUALIFIER static std::string call(vec<2, bool, Q> const& x) - { - return detail::format("bvec2(%s, %s)", - x[0] ? detail::LabelTrue : detail::LabelFalse, - x[1] ? detail::LabelTrue : detail::LabelFalse); - } - }; - - template - struct compute_to_string > - { - GLM_FUNC_QUALIFIER static std::string call(vec<3, bool, Q> const& x) - { - return detail::format("bvec3(%s, %s, %s)", - x[0] ? detail::LabelTrue : detail::LabelFalse, - x[1] ? detail::LabelTrue : detail::LabelFalse, - x[2] ? detail::LabelTrue : detail::LabelFalse); - } - }; - - template - struct compute_to_string > - { - GLM_FUNC_QUALIFIER static std::string call(vec<4, bool, Q> const& x) - { - return detail::format("bvec4(%s, %s, %s, %s)", - x[0] ? detail::LabelTrue : detail::LabelFalse, - x[1] ? detail::LabelTrue : detail::LabelFalse, - x[2] ? detail::LabelTrue : detail::LabelFalse, - x[3] ? 
detail::LabelTrue : detail::LabelFalse); - } - }; - - template - struct compute_to_string > - { - GLM_FUNC_QUALIFIER static std::string call(vec<1, T, Q> const& x) - { - char const * PrefixStr = prefix::value(); - char const * LiteralStr = literal::is_iec559>::value(); - std::string FormatStr(detail::format("%svec1(%s)", - PrefixStr, - LiteralStr)); - - return detail::format(FormatStr.c_str(), - static_cast::value_type>(x[0])); - } - }; - - template - struct compute_to_string > - { - GLM_FUNC_QUALIFIER static std::string call(vec<2, T, Q> const& x) - { - char const * PrefixStr = prefix::value(); - char const * LiteralStr = literal::is_iec559>::value(); - std::string FormatStr(detail::format("%svec2(%s, %s)", - PrefixStr, - LiteralStr, LiteralStr)); - - return detail::format(FormatStr.c_str(), - static_cast::value_type>(x[0]), - static_cast::value_type>(x[1])); - } - }; - - template - struct compute_to_string > - { - GLM_FUNC_QUALIFIER static std::string call(vec<3, T, Q> const& x) - { - char const * PrefixStr = prefix::value(); - char const * LiteralStr = literal::is_iec559>::value(); - std::string FormatStr(detail::format("%svec3(%s, %s, %s)", - PrefixStr, - LiteralStr, LiteralStr, LiteralStr)); - - return detail::format(FormatStr.c_str(), - static_cast::value_type>(x[0]), - static_cast::value_type>(x[1]), - static_cast::value_type>(x[2])); - } - }; - - template - struct compute_to_string > - { - GLM_FUNC_QUALIFIER static std::string call(vec<4, T, Q> const& x) - { - char const * PrefixStr = prefix::value(); - char const * LiteralStr = literal::is_iec559>::value(); - std::string FormatStr(detail::format("%svec4(%s, %s, %s, %s)", - PrefixStr, - LiteralStr, LiteralStr, LiteralStr, LiteralStr)); - - return detail::format(FormatStr.c_str(), - static_cast::value_type>(x[0]), - static_cast::value_type>(x[1]), - static_cast::value_type>(x[2]), - static_cast::value_type>(x[3])); - } - }; - - - template - struct compute_to_string > - { - GLM_FUNC_QUALIFIER static std::string call(mat<2, 2, T, Q> const& x) - { - char const * PrefixStr = prefix::value(); - char const * LiteralStr = literal::is_iec559>::value(); - std::string FormatStr(detail::format("%smat2x2((%s, %s), (%s, %s))", - PrefixStr, - LiteralStr, LiteralStr, - LiteralStr, LiteralStr)); - - return detail::format(FormatStr.c_str(), - static_cast::value_type>(x[0][0]), static_cast::value_type>(x[0][1]), - static_cast::value_type>(x[1][0]), static_cast::value_type>(x[1][1])); - } - }; - - template - struct compute_to_string > - { - GLM_FUNC_QUALIFIER static std::string call(mat<2, 3, T, Q> const& x) - { - char const * PrefixStr = prefix::value(); - char const * LiteralStr = literal::is_iec559>::value(); - std::string FormatStr(detail::format("%smat2x3((%s, %s, %s), (%s, %s, %s))", - PrefixStr, - LiteralStr, LiteralStr, LiteralStr, - LiteralStr, LiteralStr, LiteralStr)); - - return detail::format(FormatStr.c_str(), - static_cast::value_type>(x[0][0]), static_cast::value_type>(x[0][1]), static_cast::value_type>(x[0][2]), - static_cast::value_type>(x[1][0]), static_cast::value_type>(x[1][1]), static_cast::value_type>(x[1][2])); - } - }; - - template - struct compute_to_string > - { - GLM_FUNC_QUALIFIER static std::string call(mat<2, 4, T, Q> const& x) - { - char const * PrefixStr = prefix::value(); - char const * LiteralStr = literal::is_iec559>::value(); - std::string FormatStr(detail::format("%smat2x4((%s, %s, %s, %s), (%s, %s, %s, %s))", - PrefixStr, - LiteralStr, LiteralStr, LiteralStr, LiteralStr, - LiteralStr, LiteralStr, LiteralStr, 
LiteralStr)); - - return detail::format(FormatStr.c_str(), - static_cast::value_type>(x[0][0]), static_cast::value_type>(x[0][1]), static_cast::value_type>(x[0][2]), static_cast::value_type>(x[0][3]), - static_cast::value_type>(x[1][0]), static_cast::value_type>(x[1][1]), static_cast::value_type>(x[1][2]), static_cast::value_type>(x[1][3])); - } - }; - - template - struct compute_to_string > - { - GLM_FUNC_QUALIFIER static std::string call(mat<3, 2, T, Q> const& x) - { - char const * PrefixStr = prefix::value(); - char const * LiteralStr = literal::is_iec559>::value(); - std::string FormatStr(detail::format("%smat3x2((%s, %s), (%s, %s), (%s, %s))", - PrefixStr, - LiteralStr, LiteralStr, - LiteralStr, LiteralStr, - LiteralStr, LiteralStr)); - - return detail::format(FormatStr.c_str(), - static_cast::value_type>(x[0][0]), static_cast::value_type>(x[0][1]), - static_cast::value_type>(x[1][0]), static_cast::value_type>(x[1][1]), - static_cast::value_type>(x[2][0]), static_cast::value_type>(x[2][1])); - } - }; - - template - struct compute_to_string > - { - GLM_FUNC_QUALIFIER static std::string call(mat<3, 3, T, Q> const& x) - { - char const * PrefixStr = prefix::value(); - char const * LiteralStr = literal::is_iec559>::value(); - std::string FormatStr(detail::format("%smat3x3((%s, %s, %s), (%s, %s, %s), (%s, %s, %s))", - PrefixStr, - LiteralStr, LiteralStr, LiteralStr, - LiteralStr, LiteralStr, LiteralStr, - LiteralStr, LiteralStr, LiteralStr)); - - return detail::format(FormatStr.c_str(), - static_cast::value_type>(x[0][0]), static_cast::value_type>(x[0][1]), static_cast::value_type>(x[0][2]), - static_cast::value_type>(x[1][0]), static_cast::value_type>(x[1][1]), static_cast::value_type>(x[1][2]), - static_cast::value_type>(x[2][0]), static_cast::value_type>(x[2][1]), static_cast::value_type>(x[2][2])); - } - }; - - template - struct compute_to_string > - { - GLM_FUNC_QUALIFIER static std::string call(mat<3, 4, T, Q> const& x) - { - char const * PrefixStr = prefix::value(); - char const * LiteralStr = literal::is_iec559>::value(); - std::string FormatStr(detail::format("%smat3x4((%s, %s, %s, %s), (%s, %s, %s, %s), (%s, %s, %s, %s))", - PrefixStr, - LiteralStr, LiteralStr, LiteralStr, LiteralStr, - LiteralStr, LiteralStr, LiteralStr, LiteralStr, - LiteralStr, LiteralStr, LiteralStr, LiteralStr)); - - return detail::format(FormatStr.c_str(), - static_cast::value_type>(x[0][0]), static_cast::value_type>(x[0][1]), static_cast::value_type>(x[0][2]), static_cast::value_type>(x[0][3]), - static_cast::value_type>(x[1][0]), static_cast::value_type>(x[1][1]), static_cast::value_type>(x[1][2]), static_cast::value_type>(x[1][3]), - static_cast::value_type>(x[2][0]), static_cast::value_type>(x[2][1]), static_cast::value_type>(x[2][2]), static_cast::value_type>(x[2][3])); - } - }; - - template - struct compute_to_string > - { - GLM_FUNC_QUALIFIER static std::string call(mat<4, 2, T, Q> const& x) - { - char const * PrefixStr = prefix::value(); - char const * LiteralStr = literal::is_iec559>::value(); - std::string FormatStr(detail::format("%smat4x2((%s, %s), (%s, %s), (%s, %s), (%s, %s))", - PrefixStr, - LiteralStr, LiteralStr, - LiteralStr, LiteralStr, - LiteralStr, LiteralStr, - LiteralStr, LiteralStr)); - - return detail::format(FormatStr.c_str(), - static_cast::value_type>(x[0][0]), static_cast::value_type>(x[0][1]), - static_cast::value_type>(x[1][0]), static_cast::value_type>(x[1][1]), - static_cast::value_type>(x[2][0]), static_cast::value_type>(x[2][1]), - static_cast::value_type>(x[3][0]), 
static_cast::value_type>(x[3][1])); - } - }; - - template - struct compute_to_string > - { - GLM_FUNC_QUALIFIER static std::string call(mat<4, 3, T, Q> const& x) - { - char const * PrefixStr = prefix::value(); - char const * LiteralStr = literal::is_iec559>::value(); - std::string FormatStr(detail::format("%smat4x3((%s, %s, %s), (%s, %s, %s), (%s, %s, %s), (%s, %s, %s))", - PrefixStr, - LiteralStr, LiteralStr, LiteralStr, - LiteralStr, LiteralStr, LiteralStr, - LiteralStr, LiteralStr, LiteralStr, - LiteralStr, LiteralStr, LiteralStr)); - - return detail::format(FormatStr.c_str(), - static_cast::value_type>(x[0][0]), static_cast::value_type>(x[0][1]), static_cast::value_type>(x[0][2]), - static_cast::value_type>(x[1][0]), static_cast::value_type>(x[1][1]), static_cast::value_type>(x[1][2]), - static_cast::value_type>(x[2][0]), static_cast::value_type>(x[2][1]), static_cast::value_type>(x[2][2]), - static_cast::value_type>(x[3][0]), static_cast::value_type>(x[3][1]), static_cast::value_type>(x[3][2])); - } - }; - - template - struct compute_to_string > - { - GLM_FUNC_QUALIFIER static std::string call(mat<4, 4, T, Q> const& x) - { - char const * PrefixStr = prefix::value(); - char const * LiteralStr = literal::is_iec559>::value(); - std::string FormatStr(detail::format("%smat4x4((%s, %s, %s, %s), (%s, %s, %s, %s), (%s, %s, %s, %s), (%s, %s, %s, %s))", - PrefixStr, - LiteralStr, LiteralStr, LiteralStr, LiteralStr, - LiteralStr, LiteralStr, LiteralStr, LiteralStr, - LiteralStr, LiteralStr, LiteralStr, LiteralStr, - LiteralStr, LiteralStr, LiteralStr, LiteralStr)); - - return detail::format(FormatStr.c_str(), - static_cast::value_type>(x[0][0]), static_cast::value_type>(x[0][1]), static_cast::value_type>(x[0][2]), static_cast::value_type>(x[0][3]), - static_cast::value_type>(x[1][0]), static_cast::value_type>(x[1][1]), static_cast::value_type>(x[1][2]), static_cast::value_type>(x[1][3]), - static_cast::value_type>(x[2][0]), static_cast::value_type>(x[2][1]), static_cast::value_type>(x[2][2]), static_cast::value_type>(x[2][3]), - static_cast::value_type>(x[3][0]), static_cast::value_type>(x[3][1]), static_cast::value_type>(x[3][2]), static_cast::value_type>(x[3][3])); - } - }; - - - template - struct compute_to_string > - { - GLM_FUNC_QUALIFIER static std::string call(qua const& q) - { - char const * PrefixStr = prefix::value(); - char const * LiteralStr = literal::is_iec559>::value(); - std::string FormatStr(detail::format("%squat(%s, {%s, %s, %s})", - PrefixStr, - LiteralStr, LiteralStr, LiteralStr, LiteralStr)); - - return detail::format(FormatStr.c_str(), - static_cast::value_type>(q.w), - static_cast::value_type>(q.x), - static_cast::value_type>(q.y), - static_cast::value_type>(q.z)); - } - }; - - template - struct compute_to_string > - { - GLM_FUNC_QUALIFIER static std::string call(tdualquat const& x) - { - char const * PrefixStr = prefix::value(); - char const * LiteralStr = literal::is_iec559>::value(); - std::string FormatStr(detail::format("%sdualquat((%s, {%s, %s, %s}), (%s, {%s, %s, %s}))", - PrefixStr, - LiteralStr, LiteralStr, LiteralStr, LiteralStr, - LiteralStr, LiteralStr, LiteralStr, LiteralStr)); - - return detail::format(FormatStr.c_str(), - static_cast::value_type>(x.real.w), - static_cast::value_type>(x.real.x), - static_cast::value_type>(x.real.y), - static_cast::value_type>(x.real.z), - static_cast::value_type>(x.dual.w), - static_cast::value_type>(x.dual.x), - static_cast::value_type>(x.dual.y), - static_cast::value_type>(x.dual.z)); - } - }; - -}//namespace detail - 
-template -GLM_FUNC_QUALIFIER std::string to_string(matType const& x) -{ - return detail::compute_to_string::call(x); -} - -}//namespace glm diff --git a/third_party/glm/gtx/texture.hpp b/third_party/glm/gtx/texture.hpp deleted file mode 100755 index 20585e6..0000000 --- a/third_party/glm/gtx/texture.hpp +++ /dev/null @@ -1,46 +0,0 @@ -/// @ref gtx_texture -/// @file glm/gtx/texture.hpp -/// -/// @see core (dependence) -/// -/// @defgroup gtx_texture GLM_GTX_texture -/// @ingroup gtx -/// -/// Include to use the features of this extension. -/// -/// Wrapping mode of texture coordinates. - -#pragma once - -// Dependency: -#include "../glm.hpp" -#include "../gtc/integer.hpp" -#include "../gtx/component_wise.hpp" - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# ifndef GLM_ENABLE_EXPERIMENTAL -# pragma message("GLM: GLM_GTX_texture is an experimental extension and may change in the future. Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.") -# else -# pragma message("GLM: GLM_GTX_texture extension included") -# endif -#endif - -namespace glm -{ - /// @addtogroup gtx_texture - /// @{ - - /// Compute the number of mipmaps levels necessary to create a mipmap complete texture - /// - /// @param Extent Extent of the texture base level mipmap - /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector - /// @tparam T Floating-point or signed integer scalar types - /// @tparam Q Value from qualifier enum - template - T levels(vec const& Extent); - - /// @} -}// namespace glm - -#include "texture.inl" - diff --git a/third_party/glm/gtx/texture.inl b/third_party/glm/gtx/texture.inl deleted file mode 100755 index 593c826..0000000 --- a/third_party/glm/gtx/texture.inl +++ /dev/null @@ -1,17 +0,0 @@ -/// @ref gtx_texture - -namespace glm -{ - template - inline T levels(vec const& Extent) - { - return glm::log2(compMax(Extent)) + static_cast(1); - } - - template - inline T levels(T Extent) - { - return vec<1, T, defaultp>(Extent).x; - } -}//namespace glm - diff --git a/third_party/glm/gtx/transform.hpp b/third_party/glm/gtx/transform.hpp deleted file mode 100755 index 0279fc8..0000000 --- a/third_party/glm/gtx/transform.hpp +++ /dev/null @@ -1,60 +0,0 @@ -/// @ref gtx_transform -/// @file glm/gtx/transform.hpp -/// -/// @see core (dependence) -/// @see gtc_matrix_transform (dependence) -/// @see gtx_transform -/// @see gtx_transform2 -/// -/// @defgroup gtx_transform GLM_GTX_transform -/// @ingroup gtx -/// -/// Include to use the features of this extension. -/// -/// Add transformation matrices - -#pragma once - -// Dependency: -#include "../glm.hpp" -#include "../gtc/matrix_transform.hpp" - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# ifndef GLM_ENABLE_EXPERIMENTAL -# pragma message("GLM: GLM_GTX_transform is an experimental extension and may change in the future. Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.") -# else -# pragma message("GLM: GLM_GTX_transform extension included") -# endif -#endif - -namespace glm -{ - /// @addtogroup gtx_transform - /// @{ - - /// Transforms a matrix with a translation 4 * 4 matrix created from 3 scalars. - /// @see gtc_matrix_transform - /// @see gtx_transform - template - GLM_FUNC_DECL mat<4, 4, T, Q> translate( - vec<3, T, Q> const& v); - - /// Builds a rotation 4 * 4 matrix created from an axis of 3 scalars and an angle expressed in radians. 
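// Illustrative usage of to_string() and levels() above: a minimal sketch, assuming
// GLM_ENABLE_EXPERIMENTAL is defined and <cstdio> is available for the debug print.
#define GLM_ENABLE_EXPERIMENTAL
#include <cstdio>
#include <glm/glm.hpp>
#include <glm/gtx/string_cast.hpp>
#include <glm/gtx/texture.hpp>

static void string_cast_example()
{
	glm::vec3 v(1.0f, 2.0f, 3.0f);
	std::printf("%s\n", glm::to_string(v).c_str());               // "vec3(1.000000, 2.000000, 3.000000)"
	std::printf("%s\n", glm::to_string(glm::mat2(1.0f)).c_str()); // "mat2x2((1.000000, 0.000000), (0.000000, 1.000000))"
	int mipLevels = glm::levels(glm::ivec2(1024, 768));           // 11 mip levels for a 1024x768 texture
	(void)mipLevels;
}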
- /// @see gtc_matrix_transform - /// @see gtx_transform - template - GLM_FUNC_DECL mat<4, 4, T, Q> rotate( - T angle, - vec<3, T, Q> const& v); - - /// Transforms a matrix with a scale 4 * 4 matrix created from a vector of 3 components. - /// @see gtc_matrix_transform - /// @see gtx_transform - template - GLM_FUNC_DECL mat<4, 4, T, Q> scale( - vec<3, T, Q> const& v); - - /// @} -}// namespace glm - -#include "transform.inl" diff --git a/third_party/glm/gtx/transform.inl b/third_party/glm/gtx/transform.inl deleted file mode 100755 index 48ee680..0000000 --- a/third_party/glm/gtx/transform.inl +++ /dev/null @@ -1,23 +0,0 @@ -/// @ref gtx_transform - -namespace glm -{ - template - GLM_FUNC_QUALIFIER mat<4, 4, T, Q> translate(vec<3, T, Q> const& v) - { - return translate(mat<4, 4, T, Q>(static_cast(1)), v); - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, Q> rotate(T angle, vec<3, T, Q> const& v) - { - return rotate(mat<4, 4, T, Q>(static_cast(1)), angle, v); - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, Q> scale(vec<3, T, Q> const& v) - { - return scale(mat<4, 4, T, Q>(static_cast(1)), v); - } - -}//namespace glm diff --git a/third_party/glm/gtx/transform2.hpp b/third_party/glm/gtx/transform2.hpp deleted file mode 100755 index 0d8ba9d..0000000 --- a/third_party/glm/gtx/transform2.hpp +++ /dev/null @@ -1,89 +0,0 @@ -/// @ref gtx_transform2 -/// @file glm/gtx/transform2.hpp -/// -/// @see core (dependence) -/// @see gtx_transform (dependence) -/// -/// @defgroup gtx_transform2 GLM_GTX_transform2 -/// @ingroup gtx -/// -/// Include to use the features of this extension. -/// -/// Add extra transformation matrices - -#pragma once - -// Dependency: -#include "../glm.hpp" -#include "../gtx/transform.hpp" - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# ifndef GLM_ENABLE_EXPERIMENTAL -# pragma message("GLM: GLM_GTX_transform2 is an experimental extension and may change in the future. Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.") -# else -# pragma message("GLM: GLM_GTX_transform2 extension included") -# endif -#endif - -namespace glm -{ - /// @addtogroup gtx_transform2 - /// @{ - - //! Transforms a matrix with a shearing on X axis. - //! From GLM_GTX_transform2 extension. - template - GLM_FUNC_DECL mat<3, 3, T, Q> shearX2D(mat<3, 3, T, Q> const& m, T y); - - //! Transforms a matrix with a shearing on Y axis. - //! From GLM_GTX_transform2 extension. - template - GLM_FUNC_DECL mat<3, 3, T, Q> shearY2D(mat<3, 3, T, Q> const& m, T x); - - //! Transforms a matrix with a shearing on X axis - //! From GLM_GTX_transform2 extension. - template - GLM_FUNC_DECL mat<4, 4, T, Q> shearX3D(mat<4, 4, T, Q> const& m, T y, T z); - - //! Transforms a matrix with a shearing on Y axis. - //! From GLM_GTX_transform2 extension. - template - GLM_FUNC_DECL mat<4, 4, T, Q> shearY3D(mat<4, 4, T, Q> const& m, T x, T z); - - //! Transforms a matrix with a shearing on Z axis. - //! From GLM_GTX_transform2 extension. 
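// Illustrative usage of the one-argument translate()/rotate()/scale() overloads above, which build
// standalone matrices that can be composed directly: a minimal sketch with invented values,
// assuming GLM_ENABLE_EXPERIMENTAL is defined.
#define GLM_ENABLE_EXPERIMENTAL
#include <glm/glm.hpp>
#include <glm/gtx/transform.hpp>

static void transform_example()
{
	glm::mat4 model = glm::translate(glm::vec3(0.0f, 1.0f, 0.0f))                  // move up by one unit
	                * glm::rotate(glm::radians(30.0f), glm::vec3(0.0f, 1.0f, 0.0f)) // then yaw 30 degrees
	                * glm::scale(glm::vec3(2.0f));                                  // applied to a uniformly scaled object
	(void)model;
}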
- template - GLM_FUNC_DECL mat<4, 4, T, Q> shearZ3D(mat<4, 4, T, Q> const& m, T x, T y); - - //template GLM_FUNC_QUALIFIER mat<4, 4, T, Q> shear(const mat<4, 4, T, Q> & m, shearPlane, planePoint, angle) - // Identity + tan(angle) * cross(Normal, OnPlaneVector) 0 - // - dot(PointOnPlane, normal) * OnPlaneVector 1 - - // Reflect functions seem to don't work - //template mat<3, 3, T, Q> reflect2D(const mat<3, 3, T, Q> & m, const vec<3, T, Q>& normal){return reflect2DGTX(m, normal);} //!< \brief Build a reflection matrix (from GLM_GTX_transform2 extension) - //template mat<4, 4, T, Q> reflect3D(const mat<4, 4, T, Q> & m, const vec<3, T, Q>& normal){return reflect3DGTX(m, normal);} //!< \brief Build a reflection matrix (from GLM_GTX_transform2 extension) - - //! Build planar projection matrix along normal axis. - //! From GLM_GTX_transform2 extension. - template - GLM_FUNC_DECL mat<3, 3, T, Q> proj2D(mat<3, 3, T, Q> const& m, vec<3, T, Q> const& normal); - - //! Build planar projection matrix along normal axis. - //! From GLM_GTX_transform2 extension. - template - GLM_FUNC_DECL mat<4, 4, T, Q> proj3D(mat<4, 4, T, Q> const & m, vec<3, T, Q> const& normal); - - //! Build a scale bias matrix. - //! From GLM_GTX_transform2 extension. - template - GLM_FUNC_DECL mat<4, 4, T, Q> scaleBias(T scale, T bias); - - //! Build a scale bias matrix. - //! From GLM_GTX_transform2 extension. - template - GLM_FUNC_DECL mat<4, 4, T, Q> scaleBias(mat<4, 4, T, Q> const& m, T scale, T bias); - - /// @} -}// namespace glm - -#include "transform2.inl" diff --git a/third_party/glm/gtx/transform2.inl b/third_party/glm/gtx/transform2.inl deleted file mode 100755 index 2b53198..0000000 --- a/third_party/glm/gtx/transform2.inl +++ /dev/null @@ -1,125 +0,0 @@ -/// @ref gtx_transform2 - -namespace glm -{ - template - GLM_FUNC_QUALIFIER mat<3, 3, T, Q> shearX2D(mat<3, 3, T, Q> const& m, T s) - { - mat<3, 3, T, Q> r(1); - r[1][0] = s; - return m * r; - } - - template - GLM_FUNC_QUALIFIER mat<3, 3, T, Q> shearY2D(mat<3, 3, T, Q> const& m, T s) - { - mat<3, 3, T, Q> r(1); - r[0][1] = s; - return m * r; - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, Q> shearX3D(mat<4, 4, T, Q> const& m, T s, T t) - { - mat<4, 4, T, Q> r(1); - r[0][1] = s; - r[0][2] = t; - return m * r; - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, Q> shearY3D(mat<4, 4, T, Q> const& m, T s, T t) - { - mat<4, 4, T, Q> r(1); - r[1][0] = s; - r[1][2] = t; - return m * r; - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, Q> shearZ3D(mat<4, 4, T, Q> const& m, T s, T t) - { - mat<4, 4, T, Q> r(1); - r[2][0] = s; - r[2][1] = t; - return m * r; - } - - template - GLM_FUNC_QUALIFIER mat<3, 3, T, Q> reflect2D(mat<3, 3, T, Q> const& m, vec<3, T, Q> const& normal) - { - mat<3, 3, T, Q> r(static_cast(1)); - r[0][0] = static_cast(1) - static_cast(2) * normal.x * normal.x; - r[0][1] = -static_cast(2) * normal.x * normal.y; - r[1][0] = -static_cast(2) * normal.x * normal.y; - r[1][1] = static_cast(1) - static_cast(2) * normal.y * normal.y; - return m * r; - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, Q> reflect3D(mat<4, 4, T, Q> const& m, vec<3, T, Q> const& normal) - { - mat<4, 4, T, Q> r(static_cast(1)); - r[0][0] = static_cast(1) - static_cast(2) * normal.x * normal.x; - r[0][1] = -static_cast(2) * normal.x * normal.y; - r[0][2] = -static_cast(2) * normal.x * normal.z; - - r[1][0] = -static_cast(2) * normal.x * normal.y; - r[1][1] = static_cast(1) - static_cast(2) * normal.y * normal.y; - r[1][2] = -static_cast(2) * normal.y * normal.z; - - r[2][0] = 
-static_cast(2) * normal.x * normal.z; - r[2][1] = -static_cast(2) * normal.y * normal.z; - r[2][2] = static_cast(1) - static_cast(2) * normal.z * normal.z; - return m * r; - } - - template - GLM_FUNC_QUALIFIER mat<3, 3, T, Q> proj2D( - const mat<3, 3, T, Q>& m, - const vec<3, T, Q>& normal) - { - mat<3, 3, T, Q> r(static_cast(1)); - r[0][0] = static_cast(1) - normal.x * normal.x; - r[0][1] = - normal.x * normal.y; - r[1][0] = - normal.x * normal.y; - r[1][1] = static_cast(1) - normal.y * normal.y; - return m * r; - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, Q> proj3D( - const mat<4, 4, T, Q>& m, - const vec<3, T, Q>& normal) - { - mat<4, 4, T, Q> r(static_cast(1)); - r[0][0] = static_cast(1) - normal.x * normal.x; - r[0][1] = - normal.x * normal.y; - r[0][2] = - normal.x * normal.z; - r[1][0] = - normal.x * normal.y; - r[1][1] = static_cast(1) - normal.y * normal.y; - r[1][2] = - normal.y * normal.z; - r[2][0] = - normal.x * normal.z; - r[2][1] = - normal.y * normal.z; - r[2][2] = static_cast(1) - normal.z * normal.z; - return m * r; - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, Q> scaleBias(T scale, T bias) - { - mat<4, 4, T, Q> result; - result[3] = vec<4, T, Q>(vec<3, T, Q>(bias), static_cast(1)); - result[0][0] = scale; - result[1][1] = scale; - result[2][2] = scale; - return result; - } - - template - GLM_FUNC_QUALIFIER mat<4, 4, T, Q> scaleBias(mat<4, 4, T, Q> const& m, T scale, T bias) - { - return m * scaleBias(scale, bias); - } -}//namespace glm - diff --git a/third_party/glm/gtx/type_aligned.hpp b/third_party/glm/gtx/type_aligned.hpp deleted file mode 100755 index 2ae522c..0000000 --- a/third_party/glm/gtx/type_aligned.hpp +++ /dev/null @@ -1,982 +0,0 @@ -/// @ref gtx_type_aligned -/// @file glm/gtx/type_aligned.hpp -/// -/// @see core (dependence) -/// @see gtc_quaternion (dependence) -/// -/// @defgroup gtx_type_aligned GLM_GTX_type_aligned -/// @ingroup gtx -/// -/// Include to use the features of this extension. -/// -/// Defines aligned types. - -#pragma once - -// Dependency: -#include "../gtc/type_precision.hpp" -#include "../gtc/quaternion.hpp" - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# ifndef GLM_ENABLE_EXPERIMENTAL -# pragma message("GLM: GLM_GTX_type_aligned is an experimental extension and may change in the future. Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.") -# else -# pragma message("GLM: GLM_GTX_type_aligned extension included") -# endif -#endif - -namespace glm -{ - /////////////////////////// - // Signed int vector types - - /// @addtogroup gtx_type_aligned - /// @{ - - /// Low qualifier 8 bit signed integer aligned scalar type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(lowp_int8, aligned_lowp_int8, 1); - - /// Low qualifier 16 bit signed integer aligned scalar type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(lowp_int16, aligned_lowp_int16, 2); - - /// Low qualifier 32 bit signed integer aligned scalar type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(lowp_int32, aligned_lowp_int32, 4); - - /// Low qualifier 64 bit signed integer aligned scalar type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(lowp_int64, aligned_lowp_int64, 8); - - - /// Low qualifier 8 bit signed integer aligned scalar type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(lowp_int8_t, aligned_lowp_int8_t, 1); - - /// Low qualifier 16 bit signed integer aligned scalar type. 
- /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(lowp_int16_t, aligned_lowp_int16_t, 2); - - /// Low qualifier 32 bit signed integer aligned scalar type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(lowp_int32_t, aligned_lowp_int32_t, 4); - - /// Low qualifier 64 bit signed integer aligned scalar type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(lowp_int64_t, aligned_lowp_int64_t, 8); - - - /// Low qualifier 8 bit signed integer aligned scalar type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(lowp_i8, aligned_lowp_i8, 1); - - /// Low qualifier 16 bit signed integer aligned scalar type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(lowp_i16, aligned_lowp_i16, 2); - - /// Low qualifier 32 bit signed integer aligned scalar type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(lowp_i32, aligned_lowp_i32, 4); - - /// Low qualifier 64 bit signed integer aligned scalar type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(lowp_i64, aligned_lowp_i64, 8); - - - /// Medium qualifier 8 bit signed integer aligned scalar type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(mediump_int8, aligned_mediump_int8, 1); - - /// Medium qualifier 16 bit signed integer aligned scalar type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(mediump_int16, aligned_mediump_int16, 2); - - /// Medium qualifier 32 bit signed integer aligned scalar type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(mediump_int32, aligned_mediump_int32, 4); - - /// Medium qualifier 64 bit signed integer aligned scalar type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(mediump_int64, aligned_mediump_int64, 8); - - - /// Medium qualifier 8 bit signed integer aligned scalar type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(mediump_int8_t, aligned_mediump_int8_t, 1); - - /// Medium qualifier 16 bit signed integer aligned scalar type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(mediump_int16_t, aligned_mediump_int16_t, 2); - - /// Medium qualifier 32 bit signed integer aligned scalar type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(mediump_int32_t, aligned_mediump_int32_t, 4); - - /// Medium qualifier 64 bit signed integer aligned scalar type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(mediump_int64_t, aligned_mediump_int64_t, 8); - - - /// Medium qualifier 8 bit signed integer aligned scalar type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(mediump_i8, aligned_mediump_i8, 1); - - /// Medium qualifier 16 bit signed integer aligned scalar type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(mediump_i16, aligned_mediump_i16, 2); - - /// Medium qualifier 32 bit signed integer aligned scalar type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(mediump_i32, aligned_mediump_i32, 4); - - /// Medium qualifier 64 bit signed integer aligned scalar type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(mediump_i64, aligned_mediump_i64, 8); - - - /// High qualifier 8 bit signed integer aligned scalar type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(highp_int8, aligned_highp_int8, 1); - - /// High qualifier 16 bit signed integer aligned scalar type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(highp_int16, aligned_highp_int16, 2); - - /// High qualifier 32 bit signed integer aligned scalar type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(highp_int32, aligned_highp_int32, 4); - - /// High qualifier 64 bit signed integer aligned scalar type. 
- /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(highp_int64, aligned_highp_int64, 8); - - - /// High qualifier 8 bit signed integer aligned scalar type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(highp_int8_t, aligned_highp_int8_t, 1); - - /// High qualifier 16 bit signed integer aligned scalar type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(highp_int16_t, aligned_highp_int16_t, 2); - - /// High qualifier 32 bit signed integer aligned scalar type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(highp_int32_t, aligned_highp_int32_t, 4); - - /// High qualifier 64 bit signed integer aligned scalar type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(highp_int64_t, aligned_highp_int64_t, 8); - - - /// High qualifier 8 bit signed integer aligned scalar type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(highp_i8, aligned_highp_i8, 1); - - /// High qualifier 16 bit signed integer aligned scalar type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(highp_i16, aligned_highp_i16, 2); - - /// High qualifier 32 bit signed integer aligned scalar type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(highp_i32, aligned_highp_i32, 4); - - /// High qualifier 64 bit signed integer aligned scalar type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(highp_i64, aligned_highp_i64, 8); - - - /// Default qualifier 8 bit signed integer aligned scalar type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(int8, aligned_int8, 1); - - /// Default qualifier 16 bit signed integer aligned scalar type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(int16, aligned_int16, 2); - - /// Default qualifier 32 bit signed integer aligned scalar type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(int32, aligned_int32, 4); - - /// Default qualifier 64 bit signed integer aligned scalar type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(int64, aligned_int64, 8); - - - /// Default qualifier 8 bit signed integer aligned scalar type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(int8_t, aligned_int8_t, 1); - - /// Default qualifier 16 bit signed integer aligned scalar type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(int16_t, aligned_int16_t, 2); - - /// Default qualifier 32 bit signed integer aligned scalar type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(int32_t, aligned_int32_t, 4); - - /// Default qualifier 64 bit signed integer aligned scalar type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(int64_t, aligned_int64_t, 8); - - - /// Default qualifier 8 bit signed integer aligned scalar type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(i8, aligned_i8, 1); - - /// Default qualifier 16 bit signed integer aligned scalar type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(i16, aligned_i16, 2); - - /// Default qualifier 32 bit signed integer aligned scalar type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(i32, aligned_i32, 4); - - /// Default qualifier 64 bit signed integer aligned scalar type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(i64, aligned_i64, 8); - - - /// Default qualifier 32 bit signed integer aligned scalar type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(ivec1, aligned_ivec1, 4); - - /// Default qualifier 32 bit signed integer aligned vector of 2 components type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(ivec2, aligned_ivec2, 8); - - /// Default qualifier 32 bit signed integer aligned vector of 3 components type. 
- /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(ivec3, aligned_ivec3, 16); - - /// Default qualifier 32 bit signed integer aligned vector of 4 components type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(ivec4, aligned_ivec4, 16); - - - /// Default qualifier 8 bit signed integer aligned scalar type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(i8vec1, aligned_i8vec1, 1); - - /// Default qualifier 8 bit signed integer aligned vector of 2 components type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(i8vec2, aligned_i8vec2, 2); - - /// Default qualifier 8 bit signed integer aligned vector of 3 components type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(i8vec3, aligned_i8vec3, 4); - - /// Default qualifier 8 bit signed integer aligned vector of 4 components type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(i8vec4, aligned_i8vec4, 4); - - - /// Default qualifier 16 bit signed integer aligned scalar type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(i16vec1, aligned_i16vec1, 2); - - /// Default qualifier 16 bit signed integer aligned vector of 2 components type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(i16vec2, aligned_i16vec2, 4); - - /// Default qualifier 16 bit signed integer aligned vector of 3 components type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(i16vec3, aligned_i16vec3, 8); - - /// Default qualifier 16 bit signed integer aligned vector of 4 components type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(i16vec4, aligned_i16vec4, 8); - - - /// Default qualifier 32 bit signed integer aligned scalar type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(i32vec1, aligned_i32vec1, 4); - - /// Default qualifier 32 bit signed integer aligned vector of 2 components type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(i32vec2, aligned_i32vec2, 8); - - /// Default qualifier 32 bit signed integer aligned vector of 3 components type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(i32vec3, aligned_i32vec3, 16); - - /// Default qualifier 32 bit signed integer aligned vector of 4 components type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(i32vec4, aligned_i32vec4, 16); - - - /// Default qualifier 64 bit signed integer aligned scalar type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(i64vec1, aligned_i64vec1, 8); - - /// Default qualifier 64 bit signed integer aligned vector of 2 components type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(i64vec2, aligned_i64vec2, 16); - - /// Default qualifier 64 bit signed integer aligned vector of 3 components type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(i64vec3, aligned_i64vec3, 32); - - /// Default qualifier 64 bit signed integer aligned vector of 4 components type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(i64vec4, aligned_i64vec4, 32); - - - ///////////////////////////// - // Unsigned int vector types - - /// Low qualifier 8 bit unsigned integer aligned scalar type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(lowp_uint8, aligned_lowp_uint8, 1); - - /// Low qualifier 16 bit unsigned integer aligned scalar type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(lowp_uint16, aligned_lowp_uint16, 2); - - /// Low qualifier 32 bit unsigned integer aligned scalar type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(lowp_uint32, aligned_lowp_uint32, 4); - - /// Low qualifier 64 bit unsigned integer aligned scalar type. 
- /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(lowp_uint64, aligned_lowp_uint64, 8); - - - /// Low qualifier 8 bit unsigned integer aligned scalar type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(lowp_uint8_t, aligned_lowp_uint8_t, 1); - - /// Low qualifier 16 bit unsigned integer aligned scalar type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(lowp_uint16_t, aligned_lowp_uint16_t, 2); - - /// Low qualifier 32 bit unsigned integer aligned scalar type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(lowp_uint32_t, aligned_lowp_uint32_t, 4); - - /// Low qualifier 64 bit unsigned integer aligned scalar type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(lowp_uint64_t, aligned_lowp_uint64_t, 8); - - - /// Low qualifier 8 bit unsigned integer aligned scalar type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(lowp_u8, aligned_lowp_u8, 1); - - /// Low qualifier 16 bit unsigned integer aligned scalar type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(lowp_u16, aligned_lowp_u16, 2); - - /// Low qualifier 32 bit unsigned integer aligned scalar type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(lowp_u32, aligned_lowp_u32, 4); - - /// Low qualifier 64 bit unsigned integer aligned scalar type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(lowp_u64, aligned_lowp_u64, 8); - - - /// Medium qualifier 8 bit unsigned integer aligned scalar type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(mediump_uint8, aligned_mediump_uint8, 1); - - /// Medium qualifier 16 bit unsigned integer aligned scalar type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(mediump_uint16, aligned_mediump_uint16, 2); - - /// Medium qualifier 32 bit unsigned integer aligned scalar type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(mediump_uint32, aligned_mediump_uint32, 4); - - /// Medium qualifier 64 bit unsigned integer aligned scalar type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(mediump_uint64, aligned_mediump_uint64, 8); - - - /// Medium qualifier 8 bit unsigned integer aligned scalar type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(mediump_uint8_t, aligned_mediump_uint8_t, 1); - - /// Medium qualifier 16 bit unsigned integer aligned scalar type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(mediump_uint16_t, aligned_mediump_uint16_t, 2); - - /// Medium qualifier 32 bit unsigned integer aligned scalar type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(mediump_uint32_t, aligned_mediump_uint32_t, 4); - - /// Medium qualifier 64 bit unsigned integer aligned scalar type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(mediump_uint64_t, aligned_mediump_uint64_t, 8); - - - /// Medium qualifier 8 bit unsigned integer aligned scalar type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(mediump_u8, aligned_mediump_u8, 1); - - /// Medium qualifier 16 bit unsigned integer aligned scalar type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(mediump_u16, aligned_mediump_u16, 2); - - /// Medium qualifier 32 bit unsigned integer aligned scalar type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(mediump_u32, aligned_mediump_u32, 4); - - /// Medium qualifier 64 bit unsigned integer aligned scalar type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(mediump_u64, aligned_mediump_u64, 8); - - - /// High qualifier 8 bit unsigned integer aligned scalar type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(highp_uint8, aligned_highp_uint8, 1); - - /// High qualifier 16 bit unsigned integer aligned scalar type. 
- /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(highp_uint16, aligned_highp_uint16, 2); - - /// High qualifier 32 bit unsigned integer aligned scalar type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(highp_uint32, aligned_highp_uint32, 4); - - /// High qualifier 64 bit unsigned integer aligned scalar type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(highp_uint64, aligned_highp_uint64, 8); - - - /// High qualifier 8 bit unsigned integer aligned scalar type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(highp_uint8_t, aligned_highp_uint8_t, 1); - - /// High qualifier 16 bit unsigned integer aligned scalar type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(highp_uint16_t, aligned_highp_uint16_t, 2); - - /// High qualifier 32 bit unsigned integer aligned scalar type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(highp_uint32_t, aligned_highp_uint32_t, 4); - - /// High qualifier 64 bit unsigned integer aligned scalar type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(highp_uint64_t, aligned_highp_uint64_t, 8); - - - /// High qualifier 8 bit unsigned integer aligned scalar type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(highp_u8, aligned_highp_u8, 1); - - /// High qualifier 16 bit unsigned integer aligned scalar type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(highp_u16, aligned_highp_u16, 2); - - /// High qualifier 32 bit unsigned integer aligned scalar type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(highp_u32, aligned_highp_u32, 4); - - /// High qualifier 64 bit unsigned integer aligned scalar type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(highp_u64, aligned_highp_u64, 8); - - - /// Default qualifier 8 bit unsigned integer aligned scalar type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(uint8, aligned_uint8, 1); - - /// Default qualifier 16 bit unsigned integer aligned scalar type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(uint16, aligned_uint16, 2); - - /// Default qualifier 32 bit unsigned integer aligned scalar type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(uint32, aligned_uint32, 4); - - /// Default qualifier 64 bit unsigned integer aligned scalar type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(uint64, aligned_uint64, 8); - - - /// Default qualifier 8 bit unsigned integer aligned scalar type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(uint8_t, aligned_uint8_t, 1); - - /// Default qualifier 16 bit unsigned integer aligned scalar type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(uint16_t, aligned_uint16_t, 2); - - /// Default qualifier 32 bit unsigned integer aligned scalar type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(uint32_t, aligned_uint32_t, 4); - - /// Default qualifier 64 bit unsigned integer aligned scalar type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(uint64_t, aligned_uint64_t, 8); - - - /// Default qualifier 8 bit unsigned integer aligned scalar type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(u8, aligned_u8, 1); - - /// Default qualifier 16 bit unsigned integer aligned scalar type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(u16, aligned_u16, 2); - - /// Default qualifier 32 bit unsigned integer aligned scalar type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(u32, aligned_u32, 4); - - /// Default qualifier 64 bit unsigned integer aligned scalar type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(u64, aligned_u64, 8); - - - /// Default qualifier 32 bit unsigned integer aligned scalar type. 
- /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(uvec1, aligned_uvec1, 4); - - /// Default qualifier 32 bit unsigned integer aligned vector of 2 components type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(uvec2, aligned_uvec2, 8); - - /// Default qualifier 32 bit unsigned integer aligned vector of 3 components type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(uvec3, aligned_uvec3, 16); - - /// Default qualifier 32 bit unsigned integer aligned vector of 4 components type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(uvec4, aligned_uvec4, 16); - - - /// Default qualifier 8 bit unsigned integer aligned scalar type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(u8vec1, aligned_u8vec1, 1); - - /// Default qualifier 8 bit unsigned integer aligned vector of 2 components type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(u8vec2, aligned_u8vec2, 2); - - /// Default qualifier 8 bit unsigned integer aligned vector of 3 components type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(u8vec3, aligned_u8vec3, 4); - - /// Default qualifier 8 bit unsigned integer aligned vector of 4 components type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(u8vec4, aligned_u8vec4, 4); - - - /// Default qualifier 16 bit unsigned integer aligned scalar type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(u16vec1, aligned_u16vec1, 2); - - /// Default qualifier 16 bit unsigned integer aligned vector of 2 components type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(u16vec2, aligned_u16vec2, 4); - - /// Default qualifier 16 bit unsigned integer aligned vector of 3 components type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(u16vec3, aligned_u16vec3, 8); - - /// Default qualifier 16 bit unsigned integer aligned vector of 4 components type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(u16vec4, aligned_u16vec4, 8); - - - /// Default qualifier 32 bit unsigned integer aligned scalar type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(u32vec1, aligned_u32vec1, 4); - - /// Default qualifier 32 bit unsigned integer aligned vector of 2 components type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(u32vec2, aligned_u32vec2, 8); - - /// Default qualifier 32 bit unsigned integer aligned vector of 3 components type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(u32vec3, aligned_u32vec3, 16); - - /// Default qualifier 32 bit unsigned integer aligned vector of 4 components type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(u32vec4, aligned_u32vec4, 16); - - - /// Default qualifier 64 bit unsigned integer aligned scalar type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(u64vec1, aligned_u64vec1, 8); - - /// Default qualifier 64 bit unsigned integer aligned vector of 2 components type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(u64vec2, aligned_u64vec2, 16); - - /// Default qualifier 64 bit unsigned integer aligned vector of 3 components type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(u64vec3, aligned_u64vec3, 32); - - /// Default qualifier 64 bit unsigned integer aligned vector of 4 components type. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(u64vec4, aligned_u64vec4, 32); - - - ////////////////////// - // Float vector types - - /// 32 bit single-qualifier floating-point aligned scalar. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(float32, aligned_float32, 4); - - /// 32 bit single-qualifier floating-point aligned scalar. 
- /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(float32_t, aligned_float32_t, 4); - - /// 32 bit single-qualifier floating-point aligned scalar. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(float32, aligned_f32, 4); - -# ifndef GLM_FORCE_SINGLE_ONLY - - /// 64 bit double-qualifier floating-point aligned scalar. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(float64, aligned_float64, 8); - - /// 64 bit double-qualifier floating-point aligned scalar. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(float64_t, aligned_float64_t, 8); - - /// 64 bit double-qualifier floating-point aligned scalar. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(float64, aligned_f64, 8); - -# endif//GLM_FORCE_SINGLE_ONLY - - - /// Single-qualifier floating-point aligned vector of 1 component. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(vec1, aligned_vec1, 4); - - /// Single-qualifier floating-point aligned vector of 2 components. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(vec2, aligned_vec2, 8); - - /// Single-qualifier floating-point aligned vector of 3 components. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(vec3, aligned_vec3, 16); - - /// Single-qualifier floating-point aligned vector of 4 components. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(vec4, aligned_vec4, 16); - - - /// Single-qualifier floating-point aligned vector of 1 component. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(fvec1, aligned_fvec1, 4); - - /// Single-qualifier floating-point aligned vector of 2 components. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(fvec2, aligned_fvec2, 8); - - /// Single-qualifier floating-point aligned vector of 3 components. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(fvec3, aligned_fvec3, 16); - - /// Single-qualifier floating-point aligned vector of 4 components. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(fvec4, aligned_fvec4, 16); - - - /// Single-qualifier floating-point aligned vector of 1 component. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(f32vec1, aligned_f32vec1, 4); - - /// Single-qualifier floating-point aligned vector of 2 components. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(f32vec2, aligned_f32vec2, 8); - - /// Single-qualifier floating-point aligned vector of 3 components. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(f32vec3, aligned_f32vec3, 16); - - /// Single-qualifier floating-point aligned vector of 4 components. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(f32vec4, aligned_f32vec4, 16); - - - /// Double-qualifier floating-point aligned vector of 1 component. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(dvec1, aligned_dvec1, 8); - - /// Double-qualifier floating-point aligned vector of 2 components. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(dvec2, aligned_dvec2, 16); - - /// Double-qualifier floating-point aligned vector of 3 components. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(dvec3, aligned_dvec3, 32); - - /// Double-qualifier floating-point aligned vector of 4 components. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(dvec4, aligned_dvec4, 32); - - -# ifndef GLM_FORCE_SINGLE_ONLY - - /// Double-qualifier floating-point aligned vector of 1 component. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(f64vec1, aligned_f64vec1, 8); - - /// Double-qualifier floating-point aligned vector of 2 components. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(f64vec2, aligned_f64vec2, 16); - - /// Double-qualifier floating-point aligned vector of 3 components. 
- /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(f64vec3, aligned_f64vec3, 32); - - /// Double-qualifier floating-point aligned vector of 4 components. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(f64vec4, aligned_f64vec4, 32); - -# endif//GLM_FORCE_SINGLE_ONLY - - ////////////////////// - // Float matrix types - - /// Single-qualifier floating-point aligned 1x1 matrix. - /// @see gtx_type_aligned - //typedef detail::tmat1 mat1; - - /// Single-qualifier floating-point aligned 2x2 matrix. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(mat2, aligned_mat2, 16); - - /// Single-qualifier floating-point aligned 3x3 matrix. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(mat3, aligned_mat3, 16); - - /// Single-qualifier floating-point aligned 4x4 matrix. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(mat4, aligned_mat4, 16); - - - /// Single-qualifier floating-point aligned 1x1 matrix. - /// @see gtx_type_aligned - //typedef detail::tmat1x1 mat1; - - /// Single-qualifier floating-point aligned 2x2 matrix. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(mat2x2, aligned_mat2x2, 16); - - /// Single-qualifier floating-point aligned 3x3 matrix. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(mat3x3, aligned_mat3x3, 16); - - /// Single-qualifier floating-point aligned 4x4 matrix. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(mat4x4, aligned_mat4x4, 16); - - - /// Single-qualifier floating-point aligned 1x1 matrix. - /// @see gtx_type_aligned - //typedef detail::tmat1x1 fmat1; - - /// Single-qualifier floating-point aligned 2x2 matrix. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(fmat2x2, aligned_fmat2, 16); - - /// Single-qualifier floating-point aligned 3x3 matrix. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(fmat3x3, aligned_fmat3, 16); - - /// Single-qualifier floating-point aligned 4x4 matrix. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(fmat4x4, aligned_fmat4, 16); - - - /// Single-qualifier floating-point aligned 1x1 matrix. - /// @see gtx_type_aligned - //typedef f32 fmat1x1; - - /// Single-qualifier floating-point aligned 2x2 matrix. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(fmat2x2, aligned_fmat2x2, 16); - - /// Single-qualifier floating-point aligned 2x3 matrix. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(fmat2x3, aligned_fmat2x3, 16); - - /// Single-qualifier floating-point aligned 2x4 matrix. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(fmat2x4, aligned_fmat2x4, 16); - - /// Single-qualifier floating-point aligned 3x2 matrix. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(fmat3x2, aligned_fmat3x2, 16); - - /// Single-qualifier floating-point aligned 3x3 matrix. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(fmat3x3, aligned_fmat3x3, 16); - - /// Single-qualifier floating-point aligned 3x4 matrix. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(fmat3x4, aligned_fmat3x4, 16); - - /// Single-qualifier floating-point aligned 4x2 matrix. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(fmat4x2, aligned_fmat4x2, 16); - - /// Single-qualifier floating-point aligned 4x3 matrix. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(fmat4x3, aligned_fmat4x3, 16); - - /// Single-qualifier floating-point aligned 4x4 matrix. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(fmat4x4, aligned_fmat4x4, 16); - - - /// Single-qualifier floating-point aligned 1x1 matrix. - /// @see gtx_type_aligned - //typedef detail::tmat1x1 f32mat1; - - /// Single-qualifier floating-point aligned 2x2 matrix. 
- /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(f32mat2x2, aligned_f32mat2, 16); - - /// Single-qualifier floating-point aligned 3x3 matrix. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(f32mat3x3, aligned_f32mat3, 16); - - /// Single-qualifier floating-point aligned 4x4 matrix. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(f32mat4x4, aligned_f32mat4, 16); - - - /// Single-qualifier floating-point aligned 1x1 matrix. - /// @see gtx_type_aligned - //typedef f32 f32mat1x1; - - /// Single-qualifier floating-point aligned 2x2 matrix. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(f32mat2x2, aligned_f32mat2x2, 16); - - /// Single-qualifier floating-point aligned 2x3 matrix. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(f32mat2x3, aligned_f32mat2x3, 16); - - /// Single-qualifier floating-point aligned 2x4 matrix. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(f32mat2x4, aligned_f32mat2x4, 16); - - /// Single-qualifier floating-point aligned 3x2 matrix. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(f32mat3x2, aligned_f32mat3x2, 16); - - /// Single-qualifier floating-point aligned 3x3 matrix. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(f32mat3x3, aligned_f32mat3x3, 16); - - /// Single-qualifier floating-point aligned 3x4 matrix. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(f32mat3x4, aligned_f32mat3x4, 16); - - /// Single-qualifier floating-point aligned 4x2 matrix. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(f32mat4x2, aligned_f32mat4x2, 16); - - /// Single-qualifier floating-point aligned 4x3 matrix. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(f32mat4x3, aligned_f32mat4x3, 16); - - /// Single-qualifier floating-point aligned 4x4 matrix. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(f32mat4x4, aligned_f32mat4x4, 16); - - -# ifndef GLM_FORCE_SINGLE_ONLY - - /// Double-qualifier floating-point aligned 1x1 matrix. - /// @see gtx_type_aligned - //typedef detail::tmat1x1 f64mat1; - - /// Double-qualifier floating-point aligned 2x2 matrix. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(f64mat2x2, aligned_f64mat2, 32); - - /// Double-qualifier floating-point aligned 3x3 matrix. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(f64mat3x3, aligned_f64mat3, 32); - - /// Double-qualifier floating-point aligned 4x4 matrix. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(f64mat4x4, aligned_f64mat4, 32); - - - /// Double-qualifier floating-point aligned 1x1 matrix. - /// @see gtx_type_aligned - //typedef f64 f64mat1x1; - - /// Double-qualifier floating-point aligned 2x2 matrix. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(f64mat2x2, aligned_f64mat2x2, 32); - - /// Double-qualifier floating-point aligned 2x3 matrix. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(f64mat2x3, aligned_f64mat2x3, 32); - - /// Double-qualifier floating-point aligned 2x4 matrix. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(f64mat2x4, aligned_f64mat2x4, 32); - - /// Double-qualifier floating-point aligned 3x2 matrix. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(f64mat3x2, aligned_f64mat3x2, 32); - - /// Double-qualifier floating-point aligned 3x3 matrix. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(f64mat3x3, aligned_f64mat3x3, 32); - - /// Double-qualifier floating-point aligned 3x4 matrix. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(f64mat3x4, aligned_f64mat3x4, 32); - - /// Double-qualifier floating-point aligned 4x2 matrix. 
- /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(f64mat4x2, aligned_f64mat4x2, 32); - - /// Double-qualifier floating-point aligned 4x3 matrix. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(f64mat4x3, aligned_f64mat4x3, 32); - - /// Double-qualifier floating-point aligned 4x4 matrix. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(f64mat4x4, aligned_f64mat4x4, 32); - -# endif//GLM_FORCE_SINGLE_ONLY - - - ////////////////////////// - // Quaternion types - - /// Single-qualifier floating-point aligned quaternion. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(quat, aligned_quat, 16); - - /// Single-qualifier floating-point aligned quaternion. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(quat, aligned_fquat, 16); - - /// Double-qualifier floating-point aligned quaternion. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(dquat, aligned_dquat, 32); - - /// Single-qualifier floating-point aligned quaternion. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(f32quat, aligned_f32quat, 16); - -# ifndef GLM_FORCE_SINGLE_ONLY - - /// Double-qualifier floating-point aligned quaternion. - /// @see gtx_type_aligned - GLM_ALIGNED_TYPEDEF(f64quat, aligned_f64quat, 32); - -# endif//GLM_FORCE_SINGLE_ONLY - - /// @} -}//namespace glm - -#include "type_aligned.inl" diff --git a/third_party/glm/gtx/type_aligned.inl b/third_party/glm/gtx/type_aligned.inl deleted file mode 100755 index 54c1b81..0000000 --- a/third_party/glm/gtx/type_aligned.inl +++ /dev/null @@ -1,6 +0,0 @@ -/// @ref gtc_type_aligned - -namespace glm -{ - -} diff --git a/third_party/glm/gtx/type_trait.hpp b/third_party/glm/gtx/type_trait.hpp deleted file mode 100755 index 56685c8..0000000 --- a/third_party/glm/gtx/type_trait.hpp +++ /dev/null @@ -1,85 +0,0 @@ -/// @ref gtx_type_trait -/// @file glm/gtx/type_trait.hpp -/// -/// @see core (dependence) -/// -/// @defgroup gtx_type_trait GLM_GTX_type_trait -/// @ingroup gtx -/// -/// Include to use the features of this extension. -/// -/// Defines traits for each type. - -#pragma once - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# ifndef GLM_ENABLE_EXPERIMENTAL -# pragma message("GLM: GLM_GTX_type_trait is an experimental extension and may change in the future. 
Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.") -# else -# pragma message("GLM: GLM_GTX_type_trait extension included") -# endif -#endif - -// Dependency: -#include "../detail/qualifier.hpp" -#include "../gtc/quaternion.hpp" -#include "../gtx/dual_quaternion.hpp" - -namespace glm -{ - /// @addtogroup gtx_type_trait - /// @{ - - template - struct type - { - static bool const is_vec = false; - static bool const is_mat = false; - static bool const is_quat = false; - static length_t const components = 0; - static length_t const cols = 0; - static length_t const rows = 0; - }; - - template - struct type > - { - static bool const is_vec = true; - static bool const is_mat = false; - static bool const is_quat = false; - static length_t const components = L; - }; - - template - struct type > - { - static bool const is_vec = false; - static bool const is_mat = true; - static bool const is_quat = false; - static length_t const components = C; - static length_t const cols = C; - static length_t const rows = R; - }; - - template - struct type > - { - static bool const is_vec = false; - static bool const is_mat = false; - static bool const is_quat = true; - static length_t const components = 4; - }; - - template - struct type > - { - static bool const is_vec = false; - static bool const is_mat = false; - static bool const is_quat = true; - static length_t const components = 8; - }; - - /// @} -}//namespace glm - -#include "type_trait.inl" diff --git a/third_party/glm/gtx/type_trait.inl b/third_party/glm/gtx/type_trait.inl deleted file mode 100755 index 045de95..0000000 --- a/third_party/glm/gtx/type_trait.inl +++ /dev/null @@ -1,61 +0,0 @@ -/// @ref gtx_type_trait - -namespace glm -{ - template - bool const type::is_vec; - template - bool const type::is_mat; - template - bool const type::is_quat; - template - length_t const type::components; - template - length_t const type::cols; - template - length_t const type::rows; - - // vec - template - bool const type >::is_vec; - template - bool const type >::is_mat; - template - bool const type >::is_quat; - template - length_t const type >::components; - - // mat - template - bool const type >::is_vec; - template - bool const type >::is_mat; - template - bool const type >::is_quat; - template - length_t const type >::components; - template - length_t const type >::cols; - template - length_t const type >::rows; - - // tquat - template - bool const type >::is_vec; - template - bool const type >::is_mat; - template - bool const type >::is_quat; - template - length_t const type >::components; - - // tdualquat - template - bool const type >::is_vec; - template - bool const type >::is_mat; - template - bool const type >::is_quat; - template - length_t const type >::components; -}//namespace glm diff --git a/third_party/glm/gtx/vec_swizzle.hpp b/third_party/glm/gtx/vec_swizzle.hpp deleted file mode 100755 index 1c49abc..0000000 --- a/third_party/glm/gtx/vec_swizzle.hpp +++ /dev/null @@ -1,2782 +0,0 @@ -/// @ref gtx_vec_swizzle -/// @file glm/gtx/vec_swizzle.hpp -/// -/// @see core (dependence) -/// -/// @defgroup gtx_vec_swizzle GLM_GTX_vec_swizzle -/// @ingroup gtx -/// -/// Include to use the features of this extension. -/// -/// Functions to perform swizzle operation. - -#pragma once - -#include "../glm.hpp" - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# ifndef GLM_ENABLE_EXPERIMENTAL -# pragma message("GLM: GLM_GTX_vec_swizzle is an experimental extension and may change in the future. 
Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.") -# else -# pragma message("GLM: GLM_GTX_vec_swizzle extension included") -# endif -#endif - -namespace glm { - // xx - template - GLM_INLINE glm::vec<2, T, Q> xx(const glm::vec<1, T, Q> &v) { - return glm::vec<2, T, Q>(v.x, v.x); - } - - template - GLM_INLINE glm::vec<2, T, Q> xx(const glm::vec<2, T, Q> &v) { - return glm::vec<2, T, Q>(v.x, v.x); - } - - template - GLM_INLINE glm::vec<2, T, Q> xx(const glm::vec<3, T, Q> &v) { - return glm::vec<2, T, Q>(v.x, v.x); - } - - template - GLM_INLINE glm::vec<2, T, Q> xx(const glm::vec<4, T, Q> &v) { - return glm::vec<2, T, Q>(v.x, v.x); - } - - // xy - template - GLM_INLINE glm::vec<2, T, Q> xy(const glm::vec<2, T, Q> &v) { - return glm::vec<2, T, Q>(v.x, v.y); - } - - template - GLM_INLINE glm::vec<2, T, Q> xy(const glm::vec<3, T, Q> &v) { - return glm::vec<2, T, Q>(v.x, v.y); - } - - template - GLM_INLINE glm::vec<2, T, Q> xy(const glm::vec<4, T, Q> &v) { - return glm::vec<2, T, Q>(v.x, v.y); - } - - // xz - template - GLM_INLINE glm::vec<2, T, Q> xz(const glm::vec<3, T, Q> &v) { - return glm::vec<2, T, Q>(v.x, v.z); - } - - template - GLM_INLINE glm::vec<2, T, Q> xz(const glm::vec<4, T, Q> &v) { - return glm::vec<2, T, Q>(v.x, v.z); - } - - // xw - template - GLM_INLINE glm::vec<2, T, Q> xw(const glm::vec<4, T, Q> &v) { - return glm::vec<2, T, Q>(v.x, v.w); - } - - // yx - template - GLM_INLINE glm::vec<2, T, Q> yx(const glm::vec<2, T, Q> &v) { - return glm::vec<2, T, Q>(v.y, v.x); - } - - template - GLM_INLINE glm::vec<2, T, Q> yx(const glm::vec<3, T, Q> &v) { - return glm::vec<2, T, Q>(v.y, v.x); - } - - template - GLM_INLINE glm::vec<2, T, Q> yx(const glm::vec<4, T, Q> &v) { - return glm::vec<2, T, Q>(v.y, v.x); - } - - // yy - template - GLM_INLINE glm::vec<2, T, Q> yy(const glm::vec<2, T, Q> &v) { - return glm::vec<2, T, Q>(v.y, v.y); - } - - template - GLM_INLINE glm::vec<2, T, Q> yy(const glm::vec<3, T, Q> &v) { - return glm::vec<2, T, Q>(v.y, v.y); - } - - template - GLM_INLINE glm::vec<2, T, Q> yy(const glm::vec<4, T, Q> &v) { - return glm::vec<2, T, Q>(v.y, v.y); - } - - // yz - template - GLM_INLINE glm::vec<2, T, Q> yz(const glm::vec<3, T, Q> &v) { - return glm::vec<2, T, Q>(v.y, v.z); - } - - template - GLM_INLINE glm::vec<2, T, Q> yz(const glm::vec<4, T, Q> &v) { - return glm::vec<2, T, Q>(v.y, v.z); - } - - // yw - template - GLM_INLINE glm::vec<2, T, Q> yw(const glm::vec<4, T, Q> &v) { - return glm::vec<2, T, Q>(v.y, v.w); - } - - // zx - template - GLM_INLINE glm::vec<2, T, Q> zx(const glm::vec<3, T, Q> &v) { - return glm::vec<2, T, Q>(v.z, v.x); - } - - template - GLM_INLINE glm::vec<2, T, Q> zx(const glm::vec<4, T, Q> &v) { - return glm::vec<2, T, Q>(v.z, v.x); - } - - // zy - template - GLM_INLINE glm::vec<2, T, Q> zy(const glm::vec<3, T, Q> &v) { - return glm::vec<2, T, Q>(v.z, v.y); - } - - template - GLM_INLINE glm::vec<2, T, Q> zy(const glm::vec<4, T, Q> &v) { - return glm::vec<2, T, Q>(v.z, v.y); - } - - // zz - template - GLM_INLINE glm::vec<2, T, Q> zz(const glm::vec<3, T, Q> &v) { - return glm::vec<2, T, Q>(v.z, v.z); - } - - template - GLM_INLINE glm::vec<2, T, Q> zz(const glm::vec<4, T, Q> &v) { - return glm::vec<2, T, Q>(v.z, v.z); - } - - // zw - template - GLM_INLINE glm::vec<2, T, Q> zw(const glm::vec<4, T, Q> &v) { - return glm::vec<2, T, Q>(v.z, v.w); - } - - // wx - template - GLM_INLINE glm::vec<2, T, Q> wx(const glm::vec<4, T, Q> &v) { - return glm::vec<2, T, Q>(v.w, v.x); - } - - // wy - template - GLM_INLINE 
glm::vec<2, T, Q> wy(const glm::vec<4, T, Q> &v) { - return glm::vec<2, T, Q>(v.w, v.y); - } - - // wz - template - GLM_INLINE glm::vec<2, T, Q> wz(const glm::vec<4, T, Q> &v) { - return glm::vec<2, T, Q>(v.w, v.z); - } - - // ww - template - GLM_INLINE glm::vec<2, T, Q> ww(const glm::vec<4, T, Q> &v) { - return glm::vec<2, T, Q>(v.w, v.w); - } - - // xxx - template - GLM_INLINE glm::vec<3, T, Q> xxx(const glm::vec<1, T, Q> &v) { - return glm::vec<3, T, Q>(v.x, v.x, v.x); - } - - template - GLM_INLINE glm::vec<3, T, Q> xxx(const glm::vec<2, T, Q> &v) { - return glm::vec<3, T, Q>(v.x, v.x, v.x); - } - - template - GLM_INLINE glm::vec<3, T, Q> xxx(const glm::vec<3, T, Q> &v) { - return glm::vec<3, T, Q>(v.x, v.x, v.x); - } - - template - GLM_INLINE glm::vec<3, T, Q> xxx(const glm::vec<4, T, Q> &v) { - return glm::vec<3, T, Q>(v.x, v.x, v.x); - } - - // xxy - template - GLM_INLINE glm::vec<3, T, Q> xxy(const glm::vec<2, T, Q> &v) { - return glm::vec<3, T, Q>(v.x, v.x, v.y); - } - - template - GLM_INLINE glm::vec<3, T, Q> xxy(const glm::vec<3, T, Q> &v) { - return glm::vec<3, T, Q>(v.x, v.x, v.y); - } - - template - GLM_INLINE glm::vec<3, T, Q> xxy(const glm::vec<4, T, Q> &v) { - return glm::vec<3, T, Q>(v.x, v.x, v.y); - } - - // xxz - template - GLM_INLINE glm::vec<3, T, Q> xxz(const glm::vec<3, T, Q> &v) { - return glm::vec<3, T, Q>(v.x, v.x, v.z); - } - - template - GLM_INLINE glm::vec<3, T, Q> xxz(const glm::vec<4, T, Q> &v) { - return glm::vec<3, T, Q>(v.x, v.x, v.z); - } - - // xxw - template - GLM_INLINE glm::vec<3, T, Q> xxw(const glm::vec<4, T, Q> &v) { - return glm::vec<3, T, Q>(v.x, v.x, v.w); - } - - // xyx - template - GLM_INLINE glm::vec<3, T, Q> xyx(const glm::vec<2, T, Q> &v) { - return glm::vec<3, T, Q>(v.x, v.y, v.x); - } - - template - GLM_INLINE glm::vec<3, T, Q> xyx(const glm::vec<3, T, Q> &v) { - return glm::vec<3, T, Q>(v.x, v.y, v.x); - } - - template - GLM_INLINE glm::vec<3, T, Q> xyx(const glm::vec<4, T, Q> &v) { - return glm::vec<3, T, Q>(v.x, v.y, v.x); - } - - // xyy - template - GLM_INLINE glm::vec<3, T, Q> xyy(const glm::vec<2, T, Q> &v) { - return glm::vec<3, T, Q>(v.x, v.y, v.y); - } - - template - GLM_INLINE glm::vec<3, T, Q> xyy(const glm::vec<3, T, Q> &v) { - return glm::vec<3, T, Q>(v.x, v.y, v.y); - } - - template - GLM_INLINE glm::vec<3, T, Q> xyy(const glm::vec<4, T, Q> &v) { - return glm::vec<3, T, Q>(v.x, v.y, v.y); - } - - // xyz - template - GLM_INLINE glm::vec<3, T, Q> xyz(const glm::vec<3, T, Q> &v) { - return glm::vec<3, T, Q>(v.x, v.y, v.z); - } - - template - GLM_INLINE glm::vec<3, T, Q> xyz(const glm::vec<4, T, Q> &v) { - return glm::vec<3, T, Q>(v.x, v.y, v.z); - } - - // xyw - template - GLM_INLINE glm::vec<3, T, Q> xyw(const glm::vec<4, T, Q> &v) { - return glm::vec<3, T, Q>(v.x, v.y, v.w); - } - - // xzx - template - GLM_INLINE glm::vec<3, T, Q> xzx(const glm::vec<3, T, Q> &v) { - return glm::vec<3, T, Q>(v.x, v.z, v.x); - } - - template - GLM_INLINE glm::vec<3, T, Q> xzx(const glm::vec<4, T, Q> &v) { - return glm::vec<3, T, Q>(v.x, v.z, v.x); - } - - // xzy - template - GLM_INLINE glm::vec<3, T, Q> xzy(const glm::vec<3, T, Q> &v) { - return glm::vec<3, T, Q>(v.x, v.z, v.y); - } - - template - GLM_INLINE glm::vec<3, T, Q> xzy(const glm::vec<4, T, Q> &v) { - return glm::vec<3, T, Q>(v.x, v.z, v.y); - } - - // xzz - template - GLM_INLINE glm::vec<3, T, Q> xzz(const glm::vec<3, T, Q> &v) { - return glm::vec<3, T, Q>(v.x, v.z, v.z); - } - - template - GLM_INLINE glm::vec<3, T, Q> xzz(const glm::vec<4, T, Q> &v) { - return glm::vec<3, T, 
Q>(v.x, v.z, v.z);
-	}
-
[Deleted hunk, condensed: the remainder of this swizzle header, namely the three-component
accessors from xzw through www and every four-component accessor from xxxx through wwww.
Each is a one-line template function returning a glm::vec built from the named reordering
of the source vector's components; the hunk ends by closing the glm namespace.]
diff --git a/third_party/glm/gtx/vector_angle.hpp b/third_party/glm/gtx/vector_angle.hpp
deleted file mode 100755
index 9ae4371..0000000
--- a/third_party/glm/gtx/vector_angle.hpp
+++ /dev/null
@@ -1,57 +0,0 @@
[Deleted file, condensed: the GLM_GTX_vector_angle extension header. It includes
../glm.hpp, ../gtc/epsilon.hpp, ../gtx/quaternion.hpp and ../gtx/rotate_vector.hpp,
emits the usual experimental-extension #pragma message, and declares, for normalized
inputs, angle(x, y) together with the 2D and 3D orientedAngle overloads in the glm
namespace.]
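For context on what the removed extension computed, here is a minimal, dependency-free
sketch of the unsigned angle() described above, using a local vec3 alias and helpers
rather than GLM types (an illustration only, not part of the patch):

	#include <algorithm>
	#include <array>
	#include <cmath>
	#include <cstdio>

	using vec3 = std::array<float, 3>;

	static float dot(const vec3& a, const vec3& b)
	{
		return a[0] * b[0] + a[1] * b[1] + a[2] * b[2];
	}

	// Absolute (unsigned) angle between two normalized vectors, matching the
	// removed implementation: acos(clamp(dot(x, y), -1, 1)).
	static float angle(const vec3& x, const vec3& y)
	{
		return std::acos(std::clamp(dot(x, y), -1.0f, 1.0f));
	}

	int main()
	{
		vec3 px{1.0f, 0.0f, 0.0f};
		vec3 py{0.0f, 1.0f, 0.0f};
		std::printf("angle = %f rad\n", angle(px, py)); // ~1.570796 (pi/2)
		return 0;
	}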
diff --git a/third_party/glm/gtx/vector_angle.inl b/third_party/glm/gtx/vector_angle.inl
deleted file mode 100755
index a1f957a..0000000
--- a/third_party/glm/gtx/vector_angle.inl
+++ /dev/null
@@ -1,44 +0,0 @@
[Deleted file, condensed: the GLM_GTX_vector_angle implementation. angle() returns
acos(clamp(dot(x, y), -1, 1)); the 2D orientedAngle() negates that angle unless rotating
x by it reproduces y within a hard-coded tolerance of T(0.0001) (the file carries a
"\todo epsilon is hard coded to 0.01" note); the 3D orientedAngle() flips the sign when
dot(ref, cross(x, y)) is negative.]
diff --git a/third_party/glm/gtx/vector_query.hpp b/third_party/glm/gtx/vector_query.hpp
deleted file mode 100755
index 77c7b97..0000000
--- a/third_party/glm/gtx/vector_query.hpp
+++ /dev/null
@@ -1,66 +0,0 @@
[Deleted file, condensed: the GLM_GTX_vector_query extension header, declaring the
epsilon-based predicates areCollinear, areOrthogonal, isNormalized, isNull, isCompNull
and areOrthonormal for glm vectors.]
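The removed 3D orientedAngle picks the sign of the unsigned angle from a caller-supplied
reference axis. A standalone sketch of that rule, with local helpers instead of GLM code
(illustration only):

	#include <algorithm>
	#include <array>
	#include <cmath>

	using vec3 = std::array<float, 3>;

	static float dot(const vec3& a, const vec3& b)
	{
		return a[0] * b[0] + a[1] * b[1] + a[2] * b[2];
	}

	static vec3 cross(const vec3& a, const vec3& b)
	{
		return {a[1] * b[2] - a[2] * b[1],
		        a[2] * b[0] - a[0] * b[2],
		        a[0] * b[1] - a[1] * b[0]};
	}

	// x and y must be normalized; ref is the axis that defines the positive
	// rotation direction. Mirrors mix(Angle, -Angle, dot(ref, cross(x, y)) < 0).
	static float orientedAngle(const vec3& x, const vec3& y, const vec3& ref)
	{
		const float a = std::acos(std::clamp(dot(x, y), -1.0f, 1.0f));
		return dot(ref, cross(x, y)) < 0.0f ? -a : a;
	}

	// orientedAngle({1,0,0}, {0,1,0}, {0,0,1}) == +pi/2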
diff --git a/third_party/glm/gtx/vector_query.inl b/third_party/glm/gtx/vector_query.inl
deleted file mode 100755
index d1a5c9b..0000000
--- a/third_party/glm/gtx/vector_query.inl
+++ /dev/null
@@ -1,154 +0,0 @@
[Deleted file, condensed: the GLM_GTX_vector_query implementation. Collinearity is tested
with length(cross(v0, v1)) < epsilon (2D inputs are promoted to 3D first), orthogonality
with abs(dot(v0, v1)) <= max(1, length(v0)) * max(1, length(v1)) * epsilon, isNormalized
with abs(length(v) - 1) <= 2 * epsilon, isNull with length(v) <= epsilon, isCompNull
component-wise with abs(component) < epsilon, and areOrthonormal as both vectors being
normalized with abs(dot(v0, v1)) <= epsilon.]
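A dependency-free sketch of three of the predicates the removed gtx_vector_query
implementation evaluated, following the formulas visible in the deleted hunk (local vec3
helpers, illustration only):

	#include <array>
	#include <cmath>

	using vec3 = std::array<float, 3>;

	static float dot(const vec3& a, const vec3& b)
	{
		return a[0] * b[0] + a[1] * b[1] + a[2] * b[2];
	}

	static float length(const vec3& v) { return std::sqrt(dot(v, v)); }

	// abs(length(v) - 1) <= 2 * epsilon
	static bool isNormalized(const vec3& v, float epsilon)
	{
		return std::fabs(length(v) - 1.0f) <= 2.0f * epsilon;
	}

	// length(v) <= epsilon
	static bool isNull(const vec3& v, float epsilon)
	{
		return length(v) <= epsilon;
	}

	// abs(dot(v0, v1)) <= max(1, |v0|) * max(1, |v1|) * epsilon
	static bool areOrthogonal(const vec3& v0, const vec3& v1, float epsilon)
	{
		return std::fabs(dot(v0, v1)) <=
		       std::fmax(1.0f, length(v0)) * std::fmax(1.0f, length(v1)) * epsilon;
	}

	// isNormalized({1,0,0}, 1e-6f), isNull({0,0,0}, 1e-6f) and
	// areOrthogonal({1,0,0}, {0,1,0}, 1e-6f) all evaluate to true.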
GLM_FUNC_QUALIFIER vec isCompNull(vec const& v, T const& epsilon) - { - GLM_STATIC_ASSERT(std::numeric_limits::is_iec559, "'isCompNull' only accept floating-point inputs"); - - return detail::compute_isCompNull::call(v, epsilon); - } - - template - GLM_FUNC_QUALIFIER vec<2, bool, Q> isCompNull(vec<2, T, Q> const& v, T const& epsilon) - { - return vec<2, bool, Q>( - abs(v.x) < epsilon, - abs(v.y) < epsilon); - } - - template - GLM_FUNC_QUALIFIER vec<3, bool, Q> isCompNull(vec<3, T, Q> const& v, T const& epsilon) - { - return vec<3, bool, Q>( - abs(v.x) < epsilon, - abs(v.y) < epsilon, - abs(v.z) < epsilon); - } - - template - GLM_FUNC_QUALIFIER vec<4, bool, Q> isCompNull(vec<4, T, Q> const& v, T const& epsilon) - { - return vec<4, bool, Q>( - abs(v.x) < epsilon, - abs(v.y) < epsilon, - abs(v.z) < epsilon, - abs(v.w) < epsilon); - } - - template - GLM_FUNC_QUALIFIER bool areOrthonormal(vec const& v0, vec const& v1, T const& epsilon) - { - return isNormalized(v0, epsilon) && isNormalized(v1, epsilon) && (abs(dot(v0, v1)) <= epsilon); - } - -}//namespace glm diff --git a/third_party/glm/gtx/wrap.hpp b/third_party/glm/gtx/wrap.hpp deleted file mode 100755 index 02c5196..0000000 --- a/third_party/glm/gtx/wrap.hpp +++ /dev/null @@ -1,55 +0,0 @@ -/// @ref gtx_wrap -/// @file glm/gtx/wrap.hpp -/// -/// @see core (dependence) -/// -/// @defgroup gtx_wrap GLM_GTX_wrap -/// @ingroup gtx -/// -/// Include to use the features of this extension. -/// -/// Wrapping mode of texture coordinates. - -#pragma once - -// Dependency: -#include "../glm.hpp" -#include "../gtc/vec1.hpp" - -#if GLM_MESSAGES == GLM_ENABLE && !defined(GLM_EXT_INCLUDED) -# ifndef GLM_ENABLE_EXPERIMENTAL -# pragma message("GLM: GLM_GTX_wrap is an experimental extension and may change in the future. Use #define GLM_ENABLE_EXPERIMENTAL before including it, if you really want to use it.") -# else -# pragma message("GLM: GLM_GTX_wrap extension included") -# endif -#endif - -namespace glm -{ - /// @addtogroup gtx_wrap - /// @{ - - /// Simulate GL_CLAMP OpenGL wrap mode - /// @see gtx_wrap extension. - template - GLM_FUNC_DECL genType clamp(genType const& Texcoord); - - /// Simulate GL_REPEAT OpenGL wrap mode - /// @see gtx_wrap extension. - template - GLM_FUNC_DECL genType repeat(genType const& Texcoord); - - /// Simulate GL_MIRRORED_REPEAT OpenGL wrap mode - /// @see gtx_wrap extension. - template - GLM_FUNC_DECL genType mirrorClamp(genType const& Texcoord); - - /// Simulate GL_MIRROR_REPEAT OpenGL wrap mode - /// @see gtx_wrap extension. 
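(Likewise, a small illustration of the GTX vector_query predicates whose implementations are deleted just above, again assuming GLM is consumed as an external dependency rather than vendored; demo_vector_query is a made-up name used only for this sketch.)

    #define GLM_ENABLE_EXPERIMENTAL
    #include <glm/glm.hpp>
    #include <glm/gtx/vector_query.hpp>

    bool demo_vector_query()
    {
        glm::vec3 x(1.0f, 0.0f, 0.0f);
        glm::vec3 y(0.0f, 1.0f, 0.0f);
        float eps = 0.0001f;
        return glm::isNormalized(x, eps)       // |x| ~ 1
            && glm::areOrthogonal(x, y, eps)   // dot(x, y) ~ 0
            && glm::areOrthonormal(x, y, eps)
            && !glm::areCollinear(x, y, eps);
    }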
- template - GLM_FUNC_DECL genType mirrorRepeat(genType const& Texcoord); - - /// @} -}// namespace glm - -#include "wrap.inl" diff --git a/third_party/glm/gtx/wrap.inl b/third_party/glm/gtx/wrap.inl deleted file mode 100755 index 409a316..0000000 --- a/third_party/glm/gtx/wrap.inl +++ /dev/null @@ -1,57 +0,0 @@ -/// @ref gtx_wrap - -namespace glm -{ - template - GLM_FUNC_QUALIFIER vec clamp(vec const& Texcoord) - { - return glm::clamp(Texcoord, vec(0), vec(1)); - } - - template - GLM_FUNC_QUALIFIER genType clamp(genType const& Texcoord) - { - return clamp(vec<1, genType, defaultp>(Texcoord)).x; - } - - template - GLM_FUNC_QUALIFIER vec repeat(vec const& Texcoord) - { - return glm::fract(Texcoord); - } - - template - GLM_FUNC_QUALIFIER genType repeat(genType const& Texcoord) - { - return repeat(vec<1, genType, defaultp>(Texcoord)).x; - } - - template - GLM_FUNC_QUALIFIER vec mirrorClamp(vec const& Texcoord) - { - return glm::fract(glm::abs(Texcoord)); - } - - template - GLM_FUNC_QUALIFIER genType mirrorClamp(genType const& Texcoord) - { - return mirrorClamp(vec<1, genType, defaultp>(Texcoord)).x; - } - - template - GLM_FUNC_QUALIFIER vec mirrorRepeat(vec const& Texcoord) - { - vec const Abs = glm::abs(Texcoord); - vec const Clamp = glm::mod(glm::floor(Abs), vec(2)); - vec const Floor = glm::floor(Abs); - vec const Rest = Abs - Floor; - vec const Mirror = Clamp + Rest; - return mix(Rest, vec(1) - Rest, glm::greaterThanEqual(Mirror, vec(1))); - } - - template - GLM_FUNC_QUALIFIER genType mirrorRepeat(genType const& Texcoord) - { - return mirrorRepeat(vec<1, genType, defaultp>(Texcoord)).x; - } -}//namespace glm diff --git a/third_party/glm/integer.hpp b/third_party/glm/integer.hpp deleted file mode 100755 index 8817db3..0000000 --- a/third_party/glm/integer.hpp +++ /dev/null @@ -1,212 +0,0 @@ -/// @ref core -/// @file glm/integer.hpp -/// -/// @see GLSL 4.20.8 specification, section 8.8 Integer Functions -/// -/// @defgroup core_func_integer Integer functions -/// @ingroup core -/// -/// Provides GLSL functions on integer types -/// -/// These all operate component-wise. The description is per component. -/// The notation [a, b] means the set of bits from bit-number a through bit-number -/// b, inclusive. The lowest-order bit is bit 0. -/// -/// Include to use these core features. - -#pragma once - -#include "detail/qualifier.hpp" -#include "common.hpp" -#include "vector_relational.hpp" - -namespace glm -{ - /// @addtogroup core_func_integer - /// @{ - - /// Adds 32-bit unsigned integer x and y, returning the sum - /// modulo pow(2, 32). The value carry is set to 0 if the sum was - /// less than pow(2, 32), or to 1 otherwise. - /// - /// @tparam L An integer between 1 and 4 included that qualify the dimension of the vector. - /// - /// @see GLSL uaddCarry man page - /// @see GLSL 4.20.8 specification, section 8.8 Integer Functions - template - GLM_FUNC_DECL vec uaddCarry( - vec const& x, - vec const& y, - vec & carry); - - /// Subtracts the 32-bit unsigned integer y from x, returning - /// the difference if non-negative, or pow(2, 32) plus the difference - /// otherwise. The value borrow is set to 0 if x >= y, or to 1 otherwise. - /// - /// @tparam L An integer between 1 and 4 included that qualify the dimension of the vector. 
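(The wrap.inl implementations above reduce to simple per-component arithmetic. As a sanity reference, a plain scalar restatement of the three wrap modes, assuming ordinary float texture coordinates; the function names are ours.)

    #include <algorithm>
    #include <cmath>

    float wrap_clamp(float u)  { return std::clamp(u, 0.0f, 1.0f); } // GL_CLAMP
    float wrap_repeat(float u) { return u - std::floor(u); }         // GL_REPEAT

    float wrap_mirror_repeat(float u)                                 // GL_MIRRORED_REPEAT
    {
        float a    = std::fabs(u);
        float rest = a - std::floor(a);
        bool  odd  = std::fmod(std::floor(a), 2.0f) >= 1.0f;          // odd period => mirrored
        return odd ? 1.0f - rest : rest;
    }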
- /// - /// @see GLSL usubBorrow man page - /// @see GLSL 4.20.8 specification, section 8.8 Integer Functions - template - GLM_FUNC_DECL vec usubBorrow( - vec const& x, - vec const& y, - vec & borrow); - - /// Multiplies 32-bit integers x and y, producing a 64-bit - /// result. The 32 least-significant bits are returned in lsb. - /// The 32 most-significant bits are returned in msb. - /// - /// @tparam L An integer between 1 and 4 included that qualify the dimension of the vector. - /// - /// @see GLSL umulExtended man page - /// @see GLSL 4.20.8 specification, section 8.8 Integer Functions - template - GLM_FUNC_DECL void umulExtended( - vec const& x, - vec const& y, - vec & msb, - vec & lsb); - - /// Multiplies 32-bit integers x and y, producing a 64-bit - /// result. The 32 least-significant bits are returned in lsb. - /// The 32 most-significant bits are returned in msb. - /// - /// @tparam L An integer between 1 and 4 included that qualify the dimension of the vector. - /// - /// @see GLSL imulExtended man page - /// @see GLSL 4.20.8 specification, section 8.8 Integer Functions - template - GLM_FUNC_DECL void imulExtended( - vec const& x, - vec const& y, - vec & msb, - vec & lsb); - - /// Extracts bits [offset, offset + bits - 1] from value, - /// returning them in the least significant bits of the result. - /// For unsigned data types, the most significant bits of the - /// result will be set to zero. For signed data types, the - /// most significant bits will be set to the value of bit offset + base - 1. - /// - /// If bits is zero, the result will be zero. The result will be - /// undefined if offset or bits is negative, or if the sum of - /// offset and bits is greater than the number of bits used - /// to store the operand. - /// - /// @tparam L An integer between 1 and 4 included that qualify the dimension of the vector. - /// @tparam T Signed or unsigned integer scalar types. - /// - /// @see GLSL bitfieldExtract man page - /// @see GLSL 4.20.8 specification, section 8.8 Integer Functions - template - GLM_FUNC_DECL vec bitfieldExtract( - vec const& Value, - int Offset, - int Bits); - - /// Returns the insertion the bits least-significant bits of insert into base. - /// - /// The result will have bits [offset, offset + bits - 1] taken - /// from bits [0, bits - 1] of insert, and all other bits taken - /// directly from the corresponding bits of base. If bits is - /// zero, the result will simply be base. The result will be - /// undefined if offset or bits is negative, or if the sum of - /// offset and bits is greater than the number of bits used to - /// store the operand. - /// - /// @tparam L An integer between 1 and 4 included that qualify the dimension of the vector. - /// @tparam T Signed or unsigned integer scalar or vector types. - /// - /// @see GLSL bitfieldInsert man page - /// @see GLSL 4.20.8 specification, section 8.8 Integer Functions - template - GLM_FUNC_DECL vec bitfieldInsert( - vec const& Base, - vec const& Insert, - int Offset, - int Bits); - - /// Returns the reversal of the bits of value. - /// The bit numbered n of the result will be taken from bit (bits - 1) - n of value, - /// where bits is the total number of bits used to represent value. - /// - /// @tparam L An integer between 1 and 4 included that qualify the dimension of the vector. - /// @tparam T Signed or unsigned integer scalar or vector types. 
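(bitfieldExtract()/bitfieldInsert() above follow the GLSL semantics quoted in the comments. A scalar 32-bit sketch of the same masking logic, kept deliberately simple and assuming offset/bits stay within range; names are ours.)

    #include <cstdint>

    uint32_t bitfield_extract(uint32_t value, int offset, int bits)
    {
        if (bits == 0) return 0u;                                         // GLSL: zero bits yields zero
        uint32_t mask = (bits >= 32) ? 0xFFFFFFFFu : ((1u << bits) - 1u);
        return (value >> offset) & mask;
    }

    uint32_t bitfield_insert(uint32_t base, uint32_t insert, int offset, int bits)
    {
        if (bits == 0) return base;                                       // GLSL: zero bits yields base
        uint32_t mask = ((bits >= 32) ? 0xFFFFFFFFu : ((1u << bits) - 1u)) << offset;
        return (base & ~mask) | ((insert << offset) & mask);
    }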
- /// - /// @see GLSL bitfieldReverse man page - /// @see GLSL 4.20.8 specification, section 8.8 Integer Functions - template - GLM_FUNC_DECL vec bitfieldReverse(vec const& v); - - /// Returns the number of bits set to 1 in the binary representation of value. - /// - /// @tparam genType Signed or unsigned integer scalar or vector types. - /// - /// @see GLSL bitCount man page - /// @see GLSL 4.20.8 specification, section 8.8 Integer Functions - template - GLM_FUNC_DECL int bitCount(genType v); - - /// Returns the number of bits set to 1 in the binary representation of value. - /// - /// @tparam L An integer between 1 and 4 included that qualify the dimension of the vector. - /// @tparam T Signed or unsigned integer scalar or vector types. - /// - /// @see GLSL bitCount man page - /// @see GLSL 4.20.8 specification, section 8.8 Integer Functions - template - GLM_FUNC_DECL vec bitCount(vec const& v); - - /// Returns the bit number of the least significant bit set to - /// 1 in the binary representation of value. - /// If value is zero, -1 will be returned. - /// - /// @tparam genIUType Signed or unsigned integer scalar types. - /// - /// @see GLSL findLSB man page - /// @see GLSL 4.20.8 specification, section 8.8 Integer Functions - template - GLM_FUNC_DECL int findLSB(genIUType x); - - /// Returns the bit number of the least significant bit set to - /// 1 in the binary representation of value. - /// If value is zero, -1 will be returned. - /// - /// @tparam L An integer between 1 and 4 included that qualify the dimension of the vector. - /// @tparam T Signed or unsigned integer scalar types. - /// - /// @see GLSL findLSB man page - /// @see GLSL 4.20.8 specification, section 8.8 Integer Functions - template - GLM_FUNC_DECL vec findLSB(vec const& v); - - /// Returns the bit number of the most significant bit in the binary representation of value. - /// For positive integers, the result will be the bit number of the most significant bit set to 1. - /// For negative integers, the result will be the bit number of the most significant - /// bit set to 0. For a value of zero or negative one, -1 will be returned. - /// - /// @tparam genIUType Signed or unsigned integer scalar types. - /// - /// @see GLSL findMSB man page - /// @see GLSL 4.20.8 specification, section 8.8 Integer Functions - template - GLM_FUNC_DECL int findMSB(genIUType x); - - /// Returns the bit number of the most significant bit in the binary representation of value. - /// For positive integers, the result will be the bit number of the most significant bit set to 1. - /// For negative integers, the result will be the bit number of the most significant - /// bit set to 0. For a value of zero or negative one, -1 will be returned. - /// - /// @tparam L An integer between 1 and 4 included that qualify the dimension of the vector. - /// @tparam T Signed or unsigned integer scalar types. 
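(Loop-based scalar equivalents of findLSB()/findMSB() for unsigned 32-bit values, matching the return-minus-one-for-zero convention documented above; the library itself may dispatch to compiler intrinsics instead.)

    #include <cstdint>

    int find_lsb(uint32_t x)
    {
        if (x == 0u) return -1;                // GLSL convention for zero
        int bit = 0;
        while (((x >> bit) & 1u) == 0u) ++bit;
        return bit;
    }

    int find_msb(uint32_t x)
    {
        if (x == 0u) return -1;
        int bit = 31;
        while (((x >> bit) & 1u) == 0u) --bit;
        return bit;
    }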
- /// - /// @see GLSL findMSB man page - /// @see GLSL 4.20.8 specification, section 8.8 Integer Functions - template - GLM_FUNC_DECL vec findMSB(vec const& v); - - /// @} -}//namespace glm - -#include "detail/func_integer.inl" diff --git a/third_party/glm/mat2x2.hpp b/third_party/glm/mat2x2.hpp deleted file mode 100755 index 96bec96..0000000 --- a/third_party/glm/mat2x2.hpp +++ /dev/null @@ -1,9 +0,0 @@ -/// @ref core -/// @file glm/mat2x2.hpp - -#pragma once -#include "./ext/matrix_double2x2.hpp" -#include "./ext/matrix_double2x2_precision.hpp" -#include "./ext/matrix_float2x2.hpp" -#include "./ext/matrix_float2x2_precision.hpp" - diff --git a/third_party/glm/mat2x3.hpp b/third_party/glm/mat2x3.hpp deleted file mode 100755 index d68dc25..0000000 --- a/third_party/glm/mat2x3.hpp +++ /dev/null @@ -1,9 +0,0 @@ -/// @ref core -/// @file glm/mat2x3.hpp - -#pragma once -#include "./ext/matrix_double2x3.hpp" -#include "./ext/matrix_double2x3_precision.hpp" -#include "./ext/matrix_float2x3.hpp" -#include "./ext/matrix_float2x3_precision.hpp" - diff --git a/third_party/glm/mat2x4.hpp b/third_party/glm/mat2x4.hpp deleted file mode 100755 index b04b738..0000000 --- a/third_party/glm/mat2x4.hpp +++ /dev/null @@ -1,9 +0,0 @@ -/// @ref core -/// @file glm/mat2x4.hpp - -#pragma once -#include "./ext/matrix_double2x4.hpp" -#include "./ext/matrix_double2x4_precision.hpp" -#include "./ext/matrix_float2x4.hpp" -#include "./ext/matrix_float2x4_precision.hpp" - diff --git a/third_party/glm/mat3x2.hpp b/third_party/glm/mat3x2.hpp deleted file mode 100755 index c853153..0000000 --- a/third_party/glm/mat3x2.hpp +++ /dev/null @@ -1,9 +0,0 @@ -/// @ref core -/// @file glm/mat3x2.hpp - -#pragma once -#include "./ext/matrix_double3x2.hpp" -#include "./ext/matrix_double3x2_precision.hpp" -#include "./ext/matrix_float3x2.hpp" -#include "./ext/matrix_float3x2_precision.hpp" - diff --git a/third_party/glm/mat3x3.hpp b/third_party/glm/mat3x3.hpp deleted file mode 100755 index fd4fa31..0000000 --- a/third_party/glm/mat3x3.hpp +++ /dev/null @@ -1,8 +0,0 @@ -/// @ref core -/// @file glm/mat3x3.hpp - -#pragma once -#include "./ext/matrix_double3x3.hpp" -#include "./ext/matrix_double3x3_precision.hpp" -#include "./ext/matrix_float3x3.hpp" -#include "./ext/matrix_float3x3_precision.hpp" diff --git a/third_party/glm/mat3x4.hpp b/third_party/glm/mat3x4.hpp deleted file mode 100755 index 6342bf5..0000000 --- a/third_party/glm/mat3x4.hpp +++ /dev/null @@ -1,8 +0,0 @@ -/// @ref core -/// @file glm/mat3x4.hpp - -#pragma once -#include "./ext/matrix_double3x4.hpp" -#include "./ext/matrix_double3x4_precision.hpp" -#include "./ext/matrix_float3x4.hpp" -#include "./ext/matrix_float3x4_precision.hpp" diff --git a/third_party/glm/mat4x2.hpp b/third_party/glm/mat4x2.hpp deleted file mode 100755 index e013e46..0000000 --- a/third_party/glm/mat4x2.hpp +++ /dev/null @@ -1,9 +0,0 @@ -/// @ref core -/// @file glm/mat4x2.hpp - -#pragma once -#include "./ext/matrix_double4x2.hpp" -#include "./ext/matrix_double4x2_precision.hpp" -#include "./ext/matrix_float4x2.hpp" -#include "./ext/matrix_float4x2_precision.hpp" - diff --git a/third_party/glm/mat4x3.hpp b/third_party/glm/mat4x3.hpp deleted file mode 100755 index 205725a..0000000 --- a/third_party/glm/mat4x3.hpp +++ /dev/null @@ -1,8 +0,0 @@ -/// @ref core -/// @file glm/mat4x3.hpp - -#pragma once -#include "./ext/matrix_double4x3.hpp" -#include "./ext/matrix_double4x3_precision.hpp" -#include "./ext/matrix_float4x3.hpp" -#include "./ext/matrix_float4x3_precision.hpp" diff --git 
a/third_party/glm/mat4x4.hpp b/third_party/glm/mat4x4.hpp deleted file mode 100755 index 3515f7f..0000000 --- a/third_party/glm/mat4x4.hpp +++ /dev/null @@ -1,9 +0,0 @@ -/// @ref core -/// @file glm/mat4x4.hpp - -#pragma once -#include "./ext/matrix_double4x4.hpp" -#include "./ext/matrix_double4x4_precision.hpp" -#include "./ext/matrix_float4x4.hpp" -#include "./ext/matrix_float4x4_precision.hpp" - diff --git a/third_party/glm/matrix.hpp b/third_party/glm/matrix.hpp deleted file mode 100755 index 6badf53..0000000 --- a/third_party/glm/matrix.hpp +++ /dev/null @@ -1,161 +0,0 @@ -/// @ref core -/// @file glm/matrix.hpp -/// -/// @see GLSL 4.20.8 specification, section 8.6 Matrix Functions -/// -/// @defgroup core_func_matrix Matrix functions -/// @ingroup core -/// -/// Provides GLSL matrix functions. -/// -/// Include to use these core features. - -#pragma once - -// Dependencies -#include "detail/qualifier.hpp" -#include "detail/setup.hpp" -#include "vec2.hpp" -#include "vec3.hpp" -#include "vec4.hpp" -#include "mat2x2.hpp" -#include "mat2x3.hpp" -#include "mat2x4.hpp" -#include "mat3x2.hpp" -#include "mat3x3.hpp" -#include "mat3x4.hpp" -#include "mat4x2.hpp" -#include "mat4x3.hpp" -#include "mat4x4.hpp" - -namespace glm { -namespace detail -{ - template - struct outerProduct_trait{}; - - template - struct outerProduct_trait<2, 2, T, Q> - { - typedef mat<2, 2, T, Q> type; - }; - - template - struct outerProduct_trait<2, 3, T, Q> - { - typedef mat<3, 2, T, Q> type; - }; - - template - struct outerProduct_trait<2, 4, T, Q> - { - typedef mat<4, 2, T, Q> type; - }; - - template - struct outerProduct_trait<3, 2, T, Q> - { - typedef mat<2, 3, T, Q> type; - }; - - template - struct outerProduct_trait<3, 3, T, Q> - { - typedef mat<3, 3, T, Q> type; - }; - - template - struct outerProduct_trait<3, 4, T, Q> - { - typedef mat<4, 3, T, Q> type; - }; - - template - struct outerProduct_trait<4, 2, T, Q> - { - typedef mat<2, 4, T, Q> type; - }; - - template - struct outerProduct_trait<4, 3, T, Q> - { - typedef mat<3, 4, T, Q> type; - }; - - template - struct outerProduct_trait<4, 4, T, Q> - { - typedef mat<4, 4, T, Q> type; - }; -}//namespace detail - - /// @addtogroup core_func_matrix - /// @{ - - /// Multiply matrix x by matrix y component-wise, i.e., - /// result[i][j] is the scalar product of x[i][j] and y[i][j]. - /// - /// @tparam C Integer between 1 and 4 included that qualify the number a column - /// @tparam R Integer between 1 and 4 included that qualify the number a row - /// @tparam T Floating-point or signed integer scalar types - /// @tparam Q Value from qualifier enum - /// - /// @see GLSL matrixCompMult man page - /// @see GLSL 4.20.8 specification, section 8.6 Matrix Functions - template - GLM_FUNC_DECL mat matrixCompMult(mat const& x, mat const& y); - - /// Treats the first parameter c as a column vector - /// and the second parameter r as a row vector - /// and does a linear algebraic matrix multiply c * r. 
- /// - /// @tparam C Integer between 1 and 4 included that qualify the number a column - /// @tparam R Integer between 1 and 4 included that qualify the number a row - /// @tparam T Floating-point or signed integer scalar types - /// @tparam Q Value from qualifier enum - /// - /// @see GLSL outerProduct man page - /// @see GLSL 4.20.8 specification, section 8.6 Matrix Functions - template - GLM_FUNC_DECL typename detail::outerProduct_trait::type outerProduct(vec const& c, vec const& r); - - /// Returns the transposed matrix of x - /// - /// @tparam C Integer between 1 and 4 included that qualify the number a column - /// @tparam R Integer between 1 and 4 included that qualify the number a row - /// @tparam T Floating-point or signed integer scalar types - /// @tparam Q Value from qualifier enum - /// - /// @see GLSL transpose man page - /// @see GLSL 4.20.8 specification, section 8.6 Matrix Functions - template - GLM_FUNC_DECL typename mat::transpose_type transpose(mat const& x); - - /// Return the determinant of a squared matrix. - /// - /// @tparam C Integer between 1 and 4 included that qualify the number a column - /// @tparam R Integer between 1 and 4 included that qualify the number a row - /// @tparam T Floating-point or signed integer scalar types - /// @tparam Q Value from qualifier enum - /// - /// @see GLSL determinant man page - /// @see GLSL 4.20.8 specification, section 8.6 Matrix Functions - template - GLM_FUNC_DECL T determinant(mat const& m); - - /// Return the inverse of a squared matrix. - /// - /// @tparam C Integer between 1 and 4 included that qualify the number a column - /// @tparam R Integer between 1 and 4 included that qualify the number a row - /// @tparam T Floating-point or signed integer scalar types - /// @tparam Q Value from qualifier enum - /// - /// @see GLSL inverse man page - /// @see GLSL 4.20.8 specification, section 8.6 Matrix Functions - template - GLM_FUNC_DECL mat inverse(mat const& m); - - /// @} -}//namespace glm - -#include "detail/func_matrix.inl" diff --git a/third_party/glm/packing.hpp b/third_party/glm/packing.hpp deleted file mode 100755 index ca83ac1..0000000 --- a/third_party/glm/packing.hpp +++ /dev/null @@ -1,173 +0,0 @@ -/// @ref core -/// @file glm/packing.hpp -/// -/// @see GLSL 4.20.8 specification, section 8.4 Floating-Point Pack and Unpack Functions -/// @see gtc_packing -/// -/// @defgroup core_func_packing Floating-Point Pack and Unpack Functions -/// @ingroup core -/// -/// Provides GLSL functions to pack and unpack half, single and double-precision floating point values into more compact integer types. -/// -/// These functions do not operate component-wise, rather as described in each case. -/// -/// Include to use these core features. - -#pragma once - -#include "./ext/vector_uint2.hpp" -#include "./ext/vector_float2.hpp" -#include "./ext/vector_float4.hpp" - -namespace glm -{ - /// @addtogroup core_func_packing - /// @{ - - /// First, converts each component of the normalized floating-point value v into 8- or 16-bit integer values. - /// Then, the results are packed into the returned 32-bit unsigned integer. - /// - /// The conversion for component c of v to fixed point is done as follows: - /// packUnorm2x16: round(clamp(c, 0, +1) * 65535.0) - /// - /// The first component of the vector will be written to the least significant bits of the output; - /// the last component will be written to the most significant bits. 
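(A short usage sketch of the core matrix functions declared above (matrixCompMult, outerProduct, transpose, determinant, inverse), assuming the glm headers stay available externally; demo_matrix_funcs is illustrative only.)

    #include <glm/glm.hpp>

    float demo_matrix_funcs()
    {
        glm::mat2 a(1.0f, 2.0f,
                    3.0f, 4.0f);                                    // column-major
        glm::mat2 comp  = glm::matrixCompMult(a, a);                // element-wise product
        glm::mat2 outer = glm::outerProduct(glm::vec2(1.0f, 2.0f),
                                            glm::vec2(3.0f, 4.0f)); // column * row
        glm::mat2 t     = glm::transpose(a);
        return glm::determinant(t)
             + glm::determinant(glm::inverse(a))
             + comp[0][0] + outer[1][1];
    }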
- /// - /// @see GLSL packUnorm2x16 man page - /// @see GLSL 4.20.8 specification, section 8.4 Floating-Point Pack and Unpack Functions - GLM_FUNC_DECL uint packUnorm2x16(vec2 const& v); - - /// First, converts each component of the normalized floating-point value v into 8- or 16-bit integer values. - /// Then, the results are packed into the returned 32-bit unsigned integer. - /// - /// The conversion for component c of v to fixed point is done as follows: - /// packSnorm2x16: round(clamp(v, -1, +1) * 32767.0) - /// - /// The first component of the vector will be written to the least significant bits of the output; - /// the last component will be written to the most significant bits. - /// - /// @see GLSL packSnorm2x16 man page - /// @see GLSL 4.20.8 specification, section 8.4 Floating-Point Pack and Unpack Functions - GLM_FUNC_DECL uint packSnorm2x16(vec2 const& v); - - /// First, converts each component of the normalized floating-point value v into 8- or 16-bit integer values. - /// Then, the results are packed into the returned 32-bit unsigned integer. - /// - /// The conversion for component c of v to fixed point is done as follows: - /// packUnorm4x8: round(clamp(c, 0, +1) * 255.0) - /// - /// The first component of the vector will be written to the least significant bits of the output; - /// the last component will be written to the most significant bits. - /// - /// @see GLSL packUnorm4x8 man page - /// @see GLSL 4.20.8 specification, section 8.4 Floating-Point Pack and Unpack Functions - GLM_FUNC_DECL uint packUnorm4x8(vec4 const& v); - - /// First, converts each component of the normalized floating-point value v into 8- or 16-bit integer values. - /// Then, the results are packed into the returned 32-bit unsigned integer. - /// - /// The conversion for component c of v to fixed point is done as follows: - /// packSnorm4x8: round(clamp(c, -1, +1) * 127.0) - /// - /// The first component of the vector will be written to the least significant bits of the output; - /// the last component will be written to the most significant bits. - /// - /// @see GLSL packSnorm4x8 man page - /// @see GLSL 4.20.8 specification, section 8.4 Floating-Point Pack and Unpack Functions - GLM_FUNC_DECL uint packSnorm4x8(vec4 const& v); - - /// First, unpacks a single 32-bit unsigned integer p into a pair of 16-bit unsigned integers, four 8-bit unsigned integers, or four 8-bit signed integers. - /// Then, each component is converted to a normalized floating-point value to generate the returned two- or four-component vector. - /// - /// The conversion for unpacked fixed-point value f to floating point is done as follows: - /// unpackUnorm2x16: f / 65535.0 - /// - /// The first component of the returned vector will be extracted from the least significant bits of the input; - /// the last component will be extracted from the most significant bits. - /// - /// @see GLSL unpackUnorm2x16 man page - /// @see GLSL 4.20.8 specification, section 8.4 Floating-Point Pack and Unpack Functions - GLM_FUNC_DECL vec2 unpackUnorm2x16(uint p); - - /// First, unpacks a single 32-bit unsigned integer p into a pair of 16-bit unsigned integers, four 8-bit unsigned integers, or four 8-bit signed integers. - /// Then, each component is converted to a normalized floating-point value to generate the returned two- or four-component vector. 
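(The packUnorm2x16()/unpackUnorm2x16() pair described above boils down to 16-bit fixed-point conversion. A hand-rolled scalar equivalent, with x landing in the least significant bits as the spec text above states; the function names are ours.)

    #include <algorithm>
    #include <cmath>
    #include <cstdint>

    uint32_t pack_unorm_2x16(float x, float y)
    {
        auto to_u16 = [](float c) {
            // round(clamp(c, 0, 1) * 65535.0)
            return static_cast<uint32_t>(std::lround(std::clamp(c, 0.0f, 1.0f) * 65535.0f));
        };
        return to_u16(x) | (to_u16(y) << 16);   // x -> low 16 bits, y -> high 16 bits
    }

    void unpack_unorm_2x16(uint32_t p, float& x, float& y)
    {
        x = static_cast<float>(p & 0xFFFFu) / 65535.0f;
        y = static_cast<float>(p >> 16)     / 65535.0f;
    }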
- /// - /// The conversion for unpacked fixed-point value f to floating point is done as follows: - /// unpackSnorm2x16: clamp(f / 32767.0, -1, +1) - /// - /// The first component of the returned vector will be extracted from the least significant bits of the input; - /// the last component will be extracted from the most significant bits. - /// - /// @see GLSL unpackSnorm2x16 man page - /// @see GLSL 4.20.8 specification, section 8.4 Floating-Point Pack and Unpack Functions - GLM_FUNC_DECL vec2 unpackSnorm2x16(uint p); - - /// First, unpacks a single 32-bit unsigned integer p into a pair of 16-bit unsigned integers, four 8-bit unsigned integers, or four 8-bit signed integers. - /// Then, each component is converted to a normalized floating-point value to generate the returned two- or four-component vector. - /// - /// The conversion for unpacked fixed-point value f to floating point is done as follows: - /// unpackUnorm4x8: f / 255.0 - /// - /// The first component of the returned vector will be extracted from the least significant bits of the input; - /// the last component will be extracted from the most significant bits. - /// - /// @see GLSL unpackUnorm4x8 man page - /// @see GLSL 4.20.8 specification, section 8.4 Floating-Point Pack and Unpack Functions - GLM_FUNC_DECL vec4 unpackUnorm4x8(uint p); - - /// First, unpacks a single 32-bit unsigned integer p into a pair of 16-bit unsigned integers, four 8-bit unsigned integers, or four 8-bit signed integers. - /// Then, each component is converted to a normalized floating-point value to generate the returned two- or four-component vector. - /// - /// The conversion for unpacked fixed-point value f to floating point is done as follows: - /// unpackSnorm4x8: clamp(f / 127.0, -1, +1) - /// - /// The first component of the returned vector will be extracted from the least significant bits of the input; - /// the last component will be extracted from the most significant bits. - /// - /// @see GLSL unpackSnorm4x8 man page - /// @see GLSL 4.20.8 specification, section 8.4 Floating-Point Pack and Unpack Functions - GLM_FUNC_DECL vec4 unpackSnorm4x8(uint p); - - /// Returns a double-qualifier value obtained by packing the components of v into a 64-bit value. - /// If an IEEE 754 Inf or NaN is created, it will not signal, and the resulting floating point value is unspecified. - /// Otherwise, the bit- level representation of v is preserved. - /// The first vector component specifies the 32 least significant bits; - /// the second component specifies the 32 most significant bits. - /// - /// @see GLSL packDouble2x32 man page - /// @see GLSL 4.20.8 specification, section 8.4 Floating-Point Pack and Unpack Functions - GLM_FUNC_DECL double packDouble2x32(uvec2 const& v); - - /// Returns a two-component unsigned integer vector representation of v. - /// The bit-level representation of v is preserved. - /// The first component of the vector contains the 32 least significant bits of the double; - /// the second component consists the 32 most significant bits. - /// - /// @see GLSL unpackDouble2x32 man page - /// @see GLSL 4.20.8 specification, section 8.4 Floating-Point Pack and Unpack Functions - GLM_FUNC_DECL uvec2 unpackDouble2x32(double v); - - /// Returns an unsigned integer obtained by converting the components of a two-component floating-point vector - /// to the 16-bit floating-point representation found in the OpenGL Specification, - /// and then packing these two 16- bit integers into a 32-bit unsigned integer. 
- /// The first vector component specifies the 16 least-significant bits of the result; - /// the second component specifies the 16 most-significant bits. - /// - /// @see GLSL packHalf2x16 man page - /// @see GLSL 4.20.8 specification, section 8.4 Floating-Point Pack and Unpack Functions - GLM_FUNC_DECL uint packHalf2x16(vec2 const& v); - - /// Returns a two-component floating-point vector with components obtained by unpacking a 32-bit unsigned integer into a pair of 16-bit values, - /// interpreting those values as 16-bit floating-point numbers according to the OpenGL Specification, - /// and converting them to 32-bit floating-point values. - /// The first component of the vector is obtained from the 16 least-significant bits of v; - /// the second component is obtained from the 16 most-significant bits of v. - /// - /// @see GLSL unpackHalf2x16 man page - /// @see GLSL 4.20.8 specification, section 8.4 Floating-Point Pack and Unpack Functions - GLM_FUNC_DECL vec2 unpackHalf2x16(uint v); - - /// @} -}//namespace glm - -#include "detail/func_packing.inl" diff --git a/third_party/glm/simd/common.h b/third_party/glm/simd/common.h deleted file mode 100755 index 9b017cb..0000000 --- a/third_party/glm/simd/common.h +++ /dev/null @@ -1,240 +0,0 @@ -/// @ref simd -/// @file glm/simd/common.h - -#pragma once - -#include "platform.h" - -#if GLM_ARCH & GLM_ARCH_SSE2_BIT - -GLM_FUNC_QUALIFIER glm_f32vec4 glm_vec4_add(glm_f32vec4 a, glm_f32vec4 b) -{ - return _mm_add_ps(a, b); -} - -GLM_FUNC_QUALIFIER glm_f32vec4 glm_vec1_add(glm_f32vec4 a, glm_f32vec4 b) -{ - return _mm_add_ss(a, b); -} - -GLM_FUNC_QUALIFIER glm_f32vec4 glm_vec4_sub(glm_f32vec4 a, glm_f32vec4 b) -{ - return _mm_sub_ps(a, b); -} - -GLM_FUNC_QUALIFIER glm_f32vec4 glm_vec1_sub(glm_f32vec4 a, glm_f32vec4 b) -{ - return _mm_sub_ss(a, b); -} - -GLM_FUNC_QUALIFIER glm_f32vec4 glm_vec4_mul(glm_f32vec4 a, glm_f32vec4 b) -{ - return _mm_mul_ps(a, b); -} - -GLM_FUNC_QUALIFIER glm_f32vec4 glm_vec1_mul(glm_f32vec4 a, glm_f32vec4 b) -{ - return _mm_mul_ss(a, b); -} - -GLM_FUNC_QUALIFIER glm_f32vec4 glm_vec4_div(glm_f32vec4 a, glm_f32vec4 b) -{ - return _mm_div_ps(a, b); -} - -GLM_FUNC_QUALIFIER glm_f32vec4 glm_vec1_div(glm_f32vec4 a, glm_f32vec4 b) -{ - return _mm_div_ss(a, b); -} - -GLM_FUNC_QUALIFIER glm_f32vec4 glm_vec4_div_lowp(glm_f32vec4 a, glm_f32vec4 b) -{ - return glm_vec4_mul(a, _mm_rcp_ps(b)); -} - -GLM_FUNC_QUALIFIER glm_f32vec4 glm_vec4_swizzle_xyzw(glm_f32vec4 a) -{ -# if GLM_ARCH & GLM_ARCH_AVX2_BIT - return _mm_permute_ps(a, _MM_SHUFFLE(3, 2, 1, 0)); -# else - return _mm_shuffle_ps(a, a, _MM_SHUFFLE(3, 2, 1, 0)); -# endif -} - -GLM_FUNC_QUALIFIER glm_f32vec4 glm_vec1_fma(glm_f32vec4 a, glm_f32vec4 b, glm_f32vec4 c) -{ -# if (GLM_ARCH & GLM_ARCH_AVX2_BIT) && !(GLM_COMPILER & GLM_COMPILER_CLANG) - return _mm_fmadd_ss(a, b, c); -# else - return _mm_add_ss(_mm_mul_ss(a, b), c); -# endif -} - -GLM_FUNC_QUALIFIER glm_f32vec4 glm_vec4_fma(glm_f32vec4 a, glm_f32vec4 b, glm_f32vec4 c) -{ -# if (GLM_ARCH & GLM_ARCH_AVX2_BIT) && !(GLM_COMPILER & GLM_COMPILER_CLANG) - return _mm_fmadd_ps(a, b, c); -# else - return glm_vec4_add(glm_vec4_mul(a, b), c); -# endif -} - -GLM_FUNC_QUALIFIER glm_f32vec4 glm_vec4_abs(glm_f32vec4 x) -{ - return _mm_and_ps(x, _mm_castsi128_ps(_mm_set1_epi32(0x7FFFFFFF))); -} - -GLM_FUNC_QUALIFIER glm_ivec4 glm_ivec4_abs(glm_ivec4 x) -{ -# if GLM_ARCH & GLM_ARCH_SSSE3_BIT - return _mm_sign_epi32(x, x); -# else - glm_ivec4 const sgn0 = _mm_srai_epi32(x, 31); - glm_ivec4 const inv0 = _mm_xor_si128(x, sgn0); - glm_ivec4 
const sub0 = _mm_sub_epi32(inv0, sgn0); - return sub0; -# endif -} - -GLM_FUNC_QUALIFIER glm_vec4 glm_vec4_sign(glm_vec4 x) -{ - glm_vec4 const zro0 = _mm_setzero_ps(); - glm_vec4 const cmp0 = _mm_cmplt_ps(x, zro0); - glm_vec4 const cmp1 = _mm_cmpgt_ps(x, zro0); - glm_vec4 const and0 = _mm_and_ps(cmp0, _mm_set1_ps(-1.0f)); - glm_vec4 const and1 = _mm_and_ps(cmp1, _mm_set1_ps(1.0f)); - glm_vec4 const or0 = _mm_or_ps(and0, and1); - return or0; -} - -GLM_FUNC_QUALIFIER glm_vec4 glm_vec4_round(glm_vec4 x) -{ -# if GLM_ARCH & GLM_ARCH_SSE41_BIT - return _mm_round_ps(x, _MM_FROUND_TO_NEAREST_INT); -# else - glm_vec4 const sgn0 = _mm_castsi128_ps(_mm_set1_epi32(int(0x80000000))); - glm_vec4 const and0 = _mm_and_ps(sgn0, x); - glm_vec4 const or0 = _mm_or_ps(and0, _mm_set_ps1(8388608.0f)); - glm_vec4 const add0 = glm_vec4_add(x, or0); - glm_vec4 const sub0 = glm_vec4_sub(add0, or0); - return sub0; -# endif -} - -GLM_FUNC_QUALIFIER glm_vec4 glm_vec4_floor(glm_vec4 x) -{ -# if GLM_ARCH & GLM_ARCH_SSE41_BIT - return _mm_floor_ps(x); -# else - glm_vec4 const rnd0 = glm_vec4_round(x); - glm_vec4 const cmp0 = _mm_cmplt_ps(x, rnd0); - glm_vec4 const and0 = _mm_and_ps(cmp0, _mm_set1_ps(1.0f)); - glm_vec4 const sub0 = glm_vec4_sub(rnd0, and0); - return sub0; -# endif -} - -/* trunc TODO -GLM_FUNC_QUALIFIER glm_vec4 glm_vec4_trunc(glm_vec4 x) -{ - return glm_vec4(); -} -*/ - -//roundEven -GLM_FUNC_QUALIFIER glm_vec4 glm_vec4_roundEven(glm_vec4 x) -{ - glm_vec4 const sgn0 = _mm_castsi128_ps(_mm_set1_epi32(int(0x80000000))); - glm_vec4 const and0 = _mm_and_ps(sgn0, x); - glm_vec4 const or0 = _mm_or_ps(and0, _mm_set_ps1(8388608.0f)); - glm_vec4 const add0 = glm_vec4_add(x, or0); - glm_vec4 const sub0 = glm_vec4_sub(add0, or0); - return sub0; -} - -GLM_FUNC_QUALIFIER glm_vec4 glm_vec4_ceil(glm_vec4 x) -{ -# if GLM_ARCH & GLM_ARCH_SSE41_BIT - return _mm_ceil_ps(x); -# else - glm_vec4 const rnd0 = glm_vec4_round(x); - glm_vec4 const cmp0 = _mm_cmpgt_ps(x, rnd0); - glm_vec4 const and0 = _mm_and_ps(cmp0, _mm_set1_ps(1.0f)); - glm_vec4 const add0 = glm_vec4_add(rnd0, and0); - return add0; -# endif -} - -GLM_FUNC_QUALIFIER glm_vec4 glm_vec4_fract(glm_vec4 x) -{ - glm_vec4 const flr0 = glm_vec4_floor(x); - glm_vec4 const sub0 = glm_vec4_sub(x, flr0); - return sub0; -} - -GLM_FUNC_QUALIFIER glm_vec4 glm_vec4_mod(glm_vec4 x, glm_vec4 y) -{ - glm_vec4 const div0 = glm_vec4_div(x, y); - glm_vec4 const flr0 = glm_vec4_floor(div0); - glm_vec4 const mul0 = glm_vec4_mul(y, flr0); - glm_vec4 const sub0 = glm_vec4_sub(x, mul0); - return sub0; -} - -GLM_FUNC_QUALIFIER glm_vec4 glm_vec4_clamp(glm_vec4 v, glm_vec4 minVal, glm_vec4 maxVal) -{ - glm_vec4 const min0 = _mm_min_ps(v, maxVal); - glm_vec4 const max0 = _mm_max_ps(min0, minVal); - return max0; -} - -GLM_FUNC_QUALIFIER glm_vec4 glm_vec4_mix(glm_vec4 v1, glm_vec4 v2, glm_vec4 a) -{ - glm_vec4 const sub0 = glm_vec4_sub(_mm_set1_ps(1.0f), a); - glm_vec4 const mul0 = glm_vec4_mul(v1, sub0); - glm_vec4 const mad0 = glm_vec4_fma(v2, a, mul0); - return mad0; -} - -GLM_FUNC_QUALIFIER glm_vec4 glm_vec4_step(glm_vec4 edge, glm_vec4 x) -{ - glm_vec4 const cmp = _mm_cmple_ps(x, edge); - return _mm_movemask_ps(cmp) == 0 ? 
_mm_set1_ps(1.0f) : _mm_setzero_ps(); -} - -GLM_FUNC_QUALIFIER glm_vec4 glm_vec4_smoothstep(glm_vec4 edge0, glm_vec4 edge1, glm_vec4 x) -{ - glm_vec4 const sub0 = glm_vec4_sub(x, edge0); - glm_vec4 const sub1 = glm_vec4_sub(edge1, edge0); - glm_vec4 const div0 = glm_vec4_sub(sub0, sub1); - glm_vec4 const clp0 = glm_vec4_clamp(div0, _mm_setzero_ps(), _mm_set1_ps(1.0f)); - glm_vec4 const mul0 = glm_vec4_mul(_mm_set1_ps(2.0f), clp0); - glm_vec4 const sub2 = glm_vec4_sub(_mm_set1_ps(3.0f), mul0); - glm_vec4 const mul1 = glm_vec4_mul(clp0, clp0); - glm_vec4 const mul2 = glm_vec4_mul(mul1, sub2); - return mul2; -} - -// Agner Fog method -GLM_FUNC_QUALIFIER glm_vec4 glm_vec4_nan(glm_vec4 x) -{ - glm_ivec4 const t1 = _mm_castps_si128(x); // reinterpret as 32-bit integer - glm_ivec4 const t2 = _mm_sll_epi32(t1, _mm_cvtsi32_si128(1)); // shift out sign bit - glm_ivec4 const t3 = _mm_set1_epi32(int(0xFF000000)); // exponent mask - glm_ivec4 const t4 = _mm_and_si128(t2, t3); // exponent - glm_ivec4 const t5 = _mm_andnot_si128(t3, t2); // fraction - glm_ivec4 const Equal = _mm_cmpeq_epi32(t3, t4); - glm_ivec4 const Nequal = _mm_cmpeq_epi32(t5, _mm_setzero_si128()); - glm_ivec4 const And = _mm_and_si128(Equal, Nequal); - return _mm_castsi128_ps(And); // exponent = all 1s and fraction != 0 -} - -// Agner Fog method -GLM_FUNC_QUALIFIER glm_vec4 glm_vec4_inf(glm_vec4 x) -{ - glm_ivec4 const t1 = _mm_castps_si128(x); // reinterpret as 32-bit integer - glm_ivec4 const t2 = _mm_sll_epi32(t1, _mm_cvtsi32_si128(1)); // shift out sign bit - return _mm_castsi128_ps(_mm_cmpeq_epi32(t2, _mm_set1_epi32(int(0xFF000000)))); // exponent is all 1s, fraction is 0 -} - -#endif//GLM_ARCH & GLM_ARCH_SSE2_BIT diff --git a/third_party/glm/simd/exponential.h b/third_party/glm/simd/exponential.h deleted file mode 100755 index bc351d0..0000000 --- a/third_party/glm/simd/exponential.h +++ /dev/null @@ -1,20 +0,0 @@ -/// @ref simd -/// @file glm/simd/experimental.h - -#pragma once - -#include "platform.h" - -#if GLM_ARCH & GLM_ARCH_SSE2_BIT - -GLM_FUNC_QUALIFIER glm_f32vec4 glm_vec1_sqrt_lowp(glm_f32vec4 x) -{ - return _mm_mul_ss(_mm_rsqrt_ss(x), x); -} - -GLM_FUNC_QUALIFIER glm_f32vec4 glm_vec4_sqrt_lowp(glm_f32vec4 x) -{ - return _mm_mul_ps(_mm_rsqrt_ps(x), x); -} - -#endif//GLM_ARCH & GLM_ARCH_SSE2_BIT diff --git a/third_party/glm/simd/geometric.h b/third_party/glm/simd/geometric.h deleted file mode 100755 index 07d7cbc..0000000 --- a/third_party/glm/simd/geometric.h +++ /dev/null @@ -1,124 +0,0 @@ -/// @ref simd -/// @file glm/simd/geometric.h - -#pragma once - -#include "common.h" - -#if GLM_ARCH & GLM_ARCH_SSE2_BIT - -GLM_FUNC_DECL glm_vec4 glm_vec4_dot(glm_vec4 v1, glm_vec4 v2); -GLM_FUNC_DECL glm_vec4 glm_vec1_dot(glm_vec4 v1, glm_vec4 v2); - -GLM_FUNC_QUALIFIER glm_vec4 glm_vec4_length(glm_vec4 x) -{ - glm_vec4 const dot0 = glm_vec4_dot(x, x); - glm_vec4 const sqt0 = _mm_sqrt_ps(dot0); - return sqt0; -} - -GLM_FUNC_QUALIFIER glm_vec4 glm_vec4_distance(glm_vec4 p0, glm_vec4 p1) -{ - glm_vec4 const sub0 = _mm_sub_ps(p0, p1); - glm_vec4 const len0 = glm_vec4_length(sub0); - return len0; -} - -GLM_FUNC_QUALIFIER glm_vec4 glm_vec4_dot(glm_vec4 v1, glm_vec4 v2) -{ -# if GLM_ARCH & GLM_ARCH_AVX_BIT - return _mm_dp_ps(v1, v2, 0xff); -# elif GLM_ARCH & GLM_ARCH_SSE3_BIT - glm_vec4 const mul0 = _mm_mul_ps(v1, v2); - glm_vec4 const hadd0 = _mm_hadd_ps(mul0, mul0); - glm_vec4 const hadd1 = _mm_hadd_ps(hadd0, hadd0); - return hadd1; -# else - glm_vec4 const mul0 = _mm_mul_ps(v1, v2); - glm_vec4 const swp0 = _mm_shuffle_ps(mul0, mul0, 
_MM_SHUFFLE(2, 3, 0, 1)); - glm_vec4 const add0 = _mm_add_ps(mul0, swp0); - glm_vec4 const swp1 = _mm_shuffle_ps(add0, add0, _MM_SHUFFLE(0, 1, 2, 3)); - glm_vec4 const add1 = _mm_add_ps(add0, swp1); - return add1; -# endif -} - -GLM_FUNC_QUALIFIER glm_vec4 glm_vec1_dot(glm_vec4 v1, glm_vec4 v2) -{ -# if GLM_ARCH & GLM_ARCH_AVX_BIT - return _mm_dp_ps(v1, v2, 0xff); -# elif GLM_ARCH & GLM_ARCH_SSE3_BIT - glm_vec4 const mul0 = _mm_mul_ps(v1, v2); - glm_vec4 const had0 = _mm_hadd_ps(mul0, mul0); - glm_vec4 const had1 = _mm_hadd_ps(had0, had0); - return had1; -# else - glm_vec4 const mul0 = _mm_mul_ps(v1, v2); - glm_vec4 const mov0 = _mm_movehl_ps(mul0, mul0); - glm_vec4 const add0 = _mm_add_ps(mov0, mul0); - glm_vec4 const swp1 = _mm_shuffle_ps(add0, add0, 1); - glm_vec4 const add1 = _mm_add_ss(add0, swp1); - return add1; -# endif -} - -GLM_FUNC_QUALIFIER glm_vec4 glm_vec4_cross(glm_vec4 v1, glm_vec4 v2) -{ - glm_vec4 const swp0 = _mm_shuffle_ps(v1, v1, _MM_SHUFFLE(3, 0, 2, 1)); - glm_vec4 const swp1 = _mm_shuffle_ps(v1, v1, _MM_SHUFFLE(3, 1, 0, 2)); - glm_vec4 const swp2 = _mm_shuffle_ps(v2, v2, _MM_SHUFFLE(3, 0, 2, 1)); - glm_vec4 const swp3 = _mm_shuffle_ps(v2, v2, _MM_SHUFFLE(3, 1, 0, 2)); - glm_vec4 const mul0 = _mm_mul_ps(swp0, swp3); - glm_vec4 const mul1 = _mm_mul_ps(swp1, swp2); - glm_vec4 const sub0 = _mm_sub_ps(mul0, mul1); - return sub0; -} - -GLM_FUNC_QUALIFIER glm_vec4 glm_vec4_normalize(glm_vec4 v) -{ - glm_vec4 const dot0 = glm_vec4_dot(v, v); - glm_vec4 const isr0 = _mm_rsqrt_ps(dot0); - glm_vec4 const mul0 = _mm_mul_ps(v, isr0); - return mul0; -} - -GLM_FUNC_QUALIFIER glm_vec4 glm_vec4_faceforward(glm_vec4 N, glm_vec4 I, glm_vec4 Nref) -{ - glm_vec4 const dot0 = glm_vec4_dot(Nref, I); - glm_vec4 const sgn0 = glm_vec4_sign(dot0); - glm_vec4 const mul0 = _mm_mul_ps(sgn0, _mm_set1_ps(-1.0f)); - glm_vec4 const mul1 = _mm_mul_ps(N, mul0); - return mul1; -} - -GLM_FUNC_QUALIFIER glm_vec4 glm_vec4_reflect(glm_vec4 I, glm_vec4 N) -{ - glm_vec4 const dot0 = glm_vec4_dot(N, I); - glm_vec4 const mul0 = _mm_mul_ps(N, dot0); - glm_vec4 const mul1 = _mm_mul_ps(mul0, _mm_set1_ps(2.0f)); - glm_vec4 const sub0 = _mm_sub_ps(I, mul1); - return sub0; -} - -GLM_FUNC_QUALIFIER __m128 glm_vec4_refract(glm_vec4 I, glm_vec4 N, glm_vec4 eta) -{ - glm_vec4 const dot0 = glm_vec4_dot(N, I); - glm_vec4 const mul0 = _mm_mul_ps(eta, eta); - glm_vec4 const mul1 = _mm_mul_ps(dot0, dot0); - glm_vec4 const sub0 = _mm_sub_ps(_mm_set1_ps(1.0f), mul0); - glm_vec4 const sub1 = _mm_sub_ps(_mm_set1_ps(1.0f), mul1); - glm_vec4 const mul2 = _mm_mul_ps(sub0, sub1); - - if(_mm_movemask_ps(_mm_cmplt_ss(mul2, _mm_set1_ps(0.0f))) == 0) - return _mm_set1_ps(0.0f); - - glm_vec4 const sqt0 = _mm_sqrt_ps(mul2); - glm_vec4 const mad0 = glm_vec4_fma(eta, dot0, sqt0); - glm_vec4 const mul4 = _mm_mul_ps(mad0, N); - glm_vec4 const mul5 = _mm_mul_ps(eta, I); - glm_vec4 const sub2 = _mm_sub_ps(mul5, mul4); - - return sub2; -} - -#endif//GLM_ARCH & GLM_ARCH_SSE2_BIT diff --git a/third_party/glm/simd/integer.h b/third_party/glm/simd/integer.h deleted file mode 100755 index 9381418..0000000 --- a/third_party/glm/simd/integer.h +++ /dev/null @@ -1,115 +0,0 @@ -/// @ref simd -/// @file glm/simd/integer.h - -#pragma once - -#if GLM_ARCH & GLM_ARCH_SSE2_BIT - -GLM_FUNC_QUALIFIER glm_uvec4 glm_i128_interleave(glm_uvec4 x) -{ - glm_uvec4 const Mask4 = _mm_set1_epi32(0x0000FFFF); - glm_uvec4 const Mask3 = _mm_set1_epi32(0x00FF00FF); - glm_uvec4 const Mask2 = _mm_set1_epi32(0x0F0F0F0F); - glm_uvec4 const Mask1 = _mm_set1_epi32(0x33333333); - 
glm_uvec4 const Mask0 = _mm_set1_epi32(0x55555555); - - glm_uvec4 Reg1; - glm_uvec4 Reg2; - - // REG1 = x; - // REG2 = y; - //Reg1 = _mm_unpacklo_epi64(x, y); - Reg1 = x; - - //REG1 = ((REG1 << 16) | REG1) & glm::uint64(0x0000FFFF0000FFFF); - //REG2 = ((REG2 << 16) | REG2) & glm::uint64(0x0000FFFF0000FFFF); - Reg2 = _mm_slli_si128(Reg1, 2); - Reg1 = _mm_or_si128(Reg2, Reg1); - Reg1 = _mm_and_si128(Reg1, Mask4); - - //REG1 = ((REG1 << 8) | REG1) & glm::uint64(0x00FF00FF00FF00FF); - //REG2 = ((REG2 << 8) | REG2) & glm::uint64(0x00FF00FF00FF00FF); - Reg2 = _mm_slli_si128(Reg1, 1); - Reg1 = _mm_or_si128(Reg2, Reg1); - Reg1 = _mm_and_si128(Reg1, Mask3); - - //REG1 = ((REG1 << 4) | REG1) & glm::uint64(0x0F0F0F0F0F0F0F0F); - //REG2 = ((REG2 << 4) | REG2) & glm::uint64(0x0F0F0F0F0F0F0F0F); - Reg2 = _mm_slli_epi32(Reg1, 4); - Reg1 = _mm_or_si128(Reg2, Reg1); - Reg1 = _mm_and_si128(Reg1, Mask2); - - //REG1 = ((REG1 << 2) | REG1) & glm::uint64(0x3333333333333333); - //REG2 = ((REG2 << 2) | REG2) & glm::uint64(0x3333333333333333); - Reg2 = _mm_slli_epi32(Reg1, 2); - Reg1 = _mm_or_si128(Reg2, Reg1); - Reg1 = _mm_and_si128(Reg1, Mask1); - - //REG1 = ((REG1 << 1) | REG1) & glm::uint64(0x5555555555555555); - //REG2 = ((REG2 << 1) | REG2) & glm::uint64(0x5555555555555555); - Reg2 = _mm_slli_epi32(Reg1, 1); - Reg1 = _mm_or_si128(Reg2, Reg1); - Reg1 = _mm_and_si128(Reg1, Mask0); - - //return REG1 | (REG2 << 1); - Reg2 = _mm_slli_epi32(Reg1, 1); - Reg2 = _mm_srli_si128(Reg2, 8); - Reg1 = _mm_or_si128(Reg1, Reg2); - - return Reg1; -} - -GLM_FUNC_QUALIFIER glm_uvec4 glm_i128_interleave2(glm_uvec4 x, glm_uvec4 y) -{ - glm_uvec4 const Mask4 = _mm_set1_epi32(0x0000FFFF); - glm_uvec4 const Mask3 = _mm_set1_epi32(0x00FF00FF); - glm_uvec4 const Mask2 = _mm_set1_epi32(0x0F0F0F0F); - glm_uvec4 const Mask1 = _mm_set1_epi32(0x33333333); - glm_uvec4 const Mask0 = _mm_set1_epi32(0x55555555); - - glm_uvec4 Reg1; - glm_uvec4 Reg2; - - // REG1 = x; - // REG2 = y; - Reg1 = _mm_unpacklo_epi64(x, y); - - //REG1 = ((REG1 << 16) | REG1) & glm::uint64(0x0000FFFF0000FFFF); - //REG2 = ((REG2 << 16) | REG2) & glm::uint64(0x0000FFFF0000FFFF); - Reg2 = _mm_slli_si128(Reg1, 2); - Reg1 = _mm_or_si128(Reg2, Reg1); - Reg1 = _mm_and_si128(Reg1, Mask4); - - //REG1 = ((REG1 << 8) | REG1) & glm::uint64(0x00FF00FF00FF00FF); - //REG2 = ((REG2 << 8) | REG2) & glm::uint64(0x00FF00FF00FF00FF); - Reg2 = _mm_slli_si128(Reg1, 1); - Reg1 = _mm_or_si128(Reg2, Reg1); - Reg1 = _mm_and_si128(Reg1, Mask3); - - //REG1 = ((REG1 << 4) | REG1) & glm::uint64(0x0F0F0F0F0F0F0F0F); - //REG2 = ((REG2 << 4) | REG2) & glm::uint64(0x0F0F0F0F0F0F0F0F); - Reg2 = _mm_slli_epi32(Reg1, 4); - Reg1 = _mm_or_si128(Reg2, Reg1); - Reg1 = _mm_and_si128(Reg1, Mask2); - - //REG1 = ((REG1 << 2) | REG1) & glm::uint64(0x3333333333333333); - //REG2 = ((REG2 << 2) | REG2) & glm::uint64(0x3333333333333333); - Reg2 = _mm_slli_epi32(Reg1, 2); - Reg1 = _mm_or_si128(Reg2, Reg1); - Reg1 = _mm_and_si128(Reg1, Mask1); - - //REG1 = ((REG1 << 1) | REG1) & glm::uint64(0x5555555555555555); - //REG2 = ((REG2 << 1) | REG2) & glm::uint64(0x5555555555555555); - Reg2 = _mm_slli_epi32(Reg1, 1); - Reg1 = _mm_or_si128(Reg2, Reg1); - Reg1 = _mm_and_si128(Reg1, Mask0); - - //return REG1 | (REG2 << 1); - Reg2 = _mm_slli_epi32(Reg1, 1); - Reg2 = _mm_srli_si128(Reg2, 8); - Reg1 = _mm_or_si128(Reg1, Reg2); - - return Reg1; -} - -#endif//GLM_ARCH & GLM_ARCH_SSE2_BIT diff --git a/third_party/glm/simd/matrix.h b/third_party/glm/simd/matrix.h deleted file mode 100755 index b6c42ea..0000000 --- 
a/third_party/glm/simd/matrix.h +++ /dev/null @@ -1,1028 +0,0 @@ -/// @ref simd -/// @file glm/simd/matrix.h - -#pragma once - -#include "geometric.h" - -#if GLM_ARCH & GLM_ARCH_SSE2_BIT - -GLM_FUNC_QUALIFIER void glm_mat4_matrixCompMult(glm_vec4 const in1[4], glm_vec4 const in2[4], glm_vec4 out[4]) -{ - out[0] = _mm_mul_ps(in1[0], in2[0]); - out[1] = _mm_mul_ps(in1[1], in2[1]); - out[2] = _mm_mul_ps(in1[2], in2[2]); - out[3] = _mm_mul_ps(in1[3], in2[3]); -} - -GLM_FUNC_QUALIFIER void glm_mat4_add(glm_vec4 const in1[4], glm_vec4 const in2[4], glm_vec4 out[4]) -{ - out[0] = _mm_add_ps(in1[0], in2[0]); - out[1] = _mm_add_ps(in1[1], in2[1]); - out[2] = _mm_add_ps(in1[2], in2[2]); - out[3] = _mm_add_ps(in1[3], in2[3]); -} - -GLM_FUNC_QUALIFIER void glm_mat4_sub(glm_vec4 const in1[4], glm_vec4 const in2[4], glm_vec4 out[4]) -{ - out[0] = _mm_sub_ps(in1[0], in2[0]); - out[1] = _mm_sub_ps(in1[1], in2[1]); - out[2] = _mm_sub_ps(in1[2], in2[2]); - out[3] = _mm_sub_ps(in1[3], in2[3]); -} - -GLM_FUNC_QUALIFIER glm_vec4 glm_mat4_mul_vec4(glm_vec4 const m[4], glm_vec4 v) -{ - __m128 v0 = _mm_shuffle_ps(v, v, _MM_SHUFFLE(0, 0, 0, 0)); - __m128 v1 = _mm_shuffle_ps(v, v, _MM_SHUFFLE(1, 1, 1, 1)); - __m128 v2 = _mm_shuffle_ps(v, v, _MM_SHUFFLE(2, 2, 2, 2)); - __m128 v3 = _mm_shuffle_ps(v, v, _MM_SHUFFLE(3, 3, 3, 3)); - - __m128 m0 = _mm_mul_ps(m[0], v0); - __m128 m1 = _mm_mul_ps(m[1], v1); - __m128 m2 = _mm_mul_ps(m[2], v2); - __m128 m3 = _mm_mul_ps(m[3], v3); - - __m128 a0 = _mm_add_ps(m0, m1); - __m128 a1 = _mm_add_ps(m2, m3); - __m128 a2 = _mm_add_ps(a0, a1); - - return a2; -} - -GLM_FUNC_QUALIFIER __m128 glm_vec4_mul_mat4(glm_vec4 v, glm_vec4 const m[4]) -{ - __m128 i0 = m[0]; - __m128 i1 = m[1]; - __m128 i2 = m[2]; - __m128 i3 = m[3]; - - __m128 m0 = _mm_mul_ps(v, i0); - __m128 m1 = _mm_mul_ps(v, i1); - __m128 m2 = _mm_mul_ps(v, i2); - __m128 m3 = _mm_mul_ps(v, i3); - - __m128 u0 = _mm_unpacklo_ps(m0, m1); - __m128 u1 = _mm_unpackhi_ps(m0, m1); - __m128 a0 = _mm_add_ps(u0, u1); - - __m128 u2 = _mm_unpacklo_ps(m2, m3); - __m128 u3 = _mm_unpackhi_ps(m2, m3); - __m128 a1 = _mm_add_ps(u2, u3); - - __m128 f0 = _mm_movelh_ps(a0, a1); - __m128 f1 = _mm_movehl_ps(a1, a0); - __m128 f2 = _mm_add_ps(f0, f1); - - return f2; -} - -GLM_FUNC_QUALIFIER void glm_mat4_mul(glm_vec4 const in1[4], glm_vec4 const in2[4], glm_vec4 out[4]) -{ - { - __m128 e0 = _mm_shuffle_ps(in2[0], in2[0], _MM_SHUFFLE(0, 0, 0, 0)); - __m128 e1 = _mm_shuffle_ps(in2[0], in2[0], _MM_SHUFFLE(1, 1, 1, 1)); - __m128 e2 = _mm_shuffle_ps(in2[0], in2[0], _MM_SHUFFLE(2, 2, 2, 2)); - __m128 e3 = _mm_shuffle_ps(in2[0], in2[0], _MM_SHUFFLE(3, 3, 3, 3)); - - __m128 m0 = _mm_mul_ps(in1[0], e0); - __m128 m1 = _mm_mul_ps(in1[1], e1); - __m128 m2 = _mm_mul_ps(in1[2], e2); - __m128 m3 = _mm_mul_ps(in1[3], e3); - - __m128 a0 = _mm_add_ps(m0, m1); - __m128 a1 = _mm_add_ps(m2, m3); - __m128 a2 = _mm_add_ps(a0, a1); - - out[0] = a2; - } - - { - __m128 e0 = _mm_shuffle_ps(in2[1], in2[1], _MM_SHUFFLE(0, 0, 0, 0)); - __m128 e1 = _mm_shuffle_ps(in2[1], in2[1], _MM_SHUFFLE(1, 1, 1, 1)); - __m128 e2 = _mm_shuffle_ps(in2[1], in2[1], _MM_SHUFFLE(2, 2, 2, 2)); - __m128 e3 = _mm_shuffle_ps(in2[1], in2[1], _MM_SHUFFLE(3, 3, 3, 3)); - - __m128 m0 = _mm_mul_ps(in1[0], e0); - __m128 m1 = _mm_mul_ps(in1[1], e1); - __m128 m2 = _mm_mul_ps(in1[2], e2); - __m128 m3 = _mm_mul_ps(in1[3], e3); - - __m128 a0 = _mm_add_ps(m0, m1); - __m128 a1 = _mm_add_ps(m2, m3); - __m128 a2 = _mm_add_ps(a0, a1); - - out[1] = a2; - } - - { - __m128 e0 = _mm_shuffle_ps(in2[2], in2[2], _MM_SHUFFLE(0, 0, 
0, 0)); - __m128 e1 = _mm_shuffle_ps(in2[2], in2[2], _MM_SHUFFLE(1, 1, 1, 1)); - __m128 e2 = _mm_shuffle_ps(in2[2], in2[2], _MM_SHUFFLE(2, 2, 2, 2)); - __m128 e3 = _mm_shuffle_ps(in2[2], in2[2], _MM_SHUFFLE(3, 3, 3, 3)); - - __m128 m0 = _mm_mul_ps(in1[0], e0); - __m128 m1 = _mm_mul_ps(in1[1], e1); - __m128 m2 = _mm_mul_ps(in1[2], e2); - __m128 m3 = _mm_mul_ps(in1[3], e3); - - __m128 a0 = _mm_add_ps(m0, m1); - __m128 a1 = _mm_add_ps(m2, m3); - __m128 a2 = _mm_add_ps(a0, a1); - - out[2] = a2; - } - - { - //(__m128&)_mm_shuffle_epi32(__m128i&)in2[0], _MM_SHUFFLE(3, 3, 3, 3)) - __m128 e0 = _mm_shuffle_ps(in2[3], in2[3], _MM_SHUFFLE(0, 0, 0, 0)); - __m128 e1 = _mm_shuffle_ps(in2[3], in2[3], _MM_SHUFFLE(1, 1, 1, 1)); - __m128 e2 = _mm_shuffle_ps(in2[3], in2[3], _MM_SHUFFLE(2, 2, 2, 2)); - __m128 e3 = _mm_shuffle_ps(in2[3], in2[3], _MM_SHUFFLE(3, 3, 3, 3)); - - __m128 m0 = _mm_mul_ps(in1[0], e0); - __m128 m1 = _mm_mul_ps(in1[1], e1); - __m128 m2 = _mm_mul_ps(in1[2], e2); - __m128 m3 = _mm_mul_ps(in1[3], e3); - - __m128 a0 = _mm_add_ps(m0, m1); - __m128 a1 = _mm_add_ps(m2, m3); - __m128 a2 = _mm_add_ps(a0, a1); - - out[3] = a2; - } -} - -GLM_FUNC_QUALIFIER void glm_mat4_transpose(glm_vec4 const in[4], glm_vec4 out[4]) -{ - __m128 tmp0 = _mm_shuffle_ps(in[0], in[1], 0x44); - __m128 tmp2 = _mm_shuffle_ps(in[0], in[1], 0xEE); - __m128 tmp1 = _mm_shuffle_ps(in[2], in[3], 0x44); - __m128 tmp3 = _mm_shuffle_ps(in[2], in[3], 0xEE); - - out[0] = _mm_shuffle_ps(tmp0, tmp1, 0x88); - out[1] = _mm_shuffle_ps(tmp0, tmp1, 0xDD); - out[2] = _mm_shuffle_ps(tmp2, tmp3, 0x88); - out[3] = _mm_shuffle_ps(tmp2, tmp3, 0xDD); -} - -GLM_FUNC_QUALIFIER glm_vec4 glm_mat4_determinant_highp(glm_vec4 const in[4]) -{ - __m128 Fac0; - { - // valType SubFactor00 = m[2][2] * m[3][3] - m[3][2] * m[2][3]; - // valType SubFactor00 = m[2][2] * m[3][3] - m[3][2] * m[2][3]; - // valType SubFactor06 = m[1][2] * m[3][3] - m[3][2] * m[1][3]; - // valType SubFactor13 = m[1][2] * m[2][3] - m[2][2] * m[1][3]; - - __m128 Swp0a = _mm_shuffle_ps(in[3], in[2], _MM_SHUFFLE(3, 3, 3, 3)); - __m128 Swp0b = _mm_shuffle_ps(in[3], in[2], _MM_SHUFFLE(2, 2, 2, 2)); - - __m128 Swp00 = _mm_shuffle_ps(in[2], in[1], _MM_SHUFFLE(2, 2, 2, 2)); - __m128 Swp01 = _mm_shuffle_ps(Swp0a, Swp0a, _MM_SHUFFLE(2, 0, 0, 0)); - __m128 Swp02 = _mm_shuffle_ps(Swp0b, Swp0b, _MM_SHUFFLE(2, 0, 0, 0)); - __m128 Swp03 = _mm_shuffle_ps(in[2], in[1], _MM_SHUFFLE(3, 3, 3, 3)); - - __m128 Mul00 = _mm_mul_ps(Swp00, Swp01); - __m128 Mul01 = _mm_mul_ps(Swp02, Swp03); - Fac0 = _mm_sub_ps(Mul00, Mul01); - } - - __m128 Fac1; - { - // valType SubFactor01 = m[2][1] * m[3][3] - m[3][1] * m[2][3]; - // valType SubFactor01 = m[2][1] * m[3][3] - m[3][1] * m[2][3]; - // valType SubFactor07 = m[1][1] * m[3][3] - m[3][1] * m[1][3]; - // valType SubFactor14 = m[1][1] * m[2][3] - m[2][1] * m[1][3]; - - __m128 Swp0a = _mm_shuffle_ps(in[3], in[2], _MM_SHUFFLE(3, 3, 3, 3)); - __m128 Swp0b = _mm_shuffle_ps(in[3], in[2], _MM_SHUFFLE(1, 1, 1, 1)); - - __m128 Swp00 = _mm_shuffle_ps(in[2], in[1], _MM_SHUFFLE(1, 1, 1, 1)); - __m128 Swp01 = _mm_shuffle_ps(Swp0a, Swp0a, _MM_SHUFFLE(2, 0, 0, 0)); - __m128 Swp02 = _mm_shuffle_ps(Swp0b, Swp0b, _MM_SHUFFLE(2, 0, 0, 0)); - __m128 Swp03 = _mm_shuffle_ps(in[2], in[1], _MM_SHUFFLE(3, 3, 3, 3)); - - __m128 Mul00 = _mm_mul_ps(Swp00, Swp01); - __m128 Mul01 = _mm_mul_ps(Swp02, Swp03); - Fac1 = _mm_sub_ps(Mul00, Mul01); - } - - - __m128 Fac2; - { - // valType SubFactor02 = m[2][1] * m[3][2] - m[3][1] * m[2][2]; - // valType SubFactor02 = m[2][1] * m[3][2] - m[3][1] * 
m[2][2]; - // valType SubFactor08 = m[1][1] * m[3][2] - m[3][1] * m[1][2]; - // valType SubFactor15 = m[1][1] * m[2][2] - m[2][1] * m[1][2]; - - __m128 Swp0a = _mm_shuffle_ps(in[3], in[2], _MM_SHUFFLE(2, 2, 2, 2)); - __m128 Swp0b = _mm_shuffle_ps(in[3], in[2], _MM_SHUFFLE(1, 1, 1, 1)); - - __m128 Swp00 = _mm_shuffle_ps(in[2], in[1], _MM_SHUFFLE(1, 1, 1, 1)); - __m128 Swp01 = _mm_shuffle_ps(Swp0a, Swp0a, _MM_SHUFFLE(2, 0, 0, 0)); - __m128 Swp02 = _mm_shuffle_ps(Swp0b, Swp0b, _MM_SHUFFLE(2, 0, 0, 0)); - __m128 Swp03 = _mm_shuffle_ps(in[2], in[1], _MM_SHUFFLE(2, 2, 2, 2)); - - __m128 Mul00 = _mm_mul_ps(Swp00, Swp01); - __m128 Mul01 = _mm_mul_ps(Swp02, Swp03); - Fac2 = _mm_sub_ps(Mul00, Mul01); - } - - __m128 Fac3; - { - // valType SubFactor03 = m[2][0] * m[3][3] - m[3][0] * m[2][3]; - // valType SubFactor03 = m[2][0] * m[3][3] - m[3][0] * m[2][3]; - // valType SubFactor09 = m[1][0] * m[3][3] - m[3][0] * m[1][3]; - // valType SubFactor16 = m[1][0] * m[2][3] - m[2][0] * m[1][3]; - - __m128 Swp0a = _mm_shuffle_ps(in[3], in[2], _MM_SHUFFLE(3, 3, 3, 3)); - __m128 Swp0b = _mm_shuffle_ps(in[3], in[2], _MM_SHUFFLE(0, 0, 0, 0)); - - __m128 Swp00 = _mm_shuffle_ps(in[2], in[1], _MM_SHUFFLE(0, 0, 0, 0)); - __m128 Swp01 = _mm_shuffle_ps(Swp0a, Swp0a, _MM_SHUFFLE(2, 0, 0, 0)); - __m128 Swp02 = _mm_shuffle_ps(Swp0b, Swp0b, _MM_SHUFFLE(2, 0, 0, 0)); - __m128 Swp03 = _mm_shuffle_ps(in[2], in[1], _MM_SHUFFLE(3, 3, 3, 3)); - - __m128 Mul00 = _mm_mul_ps(Swp00, Swp01); - __m128 Mul01 = _mm_mul_ps(Swp02, Swp03); - Fac3 = _mm_sub_ps(Mul00, Mul01); - } - - __m128 Fac4; - { - // valType SubFactor04 = m[2][0] * m[3][2] - m[3][0] * m[2][2]; - // valType SubFactor04 = m[2][0] * m[3][2] - m[3][0] * m[2][2]; - // valType SubFactor10 = m[1][0] * m[3][2] - m[3][0] * m[1][2]; - // valType SubFactor17 = m[1][0] * m[2][2] - m[2][0] * m[1][2]; - - __m128 Swp0a = _mm_shuffle_ps(in[3], in[2], _MM_SHUFFLE(2, 2, 2, 2)); - __m128 Swp0b = _mm_shuffle_ps(in[3], in[2], _MM_SHUFFLE(0, 0, 0, 0)); - - __m128 Swp00 = _mm_shuffle_ps(in[2], in[1], _MM_SHUFFLE(0, 0, 0, 0)); - __m128 Swp01 = _mm_shuffle_ps(Swp0a, Swp0a, _MM_SHUFFLE(2, 0, 0, 0)); - __m128 Swp02 = _mm_shuffle_ps(Swp0b, Swp0b, _MM_SHUFFLE(2, 0, 0, 0)); - __m128 Swp03 = _mm_shuffle_ps(in[2], in[1], _MM_SHUFFLE(2, 2, 2, 2)); - - __m128 Mul00 = _mm_mul_ps(Swp00, Swp01); - __m128 Mul01 = _mm_mul_ps(Swp02, Swp03); - Fac4 = _mm_sub_ps(Mul00, Mul01); - } - - __m128 Fac5; - { - // valType SubFactor05 = m[2][0] * m[3][1] - m[3][0] * m[2][1]; - // valType SubFactor05 = m[2][0] * m[3][1] - m[3][0] * m[2][1]; - // valType SubFactor12 = m[1][0] * m[3][1] - m[3][0] * m[1][1]; - // valType SubFactor18 = m[1][0] * m[2][1] - m[2][0] * m[1][1]; - - __m128 Swp0a = _mm_shuffle_ps(in[3], in[2], _MM_SHUFFLE(1, 1, 1, 1)); - __m128 Swp0b = _mm_shuffle_ps(in[3], in[2], _MM_SHUFFLE(0, 0, 0, 0)); - - __m128 Swp00 = _mm_shuffle_ps(in[2], in[1], _MM_SHUFFLE(0, 0, 0, 0)); - __m128 Swp01 = _mm_shuffle_ps(Swp0a, Swp0a, _MM_SHUFFLE(2, 0, 0, 0)); - __m128 Swp02 = _mm_shuffle_ps(Swp0b, Swp0b, _MM_SHUFFLE(2, 0, 0, 0)); - __m128 Swp03 = _mm_shuffle_ps(in[2], in[1], _MM_SHUFFLE(1, 1, 1, 1)); - - __m128 Mul00 = _mm_mul_ps(Swp00, Swp01); - __m128 Mul01 = _mm_mul_ps(Swp02, Swp03); - Fac5 = _mm_sub_ps(Mul00, Mul01); - } - - __m128 SignA = _mm_set_ps( 1.0f,-1.0f, 1.0f,-1.0f); - __m128 SignB = _mm_set_ps(-1.0f, 1.0f,-1.0f, 1.0f); - - // m[1][0] - // m[0][0] - // m[0][0] - // m[0][0] - __m128 Temp0 = _mm_shuffle_ps(in[1], in[0], _MM_SHUFFLE(0, 0, 0, 0)); - __m128 Vec0 = _mm_shuffle_ps(Temp0, Temp0, _MM_SHUFFLE(2, 2, 2, 0)); 
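(Side note on the SIMD path being removed: the shuffle sequence in glm_mat4_transpose() earlier in this file has the same effect as the stock _MM_TRANSPOSE4_PS macro from the SSE headers; a tiny reference sketch, with the function name being ours.)

    #include <xmmintrin.h>

    void transpose4x4_sse(__m128 rows[4])
    {
        // Transposes the four row registers in place.
        _MM_TRANSPOSE4_PS(rows[0], rows[1], rows[2], rows[3]);
    }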
- - // m[1][1] - // m[0][1] - // m[0][1] - // m[0][1] - __m128 Temp1 = _mm_shuffle_ps(in[1], in[0], _MM_SHUFFLE(1, 1, 1, 1)); - __m128 Vec1 = _mm_shuffle_ps(Temp1, Temp1, _MM_SHUFFLE(2, 2, 2, 0)); - - // m[1][2] - // m[0][2] - // m[0][2] - // m[0][2] - __m128 Temp2 = _mm_shuffle_ps(in[1], in[0], _MM_SHUFFLE(2, 2, 2, 2)); - __m128 Vec2 = _mm_shuffle_ps(Temp2, Temp2, _MM_SHUFFLE(2, 2, 2, 0)); - - // m[1][3] - // m[0][3] - // m[0][3] - // m[0][3] - __m128 Temp3 = _mm_shuffle_ps(in[1], in[0], _MM_SHUFFLE(3, 3, 3, 3)); - __m128 Vec3 = _mm_shuffle_ps(Temp3, Temp3, _MM_SHUFFLE(2, 2, 2, 0)); - - // col0 - // + (Vec1[0] * Fac0[0] - Vec2[0] * Fac1[0] + Vec3[0] * Fac2[0]), - // - (Vec1[1] * Fac0[1] - Vec2[1] * Fac1[1] + Vec3[1] * Fac2[1]), - // + (Vec1[2] * Fac0[2] - Vec2[2] * Fac1[2] + Vec3[2] * Fac2[2]), - // - (Vec1[3] * Fac0[3] - Vec2[3] * Fac1[3] + Vec3[3] * Fac2[3]), - __m128 Mul00 = _mm_mul_ps(Vec1, Fac0); - __m128 Mul01 = _mm_mul_ps(Vec2, Fac1); - __m128 Mul02 = _mm_mul_ps(Vec3, Fac2); - __m128 Sub00 = _mm_sub_ps(Mul00, Mul01); - __m128 Add00 = _mm_add_ps(Sub00, Mul02); - __m128 Inv0 = _mm_mul_ps(SignB, Add00); - - // col1 - // - (Vec0[0] * Fac0[0] - Vec2[0] * Fac3[0] + Vec3[0] * Fac4[0]), - // + (Vec0[0] * Fac0[1] - Vec2[1] * Fac3[1] + Vec3[1] * Fac4[1]), - // - (Vec0[0] * Fac0[2] - Vec2[2] * Fac3[2] + Vec3[2] * Fac4[2]), - // + (Vec0[0] * Fac0[3] - Vec2[3] * Fac3[3] + Vec3[3] * Fac4[3]), - __m128 Mul03 = _mm_mul_ps(Vec0, Fac0); - __m128 Mul04 = _mm_mul_ps(Vec2, Fac3); - __m128 Mul05 = _mm_mul_ps(Vec3, Fac4); - __m128 Sub01 = _mm_sub_ps(Mul03, Mul04); - __m128 Add01 = _mm_add_ps(Sub01, Mul05); - __m128 Inv1 = _mm_mul_ps(SignA, Add01); - - // col2 - // + (Vec0[0] * Fac1[0] - Vec1[0] * Fac3[0] + Vec3[0] * Fac5[0]), - // - (Vec0[0] * Fac1[1] - Vec1[1] * Fac3[1] + Vec3[1] * Fac5[1]), - // + (Vec0[0] * Fac1[2] - Vec1[2] * Fac3[2] + Vec3[2] * Fac5[2]), - // - (Vec0[0] * Fac1[3] - Vec1[3] * Fac3[3] + Vec3[3] * Fac5[3]), - __m128 Mul06 = _mm_mul_ps(Vec0, Fac1); - __m128 Mul07 = _mm_mul_ps(Vec1, Fac3); - __m128 Mul08 = _mm_mul_ps(Vec3, Fac5); - __m128 Sub02 = _mm_sub_ps(Mul06, Mul07); - __m128 Add02 = _mm_add_ps(Sub02, Mul08); - __m128 Inv2 = _mm_mul_ps(SignB, Add02); - - // col3 - // - (Vec1[0] * Fac2[0] - Vec1[0] * Fac4[0] + Vec2[0] * Fac5[0]), - // + (Vec1[0] * Fac2[1] - Vec1[1] * Fac4[1] + Vec2[1] * Fac5[1]), - // - (Vec1[0] * Fac2[2] - Vec1[2] * Fac4[2] + Vec2[2] * Fac5[2]), - // + (Vec1[0] * Fac2[3] - Vec1[3] * Fac4[3] + Vec2[3] * Fac5[3])); - __m128 Mul09 = _mm_mul_ps(Vec0, Fac2); - __m128 Mul10 = _mm_mul_ps(Vec1, Fac4); - __m128 Mul11 = _mm_mul_ps(Vec2, Fac5); - __m128 Sub03 = _mm_sub_ps(Mul09, Mul10); - __m128 Add03 = _mm_add_ps(Sub03, Mul11); - __m128 Inv3 = _mm_mul_ps(SignA, Add03); - - __m128 Row0 = _mm_shuffle_ps(Inv0, Inv1, _MM_SHUFFLE(0, 0, 0, 0)); - __m128 Row1 = _mm_shuffle_ps(Inv2, Inv3, _MM_SHUFFLE(0, 0, 0, 0)); - __m128 Row2 = _mm_shuffle_ps(Row0, Row1, _MM_SHUFFLE(2, 0, 2, 0)); - - // valType Determinant = m[0][0] * Inverse[0][0] - // + m[0][1] * Inverse[1][0] - // + m[0][2] * Inverse[2][0] - // + m[0][3] * Inverse[3][0]; - __m128 Det0 = glm_vec4_dot(in[0], Row2); - return Det0; -} - -GLM_FUNC_QUALIFIER glm_vec4 glm_mat4_determinant_lowp(glm_vec4 const m[4]) -{ - // _mm_castsi128_ps(_mm_shuffle_epi32(_mm_castps_si128( - - //T SubFactor00 = m[2][2] * m[3][3] - m[3][2] * m[2][3]; - //T SubFactor01 = m[2][1] * m[3][3] - m[3][1] * m[2][3]; - //T SubFactor02 = m[2][1] * m[3][2] - m[3][1] * m[2][2]; - //T SubFactor03 = m[2][0] * m[3][3] - m[3][0] * m[2][3]; - //T SubFactor04 = m[2][0] 
* m[3][2] - m[3][0] * m[2][2]; - //T SubFactor05 = m[2][0] * m[3][1] - m[3][0] * m[2][1]; - - // First 2 columns - __m128 Swp2A = _mm_castsi128_ps(_mm_shuffle_epi32(_mm_castps_si128(m[2]), _MM_SHUFFLE(0, 1, 1, 2))); - __m128 Swp3A = _mm_castsi128_ps(_mm_shuffle_epi32(_mm_castps_si128(m[3]), _MM_SHUFFLE(3, 2, 3, 3))); - __m128 MulA = _mm_mul_ps(Swp2A, Swp3A); - - // Second 2 columns - __m128 Swp2B = _mm_castsi128_ps(_mm_shuffle_epi32(_mm_castps_si128(m[2]), _MM_SHUFFLE(3, 2, 3, 3))); - __m128 Swp3B = _mm_castsi128_ps(_mm_shuffle_epi32(_mm_castps_si128(m[3]), _MM_SHUFFLE(0, 1, 1, 2))); - __m128 MulB = _mm_mul_ps(Swp2B, Swp3B); - - // Columns subtraction - __m128 SubE = _mm_sub_ps(MulA, MulB); - - // Last 2 rows - __m128 Swp2C = _mm_castsi128_ps(_mm_shuffle_epi32(_mm_castps_si128(m[2]), _MM_SHUFFLE(0, 0, 1, 2))); - __m128 Swp3C = _mm_castsi128_ps(_mm_shuffle_epi32(_mm_castps_si128(m[3]), _MM_SHUFFLE(1, 2, 0, 0))); - __m128 MulC = _mm_mul_ps(Swp2C, Swp3C); - __m128 SubF = _mm_sub_ps(_mm_movehl_ps(MulC, MulC), MulC); - - //vec<4, T, Q> DetCof( - // + (m[1][1] * SubFactor00 - m[1][2] * SubFactor01 + m[1][3] * SubFactor02), - // - (m[1][0] * SubFactor00 - m[1][2] * SubFactor03 + m[1][3] * SubFactor04), - // + (m[1][0] * SubFactor01 - m[1][1] * SubFactor03 + m[1][3] * SubFactor05), - // - (m[1][0] * SubFactor02 - m[1][1] * SubFactor04 + m[1][2] * SubFactor05)); - - __m128 SubFacA = _mm_castsi128_ps(_mm_shuffle_epi32(_mm_castps_si128(SubE), _MM_SHUFFLE(2, 1, 0, 0))); - __m128 SwpFacA = _mm_castsi128_ps(_mm_shuffle_epi32(_mm_castps_si128(m[1]), _MM_SHUFFLE(0, 0, 0, 1))); - __m128 MulFacA = _mm_mul_ps(SwpFacA, SubFacA); - - __m128 SubTmpB = _mm_shuffle_ps(SubE, SubF, _MM_SHUFFLE(0, 0, 3, 1)); - __m128 SubFacB = _mm_castsi128_ps(_mm_shuffle_epi32(_mm_castps_si128(SubTmpB), _MM_SHUFFLE(3, 1, 1, 0)));//SubF[0], SubE[3], SubE[3], SubE[1]; - __m128 SwpFacB = _mm_castsi128_ps(_mm_shuffle_epi32(_mm_castps_si128(m[1]), _MM_SHUFFLE(1, 1, 2, 2))); - __m128 MulFacB = _mm_mul_ps(SwpFacB, SubFacB); - - __m128 SubRes = _mm_sub_ps(MulFacA, MulFacB); - - __m128 SubTmpC = _mm_shuffle_ps(SubE, SubF, _MM_SHUFFLE(1, 0, 2, 2)); - __m128 SubFacC = _mm_castsi128_ps(_mm_shuffle_epi32(_mm_castps_si128(SubTmpC), _MM_SHUFFLE(3, 3, 2, 0))); - __m128 SwpFacC = _mm_castsi128_ps(_mm_shuffle_epi32(_mm_castps_si128(m[1]), _MM_SHUFFLE(2, 3, 3, 3))); - __m128 MulFacC = _mm_mul_ps(SwpFacC, SubFacC); - - __m128 AddRes = _mm_add_ps(SubRes, MulFacC); - __m128 DetCof = _mm_mul_ps(AddRes, _mm_setr_ps( 1.0f,-1.0f, 1.0f,-1.0f)); - - //return m[0][0] * DetCof[0] - // + m[0][1] * DetCof[1] - // + m[0][2] * DetCof[2] - // + m[0][3] * DetCof[3]; - - return glm_vec4_dot(m[0], DetCof); -} - -GLM_FUNC_QUALIFIER glm_vec4 glm_mat4_determinant(glm_vec4 const m[4]) -{ - // _mm_castsi128_ps(_mm_shuffle_epi32(_mm_castps_si128(add) - - //T SubFactor00 = m[2][2] * m[3][3] - m[3][2] * m[2][3]; - //T SubFactor01 = m[2][1] * m[3][3] - m[3][1] * m[2][3]; - //T SubFactor02 = m[2][1] * m[3][2] - m[3][1] * m[2][2]; - //T SubFactor03 = m[2][0] * m[3][3] - m[3][0] * m[2][3]; - //T SubFactor04 = m[2][0] * m[3][2] - m[3][0] * m[2][2]; - //T SubFactor05 = m[2][0] * m[3][1] - m[3][0] * m[2][1]; - - // First 2 columns - __m128 Swp2A = _mm_shuffle_ps(m[2], m[2], _MM_SHUFFLE(0, 1, 1, 2)); - __m128 Swp3A = _mm_shuffle_ps(m[3], m[3], _MM_SHUFFLE(3, 2, 3, 3)); - __m128 MulA = _mm_mul_ps(Swp2A, Swp3A); - - // Second 2 columns - __m128 Swp2B = _mm_shuffle_ps(m[2], m[2], _MM_SHUFFLE(3, 2, 3, 3)); - __m128 Swp3B = _mm_shuffle_ps(m[3], m[3], _MM_SHUFFLE(0, 1, 1, 2)); - __m128 MulB 
= _mm_mul_ps(Swp2B, Swp3B); - - // Columns subtraction - __m128 SubE = _mm_sub_ps(MulA, MulB); - - // Last 2 rows - __m128 Swp2C = _mm_shuffle_ps(m[2], m[2], _MM_SHUFFLE(0, 0, 1, 2)); - __m128 Swp3C = _mm_shuffle_ps(m[3], m[3], _MM_SHUFFLE(1, 2, 0, 0)); - __m128 MulC = _mm_mul_ps(Swp2C, Swp3C); - __m128 SubF = _mm_sub_ps(_mm_movehl_ps(MulC, MulC), MulC); - - //vec<4, T, Q> DetCof( - // + (m[1][1] * SubFactor00 - m[1][2] * SubFactor01 + m[1][3] * SubFactor02), - // - (m[1][0] * SubFactor00 - m[1][2] * SubFactor03 + m[1][3] * SubFactor04), - // + (m[1][0] * SubFactor01 - m[1][1] * SubFactor03 + m[1][3] * SubFactor05), - // - (m[1][0] * SubFactor02 - m[1][1] * SubFactor04 + m[1][2] * SubFactor05)); - - __m128 SubFacA = _mm_shuffle_ps(SubE, SubE, _MM_SHUFFLE(2, 1, 0, 0)); - __m128 SwpFacA = _mm_shuffle_ps(m[1], m[1], _MM_SHUFFLE(0, 0, 0, 1)); - __m128 MulFacA = _mm_mul_ps(SwpFacA, SubFacA); - - __m128 SubTmpB = _mm_shuffle_ps(SubE, SubF, _MM_SHUFFLE(0, 0, 3, 1)); - __m128 SubFacB = _mm_shuffle_ps(SubTmpB, SubTmpB, _MM_SHUFFLE(3, 1, 1, 0));//SubF[0], SubE[3], SubE[3], SubE[1]; - __m128 SwpFacB = _mm_shuffle_ps(m[1], m[1], _MM_SHUFFLE(1, 1, 2, 2)); - __m128 MulFacB = _mm_mul_ps(SwpFacB, SubFacB); - - __m128 SubRes = _mm_sub_ps(MulFacA, MulFacB); - - __m128 SubTmpC = _mm_shuffle_ps(SubE, SubF, _MM_SHUFFLE(1, 0, 2, 2)); - __m128 SubFacC = _mm_shuffle_ps(SubTmpC, SubTmpC, _MM_SHUFFLE(3, 3, 2, 0)); - __m128 SwpFacC = _mm_shuffle_ps(m[1], m[1], _MM_SHUFFLE(2, 3, 3, 3)); - __m128 MulFacC = _mm_mul_ps(SwpFacC, SubFacC); - - __m128 AddRes = _mm_add_ps(SubRes, MulFacC); - __m128 DetCof = _mm_mul_ps(AddRes, _mm_setr_ps( 1.0f,-1.0f, 1.0f,-1.0f)); - - //return m[0][0] * DetCof[0] - // + m[0][1] * DetCof[1] - // + m[0][2] * DetCof[2] - // + m[0][3] * DetCof[3]; - - return glm_vec4_dot(m[0], DetCof); -} - -GLM_FUNC_QUALIFIER void glm_mat4_inverse(glm_vec4 const in[4], glm_vec4 out[4]) -{ - __m128 Fac0; - { - // valType SubFactor00 = m[2][2] * m[3][3] - m[3][2] * m[2][3]; - // valType SubFactor00 = m[2][2] * m[3][3] - m[3][2] * m[2][3]; - // valType SubFactor06 = m[1][2] * m[3][3] - m[3][2] * m[1][3]; - // valType SubFactor13 = m[1][2] * m[2][3] - m[2][2] * m[1][3]; - - __m128 Swp0a = _mm_shuffle_ps(in[3], in[2], _MM_SHUFFLE(3, 3, 3, 3)); - __m128 Swp0b = _mm_shuffle_ps(in[3], in[2], _MM_SHUFFLE(2, 2, 2, 2)); - - __m128 Swp00 = _mm_shuffle_ps(in[2], in[1], _MM_SHUFFLE(2, 2, 2, 2)); - __m128 Swp01 = _mm_shuffle_ps(Swp0a, Swp0a, _MM_SHUFFLE(2, 0, 0, 0)); - __m128 Swp02 = _mm_shuffle_ps(Swp0b, Swp0b, _MM_SHUFFLE(2, 0, 0, 0)); - __m128 Swp03 = _mm_shuffle_ps(in[2], in[1], _MM_SHUFFLE(3, 3, 3, 3)); - - __m128 Mul00 = _mm_mul_ps(Swp00, Swp01); - __m128 Mul01 = _mm_mul_ps(Swp02, Swp03); - Fac0 = _mm_sub_ps(Mul00, Mul01); - } - - __m128 Fac1; - { - // valType SubFactor01 = m[2][1] * m[3][3] - m[3][1] * m[2][3]; - // valType SubFactor01 = m[2][1] * m[3][3] - m[3][1] * m[2][3]; - // valType SubFactor07 = m[1][1] * m[3][3] - m[3][1] * m[1][3]; - // valType SubFactor14 = m[1][1] * m[2][3] - m[2][1] * m[1][3]; - - __m128 Swp0a = _mm_shuffle_ps(in[3], in[2], _MM_SHUFFLE(3, 3, 3, 3)); - __m128 Swp0b = _mm_shuffle_ps(in[3], in[2], _MM_SHUFFLE(1, 1, 1, 1)); - - __m128 Swp00 = _mm_shuffle_ps(in[2], in[1], _MM_SHUFFLE(1, 1, 1, 1)); - __m128 Swp01 = _mm_shuffle_ps(Swp0a, Swp0a, _MM_SHUFFLE(2, 0, 0, 0)); - __m128 Swp02 = _mm_shuffle_ps(Swp0b, Swp0b, _MM_SHUFFLE(2, 0, 0, 0)); - __m128 Swp03 = _mm_shuffle_ps(in[2], in[1], _MM_SHUFFLE(3, 3, 3, 3)); - - __m128 Mul00 = _mm_mul_ps(Swp00, Swp01); - __m128 Mul01 = _mm_mul_ps(Swp02, 
Swp03); - Fac1 = _mm_sub_ps(Mul00, Mul01); - } - - - __m128 Fac2; - { - // valType SubFactor02 = m[2][1] * m[3][2] - m[3][1] * m[2][2]; - // valType SubFactor02 = m[2][1] * m[3][2] - m[3][1] * m[2][2]; - // valType SubFactor08 = m[1][1] * m[3][2] - m[3][1] * m[1][2]; - // valType SubFactor15 = m[1][1] * m[2][2] - m[2][1] * m[1][2]; - - __m128 Swp0a = _mm_shuffle_ps(in[3], in[2], _MM_SHUFFLE(2, 2, 2, 2)); - __m128 Swp0b = _mm_shuffle_ps(in[3], in[2], _MM_SHUFFLE(1, 1, 1, 1)); - - __m128 Swp00 = _mm_shuffle_ps(in[2], in[1], _MM_SHUFFLE(1, 1, 1, 1)); - __m128 Swp01 = _mm_shuffle_ps(Swp0a, Swp0a, _MM_SHUFFLE(2, 0, 0, 0)); - __m128 Swp02 = _mm_shuffle_ps(Swp0b, Swp0b, _MM_SHUFFLE(2, 0, 0, 0)); - __m128 Swp03 = _mm_shuffle_ps(in[2], in[1], _MM_SHUFFLE(2, 2, 2, 2)); - - __m128 Mul00 = _mm_mul_ps(Swp00, Swp01); - __m128 Mul01 = _mm_mul_ps(Swp02, Swp03); - Fac2 = _mm_sub_ps(Mul00, Mul01); - } - - __m128 Fac3; - { - // valType SubFactor03 = m[2][0] * m[3][3] - m[3][0] * m[2][3]; - // valType SubFactor03 = m[2][0] * m[3][3] - m[3][0] * m[2][3]; - // valType SubFactor09 = m[1][0] * m[3][3] - m[3][0] * m[1][3]; - // valType SubFactor16 = m[1][0] * m[2][3] - m[2][0] * m[1][3]; - - __m128 Swp0a = _mm_shuffle_ps(in[3], in[2], _MM_SHUFFLE(3, 3, 3, 3)); - __m128 Swp0b = _mm_shuffle_ps(in[3], in[2], _MM_SHUFFLE(0, 0, 0, 0)); - - __m128 Swp00 = _mm_shuffle_ps(in[2], in[1], _MM_SHUFFLE(0, 0, 0, 0)); - __m128 Swp01 = _mm_shuffle_ps(Swp0a, Swp0a, _MM_SHUFFLE(2, 0, 0, 0)); - __m128 Swp02 = _mm_shuffle_ps(Swp0b, Swp0b, _MM_SHUFFLE(2, 0, 0, 0)); - __m128 Swp03 = _mm_shuffle_ps(in[2], in[1], _MM_SHUFFLE(3, 3, 3, 3)); - - __m128 Mul00 = _mm_mul_ps(Swp00, Swp01); - __m128 Mul01 = _mm_mul_ps(Swp02, Swp03); - Fac3 = _mm_sub_ps(Mul00, Mul01); - } - - __m128 Fac4; - { - // valType SubFactor04 = m[2][0] * m[3][2] - m[3][0] * m[2][2]; - // valType SubFactor04 = m[2][0] * m[3][2] - m[3][0] * m[2][2]; - // valType SubFactor10 = m[1][0] * m[3][2] - m[3][0] * m[1][2]; - // valType SubFactor17 = m[1][0] * m[2][2] - m[2][0] * m[1][2]; - - __m128 Swp0a = _mm_shuffle_ps(in[3], in[2], _MM_SHUFFLE(2, 2, 2, 2)); - __m128 Swp0b = _mm_shuffle_ps(in[3], in[2], _MM_SHUFFLE(0, 0, 0, 0)); - - __m128 Swp00 = _mm_shuffle_ps(in[2], in[1], _MM_SHUFFLE(0, 0, 0, 0)); - __m128 Swp01 = _mm_shuffle_ps(Swp0a, Swp0a, _MM_SHUFFLE(2, 0, 0, 0)); - __m128 Swp02 = _mm_shuffle_ps(Swp0b, Swp0b, _MM_SHUFFLE(2, 0, 0, 0)); - __m128 Swp03 = _mm_shuffle_ps(in[2], in[1], _MM_SHUFFLE(2, 2, 2, 2)); - - __m128 Mul00 = _mm_mul_ps(Swp00, Swp01); - __m128 Mul01 = _mm_mul_ps(Swp02, Swp03); - Fac4 = _mm_sub_ps(Mul00, Mul01); - } - - __m128 Fac5; - { - // valType SubFactor05 = m[2][0] * m[3][1] - m[3][0] * m[2][1]; - // valType SubFactor05 = m[2][0] * m[3][1] - m[3][0] * m[2][1]; - // valType SubFactor12 = m[1][0] * m[3][1] - m[3][0] * m[1][1]; - // valType SubFactor18 = m[1][0] * m[2][1] - m[2][0] * m[1][1]; - - __m128 Swp0a = _mm_shuffle_ps(in[3], in[2], _MM_SHUFFLE(1, 1, 1, 1)); - __m128 Swp0b = _mm_shuffle_ps(in[3], in[2], _MM_SHUFFLE(0, 0, 0, 0)); - - __m128 Swp00 = _mm_shuffle_ps(in[2], in[1], _MM_SHUFFLE(0, 0, 0, 0)); - __m128 Swp01 = _mm_shuffle_ps(Swp0a, Swp0a, _MM_SHUFFLE(2, 0, 0, 0)); - __m128 Swp02 = _mm_shuffle_ps(Swp0b, Swp0b, _MM_SHUFFLE(2, 0, 0, 0)); - __m128 Swp03 = _mm_shuffle_ps(in[2], in[1], _MM_SHUFFLE(1, 1, 1, 1)); - - __m128 Mul00 = _mm_mul_ps(Swp00, Swp01); - __m128 Mul01 = _mm_mul_ps(Swp02, Swp03); - Fac5 = _mm_sub_ps(Mul00, Mul01); - } - - __m128 SignA = _mm_set_ps( 1.0f,-1.0f, 1.0f,-1.0f); - __m128 SignB = _mm_set_ps(-1.0f, 1.0f,-1.0f, 1.0f); - - 
// m[1][0] - // m[0][0] - // m[0][0] - // m[0][0] - __m128 Temp0 = _mm_shuffle_ps(in[1], in[0], _MM_SHUFFLE(0, 0, 0, 0)); - __m128 Vec0 = _mm_shuffle_ps(Temp0, Temp0, _MM_SHUFFLE(2, 2, 2, 0)); - - // m[1][1] - // m[0][1] - // m[0][1] - // m[0][1] - __m128 Temp1 = _mm_shuffle_ps(in[1], in[0], _MM_SHUFFLE(1, 1, 1, 1)); - __m128 Vec1 = _mm_shuffle_ps(Temp1, Temp1, _MM_SHUFFLE(2, 2, 2, 0)); - - // m[1][2] - // m[0][2] - // m[0][2] - // m[0][2] - __m128 Temp2 = _mm_shuffle_ps(in[1], in[0], _MM_SHUFFLE(2, 2, 2, 2)); - __m128 Vec2 = _mm_shuffle_ps(Temp2, Temp2, _MM_SHUFFLE(2, 2, 2, 0)); - - // m[1][3] - // m[0][3] - // m[0][3] - // m[0][3] - __m128 Temp3 = _mm_shuffle_ps(in[1], in[0], _MM_SHUFFLE(3, 3, 3, 3)); - __m128 Vec3 = _mm_shuffle_ps(Temp3, Temp3, _MM_SHUFFLE(2, 2, 2, 0)); - - // col0 - // + (Vec1[0] * Fac0[0] - Vec2[0] * Fac1[0] + Vec3[0] * Fac2[0]), - // - (Vec1[1] * Fac0[1] - Vec2[1] * Fac1[1] + Vec3[1] * Fac2[1]), - // + (Vec1[2] * Fac0[2] - Vec2[2] * Fac1[2] + Vec3[2] * Fac2[2]), - // - (Vec1[3] * Fac0[3] - Vec2[3] * Fac1[3] + Vec3[3] * Fac2[3]), - __m128 Mul00 = _mm_mul_ps(Vec1, Fac0); - __m128 Mul01 = _mm_mul_ps(Vec2, Fac1); - __m128 Mul02 = _mm_mul_ps(Vec3, Fac2); - __m128 Sub00 = _mm_sub_ps(Mul00, Mul01); - __m128 Add00 = _mm_add_ps(Sub00, Mul02); - __m128 Inv0 = _mm_mul_ps(SignB, Add00); - - // col1 - // - (Vec0[0] * Fac0[0] - Vec2[0] * Fac3[0] + Vec3[0] * Fac4[0]), - // + (Vec0[0] * Fac0[1] - Vec2[1] * Fac3[1] + Vec3[1] * Fac4[1]), - // - (Vec0[0] * Fac0[2] - Vec2[2] * Fac3[2] + Vec3[2] * Fac4[2]), - // + (Vec0[0] * Fac0[3] - Vec2[3] * Fac3[3] + Vec3[3] * Fac4[3]), - __m128 Mul03 = _mm_mul_ps(Vec0, Fac0); - __m128 Mul04 = _mm_mul_ps(Vec2, Fac3); - __m128 Mul05 = _mm_mul_ps(Vec3, Fac4); - __m128 Sub01 = _mm_sub_ps(Mul03, Mul04); - __m128 Add01 = _mm_add_ps(Sub01, Mul05); - __m128 Inv1 = _mm_mul_ps(SignA, Add01); - - // col2 - // + (Vec0[0] * Fac1[0] - Vec1[0] * Fac3[0] + Vec3[0] * Fac5[0]), - // - (Vec0[0] * Fac1[1] - Vec1[1] * Fac3[1] + Vec3[1] * Fac5[1]), - // + (Vec0[0] * Fac1[2] - Vec1[2] * Fac3[2] + Vec3[2] * Fac5[2]), - // - (Vec0[0] * Fac1[3] - Vec1[3] * Fac3[3] + Vec3[3] * Fac5[3]), - __m128 Mul06 = _mm_mul_ps(Vec0, Fac1); - __m128 Mul07 = _mm_mul_ps(Vec1, Fac3); - __m128 Mul08 = _mm_mul_ps(Vec3, Fac5); - __m128 Sub02 = _mm_sub_ps(Mul06, Mul07); - __m128 Add02 = _mm_add_ps(Sub02, Mul08); - __m128 Inv2 = _mm_mul_ps(SignB, Add02); - - // col3 - // - (Vec1[0] * Fac2[0] - Vec1[0] * Fac4[0] + Vec2[0] * Fac5[0]), - // + (Vec1[0] * Fac2[1] - Vec1[1] * Fac4[1] + Vec2[1] * Fac5[1]), - // - (Vec1[0] * Fac2[2] - Vec1[2] * Fac4[2] + Vec2[2] * Fac5[2]), - // + (Vec1[0] * Fac2[3] - Vec1[3] * Fac4[3] + Vec2[3] * Fac5[3])); - __m128 Mul09 = _mm_mul_ps(Vec0, Fac2); - __m128 Mul10 = _mm_mul_ps(Vec1, Fac4); - __m128 Mul11 = _mm_mul_ps(Vec2, Fac5); - __m128 Sub03 = _mm_sub_ps(Mul09, Mul10); - __m128 Add03 = _mm_add_ps(Sub03, Mul11); - __m128 Inv3 = _mm_mul_ps(SignA, Add03); - - __m128 Row0 = _mm_shuffle_ps(Inv0, Inv1, _MM_SHUFFLE(0, 0, 0, 0)); - __m128 Row1 = _mm_shuffle_ps(Inv2, Inv3, _MM_SHUFFLE(0, 0, 0, 0)); - __m128 Row2 = _mm_shuffle_ps(Row0, Row1, _MM_SHUFFLE(2, 0, 2, 0)); - - // valType Determinant = m[0][0] * Inverse[0][0] - // + m[0][1] * Inverse[1][0] - // + m[0][2] * Inverse[2][0] - // + m[0][3] * Inverse[3][0]; - __m128 Det0 = glm_vec4_dot(in[0], Row2); - __m128 Rcp0 = _mm_div_ps(_mm_set1_ps(1.0f), Det0); - //__m128 Rcp0 = _mm_rcp_ps(Det0); - - // Inverse /= Determinant; - out[0] = _mm_mul_ps(Inv0, Rcp0); - out[1] = _mm_mul_ps(Inv1, Rcp0); - out[2] = _mm_mul_ps(Inv2, Rcp0); - 
out[3] = _mm_mul_ps(Inv3, Rcp0); -} - -GLM_FUNC_QUALIFIER void glm_mat4_inverse_lowp(glm_vec4 const in[4], glm_vec4 out[4]) -{ - __m128 Fac0; - { - // valType SubFactor00 = m[2][2] * m[3][3] - m[3][2] * m[2][3]; - // valType SubFactor00 = m[2][2] * m[3][3] - m[3][2] * m[2][3]; - // valType SubFactor06 = m[1][2] * m[3][3] - m[3][2] * m[1][3]; - // valType SubFactor13 = m[1][2] * m[2][3] - m[2][2] * m[1][3]; - - __m128 Swp0a = _mm_shuffle_ps(in[3], in[2], _MM_SHUFFLE(3, 3, 3, 3)); - __m128 Swp0b = _mm_shuffle_ps(in[3], in[2], _MM_SHUFFLE(2, 2, 2, 2)); - - __m128 Swp00 = _mm_shuffle_ps(in[2], in[1], _MM_SHUFFLE(2, 2, 2, 2)); - __m128 Swp01 = _mm_shuffle_ps(Swp0a, Swp0a, _MM_SHUFFLE(2, 0, 0, 0)); - __m128 Swp02 = _mm_shuffle_ps(Swp0b, Swp0b, _MM_SHUFFLE(2, 0, 0, 0)); - __m128 Swp03 = _mm_shuffle_ps(in[2], in[1], _MM_SHUFFLE(3, 3, 3, 3)); - - __m128 Mul00 = _mm_mul_ps(Swp00, Swp01); - __m128 Mul01 = _mm_mul_ps(Swp02, Swp03); - Fac0 = _mm_sub_ps(Mul00, Mul01); - } - - __m128 Fac1; - { - // valType SubFactor01 = m[2][1] * m[3][3] - m[3][1] * m[2][3]; - // valType SubFactor01 = m[2][1] * m[3][3] - m[3][1] * m[2][3]; - // valType SubFactor07 = m[1][1] * m[3][3] - m[3][1] * m[1][3]; - // valType SubFactor14 = m[1][1] * m[2][3] - m[2][1] * m[1][3]; - - __m128 Swp0a = _mm_shuffle_ps(in[3], in[2], _MM_SHUFFLE(3, 3, 3, 3)); - __m128 Swp0b = _mm_shuffle_ps(in[3], in[2], _MM_SHUFFLE(1, 1, 1, 1)); - - __m128 Swp00 = _mm_shuffle_ps(in[2], in[1], _MM_SHUFFLE(1, 1, 1, 1)); - __m128 Swp01 = _mm_shuffle_ps(Swp0a, Swp0a, _MM_SHUFFLE(2, 0, 0, 0)); - __m128 Swp02 = _mm_shuffle_ps(Swp0b, Swp0b, _MM_SHUFFLE(2, 0, 0, 0)); - __m128 Swp03 = _mm_shuffle_ps(in[2], in[1], _MM_SHUFFLE(3, 3, 3, 3)); - - __m128 Mul00 = _mm_mul_ps(Swp00, Swp01); - __m128 Mul01 = _mm_mul_ps(Swp02, Swp03); - Fac1 = _mm_sub_ps(Mul00, Mul01); - } - - - __m128 Fac2; - { - // valType SubFactor02 = m[2][1] * m[3][2] - m[3][1] * m[2][2]; - // valType SubFactor02 = m[2][1] * m[3][2] - m[3][1] * m[2][2]; - // valType SubFactor08 = m[1][1] * m[3][2] - m[3][1] * m[1][2]; - // valType SubFactor15 = m[1][1] * m[2][2] - m[2][1] * m[1][2]; - - __m128 Swp0a = _mm_shuffle_ps(in[3], in[2], _MM_SHUFFLE(2, 2, 2, 2)); - __m128 Swp0b = _mm_shuffle_ps(in[3], in[2], _MM_SHUFFLE(1, 1, 1, 1)); - - __m128 Swp00 = _mm_shuffle_ps(in[2], in[1], _MM_SHUFFLE(1, 1, 1, 1)); - __m128 Swp01 = _mm_shuffle_ps(Swp0a, Swp0a, _MM_SHUFFLE(2, 0, 0, 0)); - __m128 Swp02 = _mm_shuffle_ps(Swp0b, Swp0b, _MM_SHUFFLE(2, 0, 0, 0)); - __m128 Swp03 = _mm_shuffle_ps(in[2], in[1], _MM_SHUFFLE(2, 2, 2, 2)); - - __m128 Mul00 = _mm_mul_ps(Swp00, Swp01); - __m128 Mul01 = _mm_mul_ps(Swp02, Swp03); - Fac2 = _mm_sub_ps(Mul00, Mul01); - } - - __m128 Fac3; - { - // valType SubFactor03 = m[2][0] * m[3][3] - m[3][0] * m[2][3]; - // valType SubFactor03 = m[2][0] * m[3][3] - m[3][0] * m[2][3]; - // valType SubFactor09 = m[1][0] * m[3][3] - m[3][0] * m[1][3]; - // valType SubFactor16 = m[1][0] * m[2][3] - m[2][0] * m[1][3]; - - __m128 Swp0a = _mm_shuffle_ps(in[3], in[2], _MM_SHUFFLE(3, 3, 3, 3)); - __m128 Swp0b = _mm_shuffle_ps(in[3], in[2], _MM_SHUFFLE(0, 0, 0, 0)); - - __m128 Swp00 = _mm_shuffle_ps(in[2], in[1], _MM_SHUFFLE(0, 0, 0, 0)); - __m128 Swp01 = _mm_shuffle_ps(Swp0a, Swp0a, _MM_SHUFFLE(2, 0, 0, 0)); - __m128 Swp02 = _mm_shuffle_ps(Swp0b, Swp0b, _MM_SHUFFLE(2, 0, 0, 0)); - __m128 Swp03 = _mm_shuffle_ps(in[2], in[1], _MM_SHUFFLE(3, 3, 3, 3)); - - __m128 Mul00 = _mm_mul_ps(Swp00, Swp01); - __m128 Mul01 = _mm_mul_ps(Swp02, Swp03); - Fac3 = _mm_sub_ps(Mul00, Mul01); - } - - __m128 Fac4; - { - // valType 
SubFactor04 = m[2][0] * m[3][2] - m[3][0] * m[2][2]; - // valType SubFactor04 = m[2][0] * m[3][2] - m[3][0] * m[2][2]; - // valType SubFactor10 = m[1][0] * m[3][2] - m[3][0] * m[1][2]; - // valType SubFactor17 = m[1][0] * m[2][2] - m[2][0] * m[1][2]; - - __m128 Swp0a = _mm_shuffle_ps(in[3], in[2], _MM_SHUFFLE(2, 2, 2, 2)); - __m128 Swp0b = _mm_shuffle_ps(in[3], in[2], _MM_SHUFFLE(0, 0, 0, 0)); - - __m128 Swp00 = _mm_shuffle_ps(in[2], in[1], _MM_SHUFFLE(0, 0, 0, 0)); - __m128 Swp01 = _mm_shuffle_ps(Swp0a, Swp0a, _MM_SHUFFLE(2, 0, 0, 0)); - __m128 Swp02 = _mm_shuffle_ps(Swp0b, Swp0b, _MM_SHUFFLE(2, 0, 0, 0)); - __m128 Swp03 = _mm_shuffle_ps(in[2], in[1], _MM_SHUFFLE(2, 2, 2, 2)); - - __m128 Mul00 = _mm_mul_ps(Swp00, Swp01); - __m128 Mul01 = _mm_mul_ps(Swp02, Swp03); - Fac4 = _mm_sub_ps(Mul00, Mul01); - } - - __m128 Fac5; - { - // valType SubFactor05 = m[2][0] * m[3][1] - m[3][0] * m[2][1]; - // valType SubFactor05 = m[2][0] * m[3][1] - m[3][0] * m[2][1]; - // valType SubFactor12 = m[1][0] * m[3][1] - m[3][0] * m[1][1]; - // valType SubFactor18 = m[1][0] * m[2][1] - m[2][0] * m[1][1]; - - __m128 Swp0a = _mm_shuffle_ps(in[3], in[2], _MM_SHUFFLE(1, 1, 1, 1)); - __m128 Swp0b = _mm_shuffle_ps(in[3], in[2], _MM_SHUFFLE(0, 0, 0, 0)); - - __m128 Swp00 = _mm_shuffle_ps(in[2], in[1], _MM_SHUFFLE(0, 0, 0, 0)); - __m128 Swp01 = _mm_shuffle_ps(Swp0a, Swp0a, _MM_SHUFFLE(2, 0, 0, 0)); - __m128 Swp02 = _mm_shuffle_ps(Swp0b, Swp0b, _MM_SHUFFLE(2, 0, 0, 0)); - __m128 Swp03 = _mm_shuffle_ps(in[2], in[1], _MM_SHUFFLE(1, 1, 1, 1)); - - __m128 Mul00 = _mm_mul_ps(Swp00, Swp01); - __m128 Mul01 = _mm_mul_ps(Swp02, Swp03); - Fac5 = _mm_sub_ps(Mul00, Mul01); - } - - __m128 SignA = _mm_set_ps( 1.0f,-1.0f, 1.0f,-1.0f); - __m128 SignB = _mm_set_ps(-1.0f, 1.0f,-1.0f, 1.0f); - - // m[1][0] - // m[0][0] - // m[0][0] - // m[0][0] - __m128 Temp0 = _mm_shuffle_ps(in[1], in[0], _MM_SHUFFLE(0, 0, 0, 0)); - __m128 Vec0 = _mm_shuffle_ps(Temp0, Temp0, _MM_SHUFFLE(2, 2, 2, 0)); - - // m[1][1] - // m[0][1] - // m[0][1] - // m[0][1] - __m128 Temp1 = _mm_shuffle_ps(in[1], in[0], _MM_SHUFFLE(1, 1, 1, 1)); - __m128 Vec1 = _mm_shuffle_ps(Temp1, Temp1, _MM_SHUFFLE(2, 2, 2, 0)); - - // m[1][2] - // m[0][2] - // m[0][2] - // m[0][2] - __m128 Temp2 = _mm_shuffle_ps(in[1], in[0], _MM_SHUFFLE(2, 2, 2, 2)); - __m128 Vec2 = _mm_shuffle_ps(Temp2, Temp2, _MM_SHUFFLE(2, 2, 2, 0)); - - // m[1][3] - // m[0][3] - // m[0][3] - // m[0][3] - __m128 Temp3 = _mm_shuffle_ps(in[1], in[0], _MM_SHUFFLE(3, 3, 3, 3)); - __m128 Vec3 = _mm_shuffle_ps(Temp3, Temp3, _MM_SHUFFLE(2, 2, 2, 0)); - - // col0 - // + (Vec1[0] * Fac0[0] - Vec2[0] * Fac1[0] + Vec3[0] * Fac2[0]), - // - (Vec1[1] * Fac0[1] - Vec2[1] * Fac1[1] + Vec3[1] * Fac2[1]), - // + (Vec1[2] * Fac0[2] - Vec2[2] * Fac1[2] + Vec3[2] * Fac2[2]), - // - (Vec1[3] * Fac0[3] - Vec2[3] * Fac1[3] + Vec3[3] * Fac2[3]), - __m128 Mul00 = _mm_mul_ps(Vec1, Fac0); - __m128 Mul01 = _mm_mul_ps(Vec2, Fac1); - __m128 Mul02 = _mm_mul_ps(Vec3, Fac2); - __m128 Sub00 = _mm_sub_ps(Mul00, Mul01); - __m128 Add00 = _mm_add_ps(Sub00, Mul02); - __m128 Inv0 = _mm_mul_ps(SignB, Add00); - - // col1 - // - (Vec0[0] * Fac0[0] - Vec2[0] * Fac3[0] + Vec3[0] * Fac4[0]), - // + (Vec0[0] * Fac0[1] - Vec2[1] * Fac3[1] + Vec3[1] * Fac4[1]), - // - (Vec0[0] * Fac0[2] - Vec2[2] * Fac3[2] + Vec3[2] * Fac4[2]), - // + (Vec0[0] * Fac0[3] - Vec2[3] * Fac3[3] + Vec3[3] * Fac4[3]), - __m128 Mul03 = _mm_mul_ps(Vec0, Fac0); - __m128 Mul04 = _mm_mul_ps(Vec2, Fac3); - __m128 Mul05 = _mm_mul_ps(Vec3, Fac4); - __m128 Sub01 = _mm_sub_ps(Mul03, Mul04); - __m128 
Add01 = _mm_add_ps(Sub01, Mul05); - __m128 Inv1 = _mm_mul_ps(SignA, Add01); - - // col2 - // + (Vec0[0] * Fac1[0] - Vec1[0] * Fac3[0] + Vec3[0] * Fac5[0]), - // - (Vec0[0] * Fac1[1] - Vec1[1] * Fac3[1] + Vec3[1] * Fac5[1]), - // + (Vec0[0] * Fac1[2] - Vec1[2] * Fac3[2] + Vec3[2] * Fac5[2]), - // - (Vec0[0] * Fac1[3] - Vec1[3] * Fac3[3] + Vec3[3] * Fac5[3]), - __m128 Mul06 = _mm_mul_ps(Vec0, Fac1); - __m128 Mul07 = _mm_mul_ps(Vec1, Fac3); - __m128 Mul08 = _mm_mul_ps(Vec3, Fac5); - __m128 Sub02 = _mm_sub_ps(Mul06, Mul07); - __m128 Add02 = _mm_add_ps(Sub02, Mul08); - __m128 Inv2 = _mm_mul_ps(SignB, Add02); - - // col3 - // - (Vec1[0] * Fac2[0] - Vec1[0] * Fac4[0] + Vec2[0] * Fac5[0]), - // + (Vec1[0] * Fac2[1] - Vec1[1] * Fac4[1] + Vec2[1] * Fac5[1]), - // - (Vec1[0] * Fac2[2] - Vec1[2] * Fac4[2] + Vec2[2] * Fac5[2]), - // + (Vec1[0] * Fac2[3] - Vec1[3] * Fac4[3] + Vec2[3] * Fac5[3])); - __m128 Mul09 = _mm_mul_ps(Vec0, Fac2); - __m128 Mul10 = _mm_mul_ps(Vec1, Fac4); - __m128 Mul11 = _mm_mul_ps(Vec2, Fac5); - __m128 Sub03 = _mm_sub_ps(Mul09, Mul10); - __m128 Add03 = _mm_add_ps(Sub03, Mul11); - __m128 Inv3 = _mm_mul_ps(SignA, Add03); - - __m128 Row0 = _mm_shuffle_ps(Inv0, Inv1, _MM_SHUFFLE(0, 0, 0, 0)); - __m128 Row1 = _mm_shuffle_ps(Inv2, Inv3, _MM_SHUFFLE(0, 0, 0, 0)); - __m128 Row2 = _mm_shuffle_ps(Row0, Row1, _MM_SHUFFLE(2, 0, 2, 0)); - - // valType Determinant = m[0][0] * Inverse[0][0] - // + m[0][1] * Inverse[1][0] - // + m[0][2] * Inverse[2][0] - // + m[0][3] * Inverse[3][0]; - __m128 Det0 = glm_vec4_dot(in[0], Row2); - __m128 Rcp0 = _mm_rcp_ps(Det0); - //__m128 Rcp0 = _mm_div_ps(one, Det0); - // Inverse /= Determinant; - out[0] = _mm_mul_ps(Inv0, Rcp0); - out[1] = _mm_mul_ps(Inv1, Rcp0); - out[2] = _mm_mul_ps(Inv2, Rcp0); - out[3] = _mm_mul_ps(Inv3, Rcp0); -} -/* -GLM_FUNC_QUALIFIER void glm_mat4_rotate(__m128 const in[4], float Angle, float const v[3], __m128 out[4]) -{ - float a = glm::radians(Angle); - float c = cos(a); - float s = sin(a); - - glm::vec4 AxisA(v[0], v[1], v[2], float(0)); - __m128 AxisB = _mm_set_ps(AxisA.w, AxisA.z, AxisA.y, AxisA.x); - __m128 AxisC = detail::sse_nrm_ps(AxisB); - - __m128 Cos0 = _mm_set_ss(c); - __m128 CosA = _mm_shuffle_ps(Cos0, Cos0, _MM_SHUFFLE(0, 0, 0, 0)); - __m128 Sin0 = _mm_set_ss(s); - __m128 SinA = _mm_shuffle_ps(Sin0, Sin0, _MM_SHUFFLE(0, 0, 0, 0)); - - // vec<3, T, Q> temp = (valType(1) - c) * axis; - __m128 Temp0 = _mm_sub_ps(one, CosA); - __m128 Temp1 = _mm_mul_ps(Temp0, AxisC); - - //Rotate[0][0] = c + temp[0] * axis[0]; - //Rotate[0][1] = 0 + temp[0] * axis[1] + s * axis[2]; - //Rotate[0][2] = 0 + temp[0] * axis[2] - s * axis[1]; - __m128 Axis0 = _mm_shuffle_ps(AxisC, AxisC, _MM_SHUFFLE(0, 0, 0, 0)); - __m128 TmpA0 = _mm_mul_ps(Axis0, AxisC); - __m128 CosA0 = _mm_shuffle_ps(Cos0, Cos0, _MM_SHUFFLE(1, 1, 1, 0)); - __m128 TmpA1 = _mm_add_ps(CosA0, TmpA0); - __m128 SinA0 = SinA;//_mm_set_ps(0.0f, s, -s, 0.0f); - __m128 TmpA2 = _mm_shuffle_ps(AxisC, AxisC, _MM_SHUFFLE(3, 1, 2, 3)); - __m128 TmpA3 = _mm_mul_ps(SinA0, TmpA2); - __m128 TmpA4 = _mm_add_ps(TmpA1, TmpA3); - - //Rotate[1][0] = 0 + temp[1] * axis[0] - s * axis[2]; - //Rotate[1][1] = c + temp[1] * axis[1]; - //Rotate[1][2] = 0 + temp[1] * axis[2] + s * axis[0]; - __m128 Axis1 = _mm_shuffle_ps(AxisC, AxisC, _MM_SHUFFLE(1, 1, 1, 1)); - __m128 TmpB0 = _mm_mul_ps(Axis1, AxisC); - __m128 CosA1 = _mm_shuffle_ps(Cos0, Cos0, _MM_SHUFFLE(1, 1, 0, 1)); - __m128 TmpB1 = _mm_add_ps(CosA1, TmpB0); - __m128 SinB0 = SinA;//_mm_set_ps(-s, 0.0f, s, 0.0f); - __m128 TmpB2 = _mm_shuffle_ps(AxisC, 
AxisC, _MM_SHUFFLE(3, 0, 3, 2)); - __m128 TmpB3 = _mm_mul_ps(SinA0, TmpB2); - __m128 TmpB4 = _mm_add_ps(TmpB1, TmpB3); - - //Rotate[2][0] = 0 + temp[2] * axis[0] + s * axis[1]; - //Rotate[2][1] = 0 + temp[2] * axis[1] - s * axis[0]; - //Rotate[2][2] = c + temp[2] * axis[2]; - __m128 Axis2 = _mm_shuffle_ps(AxisC, AxisC, _MM_SHUFFLE(2, 2, 2, 2)); - __m128 TmpC0 = _mm_mul_ps(Axis2, AxisC); - __m128 CosA2 = _mm_shuffle_ps(Cos0, Cos0, _MM_SHUFFLE(1, 0, 1, 1)); - __m128 TmpC1 = _mm_add_ps(CosA2, TmpC0); - __m128 SinC0 = SinA;//_mm_set_ps(s, -s, 0.0f, 0.0f); - __m128 TmpC2 = _mm_shuffle_ps(AxisC, AxisC, _MM_SHUFFLE(3, 3, 0, 1)); - __m128 TmpC3 = _mm_mul_ps(SinA0, TmpC2); - __m128 TmpC4 = _mm_add_ps(TmpC1, TmpC3); - - __m128 Result[4]; - Result[0] = TmpA4; - Result[1] = TmpB4; - Result[2] = TmpC4; - Result[3] = _mm_set_ps(1, 0, 0, 0); - - //mat<4, 4, valType> Result; - //Result[0] = m[0] * Rotate[0][0] + m[1] * Rotate[0][1] + m[2] * Rotate[0][2]; - //Result[1] = m[0] * Rotate[1][0] + m[1] * Rotate[1][1] + m[2] * Rotate[1][2]; - //Result[2] = m[0] * Rotate[2][0] + m[1] * Rotate[2][1] + m[2] * Rotate[2][2]; - //Result[3] = m[3]; - //return Result; - sse_mul_ps(in, Result, out); -} -*/ -GLM_FUNC_QUALIFIER void glm_mat4_outerProduct(__m128 const& c, __m128 const& r, __m128 out[4]) -{ - out[0] = _mm_mul_ps(c, _mm_shuffle_ps(r, r, _MM_SHUFFLE(0, 0, 0, 0))); - out[1] = _mm_mul_ps(c, _mm_shuffle_ps(r, r, _MM_SHUFFLE(1, 1, 1, 1))); - out[2] = _mm_mul_ps(c, _mm_shuffle_ps(r, r, _MM_SHUFFLE(2, 2, 2, 2))); - out[3] = _mm_mul_ps(c, _mm_shuffle_ps(r, r, _MM_SHUFFLE(3, 3, 3, 3))); -} - -#endif//GLM_ARCH & GLM_ARCH_SSE2_BIT diff --git a/third_party/glm/simd/neon.h b/third_party/glm/simd/neon.h deleted file mode 100755 index 6c38b06..0000000 --- a/third_party/glm/simd/neon.h +++ /dev/null @@ -1,155 +0,0 @@ -/// @ref simd_neon -/// @file glm/simd/neon.h - -#pragma once - -#if GLM_ARCH & GLM_ARCH_NEON_BIT -#include - -namespace glm { - namespace neon { - static float32x4_t dupq_lane(float32x4_t vsrc, int lane) { - switch(lane) { -#if GLM_ARCH & GLM_ARCH_ARMV8_BIT - case 0: return vdupq_laneq_f32(vsrc, 0); - case 1: return vdupq_laneq_f32(vsrc, 1); - case 2: return vdupq_laneq_f32(vsrc, 2); - case 3: return vdupq_laneq_f32(vsrc, 3); -#else - case 0: return vdupq_n_f32(vgetq_lane_f32(vsrc, 0)); - case 1: return vdupq_n_f32(vgetq_lane_f32(vsrc, 1)); - case 2: return vdupq_n_f32(vgetq_lane_f32(vsrc, 2)); - case 3: return vdupq_n_f32(vgetq_lane_f32(vsrc, 3)); -#endif - } - assert(!"Unreachable code executed!"); - return vdupq_n_f32(0.0f); - } - - static float32x2_t dup_lane(float32x4_t vsrc, int lane) { - switch(lane) { -#if GLM_ARCH & GLM_ARCH_ARMV8_BIT - case 0: return vdup_laneq_f32(vsrc, 0); - case 1: return vdup_laneq_f32(vsrc, 1); - case 2: return vdup_laneq_f32(vsrc, 2); - case 3: return vdup_laneq_f32(vsrc, 3); -#else - case 0: return vdup_n_f32(vgetq_lane_f32(vsrc, 0)); - case 1: return vdup_n_f32(vgetq_lane_f32(vsrc, 1)); - case 2: return vdup_n_f32(vgetq_lane_f32(vsrc, 2)); - case 3: return vdup_n_f32(vgetq_lane_f32(vsrc, 3)); -#endif - } - assert(!"Unreachable code executed!"); - return vdup_n_f32(0.0f); - } - - static float32x4_t copy_lane(float32x4_t vdst, int dlane, float32x4_t vsrc, int slane) { -#if GLM_ARCH & GLM_ARCH_ARMV8_BIT - switch(dlane) { - case 0: - switch(slane) { - case 0: return vcopyq_laneq_f32(vdst, 0, vsrc, 0); - case 1: return vcopyq_laneq_f32(vdst, 0, vsrc, 1); - case 2: return vcopyq_laneq_f32(vdst, 0, vsrc, 2); - case 3: return vcopyq_laneq_f32(vdst, 0, vsrc, 3); - } - 
assert(!"Unreachable code executed!"); - case 1: - switch(slane) { - case 0: return vcopyq_laneq_f32(vdst, 1, vsrc, 0); - case 1: return vcopyq_laneq_f32(vdst, 1, vsrc, 1); - case 2: return vcopyq_laneq_f32(vdst, 1, vsrc, 2); - case 3: return vcopyq_laneq_f32(vdst, 1, vsrc, 3); - } - assert(!"Unreachable code executed!"); - case 2: - switch(slane) { - case 0: return vcopyq_laneq_f32(vdst, 2, vsrc, 0); - case 1: return vcopyq_laneq_f32(vdst, 2, vsrc, 1); - case 2: return vcopyq_laneq_f32(vdst, 2, vsrc, 2); - case 3: return vcopyq_laneq_f32(vdst, 2, vsrc, 3); - } - assert(!"Unreachable code executed!"); - case 3: - switch(slane) { - case 0: return vcopyq_laneq_f32(vdst, 3, vsrc, 0); - case 1: return vcopyq_laneq_f32(vdst, 3, vsrc, 1); - case 2: return vcopyq_laneq_f32(vdst, 3, vsrc, 2); - case 3: return vcopyq_laneq_f32(vdst, 3, vsrc, 3); - } - assert(!"Unreachable code executed!"); - } -#else - - float l; - switch(slane) { - case 0: l = vgetq_lane_f32(vsrc, 0); break; - case 1: l = vgetq_lane_f32(vsrc, 1); break; - case 2: l = vgetq_lane_f32(vsrc, 2); break; - case 3: l = vgetq_lane_f32(vsrc, 3); break; - default: - assert(!"Unreachable code executed!"); - } - switch(dlane) { - case 0: return vsetq_lane_f32(l, vdst, 0); - case 1: return vsetq_lane_f32(l, vdst, 1); - case 2: return vsetq_lane_f32(l, vdst, 2); - case 3: return vsetq_lane_f32(l, vdst, 3); - } -#endif - assert(!"Unreachable code executed!"); - return vdupq_n_f32(0.0f); - } - - static float32x4_t mul_lane(float32x4_t v, float32x4_t vlane, int lane) { -#if GLM_ARCH & GLM_ARCH_ARMV8_BIT - switch(lane) { - case 0: return vmulq_laneq_f32(v, vlane, 0); break; - case 1: return vmulq_laneq_f32(v, vlane, 1); break; - case 2: return vmulq_laneq_f32(v, vlane, 2); break; - case 3: return vmulq_laneq_f32(v, vlane, 3); break; - default: - assert(!"Unreachable code executed!"); - } - assert(!"Unreachable code executed!"); - return vdupq_n_f32(0.0f); -#else - return vmulq_f32(v, dupq_lane(vlane, lane)); -#endif - } - - static float32x4_t madd_lane(float32x4_t acc, float32x4_t v, float32x4_t vlane, int lane) { -#if GLM_ARCH & GLM_ARCH_ARMV8_BIT -#ifdef GLM_CONFIG_FORCE_FMA -# define FMADD_LANE(acc, x, y, L) do { asm volatile ("fmla %0.4s, %1.4s, %2.4s" : "+w"(acc) : "w"(x), "w"(dup_lane(y, L))); } while(0) -#else -# define FMADD_LANE(acc, x, y, L) do { acc = vmlaq_laneq_f32(acc, x, y, L); } while(0) -#endif - - switch(lane) { - case 0: - FMADD_LANE(acc, v, vlane, 0); - return acc; - case 1: - FMADD_LANE(acc, v, vlane, 1); - return acc; - case 2: - FMADD_LANE(acc, v, vlane, 2); - return acc; - case 3: - FMADD_LANE(acc, v, vlane, 3); - return acc; - default: - assert(!"Unreachable code executed!"); - } - assert(!"Unreachable code executed!"); - return vdupq_n_f32(0.0f); -# undef FMADD_LANE -#else - return vaddq_f32(acc, vmulq_f32(v, dupq_lane(vlane, lane))); -#endif - } - } //namespace neon -} // namespace glm -#endif // GLM_ARCH & GLM_ARCH_NEON_BIT diff --git a/third_party/glm/simd/packing.h b/third_party/glm/simd/packing.h deleted file mode 100755 index 609163e..0000000 --- a/third_party/glm/simd/packing.h +++ /dev/null @@ -1,8 +0,0 @@ -/// @ref simd -/// @file glm/simd/packing.h - -#pragma once - -#if GLM_ARCH & GLM_ARCH_SSE2_BIT - -#endif//GLM_ARCH & GLM_ARCH_SSE2_BIT diff --git a/third_party/glm/simd/platform.h b/third_party/glm/simd/platform.h deleted file mode 100755 index ad25cc1..0000000 --- a/third_party/glm/simd/platform.h +++ /dev/null @@ -1,398 +0,0 @@ -#pragma once - 
-/////////////////////////////////////////////////////////////////////////////////// -// Platform - -#define GLM_PLATFORM_UNKNOWN 0x00000000 -#define GLM_PLATFORM_WINDOWS 0x00010000 -#define GLM_PLATFORM_LINUX 0x00020000 -#define GLM_PLATFORM_APPLE 0x00040000 -//#define GLM_PLATFORM_IOS 0x00080000 -#define GLM_PLATFORM_ANDROID 0x00100000 -#define GLM_PLATFORM_CHROME_NACL 0x00200000 -#define GLM_PLATFORM_UNIX 0x00400000 -#define GLM_PLATFORM_QNXNTO 0x00800000 -#define GLM_PLATFORM_WINCE 0x01000000 -#define GLM_PLATFORM_CYGWIN 0x02000000 - -#ifdef GLM_FORCE_PLATFORM_UNKNOWN -# define GLM_PLATFORM GLM_PLATFORM_UNKNOWN -#elif defined(__CYGWIN__) -# define GLM_PLATFORM GLM_PLATFORM_CYGWIN -#elif defined(__QNXNTO__) -# define GLM_PLATFORM GLM_PLATFORM_QNXNTO -#elif defined(__APPLE__) -# define GLM_PLATFORM GLM_PLATFORM_APPLE -#elif defined(WINCE) -# define GLM_PLATFORM GLM_PLATFORM_WINCE -#elif defined(_WIN32) -# define GLM_PLATFORM GLM_PLATFORM_WINDOWS -#elif defined(__native_client__) -# define GLM_PLATFORM GLM_PLATFORM_CHROME_NACL -#elif defined(__ANDROID__) -# define GLM_PLATFORM GLM_PLATFORM_ANDROID -#elif defined(__linux) -# define GLM_PLATFORM GLM_PLATFORM_LINUX -#elif defined(__unix) -# define GLM_PLATFORM GLM_PLATFORM_UNIX -#else -# define GLM_PLATFORM GLM_PLATFORM_UNKNOWN -#endif// - -/////////////////////////////////////////////////////////////////////////////////// -// Compiler - -#define GLM_COMPILER_UNKNOWN 0x00000000 - -// Intel -#define GLM_COMPILER_INTEL 0x00100000 -#define GLM_COMPILER_INTEL14 0x00100040 -#define GLM_COMPILER_INTEL15 0x00100050 -#define GLM_COMPILER_INTEL16 0x00100060 -#define GLM_COMPILER_INTEL17 0x00100070 - -// Visual C++ defines -#define GLM_COMPILER_VC 0x01000000 -#define GLM_COMPILER_VC12 0x01000001 -#define GLM_COMPILER_VC14 0x01000002 -#define GLM_COMPILER_VC15 0x01000003 -#define GLM_COMPILER_VC15_3 0x01000004 -#define GLM_COMPILER_VC15_5 0x01000005 -#define GLM_COMPILER_VC15_6 0x01000006 -#define GLM_COMPILER_VC15_7 0x01000007 -#define GLM_COMPILER_VC15_8 0x01000008 -#define GLM_COMPILER_VC15_9 0x01000009 -#define GLM_COMPILER_VC16 0x0100000A - -// GCC defines -#define GLM_COMPILER_GCC 0x02000000 -#define GLM_COMPILER_GCC46 0x020000D0 -#define GLM_COMPILER_GCC47 0x020000E0 -#define GLM_COMPILER_GCC48 0x020000F0 -#define GLM_COMPILER_GCC49 0x02000100 -#define GLM_COMPILER_GCC5 0x02000200 -#define GLM_COMPILER_GCC6 0x02000300 -#define GLM_COMPILER_GCC7 0x02000400 -#define GLM_COMPILER_GCC8 0x02000500 - -// CUDA -#define GLM_COMPILER_CUDA 0x10000000 -#define GLM_COMPILER_CUDA75 0x10000001 -#define GLM_COMPILER_CUDA80 0x10000002 -#define GLM_COMPILER_CUDA90 0x10000004 - -// SYCL -#define GLM_COMPILER_SYCL 0x00300000 - -// Clang -#define GLM_COMPILER_CLANG 0x20000000 -#define GLM_COMPILER_CLANG34 0x20000050 -#define GLM_COMPILER_CLANG35 0x20000060 -#define GLM_COMPILER_CLANG36 0x20000070 -#define GLM_COMPILER_CLANG37 0x20000080 -#define GLM_COMPILER_CLANG38 0x20000090 -#define GLM_COMPILER_CLANG39 0x200000A0 -#define GLM_COMPILER_CLANG40 0x200000B0 -#define GLM_COMPILER_CLANG41 0x200000C0 -#define GLM_COMPILER_CLANG42 0x200000D0 - -// Build model -#define GLM_MODEL_32 0x00000010 -#define GLM_MODEL_64 0x00000020 - -// Force generic C++ compiler -#ifdef GLM_FORCE_COMPILER_UNKNOWN -# define GLM_COMPILER GLM_COMPILER_UNKNOWN - -#elif defined(__INTEL_COMPILER) -# if __INTEL_COMPILER >= 1700 -# define GLM_COMPILER GLM_COMPILER_INTEL17 -# elif __INTEL_COMPILER >= 1600 -# define GLM_COMPILER GLM_COMPILER_INTEL16 -# elif __INTEL_COMPILER >= 1500 -# define 
GLM_COMPILER GLM_COMPILER_INTEL15 -# elif __INTEL_COMPILER >= 1400 -# define GLM_COMPILER GLM_COMPILER_INTEL14 -# elif __INTEL_COMPILER < 1400 -# error "GLM requires ICC 2013 SP1 or newer" -# endif - -// CUDA -#elif defined(__CUDACC__) -# if !defined(CUDA_VERSION) && !defined(GLM_FORCE_CUDA) -# include // make sure version is defined since nvcc does not define it itself! -# endif -# if CUDA_VERSION >= 8000 -# define GLM_COMPILER GLM_COMPILER_CUDA80 -# elif CUDA_VERSION >= 7500 -# define GLM_COMPILER GLM_COMPILER_CUDA75 -# elif CUDA_VERSION >= 7000 -# define GLM_COMPILER GLM_COMPILER_CUDA70 -# elif CUDA_VERSION < 7000 -# error "GLM requires CUDA 7.0 or higher" -# endif - -// SYCL -#elif defined(__SYCL_DEVICE_ONLY__) -# define GLM_COMPILER GLM_COMPILER_SYCL - -// Clang -#elif defined(__clang__) -# if defined(__apple_build_version__) -# if (__clang_major__ < 6) -# error "GLM requires Clang 3.4 / Apple Clang 6.0 or higher" -# elif __clang_major__ == 6 && __clang_minor__ == 0 -# define GLM_COMPILER GLM_COMPILER_CLANG35 -# elif __clang_major__ == 6 && __clang_minor__ >= 1 -# define GLM_COMPILER GLM_COMPILER_CLANG36 -# elif __clang_major__ >= 7 -# define GLM_COMPILER GLM_COMPILER_CLANG37 -# endif -# else -# if ((__clang_major__ == 3) && (__clang_minor__ < 4)) || (__clang_major__ < 3) -# error "GLM requires Clang 3.4 or higher" -# elif __clang_major__ == 3 && __clang_minor__ == 4 -# define GLM_COMPILER GLM_COMPILER_CLANG34 -# elif __clang_major__ == 3 && __clang_minor__ == 5 -# define GLM_COMPILER GLM_COMPILER_CLANG35 -# elif __clang_major__ == 3 && __clang_minor__ == 6 -# define GLM_COMPILER GLM_COMPILER_CLANG36 -# elif __clang_major__ == 3 && __clang_minor__ == 7 -# define GLM_COMPILER GLM_COMPILER_CLANG37 -# elif __clang_major__ == 3 && __clang_minor__ == 8 -# define GLM_COMPILER GLM_COMPILER_CLANG38 -# elif __clang_major__ == 3 && __clang_minor__ >= 9 -# define GLM_COMPILER GLM_COMPILER_CLANG39 -# elif __clang_major__ == 4 && __clang_minor__ == 0 -# define GLM_COMPILER GLM_COMPILER_CLANG40 -# elif __clang_major__ == 4 && __clang_minor__ == 1 -# define GLM_COMPILER GLM_COMPILER_CLANG41 -# elif __clang_major__ == 4 && __clang_minor__ >= 2 -# define GLM_COMPILER GLM_COMPILER_CLANG42 -# elif __clang_major__ >= 4 -# define GLM_COMPILER GLM_COMPILER_CLANG42 -# endif -# endif - -// Visual C++ -#elif defined(_MSC_VER) -# if _MSC_VER >= 1920 -# define GLM_COMPILER GLM_COMPILER_VC16 -# elif _MSC_VER >= 1916 -# define GLM_COMPILER GLM_COMPILER_VC15_9 -# elif _MSC_VER >= 1915 -# define GLM_COMPILER GLM_COMPILER_VC15_8 -# elif _MSC_VER >= 1914 -# define GLM_COMPILER GLM_COMPILER_VC15_7 -# elif _MSC_VER >= 1913 -# define GLM_COMPILER GLM_COMPILER_VC15_6 -# elif _MSC_VER >= 1912 -# define GLM_COMPILER GLM_COMPILER_VC15_5 -# elif _MSC_VER >= 1911 -# define GLM_COMPILER GLM_COMPILER_VC15_3 -# elif _MSC_VER >= 1910 -# define GLM_COMPILER GLM_COMPILER_VC15 -# elif _MSC_VER >= 1900 -# define GLM_COMPILER GLM_COMPILER_VC14 -# elif _MSC_VER >= 1800 -# define GLM_COMPILER GLM_COMPILER_VC12 -# elif _MSC_VER < 1800 -# error "GLM requires Visual C++ 12 - 2013 or higher" -# endif//_MSC_VER - -// G++ -#elif defined(__GNUC__) || defined(__MINGW32__) -# if __GNUC__ >= 8 -# define GLM_COMPILER GLM_COMPILER_GCC8 -# elif __GNUC__ >= 7 -# define GLM_COMPILER GLM_COMPILER_GCC7 -# elif __GNUC__ >= 6 -# define GLM_COMPILER GLM_COMPILER_GCC6 -# elif __GNUC__ >= 5 -# define GLM_COMPILER GLM_COMPILER_GCC5 -# elif __GNUC__ == 4 && __GNUC_MINOR__ >= 9 -# define GLM_COMPILER GLM_COMPILER_GCC49 -# elif __GNUC__ == 4 && __GNUC_MINOR__ >= 
8 -# define GLM_COMPILER GLM_COMPILER_GCC48 -# elif __GNUC__ == 4 && __GNUC_MINOR__ >= 7 -# define GLM_COMPILER GLM_COMPILER_GCC47 -# elif __GNUC__ == 4 && __GNUC_MINOR__ >= 6 -# define GLM_COMPILER GLM_COMPILER_GCC46 -# elif ((__GNUC__ == 4) && (__GNUC_MINOR__ < 6)) || (__GNUC__ < 4) -# error "GLM requires GCC 4.6 or higher" -# endif - -#else -# define GLM_COMPILER GLM_COMPILER_UNKNOWN -#endif - -#ifndef GLM_COMPILER -# error "GLM_COMPILER undefined, your compiler may not be supported by GLM. Add #define GLM_COMPILER 0 to ignore this message." -#endif//GLM_COMPILER - -/////////////////////////////////////////////////////////////////////////////////// -// Instruction sets - -// User defines: GLM_FORCE_PURE GLM_FORCE_INTRINSICS GLM_FORCE_SSE2 GLM_FORCE_SSE3 GLM_FORCE_AVX GLM_FORCE_AVX2 GLM_FORCE_AVX2 - -#define GLM_ARCH_MIPS_BIT (0x10000000) -#define GLM_ARCH_PPC_BIT (0x20000000) -#define GLM_ARCH_ARM_BIT (0x40000000) -#define GLM_ARCH_ARMV8_BIT (0x01000000) -#define GLM_ARCH_X86_BIT (0x80000000) - -#define GLM_ARCH_SIMD_BIT (0x00001000) - -#define GLM_ARCH_NEON_BIT (0x00000001) -#define GLM_ARCH_SSE_BIT (0x00000002) -#define GLM_ARCH_SSE2_BIT (0x00000004) -#define GLM_ARCH_SSE3_BIT (0x00000008) -#define GLM_ARCH_SSSE3_BIT (0x00000010) -#define GLM_ARCH_SSE41_BIT (0x00000020) -#define GLM_ARCH_SSE42_BIT (0x00000040) -#define GLM_ARCH_AVX_BIT (0x00000080) -#define GLM_ARCH_AVX2_BIT (0x00000100) - -#define GLM_ARCH_UNKNOWN (0) -#define GLM_ARCH_X86 (GLM_ARCH_X86_BIT) -#define GLM_ARCH_SSE (GLM_ARCH_SSE_BIT | GLM_ARCH_SIMD_BIT | GLM_ARCH_X86) -#define GLM_ARCH_SSE2 (GLM_ARCH_SSE2_BIT | GLM_ARCH_SSE) -#define GLM_ARCH_SSE3 (GLM_ARCH_SSE3_BIT | GLM_ARCH_SSE2) -#define GLM_ARCH_SSSE3 (GLM_ARCH_SSSE3_BIT | GLM_ARCH_SSE3) -#define GLM_ARCH_SSE41 (GLM_ARCH_SSE41_BIT | GLM_ARCH_SSSE3) -#define GLM_ARCH_SSE42 (GLM_ARCH_SSE42_BIT | GLM_ARCH_SSE41) -#define GLM_ARCH_AVX (GLM_ARCH_AVX_BIT | GLM_ARCH_SSE42) -#define GLM_ARCH_AVX2 (GLM_ARCH_AVX2_BIT | GLM_ARCH_AVX) -#define GLM_ARCH_ARM (GLM_ARCH_ARM_BIT) -#define GLM_ARCH_ARMV8 (GLM_ARCH_NEON_BIT | GLM_ARCH_SIMD_BIT | GLM_ARCH_ARM | GLM_ARCH_ARMV8_BIT) -#define GLM_ARCH_NEON (GLM_ARCH_NEON_BIT | GLM_ARCH_SIMD_BIT | GLM_ARCH_ARM) -#define GLM_ARCH_MIPS (GLM_ARCH_MIPS_BIT) -#define GLM_ARCH_PPC (GLM_ARCH_PPC_BIT) - -#if defined(GLM_FORCE_ARCH_UNKNOWN) || defined(GLM_FORCE_PURE) -# define GLM_ARCH GLM_ARCH_UNKNOWN -#elif defined(GLM_FORCE_NEON) -# if __ARM_ARCH >= 8 -# define GLM_ARCH (GLM_ARCH_ARMV8) -# else -# define GLM_ARCH (GLM_ARCH_NEON) -# endif -# define GLM_FORCE_INTRINSICS -#elif defined(GLM_FORCE_AVX2) -# define GLM_ARCH (GLM_ARCH_AVX2) -# define GLM_FORCE_INTRINSICS -#elif defined(GLM_FORCE_AVX) -# define GLM_ARCH (GLM_ARCH_AVX) -# define GLM_FORCE_INTRINSICS -#elif defined(GLM_FORCE_SSE42) -# define GLM_ARCH (GLM_ARCH_SSE42) -# define GLM_FORCE_INTRINSICS -#elif defined(GLM_FORCE_SSE41) -# define GLM_ARCH (GLM_ARCH_SSE41) -# define GLM_FORCE_INTRINSICS -#elif defined(GLM_FORCE_SSSE3) -# define GLM_ARCH (GLM_ARCH_SSSE3) -# define GLM_FORCE_INTRINSICS -#elif defined(GLM_FORCE_SSE3) -# define GLM_ARCH (GLM_ARCH_SSE3) -# define GLM_FORCE_INTRINSICS -#elif defined(GLM_FORCE_SSE2) -# define GLM_ARCH (GLM_ARCH_SSE2) -# define GLM_FORCE_INTRINSICS -#elif defined(GLM_FORCE_SSE) -# define GLM_ARCH (GLM_ARCH_SSE) -# define GLM_FORCE_INTRINSICS -#elif defined(GLM_FORCE_INTRINSICS) && !defined(GLM_FORCE_XYZW_ONLY) -# if defined(__AVX2__) -# define GLM_ARCH (GLM_ARCH_AVX2) -# elif defined(__AVX__) -# define GLM_ARCH (GLM_ARCH_AVX) -# elif defined(__SSE4_2__) 
-# define GLM_ARCH (GLM_ARCH_SSE42) -# elif defined(__SSE4_1__) -# define GLM_ARCH (GLM_ARCH_SSE41) -# elif defined(__SSSE3__) -# define GLM_ARCH (GLM_ARCH_SSSE3) -# elif defined(__SSE3__) -# define GLM_ARCH (GLM_ARCH_SSE3) -# elif defined(__SSE2__) || defined(__x86_64__) || defined(_M_X64) || defined(_M_IX86_FP) -# define GLM_ARCH (GLM_ARCH_SSE2) -# elif defined(__i386__) -# define GLM_ARCH (GLM_ARCH_X86) -# elif defined(__ARM_ARCH) && (__ARM_ARCH >= 8) -# define GLM_ARCH (GLM_ARCH_ARMV8) -# elif defined(__ARM_NEON) -# define GLM_ARCH (GLM_ARCH_ARM | GLM_ARCH_NEON) -# elif defined(__arm__ ) || defined(_M_ARM) -# define GLM_ARCH (GLM_ARCH_ARM) -# elif defined(__mips__ ) -# define GLM_ARCH (GLM_ARCH_MIPS) -# elif defined(__powerpc__ ) || defined(_M_PPC) -# define GLM_ARCH (GLM_ARCH_PPC) -# else -# define GLM_ARCH (GLM_ARCH_UNKNOWN) -# endif -#else -# if defined(__x86_64__) || defined(_M_X64) || defined(_M_IX86) || defined(__i386__) -# define GLM_ARCH (GLM_ARCH_X86) -# elif defined(__arm__) || defined(_M_ARM) -# define GLM_ARCH (GLM_ARCH_ARM) -# elif defined(__powerpc__) || defined(_M_PPC) -# define GLM_ARCH (GLM_ARCH_PPC) -# elif defined(__mips__) -# define GLM_ARCH (GLM_ARCH_MIPS) -# else -# define GLM_ARCH (GLM_ARCH_UNKNOWN) -# endif -#endif - -#if GLM_ARCH & GLM_ARCH_AVX2_BIT -# include -#elif GLM_ARCH & GLM_ARCH_AVX_BIT -# include -#elif GLM_ARCH & GLM_ARCH_SSE42_BIT -# if GLM_COMPILER & GLM_COMPILER_CLANG -# include -# endif -# include -#elif GLM_ARCH & GLM_ARCH_SSE41_BIT -# include -#elif GLM_ARCH & GLM_ARCH_SSSE3_BIT -# include -#elif GLM_ARCH & GLM_ARCH_SSE3_BIT -# include -#elif GLM_ARCH & GLM_ARCH_SSE2_BIT -# include -#elif GLM_ARCH & GLM_ARCH_NEON_BIT -# include "neon.h" -#endif//GLM_ARCH - -#if GLM_ARCH & GLM_ARCH_SSE2_BIT - typedef __m128 glm_f32vec4; - typedef __m128i glm_i32vec4; - typedef __m128i glm_u32vec4; - typedef __m128d glm_f64vec2; - typedef __m128i glm_i64vec2; - typedef __m128i glm_u64vec2; - - typedef glm_f32vec4 glm_vec4; - typedef glm_i32vec4 glm_ivec4; - typedef glm_u32vec4 glm_uvec4; - typedef glm_f64vec2 glm_dvec2; -#endif - -#if GLM_ARCH & GLM_ARCH_AVX_BIT - typedef __m256d glm_f64vec4; - typedef glm_f64vec4 glm_dvec4; -#endif - -#if GLM_ARCH & GLM_ARCH_AVX2_BIT - typedef __m256i glm_i64vec4; - typedef __m256i glm_u64vec4; -#endif - -#if GLM_ARCH & GLM_ARCH_NEON_BIT - typedef float32x4_t glm_f32vec4; - typedef int32x4_t glm_i32vec4; - typedef uint32x4_t glm_u32vec4; -#endif diff --git a/third_party/glm/simd/trigonometric.h b/third_party/glm/simd/trigonometric.h deleted file mode 100755 index 739b796..0000000 --- a/third_party/glm/simd/trigonometric.h +++ /dev/null @@ -1,9 +0,0 @@ -/// @ref simd -/// @file glm/simd/trigonometric.h - -#pragma once - -#if GLM_ARCH & GLM_ARCH_SSE2_BIT - -#endif//GLM_ARCH & GLM_ARCH_SSE2_BIT - diff --git a/third_party/glm/simd/vector_relational.h b/third_party/glm/simd/vector_relational.h deleted file mode 100755 index f7385e9..0000000 --- a/third_party/glm/simd/vector_relational.h +++ /dev/null @@ -1,8 +0,0 @@ -/// @ref simd -/// @file glm/simd/vector_relational.h - -#pragma once - -#if GLM_ARCH & GLM_ARCH_SSE2_BIT - -#endif//GLM_ARCH & GLM_ARCH_SSE2_BIT diff --git a/third_party/glm/trigonometric.hpp b/third_party/glm/trigonometric.hpp deleted file mode 100755 index fcf07f8..0000000 --- a/third_party/glm/trigonometric.hpp +++ /dev/null @@ -1,210 +0,0 @@ -/// @ref core -/// @file glm/trigonometric.hpp -/// -/// @see GLSL 4.20.8 specification, section 8.1 Angle and Trigonometry Functions -/// -/// @defgroup core_func_trigonometric 
Angle and Trigonometry Functions -/// @ingroup core -/// -/// Function parameters specified as angle are assumed to be in units of radians. -/// In no case will any of these functions result in a divide by zero error. If -/// the divisor of a ratio is 0, then results will be undefined. -/// -/// These all operate component-wise. The description is per component. -/// -/// Include to use these core features. -/// -/// @see ext_vector_trigonometric - -#pragma once - -#include "detail/setup.hpp" -#include "detail/qualifier.hpp" - -namespace glm -{ - /// @addtogroup core_func_trigonometric - /// @{ - - /// Converts degrees to radians and returns the result. - /// - /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector - /// @tparam T Floating-point scalar types - /// @tparam Q Value from qualifier enum - /// - /// @see GLSL radians man page - /// @see GLSL 4.20.8 specification, section 8.1 Angle and Trigonometry Functions - template - GLM_FUNC_DECL GLM_CONSTEXPR vec radians(vec const& degrees); - - /// Converts radians to degrees and returns the result. - /// - /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector - /// @tparam T Floating-point scalar types - /// @tparam Q Value from qualifier enum - /// - /// @see GLSL degrees man page - /// @see GLSL 4.20.8 specification, section 8.1 Angle and Trigonometry Functions - template - GLM_FUNC_DECL GLM_CONSTEXPR vec degrees(vec const& radians); - - /// The standard trigonometric sine function. - /// The values returned by this function will range from [-1, 1]. - /// - /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector - /// @tparam T Floating-point scalar types - /// @tparam Q Value from qualifier enum - /// - /// @see GLSL sin man page - /// @see GLSL 4.20.8 specification, section 8.1 Angle and Trigonometry Functions - template - GLM_FUNC_DECL vec sin(vec const& angle); - - /// The standard trigonometric cosine function. - /// The values returned by this function will range from [-1, 1]. - /// - /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector - /// @tparam T Floating-point scalar types - /// @tparam Q Value from qualifier enum - /// - /// @see GLSL cos man page - /// @see GLSL 4.20.8 specification, section 8.1 Angle and Trigonometry Functions - template - GLM_FUNC_DECL vec cos(vec const& angle); - - /// The standard trigonometric tangent function. - /// - /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector - /// @tparam T Floating-point scalar types - /// @tparam Q Value from qualifier enum - /// - /// @see GLSL tan man page - /// @see GLSL 4.20.8 specification, section 8.1 Angle and Trigonometry Functions - template - GLM_FUNC_DECL vec tan(vec const& angle); - - /// Arc sine. Returns an angle whose sine is x. - /// The range of values returned by this function is [-PI/2, PI/2]. - /// Results are undefined if |x| > 1. - /// - /// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector - /// @tparam T Floating-point scalar types - /// @tparam Q Value from qualifier enum - /// - /// @see GLSL asin man page - /// @see GLSL 4.20.8 specification, section 8.1 Angle and Trigonometry Functions - template - GLM_FUNC_DECL vec asin(vec const& x); - - /// Arc cosine. Returns an angle whose sine is x. - /// The range of values returned by this function is [0, PI]. - /// Results are undefined if |x| > 1. 
-	///
-	/// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector
-	/// @tparam T Floating-point scalar types
-	/// @tparam Q Value from qualifier enum
-	///
-	/// @see GLSL acos man page
-	/// @see GLSL 4.20.8 specification, section 8.1 Angle and Trigonometry Functions
-	template<length_t L, typename T, qualifier Q>
-	GLM_FUNC_DECL vec<L, T, Q> acos(vec<L, T, Q> const& x);
-
-	/// Arc tangent. Returns an angle whose tangent is y/x.
-	/// The signs of x and y are used to determine what
-	/// quadrant the angle is in. The range of values returned
-	/// by this function is [-PI, PI]. Results are undefined
-	/// if x and y are both 0.
-	///
-	/// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector
-	/// @tparam T Floating-point scalar types
-	/// @tparam Q Value from qualifier enum
-	///
-	/// @see GLSL atan man page
-	/// @see GLSL 4.20.8 specification, section 8.1 Angle and Trigonometry Functions
-	template<length_t L, typename T, qualifier Q>
-	GLM_FUNC_DECL vec<L, T, Q> atan(vec<L, T, Q> const& y, vec<L, T, Q> const& x);
-
-	/// Arc tangent. Returns an angle whose tangent is y_over_x.
-	/// The range of values returned by this function is [-PI/2, PI/2].
-	///
-	/// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector
-	/// @tparam T Floating-point scalar types
-	/// @tparam Q Value from qualifier enum
-	///
-	/// @see GLSL atan man page
-	/// @see GLSL 4.20.8 specification, section 8.1 Angle and Trigonometry Functions
-	template<length_t L, typename T, qualifier Q>
-	GLM_FUNC_DECL vec<L, T, Q> atan(vec<L, T, Q> const& y_over_x);
-
-	/// Returns the hyperbolic sine function, (exp(x) - exp(-x)) / 2
-	///
-	/// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector
-	/// @tparam T Floating-point scalar types
-	/// @tparam Q Value from qualifier enum
-	///
-	/// @see GLSL sinh man page
-	/// @see GLSL 4.20.8 specification, section 8.1 Angle and Trigonometry Functions
-	template<length_t L, typename T, qualifier Q>
-	GLM_FUNC_DECL vec<L, T, Q> sinh(vec<L, T, Q> const& angle);
-
-	/// Returns the hyperbolic cosine function, (exp(x) + exp(-x)) / 2
-	///
-	/// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector
-	/// @tparam T Floating-point scalar types
-	/// @tparam Q Value from qualifier enum
-	///
-	/// @see GLSL cosh man page
-	/// @see GLSL 4.20.8 specification, section 8.1 Angle and Trigonometry Functions
-	template<length_t L, typename T, qualifier Q>
-	GLM_FUNC_DECL vec<L, T, Q> cosh(vec<L, T, Q> const& angle);
-
-	/// Returns the hyperbolic tangent function, sinh(angle) / cosh(angle)
-	///
-	/// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector
-	/// @tparam T Floating-point scalar types
-	/// @tparam Q Value from qualifier enum
-	///
-	/// @see GLSL tanh man page
-	/// @see GLSL 4.20.8 specification, section 8.1 Angle and Trigonometry Functions
-	template<length_t L, typename T, qualifier Q>
-	GLM_FUNC_DECL vec<L, T, Q> tanh(vec<L, T, Q> const& angle);
-
-	/// Arc hyperbolic sine; returns the inverse of sinh.
-	///
-	/// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector
-	/// @tparam T Floating-point scalar types
-	/// @tparam Q Value from qualifier enum
-	///
-	/// @see GLSL asinh man page
-	/// @see GLSL 4.20.8 specification, section 8.1 Angle and Trigonometry Functions
-	template<length_t L, typename T, qualifier Q>
-	GLM_FUNC_DECL vec<L, T, Q> asinh(vec<L, T, Q> const& x);
-
-	/// Arc hyperbolic cosine; returns the non-negative inverse
-	/// of cosh. Results are undefined if x < 1.
-	///
-	/// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector
-	/// @tparam T Floating-point scalar types
-	/// @tparam Q Value from qualifier enum
-	///
-	/// @see GLSL acosh man page
-	/// @see GLSL 4.20.8 specification, section 8.1 Angle and Trigonometry Functions
-	template<length_t L, typename T, qualifier Q>
-	GLM_FUNC_DECL vec<L, T, Q> acosh(vec<L, T, Q> const& x);
-
-	/// Arc hyperbolic tangent; returns the inverse of tanh.
-	/// Results are undefined if abs(x) >= 1.
-	///
-	/// @tparam L Integer between 1 and 4 included that qualify the dimension of the vector
-	/// @tparam T Floating-point scalar types
-	/// @tparam Q Value from qualifier enum
-	///
-	/// @see GLSL atanh man page
-	/// @see GLSL 4.20.8 specification, section 8.1 Angle and Trigonometry Functions
-	template<length_t L, typename T, qualifier Q>
-	GLM_FUNC_DECL vec<L, T, Q> atanh(vec<L, T, Q> const& x);
-
-	/// @}
-}//namespace glm
-
-#include "detail/func_trigonometric.inl"
diff --git a/third_party/glm/vec2.hpp b/third_party/glm/vec2.hpp
deleted file mode 100755
index be768bf..0000000
--- a/third_party/glm/vec2.hpp
+++ /dev/null
@@ -1,14 +0,0 @@
-/// @ref core
-/// @file glm/vec2.hpp
-
-#pragma once
-#include "./ext/vector_bool2.hpp"
-#include "./ext/vector_bool2_precision.hpp"
-#include "./ext/vector_float2.hpp"
-#include "./ext/vector_float2_precision.hpp"
-#include "./ext/vector_double2.hpp"
-#include "./ext/vector_double2_precision.hpp"
-#include "./ext/vector_int2.hpp"
-#include "./ext/vector_int2_precision.hpp"
-#include "./ext/vector_uint2.hpp"
-#include "./ext/vector_uint2_precision.hpp"
diff --git a/third_party/glm/vec3.hpp b/third_party/glm/vec3.hpp
deleted file mode 100755
index f570722..0000000
--- a/third_party/glm/vec3.hpp
+++ /dev/null
@@ -1,14 +0,0 @@
-/// @ref core
-/// @file glm/vec3.hpp
-
-#pragma once
-#include "./ext/vector_bool3.hpp"
-#include "./ext/vector_bool3_precision.hpp"
-#include "./ext/vector_float3.hpp"
-#include "./ext/vector_float3_precision.hpp"
-#include "./ext/vector_double3.hpp"
-#include "./ext/vector_double3_precision.hpp"
-#include "./ext/vector_int3.hpp"
-#include "./ext/vector_int3_precision.hpp"
-#include "./ext/vector_uint3.hpp"
-#include "./ext/vector_uint3_precision.hpp"
diff --git a/third_party/glm/vec4.hpp b/third_party/glm/vec4.hpp
deleted file mode 100755
index 9117020..0000000
--- a/third_party/glm/vec4.hpp
+++ /dev/null
@@ -1,15 +0,0 @@
-/// @ref core
-/// @file glm/vec4.hpp
-
-#pragma once
-#include "./ext/vector_bool4.hpp"
-#include "./ext/vector_bool4_precision.hpp"
-#include "./ext/vector_float4.hpp"
-#include "./ext/vector_float4_precision.hpp"
-#include "./ext/vector_double4.hpp"
-#include "./ext/vector_double4_precision.hpp"
-#include "./ext/vector_int4.hpp"
-#include "./ext/vector_int4_precision.hpp"
-#include "./ext/vector_uint4.hpp"
-#include "./ext/vector_uint4_precision.hpp"
-
diff --git a/third_party/glm/vector_relational.hpp b/third_party/glm/vector_relational.hpp
deleted file mode 100755
index a0fe17e..0000000
--- a/third_party/glm/vector_relational.hpp
+++ /dev/null
@@ -1,121 +0,0 @@
-/// @ref core
-/// @file glm/vector_relational.hpp
-///
-/// @see GLSL 4.20.8 specification, section 8.7 Vector Relational Functions
-///
-/// @defgroup core_func_vector_relational Vector Relational Functions
-/// @ingroup core
-///
-/// Relational and equality operators (<, <=, >, >=, ==, !=) are defined to
-/// operate on scalars and produce scalar Boolean results. For vector results,
-/// use the following built-in functions.
-///
-/// In all cases, the sizes of all the input and return vectors for any particular
-/// call must match.
-///
-/// Include <glm/vector_relational.hpp> to use these core features.
-///
-/// @see ext_vector_relational
-
-#pragma once
-
-#include "detail/qualifier.hpp"
-#include "detail/setup.hpp"
-
-namespace glm
-{
-	/// @addtogroup core_func_vector_relational
-	/// @{
-
-	/// Returns the component-wise comparison result of x < y.
-	///
-	/// @tparam L An integer between 1 and 4 included that qualify the dimension of the vector.
-	/// @tparam T A floating-point or integer scalar type.
-	///
-	/// @see GLSL lessThan man page
-	/// @see GLSL 4.20.8 specification, section 8.7 Vector Relational Functions
-	template<length_t L, typename T, qualifier Q>
-	GLM_FUNC_DECL GLM_CONSTEXPR vec<L, bool, Q> lessThan(vec<L, T, Q> const& x, vec<L, T, Q> const& y);
-
-	/// Returns the component-wise comparison of result x <= y.
-	///
-	/// @tparam L An integer between 1 and 4 included that qualify the dimension of the vector.
-	/// @tparam T A floating-point or integer scalar type.
-	///
-	/// @see GLSL lessThanEqual man page
-	/// @see GLSL 4.20.8 specification, section 8.7 Vector Relational Functions
-	template<length_t L, typename T, qualifier Q>
-	GLM_FUNC_DECL GLM_CONSTEXPR vec<L, bool, Q> lessThanEqual(vec<L, T, Q> const& x, vec<L, T, Q> const& y);
-
-	/// Returns the component-wise comparison of result x > y.
-	///
-	/// @tparam L An integer between 1 and 4 included that qualify the dimension of the vector.
-	/// @tparam T A floating-point or integer scalar type.
-	///
-	/// @see GLSL greaterThan man page
-	/// @see GLSL 4.20.8 specification, section 8.7 Vector Relational Functions
-	template<length_t L, typename T, qualifier Q>
-	GLM_FUNC_DECL GLM_CONSTEXPR vec<L, bool, Q> greaterThan(vec<L, T, Q> const& x, vec<L, T, Q> const& y);
-
-	/// Returns the component-wise comparison of result x >= y.
-	///
-	/// @tparam L An integer between 1 and 4 included that qualify the dimension of the vector.
-	/// @tparam T A floating-point or integer scalar type.
-	///
-	/// @see GLSL greaterThanEqual man page
-	/// @see GLSL 4.20.8 specification, section 8.7 Vector Relational Functions
-	template<length_t L, typename T, qualifier Q>
-	GLM_FUNC_DECL GLM_CONSTEXPR vec<L, bool, Q> greaterThanEqual(vec<L, T, Q> const& x, vec<L, T, Q> const& y);
-
-	/// Returns the component-wise comparison of result x == y.
-	///
-	/// @tparam L An integer between 1 and 4 included that qualify the dimension of the vector.
-	/// @tparam T A floating-point, integer or bool scalar type.
-	///
-	/// @see GLSL equal man page
-	/// @see GLSL 4.20.8 specification, section 8.7 Vector Relational Functions
-	template<length_t L, typename T, qualifier Q>
-	GLM_FUNC_DECL GLM_CONSTEXPR vec<L, bool, Q> equal(vec<L, T, Q> const& x, vec<L, T, Q> const& y);
-
-	/// Returns the component-wise comparison of result x != y.
-	///
-	/// @tparam L An integer between 1 and 4 included that qualify the dimension of the vector.
-	/// @tparam T A floating-point, integer or bool scalar type.
-	///
-	/// @see GLSL notEqual man page
-	/// @see GLSL 4.20.8 specification, section 8.7 Vector Relational Functions
-	template<length_t L, typename T, qualifier Q>
-	GLM_FUNC_DECL GLM_CONSTEXPR vec<L, bool, Q> notEqual(vec<L, T, Q> const& x, vec<L, T, Q> const& y);
-
-	/// Returns true if any component of x is true.
-	///
-	/// @tparam L An integer between 1 and 4 included that qualify the dimension of the vector.
-	///
-	/// @see GLSL any man page
-	/// @see GLSL 4.20.8 specification, section 8.7 Vector Relational Functions
-	template<length_t L, qualifier Q>
-	GLM_FUNC_DECL GLM_CONSTEXPR bool any(vec<L, bool, Q> const& v);
-
-	/// Returns true if all components of x are true.
-	///
-	/// @tparam L An integer between 1 and 4 included that qualify the dimension of the vector.
-	///
-	/// @see GLSL all man page
-	/// @see GLSL 4.20.8 specification, section 8.7 Vector Relational Functions
-	template<length_t L, qualifier Q>
-	GLM_FUNC_DECL GLM_CONSTEXPR bool all(vec<L, bool, Q> const& v);
-
-	/// Returns the component-wise logical complement of x.
-	/// /!\ Because of language incompatibilities between C++ and GLSL, GLM defines the function not but not_ instead.
-	///
-	/// @tparam L An integer between 1 and 4 included that qualify the dimension of the vector.
-	///
-	/// @see GLSL not man page
-	/// @see GLSL 4.20.8 specification, section 8.7 Vector Relational Functions
-	template<length_t L, qualifier Q>
-	GLM_FUNC_DECL GLM_CONSTEXPR vec<L, bool, Q> not_(vec<L, bool, Q> const& v);
-
-	/// @}
-}//namespace glm
-
-#include "detail/func_vector_relational.inl"
diff --git a/third_party/kvf.h b/third_party/kvf.h
new file mode 100755
index 0000000..a8b6008
--- /dev/null
+++ b/third_party/kvf.h
@@ -0,0 +1,2334 @@
+/***
+ * MIT License
+ *
+ * Copyright (c) 2023-2024 kbz_8
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ *
+ *
+ * Do this:
+ *	#define KVF_IMPLEMENTATION
+ * before you include this file in *one* C or C++ file to create the implementation.
+ *
+ * // i.e. it should look like this:
+ * #include ...
+ * #include ...
+ * #include ...
+ * #define KVF_IMPLEMENTATION
+ * #include "kvf.h"
+ *
+ * You can #define KVF_ASSERT(x) before the #include to avoid using assert.h.
+ * And #define KVF_MALLOC, KVF_REALLOC, and KVF_FREE to avoid using malloc, realloc, free.
+ *
+ * By default KVF exits the program if a call to the Vulkan API fails. You can avoid that
+ * by using #define KVF_NO_EXIT_ON_FAILURE
+ *
+ * If you are using Volk or any other meta loader you must define KVF_IMPL_VK_NO_PROTOTYPES
+ * or VK_NO_PROTOTYPES before including this file to avoid conflicts with Vulkan prototypes.
+ *
+ * You can also #define KVF_ENABLE_VALIDATION_LAYERS to enable validation layers.
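+ *
+ * // A minimal usage sketch (illustration only; everything below except the kvf* calls is
+ * // hypothetical application code, e.g. the extension list and the surface would normally
+ * // come from your windowing library):
+ * //
+ * //   #define KVF_IMPLEMENTATION
+ * //   #include "kvf.h"
+ * //
+ * //   VkInstance instance = kvfCreateInstance(extensions, extensions_count);
+ * //   VkSurfaceKHR surface = ...; // created by the windowing library
+ * //   VkPhysicalDevice gpu = kvfPickGoodDefaultPhysicalDevice(instance, surface);
+ * //   VkDevice device = kvfCreateDefaultDevice(gpu);
+ * //   // ... create the swapchain, command buffers and pipelines with the other kvf* helpers ...
+ * //   kvfDestroyDevice(device);
+ * //   kvfDestroyInstance(instance);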
+ */ + +#ifndef KBZ_8_VULKAN_FRAMEWORK_H +#define KBZ_8_VULKAN_FRAMEWORK_H + +#ifdef KVF_IMPL_VK_NO_PROTOTYPES + #define VK_NO_PROTOTYPES +#endif + +#include + +#include +#include + +/* ============================================= Prototypes ============================================= */ + +#ifdef __cplusplus +extern "C" { +#endif + +typedef enum +{ + KVF_GRAPHICS_QUEUE = 0, + KVF_PRESENT_QUEUE = 1, + KVF_COMPUTE_QUEUE = 2 +} KvfQueueType; + +typedef enum +{ + KVF_IMAGE_COLOR = 0, + KVF_IMAGE_DEPTH = 1, + KVF_IMAGE_DEPTH_ARRAY = 2, + KVF_IMAGE_CUBE = 3, + KVF_IMAGE_OTHER = 4, +} KvfImageType; + +typedef void (*KvfErrorCallback)(const char* message); + +typedef struct KvfGraphicsPipelineBuilder KvfGraphicsPipelineBuilder; + +void kvfSetErrorCallback(KvfErrorCallback callback); +void kvfSetValidationErrorCallback(KvfErrorCallback callback); +void kvfSetValidationWarningCallback(KvfErrorCallback callback); + +void kvfAddLayer(const char* layer); + +VkInstance kvfCreateInstance(const char** extensionsEnabled, uint32_t extensionsCount); +void kvfDestroyInstance(VkInstance instance); + +VkPhysicalDevice kvfPickFirstPhysicalDevice(VkInstance instance); +VkPhysicalDevice kvfPickGoodDefaultPhysicalDevice(VkInstance instance, VkSurfaceKHR surface); +VkPhysicalDevice kvfPickGoodPhysicalDevice(VkInstance instance, VkSurfaceKHR surface, const char** deviceExtensions, uint32_t deviceExtensionsCount); + +VkQueue kvfGetDeviceQueue(VkDevice device, KvfQueueType queue); +uint32_t kvfGetDeviceQueueFamily(VkDevice device, KvfQueueType queue); +bool kvfQueuePresentKHR(VkDevice device, VkSemaphore wait, VkSwapchainKHR swapchain, uint32_t image_index); // return false when the swapchain must be recreated + +VkDevice kvfCreateDefaultDevice(VkPhysicalDevice physical); +VkDevice kvfCreateDevice(VkPhysicalDevice physical, const char** extensions, uint32_t extensions_count); +void kvfDestroyDevice(VkDevice device); + +VkFence kvfCreateFence(VkDevice device); +void kvfWaitForFence(VkDevice device, VkFence fence); +void kvfDestroyFence(VkDevice device, VkFence fence); + +VkSemaphore kvfCreateSemaphore(VkDevice device); +void kvfDestroySemaphore(VkDevice device, VkSemaphore semaphore); + +VkSwapchainKHR kvfCreateSwapchainKHR(VkDevice device, VkPhysicalDevice physical, VkSurfaceKHR surface, VkExtent2D extent, bool tryVsync); +VkFormat kvfGetSwapchainImagesFormat(VkSwapchainKHR swapchain); +uint32_t kvfGetSwapchainImagesCount(VkSwapchainKHR swapchain); +uint32_t kvfGetSwapchainMinImagesCount(VkSwapchainKHR swapchain); +VkExtent2D kvfGetSwapchainImagesSize(VkSwapchainKHR swapchain); +void kvfDestroySwapchainKHR(VkDevice device, VkSwapchainKHR swapchain); + +VkImage kvfCreateImage(VkDevice device, uint32_t width, uint32_t height, VkFormat format, VkImageTiling tiling, VkImageUsageFlags usage); +void kvfImageBufferToBuffer(VkCommandBuffer cmd, VkBuffer dst, VkImage src, size_t size); +void kvfDestroyImage(VkDevice device, VkImage image); +VkImageView kvfCreateImageView(VkDevice device, VkImage image, VkFormat format, VkImageViewType type, VkImageAspectFlags aspect); +void kvfDestroyImageView(VkDevice device, VkImageView image_view); +void kvfTransitionImageLayout(VkDevice device, VkImage image, VkCommandBuffer cmd, VkFormat format, VkImageLayout old_layout, VkImageLayout new_layout, bool is_single_time_cmd_buffer); +VkSampler kvfCreateSampler(VkDevice device, VkFilter filters, VkSamplerAddressMode address_modes, VkSamplerMipmapMode mipmap_mode); +void kvfDestroySampler(VkDevice device, VkSampler sampler); + +VkBuffer 
kvfCreateBuffer(VkDevice device, VkBufferUsageFlags usage, VkDeviceSize size); +void kvfCopyBufferToBuffer(VkCommandBuffer cmd, VkBuffer dst, VkBuffer src, size_t size); +void kvfCopyBufferToImage(VkCommandBuffer cmd, VkImage dst, VkBuffer src, size_t buffer_offset, VkImageAspectFlagBits aspect, VkExtent3D extent); +void kvfDestroyBuffer(VkDevice device, VkBuffer buffer); + +VkFramebuffer kvfCreateFramebuffer(VkDevice device, VkRenderPass renderpass, VkImageView* image_views, size_t image_views_count, VkExtent2D extent); +VkExtent2D kvfGetFramebufferSize(VkFramebuffer buffer); +void kvfDestroyFramebuffer(VkDevice device, VkFramebuffer framebuffer); + +VkCommandBuffer kvfCreateCommandBuffer(VkDevice device); +VkCommandBuffer kvfCreateCommandBufferLeveled(VkDevice device, VkCommandBufferLevel level); +void kvfBeginCommandBuffer(VkCommandBuffer buffer, VkCommandBufferUsageFlags flags); +void kvfEndCommandBuffer(VkCommandBuffer buffer); +void kvfSubmitCommandBuffer(VkDevice device, VkCommandBuffer buffer, KvfQueueType queue, VkSemaphore signal, VkSemaphore wait, VkFence fence, VkPipelineStageFlags* stages); +void kvfSubmitSingleTimeCommandBuffer(VkDevice device, VkCommandBuffer buffer, KvfQueueType queue, VkFence fence); + +VkAttachmentDescription kvfBuildAttachmentDescription(KvfImageType type, VkFormat format, VkImageLayout initial, VkImageLayout final, bool clear); +VkAttachmentDescription kvfBuildSwapchainAttachmentDescription(VkSwapchainKHR swapchain, bool clear); + +VkRenderPass kvfCreateRenderPass(VkDevice device, VkAttachmentDescription* attachments, size_t attachments_count, VkPipelineBindPoint bind_point); +void kvfDestroyRenderPass(VkDevice device, VkRenderPass renderpass); +void kvfBeginRenderPass(VkRenderPass pass, VkCommandBuffer cmd, VkFramebuffer framebuffer, VkExtent2D framebuffer_extent, VkClearValue* clears, size_t clears_count); + +VkShaderModule kvfCreateShaderModule(VkDevice device, uint32_t* code, size_t size); +void kvfDestroyShaderModule(VkDevice device, VkShaderModule shader); + +const char* kvfVerbaliseVkResult(VkResult result); + +bool kvfIsStencilFormat(VkFormat format); +bool kvfIsDepthFormat(VkFormat format); +uint32_t kvfFormatSize(VkFormat format); +VkPipelineStageFlags kvfLayoutToAccessMask(VkImageLayout layout, bool is_destination); +VkPipelineStageFlags kvfAccessFlagsToPipelineStage(VkAccessFlags access_flags, VkPipelineStageFlags stage_flags); +VkFormat kvfFindSupportFormatInCandidates(VkDevice device, VkFormat* candidates, size_t candidates_count, VkImageTiling tiling, VkFormatFeatureFlags flags); + +VkDescriptorSetLayout kvfCreateDescriptorSetLayout(VkDevice device, VkDescriptorSetLayoutBinding* bindings, size_t bindings_count); +void kvfDestroyDescriptorSetLayout(VkDevice device, VkDescriptorSetLayout layout); + +VkDescriptorSet kvfAllocateDescriptorSet(VkDevice device, VkDescriptorSetLayout layout); +void kvfUpdateStorageBufferToDescriptorSet(VkDevice device, VkDescriptorSet set, const VkDescriptorBufferInfo* info, uint32_t binding); +void kvfUpdateUniformBufferToDescriptorSet(VkDevice device, VkDescriptorSet set, const VkDescriptorBufferInfo* info, uint32_t binding); +void kvfUpdateImageToDescriptorSet(VkDevice device, VkDescriptorSet set, const VkDescriptorImageInfo* info, uint32_t binding); +VkWriteDescriptorSet kvfWriteStorageBufferToDescriptorSet(VkDevice device, VkDescriptorSet set, const VkDescriptorBufferInfo* info, uint32_t binding); +VkWriteDescriptorSet kvfWriteUniformBufferToDescriptorSet(VkDevice device, VkDescriptorSet set, const 
VkDescriptorBufferInfo* info, uint32_t binding); +VkWriteDescriptorSet kvfWriteImageToDescriptorSet(VkDevice device, VkDescriptorSet set, const VkDescriptorImageInfo* info, uint32_t binding); + +void kvfResetDeviceDescriptorPools(VkDevice device); + +VkPipelineLayout kvfCreatePipelineLayout(VkDevice device, VkDescriptorSetLayout* set_layouts, size_t set_layouts_count, VkPushConstantRange* pc, size_t pc_count); +void kvfDestroyPipelineLayout(VkDevice device, VkPipelineLayout layout); + +KvfGraphicsPipelineBuilder* kvfCreateGPipelineBuilder(); +void kvfDestroyGPipelineBuilder(KvfGraphicsPipelineBuilder* builder); + +void kvfGPipelineBuilderReset(KvfGraphicsPipelineBuilder* builder); +void kvfGPipelineBuilderSetInputTopology(KvfGraphicsPipelineBuilder* builder, VkPrimitiveTopology topology); +void kvfGPipelineBuilderSetPolygonMode(KvfGraphicsPipelineBuilder* builder, VkPolygonMode polygon, float line_width); +void kvfGPipelineBuilderSetCullMode(KvfGraphicsPipelineBuilder* builder, VkCullModeFlags cull, VkFrontFace face); +void kvfGPipelineBuilderDisableBlending(KvfGraphicsPipelineBuilder* builder); +void kvfGPipelineBuilderEnableAdditiveBlending(KvfGraphicsPipelineBuilder* builder); +void kvfGPipelineBuilderEnableAlphaBlending(KvfGraphicsPipelineBuilder* builder); +void kvfGPipelineBuilderEnableDepthTest(KvfGraphicsPipelineBuilder* builder, VkCompareOp op, bool write_enabled); +void kvfGPipelineBuilderDisableDepthTest(KvfGraphicsPipelineBuilder* builder); +void kvfGPipelineBuilderSetVertexInputs(KvfGraphicsPipelineBuilder* builder, VkVertexInputBindingDescription binds, VkVertexInputAttributeDescription* attributes, size_t attributes_count); +void kvfGPipelineBuilderAddShaderStage(KvfGraphicsPipelineBuilder* builder, VkShaderStageFlagBits stage, VkShaderModule module, const char* entry); +void kvfGPipelineBuilderResetShaderStages(KvfGraphicsPipelineBuilder* builder); + +VkPipeline kvfCreateGraphicsPipeline(VkDevice device, VkPipelineLayout layout, KvfGraphicsPipelineBuilder* builder, VkRenderPass pass); +void kvfDestroyPipeline(VkDevice device, VkPipeline pipeline); + +#ifdef __cplusplus +} +#endif + +#endif // KBZ_8_VULKAN_FRAMEWORK_H + +/* ========================================== Implementation =========================================== */ + +#ifdef KVF_IMPLEMENTATION + +#ifndef KVF_MALLOC + #define KVF_MALLOC(x) malloc(x) +#endif +#ifndef KVF_REALLOC + #define KVF_REALLOC(x, s) realloc(x, s) +#endif +#ifndef KVF_FREE + #define KVF_FREE(x) free(x) +#endif +#ifndef KVF_ASSERT + #include + #define KVF_ASSERT(x) assert(x) +#endif + +#include +#include +#include + +#ifdef KVF_DESCRIPTOR_POOL_CAPACITY + #undef KVF_DESCRIPTOR_POOL_CAPACITY +#endif +#define KVF_DESCRIPTOR_POOL_CAPACITY 512 + +typedef struct +{ + int32_t graphics; + int32_t present; + int32_t compute; +} __KvfQueueFamilies; + +typedef struct +{ + VkDescriptorPool pool; + size_t capacity; + size_t size; +} __KvfDescriptorPool; + +typedef struct +{ + VkDevice device; + VkPhysicalDevice physical; + VkCommandPool cmd_pool; + __KvfQueueFamilies queues; + __KvfDescriptorPool* sets_pools; + size_t sets_pools_size; +} __KvfDevice; + +typedef struct +{ + VkSurfaceCapabilitiesKHR capabilities; + VkSurfaceFormatKHR* formats; + VkPresentModeKHR* presentModes; + uint32_t formatsCount; + uint32_t presentModesCount; +} __KvfSwapchainSupportInternal; + +typedef struct +{ + __KvfSwapchainSupportInternal support; + VkSwapchainKHR swapchain; + VkExtent2D images_extent; + VkFormat images_format; + uint32_t images_count; +} __KvfSwapchain; + 
+typedef struct +{ + VkFramebuffer framebuffer; + VkExtent2D extent; +} __KvfFramebuffer; + +struct KvfGraphicsPipelineBuilder +{ + VkPipelineShaderStageCreateInfo* shader_stages; + VkPipelineVertexInputStateCreateInfo vertex_input_state; + VkPipelineInputAssemblyStateCreateInfo input_assembly_state; + VkPipelineTessellationStateCreateInfo tessellation_state; + VkPipelineRasterizationStateCreateInfo rasterization_state; + VkPipelineDepthStencilStateCreateInfo depth_stencil_state; + VkPipelineColorBlendAttachmentState color_blend_attachment_state; + size_t shader_stages_count; +}; + +// Dynamic arrays +__KvfDevice* __kvf_internal_devices = NULL; +size_t __kvf_internal_devices_size = 0; +size_t __kvf_internal_devices_capacity = 0; + +__KvfSwapchain* __kvf_internal_swapchains = NULL; +size_t __kvf_internal_swapchains_size = 0; +size_t __kvf_internal_swapchains_capacity = 0; + +__KvfFramebuffer* __kvf_internal_framebuffers = NULL; +size_t __kvf_internal_framebuffers_size = 0; +size_t __kvf_internal_framebuffers_capacity = 0; + +#ifdef KVF_ENABLE_VALIDATION_LAYERS + VkDebugUtilsMessengerEXT __kvf_debug_messenger = VK_NULL_HANDLE; + char** __kvf_extra_layers = NULL; + size_t __kvf_extra_layers_count = 0; +#endif + +KvfErrorCallback __kvf_error_callback = NULL; +KvfErrorCallback __kvf_validation_error_callback = NULL; +KvfErrorCallback __kvf_validation_warning_callback = NULL; + +void __kvfCheckVk(VkResult result, const char* function) +{ + if(result != VK_SUCCESS) + { + if(__kvf_error_callback != NULL) + { + char buffer[1024]; + snprintf(buffer, 1024, "KVF Vulkan error in '%s': %s", function, kvfVerbaliseVkResult(result)); + __kvf_error_callback(buffer); + return; + } + fprintf(stderr, "KVF Vulkan error in '%s': %s\n", function, kvfVerbaliseVkResult(result)); + #ifndef KVF_NO_EXIT_ON_FAILURE + exit(EXIT_FAILURE); + #endif + } +} + +#undef __kvfCheckVk +#define __kvfCheckVk(res) __kvfCheckVk(res, __FUNCTION__) + +void __kvfAddDeviceToArray(VkPhysicalDevice device, int32_t graphics_queue, int32_t present_queue) +{ + KVF_ASSERT(device != VK_NULL_HANDLE); + if(__kvf_internal_devices_size == __kvf_internal_devices_capacity) + { + // Resize the dynamic array if necessary + __kvf_internal_devices_capacity += 2; + __kvf_internal_devices = (__KvfDevice*)KVF_REALLOC(__kvf_internal_devices, __kvf_internal_devices_capacity * sizeof(__KvfDevice)); + } + + __kvf_internal_devices[__kvf_internal_devices_size].physical = device; + __kvf_internal_devices[__kvf_internal_devices_size].queues.graphics = graphics_queue; + __kvf_internal_devices[__kvf_internal_devices_size].queues.present = present_queue; + __kvf_internal_devices_size++; +} + +void __kvfCompleteDevice(VkPhysicalDevice physical, VkDevice device) +{ + KVF_ASSERT(device != VK_NULL_HANDLE); + KVF_ASSERT(physical != VK_NULL_HANDLE); + + __KvfDevice* kvf_device = NULL; + + for(size_t i = 0; i < __kvf_internal_devices_size; i++) + { + if(__kvf_internal_devices[i].physical == physical) + kvf_device = &__kvf_internal_devices[i]; + } + + KVF_ASSERT(kvf_device != NULL); + + VkCommandPool pool; + VkCommandPoolCreateInfo pool_info = {}; + pool_info.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO; + pool_info.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT; + pool_info.queueFamilyIndex = kvf_device->queues.graphics; + __kvfCheckVk(vkCreateCommandPool(device, &pool_info, NULL, &pool)); + + kvf_device->device = device; + kvf_device->cmd_pool = pool; + kvf_device->sets_pools = NULL; + kvf_device->sets_pools_size = 0; +} + +void 
__kvfDestroyDescriptorPools(VkDevice device); + +void __kvfDestroyDevice(VkDevice device) +{ + KVF_ASSERT(device != VK_NULL_HANDLE); + for(size_t i = 0; i < __kvf_internal_devices_size; i++) + { + if(__kvf_internal_devices[i].device == device) + { + vkDestroyCommandPool(device, __kvf_internal_devices[i].cmd_pool, NULL); + __kvfDestroyDescriptorPools(device); + vkDestroyDevice(device, NULL); + // Shift the elements to fill the gap + for(size_t j = i; j < __kvf_internal_devices_size - 1; j++) + __kvf_internal_devices[j] = __kvf_internal_devices[j + 1]; + __kvf_internal_devices_size--; + if(__kvf_internal_devices_size == 0) + { + KVF_FREE(__kvf_internal_devices); + __kvf_internal_devices_capacity = 0; + } + return; + } + } +} + +__KvfDevice* __kvfGetKvfDeviceFromVkPhysicalDevice(VkPhysicalDevice device) +{ + KVF_ASSERT(device != VK_NULL_HANDLE); + for(size_t i = 0; i < __kvf_internal_devices_size; i++) + { + if(__kvf_internal_devices[i].physical == device) + return &__kvf_internal_devices[i]; + } + return NULL; +} + +__KvfDevice* __kvfGetKvfDeviceFromVkDevice(VkDevice device) +{ + KVF_ASSERT(device != VK_NULL_HANDLE); + for(size_t i = 0; i < __kvf_internal_devices_size; i++) + { + if(__kvf_internal_devices[i].device == device) + return &__kvf_internal_devices[i]; + } + return NULL; +} + +void __kvfAddSwapchainToArray(VkSwapchainKHR swapchain, __KvfSwapchainSupportInternal support, VkFormat format, uint32_t images_count, VkExtent2D extent) +{ + KVF_ASSERT(swapchain != VK_NULL_HANDLE); + if(__kvf_internal_swapchains_size == __kvf_internal_swapchains_capacity) + { + // Resize the dynamic array if necessary + __kvf_internal_swapchains_capacity += 2; + __kvf_internal_swapchains = (__KvfSwapchain*)KVF_REALLOC(__kvf_internal_swapchains, __kvf_internal_swapchains_capacity * sizeof(__KvfSwapchain)); + } + + __kvf_internal_swapchains[__kvf_internal_swapchains_size].swapchain = swapchain; + __kvf_internal_swapchains[__kvf_internal_swapchains_size].support = support; + __kvf_internal_swapchains[__kvf_internal_swapchains_size].images_format = format; + __kvf_internal_swapchains[__kvf_internal_swapchains_size].images_count = images_count; + __kvf_internal_swapchains[__kvf_internal_swapchains_size].images_extent = extent; + __kvf_internal_swapchains_size++; +} + +void __kvfDestroySwapchain(VkDevice device, VkSwapchainKHR swapchain) +{ + KVF_ASSERT(swapchain != VK_NULL_HANDLE); + KVF_ASSERT(device != VK_NULL_HANDLE); + + for(size_t i = 0; i < __kvf_internal_swapchains_size; i++) + { + if(__kvf_internal_swapchains[i].swapchain == swapchain) + { + vkDestroySwapchainKHR(device, swapchain, NULL); + // Shift the elements to fill the gap + for(size_t j = i; j < __kvf_internal_swapchains_size - 1; j++) + __kvf_internal_swapchains[j] = __kvf_internal_swapchains[j + 1]; + __kvf_internal_swapchains_size--; + if(__kvf_internal_swapchains_size == 0) + { + KVF_FREE(__kvf_internal_swapchains); + __kvf_internal_swapchains_capacity = 0; + } + return; + } + } +} + +__KvfSwapchain* __kvfGetKvfSwapchainFromVkSwapchainKHR(VkSwapchainKHR swapchain) +{ + KVF_ASSERT(swapchain != VK_NULL_HANDLE); + for(size_t i = 0; i < __kvf_internal_swapchains_size; i++) + { + if(__kvf_internal_swapchains[i].swapchain == swapchain) + return &__kvf_internal_swapchains[i]; + } + return NULL; +} + +void __kvfAddFramebufferToArray(VkFramebuffer framebuffer, VkExtent2D extent) +{ + KVF_ASSERT(framebuffer != VK_NULL_HANDLE); + if(__kvf_internal_framebuffers_size == __kvf_internal_framebuffers_capacity) + { + // Resize the dynamic array if necessary + 
__kvf_internal_framebuffers_capacity += 2; + __kvf_internal_framebuffers = (__KvfFramebuffer*)KVF_REALLOC(__kvf_internal_framebuffers, __kvf_internal_framebuffers_capacity * sizeof(__KvfFramebuffer)); + } + + __kvf_internal_framebuffers[__kvf_internal_framebuffers_size].framebuffer = framebuffer; + __kvf_internal_framebuffers[__kvf_internal_framebuffers_size].extent = extent; + __kvf_internal_framebuffers_size++; +} + +void __kvfDestroyFramebuffer(VkDevice device, VkFramebuffer framebuffer) +{ + KVF_ASSERT(framebuffer != VK_NULL_HANDLE); + KVF_ASSERT(device != VK_NULL_HANDLE); + + for(size_t i = 0; i < __kvf_internal_framebuffers_size; i++) + { + if(__kvf_internal_framebuffers[i].framebuffer == framebuffer) + { + vkDestroyFramebuffer(device, framebuffer, NULL); + // Shift the elements to fill the gap + for(size_t j = i; j < __kvf_internal_framebuffers_size - 1; j++) + __kvf_internal_framebuffers[j] = __kvf_internal_framebuffers[j + 1]; + __kvf_internal_framebuffers_size--; + if(__kvf_internal_framebuffers_size == 0) + { + KVF_FREE(__kvf_internal_framebuffers); + __kvf_internal_framebuffers_capacity = 0; + } + return; + } + } +} + +__KvfFramebuffer* __kvfGetKvfSwapchainFromVkFramebuffer(VkFramebuffer framebuffer) +{ + KVF_ASSERT(framebuffer != VK_NULL_HANDLE); + for(size_t i = 0; i < __kvf_internal_framebuffers_size; i++) + { + if(__kvf_internal_framebuffers[i].framebuffer == framebuffer) + return &__kvf_internal_framebuffers[i]; + } + return NULL; +} + +VkDescriptorPool __kvfDeviceCreateDescriptorPool(VkDevice device) +{ + KVF_ASSERT(device != VK_NULL_HANDLE); + __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); + KVF_ASSERT(kvf_device != NULL); + kvf_device->sets_pools_size++; + kvf_device->sets_pools = (__KvfDescriptorPool*)KVF_REALLOC(kvf_device->sets_pools, kvf_device->sets_pools_size * sizeof(__KvfDescriptorPool)); + memset(&kvf_device->sets_pools[kvf_device->sets_pools_size - 1], 0, sizeof(__KvfDescriptorPool)); + + VkDescriptorPoolSize pool_sizes[] = { + { VK_DESCRIPTOR_TYPE_SAMPLER, 1024 }, + { VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 1024 }, + { VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, 1024 }, + { VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, 1024 }, + { VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER, 1024 }, + { VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER, 1024 }, + { VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1024 }, + { VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, 1024 }, + { VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC, 1024 }, + { VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC, 1024 }, + { VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, 1024 } + }; + + VkDescriptorPoolCreateInfo pool_info = {}; + pool_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO; + pool_info.poolSizeCount = sizeof(pool_sizes) / sizeof(VkDescriptorPoolSize); + pool_info.pPoolSizes = pool_sizes; + pool_info.maxSets = KVF_DESCRIPTOR_POOL_CAPACITY; + pool_info.flags = VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT; + + __kvfCheckVk(vkCreateDescriptorPool(device, &pool_info, NULL, &kvf_device->sets_pools[kvf_device->sets_pools_size - 1].pool)); + kvf_device->sets_pools[kvf_device->sets_pools_size - 1].capacity = KVF_DESCRIPTOR_POOL_CAPACITY; + return kvf_device->sets_pools[kvf_device->sets_pools_size - 1].pool; +} + +void __kvfDestroyDescriptorPools(VkDevice device) +{ + KVF_ASSERT(device != VK_NULL_HANDLE); + __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); + KVF_ASSERT(kvf_device != NULL); + + for(size_t i = 0; i < kvf_device->sets_pools_size; i++) + vkDestroyDescriptorPool(device, kvf_device->sets_pools[i].pool, NULL); + 
KVF_FREE(kvf_device->sets_pools); + kvf_device->sets_pools_size = 0; +} + +void kvfSetErrorCallback(KvfErrorCallback callback) +{ + __kvf_error_callback = callback; +} + +void kvfSetValidationErrorCallback(KvfErrorCallback callback) +{ + __kvf_validation_error_callback = callback; +} + +void kvfSetValidationWarningCallback(KvfErrorCallback callback) +{ + __kvf_validation_warning_callback = callback; +} + +bool kvfIsStencilFormat(VkFormat format) +{ + switch(format) + { + case VK_FORMAT_D32_SFLOAT_S8_UINT: + case VK_FORMAT_D24_UNORM_S8_UINT: + return true; + + default: return false; + } +} + +bool kvfIsDepthFormat(VkFormat format) +{ + switch(format) + { + case VK_FORMAT_D16_UNORM: + case VK_FORMAT_D32_SFLOAT: + case VK_FORMAT_D32_SFLOAT_S8_UINT: + case VK_FORMAT_D24_UNORM_S8_UINT: + case VK_FORMAT_D16_UNORM_S8_UINT: + return true; + + default: return false; + } +} + +VkPipelineStageFlags kvfLayoutToAccessMask(VkImageLayout layout, bool is_destination) +{ + VkPipelineStageFlags access_mask = 0; + + switch(layout) + { + case VK_IMAGE_LAYOUT_UNDEFINED: + if(is_destination) + KVF_ASSERT(false && "Vulkan : the new layout used in a transition must not be VK_IMAGE_LAYOUT_UNDEFINED"); + break; + case VK_IMAGE_LAYOUT_GENERAL: access_mask = VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT; break; + case VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL: access_mask = VK_ACCESS_COLOR_ATTACHMENT_READ_BIT | VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT; break; + case VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL: access_mask = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT | VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT; break; + case VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL: + access_mask = VK_ACCESS_SHADER_READ_BIT; // VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT; + break; + case VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL: access_mask = VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_INPUT_ATTACHMENT_READ_BIT; break; + case VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL: access_mask = VK_ACCESS_TRANSFER_READ_BIT; break; + case VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL: access_mask = VK_ACCESS_TRANSFER_WRITE_BIT; break; + case VK_IMAGE_LAYOUT_PREINITIALIZED: + if(!is_destination) + access_mask = VK_ACCESS_HOST_WRITE_BIT; + else + KVF_ASSERT(false && "Vulkan : the new layout used in a transition must not be VK_IMAGE_LAYOUT_PREINITIALIZED"); + break; + case VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL: access_mask = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT; break; + case VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL: access_mask = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT; break; + case VK_IMAGE_LAYOUT_PRESENT_SRC_KHR: access_mask = VK_ACCESS_MEMORY_READ_BIT; break; + + default: KVF_ASSERT(false && "Vulkan : unexpected image layout"); break; + } + + return access_mask; +} + +VkPipelineStageFlags kvfAccessFlagsToPipelineStage(VkAccessFlags access_flags, VkPipelineStageFlags stage_flags) +{ + VkPipelineStageFlags stages = 0; + + while(access_flags != 0) + { + VkAccessFlagBits _access_flag = (VkAccessFlagBits)(access_flags & (~(access_flags - 1))); + if(_access_flag == 0 || (_access_flag & (_access_flag - 1)) != 0) + KVF_ASSERT(false && "Vulkan : an error has been caught during access flag to pipeline stage operation"); + access_flags &= ~_access_flag; + + switch(_access_flag) + { + case VK_ACCESS_INDIRECT_COMMAND_READ_BIT: stages |= VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT; break; + case VK_ACCESS_INDEX_READ_BIT: stages |= VK_PIPELINE_STAGE_VERTEX_INPUT_BIT; break; + case VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT: 
stages |= VK_PIPELINE_STAGE_VERTEX_INPUT_BIT; break; + case VK_ACCESS_UNIFORM_READ_BIT: stages |= stage_flags | VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT; break; + case VK_ACCESS_INPUT_ATTACHMENT_READ_BIT: stages |= VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT; break; + case VK_ACCESS_SHADER_READ_BIT: stages |= stage_flags | VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT; break; + case VK_ACCESS_SHADER_WRITE_BIT: stages |= stage_flags | VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT; break; + case VK_ACCESS_COLOR_ATTACHMENT_READ_BIT: stages |= VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT; break; + case VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT: stages |= VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT; break; + case VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT: stages |= VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT | VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT; break; + case VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT: stages |= VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT | VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT; break; + case VK_ACCESS_TRANSFER_READ_BIT: stages |= VK_PIPELINE_STAGE_TRANSFER_BIT; break; + case VK_ACCESS_TRANSFER_WRITE_BIT: stages |= VK_PIPELINE_STAGE_TRANSFER_BIT; break; + case VK_ACCESS_HOST_READ_BIT: stages |= VK_PIPELINE_STAGE_HOST_BIT; break; + case VK_ACCESS_HOST_WRITE_BIT: stages |= VK_PIPELINE_STAGE_HOST_BIT; break; + case VK_ACCESS_MEMORY_READ_BIT: break; + case VK_ACCESS_MEMORY_WRITE_BIT: break; + + default: KVF_ASSERT(false && "Vulkan : unknown access flag"); break; + } + } + return stages; +} + +VkFormat kvfFindSupportFormatInCandidates(VkDevice device, VkFormat* candidates, size_t candidates_count, VkImageTiling tiling, VkFormatFeatureFlags flags) +{ + KVF_ASSERT(device != VK_NULL_HANDLE); + __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); + KVF_ASSERT(kvf_device != NULL); + for(size_t i = 0; i < candidates_count; i++) + { + VkFormatProperties props; + vkGetPhysicalDeviceFormatProperties(kvf_device->physical, candidates[i], &props); + if(tiling == VK_IMAGE_TILING_LINEAR && (props.linearTilingFeatures & flags) == flags) + return candidates[i]; + else if(tiling == VK_IMAGE_TILING_OPTIMAL && (props.optimalTilingFeatures & flags) == flags) + return candidates[i]; + } + + KVF_ASSERT(false && "Vulkan : failed to find image format"); + return VK_FORMAT_R8G8B8A8_SRGB; // just to avoir warning +} + +uint32_t kvfFormatSize(VkFormat format) +{ + switch(format) + { + case VK_FORMAT_UNDEFINED: return 0; + case VK_FORMAT_R4G4_UNORM_PACK8: return 1; + case VK_FORMAT_R4G4B4A4_UNORM_PACK16: return 2; + case VK_FORMAT_B4G4R4A4_UNORM_PACK16: return 2; + case VK_FORMAT_R5G6B5_UNORM_PACK16: return 2; + case VK_FORMAT_B5G6R5_UNORM_PACK16: return 2; + case VK_FORMAT_R5G5B5A1_UNORM_PACK16: return 2; + case VK_FORMAT_B5G5R5A1_UNORM_PACK16: return 2; + case VK_FORMAT_A1R5G5B5_UNORM_PACK16: return 2; + case VK_FORMAT_R8_UNORM: return 1; + case VK_FORMAT_R8_SNORM: return 1; + case VK_FORMAT_R8_USCALED: return 1; + case VK_FORMAT_R8_SSCALED: return 1; + case VK_FORMAT_R8_UINT: return 1; + case VK_FORMAT_R8_SINT: return 1; + case VK_FORMAT_R8_SRGB: return 1; + case VK_FORMAT_R8G8_UNORM: return 2; + case VK_FORMAT_R8G8_SNORM: return 2; + case VK_FORMAT_R8G8_USCALED: return 2; + case VK_FORMAT_R8G8_SSCALED: return 2; + case VK_FORMAT_R8G8_UINT: return 2; + case VK_FORMAT_R8G8_SINT: return 2; + case VK_FORMAT_R8G8_SRGB: return 2; + case VK_FORMAT_R8G8B8_UNORM: return 3; + case VK_FORMAT_R8G8B8_SNORM: return 3; + case VK_FORMAT_R8G8B8_USCALED: return 3; + case VK_FORMAT_R8G8B8_SSCALED: return 3; + case 
VK_FORMAT_R8G8B8_UINT: return 3; + case VK_FORMAT_R8G8B8_SINT: return 3; + case VK_FORMAT_R8G8B8_SRGB: return 3; + case VK_FORMAT_B8G8R8_UNORM: return 3; + case VK_FORMAT_B8G8R8_SNORM: return 3; + case VK_FORMAT_B8G8R8_USCALED: return 3; + case VK_FORMAT_B8G8R8_SSCALED: return 3; + case VK_FORMAT_B8G8R8_UINT: return 3; + case VK_FORMAT_B8G8R8_SINT: return 3; + case VK_FORMAT_B8G8R8_SRGB: return 3; + case VK_FORMAT_R8G8B8A8_UNORM: return 4; + case VK_FORMAT_R8G8B8A8_SNORM: return 4; + case VK_FORMAT_R8G8B8A8_USCALED: return 4; + case VK_FORMAT_R8G8B8A8_SSCALED: return 4; + case VK_FORMAT_R8G8B8A8_UINT: return 4; + case VK_FORMAT_R8G8B8A8_SINT: return 4; + case VK_FORMAT_R8G8B8A8_SRGB: return 4; + case VK_FORMAT_B8G8R8A8_UNORM: return 4; + case VK_FORMAT_B8G8R8A8_SNORM: return 4; + case VK_FORMAT_B8G8R8A8_USCALED: return 4; + case VK_FORMAT_B8G8R8A8_SSCALED: return 4; + case VK_FORMAT_B8G8R8A8_UINT: return 4; + case VK_FORMAT_B8G8R8A8_SINT: return 4; + case VK_FORMAT_B8G8R8A8_SRGB: return 4; + case VK_FORMAT_A8B8G8R8_UNORM_PACK32: return 4; + case VK_FORMAT_A8B8G8R8_SNORM_PACK32: return 4; + case VK_FORMAT_A8B8G8R8_USCALED_PACK32: return 4; + case VK_FORMAT_A8B8G8R8_SSCALED_PACK32: return 4; + case VK_FORMAT_A8B8G8R8_UINT_PACK32: return 4; + case VK_FORMAT_A8B8G8R8_SINT_PACK32: return 4; + case VK_FORMAT_A8B8G8R8_SRGB_PACK32: return 4; + case VK_FORMAT_A2R10G10B10_UNORM_PACK32: return 4; + case VK_FORMAT_A2R10G10B10_SNORM_PACK32: return 4; + case VK_FORMAT_A2R10G10B10_USCALED_PACK32: return 4; + case VK_FORMAT_A2R10G10B10_SSCALED_PACK32: return 4; + case VK_FORMAT_A2R10G10B10_UINT_PACK32: return 4; + case VK_FORMAT_A2R10G10B10_SINT_PACK32: return 4; + case VK_FORMAT_A2B10G10R10_UNORM_PACK32: return 4; + case VK_FORMAT_A2B10G10R10_SNORM_PACK32: return 4; + case VK_FORMAT_A2B10G10R10_USCALED_PACK32: return 4; + case VK_FORMAT_A2B10G10R10_SSCALED_PACK32: return 4; + case VK_FORMAT_A2B10G10R10_UINT_PACK32: return 4; + case VK_FORMAT_A2B10G10R10_SINT_PACK32: return 4; + case VK_FORMAT_R16_UNORM: return 2; + case VK_FORMAT_R16_SNORM: return 2; + case VK_FORMAT_R16_USCALED: return 2; + case VK_FORMAT_R16_SSCALED: return 2; + case VK_FORMAT_R16_UINT: return 2; + case VK_FORMAT_R16_SINT: return 2; + case VK_FORMAT_R16_SFLOAT: return 2; + case VK_FORMAT_R16G16_UNORM: return 4; + case VK_FORMAT_R16G16_SNORM: return 4; + case VK_FORMAT_R16G16_USCALED: return 4; + case VK_FORMAT_R16G16_SSCALED: return 4; + case VK_FORMAT_R16G16_UINT: return 4; + case VK_FORMAT_R16G16_SINT: return 4; + case VK_FORMAT_R16G16_SFLOAT: return 4; + case VK_FORMAT_R16G16B16_UNORM: return 6; + case VK_FORMAT_R16G16B16_SNORM: return 6; + case VK_FORMAT_R16G16B16_USCALED: return 6; + case VK_FORMAT_R16G16B16_SSCALED: return 6; + case VK_FORMAT_R16G16B16_UINT: return 6; + case VK_FORMAT_R16G16B16_SINT: return 6; + case VK_FORMAT_R16G16B16_SFLOAT: return 6; + case VK_FORMAT_R16G16B16A16_UNORM: return 8; + case VK_FORMAT_R16G16B16A16_SNORM: return 8; + case VK_FORMAT_R16G16B16A16_USCALED: return 8; + case VK_FORMAT_R16G16B16A16_SSCALED: return 8; + case VK_FORMAT_R16G16B16A16_UINT: return 8; + case VK_FORMAT_R16G16B16A16_SINT: return 8; + case VK_FORMAT_R16G16B16A16_SFLOAT: return 8; + case VK_FORMAT_R32_UINT: return 4; + case VK_FORMAT_R32_SINT: return 4; + case VK_FORMAT_R32_SFLOAT: return 4; + case VK_FORMAT_R32G32_UINT: return 8; + case VK_FORMAT_R32G32_SINT: return 8; + case VK_FORMAT_R32G32_SFLOAT: return 8; + case VK_FORMAT_R32G32B32_UINT: return 12; + case VK_FORMAT_R32G32B32_SINT: return 12; + case 
VK_FORMAT_R32G32B32_SFLOAT: return 12; + case VK_FORMAT_R32G32B32A32_UINT: return 16; + case VK_FORMAT_R32G32B32A32_SINT: return 16; + case VK_FORMAT_R32G32B32A32_SFLOAT: return 16; + case VK_FORMAT_R64_UINT: return 8; + case VK_FORMAT_R64_SINT: return 8; + case VK_FORMAT_R64_SFLOAT: return 8; + case VK_FORMAT_R64G64_UINT: return 16; + case VK_FORMAT_R64G64_SINT: return 16; + case VK_FORMAT_R64G64_SFLOAT: return 16; + case VK_FORMAT_R64G64B64_UINT: return 24; + case VK_FORMAT_R64G64B64_SINT: return 24; + case VK_FORMAT_R64G64B64_SFLOAT: return 24; + case VK_FORMAT_R64G64B64A64_UINT: return 32; + case VK_FORMAT_R64G64B64A64_SINT: return 32; + case VK_FORMAT_R64G64B64A64_SFLOAT: return 32; + case VK_FORMAT_B10G11R11_UFLOAT_PACK32: return 4; + case VK_FORMAT_E5B9G9R9_UFLOAT_PACK32: return 4; + + default: return 0; + } +} + +const char* kvfVerbaliseVkResult(VkResult result) +{ + switch(result) + { + case VK_SUCCESS: return "Success"; + case VK_NOT_READY: return "A fence or query has not yet completed"; + case VK_TIMEOUT: return "A wait operation has not completed in the specified time"; + case VK_EVENT_SET: return "An event is signaled"; + case VK_EVENT_RESET: return "An event is unsignaled"; + case VK_INCOMPLETE: return "A return array was too small for the result"; + case VK_ERROR_OUT_OF_HOST_MEMORY: return "A host memory allocation has failed"; + case VK_ERROR_OUT_OF_DEVICE_MEMORY: return "A device memory allocation has failed"; + case VK_ERROR_INITIALIZATION_FAILED: return "Initialization of an object could not be completed for implementation-specific reasons"; + case VK_ERROR_DEVICE_LOST: return "The logical or physical device has been lost"; + case VK_ERROR_MEMORY_MAP_FAILED: return "Mapping of a memory object has failed"; + case VK_ERROR_LAYER_NOT_PRESENT: return "A requested layer is not present or could not be loaded"; + case VK_ERROR_EXTENSION_NOT_PRESENT: return "A requested extension is not supported"; + case VK_ERROR_FEATURE_NOT_PRESENT: return "A requested feature is not supported"; + case VK_ERROR_INCOMPATIBLE_DRIVER: return "The requested version of Vulkan is not supported by the driver or is otherwise incompatible"; + case VK_ERROR_TOO_MANY_OBJECTS: return "Too many objects of the type have already been created"; + case VK_ERROR_FORMAT_NOT_SUPPORTED: return "A requested format is not supported on this device"; + case VK_ERROR_SURFACE_LOST_KHR: return "A surface is no longer available"; + case VK_SUBOPTIMAL_KHR: return "A swapchain no longer matches the surface properties exactly, but can still be used"; + case VK_ERROR_OUT_OF_DATE_KHR: return "A surface has changed in such a way that it is no longer compatible with the swapchain"; + case VK_ERROR_INCOMPATIBLE_DISPLAY_KHR: return "The display used by a swapchain does not use the same presentable image layout"; + case VK_ERROR_NATIVE_WINDOW_IN_USE_KHR: return "The requested window is already connected to a VkSurfaceKHR, or to some other non-Vulkan API"; + case VK_ERROR_VALIDATION_FAILED_EXT: return "A validation layer found an error"; + + default: return "Unknown Vulkan error"; + } + return NULL; // just to avoid warnings +} + +#ifdef KVF_ENABLE_VALIDATION_LAYERS + bool __kvfCheckValidationLayerSupport() + { + uint32_t layer_count; + vkEnumerateInstanceLayerProperties(&layer_count, NULL); + VkLayerProperties* available_layers = (VkLayerProperties*)KVF_MALLOC(sizeof(VkLayerProperties) * layer_count); + vkEnumerateInstanceLayerProperties(&layer_count, available_layers); + for(size_t i = 0; i < __kvf_extra_layers_count; i++) + { + 
bool found = false; + for(size_t j = 0; j < layer_count; j++) + { + if(strcmp(available_layers[j].layerName, __kvf_extra_layers[i]) == 0) + { + found = true; + break; + } + } + if(!found) + { + KVF_FREE(available_layers); + return false; + } + } + KVF_FREE(available_layers); + return true; + } + + VKAPI_ATTR VkBool32 VKAPI_CALL __kvfDebugCallback(VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VkDebugUtilsMessageTypeFlagsEXT messageType, const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData, void* pUserData) + { + if(messageSeverity == VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT) + { + if(__kvf_validation_error_callback != NULL) + { + char buffer[4096]; + snprintf(buffer, 4096, "KVF Vulkan validation error : %s", pCallbackData->pMessage); + __kvf_validation_error_callback(buffer); + return VK_FALSE; + } + fprintf(stderr, "\nKVF Vulkan validation error : %s\n", pCallbackData->pMessage); + } + else if(messageSeverity == VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT) + { + if(__kvf_validation_warning_callback != NULL) + { + char buffer[4096]; + snprintf(buffer, 4096, "KVF Vulkan validation warning : %s", pCallbackData->pMessage); + __kvf_validation_warning_callback(buffer); + return VK_FALSE; + } + fprintf(stderr, "\nKVF Vulkan validation warning : %s\n", pCallbackData->pMessage); + } + return VK_FALSE; + } + + void __kvfPopulateDebugMessengerCreateInfo(VkDebugUtilsMessengerCreateInfoEXT* create_info) + { + create_info->sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT; + create_info->messageSeverity = VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT | VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT | VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT; + create_info->messageType = VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT | VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT | VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT; + create_info->pfnUserCallback = __kvfDebugCallback; + } + + VkResult __kvfCreateDebugUtilsMessengerEXT(VkInstance instance, const VkDebugUtilsMessengerCreateInfoEXT* create_info, VkDebugUtilsMessengerEXT* messenger) + { + PFN_vkCreateDebugUtilsMessengerEXT func = (PFN_vkCreateDebugUtilsMessengerEXT)vkGetInstanceProcAddr(instance, "vkCreateDebugUtilsMessengerEXT"); + return func ? 
func(instance, create_info, NULL, messenger) : VK_ERROR_EXTENSION_NOT_PRESENT; + } + + void __kvfInitValidationLayers(VkInstance instance) + { + uint32_t extension_count; + vkEnumerateInstanceExtensionProperties(NULL, &extension_count, NULL); + VkExtensionProperties* extensions = (VkExtensionProperties*)KVF_MALLOC(extension_count * sizeof(VkExtensionProperties)); + vkEnumerateInstanceExtensionProperties(NULL, &extension_count, extensions); + bool extension_found = false; + for(uint32_t i = 0; i < extension_count; i++) + { + if(strcmp(extensions[i].extensionName, VK_EXT_DEBUG_UTILS_EXTENSION_NAME) == 0) + { + extension_found = true; + break; + } + } + if(!extension_found) + { + if(__kvf_validation_warning_callback != NULL) + { + char buffer[1024]; + snprintf(buffer, 1024, "KVF Vulkan warning: %s is not present; cannot enable validation layers", VK_EXT_DEBUG_UTILS_EXTENSION_NAME); + __kvf_validation_warning_callback(buffer); + return; + } + printf("KVF Vulkan warning: %s is not present; cannot enable validation layers", VK_EXT_DEBUG_UTILS_EXTENSION_NAME); + KVF_FREE(extensions); + return; + } + VkDebugUtilsMessengerCreateInfoEXT create_info = {}; + __kvfPopulateDebugMessengerCreateInfo(&create_info); + __kvfCheckVk(__kvfCreateDebugUtilsMessengerEXT(instance, &create_info, &__kvf_debug_messenger)); + } + + void __kvfDestroyDebugUtilsMessengerEXT(VkInstance instance) + { + PFN_vkDestroyDebugUtilsMessengerEXT func = (PFN_vkDestroyDebugUtilsMessengerEXT)vkGetInstanceProcAddr(instance, "vkDestroyDebugUtilsMessengerEXT"); + if(func) + func(instance, __kvf_debug_messenger, NULL); + } +#endif // KVF_ENABLE_VALIDATION_LAYERS + +void kvfAddLayer(const char* layer) +{ + #ifdef KVF_ENABLE_VALIDATION_LAYERS + __kvf_extra_layers = (char**)KVF_REALLOC(__kvf_extra_layers, sizeof(char*) * (__kvf_extra_layers_count + 1)); + KVF_ASSERT(__kvf_extra_layers != NULL); + __kvf_extra_layers[__kvf_extra_layers_count] = (char*)KVF_MALLOC(strlen(layer) + 1); + KVF_ASSERT(__kvf_extra_layers[__kvf_extra_layers_count] != NULL); + strcpy(__kvf_extra_layers[__kvf_extra_layers_count], layer); + __kvf_extra_layers_count++; + #else + if(__kvf_validation_error_callback != NULL) + { + char buffer[4096]; + snprintf(buffer, 4096, "KVF Vulkan validation error : cannot add extra layers, validation layers are not enabled. Try adding #define KVF_ENABLE_VALIDATION_LAYERS"); + __kvf_validation_error_callback(buffer); + return; + } + fprintf(stderr, "KVF Vulkan validation error : cannot add extra layers, validation layers are not enabled. 
Try adding #define KVF_ENABLE_VALIDATION_LAYERS"); + #endif +} + +VkInstance kvfCreateInstance(const char** extensions_enabled, uint32_t extensions_count) +{ + VkInstance instance = VK_NULL_HANDLE; + + VkInstanceCreateInfo create_info = {}; + create_info.sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO; + create_info.pApplicationInfo = NULL; + create_info.flags = 0; + create_info.enabledExtensionCount = extensions_count; + create_info.ppEnabledExtensionNames = extensions_enabled; + create_info.enabledLayerCount = 0; + create_info.ppEnabledLayerNames = NULL; + create_info.pNext = NULL; + +#ifdef KVF_ENABLE_VALIDATION_LAYERS + kvfAddLayer("VK_LAYER_KHRONOS_validation"); + const char** new_extension_set = NULL; + if(__kvfCheckValidationLayerSupport()) + { + VkDebugUtilsMessengerCreateInfoEXT debug_create_info = {}; + __kvfPopulateDebugMessengerCreateInfo(&debug_create_info); + new_extension_set = (const char**)KVF_MALLOC(sizeof(char*) * (extensions_count + 1)); + memcpy(new_extension_set, extensions_enabled, sizeof(char*) * extensions_count); + new_extension_set[extensions_count] = VK_EXT_DEBUG_UTILS_EXTENSION_NAME; + + create_info.enabledExtensionCount = extensions_count + 1; + create_info.ppEnabledExtensionNames = new_extension_set; + create_info.enabledLayerCount = __kvf_extra_layers_count; + create_info.ppEnabledLayerNames = (const char* const*)__kvf_extra_layers; + create_info.pNext = (VkDebugUtilsMessengerCreateInfoEXT*)&debug_create_info; + } +#endif + + __kvfCheckVk(vkCreateInstance(&create_info, NULL, &instance)); +#ifdef KVF_ENABLE_VALIDATION_LAYERS + KVF_FREE(new_extension_set); + __kvfInitValidationLayers(instance); +#endif + return instance; +} + +void kvfDestroyInstance(VkInstance instance) +{ + if(instance == VK_NULL_HANDLE) + return; +#ifdef KVF_ENABLE_VALIDATION_LAYERS + __kvfDestroyDebugUtilsMessengerEXT(instance); + for(size_t i = 0; i < __kvf_extra_layers_count; i++) + KVF_FREE(__kvf_extra_layers[i]); + KVF_FREE(__kvf_extra_layers); + __kvf_extra_layers_count = 0; +#endif + vkDestroyInstance(instance, NULL); +} + +VkPhysicalDevice kvfPickFirstPhysicalDevice(VkInstance instance) +{ + uint32_t device_count; + VkPhysicalDevice* devices = NULL; + VkPhysicalDevice chosen_one = VK_NULL_HANDLE; + + KVF_ASSERT(instance != VK_NULL_HANDLE); + + vkEnumeratePhysicalDevices(instance, &device_count, NULL); + devices = (VkPhysicalDevice*)KVF_MALLOC(sizeof(VkPhysicalDevice) * device_count + 1); + vkEnumeratePhysicalDevices(instance, &device_count, devices); + chosen_one = devices[0]; + KVF_FREE(devices); + return chosen_one; +} + +__KvfQueueFamilies __kvfFindQueueFamilies(VkPhysicalDevice physical, VkSurfaceKHR surface) +{ + __KvfQueueFamilies queues = { -1, -1, -1 }; + uint32_t queue_family_count; + vkGetPhysicalDeviceQueueFamilyProperties(physical, &queue_family_count, NULL); + VkQueueFamilyProperties* queue_families = (VkQueueFamilyProperties*)KVF_MALLOC(sizeof(VkQueueFamilyProperties) * queue_family_count); + vkGetPhysicalDeviceQueueFamilyProperties(physical, &queue_family_count, queue_families); + + for(int i = 0; i < queue_family_count; i++) + { + // try to find a queue family index that supports compute but not graphics + if(queue_families[i].queueFlags & VK_QUEUE_COMPUTE_BIT && (queue_families[i].queueFlags & VK_QUEUE_GRAPHICS_BIT) == 0) + queues.compute = i; + else if(queues.compute != -1 && queue_families[i].queueFlags & VK_QUEUE_COMPUTE_BIT) // else just find a compute queue + queues.compute = i; + if(queue_families[i].queueFlags & VK_QUEUE_GRAPHICS_BIT) + queues.graphics = i; + 
VkBool32 present_support = false; + vkGetPhysicalDeviceSurfaceSupportKHR(physical, i, surface, &present_support); + if(present_support) + queues.present = i; + + if(queues.graphics != -1 && queues.present != -1 && queues.compute != -1) + break; + } + KVF_FREE(queue_families); + return queues; +} + +VkPhysicalDevice kvfPickGoodDefaultPhysicalDevice(VkInstance instance, VkSurfaceKHR surface) +{ + const char* extensions[] = { VK_KHR_SWAPCHAIN_EXTENSION_NAME }; + return kvfPickGoodPhysicalDevice(instance, surface, extensions, sizeof(extensions) / sizeof(extensions[0])); +} + +int32_t __kvfScorePhysicalDevice(VkPhysicalDevice device, VkSurfaceKHR surface, const char** deviceExtensions, uint32_t deviceExtensionsCount) +{ + /* Check Extensions Support */ + uint32_t extension_count; + vkEnumerateDeviceExtensionProperties(device, NULL, &extension_count, NULL); + VkExtensionProperties* props = (VkExtensionProperties*)KVF_MALLOC(sizeof(VkExtensionProperties) * extension_count + 1); + vkEnumerateDeviceExtensionProperties(device, NULL, &extension_count, props); + + bool are_there_required_device_extensions = true; + for(int j = 0; j < deviceExtensionsCount; j++) + { + bool is_there_extension = false; + for(int k = 0; k < extension_count; k++) + { + if(strcmp(deviceExtensions[j], props[k].extensionName) == 0) + { + is_there_extension = true; + break; + } + } + if(is_there_extension == false) + { + are_there_required_device_extensions = false; + break; + } + } + KVF_FREE(props); + if(are_there_required_device_extensions == false) + return -1; + + /* Check Queue Families Support */ + __KvfQueueFamilies queues = __kvfFindQueueFamilies(device, surface); + if(queues.graphics == -1 || queues.present == -1) + return -1; + + /* Check Surface Formats Counts */ + uint32_t format_count; + vkGetPhysicalDeviceSurfaceFormatsKHR(device, surface, &format_count, NULL); + if(format_count == 0) + return -1; + + VkPhysicalDeviceProperties device_props; + vkGetPhysicalDeviceProperties(device, &device_props); + + VkPhysicalDeviceFeatures device_features; + vkGetPhysicalDeviceFeatures(device, &device_features); + + int32_t score = -1; + if(device_props.deviceType == VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU) + score += 1000; + + if(!device_features.geometryShader) + return -1; + + score += device_props.limits.maxImageDimension2D; + score += device_props.limits.maxBoundDescriptorSets; + + return score; +} + +VkPhysicalDevice kvfPickGoodPhysicalDevice(VkInstance instance, VkSurfaceKHR surface, const char** deviceExtensions, uint32_t deviceExtensionsCount) +{ + VkPhysicalDevice* devices = NULL; + VkPhysicalDevice chosen_one = VK_NULL_HANDLE; + uint32_t device_count; + int32_t best_device_score = -1; + + KVF_ASSERT(instance != VK_NULL_HANDLE); + KVF_ASSERT(surface != VK_NULL_HANDLE); + + vkEnumeratePhysicalDevices(instance, &device_count, NULL); + devices = (VkPhysicalDevice*)KVF_MALLOC(sizeof(VkPhysicalDevice) * device_count + 1); + vkEnumeratePhysicalDevices(instance, &device_count, devices); + + for(int i = 0; i < device_count; i++) + { + int32_t current_device_score = __kvfScorePhysicalDevice(devices[i], surface, deviceExtensions, deviceExtensionsCount); + if(current_device_score > best_device_score) + { + best_device_score = current_device_score; + chosen_one = devices[i]; + } + } + KVF_FREE(devices); + if(chosen_one != VK_NULL_HANDLE) + { + __KvfQueueFamilies queues = __kvfFindQueueFamilies(chosen_one, surface); + __kvfAddDeviceToArray(chosen_one, queues.graphics, queues.present); + return chosen_one; + } + return 
VK_NULL_HANDLE; +} + +VkDevice kvfCreateDefaultDevice(VkPhysicalDevice physical) +{ + const char* extensions[] = { VK_KHR_SWAPCHAIN_EXTENSION_NAME }; + return kvfCreateDevice(physical, extensions, sizeof(extensions) / sizeof(extensions[0])); +} + +VkDevice kvfCreateDevice(VkPhysicalDevice physical, const char** extensions, uint32_t extensions_count) +{ + const float queue_priority = 1.0f; + + __KvfDevice* kvfdevice = __kvfGetKvfDeviceFromVkPhysicalDevice(physical); + + KVF_ASSERT(kvfdevice != NULL); + KVF_ASSERT(kvfdevice->queues.graphics != -1); + KVF_ASSERT(kvfdevice->queues.present != -1); + + VkDeviceQueueCreateInfo queue_create_info[2]; + queue_create_info[0].sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO; + queue_create_info[0].queueFamilyIndex = kvfdevice->queues.graphics; + queue_create_info[0].queueCount = 1; + queue_create_info[0].pQueuePriorities = &queue_priority; + queue_create_info[0].flags = 0; + queue_create_info[0].pNext = NULL; + queue_create_info[1].sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO; + queue_create_info[1].queueFamilyIndex = kvfdevice->queues.present; + queue_create_info[1].queueCount = 1; + queue_create_info[1].pQueuePriorities = &queue_priority; + queue_create_info[1].flags = 0; + queue_create_info[1].pNext = NULL; + + VkPhysicalDeviceFeatures device_features = { VK_FALSE }; + + VkDeviceCreateInfo createInfo; + createInfo.sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO; + createInfo.queueCreateInfoCount = (kvfdevice->queues.graphics == kvfdevice->queues.present ? 1 : 2); + createInfo.pQueueCreateInfos = queue_create_info; + createInfo.pEnabledFeatures = &device_features; + createInfo.enabledExtensionCount = extensions_count; + createInfo.ppEnabledExtensionNames = extensions; + createInfo.enabledLayerCount = 0; + createInfo.ppEnabledLayerNames = NULL; + createInfo.flags = 0; + createInfo.pNext = NULL; + + VkDevice device; + __kvfCheckVk(vkCreateDevice(physical, &createInfo, NULL, &device)); + __kvfCompleteDevice(physical, device); + + return device; +} + +void kvfDestroyDevice(VkDevice device) +{ + if(device == VK_NULL_HANDLE) + return; + __kvfDestroyDevice(device); +} + +VkQueue kvfGetDeviceQueue(VkDevice device, KvfQueueType queue) +{ + KVF_ASSERT(device != VK_NULL_HANDLE); + __KvfDevice* kvfdevice = __kvfGetKvfDeviceFromVkDevice(device); + KVF_ASSERT(kvfdevice != NULL); + VkQueue vk_queue = VK_NULL_HANDLE; + if(queue == KVF_GRAPHICS_QUEUE) + vkGetDeviceQueue(device, kvfdevice->queues.graphics, 0, &vk_queue); + else if(queue == KVF_PRESENT_QUEUE) + vkGetDeviceQueue(device, kvfdevice->queues.present, 0, &vk_queue); + else if(queue == KVF_COMPUTE_QUEUE) + vkGetDeviceQueue(device, kvfdevice->queues.compute, 0, &vk_queue); + return vk_queue; +} + +uint32_t kvfGetDeviceQueueFamily(VkDevice device, KvfQueueType queue) +{ + KVF_ASSERT(device != VK_NULL_HANDLE); + __KvfDevice* kvfdevice = __kvfGetKvfDeviceFromVkDevice(device); + KVF_ASSERT(kvfdevice != NULL); + VkQueue vk_queue = VK_NULL_HANDLE; + if(queue == KVF_GRAPHICS_QUEUE) + return kvfdevice->queues.graphics; + else if(queue == KVF_PRESENT_QUEUE) + return kvfdevice->queues.present; + else if(queue == KVF_COMPUTE_QUEUE) + return kvfdevice->queues.compute; + KVF_ASSERT(false && "invalid queue"); + return 0; +} + +bool kvfQueuePresentKHR(VkDevice device, VkSemaphore wait, VkSwapchainKHR swapchain, uint32_t image_index) +{ + KVF_ASSERT(device != VK_NULL_HANDLE); + VkPresentInfoKHR present_info = {}; + present_info.sType = VK_STRUCTURE_TYPE_PRESENT_INFO_KHR; + present_info.waitSemaphoreCount = 1; + 
present_info.pWaitSemaphores = &wait; + present_info.swapchainCount = 1; + present_info.pSwapchains = &swapchain; + present_info.pImageIndices = &image_index; + VkResult result = vkQueuePresentKHR(kvfGetDeviceQueue(device, KVF_PRESENT_QUEUE), &present_info); + if(result == VK_ERROR_OUT_OF_DATE_KHR || result == VK_SUBOPTIMAL_KHR) + return false; + else + __kvfCheckVk(result); + return true; +} + +VkFence kvfCreateFence(VkDevice device) +{ + KVF_ASSERT(device != VK_NULL_HANDLE); + VkFenceCreateInfo fence_info = {}; + fence_info.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO; + fence_info.flags = VK_FENCE_CREATE_SIGNALED_BIT; + VkFence fence; + __kvfCheckVk(vkCreateFence(device, &fence_info, NULL, &fence)); + return fence; +} + +void kvfWaitForFence(VkDevice device, VkFence fence) +{ + KVF_ASSERT(device != VK_NULL_HANDLE); + KVF_ASSERT(fence != VK_NULL_HANDLE); + vkWaitForFences(device, 1, &fence, VK_TRUE, UINT64_MAX); +} + +void kvfDestroyFence(VkDevice device, VkFence fence) +{ + if(fence == VK_NULL_HANDLE) + return; + KVF_ASSERT(device != VK_NULL_HANDLE); + vkDestroyFence(device, fence, NULL); +} + +VkSemaphore kvfCreateSemaphore(VkDevice device) +{ + KVF_ASSERT(device != VK_NULL_HANDLE); + VkSemaphoreCreateInfo semaphore_info = {}; + semaphore_info.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO; + VkSemaphore semaphore; + __kvfCheckVk(vkCreateSemaphore(device, &semaphore_info, NULL, &semaphore)); + return semaphore; +} + +void kvfDestroySemaphore(VkDevice device, VkSemaphore semaphore) +{ + if(semaphore == VK_NULL_HANDLE) + return; + KVF_ASSERT(device != VK_NULL_HANDLE); + vkDestroySemaphore(device, semaphore, NULL); +} + +__KvfSwapchainSupportInternal __kvfQuerySwapchainSupport(VkPhysicalDevice physical, VkSurfaceKHR surface) +{ + __KvfSwapchainSupportInternal support; + + __kvfCheckVk(vkGetPhysicalDeviceSurfaceCapabilitiesKHR(physical, surface, &support.capabilities)); + + vkGetPhysicalDeviceSurfaceFormatsKHR(physical, surface, &support.formatsCount, NULL); + if(support.formatsCount != 0) + { + support.formats = (VkSurfaceFormatKHR*)KVF_MALLOC(sizeof(VkSurfaceFormatKHR) * support.formatsCount); + vkGetPhysicalDeviceSurfaceFormatsKHR(physical, surface, &support.formatsCount, support.formats); + } + + vkGetPhysicalDeviceSurfacePresentModesKHR(physical, surface, &support.presentModesCount, NULL); + if(support.presentModesCount != 0) + { + support.presentModes = (VkPresentModeKHR*)KVF_MALLOC(sizeof(VkPresentModeKHR) * support.presentModesCount); + vkGetPhysicalDeviceSurfacePresentModesKHR(physical, surface, &support.presentModesCount, support.presentModes); + } + return support; +} + +VkSurfaceFormatKHR __kvfChooseSwapSurfaceFormat(__KvfSwapchainSupportInternal* support) +{ + for(int i = 0; i < support->formatsCount; i++) + { + if(support->formats[i].format == VK_FORMAT_R8G8B8A8_SRGB && support->formats[i].colorSpace == VK_COLOR_SPACE_SRGB_NONLINEAR_KHR) + return support->formats[i]; + } + return support->formats[0]; +} + +VkPresentModeKHR __kvfChooseSwapPresentMode(__KvfSwapchainSupportInternal* support, bool tryVsync) +{ + if(tryVsync == false) + return VK_PRESENT_MODE_IMMEDIATE_KHR; + for(int i = 0; i < support->presentModesCount; i++) + { + if(support->presentModes[i] == VK_PRESENT_MODE_MAILBOX_KHR) + return support->presentModes[i]; + } + return VK_PRESENT_MODE_FIFO_KHR; +} + +uint32_t __kvfClamp(uint32_t i, uint32_t min, uint32_t max) +{ + const uint32_t t = i < min ? min : i; + return t > max ? 
max : t; +} + +VkSwapchainKHR kvfCreateSwapchainKHR(VkDevice device, VkPhysicalDevice physical, VkSurfaceKHR surface, VkExtent2D extent, bool tryVsync) +{ + KVF_ASSERT(device != VK_NULL_HANDLE); + VkSwapchainKHR swapchain; + __KvfSwapchainSupportInternal support = __kvfQuerySwapchainSupport(physical, surface); + + VkSurfaceFormatKHR surfaceFormat = __kvfChooseSwapSurfaceFormat(&support); + VkPresentModeKHR presentMode = __kvfChooseSwapPresentMode(&support, tryVsync); + + uint32_t imageCount = support.capabilities.minImageCount + 1; + if(support.capabilities.maxImageCount > 0 && imageCount > support.capabilities.maxImageCount) + imageCount = support.capabilities.maxImageCount; + + __KvfDevice* kvfdevice = __kvfGetKvfDeviceFromVkDevice(device); + KVF_ASSERT(kvfdevice != NULL); + + uint32_t queueFamilyIndices[] = { (uint32_t)kvfdevice->queues.graphics, (uint32_t)kvfdevice->queues.present }; + + if(support.capabilities.currentExtent.width != UINT32_MAX) + extent = support.capabilities.currentExtent; + else + { + extent.width = __kvfClamp(extent.width, support.capabilities.minImageExtent.width, support.capabilities.maxImageExtent.width); + extent.height = __kvfClamp(extent.height, support.capabilities.minImageExtent.height, support.capabilities.maxImageExtent.height); + } + + VkSwapchainCreateInfoKHR createInfo = {}; + createInfo.sType = VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR; + createInfo.surface = surface; + createInfo.minImageCount = imageCount; + createInfo.imageFormat = surfaceFormat.format; + createInfo.imageColorSpace = surfaceFormat.colorSpace; + createInfo.imageExtent = extent; + createInfo.imageArrayLayers = 1; + createInfo.imageUsage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT; + createInfo.preTransform = support.capabilities.currentTransform; + createInfo.compositeAlpha = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR; + createInfo.presentMode = presentMode; + createInfo.clipped = VK_TRUE; + createInfo.oldSwapchain = VK_NULL_HANDLE; + + if(kvfdevice->queues.graphics != kvfdevice->queues.present) + { + createInfo.imageSharingMode = VK_SHARING_MODE_CONCURRENT; + createInfo.queueFamilyIndexCount = 2; + createInfo.pQueueFamilyIndices = queueFamilyIndices; + } + else + createInfo.imageSharingMode = VK_SHARING_MODE_EXCLUSIVE; + + __kvfCheckVk(vkCreateSwapchainKHR(device, &createInfo, NULL, &swapchain)); + + uint32_t images_count; + vkGetSwapchainImagesKHR(device, swapchain, (uint32_t*)&images_count, NULL); + + __kvfAddSwapchainToArray(swapchain, support, surfaceFormat.format, images_count, extent); + + return swapchain; +} + +VkFormat kvfGetSwapchainImagesFormat(VkSwapchainKHR swapchain) +{ + __KvfSwapchain* kvf_swapchain = __kvfGetKvfSwapchainFromVkSwapchainKHR(swapchain); + KVF_ASSERT(kvf_swapchain != NULL); + return kvf_swapchain->images_format; +} + +uint32_t kvfGetSwapchainImagesCount(VkSwapchainKHR swapchain) +{ + __KvfSwapchain* kvf_swapchain = __kvfGetKvfSwapchainFromVkSwapchainKHR(swapchain); + KVF_ASSERT(kvf_swapchain != NULL); + return kvf_swapchain->images_count; +} + +uint32_t kvfGetSwapchainMinImagesCount(VkSwapchainKHR swapchain) +{ + __KvfSwapchain* kvf_swapchain = __kvfGetKvfSwapchainFromVkSwapchainKHR(swapchain); + KVF_ASSERT(kvf_swapchain != NULL); + return kvf_swapchain->support.capabilities.minImageCount; +} + +VkExtent2D kvfGetSwapchainImagesSize(VkSwapchainKHR swapchain) +{ + __KvfSwapchain* kvf_swapchain = __kvfGetKvfSwapchainFromVkSwapchainKHR(swapchain); + KVF_ASSERT(kvf_swapchain != NULL); + return kvf_swapchain->images_extent; +} + +void kvfDestroySwapchainKHR(VkDevice 
device, VkSwapchainKHR swapchain) +{ + if(swapchain == VK_NULL_HANDLE) + return; + KVF_ASSERT(device != VK_NULL_HANDLE); + __kvfDestroySwapchain(device, swapchain); +} + +VkImage kvfCreateImage(VkDevice device, uint32_t width, uint32_t height, VkFormat format, VkImageTiling tiling, VkImageUsageFlags usage) +{ + KVF_ASSERT(device != VK_NULL_HANDLE); + VkImageCreateInfo image_info = {}; + image_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO; + image_info.imageType = VK_IMAGE_TYPE_2D; + image_info.extent.width = width; + image_info.extent.height = height; + image_info.extent.depth = 1; + image_info.mipLevels = 1; + image_info.arrayLayers = 1; + image_info.format = format; + image_info.tiling = tiling; + image_info.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED; + image_info.usage = usage; + image_info.samples = VK_SAMPLE_COUNT_1_BIT; + image_info.sharingMode = VK_SHARING_MODE_EXCLUSIVE; + VkImage image; + __kvfCheckVk(vkCreateImage(device, &image_info, NULL, &image)); + return image; +} + +void kvfDestroyImage(VkDevice device, VkImage image) +{ + if(image == VK_NULL_HANDLE) + return; + KVF_ASSERT(device != VK_NULL_HANDLE); + vkDestroyImage(device, image, NULL); +} + +VkImageView kvfCreateImageView(VkDevice device, VkImage image, VkFormat format, VkImageViewType type, VkImageAspectFlags aspect) +{ + KVF_ASSERT(device != VK_NULL_HANDLE); + VkImageViewCreateInfo create_info = {}; + create_info.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO; + create_info.image = image; + create_info.viewType = type; + create_info.format = format; + create_info.components.r = VK_COMPONENT_SWIZZLE_IDENTITY; + create_info.components.g = VK_COMPONENT_SWIZZLE_IDENTITY; + create_info.components.b = VK_COMPONENT_SWIZZLE_IDENTITY; + create_info.components.a = VK_COMPONENT_SWIZZLE_IDENTITY; + create_info.subresourceRange.aspectMask = aspect; + create_info.subresourceRange.baseMipLevel = 0; + create_info.subresourceRange.levelCount = 1; + create_info.subresourceRange.baseArrayLayer = 0; + create_info.subresourceRange.layerCount = 1; + VkImageView view; + __kvfCheckVk(vkCreateImageView(device, &create_info, NULL, &view)); + return view; +} + +void kvfDestroyImageView(VkDevice device, VkImageView image_view) +{ + KVF_ASSERT(device != VK_NULL_HANDLE); + KVF_ASSERT(image_view != VK_NULL_HANDLE); + vkDestroyImageView(device, image_view, NULL); +} + +void kvfTransitionImageLayout(VkDevice device, VkImage image, VkCommandBuffer cmd, VkFormat format, VkImageLayout old_layout, VkImageLayout new_layout, bool is_single_time_cmd_buffer) +{ + KVF_ASSERT(device != VK_NULL_HANDLE); + + if(new_layout == old_layout) + return; + + if(is_single_time_cmd_buffer) + kvfBeginCommandBuffer(cmd, VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT); + + VkImageMemoryBarrier barrier = {}; + barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER; + barrier.oldLayout = old_layout; + barrier.newLayout = new_layout; + barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED; + barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED; + barrier.image = image; + barrier.subresourceRange.aspectMask = kvfIsDepthFormat(format) ? 
VK_IMAGE_ASPECT_DEPTH_BIT : VK_IMAGE_ASPECT_COLOR_BIT; + barrier.subresourceRange.baseMipLevel = 0; + barrier.subresourceRange.levelCount = 1; + barrier.subresourceRange.baseArrayLayer = 0; + barrier.subresourceRange.layerCount = 1; + barrier.srcAccessMask = kvfLayoutToAccessMask(old_layout, false); + barrier.dstAccessMask = kvfLayoutToAccessMask(new_layout, true); + if(kvfIsStencilFormat(format)) + barrier.subresourceRange.aspectMask |= VK_IMAGE_ASPECT_STENCIL_BIT; + + VkPipelineStageFlags source_stage = 0; + if(barrier.oldLayout == VK_IMAGE_LAYOUT_PRESENT_SRC_KHR) + source_stage = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT; + else if(barrier.srcAccessMask != 0) + source_stage = kvfAccessFlagsToPipelineStage(barrier.srcAccessMask, VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT); + else + source_stage = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT; + + VkPipelineStageFlags destination_stage = 0; + if(barrier.newLayout == VK_IMAGE_LAYOUT_PRESENT_SRC_KHR) + destination_stage = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT; + else if(barrier.dstAccessMask != 0) + destination_stage = kvfAccessFlagsToPipelineStage(barrier.dstAccessMask, VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT); + else + destination_stage = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT; + + vkCmdPipelineBarrier(cmd, source_stage, destination_stage, 0, 0, NULL, 0, NULL, 1, &barrier); + + if(is_single_time_cmd_buffer) + { + kvfEndCommandBuffer(cmd); + VkFence fence = kvfCreateFence(device); + kvfSubmitSingleTimeCommandBuffer(device, cmd, KVF_GRAPHICS_QUEUE, fence); + kvfDestroyFence(device, fence); + } +} + +VkSampler kvfCreateSampler(VkDevice device, VkFilter filters, VkSamplerAddressMode address_modes, VkSamplerMipmapMode mipmap_mode) +{ + KVF_ASSERT(device != VK_NULL_HANDLE); + VkSamplerCreateInfo info = {}; + info.sType = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO; + info.magFilter = filters; + info.minFilter = filters; + info.mipmapMode = mipmap_mode; + info.addressModeU = address_modes; + info.addressModeV = address_modes; + info.addressModeW = address_modes; + info.minLod = -1000; + info.maxLod = 1000; + info.anisotropyEnable = VK_FALSE; + info.maxAnisotropy = 1.0f; + VkSampler sampler; + __kvfCheckVk(vkCreateSampler(device, &info, NULL, &sampler)); + return sampler; +} + +void kvfDestroySampler(VkDevice device, VkSampler sampler) +{ + if(sampler == VK_NULL_HANDLE) + return; + KVF_ASSERT(device != VK_NULL_HANDLE); + vkDestroySampler(device, sampler, NULL); +} + +VkBuffer kvfCreateBuffer(VkDevice device, VkBufferUsageFlags usage, VkDeviceSize size) +{ + KVF_ASSERT(device != VK_NULL_HANDLE); + VkBufferCreateInfo buffer_info = {}; + buffer_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO; + buffer_info.size = size; + buffer_info.usage = usage; + buffer_info.sharingMode = VK_SHARING_MODE_EXCLUSIVE; + VkBuffer buffer; + __kvfCheckVk(vkCreateBuffer(device, &buffer_info, NULL, &buffer)); + return buffer; +} + +void kvfCopyBufferToBuffer(VkCommandBuffer cmd, VkBuffer dst, VkBuffer src, size_t size) +{ + KVF_ASSERT(cmd != VK_NULL_HANDLE); + KVF_ASSERT(dst != VK_NULL_HANDLE); + KVF_ASSERT(src != VK_NULL_HANDLE); + VkBufferCopy copy_region = {}; + copy_region.size = size; + vkCmdCopyBuffer(cmd, src, dst, 1, &copy_region); +} + +void kvfCopyBufferToImage(VkCommandBuffer cmd, VkImage dst, VkBuffer src, size_t buffer_offset, VkImageAspectFlagBits aspect, VkExtent3D extent) +{ + KVF_ASSERT(cmd != VK_NULL_HANDLE); + KVF_ASSERT(dst != 
VK_NULL_HANDLE); + KVF_ASSERT(src != VK_NULL_HANDLE); + VkOffset3D offset = { 0, 0, 0 }; + VkBufferImageCopy region = {}; + region.bufferOffset = buffer_offset; + region.bufferRowLength = 0; + region.bufferImageHeight = 0; + region.imageSubresource.aspectMask = aspect; + region.imageSubresource.mipLevel = 0; + region.imageSubresource.baseArrayLayer = 0; + region.imageSubresource.layerCount = 1; + region.imageOffset = offset; + region.imageExtent = extent; + vkCmdCopyBufferToImage(cmd, src, dst, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &region); +} + +void kvfDestroyBuffer(VkDevice device, VkBuffer buffer) +{ + if(buffer == VK_NULL_HANDLE) + return; + KVF_ASSERT(device != VK_NULL_HANDLE); + vkDestroyBuffer(device, buffer, NULL); +} + +VkFramebuffer kvfCreateFramebuffer(VkDevice device, VkRenderPass render_pass, VkImageView* image_views, size_t image_views_count, VkExtent2D extent) +{ + KVF_ASSERT(device != VK_NULL_HANDLE); + KVF_ASSERT(image_views != NULL); + + VkFramebufferCreateInfo framebuffer_info = {}; + framebuffer_info.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO; + framebuffer_info.renderPass = render_pass; + framebuffer_info.attachmentCount = image_views_count; + framebuffer_info.pAttachments = image_views; + framebuffer_info.width = extent.width; + framebuffer_info.height = extent.height; + framebuffer_info.layers = 1; + VkFramebuffer framebuffer = VK_NULL_HANDLE; + __kvfCheckVk(vkCreateFramebuffer(device, &framebuffer_info, NULL, &framebuffer)); + __kvfAddFramebufferToArray(framebuffer, extent); + return framebuffer; +} + +VkExtent2D kvfGetFramebufferSize(VkFramebuffer buffer) +{ + __KvfFramebuffer* kvf_framebuffer = __kvfGetKvfSwapchainFromVkFramebuffer(buffer); + KVF_ASSERT(kvf_framebuffer != NULL); + return kvf_framebuffer->extent; +} + +void kvfDestroyFramebuffer(VkDevice device, VkFramebuffer framebuffer) +{ + if(framebuffer == VK_NULL_HANDLE) + return; + KVF_ASSERT(device != VK_NULL_HANDLE); + __kvfDestroyFramebuffer(device, framebuffer); +} + +VkCommandBuffer kvfCreateCommandBuffer(VkDevice device) +{ + KVF_ASSERT(device != VK_NULL_HANDLE); + return kvfCreateCommandBufferLeveled(device, VK_COMMAND_BUFFER_LEVEL_PRIMARY); +} + +VkCommandBuffer kvfCreateCommandBufferLeveled(VkDevice device, VkCommandBufferLevel level) +{ + KVF_ASSERT(device != VK_NULL_HANDLE); + __KvfDevice* kvfdevice = __kvfGetKvfDeviceFromVkDevice(device); + KVF_ASSERT(kvfdevice != NULL); + + VkCommandPool pool = kvfdevice->cmd_pool; + VkCommandBuffer buffer; + VkCommandBufferAllocateInfo alloc_info = {}; + alloc_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO; + alloc_info.commandPool = pool; + alloc_info.level = level; + alloc_info.commandBufferCount = 1; + __kvfCheckVk(vkAllocateCommandBuffers(device, &alloc_info, &buffer)); + return buffer; +} + +void kvfBeginCommandBuffer(VkCommandBuffer buffer, VkCommandBufferUsageFlags usage) +{ + KVF_ASSERT(buffer != VK_NULL_HANDLE); + VkCommandBufferBeginInfo begin_info = {}; + begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO; + begin_info.flags = usage; + __kvfCheckVk(vkBeginCommandBuffer(buffer, &begin_info)); +} + +void kvfEndCommandBuffer(VkCommandBuffer buffer) +{ + KVF_ASSERT(buffer != VK_NULL_HANDLE); + __kvfCheckVk(vkEndCommandBuffer(buffer)); +} + +void kvfSubmitCommandBuffer(VkDevice device, VkCommandBuffer buffer, KvfQueueType queue, VkSemaphore signal, VkSemaphore wait, VkFence fence, VkPipelineStageFlags* stages) +{ + KVF_ASSERT(device != VK_NULL_HANDLE); + + VkSemaphore signal_semaphores[1]; + VkSemaphore 
wait_semaphores[1]; + signal_semaphores[0] = signal; + wait_semaphores[0] = wait; + + if(fence != VK_NULL_HANDLE) + vkResetFences(device, 1, &fence); + + VkSubmitInfo submit_info = {}; + submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO; + submit_info.waitSemaphoreCount = (wait == VK_NULL_HANDLE ? 0 : 1); + submit_info.pWaitSemaphores = wait_semaphores; + submit_info.pWaitDstStageMask = stages; + submit_info.commandBufferCount = 1; + submit_info.pCommandBuffers = &buffer; + submit_info.signalSemaphoreCount = (signal == VK_NULL_HANDLE ? 0 : 1); + submit_info.pSignalSemaphores = signal_semaphores; + __kvfCheckVk(vkQueueSubmit(kvfGetDeviceQueue(device, queue), 1, &submit_info, fence)); +} + +void kvfSubmitSingleTimeCommandBuffer(VkDevice device, VkCommandBuffer buffer, KvfQueueType queue, VkFence fence) +{ + KVF_ASSERT(device != VK_NULL_HANDLE); + + if(fence != VK_NULL_HANDLE) + vkResetFences(device, 1, &fence); + + VkSubmitInfo submit_info = {}; + submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO; + submit_info.commandBufferCount = 1; + submit_info.pCommandBuffers = &buffer; + __kvfCheckVk(vkQueueSubmit(kvfGetDeviceQueue(device, queue), 1, &submit_info, fence)); + if(fence != VK_NULL_HANDLE) + vkWaitForFences(device, 1, &fence, VK_TRUE, UINT64_MAX); +} + +VkAttachmentDescription kvfBuildAttachmentDescription(KvfImageType type, VkFormat format, VkImageLayout initial, VkImageLayout final, bool clear) +{ + VkAttachmentDescription attachment = {}; + + switch(type) + { + case KVF_IMAGE_CUBE: + case KVF_IMAGE_DEPTH_ARRAY: + case KVF_IMAGE_COLOR: + case KVF_IMAGE_DEPTH: + { + attachment.format = format; + attachment.initialLayout = initial; + attachment.finalLayout = final; + break; + } + + default: KVF_ASSERT(false && "KVF Attachment Description builder : unsupported image type"); break; + } + + if(clear) + { + attachment.loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR; + attachment.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_CLEAR; + attachment.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED; + } + else + { + attachment.loadOp = VK_ATTACHMENT_LOAD_OP_LOAD; + attachment.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_LOAD; + } + + attachment.samples = VK_SAMPLE_COUNT_1_BIT; + attachment.storeOp = VK_ATTACHMENT_STORE_OP_STORE; + attachment.stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE; + attachment.flags = 0; + + return attachment; +} + +VkAttachmentDescription kvfBuildSwapchainAttachmentDescription(VkSwapchainKHR swapchain, bool clear) +{ + __KvfSwapchain* kvf_swapchain = __kvfGetKvfSwapchainFromVkSwapchainKHR(swapchain); + KVF_ASSERT(kvf_swapchain != NULL); + KVF_ASSERT(kvf_swapchain->images_count != 0); + return kvfBuildAttachmentDescription(KVF_IMAGE_COLOR, kvf_swapchain->images_format, VK_IMAGE_LAYOUT_UNDEFINED,VK_IMAGE_LAYOUT_PRESENT_SRC_KHR, clear); +} + +VkRenderPass kvfCreateRenderPass(VkDevice device, VkAttachmentDescription* attachments, size_t attachments_count, VkPipelineBindPoint bind_point) +{ + KVF_ASSERT(device != VK_NULL_HANDLE); + + size_t color_attachment_count = 0; + size_t depth_attachment_count = 0; + + for(size_t i = 0; i < attachments_count; i++) + { + if(kvfIsDepthFormat(attachments[i].format)) + depth_attachment_count++; + else + color_attachment_count++; + } + + VkAttachmentReference* color_references = NULL; + VkAttachmentReference* depth_references = NULL; + + if(color_attachment_count != 0) + { + color_references = (VkAttachmentReference*)KVF_MALLOC(color_attachment_count * sizeof(VkAttachmentReference)); + KVF_ASSERT(color_references != NULL); + } + if(depth_attachment_count != 0) + { + 
depth_references = (VkAttachmentReference*)KVF_MALLOC(depth_attachment_count * sizeof(VkAttachmentReference)); + KVF_ASSERT(depth_references != NULL); + } + + for(size_t i = 0, c = 0, d = 0; i < attachments_count; i++) + { + if(!kvfIsDepthFormat(attachments[i].format)) + { + VkImageLayout layout = attachments[i].finalLayout; + color_references[c].attachment = i; + color_references[c].layout = layout == VK_IMAGE_LAYOUT_PRESENT_SRC_KHR ? VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL : layout; + c++; + } + else + { + depth_references[d].attachment = i; + depth_references[d].layout = attachments[i].finalLayout; + d++; + } + } + + VkSubpassDescription subpass = {}; + subpass.pipelineBindPoint = bind_point; + subpass.colorAttachmentCount = color_attachment_count; + subpass.pColorAttachments = color_references; + subpass.pDepthStencilAttachment = depth_references; + + VkRenderPassCreateInfo renderpass_create_info = {}; + renderpass_create_info.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO; + renderpass_create_info.attachmentCount = attachments_count; + renderpass_create_info.pAttachments = attachments; + renderpass_create_info.subpassCount = 1; + renderpass_create_info.pSubpasses = &subpass; + renderpass_create_info.dependencyCount = 0; + renderpass_create_info.pDependencies = NULL; + + VkRenderPass render_pass = VK_NULL_HANDLE; + __kvfCheckVk(vkCreateRenderPass(device, &renderpass_create_info, NULL, &render_pass)); + KVF_FREE(color_references); + KVF_FREE(depth_references); + return render_pass; +} + +void kvfDestroyRenderPass(VkDevice device, VkRenderPass renderPass) +{ + if(renderPass == VK_NULL_HANDLE) + return; + KVF_ASSERT(device != VK_NULL_HANDLE); + vkDestroyRenderPass(device, renderPass, NULL); +} + +void kvfBeginRenderPass(VkRenderPass pass, VkCommandBuffer cmd, VkFramebuffer framebuffer, VkExtent2D framebuffer_extent, VkClearValue* clears, size_t clears_count) +{ + KVF_ASSERT(pass != VK_NULL_HANDLE); + KVF_ASSERT(framebuffer != VK_NULL_HANDLE); + + VkOffset2D offset = { 0, 0 }; + VkRenderPassBeginInfo renderpass_info = {}; + renderpass_info.sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO; + renderpass_info.renderPass = pass; + renderpass_info.framebuffer = framebuffer; + renderpass_info.renderArea.offset = offset; + renderpass_info.renderArea.extent = framebuffer_extent; + renderpass_info.clearValueCount = clears_count; + renderpass_info.pClearValues = clears; + vkCmdBeginRenderPass(cmd, &renderpass_info, VK_SUBPASS_CONTENTS_INLINE); +} + +VkShaderModule kvfCreateShaderModule(VkDevice device, uint32_t* code, size_t size) +{ + KVF_ASSERT(device != VK_NULL_HANDLE); + VkShaderModuleCreateInfo createInfo = {}; + createInfo.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO; + createInfo.codeSize = size * sizeof(uint32_t); + createInfo.pCode = code; + VkShaderModule shader = VK_NULL_HANDLE; + __kvfCheckVk(vkCreateShaderModule(device, &createInfo, NULL, &shader)); + return shader; +} + +void kvfDestroyShaderModule(VkDevice device, VkShaderModule shader) +{ + if(shader == VK_NULL_HANDLE) + return; + KVF_ASSERT(device != VK_NULL_HANDLE); + vkDestroyShaderModule(device, shader, NULL); +} + +VkDescriptorSetLayout kvfCreateDescriptorSetLayout(VkDevice device, VkDescriptorSetLayoutBinding* bindings, size_t bindings_count) +{ + KVF_ASSERT(device != VK_NULL_HANDLE); + VkDescriptorSetLayoutCreateInfo layout_info = {}; + layout_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO; + layout_info.bindingCount = bindings_count; + layout_info.pBindings = bindings; + + VkDescriptorSetLayout 
layout; + __kvfCheckVk(vkCreateDescriptorSetLayout(device, &layout_info, NULL, &layout)); + return layout; +} + +void kvfDestroyDescriptorSetLayout(VkDevice device, VkDescriptorSetLayout layout) +{ + if(layout == VK_NULL_HANDLE) + return; + KVF_ASSERT(device != VK_NULL_HANDLE); + vkDestroyDescriptorSetLayout(device, layout, NULL); +} + +VkDescriptorSet kvfAllocateDescriptorSet(VkDevice device, VkDescriptorSetLayout layout) +{ + KVF_ASSERT(device != VK_NULL_HANDLE); + __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); + KVF_ASSERT(kvf_device != NULL); + VkDescriptorPool pool = VK_NULL_HANDLE; + for(int i = 0; i < kvf_device->sets_pools_size; i++) + { + if(kvf_device->sets_pools[i].size < kvf_device->sets_pools[i].capacity) + pool = kvf_device->sets_pools[i].pool; + } + if(pool == VK_NULL_HANDLE) + pool = __kvfDeviceCreateDescriptorPool(device); + KVF_ASSERT(pool != VK_NULL_HANDLE); + + VkDescriptorSet set = VK_NULL_HANDLE; + VkDescriptorSetAllocateInfo alloc_info = {}; + alloc_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO; + alloc_info.descriptorPool = pool; + alloc_info.descriptorSetCount = 1; + alloc_info.pSetLayouts = &layout; + __kvfCheckVk(vkAllocateDescriptorSets(device, &alloc_info, &set)); + KVF_ASSERT(set != VK_NULL_HANDLE); + return set; +} + +void kvfUpdateStorageBufferToDescriptorSet(VkDevice device, VkDescriptorSet set, const VkDescriptorBufferInfo* info, uint32_t binding) +{ + VkWriteDescriptorSet write = kvfWriteStorageBufferToDescriptorSet(device, set, info, binding); + vkUpdateDescriptorSets(device, 1, &write, 0, NULL); +} + +void kvfUpdateUniformBufferToDescriptorSet(VkDevice device, VkDescriptorSet set, const VkDescriptorBufferInfo* info, uint32_t binding) +{ + VkWriteDescriptorSet write = kvfWriteUniformBufferToDescriptorSet(device, set, info, binding); + vkUpdateDescriptorSets(device, 1, &write, 0, NULL); +} + +void kvfUpdateImageToDescriptorSet(VkDevice device, VkDescriptorSet set, const VkDescriptorImageInfo* info, uint32_t binding) +{ + VkWriteDescriptorSet write = kvfWriteImageToDescriptorSet(device, set, info, binding); + vkUpdateDescriptorSets(device, 1, &write, 0, NULL); +} + +VkWriteDescriptorSet kvfWriteStorageBufferToDescriptorSet(VkDevice device, VkDescriptorSet set, const VkDescriptorBufferInfo* info, uint32_t binding) +{ + KVF_ASSERT(device != VK_NULL_HANDLE); + KVF_ASSERT(set != VK_NULL_HANDLE); + VkWriteDescriptorSet descriptor_write = {}; + descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET; + descriptor_write.dstSet = set; + descriptor_write.dstBinding = binding; + descriptor_write.dstArrayElement = 0; + descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER; + descriptor_write.descriptorCount = 1; + descriptor_write.pBufferInfo = info; + return descriptor_write; +} + +VkWriteDescriptorSet kvfWriteUniformBufferToDescriptorSet(VkDevice device, VkDescriptorSet set, const VkDescriptorBufferInfo* info, uint32_t binding) +{ + KVF_ASSERT(device != VK_NULL_HANDLE); + KVF_ASSERT(set != VK_NULL_HANDLE); + VkWriteDescriptorSet descriptor_write = {}; + descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET; + descriptor_write.dstSet = set; + descriptor_write.dstBinding = binding; + descriptor_write.dstArrayElement = 0; + descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER; + descriptor_write.descriptorCount = 1; + descriptor_write.pBufferInfo = info; + return descriptor_write; +} + +VkWriteDescriptorSet kvfWriteImageToDescriptorSet(VkDevice device, VkDescriptorSet set, const 
VkDescriptorImageInfo* info, uint32_t binding) +{ + KVF_ASSERT(device != VK_NULL_HANDLE); + KVF_ASSERT(set != VK_NULL_HANDLE); + VkWriteDescriptorSet descriptor_write = {}; + descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET; + descriptor_write.dstSet = set; + descriptor_write.dstBinding = binding; + descriptor_write.dstArrayElement = 0; + descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER; + descriptor_write.descriptorCount = 1; + descriptor_write.pImageInfo = info; + return descriptor_write; +} + +VkPipelineLayout kvfCreatePipelineLayout(VkDevice device, VkDescriptorSetLayout* set_layouts, size_t set_layouts_count, VkPushConstantRange* pc, size_t pc_count) +{ + KVF_ASSERT(device != VK_NULL_HANDLE); + VkPipelineLayoutCreateInfo pipeline_layout_info = {}; + pipeline_layout_info.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO; + pipeline_layout_info.setLayoutCount = set_layouts_count; + pipeline_layout_info.pSetLayouts = set_layouts; + pipeline_layout_info.pushConstantRangeCount = pc_count; + pipeline_layout_info.pPushConstantRanges = pc; + + VkPipelineLayout layout; + __kvfCheckVk(vkCreatePipelineLayout(device, &pipeline_layout_info, NULL, &layout)); + return layout; +} + +void kvfDestroyPipelineLayout(VkDevice device, VkPipelineLayout layout) +{ + if(layout == VK_NULL_HANDLE) + return; + KVF_ASSERT(device != VK_NULL_HANDLE); + vkDestroyPipelineLayout(device, layout, NULL); +} + +void kvfResetDeviceDescriptorPools(VkDevice device) +{ + KVF_ASSERT(device != VK_NULL_HANDLE); + __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); + KVF_ASSERT(kvf_device != NULL); + for(int i = 0; i < kvf_device->sets_pools_size; i++) + { + vkResetDescriptorPool(device, kvf_device->sets_pools[i].pool, 0); + kvf_device->sets_pools[i].size = 0; + } +} + +KvfGraphicsPipelineBuilder* kvfCreateGPipelineBuilder() +{ + KvfGraphicsPipelineBuilder* builder = (KvfGraphicsPipelineBuilder*)KVF_MALLOC(sizeof(KvfGraphicsPipelineBuilder)); + memset(builder, 0, sizeof(KvfGraphicsPipelineBuilder)); + kvfGPipelineBuilderReset(builder); + return builder; +} + +void kvfDestroyGPipelineBuilder(KvfGraphicsPipelineBuilder* builder) +{ + KVF_ASSERT(builder != NULL); + KVF_FREE(builder->shader_stages); + if(builder->vertex_input_state.pVertexAttributeDescriptions != NULL) + KVF_FREE((VkVertexInputAttributeDescription*)builder->vertex_input_state.pVertexAttributeDescriptions); + if(builder->vertex_input_state.pVertexBindingDescriptions != NULL) + KVF_FREE((VkVertexInputBindingDescription*)builder->vertex_input_state.pVertexBindingDescriptions); + KVF_FREE(builder); +} + +void kvfGPipelineBuilderReset(KvfGraphicsPipelineBuilder* builder) +{ + KVF_ASSERT(builder != NULL); + KVF_FREE(builder->shader_stages); + if(builder->vertex_input_state.pVertexAttributeDescriptions != NULL) + KVF_FREE((VkVertexInputAttributeDescription*)builder->vertex_input_state.pVertexAttributeDescriptions); + if(builder->vertex_input_state.pVertexBindingDescriptions != NULL) + KVF_FREE((VkVertexInputBindingDescription*)builder->vertex_input_state.pVertexBindingDescriptions); + memset(builder, 0, sizeof(KvfGraphicsPipelineBuilder)); + builder->vertex_input_state.sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO; + builder->input_assembly_state.sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO; + builder->tessellation_state.sType = VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO; + builder->rasterization_state.sType = 
VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO; + builder->depth_stencil_state.sType = VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO; +} + +void kvfGPipelineBuilderSetInputTopology(KvfGraphicsPipelineBuilder* builder, VkPrimitiveTopology topology) +{ + KVF_ASSERT(builder != NULL); + builder->input_assembly_state.topology = topology; + builder->input_assembly_state.primitiveRestartEnable = VK_FALSE; +} + +void kvfGPipelineBuilderSetPolygonMode(KvfGraphicsPipelineBuilder* builder, VkPolygonMode polygon, float line_width) +{ + KVF_ASSERT(builder != NULL); + builder->rasterization_state.polygonMode = polygon; + builder->rasterization_state.lineWidth = line_width; +} + +void kvfGPipelineBuilderSetCullMode(KvfGraphicsPipelineBuilder* builder, VkCullModeFlags cull, VkFrontFace face) +{ + KVF_ASSERT(builder != NULL); + builder->rasterization_state.cullMode = cull; + builder->rasterization_state.frontFace = face; +} + +void kvfGPipelineBuilderDisableBlending(KvfGraphicsPipelineBuilder* builder) +{ + KVF_ASSERT(builder != NULL); + builder->color_blend_attachment_state.colorWriteMask = VK_COLOR_COMPONENT_R_BIT | VK_COLOR_COMPONENT_G_BIT | VK_COLOR_COMPONENT_B_BIT | VK_COLOR_COMPONENT_A_BIT; + builder->color_blend_attachment_state.blendEnable = VK_FALSE; +} + +void kvfGPipelineBuilderEnableAdditiveBlending(KvfGraphicsPipelineBuilder* builder) +{ + KVF_ASSERT(builder != NULL); + builder->color_blend_attachment_state.colorWriteMask = VK_COLOR_COMPONENT_R_BIT | VK_COLOR_COMPONENT_G_BIT | VK_COLOR_COMPONENT_B_BIT | VK_COLOR_COMPONENT_A_BIT; + builder->color_blend_attachment_state.blendEnable = VK_TRUE; + builder->color_blend_attachment_state.srcColorBlendFactor = VK_BLEND_FACTOR_SRC_ALPHA; + builder->color_blend_attachment_state.dstColorBlendFactor = VK_BLEND_FACTOR_ONE; + builder->color_blend_attachment_state.colorBlendOp = VK_BLEND_OP_ADD; + builder->color_blend_attachment_state.srcAlphaBlendFactor = VK_BLEND_FACTOR_ONE; + builder->color_blend_attachment_state.dstAlphaBlendFactor = VK_BLEND_FACTOR_ZERO; + builder->color_blend_attachment_state.alphaBlendOp = VK_BLEND_OP_ADD; +} + +void kvfGPipelineBuilderEnableAlphaBlending(KvfGraphicsPipelineBuilder* builder) +{ + KVF_ASSERT(builder != NULL); + builder->color_blend_attachment_state.colorWriteMask = VK_COLOR_COMPONENT_R_BIT | VK_COLOR_COMPONENT_G_BIT | VK_COLOR_COMPONENT_B_BIT | VK_COLOR_COMPONENT_A_BIT; + builder->color_blend_attachment_state.blendEnable = VK_TRUE; + builder->color_blend_attachment_state.srcColorBlendFactor = VK_BLEND_FACTOR_SRC_ALPHA; + builder->color_blend_attachment_state.dstColorBlendFactor = VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA; + builder->color_blend_attachment_state.colorBlendOp = VK_BLEND_OP_ADD; + builder->color_blend_attachment_state.srcAlphaBlendFactor = VK_BLEND_FACTOR_ONE; + builder->color_blend_attachment_state.dstAlphaBlendFactor = VK_BLEND_FACTOR_ZERO; + builder->color_blend_attachment_state.alphaBlendOp = VK_BLEND_OP_ADD; +} + +void kvfGPipelineBuilderEnableDepthTest(KvfGraphicsPipelineBuilder* builder, VkCompareOp op, bool write_enabled) +{ + KVF_ASSERT(builder != NULL); + builder->depth_stencil_state.depthTestEnable = VK_TRUE; + builder->depth_stencil_state.depthWriteEnable = write_enabled; + builder->depth_stencil_state.depthCompareOp = op; + builder->depth_stencil_state.depthBoundsTestEnable = VK_FALSE; + builder->depth_stencil_state.stencilTestEnable = VK_FALSE; + builder->depth_stencil_state.minDepthBounds = 0.f; + builder->depth_stencil_state.maxDepthBounds = 1.f; +} + +void 
kvfGPipelineBuilderDisableDepthTest(KvfGraphicsPipelineBuilder* builder) +{ + KVF_ASSERT(builder != NULL); + builder->depth_stencil_state.depthTestEnable = VK_FALSE; + builder->depth_stencil_state.depthWriteEnable = VK_FALSE; + builder->depth_stencil_state.depthCompareOp = VK_COMPARE_OP_NEVER; + builder->depth_stencil_state.depthBoundsTestEnable = VK_FALSE; + builder->depth_stencil_state.stencilTestEnable = VK_FALSE; + builder->depth_stencil_state.minDepthBounds = 0.f; + builder->depth_stencil_state.maxDepthBounds = 1.f; +} + +void kvfGPipelineBuilderSetVertexInputs(KvfGraphicsPipelineBuilder* builder, VkVertexInputBindingDescription binds, VkVertexInputAttributeDescription* attributes, size_t attributes_count) +{ + KVF_ASSERT(builder != NULL); + KVF_ASSERT(attributes != NULL); + VkVertexInputBindingDescription* binds_ptr = (VkVertexInputBindingDescription*)KVF_MALLOC(sizeof(VkVertexInputBindingDescription)); + KVF_ASSERT(binds_ptr != NULL); + *binds_ptr = binds; + VkVertexInputAttributeDescription* attributes_descriptions = (VkVertexInputAttributeDescription*)KVF_MALLOC(sizeof(VkVertexInputAttributeDescription) * attributes_count); + KVF_ASSERT(attributes_descriptions != NULL); + memcpy(attributes_descriptions, attributes, sizeof(VkVertexInputAttributeDescription) * attributes_count); + builder->vertex_input_state.vertexBindingDescriptionCount = 1; + builder->vertex_input_state.pVertexBindingDescriptions = binds_ptr; + builder->vertex_input_state.vertexAttributeDescriptionCount = attributes_count; + builder->vertex_input_state.pVertexAttributeDescriptions = attributes_descriptions; +} + +void kvfGPipelineBuilderAddShaderStage(KvfGraphicsPipelineBuilder* builder, VkShaderStageFlagBits stage, VkShaderModule module, const char* entry) +{ + KVF_ASSERT(builder != NULL); + builder->shader_stages = (VkPipelineShaderStageCreateInfo*)KVF_REALLOC(builder->shader_stages, sizeof(VkPipelineShaderStageCreateInfo) * (builder->shader_stages_count + 1)); + KVF_ASSERT(builder->shader_stages != NULL); + memset(&builder->shader_stages[builder->shader_stages_count], 0, sizeof(VkPipelineShaderStageCreateInfo)); + char* entry_ptr = (char*)KVF_MALLOC(strlen(entry) + 1); + KVF_ASSERT(entry_ptr != NULL); + strcpy(entry_ptr, entry); + builder->shader_stages[builder->shader_stages_count].sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO; + builder->shader_stages[builder->shader_stages_count].stage = stage; + builder->shader_stages[builder->shader_stages_count].module = module; + builder->shader_stages[builder->shader_stages_count].pName = entry_ptr; + builder->shader_stages_count++; +} + +void kvfGPipelineBuilderResetShaderStages(KvfGraphicsPipelineBuilder* builder) +{ + KVF_ASSERT(builder != NULL); + if(builder->shader_stages == NULL) + return; + + for(size_t i = 0; i < builder->shader_stages_count; i++) + KVF_FREE((char*)builder->shader_stages[i].pName); + KVF_FREE(builder->shader_stages); + builder->shader_stages = NULL; + builder->shader_stages_count = 0; +} + +VkPipeline kvfCreateGraphicsPipeline(VkDevice device, VkPipelineLayout layout, KvfGraphicsPipelineBuilder* builder, VkRenderPass pass) +{ + KVF_ASSERT(builder != NULL); + KVF_ASSERT(device != VK_NULL_HANDLE); + + VkPipelineColorBlendStateCreateInfo color_blending = {}; + color_blending.sType = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO; + color_blending.logicOpEnable = VK_FALSE; + color_blending.logicOp = VK_LOGIC_OP_COPY; + color_blending.attachmentCount = 1; + color_blending.pAttachments = &builder->color_blend_attachment_state; + color_blending.blendConstants[0] = 
0.0f; + color_blending.blendConstants[1] = 0.0f; + color_blending.blendConstants[2] = 0.0f; + color_blending.blendConstants[3] = 0.0f; + + VkDynamicState states[] = { VK_DYNAMIC_STATE_VIEWPORT, VK_DYNAMIC_STATE_SCISSOR }; + + VkPipelineDynamicStateCreateInfo dynamic_states = {}; + dynamic_states.sType = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO; + dynamic_states.dynamicStateCount = sizeof(states) / sizeof(VkDynamicState); + dynamic_states.pDynamicStates = states; + + VkPipelineViewportStateCreateInfo viewport_state = {}; + viewport_state.sType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO; + viewport_state.viewportCount = 1; + viewport_state.pViewports = NULL; + viewport_state.scissorCount = 1; + viewport_state.pScissors = NULL; + + VkPipelineMultisampleStateCreateInfo multisampling = {}; + multisampling.sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO; + multisampling.sampleShadingEnable = VK_FALSE; + multisampling.rasterizationSamples = VK_SAMPLE_COUNT_1_BIT; + + VkGraphicsPipelineCreateInfo pipeline_info = {}; + pipeline_info.sType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO; + pipeline_info.stageCount = builder->shader_stages_count; + pipeline_info.pStages = builder->shader_stages; + pipeline_info.pVertexInputState = &builder->vertex_input_state; + pipeline_info.pInputAssemblyState = &builder->input_assembly_state; + pipeline_info.pViewportState = &viewport_state; + pipeline_info.pRasterizationState = &builder->rasterization_state; + pipeline_info.pMultisampleState = &multisampling; + pipeline_info.pColorBlendState = &color_blending; + pipeline_info.pDynamicState = &dynamic_states; + pipeline_info.layout = layout; + pipeline_info.renderPass = pass; + pipeline_info.subpass = 0; + pipeline_info.basePipelineHandle = VK_NULL_HANDLE; + pipeline_info.pDepthStencilState = &builder->depth_stencil_state; + + VkPipeline pipeline; + __kvfCheckVk(vkCreateGraphicsPipelines(device, VK_NULL_HANDLE, 1, &pipeline_info, NULL, &pipeline)); + return pipeline; +} + +void kvfDestroyPipeline(VkDevice device, VkPipeline pipeline) +{ + KVF_ASSERT(device != VK_NULL_HANDLE); + vkDestroyPipeline(device, pipeline, NULL); +} + +#endif // KVF_IMPLEMENTATION diff --git a/third_party/vma.h b/third_party/vma.h index 60f5720..2307325 100644 --- a/third_party/vma.h +++ b/third_party/vma.h @@ -1,19558 +1,18676 @@ -// -// Copyright (c) 2017-2022 Advanced Micro Devices, Inc. All rights reserved. -// -// Permission is hereby granted, free of charge, to any person obtaining a copy -// of this software and associated documentation files (the "Software"), to deal -// in the Software without restriction, including without limitation the rights -// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -// copies of the Software, and to permit persons to whom the Software is -// furnished to do so, subject to the following conditions: -// -// The above copyright notice and this permission notice shall be included in -// all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -// THE SOFTWARE. 
-// - -#ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H -#define AMD_VULKAN_MEMORY_ALLOCATOR_H - -/** \mainpage Vulkan Memory Allocator - -Version 3.0.1 (2022-05-26) - -Copyright (c) 2017-2022 Advanced Micro Devices, Inc. All rights reserved. \n -License: MIT - -API documentation divided into groups: [Modules](modules.html) - -\section main_table_of_contents Table of contents - -- User guide - - \subpage quick_start - - [Project setup](@ref quick_start_project_setup) - - [Initialization](@ref quick_start_initialization) - - [Resource allocation](@ref quick_start_resource_allocation) - - \subpage choosing_memory_type - - [Usage](@ref choosing_memory_type_usage) - - [Required and preferred flags](@ref choosing_memory_type_required_preferred_flags) - - [Explicit memory types](@ref choosing_memory_type_explicit_memory_types) - - [Custom memory pools](@ref choosing_memory_type_custom_memory_pools) - - [Dedicated allocations](@ref choosing_memory_type_dedicated_allocations) - - \subpage memory_mapping - - [Mapping functions](@ref memory_mapping_mapping_functions) - - [Persistently mapped memory](@ref memory_mapping_persistently_mapped_memory) - - [Cache flush and invalidate](@ref memory_mapping_cache_control) - - \subpage staying_within_budget - - [Querying for budget](@ref staying_within_budget_querying_for_budget) - - [Controlling memory usage](@ref staying_within_budget_controlling_memory_usage) - - \subpage resource_aliasing - - \subpage custom_memory_pools - - [Choosing memory type index](@ref custom_memory_pools_MemTypeIndex) - - [Linear allocation algorithm](@ref linear_algorithm) - - [Free-at-once](@ref linear_algorithm_free_at_once) - - [Stack](@ref linear_algorithm_stack) - - [Double stack](@ref linear_algorithm_double_stack) - - [Ring buffer](@ref linear_algorithm_ring_buffer) - - \subpage defragmentation - - \subpage statistics - - [Numeric statistics](@ref statistics_numeric_statistics) - - [JSON dump](@ref statistics_json_dump) - - \subpage allocation_annotation - - [Allocation user data](@ref allocation_user_data) - - [Allocation names](@ref allocation_names) - - \subpage virtual_allocator - - \subpage debugging_memory_usage - - [Memory initialization](@ref debugging_memory_usage_initialization) - - [Margins](@ref debugging_memory_usage_margins) - - [Corruption detection](@ref debugging_memory_usage_corruption_detection) - - \subpage opengl_interop -- \subpage usage_patterns - - [GPU-only resource](@ref usage_patterns_gpu_only) - - [Staging copy for upload](@ref usage_patterns_staging_copy_upload) - - [Readback](@ref usage_patterns_readback) - - [Advanced data uploading](@ref usage_patterns_advanced_data_uploading) - - [Other use cases](@ref usage_patterns_other_use_cases) -- \subpage configuration - - [Pointers to Vulkan functions](@ref config_Vulkan_functions) - - [Custom host memory allocator](@ref custom_memory_allocator) - - [Device memory allocation callbacks](@ref allocation_callbacks) - - [Device heap memory limit](@ref heap_memory_limit) -- Extension support - - \subpage vk_khr_dedicated_allocation - - \subpage enabling_buffer_device_address - - \subpage vk_ext_memory_priority - - \subpage vk_amd_device_coherent_memory -- \subpage general_considerations - - [Thread safety](@ref general_considerations_thread_safety) - - [Versioning and compatibility](@ref general_considerations_versioning_and_compatibility) - - [Validation layer warnings](@ref general_considerations_validation_layer_warnings) - - [Allocation algorithm](@ref general_considerations_allocation_algorithm) - - [Features 
not supported](@ref general_considerations_features_not_supported) - -\section main_see_also See also - -- [**Product page on GPUOpen**](https://gpuopen.com/gaming-product/vulkan-memory-allocator/) -- [**Source repository on GitHub**](https://github.com/GPUOpen-LibrariesAndSDKs/VulkanMemoryAllocator) - -\defgroup group_init Library initialization - -\brief API elements related to the initialization and management of the entire library, especially #VmaAllocator object. - -\defgroup group_alloc Memory allocation - -\brief API elements related to the allocation, deallocation, and management of Vulkan memory, buffers, images. -Most basic ones being: vmaCreateBuffer(), vmaCreateImage(). - -\defgroup group_virtual Virtual allocator - -\brief API elements related to the mechanism of \ref virtual_allocator - using the core allocation algorithm -for user-defined purpose without allocating any real GPU memory. - -\defgroup group_stats Statistics - -\brief API elements that query current status of the allocator, from memory usage, budget, to full dump of the internal state in JSON format. -See documentation chapter: \ref statistics. -*/ - - -#ifdef __cplusplus -extern "C" { -#endif - -#ifndef VULKAN_H_ - #include <vulkan/vulkan.h> -#endif - -// Define this macro to declare maximum supported Vulkan version in format AAABBBCCC, -// where AAA = major, BBB = minor, CCC = patch. -// If you want to use version > 1.0, it still needs to be enabled via VmaAllocatorCreateInfo::vulkanApiVersion. -#if !defined(VMA_VULKAN_VERSION) - #if defined(VK_VERSION_1_3) - #define VMA_VULKAN_VERSION 1003000 - #elif defined(VK_VERSION_1_2) - #define VMA_VULKAN_VERSION 1002000 - #elif defined(VK_VERSION_1_1) - #define VMA_VULKAN_VERSION 1001000 - #else - #define VMA_VULKAN_VERSION 1000000 - #endif -#endif - -#if defined(__ANDROID__) && defined(VK_NO_PROTOTYPES) && VMA_STATIC_VULKAN_FUNCTIONS - extern PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr; - extern PFN_vkGetDeviceProcAddr vkGetDeviceProcAddr; - extern PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties; - extern PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties; - extern PFN_vkAllocateMemory vkAllocateMemory; - extern PFN_vkFreeMemory vkFreeMemory; - extern PFN_vkMapMemory vkMapMemory; - extern PFN_vkUnmapMemory vkUnmapMemory; - extern PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges; - extern PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges; - extern PFN_vkBindBufferMemory vkBindBufferMemory; - extern PFN_vkBindImageMemory vkBindImageMemory; - extern PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements; - extern PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements; - extern PFN_vkCreateBuffer vkCreateBuffer; - extern PFN_vkDestroyBuffer vkDestroyBuffer; - extern PFN_vkCreateImage vkCreateImage; - extern PFN_vkDestroyImage vkDestroyImage; - extern PFN_vkCmdCopyBuffer vkCmdCopyBuffer; - #if VMA_VULKAN_VERSION >= 1001000 - extern PFN_vkGetBufferMemoryRequirements2 vkGetBufferMemoryRequirements2; - extern PFN_vkGetImageMemoryRequirements2 vkGetImageMemoryRequirements2; - extern PFN_vkBindBufferMemory2 vkBindBufferMemory2; - extern PFN_vkBindImageMemory2 vkBindImageMemory2; - extern PFN_vkGetPhysicalDeviceMemoryProperties2 vkGetPhysicalDeviceMemoryProperties2; - #endif // #if VMA_VULKAN_VERSION >= 1001000 -#endif // #if defined(__ANDROID__) && VMA_STATIC_VULKAN_FUNCTIONS && VK_NO_PROTOTYPES - -#if !defined(VMA_DEDICATED_ALLOCATION) - #if VK_KHR_get_memory_requirements2 && VK_KHR_dedicated_allocation - 
#define VMA_DEDICATED_ALLOCATION 1 - #else - #define VMA_DEDICATED_ALLOCATION 0 - #endif -#endif - -#if !defined(VMA_BIND_MEMORY2) - #if VK_KHR_bind_memory2 - #define VMA_BIND_MEMORY2 1 - #else - #define VMA_BIND_MEMORY2 0 - #endif -#endif - -#if !defined(VMA_MEMORY_BUDGET) - #if VK_EXT_memory_budget && (VK_KHR_get_physical_device_properties2 || VMA_VULKAN_VERSION >= 1001000) - #define VMA_MEMORY_BUDGET 1 - #else - #define VMA_MEMORY_BUDGET 0 - #endif -#endif - -// Defined to 1 when VK_KHR_buffer_device_address device extension or equivalent core Vulkan 1.2 feature is defined in its headers. -#if !defined(VMA_BUFFER_DEVICE_ADDRESS) - #if VK_KHR_buffer_device_address || VMA_VULKAN_VERSION >= 1002000 - #define VMA_BUFFER_DEVICE_ADDRESS 1 - #else - #define VMA_BUFFER_DEVICE_ADDRESS 0 - #endif -#endif - -// Defined to 1 when VK_EXT_memory_priority device extension is defined in Vulkan headers. -#if !defined(VMA_MEMORY_PRIORITY) - #if VK_EXT_memory_priority - #define VMA_MEMORY_PRIORITY 1 - #else - #define VMA_MEMORY_PRIORITY 0 - #endif -#endif - -// Defined to 1 when VK_KHR_external_memory device extension is defined in Vulkan headers. -#if !defined(VMA_EXTERNAL_MEMORY) - #if VK_KHR_external_memory - #define VMA_EXTERNAL_MEMORY 1 - #else - #define VMA_EXTERNAL_MEMORY 0 - #endif -#endif - -// Define these macros to decorate all public functions with additional code, -// before and after returned type, appropriately. This may be useful for -// exporting the functions when compiling VMA as a separate library. Example: -// #define VMA_CALL_PRE __declspec(dllexport) -// #define VMA_CALL_POST __cdecl -#ifndef VMA_CALL_PRE - #define VMA_CALL_PRE -#endif -#ifndef VMA_CALL_POST - #define VMA_CALL_POST -#endif - -// Define this macro to decorate pointers with an attribute specifying the -// length of the array they point to if they are not null. -// -// The length may be one of -// - The name of another parameter in the argument list where the pointer is declared -// - The name of another member in the struct where the pointer is declared -// - The name of a member of a struct type, meaning the value of that member in -// the context of the call. For example -// VMA_LEN_IF_NOT_NULL("VkPhysicalDeviceMemoryProperties::memoryHeapCount"), -// this means the number of memory heaps available in the device associated -// with the VmaAllocator being dealt with. -#ifndef VMA_LEN_IF_NOT_NULL - #define VMA_LEN_IF_NOT_NULL(len) -#endif - -// The VMA_NULLABLE macro is defined to be _Nullable when compiling with Clang. -// see: https://clang.llvm.org/docs/AttributeReference.html#nullable -#ifndef VMA_NULLABLE - #ifdef __clang__ - #define VMA_NULLABLE _Nullable - #else - #define VMA_NULLABLE - #endif -#endif - -// The VMA_NOT_NULL macro is defined to be _Nonnull when compiling with Clang. 
-// see: https://clang.llvm.org/docs/AttributeReference.html#nonnull -#ifndef VMA_NOT_NULL - #ifdef __clang__ - #define VMA_NOT_NULL _Nonnull - #else - #define VMA_NOT_NULL - #endif -#endif - -// If non-dispatchable handles are represented as pointers then we can give -// then nullability annotations -#ifndef VMA_NOT_NULL_NON_DISPATCHABLE - #if defined(__LP64__) || defined(_WIN64) || (defined(__x86_64__) && !defined(__ILP32__) ) || defined(_M_X64) || defined(__ia64) || defined (_M_IA64) || defined(__aarch64__) || defined(__powerpc64__) - #define VMA_NOT_NULL_NON_DISPATCHABLE VMA_NOT_NULL - #else - #define VMA_NOT_NULL_NON_DISPATCHABLE - #endif -#endif - -#ifndef VMA_NULLABLE_NON_DISPATCHABLE - #if defined(__LP64__) || defined(_WIN64) || (defined(__x86_64__) && !defined(__ILP32__) ) || defined(_M_X64) || defined(__ia64) || defined (_M_IA64) || defined(__aarch64__) || defined(__powerpc64__) - #define VMA_NULLABLE_NON_DISPATCHABLE VMA_NULLABLE - #else - #define VMA_NULLABLE_NON_DISPATCHABLE - #endif -#endif - -#ifndef VMA_STATS_STRING_ENABLED - #define VMA_STATS_STRING_ENABLED 1 -#endif - -//////////////////////////////////////////////////////////////////////////////// -//////////////////////////////////////////////////////////////////////////////// -// -// INTERFACE -// -//////////////////////////////////////////////////////////////////////////////// -//////////////////////////////////////////////////////////////////////////////// - -// Sections for managing code placement in file, only for development purposes e.g. for convenient folding inside an IDE. -#ifndef _VMA_ENUM_DECLARATIONS - -/** -\addtogroup group_init -@{ -*/ - -/// Flags for created #VmaAllocator. -typedef enum VmaAllocatorCreateFlagBits -{ - /** \brief Allocator and all objects created from it will not be synchronized internally, so you must guarantee they are used from only one thread at a time or synchronized externally by you. - - Using this flag may increase performance because internal mutexes are not used. - */ - VMA_ALLOCATOR_CREATE_EXTERNALLY_SYNCHRONIZED_BIT = 0x00000001, - /** \brief Enables usage of VK_KHR_dedicated_allocation extension. - - The flag works only if VmaAllocatorCreateInfo::vulkanApiVersion `== VK_API_VERSION_1_0`. - When it is `VK_API_VERSION_1_1`, the flag is ignored because the extension has been promoted to Vulkan 1.1. - - Using this extension will automatically allocate dedicated blocks of memory for - some buffers and images instead of suballocating place for them out of bigger - memory blocks (as if you explicitly used #VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT - flag) when it is recommended by the driver. It may improve performance on some - GPUs. - - You may set this flag only if you found out that following device extensions are - supported, you enabled them while creating Vulkan device passed as - VmaAllocatorCreateInfo::device, and you want them to be used internally by this - library: - - - VK_KHR_get_memory_requirements2 (device extension) - - VK_KHR_dedicated_allocation (device extension) - - When this flag is set, you can experience following warnings reported by Vulkan - validation layer. You can ignore them. - - > vkBindBufferMemory(): Binding memory to buffer 0x2d but vkGetBufferMemoryRequirements() has not been called on that buffer. - */ - VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT = 0x00000002, - /** - Enables usage of VK_KHR_bind_memory2 extension. - - The flag works only if VmaAllocatorCreateInfo::vulkanApiVersion `== VK_API_VERSION_1_0`. 
- When it is `VK_API_VERSION_1_1`, the flag is ignored because the extension has been promoted to Vulkan 1.1. - - You may set this flag only if you found out that this device extension is supported, - you enabled it while creating Vulkan device passed as VmaAllocatorCreateInfo::device, - and you want it to be used internally by this library. - - The extension provides functions `vkBindBufferMemory2KHR` and `vkBindImageMemory2KHR`, - which allow to pass a chain of `pNext` structures while binding. - This flag is required if you use `pNext` parameter in vmaBindBufferMemory2() or vmaBindImageMemory2(). - */ - VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT = 0x00000004, - /** - Enables usage of VK_EXT_memory_budget extension. - - You may set this flag only if you found out that this device extension is supported, - you enabled it while creating Vulkan device passed as VmaAllocatorCreateInfo::device, - and you want it to be used internally by this library, along with another instance extension - VK_KHR_get_physical_device_properties2, which is required by it (or Vulkan 1.1, where this extension is promoted). - - The extension provides query for current memory usage and budget, which will probably - be more accurate than an estimation used by the library otherwise. - */ - VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT = 0x00000008, - /** - Enables usage of VK_AMD_device_coherent_memory extension. - - You may set this flag only if you: - - - found out that this device extension is supported and enabled it while creating Vulkan device passed as VmaAllocatorCreateInfo::device, - - checked that `VkPhysicalDeviceCoherentMemoryFeaturesAMD::deviceCoherentMemory` is true and set it while creating the Vulkan device, - - want it to be used internally by this library. - - The extension and accompanying device feature provide access to memory types with - `VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD` and `VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD` flags. - They are useful mostly for writing breadcrumb markers - a common method for debugging GPU crash/hang/TDR. - - When the extension is not enabled, such memory types are still enumerated, but their usage is illegal. - To protect from this error, if you don't create the allocator with this flag, it will refuse to allocate any memory or create a custom pool in such memory type, - returning `VK_ERROR_FEATURE_NOT_PRESENT`. - */ - VMA_ALLOCATOR_CREATE_AMD_DEVICE_COHERENT_MEMORY_BIT = 0x00000010, - /** - Enables usage of "buffer device address" feature, which allows you to use function - `vkGetBufferDeviceAddress*` to get raw GPU pointer to a buffer and pass it for usage inside a shader. - - You may set this flag only if you: - - 1. (For Vulkan version < 1.2) Found as available and enabled device extension - VK_KHR_buffer_device_address. - This extension is promoted to core Vulkan 1.2. - 2. Found as available and enabled device feature `VkPhysicalDeviceBufferDeviceAddressFeatures::bufferDeviceAddress`. - - When this flag is set, you can create buffers with `VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT` using VMA. - The library automatically adds `VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT` to - allocated memory blocks wherever it might be needed. - - For more information, see documentation chapter \ref enabling_buffer_device_address. - */ - VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT = 0x00000020, - /** - Enables usage of VK_EXT_memory_priority extension in the library. 
- - You may set this flag only if you found available and enabled this device extension, - along with `VkPhysicalDeviceMemoryPriorityFeaturesEXT::memoryPriority == VK_TRUE`, - while creating Vulkan device passed as VmaAllocatorCreateInfo::device. - - When this flag is used, VmaAllocationCreateInfo::priority and VmaPoolCreateInfo::priority - are used to set priorities of allocated Vulkan memory. Without it, these variables are ignored. - - A priority must be a floating-point value between 0 and 1, indicating the priority of the allocation relative to other memory allocations. - Larger values are higher priority. The granularity of the priorities is implementation-dependent. - It is automatically passed to every call to `vkAllocateMemory` done by the library using structure `VkMemoryPriorityAllocateInfoEXT`. - The value to be used for default priority is 0.5. - For more details, see the documentation of the VK_EXT_memory_priority extension. - */ - VMA_ALLOCATOR_CREATE_EXT_MEMORY_PRIORITY_BIT = 0x00000040, - - VMA_ALLOCATOR_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VmaAllocatorCreateFlagBits; -/// See #VmaAllocatorCreateFlagBits. -typedef VkFlags VmaAllocatorCreateFlags; - -/** @} */ - -/** -\addtogroup group_alloc -@{ -*/ - -/// \brief Intended usage of the allocated memory. -typedef enum VmaMemoryUsage -{ - /** No intended memory usage specified. - Use other members of VmaAllocationCreateInfo to specify your requirements. - */ - VMA_MEMORY_USAGE_UNKNOWN = 0, - /** - \deprecated Obsolete, preserved for backward compatibility. - Prefers `VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT`. - */ - VMA_MEMORY_USAGE_GPU_ONLY = 1, - /** - \deprecated Obsolete, preserved for backward compatibility. - Guarantees `VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT` and `VK_MEMORY_PROPERTY_HOST_COHERENT_BIT`. - */ - VMA_MEMORY_USAGE_CPU_ONLY = 2, - /** - \deprecated Obsolete, preserved for backward compatibility. - Guarantees `VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT`, prefers `VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT`. - */ - VMA_MEMORY_USAGE_CPU_TO_GPU = 3, - /** - \deprecated Obsolete, preserved for backward compatibility. - Guarantees `VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT`, prefers `VK_MEMORY_PROPERTY_HOST_CACHED_BIT`. - */ - VMA_MEMORY_USAGE_GPU_TO_CPU = 4, - /** - \deprecated Obsolete, preserved for backward compatibility. - Prefers not `VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT`. - */ - VMA_MEMORY_USAGE_CPU_COPY = 5, - /** - Lazily allocated GPU memory having `VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT`. - Exists mostly on mobile platforms. Using it on desktop PC or other GPUs with no such memory type present will fail the allocation. - - Usage: Memory for transient attachment images (color attachments, depth attachments etc.), created with `VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT`. - - Allocations with this usage are always created as dedicated - it implies #VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT. - */ - VMA_MEMORY_USAGE_GPU_LAZILY_ALLOCATED = 6, - /** - Selects best memory type automatically. - This flag is recommended for most common use cases. - - When using this flag, if you want to map the allocation (using vmaMapMemory() or #VMA_ALLOCATION_CREATE_MAPPED_BIT), - you must pass one of the flags: #VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT or #VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT - in VmaAllocationCreateInfo::flags. - - It can be used only with functions that let the library know `VkBufferCreateInfo` or `VkImageCreateInfo`, e.g. 
- vmaCreateBuffer(), vmaCreateImage(), vmaFindMemoryTypeIndexForBufferInfo(), vmaFindMemoryTypeIndexForImageInfo() - and not with generic memory allocation functions. - */ - VMA_MEMORY_USAGE_AUTO = 7, - /** - Selects best memory type automatically with preference for GPU (device) memory. - - When using this flag, if you want to map the allocation (using vmaMapMemory() or #VMA_ALLOCATION_CREATE_MAPPED_BIT), - you must pass one of the flags: #VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT or #VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT - in VmaAllocationCreateInfo::flags. - - It can be used only with functions that let the library know `VkBufferCreateInfo` or `VkImageCreateInfo`, e.g. - vmaCreateBuffer(), vmaCreateImage(), vmaFindMemoryTypeIndexForBufferInfo(), vmaFindMemoryTypeIndexForImageInfo() - and not with generic memory allocation functions. - */ - VMA_MEMORY_USAGE_AUTO_PREFER_DEVICE = 8, - /** - Selects best memory type automatically with preference for CPU (host) memory. - - When using this flag, if you want to map the allocation (using vmaMapMemory() or #VMA_ALLOCATION_CREATE_MAPPED_BIT), - you must pass one of the flags: #VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT or #VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT - in VmaAllocationCreateInfo::flags. - - It can be used only with functions that let the library know `VkBufferCreateInfo` or `VkImageCreateInfo`, e.g. - vmaCreateBuffer(), vmaCreateImage(), vmaFindMemoryTypeIndexForBufferInfo(), vmaFindMemoryTypeIndexForImageInfo() - and not with generic memory allocation functions. - */ - VMA_MEMORY_USAGE_AUTO_PREFER_HOST = 9, - - VMA_MEMORY_USAGE_MAX_ENUM = 0x7FFFFFFF -} VmaMemoryUsage; - -/// Flags to be passed as VmaAllocationCreateInfo::flags. -typedef enum VmaAllocationCreateFlagBits -{ - /** \brief Set this flag if the allocation should have its own memory block. - - Use it for special, big resources, like fullscreen images used as attachments. - */ - VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT = 0x00000001, - - /** \brief Set this flag to only try to allocate from existing `VkDeviceMemory` blocks and never create new such block. - - If new allocation cannot be placed in any of the existing blocks, allocation - fails with `VK_ERROR_OUT_OF_DEVICE_MEMORY` error. - - You should not use #VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT and - #VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT at the same time. It makes no sense. - */ - VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT = 0x00000002, - /** \brief Set this flag to use a memory that will be persistently mapped and retrieve pointer to it. - - Pointer to mapped memory will be returned through VmaAllocationInfo::pMappedData. - - It is valid to use this flag for allocation made from memory type that is not - `HOST_VISIBLE`. This flag is then ignored and memory is not mapped. This is - useful if you need an allocation that is efficient to use on GPU - (`DEVICE_LOCAL`) and still want to map it directly if possible on platforms that - support it (e.g. Intel GPU). - */ - VMA_ALLOCATION_CREATE_MAPPED_BIT = 0x00000004, - /** \deprecated Preserved for backward compatibility. Consider using vmaSetAllocationName() instead. - - Set this flag to treat VmaAllocationCreateInfo::pUserData as pointer to a - null-terminated string. Instead of copying pointer value, a local copy of the - string is made and stored in allocation's `pName`. The string is automatically - freed together with the allocation. It is also used in vmaBuildStatsString(). 
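/* [Editor's note, not part of the original header] A minimal sketch of how the
 * VMA_MEMORY_USAGE_AUTO* values above are typically consumed: vmaCreateBuffer()
 * receives the VkBufferCreateInfo, so the library can pick a memory type itself.
 * The `allocator` handle is assumed to exist already; error handling is omitted.
 */
VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
bufCreateInfo.size = 65536;
bufCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;

VmaAllocationCreateInfo allocCreateInfo = {0};
allocCreateInfo.usage = VMA_MEMORY_USAGE_AUTO; /* let VMA choose the memory type */

VkBuffer buffer;
VmaAllocation allocation;
vmaCreateBuffer(allocator, &bufCreateInfo, &allocCreateInfo, &buffer, &allocation, NULL);
/* ... later: vmaDestroyBuffer(allocator, buffer, allocation); */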
- */ - VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT = 0x00000020, - /** Allocation will be created from upper stack in a double stack pool. - - This flag is only allowed for custom pools created with #VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT flag. - */ - VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT = 0x00000040, - /** Create both buffer/image and allocation, but don't bind them together. - It is useful when you want to bind yourself to do some more advanced binding, e.g. using some extensions. - The flag is meaningful only with functions that bind by default: vmaCreateBuffer(), vmaCreateImage(). - Otherwise it is ignored. - - If you want to make sure the new buffer/image is not tied to the new memory allocation - through `VkMemoryDedicatedAllocateInfoKHR` structure in case the allocation ends up in its own memory block, - use also flag #VMA_ALLOCATION_CREATE_CAN_ALIAS_BIT. - */ - VMA_ALLOCATION_CREATE_DONT_BIND_BIT = 0x00000080, - /** Create allocation only if additional device memory required for it, if any, won't exceed - memory budget. Otherwise return `VK_ERROR_OUT_OF_DEVICE_MEMORY`. - */ - VMA_ALLOCATION_CREATE_WITHIN_BUDGET_BIT = 0x00000100, - /** \brief Set this flag if the allocated memory will have aliasing resources. - - Usage of this flag prevents supplying `VkMemoryDedicatedAllocateInfoKHR` when #VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT is specified. - Otherwise created dedicated memory will not be suitable for aliasing resources, resulting in Vulkan Validation Layer errors. - */ - VMA_ALLOCATION_CREATE_CAN_ALIAS_BIT = 0x00000200, - /** - Requests possibility to map the allocation (using vmaMapMemory() or #VMA_ALLOCATION_CREATE_MAPPED_BIT). - - - If you use #VMA_MEMORY_USAGE_AUTO or other `VMA_MEMORY_USAGE_AUTO*` value, - you must use this flag to be able to map the allocation. Otherwise, mapping is incorrect. - - If you use other value of #VmaMemoryUsage, this flag is ignored and mapping is always possible in memory types that are `HOST_VISIBLE`. - This includes allocations created in \ref custom_memory_pools. - - Declares that mapped memory will only be written sequentially, e.g. using `memcpy()` or a loop writing number-by-number, - never read or accessed randomly, so a memory type can be selected that is uncached and write-combined. - - \warning Violating this declaration may work correctly, but will likely be very slow. - Watch out for implicit reads introduced by doing e.g. `pMappedData[i] += x;` - Better prepare your data in a local variable and `memcpy()` it to the mapped pointer all at once. - */ - VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT = 0x00000400, - /** - Requests possibility to map the allocation (using vmaMapMemory() or #VMA_ALLOCATION_CREATE_MAPPED_BIT). - - - If you use #VMA_MEMORY_USAGE_AUTO or other `VMA_MEMORY_USAGE_AUTO*` value, - you must use this flag to be able to map the allocation. Otherwise, mapping is incorrect. - - If you use other value of #VmaMemoryUsage, this flag is ignored and mapping is always possible in memory types that are `HOST_VISIBLE`. - This includes allocations created in \ref custom_memory_pools. - - Declares that mapped memory can be read, written, and accessed in random order, - so a `HOST_CACHED` memory type is required. 
- */ - VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT = 0x00000800, - /** - Together with #VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT or #VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT, - it says that despite request for host access, a not-`HOST_VISIBLE` memory type can be selected - if it may improve performance. - - By using this flag, you declare that you will check if the allocation ended up in a `HOST_VISIBLE` memory type - (e.g. using vmaGetAllocationMemoryProperties()) and if not, you will create some "staging" buffer and - issue an explicit transfer to write/read your data. - To prepare for this possibility, don't forget to add appropriate flags like - `VK_BUFFER_USAGE_TRANSFER_DST_BIT`, `VK_BUFFER_USAGE_TRANSFER_SRC_BIT` to the parameters of created buffer or image. - */ - VMA_ALLOCATION_CREATE_HOST_ACCESS_ALLOW_TRANSFER_INSTEAD_BIT = 0x00001000, - /** Allocation strategy that chooses smallest possible free range for the allocation - to minimize memory usage and fragmentation, possibly at the expense of allocation time. - */ - VMA_ALLOCATION_CREATE_STRATEGY_MIN_MEMORY_BIT = 0x00010000, - /** Allocation strategy that chooses first suitable free range for the allocation - - not necessarily in terms of the smallest offset but the one that is easiest and fastest to find - to minimize allocation time, possibly at the expense of allocation quality. - */ - VMA_ALLOCATION_CREATE_STRATEGY_MIN_TIME_BIT = 0x00020000, - /** Allocation strategy that chooses always the lowest offset in available space. - This is not the most efficient strategy but achieves highly packed data. - Used internally by defragmentation, not recomended in typical usage. - */ - VMA_ALLOCATION_CREATE_STRATEGY_MIN_OFFSET_BIT = 0x00040000, - /** Alias to #VMA_ALLOCATION_CREATE_STRATEGY_MIN_MEMORY_BIT. - */ - VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT = VMA_ALLOCATION_CREATE_STRATEGY_MIN_MEMORY_BIT, - /** Alias to #VMA_ALLOCATION_CREATE_STRATEGY_MIN_TIME_BIT. - */ - VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT = VMA_ALLOCATION_CREATE_STRATEGY_MIN_TIME_BIT, - /** A bit mask to extract only `STRATEGY` bits from entire set of flags. - */ - VMA_ALLOCATION_CREATE_STRATEGY_MASK = - VMA_ALLOCATION_CREATE_STRATEGY_MIN_MEMORY_BIT | - VMA_ALLOCATION_CREATE_STRATEGY_MIN_TIME_BIT | - VMA_ALLOCATION_CREATE_STRATEGY_MIN_OFFSET_BIT, - - VMA_ALLOCATION_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VmaAllocationCreateFlagBits; -/// See #VmaAllocationCreateFlagBits. -typedef VkFlags VmaAllocationCreateFlags; - -/// Flags to be passed as VmaPoolCreateInfo::flags. -typedef enum VmaPoolCreateFlagBits -{ - /** \brief Use this flag if you always allocate only buffers and linear images or only optimal images out of this pool and so Buffer-Image Granularity can be ignored. - - This is an optional optimization flag. - - If you always allocate using vmaCreateBuffer(), vmaCreateImage(), - vmaAllocateMemoryForBuffer(), then you don't need to use it because allocator - knows exact type of your allocations so it can handle Buffer-Image Granularity - in the optimal way. - - If you also allocate using vmaAllocateMemoryForImage() or vmaAllocateMemory(), - exact type of such allocations is not known, so allocator must be conservative - in handling Buffer-Image Granularity, which can lead to suboptimal allocation - (wasted memory). 
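/* [Editor's note, not part of the original header] A minimal sketch of the intended
 * combination of VMA_MEMORY_USAGE_AUTO with the HOST_ACCESS_* and MAPPED flags
 * documented above, here for a CPU-written, persistently mapped staging buffer.
 * `allocator`, `srcData`, `srcSize` and <string.h> (memcpy) are assumed; error
 * handling is omitted.
 */
VkBufferCreateInfo stagingBufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
stagingBufInfo.size = srcSize;
stagingBufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;

VmaAllocationCreateInfo stagingAllocCreateInfo = {0};
stagingAllocCreateInfo.usage = VMA_MEMORY_USAGE_AUTO;
stagingAllocCreateInfo.flags = VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT |
                               VMA_ALLOCATION_CREATE_MAPPED_BIT; /* persistently mapped */

VkBuffer stagingBuffer;
VmaAllocation stagingAllocation;
VmaAllocationInfo stagingAllocInfo;
vmaCreateBuffer(allocator, &stagingBufInfo, &stagingAllocCreateInfo,
    &stagingBuffer, &stagingAllocation, &stagingAllocInfo);

/* Sequential writes only, as declared by the flag above. */
memcpy(stagingAllocInfo.pMappedData, srcData, srcSize);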
In that case, if you can make sure you always allocate only - buffers and linear images or only optimal images out of this pool, use this flag - to make allocator disregard Buffer-Image Granularity and so make allocations - faster and more optimal. - */ - VMA_POOL_CREATE_IGNORE_BUFFER_IMAGE_GRANULARITY_BIT = 0x00000002, - - /** \brief Enables alternative, linear allocation algorithm in this pool. - - Specify this flag to enable linear allocation algorithm, which always creates - new allocations after last one and doesn't reuse space from allocations freed in - between. It trades memory consumption for simplified algorithm and data - structure, which has better performance and uses less memory for metadata. - - By using this flag, you can achieve behavior of free-at-once, stack, - ring buffer, and double stack. - For details, see documentation chapter \ref linear_algorithm. - */ - VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT = 0x00000004, - - /** Bit mask to extract only `ALGORITHM` bits from entire set of flags. - */ - VMA_POOL_CREATE_ALGORITHM_MASK = - VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT, - - VMA_POOL_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VmaPoolCreateFlagBits; -/// Flags to be passed as VmaPoolCreateInfo::flags. See #VmaPoolCreateFlagBits. -typedef VkFlags VmaPoolCreateFlags; - -/// Flags to be passed as VmaDefragmentationInfo::flags. -typedef enum VmaDefragmentationFlagBits -{ - /* \brief Use simple but fast algorithm for defragmentation. - May not achieve best results but will require least time to compute and least allocations to copy. - */ - VMA_DEFRAGMENTATION_FLAG_ALGORITHM_FAST_BIT = 0x1, - /* \brief Default defragmentation algorithm, applied also when no `ALGORITHM` flag is specified. - Offers a balance between defragmentation quality and the amount of allocations and bytes that need to be moved. - */ - VMA_DEFRAGMENTATION_FLAG_ALGORITHM_BALANCED_BIT = 0x2, - /* \brief Perform full defragmentation of memory. - Can result in notably more time to compute and allocations to copy, but will achieve best memory packing. - */ - VMA_DEFRAGMENTATION_FLAG_ALGORITHM_FULL_BIT = 0x4, - /** \brief Use the most roboust algorithm at the cost of time to compute and number of copies to make. - Only available when bufferImageGranularity is greater than 1, since it aims to reduce - alignment issues between different types of resources. - Otherwise falls back to same behavior as #VMA_DEFRAGMENTATION_FLAG_ALGORITHM_FULL_BIT. - */ - VMA_DEFRAGMENTATION_FLAG_ALGORITHM_EXTENSIVE_BIT = 0x8, - - /// A bit mask to extract only `ALGORITHM` bits from entire set of flags. - VMA_DEFRAGMENTATION_FLAG_ALGORITHM_MASK = - VMA_DEFRAGMENTATION_FLAG_ALGORITHM_FAST_BIT | - VMA_DEFRAGMENTATION_FLAG_ALGORITHM_BALANCED_BIT | - VMA_DEFRAGMENTATION_FLAG_ALGORITHM_FULL_BIT | - VMA_DEFRAGMENTATION_FLAG_ALGORITHM_EXTENSIVE_BIT, - - VMA_DEFRAGMENTATION_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VmaDefragmentationFlagBits; -/// See #VmaDefragmentationFlagBits. -typedef VkFlags VmaDefragmentationFlags; - -/// Operation performed on single defragmentation move. See structure #VmaDefragmentationMove. -typedef enum VmaDefragmentationMoveOperation -{ - /// Buffer/image has been recreated at `dstTmpAllocation`, data has been copied, old buffer/image has been destroyed. `srcAllocation` should be changed to point to the new place. This is the default value set by vmaBeginDefragmentationPass(). - VMA_DEFRAGMENTATION_MOVE_OPERATION_COPY = 0, - /// Set this value if you cannot move the allocation. 
New place reserved at `dstTmpAllocation` will be freed. `srcAllocation` will remain unchanged. - VMA_DEFRAGMENTATION_MOVE_OPERATION_IGNORE = 1, - /// Set this value if you decide to abandon the allocation and you destroyed the buffer/image. New place reserved at `dstTmpAllocation` will be freed, along with `srcAllocation`, which will be destroyed. - VMA_DEFRAGMENTATION_MOVE_OPERATION_DESTROY = 2, -} VmaDefragmentationMoveOperation; - -/** @} */ - -/** -\addtogroup group_virtual -@{ -*/ - -/// Flags to be passed as VmaVirtualBlockCreateInfo::flags. -typedef enum VmaVirtualBlockCreateFlagBits -{ - /** \brief Enables alternative, linear allocation algorithm in this virtual block. - - Specify this flag to enable linear allocation algorithm, which always creates - new allocations after last one and doesn't reuse space from allocations freed in - between. It trades memory consumption for simplified algorithm and data - structure, which has better performance and uses less memory for metadata. - - By using this flag, you can achieve behavior of free-at-once, stack, - ring buffer, and double stack. - For details, see documentation chapter \ref linear_algorithm. - */ - VMA_VIRTUAL_BLOCK_CREATE_LINEAR_ALGORITHM_BIT = 0x00000001, - - /** \brief Bit mask to extract only `ALGORITHM` bits from entire set of flags. - */ - VMA_VIRTUAL_BLOCK_CREATE_ALGORITHM_MASK = - VMA_VIRTUAL_BLOCK_CREATE_LINEAR_ALGORITHM_BIT, - - VMA_VIRTUAL_BLOCK_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VmaVirtualBlockCreateFlagBits; -/// Flags to be passed as VmaVirtualBlockCreateInfo::flags. See #VmaVirtualBlockCreateFlagBits. -typedef VkFlags VmaVirtualBlockCreateFlags; - -/// Flags to be passed as VmaVirtualAllocationCreateInfo::flags. -typedef enum VmaVirtualAllocationCreateFlagBits -{ - /** \brief Allocation will be created from upper stack in a double stack pool. - - This flag is only allowed for virtual blocks created with #VMA_VIRTUAL_BLOCK_CREATE_LINEAR_ALGORITHM_BIT flag. - */ - VMA_VIRTUAL_ALLOCATION_CREATE_UPPER_ADDRESS_BIT = VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT, - /** \brief Allocation strategy that tries to minimize memory usage. - */ - VMA_VIRTUAL_ALLOCATION_CREATE_STRATEGY_MIN_MEMORY_BIT = VMA_ALLOCATION_CREATE_STRATEGY_MIN_MEMORY_BIT, - /** \brief Allocation strategy that tries to minimize allocation time. - */ - VMA_VIRTUAL_ALLOCATION_CREATE_STRATEGY_MIN_TIME_BIT = VMA_ALLOCATION_CREATE_STRATEGY_MIN_TIME_BIT, - /** Allocation strategy that chooses always the lowest offset in available space. - This is not the most efficient strategy but achieves highly packed data. - */ - VMA_VIRTUAL_ALLOCATION_CREATE_STRATEGY_MIN_OFFSET_BIT = VMA_ALLOCATION_CREATE_STRATEGY_MIN_OFFSET_BIT, - /** \brief A bit mask to extract only `STRATEGY` bits from entire set of flags. - - These strategy flags are binary compatible with equivalent flags in #VmaAllocationCreateFlagBits. - */ - VMA_VIRTUAL_ALLOCATION_CREATE_STRATEGY_MASK = VMA_ALLOCATION_CREATE_STRATEGY_MASK, - - VMA_VIRTUAL_ALLOCATION_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VmaVirtualAllocationCreateFlagBits; -/// Flags to be passed as VmaVirtualAllocationCreateInfo::flags. See #VmaVirtualAllocationCreateFlagBits. -typedef VkFlags VmaVirtualAllocationCreateFlags; - -/** @} */ - -#endif // _VMA_ENUM_DECLARATIONS - -#ifndef _VMA_DATA_TYPES_DECLARATIONS - -/** -\addtogroup group_init -@{ */ - -/** \struct VmaAllocator -\brief Represents main object of this library initialized. - -Fill structure #VmaAllocatorCreateInfo and call function vmaCreateAllocator() to create it. 
-Call function vmaDestroyAllocator() to destroy it. - -It is recommended to create just one object of this type per `VkDevice` object, -right after Vulkan is initialized and keep it alive until before Vulkan device is destroyed. -*/ -VK_DEFINE_HANDLE(VmaAllocator) - -/** @} */ - -/** -\addtogroup group_alloc -@{ -*/ - -/** \struct VmaPool -\brief Represents custom memory pool - -Fill structure VmaPoolCreateInfo and call function vmaCreatePool() to create it. -Call function vmaDestroyPool() to destroy it. - -For more information see [Custom memory pools](@ref choosing_memory_type_custom_memory_pools). -*/ -VK_DEFINE_HANDLE(VmaPool) - -/** \struct VmaAllocation -\brief Represents single memory allocation. - -It may be either dedicated block of `VkDeviceMemory` or a specific region of a bigger block of this type -plus unique offset. - -There are multiple ways to create such object. -You need to fill structure VmaAllocationCreateInfo. -For more information see [Choosing memory type](@ref choosing_memory_type). - -Although the library provides convenience functions that create Vulkan buffer or image, -allocate memory for it and bind them together, -binding of the allocation to a buffer or an image is out of scope of the allocation itself. -Allocation object can exist without buffer/image bound, -binding can be done manually by the user, and destruction of it can be done -independently of destruction of the allocation. - -The object also remembers its size and some other information. -To retrieve this information, use function vmaGetAllocationInfo() and inspect -returned structure VmaAllocationInfo. -*/ -VK_DEFINE_HANDLE(VmaAllocation) - -/** \struct VmaDefragmentationContext -\brief An opaque object that represents started defragmentation process. - -Fill structure #VmaDefragmentationInfo and call function vmaBeginDefragmentation() to create it. -Call function vmaEndDefragmentation() to destroy it. -*/ -VK_DEFINE_HANDLE(VmaDefragmentationContext) - -/** @} */ - -/** -\addtogroup group_virtual -@{ -*/ - -/** \struct VmaVirtualAllocation -\brief Represents single memory allocation done inside VmaVirtualBlock. - -Use it as a unique identifier to virtual allocation within the single block. - -Use value `VK_NULL_HANDLE` to represent a null/invalid allocation. -*/ -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VmaVirtualAllocation); - -/** @} */ - -/** -\addtogroup group_virtual -@{ -*/ - -/** \struct VmaVirtualBlock -\brief Handle to a virtual block object that allows to use core allocation algorithm without allocating any real GPU memory. - -Fill in #VmaVirtualBlockCreateInfo structure and use vmaCreateVirtualBlock() to create it. Use vmaDestroyVirtualBlock() to destroy it. -For more information, see documentation chapter \ref virtual_allocator. - -This object is not thread-safe - should not be used from multiple threads simultaneously, must be synchronized externally. -*/ -VK_DEFINE_HANDLE(VmaVirtualBlock) - -/** @} */ - -/** -\addtogroup group_init -@{ -*/ - -/// Callback function called after successful vkAllocateMemory. -typedef void (VKAPI_PTR* PFN_vmaAllocateDeviceMemoryFunction)( - VmaAllocator VMA_NOT_NULL allocator, - uint32_t memoryType, - VkDeviceMemory VMA_NOT_NULL_NON_DISPATCHABLE memory, - VkDeviceSize size, - void* VMA_NULLABLE pUserData); - -/// Callback function called before vkFreeMemory. 
-typedef void (VKAPI_PTR* PFN_vmaFreeDeviceMemoryFunction)( - VmaAllocator VMA_NOT_NULL allocator, - uint32_t memoryType, - VkDeviceMemory VMA_NOT_NULL_NON_DISPATCHABLE memory, - VkDeviceSize size, - void* VMA_NULLABLE pUserData); - -/** \brief Set of callbacks that the library will call for `vkAllocateMemory` and `vkFreeMemory`. - -Provided for informative purpose, e.g. to gather statistics about number of -allocations or total amount of memory allocated in Vulkan. - -Used in VmaAllocatorCreateInfo::pDeviceMemoryCallbacks. -*/ -typedef struct VmaDeviceMemoryCallbacks -{ - /// Optional, can be null. - PFN_vmaAllocateDeviceMemoryFunction VMA_NULLABLE pfnAllocate; - /// Optional, can be null. - PFN_vmaFreeDeviceMemoryFunction VMA_NULLABLE pfnFree; - /// Optional, can be null. - void* VMA_NULLABLE pUserData; -} VmaDeviceMemoryCallbacks; - -/** \brief Pointers to some Vulkan functions - a subset used by the library. - -Used in VmaAllocatorCreateInfo::pVulkanFunctions. -*/ -typedef struct VmaVulkanFunctions -{ - /// Required when using VMA_DYNAMIC_VULKAN_FUNCTIONS. - PFN_vkGetInstanceProcAddr VMA_NULLABLE vkGetInstanceProcAddr; - /// Required when using VMA_DYNAMIC_VULKAN_FUNCTIONS. - PFN_vkGetDeviceProcAddr VMA_NULLABLE vkGetDeviceProcAddr; - PFN_vkGetPhysicalDeviceProperties VMA_NULLABLE vkGetPhysicalDeviceProperties; - PFN_vkGetPhysicalDeviceMemoryProperties VMA_NULLABLE vkGetPhysicalDeviceMemoryProperties; - PFN_vkAllocateMemory VMA_NULLABLE vkAllocateMemory; - PFN_vkFreeMemory VMA_NULLABLE vkFreeMemory; - PFN_vkMapMemory VMA_NULLABLE vkMapMemory; - PFN_vkUnmapMemory VMA_NULLABLE vkUnmapMemory; - PFN_vkFlushMappedMemoryRanges VMA_NULLABLE vkFlushMappedMemoryRanges; - PFN_vkInvalidateMappedMemoryRanges VMA_NULLABLE vkInvalidateMappedMemoryRanges; - PFN_vkBindBufferMemory VMA_NULLABLE vkBindBufferMemory; - PFN_vkBindImageMemory VMA_NULLABLE vkBindImageMemory; - PFN_vkGetBufferMemoryRequirements VMA_NULLABLE vkGetBufferMemoryRequirements; - PFN_vkGetImageMemoryRequirements VMA_NULLABLE vkGetImageMemoryRequirements; - PFN_vkCreateBuffer VMA_NULLABLE vkCreateBuffer; - PFN_vkDestroyBuffer VMA_NULLABLE vkDestroyBuffer; - PFN_vkCreateImage VMA_NULLABLE vkCreateImage; - PFN_vkDestroyImage VMA_NULLABLE vkDestroyImage; - PFN_vkCmdCopyBuffer VMA_NULLABLE vkCmdCopyBuffer; -#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000 - /// Fetch "vkGetBufferMemoryRequirements2" on Vulkan >= 1.1, fetch "vkGetBufferMemoryRequirements2KHR" when using VK_KHR_dedicated_allocation extension. - PFN_vkGetBufferMemoryRequirements2KHR VMA_NULLABLE vkGetBufferMemoryRequirements2KHR; - /// Fetch "vkGetImageMemoryRequirements2" on Vulkan >= 1.1, fetch "vkGetImageMemoryRequirements2KHR" when using VK_KHR_dedicated_allocation extension. - PFN_vkGetImageMemoryRequirements2KHR VMA_NULLABLE vkGetImageMemoryRequirements2KHR; -#endif -#if VMA_BIND_MEMORY2 || VMA_VULKAN_VERSION >= 1001000 - /// Fetch "vkBindBufferMemory2" on Vulkan >= 1.1, fetch "vkBindBufferMemory2KHR" when using VK_KHR_bind_memory2 extension. - PFN_vkBindBufferMemory2KHR VMA_NULLABLE vkBindBufferMemory2KHR; - /// Fetch "vkBindImageMemory2" on Vulkan >= 1.1, fetch "vkBindImageMemory2KHR" when using VK_KHR_bind_memory2 extension. 
- PFN_vkBindImageMemory2KHR VMA_NULLABLE vkBindImageMemory2KHR; -#endif -#if VMA_MEMORY_BUDGET || VMA_VULKAN_VERSION >= 1001000 - PFN_vkGetPhysicalDeviceMemoryProperties2KHR VMA_NULLABLE vkGetPhysicalDeviceMemoryProperties2KHR; -#endif -#if VMA_VULKAN_VERSION >= 1003000 - /// Fetch from "vkGetDeviceBufferMemoryRequirements" on Vulkan >= 1.3, but you can also fetch it from "vkGetDeviceBufferMemoryRequirementsKHR" if you enabled extension VK_KHR_maintenance4. - PFN_vkGetDeviceBufferMemoryRequirements VMA_NULLABLE vkGetDeviceBufferMemoryRequirements; - /// Fetch from "vkGetDeviceImageMemoryRequirements" on Vulkan >= 1.3, but you can also fetch it from "vkGetDeviceImageMemoryRequirementsKHR" if you enabled extension VK_KHR_maintenance4. - PFN_vkGetDeviceImageMemoryRequirements VMA_NULLABLE vkGetDeviceImageMemoryRequirements; -#endif -} VmaVulkanFunctions; - -/// Description of a Allocator to be created. -typedef struct VmaAllocatorCreateInfo -{ - /// Flags for created allocator. Use #VmaAllocatorCreateFlagBits enum. - VmaAllocatorCreateFlags flags; - /// Vulkan physical device. - /** It must be valid throughout whole lifetime of created allocator. */ - VkPhysicalDevice VMA_NOT_NULL physicalDevice; - /// Vulkan device. - /** It must be valid throughout whole lifetime of created allocator. */ - VkDevice VMA_NOT_NULL device; - /// Preferred size of a single `VkDeviceMemory` block to be allocated from large heaps > 1 GiB. Optional. - /** Set to 0 to use default, which is currently 256 MiB. */ - VkDeviceSize preferredLargeHeapBlockSize; - /// Custom CPU memory allocation callbacks. Optional. - /** Optional, can be null. When specified, will also be used for all CPU-side memory allocations. */ - const VkAllocationCallbacks* VMA_NULLABLE pAllocationCallbacks; - /// Informative callbacks for `vkAllocateMemory`, `vkFreeMemory`. Optional. - /** Optional, can be null. */ - const VmaDeviceMemoryCallbacks* VMA_NULLABLE pDeviceMemoryCallbacks; - /** \brief Either null or a pointer to an array of limits on maximum number of bytes that can be allocated out of particular Vulkan memory heap. - - If not NULL, it must be a pointer to an array of - `VkPhysicalDeviceMemoryProperties::memoryHeapCount` elements, defining limit on - maximum number of bytes that can be allocated out of particular Vulkan memory - heap. - - Any of the elements may be equal to `VK_WHOLE_SIZE`, which means no limit on that - heap. This is also the default in case of `pHeapSizeLimit` = NULL. - - If there is a limit defined for a heap: - - - If user tries to allocate more memory from that heap using this allocator, - the allocation fails with `VK_ERROR_OUT_OF_DEVICE_MEMORY`. - - If the limit is smaller than heap size reported in `VkMemoryHeap::size`, the - value of this limit will be reported instead when using vmaGetMemoryProperties(). - - Warning! Using this feature may not be equivalent to installing a GPU with - smaller amount of memory, because graphics driver doesn't necessary fail new - allocations with `VK_ERROR_OUT_OF_DEVICE_MEMORY` result when memory capacity is - exceeded. It may return success and just silently migrate some device memory - blocks to system RAM. This driver behavior can also be controlled using - VK_AMD_memory_overallocation_behavior extension. - */ - const VkDeviceSize* VMA_NULLABLE VMA_LEN_IF_NOT_NULL("VkPhysicalDeviceMemoryProperties::memoryHeapCount") pHeapSizeLimit; - - /** \brief Pointers to Vulkan functions. Can be null. - - For details see [Pointers to Vulkan functions](@ref config_Vulkan_functions). 
- */ - const VmaVulkanFunctions* VMA_NULLABLE pVulkanFunctions; - /** \brief Handle to Vulkan instance object. - - Starting from version 3.0.0 this member is no longer optional, it must be set! - */ - VkInstance VMA_NOT_NULL instance; - /** \brief Optional. The highest version of Vulkan that the application is designed to use. - - It must be a value in the format as created by macro `VK_MAKE_VERSION` or a constant like: `VK_API_VERSION_1_1`, `VK_API_VERSION_1_0`. - The patch version number specified is ignored. Only the major and minor versions are considered. - It must be less or equal (preferably equal) to value as passed to `vkCreateInstance` as `VkApplicationInfo::apiVersion`. - Only versions 1.0, 1.1, 1.2, 1.3 are supported by the current implementation. - Leaving it initialized to zero is equivalent to `VK_API_VERSION_1_0`. - */ - uint32_t vulkanApiVersion; -#if VMA_EXTERNAL_MEMORY - /** \brief Either null or a pointer to an array of external memory handle types for each Vulkan memory type. - - If not NULL, it must be a pointer to an array of `VkPhysicalDeviceMemoryProperties::memoryTypeCount` - elements, defining external memory handle types of particular Vulkan memory type, - to be passed using `VkExportMemoryAllocateInfoKHR`. - - Any of the elements may be equal to 0, which means not to use `VkExportMemoryAllocateInfoKHR` on this memory type. - This is also the default in case of `pTypeExternalMemoryHandleTypes` = NULL. - */ - const VkExternalMemoryHandleTypeFlagsKHR* VMA_NULLABLE VMA_LEN_IF_NOT_NULL("VkPhysicalDeviceMemoryProperties::memoryTypeCount") pTypeExternalMemoryHandleTypes; -#endif // #if VMA_EXTERNAL_MEMORY -} VmaAllocatorCreateInfo; - -/// Information about existing #VmaAllocator object. -typedef struct VmaAllocatorInfo -{ - /** \brief Handle to Vulkan instance object. - - This is the same value as has been passed through VmaAllocatorCreateInfo::instance. - */ - VkInstance VMA_NOT_NULL instance; - /** \brief Handle to Vulkan physical device object. - - This is the same value as has been passed through VmaAllocatorCreateInfo::physicalDevice. - */ - VkPhysicalDevice VMA_NOT_NULL physicalDevice; - /** \brief Handle to Vulkan device object. - - This is the same value as has been passed through VmaAllocatorCreateInfo::device. - */ - VkDevice VMA_NOT_NULL device; -} VmaAllocatorInfo; - -/** @} */ - -/** -\addtogroup group_stats -@{ -*/ - -/** \brief Calculated statistics of memory usage e.g. in a specific memory type, heap, custom pool, or total. - -These are fast to calculate. -See functions: vmaGetHeapBudgets(), vmaGetPoolStatistics(). -*/ -typedef struct VmaStatistics -{ - /** \brief Number of `VkDeviceMemory` objects - Vulkan memory blocks allocated. - */ - uint32_t blockCount; - /** \brief Number of #VmaAllocation objects allocated. - - Dedicated allocations have their own blocks, so each one adds 1 to `allocationCount` as well as `blockCount`. - */ - uint32_t allocationCount; - /** \brief Number of bytes allocated in `VkDeviceMemory` blocks. - - \note To avoid confusion, please be aware that what Vulkan calls an "allocation" - a whole `VkDeviceMemory` object - (e.g. as in `VkPhysicalDeviceLimits::maxMemoryAllocationCount`) is called a "block" in VMA, while VMA calls - "allocation" a #VmaAllocation object that represents a memory region sub-allocated from such block, usually for a single buffer or image. - */ - VkDeviceSize blockBytes; - /** \brief Total number of bytes occupied by all #VmaAllocation objects. - - Always less or equal than `blockBytes`. 
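/* [Editor's note, not part of the original header] A minimal sketch of filling
 * VmaAllocatorCreateInfo as described above and creating the allocator. The
 * `instance`, `physicalDevice` and `device` handles are assumed to have been created
 * by the caller; per the VmaVulkanFunctions comments above, the two loader entry
 * points are the members required when VMA_DYNAMIC_VULKAN_FUNCTIONS is used.
 */
VmaVulkanFunctions vulkanFunctions = {0};
vulkanFunctions.vkGetInstanceProcAddr = vkGetInstanceProcAddr;
vulkanFunctions.vkGetDeviceProcAddr = vkGetDeviceProcAddr;

VmaAllocatorCreateInfo allocatorCreateInfo = {0};
allocatorCreateInfo.vulkanApiVersion = VK_API_VERSION_1_0;
allocatorCreateInfo.instance = instance;           /* mandatory since version 3.0.0 */
allocatorCreateInfo.physicalDevice = physicalDevice;
allocatorCreateInfo.device = device;
allocatorCreateInfo.pVulkanFunctions = &vulkanFunctions;

VmaAllocator allocator;
vmaCreateAllocator(&allocatorCreateInfo, &allocator);
/* ... use the allocator ... */
vmaDestroyAllocator(allocator);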
- Difference `(blockBytes - allocationBytes)` is the amount of memory allocated from Vulkan - but unused by any #VmaAllocation. - */ - VkDeviceSize allocationBytes; -} VmaStatistics; - -/** \brief More detailed statistics than #VmaStatistics. - -These are slower to calculate. Use for debugging purposes. -See functions: vmaCalculateStatistics(), vmaCalculatePoolStatistics(). - -Previous version of the statistics API provided averages, but they have been removed -because they can be easily calculated as: - -\code -VkDeviceSize allocationSizeAvg = detailedStats.statistics.allocationBytes / detailedStats.statistics.allocationCount; -VkDeviceSize unusedBytes = detailedStats.statistics.blockBytes - detailedStats.statistics.allocationBytes; -VkDeviceSize unusedRangeSizeAvg = unusedBytes / detailedStats.unusedRangeCount; -\endcode -*/ -typedef struct VmaDetailedStatistics -{ - /// Basic statistics. - VmaStatistics statistics; - /// Number of free ranges of memory between allocations. - uint32_t unusedRangeCount; - /// Smallest allocation size. `VK_WHOLE_SIZE` if there are 0 allocations. - VkDeviceSize allocationSizeMin; - /// Largest allocation size. 0 if there are 0 allocations. - VkDeviceSize allocationSizeMax; - /// Smallest empty range size. `VK_WHOLE_SIZE` if there are 0 empty ranges. - VkDeviceSize unusedRangeSizeMin; - /// Largest empty range size. 0 if there are 0 empty ranges. - VkDeviceSize unusedRangeSizeMax; -} VmaDetailedStatistics; - -/** \brief General statistics from current state of the Allocator - -total memory usage across all memory heaps and types. - -These are slower to calculate. Use for debugging purposes. -See function vmaCalculateStatistics(). -*/ -typedef struct VmaTotalStatistics -{ - VmaDetailedStatistics memoryType[VK_MAX_MEMORY_TYPES]; - VmaDetailedStatistics memoryHeap[VK_MAX_MEMORY_HEAPS]; - VmaDetailedStatistics total; -} VmaTotalStatistics; - -/** \brief Statistics of current memory usage and available budget for a specific memory heap. - -These are fast to calculate. -See function vmaGetHeapBudgets(). -*/ -typedef struct VmaBudget -{ - /** \brief Statistics fetched from the library. - */ - VmaStatistics statistics; - /** \brief Estimated current memory usage of the program, in bytes. - - Fetched from system using VK_EXT_memory_budget extension if enabled. - - It might be different than `statistics.blockBytes` (usually higher) due to additional implicit objects - also occupying the memory, like swapchain, pipelines, descriptor heaps, command buffers, or - `VkDeviceMemory` blocks allocated outside of this library, if any. - */ - VkDeviceSize usage; - /** \brief Estimated amount of memory available to the program, in bytes. - - Fetched from system using VK_EXT_memory_budget extension if enabled. - - It might be different (most probably smaller) than `VkMemoryHeap::size[heapIndex]` due to factors - external to the program, decided by the operating system. - Difference `budget - usage` is the amount of additional memory that can probably - be allocated without problems. Exceeding the budget may result in various problems. - */ - VkDeviceSize budget; -} VmaBudget; - -/** @} */ - -/** -\addtogroup group_alloc -@{ -*/ - -/** \brief Parameters of new #VmaAllocation. - -To be used with functions like vmaCreateBuffer(), vmaCreateImage(), and many others. -*/ -typedef struct VmaAllocationCreateInfo -{ - /// Use #VmaAllocationCreateFlagBits enum. - VmaAllocationCreateFlags flags; - /** \brief Intended usage of memory. 
- - You can leave #VMA_MEMORY_USAGE_UNKNOWN if you specify memory requirements in other way. \n - If `pool` is not null, this member is ignored. - */ - VmaMemoryUsage usage; - /** \brief Flags that must be set in a Memory Type chosen for an allocation. - - Leave 0 if you specify memory requirements in other way. \n - If `pool` is not null, this member is ignored.*/ - VkMemoryPropertyFlags requiredFlags; - /** \brief Flags that preferably should be set in a memory type chosen for an allocation. - - Set to 0 if no additional flags are preferred. \n - If `pool` is not null, this member is ignored. */ - VkMemoryPropertyFlags preferredFlags; - /** \brief Bitmask containing one bit set for every memory type acceptable for this allocation. - - Value 0 is equivalent to `UINT32_MAX` - it means any memory type is accepted if - it meets other requirements specified by this structure, with no further - restrictions on memory type index. \n - If `pool` is not null, this member is ignored. - */ - uint32_t memoryTypeBits; - /** \brief Pool that this allocation should be created in. - - Leave `VK_NULL_HANDLE` to allocate from default pool. If not null, members: - `usage`, `requiredFlags`, `preferredFlags`, `memoryTypeBits` are ignored. - */ - VmaPool VMA_NULLABLE pool; - /** \brief Custom general-purpose pointer that will be stored in #VmaAllocation, can be read as VmaAllocationInfo::pUserData and changed using vmaSetAllocationUserData(). - - If #VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT is used, it must be either - null or pointer to a null-terminated string. The string will be then copied to - internal buffer, so it doesn't need to be valid after allocation call. - */ - void* VMA_NULLABLE pUserData; - /** \brief A floating-point value between 0 and 1, indicating the priority of the allocation relative to other memory allocations. - - It is used only when #VMA_ALLOCATOR_CREATE_EXT_MEMORY_PRIORITY_BIT flag was used during creation of the #VmaAllocator object - and this allocation ends up as dedicated or is explicitly forced as dedicated using #VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT. - Otherwise, it has the priority of a memory block where it is placed and this variable is ignored. - */ - float priority; -} VmaAllocationCreateInfo; - -/// Describes parameter of created #VmaPool. -typedef struct VmaPoolCreateInfo -{ - /** \brief Vulkan memory type index to allocate this pool from. - */ - uint32_t memoryTypeIndex; - /** \brief Use combination of #VmaPoolCreateFlagBits. - */ - VmaPoolCreateFlags flags; - /** \brief Size of a single `VkDeviceMemory` block to be allocated as part of this pool, in bytes. Optional. - - Specify nonzero to set explicit, constant size of memory blocks used by this - pool. - - Leave 0 to use default and let the library manage block sizes automatically. - Sizes of particular blocks may vary. - In this case, the pool will also support dedicated allocations. - */ - VkDeviceSize blockSize; - /** \brief Minimum number of blocks to be always allocated in this pool, even if they stay empty. - - Set to 0 to have no preallocated blocks and allow the pool be completely empty. - */ - size_t minBlockCount; - /** \brief Maximum number of blocks that can be allocated in this pool. Optional. - - Set to 0 to use default, which is `SIZE_MAX`, which means no limit. - - Set to same value as VmaPoolCreateInfo::minBlockCount to have fixed amount of memory allocated - throughout whole lifetime of this pool. 
- */ - size_t maxBlockCount; - /** \brief A floating-point value between 0 and 1, indicating the priority of the allocations in this pool relative to other memory allocations. - - It is used only when #VMA_ALLOCATOR_CREATE_EXT_MEMORY_PRIORITY_BIT flag was used during creation of the #VmaAllocator object. - Otherwise, this variable is ignored. - */ - float priority; - /** \brief Additional minimum alignment to be used for all allocations created from this pool. Can be 0. - - Leave 0 (default) not to impose any additional alignment. If not 0, it must be a power of two. - It can be useful in cases where alignment returned by Vulkan by functions like `vkGetBufferMemoryRequirements` is not enough, - e.g. when doing interop with OpenGL. - */ - VkDeviceSize minAllocationAlignment; - /** \brief Additional `pNext` chain to be attached to `VkMemoryAllocateInfo` used for every allocation made by this pool. Optional. - - Optional, can be null. If not null, it must point to a `pNext` chain of structures that can be attached to `VkMemoryAllocateInfo`. - It can be useful for special needs such as adding `VkExportMemoryAllocateInfoKHR`. - Structures pointed by this member must remain alive and unchanged for the whole lifetime of the custom pool. - - Please note that some structures, e.g. `VkMemoryPriorityAllocateInfoEXT`, `VkMemoryDedicatedAllocateInfoKHR`, - can be attached automatically by this library when using other, more convenient of its features. - */ - void* VMA_NULLABLE pMemoryAllocateNext; -} VmaPoolCreateInfo; - -/** @} */ - -/** -\addtogroup group_alloc -@{ -*/ - -/// Parameters of #VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo(). -typedef struct VmaAllocationInfo -{ - /** \brief Memory type index that this allocation was allocated from. - - It never changes. - */ - uint32_t memoryType; - /** \brief Handle to Vulkan memory object. - - Same memory object can be shared by multiple allocations. - - It can change after the allocation is moved during \ref defragmentation. - */ - VkDeviceMemory VMA_NULLABLE_NON_DISPATCHABLE deviceMemory; - /** \brief Offset in `VkDeviceMemory` object to the beginning of this allocation, in bytes. `(deviceMemory, offset)` pair is unique to this allocation. - - You usually don't need to use this offset. If you create a buffer or an image together with the allocation using e.g. function - vmaCreateBuffer(), vmaCreateImage(), functions that operate on these resources refer to the beginning of the buffer or image, - not entire device memory block. Functions like vmaMapMemory(), vmaBindBufferMemory() also refer to the beginning of the allocation - and apply this offset automatically. - - It can change after the allocation is moved during \ref defragmentation. - */ - VkDeviceSize offset; - /** \brief Size of this allocation, in bytes. - - It never changes. - - \note Allocation size returned in this variable may be greater than the size - requested for the resource e.g. as `VkBufferCreateInfo::size`. Whole size of the - allocation is accessible for operations on memory e.g. using a pointer after - mapping with vmaMapMemory(), but operations on the resource e.g. using - `vkCmdCopyBuffer` must be limited to the size of the resource. - */ - VkDeviceSize size; - /** \brief Pointer to the beginning of this allocation as mapped data. - - If the allocation hasn't been mapped using vmaMapMemory() and hasn't been - created with #VMA_ALLOCATION_CREATE_MAPPED_BIT flag, this value is null. 
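/* [Editor's note, not part of the original header] A minimal sketch of creating a
 * custom #VmaPool from the VmaPoolCreateInfo members described above. The memory
 * type index is obtained with vmaFindMemoryTypeIndexForBufferInfo() (declared further
 * down in this header); `allocator` is assumed to exist and error handling is omitted.
 */
VkBufferCreateInfo sampleBufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
sampleBufInfo.size = 1024; /* representative buffer for what the pool will hold */
sampleBufInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;

VmaAllocationCreateInfo sampleAllocCreateInfo = {0};
sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_AUTO;

uint32_t memTypeIndex;
vmaFindMemoryTypeIndexForBufferInfo(allocator, &sampleBufInfo, &sampleAllocCreateInfo, &memTypeIndex);

VmaPoolCreateInfo poolCreateInfo = {0};
poolCreateInfo.memoryTypeIndex = memTypeIndex;
poolCreateInfo.blockSize = 0;     /* 0 = let the library manage block sizes */
poolCreateInfo.minBlockCount = 0; /* pool may be completely empty */

VmaPool pool;
vmaCreatePool(allocator, &poolCreateInfo, &pool);
/* ... allocate with VmaAllocationCreateInfo::pool = pool ... */
vmaDestroyPool(allocator, pool);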
- - It can change after call to vmaMapMemory(), vmaUnmapMemory(). - It can also change after the allocation is moved during \ref defragmentation. - */ - void* VMA_NULLABLE pMappedData; - /** \brief Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vmaSetAllocationUserData(). - - It can change after call to vmaSetAllocationUserData() for this allocation. - */ - void* VMA_NULLABLE pUserData; - /** \brief Custom allocation name that was set with vmaSetAllocationName(). - - It can change after call to vmaSetAllocationName() for this allocation. - - Another way to set custom name is to pass it in VmaAllocationCreateInfo::pUserData with - additional flag #VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT set [DEPRECATED]. - */ - const char* VMA_NULLABLE pName; -} VmaAllocationInfo; - -/** \brief Parameters for defragmentation. - -To be used with function vmaBeginDefragmentation(). -*/ -typedef struct VmaDefragmentationInfo -{ - /// \brief Use combination of #VmaDefragmentationFlagBits. - VmaDefragmentationFlags flags; - /** \brief Custom pool to be defragmented. - - If null then default pools will undergo defragmentation process. - */ - VmaPool VMA_NULLABLE pool; - /** \brief Maximum numbers of bytes that can be copied during single pass, while moving allocations to different places. - - `0` means no limit. - */ - VkDeviceSize maxBytesPerPass; - /** \brief Maximum number of allocations that can be moved during single pass to a different place. - - `0` means no limit. - */ - uint32_t maxAllocationsPerPass; -} VmaDefragmentationInfo; - -/// Single move of an allocation to be done for defragmentation. -typedef struct VmaDefragmentationMove -{ - /// Operation to be performed on the allocation by vmaEndDefragmentationPass(). Default value is #VMA_DEFRAGMENTATION_MOVE_OPERATION_COPY. You can modify it. - VmaDefragmentationMoveOperation operation; - /// Allocation that should be moved. - VmaAllocation VMA_NOT_NULL srcAllocation; - /** \brief Temporary allocation pointing to destination memory that will replace `srcAllocation`. - - \warning Do not store this allocation in your data structures! It exists only temporarily, for the duration of the defragmentation pass, - to be used for binding new buffer/image to the destination memory using e.g. vmaBindBufferMemory(). - vmaEndDefragmentationPass() will destroy it and make `srcAllocation` point to this memory. - */ - VmaAllocation VMA_NOT_NULL dstTmpAllocation; -} VmaDefragmentationMove; - -/** \brief Parameters for incremental defragmentation steps. - -To be used with function vmaBeginDefragmentationPass(). -*/ -typedef struct VmaDefragmentationPassMoveInfo -{ - /// Number of elements in the `pMoves` array. - uint32_t moveCount; - /** \brief Array of moves to be performed by the user in the current defragmentation pass. - - Pointer to an array of `moveCount` elements, owned by VMA, created in vmaBeginDefragmentationPass(), destroyed in vmaEndDefragmentationPass(). - - For each element, you should: - - 1. Create a new buffer/image in the place pointed by VmaDefragmentationMove::dstMemory + VmaDefragmentationMove::dstOffset. - 2. Copy data from the VmaDefragmentationMove::srcAllocation e.g. using `vkCmdCopyBuffer`, `vkCmdCopyImage`. - 3. Make sure these commands finished executing on the GPU. - 4. Destroy the old buffer/image. - - Only then you can finish defragmentation pass by calling vmaEndDefragmentationPass(). - After this call, the allocation will point to the new place in memory. 
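/* [Editor's note, not part of the original header] A minimal sketch of the pass loop
 * implied by the steps above, using vmaBeginDefragmentation(),
 * vmaBeginDefragmentationPass(), vmaEndDefragmentationPass() and vmaEndDefragmentation()
 * referenced in these comments. The exact signatures and return-value convention shown
 * here are the editor's assumption of the VMA 3.x API; buffer/image recreation and the
 * GPU copies themselves are elided.
 */
VmaDefragmentationInfo defragInfo = {0};
defragInfo.flags = VMA_DEFRAGMENTATION_FLAG_ALGORITHM_BALANCED_BIT;

VmaDefragmentationContext defragCtx;
vmaBeginDefragmentation(allocator, &defragInfo, &defragCtx);

for(;;)
{
    VmaDefragmentationPassMoveInfo pass;
    if(vmaBeginDefragmentationPass(allocator, defragCtx, &pass) == VK_SUCCESS)
        break; /* nothing left to move */
    /* For each pass.pMoves[i]: recreate the buffer/image bound to dstTmpAllocation,
       copy the data on the GPU, wait for completion, destroy the old resource, or set
       pass.pMoves[i].operation to IGNORE / DESTROY as documented above. */
    if(vmaEndDefragmentationPass(allocator, defragCtx, &pass) == VK_SUCCESS)
        break;
}

VmaDefragmentationStats defragStats;
vmaEndDefragmentation(allocator, defragCtx, &defragStats);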
- - Alternatively, if you cannot move specific allocation, you can set VmaDefragmentationMove::operation to #VMA_DEFRAGMENTATION_MOVE_OPERATION_IGNORE. - - Alternatively, if you decide you want to completely remove the allocation: - - 1. Destroy its buffer/image. - 2. Set VmaDefragmentationMove::operation to #VMA_DEFRAGMENTATION_MOVE_OPERATION_DESTROY. - - Then, after vmaEndDefragmentationPass() the allocation will be freed. - */ - VmaDefragmentationMove* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(moveCount) pMoves; -} VmaDefragmentationPassMoveInfo; - -/// Statistics returned for defragmentation process in function vmaEndDefragmentation(). -typedef struct VmaDefragmentationStats -{ - /// Total number of bytes that have been copied while moving allocations to different places. - VkDeviceSize bytesMoved; - /// Total number of bytes that have been released to the system by freeing empty `VkDeviceMemory` objects. - VkDeviceSize bytesFreed; - /// Number of allocations that have been moved to different places. - uint32_t allocationsMoved; - /// Number of empty `VkDeviceMemory` objects that have been released to the system. - uint32_t deviceMemoryBlocksFreed; -} VmaDefragmentationStats; - -/** @} */ - -/** -\addtogroup group_virtual -@{ -*/ - -/// Parameters of created #VmaVirtualBlock object to be passed to vmaCreateVirtualBlock(). -typedef struct VmaVirtualBlockCreateInfo -{ - /** \brief Total size of the virtual block. - - Sizes can be expressed in bytes or any units you want as long as you are consistent in using them. - For example, if you allocate from some array of structures, 1 can mean single instance of entire structure. - */ - VkDeviceSize size; - - /** \brief Use combination of #VmaVirtualBlockCreateFlagBits. - */ - VmaVirtualBlockCreateFlags flags; - - /** \brief Custom CPU memory allocation callbacks. Optional. - - Optional, can be null. When specified, they will be used for all CPU-side memory allocations. - */ - const VkAllocationCallbacks* VMA_NULLABLE pAllocationCallbacks; -} VmaVirtualBlockCreateInfo; - -/// Parameters of created virtual allocation to be passed to vmaVirtualAllocate(). -typedef struct VmaVirtualAllocationCreateInfo -{ - /** \brief Size of the allocation. - - Cannot be zero. - */ - VkDeviceSize size; - /** \brief Required alignment of the allocation. Optional. - - Must be power of two. Special value 0 has the same meaning as 1 - means no special alignment is required, so allocation can start at any offset. - */ - VkDeviceSize alignment; - /** \brief Use combination of #VmaVirtualAllocationCreateFlagBits. - */ - VmaVirtualAllocationCreateFlags flags; - /** \brief Custom pointer to be associated with the allocation. Optional. - - It can be any value and can be used for user-defined purposes. It can be fetched or changed later. - */ - void* VMA_NULLABLE pUserData; -} VmaVirtualAllocationCreateInfo; - -/// Parameters of an existing virtual allocation, returned by vmaGetVirtualAllocationInfo(). -typedef struct VmaVirtualAllocationInfo -{ - /** \brief Offset of the allocation. - - Offset at which the allocation was made. - */ - VkDeviceSize offset; - /** \brief Size of the allocation. - - Same value as passed in VmaVirtualAllocationCreateInfo::size. - */ - VkDeviceSize size; - /** \brief Custom pointer associated with the allocation. - - Same value as passed in VmaVirtualAllocationCreateInfo::pUserData or to vmaSetVirtualAllocationUserData(). 
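/* [Editor's note, not part of the original header] A minimal sketch of the virtual
 * allocator structures described above, using vmaCreateVirtualBlock(),
 * vmaVirtualAllocate() and vmaDestroyVirtualBlock() referenced in these comments.
 * vmaVirtualFree() and the exact parameter lists are the editor's assumption of the
 * VMA 3.x API. No real GPU memory is involved; sizes are in whatever units the caller
 * chooses.
 */
VmaVirtualBlockCreateInfo blockCreateInfo = {0};
blockCreateInfo.size = 1048576; /* 1 MiB of "virtual" space */

VmaVirtualBlock block;
vmaCreateVirtualBlock(&blockCreateInfo, &block);

VmaVirtualAllocationCreateInfo vAllocCreateInfo = {0};
vAllocCreateInfo.size = 4096;

VmaVirtualAllocation vAlloc;
VkDeviceSize vOffset;
vmaVirtualAllocate(block, &vAllocCreateInfo, &vAlloc, &vOffset);
/* `vOffset` can now index into whatever resource this block manages. */

vmaVirtualFree(block, vAlloc);
vmaDestroyVirtualBlock(block);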
- */ - void* VMA_NULLABLE pUserData; -} VmaVirtualAllocationInfo; - -/** @} */ - -#endif // _VMA_DATA_TYPES_DECLARATIONS - -#ifndef _VMA_FUNCTION_HEADERS - -/** -\addtogroup group_init -@{ -*/ - -/// Creates #VmaAllocator object. -VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateAllocator( - const VmaAllocatorCreateInfo* VMA_NOT_NULL pCreateInfo, - VmaAllocator VMA_NULLABLE* VMA_NOT_NULL pAllocator); - -/// Destroys allocator object. -VMA_CALL_PRE void VMA_CALL_POST vmaDestroyAllocator( - VmaAllocator VMA_NULLABLE allocator); - -/** \brief Returns information about existing #VmaAllocator object - handle to Vulkan device etc. - -It might be useful if you want to keep just the #VmaAllocator handle and fetch other required handles to -`VkPhysicalDevice`, `VkDevice` etc. every time using this function. -*/ -VMA_CALL_PRE void VMA_CALL_POST vmaGetAllocatorInfo( - VmaAllocator VMA_NOT_NULL allocator, - VmaAllocatorInfo* VMA_NOT_NULL pAllocatorInfo); - -/** -PhysicalDeviceProperties are fetched from physicalDevice by the allocator. -You can access it here, without fetching it again on your own. -*/ -VMA_CALL_PRE void VMA_CALL_POST vmaGetPhysicalDeviceProperties( - VmaAllocator VMA_NOT_NULL allocator, - const VkPhysicalDeviceProperties* VMA_NULLABLE* VMA_NOT_NULL ppPhysicalDeviceProperties); - -/** -PhysicalDeviceMemoryProperties are fetched from physicalDevice by the allocator. -You can access it here, without fetching it again on your own. -*/ -VMA_CALL_PRE void VMA_CALL_POST vmaGetMemoryProperties( - VmaAllocator VMA_NOT_NULL allocator, - const VkPhysicalDeviceMemoryProperties* VMA_NULLABLE* VMA_NOT_NULL ppPhysicalDeviceMemoryProperties); - -/** -\brief Given Memory Type Index, returns Property Flags of this memory type. - -This is just a convenience function. Same information can be obtained using -vmaGetMemoryProperties(). -*/ -VMA_CALL_PRE void VMA_CALL_POST vmaGetMemoryTypeProperties( - VmaAllocator VMA_NOT_NULL allocator, - uint32_t memoryTypeIndex, - VkMemoryPropertyFlags* VMA_NOT_NULL pFlags); - -/** \brief Sets index of the current frame. -*/ -VMA_CALL_PRE void VMA_CALL_POST vmaSetCurrentFrameIndex( - VmaAllocator VMA_NOT_NULL allocator, - uint32_t frameIndex); - -/** @} */ - -/** -\addtogroup group_stats -@{ -*/ - -/** \brief Retrieves statistics from current state of the Allocator. - -This function is called "calculate" not "get" because it has to traverse all -internal data structures, so it may be quite slow. Use it for debugging purposes. -For faster but more brief statistics suitable to be called every frame or every allocation, -use vmaGetHeapBudgets(). - -Note that when using allocator from multiple threads, returned information may immediately -become outdated. -*/ -VMA_CALL_PRE void VMA_CALL_POST vmaCalculateStatistics( - VmaAllocator VMA_NOT_NULL allocator, - VmaTotalStatistics* VMA_NOT_NULL pStats); - -/** \brief Retrieves information about current memory usage and budget for all memory heaps. - -\param allocator -\param[out] pBudgets Must point to array with number of elements at least equal to number of memory heaps in physical device used. - -This function is called "get" not "calculate" because it is very fast, suitable to be called -every frame or every allocation. For more detailed statistics use vmaCalculateStatistics(). - -Note that when using allocator from multiple threads, returned information may immediately -become outdated. 
-*/ -VMA_CALL_PRE void VMA_CALL_POST vmaGetHeapBudgets( - VmaAllocator VMA_NOT_NULL allocator, - VmaBudget* VMA_NOT_NULL VMA_LEN_IF_NOT_NULL("VkPhysicalDeviceMemoryProperties::memoryHeapCount") pBudgets); - -/** @} */ - -/** -\addtogroup group_alloc -@{ -*/ - -/** -\brief Helps to find memoryTypeIndex, given memoryTypeBits and VmaAllocationCreateInfo. - -This algorithm tries to find a memory type that: - -- Is allowed by memoryTypeBits. -- Contains all the flags from pAllocationCreateInfo->requiredFlags. -- Matches intended usage. -- Has as many flags from pAllocationCreateInfo->preferredFlags as possible. - -\return Returns VK_ERROR_FEATURE_NOT_PRESENT if not found. Receiving such result -from this function or any other allocating function probably means that your -device doesn't support any memory type with requested features for the specific -type of resource you want to use it for. Please check parameters of your -resource, like image layout (OPTIMAL versus LINEAR) or mip level count. -*/ -VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndex( - VmaAllocator VMA_NOT_NULL allocator, - uint32_t memoryTypeBits, - const VmaAllocationCreateInfo* VMA_NOT_NULL pAllocationCreateInfo, - uint32_t* VMA_NOT_NULL pMemoryTypeIndex); - -/** -\brief Helps to find memoryTypeIndex, given VkBufferCreateInfo and VmaAllocationCreateInfo. - -It can be useful e.g. to determine value to be used as VmaPoolCreateInfo::memoryTypeIndex. -It internally creates a temporary, dummy buffer that never has memory bound. -*/ -VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndexForBufferInfo( - VmaAllocator VMA_NOT_NULL allocator, - const VkBufferCreateInfo* VMA_NOT_NULL pBufferCreateInfo, - const VmaAllocationCreateInfo* VMA_NOT_NULL pAllocationCreateInfo, - uint32_t* VMA_NOT_NULL pMemoryTypeIndex); - -/** -\brief Helps to find memoryTypeIndex, given VkImageCreateInfo and VmaAllocationCreateInfo. - -It can be useful e.g. to determine value to be used as VmaPoolCreateInfo::memoryTypeIndex. -It internally creates a temporary, dummy image that never has memory bound. -*/ -VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndexForImageInfo( - VmaAllocator VMA_NOT_NULL allocator, - const VkImageCreateInfo* VMA_NOT_NULL pImageCreateInfo, - const VmaAllocationCreateInfo* VMA_NOT_NULL pAllocationCreateInfo, - uint32_t* VMA_NOT_NULL pMemoryTypeIndex); - -/** \brief Allocates Vulkan device memory and creates #VmaPool object. - -\param allocator Allocator object. -\param pCreateInfo Parameters of pool to create. -\param[out] pPool Handle to created pool. -*/ -VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreatePool( - VmaAllocator VMA_NOT_NULL allocator, - const VmaPoolCreateInfo* VMA_NOT_NULL pCreateInfo, - VmaPool VMA_NULLABLE* VMA_NOT_NULL pPool); - -/** \brief Destroys #VmaPool object and frees Vulkan device memory. -*/ -VMA_CALL_PRE void VMA_CALL_POST vmaDestroyPool( - VmaAllocator VMA_NOT_NULL allocator, - VmaPool VMA_NULLABLE pool); - -/** @} */ - -/** -\addtogroup group_stats -@{ -*/ - -/** \brief Retrieves statistics of existing #VmaPool object. - -\param allocator Allocator object. -\param pool Pool object. -\param[out] pPoolStats Statistics of specified pool. -*/ -VMA_CALL_PRE void VMA_CALL_POST vmaGetPoolStatistics( - VmaAllocator VMA_NOT_NULL allocator, - VmaPool VMA_NOT_NULL pool, - VmaStatistics* VMA_NOT_NULL pPoolStats); - -/** \brief Retrieves detailed statistics of existing #VmaPool object. - -\param allocator Allocator object. -\param pool Pool object. -\param[out] pPoolStats Statistics of specified pool. 
-*/ -VMA_CALL_PRE void VMA_CALL_POST vmaCalculatePoolStatistics( - VmaAllocator VMA_NOT_NULL allocator, - VmaPool VMA_NOT_NULL pool, - VmaDetailedStatistics* VMA_NOT_NULL pPoolStats); - -/** @} */ - -/** -\addtogroup group_alloc -@{ -*/ - -/** \brief Checks magic number in margins around all allocations in given memory pool in search for corruptions. - -Corruption detection is enabled only when `VMA_DEBUG_DETECT_CORRUPTION` macro is defined to nonzero, -`VMA_DEBUG_MARGIN` is defined to nonzero and the pool is created in memory type that is -`HOST_VISIBLE` and `HOST_COHERENT`. For more information, see [Corruption detection](@ref debugging_memory_usage_corruption_detection). - -Possible return values: - -- `VK_ERROR_FEATURE_NOT_PRESENT` - corruption detection is not enabled for specified pool. -- `VK_SUCCESS` - corruption detection has been performed and succeeded. -- `VK_ERROR_UNKNOWN` - corruption detection has been performed and found memory corruptions around one of the allocations. - `VMA_ASSERT` is also fired in that case. -- Other value: Error returned by Vulkan, e.g. memory mapping failure. -*/ -VMA_CALL_PRE VkResult VMA_CALL_POST vmaCheckPoolCorruption( - VmaAllocator VMA_NOT_NULL allocator, - VmaPool VMA_NOT_NULL pool); - -/** \brief Retrieves name of a custom pool. - -After the call `ppName` is either null or points to an internally-owned null-terminated string -containing name of the pool that was previously set. The pointer becomes invalid when the pool is -destroyed or its name is changed using vmaSetPoolName(). -*/ -VMA_CALL_PRE void VMA_CALL_POST vmaGetPoolName( - VmaAllocator VMA_NOT_NULL allocator, - VmaPool VMA_NOT_NULL pool, - const char* VMA_NULLABLE* VMA_NOT_NULL ppName); - -/** \brief Sets name of a custom pool. - -`pName` can be either null or pointer to a null-terminated string with new name for the pool. -Function makes internal copy of the string, so it can be changed or freed immediately after this call. -*/ -VMA_CALL_PRE void VMA_CALL_POST vmaSetPoolName( - VmaAllocator VMA_NOT_NULL allocator, - VmaPool VMA_NOT_NULL pool, - const char* VMA_NULLABLE pName); - -/** \brief General purpose memory allocation. - -\param allocator -\param pVkMemoryRequirements -\param pCreateInfo -\param[out] pAllocation Handle to allocated memory. -\param[out] pAllocationInfo Optional. Information about allocated memory. It can be later fetched using function vmaGetAllocationInfo(). - -You should free the memory using vmaFreeMemory() or vmaFreeMemoryPages(). - -It is recommended to use vmaAllocateMemoryForBuffer(), vmaAllocateMemoryForImage(), -vmaCreateBuffer(), vmaCreateImage() instead whenever possible. -*/ -VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemory( - VmaAllocator VMA_NOT_NULL allocator, - const VkMemoryRequirements* VMA_NOT_NULL pVkMemoryRequirements, - const VmaAllocationCreateInfo* VMA_NOT_NULL pCreateInfo, - VmaAllocation VMA_NULLABLE* VMA_NOT_NULL pAllocation, - VmaAllocationInfo* VMA_NULLABLE pAllocationInfo); - -/** \brief General purpose memory allocation for multiple allocation objects at once. - -\param allocator Allocator object. -\param pVkMemoryRequirements Memory requirements for each allocation. -\param pCreateInfo Creation parameters for each allocation. -\param allocationCount Number of allocations to make. -\param[out] pAllocations Pointer to array that will be filled with handles to created allocations. -\param[out] pAllocationInfo Optional. Pointer to array that will be filled with parameters of created allocations. 
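As a small illustration of the pool-name accessors documented above, and of the ownership rules they describe (the string is copied on set, and the returned pointer stays valid only until the pool is renamed or destroyed), a sketch with assumed `allocator` and `pool` handles:

\code
vmaSetPoolName(allocator, pool, "TexturePool"); // VMA stores its own copy of the string

const char* poolName = nullptr;
vmaGetPoolName(allocator, pool, &poolName);
// poolName now points to the internally owned copy ("TexturePool"),
// valid until the pool is renamed with vmaSetPoolName() or destroyed.
\endcode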
- -You should free the memory using vmaFreeMemory() or vmaFreeMemoryPages(). - -Word "pages" is just a suggestion to use this function to allocate pieces of memory needed for sparse binding. -It is just a general purpose allocation function able to make multiple allocations at once. -It may be internally optimized to be more efficient than calling vmaAllocateMemory() `allocationCount` times. - -All allocations are made using same parameters. All of them are created out of the same memory pool and type. -If any allocation fails, all allocations already made within this function call are also freed, so that when -returned result is not `VK_SUCCESS`, `pAllocation` array is always entirely filled with `VK_NULL_HANDLE`. -*/ -VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryPages( - VmaAllocator VMA_NOT_NULL allocator, - const VkMemoryRequirements* VMA_NOT_NULL VMA_LEN_IF_NOT_NULL(allocationCount) pVkMemoryRequirements, - const VmaAllocationCreateInfo* VMA_NOT_NULL VMA_LEN_IF_NOT_NULL(allocationCount) pCreateInfo, - size_t allocationCount, - VmaAllocation VMA_NULLABLE* VMA_NOT_NULL VMA_LEN_IF_NOT_NULL(allocationCount) pAllocations, - VmaAllocationInfo* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) pAllocationInfo); - -/** \brief Allocates memory suitable for given `VkBuffer`. - -\param allocator -\param buffer -\param pCreateInfo -\param[out] pAllocation Handle to allocated memory. -\param[out] pAllocationInfo Optional. Information about allocated memory. It can be later fetched using function vmaGetAllocationInfo(). - -It only creates #VmaAllocation. To bind the memory to the buffer, use vmaBindBufferMemory(). - -This is a special-purpose function. In most cases you should use vmaCreateBuffer(). - -You must free the allocation using vmaFreeMemory() when no longer needed. -*/ -VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryForBuffer( - VmaAllocator VMA_NOT_NULL allocator, - VkBuffer VMA_NOT_NULL_NON_DISPATCHABLE buffer, - const VmaAllocationCreateInfo* VMA_NOT_NULL pCreateInfo, - VmaAllocation VMA_NULLABLE* VMA_NOT_NULL pAllocation, - VmaAllocationInfo* VMA_NULLABLE pAllocationInfo); - -/** \brief Allocates memory suitable for given `VkImage`. - -\param allocator -\param image -\param pCreateInfo -\param[out] pAllocation Handle to allocated memory. -\param[out] pAllocationInfo Optional. Information about allocated memory. It can be later fetched using function vmaGetAllocationInfo(). - -It only creates #VmaAllocation. To bind the memory to the buffer, use vmaBindImageMemory(). - -This is a special-purpose function. In most cases you should use vmaCreateImage(). - -You must free the allocation using vmaFreeMemory() when no longer needed. -*/ -VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryForImage( - VmaAllocator VMA_NOT_NULL allocator, - VkImage VMA_NOT_NULL_NON_DISPATCHABLE image, - const VmaAllocationCreateInfo* VMA_NOT_NULL pCreateInfo, - VmaAllocation VMA_NULLABLE* VMA_NOT_NULL pAllocation, - VmaAllocationInfo* VMA_NULLABLE pAllocationInfo); - -/** \brief Frees memory previously allocated using vmaAllocateMemory(), vmaAllocateMemoryForBuffer(), or vmaAllocateMemoryForImage(). - -Passing `VK_NULL_HANDLE` as `allocation` is valid. Such function call is just skipped. -*/ -VMA_CALL_PRE void VMA_CALL_POST vmaFreeMemory( - VmaAllocator VMA_NOT_NULL allocator, - const VmaAllocation VMA_NULLABLE allocation); - -/** \brief Frees memory and destroys multiple allocations. - -Word "pages" is just a suggestion to use this function to free pieces of memory used for sparse binding. 
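The separate allocate-then-bind flow described above might look like the following sketch; `allocator` and an already created `VkBuffer buffer` are assumed, `VMA_MEMORY_USAGE_AUTO` is an assumption about this VMA version, and vmaCreateBuffer() remains the recommended single-call path:

\code
VmaAllocationCreateInfo allocCreateInfo = {};
allocCreateInfo.usage = VMA_MEMORY_USAGE_AUTO; // assumption: available in this VMA version

VmaAllocation allocation = VK_NULL_HANDLE;
VkResult res = vmaAllocateMemoryForBuffer(allocator, buffer, &allocCreateInfo, &allocation, nullptr);
if(res == VK_SUCCESS)
{
    res = vmaBindBufferMemory(allocator, allocation, buffer); // binding is documented further below
    // ... use the buffer, then release the memory:
    vmaFreeMemory(allocator, allocation);
}
\endcode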
-It is just a general purpose function to free memory and destroy allocations made using e.g. vmaAllocateMemory(), -vmaAllocateMemoryPages() and other functions. -It may be internally optimized to be more efficient than calling vmaFreeMemory() `allocationCount` times. - -Allocations in `pAllocations` array can come from any memory pools and types. -Passing `VK_NULL_HANDLE` as elements of `pAllocations` array is valid. Such entries are just skipped. -*/ -VMA_CALL_PRE void VMA_CALL_POST vmaFreeMemoryPages( - VmaAllocator VMA_NOT_NULL allocator, - size_t allocationCount, - const VmaAllocation VMA_NULLABLE* VMA_NOT_NULL VMA_LEN_IF_NOT_NULL(allocationCount) pAllocations); - -/** \brief Returns current information about specified allocation. - -Current paramteres of given allocation are returned in `pAllocationInfo`. - -Although this function doesn't lock any mutex, so it should be quite efficient, -you should avoid calling it too often. -You can retrieve same VmaAllocationInfo structure while creating your resource, from function -vmaCreateBuffer(), vmaCreateImage(). You can remember it if you are sure parameters don't change -(e.g. due to defragmentation). -*/ -VMA_CALL_PRE void VMA_CALL_POST vmaGetAllocationInfo( - VmaAllocator VMA_NOT_NULL allocator, - VmaAllocation VMA_NOT_NULL allocation, - VmaAllocationInfo* VMA_NOT_NULL pAllocationInfo); - -/** \brief Sets pUserData in given allocation to new value. - -The value of pointer `pUserData` is copied to allocation's `pUserData`. -It is opaque, so you can use it however you want - e.g. -as a pointer, ordinal number or some handle to you own data. -*/ -VMA_CALL_PRE void VMA_CALL_POST vmaSetAllocationUserData( - VmaAllocator VMA_NOT_NULL allocator, - VmaAllocation VMA_NOT_NULL allocation, - void* VMA_NULLABLE pUserData); - -/** \brief Sets pName in given allocation to new value. - -`pName` must be either null, or pointer to a null-terminated string. The function -makes local copy of the string and sets it as allocation's `pName`. String -passed as pName doesn't need to be valid for whole lifetime of the allocation - -you can free it after this call. String previously pointed by allocation's -`pName` is freed from memory. -*/ -VMA_CALL_PRE void VMA_CALL_POST vmaSetAllocationName( - VmaAllocator VMA_NOT_NULL allocator, - VmaAllocation VMA_NOT_NULL allocation, - const char* VMA_NULLABLE pName); - -/** -\brief Given an allocation, returns Property Flags of its memory type. - -This is just a convenience function. Same information can be obtained using -vmaGetAllocationInfo() + vmaGetMemoryProperties(). -*/ -VMA_CALL_PRE void VMA_CALL_POST vmaGetAllocationMemoryProperties( - VmaAllocator VMA_NOT_NULL allocator, - VmaAllocation VMA_NOT_NULL allocation, - VkMemoryPropertyFlags* VMA_NOT_NULL pFlags); - -/** \brief Maps memory represented by given allocation and returns pointer to it. - -Maps memory represented by given allocation to make it accessible to CPU code. -When succeeded, `*ppData` contains pointer to first byte of this memory. - -\warning -If the allocation is part of a bigger `VkDeviceMemory` block, returned pointer is -correctly offsetted to the beginning of region assigned to this particular allocation. -Unlike the result of `vkMapMemory`, it points to the allocation, not to the beginning of the whole block. -You should not add VmaAllocationInfo::offset to it! 
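vmaGetAllocationMemoryProperties(), documented above, is a convenient way to decide at runtime whether an allocation can be written through a mapping at all. A short sketch with assumed `allocator` and `allocation` handles:

\code
VkMemoryPropertyFlags memFlags = 0;
vmaGetAllocationMemoryProperties(allocator, allocation, &memFlags);
if((memFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
{
    // Safe to use vmaMapMemory() on this allocation (see below);
    // otherwise an upload must go through a staging copy instead.
}
\endcode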
- -Mapping is internally reference-counted and synchronized, so despite raw Vulkan -function `vkMapMemory()` cannot be used to map same block of `VkDeviceMemory` -multiple times simultaneously, it is safe to call this function on allocations -assigned to the same memory block. Actual Vulkan memory will be mapped on first -mapping and unmapped on last unmapping. - -If the function succeeded, you must call vmaUnmapMemory() to unmap the -allocation when mapping is no longer needed or before freeing the allocation, at -the latest. - -It also safe to call this function multiple times on the same allocation. You -must call vmaUnmapMemory() same number of times as you called vmaMapMemory(). - -It is also safe to call this function on allocation created with -#VMA_ALLOCATION_CREATE_MAPPED_BIT flag. Its memory stays mapped all the time. -You must still call vmaUnmapMemory() same number of times as you called -vmaMapMemory(). You must not call vmaUnmapMemory() additional time to free the -"0-th" mapping made automatically due to #VMA_ALLOCATION_CREATE_MAPPED_BIT flag. - -This function fails when used on allocation made in memory type that is not -`HOST_VISIBLE`. - -This function doesn't automatically flush or invalidate caches. -If the allocation is made from a memory types that is not `HOST_COHERENT`, -you also need to use vmaInvalidateAllocation() / vmaFlushAllocation(), as required by Vulkan specification. -*/ -VMA_CALL_PRE VkResult VMA_CALL_POST vmaMapMemory( - VmaAllocator VMA_NOT_NULL allocator, - VmaAllocation VMA_NOT_NULL allocation, - void* VMA_NULLABLE* VMA_NOT_NULL ppData); - -/** \brief Unmaps memory represented by given allocation, mapped previously using vmaMapMemory(). - -For details, see description of vmaMapMemory(). - -This function doesn't automatically flush or invalidate caches. -If the allocation is made from a memory types that is not `HOST_COHERENT`, -you also need to use vmaInvalidateAllocation() / vmaFlushAllocation(), as required by Vulkan specification. -*/ -VMA_CALL_PRE void VMA_CALL_POST vmaUnmapMemory( - VmaAllocator VMA_NOT_NULL allocator, - VmaAllocation VMA_NOT_NULL allocation); - -/** \brief Flushes memory of given allocation. - -Calls `vkFlushMappedMemoryRanges()` for memory associated with given range of given allocation. -It needs to be called after writing to a mapped memory for memory types that are not `HOST_COHERENT`. -Unmap operation doesn't do that automatically. - -- `offset` must be relative to the beginning of allocation. -- `size` can be `VK_WHOLE_SIZE`. It means all memory from `offset` the the end of given allocation. -- `offset` and `size` don't have to be aligned. - They are internally rounded down/up to multiply of `nonCoherentAtomSize`. -- If `size` is 0, this call is ignored. -- If memory type that the `allocation` belongs to is not `HOST_VISIBLE` or it is `HOST_COHERENT`, - this call is ignored. - -Warning! `offset` and `size` are relative to the contents of given `allocation`. -If you mean whole allocation, you can pass 0 and `VK_WHOLE_SIZE`, respectively. -Do not pass allocation's offset as `offset`!!! - -This function returns the `VkResult` from `vkFlushMappedMemoryRanges` if it is -called, otherwise `VK_SUCCESS`. -*/ -VMA_CALL_PRE VkResult VMA_CALL_POST vmaFlushAllocation( - VmaAllocator VMA_NOT_NULL allocator, - VmaAllocation VMA_NOT_NULL allocation, - VkDeviceSize offset, - VkDeviceSize size); - -/** \brief Invalidates memory of given allocation. 
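Putting the mapping, flushing, and unmapping rules above together, a typical CPU-side upload reduces to the following sketch; `allocator`, `allocation`, `srcData`, and `dataSize` are assumed to exist, and the flush is required only for non-`HOST_COHERENT` memory types:

\code
void* mappedData = nullptr;
VkResult res = vmaMapMemory(allocator, allocation, &mappedData);
if(res == VK_SUCCESS)
{
    memcpy(mappedData, srcData, (size_t)dataSize);
    // Needed for non-HOST_COHERENT memory; harmless (returns VK_SUCCESS) otherwise.
    vmaFlushAllocation(allocator, allocation, 0, VK_WHOLE_SIZE);
    vmaUnmapMemory(allocator, allocation);
}
\endcode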
-
-Calls `vkInvalidateMappedMemoryRanges()` for memory associated with given range of given allocation.
-It needs to be called before reading from a mapped memory for memory types that are not `HOST_COHERENT`.
-Map operation doesn't do that automatically.
-
-- `offset` must be relative to the beginning of allocation.
-- `size` can be `VK_WHOLE_SIZE`. It means all memory from `offset` to the end of given allocation.
-- `offset` and `size` don't have to be aligned.
-  They are internally rounded down/up to a multiple of `nonCoherentAtomSize`.
-- If `size` is 0, this call is ignored.
-- If memory type that the `allocation` belongs to is not `HOST_VISIBLE` or it is `HOST_COHERENT`,
-  this call is ignored.
-
-Warning! `offset` and `size` are relative to the contents of given `allocation`.
-If you mean whole allocation, you can pass 0 and `VK_WHOLE_SIZE`, respectively.
-Do not pass allocation's offset as `offset`!!!
-
-This function returns the `VkResult` from `vkInvalidateMappedMemoryRanges` if
-it is called, otherwise `VK_SUCCESS`.
-*/
-VMA_CALL_PRE VkResult VMA_CALL_POST vmaInvalidateAllocation(
-    VmaAllocator VMA_NOT_NULL allocator,
-    VmaAllocation VMA_NOT_NULL allocation,
-    VkDeviceSize offset,
-    VkDeviceSize size);
-
-/** \brief Flushes memory of given set of allocations.
-
-Calls `vkFlushMappedMemoryRanges()` for memory associated with given ranges of given allocations.
-For more information, see documentation of vmaFlushAllocation().
-
-\param allocator
-\param allocationCount
-\param allocations
-\param offsets If not null, it must point to an array of offsets of regions to flush, relative to the beginning of respective allocations. Null means all offsets are zero.
-\param sizes If not null, it must point to an array of sizes of regions to flush in respective allocations. Null means `VK_WHOLE_SIZE` for all allocations.
-
-This function returns the `VkResult` from `vkFlushMappedMemoryRanges` if it is
-called, otherwise `VK_SUCCESS`.
-*/
-VMA_CALL_PRE VkResult VMA_CALL_POST vmaFlushAllocations(
-    VmaAllocator VMA_NOT_NULL allocator,
-    uint32_t allocationCount,
-    const VmaAllocation VMA_NOT_NULL* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) allocations,
-    const VkDeviceSize* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) offsets,
-    const VkDeviceSize* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) sizes);
-
-/** \brief Invalidates memory of given set of allocations.
-
-Calls `vkInvalidateMappedMemoryRanges()` for memory associated with given ranges of given allocations.
-For more information, see documentation of vmaInvalidateAllocation().
-
-\param allocator
-\param allocationCount
-\param allocations
-\param offsets If not null, it must point to an array of offsets of regions to invalidate, relative to the beginning of respective allocations. Null means all offsets are zero.
-\param sizes If not null, it must point to an array of sizes of regions to invalidate in respective allocations. Null means `VK_WHOLE_SIZE` for all allocations.
-
-This function returns the `VkResult` from `vkInvalidateMappedMemoryRanges` if it is
-called, otherwise `VK_SUCCESS`.
-*/ -VMA_CALL_PRE VkResult VMA_CALL_POST vmaInvalidateAllocations( - VmaAllocator VMA_NOT_NULL allocator, - uint32_t allocationCount, - const VmaAllocation VMA_NOT_NULL* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) allocations, - const VkDeviceSize* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) offsets, - const VkDeviceSize* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) sizes); - -/** \brief Checks magic number in margins around all allocations in given memory types (in both default and custom pools) in search for corruptions. - -\param allocator -\param memoryTypeBits Bit mask, where each bit set means that a memory type with that index should be checked. - -Corruption detection is enabled only when `VMA_DEBUG_DETECT_CORRUPTION` macro is defined to nonzero, -`VMA_DEBUG_MARGIN` is defined to nonzero and only for memory types that are -`HOST_VISIBLE` and `HOST_COHERENT`. For more information, see [Corruption detection](@ref debugging_memory_usage_corruption_detection). - -Possible return values: - -- `VK_ERROR_FEATURE_NOT_PRESENT` - corruption detection is not enabled for any of specified memory types. -- `VK_SUCCESS` - corruption detection has been performed and succeeded. -- `VK_ERROR_UNKNOWN` - corruption detection has been performed and found memory corruptions around one of the allocations. - `VMA_ASSERT` is also fired in that case. -- Other value: Error returned by Vulkan, e.g. memory mapping failure. -*/ -VMA_CALL_PRE VkResult VMA_CALL_POST vmaCheckCorruption( - VmaAllocator VMA_NOT_NULL allocator, - uint32_t memoryTypeBits); - -/** \brief Begins defragmentation process. - -\param allocator Allocator object. -\param pInfo Structure filled with parameters of defragmentation. -\param[out] pContext Context object that must be passed to vmaEndDefragmentation() to finish defragmentation. -\returns -- `VK_SUCCESS` if defragmentation can begin. -- `VK_ERROR_FEATURE_NOT_PRESENT` if defragmentation is not supported. - -For more information about defragmentation, see documentation chapter: -[Defragmentation](@ref defragmentation). -*/ -VMA_CALL_PRE VkResult VMA_CALL_POST vmaBeginDefragmentation( - VmaAllocator VMA_NOT_NULL allocator, - const VmaDefragmentationInfo* VMA_NOT_NULL pInfo, - VmaDefragmentationContext VMA_NULLABLE* VMA_NOT_NULL pContext); - -/** \brief Ends defragmentation process. - -\param allocator Allocator object. -\param context Context object that has been created by vmaBeginDefragmentation(). -\param[out] pStats Optional stats for the defragmentation. Can be null. - -Use this function to finish defragmentation started by vmaBeginDefragmentation(). -*/ -VMA_CALL_PRE void VMA_CALL_POST vmaEndDefragmentation( - VmaAllocator VMA_NOT_NULL allocator, - VmaDefragmentationContext VMA_NOT_NULL context, - VmaDefragmentationStats* VMA_NULLABLE pStats); - -/** \brief Starts single defragmentation pass. - -\param allocator Allocator object. -\param context Context object that has been created by vmaBeginDefragmentation(). -\param[out] pPassInfo Computed informations for current pass. -\returns -- `VK_SUCCESS` if no more moves are possible. Then you can omit call to vmaEndDefragmentationPass() and simply end whole defragmentation. -- `VK_INCOMPLETE` if there are pending moves returned in `pPassInfo`. You need to perform them, call vmaEndDefragmentationPass(), - and then preferably try another pass with vmaBeginDefragmentationPass(). 
-*/ -VMA_CALL_PRE VkResult VMA_CALL_POST vmaBeginDefragmentationPass( - VmaAllocator VMA_NOT_NULL allocator, - VmaDefragmentationContext VMA_NOT_NULL context, - VmaDefragmentationPassMoveInfo* VMA_NOT_NULL pPassInfo); - -/** \brief Ends single defragmentation pass. - -\param allocator Allocator object. -\param context Context object that has been created by vmaBeginDefragmentation(). -\param pPassInfo Computed informations for current pass filled by vmaBeginDefragmentationPass() and possibly modified by you. - -Returns `VK_SUCCESS` if no more moves are possible or `VK_INCOMPLETE` if more defragmentations are possible. - -Ends incremental defragmentation pass and commits all defragmentation moves from `pPassInfo`. -After this call: - -- Allocations at `pPassInfo[i].srcAllocation` that had `pPassInfo[i].operation ==` #VMA_DEFRAGMENTATION_MOVE_OPERATION_COPY - (which is the default) will be pointing to the new destination place. -- Allocation at `pPassInfo[i].srcAllocation` that had `pPassInfo[i].operation ==` #VMA_DEFRAGMENTATION_MOVE_OPERATION_DESTROY - will be freed. - -If no more moves are possible you can end whole defragmentation. -*/ -VMA_CALL_PRE VkResult VMA_CALL_POST vmaEndDefragmentationPass( - VmaAllocator VMA_NOT_NULL allocator, - VmaDefragmentationContext VMA_NOT_NULL context, - VmaDefragmentationPassMoveInfo* VMA_NOT_NULL pPassInfo); - -/** \brief Binds buffer to allocation. - -Binds specified buffer to region of memory represented by specified allocation. -Gets `VkDeviceMemory` handle and offset from the allocation. -If you want to create a buffer, allocate memory for it and bind them together separately, -you should use this function for binding instead of standard `vkBindBufferMemory()`, -because it ensures proper synchronization so that when a `VkDeviceMemory` object is used by multiple -allocations, calls to `vkBind*Memory()` or `vkMapMemory()` won't happen from multiple threads simultaneously -(which is illegal in Vulkan). - -It is recommended to use function vmaCreateBuffer() instead of this one. -*/ -VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindBufferMemory( - VmaAllocator VMA_NOT_NULL allocator, - VmaAllocation VMA_NOT_NULL allocation, - VkBuffer VMA_NOT_NULL_NON_DISPATCHABLE buffer); - -/** \brief Binds buffer to allocation with additional parameters. - -\param allocator -\param allocation -\param allocationLocalOffset Additional offset to be added while binding, relative to the beginning of the `allocation`. Normally it should be 0. -\param buffer -\param pNext A chain of structures to be attached to `VkBindBufferMemoryInfoKHR` structure used internally. Normally it should be null. - -This function is similar to vmaBindBufferMemory(), but it provides additional parameters. - -If `pNext` is not null, #VmaAllocator object must have been created with #VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT flag -or with VmaAllocatorCreateInfo::vulkanApiVersion `>= VK_API_VERSION_1_1`. Otherwise the call fails. -*/ -VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindBufferMemory2( - VmaAllocator VMA_NOT_NULL allocator, - VmaAllocation VMA_NOT_NULL allocation, - VkDeviceSize allocationLocalOffset, - VkBuffer VMA_NOT_NULL_NON_DISPATCHABLE buffer, - const void* VMA_NULLABLE pNext); - -/** \brief Binds image to allocation. - -Binds specified image to region of memory represented by specified allocation. -Gets `VkDeviceMemory` handle and offset from the allocation. 
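The defragmentation entry points documented above are meant to be driven in a loop until no more moves are reported. A hedged sketch, assuming an `allocator` handle; the `pMoves`/`moveCount` members of VmaDefragmentationPassMoveInfo are not shown in this excerpt and are assumptions about this VMA version:

\code
VmaDefragmentationInfo defragInfo = {};

VmaDefragmentationContext defragCtx = VK_NULL_HANDLE;
VkResult res = vmaBeginDefragmentation(allocator, &defragInfo, &defragCtx);
if(res == VK_SUCCESS)
{
    for(;;)
    {
        VmaDefragmentationPassMoveInfo pass = {};
        res = vmaBeginDefragmentationPass(allocator, defragCtx, &pass);
        if(res == VK_SUCCESS)
            break; // no more moves are possible
        // ... copy data for the moves listed in `pass` (assumed pMoves/moveCount members),
        //     recreate or rebind the affected buffers/images, then:
        if(vmaEndDefragmentationPass(allocator, defragCtx, &pass) == VK_SUCCESS)
            break;
    }
    VmaDefragmentationStats stats = {};
    vmaEndDefragmentation(allocator, defragCtx, &stats);
}
\endcode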
-If you want to create an image, allocate memory for it and bind them together separately, -you should use this function for binding instead of standard `vkBindImageMemory()`, -because it ensures proper synchronization so that when a `VkDeviceMemory` object is used by multiple -allocations, calls to `vkBind*Memory()` or `vkMapMemory()` won't happen from multiple threads simultaneously -(which is illegal in Vulkan). - -It is recommended to use function vmaCreateImage() instead of this one. -*/ -VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindImageMemory( - VmaAllocator VMA_NOT_NULL allocator, - VmaAllocation VMA_NOT_NULL allocation, - VkImage VMA_NOT_NULL_NON_DISPATCHABLE image); - -/** \brief Binds image to allocation with additional parameters. - -\param allocator -\param allocation -\param allocationLocalOffset Additional offset to be added while binding, relative to the beginning of the `allocation`. Normally it should be 0. -\param image -\param pNext A chain of structures to be attached to `VkBindImageMemoryInfoKHR` structure used internally. Normally it should be null. - -This function is similar to vmaBindImageMemory(), but it provides additional parameters. - -If `pNext` is not null, #VmaAllocator object must have been created with #VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT flag -or with VmaAllocatorCreateInfo::vulkanApiVersion `>= VK_API_VERSION_1_1`. Otherwise the call fails. -*/ -VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindImageMemory2( - VmaAllocator VMA_NOT_NULL allocator, - VmaAllocation VMA_NOT_NULL allocation, - VkDeviceSize allocationLocalOffset, - VkImage VMA_NOT_NULL_NON_DISPATCHABLE image, - const void* VMA_NULLABLE pNext); - -/** \brief Creates a new `VkBuffer`, allocates and binds memory for it. - -\param allocator -\param pBufferCreateInfo -\param pAllocationCreateInfo -\param[out] pBuffer Buffer that was created. -\param[out] pAllocation Allocation that was created. -\param[out] pAllocationInfo Optional. Information about allocated memory. It can be later fetched using function vmaGetAllocationInfo(). - -This function automatically: - --# Creates buffer. --# Allocates appropriate memory for it. --# Binds the buffer with the memory. - -If any of these operations fail, buffer and allocation are not created, -returned value is negative error code, `*pBuffer` and `*pAllocation` are null. - -If the function succeeded, you must destroy both buffer and allocation when you -no longer need them using either convenience function vmaDestroyBuffer() or -separately, using `vkDestroyBuffer()` and vmaFreeMemory(). - -If #VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT flag was used, -VK_KHR_dedicated_allocation extension is used internally to query driver whether -it requires or prefers the new buffer to have dedicated allocation. If yes, -and if dedicated allocation is possible -(#VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT is not used), it creates dedicated -allocation for this buffer, just like when using -#VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT. - -\note This function creates a new `VkBuffer`. Sub-allocation of parts of one large buffer, -although recommended as a good practice, is out of scope of this library and could be implemented -by the user as a higher-level logic on top of VMA. 
-*/ -VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateBuffer( - VmaAllocator VMA_NOT_NULL allocator, - const VkBufferCreateInfo* VMA_NOT_NULL pBufferCreateInfo, - const VmaAllocationCreateInfo* VMA_NOT_NULL pAllocationCreateInfo, - VkBuffer VMA_NULLABLE_NON_DISPATCHABLE* VMA_NOT_NULL pBuffer, - VmaAllocation VMA_NULLABLE* VMA_NOT_NULL pAllocation, - VmaAllocationInfo* VMA_NULLABLE pAllocationInfo); - -/** \brief Creates a buffer with additional minimum alignment. - -Similar to vmaCreateBuffer() but provides additional parameter `minAlignment` which allows to specify custom, -minimum alignment to be used when placing the buffer inside a larger memory block, which may be needed e.g. -for interop with OpenGL. -*/ -VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateBufferWithAlignment( - VmaAllocator VMA_NOT_NULL allocator, - const VkBufferCreateInfo* VMA_NOT_NULL pBufferCreateInfo, - const VmaAllocationCreateInfo* VMA_NOT_NULL pAllocationCreateInfo, - VkDeviceSize minAlignment, - VkBuffer VMA_NULLABLE_NON_DISPATCHABLE* VMA_NOT_NULL pBuffer, - VmaAllocation VMA_NULLABLE* VMA_NOT_NULL pAllocation, - VmaAllocationInfo* VMA_NULLABLE pAllocationInfo); - -/** \brief Creates a new `VkBuffer`, binds already created memory for it. - -\param allocator -\param allocation Allocation that provides memory to be used for binding new buffer to it. -\param pBufferCreateInfo -\param[out] pBuffer Buffer that was created. - -This function automatically: - --# Creates buffer. --# Binds the buffer with the supplied memory. - -If any of these operations fail, buffer is not created, -returned value is negative error code and `*pBuffer` is null. - -If the function succeeded, you must destroy the buffer when you -no longer need it using `vkDestroyBuffer()`. If you want to also destroy the corresponding -allocation you can use convenience function vmaDestroyBuffer(). -*/ -VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateAliasingBuffer( - VmaAllocator VMA_NOT_NULL allocator, - VmaAllocation VMA_NOT_NULL allocation, - const VkBufferCreateInfo* VMA_NOT_NULL pBufferCreateInfo, - VkBuffer VMA_NULLABLE_NON_DISPATCHABLE* VMA_NOT_NULL pBuffer); - -/** \brief Destroys Vulkan buffer and frees allocated memory. - -This is just a convenience function equivalent to: - -\code -vkDestroyBuffer(device, buffer, allocationCallbacks); -vmaFreeMemory(allocator, allocation); -\endcode - -It it safe to pass null as buffer and/or allocation. -*/ -VMA_CALL_PRE void VMA_CALL_POST vmaDestroyBuffer( - VmaAllocator VMA_NOT_NULL allocator, - VkBuffer VMA_NULLABLE_NON_DISPATCHABLE buffer, - VmaAllocation VMA_NULLABLE allocation); - -/// Function similar to vmaCreateBuffer(). -VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateImage( - VmaAllocator VMA_NOT_NULL allocator, - const VkImageCreateInfo* VMA_NOT_NULL pImageCreateInfo, - const VmaAllocationCreateInfo* VMA_NOT_NULL pAllocationCreateInfo, - VkImage VMA_NULLABLE_NON_DISPATCHABLE* VMA_NOT_NULL pImage, - VmaAllocation VMA_NULLABLE* VMA_NOT_NULL pAllocation, - VmaAllocationInfo* VMA_NULLABLE pAllocationInfo); - -/// Function similar to vmaCreateAliasingBuffer(). -VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateAliasingImage( - VmaAllocator VMA_NOT_NULL allocator, - VmaAllocation VMA_NOT_NULL allocation, - const VkImageCreateInfo* VMA_NOT_NULL pImageCreateInfo, - VkImage VMA_NULLABLE_NON_DISPATCHABLE* VMA_NOT_NULL pImage); - -/** \brief Destroys Vulkan image and frees allocated memory. 
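For completeness, the single-call path through vmaCreateBuffer()/vmaDestroyBuffer() described above, as a sketch with an assumed `allocator`, placeholder buffer parameters, and `VMA_MEMORY_USAGE_AUTO` assumed to exist in this VMA version:

\code
VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
bufCreateInfo.size = 65536;
bufCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;

VmaAllocationCreateInfo allocCreateInfo = {};
allocCreateInfo.usage = VMA_MEMORY_USAGE_AUTO; // assumption: available in this VMA version

VkBuffer buffer = VK_NULL_HANDLE;
VmaAllocation allocation = VK_NULL_HANDLE;
VmaAllocationInfo allocInfo = {};
VkResult res = vmaCreateBuffer(allocator, &bufCreateInfo, &allocCreateInfo, &buffer, &allocation, &allocInfo);
if(res == VK_SUCCESS)
{
    // ... use the buffer, then destroy buffer and allocation together:
    vmaDestroyBuffer(allocator, buffer, allocation);
}
\endcode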
- -This is just a convenience function equivalent to: - -\code -vkDestroyImage(device, image, allocationCallbacks); -vmaFreeMemory(allocator, allocation); -\endcode - -It it safe to pass null as image and/or allocation. -*/ -VMA_CALL_PRE void VMA_CALL_POST vmaDestroyImage( - VmaAllocator VMA_NOT_NULL allocator, - VkImage VMA_NULLABLE_NON_DISPATCHABLE image, - VmaAllocation VMA_NULLABLE allocation); - -/** @} */ - -/** -\addtogroup group_virtual -@{ -*/ - -/** \brief Creates new #VmaVirtualBlock object. - -\param pCreateInfo Parameters for creation. -\param[out] pVirtualBlock Returned virtual block object or `VMA_NULL` if creation failed. -*/ -VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateVirtualBlock( - const VmaVirtualBlockCreateInfo* VMA_NOT_NULL pCreateInfo, - VmaVirtualBlock VMA_NULLABLE* VMA_NOT_NULL pVirtualBlock); - -/** \brief Destroys #VmaVirtualBlock object. - -Please note that you should consciously handle virtual allocations that could remain unfreed in the block. -You should either free them individually using vmaVirtualFree() or call vmaClearVirtualBlock() -if you are sure this is what you want. If you do neither, an assert is called. - -If you keep pointers to some additional metadata associated with your virtual allocations in their `pUserData`, -don't forget to free them. -*/ -VMA_CALL_PRE void VMA_CALL_POST vmaDestroyVirtualBlock( - VmaVirtualBlock VMA_NULLABLE virtualBlock); - -/** \brief Returns true of the #VmaVirtualBlock is empty - contains 0 virtual allocations and has all its space available for new allocations. -*/ -VMA_CALL_PRE VkBool32 VMA_CALL_POST vmaIsVirtualBlockEmpty( - VmaVirtualBlock VMA_NOT_NULL virtualBlock); - -/** \brief Returns information about a specific virtual allocation within a virtual block, like its size and `pUserData` pointer. -*/ -VMA_CALL_PRE void VMA_CALL_POST vmaGetVirtualAllocationInfo( - VmaVirtualBlock VMA_NOT_NULL virtualBlock, - VmaVirtualAllocation VMA_NOT_NULL_NON_DISPATCHABLE allocation, VmaVirtualAllocationInfo* VMA_NOT_NULL pVirtualAllocInfo); - -/** \brief Allocates new virtual allocation inside given #VmaVirtualBlock. - -If the allocation fails due to not enough free space available, `VK_ERROR_OUT_OF_DEVICE_MEMORY` is returned -(despite the function doesn't ever allocate actual GPU memory). -`pAllocation` is then set to `VK_NULL_HANDLE` and `pOffset`, if not null, it set to `UINT64_MAX`. - -\param virtualBlock Virtual block -\param pCreateInfo Parameters for the allocation -\param[out] pAllocation Returned handle of the new allocation -\param[out] pOffset Returned offset of the new allocation. Optional, can be null. -*/ -VMA_CALL_PRE VkResult VMA_CALL_POST vmaVirtualAllocate( - VmaVirtualBlock VMA_NOT_NULL virtualBlock, - const VmaVirtualAllocationCreateInfo* VMA_NOT_NULL pCreateInfo, - VmaVirtualAllocation VMA_NULLABLE_NON_DISPATCHABLE* VMA_NOT_NULL pAllocation, - VkDeviceSize* VMA_NULLABLE pOffset); - -/** \brief Frees virtual allocation inside given #VmaVirtualBlock. - -It is correct to call this function with `allocation == VK_NULL_HANDLE` - it does nothing. -*/ -VMA_CALL_PRE void VMA_CALL_POST vmaVirtualFree( - VmaVirtualBlock VMA_NOT_NULL virtualBlock, - VmaVirtualAllocation VMA_NULLABLE_NON_DISPATCHABLE allocation); - -/** \brief Frees all virtual allocations inside given #VmaVirtualBlock. - -You must either call this function or free each virtual allocation individually with vmaVirtualFree() -before destroying a virtual block. Otherwise, an assert is called. 
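The virtual-allocation API above can be exercised without any Vulkan device at all. A sketch, assuming the `size` members of the two create-info structures behave as in mainline VMA (they are not spelled out in this excerpt):

\code
VmaVirtualBlockCreateInfo blockCreateInfo = {};
blockCreateInfo.size = 1048576; // 1 MiB of purely virtual space, no GPU memory involved

VmaVirtualBlock block = VK_NULL_HANDLE;
if(vmaCreateVirtualBlock(&blockCreateInfo, &block) == VK_SUCCESS)
{
    VmaVirtualAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.size = 4096;

    VmaVirtualAllocation alloc = VK_NULL_HANDLE;
    VkDeviceSize offset = 0;
    if(vmaVirtualAllocate(block, &allocCreateInfo, &alloc, &offset) == VK_SUCCESS)
    {
        // ... the sub-range [offset, offset + 4096) of your own resource is now tracked by alloc ...
        vmaVirtualFree(block, alloc);
    }
    vmaDestroyVirtualBlock(block);
}
\endcode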
- -If you keep pointer to some additional metadata associated with your virtual allocation in its `pUserData`, -don't forget to free it as well. -*/ -VMA_CALL_PRE void VMA_CALL_POST vmaClearVirtualBlock( - VmaVirtualBlock VMA_NOT_NULL virtualBlock); - -/** \brief Changes custom pointer associated with given virtual allocation. -*/ -VMA_CALL_PRE void VMA_CALL_POST vmaSetVirtualAllocationUserData( - VmaVirtualBlock VMA_NOT_NULL virtualBlock, - VmaVirtualAllocation VMA_NOT_NULL_NON_DISPATCHABLE allocation, - void* VMA_NULLABLE pUserData); - -/** \brief Calculates and returns statistics about virtual allocations and memory usage in given #VmaVirtualBlock. - -This function is fast to call. For more detailed statistics, see vmaCalculateVirtualBlockStatistics(). -*/ -VMA_CALL_PRE void VMA_CALL_POST vmaGetVirtualBlockStatistics( - VmaVirtualBlock VMA_NOT_NULL virtualBlock, - VmaStatistics* VMA_NOT_NULL pStats); - -/** \brief Calculates and returns detailed statistics about virtual allocations and memory usage in given #VmaVirtualBlock. - -This function is slow to call. Use for debugging purposes. -For less detailed statistics, see vmaGetVirtualBlockStatistics(). -*/ -VMA_CALL_PRE void VMA_CALL_POST vmaCalculateVirtualBlockStatistics( - VmaVirtualBlock VMA_NOT_NULL virtualBlock, - VmaDetailedStatistics* VMA_NOT_NULL pStats); - -/** @} */ - -#if VMA_STATS_STRING_ENABLED -/** -\addtogroup group_stats -@{ -*/ - -/** \brief Builds and returns a null-terminated string in JSON format with information about given #VmaVirtualBlock. -\param virtualBlock Virtual block. -\param[out] ppStatsString Returned string. -\param detailedMap Pass `VK_FALSE` to only obtain statistics as returned by vmaCalculateVirtualBlockStatistics(). Pass `VK_TRUE` to also obtain full list of allocations and free spaces. - -Returned string must be freed using vmaFreeVirtualBlockStatsString(). -*/ -VMA_CALL_PRE void VMA_CALL_POST vmaBuildVirtualBlockStatsString( - VmaVirtualBlock VMA_NOT_NULL virtualBlock, - char* VMA_NULLABLE* VMA_NOT_NULL ppStatsString, - VkBool32 detailedMap); - -/// Frees a string returned by vmaBuildVirtualBlockStatsString(). -VMA_CALL_PRE void VMA_CALL_POST vmaFreeVirtualBlockStatsString( - VmaVirtualBlock VMA_NOT_NULL virtualBlock, - char* VMA_NULLABLE pStatsString); - -/** \brief Builds and returns statistics as a null-terminated string in JSON format. -\param allocator -\param[out] ppStatsString Must be freed using vmaFreeStatsString() function. -\param detailedMap -*/ -VMA_CALL_PRE void VMA_CALL_POST vmaBuildStatsString( - VmaAllocator VMA_NOT_NULL allocator, - char* VMA_NULLABLE* VMA_NOT_NULL ppStatsString, - VkBool32 detailedMap); - -VMA_CALL_PRE void VMA_CALL_POST vmaFreeStatsString( - VmaAllocator VMA_NOT_NULL allocator, - char* VMA_NULLABLE pStatsString); - -/** @} */ - -#endif // VMA_STATS_STRING_ENABLED - -#endif // _VMA_FUNCTION_HEADERS - -#ifdef __cplusplus -} -#endif - -#endif // AMD_VULKAN_MEMORY_ALLOCATOR_H - -//////////////////////////////////////////////////////////////////////////////// -//////////////////////////////////////////////////////////////////////////////// -// -// IMPLEMENTATION -// -//////////////////////////////////////////////////////////////////////////////// -//////////////////////////////////////////////////////////////////////////////// - -// For Visual Studio IntelliSense. 
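When VMA_STATS_STRING_ENABLED is set, dumping the JSON statistics described above is a short sequence of calls; `allocator` is assumed to exist:

\code
char* statsString = nullptr;
vmaBuildStatsString(allocator, &statsString, VK_TRUE); // VK_TRUE: include the full allocation map
// ... write statsString to a log or file ...
vmaFreeStatsString(allocator, statsString);
\endcode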
-#if defined(__cplusplus) && defined(__INTELLISENSE__) -#define VMA_IMPLEMENTATION -#endif - -#ifdef VMA_IMPLEMENTATION -#undef VMA_IMPLEMENTATION - -#include -#include -#include -#include -#include - -#ifdef _MSC_VER - #include // For functions like __popcnt, _BitScanForward etc. -#endif -#if __cplusplus >= 202002L || _MSVC_LANG >= 202002L // C++20 - #include // For std::popcount -#endif - -/******************************************************************************* -CONFIGURATION SECTION - -Define some of these macros before each #include of this header or change them -here if you need other then default behavior depending on your environment. -*/ -#ifndef _VMA_CONFIGURATION - -/* -Define this macro to 1 to make the library fetch pointers to Vulkan functions -internally, like: - - vulkanFunctions.vkAllocateMemory = &vkAllocateMemory; -*/ -#if !defined(VMA_STATIC_VULKAN_FUNCTIONS) && !defined(VK_NO_PROTOTYPES) - #define VMA_STATIC_VULKAN_FUNCTIONS 1 -#endif - -/* -Define this macro to 1 to make the library fetch pointers to Vulkan functions -internally, like: - - vulkanFunctions.vkAllocateMemory = (PFN_vkAllocateMemory)vkGetDeviceProcAddr(device, "vkAllocateMemory"); - -To use this feature in new versions of VMA you now have to pass -VmaVulkanFunctions::vkGetInstanceProcAddr and vkGetDeviceProcAddr as -VmaAllocatorCreateInfo::pVulkanFunctions. Other members can be null. -*/ -#if !defined(VMA_DYNAMIC_VULKAN_FUNCTIONS) - #define VMA_DYNAMIC_VULKAN_FUNCTIONS 1 -#endif - -#ifndef VMA_USE_STL_SHARED_MUTEX - // Compiler conforms to C++17. - #if __cplusplus >= 201703L - #define VMA_USE_STL_SHARED_MUTEX 1 - // Visual studio defines __cplusplus properly only when passed additional parameter: /Zc:__cplusplus - // Otherwise it is always 199711L, despite shared_mutex works since Visual Studio 2015 Update 2. - #elif defined(_MSC_FULL_VER) && _MSC_FULL_VER >= 190023918 && __cplusplus == 199711L && _MSVC_LANG >= 201703L - #define VMA_USE_STL_SHARED_MUTEX 1 - #else - #define VMA_USE_STL_SHARED_MUTEX 0 - #endif -#endif - -/* -Define this macro to include custom header files without having to edit this file directly, e.g.: - - // Inside of "my_vma_configuration_user_includes.h": - - #include "my_custom_assert.h" // for MY_CUSTOM_ASSERT - #include "my_custom_min.h" // for my_custom_min - #include - #include - - // Inside a different file, which includes "vk_mem_alloc.h": - - #define VMA_CONFIGURATION_USER_INCLUDES_H "my_vma_configuration_user_includes.h" - #define VMA_ASSERT(expr) MY_CUSTOM_ASSERT(expr) - #define VMA_MIN(v1, v2) (my_custom_min(v1, v2)) - #include "vk_mem_alloc.h" - ... - -The following headers are used in this CONFIGURATION section only, so feel free to -remove them if not needed. -*/ -#if !defined(VMA_CONFIGURATION_USER_INCLUDES_H) - #include // for assert - #include // for min, max - #include -#else - #include VMA_CONFIGURATION_USER_INCLUDES_H -#endif - -#ifndef VMA_NULL - // Value used as null pointer. Define it to e.g.: nullptr, NULL, 0, (void*)0. 
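In practice the two function-fetching macros above are configured once, in the single translation unit that defines VMA_IMPLEMENTATION. A sketch assuming Vulkan entry points are loaded dynamically (names outside this excerpt, such as the allocator-creation call site, are assumptions):

\code
// In exactly one .cpp file:
#define VMA_STATIC_VULKAN_FUNCTIONS 0
#define VMA_DYNAMIC_VULKAN_FUNCTIONS 1
#define VMA_IMPLEMENTATION
#include "vk_mem_alloc.h"

// When creating the allocator, only the two loader entry points are required:
VmaVulkanFunctions vulkanFunctions = {};
vulkanFunctions.vkGetInstanceProcAddr = vkGetInstanceProcAddr;
vulkanFunctions.vkGetDeviceProcAddr = vkGetDeviceProcAddr;
// ... assign &vulkanFunctions to VmaAllocatorCreateInfo::pVulkanFunctions ...
\endcode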
- #define VMA_NULL nullptr -#endif - -#if defined(__ANDROID_API__) && (__ANDROID_API__ < 16) -#include -static void* vma_aligned_alloc(size_t alignment, size_t size) -{ - // alignment must be >= sizeof(void*) - if(alignment < sizeof(void*)) - { - alignment = sizeof(void*); - } - - return memalign(alignment, size); -} -#elif defined(__APPLE__) || defined(__ANDROID__) || (defined(__linux__) && defined(__GLIBCXX__) && !defined(_GLIBCXX_HAVE_ALIGNED_ALLOC)) -#include - -#if defined(__APPLE__) -#include -#endif - -static void* vma_aligned_alloc(size_t alignment, size_t size) -{ - // Unfortunately, aligned_alloc causes VMA to crash due to it returning null pointers. (At least under 11.4) - // Therefore, for now disable this specific exception until a proper solution is found. - //#if defined(__APPLE__) && (defined(MAC_OS_X_VERSION_10_16) || defined(__IPHONE_14_0)) - //#if MAC_OS_X_VERSION_MAX_ALLOWED >= MAC_OS_X_VERSION_10_16 || __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_14_0 - // // For C++14, usr/include/malloc/_malloc.h declares aligned_alloc()) only - // // with the MacOSX11.0 SDK in Xcode 12 (which is what adds - // // MAC_OS_X_VERSION_10_16), even though the function is marked - // // availabe for 10.15. That is why the preprocessor checks for 10.16 but - // // the __builtin_available checks for 10.15. - // // People who use C++17 could call aligned_alloc with the 10.15 SDK already. - // if (__builtin_available(macOS 10.15, iOS 13, *)) - // return aligned_alloc(alignment, size); - //#endif - //#endif - - // alignment must be >= sizeof(void*) - if(alignment < sizeof(void*)) - { - alignment = sizeof(void*); - } - - void *pointer; - if(posix_memalign(&pointer, alignment, size) == 0) - return pointer; - return VMA_NULL; -} -#elif defined(_WIN32) -static void* vma_aligned_alloc(size_t alignment, size_t size) -{ - return _aligned_malloc(size, alignment); -} -#else -static void* vma_aligned_alloc(size_t alignment, size_t size) -{ - return aligned_alloc(alignment, size); -} -#endif - -#if defined(_WIN32) -static void vma_aligned_free(void* ptr) -{ - _aligned_free(ptr); -} -#else -static void vma_aligned_free(void* VMA_NULLABLE ptr) -{ - free(ptr); -} -#endif - -// If your compiler is not compatible with C++11 and definition of -// aligned_alloc() function is missing, uncommeting following line may help: - -//#include - -// Normal assert to check for programmer's errors, especially in Debug configuration. -#ifndef VMA_ASSERT - #ifdef NDEBUG - #define VMA_ASSERT(expr) - #else - #define VMA_ASSERT(expr) assert(expr) - #endif -#endif - -// Assert that will be called very often, like inside data structures e.g. operator[]. -// Making it non-empty can make program slow. 
-#ifndef VMA_HEAVY_ASSERT - #ifdef NDEBUG - #define VMA_HEAVY_ASSERT(expr) - #else - #define VMA_HEAVY_ASSERT(expr) //VMA_ASSERT(expr) - #endif -#endif - -#ifndef VMA_ALIGN_OF - #define VMA_ALIGN_OF(type) (__alignof(type)) -#endif - -#ifndef VMA_SYSTEM_ALIGNED_MALLOC - #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) vma_aligned_alloc((alignment), (size)) -#endif - -#ifndef VMA_SYSTEM_ALIGNED_FREE - // VMA_SYSTEM_FREE is the old name, but might have been defined by the user - #if defined(VMA_SYSTEM_FREE) - #define VMA_SYSTEM_ALIGNED_FREE(ptr) VMA_SYSTEM_FREE(ptr) - #else - #define VMA_SYSTEM_ALIGNED_FREE(ptr) vma_aligned_free(ptr) - #endif -#endif - -#ifndef VMA_COUNT_BITS_SET - // Returns number of bits set to 1 in (v) - #define VMA_COUNT_BITS_SET(v) VmaCountBitsSet(v) -#endif - -#ifndef VMA_BITSCAN_LSB - // Scans integer for index of first nonzero value from the Least Significant Bit (LSB). If mask is 0 then returns UINT8_MAX - #define VMA_BITSCAN_LSB(mask) VmaBitScanLSB(mask) -#endif - -#ifndef VMA_BITSCAN_MSB - // Scans integer for index of first nonzero value from the Most Significant Bit (MSB). If mask is 0 then returns UINT8_MAX - #define VMA_BITSCAN_MSB(mask) VmaBitScanMSB(mask) -#endif - -#ifndef VMA_MIN - #define VMA_MIN(v1, v2) ((std::min)((v1), (v2))) -#endif - -#ifndef VMA_MAX - #define VMA_MAX(v1, v2) ((std::max)((v1), (v2))) -#endif - -#ifndef VMA_SWAP - #define VMA_SWAP(v1, v2) std::swap((v1), (v2)) -#endif - -#ifndef VMA_SORT - #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp) -#endif - -#ifndef VMA_DEBUG_LOG - #define VMA_DEBUG_LOG(format, ...) - /* - #define VMA_DEBUG_LOG(format, ...) do { \ - printf(format, __VA_ARGS__); \ - printf("\n"); \ - } while(false) - */ -#endif - -// Define this macro to 1 to enable functions: vmaBuildStatsString, vmaFreeStatsString. -#if VMA_STATS_STRING_ENABLED - static inline void VmaUint32ToStr(char* VMA_NOT_NULL outStr, size_t strLen, uint32_t num) - { - snprintf(outStr, strLen, "%u", static_cast(num)); - } - static inline void VmaUint64ToStr(char* VMA_NOT_NULL outStr, size_t strLen, uint64_t num) - { - snprintf(outStr, strLen, "%llu", static_cast(num)); - } - static inline void VmaPtrToStr(char* VMA_NOT_NULL outStr, size_t strLen, const void* ptr) - { - snprintf(outStr, strLen, "%p", ptr); - } -#endif - -#ifndef VMA_MUTEX - class VmaMutex - { - public: - void Lock() { m_Mutex.lock(); } - void Unlock() { m_Mutex.unlock(); } - bool TryLock() { return m_Mutex.try_lock(); } - private: - std::mutex m_Mutex; - }; - #define VMA_MUTEX VmaMutex -#endif - -// Read-write mutex, where "read" is shared access, "write" is exclusive access. -#ifndef VMA_RW_MUTEX - #if VMA_USE_STL_SHARED_MUTEX - // Use std::shared_mutex from C++17. - #include - class VmaRWMutex - { - public: - void LockRead() { m_Mutex.lock_shared(); } - void UnlockRead() { m_Mutex.unlock_shared(); } - bool TryLockRead() { return m_Mutex.try_lock_shared(); } - void LockWrite() { m_Mutex.lock(); } - void UnlockWrite() { m_Mutex.unlock(); } - bool TryLockWrite() { return m_Mutex.try_lock(); } - private: - std::shared_mutex m_Mutex; - }; - #define VMA_RW_MUTEX VmaRWMutex - #elif defined(_WIN32) && defined(WINVER) && WINVER >= 0x0600 - // Use SRWLOCK from WinAPI. - // Minimum supported client = Windows Vista, server = Windows Server 2008. 
- class VmaRWMutex - { - public: - VmaRWMutex() { InitializeSRWLock(&m_Lock); } - void LockRead() { AcquireSRWLockShared(&m_Lock); } - void UnlockRead() { ReleaseSRWLockShared(&m_Lock); } - bool TryLockRead() { return TryAcquireSRWLockShared(&m_Lock) != FALSE; } - void LockWrite() { AcquireSRWLockExclusive(&m_Lock); } - void UnlockWrite() { ReleaseSRWLockExclusive(&m_Lock); } - bool TryLockWrite() { return TryAcquireSRWLockExclusive(&m_Lock) != FALSE; } - private: - SRWLOCK m_Lock; - }; - #define VMA_RW_MUTEX VmaRWMutex - #else - // Less efficient fallback: Use normal mutex. - class VmaRWMutex - { - public: - void LockRead() { m_Mutex.Lock(); } - void UnlockRead() { m_Mutex.Unlock(); } - bool TryLockRead() { return m_Mutex.TryLock(); } - void LockWrite() { m_Mutex.Lock(); } - void UnlockWrite() { m_Mutex.Unlock(); } - bool TryLockWrite() { return m_Mutex.TryLock(); } - private: - VMA_MUTEX m_Mutex; - }; - #define VMA_RW_MUTEX VmaRWMutex - #endif // #if VMA_USE_STL_SHARED_MUTEX -#endif // #ifndef VMA_RW_MUTEX - -/* -If providing your own implementation, you need to implement a subset of std::atomic. -*/ -#ifndef VMA_ATOMIC_UINT32 - #include - #define VMA_ATOMIC_UINT32 std::atomic -#endif - -#ifndef VMA_ATOMIC_UINT64 - #include - #define VMA_ATOMIC_UINT64 std::atomic -#endif - -#ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY - /** - Every allocation will have its own memory block. - Define to 1 for debugging purposes only. - */ - #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0) -#endif - -#ifndef VMA_MIN_ALIGNMENT - /** - Minimum alignment of all allocations, in bytes. - Set to more than 1 for debugging purposes. Must be power of two. - */ - #ifdef VMA_DEBUG_ALIGNMENT // Old name - #define VMA_MIN_ALIGNMENT VMA_DEBUG_ALIGNMENT - #else - #define VMA_MIN_ALIGNMENT (1) - #endif -#endif - -#ifndef VMA_DEBUG_MARGIN - /** - Minimum margin after every allocation, in bytes. - Set nonzero for debugging purposes only. - */ - #define VMA_DEBUG_MARGIN (0) -#endif - -#ifndef VMA_DEBUG_INITIALIZE_ALLOCATIONS - /** - Define this macro to 1 to automatically fill new allocations and destroyed - allocations with some bit pattern. - */ - #define VMA_DEBUG_INITIALIZE_ALLOCATIONS (0) -#endif - -#ifndef VMA_DEBUG_DETECT_CORRUPTION - /** - Define this macro to 1 together with non-zero value of VMA_DEBUG_MARGIN to - enable writing magic value to the margin after every allocation and - validating it, so that memory corruptions (out-of-bounds writes) are detected. - */ - #define VMA_DEBUG_DETECT_CORRUPTION (0) -#endif - -#ifndef VMA_DEBUG_GLOBAL_MUTEX - /** - Set this to 1 for debugging purposes only, to enable single mutex protecting all - entry calls to the library. Can be useful for debugging multithreading issues. - */ - #define VMA_DEBUG_GLOBAL_MUTEX (0) -#endif - -#ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY - /** - Minimum value for VkPhysicalDeviceLimits::bufferImageGranularity. - Set to more than 1 for debugging purposes only. Must be power of two. - */ - #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1) -#endif - -#ifndef VMA_DEBUG_DONT_EXCEED_MAX_MEMORY_ALLOCATION_COUNT - /* - Set this to 1 to make VMA never exceed VkPhysicalDeviceLimits::maxMemoryAllocationCount - and return error instead of leaving up to Vulkan implementation what to do in such cases. - */ - #define VMA_DEBUG_DONT_EXCEED_MAX_MEMORY_ALLOCATION_COUNT (0) -#endif - -#ifndef VMA_SMALL_HEAP_MAX_SIZE - /// Maximum size of a memory heap in Vulkan to consider it "small". 
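The VMA_DEBUG_MARGIN and VMA_DEBUG_DETECT_CORRUPTION macros defined above pair with the vmaCheckCorruption()/vmaCheckPoolCorruption() functions documented earlier. A sketch of how a debug build might enable them before the implementation include (the margin value is an illustrative assumption):

\code
#define VMA_DEBUG_MARGIN 16            // bytes of margin written after every allocation
#define VMA_DEBUG_DETECT_CORRUPTION 1  // write and validate the magic value in that margin
#define VMA_IMPLEMENTATION
#include "vk_mem_alloc.h"
// vmaCheckCorruption()/vmaCheckPoolCorruption() can then report VK_ERROR_UNKNOWN on an out-of-bounds write.
\endcode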
- #define VMA_SMALL_HEAP_MAX_SIZE (1024ull * 1024 * 1024) -#endif - -#ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE - /// Default size of a block allocated as single VkDeviceMemory from a "large" heap. - #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256ull * 1024 * 1024) -#endif - -/* -Mapping hysteresis is a logic that launches when vmaMapMemory/vmaUnmapMemory is called -or a persistently mapped allocation is created and destroyed several times in a row. -It keeps additional +1 mapping of a device memory block to prevent calling actual -vkMapMemory/vkUnmapMemory too many times, which may improve performance and help -tools like RenderDOc. -*/ -#ifndef VMA_MAPPING_HYSTERESIS_ENABLED - #define VMA_MAPPING_HYSTERESIS_ENABLED 1 -#endif - -#ifndef VMA_CLASS_NO_COPY - #define VMA_CLASS_NO_COPY(className) \ - private: \ - className(const className&) = delete; \ - className& operator=(const className&) = delete; -#endif - -#define VMA_VALIDATE(cond) do { if(!(cond)) { \ - VMA_ASSERT(0 && "Validation failed: " #cond); \ - return false; \ - } } while(false) - -/******************************************************************************* -END OF CONFIGURATION -*/ -#endif // _VMA_CONFIGURATION - - -static const uint8_t VMA_ALLOCATION_FILL_PATTERN_CREATED = 0xDC; -static const uint8_t VMA_ALLOCATION_FILL_PATTERN_DESTROYED = 0xEF; -// Decimal 2139416166, float NaN, little-endian binary 66 E6 84 7F. -static const uint32_t VMA_CORRUPTION_DETECTION_MAGIC_VALUE = 0x7F84E666; - -// Copy of some Vulkan definitions so we don't need to check their existence just to handle few constants. -static const uint32_t VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY = 0x00000040; -static const uint32_t VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD_COPY = 0x00000080; -static const uint32_t VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_COPY = 0x00020000; -static const uint32_t VK_IMAGE_CREATE_DISJOINT_BIT_COPY = 0x00000200; -static const int32_t VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT_COPY = 1000158000; -static const uint32_t VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET = 0x10000000u; -static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32; -static const uint32_t VMA_VENDOR_ID_AMD = 4098; - -// This one is tricky. Vulkan specification defines this code as available since -// Vulkan 1.0, but doesn't actually define it in Vulkan SDK earlier than 1.2.131. -// See pull request #207. -#define VK_ERROR_UNKNOWN_COPY ((VkResult)-13) - - -#if VMA_STATS_STRING_ENABLED -// Correspond to values of enum VmaSuballocationType. -static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = -{ - "FREE", - "UNKNOWN", - "BUFFER", - "IMAGE_UNKNOWN", - "IMAGE_LINEAR", - "IMAGE_OPTIMAL", -}; -#endif - -static VkAllocationCallbacks VmaEmptyAllocationCallbacks = - { VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL }; - - -#ifndef _VMA_ENUM_DECLARATIONS - -enum VmaSuballocationType -{ - VMA_SUBALLOCATION_TYPE_FREE = 0, - VMA_SUBALLOCATION_TYPE_UNKNOWN = 1, - VMA_SUBALLOCATION_TYPE_BUFFER = 2, - VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3, - VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4, - VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5, - VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF -}; - -enum VMA_CACHE_OPERATION -{ - VMA_CACHE_FLUSH, - VMA_CACHE_INVALIDATE -}; - -enum class VmaAllocationRequestType -{ - Normal, - TLSF, - // Used by "Linear" algorithm. - UpperAddress, - EndOf1st, - EndOf2nd, -}; - -#endif // _VMA_ENUM_DECLARATIONS - -#ifndef _VMA_FORWARD_DECLARATIONS -// Opaque handle used by allocation algorithms to identify single allocation in any conforming way. 
-VK_DEFINE_NON_DISPATCHABLE_HANDLE(VmaAllocHandle); - -struct VmaMutexLock; -struct VmaMutexLockRead; -struct VmaMutexLockWrite; - -template -struct AtomicTransactionalIncrement; - -template -struct VmaStlAllocator; - -template -class VmaVector; - -template -class VmaSmallVector; - -template -class VmaPoolAllocator; - -template -struct VmaListItem; - -template -class VmaRawList; - -template -class VmaList; - -template -class VmaIntrusiveLinkedList; - -// Unused in this version -#if 0 -template -struct VmaPair; -template -struct VmaPairFirstLess; - -template -class VmaMap; -#endif - -#if VMA_STATS_STRING_ENABLED -class VmaStringBuilder; -class VmaJsonWriter; -#endif - -class VmaDeviceMemoryBlock; - -struct VmaDedicatedAllocationListItemTraits; -class VmaDedicatedAllocationList; - -struct VmaSuballocation; -struct VmaSuballocationOffsetLess; -struct VmaSuballocationOffsetGreater; -struct VmaSuballocationItemSizeLess; - -typedef VmaList> VmaSuballocationList; - -struct VmaAllocationRequest; - -class VmaBlockMetadata; -class VmaBlockMetadata_Linear; -class VmaBlockMetadata_TLSF; - -class VmaBlockVector; - -struct VmaPoolListItemTraits; - -struct VmaCurrentBudgetData; - -class VmaAllocationObjectAllocator; - -#endif // _VMA_FORWARD_DECLARATIONS - - -#ifndef _VMA_FUNCTIONS - -/* -Returns number of bits set to 1 in (v). - -On specific platforms and compilers you can use instrinsics like: - -Visual Studio: - return __popcnt(v); -GCC, Clang: - return static_cast(__builtin_popcount(v)); - -Define macro VMA_COUNT_BITS_SET to provide your optimized implementation. -But you need to check in runtime whether user's CPU supports these, as some old processors don't. -*/ -static inline uint32_t VmaCountBitsSet(uint32_t v) -{ -#if __cplusplus >= 202002L || _MSVC_LANG >= 202002L // C++20 - return std::popcount(v); -#else - uint32_t c = v - ((v >> 1) & 0x55555555); - c = ((c >> 2) & 0x33333333) + (c & 0x33333333); - c = ((c >> 4) + c) & 0x0F0F0F0F; - c = ((c >> 8) + c) & 0x00FF00FF; - c = ((c >> 16) + c) & 0x0000FFFF; - return c; -#endif -} - -static inline uint8_t VmaBitScanLSB(uint64_t mask) -{ -#if defined(_MSC_VER) && defined(_WIN64) - unsigned long pos; - if (_BitScanForward64(&pos, mask)) - return static_cast(pos); - return UINT8_MAX; -#elif defined __GNUC__ || defined __clang__ - return static_cast(__builtin_ffsll(mask)) - 1U; -#else - uint8_t pos = 0; - uint64_t bit = 1; - do - { - if (mask & bit) - return pos; - bit <<= 1; - } while (pos++ < 63); - return UINT8_MAX; -#endif -} - -static inline uint8_t VmaBitScanLSB(uint32_t mask) -{ -#ifdef _MSC_VER - unsigned long pos; - if (_BitScanForward(&pos, mask)) - return static_cast(pos); - return UINT8_MAX; -#elif defined __GNUC__ || defined __clang__ - return static_cast(__builtin_ffs(mask)) - 1U; -#else - uint8_t pos = 0; - uint32_t bit = 1; - do - { - if (mask & bit) - return pos; - bit <<= 1; - } while (pos++ < 31); - return UINT8_MAX; -#endif -} - -static inline uint8_t VmaBitScanMSB(uint64_t mask) -{ -#if defined(_MSC_VER) && defined(_WIN64) - unsigned long pos; - if (_BitScanReverse64(&pos, mask)) - return static_cast(pos); -#elif defined __GNUC__ || defined __clang__ - if (mask) - return 63 - static_cast(__builtin_clzll(mask)); -#else - uint8_t pos = 63; - uint64_t bit = 1ULL << 63; - do - { - if (mask & bit) - return pos; - bit >>= 1; - } while (pos-- > 0); -#endif - return UINT8_MAX; -} - -static inline uint8_t VmaBitScanMSB(uint32_t mask) -{ -#ifdef _MSC_VER - unsigned long pos; - if (_BitScanReverse(&pos, mask)) - return static_cast(pos); -#elif 
defined __GNUC__ || defined __clang__ - if (mask) - return 31 - static_cast(__builtin_clz(mask)); -#else - uint8_t pos = 31; - uint32_t bit = 1UL << 31; - do - { - if (mask & bit) - return pos; - bit >>= 1; - } while (pos-- > 0); -#endif - return UINT8_MAX; -} - -/* -Returns true if given number is a power of two. -T must be unsigned integer number or signed integer but always nonnegative. -For 0 returns true. -*/ -template -inline bool VmaIsPow2(T x) -{ - return (x & (x - 1)) == 0; -} - -// Aligns given value up to nearest multiply of align value. For example: VmaAlignUp(11, 8) = 16. -// Use types like uint32_t, uint64_t as T. -template -static inline T VmaAlignUp(T val, T alignment) -{ - VMA_HEAVY_ASSERT(VmaIsPow2(alignment)); - return (val + alignment - 1) & ~(alignment - 1); -} - -// Aligns given value down to nearest multiply of align value. For example: VmaAlignUp(11, 8) = 8. -// Use types like uint32_t, uint64_t as T. -template -static inline T VmaAlignDown(T val, T alignment) -{ - VMA_HEAVY_ASSERT(VmaIsPow2(alignment)); - return val & ~(alignment - 1); -} - -// Division with mathematical rounding to nearest number. -template -static inline T VmaRoundDiv(T x, T y) -{ - return (x + (y / (T)2)) / y; -} - -// Divide by 'y' and round up to nearest integer. -template -static inline T VmaDivideRoundingUp(T x, T y) -{ - return (x + y - (T)1) / y; -} - -// Returns smallest power of 2 greater or equal to v. -static inline uint32_t VmaNextPow2(uint32_t v) -{ - v--; - v |= v >> 1; - v |= v >> 2; - v |= v >> 4; - v |= v >> 8; - v |= v >> 16; - v++; - return v; -} - -static inline uint64_t VmaNextPow2(uint64_t v) -{ - v--; - v |= v >> 1; - v |= v >> 2; - v |= v >> 4; - v |= v >> 8; - v |= v >> 16; - v |= v >> 32; - v++; - return v; -} - -// Returns largest power of 2 less or equal to v. -static inline uint32_t VmaPrevPow2(uint32_t v) -{ - v |= v >> 1; - v |= v >> 2; - v |= v >> 4; - v |= v >> 8; - v |= v >> 16; - v = v ^ (v >> 1); - return v; -} - -static inline uint64_t VmaPrevPow2(uint64_t v) -{ - v |= v >> 1; - v |= v >> 2; - v |= v >> 4; - v |= v >> 8; - v |= v >> 16; - v |= v >> 32; - v = v ^ (v >> 1); - return v; -} - -static inline bool VmaStrIsEmpty(const char* pStr) -{ - return pStr == VMA_NULL || *pStr == '\0'; -} - -/* -Returns true if two memory blocks occupy overlapping pages. -ResourceA must be in less memory offset than ResourceB. - -Algorithm is based on "Vulkan 1.0.39 - A Specification (with all registered Vulkan extensions)" -chapter 11.6 "Resource Memory Association", paragraph "Buffer-Image Granularity". -*/ -static inline bool VmaBlocksOnSamePage( - VkDeviceSize resourceAOffset, - VkDeviceSize resourceASize, - VkDeviceSize resourceBOffset, - VkDeviceSize pageSize) -{ - VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0); - VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1; - VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1); - VkDeviceSize resourceBStart = resourceBOffset; - VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1); - return resourceAEndPage == resourceBStartPage; -} - -/* -Returns true if given suballocation types could conflict and must respect -VkPhysicalDeviceLimits::bufferImageGranularity. They conflict if one is buffer -or linear image and another one is optimal image. If type is unknown, behave -conservatively. 
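// Illustrative sketch (not part of the original header): the page-overlap test used
// above for bufferImageGranularity, rewritten standalone with a worked example.
// The names OnSamePage and kGranularity are invented for this demonstration.
#include <cassert>
#include <cstdint>

static bool OnSamePage(uint64_t aOffset, uint64_t aSize, uint64_t bOffset, uint64_t pageSize)
{
    // pageSize must be a power of two; masking with ~(pageSize - 1) rounds down to a page boundary.
    const uint64_t aEndPage   = (aOffset + aSize - 1) & ~(pageSize - 1);
    const uint64_t bStartPage = bOffset & ~(pageSize - 1);
    return aEndPage == bStartPage; // A granularity conflict is only possible when both touch the same page.
}

int main()
{
    const uint64_t kGranularity = 1024; // Hypothetical bufferImageGranularity value.
    assert(OnSamePage(0, 1000, 1010, kGranularity));  // Both end/start inside page [0, 1024).
    assert(!OnSamePage(0, 1000, 2048, kGranularity)); // Second resource starts on a later page.
    return 0;
}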
-*/ -static inline bool VmaIsBufferImageGranularityConflict( - VmaSuballocationType suballocType1, - VmaSuballocationType suballocType2) -{ - if (suballocType1 > suballocType2) - { - VMA_SWAP(suballocType1, suballocType2); - } - - switch (suballocType1) - { - case VMA_SUBALLOCATION_TYPE_FREE: - return false; - case VMA_SUBALLOCATION_TYPE_UNKNOWN: - return true; - case VMA_SUBALLOCATION_TYPE_BUFFER: - return - suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN || - suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL; - case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN: - return - suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN || - suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR || - suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL; - case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR: - return - suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL; - case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL: - return false; - default: - VMA_ASSERT(0); - return true; - } -} - -static void VmaWriteMagicValue(void* pData, VkDeviceSize offset) -{ -#if VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_DETECT_CORRUPTION - uint32_t* pDst = (uint32_t*)((char*)pData + offset); - const size_t numberCount = VMA_DEBUG_MARGIN / sizeof(uint32_t); - for (size_t i = 0; i < numberCount; ++i, ++pDst) - { - *pDst = VMA_CORRUPTION_DETECTION_MAGIC_VALUE; - } -#else - // no-op -#endif -} - -static bool VmaValidateMagicValue(const void* pData, VkDeviceSize offset) -{ -#if VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_DETECT_CORRUPTION - const uint32_t* pSrc = (const uint32_t*)((const char*)pData + offset); - const size_t numberCount = VMA_DEBUG_MARGIN / sizeof(uint32_t); - for (size_t i = 0; i < numberCount; ++i, ++pSrc) - { - if (*pSrc != VMA_CORRUPTION_DETECTION_MAGIC_VALUE) - { - return false; - } - } -#endif - return true; -} - -/* -Fills structure with parameters of an example buffer to be used for transfers -during GPU memory defragmentation. -*/ -static void VmaFillGpuDefragmentationBufferCreateInfo(VkBufferCreateInfo& outBufCreateInfo) -{ - memset(&outBufCreateInfo, 0, sizeof(outBufCreateInfo)); - outBufCreateInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO; - outBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT; - outBufCreateInfo.size = (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE; // Example size. -} - - -/* -Performs binary search and returns iterator to first element that is greater or -equal to (key), according to comparison (cmp). - -Cmp should return true if first argument is less than second argument. - -Returned value is the found element, if present in the collection or place where -new element with value (key) should be inserted. -*/ -template -static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end, const KeyT& key, const CmpLess& cmp) -{ - size_t down = 0, up = (end - beg); - while (down < up) - { - const size_t mid = down + (up - down) / 2; // Overflow-safe midpoint calculation - if (cmp(*(beg + mid), key)) - { - down = mid + 1; - } - else - { - up = mid; - } - } - return beg + down; -} - -template -IterT VmaBinaryFindSorted(const IterT& beg, const IterT& end, const KeyT& value, const CmpLess& cmp) -{ - IterT it = VmaBinaryFindFirstNotLess( - beg, end, value, cmp); - if (it == end || - (!cmp(*it, value) && !cmp(value, *it))) - { - return it; - } - return end; -} - -/* -Returns true if all pointers in the array are not-null and unique. -Warning! O(n^2) complexity. Use only inside VMA_HEAVY_ASSERT. -T must be pointer type, e.g. VmaAllocation, VmaPool. 
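// Illustrative sketch: the binary search defined above returns the first element that
// is not less than the key, i.e. the position std::lower_bound would return. This
// standalone example only demonstrates that contract; it does not call any VMA code.
#include <algorithm>
#include <cassert>
#include <vector>

int main()
{
    std::vector<int> v = {1, 3, 3, 7, 9};
    // First element >= 3 is at index 1; first element >= 4 is at index 3.
    assert(std::lower_bound(v.begin(), v.end(), 3) - v.begin() == 1);
    assert(std::lower_bound(v.begin(), v.end(), 4) - v.begin() == 3);
    // A key larger than everything yields end(), the insertion point at the back.
    assert(std::lower_bound(v.begin(), v.end(), 10) == v.end());
    return 0;
}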
-*/ -template -static bool VmaValidatePointerArray(uint32_t count, const T* arr) -{ - for (uint32_t i = 0; i < count; ++i) - { - const T iPtr = arr[i]; - if (iPtr == VMA_NULL) - { - return false; - } - for (uint32_t j = i + 1; j < count; ++j) - { - if (iPtr == arr[j]) - { - return false; - } - } - } - return true; -} - -template -static inline void VmaPnextChainPushFront(MainT* mainStruct, NewT* newStruct) -{ - newStruct->pNext = mainStruct->pNext; - mainStruct->pNext = newStruct; -} - -// This is the main algorithm that guides the selection of a memory type best for an allocation - -// converts usage to required/preferred/not preferred flags. -static bool FindMemoryPreferences( - bool isIntegratedGPU, - const VmaAllocationCreateInfo& allocCreateInfo, - VkFlags bufImgUsage, // VkBufferCreateInfo::usage or VkImageCreateInfo::usage. UINT32_MAX if unknown. - VkMemoryPropertyFlags& outRequiredFlags, - VkMemoryPropertyFlags& outPreferredFlags, - VkMemoryPropertyFlags& outNotPreferredFlags) -{ - outRequiredFlags = allocCreateInfo.requiredFlags; - outPreferredFlags = allocCreateInfo.preferredFlags; - outNotPreferredFlags = 0; - - switch(allocCreateInfo.usage) - { - case VMA_MEMORY_USAGE_UNKNOWN: - break; - case VMA_MEMORY_USAGE_GPU_ONLY: - if(!isIntegratedGPU || (outPreferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0) - { - outPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT; - } - break; - case VMA_MEMORY_USAGE_CPU_ONLY: - outRequiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT; - break; - case VMA_MEMORY_USAGE_CPU_TO_GPU: - outRequiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT; - if(!isIntegratedGPU || (outPreferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0) - { - outPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT; - } - break; - case VMA_MEMORY_USAGE_GPU_TO_CPU: - outRequiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT; - outPreferredFlags |= VK_MEMORY_PROPERTY_HOST_CACHED_BIT; - break; - case VMA_MEMORY_USAGE_CPU_COPY: - outNotPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT; - break; - case VMA_MEMORY_USAGE_GPU_LAZILY_ALLOCATED: - outRequiredFlags |= VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT; - break; - case VMA_MEMORY_USAGE_AUTO: - case VMA_MEMORY_USAGE_AUTO_PREFER_DEVICE: - case VMA_MEMORY_USAGE_AUTO_PREFER_HOST: - { - if(bufImgUsage == UINT32_MAX) - { - VMA_ASSERT(0 && "VMA_MEMORY_USAGE_AUTO* values can only be used with functions like vmaCreateBuffer, vmaCreateImage so that the details of the created resource are known."); - return false; - } - // This relies on values of VK_IMAGE_USAGE_TRANSFER* being the same VK_BUFFER_IMAGE_TRANSFER*. - const bool deviceAccess = (bufImgUsage & ~(VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_TRANSFER_SRC_BIT)) != 0; - const bool hostAccessSequentialWrite = (allocCreateInfo.flags & VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT) != 0; - const bool hostAccessRandom = (allocCreateInfo.flags & VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT) != 0; - const bool hostAccessAllowTransferInstead = (allocCreateInfo.flags & VMA_ALLOCATION_CREATE_HOST_ACCESS_ALLOW_TRANSFER_INSTEAD_BIT) != 0; - const bool preferDevice = allocCreateInfo.usage == VMA_MEMORY_USAGE_AUTO_PREFER_DEVICE; - const bool preferHost = allocCreateInfo.usage == VMA_MEMORY_USAGE_AUTO_PREFER_HOST; - - // CPU random access - e.g. a buffer written to or transferred from GPU to read back on CPU. 
- if(hostAccessRandom) - { - if(!isIntegratedGPU && deviceAccess && hostAccessAllowTransferInstead && !preferHost) - { - // Nice if it will end up in HOST_VISIBLE, but more importantly prefer DEVICE_LOCAL. - // Omitting HOST_VISIBLE here is intentional. - // In case there is DEVICE_LOCAL | HOST_VISIBLE | HOST_CACHED, it will pick that one. - // Otherwise, this will give same weight to DEVICE_LOCAL as HOST_VISIBLE | HOST_CACHED and select the former if occurs first on the list. - outPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT; - } - else - { - // Always CPU memory, cached. - outRequiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT; - } - } - // CPU sequential write - may be CPU or host-visible GPU memory, uncached and write-combined. - else if(hostAccessSequentialWrite) - { - // Want uncached and write-combined. - outNotPreferredFlags |= VK_MEMORY_PROPERTY_HOST_CACHED_BIT; - - if(!isIntegratedGPU && deviceAccess && hostAccessAllowTransferInstead && !preferHost) - { - outPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT | VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT; - } - else - { - outRequiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT; - // Direct GPU access, CPU sequential write (e.g. a dynamic uniform buffer updated every frame) - if(deviceAccess) - { - // Could go to CPU memory or GPU BAR/unified. Up to the user to decide. If no preference, choose GPU memory. - if(preferHost) - outNotPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT; - else - outPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT; - } - // GPU no direct access, CPU sequential write (e.g. an upload buffer to be transferred to the GPU) - else - { - // Could go to CPU memory or GPU BAR/unified. Up to the user to decide. If no preference, choose CPU memory. - if(preferDevice) - outPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT; - else - outNotPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT; - } - } - } - // No CPU access - else - { - // GPU access, no CPU access (e.g. a color attachment image) - prefer GPU memory - if(deviceAccess) - { - // ...unless there is a clear preference from the user not to do so. - if(preferHost) - outNotPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT; - else - outPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT; - } - // No direct GPU access, no CPU access, just transfers. - // It may be staging copy intended for e.g. preserving image for next frame (then better GPU memory) or - // a "swap file" copy to free some GPU memory (then better CPU memory). - // Up to the user to decide. If no preferece, assume the former and choose GPU memory. - if(preferHost) - outNotPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT; - else - outPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT; - } - break; - } - default: - VMA_ASSERT(0); - } - - // Avoid DEVICE_COHERENT unless explicitly requested. 
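// Illustrative sketch: a much-simplified version of the VMA_MEMORY_USAGE_AUTO decision
// above, assuming Vulkan headers are available. It only shows the general idea (mappable
// uploads need HOST_VISIBLE, GPU-only resources prefer DEVICE_LOCAL); the real function
// weighs many more inputs. SimplePreferences and PickPreferences are invented names.
#include <vulkan/vulkan_core.h>

struct SimplePreferences
{
    VkMemoryPropertyFlags required  = 0;
    VkMemoryPropertyFlags preferred = 0;
};

// hostSequentialWrite mirrors VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT,
// deviceAccess mirrors "usage other than pure transfer".
static SimplePreferences PickPreferences(bool hostSequentialWrite, bool deviceAccess)
{
    SimplePreferences out;
    if (hostSequentialWrite)
    {
        out.required |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;      // The CPU must be able to map it.
        if (deviceAccess)
            out.preferred |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT; // e.g. a per-frame uniform buffer: BAR/unified memory if available.
    }
    else if (deviceAccess)
    {
        out.preferred |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;     // e.g. a color attachment: plain GPU memory.
    }
    return out;
}

int main()
{
    const SimplePreferences staging = PickPreferences(/*hostSequentialWrite=*/true, /*deviceAccess=*/false);
    return (staging.required & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) ? 0 : 1;
}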
- if(((allocCreateInfo.requiredFlags | allocCreateInfo.preferredFlags) & - (VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY | VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD_COPY)) == 0) - { - outNotPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD_COPY; - } - - return true; -} - -//////////////////////////////////////////////////////////////////////////////// -// Memory allocation - -static void* VmaMalloc(const VkAllocationCallbacks* pAllocationCallbacks, size_t size, size_t alignment) -{ - void* result = VMA_NULL; - if ((pAllocationCallbacks != VMA_NULL) && - (pAllocationCallbacks->pfnAllocation != VMA_NULL)) - { - result = (*pAllocationCallbacks->pfnAllocation)( - pAllocationCallbacks->pUserData, - size, - alignment, - VK_SYSTEM_ALLOCATION_SCOPE_OBJECT); - } - else - { - result = VMA_SYSTEM_ALIGNED_MALLOC(size, alignment); - } - VMA_ASSERT(result != VMA_NULL && "CPU memory allocation failed."); - return result; -} - -static void VmaFree(const VkAllocationCallbacks* pAllocationCallbacks, void* ptr) -{ - if ((pAllocationCallbacks != VMA_NULL) && - (pAllocationCallbacks->pfnFree != VMA_NULL)) - { - (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr); - } - else - { - VMA_SYSTEM_ALIGNED_FREE(ptr); - } -} - -template -static T* VmaAllocate(const VkAllocationCallbacks* pAllocationCallbacks) -{ - return (T*)VmaMalloc(pAllocationCallbacks, sizeof(T), VMA_ALIGN_OF(T)); -} - -template -static T* VmaAllocateArray(const VkAllocationCallbacks* pAllocationCallbacks, size_t count) -{ - return (T*)VmaMalloc(pAllocationCallbacks, sizeof(T) * count, VMA_ALIGN_OF(T)); -} - -#define vma_new(allocator, type) new(VmaAllocate(allocator))(type) - -#define vma_new_array(allocator, type, count) new(VmaAllocateArray((allocator), (count)))(type) - -template -static void vma_delete(const VkAllocationCallbacks* pAllocationCallbacks, T* ptr) -{ - ptr->~T(); - VmaFree(pAllocationCallbacks, ptr); -} - -template -static void vma_delete_array(const VkAllocationCallbacks* pAllocationCallbacks, T* ptr, size_t count) -{ - if (ptr != VMA_NULL) - { - for (size_t i = count; i--; ) - { - ptr[i].~T(); - } - VmaFree(pAllocationCallbacks, ptr); - } -} - -static char* VmaCreateStringCopy(const VkAllocationCallbacks* allocs, const char* srcStr) -{ - if (srcStr != VMA_NULL) - { - const size_t len = strlen(srcStr); - char* const result = vma_new_array(allocs, char, len + 1); - memcpy(result, srcStr, len + 1); - return result; - } - return VMA_NULL; -} - -#if VMA_STATS_STRING_ENABLED -static char* VmaCreateStringCopy(const VkAllocationCallbacks* allocs, const char* srcStr, size_t strLen) -{ - if (srcStr != VMA_NULL) - { - char* const result = vma_new_array(allocs, char, strLen + 1); - memcpy(result, srcStr, strLen); - result[strLen] = '\0'; - return result; - } - return VMA_NULL; -} -#endif // VMA_STATS_STRING_ENABLED - -static void VmaFreeString(const VkAllocationCallbacks* allocs, char* str) -{ - if (str != VMA_NULL) - { - const size_t len = strlen(str); - vma_delete_array(allocs, str, len + 1); - } -} - -template -size_t VmaVectorInsertSorted(VectorT& vector, const typename VectorT::value_type& value) -{ - const size_t indexToInsert = VmaBinaryFindFirstNotLess( - vector.data(), - vector.data() + vector.size(), - value, - CmpLess()) - vector.data(); - VmaVectorInsert(vector, indexToInsert, value); - return indexToInsert; -} - -template -bool VmaVectorRemoveSorted(VectorT& vector, const typename VectorT::value_type& value) -{ - CmpLess comparator; - typename VectorT::iterator it = VmaBinaryFindFirstNotLess( - 
vector.begin(), - vector.end(), - value, - comparator); - if ((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it)) - { - size_t indexToRemove = it - vector.begin(); - VmaVectorRemove(vector, indexToRemove); - return true; - } - return false; -} -#endif // _VMA_FUNCTIONS - -#ifndef _VMA_STATISTICS_FUNCTIONS - -static void VmaClearStatistics(VmaStatistics& outStats) -{ - outStats.blockCount = 0; - outStats.allocationCount = 0; - outStats.blockBytes = 0; - outStats.allocationBytes = 0; -} - -static void VmaAddStatistics(VmaStatistics& inoutStats, const VmaStatistics& src) -{ - inoutStats.blockCount += src.blockCount; - inoutStats.allocationCount += src.allocationCount; - inoutStats.blockBytes += src.blockBytes; - inoutStats.allocationBytes += src.allocationBytes; -} - -static void VmaClearDetailedStatistics(VmaDetailedStatistics& outStats) -{ - VmaClearStatistics(outStats.statistics); - outStats.unusedRangeCount = 0; - outStats.allocationSizeMin = VK_WHOLE_SIZE; - outStats.allocationSizeMax = 0; - outStats.unusedRangeSizeMin = VK_WHOLE_SIZE; - outStats.unusedRangeSizeMax = 0; -} - -static void VmaAddDetailedStatisticsAllocation(VmaDetailedStatistics& inoutStats, VkDeviceSize size) -{ - inoutStats.statistics.allocationCount++; - inoutStats.statistics.allocationBytes += size; - inoutStats.allocationSizeMin = VMA_MIN(inoutStats.allocationSizeMin, size); - inoutStats.allocationSizeMax = VMA_MAX(inoutStats.allocationSizeMax, size); -} - -static void VmaAddDetailedStatisticsUnusedRange(VmaDetailedStatistics& inoutStats, VkDeviceSize size) -{ - inoutStats.unusedRangeCount++; - inoutStats.unusedRangeSizeMin = VMA_MIN(inoutStats.unusedRangeSizeMin, size); - inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, size); -} - -static void VmaAddDetailedStatistics(VmaDetailedStatistics& inoutStats, const VmaDetailedStatistics& src) -{ - VmaAddStatistics(inoutStats.statistics, src.statistics); - inoutStats.unusedRangeCount += src.unusedRangeCount; - inoutStats.allocationSizeMin = VMA_MIN(inoutStats.allocationSizeMin, src.allocationSizeMin); - inoutStats.allocationSizeMax = VMA_MAX(inoutStats.allocationSizeMax, src.allocationSizeMax); - inoutStats.unusedRangeSizeMin = VMA_MIN(inoutStats.unusedRangeSizeMin, src.unusedRangeSizeMin); - inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, src.unusedRangeSizeMax); -} - -#endif // _VMA_STATISTICS_FUNCTIONS - -#ifndef _VMA_MUTEX_LOCK -// Helper RAII class to lock a mutex in constructor and unlock it in destructor (at the end of scope). -struct VmaMutexLock -{ - VMA_CLASS_NO_COPY(VmaMutexLock) -public: - VmaMutexLock(VMA_MUTEX& mutex, bool useMutex = true) : - m_pMutex(useMutex ? &mutex : VMA_NULL) - { - if (m_pMutex) { m_pMutex->Lock(); } - } - ~VmaMutexLock() { if (m_pMutex) { m_pMutex->Unlock(); } } - -private: - VMA_MUTEX* m_pMutex; -}; - -// Helper RAII class to lock a RW mutex in constructor and unlock it in destructor (at the end of scope), for reading. -struct VmaMutexLockRead -{ - VMA_CLASS_NO_COPY(VmaMutexLockRead) -public: - VmaMutexLockRead(VMA_RW_MUTEX& mutex, bool useMutex) : - m_pMutex(useMutex ? &mutex : VMA_NULL) - { - if (m_pMutex) { m_pMutex->LockRead(); } - } - ~VmaMutexLockRead() { if (m_pMutex) { m_pMutex->UnlockRead(); } } - -private: - VMA_RW_MUTEX* m_pMutex; -}; - -// Helper RAII class to lock a RW mutex in constructor and unlock it in destructor (at the end of scope), for writing. 
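// Illustrative sketch: the same optional-locking RAII idea as the guards above,
// expressed with std::mutex so it can be tried outside of VMA. OptionalLockGuard,
// gMutex and useMutex are invented names for this demonstration.
#include <mutex>

class OptionalLockGuard
{
public:
    // When useMutex is false the guard is a no-op, mirroring the guards above.
    OptionalLockGuard(std::mutex& mutex, bool useMutex)
        : m_Mutex(useMutex ? &mutex : nullptr)
    {
        if (m_Mutex) m_Mutex->lock();
    }
    ~OptionalLockGuard()
    {
        if (m_Mutex) m_Mutex->unlock(); // Released automatically at end of scope.
    }
    OptionalLockGuard(const OptionalLockGuard&) = delete;
    OptionalLockGuard& operator=(const OptionalLockGuard&) = delete;

private:
    std::mutex* m_Mutex;
};

static std::mutex gMutex; // Hypothetical guard for some shared state.

int main()
{
    OptionalLockGuard lock(gMutex, /*useMutex=*/true);
    // ... critical section ...
    return 0;
}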
-struct VmaMutexLockWrite -{ - VMA_CLASS_NO_COPY(VmaMutexLockWrite) -public: - VmaMutexLockWrite(VMA_RW_MUTEX& mutex, bool useMutex) - : m_pMutex(useMutex ? &mutex : VMA_NULL) - { - if (m_pMutex) { m_pMutex->LockWrite(); } - } - ~VmaMutexLockWrite() { if (m_pMutex) { m_pMutex->UnlockWrite(); } } - -private: - VMA_RW_MUTEX* m_pMutex; -}; - -#if VMA_DEBUG_GLOBAL_MUTEX - static VMA_MUTEX gDebugGlobalMutex; - #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true); -#else - #define VMA_DEBUG_GLOBAL_MUTEX_LOCK -#endif -#endif // _VMA_MUTEX_LOCK - -#ifndef _VMA_ATOMIC_TRANSACTIONAL_INCREMENT -// An object that increments given atomic but decrements it back in the destructor unless Commit() is called. -template -struct AtomicTransactionalIncrement -{ -public: - typedef std::atomic AtomicT; - - ~AtomicTransactionalIncrement() - { - if(m_Atomic) - --(*m_Atomic); - } - - void Commit() { m_Atomic = nullptr; } - T Increment(AtomicT* atomic) - { - m_Atomic = atomic; - return m_Atomic->fetch_add(1); - } - -private: - AtomicT* m_Atomic = nullptr; -}; -#endif // _VMA_ATOMIC_TRANSACTIONAL_INCREMENT - -#ifndef _VMA_STL_ALLOCATOR -// STL-compatible allocator. -template -struct VmaStlAllocator -{ - const VkAllocationCallbacks* const m_pCallbacks; - typedef T value_type; - - VmaStlAllocator(const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) {} - template - VmaStlAllocator(const VmaStlAllocator& src) : m_pCallbacks(src.m_pCallbacks) {} - VmaStlAllocator(const VmaStlAllocator&) = default; - VmaStlAllocator& operator=(const VmaStlAllocator&) = delete; - - T* allocate(size_t n) { return VmaAllocateArray(m_pCallbacks, n); } - void deallocate(T* p, size_t n) { VmaFree(m_pCallbacks, p); } - - template - bool operator==(const VmaStlAllocator& rhs) const - { - return m_pCallbacks == rhs.m_pCallbacks; - } - template - bool operator!=(const VmaStlAllocator& rhs) const - { - return m_pCallbacks != rhs.m_pCallbacks; - } -}; -#endif // _VMA_STL_ALLOCATOR - -#ifndef _VMA_VECTOR -/* Class with interface compatible with subset of std::vector. -T must be POD because constructors and destructors are not called and memcpy is -used for these objects. */ -template -class VmaVector -{ -public: - typedef T value_type; - typedef T* iterator; - typedef const T* const_iterator; - - VmaVector(const AllocatorT& allocator); - VmaVector(size_t count, const AllocatorT& allocator); - // This version of the constructor is here for compatibility with pre-C++14 std::vector. - // value is unused. 
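// Illustrative sketch: the "transactional increment" pattern used by
// AtomicTransactionalIncrement above - the counter is bumped immediately and rolled
// back in the destructor unless Commit() is called. Standalone analog with
// std::atomic; ScopedCountedOperation and liveObjects are invented names.
#include <atomic>
#include <cassert>

class ScopedCountedOperation
{
public:
    explicit ScopedCountedOperation(std::atomic<int>& counter) : m_Counter(&counter)
    {
        m_Counter->fetch_add(1);
    }
    ~ScopedCountedOperation()
    {
        if (m_Counter)               // Not committed: undo the increment.
            m_Counter->fetch_sub(1);
    }
    void Commit() { m_Counter = nullptr; } // Keep the increment.

private:
    std::atomic<int>* m_Counter;
};

int main()
{
    std::atomic<int> liveObjects{0};
    {
        ScopedCountedOperation op(liveObjects); // liveObjects == 1 here.
        // ... creation failed somewhere, Commit() never called ...
    }
    assert(liveObjects.load() == 0);            // Rolled back automatically.
    {
        ScopedCountedOperation op(liveObjects);
        op.Commit();                            // Creation succeeded, increment kept.
    }
    assert(liveObjects.load() == 1);
    return 0;
}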
- VmaVector(size_t count, const T& value, const AllocatorT& allocator) : VmaVector(count, allocator) {} - VmaVector(const VmaVector& src); - VmaVector& operator=(const VmaVector& rhs); - ~VmaVector() { VmaFree(m_Allocator.m_pCallbacks, m_pArray); } - - bool empty() const { return m_Count == 0; } - size_t size() const { return m_Count; } - T* data() { return m_pArray; } - T& front() { VMA_HEAVY_ASSERT(m_Count > 0); return m_pArray[0]; } - T& back() { VMA_HEAVY_ASSERT(m_Count > 0); return m_pArray[m_Count - 1]; } - const T* data() const { return m_pArray; } - const T& front() const { VMA_HEAVY_ASSERT(m_Count > 0); return m_pArray[0]; } - const T& back() const { VMA_HEAVY_ASSERT(m_Count > 0); return m_pArray[m_Count - 1]; } - - iterator begin() { return m_pArray; } - iterator end() { return m_pArray + m_Count; } - const_iterator cbegin() const { return m_pArray; } - const_iterator cend() const { return m_pArray + m_Count; } - const_iterator begin() const { return cbegin(); } - const_iterator end() const { return cend(); } - - void pop_front() { VMA_HEAVY_ASSERT(m_Count > 0); remove(0); } - void pop_back() { VMA_HEAVY_ASSERT(m_Count > 0); resize(size() - 1); } - void push_front(const T& src) { insert(0, src); } - - void push_back(const T& src); - void reserve(size_t newCapacity, bool freeMemory = false); - void resize(size_t newCount); - void clear() { resize(0); } - void shrink_to_fit(); - void insert(size_t index, const T& src); - void remove(size_t index); - - T& operator[](size_t index) { VMA_HEAVY_ASSERT(index < m_Count); return m_pArray[index]; } - const T& operator[](size_t index) const { VMA_HEAVY_ASSERT(index < m_Count); return m_pArray[index]; } - -private: - AllocatorT m_Allocator; - T* m_pArray; - size_t m_Count; - size_t m_Capacity; -}; - -#ifndef _VMA_VECTOR_FUNCTIONS -template -VmaVector::VmaVector(const AllocatorT& allocator) - : m_Allocator(allocator), - m_pArray(VMA_NULL), - m_Count(0), - m_Capacity(0) {} - -template -VmaVector::VmaVector(size_t count, const AllocatorT& allocator) - : m_Allocator(allocator), - m_pArray(count ? (T*)VmaAllocateArray(allocator.m_pCallbacks, count) : VMA_NULL), - m_Count(count), - m_Capacity(count) {} - -template -VmaVector::VmaVector(const VmaVector& src) - : m_Allocator(src.m_Allocator), - m_pArray(src.m_Count ? (T*)VmaAllocateArray(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL), - m_Count(src.m_Count), - m_Capacity(src.m_Count) -{ - if (m_Count != 0) - { - memcpy(m_pArray, src.m_pArray, m_Count * sizeof(T)); - } -} - -template -VmaVector& VmaVector::operator=(const VmaVector& rhs) -{ - if (&rhs != this) - { - resize(rhs.m_Count); - if (m_Count != 0) - { - memcpy(m_pArray, rhs.m_pArray, m_Count * sizeof(T)); - } - } - return *this; -} - -template -void VmaVector::push_back(const T& src) -{ - const size_t newIndex = size(); - resize(newIndex + 1); - m_pArray[newIndex] = src; -} - -template -void VmaVector::reserve(size_t newCapacity, bool freeMemory) -{ - newCapacity = VMA_MAX(newCapacity, m_Count); - - if ((newCapacity < m_Capacity) && !freeMemory) - { - newCapacity = m_Capacity; - } - - if (newCapacity != m_Capacity) - { - T* const newArray = newCapacity ? 
VmaAllocateArray(m_Allocator, newCapacity) : VMA_NULL; - if (m_Count != 0) - { - memcpy(newArray, m_pArray, m_Count * sizeof(T)); - } - VmaFree(m_Allocator.m_pCallbacks, m_pArray); - m_Capacity = newCapacity; - m_pArray = newArray; - } -} - -template -void VmaVector::resize(size_t newCount) -{ - size_t newCapacity = m_Capacity; - if (newCount > m_Capacity) - { - newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (size_t)8)); - } - - if (newCapacity != m_Capacity) - { - T* const newArray = newCapacity ? VmaAllocateArray(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL; - const size_t elementsToCopy = VMA_MIN(m_Count, newCount); - if (elementsToCopy != 0) - { - memcpy(newArray, m_pArray, elementsToCopy * sizeof(T)); - } - VmaFree(m_Allocator.m_pCallbacks, m_pArray); - m_Capacity = newCapacity; - m_pArray = newArray; - } - - m_Count = newCount; -} - -template -void VmaVector::shrink_to_fit() -{ - if (m_Capacity > m_Count) - { - T* newArray = VMA_NULL; - if (m_Count > 0) - { - newArray = VmaAllocateArray(m_Allocator.m_pCallbacks, m_Count); - memcpy(newArray, m_pArray, m_Count * sizeof(T)); - } - VmaFree(m_Allocator.m_pCallbacks, m_pArray); - m_Capacity = m_Count; - m_pArray = newArray; - } -} - -template -void VmaVector::insert(size_t index, const T& src) -{ - VMA_HEAVY_ASSERT(index <= m_Count); - const size_t oldCount = size(); - resize(oldCount + 1); - if (index < oldCount) - { - memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) * sizeof(T)); - } - m_pArray[index] = src; -} - -template -void VmaVector::remove(size_t index) -{ - VMA_HEAVY_ASSERT(index < m_Count); - const size_t oldCount = size(); - if (index < oldCount - 1) - { - memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) * sizeof(T)); - } - resize(oldCount - 1); -} -#endif // _VMA_VECTOR_FUNCTIONS - -template -static void VmaVectorInsert(VmaVector& vec, size_t index, const T& item) -{ - vec.insert(index, item); -} - -template -static void VmaVectorRemove(VmaVector& vec, size_t index) -{ - vec.remove(index); -} -#endif // _VMA_VECTOR - -#ifndef _VMA_SMALL_VECTOR -/* -This is a vector (a variable-sized array), optimized for the case when the array is small. - -It contains some number of elements in-place, which allows it to avoid heap allocation -when the actual number of elements is below that threshold. This allows normal "small" -cases to be fast without losing generality for large inputs. -*/ -template -class VmaSmallVector -{ -public: - typedef T value_type; - typedef T* iterator; - - VmaSmallVector(const AllocatorT& allocator); - VmaSmallVector(size_t count, const AllocatorT& allocator); - template - VmaSmallVector(const VmaSmallVector&) = delete; - template - VmaSmallVector& operator=(const VmaSmallVector&) = delete; - ~VmaSmallVector() = default; - - bool empty() const { return m_Count == 0; } - size_t size() const { return m_Count; } - T* data() { return m_Count > N ? m_DynamicArray.data() : m_StaticArray; } - T& front() { VMA_HEAVY_ASSERT(m_Count > 0); return data()[0]; } - T& back() { VMA_HEAVY_ASSERT(m_Count > 0); return data()[m_Count - 1]; } - const T* data() const { return m_Count > N ? 
m_DynamicArray.data() : m_StaticArray; } - const T& front() const { VMA_HEAVY_ASSERT(m_Count > 0); return data()[0]; } - const T& back() const { VMA_HEAVY_ASSERT(m_Count > 0); return data()[m_Count - 1]; } - - iterator begin() { return data(); } - iterator end() { return data() + m_Count; } - - void pop_front() { VMA_HEAVY_ASSERT(m_Count > 0); remove(0); } - void pop_back() { VMA_HEAVY_ASSERT(m_Count > 0); resize(size() - 1); } - void push_front(const T& src) { insert(0, src); } - - void push_back(const T& src); - void resize(size_t newCount, bool freeMemory = false); - void clear(bool freeMemory = false); - void insert(size_t index, const T& src); - void remove(size_t index); - - T& operator[](size_t index) { VMA_HEAVY_ASSERT(index < m_Count); return data()[index]; } - const T& operator[](size_t index) const { VMA_HEAVY_ASSERT(index < m_Count); return data()[index]; } - -private: - size_t m_Count; - T m_StaticArray[N]; // Used when m_Size <= N - VmaVector m_DynamicArray; // Used when m_Size > N -}; - -#ifndef _VMA_SMALL_VECTOR_FUNCTIONS -template -VmaSmallVector::VmaSmallVector(const AllocatorT& allocator) - : m_Count(0), - m_DynamicArray(allocator) {} - -template -VmaSmallVector::VmaSmallVector(size_t count, const AllocatorT& allocator) - : m_Count(count), - m_DynamicArray(count > N ? count : 0, allocator) {} - -template -void VmaSmallVector::push_back(const T& src) -{ - const size_t newIndex = size(); - resize(newIndex + 1); - data()[newIndex] = src; -} - -template -void VmaSmallVector::resize(size_t newCount, bool freeMemory) -{ - if (newCount > N && m_Count > N) - { - // Any direction, staying in m_DynamicArray - m_DynamicArray.resize(newCount); - if (freeMemory) - { - m_DynamicArray.shrink_to_fit(); - } - } - else if (newCount > N && m_Count <= N) - { - // Growing, moving from m_StaticArray to m_DynamicArray - m_DynamicArray.resize(newCount); - if (m_Count > 0) - { - memcpy(m_DynamicArray.data(), m_StaticArray, m_Count * sizeof(T)); - } - } - else if (newCount <= N && m_Count > N) - { - // Shrinking, moving from m_DynamicArray to m_StaticArray - if (newCount > 0) - { - memcpy(m_StaticArray, m_DynamicArray.data(), newCount * sizeof(T)); - } - m_DynamicArray.resize(0); - if (freeMemory) - { - m_DynamicArray.shrink_to_fit(); - } - } - else - { - // Any direction, staying in m_StaticArray - nothing to do here - } - m_Count = newCount; -} - -template -void VmaSmallVector::clear(bool freeMemory) -{ - m_DynamicArray.clear(); - if (freeMemory) - { - m_DynamicArray.shrink_to_fit(); - } - m_Count = 0; -} - -template -void VmaSmallVector::insert(size_t index, const T& src) -{ - VMA_HEAVY_ASSERT(index <= m_Count); - const size_t oldCount = size(); - resize(oldCount + 1); - T* const dataPtr = data(); - if (index < oldCount) - { - // I know, this could be more optimal for case where memmove can be memcpy directly from m_StaticArray to m_DynamicArray. - memmove(dataPtr + (index + 1), dataPtr + index, (oldCount - index) * sizeof(T)); - } - dataPtr[index] = src; -} - -template -void VmaSmallVector::remove(size_t index) -{ - VMA_HEAVY_ASSERT(index < m_Count); - const size_t oldCount = size(); - if (index < oldCount - 1) - { - // I know, this could be more optimal for case where memmove can be memcpy directly from m_DynamicArray to m_StaticArray. 
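// Illustrative sketch: the small-buffer idea behind VmaSmallVector - keep up to N
// elements in-place and only spill to a heap-backed vector once the count exceeds N.
// This standalone toy (SmallStorage) only shows where the data lives for trivially
// copyable T; it is not the VMA class itself.
#include <cassert>
#include <cstddef>
#include <cstring>
#include <vector>

template<typename T, std::size_t N>
class SmallStorage
{
public:
    void resize(std::size_t newCount)
    {
        if (newCount > N && m_Count <= N)      // Growing past N: copy in-place data to the heap.
        {
            m_Heap.resize(newCount);
            std::memcpy(m_Heap.data(), m_Static, m_Count * sizeof(T));
        }
        else if (newCount > N)                 // Already on the heap: just resize it.
        {
            m_Heap.resize(newCount);
        }
        else if (m_Count > N)                  // Shrinking back below N: copy the kept prefix in-place.
        {
            std::memcpy(m_Static, m_Heap.data(), newCount * sizeof(T));
            m_Heap.clear();
        }
        m_Count = newCount;
    }
    T* data() { return m_Count > N ? m_Heap.data() : m_Static; }
    std::size_t size() const { return m_Count; }

private:
    std::size_t m_Count = 0;
    T m_Static[N];          // Used while m_Count <= N (no heap allocation).
    std::vector<T> m_Heap;  // Used once m_Count > N.
};

int main()
{
    SmallStorage<int, 4> s;
    s.resize(3);
    s.data()[0] = 7;
    s.resize(8);                               // Spills to the heap; existing elements are copied over.
    assert(s.data()[0] == 7 && s.size() == 8);
    return 0;
}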
- T* const dataPtr = data(); - memmove(dataPtr + index, dataPtr + (index + 1), (oldCount - index - 1) * sizeof(T)); - } - resize(oldCount - 1); -} -#endif // _VMA_SMALL_VECTOR_FUNCTIONS -#endif // _VMA_SMALL_VECTOR - -#ifndef _VMA_POOL_ALLOCATOR -/* -Allocator for objects of type T using a list of arrays (pools) to speed up -allocation. Number of elements that can be allocated is not bounded because -allocator can create multiple blocks. -*/ -template -class VmaPoolAllocator -{ - VMA_CLASS_NO_COPY(VmaPoolAllocator) -public: - VmaPoolAllocator(const VkAllocationCallbacks* pAllocationCallbacks, uint32_t firstBlockCapacity); - ~VmaPoolAllocator(); - template T* Alloc(Types&&... args); - void Free(T* ptr); - -private: - union Item - { - uint32_t NextFreeIndex; - alignas(T) char Value[sizeof(T)]; - }; - struct ItemBlock - { - Item* pItems; - uint32_t Capacity; - uint32_t FirstFreeIndex; - }; - - const VkAllocationCallbacks* m_pAllocationCallbacks; - const uint32_t m_FirstBlockCapacity; - VmaVector> m_ItemBlocks; - - ItemBlock& CreateNewBlock(); -}; - -#ifndef _VMA_POOL_ALLOCATOR_FUNCTIONS -template -VmaPoolAllocator::VmaPoolAllocator(const VkAllocationCallbacks* pAllocationCallbacks, uint32_t firstBlockCapacity) - : m_pAllocationCallbacks(pAllocationCallbacks), - m_FirstBlockCapacity(firstBlockCapacity), - m_ItemBlocks(VmaStlAllocator(pAllocationCallbacks)) -{ - VMA_ASSERT(m_FirstBlockCapacity > 1); -} - -template -VmaPoolAllocator::~VmaPoolAllocator() -{ - for (size_t i = m_ItemBlocks.size(); i--;) - vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemBlocks[i].Capacity); - m_ItemBlocks.clear(); -} - -template -template T* VmaPoolAllocator::Alloc(Types&&... args) -{ - for (size_t i = m_ItemBlocks.size(); i--; ) - { - ItemBlock& block = m_ItemBlocks[i]; - // This block has some free items: Use first one. - if (block.FirstFreeIndex != UINT32_MAX) - { - Item* const pItem = &block.pItems[block.FirstFreeIndex]; - block.FirstFreeIndex = pItem->NextFreeIndex; - T* result = (T*)&pItem->Value; - new(result)T(std::forward(args)...); // Explicit constructor call. - return result; - } - } - - // No block has free item: Create new one and use it. - ItemBlock& newBlock = CreateNewBlock(); - Item* const pItem = &newBlock.pItems[0]; - newBlock.FirstFreeIndex = pItem->NextFreeIndex; - T* result = (T*)&pItem->Value; - new(result) T(std::forward(args)...); // Explicit constructor call. - return result; -} - -template -void VmaPoolAllocator::Free(T* ptr) -{ - // Search all memory blocks to find ptr. - for (size_t i = m_ItemBlocks.size(); i--; ) - { - ItemBlock& block = m_ItemBlocks[i]; - - // Casting to union. - Item* pItemPtr; - memcpy(&pItemPtr, &ptr, sizeof(pItemPtr)); - - // Check if pItemPtr is in address range of this block. - if ((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + block.Capacity)) - { - ptr->~T(); // Explicit destructor call. - const uint32_t index = static_cast(pItemPtr - block.pItems); - pItemPtr->NextFreeIndex = block.FirstFreeIndex; - block.FirstFreeIndex = index; - return; - } - } - VMA_ASSERT(0 && "Pointer doesn't belong to this memory pool."); -} - -template -typename VmaPoolAllocator::ItemBlock& VmaPoolAllocator::CreateNewBlock() -{ - const uint32_t newBlockCapacity = m_ItemBlocks.empty() ? 
- m_FirstBlockCapacity : m_ItemBlocks.back().Capacity * 3 / 2; - - const ItemBlock newBlock = - { - vma_new_array(m_pAllocationCallbacks, Item, newBlockCapacity), - newBlockCapacity, - 0 - }; - - m_ItemBlocks.push_back(newBlock); - - // Setup singly-linked list of all free items in this block. - for (uint32_t i = 0; i < newBlockCapacity - 1; ++i) - newBlock.pItems[i].NextFreeIndex = i + 1; - newBlock.pItems[newBlockCapacity - 1].NextFreeIndex = UINT32_MAX; - return m_ItemBlocks.back(); -} -#endif // _VMA_POOL_ALLOCATOR_FUNCTIONS -#endif // _VMA_POOL_ALLOCATOR - -#ifndef _VMA_RAW_LIST -template -struct VmaListItem -{ - VmaListItem* pPrev; - VmaListItem* pNext; - T Value; -}; - -// Doubly linked list. -template -class VmaRawList -{ - VMA_CLASS_NO_COPY(VmaRawList) -public: - typedef VmaListItem ItemType; - - VmaRawList(const VkAllocationCallbacks* pAllocationCallbacks); - // Intentionally not calling Clear, because that would be unnecessary - // computations to return all items to m_ItemAllocator as free. - ~VmaRawList() = default; - - size_t GetCount() const { return m_Count; } - bool IsEmpty() const { return m_Count == 0; } - - ItemType* Front() { return m_pFront; } - ItemType* Back() { return m_pBack; } - const ItemType* Front() const { return m_pFront; } - const ItemType* Back() const { return m_pBack; } - - ItemType* PushFront(); - ItemType* PushBack(); - ItemType* PushFront(const T& value); - ItemType* PushBack(const T& value); - void PopFront(); - void PopBack(); - - // Item can be null - it means PushBack. - ItemType* InsertBefore(ItemType* pItem); - // Item can be null - it means PushFront. - ItemType* InsertAfter(ItemType* pItem); - ItemType* InsertBefore(ItemType* pItem, const T& value); - ItemType* InsertAfter(ItemType* pItem, const T& value); - - void Clear(); - void Remove(ItemType* pItem); - -private: - const VkAllocationCallbacks* const m_pAllocationCallbacks; - VmaPoolAllocator m_ItemAllocator; - ItemType* m_pFront; - ItemType* m_pBack; - size_t m_Count; -}; - -#ifndef _VMA_RAW_LIST_FUNCTIONS -template -VmaRawList::VmaRawList(const VkAllocationCallbacks* pAllocationCallbacks) - : m_pAllocationCallbacks(pAllocationCallbacks), - m_ItemAllocator(pAllocationCallbacks, 128), - m_pFront(VMA_NULL), - m_pBack(VMA_NULL), - m_Count(0) {} - -template -VmaListItem* VmaRawList::PushFront() -{ - ItemType* const pNewItem = m_ItemAllocator.Alloc(); - pNewItem->pPrev = VMA_NULL; - if (IsEmpty()) - { - pNewItem->pNext = VMA_NULL; - m_pFront = pNewItem; - m_pBack = pNewItem; - m_Count = 1; - } - else - { - pNewItem->pNext = m_pFront; - m_pFront->pPrev = pNewItem; - m_pFront = pNewItem; - ++m_Count; - } - return pNewItem; -} - -template -VmaListItem* VmaRawList::PushBack() -{ - ItemType* const pNewItem = m_ItemAllocator.Alloc(); - pNewItem->pNext = VMA_NULL; - if(IsEmpty()) - { - pNewItem->pPrev = VMA_NULL; - m_pFront = pNewItem; - m_pBack = pNewItem; - m_Count = 1; - } - else - { - pNewItem->pPrev = m_pBack; - m_pBack->pNext = pNewItem; - m_pBack = pNewItem; - ++m_Count; - } - return pNewItem; -} - -template -VmaListItem* VmaRawList::PushFront(const T& value) -{ - ItemType* const pNewItem = PushFront(); - pNewItem->Value = value; - return pNewItem; -} - -template -VmaListItem* VmaRawList::PushBack(const T& value) -{ - ItemType* const pNewItem = PushBack(); - pNewItem->Value = value; - return pNewItem; -} - -template -void VmaRawList::PopFront() -{ - VMA_HEAVY_ASSERT(m_Count > 0); - ItemType* const pFrontItem = m_pFront; - ItemType* const pNextItem = pFrontItem->pNext; - if (pNextItem != 
VMA_NULL) - { - pNextItem->pPrev = VMA_NULL; - } - m_pFront = pNextItem; - m_ItemAllocator.Free(pFrontItem); - --m_Count; -} - -template -void VmaRawList::PopBack() -{ - VMA_HEAVY_ASSERT(m_Count > 0); - ItemType* const pBackItem = m_pBack; - ItemType* const pPrevItem = pBackItem->pPrev; - if(pPrevItem != VMA_NULL) - { - pPrevItem->pNext = VMA_NULL; - } - m_pBack = pPrevItem; - m_ItemAllocator.Free(pBackItem); - --m_Count; -} - -template -void VmaRawList::Clear() -{ - if (IsEmpty() == false) - { - ItemType* pItem = m_pBack; - while (pItem != VMA_NULL) - { - ItemType* const pPrevItem = pItem->pPrev; - m_ItemAllocator.Free(pItem); - pItem = pPrevItem; - } - m_pFront = VMA_NULL; - m_pBack = VMA_NULL; - m_Count = 0; - } -} - -template -void VmaRawList::Remove(ItemType* pItem) -{ - VMA_HEAVY_ASSERT(pItem != VMA_NULL); - VMA_HEAVY_ASSERT(m_Count > 0); - - if(pItem->pPrev != VMA_NULL) - { - pItem->pPrev->pNext = pItem->pNext; - } - else - { - VMA_HEAVY_ASSERT(m_pFront == pItem); - m_pFront = pItem->pNext; - } - - if(pItem->pNext != VMA_NULL) - { - pItem->pNext->pPrev = pItem->pPrev; - } - else - { - VMA_HEAVY_ASSERT(m_pBack == pItem); - m_pBack = pItem->pPrev; - } - - m_ItemAllocator.Free(pItem); - --m_Count; -} - -template -VmaListItem* VmaRawList::InsertBefore(ItemType* pItem) -{ - if(pItem != VMA_NULL) - { - ItemType* const prevItem = pItem->pPrev; - ItemType* const newItem = m_ItemAllocator.Alloc(); - newItem->pPrev = prevItem; - newItem->pNext = pItem; - pItem->pPrev = newItem; - if(prevItem != VMA_NULL) - { - prevItem->pNext = newItem; - } - else - { - VMA_HEAVY_ASSERT(m_pFront == pItem); - m_pFront = newItem; - } - ++m_Count; - return newItem; - } - else - return PushBack(); -} - -template -VmaListItem* VmaRawList::InsertAfter(ItemType* pItem) -{ - if(pItem != VMA_NULL) - { - ItemType* const nextItem = pItem->pNext; - ItemType* const newItem = m_ItemAllocator.Alloc(); - newItem->pNext = nextItem; - newItem->pPrev = pItem; - pItem->pNext = newItem; - if(nextItem != VMA_NULL) - { - nextItem->pPrev = newItem; - } - else - { - VMA_HEAVY_ASSERT(m_pBack == pItem); - m_pBack = newItem; - } - ++m_Count; - return newItem; - } - else - return PushFront(); -} - -template -VmaListItem* VmaRawList::InsertBefore(ItemType* pItem, const T& value) -{ - ItemType* const newItem = InsertBefore(pItem); - newItem->Value = value; - return newItem; -} - -template -VmaListItem* VmaRawList::InsertAfter(ItemType* pItem, const T& value) -{ - ItemType* const newItem = InsertAfter(pItem); - newItem->Value = value; - return newItem; -} -#endif // _VMA_RAW_LIST_FUNCTIONS -#endif // _VMA_RAW_LIST - -#ifndef _VMA_LIST -template -class VmaList -{ - VMA_CLASS_NO_COPY(VmaList) -public: - class reverse_iterator; - class const_iterator; - class const_reverse_iterator; - - class iterator - { - friend class const_iterator; - friend class VmaList; - public: - iterator() : m_pList(VMA_NULL), m_pItem(VMA_NULL) {} - iterator(const reverse_iterator& src) : m_pList(src.m_pList), m_pItem(src.m_pItem) {} - - T& operator*() const { VMA_HEAVY_ASSERT(m_pItem != VMA_NULL); return m_pItem->Value; } - T* operator->() const { VMA_HEAVY_ASSERT(m_pItem != VMA_NULL); return &m_pItem->Value; } - - bool operator==(const iterator& rhs) const { VMA_HEAVY_ASSERT(m_pList == rhs.m_pList); return m_pItem == rhs.m_pItem; } - bool operator!=(const iterator& rhs) const { VMA_HEAVY_ASSERT(m_pList == rhs.m_pList); return m_pItem != rhs.m_pItem; } - - iterator operator++(int) { iterator result = *this; ++*this; return result; } - iterator operator--(int) { 
iterator result = *this; --*this; return result; } - - iterator& operator++() { VMA_HEAVY_ASSERT(m_pItem != VMA_NULL); m_pItem = m_pItem->pNext; return *this; } - iterator& operator--(); - - private: - VmaRawList* m_pList; - VmaListItem* m_pItem; - - iterator(VmaRawList* pList, VmaListItem* pItem) : m_pList(pList), m_pItem(pItem) {} - }; - class reverse_iterator - { - friend class const_reverse_iterator; - friend class VmaList; - public: - reverse_iterator() : m_pList(VMA_NULL), m_pItem(VMA_NULL) {} - reverse_iterator(const iterator& src) : m_pList(src.m_pList), m_pItem(src.m_pItem) {} - - T& operator*() const { VMA_HEAVY_ASSERT(m_pItem != VMA_NULL); return m_pItem->Value; } - T* operator->() const { VMA_HEAVY_ASSERT(m_pItem != VMA_NULL); return &m_pItem->Value; } - - bool operator==(const reverse_iterator& rhs) const { VMA_HEAVY_ASSERT(m_pList == rhs.m_pList); return m_pItem == rhs.m_pItem; } - bool operator!=(const reverse_iterator& rhs) const { VMA_HEAVY_ASSERT(m_pList == rhs.m_pList); return m_pItem != rhs.m_pItem; } - - reverse_iterator operator++(int) { reverse_iterator result = *this; ++* this; return result; } - reverse_iterator operator--(int) { reverse_iterator result = *this; --* this; return result; } - - reverse_iterator& operator++() { VMA_HEAVY_ASSERT(m_pItem != VMA_NULL); m_pItem = m_pItem->pPrev; return *this; } - reverse_iterator& operator--(); - - private: - VmaRawList* m_pList; - VmaListItem* m_pItem; - - reverse_iterator(VmaRawList* pList, VmaListItem* pItem) : m_pList(pList), m_pItem(pItem) {} - }; - class const_iterator - { - friend class VmaList; - public: - const_iterator() : m_pList(VMA_NULL), m_pItem(VMA_NULL) {} - const_iterator(const iterator& src) : m_pList(src.m_pList), m_pItem(src.m_pItem) {} - const_iterator(const reverse_iterator& src) : m_pList(src.m_pList), m_pItem(src.m_pItem) {} - - iterator drop_const() { return { const_cast*>(m_pList), const_cast*>(m_pItem) }; } - - const T& operator*() const { VMA_HEAVY_ASSERT(m_pItem != VMA_NULL); return m_pItem->Value; } - const T* operator->() const { VMA_HEAVY_ASSERT(m_pItem != VMA_NULL); return &m_pItem->Value; } - - bool operator==(const const_iterator& rhs) const { VMA_HEAVY_ASSERT(m_pList == rhs.m_pList); return m_pItem == rhs.m_pItem; } - bool operator!=(const const_iterator& rhs) const { VMA_HEAVY_ASSERT(m_pList == rhs.m_pList); return m_pItem != rhs.m_pItem; } - - const_iterator operator++(int) { const_iterator result = *this; ++* this; return result; } - const_iterator operator--(int) { const_iterator result = *this; --* this; return result; } - - const_iterator& operator++() { VMA_HEAVY_ASSERT(m_pItem != VMA_NULL); m_pItem = m_pItem->pNext; return *this; } - const_iterator& operator--(); - - private: - const VmaRawList* m_pList; - const VmaListItem* m_pItem; - - const_iterator(const VmaRawList* pList, const VmaListItem* pItem) : m_pList(pList), m_pItem(pItem) {} - }; - class const_reverse_iterator - { - friend class VmaList; - public: - const_reverse_iterator() : m_pList(VMA_NULL), m_pItem(VMA_NULL) {} - const_reverse_iterator(const reverse_iterator& src) : m_pList(src.m_pList), m_pItem(src.m_pItem) {} - const_reverse_iterator(const iterator& src) : m_pList(src.m_pList), m_pItem(src.m_pItem) {} - - reverse_iterator drop_const() { return { const_cast*>(m_pList), const_cast*>(m_pItem) }; } - - const T& operator*() const { VMA_HEAVY_ASSERT(m_pItem != VMA_NULL); return m_pItem->Value; } - const T* operator->() const { VMA_HEAVY_ASSERT(m_pItem != VMA_NULL); return &m_pItem->Value; } - - bool operator==(const 
const_reverse_iterator& rhs) const { VMA_HEAVY_ASSERT(m_pList == rhs.m_pList); return m_pItem == rhs.m_pItem; } - bool operator!=(const const_reverse_iterator& rhs) const { VMA_HEAVY_ASSERT(m_pList == rhs.m_pList); return m_pItem != rhs.m_pItem; } - - const_reverse_iterator operator++(int) { const_reverse_iterator result = *this; ++* this; return result; } - const_reverse_iterator operator--(int) { const_reverse_iterator result = *this; --* this; return result; } - - const_reverse_iterator& operator++() { VMA_HEAVY_ASSERT(m_pItem != VMA_NULL); m_pItem = m_pItem->pPrev; return *this; } - const_reverse_iterator& operator--(); - - private: - const VmaRawList* m_pList; - const VmaListItem* m_pItem; - - const_reverse_iterator(const VmaRawList* pList, const VmaListItem* pItem) : m_pList(pList), m_pItem(pItem) {} - }; - - VmaList(const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) {} - - bool empty() const { return m_RawList.IsEmpty(); } - size_t size() const { return m_RawList.GetCount(); } - - iterator begin() { return iterator(&m_RawList, m_RawList.Front()); } - iterator end() { return iterator(&m_RawList, VMA_NULL); } - - const_iterator cbegin() const { return const_iterator(&m_RawList, m_RawList.Front()); } - const_iterator cend() const { return const_iterator(&m_RawList, VMA_NULL); } - - const_iterator begin() const { return cbegin(); } - const_iterator end() const { return cend(); } - - reverse_iterator rbegin() { return reverse_iterator(&m_RawList, m_RawList.Back()); } - reverse_iterator rend() { return reverse_iterator(&m_RawList, VMA_NULL); } - - const_reverse_iterator crbegin() const { return const_reverse_iterator(&m_RawList, m_RawList.Back()); } - const_reverse_iterator crend() const { return const_reverse_iterator(&m_RawList, VMA_NULL); } - - const_reverse_iterator rbegin() const { return crbegin(); } - const_reverse_iterator rend() const { return crend(); } - - void push_back(const T& value) { m_RawList.PushBack(value); } - iterator insert(iterator it, const T& value) { return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); } - - void clear() { m_RawList.Clear(); } - void erase(iterator it) { m_RawList.Remove(it.m_pItem); } - -private: - VmaRawList m_RawList; -}; - -#ifndef _VMA_LIST_FUNCTIONS -template -typename VmaList::iterator& VmaList::iterator::operator--() -{ - if (m_pItem != VMA_NULL) - { - m_pItem = m_pItem->pPrev; - } - else - { - VMA_HEAVY_ASSERT(!m_pList->IsEmpty()); - m_pItem = m_pList->Back(); - } - return *this; -} - -template -typename VmaList::reverse_iterator& VmaList::reverse_iterator::operator--() -{ - if (m_pItem != VMA_NULL) - { - m_pItem = m_pItem->pNext; - } - else - { - VMA_HEAVY_ASSERT(!m_pList->IsEmpty()); - m_pItem = m_pList->Front(); - } - return *this; -} - -template -typename VmaList::const_iterator& VmaList::const_iterator::operator--() -{ - if (m_pItem != VMA_NULL) - { - m_pItem = m_pItem->pPrev; - } - else - { - VMA_HEAVY_ASSERT(!m_pList->IsEmpty()); - m_pItem = m_pList->Back(); - } - return *this; -} - -template -typename VmaList::const_reverse_iterator& VmaList::const_reverse_iterator::operator--() -{ - if (m_pItem != VMA_NULL) - { - m_pItem = m_pItem->pNext; - } - else - { - VMA_HEAVY_ASSERT(!m_pList->IsEmpty()); - m_pItem = m_pList->Back(); - } - return *this; -} -#endif // _VMA_LIST_FUNCTIONS -#endif // _VMA_LIST - -#ifndef _VMA_INTRUSIVE_LINKED_LIST -/* -Expected interface of ItemTypeTraits: -struct MyItemTypeTraits -{ - typedef MyItem ItemType; - static ItemType* GetPrev(const ItemType* item) { return 
item->myPrevPtr; } - static ItemType* GetNext(const ItemType* item) { return item->myNextPtr; } - static ItemType*& AccessPrev(ItemType* item) { return item->myPrevPtr; } - static ItemType*& AccessNext(ItemType* item) { return item->myNextPtr; } -}; -*/ -template -class VmaIntrusiveLinkedList -{ -public: - typedef typename ItemTypeTraits::ItemType ItemType; - static ItemType* GetPrev(const ItemType* item) { return ItemTypeTraits::GetPrev(item); } - static ItemType* GetNext(const ItemType* item) { return ItemTypeTraits::GetNext(item); } - - // Movable, not copyable. - VmaIntrusiveLinkedList() = default; - VmaIntrusiveLinkedList(VmaIntrusiveLinkedList && src); - VmaIntrusiveLinkedList(const VmaIntrusiveLinkedList&) = delete; - VmaIntrusiveLinkedList& operator=(VmaIntrusiveLinkedList&& src); - VmaIntrusiveLinkedList& operator=(const VmaIntrusiveLinkedList&) = delete; - ~VmaIntrusiveLinkedList() { VMA_HEAVY_ASSERT(IsEmpty()); } - - size_t GetCount() const { return m_Count; } - bool IsEmpty() const { return m_Count == 0; } - ItemType* Front() { return m_Front; } - ItemType* Back() { return m_Back; } - const ItemType* Front() const { return m_Front; } - const ItemType* Back() const { return m_Back; } - - void PushBack(ItemType* item); - void PushFront(ItemType* item); - ItemType* PopBack(); - ItemType* PopFront(); - - // MyItem can be null - it means PushBack. - void InsertBefore(ItemType* existingItem, ItemType* newItem); - // MyItem can be null - it means PushFront. - void InsertAfter(ItemType* existingItem, ItemType* newItem); - void Remove(ItemType* item); - void RemoveAll(); - -private: - ItemType* m_Front = VMA_NULL; - ItemType* m_Back = VMA_NULL; - size_t m_Count = 0; -}; - -#ifndef _VMA_INTRUSIVE_LINKED_LIST_FUNCTIONS -template -VmaIntrusiveLinkedList::VmaIntrusiveLinkedList(VmaIntrusiveLinkedList&& src) - : m_Front(src.m_Front), m_Back(src.m_Back), m_Count(src.m_Count) -{ - src.m_Front = src.m_Back = VMA_NULL; - src.m_Count = 0; -} - -template -VmaIntrusiveLinkedList& VmaIntrusiveLinkedList::operator=(VmaIntrusiveLinkedList&& src) -{ - if (&src != this) - { - VMA_HEAVY_ASSERT(IsEmpty()); - m_Front = src.m_Front; - m_Back = src.m_Back; - m_Count = src.m_Count; - src.m_Front = src.m_Back = VMA_NULL; - src.m_Count = 0; - } - return *this; -} - -template -void VmaIntrusiveLinkedList::PushBack(ItemType* item) -{ - VMA_HEAVY_ASSERT(ItemTypeTraits::GetPrev(item) == VMA_NULL && ItemTypeTraits::GetNext(item) == VMA_NULL); - if (IsEmpty()) - { - m_Front = item; - m_Back = item; - m_Count = 1; - } - else - { - ItemTypeTraits::AccessPrev(item) = m_Back; - ItemTypeTraits::AccessNext(m_Back) = item; - m_Back = item; - ++m_Count; - } -} - -template -void VmaIntrusiveLinkedList::PushFront(ItemType* item) -{ - VMA_HEAVY_ASSERT(ItemTypeTraits::GetPrev(item) == VMA_NULL && ItemTypeTraits::GetNext(item) == VMA_NULL); - if (IsEmpty()) - { - m_Front = item; - m_Back = item; - m_Count = 1; - } - else - { - ItemTypeTraits::AccessNext(item) = m_Front; - ItemTypeTraits::AccessPrev(m_Front) = item; - m_Front = item; - ++m_Count; - } -} - -template -typename VmaIntrusiveLinkedList::ItemType* VmaIntrusiveLinkedList::PopBack() -{ - VMA_HEAVY_ASSERT(m_Count > 0); - ItemType* const backItem = m_Back; - ItemType* const prevItem = ItemTypeTraits::GetPrev(backItem); - if (prevItem != VMA_NULL) - { - ItemTypeTraits::AccessNext(prevItem) = VMA_NULL; - } - m_Back = prevItem; - --m_Count; - ItemTypeTraits::AccessPrev(backItem) = VMA_NULL; - ItemTypeTraits::AccessNext(backItem) = VMA_NULL; - return backItem; -} - -template 
-typename VmaIntrusiveLinkedList::ItemType* VmaIntrusiveLinkedList::PopFront() -{ - VMA_HEAVY_ASSERT(m_Count > 0); - ItemType* const frontItem = m_Front; - ItemType* const nextItem = ItemTypeTraits::GetNext(frontItem); - if (nextItem != VMA_NULL) - { - ItemTypeTraits::AccessPrev(nextItem) = VMA_NULL; - } - m_Front = nextItem; - --m_Count; - ItemTypeTraits::AccessPrev(frontItem) = VMA_NULL; - ItemTypeTraits::AccessNext(frontItem) = VMA_NULL; - return frontItem; -} - -template -void VmaIntrusiveLinkedList::InsertBefore(ItemType* existingItem, ItemType* newItem) -{ - VMA_HEAVY_ASSERT(newItem != VMA_NULL && ItemTypeTraits::GetPrev(newItem) == VMA_NULL && ItemTypeTraits::GetNext(newItem) == VMA_NULL); - if (existingItem != VMA_NULL) - { - ItemType* const prevItem = ItemTypeTraits::GetPrev(existingItem); - ItemTypeTraits::AccessPrev(newItem) = prevItem; - ItemTypeTraits::AccessNext(newItem) = existingItem; - ItemTypeTraits::AccessPrev(existingItem) = newItem; - if (prevItem != VMA_NULL) - { - ItemTypeTraits::AccessNext(prevItem) = newItem; - } - else - { - VMA_HEAVY_ASSERT(m_Front == existingItem); - m_Front = newItem; - } - ++m_Count; - } - else - PushBack(newItem); -} - -template -void VmaIntrusiveLinkedList::InsertAfter(ItemType* existingItem, ItemType* newItem) -{ - VMA_HEAVY_ASSERT(newItem != VMA_NULL && ItemTypeTraits::GetPrev(newItem) == VMA_NULL && ItemTypeTraits::GetNext(newItem) == VMA_NULL); - if (existingItem != VMA_NULL) - { - ItemType* const nextItem = ItemTypeTraits::GetNext(existingItem); - ItemTypeTraits::AccessNext(newItem) = nextItem; - ItemTypeTraits::AccessPrev(newItem) = existingItem; - ItemTypeTraits::AccessNext(existingItem) = newItem; - if (nextItem != VMA_NULL) - { - ItemTypeTraits::AccessPrev(nextItem) = newItem; - } - else - { - VMA_HEAVY_ASSERT(m_Back == existingItem); - m_Back = newItem; - } - ++m_Count; - } - else - return PushFront(newItem); -} - -template -void VmaIntrusiveLinkedList::Remove(ItemType* item) -{ - VMA_HEAVY_ASSERT(item != VMA_NULL && m_Count > 0); - if (ItemTypeTraits::GetPrev(item) != VMA_NULL) - { - ItemTypeTraits::AccessNext(ItemTypeTraits::AccessPrev(item)) = ItemTypeTraits::GetNext(item); - } - else - { - VMA_HEAVY_ASSERT(m_Front == item); - m_Front = ItemTypeTraits::GetNext(item); - } - - if (ItemTypeTraits::GetNext(item) != VMA_NULL) - { - ItemTypeTraits::AccessPrev(ItemTypeTraits::AccessNext(item)) = ItemTypeTraits::GetPrev(item); - } - else - { - VMA_HEAVY_ASSERT(m_Back == item); - m_Back = ItemTypeTraits::GetPrev(item); - } - ItemTypeTraits::AccessPrev(item) = VMA_NULL; - ItemTypeTraits::AccessNext(item) = VMA_NULL; - --m_Count; -} - -template -void VmaIntrusiveLinkedList::RemoveAll() -{ - if (!IsEmpty()) - { - ItemType* item = m_Back; - while (item != VMA_NULL) - { - ItemType* const prevItem = ItemTypeTraits::AccessPrev(item); - ItemTypeTraits::AccessPrev(item) = VMA_NULL; - ItemTypeTraits::AccessNext(item) = VMA_NULL; - item = prevItem; - } - m_Front = VMA_NULL; - m_Back = VMA_NULL; - m_Count = 0; - } -} -#endif // _VMA_INTRUSIVE_LINKED_LIST_FUNCTIONS -#endif // _VMA_INTRUSIVE_LINKED_LIST - -// Unused in this version. 
-#if 0 - -#ifndef _VMA_PAIR -template -struct VmaPair -{ - T1 first; - T2 second; - - VmaPair() : first(), second() {} - VmaPair(const T1& firstSrc, const T2& secondSrc) : first(firstSrc), second(secondSrc) {} -}; - -template -struct VmaPairFirstLess -{ - bool operator()(const VmaPair& lhs, const VmaPair& rhs) const - { - return lhs.first < rhs.first; - } - bool operator()(const VmaPair& lhs, const FirstT& rhsFirst) const - { - return lhs.first < rhsFirst; - } -}; -#endif // _VMA_PAIR - -#ifndef _VMA_MAP -/* Class compatible with subset of interface of std::unordered_map. -KeyT, ValueT must be POD because they will be stored in VmaVector. -*/ -template -class VmaMap -{ -public: - typedef VmaPair PairType; - typedef PairType* iterator; - - VmaMap(const VmaStlAllocator& allocator) : m_Vector(allocator) {} - - iterator begin() { return m_Vector.begin(); } - iterator end() { return m_Vector.end(); } - size_t size() { return m_Vector.size(); } - - void insert(const PairType& pair); - iterator find(const KeyT& key); - void erase(iterator it); - -private: - VmaVector< PairType, VmaStlAllocator> m_Vector; -}; - -#ifndef _VMA_MAP_FUNCTIONS -template -void VmaMap::insert(const PairType& pair) -{ - const size_t indexToInsert = VmaBinaryFindFirstNotLess( - m_Vector.data(), - m_Vector.data() + m_Vector.size(), - pair, - VmaPairFirstLess()) - m_Vector.data(); - VmaVectorInsert(m_Vector, indexToInsert, pair); -} - -template -VmaPair* VmaMap::find(const KeyT& key) -{ - PairType* it = VmaBinaryFindFirstNotLess( - m_Vector.data(), - m_Vector.data() + m_Vector.size(), - key, - VmaPairFirstLess()); - if ((it != m_Vector.end()) && (it->first == key)) - { - return it; - } - else - { - return m_Vector.end(); - } -} - -template -void VmaMap::erase(iterator it) -{ - VmaVectorRemove(m_Vector, it - m_Vector.begin()); -} -#endif // _VMA_MAP_FUNCTIONS -#endif // _VMA_MAP - -#endif // #if 0 - -#if !defined(_VMA_STRING_BUILDER) && VMA_STATS_STRING_ENABLED -class VmaStringBuilder -{ -public: - VmaStringBuilder(const VkAllocationCallbacks* allocationCallbacks) : m_Data(VmaStlAllocator(allocationCallbacks)) {} - ~VmaStringBuilder() = default; - - size_t GetLength() const { return m_Data.size(); } - const char* GetData() const { return m_Data.data(); } - void AddNewLine() { Add('\n'); } - void Add(char ch) { m_Data.push_back(ch); } - - void Add(const char* pStr); - void AddNumber(uint32_t num); - void AddNumber(uint64_t num); - void AddPointer(const void* ptr); - -private: - VmaVector> m_Data; -}; - -#ifndef _VMA_STRING_BUILDER_FUNCTIONS -void VmaStringBuilder::Add(const char* pStr) -{ - const size_t strLen = strlen(pStr); - if (strLen > 0) - { - const size_t oldCount = m_Data.size(); - m_Data.resize(oldCount + strLen); - memcpy(m_Data.data() + oldCount, pStr, strLen); - } -} - -void VmaStringBuilder::AddNumber(uint32_t num) -{ - char buf[11]; - buf[10] = '\0'; - char* p = &buf[10]; - do - { - *--p = '0' + (num % 10); - num /= 10; - } while (num); - Add(p); -} - -void VmaStringBuilder::AddNumber(uint64_t num) -{ - char buf[21]; - buf[20] = '\0'; - char* p = &buf[20]; - do - { - *--p = '0' + (num % 10); - num /= 10; - } while (num); - Add(p); -} - -void VmaStringBuilder::AddPointer(const void* ptr) -{ - char buf[21]; - VmaPtrToStr(buf, sizeof(buf), ptr); - Add(buf); -} -#endif //_VMA_STRING_BUILDER_FUNCTIONS -#endif // _VMA_STRING_BUILDER - -#if !defined(_VMA_JSON_WRITER) && VMA_STATS_STRING_ENABLED -/* -Allows to conveniently build a correct JSON document to be written to the -VmaStringBuilder passed to the constructor. 
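// Illustrative sketch: the fill-the-buffer-backwards decimal formatting used by the
// string builder above, shown standalone. FormatUint64 is an invented name; buf is
// sized for the largest uint64_t (20 digits) plus the terminating '\0'.
#include <cassert>
#include <cstdint>
#include <cstring>

static const char* FormatUint64(uint64_t num, char (&buf)[21])
{
    char* p = &buf[20];
    *p = '\0';
    do
    {
        *--p = static_cast<char>('0' + (num % 10)); // Emit the least-significant digit first...
        num /= 10;
    } while (num);                                  // ...so the string ends up in the right order.
    return p;                                       // Points somewhere inside buf.
}

int main()
{
    char buf[21];
    assert(std::strcmp(FormatUint64(0, buf), "0") == 0);
    assert(std::strcmp(FormatUint64(18446744073709551615ull, buf), "18446744073709551615") == 0);
    return 0;
}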
-*/ -class VmaJsonWriter -{ - VMA_CLASS_NO_COPY(VmaJsonWriter) -public: - // sb - string builder to write the document to. Must remain alive for the whole lifetime of this object. - VmaJsonWriter(const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb); - ~VmaJsonWriter(); - - // Begins object by writing "{". - // Inside an object, you must call pairs of WriteString and a value, e.g.: - // j.BeginObject(true); j.WriteString("A"); j.WriteNumber(1); j.WriteString("B"); j.WriteNumber(2); j.EndObject(); - // Will write: { "A": 1, "B": 2 } - void BeginObject(bool singleLine = false); - // Ends object by writing "}". - void EndObject(); - - // Begins array by writing "[". - // Inside an array, you can write a sequence of any values. - void BeginArray(bool singleLine = false); - // Ends array by writing "[". - void EndArray(); - - // Writes a string value inside "". - // pStr can contain any ANSI characters, including '"', new line etc. - they will be properly escaped. - void WriteString(const char* pStr); - - // Begins writing a string value. - // Call BeginString, ContinueString, ContinueString, ..., EndString instead of - // WriteString to conveniently build the string content incrementally, made of - // parts including numbers. - void BeginString(const char* pStr = VMA_NULL); - // Posts next part of an open string. - void ContinueString(const char* pStr); - // Posts next part of an open string. The number is converted to decimal characters. - void ContinueString(uint32_t n); - void ContinueString(uint64_t n); - void ContinueString_Size(size_t n); - // Posts next part of an open string. Pointer value is converted to characters - // using "%p" formatting - shown as hexadecimal number, e.g.: 000000081276Ad00 - void ContinueString_Pointer(const void* ptr); - // Ends writing a string value by writing '"'. - void EndString(const char* pStr = VMA_NULL); - - // Writes a number value. - void WriteNumber(uint32_t n); - void WriteNumber(uint64_t n); - void WriteSize(size_t n); - // Writes a boolean value - false or true. - void WriteBool(bool b); - // Writes a null value. 
- void WriteNull(); - -private: - enum COLLECTION_TYPE - { - COLLECTION_TYPE_OBJECT, - COLLECTION_TYPE_ARRAY, - }; - struct StackItem - { - COLLECTION_TYPE type; - uint32_t valueCount; - bool singleLineMode; - }; - - static const char* const INDENT; - - VmaStringBuilder& m_SB; - VmaVector< StackItem, VmaStlAllocator > m_Stack; - bool m_InsideString; - - // Write size_t for less than 64bits - void WriteSize(size_t n, std::integral_constant) { m_SB.AddNumber(static_cast(n)); } - // Write size_t for 64bits - void WriteSize(size_t n, std::integral_constant) { m_SB.AddNumber(static_cast(n)); } - - void BeginValue(bool isString); - void WriteIndent(bool oneLess = false); -}; -const char* const VmaJsonWriter::INDENT = " "; - -#ifndef _VMA_JSON_WRITER_FUNCTIONS -VmaJsonWriter::VmaJsonWriter(const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) - : m_SB(sb), - m_Stack(VmaStlAllocator(pAllocationCallbacks)), - m_InsideString(false) {} - -VmaJsonWriter::~VmaJsonWriter() -{ - VMA_ASSERT(!m_InsideString); - VMA_ASSERT(m_Stack.empty()); -} - -void VmaJsonWriter::BeginObject(bool singleLine) -{ - VMA_ASSERT(!m_InsideString); - - BeginValue(false); - m_SB.Add('{'); - - StackItem item; - item.type = COLLECTION_TYPE_OBJECT; - item.valueCount = 0; - item.singleLineMode = singleLine; - m_Stack.push_back(item); -} - -void VmaJsonWriter::EndObject() -{ - VMA_ASSERT(!m_InsideString); - - WriteIndent(true); - m_SB.Add('}'); - - VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT); - m_Stack.pop_back(); -} - -void VmaJsonWriter::BeginArray(bool singleLine) -{ - VMA_ASSERT(!m_InsideString); - - BeginValue(false); - m_SB.Add('['); - - StackItem item; - item.type = COLLECTION_TYPE_ARRAY; - item.valueCount = 0; - item.singleLineMode = singleLine; - m_Stack.push_back(item); -} - -void VmaJsonWriter::EndArray() -{ - VMA_ASSERT(!m_InsideString); - - WriteIndent(true); - m_SB.Add(']'); - - VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY); - m_Stack.pop_back(); -} - -void VmaJsonWriter::WriteString(const char* pStr) -{ - BeginString(pStr); - EndString(); -} - -void VmaJsonWriter::BeginString(const char* pStr) -{ - VMA_ASSERT(!m_InsideString); - - BeginValue(true); - m_SB.Add('"'); - m_InsideString = true; - if (pStr != VMA_NULL && pStr[0] != '\0') - { - ContinueString(pStr); - } -} - -void VmaJsonWriter::ContinueString(const char* pStr) -{ - VMA_ASSERT(m_InsideString); - - const size_t strLen = strlen(pStr); - for (size_t i = 0; i < strLen; ++i) - { - char ch = pStr[i]; - if (ch == '\\') - { - m_SB.Add("\\\\"); - } - else if (ch == '"') - { - m_SB.Add("\\\""); - } - else if (ch >= 32) - { - m_SB.Add(ch); - } - else switch (ch) - { - case '\b': - m_SB.Add("\\b"); - break; - case '\f': - m_SB.Add("\\f"); - break; - case '\n': - m_SB.Add("\\n"); - break; - case '\r': - m_SB.Add("\\r"); - break; - case '\t': - m_SB.Add("\\t"); - break; - default: - VMA_ASSERT(0 && "Character not currently supported."); - break; - } - } -} - -void VmaJsonWriter::ContinueString(uint32_t n) -{ - VMA_ASSERT(m_InsideString); - m_SB.AddNumber(n); -} - -void VmaJsonWriter::ContinueString(uint64_t n) -{ - VMA_ASSERT(m_InsideString); - m_SB.AddNumber(n); -} - -void VmaJsonWriter::ContinueString_Size(size_t n) -{ - VMA_ASSERT(m_InsideString); - // Fix for AppleClang incorrect type casting - // TODO: Change to if constexpr when C++17 used as minimal standard - WriteSize(n, std::is_same{}); -} - -void VmaJsonWriter::ContinueString_Pointer(const void* ptr) -{ - 
VMA_ASSERT(m_InsideString); - m_SB.AddPointer(ptr); -} - -void VmaJsonWriter::EndString(const char* pStr) -{ - VMA_ASSERT(m_InsideString); - if (pStr != VMA_NULL && pStr[0] != '\0') - { - ContinueString(pStr); - } - m_SB.Add('"'); - m_InsideString = false; -} - -void VmaJsonWriter::WriteNumber(uint32_t n) -{ - VMA_ASSERT(!m_InsideString); - BeginValue(false); - m_SB.AddNumber(n); -} - -void VmaJsonWriter::WriteNumber(uint64_t n) -{ - VMA_ASSERT(!m_InsideString); - BeginValue(false); - m_SB.AddNumber(n); -} - -void VmaJsonWriter::WriteSize(size_t n) -{ - VMA_ASSERT(!m_InsideString); - BeginValue(false); - // Fix for AppleClang incorrect type casting - // TODO: Change to if constexpr when C++17 used as minimal standard - WriteSize(n, std::is_same{}); -} - -void VmaJsonWriter::WriteBool(bool b) -{ - VMA_ASSERT(!m_InsideString); - BeginValue(false); - m_SB.Add(b ? "true" : "false"); -} - -void VmaJsonWriter::WriteNull() -{ - VMA_ASSERT(!m_InsideString); - BeginValue(false); - m_SB.Add("null"); -} - -void VmaJsonWriter::BeginValue(bool isString) -{ - if (!m_Stack.empty()) - { - StackItem& currItem = m_Stack.back(); - if (currItem.type == COLLECTION_TYPE_OBJECT && - currItem.valueCount % 2 == 0) - { - VMA_ASSERT(isString); - } - - if (currItem.type == COLLECTION_TYPE_OBJECT && - currItem.valueCount % 2 != 0) - { - m_SB.Add(": "); - } - else if (currItem.valueCount > 0) - { - m_SB.Add(", "); - WriteIndent(); - } - else - { - WriteIndent(); - } - ++currItem.valueCount; - } -} - -void VmaJsonWriter::WriteIndent(bool oneLess) -{ - if (!m_Stack.empty() && !m_Stack.back().singleLineMode) - { - m_SB.AddNewLine(); - - size_t count = m_Stack.size(); - if (count > 0 && oneLess) - { - --count; - } - for (size_t i = 0; i < count; ++i) - { - m_SB.Add(INDENT); - } - } -} -#endif // _VMA_JSON_WRITER_FUNCTIONS - -static void VmaPrintDetailedStatistics(VmaJsonWriter& json, const VmaDetailedStatistics& stat) -{ - json.BeginObject(); - - json.WriteString("BlockCount"); - json.WriteNumber(stat.statistics.blockCount); - json.WriteString("BlockBytes"); - json.WriteNumber(stat.statistics.blockBytes); - json.WriteString("AllocationCount"); - json.WriteNumber(stat.statistics.allocationCount); - json.WriteString("AllocationBytes"); - json.WriteNumber(stat.statistics.allocationBytes); - json.WriteString("UnusedRangeCount"); - json.WriteNumber(stat.unusedRangeCount); - - if (stat.statistics.allocationCount > 1) - { - json.WriteString("AllocationSizeMin"); - json.WriteNumber(stat.allocationSizeMin); - json.WriteString("AllocationSizeMax"); - json.WriteNumber(stat.allocationSizeMax); - } - if (stat.unusedRangeCount > 1) - { - json.WriteString("UnusedRangeSizeMin"); - json.WriteNumber(stat.unusedRangeSizeMin); - json.WriteString("UnusedRangeSizeMax"); - json.WriteNumber(stat.unusedRangeSizeMax); - } - json.EndObject(); -} -#endif // _VMA_JSON_WRITER - -#ifndef _VMA_MAPPING_HYSTERESIS - -class VmaMappingHysteresis -{ - VMA_CLASS_NO_COPY(VmaMappingHysteresis) -public: - VmaMappingHysteresis() = default; - - uint32_t GetExtraMapping() const { return m_ExtraMapping; } - - // Call when Map was called. - // Returns true if switched to extra +1 mapping reference count. 
- bool PostMap() - { -#if VMA_MAPPING_HYSTERESIS_ENABLED - if(m_ExtraMapping == 0) - { - ++m_MajorCounter; - if(m_MajorCounter >= COUNTER_MIN_EXTRA_MAPPING) - { - m_ExtraMapping = 1; - m_MajorCounter = 0; - m_MinorCounter = 0; - return true; - } - } - else // m_ExtraMapping == 1 - PostMinorCounter(); -#endif // #if VMA_MAPPING_HYSTERESIS_ENABLED - return false; - } - - // Call when Unmap was called. - void PostUnmap() - { -#if VMA_MAPPING_HYSTERESIS_ENABLED - if(m_ExtraMapping == 0) - ++m_MajorCounter; - else // m_ExtraMapping == 1 - PostMinorCounter(); -#endif // #if VMA_MAPPING_HYSTERESIS_ENABLED - } - - // Call when allocation was made from the memory block. - void PostAlloc() - { -#if VMA_MAPPING_HYSTERESIS_ENABLED - if(m_ExtraMapping == 1) - ++m_MajorCounter; - else // m_ExtraMapping == 0 - PostMinorCounter(); -#endif // #if VMA_MAPPING_HYSTERESIS_ENABLED - } - - // Call when allocation was freed from the memory block. - // Returns true if switched to extra -1 mapping reference count. - bool PostFree() - { -#if VMA_MAPPING_HYSTERESIS_ENABLED - if(m_ExtraMapping == 1) - { - ++m_MajorCounter; - if(m_MajorCounter >= COUNTER_MIN_EXTRA_MAPPING && - m_MajorCounter > m_MinorCounter + 1) - { - m_ExtraMapping = 0; - m_MajorCounter = 0; - m_MinorCounter = 0; - return true; - } - } - else // m_ExtraMapping == 0 - PostMinorCounter(); -#endif // #if VMA_MAPPING_HYSTERESIS_ENABLED - return false; - } - -private: - static const int32_t COUNTER_MIN_EXTRA_MAPPING = 7; - - uint32_t m_MinorCounter = 0; - uint32_t m_MajorCounter = 0; - uint32_t m_ExtraMapping = 0; // 0 or 1. - - void PostMinorCounter() - { - if(m_MinorCounter < m_MajorCounter) - { - ++m_MinorCounter; - } - else if(m_MajorCounter > 0) - { - --m_MajorCounter; - --m_MinorCounter; - } - } -}; - -#endif // _VMA_MAPPING_HYSTERESIS - -#ifndef _VMA_DEVICE_MEMORY_BLOCK -/* -Represents a single block of device memory (`VkDeviceMemory`) with all the -data about its regions (aka suballocations, #VmaAllocation), assigned and free. - -Thread-safety: -- Access to m_pMetadata must be externally synchronized. -- Map, Unmap, Bind* are synchronized internally. -*/ -class VmaDeviceMemoryBlock -{ - VMA_CLASS_NO_COPY(VmaDeviceMemoryBlock) -public: - VmaBlockMetadata* m_pMetadata; - - VmaDeviceMemoryBlock(VmaAllocator hAllocator); - ~VmaDeviceMemoryBlock(); - - // Always call after construction. - void Init( - VmaAllocator hAllocator, - VmaPool hParentPool, - uint32_t newMemoryTypeIndex, - VkDeviceMemory newMemory, - VkDeviceSize newSize, - uint32_t id, - uint32_t algorithm, - VkDeviceSize bufferImageGranularity); - // Always call before destruction. - void Destroy(VmaAllocator allocator); - - VmaPool GetParentPool() const { return m_hParentPool; } - VkDeviceMemory GetDeviceMemory() const { return m_hMemory; } - uint32_t GetMemoryTypeIndex() const { return m_MemoryTypeIndex; } - uint32_t GetId() const { return m_Id; } - void* GetMappedData() const { return m_pMappedData; } - uint32_t GetMapRefCount() const { return m_MapCount; } - - // Call when allocation/free was made from m_pMetadata. - // Used for m_MappingHysteresis. - void PostAlloc() { m_MappingHysteresis.PostAlloc(); } - void PostFree(VmaAllocator hAllocator); - - // Validates all data structures inside this object. If not valid, returns false. - bool Validate() const; - VkResult CheckCorruption(VmaAllocator hAllocator); - - // ppData can be null. 
- VkResult Map(VmaAllocator hAllocator, uint32_t count, void** ppData); - void Unmap(VmaAllocator hAllocator, uint32_t count); - - VkResult WriteMagicValueAfterAllocation(VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize); - VkResult ValidateMagicValueAfterAllocation(VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize); - - VkResult BindBufferMemory( - const VmaAllocator hAllocator, - const VmaAllocation hAllocation, - VkDeviceSize allocationLocalOffset, - VkBuffer hBuffer, - const void* pNext); - VkResult BindImageMemory( - const VmaAllocator hAllocator, - const VmaAllocation hAllocation, - VkDeviceSize allocationLocalOffset, - VkImage hImage, - const void* pNext); - -private: - VmaPool m_hParentPool; // VK_NULL_HANDLE if not belongs to custom pool. - uint32_t m_MemoryTypeIndex; - uint32_t m_Id; - VkDeviceMemory m_hMemory; - - /* - Protects access to m_hMemory so it is not used by multiple threads simultaneously, e.g. vkMapMemory, vkBindBufferMemory. - Also protects m_MapCount, m_pMappedData. - Allocations, deallocations, any change in m_pMetadata is protected by parent's VmaBlockVector::m_Mutex. - */ - VMA_MUTEX m_MapAndBindMutex; - VmaMappingHysteresis m_MappingHysteresis; - uint32_t m_MapCount; - void* m_pMappedData; -}; -#endif // _VMA_DEVICE_MEMORY_BLOCK - -#ifndef _VMA_ALLOCATION_T -struct VmaAllocation_T -{ - friend struct VmaDedicatedAllocationListItemTraits; - - enum FLAGS - { - FLAG_PERSISTENT_MAP = 0x01, - FLAG_MAPPING_ALLOWED = 0x02, - }; - -public: - enum ALLOCATION_TYPE - { - ALLOCATION_TYPE_NONE, - ALLOCATION_TYPE_BLOCK, - ALLOCATION_TYPE_DEDICATED, - }; - - // This struct is allocated using VmaPoolAllocator. - VmaAllocation_T(bool mappingAllowed); - ~VmaAllocation_T(); - - void InitBlockAllocation( - VmaDeviceMemoryBlock* block, - VmaAllocHandle allocHandle, - VkDeviceSize alignment, - VkDeviceSize size, - uint32_t memoryTypeIndex, - VmaSuballocationType suballocationType, - bool mapped); - // pMappedData not null means allocation is created with MAPPED flag. 
- void InitDedicatedAllocation( - VmaPool hParentPool, - uint32_t memoryTypeIndex, - VkDeviceMemory hMemory, - VmaSuballocationType suballocationType, - void* pMappedData, - VkDeviceSize size); - - ALLOCATION_TYPE GetType() const { return (ALLOCATION_TYPE)m_Type; } - VkDeviceSize GetAlignment() const { return m_Alignment; } - VkDeviceSize GetSize() const { return m_Size; } - void* GetUserData() const { return m_pUserData; } - const char* GetName() const { return m_pName; } - VmaSuballocationType GetSuballocationType() const { return (VmaSuballocationType)m_SuballocationType; } - - VmaDeviceMemoryBlock* GetBlock() const { VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK); return m_BlockAllocation.m_Block; } - uint32_t GetMemoryTypeIndex() const { return m_MemoryTypeIndex; } - bool IsPersistentMap() const { return (m_Flags & FLAG_PERSISTENT_MAP) != 0; } - bool IsMappingAllowed() const { return (m_Flags & FLAG_MAPPING_ALLOWED) != 0; } - - void SetUserData(VmaAllocator hAllocator, void* pUserData) { m_pUserData = pUserData; } - void SetName(VmaAllocator hAllocator, const char* pName); - void FreeName(VmaAllocator hAllocator); - uint8_t SwapBlockAllocation(VmaAllocator hAllocator, VmaAllocation allocation); - VmaAllocHandle GetAllocHandle() const; - VkDeviceSize GetOffset() const; - VmaPool GetParentPool() const; - VkDeviceMemory GetMemory() const; - void* GetMappedData() const; - - void BlockAllocMap(); - void BlockAllocUnmap(); - VkResult DedicatedAllocMap(VmaAllocator hAllocator, void** ppData); - void DedicatedAllocUnmap(VmaAllocator hAllocator); - -#if VMA_STATS_STRING_ENABLED - uint32_t GetBufferImageUsage() const { return m_BufferImageUsage; } - - void InitBufferImageUsage(uint32_t bufferImageUsage); - void PrintParameters(class VmaJsonWriter& json) const; -#endif - -private: - // Allocation out of VmaDeviceMemoryBlock. - struct BlockAllocation - { - VmaDeviceMemoryBlock* m_Block; - VmaAllocHandle m_AllocHandle; - }; - // Allocation for an object that has its own private VkDeviceMemory. - struct DedicatedAllocation - { - VmaPool m_hParentPool; // VK_NULL_HANDLE if not belongs to custom pool. - VkDeviceMemory m_hMemory; - void* m_pMappedData; // Not null means memory is mapped. - VmaAllocation_T* m_Prev; - VmaAllocation_T* m_Next; - }; - union - { - // Allocation out of VmaDeviceMemoryBlock. - BlockAllocation m_BlockAllocation; - // Allocation for an object that has its own private VkDeviceMemory. - DedicatedAllocation m_DedicatedAllocation; - }; - - VkDeviceSize m_Alignment; - VkDeviceSize m_Size; - void* m_pUserData; - char* m_pName; - uint32_t m_MemoryTypeIndex; - uint8_t m_Type; // ALLOCATION_TYPE - uint8_t m_SuballocationType; // VmaSuballocationType - // Reference counter for vmaMapMemory()/vmaUnmapMemory(). - uint8_t m_MapCount; - uint8_t m_Flags; // enum FLAGS -#if VMA_STATS_STRING_ENABLED - uint32_t m_BufferImageUsage; // 0 if unknown. 
-#endif -}; -#endif // _VMA_ALLOCATION_T - -#ifndef _VMA_DEDICATED_ALLOCATION_LIST_ITEM_TRAITS -struct VmaDedicatedAllocationListItemTraits -{ - typedef VmaAllocation_T ItemType; - - static ItemType* GetPrev(const ItemType* item) - { - VMA_HEAVY_ASSERT(item->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED); - return item->m_DedicatedAllocation.m_Prev; - } - static ItemType* GetNext(const ItemType* item) - { - VMA_HEAVY_ASSERT(item->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED); - return item->m_DedicatedAllocation.m_Next; - } - static ItemType*& AccessPrev(ItemType* item) - { - VMA_HEAVY_ASSERT(item->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED); - return item->m_DedicatedAllocation.m_Prev; - } - static ItemType*& AccessNext(ItemType* item) - { - VMA_HEAVY_ASSERT(item->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED); - return item->m_DedicatedAllocation.m_Next; - } -}; -#endif // _VMA_DEDICATED_ALLOCATION_LIST_ITEM_TRAITS - -#ifndef _VMA_DEDICATED_ALLOCATION_LIST -/* -Stores linked list of VmaAllocation_T objects. -Thread-safe, synchronized internally. -*/ -class VmaDedicatedAllocationList -{ -public: - VmaDedicatedAllocationList() {} - ~VmaDedicatedAllocationList(); - - void Init(bool useMutex) { m_UseMutex = useMutex; } - bool Validate(); - - void AddDetailedStatistics(VmaDetailedStatistics& inoutStats); - void AddStatistics(VmaStatistics& inoutStats); -#if VMA_STATS_STRING_ENABLED - // Writes JSON array with the list of allocations. - void BuildStatsString(VmaJsonWriter& json); -#endif - - bool IsEmpty(); - void Register(VmaAllocation alloc); - void Unregister(VmaAllocation alloc); - -private: - typedef VmaIntrusiveLinkedList DedicatedAllocationLinkedList; - - bool m_UseMutex = true; - VMA_RW_MUTEX m_Mutex; - DedicatedAllocationLinkedList m_AllocationList; -}; - -#ifndef _VMA_DEDICATED_ALLOCATION_LIST_FUNCTIONS - -VmaDedicatedAllocationList::~VmaDedicatedAllocationList() -{ - VMA_HEAVY_ASSERT(Validate()); - - if (!m_AllocationList.IsEmpty()) - { - VMA_ASSERT(false && "Unfreed dedicated allocations found!"); - } -} - -bool VmaDedicatedAllocationList::Validate() -{ - const size_t declaredCount = m_AllocationList.GetCount(); - size_t actualCount = 0; - VmaMutexLockRead lock(m_Mutex, m_UseMutex); - for (VmaAllocation alloc = m_AllocationList.Front(); - alloc != VMA_NULL; alloc = m_AllocationList.GetNext(alloc)) - { - ++actualCount; - } - VMA_VALIDATE(actualCount == declaredCount); - - return true; -} - -void VmaDedicatedAllocationList::AddDetailedStatistics(VmaDetailedStatistics& inoutStats) -{ - for(auto* item = m_AllocationList.Front(); item != nullptr; item = DedicatedAllocationLinkedList::GetNext(item)) - { - const VkDeviceSize size = item->GetSize(); - inoutStats.statistics.blockCount++; - inoutStats.statistics.blockBytes += size; - VmaAddDetailedStatisticsAllocation(inoutStats, item->GetSize()); - } -} - -void VmaDedicatedAllocationList::AddStatistics(VmaStatistics& inoutStats) -{ - VmaMutexLockRead lock(m_Mutex, m_UseMutex); - - const uint32_t allocCount = (uint32_t)m_AllocationList.GetCount(); - inoutStats.blockCount += allocCount; - inoutStats.allocationCount += allocCount; - - for(auto* item = m_AllocationList.Front(); item != nullptr; item = DedicatedAllocationLinkedList::GetNext(item)) - { - const VkDeviceSize size = item->GetSize(); - inoutStats.blockBytes += size; - inoutStats.allocationBytes += size; - } -} - -#if VMA_STATS_STRING_ENABLED -void VmaDedicatedAllocationList::BuildStatsString(VmaJsonWriter& json) -{ - VmaMutexLockRead 
lock(m_Mutex, m_UseMutex); - json.BeginArray(); - for (VmaAllocation alloc = m_AllocationList.Front(); - alloc != VMA_NULL; alloc = m_AllocationList.GetNext(alloc)) - { - json.BeginObject(true); - alloc->PrintParameters(json); - json.EndObject(); - } - json.EndArray(); -} -#endif // VMA_STATS_STRING_ENABLED - -bool VmaDedicatedAllocationList::IsEmpty() -{ - VmaMutexLockRead lock(m_Mutex, m_UseMutex); - return m_AllocationList.IsEmpty(); -} - -void VmaDedicatedAllocationList::Register(VmaAllocation alloc) -{ - VmaMutexLockWrite lock(m_Mutex, m_UseMutex); - m_AllocationList.PushBack(alloc); -} - -void VmaDedicatedAllocationList::Unregister(VmaAllocation alloc) -{ - VmaMutexLockWrite lock(m_Mutex, m_UseMutex); - m_AllocationList.Remove(alloc); -} -#endif // _VMA_DEDICATED_ALLOCATION_LIST_FUNCTIONS -#endif // _VMA_DEDICATED_ALLOCATION_LIST - -#ifndef _VMA_SUBALLOCATION -/* -Represents a region of VmaDeviceMemoryBlock that is either assigned and returned as -allocated memory block or free. -*/ -struct VmaSuballocation -{ - VkDeviceSize offset; - VkDeviceSize size; - void* userData; - VmaSuballocationType type; -}; - -// Comparator for offsets. -struct VmaSuballocationOffsetLess -{ - bool operator()(const VmaSuballocation& lhs, const VmaSuballocation& rhs) const - { - return lhs.offset < rhs.offset; - } -}; - -struct VmaSuballocationOffsetGreater -{ - bool operator()(const VmaSuballocation& lhs, const VmaSuballocation& rhs) const - { - return lhs.offset > rhs.offset; - } -}; - -struct VmaSuballocationItemSizeLess -{ - bool operator()(const VmaSuballocationList::iterator lhs, - const VmaSuballocationList::iterator rhs) const - { - return lhs->size < rhs->size; - } - - bool operator()(const VmaSuballocationList::iterator lhs, - VkDeviceSize rhsSize) const - { - return lhs->size < rhsSize; - } -}; -#endif // _VMA_SUBALLOCATION - -#ifndef _VMA_ALLOCATION_REQUEST -/* -Parameters of planned allocation inside a VmaDeviceMemoryBlock. -item points to a FREE suballocation. -*/ -struct VmaAllocationRequest -{ - VmaAllocHandle allocHandle; - VkDeviceSize size; - VmaSuballocationList::iterator item; - void* customData; - uint64_t algorithmData; - VmaAllocationRequestType type; -}; -#endif // _VMA_ALLOCATION_REQUEST - -#ifndef _VMA_BLOCK_METADATA -/* -Data structure used for bookkeeping of allocations and unused ranges of memory -in a single VkDeviceMemory block. -*/ -class VmaBlockMetadata -{ -public: - // pAllocationCallbacks, if not null, must be owned externally - alive and unchanged for the whole lifetime of this object. - VmaBlockMetadata(const VkAllocationCallbacks* pAllocationCallbacks, - VkDeviceSize bufferImageGranularity, bool isVirtual); - virtual ~VmaBlockMetadata() = default; - - virtual void Init(VkDeviceSize size) { m_Size = size; } - bool IsVirtual() const { return m_IsVirtual; } - VkDeviceSize GetSize() const { return m_Size; } - - // Validates all data structures inside this object. If not valid, returns false. - virtual bool Validate() const = 0; - virtual size_t GetAllocationCount() const = 0; - virtual size_t GetFreeRegionsCount() const = 0; - virtual VkDeviceSize GetSumFreeSize() const = 0; - // Returns true if this block is empty - contains only single free suballocation. 
- virtual bool IsEmpty() const = 0; - virtual void GetAllocationInfo(VmaAllocHandle allocHandle, VmaVirtualAllocationInfo& outInfo) = 0; - virtual VkDeviceSize GetAllocationOffset(VmaAllocHandle allocHandle) const = 0; - virtual void* GetAllocationUserData(VmaAllocHandle allocHandle) const = 0; - - virtual VmaAllocHandle GetAllocationListBegin() const = 0; - virtual VmaAllocHandle GetNextAllocation(VmaAllocHandle prevAlloc) const = 0; - virtual VkDeviceSize GetNextFreeRegionSize(VmaAllocHandle alloc) const = 0; - - // Shouldn't modify blockCount. - virtual void AddDetailedStatistics(VmaDetailedStatistics& inoutStats) const = 0; - virtual void AddStatistics(VmaStatistics& inoutStats) const = 0; - -#if VMA_STATS_STRING_ENABLED - virtual void PrintDetailedMap(class VmaJsonWriter& json) const = 0; -#endif - - // Tries to find a place for suballocation with given parameters inside this block. - // If succeeded, fills pAllocationRequest and returns true. - // If failed, returns false. - virtual bool CreateAllocationRequest( - VkDeviceSize allocSize, - VkDeviceSize allocAlignment, - bool upperAddress, - VmaSuballocationType allocType, - // Always one of VMA_ALLOCATION_CREATE_STRATEGY_* or VMA_ALLOCATION_INTERNAL_STRATEGY_* flags. - uint32_t strategy, - VmaAllocationRequest* pAllocationRequest) = 0; - - virtual VkResult CheckCorruption(const void* pBlockData) = 0; - - // Makes actual allocation based on request. Request must already be checked and valid. - virtual void Alloc( - const VmaAllocationRequest& request, - VmaSuballocationType type, - void* userData) = 0; - - // Frees suballocation assigned to given memory region. - virtual void Free(VmaAllocHandle allocHandle) = 0; - - // Frees all allocations. - // Careful! Don't call it if there are VmaAllocation objects owned by userData of cleared allocations! - virtual void Clear() = 0; - - virtual void SetAllocationUserData(VmaAllocHandle allocHandle, void* userData) = 0; - virtual void DebugLogAllAllocations() const = 0; - -protected: - const VkAllocationCallbacks* GetAllocationCallbacks() const { return m_pAllocationCallbacks; } - VkDeviceSize GetBufferImageGranularity() const { return m_BufferImageGranularity; } - VkDeviceSize GetDebugMargin() const { return IsVirtual() ? 0 : VMA_DEBUG_MARGIN; } - - void DebugLogAllocation(VkDeviceSize offset, VkDeviceSize size, void* userData) const; -#if VMA_STATS_STRING_ENABLED - // mapRefCount == UINT32_MAX means unspecified. 
- void PrintDetailedMap_Begin(class VmaJsonWriter& json, - VkDeviceSize unusedBytes, - size_t allocationCount, - size_t unusedRangeCount) const; - void PrintDetailedMap_Allocation(class VmaJsonWriter& json, - VkDeviceSize offset, VkDeviceSize size, void* userData) const; - void PrintDetailedMap_UnusedRange(class VmaJsonWriter& json, - VkDeviceSize offset, - VkDeviceSize size) const; - void PrintDetailedMap_End(class VmaJsonWriter& json) const; -#endif - -private: - VkDeviceSize m_Size; - const VkAllocationCallbacks* m_pAllocationCallbacks; - const VkDeviceSize m_BufferImageGranularity; - const bool m_IsVirtual; -}; - -#ifndef _VMA_BLOCK_METADATA_FUNCTIONS -VmaBlockMetadata::VmaBlockMetadata(const VkAllocationCallbacks* pAllocationCallbacks, - VkDeviceSize bufferImageGranularity, bool isVirtual) - : m_Size(0), - m_pAllocationCallbacks(pAllocationCallbacks), - m_BufferImageGranularity(bufferImageGranularity), - m_IsVirtual(isVirtual) {} - -void VmaBlockMetadata::DebugLogAllocation(VkDeviceSize offset, VkDeviceSize size, void* userData) const -{ - if (IsVirtual()) - { - VMA_DEBUG_LOG("UNFREED VIRTUAL ALLOCATION; Offset: %llu; Size: %llu; UserData: %p", offset, size, userData); - } - else - { - VMA_ASSERT(userData != VMA_NULL); - VmaAllocation allocation = reinterpret_cast(userData); - - userData = allocation->GetUserData(); - const char* name = allocation->GetName(); - -#if VMA_STATS_STRING_ENABLED - VMA_DEBUG_LOG("UNFREED ALLOCATION; Offset: %llu; Size: %llu; UserData: %p; Name: %s; Type: %s; Usage: %u", - offset, size, userData, name ? name : "vma_empty", - VMA_SUBALLOCATION_TYPE_NAMES[allocation->GetSuballocationType()], - allocation->GetBufferImageUsage()); -#else - VMA_DEBUG_LOG("UNFREED ALLOCATION; Offset: %llu; Size: %llu; UserData: %p; Name: %s; Type: %u", - offset, size, userData, name ? 
name : "vma_empty", - (uint32_t)allocation->GetSuballocationType()); -#endif // VMA_STATS_STRING_ENABLED - } - -} - -#if VMA_STATS_STRING_ENABLED -void VmaBlockMetadata::PrintDetailedMap_Begin(class VmaJsonWriter& json, - VkDeviceSize unusedBytes, size_t allocationCount, size_t unusedRangeCount) const -{ - json.WriteString("TotalBytes"); - json.WriteNumber(GetSize()); - - json.WriteString("UnusedBytes"); - json.WriteSize(unusedBytes); - - json.WriteString("Allocations"); - json.WriteSize(allocationCount); - - json.WriteString("UnusedRanges"); - json.WriteSize(unusedRangeCount); - - json.WriteString("Suballocations"); - json.BeginArray(); -} - -void VmaBlockMetadata::PrintDetailedMap_Allocation(class VmaJsonWriter& json, - VkDeviceSize offset, VkDeviceSize size, void* userData) const -{ - json.BeginObject(true); - - json.WriteString("Offset"); - json.WriteNumber(offset); - - if (IsVirtual()) - { - json.WriteString("Size"); - json.WriteNumber(size); - if (userData) - { - json.WriteString("CustomData"); - json.BeginString(); - json.ContinueString_Pointer(userData); - json.EndString(); - } - } - else - { - ((VmaAllocation)userData)->PrintParameters(json); - } - - json.EndObject(); -} - -void VmaBlockMetadata::PrintDetailedMap_UnusedRange(class VmaJsonWriter& json, - VkDeviceSize offset, VkDeviceSize size) const -{ - json.BeginObject(true); - - json.WriteString("Offset"); - json.WriteNumber(offset); - - json.WriteString("Type"); - json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[VMA_SUBALLOCATION_TYPE_FREE]); - - json.WriteString("Size"); - json.WriteNumber(size); - - json.EndObject(); -} - -void VmaBlockMetadata::PrintDetailedMap_End(class VmaJsonWriter& json) const -{ - json.EndArray(); -} -#endif // VMA_STATS_STRING_ENABLED -#endif // _VMA_BLOCK_METADATA_FUNCTIONS -#endif // _VMA_BLOCK_METADATA - -#ifndef _VMA_BLOCK_BUFFER_IMAGE_GRANULARITY -// Before deleting object of this class remember to call 'Destroy()' -class VmaBlockBufferImageGranularity final -{ -public: - struct ValidationContext - { - const VkAllocationCallbacks* allocCallbacks; - uint16_t* pageAllocs; - }; - - VmaBlockBufferImageGranularity(VkDeviceSize bufferImageGranularity); - ~VmaBlockBufferImageGranularity(); - - bool IsEnabled() const { return m_BufferImageGranularity > MAX_LOW_BUFFER_IMAGE_GRANULARITY; } - - void Init(const VkAllocationCallbacks* pAllocationCallbacks, VkDeviceSize size); - // Before destroying object you must call free it's memory - void Destroy(const VkAllocationCallbacks* pAllocationCallbacks); - - void RoundupAllocRequest(VmaSuballocationType allocType, - VkDeviceSize& inOutAllocSize, - VkDeviceSize& inOutAllocAlignment) const; - - bool CheckConflictAndAlignUp(VkDeviceSize& inOutAllocOffset, - VkDeviceSize allocSize, - VkDeviceSize blockOffset, - VkDeviceSize blockSize, - VmaSuballocationType allocType) const; - - void AllocPages(uint8_t allocType, VkDeviceSize offset, VkDeviceSize size); - void FreePages(VkDeviceSize offset, VkDeviceSize size); - void Clear(); - - ValidationContext StartValidation(const VkAllocationCallbacks* pAllocationCallbacks, - bool isVirutal) const; - bool Validate(ValidationContext& ctx, VkDeviceSize offset, VkDeviceSize size) const; - bool FinishValidation(ValidationContext& ctx) const; - -private: - static const uint16_t MAX_LOW_BUFFER_IMAGE_GRANULARITY = 256; - - struct RegionInfo - { - uint8_t allocType; - uint16_t allocCount; - }; - - VkDeviceSize m_BufferImageGranularity; - uint32_t m_RegionCount; - RegionInfo* m_RegionInfo; - - uint32_t GetStartPage(VkDeviceSize offset) 
const { return OffsetToPageIndex(offset & ~(m_BufferImageGranularity - 1)); } - uint32_t GetEndPage(VkDeviceSize offset, VkDeviceSize size) const { return OffsetToPageIndex((offset + size - 1) & ~(m_BufferImageGranularity - 1)); } - - uint32_t OffsetToPageIndex(VkDeviceSize offset) const; - void AllocPage(RegionInfo& page, uint8_t allocType); -}; - -#ifndef _VMA_BLOCK_BUFFER_IMAGE_GRANULARITY_FUNCTIONS -VmaBlockBufferImageGranularity::VmaBlockBufferImageGranularity(VkDeviceSize bufferImageGranularity) - : m_BufferImageGranularity(bufferImageGranularity), - m_RegionCount(0), - m_RegionInfo(VMA_NULL) {} - -VmaBlockBufferImageGranularity::~VmaBlockBufferImageGranularity() -{ - VMA_ASSERT(m_RegionInfo == VMA_NULL && "Free not called before destroying object!"); -} - -void VmaBlockBufferImageGranularity::Init(const VkAllocationCallbacks* pAllocationCallbacks, VkDeviceSize size) -{ - if (IsEnabled()) - { - m_RegionCount = static_cast(VmaDivideRoundingUp(size, m_BufferImageGranularity)); - m_RegionInfo = vma_new_array(pAllocationCallbacks, RegionInfo, m_RegionCount); - memset(m_RegionInfo, 0, m_RegionCount * sizeof(RegionInfo)); - } -} - -void VmaBlockBufferImageGranularity::Destroy(const VkAllocationCallbacks* pAllocationCallbacks) -{ - if (m_RegionInfo) - { - vma_delete_array(pAllocationCallbacks, m_RegionInfo, m_RegionCount); - m_RegionInfo = VMA_NULL; - } -} - -void VmaBlockBufferImageGranularity::RoundupAllocRequest(VmaSuballocationType allocType, - VkDeviceSize& inOutAllocSize, - VkDeviceSize& inOutAllocAlignment) const -{ - if (m_BufferImageGranularity > 1 && - m_BufferImageGranularity <= MAX_LOW_BUFFER_IMAGE_GRANULARITY) - { - if (allocType == VMA_SUBALLOCATION_TYPE_UNKNOWN || - allocType == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN || - allocType == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL) - { - inOutAllocAlignment = VMA_MAX(inOutAllocAlignment, m_BufferImageGranularity); - inOutAllocSize = VmaAlignUp(inOutAllocSize, m_BufferImageGranularity); - } - } -} - -bool VmaBlockBufferImageGranularity::CheckConflictAndAlignUp(VkDeviceSize& inOutAllocOffset, - VkDeviceSize allocSize, - VkDeviceSize blockOffset, - VkDeviceSize blockSize, - VmaSuballocationType allocType) const -{ - if (IsEnabled()) - { - uint32_t startPage = GetStartPage(inOutAllocOffset); - if (m_RegionInfo[startPage].allocCount > 0 && - VmaIsBufferImageGranularityConflict(static_cast(m_RegionInfo[startPage].allocType), allocType)) - { - inOutAllocOffset = VmaAlignUp(inOutAllocOffset, m_BufferImageGranularity); - if (blockSize < allocSize + inOutAllocOffset - blockOffset) - return true; - ++startPage; - } - uint32_t endPage = GetEndPage(inOutAllocOffset, allocSize); - if (endPage != startPage && - m_RegionInfo[endPage].allocCount > 0 && - VmaIsBufferImageGranularityConflict(static_cast(m_RegionInfo[endPage].allocType), allocType)) - { - return true; - } - } - return false; -} - -void VmaBlockBufferImageGranularity::AllocPages(uint8_t allocType, VkDeviceSize offset, VkDeviceSize size) -{ - if (IsEnabled()) - { - uint32_t startPage = GetStartPage(offset); - AllocPage(m_RegionInfo[startPage], allocType); - - uint32_t endPage = GetEndPage(offset, size); - if (startPage != endPage) - AllocPage(m_RegionInfo[endPage], allocType); - } -} - -void VmaBlockBufferImageGranularity::FreePages(VkDeviceSize offset, VkDeviceSize size) -{ - if (IsEnabled()) - { - uint32_t startPage = GetStartPage(offset); - --m_RegionInfo[startPage].allocCount; - if (m_RegionInfo[startPage].allocCount == 0) - m_RegionInfo[startPage].allocType = VMA_SUBALLOCATION_TYPE_FREE; 
- uint32_t endPage = GetEndPage(offset, size); - if (startPage != endPage) - { - --m_RegionInfo[endPage].allocCount; - if (m_RegionInfo[endPage].allocCount == 0) - m_RegionInfo[endPage].allocType = VMA_SUBALLOCATION_TYPE_FREE; - } - } -} - -void VmaBlockBufferImageGranularity::Clear() -{ - if (m_RegionInfo) - memset(m_RegionInfo, 0, m_RegionCount * sizeof(RegionInfo)); -} - -VmaBlockBufferImageGranularity::ValidationContext VmaBlockBufferImageGranularity::StartValidation( - const VkAllocationCallbacks* pAllocationCallbacks, bool isVirutal) const -{ - ValidationContext ctx{ pAllocationCallbacks, VMA_NULL }; - if (!isVirutal && IsEnabled()) - { - ctx.pageAllocs = vma_new_array(pAllocationCallbacks, uint16_t, m_RegionCount); - memset(ctx.pageAllocs, 0, m_RegionCount * sizeof(uint16_t)); - } - return ctx; -} - -bool VmaBlockBufferImageGranularity::Validate(ValidationContext& ctx, - VkDeviceSize offset, VkDeviceSize size) const -{ - if (IsEnabled()) - { - uint32_t start = GetStartPage(offset); - ++ctx.pageAllocs[start]; - VMA_VALIDATE(m_RegionInfo[start].allocCount > 0); - - uint32_t end = GetEndPage(offset, size); - if (start != end) - { - ++ctx.pageAllocs[end]; - VMA_VALIDATE(m_RegionInfo[end].allocCount > 0); - } - } - return true; -} - -bool VmaBlockBufferImageGranularity::FinishValidation(ValidationContext& ctx) const -{ - // Check proper page structure - if (IsEnabled()) - { - VMA_ASSERT(ctx.pageAllocs != VMA_NULL && "Validation context not initialized!"); - - for (uint32_t page = 0; page < m_RegionCount; ++page) - { - VMA_VALIDATE(ctx.pageAllocs[page] == m_RegionInfo[page].allocCount); - } - vma_delete_array(ctx.allocCallbacks, ctx.pageAllocs, m_RegionCount); - ctx.pageAllocs = VMA_NULL; - } - return true; -} - -uint32_t VmaBlockBufferImageGranularity::OffsetToPageIndex(VkDeviceSize offset) const -{ - return static_cast(offset >> VMA_BITSCAN_MSB(m_BufferImageGranularity)); -} - -void VmaBlockBufferImageGranularity::AllocPage(RegionInfo& page, uint8_t allocType) -{ - // When current alloc type is free then it can be overriden by new type - if (page.allocCount == 0 || (page.allocCount > 0 && page.allocType == VMA_SUBALLOCATION_TYPE_FREE)) - page.allocType = allocType; - - ++page.allocCount; -} -#endif // _VMA_BLOCK_BUFFER_IMAGE_GRANULARITY_FUNCTIONS -#endif // _VMA_BLOCK_BUFFER_IMAGE_GRANULARITY - -#if 0 -#ifndef _VMA_BLOCK_METADATA_GENERIC -class VmaBlockMetadata_Generic : public VmaBlockMetadata -{ - friend class VmaDefragmentationAlgorithm_Generic; - friend class VmaDefragmentationAlgorithm_Fast; - VMA_CLASS_NO_COPY(VmaBlockMetadata_Generic) -public: - VmaBlockMetadata_Generic(const VkAllocationCallbacks* pAllocationCallbacks, - VkDeviceSize bufferImageGranularity, bool isVirtual); - virtual ~VmaBlockMetadata_Generic() = default; - - size_t GetAllocationCount() const override { return m_Suballocations.size() - m_FreeCount; } - VkDeviceSize GetSumFreeSize() const override { return m_SumFreeSize; } - bool IsEmpty() const override { return (m_Suballocations.size() == 1) && (m_FreeCount == 1); } - void Free(VmaAllocHandle allocHandle) override { FreeSuballocation(FindAtOffset((VkDeviceSize)allocHandle - 1)); } - VkDeviceSize GetAllocationOffset(VmaAllocHandle allocHandle) const override { return (VkDeviceSize)allocHandle - 1; }; - - void Init(VkDeviceSize size) override; - bool Validate() const override; - - void AddDetailedStatistics(VmaDetailedStatistics& inoutStats) const override; - void AddStatistics(VmaStatistics& inoutStats) const override; - -#if VMA_STATS_STRING_ENABLED - void 
PrintDetailedMap(class VmaJsonWriter& json, uint32_t mapRefCount) const override; -#endif - - bool CreateAllocationRequest( - VkDeviceSize allocSize, - VkDeviceSize allocAlignment, - bool upperAddress, - VmaSuballocationType allocType, - uint32_t strategy, - VmaAllocationRequest* pAllocationRequest) override; - - VkResult CheckCorruption(const void* pBlockData) override; - - void Alloc( - const VmaAllocationRequest& request, - VmaSuballocationType type, - void* userData) override; - - void GetAllocationInfo(VmaAllocHandle allocHandle, VmaVirtualAllocationInfo& outInfo) override; - void* GetAllocationUserData(VmaAllocHandle allocHandle) const override; - VmaAllocHandle GetAllocationListBegin() const override; - VmaAllocHandle GetNextAllocation(VmaAllocHandle prevAlloc) const override; - void Clear() override; - void SetAllocationUserData(VmaAllocHandle allocHandle, void* userData) override; - void DebugLogAllAllocations() const override; - -private: - uint32_t m_FreeCount; - VkDeviceSize m_SumFreeSize; - VmaSuballocationList m_Suballocations; - // Suballocations that are free. Sorted by size, ascending. - VmaVector> m_FreeSuballocationsBySize; - - VkDeviceSize AlignAllocationSize(VkDeviceSize size) const { return IsVirtual() ? size : VmaAlignUp(size, (VkDeviceSize)16); } - - VmaSuballocationList::iterator FindAtOffset(VkDeviceSize offset) const; - bool ValidateFreeSuballocationList() const; - - // Checks if requested suballocation with given parameters can be placed in given pFreeSuballocItem. - // If yes, fills pOffset and returns true. If no, returns false. - bool CheckAllocation( - VkDeviceSize allocSize, - VkDeviceSize allocAlignment, - VmaSuballocationType allocType, - VmaSuballocationList::const_iterator suballocItem, - VmaAllocHandle* pAllocHandle) const; - - // Given free suballocation, it merges it with following one, which must also be free. - void MergeFreeWithNext(VmaSuballocationList::iterator item); - // Releases given suballocation, making it free. - // Merges it with adjacent free suballocations if applicable. - // Returns iterator to new free suballocation at this place. - VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem); - // Given free suballocation, it inserts it into sorted list of - // m_FreeSuballocationsBySize if it is suitable. - void RegisterFreeSuballocation(VmaSuballocationList::iterator item); - // Given free suballocation, it removes it from sorted list of - // m_FreeSuballocationsBySize if it is suitable. 
- void UnregisterFreeSuballocation(VmaSuballocationList::iterator item); -}; - -#ifndef _VMA_BLOCK_METADATA_GENERIC_FUNCTIONS -VmaBlockMetadata_Generic::VmaBlockMetadata_Generic(const VkAllocationCallbacks* pAllocationCallbacks, - VkDeviceSize bufferImageGranularity, bool isVirtual) - : VmaBlockMetadata(pAllocationCallbacks, bufferImageGranularity, isVirtual), - m_FreeCount(0), - m_SumFreeSize(0), - m_Suballocations(VmaStlAllocator(pAllocationCallbacks)), - m_FreeSuballocationsBySize(VmaStlAllocator(pAllocationCallbacks)) {} - -void VmaBlockMetadata_Generic::Init(VkDeviceSize size) -{ - VmaBlockMetadata::Init(size); - - m_FreeCount = 1; - m_SumFreeSize = size; - - VmaSuballocation suballoc = {}; - suballoc.offset = 0; - suballoc.size = size; - suballoc.type = VMA_SUBALLOCATION_TYPE_FREE; - - m_Suballocations.push_back(suballoc); - m_FreeSuballocationsBySize.push_back(m_Suballocations.begin()); -} - -bool VmaBlockMetadata_Generic::Validate() const -{ - VMA_VALIDATE(!m_Suballocations.empty()); - - // Expected offset of new suballocation as calculated from previous ones. - VkDeviceSize calculatedOffset = 0; - // Expected number of free suballocations as calculated from traversing their list. - uint32_t calculatedFreeCount = 0; - // Expected sum size of free suballocations as calculated from traversing their list. - VkDeviceSize calculatedSumFreeSize = 0; - // Expected number of free suballocations that should be registered in - // m_FreeSuballocationsBySize calculated from traversing their list. - size_t freeSuballocationsToRegister = 0; - // True if previous visited suballocation was free. - bool prevFree = false; - - const VkDeviceSize debugMargin = GetDebugMargin(); - - for (const auto& subAlloc : m_Suballocations) - { - // Actual offset of this suballocation doesn't match expected one. - VMA_VALIDATE(subAlloc.offset == calculatedOffset); - - const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE); - // Two adjacent free suballocations are invalid. They should be merged. - VMA_VALIDATE(!prevFree || !currFree); - - VmaAllocation alloc = (VmaAllocation)subAlloc.userData; - if (!IsVirtual()) - { - VMA_VALIDATE(currFree == (alloc == VK_NULL_HANDLE)); - } - - if (currFree) - { - calculatedSumFreeSize += subAlloc.size; - ++calculatedFreeCount; - ++freeSuballocationsToRegister; - - // Margin required between allocations - every free space must be at least that large. - VMA_VALIDATE(subAlloc.size >= debugMargin); - } - else - { - if (!IsVirtual()) - { - VMA_VALIDATE((VkDeviceSize)alloc->GetAllocHandle() == subAlloc.offset + 1); - VMA_VALIDATE(alloc->GetSize() == subAlloc.size); - } - - // Margin required between allocations - previous allocation must be free. - VMA_VALIDATE(debugMargin == 0 || prevFree); - } - - calculatedOffset += subAlloc.size; - prevFree = currFree; - } - - // Number of free suballocations registered in m_FreeSuballocationsBySize doesn't - // match expected one. - VMA_VALIDATE(m_FreeSuballocationsBySize.size() == freeSuballocationsToRegister); - - VkDeviceSize lastSize = 0; - for (size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i) - { - VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i]; - - // Only free suballocations can be registered in m_FreeSuballocationsBySize. - VMA_VALIDATE(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE); - // They must be sorted by size ascending. - VMA_VALIDATE(suballocItem->size >= lastSize); - - lastSize = suballocItem->size; - } - - // Check if totals match calculated values. 
- VMA_VALIDATE(ValidateFreeSuballocationList()); - VMA_VALIDATE(calculatedOffset == GetSize()); - VMA_VALIDATE(calculatedSumFreeSize == m_SumFreeSize); - VMA_VALIDATE(calculatedFreeCount == m_FreeCount); - - return true; -} - -void VmaBlockMetadata_Generic::AddDetailedStatistics(VmaDetailedStatistics& inoutStats) const -{ - const uint32_t rangeCount = (uint32_t)m_Suballocations.size(); - inoutStats.statistics.blockCount++; - inoutStats.statistics.blockBytes += GetSize(); - - for (const auto& suballoc : m_Suballocations) - { - if (suballoc.type != VMA_SUBALLOCATION_TYPE_FREE) - VmaAddDetailedStatisticsAllocation(inoutStats, suballoc.size); - else - VmaAddDetailedStatisticsUnusedRange(inoutStats, suballoc.size); - } -} - -void VmaBlockMetadata_Generic::AddStatistics(VmaStatistics& inoutStats) const -{ - inoutStats.blockCount++; - inoutStats.allocationCount += (uint32_t)m_Suballocations.size() - m_FreeCount; - inoutStats.blockBytes += GetSize(); - inoutStats.allocationBytes += GetSize() - m_SumFreeSize; -} - -#if VMA_STATS_STRING_ENABLED -void VmaBlockMetadata_Generic::PrintDetailedMap(class VmaJsonWriter& json, uint32_t mapRefCount) const -{ - PrintDetailedMap_Begin(json, - m_SumFreeSize, // unusedBytes - m_Suballocations.size() - (size_t)m_FreeCount, // allocationCount - m_FreeCount, // unusedRangeCount - mapRefCount); - - for (const auto& suballoc : m_Suballocations) - { - if (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE) - { - PrintDetailedMap_UnusedRange(json, suballoc.offset, suballoc.size); - } - else - { - PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.size, suballoc.userData); - } - } - - PrintDetailedMap_End(json); -} -#endif // VMA_STATS_STRING_ENABLED - -bool VmaBlockMetadata_Generic::CreateAllocationRequest( - VkDeviceSize allocSize, - VkDeviceSize allocAlignment, - bool upperAddress, - VmaSuballocationType allocType, - uint32_t strategy, - VmaAllocationRequest* pAllocationRequest) -{ - VMA_ASSERT(allocSize > 0); - VMA_ASSERT(!upperAddress); - VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE); - VMA_ASSERT(pAllocationRequest != VMA_NULL); - VMA_HEAVY_ASSERT(Validate()); - - allocSize = AlignAllocationSize(allocSize); - - pAllocationRequest->type = VmaAllocationRequestType::Normal; - pAllocationRequest->size = allocSize; - - const VkDeviceSize debugMargin = GetDebugMargin(); - - // There is not enough total free space in this block to fulfill the request: Early return. - if (m_SumFreeSize < allocSize + debugMargin) - { - return false; - } - - // New algorithm, efficiently searching freeSuballocationsBySize. - const size_t freeSuballocCount = m_FreeSuballocationsBySize.size(); - if (freeSuballocCount > 0) - { - if (strategy == 0 || - strategy == VMA_ALLOCATION_CREATE_STRATEGY_MIN_MEMORY_BIT) - { - // Find first free suballocation with size not less than allocSize + debugMargin. 
- VmaSuballocationList::iterator* const it = VmaBinaryFindFirstNotLess( - m_FreeSuballocationsBySize.data(), - m_FreeSuballocationsBySize.data() + freeSuballocCount, - allocSize + debugMargin, - VmaSuballocationItemSizeLess()); - size_t index = it - m_FreeSuballocationsBySize.data(); - for (; index < freeSuballocCount; ++index) - { - if (CheckAllocation( - allocSize, - allocAlignment, - allocType, - m_FreeSuballocationsBySize[index], - &pAllocationRequest->allocHandle)) - { - pAllocationRequest->item = m_FreeSuballocationsBySize[index]; - return true; - } - } - } - else if (strategy == VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET) - { - for (VmaSuballocationList::iterator it = m_Suballocations.begin(); - it != m_Suballocations.end(); - ++it) - { - if (it->type == VMA_SUBALLOCATION_TYPE_FREE && CheckAllocation( - allocSize, - allocAlignment, - allocType, - it, - &pAllocationRequest->allocHandle)) - { - pAllocationRequest->item = it; - return true; - } - } - } - else - { - VMA_ASSERT(strategy & (VMA_ALLOCATION_CREATE_STRATEGY_MIN_TIME_BIT | VMA_ALLOCATION_CREATE_STRATEGY_MIN_OFFSET_BIT )); - // Search staring from biggest suballocations. - for (size_t index = freeSuballocCount; index--; ) - { - if (CheckAllocation( - allocSize, - allocAlignment, - allocType, - m_FreeSuballocationsBySize[index], - &pAllocationRequest->allocHandle)) - { - pAllocationRequest->item = m_FreeSuballocationsBySize[index]; - return true; - } - } - } - } - - return false; -} - -VkResult VmaBlockMetadata_Generic::CheckCorruption(const void* pBlockData) -{ - for (auto& suballoc : m_Suballocations) - { - if (suballoc.type != VMA_SUBALLOCATION_TYPE_FREE) - { - if (!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size)) - { - VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!"); - return VK_ERROR_UNKNOWN_COPY; - } - } - } - - return VK_SUCCESS; -} - -void VmaBlockMetadata_Generic::Alloc( - const VmaAllocationRequest& request, - VmaSuballocationType type, - void* userData) -{ - VMA_ASSERT(request.type == VmaAllocationRequestType::Normal); - VMA_ASSERT(request.item != m_Suballocations.end()); - VmaSuballocation& suballoc = *request.item; - // Given suballocation is a free block. - VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE); - - // Given offset is inside this suballocation. - VMA_ASSERT((VkDeviceSize)request.allocHandle - 1 >= suballoc.offset); - const VkDeviceSize paddingBegin = (VkDeviceSize)request.allocHandle - suballoc.offset - 1; - VMA_ASSERT(suballoc.size >= paddingBegin + request.size); - const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - request.size; - - // Unregister this free suballocation from m_FreeSuballocationsBySize and update - // it to become used. - UnregisterFreeSuballocation(request.item); - - suballoc.offset = (VkDeviceSize)request.allocHandle - 1; - suballoc.size = request.size; - suballoc.type = type; - suballoc.userData = userData; - - // If there are any free bytes remaining at the end, insert new free suballocation after current one. 
- if (paddingEnd) - { - VmaSuballocation paddingSuballoc = {}; - paddingSuballoc.offset = suballoc.offset + suballoc.size; - paddingSuballoc.size = paddingEnd; - paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE; - VmaSuballocationList::iterator next = request.item; - ++next; - const VmaSuballocationList::iterator paddingEndItem = - m_Suballocations.insert(next, paddingSuballoc); - RegisterFreeSuballocation(paddingEndItem); - } - - // If there are any free bytes remaining at the beginning, insert new free suballocation before current one. - if (paddingBegin) - { - VmaSuballocation paddingSuballoc = {}; - paddingSuballoc.offset = suballoc.offset - paddingBegin; - paddingSuballoc.size = paddingBegin; - paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE; - const VmaSuballocationList::iterator paddingBeginItem = - m_Suballocations.insert(request.item, paddingSuballoc); - RegisterFreeSuballocation(paddingBeginItem); - } - - // Update totals. - m_FreeCount = m_FreeCount - 1; - if (paddingBegin > 0) - { - ++m_FreeCount; - } - if (paddingEnd > 0) - { - ++m_FreeCount; - } - m_SumFreeSize -= request.size; -} - -void VmaBlockMetadata_Generic::GetAllocationInfo(VmaAllocHandle allocHandle, VmaVirtualAllocationInfo& outInfo) -{ - outInfo.offset = (VkDeviceSize)allocHandle - 1; - const VmaSuballocation& suballoc = *FindAtOffset(outInfo.offset); - outInfo.size = suballoc.size; - outInfo.pUserData = suballoc.userData; -} - -void* VmaBlockMetadata_Generic::GetAllocationUserData(VmaAllocHandle allocHandle) const -{ - return FindAtOffset((VkDeviceSize)allocHandle - 1)->userData; -} - -VmaAllocHandle VmaBlockMetadata_Generic::GetAllocationListBegin() const -{ - if (IsEmpty()) - return VK_NULL_HANDLE; - - for (const auto& suballoc : m_Suballocations) - { - if (suballoc.type != VMA_SUBALLOCATION_TYPE_FREE) - return (VmaAllocHandle)(suballoc.offset + 1); - } - VMA_ASSERT(false && "Should contain at least 1 allocation!"); - return VK_NULL_HANDLE; -} - -VmaAllocHandle VmaBlockMetadata_Generic::GetNextAllocation(VmaAllocHandle prevAlloc) const -{ - VmaSuballocationList::const_iterator prev = FindAtOffset((VkDeviceSize)prevAlloc - 1); - - for (VmaSuballocationList::const_iterator it = ++prev; it != m_Suballocations.end(); ++it) - { - if (it->type != VMA_SUBALLOCATION_TYPE_FREE) - return (VmaAllocHandle)(it->offset + 1); - } - return VK_NULL_HANDLE; -} - -void VmaBlockMetadata_Generic::Clear() -{ - const VkDeviceSize size = GetSize(); - - VMA_ASSERT(IsVirtual()); - m_FreeCount = 1; - m_SumFreeSize = size; - m_Suballocations.clear(); - m_FreeSuballocationsBySize.clear(); - - VmaSuballocation suballoc = {}; - suballoc.offset = 0; - suballoc.size = size; - suballoc.type = VMA_SUBALLOCATION_TYPE_FREE; - m_Suballocations.push_back(suballoc); - - m_FreeSuballocationsBySize.push_back(m_Suballocations.begin()); -} - -void VmaBlockMetadata_Generic::SetAllocationUserData(VmaAllocHandle allocHandle, void* userData) -{ - VmaSuballocation& suballoc = *FindAtOffset((VkDeviceSize)allocHandle - 1); - suballoc.userData = userData; -} - -void VmaBlockMetadata_Generic::DebugLogAllAllocations() const -{ - for (const auto& suballoc : m_Suballocations) - { - if (suballoc.type != VMA_SUBALLOCATION_TYPE_FREE) - DebugLogAllocation(suballoc.offset, suballoc.size, suballoc.userData); - } -} - -VmaSuballocationList::iterator VmaBlockMetadata_Generic::FindAtOffset(VkDeviceSize offset) const -{ - VMA_HEAVY_ASSERT(!m_Suballocations.empty()); - const VkDeviceSize last = m_Suballocations.rbegin()->offset; - if (last == offset) - return 
m_Suballocations.rbegin().drop_const(); - const VkDeviceSize first = m_Suballocations.begin()->offset; - if (first == offset) - return m_Suballocations.begin().drop_const(); - - const size_t suballocCount = m_Suballocations.size(); - const VkDeviceSize step = (last - first + m_Suballocations.begin()->size) / suballocCount; - auto findSuballocation = [&](auto begin, auto end) -> VmaSuballocationList::iterator - { - for (auto suballocItem = begin; - suballocItem != end; - ++suballocItem) - { - if (suballocItem->offset == offset) - return suballocItem.drop_const(); - } - VMA_ASSERT(false && "Not found!"); - return m_Suballocations.end().drop_const(); - }; - // If requested offset is closer to the end of range, search from the end - if (offset - first > suballocCount * step / 2) - { - return findSuballocation(m_Suballocations.rbegin(), m_Suballocations.rend()); - } - return findSuballocation(m_Suballocations.begin(), m_Suballocations.end()); -} - -bool VmaBlockMetadata_Generic::ValidateFreeSuballocationList() const -{ - VkDeviceSize lastSize = 0; - for (size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i) - { - const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i]; - - VMA_VALIDATE(it->type == VMA_SUBALLOCATION_TYPE_FREE); - VMA_VALIDATE(it->size >= lastSize); - lastSize = it->size; - } - return true; -} - -bool VmaBlockMetadata_Generic::CheckAllocation( - VkDeviceSize allocSize, - VkDeviceSize allocAlignment, - VmaSuballocationType allocType, - VmaSuballocationList::const_iterator suballocItem, - VmaAllocHandle* pAllocHandle) const -{ - VMA_ASSERT(allocSize > 0); - VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE); - VMA_ASSERT(suballocItem != m_Suballocations.cend()); - VMA_ASSERT(pAllocHandle != VMA_NULL); - - const VkDeviceSize debugMargin = GetDebugMargin(); - const VkDeviceSize bufferImageGranularity = GetBufferImageGranularity(); - - const VmaSuballocation& suballoc = *suballocItem; - VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE); - - // Size of this suballocation is too small for this request: Early return. - if (suballoc.size < allocSize) - { - return false; - } - - // Start from offset equal to beginning of this suballocation. - VkDeviceSize offset = suballoc.offset + (suballocItem == m_Suballocations.cbegin() ? 0 : GetDebugMargin()); - - // Apply debugMargin from the end of previous alloc. - if (debugMargin > 0) - { - offset += debugMargin; - } - - // Apply alignment. - offset = VmaAlignUp(offset, allocAlignment); - - // Check previous suballocations for BufferImageGranularity conflicts. - // Make bigger alignment if necessary. - if (bufferImageGranularity > 1 && bufferImageGranularity != allocAlignment) - { - bool bufferImageGranularityConflict = false; - VmaSuballocationList::const_iterator prevSuballocItem = suballocItem; - while (prevSuballocItem != m_Suballocations.cbegin()) - { - --prevSuballocItem; - const VmaSuballocation& prevSuballoc = *prevSuballocItem; - if (VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, offset, bufferImageGranularity)) - { - if (VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType)) - { - bufferImageGranularityConflict = true; - break; - } - } - else - // Already on previous page. - break; - } - if (bufferImageGranularityConflict) - { - offset = VmaAlignUp(offset, bufferImageGranularity); - } - } - - // Calculate padding at the beginning based on current offset. 
- const VkDeviceSize paddingBegin = offset - suballoc.offset; - - // Fail if requested size plus margin after is bigger than size of this suballocation. - if (paddingBegin + allocSize + debugMargin > suballoc.size) - { - return false; - } - - // Check next suballocations for BufferImageGranularity conflicts. - // If conflict exists, allocation cannot be made here. - if (allocSize % bufferImageGranularity || offset % bufferImageGranularity) - { - VmaSuballocationList::const_iterator nextSuballocItem = suballocItem; - ++nextSuballocItem; - while (nextSuballocItem != m_Suballocations.cend()) - { - const VmaSuballocation& nextSuballoc = *nextSuballocItem; - if (VmaBlocksOnSamePage(offset, allocSize, nextSuballoc.offset, bufferImageGranularity)) - { - if (VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type)) - { - return false; - } - } - else - { - // Already on next page. - break; - } - ++nextSuballocItem; - } - } - - *pAllocHandle = (VmaAllocHandle)(offset + 1); - // All tests passed: Success. pAllocHandle is already filled. - return true; -} - -void VmaBlockMetadata_Generic::MergeFreeWithNext(VmaSuballocationList::iterator item) -{ - VMA_ASSERT(item != m_Suballocations.end()); - VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE); - - VmaSuballocationList::iterator nextItem = item; - ++nextItem; - VMA_ASSERT(nextItem != m_Suballocations.end()); - VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE); - - item->size += nextItem->size; - --m_FreeCount; - m_Suballocations.erase(nextItem); -} - -VmaSuballocationList::iterator VmaBlockMetadata_Generic::FreeSuballocation(VmaSuballocationList::iterator suballocItem) -{ - // Change this suballocation to be marked as free. - VmaSuballocation& suballoc = *suballocItem; - suballoc.type = VMA_SUBALLOCATION_TYPE_FREE; - suballoc.userData = VMA_NULL; - - // Update totals. - ++m_FreeCount; - m_SumFreeSize += suballoc.size; - - // Merge with previous and/or next suballocation if it's also free. - bool mergeWithNext = false; - bool mergeWithPrev = false; - - VmaSuballocationList::iterator nextItem = suballocItem; - ++nextItem; - if ((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE)) - { - mergeWithNext = true; - } - - VmaSuballocationList::iterator prevItem = suballocItem; - if (suballocItem != m_Suballocations.begin()) - { - --prevItem; - if (prevItem->type == VMA_SUBALLOCATION_TYPE_FREE) - { - mergeWithPrev = true; - } - } - - if (mergeWithNext) - { - UnregisterFreeSuballocation(nextItem); - MergeFreeWithNext(suballocItem); - } - - if (mergeWithPrev) - { - UnregisterFreeSuballocation(prevItem); - MergeFreeWithNext(prevItem); - RegisterFreeSuballocation(prevItem); - return prevItem; - } - else - { - RegisterFreeSuballocation(suballocItem); - return suballocItem; - } -} - -void VmaBlockMetadata_Generic::RegisterFreeSuballocation(VmaSuballocationList::iterator item) -{ - VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE); - VMA_ASSERT(item->size > 0); - - // You may want to enable this validation at the beginning or at the end of - // this function, depending on what do you want to check. 
- VMA_HEAVY_ASSERT(ValidateFreeSuballocationList()); - - if (m_FreeSuballocationsBySize.empty()) - { - m_FreeSuballocationsBySize.push_back(item); - } - else - { - VmaVectorInsertSorted(m_FreeSuballocationsBySize, item); - } - - //VMA_HEAVY_ASSERT(ValidateFreeSuballocationList()); -} - -void VmaBlockMetadata_Generic::UnregisterFreeSuballocation(VmaSuballocationList::iterator item) -{ - VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE); - VMA_ASSERT(item->size > 0); - - // You may want to enable this validation at the beginning or at the end of - // this function, depending on what do you want to check. - VMA_HEAVY_ASSERT(ValidateFreeSuballocationList()); - - VmaSuballocationList::iterator* const it = VmaBinaryFindFirstNotLess( - m_FreeSuballocationsBySize.data(), - m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(), - item, - VmaSuballocationItemSizeLess()); - for (size_t index = it - m_FreeSuballocationsBySize.data(); - index < m_FreeSuballocationsBySize.size(); - ++index) - { - if (m_FreeSuballocationsBySize[index] == item) - { - VmaVectorRemove(m_FreeSuballocationsBySize, index); - return; - } - VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) && "Not found."); - } - VMA_ASSERT(0 && "Not found."); - - //VMA_HEAVY_ASSERT(ValidateFreeSuballocationList()); -} -#endif // _VMA_BLOCK_METADATA_GENERIC_FUNCTIONS -#endif // _VMA_BLOCK_METADATA_GENERIC -#endif // #if 0 - -#ifndef _VMA_BLOCK_METADATA_LINEAR -/* -Allocations and their references in internal data structure look like this: - -if(m_2ndVectorMode == SECOND_VECTOR_EMPTY): - - 0 +-------+ - | | - | | - | | - +-------+ - | Alloc | 1st[m_1stNullItemsBeginCount] - +-------+ - | Alloc | 1st[m_1stNullItemsBeginCount + 1] - +-------+ - | ... | - +-------+ - | Alloc | 1st[1st.size() - 1] - +-------+ - | | - | | - | | -GetSize() +-------+ - -if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER): - - 0 +-------+ - | Alloc | 2nd[0] - +-------+ - | Alloc | 2nd[1] - +-------+ - | ... | - +-------+ - | Alloc | 2nd[2nd.size() - 1] - +-------+ - | | - | | - | | - +-------+ - | Alloc | 1st[m_1stNullItemsBeginCount] - +-------+ - | Alloc | 1st[m_1stNullItemsBeginCount + 1] - +-------+ - | ... | - +-------+ - | Alloc | 1st[1st.size() - 1] - +-------+ - | | -GetSize() +-------+ - -if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK): - - 0 +-------+ - | | - | | - | | - +-------+ - | Alloc | 1st[m_1stNullItemsBeginCount] - +-------+ - | Alloc | 1st[m_1stNullItemsBeginCount + 1] - +-------+ - | ... | - +-------+ - | Alloc | 1st[1st.size() - 1] - +-------+ - | | - | | - | | - +-------+ - | Alloc | 2nd[2nd.size() - 1] - +-------+ - | ... 
| - +-------+ - | Alloc | 2nd[1] - +-------+ - | Alloc | 2nd[0] -GetSize() +-------+ - -*/ -class VmaBlockMetadata_Linear : public VmaBlockMetadata -{ - VMA_CLASS_NO_COPY(VmaBlockMetadata_Linear) -public: - VmaBlockMetadata_Linear(const VkAllocationCallbacks* pAllocationCallbacks, - VkDeviceSize bufferImageGranularity, bool isVirtual); - virtual ~VmaBlockMetadata_Linear() = default; - - VkDeviceSize GetSumFreeSize() const override { return m_SumFreeSize; } - bool IsEmpty() const override { return GetAllocationCount() == 0; } - VkDeviceSize GetAllocationOffset(VmaAllocHandle allocHandle) const override { return (VkDeviceSize)allocHandle - 1; }; - - void Init(VkDeviceSize size) override; - bool Validate() const override; - size_t GetAllocationCount() const override; - size_t GetFreeRegionsCount() const override; - - void AddDetailedStatistics(VmaDetailedStatistics& inoutStats) const override; - void AddStatistics(VmaStatistics& inoutStats) const override; - -#if VMA_STATS_STRING_ENABLED - void PrintDetailedMap(class VmaJsonWriter& json) const override; -#endif - - bool CreateAllocationRequest( - VkDeviceSize allocSize, - VkDeviceSize allocAlignment, - bool upperAddress, - VmaSuballocationType allocType, - uint32_t strategy, - VmaAllocationRequest* pAllocationRequest) override; - - VkResult CheckCorruption(const void* pBlockData) override; - - void Alloc( - const VmaAllocationRequest& request, - VmaSuballocationType type, - void* userData) override; - - void Free(VmaAllocHandle allocHandle) override; - void GetAllocationInfo(VmaAllocHandle allocHandle, VmaVirtualAllocationInfo& outInfo) override; - void* GetAllocationUserData(VmaAllocHandle allocHandle) const override; - VmaAllocHandle GetAllocationListBegin() const override; - VmaAllocHandle GetNextAllocation(VmaAllocHandle prevAlloc) const override; - VkDeviceSize GetNextFreeRegionSize(VmaAllocHandle alloc) const override; - void Clear() override; - void SetAllocationUserData(VmaAllocHandle allocHandle, void* userData) override; - void DebugLogAllAllocations() const override; - -private: - /* - There are two suballocation vectors, used in ping-pong way. - The one with index m_1stVectorIndex is called 1st. - The one with index (m_1stVectorIndex ^ 1) is called 2nd. - 2nd can be non-empty only when 1st is not empty. - When 2nd is not empty, m_2ndVectorMode indicates its mode of operation. - */ - typedef VmaVector> SuballocationVectorType; - - enum SECOND_VECTOR_MODE - { - SECOND_VECTOR_EMPTY, - /* - Suballocations in 2nd vector are created later than the ones in 1st, but they - all have smaller offset. - */ - SECOND_VECTOR_RING_BUFFER, - /* - Suballocations in 2nd vector are upper side of double stack. - They all have offsets higher than those in 1st vector. - Top of this stack means smaller offsets, but higher indices in this vector. - */ - SECOND_VECTOR_DOUBLE_STACK, - }; - - VkDeviceSize m_SumFreeSize; - SuballocationVectorType m_Suballocations0, m_Suballocations1; - uint32_t m_1stVectorIndex; - SECOND_VECTOR_MODE m_2ndVectorMode; - // Number of items in 1st vector with hAllocation = null at the beginning. - size_t m_1stNullItemsBeginCount; - // Number of other items in 1st vector with hAllocation = null somewhere in the middle. - size_t m_1stNullItemsMiddleCount; - // Number of items in 2nd vector with hAllocation = null. - size_t m_2ndNullItemsCount; - - SuballocationVectorType& AccessSuballocations1st() { return m_1stVectorIndex ? 
m_Suballocations1 : m_Suballocations0; } - SuballocationVectorType& AccessSuballocations2nd() { return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; } - const SuballocationVectorType& AccessSuballocations1st() const { return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; } - const SuballocationVectorType& AccessSuballocations2nd() const { return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; } - - VmaSuballocation& FindSuballocation(VkDeviceSize offset) const; - bool ShouldCompact1st() const; - void CleanupAfterFree(); - - bool CreateAllocationRequest_LowerAddress( - VkDeviceSize allocSize, - VkDeviceSize allocAlignment, - VmaSuballocationType allocType, - uint32_t strategy, - VmaAllocationRequest* pAllocationRequest); - bool CreateAllocationRequest_UpperAddress( - VkDeviceSize allocSize, - VkDeviceSize allocAlignment, - VmaSuballocationType allocType, - uint32_t strategy, - VmaAllocationRequest* pAllocationRequest); -}; - -#ifndef _VMA_BLOCK_METADATA_LINEAR_FUNCTIONS -VmaBlockMetadata_Linear::VmaBlockMetadata_Linear(const VkAllocationCallbacks* pAllocationCallbacks, - VkDeviceSize bufferImageGranularity, bool isVirtual) - : VmaBlockMetadata(pAllocationCallbacks, bufferImageGranularity, isVirtual), - m_SumFreeSize(0), - m_Suballocations0(VmaStlAllocator(pAllocationCallbacks)), - m_Suballocations1(VmaStlAllocator(pAllocationCallbacks)), - m_1stVectorIndex(0), - m_2ndVectorMode(SECOND_VECTOR_EMPTY), - m_1stNullItemsBeginCount(0), - m_1stNullItemsMiddleCount(0), - m_2ndNullItemsCount(0) {} - -void VmaBlockMetadata_Linear::Init(VkDeviceSize size) -{ - VmaBlockMetadata::Init(size); - m_SumFreeSize = size; -} - -bool VmaBlockMetadata_Linear::Validate() const -{ - const SuballocationVectorType& suballocations1st = AccessSuballocations1st(); - const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd(); - - VMA_VALIDATE(suballocations2nd.empty() == (m_2ndVectorMode == SECOND_VECTOR_EMPTY)); - VMA_VALIDATE(!suballocations1st.empty() || - suballocations2nd.empty() || - m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER); - - if (!suballocations1st.empty()) - { - // Null item at the beginning should be accounted into m_1stNullItemsBeginCount. - VMA_VALIDATE(suballocations1st[m_1stNullItemsBeginCount].type != VMA_SUBALLOCATION_TYPE_FREE); - // Null item at the end should be just pop_back(). - VMA_VALIDATE(suballocations1st.back().type != VMA_SUBALLOCATION_TYPE_FREE); - } - if (!suballocations2nd.empty()) - { - // Null item at the end should be just pop_back(). 
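As a side note on the AccessSuballocations1st/2nd helpers defined further up: the two vectors are used in a ping-pong fashion, and flipping m_1stVectorIndex with XOR (as CleanupAfterFree does later) swaps their roles in O(1) without moving any elements. A minimal standalone sketch of that selection trick, with illustrative names:

    #include <cassert>
    #include <vector>

    struct PingPong
    {
        std::vector<int> storage[2]; // two physical vectors
        unsigned firstIndex = 0;     // which one currently plays the role of "1st"

        std::vector<int>& First()  { return storage[firstIndex]; }
        std::vector<int>& Second() { return storage[firstIndex ^ 1]; }

        // Swap roles in O(1): the old "2nd" becomes the new "1st".
        void SwapRoles() { firstIndex ^= 1; }
    };

    int main()
    {
        PingPong p;
        p.First().push_back(42);
        p.SwapRoles();
        assert(p.Second().size() == 1 && p.First().empty());
        return 0;
    }
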
- VMA_VALIDATE(suballocations2nd.back().type != VMA_SUBALLOCATION_TYPE_FREE); - } - - VMA_VALIDATE(m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount <= suballocations1st.size()); - VMA_VALIDATE(m_2ndNullItemsCount <= suballocations2nd.size()); - - VkDeviceSize sumUsedSize = 0; - const size_t suballoc1stCount = suballocations1st.size(); - const VkDeviceSize debugMargin = GetDebugMargin(); - VkDeviceSize offset = 0; - - if (m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER) - { - const size_t suballoc2ndCount = suballocations2nd.size(); - size_t nullItem2ndCount = 0; - for (size_t i = 0; i < suballoc2ndCount; ++i) - { - const VmaSuballocation& suballoc = suballocations2nd[i]; - const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE); - - VmaAllocation const alloc = (VmaAllocation)suballoc.userData; - if (!IsVirtual()) - { - VMA_VALIDATE(currFree == (alloc == VK_NULL_HANDLE)); - } - VMA_VALIDATE(suballoc.offset >= offset); - - if (!currFree) - { - if (!IsVirtual()) - { - VMA_VALIDATE((VkDeviceSize)alloc->GetAllocHandle() == suballoc.offset + 1); - VMA_VALIDATE(alloc->GetSize() == suballoc.size); - } - sumUsedSize += suballoc.size; - } - else - { - ++nullItem2ndCount; - } - - offset = suballoc.offset + suballoc.size + debugMargin; - } - - VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount); - } - - for (size_t i = 0; i < m_1stNullItemsBeginCount; ++i) - { - const VmaSuballocation& suballoc = suballocations1st[i]; - VMA_VALIDATE(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE && - suballoc.userData == VMA_NULL); - } - - size_t nullItem1stCount = m_1stNullItemsBeginCount; - - for (size_t i = m_1stNullItemsBeginCount; i < suballoc1stCount; ++i) - { - const VmaSuballocation& suballoc = suballocations1st[i]; - const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE); - - VmaAllocation const alloc = (VmaAllocation)suballoc.userData; - if (!IsVirtual()) - { - VMA_VALIDATE(currFree == (alloc == VK_NULL_HANDLE)); - } - VMA_VALIDATE(suballoc.offset >= offset); - VMA_VALIDATE(i >= m_1stNullItemsBeginCount || currFree); - - if (!currFree) - { - if (!IsVirtual()) - { - VMA_VALIDATE((VkDeviceSize)alloc->GetAllocHandle() == suballoc.offset + 1); - VMA_VALIDATE(alloc->GetSize() == suballoc.size); - } - sumUsedSize += suballoc.size; - } - else - { - ++nullItem1stCount; - } - - offset = suballoc.offset + suballoc.size + debugMargin; - } - VMA_VALIDATE(nullItem1stCount == m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount); - - if (m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK) - { - const size_t suballoc2ndCount = suballocations2nd.size(); - size_t nullItem2ndCount = 0; - for (size_t i = suballoc2ndCount; i--; ) - { - const VmaSuballocation& suballoc = suballocations2nd[i]; - const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE); - - VmaAllocation const alloc = (VmaAllocation)suballoc.userData; - if (!IsVirtual()) - { - VMA_VALIDATE(currFree == (alloc == VK_NULL_HANDLE)); - } - VMA_VALIDATE(suballoc.offset >= offset); - - if (!currFree) - { - if (!IsVirtual()) - { - VMA_VALIDATE((VkDeviceSize)alloc->GetAllocHandle() == suballoc.offset + 1); - VMA_VALIDATE(alloc->GetSize() == suballoc.size); - } - sumUsedSize += suballoc.size; - } - else - { - ++nullItem2ndCount; - } - - offset = suballoc.offset + suballoc.size + debugMargin; - } - - VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount); - } - - VMA_VALIDATE(offset <= GetSize()); - VMA_VALIDATE(m_SumFreeSize == GetSize() - sumUsedSize); - - return true; -} - -size_t VmaBlockMetadata_Linear::GetAllocationCount() const -{ - 
return AccessSuballocations1st().size() - m_1stNullItemsBeginCount - m_1stNullItemsMiddleCount + - AccessSuballocations2nd().size() - m_2ndNullItemsCount; -} - -size_t VmaBlockMetadata_Linear::GetFreeRegionsCount() const -{ - // Function only used for defragmentation, which is disabled for this algorithm - VMA_ASSERT(0); - return SIZE_MAX; -} - -void VmaBlockMetadata_Linear::AddDetailedStatistics(VmaDetailedStatistics& inoutStats) const -{ - const VkDeviceSize size = GetSize(); - const SuballocationVectorType& suballocations1st = AccessSuballocations1st(); - const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd(); - const size_t suballoc1stCount = suballocations1st.size(); - const size_t suballoc2ndCount = suballocations2nd.size(); - - inoutStats.statistics.blockCount++; - inoutStats.statistics.blockBytes += size; - - VkDeviceSize lastOffset = 0; - - if (m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER) - { - const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset; - size_t nextAlloc2ndIndex = 0; - while (lastOffset < freeSpace2ndTo1stEnd) - { - // Find next non-null allocation or move nextAllocIndex to the end. - while (nextAlloc2ndIndex < suballoc2ndCount && - suballocations2nd[nextAlloc2ndIndex].userData == VMA_NULL) - { - ++nextAlloc2ndIndex; - } - - // Found non-null allocation. - if (nextAlloc2ndIndex < suballoc2ndCount) - { - const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex]; - - // 1. Process free space before this allocation. - if (lastOffset < suballoc.offset) - { - // There is free space from lastOffset to suballoc.offset. - const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset; - VmaAddDetailedStatisticsUnusedRange(inoutStats, unusedRangeSize); - } - - // 2. Process this allocation. - // There is allocation with suballoc.offset, suballoc.size. - VmaAddDetailedStatisticsAllocation(inoutStats, suballoc.size); - - // 3. Prepare for next iteration. - lastOffset = suballoc.offset + suballoc.size; - ++nextAlloc2ndIndex; - } - // We are at the end. - else - { - // There is free space from lastOffset to freeSpace2ndTo1stEnd. - if (lastOffset < freeSpace2ndTo1stEnd) - { - const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset; - VmaAddDetailedStatisticsUnusedRange(inoutStats, unusedRangeSize); - } - - // End of loop. - lastOffset = freeSpace2ndTo1stEnd; - } - } - } - - size_t nextAlloc1stIndex = m_1stNullItemsBeginCount; - const VkDeviceSize freeSpace1stTo2ndEnd = - m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size; - while (lastOffset < freeSpace1stTo2ndEnd) - { - // Find next non-null allocation or move nextAllocIndex to the end. - while (nextAlloc1stIndex < suballoc1stCount && - suballocations1st[nextAlloc1stIndex].userData == VMA_NULL) - { - ++nextAlloc1stIndex; - } - - // Found non-null allocation. - if (nextAlloc1stIndex < suballoc1stCount) - { - const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex]; - - // 1. Process free space before this allocation. - if (lastOffset < suballoc.offset) - { - // There is free space from lastOffset to suballoc.offset. - const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset; - VmaAddDetailedStatisticsUnusedRange(inoutStats, unusedRangeSize); - } - - // 2. Process this allocation. - // There is allocation with suballoc.offset, suballoc.size. - VmaAddDetailedStatisticsAllocation(inoutStats, suballoc.size); - - // 3. Prepare for next iteration. 
- lastOffset = suballoc.offset + suballoc.size; - ++nextAlloc1stIndex; - } - // We are at the end. - else - { - // There is free space from lastOffset to freeSpace1stTo2ndEnd. - if (lastOffset < freeSpace1stTo2ndEnd) - { - const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset; - VmaAddDetailedStatisticsUnusedRange(inoutStats, unusedRangeSize); - } - - // End of loop. - lastOffset = freeSpace1stTo2ndEnd; - } - } - - if (m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK) - { - size_t nextAlloc2ndIndex = suballocations2nd.size() - 1; - while (lastOffset < size) - { - // Find next non-null allocation or move nextAllocIndex to the end. - while (nextAlloc2ndIndex != SIZE_MAX && - suballocations2nd[nextAlloc2ndIndex].userData == VMA_NULL) - { - --nextAlloc2ndIndex; - } - - // Found non-null allocation. - if (nextAlloc2ndIndex != SIZE_MAX) - { - const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex]; - - // 1. Process free space before this allocation. - if (lastOffset < suballoc.offset) - { - // There is free space from lastOffset to suballoc.offset. - const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset; - VmaAddDetailedStatisticsUnusedRange(inoutStats, unusedRangeSize); - } - - // 2. Process this allocation. - // There is allocation with suballoc.offset, suballoc.size. - VmaAddDetailedStatisticsAllocation(inoutStats, suballoc.size); - - // 3. Prepare for next iteration. - lastOffset = suballoc.offset + suballoc.size; - --nextAlloc2ndIndex; - } - // We are at the end. - else - { - // There is free space from lastOffset to size. - if (lastOffset < size) - { - const VkDeviceSize unusedRangeSize = size - lastOffset; - VmaAddDetailedStatisticsUnusedRange(inoutStats, unusedRangeSize); - } - - // End of loop. - lastOffset = size; - } - } - } -} - -void VmaBlockMetadata_Linear::AddStatistics(VmaStatistics& inoutStats) const -{ - const SuballocationVectorType& suballocations1st = AccessSuballocations1st(); - const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd(); - const VkDeviceSize size = GetSize(); - const size_t suballoc1stCount = suballocations1st.size(); - const size_t suballoc2ndCount = suballocations2nd.size(); - - inoutStats.blockCount++; - inoutStats.blockBytes += size; - inoutStats.allocationBytes += size - m_SumFreeSize; - - VkDeviceSize lastOffset = 0; - - if (m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER) - { - const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset; - size_t nextAlloc2ndIndex = m_1stNullItemsBeginCount; - while (lastOffset < freeSpace2ndTo1stEnd) - { - // Find next non-null allocation or move nextAlloc2ndIndex to the end. - while (nextAlloc2ndIndex < suballoc2ndCount && - suballocations2nd[nextAlloc2ndIndex].userData == VMA_NULL) - { - ++nextAlloc2ndIndex; - } - - // Found non-null allocation. - if (nextAlloc2ndIndex < suballoc2ndCount) - { - const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex]; - - // 1. Process free space before this allocation. - if (lastOffset < suballoc.offset) - { - // There is free space from lastOffset to suballoc.offset. - const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset; - } - - // 2. Process this allocation. - // There is allocation with suballoc.offset, suballoc.size. - ++inoutStats.allocationCount; - - // 3. Prepare for next iteration. - lastOffset = suballoc.offset + suballoc.size; - ++nextAlloc2ndIndex; - } - // We are at the end. 
- else - { - if (lastOffset < freeSpace2ndTo1stEnd) - { - // There is free space from lastOffset to freeSpace2ndTo1stEnd. - const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset; - } - - // End of loop. - lastOffset = freeSpace2ndTo1stEnd; - } - } - } - - size_t nextAlloc1stIndex = m_1stNullItemsBeginCount; - const VkDeviceSize freeSpace1stTo2ndEnd = - m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size; - while (lastOffset < freeSpace1stTo2ndEnd) - { - // Find next non-null allocation or move nextAllocIndex to the end. - while (nextAlloc1stIndex < suballoc1stCount && - suballocations1st[nextAlloc1stIndex].userData == VMA_NULL) - { - ++nextAlloc1stIndex; - } - - // Found non-null allocation. - if (nextAlloc1stIndex < suballoc1stCount) - { - const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex]; - - // 1. Process free space before this allocation. - if (lastOffset < suballoc.offset) - { - // There is free space from lastOffset to suballoc.offset. - const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset; - } - - // 2. Process this allocation. - // There is allocation with suballoc.offset, suballoc.size. - ++inoutStats.allocationCount; - - // 3. Prepare for next iteration. - lastOffset = suballoc.offset + suballoc.size; - ++nextAlloc1stIndex; - } - // We are at the end. - else - { - if (lastOffset < freeSpace1stTo2ndEnd) - { - // There is free space from lastOffset to freeSpace1stTo2ndEnd. - const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset; - } - - // End of loop. - lastOffset = freeSpace1stTo2ndEnd; - } - } - - if (m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK) - { - size_t nextAlloc2ndIndex = suballocations2nd.size() - 1; - while (lastOffset < size) - { - // Find next non-null allocation or move nextAlloc2ndIndex to the end. - while (nextAlloc2ndIndex != SIZE_MAX && - suballocations2nd[nextAlloc2ndIndex].userData == VMA_NULL) - { - --nextAlloc2ndIndex; - } - - // Found non-null allocation. - if (nextAlloc2ndIndex != SIZE_MAX) - { - const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex]; - - // 1. Process free space before this allocation. - if (lastOffset < suballoc.offset) - { - // There is free space from lastOffset to suballoc.offset. - const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset; - } - - // 2. Process this allocation. - // There is allocation with suballoc.offset, suballoc.size. - ++inoutStats.allocationCount; - - // 3. Prepare for next iteration. - lastOffset = suballoc.offset + suballoc.size; - --nextAlloc2ndIndex; - } - // We are at the end. - else - { - if (lastOffset < size) - { - // There is free space from lastOffset to size. - const VkDeviceSize unusedRangeSize = size - lastOffset; - } - - // End of loop. 
- lastOffset = size; - } - } - } -} - -#if VMA_STATS_STRING_ENABLED -void VmaBlockMetadata_Linear::PrintDetailedMap(class VmaJsonWriter& json) const -{ - const VkDeviceSize size = GetSize(); - const SuballocationVectorType& suballocations1st = AccessSuballocations1st(); - const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd(); - const size_t suballoc1stCount = suballocations1st.size(); - const size_t suballoc2ndCount = suballocations2nd.size(); - - // FIRST PASS - - size_t unusedRangeCount = 0; - VkDeviceSize usedBytes = 0; - - VkDeviceSize lastOffset = 0; - - size_t alloc2ndCount = 0; - if (m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER) - { - const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset; - size_t nextAlloc2ndIndex = 0; - while (lastOffset < freeSpace2ndTo1stEnd) - { - // Find next non-null allocation or move nextAlloc2ndIndex to the end. - while (nextAlloc2ndIndex < suballoc2ndCount && - suballocations2nd[nextAlloc2ndIndex].userData == VMA_NULL) - { - ++nextAlloc2ndIndex; - } - - // Found non-null allocation. - if (nextAlloc2ndIndex < suballoc2ndCount) - { - const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex]; - - // 1. Process free space before this allocation. - if (lastOffset < suballoc.offset) - { - // There is free space from lastOffset to suballoc.offset. - ++unusedRangeCount; - } - - // 2. Process this allocation. - // There is allocation with suballoc.offset, suballoc.size. - ++alloc2ndCount; - usedBytes += suballoc.size; - - // 3. Prepare for next iteration. - lastOffset = suballoc.offset + suballoc.size; - ++nextAlloc2ndIndex; - } - // We are at the end. - else - { - if (lastOffset < freeSpace2ndTo1stEnd) - { - // There is free space from lastOffset to freeSpace2ndTo1stEnd. - ++unusedRangeCount; - } - - // End of loop. - lastOffset = freeSpace2ndTo1stEnd; - } - } - } - - size_t nextAlloc1stIndex = m_1stNullItemsBeginCount; - size_t alloc1stCount = 0; - const VkDeviceSize freeSpace1stTo2ndEnd = - m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size; - while (lastOffset < freeSpace1stTo2ndEnd) - { - // Find next non-null allocation or move nextAllocIndex to the end. - while (nextAlloc1stIndex < suballoc1stCount && - suballocations1st[nextAlloc1stIndex].userData == VMA_NULL) - { - ++nextAlloc1stIndex; - } - - // Found non-null allocation. - if (nextAlloc1stIndex < suballoc1stCount) - { - const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex]; - - // 1. Process free space before this allocation. - if (lastOffset < suballoc.offset) - { - // There is free space from lastOffset to suballoc.offset. - ++unusedRangeCount; - } - - // 2. Process this allocation. - // There is allocation with suballoc.offset, suballoc.size. - ++alloc1stCount; - usedBytes += suballoc.size; - - // 3. Prepare for next iteration. - lastOffset = suballoc.offset + suballoc.size; - ++nextAlloc1stIndex; - } - // We are at the end. - else - { - if (lastOffset < size) - { - // There is free space from lastOffset to freeSpace1stTo2ndEnd. - ++unusedRangeCount; - } - - // End of loop. - lastOffset = freeSpace1stTo2ndEnd; - } - } - - if (m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK) - { - size_t nextAlloc2ndIndex = suballocations2nd.size() - 1; - while (lastOffset < size) - { - // Find next non-null allocation or move nextAlloc2ndIndex to the end. 
- while (nextAlloc2ndIndex != SIZE_MAX && - suballocations2nd[nextAlloc2ndIndex].userData == VMA_NULL) - { - --nextAlloc2ndIndex; - } - - // Found non-null allocation. - if (nextAlloc2ndIndex != SIZE_MAX) - { - const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex]; - - // 1. Process free space before this allocation. - if (lastOffset < suballoc.offset) - { - // There is free space from lastOffset to suballoc.offset. - ++unusedRangeCount; - } - - // 2. Process this allocation. - // There is allocation with suballoc.offset, suballoc.size. - ++alloc2ndCount; - usedBytes += suballoc.size; - - // 3. Prepare for next iteration. - lastOffset = suballoc.offset + suballoc.size; - --nextAlloc2ndIndex; - } - // We are at the end. - else - { - if (lastOffset < size) - { - // There is free space from lastOffset to size. - ++unusedRangeCount; - } - - // End of loop. - lastOffset = size; - } - } - } - - const VkDeviceSize unusedBytes = size - usedBytes; - PrintDetailedMap_Begin(json, unusedBytes, alloc1stCount + alloc2ndCount, unusedRangeCount); - - // SECOND PASS - lastOffset = 0; - - if (m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER) - { - const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset; - size_t nextAlloc2ndIndex = 0; - while (lastOffset < freeSpace2ndTo1stEnd) - { - // Find next non-null allocation or move nextAlloc2ndIndex to the end. - while (nextAlloc2ndIndex < suballoc2ndCount && - suballocations2nd[nextAlloc2ndIndex].userData == VMA_NULL) - { - ++nextAlloc2ndIndex; - } - - // Found non-null allocation. - if (nextAlloc2ndIndex < suballoc2ndCount) - { - const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex]; - - // 1. Process free space before this allocation. - if (lastOffset < suballoc.offset) - { - // There is free space from lastOffset to suballoc.offset. - const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset; - PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize); - } - - // 2. Process this allocation. - // There is allocation with suballoc.offset, suballoc.size. - PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.size, suballoc.userData); - - // 3. Prepare for next iteration. - lastOffset = suballoc.offset + suballoc.size; - ++nextAlloc2ndIndex; - } - // We are at the end. - else - { - if (lastOffset < freeSpace2ndTo1stEnd) - { - // There is free space from lastOffset to freeSpace2ndTo1stEnd. - const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset; - PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize); - } - - // End of loop. - lastOffset = freeSpace2ndTo1stEnd; - } - } - } - - nextAlloc1stIndex = m_1stNullItemsBeginCount; - while (lastOffset < freeSpace1stTo2ndEnd) - { - // Find next non-null allocation or move nextAllocIndex to the end. - while (nextAlloc1stIndex < suballoc1stCount && - suballocations1st[nextAlloc1stIndex].userData == VMA_NULL) - { - ++nextAlloc1stIndex; - } - - // Found non-null allocation. - if (nextAlloc1stIndex < suballoc1stCount) - { - const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex]; - - // 1. Process free space before this allocation. - if (lastOffset < suballoc.offset) - { - // There is free space from lastOffset to suballoc.offset. - const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset; - PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize); - } - - // 2. Process this allocation. - // There is allocation with suballoc.offset, suballoc.size. 
- PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.size, suballoc.userData); - - // 3. Prepare for next iteration. - lastOffset = suballoc.offset + suballoc.size; - ++nextAlloc1stIndex; - } - // We are at the end. - else - { - if (lastOffset < freeSpace1stTo2ndEnd) - { - // There is free space from lastOffset to freeSpace1stTo2ndEnd. - const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset; - PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize); - } - - // End of loop. - lastOffset = freeSpace1stTo2ndEnd; - } - } - - if (m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK) - { - size_t nextAlloc2ndIndex = suballocations2nd.size() - 1; - while (lastOffset < size) - { - // Find next non-null allocation or move nextAlloc2ndIndex to the end. - while (nextAlloc2ndIndex != SIZE_MAX && - suballocations2nd[nextAlloc2ndIndex].userData == VMA_NULL) - { - --nextAlloc2ndIndex; - } - - // Found non-null allocation. - if (nextAlloc2ndIndex != SIZE_MAX) - { - const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex]; - - // 1. Process free space before this allocation. - if (lastOffset < suballoc.offset) - { - // There is free space from lastOffset to suballoc.offset. - const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset; - PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize); - } - - // 2. Process this allocation. - // There is allocation with suballoc.offset, suballoc.size. - PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.size, suballoc.userData); - - // 3. Prepare for next iteration. - lastOffset = suballoc.offset + suballoc.size; - --nextAlloc2ndIndex; - } - // We are at the end. - else - { - if (lastOffset < size) - { - // There is free space from lastOffset to size. - const VkDeviceSize unusedRangeSize = size - lastOffset; - PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize); - } - - // End of loop. - lastOffset = size; - } - } - } - - PrintDetailedMap_End(json); -} -#endif // VMA_STATS_STRING_ENABLED - -bool VmaBlockMetadata_Linear::CreateAllocationRequest( - VkDeviceSize allocSize, - VkDeviceSize allocAlignment, - bool upperAddress, - VmaSuballocationType allocType, - uint32_t strategy, - VmaAllocationRequest* pAllocationRequest) -{ - VMA_ASSERT(allocSize > 0); - VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE); - VMA_ASSERT(pAllocationRequest != VMA_NULL); - VMA_HEAVY_ASSERT(Validate()); - pAllocationRequest->size = allocSize; - return upperAddress ? 
- CreateAllocationRequest_UpperAddress( - allocSize, allocAlignment, allocType, strategy, pAllocationRequest) : - CreateAllocationRequest_LowerAddress( - allocSize, allocAlignment, allocType, strategy, pAllocationRequest); -} - -VkResult VmaBlockMetadata_Linear::CheckCorruption(const void* pBlockData) -{ - VMA_ASSERT(!IsVirtual()); - SuballocationVectorType& suballocations1st = AccessSuballocations1st(); - for (size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i) - { - const VmaSuballocation& suballoc = suballocations1st[i]; - if (suballoc.type != VMA_SUBALLOCATION_TYPE_FREE) - { - if (!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size)) - { - VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!"); - return VK_ERROR_UNKNOWN_COPY; - } - } - } - - SuballocationVectorType& suballocations2nd = AccessSuballocations2nd(); - for (size_t i = 0, count = suballocations2nd.size(); i < count; ++i) - { - const VmaSuballocation& suballoc = suballocations2nd[i]; - if (suballoc.type != VMA_SUBALLOCATION_TYPE_FREE) - { - if (!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size)) - { - VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!"); - return VK_ERROR_UNKNOWN_COPY; - } - } - } - - return VK_SUCCESS; -} - -void VmaBlockMetadata_Linear::Alloc( - const VmaAllocationRequest& request, - VmaSuballocationType type, - void* userData) -{ - const VkDeviceSize offset = (VkDeviceSize)request.allocHandle - 1; - const VmaSuballocation newSuballoc = { offset, request.size, userData, type }; - - switch (request.type) - { - case VmaAllocationRequestType::UpperAddress: - { - VMA_ASSERT(m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER && - "CRITICAL ERROR: Trying to use linear allocator as double stack while it was already used as ring buffer."); - SuballocationVectorType& suballocations2nd = AccessSuballocations2nd(); - suballocations2nd.push_back(newSuballoc); - m_2ndVectorMode = SECOND_VECTOR_DOUBLE_STACK; - } - break; - case VmaAllocationRequestType::EndOf1st: - { - SuballocationVectorType& suballocations1st = AccessSuballocations1st(); - - VMA_ASSERT(suballocations1st.empty() || - offset >= suballocations1st.back().offset + suballocations1st.back().size); - // Check if it fits before the end of the block. - VMA_ASSERT(offset + request.size <= GetSize()); - - suballocations1st.push_back(newSuballoc); - } - break; - case VmaAllocationRequestType::EndOf2nd: - { - SuballocationVectorType& suballocations1st = AccessSuballocations1st(); - // New allocation at the end of 2-part ring buffer, so before first allocation from 1st vector. - VMA_ASSERT(!suballocations1st.empty() && - offset + request.size <= suballocations1st[m_1stNullItemsBeginCount].offset); - SuballocationVectorType& suballocations2nd = AccessSuballocations2nd(); - - switch (m_2ndVectorMode) - { - case SECOND_VECTOR_EMPTY: - // First allocation from second part ring buffer. - VMA_ASSERT(suballocations2nd.empty()); - m_2ndVectorMode = SECOND_VECTOR_RING_BUFFER; - break; - case SECOND_VECTOR_RING_BUFFER: - // 2-part ring buffer is already started. 
- VMA_ASSERT(!suballocations2nd.empty()); - break; - case SECOND_VECTOR_DOUBLE_STACK: - VMA_ASSERT(0 && "CRITICAL ERROR: Trying to use linear allocator as ring buffer while it was already used as double stack."); - break; - default: - VMA_ASSERT(0); - } - - suballocations2nd.push_back(newSuballoc); - } - break; - default: - VMA_ASSERT(0 && "CRITICAL INTERNAL ERROR."); - } - - m_SumFreeSize -= newSuballoc.size; -} - -void VmaBlockMetadata_Linear::Free(VmaAllocHandle allocHandle) -{ - SuballocationVectorType& suballocations1st = AccessSuballocations1st(); - SuballocationVectorType& suballocations2nd = AccessSuballocations2nd(); - VkDeviceSize offset = (VkDeviceSize)allocHandle - 1; - - if (!suballocations1st.empty()) - { - // First allocation: Mark it as next empty at the beginning. - VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount]; - if (firstSuballoc.offset == offset) - { - firstSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE; - firstSuballoc.userData = VMA_NULL; - m_SumFreeSize += firstSuballoc.size; - ++m_1stNullItemsBeginCount; - CleanupAfterFree(); - return; - } - } - - // Last allocation in 2-part ring buffer or top of upper stack (same logic). - if (m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER || - m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK) - { - VmaSuballocation& lastSuballoc = suballocations2nd.back(); - if (lastSuballoc.offset == offset) - { - m_SumFreeSize += lastSuballoc.size; - suballocations2nd.pop_back(); - CleanupAfterFree(); - return; - } - } - // Last allocation in 1st vector. - else if (m_2ndVectorMode == SECOND_VECTOR_EMPTY) - { - VmaSuballocation& lastSuballoc = suballocations1st.back(); - if (lastSuballoc.offset == offset) - { - m_SumFreeSize += lastSuballoc.size; - suballocations1st.pop_back(); - CleanupAfterFree(); - return; - } - } - - VmaSuballocation refSuballoc; - refSuballoc.offset = offset; - // Rest of members stays uninitialized intentionally for better performance. - - // Item from the middle of 1st vector. - { - const SuballocationVectorType::iterator it = VmaBinaryFindSorted( - suballocations1st.begin() + m_1stNullItemsBeginCount, - suballocations1st.end(), - refSuballoc, - VmaSuballocationOffsetLess()); - if (it != suballocations1st.end()) - { - it->type = VMA_SUBALLOCATION_TYPE_FREE; - it->userData = VMA_NULL; - ++m_1stNullItemsMiddleCount; - m_SumFreeSize += it->size; - CleanupAfterFree(); - return; - } - } - - if (m_2ndVectorMode != SECOND_VECTOR_EMPTY) - { - // Item from the middle of 2nd vector. - const SuballocationVectorType::iterator it = m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ? 
- VmaBinaryFindSorted(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc, VmaSuballocationOffsetLess()) : - VmaBinaryFindSorted(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc, VmaSuballocationOffsetGreater()); - if (it != suballocations2nd.end()) - { - it->type = VMA_SUBALLOCATION_TYPE_FREE; - it->userData = VMA_NULL; - ++m_2ndNullItemsCount; - m_SumFreeSize += it->size; - CleanupAfterFree(); - return; - } - } - - VMA_ASSERT(0 && "Allocation to free not found in linear allocator!"); -} - -void VmaBlockMetadata_Linear::GetAllocationInfo(VmaAllocHandle allocHandle, VmaVirtualAllocationInfo& outInfo) -{ - outInfo.offset = (VkDeviceSize)allocHandle - 1; - VmaSuballocation& suballoc = FindSuballocation(outInfo.offset); - outInfo.size = suballoc.size; - outInfo.pUserData = suballoc.userData; -} - -void* VmaBlockMetadata_Linear::GetAllocationUserData(VmaAllocHandle allocHandle) const -{ - return FindSuballocation((VkDeviceSize)allocHandle - 1).userData; -} - -VmaAllocHandle VmaBlockMetadata_Linear::GetAllocationListBegin() const -{ - // Function only used for defragmentation, which is disabled for this algorithm - VMA_ASSERT(0); - return VK_NULL_HANDLE; -} - -VmaAllocHandle VmaBlockMetadata_Linear::GetNextAllocation(VmaAllocHandle prevAlloc) const -{ - // Function only used for defragmentation, which is disabled for this algorithm - VMA_ASSERT(0); - return VK_NULL_HANDLE; -} - -VkDeviceSize VmaBlockMetadata_Linear::GetNextFreeRegionSize(VmaAllocHandle alloc) const -{ - // Function only used for defragmentation, which is disabled for this algorithm - VMA_ASSERT(0); - return 0; -} - -void VmaBlockMetadata_Linear::Clear() -{ - m_SumFreeSize = GetSize(); - m_Suballocations0.clear(); - m_Suballocations1.clear(); - // Leaving m_1stVectorIndex unchanged - it doesn't matter. - m_2ndVectorMode = SECOND_VECTOR_EMPTY; - m_1stNullItemsBeginCount = 0; - m_1stNullItemsMiddleCount = 0; - m_2ndNullItemsCount = 0; -} - -void VmaBlockMetadata_Linear::SetAllocationUserData(VmaAllocHandle allocHandle, void* userData) -{ - VmaSuballocation& suballoc = FindSuballocation((VkDeviceSize)allocHandle - 1); - suballoc.userData = userData; -} - -void VmaBlockMetadata_Linear::DebugLogAllAllocations() const -{ - const SuballocationVectorType& suballocations1st = AccessSuballocations1st(); - for (auto it = suballocations1st.begin() + m_1stNullItemsBeginCount; it != suballocations1st.end(); ++it) - if (it->type != VMA_SUBALLOCATION_TYPE_FREE) - DebugLogAllocation(it->offset, it->size, it->userData); - - const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd(); - for (auto it = suballocations2nd.begin(); it != suballocations2nd.end(); ++it) - if (it->type != VMA_SUBALLOCATION_TYPE_FREE) - DebugLogAllocation(it->offset, it->size, it->userData); -} - -VmaSuballocation& VmaBlockMetadata_Linear::FindSuballocation(VkDeviceSize offset) const -{ - const SuballocationVectorType& suballocations1st = AccessSuballocations1st(); - const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd(); - - VmaSuballocation refSuballoc; - refSuballoc.offset = offset; - // Rest of members stays uninitialized intentionally for better performance. - - // Item from the 1st vector. 
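Both lookups in Free above, and the FindSuballocation lookup that follows, are plain binary searches over vectors sorted by offset; the only twist is that the 2nd vector is sorted ascending in ring-buffer mode and descending in double-stack mode, hence the two comparators. An equivalent standalone sketch using std::lower_bound (Sub, FindAscending and FindDescending are illustrative; VmaBinaryFindSorted is VMA's internal helper):

    #include <algorithm>
    #include <cstdint>
    #include <vector>

    struct Sub { uint64_t offset; uint64_t size; };

    // Binary search in a vector sorted by ascending offset.
    const Sub* FindAscending(const std::vector<Sub>& v, uint64_t offset)
    {
        auto it = std::lower_bound(v.begin(), v.end(), offset,
            [](const Sub& s, uint64_t off) { return s.offset < off; });
        return (it != v.end() && it->offset == offset) ? &*it : nullptr;
    }

    // Binary search in a vector sorted by descending offset (double-stack mode).
    const Sub* FindDescending(const std::vector<Sub>& v, uint64_t offset)
    {
        auto it = std::lower_bound(v.begin(), v.end(), offset,
            [](const Sub& s, uint64_t off) { return s.offset > off; });
        return (it != v.end() && it->offset == offset) ? &*it : nullptr;
    }
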
- { - SuballocationVectorType::const_iterator it = VmaBinaryFindSorted( - suballocations1st.begin() + m_1stNullItemsBeginCount, - suballocations1st.end(), - refSuballoc, - VmaSuballocationOffsetLess()); - if (it != suballocations1st.end()) - { - return const_cast(*it); - } - } - - if (m_2ndVectorMode != SECOND_VECTOR_EMPTY) - { - // Rest of members stays uninitialized intentionally for better performance. - SuballocationVectorType::const_iterator it = m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ? - VmaBinaryFindSorted(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc, VmaSuballocationOffsetLess()) : - VmaBinaryFindSorted(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc, VmaSuballocationOffsetGreater()); - if (it != suballocations2nd.end()) - { - return const_cast(*it); - } - } - - VMA_ASSERT(0 && "Allocation not found in linear allocator!"); - return const_cast(suballocations1st.back()); // Should never occur. -} - -bool VmaBlockMetadata_Linear::ShouldCompact1st() const -{ - const size_t nullItemCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount; - const size_t suballocCount = AccessSuballocations1st().size(); - return suballocCount > 32 && nullItemCount * 2 >= (suballocCount - nullItemCount) * 3; -} - -void VmaBlockMetadata_Linear::CleanupAfterFree() -{ - SuballocationVectorType& suballocations1st = AccessSuballocations1st(); - SuballocationVectorType& suballocations2nd = AccessSuballocations2nd(); - - if (IsEmpty()) - { - suballocations1st.clear(); - suballocations2nd.clear(); - m_1stNullItemsBeginCount = 0; - m_1stNullItemsMiddleCount = 0; - m_2ndNullItemsCount = 0; - m_2ndVectorMode = SECOND_VECTOR_EMPTY; - } - else - { - const size_t suballoc1stCount = suballocations1st.size(); - const size_t nullItem1stCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount; - VMA_ASSERT(nullItem1stCount <= suballoc1stCount); - - // Find more null items at the beginning of 1st vector. - while (m_1stNullItemsBeginCount < suballoc1stCount && - suballocations1st[m_1stNullItemsBeginCount].type == VMA_SUBALLOCATION_TYPE_FREE) - { - ++m_1stNullItemsBeginCount; - --m_1stNullItemsMiddleCount; - } - - // Find more null items at the end of 1st vector. - while (m_1stNullItemsMiddleCount > 0 && - suballocations1st.back().type == VMA_SUBALLOCATION_TYPE_FREE) - { - --m_1stNullItemsMiddleCount; - suballocations1st.pop_back(); - } - - // Find more null items at the end of 2nd vector. - while (m_2ndNullItemsCount > 0 && - suballocations2nd.back().type == VMA_SUBALLOCATION_TYPE_FREE) - { - --m_2ndNullItemsCount; - suballocations2nd.pop_back(); - } - - // Find more null items at the beginning of 2nd vector. - while (m_2ndNullItemsCount > 0 && - suballocations2nd[0].type == VMA_SUBALLOCATION_TYPE_FREE) - { - --m_2ndNullItemsCount; - VmaVectorRemove(suballocations2nd, 0); - } - - if (ShouldCompact1st()) - { - const size_t nonNullItemCount = suballoc1stCount - nullItem1stCount; - size_t srcIndex = m_1stNullItemsBeginCount; - for (size_t dstIndex = 0; dstIndex < nonNullItemCount; ++dstIndex) - { - while (suballocations1st[srcIndex].type == VMA_SUBALLOCATION_TYPE_FREE) - { - ++srcIndex; - } - if (dstIndex != srcIndex) - { - suballocations1st[dstIndex] = suballocations1st[srcIndex]; - } - ++srcIndex; - } - suballocations1st.resize(nonNullItemCount); - m_1stNullItemsBeginCount = 0; - m_1stNullItemsMiddleCount = 0; - } - - // 2nd vector became empty. - if (suballocations2nd.empty()) - { - m_2ndVectorMode = SECOND_VECTOR_EMPTY; - } - - // 1st vector became empty. 
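For intuition on ShouldCompact1st above: it only fires once the 1st vector holds more than 32 entries and freed ("null") entries clearly dominate. For example, 60 entries of which 40 are null gives 40 * 2 = 80 >= (60 - 40) * 3 = 60, so compaction runs. The compaction pass itself is the classic stable in-place filter; an equivalent standalone sketch with illustrative types:

    #include <algorithm>
    #include <cstdint>
    #include <vector>

    struct Sub { uint64_t offset; uint64_t size; bool isFree; };

    // Decide whether freed ("null") entries dominate enough to justify compacting.
    bool ShouldCompact(size_t totalCount, size_t nullCount)
    {
        return totalCount > 32 && nullCount * 2 >= (totalCount - nullCount) * 3;
    }

    // Stable in-place removal of freed entries, preserving the order of live ones.
    void Compact(std::vector<Sub>& v)
    {
        v.erase(std::remove_if(v.begin(), v.end(),
                               [](const Sub& s) { return s.isFree; }),
                v.end());
    }
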
- if (suballocations1st.size() - m_1stNullItemsBeginCount == 0) - { - suballocations1st.clear(); - m_1stNullItemsBeginCount = 0; - - if (!suballocations2nd.empty() && m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER) - { - // Swap 1st with 2nd. Now 2nd is empty. - m_2ndVectorMode = SECOND_VECTOR_EMPTY; - m_1stNullItemsMiddleCount = m_2ndNullItemsCount; - while (m_1stNullItemsBeginCount < suballocations2nd.size() && - suballocations2nd[m_1stNullItemsBeginCount].type == VMA_SUBALLOCATION_TYPE_FREE) - { - ++m_1stNullItemsBeginCount; - --m_1stNullItemsMiddleCount; - } - m_2ndNullItemsCount = 0; - m_1stVectorIndex ^= 1; - } - } - } - - VMA_HEAVY_ASSERT(Validate()); -} - -bool VmaBlockMetadata_Linear::CreateAllocationRequest_LowerAddress( - VkDeviceSize allocSize, - VkDeviceSize allocAlignment, - VmaSuballocationType allocType, - uint32_t strategy, - VmaAllocationRequest* pAllocationRequest) -{ - const VkDeviceSize blockSize = GetSize(); - const VkDeviceSize debugMargin = GetDebugMargin(); - const VkDeviceSize bufferImageGranularity = GetBufferImageGranularity(); - SuballocationVectorType& suballocations1st = AccessSuballocations1st(); - SuballocationVectorType& suballocations2nd = AccessSuballocations2nd(); - - if (m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK) - { - // Try to allocate at the end of 1st vector. - - VkDeviceSize resultBaseOffset = 0; - if (!suballocations1st.empty()) - { - const VmaSuballocation& lastSuballoc = suballocations1st.back(); - resultBaseOffset = lastSuballoc.offset + lastSuballoc.size + debugMargin; - } - - // Start from offset equal to beginning of free space. - VkDeviceSize resultOffset = resultBaseOffset; - - // Apply alignment. - resultOffset = VmaAlignUp(resultOffset, allocAlignment); - - // Check previous suballocations for BufferImageGranularity conflicts. - // Make bigger alignment if necessary. - if (bufferImageGranularity > 1 && bufferImageGranularity != allocAlignment && !suballocations1st.empty()) - { - bool bufferImageGranularityConflict = false; - for (size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; ) - { - const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex]; - if (VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity)) - { - if (VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType)) - { - bufferImageGranularityConflict = true; - break; - } - } - else - // Already on previous page. - break; - } - if (bufferImageGranularityConflict) - { - resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity); - } - } - - const VkDeviceSize freeSpaceEnd = m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? - suballocations2nd.back().offset : blockSize; - - // There is enough free space at the end after alignment. - if (resultOffset + allocSize + debugMargin <= freeSpaceEnd) - { - // Check next suballocations for BufferImageGranularity conflicts. - // If conflict exists, allocation cannot be made here. 
- if ((allocSize % bufferImageGranularity || resultOffset % bufferImageGranularity) && m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK) - { - for (size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; ) - { - const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex]; - if (VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity)) - { - if (VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type)) - { - return false; - } - } - else - { - // Already on previous page. - break; - } - } - } - - // All tests passed: Success. - pAllocationRequest->allocHandle = (VmaAllocHandle)(resultOffset + 1); - // pAllocationRequest->item, customData unused. - pAllocationRequest->type = VmaAllocationRequestType::EndOf1st; - return true; - } - } - - // Wrap-around to end of 2nd vector. Try to allocate there, watching for the - // beginning of 1st vector as the end of free space. - if (m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER) - { - VMA_ASSERT(!suballocations1st.empty()); - - VkDeviceSize resultBaseOffset = 0; - if (!suballocations2nd.empty()) - { - const VmaSuballocation& lastSuballoc = suballocations2nd.back(); - resultBaseOffset = lastSuballoc.offset + lastSuballoc.size + debugMargin; - } - - // Start from offset equal to beginning of free space. - VkDeviceSize resultOffset = resultBaseOffset; - - // Apply alignment. - resultOffset = VmaAlignUp(resultOffset, allocAlignment); - - // Check previous suballocations for BufferImageGranularity conflicts. - // Make bigger alignment if necessary. - if (bufferImageGranularity > 1 && bufferImageGranularity != allocAlignment && !suballocations2nd.empty()) - { - bool bufferImageGranularityConflict = false; - for (size_t prevSuballocIndex = suballocations2nd.size(); prevSuballocIndex--; ) - { - const VmaSuballocation& prevSuballoc = suballocations2nd[prevSuballocIndex]; - if (VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity)) - { - if (VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType)) - { - bufferImageGranularityConflict = true; - break; - } - } - else - // Already on previous page. - break; - } - if (bufferImageGranularityConflict) - { - resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity); - } - } - - size_t index1st = m_1stNullItemsBeginCount; - - // There is enough free space at the end after alignment. - if ((index1st == suballocations1st.size() && resultOffset + allocSize + debugMargin <= blockSize) || - (index1st < suballocations1st.size() && resultOffset + allocSize + debugMargin <= suballocations1st[index1st].offset)) - { - // Check next suballocations for BufferImageGranularity conflicts. - // If conflict exists, allocation cannot be made here. - if (allocSize % bufferImageGranularity || resultOffset % bufferImageGranularity) - { - for (size_t nextSuballocIndex = index1st; - nextSuballocIndex < suballocations1st.size(); - nextSuballocIndex++) - { - const VmaSuballocation& nextSuballoc = suballocations1st[nextSuballocIndex]; - if (VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity)) - { - if (VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type)) - { - return false; - } - } - else - { - // Already on next page. - break; - } - } - } - - // All tests passed: Success. 
- pAllocationRequest->allocHandle = (VmaAllocHandle)(resultOffset + 1); - pAllocationRequest->type = VmaAllocationRequestType::EndOf2nd; - // pAllocationRequest->item, customData unused. - return true; - } - } - - return false; -} - -bool VmaBlockMetadata_Linear::CreateAllocationRequest_UpperAddress( - VkDeviceSize allocSize, - VkDeviceSize allocAlignment, - VmaSuballocationType allocType, - uint32_t strategy, - VmaAllocationRequest* pAllocationRequest) -{ - const VkDeviceSize blockSize = GetSize(); - const VkDeviceSize bufferImageGranularity = GetBufferImageGranularity(); - SuballocationVectorType& suballocations1st = AccessSuballocations1st(); - SuballocationVectorType& suballocations2nd = AccessSuballocations2nd(); - - if (m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER) - { - VMA_ASSERT(0 && "Trying to use pool with linear algorithm as double stack, while it is already being used as ring buffer."); - return false; - } - - // Try to allocate before 2nd.back(), or end of block if 2nd.empty(). - if (allocSize > blockSize) - { - return false; - } - VkDeviceSize resultBaseOffset = blockSize - allocSize; - if (!suballocations2nd.empty()) - { - const VmaSuballocation& lastSuballoc = suballocations2nd.back(); - resultBaseOffset = lastSuballoc.offset - allocSize; - if (allocSize > lastSuballoc.offset) - { - return false; - } - } - - // Start from offset equal to end of free space. - VkDeviceSize resultOffset = resultBaseOffset; - - const VkDeviceSize debugMargin = GetDebugMargin(); - - // Apply debugMargin at the end. - if (debugMargin > 0) - { - if (resultOffset < debugMargin) - { - return false; - } - resultOffset -= debugMargin; - } - - // Apply alignment. - resultOffset = VmaAlignDown(resultOffset, allocAlignment); - - // Check next suballocations from 2nd for BufferImageGranularity conflicts. - // Make bigger alignment if necessary. - if (bufferImageGranularity > 1 && bufferImageGranularity != allocAlignment && !suballocations2nd.empty()) - { - bool bufferImageGranularityConflict = false; - for (size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; ) - { - const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex]; - if (VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity)) - { - if (VmaIsBufferImageGranularityConflict(nextSuballoc.type, allocType)) - { - bufferImageGranularityConflict = true; - break; - } - } - else - // Already on previous page. - break; - } - if (bufferImageGranularityConflict) - { - resultOffset = VmaAlignDown(resultOffset, bufferImageGranularity); - } - } - - // There is enough free space. - const VkDeviceSize endOf1st = !suballocations1st.empty() ? - suballocations1st.back().offset + suballocations1st.back().size : - 0; - if (endOf1st + debugMargin <= resultOffset) - { - // Check previous suballocations for BufferImageGranularity conflicts. - // If conflict exists, allocation cannot be made here. - if (bufferImageGranularity > 1) - { - for (size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; ) - { - const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex]; - if (VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity)) - { - if (VmaIsBufferImageGranularityConflict(allocType, prevSuballoc.type)) - { - return false; - } - } - else - { - // Already on next page. - break; - } - } - } - - // All tests passed: Success. 
- pAllocationRequest->allocHandle = (VmaAllocHandle)(resultOffset + 1); - // pAllocationRequest->item unused. - pAllocationRequest->type = VmaAllocationRequestType::UpperAddress; - return true; - } - - return false; -} -#endif // _VMA_BLOCK_METADATA_LINEAR_FUNCTIONS -#endif // _VMA_BLOCK_METADATA_LINEAR - -#if 0 -#ifndef _VMA_BLOCK_METADATA_BUDDY -/* -- GetSize() is the original size of allocated memory block. -- m_UsableSize is this size aligned down to a power of two. - All allocations and calculations happen relative to m_UsableSize. -- GetUnusableSize() is the difference between them. - It is reported as separate, unused range, not available for allocations. - -Node at level 0 has size = m_UsableSize. -Each next level contains nodes with size 2 times smaller than current level. -m_LevelCount is the maximum number of levels to use in the current object. -*/ -class VmaBlockMetadata_Buddy : public VmaBlockMetadata -{ - VMA_CLASS_NO_COPY(VmaBlockMetadata_Buddy) -public: - VmaBlockMetadata_Buddy(const VkAllocationCallbacks* pAllocationCallbacks, - VkDeviceSize bufferImageGranularity, bool isVirtual); - virtual ~VmaBlockMetadata_Buddy(); - - size_t GetAllocationCount() const override { return m_AllocationCount; } - VkDeviceSize GetSumFreeSize() const override { return m_SumFreeSize + GetUnusableSize(); } - bool IsEmpty() const override { return m_Root->type == Node::TYPE_FREE; } - VkResult CheckCorruption(const void* pBlockData) override { return VK_ERROR_FEATURE_NOT_PRESENT; } - VkDeviceSize GetAllocationOffset(VmaAllocHandle allocHandle) const override { return (VkDeviceSize)allocHandle - 1; }; - void DebugLogAllAllocations() const override { DebugLogAllAllocationNode(m_Root, 0); } - - void Init(VkDeviceSize size) override; - bool Validate() const override; - - void AddDetailedStatistics(VmaDetailedStatistics& inoutStats) const override; - void AddStatistics(VmaStatistics& inoutStats) const override; - -#if VMA_STATS_STRING_ENABLED - void PrintDetailedMap(class VmaJsonWriter& json, uint32_t mapRefCount) const override; -#endif - - bool CreateAllocationRequest( - VkDeviceSize allocSize, - VkDeviceSize allocAlignment, - bool upperAddress, - VmaSuballocationType allocType, - uint32_t strategy, - VmaAllocationRequest* pAllocationRequest) override; - - void Alloc( - const VmaAllocationRequest& request, - VmaSuballocationType type, - void* userData) override; - - void Free(VmaAllocHandle allocHandle) override; - void GetAllocationInfo(VmaAllocHandle allocHandle, VmaVirtualAllocationInfo& outInfo) override; - void* GetAllocationUserData(VmaAllocHandle allocHandle) const override; - VmaAllocHandle GetAllocationListBegin() const override; - VmaAllocHandle GetNextAllocation(VmaAllocHandle prevAlloc) const override; - void Clear() override; - void SetAllocationUserData(VmaAllocHandle allocHandle, void* userData) override; - -private: - static const size_t MAX_LEVELS = 48; - - struct ValidationContext - { - size_t calculatedAllocationCount = 0; - size_t calculatedFreeCount = 0; - VkDeviceSize calculatedSumFreeSize = 0; - }; - struct Node - { - VkDeviceSize offset; - enum TYPE - { - TYPE_FREE, - TYPE_ALLOCATION, - TYPE_SPLIT, - TYPE_COUNT - } type; - Node* parent; - Node* buddy; - - union - { - struct - { - Node* prev; - Node* next; - } free; - struct - { - void* userData; - } allocation; - struct - { - Node* leftChild; - } split; - }; - }; - - // Size of the memory block aligned down to a power of two. 
- VkDeviceSize m_UsableSize; - uint32_t m_LevelCount; - VmaPoolAllocator m_NodeAllocator; - Node* m_Root; - struct - { - Node* front; - Node* back; - } m_FreeList[MAX_LEVELS]; - - // Number of nodes in the tree with type == TYPE_ALLOCATION. - size_t m_AllocationCount; - // Number of nodes in the tree with type == TYPE_FREE. - size_t m_FreeCount; - // Doesn't include space wasted due to internal fragmentation - allocation sizes are just aligned up to node sizes. - // Doesn't include unusable size. - VkDeviceSize m_SumFreeSize; - - VkDeviceSize GetUnusableSize() const { return GetSize() - m_UsableSize; } - VkDeviceSize LevelToNodeSize(uint32_t level) const { return m_UsableSize >> level; } - - VkDeviceSize AlignAllocationSize(VkDeviceSize size) const - { - if (!IsVirtual()) - { - size = VmaAlignUp(size, (VkDeviceSize)16); - } - return VmaNextPow2(size); - } - Node* FindAllocationNode(VkDeviceSize offset, uint32_t& outLevel) const; - void DeleteNodeChildren(Node* node); - bool ValidateNode(ValidationContext& ctx, const Node* parent, const Node* curr, uint32_t level, VkDeviceSize levelNodeSize) const; - uint32_t AllocSizeToLevel(VkDeviceSize allocSize) const; - void AddNodeToDetailedStatistics(VmaDetailedStatistics& inoutStats, const Node* node, VkDeviceSize levelNodeSize) const; - // Adds node to the front of FreeList at given level. - // node->type must be FREE. - // node->free.prev, next can be undefined. - void AddToFreeListFront(uint32_t level, Node* node); - // Removes node from FreeList at given level. - // node->type must be FREE. - // node->free.prev, next stay untouched. - void RemoveFromFreeList(uint32_t level, Node* node); - void DebugLogAllAllocationNode(Node* node, uint32_t level) const; - -#if VMA_STATS_STRING_ENABLED - void PrintDetailedMapNode(class VmaJsonWriter& json, const Node* node, VkDeviceSize levelNodeSize) const; -#endif -}; - -#ifndef _VMA_BLOCK_METADATA_BUDDY_FUNCTIONS -VmaBlockMetadata_Buddy::VmaBlockMetadata_Buddy(const VkAllocationCallbacks* pAllocationCallbacks, - VkDeviceSize bufferImageGranularity, bool isVirtual) - : VmaBlockMetadata(pAllocationCallbacks, bufferImageGranularity, isVirtual), - m_NodeAllocator(pAllocationCallbacks, 32), // firstBlockCapacity - m_Root(VMA_NULL), - m_AllocationCount(0), - m_FreeCount(1), - m_SumFreeSize(0) -{ - memset(m_FreeList, 0, sizeof(m_FreeList)); -} - -VmaBlockMetadata_Buddy::~VmaBlockMetadata_Buddy() -{ - DeleteNodeChildren(m_Root); - m_NodeAllocator.Free(m_Root); -} - -void VmaBlockMetadata_Buddy::Init(VkDeviceSize size) -{ - VmaBlockMetadata::Init(size); - - m_UsableSize = VmaPrevPow2(size); - m_SumFreeSize = m_UsableSize; - - // Calculate m_LevelCount. - const VkDeviceSize minNodeSize = IsVirtual() ? 1 : 16; - m_LevelCount = 1; - while (m_LevelCount < MAX_LEVELS && - LevelToNodeSize(m_LevelCount) >= minNodeSize) - { - ++m_LevelCount; - } - - Node* rootNode = m_NodeAllocator.Alloc(); - rootNode->offset = 0; - rootNode->type = Node::TYPE_FREE; - rootNode->parent = VMA_NULL; - rootNode->buddy = VMA_NULL; - - m_Root = rootNode; - AddToFreeListFront(0, rootNode); -} - -bool VmaBlockMetadata_Buddy::Validate() const -{ - // Validate tree. - ValidationContext ctx; - if (!ValidateNode(ctx, VMA_NULL, m_Root, 0, LevelToNodeSize(0))) - { - VMA_VALIDATE(false && "ValidateNode failed."); - } - VMA_VALIDATE(m_AllocationCount == ctx.calculatedAllocationCount); - VMA_VALIDATE(m_SumFreeSize == ctx.calculatedSumFreeSize); - - // Validate free node lists. 
- for (uint32_t level = 0; level < m_LevelCount; ++level) - { - VMA_VALIDATE(m_FreeList[level].front == VMA_NULL || - m_FreeList[level].front->free.prev == VMA_NULL); - - for (Node* node = m_FreeList[level].front; - node != VMA_NULL; - node = node->free.next) - { - VMA_VALIDATE(node->type == Node::TYPE_FREE); - - if (node->free.next == VMA_NULL) - { - VMA_VALIDATE(m_FreeList[level].back == node); - } - else - { - VMA_VALIDATE(node->free.next->free.prev == node); - } - } - } - - // Validate that free lists ar higher levels are empty. - for (uint32_t level = m_LevelCount; level < MAX_LEVELS; ++level) - { - VMA_VALIDATE(m_FreeList[level].front == VMA_NULL && m_FreeList[level].back == VMA_NULL); - } - - return true; -} - -void VmaBlockMetadata_Buddy::AddDetailedStatistics(VmaDetailedStatistics& inoutStats) const -{ - inoutStats.statistics.blockCount++; - inoutStats.statistics.blockBytes += GetSize(); - - AddNodeToDetailedStatistics(inoutStats, m_Root, LevelToNodeSize(0)); - - const VkDeviceSize unusableSize = GetUnusableSize(); - if (unusableSize > 0) - VmaAddDetailedStatisticsUnusedRange(inoutStats, unusableSize); -} - -void VmaBlockMetadata_Buddy::AddStatistics(VmaStatistics& inoutStats) const -{ - inoutStats.blockCount++; - inoutStats.allocationCount += (uint32_t)m_AllocationCount; - inoutStats.blockBytes += GetSize(); - inoutStats.allocationBytes += GetSize() - m_SumFreeSize; -} - -#if VMA_STATS_STRING_ENABLED -void VmaBlockMetadata_Buddy::PrintDetailedMap(class VmaJsonWriter& json, uint32_t mapRefCount) const -{ - VmaDetailedStatistics stats; - VmaClearDetailedStatistics(stats); - AddDetailedStatistics(stats); - - PrintDetailedMap_Begin( - json, - stats.statistics.blockBytes - stats.statistics.allocationBytes, - stats.statistics.allocationCount, - stats.unusedRangeCount, - mapRefCount); - - PrintDetailedMapNode(json, m_Root, LevelToNodeSize(0)); - - const VkDeviceSize unusableSize = GetUnusableSize(); - if (unusableSize > 0) - { - PrintDetailedMap_UnusedRange(json, - m_UsableSize, // offset - unusableSize); // size - } - - PrintDetailedMap_End(json); -} -#endif // VMA_STATS_STRING_ENABLED - -bool VmaBlockMetadata_Buddy::CreateAllocationRequest( - VkDeviceSize allocSize, - VkDeviceSize allocAlignment, - bool upperAddress, - VmaSuballocationType allocType, - uint32_t strategy, - VmaAllocationRequest* pAllocationRequest) -{ - VMA_ASSERT(!upperAddress && "VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT can be used only with linear algorithm."); - - allocSize = AlignAllocationSize(allocSize); - - // Simple way to respect bufferImageGranularity. May be optimized some day. - // Whenever it might be an OPTIMAL image... 
- if (allocType == VMA_SUBALLOCATION_TYPE_UNKNOWN || - allocType == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN || - allocType == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL) - { - allocAlignment = VMA_MAX(allocAlignment, GetBufferImageGranularity()); - allocSize = VmaAlignUp(allocSize, GetBufferImageGranularity()); - } - - if (allocSize > m_UsableSize) - { - return false; - } - - const uint32_t targetLevel = AllocSizeToLevel(allocSize); - for (uint32_t level = targetLevel; level--; ) - { - for (Node* freeNode = m_FreeList[level].front; - freeNode != VMA_NULL; - freeNode = freeNode->free.next) - { - if (freeNode->offset % allocAlignment == 0) - { - pAllocationRequest->type = VmaAllocationRequestType::Normal; - pAllocationRequest->allocHandle = (VmaAllocHandle)(freeNode->offset + 1); - pAllocationRequest->size = allocSize; - pAllocationRequest->customData = (void*)(uintptr_t)level; - return true; - } - } - } - - return false; -} - -void VmaBlockMetadata_Buddy::Alloc( - const VmaAllocationRequest& request, - VmaSuballocationType type, - void* userData) -{ - VMA_ASSERT(request.type == VmaAllocationRequestType::Normal); - - const uint32_t targetLevel = AllocSizeToLevel(request.size); - uint32_t currLevel = (uint32_t)(uintptr_t)request.customData; - - Node* currNode = m_FreeList[currLevel].front; - VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE); - const VkDeviceSize offset = (VkDeviceSize)request.allocHandle - 1; - while (currNode->offset != offset) - { - currNode = currNode->free.next; - VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE); - } - - // Go down, splitting free nodes. - while (currLevel < targetLevel) - { - // currNode is already first free node at currLevel. - // Remove it from list of free nodes at this currLevel. - RemoveFromFreeList(currLevel, currNode); - - const uint32_t childrenLevel = currLevel + 1; - - // Create two free sub-nodes. - Node* leftChild = m_NodeAllocator.Alloc(); - Node* rightChild = m_NodeAllocator.Alloc(); - - leftChild->offset = currNode->offset; - leftChild->type = Node::TYPE_FREE; - leftChild->parent = currNode; - leftChild->buddy = rightChild; - - rightChild->offset = currNode->offset + LevelToNodeSize(childrenLevel); - rightChild->type = Node::TYPE_FREE; - rightChild->parent = currNode; - rightChild->buddy = leftChild; - - // Convert current currNode to split type. - currNode->type = Node::TYPE_SPLIT; - currNode->split.leftChild = leftChild; - - // Add child nodes to free list. Order is important! - AddToFreeListFront(childrenLevel, rightChild); - AddToFreeListFront(childrenLevel, leftChild); - - ++m_FreeCount; - ++currLevel; - currNode = m_FreeList[currLevel].front; - - /* - We can be sure that currNode, as left child of node previously split, - also fulfills the alignment requirement. - */ - } - - // Remove from free list. - VMA_ASSERT(currLevel == targetLevel && - currNode != VMA_NULL && - currNode->type == Node::TYPE_FREE); - RemoveFromFreeList(currLevel, currNode); - - // Convert to allocation node. 
- currNode->type = Node::TYPE_ALLOCATION; - currNode->allocation.userData = userData; - - ++m_AllocationCount; - --m_FreeCount; - m_SumFreeSize -= request.size; -} - -void VmaBlockMetadata_Buddy::GetAllocationInfo(VmaAllocHandle allocHandle, VmaVirtualAllocationInfo& outInfo) -{ - uint32_t level = 0; - outInfo.offset = (VkDeviceSize)allocHandle - 1; - const Node* const node = FindAllocationNode(outInfo.offset, level); - outInfo.size = LevelToNodeSize(level); - outInfo.pUserData = node->allocation.userData; -} - -void* VmaBlockMetadata_Buddy::GetAllocationUserData(VmaAllocHandle allocHandle) const -{ - uint32_t level = 0; - const Node* const node = FindAllocationNode((VkDeviceSize)allocHandle - 1, level); - return node->allocation.userData; -} - -VmaAllocHandle VmaBlockMetadata_Buddy::GetAllocationListBegin() const -{ - // Function only used for defragmentation, which is disabled for this algorithm - return VK_NULL_HANDLE; -} - -VmaAllocHandle VmaBlockMetadata_Buddy::GetNextAllocation(VmaAllocHandle prevAlloc) const -{ - // Function only used for defragmentation, which is disabled for this algorithm - return VK_NULL_HANDLE; -} - -void VmaBlockMetadata_Buddy::DeleteNodeChildren(Node* node) -{ - if (node->type == Node::TYPE_SPLIT) - { - DeleteNodeChildren(node->split.leftChild->buddy); - DeleteNodeChildren(node->split.leftChild); - const VkAllocationCallbacks* allocationCallbacks = GetAllocationCallbacks(); - m_NodeAllocator.Free(node->split.leftChild->buddy); - m_NodeAllocator.Free(node->split.leftChild); - } -} - -void VmaBlockMetadata_Buddy::Clear() -{ - DeleteNodeChildren(m_Root); - m_Root->type = Node::TYPE_FREE; - m_AllocationCount = 0; - m_FreeCount = 1; - m_SumFreeSize = m_UsableSize; -} - -void VmaBlockMetadata_Buddy::SetAllocationUserData(VmaAllocHandle allocHandle, void* userData) -{ - uint32_t level = 0; - Node* const node = FindAllocationNode((VkDeviceSize)allocHandle - 1, level); - node->allocation.userData = userData; -} - -VmaBlockMetadata_Buddy::Node* VmaBlockMetadata_Buddy::FindAllocationNode(VkDeviceSize offset, uint32_t& outLevel) const -{ - Node* node = m_Root; - VkDeviceSize nodeOffset = 0; - outLevel = 0; - VkDeviceSize levelNodeSize = LevelToNodeSize(0); - while (node->type == Node::TYPE_SPLIT) - { - const VkDeviceSize nextLevelNodeSize = levelNodeSize >> 1; - if (offset < nodeOffset + nextLevelNodeSize) - { - node = node->split.leftChild; - } - else - { - node = node->split.leftChild->buddy; - nodeOffset += nextLevelNodeSize; - } - ++outLevel; - levelNodeSize = nextLevelNodeSize; - } - - VMA_ASSERT(node != VMA_NULL && node->type == Node::TYPE_ALLOCATION); - return node; -} - -bool VmaBlockMetadata_Buddy::ValidateNode(ValidationContext& ctx, const Node* parent, const Node* curr, uint32_t level, VkDeviceSize levelNodeSize) const -{ - VMA_VALIDATE(level < m_LevelCount); - VMA_VALIDATE(curr->parent == parent); - VMA_VALIDATE((curr->buddy == VMA_NULL) == (parent == VMA_NULL)); - VMA_VALIDATE(curr->buddy == VMA_NULL || curr->buddy->buddy == curr); - switch (curr->type) - { - case Node::TYPE_FREE: - // curr->free.prev, next are validated separately. 
- ctx.calculatedSumFreeSize += levelNodeSize; - ++ctx.calculatedFreeCount; - break; - case Node::TYPE_ALLOCATION: - ++ctx.calculatedAllocationCount; - if (!IsVirtual()) - { - VMA_VALIDATE(curr->allocation.userData != VMA_NULL); - } - break; - case Node::TYPE_SPLIT: - { - const uint32_t childrenLevel = level + 1; - const VkDeviceSize childrenLevelNodeSize = levelNodeSize >> 1; - const Node* const leftChild = curr->split.leftChild; - VMA_VALIDATE(leftChild != VMA_NULL); - VMA_VALIDATE(leftChild->offset == curr->offset); - if (!ValidateNode(ctx, curr, leftChild, childrenLevel, childrenLevelNodeSize)) - { - VMA_VALIDATE(false && "ValidateNode for left child failed."); - } - const Node* const rightChild = leftChild->buddy; - VMA_VALIDATE(rightChild->offset == curr->offset + childrenLevelNodeSize); - if (!ValidateNode(ctx, curr, rightChild, childrenLevel, childrenLevelNodeSize)) - { - VMA_VALIDATE(false && "ValidateNode for right child failed."); - } - } - break; - default: - return false; - } - - return true; -} - -uint32_t VmaBlockMetadata_Buddy::AllocSizeToLevel(VkDeviceSize allocSize) const -{ - // I know this could be optimized somehow e.g. by using std::log2p1 from C++20. - uint32_t level = 0; - VkDeviceSize currLevelNodeSize = m_UsableSize; - VkDeviceSize nextLevelNodeSize = currLevelNodeSize >> 1; - while (allocSize <= nextLevelNodeSize && level + 1 < m_LevelCount) - { - ++level; - currLevelNodeSize >>= 1; - nextLevelNodeSize >>= 1; - } - return level; -} - -void VmaBlockMetadata_Buddy::Free(VmaAllocHandle allocHandle) -{ - uint32_t level = 0; - Node* node = FindAllocationNode((VkDeviceSize)allocHandle - 1, level); - - ++m_FreeCount; - --m_AllocationCount; - m_SumFreeSize += LevelToNodeSize(level); - - node->type = Node::TYPE_FREE; - - // Join free nodes if possible. - while (level > 0 && node->buddy->type == Node::TYPE_FREE) - { - RemoveFromFreeList(level, node->buddy); - Node* const parent = node->parent; - - m_NodeAllocator.Free(node->buddy); - m_NodeAllocator.Free(node); - parent->type = Node::TYPE_FREE; - - node = parent; - --level; - --m_FreeCount; - } - - AddToFreeListFront(level, node); -} - -void VmaBlockMetadata_Buddy::AddNodeToDetailedStatistics(VmaDetailedStatistics& inoutStats, const Node* node, VkDeviceSize levelNodeSize) const -{ - switch (node->type) - { - case Node::TYPE_FREE: - VmaAddDetailedStatisticsUnusedRange(inoutStats, levelNodeSize); - break; - case Node::TYPE_ALLOCATION: - VmaAddDetailedStatisticsAllocation(inoutStats, levelNodeSize); - break; - case Node::TYPE_SPLIT: - { - const VkDeviceSize childrenNodeSize = levelNodeSize / 2; - const Node* const leftChild = node->split.leftChild; - AddNodeToDetailedStatistics(inoutStats, leftChild, childrenNodeSize); - const Node* const rightChild = leftChild->buddy; - AddNodeToDetailedStatistics(inoutStats, rightChild, childrenNodeSize); - } - break; - default: - VMA_ASSERT(0); - } -} - -void VmaBlockMetadata_Buddy::AddToFreeListFront(uint32_t level, Node* node) -{ - VMA_ASSERT(node->type == Node::TYPE_FREE); - - // List is empty. 
- Node* const frontNode = m_FreeList[level].front; - if (frontNode == VMA_NULL) - { - VMA_ASSERT(m_FreeList[level].back == VMA_NULL); - node->free.prev = node->free.next = VMA_NULL; - m_FreeList[level].front = m_FreeList[level].back = node; - } - else - { - VMA_ASSERT(frontNode->free.prev == VMA_NULL); - node->free.prev = VMA_NULL; - node->free.next = frontNode; - frontNode->free.prev = node; - m_FreeList[level].front = node; - } -} - -void VmaBlockMetadata_Buddy::RemoveFromFreeList(uint32_t level, Node* node) -{ - VMA_ASSERT(m_FreeList[level].front != VMA_NULL); - - // It is at the front. - if (node->free.prev == VMA_NULL) - { - VMA_ASSERT(m_FreeList[level].front == node); - m_FreeList[level].front = node->free.next; - } - else - { - Node* const prevFreeNode = node->free.prev; - VMA_ASSERT(prevFreeNode->free.next == node); - prevFreeNode->free.next = node->free.next; - } - - // It is at the back. - if (node->free.next == VMA_NULL) - { - VMA_ASSERT(m_FreeList[level].back == node); - m_FreeList[level].back = node->free.prev; - } - else - { - Node* const nextFreeNode = node->free.next; - VMA_ASSERT(nextFreeNode->free.prev == node); - nextFreeNode->free.prev = node->free.prev; - } -} - -void VmaBlockMetadata_Buddy::DebugLogAllAllocationNode(Node* node, uint32_t level) const -{ - switch (node->type) - { - case Node::TYPE_FREE: - break; - case Node::TYPE_ALLOCATION: - DebugLogAllocation(node->offset, LevelToNodeSize(level), node->allocation.userData); - break; - case Node::TYPE_SPLIT: - { - ++level; - DebugLogAllAllocationNode(node->split.leftChild, level); - DebugLogAllAllocationNode(node->split.leftChild->buddy, level); - } - break; - default: - VMA_ASSERT(0); - } -} - -#if VMA_STATS_STRING_ENABLED -void VmaBlockMetadata_Buddy::PrintDetailedMapNode(class VmaJsonWriter& json, const Node* node, VkDeviceSize levelNodeSize) const -{ - switch (node->type) - { - case Node::TYPE_FREE: - PrintDetailedMap_UnusedRange(json, node->offset, levelNodeSize); - break; - case Node::TYPE_ALLOCATION: - PrintDetailedMap_Allocation(json, node->offset, levelNodeSize, node->allocation.userData); - break; - case Node::TYPE_SPLIT: - { - const VkDeviceSize childrenNodeSize = levelNodeSize / 2; - const Node* const leftChild = node->split.leftChild; - PrintDetailedMapNode(json, leftChild, childrenNodeSize); - const Node* const rightChild = leftChild->buddy; - PrintDetailedMapNode(json, rightChild, childrenNodeSize); - } - break; - default: - VMA_ASSERT(0); - } -} -#endif // VMA_STATS_STRING_ENABLED -#endif // _VMA_BLOCK_METADATA_BUDDY_FUNCTIONS -#endif // _VMA_BLOCK_METADATA_BUDDY -#endif // #if 0 - -#ifndef _VMA_BLOCK_METADATA_TLSF -// To not search current larger region if first allocation won't succeed and skip to smaller range -// use with VMA_ALLOCATION_CREATE_STRATEGY_MIN_MEMORY_BIT as strategy in CreateAllocationRequest(). -// When fragmentation and reusal of previous blocks doesn't matter then use with -// VMA_ALLOCATION_CREATE_STRATEGY_MIN_TIME_BIT for fastest alloc time possible. 
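// The two strategy bits referenced in the comment above are requested through VMA's
// public allocation API rather than on this metadata class directly. A minimal,
// illustrative sketch only (not part of this patch): it assumes an already created
// VmaAllocator named `allocator` and placeholder buffer parameters.
//
//     #include "vk_mem_alloc.h"
//
//     VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
//     bufInfo.size  = 64 * 1024;
//     bufInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
//
//     VmaAllocationCreateInfo allocInfo = {};
//     allocInfo.usage = VMA_MEMORY_USAGE_AUTO;
//     // MIN_TIME_BIT favors the quick larger-bucket probe described above (fastest alloc),
//     // MIN_MEMORY_BIT starts from the best-fit bucket to reduce fragmentation.
//     allocInfo.flags = VMA_ALLOCATION_CREATE_STRATEGY_MIN_TIME_BIT;
//
//     VkBuffer buffer = VK_NULL_HANDLE;
//     VmaAllocation allocation = VK_NULL_HANDLE;
//     VkResult res = vmaCreateBuffer(allocator, &bufInfo, &allocInfo, &buffer, &allocation, nullptr);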
-class VmaBlockMetadata_TLSF : public VmaBlockMetadata -{ - VMA_CLASS_NO_COPY(VmaBlockMetadata_TLSF) -public: - VmaBlockMetadata_TLSF(const VkAllocationCallbacks* pAllocationCallbacks, - VkDeviceSize bufferImageGranularity, bool isVirtual); - virtual ~VmaBlockMetadata_TLSF(); - - size_t GetAllocationCount() const override { return m_AllocCount; } - size_t GetFreeRegionsCount() const override { return m_BlocksFreeCount + 1; } - VkDeviceSize GetSumFreeSize() const override { return m_BlocksFreeSize + m_NullBlock->size; } - bool IsEmpty() const override { return m_NullBlock->offset == 0; } - VkDeviceSize GetAllocationOffset(VmaAllocHandle allocHandle) const override { return ((Block*)allocHandle)->offset; }; - - void Init(VkDeviceSize size) override; - bool Validate() const override; - - void AddDetailedStatistics(VmaDetailedStatistics& inoutStats) const override; - void AddStatistics(VmaStatistics& inoutStats) const override; - -#if VMA_STATS_STRING_ENABLED - void PrintDetailedMap(class VmaJsonWriter& json) const override; -#endif - - bool CreateAllocationRequest( - VkDeviceSize allocSize, - VkDeviceSize allocAlignment, - bool upperAddress, - VmaSuballocationType allocType, - uint32_t strategy, - VmaAllocationRequest* pAllocationRequest) override; - - VkResult CheckCorruption(const void* pBlockData) override; - void Alloc( - const VmaAllocationRequest& request, - VmaSuballocationType type, - void* userData) override; - - void Free(VmaAllocHandle allocHandle) override; - void GetAllocationInfo(VmaAllocHandle allocHandle, VmaVirtualAllocationInfo& outInfo) override; - void* GetAllocationUserData(VmaAllocHandle allocHandle) const override; - VmaAllocHandle GetAllocationListBegin() const override; - VmaAllocHandle GetNextAllocation(VmaAllocHandle prevAlloc) const override; - VkDeviceSize GetNextFreeRegionSize(VmaAllocHandle alloc) const override; - void Clear() override; - void SetAllocationUserData(VmaAllocHandle allocHandle, void* userData) override; - void DebugLogAllAllocations() const override; - -private: - // According to original paper it should be preferable 4 or 5: - // M. Masmano, I. Ripoll, A. Crespo, and J. 
Real "TLSF: a New Dynamic Memory Allocator for Real-Time Systems" - // http://www.gii.upv.es/tlsf/files/ecrts04_tlsf.pdf - static const uint8_t SECOND_LEVEL_INDEX = 5; - static const uint16_t SMALL_BUFFER_SIZE = 256; - static const uint32_t INITIAL_BLOCK_ALLOC_COUNT = 16; - static const uint8_t MEMORY_CLASS_SHIFT = 7; - static const uint8_t MAX_MEMORY_CLASSES = 65 - MEMORY_CLASS_SHIFT; - - class Block - { - public: - VkDeviceSize offset; - VkDeviceSize size; - Block* prevPhysical; - Block* nextPhysical; - - void MarkFree() { prevFree = VMA_NULL; } - void MarkTaken() { prevFree = this; } - bool IsFree() const { return prevFree != this; } - void*& UserData() { VMA_HEAVY_ASSERT(!IsFree()); return userData; } - Block*& PrevFree() { return prevFree; } - Block*& NextFree() { VMA_HEAVY_ASSERT(IsFree()); return nextFree; } - - private: - Block* prevFree; // Address of the same block here indicates that block is taken - union - { - Block* nextFree; - void* userData; - }; - }; - - size_t m_AllocCount; - // Total number of free blocks besides null block - size_t m_BlocksFreeCount; - // Total size of free blocks excluding null block - VkDeviceSize m_BlocksFreeSize; - uint32_t m_IsFreeBitmap; - uint8_t m_MemoryClasses; - uint32_t m_InnerIsFreeBitmap[MAX_MEMORY_CLASSES]; - uint32_t m_ListsCount; - /* - * 0: 0-3 lists for small buffers - * 1+: 0-(2^SLI-1) lists for normal buffers - */ - Block** m_FreeList; - VmaPoolAllocator m_BlockAllocator; - Block* m_NullBlock; - VmaBlockBufferImageGranularity m_GranularityHandler; - - uint8_t SizeToMemoryClass(VkDeviceSize size) const; - uint16_t SizeToSecondIndex(VkDeviceSize size, uint8_t memoryClass) const; - uint32_t GetListIndex(uint8_t memoryClass, uint16_t secondIndex) const; - uint32_t GetListIndex(VkDeviceSize size) const; - - void RemoveFreeBlock(Block* block); - void InsertFreeBlock(Block* block); - void MergeBlock(Block* block, Block* prev); - - Block* FindFreeBlock(VkDeviceSize size, uint32_t& listIndex) const; - bool CheckBlock( - Block& block, - uint32_t listIndex, - VkDeviceSize allocSize, - VkDeviceSize allocAlignment, - VmaSuballocationType allocType, - VmaAllocationRequest* pAllocationRequest); -}; - -#ifndef _VMA_BLOCK_METADATA_TLSF_FUNCTIONS -VmaBlockMetadata_TLSF::VmaBlockMetadata_TLSF(const VkAllocationCallbacks* pAllocationCallbacks, - VkDeviceSize bufferImageGranularity, bool isVirtual) - : VmaBlockMetadata(pAllocationCallbacks, bufferImageGranularity, isVirtual), - m_AllocCount(0), - m_BlocksFreeCount(0), - m_BlocksFreeSize(0), - m_IsFreeBitmap(0), - m_MemoryClasses(0), - m_ListsCount(0), - m_FreeList(VMA_NULL), - m_BlockAllocator(pAllocationCallbacks, INITIAL_BLOCK_ALLOC_COUNT), - m_NullBlock(VMA_NULL), - m_GranularityHandler(bufferImageGranularity) {} - -VmaBlockMetadata_TLSF::~VmaBlockMetadata_TLSF() -{ - if (m_FreeList) - vma_delete_array(GetAllocationCallbacks(), m_FreeList, m_ListsCount); - m_GranularityHandler.Destroy(GetAllocationCallbacks()); -} - -void VmaBlockMetadata_TLSF::Init(VkDeviceSize size) -{ - VmaBlockMetadata::Init(size); - - if (!IsVirtual()) - m_GranularityHandler.Init(GetAllocationCallbacks(), size); - - m_NullBlock = m_BlockAllocator.Alloc(); - m_NullBlock->size = size; - m_NullBlock->offset = 0; - m_NullBlock->prevPhysical = VMA_NULL; - m_NullBlock->nextPhysical = VMA_NULL; - m_NullBlock->MarkFree(); - m_NullBlock->NextFree() = VMA_NULL; - m_NullBlock->PrevFree() = VMA_NULL; - uint8_t memoryClass = SizeToMemoryClass(size); - uint16_t sli = SizeToSecondIndex(size, memoryClass); - m_ListsCount = (memoryClass == 0 ? 
0 : (memoryClass - 1) * (1UL << SECOND_LEVEL_INDEX) + sli) + 1; - if (IsVirtual()) - m_ListsCount += 1UL << SECOND_LEVEL_INDEX; - else - m_ListsCount += 4; - - m_MemoryClasses = memoryClass + 2; - memset(m_InnerIsFreeBitmap, 0, MAX_MEMORY_CLASSES * sizeof(uint32_t)); - - m_FreeList = vma_new_array(GetAllocationCallbacks(), Block*, m_ListsCount); - memset(m_FreeList, 0, m_ListsCount * sizeof(Block*)); -} - -bool VmaBlockMetadata_TLSF::Validate() const -{ - VMA_VALIDATE(GetSumFreeSize() <= GetSize()); - - VkDeviceSize calculatedSize = m_NullBlock->size; - VkDeviceSize calculatedFreeSize = m_NullBlock->size; - size_t allocCount = 0; - size_t freeCount = 0; - - // Check integrity of free lists - for (uint32_t list = 0; list < m_ListsCount; ++list) - { - Block* block = m_FreeList[list]; - if (block != VMA_NULL) - { - VMA_VALIDATE(block->IsFree()); - VMA_VALIDATE(block->PrevFree() == VMA_NULL); - while (block->NextFree()) - { - VMA_VALIDATE(block->NextFree()->IsFree()); - VMA_VALIDATE(block->NextFree()->PrevFree() == block); - block = block->NextFree(); - } - } - } - - VkDeviceSize nextOffset = m_NullBlock->offset; - auto validateCtx = m_GranularityHandler.StartValidation(GetAllocationCallbacks(), IsVirtual()); - - VMA_VALIDATE(m_NullBlock->nextPhysical == VMA_NULL); - if (m_NullBlock->prevPhysical) - { - VMA_VALIDATE(m_NullBlock->prevPhysical->nextPhysical == m_NullBlock); - } - // Check all blocks - for (Block* prev = m_NullBlock->prevPhysical; prev != VMA_NULL; prev = prev->prevPhysical) - { - VMA_VALIDATE(prev->offset + prev->size == nextOffset); - nextOffset = prev->offset; - calculatedSize += prev->size; - - uint32_t listIndex = GetListIndex(prev->size); - if (prev->IsFree()) - { - ++freeCount; - // Check if free block belongs to free list - Block* freeBlock = m_FreeList[listIndex]; - VMA_VALIDATE(freeBlock != VMA_NULL); - - bool found = false; - do - { - if (freeBlock == prev) - found = true; - - freeBlock = freeBlock->NextFree(); - } while (!found && freeBlock != VMA_NULL); - - VMA_VALIDATE(found); - calculatedFreeSize += prev->size; - } - else - { - ++allocCount; - // Check if taken block is not on a free list - Block* freeBlock = m_FreeList[listIndex]; - while (freeBlock) - { - VMA_VALIDATE(freeBlock != prev); - freeBlock = freeBlock->NextFree(); - } - - if (!IsVirtual()) - { - VMA_VALIDATE(m_GranularityHandler.Validate(validateCtx, prev->offset, prev->size)); - } - } - - if (prev->prevPhysical) - { - VMA_VALIDATE(prev->prevPhysical->nextPhysical == prev); - } - } - - if (!IsVirtual()) - { - VMA_VALIDATE(m_GranularityHandler.FinishValidation(validateCtx)); - } - - VMA_VALIDATE(nextOffset == 0); - VMA_VALIDATE(calculatedSize == GetSize()); - VMA_VALIDATE(calculatedFreeSize == GetSumFreeSize()); - VMA_VALIDATE(allocCount == m_AllocCount); - VMA_VALIDATE(freeCount == m_BlocksFreeCount); - - return true; -} - -void VmaBlockMetadata_TLSF::AddDetailedStatistics(VmaDetailedStatistics& inoutStats) const -{ - inoutStats.statistics.blockCount++; - inoutStats.statistics.blockBytes += GetSize(); - if (m_NullBlock->size > 0) - VmaAddDetailedStatisticsUnusedRange(inoutStats, m_NullBlock->size); - - for (Block* block = m_NullBlock->prevPhysical; block != VMA_NULL; block = block->prevPhysical) - { - if (block->IsFree()) - VmaAddDetailedStatisticsUnusedRange(inoutStats, block->size); - else - VmaAddDetailedStatisticsAllocation(inoutStats, block->size); - } -} - -void VmaBlockMetadata_TLSF::AddStatistics(VmaStatistics& inoutStats) const -{ - inoutStats.blockCount++; - inoutStats.allocationCount += 
(uint32_t)m_AllocCount; - inoutStats.blockBytes += GetSize(); - inoutStats.allocationBytes += GetSize() - GetSumFreeSize(); -} - -#if VMA_STATS_STRING_ENABLED -void VmaBlockMetadata_TLSF::PrintDetailedMap(class VmaJsonWriter& json) const -{ - size_t blockCount = m_AllocCount + m_BlocksFreeCount; - VmaStlAllocator allocator(GetAllocationCallbacks()); - VmaVector> blockList(blockCount, allocator); - - size_t i = blockCount; - for (Block* block = m_NullBlock->prevPhysical; block != VMA_NULL; block = block->prevPhysical) - { - blockList[--i] = block; - } - VMA_ASSERT(i == 0); - - VmaDetailedStatistics stats; - VmaClearDetailedStatistics(stats); - AddDetailedStatistics(stats); - - PrintDetailedMap_Begin(json, - stats.statistics.blockBytes - stats.statistics.allocationBytes, - stats.statistics.allocationCount, - stats.unusedRangeCount); - - for (; i < blockCount; ++i) - { - Block* block = blockList[i]; - if (block->IsFree()) - PrintDetailedMap_UnusedRange(json, block->offset, block->size); - else - PrintDetailedMap_Allocation(json, block->offset, block->size, block->UserData()); - } - if (m_NullBlock->size > 0) - PrintDetailedMap_UnusedRange(json, m_NullBlock->offset, m_NullBlock->size); - - PrintDetailedMap_End(json); -} -#endif - -bool VmaBlockMetadata_TLSF::CreateAllocationRequest( - VkDeviceSize allocSize, - VkDeviceSize allocAlignment, - bool upperAddress, - VmaSuballocationType allocType, - uint32_t strategy, - VmaAllocationRequest* pAllocationRequest) -{ - VMA_ASSERT(allocSize > 0 && "Cannot allocate empty block!"); - VMA_ASSERT(!upperAddress && "VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT can be used only with linear algorithm."); - - // For small granularity round up - if (!IsVirtual()) - m_GranularityHandler.RoundupAllocRequest(allocType, allocSize, allocAlignment); - - allocSize += GetDebugMargin(); - // Quick check for too small pool - if (allocSize > GetSumFreeSize()) - return false; - - // If no free blocks in pool then check only null block - if (m_BlocksFreeCount == 0) - return CheckBlock(*m_NullBlock, m_ListsCount, allocSize, allocAlignment, allocType, pAllocationRequest); - - // Round up to the next block - VkDeviceSize sizeForNextList = allocSize; - VkDeviceSize smallSizeStep = SMALL_BUFFER_SIZE / (IsVirtual() ? 
1 << SECOND_LEVEL_INDEX : 4); - if (allocSize > SMALL_BUFFER_SIZE) - { - sizeForNextList += (1ULL << (VMA_BITSCAN_MSB(allocSize) - SECOND_LEVEL_INDEX)); - } - else if (allocSize > SMALL_BUFFER_SIZE - smallSizeStep) - sizeForNextList = SMALL_BUFFER_SIZE + 1; - else - sizeForNextList += smallSizeStep; - - uint32_t nextListIndex = 0; - uint32_t prevListIndex = 0; - Block* nextListBlock = VMA_NULL; - Block* prevListBlock = VMA_NULL; - - // Check blocks according to strategies - if (strategy & VMA_ALLOCATION_CREATE_STRATEGY_MIN_TIME_BIT) - { - // Quick check for larger block first - nextListBlock = FindFreeBlock(sizeForNextList, nextListIndex); - if (nextListBlock != VMA_NULL && CheckBlock(*nextListBlock, nextListIndex, allocSize, allocAlignment, allocType, pAllocationRequest)) - return true; - - // If not fitted then null block - if (CheckBlock(*m_NullBlock, m_ListsCount, allocSize, allocAlignment, allocType, pAllocationRequest)) - return true; - - // Null block failed, search larger bucket - while (nextListBlock) - { - if (CheckBlock(*nextListBlock, nextListIndex, allocSize, allocAlignment, allocType, pAllocationRequest)) - return true; - nextListBlock = nextListBlock->NextFree(); - } - - // Failed again, check best fit bucket - prevListBlock = FindFreeBlock(allocSize, prevListIndex); - while (prevListBlock) - { - if (CheckBlock(*prevListBlock, prevListIndex, allocSize, allocAlignment, allocType, pAllocationRequest)) - return true; - prevListBlock = prevListBlock->NextFree(); - } - } - else if (strategy & VMA_ALLOCATION_CREATE_STRATEGY_MIN_MEMORY_BIT) - { - // Check best fit bucket - prevListBlock = FindFreeBlock(allocSize, prevListIndex); - while (prevListBlock) - { - if (CheckBlock(*prevListBlock, prevListIndex, allocSize, allocAlignment, allocType, pAllocationRequest)) - return true; - prevListBlock = prevListBlock->NextFree(); - } - - // If failed check null block - if (CheckBlock(*m_NullBlock, m_ListsCount, allocSize, allocAlignment, allocType, pAllocationRequest)) - return true; - - // Check larger bucket - nextListBlock = FindFreeBlock(sizeForNextList, nextListIndex); - while (nextListBlock) - { - if (CheckBlock(*nextListBlock, nextListIndex, allocSize, allocAlignment, allocType, pAllocationRequest)) - return true; - nextListBlock = nextListBlock->NextFree(); - } - } - else if (strategy & VMA_ALLOCATION_CREATE_STRATEGY_MIN_OFFSET_BIT ) - { - // Perform search from the start - VmaStlAllocator allocator(GetAllocationCallbacks()); - VmaVector> blockList(m_BlocksFreeCount, allocator); - - size_t i = m_BlocksFreeCount; - for (Block* block = m_NullBlock->prevPhysical; block != VMA_NULL; block = block->prevPhysical) - { - if (block->IsFree() && block->size >= allocSize) - blockList[--i] = block; - } - - for (; i < m_BlocksFreeCount; ++i) - { - Block& block = *blockList[i]; - if (CheckBlock(block, GetListIndex(block.size), allocSize, allocAlignment, allocType, pAllocationRequest)) - return true; - } - - // If failed check null block - if (CheckBlock(*m_NullBlock, m_ListsCount, allocSize, allocAlignment, allocType, pAllocationRequest)) - return true; - - // Whole range searched, no more memory - return false; - } - else - { - // Check larger bucket - nextListBlock = FindFreeBlock(sizeForNextList, nextListIndex); - while (nextListBlock) - { - if (CheckBlock(*nextListBlock, nextListIndex, allocSize, allocAlignment, allocType, pAllocationRequest)) - return true; - nextListBlock = nextListBlock->NextFree(); - } - - // If failed check null block - if (CheckBlock(*m_NullBlock, m_ListsCount, 
allocSize, allocAlignment, allocType, pAllocationRequest)) - return true; - - // Check best fit bucket - prevListBlock = FindFreeBlock(allocSize, prevListIndex); - while (prevListBlock) - { - if (CheckBlock(*prevListBlock, prevListIndex, allocSize, allocAlignment, allocType, pAllocationRequest)) - return true; - prevListBlock = prevListBlock->NextFree(); - } - } - - // Worst case, full search has to be done - while (++nextListIndex < m_ListsCount) - { - nextListBlock = m_FreeList[nextListIndex]; - while (nextListBlock) - { - if (CheckBlock(*nextListBlock, nextListIndex, allocSize, allocAlignment, allocType, pAllocationRequest)) - return true; - nextListBlock = nextListBlock->NextFree(); - } - } - - // No more memory sadly - return false; -} - -VkResult VmaBlockMetadata_TLSF::CheckCorruption(const void* pBlockData) -{ - for (Block* block = m_NullBlock->prevPhysical; block != VMA_NULL; block = block->prevPhysical) - { - if (!block->IsFree()) - { - if (!VmaValidateMagicValue(pBlockData, block->offset + block->size)) - { - VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!"); - return VK_ERROR_UNKNOWN_COPY; - } - } - } - - return VK_SUCCESS; -} - -void VmaBlockMetadata_TLSF::Alloc( - const VmaAllocationRequest& request, - VmaSuballocationType type, - void* userData) -{ - VMA_ASSERT(request.type == VmaAllocationRequestType::TLSF); - - // Get block and pop it from the free list - Block* currentBlock = (Block*)request.allocHandle; - VkDeviceSize offset = request.algorithmData; - VMA_ASSERT(currentBlock != VMA_NULL); - VMA_ASSERT(currentBlock->offset <= offset); - - if (currentBlock != m_NullBlock) - RemoveFreeBlock(currentBlock); - - VkDeviceSize debugMargin = GetDebugMargin(); - VkDeviceSize misssingAlignment = offset - currentBlock->offset; - - // Append missing alignment to prev block or create new one - if (misssingAlignment) - { - Block* prevBlock = currentBlock->prevPhysical; - VMA_ASSERT(prevBlock != VMA_NULL && "There should be no missing alignment at offset 0!"); - - if (prevBlock->IsFree() && prevBlock->size != debugMargin) - { - uint32_t oldList = GetListIndex(prevBlock->size); - prevBlock->size += misssingAlignment; - // Check if new size crosses list bucket - if (oldList != GetListIndex(prevBlock->size)) - { - prevBlock->size -= misssingAlignment; - RemoveFreeBlock(prevBlock); - prevBlock->size += misssingAlignment; - InsertFreeBlock(prevBlock); - } - else - m_BlocksFreeSize += misssingAlignment; - } - else - { - Block* newBlock = m_BlockAllocator.Alloc(); - currentBlock->prevPhysical = newBlock; - prevBlock->nextPhysical = newBlock; - newBlock->prevPhysical = prevBlock; - newBlock->nextPhysical = currentBlock; - newBlock->size = misssingAlignment; - newBlock->offset = currentBlock->offset; - newBlock->MarkTaken(); - - InsertFreeBlock(newBlock); - } - - currentBlock->size -= misssingAlignment; - currentBlock->offset += misssingAlignment; - } - - VkDeviceSize size = request.size + debugMargin; - if (currentBlock->size == size) - { - if (currentBlock == m_NullBlock) - { - // Setup new null block - m_NullBlock = m_BlockAllocator.Alloc(); - m_NullBlock->size = 0; - m_NullBlock->offset = currentBlock->offset + size; - m_NullBlock->prevPhysical = currentBlock; - m_NullBlock->nextPhysical = VMA_NULL; - m_NullBlock->MarkFree(); - m_NullBlock->PrevFree() = VMA_NULL; - m_NullBlock->NextFree() = VMA_NULL; - currentBlock->nextPhysical = m_NullBlock; - currentBlock->MarkTaken(); - } - } - else - { - VMA_ASSERT(currentBlock->size > size && "Proper block already found, 
shouldn't find smaller one!"); - - // Create new free block - Block* newBlock = m_BlockAllocator.Alloc(); - newBlock->size = currentBlock->size - size; - newBlock->offset = currentBlock->offset + size; - newBlock->prevPhysical = currentBlock; - newBlock->nextPhysical = currentBlock->nextPhysical; - currentBlock->nextPhysical = newBlock; - currentBlock->size = size; - - if (currentBlock == m_NullBlock) - { - m_NullBlock = newBlock; - m_NullBlock->MarkFree(); - m_NullBlock->NextFree() = VMA_NULL; - m_NullBlock->PrevFree() = VMA_NULL; - currentBlock->MarkTaken(); - } - else - { - newBlock->nextPhysical->prevPhysical = newBlock; - newBlock->MarkTaken(); - InsertFreeBlock(newBlock); - } - } - currentBlock->UserData() = userData; - - if (debugMargin > 0) - { - currentBlock->size -= debugMargin; - Block* newBlock = m_BlockAllocator.Alloc(); - newBlock->size = debugMargin; - newBlock->offset = currentBlock->offset + currentBlock->size; - newBlock->prevPhysical = currentBlock; - newBlock->nextPhysical = currentBlock->nextPhysical; - newBlock->MarkTaken(); - currentBlock->nextPhysical->prevPhysical = newBlock; - currentBlock->nextPhysical = newBlock; - InsertFreeBlock(newBlock); - } - - if (!IsVirtual()) - m_GranularityHandler.AllocPages((uint8_t)(uintptr_t)request.customData, - currentBlock->offset, currentBlock->size); - ++m_AllocCount; -} - -void VmaBlockMetadata_TLSF::Free(VmaAllocHandle allocHandle) -{ - Block* block = (Block*)allocHandle; - Block* next = block->nextPhysical; - VMA_ASSERT(!block->IsFree() && "Block is already free!"); - - if (!IsVirtual()) - m_GranularityHandler.FreePages(block->offset, block->size); - --m_AllocCount; - - VkDeviceSize debugMargin = GetDebugMargin(); - if (debugMargin > 0) - { - RemoveFreeBlock(next); - MergeBlock(next, block); - block = next; - next = next->nextPhysical; - } - - // Try merging - Block* prev = block->prevPhysical; - if (prev != VMA_NULL && prev->IsFree() && prev->size != debugMargin) - { - RemoveFreeBlock(prev); - MergeBlock(block, prev); - } - - if (!next->IsFree()) - InsertFreeBlock(block); - else if (next == m_NullBlock) - MergeBlock(m_NullBlock, block); - else - { - RemoveFreeBlock(next); - MergeBlock(next, block); - InsertFreeBlock(next); - } -} - -void VmaBlockMetadata_TLSF::GetAllocationInfo(VmaAllocHandle allocHandle, VmaVirtualAllocationInfo& outInfo) -{ - Block* block = (Block*)allocHandle; - VMA_ASSERT(!block->IsFree() && "Cannot get allocation info for free block!"); - outInfo.offset = block->offset; - outInfo.size = block->size; - outInfo.pUserData = block->UserData(); -} - -void* VmaBlockMetadata_TLSF::GetAllocationUserData(VmaAllocHandle allocHandle) const -{ - Block* block = (Block*)allocHandle; - VMA_ASSERT(!block->IsFree() && "Cannot get user data for free block!"); - return block->UserData(); -} - -VmaAllocHandle VmaBlockMetadata_TLSF::GetAllocationListBegin() const -{ - if (m_AllocCount == 0) - return VK_NULL_HANDLE; - - for (Block* block = m_NullBlock->prevPhysical; block; block = block->prevPhysical) - { - if (!block->IsFree()) - return (VmaAllocHandle)block; - } - VMA_ASSERT(false && "If m_AllocCount > 0 then should find any allocation!"); - return VK_NULL_HANDLE; -} - -VmaAllocHandle VmaBlockMetadata_TLSF::GetNextAllocation(VmaAllocHandle prevAlloc) const -{ - Block* startBlock = (Block*)prevAlloc; - VMA_ASSERT(!startBlock->IsFree() && "Incorrect block!"); - - for (Block* block = startBlock->prevPhysical; block; block = block->prevPhysical) - { - if (!block->IsFree()) - return (VmaAllocHandle)block; - } - return 
VK_NULL_HANDLE; -} - -VkDeviceSize VmaBlockMetadata_TLSF::GetNextFreeRegionSize(VmaAllocHandle alloc) const -{ - Block* block = (Block*)alloc; - VMA_ASSERT(!block->IsFree() && "Incorrect block!"); - - if (block->prevPhysical) - return block->prevPhysical->IsFree() ? block->prevPhysical->size : 0; - return 0; -} - -void VmaBlockMetadata_TLSF::Clear() -{ - m_AllocCount = 0; - m_BlocksFreeCount = 0; - m_BlocksFreeSize = 0; - m_IsFreeBitmap = 0; - m_NullBlock->offset = 0; - m_NullBlock->size = GetSize(); - Block* block = m_NullBlock->prevPhysical; - m_NullBlock->prevPhysical = VMA_NULL; - while (block) - { - Block* prev = block->prevPhysical; - m_BlockAllocator.Free(block); - block = prev; - } - memset(m_FreeList, 0, m_ListsCount * sizeof(Block*)); - memset(m_InnerIsFreeBitmap, 0, m_MemoryClasses * sizeof(uint32_t)); - m_GranularityHandler.Clear(); -} - -void VmaBlockMetadata_TLSF::SetAllocationUserData(VmaAllocHandle allocHandle, void* userData) -{ - Block* block = (Block*)allocHandle; - VMA_ASSERT(!block->IsFree() && "Trying to set user data for not allocated block!"); - block->UserData() = userData; -} - -void VmaBlockMetadata_TLSF::DebugLogAllAllocations() const -{ - for (Block* block = m_NullBlock->prevPhysical; block != VMA_NULL; block = block->prevPhysical) - if (!block->IsFree()) - DebugLogAllocation(block->offset, block->size, block->UserData()); -} - -uint8_t VmaBlockMetadata_TLSF::SizeToMemoryClass(VkDeviceSize size) const -{ - if (size > SMALL_BUFFER_SIZE) - return VMA_BITSCAN_MSB(size) - MEMORY_CLASS_SHIFT; - return 0; -} - -uint16_t VmaBlockMetadata_TLSF::SizeToSecondIndex(VkDeviceSize size, uint8_t memoryClass) const -{ - if (memoryClass == 0) - { - if (IsVirtual()) - return static_cast((size - 1) / 8); - else - return static_cast((size - 1) / 64); - } - return static_cast((size >> (memoryClass + MEMORY_CLASS_SHIFT - SECOND_LEVEL_INDEX)) ^ (1U << SECOND_LEVEL_INDEX)); -} - -uint32_t VmaBlockMetadata_TLSF::GetListIndex(uint8_t memoryClass, uint16_t secondIndex) const -{ - if (memoryClass == 0) - return secondIndex; - - const uint32_t index = static_cast(memoryClass - 1) * (1 << SECOND_LEVEL_INDEX) + secondIndex; - if (IsVirtual()) - return index + (1 << SECOND_LEVEL_INDEX); - else - return index + 4; -} - -uint32_t VmaBlockMetadata_TLSF::GetListIndex(VkDeviceSize size) const -{ - uint8_t memoryClass = SizeToMemoryClass(size); - return GetListIndex(memoryClass, SizeToSecondIndex(size, memoryClass)); -} - -void VmaBlockMetadata_TLSF::RemoveFreeBlock(Block* block) -{ - VMA_ASSERT(block != m_NullBlock); - VMA_ASSERT(block->IsFree()); - - if (block->NextFree() != VMA_NULL) - block->NextFree()->PrevFree() = block->PrevFree(); - if (block->PrevFree() != VMA_NULL) - block->PrevFree()->NextFree() = block->NextFree(); - else - { - uint8_t memClass = SizeToMemoryClass(block->size); - uint16_t secondIndex = SizeToSecondIndex(block->size, memClass); - uint32_t index = GetListIndex(memClass, secondIndex); - VMA_ASSERT(m_FreeList[index] == block); - m_FreeList[index] = block->NextFree(); - if (block->NextFree() == VMA_NULL) - { - m_InnerIsFreeBitmap[memClass] &= ~(1U << secondIndex); - if (m_InnerIsFreeBitmap[memClass] == 0) - m_IsFreeBitmap &= ~(1UL << memClass); - } - } - block->MarkTaken(); - block->UserData() = VMA_NULL; - --m_BlocksFreeCount; - m_BlocksFreeSize -= block->size; -} - -void VmaBlockMetadata_TLSF::InsertFreeBlock(Block* block) -{ - VMA_ASSERT(block != m_NullBlock); - VMA_ASSERT(!block->IsFree() && "Cannot insert block twice!"); - - uint8_t memClass = 
SizeToMemoryClass(block->size); - uint16_t secondIndex = SizeToSecondIndex(block->size, memClass); - uint32_t index = GetListIndex(memClass, secondIndex); - VMA_ASSERT(index < m_ListsCount); - block->PrevFree() = VMA_NULL; - block->NextFree() = m_FreeList[index]; - m_FreeList[index] = block; - if (block->NextFree() != VMA_NULL) - block->NextFree()->PrevFree() = block; - else - { - m_InnerIsFreeBitmap[memClass] |= 1U << secondIndex; - m_IsFreeBitmap |= 1UL << memClass; - } - ++m_BlocksFreeCount; - m_BlocksFreeSize += block->size; -} - -void VmaBlockMetadata_TLSF::MergeBlock(Block* block, Block* prev) -{ - VMA_ASSERT(block->prevPhysical == prev && "Cannot merge seperate physical regions!"); - VMA_ASSERT(!prev->IsFree() && "Cannot merge block that belongs to free list!"); - - block->offset = prev->offset; - block->size += prev->size; - block->prevPhysical = prev->prevPhysical; - if (block->prevPhysical) - block->prevPhysical->nextPhysical = block; - m_BlockAllocator.Free(prev); -} - -VmaBlockMetadata_TLSF::Block* VmaBlockMetadata_TLSF::FindFreeBlock(VkDeviceSize size, uint32_t& listIndex) const -{ - uint8_t memoryClass = SizeToMemoryClass(size); - uint32_t innerFreeMap = m_InnerIsFreeBitmap[memoryClass] & (~0U << SizeToSecondIndex(size, memoryClass)); - if (!innerFreeMap) - { - // Check higher levels for avaiable blocks - uint32_t freeMap = m_IsFreeBitmap & (~0UL << (memoryClass + 1)); - if (!freeMap) - return VMA_NULL; // No more memory avaible - - // Find lowest free region - memoryClass = VMA_BITSCAN_LSB(freeMap); - innerFreeMap = m_InnerIsFreeBitmap[memoryClass]; - VMA_ASSERT(innerFreeMap != 0); - } - // Find lowest free subregion - listIndex = GetListIndex(memoryClass, VMA_BITSCAN_LSB(innerFreeMap)); - VMA_ASSERT(m_FreeList[listIndex]); - return m_FreeList[listIndex]; -} - -bool VmaBlockMetadata_TLSF::CheckBlock( - Block& block, - uint32_t listIndex, - VkDeviceSize allocSize, - VkDeviceSize allocAlignment, - VmaSuballocationType allocType, - VmaAllocationRequest* pAllocationRequest) -{ - VMA_ASSERT(block.IsFree() && "Block is already taken!"); - - VkDeviceSize alignedOffset = VmaAlignUp(block.offset, allocAlignment); - if (block.size < allocSize + alignedOffset - block.offset) - return false; - - // Check for granularity conflicts - if (!IsVirtual() && - m_GranularityHandler.CheckConflictAndAlignUp(alignedOffset, allocSize, block.offset, block.size, allocType)) - return false; - - // Alloc successful - pAllocationRequest->type = VmaAllocationRequestType::TLSF; - pAllocationRequest->allocHandle = (VmaAllocHandle)█ - pAllocationRequest->size = allocSize - GetDebugMargin(); - pAllocationRequest->customData = (void*)allocType; - pAllocationRequest->algorithmData = alignedOffset; - - // Place block at the start of list if it's normal block - if (listIndex != m_ListsCount && block.PrevFree()) - { - block.PrevFree()->NextFree() = block.NextFree(); - if (block.NextFree()) - block.NextFree()->PrevFree() = block.PrevFree(); - block.PrevFree() = VMA_NULL; - block.NextFree() = m_FreeList[listIndex]; - m_FreeList[listIndex] = █ - if (block.NextFree()) - block.NextFree()->PrevFree() = █ - } - - return true; -} -#endif // _VMA_BLOCK_METADATA_TLSF_FUNCTIONS -#endif // _VMA_BLOCK_METADATA_TLSF - -#ifndef _VMA_BLOCK_VECTOR -/* -Sequence of VmaDeviceMemoryBlock. Represents memory blocks allocated for a specific -Vulkan memory type. - -Synchronized internally with a mutex. 
-*/ -class VmaBlockVector -{ - friend struct VmaDefragmentationContext_T; - VMA_CLASS_NO_COPY(VmaBlockVector) -public: - VmaBlockVector( - VmaAllocator hAllocator, - VmaPool hParentPool, - uint32_t memoryTypeIndex, - VkDeviceSize preferredBlockSize, - size_t minBlockCount, - size_t maxBlockCount, - VkDeviceSize bufferImageGranularity, - bool explicitBlockSize, - uint32_t algorithm, - float priority, - VkDeviceSize minAllocationAlignment, - void* pMemoryAllocateNext); - ~VmaBlockVector(); - - VmaAllocator GetAllocator() const { return m_hAllocator; } - VmaPool GetParentPool() const { return m_hParentPool; } - bool IsCustomPool() const { return m_hParentPool != VMA_NULL; } - uint32_t GetMemoryTypeIndex() const { return m_MemoryTypeIndex; } - VkDeviceSize GetPreferredBlockSize() const { return m_PreferredBlockSize; } - VkDeviceSize GetBufferImageGranularity() const { return m_BufferImageGranularity; } - uint32_t GetAlgorithm() const { return m_Algorithm; } - bool HasExplicitBlockSize() const { return m_ExplicitBlockSize; } - float GetPriority() const { return m_Priority; } - const void* GetAllocationNextPtr() const { return m_pMemoryAllocateNext; } - // To be used only while the m_Mutex is locked. Used during defragmentation. - size_t GetBlockCount() const { return m_Blocks.size(); } - // To be used only while the m_Mutex is locked. Used during defragmentation. - VmaDeviceMemoryBlock* GetBlock(size_t index) const { return m_Blocks[index]; } - VMA_RW_MUTEX &GetMutex() { return m_Mutex; } - - VkResult CreateMinBlocks(); - void AddStatistics(VmaStatistics& inoutStats); - void AddDetailedStatistics(VmaDetailedStatistics& inoutStats); - bool IsEmpty(); - bool IsCorruptionDetectionEnabled() const; - - VkResult Allocate( - VkDeviceSize size, - VkDeviceSize alignment, - const VmaAllocationCreateInfo& createInfo, - VmaSuballocationType suballocType, - size_t allocationCount, - VmaAllocation* pAllocations); - - void Free(const VmaAllocation hAllocation); - -#if VMA_STATS_STRING_ENABLED - void PrintDetailedMap(class VmaJsonWriter& json); -#endif - - VkResult CheckCorruption(); - -private: - const VmaAllocator m_hAllocator; - const VmaPool m_hParentPool; - const uint32_t m_MemoryTypeIndex; - const VkDeviceSize m_PreferredBlockSize; - const size_t m_MinBlockCount; - const size_t m_MaxBlockCount; - const VkDeviceSize m_BufferImageGranularity; - const bool m_ExplicitBlockSize; - const uint32_t m_Algorithm; - const float m_Priority; - const VkDeviceSize m_MinAllocationAlignment; - - void* const m_pMemoryAllocateNext; - VMA_RW_MUTEX m_Mutex; - // Incrementally sorted by sumFreeSize, ascending. - VmaVector> m_Blocks; - uint32_t m_NextBlockId; - bool m_IncrementalSort = true; - - void SetIncrementalSort(bool val) { m_IncrementalSort = val; } - - VkDeviceSize CalcMaxBlockSize() const; - // Finds and removes given block from vector. - void Remove(VmaDeviceMemoryBlock* pBlock); - // Performs single step in sorting m_Blocks. They may not be fully sorted - // after this call. 
- void IncrementallySortBlocks(); - void SortByFreeSize(); - - VkResult AllocatePage( - VkDeviceSize size, - VkDeviceSize alignment, - const VmaAllocationCreateInfo& createInfo, - VmaSuballocationType suballocType, - VmaAllocation* pAllocation); - - VkResult AllocateFromBlock( - VmaDeviceMemoryBlock* pBlock, - VkDeviceSize size, - VkDeviceSize alignment, - VmaAllocationCreateFlags allocFlags, - void* pUserData, - VmaSuballocationType suballocType, - uint32_t strategy, - VmaAllocation* pAllocation); - - VkResult CommitAllocationRequest( - VmaAllocationRequest& allocRequest, - VmaDeviceMemoryBlock* pBlock, - VkDeviceSize alignment, - VmaAllocationCreateFlags allocFlags, - void* pUserData, - VmaSuballocationType suballocType, - VmaAllocation* pAllocation); - - VkResult CreateBlock(VkDeviceSize blockSize, size_t* pNewBlockIndex); - bool HasEmptyBlock(); -}; -#endif // _VMA_BLOCK_VECTOR - -#ifndef _VMA_DEFRAGMENTATION_CONTEXT -struct VmaDefragmentationContext_T -{ - VMA_CLASS_NO_COPY(VmaDefragmentationContext_T) -public: - VmaDefragmentationContext_T( - VmaAllocator hAllocator, - const VmaDefragmentationInfo& info); - ~VmaDefragmentationContext_T(); - - void GetStats(VmaDefragmentationStats& outStats) { outStats = m_GlobalStats; } - - VkResult DefragmentPassBegin(VmaDefragmentationPassMoveInfo& moveInfo); - VkResult DefragmentPassEnd(VmaDefragmentationPassMoveInfo& moveInfo); - -private: - // Max number of allocations to ignore due to size constraints before ending single pass - static const uint8_t MAX_ALLOCS_TO_IGNORE = 16; - enum class CounterStatus { Pass, Ignore, End }; - - struct FragmentedBlock - { - uint32_t data; - VmaDeviceMemoryBlock* block; - }; - struct StateBalanced - { - VkDeviceSize avgFreeSize = 0; - VkDeviceSize avgAllocSize = UINT64_MAX; - }; - struct StateExtensive - { - enum class Operation : uint8_t - { - FindFreeBlockBuffer, FindFreeBlockTexture, FindFreeBlockAll, - MoveBuffers, MoveTextures, MoveAll, - Cleanup, Done - }; - - Operation operation = Operation::FindFreeBlockTexture; - size_t firstFreeBlock = SIZE_MAX; - }; - struct MoveAllocationData - { - VkDeviceSize size; - VkDeviceSize alignment; - VmaSuballocationType type; - VmaAllocationCreateFlags flags; - VmaDefragmentationMove move = {}; - }; - - const VkDeviceSize m_MaxPassBytes; - const uint32_t m_MaxPassAllocations; - - VmaStlAllocator m_MoveAllocator; - VmaVector> m_Moves; - - uint8_t m_IgnoredAllocs = 0; - uint32_t m_Algorithm; - uint32_t m_BlockVectorCount; - VmaBlockVector* m_PoolBlockVector; - VmaBlockVector** m_pBlockVectors; - size_t m_ImmovableBlockCount = 0; - VmaDefragmentationStats m_GlobalStats = { 0 }; - VmaDefragmentationStats m_PassStats = { 0 }; - void* m_AlgorithmState = VMA_NULL; - - static MoveAllocationData GetMoveData(VmaAllocHandle handle, VmaBlockMetadata* metadata); - CounterStatus CheckCounters(VkDeviceSize bytes); - bool IncrementCounters(VkDeviceSize bytes); - bool ReallocWithinBlock(VmaBlockVector& vector, VmaDeviceMemoryBlock* block); - bool AllocInOtherBlock(size_t start, size_t end, MoveAllocationData& data, VmaBlockVector& vector); - - bool ComputeDefragmentation(VmaBlockVector& vector, size_t index); - bool ComputeDefragmentation_Fast(VmaBlockVector& vector); - bool ComputeDefragmentation_Balanced(VmaBlockVector& vector, size_t index, bool update); - bool ComputeDefragmentation_Full(VmaBlockVector& vector); - bool ComputeDefragmentation_Extensive(VmaBlockVector& vector, size_t index); - - void UpdateVectorStatistics(VmaBlockVector& vector, StateBalanced& state); - bool 
MoveDataToFreeBlocks(VmaSuballocationType currentType, - VmaBlockVector& vector, size_t firstFreeBlock, - bool& texturePresent, bool& bufferPresent, bool& otherPresent); -}; -#endif // _VMA_DEFRAGMENTATION_CONTEXT - -#ifndef _VMA_POOL_T -struct VmaPool_T -{ - friend struct VmaPoolListItemTraits; - VMA_CLASS_NO_COPY(VmaPool_T) -public: - VmaBlockVector m_BlockVector; - VmaDedicatedAllocationList m_DedicatedAllocations; - - VmaPool_T( - VmaAllocator hAllocator, - const VmaPoolCreateInfo& createInfo, - VkDeviceSize preferredBlockSize); - ~VmaPool_T(); - - uint32_t GetId() const { return m_Id; } - void SetId(uint32_t id) { VMA_ASSERT(m_Id == 0); m_Id = id; } - - const char* GetName() const { return m_Name; } - void SetName(const char* pName); - -#if VMA_STATS_STRING_ENABLED - //void PrintDetailedMap(class VmaStringBuilder& sb); -#endif - -private: - uint32_t m_Id; - char* m_Name; - VmaPool_T* m_PrevPool = VMA_NULL; - VmaPool_T* m_NextPool = VMA_NULL; -}; - -struct VmaPoolListItemTraits -{ - typedef VmaPool_T ItemType; - - static ItemType* GetPrev(const ItemType* item) { return item->m_PrevPool; } - static ItemType* GetNext(const ItemType* item) { return item->m_NextPool; } - static ItemType*& AccessPrev(ItemType* item) { return item->m_PrevPool; } - static ItemType*& AccessNext(ItemType* item) { return item->m_NextPool; } -}; -#endif // _VMA_POOL_T - -#ifndef _VMA_CURRENT_BUDGET_DATA -struct VmaCurrentBudgetData -{ - VMA_ATOMIC_UINT32 m_BlockCount[VK_MAX_MEMORY_HEAPS]; - VMA_ATOMIC_UINT32 m_AllocationCount[VK_MAX_MEMORY_HEAPS]; - VMA_ATOMIC_UINT64 m_BlockBytes[VK_MAX_MEMORY_HEAPS]; - VMA_ATOMIC_UINT64 m_AllocationBytes[VK_MAX_MEMORY_HEAPS]; - -#if VMA_MEMORY_BUDGET - VMA_ATOMIC_UINT32 m_OperationsSinceBudgetFetch; - VMA_RW_MUTEX m_BudgetMutex; - uint64_t m_VulkanUsage[VK_MAX_MEMORY_HEAPS]; - uint64_t m_VulkanBudget[VK_MAX_MEMORY_HEAPS]; - uint64_t m_BlockBytesAtBudgetFetch[VK_MAX_MEMORY_HEAPS]; -#endif // VMA_MEMORY_BUDGET - - VmaCurrentBudgetData(); - - void AddAllocation(uint32_t heapIndex, VkDeviceSize allocationSize); - void RemoveAllocation(uint32_t heapIndex, VkDeviceSize allocationSize); -}; - -#ifndef _VMA_CURRENT_BUDGET_DATA_FUNCTIONS -VmaCurrentBudgetData::VmaCurrentBudgetData() -{ - for (uint32_t heapIndex = 0; heapIndex < VK_MAX_MEMORY_HEAPS; ++heapIndex) - { - m_BlockCount[heapIndex] = 0; - m_AllocationCount[heapIndex] = 0; - m_BlockBytes[heapIndex] = 0; - m_AllocationBytes[heapIndex] = 0; -#if VMA_MEMORY_BUDGET - m_VulkanUsage[heapIndex] = 0; - m_VulkanBudget[heapIndex] = 0; - m_BlockBytesAtBudgetFetch[heapIndex] = 0; -#endif - } - -#if VMA_MEMORY_BUDGET - m_OperationsSinceBudgetFetch = 0; -#endif -} - -void VmaCurrentBudgetData::AddAllocation(uint32_t heapIndex, VkDeviceSize allocationSize) -{ - m_AllocationBytes[heapIndex] += allocationSize; - ++m_AllocationCount[heapIndex]; -#if VMA_MEMORY_BUDGET - ++m_OperationsSinceBudgetFetch; -#endif -} - -void VmaCurrentBudgetData::RemoveAllocation(uint32_t heapIndex, VkDeviceSize allocationSize) -{ - VMA_ASSERT(m_AllocationBytes[heapIndex] >= allocationSize); - m_AllocationBytes[heapIndex] -= allocationSize; - VMA_ASSERT(m_AllocationCount[heapIndex] > 0); - --m_AllocationCount[heapIndex]; -#if VMA_MEMORY_BUDGET - ++m_OperationsSinceBudgetFetch; -#endif -} -#endif // _VMA_CURRENT_BUDGET_DATA_FUNCTIONS -#endif // _VMA_CURRENT_BUDGET_DATA - -#ifndef _VMA_ALLOCATION_OBJECT_ALLOCATOR -/* -Thread-safe wrapper over VmaPoolAllocator free list, for allocation of VmaAllocation_T objects. 
-*/ -class VmaAllocationObjectAllocator -{ - VMA_CLASS_NO_COPY(VmaAllocationObjectAllocator) -public: - VmaAllocationObjectAllocator(const VkAllocationCallbacks* pAllocationCallbacks) - : m_Allocator(pAllocationCallbacks, 1024) {} - - template<typename... Types> VmaAllocation Allocate(Types&&... args); - void Free(VmaAllocation hAlloc); - -private: - VMA_MUTEX m_Mutex; - VmaPoolAllocator<VmaAllocation_T> m_Allocator; -}; - -template<typename... Types> -VmaAllocation VmaAllocationObjectAllocator::Allocate(Types&&... args) -{ - VmaMutexLock mutexLock(m_Mutex); - return m_Allocator.Alloc<Types...>(std::forward<Types>(args)...); -} - -void VmaAllocationObjectAllocator::Free(VmaAllocation hAlloc) -{ - VmaMutexLock mutexLock(m_Mutex); - m_Allocator.Free(hAlloc); -} -#endif // _VMA_ALLOCATION_OBJECT_ALLOCATOR - -#ifndef _VMA_VIRTUAL_BLOCK_T -struct VmaVirtualBlock_T -{ - VMA_CLASS_NO_COPY(VmaVirtualBlock_T) -public: - const bool m_AllocationCallbacksSpecified; - const VkAllocationCallbacks m_AllocationCallbacks; - - VmaVirtualBlock_T(const VmaVirtualBlockCreateInfo& createInfo); - ~VmaVirtualBlock_T(); - - VkResult Init() { return VK_SUCCESS; } - bool IsEmpty() const { return m_Metadata->IsEmpty(); } - void Free(VmaVirtualAllocation allocation) { m_Metadata->Free((VmaAllocHandle)allocation); } - void SetAllocationUserData(VmaVirtualAllocation allocation, void* userData) { m_Metadata->SetAllocationUserData((VmaAllocHandle)allocation, userData); } - void Clear() { m_Metadata->Clear(); } - - const VkAllocationCallbacks* GetAllocationCallbacks() const; - void GetAllocationInfo(VmaVirtualAllocation allocation, VmaVirtualAllocationInfo& outInfo); - VkResult Allocate(const VmaVirtualAllocationCreateInfo& createInfo, VmaVirtualAllocation& outAllocation, - VkDeviceSize* outOffset); - void GetStatistics(VmaStatistics& outStats) const; - void CalculateDetailedStatistics(VmaDetailedStatistics& outStats) const; -#if VMA_STATS_STRING_ENABLED - void BuildStatsString(bool detailedMap, VmaStringBuilder& sb) const; -#endif - -private: - VmaBlockMetadata* m_Metadata; -}; - -#ifndef _VMA_VIRTUAL_BLOCK_T_FUNCTIONS -VmaVirtualBlock_T::VmaVirtualBlock_T(const VmaVirtualBlockCreateInfo& createInfo) - : m_AllocationCallbacksSpecified(createInfo.pAllocationCallbacks != VMA_NULL), - m_AllocationCallbacks(createInfo.pAllocationCallbacks != VMA_NULL ? *createInfo.pAllocationCallbacks : VmaEmptyAllocationCallbacks) -{ - const uint32_t algorithm = createInfo.flags & VMA_VIRTUAL_BLOCK_CREATE_ALGORITHM_MASK; - switch (algorithm) - { - default: - VMA_ASSERT(0); - case 0: - m_Metadata = vma_new(GetAllocationCallbacks(), VmaBlockMetadata_TLSF)(VK_NULL_HANDLE, 1, true); - break; - case VMA_VIRTUAL_BLOCK_CREATE_LINEAR_ALGORITHM_BIT: - m_Metadata = vma_new(GetAllocationCallbacks(), VmaBlockMetadata_Linear)(VK_NULL_HANDLE, 1, true); - break; - } - - m_Metadata->Init(createInfo.size); -} - -VmaVirtualBlock_T::~VmaVirtualBlock_T() -{ - // Define macro VMA_DEBUG_LOG to receive the list of the unfreed allocations - if (!m_Metadata->IsEmpty()) - m_Metadata->DebugLogAllAllocations(); - // This is the most important assert in the entire library. - // Hitting it means you have some memory leak - unreleased virtual allocations. - VMA_ASSERT(m_Metadata->IsEmpty() && "Some virtual allocations were not freed before destruction of this virtual block!"); - - vma_delete(GetAllocationCallbacks(), m_Metadata); -} - -const VkAllocationCallbacks* VmaVirtualBlock_T::GetAllocationCallbacks() const -{ - return m_AllocationCallbacksSpecified ?
&m_AllocationCallbacks : VMA_NULL; -} - -void VmaVirtualBlock_T::GetAllocationInfo(VmaVirtualAllocation allocation, VmaVirtualAllocationInfo& outInfo) -{ - m_Metadata->GetAllocationInfo((VmaAllocHandle)allocation, outInfo); -} - -VkResult VmaVirtualBlock_T::Allocate(const VmaVirtualAllocationCreateInfo& createInfo, VmaVirtualAllocation& outAllocation, - VkDeviceSize* outOffset) -{ - VmaAllocationRequest request = {}; - if (m_Metadata->CreateAllocationRequest( - createInfo.size, // allocSize - VMA_MAX(createInfo.alignment, (VkDeviceSize)1), // allocAlignment - (createInfo.flags & VMA_VIRTUAL_ALLOCATION_CREATE_UPPER_ADDRESS_BIT) != 0, // upperAddress - VMA_SUBALLOCATION_TYPE_UNKNOWN, // allocType - unimportant - createInfo.flags & VMA_VIRTUAL_ALLOCATION_CREATE_STRATEGY_MASK, // strategy - &request)) - { - m_Metadata->Alloc(request, - VMA_SUBALLOCATION_TYPE_UNKNOWN, // type - unimportant - createInfo.pUserData); - outAllocation = (VmaVirtualAllocation)request.allocHandle; - if(outOffset) - *outOffset = m_Metadata->GetAllocationOffset(request.allocHandle); - return VK_SUCCESS; - } - outAllocation = (VmaVirtualAllocation)VK_NULL_HANDLE; - if (outOffset) - *outOffset = UINT64_MAX; - return VK_ERROR_OUT_OF_DEVICE_MEMORY; -} - -void VmaVirtualBlock_T::GetStatistics(VmaStatistics& outStats) const -{ - VmaClearStatistics(outStats); - m_Metadata->AddStatistics(outStats); -} - -void VmaVirtualBlock_T::CalculateDetailedStatistics(VmaDetailedStatistics& outStats) const -{ - VmaClearDetailedStatistics(outStats); - m_Metadata->AddDetailedStatistics(outStats); -} - -#if VMA_STATS_STRING_ENABLED -void VmaVirtualBlock_T::BuildStatsString(bool detailedMap, VmaStringBuilder& sb) const -{ - VmaJsonWriter json(GetAllocationCallbacks(), sb); - json.BeginObject(); - - VmaDetailedStatistics stats; - CalculateDetailedStatistics(stats); - - json.WriteString("Stats"); - VmaPrintDetailedStatistics(json, stats); - - if (detailedMap) - { - json.WriteString("Details"); - json.BeginObject(); - m_Metadata->PrintDetailedMap(json); - json.EndObject(); - } - - json.EndObject(); -} -#endif // VMA_STATS_STRING_ENABLED -#endif // _VMA_VIRTUAL_BLOCK_T_FUNCTIONS -#endif // _VMA_VIRTUAL_BLOCK_T - - -// Main allocator object. -struct VmaAllocator_T -{ - VMA_CLASS_NO_COPY(VmaAllocator_T) -public: - bool m_UseMutex; - uint32_t m_VulkanApiVersion; - bool m_UseKhrDedicatedAllocation; // Can be set only if m_VulkanApiVersion < VK_MAKE_VERSION(1, 1, 0). - bool m_UseKhrBindMemory2; // Can be set only if m_VulkanApiVersion < VK_MAKE_VERSION(1, 1, 0). - bool m_UseExtMemoryBudget; - bool m_UseAmdDeviceCoherentMemory; - bool m_UseKhrBufferDeviceAddress; - bool m_UseExtMemoryPriority; - VkDevice m_hDevice; - VkInstance m_hInstance; - bool m_AllocationCallbacksSpecified; - VkAllocationCallbacks m_AllocationCallbacks; - VmaDeviceMemoryCallbacks m_DeviceMemoryCallbacks; - VmaAllocationObjectAllocator m_AllocationObjectAllocator; - - // Each bit (1 << i) is set if HeapSizeLimit is enabled for that heap, so cannot allocate more than the heap size. - uint32_t m_HeapSizeLimitMask; - - VkPhysicalDeviceProperties m_PhysicalDeviceProperties; - VkPhysicalDeviceMemoryProperties m_MemProps; - - // Default pools. - VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES]; - VmaDedicatedAllocationList m_DedicatedAllocations[VK_MAX_MEMORY_TYPES]; - - VmaCurrentBudgetData m_Budget; - VMA_ATOMIC_UINT32 m_DeviceMemoryCount; // Total number of VkDeviceMemory objects. 
- - VmaAllocator_T(const VmaAllocatorCreateInfo* pCreateInfo); - VkResult Init(const VmaAllocatorCreateInfo* pCreateInfo); - ~VmaAllocator_T(); - - const VkAllocationCallbacks* GetAllocationCallbacks() const - { - return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : VMA_NULL; - } - const VmaVulkanFunctions& GetVulkanFunctions() const - { - return m_VulkanFunctions; - } - - VkPhysicalDevice GetPhysicalDevice() const { return m_PhysicalDevice; } - - VkDeviceSize GetBufferImageGranularity() const - { - return VMA_MAX( - static_cast(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY), - m_PhysicalDeviceProperties.limits.bufferImageGranularity); - } - - uint32_t GetMemoryHeapCount() const { return m_MemProps.memoryHeapCount; } - uint32_t GetMemoryTypeCount() const { return m_MemProps.memoryTypeCount; } - - uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex) const - { - VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount); - return m_MemProps.memoryTypes[memTypeIndex].heapIndex; - } - // True when specific memory type is HOST_VISIBLE but not HOST_COHERENT. - bool IsMemoryTypeNonCoherent(uint32_t memTypeIndex) const - { - return (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & (VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)) == - VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT; - } - // Minimum alignment for all allocations in specific memory type. - VkDeviceSize GetMemoryTypeMinAlignment(uint32_t memTypeIndex) const - { - return IsMemoryTypeNonCoherent(memTypeIndex) ? - VMA_MAX((VkDeviceSize)VMA_MIN_ALIGNMENT, m_PhysicalDeviceProperties.limits.nonCoherentAtomSize) : - (VkDeviceSize)VMA_MIN_ALIGNMENT; - } - - bool IsIntegratedGpu() const - { - return m_PhysicalDeviceProperties.deviceType == VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU; - } - - uint32_t GetGlobalMemoryTypeBits() const { return m_GlobalMemoryTypeBits; } - - void GetBufferMemoryRequirements( - VkBuffer hBuffer, - VkMemoryRequirements& memReq, - bool& requiresDedicatedAllocation, - bool& prefersDedicatedAllocation) const; - void GetImageMemoryRequirements( - VkImage hImage, - VkMemoryRequirements& memReq, - bool& requiresDedicatedAllocation, - bool& prefersDedicatedAllocation) const; - VkResult FindMemoryTypeIndex( - uint32_t memoryTypeBits, - const VmaAllocationCreateInfo* pAllocationCreateInfo, - VkFlags bufImgUsage, // VkBufferCreateInfo::usage or VkImageCreateInfo::usage. UINT32_MAX if unknown. - uint32_t* pMemoryTypeIndex) const; - - // Main allocation function. - VkResult AllocateMemory( - const VkMemoryRequirements& vkMemReq, - bool requiresDedicatedAllocation, - bool prefersDedicatedAllocation, - VkBuffer dedicatedBuffer, - VkImage dedicatedImage, - VkFlags dedicatedBufferImageUsage, // UINT32_MAX if unknown. - const VmaAllocationCreateInfo& createInfo, - VmaSuballocationType suballocType, - size_t allocationCount, - VmaAllocation* pAllocations); - - // Main deallocation function. 
- void FreeMemory( - size_t allocationCount, - const VmaAllocation* pAllocations); - - void CalculateStatistics(VmaTotalStatistics* pStats); - - void GetHeapBudgets( - VmaBudget* outBudgets, uint32_t firstHeap, uint32_t heapCount); - -#if VMA_STATS_STRING_ENABLED - void PrintDetailedMap(class VmaJsonWriter& json); -#endif - - void GetAllocationInfo(VmaAllocation hAllocation, VmaAllocationInfo* pAllocationInfo); - - VkResult CreatePool(const VmaPoolCreateInfo* pCreateInfo, VmaPool* pPool); - void DestroyPool(VmaPool pool); - void GetPoolStatistics(VmaPool pool, VmaStatistics* pPoolStats); - void CalculatePoolStatistics(VmaPool pool, VmaDetailedStatistics* pPoolStats); - - void SetCurrentFrameIndex(uint32_t frameIndex); - uint32_t GetCurrentFrameIndex() const { return m_CurrentFrameIndex.load(); } - - VkResult CheckPoolCorruption(VmaPool hPool); - VkResult CheckCorruption(uint32_t memoryTypeBits); - - // Call to Vulkan function vkAllocateMemory with accompanying bookkeeping. - VkResult AllocateVulkanMemory(const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory); - // Call to Vulkan function vkFreeMemory with accompanying bookkeeping. - void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory); - // Call to Vulkan function vkBindBufferMemory or vkBindBufferMemory2KHR. - VkResult BindVulkanBuffer( - VkDeviceMemory memory, - VkDeviceSize memoryOffset, - VkBuffer buffer, - const void* pNext); - // Call to Vulkan function vkBindImageMemory or vkBindImageMemory2KHR. - VkResult BindVulkanImage( - VkDeviceMemory memory, - VkDeviceSize memoryOffset, - VkImage image, - const void* pNext); - - VkResult Map(VmaAllocation hAllocation, void** ppData); - void Unmap(VmaAllocation hAllocation); - - VkResult BindBufferMemory( - VmaAllocation hAllocation, - VkDeviceSize allocationLocalOffset, - VkBuffer hBuffer, - const void* pNext); - VkResult BindImageMemory( - VmaAllocation hAllocation, - VkDeviceSize allocationLocalOffset, - VkImage hImage, - const void* pNext); - - VkResult FlushOrInvalidateAllocation( - VmaAllocation hAllocation, - VkDeviceSize offset, VkDeviceSize size, - VMA_CACHE_OPERATION op); - VkResult FlushOrInvalidateAllocations( - uint32_t allocationCount, - const VmaAllocation* allocations, - const VkDeviceSize* offsets, const VkDeviceSize* sizes, - VMA_CACHE_OPERATION op); - - void FillAllocation(const VmaAllocation hAllocation, uint8_t pattern); - - /* - Returns bit mask of memory types that can support defragmentation on GPU as - they support creation of required buffer for copy operations. - */ - uint32_t GetGpuDefragmentationMemoryTypeBits(); - -#if VMA_EXTERNAL_MEMORY - VkExternalMemoryHandleTypeFlagsKHR GetExternalMemoryHandleTypeFlags(uint32_t memTypeIndex) const - { - return m_TypeExternalMemoryHandleTypes[memTypeIndex]; - } -#endif // #if VMA_EXTERNAL_MEMORY - -private: - VkDeviceSize m_PreferredLargeHeapBlockSize; - - VkPhysicalDevice m_PhysicalDevice; - VMA_ATOMIC_UINT32 m_CurrentFrameIndex; - VMA_ATOMIC_UINT32 m_GpuDefragmentationMemoryTypeBits; // UINT32_MAX means uninitialized. -#if VMA_EXTERNAL_MEMORY - VkExternalMemoryHandleTypeFlagsKHR m_TypeExternalMemoryHandleTypes[VK_MAX_MEMORY_TYPES]; -#endif // #if VMA_EXTERNAL_MEMORY - - VMA_RW_MUTEX m_PoolsMutex; - typedef VmaIntrusiveLinkedList PoolList; - // Protected by m_PoolsMutex. - PoolList m_Pools; - uint32_t m_NextPoolId; - - VmaVulkanFunctions m_VulkanFunctions; - - // Global bit mask AND-ed with any memoryTypeBits to disallow certain memory types. 
- uint32_t m_GlobalMemoryTypeBits; - - void ImportVulkanFunctions(const VmaVulkanFunctions* pVulkanFunctions); - -#if VMA_STATIC_VULKAN_FUNCTIONS == 1 - void ImportVulkanFunctions_Static(); -#endif - - void ImportVulkanFunctions_Custom(const VmaVulkanFunctions* pVulkanFunctions); - -#if VMA_DYNAMIC_VULKAN_FUNCTIONS == 1 - void ImportVulkanFunctions_Dynamic(); -#endif - - void ValidateVulkanFunctions(); - - VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex); - - VkResult AllocateMemoryOfType( - VmaPool pool, - VkDeviceSize size, - VkDeviceSize alignment, - bool dedicatedPreferred, - VkBuffer dedicatedBuffer, - VkImage dedicatedImage, - VkFlags dedicatedBufferImageUsage, - const VmaAllocationCreateInfo& createInfo, - uint32_t memTypeIndex, - VmaSuballocationType suballocType, - VmaDedicatedAllocationList& dedicatedAllocations, - VmaBlockVector& blockVector, - size_t allocationCount, - VmaAllocation* pAllocations); - - // Helper function only to be used inside AllocateDedicatedMemory. - VkResult AllocateDedicatedMemoryPage( - VmaPool pool, - VkDeviceSize size, - VmaSuballocationType suballocType, - uint32_t memTypeIndex, - const VkMemoryAllocateInfo& allocInfo, - bool map, - bool isUserDataString, - bool isMappingAllowed, - void* pUserData, - VmaAllocation* pAllocation); - - // Allocates and registers new VkDeviceMemory specifically for dedicated allocations. - VkResult AllocateDedicatedMemory( - VmaPool pool, - VkDeviceSize size, - VmaSuballocationType suballocType, - VmaDedicatedAllocationList& dedicatedAllocations, - uint32_t memTypeIndex, - bool map, - bool isUserDataString, - bool isMappingAllowed, - bool canAliasMemory, - void* pUserData, - float priority, - VkBuffer dedicatedBuffer, - VkImage dedicatedImage, - VkFlags dedicatedBufferImageUsage, - size_t allocationCount, - VmaAllocation* pAllocations, - const void* pNextChain = nullptr); - - void FreeDedicatedMemory(const VmaAllocation allocation); - - VkResult CalcMemTypeParams( - VmaAllocationCreateInfo& outCreateInfo, - uint32_t memTypeIndex, - VkDeviceSize size, - size_t allocationCount); - VkResult CalcAllocationParams( - VmaAllocationCreateInfo& outCreateInfo, - bool dedicatedRequired, - bool dedicatedPreferred); - - /* - Calculates and returns bit mask of memory types that can support defragmentation - on GPU as they support creation of required buffer for copy operations. 
- */ - uint32_t CalculateGpuDefragmentationMemoryTypeBits() const; - uint32_t CalculateGlobalMemoryTypeBits() const; - - bool GetFlushOrInvalidateRange( - VmaAllocation allocation, - VkDeviceSize offset, VkDeviceSize size, - VkMappedMemoryRange& outRange) const; - -#if VMA_MEMORY_BUDGET - void UpdateVulkanBudget(); -#endif // #if VMA_MEMORY_BUDGET -}; - - -#ifndef _VMA_MEMORY_FUNCTIONS -static void* VmaMalloc(VmaAllocator hAllocator, size_t size, size_t alignment) -{ - return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment); -} - -static void VmaFree(VmaAllocator hAllocator, void* ptr) -{ - VmaFree(&hAllocator->m_AllocationCallbacks, ptr); -} - -template<typename T> -static T* VmaAllocate(VmaAllocator hAllocator) -{ - return (T*)VmaMalloc(hAllocator, sizeof(T), VMA_ALIGN_OF(T)); -} - -template<typename T> -static T* VmaAllocateArray(VmaAllocator hAllocator, size_t count) -{ - return (T*)VmaMalloc(hAllocator, sizeof(T) * count, VMA_ALIGN_OF(T)); -} - -template<typename T> -static void vma_delete(VmaAllocator hAllocator, T* ptr) -{ - if(ptr != VMA_NULL) - { - ptr->~T(); - VmaFree(hAllocator, ptr); - } -} - -template<typename T> -static void vma_delete_array(VmaAllocator hAllocator, T* ptr, size_t count) -{ - if(ptr != VMA_NULL) - { - for(size_t i = count; i--; ) - ptr[i].~T(); - VmaFree(hAllocator, ptr); - } -} -#endif // _VMA_MEMORY_FUNCTIONS - -#ifndef _VMA_DEVICE_MEMORY_BLOCK_FUNCTIONS -VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(VmaAllocator hAllocator) - : m_pMetadata(VMA_NULL), - m_MemoryTypeIndex(UINT32_MAX), - m_Id(0), - m_hMemory(VK_NULL_HANDLE), - m_MapCount(0), - m_pMappedData(VMA_NULL) {} - -VmaDeviceMemoryBlock::~VmaDeviceMemoryBlock() -{ - VMA_ASSERT(m_MapCount == 0 && "VkDeviceMemory block is being destroyed while it is still mapped."); - VMA_ASSERT(m_hMemory == VK_NULL_HANDLE); -} - -void VmaDeviceMemoryBlock::Init( - VmaAllocator hAllocator, - VmaPool hParentPool, - uint32_t newMemoryTypeIndex, - VkDeviceMemory newMemory, - VkDeviceSize newSize, - uint32_t id, - uint32_t algorithm, - VkDeviceSize bufferImageGranularity) -{ - VMA_ASSERT(m_hMemory == VK_NULL_HANDLE); - - m_hParentPool = hParentPool; - m_MemoryTypeIndex = newMemoryTypeIndex; - m_Id = id; - m_hMemory = newMemory; - - switch (algorithm) - { - case VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT: - m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Linear)(hAllocator->GetAllocationCallbacks(), - bufferImageGranularity, false); // isVirtual - break; - default: - VMA_ASSERT(0); - // Fall-through. - case 0: - m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_TLSF)(hAllocator->GetAllocationCallbacks(), - bufferImageGranularity, false); // isVirtual - } - m_pMetadata->Init(newSize); -} - -void VmaDeviceMemoryBlock::Destroy(VmaAllocator allocator) -{ - // Define macro VMA_DEBUG_LOG to receive the list of the unfreed allocations - if (!m_pMetadata->IsEmpty()) - m_pMetadata->DebugLogAllAllocations(); - // This is the most important assert in the entire library. - // Hitting it means you have some memory leak - unreleased VmaAllocation objects.
- VMA_ASSERT(m_pMetadata->IsEmpty() && "Some allocations were not freed before destruction of this memory block!"); - - VMA_ASSERT(m_hMemory != VK_NULL_HANDLE); - allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_pMetadata->GetSize(), m_hMemory); - m_hMemory = VK_NULL_HANDLE; - - vma_delete(allocator, m_pMetadata); - m_pMetadata = VMA_NULL; -} - -void VmaDeviceMemoryBlock::PostFree(VmaAllocator hAllocator) -{ - if(m_MappingHysteresis.PostFree()) - { - VMA_ASSERT(m_MappingHysteresis.GetExtraMapping() == 0); - if (m_MapCount == 0) - { - m_pMappedData = VMA_NULL; - (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_hMemory); - } - } -} - -bool VmaDeviceMemoryBlock::Validate() const -{ - VMA_VALIDATE((m_hMemory != VK_NULL_HANDLE) && - (m_pMetadata->GetSize() != 0)); - - return m_pMetadata->Validate(); -} - -VkResult VmaDeviceMemoryBlock::CheckCorruption(VmaAllocator hAllocator) -{ - void* pData = nullptr; - VkResult res = Map(hAllocator, 1, &pData); - if (res != VK_SUCCESS) - { - return res; - } - - res = m_pMetadata->CheckCorruption(pData); - - Unmap(hAllocator, 1); - - return res; -} - -VkResult VmaDeviceMemoryBlock::Map(VmaAllocator hAllocator, uint32_t count, void** ppData) -{ - if (count == 0) - { - return VK_SUCCESS; - } - - VmaMutexLock lock(m_MapAndBindMutex, hAllocator->m_UseMutex); - const uint32_t oldTotalMapCount = m_MapCount + m_MappingHysteresis.GetExtraMapping(); - m_MappingHysteresis.PostMap(); - if (oldTotalMapCount != 0) - { - m_MapCount += count; - VMA_ASSERT(m_pMappedData != VMA_NULL); - if (ppData != VMA_NULL) - { - *ppData = m_pMappedData; - } - return VK_SUCCESS; - } - else - { - VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)( - hAllocator->m_hDevice, - m_hMemory, - 0, // offset - VK_WHOLE_SIZE, - 0, // flags - &m_pMappedData); - if (result == VK_SUCCESS) - { - if (ppData != VMA_NULL) - { - *ppData = m_pMappedData; - } - m_MapCount = count; - } - return result; - } -} - -void VmaDeviceMemoryBlock::Unmap(VmaAllocator hAllocator, uint32_t count) -{ - if (count == 0) - { - return; - } - - VmaMutexLock lock(m_MapAndBindMutex, hAllocator->m_UseMutex); - if (m_MapCount >= count) - { - m_MapCount -= count; - const uint32_t totalMapCount = m_MapCount + m_MappingHysteresis.GetExtraMapping(); - if (totalMapCount == 0) - { - m_pMappedData = VMA_NULL; - (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_hMemory); - } - m_MappingHysteresis.PostUnmap(); - } - else - { - VMA_ASSERT(0 && "VkDeviceMemory block is being unmapped while it was not previously mapped."); - } -} - -VkResult VmaDeviceMemoryBlock::WriteMagicValueAfterAllocation(VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize) -{ - VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION); - - void* pData; - VkResult res = Map(hAllocator, 1, &pData); - if (res != VK_SUCCESS) - { - return res; - } - - VmaWriteMagicValue(pData, allocOffset + allocSize); - - Unmap(hAllocator, 1); - return VK_SUCCESS; -} - -VkResult VmaDeviceMemoryBlock::ValidateMagicValueAfterAllocation(VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize) -{ - VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION); - - void* pData; - VkResult res = Map(hAllocator, 1, &pData); - if (res != VK_SUCCESS) - { - return res; - } - - if (!VmaValidateMagicValue(pData, allocOffset + allocSize)) - { - VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER FREED ALLOCATION!"); - } - - 
Unmap(hAllocator, 1); - return VK_SUCCESS; -} - -VkResult VmaDeviceMemoryBlock::BindBufferMemory( - const VmaAllocator hAllocator, - const VmaAllocation hAllocation, - VkDeviceSize allocationLocalOffset, - VkBuffer hBuffer, - const void* pNext) -{ - VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK && - hAllocation->GetBlock() == this); - VMA_ASSERT(allocationLocalOffset < hAllocation->GetSize() && - "Invalid allocationLocalOffset. Did you forget that this offset is relative to the beginning of the allocation, not the whole memory block?"); - const VkDeviceSize memoryOffset = hAllocation->GetOffset() + allocationLocalOffset; - // This lock is important so that we don't call vkBind... and/or vkMap... simultaneously on the same VkDeviceMemory from multiple threads. - VmaMutexLock lock(m_MapAndBindMutex, hAllocator->m_UseMutex); - return hAllocator->BindVulkanBuffer(m_hMemory, memoryOffset, hBuffer, pNext); -} - -VkResult VmaDeviceMemoryBlock::BindImageMemory( - const VmaAllocator hAllocator, - const VmaAllocation hAllocation, - VkDeviceSize allocationLocalOffset, - VkImage hImage, - const void* pNext) -{ - VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK && - hAllocation->GetBlock() == this); - VMA_ASSERT(allocationLocalOffset < hAllocation->GetSize() && - "Invalid allocationLocalOffset. Did you forget that this offset is relative to the beginning of the allocation, not the whole memory block?"); - const VkDeviceSize memoryOffset = hAllocation->GetOffset() + allocationLocalOffset; - // This lock is important so that we don't call vkBind... and/or vkMap... simultaneously on the same VkDeviceMemory from multiple threads. - VmaMutexLock lock(m_MapAndBindMutex, hAllocator->m_UseMutex); - return hAllocator->BindVulkanImage(m_hMemory, memoryOffset, hImage, pNext); -} -#endif // _VMA_DEVICE_MEMORY_BLOCK_FUNCTIONS - -#ifndef _VMA_ALLOCATION_T_FUNCTIONS -VmaAllocation_T::VmaAllocation_T(bool mappingAllowed) - : m_Alignment{ 1 }, - m_Size{ 0 }, - m_pUserData{ VMA_NULL }, - m_pName{ VMA_NULL }, - m_MemoryTypeIndex{ 0 }, - m_Type{ (uint8_t)ALLOCATION_TYPE_NONE }, - m_SuballocationType{ (uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN }, - m_MapCount{ 0 }, - m_Flags{ 0 } -{ - if(mappingAllowed) - m_Flags |= (uint8_t)FLAG_MAPPING_ALLOWED; - -#if VMA_STATS_STRING_ENABLED - m_BufferImageUsage = 0; -#endif -} - -VmaAllocation_T::~VmaAllocation_T() -{ - VMA_ASSERT(m_MapCount == 0 && "Allocation was not unmapped before destruction."); - - // Check if owned string was freed. - VMA_ASSERT(m_pName == VMA_NULL); -} - -void VmaAllocation_T::InitBlockAllocation( - VmaDeviceMemoryBlock* block, - VmaAllocHandle allocHandle, - VkDeviceSize alignment, - VkDeviceSize size, - uint32_t memoryTypeIndex, - VmaSuballocationType suballocationType, - bool mapped) -{ - VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE); - VMA_ASSERT(block != VMA_NULL); - m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK; - m_Alignment = alignment; - m_Size = size; - m_MemoryTypeIndex = memoryTypeIndex; - if(mapped) - { - VMA_ASSERT(IsMappingAllowed() && "Mapping is not allowed on this allocation! 
Please use one of the new VMA_ALLOCATION_CREATE_HOST_ACCESS_* flags when creating it."); - m_Flags |= (uint8_t)FLAG_PERSISTENT_MAP; - } - m_SuballocationType = (uint8_t)suballocationType; - m_BlockAllocation.m_Block = block; - m_BlockAllocation.m_AllocHandle = allocHandle; -} - -void VmaAllocation_T::InitDedicatedAllocation( - VmaPool hParentPool, - uint32_t memoryTypeIndex, - VkDeviceMemory hMemory, - VmaSuballocationType suballocationType, - void* pMappedData, - VkDeviceSize size) -{ - VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE); - VMA_ASSERT(hMemory != VK_NULL_HANDLE); - m_Type = (uint8_t)ALLOCATION_TYPE_DEDICATED; - m_Alignment = 0; - m_Size = size; - m_MemoryTypeIndex = memoryTypeIndex; - m_SuballocationType = (uint8_t)suballocationType; - if(pMappedData != VMA_NULL) - { - VMA_ASSERT(IsMappingAllowed() && "Mapping is not allowed on this allocation! Please use one of the new VMA_ALLOCATION_CREATE_HOST_ACCESS_* flags when creating it."); - m_Flags |= (uint8_t)FLAG_PERSISTENT_MAP; - } - m_DedicatedAllocation.m_hParentPool = hParentPool; - m_DedicatedAllocation.m_hMemory = hMemory; - m_DedicatedAllocation.m_pMappedData = pMappedData; - m_DedicatedAllocation.m_Prev = VMA_NULL; - m_DedicatedAllocation.m_Next = VMA_NULL; -} - -void VmaAllocation_T::SetName(VmaAllocator hAllocator, const char* pName) -{ - VMA_ASSERT(pName == VMA_NULL || pName != m_pName); - - FreeName(hAllocator); - - if (pName != VMA_NULL) - m_pName = VmaCreateStringCopy(hAllocator->GetAllocationCallbacks(), pName); -} - -uint8_t VmaAllocation_T::SwapBlockAllocation(VmaAllocator hAllocator, VmaAllocation allocation) -{ - VMA_ASSERT(allocation != VMA_NULL); - VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK); - VMA_ASSERT(allocation->m_Type == ALLOCATION_TYPE_BLOCK); - - if (m_MapCount != 0) - m_BlockAllocation.m_Block->Unmap(hAllocator, m_MapCount); - - m_BlockAllocation.m_Block->m_pMetadata->SetAllocationUserData(m_BlockAllocation.m_AllocHandle, allocation); - VMA_SWAP(m_BlockAllocation, allocation->m_BlockAllocation); - m_BlockAllocation.m_Block->m_pMetadata->SetAllocationUserData(m_BlockAllocation.m_AllocHandle, this); - -#if VMA_STATS_STRING_ENABLED - VMA_SWAP(m_BufferImageUsage, allocation->m_BufferImageUsage); -#endif - return m_MapCount; -} - -VmaAllocHandle VmaAllocation_T::GetAllocHandle() const -{ - switch (m_Type) - { - case ALLOCATION_TYPE_BLOCK: - return m_BlockAllocation.m_AllocHandle; - case ALLOCATION_TYPE_DEDICATED: - return VK_NULL_HANDLE; - default: - VMA_ASSERT(0); - return VK_NULL_HANDLE; - } -} - -VkDeviceSize VmaAllocation_T::GetOffset() const -{ - switch (m_Type) - { - case ALLOCATION_TYPE_BLOCK: - return m_BlockAllocation.m_Block->m_pMetadata->GetAllocationOffset(m_BlockAllocation.m_AllocHandle); - case ALLOCATION_TYPE_DEDICATED: - return 0; - default: - VMA_ASSERT(0); - return 0; - } -} - -VmaPool VmaAllocation_T::GetParentPool() const -{ - switch (m_Type) - { - case ALLOCATION_TYPE_BLOCK: - return m_BlockAllocation.m_Block->GetParentPool(); - case ALLOCATION_TYPE_DEDICATED: - return m_DedicatedAllocation.m_hParentPool; - default: - VMA_ASSERT(0); - return VK_NULL_HANDLE; - } -} - -VkDeviceMemory VmaAllocation_T::GetMemory() const -{ - switch (m_Type) - { - case ALLOCATION_TYPE_BLOCK: - return m_BlockAllocation.m_Block->GetDeviceMemory(); - case ALLOCATION_TYPE_DEDICATED: - return m_DedicatedAllocation.m_hMemory; - default: - VMA_ASSERT(0); - return VK_NULL_HANDLE; - } -} - -void* VmaAllocation_T::GetMappedData() const -{ - switch (m_Type) - { - case ALLOCATION_TYPE_BLOCK: - if (m_MapCount != 0 || 
IsPersistentMap()) - { - void* pBlockData = m_BlockAllocation.m_Block->GetMappedData(); - VMA_ASSERT(pBlockData != VMA_NULL); - return (char*)pBlockData + GetOffset(); - } - else - { - return VMA_NULL; - } - break; - case ALLOCATION_TYPE_DEDICATED: - VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0 || IsPersistentMap())); - return m_DedicatedAllocation.m_pMappedData; - default: - VMA_ASSERT(0); - return VMA_NULL; - } -} - -void VmaAllocation_T::BlockAllocMap() -{ - VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK); - VMA_ASSERT(IsMappingAllowed() && "Mapping is not allowed on this allocation! Please use one of the new VMA_ALLOCATION_CREATE_HOST_ACCESS_* flags when creating it."); - - if (m_MapCount < 0xFF) - { - ++m_MapCount; - } - else - { - VMA_ASSERT(0 && "Allocation mapped too many times simultaneously."); - } -} - -void VmaAllocation_T::BlockAllocUnmap() -{ - VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK); - - if (m_MapCount > 0) - { - --m_MapCount; - } - else - { - VMA_ASSERT(0 && "Unmapping allocation not previously mapped."); - } -} - -VkResult VmaAllocation_T::DedicatedAllocMap(VmaAllocator hAllocator, void** ppData) -{ - VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED); - VMA_ASSERT(IsMappingAllowed() && "Mapping is not allowed on this allocation! Please use one of the new VMA_ALLOCATION_CREATE_HOST_ACCESS_* flags when creating it."); - - if (m_MapCount != 0 || IsPersistentMap()) - { - if (m_MapCount < 0xFF) - { - VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL); - *ppData = m_DedicatedAllocation.m_pMappedData; - ++m_MapCount; - return VK_SUCCESS; - } - else - { - VMA_ASSERT(0 && "Dedicated allocation mapped too many times simultaneously."); - return VK_ERROR_MEMORY_MAP_FAILED; - } - } - else - { - VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)( - hAllocator->m_hDevice, - m_DedicatedAllocation.m_hMemory, - 0, // offset - VK_WHOLE_SIZE, - 0, // flags - ppData); - if (result == VK_SUCCESS) - { - m_DedicatedAllocation.m_pMappedData = *ppData; - m_MapCount = 1; - } - return result; - } -} - -void VmaAllocation_T::DedicatedAllocUnmap(VmaAllocator hAllocator) -{ - VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED); - - if (m_MapCount > 0) - { - --m_MapCount; - if (m_MapCount == 0 && !IsPersistentMap()) - { - m_DedicatedAllocation.m_pMappedData = VMA_NULL; - (*hAllocator->GetVulkanFunctions().vkUnmapMemory)( - hAllocator->m_hDevice, - m_DedicatedAllocation.m_hMemory); - } - } - else - { - VMA_ASSERT(0 && "Unmapping dedicated allocation not previously mapped."); - } -} - -#if VMA_STATS_STRING_ENABLED -void VmaAllocation_T::InitBufferImageUsage(uint32_t bufferImageUsage) -{ - VMA_ASSERT(m_BufferImageUsage == 0); - m_BufferImageUsage = bufferImageUsage; -} - -void VmaAllocation_T::PrintParameters(class VmaJsonWriter& json) const -{ - json.WriteString("Type"); - json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[m_SuballocationType]); - - json.WriteString("Size"); - json.WriteNumber(m_Size); - json.WriteString("Usage"); - json.WriteNumber(m_BufferImageUsage); - - if (m_pUserData != VMA_NULL) - { - json.WriteString("CustomData"); - json.BeginString(); - json.ContinueString_Pointer(m_pUserData); - json.EndString(); - } - if (m_pName != VMA_NULL) - { - json.WriteString("Name"); - json.WriteString(m_pName); - } -} -#endif // VMA_STATS_STRING_ENABLED - -void VmaAllocation_T::FreeName(VmaAllocator hAllocator) -{ - if(m_pName) - { - VmaFreeString(hAllocator->GetAllocationCallbacks(), m_pName); - m_pName = VMA_NULL; - } -} -#endif // 
_VMA_ALLOCATION_T_FUNCTIONS - -#ifndef _VMA_BLOCK_VECTOR_FUNCTIONS -VmaBlockVector::VmaBlockVector( - VmaAllocator hAllocator, - VmaPool hParentPool, - uint32_t memoryTypeIndex, - VkDeviceSize preferredBlockSize, - size_t minBlockCount, - size_t maxBlockCount, - VkDeviceSize bufferImageGranularity, - bool explicitBlockSize, - uint32_t algorithm, - float priority, - VkDeviceSize minAllocationAlignment, - void* pMemoryAllocateNext) - : m_hAllocator(hAllocator), - m_hParentPool(hParentPool), - m_MemoryTypeIndex(memoryTypeIndex), - m_PreferredBlockSize(preferredBlockSize), - m_MinBlockCount(minBlockCount), - m_MaxBlockCount(maxBlockCount), - m_BufferImageGranularity(bufferImageGranularity), - m_ExplicitBlockSize(explicitBlockSize), - m_Algorithm(algorithm), - m_Priority(priority), - m_MinAllocationAlignment(minAllocationAlignment), - m_pMemoryAllocateNext(pMemoryAllocateNext), - m_Blocks(VmaStlAllocator(hAllocator->GetAllocationCallbacks())), - m_NextBlockId(0) {} - -VmaBlockVector::~VmaBlockVector() -{ - for (size_t i = m_Blocks.size(); i--; ) - { - m_Blocks[i]->Destroy(m_hAllocator); - vma_delete(m_hAllocator, m_Blocks[i]); - } -} - -VkResult VmaBlockVector::CreateMinBlocks() -{ - for (size_t i = 0; i < m_MinBlockCount; ++i) - { - VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL); - if (res != VK_SUCCESS) - { - return res; - } - } - return VK_SUCCESS; -} - -void VmaBlockVector::AddStatistics(VmaStatistics& inoutStats) -{ - VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex); - - const size_t blockCount = m_Blocks.size(); - for (uint32_t blockIndex = 0; blockIndex < blockCount; ++blockIndex) - { - const VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex]; - VMA_ASSERT(pBlock); - VMA_HEAVY_ASSERT(pBlock->Validate()); - pBlock->m_pMetadata->AddStatistics(inoutStats); - } -} - -void VmaBlockVector::AddDetailedStatistics(VmaDetailedStatistics& inoutStats) -{ - VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex); - - const size_t blockCount = m_Blocks.size(); - for (uint32_t blockIndex = 0; blockIndex < blockCount; ++blockIndex) - { - const VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex]; - VMA_ASSERT(pBlock); - VMA_HEAVY_ASSERT(pBlock->Validate()); - pBlock->m_pMetadata->AddDetailedStatistics(inoutStats); - } -} - -bool VmaBlockVector::IsEmpty() -{ - VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex); - return m_Blocks.empty(); -} - -bool VmaBlockVector::IsCorruptionDetectionEnabled() const -{ - const uint32_t requiredMemFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT; - return (VMA_DEBUG_DETECT_CORRUPTION != 0) && - (VMA_DEBUG_MARGIN > 0) && - (m_Algorithm == 0 || m_Algorithm == VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT) && - (m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags & requiredMemFlags) == requiredMemFlags; -} - -VkResult VmaBlockVector::Allocate( - VkDeviceSize size, - VkDeviceSize alignment, - const VmaAllocationCreateInfo& createInfo, - VmaSuballocationType suballocType, - size_t allocationCount, - VmaAllocation* pAllocations) -{ - size_t allocIndex; - VkResult res = VK_SUCCESS; - - alignment = VMA_MAX(alignment, m_MinAllocationAlignment); - - if (IsCorruptionDetectionEnabled()) - { - size = VmaAlignUp(size, sizeof(VMA_CORRUPTION_DETECTION_MAGIC_VALUE)); - alignment = VmaAlignUp(alignment, sizeof(VMA_CORRUPTION_DETECTION_MAGIC_VALUE)); - } - - { - VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex); - for (allocIndex = 0; allocIndex < allocationCount; ++allocIndex) - { - res = 
AllocatePage( - size, - alignment, - createInfo, - suballocType, - pAllocations + allocIndex); - if (res != VK_SUCCESS) - { - break; - } - } - } - - if (res != VK_SUCCESS) - { - // Free all already created allocations. - while (allocIndex--) - Free(pAllocations[allocIndex]); - memset(pAllocations, 0, sizeof(VmaAllocation) * allocationCount); - } - - return res; -} - -VkResult VmaBlockVector::AllocatePage( - VkDeviceSize size, - VkDeviceSize alignment, - const VmaAllocationCreateInfo& createInfo, - VmaSuballocationType suballocType, - VmaAllocation* pAllocation) -{ - const bool isUpperAddress = (createInfo.flags & VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT) != 0; - - VkDeviceSize freeMemory; - { - const uint32_t heapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex); - VmaBudget heapBudget = {}; - m_hAllocator->GetHeapBudgets(&heapBudget, heapIndex, 1); - freeMemory = (heapBudget.usage < heapBudget.budget) ? (heapBudget.budget - heapBudget.usage) : 0; - } - - const bool canFallbackToDedicated = !HasExplicitBlockSize() && - (createInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) == 0; - const bool canCreateNewBlock = - ((createInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) == 0) && - (m_Blocks.size() < m_MaxBlockCount) && - (freeMemory >= size || !canFallbackToDedicated); - uint32_t strategy = createInfo.flags & VMA_ALLOCATION_CREATE_STRATEGY_MASK; - - // Upper address can only be used with linear allocator and within single memory block. - if (isUpperAddress && - (m_Algorithm != VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT || m_MaxBlockCount > 1)) - { - return VK_ERROR_FEATURE_NOT_PRESENT; - } - - // Early reject: requested allocation size is larger that maximum block size for this block vector. - if (size + VMA_DEBUG_MARGIN > m_PreferredBlockSize) - { - return VK_ERROR_OUT_OF_DEVICE_MEMORY; - } - - // 1. Search existing allocations. Try to allocate. - if (m_Algorithm == VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT) - { - // Use only last block. - if (!m_Blocks.empty()) - { - VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks.back(); - VMA_ASSERT(pCurrBlock); - VkResult res = AllocateFromBlock( - pCurrBlock, size, alignment, createInfo.flags, createInfo.pUserData, suballocType, strategy, pAllocation); - if (res == VK_SUCCESS) - { - VMA_DEBUG_LOG(" Returned from last block #%u", pCurrBlock->GetId()); - IncrementallySortBlocks(); - return VK_SUCCESS; - } - } - } - else - { - if (strategy != VMA_ALLOCATION_CREATE_STRATEGY_MIN_TIME_BIT) // MIN_MEMORY or default - { - const bool isHostVisible = - (m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0; - if(isHostVisible) - { - const bool isMappingAllowed = (createInfo.flags & - (VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT | VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT)) != 0; - /* - For non-mappable allocations, check blocks that are not mapped first. - For mappable allocations, check blocks that are already mapped first. - This way, having many blocks, we will separate mappable and non-mappable allocations, - hopefully limiting the number of blocks that are mapped, which will help tools like RenderDoc. - */ - for(size_t mappingI = 0; mappingI < 2; ++mappingI) - { - // Forward order in m_Blocks - prefer blocks with smallest amount of free space. 
- for (size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex) - { - VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex]; - VMA_ASSERT(pCurrBlock); - const bool isBlockMapped = pCurrBlock->GetMappedData() != VMA_NULL; - if((mappingI == 0) == (isMappingAllowed == isBlockMapped)) - { - VkResult res = AllocateFromBlock( - pCurrBlock, size, alignment, createInfo.flags, createInfo.pUserData, suballocType, strategy, pAllocation); - if (res == VK_SUCCESS) - { - VMA_DEBUG_LOG(" Returned from existing block #%u", pCurrBlock->GetId()); - IncrementallySortBlocks(); - return VK_SUCCESS; - } - } - } - } - } - else - { - // Forward order in m_Blocks - prefer blocks with smallest amount of free space. - for (size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex) - { - VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex]; - VMA_ASSERT(pCurrBlock); - VkResult res = AllocateFromBlock( - pCurrBlock, size, alignment, createInfo.flags, createInfo.pUserData, suballocType, strategy, pAllocation); - if (res == VK_SUCCESS) - { - VMA_DEBUG_LOG(" Returned from existing block #%u", pCurrBlock->GetId()); - IncrementallySortBlocks(); - return VK_SUCCESS; - } - } - } - } - else // VMA_ALLOCATION_CREATE_STRATEGY_MIN_TIME_BIT - { - // Backward order in m_Blocks - prefer blocks with largest amount of free space. - for (size_t blockIndex = m_Blocks.size(); blockIndex--; ) - { - VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex]; - VMA_ASSERT(pCurrBlock); - VkResult res = AllocateFromBlock(pCurrBlock, size, alignment, createInfo.flags, createInfo.pUserData, suballocType, strategy, pAllocation); - if (res == VK_SUCCESS) - { - VMA_DEBUG_LOG(" Returned from existing block #%u", pCurrBlock->GetId()); - IncrementallySortBlocks(); - return VK_SUCCESS; - } - } - } - } - - // 2. Try to create new block. - if (canCreateNewBlock) - { - // Calculate optimal size for new block. - VkDeviceSize newBlockSize = m_PreferredBlockSize; - uint32_t newBlockSizeShift = 0; - const uint32_t NEW_BLOCK_SIZE_SHIFT_MAX = 3; - - if (!m_ExplicitBlockSize) - { - // Allocate 1/8, 1/4, 1/2 as first blocks. - const VkDeviceSize maxExistingBlockSize = CalcMaxBlockSize(); - for (uint32_t i = 0; i < NEW_BLOCK_SIZE_SHIFT_MAX; ++i) - { - const VkDeviceSize smallerNewBlockSize = newBlockSize / 2; - if (smallerNewBlockSize > maxExistingBlockSize && smallerNewBlockSize >= size * 2) - { - newBlockSize = smallerNewBlockSize; - ++newBlockSizeShift; - } - else - { - break; - } - } - } - - size_t newBlockIndex = 0; - VkResult res = (newBlockSize <= freeMemory || !canFallbackToDedicated) ? - CreateBlock(newBlockSize, &newBlockIndex) : VK_ERROR_OUT_OF_DEVICE_MEMORY; - // Allocation of this size failed? Try 1/2, 1/4, 1/8 of m_PreferredBlockSize. - if (!m_ExplicitBlockSize) - { - while (res < 0 && newBlockSizeShift < NEW_BLOCK_SIZE_SHIFT_MAX) - { - const VkDeviceSize smallerNewBlockSize = newBlockSize / 2; - if (smallerNewBlockSize >= size) - { - newBlockSize = smallerNewBlockSize; - ++newBlockSizeShift; - res = (newBlockSize <= freeMemory || !canFallbackToDedicated) ? 
- CreateBlock(newBlockSize, &newBlockIndex) : VK_ERROR_OUT_OF_DEVICE_MEMORY; - } - else - { - break; - } - } - } - - if (res == VK_SUCCESS) - { - VmaDeviceMemoryBlock* const pBlock = m_Blocks[newBlockIndex]; - VMA_ASSERT(pBlock->m_pMetadata->GetSize() >= size); - - res = AllocateFromBlock( - pBlock, size, alignment, createInfo.flags, createInfo.pUserData, suballocType, strategy, pAllocation); - if (res == VK_SUCCESS) - { - VMA_DEBUG_LOG(" Created new block #%u Size=%llu", pBlock->GetId(), newBlockSize); - IncrementallySortBlocks(); - return VK_SUCCESS; - } - else - { - // Allocation from new block failed, possibly due to VMA_DEBUG_MARGIN or alignment. - return VK_ERROR_OUT_OF_DEVICE_MEMORY; - } - } - } - - return VK_ERROR_OUT_OF_DEVICE_MEMORY; -} - -void VmaBlockVector::Free(const VmaAllocation hAllocation) -{ - VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL; - - bool budgetExceeded = false; - { - const uint32_t heapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex); - VmaBudget heapBudget = {}; - m_hAllocator->GetHeapBudgets(&heapBudget, heapIndex, 1); - budgetExceeded = heapBudget.usage >= heapBudget.budget; - } - - // Scope for lock. - { - VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex); - - VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock(); - - if (IsCorruptionDetectionEnabled()) - { - VkResult res = pBlock->ValidateMagicValueAfterAllocation(m_hAllocator, hAllocation->GetOffset(), hAllocation->GetSize()); - VMA_ASSERT(res == VK_SUCCESS && "Couldn't map block memory to validate magic value."); - } - - if (hAllocation->IsPersistentMap()) - { - pBlock->Unmap(m_hAllocator, 1); - } - - const bool hadEmptyBlockBeforeFree = HasEmptyBlock(); - pBlock->m_pMetadata->Free(hAllocation->GetAllocHandle()); - pBlock->PostFree(m_hAllocator); - VMA_HEAVY_ASSERT(pBlock->Validate()); - - VMA_DEBUG_LOG(" Freed from MemoryTypeIndex=%u", m_MemoryTypeIndex); - - const bool canDeleteBlock = m_Blocks.size() > m_MinBlockCount; - // pBlock became empty after this deallocation. - if (pBlock->m_pMetadata->IsEmpty()) - { - // Already had empty block. We don't want to have two, so delete this one. - if ((hadEmptyBlockBeforeFree || budgetExceeded) && canDeleteBlock) - { - pBlockToDelete = pBlock; - Remove(pBlock); - } - // else: We now have one empty block - leave it. A hysteresis to avoid allocating whole block back and forth. - } - // pBlock didn't become empty, but we have another empty block - find and free that one. - // (This is optional, heuristics.) - else if (hadEmptyBlockBeforeFree && canDeleteBlock) - { - VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back(); - if (pLastBlock->m_pMetadata->IsEmpty()) - { - pBlockToDelete = pLastBlock; - m_Blocks.pop_back(); - } - } - - IncrementallySortBlocks(); - } - - // Destruction of a free block. Deferred until this point, outside of mutex - // lock, for performance reason. 
- if (pBlockToDelete != VMA_NULL) - { - VMA_DEBUG_LOG(" Deleted empty block #%u", pBlockToDelete->GetId()); - pBlockToDelete->Destroy(m_hAllocator); - vma_delete(m_hAllocator, pBlockToDelete); - } - - m_hAllocator->m_Budget.RemoveAllocation(m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex), hAllocation->GetSize()); - m_hAllocator->m_AllocationObjectAllocator.Free(hAllocation); -} - -VkDeviceSize VmaBlockVector::CalcMaxBlockSize() const -{ - VkDeviceSize result = 0; - for (size_t i = m_Blocks.size(); i--; ) - { - result = VMA_MAX(result, m_Blocks[i]->m_pMetadata->GetSize()); - if (result >= m_PreferredBlockSize) - { - break; - } - } - return result; -} - -void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock) -{ - for (uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex) - { - if (m_Blocks[blockIndex] == pBlock) - { - VmaVectorRemove(m_Blocks, blockIndex); - return; - } - } - VMA_ASSERT(0); -} - -void VmaBlockVector::IncrementallySortBlocks() -{ - if (!m_IncrementalSort) - return; - if (m_Algorithm != VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT) - { - // Bubble sort only until first swap. - for (size_t i = 1; i < m_Blocks.size(); ++i) - { - if (m_Blocks[i - 1]->m_pMetadata->GetSumFreeSize() > m_Blocks[i]->m_pMetadata->GetSumFreeSize()) - { - VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]); - return; - } - } - } -} - -void VmaBlockVector::SortByFreeSize() -{ - VMA_SORT(m_Blocks.begin(), m_Blocks.end(), - [](VmaDeviceMemoryBlock* b1, VmaDeviceMemoryBlock* b2) -> bool - { - return b1->m_pMetadata->GetSumFreeSize() < b2->m_pMetadata->GetSumFreeSize(); - }); -} - -VkResult VmaBlockVector::AllocateFromBlock( - VmaDeviceMemoryBlock* pBlock, - VkDeviceSize size, - VkDeviceSize alignment, - VmaAllocationCreateFlags allocFlags, - void* pUserData, - VmaSuballocationType suballocType, - uint32_t strategy, - VmaAllocation* pAllocation) -{ - const bool isUpperAddress = (allocFlags & VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT) != 0; - - VmaAllocationRequest currRequest = {}; - if (pBlock->m_pMetadata->CreateAllocationRequest( - size, - alignment, - isUpperAddress, - suballocType, - strategy, - &currRequest)) - { - return CommitAllocationRequest(currRequest, pBlock, alignment, allocFlags, pUserData, suballocType, pAllocation); - } - return VK_ERROR_OUT_OF_DEVICE_MEMORY; -} - -VkResult VmaBlockVector::CommitAllocationRequest( - VmaAllocationRequest& allocRequest, - VmaDeviceMemoryBlock* pBlock, - VkDeviceSize alignment, - VmaAllocationCreateFlags allocFlags, - void* pUserData, - VmaSuballocationType suballocType, - VmaAllocation* pAllocation) -{ - const bool mapped = (allocFlags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0; - const bool isUserDataString = (allocFlags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0; - const bool isMappingAllowed = (allocFlags & - (VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT | VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT)) != 0; - - pBlock->PostAlloc(); - // Allocate from pCurrBlock. - if (mapped) - { - VkResult res = pBlock->Map(m_hAllocator, 1, VMA_NULL); - if (res != VK_SUCCESS) - { - return res; - } - } - - *pAllocation = m_hAllocator->m_AllocationObjectAllocator.Allocate(isMappingAllowed); - pBlock->m_pMetadata->Alloc(allocRequest, suballocType, *pAllocation); - (*pAllocation)->InitBlockAllocation( - pBlock, - allocRequest.allocHandle, - alignment, - allocRequest.size, // Not size, as actual allocation size may be larger than requested! 
- m_MemoryTypeIndex, - suballocType, - mapped); - VMA_HEAVY_ASSERT(pBlock->Validate()); - if (isUserDataString) - (*pAllocation)->SetName(m_hAllocator, (const char*)pUserData); - else - (*pAllocation)->SetUserData(m_hAllocator, pUserData); - m_hAllocator->m_Budget.AddAllocation(m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex), allocRequest.size); - if (VMA_DEBUG_INITIALIZE_ALLOCATIONS) - { - m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED); - } - if (IsCorruptionDetectionEnabled()) - { - VkResult res = pBlock->WriteMagicValueAfterAllocation(m_hAllocator, (*pAllocation)->GetOffset(), allocRequest.size); - VMA_ASSERT(res == VK_SUCCESS && "Couldn't map block memory to write magic value."); - } - return VK_SUCCESS; -} - -VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize, size_t* pNewBlockIndex) -{ - VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO }; - allocInfo.pNext = m_pMemoryAllocateNext; - allocInfo.memoryTypeIndex = m_MemoryTypeIndex; - allocInfo.allocationSize = blockSize; - -#if VMA_BUFFER_DEVICE_ADDRESS - // Every standalone block can potentially contain a buffer with VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT - always enable the feature. - VkMemoryAllocateFlagsInfoKHR allocFlagsInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO_KHR }; - if (m_hAllocator->m_UseKhrBufferDeviceAddress) - { - allocFlagsInfo.flags = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT_KHR; - VmaPnextChainPushFront(&allocInfo, &allocFlagsInfo); - } -#endif // VMA_BUFFER_DEVICE_ADDRESS - -#if VMA_MEMORY_PRIORITY - VkMemoryPriorityAllocateInfoEXT priorityInfo = { VK_STRUCTURE_TYPE_MEMORY_PRIORITY_ALLOCATE_INFO_EXT }; - if (m_hAllocator->m_UseExtMemoryPriority) - { - VMA_ASSERT(m_Priority >= 0.f && m_Priority <= 1.f); - priorityInfo.priority = m_Priority; - VmaPnextChainPushFront(&allocInfo, &priorityInfo); - } -#endif // VMA_MEMORY_PRIORITY - -#if VMA_EXTERNAL_MEMORY - // Attach VkExportMemoryAllocateInfoKHR if necessary. - VkExportMemoryAllocateInfoKHR exportMemoryAllocInfo = { VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_KHR }; - exportMemoryAllocInfo.handleTypes = m_hAllocator->GetExternalMemoryHandleTypeFlags(m_MemoryTypeIndex); - if (exportMemoryAllocInfo.handleTypes != 0) - { - VmaPnextChainPushFront(&allocInfo, &exportMemoryAllocInfo); - } -#endif // VMA_EXTERNAL_MEMORY - - VkDeviceMemory mem = VK_NULL_HANDLE; - VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem); - if (res < 0) - { - return res; - } - - // New VkDeviceMemory successfully created. - - // Create new Allocation for it. 
- VmaDeviceMemoryBlock* const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator); - pBlock->Init( - m_hAllocator, - m_hParentPool, - m_MemoryTypeIndex, - mem, - allocInfo.allocationSize, - m_NextBlockId++, - m_Algorithm, - m_BufferImageGranularity); - - m_Blocks.push_back(pBlock); - if (pNewBlockIndex != VMA_NULL) - { - *pNewBlockIndex = m_Blocks.size() - 1; - } - - return VK_SUCCESS; -} - -bool VmaBlockVector::HasEmptyBlock() -{ - for (size_t index = 0, count = m_Blocks.size(); index < count; ++index) - { - VmaDeviceMemoryBlock* const pBlock = m_Blocks[index]; - if (pBlock->m_pMetadata->IsEmpty()) - { - return true; - } - } - return false; -} - -#if VMA_STATS_STRING_ENABLED -void VmaBlockVector::PrintDetailedMap(class VmaJsonWriter& json) -{ - VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex); - - - json.BeginObject(); - for (size_t i = 0; i < m_Blocks.size(); ++i) - { - json.BeginString(); - json.ContinueString(m_Blocks[i]->GetId()); - json.EndString(); - - json.BeginObject(); - json.WriteString("MapRefCount"); - json.WriteNumber(m_Blocks[i]->GetMapRefCount()); - - m_Blocks[i]->m_pMetadata->PrintDetailedMap(json); - json.EndObject(); - } - json.EndObject(); -} -#endif // VMA_STATS_STRING_ENABLED - -VkResult VmaBlockVector::CheckCorruption() -{ - if (!IsCorruptionDetectionEnabled()) - { - return VK_ERROR_FEATURE_NOT_PRESENT; - } - - VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex); - for (uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex) - { - VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex]; - VMA_ASSERT(pBlock); - VkResult res = pBlock->CheckCorruption(m_hAllocator); - if (res != VK_SUCCESS) - { - return res; - } - } - return VK_SUCCESS; -} - -#endif // _VMA_BLOCK_VECTOR_FUNCTIONS - -#ifndef _VMA_DEFRAGMENTATION_CONTEXT_FUNCTIONS -VmaDefragmentationContext_T::VmaDefragmentationContext_T( - VmaAllocator hAllocator, - const VmaDefragmentationInfo& info) - : m_MaxPassBytes(info.maxBytesPerPass == 0 ? VK_WHOLE_SIZE : info.maxBytesPerPass), - m_MaxPassAllocations(info.maxAllocationsPerPass == 0 ? 
UINT32_MAX : info.maxAllocationsPerPass), - m_MoveAllocator(hAllocator->GetAllocationCallbacks()), - m_Moves(m_MoveAllocator) -{ - m_Algorithm = info.flags & VMA_DEFRAGMENTATION_FLAG_ALGORITHM_MASK; - - if (info.pool != VMA_NULL) - { - m_BlockVectorCount = 1; - m_PoolBlockVector = &info.pool->m_BlockVector; - m_pBlockVectors = &m_PoolBlockVector; - m_PoolBlockVector->SetIncrementalSort(false); - m_PoolBlockVector->SortByFreeSize(); - } - else - { - m_BlockVectorCount = hAllocator->GetMemoryTypeCount(); - m_PoolBlockVector = VMA_NULL; - m_pBlockVectors = hAllocator->m_pBlockVectors; - for (uint32_t i = 0; i < m_BlockVectorCount; ++i) - { - VmaBlockVector* vector = m_pBlockVectors[i]; - if (vector != VMA_NULL) - { - vector->SetIncrementalSort(false); - vector->SortByFreeSize(); - } - } - } - - switch (m_Algorithm) - { - case 0: // Default algorithm - m_Algorithm = VMA_DEFRAGMENTATION_FLAG_ALGORITHM_BALANCED_BIT; - case VMA_DEFRAGMENTATION_FLAG_ALGORITHM_BALANCED_BIT: - { - m_AlgorithmState = vma_new_array(hAllocator, StateBalanced, m_BlockVectorCount); - break; - } - case VMA_DEFRAGMENTATION_FLAG_ALGORITHM_EXTENSIVE_BIT: - { - if (hAllocator->GetBufferImageGranularity() > 1) - { - m_AlgorithmState = vma_new_array(hAllocator, StateExtensive, m_BlockVectorCount); - } - break; - } - } -} - -VmaDefragmentationContext_T::~VmaDefragmentationContext_T() -{ - if (m_PoolBlockVector != VMA_NULL) - { - m_PoolBlockVector->SetIncrementalSort(true); - } - else - { - for (uint32_t i = 0; i < m_BlockVectorCount; ++i) - { - VmaBlockVector* vector = m_pBlockVectors[i]; - if (vector != VMA_NULL) - vector->SetIncrementalSort(true); - } - } - - if (m_AlgorithmState) - { - switch (m_Algorithm) - { - case VMA_DEFRAGMENTATION_FLAG_ALGORITHM_BALANCED_BIT: - vma_delete_array(m_MoveAllocator.m_pCallbacks, reinterpret_cast(m_AlgorithmState), m_BlockVectorCount); - break; - case VMA_DEFRAGMENTATION_FLAG_ALGORITHM_EXTENSIVE_BIT: - vma_delete_array(m_MoveAllocator.m_pCallbacks, reinterpret_cast(m_AlgorithmState), m_BlockVectorCount); - break; - default: - VMA_ASSERT(0); - } - } -} - -VkResult VmaDefragmentationContext_T::DefragmentPassBegin(VmaDefragmentationPassMoveInfo& moveInfo) -{ - if (m_PoolBlockVector != VMA_NULL) - { - VmaMutexLockWrite lock(m_PoolBlockVector->GetMutex(), m_PoolBlockVector->GetAllocator()->m_UseMutex); - - if (m_PoolBlockVector->GetBlockCount() > 1) - ComputeDefragmentation(*m_PoolBlockVector, 0); - else if (m_PoolBlockVector->GetBlockCount() == 1) - ReallocWithinBlock(*m_PoolBlockVector, m_PoolBlockVector->GetBlock(0)); - } - else - { - for (uint32_t i = 0; i < m_BlockVectorCount; ++i) - { - if (m_pBlockVectors[i] != VMA_NULL) - { - VmaMutexLockWrite lock(m_pBlockVectors[i]->GetMutex(), m_pBlockVectors[i]->GetAllocator()->m_UseMutex); - - if (m_pBlockVectors[i]->GetBlockCount() > 1) - { - if (ComputeDefragmentation(*m_pBlockVectors[i], i)) - break; - } - else if (m_pBlockVectors[i]->GetBlockCount() == 1) - { - if (ReallocWithinBlock(*m_pBlockVectors[i], m_pBlockVectors[i]->GetBlock(0))) - break; - } - } - } - } - - moveInfo.moveCount = static_cast(m_Moves.size()); - if (moveInfo.moveCount > 0) - { - moveInfo.pMoves = m_Moves.data(); - return VK_INCOMPLETE; - } - - moveInfo.pMoves = VMA_NULL; - return VK_SUCCESS; -} - -VkResult VmaDefragmentationContext_T::DefragmentPassEnd(VmaDefragmentationPassMoveInfo& moveInfo) -{ - VMA_ASSERT(moveInfo.moveCount > 0 ? 
moveInfo.pMoves != VMA_NULL : true); - - VkResult result = VK_SUCCESS; - VmaStlAllocator blockAllocator(m_MoveAllocator.m_pCallbacks); - VmaVector> immovableBlocks(blockAllocator); - VmaVector> mappedBlocks(blockAllocator); - - VmaAllocator allocator = VMA_NULL; - for (uint32_t i = 0; i < moveInfo.moveCount; ++i) - { - VmaDefragmentationMove& move = moveInfo.pMoves[i]; - size_t prevCount = 0, currentCount = 0; - VkDeviceSize freedBlockSize = 0; - - uint32_t vectorIndex; - VmaBlockVector* vector; - if (m_PoolBlockVector != VMA_NULL) - { - vectorIndex = 0; - vector = m_PoolBlockVector; - } - else - { - vectorIndex = move.srcAllocation->GetMemoryTypeIndex(); - vector = m_pBlockVectors[vectorIndex]; - VMA_ASSERT(vector != VMA_NULL); - } - - switch (move.operation) - { - case VMA_DEFRAGMENTATION_MOVE_OPERATION_COPY: - { - uint8_t mapCount = move.srcAllocation->SwapBlockAllocation(vector->m_hAllocator, move.dstTmpAllocation); - if (mapCount > 0) - { - allocator = vector->m_hAllocator; - VmaDeviceMemoryBlock* newMapBlock = move.srcAllocation->GetBlock(); - bool notPresent = true; - for (FragmentedBlock& block : mappedBlocks) - { - if (block.block == newMapBlock) - { - notPresent = false; - block.data += mapCount; - break; - } - } - if (notPresent) - mappedBlocks.push_back({ mapCount, newMapBlock }); - } - - // Scope for locks, Free have it's own lock - { - VmaMutexLockRead lock(vector->GetMutex(), vector->GetAllocator()->m_UseMutex); - prevCount = vector->GetBlockCount(); - freedBlockSize = move.dstTmpAllocation->GetBlock()->m_pMetadata->GetSize(); - } - vector->Free(move.dstTmpAllocation); - { - VmaMutexLockRead lock(vector->GetMutex(), vector->GetAllocator()->m_UseMutex); - currentCount = vector->GetBlockCount(); - } - - result = VK_INCOMPLETE; - break; - } - case VMA_DEFRAGMENTATION_MOVE_OPERATION_IGNORE: - { - m_PassStats.bytesMoved -= move.srcAllocation->GetSize(); - --m_PassStats.allocationsMoved; - vector->Free(move.dstTmpAllocation); - - VmaDeviceMemoryBlock* newBlock = move.srcAllocation->GetBlock(); - bool notPresent = true; - for (const FragmentedBlock& block : immovableBlocks) - { - if (block.block == newBlock) - { - notPresent = false; - break; - } - } - if (notPresent) - immovableBlocks.push_back({ vectorIndex, newBlock }); - break; - } - case VMA_DEFRAGMENTATION_MOVE_OPERATION_DESTROY: - { - m_PassStats.bytesMoved -= move.srcAllocation->GetSize(); - --m_PassStats.allocationsMoved; - // Scope for locks, Free have it's own lock - { - VmaMutexLockRead lock(vector->GetMutex(), vector->GetAllocator()->m_UseMutex); - prevCount = vector->GetBlockCount(); - freedBlockSize = move.srcAllocation->GetBlock()->m_pMetadata->GetSize(); - } - vector->Free(move.srcAllocation); - { - VmaMutexLockRead lock(vector->GetMutex(), vector->GetAllocator()->m_UseMutex); - currentCount = vector->GetBlockCount(); - } - freedBlockSize *= prevCount - currentCount; - - VkDeviceSize dstBlockSize; - { - VmaMutexLockRead lock(vector->GetMutex(), vector->GetAllocator()->m_UseMutex); - dstBlockSize = move.dstTmpAllocation->GetBlock()->m_pMetadata->GetSize(); - } - vector->Free(move.dstTmpAllocation); - { - VmaMutexLockRead lock(vector->GetMutex(), vector->GetAllocator()->m_UseMutex); - freedBlockSize += dstBlockSize * (currentCount - vector->GetBlockCount()); - currentCount = vector->GetBlockCount(); - } - - result = VK_INCOMPLETE; - break; - } - default: - VMA_ASSERT(0); - } - - if (prevCount > currentCount) - { - size_t freedBlocks = prevCount - currentCount; - m_PassStats.deviceMemoryBlocksFreed += 
static_cast(freedBlocks); - m_PassStats.bytesFreed += freedBlockSize; - } - - switch (m_Algorithm) - { - case VMA_DEFRAGMENTATION_FLAG_ALGORITHM_EXTENSIVE_BIT: - { - if (m_AlgorithmState != VMA_NULL) - { - // Avoid unnecessary tries to allocate when new free block is avaiable - StateExtensive& state = reinterpret_cast(m_AlgorithmState)[vectorIndex]; - if (state.firstFreeBlock != SIZE_MAX) - { - const size_t diff = prevCount - currentCount; - if (state.firstFreeBlock >= diff) - { - state.firstFreeBlock -= diff; - if (state.firstFreeBlock != 0) - state.firstFreeBlock -= vector->GetBlock(state.firstFreeBlock - 1)->m_pMetadata->IsEmpty(); - } - else - state.firstFreeBlock = 0; - } - } - } - } - } - moveInfo.moveCount = 0; - moveInfo.pMoves = VMA_NULL; - m_Moves.clear(); - - // Update stats - m_GlobalStats.allocationsMoved += m_PassStats.allocationsMoved; - m_GlobalStats.bytesFreed += m_PassStats.bytesFreed; - m_GlobalStats.bytesMoved += m_PassStats.bytesMoved; - m_GlobalStats.deviceMemoryBlocksFreed += m_PassStats.deviceMemoryBlocksFreed; - m_PassStats = { 0 }; - - // Move blocks with immovable allocations according to algorithm - if (immovableBlocks.size() > 0) - { - switch (m_Algorithm) - { - case VMA_DEFRAGMENTATION_FLAG_ALGORITHM_EXTENSIVE_BIT: - { - if (m_AlgorithmState != VMA_NULL) - { - bool swapped = false; - // Move to the start of free blocks range - for (const FragmentedBlock& block : immovableBlocks) - { - StateExtensive& state = reinterpret_cast(m_AlgorithmState)[block.data]; - if (state.operation != StateExtensive::Operation::Cleanup) - { - VmaBlockVector* vector = m_pBlockVectors[block.data]; - VmaMutexLockWrite lock(vector->GetMutex(), vector->GetAllocator()->m_UseMutex); - - for (size_t i = 0, count = vector->GetBlockCount() - m_ImmovableBlockCount; i < count; ++i) - { - if (vector->GetBlock(i) == block.block) - { - VMA_SWAP(vector->m_Blocks[i], vector->m_Blocks[vector->GetBlockCount() - ++m_ImmovableBlockCount]); - if (state.firstFreeBlock != SIZE_MAX) - { - if (i + 1 < state.firstFreeBlock) - { - if (state.firstFreeBlock > 1) - VMA_SWAP(vector->m_Blocks[i], vector->m_Blocks[--state.firstFreeBlock]); - else - --state.firstFreeBlock; - } - } - swapped = true; - break; - } - } - } - } - if (swapped) - result = VK_INCOMPLETE; - break; - } - } - default: - { - // Move to the begining - for (const FragmentedBlock& block : immovableBlocks) - { - VmaBlockVector* vector = m_pBlockVectors[block.data]; - VmaMutexLockWrite lock(vector->GetMutex(), vector->GetAllocator()->m_UseMutex); - - for (size_t i = m_ImmovableBlockCount; i < vector->GetBlockCount(); ++i) - { - if (vector->GetBlock(i) == block.block) - { - VMA_SWAP(vector->m_Blocks[i], vector->m_Blocks[m_ImmovableBlockCount++]); - break; - } - } - } - break; - } - } - } - - // Bulk-map destination blocks - for (const FragmentedBlock& block : mappedBlocks) - { - VkResult res = block.block->Map(allocator, block.data, VMA_NULL); - VMA_ASSERT(res == VK_SUCCESS); - } - return result; -} - -bool VmaDefragmentationContext_T::ComputeDefragmentation(VmaBlockVector& vector, size_t index) -{ - switch (m_Algorithm) - { - case VMA_DEFRAGMENTATION_FLAG_ALGORITHM_FAST_BIT: - return ComputeDefragmentation_Fast(vector); - default: - VMA_ASSERT(0); - case VMA_DEFRAGMENTATION_FLAG_ALGORITHM_BALANCED_BIT: - return ComputeDefragmentation_Balanced(vector, index, true); - case VMA_DEFRAGMENTATION_FLAG_ALGORITHM_FULL_BIT: - return ComputeDefragmentation_Full(vector); - case VMA_DEFRAGMENTATION_FLAG_ALGORITHM_EXTENSIVE_BIT: - return 
ComputeDefragmentation_Extensive(vector, index); - } -} - -VmaDefragmentationContext_T::MoveAllocationData VmaDefragmentationContext_T::GetMoveData( - VmaAllocHandle handle, VmaBlockMetadata* metadata) -{ - MoveAllocationData moveData; - moveData.move.srcAllocation = (VmaAllocation)metadata->GetAllocationUserData(handle); - moveData.size = moveData.move.srcAllocation->GetSize(); - moveData.alignment = moveData.move.srcAllocation->GetAlignment(); - moveData.type = moveData.move.srcAllocation->GetSuballocationType(); - moveData.flags = 0; - - if (moveData.move.srcAllocation->IsPersistentMap()) - moveData.flags |= VMA_ALLOCATION_CREATE_MAPPED_BIT; - if (moveData.move.srcAllocation->IsMappingAllowed()) - moveData.flags |= VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT | VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT; - - return moveData; -} - -VmaDefragmentationContext_T::CounterStatus VmaDefragmentationContext_T::CheckCounters(VkDeviceSize bytes) -{ - // Ignore allocation if will exceed max size for copy - if (m_PassStats.bytesMoved + bytes > m_MaxPassBytes) - { - if (++m_IgnoredAllocs < MAX_ALLOCS_TO_IGNORE) - return CounterStatus::Ignore; - else - return CounterStatus::End; - } - return CounterStatus::Pass; -} - -bool VmaDefragmentationContext_T::IncrementCounters(VkDeviceSize bytes) -{ - m_PassStats.bytesMoved += bytes; - // Early return when max found - if (++m_PassStats.allocationsMoved >= m_MaxPassAllocations || m_PassStats.bytesMoved >= m_MaxPassBytes) - { - VMA_ASSERT(m_PassStats.allocationsMoved == m_MaxPassAllocations || - m_PassStats.bytesMoved == m_MaxPassBytes && "Exceeded maximal pass threshold!"); - return true; - } - return false; -} - -bool VmaDefragmentationContext_T::ReallocWithinBlock(VmaBlockVector& vector, VmaDeviceMemoryBlock* block) -{ - VmaBlockMetadata* metadata = block->m_pMetadata; - - for (VmaAllocHandle handle = metadata->GetAllocationListBegin(); - handle != VK_NULL_HANDLE; - handle = metadata->GetNextAllocation(handle)) - { - MoveAllocationData moveData = GetMoveData(handle, metadata); - // Ignore newly created allocations by defragmentation algorithm - if (moveData.move.srcAllocation->GetUserData() == this) - continue; - switch (CheckCounters(moveData.move.srcAllocation->GetSize())) - { - case CounterStatus::Ignore: - continue; - case CounterStatus::End: - return true; - default: - VMA_ASSERT(0); - case CounterStatus::Pass: - break; - } - - VkDeviceSize offset = moveData.move.srcAllocation->GetOffset(); - if (offset != 0 && metadata->GetSumFreeSize() >= moveData.size) - { - VmaAllocationRequest request = {}; - if (metadata->CreateAllocationRequest( - moveData.size, - moveData.alignment, - false, - moveData.type, - VMA_ALLOCATION_CREATE_STRATEGY_MIN_OFFSET_BIT, - &request)) - { - if (metadata->GetAllocationOffset(request.allocHandle) < offset) - { - if (vector.CommitAllocationRequest( - request, - block, - moveData.alignment, - moveData.flags, - this, - moveData.type, - &moveData.move.dstTmpAllocation) == VK_SUCCESS) - { - m_Moves.push_back(moveData.move); - if (IncrementCounters(moveData.size)) - return true; - } - } - } - } - } - return false; -} - -bool VmaDefragmentationContext_T::AllocInOtherBlock(size_t start, size_t end, MoveAllocationData& data, VmaBlockVector& vector) -{ - for (; start < end; ++start) - { - VmaDeviceMemoryBlock* dstBlock = vector.GetBlock(start); - if (dstBlock->m_pMetadata->GetSumFreeSize() >= data.size) - { - if (vector.AllocateFromBlock(dstBlock, - data.size, - data.alignment, - data.flags, - this, - data.type, - 0, - 
&data.move.dstTmpAllocation) == VK_SUCCESS) - { - m_Moves.push_back(data.move); - if (IncrementCounters(data.size)) - return true; - break; - } - } - } - return false; -} - -bool VmaDefragmentationContext_T::ComputeDefragmentation_Fast(VmaBlockVector& vector) -{ - // Move only between blocks - - // Go through allocations in last blocks and try to fit them inside first ones - for (size_t i = vector.GetBlockCount() - 1; i > m_ImmovableBlockCount; --i) - { - VmaBlockMetadata* metadata = vector.GetBlock(i)->m_pMetadata; - - for (VmaAllocHandle handle = metadata->GetAllocationListBegin(); - handle != VK_NULL_HANDLE; - handle = metadata->GetNextAllocation(handle)) - { - MoveAllocationData moveData = GetMoveData(handle, metadata); - // Ignore newly created allocations by defragmentation algorithm - if (moveData.move.srcAllocation->GetUserData() == this) - continue; - switch (CheckCounters(moveData.move.srcAllocation->GetSize())) - { - case CounterStatus::Ignore: - continue; - case CounterStatus::End: - return true; - default: - VMA_ASSERT(0); - case CounterStatus::Pass: - break; - } - - // Check all previous blocks for free space - if (AllocInOtherBlock(0, i, moveData, vector)) - return true; - } - } - return false; -} - -bool VmaDefragmentationContext_T::ComputeDefragmentation_Balanced(VmaBlockVector& vector, size_t index, bool update) -{ - // Go over every allocation and try to fit it in previous blocks at lowest offsets, - // if not possible: realloc within single block to minimize offset (exclude offset == 0), - // but only if there are noticable gaps between them (some heuristic, ex. average size of allocation in block) - VMA_ASSERT(m_AlgorithmState != VMA_NULL); - - StateBalanced& vectorState = reinterpret_cast(m_AlgorithmState)[index]; - if (update && vectorState.avgAllocSize == UINT64_MAX) - UpdateVectorStatistics(vector, vectorState); - - const size_t startMoveCount = m_Moves.size(); - VkDeviceSize minimalFreeRegion = vectorState.avgFreeSize / 2; - for (size_t i = vector.GetBlockCount() - 1; i > m_ImmovableBlockCount; --i) - { - VmaDeviceMemoryBlock* block = vector.GetBlock(i); - VmaBlockMetadata* metadata = block->m_pMetadata; - VkDeviceSize prevFreeRegionSize = 0; - - for (VmaAllocHandle handle = metadata->GetAllocationListBegin(); - handle != VK_NULL_HANDLE; - handle = metadata->GetNextAllocation(handle)) - { - MoveAllocationData moveData = GetMoveData(handle, metadata); - // Ignore newly created allocations by defragmentation algorithm - if (moveData.move.srcAllocation->GetUserData() == this) - continue; - switch (CheckCounters(moveData.move.srcAllocation->GetSize())) - { - case CounterStatus::Ignore: - continue; - case CounterStatus::End: - return true; - default: - VMA_ASSERT(0); - case CounterStatus::Pass: - break; - } - - // Check all previous blocks for free space - const size_t prevMoveCount = m_Moves.size(); - if (AllocInOtherBlock(0, i, moveData, vector)) - return true; - - VkDeviceSize nextFreeRegionSize = metadata->GetNextFreeRegionSize(handle); - // If no room found then realloc within block for lower offset - VkDeviceSize offset = moveData.move.srcAllocation->GetOffset(); - if (prevMoveCount == m_Moves.size() && offset != 0 && metadata->GetSumFreeSize() >= moveData.size) - { - // Check if realloc will make sense - if (prevFreeRegionSize >= minimalFreeRegion || - nextFreeRegionSize >= minimalFreeRegion || - moveData.size <= vectorState.avgFreeSize || - moveData.size <= vectorState.avgAllocSize) - { - VmaAllocationRequest request = {}; - if 
(metadata->CreateAllocationRequest( - moveData.size, - moveData.alignment, - false, - moveData.type, - VMA_ALLOCATION_CREATE_STRATEGY_MIN_OFFSET_BIT, - &request)) - { - if (metadata->GetAllocationOffset(request.allocHandle) < offset) - { - if (vector.CommitAllocationRequest( - request, - block, - moveData.alignment, - moveData.flags, - this, - moveData.type, - &moveData.move.dstTmpAllocation) == VK_SUCCESS) - { - m_Moves.push_back(moveData.move); - if (IncrementCounters(moveData.size)) - return true; - } - } - } - } - } - prevFreeRegionSize = nextFreeRegionSize; - } - } - - // No moves perfomed, update statistics to current vector state - if (startMoveCount == m_Moves.size() && !update) - { - vectorState.avgAllocSize = UINT64_MAX; - return ComputeDefragmentation_Balanced(vector, index, false); - } - return false; -} - -bool VmaDefragmentationContext_T::ComputeDefragmentation_Full(VmaBlockVector& vector) -{ - // Go over every allocation and try to fit it in previous blocks at lowest offsets, - // if not possible: realloc within single block to minimize offset (exclude offset == 0) - - for (size_t i = vector.GetBlockCount() - 1; i > m_ImmovableBlockCount; --i) - { - VmaDeviceMemoryBlock* block = vector.GetBlock(i); - VmaBlockMetadata* metadata = block->m_pMetadata; - - for (VmaAllocHandle handle = metadata->GetAllocationListBegin(); - handle != VK_NULL_HANDLE; - handle = metadata->GetNextAllocation(handle)) - { - MoveAllocationData moveData = GetMoveData(handle, metadata); - // Ignore newly created allocations by defragmentation algorithm - if (moveData.move.srcAllocation->GetUserData() == this) - continue; - switch (CheckCounters(moveData.move.srcAllocation->GetSize())) - { - case CounterStatus::Ignore: - continue; - case CounterStatus::End: - return true; - default: - VMA_ASSERT(0); - case CounterStatus::Pass: - break; - } - - // Check all previous blocks for free space - const size_t prevMoveCount = m_Moves.size(); - if (AllocInOtherBlock(0, i, moveData, vector)) - return true; - - // If no room found then realloc within block for lower offset - VkDeviceSize offset = moveData.move.srcAllocation->GetOffset(); - if (prevMoveCount == m_Moves.size() && offset != 0 && metadata->GetSumFreeSize() >= moveData.size) - { - VmaAllocationRequest request = {}; - if (metadata->CreateAllocationRequest( - moveData.size, - moveData.alignment, - false, - moveData.type, - VMA_ALLOCATION_CREATE_STRATEGY_MIN_OFFSET_BIT, - &request)) - { - if (metadata->GetAllocationOffset(request.allocHandle) < offset) - { - if (vector.CommitAllocationRequest( - request, - block, - moveData.alignment, - moveData.flags, - this, - moveData.type, - &moveData.move.dstTmpAllocation) == VK_SUCCESS) - { - m_Moves.push_back(moveData.move); - if (IncrementCounters(moveData.size)) - return true; - } - } - } - } - } - } - return false; -} - -bool VmaDefragmentationContext_T::ComputeDefragmentation_Extensive(VmaBlockVector& vector, size_t index) -{ - // First free single block, then populate it to the brim, then free another block, and so on - - // Fallback to previous algorithm since without granularity conflicts it can achieve max packing - if (vector.m_BufferImageGranularity == 1) - return ComputeDefragmentation_Full(vector); - - VMA_ASSERT(m_AlgorithmState != VMA_NULL); - - StateExtensive& vectorState = reinterpret_cast(m_AlgorithmState)[index]; - - bool texturePresent = false, bufferPresent = false, otherPresent = false; - switch (vectorState.operation) - { - case StateExtensive::Operation::Done: // Vector defragmented - return 
false; - case StateExtensive::Operation::FindFreeBlockBuffer: - case StateExtensive::Operation::FindFreeBlockTexture: - case StateExtensive::Operation::FindFreeBlockAll: - { - // No more blocks to free, just perform fast realloc and move to cleanup - if (vectorState.firstFreeBlock == 0) - { - vectorState.operation = StateExtensive::Operation::Cleanup; - return ComputeDefragmentation_Fast(vector); - } - - // No free blocks, have to clear last one - size_t last = (vectorState.firstFreeBlock == SIZE_MAX ? vector.GetBlockCount() : vectorState.firstFreeBlock) - 1; - VmaBlockMetadata* freeMetadata = vector.GetBlock(last)->m_pMetadata; - - const size_t prevMoveCount = m_Moves.size(); - for (VmaAllocHandle handle = freeMetadata->GetAllocationListBegin(); - handle != VK_NULL_HANDLE; - handle = freeMetadata->GetNextAllocation(handle)) - { - MoveAllocationData moveData = GetMoveData(handle, freeMetadata); - switch (CheckCounters(moveData.move.srcAllocation->GetSize())) - { - case CounterStatus::Ignore: - continue; - case CounterStatus::End: - return true; - default: - VMA_ASSERT(0); - case CounterStatus::Pass: - break; - } - - // Check all previous blocks for free space - if (AllocInOtherBlock(0, last, moveData, vector)) - { - // Full clear performed already - if (prevMoveCount != m_Moves.size() && freeMetadata->GetNextAllocation(handle) == VK_NULL_HANDLE) - reinterpret_cast(m_AlgorithmState)[index] = last; - return true; - } - } - - if (prevMoveCount == m_Moves.size()) - { - // Cannot perform full clear, have to move data in other blocks around - if (last != 0) - { - for (size_t i = last - 1; i; --i) - { - if (ReallocWithinBlock(vector, vector.GetBlock(i))) - return true; - } - } - - if (prevMoveCount == m_Moves.size()) - { - // No possible reallocs within blocks, try to move them around fast - return ComputeDefragmentation_Fast(vector); - } - } - else - { - switch (vectorState.operation) - { - case StateExtensive::Operation::FindFreeBlockBuffer: - vectorState.operation = StateExtensive::Operation::MoveBuffers; - break; - default: - VMA_ASSERT(0); - case StateExtensive::Operation::FindFreeBlockTexture: - vectorState.operation = StateExtensive::Operation::MoveTextures; - break; - case StateExtensive::Operation::FindFreeBlockAll: - vectorState.operation = StateExtensive::Operation::MoveAll; - break; - } - vectorState.firstFreeBlock = last; - // Nothing done, block found without reallocations, can perform another reallocs in same pass - return ComputeDefragmentation_Extensive(vector, index); - } - break; - } - case StateExtensive::Operation::MoveTextures: - { - if (MoveDataToFreeBlocks(VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL, vector, - vectorState.firstFreeBlock, texturePresent, bufferPresent, otherPresent)) - { - if (texturePresent) - { - vectorState.operation = StateExtensive::Operation::FindFreeBlockTexture; - return ComputeDefragmentation_Extensive(vector, index); - } - - if (!bufferPresent && !otherPresent) - { - vectorState.operation = StateExtensive::Operation::Cleanup; - break; - } - - // No more textures to move, check buffers - vectorState.operation = StateExtensive::Operation::MoveBuffers; - bufferPresent = false; - otherPresent = false; - } - else - break; - } - case StateExtensive::Operation::MoveBuffers: - { - if (MoveDataToFreeBlocks(VMA_SUBALLOCATION_TYPE_BUFFER, vector, - vectorState.firstFreeBlock, texturePresent, bufferPresent, otherPresent)) - { - if (bufferPresent) - { - vectorState.operation = StateExtensive::Operation::FindFreeBlockBuffer; - return 
ComputeDefragmentation_Extensive(vector, index); - } - - if (!otherPresent) - { - vectorState.operation = StateExtensive::Operation::Cleanup; - break; - } - - // No more buffers to move, check all others - vectorState.operation = StateExtensive::Operation::MoveAll; - otherPresent = false; - } - else - break; - } - case StateExtensive::Operation::MoveAll: - { - if (MoveDataToFreeBlocks(VMA_SUBALLOCATION_TYPE_FREE, vector, - vectorState.firstFreeBlock, texturePresent, bufferPresent, otherPresent)) - { - if (otherPresent) - { - vectorState.operation = StateExtensive::Operation::FindFreeBlockBuffer; - return ComputeDefragmentation_Extensive(vector, index); - } - // Everything moved - vectorState.operation = StateExtensive::Operation::Cleanup; - } - break; - } - case StateExtensive::Operation::Cleanup: - // Cleanup is handled below so that other operations may reuse the cleanup code. This case is here to prevent the unhandled enum value warning (C4062). - break; - } - - if (vectorState.operation == StateExtensive::Operation::Cleanup) - { - // All other work done, pack data in blocks even tighter if possible - const size_t prevMoveCount = m_Moves.size(); - for (size_t i = 0; i < vector.GetBlockCount(); ++i) - { - if (ReallocWithinBlock(vector, vector.GetBlock(i))) - return true; - } - - if (prevMoveCount == m_Moves.size()) - vectorState.operation = StateExtensive::Operation::Done; - } - return false; -} - -void VmaDefragmentationContext_T::UpdateVectorStatistics(VmaBlockVector& vector, StateBalanced& state) -{ - size_t allocCount = 0; - size_t freeCount = 0; - state.avgFreeSize = 0; - state.avgAllocSize = 0; - - for (size_t i = 0; i < vector.GetBlockCount(); ++i) - { - VmaBlockMetadata* metadata = vector.GetBlock(i)->m_pMetadata; - - allocCount += metadata->GetAllocationCount(); - freeCount += metadata->GetFreeRegionsCount(); - state.avgFreeSize += metadata->GetSumFreeSize(); - state.avgAllocSize += metadata->GetSize(); - } - - state.avgAllocSize = (state.avgAllocSize - state.avgFreeSize) / allocCount; - state.avgFreeSize /= freeCount; -} - -bool VmaDefragmentationContext_T::MoveDataToFreeBlocks(VmaSuballocationType currentType, - VmaBlockVector& vector, size_t firstFreeBlock, - bool& texturePresent, bool& bufferPresent, bool& otherPresent) -{ - const size_t prevMoveCount = m_Moves.size(); - for (size_t i = firstFreeBlock ; i;) - { - VmaDeviceMemoryBlock* block = vector.GetBlock(--i); - VmaBlockMetadata* metadata = block->m_pMetadata; - - for (VmaAllocHandle handle = metadata->GetAllocationListBegin(); - handle != VK_NULL_HANDLE; - handle = metadata->GetNextAllocation(handle)) - { - MoveAllocationData moveData = GetMoveData(handle, metadata); - // Ignore newly created allocations by defragmentation algorithm - if (moveData.move.srcAllocation->GetUserData() == this) - continue; - switch (CheckCounters(moveData.move.srcAllocation->GetSize())) - { - case CounterStatus::Ignore: - continue; - case CounterStatus::End: - return true; - default: - VMA_ASSERT(0); - case CounterStatus::Pass: - break; - } - - // Move only single type of resources at once - if (!VmaIsBufferImageGranularityConflict(moveData.type, currentType)) - { - // Try to fit allocation into free blocks - if (AllocInOtherBlock(firstFreeBlock, vector.GetBlockCount(), moveData, vector)) - return false; - } - - if (!VmaIsBufferImageGranularityConflict(moveData.type, VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL)) - texturePresent = true; - else if (!VmaIsBufferImageGranularityConflict(moveData.type, VMA_SUBALLOCATION_TYPE_BUFFER)) - bufferPresent = 
true; - else - otherPresent = true; - } - } - return prevMoveCount == m_Moves.size(); -} -#endif // _VMA_DEFRAGMENTATION_CONTEXT_FUNCTIONS - -#ifndef _VMA_POOL_T_FUNCTIONS -VmaPool_T::VmaPool_T( - VmaAllocator hAllocator, - const VmaPoolCreateInfo& createInfo, - VkDeviceSize preferredBlockSize) - : m_BlockVector( - hAllocator, - this, // hParentPool - createInfo.memoryTypeIndex, - createInfo.blockSize != 0 ? createInfo.blockSize : preferredBlockSize, - createInfo.minBlockCount, - createInfo.maxBlockCount, - (createInfo.flags& VMA_POOL_CREATE_IGNORE_BUFFER_IMAGE_GRANULARITY_BIT) != 0 ? 1 : hAllocator->GetBufferImageGranularity(), - createInfo.blockSize != 0, // explicitBlockSize - createInfo.flags & VMA_POOL_CREATE_ALGORITHM_MASK, // algorithm - createInfo.priority, - VMA_MAX(hAllocator->GetMemoryTypeMinAlignment(createInfo.memoryTypeIndex), createInfo.minAllocationAlignment), - createInfo.pMemoryAllocateNext), - m_Id(0), - m_Name(VMA_NULL) {} - -VmaPool_T::~VmaPool_T() -{ - VMA_ASSERT(m_PrevPool == VMA_NULL && m_NextPool == VMA_NULL); -} - -void VmaPool_T::SetName(const char* pName) -{ - const VkAllocationCallbacks* allocs = m_BlockVector.GetAllocator()->GetAllocationCallbacks(); - VmaFreeString(allocs, m_Name); - - if (pName != VMA_NULL) - { - m_Name = VmaCreateStringCopy(allocs, pName); - } - else - { - m_Name = VMA_NULL; - } -} -#endif // _VMA_POOL_T_FUNCTIONS - -#ifndef _VMA_ALLOCATOR_T_FUNCTIONS -VmaAllocator_T::VmaAllocator_T(const VmaAllocatorCreateInfo* pCreateInfo) : - m_UseMutex((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_EXTERNALLY_SYNCHRONIZED_BIT) == 0), - m_VulkanApiVersion(pCreateInfo->vulkanApiVersion != 0 ? pCreateInfo->vulkanApiVersion : VK_API_VERSION_1_0), - m_UseKhrDedicatedAllocation((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT) != 0), - m_UseKhrBindMemory2((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT) != 0), - m_UseExtMemoryBudget((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT) != 0), - m_UseAmdDeviceCoherentMemory((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_AMD_DEVICE_COHERENT_MEMORY_BIT) != 0), - m_UseKhrBufferDeviceAddress((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT) != 0), - m_UseExtMemoryPriority((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_EXT_MEMORY_PRIORITY_BIT) != 0), - m_hDevice(pCreateInfo->device), - m_hInstance(pCreateInfo->instance), - m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL), - m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ? - *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks), - m_AllocationObjectAllocator(&m_AllocationCallbacks), - m_HeapSizeLimitMask(0), - m_DeviceMemoryCount(0), - m_PreferredLargeHeapBlockSize(0), - m_PhysicalDevice(pCreateInfo->physicalDevice), - m_GpuDefragmentationMemoryTypeBits(UINT32_MAX), - m_NextPoolId(0), - m_GlobalMemoryTypeBits(UINT32_MAX) -{ - if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0)) - { - m_UseKhrDedicatedAllocation = false; - m_UseKhrBindMemory2 = false; - } - - if(VMA_DEBUG_DETECT_CORRUPTION) - { - // Needs to be multiply of uint32_t size because we are going to write VMA_CORRUPTION_DETECTION_MAGIC_VALUE to it. 
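// --- Illustrative aside (not part of the original patch or of the VMA sources) ---
// The VmaDefragmentationContext_T pass machinery removed above (DefragmentPassBegin /
// DefragmentPassEnd and the ComputeDefragmentation_* algorithms) is normally driven
// through VMA's public API. A minimal usage sketch, assuming the documented VMA 3.x
// entry points and that vk_mem_alloc.h is included:
static void RunIncrementalDefragmentationSketch(VmaAllocator allocator)
{
    VmaDefragmentationInfo defragInfo = {};
    defragInfo.flags = VMA_DEFRAGMENTATION_FLAG_ALGORITHM_BALANCED_BIT;

    VmaDefragmentationContext ctx = VK_NULL_HANDLE;
    if (vmaBeginDefragmentation(allocator, &defragInfo, &ctx) != VK_SUCCESS)
        return;

    for (;;)
    {
        VmaDefragmentationPassMoveInfo pass = {};
        // Begin returns VK_SUCCESS when there is nothing (more) to move.
        if (vmaBeginDefragmentationPass(allocator, ctx, &pass) == VK_SUCCESS)
            break;
        // VK_INCOMPLETE: the application copies each pass.pMoves[i] source allocation
        // into its dstTmpAllocation here (e.g. vkCmdCopyBuffer) and rebinds resources.
        if (vmaEndDefragmentationPass(allocator, ctx, &pass) == VK_SUCCESS)
            break;
    }

    VmaDefragmentationStats stats = {};
    vmaEndDefragmentation(allocator, ctx, &stats);
}
// --- end aside ---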
- VMA_ASSERT(VMA_DEBUG_MARGIN % sizeof(uint32_t) == 0); - } - - VMA_ASSERT(pCreateInfo->physicalDevice && pCreateInfo->device && pCreateInfo->instance); - - if(m_VulkanApiVersion < VK_MAKE_VERSION(1, 1, 0)) - { -#if !(VMA_DEDICATED_ALLOCATION) - if((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT) != 0) - { - VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT set but required extensions are disabled by preprocessor macros."); - } -#endif -#if !(VMA_BIND_MEMORY2) - if((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT) != 0) - { - VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT set but required extension is disabled by preprocessor macros."); - } -#endif - } -#if !(VMA_MEMORY_BUDGET) - if((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT) != 0) - { - VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT set but required extension is disabled by preprocessor macros."); - } -#endif -#if !(VMA_BUFFER_DEVICE_ADDRESS) - if(m_UseKhrBufferDeviceAddress) - { - VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT is set but required extension or Vulkan 1.2 is not available in your Vulkan header or its support in VMA has been disabled by a preprocessor macro."); - } -#endif -#if VMA_VULKAN_VERSION < 1002000 - if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 2, 0)) - { - VMA_ASSERT(0 && "vulkanApiVersion >= VK_API_VERSION_1_2 but required Vulkan version is disabled by preprocessor macros."); - } -#endif -#if VMA_VULKAN_VERSION < 1001000 - if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0)) - { - VMA_ASSERT(0 && "vulkanApiVersion >= VK_API_VERSION_1_1 but required Vulkan version is disabled by preprocessor macros."); - } -#endif -#if !(VMA_MEMORY_PRIORITY) - if(m_UseExtMemoryPriority) - { - VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_EXT_MEMORY_PRIORITY_BIT is set but required extension is not available in your Vulkan header or its support in VMA has been disabled by a preprocessor macro."); - } -#endif - - memset(&m_DeviceMemoryCallbacks, 0 ,sizeof(m_DeviceMemoryCallbacks)); - memset(&m_PhysicalDeviceProperties, 0, sizeof(m_PhysicalDeviceProperties)); - memset(&m_MemProps, 0, sizeof(m_MemProps)); - - memset(&m_pBlockVectors, 0, sizeof(m_pBlockVectors)); - memset(&m_VulkanFunctions, 0, sizeof(m_VulkanFunctions)); - -#if VMA_EXTERNAL_MEMORY - memset(&m_TypeExternalMemoryHandleTypes, 0, sizeof(m_TypeExternalMemoryHandleTypes)); -#endif // #if VMA_EXTERNAL_MEMORY - - if(pCreateInfo->pDeviceMemoryCallbacks != VMA_NULL) - { - m_DeviceMemoryCallbacks.pUserData = pCreateInfo->pDeviceMemoryCallbacks->pUserData; - m_DeviceMemoryCallbacks.pfnAllocate = pCreateInfo->pDeviceMemoryCallbacks->pfnAllocate; - m_DeviceMemoryCallbacks.pfnFree = pCreateInfo->pDeviceMemoryCallbacks->pfnFree; - } - - ImportVulkanFunctions(pCreateInfo->pVulkanFunctions); - - (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties); - (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps); - - VMA_ASSERT(VmaIsPow2(VMA_MIN_ALIGNMENT)); - VMA_ASSERT(VmaIsPow2(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY)); - VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.bufferImageGranularity)); - VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.nonCoherentAtomSize)); - - m_PreferredLargeHeapBlockSize = (pCreateInfo->preferredLargeHeapBlockSize != 0) ? 
- pCreateInfo->preferredLargeHeapBlockSize : static_cast(VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE); - - m_GlobalMemoryTypeBits = CalculateGlobalMemoryTypeBits(); - -#if VMA_EXTERNAL_MEMORY - if(pCreateInfo->pTypeExternalMemoryHandleTypes != VMA_NULL) - { - memcpy(m_TypeExternalMemoryHandleTypes, pCreateInfo->pTypeExternalMemoryHandleTypes, - sizeof(VkExternalMemoryHandleTypeFlagsKHR) * GetMemoryTypeCount()); - } -#endif // #if VMA_EXTERNAL_MEMORY - - if(pCreateInfo->pHeapSizeLimit != VMA_NULL) - { - for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex) - { - const VkDeviceSize limit = pCreateInfo->pHeapSizeLimit[heapIndex]; - if(limit != VK_WHOLE_SIZE) - { - m_HeapSizeLimitMask |= 1u << heapIndex; - if(limit < m_MemProps.memoryHeaps[heapIndex].size) - { - m_MemProps.memoryHeaps[heapIndex].size = limit; - } - } - } - } - - for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex) - { - // Create only supported types - if((m_GlobalMemoryTypeBits & (1u << memTypeIndex)) != 0) - { - const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex); - m_pBlockVectors[memTypeIndex] = vma_new(this, VmaBlockVector)( - this, - VK_NULL_HANDLE, // hParentPool - memTypeIndex, - preferredBlockSize, - 0, - SIZE_MAX, - GetBufferImageGranularity(), - false, // explicitBlockSize - 0, // algorithm - 0.5f, // priority (0.5 is the default per Vulkan spec) - GetMemoryTypeMinAlignment(memTypeIndex), // minAllocationAlignment - VMA_NULL); // // pMemoryAllocateNext - // No need to call m_pBlockVectors[memTypeIndex][blockVectorTypeIndex]->CreateMinBlocks here, - // becase minBlockCount is 0. - } - } -} - -VkResult VmaAllocator_T::Init(const VmaAllocatorCreateInfo* pCreateInfo) -{ - VkResult res = VK_SUCCESS; - -#if VMA_MEMORY_BUDGET - if(m_UseExtMemoryBudget) - { - UpdateVulkanBudget(); - } -#endif // #if VMA_MEMORY_BUDGET - - return res; -} - -VmaAllocator_T::~VmaAllocator_T() -{ - VMA_ASSERT(m_Pools.IsEmpty()); - - for(size_t memTypeIndex = GetMemoryTypeCount(); memTypeIndex--; ) - { - vma_delete(this, m_pBlockVectors[memTypeIndex]); - } -} - -void VmaAllocator_T::ImportVulkanFunctions(const VmaVulkanFunctions* pVulkanFunctions) -{ -#if VMA_STATIC_VULKAN_FUNCTIONS == 1 - ImportVulkanFunctions_Static(); -#endif - - if(pVulkanFunctions != VMA_NULL) - { - ImportVulkanFunctions_Custom(pVulkanFunctions); - } - -#if VMA_DYNAMIC_VULKAN_FUNCTIONS == 1 - ImportVulkanFunctions_Dynamic(); -#endif - - ValidateVulkanFunctions(); -} - -#if VMA_STATIC_VULKAN_FUNCTIONS == 1 - -void VmaAllocator_T::ImportVulkanFunctions_Static() -{ - // Vulkan 1.0 - m_VulkanFunctions.vkGetInstanceProcAddr = (PFN_vkGetInstanceProcAddr)vkGetInstanceProcAddr; - m_VulkanFunctions.vkGetDeviceProcAddr = (PFN_vkGetDeviceProcAddr)vkGetDeviceProcAddr; - m_VulkanFunctions.vkGetPhysicalDeviceProperties = (PFN_vkGetPhysicalDeviceProperties)vkGetPhysicalDeviceProperties; - m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = (PFN_vkGetPhysicalDeviceMemoryProperties)vkGetPhysicalDeviceMemoryProperties; - m_VulkanFunctions.vkAllocateMemory = (PFN_vkAllocateMemory)vkAllocateMemory; - m_VulkanFunctions.vkFreeMemory = (PFN_vkFreeMemory)vkFreeMemory; - m_VulkanFunctions.vkMapMemory = (PFN_vkMapMemory)vkMapMemory; - m_VulkanFunctions.vkUnmapMemory = (PFN_vkUnmapMemory)vkUnmapMemory; - m_VulkanFunctions.vkFlushMappedMemoryRanges = (PFN_vkFlushMappedMemoryRanges)vkFlushMappedMemoryRanges; - m_VulkanFunctions.vkInvalidateMappedMemoryRanges = (PFN_vkInvalidateMappedMemoryRanges)vkInvalidateMappedMemoryRanges; - 
m_VulkanFunctions.vkBindBufferMemory = (PFN_vkBindBufferMemory)vkBindBufferMemory; - m_VulkanFunctions.vkBindImageMemory = (PFN_vkBindImageMemory)vkBindImageMemory; - m_VulkanFunctions.vkGetBufferMemoryRequirements = (PFN_vkGetBufferMemoryRequirements)vkGetBufferMemoryRequirements; - m_VulkanFunctions.vkGetImageMemoryRequirements = (PFN_vkGetImageMemoryRequirements)vkGetImageMemoryRequirements; - m_VulkanFunctions.vkCreateBuffer = (PFN_vkCreateBuffer)vkCreateBuffer; - m_VulkanFunctions.vkDestroyBuffer = (PFN_vkDestroyBuffer)vkDestroyBuffer; - m_VulkanFunctions.vkCreateImage = (PFN_vkCreateImage)vkCreateImage; - m_VulkanFunctions.vkDestroyImage = (PFN_vkDestroyImage)vkDestroyImage; - m_VulkanFunctions.vkCmdCopyBuffer = (PFN_vkCmdCopyBuffer)vkCmdCopyBuffer; - - // Vulkan 1.1 -#if VMA_VULKAN_VERSION >= 1001000 - if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0)) - { - m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR = (PFN_vkGetBufferMemoryRequirements2)vkGetBufferMemoryRequirements2; - m_VulkanFunctions.vkGetImageMemoryRequirements2KHR = (PFN_vkGetImageMemoryRequirements2)vkGetImageMemoryRequirements2; - m_VulkanFunctions.vkBindBufferMemory2KHR = (PFN_vkBindBufferMemory2)vkBindBufferMemory2; - m_VulkanFunctions.vkBindImageMemory2KHR = (PFN_vkBindImageMemory2)vkBindImageMemory2; - m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties2KHR = (PFN_vkGetPhysicalDeviceMemoryProperties2)vkGetPhysicalDeviceMemoryProperties2; - } -#endif - -#if VMA_VULKAN_VERSION >= 1003000 - if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 3, 0)) - { - m_VulkanFunctions.vkGetDeviceBufferMemoryRequirements = (PFN_vkGetDeviceBufferMemoryRequirements)vkGetDeviceBufferMemoryRequirements; - m_VulkanFunctions.vkGetDeviceImageMemoryRequirements = (PFN_vkGetDeviceImageMemoryRequirements)vkGetDeviceImageMemoryRequirements; - } -#endif -} - -#endif // VMA_STATIC_VULKAN_FUNCTIONS == 1 - -void VmaAllocator_T::ImportVulkanFunctions_Custom(const VmaVulkanFunctions* pVulkanFunctions) -{ - VMA_ASSERT(pVulkanFunctions != VMA_NULL); - -#define VMA_COPY_IF_NOT_NULL(funcName) \ - if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName; - - VMA_COPY_IF_NOT_NULL(vkGetInstanceProcAddr); - VMA_COPY_IF_NOT_NULL(vkGetDeviceProcAddr); - VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties); - VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties); - VMA_COPY_IF_NOT_NULL(vkAllocateMemory); - VMA_COPY_IF_NOT_NULL(vkFreeMemory); - VMA_COPY_IF_NOT_NULL(vkMapMemory); - VMA_COPY_IF_NOT_NULL(vkUnmapMemory); - VMA_COPY_IF_NOT_NULL(vkFlushMappedMemoryRanges); - VMA_COPY_IF_NOT_NULL(vkInvalidateMappedMemoryRanges); - VMA_COPY_IF_NOT_NULL(vkBindBufferMemory); - VMA_COPY_IF_NOT_NULL(vkBindImageMemory); - VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements); - VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements); - VMA_COPY_IF_NOT_NULL(vkCreateBuffer); - VMA_COPY_IF_NOT_NULL(vkDestroyBuffer); - VMA_COPY_IF_NOT_NULL(vkCreateImage); - VMA_COPY_IF_NOT_NULL(vkDestroyImage); - VMA_COPY_IF_NOT_NULL(vkCmdCopyBuffer); - -#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000 - VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR); - VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR); -#endif - -#if VMA_BIND_MEMORY2 || VMA_VULKAN_VERSION >= 1001000 - VMA_COPY_IF_NOT_NULL(vkBindBufferMemory2KHR); - VMA_COPY_IF_NOT_NULL(vkBindImageMemory2KHR); -#endif - -#if VMA_MEMORY_BUDGET - VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties2KHR); -#endif - -#if VMA_VULKAN_VERSION >= 1003000 - 
VMA_COPY_IF_NOT_NULL(vkGetDeviceBufferMemoryRequirements); - VMA_COPY_IF_NOT_NULL(vkGetDeviceImageMemoryRequirements); -#endif - -#undef VMA_COPY_IF_NOT_NULL -} - -#if VMA_DYNAMIC_VULKAN_FUNCTIONS == 1 - -void VmaAllocator_T::ImportVulkanFunctions_Dynamic() -{ - VMA_ASSERT(m_VulkanFunctions.vkGetInstanceProcAddr && m_VulkanFunctions.vkGetDeviceProcAddr && - "To use VMA_DYNAMIC_VULKAN_FUNCTIONS in new versions of VMA you now have to pass " - "VmaVulkanFunctions::vkGetInstanceProcAddr and vkGetDeviceProcAddr as VmaAllocatorCreateInfo::pVulkanFunctions. " - "Other members can be null."); - -#define VMA_FETCH_INSTANCE_FUNC(memberName, functionPointerType, functionNameString) \ - if(m_VulkanFunctions.memberName == VMA_NULL) \ - m_VulkanFunctions.memberName = \ - (functionPointerType)m_VulkanFunctions.vkGetInstanceProcAddr(m_hInstance, functionNameString); -#define VMA_FETCH_DEVICE_FUNC(memberName, functionPointerType, functionNameString) \ - if(m_VulkanFunctions.memberName == VMA_NULL) \ - m_VulkanFunctions.memberName = \ - (functionPointerType)m_VulkanFunctions.vkGetDeviceProcAddr(m_hDevice, functionNameString); - - VMA_FETCH_INSTANCE_FUNC(vkGetPhysicalDeviceProperties, PFN_vkGetPhysicalDeviceProperties, "vkGetPhysicalDeviceProperties"); - VMA_FETCH_INSTANCE_FUNC(vkGetPhysicalDeviceMemoryProperties, PFN_vkGetPhysicalDeviceMemoryProperties, "vkGetPhysicalDeviceMemoryProperties"); - VMA_FETCH_DEVICE_FUNC(vkAllocateMemory, PFN_vkAllocateMemory, "vkAllocateMemory"); - VMA_FETCH_DEVICE_FUNC(vkFreeMemory, PFN_vkFreeMemory, "vkFreeMemory"); - VMA_FETCH_DEVICE_FUNC(vkMapMemory, PFN_vkMapMemory, "vkMapMemory"); - VMA_FETCH_DEVICE_FUNC(vkUnmapMemory, PFN_vkUnmapMemory, "vkUnmapMemory"); - VMA_FETCH_DEVICE_FUNC(vkFlushMappedMemoryRanges, PFN_vkFlushMappedMemoryRanges, "vkFlushMappedMemoryRanges"); - VMA_FETCH_DEVICE_FUNC(vkInvalidateMappedMemoryRanges, PFN_vkInvalidateMappedMemoryRanges, "vkInvalidateMappedMemoryRanges"); - VMA_FETCH_DEVICE_FUNC(vkBindBufferMemory, PFN_vkBindBufferMemory, "vkBindBufferMemory"); - VMA_FETCH_DEVICE_FUNC(vkBindImageMemory, PFN_vkBindImageMemory, "vkBindImageMemory"); - VMA_FETCH_DEVICE_FUNC(vkGetBufferMemoryRequirements, PFN_vkGetBufferMemoryRequirements, "vkGetBufferMemoryRequirements"); - VMA_FETCH_DEVICE_FUNC(vkGetImageMemoryRequirements, PFN_vkGetImageMemoryRequirements, "vkGetImageMemoryRequirements"); - VMA_FETCH_DEVICE_FUNC(vkCreateBuffer, PFN_vkCreateBuffer, "vkCreateBuffer"); - VMA_FETCH_DEVICE_FUNC(vkDestroyBuffer, PFN_vkDestroyBuffer, "vkDestroyBuffer"); - VMA_FETCH_DEVICE_FUNC(vkCreateImage, PFN_vkCreateImage, "vkCreateImage"); - VMA_FETCH_DEVICE_FUNC(vkDestroyImage, PFN_vkDestroyImage, "vkDestroyImage"); - VMA_FETCH_DEVICE_FUNC(vkCmdCopyBuffer, PFN_vkCmdCopyBuffer, "vkCmdCopyBuffer"); - -#if VMA_VULKAN_VERSION >= 1001000 - if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0)) - { - VMA_FETCH_DEVICE_FUNC(vkGetBufferMemoryRequirements2KHR, PFN_vkGetBufferMemoryRequirements2, "vkGetBufferMemoryRequirements2"); - VMA_FETCH_DEVICE_FUNC(vkGetImageMemoryRequirements2KHR, PFN_vkGetImageMemoryRequirements2, "vkGetImageMemoryRequirements2"); - VMA_FETCH_DEVICE_FUNC(vkBindBufferMemory2KHR, PFN_vkBindBufferMemory2, "vkBindBufferMemory2"); - VMA_FETCH_DEVICE_FUNC(vkBindImageMemory2KHR, PFN_vkBindImageMemory2, "vkBindImageMemory2"); - VMA_FETCH_INSTANCE_FUNC(vkGetPhysicalDeviceMemoryProperties2KHR, PFN_vkGetPhysicalDeviceMemoryProperties2, "vkGetPhysicalDeviceMemoryProperties2"); - } -#endif - -#if VMA_DEDICATED_ALLOCATION - if(m_UseKhrDedicatedAllocation) - { - 
VMA_FETCH_DEVICE_FUNC(vkGetBufferMemoryRequirements2KHR, PFN_vkGetBufferMemoryRequirements2KHR, "vkGetBufferMemoryRequirements2KHR"); - VMA_FETCH_DEVICE_FUNC(vkGetImageMemoryRequirements2KHR, PFN_vkGetImageMemoryRequirements2KHR, "vkGetImageMemoryRequirements2KHR"); - } -#endif - -#if VMA_BIND_MEMORY2 - if(m_UseKhrBindMemory2) - { - VMA_FETCH_DEVICE_FUNC(vkBindBufferMemory2KHR, PFN_vkBindBufferMemory2KHR, "vkBindBufferMemory2KHR"); - VMA_FETCH_DEVICE_FUNC(vkBindImageMemory2KHR, PFN_vkBindImageMemory2KHR, "vkBindImageMemory2KHR"); - } -#endif // #if VMA_BIND_MEMORY2 - -#if VMA_MEMORY_BUDGET - if(m_UseExtMemoryBudget) - { - VMA_FETCH_INSTANCE_FUNC(vkGetPhysicalDeviceMemoryProperties2KHR, PFN_vkGetPhysicalDeviceMemoryProperties2KHR, "vkGetPhysicalDeviceMemoryProperties2KHR"); - } -#endif // #if VMA_MEMORY_BUDGET - -#if VMA_VULKAN_VERSION >= 1003000 - if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 3, 0)) - { - VMA_FETCH_DEVICE_FUNC(vkGetDeviceBufferMemoryRequirements, PFN_vkGetDeviceBufferMemoryRequirements, "vkGetDeviceBufferMemoryRequirements"); - VMA_FETCH_DEVICE_FUNC(vkGetDeviceImageMemoryRequirements, PFN_vkGetDeviceImageMemoryRequirements, "vkGetDeviceImageMemoryRequirements"); - } -#endif - -#undef VMA_FETCH_DEVICE_FUNC -#undef VMA_FETCH_INSTANCE_FUNC -} - -#endif // VMA_DYNAMIC_VULKAN_FUNCTIONS == 1 - -void VmaAllocator_T::ValidateVulkanFunctions() -{ - VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL); - VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL); - VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL); - VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL); - VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL); - VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL); - VMA_ASSERT(m_VulkanFunctions.vkFlushMappedMemoryRanges != VMA_NULL); - VMA_ASSERT(m_VulkanFunctions.vkInvalidateMappedMemoryRanges != VMA_NULL); - VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL); - VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL); - VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL); - VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL); - VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL); - VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL); - VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL); - VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL); - VMA_ASSERT(m_VulkanFunctions.vkCmdCopyBuffer != VMA_NULL); - -#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000 - if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0) || m_UseKhrDedicatedAllocation) - { - VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL); - VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL); - } -#endif - -#if VMA_BIND_MEMORY2 || VMA_VULKAN_VERSION >= 1001000 - if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0) || m_UseKhrBindMemory2) - { - VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory2KHR != VMA_NULL); - VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory2KHR != VMA_NULL); - } -#endif - -#if VMA_MEMORY_BUDGET || VMA_VULKAN_VERSION >= 1001000 - if(m_UseExtMemoryBudget || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0)) - { - VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties2KHR != VMA_NULL); - } -#endif - -#if VMA_VULKAN_VERSION >= 1003000 - if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 3, 0)) - { - VMA_ASSERT(m_VulkanFunctions.vkGetDeviceBufferMemoryRequirements != VMA_NULL); - 
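// --- Illustrative aside (not part of the original patch or of the VMA sources) ---
// ImportVulkanFunctions_Dynamic above asserts that, with VMA_DYNAMIC_VULKAN_FUNCTIONS,
// the application must supply vkGetInstanceProcAddr and vkGetDeviceProcAddr through
// VmaAllocatorCreateInfo::pVulkanFunctions; everything else is fetched on demand and then
// checked by ValidateVulkanFunctions. A minimal setup sketch, assuming the documented
// public API and that vk_mem_alloc.h is included:
static VkResult CreateAllocatorSketch(VkInstance instance, VkPhysicalDevice physicalDevice,
                                      VkDevice device, VmaAllocator* pAllocator)
{
    VmaVulkanFunctions vkFuncs = {};
    vkFuncs.vkGetInstanceProcAddr = vkGetInstanceProcAddr;
    vkFuncs.vkGetDeviceProcAddr   = vkGetDeviceProcAddr;

    VmaAllocatorCreateInfo createInfo = {};
    createInfo.vulkanApiVersion = VK_API_VERSION_1_2; // whatever version the application targets
    createInfo.instance         = instance;
    createInfo.physicalDevice   = physicalDevice;
    createInfo.device           = device;
    createInfo.pVulkanFunctions = &vkFuncs;
    return vmaCreateAllocator(&createInfo, pAllocator);
}
// --- end aside ---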
VMA_ASSERT(m_VulkanFunctions.vkGetDeviceImageMemoryRequirements != VMA_NULL); - } -#endif -} - -VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex) -{ - const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex); - const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size; - const bool isSmallHeap = heapSize <= VMA_SMALL_HEAP_MAX_SIZE; - return VmaAlignUp(isSmallHeap ? (heapSize / 8) : m_PreferredLargeHeapBlockSize, (VkDeviceSize)32); -} - -VkResult VmaAllocator_T::AllocateMemoryOfType( - VmaPool pool, - VkDeviceSize size, - VkDeviceSize alignment, - bool dedicatedPreferred, - VkBuffer dedicatedBuffer, - VkImage dedicatedImage, - VkFlags dedicatedBufferImageUsage, - const VmaAllocationCreateInfo& createInfo, - uint32_t memTypeIndex, - VmaSuballocationType suballocType, - VmaDedicatedAllocationList& dedicatedAllocations, - VmaBlockVector& blockVector, - size_t allocationCount, - VmaAllocation* pAllocations) -{ - VMA_ASSERT(pAllocations != VMA_NULL); - VMA_DEBUG_LOG(" AllocateMemory: MemoryTypeIndex=%u, AllocationCount=%zu, Size=%llu", memTypeIndex, allocationCount, size); - - VmaAllocationCreateInfo finalCreateInfo = createInfo; - VkResult res = CalcMemTypeParams( - finalCreateInfo, - memTypeIndex, - size, - allocationCount); - if(res != VK_SUCCESS) - return res; - - if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT) != 0) - { - return AllocateDedicatedMemory( - pool, - size, - suballocType, - dedicatedAllocations, - memTypeIndex, - (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0, - (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0, - (finalCreateInfo.flags & - (VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT | VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT)) != 0, - (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_CAN_ALIAS_BIT) != 0, - finalCreateInfo.pUserData, - finalCreateInfo.priority, - dedicatedBuffer, - dedicatedImage, - dedicatedBufferImageUsage, - allocationCount, - pAllocations, - blockVector.GetAllocationNextPtr()); - } - else - { - const bool canAllocateDedicated = - (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) == 0 && - (pool == VK_NULL_HANDLE || !blockVector.HasExplicitBlockSize()); - - if(canAllocateDedicated) - { - // Heuristics: Allocate dedicated memory if requested size if greater than half of preferred block size. - if(size > blockVector.GetPreferredBlockSize() / 2) - { - dedicatedPreferred = true; - } - // Protection against creating each allocation as dedicated when we reach or exceed heap size/budget, - // which can quickly deplete maxMemoryAllocationCount: Don't prefer dedicated allocations when above - // 3/4 of the maximum allocation count. 
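// --- Illustrative aside (not part of the original patch or of the VMA sources) ---
// The comment above describes the heuristic applied in the code that follows: prefer a
// dedicated VkDeviceMemory when the request exceeds half of the preferred block size,
// but stop preferring it once roughly 3/4 of maxMemoryAllocationCount is already in use.
// An application can bypass the heuristic explicitly; a minimal sketch using the
// documented public flags (assumes vk_mem_alloc.h is included):
static VkResult CreateDedicatedBufferSketch(VmaAllocator allocator, VkDeviceSize size,
                                            VkBuffer* pBuffer, VmaAllocation* pAllocation)
{
    VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufInfo.size = size;
    bufInfo.usage = VK_BUFFER_USAGE_STORAGE_BUFFER_BIT;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_AUTO;
    // Skip the heuristic entirely: always give this buffer its own VkDeviceMemory.
    allocCreateInfo.flags = VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;

    return vmaCreateBuffer(allocator, &bufInfo, &allocCreateInfo, pBuffer, pAllocation, nullptr);
}
// --- end aside ---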
- if(m_DeviceMemoryCount.load() > m_PhysicalDeviceProperties.limits.maxMemoryAllocationCount * 3 / 4) - { - dedicatedPreferred = false; - } - - if(dedicatedPreferred) - { - res = AllocateDedicatedMemory( - pool, - size, - suballocType, - dedicatedAllocations, - memTypeIndex, - (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0, - (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0, - (finalCreateInfo.flags & - (VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT | VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT)) != 0, - (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_CAN_ALIAS_BIT) != 0, - finalCreateInfo.pUserData, - finalCreateInfo.priority, - dedicatedBuffer, - dedicatedImage, - dedicatedBufferImageUsage, - allocationCount, - pAllocations, - blockVector.GetAllocationNextPtr()); - if(res == VK_SUCCESS) - { - // Succeeded: AllocateDedicatedMemory function already filld pMemory, nothing more to do here. - VMA_DEBUG_LOG(" Allocated as DedicatedMemory"); - return VK_SUCCESS; - } - } - } - - res = blockVector.Allocate( - size, - alignment, - finalCreateInfo, - suballocType, - allocationCount, - pAllocations); - if(res == VK_SUCCESS) - return VK_SUCCESS; - - // Try dedicated memory. - if(canAllocateDedicated && !dedicatedPreferred) - { - res = AllocateDedicatedMemory( - pool, - size, - suballocType, - dedicatedAllocations, - memTypeIndex, - (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0, - (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0, - (finalCreateInfo.flags & - (VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT | VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT)) != 0, - (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_CAN_ALIAS_BIT) != 0, - finalCreateInfo.pUserData, - finalCreateInfo.priority, - dedicatedBuffer, - dedicatedImage, - dedicatedBufferImageUsage, - allocationCount, - pAllocations, - blockVector.GetAllocationNextPtr()); - if(res == VK_SUCCESS) - { - // Succeeded: AllocateDedicatedMemory function already filld pMemory, nothing more to do here. - VMA_DEBUG_LOG(" Allocated as DedicatedMemory"); - return VK_SUCCESS; - } - } - // Everything failed: Return error code. 
- VMA_DEBUG_LOG(" vkAllocateMemory FAILED"); - return res; - } -} - -VkResult VmaAllocator_T::AllocateDedicatedMemory( - VmaPool pool, - VkDeviceSize size, - VmaSuballocationType suballocType, - VmaDedicatedAllocationList& dedicatedAllocations, - uint32_t memTypeIndex, - bool map, - bool isUserDataString, - bool isMappingAllowed, - bool canAliasMemory, - void* pUserData, - float priority, - VkBuffer dedicatedBuffer, - VkImage dedicatedImage, - VkFlags dedicatedBufferImageUsage, - size_t allocationCount, - VmaAllocation* pAllocations, - const void* pNextChain) -{ - VMA_ASSERT(allocationCount > 0 && pAllocations); - - VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO }; - allocInfo.memoryTypeIndex = memTypeIndex; - allocInfo.allocationSize = size; - allocInfo.pNext = pNextChain; - -#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000 - VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR }; - if(!canAliasMemory) - { - if(m_UseKhrDedicatedAllocation || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0)) - { - if(dedicatedBuffer != VK_NULL_HANDLE) - { - VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE); - dedicatedAllocInfo.buffer = dedicatedBuffer; - VmaPnextChainPushFront(&allocInfo, &dedicatedAllocInfo); - } - else if(dedicatedImage != VK_NULL_HANDLE) - { - dedicatedAllocInfo.image = dedicatedImage; - VmaPnextChainPushFront(&allocInfo, &dedicatedAllocInfo); - } - } - } -#endif // #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000 - -#if VMA_BUFFER_DEVICE_ADDRESS - VkMemoryAllocateFlagsInfoKHR allocFlagsInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO_KHR }; - if(m_UseKhrBufferDeviceAddress) - { - bool canContainBufferWithDeviceAddress = true; - if(dedicatedBuffer != VK_NULL_HANDLE) - { - canContainBufferWithDeviceAddress = dedicatedBufferImageUsage == UINT32_MAX || // Usage flags unknown - (dedicatedBufferImageUsage & VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_EXT) != 0; - } - else if(dedicatedImage != VK_NULL_HANDLE) - { - canContainBufferWithDeviceAddress = false; - } - if(canContainBufferWithDeviceAddress) - { - allocFlagsInfo.flags = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT_KHR; - VmaPnextChainPushFront(&allocInfo, &allocFlagsInfo); - } - } -#endif // #if VMA_BUFFER_DEVICE_ADDRESS - -#if VMA_MEMORY_PRIORITY - VkMemoryPriorityAllocateInfoEXT priorityInfo = { VK_STRUCTURE_TYPE_MEMORY_PRIORITY_ALLOCATE_INFO_EXT }; - if(m_UseExtMemoryPriority) - { - VMA_ASSERT(priority >= 0.f && priority <= 1.f); - priorityInfo.priority = priority; - VmaPnextChainPushFront(&allocInfo, &priorityInfo); - } -#endif // #if VMA_MEMORY_PRIORITY - -#if VMA_EXTERNAL_MEMORY - // Attach VkExportMemoryAllocateInfoKHR if necessary. 
- VkExportMemoryAllocateInfoKHR exportMemoryAllocInfo = { VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_KHR }; - exportMemoryAllocInfo.handleTypes = GetExternalMemoryHandleTypeFlags(memTypeIndex); - if(exportMemoryAllocInfo.handleTypes != 0) - { - VmaPnextChainPushFront(&allocInfo, &exportMemoryAllocInfo); - } -#endif // #if VMA_EXTERNAL_MEMORY - - size_t allocIndex; - VkResult res = VK_SUCCESS; - for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex) - { - res = AllocateDedicatedMemoryPage( - pool, - size, - suballocType, - memTypeIndex, - allocInfo, - map, - isUserDataString, - isMappingAllowed, - pUserData, - pAllocations + allocIndex); - if(res != VK_SUCCESS) - { - break; - } - } - - if(res == VK_SUCCESS) - { - for (allocIndex = 0; allocIndex < allocationCount; ++allocIndex) - { - dedicatedAllocations.Register(pAllocations[allocIndex]); - } - VMA_DEBUG_LOG(" Allocated DedicatedMemory Count=%zu, MemoryTypeIndex=#%u", allocationCount, memTypeIndex); - } - else - { - // Free all already created allocations. - while(allocIndex--) - { - VmaAllocation currAlloc = pAllocations[allocIndex]; - VkDeviceMemory hMemory = currAlloc->GetMemory(); - - /* - There is no need to call this, because Vulkan spec allows to skip vkUnmapMemory - before vkFreeMemory. - - if(currAlloc->GetMappedData() != VMA_NULL) - { - (*m_VulkanFunctions.vkUnmapMemory)(m_hDevice, hMemory); - } - */ - - FreeVulkanMemory(memTypeIndex, currAlloc->GetSize(), hMemory); - m_Budget.RemoveAllocation(MemoryTypeIndexToHeapIndex(memTypeIndex), currAlloc->GetSize()); - m_AllocationObjectAllocator.Free(currAlloc); - } - - memset(pAllocations, 0, sizeof(VmaAllocation) * allocationCount); - } - - return res; -} - -VkResult VmaAllocator_T::AllocateDedicatedMemoryPage( - VmaPool pool, - VkDeviceSize size, - VmaSuballocationType suballocType, - uint32_t memTypeIndex, - const VkMemoryAllocateInfo& allocInfo, - bool map, - bool isUserDataString, - bool isMappingAllowed, - void* pUserData, - VmaAllocation* pAllocation) -{ - VkDeviceMemory hMemory = VK_NULL_HANDLE; - VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory); - if(res < 0) - { - VMA_DEBUG_LOG(" vkAllocateMemory FAILED"); - return res; - } - - void* pMappedData = VMA_NULL; - if(map) - { - res = (*m_VulkanFunctions.vkMapMemory)( - m_hDevice, - hMemory, - 0, - VK_WHOLE_SIZE, - 0, - &pMappedData); - if(res < 0) - { - VMA_DEBUG_LOG(" vkMapMemory FAILED"); - FreeVulkanMemory(memTypeIndex, size, hMemory); - return res; - } - } - - *pAllocation = m_AllocationObjectAllocator.Allocate(isMappingAllowed); - (*pAllocation)->InitDedicatedAllocation(pool, memTypeIndex, hMemory, suballocType, pMappedData, size); - if (isUserDataString) - (*pAllocation)->SetName(this, (const char*)pUserData); - else - (*pAllocation)->SetUserData(this, pUserData); - m_Budget.AddAllocation(MemoryTypeIndexToHeapIndex(memTypeIndex), size); - if(VMA_DEBUG_INITIALIZE_ALLOCATIONS) - { - FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED); - } - - return VK_SUCCESS; -} - -void VmaAllocator_T::GetBufferMemoryRequirements( - VkBuffer hBuffer, - VkMemoryRequirements& memReq, - bool& requiresDedicatedAllocation, - bool& prefersDedicatedAllocation) const -{ -#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000 - if(m_UseKhrDedicatedAllocation || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0)) - { - VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR }; - memReqInfo.buffer = hBuffer; - - VkMemoryDedicatedRequirementsKHR memDedicatedReq = { 
VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR }; - - VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR }; - VmaPnextChainPushFront(&memReq2, &memDedicatedReq); - - (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2); - - memReq = memReq2.memoryRequirements; - requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE); - prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE); - } - else -#endif // #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000 - { - (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq); - requiresDedicatedAllocation = false; - prefersDedicatedAllocation = false; - } -} - -void VmaAllocator_T::GetImageMemoryRequirements( - VkImage hImage, - VkMemoryRequirements& memReq, - bool& requiresDedicatedAllocation, - bool& prefersDedicatedAllocation) const -{ -#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000 - if(m_UseKhrDedicatedAllocation || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0)) - { - VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR }; - memReqInfo.image = hImage; - - VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR }; - - VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR }; - VmaPnextChainPushFront(&memReq2, &memDedicatedReq); - - (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2); - - memReq = memReq2.memoryRequirements; - requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE); - prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE); - } - else -#endif // #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000 - { - (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq); - requiresDedicatedAllocation = false; - prefersDedicatedAllocation = false; - } -} - -VkResult VmaAllocator_T::FindMemoryTypeIndex( - uint32_t memoryTypeBits, - const VmaAllocationCreateInfo* pAllocationCreateInfo, - VkFlags bufImgUsage, - uint32_t* pMemoryTypeIndex) const -{ - memoryTypeBits &= GetGlobalMemoryTypeBits(); - - if(pAllocationCreateInfo->memoryTypeBits != 0) - { - memoryTypeBits &= pAllocationCreateInfo->memoryTypeBits; - } - - VkMemoryPropertyFlags requiredFlags = 0, preferredFlags = 0, notPreferredFlags = 0; - if(!FindMemoryPreferences( - IsIntegratedGpu(), - *pAllocationCreateInfo, - bufImgUsage, - requiredFlags, preferredFlags, notPreferredFlags)) - { - return VK_ERROR_FEATURE_NOT_PRESENT; - } - - *pMemoryTypeIndex = UINT32_MAX; - uint32_t minCost = UINT32_MAX; - for(uint32_t memTypeIndex = 0, memTypeBit = 1; - memTypeIndex < GetMemoryTypeCount(); - ++memTypeIndex, memTypeBit <<= 1) - { - // This memory type is acceptable according to memoryTypeBits bitmask. - if((memTypeBit & memoryTypeBits) != 0) - { - const VkMemoryPropertyFlags currFlags = - m_MemProps.memoryTypes[memTypeIndex].propertyFlags; - // This memory type contains requiredFlags. - if((requiredFlags & ~currFlags) == 0) - { - // Calculate cost as number of bits from preferredFlags not present in this memory type. - uint32_t currCost = VMA_COUNT_BITS_SET(preferredFlags & ~currFlags) + - VMA_COUNT_BITS_SET(currFlags & notPreferredFlags); - // Remember memory type with lowest cost. 
- if(currCost < minCost) - { - *pMemoryTypeIndex = memTypeIndex; - if(currCost == 0) - { - return VK_SUCCESS; - } - minCost = currCost; - } - } - } - } - return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT; -} - -VkResult VmaAllocator_T::CalcMemTypeParams( - VmaAllocationCreateInfo& inoutCreateInfo, - uint32_t memTypeIndex, - VkDeviceSize size, - size_t allocationCount) -{ - // If memory type is not HOST_VISIBLE, disable MAPPED. - if((inoutCreateInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0 && - (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0) - { - inoutCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_MAPPED_BIT; - } - - if((inoutCreateInfo.flags & VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT) != 0 && - (inoutCreateInfo.flags & VMA_ALLOCATION_CREATE_WITHIN_BUDGET_BIT) != 0) - { - const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex); - VmaBudget heapBudget = {}; - GetHeapBudgets(&heapBudget, heapIndex, 1); - if(heapBudget.usage + size * allocationCount > heapBudget.budget) - { - return VK_ERROR_OUT_OF_DEVICE_MEMORY; - } - } - return VK_SUCCESS; -} - -VkResult VmaAllocator_T::CalcAllocationParams( - VmaAllocationCreateInfo& inoutCreateInfo, - bool dedicatedRequired, - bool dedicatedPreferred) -{ - VMA_ASSERT((inoutCreateInfo.flags & - (VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT | VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT)) != - (VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT | VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT) && - "Specifying both flags VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT and VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT is incorrect."); - VMA_ASSERT((((inoutCreateInfo.flags & VMA_ALLOCATION_CREATE_HOST_ACCESS_ALLOW_TRANSFER_INSTEAD_BIT) == 0 || - (inoutCreateInfo.flags & (VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT | VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT)) != 0)) && - "Specifying VMA_ALLOCATION_CREATE_HOST_ACCESS_ALLOW_TRANSFER_INSTEAD_BIT requires also VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT or VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT."); - if(inoutCreateInfo.usage == VMA_MEMORY_USAGE_AUTO || inoutCreateInfo.usage == VMA_MEMORY_USAGE_AUTO_PREFER_DEVICE || inoutCreateInfo.usage == VMA_MEMORY_USAGE_AUTO_PREFER_HOST) - { - if((inoutCreateInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0) - { - VMA_ASSERT((inoutCreateInfo.flags & (VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT | VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT)) != 0 && - "When using VMA_ALLOCATION_CREATE_MAPPED_BIT and usage = VMA_MEMORY_USAGE_AUTO*, you must also specify VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT or VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT."); - } - } - - // If memory is lazily allocated, it should be always dedicated. 
- if(dedicatedRequired || - inoutCreateInfo.usage == VMA_MEMORY_USAGE_GPU_LAZILY_ALLOCATED) - { - inoutCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT; - } - - if(inoutCreateInfo.pool != VK_NULL_HANDLE) - { - if(inoutCreateInfo.pool->m_BlockVector.HasExplicitBlockSize() && - (inoutCreateInfo.flags & VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT) != 0) - { - VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT while current custom pool doesn't support dedicated allocations."); - return VK_ERROR_FEATURE_NOT_PRESENT; - } - inoutCreateInfo.priority = inoutCreateInfo.pool->m_BlockVector.GetPriority(); - } - - if((inoutCreateInfo.flags & VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT) != 0 && - (inoutCreateInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0) - { - VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense."); - return VK_ERROR_FEATURE_NOT_PRESENT; - } - - if(VMA_DEBUG_ALWAYS_DEDICATED_MEMORY && - (inoutCreateInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0) - { - inoutCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT; - } - - // Non-auto USAGE values imply HOST_ACCESS flags. - // And so does VMA_MEMORY_USAGE_UNKNOWN because it is used with custom pools. - // Which specific flag is used doesn't matter. They change things only when used with VMA_MEMORY_USAGE_AUTO*. - // Otherwise they just protect from assert on mapping. - if(inoutCreateInfo.usage != VMA_MEMORY_USAGE_AUTO && - inoutCreateInfo.usage != VMA_MEMORY_USAGE_AUTO_PREFER_DEVICE && - inoutCreateInfo.usage != VMA_MEMORY_USAGE_AUTO_PREFER_HOST) - { - if((inoutCreateInfo.flags & (VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT | VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT)) == 0) - { - inoutCreateInfo.flags |= VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT; - } - } - - return VK_SUCCESS; -} - -VkResult VmaAllocator_T::AllocateMemory( - const VkMemoryRequirements& vkMemReq, - bool requiresDedicatedAllocation, - bool prefersDedicatedAllocation, - VkBuffer dedicatedBuffer, - VkImage dedicatedImage, - VkFlags dedicatedBufferImageUsage, - const VmaAllocationCreateInfo& createInfo, - VmaSuballocationType suballocType, - size_t allocationCount, - VmaAllocation* pAllocations) -{ - memset(pAllocations, 0, sizeof(VmaAllocation) * allocationCount); - - VMA_ASSERT(VmaIsPow2(vkMemReq.alignment)); - - if(vkMemReq.size == 0) - { - return VK_ERROR_INITIALIZATION_FAILED; - } - - VmaAllocationCreateInfo createInfoFinal = createInfo; - VkResult res = CalcAllocationParams(createInfoFinal, requiresDedicatedAllocation, prefersDedicatedAllocation); - if(res != VK_SUCCESS) - return res; - - if(createInfoFinal.pool != VK_NULL_HANDLE) - { - VmaBlockVector& blockVector = createInfoFinal.pool->m_BlockVector; - return AllocateMemoryOfType( - createInfoFinal.pool, - vkMemReq.size, - vkMemReq.alignment, - prefersDedicatedAllocation, - dedicatedBuffer, - dedicatedImage, - dedicatedBufferImageUsage, - createInfoFinal, - blockVector.GetMemoryTypeIndex(), - suballocType, - createInfoFinal.pool->m_DedicatedAllocations, - blockVector, - allocationCount, - pAllocations); - } - else - { - // Bit mask of memory Vulkan types acceptable for this allocation. - uint32_t memoryTypeBits = vkMemReq.memoryTypeBits; - uint32_t memTypeIndex = UINT32_MAX; - res = FindMemoryTypeIndex(memoryTypeBits, &createInfoFinal, dedicatedBufferImageUsage, &memTypeIndex); - // Can't find any single memory type matching requirements. 
res is VK_ERROR_FEATURE_NOT_PRESENT. - if(res != VK_SUCCESS) - return res; - do - { - VmaBlockVector* blockVector = m_pBlockVectors[memTypeIndex]; - VMA_ASSERT(blockVector && "Trying to use unsupported memory type!"); - res = AllocateMemoryOfType( - VK_NULL_HANDLE, - vkMemReq.size, - vkMemReq.alignment, - requiresDedicatedAllocation || prefersDedicatedAllocation, - dedicatedBuffer, - dedicatedImage, - dedicatedBufferImageUsage, - createInfoFinal, - memTypeIndex, - suballocType, - m_DedicatedAllocations[memTypeIndex], - *blockVector, - allocationCount, - pAllocations); - // Allocation succeeded - if(res == VK_SUCCESS) - return VK_SUCCESS; - - // Remove old memTypeIndex from list of possibilities. - memoryTypeBits &= ~(1u << memTypeIndex); - // Find alternative memTypeIndex. - res = FindMemoryTypeIndex(memoryTypeBits, &createInfoFinal, dedicatedBufferImageUsage, &memTypeIndex); - } while(res == VK_SUCCESS); - - // No other matching memory type index could be found. - // Not returning res, which is VK_ERROR_FEATURE_NOT_PRESENT, because we already failed to allocate once. - return VK_ERROR_OUT_OF_DEVICE_MEMORY; - } -} - -void VmaAllocator_T::FreeMemory( - size_t allocationCount, - const VmaAllocation* pAllocations) -{ - VMA_ASSERT(pAllocations); - - for(size_t allocIndex = allocationCount; allocIndex--; ) - { - VmaAllocation allocation = pAllocations[allocIndex]; - - if(allocation != VK_NULL_HANDLE) - { - if(VMA_DEBUG_INITIALIZE_ALLOCATIONS) - { - FillAllocation(allocation, VMA_ALLOCATION_FILL_PATTERN_DESTROYED); - } - - allocation->FreeName(this); - - switch(allocation->GetType()) - { - case VmaAllocation_T::ALLOCATION_TYPE_BLOCK: - { - VmaBlockVector* pBlockVector = VMA_NULL; - VmaPool hPool = allocation->GetParentPool(); - if(hPool != VK_NULL_HANDLE) - { - pBlockVector = &hPool->m_BlockVector; - } - else - { - const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex(); - pBlockVector = m_pBlockVectors[memTypeIndex]; - VMA_ASSERT(pBlockVector && "Trying to free memory of unsupported type!"); - } - pBlockVector->Free(allocation); - } - break; - case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED: - FreeDedicatedMemory(allocation); - break; - default: - VMA_ASSERT(0); - } - } - } -} - -void VmaAllocator_T::CalculateStatistics(VmaTotalStatistics* pStats) -{ - // Initialize. - VmaClearDetailedStatistics(pStats->total); - for(uint32_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i) - VmaClearDetailedStatistics(pStats->memoryType[i]); - for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i) - VmaClearDetailedStatistics(pStats->memoryHeap[i]); - - // Process default pools. - for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex) - { - VmaBlockVector* const pBlockVector = m_pBlockVectors[memTypeIndex]; - if (pBlockVector != VMA_NULL) - pBlockVector->AddDetailedStatistics(pStats->memoryType[memTypeIndex]); - } - - // Process custom pools. - { - VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex); - for(VmaPool pool = m_Pools.Front(); pool != VMA_NULL; pool = m_Pools.GetNext(pool)) - { - VmaBlockVector& blockVector = pool->m_BlockVector; - const uint32_t memTypeIndex = blockVector.GetMemoryTypeIndex(); - blockVector.AddDetailedStatistics(pStats->memoryType[memTypeIndex]); - pool->m_DedicatedAllocations.AddDetailedStatistics(pStats->memoryType[memTypeIndex]); - } - } - - // Process dedicated allocations. 
- for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex) - { - m_DedicatedAllocations[memTypeIndex].AddDetailedStatistics(pStats->memoryType[memTypeIndex]); - } - - // Sum from memory types to memory heaps. - for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex) - { - const uint32_t memHeapIndex = m_MemProps.memoryTypes[memTypeIndex].heapIndex; - VmaAddDetailedStatistics(pStats->memoryHeap[memHeapIndex], pStats->memoryType[memTypeIndex]); - } - - // Sum from memory heaps to total. - for(uint32_t memHeapIndex = 0; memHeapIndex < GetMemoryHeapCount(); ++memHeapIndex) - VmaAddDetailedStatistics(pStats->total, pStats->memoryHeap[memHeapIndex]); - - VMA_ASSERT(pStats->total.statistics.allocationCount == 0 || - pStats->total.allocationSizeMax >= pStats->total.allocationSizeMin); - VMA_ASSERT(pStats->total.unusedRangeCount == 0 || - pStats->total.unusedRangeSizeMax >= pStats->total.unusedRangeSizeMin); -} - -void VmaAllocator_T::GetHeapBudgets(VmaBudget* outBudgets, uint32_t firstHeap, uint32_t heapCount) -{ -#if VMA_MEMORY_BUDGET - if(m_UseExtMemoryBudget) - { - if(m_Budget.m_OperationsSinceBudgetFetch < 30) - { - VmaMutexLockRead lockRead(m_Budget.m_BudgetMutex, m_UseMutex); - for(uint32_t i = 0; i < heapCount; ++i, ++outBudgets) - { - const uint32_t heapIndex = firstHeap + i; - - outBudgets->statistics.blockCount = m_Budget.m_BlockCount[heapIndex]; - outBudgets->statistics.allocationCount = m_Budget.m_AllocationCount[heapIndex]; - outBudgets->statistics.blockBytes = m_Budget.m_BlockBytes[heapIndex]; - outBudgets->statistics.allocationBytes = m_Budget.m_AllocationBytes[heapIndex]; - - if(m_Budget.m_VulkanUsage[heapIndex] + outBudgets->statistics.blockBytes > m_Budget.m_BlockBytesAtBudgetFetch[heapIndex]) - { - outBudgets->usage = m_Budget.m_VulkanUsage[heapIndex] + - outBudgets->statistics.blockBytes - m_Budget.m_BlockBytesAtBudgetFetch[heapIndex]; - } - else - { - outBudgets->usage = 0; - } - - // Have to take MIN with heap size because explicit HeapSizeLimit is included in it. - outBudgets->budget = VMA_MIN( - m_Budget.m_VulkanBudget[heapIndex], m_MemProps.memoryHeaps[heapIndex].size); - } - } - else - { - UpdateVulkanBudget(); // Outside of mutex lock - GetHeapBudgets(outBudgets, firstHeap, heapCount); // Recursion - } - } - else -#endif - { - for(uint32_t i = 0; i < heapCount; ++i, ++outBudgets) - { - const uint32_t heapIndex = firstHeap + i; - - outBudgets->statistics.blockCount = m_Budget.m_BlockCount[heapIndex]; - outBudgets->statistics.allocationCount = m_Budget.m_AllocationCount[heapIndex]; - outBudgets->statistics.blockBytes = m_Budget.m_BlockBytes[heapIndex]; - outBudgets->statistics.allocationBytes = m_Budget.m_AllocationBytes[heapIndex]; - - outBudgets->usage = outBudgets->statistics.blockBytes; - outBudgets->budget = m_MemProps.memoryHeaps[heapIndex].size * 8 / 10; // 80% heuristics. 
- } - } -} - -void VmaAllocator_T::GetAllocationInfo(VmaAllocation hAllocation, VmaAllocationInfo* pAllocationInfo) -{ - pAllocationInfo->memoryType = hAllocation->GetMemoryTypeIndex(); - pAllocationInfo->deviceMemory = hAllocation->GetMemory(); - pAllocationInfo->offset = hAllocation->GetOffset(); - pAllocationInfo->size = hAllocation->GetSize(); - pAllocationInfo->pMappedData = hAllocation->GetMappedData(); - pAllocationInfo->pUserData = hAllocation->GetUserData(); - pAllocationInfo->pName = hAllocation->GetName(); -} - -VkResult VmaAllocator_T::CreatePool(const VmaPoolCreateInfo* pCreateInfo, VmaPool* pPool) -{ - VMA_DEBUG_LOG(" CreatePool: MemoryTypeIndex=%u, flags=%u", pCreateInfo->memoryTypeIndex, pCreateInfo->flags); - - VmaPoolCreateInfo newCreateInfo = *pCreateInfo; - - // Protection against uninitialized new structure member. If garbage data are left there, this pointer dereference would crash. - if(pCreateInfo->pMemoryAllocateNext) - { - VMA_ASSERT(((const VkBaseInStructure*)pCreateInfo->pMemoryAllocateNext)->sType != 0); - } - - if(newCreateInfo.maxBlockCount == 0) - { - newCreateInfo.maxBlockCount = SIZE_MAX; - } - if(newCreateInfo.minBlockCount > newCreateInfo.maxBlockCount) - { - return VK_ERROR_INITIALIZATION_FAILED; - } - // Memory type index out of range or forbidden. - if(pCreateInfo->memoryTypeIndex >= GetMemoryTypeCount() || - ((1u << pCreateInfo->memoryTypeIndex) & m_GlobalMemoryTypeBits) == 0) - { - return VK_ERROR_FEATURE_NOT_PRESENT; - } - if(newCreateInfo.minAllocationAlignment > 0) - { - VMA_ASSERT(VmaIsPow2(newCreateInfo.minAllocationAlignment)); - } - - const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(newCreateInfo.memoryTypeIndex); - - *pPool = vma_new(this, VmaPool_T)(this, newCreateInfo, preferredBlockSize); - - VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks(); - if(res != VK_SUCCESS) - { - vma_delete(this, *pPool); - *pPool = VMA_NULL; - return res; - } - - // Add to m_Pools. - { - VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex); - (*pPool)->SetId(m_NextPoolId++); - m_Pools.PushBack(*pPool); - } - - return VK_SUCCESS; -} - -void VmaAllocator_T::DestroyPool(VmaPool pool) -{ - // Remove from m_Pools. - { - VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex); - m_Pools.Remove(pool); - } - - vma_delete(this, pool); -} - -void VmaAllocator_T::GetPoolStatistics(VmaPool pool, VmaStatistics* pPoolStats) -{ - VmaClearStatistics(*pPoolStats); - pool->m_BlockVector.AddStatistics(*pPoolStats); - pool->m_DedicatedAllocations.AddStatistics(*pPoolStats); -} - -void VmaAllocator_T::CalculatePoolStatistics(VmaPool pool, VmaDetailedStatistics* pPoolStats) -{ - VmaClearDetailedStatistics(*pPoolStats); - pool->m_BlockVector.AddDetailedStatistics(*pPoolStats); - pool->m_DedicatedAllocations.AddDetailedStatistics(*pPoolStats); -} - -void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex) -{ - m_CurrentFrameIndex.store(frameIndex); - -#if VMA_MEMORY_BUDGET - if(m_UseExtMemoryBudget) - { - UpdateVulkanBudget(); - } -#endif // #if VMA_MEMORY_BUDGET -} - -VkResult VmaAllocator_T::CheckPoolCorruption(VmaPool hPool) -{ - return hPool->m_BlockVector.CheckCorruption(); -} - -VkResult VmaAllocator_T::CheckCorruption(uint32_t memoryTypeBits) -{ - VkResult finalRes = VK_ERROR_FEATURE_NOT_PRESENT; - - // Process default pools. 
- for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex) - { - VmaBlockVector* const pBlockVector = m_pBlockVectors[memTypeIndex]; - if(pBlockVector != VMA_NULL) - { - VkResult localRes = pBlockVector->CheckCorruption(); - switch(localRes) - { - case VK_ERROR_FEATURE_NOT_PRESENT: - break; - case VK_SUCCESS: - finalRes = VK_SUCCESS; - break; - default: - return localRes; - } - } - } - - // Process custom pools. - { - VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex); - for(VmaPool pool = m_Pools.Front(); pool != VMA_NULL; pool = m_Pools.GetNext(pool)) - { - if(((1u << pool->m_BlockVector.GetMemoryTypeIndex()) & memoryTypeBits) != 0) - { - VkResult localRes = pool->m_BlockVector.CheckCorruption(); - switch(localRes) - { - case VK_ERROR_FEATURE_NOT_PRESENT: - break; - case VK_SUCCESS: - finalRes = VK_SUCCESS; - break; - default: - return localRes; - } - } - } - } - - return finalRes; -} - -VkResult VmaAllocator_T::AllocateVulkanMemory(const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory) -{ - AtomicTransactionalIncrement deviceMemoryCountIncrement; - const uint64_t prevDeviceMemoryCount = deviceMemoryCountIncrement.Increment(&m_DeviceMemoryCount); -#if VMA_DEBUG_DONT_EXCEED_MAX_MEMORY_ALLOCATION_COUNT - if(prevDeviceMemoryCount >= m_PhysicalDeviceProperties.limits.maxMemoryAllocationCount) - { - return VK_ERROR_TOO_MANY_OBJECTS; - } -#endif - - const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex); - - // HeapSizeLimit is in effect for this heap. - if((m_HeapSizeLimitMask & (1u << heapIndex)) != 0) - { - const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size; - VkDeviceSize blockBytes = m_Budget.m_BlockBytes[heapIndex]; - for(;;) - { - const VkDeviceSize blockBytesAfterAllocation = blockBytes + pAllocateInfo->allocationSize; - if(blockBytesAfterAllocation > heapSize) - { - return VK_ERROR_OUT_OF_DEVICE_MEMORY; - } - if(m_Budget.m_BlockBytes[heapIndex].compare_exchange_strong(blockBytes, blockBytesAfterAllocation)) - { - break; - } - } - } - else - { - m_Budget.m_BlockBytes[heapIndex] += pAllocateInfo->allocationSize; - } - ++m_Budget.m_BlockCount[heapIndex]; - - // VULKAN CALL vkAllocateMemory. - VkResult res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory); - - if(res == VK_SUCCESS) - { -#if VMA_MEMORY_BUDGET - ++m_Budget.m_OperationsSinceBudgetFetch; -#endif - - // Informative callback. - if(m_DeviceMemoryCallbacks.pfnAllocate != VMA_NULL) - { - (*m_DeviceMemoryCallbacks.pfnAllocate)(this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize, m_DeviceMemoryCallbacks.pUserData); - } - - deviceMemoryCountIncrement.Commit(); - } - else - { - --m_Budget.m_BlockCount[heapIndex]; - m_Budget.m_BlockBytes[heapIndex] -= pAllocateInfo->allocationSize; - } - - return res; -} - -void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory) -{ - // Informative callback. - if(m_DeviceMemoryCallbacks.pfnFree != VMA_NULL) - { - (*m_DeviceMemoryCallbacks.pfnFree)(this, memoryType, hMemory, size, m_DeviceMemoryCallbacks.pUserData); - } - - // VULKAN CALL vkFreeMemory. 
- (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks()); - - const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memoryType); - --m_Budget.m_BlockCount[heapIndex]; - m_Budget.m_BlockBytes[heapIndex] -= size; - - --m_DeviceMemoryCount; -} - -VkResult VmaAllocator_T::BindVulkanBuffer( - VkDeviceMemory memory, - VkDeviceSize memoryOffset, - VkBuffer buffer, - const void* pNext) -{ - if(pNext != VMA_NULL) - { -#if VMA_VULKAN_VERSION >= 1001000 || VMA_BIND_MEMORY2 - if((m_UseKhrBindMemory2 || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0)) && - m_VulkanFunctions.vkBindBufferMemory2KHR != VMA_NULL) - { - VkBindBufferMemoryInfoKHR bindBufferMemoryInfo = { VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO_KHR }; - bindBufferMemoryInfo.pNext = pNext; - bindBufferMemoryInfo.buffer = buffer; - bindBufferMemoryInfo.memory = memory; - bindBufferMemoryInfo.memoryOffset = memoryOffset; - return (*m_VulkanFunctions.vkBindBufferMemory2KHR)(m_hDevice, 1, &bindBufferMemoryInfo); - } - else -#endif // #if VMA_VULKAN_VERSION >= 1001000 || VMA_BIND_MEMORY2 - { - return VK_ERROR_EXTENSION_NOT_PRESENT; - } - } - else - { - return (*m_VulkanFunctions.vkBindBufferMemory)(m_hDevice, buffer, memory, memoryOffset); - } -} - -VkResult VmaAllocator_T::BindVulkanImage( - VkDeviceMemory memory, - VkDeviceSize memoryOffset, - VkImage image, - const void* pNext) -{ - if(pNext != VMA_NULL) - { -#if VMA_VULKAN_VERSION >= 1001000 || VMA_BIND_MEMORY2 - if((m_UseKhrBindMemory2 || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0)) && - m_VulkanFunctions.vkBindImageMemory2KHR != VMA_NULL) - { - VkBindImageMemoryInfoKHR bindBufferMemoryInfo = { VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO_KHR }; - bindBufferMemoryInfo.pNext = pNext; - bindBufferMemoryInfo.image = image; - bindBufferMemoryInfo.memory = memory; - bindBufferMemoryInfo.memoryOffset = memoryOffset; - return (*m_VulkanFunctions.vkBindImageMemory2KHR)(m_hDevice, 1, &bindBufferMemoryInfo); - } - else -#endif // #if VMA_BIND_MEMORY2 - { - return VK_ERROR_EXTENSION_NOT_PRESENT; - } - } - else - { - return (*m_VulkanFunctions.vkBindImageMemory)(m_hDevice, image, memory, memoryOffset); - } -} - -VkResult VmaAllocator_T::Map(VmaAllocation hAllocation, void** ppData) -{ - switch(hAllocation->GetType()) - { - case VmaAllocation_T::ALLOCATION_TYPE_BLOCK: - { - VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock(); - char *pBytes = VMA_NULL; - VkResult res = pBlock->Map(this, 1, (void**)&pBytes); - if(res == VK_SUCCESS) - { - *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset(); - hAllocation->BlockAllocMap(); - } - return res; - } - case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED: - return hAllocation->DedicatedAllocMap(this, ppData); - default: - VMA_ASSERT(0); - return VK_ERROR_MEMORY_MAP_FAILED; - } -} - -void VmaAllocator_T::Unmap(VmaAllocation hAllocation) -{ - switch(hAllocation->GetType()) - { - case VmaAllocation_T::ALLOCATION_TYPE_BLOCK: - { - VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock(); - hAllocation->BlockAllocUnmap(); - pBlock->Unmap(this, 1); - } - break; - case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED: - hAllocation->DedicatedAllocUnmap(this); - break; - default: - VMA_ASSERT(0); - } -} - -VkResult VmaAllocator_T::BindBufferMemory( - VmaAllocation hAllocation, - VkDeviceSize allocationLocalOffset, - VkBuffer hBuffer, - const void* pNext) -{ - VkResult res = VK_SUCCESS; - switch(hAllocation->GetType()) - { - case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED: - res = BindVulkanBuffer(hAllocation->GetMemory(), 
allocationLocalOffset, hBuffer, pNext);
- break;
- case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
- {
- VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
- VMA_ASSERT(pBlock && "Binding buffer to allocation that doesn't belong to any block.");
- res = pBlock->BindBufferMemory(this, hAllocation, allocationLocalOffset, hBuffer, pNext);
- break;
- }
- default:
- VMA_ASSERT(0);
- }
- return res;
-}
-
-VkResult VmaAllocator_T::BindImageMemory(
- VmaAllocation hAllocation,
- VkDeviceSize allocationLocalOffset,
- VkImage hImage,
- const void* pNext)
-{
- VkResult res = VK_SUCCESS;
- switch(hAllocation->GetType())
- {
- case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
- res = BindVulkanImage(hAllocation->GetMemory(), allocationLocalOffset, hImage, pNext);
- break;
- case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
- {
- VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
- VMA_ASSERT(pBlock && "Binding image to allocation that doesn't belong to any block.");
- res = pBlock->BindImageMemory(this, hAllocation, allocationLocalOffset, hImage, pNext);
- break;
- }
- default:
- VMA_ASSERT(0);
- }
- return res;
-}
-
-VkResult VmaAllocator_T::FlushOrInvalidateAllocation(
- VmaAllocation hAllocation,
- VkDeviceSize offset, VkDeviceSize size,
- VMA_CACHE_OPERATION op)
-{
- VkResult res = VK_SUCCESS;
-
- VkMappedMemoryRange memRange = {};
- if(GetFlushOrInvalidateRange(hAllocation, offset, size, memRange))
- {
- switch(op)
- {
- case VMA_CACHE_FLUSH:
- res = (*GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hDevice, 1, &memRange);
- break;
- case VMA_CACHE_INVALIDATE:
- res = (*GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hDevice, 1, &memRange);
- break;
- default:
- VMA_ASSERT(0);
- }
- }
- // else: Just ignore this call.
- return res;
-}
-
-VkResult VmaAllocator_T::FlushOrInvalidateAllocations(
- uint32_t allocationCount,
- const VmaAllocation* allocations,
- const VkDeviceSize* offsets, const VkDeviceSize* sizes,
- VMA_CACHE_OPERATION op)
-{
- typedef VmaStlAllocator<VkMappedMemoryRange> RangeAllocator;
- typedef VmaSmallVector<VkMappedMemoryRange, RangeAllocator, 16> RangeVector;
- RangeVector ranges = RangeVector(RangeAllocator(GetAllocationCallbacks()));
-
- for(uint32_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
- {
- const VmaAllocation alloc = allocations[allocIndex];
- const VkDeviceSize offset = offsets != VMA_NULL ? offsets[allocIndex] : 0;
- const VkDeviceSize size = sizes != VMA_NULL ? sizes[allocIndex] : VK_WHOLE_SIZE;
- VkMappedMemoryRange newRange;
- if(GetFlushOrInvalidateRange(alloc, offset, size, newRange))
- {
- ranges.push_back(newRange);
- }
- }
-
- VkResult res = VK_SUCCESS;
- if(!ranges.empty())
- {
- switch(op)
- {
- case VMA_CACHE_FLUSH:
- res = (*GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hDevice, (uint32_t)ranges.size(), ranges.data());
- break;
- case VMA_CACHE_INVALIDATE:
- res = (*GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hDevice, (uint32_t)ranges.size(), ranges.data());
- break;
- default:
- VMA_ASSERT(0);
- }
- }
- // else: Just ignore this call.
- return res; -} - -void VmaAllocator_T::FreeDedicatedMemory(const VmaAllocation allocation) -{ - VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED); - - const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex(); - VmaPool parentPool = allocation->GetParentPool(); - if(parentPool == VK_NULL_HANDLE) - { - // Default pool - m_DedicatedAllocations[memTypeIndex].Unregister(allocation); - } - else - { - // Custom pool - parentPool->m_DedicatedAllocations.Unregister(allocation); - } - - VkDeviceMemory hMemory = allocation->GetMemory(); - - /* - There is no need to call this, because Vulkan spec allows to skip vkUnmapMemory - before vkFreeMemory. - - if(allocation->GetMappedData() != VMA_NULL) - { - (*m_VulkanFunctions.vkUnmapMemory)(m_hDevice, hMemory); - } - */ - - FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory); - - m_Budget.RemoveAllocation(MemoryTypeIndexToHeapIndex(allocation->GetMemoryTypeIndex()), allocation->GetSize()); - m_AllocationObjectAllocator.Free(allocation); - - VMA_DEBUG_LOG(" Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex); -} - -uint32_t VmaAllocator_T::CalculateGpuDefragmentationMemoryTypeBits() const -{ - VkBufferCreateInfo dummyBufCreateInfo; - VmaFillGpuDefragmentationBufferCreateInfo(dummyBufCreateInfo); - - uint32_t memoryTypeBits = 0; - - // Create buffer. - VkBuffer buf = VK_NULL_HANDLE; - VkResult res = (*GetVulkanFunctions().vkCreateBuffer)( - m_hDevice, &dummyBufCreateInfo, GetAllocationCallbacks(), &buf); - if(res == VK_SUCCESS) - { - // Query for supported memory types. - VkMemoryRequirements memReq; - (*GetVulkanFunctions().vkGetBufferMemoryRequirements)(m_hDevice, buf, &memReq); - memoryTypeBits = memReq.memoryTypeBits; - - // Destroy buffer. - (*GetVulkanFunctions().vkDestroyBuffer)(m_hDevice, buf, GetAllocationCallbacks()); - } - - return memoryTypeBits; -} - -uint32_t VmaAllocator_T::CalculateGlobalMemoryTypeBits() const -{ - // Make sure memory information is already fetched. - VMA_ASSERT(GetMemoryTypeCount() > 0); - - uint32_t memoryTypeBits = UINT32_MAX; - - if(!m_UseAmdDeviceCoherentMemory) - { - // Exclude memory types that have VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD. 
- for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex) - { - if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY) != 0) - { - memoryTypeBits &= ~(1u << memTypeIndex); - } - } - } - - return memoryTypeBits; -} - -bool VmaAllocator_T::GetFlushOrInvalidateRange( - VmaAllocation allocation, - VkDeviceSize offset, VkDeviceSize size, - VkMappedMemoryRange& outRange) const -{ - const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex(); - if(size > 0 && IsMemoryTypeNonCoherent(memTypeIndex)) - { - const VkDeviceSize nonCoherentAtomSize = m_PhysicalDeviceProperties.limits.nonCoherentAtomSize; - const VkDeviceSize allocationSize = allocation->GetSize(); - VMA_ASSERT(offset <= allocationSize); - - outRange.sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE; - outRange.pNext = VMA_NULL; - outRange.memory = allocation->GetMemory(); - - switch(allocation->GetType()) - { - case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED: - outRange.offset = VmaAlignDown(offset, nonCoherentAtomSize); - if(size == VK_WHOLE_SIZE) - { - outRange.size = allocationSize - outRange.offset; - } - else - { - VMA_ASSERT(offset + size <= allocationSize); - outRange.size = VMA_MIN( - VmaAlignUp(size + (offset - outRange.offset), nonCoherentAtomSize), - allocationSize - outRange.offset); - } - break; - case VmaAllocation_T::ALLOCATION_TYPE_BLOCK: - { - // 1. Still within this allocation. - outRange.offset = VmaAlignDown(offset, nonCoherentAtomSize); - if(size == VK_WHOLE_SIZE) - { - size = allocationSize - offset; - } - else - { - VMA_ASSERT(offset + size <= allocationSize); - } - outRange.size = VmaAlignUp(size + (offset - outRange.offset), nonCoherentAtomSize); - - // 2. Adjust to whole block. - const VkDeviceSize allocationOffset = allocation->GetOffset(); - VMA_ASSERT(allocationOffset % nonCoherentAtomSize == 0); - const VkDeviceSize blockSize = allocation->GetBlock()->m_pMetadata->GetSize(); - outRange.offset += allocationOffset; - outRange.size = VMA_MIN(outRange.size, blockSize - outRange.offset); - - break; - } - default: - VMA_ASSERT(0); - } - return true; - } - return false; -} - -#if VMA_MEMORY_BUDGET -void VmaAllocator_T::UpdateVulkanBudget() -{ - VMA_ASSERT(m_UseExtMemoryBudget); - - VkPhysicalDeviceMemoryProperties2KHR memProps = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2_KHR }; - - VkPhysicalDeviceMemoryBudgetPropertiesEXT budgetProps = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT }; - VmaPnextChainPushFront(&memProps, &budgetProps); - - GetVulkanFunctions().vkGetPhysicalDeviceMemoryProperties2KHR(m_PhysicalDevice, &memProps); - - { - VmaMutexLockWrite lockWrite(m_Budget.m_BudgetMutex, m_UseMutex); - - for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex) - { - m_Budget.m_VulkanUsage[heapIndex] = budgetProps.heapUsage[heapIndex]; - m_Budget.m_VulkanBudget[heapIndex] = budgetProps.heapBudget[heapIndex]; - m_Budget.m_BlockBytesAtBudgetFetch[heapIndex] = m_Budget.m_BlockBytes[heapIndex].load(); - - // Some bugged drivers return the budget incorrectly, e.g. 0 or much bigger than heap size. - if(m_Budget.m_VulkanBudget[heapIndex] == 0) - { - m_Budget.m_VulkanBudget[heapIndex] = m_MemProps.memoryHeaps[heapIndex].size * 8 / 10; // 80% heuristics. 
- } - else if(m_Budget.m_VulkanBudget[heapIndex] > m_MemProps.memoryHeaps[heapIndex].size) - { - m_Budget.m_VulkanBudget[heapIndex] = m_MemProps.memoryHeaps[heapIndex].size; - } - if(m_Budget.m_VulkanUsage[heapIndex] == 0 && m_Budget.m_BlockBytesAtBudgetFetch[heapIndex] > 0) - { - m_Budget.m_VulkanUsage[heapIndex] = m_Budget.m_BlockBytesAtBudgetFetch[heapIndex]; - } - } - m_Budget.m_OperationsSinceBudgetFetch = 0; - } -} -#endif // VMA_MEMORY_BUDGET - -void VmaAllocator_T::FillAllocation(const VmaAllocation hAllocation, uint8_t pattern) -{ - if(VMA_DEBUG_INITIALIZE_ALLOCATIONS && - hAllocation->IsMappingAllowed() && - (m_MemProps.memoryTypes[hAllocation->GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0) - { - void* pData = VMA_NULL; - VkResult res = Map(hAllocation, &pData); - if(res == VK_SUCCESS) - { - memset(pData, (int)pattern, (size_t)hAllocation->GetSize()); - FlushOrInvalidateAllocation(hAllocation, 0, VK_WHOLE_SIZE, VMA_CACHE_FLUSH); - Unmap(hAllocation); - } - else - { - VMA_ASSERT(0 && "VMA_DEBUG_INITIALIZE_ALLOCATIONS is enabled, but couldn't map memory to fill allocation."); - } - } -} - -uint32_t VmaAllocator_T::GetGpuDefragmentationMemoryTypeBits() -{ - uint32_t memoryTypeBits = m_GpuDefragmentationMemoryTypeBits.load(); - if(memoryTypeBits == UINT32_MAX) - { - memoryTypeBits = CalculateGpuDefragmentationMemoryTypeBits(); - m_GpuDefragmentationMemoryTypeBits.store(memoryTypeBits); - } - return memoryTypeBits; -} - -#if VMA_STATS_STRING_ENABLED -void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json) -{ - json.WriteString("DefaultPools"); - json.BeginObject(); - { - for (uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex) - { - VmaBlockVector* pBlockVector = m_pBlockVectors[memTypeIndex]; - VmaDedicatedAllocationList& dedicatedAllocList = m_DedicatedAllocations[memTypeIndex]; - if (pBlockVector != VMA_NULL) - { - json.BeginString("Type "); - json.ContinueString(memTypeIndex); - json.EndString(); - json.BeginObject(); - { - json.WriteString("PreferredBlockSize"); - json.WriteNumber(pBlockVector->GetPreferredBlockSize()); - - json.WriteString("Blocks"); - pBlockVector->PrintDetailedMap(json); - - json.WriteString("DedicatedAllocations"); - dedicatedAllocList.BuildStatsString(json); - } - json.EndObject(); - } - } - } - json.EndObject(); - - json.WriteString("CustomPools"); - json.BeginObject(); - { - VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex); - if (!m_Pools.IsEmpty()) - { - for (uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex) - { - bool displayType = true; - size_t index = 0; - for (VmaPool pool = m_Pools.Front(); pool != VMA_NULL; pool = m_Pools.GetNext(pool)) - { - VmaBlockVector& blockVector = pool->m_BlockVector; - if (blockVector.GetMemoryTypeIndex() == memTypeIndex) - { - if (displayType) - { - json.BeginString("Type "); - json.ContinueString(memTypeIndex); - json.EndString(); - json.BeginArray(); - displayType = false; - } - - json.BeginObject(); - { - json.WriteString("Name"); - json.BeginString(); - json.ContinueString_Size(index++); - if (pool->GetName()) - { - json.ContinueString(" - "); - json.ContinueString(pool->GetName()); - } - json.EndString(); - - json.WriteString("PreferredBlockSize"); - json.WriteNumber(blockVector.GetPreferredBlockSize()); - - json.WriteString("Blocks"); - blockVector.PrintDetailedMap(json); - - json.WriteString("DedicatedAllocations"); - pool->m_DedicatedAllocations.BuildStatsString(json); - } - json.EndObject(); - } - } - - if (!displayType) 
- json.EndArray(); - } - } - } - json.EndObject(); -} -#endif // VMA_STATS_STRING_ENABLED -#endif // _VMA_ALLOCATOR_T_FUNCTIONS - - -#ifndef _VMA_PUBLIC_INTERFACE -VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateAllocator( - const VmaAllocatorCreateInfo* pCreateInfo, - VmaAllocator* pAllocator) -{ - VMA_ASSERT(pCreateInfo && pAllocator); - VMA_ASSERT(pCreateInfo->vulkanApiVersion == 0 || - (VK_VERSION_MAJOR(pCreateInfo->vulkanApiVersion) == 1 && VK_VERSION_MINOR(pCreateInfo->vulkanApiVersion) <= 3)); - VMA_DEBUG_LOG("vmaCreateAllocator"); - *pAllocator = vma_new(pCreateInfo->pAllocationCallbacks, VmaAllocator_T)(pCreateInfo); - VkResult result = (*pAllocator)->Init(pCreateInfo); - if(result < 0) - { - vma_delete(pCreateInfo->pAllocationCallbacks, *pAllocator); - *pAllocator = VK_NULL_HANDLE; - } - return result; -} - -VMA_CALL_PRE void VMA_CALL_POST vmaDestroyAllocator( - VmaAllocator allocator) -{ - if(allocator != VK_NULL_HANDLE) - { - VMA_DEBUG_LOG("vmaDestroyAllocator"); - VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks; // Have to copy the callbacks when destroying. - vma_delete(&allocationCallbacks, allocator); - } -} - -VMA_CALL_PRE void VMA_CALL_POST vmaGetAllocatorInfo(VmaAllocator allocator, VmaAllocatorInfo* pAllocatorInfo) -{ - VMA_ASSERT(allocator && pAllocatorInfo); - pAllocatorInfo->instance = allocator->m_hInstance; - pAllocatorInfo->physicalDevice = allocator->GetPhysicalDevice(); - pAllocatorInfo->device = allocator->m_hDevice; -} - -VMA_CALL_PRE void VMA_CALL_POST vmaGetPhysicalDeviceProperties( - VmaAllocator allocator, - const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties) -{ - VMA_ASSERT(allocator && ppPhysicalDeviceProperties); - *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties; -} - -VMA_CALL_PRE void VMA_CALL_POST vmaGetMemoryProperties( - VmaAllocator allocator, - const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties) -{ - VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties); - *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps; -} - -VMA_CALL_PRE void VMA_CALL_POST vmaGetMemoryTypeProperties( - VmaAllocator allocator, - uint32_t memoryTypeIndex, - VkMemoryPropertyFlags* pFlags) -{ - VMA_ASSERT(allocator && pFlags); - VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount()); - *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags; -} - -VMA_CALL_PRE void VMA_CALL_POST vmaSetCurrentFrameIndex( - VmaAllocator allocator, - uint32_t frameIndex) -{ - VMA_ASSERT(allocator); - - VMA_DEBUG_GLOBAL_MUTEX_LOCK - - allocator->SetCurrentFrameIndex(frameIndex); -} - -VMA_CALL_PRE void VMA_CALL_POST vmaCalculateStatistics( - VmaAllocator allocator, - VmaTotalStatistics* pStats) -{ - VMA_ASSERT(allocator && pStats); - VMA_DEBUG_GLOBAL_MUTEX_LOCK - allocator->CalculateStatistics(pStats); -} - -VMA_CALL_PRE void VMA_CALL_POST vmaGetHeapBudgets( - VmaAllocator allocator, - VmaBudget* pBudgets) -{ - VMA_ASSERT(allocator && pBudgets); - VMA_DEBUG_GLOBAL_MUTEX_LOCK - allocator->GetHeapBudgets(pBudgets, 0, allocator->GetMemoryHeapCount()); -} - -#if VMA_STATS_STRING_ENABLED - -VMA_CALL_PRE void VMA_CALL_POST vmaBuildStatsString( - VmaAllocator allocator, - char** ppStatsString, - VkBool32 detailedMap) -{ - VMA_ASSERT(allocator && ppStatsString); - VMA_DEBUG_GLOBAL_MUTEX_LOCK - - VmaStringBuilder sb(allocator->GetAllocationCallbacks()); - { - VmaBudget budgets[VK_MAX_MEMORY_HEAPS]; - allocator->GetHeapBudgets(budgets, 0, allocator->GetMemoryHeapCount()); - - 
VmaTotalStatistics stats;
- allocator->CalculateStatistics(&stats);
-
- VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
- json.BeginObject();
- {
- json.WriteString("General");
- json.BeginObject();
- {
- const VkPhysicalDeviceProperties& deviceProperties = allocator->m_PhysicalDeviceProperties;
- const VkPhysicalDeviceMemoryProperties& memoryProperties = allocator->m_MemProps;
-
- json.WriteString("API");
- json.WriteString("Vulkan");
-
- json.WriteString("apiVersion");
- json.BeginString();
- json.ContinueString(VK_API_VERSION_MAJOR(deviceProperties.apiVersion));
- json.ContinueString(".");
- json.ContinueString(VK_API_VERSION_MINOR(deviceProperties.apiVersion));
- json.ContinueString(".");
- json.ContinueString(VK_API_VERSION_PATCH(deviceProperties.apiVersion));
- json.EndString();
-
- json.WriteString("GPU");
- json.WriteString(deviceProperties.deviceName);
- json.WriteString("deviceType");
- json.WriteNumber(static_cast<uint32_t>(deviceProperties.deviceType));
-
- json.WriteString("maxMemoryAllocationCount");
- json.WriteNumber(deviceProperties.limits.maxMemoryAllocationCount);
- json.WriteString("bufferImageGranularity");
- json.WriteNumber(deviceProperties.limits.bufferImageGranularity);
- json.WriteString("nonCoherentAtomSize");
- json.WriteNumber(deviceProperties.limits.nonCoherentAtomSize);
-
- json.WriteString("memoryHeapCount");
- json.WriteNumber(memoryProperties.memoryHeapCount);
- json.WriteString("memoryTypeCount");
- json.WriteNumber(memoryProperties.memoryTypeCount);
- }
- json.EndObject();
- }
- {
- json.WriteString("Total");
- VmaPrintDetailedStatistics(json, stats.total);
- }
- {
- json.WriteString("MemoryInfo");
- json.BeginObject();
- {
- for (uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
- {
- json.BeginString("Heap ");
- json.ContinueString(heapIndex);
- json.EndString();
- json.BeginObject();
- {
- const VkMemoryHeap& heapInfo = allocator->m_MemProps.memoryHeaps[heapIndex];
- json.WriteString("Flags");
- json.BeginArray(true);
- {
- if (heapInfo.flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT)
- json.WriteString("DEVICE_LOCAL");
- #if VMA_VULKAN_VERSION >= 1001000
- if (heapInfo.flags & VK_MEMORY_HEAP_MULTI_INSTANCE_BIT)
- json.WriteString("MULTI_INSTANCE");
- #endif
-
- VkMemoryHeapFlags flags = heapInfo.flags &
- ~(VK_MEMORY_HEAP_DEVICE_LOCAL_BIT
- #if VMA_VULKAN_VERSION >= 1001000
- | VK_MEMORY_HEAP_MULTI_INSTANCE_BIT
- #endif
- );
- if (flags != 0)
- json.WriteNumber(flags);
- }
- json.EndArray();
-
- json.WriteString("Size");
- json.WriteNumber(heapInfo.size);
-
- json.WriteString("Budget");
- json.BeginObject();
- {
- json.WriteString("BudgetBytes");
- json.WriteNumber(budgets[heapIndex].budget);
- json.WriteString("UsageBytes");
- json.WriteNumber(budgets[heapIndex].usage);
- }
- json.EndObject();
-
- json.WriteString("Stats");
- VmaPrintDetailedStatistics(json, stats.memoryHeap[heapIndex]);
-
- json.WriteString("MemoryPools");
- json.BeginObject();
- {
- for (uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
- {
- if (allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
- {
- json.BeginString("Type ");
- json.ContinueString(typeIndex);
- json.EndString();
- json.BeginObject();
- {
- json.WriteString("Flags");
- json.BeginArray(true);
- {
- VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
- if (flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT)
- json.WriteString("DEVICE_LOCAL");
- if (flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT)
-
json.WriteString("HOST_VISIBLE"); - if (flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) - json.WriteString("HOST_COHERENT"); - if (flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) - json.WriteString("HOST_CACHED"); - if (flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) - json.WriteString("LAZILY_ALLOCATED"); - #if VMA_VULKAN_VERSION >= 1001000 - if (flags & VK_MEMORY_PROPERTY_PROTECTED_BIT) - json.WriteString("PROTECTED"); - #endif - #if VK_AMD_device_coherent_memory - if (flags & VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY) - json.WriteString("DEVICE_COHERENT_AMD"); - if (flags & VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD_COPY) - json.WriteString("DEVICE_UNCACHED_AMD"); - #endif - - flags &= ~(VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT - #if VMA_VULKAN_VERSION >= 1001000 - | VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT - #endif - #if VK_AMD_device_coherent_memory - | VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY - | VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD_COPY - #endif - | VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT - | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT - | VK_MEMORY_PROPERTY_HOST_CACHED_BIT); - if (flags != 0) - json.WriteNumber(flags); - } - json.EndArray(); - - json.WriteString("Stats"); - VmaPrintDetailedStatistics(json, stats.memoryType[typeIndex]); - } - json.EndObject(); - } - } - - } - json.EndObject(); - } - json.EndObject(); - } - } - json.EndObject(); - } - - if (detailedMap == VK_TRUE) - allocator->PrintDetailedMap(json); - - json.EndObject(); - } - - *ppStatsString = VmaCreateStringCopy(allocator->GetAllocationCallbacks(), sb.GetData(), sb.GetLength()); -} - -VMA_CALL_PRE void VMA_CALL_POST vmaFreeStatsString( - VmaAllocator allocator, - char* pStatsString) -{ - if(pStatsString != VMA_NULL) - { - VMA_ASSERT(allocator); - VmaFreeString(allocator->GetAllocationCallbacks(), pStatsString); - } -} - -#endif // VMA_STATS_STRING_ENABLED - -/* -This function is not protected by any mutex because it just reads immutable data. 
-*/ -VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndex( - VmaAllocator allocator, - uint32_t memoryTypeBits, - const VmaAllocationCreateInfo* pAllocationCreateInfo, - uint32_t* pMemoryTypeIndex) -{ - VMA_ASSERT(allocator != VK_NULL_HANDLE); - VMA_ASSERT(pAllocationCreateInfo != VMA_NULL); - VMA_ASSERT(pMemoryTypeIndex != VMA_NULL); - - return allocator->FindMemoryTypeIndex(memoryTypeBits, pAllocationCreateInfo, UINT32_MAX, pMemoryTypeIndex); -} - -VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndexForBufferInfo( - VmaAllocator allocator, - const VkBufferCreateInfo* pBufferCreateInfo, - const VmaAllocationCreateInfo* pAllocationCreateInfo, - uint32_t* pMemoryTypeIndex) -{ - VMA_ASSERT(allocator != VK_NULL_HANDLE); - VMA_ASSERT(pBufferCreateInfo != VMA_NULL); - VMA_ASSERT(pAllocationCreateInfo != VMA_NULL); - VMA_ASSERT(pMemoryTypeIndex != VMA_NULL); - - const VkDevice hDev = allocator->m_hDevice; - const VmaVulkanFunctions* funcs = &allocator->GetVulkanFunctions(); - VkResult res; - -#if VMA_VULKAN_VERSION >= 1003000 - if(funcs->vkGetDeviceBufferMemoryRequirements) - { - // Can query straight from VkBufferCreateInfo :) - VkDeviceBufferMemoryRequirements devBufMemReq = {VK_STRUCTURE_TYPE_DEVICE_BUFFER_MEMORY_REQUIREMENTS}; - devBufMemReq.pCreateInfo = pBufferCreateInfo; - - VkMemoryRequirements2 memReq = {VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2}; - (*funcs->vkGetDeviceBufferMemoryRequirements)(hDev, &devBufMemReq, &memReq); - - res = allocator->FindMemoryTypeIndex( - memReq.memoryRequirements.memoryTypeBits, pAllocationCreateInfo, pBufferCreateInfo->usage, pMemoryTypeIndex); - } - else -#endif // #if VMA_VULKAN_VERSION >= 1003000 - { - // Must create a dummy buffer to query :( - VkBuffer hBuffer = VK_NULL_HANDLE; - res = funcs->vkCreateBuffer( - hDev, pBufferCreateInfo, allocator->GetAllocationCallbacks(), &hBuffer); - if(res == VK_SUCCESS) - { - VkMemoryRequirements memReq = {}; - funcs->vkGetBufferMemoryRequirements(hDev, hBuffer, &memReq); - - res = allocator->FindMemoryTypeIndex( - memReq.memoryTypeBits, pAllocationCreateInfo, pBufferCreateInfo->usage, pMemoryTypeIndex); - - funcs->vkDestroyBuffer( - hDev, hBuffer, allocator->GetAllocationCallbacks()); - } - } - return res; -} - -VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndexForImageInfo( - VmaAllocator allocator, - const VkImageCreateInfo* pImageCreateInfo, - const VmaAllocationCreateInfo* pAllocationCreateInfo, - uint32_t* pMemoryTypeIndex) -{ - VMA_ASSERT(allocator != VK_NULL_HANDLE); - VMA_ASSERT(pImageCreateInfo != VMA_NULL); - VMA_ASSERT(pAllocationCreateInfo != VMA_NULL); - VMA_ASSERT(pMemoryTypeIndex != VMA_NULL); - - const VkDevice hDev = allocator->m_hDevice; - const VmaVulkanFunctions* funcs = &allocator->GetVulkanFunctions(); - VkResult res; - -#if VMA_VULKAN_VERSION >= 1003000 - if(funcs->vkGetDeviceImageMemoryRequirements) - { - // Can query straight from VkImageCreateInfo :) - VkDeviceImageMemoryRequirements devImgMemReq = {VK_STRUCTURE_TYPE_DEVICE_IMAGE_MEMORY_REQUIREMENTS}; - devImgMemReq.pCreateInfo = pImageCreateInfo; - VMA_ASSERT(pImageCreateInfo->tiling != VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT_COPY && (pImageCreateInfo->flags & VK_IMAGE_CREATE_DISJOINT_BIT_COPY) == 0 && - "Cannot use this VkImageCreateInfo with vmaFindMemoryTypeIndexForImageInfo as I don't know what to pass as VkDeviceImageMemoryRequirements::planeAspect."); - - VkMemoryRequirements2 memReq = {VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2}; - (*funcs->vkGetDeviceImageMemoryRequirements)(hDev, &devImgMemReq, &memReq); - - res = 
allocator->FindMemoryTypeIndex( - memReq.memoryRequirements.memoryTypeBits, pAllocationCreateInfo, pImageCreateInfo->usage, pMemoryTypeIndex); - } - else -#endif // #if VMA_VULKAN_VERSION >= 1003000 - { - // Must create a dummy image to query :( - VkImage hImage = VK_NULL_HANDLE; - res = funcs->vkCreateImage( - hDev, pImageCreateInfo, allocator->GetAllocationCallbacks(), &hImage); - if(res == VK_SUCCESS) - { - VkMemoryRequirements memReq = {}; - funcs->vkGetImageMemoryRequirements(hDev, hImage, &memReq); - - res = allocator->FindMemoryTypeIndex( - memReq.memoryTypeBits, pAllocationCreateInfo, pImageCreateInfo->usage, pMemoryTypeIndex); - - funcs->vkDestroyImage( - hDev, hImage, allocator->GetAllocationCallbacks()); - } - } - return res; -} - -VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreatePool( - VmaAllocator allocator, - const VmaPoolCreateInfo* pCreateInfo, - VmaPool* pPool) -{ - VMA_ASSERT(allocator && pCreateInfo && pPool); - - VMA_DEBUG_LOG("vmaCreatePool"); - - VMA_DEBUG_GLOBAL_MUTEX_LOCK - - return allocator->CreatePool(pCreateInfo, pPool); -} - -VMA_CALL_PRE void VMA_CALL_POST vmaDestroyPool( - VmaAllocator allocator, - VmaPool pool) -{ - VMA_ASSERT(allocator); - - if(pool == VK_NULL_HANDLE) - { - return; - } - - VMA_DEBUG_LOG("vmaDestroyPool"); - - VMA_DEBUG_GLOBAL_MUTEX_LOCK - - allocator->DestroyPool(pool); -} - -VMA_CALL_PRE void VMA_CALL_POST vmaGetPoolStatistics( - VmaAllocator allocator, - VmaPool pool, - VmaStatistics* pPoolStats) -{ - VMA_ASSERT(allocator && pool && pPoolStats); - - VMA_DEBUG_GLOBAL_MUTEX_LOCK - - allocator->GetPoolStatistics(pool, pPoolStats); -} - -VMA_CALL_PRE void VMA_CALL_POST vmaCalculatePoolStatistics( - VmaAllocator allocator, - VmaPool pool, - VmaDetailedStatistics* pPoolStats) -{ - VMA_ASSERT(allocator && pool && pPoolStats); - - VMA_DEBUG_GLOBAL_MUTEX_LOCK - - allocator->CalculatePoolStatistics(pool, pPoolStats); -} - -VMA_CALL_PRE VkResult VMA_CALL_POST vmaCheckPoolCorruption(VmaAllocator allocator, VmaPool pool) -{ - VMA_ASSERT(allocator && pool); - - VMA_DEBUG_GLOBAL_MUTEX_LOCK - - VMA_DEBUG_LOG("vmaCheckPoolCorruption"); - - return allocator->CheckPoolCorruption(pool); -} - -VMA_CALL_PRE void VMA_CALL_POST vmaGetPoolName( - VmaAllocator allocator, - VmaPool pool, - const char** ppName) -{ - VMA_ASSERT(allocator && pool && ppName); - - VMA_DEBUG_LOG("vmaGetPoolName"); - - VMA_DEBUG_GLOBAL_MUTEX_LOCK - - *ppName = pool->GetName(); -} - -VMA_CALL_PRE void VMA_CALL_POST vmaSetPoolName( - VmaAllocator allocator, - VmaPool pool, - const char* pName) -{ - VMA_ASSERT(allocator && pool); - - VMA_DEBUG_LOG("vmaSetPoolName"); - - VMA_DEBUG_GLOBAL_MUTEX_LOCK - - pool->SetName(pName); -} - -VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemory( - VmaAllocator allocator, - const VkMemoryRequirements* pVkMemoryRequirements, - const VmaAllocationCreateInfo* pCreateInfo, - VmaAllocation* pAllocation, - VmaAllocationInfo* pAllocationInfo) -{ - VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation); - - VMA_DEBUG_LOG("vmaAllocateMemory"); - - VMA_DEBUG_GLOBAL_MUTEX_LOCK - - VkResult result = allocator->AllocateMemory( - *pVkMemoryRequirements, - false, // requiresDedicatedAllocation - false, // prefersDedicatedAllocation - VK_NULL_HANDLE, // dedicatedBuffer - VK_NULL_HANDLE, // dedicatedImage - UINT32_MAX, // dedicatedBufferImageUsage - *pCreateInfo, - VMA_SUBALLOCATION_TYPE_UNKNOWN, - 1, // allocationCount - pAllocation); - - if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS) - { - allocator->GetAllocationInfo(*pAllocation, 
pAllocationInfo); - } - - return result; -} - -VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryPages( - VmaAllocator allocator, - const VkMemoryRequirements* pVkMemoryRequirements, - const VmaAllocationCreateInfo* pCreateInfo, - size_t allocationCount, - VmaAllocation* pAllocations, - VmaAllocationInfo* pAllocationInfo) -{ - if(allocationCount == 0) - { - return VK_SUCCESS; - } - - VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocations); - - VMA_DEBUG_LOG("vmaAllocateMemoryPages"); - - VMA_DEBUG_GLOBAL_MUTEX_LOCK - - VkResult result = allocator->AllocateMemory( - *pVkMemoryRequirements, - false, // requiresDedicatedAllocation - false, // prefersDedicatedAllocation - VK_NULL_HANDLE, // dedicatedBuffer - VK_NULL_HANDLE, // dedicatedImage - UINT32_MAX, // dedicatedBufferImageUsage - *pCreateInfo, - VMA_SUBALLOCATION_TYPE_UNKNOWN, - allocationCount, - pAllocations); - - if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS) - { - for(size_t i = 0; i < allocationCount; ++i) - { - allocator->GetAllocationInfo(pAllocations[i], pAllocationInfo + i); - } - } - - return result; -} - -VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryForBuffer( - VmaAllocator allocator, - VkBuffer buffer, - const VmaAllocationCreateInfo* pCreateInfo, - VmaAllocation* pAllocation, - VmaAllocationInfo* pAllocationInfo) -{ - VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation); - - VMA_DEBUG_LOG("vmaAllocateMemoryForBuffer"); - - VMA_DEBUG_GLOBAL_MUTEX_LOCK - - VkMemoryRequirements vkMemReq = {}; - bool requiresDedicatedAllocation = false; - bool prefersDedicatedAllocation = false; - allocator->GetBufferMemoryRequirements(buffer, vkMemReq, - requiresDedicatedAllocation, - prefersDedicatedAllocation); - - VkResult result = allocator->AllocateMemory( - vkMemReq, - requiresDedicatedAllocation, - prefersDedicatedAllocation, - buffer, // dedicatedBuffer - VK_NULL_HANDLE, // dedicatedImage - UINT32_MAX, // dedicatedBufferImageUsage - *pCreateInfo, - VMA_SUBALLOCATION_TYPE_BUFFER, - 1, // allocationCount - pAllocation); - - if(pAllocationInfo && result == VK_SUCCESS) - { - allocator->GetAllocationInfo(*pAllocation, pAllocationInfo); - } - - return result; -} - -VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryForImage( - VmaAllocator allocator, - VkImage image, - const VmaAllocationCreateInfo* pCreateInfo, - VmaAllocation* pAllocation, - VmaAllocationInfo* pAllocationInfo) -{ - VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation); - - VMA_DEBUG_LOG("vmaAllocateMemoryForImage"); - - VMA_DEBUG_GLOBAL_MUTEX_LOCK - - VkMemoryRequirements vkMemReq = {}; - bool requiresDedicatedAllocation = false; - bool prefersDedicatedAllocation = false; - allocator->GetImageMemoryRequirements(image, vkMemReq, - requiresDedicatedAllocation, prefersDedicatedAllocation); - - VkResult result = allocator->AllocateMemory( - vkMemReq, - requiresDedicatedAllocation, - prefersDedicatedAllocation, - VK_NULL_HANDLE, // dedicatedBuffer - image, // dedicatedImage - UINT32_MAX, // dedicatedBufferImageUsage - *pCreateInfo, - VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN, - 1, // allocationCount - pAllocation); - - if(pAllocationInfo && result == VK_SUCCESS) - { - allocator->GetAllocationInfo(*pAllocation, pAllocationInfo); - } - - return result; -} - -VMA_CALL_PRE void VMA_CALL_POST vmaFreeMemory( - VmaAllocator allocator, - VmaAllocation allocation) -{ - VMA_ASSERT(allocator); - - if(allocation == VK_NULL_HANDLE) - { - return; - } - - VMA_DEBUG_LOG("vmaFreeMemory"); - - 
VMA_DEBUG_GLOBAL_MUTEX_LOCK - - allocator->FreeMemory( - 1, // allocationCount - &allocation); -} - -VMA_CALL_PRE void VMA_CALL_POST vmaFreeMemoryPages( - VmaAllocator allocator, - size_t allocationCount, - const VmaAllocation* pAllocations) -{ - if(allocationCount == 0) - { - return; - } - - VMA_ASSERT(allocator); - - VMA_DEBUG_LOG("vmaFreeMemoryPages"); - - VMA_DEBUG_GLOBAL_MUTEX_LOCK - - allocator->FreeMemory(allocationCount, pAllocations); -} - -VMA_CALL_PRE void VMA_CALL_POST vmaGetAllocationInfo( - VmaAllocator allocator, - VmaAllocation allocation, - VmaAllocationInfo* pAllocationInfo) -{ - VMA_ASSERT(allocator && allocation && pAllocationInfo); - - VMA_DEBUG_GLOBAL_MUTEX_LOCK - - allocator->GetAllocationInfo(allocation, pAllocationInfo); -} - -VMA_CALL_PRE void VMA_CALL_POST vmaSetAllocationUserData( - VmaAllocator allocator, - VmaAllocation allocation, - void* pUserData) -{ - VMA_ASSERT(allocator && allocation); - - VMA_DEBUG_GLOBAL_MUTEX_LOCK - - allocation->SetUserData(allocator, pUserData); -} - -VMA_CALL_PRE void VMA_CALL_POST vmaSetAllocationName( - VmaAllocator VMA_NOT_NULL allocator, - VmaAllocation VMA_NOT_NULL allocation, - const char* VMA_NULLABLE pName) -{ - allocation->SetName(allocator, pName); -} - -VMA_CALL_PRE void VMA_CALL_POST vmaGetAllocationMemoryProperties( - VmaAllocator VMA_NOT_NULL allocator, - VmaAllocation VMA_NOT_NULL allocation, - VkMemoryPropertyFlags* VMA_NOT_NULL pFlags) -{ - VMA_ASSERT(allocator && allocation && pFlags); - const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex(); - *pFlags = allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags; -} - -VMA_CALL_PRE VkResult VMA_CALL_POST vmaMapMemory( - VmaAllocator allocator, - VmaAllocation allocation, - void** ppData) -{ - VMA_ASSERT(allocator && allocation && ppData); - - VMA_DEBUG_GLOBAL_MUTEX_LOCK - - return allocator->Map(allocation, ppData); -} - -VMA_CALL_PRE void VMA_CALL_POST vmaUnmapMemory( - VmaAllocator allocator, - VmaAllocation allocation) -{ - VMA_ASSERT(allocator && allocation); - - VMA_DEBUG_GLOBAL_MUTEX_LOCK - - allocator->Unmap(allocation); -} - -VMA_CALL_PRE VkResult VMA_CALL_POST vmaFlushAllocation( - VmaAllocator allocator, - VmaAllocation allocation, - VkDeviceSize offset, - VkDeviceSize size) -{ - VMA_ASSERT(allocator && allocation); - - VMA_DEBUG_LOG("vmaFlushAllocation"); - - VMA_DEBUG_GLOBAL_MUTEX_LOCK - - const VkResult res = allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_FLUSH); - - return res; -} - -VMA_CALL_PRE VkResult VMA_CALL_POST vmaInvalidateAllocation( - VmaAllocator allocator, - VmaAllocation allocation, - VkDeviceSize offset, - VkDeviceSize size) -{ - VMA_ASSERT(allocator && allocation); - - VMA_DEBUG_LOG("vmaInvalidateAllocation"); - - VMA_DEBUG_GLOBAL_MUTEX_LOCK - - const VkResult res = allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_INVALIDATE); - - return res; -} - -VMA_CALL_PRE VkResult VMA_CALL_POST vmaFlushAllocations( - VmaAllocator allocator, - uint32_t allocationCount, - const VmaAllocation* allocations, - const VkDeviceSize* offsets, - const VkDeviceSize* sizes) -{ - VMA_ASSERT(allocator); - - if(allocationCount == 0) - { - return VK_SUCCESS; - } - - VMA_ASSERT(allocations); - - VMA_DEBUG_LOG("vmaFlushAllocations"); - - VMA_DEBUG_GLOBAL_MUTEX_LOCK - - const VkResult res = allocator->FlushOrInvalidateAllocations(allocationCount, allocations, offsets, sizes, VMA_CACHE_FLUSH); - - return res; -} - -VMA_CALL_PRE VkResult VMA_CALL_POST vmaInvalidateAllocations( - VmaAllocator allocator, 
- uint32_t allocationCount, - const VmaAllocation* allocations, - const VkDeviceSize* offsets, - const VkDeviceSize* sizes) -{ - VMA_ASSERT(allocator); - - if(allocationCount == 0) - { - return VK_SUCCESS; - } - - VMA_ASSERT(allocations); - - VMA_DEBUG_LOG("vmaInvalidateAllocations"); - - VMA_DEBUG_GLOBAL_MUTEX_LOCK - - const VkResult res = allocator->FlushOrInvalidateAllocations(allocationCount, allocations, offsets, sizes, VMA_CACHE_INVALIDATE); - - return res; -} - -VMA_CALL_PRE VkResult VMA_CALL_POST vmaCheckCorruption( - VmaAllocator allocator, - uint32_t memoryTypeBits) -{ - VMA_ASSERT(allocator); - - VMA_DEBUG_LOG("vmaCheckCorruption"); - - VMA_DEBUG_GLOBAL_MUTEX_LOCK - - return allocator->CheckCorruption(memoryTypeBits); -} - -VMA_CALL_PRE VkResult VMA_CALL_POST vmaBeginDefragmentation( - VmaAllocator allocator, - const VmaDefragmentationInfo* pInfo, - VmaDefragmentationContext* pContext) -{ - VMA_ASSERT(allocator && pInfo && pContext); - - VMA_DEBUG_LOG("vmaBeginDefragmentation"); - - if (pInfo->pool != VMA_NULL) - { - // Check if run on supported algorithms - if (pInfo->pool->m_BlockVector.GetAlgorithm() & VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT) - return VK_ERROR_FEATURE_NOT_PRESENT; - } - - VMA_DEBUG_GLOBAL_MUTEX_LOCK - - *pContext = vma_new(allocator, VmaDefragmentationContext_T)(allocator, *pInfo); - return VK_SUCCESS; -} - -VMA_CALL_PRE void VMA_CALL_POST vmaEndDefragmentation( - VmaAllocator allocator, - VmaDefragmentationContext context, - VmaDefragmentationStats* pStats) -{ - VMA_ASSERT(allocator && context); - - VMA_DEBUG_LOG("vmaEndDefragmentation"); - - VMA_DEBUG_GLOBAL_MUTEX_LOCK - - if (pStats) - context->GetStats(*pStats); - vma_delete(allocator, context); -} - -VMA_CALL_PRE VkResult VMA_CALL_POST vmaBeginDefragmentationPass( - VmaAllocator VMA_NOT_NULL allocator, - VmaDefragmentationContext VMA_NOT_NULL context, - VmaDefragmentationPassMoveInfo* VMA_NOT_NULL pPassInfo) -{ - VMA_ASSERT(context && pPassInfo); - - VMA_DEBUG_LOG("vmaBeginDefragmentationPass"); - - VMA_DEBUG_GLOBAL_MUTEX_LOCK - - return context->DefragmentPassBegin(*pPassInfo); -} - -VMA_CALL_PRE VkResult VMA_CALL_POST vmaEndDefragmentationPass( - VmaAllocator VMA_NOT_NULL allocator, - VmaDefragmentationContext VMA_NOT_NULL context, - VmaDefragmentationPassMoveInfo* VMA_NOT_NULL pPassInfo) -{ - VMA_ASSERT(context && pPassInfo); - - VMA_DEBUG_LOG("vmaEndDefragmentationPass"); - - VMA_DEBUG_GLOBAL_MUTEX_LOCK - - return context->DefragmentPassEnd(*pPassInfo); -} - -VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindBufferMemory( - VmaAllocator allocator, - VmaAllocation allocation, - VkBuffer buffer) -{ - VMA_ASSERT(allocator && allocation && buffer); - - VMA_DEBUG_LOG("vmaBindBufferMemory"); - - VMA_DEBUG_GLOBAL_MUTEX_LOCK - - return allocator->BindBufferMemory(allocation, 0, buffer, VMA_NULL); -} - -VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindBufferMemory2( - VmaAllocator allocator, - VmaAllocation allocation, - VkDeviceSize allocationLocalOffset, - VkBuffer buffer, - const void* pNext) -{ - VMA_ASSERT(allocator && allocation && buffer); - - VMA_DEBUG_LOG("vmaBindBufferMemory2"); - - VMA_DEBUG_GLOBAL_MUTEX_LOCK - - return allocator->BindBufferMemory(allocation, allocationLocalOffset, buffer, pNext); -} - -VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindImageMemory( - VmaAllocator allocator, - VmaAllocation allocation, - VkImage image) -{ - VMA_ASSERT(allocator && allocation && image); - - VMA_DEBUG_LOG("vmaBindImageMemory"); - - VMA_DEBUG_GLOBAL_MUTEX_LOCK - - return allocator->BindImageMemory(allocation, 0, image, 
VMA_NULL); -} - -VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindImageMemory2( - VmaAllocator allocator, - VmaAllocation allocation, - VkDeviceSize allocationLocalOffset, - VkImage image, - const void* pNext) -{ - VMA_ASSERT(allocator && allocation && image); - - VMA_DEBUG_LOG("vmaBindImageMemory2"); - - VMA_DEBUG_GLOBAL_MUTEX_LOCK - - return allocator->BindImageMemory(allocation, allocationLocalOffset, image, pNext); -} - -VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateBuffer( - VmaAllocator allocator, - const VkBufferCreateInfo* pBufferCreateInfo, - const VmaAllocationCreateInfo* pAllocationCreateInfo, - VkBuffer* pBuffer, - VmaAllocation* pAllocation, - VmaAllocationInfo* pAllocationInfo) -{ - VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation); - - if(pBufferCreateInfo->size == 0) - { - return VK_ERROR_INITIALIZATION_FAILED; - } - if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_COPY) != 0 && - !allocator->m_UseKhrBufferDeviceAddress) - { - VMA_ASSERT(0 && "Creating a buffer with VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT is not valid if VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT was not used."); - return VK_ERROR_INITIALIZATION_FAILED; - } - - VMA_DEBUG_LOG("vmaCreateBuffer"); - - VMA_DEBUG_GLOBAL_MUTEX_LOCK - - *pBuffer = VK_NULL_HANDLE; - *pAllocation = VK_NULL_HANDLE; - - // 1. Create VkBuffer. - VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)( - allocator->m_hDevice, - pBufferCreateInfo, - allocator->GetAllocationCallbacks(), - pBuffer); - if(res >= 0) - { - // 2. vkGetBufferMemoryRequirements. - VkMemoryRequirements vkMemReq = {}; - bool requiresDedicatedAllocation = false; - bool prefersDedicatedAllocation = false; - allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq, - requiresDedicatedAllocation, prefersDedicatedAllocation); - - // 3. Allocate memory using allocator. - res = allocator->AllocateMemory( - vkMemReq, - requiresDedicatedAllocation, - prefersDedicatedAllocation, - *pBuffer, // dedicatedBuffer - VK_NULL_HANDLE, // dedicatedImage - pBufferCreateInfo->usage, // dedicatedBufferImageUsage - *pAllocationCreateInfo, - VMA_SUBALLOCATION_TYPE_BUFFER, - 1, // allocationCount - pAllocation); - - if(res >= 0) - { - // 3. Bind buffer with memory. - if((pAllocationCreateInfo->flags & VMA_ALLOCATION_CREATE_DONT_BIND_BIT) == 0) - { - res = allocator->BindBufferMemory(*pAllocation, 0, *pBuffer, VMA_NULL); - } - if(res >= 0) - { - // All steps succeeded. 
- #if VMA_STATS_STRING_ENABLED - (*pAllocation)->InitBufferImageUsage(pBufferCreateInfo->usage); - #endif - if(pAllocationInfo != VMA_NULL) - { - allocator->GetAllocationInfo(*pAllocation, pAllocationInfo); - } - - return VK_SUCCESS; - } - allocator->FreeMemory( - 1, // allocationCount - pAllocation); - *pAllocation = VK_NULL_HANDLE; - (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks()); - *pBuffer = VK_NULL_HANDLE; - return res; - } - (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks()); - *pBuffer = VK_NULL_HANDLE; - return res; - } - return res; -} - -VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateBufferWithAlignment( - VmaAllocator allocator, - const VkBufferCreateInfo* pBufferCreateInfo, - const VmaAllocationCreateInfo* pAllocationCreateInfo, - VkDeviceSize minAlignment, - VkBuffer* pBuffer, - VmaAllocation* pAllocation, - VmaAllocationInfo* pAllocationInfo) -{ - VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && VmaIsPow2(minAlignment) && pBuffer && pAllocation); - - if(pBufferCreateInfo->size == 0) - { - return VK_ERROR_INITIALIZATION_FAILED; - } - if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_COPY) != 0 && - !allocator->m_UseKhrBufferDeviceAddress) - { - VMA_ASSERT(0 && "Creating a buffer with VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT is not valid if VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT was not used."); - return VK_ERROR_INITIALIZATION_FAILED; - } - - VMA_DEBUG_LOG("vmaCreateBufferWithAlignment"); - - VMA_DEBUG_GLOBAL_MUTEX_LOCK - - *pBuffer = VK_NULL_HANDLE; - *pAllocation = VK_NULL_HANDLE; - - // 1. Create VkBuffer. - VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)( - allocator->m_hDevice, - pBufferCreateInfo, - allocator->GetAllocationCallbacks(), - pBuffer); - if(res >= 0) - { - // 2. vkGetBufferMemoryRequirements. - VkMemoryRequirements vkMemReq = {}; - bool requiresDedicatedAllocation = false; - bool prefersDedicatedAllocation = false; - allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq, - requiresDedicatedAllocation, prefersDedicatedAllocation); - - // 2a. Include minAlignment - vkMemReq.alignment = VMA_MAX(vkMemReq.alignment, minAlignment); - - // 3. Allocate memory using allocator. - res = allocator->AllocateMemory( - vkMemReq, - requiresDedicatedAllocation, - prefersDedicatedAllocation, - *pBuffer, // dedicatedBuffer - VK_NULL_HANDLE, // dedicatedImage - pBufferCreateInfo->usage, // dedicatedBufferImageUsage - *pAllocationCreateInfo, - VMA_SUBALLOCATION_TYPE_BUFFER, - 1, // allocationCount - pAllocation); - - if(res >= 0) - { - // 3. Bind buffer with memory. - if((pAllocationCreateInfo->flags & VMA_ALLOCATION_CREATE_DONT_BIND_BIT) == 0) - { - res = allocator->BindBufferMemory(*pAllocation, 0, *pBuffer, VMA_NULL); - } - if(res >= 0) - { - // All steps succeeded. 
- #if VMA_STATS_STRING_ENABLED - (*pAllocation)->InitBufferImageUsage(pBufferCreateInfo->usage); - #endif - if(pAllocationInfo != VMA_NULL) - { - allocator->GetAllocationInfo(*pAllocation, pAllocationInfo); - } - - return VK_SUCCESS; - } - allocator->FreeMemory( - 1, // allocationCount - pAllocation); - *pAllocation = VK_NULL_HANDLE; - (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks()); - *pBuffer = VK_NULL_HANDLE; - return res; - } - (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks()); - *pBuffer = VK_NULL_HANDLE; - return res; - } - return res; -} - -VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateAliasingBuffer( - VmaAllocator VMA_NOT_NULL allocator, - VmaAllocation VMA_NOT_NULL allocation, - const VkBufferCreateInfo* VMA_NOT_NULL pBufferCreateInfo, - VkBuffer VMA_NULLABLE_NON_DISPATCHABLE* VMA_NOT_NULL pBuffer) -{ - VMA_ASSERT(allocator && pBufferCreateInfo && pBuffer && allocation); - - VMA_DEBUG_LOG("vmaCreateAliasingBuffer"); - - *pBuffer = VK_NULL_HANDLE; - - if (pBufferCreateInfo->size == 0) - { - return VK_ERROR_INITIALIZATION_FAILED; - } - if ((pBufferCreateInfo->usage & VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_COPY) != 0 && - !allocator->m_UseKhrBufferDeviceAddress) - { - VMA_ASSERT(0 && "Creating a buffer with VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT is not valid if VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT was not used."); - return VK_ERROR_INITIALIZATION_FAILED; - } - - VMA_DEBUG_GLOBAL_MUTEX_LOCK - - // 1. Create VkBuffer. - VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)( - allocator->m_hDevice, - pBufferCreateInfo, - allocator->GetAllocationCallbacks(), - pBuffer); - if (res >= 0) - { - // 2. Bind buffer with memory. - res = allocator->BindBufferMemory(allocation, 0, *pBuffer, VMA_NULL); - if (res >= 0) - { - return VK_SUCCESS; - } - (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks()); - } - return res; -} - -VMA_CALL_PRE void VMA_CALL_POST vmaDestroyBuffer( - VmaAllocator allocator, - VkBuffer buffer, - VmaAllocation allocation) -{ - VMA_ASSERT(allocator); - - if(buffer == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE) - { - return; - } - - VMA_DEBUG_LOG("vmaDestroyBuffer"); - - VMA_DEBUG_GLOBAL_MUTEX_LOCK - - if(buffer != VK_NULL_HANDLE) - { - (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks()); - } - - if(allocation != VK_NULL_HANDLE) - { - allocator->FreeMemory( - 1, // allocationCount - &allocation); - } -} - -VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateImage( - VmaAllocator allocator, - const VkImageCreateInfo* pImageCreateInfo, - const VmaAllocationCreateInfo* pAllocationCreateInfo, - VkImage* pImage, - VmaAllocation* pAllocation, - VmaAllocationInfo* pAllocationInfo) -{ - VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation); - - if(pImageCreateInfo->extent.width == 0 || - pImageCreateInfo->extent.height == 0 || - pImageCreateInfo->extent.depth == 0 || - pImageCreateInfo->mipLevels == 0 || - pImageCreateInfo->arrayLayers == 0) - { - return VK_ERROR_INITIALIZATION_FAILED; - } - - VMA_DEBUG_LOG("vmaCreateImage"); - - VMA_DEBUG_GLOBAL_MUTEX_LOCK - - *pImage = VK_NULL_HANDLE; - *pAllocation = VK_NULL_HANDLE; - - // 1. Create VkImage. 
- VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)( - allocator->m_hDevice, - pImageCreateInfo, - allocator->GetAllocationCallbacks(), - pImage); - if(res >= 0) - { - VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ? - VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL : - VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR; - - // 2. Allocate memory using allocator. - VkMemoryRequirements vkMemReq = {}; - bool requiresDedicatedAllocation = false; - bool prefersDedicatedAllocation = false; - allocator->GetImageMemoryRequirements(*pImage, vkMemReq, - requiresDedicatedAllocation, prefersDedicatedAllocation); - - res = allocator->AllocateMemory( - vkMemReq, - requiresDedicatedAllocation, - prefersDedicatedAllocation, - VK_NULL_HANDLE, // dedicatedBuffer - *pImage, // dedicatedImage - pImageCreateInfo->usage, // dedicatedBufferImageUsage - *pAllocationCreateInfo, - suballocType, - 1, // allocationCount - pAllocation); - - if(res >= 0) - { - // 3. Bind image with memory. - if((pAllocationCreateInfo->flags & VMA_ALLOCATION_CREATE_DONT_BIND_BIT) == 0) - { - res = allocator->BindImageMemory(*pAllocation, 0, *pImage, VMA_NULL); - } - if(res >= 0) - { - // All steps succeeded. - #if VMA_STATS_STRING_ENABLED - (*pAllocation)->InitBufferImageUsage(pImageCreateInfo->usage); - #endif - if(pAllocationInfo != VMA_NULL) - { - allocator->GetAllocationInfo(*pAllocation, pAllocationInfo); - } - - return VK_SUCCESS; - } - allocator->FreeMemory( - 1, // allocationCount - pAllocation); - *pAllocation = VK_NULL_HANDLE; - (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks()); - *pImage = VK_NULL_HANDLE; - return res; - } - (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks()); - *pImage = VK_NULL_HANDLE; - return res; - } - return res; -} - -VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateAliasingImage( - VmaAllocator VMA_NOT_NULL allocator, - VmaAllocation VMA_NOT_NULL allocation, - const VkImageCreateInfo* VMA_NOT_NULL pImageCreateInfo, - VkImage VMA_NULLABLE_NON_DISPATCHABLE* VMA_NOT_NULL pImage) -{ - VMA_ASSERT(allocator && pImageCreateInfo && pImage && allocation); - - *pImage = VK_NULL_HANDLE; - - VMA_DEBUG_LOG("vmaCreateImage"); - - if (pImageCreateInfo->extent.width == 0 || - pImageCreateInfo->extent.height == 0 || - pImageCreateInfo->extent.depth == 0 || - pImageCreateInfo->mipLevels == 0 || - pImageCreateInfo->arrayLayers == 0) - { - return VK_ERROR_INITIALIZATION_FAILED; - } - - VMA_DEBUG_GLOBAL_MUTEX_LOCK - - // 1. Create VkImage. - VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)( - allocator->m_hDevice, - pImageCreateInfo, - allocator->GetAllocationCallbacks(), - pImage); - if (res >= 0) - { - // 2. Bind image with memory. 
- res = allocator->BindImageMemory(allocation, 0, *pImage, VMA_NULL); - if (res >= 0) - { - return VK_SUCCESS; - } - (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks()); - } - return res; -} - -VMA_CALL_PRE void VMA_CALL_POST vmaDestroyImage( - VmaAllocator VMA_NOT_NULL allocator, - VkImage VMA_NULLABLE_NON_DISPATCHABLE image, - VmaAllocation VMA_NULLABLE allocation) -{ - VMA_ASSERT(allocator); - - if(image == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE) - { - return; - } - - VMA_DEBUG_LOG("vmaDestroyImage"); - - VMA_DEBUG_GLOBAL_MUTEX_LOCK - - if(image != VK_NULL_HANDLE) - { - (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks()); - } - if(allocation != VK_NULL_HANDLE) - { - allocator->FreeMemory( - 1, // allocationCount - &allocation); - } -} - -VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateVirtualBlock( - const VmaVirtualBlockCreateInfo* VMA_NOT_NULL pCreateInfo, - VmaVirtualBlock VMA_NULLABLE * VMA_NOT_NULL pVirtualBlock) -{ - VMA_ASSERT(pCreateInfo && pVirtualBlock); - VMA_ASSERT(pCreateInfo->size > 0); - VMA_DEBUG_LOG("vmaCreateVirtualBlock"); - VMA_DEBUG_GLOBAL_MUTEX_LOCK; - *pVirtualBlock = vma_new(pCreateInfo->pAllocationCallbacks, VmaVirtualBlock_T)(*pCreateInfo); - VkResult res = (*pVirtualBlock)->Init(); - if(res < 0) - { - vma_delete(pCreateInfo->pAllocationCallbacks, *pVirtualBlock); - *pVirtualBlock = VK_NULL_HANDLE; - } - return res; -} - -VMA_CALL_PRE void VMA_CALL_POST vmaDestroyVirtualBlock(VmaVirtualBlock VMA_NULLABLE virtualBlock) -{ - if(virtualBlock != VK_NULL_HANDLE) - { - VMA_DEBUG_LOG("vmaDestroyVirtualBlock"); - VMA_DEBUG_GLOBAL_MUTEX_LOCK; - VkAllocationCallbacks allocationCallbacks = virtualBlock->m_AllocationCallbacks; // Have to copy the callbacks when destroying. - vma_delete(&allocationCallbacks, virtualBlock); - } -} - -VMA_CALL_PRE VkBool32 VMA_CALL_POST vmaIsVirtualBlockEmpty(VmaVirtualBlock VMA_NOT_NULL virtualBlock) -{ - VMA_ASSERT(virtualBlock != VK_NULL_HANDLE); - VMA_DEBUG_LOG("vmaIsVirtualBlockEmpty"); - VMA_DEBUG_GLOBAL_MUTEX_LOCK; - return virtualBlock->IsEmpty() ? 
VK_TRUE : VK_FALSE; -} - -VMA_CALL_PRE void VMA_CALL_POST vmaGetVirtualAllocationInfo(VmaVirtualBlock VMA_NOT_NULL virtualBlock, - VmaVirtualAllocation VMA_NOT_NULL_NON_DISPATCHABLE allocation, VmaVirtualAllocationInfo* VMA_NOT_NULL pVirtualAllocInfo) -{ - VMA_ASSERT(virtualBlock != VK_NULL_HANDLE && pVirtualAllocInfo != VMA_NULL); - VMA_DEBUG_LOG("vmaGetVirtualAllocationInfo"); - VMA_DEBUG_GLOBAL_MUTEX_LOCK; - virtualBlock->GetAllocationInfo(allocation, *pVirtualAllocInfo); -} - -VMA_CALL_PRE VkResult VMA_CALL_POST vmaVirtualAllocate(VmaVirtualBlock VMA_NOT_NULL virtualBlock, - const VmaVirtualAllocationCreateInfo* VMA_NOT_NULL pCreateInfo, VmaVirtualAllocation VMA_NULLABLE_NON_DISPATCHABLE* VMA_NOT_NULL pAllocation, - VkDeviceSize* VMA_NULLABLE pOffset) -{ - VMA_ASSERT(virtualBlock != VK_NULL_HANDLE && pCreateInfo != VMA_NULL && pAllocation != VMA_NULL); - VMA_DEBUG_LOG("vmaVirtualAllocate"); - VMA_DEBUG_GLOBAL_MUTEX_LOCK; - return virtualBlock->Allocate(*pCreateInfo, *pAllocation, pOffset); -} - -VMA_CALL_PRE void VMA_CALL_POST vmaVirtualFree(VmaVirtualBlock VMA_NOT_NULL virtualBlock, VmaVirtualAllocation VMA_NULLABLE_NON_DISPATCHABLE allocation) -{ - if(allocation != VK_NULL_HANDLE) - { - VMA_ASSERT(virtualBlock != VK_NULL_HANDLE); - VMA_DEBUG_LOG("vmaVirtualFree"); - VMA_DEBUG_GLOBAL_MUTEX_LOCK; - virtualBlock->Free(allocation); - } -} - -VMA_CALL_PRE void VMA_CALL_POST vmaClearVirtualBlock(VmaVirtualBlock VMA_NOT_NULL virtualBlock) -{ - VMA_ASSERT(virtualBlock != VK_NULL_HANDLE); - VMA_DEBUG_LOG("vmaClearVirtualBlock"); - VMA_DEBUG_GLOBAL_MUTEX_LOCK; - virtualBlock->Clear(); -} - -VMA_CALL_PRE void VMA_CALL_POST vmaSetVirtualAllocationUserData(VmaVirtualBlock VMA_NOT_NULL virtualBlock, - VmaVirtualAllocation VMA_NOT_NULL_NON_DISPATCHABLE allocation, void* VMA_NULLABLE pUserData) -{ - VMA_ASSERT(virtualBlock != VK_NULL_HANDLE); - VMA_DEBUG_LOG("vmaSetVirtualAllocationUserData"); - VMA_DEBUG_GLOBAL_MUTEX_LOCK; - virtualBlock->SetAllocationUserData(allocation, pUserData); -} - -VMA_CALL_PRE void VMA_CALL_POST vmaGetVirtualBlockStatistics(VmaVirtualBlock VMA_NOT_NULL virtualBlock, - VmaStatistics* VMA_NOT_NULL pStats) -{ - VMA_ASSERT(virtualBlock != VK_NULL_HANDLE && pStats != VMA_NULL); - VMA_DEBUG_LOG("vmaGetVirtualBlockStatistics"); - VMA_DEBUG_GLOBAL_MUTEX_LOCK; - virtualBlock->GetStatistics(*pStats); -} - -VMA_CALL_PRE void VMA_CALL_POST vmaCalculateVirtualBlockStatistics(VmaVirtualBlock VMA_NOT_NULL virtualBlock, - VmaDetailedStatistics* VMA_NOT_NULL pStats) -{ - VMA_ASSERT(virtualBlock != VK_NULL_HANDLE && pStats != VMA_NULL); - VMA_DEBUG_LOG("vmaCalculateVirtualBlockStatistics"); - VMA_DEBUG_GLOBAL_MUTEX_LOCK; - virtualBlock->CalculateDetailedStatistics(*pStats); -} - -#if VMA_STATS_STRING_ENABLED - -VMA_CALL_PRE void VMA_CALL_POST vmaBuildVirtualBlockStatsString(VmaVirtualBlock VMA_NOT_NULL virtualBlock, - char* VMA_NULLABLE * VMA_NOT_NULL ppStatsString, VkBool32 detailedMap) -{ - VMA_ASSERT(virtualBlock != VK_NULL_HANDLE && ppStatsString != VMA_NULL); - VMA_DEBUG_GLOBAL_MUTEX_LOCK; - const VkAllocationCallbacks* allocationCallbacks = virtualBlock->GetAllocationCallbacks(); - VmaStringBuilder sb(allocationCallbacks); - virtualBlock->BuildStatsString(detailedMap != VK_FALSE, sb); - *ppStatsString = VmaCreateStringCopy(allocationCallbacks, sb.GetData(), sb.GetLength()); -} - -VMA_CALL_PRE void VMA_CALL_POST vmaFreeVirtualBlockStatsString(VmaVirtualBlock VMA_NOT_NULL virtualBlock, - char* VMA_NULLABLE pStatsString) -{ - if(pStatsString != VMA_NULL) - { - VMA_ASSERT(virtualBlock != 
VK_NULL_HANDLE); - VMA_DEBUG_GLOBAL_MUTEX_LOCK; - VmaFreeString(virtualBlock->GetAllocationCallbacks(), pStatsString); - } -} -#endif // VMA_STATS_STRING_ENABLED -#endif // _VMA_PUBLIC_INTERFACE -#endif // VMA_IMPLEMENTATION - -/** -\page quick_start Quick start - -\section quick_start_project_setup Project setup - -Vulkan Memory Allocator comes in the form of an "stb-style" single header file. -You don't need to build it as a separate library project. -You can add this file directly to your project and submit it to your code repository next to your other source files. - -"Single header" doesn't mean that everything is contained in C/C++ declarations, -like it tends to be in the case of inline functions or C++ templates. -It means that the implementation is bundled with the interface in a single file and needs to be extracted using a preprocessor macro. -If you don't do it properly, you will get linker errors. - -To do it properly: - --# Include "vk_mem_alloc.h" file in each CPP file where you want to use the library. - This includes declarations of all members of the library. --# In exactly one CPP file define the following macro before this include. - It also enables internal definitions. - -\code -#define VMA_IMPLEMENTATION -#include "vk_mem_alloc.h" -\endcode - -It may be a good idea to create a dedicated CPP file just for this purpose. - -This library includes header `<vulkan/vulkan.h>`, which in turn -includes `<windows.h>` on Windows. If you need some specific macros defined -before including these headers (like `WIN32_LEAN_AND_MEAN` or -`WINVER` for Windows, `VK_USE_PLATFORM_WIN32_KHR` for Vulkan), you must define -them before every `#include` of this library. - -This library is written in C++, but has a C-compatible interface. -Thus you can include and use vk_mem_alloc.h in C or C++ code, but the full -implementation with the `VMA_IMPLEMENTATION` macro must be compiled as C++, NOT as C. -Some features of C++14 are used. STL containers, RTTI, or C++ exceptions are not used. - - -\section quick_start_initialization Initialization - -At program startup: - --# Initialize Vulkan to have `VkPhysicalDevice`, `VkDevice` and `VkInstance` objects. --# Fill the VmaAllocatorCreateInfo structure and create a #VmaAllocator object by - calling vmaCreateAllocator(). - -Only the members `physicalDevice`, `device`, `instance` are required. -However, you should inform the library which Vulkan version you use by setting -VmaAllocatorCreateInfo::vulkanApiVersion and which extensions you enabled -by setting VmaAllocatorCreateInfo::flags (like #VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT for VK_KHR_buffer_device_address). -Otherwise, VMA would use only features of Vulkan 1.0 core with no extensions. - -You may need to configure importing Vulkan functions. There are 3 ways to do this: - --# **If you link with Vulkan static library** (e.g. "vulkan-1.lib" on Windows): - - You don't need to do anything. - - VMA will use these, as macro `VMA_STATIC_VULKAN_FUNCTIONS` is defined to 1 by default. --# **If you want VMA to fetch pointers to Vulkan functions dynamically** using `vkGetInstanceProcAddr`, - `vkGetDeviceProcAddr` (this is the option presented in the example below): - - Define `VMA_STATIC_VULKAN_FUNCTIONS` to 0, `VMA_DYNAMIC_VULKAN_FUNCTIONS` to 1. - - Provide pointers to these two functions via VmaVulkanFunctions::vkGetInstanceProcAddr, - VmaVulkanFunctions::vkGetDeviceProcAddr. - - The library will fetch pointers to all other functions it needs internally. --# **If you fetch pointers to all Vulkan functions in a custom way**, e.g.
using some loader like - [Volk](https://github.com/zeux/volk): - - Define `VMA_STATIC_VULKAN_FUNCTIONS` and `VMA_DYNAMIC_VULKAN_FUNCTIONS` to 0. - - Pass these pointers via structure #VmaVulkanFunctions. - -\code -VmaVulkanFunctions vulkanFunctions = {}; -vulkanFunctions.vkGetInstanceProcAddr = &vkGetInstanceProcAddr; -vulkanFunctions.vkGetDeviceProcAddr = &vkGetDeviceProcAddr; - -VmaAllocatorCreateInfo allocatorCreateInfo = {}; -allocatorCreateInfo.vulkanApiVersion = VK_API_VERSION_1_2; -allocatorCreateInfo.physicalDevice = physicalDevice; -allocatorCreateInfo.device = device; -allocatorCreateInfo.instance = instance; -allocatorCreateInfo.pVulkanFunctions = &vulkanFunctions; - -VmaAllocator allocator; -vmaCreateAllocator(&allocatorCreateInfo, &allocator); -\endcode - - -\section quick_start_resource_allocation Resource allocation - -When you want to create a buffer or image: - --# Fill `VkBufferCreateInfo` / `VkImageCreateInfo` structure. --# Fill VmaAllocationCreateInfo structure. --# Call vmaCreateBuffer() / vmaCreateImage() to get `VkBuffer`/`VkImage` with memory - already allocated and bound to it, plus #VmaAllocation objects that represents its underlying memory. - -\code -VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO }; -bufferInfo.size = 65536; -bufferInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT; - -VmaAllocationCreateInfo allocInfo = {}; -allocInfo.usage = VMA_MEMORY_USAGE_AUTO; - -VkBuffer buffer; -VmaAllocation allocation; -vmaCreateBuffer(allocator, &bufferInfo, &allocInfo, &buffer, &allocation, nullptr); -\endcode - -Don't forget to destroy your objects when no longer needed: - -\code -vmaDestroyBuffer(allocator, buffer, allocation); -vmaDestroyAllocator(allocator); -\endcode - - -\page choosing_memory_type Choosing memory type - -Physical devices in Vulkan support various combinations of memory heaps and -types. Help with choosing correct and optimal memory type for your specific -resource is one of the key features of this library. You can use it by filling -appropriate members of VmaAllocationCreateInfo structure, as described below. -You can also combine multiple methods. - --# If you just want to find memory type index that meets your requirements, you - can use function: vmaFindMemoryTypeIndexForBufferInfo(), - vmaFindMemoryTypeIndexForImageInfo(), vmaFindMemoryTypeIndex(). --# If you want to allocate a region of device memory without association with any - specific image or buffer, you can use function vmaAllocateMemory(). Usage of - this function is not recommended and usually not needed. - vmaAllocateMemoryPages() function is also provided for creating multiple allocations at once, - which may be useful for sparse binding. --# If you already have a buffer or an image created, you want to allocate memory - for it and then you will bind it yourself, you can use function - vmaAllocateMemoryForBuffer(), vmaAllocateMemoryForImage(). - For binding you should use functions: vmaBindBufferMemory(), vmaBindImageMemory() - or their extended versions: vmaBindBufferMemory2(), vmaBindImageMemory2(). --# **This is the easiest and recommended way to use this library:** - If you want to create a buffer or an image, allocate memory for it and bind - them together, all in one call, you can use function vmaCreateBuffer(), - vmaCreateImage(). - -When using 3. 
or 4., the library internally queries Vulkan for memory types -supported for that buffer or image (function `vkGetBufferMemoryRequirements()`) -and uses only one of these types. - -If no memory type can be found that meets all the requirements, these functions -return `VK_ERROR_FEATURE_NOT_PRESENT`. - -You can leave the VmaAllocationCreateInfo structure completely filled with zeros. -It means no requirements are specified for the memory type. -It is valid, although not very useful. - -\section choosing_memory_type_usage Usage - -The easiest way to specify memory requirements is to fill the member -VmaAllocationCreateInfo::usage using one of the values of enum #VmaMemoryUsage. -It defines high level, common usage types. -Since version 3 of the library, it is recommended to use #VMA_MEMORY_USAGE_AUTO to let it select the best memory type for your resource automatically. - -For example, if you want to create a uniform buffer that will be filled using -transfer only once or infrequently and then used for rendering every frame as a uniform buffer, you can -do it using the following code. The buffer will most likely end up in a memory type with -`VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT` to be fast to access by the GPU device. - -\code -VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO }; -bufferInfo.size = 65536; -bufferInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT; - -VmaAllocationCreateInfo allocInfo = {}; -allocInfo.usage = VMA_MEMORY_USAGE_AUTO; - -VkBuffer buffer; -VmaAllocation allocation; -vmaCreateBuffer(allocator, &bufferInfo, &allocInfo, &buffer, &allocation, nullptr); -\endcode - -If you have a preference for putting the resource in GPU (device) memory or CPU (host) memory -on systems with a discrete graphics card where these memories are separate, you can use -#VMA_MEMORY_USAGE_AUTO_PREFER_DEVICE or #VMA_MEMORY_USAGE_AUTO_PREFER_HOST. - -When using `VMA_MEMORY_USAGE_AUTO*` while you want to map the allocated memory, -you also need to specify one of the host access flags: -#VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT or #VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT. -This will help the library decide on a preferred memory type and ensure it has `VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT` -so you can map it. - -For example, a staging buffer that will be filled via a mapped pointer and then -used as a source of transfer to the buffer described previously can be created like this. -It will likely end up in a memory type that is `HOST_VISIBLE` and `HOST_COHERENT` -but not `HOST_CACHED` (meaning uncached, write-combined) and not `DEVICE_LOCAL` (meaning system RAM). - -\code -VkBufferCreateInfo stagingBufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO }; -stagingBufferInfo.size = 65536; -stagingBufferInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT; - -VmaAllocationCreateInfo stagingAllocInfo = {}; -stagingAllocInfo.usage = VMA_MEMORY_USAGE_AUTO; -stagingAllocInfo.flags = VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT; - -VkBuffer stagingBuffer; -VmaAllocation stagingAllocation; -vmaCreateBuffer(allocator, &stagingBufferInfo, &stagingAllocInfo, &stagingBuffer, &stagingAllocation, nullptr); -\endcode - -For more examples of creating different kinds of resources, see chapter \ref usage_patterns.
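A buffer going the other way, written by the GPU and read back by the CPU, follows the same pattern. The following is only an illustrative sketch (the `readback*` names are made up for this example), combining #VMA_MEMORY_USAGE_AUTO_PREFER_HOST with #VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT so the allocation ends up in `HOST_VISIBLE` (and typically `HOST_CACHED`) memory:

\code
// Illustrative sketch: a transfer destination buffer read back on the CPU.
VkBufferCreateInfo readbackBufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
readbackBufferInfo.size = 65536;
readbackBufferInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT;

VmaAllocationCreateInfo readbackAllocInfo = {};
readbackAllocInfo.usage = VMA_MEMORY_USAGE_AUTO_PREFER_HOST;
readbackAllocInfo.flags = VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT;

VkBuffer readbackBuffer;
VmaAllocation readbackAllocation;
vmaCreateBuffer(allocator, &readbackBufferInfo, &readbackAllocInfo, &readbackBuffer, &readbackAllocation, nullptr);
\endcode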
- -Usage values `VMA_MEMORY_USAGE_AUTO*` are legal to use only when the library knows -about the resource being created by having `VkBufferCreateInfo` / `VkImageCreateInfo` passed, -so they work with functions like: vmaCreateBuffer(), vmaCreateImage(), vmaFindMemoryTypeIndexForBufferInfo() etc. -If you allocate raw memory using the function vmaAllocateMemory(), you have to use other means of selecting -the memory type, as described below. - -\note -Old usage values (`VMA_MEMORY_USAGE_GPU_ONLY`, `VMA_MEMORY_USAGE_CPU_ONLY`, -`VMA_MEMORY_USAGE_CPU_TO_GPU`, `VMA_MEMORY_USAGE_GPU_TO_CPU`, `VMA_MEMORY_USAGE_CPU_COPY`) -are still available and work the same way as in previous versions of the library -for backward compatibility, but they are not recommended. - -\section choosing_memory_type_required_preferred_flags Required and preferred flags - -You can specify more detailed requirements by filling the members -VmaAllocationCreateInfo::requiredFlags and VmaAllocationCreateInfo::preferredFlags -with a combination of bits from enum `VkMemoryPropertyFlags`. For example, -if you want to create a buffer that will be persistently mapped on the host (so it -must be `HOST_VISIBLE`) and preferably will also be `HOST_COHERENT` and `HOST_CACHED`, -use the following code: - -\code -VmaAllocationCreateInfo allocInfo = {}; -allocInfo.requiredFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT; -allocInfo.preferredFlags = VK_MEMORY_PROPERTY_HOST_COHERENT_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT; -allocInfo.flags = VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT | VMA_ALLOCATION_CREATE_MAPPED_BIT; - -VkBuffer buffer; -VmaAllocation allocation; -vmaCreateBuffer(allocator, &bufferInfo, &allocInfo, &buffer, &allocation, nullptr); -\endcode - -A memory type is chosen that has all the required flags and as many preferred -flags set as possible. - -The value passed in VmaAllocationCreateInfo::usage is internally converted to a set of required and preferred flags, -plus some extra "magic" (heuristics). - -\section choosing_memory_type_explicit_memory_types Explicit memory types - -If you inspected the memory types available on the physical device and you have -a preference for memory types that you want to use, you can fill the member -VmaAllocationCreateInfo::memoryTypeBits. It is a bit mask, where each bit set -means that a memory type with that index is allowed to be used for the -allocation. The special value 0, just like `UINT32_MAX`, means there are no -restrictions on the memory type index. - -Please note that this member is NOT just a memory type index. -Still, you can use it to choose just one specific memory type. -For example, if you already determined that your buffer should be created in -memory type 2, use the following code: - -\code -uint32_t memoryTypeIndex = 2; - -VmaAllocationCreateInfo allocInfo = {}; -allocInfo.memoryTypeBits = 1u << memoryTypeIndex; - -VkBuffer buffer; -VmaAllocation allocation; -vmaCreateBuffer(allocator, &bufferInfo, &allocInfo, &buffer, &allocation, nullptr); -\endcode - - -\section choosing_memory_type_custom_memory_pools Custom memory pools - -If you allocate from a custom memory pool, all the ways of specifying memory -requirements described above are not applicable and the aforementioned members -of the VmaAllocationCreateInfo structure are ignored. The memory type is selected -explicitly when creating the pool and then used to make all the allocations from -that pool. For further details, see \ref custom_memory_pools.
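Whichever of these methods you use, you can check at runtime which kind of memory a given allocation actually ended up in. A minimal sketch using vmaGetAllocationMemoryProperties(), assuming the `allocator` and `allocation` from the examples above:

\code
// Query the property flags of the memory type this allocation was made from.
VkMemoryPropertyFlags memFlags;
vmaGetAllocationMemoryProperties(allocator, allocation, &memFlags);
if((memFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
{
    // The memory is mappable, so the buffer can be filled directly via vmaMapMemory().
}
\endcode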
- -\section choosing_memory_type_dedicated_allocations Dedicated allocations - -Memory for allocations is reserved out of larger block of `VkDeviceMemory` -allocated from Vulkan internally. That is the main feature of this whole library. -You can still request a separate memory block to be created for an allocation, -just like you would do in a trivial solution without using any allocator. -In that case, a buffer or image is always bound to that memory at offset 0. -This is called a "dedicated allocation". -You can explicitly request it by using flag #VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT. -The library can also internally decide to use dedicated allocation in some cases, e.g.: - -- When the size of the allocation is large. -- When [VK_KHR_dedicated_allocation](@ref vk_khr_dedicated_allocation) extension is enabled - and it reports that dedicated allocation is required or recommended for the resource. -- When allocation of next big memory block fails due to not enough device memory, - but allocation with the exact requested size succeeds. - - -\page memory_mapping Memory mapping - -To "map memory" in Vulkan means to obtain a CPU pointer to `VkDeviceMemory`, -to be able to read from it or write to it in CPU code. -Mapping is possible only of memory allocated from a memory type that has -`VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT` flag. -Functions `vkMapMemory()`, `vkUnmapMemory()` are designed for this purpose. -You can use them directly with memory allocated by this library, -but it is not recommended because of following issue: -Mapping the same `VkDeviceMemory` block multiple times is illegal - only one mapping at a time is allowed. -This includes mapping disjoint regions. Mapping is not reference-counted internally by Vulkan. -Because of this, Vulkan Memory Allocator provides following facilities: - -\note If you want to be able to map an allocation, you need to specify one of the flags -#VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT or #VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT -in VmaAllocationCreateInfo::flags. These flags are required for an allocation to be mappable -when using #VMA_MEMORY_USAGE_AUTO or other `VMA_MEMORY_USAGE_AUTO*` enum values. -For other usage values they are ignored and every such allocation made in `HOST_VISIBLE` memory type is mappable, -but they can still be used for consistency. - -\section memory_mapping_mapping_functions Mapping functions - -The library provides following functions for mapping of a specific #VmaAllocation: vmaMapMemory(), vmaUnmapMemory(). -They are safer and more convenient to use than standard Vulkan functions. -You can map an allocation multiple times simultaneously - mapping is reference-counted internally. -You can also map different allocations simultaneously regardless of whether they use the same `VkDeviceMemory` block. -The way it is implemented is that the library always maps entire memory block, not just region of the allocation. -For further details, see description of vmaMapMemory() function. -Example: - -\code -// Having these objects initialized: -struct ConstantBuffer -{ - ... -}; -ConstantBuffer constantBufferData = ... - -VmaAllocator allocator = ... -VkBuffer constantBuffer = ... -VmaAllocation constantBufferAllocation = ... 
- -// You can map and fill your buffer using the following code: - -void* mappedData; -vmaMapMemory(allocator, constantBufferAllocation, &mappedData); -memcpy(mappedData, &constantBufferData, sizeof(constantBufferData)); -vmaUnmapMemory(allocator, constantBufferAllocation); -\endcode - -When mapping, you may see a warning from the Vulkan validation layer similar to this one: - -Mapping an image with layout VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL can result in undefined behavior if this memory is used by the device. Only GENERAL or PREINITIALIZED should be used. - -It happens because the library maps the entire `VkDeviceMemory` block, where different -types of images and buffers may end up together, especially on GPUs with unified memory like Intel. -You can safely ignore it if you are sure you access only memory of the intended -object that you wanted to map. - - -\section memory_mapping_persistently_mapped_memory Persistently mapped memory - -Keeping your memory persistently mapped is generally OK in Vulkan. -You don't need to unmap it before using its data on the GPU. -The library provides a special feature designed for that: -Allocations made with the #VMA_ALLOCATION_CREATE_MAPPED_BIT flag set in -VmaAllocationCreateInfo::flags stay mapped all the time, -so you can just access the CPU pointer to it at any time -without a need to call any "map" or "unmap" function. -Example: - -\code -VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO }; -bufCreateInfo.size = sizeof(ConstantBuffer); -bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT; - -VmaAllocationCreateInfo allocCreateInfo = {}; -allocCreateInfo.usage = VMA_MEMORY_USAGE_AUTO; -allocCreateInfo.flags = VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT | - VMA_ALLOCATION_CREATE_MAPPED_BIT; - -VkBuffer buf; -VmaAllocation alloc; -VmaAllocationInfo allocInfo; -vmaCreateBuffer(allocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo); - -// Buffer is already mapped. You can access its memory. -memcpy(allocInfo.pMappedData, &constantBufferData, sizeof(constantBufferData)); -\endcode - -\note #VMA_ALLOCATION_CREATE_MAPPED_BIT by itself doesn't guarantee that the allocation will end up -in a mappable memory type. -For this, you need to also specify #VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT or -#VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT. -#VMA_ALLOCATION_CREATE_MAPPED_BIT only guarantees that if the memory is `HOST_VISIBLE`, the allocation will be mapped on creation. -For an example of how to make use of this fact, see section \ref usage_patterns_advanced_data_uploading. - -\section memory_mapping_cache_control Cache flush and invalidate - -Memory in Vulkan doesn't need to be unmapped before using it on the GPU, -but unless a memory type has the `VK_MEMORY_PROPERTY_HOST_COHERENT_BIT` flag set, -you need to manually **invalidate** the cache before reading from a mapped pointer -and **flush** the cache after writing to a mapped pointer. -Map/unmap operations don't do that automatically. -Vulkan provides the following functions for this purpose: `vkFlushMappedMemoryRanges()`, -`vkInvalidateMappedMemoryRanges()`, but this library provides more convenient -functions that refer to a given allocation object: vmaFlushAllocation(), -vmaInvalidateAllocation(), -or multiple objects at once: vmaFlushAllocations(), vmaInvalidateAllocations(). - -Regions of memory specified for flush/invalidate must be aligned to -`VkPhysicalDeviceLimits::nonCoherentAtomSize`. This is automatically ensured by the library.
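As a minimal sketch, assuming the `allocator` and persistently mapped `alloc` from the example above may live in a non-coherent memory type, a flush after a CPU write (and an invalidate before a CPU read) could look like this; both calls accept `VK_WHOLE_SIZE` to cover the whole allocation:

\code
// After writing through the mapped pointer, make the write visible to the GPU.
vmaFlushAllocation(allocator, alloc, 0, VK_WHOLE_SIZE);

// Before reading data the GPU has written, invalidate the CPU cache instead.
vmaInvalidateAllocation(allocator, alloc, 0, VK_WHOLE_SIZE);
\endcode

On `HOST_COHERENT` memory types these calls do nothing, so it is typically safe to call them unconditionally.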
-In any memory type that is `HOST_VISIBLE` but not `HOST_COHERENT`, all allocations -within blocks are aligned to this value, so their offsets are always multiples of -`nonCoherentAtomSize` and two different allocations never share the same "line" of this size. - -Also, Windows drivers from all 3 PC GPU vendors (AMD, Intel, NVIDIA) -currently provide the `HOST_COHERENT` flag on all memory types that are -`HOST_VISIBLE`, so on PC you may not need to bother. - - -\page staying_within_budget Staying within budget - -When developing a graphics-intensive game or program, it is important to avoid allocating -more GPU memory than is physically available. When the memory is over-committed, -various bad things can happen, depending on the specific GPU, graphics driver, and -operating system: - -- It may just work without any problems. -- The application may slow down because some memory blocks are moved to system RAM - and the GPU has to access them through the PCI Express bus. -- A new allocation may take a very long time to complete, even a few seconds, and possibly - freeze the entire system. -- The new allocation may fail with `VK_ERROR_OUT_OF_DEVICE_MEMORY`. -- It may even result in a GPU crash (TDR), observed as `VK_ERROR_DEVICE_LOST` - returned somewhere later. - -\section staying_within_budget_querying_for_budget Querying for budget - -To query for current memory usage and available budget, use the function vmaGetHeapBudgets(). -The returned structure #VmaBudget contains quantities expressed in bytes, per Vulkan memory heap. - -Please note that this function returns different information and works faster than -vmaCalculateStatistics(). vmaGetHeapBudgets() can be called every frame or even before every -allocation, while vmaCalculateStatistics() is intended to be used rarely, -only to obtain statistical information, e.g. for debugging purposes. - -It is recommended to use the VK_EXT_memory_budget device extension to obtain information -about the budget from the Vulkan device. VMA is able to use this extension automatically. -When not enabled, the allocator behaves the same way, but then it estimates current usage -and available budget based on its internal information and Vulkan memory heap sizes, -which may be less precise. In order to use this extension: - -1. Make sure the extensions VK_EXT_memory_budget and VK_KHR_get_physical_device_properties2 - required by it are available and enable them. Please note that the first is a device - extension and the second is an instance extension! -2. Use flag #VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT when creating the #VmaAllocator object. -3. Make sure to call vmaSetCurrentFrameIndex() every frame. Budget is queried from - Vulkan inside of it to avoid the overhead of querying it with every allocation. - -\section staying_within_budget_controlling_memory_usage Controlling memory usage - -There are many ways in which you can try to stay within the budget. - -First, when making a new allocation requires allocating a new memory block, the library -tries not to exceed the budget automatically. If a block with the default recommended size -(e.g. 256 MB) would go over budget, a smaller block is allocated, possibly even -dedicated memory for just this resource. - -If the size of the requested resource plus current memory usage is more than the -budget, by default the library still tries to create it, leaving it to the Vulkan -implementation whether the allocation succeeds or fails. You can change this behavior -by using the #VMA_ALLOCATION_CREATE_WITHIN_BUDGET_BIT flag.
With it, the allocation is -not made if it would exceed the budget or if the budget is already exceeded. -VMA then tries to make the allocation from the next eligible Vulkan memory type. -If all of them fail, the call fails with `VK_ERROR_OUT_OF_DEVICE_MEMORY`. -An example usage pattern may be to pass the #VMA_ALLOCATION_CREATE_WITHIN_BUDGET_BIT flag -when creating resources that are not essential for the application (e.g. the texture -of a specific object) and not to pass it when creating critically important resources -(e.g. render targets). - -On AMD graphics cards there is a custom vendor extension available: VK_AMD_memory_overallocation_behavior -that allows controlling the behavior of the Vulkan implementation in out-of-memory cases - -whether it should fail with an error code or still allow the allocation. -Usage of this extension involves only passing an extra structure on Vulkan device creation, -so it is out of scope of this library. - -Finally, you can also use the #VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT flag to make sure -a new allocation is created only when it fits inside one of the existing memory blocks. -If it would require allocating a new block, it fails instead with `VK_ERROR_OUT_OF_DEVICE_MEMORY`. -This also ensures that the function call is very fast because it never goes to Vulkan -to obtain a new block. - -\note Creating \ref custom_memory_pools with VmaPoolCreateInfo::minBlockCount -set to more than 0 will currently try to allocate memory blocks without checking whether they -fit within budget. - - -\page resource_aliasing Resource aliasing (overlap) - -New explicit graphics APIs (Vulkan and Direct3D 12), thanks to manual memory -management, give an opportunity to alias (overlap) multiple resources in the -same region of memory - a feature not available in the old APIs (Direct3D 11, OpenGL). -It can be useful to save video memory, but it must be used with caution. - -For example, if you know the flow of your whole render frame in advance, you -are going to use some intermediate textures or buffers only during a small range of render passes, -and you know these ranges don't overlap in time, you can bind these resources to -the same place in memory, even if they have completely different parameters (width, height, format etc.). - -![Resource aliasing (overlap)](../gfx/Aliasing.png) - -Such a scenario is possible using VMA, but you need to create your images manually. -Then you need to calculate the parameters of an allocation to be made using the formula: - -- allocation size = max(size of each image) -- allocation alignment = max(alignment of each image) -- allocation memoryTypeBits = bitwise AND(memoryTypeBits of each image) - -The following example shows two different images bound to the same place in memory, -allocated to fit the largest of them. - -\code -// A 512x512 texture to be sampled. -VkImageCreateInfo img1CreateInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO }; -img1CreateInfo.imageType = VK_IMAGE_TYPE_2D; -img1CreateInfo.extent.width = 512; -img1CreateInfo.extent.height = 512; -img1CreateInfo.extent.depth = 1; -img1CreateInfo.mipLevels = 10; -img1CreateInfo.arrayLayers = 1; -img1CreateInfo.format = VK_FORMAT_R8G8B8A8_SRGB; -img1CreateInfo.tiling = VK_IMAGE_TILING_OPTIMAL; -img1CreateInfo.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED; -img1CreateInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT; -img1CreateInfo.samples = VK_SAMPLE_COUNT_1_BIT; - -// A full screen texture to be used as color attachment.
-VkImageCreateInfo img2CreateInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO }; -img2CreateInfo.imageType = VK_IMAGE_TYPE_2D; -img2CreateInfo.extent.width = 1920; -img2CreateInfo.extent.height = 1080; -img2CreateInfo.extent.depth = 1; -img2CreateInfo.mipLevels = 1; -img2CreateInfo.arrayLayers = 1; -img2CreateInfo.format = VK_FORMAT_R8G8B8A8_UNORM; -img2CreateInfo.tiling = VK_IMAGE_TILING_OPTIMAL; -img2CreateInfo.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED; -img2CreateInfo.usage = VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT; -img2CreateInfo.samples = VK_SAMPLE_COUNT_1_BIT; - -VkImage img1; -res = vkCreateImage(device, &img1CreateInfo, nullptr, &img1); -VkImage img2; -res = vkCreateImage(device, &img2CreateInfo, nullptr, &img2); - -VkMemoryRequirements img1MemReq; -vkGetImageMemoryRequirements(device, img1, &img1MemReq); -VkMemoryRequirements img2MemReq; -vkGetImageMemoryRequirements(device, img2, &img2MemReq); - -VkMemoryRequirements finalMemReq = {}; -finalMemReq.size = std::max(img1MemReq.size, img2MemReq.size); -finalMemReq.alignment = std::max(img1MemReq.alignment, img2MemReq.alignment); -finalMemReq.memoryTypeBits = img1MemReq.memoryTypeBits & img2MemReq.memoryTypeBits; -// Validate if(finalMemReq.memoryTypeBits != 0) - -VmaAllocationCreateInfo allocCreateInfo = {}; -allocCreateInfo.preferredFlags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT; - -VmaAllocation alloc; -res = vmaAllocateMemory(allocator, &finalMemReq, &allocCreateInfo, &alloc, nullptr); - -res = vmaBindImageMemory(allocator, alloc, img1); -res = vmaBindImageMemory(allocator, alloc, img2); - -// You can use img1, img2 here, but not at the same time! - -vmaFreeMemory(allocator, alloc); -vkDestroyImage(device, img2, nullptr); -vkDestroyImage(device, img1, nullptr); -\endcode - -Remember that using resources that alias in memory requires proper synchronization. -You need to issue a memory barrier to make sure commands that use `img1` and `img2` -don't overlap on the GPU timeline. -You also need to treat a resource after aliasing as uninitialized - containing garbage data. -For example, if you use `img1` and then want to use `img2`, you need to issue -an image memory barrier for `img2` with `oldLayout` = `VK_IMAGE_LAYOUT_UNDEFINED`. - -Additional considerations: - -- Vulkan also allows interpreting the contents of memory between aliasing resources consistently in some cases. -See chapter 11.8. "Memory Aliasing" of the Vulkan specification or the `VK_IMAGE_CREATE_ALIAS_BIT` flag. -- You can create a more complex layout where different images and buffers are bound -at different offsets inside one large allocation. For example, one can imagine -a big texture used in some render passes, aliasing with a set of many small buffers -used in some further passes. To bind a resource at a non-zero offset in an allocation, -use vmaBindBufferMemory2() / vmaBindImageMemory2(). -- Before allocating memory for the resources you want to alias, check `memoryTypeBits` -returned in the memory requirements of each resource to make sure the bits overlap. -Some GPUs may expose multiple memory types suitable e.g. only for buffers or -images with `COLOR_ATTACHMENT` usage, so the sets of memory types supported by your -resources may be disjoint. Aliasing them is not possible in that case. - - -\page custom_memory_pools Custom memory pools - -A memory pool contains a number of `VkDeviceMemory` blocks. -The library automatically creates and manages a default pool for each memory type available on the device. -The default memory pool automatically grows in size.
-Size of allocated blocks is also variable and managed automatically. - -You can create custom pool and allocate memory out of it. -It can be useful if you want to: - -- Keep certain kind of allocations separate from others. -- Enforce particular, fixed size of Vulkan memory blocks. -- Limit maximum amount of Vulkan memory allocated for that pool. -- Reserve minimum or fixed amount of Vulkan memory always preallocated for that pool. -- Use extra parameters for a set of your allocations that are available in #VmaPoolCreateInfo but not in - #VmaAllocationCreateInfo - e.g., custom minimum alignment, custom `pNext` chain. -- Perform defragmentation on a specific subset of your allocations. - -To use custom memory pools: - --# Fill VmaPoolCreateInfo structure. --# Call vmaCreatePool() to obtain #VmaPool handle. --# When making an allocation, set VmaAllocationCreateInfo::pool to this handle. - You don't need to specify any other parameters of this structure, like `usage`. - -Example: - -\code -// Find memoryTypeIndex for the pool. -VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO }; -sampleBufCreateInfo.size = 0x10000; // Doesn't matter. -sampleBufCreateInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT; - -VmaAllocationCreateInfo sampleAllocCreateInfo = {}; -sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_AUTO; - -uint32_t memTypeIndex; -VkResult res = vmaFindMemoryTypeIndexForBufferInfo(allocator, - &sampleBufCreateInfo, &sampleAllocCreateInfo, &memTypeIndex); -// Check res... - -// Create a pool that can have at most 2 blocks, 128 MiB each. -VmaPoolCreateInfo poolCreateInfo = {}; -poolCreateInfo.memoryTypeIndex = memTypeIndex; -poolCreateInfo.blockSize = 128ull * 1024 * 1024; -poolCreateInfo.maxBlockCount = 2; - -VmaPool pool; -res = vmaCreatePool(allocator, &poolCreateInfo, &pool); -// Check res... - -// Allocate a buffer out of it. -VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO }; -bufCreateInfo.size = 1024; -bufCreateInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT; - -VmaAllocationCreateInfo allocCreateInfo = {}; -allocCreateInfo.pool = pool; - -VkBuffer buf; -VmaAllocation alloc; -res = vmaCreateBuffer(allocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, nullptr); -// Check res... -\endcode - -You have to free all allocations made from this pool before destroying it. - -\code -vmaDestroyBuffer(allocator, buf, alloc); -vmaDestroyPool(allocator, pool); -\endcode - -New versions of this library support creating dedicated allocations in custom pools. -It is supported only when VmaPoolCreateInfo::blockSize = 0. -To use this feature, set VmaAllocationCreateInfo::pool to the pointer to your custom pool and -VmaAllocationCreateInfo::flags to #VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT. - -\note Excessive use of custom pools is a common mistake when using this library. -Custom pools may be useful for special purposes - when you want to -keep certain type of resources separate e.g. to reserve minimum amount of memory -for them or limit maximum amount of memory they can occupy. For most -resources this is not needed and so it is not recommended to create #VmaPool -objects and allocations out of them. Allocating from the default pool is sufficient. - - -\section custom_memory_pools_MemTypeIndex Choosing memory type index - -When creating a pool, you must explicitly specify memory type index. 
-To find the one suitable for your buffers or images, you can use helper functions -vmaFindMemoryTypeIndexForBufferInfo(), vmaFindMemoryTypeIndexForImageInfo(). -You need to provide structures with example parameters of buffers or images -that you are going to create in that pool. - -\code -VkBufferCreateInfo exampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO }; -exampleBufCreateInfo.size = 1024; // Doesn't matter -exampleBufCreateInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT; - -VmaAllocationCreateInfo allocCreateInfo = {}; -allocCreateInfo.usage = VMA_MEMORY_USAGE_AUTO; - -uint32_t memTypeIndex; -vmaFindMemoryTypeIndexForBufferInfo(allocator, &exampleBufCreateInfo, &allocCreateInfo, &memTypeIndex); - -VmaPoolCreateInfo poolCreateInfo = {}; -poolCreateInfo.memoryTypeIndex = memTypeIndex; -// ... -\endcode - -When creating buffers/images allocated in that pool, provide following parameters: - -- `VkBufferCreateInfo`: Prefer to pass same parameters as above. - Otherwise you risk creating resources in a memory type that is not suitable for them, which may result in undefined behavior. - Using different `VK_BUFFER_USAGE_` flags may work, but you shouldn't create images in a pool intended for buffers - or the other way around. -- VmaAllocationCreateInfo: You don't need to pass same parameters. Fill only `pool` member. - Other members are ignored anyway. - -\section linear_algorithm Linear allocation algorithm - -Each Vulkan memory block managed by this library has accompanying metadata that -keeps track of used and unused regions. By default, the metadata structure and -algorithm tries to find best place for new allocations among free regions to -optimize memory usage. This way you can allocate and free objects in any order. - -![Default allocation algorithm](../gfx/Linear_allocator_1_algo_default.png) - -Sometimes there is a need to use simpler, linear allocation algorithm. You can -create custom pool that uses such algorithm by adding flag -#VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT to VmaPoolCreateInfo::flags while creating -#VmaPool object. Then an alternative metadata management is used. It always -creates new allocations after last one and doesn't reuse free regions after -allocations freed in the middle. It results in better allocation performance and -less memory consumed by metadata. - -![Linear allocation algorithm](../gfx/Linear_allocator_2_algo_linear.png) - -With this one flag, you can create a custom pool that can be used in many ways: -free-at-once, stack, double stack, and ring buffer. See below for details. -You don't need to specify explicitly which of these options you are going to use - it is detected automatically. - -\subsection linear_algorithm_free_at_once Free-at-once - -In a pool that uses linear algorithm, you still need to free all the allocations -individually, e.g. by using vmaFreeMemory() or vmaDestroyBuffer(). You can free -them in any order. New allocations are always made after last one - free space -in the middle is not reused. However, when you release all the allocation and -the pool becomes empty, allocation starts from the beginning again. This way you -can use linear algorithm to speed up creation of allocations that you are going -to release all at once. - -![Free-at-once](../gfx/Linear_allocator_3_free_at_once.png) - -This mode is also available for pools created with VmaPoolCreateInfo::maxBlockCount -value that allows multiple memory blocks. 
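-
-For illustration, a minimal sketch of this free-at-once usage might look like the following.
-It assumes `allocator` is a valid #VmaAllocator and `memTypeIndex` was found e.g. with
-vmaFindMemoryTypeIndexForBufferInfo() as shown earlier; the buffer parameters are only examples.
-
-\code
-// Create a custom pool that uses the linear allocation algorithm.
-VmaPoolCreateInfo poolCreateInfo = {};
-poolCreateInfo.memoryTypeIndex = memTypeIndex;
-poolCreateInfo.flags = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
-
-VmaPool pool;
-VkResult res = vmaCreatePool(allocator, &poolCreateInfo, &pool);
-// Check res...
-
-VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
-bufCreateInfo.size = 1024;
-bufCreateInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
-
-VmaAllocationCreateInfo allocCreateInfo = {};
-allocCreateInfo.pool = pool;
-
-// Allocations are laid out one after another...
-VkBuffer bufs[3];
-VmaAllocation allocs[3];
-for(size_t i = 0; i < 3; ++i)
-{
-    res = vmaCreateBuffer(allocator, &bufCreateInfo, &allocCreateInfo, &bufs[i], &allocs[i], nullptr);
-    // Check res...
-}
-
-// ...and are all released together when no longer needed.
-for(size_t i = 0; i < 3; ++i)
-    vmaDestroyBuffer(allocator, bufs[i], allocs[i]);
-\endcode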
- -\subsection linear_algorithm_stack Stack - -When you free an allocation that was created last, its space can be reused. -Thanks to this, if you always release allocations in the order opposite to their -creation (LIFO - Last In First Out), you can achieve behavior of a stack. - -![Stack](../gfx/Linear_allocator_4_stack.png) - -This mode is also available for pools created with VmaPoolCreateInfo::maxBlockCount -value that allows multiple memory blocks. - -\subsection linear_algorithm_double_stack Double stack - -The space reserved by a custom pool with linear algorithm may be used by two -stacks: - -- First, default one, growing up from offset 0. -- Second, "upper" one, growing down from the end towards lower offsets. - -To make allocation from the upper stack, add flag #VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT -to VmaAllocationCreateInfo::flags. - -![Double stack](../gfx/Linear_allocator_7_double_stack.png) - -Double stack is available only in pools with one memory block - -VmaPoolCreateInfo::maxBlockCount must be 1. Otherwise behavior is undefined. - -When the two stacks' ends meet so there is not enough space between them for a -new allocation, such allocation fails with usual -`VK_ERROR_OUT_OF_DEVICE_MEMORY` error. - -\subsection linear_algorithm_ring_buffer Ring buffer - -When you free some allocations from the beginning and there is not enough free space -for a new one at the end of a pool, allocator's "cursor" wraps around to the -beginning and starts allocation there. Thanks to this, if you always release -allocations in the same order as you created them (FIFO - First In First Out), -you can achieve behavior of a ring buffer / queue. - -![Ring buffer](../gfx/Linear_allocator_5_ring_buffer.png) - -Ring buffer is available only in pools with one memory block - -VmaPoolCreateInfo::maxBlockCount must be 1. Otherwise behavior is undefined. - -\note \ref defragmentation is not supported in custom pools created with #VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT. - - -\page defragmentation Defragmentation - -Interleaved allocations and deallocations of many objects of varying size can -cause fragmentation over time, which can lead to a situation where the library is unable -to find a continuous range of free memory for a new allocation despite there is -enough free space, just scattered across many small free ranges between existing -allocations. - -To mitigate this problem, you can use defragmentation feature. -It doesn't happen automatically though and needs your cooperation, -because VMA is a low level library that only allocates memory. -It cannot recreate buffers and images in a new place as it doesn't remember the contents of `VkBufferCreateInfo` / `VkImageCreateInfo` structures. -It cannot copy their contents as it doesn't record any commands to a command buffer. - -Example: - -\code -VmaDefragmentationInfo defragInfo = {}; -defragInfo.pool = myPool; -defragInfo.flags = VMA_DEFRAGMENTATION_FLAG_ALGORITHM_FAST_BIT; - -VmaDefragmentationContext defragCtx; -VkResult res = vmaBeginDefragmentation(allocator, &defragInfo, &defragCtx); -// Check res... - -for(;;) -{ - VmaDefragmentationPassMoveInfo pass; - res = vmaBeginDefragmentationPass(allocator, defragCtx, &pass); - if(res == VK_SUCCESS) - break; - else if(res != VK_INCOMPLETE) - // Handle error... - - for(uint32_t i = 0; i < pass.moveCount; ++i) - { - // Inspect pass.pMoves[i].srcAllocation, identify what buffer/image it represents. 
-        VmaAllocationInfo allocInfo;
-        vmaGetAllocationInfo(allocator, pass.pMoves[i].srcAllocation, &allocInfo);
-        MyEngineResourceData* resData = (MyEngineResourceData*)allocInfo.pUserData;
-
-        // Recreate and bind this buffer/image at: pass.pMoves[i].dstMemory, pass.pMoves[i].dstOffset.
-        VkImageCreateInfo imgCreateInfo = ...
-        VkImage newImg;
-        res = vkCreateImage(device, &imgCreateInfo, nullptr, &newImg);
-        // Check res...
-        res = vmaBindImageMemory(allocator, pass.pMoves[i].dstTmpAllocation, newImg);
-        // Check res...
-
-        // Issue a vkCmdCopyBuffer/vkCmdCopyImage to copy its content to the new place.
-        vkCmdCopyImage(cmdBuf, resData->img, ..., newImg, ...);
-    }
-
-    // Make sure the copy commands finished executing.
-    vkWaitForFences(...);
-
-    // Destroy old buffers/images bound with pass.pMoves[i].srcAllocation.
-    for(uint32_t i = 0; i < pass.moveCount; ++i)
-    {
-        // ...
-        vkDestroyImage(device, resData->img, nullptr);
-    }
-
-    // Update appropriate descriptors to point to the new places...
-
-    res = vmaEndDefragmentationPass(allocator, defragCtx, &pass);
-    if(res == VK_SUCCESS)
-        break;
-    else if(res != VK_INCOMPLETE)
-        // Handle error...
-}
-
-vmaEndDefragmentation(allocator, defragCtx, nullptr);
-\endcode
-
-Although functions like vmaCreateBuffer(), vmaCreateImage(), vmaDestroyBuffer(), vmaDestroyImage()
-create/destroy an allocation and a buffer/image at once, these are just a shortcut for
-creating the resource, allocating memory, and binding them together.
-Defragmentation works on memory allocations only. You must handle the rest manually.
-Defragmentation is an iterative process that should repeat "passes" as long as related functions
-return `VK_INCOMPLETE`, not `VK_SUCCESS`.
-In each pass:
-
-1. vmaBeginDefragmentationPass() function call:
-   - Calculates and returns the list of allocations to be moved in this pass.
-     Note this can be a time-consuming process.
-   - Reserves destination memory for them by creating temporary destination allocations
-     that you can query for their `VkDeviceMemory` + offset using vmaGetAllocationInfo().
-2. Inside the pass, **you should**:
-   - Inspect the returned list of allocations to be moved.
-   - Create new buffers/images and bind them at the returned destination temporary allocations.
-   - Copy data from source to destination resources if necessary.
-   - Destroy the source buffers/images, but NOT their allocations.
-3. vmaEndDefragmentationPass() function call:
-   - Frees the source memory reserved for the allocations that are moved.
-   - Modifies source #VmaAllocation objects that are moved to point to the destination reserved memory.
-   - Frees `VkDeviceMemory` blocks that became empty.
-
-Unlike in previous iterations of the defragmentation API, there is no list of "movable" allocations passed as a parameter.
-The defragmentation algorithm tries to move all suitable allocations.
-You can, however, refuse to move some of them inside a defragmentation pass, by setting
-`pass.pMoves[i].operation` to #VMA_DEFRAGMENTATION_MOVE_OPERATION_IGNORE.
-This is not recommended and may result in suboptimal packing of the allocations after defragmentation.
-If you cannot ensure any allocation can be moved, it is better to keep movable allocations separate in a custom pool.
-
-Inside a pass, for each allocation that should be moved:
-
-- You should copy its data from the source to the destination place by calling e.g. `vkCmdCopyBuffer()`, `vkCmdCopyImage()`.
- - You need to make sure these commands finished executing before destroying the source buffers/images and before calling vmaEndDefragmentationPass(). -- If a resource doesn't contain any meaningful data, e.g. it is a transient color attachment image to be cleared, - filled, and used temporarily in each rendering frame, you can just recreate this image - without copying its data. -- If the resource is in `HOST_VISIBLE` and `HOST_CACHED` memory, you can copy its data on the CPU - using `memcpy()`. -- If you cannot move the allocation, you can set `pass.pMoves[i].operation` to #VMA_DEFRAGMENTATION_MOVE_OPERATION_IGNORE. - This will cancel the move. - - vmaEndDefragmentationPass() will then free the destination memory - not the source memory of the allocation, leaving it unchanged. -- If you decide the allocation is unimportant and can be destroyed instead of moved (e.g. it wasn't used for long time), - you can set `pass.pMoves[i].operation` to #VMA_DEFRAGMENTATION_MOVE_OPERATION_DESTROY. - - vmaEndDefragmentationPass() will then free both source and destination memory, and will destroy the source #VmaAllocation object. - -You can defragment a specific custom pool by setting VmaDefragmentationInfo::pool -(like in the example above) or all the default pools by setting this member to null. - -Defragmentation is always performed in each pool separately. -Allocations are never moved between different Vulkan memory types. -The size of the destination memory reserved for a moved allocation is the same as the original one. -Alignment of an allocation as it was determined using `vkGetBufferMemoryRequirements()` etc. is also respected after defragmentation. -Buffers/images should be recreated with the same `VkBufferCreateInfo` / `VkImageCreateInfo` parameters as the original ones. - -You can perform the defragmentation incrementally to limit the number of allocations and bytes to be moved -in each pass, e.g. to call it in sync with render frames and not to experience too big hitches. -See members: VmaDefragmentationInfo::maxBytesPerPass, VmaDefragmentationInfo::maxAllocationsPerPass. - -It is also safe to perform the defragmentation asynchronously to render frames and other Vulkan and VMA -usage, possibly from multiple threads, with the exception that allocations -returned in VmaDefragmentationPassMoveInfo::pMoves shouldn't be destroyed until the defragmentation pass is ended. - -Mapping is preserved on allocations that are moved during defragmentation. -Whether through #VMA_ALLOCATION_CREATE_MAPPED_BIT or vmaMapMemory(), the allocations -are mapped at their new place. Of course, pointer to the mapped data changes, so it needs to be queried -using VmaAllocationInfo::pMappedData. - -\note Defragmentation is not supported in custom pools created with #VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT. - - -\page statistics Statistics - -This library contains several functions that return information about its internal state, -especially the amount of memory allocated from Vulkan. - -\section statistics_numeric_statistics Numeric statistics - -If you need to obtain basic statistics about memory usage per heap, together with current budget, -you can call function vmaGetHeapBudgets() and inspect structure #VmaBudget. -This is useful to keep track of memory usage and stay withing budget -(see also \ref staying_within_budget). -Example: - -\code -uint32_t heapIndex = ... 
-
-VmaBudget budgets[VK_MAX_MEMORY_HEAPS];
-vmaGetHeapBudgets(allocator, budgets);
-
-printf("My heap currently has %u allocations taking %llu B,\n",
-    budgets[heapIndex].statistics.allocationCount,
-    budgets[heapIndex].statistics.allocationBytes);
-printf("allocated out of %u Vulkan device memory blocks taking %llu B,\n",
-    budgets[heapIndex].statistics.blockCount,
-    budgets[heapIndex].statistics.blockBytes);
-printf("Vulkan reports total usage %llu B with budget %llu B.\n",
-    budgets[heapIndex].usage,
-    budgets[heapIndex].budget);
-\endcode
-
-You can query for more detailed statistics per memory heap, type, and totals,
-including minimum and maximum allocation size and unused range size,
-by calling function vmaCalculateStatistics() and inspecting structure #VmaTotalStatistics.
-This function is slower though, as it has to traverse all the internal data structures,
-so it should be used only for debugging purposes.
-
-You can query for statistics of a custom pool using function vmaGetPoolStatistics()
-or vmaCalculatePoolStatistics().
-
-You can query for information about a specific allocation using function vmaGetAllocationInfo().
-It fills the structure #VmaAllocationInfo.
-
-\section statistics_json_dump JSON dump
-
-You can dump internal state of the allocator to a string in JSON format using function vmaBuildStatsString().
-The result is guaranteed to be correct JSON.
-It uses ANSI encoding.
-Any strings provided by the user (see [Allocation names](@ref allocation_names))
-are copied as-is and properly escaped for JSON, so if they use UTF-8, ISO-8859-2 or any other encoding,
-this JSON string can be treated as using this encoding.
-It must be freed using function vmaFreeStatsString().
-
-The format of this JSON string is not part of official documentation of the library,
-but it will not change in a backward-incompatible way without increasing the library major version number
-and appropriate mention in the changelog.
-
-The JSON string contains all the data that can be obtained using vmaCalculateStatistics().
-It can also contain a detailed map of allocated memory blocks and their regions -
-free and occupied by allocations.
-This allows e.g. to visualize the memory or assess fragmentation.
-
-
-\page allocation_annotation Allocation names and user data
-
-\section allocation_user_data Allocation user data
-
-You can annotate allocations with your own information, e.g. for debugging purposes.
-To do that, fill VmaAllocationCreateInfo::pUserData field when creating
-an allocation. It is an opaque `void*` pointer. You can use it e.g. as a pointer,
-some handle, index, key, ordinal number or any other value that would associate
-the allocation with your custom metadata.
-It is useful to identify appropriate data structures in your engine given #VmaAllocation,
-e.g. when doing \ref defragmentation.
-
-\code
-VkBufferCreateInfo bufCreateInfo = ...
-
-MyBufferMetadata* pMetadata = CreateBufferMetadata();
-
-VmaAllocationCreateInfo allocCreateInfo = {};
-allocCreateInfo.usage = VMA_MEMORY_USAGE_AUTO;
-allocCreateInfo.pUserData = pMetadata;
-
-VkBuffer buffer;
-VmaAllocation allocation;
-vmaCreateBuffer(allocator, &bufCreateInfo, &allocCreateInfo, &buffer, &allocation, nullptr);
-\endcode
-
-The pointer may be later retrieved as VmaAllocationInfo::pUserData:
-
-\code
-VmaAllocationInfo allocInfo;
-vmaGetAllocationInfo(allocator, allocation, &allocInfo);
-MyBufferMetadata* pMetadata = (MyBufferMetadata*)allocInfo.pUserData;
-\endcode
-
-It can also be changed using function vmaSetAllocationUserData().
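-
-For example, assuming the `allocation` and the metadata object from the snippet above, the pointer
-could later be replaced like this (a sketch; `pNewMetadata` is a hypothetical second metadata object
-created with the same helper):
-
-\code
-MyBufferMetadata* pNewMetadata = CreateBufferMetadata();
-vmaSetAllocationUserData(allocator, allocation, pNewMetadata);
-\endcode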
- -Values of (non-zero) allocations' `pUserData` are printed in JSON report created by -vmaBuildStatsString() in hexadecimal form. - -\section allocation_names Allocation names - -An allocation can also carry a null-terminated string, giving a name to the allocation. -To set it, call vmaSetAllocationName(). -The library creates internal copy of the string, so the pointer you pass doesn't need -to be valid for whole lifetime of the allocation. You can free it after the call. - -\code -std::string imageName = "Texture: "; -imageName += fileName; -vmaSetAllocationName(allocator, allocation, imageName.c_str()); -\endcode - -The string can be later retrieved by inspecting VmaAllocationInfo::pName. -It is also printed in JSON report created by vmaBuildStatsString(). - -\note Setting string name to VMA allocation doesn't automatically set it to the Vulkan buffer or image created with it. -You must do it manually using an extension like VK_EXT_debug_utils, which is independent of this library. - - -\page virtual_allocator Virtual allocator - -As an extra feature, the core allocation algorithm of the library is exposed through a simple and convenient API of "virtual allocator". -It doesn't allocate any real GPU memory. It just keeps track of used and free regions of a "virtual block". -You can use it to allocate your own memory or other objects, even completely unrelated to Vulkan. -A common use case is sub-allocation of pieces of one large GPU buffer. - -\section virtual_allocator_creating_virtual_block Creating virtual block - -To use this functionality, there is no main "allocator" object. -You don't need to have #VmaAllocator object created. -All you need to do is to create a separate #VmaVirtualBlock object for each block of memory you want to be managed by the allocator: - --# Fill in #VmaVirtualBlockCreateInfo structure. --# Call vmaCreateVirtualBlock(). Get new #VmaVirtualBlock object. - -Example: - -\code -VmaVirtualBlockCreateInfo blockCreateInfo = {}; -blockCreateInfo.size = 1048576; // 1 MB - -VmaVirtualBlock block; -VkResult res = vmaCreateVirtualBlock(&blockCreateInfo, &block); -\endcode - -\section virtual_allocator_making_virtual_allocations Making virtual allocations - -#VmaVirtualBlock object contains internal data structure that keeps track of free and occupied regions -using the same code as the main Vulkan memory allocator. -Similarly to #VmaAllocation for standard GPU allocations, there is #VmaVirtualAllocation type -that represents an opaque handle to an allocation withing the virtual block. - -In order to make such allocation: - --# Fill in #VmaVirtualAllocationCreateInfo structure. --# Call vmaVirtualAllocate(). Get new #VmaVirtualAllocation object that represents the allocation. - You can also receive `VkDeviceSize offset` that was assigned to the allocation. - -Example: - -\code -VmaVirtualAllocationCreateInfo allocCreateInfo = {}; -allocCreateInfo.size = 4096; // 4 KB - -VmaVirtualAllocation alloc; -VkDeviceSize offset; -res = vmaVirtualAllocate(block, &allocCreateInfo, &alloc, &offset); -if(res == VK_SUCCESS) -{ - // Use the 4 KB of your memory starting at offset. -} -else -{ - // Allocation failed - no space for it could be found. Handle this error! -} -\endcode - -\section virtual_allocator_deallocation Deallocation - -When no longer needed, an allocation can be freed by calling vmaVirtualFree(). -You can only pass to this function an allocation that was previously returned by vmaVirtualAllocate() -called for the same #VmaVirtualBlock. 
- -When whole block is no longer needed, the block object can be released by calling vmaDestroyVirtualBlock(). -All allocations must be freed before the block is destroyed, which is checked internally by an assert. -However, if you don't want to call vmaVirtualFree() for each allocation, you can use vmaClearVirtualBlock() to free them all at once - -a feature not available in normal Vulkan memory allocator. Example: - -\code -vmaVirtualFree(block, alloc); -vmaDestroyVirtualBlock(block); -\endcode - -\section virtual_allocator_allocation_parameters Allocation parameters - -You can attach a custom pointer to each allocation by using vmaSetVirtualAllocationUserData(). -Its default value is null. -It can be used to store any data that needs to be associated with that allocation - e.g. an index, a handle, or a pointer to some -larger data structure containing more information. Example: - -\code -struct CustomAllocData -{ - std::string m_AllocName; -}; -CustomAllocData* allocData = new CustomAllocData(); -allocData->m_AllocName = "My allocation 1"; -vmaSetVirtualAllocationUserData(block, alloc, allocData); -\endcode - -The pointer can later be fetched, along with allocation offset and size, by passing the allocation handle to function -vmaGetVirtualAllocationInfo() and inspecting returned structure #VmaVirtualAllocationInfo. -If you allocated a new object to be used as the custom pointer, don't forget to delete that object before freeing the allocation! -Example: - -\code -VmaVirtualAllocationInfo allocInfo; -vmaGetVirtualAllocationInfo(block, alloc, &allocInfo); -delete (CustomAllocData*)allocInfo.pUserData; - -vmaVirtualFree(block, alloc); -\endcode - -\section virtual_allocator_alignment_and_units Alignment and units - -It feels natural to express sizes and offsets in bytes. -If an offset of an allocation needs to be aligned to a multiply of some number (e.g. 4 bytes), you can fill optional member -VmaVirtualAllocationCreateInfo::alignment to request it. Example: - -\code -VmaVirtualAllocationCreateInfo allocCreateInfo = {}; -allocCreateInfo.size = 4096; // 4 KB -allocCreateInfo.alignment = 4; // Returned offset must be a multiply of 4 B - -VmaVirtualAllocation alloc; -res = vmaVirtualAllocate(block, &allocCreateInfo, &alloc, nullptr); -\endcode - -Alignments of different allocations made from one block may vary. -However, if all alignments and sizes are always multiply of some size e.g. 4 B or `sizeof(MyDataStruct)`, -you can express all sizes, alignments, and offsets in multiples of that size instead of individual bytes. -It might be more convenient, but you need to make sure to use this new unit consistently in all the places: - -- VmaVirtualBlockCreateInfo::size -- VmaVirtualAllocationCreateInfo::size and VmaVirtualAllocationCreateInfo::alignment -- Using offset returned by vmaVirtualAllocate() or in VmaVirtualAllocationInfo::offset - -\section virtual_allocator_statistics Statistics - -You can obtain statistics of a virtual block using vmaGetVirtualBlockStatistics() -(to get brief statistics that are fast to calculate) -or vmaCalculateVirtualBlockStatistics() (to get more detailed statistics, slower to calculate). -The functions fill structures #VmaStatistics, #VmaDetailedStatistics respectively - same as used by the normal Vulkan memory allocator. 
-Example: - -\code -VmaStatistics stats; -vmaGetVirtualBlockStatistics(block, &stats); -printf("My virtual block has %llu bytes used by %u virtual allocations\n", - stats.allocationBytes, stats.allocationCount); -\endcode - -You can also request a full list of allocations and free regions as a string in JSON format by calling -vmaBuildVirtualBlockStatsString(). -Returned string must be later freed using vmaFreeVirtualBlockStatsString(). -The format of this string differs from the one returned by the main Vulkan allocator, but it is similar. - -\section virtual_allocator_additional_considerations Additional considerations - -The "virtual allocator" functionality is implemented on a level of individual memory blocks. -Keeping track of a whole collection of blocks, allocating new ones when out of free space, -deleting empty ones, and deciding which one to try first for a new allocation must be implemented by the user. - -Alternative allocation algorithms are supported, just like in custom pools of the real GPU memory. -See enum #VmaVirtualBlockCreateFlagBits to learn how to specify them (e.g. #VMA_VIRTUAL_BLOCK_CREATE_LINEAR_ALGORITHM_BIT). -You can find their description in chapter \ref custom_memory_pools. -Allocation strategies are also supported. -See enum #VmaVirtualAllocationCreateFlagBits to learn how to specify them (e.g. #VMA_VIRTUAL_ALLOCATION_CREATE_STRATEGY_MIN_TIME_BIT). - -Following features are supported only by the allocator of the real GPU memory and not by virtual allocations: -buffer-image granularity, `VMA_DEBUG_MARGIN`, `VMA_MIN_ALIGNMENT`. - - -\page debugging_memory_usage Debugging incorrect memory usage - -If you suspect a bug with memory usage, like usage of uninitialized memory or -memory being overwritten out of bounds of an allocation, -you can use debug features of this library to verify this. - -\section debugging_memory_usage_initialization Memory initialization - -If you experience a bug with incorrect and nondeterministic data in your program and you suspect uninitialized memory to be used, -you can enable automatic memory initialization to verify this. -To do it, define macro `VMA_DEBUG_INITIALIZE_ALLOCATIONS` to 1. - -\code -#define VMA_DEBUG_INITIALIZE_ALLOCATIONS 1 -#include "vk_mem_alloc.h" -\endcode - -It makes memory of new allocations initialized to bit pattern `0xDCDCDCDC`. -Before an allocation is destroyed, its memory is filled with bit pattern `0xEFEFEFEF`. -Memory is automatically mapped and unmapped if necessary. - -If you find these values while debugging your program, good chances are that you incorrectly -read Vulkan memory that is allocated but not initialized, or already freed, respectively. - -Memory initialization works only with memory types that are `HOST_VISIBLE` and with allocations that can be mapped. -It works also with dedicated allocations. - -\section debugging_memory_usage_margins Margins - -By default, allocations are laid out in memory blocks next to each other if possible -(considering required alignment, `bufferImageGranularity`, and `nonCoherentAtomSize`). - -![Allocations without margin](../gfx/Margins_1.png) - -Define macro `VMA_DEBUG_MARGIN` to some non-zero value (e.g. 16) to enforce specified -number of bytes as a margin after every allocation. - -\code -#define VMA_DEBUG_MARGIN 16 -#include "vk_mem_alloc.h" -\endcode - -![Allocations with margin](../gfx/Margins_2.png) - -If your bug goes away after enabling margins, it means it may be caused by memory -being overwritten outside of allocation boundaries. 
It is not 100% certain though. -Change in application behavior may also be caused by different order and distribution -of allocations across memory blocks after margins are applied. - -Margins work with all types of memory. - -Margin is applied only to allocations made out of memory blocks and not to dedicated -allocations, which have their own memory block of specific size. -It is thus not applied to allocations made using #VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT flag -or those automatically decided to put into dedicated allocations, e.g. due to its -large size or recommended by VK_KHR_dedicated_allocation extension. - -Margins appear in [JSON dump](@ref statistics_json_dump) as part of free space. - -Note that enabling margins increases memory usage and fragmentation. - -Margins do not apply to \ref virtual_allocator. - -\section debugging_memory_usage_corruption_detection Corruption detection - -You can additionally define macro `VMA_DEBUG_DETECT_CORRUPTION` to 1 to enable validation -of contents of the margins. - -\code -#define VMA_DEBUG_MARGIN 16 -#define VMA_DEBUG_DETECT_CORRUPTION 1 -#include "vk_mem_alloc.h" -\endcode - -When this feature is enabled, number of bytes specified as `VMA_DEBUG_MARGIN` -(it must be multiply of 4) after every allocation is filled with a magic number. -This idea is also know as "canary". -Memory is automatically mapped and unmapped if necessary. - -This number is validated automatically when the allocation is destroyed. -If it is not equal to the expected value, `VMA_ASSERT()` is executed. -It clearly means that either CPU or GPU overwritten the memory outside of boundaries of the allocation, -which indicates a serious bug. - -You can also explicitly request checking margins of all allocations in all memory blocks -that belong to specified memory types by using function vmaCheckCorruption(), -or in memory blocks that belong to specified custom pool, by using function -vmaCheckPoolCorruption(). - -Margin validation (corruption detection) works only for memory types that are -`HOST_VISIBLE` and `HOST_COHERENT`. - - -\page opengl_interop OpenGL Interop - -VMA provides some features that help with interoperability with OpenGL. - -\section opengl_interop_exporting_memory Exporting memory - -If you want to attach `VkExportMemoryAllocateInfoKHR` structure to `pNext` chain of memory allocations made by the library: - -It is recommended to create \ref custom_memory_pools for such allocations. -Define and fill in your `VkExportMemoryAllocateInfoKHR` structure and attach it to VmaPoolCreateInfo::pMemoryAllocateNext -while creating the custom pool. -Please note that the structure must remain alive and unchanged for the whole lifetime of the #VmaPool, -not only while creating it, as no copy of the structure is made, -but its original pointer is used for each allocation instead. - -If you want to export all memory allocated by the library from certain memory types, -also dedicated allocations or other allocations made from default pools, -an alternative solution is to fill in VmaAllocatorCreateInfo::pTypeExternalMemoryHandleTypes. -It should point to an array with `VkExternalMemoryHandleTypeFlagsKHR` to be automatically passed by the library -through `VkExportMemoryAllocateInfoKHR` on each allocation made from a specific memory type. -Please note that new versions of the library also support dedicated allocations created in custom pools. - -You should not mix these two methods in a way that allows to apply both to the same memory type. 
-Otherwise, `VkExportMemoryAllocateInfoKHR` structure would be attached twice to the `pNext` chain of `VkMemoryAllocateInfo`. - - -\section opengl_interop_custom_alignment Custom alignment - -Buffers or images exported to a different API like OpenGL may require a different alignment, -higher than the one used by the library automatically, queried from functions like `vkGetBufferMemoryRequirements`. -To impose such alignment: - -It is recommended to create \ref custom_memory_pools for such allocations. -Set VmaPoolCreateInfo::minAllocationAlignment member to the minimum alignment required for each allocation -to be made out of this pool. -The alignment actually used will be the maximum of this member and the alignment returned for the specific buffer or image -from a function like `vkGetBufferMemoryRequirements`, which is called by VMA automatically. - -If you want to create a buffer with a specific minimum alignment out of default pools, -use special function vmaCreateBufferWithAlignment(), which takes additional parameter `minAlignment`. - -Note the problem of alignment affects only resources placed inside bigger `VkDeviceMemory` blocks and not dedicated -allocations, as these, by definition, always have alignment = 0 because the resource is bound to the beginning of its dedicated block. -Contrary to Direct3D 12, Vulkan doesn't have a concept of alignment of the entire memory block passed on its allocation. - - -\page usage_patterns Recommended usage patterns - -Vulkan gives great flexibility in memory allocation. -This chapter shows the most common patterns. - -See also slides from talk: -[Sawicki, Adam. Advanced Graphics Techniques Tutorial: Memory management in Vulkan and DX12. Game Developers Conference, 2018](https://www.gdcvault.com/play/1025458/Advanced-Graphics-Techniques-Tutorial-New) - - -\section usage_patterns_gpu_only GPU-only resource - -When: -Any resources that you frequently write and read on GPU, -e.g. images used as color attachments (aka "render targets"), depth-stencil attachments, -images/buffers used as storage image/buffer (aka "Unordered Access View (UAV)"). - -What to do: -Let the library select the optimal memory type, which will likely have `VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT`. - -\code -VkImageCreateInfo imgCreateInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO }; -imgCreateInfo.imageType = VK_IMAGE_TYPE_2D; -imgCreateInfo.extent.width = 3840; -imgCreateInfo.extent.height = 2160; -imgCreateInfo.extent.depth = 1; -imgCreateInfo.mipLevels = 1; -imgCreateInfo.arrayLayers = 1; -imgCreateInfo.format = VK_FORMAT_R8G8B8A8_UNORM; -imgCreateInfo.tiling = VK_IMAGE_TILING_OPTIMAL; -imgCreateInfo.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED; -imgCreateInfo.usage = VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT; -imgCreateInfo.samples = VK_SAMPLE_COUNT_1_BIT; - -VmaAllocationCreateInfo allocCreateInfo = {}; -allocCreateInfo.usage = VMA_MEMORY_USAGE_AUTO; -allocCreateInfo.flags = VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT; -allocCreateInfo.priority = 1.0f; - -VkImage img; -VmaAllocation alloc; -vmaCreateImage(allocator, &imgCreateInfo, &allocCreateInfo, &img, &alloc, nullptr); -\endcode - -Also consider: -Consider creating them as dedicated allocations using #VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT, -especially if they are large or if you plan to destroy and recreate them with different sizes -e.g. when display resolution changes. -Prefer to create such resources first and all other GPU resources (like textures and vertex buffers) later. 
-
-When VK_EXT_memory_priority extension is enabled, it is also worth setting a high priority on such an allocation
-to decrease its chances of being evicted to system memory by the operating system.
-
-\section usage_patterns_staging_copy_upload Staging copy for upload
-
-When:
-A "staging" buffer that you want to map and fill from CPU code, then use as a source of transfer
-to some GPU resource.
-
-What to do:
-Use flag #VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT.
-Let the library select the optimal memory type, which will always have `VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT`.
-
-\code
-VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
-bufCreateInfo.size = 65536;
-bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
-
-VmaAllocationCreateInfo allocCreateInfo = {};
-allocCreateInfo.usage = VMA_MEMORY_USAGE_AUTO;
-allocCreateInfo.flags = VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT |
-    VMA_ALLOCATION_CREATE_MAPPED_BIT;
-
-VkBuffer buf;
-VmaAllocation alloc;
-VmaAllocationInfo allocInfo;
-vmaCreateBuffer(allocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
-
-...
-
-memcpy(allocInfo.pMappedData, myData, myDataSize);
-\endcode
-
-Also consider:
-You can map the allocation using vmaMapMemory() or you can create it as persistently mapped
-using #VMA_ALLOCATION_CREATE_MAPPED_BIT, as in the example above.
-
-
-\section usage_patterns_readback Readback
-
-When:
-Buffers for data written by or transferred from the GPU that you want to read back on the CPU,
-e.g. results of some computations.
-
-What to do:
-Use flag #VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT.
-Let the library select the optimal memory type, which will always have `VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT`
-and `VK_MEMORY_PROPERTY_HOST_CACHED_BIT`.
-
-\code
-VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
-bufCreateInfo.size = 65536;
-bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT;
-
-VmaAllocationCreateInfo allocCreateInfo = {};
-allocCreateInfo.usage = VMA_MEMORY_USAGE_AUTO;
-allocCreateInfo.flags = VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT |
-    VMA_ALLOCATION_CREATE_MAPPED_BIT;
-
-VkBuffer buf;
-VmaAllocation alloc;
-VmaAllocationInfo allocInfo;
-vmaCreateBuffer(allocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
-
-...
-
-const float* downloadedData = (const float*)allocInfo.pMappedData;
-\endcode
-
-
-\section usage_patterns_advanced_data_uploading Advanced data uploading
-
-For resources that you frequently write on CPU via mapped pointer and
-frequently read on GPU e.g. as a uniform buffer (also called "dynamic"), multiple options are possible:
-
--# Easiest solution is to have one copy of the resource in `HOST_VISIBLE` memory,
-  even if it means system RAM (not `DEVICE_LOCAL`) on systems with a discrete graphics card,
-  and make the device reach out to that resource directly.
-  - Reads performed by the device will then go through PCI Express bus.
-    The performance of this access may be limited, but it may be fine depending on the size
-    of this resource (whether it is small enough to quickly end up in GPU cache) and the sparsity
-    of access.
--# On systems with unified memory (e.g. AMD APU or Intel integrated graphics, mobile chips),
-  a memory type may be available that is both `HOST_VISIBLE` (available for mapping) and `DEVICE_LOCAL`
-  (fast to access from the GPU). Then, it is likely the best choice for such type of resource.
--# Systems with a discrete graphics card and separate video memory may or may not expose
-  a memory type that is both `HOST_VISIBLE` and `DEVICE_LOCAL`, also known as Base Address Register (BAR).
-  If they do, it represents a piece of VRAM (or entire VRAM, if ReBAR is enabled in the motherboard BIOS)
-  that is available to the CPU for mapping.
-  - Writes performed by the host to that memory go through PCI Express bus.
-    The performance of these writes may be limited, but it may be fine, especially on PCIe 4.0,
-    as long as rules of using uncached and write-combined memory are followed - only sequential writes and no reads.
--# Finally, you may need or prefer to create a separate copy of the resource in `DEVICE_LOCAL` memory,
-  a separate "staging" copy in `HOST_VISIBLE` memory and perform an explicit transfer command between them.
-
-Thankfully, VMA offers an aid to create and use such resources in the way optimal
-for the current Vulkan device. To help the library make the best choice,
-use flag #VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT together with
-#VMA_ALLOCATION_CREATE_HOST_ACCESS_ALLOW_TRANSFER_INSTEAD_BIT.
-It will then prefer a memory type that is both `DEVICE_LOCAL` and `HOST_VISIBLE` (integrated memory or BAR),
-but if no such memory type is available or allocation from it fails
-(PC graphics cards have only 256 MB of BAR by default, unless ReBAR is supported and enabled in BIOS),
-it will fall back to `DEVICE_LOCAL` memory for fast GPU access.
-It is then up to you to detect that the allocation ended up in a memory type that is not `HOST_VISIBLE`,
-so you need to create another "staging" allocation and perform explicit transfers.
-
-\code
-VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
-bufCreateInfo.size = 65536;
-bufCreateInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
-
-VmaAllocationCreateInfo allocCreateInfo = {};
-allocCreateInfo.usage = VMA_MEMORY_USAGE_AUTO;
-allocCreateInfo.flags = VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT |
-    VMA_ALLOCATION_CREATE_HOST_ACCESS_ALLOW_TRANSFER_INSTEAD_BIT |
-    VMA_ALLOCATION_CREATE_MAPPED_BIT;
-
-VkBuffer buf;
-VmaAllocation alloc;
-VmaAllocationInfo allocInfo;
-vmaCreateBuffer(allocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
-
-VkMemoryPropertyFlags memPropFlags;
-vmaGetAllocationMemoryProperties(allocator, alloc, &memPropFlags);
-
-if(memPropFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT)
-{
-    // Allocation ended up in a mappable memory and is already mapped - write to it directly.
-
-    // [Executed in runtime]:
-    memcpy(allocInfo.pMappedData, myData, myDataSize);
-}
-else
-{
-    // Allocation ended up in a non-mappable memory - need to transfer.
-    VkBufferCreateInfo stagingBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
-    stagingBufCreateInfo.size = 65536;
-    stagingBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
-
-    VmaAllocationCreateInfo stagingAllocCreateInfo = {};
-    stagingAllocCreateInfo.usage = VMA_MEMORY_USAGE_AUTO;
-    stagingAllocCreateInfo.flags = VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT |
-        VMA_ALLOCATION_CREATE_MAPPED_BIT;
-
-    VkBuffer stagingBuf;
-    VmaAllocation stagingAlloc;
-    VmaAllocationInfo stagingAllocInfo;
-    vmaCreateBuffer(allocator, &stagingBufCreateInfo, &stagingAllocCreateInfo,
-        &stagingBuf, &stagingAlloc, &stagingAllocInfo);
-
-    // [Executed in runtime]:
-    memcpy(stagingAllocInfo.pMappedData, myData, myDataSize);
-    //vkCmdPipelineBarrier: VK_ACCESS_HOST_WRITE_BIT --> VK_ACCESS_TRANSFER_READ_BIT
-    VkBufferCopy bufCopy = {
-        0, // srcOffset
-        0, // dstOffset
-        myDataSize }; // size
-    vkCmdCopyBuffer(cmdBuf, stagingBuf, buf, 1, &bufCopy);
-}
-\endcode
-
-\section usage_patterns_other_use_cases Other use cases
-
-Here are some other, less obvious use cases and their recommended settings:
-
-- An image that is used only as transfer source and destination, but it should stay on the device,
-  as it is used to temporarily store a copy of some texture, e.g. from the current to the next frame,
-  for temporal antialiasing or other temporal effects.
-  - Use `VkImageCreateInfo::usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT`
-  - Use VmaAllocationCreateInfo::usage = #VMA_MEMORY_USAGE_AUTO
-- An image that is used only as transfer source and destination, but it should be placed
-  in the system RAM even though it doesn't need to be mapped, because it serves as a "swap" copy to evict
-  least recently used textures from VRAM.
-  - Use `VkImageCreateInfo::usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT`
-  - Use VmaAllocationCreateInfo::usage = #VMA_MEMORY_USAGE_AUTO_PREFER_HOST,
-    as VMA needs a hint here to differentiate from the previous case.
-- A buffer that you want to map and write from the CPU and directly read from the GPU
-  (e.g. as a uniform or vertex buffer), but you have a clear preference to place it in device or
-  host memory due to its large size.
-  - Use `VkBufferCreateInfo::usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT`
-  - Use VmaAllocationCreateInfo::usage = #VMA_MEMORY_USAGE_AUTO_PREFER_DEVICE or #VMA_MEMORY_USAGE_AUTO_PREFER_HOST
-  - Use VmaAllocationCreateInfo::flags = #VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT
-
-
-\page configuration Configuration
-
-Please check "CONFIGURATION SECTION" in the code to find macros that you can define
-before each include of this file or change directly in this file to provide
-your own implementation of basic facilities like assert, `min()` and `max()` functions,
-mutex, atomic etc.
-The library uses its own implementation of containers by default, but you can switch to using
-STL containers instead.
-
-For example, define `VMA_ASSERT(expr)` before including the library to provide
-a custom implementation of the assertion, compatible with your project.
-By default it is defined to standard C `assert(expr)` in `_DEBUG` configuration
-and empty otherwise.
-
-\section config_Vulkan_functions Pointers to Vulkan functions
-
-There are multiple ways to import pointers to Vulkan functions in the library.
-In the simplest case you don't need to do anything.
-If the compilation or linking of your program or the initialization of the #VmaAllocator -doesn't work for you, you can try to reconfigure it. - -First, the allocator tries to fetch pointers to Vulkan functions linked statically, -like this: - -\code -m_VulkanFunctions.vkAllocateMemory = (PFN_vkAllocateMemory)vkAllocateMemory; -\endcode - -If you want to disable this feature, set configuration macro: `#define VMA_STATIC_VULKAN_FUNCTIONS 0`. - -Second, you can provide the pointers yourself by setting member VmaAllocatorCreateInfo::pVulkanFunctions. -You can fetch them e.g. using functions `vkGetInstanceProcAddr` and `vkGetDeviceProcAddr` or -by using a helper library like [volk](https://github.com/zeux/volk). - -Third, VMA tries to fetch remaining pointers that are still null by calling -`vkGetInstanceProcAddr` and `vkGetDeviceProcAddr` on its own. -You need to only fill in VmaVulkanFunctions::vkGetInstanceProcAddr and VmaVulkanFunctions::vkGetDeviceProcAddr. -Other pointers will be fetched automatically. -If you want to disable this feature, set configuration macro: `#define VMA_DYNAMIC_VULKAN_FUNCTIONS 0`. - -Finally, all the function pointers required by the library (considering selected -Vulkan version and enabled extensions) are checked with `VMA_ASSERT` if they are not null. - - -\section custom_memory_allocator Custom host memory allocator - -If you use custom allocator for CPU memory rather than default operator `new` -and `delete` from C++, you can make this library using your allocator as well -by filling optional member VmaAllocatorCreateInfo::pAllocationCallbacks. These -functions will be passed to Vulkan, as well as used by the library itself to -make any CPU-side allocations. - -\section allocation_callbacks Device memory allocation callbacks - -The library makes calls to `vkAllocateMemory()` and `vkFreeMemory()` internally. -You can setup callbacks to be informed about these calls, e.g. for the purpose -of gathering some statistics. To do it, fill optional member -VmaAllocatorCreateInfo::pDeviceMemoryCallbacks. - -\section heap_memory_limit Device heap memory limit - -When device memory of certain heap runs out of free space, new allocations may -fail (returning error code) or they may succeed, silently pushing some existing_ -memory blocks from GPU VRAM to system RAM (which degrades performance). This -behavior is implementation-dependent - it depends on GPU vendor and graphics -driver. - -On AMD cards it can be controlled while creating Vulkan device object by using -VK_AMD_memory_overallocation_behavior extension, if available. - -Alternatively, if you want to test how your program behaves with limited amount of Vulkan device -memory available without switching your graphics card to one that really has -smaller VRAM, you can use a feature of this library intended for this purpose. -To do it, fill optional member VmaAllocatorCreateInfo::pHeapSizeLimit. - - - -\page vk_khr_dedicated_allocation VK_KHR_dedicated_allocation - -VK_KHR_dedicated_allocation is a Vulkan extension which can be used to improve -performance on some GPUs. It augments Vulkan API with possibility to query -driver whether it prefers particular buffer or image to have its own, dedicated -allocation (separate `VkDeviceMemory` block) for better efficiency - to be able -to do some internal optimizations. The extension is supported by this library. -It will be used automatically when enabled. 
- -It has been promoted to core Vulkan 1.1, so if you use eligible Vulkan version -and inform VMA about it by setting VmaAllocatorCreateInfo::vulkanApiVersion, -you are all set. - -Otherwise, if you want to use it as an extension: - -1 . When creating Vulkan device, check if following 2 device extensions are -supported (call `vkEnumerateDeviceExtensionProperties()`). -If yes, enable them (fill `VkDeviceCreateInfo::ppEnabledExtensionNames`). - -- VK_KHR_get_memory_requirements2 -- VK_KHR_dedicated_allocation - -If you enabled these extensions: - -2 . Use #VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT flag when creating -your #VmaAllocator to inform the library that you enabled required extensions -and you want the library to use them. - -\code -allocatorInfo.flags |= VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT; - -vmaCreateAllocator(&allocatorInfo, &allocator); -\endcode - -That is all. The extension will be automatically used whenever you create a -buffer using vmaCreateBuffer() or image using vmaCreateImage(). - -When using the extension together with Vulkan Validation Layer, you will receive -warnings like this: - -_vkBindBufferMemory(): Binding memory to buffer 0x33 but vkGetBufferMemoryRequirements() has not been called on that buffer._ - -It is OK, you should just ignore it. It happens because you use function -`vkGetBufferMemoryRequirements2KHR()` instead of standard -`vkGetBufferMemoryRequirements()`, while the validation layer seems to be -unaware of it. - -To learn more about this extension, see: - -- [VK_KHR_dedicated_allocation in Vulkan specification](https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/chap50.html#VK_KHR_dedicated_allocation) -- [VK_KHR_dedicated_allocation unofficial manual](http://asawicki.info/articles/VK_KHR_dedicated_allocation.php5) - - - -\page vk_ext_memory_priority VK_EXT_memory_priority - -VK_EXT_memory_priority is a device extension that allows to pass additional "priority" -value to Vulkan memory allocations that the implementation may use prefer certain -buffers and images that are critical for performance to stay in device-local memory -in cases when the memory is over-subscribed, while some others may be moved to the system memory. - -VMA offers convenient usage of this extension. -If you enable it, you can pass "priority" parameter when creating allocations or custom pools -and the library automatically passes the value to Vulkan using this extension. - -If you want to use this extension in connection with VMA, follow these steps: - -\section vk_ext_memory_priority_initialization Initialization - -1) Call `vkEnumerateDeviceExtensionProperties` for the physical device. -Check if the extension is supported - if returned array of `VkExtensionProperties` contains "VK_EXT_memory_priority". - -2) Call `vkGetPhysicalDeviceFeatures2` for the physical device instead of old `vkGetPhysicalDeviceFeatures`. -Attach additional structure `VkPhysicalDeviceMemoryPriorityFeaturesEXT` to `VkPhysicalDeviceFeatures2::pNext` to be returned. -Check if the device feature is really supported - check if `VkPhysicalDeviceMemoryPriorityFeaturesEXT::memoryPriority` is true. - -3) While creating device with `vkCreateDevice`, enable this extension - add "VK_EXT_memory_priority" -to the list passed as `VkDeviceCreateInfo::ppEnabledExtensionNames`. - -4) While creating the device, also don't set `VkDeviceCreateInfo::pEnabledFeatures`. -Fill in `VkPhysicalDeviceFeatures2` structure instead and pass it as `VkDeviceCreateInfo::pNext`. 
-Enable this device feature - attach additional structure `VkPhysicalDeviceMemoryPriorityFeaturesEXT` to -`VkPhysicalDeviceFeatures2::pNext` chain and set its member `memoryPriority` to `VK_TRUE`. - -5) While creating #VmaAllocator with vmaCreateAllocator() inform VMA that you -have enabled this extension and feature - add #VMA_ALLOCATOR_CREATE_EXT_MEMORY_PRIORITY_BIT -to VmaAllocatorCreateInfo::flags. - -\section vk_ext_memory_priority_usage Usage - -When using this extension, you should initialize following member: - -- VmaAllocationCreateInfo::priority when creating a dedicated allocation with #VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT. -- VmaPoolCreateInfo::priority when creating a custom pool. - -It should be a floating-point value between `0.0f` and `1.0f`, where recommended default is `0.5f`. -Memory allocated with higher value can be treated by the Vulkan implementation as higher priority -and so it can have lower chances of being pushed out to system memory, experiencing degraded performance. - -It might be a good idea to create performance-critical resources like color-attachment or depth-stencil images -as dedicated and set high priority to them. For example: - -\code -VkImageCreateInfo imgCreateInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO }; -imgCreateInfo.imageType = VK_IMAGE_TYPE_2D; -imgCreateInfo.extent.width = 3840; -imgCreateInfo.extent.height = 2160; -imgCreateInfo.extent.depth = 1; -imgCreateInfo.mipLevels = 1; -imgCreateInfo.arrayLayers = 1; -imgCreateInfo.format = VK_FORMAT_R8G8B8A8_UNORM; -imgCreateInfo.tiling = VK_IMAGE_TILING_OPTIMAL; -imgCreateInfo.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED; -imgCreateInfo.usage = VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT; -imgCreateInfo.samples = VK_SAMPLE_COUNT_1_BIT; - -VmaAllocationCreateInfo allocCreateInfo = {}; -allocCreateInfo.usage = VMA_MEMORY_USAGE_AUTO; -allocCreateInfo.flags = VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT; -allocCreateInfo.priority = 1.0f; - -VkImage img; -VmaAllocation alloc; -vmaCreateImage(allocator, &imgCreateInfo, &allocCreateInfo, &img, &alloc, nullptr); -\endcode - -`priority` member is ignored in the following situations: - -- Allocations created in custom pools: They inherit the priority, along with all other allocation parameters - from the parametrs passed in #VmaPoolCreateInfo when the pool was created. -- Allocations created in default pools: They inherit the priority from the parameters - VMA used when creating default pools, which means `priority == 0.5f`. - - -\page vk_amd_device_coherent_memory VK_AMD_device_coherent_memory - -VK_AMD_device_coherent_memory is a device extension that enables access to -additional memory types with `VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD` and -`VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD` flag. It is useful mostly for -allocation of buffers intended for writing "breadcrumb markers" in between passes -or draw calls, which in turn are useful for debugging GPU crash/hang/TDR cases. - -When the extension is available but has not been enabled, Vulkan physical device -still exposes those memory types, but their usage is forbidden. VMA automatically -takes care of that - it returns `VK_ERROR_FEATURE_NOT_PRESENT` when an attempt -to allocate memory of such type is made. - -If you want to use this extension in connection with VMA, follow these steps: - -\section vk_amd_device_coherent_memory_initialization Initialization - -1) Call `vkEnumerateDeviceExtensionProperties` for the physical device. 
-Check if the extension is supported - if returned array of `VkExtensionProperties` contains "VK_AMD_device_coherent_memory". - -2) Call `vkGetPhysicalDeviceFeatures2` for the physical device instead of old `vkGetPhysicalDeviceFeatures`. -Attach additional structure `VkPhysicalDeviceCoherentMemoryFeaturesAMD` to `VkPhysicalDeviceFeatures2::pNext` to be returned. -Check if the device feature is really supported - check if `VkPhysicalDeviceCoherentMemoryFeaturesAMD::deviceCoherentMemory` is true. - -3) While creating device with `vkCreateDevice`, enable this extension - add "VK_AMD_device_coherent_memory" -to the list passed as `VkDeviceCreateInfo::ppEnabledExtensionNames`. - -4) While creating the device, also don't set `VkDeviceCreateInfo::pEnabledFeatures`. -Fill in `VkPhysicalDeviceFeatures2` structure instead and pass it as `VkDeviceCreateInfo::pNext`. -Enable this device feature - attach additional structure `VkPhysicalDeviceCoherentMemoryFeaturesAMD` to -`VkPhysicalDeviceFeatures2::pNext` and set its member `deviceCoherentMemory` to `VK_TRUE`. - -5) While creating #VmaAllocator with vmaCreateAllocator() inform VMA that you -have enabled this extension and feature - add #VMA_ALLOCATOR_CREATE_AMD_DEVICE_COHERENT_MEMORY_BIT -to VmaAllocatorCreateInfo::flags. - -\section vk_amd_device_coherent_memory_usage Usage - -After following steps described above, you can create VMA allocations and custom pools -out of the special `DEVICE_COHERENT` and `DEVICE_UNCACHED` memory types on eligible -devices. There are multiple ways to do it, for example: - -- You can request or prefer to allocate out of such memory types by adding - `VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD` to VmaAllocationCreateInfo::requiredFlags - or VmaAllocationCreateInfo::preferredFlags. Those flags can be freely mixed with - other ways of \ref choosing_memory_type, like setting VmaAllocationCreateInfo::usage. -- If you manually found memory type index to use for this purpose, force allocation - from this specific index by setting VmaAllocationCreateInfo::memoryTypeBits `= 1u << index`. - -\section vk_amd_device_coherent_memory_more_information More information - -To learn more about this extension, see [VK_AMD_device_coherent_memory in Vulkan specification](https://www.khronos.org/registry/vulkan/specs/1.2-extensions/man/html/VK_AMD_device_coherent_memory.html) - -Example use of this extension can be found in the code of the sample and test suite -accompanying this library. - - -\page enabling_buffer_device_address Enabling buffer device address - -Device extension VK_KHR_buffer_device_address -allow to fetch raw GPU pointer to a buffer and pass it for usage in a shader code. -It has been promoted to core Vulkan 1.2. - -If you want to use this feature in connection with VMA, follow these steps: - -\section enabling_buffer_device_address_initialization Initialization - -1) (For Vulkan version < 1.2) Call `vkEnumerateDeviceExtensionProperties` for the physical device. -Check if the extension is supported - if returned array of `VkExtensionProperties` contains -"VK_KHR_buffer_device_address". - -2) Call `vkGetPhysicalDeviceFeatures2` for the physical device instead of old `vkGetPhysicalDeviceFeatures`. -Attach additional structure `VkPhysicalDeviceBufferDeviceAddressFeatures*` to `VkPhysicalDeviceFeatures2::pNext` to be returned. -Check if the device feature is really supported - check if `VkPhysicalDeviceBufferDeviceAddressFeatures::bufferDeviceAddress` is true. 
- -3) (For Vulkan version < 1.2) While creating device with `vkCreateDevice`, enable this extension - add -"VK_KHR_buffer_device_address" to the list passed as `VkDeviceCreateInfo::ppEnabledExtensionNames`. - -4) While creating the device, also don't set `VkDeviceCreateInfo::pEnabledFeatures`. -Fill in `VkPhysicalDeviceFeatures2` structure instead and pass it as `VkDeviceCreateInfo::pNext`. -Enable this device feature - attach additional structure `VkPhysicalDeviceBufferDeviceAddressFeatures*` to -`VkPhysicalDeviceFeatures2::pNext` and set its member `bufferDeviceAddress` to `VK_TRUE`. - -5) While creating #VmaAllocator with vmaCreateAllocator() inform VMA that you -have enabled this feature - add #VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT -to VmaAllocatorCreateInfo::flags. - -\section enabling_buffer_device_address_usage Usage - -After following steps described above, you can create buffers with `VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT*` using VMA. -The library automatically adds `VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT*` to -allocated memory blocks wherever it might be needed. - -Please note that the library supports only `VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT*`. -The second part of this functionality related to "capture and replay" is not supported, -as it is intended for usage in debugging tools like RenderDoc, not in everyday Vulkan usage. - -\section enabling_buffer_device_address_more_information More information - -To learn more about this extension, see [VK_KHR_buffer_device_address in Vulkan specification](https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/chap46.html#VK_KHR_buffer_device_address) - -Example use of this extension can be found in the code of the sample and test suite -accompanying this library. - -\page general_considerations General considerations - -\section general_considerations_thread_safety Thread safety - -- The library has no global state, so separate #VmaAllocator objects can be used - independently. - There should be no need to create multiple such objects though - one per `VkDevice` is enough. -- By default, all calls to functions that take #VmaAllocator as first parameter - are safe to call from multiple threads simultaneously because they are - synchronized internally when needed. - This includes allocation and deallocation from default memory pool, as well as custom #VmaPool. -- When the allocator is created with #VMA_ALLOCATOR_CREATE_EXTERNALLY_SYNCHRONIZED_BIT - flag, calls to functions that take such #VmaAllocator object must be - synchronized externally. -- Access to a #VmaAllocation object must be externally synchronized. For example, - you must not call vmaGetAllocationInfo() and vmaMapMemory() from different - threads at the same time if you pass the same #VmaAllocation object to these - functions. -- #VmaVirtualBlock is not safe to be used from multiple threads simultaneously. - -\section general_considerations_versioning_and_compatibility Versioning and compatibility - -The library uses [**Semantic Versioning**](https://semver.org/), -which means version numbers follow convention: Major.Minor.Patch (e.g. 2.3.0), where: - -- Incremented Patch version means a release is backward- and forward-compatible, - introducing only some internal improvements, bug fixes, optimizations etc. - or changes that are out of scope of the official API described in this documentation. 
-- Incremented Minor version means a release is backward-compatible, - so existing code that uses the library should continue to work, while some new - symbols could have been added: new structures, functions, new values in existing - enums and bit flags, new structure members, but not new function parameters. -- Incrementing Major version means a release could break some backward compatibility. - -All changes between official releases are documented in file "CHANGELOG.md". - -\warning Backward compatiblity is considered on the level of C++ source code, not binary linkage. -Adding new members to existing structures is treated as backward compatible if initializing -the new members to binary zero results in the old behavior. -You should always fully initialize all library structures to zeros and not rely on their -exact binary size. - -\section general_considerations_validation_layer_warnings Validation layer warnings - -When using this library, you can meet following types of warnings issued by -Vulkan validation layer. They don't necessarily indicate a bug, so you may need -to just ignore them. - -- *vkBindBufferMemory(): Binding memory to buffer 0xeb8e4 but vkGetBufferMemoryRequirements() has not been called on that buffer.* - - It happens when VK_KHR_dedicated_allocation extension is enabled. - `vkGetBufferMemoryRequirements2KHR` function is used instead, while validation layer seems to be unaware of it. -- *Mapping an image with layout VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL can result in undefined behavior if this memory is used by the device. Only GENERAL or PREINITIALIZED should be used.* - - It happens when you map a buffer or image, because the library maps entire - `VkDeviceMemory` block, where different types of images and buffers may end - up together, especially on GPUs with unified memory like Intel. -- *Non-linear image 0xebc91 is aliased with linear buffer 0xeb8e4 which may indicate a bug.* - - It may happen when you use [defragmentation](@ref defragmentation). - -\section general_considerations_allocation_algorithm Allocation algorithm - -The library uses following algorithm for allocation, in order: - --# Try to find free range of memory in existing blocks. --# If failed, try to create a new block of `VkDeviceMemory`, with preferred block size. --# If failed, try to create such block with size / 2, size / 4, size / 8. --# If failed, try to allocate separate `VkDeviceMemory` for this allocation, - just like when you use #VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT. --# If failed, choose other memory type that meets the requirements specified in - VmaAllocationCreateInfo and go to point 1. --# If failed, return `VK_ERROR_OUT_OF_DEVICE_MEMORY`. - -\section general_considerations_features_not_supported Features not supported - -Features deliberately excluded from the scope of this library: - --# **Data transfer.** Uploading (streaming) and downloading data of buffers and images - between CPU and GPU memory and related synchronization is responsibility of the user. - Defining some "texture" object that would automatically stream its data from a - staging copy in CPU memory to GPU memory would rather be a feature of another, - higher-level library implemented on top of VMA. - VMA doesn't record any commands to a `VkCommandBuffer`. It just allocates memory. --# **Recreation of buffers and images.** Although the library has functions for - buffer and image creation: vmaCreateBuffer(), vmaCreateImage(), you need to - recreate these objects yourself after defragmentation. 
That is because the big - structures `VkBufferCreateInfo`, `VkImageCreateInfo` are not stored in - #VmaAllocation object. --# **Handling CPU memory allocation failures.** When dynamically creating small C++ - objects in CPU memory (not Vulkan memory), allocation failures are not checked - and handled gracefully, because that would complicate code significantly and - is usually not needed in desktop PC applications anyway. - Success of an allocation is just checked with an assert. --# **Code free of any compiler warnings.** Maintaining the library to compile and - work correctly on so many different platforms is hard enough. Being free of - any warnings, on any version of any compiler, is simply not feasible. - There are many preprocessor macros that make some variables unused, function parameters unreferenced, - or conditional expressions constant in some configurations. - The code of this library should not be bigger or more complicated just to silence these warnings. - It is recommended to disable such warnings instead. --# This is a C++ library with C interface. **Bindings or ports to any other programming languages** are welcome as external projects but - are not going to be included into this repository. -*/ +// +// Copyright (c) 2017-2024 Advanced Micro Devices, Inc. All rights reserved. +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. +// + +#ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H +#define AMD_VULKAN_MEMORY_ALLOCATOR_H + +/** \mainpage Vulkan Memory Allocator + +Version 3.1.0 + +Copyright (c) 2017-2024 Advanced Micro Devices, Inc. All rights reserved. 
\n +License: MIT \n +See also: [product page on GPUOpen](https://gpuopen.com/gaming-product/vulkan-memory-allocator/), +[repository on GitHub](https://github.com/GPUOpen-LibrariesAndSDKs/VulkanMemoryAllocator) + + +API documentation divided into groups: [Topics](topics.html) + +General documentation chapters: + +- User guide + - \subpage quick_start + - [Project setup](@ref quick_start_project_setup) + - [Initialization](@ref quick_start_initialization) + - [Resource allocation](@ref quick_start_resource_allocation) + - \subpage choosing_memory_type + - [Usage](@ref choosing_memory_type_usage) + - [Required and preferred flags](@ref choosing_memory_type_required_preferred_flags) + - [Explicit memory types](@ref choosing_memory_type_explicit_memory_types) + - [Custom memory pools](@ref choosing_memory_type_custom_memory_pools) + - [Dedicated allocations](@ref choosing_memory_type_dedicated_allocations) + - \subpage memory_mapping + - [Copy functions](@ref memory_mapping_copy_functions) + - [Mapping functions](@ref memory_mapping_mapping_functions) + - [Persistently mapped memory](@ref memory_mapping_persistently_mapped_memory) + - [Cache flush and invalidate](@ref memory_mapping_cache_control) + - \subpage staying_within_budget + - [Querying for budget](@ref staying_within_budget_querying_for_budget) + - [Controlling memory usage](@ref staying_within_budget_controlling_memory_usage) + - \subpage resource_aliasing + - \subpage custom_memory_pools + - [Choosing memory type index](@ref custom_memory_pools_MemTypeIndex) + - [When not to use custom pools](@ref custom_memory_pools_when_not_use) + - [Linear allocation algorithm](@ref linear_algorithm) + - [Free-at-once](@ref linear_algorithm_free_at_once) + - [Stack](@ref linear_algorithm_stack) + - [Double stack](@ref linear_algorithm_double_stack) + - [Ring buffer](@ref linear_algorithm_ring_buffer) + - \subpage defragmentation + - \subpage statistics + - [Numeric statistics](@ref statistics_numeric_statistics) + - [JSON dump](@ref statistics_json_dump) + - \subpage allocation_annotation + - [Allocation user data](@ref allocation_user_data) + - [Allocation names](@ref allocation_names) + - \subpage virtual_allocator + - \subpage debugging_memory_usage + - [Memory initialization](@ref debugging_memory_usage_initialization) + - [Margins](@ref debugging_memory_usage_margins) + - [Corruption detection](@ref debugging_memory_usage_corruption_detection) + - [Leak detection features](@ref debugging_memory_usage_leak_detection) + - \subpage other_api_interop +- \subpage usage_patterns + - [GPU-only resource](@ref usage_patterns_gpu_only) + - [Staging copy for upload](@ref usage_patterns_staging_copy_upload) + - [Readback](@ref usage_patterns_readback) + - [Advanced data uploading](@ref usage_patterns_advanced_data_uploading) + - [Other use cases](@ref usage_patterns_other_use_cases) +- \subpage configuration + - [Pointers to Vulkan functions](@ref config_Vulkan_functions) + - [Custom host memory allocator](@ref custom_memory_allocator) + - [Device memory allocation callbacks](@ref allocation_callbacks) + - [Device heap memory limit](@ref heap_memory_limit) +- Extension support + - \subpage vk_khr_dedicated_allocation + - \subpage enabling_buffer_device_address + - \subpage vk_ext_memory_priority + - \subpage vk_amd_device_coherent_memory +- \subpage general_considerations + - [Thread safety](@ref general_considerations_thread_safety) + - [Versioning and compatibility](@ref general_considerations_versioning_and_compatibility) + - [Validation layer 
warnings](@ref general_considerations_validation_layer_warnings) + - [Allocation algorithm](@ref general_considerations_allocation_algorithm) + - [Features not supported](@ref general_considerations_features_not_supported) + +\defgroup group_init Library initialization + +\brief API elements related to the initialization and management of the entire library, especially #VmaAllocator object. + +\defgroup group_alloc Memory allocation + +\brief API elements related to the allocation, deallocation, and management of Vulkan memory, buffers, images. +Most basic ones being: vmaCreateBuffer(), vmaCreateImage(). + +\defgroup group_virtual Virtual allocator + +\brief API elements related to the mechanism of \ref virtual_allocator - using the core allocation algorithm +for user-defined purpose without allocating any real GPU memory. + +\defgroup group_stats Statistics + +\brief API elements that query current status of the allocator, from memory usage, budget, to full dump of the internal state in JSON format. +See documentation chapter: \ref statistics. +*/ + + +#ifdef __cplusplus +extern "C" { +#endif + +#include + +#if !defined(VMA_VULKAN_VERSION) + #if defined(VK_VERSION_1_3) + #define VMA_VULKAN_VERSION 1003000 + #elif defined(VK_VERSION_1_2) + #define VMA_VULKAN_VERSION 1002000 + #elif defined(VK_VERSION_1_1) + #define VMA_VULKAN_VERSION 1001000 + #else + #define VMA_VULKAN_VERSION 1000000 + #endif +#endif + +#if defined(__ANDROID__) && defined(VK_NO_PROTOTYPES) && VMA_STATIC_VULKAN_FUNCTIONS + extern PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr; + extern PFN_vkGetDeviceProcAddr vkGetDeviceProcAddr; + extern PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties; + extern PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties; + extern PFN_vkAllocateMemory vkAllocateMemory; + extern PFN_vkFreeMemory vkFreeMemory; + extern PFN_vkMapMemory vkMapMemory; + extern PFN_vkUnmapMemory vkUnmapMemory; + extern PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges; + extern PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges; + extern PFN_vkBindBufferMemory vkBindBufferMemory; + extern PFN_vkBindImageMemory vkBindImageMemory; + extern PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements; + extern PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements; + extern PFN_vkCreateBuffer vkCreateBuffer; + extern PFN_vkDestroyBuffer vkDestroyBuffer; + extern PFN_vkCreateImage vkCreateImage; + extern PFN_vkDestroyImage vkDestroyImage; + extern PFN_vkCmdCopyBuffer vkCmdCopyBuffer; + #if VMA_VULKAN_VERSION >= 1001000 + extern PFN_vkGetBufferMemoryRequirements2 vkGetBufferMemoryRequirements2; + extern PFN_vkGetImageMemoryRequirements2 vkGetImageMemoryRequirements2; + extern PFN_vkBindBufferMemory2 vkBindBufferMemory2; + extern PFN_vkBindImageMemory2 vkBindImageMemory2; + extern PFN_vkGetPhysicalDeviceMemoryProperties2 vkGetPhysicalDeviceMemoryProperties2; + #endif // #if VMA_VULKAN_VERSION >= 1001000 +#endif // #if defined(__ANDROID__) && VMA_STATIC_VULKAN_FUNCTIONS && VK_NO_PROTOTYPES + +#if !defined(VMA_DEDICATED_ALLOCATION) + #if VK_KHR_get_memory_requirements2 && VK_KHR_dedicated_allocation + #define VMA_DEDICATED_ALLOCATION 1 + #else + #define VMA_DEDICATED_ALLOCATION 0 + #endif +#endif + +#if !defined(VMA_BIND_MEMORY2) + #if VK_KHR_bind_memory2 + #define VMA_BIND_MEMORY2 1 + #else + #define VMA_BIND_MEMORY2 0 + #endif +#endif + +#if !defined(VMA_MEMORY_BUDGET) + #if VK_EXT_memory_budget && (VK_KHR_get_physical_device_properties2 || 
VMA_VULKAN_VERSION >= 1001000) + #define VMA_MEMORY_BUDGET 1 + #else + #define VMA_MEMORY_BUDGET 0 + #endif +#endif + +// Defined to 1 when VK_KHR_buffer_device_address device extension or equivalent core Vulkan 1.2 feature is defined in its headers. +#if !defined(VMA_BUFFER_DEVICE_ADDRESS) + #if VK_KHR_buffer_device_address || VMA_VULKAN_VERSION >= 1002000 + #define VMA_BUFFER_DEVICE_ADDRESS 1 + #else + #define VMA_BUFFER_DEVICE_ADDRESS 0 + #endif +#endif + +// Defined to 1 when VK_EXT_memory_priority device extension is defined in Vulkan headers. +#if !defined(VMA_MEMORY_PRIORITY) + #if VK_EXT_memory_priority + #define VMA_MEMORY_PRIORITY 1 + #else + #define VMA_MEMORY_PRIORITY 0 + #endif +#endif + +// Defined to 1 when VK_KHR_maintenance4 device extension is defined in Vulkan headers. +#if !defined(VMA_KHR_MAINTENANCE4) + #if VK_KHR_maintenance4 + #define VMA_KHR_MAINTENANCE4 1 + #else + #define VMA_KHR_MAINTENANCE4 0 + #endif +#endif + +// Defined to 1 when VK_KHR_maintenance5 device extension is defined in Vulkan headers. +#if !defined(VMA_KHR_MAINTENANCE5) + #if VK_KHR_maintenance5 + #define VMA_KHR_MAINTENANCE5 1 + #else + #define VMA_KHR_MAINTENANCE5 0 + #endif +#endif + + +// Defined to 1 when VK_KHR_external_memory device extension is defined in Vulkan headers. +#if !defined(VMA_EXTERNAL_MEMORY) + #if VK_KHR_external_memory + #define VMA_EXTERNAL_MEMORY 1 + #else + #define VMA_EXTERNAL_MEMORY 0 + #endif +#endif + +// Define these macros to decorate all public functions with additional code, +// before and after returned type, appropriately. This may be useful for +// exporting the functions when compiling VMA as a separate library. Example: +// #define VMA_CALL_PRE __declspec(dllexport) +// #define VMA_CALL_POST __cdecl +#ifndef VMA_CALL_PRE + #define VMA_CALL_PRE +#endif +#ifndef VMA_CALL_POST + #define VMA_CALL_POST +#endif + +// Define this macro to decorate pNext pointers with an attribute specifying the Vulkan +// structure that will be extended via the pNext chain. +#ifndef VMA_EXTENDS_VK_STRUCT + #define VMA_EXTENDS_VK_STRUCT(vkStruct) +#endif + +// Define this macro to decorate pointers with an attribute specifying the +// length of the array they point to if they are not null. +// +// The length may be one of +// - The name of another parameter in the argument list where the pointer is declared +// - The name of another member in the struct where the pointer is declared +// - The name of a member of a struct type, meaning the value of that member in +// the context of the call. For example +// VMA_LEN_IF_NOT_NULL("VkPhysicalDeviceMemoryProperties::memoryHeapCount"), +// this means the number of memory heaps available in the device associated +// with the VmaAllocator being dealt with. +#ifndef VMA_LEN_IF_NOT_NULL + #define VMA_LEN_IF_NOT_NULL(len) +#endif + +// The VMA_NULLABLE macro is defined to be _Nullable when compiling with Clang. +// see: https://clang.llvm.org/docs/AttributeReference.html#nullable +#ifndef VMA_NULLABLE + #ifdef __clang__ + #define VMA_NULLABLE _Nullable + #else + #define VMA_NULLABLE + #endif +#endif + +// The VMA_NOT_NULL macro is defined to be _Nonnull when compiling with Clang. 
+// see: https://clang.llvm.org/docs/AttributeReference.html#nonnull +#ifndef VMA_NOT_NULL + #ifdef __clang__ + #define VMA_NOT_NULL _Nonnull + #else + #define VMA_NOT_NULL + #endif +#endif + +// If non-dispatchable handles are represented as pointers then we can give +// then nullability annotations +#ifndef VMA_NOT_NULL_NON_DISPATCHABLE + #if defined(__LP64__) || defined(_WIN64) || (defined(__x86_64__) && !defined(__ILP32__) ) || defined(_M_X64) || defined(__ia64) || defined (_M_IA64) || defined(__aarch64__) || defined(__powerpc64__) + #define VMA_NOT_NULL_NON_DISPATCHABLE VMA_NOT_NULL + #else + #define VMA_NOT_NULL_NON_DISPATCHABLE + #endif +#endif + +#ifndef VMA_NULLABLE_NON_DISPATCHABLE + #if defined(__LP64__) || defined(_WIN64) || (defined(__x86_64__) && !defined(__ILP32__) ) || defined(_M_X64) || defined(__ia64) || defined (_M_IA64) || defined(__aarch64__) || defined(__powerpc64__) + #define VMA_NULLABLE_NON_DISPATCHABLE VMA_NULLABLE + #else + #define VMA_NULLABLE_NON_DISPATCHABLE + #endif +#endif + +#ifndef VMA_STATS_STRING_ENABLED + #define VMA_STATS_STRING_ENABLED 1 +#endif + +//////////////////////////////////////////////////////////////////////////////// +//////////////////////////////////////////////////////////////////////////////// +// +// INTERFACE +// +//////////////////////////////////////////////////////////////////////////////// +//////////////////////////////////////////////////////////////////////////////// + +// Sections for managing code placement in file, only for development purposes e.g. for convenient folding inside an IDE. +#ifndef _VMA_ENUM_DECLARATIONS + +/** +\addtogroup group_init +@{ +*/ + +/// Flags for created #VmaAllocator. +typedef enum VmaAllocatorCreateFlagBits +{ + /** \brief Allocator and all objects created from it will not be synchronized internally, so you must guarantee they are used from only one thread at a time or synchronized externally by you. + + Using this flag may increase performance because internal mutexes are not used. + */ + VMA_ALLOCATOR_CREATE_EXTERNALLY_SYNCHRONIZED_BIT = 0x00000001, + /** \brief Enables usage of VK_KHR_dedicated_allocation extension. + + The flag works only if VmaAllocatorCreateInfo::vulkanApiVersion `== VK_API_VERSION_1_0`. + When it is `VK_API_VERSION_1_1`, the flag is ignored because the extension has been promoted to Vulkan 1.1. + + Using this extension will automatically allocate dedicated blocks of memory for + some buffers and images instead of suballocating place for them out of bigger + memory blocks (as if you explicitly used #VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT + flag) when it is recommended by the driver. It may improve performance on some + GPUs. + + You may set this flag only if you found out that following device extensions are + supported, you enabled them while creating Vulkan device passed as + VmaAllocatorCreateInfo::device, and you want them to be used internally by this + library: + + - VK_KHR_get_memory_requirements2 (device extension) + - VK_KHR_dedicated_allocation (device extension) + + When this flag is set, you can experience following warnings reported by Vulkan + validation layer. You can ignore them. + + > vkBindBufferMemory(): Binding memory to buffer 0x2d but vkGetBufferMemoryRequirements() has not been called on that buffer. + */ + VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT = 0x00000002, + /** + Enables usage of VK_KHR_bind_memory2 extension. + + The flag works only if VmaAllocatorCreateInfo::vulkanApiVersion `== VK_API_VERSION_1_0`. 
+ When it is `VK_API_VERSION_1_1`, the flag is ignored because the extension has been promoted to Vulkan 1.1. + + You may set this flag only if you found out that this device extension is supported, + you enabled it while creating Vulkan device passed as VmaAllocatorCreateInfo::device, + and you want it to be used internally by this library. + + The extension provides functions `vkBindBufferMemory2KHR` and `vkBindImageMemory2KHR`, + which allow to pass a chain of `pNext` structures while binding. + This flag is required if you use `pNext` parameter in vmaBindBufferMemory2() or vmaBindImageMemory2(). + */ + VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT = 0x00000004, + /** + Enables usage of VK_EXT_memory_budget extension. + + You may set this flag only if you found out that this device extension is supported, + you enabled it while creating Vulkan device passed as VmaAllocatorCreateInfo::device, + and you want it to be used internally by this library, along with another instance extension + VK_KHR_get_physical_device_properties2, which is required by it (or Vulkan 1.1, where this extension is promoted). + + The extension provides query for current memory usage and budget, which will probably + be more accurate than an estimation used by the library otherwise. + */ + VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT = 0x00000008, + /** + Enables usage of VK_AMD_device_coherent_memory extension. + + You may set this flag only if you: + + - found out that this device extension is supported and enabled it while creating Vulkan device passed as VmaAllocatorCreateInfo::device, + - checked that `VkPhysicalDeviceCoherentMemoryFeaturesAMD::deviceCoherentMemory` is true and set it while creating the Vulkan device, + - want it to be used internally by this library. + + The extension and accompanying device feature provide access to memory types with + `VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD` and `VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD` flags. + They are useful mostly for writing breadcrumb markers - a common method for debugging GPU crash/hang/TDR. + + When the extension is not enabled, such memory types are still enumerated, but their usage is illegal. + To protect from this error, if you don't create the allocator with this flag, it will refuse to allocate any memory or create a custom pool in such memory type, + returning `VK_ERROR_FEATURE_NOT_PRESENT`. + */ + VMA_ALLOCATOR_CREATE_AMD_DEVICE_COHERENT_MEMORY_BIT = 0x00000010, + /** + Enables usage of "buffer device address" feature, which allows you to use function + `vkGetBufferDeviceAddress*` to get raw GPU pointer to a buffer and pass it for usage inside a shader. + + You may set this flag only if you: + + 1. (For Vulkan version < 1.2) Found as available and enabled device extension + VK_KHR_buffer_device_address. + This extension is promoted to core Vulkan 1.2. + 2. Found as available and enabled device feature `VkPhysicalDeviceBufferDeviceAddressFeatures::bufferDeviceAddress`. + + When this flag is set, you can create buffers with `VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT` using VMA. + The library automatically adds `VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT` to + allocated memory blocks wherever it might be needed. + + For more information, see documentation chapter \ref enabling_buffer_device_address. + */ + VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT = 0x00000020, + /** + Enables usage of VK_EXT_memory_priority extension in the library. 
+ + You may set this flag only if you found available and enabled this device extension, + along with `VkPhysicalDeviceMemoryPriorityFeaturesEXT::memoryPriority == VK_TRUE`, + while creating Vulkan device passed as VmaAllocatorCreateInfo::device. + + When this flag is used, VmaAllocationCreateInfo::priority and VmaPoolCreateInfo::priority + are used to set priorities of allocated Vulkan memory. Without it, these variables are ignored. + + A priority must be a floating-point value between 0 and 1, indicating the priority of the allocation relative to other memory allocations. + Larger values are higher priority. The granularity of the priorities is implementation-dependent. + It is automatically passed to every call to `vkAllocateMemory` done by the library using structure `VkMemoryPriorityAllocateInfoEXT`. + The value to be used for default priority is 0.5. + For more details, see the documentation of the VK_EXT_memory_priority extension. + */ + VMA_ALLOCATOR_CREATE_EXT_MEMORY_PRIORITY_BIT = 0x00000040, + /** + Enables usage of VK_KHR_maintenance4 extension in the library. + + You may set this flag only if you found available and enabled this device extension, + while creating Vulkan device passed as VmaAllocatorCreateInfo::device. + */ + VMA_ALLOCATOR_CREATE_KHR_MAINTENANCE4_BIT = 0x00000080, + /** + Enables usage of VK_KHR_maintenance5 extension in the library. + + You should set this flag if you found available and enabled this device extension, + while creating Vulkan device passed as VmaAllocatorCreateInfo::device. + */ + VMA_ALLOCATOR_CREATE_KHR_MAINTENANCE5_BIT = 0x00000100, + + VMA_ALLOCATOR_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VmaAllocatorCreateFlagBits; +/// See #VmaAllocatorCreateFlagBits. +typedef VkFlags VmaAllocatorCreateFlags; + +/** @} */ + +/** +\addtogroup group_alloc +@{ +*/ + +/// \brief Intended usage of the allocated memory. +typedef enum VmaMemoryUsage +{ + /** No intended memory usage specified. + Use other members of VmaAllocationCreateInfo to specify your requirements. + */ + VMA_MEMORY_USAGE_UNKNOWN = 0, + /** + \deprecated Obsolete, preserved for backward compatibility. + Prefers `VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT`. + */ + VMA_MEMORY_USAGE_GPU_ONLY = 1, + /** + \deprecated Obsolete, preserved for backward compatibility. + Guarantees `VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT` and `VK_MEMORY_PROPERTY_HOST_COHERENT_BIT`. + */ + VMA_MEMORY_USAGE_CPU_ONLY = 2, + /** + \deprecated Obsolete, preserved for backward compatibility. + Guarantees `VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT`, prefers `VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT`. + */ + VMA_MEMORY_USAGE_CPU_TO_GPU = 3, + /** + \deprecated Obsolete, preserved for backward compatibility. + Guarantees `VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT`, prefers `VK_MEMORY_PROPERTY_HOST_CACHED_BIT`. + */ + VMA_MEMORY_USAGE_GPU_TO_CPU = 4, + /** + \deprecated Obsolete, preserved for backward compatibility. + Prefers not `VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT`. + */ + VMA_MEMORY_USAGE_CPU_COPY = 5, + /** + Lazily allocated GPU memory having `VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT`. + Exists mostly on mobile platforms. Using it on desktop PC or other GPUs with no such memory type present will fail the allocation. + + Usage: Memory for transient attachment images (color attachments, depth attachments etc.), created with `VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT`. + + Allocations with this usage are always created as dedicated - it implies #VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT. 
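    As an illustrative sketch only (assuming `allocator` is a valid #VmaAllocator and error handling is omitted; not part of the original comment), a transient color attachment could be requested from such memory like this:

    \code
    VkImageCreateInfo imgCreateInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
    imgCreateInfo.imageType = VK_IMAGE_TYPE_2D;
    imgCreateInfo.extent.width = 1920;
    imgCreateInfo.extent.height = 1080;
    imgCreateInfo.extent.depth = 1;
    imgCreateInfo.mipLevels = 1;
    imgCreateInfo.arrayLayers = 1;
    imgCreateInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
    imgCreateInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
    imgCreateInfo.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
    imgCreateInfo.usage = VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
    imgCreateInfo.samples = VK_SAMPLE_COUNT_1_BIT;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_LAZILY_ALLOCATED;

    VkImage img;
    VmaAllocation alloc;
    // Fails on GPUs that expose no LAZILY_ALLOCATED memory type, as noted above.
    VkResult res = vmaCreateImage(allocator, &imgCreateInfo, &allocCreateInfo, &img, &alloc, nullptr);
    \endcode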
+ */ + VMA_MEMORY_USAGE_GPU_LAZILY_ALLOCATED = 6, + /** + Selects best memory type automatically. + This flag is recommended for most common use cases. + + When using this flag, if you want to map the allocation (using vmaMapMemory() or #VMA_ALLOCATION_CREATE_MAPPED_BIT), + you must pass one of the flags: #VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT or #VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT + in VmaAllocationCreateInfo::flags. + + It can be used only with functions that let the library know `VkBufferCreateInfo` or `VkImageCreateInfo`, e.g. + vmaCreateBuffer(), vmaCreateImage(), vmaFindMemoryTypeIndexForBufferInfo(), vmaFindMemoryTypeIndexForImageInfo() + and not with generic memory allocation functions. + */ + VMA_MEMORY_USAGE_AUTO = 7, + /** + Selects best memory type automatically with preference for GPU (device) memory. + + When using this flag, if you want to map the allocation (using vmaMapMemory() or #VMA_ALLOCATION_CREATE_MAPPED_BIT), + you must pass one of the flags: #VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT or #VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT + in VmaAllocationCreateInfo::flags. + + It can be used only with functions that let the library know `VkBufferCreateInfo` or `VkImageCreateInfo`, e.g. + vmaCreateBuffer(), vmaCreateImage(), vmaFindMemoryTypeIndexForBufferInfo(), vmaFindMemoryTypeIndexForImageInfo() + and not with generic memory allocation functions. + */ + VMA_MEMORY_USAGE_AUTO_PREFER_DEVICE = 8, + /** + Selects best memory type automatically with preference for CPU (host) memory. + + When using this flag, if you want to map the allocation (using vmaMapMemory() or #VMA_ALLOCATION_CREATE_MAPPED_BIT), + you must pass one of the flags: #VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT or #VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT + in VmaAllocationCreateInfo::flags. + + It can be used only with functions that let the library know `VkBufferCreateInfo` or `VkImageCreateInfo`, e.g. + vmaCreateBuffer(), vmaCreateImage(), vmaFindMemoryTypeIndexForBufferInfo(), vmaFindMemoryTypeIndexForImageInfo() + and not with generic memory allocation functions. + */ + VMA_MEMORY_USAGE_AUTO_PREFER_HOST = 9, + + VMA_MEMORY_USAGE_MAX_ENUM = 0x7FFFFFFF +} VmaMemoryUsage; + +/// Flags to be passed as VmaAllocationCreateInfo::flags. +typedef enum VmaAllocationCreateFlagBits +{ + /** \brief Set this flag if the allocation should have its own memory block. + + Use it for special, big resources, like fullscreen images used as attachments. + + If you use this flag while creating a buffer or an image, `VkMemoryDedicatedAllocateInfo` + structure is applied if possible. + */ + VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT = 0x00000001, + + /** \brief Set this flag to only try to allocate from existing `VkDeviceMemory` blocks and never create new such block. + + If new allocation cannot be placed in any of the existing blocks, allocation + fails with `VK_ERROR_OUT_OF_DEVICE_MEMORY` error. + + You should not use #VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT and + #VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT at the same time. It makes no sense. + */ + VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT = 0x00000002, + /** \brief Set this flag to use a memory that will be persistently mapped and retrieve pointer to it. + + Pointer to mapped memory will be returned through VmaAllocationInfo::pMappedData. + + It is valid to use this flag for allocation made from memory type that is not + `HOST_VISIBLE`. This flag is then ignored and memory is not mapped. 
This is + useful if you need an allocation that is efficient to use on GPU + (`DEVICE_LOCAL`) and still want to map it directly if possible on platforms that + support it (e.g. Intel GPU). + */ + VMA_ALLOCATION_CREATE_MAPPED_BIT = 0x00000004, + /** \deprecated Preserved for backward compatibility. Consider using vmaSetAllocationName() instead. + + Set this flag to treat VmaAllocationCreateInfo::pUserData as pointer to a + null-terminated string. Instead of copying pointer value, a local copy of the + string is made and stored in allocation's `pName`. The string is automatically + freed together with the allocation. It is also used in vmaBuildStatsString(). + */ + VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT = 0x00000020, + /** Allocation will be created from upper stack in a double stack pool. + + This flag is only allowed for custom pools created with #VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT flag. + */ + VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT = 0x00000040, + /** Create both buffer/image and allocation, but don't bind them together. + It is useful when you want to bind yourself to do some more advanced binding, e.g. using some extensions. + The flag is meaningful only with functions that bind by default: vmaCreateBuffer(), vmaCreateImage(). + Otherwise it is ignored. + + If you want to make sure the new buffer/image is not tied to the new memory allocation + through `VkMemoryDedicatedAllocateInfoKHR` structure in case the allocation ends up in its own memory block, + use also flag #VMA_ALLOCATION_CREATE_CAN_ALIAS_BIT. + */ + VMA_ALLOCATION_CREATE_DONT_BIND_BIT = 0x00000080, + /** Create allocation only if additional device memory required for it, if any, won't exceed + memory budget. Otherwise return `VK_ERROR_OUT_OF_DEVICE_MEMORY`. + */ + VMA_ALLOCATION_CREATE_WITHIN_BUDGET_BIT = 0x00000100, + /** \brief Set this flag if the allocated memory will have aliasing resources. + + Usage of this flag prevents supplying `VkMemoryDedicatedAllocateInfoKHR` when #VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT is specified. + Otherwise created dedicated memory will not be suitable for aliasing resources, resulting in Vulkan Validation Layer errors. + */ + VMA_ALLOCATION_CREATE_CAN_ALIAS_BIT = 0x00000200, + /** + Requests possibility to map the allocation (using vmaMapMemory() or #VMA_ALLOCATION_CREATE_MAPPED_BIT). + + - If you use #VMA_MEMORY_USAGE_AUTO or other `VMA_MEMORY_USAGE_AUTO*` value, + you must use this flag to be able to map the allocation. Otherwise, mapping is incorrect. + - If you use other value of #VmaMemoryUsage, this flag is ignored and mapping is always possible in memory types that are `HOST_VISIBLE`. + This includes allocations created in \ref custom_memory_pools. + + Declares that mapped memory will only be written sequentially, e.g. using `memcpy()` or a loop writing number-by-number, + never read or accessed randomly, so a memory type can be selected that is uncached and write-combined. + + \warning Violating this declaration may work correctly, but will likely be very slow. + Watch out for implicit reads introduced by doing e.g. `pMappedData[i] += x;` + Better prepare your data in a local variable and `memcpy()` it to the mapped pointer all at once. + */ + VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT = 0x00000400, + /** + Requests possibility to map the allocation (using vmaMapMemory() or #VMA_ALLOCATION_CREATE_MAPPED_BIT). + + - If you use #VMA_MEMORY_USAGE_AUTO or other `VMA_MEMORY_USAGE_AUTO*` value, + you must use this flag to be able to map the allocation. 
Otherwise, mapping is incorrect. + - If you use other value of #VmaMemoryUsage, this flag is ignored and mapping is always possible in memory types that are `HOST_VISIBLE`. + This includes allocations created in \ref custom_memory_pools. + + Declares that mapped memory can be read, written, and accessed in random order, + so a `HOST_CACHED` memory type is preferred. + */ + VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT = 0x00000800, + /** + Together with #VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT or #VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT, + it says that despite request for host access, a not-`HOST_VISIBLE` memory type can be selected + if it may improve performance. + + By using this flag, you declare that you will check if the allocation ended up in a `HOST_VISIBLE` memory type + (e.g. using vmaGetAllocationMemoryProperties()) and if not, you will create some "staging" buffer and + issue an explicit transfer to write/read your data. + To prepare for this possibility, don't forget to add appropriate flags like + `VK_BUFFER_USAGE_TRANSFER_DST_BIT`, `VK_BUFFER_USAGE_TRANSFER_SRC_BIT` to the parameters of created buffer or image. + */ + VMA_ALLOCATION_CREATE_HOST_ACCESS_ALLOW_TRANSFER_INSTEAD_BIT = 0x00001000, + /** Allocation strategy that chooses smallest possible free range for the allocation + to minimize memory usage and fragmentation, possibly at the expense of allocation time. + */ + VMA_ALLOCATION_CREATE_STRATEGY_MIN_MEMORY_BIT = 0x00010000, + /** Allocation strategy that chooses first suitable free range for the allocation - + not necessarily in terms of the smallest offset but the one that is easiest and fastest to find + to minimize allocation time, possibly at the expense of allocation quality. + */ + VMA_ALLOCATION_CREATE_STRATEGY_MIN_TIME_BIT = 0x00020000, + /** Allocation strategy that chooses always the lowest offset in available space. + This is not the most efficient strategy but achieves highly packed data. + Used internally by defragmentation, not recommended in typical usage. + */ + VMA_ALLOCATION_CREATE_STRATEGY_MIN_OFFSET_BIT = 0x00040000, + /** Alias to #VMA_ALLOCATION_CREATE_STRATEGY_MIN_MEMORY_BIT. + */ + VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT = VMA_ALLOCATION_CREATE_STRATEGY_MIN_MEMORY_BIT, + /** Alias to #VMA_ALLOCATION_CREATE_STRATEGY_MIN_TIME_BIT. + */ + VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT = VMA_ALLOCATION_CREATE_STRATEGY_MIN_TIME_BIT, + /** A bit mask to extract only `STRATEGY` bits from entire set of flags. + */ + VMA_ALLOCATION_CREATE_STRATEGY_MASK = + VMA_ALLOCATION_CREATE_STRATEGY_MIN_MEMORY_BIT | + VMA_ALLOCATION_CREATE_STRATEGY_MIN_TIME_BIT | + VMA_ALLOCATION_CREATE_STRATEGY_MIN_OFFSET_BIT, + + VMA_ALLOCATION_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VmaAllocationCreateFlagBits; +/// See #VmaAllocationCreateFlagBits. +typedef VkFlags VmaAllocationCreateFlags; + +/// Flags to be passed as VmaPoolCreateInfo::flags. +typedef enum VmaPoolCreateFlagBits +{ + /** \brief Use this flag if you always allocate only buffers and linear images or only optimal images out of this pool and so Buffer-Image Granularity can be ignored. + + This is an optional optimization flag. + + If you always allocate using vmaCreateBuffer(), vmaCreateImage(), + vmaAllocateMemoryForBuffer(), then you don't need to use it because allocator + knows exact type of your allocations so it can handle Buffer-Image Granularity + in the optimal way. 
+ + If you also allocate using vmaAllocateMemoryForImage() or vmaAllocateMemory(), + exact type of such allocations is not known, so allocator must be conservative + in handling Buffer-Image Granularity, which can lead to suboptimal allocation + (wasted memory). In that case, if you can make sure you always allocate only + buffers and linear images or only optimal images out of this pool, use this flag + to make allocator disregard Buffer-Image Granularity and so make allocations + faster and more optimal. + */ + VMA_POOL_CREATE_IGNORE_BUFFER_IMAGE_GRANULARITY_BIT = 0x00000002, + + /** \brief Enables alternative, linear allocation algorithm in this pool. + + Specify this flag to enable linear allocation algorithm, which always creates + new allocations after last one and doesn't reuse space from allocations freed in + between. It trades memory consumption for simplified algorithm and data + structure, which has better performance and uses less memory for metadata. + + By using this flag, you can achieve behavior of free-at-once, stack, + ring buffer, and double stack. + For details, see documentation chapter \ref linear_algorithm. + */ + VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT = 0x00000004, + + /** Bit mask to extract only `ALGORITHM` bits from entire set of flags. + */ + VMA_POOL_CREATE_ALGORITHM_MASK = + VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT, + + VMA_POOL_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VmaPoolCreateFlagBits; +/// Flags to be passed as VmaPoolCreateInfo::flags. See #VmaPoolCreateFlagBits. +typedef VkFlags VmaPoolCreateFlags; + +/// Flags to be passed as VmaDefragmentationInfo::flags. +typedef enum VmaDefragmentationFlagBits +{ + /* \brief Use simple but fast algorithm for defragmentation. + May not achieve best results but will require least time to compute and least allocations to copy. + */ + VMA_DEFRAGMENTATION_FLAG_ALGORITHM_FAST_BIT = 0x1, + /* \brief Default defragmentation algorithm, applied also when no `ALGORITHM` flag is specified. + Offers a balance between defragmentation quality and the amount of allocations and bytes that need to be moved. + */ + VMA_DEFRAGMENTATION_FLAG_ALGORITHM_BALANCED_BIT = 0x2, + /* \brief Perform full defragmentation of memory. + Can result in notably more time to compute and allocations to copy, but will achieve best memory packing. + */ + VMA_DEFRAGMENTATION_FLAG_ALGORITHM_FULL_BIT = 0x4, + /** \brief Use the most roboust algorithm at the cost of time to compute and number of copies to make. + Only available when bufferImageGranularity is greater than 1, since it aims to reduce + alignment issues between different types of resources. + Otherwise falls back to same behavior as #VMA_DEFRAGMENTATION_FLAG_ALGORITHM_FULL_BIT. + */ + VMA_DEFRAGMENTATION_FLAG_ALGORITHM_EXTENSIVE_BIT = 0x8, + + /// A bit mask to extract only `ALGORITHM` bits from entire set of flags. + VMA_DEFRAGMENTATION_FLAG_ALGORITHM_MASK = + VMA_DEFRAGMENTATION_FLAG_ALGORITHM_FAST_BIT | + VMA_DEFRAGMENTATION_FLAG_ALGORITHM_BALANCED_BIT | + VMA_DEFRAGMENTATION_FLAG_ALGORITHM_FULL_BIT | + VMA_DEFRAGMENTATION_FLAG_ALGORITHM_EXTENSIVE_BIT, + + VMA_DEFRAGMENTATION_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VmaDefragmentationFlagBits; +/// See #VmaDefragmentationFlagBits. +typedef VkFlags VmaDefragmentationFlags; + +/// Operation performed on single defragmentation move. See structure #VmaDefragmentationMove. +typedef enum VmaDefragmentationMoveOperation +{ + /// Buffer/image has been recreated at `dstTmpAllocation`, data has been copied, old buffer/image has been destroyed. 
`srcAllocation` should be changed to point to the new place. This is the default value set by vmaBeginDefragmentationPass(). + VMA_DEFRAGMENTATION_MOVE_OPERATION_COPY = 0, + /// Set this value if you cannot move the allocation. New place reserved at `dstTmpAllocation` will be freed. `srcAllocation` will remain unchanged. + VMA_DEFRAGMENTATION_MOVE_OPERATION_IGNORE = 1, + /// Set this value if you decide to abandon the allocation and you destroyed the buffer/image. New place reserved at `dstTmpAllocation` will be freed, along with `srcAllocation`, which will be destroyed. + VMA_DEFRAGMENTATION_MOVE_OPERATION_DESTROY = 2, +} VmaDefragmentationMoveOperation; + +/** @} */ + +/** +\addtogroup group_virtual +@{ +*/ + +/// Flags to be passed as VmaVirtualBlockCreateInfo::flags. +typedef enum VmaVirtualBlockCreateFlagBits +{ + /** \brief Enables alternative, linear allocation algorithm in this virtual block. + + Specify this flag to enable linear allocation algorithm, which always creates + new allocations after last one and doesn't reuse space from allocations freed in + between. It trades memory consumption for simplified algorithm and data + structure, which has better performance and uses less memory for metadata. + + By using this flag, you can achieve behavior of free-at-once, stack, + ring buffer, and double stack. + For details, see documentation chapter \ref linear_algorithm. + */ + VMA_VIRTUAL_BLOCK_CREATE_LINEAR_ALGORITHM_BIT = 0x00000001, + + /** \brief Bit mask to extract only `ALGORITHM` bits from entire set of flags. + */ + VMA_VIRTUAL_BLOCK_CREATE_ALGORITHM_MASK = + VMA_VIRTUAL_BLOCK_CREATE_LINEAR_ALGORITHM_BIT, + + VMA_VIRTUAL_BLOCK_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VmaVirtualBlockCreateFlagBits; +/// Flags to be passed as VmaVirtualBlockCreateInfo::flags. See #VmaVirtualBlockCreateFlagBits. +typedef VkFlags VmaVirtualBlockCreateFlags; + +/// Flags to be passed as VmaVirtualAllocationCreateInfo::flags. +typedef enum VmaVirtualAllocationCreateFlagBits +{ + /** \brief Allocation will be created from upper stack in a double stack pool. + + This flag is only allowed for virtual blocks created with #VMA_VIRTUAL_BLOCK_CREATE_LINEAR_ALGORITHM_BIT flag. + */ + VMA_VIRTUAL_ALLOCATION_CREATE_UPPER_ADDRESS_BIT = VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT, + /** \brief Allocation strategy that tries to minimize memory usage. + */ + VMA_VIRTUAL_ALLOCATION_CREATE_STRATEGY_MIN_MEMORY_BIT = VMA_ALLOCATION_CREATE_STRATEGY_MIN_MEMORY_BIT, + /** \brief Allocation strategy that tries to minimize allocation time. + */ + VMA_VIRTUAL_ALLOCATION_CREATE_STRATEGY_MIN_TIME_BIT = VMA_ALLOCATION_CREATE_STRATEGY_MIN_TIME_BIT, + /** Allocation strategy that chooses always the lowest offset in available space. + This is not the most efficient strategy but achieves highly packed data. + */ + VMA_VIRTUAL_ALLOCATION_CREATE_STRATEGY_MIN_OFFSET_BIT = VMA_ALLOCATION_CREATE_STRATEGY_MIN_OFFSET_BIT, + /** \brief A bit mask to extract only `STRATEGY` bits from entire set of flags. + + These strategy flags are binary compatible with equivalent flags in #VmaAllocationCreateFlagBits. + */ + VMA_VIRTUAL_ALLOCATION_CREATE_STRATEGY_MASK = VMA_ALLOCATION_CREATE_STRATEGY_MASK, + + VMA_VIRTUAL_ALLOCATION_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VmaVirtualAllocationCreateFlagBits; +/// Flags to be passed as VmaVirtualAllocationCreateInfo::flags. See #VmaVirtualAllocationCreateFlagBits. 
+typedef VkFlags VmaVirtualAllocationCreateFlags; + +/** @} */ + +#endif // _VMA_ENUM_DECLARATIONS + +#ifndef _VMA_DATA_TYPES_DECLARATIONS + +/** +\addtogroup group_init +@{ */ + +/** \struct VmaAllocator +\brief Represents main object of this library initialized. + +Fill structure #VmaAllocatorCreateInfo and call function vmaCreateAllocator() to create it. +Call function vmaDestroyAllocator() to destroy it. + +It is recommended to create just one object of this type per `VkDevice` object, +right after Vulkan is initialized and keep it alive until before Vulkan device is destroyed. +*/ +VK_DEFINE_HANDLE(VmaAllocator) + +/** @} */ + +/** +\addtogroup group_alloc +@{ +*/ + +/** \struct VmaPool +\brief Represents custom memory pool + +Fill structure VmaPoolCreateInfo and call function vmaCreatePool() to create it. +Call function vmaDestroyPool() to destroy it. + +For more information see [Custom memory pools](@ref choosing_memory_type_custom_memory_pools). +*/ +VK_DEFINE_HANDLE(VmaPool) + +/** \struct VmaAllocation +\brief Represents single memory allocation. + +It may be either dedicated block of `VkDeviceMemory` or a specific region of a bigger block of this type +plus unique offset. + +There are multiple ways to create such object. +You need to fill structure VmaAllocationCreateInfo. +For more information see [Choosing memory type](@ref choosing_memory_type). + +Although the library provides convenience functions that create Vulkan buffer or image, +allocate memory for it and bind them together, +binding of the allocation to a buffer or an image is out of scope of the allocation itself. +Allocation object can exist without buffer/image bound, +binding can be done manually by the user, and destruction of it can be done +independently of destruction of the allocation. + +The object also remembers its size and some other information. +To retrieve this information, use function vmaGetAllocationInfo() and inspect +returned structure VmaAllocationInfo. +*/ +VK_DEFINE_HANDLE(VmaAllocation) + +/** \struct VmaDefragmentationContext +\brief An opaque object that represents started defragmentation process. + +Fill structure #VmaDefragmentationInfo and call function vmaBeginDefragmentation() to create it. +Call function vmaEndDefragmentation() to destroy it. +*/ +VK_DEFINE_HANDLE(VmaDefragmentationContext) + +/** @} */ + +/** +\addtogroup group_virtual +@{ +*/ + +/** \struct VmaVirtualAllocation +\brief Represents single memory allocation done inside VmaVirtualBlock. + +Use it as a unique identifier to virtual allocation within the single block. + +Use value `VK_NULL_HANDLE` to represent a null/invalid allocation. +*/ +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VmaVirtualAllocation) + +/** @} */ + +/** +\addtogroup group_virtual +@{ +*/ + +/** \struct VmaVirtualBlock +\brief Handle to a virtual block object that allows to use core allocation algorithm without allocating any real GPU memory. + +Fill in #VmaVirtualBlockCreateInfo structure and use vmaCreateVirtualBlock() to create it. Use vmaDestroyVirtualBlock() to destroy it. +For more information, see documentation chapter \ref virtual_allocator. + +This object is not thread-safe - should not be used from multiple threads simultaneously, must be synchronized externally. +*/ +VK_DEFINE_HANDLE(VmaVirtualBlock) + +/** @} */ + +/** +\addtogroup group_init +@{ +*/ + +/// Callback function called after successful vkAllocateMemory. 
+typedef void (VKAPI_PTR* PFN_vmaAllocateDeviceMemoryFunction)( + VmaAllocator VMA_NOT_NULL allocator, + uint32_t memoryType, + VkDeviceMemory VMA_NOT_NULL_NON_DISPATCHABLE memory, + VkDeviceSize size, + void* VMA_NULLABLE pUserData); + +/// Callback function called before vkFreeMemory. +typedef void (VKAPI_PTR* PFN_vmaFreeDeviceMemoryFunction)( + VmaAllocator VMA_NOT_NULL allocator, + uint32_t memoryType, + VkDeviceMemory VMA_NOT_NULL_NON_DISPATCHABLE memory, + VkDeviceSize size, + void* VMA_NULLABLE pUserData); + +/** \brief Set of callbacks that the library will call for `vkAllocateMemory` and `vkFreeMemory`. + +Provided for informative purpose, e.g. to gather statistics about number of +allocations or total amount of memory allocated in Vulkan. + +Used in VmaAllocatorCreateInfo::pDeviceMemoryCallbacks. +*/ +typedef struct VmaDeviceMemoryCallbacks +{ + /// Optional, can be null. + PFN_vmaAllocateDeviceMemoryFunction VMA_NULLABLE pfnAllocate; + /// Optional, can be null. + PFN_vmaFreeDeviceMemoryFunction VMA_NULLABLE pfnFree; + /// Optional, can be null. + void* VMA_NULLABLE pUserData; +} VmaDeviceMemoryCallbacks; + +/** \brief Pointers to some Vulkan functions - a subset used by the library. + +Used in VmaAllocatorCreateInfo::pVulkanFunctions. +*/ +typedef struct VmaVulkanFunctions +{ + /// Required when using VMA_DYNAMIC_VULKAN_FUNCTIONS. + PFN_vkGetInstanceProcAddr VMA_NULLABLE vkGetInstanceProcAddr; + /// Required when using VMA_DYNAMIC_VULKAN_FUNCTIONS. + PFN_vkGetDeviceProcAddr VMA_NULLABLE vkGetDeviceProcAddr; + PFN_vkGetPhysicalDeviceProperties VMA_NULLABLE vkGetPhysicalDeviceProperties; + PFN_vkGetPhysicalDeviceMemoryProperties VMA_NULLABLE vkGetPhysicalDeviceMemoryProperties; + PFN_vkAllocateMemory VMA_NULLABLE vkAllocateMemory; + PFN_vkFreeMemory VMA_NULLABLE vkFreeMemory; + PFN_vkMapMemory VMA_NULLABLE vkMapMemory; + PFN_vkUnmapMemory VMA_NULLABLE vkUnmapMemory; + PFN_vkFlushMappedMemoryRanges VMA_NULLABLE vkFlushMappedMemoryRanges; + PFN_vkInvalidateMappedMemoryRanges VMA_NULLABLE vkInvalidateMappedMemoryRanges; + PFN_vkBindBufferMemory VMA_NULLABLE vkBindBufferMemory; + PFN_vkBindImageMemory VMA_NULLABLE vkBindImageMemory; + PFN_vkGetBufferMemoryRequirements VMA_NULLABLE vkGetBufferMemoryRequirements; + PFN_vkGetImageMemoryRequirements VMA_NULLABLE vkGetImageMemoryRequirements; + PFN_vkCreateBuffer VMA_NULLABLE vkCreateBuffer; + PFN_vkDestroyBuffer VMA_NULLABLE vkDestroyBuffer; + PFN_vkCreateImage VMA_NULLABLE vkCreateImage; + PFN_vkDestroyImage VMA_NULLABLE vkDestroyImage; + PFN_vkCmdCopyBuffer VMA_NULLABLE vkCmdCopyBuffer; +#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000 + /// Fetch "vkGetBufferMemoryRequirements2" on Vulkan >= 1.1, fetch "vkGetBufferMemoryRequirements2KHR" when using VK_KHR_dedicated_allocation extension. + PFN_vkGetBufferMemoryRequirements2KHR VMA_NULLABLE vkGetBufferMemoryRequirements2KHR; + /// Fetch "vkGetImageMemoryRequirements2" on Vulkan >= 1.1, fetch "vkGetImageMemoryRequirements2KHR" when using VK_KHR_dedicated_allocation extension. + PFN_vkGetImageMemoryRequirements2KHR VMA_NULLABLE vkGetImageMemoryRequirements2KHR; +#endif +#if VMA_BIND_MEMORY2 || VMA_VULKAN_VERSION >= 1001000 + /// Fetch "vkBindBufferMemory2" on Vulkan >= 1.1, fetch "vkBindBufferMemory2KHR" when using VK_KHR_bind_memory2 extension. + PFN_vkBindBufferMemory2KHR VMA_NULLABLE vkBindBufferMemory2KHR; + /// Fetch "vkBindImageMemory2" on Vulkan >= 1.1, fetch "vkBindImageMemory2KHR" when using VK_KHR_bind_memory2 extension. 
+ PFN_vkBindImageMemory2KHR VMA_NULLABLE vkBindImageMemory2KHR; +#endif +#if VMA_MEMORY_BUDGET || VMA_VULKAN_VERSION >= 1001000 + /// Fetch from "vkGetPhysicalDeviceMemoryProperties2" on Vulkan >= 1.1, but you can also fetch it from "vkGetPhysicalDeviceMemoryProperties2KHR" if you enabled extension VK_KHR_get_physical_device_properties2. + PFN_vkGetPhysicalDeviceMemoryProperties2KHR VMA_NULLABLE vkGetPhysicalDeviceMemoryProperties2KHR; +#endif +#if VMA_KHR_MAINTENANCE4 || VMA_VULKAN_VERSION >= 1003000 + /// Fetch from "vkGetDeviceBufferMemoryRequirements" on Vulkan >= 1.3, but you can also fetch it from "vkGetDeviceBufferMemoryRequirementsKHR" if you enabled extension VK_KHR_maintenance4. + PFN_vkGetDeviceBufferMemoryRequirementsKHR VMA_NULLABLE vkGetDeviceBufferMemoryRequirements; + /// Fetch from "vkGetDeviceImageMemoryRequirements" on Vulkan >= 1.3, but you can also fetch it from "vkGetDeviceImageMemoryRequirementsKHR" if you enabled extension VK_KHR_maintenance4. + PFN_vkGetDeviceImageMemoryRequirementsKHR VMA_NULLABLE vkGetDeviceImageMemoryRequirements; +#endif +} VmaVulkanFunctions; + +/// Description of a Allocator to be created. +typedef struct VmaAllocatorCreateInfo +{ + /// Flags for created allocator. Use #VmaAllocatorCreateFlagBits enum. + VmaAllocatorCreateFlags flags; + /// Vulkan physical device. + /** It must be valid throughout whole lifetime of created allocator. */ + VkPhysicalDevice VMA_NOT_NULL physicalDevice; + /// Vulkan device. + /** It must be valid throughout whole lifetime of created allocator. */ + VkDevice VMA_NOT_NULL device; + /// Preferred size of a single `VkDeviceMemory` block to be allocated from large heaps > 1 GiB. Optional. + /** Set to 0 to use default, which is currently 256 MiB. */ + VkDeviceSize preferredLargeHeapBlockSize; + /// Custom CPU memory allocation callbacks. Optional. + /** Optional, can be null. When specified, will also be used for all CPU-side memory allocations. */ + const VkAllocationCallbacks* VMA_NULLABLE pAllocationCallbacks; + /// Informative callbacks for `vkAllocateMemory`, `vkFreeMemory`. Optional. + /** Optional, can be null. */ + const VmaDeviceMemoryCallbacks* VMA_NULLABLE pDeviceMemoryCallbacks; + /** \brief Either null or a pointer to an array of limits on maximum number of bytes that can be allocated out of particular Vulkan memory heap. + + If not NULL, it must be a pointer to an array of + `VkPhysicalDeviceMemoryProperties::memoryHeapCount` elements, defining limit on + maximum number of bytes that can be allocated out of particular Vulkan memory + heap. + + Any of the elements may be equal to `VK_WHOLE_SIZE`, which means no limit on that + heap. This is also the default in case of `pHeapSizeLimit` = NULL. + + If there is a limit defined for a heap: + + - If user tries to allocate more memory from that heap using this allocator, + the allocation fails with `VK_ERROR_OUT_OF_DEVICE_MEMORY`. + - If the limit is smaller than heap size reported in `VkMemoryHeap::size`, the + value of this limit will be reported instead when using vmaGetMemoryProperties(). + + Warning! Using this feature may not be equivalent to installing a GPU with + smaller amount of memory, because graphics driver doesn't necessary fail new + allocations with `VK_ERROR_OUT_OF_DEVICE_MEMORY` result when memory capacity is + exceeded. It may return success and just silently migrate some device memory + blocks to system RAM. This driver behavior can also be controlled using + VK_AMD_memory_overallocation_behavior extension. 
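    As an illustrative sketch only (the limited heap index, the 256 MiB cap, and the Vulkan version are hypothetical values chosen here for the example; `physicalDevice`, `device`, and `instance` are assumed to be valid handles), such a limit could be passed like this:

    \code
    #include <vector>

    VkPhysicalDeviceMemoryProperties memProps;
    vkGetPhysicalDeviceMemoryProperties(physicalDevice, &memProps);

    // No limit on any heap by default...
    std::vector<VkDeviceSize> heapSizeLimits(memProps.memoryHeapCount, VK_WHOLE_SIZE);
    // ...except a hypothetical 256 MiB cap on heap 0.
    heapSizeLimits[0] = 256ull * 1024 * 1024;

    VmaAllocatorCreateInfo allocatorCreateInfo = {};
    allocatorCreateInfo.physicalDevice = physicalDevice;
    allocatorCreateInfo.device = device;
    allocatorCreateInfo.instance = instance;
    allocatorCreateInfo.vulkanApiVersion = VK_API_VERSION_1_2;
    allocatorCreateInfo.pHeapSizeLimit = heapSizeLimits.data();

    VmaAllocator allocator;
    vmaCreateAllocator(&allocatorCreateInfo, &allocator);
    \endcode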
+ */ + const VkDeviceSize* VMA_NULLABLE VMA_LEN_IF_NOT_NULL("VkPhysicalDeviceMemoryProperties::memoryHeapCount") pHeapSizeLimit; + + /** \brief Pointers to Vulkan functions. Can be null. + + For details see [Pointers to Vulkan functions](@ref config_Vulkan_functions). + */ + const VmaVulkanFunctions* VMA_NULLABLE pVulkanFunctions; + /** \brief Handle to Vulkan instance object. + + Starting from version 3.0.0 this member is no longer optional, it must be set! + */ + VkInstance VMA_NOT_NULL instance; + /** \brief Optional. Vulkan version that the application uses. + + It must be a value in the format as created by macro `VK_MAKE_VERSION` or a constant like: `VK_API_VERSION_1_1`, `VK_API_VERSION_1_0`. + The patch version number specified is ignored. Only the major and minor versions are considered. + Only versions 1.0, 1.1, 1.2, 1.3 are supported by the current implementation. + Leaving it initialized to zero is equivalent to `VK_API_VERSION_1_0`. + It must match the Vulkan version used by the application and supported on the selected physical device, + so it must be no higher than `VkApplicationInfo::apiVersion` passed to `vkCreateInstance` + and no higher than `VkPhysicalDeviceProperties::apiVersion` found on the physical device used. + */ + uint32_t vulkanApiVersion; +#if VMA_EXTERNAL_MEMORY + /** \brief Either null or a pointer to an array of external memory handle types for each Vulkan memory type. + + If not NULL, it must be a pointer to an array of `VkPhysicalDeviceMemoryProperties::memoryTypeCount` + elements, defining external memory handle types of particular Vulkan memory type, + to be passed using `VkExportMemoryAllocateInfoKHR`. + + Any of the elements may be equal to 0, which means not to use `VkExportMemoryAllocateInfoKHR` on this memory type. + This is also the default in case of `pTypeExternalMemoryHandleTypes` = NULL. + */ + const VkExternalMemoryHandleTypeFlagsKHR* VMA_NULLABLE VMA_LEN_IF_NOT_NULL("VkPhysicalDeviceMemoryProperties::memoryTypeCount") pTypeExternalMemoryHandleTypes; +#endif // #if VMA_EXTERNAL_MEMORY +} VmaAllocatorCreateInfo; + +/// Information about existing #VmaAllocator object. +typedef struct VmaAllocatorInfo +{ + /** \brief Handle to Vulkan instance object. + + This is the same value as has been passed through VmaAllocatorCreateInfo::instance. + */ + VkInstance VMA_NOT_NULL instance; + /** \brief Handle to Vulkan physical device object. + + This is the same value as has been passed through VmaAllocatorCreateInfo::physicalDevice. + */ + VkPhysicalDevice VMA_NOT_NULL physicalDevice; + /** \brief Handle to Vulkan device object. + + This is the same value as has been passed through VmaAllocatorCreateInfo::device. + */ + VkDevice VMA_NOT_NULL device; +} VmaAllocatorInfo; + +/** @} */ + +/** +\addtogroup group_stats +@{ +*/ + +/** \brief Calculated statistics of memory usage e.g. in a specific memory type, heap, custom pool, or total. + +These are fast to calculate. +See functions: vmaGetHeapBudgets(), vmaGetPoolStatistics(). +*/ +typedef struct VmaStatistics +{ + /** \brief Number of `VkDeviceMemory` objects - Vulkan memory blocks allocated. + */ + uint32_t blockCount; + /** \brief Number of #VmaAllocation objects allocated. + + Dedicated allocations have their own blocks, so each one adds 1 to `allocationCount` as well as `blockCount`. + */ + uint32_t allocationCount; + /** \brief Number of bytes allocated in `VkDeviceMemory` blocks. 
+ + \note To avoid confusion, please be aware that what Vulkan calls an "allocation" - a whole `VkDeviceMemory` object + (e.g. as in `VkPhysicalDeviceLimits::maxMemoryAllocationCount`) is called a "block" in VMA, while VMA calls + "allocation" a #VmaAllocation object that represents a memory region sub-allocated from such block, usually for a single buffer or image. + */ + VkDeviceSize blockBytes; + /** \brief Total number of bytes occupied by all #VmaAllocation objects. + + Always less or equal than `blockBytes`. + Difference `(blockBytes - allocationBytes)` is the amount of memory allocated from Vulkan + but unused by any #VmaAllocation. + */ + VkDeviceSize allocationBytes; +} VmaStatistics; + +/** \brief More detailed statistics than #VmaStatistics. + +These are slower to calculate. Use for debugging purposes. +See functions: vmaCalculateStatistics(), vmaCalculatePoolStatistics(). + +Previous version of the statistics API provided averages, but they have been removed +because they can be easily calculated as: + +\code +VkDeviceSize allocationSizeAvg = detailedStats.statistics.allocationBytes / detailedStats.statistics.allocationCount; +VkDeviceSize unusedBytes = detailedStats.statistics.blockBytes - detailedStats.statistics.allocationBytes; +VkDeviceSize unusedRangeSizeAvg = unusedBytes / detailedStats.unusedRangeCount; +\endcode +*/ +typedef struct VmaDetailedStatistics +{ + /// Basic statistics. + VmaStatistics statistics; + /// Number of free ranges of memory between allocations. + uint32_t unusedRangeCount; + /// Smallest allocation size. `VK_WHOLE_SIZE` if there are 0 allocations. + VkDeviceSize allocationSizeMin; + /// Largest allocation size. 0 if there are 0 allocations. + VkDeviceSize allocationSizeMax; + /// Smallest empty range size. `VK_WHOLE_SIZE` if there are 0 empty ranges. + VkDeviceSize unusedRangeSizeMin; + /// Largest empty range size. 0 if there are 0 empty ranges. + VkDeviceSize unusedRangeSizeMax; +} VmaDetailedStatistics; + +/** \brief General statistics from current state of the Allocator - +total memory usage across all memory heaps and types. + +These are slower to calculate. Use for debugging purposes. +See function vmaCalculateStatistics(). +*/ +typedef struct VmaTotalStatistics +{ + VmaDetailedStatistics memoryType[VK_MAX_MEMORY_TYPES]; + VmaDetailedStatistics memoryHeap[VK_MAX_MEMORY_HEAPS]; + VmaDetailedStatistics total; +} VmaTotalStatistics; + +/** \brief Statistics of current memory usage and available budget for a specific memory heap. + +These are fast to calculate. +See function vmaGetHeapBudgets(). +*/ +typedef struct VmaBudget +{ + /** \brief Statistics fetched from the library. + */ + VmaStatistics statistics; + /** \brief Estimated current memory usage of the program, in bytes. + + Fetched from system using VK_EXT_memory_budget extension if enabled. + + It might be different than `statistics.blockBytes` (usually higher) due to additional implicit objects + also occupying the memory, like swapchain, pipelines, descriptor heaps, command buffers, or + `VkDeviceMemory` blocks allocated outside of this library, if any. + */ + VkDeviceSize usage; + /** \brief Estimated amount of memory available to the program, in bytes. + + Fetched from system using VK_EXT_memory_budget extension if enabled. + + It might be different (most probably smaller) than `VkMemoryHeap::size[heapIndex]` due to factors + external to the program, decided by the operating system. 
+ Difference `budget - usage` is the amount of additional memory that can probably + be allocated without problems. Exceeding the budget may result in various problems. + */ + VkDeviceSize budget; +} VmaBudget; + +/** @} */ + +/** +\addtogroup group_alloc +@{ +*/ + +/** \brief Parameters of new #VmaAllocation. + +To be used with functions like vmaCreateBuffer(), vmaCreateImage(), and many others. +*/ +typedef struct VmaAllocationCreateInfo +{ + /// Use #VmaAllocationCreateFlagBits enum. + VmaAllocationCreateFlags flags; + /** \brief Intended usage of memory. + + You can leave #VMA_MEMORY_USAGE_UNKNOWN if you specify memory requirements in other way. \n + If `pool` is not null, this member is ignored. + */ + VmaMemoryUsage usage; + /** \brief Flags that must be set in a Memory Type chosen for an allocation. + + Leave 0 if you specify memory requirements in other way. \n + If `pool` is not null, this member is ignored.*/ + VkMemoryPropertyFlags requiredFlags; + /** \brief Flags that preferably should be set in a memory type chosen for an allocation. + + Set to 0 if no additional flags are preferred. \n + If `pool` is not null, this member is ignored. */ + VkMemoryPropertyFlags preferredFlags; + /** \brief Bitmask containing one bit set for every memory type acceptable for this allocation. + + Value 0 is equivalent to `UINT32_MAX` - it means any memory type is accepted if + it meets other requirements specified by this structure, with no further + restrictions on memory type index. \n + If `pool` is not null, this member is ignored. + */ + uint32_t memoryTypeBits; + /** \brief Pool that this allocation should be created in. + + Leave `VK_NULL_HANDLE` to allocate from default pool. If not null, members: + `usage`, `requiredFlags`, `preferredFlags`, `memoryTypeBits` are ignored. + */ + VmaPool VMA_NULLABLE pool; + /** \brief Custom general-purpose pointer that will be stored in #VmaAllocation, can be read as VmaAllocationInfo::pUserData and changed using vmaSetAllocationUserData(). + + If #VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT is used, it must be either + null or pointer to a null-terminated string. The string will be then copied to + internal buffer, so it doesn't need to be valid after allocation call. + */ + void* VMA_NULLABLE pUserData; + /** \brief A floating-point value between 0 and 1, indicating the priority of the allocation relative to other memory allocations. + + It is used only when #VMA_ALLOCATOR_CREATE_EXT_MEMORY_PRIORITY_BIT flag was used during creation of the #VmaAllocator object + and this allocation ends up as dedicated or is explicitly forced as dedicated using #VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT. + Otherwise, it has the priority of a memory block where it is placed and this variable is ignored. + */ + float priority; +} VmaAllocationCreateInfo; + +/// Describes parameter of created #VmaPool. +typedef struct VmaPoolCreateInfo +{ + /** \brief Vulkan memory type index to allocate this pool from. + */ + uint32_t memoryTypeIndex; + /** \brief Use combination of #VmaPoolCreateFlagBits. + */ + VmaPoolCreateFlags flags; + /** \brief Size of a single `VkDeviceMemory` block to be allocated as part of this pool, in bytes. Optional. + + Specify nonzero to set explicit, constant size of memory blocks used by this + pool. + + Leave 0 to use default and let the library manage block sizes automatically. + Sizes of particular blocks may vary. + In this case, the pool will also support dedicated allocations. 
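+
+    For illustration, a minimal sketch of a custom pool using a fixed 16 MiB block size
+    (`memTypeIndex` is an assumption of the example, e.g. found beforehand with
+    vmaFindMemoryTypeIndexForBufferInfo()):
+
+    \code
+    VmaPoolCreateInfo poolCreateInfo = {};
+    poolCreateInfo.memoryTypeIndex = memTypeIndex;
+    poolCreateInfo.blockSize = 16ull * 1024 * 1024; // fixed block size
+    poolCreateInfo.minBlockCount = 1;
+
+    VmaPool pool;
+    VkResult res = vmaCreatePool(allocator, &poolCreateInfo, &pool);
+    \endcode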
+ */ + VkDeviceSize blockSize; + /** \brief Minimum number of blocks to be always allocated in this pool, even if they stay empty. + + Set to 0 to have no preallocated blocks and allow the pool be completely empty. + */ + size_t minBlockCount; + /** \brief Maximum number of blocks that can be allocated in this pool. Optional. + + Set to 0 to use default, which is `SIZE_MAX`, which means no limit. + + Set to same value as VmaPoolCreateInfo::minBlockCount to have fixed amount of memory allocated + throughout whole lifetime of this pool. + */ + size_t maxBlockCount; + /** \brief A floating-point value between 0 and 1, indicating the priority of the allocations in this pool relative to other memory allocations. + + It is used only when #VMA_ALLOCATOR_CREATE_EXT_MEMORY_PRIORITY_BIT flag was used during creation of the #VmaAllocator object. + Otherwise, this variable is ignored. + */ + float priority; + /** \brief Additional minimum alignment to be used for all allocations created from this pool. Can be 0. + + Leave 0 (default) not to impose any additional alignment. If not 0, it must be a power of two. + It can be useful in cases where alignment returned by Vulkan by functions like `vkGetBufferMemoryRequirements` is not enough, + e.g. when doing interop with OpenGL. + */ + VkDeviceSize minAllocationAlignment; + /** \brief Additional `pNext` chain to be attached to `VkMemoryAllocateInfo` used for every allocation made by this pool. Optional. + + Optional, can be null. If not null, it must point to a `pNext` chain of structures that can be attached to `VkMemoryAllocateInfo`. + It can be useful for special needs such as adding `VkExportMemoryAllocateInfoKHR`. + Structures pointed by this member must remain alive and unchanged for the whole lifetime of the custom pool. + + Please note that some structures, e.g. `VkMemoryPriorityAllocateInfoEXT`, `VkMemoryDedicatedAllocateInfoKHR`, + can be attached automatically by this library when using other, more convenient of its features. + */ + void* VMA_NULLABLE VMA_EXTENDS_VK_STRUCT(VkMemoryAllocateInfo) pMemoryAllocateNext; +} VmaPoolCreateInfo; + +/** @} */ + +/** +\addtogroup group_alloc +@{ +*/ + +/** +Parameters of #VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo(). + +There is also an extended version of this structure that carries additional parameters: #VmaAllocationInfo2. +*/ +typedef struct VmaAllocationInfo +{ + /** \brief Memory type index that this allocation was allocated from. + + It never changes. + */ + uint32_t memoryType; + /** \brief Handle to Vulkan memory object. + + Same memory object can be shared by multiple allocations. + + It can change after the allocation is moved during \ref defragmentation. + */ + VkDeviceMemory VMA_NULLABLE_NON_DISPATCHABLE deviceMemory; + /** \brief Offset in `VkDeviceMemory` object to the beginning of this allocation, in bytes. `(deviceMemory, offset)` pair is unique to this allocation. + + You usually don't need to use this offset. If you create a buffer or an image together with the allocation using e.g. function + vmaCreateBuffer(), vmaCreateImage(), functions that operate on these resources refer to the beginning of the buffer or image, + not entire device memory block. Functions like vmaMapMemory(), vmaBindBufferMemory() also refer to the beginning of the allocation + and apply this offset automatically. + + It can change after the allocation is moved during \ref defragmentation. + */ + VkDeviceSize offset; + /** \brief Size of this allocation, in bytes. 
+ + It never changes. + + \note Allocation size returned in this variable may be greater than the size + requested for the resource e.g. as `VkBufferCreateInfo::size`. Whole size of the + allocation is accessible for operations on memory e.g. using a pointer after + mapping with vmaMapMemory(), but operations on the resource e.g. using + `vkCmdCopyBuffer` must be limited to the size of the resource. + */ + VkDeviceSize size; + /** \brief Pointer to the beginning of this allocation as mapped data. + + If the allocation hasn't been mapped using vmaMapMemory() and hasn't been + created with #VMA_ALLOCATION_CREATE_MAPPED_BIT flag, this value is null. + + It can change after call to vmaMapMemory(), vmaUnmapMemory(). + It can also change after the allocation is moved during \ref defragmentation. + */ + void* VMA_NULLABLE pMappedData; + /** \brief Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vmaSetAllocationUserData(). + + It can change after call to vmaSetAllocationUserData() for this allocation. + */ + void* VMA_NULLABLE pUserData; + /** \brief Custom allocation name that was set with vmaSetAllocationName(). + + It can change after call to vmaSetAllocationName() for this allocation. + + Another way to set custom name is to pass it in VmaAllocationCreateInfo::pUserData with + additional flag #VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT set [DEPRECATED]. + */ + const char* VMA_NULLABLE pName; +} VmaAllocationInfo; + +/// Extended parameters of a #VmaAllocation object that can be retrieved using function vmaGetAllocationInfo2(). +typedef struct VmaAllocationInfo2 +{ + /** \brief Basic parameters of the allocation. + + If you need only these, you can use function vmaGetAllocationInfo() and structure #VmaAllocationInfo instead. + */ + VmaAllocationInfo allocationInfo; + /** \brief Size of the `VkDeviceMemory` block that the allocation belongs to. + + In case of an allocation with dedicated memory, it will be equal to `allocationInfo.size`. + */ + VkDeviceSize blockSize; + /** \brief `VK_TRUE` if the allocation has dedicated memory, `VK_FALSE` if it was placed as part of a larger memory block. + + When `VK_TRUE`, it also means `VkMemoryDedicatedAllocateInfo` was used when creating the allocation + (if VK_KHR_dedicated_allocation extension or Vulkan version >= 1.1 is enabled). + */ + VkBool32 dedicatedMemory; +} VmaAllocationInfo2; + +/** Callback function called during vmaBeginDefragmentation() to check custom criterion about ending current defragmentation pass. + +Should return true if the defragmentation needs to stop current pass. +*/ +typedef VkBool32 (VKAPI_PTR* PFN_vmaCheckDefragmentationBreakFunction)(void* VMA_NULLABLE pUserData); + +/** \brief Parameters for defragmentation. + +To be used with function vmaBeginDefragmentation(). +*/ +typedef struct VmaDefragmentationInfo +{ + /// \brief Use combination of #VmaDefragmentationFlagBits. + VmaDefragmentationFlags flags; + /** \brief Custom pool to be defragmented. + + If null then default pools will undergo defragmentation process. + */ + VmaPool VMA_NULLABLE pool; + /** \brief Maximum numbers of bytes that can be copied during single pass, while moving allocations to different places. + + `0` means no limit. + */ + VkDeviceSize maxBytesPerPass; + /** \brief Maximum number of allocations that can be moved during single pass to a different place. + + `0` means no limit. + */ + uint32_t maxAllocationsPerPass; + /** \brief Optional custom callback for stopping vmaBeginDefragmentation(). 
+ + Have to return true for breaking current defragmentation pass. + */ + PFN_vmaCheckDefragmentationBreakFunction VMA_NULLABLE pfnBreakCallback; + /// \brief Optional data to pass to custom callback for stopping pass of defragmentation. + void* VMA_NULLABLE pBreakCallbackUserData; +} VmaDefragmentationInfo; + +/// Single move of an allocation to be done for defragmentation. +typedef struct VmaDefragmentationMove +{ + /// Operation to be performed on the allocation by vmaEndDefragmentationPass(). Default value is #VMA_DEFRAGMENTATION_MOVE_OPERATION_COPY. You can modify it. + VmaDefragmentationMoveOperation operation; + /// Allocation that should be moved. + VmaAllocation VMA_NOT_NULL srcAllocation; + /** \brief Temporary allocation pointing to destination memory that will replace `srcAllocation`. + + \warning Do not store this allocation in your data structures! It exists only temporarily, for the duration of the defragmentation pass, + to be used for binding new buffer/image to the destination memory using e.g. vmaBindBufferMemory(). + vmaEndDefragmentationPass() will destroy it and make `srcAllocation` point to this memory. + */ + VmaAllocation VMA_NOT_NULL dstTmpAllocation; +} VmaDefragmentationMove; + +/** \brief Parameters for incremental defragmentation steps. + +To be used with function vmaBeginDefragmentationPass(). +*/ +typedef struct VmaDefragmentationPassMoveInfo +{ + /// Number of elements in the `pMoves` array. + uint32_t moveCount; + /** \brief Array of moves to be performed by the user in the current defragmentation pass. + + Pointer to an array of `moveCount` elements, owned by VMA, created in vmaBeginDefragmentationPass(), destroyed in vmaEndDefragmentationPass(). + + For each element, you should: + + 1. Create a new buffer/image in the place pointed by VmaDefragmentationMove::dstMemory + VmaDefragmentationMove::dstOffset. + 2. Copy data from the VmaDefragmentationMove::srcAllocation e.g. using `vkCmdCopyBuffer`, `vkCmdCopyImage`. + 3. Make sure these commands finished executing on the GPU. + 4. Destroy the old buffer/image. + + Only then you can finish defragmentation pass by calling vmaEndDefragmentationPass(). + After this call, the allocation will point to the new place in memory. + + Alternatively, if you cannot move specific allocation, you can set VmaDefragmentationMove::operation to #VMA_DEFRAGMENTATION_MOVE_OPERATION_IGNORE. + + Alternatively, if you decide you want to completely remove the allocation: + + 1. Destroy its buffer/image. + 2. Set VmaDefragmentationMove::operation to #VMA_DEFRAGMENTATION_MOVE_OPERATION_DESTROY. + + Then, after vmaEndDefragmentationPass() the allocation will be freed. + */ + VmaDefragmentationMove* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(moveCount) pMoves; +} VmaDefragmentationPassMoveInfo; + +/// Statistics returned for defragmentation process in function vmaEndDefragmentation(). +typedef struct VmaDefragmentationStats +{ + /// Total number of bytes that have been copied while moving allocations to different places. + VkDeviceSize bytesMoved; + /// Total number of bytes that have been released to the system by freeing empty `VkDeviceMemory` objects. + VkDeviceSize bytesFreed; + /// Number of allocations that have been moved to different places. + uint32_t allocationsMoved; + /// Number of empty `VkDeviceMemory` objects that have been released to the system. 
+ uint32_t deviceMemoryBlocksFreed; +} VmaDefragmentationStats; + +/** @} */ + +/** +\addtogroup group_virtual +@{ +*/ + +/// Parameters of created #VmaVirtualBlock object to be passed to vmaCreateVirtualBlock(). +typedef struct VmaVirtualBlockCreateInfo +{ + /** \brief Total size of the virtual block. + + Sizes can be expressed in bytes or any units you want as long as you are consistent in using them. + For example, if you allocate from some array of structures, 1 can mean single instance of entire structure. + */ + VkDeviceSize size; + + /** \brief Use combination of #VmaVirtualBlockCreateFlagBits. + */ + VmaVirtualBlockCreateFlags flags; + + /** \brief Custom CPU memory allocation callbacks. Optional. + + Optional, can be null. When specified, they will be used for all CPU-side memory allocations. + */ + const VkAllocationCallbacks* VMA_NULLABLE pAllocationCallbacks; +} VmaVirtualBlockCreateInfo; + +/// Parameters of created virtual allocation to be passed to vmaVirtualAllocate(). +typedef struct VmaVirtualAllocationCreateInfo +{ + /** \brief Size of the allocation. + + Cannot be zero. + */ + VkDeviceSize size; + /** \brief Required alignment of the allocation. Optional. + + Must be power of two. Special value 0 has the same meaning as 1 - means no special alignment is required, so allocation can start at any offset. + */ + VkDeviceSize alignment; + /** \brief Use combination of #VmaVirtualAllocationCreateFlagBits. + */ + VmaVirtualAllocationCreateFlags flags; + /** \brief Custom pointer to be associated with the allocation. Optional. + + It can be any value and can be used for user-defined purposes. It can be fetched or changed later. + */ + void* VMA_NULLABLE pUserData; +} VmaVirtualAllocationCreateInfo; + +/// Parameters of an existing virtual allocation, returned by vmaGetVirtualAllocationInfo(). +typedef struct VmaVirtualAllocationInfo +{ + /** \brief Offset of the allocation. + + Offset at which the allocation was made. + */ + VkDeviceSize offset; + /** \brief Size of the allocation. + + Same value as passed in VmaVirtualAllocationCreateInfo::size. + */ + VkDeviceSize size; + /** \brief Custom pointer associated with the allocation. + + Same value as passed in VmaVirtualAllocationCreateInfo::pUserData or to vmaSetVirtualAllocationUserData(). + */ + void* VMA_NULLABLE pUserData; +} VmaVirtualAllocationInfo; + +/** @} */ + +#endif // _VMA_DATA_TYPES_DECLARATIONS + +#ifndef _VMA_FUNCTION_HEADERS + +/** +\addtogroup group_init +@{ +*/ + +/// Creates #VmaAllocator object. +VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateAllocator( + const VmaAllocatorCreateInfo* VMA_NOT_NULL pCreateInfo, + VmaAllocator VMA_NULLABLE* VMA_NOT_NULL pAllocator); + +/// Destroys allocator object. +VMA_CALL_PRE void VMA_CALL_POST vmaDestroyAllocator( + VmaAllocator VMA_NULLABLE allocator); + +/** \brief Returns information about existing #VmaAllocator object - handle to Vulkan device etc. + +It might be useful if you want to keep just the #VmaAllocator handle and fetch other required handles to +`VkPhysicalDevice`, `VkDevice` etc. every time using this function. +*/ +VMA_CALL_PRE void VMA_CALL_POST vmaGetAllocatorInfo( + VmaAllocator VMA_NOT_NULL allocator, + VmaAllocatorInfo* VMA_NOT_NULL pAllocatorInfo); + +/** +PhysicalDeviceProperties are fetched from physicalDevice by the allocator. +You can access it here, without fetching it again on your own. 
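+
+For illustration, a minimal sketch (the allocator is assumed to have been created with
+vmaCreateAllocator() beforehand):
+
+\code
+const VkPhysicalDeviceProperties* props = nullptr;
+vmaGetPhysicalDeviceProperties(allocator, &props);
+VkDeviceSize atomSize = props->limits.nonCoherentAtomSize; // no extra Vulkan call needed
+\endcode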
+*/ +VMA_CALL_PRE void VMA_CALL_POST vmaGetPhysicalDeviceProperties( + VmaAllocator VMA_NOT_NULL allocator, + const VkPhysicalDeviceProperties* VMA_NULLABLE* VMA_NOT_NULL ppPhysicalDeviceProperties); + +/** +PhysicalDeviceMemoryProperties are fetched from physicalDevice by the allocator. +You can access it here, without fetching it again on your own. +*/ +VMA_CALL_PRE void VMA_CALL_POST vmaGetMemoryProperties( + VmaAllocator VMA_NOT_NULL allocator, + const VkPhysicalDeviceMemoryProperties* VMA_NULLABLE* VMA_NOT_NULL ppPhysicalDeviceMemoryProperties); + +/** +\brief Given Memory Type Index, returns Property Flags of this memory type. + +This is just a convenience function. Same information can be obtained using +vmaGetMemoryProperties(). +*/ +VMA_CALL_PRE void VMA_CALL_POST vmaGetMemoryTypeProperties( + VmaAllocator VMA_NOT_NULL allocator, + uint32_t memoryTypeIndex, + VkMemoryPropertyFlags* VMA_NOT_NULL pFlags); + +/** \brief Sets index of the current frame. +*/ +VMA_CALL_PRE void VMA_CALL_POST vmaSetCurrentFrameIndex( + VmaAllocator VMA_NOT_NULL allocator, + uint32_t frameIndex); + +/** @} */ + +/** +\addtogroup group_stats +@{ +*/ + +/** \brief Retrieves statistics from current state of the Allocator. + +This function is called "calculate" not "get" because it has to traverse all +internal data structures, so it may be quite slow. Use it for debugging purposes. +For faster but more brief statistics suitable to be called every frame or every allocation, +use vmaGetHeapBudgets(). + +Note that when using allocator from multiple threads, returned information may immediately +become outdated. +*/ +VMA_CALL_PRE void VMA_CALL_POST vmaCalculateStatistics( + VmaAllocator VMA_NOT_NULL allocator, + VmaTotalStatistics* VMA_NOT_NULL pStats); + +/** \brief Retrieves information about current memory usage and budget for all memory heaps. + +\param allocator +\param[out] pBudgets Must point to array with number of elements at least equal to number of memory heaps in physical device used. + +This function is called "get" not "calculate" because it is very fast, suitable to be called +every frame or every allocation. For more detailed statistics use vmaCalculateStatistics(). + +Note that when using allocator from multiple threads, returned information may immediately +become outdated. +*/ +VMA_CALL_PRE void VMA_CALL_POST vmaGetHeapBudgets( + VmaAllocator VMA_NOT_NULL allocator, + VmaBudget* VMA_NOT_NULL VMA_LEN_IF_NOT_NULL("VkPhysicalDeviceMemoryProperties::memoryHeapCount") pBudgets); + +/** @} */ + +/** +\addtogroup group_alloc +@{ +*/ + +/** +\brief Helps to find memoryTypeIndex, given memoryTypeBits and VmaAllocationCreateInfo. + +This algorithm tries to find a memory type that: + +- Is allowed by memoryTypeBits. +- Contains all the flags from pAllocationCreateInfo->requiredFlags. +- Matches intended usage. +- Has as many flags from pAllocationCreateInfo->preferredFlags as possible. + +\return Returns VK_ERROR_FEATURE_NOT_PRESENT if not found. Receiving such result +from this function or any other allocating function probably means that your +device doesn't support any memory type with requested features for the specific +type of resource you want to use it for. Please check parameters of your +resource, like image layout (OPTIMAL versus LINEAR) or mip level count. 
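+
+For illustration, a sketch that looks for a host-visible, host-coherent memory type
+(`memoryTypeBits` is an assumption of the example, normally taken from
+`vkGetBufferMemoryRequirements()` or a similar query):
+
+\code
+VmaAllocationCreateInfo allocCreateInfo = {};
+allocCreateInfo.requiredFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
+
+uint32_t memTypeIndex;
+VkResult res = vmaFindMemoryTypeIndex(allocator, memoryTypeBits, &allocCreateInfo, &memTypeIndex);
+\endcode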
+*/ +VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndex( + VmaAllocator VMA_NOT_NULL allocator, + uint32_t memoryTypeBits, + const VmaAllocationCreateInfo* VMA_NOT_NULL pAllocationCreateInfo, + uint32_t* VMA_NOT_NULL pMemoryTypeIndex); + +/** +\brief Helps to find memoryTypeIndex, given VkBufferCreateInfo and VmaAllocationCreateInfo. + +It can be useful e.g. to determine value to be used as VmaPoolCreateInfo::memoryTypeIndex. +It internally creates a temporary, dummy buffer that never has memory bound. +*/ +VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndexForBufferInfo( + VmaAllocator VMA_NOT_NULL allocator, + const VkBufferCreateInfo* VMA_NOT_NULL pBufferCreateInfo, + const VmaAllocationCreateInfo* VMA_NOT_NULL pAllocationCreateInfo, + uint32_t* VMA_NOT_NULL pMemoryTypeIndex); + +/** +\brief Helps to find memoryTypeIndex, given VkImageCreateInfo and VmaAllocationCreateInfo. + +It can be useful e.g. to determine value to be used as VmaPoolCreateInfo::memoryTypeIndex. +It internally creates a temporary, dummy image that never has memory bound. +*/ +VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndexForImageInfo( + VmaAllocator VMA_NOT_NULL allocator, + const VkImageCreateInfo* VMA_NOT_NULL pImageCreateInfo, + const VmaAllocationCreateInfo* VMA_NOT_NULL pAllocationCreateInfo, + uint32_t* VMA_NOT_NULL pMemoryTypeIndex); + +/** \brief Allocates Vulkan device memory and creates #VmaPool object. + +\param allocator Allocator object. +\param pCreateInfo Parameters of pool to create. +\param[out] pPool Handle to created pool. +*/ +VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreatePool( + VmaAllocator VMA_NOT_NULL allocator, + const VmaPoolCreateInfo* VMA_NOT_NULL pCreateInfo, + VmaPool VMA_NULLABLE* VMA_NOT_NULL pPool); + +/** \brief Destroys #VmaPool object and frees Vulkan device memory. +*/ +VMA_CALL_PRE void VMA_CALL_POST vmaDestroyPool( + VmaAllocator VMA_NOT_NULL allocator, + VmaPool VMA_NULLABLE pool); + +/** @} */ + +/** +\addtogroup group_stats +@{ +*/ + +/** \brief Retrieves statistics of existing #VmaPool object. + +\param allocator Allocator object. +\param pool Pool object. +\param[out] pPoolStats Statistics of specified pool. +*/ +VMA_CALL_PRE void VMA_CALL_POST vmaGetPoolStatistics( + VmaAllocator VMA_NOT_NULL allocator, + VmaPool VMA_NOT_NULL pool, + VmaStatistics* VMA_NOT_NULL pPoolStats); + +/** \brief Retrieves detailed statistics of existing #VmaPool object. + +\param allocator Allocator object. +\param pool Pool object. +\param[out] pPoolStats Statistics of specified pool. +*/ +VMA_CALL_PRE void VMA_CALL_POST vmaCalculatePoolStatistics( + VmaAllocator VMA_NOT_NULL allocator, + VmaPool VMA_NOT_NULL pool, + VmaDetailedStatistics* VMA_NOT_NULL pPoolStats); + +/** @} */ + +/** +\addtogroup group_alloc +@{ +*/ + +/** \brief Checks magic number in margins around all allocations in given memory pool in search for corruptions. + +Corruption detection is enabled only when `VMA_DEBUG_DETECT_CORRUPTION` macro is defined to nonzero, +`VMA_DEBUG_MARGIN` is defined to nonzero and the pool is created in memory type that is +`HOST_VISIBLE` and `HOST_COHERENT`. For more information, see [Corruption detection](@ref debugging_memory_usage_corruption_detection). + +Possible return values: + +- `VK_ERROR_FEATURE_NOT_PRESENT` - corruption detection is not enabled for specified pool. +- `VK_SUCCESS` - corruption detection has been performed and succeeded. +- `VK_ERROR_UNKNOWN` - corruption detection has been performed and found memory corruptions around one of the allocations. 
+ `VMA_ASSERT` is also fired in that case. +- Other value: Error returned by Vulkan, e.g. memory mapping failure. +*/ +VMA_CALL_PRE VkResult VMA_CALL_POST vmaCheckPoolCorruption( + VmaAllocator VMA_NOT_NULL allocator, + VmaPool VMA_NOT_NULL pool); + +/** \brief Retrieves name of a custom pool. + +After the call `ppName` is either null or points to an internally-owned null-terminated string +containing name of the pool that was previously set. The pointer becomes invalid when the pool is +destroyed or its name is changed using vmaSetPoolName(). +*/ +VMA_CALL_PRE void VMA_CALL_POST vmaGetPoolName( + VmaAllocator VMA_NOT_NULL allocator, + VmaPool VMA_NOT_NULL pool, + const char* VMA_NULLABLE* VMA_NOT_NULL ppName); + +/** \brief Sets name of a custom pool. + +`pName` can be either null or pointer to a null-terminated string with new name for the pool. +Function makes internal copy of the string, so it can be changed or freed immediately after this call. +*/ +VMA_CALL_PRE void VMA_CALL_POST vmaSetPoolName( + VmaAllocator VMA_NOT_NULL allocator, + VmaPool VMA_NOT_NULL pool, + const char* VMA_NULLABLE pName); + +/** \brief General purpose memory allocation. + +\param allocator +\param pVkMemoryRequirements +\param pCreateInfo +\param[out] pAllocation Handle to allocated memory. +\param[out] pAllocationInfo Optional. Information about allocated memory. It can be later fetched using function vmaGetAllocationInfo(). + +You should free the memory using vmaFreeMemory() or vmaFreeMemoryPages(). + +It is recommended to use vmaAllocateMemoryForBuffer(), vmaAllocateMemoryForImage(), +vmaCreateBuffer(), vmaCreateImage() instead whenever possible. +*/ +VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemory( + VmaAllocator VMA_NOT_NULL allocator, + const VkMemoryRequirements* VMA_NOT_NULL pVkMemoryRequirements, + const VmaAllocationCreateInfo* VMA_NOT_NULL pCreateInfo, + VmaAllocation VMA_NULLABLE* VMA_NOT_NULL pAllocation, + VmaAllocationInfo* VMA_NULLABLE pAllocationInfo); + +/** \brief General purpose memory allocation for multiple allocation objects at once. + +\param allocator Allocator object. +\param pVkMemoryRequirements Memory requirements for each allocation. +\param pCreateInfo Creation parameters for each allocation. +\param allocationCount Number of allocations to make. +\param[out] pAllocations Pointer to array that will be filled with handles to created allocations. +\param[out] pAllocationInfo Optional. Pointer to array that will be filled with parameters of created allocations. + +You should free the memory using vmaFreeMemory() or vmaFreeMemoryPages(). + +Word "pages" is just a suggestion to use this function to allocate pieces of memory needed for sparse binding. +It is just a general purpose allocation function able to make multiple allocations at once. +It may be internally optimized to be more efficient than calling vmaAllocateMemory() `allocationCount` times. + +All allocations are made using same parameters. All of them are created out of the same memory pool and type. +If any allocation fails, all allocations already made within this function call are also freed, so that when +returned result is not `VK_SUCCESS`, `pAllocation` array is always entirely filled with `VK_NULL_HANDLE`. 
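+
+For illustration, a sketch allocating four identical pages (`memReq` is an assumption of
+the example, queried from the real resource beforehand):
+
+\code
+const size_t pageCount = 4;
+
+VmaAllocationCreateInfo allocCreateInfo = {};
+allocCreateInfo.requiredFlags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
+
+std::vector<VkMemoryRequirements> memReqs(pageCount, memReq);
+std::vector<VmaAllocationCreateInfo> createInfos(pageCount, allocCreateInfo);
+std::vector<VmaAllocation> allocations(pageCount);
+
+VkResult res = vmaAllocateMemoryPages(allocator,
+    memReqs.data(), createInfos.data(), pageCount, allocations.data(), nullptr);
+// ... later ...
+vmaFreeMemoryPages(allocator, pageCount, allocations.data());
+\endcode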
+*/ +VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryPages( + VmaAllocator VMA_NOT_NULL allocator, + const VkMemoryRequirements* VMA_NOT_NULL VMA_LEN_IF_NOT_NULL(allocationCount) pVkMemoryRequirements, + const VmaAllocationCreateInfo* VMA_NOT_NULL VMA_LEN_IF_NOT_NULL(allocationCount) pCreateInfo, + size_t allocationCount, + VmaAllocation VMA_NULLABLE* VMA_NOT_NULL VMA_LEN_IF_NOT_NULL(allocationCount) pAllocations, + VmaAllocationInfo* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) pAllocationInfo); + +/** \brief Allocates memory suitable for given `VkBuffer`. + +\param allocator +\param buffer +\param pCreateInfo +\param[out] pAllocation Handle to allocated memory. +\param[out] pAllocationInfo Optional. Information about allocated memory. It can be later fetched using function vmaGetAllocationInfo(). + +It only creates #VmaAllocation. To bind the memory to the buffer, use vmaBindBufferMemory(). + +This is a special-purpose function. In most cases you should use vmaCreateBuffer(). + +You must free the allocation using vmaFreeMemory() when no longer needed. +*/ +VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryForBuffer( + VmaAllocator VMA_NOT_NULL allocator, + VkBuffer VMA_NOT_NULL_NON_DISPATCHABLE buffer, + const VmaAllocationCreateInfo* VMA_NOT_NULL pCreateInfo, + VmaAllocation VMA_NULLABLE* VMA_NOT_NULL pAllocation, + VmaAllocationInfo* VMA_NULLABLE pAllocationInfo); + +/** \brief Allocates memory suitable for given `VkImage`. + +\param allocator +\param image +\param pCreateInfo +\param[out] pAllocation Handle to allocated memory. +\param[out] pAllocationInfo Optional. Information about allocated memory. It can be later fetched using function vmaGetAllocationInfo(). + +It only creates #VmaAllocation. To bind the memory to the buffer, use vmaBindImageMemory(). + +This is a special-purpose function. In most cases you should use vmaCreateImage(). + +You must free the allocation using vmaFreeMemory() when no longer needed. +*/ +VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryForImage( + VmaAllocator VMA_NOT_NULL allocator, + VkImage VMA_NOT_NULL_NON_DISPATCHABLE image, + const VmaAllocationCreateInfo* VMA_NOT_NULL pCreateInfo, + VmaAllocation VMA_NULLABLE* VMA_NOT_NULL pAllocation, + VmaAllocationInfo* VMA_NULLABLE pAllocationInfo); + +/** \brief Frees memory previously allocated using vmaAllocateMemory(), vmaAllocateMemoryForBuffer(), or vmaAllocateMemoryForImage(). + +Passing `VK_NULL_HANDLE` as `allocation` is valid. Such function call is just skipped. +*/ +VMA_CALL_PRE void VMA_CALL_POST vmaFreeMemory( + VmaAllocator VMA_NOT_NULL allocator, + const VmaAllocation VMA_NULLABLE allocation); + +/** \brief Frees memory and destroys multiple allocations. + +Word "pages" is just a suggestion to use this function to free pieces of memory used for sparse binding. +It is just a general purpose function to free memory and destroy allocations made using e.g. vmaAllocateMemory(), +vmaAllocateMemoryPages() and other functions. +It may be internally optimized to be more efficient than calling vmaFreeMemory() `allocationCount` times. + +Allocations in `pAllocations` array can come from any memory pools and types. +Passing `VK_NULL_HANDLE` as elements of `pAllocations` array is valid. Such entries are just skipped. 
+*/ +VMA_CALL_PRE void VMA_CALL_POST vmaFreeMemoryPages( + VmaAllocator VMA_NOT_NULL allocator, + size_t allocationCount, + const VmaAllocation VMA_NULLABLE* VMA_NOT_NULL VMA_LEN_IF_NOT_NULL(allocationCount) pAllocations); + +/** \brief Returns current information about specified allocation. + +Current parameters of given allocation are returned in `pAllocationInfo`. + +Although this function doesn't lock any mutex, so it should be quite efficient, +you should avoid calling it too often. +You can retrieve same VmaAllocationInfo structure while creating your resource, from function +vmaCreateBuffer(), vmaCreateImage(). You can remember it if you are sure parameters don't change +(e.g. due to defragmentation). + +There is also a new function vmaGetAllocationInfo2() that offers extended information +about the allocation, returned using new structure #VmaAllocationInfo2. +*/ +VMA_CALL_PRE void VMA_CALL_POST vmaGetAllocationInfo( + VmaAllocator VMA_NOT_NULL allocator, + VmaAllocation VMA_NOT_NULL allocation, + VmaAllocationInfo* VMA_NOT_NULL pAllocationInfo); + +/** \brief Returns extended information about specified allocation. + +Current parameters of given allocation are returned in `pAllocationInfo`. +Extended parameters in structure #VmaAllocationInfo2 include memory block size +and a flag telling whether the allocation has dedicated memory. +It can be useful e.g. for interop with OpenGL. +*/ +VMA_CALL_PRE void VMA_CALL_POST vmaGetAllocationInfo2( + VmaAllocator VMA_NOT_NULL allocator, + VmaAllocation VMA_NOT_NULL allocation, + VmaAllocationInfo2* VMA_NOT_NULL pAllocationInfo); + +/** \brief Sets pUserData in given allocation to new value. + +The value of pointer `pUserData` is copied to allocation's `pUserData`. +It is opaque, so you can use it however you want - e.g. +as a pointer, ordinal number or some handle to you own data. +*/ +VMA_CALL_PRE void VMA_CALL_POST vmaSetAllocationUserData( + VmaAllocator VMA_NOT_NULL allocator, + VmaAllocation VMA_NOT_NULL allocation, + void* VMA_NULLABLE pUserData); + +/** \brief Sets pName in given allocation to new value. + +`pName` must be either null, or pointer to a null-terminated string. The function +makes local copy of the string and sets it as allocation's `pName`. String +passed as pName doesn't need to be valid for whole lifetime of the allocation - +you can free it after this call. String previously pointed by allocation's +`pName` is freed from memory. +*/ +VMA_CALL_PRE void VMA_CALL_POST vmaSetAllocationName( + VmaAllocator VMA_NOT_NULL allocator, + VmaAllocation VMA_NOT_NULL allocation, + const char* VMA_NULLABLE pName); + +/** +\brief Given an allocation, returns Property Flags of its memory type. + +This is just a convenience function. Same information can be obtained using +vmaGetAllocationInfo() + vmaGetMemoryProperties(). +*/ +VMA_CALL_PRE void VMA_CALL_POST vmaGetAllocationMemoryProperties( + VmaAllocator VMA_NOT_NULL allocator, + VmaAllocation VMA_NOT_NULL allocation, + VkMemoryPropertyFlags* VMA_NOT_NULL pFlags); + +/** \brief Maps memory represented by given allocation and returns pointer to it. + +Maps memory represented by given allocation to make it accessible to CPU code. +When succeeded, `*ppData` contains pointer to first byte of this memory. + +\warning +If the allocation is part of a bigger `VkDeviceMemory` block, returned pointer is +correctly offsetted to the beginning of region assigned to this particular allocation. 
+Unlike the result of `vkMapMemory`, it points to the allocation, not to the beginning of the whole block. +You should not add VmaAllocationInfo::offset to it! + +Mapping is internally reference-counted and synchronized, so despite raw Vulkan +function `vkMapMemory()` cannot be used to map same block of `VkDeviceMemory` +multiple times simultaneously, it is safe to call this function on allocations +assigned to the same memory block. Actual Vulkan memory will be mapped on first +mapping and unmapped on last unmapping. + +If the function succeeded, you must call vmaUnmapMemory() to unmap the +allocation when mapping is no longer needed or before freeing the allocation, at +the latest. + +It also safe to call this function multiple times on the same allocation. You +must call vmaUnmapMemory() same number of times as you called vmaMapMemory(). + +It is also safe to call this function on allocation created with +#VMA_ALLOCATION_CREATE_MAPPED_BIT flag. Its memory stays mapped all the time. +You must still call vmaUnmapMemory() same number of times as you called +vmaMapMemory(). You must not call vmaUnmapMemory() additional time to free the +"0-th" mapping made automatically due to #VMA_ALLOCATION_CREATE_MAPPED_BIT flag. + +This function fails when used on allocation made in memory type that is not +`HOST_VISIBLE`. + +This function doesn't automatically flush or invalidate caches. +If the allocation is made from a memory types that is not `HOST_COHERENT`, +you also need to use vmaInvalidateAllocation() / vmaFlushAllocation(), as required by Vulkan specification. +*/ +VMA_CALL_PRE VkResult VMA_CALL_POST vmaMapMemory( + VmaAllocator VMA_NOT_NULL allocator, + VmaAllocation VMA_NOT_NULL allocation, + void* VMA_NULLABLE* VMA_NOT_NULL ppData); + +/** \brief Unmaps memory represented by given allocation, mapped previously using vmaMapMemory(). + +For details, see description of vmaMapMemory(). + +This function doesn't automatically flush or invalidate caches. +If the allocation is made from a memory types that is not `HOST_COHERENT`, +you also need to use vmaInvalidateAllocation() / vmaFlushAllocation(), as required by Vulkan specification. +*/ +VMA_CALL_PRE void VMA_CALL_POST vmaUnmapMemory( + VmaAllocator VMA_NOT_NULL allocator, + VmaAllocation VMA_NOT_NULL allocation); + +/** \brief Flushes memory of given allocation. + +Calls `vkFlushMappedMemoryRanges()` for memory associated with given range of given allocation. +It needs to be called after writing to a mapped memory for memory types that are not `HOST_COHERENT`. +Unmap operation doesn't do that automatically. + +- `offset` must be relative to the beginning of allocation. +- `size` can be `VK_WHOLE_SIZE`. It means all memory from `offset` the the end of given allocation. +- `offset` and `size` don't have to be aligned. + They are internally rounded down/up to multiply of `nonCoherentAtomSize`. +- If `size` is 0, this call is ignored. +- If memory type that the `allocation` belongs to is not `HOST_VISIBLE` or it is `HOST_COHERENT`, + this call is ignored. + +Warning! `offset` and `size` are relative to the contents of given `allocation`. +If you mean whole allocation, you can pass 0 and `VK_WHOLE_SIZE`, respectively. +Do not pass allocation's offset as `offset`!!! + +This function returns the `VkResult` from `vkFlushMappedMemoryRanges` if it is +called, otherwise `VK_SUCCESS`. 
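+
+For illustration, a minimal sketch of the usual write pattern for a mappable allocation
+that may be non-`HOST_COHERENT` (`allocation`, `srcData` and `dataSize` are assumptions
+of the example):
+
+\code
+void* mapped = nullptr;
+if(vmaMapMemory(allocator, allocation, &mapped) == VK_SUCCESS)
+{
+    memcpy(mapped, srcData, dataSize);
+    vmaFlushAllocation(allocator, allocation, 0, VK_WHOLE_SIZE); // whole allocation
+    vmaUnmapMemory(allocator, allocation);
+}
+\endcode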
+*/ +VMA_CALL_PRE VkResult VMA_CALL_POST vmaFlushAllocation( + VmaAllocator VMA_NOT_NULL allocator, + VmaAllocation VMA_NOT_NULL allocation, + VkDeviceSize offset, + VkDeviceSize size); + +/** \brief Invalidates memory of given allocation. + +Calls `vkInvalidateMappedMemoryRanges()` for memory associated with given range of given allocation. +It needs to be called before reading from a mapped memory for memory types that are not `HOST_COHERENT`. +Map operation doesn't do that automatically. + +- `offset` must be relative to the beginning of allocation. +- `size` can be `VK_WHOLE_SIZE`. It means all memory from `offset` the the end of given allocation. +- `offset` and `size` don't have to be aligned. + They are internally rounded down/up to multiply of `nonCoherentAtomSize`. +- If `size` is 0, this call is ignored. +- If memory type that the `allocation` belongs to is not `HOST_VISIBLE` or it is `HOST_COHERENT`, + this call is ignored. + +Warning! `offset` and `size` are relative to the contents of given `allocation`. +If you mean whole allocation, you can pass 0 and `VK_WHOLE_SIZE`, respectively. +Do not pass allocation's offset as `offset`!!! + +This function returns the `VkResult` from `vkInvalidateMappedMemoryRanges` if +it is called, otherwise `VK_SUCCESS`. +*/ +VMA_CALL_PRE VkResult VMA_CALL_POST vmaInvalidateAllocation( + VmaAllocator VMA_NOT_NULL allocator, + VmaAllocation VMA_NOT_NULL allocation, + VkDeviceSize offset, + VkDeviceSize size); + +/** \brief Flushes memory of given set of allocations. + +Calls `vkFlushMappedMemoryRanges()` for memory associated with given ranges of given allocations. +For more information, see documentation of vmaFlushAllocation(). + +\param allocator +\param allocationCount +\param allocations +\param offsets If not null, it must point to an array of offsets of regions to flush, relative to the beginning of respective allocations. Null means all offsets are zero. +\param sizes If not null, it must point to an array of sizes of regions to flush in respective allocations. Null means `VK_WHOLE_SIZE` for all allocations. + +This function returns the `VkResult` from `vkFlushMappedMemoryRanges` if it is +called, otherwise `VK_SUCCESS`. +*/ +VMA_CALL_PRE VkResult VMA_CALL_POST vmaFlushAllocations( + VmaAllocator VMA_NOT_NULL allocator, + uint32_t allocationCount, + const VmaAllocation VMA_NOT_NULL* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) allocations, + const VkDeviceSize* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) offsets, + const VkDeviceSize* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) sizes); + +/** \brief Invalidates memory of given set of allocations. + +Calls `vkInvalidateMappedMemoryRanges()` for memory associated with given ranges of given allocations. +For more information, see documentation of vmaInvalidateAllocation(). + +\param allocator +\param allocationCount +\param allocations +\param offsets If not null, it must point to an array of offsets of regions to flush, relative to the beginning of respective allocations. Null means all offsets are zero. +\param sizes If not null, it must point to an array of sizes of regions to flush in respective allocations. Null means `VK_WHOLE_SIZE` for all allocations. + +This function returns the `VkResult` from `vkInvalidateMappedMemoryRanges` if it is +called, otherwise `VK_SUCCESS`. 
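+
+For illustration, a minimal sketch invalidating three allocations in one call
+(`allocA`, `allocB`, `allocC` are assumptions of the example):
+
+\code
+VmaAllocation allocs[] = { allocA, allocB, allocC };
+// Null offsets and sizes mean offset 0 and `VK_WHOLE_SIZE` for every allocation.
+VkResult res = vmaInvalidateAllocations(allocator, 3, allocs, nullptr, nullptr);
+\endcode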
+*/ +VMA_CALL_PRE VkResult VMA_CALL_POST vmaInvalidateAllocations( + VmaAllocator VMA_NOT_NULL allocator, + uint32_t allocationCount, + const VmaAllocation VMA_NOT_NULL* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) allocations, + const VkDeviceSize* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) offsets, + const VkDeviceSize* VMA_NULLABLE VMA_LEN_IF_NOT_NULL(allocationCount) sizes); + +/** \brief Maps the allocation temporarily if needed, copies data from specified host pointer to it, and flushes the memory from the host caches if needed. + +\param allocator +\param pSrcHostPointer Pointer to the host data that become source of the copy. +\param dstAllocation Handle to the allocation that becomes destination of the copy. +\param dstAllocationLocalOffset Offset within `dstAllocation` where to write copied data, in bytes. +\param size Number of bytes to copy. + +This is a convenience function that allows to copy data from a host pointer to an allocation easily. +Same behavior can be achieved by calling vmaMapMemory(), `memcpy()`, vmaUnmapMemory(), vmaFlushAllocation(). + +This function can be called only for allocations created in a memory type that has `VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT` flag. +It can be ensured e.g. by using #VMA_MEMORY_USAGE_AUTO and #VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT or +#VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT. +Otherwise, the function will fail and generate a Validation Layers error. + +`dstAllocationLocalOffset` is relative to the contents of given `dstAllocation`. +If you mean whole allocation, you should pass 0. +Do not pass allocation's offset within device memory block this parameter! +*/ +VMA_CALL_PRE VkResult VMA_CALL_POST vmaCopyMemoryToAllocation( + VmaAllocator VMA_NOT_NULL allocator, + const void* VMA_NOT_NULL VMA_LEN_IF_NOT_NULL(size) pSrcHostPointer, + VmaAllocation VMA_NOT_NULL dstAllocation, + VkDeviceSize dstAllocationLocalOffset, + VkDeviceSize size); + +/** \brief Invalidates memory in the host caches if needed, maps the allocation temporarily if needed, and copies data from it to a specified host pointer. + +\param allocator +\param srcAllocation Handle to the allocation that becomes source of the copy. +\param srcAllocationLocalOffset Offset within `srcAllocation` where to read copied data, in bytes. +\param pDstHostPointer Pointer to the host memory that become destination of the copy. +\param size Number of bytes to copy. + +This is a convenience function that allows to copy data from an allocation to a host pointer easily. +Same behavior can be achieved by calling vmaInvalidateAllocation(), vmaMapMemory(), `memcpy()`, vmaUnmapMemory(). + +This function should be called only for allocations created in a memory type that has `VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT` +and `VK_MEMORY_PROPERTY_HOST_CACHED_BIT` flag. +It can be ensured e.g. by using #VMA_MEMORY_USAGE_AUTO and #VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT. +Otherwise, the function may fail and generate a Validation Layers error. +It may also work very slowly when reading from an uncached memory. + +`srcAllocationLocalOffset` is relative to the contents of given `srcAllocation`. +If you mean whole allocation, you should pass 0. +Do not pass allocation's offset within device memory block as this parameter! 
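+
+For illustration, a minimal sketch reading back a small result buffer
+(`readbackAllocation` is an assumption of the example):
+
+\code
+uint32_t results[64];
+VkResult res = vmaCopyAllocationToMemory(
+    allocator, readbackAllocation, 0, results, sizeof(results));
+\endcode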
+*/ +VMA_CALL_PRE VkResult VMA_CALL_POST vmaCopyAllocationToMemory( + VmaAllocator VMA_NOT_NULL allocator, + VmaAllocation VMA_NOT_NULL srcAllocation, + VkDeviceSize srcAllocationLocalOffset, + void* VMA_NOT_NULL VMA_LEN_IF_NOT_NULL(size) pDstHostPointer, + VkDeviceSize size); + +/** \brief Checks magic number in margins around all allocations in given memory types (in both default and custom pools) in search for corruptions. + +\param allocator +\param memoryTypeBits Bit mask, where each bit set means that a memory type with that index should be checked. + +Corruption detection is enabled only when `VMA_DEBUG_DETECT_CORRUPTION` macro is defined to nonzero, +`VMA_DEBUG_MARGIN` is defined to nonzero and only for memory types that are +`HOST_VISIBLE` and `HOST_COHERENT`. For more information, see [Corruption detection](@ref debugging_memory_usage_corruption_detection). + +Possible return values: + +- `VK_ERROR_FEATURE_NOT_PRESENT` - corruption detection is not enabled for any of specified memory types. +- `VK_SUCCESS` - corruption detection has been performed and succeeded. +- `VK_ERROR_UNKNOWN` - corruption detection has been performed and found memory corruptions around one of the allocations. + `VMA_ASSERT` is also fired in that case. +- Other value: Error returned by Vulkan, e.g. memory mapping failure. +*/ +VMA_CALL_PRE VkResult VMA_CALL_POST vmaCheckCorruption( + VmaAllocator VMA_NOT_NULL allocator, + uint32_t memoryTypeBits); + +/** \brief Begins defragmentation process. + +\param allocator Allocator object. +\param pInfo Structure filled with parameters of defragmentation. +\param[out] pContext Context object that must be passed to vmaEndDefragmentation() to finish defragmentation. +\returns +- `VK_SUCCESS` if defragmentation can begin. +- `VK_ERROR_FEATURE_NOT_PRESENT` if defragmentation is not supported. + +For more information about defragmentation, see documentation chapter: +[Defragmentation](@ref defragmentation). +*/ +VMA_CALL_PRE VkResult VMA_CALL_POST vmaBeginDefragmentation( + VmaAllocator VMA_NOT_NULL allocator, + const VmaDefragmentationInfo* VMA_NOT_NULL pInfo, + VmaDefragmentationContext VMA_NULLABLE* VMA_NOT_NULL pContext); + +/** \brief Ends defragmentation process. + +\param allocator Allocator object. +\param context Context object that has been created by vmaBeginDefragmentation(). +\param[out] pStats Optional stats for the defragmentation. Can be null. + +Use this function to finish defragmentation started by vmaBeginDefragmentation(). +*/ +VMA_CALL_PRE void VMA_CALL_POST vmaEndDefragmentation( + VmaAllocator VMA_NOT_NULL allocator, + VmaDefragmentationContext VMA_NOT_NULL context, + VmaDefragmentationStats* VMA_NULLABLE pStats); + +/** \brief Starts single defragmentation pass. + +\param allocator Allocator object. +\param context Context object that has been created by vmaBeginDefragmentation(). +\param[out] pPassInfo Computed information for current pass. +\returns +- `VK_SUCCESS` if no more moves are possible. Then you can omit call to vmaEndDefragmentationPass() and simply end whole defragmentation. +- `VK_INCOMPLETE` if there are pending moves returned in `pPassInfo`. You need to perform them, call vmaEndDefragmentationPass(), + and then preferably try another pass with vmaBeginDefragmentationPass(). 
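+
+For illustration, a sketch of the whole pass loop (recreating resources and copying
+their contents inside the loop is application specific and only indicated by a comment):
+
+\code
+VmaDefragmentationInfo defragInfo = {};
+defragInfo.flags = VMA_DEFRAGMENTATION_FLAG_ALGORITHM_FAST_BIT;
+
+VmaDefragmentationContext defragCtx;
+vmaBeginDefragmentation(allocator, &defragInfo, &defragCtx);
+
+for(;;)
+{
+    VmaDefragmentationPassMoveInfo passInfo = {};
+    VkResult res = vmaBeginDefragmentationPass(allocator, defragCtx, &passInfo);
+    if(res == VK_SUCCESS)
+        break; // no more moves are possible
+    // ... recreate buffers/images at pMoves[i].dstTmpAllocation and copy their data ...
+    vmaEndDefragmentationPass(allocator, defragCtx, &passInfo);
+}
+
+vmaEndDefragmentation(allocator, defragCtx, nullptr);
+\endcode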
+*/ +VMA_CALL_PRE VkResult VMA_CALL_POST vmaBeginDefragmentationPass( + VmaAllocator VMA_NOT_NULL allocator, + VmaDefragmentationContext VMA_NOT_NULL context, + VmaDefragmentationPassMoveInfo* VMA_NOT_NULL pPassInfo); + +/** \brief Ends single defragmentation pass. + +\param allocator Allocator object. +\param context Context object that has been created by vmaBeginDefragmentation(). +\param pPassInfo Computed information for current pass filled by vmaBeginDefragmentationPass() and possibly modified by you. + +Returns `VK_SUCCESS` if no more moves are possible or `VK_INCOMPLETE` if more defragmentations are possible. + +Ends incremental defragmentation pass and commits all defragmentation moves from `pPassInfo`. +After this call: + +- Allocations at `pPassInfo[i].srcAllocation` that had `pPassInfo[i].operation ==` #VMA_DEFRAGMENTATION_MOVE_OPERATION_COPY + (which is the default) will be pointing to the new destination place. +- Allocation at `pPassInfo[i].srcAllocation` that had `pPassInfo[i].operation ==` #VMA_DEFRAGMENTATION_MOVE_OPERATION_DESTROY + will be freed. + +If no more moves are possible you can end whole defragmentation. +*/ +VMA_CALL_PRE VkResult VMA_CALL_POST vmaEndDefragmentationPass( + VmaAllocator VMA_NOT_NULL allocator, + VmaDefragmentationContext VMA_NOT_NULL context, + VmaDefragmentationPassMoveInfo* VMA_NOT_NULL pPassInfo); + +/** \brief Binds buffer to allocation. + +Binds specified buffer to region of memory represented by specified allocation. +Gets `VkDeviceMemory` handle and offset from the allocation. +If you want to create a buffer, allocate memory for it and bind them together separately, +you should use this function for binding instead of standard `vkBindBufferMemory()`, +because it ensures proper synchronization so that when a `VkDeviceMemory` object is used by multiple +allocations, calls to `vkBind*Memory()` or `vkMapMemory()` won't happen from multiple threads simultaneously +(which is illegal in Vulkan). + +It is recommended to use function vmaCreateBuffer() instead of this one. +*/ +VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindBufferMemory( + VmaAllocator VMA_NOT_NULL allocator, + VmaAllocation VMA_NOT_NULL allocation, + VkBuffer VMA_NOT_NULL_NON_DISPATCHABLE buffer); + +/** \brief Binds buffer to allocation with additional parameters. + +\param allocator +\param allocation +\param allocationLocalOffset Additional offset to be added while binding, relative to the beginning of the `allocation`. Normally it should be 0. +\param buffer +\param pNext A chain of structures to be attached to `VkBindBufferMemoryInfoKHR` structure used internally. Normally it should be null. + +This function is similar to vmaBindBufferMemory(), but it provides additional parameters. + +If `pNext` is not null, #VmaAllocator object must have been created with #VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT flag +or with VmaAllocatorCreateInfo::vulkanApiVersion `>= VK_API_VERSION_1_1`. Otherwise the call fails. +*/ +VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindBufferMemory2( + VmaAllocator VMA_NOT_NULL allocator, + VmaAllocation VMA_NOT_NULL allocation, + VkDeviceSize allocationLocalOffset, + VkBuffer VMA_NOT_NULL_NON_DISPATCHABLE buffer, + const void* VMA_NULLABLE VMA_EXTENDS_VK_STRUCT(VkBindBufferMemoryInfoKHR) pNext); + +/** \brief Binds image to allocation. + +Binds specified image to region of memory represented by specified allocation. +Gets `VkDeviceMemory` handle and offset from the allocation. 
+If you want to create an image, allocate memory for it and bind them together separately, +you should use this function for binding instead of standard `vkBindImageMemory()`, +because it ensures proper synchronization so that when a `VkDeviceMemory` object is used by multiple +allocations, calls to `vkBind*Memory()` or `vkMapMemory()` won't happen from multiple threads simultaneously +(which is illegal in Vulkan). + +It is recommended to use function vmaCreateImage() instead of this one. +*/ +VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindImageMemory( + VmaAllocator VMA_NOT_NULL allocator, + VmaAllocation VMA_NOT_NULL allocation, + VkImage VMA_NOT_NULL_NON_DISPATCHABLE image); + +/** \brief Binds image to allocation with additional parameters. + +\param allocator +\param allocation +\param allocationLocalOffset Additional offset to be added while binding, relative to the beginning of the `allocation`. Normally it should be 0. +\param image +\param pNext A chain of structures to be attached to `VkBindImageMemoryInfoKHR` structure used internally. Normally it should be null. + +This function is similar to vmaBindImageMemory(), but it provides additional parameters. + +If `pNext` is not null, #VmaAllocator object must have been created with #VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT flag +or with VmaAllocatorCreateInfo::vulkanApiVersion `>= VK_API_VERSION_1_1`. Otherwise the call fails. +*/ +VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindImageMemory2( + VmaAllocator VMA_NOT_NULL allocator, + VmaAllocation VMA_NOT_NULL allocation, + VkDeviceSize allocationLocalOffset, + VkImage VMA_NOT_NULL_NON_DISPATCHABLE image, + const void* VMA_NULLABLE VMA_EXTENDS_VK_STRUCT(VkBindImageMemoryInfoKHR) pNext); + +/** \brief Creates a new `VkBuffer`, allocates and binds memory for it. + +\param allocator +\param pBufferCreateInfo +\param pAllocationCreateInfo +\param[out] pBuffer Buffer that was created. +\param[out] pAllocation Allocation that was created. +\param[out] pAllocationInfo Optional. Information about allocated memory. It can be later fetched using function vmaGetAllocationInfo(). + +This function automatically: + +-# Creates buffer. +-# Allocates appropriate memory for it. +-# Binds the buffer with the memory. + +If any of these operations fail, buffer and allocation are not created, +returned value is negative error code, `*pBuffer` and `*pAllocation` are null. + +If the function succeeded, you must destroy both buffer and allocation when you +no longer need them using either convenience function vmaDestroyBuffer() or +separately, using `vkDestroyBuffer()` and vmaFreeMemory(). + +If #VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT flag was used, +VK_KHR_dedicated_allocation extension is used internally to query driver whether +it requires or prefers the new buffer to have dedicated allocation. If yes, +and if dedicated allocation is possible +(#VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT is not used), it creates dedicated +allocation for this buffer, just like when using +#VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT. + +\note This function creates a new `VkBuffer`. Sub-allocation of parts of one large buffer, +although recommended as a good practice, is out of scope of this library and could be implemented +by the user as a higher-level logic on top of VMA. 
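+
+As an illustrative sketch, assuming `allocator` is a valid #VmaAllocator and letting the library
+pick the memory type via #VMA_MEMORY_USAGE_AUTO (error handling omitted):
+
+\code
+VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
+bufCreateInfo.size = 65536;
+bufCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
+
+VmaAllocationCreateInfo allocCreateInfo = {};
+allocCreateInfo.usage = VMA_MEMORY_USAGE_AUTO;
+
+VkBuffer buffer;
+VmaAllocation allocation;
+VkResult res = vmaCreateBuffer(allocator, &bufCreateInfo, &allocCreateInfo, &buffer, &allocation, nullptr);
+// ... check `res`, use the buffer ...
+vmaDestroyBuffer(allocator, buffer, allocation);
+\endcode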
+*/ +VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateBuffer( + VmaAllocator VMA_NOT_NULL allocator, + const VkBufferCreateInfo* VMA_NOT_NULL pBufferCreateInfo, + const VmaAllocationCreateInfo* VMA_NOT_NULL pAllocationCreateInfo, + VkBuffer VMA_NULLABLE_NON_DISPATCHABLE* VMA_NOT_NULL pBuffer, + VmaAllocation VMA_NULLABLE* VMA_NOT_NULL pAllocation, + VmaAllocationInfo* VMA_NULLABLE pAllocationInfo); + +/** \brief Creates a buffer with additional minimum alignment. + +Similar to vmaCreateBuffer() but provides additional parameter `minAlignment` which allows to specify custom, +minimum alignment to be used when placing the buffer inside a larger memory block, which may be needed e.g. +for interop with OpenGL. +*/ +VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateBufferWithAlignment( + VmaAllocator VMA_NOT_NULL allocator, + const VkBufferCreateInfo* VMA_NOT_NULL pBufferCreateInfo, + const VmaAllocationCreateInfo* VMA_NOT_NULL pAllocationCreateInfo, + VkDeviceSize minAlignment, + VkBuffer VMA_NULLABLE_NON_DISPATCHABLE* VMA_NOT_NULL pBuffer, + VmaAllocation VMA_NULLABLE* VMA_NOT_NULL pAllocation, + VmaAllocationInfo* VMA_NULLABLE pAllocationInfo); + +/** \brief Creates a new `VkBuffer`, binds already created memory for it. + +\param allocator +\param allocation Allocation that provides memory to be used for binding new buffer to it. +\param pBufferCreateInfo +\param[out] pBuffer Buffer that was created. + +This function automatically: + +-# Creates buffer. +-# Binds the buffer with the supplied memory. + +If any of these operations fail, buffer is not created, +returned value is negative error code and `*pBuffer` is null. + +If the function succeeded, you must destroy the buffer when you +no longer need it using `vkDestroyBuffer()`. If you want to also destroy the corresponding +allocation you can use convenience function vmaDestroyBuffer(). + +\note There is a new version of this function augmented with parameter `allocationLocalOffset` - see vmaCreateAliasingBuffer2(). +*/ +VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateAliasingBuffer( + VmaAllocator VMA_NOT_NULL allocator, + VmaAllocation VMA_NOT_NULL allocation, + const VkBufferCreateInfo* VMA_NOT_NULL pBufferCreateInfo, + VkBuffer VMA_NULLABLE_NON_DISPATCHABLE* VMA_NOT_NULL pBuffer); + +/** \brief Creates a new `VkBuffer`, binds already created memory for it. + +\param allocator +\param allocation Allocation that provides memory to be used for binding new buffer to it. +\param allocationLocalOffset Additional offset to be added while binding, relative to the beginning of the allocation. Normally it should be 0. +\param pBufferCreateInfo +\param[out] pBuffer Buffer that was created. + +This function automatically: + +-# Creates buffer. +-# Binds the buffer with the supplied memory. + +If any of these operations fail, buffer is not created, +returned value is negative error code and `*pBuffer` is null. + +If the function succeeded, you must destroy the buffer when you +no longer need it using `vkDestroyBuffer()`. If you want to also destroy the corresponding +allocation you can use convenience function vmaDestroyBuffer(). + +\note This is a new version of the function augmented with parameter `allocationLocalOffset`. 
+*/ +VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateAliasingBuffer2( + VmaAllocator VMA_NOT_NULL allocator, + VmaAllocation VMA_NOT_NULL allocation, + VkDeviceSize allocationLocalOffset, + const VkBufferCreateInfo* VMA_NOT_NULL pBufferCreateInfo, + VkBuffer VMA_NULLABLE_NON_DISPATCHABLE* VMA_NOT_NULL pBuffer); + +/** \brief Destroys Vulkan buffer and frees allocated memory. + +This is just a convenience function equivalent to: + +\code +vkDestroyBuffer(device, buffer, allocationCallbacks); +vmaFreeMemory(allocator, allocation); +\endcode + +It is safe to pass null as buffer and/or allocation. +*/ +VMA_CALL_PRE void VMA_CALL_POST vmaDestroyBuffer( + VmaAllocator VMA_NOT_NULL allocator, + VkBuffer VMA_NULLABLE_NON_DISPATCHABLE buffer, + VmaAllocation VMA_NULLABLE allocation); + +/// Function similar to vmaCreateBuffer(). +VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateImage( + VmaAllocator VMA_NOT_NULL allocator, + const VkImageCreateInfo* VMA_NOT_NULL pImageCreateInfo, + const VmaAllocationCreateInfo* VMA_NOT_NULL pAllocationCreateInfo, + VkImage VMA_NULLABLE_NON_DISPATCHABLE* VMA_NOT_NULL pImage, + VmaAllocation VMA_NULLABLE* VMA_NOT_NULL pAllocation, + VmaAllocationInfo* VMA_NULLABLE pAllocationInfo); + +/// Function similar to vmaCreateAliasingBuffer() but for images. +VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateAliasingImage( + VmaAllocator VMA_NOT_NULL allocator, + VmaAllocation VMA_NOT_NULL allocation, + const VkImageCreateInfo* VMA_NOT_NULL pImageCreateInfo, + VkImage VMA_NULLABLE_NON_DISPATCHABLE* VMA_NOT_NULL pImage); + +/// Function similar to vmaCreateAliasingBuffer2() but for images. +VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateAliasingImage2( + VmaAllocator VMA_NOT_NULL allocator, + VmaAllocation VMA_NOT_NULL allocation, + VkDeviceSize allocationLocalOffset, + const VkImageCreateInfo* VMA_NOT_NULL pImageCreateInfo, + VkImage VMA_NULLABLE_NON_DISPATCHABLE* VMA_NOT_NULL pImage); + +/** \brief Destroys Vulkan image and frees allocated memory. + +This is just a convenience function equivalent to: + +\code +vkDestroyImage(device, image, allocationCallbacks); +vmaFreeMemory(allocator, allocation); +\endcode + +It is safe to pass null as image and/or allocation. +*/ +VMA_CALL_PRE void VMA_CALL_POST vmaDestroyImage( + VmaAllocator VMA_NOT_NULL allocator, + VkImage VMA_NULLABLE_NON_DISPATCHABLE image, + VmaAllocation VMA_NULLABLE allocation); + +/** @} */ + +/** +\addtogroup group_virtual +@{ +*/ + +/** \brief Creates new #VmaVirtualBlock object. + +\param pCreateInfo Parameters for creation. +\param[out] pVirtualBlock Returned virtual block object or `VMA_NULL` if creation failed. +*/ +VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateVirtualBlock( + const VmaVirtualBlockCreateInfo* VMA_NOT_NULL pCreateInfo, + VmaVirtualBlock VMA_NULLABLE* VMA_NOT_NULL pVirtualBlock); + +/** \brief Destroys #VmaVirtualBlock object. + +Please note that you should consciously handle virtual allocations that could remain unfreed in the block. +You should either free them individually using vmaVirtualFree() or call vmaClearVirtualBlock() +if you are sure this is what you want. If you do neither, an assert is called. + +If you keep pointers to some additional metadata associated with your virtual allocations in their `pUserData`, +don't forget to free them. +*/ +VMA_CALL_PRE void VMA_CALL_POST vmaDestroyVirtualBlock( + VmaVirtualBlock VMA_NULLABLE virtualBlock); + +/** \brief Returns true of the #VmaVirtualBlock is empty - contains 0 virtual allocations and has all its space available for new allocations. 
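+
+A minimal sketch of the whole virtual allocation lifecycle is shown below. The sizes are arbitrary,
+and VmaVirtualBlockCreateInfo::size and VmaVirtualAllocationCreateInfo::size are assumed to be the
+only parameters needed for this simple case:
+
+\code
+VmaVirtualBlockCreateInfo blockCreateInfo = {};
+blockCreateInfo.size = 1048576; // 1 MB
+
+VmaVirtualBlock block;
+VkResult res = vmaCreateVirtualBlock(&blockCreateInfo, &block);
+if(res == VK_SUCCESS)
+{
+    VmaVirtualAllocationCreateInfo allocCreateInfo = {};
+    allocCreateInfo.size = 4096;
+
+    VmaVirtualAllocation alloc;
+    VkDeviceSize offset;
+    res = vmaVirtualAllocate(block, &allocCreateInfo, &alloc, &offset);
+    // ... use the range [offset, offset + 4096) inside your own resource ...
+    vmaVirtualFree(block, alloc);
+
+    // At this point the block reports itself as empty again.
+    VkBool32 isEmpty = vmaIsVirtualBlockEmpty(block);
+    (void)isEmpty;
+    vmaDestroyVirtualBlock(block);
+}
+\endcode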
+*/
+VMA_CALL_PRE VkBool32 VMA_CALL_POST vmaIsVirtualBlockEmpty(
+ VmaVirtualBlock VMA_NOT_NULL virtualBlock);
+
+/** \brief Returns information about a specific virtual allocation within a virtual block, like its size and `pUserData` pointer.
+*/
+VMA_CALL_PRE void VMA_CALL_POST vmaGetVirtualAllocationInfo(
+ VmaVirtualBlock VMA_NOT_NULL virtualBlock,
+ VmaVirtualAllocation VMA_NOT_NULL_NON_DISPATCHABLE allocation, VmaVirtualAllocationInfo* VMA_NOT_NULL pVirtualAllocInfo);
+
+/** \brief Allocates new virtual allocation inside given #VmaVirtualBlock.
+
+If the allocation fails because there is not enough free space available, `VK_ERROR_OUT_OF_DEVICE_MEMORY` is returned
+(even though the function never allocates any actual GPU memory).
+`pAllocation` is then set to `VK_NULL_HANDLE` and `pOffset`, if not null, is set to `UINT64_MAX`.
+
+\param virtualBlock Virtual block
+\param pCreateInfo Parameters for the allocation
+\param[out] pAllocation Returned handle of the new allocation
+\param[out] pOffset Returned offset of the new allocation. Optional, can be null.
+*/
+VMA_CALL_PRE VkResult VMA_CALL_POST vmaVirtualAllocate(
+ VmaVirtualBlock VMA_NOT_NULL virtualBlock,
+ const VmaVirtualAllocationCreateInfo* VMA_NOT_NULL pCreateInfo,
+ VmaVirtualAllocation VMA_NULLABLE_NON_DISPATCHABLE* VMA_NOT_NULL pAllocation,
+ VkDeviceSize* VMA_NULLABLE pOffset);
+
+/** \brief Frees virtual allocation inside given #VmaVirtualBlock.
+
+It is correct to call this function with `allocation == VK_NULL_HANDLE` - it does nothing.
+*/
+VMA_CALL_PRE void VMA_CALL_POST vmaVirtualFree(
+ VmaVirtualBlock VMA_NOT_NULL virtualBlock,
+ VmaVirtualAllocation VMA_NULLABLE_NON_DISPATCHABLE allocation);
+
+/** \brief Frees all virtual allocations inside given #VmaVirtualBlock.
+
+You must either call this function or free each virtual allocation individually with vmaVirtualFree()
+before destroying a virtual block. Otherwise, an assert is called.
+
+If you keep a pointer to some additional metadata associated with your virtual allocation in its `pUserData`,
+don't forget to free it as well.
+*/
+VMA_CALL_PRE void VMA_CALL_POST vmaClearVirtualBlock(
+ VmaVirtualBlock VMA_NOT_NULL virtualBlock);
+
+/** \brief Changes custom pointer associated with given virtual allocation.
+*/
+VMA_CALL_PRE void VMA_CALL_POST vmaSetVirtualAllocationUserData(
+ VmaVirtualBlock VMA_NOT_NULL virtualBlock,
+ VmaVirtualAllocation VMA_NOT_NULL_NON_DISPATCHABLE allocation,
+ void* VMA_NULLABLE pUserData);
+
+/** \brief Calculates and returns statistics about virtual allocations and memory usage in given #VmaVirtualBlock.
+
+This function is fast to call. For more detailed statistics, see vmaCalculateVirtualBlockStatistics().
+*/
+VMA_CALL_PRE void VMA_CALL_POST vmaGetVirtualBlockStatistics(
+ VmaVirtualBlock VMA_NOT_NULL virtualBlock,
+ VmaStatistics* VMA_NOT_NULL pStats);
+
+/** \brief Calculates and returns detailed statistics about virtual allocations and memory usage in given #VmaVirtualBlock.
+
+This function is slow to call. Use for debugging purposes.
+For less detailed statistics, see vmaGetVirtualBlockStatistics().
+*/
+VMA_CALL_PRE void VMA_CALL_POST vmaCalculateVirtualBlockStatistics(
+ VmaVirtualBlock VMA_NOT_NULL virtualBlock,
+ VmaDetailedStatistics* VMA_NOT_NULL pStats);
+
+/** @} */
+
+#if VMA_STATS_STRING_ENABLED
+/**
+\addtogroup group_stats
+@{
+*/
+
+/** \brief Builds and returns a null-terminated string in JSON format with information about given #VmaVirtualBlock.
+\param virtualBlock Virtual block.
+\param[out] ppStatsString Returned string.
+\param detailedMap Pass `VK_FALSE` to only obtain statistics as returned by vmaCalculateVirtualBlockStatistics(). Pass `VK_TRUE` to also obtain full list of allocations and free spaces. + +Returned string must be freed using vmaFreeVirtualBlockStatsString(). +*/ +VMA_CALL_PRE void VMA_CALL_POST vmaBuildVirtualBlockStatsString( + VmaVirtualBlock VMA_NOT_NULL virtualBlock, + char* VMA_NULLABLE* VMA_NOT_NULL ppStatsString, + VkBool32 detailedMap); + +/// Frees a string returned by vmaBuildVirtualBlockStatsString(). +VMA_CALL_PRE void VMA_CALL_POST vmaFreeVirtualBlockStatsString( + VmaVirtualBlock VMA_NOT_NULL virtualBlock, + char* VMA_NULLABLE pStatsString); + +/** \brief Builds and returns statistics as a null-terminated string in JSON format. +\param allocator +\param[out] ppStatsString Must be freed using vmaFreeStatsString() function. +\param detailedMap +*/ +VMA_CALL_PRE void VMA_CALL_POST vmaBuildStatsString( + VmaAllocator VMA_NOT_NULL allocator, + char* VMA_NULLABLE* VMA_NOT_NULL ppStatsString, + VkBool32 detailedMap); + +VMA_CALL_PRE void VMA_CALL_POST vmaFreeStatsString( + VmaAllocator VMA_NOT_NULL allocator, + char* VMA_NULLABLE pStatsString); + +/** @} */ + +#endif // VMA_STATS_STRING_ENABLED + +#endif // _VMA_FUNCTION_HEADERS + +#ifdef __cplusplus +} +#endif + +#endif // AMD_VULKAN_MEMORY_ALLOCATOR_H + +//////////////////////////////////////////////////////////////////////////////// +//////////////////////////////////////////////////////////////////////////////// +// +// IMPLEMENTATION +// +//////////////////////////////////////////////////////////////////////////////// +//////////////////////////////////////////////////////////////////////////////// + +// For Visual Studio IntelliSense. +#if defined(__cplusplus) && defined(__INTELLISENSE__) +#define VMA_IMPLEMENTATION +#endif + +#ifdef VMA_IMPLEMENTATION +#undef VMA_IMPLEMENTATION + +#include +#include +#include +#include +#include +#include + +#if !defined(VMA_CPP20) + #if __cplusplus >= 202002L || _MSVC_LANG >= 202002L // C++20 + #define VMA_CPP20 1 + #else + #define VMA_CPP20 0 + #endif +#endif + +#ifdef _MSC_VER + #include // For functions like __popcnt, _BitScanForward etc. +#endif +#if VMA_CPP20 + #include +#endif + +#if VMA_STATS_STRING_ENABLED + #include // For snprintf +#endif + +/******************************************************************************* +CONFIGURATION SECTION + +Define some of these macros before each #include of this header or change them +here if you need other then default behavior depending on your environment. +*/ +#ifndef _VMA_CONFIGURATION + +/* +Define this macro to 1 to make the library fetch pointers to Vulkan functions +internally, like: + + vulkanFunctions.vkAllocateMemory = &vkAllocateMemory; +*/ +#if !defined(VMA_STATIC_VULKAN_FUNCTIONS) && !defined(VK_NO_PROTOTYPES) + #define VMA_STATIC_VULKAN_FUNCTIONS 1 +#endif + +/* +Define this macro to 1 to make the library fetch pointers to Vulkan functions +internally, like: + + vulkanFunctions.vkAllocateMemory = (PFN_vkAllocateMemory)vkGetDeviceProcAddr(device, "vkAllocateMemory"); + +To use this feature in new versions of VMA you now have to pass +VmaVulkanFunctions::vkGetInstanceProcAddr and vkGetDeviceProcAddr as +VmaAllocatorCreateInfo::pVulkanFunctions. Other members can be null. 
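+
+A minimal sketch of that setup (VmaAllocatorCreateInfo members `instance`, `physicalDevice` and
+`device` as well as vmaCreateAllocator() are declared in the interface part of this header; the
+Vulkan handles are assumed to have been created by the application already):
+
+    VmaVulkanFunctions vulkanFunctions = {};
+    vulkanFunctions.vkGetInstanceProcAddr = &vkGetInstanceProcAddr;
+    vulkanFunctions.vkGetDeviceProcAddr = &vkGetDeviceProcAddr;
+
+    VmaAllocatorCreateInfo allocatorCreateInfo = {};
+    allocatorCreateInfo.instance = instance;
+    allocatorCreateInfo.physicalDevice = physicalDevice;
+    allocatorCreateInfo.device = device;
+    allocatorCreateInfo.pVulkanFunctions = &vulkanFunctions;
+
+    VmaAllocator allocator;
+    vmaCreateAllocator(&allocatorCreateInfo, &allocator);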
+*/ +#if !defined(VMA_DYNAMIC_VULKAN_FUNCTIONS) + #define VMA_DYNAMIC_VULKAN_FUNCTIONS 1 +#endif + +#ifndef VMA_USE_STL_SHARED_MUTEX + #if __cplusplus >= 201703L || _MSVC_LANG >= 201703L // C++17 + #define VMA_USE_STL_SHARED_MUTEX 1 + // Visual studio defines __cplusplus properly only when passed additional parameter: /Zc:__cplusplus + // Otherwise it is always 199711L, despite shared_mutex works since Visual Studio 2015 Update 2. + #elif defined(_MSC_FULL_VER) && _MSC_FULL_VER >= 190023918 && __cplusplus == 199711L && _MSVC_LANG >= 201703L + #define VMA_USE_STL_SHARED_MUTEX 1 + #else + #define VMA_USE_STL_SHARED_MUTEX 0 + #endif +#endif + +/* +Define this macro to include custom header files without having to edit this file directly, e.g.: + + // Inside of "my_vma_configuration_user_includes.h": + + #include "my_custom_assert.h" // for MY_CUSTOM_ASSERT + #include "my_custom_min.h" // for my_custom_min + #include + #include + + // Inside a different file, which includes "vk_mem_alloc.h": + + #define VMA_CONFIGURATION_USER_INCLUDES_H "my_vma_configuration_user_includes.h" + #define VMA_ASSERT(expr) MY_CUSTOM_ASSERT(expr) + #define VMA_MIN(v1, v2) (my_custom_min(v1, v2)) + #include "vk_mem_alloc.h" + ... + +The following headers are used in this CONFIGURATION section only, so feel free to +remove them if not needed. +*/ +#if !defined(VMA_CONFIGURATION_USER_INCLUDES_H) + #include // for assert + #include // for min, max, swap + #include +#else + #include VMA_CONFIGURATION_USER_INCLUDES_H +#endif + +#ifndef VMA_NULL + // Value used as null pointer. Define it to e.g.: nullptr, NULL, 0, (void*)0. + #define VMA_NULL nullptr +#endif + +#ifndef VMA_FALLTHROUGH + #if __cplusplus >= 201703L || _MSVC_LANG >= 201703L // C++17 + #define VMA_FALLTHROUGH [[fallthrough]] + #else + #define VMA_FALLTHROUGH + #endif +#endif + +// Normal assert to check for programmer's errors, especially in Debug configuration. +#ifndef VMA_ASSERT + #ifdef NDEBUG + #define VMA_ASSERT(expr) + #else + #define VMA_ASSERT(expr) assert(expr) + #endif +#endif + +// Assert that will be called very often, like inside data structures e.g. operator[]. +// Making it non-empty can make program slow. +#ifndef VMA_HEAVY_ASSERT + #ifdef NDEBUG + #define VMA_HEAVY_ASSERT(expr) + #else + #define VMA_HEAVY_ASSERT(expr) //VMA_ASSERT(expr) + #endif +#endif + +// Assert used for reporting memory leaks - unfreed allocations. +#ifndef VMA_ASSERT_LEAK + #define VMA_ASSERT_LEAK(expr) VMA_ASSERT(expr) +#endif + +// If your compiler is not compatible with C++17 and definition of +// aligned_alloc() function is missing, uncommenting following line may help: + +//#include + +#if defined(__ANDROID_API__) && (__ANDROID_API__ < 16) +#include +static void* vma_aligned_alloc(size_t alignment, size_t size) +{ + // alignment must be >= sizeof(void*) + if(alignment < sizeof(void*)) + { + alignment = sizeof(void*); + } + + return memalign(alignment, size); +} +#elif defined(__APPLE__) || defined(__ANDROID__) || (defined(__linux__) && defined(__GLIBCXX__) && !defined(_GLIBCXX_HAVE_ALIGNED_ALLOC)) +#include + +#if defined(__APPLE__) +#include +#endif + +static void* vma_aligned_alloc(size_t alignment, size_t size) +{ + // Unfortunately, aligned_alloc causes VMA to crash due to it returning null pointers. (At least under 11.4) + // Therefore, for now disable this specific exception until a proper solution is found. 
+ //#if defined(__APPLE__) && (defined(MAC_OS_X_VERSION_10_16) || defined(__IPHONE_14_0)) + //#if MAC_OS_X_VERSION_MAX_ALLOWED >= MAC_OS_X_VERSION_10_16 || __IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_14_0 + // // For C++14, usr/include/malloc/_malloc.h declares aligned_alloc()) only + // // with the MacOSX11.0 SDK in Xcode 12 (which is what adds + // // MAC_OS_X_VERSION_10_16), even though the function is marked + // // available for 10.15. That is why the preprocessor checks for 10.16 but + // // the __builtin_available checks for 10.15. + // // People who use C++17 could call aligned_alloc with the 10.15 SDK already. + // if (__builtin_available(macOS 10.15, iOS 13, *)) + // return aligned_alloc(alignment, size); + //#endif + //#endif + + // alignment must be >= sizeof(void*) + if(alignment < sizeof(void*)) + { + alignment = sizeof(void*); + } + + void *pointer; + if(posix_memalign(&pointer, alignment, size) == 0) + return pointer; + return VMA_NULL; +} +#elif defined(_WIN32) +static void* vma_aligned_alloc(size_t alignment, size_t size) +{ + return _aligned_malloc(size, alignment); +} +#elif __cplusplus >= 201703L || _MSVC_LANG >= 201703L // C++17 +static void* vma_aligned_alloc(size_t alignment, size_t size) +{ + return aligned_alloc(alignment, size); +} +#else +static void* vma_aligned_alloc(size_t alignment, size_t size) +{ + VMA_ASSERT(0 && "Could not implement aligned_alloc automatically. Please enable C++17 or later in your compiler or provide custom implementation of macro VMA_SYSTEM_ALIGNED_MALLOC (and VMA_SYSTEM_ALIGNED_FREE if needed) using the API of your system."); + return VMA_NULL; +} +#endif + +#if defined(_WIN32) +static void vma_aligned_free(void* ptr) +{ + _aligned_free(ptr); +} +#else +static void vma_aligned_free(void* VMA_NULLABLE ptr) +{ + free(ptr); +} +#endif + +#ifndef VMA_ALIGN_OF + #define VMA_ALIGN_OF(type) (alignof(type)) +#endif + +#ifndef VMA_SYSTEM_ALIGNED_MALLOC + #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) vma_aligned_alloc((alignment), (size)) +#endif + +#ifndef VMA_SYSTEM_ALIGNED_FREE + // VMA_SYSTEM_FREE is the old name, but might have been defined by the user + #if defined(VMA_SYSTEM_FREE) + #define VMA_SYSTEM_ALIGNED_FREE(ptr) VMA_SYSTEM_FREE(ptr) + #else + #define VMA_SYSTEM_ALIGNED_FREE(ptr) vma_aligned_free(ptr) + #endif +#endif + +#ifndef VMA_COUNT_BITS_SET + // Returns number of bits set to 1 in (v) + #define VMA_COUNT_BITS_SET(v) VmaCountBitsSet(v) +#endif + +#ifndef VMA_BITSCAN_LSB + // Scans integer for index of first nonzero value from the Least Significant Bit (LSB). If mask is 0 then returns UINT8_MAX + #define VMA_BITSCAN_LSB(mask) VmaBitScanLSB(mask) +#endif + +#ifndef VMA_BITSCAN_MSB + // Scans integer for index of first nonzero value from the Most Significant Bit (MSB). If mask is 0 then returns UINT8_MAX + #define VMA_BITSCAN_MSB(mask) VmaBitScanMSB(mask) +#endif + +#ifndef VMA_MIN + #define VMA_MIN(v1, v2) ((std::min)((v1), (v2))) +#endif + +#ifndef VMA_MAX + #define VMA_MAX(v1, v2) ((std::max)((v1), (v2))) +#endif + +#ifndef VMA_SORT + #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp) +#endif + +#ifndef VMA_DEBUG_LOG_FORMAT + #define VMA_DEBUG_LOG_FORMAT(format, ...) + /* + #define VMA_DEBUG_LOG_FORMAT(format, ...) do { \ + printf((format), __VA_ARGS__); \ + printf("\n"); \ + } while(false) + */ +#endif + +#ifndef VMA_DEBUG_LOG + #define VMA_DEBUG_LOG(str) VMA_DEBUG_LOG_FORMAT("%s", (str)) +#endif + +#ifndef VMA_LEAK_LOG_FORMAT + #define VMA_LEAK_LOG_FORMAT(format, ...) 
VMA_DEBUG_LOG_FORMAT(format, __VA_ARGS__) +#endif + +#ifndef VMA_CLASS_NO_COPY + #define VMA_CLASS_NO_COPY(className) \ + private: \ + className(const className&) = delete; \ + className& operator=(const className&) = delete; +#endif +#ifndef VMA_CLASS_NO_COPY_NO_MOVE + #define VMA_CLASS_NO_COPY_NO_MOVE(className) \ + private: \ + className(const className&) = delete; \ + className(className&&) = delete; \ + className& operator=(const className&) = delete; \ + className& operator=(className&&) = delete; +#endif + +// Define this macro to 1 to enable functions: vmaBuildStatsString, vmaFreeStatsString. +#if VMA_STATS_STRING_ENABLED + static inline void VmaUint32ToStr(char* VMA_NOT_NULL outStr, size_t strLen, uint32_t num) + { + snprintf(outStr, strLen, "%" PRIu32, num); + } + static inline void VmaUint64ToStr(char* VMA_NOT_NULL outStr, size_t strLen, uint64_t num) + { + snprintf(outStr, strLen, "%" PRIu64, num); + } + static inline void VmaPtrToStr(char* VMA_NOT_NULL outStr, size_t strLen, const void* ptr) + { + snprintf(outStr, strLen, "%p", ptr); + } +#endif + +#ifndef VMA_MUTEX + class VmaMutex + { + VMA_CLASS_NO_COPY_NO_MOVE(VmaMutex) + public: + VmaMutex() { } + void Lock() { m_Mutex.lock(); } + void Unlock() { m_Mutex.unlock(); } + bool TryLock() { return m_Mutex.try_lock(); } + private: + std::mutex m_Mutex; + }; + #define VMA_MUTEX VmaMutex +#endif + +// Read-write mutex, where "read" is shared access, "write" is exclusive access. +#ifndef VMA_RW_MUTEX + #if VMA_USE_STL_SHARED_MUTEX + // Use std::shared_mutex from C++17. + #include + class VmaRWMutex + { + public: + void LockRead() { m_Mutex.lock_shared(); } + void UnlockRead() { m_Mutex.unlock_shared(); } + bool TryLockRead() { return m_Mutex.try_lock_shared(); } + void LockWrite() { m_Mutex.lock(); } + void UnlockWrite() { m_Mutex.unlock(); } + bool TryLockWrite() { return m_Mutex.try_lock(); } + private: + std::shared_mutex m_Mutex; + }; + #define VMA_RW_MUTEX VmaRWMutex + #elif defined(_WIN32) && defined(WINVER) && WINVER >= 0x0600 + // Use SRWLOCK from WinAPI. + // Minimum supported client = Windows Vista, server = Windows Server 2008. + class VmaRWMutex + { + public: + VmaRWMutex() { InitializeSRWLock(&m_Lock); } + void LockRead() { AcquireSRWLockShared(&m_Lock); } + void UnlockRead() { ReleaseSRWLockShared(&m_Lock); } + bool TryLockRead() { return TryAcquireSRWLockShared(&m_Lock) != FALSE; } + void LockWrite() { AcquireSRWLockExclusive(&m_Lock); } + void UnlockWrite() { ReleaseSRWLockExclusive(&m_Lock); } + bool TryLockWrite() { return TryAcquireSRWLockExclusive(&m_Lock) != FALSE; } + private: + SRWLOCK m_Lock; + }; + #define VMA_RW_MUTEX VmaRWMutex + #else + // Less efficient fallback: Use normal mutex. + class VmaRWMutex + { + public: + void LockRead() { m_Mutex.Lock(); } + void UnlockRead() { m_Mutex.Unlock(); } + bool TryLockRead() { return m_Mutex.TryLock(); } + void LockWrite() { m_Mutex.Lock(); } + void UnlockWrite() { m_Mutex.Unlock(); } + bool TryLockWrite() { return m_Mutex.TryLock(); } + private: + VMA_MUTEX m_Mutex; + }; + #define VMA_RW_MUTEX VmaRWMutex + #endif // #if VMA_USE_STL_SHARED_MUTEX +#endif // #ifndef VMA_RW_MUTEX + +/* +If providing your own implementation, you need to implement a subset of std::atomic. +*/ +#ifndef VMA_ATOMIC_UINT32 + #include + #define VMA_ATOMIC_UINT32 std::atomic +#endif + +#ifndef VMA_ATOMIC_UINT64 + #include + #define VMA_ATOMIC_UINT64 std::atomic +#endif + +#ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY + /** + Every allocation will have its own memory block. 
+ Define to 1 for debugging purposes only. + */ + #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0) +#endif + +#ifndef VMA_MIN_ALIGNMENT + /** + Minimum alignment of all allocations, in bytes. + Set to more than 1 for debugging purposes. Must be power of two. + */ + #ifdef VMA_DEBUG_ALIGNMENT // Old name + #define VMA_MIN_ALIGNMENT VMA_DEBUG_ALIGNMENT + #else + #define VMA_MIN_ALIGNMENT (1) + #endif +#endif + +#ifndef VMA_DEBUG_MARGIN + /** + Minimum margin after every allocation, in bytes. + Set nonzero for debugging purposes only. + */ + #define VMA_DEBUG_MARGIN (0) +#endif + +#ifndef VMA_DEBUG_INITIALIZE_ALLOCATIONS + /** + Define this macro to 1 to automatically fill new allocations and destroyed + allocations with some bit pattern. + */ + #define VMA_DEBUG_INITIALIZE_ALLOCATIONS (0) +#endif + +#ifndef VMA_DEBUG_DETECT_CORRUPTION + /** + Define this macro to 1 together with non-zero value of VMA_DEBUG_MARGIN to + enable writing magic value to the margin after every allocation and + validating it, so that memory corruptions (out-of-bounds writes) are detected. + */ + #define VMA_DEBUG_DETECT_CORRUPTION (0) +#endif + +#ifndef VMA_DEBUG_GLOBAL_MUTEX + /** + Set this to 1 for debugging purposes only, to enable single mutex protecting all + entry calls to the library. Can be useful for debugging multithreading issues. + */ + #define VMA_DEBUG_GLOBAL_MUTEX (0) +#endif + +#ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY + /** + Minimum value for VkPhysicalDeviceLimits::bufferImageGranularity. + Set to more than 1 for debugging purposes only. Must be power of two. + */ + #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1) +#endif + +#ifndef VMA_DEBUG_DONT_EXCEED_MAX_MEMORY_ALLOCATION_COUNT + /* + Set this to 1 to make VMA never exceed VkPhysicalDeviceLimits::maxMemoryAllocationCount + and return error instead of leaving up to Vulkan implementation what to do in such cases. + */ + #define VMA_DEBUG_DONT_EXCEED_MAX_MEMORY_ALLOCATION_COUNT (0) +#endif + +#ifndef VMA_SMALL_HEAP_MAX_SIZE + /// Maximum size of a memory heap in Vulkan to consider it "small". + #define VMA_SMALL_HEAP_MAX_SIZE (1024ull * 1024 * 1024) +#endif + +#ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE + /// Default size of a block allocated as single VkDeviceMemory from a "large" heap. + #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256ull * 1024 * 1024) +#endif + +/* +Mapping hysteresis is a logic that launches when vmaMapMemory/vmaUnmapMemory is called +or a persistently mapped allocation is created and destroyed several times in a row. +It keeps additional +1 mapping of a device memory block to prevent calling actual +vkMapMemory/vkUnmapMemory too many times, which may improve performance and help +tools like RenderDoc. +*/ +#ifndef VMA_MAPPING_HYSTERESIS_ENABLED + #define VMA_MAPPING_HYSTERESIS_ENABLED 1 +#endif + +#define VMA_VALIDATE(cond) do { if(!(cond)) { \ + VMA_ASSERT(0 && "Validation failed: " #cond); \ + return false; \ + } } while(false) + +/******************************************************************************* +END OF CONFIGURATION +*/ +#endif // _VMA_CONFIGURATION + + +static const uint8_t VMA_ALLOCATION_FILL_PATTERN_CREATED = 0xDC; +static const uint8_t VMA_ALLOCATION_FILL_PATTERN_DESTROYED = 0xEF; +// Decimal 2139416166, float NaN, little-endian binary 66 E6 84 7F. +static const uint32_t VMA_CORRUPTION_DETECTION_MAGIC_VALUE = 0x7F84E666; + +// Copy of some Vulkan definitions so we don't need to check their existence just to handle few constants. 
+static const uint32_t VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY = 0x00000040; +static const uint32_t VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD_COPY = 0x00000080; +static const uint32_t VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_COPY = 0x00020000; +static const uint32_t VK_IMAGE_CREATE_DISJOINT_BIT_COPY = 0x00000200; +static const int32_t VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT_COPY = 1000158000; +static const uint32_t VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET = 0x10000000u; +static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32; +static const uint32_t VMA_VENDOR_ID_AMD = 4098; + +// This one is tricky. Vulkan specification defines this code as available since +// Vulkan 1.0, but doesn't actually define it in Vulkan SDK earlier than 1.2.131. +// See pull request #207. +#define VK_ERROR_UNKNOWN_COPY ((VkResult)-13) + + +#if VMA_STATS_STRING_ENABLED +// Correspond to values of enum VmaSuballocationType. +static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = +{ + "FREE", + "UNKNOWN", + "BUFFER", + "IMAGE_UNKNOWN", + "IMAGE_LINEAR", + "IMAGE_OPTIMAL", +}; +#endif + +static VkAllocationCallbacks VmaEmptyAllocationCallbacks = + { VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL }; + + +#ifndef _VMA_ENUM_DECLARATIONS + +enum VmaSuballocationType +{ + VMA_SUBALLOCATION_TYPE_FREE = 0, + VMA_SUBALLOCATION_TYPE_UNKNOWN = 1, + VMA_SUBALLOCATION_TYPE_BUFFER = 2, + VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3, + VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4, + VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5, + VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF +}; + +enum VMA_CACHE_OPERATION +{ + VMA_CACHE_FLUSH, + VMA_CACHE_INVALIDATE +}; + +enum class VmaAllocationRequestType +{ + Normal, + TLSF, + // Used by "Linear" algorithm. + UpperAddress, + EndOf1st, + EndOf2nd, +}; + +#endif // _VMA_ENUM_DECLARATIONS + +#ifndef _VMA_FORWARD_DECLARATIONS +// Opaque handle used by allocation algorithms to identify single allocation in any conforming way. +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VmaAllocHandle); + +struct VmaMutexLock; +struct VmaMutexLockRead; +struct VmaMutexLockWrite; + +template +struct AtomicTransactionalIncrement; + +template +struct VmaStlAllocator; + +template +class VmaVector; + +template +class VmaSmallVector; + +template +class VmaPoolAllocator; + +template +struct VmaListItem; + +template +class VmaRawList; + +template +class VmaList; + +template +class VmaIntrusiveLinkedList; + +#if VMA_STATS_STRING_ENABLED +class VmaStringBuilder; +class VmaJsonWriter; +#endif + +class VmaDeviceMemoryBlock; + +struct VmaDedicatedAllocationListItemTraits; +class VmaDedicatedAllocationList; + +struct VmaSuballocation; +struct VmaSuballocationOffsetLess; +struct VmaSuballocationOffsetGreater; +struct VmaSuballocationItemSizeLess; + +typedef VmaList> VmaSuballocationList; + +struct VmaAllocationRequest; + +class VmaBlockMetadata; +class VmaBlockMetadata_Linear; +class VmaBlockMetadata_TLSF; + +class VmaBlockVector; + +struct VmaPoolListItemTraits; + +struct VmaCurrentBudgetData; + +class VmaAllocationObjectAllocator; + +#endif // _VMA_FORWARD_DECLARATIONS + + +#ifndef _VMA_FUNCTIONS + +/* +Returns number of bits set to 1 in (v). + +On specific platforms and compilers you can use intrinsics like: + +Visual Studio: + return __popcnt(v); +GCC, Clang: + return static_cast(__builtin_popcount(v)); + +Define macro VMA_COUNT_BITS_SET to provide your optimized implementation. +But you need to check in runtime whether user's CPU supports these, as some old processors don't. 
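+
+For example, with GCC or Clang a custom definition could look like this, placed before the
+#include of this file that also defines VMA_IMPLEMENTATION (an illustrative sketch only):
+
+    #define VMA_COUNT_BITS_SET(v) (static_cast<uint32_t>(__builtin_popcount(v)))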
+*/ +static inline uint32_t VmaCountBitsSet(uint32_t v) +{ +#if VMA_CPP20 + return std::popcount(v); +#else + uint32_t c = v - ((v >> 1) & 0x55555555); + c = ((c >> 2) & 0x33333333) + (c & 0x33333333); + c = ((c >> 4) + c) & 0x0F0F0F0F; + c = ((c >> 8) + c) & 0x00FF00FF; + c = ((c >> 16) + c) & 0x0000FFFF; + return c; +#endif +} + +static inline uint8_t VmaBitScanLSB(uint64_t mask) +{ +#if defined(_MSC_VER) && defined(_WIN64) + unsigned long pos; + if (_BitScanForward64(&pos, mask)) + return static_cast(pos); + return UINT8_MAX; +#elif VMA_CPP20 + if(mask) + return static_cast(std::countr_zero(mask)); + return UINT8_MAX; +#elif defined __GNUC__ || defined __clang__ + return static_cast(__builtin_ffsll(mask)) - 1U; +#else + uint8_t pos = 0; + uint64_t bit = 1; + do + { + if (mask & bit) + return pos; + bit <<= 1; + } while (pos++ < 63); + return UINT8_MAX; +#endif +} + +static inline uint8_t VmaBitScanLSB(uint32_t mask) +{ +#ifdef _MSC_VER + unsigned long pos; + if (_BitScanForward(&pos, mask)) + return static_cast(pos); + return UINT8_MAX; +#elif VMA_CPP20 + if(mask) + return static_cast(std::countr_zero(mask)); + return UINT8_MAX; +#elif defined __GNUC__ || defined __clang__ + return static_cast(__builtin_ffs(mask)) - 1U; +#else + uint8_t pos = 0; + uint32_t bit = 1; + do + { + if (mask & bit) + return pos; + bit <<= 1; + } while (pos++ < 31); + return UINT8_MAX; +#endif +} + +static inline uint8_t VmaBitScanMSB(uint64_t mask) +{ +#if defined(_MSC_VER) && defined(_WIN64) + unsigned long pos; + if (_BitScanReverse64(&pos, mask)) + return static_cast(pos); +#elif VMA_CPP20 + if(mask) + return 63 - static_cast(std::countl_zero(mask)); +#elif defined __GNUC__ || defined __clang__ + if (mask) + return 63 - static_cast(__builtin_clzll(mask)); +#else + uint8_t pos = 63; + uint64_t bit = 1ULL << 63; + do + { + if (mask & bit) + return pos; + bit >>= 1; + } while (pos-- > 0); +#endif + return UINT8_MAX; +} + +static inline uint8_t VmaBitScanMSB(uint32_t mask) +{ +#ifdef _MSC_VER + unsigned long pos; + if (_BitScanReverse(&pos, mask)) + return static_cast(pos); +#elif VMA_CPP20 + if(mask) + return 31 - static_cast(std::countl_zero(mask)); +#elif defined __GNUC__ || defined __clang__ + if (mask) + return 31 - static_cast(__builtin_clz(mask)); +#else + uint8_t pos = 31; + uint32_t bit = 1UL << 31; + do + { + if (mask & bit) + return pos; + bit >>= 1; + } while (pos-- > 0); +#endif + return UINT8_MAX; +} + +/* +Returns true if given number is a power of two. +T must be unsigned integer number or signed integer but always nonnegative. +For 0 returns true. +*/ +template +inline bool VmaIsPow2(T x) +{ + return (x & (x - 1)) == 0; +} + +// Aligns given value up to nearest multiply of align value. For example: VmaAlignUp(11, 8) = 16. +// Use types like uint32_t, uint64_t as T. +template +static inline T VmaAlignUp(T val, T alignment) +{ + VMA_HEAVY_ASSERT(VmaIsPow2(alignment)); + return (val + alignment - 1) & ~(alignment - 1); +} + +// Aligns given value down to nearest multiply of align value. For example: VmaAlignDown(11, 8) = 8. +// Use types like uint32_t, uint64_t as T. +template +static inline T VmaAlignDown(T val, T alignment) +{ + VMA_HEAVY_ASSERT(VmaIsPow2(alignment)); + return val & ~(alignment - 1); +} + +// Division with mathematical rounding to nearest number. +template +static inline T VmaRoundDiv(T x, T y) +{ + return (x + (y / (T)2)) / y; +} + +// Divide by 'y' and round up to nearest integer. 
+template +static inline T VmaDivideRoundingUp(T x, T y) +{ + return (x + y - (T)1) / y; +} + +// Returns smallest power of 2 greater or equal to v. +static inline uint32_t VmaNextPow2(uint32_t v) +{ + v--; + v |= v >> 1; + v |= v >> 2; + v |= v >> 4; + v |= v >> 8; + v |= v >> 16; + v++; + return v; +} + +static inline uint64_t VmaNextPow2(uint64_t v) +{ + v--; + v |= v >> 1; + v |= v >> 2; + v |= v >> 4; + v |= v >> 8; + v |= v >> 16; + v |= v >> 32; + v++; + return v; +} + +// Returns largest power of 2 less or equal to v. +static inline uint32_t VmaPrevPow2(uint32_t v) +{ + v |= v >> 1; + v |= v >> 2; + v |= v >> 4; + v |= v >> 8; + v |= v >> 16; + v = v ^ (v >> 1); + return v; +} + +static inline uint64_t VmaPrevPow2(uint64_t v) +{ + v |= v >> 1; + v |= v >> 2; + v |= v >> 4; + v |= v >> 8; + v |= v >> 16; + v |= v >> 32; + v = v ^ (v >> 1); + return v; +} + +static inline bool VmaStrIsEmpty(const char* pStr) +{ + return pStr == VMA_NULL || *pStr == '\0'; +} + +/* +Returns true if two memory blocks occupy overlapping pages. +ResourceA must be in less memory offset than ResourceB. + +Algorithm is based on "Vulkan 1.0.39 - A Specification (with all registered Vulkan extensions)" +chapter 11.6 "Resource Memory Association", paragraph "Buffer-Image Granularity". +*/ +static inline bool VmaBlocksOnSamePage( + VkDeviceSize resourceAOffset, + VkDeviceSize resourceASize, + VkDeviceSize resourceBOffset, + VkDeviceSize pageSize) +{ + VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0); + VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1; + VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1); + VkDeviceSize resourceBStart = resourceBOffset; + VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1); + return resourceAEndPage == resourceBStartPage; +} + +/* +Returns true if given suballocation types could conflict and must respect +VkPhysicalDeviceLimits::bufferImageGranularity. They conflict if one is buffer +or linear image and another one is optimal image. If type is unknown, behave +conservatively. 
+*/ +static inline bool VmaIsBufferImageGranularityConflict( + VmaSuballocationType suballocType1, + VmaSuballocationType suballocType2) +{ + if (suballocType1 > suballocType2) + { + std::swap(suballocType1, suballocType2); + } + + switch (suballocType1) + { + case VMA_SUBALLOCATION_TYPE_FREE: + return false; + case VMA_SUBALLOCATION_TYPE_UNKNOWN: + return true; + case VMA_SUBALLOCATION_TYPE_BUFFER: + return + suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN || + suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL; + case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN: + return + suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN || + suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR || + suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL; + case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR: + return + suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL; + case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL: + return false; + default: + VMA_ASSERT(0); + return true; + } +} + +static void VmaWriteMagicValue(void* pData, VkDeviceSize offset) +{ +#if VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_DETECT_CORRUPTION + uint32_t* pDst = (uint32_t*)((char*)pData + offset); + const size_t numberCount = VMA_DEBUG_MARGIN / sizeof(uint32_t); + for (size_t i = 0; i < numberCount; ++i, ++pDst) + { + *pDst = VMA_CORRUPTION_DETECTION_MAGIC_VALUE; + } +#else + // no-op +#endif +} + +static bool VmaValidateMagicValue(const void* pData, VkDeviceSize offset) +{ +#if VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_DETECT_CORRUPTION + const uint32_t* pSrc = (const uint32_t*)((const char*)pData + offset); + const size_t numberCount = VMA_DEBUG_MARGIN / sizeof(uint32_t); + for (size_t i = 0; i < numberCount; ++i, ++pSrc) + { + if (*pSrc != VMA_CORRUPTION_DETECTION_MAGIC_VALUE) + { + return false; + } + } +#endif + return true; +} + +/* +Fills structure with parameters of an example buffer to be used for transfers +during GPU memory defragmentation. +*/ +static void VmaFillGpuDefragmentationBufferCreateInfo(VkBufferCreateInfo& outBufCreateInfo) +{ + memset(&outBufCreateInfo, 0, sizeof(outBufCreateInfo)); + outBufCreateInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO; + outBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT; + outBufCreateInfo.size = (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE; // Example size. +} + + +/* +Performs binary search and returns iterator to first element that is greater or +equal to (key), according to comparison (cmp). + +Cmp should return true if first argument is less than second argument. + +Returned value is the found element, if present in the collection or place where +new element with value (key) should be inserted. +*/ +template +static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end, const KeyT& key, const CmpLess& cmp) +{ + size_t down = 0, up = size_t(end - beg); + while (down < up) + { + const size_t mid = down + (up - down) / 2; // Overflow-safe midpoint calculation + if (cmp(*(beg + mid), key)) + { + down = mid + 1; + } + else + { + up = mid; + } + } + return beg + down; +} + +template +IterT VmaBinaryFindSorted(const IterT& beg, const IterT& end, const KeyT& value, const CmpLess& cmp) +{ + IterT it = VmaBinaryFindFirstNotLess( + beg, end, value, cmp); + if (it == end || + (!cmp(*it, value) && !cmp(value, *it))) + { + return it; + } + return end; +} + +/* +Returns true if all pointers in the array are not-null and unique. +Warning! O(n^2) complexity. Use only inside VMA_HEAVY_ASSERT. +T must be pointer type, e.g. VmaAllocation, VmaPool. 
+*/ +template +static bool VmaValidatePointerArray(uint32_t count, const T* arr) +{ + for (uint32_t i = 0; i < count; ++i) + { + const T iPtr = arr[i]; + if (iPtr == VMA_NULL) + { + return false; + } + for (uint32_t j = i + 1; j < count; ++j) + { + if (iPtr == arr[j]) + { + return false; + } + } + } + return true; +} + +template +static inline void VmaPnextChainPushFront(MainT* mainStruct, NewT* newStruct) +{ + newStruct->pNext = mainStruct->pNext; + mainStruct->pNext = newStruct; +} +// Finds structure with s->sType == sType in mainStruct->pNext chain. +// Returns pointer to it. If not found, returns null. +template +static inline const FindT* VmaPnextChainFind(const MainT* mainStruct, VkStructureType sType) +{ + for(const VkBaseInStructure* s = (const VkBaseInStructure*)mainStruct->pNext; + s != VMA_NULL; s = s->pNext) + { + if(s->sType == sType) + { + return (const FindT*)s; + } + } + return VMA_NULL; +} + +// An abstraction over buffer or image `usage` flags, depending on available extensions. +struct VmaBufferImageUsage +{ +#if VMA_KHR_MAINTENANCE5 + typedef uint64_t BaseType; // VkFlags64 +#else + typedef uint32_t BaseType; // VkFlags32 +#endif + + static const VmaBufferImageUsage UNKNOWN; + + BaseType Value; + + VmaBufferImageUsage() { *this = UNKNOWN; } + explicit VmaBufferImageUsage(BaseType usage) : Value(usage) { } + VmaBufferImageUsage(const VkBufferCreateInfo &createInfo, bool useKhrMaintenance5); + explicit VmaBufferImageUsage(const VkImageCreateInfo &createInfo); + + bool operator==(const VmaBufferImageUsage& rhs) const { return Value == rhs.Value; } + bool operator!=(const VmaBufferImageUsage& rhs) const { return Value != rhs.Value; } + + bool Contains(BaseType flag) const { return (Value & flag) != 0; } + bool ContainsDeviceAccess() const + { + // This relies on values of VK_IMAGE_USAGE_TRANSFER* being the same as VK_BUFFER_IMAGE_TRANSFER*. + return (Value & ~BaseType(VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_TRANSFER_SRC_BIT)) != 0; + } +}; + +const VmaBufferImageUsage VmaBufferImageUsage::UNKNOWN = VmaBufferImageUsage(0); + +static void swap(VmaBufferImageUsage& lhs, VmaBufferImageUsage& rhs) noexcept +{ + using std::swap; + swap(lhs.Value, rhs.Value); +} + +VmaBufferImageUsage::VmaBufferImageUsage(const VkBufferCreateInfo &createInfo, + bool useKhrMaintenance5) +{ +#if VMA_KHR_MAINTENANCE5 + if(useKhrMaintenance5) + { + // If VkBufferCreateInfo::pNext chain contains VkBufferUsageFlags2CreateInfoKHR, + // take usage from it and ignore VkBufferCreateInfo::usage, per specification + // of the VK_KHR_maintenance5 extension. + const VkBufferUsageFlags2CreateInfoKHR* const usageFlags2 = + VmaPnextChainFind(&createInfo, VK_STRUCTURE_TYPE_BUFFER_USAGE_FLAGS_2_CREATE_INFO_KHR); + if(usageFlags2) + { + this->Value = usageFlags2->usage; + return; + } + } +#endif + + this->Value = (BaseType)createInfo.usage; +} + +VmaBufferImageUsage::VmaBufferImageUsage(const VkImageCreateInfo &createInfo) +{ + // Maybe in the future there will be VK_KHR_maintenanceN extension with structure + // VkImageUsageFlags2CreateInfoKHR, like the one for buffers... + + this->Value = (BaseType)createInfo.usage; +} + +// This is the main algorithm that guides the selection of a memory type best for an allocation - +// converts usage to required/preferred/not preferred flags. 
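+//
+// As an illustrative example of this mapping (not an additional code path): a readback buffer
+// created with only VK_BUFFER_USAGE_TRANSFER_DST_BIT and an allocation created with
+//
+//     VmaAllocationCreateInfo allocCreateInfo = {};
+//     allocCreateInfo.usage = VMA_MEMORY_USAGE_AUTO;
+//     allocCreateInfo.flags = VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT;
+//
+// has no direct device access (transfer-only usage is excluded by ContainsDeviceAccess()),
+// so the hostAccessRandom branch below requires HOST_VISIBLE and prefers HOST_CACHED memory -
+// a typical CPU-side readback heap.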
+static bool FindMemoryPreferences( + bool isIntegratedGPU, + const VmaAllocationCreateInfo& allocCreateInfo, + VmaBufferImageUsage bufImgUsage, + VkMemoryPropertyFlags& outRequiredFlags, + VkMemoryPropertyFlags& outPreferredFlags, + VkMemoryPropertyFlags& outNotPreferredFlags) +{ + outRequiredFlags = allocCreateInfo.requiredFlags; + outPreferredFlags = allocCreateInfo.preferredFlags; + outNotPreferredFlags = 0; + + switch(allocCreateInfo.usage) + { + case VMA_MEMORY_USAGE_UNKNOWN: + break; + case VMA_MEMORY_USAGE_GPU_ONLY: + if(!isIntegratedGPU || (outPreferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0) + { + outPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT; + } + break; + case VMA_MEMORY_USAGE_CPU_ONLY: + outRequiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT; + break; + case VMA_MEMORY_USAGE_CPU_TO_GPU: + outRequiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT; + if(!isIntegratedGPU || (outPreferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0) + { + outPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT; + } + break; + case VMA_MEMORY_USAGE_GPU_TO_CPU: + outRequiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT; + outPreferredFlags |= VK_MEMORY_PROPERTY_HOST_CACHED_BIT; + break; + case VMA_MEMORY_USAGE_CPU_COPY: + outNotPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT; + break; + case VMA_MEMORY_USAGE_GPU_LAZILY_ALLOCATED: + outRequiredFlags |= VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT; + break; + case VMA_MEMORY_USAGE_AUTO: + case VMA_MEMORY_USAGE_AUTO_PREFER_DEVICE: + case VMA_MEMORY_USAGE_AUTO_PREFER_HOST: + { + if(bufImgUsage == VmaBufferImageUsage::UNKNOWN) + { + VMA_ASSERT(0 && "VMA_MEMORY_USAGE_AUTO* values can only be used with functions like vmaCreateBuffer, vmaCreateImage so that the details of the created resource are known." + " Maybe you use VkBufferUsageFlags2CreateInfoKHR but forgot to use VMA_ALLOCATOR_CREATE_KHR_MAINTENANCE5_BIT?" ); + return false; + } + + const bool deviceAccess = bufImgUsage.ContainsDeviceAccess(); + const bool hostAccessSequentialWrite = (allocCreateInfo.flags & VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT) != 0; + const bool hostAccessRandom = (allocCreateInfo.flags & VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT) != 0; + const bool hostAccessAllowTransferInstead = (allocCreateInfo.flags & VMA_ALLOCATION_CREATE_HOST_ACCESS_ALLOW_TRANSFER_INSTEAD_BIT) != 0; + const bool preferDevice = allocCreateInfo.usage == VMA_MEMORY_USAGE_AUTO_PREFER_DEVICE; + const bool preferHost = allocCreateInfo.usage == VMA_MEMORY_USAGE_AUTO_PREFER_HOST; + + // CPU random access - e.g. a buffer written to or transferred from GPU to read back on CPU. + if(hostAccessRandom) + { + // Prefer cached. Cannot require it, because some platforms don't have it (e.g. Raspberry Pi - see #362)! + outPreferredFlags |= VK_MEMORY_PROPERTY_HOST_CACHED_BIT; + + if (!isIntegratedGPU && deviceAccess && hostAccessAllowTransferInstead && !preferHost) + { + // Nice if it will end up in HOST_VISIBLE, but more importantly prefer DEVICE_LOCAL. + // Omitting HOST_VISIBLE here is intentional. + // In case there is DEVICE_LOCAL | HOST_VISIBLE | HOST_CACHED, it will pick that one. + // Otherwise, this will give same weight to DEVICE_LOCAL as HOST_VISIBLE | HOST_CACHED and select the former if occurs first on the list. + outPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT; + } + else + { + // Always CPU memory. 
+ outRequiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
+ }
+ }
+ // CPU sequential write - may be CPU or host-visible GPU memory, uncached and write-combined.
+ else if(hostAccessSequentialWrite)
+ {
+ // Want uncached and write-combined.
+ outNotPreferredFlags |= VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
+
+ if(!isIntegratedGPU && deviceAccess && hostAccessAllowTransferInstead && !preferHost)
+ {
+ outPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT | VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
+ }
+ else
+ {
+ outRequiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
+ // Direct GPU access, CPU sequential write (e.g. a dynamic uniform buffer updated every frame)
+ if(deviceAccess)
+ {
+ // Could go to CPU memory or GPU BAR/unified. Up to the user to decide. If no preference, choose GPU memory.
+ if(preferHost)
+ outNotPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
+ else
+ outPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
+ }
+ // GPU no direct access, CPU sequential write (e.g. an upload buffer to be transferred to the GPU)
+ else
+ {
+ // Could go to CPU memory or GPU BAR/unified. Up to the user to decide. If no preference, choose CPU memory.
+ if(preferDevice)
+ outPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
+ else
+ outNotPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
+ }
+ }
+ }
+ // No CPU access
+ else
+ {
+ // if(deviceAccess)
+ //
+ // GPU access, no CPU access (e.g. a color attachment image) - prefer GPU memory,
+ // unless there is a clear preference from the user not to do so.
+ //
+ // else:
+ //
+ // No direct GPU access, no CPU access, just transfers.
+ // It may be a staging copy intended for e.g. preserving an image for the next frame (then better GPU memory) or
+ // a "swap file" copy to free some GPU memory (then better CPU memory).
+ // Up to the user to decide. If no preference, assume the former and choose GPU memory.
+
+ if(preferHost)
+ outNotPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
+ else
+ outPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
+ }
+ break;
+ }
+ default:
+ VMA_ASSERT(0);
+ }
+
+ // Avoid DEVICE_COHERENT unless explicitly requested.
+ if(((allocCreateInfo.requiredFlags | allocCreateInfo.preferredFlags) & + (VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY | VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD_COPY)) == 0) + { + outNotPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD_COPY; + } + + return true; +} + +//////////////////////////////////////////////////////////////////////////////// +// Memory allocation + +static void* VmaMalloc(const VkAllocationCallbacks* pAllocationCallbacks, size_t size, size_t alignment) +{ + void* result = VMA_NULL; + if ((pAllocationCallbacks != VMA_NULL) && + (pAllocationCallbacks->pfnAllocation != VMA_NULL)) + { + result = (*pAllocationCallbacks->pfnAllocation)( + pAllocationCallbacks->pUserData, + size, + alignment, + VK_SYSTEM_ALLOCATION_SCOPE_OBJECT); + } + else + { + result = VMA_SYSTEM_ALIGNED_MALLOC(size, alignment); + } + VMA_ASSERT(result != VMA_NULL && "CPU memory allocation failed."); + return result; +} + +static void VmaFree(const VkAllocationCallbacks* pAllocationCallbacks, void* ptr) +{ + if ((pAllocationCallbacks != VMA_NULL) && + (pAllocationCallbacks->pfnFree != VMA_NULL)) + { + (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr); + } + else + { + VMA_SYSTEM_ALIGNED_FREE(ptr); + } +} + +template +static T* VmaAllocate(const VkAllocationCallbacks* pAllocationCallbacks) +{ + return (T*)VmaMalloc(pAllocationCallbacks, sizeof(T), VMA_ALIGN_OF(T)); +} + +template +static T* VmaAllocateArray(const VkAllocationCallbacks* pAllocationCallbacks, size_t count) +{ + return (T*)VmaMalloc(pAllocationCallbacks, sizeof(T) * count, VMA_ALIGN_OF(T)); +} + +#define vma_new(allocator, type) new(VmaAllocate(allocator))(type) + +#define vma_new_array(allocator, type, count) new(VmaAllocateArray((allocator), (count)))(type) + +template +static void vma_delete(const VkAllocationCallbacks* pAllocationCallbacks, T* ptr) +{ + ptr->~T(); + VmaFree(pAllocationCallbacks, ptr); +} + +template +static void vma_delete_array(const VkAllocationCallbacks* pAllocationCallbacks, T* ptr, size_t count) +{ + if (ptr != VMA_NULL) + { + for (size_t i = count; i--; ) + { + ptr[i].~T(); + } + VmaFree(pAllocationCallbacks, ptr); + } +} + +static char* VmaCreateStringCopy(const VkAllocationCallbacks* allocs, const char* srcStr) +{ + if (srcStr != VMA_NULL) + { + const size_t len = strlen(srcStr); + char* const result = vma_new_array(allocs, char, len + 1); + memcpy(result, srcStr, len + 1); + return result; + } + return VMA_NULL; +} + +#if VMA_STATS_STRING_ENABLED +static char* VmaCreateStringCopy(const VkAllocationCallbacks* allocs, const char* srcStr, size_t strLen) +{ + if (srcStr != VMA_NULL) + { + char* const result = vma_new_array(allocs, char, strLen + 1); + memcpy(result, srcStr, strLen); + result[strLen] = '\0'; + return result; + } + return VMA_NULL; +} +#endif // VMA_STATS_STRING_ENABLED + +static void VmaFreeString(const VkAllocationCallbacks* allocs, char* str) +{ + if (str != VMA_NULL) + { + const size_t len = strlen(str); + vma_delete_array(allocs, str, len + 1); + } +} + +template +size_t VmaVectorInsertSorted(VectorT& vector, const typename VectorT::value_type& value) +{ + const size_t indexToInsert = VmaBinaryFindFirstNotLess( + vector.data(), + vector.data() + vector.size(), + value, + CmpLess()) - vector.data(); + VmaVectorInsert(vector, indexToInsert, value); + return indexToInsert; +} + +template +bool VmaVectorRemoveSorted(VectorT& vector, const typename VectorT::value_type& value) +{ + CmpLess comparator; + typename VectorT::iterator it = VmaBinaryFindFirstNotLess( + 
vector.begin(), + vector.end(), + value, + comparator); + if ((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it)) + { + size_t indexToRemove = it - vector.begin(); + VmaVectorRemove(vector, indexToRemove); + return true; + } + return false; +} +#endif // _VMA_FUNCTIONS + +#ifndef _VMA_STATISTICS_FUNCTIONS + +static void VmaClearStatistics(VmaStatistics& outStats) +{ + outStats.blockCount = 0; + outStats.allocationCount = 0; + outStats.blockBytes = 0; + outStats.allocationBytes = 0; +} + +static void VmaAddStatistics(VmaStatistics& inoutStats, const VmaStatistics& src) +{ + inoutStats.blockCount += src.blockCount; + inoutStats.allocationCount += src.allocationCount; + inoutStats.blockBytes += src.blockBytes; + inoutStats.allocationBytes += src.allocationBytes; +} + +static void VmaClearDetailedStatistics(VmaDetailedStatistics& outStats) +{ + VmaClearStatistics(outStats.statistics); + outStats.unusedRangeCount = 0; + outStats.allocationSizeMin = VK_WHOLE_SIZE; + outStats.allocationSizeMax = 0; + outStats.unusedRangeSizeMin = VK_WHOLE_SIZE; + outStats.unusedRangeSizeMax = 0; +} + +static void VmaAddDetailedStatisticsAllocation(VmaDetailedStatistics& inoutStats, VkDeviceSize size) +{ + inoutStats.statistics.allocationCount++; + inoutStats.statistics.allocationBytes += size; + inoutStats.allocationSizeMin = VMA_MIN(inoutStats.allocationSizeMin, size); + inoutStats.allocationSizeMax = VMA_MAX(inoutStats.allocationSizeMax, size); +} + +static void VmaAddDetailedStatisticsUnusedRange(VmaDetailedStatistics& inoutStats, VkDeviceSize size) +{ + inoutStats.unusedRangeCount++; + inoutStats.unusedRangeSizeMin = VMA_MIN(inoutStats.unusedRangeSizeMin, size); + inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, size); +} + +static void VmaAddDetailedStatistics(VmaDetailedStatistics& inoutStats, const VmaDetailedStatistics& src) +{ + VmaAddStatistics(inoutStats.statistics, src.statistics); + inoutStats.unusedRangeCount += src.unusedRangeCount; + inoutStats.allocationSizeMin = VMA_MIN(inoutStats.allocationSizeMin, src.allocationSizeMin); + inoutStats.allocationSizeMax = VMA_MAX(inoutStats.allocationSizeMax, src.allocationSizeMax); + inoutStats.unusedRangeSizeMin = VMA_MIN(inoutStats.unusedRangeSizeMin, src.unusedRangeSizeMin); + inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, src.unusedRangeSizeMax); +} + +#endif // _VMA_STATISTICS_FUNCTIONS + +#ifndef _VMA_MUTEX_LOCK +// Helper RAII class to lock a mutex in constructor and unlock it in destructor (at the end of scope). +struct VmaMutexLock +{ + VMA_CLASS_NO_COPY_NO_MOVE(VmaMutexLock) +public: + VmaMutexLock(VMA_MUTEX& mutex, bool useMutex = true) : + m_pMutex(useMutex ? &mutex : VMA_NULL) + { + if (m_pMutex) { m_pMutex->Lock(); } + } + ~VmaMutexLock() { if (m_pMutex) { m_pMutex->Unlock(); } } + +private: + VMA_MUTEX* m_pMutex; +}; + +// Helper RAII class to lock a RW mutex in constructor and unlock it in destructor (at the end of scope), for reading. +struct VmaMutexLockRead +{ + VMA_CLASS_NO_COPY_NO_MOVE(VmaMutexLockRead) +public: + VmaMutexLockRead(VMA_RW_MUTEX& mutex, bool useMutex) : + m_pMutex(useMutex ? &mutex : VMA_NULL) + { + if (m_pMutex) { m_pMutex->LockRead(); } + } + ~VmaMutexLockRead() { if (m_pMutex) { m_pMutex->UnlockRead(); } } + +private: + VMA_RW_MUTEX* m_pMutex; +}; + +// Helper RAII class to lock a RW mutex in constructor and unlock it in destructor (at the end of scope), for writing. 
+struct VmaMutexLockWrite +{ + VMA_CLASS_NO_COPY_NO_MOVE(VmaMutexLockWrite) +public: + VmaMutexLockWrite(VMA_RW_MUTEX& mutex, bool useMutex) + : m_pMutex(useMutex ? &mutex : VMA_NULL) + { + if (m_pMutex) { m_pMutex->LockWrite(); } + } + ~VmaMutexLockWrite() { if (m_pMutex) { m_pMutex->UnlockWrite(); } } + +private: + VMA_RW_MUTEX* m_pMutex; +}; + +#if VMA_DEBUG_GLOBAL_MUTEX + static VMA_MUTEX gDebugGlobalMutex; + #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true); +#else + #define VMA_DEBUG_GLOBAL_MUTEX_LOCK +#endif +#endif // _VMA_MUTEX_LOCK + +#ifndef _VMA_ATOMIC_TRANSACTIONAL_INCREMENT +// An object that increments given atomic but decrements it back in the destructor unless Commit() is called. +template +struct AtomicTransactionalIncrement +{ +public: + using T = decltype(AtomicT().load()); + + ~AtomicTransactionalIncrement() + { + if(m_Atomic) + --(*m_Atomic); + } + + void Commit() { m_Atomic = VMA_NULL; } + T Increment(AtomicT* atomic) + { + m_Atomic = atomic; + return m_Atomic->fetch_add(1); + } + +private: + AtomicT* m_Atomic = VMA_NULL; +}; +#endif // _VMA_ATOMIC_TRANSACTIONAL_INCREMENT + +#ifndef _VMA_STL_ALLOCATOR +// STL-compatible allocator. +template +struct VmaStlAllocator +{ + const VkAllocationCallbacks* const m_pCallbacks; + typedef T value_type; + + VmaStlAllocator(const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) {} + template + VmaStlAllocator(const VmaStlAllocator& src) : m_pCallbacks(src.m_pCallbacks) {} + VmaStlAllocator(const VmaStlAllocator&) = default; + VmaStlAllocator& operator=(const VmaStlAllocator&) = delete; + + T* allocate(size_t n) { return VmaAllocateArray(m_pCallbacks, n); } + void deallocate(T* p, size_t n) { VmaFree(m_pCallbacks, p); } + + template + bool operator==(const VmaStlAllocator& rhs) const + { + return m_pCallbacks == rhs.m_pCallbacks; + } + template + bool operator!=(const VmaStlAllocator& rhs) const + { + return m_pCallbacks != rhs.m_pCallbacks; + } +}; +#endif // _VMA_STL_ALLOCATOR + +#ifndef _VMA_VECTOR +/* Class with interface compatible with subset of std::vector. +T must be POD because constructors and destructors are not called and memcpy is +used for these objects. */ +template +class VmaVector +{ +public: + typedef T value_type; + typedef T* iterator; + typedef const T* const_iterator; + + VmaVector(const AllocatorT& allocator); + VmaVector(size_t count, const AllocatorT& allocator); + // This version of the constructor is here for compatibility with pre-C++14 std::vector. + // value is unused. 
+ VmaVector(size_t count, const T& value, const AllocatorT& allocator) : VmaVector(count, allocator) {} + VmaVector(const VmaVector& src); + VmaVector& operator=(const VmaVector& rhs); + ~VmaVector() { VmaFree(m_Allocator.m_pCallbacks, m_pArray); } + + bool empty() const { return m_Count == 0; } + size_t size() const { return m_Count; } + T* data() { return m_pArray; } + T& front() { VMA_HEAVY_ASSERT(m_Count > 0); return m_pArray[0]; } + T& back() { VMA_HEAVY_ASSERT(m_Count > 0); return m_pArray[m_Count - 1]; } + const T* data() const { return m_pArray; } + const T& front() const { VMA_HEAVY_ASSERT(m_Count > 0); return m_pArray[0]; } + const T& back() const { VMA_HEAVY_ASSERT(m_Count > 0); return m_pArray[m_Count - 1]; } + + iterator begin() { return m_pArray; } + iterator end() { return m_pArray + m_Count; } + const_iterator cbegin() const { return m_pArray; } + const_iterator cend() const { return m_pArray + m_Count; } + const_iterator begin() const { return cbegin(); } + const_iterator end() const { return cend(); } + + void pop_front() { VMA_HEAVY_ASSERT(m_Count > 0); remove(0); } + void pop_back() { VMA_HEAVY_ASSERT(m_Count > 0); resize(size() - 1); } + void push_front(const T& src) { insert(0, src); } + + void push_back(const T& src); + void reserve(size_t newCapacity, bool freeMemory = false); + void resize(size_t newCount); + void clear() { resize(0); } + void shrink_to_fit(); + void insert(size_t index, const T& src); + void remove(size_t index); + + T& operator[](size_t index) { VMA_HEAVY_ASSERT(index < m_Count); return m_pArray[index]; } + const T& operator[](size_t index) const { VMA_HEAVY_ASSERT(index < m_Count); return m_pArray[index]; } + +private: + AllocatorT m_Allocator; + T* m_pArray; + size_t m_Count; + size_t m_Capacity; +}; + +#ifndef _VMA_VECTOR_FUNCTIONS +template +VmaVector::VmaVector(const AllocatorT& allocator) + : m_Allocator(allocator), + m_pArray(VMA_NULL), + m_Count(0), + m_Capacity(0) {} + +template +VmaVector::VmaVector(size_t count, const AllocatorT& allocator) + : m_Allocator(allocator), + m_pArray(count ? (T*)VmaAllocateArray(allocator.m_pCallbacks, count) : VMA_NULL), + m_Count(count), + m_Capacity(count) {} + +template +VmaVector::VmaVector(const VmaVector& src) + : m_Allocator(src.m_Allocator), + m_pArray(src.m_Count ? (T*)VmaAllocateArray(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL), + m_Count(src.m_Count), + m_Capacity(src.m_Count) +{ + if (m_Count != 0) + { + memcpy(m_pArray, src.m_pArray, m_Count * sizeof(T)); + } +} + +template +VmaVector& VmaVector::operator=(const VmaVector& rhs) +{ + if (&rhs != this) + { + resize(rhs.m_Count); + if (m_Count != 0) + { + memcpy(m_pArray, rhs.m_pArray, m_Count * sizeof(T)); + } + } + return *this; +} + +template +void VmaVector::push_back(const T& src) +{ + const size_t newIndex = size(); + resize(newIndex + 1); + m_pArray[newIndex] = src; +} + +template +void VmaVector::reserve(size_t newCapacity, bool freeMemory) +{ + newCapacity = VMA_MAX(newCapacity, m_Count); + + if ((newCapacity < m_Capacity) && !freeMemory) + { + newCapacity = m_Capacity; + } + + if (newCapacity != m_Capacity) + { + T* const newArray = newCapacity ? 
VmaAllocateArray(m_Allocator, newCapacity) : VMA_NULL; + if (m_Count != 0) + { + memcpy(newArray, m_pArray, m_Count * sizeof(T)); + } + VmaFree(m_Allocator.m_pCallbacks, m_pArray); + m_Capacity = newCapacity; + m_pArray = newArray; + } +} + +template +void VmaVector::resize(size_t newCount) +{ + size_t newCapacity = m_Capacity; + if (newCount > m_Capacity) + { + newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (size_t)8)); + } + + if (newCapacity != m_Capacity) + { + T* const newArray = newCapacity ? VmaAllocateArray(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL; + const size_t elementsToCopy = VMA_MIN(m_Count, newCount); + if (elementsToCopy != 0) + { + memcpy(newArray, m_pArray, elementsToCopy * sizeof(T)); + } + VmaFree(m_Allocator.m_pCallbacks, m_pArray); + m_Capacity = newCapacity; + m_pArray = newArray; + } + + m_Count = newCount; +} + +template +void VmaVector::shrink_to_fit() +{ + if (m_Capacity > m_Count) + { + T* newArray = VMA_NULL; + if (m_Count > 0) + { + newArray = VmaAllocateArray(m_Allocator.m_pCallbacks, m_Count); + memcpy(newArray, m_pArray, m_Count * sizeof(T)); + } + VmaFree(m_Allocator.m_pCallbacks, m_pArray); + m_Capacity = m_Count; + m_pArray = newArray; + } +} + +template +void VmaVector::insert(size_t index, const T& src) +{ + VMA_HEAVY_ASSERT(index <= m_Count); + const size_t oldCount = size(); + resize(oldCount + 1); + if (index < oldCount) + { + memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) * sizeof(T)); + } + m_pArray[index] = src; +} + +template +void VmaVector::remove(size_t index) +{ + VMA_HEAVY_ASSERT(index < m_Count); + const size_t oldCount = size(); + if (index < oldCount - 1) + { + memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) * sizeof(T)); + } + resize(oldCount - 1); +} +#endif // _VMA_VECTOR_FUNCTIONS + +template +static void VmaVectorInsert(VmaVector& vec, size_t index, const T& item) +{ + vec.insert(index, item); +} + +template +static void VmaVectorRemove(VmaVector& vec, size_t index) +{ + vec.remove(index); +} +#endif // _VMA_VECTOR + +#ifndef _VMA_SMALL_VECTOR +/* +This is a vector (a variable-sized array), optimized for the case when the array is small. + +It contains some number of elements in-place, which allows it to avoid heap allocation +when the actual number of elements is below that threshold. This allows normal "small" +cases to be fast without losing generality for large inputs. +*/ +template +class VmaSmallVector +{ +public: + typedef T value_type; + typedef T* iterator; + + VmaSmallVector(const AllocatorT& allocator); + VmaSmallVector(size_t count, const AllocatorT& allocator); + template + VmaSmallVector(const VmaSmallVector&) = delete; + template + VmaSmallVector& operator=(const VmaSmallVector&) = delete; + ~VmaSmallVector() = default; + + bool empty() const { return m_Count == 0; } + size_t size() const { return m_Count; } + T* data() { return m_Count > N ? m_DynamicArray.data() : m_StaticArray; } + T& front() { VMA_HEAVY_ASSERT(m_Count > 0); return data()[0]; } + T& back() { VMA_HEAVY_ASSERT(m_Count > 0); return data()[m_Count - 1]; } + const T* data() const { return m_Count > N ? 
m_DynamicArray.data() : m_StaticArray; } + const T& front() const { VMA_HEAVY_ASSERT(m_Count > 0); return data()[0]; } + const T& back() const { VMA_HEAVY_ASSERT(m_Count > 0); return data()[m_Count - 1]; } + + iterator begin() { return data(); } + iterator end() { return data() + m_Count; } + + void pop_front() { VMA_HEAVY_ASSERT(m_Count > 0); remove(0); } + void pop_back() { VMA_HEAVY_ASSERT(m_Count > 0); resize(size() - 1); } + void push_front(const T& src) { insert(0, src); } + + void push_back(const T& src); + void resize(size_t newCount, bool freeMemory = false); + void clear(bool freeMemory = false); + void insert(size_t index, const T& src); + void remove(size_t index); + + T& operator[](size_t index) { VMA_HEAVY_ASSERT(index < m_Count); return data()[index]; } + const T& operator[](size_t index) const { VMA_HEAVY_ASSERT(index < m_Count); return data()[index]; } + +private: + size_t m_Count; + T m_StaticArray[N]; // Used when m_Size <= N + VmaVector m_DynamicArray; // Used when m_Size > N +}; + +#ifndef _VMA_SMALL_VECTOR_FUNCTIONS +template +VmaSmallVector::VmaSmallVector(const AllocatorT& allocator) + : m_Count(0), + m_DynamicArray(allocator) {} + +template +VmaSmallVector::VmaSmallVector(size_t count, const AllocatorT& allocator) + : m_Count(count), + m_DynamicArray(count > N ? count : 0, allocator) {} + +template +void VmaSmallVector::push_back(const T& src) +{ + const size_t newIndex = size(); + resize(newIndex + 1); + data()[newIndex] = src; +} + +template +void VmaSmallVector::resize(size_t newCount, bool freeMemory) +{ + if (newCount > N && m_Count > N) + { + // Any direction, staying in m_DynamicArray + m_DynamicArray.resize(newCount); + if (freeMemory) + { + m_DynamicArray.shrink_to_fit(); + } + } + else if (newCount > N && m_Count <= N) + { + // Growing, moving from m_StaticArray to m_DynamicArray + m_DynamicArray.resize(newCount); + if (m_Count > 0) + { + memcpy(m_DynamicArray.data(), m_StaticArray, m_Count * sizeof(T)); + } + } + else if (newCount <= N && m_Count > N) + { + // Shrinking, moving from m_DynamicArray to m_StaticArray + if (newCount > 0) + { + memcpy(m_StaticArray, m_DynamicArray.data(), newCount * sizeof(T)); + } + m_DynamicArray.resize(0); + if (freeMemory) + { + m_DynamicArray.shrink_to_fit(); + } + } + else + { + // Any direction, staying in m_StaticArray - nothing to do here + } + m_Count = newCount; +} + +template +void VmaSmallVector::clear(bool freeMemory) +{ + m_DynamicArray.clear(); + if (freeMemory) + { + m_DynamicArray.shrink_to_fit(); + } + m_Count = 0; +} + +template +void VmaSmallVector::insert(size_t index, const T& src) +{ + VMA_HEAVY_ASSERT(index <= m_Count); + const size_t oldCount = size(); + resize(oldCount + 1); + T* const dataPtr = data(); + if (index < oldCount) + { + // I know, this could be more optimal for case where memmove can be memcpy directly from m_StaticArray to m_DynamicArray. + memmove(dataPtr + (index + 1), dataPtr + index, (oldCount - index) * sizeof(T)); + } + dataPtr[index] = src; +} + +template +void VmaSmallVector::remove(size_t index) +{ + VMA_HEAVY_ASSERT(index < m_Count); + const size_t oldCount = size(); + if (index < oldCount - 1) + { + // I know, this could be more optimal for case where memmove can be memcpy directly from m_DynamicArray to m_StaticArray. 
+ T* const dataPtr = data(); + memmove(dataPtr + index, dataPtr + (index + 1), (oldCount - index - 1) * sizeof(T)); + } + resize(oldCount - 1); +} +#endif // _VMA_SMALL_VECTOR_FUNCTIONS +#endif // _VMA_SMALL_VECTOR + +#ifndef _VMA_POOL_ALLOCATOR +/* +Allocator for objects of type T using a list of arrays (pools) to speed up +allocation. Number of elements that can be allocated is not bounded because +allocator can create multiple blocks. +*/ +template +class VmaPoolAllocator +{ + VMA_CLASS_NO_COPY_NO_MOVE(VmaPoolAllocator) +public: + VmaPoolAllocator(const VkAllocationCallbacks* pAllocationCallbacks, uint32_t firstBlockCapacity); + ~VmaPoolAllocator(); + template T* Alloc(Types&&... args); + void Free(T* ptr); + +private: + union Item + { + uint32_t NextFreeIndex; + alignas(T) char Value[sizeof(T)]; + }; + struct ItemBlock + { + Item* pItems; + uint32_t Capacity; + uint32_t FirstFreeIndex; + }; + + const VkAllocationCallbacks* m_pAllocationCallbacks; + const uint32_t m_FirstBlockCapacity; + VmaVector> m_ItemBlocks; + + ItemBlock& CreateNewBlock(); +}; + +#ifndef _VMA_POOL_ALLOCATOR_FUNCTIONS +template +VmaPoolAllocator::VmaPoolAllocator(const VkAllocationCallbacks* pAllocationCallbacks, uint32_t firstBlockCapacity) + : m_pAllocationCallbacks(pAllocationCallbacks), + m_FirstBlockCapacity(firstBlockCapacity), + m_ItemBlocks(VmaStlAllocator(pAllocationCallbacks)) +{ + VMA_ASSERT(m_FirstBlockCapacity > 1); +} + +template +VmaPoolAllocator::~VmaPoolAllocator() +{ + for (size_t i = m_ItemBlocks.size(); i--;) + vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemBlocks[i].Capacity); + m_ItemBlocks.clear(); +} + +template +template T* VmaPoolAllocator::Alloc(Types&&... args) +{ + for (size_t i = m_ItemBlocks.size(); i--; ) + { + ItemBlock& block = m_ItemBlocks[i]; + // This block has some free items: Use first one. + if (block.FirstFreeIndex != UINT32_MAX) + { + Item* const pItem = &block.pItems[block.FirstFreeIndex]; + block.FirstFreeIndex = pItem->NextFreeIndex; + T* result = (T*)&pItem->Value; + new(result)T(std::forward(args)...); // Explicit constructor call. + return result; + } + } + + // No block has free item: Create new one and use it. + ItemBlock& newBlock = CreateNewBlock(); + Item* const pItem = &newBlock.pItems[0]; + newBlock.FirstFreeIndex = pItem->NextFreeIndex; + T* result = (T*)&pItem->Value; + new(result) T(std::forward(args)...); // Explicit constructor call. + return result; +} + +template +void VmaPoolAllocator::Free(T* ptr) +{ + // Search all memory blocks to find ptr. + for (size_t i = m_ItemBlocks.size(); i--; ) + { + ItemBlock& block = m_ItemBlocks[i]; + + // Casting to union. + Item* pItemPtr; + memcpy(&pItemPtr, &ptr, sizeof(pItemPtr)); + + // Check if pItemPtr is in address range of this block. + if ((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + block.Capacity)) + { + ptr->~T(); // Explicit destructor call. + const uint32_t index = static_cast(pItemPtr - block.pItems); + pItemPtr->NextFreeIndex = block.FirstFreeIndex; + block.FirstFreeIndex = index; + return; + } + } + VMA_ASSERT(0 && "Pointer doesn't belong to this memory pool."); +} + +template +typename VmaPoolAllocator::ItemBlock& VmaPoolAllocator::CreateNewBlock() +{ + const uint32_t newBlockCapacity = m_ItemBlocks.empty() ? 
+ m_FirstBlockCapacity : m_ItemBlocks.back().Capacity * 3 / 2; + + const ItemBlock newBlock = + { + vma_new_array(m_pAllocationCallbacks, Item, newBlockCapacity), + newBlockCapacity, + 0 + }; + + m_ItemBlocks.push_back(newBlock); + + // Setup singly-linked list of all free items in this block. + for (uint32_t i = 0; i < newBlockCapacity - 1; ++i) + newBlock.pItems[i].NextFreeIndex = i + 1; + newBlock.pItems[newBlockCapacity - 1].NextFreeIndex = UINT32_MAX; + return m_ItemBlocks.back(); +} +#endif // _VMA_POOL_ALLOCATOR_FUNCTIONS +#endif // _VMA_POOL_ALLOCATOR + +#ifndef _VMA_RAW_LIST +template +struct VmaListItem +{ + VmaListItem* pPrev; + VmaListItem* pNext; + T Value; +}; + +// Doubly linked list. +template +class VmaRawList +{ + VMA_CLASS_NO_COPY_NO_MOVE(VmaRawList) +public: + typedef VmaListItem ItemType; + + VmaRawList(const VkAllocationCallbacks* pAllocationCallbacks); + // Intentionally not calling Clear, because that would be unnecessary + // computations to return all items to m_ItemAllocator as free. + ~VmaRawList() = default; + + size_t GetCount() const { return m_Count; } + bool IsEmpty() const { return m_Count == 0; } + + ItemType* Front() { return m_pFront; } + ItemType* Back() { return m_pBack; } + const ItemType* Front() const { return m_pFront; } + const ItemType* Back() const { return m_pBack; } + + ItemType* PushFront(); + ItemType* PushBack(); + ItemType* PushFront(const T& value); + ItemType* PushBack(const T& value); + void PopFront(); + void PopBack(); + + // Item can be null - it means PushBack. + ItemType* InsertBefore(ItemType* pItem); + // Item can be null - it means PushFront. + ItemType* InsertAfter(ItemType* pItem); + ItemType* InsertBefore(ItemType* pItem, const T& value); + ItemType* InsertAfter(ItemType* pItem, const T& value); + + void Clear(); + void Remove(ItemType* pItem); + +private: + const VkAllocationCallbacks* const m_pAllocationCallbacks; + VmaPoolAllocator m_ItemAllocator; + ItemType* m_pFront; + ItemType* m_pBack; + size_t m_Count; +}; + +#ifndef _VMA_RAW_LIST_FUNCTIONS +template +VmaRawList::VmaRawList(const VkAllocationCallbacks* pAllocationCallbacks) + : m_pAllocationCallbacks(pAllocationCallbacks), + m_ItemAllocator(pAllocationCallbacks, 128), + m_pFront(VMA_NULL), + m_pBack(VMA_NULL), + m_Count(0) {} + +template +VmaListItem* VmaRawList::PushFront() +{ + ItemType* const pNewItem = m_ItemAllocator.Alloc(); + pNewItem->pPrev = VMA_NULL; + if (IsEmpty()) + { + pNewItem->pNext = VMA_NULL; + m_pFront = pNewItem; + m_pBack = pNewItem; + m_Count = 1; + } + else + { + pNewItem->pNext = m_pFront; + m_pFront->pPrev = pNewItem; + m_pFront = pNewItem; + ++m_Count; + } + return pNewItem; +} + +template +VmaListItem* VmaRawList::PushBack() +{ + ItemType* const pNewItem = m_ItemAllocator.Alloc(); + pNewItem->pNext = VMA_NULL; + if(IsEmpty()) + { + pNewItem->pPrev = VMA_NULL; + m_pFront = pNewItem; + m_pBack = pNewItem; + m_Count = 1; + } + else + { + pNewItem->pPrev = m_pBack; + m_pBack->pNext = pNewItem; + m_pBack = pNewItem; + ++m_Count; + } + return pNewItem; +} + +template +VmaListItem* VmaRawList::PushFront(const T& value) +{ + ItemType* const pNewItem = PushFront(); + pNewItem->Value = value; + return pNewItem; +} + +template +VmaListItem* VmaRawList::PushBack(const T& value) +{ + ItemType* const pNewItem = PushBack(); + pNewItem->Value = value; + return pNewItem; +} + +template +void VmaRawList::PopFront() +{ + VMA_HEAVY_ASSERT(m_Count > 0); + ItemType* const pFrontItem = m_pFront; + ItemType* const pNextItem = pFrontItem->pNext; + if (pNextItem != 
VMA_NULL) + { + pNextItem->pPrev = VMA_NULL; + } + m_pFront = pNextItem; + m_ItemAllocator.Free(pFrontItem); + --m_Count; +} + +template +void VmaRawList::PopBack() +{ + VMA_HEAVY_ASSERT(m_Count > 0); + ItemType* const pBackItem = m_pBack; + ItemType* const pPrevItem = pBackItem->pPrev; + if(pPrevItem != VMA_NULL) + { + pPrevItem->pNext = VMA_NULL; + } + m_pBack = pPrevItem; + m_ItemAllocator.Free(pBackItem); + --m_Count; +} + +template +void VmaRawList::Clear() +{ + if (IsEmpty() == false) + { + ItemType* pItem = m_pBack; + while (pItem != VMA_NULL) + { + ItemType* const pPrevItem = pItem->pPrev; + m_ItemAllocator.Free(pItem); + pItem = pPrevItem; + } + m_pFront = VMA_NULL; + m_pBack = VMA_NULL; + m_Count = 0; + } +} + +template +void VmaRawList::Remove(ItemType* pItem) +{ + VMA_HEAVY_ASSERT(pItem != VMA_NULL); + VMA_HEAVY_ASSERT(m_Count > 0); + + if(pItem->pPrev != VMA_NULL) + { + pItem->pPrev->pNext = pItem->pNext; + } + else + { + VMA_HEAVY_ASSERT(m_pFront == pItem); + m_pFront = pItem->pNext; + } + + if(pItem->pNext != VMA_NULL) + { + pItem->pNext->pPrev = pItem->pPrev; + } + else + { + VMA_HEAVY_ASSERT(m_pBack == pItem); + m_pBack = pItem->pPrev; + } + + m_ItemAllocator.Free(pItem); + --m_Count; +} + +template +VmaListItem* VmaRawList::InsertBefore(ItemType* pItem) +{ + if(pItem != VMA_NULL) + { + ItemType* const prevItem = pItem->pPrev; + ItemType* const newItem = m_ItemAllocator.Alloc(); + newItem->pPrev = prevItem; + newItem->pNext = pItem; + pItem->pPrev = newItem; + if(prevItem != VMA_NULL) + { + prevItem->pNext = newItem; + } + else + { + VMA_HEAVY_ASSERT(m_pFront == pItem); + m_pFront = newItem; + } + ++m_Count; + return newItem; + } + else + return PushBack(); +} + +template +VmaListItem* VmaRawList::InsertAfter(ItemType* pItem) +{ + if(pItem != VMA_NULL) + { + ItemType* const nextItem = pItem->pNext; + ItemType* const newItem = m_ItemAllocator.Alloc(); + newItem->pNext = nextItem; + newItem->pPrev = pItem; + pItem->pNext = newItem; + if(nextItem != VMA_NULL) + { + nextItem->pPrev = newItem; + } + else + { + VMA_HEAVY_ASSERT(m_pBack == pItem); + m_pBack = newItem; + } + ++m_Count; + return newItem; + } + else + return PushFront(); +} + +template +VmaListItem* VmaRawList::InsertBefore(ItemType* pItem, const T& value) +{ + ItemType* const newItem = InsertBefore(pItem); + newItem->Value = value; + return newItem; +} + +template +VmaListItem* VmaRawList::InsertAfter(ItemType* pItem, const T& value) +{ + ItemType* const newItem = InsertAfter(pItem); + newItem->Value = value; + return newItem; +} +#endif // _VMA_RAW_LIST_FUNCTIONS +#endif // _VMA_RAW_LIST + +#ifndef _VMA_LIST +template +class VmaList +{ + VMA_CLASS_NO_COPY_NO_MOVE(VmaList) +public: + class reverse_iterator; + class const_iterator; + class const_reverse_iterator; + + class iterator + { + friend class const_iterator; + friend class VmaList; + public: + iterator() : m_pList(VMA_NULL), m_pItem(VMA_NULL) {} + iterator(const reverse_iterator& src) : m_pList(src.m_pList), m_pItem(src.m_pItem) {} + + T& operator*() const { VMA_HEAVY_ASSERT(m_pItem != VMA_NULL); return m_pItem->Value; } + T* operator->() const { VMA_HEAVY_ASSERT(m_pItem != VMA_NULL); return &m_pItem->Value; } + + bool operator==(const iterator& rhs) const { VMA_HEAVY_ASSERT(m_pList == rhs.m_pList); return m_pItem == rhs.m_pItem; } + bool operator!=(const iterator& rhs) const { VMA_HEAVY_ASSERT(m_pList == rhs.m_pList); return m_pItem != rhs.m_pItem; } + + iterator operator++(int) { iterator result = *this; ++*this; return result; } + iterator operator--(int) 
{ iterator result = *this; --*this; return result; } + + iterator& operator++() { VMA_HEAVY_ASSERT(m_pItem != VMA_NULL); m_pItem = m_pItem->pNext; return *this; } + iterator& operator--(); + + private: + VmaRawList* m_pList; + VmaListItem* m_pItem; + + iterator(VmaRawList* pList, VmaListItem* pItem) : m_pList(pList), m_pItem(pItem) {} + }; + class reverse_iterator + { + friend class const_reverse_iterator; + friend class VmaList; + public: + reverse_iterator() : m_pList(VMA_NULL), m_pItem(VMA_NULL) {} + reverse_iterator(const iterator& src) : m_pList(src.m_pList), m_pItem(src.m_pItem) {} + + T& operator*() const { VMA_HEAVY_ASSERT(m_pItem != VMA_NULL); return m_pItem->Value; } + T* operator->() const { VMA_HEAVY_ASSERT(m_pItem != VMA_NULL); return &m_pItem->Value; } + + bool operator==(const reverse_iterator& rhs) const { VMA_HEAVY_ASSERT(m_pList == rhs.m_pList); return m_pItem == rhs.m_pItem; } + bool operator!=(const reverse_iterator& rhs) const { VMA_HEAVY_ASSERT(m_pList == rhs.m_pList); return m_pItem != rhs.m_pItem; } + + reverse_iterator operator++(int) { reverse_iterator result = *this; ++* this; return result; } + reverse_iterator operator--(int) { reverse_iterator result = *this; --* this; return result; } + + reverse_iterator& operator++() { VMA_HEAVY_ASSERT(m_pItem != VMA_NULL); m_pItem = m_pItem->pPrev; return *this; } + reverse_iterator& operator--(); + + private: + VmaRawList* m_pList; + VmaListItem* m_pItem; + + reverse_iterator(VmaRawList* pList, VmaListItem* pItem) : m_pList(pList), m_pItem(pItem) {} + }; + class const_iterator + { + friend class VmaList; + public: + const_iterator() : m_pList(VMA_NULL), m_pItem(VMA_NULL) {} + const_iterator(const iterator& src) : m_pList(src.m_pList), m_pItem(src.m_pItem) {} + const_iterator(const reverse_iterator& src) : m_pList(src.m_pList), m_pItem(src.m_pItem) {} + + iterator drop_const() { return { const_cast*>(m_pList), const_cast*>(m_pItem) }; } + + const T& operator*() const { VMA_HEAVY_ASSERT(m_pItem != VMA_NULL); return m_pItem->Value; } + const T* operator->() const { VMA_HEAVY_ASSERT(m_pItem != VMA_NULL); return &m_pItem->Value; } + + bool operator==(const const_iterator& rhs) const { VMA_HEAVY_ASSERT(m_pList == rhs.m_pList); return m_pItem == rhs.m_pItem; } + bool operator!=(const const_iterator& rhs) const { VMA_HEAVY_ASSERT(m_pList == rhs.m_pList); return m_pItem != rhs.m_pItem; } + + const_iterator operator++(int) { const_iterator result = *this; ++* this; return result; } + const_iterator operator--(int) { const_iterator result = *this; --* this; return result; } + + const_iterator& operator++() { VMA_HEAVY_ASSERT(m_pItem != VMA_NULL); m_pItem = m_pItem->pNext; return *this; } + const_iterator& operator--(); + + private: + const VmaRawList* m_pList; + const VmaListItem* m_pItem; + + const_iterator(const VmaRawList* pList, const VmaListItem* pItem) : m_pList(pList), m_pItem(pItem) {} + }; + class const_reverse_iterator + { + friend class VmaList; + public: + const_reverse_iterator() : m_pList(VMA_NULL), m_pItem(VMA_NULL) {} + const_reverse_iterator(const reverse_iterator& src) : m_pList(src.m_pList), m_pItem(src.m_pItem) {} + const_reverse_iterator(const iterator& src) : m_pList(src.m_pList), m_pItem(src.m_pItem) {} + + reverse_iterator drop_const() { return { const_cast*>(m_pList), const_cast*>(m_pItem) }; } + + const T& operator*() const { VMA_HEAVY_ASSERT(m_pItem != VMA_NULL); return m_pItem->Value; } + const T* operator->() const { VMA_HEAVY_ASSERT(m_pItem != VMA_NULL); return &m_pItem->Value; } + + bool 
operator==(const const_reverse_iterator& rhs) const { VMA_HEAVY_ASSERT(m_pList == rhs.m_pList); return m_pItem == rhs.m_pItem; } + bool operator!=(const const_reverse_iterator& rhs) const { VMA_HEAVY_ASSERT(m_pList == rhs.m_pList); return m_pItem != rhs.m_pItem; } + + const_reverse_iterator operator++(int) { const_reverse_iterator result = *this; ++* this; return result; } + const_reverse_iterator operator--(int) { const_reverse_iterator result = *this; --* this; return result; } + + const_reverse_iterator& operator++() { VMA_HEAVY_ASSERT(m_pItem != VMA_NULL); m_pItem = m_pItem->pPrev; return *this; } + const_reverse_iterator& operator--(); + + private: + const VmaRawList* m_pList; + const VmaListItem* m_pItem; + + const_reverse_iterator(const VmaRawList* pList, const VmaListItem* pItem) : m_pList(pList), m_pItem(pItem) {} + }; + + VmaList(const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) {} + + bool empty() const { return m_RawList.IsEmpty(); } + size_t size() const { return m_RawList.GetCount(); } + + iterator begin() { return iterator(&m_RawList, m_RawList.Front()); } + iterator end() { return iterator(&m_RawList, VMA_NULL); } + + const_iterator cbegin() const { return const_iterator(&m_RawList, m_RawList.Front()); } + const_iterator cend() const { return const_iterator(&m_RawList, VMA_NULL); } + + const_iterator begin() const { return cbegin(); } + const_iterator end() const { return cend(); } + + reverse_iterator rbegin() { return reverse_iterator(&m_RawList, m_RawList.Back()); } + reverse_iterator rend() { return reverse_iterator(&m_RawList, VMA_NULL); } + + const_reverse_iterator crbegin() const { return const_reverse_iterator(&m_RawList, m_RawList.Back()); } + const_reverse_iterator crend() const { return const_reverse_iterator(&m_RawList, VMA_NULL); } + + const_reverse_iterator rbegin() const { return crbegin(); } + const_reverse_iterator rend() const { return crend(); } + + void push_back(const T& value) { m_RawList.PushBack(value); } + iterator insert(iterator it, const T& value) { return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); } + + void clear() { m_RawList.Clear(); } + void erase(iterator it) { m_RawList.Remove(it.m_pItem); } + +private: + VmaRawList m_RawList; +}; + +#ifndef _VMA_LIST_FUNCTIONS +template +typename VmaList::iterator& VmaList::iterator::operator--() +{ + if (m_pItem != VMA_NULL) + { + m_pItem = m_pItem->pPrev; + } + else + { + VMA_HEAVY_ASSERT(!m_pList->IsEmpty()); + m_pItem = m_pList->Back(); + } + return *this; +} + +template +typename VmaList::reverse_iterator& VmaList::reverse_iterator::operator--() +{ + if (m_pItem != VMA_NULL) + { + m_pItem = m_pItem->pNext; + } + else + { + VMA_HEAVY_ASSERT(!m_pList->IsEmpty()); + m_pItem = m_pList->Front(); + } + return *this; +} + +template +typename VmaList::const_iterator& VmaList::const_iterator::operator--() +{ + if (m_pItem != VMA_NULL) + { + m_pItem = m_pItem->pPrev; + } + else + { + VMA_HEAVY_ASSERT(!m_pList->IsEmpty()); + m_pItem = m_pList->Back(); + } + return *this; +} + +template +typename VmaList::const_reverse_iterator& VmaList::const_reverse_iterator::operator--() +{ + if (m_pItem != VMA_NULL) + { + m_pItem = m_pItem->pNext; + } + else + { + VMA_HEAVY_ASSERT(!m_pList->IsEmpty()); + m_pItem = m_pList->Back(); + } + return *this; +} +#endif // _VMA_LIST_FUNCTIONS +#endif // _VMA_LIST + +#ifndef _VMA_INTRUSIVE_LINKED_LIST +/* +Expected interface of ItemTypeTraits: +struct MyItemTypeTraits +{ + typedef MyItem ItemType; + static ItemType* GetPrev(const ItemType* item) { 
return item->myPrevPtr; } + static ItemType* GetNext(const ItemType* item) { return item->myNextPtr; } + static ItemType*& AccessPrev(ItemType* item) { return item->myPrevPtr; } + static ItemType*& AccessNext(ItemType* item) { return item->myNextPtr; } +}; +*/ +template +class VmaIntrusiveLinkedList +{ +public: + typedef typename ItemTypeTraits::ItemType ItemType; + static ItemType* GetPrev(const ItemType* item) { return ItemTypeTraits::GetPrev(item); } + static ItemType* GetNext(const ItemType* item) { return ItemTypeTraits::GetNext(item); } + + // Movable, not copyable. + VmaIntrusiveLinkedList() = default; + VmaIntrusiveLinkedList(VmaIntrusiveLinkedList && src); + VmaIntrusiveLinkedList(const VmaIntrusiveLinkedList&) = delete; + VmaIntrusiveLinkedList& operator=(VmaIntrusiveLinkedList&& src); + VmaIntrusiveLinkedList& operator=(const VmaIntrusiveLinkedList&) = delete; + ~VmaIntrusiveLinkedList() { VMA_HEAVY_ASSERT(IsEmpty()); } + + size_t GetCount() const { return m_Count; } + bool IsEmpty() const { return m_Count == 0; } + ItemType* Front() { return m_Front; } + ItemType* Back() { return m_Back; } + const ItemType* Front() const { return m_Front; } + const ItemType* Back() const { return m_Back; } + + void PushBack(ItemType* item); + void PushFront(ItemType* item); + ItemType* PopBack(); + ItemType* PopFront(); + + // MyItem can be null - it means PushBack. + void InsertBefore(ItemType* existingItem, ItemType* newItem); + // MyItem can be null - it means PushFront. + void InsertAfter(ItemType* existingItem, ItemType* newItem); + void Remove(ItemType* item); + void RemoveAll(); + +private: + ItemType* m_Front = VMA_NULL; + ItemType* m_Back = VMA_NULL; + size_t m_Count = 0; +}; + +#ifndef _VMA_INTRUSIVE_LINKED_LIST_FUNCTIONS +template +VmaIntrusiveLinkedList::VmaIntrusiveLinkedList(VmaIntrusiveLinkedList&& src) + : m_Front(src.m_Front), m_Back(src.m_Back), m_Count(src.m_Count) +{ + src.m_Front = src.m_Back = VMA_NULL; + src.m_Count = 0; +} + +template +VmaIntrusiveLinkedList& VmaIntrusiveLinkedList::operator=(VmaIntrusiveLinkedList&& src) +{ + if (&src != this) + { + VMA_HEAVY_ASSERT(IsEmpty()); + m_Front = src.m_Front; + m_Back = src.m_Back; + m_Count = src.m_Count; + src.m_Front = src.m_Back = VMA_NULL; + src.m_Count = 0; + } + return *this; +} + +template +void VmaIntrusiveLinkedList::PushBack(ItemType* item) +{ + VMA_HEAVY_ASSERT(ItemTypeTraits::GetPrev(item) == VMA_NULL && ItemTypeTraits::GetNext(item) == VMA_NULL); + if (IsEmpty()) + { + m_Front = item; + m_Back = item; + m_Count = 1; + } + else + { + ItemTypeTraits::AccessPrev(item) = m_Back; + ItemTypeTraits::AccessNext(m_Back) = item; + m_Back = item; + ++m_Count; + } +} + +template +void VmaIntrusiveLinkedList::PushFront(ItemType* item) +{ + VMA_HEAVY_ASSERT(ItemTypeTraits::GetPrev(item) == VMA_NULL && ItemTypeTraits::GetNext(item) == VMA_NULL); + if (IsEmpty()) + { + m_Front = item; + m_Back = item; + m_Count = 1; + } + else + { + ItemTypeTraits::AccessNext(item) = m_Front; + ItemTypeTraits::AccessPrev(m_Front) = item; + m_Front = item; + ++m_Count; + } +} + +template +typename VmaIntrusiveLinkedList::ItemType* VmaIntrusiveLinkedList::PopBack() +{ + VMA_HEAVY_ASSERT(m_Count > 0); + ItemType* const backItem = m_Back; + ItemType* const prevItem = ItemTypeTraits::GetPrev(backItem); + if (prevItem != VMA_NULL) + { + ItemTypeTraits::AccessNext(prevItem) = VMA_NULL; + } + m_Back = prevItem; + --m_Count; + ItemTypeTraits::AccessPrev(backItem) = VMA_NULL; + ItemTypeTraits::AccessNext(backItem) = VMA_NULL; + return backItem; +} + 
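+// Illustrative usage sketch for VmaIntrusiveLinkedList, mirroring the
+// "Expected interface of ItemTypeTraits" comment above. MyItem and
+// MyItemTypeTraits are hypothetical names, not part of this library;
+// the list never owns the items, it only links caller-owned objects,
+// and it must be emptied before it is destroyed.
+//
+//     struct MyItem
+//     {
+//         MyItem* myPrevPtr = VMA_NULL;
+//         MyItem* myNextPtr = VMA_NULL;
+//     };
+//     struct MyItemTypeTraits
+//     {
+//         typedef MyItem ItemType;
+//         static ItemType* GetPrev(const ItemType* item) { return item->myPrevPtr; }
+//         static ItemType* GetNext(const ItemType* item) { return item->myNextPtr; }
+//         static ItemType*& AccessPrev(ItemType* item) { return item->myPrevPtr; }
+//         static ItemType*& AccessNext(ItemType* item) { return item->myNextPtr; }
+//     };
+//
+//     VmaIntrusiveLinkedList<MyItemTypeTraits> list;
+//     MyItem a, b;
+//     list.PushBack(&a);
+//     list.PushFront(&b); // order is now: b, a
+//     while (!list.IsEmpty())
+//         list.PopBack(); // unlink everything before the list goes out of scope
+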
+template +typename VmaIntrusiveLinkedList::ItemType* VmaIntrusiveLinkedList::PopFront() +{ + VMA_HEAVY_ASSERT(m_Count > 0); + ItemType* const frontItem = m_Front; + ItemType* const nextItem = ItemTypeTraits::GetNext(frontItem); + if (nextItem != VMA_NULL) + { + ItemTypeTraits::AccessPrev(nextItem) = VMA_NULL; + } + m_Front = nextItem; + --m_Count; + ItemTypeTraits::AccessPrev(frontItem) = VMA_NULL; + ItemTypeTraits::AccessNext(frontItem) = VMA_NULL; + return frontItem; +} + +template +void VmaIntrusiveLinkedList::InsertBefore(ItemType* existingItem, ItemType* newItem) +{ + VMA_HEAVY_ASSERT(newItem != VMA_NULL && ItemTypeTraits::GetPrev(newItem) == VMA_NULL && ItemTypeTraits::GetNext(newItem) == VMA_NULL); + if (existingItem != VMA_NULL) + { + ItemType* const prevItem = ItemTypeTraits::GetPrev(existingItem); + ItemTypeTraits::AccessPrev(newItem) = prevItem; + ItemTypeTraits::AccessNext(newItem) = existingItem; + ItemTypeTraits::AccessPrev(existingItem) = newItem; + if (prevItem != VMA_NULL) + { + ItemTypeTraits::AccessNext(prevItem) = newItem; + } + else + { + VMA_HEAVY_ASSERT(m_Front == existingItem); + m_Front = newItem; + } + ++m_Count; + } + else + PushBack(newItem); +} + +template +void VmaIntrusiveLinkedList::InsertAfter(ItemType* existingItem, ItemType* newItem) +{ + VMA_HEAVY_ASSERT(newItem != VMA_NULL && ItemTypeTraits::GetPrev(newItem) == VMA_NULL && ItemTypeTraits::GetNext(newItem) == VMA_NULL); + if (existingItem != VMA_NULL) + { + ItemType* const nextItem = ItemTypeTraits::GetNext(existingItem); + ItemTypeTraits::AccessNext(newItem) = nextItem; + ItemTypeTraits::AccessPrev(newItem) = existingItem; + ItemTypeTraits::AccessNext(existingItem) = newItem; + if (nextItem != VMA_NULL) + { + ItemTypeTraits::AccessPrev(nextItem) = newItem; + } + else + { + VMA_HEAVY_ASSERT(m_Back == existingItem); + m_Back = newItem; + } + ++m_Count; + } + else + return PushFront(newItem); +} + +template +void VmaIntrusiveLinkedList::Remove(ItemType* item) +{ + VMA_HEAVY_ASSERT(item != VMA_NULL && m_Count > 0); + if (ItemTypeTraits::GetPrev(item) != VMA_NULL) + { + ItemTypeTraits::AccessNext(ItemTypeTraits::AccessPrev(item)) = ItemTypeTraits::GetNext(item); + } + else + { + VMA_HEAVY_ASSERT(m_Front == item); + m_Front = ItemTypeTraits::GetNext(item); + } + + if (ItemTypeTraits::GetNext(item) != VMA_NULL) + { + ItemTypeTraits::AccessPrev(ItemTypeTraits::AccessNext(item)) = ItemTypeTraits::GetPrev(item); + } + else + { + VMA_HEAVY_ASSERT(m_Back == item); + m_Back = ItemTypeTraits::GetPrev(item); + } + ItemTypeTraits::AccessPrev(item) = VMA_NULL; + ItemTypeTraits::AccessNext(item) = VMA_NULL; + --m_Count; +} + +template +void VmaIntrusiveLinkedList::RemoveAll() +{ + if (!IsEmpty()) + { + ItemType* item = m_Back; + while (item != VMA_NULL) + { + ItemType* const prevItem = ItemTypeTraits::AccessPrev(item); + ItemTypeTraits::AccessPrev(item) = VMA_NULL; + ItemTypeTraits::AccessNext(item) = VMA_NULL; + item = prevItem; + } + m_Front = VMA_NULL; + m_Back = VMA_NULL; + m_Count = 0; + } +} +#endif // _VMA_INTRUSIVE_LINKED_LIST_FUNCTIONS +#endif // _VMA_INTRUSIVE_LINKED_LIST + +#if !defined(_VMA_STRING_BUILDER) && VMA_STATS_STRING_ENABLED +class VmaStringBuilder +{ +public: + VmaStringBuilder(const VkAllocationCallbacks* allocationCallbacks) : m_Data(VmaStlAllocator(allocationCallbacks)) {} + ~VmaStringBuilder() = default; + + size_t GetLength() const { return m_Data.size(); } + const char* GetData() const { return m_Data.data(); } + void AddNewLine() { Add('\n'); } + void Add(char ch) { m_Data.push_back(ch); } + 
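+ // Illustrative usage sketch (assumes a VkAllocationCallbacks pointer named
+ // pAllocationCallbacks is in scope, as elsewhere in this file; the buffer
+ // returned by GetData() is not null-terminated, so pass its length explicitly):
+ //     VmaStringBuilder sb(pAllocationCallbacks);
+ //     sb.Add("blockCount = ");
+ //     sb.AddNumber(7u);
+ //     sb.AddNewLine();
+ //     printf("%.*s", (int)sb.GetLength(), sb.GetData());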
+ void Add(const char* pStr); + void AddNumber(uint32_t num); + void AddNumber(uint64_t num); + void AddPointer(const void* ptr); + +private: + VmaVector> m_Data; +}; + +#ifndef _VMA_STRING_BUILDER_FUNCTIONS +void VmaStringBuilder::Add(const char* pStr) +{ + const size_t strLen = strlen(pStr); + if (strLen > 0) + { + const size_t oldCount = m_Data.size(); + m_Data.resize(oldCount + strLen); + memcpy(m_Data.data() + oldCount, pStr, strLen); + } +} + +void VmaStringBuilder::AddNumber(uint32_t num) +{ + char buf[11]; + buf[10] = '\0'; + char* p = &buf[10]; + do + { + *--p = '0' + (char)(num % 10); + num /= 10; + } while (num); + Add(p); +} + +void VmaStringBuilder::AddNumber(uint64_t num) +{ + char buf[21]; + buf[20] = '\0'; + char* p = &buf[20]; + do + { + *--p = '0' + (char)(num % 10); + num /= 10; + } while (num); + Add(p); +} + +void VmaStringBuilder::AddPointer(const void* ptr) +{ + char buf[21]; + VmaPtrToStr(buf, sizeof(buf), ptr); + Add(buf); +} +#endif //_VMA_STRING_BUILDER_FUNCTIONS +#endif // _VMA_STRING_BUILDER + +#if !defined(_VMA_JSON_WRITER) && VMA_STATS_STRING_ENABLED +/* +Allows to conveniently build a correct JSON document to be written to the +VmaStringBuilder passed to the constructor. +*/ +class VmaJsonWriter +{ + VMA_CLASS_NO_COPY_NO_MOVE(VmaJsonWriter) +public: + // sb - string builder to write the document to. Must remain alive for the whole lifetime of this object. + VmaJsonWriter(const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb); + ~VmaJsonWriter(); + + // Begins object by writing "{". + // Inside an object, you must call pairs of WriteString and a value, e.g.: + // j.BeginObject(true); j.WriteString("A"); j.WriteNumber(1); j.WriteString("B"); j.WriteNumber(2); j.EndObject(); + // Will write: { "A": 1, "B": 2 } + void BeginObject(bool singleLine = false); + // Ends object by writing "}". + void EndObject(); + + // Begins array by writing "[". + // Inside an array, you can write a sequence of any values. + void BeginArray(bool singleLine = false); + // Ends array by writing "[". + void EndArray(); + + // Writes a string value inside "". + // pStr can contain any ANSI characters, including '"', new line etc. - they will be properly escaped. + void WriteString(const char* pStr); + + // Begins writing a string value. + // Call BeginString, ContinueString, ContinueString, ..., EndString instead of + // WriteString to conveniently build the string content incrementally, made of + // parts including numbers. + void BeginString(const char* pStr = VMA_NULL); + // Posts next part of an open string. + void ContinueString(const char* pStr); + // Posts next part of an open string. The number is converted to decimal characters. + void ContinueString(uint32_t n); + void ContinueString(uint64_t n); + // Posts next part of an open string. Pointer value is converted to characters + // using "%p" formatting - shown as hexadecimal number, e.g.: 000000081276Ad00 + void ContinueString_Pointer(const void* ptr); + // Ends writing a string value by writing '"'. + void EndString(const char* pStr = VMA_NULL); + + // Writes a number value. + void WriteNumber(uint32_t n); + void WriteNumber(uint64_t n); + // Writes a boolean value - false or true. + void WriteBool(bool b); + // Writes a null value. 
+ void WriteNull(); + +private: + enum COLLECTION_TYPE + { + COLLECTION_TYPE_OBJECT, + COLLECTION_TYPE_ARRAY, + }; + struct StackItem + { + COLLECTION_TYPE type; + uint32_t valueCount; + bool singleLineMode; + }; + + static const char* const INDENT; + + VmaStringBuilder& m_SB; + VmaVector< StackItem, VmaStlAllocator > m_Stack; + bool m_InsideString; + + void BeginValue(bool isString); + void WriteIndent(bool oneLess = false); +}; +const char* const VmaJsonWriter::INDENT = " "; + +#ifndef _VMA_JSON_WRITER_FUNCTIONS +VmaJsonWriter::VmaJsonWriter(const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) + : m_SB(sb), + m_Stack(VmaStlAllocator(pAllocationCallbacks)), + m_InsideString(false) {} + +VmaJsonWriter::~VmaJsonWriter() +{ + VMA_ASSERT(!m_InsideString); + VMA_ASSERT(m_Stack.empty()); +} + +void VmaJsonWriter::BeginObject(bool singleLine) +{ + VMA_ASSERT(!m_InsideString); + + BeginValue(false); + m_SB.Add('{'); + + StackItem item; + item.type = COLLECTION_TYPE_OBJECT; + item.valueCount = 0; + item.singleLineMode = singleLine; + m_Stack.push_back(item); +} + +void VmaJsonWriter::EndObject() +{ + VMA_ASSERT(!m_InsideString); + + WriteIndent(true); + m_SB.Add('}'); + + VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT); + m_Stack.pop_back(); +} + +void VmaJsonWriter::BeginArray(bool singleLine) +{ + VMA_ASSERT(!m_InsideString); + + BeginValue(false); + m_SB.Add('['); + + StackItem item; + item.type = COLLECTION_TYPE_ARRAY; + item.valueCount = 0; + item.singleLineMode = singleLine; + m_Stack.push_back(item); +} + +void VmaJsonWriter::EndArray() +{ + VMA_ASSERT(!m_InsideString); + + WriteIndent(true); + m_SB.Add(']'); + + VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY); + m_Stack.pop_back(); +} + +void VmaJsonWriter::WriteString(const char* pStr) +{ + BeginString(pStr); + EndString(); +} + +void VmaJsonWriter::BeginString(const char* pStr) +{ + VMA_ASSERT(!m_InsideString); + + BeginValue(true); + m_SB.Add('"'); + m_InsideString = true; + if (pStr != VMA_NULL && pStr[0] != '\0') + { + ContinueString(pStr); + } +} + +void VmaJsonWriter::ContinueString(const char* pStr) +{ + VMA_ASSERT(m_InsideString); + + const size_t strLen = strlen(pStr); + for (size_t i = 0; i < strLen; ++i) + { + char ch = pStr[i]; + if (ch == '\\') + { + m_SB.Add("\\\\"); + } + else if (ch == '"') + { + m_SB.Add("\\\""); + } + else if ((uint8_t)ch >= 32) + { + m_SB.Add(ch); + } + else switch (ch) + { + case '\b': + m_SB.Add("\\b"); + break; + case '\f': + m_SB.Add("\\f"); + break; + case '\n': + m_SB.Add("\\n"); + break; + case '\r': + m_SB.Add("\\r"); + break; + case '\t': + m_SB.Add("\\t"); + break; + default: + VMA_ASSERT(0 && "Character not currently supported."); + } + } +} + +void VmaJsonWriter::ContinueString(uint32_t n) +{ + VMA_ASSERT(m_InsideString); + m_SB.AddNumber(n); +} + +void VmaJsonWriter::ContinueString(uint64_t n) +{ + VMA_ASSERT(m_InsideString); + m_SB.AddNumber(n); +} + +void VmaJsonWriter::ContinueString_Pointer(const void* ptr) +{ + VMA_ASSERT(m_InsideString); + m_SB.AddPointer(ptr); +} + +void VmaJsonWriter::EndString(const char* pStr) +{ + VMA_ASSERT(m_InsideString); + if (pStr != VMA_NULL && pStr[0] != '\0') + { + ContinueString(pStr); + } + m_SB.Add('"'); + m_InsideString = false; +} + +void VmaJsonWriter::WriteNumber(uint32_t n) +{ + VMA_ASSERT(!m_InsideString); + BeginValue(false); + m_SB.AddNumber(n); +} + +void VmaJsonWriter::WriteNumber(uint64_t n) +{ + VMA_ASSERT(!m_InsideString); + BeginValue(false); + 
m_SB.AddNumber(n); +} + +void VmaJsonWriter::WriteBool(bool b) +{ + VMA_ASSERT(!m_InsideString); + BeginValue(false); + m_SB.Add(b ? "true" : "false"); +} + +void VmaJsonWriter::WriteNull() +{ + VMA_ASSERT(!m_InsideString); + BeginValue(false); + m_SB.Add("null"); +} + +void VmaJsonWriter::BeginValue(bool isString) +{ + if (!m_Stack.empty()) + { + StackItem& currItem = m_Stack.back(); + if (currItem.type == COLLECTION_TYPE_OBJECT && + currItem.valueCount % 2 == 0) + { + VMA_ASSERT(isString); + } + + if (currItem.type == COLLECTION_TYPE_OBJECT && + currItem.valueCount % 2 != 0) + { + m_SB.Add(": "); + } + else if (currItem.valueCount > 0) + { + m_SB.Add(", "); + WriteIndent(); + } + else + { + WriteIndent(); + } + ++currItem.valueCount; + } +} + +void VmaJsonWriter::WriteIndent(bool oneLess) +{ + if (!m_Stack.empty() && !m_Stack.back().singleLineMode) + { + m_SB.AddNewLine(); + + size_t count = m_Stack.size(); + if (count > 0 && oneLess) + { + --count; + } + for (size_t i = 0; i < count; ++i) + { + m_SB.Add(INDENT); + } + } +} +#endif // _VMA_JSON_WRITER_FUNCTIONS + +static void VmaPrintDetailedStatistics(VmaJsonWriter& json, const VmaDetailedStatistics& stat) +{ + json.BeginObject(); + + json.WriteString("BlockCount"); + json.WriteNumber(stat.statistics.blockCount); + json.WriteString("BlockBytes"); + json.WriteNumber(stat.statistics.blockBytes); + json.WriteString("AllocationCount"); + json.WriteNumber(stat.statistics.allocationCount); + json.WriteString("AllocationBytes"); + json.WriteNumber(stat.statistics.allocationBytes); + json.WriteString("UnusedRangeCount"); + json.WriteNumber(stat.unusedRangeCount); + + if (stat.statistics.allocationCount > 1) + { + json.WriteString("AllocationSizeMin"); + json.WriteNumber(stat.allocationSizeMin); + json.WriteString("AllocationSizeMax"); + json.WriteNumber(stat.allocationSizeMax); + } + if (stat.unusedRangeCount > 1) + { + json.WriteString("UnusedRangeSizeMin"); + json.WriteNumber(stat.unusedRangeSizeMin); + json.WriteString("UnusedRangeSizeMax"); + json.WriteNumber(stat.unusedRangeSizeMax); + } + json.EndObject(); +} +#endif // _VMA_JSON_WRITER + +#ifndef _VMA_MAPPING_HYSTERESIS + +class VmaMappingHysteresis +{ + VMA_CLASS_NO_COPY_NO_MOVE(VmaMappingHysteresis) +public: + VmaMappingHysteresis() = default; + + uint32_t GetExtraMapping() const { return m_ExtraMapping; } + + // Call when Map was called. + // Returns true if switched to extra +1 mapping reference count. + bool PostMap() + { +#if VMA_MAPPING_HYSTERESIS_ENABLED + if(m_ExtraMapping == 0) + { + ++m_MajorCounter; + if(m_MajorCounter >= COUNTER_MIN_EXTRA_MAPPING) + { + m_ExtraMapping = 1; + m_MajorCounter = 0; + m_MinorCounter = 0; + return true; + } + } + else // m_ExtraMapping == 1 + PostMinorCounter(); +#endif // #if VMA_MAPPING_HYSTERESIS_ENABLED + return false; + } + + // Call when Unmap was called. + void PostUnmap() + { +#if VMA_MAPPING_HYSTERESIS_ENABLED + if(m_ExtraMapping == 0) + ++m_MajorCounter; + else // m_ExtraMapping == 1 + PostMinorCounter(); +#endif // #if VMA_MAPPING_HYSTERESIS_ENABLED + } + + // Call when allocation was made from the memory block. + void PostAlloc() + { +#if VMA_MAPPING_HYSTERESIS_ENABLED + if(m_ExtraMapping == 1) + ++m_MajorCounter; + else // m_ExtraMapping == 0 + PostMinorCounter(); +#endif // #if VMA_MAPPING_HYSTERESIS_ENABLED + } + + // Call when allocation was freed from the memory block. + // Returns true if switched to extra -1 mapping reference count. 
+ bool PostFree() + { +#if VMA_MAPPING_HYSTERESIS_ENABLED + if(m_ExtraMapping == 1) + { + ++m_MajorCounter; + if(m_MajorCounter >= COUNTER_MIN_EXTRA_MAPPING && + m_MajorCounter > m_MinorCounter + 1) + { + m_ExtraMapping = 0; + m_MajorCounter = 0; + m_MinorCounter = 0; + return true; + } + } + else // m_ExtraMapping == 0 + PostMinorCounter(); +#endif // #if VMA_MAPPING_HYSTERESIS_ENABLED + return false; + } + +private: + static const int32_t COUNTER_MIN_EXTRA_MAPPING = 7; + + uint32_t m_MinorCounter = 0; + uint32_t m_MajorCounter = 0; + uint32_t m_ExtraMapping = 0; // 0 or 1. + + void PostMinorCounter() + { + if(m_MinorCounter < m_MajorCounter) + { + ++m_MinorCounter; + } + else if(m_MajorCounter > 0) + { + --m_MajorCounter; + --m_MinorCounter; + } + } +}; + +#endif // _VMA_MAPPING_HYSTERESIS + +#ifndef _VMA_DEVICE_MEMORY_BLOCK +/* +Represents a single block of device memory (`VkDeviceMemory`) with all the +data about its regions (aka suballocations, #VmaAllocation), assigned and free. + +Thread-safety: +- Access to m_pMetadata must be externally synchronized. +- Map, Unmap, Bind* are synchronized internally. +*/ +class VmaDeviceMemoryBlock +{ + VMA_CLASS_NO_COPY_NO_MOVE(VmaDeviceMemoryBlock) +public: + VmaBlockMetadata* m_pMetadata; + + VmaDeviceMemoryBlock(VmaAllocator hAllocator); + ~VmaDeviceMemoryBlock(); + + // Always call after construction. + void Init( + VmaAllocator hAllocator, + VmaPool hParentPool, + uint32_t newMemoryTypeIndex, + VkDeviceMemory newMemory, + VkDeviceSize newSize, + uint32_t id, + uint32_t algorithm, + VkDeviceSize bufferImageGranularity); + // Always call before destruction. + void Destroy(VmaAllocator allocator); + + VmaPool GetParentPool() const { return m_hParentPool; } + VkDeviceMemory GetDeviceMemory() const { return m_hMemory; } + uint32_t GetMemoryTypeIndex() const { return m_MemoryTypeIndex; } + uint32_t GetId() const { return m_Id; } + void* GetMappedData() const { return m_pMappedData; } + uint32_t GetMapRefCount() const { return m_MapCount; } + + // Call when allocation/free was made from m_pMetadata. + // Used for m_MappingHysteresis. + void PostAlloc(VmaAllocator hAllocator); + void PostFree(VmaAllocator hAllocator); + + // Validates all data structures inside this object. If not valid, returns false. + bool Validate() const; + VkResult CheckCorruption(VmaAllocator hAllocator); + + // ppData can be null. + VkResult Map(VmaAllocator hAllocator, uint32_t count, void** ppData); + void Unmap(VmaAllocator hAllocator, uint32_t count); + + VkResult WriteMagicValueAfterAllocation(VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize); + VkResult ValidateMagicValueAfterAllocation(VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize); + + VkResult BindBufferMemory( + const VmaAllocator hAllocator, + const VmaAllocation hAllocation, + VkDeviceSize allocationLocalOffset, + VkBuffer hBuffer, + const void* pNext); + VkResult BindImageMemory( + const VmaAllocator hAllocator, + const VmaAllocation hAllocation, + VkDeviceSize allocationLocalOffset, + VkImage hImage, + const void* pNext); + +private: + VmaPool m_hParentPool; // VK_NULL_HANDLE if not belongs to custom pool. + uint32_t m_MemoryTypeIndex; + uint32_t m_Id; + VkDeviceMemory m_hMemory; + + /* + Protects access to m_hMemory so it is not used by multiple threads simultaneously, e.g. vkMapMemory, vkBindBufferMemory. + Also protects m_MapCount, m_pMappedData. + Allocations, deallocations, any change in m_pMetadata is protected by parent's VmaBlockVector::m_Mutex. 
+ */ + VMA_MUTEX m_MapAndBindMutex; + VmaMappingHysteresis m_MappingHysteresis; + uint32_t m_MapCount; + void* m_pMappedData; +}; +#endif // _VMA_DEVICE_MEMORY_BLOCK + +#ifndef _VMA_ALLOCATION_T +struct VmaAllocation_T +{ + friend struct VmaDedicatedAllocationListItemTraits; + + enum FLAGS + { + FLAG_PERSISTENT_MAP = 0x01, + FLAG_MAPPING_ALLOWED = 0x02, + }; + +public: + enum ALLOCATION_TYPE + { + ALLOCATION_TYPE_NONE, + ALLOCATION_TYPE_BLOCK, + ALLOCATION_TYPE_DEDICATED, + }; + + // This struct is allocated using VmaPoolAllocator. + VmaAllocation_T(bool mappingAllowed); + ~VmaAllocation_T(); + + void InitBlockAllocation( + VmaDeviceMemoryBlock* block, + VmaAllocHandle allocHandle, + VkDeviceSize alignment, + VkDeviceSize size, + uint32_t memoryTypeIndex, + VmaSuballocationType suballocationType, + bool mapped); + // pMappedData not null means allocation is created with MAPPED flag. + void InitDedicatedAllocation( + VmaPool hParentPool, + uint32_t memoryTypeIndex, + VkDeviceMemory hMemory, + VmaSuballocationType suballocationType, + void* pMappedData, + VkDeviceSize size); + + ALLOCATION_TYPE GetType() const { return (ALLOCATION_TYPE)m_Type; } + VkDeviceSize GetAlignment() const { return m_Alignment; } + VkDeviceSize GetSize() const { return m_Size; } + void* GetUserData() const { return m_pUserData; } + const char* GetName() const { return m_pName; } + VmaSuballocationType GetSuballocationType() const { return (VmaSuballocationType)m_SuballocationType; } + + VmaDeviceMemoryBlock* GetBlock() const { VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK); return m_BlockAllocation.m_Block; } + uint32_t GetMemoryTypeIndex() const { return m_MemoryTypeIndex; } + bool IsPersistentMap() const { return (m_Flags & FLAG_PERSISTENT_MAP) != 0; } + bool IsMappingAllowed() const { return (m_Flags & FLAG_MAPPING_ALLOWED) != 0; } + + void SetUserData(VmaAllocator hAllocator, void* pUserData) { m_pUserData = pUserData; } + void SetName(VmaAllocator hAllocator, const char* pName); + void FreeName(VmaAllocator hAllocator); + uint8_t SwapBlockAllocation(VmaAllocator hAllocator, VmaAllocation allocation); + VmaAllocHandle GetAllocHandle() const; + VkDeviceSize GetOffset() const; + VmaPool GetParentPool() const; + VkDeviceMemory GetMemory() const; + void* GetMappedData() const; + + void BlockAllocMap(); + void BlockAllocUnmap(); + VkResult DedicatedAllocMap(VmaAllocator hAllocator, void** ppData); + void DedicatedAllocUnmap(VmaAllocator hAllocator); + +#if VMA_STATS_STRING_ENABLED + VmaBufferImageUsage GetBufferImageUsage() const { return m_BufferImageUsage; } + void InitBufferUsage(const VkBufferCreateInfo &createInfo, bool useKhrMaintenance5) + { + VMA_ASSERT(m_BufferImageUsage == VmaBufferImageUsage::UNKNOWN); + m_BufferImageUsage = VmaBufferImageUsage(createInfo, useKhrMaintenance5); + } + void InitImageUsage(const VkImageCreateInfo &createInfo) + { + VMA_ASSERT(m_BufferImageUsage == VmaBufferImageUsage::UNKNOWN); + m_BufferImageUsage = VmaBufferImageUsage(createInfo); + } + void PrintParameters(class VmaJsonWriter& json) const; +#endif + +private: + // Allocation out of VmaDeviceMemoryBlock. + struct BlockAllocation + { + VmaDeviceMemoryBlock* m_Block; + VmaAllocHandle m_AllocHandle; + }; + // Allocation for an object that has its own private VkDeviceMemory. + struct DedicatedAllocation + { + VmaPool m_hParentPool; // VK_NULL_HANDLE if not belongs to custom pool. + VkDeviceMemory m_hMemory; + void* m_pMappedData; // Not null means memory is mapped. 
+ VmaAllocation_T* m_Prev; + VmaAllocation_T* m_Next; + }; + union + { + // Allocation out of VmaDeviceMemoryBlock. + BlockAllocation m_BlockAllocation; + // Allocation for an object that has its own private VkDeviceMemory. + DedicatedAllocation m_DedicatedAllocation; + }; + + VkDeviceSize m_Alignment; + VkDeviceSize m_Size; + void* m_pUserData; + char* m_pName; + uint32_t m_MemoryTypeIndex; + uint8_t m_Type; // ALLOCATION_TYPE + uint8_t m_SuballocationType; // VmaSuballocationType + // Reference counter for vmaMapMemory()/vmaUnmapMemory(). + uint8_t m_MapCount; + uint8_t m_Flags; // enum FLAGS +#if VMA_STATS_STRING_ENABLED + VmaBufferImageUsage m_BufferImageUsage; // 0 if unknown. +#endif +}; +#endif // _VMA_ALLOCATION_T + +#ifndef _VMA_DEDICATED_ALLOCATION_LIST_ITEM_TRAITS +struct VmaDedicatedAllocationListItemTraits +{ + typedef VmaAllocation_T ItemType; + + static ItemType* GetPrev(const ItemType* item) + { + VMA_HEAVY_ASSERT(item->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED); + return item->m_DedicatedAllocation.m_Prev; + } + static ItemType* GetNext(const ItemType* item) + { + VMA_HEAVY_ASSERT(item->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED); + return item->m_DedicatedAllocation.m_Next; + } + static ItemType*& AccessPrev(ItemType* item) + { + VMA_HEAVY_ASSERT(item->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED); + return item->m_DedicatedAllocation.m_Prev; + } + static ItemType*& AccessNext(ItemType* item) + { + VMA_HEAVY_ASSERT(item->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED); + return item->m_DedicatedAllocation.m_Next; + } +}; +#endif // _VMA_DEDICATED_ALLOCATION_LIST_ITEM_TRAITS + +#ifndef _VMA_DEDICATED_ALLOCATION_LIST +/* +Stores linked list of VmaAllocation_T objects. +Thread-safe, synchronized internally. +*/ +class VmaDedicatedAllocationList +{ + VMA_CLASS_NO_COPY_NO_MOVE(VmaDedicatedAllocationList) +public: + VmaDedicatedAllocationList() {} + ~VmaDedicatedAllocationList(); + + void Init(bool useMutex) { m_UseMutex = useMutex; } + bool Validate(); + + void AddDetailedStatistics(VmaDetailedStatistics& inoutStats); + void AddStatistics(VmaStatistics& inoutStats); +#if VMA_STATS_STRING_ENABLED + // Writes JSON array with the list of allocations. 
+ void BuildStatsString(VmaJsonWriter& json); +#endif + + bool IsEmpty(); + void Register(VmaAllocation alloc); + void Unregister(VmaAllocation alloc); + +private: + typedef VmaIntrusiveLinkedList DedicatedAllocationLinkedList; + + bool m_UseMutex = true; + VMA_RW_MUTEX m_Mutex; + DedicatedAllocationLinkedList m_AllocationList; +}; + +#ifndef _VMA_DEDICATED_ALLOCATION_LIST_FUNCTIONS + +VmaDedicatedAllocationList::~VmaDedicatedAllocationList() +{ + VMA_HEAVY_ASSERT(Validate()); + + if (!m_AllocationList.IsEmpty()) + { + VMA_ASSERT_LEAK(false && "Unfreed dedicated allocations found!"); + } +} + +bool VmaDedicatedAllocationList::Validate() +{ + const size_t declaredCount = m_AllocationList.GetCount(); + size_t actualCount = 0; + VmaMutexLockRead lock(m_Mutex, m_UseMutex); + for (VmaAllocation alloc = m_AllocationList.Front(); + alloc != VMA_NULL; alloc = m_AllocationList.GetNext(alloc)) + { + ++actualCount; + } + VMA_VALIDATE(actualCount == declaredCount); + + return true; +} + +void VmaDedicatedAllocationList::AddDetailedStatistics(VmaDetailedStatistics& inoutStats) +{ + for(auto* item = m_AllocationList.Front(); item != VMA_NULL; item = DedicatedAllocationLinkedList::GetNext(item)) + { + const VkDeviceSize size = item->GetSize(); + inoutStats.statistics.blockCount++; + inoutStats.statistics.blockBytes += size; + VmaAddDetailedStatisticsAllocation(inoutStats, item->GetSize()); + } +} + +void VmaDedicatedAllocationList::AddStatistics(VmaStatistics& inoutStats) +{ + VmaMutexLockRead lock(m_Mutex, m_UseMutex); + + const uint32_t allocCount = (uint32_t)m_AllocationList.GetCount(); + inoutStats.blockCount += allocCount; + inoutStats.allocationCount += allocCount; + + for(auto* item = m_AllocationList.Front(); item != VMA_NULL; item = DedicatedAllocationLinkedList::GetNext(item)) + { + const VkDeviceSize size = item->GetSize(); + inoutStats.blockBytes += size; + inoutStats.allocationBytes += size; + } +} + +#if VMA_STATS_STRING_ENABLED +void VmaDedicatedAllocationList::BuildStatsString(VmaJsonWriter& json) +{ + VmaMutexLockRead lock(m_Mutex, m_UseMutex); + json.BeginArray(); + for (VmaAllocation alloc = m_AllocationList.Front(); + alloc != VMA_NULL; alloc = m_AllocationList.GetNext(alloc)) + { + json.BeginObject(true); + alloc->PrintParameters(json); + json.EndObject(); + } + json.EndArray(); +} +#endif // VMA_STATS_STRING_ENABLED + +bool VmaDedicatedAllocationList::IsEmpty() +{ + VmaMutexLockRead lock(m_Mutex, m_UseMutex); + return m_AllocationList.IsEmpty(); +} + +void VmaDedicatedAllocationList::Register(VmaAllocation alloc) +{ + VmaMutexLockWrite lock(m_Mutex, m_UseMutex); + m_AllocationList.PushBack(alloc); +} + +void VmaDedicatedAllocationList::Unregister(VmaAllocation alloc) +{ + VmaMutexLockWrite lock(m_Mutex, m_UseMutex); + m_AllocationList.Remove(alloc); +} +#endif // _VMA_DEDICATED_ALLOCATION_LIST_FUNCTIONS +#endif // _VMA_DEDICATED_ALLOCATION_LIST + +#ifndef _VMA_SUBALLOCATION +/* +Represents a region of VmaDeviceMemoryBlock that is either assigned and returned as +allocated memory block or free. +*/ +struct VmaSuballocation +{ + VkDeviceSize offset; + VkDeviceSize size; + void* userData; + VmaSuballocationType type; +}; + +// Comparator for offsets. 
+struct VmaSuballocationOffsetLess +{ + bool operator()(const VmaSuballocation& lhs, const VmaSuballocation& rhs) const + { + return lhs.offset < rhs.offset; + } +}; + +struct VmaSuballocationOffsetGreater +{ + bool operator()(const VmaSuballocation& lhs, const VmaSuballocation& rhs) const + { + return lhs.offset > rhs.offset; + } +}; + +struct VmaSuballocationItemSizeLess +{ + bool operator()(const VmaSuballocationList::iterator lhs, + const VmaSuballocationList::iterator rhs) const + { + return lhs->size < rhs->size; + } + + bool operator()(const VmaSuballocationList::iterator lhs, + VkDeviceSize rhsSize) const + { + return lhs->size < rhsSize; + } +}; +#endif // _VMA_SUBALLOCATION + +#ifndef _VMA_ALLOCATION_REQUEST +/* +Parameters of planned allocation inside a VmaDeviceMemoryBlock. +item points to a FREE suballocation. +*/ +struct VmaAllocationRequest +{ + VmaAllocHandle allocHandle; + VkDeviceSize size; + VmaSuballocationList::iterator item; + void* customData; + uint64_t algorithmData; + VmaAllocationRequestType type; +}; +#endif // _VMA_ALLOCATION_REQUEST + +#ifndef _VMA_BLOCK_METADATA +/* +Data structure used for bookkeeping of allocations and unused ranges of memory +in a single VkDeviceMemory block. +*/ +class VmaBlockMetadata +{ + VMA_CLASS_NO_COPY_NO_MOVE(VmaBlockMetadata) +public: + // pAllocationCallbacks, if not null, must be owned externally - alive and unchanged for the whole lifetime of this object. + VmaBlockMetadata(const VkAllocationCallbacks* pAllocationCallbacks, + VkDeviceSize bufferImageGranularity, bool isVirtual); + virtual ~VmaBlockMetadata() = default; + + virtual void Init(VkDeviceSize size) { m_Size = size; } + bool IsVirtual() const { return m_IsVirtual; } + VkDeviceSize GetSize() const { return m_Size; } + + // Validates all data structures inside this object. If not valid, returns false. + virtual bool Validate() const = 0; + virtual size_t GetAllocationCount() const = 0; + virtual size_t GetFreeRegionsCount() const = 0; + virtual VkDeviceSize GetSumFreeSize() const = 0; + // Returns true if this block is empty - contains only single free suballocation. + virtual bool IsEmpty() const = 0; + virtual void GetAllocationInfo(VmaAllocHandle allocHandle, VmaVirtualAllocationInfo& outInfo) = 0; + virtual VkDeviceSize GetAllocationOffset(VmaAllocHandle allocHandle) const = 0; + virtual void* GetAllocationUserData(VmaAllocHandle allocHandle) const = 0; + + virtual VmaAllocHandle GetAllocationListBegin() const = 0; + virtual VmaAllocHandle GetNextAllocation(VmaAllocHandle prevAlloc) const = 0; + virtual VkDeviceSize GetNextFreeRegionSize(VmaAllocHandle alloc) const = 0; + + // Shouldn't modify blockCount. + virtual void AddDetailedStatistics(VmaDetailedStatistics& inoutStats) const = 0; + virtual void AddStatistics(VmaStatistics& inoutStats) const = 0; + +#if VMA_STATS_STRING_ENABLED + virtual void PrintDetailedMap(class VmaJsonWriter& json) const = 0; +#endif + + // Tries to find a place for suballocation with given parameters inside this block. + // If succeeded, fills pAllocationRequest and returns true. + // If failed, returns false. + virtual bool CreateAllocationRequest( + VkDeviceSize allocSize, + VkDeviceSize allocAlignment, + bool upperAddress, + VmaSuballocationType allocType, + // Always one of VMA_ALLOCATION_CREATE_STRATEGY_* or VMA_ALLOCATION_INTERNAL_STRATEGY_* flags. + uint32_t strategy, + VmaAllocationRequest* pAllocationRequest) = 0; + + virtual VkResult CheckCorruption(const void* pBlockData) = 0; + + // Makes actual allocation based on request. 
Request must already be checked and valid. + virtual void Alloc( + const VmaAllocationRequest& request, + VmaSuballocationType type, + void* userData) = 0; + + // Frees suballocation assigned to given memory region. + virtual void Free(VmaAllocHandle allocHandle) = 0; + + // Frees all allocations. + // Careful! Don't call it if there are VmaAllocation objects owned by userData of cleared allocations! + virtual void Clear() = 0; + + virtual void SetAllocationUserData(VmaAllocHandle allocHandle, void* userData) = 0; + virtual void DebugLogAllAllocations() const = 0; + +protected: + const VkAllocationCallbacks* GetAllocationCallbacks() const { return m_pAllocationCallbacks; } + VkDeviceSize GetBufferImageGranularity() const { return m_BufferImageGranularity; } + VkDeviceSize GetDebugMargin() const { return VkDeviceSize(IsVirtual() ? 0 : VMA_DEBUG_MARGIN); } + + void DebugLogAllocation(VkDeviceSize offset, VkDeviceSize size, void* userData) const; +#if VMA_STATS_STRING_ENABLED + // mapRefCount == UINT32_MAX means unspecified. + void PrintDetailedMap_Begin(class VmaJsonWriter& json, + VkDeviceSize unusedBytes, + size_t allocationCount, + size_t unusedRangeCount) const; + void PrintDetailedMap_Allocation(class VmaJsonWriter& json, + VkDeviceSize offset, VkDeviceSize size, void* userData) const; + void PrintDetailedMap_UnusedRange(class VmaJsonWriter& json, + VkDeviceSize offset, + VkDeviceSize size) const; + void PrintDetailedMap_End(class VmaJsonWriter& json) const; +#endif + +private: + VkDeviceSize m_Size; + const VkAllocationCallbacks* m_pAllocationCallbacks; + const VkDeviceSize m_BufferImageGranularity; + const bool m_IsVirtual; +}; + +#ifndef _VMA_BLOCK_METADATA_FUNCTIONS +VmaBlockMetadata::VmaBlockMetadata(const VkAllocationCallbacks* pAllocationCallbacks, + VkDeviceSize bufferImageGranularity, bool isVirtual) + : m_Size(0), + m_pAllocationCallbacks(pAllocationCallbacks), + m_BufferImageGranularity(bufferImageGranularity), + m_IsVirtual(isVirtual) {} + +void VmaBlockMetadata::DebugLogAllocation(VkDeviceSize offset, VkDeviceSize size, void* userData) const +{ + if (IsVirtual()) + { + VMA_LEAK_LOG_FORMAT("UNFREED VIRTUAL ALLOCATION; Offset: %" PRIu64 "; Size: %" PRIu64 "; UserData: %p", offset, size, userData); + } + else + { + VMA_ASSERT(userData != VMA_NULL); + VmaAllocation allocation = reinterpret_cast(userData); + + userData = allocation->GetUserData(); + const char* name = allocation->GetName(); + +#if VMA_STATS_STRING_ENABLED + VMA_LEAK_LOG_FORMAT("UNFREED ALLOCATION; Offset: %" PRIu64 "; Size: %" PRIu64 "; UserData: %p; Name: %s; Type: %s; Usage: %" PRIu64, + offset, size, userData, name ? name : "vma_empty", + VMA_SUBALLOCATION_TYPE_NAMES[allocation->GetSuballocationType()], + (uint64_t)allocation->GetBufferImageUsage().Value); +#else + VMA_LEAK_LOG_FORMAT("UNFREED ALLOCATION; Offset: %" PRIu64 "; Size: %" PRIu64 "; UserData: %p; Name: %s; Type: %u", + offset, size, userData, name ? 
name : "vma_empty", + (unsigned)allocation->GetSuballocationType()); +#endif // VMA_STATS_STRING_ENABLED + } + +} + +#if VMA_STATS_STRING_ENABLED +void VmaBlockMetadata::PrintDetailedMap_Begin(class VmaJsonWriter& json, + VkDeviceSize unusedBytes, size_t allocationCount, size_t unusedRangeCount) const +{ + json.WriteString("TotalBytes"); + json.WriteNumber(GetSize()); + + json.WriteString("UnusedBytes"); + json.WriteNumber(unusedBytes); + + json.WriteString("Allocations"); + json.WriteNumber((uint64_t)allocationCount); + + json.WriteString("UnusedRanges"); + json.WriteNumber((uint64_t)unusedRangeCount); + + json.WriteString("Suballocations"); + json.BeginArray(); +} + +void VmaBlockMetadata::PrintDetailedMap_Allocation(class VmaJsonWriter& json, + VkDeviceSize offset, VkDeviceSize size, void* userData) const +{ + json.BeginObject(true); + + json.WriteString("Offset"); + json.WriteNumber(offset); + + if (IsVirtual()) + { + json.WriteString("Size"); + json.WriteNumber(size); + if (userData) + { + json.WriteString("CustomData"); + json.BeginString(); + json.ContinueString_Pointer(userData); + json.EndString(); + } + } + else + { + ((VmaAllocation)userData)->PrintParameters(json); + } + + json.EndObject(); +} + +void VmaBlockMetadata::PrintDetailedMap_UnusedRange(class VmaJsonWriter& json, + VkDeviceSize offset, VkDeviceSize size) const +{ + json.BeginObject(true); + + json.WriteString("Offset"); + json.WriteNumber(offset); + + json.WriteString("Type"); + json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[VMA_SUBALLOCATION_TYPE_FREE]); + + json.WriteString("Size"); + json.WriteNumber(size); + + json.EndObject(); +} + +void VmaBlockMetadata::PrintDetailedMap_End(class VmaJsonWriter& json) const +{ + json.EndArray(); +} +#endif // VMA_STATS_STRING_ENABLED +#endif // _VMA_BLOCK_METADATA_FUNCTIONS +#endif // _VMA_BLOCK_METADATA + +#ifndef _VMA_BLOCK_BUFFER_IMAGE_GRANULARITY +// Before deleting object of this class remember to call 'Destroy()' +class VmaBlockBufferImageGranularity final +{ +public: + struct ValidationContext + { + const VkAllocationCallbacks* allocCallbacks; + uint16_t* pageAllocs; + }; + + VmaBlockBufferImageGranularity(VkDeviceSize bufferImageGranularity); + ~VmaBlockBufferImageGranularity(); + + bool IsEnabled() const { return m_BufferImageGranularity > MAX_LOW_BUFFER_IMAGE_GRANULARITY; } + + void Init(const VkAllocationCallbacks* pAllocationCallbacks, VkDeviceSize size); + // Before destroying object you must call free it's memory + void Destroy(const VkAllocationCallbacks* pAllocationCallbacks); + + void RoundupAllocRequest(VmaSuballocationType allocType, + VkDeviceSize& inOutAllocSize, + VkDeviceSize& inOutAllocAlignment) const; + + bool CheckConflictAndAlignUp(VkDeviceSize& inOutAllocOffset, + VkDeviceSize allocSize, + VkDeviceSize blockOffset, + VkDeviceSize blockSize, + VmaSuballocationType allocType) const; + + void AllocPages(uint8_t allocType, VkDeviceSize offset, VkDeviceSize size); + void FreePages(VkDeviceSize offset, VkDeviceSize size); + void Clear(); + + ValidationContext StartValidation(const VkAllocationCallbacks* pAllocationCallbacks, + bool isVirutal) const; + bool Validate(ValidationContext& ctx, VkDeviceSize offset, VkDeviceSize size) const; + bool FinishValidation(ValidationContext& ctx) const; + +private: + static const uint16_t MAX_LOW_BUFFER_IMAGE_GRANULARITY = 256; + + struct RegionInfo + { + uint8_t allocType; + uint16_t allocCount; + }; + + VkDeviceSize m_BufferImageGranularity; + uint32_t m_RegionCount; + RegionInfo* m_RegionInfo; + + uint32_t 
GetStartPage(VkDeviceSize offset) const { return OffsetToPageIndex(offset & ~(m_BufferImageGranularity - 1)); } + uint32_t GetEndPage(VkDeviceSize offset, VkDeviceSize size) const { return OffsetToPageIndex((offset + size - 1) & ~(m_BufferImageGranularity - 1)); } + + uint32_t OffsetToPageIndex(VkDeviceSize offset) const; + void AllocPage(RegionInfo& page, uint8_t allocType); +}; + +#ifndef _VMA_BLOCK_BUFFER_IMAGE_GRANULARITY_FUNCTIONS +VmaBlockBufferImageGranularity::VmaBlockBufferImageGranularity(VkDeviceSize bufferImageGranularity) + : m_BufferImageGranularity(bufferImageGranularity), + m_RegionCount(0), + m_RegionInfo(VMA_NULL) {} + +VmaBlockBufferImageGranularity::~VmaBlockBufferImageGranularity() +{ + VMA_ASSERT(m_RegionInfo == VMA_NULL && "Free not called before destroying object!"); +} + +void VmaBlockBufferImageGranularity::Init(const VkAllocationCallbacks* pAllocationCallbacks, VkDeviceSize size) +{ + if (IsEnabled()) + { + m_RegionCount = static_cast(VmaDivideRoundingUp(size, m_BufferImageGranularity)); + m_RegionInfo = vma_new_array(pAllocationCallbacks, RegionInfo, m_RegionCount); + memset(m_RegionInfo, 0, m_RegionCount * sizeof(RegionInfo)); + } +} + +void VmaBlockBufferImageGranularity::Destroy(const VkAllocationCallbacks* pAllocationCallbacks) +{ + if (m_RegionInfo) + { + vma_delete_array(pAllocationCallbacks, m_RegionInfo, m_RegionCount); + m_RegionInfo = VMA_NULL; + } +} + +void VmaBlockBufferImageGranularity::RoundupAllocRequest(VmaSuballocationType allocType, + VkDeviceSize& inOutAllocSize, + VkDeviceSize& inOutAllocAlignment) const +{ + if (m_BufferImageGranularity > 1 && + m_BufferImageGranularity <= MAX_LOW_BUFFER_IMAGE_GRANULARITY) + { + if (allocType == VMA_SUBALLOCATION_TYPE_UNKNOWN || + allocType == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN || + allocType == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL) + { + inOutAllocAlignment = VMA_MAX(inOutAllocAlignment, m_BufferImageGranularity); + inOutAllocSize = VmaAlignUp(inOutAllocSize, m_BufferImageGranularity); + } + } +} + +bool VmaBlockBufferImageGranularity::CheckConflictAndAlignUp(VkDeviceSize& inOutAllocOffset, + VkDeviceSize allocSize, + VkDeviceSize blockOffset, + VkDeviceSize blockSize, + VmaSuballocationType allocType) const +{ + if (IsEnabled()) + { + uint32_t startPage = GetStartPage(inOutAllocOffset); + if (m_RegionInfo[startPage].allocCount > 0 && + VmaIsBufferImageGranularityConflict(static_cast(m_RegionInfo[startPage].allocType), allocType)) + { + inOutAllocOffset = VmaAlignUp(inOutAllocOffset, m_BufferImageGranularity); + if (blockSize < allocSize + inOutAllocOffset - blockOffset) + return true; + ++startPage; + } + uint32_t endPage = GetEndPage(inOutAllocOffset, allocSize); + if (endPage != startPage && + m_RegionInfo[endPage].allocCount > 0 && + VmaIsBufferImageGranularityConflict(static_cast(m_RegionInfo[endPage].allocType), allocType)) + { + return true; + } + } + return false; +} + +void VmaBlockBufferImageGranularity::AllocPages(uint8_t allocType, VkDeviceSize offset, VkDeviceSize size) +{ + if (IsEnabled()) + { + uint32_t startPage = GetStartPage(offset); + AllocPage(m_RegionInfo[startPage], allocType); + + uint32_t endPage = GetEndPage(offset, size); + if (startPage != endPage) + AllocPage(m_RegionInfo[endPage], allocType); + } +} + +void VmaBlockBufferImageGranularity::FreePages(VkDeviceSize offset, VkDeviceSize size) +{ + if (IsEnabled()) + { + uint32_t startPage = GetStartPage(offset); + --m_RegionInfo[startPage].allocCount; + if (m_RegionInfo[startPage].allocCount == 0) + 
m_RegionInfo[startPage].allocType = VMA_SUBALLOCATION_TYPE_FREE; + uint32_t endPage = GetEndPage(offset, size); + if (startPage != endPage) + { + --m_RegionInfo[endPage].allocCount; + if (m_RegionInfo[endPage].allocCount == 0) + m_RegionInfo[endPage].allocType = VMA_SUBALLOCATION_TYPE_FREE; + } + } +} + +void VmaBlockBufferImageGranularity::Clear() +{ + if (m_RegionInfo) + memset(m_RegionInfo, 0, m_RegionCount * sizeof(RegionInfo)); +} + +VmaBlockBufferImageGranularity::ValidationContext VmaBlockBufferImageGranularity::StartValidation( + const VkAllocationCallbacks* pAllocationCallbacks, bool isVirutal) const +{ + ValidationContext ctx{ pAllocationCallbacks, VMA_NULL }; + if (!isVirutal && IsEnabled()) + { + ctx.pageAllocs = vma_new_array(pAllocationCallbacks, uint16_t, m_RegionCount); + memset(ctx.pageAllocs, 0, m_RegionCount * sizeof(uint16_t)); + } + return ctx; +} + +bool VmaBlockBufferImageGranularity::Validate(ValidationContext& ctx, + VkDeviceSize offset, VkDeviceSize size) const +{ + if (IsEnabled()) + { + uint32_t start = GetStartPage(offset); + ++ctx.pageAllocs[start]; + VMA_VALIDATE(m_RegionInfo[start].allocCount > 0); + + uint32_t end = GetEndPage(offset, size); + if (start != end) + { + ++ctx.pageAllocs[end]; + VMA_VALIDATE(m_RegionInfo[end].allocCount > 0); + } + } + return true; +} + +bool VmaBlockBufferImageGranularity::FinishValidation(ValidationContext& ctx) const +{ + // Check proper page structure + if (IsEnabled()) + { + VMA_ASSERT(ctx.pageAllocs != VMA_NULL && "Validation context not initialized!"); + + for (uint32_t page = 0; page < m_RegionCount; ++page) + { + VMA_VALIDATE(ctx.pageAllocs[page] == m_RegionInfo[page].allocCount); + } + vma_delete_array(ctx.allocCallbacks, ctx.pageAllocs, m_RegionCount); + ctx.pageAllocs = VMA_NULL; + } + return true; +} + +uint32_t VmaBlockBufferImageGranularity::OffsetToPageIndex(VkDeviceSize offset) const +{ + return static_cast(offset >> VMA_BITSCAN_MSB(m_BufferImageGranularity)); +} + +void VmaBlockBufferImageGranularity::AllocPage(RegionInfo& page, uint8_t allocType) +{ + // When current alloc type is free then it can be overridden by new type + if (page.allocCount == 0 || (page.allocCount > 0 && page.allocType == VMA_SUBALLOCATION_TYPE_FREE)) + page.allocType = allocType; + + ++page.allocCount; +} +#endif // _VMA_BLOCK_BUFFER_IMAGE_GRANULARITY_FUNCTIONS +#endif // _VMA_BLOCK_BUFFER_IMAGE_GRANULARITY + +#ifndef _VMA_BLOCK_METADATA_LINEAR +/* +Allocations and their references in internal data structure look like this: + +if(m_2ndVectorMode == SECOND_VECTOR_EMPTY): + + 0 +-------+ + | | + | | + | | + +-------+ + | Alloc | 1st[m_1stNullItemsBeginCount] + +-------+ + | Alloc | 1st[m_1stNullItemsBeginCount + 1] + +-------+ + | ... | + +-------+ + | Alloc | 1st[1st.size() - 1] + +-------+ + | | + | | + | | +GetSize() +-------+ + +if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER): + + 0 +-------+ + | Alloc | 2nd[0] + +-------+ + | Alloc | 2nd[1] + +-------+ + | ... | + +-------+ + | Alloc | 2nd[2nd.size() - 1] + +-------+ + | | + | | + | | + +-------+ + | Alloc | 1st[m_1stNullItemsBeginCount] + +-------+ + | Alloc | 1st[m_1stNullItemsBeginCount + 1] + +-------+ + | ... | + +-------+ + | Alloc | 1st[1st.size() - 1] + +-------+ + | | +GetSize() +-------+ + +if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK): + + 0 +-------+ + | | + | | + | | + +-------+ + | Alloc | 1st[m_1stNullItemsBeginCount] + +-------+ + | Alloc | 1st[m_1stNullItemsBeginCount + 1] + +-------+ + | ... 
| + +-------+ + | Alloc | 1st[1st.size() - 1] + +-------+ + | | + | | + | | + +-------+ + | Alloc | 2nd[2nd.size() - 1] + +-------+ + | ... | + +-------+ + | Alloc | 2nd[1] + +-------+ + | Alloc | 2nd[0] +GetSize() +-------+ + +*/ +class VmaBlockMetadata_Linear : public VmaBlockMetadata +{ + VMA_CLASS_NO_COPY_NO_MOVE(VmaBlockMetadata_Linear) +public: + VmaBlockMetadata_Linear(const VkAllocationCallbacks* pAllocationCallbacks, + VkDeviceSize bufferImageGranularity, bool isVirtual); + virtual ~VmaBlockMetadata_Linear() = default; + + VkDeviceSize GetSumFreeSize() const override { return m_SumFreeSize; } + bool IsEmpty() const override { return GetAllocationCount() == 0; } + VkDeviceSize GetAllocationOffset(VmaAllocHandle allocHandle) const override { return (VkDeviceSize)allocHandle - 1; } + + void Init(VkDeviceSize size) override; + bool Validate() const override; + size_t GetAllocationCount() const override; + size_t GetFreeRegionsCount() const override; + + void AddDetailedStatistics(VmaDetailedStatistics& inoutStats) const override; + void AddStatistics(VmaStatistics& inoutStats) const override; + +#if VMA_STATS_STRING_ENABLED + void PrintDetailedMap(class VmaJsonWriter& json) const override; +#endif + + bool CreateAllocationRequest( + VkDeviceSize allocSize, + VkDeviceSize allocAlignment, + bool upperAddress, + VmaSuballocationType allocType, + uint32_t strategy, + VmaAllocationRequest* pAllocationRequest) override; + + VkResult CheckCorruption(const void* pBlockData) override; + + void Alloc( + const VmaAllocationRequest& request, + VmaSuballocationType type, + void* userData) override; + + void Free(VmaAllocHandle allocHandle) override; + void GetAllocationInfo(VmaAllocHandle allocHandle, VmaVirtualAllocationInfo& outInfo) override; + void* GetAllocationUserData(VmaAllocHandle allocHandle) const override; + VmaAllocHandle GetAllocationListBegin() const override; + VmaAllocHandle GetNextAllocation(VmaAllocHandle prevAlloc) const override; + VkDeviceSize GetNextFreeRegionSize(VmaAllocHandle alloc) const override; + void Clear() override; + void SetAllocationUserData(VmaAllocHandle allocHandle, void* userData) override; + void DebugLogAllAllocations() const override; + +private: + /* + There are two suballocation vectors, used in ping-pong way. + The one with index m_1stVectorIndex is called 1st. + The one with index (m_1stVectorIndex ^ 1) is called 2nd. + 2nd can be non-empty only when 1st is not empty. + When 2nd is not empty, m_2ndVectorMode indicates its mode of operation. + */ + typedef VmaVector> SuballocationVectorType; + + enum SECOND_VECTOR_MODE + { + SECOND_VECTOR_EMPTY, + /* + Suballocations in 2nd vector are created later than the ones in 1st, but they + all have smaller offset. + */ + SECOND_VECTOR_RING_BUFFER, + /* + Suballocations in 2nd vector are upper side of double stack. + They all have offsets higher than those in 1st vector. + Top of this stack means smaller offsets, but higher indices in this vector. + */ + SECOND_VECTOR_DOUBLE_STACK, + }; + + VkDeviceSize m_SumFreeSize; + SuballocationVectorType m_Suballocations0, m_Suballocations1; + uint32_t m_1stVectorIndex; + SECOND_VECTOR_MODE m_2ndVectorMode; + // Number of items in 1st vector with hAllocation = null at the beginning. + size_t m_1stNullItemsBeginCount; + // Number of other items in 1st vector with hAllocation = null somewhere in the middle. + size_t m_1stNullItemsMiddleCount; + // Number of items in 2nd vector with hAllocation = null. 
+ size_t m_2ndNullItemsCount; + + SuballocationVectorType& AccessSuballocations1st() { return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; } + SuballocationVectorType& AccessSuballocations2nd() { return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; } + const SuballocationVectorType& AccessSuballocations1st() const { return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; } + const SuballocationVectorType& AccessSuballocations2nd() const { return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; } + + VmaSuballocation& FindSuballocation(VkDeviceSize offset) const; + bool ShouldCompact1st() const; + void CleanupAfterFree(); + + bool CreateAllocationRequest_LowerAddress( + VkDeviceSize allocSize, + VkDeviceSize allocAlignment, + VmaSuballocationType allocType, + uint32_t strategy, + VmaAllocationRequest* pAllocationRequest); + bool CreateAllocationRequest_UpperAddress( + VkDeviceSize allocSize, + VkDeviceSize allocAlignment, + VmaSuballocationType allocType, + uint32_t strategy, + VmaAllocationRequest* pAllocationRequest); +}; + +#ifndef _VMA_BLOCK_METADATA_LINEAR_FUNCTIONS +VmaBlockMetadata_Linear::VmaBlockMetadata_Linear(const VkAllocationCallbacks* pAllocationCallbacks, + VkDeviceSize bufferImageGranularity, bool isVirtual) + : VmaBlockMetadata(pAllocationCallbacks, bufferImageGranularity, isVirtual), + m_SumFreeSize(0), + m_Suballocations0(VmaStlAllocator(pAllocationCallbacks)), + m_Suballocations1(VmaStlAllocator(pAllocationCallbacks)), + m_1stVectorIndex(0), + m_2ndVectorMode(SECOND_VECTOR_EMPTY), + m_1stNullItemsBeginCount(0), + m_1stNullItemsMiddleCount(0), + m_2ndNullItemsCount(0) {} + +void VmaBlockMetadata_Linear::Init(VkDeviceSize size) +{ + VmaBlockMetadata::Init(size); + m_SumFreeSize = size; +} + +bool VmaBlockMetadata_Linear::Validate() const +{ + const SuballocationVectorType& suballocations1st = AccessSuballocations1st(); + const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd(); + + VMA_VALIDATE(suballocations2nd.empty() == (m_2ndVectorMode == SECOND_VECTOR_EMPTY)); + VMA_VALIDATE(!suballocations1st.empty() || + suballocations2nd.empty() || + m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER); + + if (!suballocations1st.empty()) + { + // Null item at the beginning should be accounted into m_1stNullItemsBeginCount. + VMA_VALIDATE(suballocations1st[m_1stNullItemsBeginCount].type != VMA_SUBALLOCATION_TYPE_FREE); + // Null item at the end should be just pop_back(). + VMA_VALIDATE(suballocations1st.back().type != VMA_SUBALLOCATION_TYPE_FREE); + } + if (!suballocations2nd.empty()) + { + // Null item at the end should be just pop_back(). 
+ VMA_VALIDATE(suballocations2nd.back().type != VMA_SUBALLOCATION_TYPE_FREE); + } + + VMA_VALIDATE(m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount <= suballocations1st.size()); + VMA_VALIDATE(m_2ndNullItemsCount <= suballocations2nd.size()); + + VkDeviceSize sumUsedSize = 0; + const size_t suballoc1stCount = suballocations1st.size(); + const VkDeviceSize debugMargin = GetDebugMargin(); + VkDeviceSize offset = 0; + + if (m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER) + { + const size_t suballoc2ndCount = suballocations2nd.size(); + size_t nullItem2ndCount = 0; + for (size_t i = 0; i < suballoc2ndCount; ++i) + { + const VmaSuballocation& suballoc = suballocations2nd[i]; + const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE); + + VmaAllocation const alloc = (VmaAllocation)suballoc.userData; + if (!IsVirtual()) + { + VMA_VALIDATE(currFree == (alloc == VK_NULL_HANDLE)); + } + VMA_VALIDATE(suballoc.offset >= offset); + + if (!currFree) + { + if (!IsVirtual()) + { + VMA_VALIDATE((VkDeviceSize)alloc->GetAllocHandle() == suballoc.offset + 1); + VMA_VALIDATE(alloc->GetSize() == suballoc.size); + } + sumUsedSize += suballoc.size; + } + else + { + ++nullItem2ndCount; + } + + offset = suballoc.offset + suballoc.size + debugMargin; + } + + VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount); + } + + for (size_t i = 0; i < m_1stNullItemsBeginCount; ++i) + { + const VmaSuballocation& suballoc = suballocations1st[i]; + VMA_VALIDATE(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE && + suballoc.userData == VMA_NULL); + } + + size_t nullItem1stCount = m_1stNullItemsBeginCount; + + for (size_t i = m_1stNullItemsBeginCount; i < suballoc1stCount; ++i) + { + const VmaSuballocation& suballoc = suballocations1st[i]; + const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE); + + VmaAllocation const alloc = (VmaAllocation)suballoc.userData; + if (!IsVirtual()) + { + VMA_VALIDATE(currFree == (alloc == VK_NULL_HANDLE)); + } + VMA_VALIDATE(suballoc.offset >= offset); + VMA_VALIDATE(i >= m_1stNullItemsBeginCount || currFree); + + if (!currFree) + { + if (!IsVirtual()) + { + VMA_VALIDATE((VkDeviceSize)alloc->GetAllocHandle() == suballoc.offset + 1); + VMA_VALIDATE(alloc->GetSize() == suballoc.size); + } + sumUsedSize += suballoc.size; + } + else + { + ++nullItem1stCount; + } + + offset = suballoc.offset + suballoc.size + debugMargin; + } + VMA_VALIDATE(nullItem1stCount == m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount); + + if (m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK) + { + const size_t suballoc2ndCount = suballocations2nd.size(); + size_t nullItem2ndCount = 0; + for (size_t i = suballoc2ndCount; i--; ) + { + const VmaSuballocation& suballoc = suballocations2nd[i]; + const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE); + + VmaAllocation const alloc = (VmaAllocation)suballoc.userData; + if (!IsVirtual()) + { + VMA_VALIDATE(currFree == (alloc == VK_NULL_HANDLE)); + } + VMA_VALIDATE(suballoc.offset >= offset); + + if (!currFree) + { + if (!IsVirtual()) + { + VMA_VALIDATE((VkDeviceSize)alloc->GetAllocHandle() == suballoc.offset + 1); + VMA_VALIDATE(alloc->GetSize() == suballoc.size); + } + sumUsedSize += suballoc.size; + } + else + { + ++nullItem2ndCount; + } + + offset = suballoc.offset + suballoc.size + debugMargin; + } + + VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount); + } + + VMA_VALIDATE(offset <= GetSize()); + VMA_VALIDATE(m_SumFreeSize == GetSize() - sumUsedSize); + + return true; +} + +size_t VmaBlockMetadata_Linear::GetAllocationCount() const +{ + 
return AccessSuballocations1st().size() - m_1stNullItemsBeginCount - m_1stNullItemsMiddleCount + + AccessSuballocations2nd().size() - m_2ndNullItemsCount; +} + +size_t VmaBlockMetadata_Linear::GetFreeRegionsCount() const +{ + // Function only used for defragmentation, which is disabled for this algorithm + VMA_ASSERT(0); + return SIZE_MAX; +} + +void VmaBlockMetadata_Linear::AddDetailedStatistics(VmaDetailedStatistics& inoutStats) const +{ + const VkDeviceSize size = GetSize(); + const SuballocationVectorType& suballocations1st = AccessSuballocations1st(); + const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd(); + const size_t suballoc1stCount = suballocations1st.size(); + const size_t suballoc2ndCount = suballocations2nd.size(); + + inoutStats.statistics.blockCount++; + inoutStats.statistics.blockBytes += size; + + VkDeviceSize lastOffset = 0; + + if (m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER) + { + const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset; + size_t nextAlloc2ndIndex = 0; + while (lastOffset < freeSpace2ndTo1stEnd) + { + // Find next non-null allocation or move nextAllocIndex to the end. + while (nextAlloc2ndIndex < suballoc2ndCount && + suballocations2nd[nextAlloc2ndIndex].userData == VMA_NULL) + { + ++nextAlloc2ndIndex; + } + + // Found non-null allocation. + if (nextAlloc2ndIndex < suballoc2ndCount) + { + const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex]; + + // 1. Process free space before this allocation. + if (lastOffset < suballoc.offset) + { + // There is free space from lastOffset to suballoc.offset. + const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset; + VmaAddDetailedStatisticsUnusedRange(inoutStats, unusedRangeSize); + } + + // 2. Process this allocation. + // There is allocation with suballoc.offset, suballoc.size. + VmaAddDetailedStatisticsAllocation(inoutStats, suballoc.size); + + // 3. Prepare for next iteration. + lastOffset = suballoc.offset + suballoc.size; + ++nextAlloc2ndIndex; + } + // We are at the end. + else + { + // There is free space from lastOffset to freeSpace2ndTo1stEnd. + if (lastOffset < freeSpace2ndTo1stEnd) + { + const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset; + VmaAddDetailedStatisticsUnusedRange(inoutStats, unusedRangeSize); + } + + // End of loop. + lastOffset = freeSpace2ndTo1stEnd; + } + } + } + + size_t nextAlloc1stIndex = m_1stNullItemsBeginCount; + const VkDeviceSize freeSpace1stTo2ndEnd = + m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size; + while (lastOffset < freeSpace1stTo2ndEnd) + { + // Find next non-null allocation or move nextAllocIndex to the end. + while (nextAlloc1stIndex < suballoc1stCount && + suballocations1st[nextAlloc1stIndex].userData == VMA_NULL) + { + ++nextAlloc1stIndex; + } + + // Found non-null allocation. + if (nextAlloc1stIndex < suballoc1stCount) + { + const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex]; + + // 1. Process free space before this allocation. + if (lastOffset < suballoc.offset) + { + // There is free space from lastOffset to suballoc.offset. + const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset; + VmaAddDetailedStatisticsUnusedRange(inoutStats, unusedRangeSize); + } + + // 2. Process this allocation. + // There is allocation with suballoc.offset, suballoc.size. + VmaAddDetailedStatisticsAllocation(inoutStats, suballoc.size); + + // 3. Prepare for next iteration. 
+ lastOffset = suballoc.offset + suballoc.size; + ++nextAlloc1stIndex; + } + // We are at the end. + else + { + // There is free space from lastOffset to freeSpace1stTo2ndEnd. + if (lastOffset < freeSpace1stTo2ndEnd) + { + const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset; + VmaAddDetailedStatisticsUnusedRange(inoutStats, unusedRangeSize); + } + + // End of loop. + lastOffset = freeSpace1stTo2ndEnd; + } + } + + if (m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK) + { + size_t nextAlloc2ndIndex = suballocations2nd.size() - 1; + while (lastOffset < size) + { + // Find next non-null allocation or move nextAllocIndex to the end. + while (nextAlloc2ndIndex != SIZE_MAX && + suballocations2nd[nextAlloc2ndIndex].userData == VMA_NULL) + { + --nextAlloc2ndIndex; + } + + // Found non-null allocation. + if (nextAlloc2ndIndex != SIZE_MAX) + { + const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex]; + + // 1. Process free space before this allocation. + if (lastOffset < suballoc.offset) + { + // There is free space from lastOffset to suballoc.offset. + const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset; + VmaAddDetailedStatisticsUnusedRange(inoutStats, unusedRangeSize); + } + + // 2. Process this allocation. + // There is allocation with suballoc.offset, suballoc.size. + VmaAddDetailedStatisticsAllocation(inoutStats, suballoc.size); + + // 3. Prepare for next iteration. + lastOffset = suballoc.offset + suballoc.size; + --nextAlloc2ndIndex; + } + // We are at the end. + else + { + // There is free space from lastOffset to size. + if (lastOffset < size) + { + const VkDeviceSize unusedRangeSize = size - lastOffset; + VmaAddDetailedStatisticsUnusedRange(inoutStats, unusedRangeSize); + } + + // End of loop. + lastOffset = size; + } + } + } +} + +void VmaBlockMetadata_Linear::AddStatistics(VmaStatistics& inoutStats) const +{ + const SuballocationVectorType& suballocations1st = AccessSuballocations1st(); + const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd(); + const VkDeviceSize size = GetSize(); + const size_t suballoc1stCount = suballocations1st.size(); + const size_t suballoc2ndCount = suballocations2nd.size(); + + inoutStats.blockCount++; + inoutStats.blockBytes += size; + inoutStats.allocationBytes += size - m_SumFreeSize; + + VkDeviceSize lastOffset = 0; + + if (m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER) + { + const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset; + size_t nextAlloc2ndIndex = m_1stNullItemsBeginCount; + while (lastOffset < freeSpace2ndTo1stEnd) + { + // Find next non-null allocation or move nextAlloc2ndIndex to the end. + while (nextAlloc2ndIndex < suballoc2ndCount && + suballocations2nd[nextAlloc2ndIndex].userData == VMA_NULL) + { + ++nextAlloc2ndIndex; + } + + // Found non-null allocation. + if (nextAlloc2ndIndex < suballoc2ndCount) + { + const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex]; + + // Process this allocation. + // There is allocation with suballoc.offset, suballoc.size. + ++inoutStats.allocationCount; + + // Prepare for next iteration. + lastOffset = suballoc.offset + suballoc.size; + ++nextAlloc2ndIndex; + } + // We are at the end. + else + { + // End of loop. + lastOffset = freeSpace2ndTo1stEnd; + } + } + } + + size_t nextAlloc1stIndex = m_1stNullItemsBeginCount; + const VkDeviceSize freeSpace1stTo2ndEnd = + m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? 
suballocations2nd.back().offset : size; + while (lastOffset < freeSpace1stTo2ndEnd) + { + // Find next non-null allocation or move nextAllocIndex to the end. + while (nextAlloc1stIndex < suballoc1stCount && + suballocations1st[nextAlloc1stIndex].userData == VMA_NULL) + { + ++nextAlloc1stIndex; + } + + // Found non-null allocation. + if (nextAlloc1stIndex < suballoc1stCount) + { + const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex]; + + // Process this allocation. + // There is allocation with suballoc.offset, suballoc.size. + ++inoutStats.allocationCount; + + // Prepare for next iteration. + lastOffset = suballoc.offset + suballoc.size; + ++nextAlloc1stIndex; + } + // We are at the end. + else + { + // End of loop. + lastOffset = freeSpace1stTo2ndEnd; + } + } + + if (m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK) + { + size_t nextAlloc2ndIndex = suballocations2nd.size() - 1; + while (lastOffset < size) + { + // Find next non-null allocation or move nextAlloc2ndIndex to the end. + while (nextAlloc2ndIndex != SIZE_MAX && + suballocations2nd[nextAlloc2ndIndex].userData == VMA_NULL) + { + --nextAlloc2ndIndex; + } + + // Found non-null allocation. + if (nextAlloc2ndIndex != SIZE_MAX) + { + const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex]; + + // Process this allocation. + // There is allocation with suballoc.offset, suballoc.size. + ++inoutStats.allocationCount; + + // Prepare for next iteration. + lastOffset = suballoc.offset + suballoc.size; + --nextAlloc2ndIndex; + } + // We are at the end. + else + { + // End of loop. + lastOffset = size; + } + } + } +} + +#if VMA_STATS_STRING_ENABLED +void VmaBlockMetadata_Linear::PrintDetailedMap(class VmaJsonWriter& json) const +{ + const VkDeviceSize size = GetSize(); + const SuballocationVectorType& suballocations1st = AccessSuballocations1st(); + const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd(); + const size_t suballoc1stCount = suballocations1st.size(); + const size_t suballoc2ndCount = suballocations2nd.size(); + + // FIRST PASS + + size_t unusedRangeCount = 0; + VkDeviceSize usedBytes = 0; + + VkDeviceSize lastOffset = 0; + + size_t alloc2ndCount = 0; + if (m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER) + { + const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset; + size_t nextAlloc2ndIndex = 0; + while (lastOffset < freeSpace2ndTo1stEnd) + { + // Find next non-null allocation or move nextAlloc2ndIndex to the end. + while (nextAlloc2ndIndex < suballoc2ndCount && + suballocations2nd[nextAlloc2ndIndex].userData == VMA_NULL) + { + ++nextAlloc2ndIndex; + } + + // Found non-null allocation. + if (nextAlloc2ndIndex < suballoc2ndCount) + { + const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex]; + + // 1. Process free space before this allocation. + if (lastOffset < suballoc.offset) + { + // There is free space from lastOffset to suballoc.offset. + ++unusedRangeCount; + } + + // 2. Process this allocation. + // There is allocation with suballoc.offset, suballoc.size. + ++alloc2ndCount; + usedBytes += suballoc.size; + + // 3. Prepare for next iteration. + lastOffset = suballoc.offset + suballoc.size; + ++nextAlloc2ndIndex; + } + // We are at the end. + else + { + if (lastOffset < freeSpace2ndTo1stEnd) + { + // There is free space from lastOffset to freeSpace2ndTo1stEnd. + ++unusedRangeCount; + } + + // End of loop. 
+ lastOffset = freeSpace2ndTo1stEnd; + } + } + } + + size_t nextAlloc1stIndex = m_1stNullItemsBeginCount; + size_t alloc1stCount = 0; + const VkDeviceSize freeSpace1stTo2ndEnd = + m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size; + while (lastOffset < freeSpace1stTo2ndEnd) + { + // Find next non-null allocation or move nextAllocIndex to the end. + while (nextAlloc1stIndex < suballoc1stCount && + suballocations1st[nextAlloc1stIndex].userData == VMA_NULL) + { + ++nextAlloc1stIndex; + } + + // Found non-null allocation. + if (nextAlloc1stIndex < suballoc1stCount) + { + const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex]; + + // 1. Process free space before this allocation. + if (lastOffset < suballoc.offset) + { + // There is free space from lastOffset to suballoc.offset. + ++unusedRangeCount; + } + + // 2. Process this allocation. + // There is allocation with suballoc.offset, suballoc.size. + ++alloc1stCount; + usedBytes += suballoc.size; + + // 3. Prepare for next iteration. + lastOffset = suballoc.offset + suballoc.size; + ++nextAlloc1stIndex; + } + // We are at the end. + else + { + if (lastOffset < freeSpace1stTo2ndEnd) + { + // There is free space from lastOffset to freeSpace1stTo2ndEnd. + ++unusedRangeCount; + } + + // End of loop. + lastOffset = freeSpace1stTo2ndEnd; + } + } + + if (m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK) + { + size_t nextAlloc2ndIndex = suballocations2nd.size() - 1; + while (lastOffset < size) + { + // Find next non-null allocation or move nextAlloc2ndIndex to the end. + while (nextAlloc2ndIndex != SIZE_MAX && + suballocations2nd[nextAlloc2ndIndex].userData == VMA_NULL) + { + --nextAlloc2ndIndex; + } + + // Found non-null allocation. + if (nextAlloc2ndIndex != SIZE_MAX) + { + const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex]; + + // 1. Process free space before this allocation. + if (lastOffset < suballoc.offset) + { + // There is free space from lastOffset to suballoc.offset. + ++unusedRangeCount; + } + + // 2. Process this allocation. + // There is allocation with suballoc.offset, suballoc.size. + ++alloc2ndCount; + usedBytes += suballoc.size; + + // 3. Prepare for next iteration. + lastOffset = suballoc.offset + suballoc.size; + --nextAlloc2ndIndex; + } + // We are at the end. + else + { + if (lastOffset < size) + { + // There is free space from lastOffset to size. + ++unusedRangeCount; + } + + // End of loop. + lastOffset = size; + } + } + } + + const VkDeviceSize unusedBytes = size - usedBytes; + PrintDetailedMap_Begin(json, unusedBytes, alloc1stCount + alloc2ndCount, unusedRangeCount); + + // SECOND PASS + lastOffset = 0; + + if (m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER) + { + const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset; + size_t nextAlloc2ndIndex = 0; + while (lastOffset < freeSpace2ndTo1stEnd) + { + // Find next non-null allocation or move nextAlloc2ndIndex to the end. + while (nextAlloc2ndIndex < suballoc2ndCount && + suballocations2nd[nextAlloc2ndIndex].userData == VMA_NULL) + { + ++nextAlloc2ndIndex; + } + + // Found non-null allocation. + if (nextAlloc2ndIndex < suballoc2ndCount) + { + const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex]; + + // 1. Process free space before this allocation. + if (lastOffset < suballoc.offset) + { + // There is free space from lastOffset to suballoc.offset. 
+ const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset; + PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize); + } + + // 2. Process this allocation. + // There is allocation with suballoc.offset, suballoc.size. + PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.size, suballoc.userData); + + // 3. Prepare for next iteration. + lastOffset = suballoc.offset + suballoc.size; + ++nextAlloc2ndIndex; + } + // We are at the end. + else + { + if (lastOffset < freeSpace2ndTo1stEnd) + { + // There is free space from lastOffset to freeSpace2ndTo1stEnd. + const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset; + PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize); + } + + // End of loop. + lastOffset = freeSpace2ndTo1stEnd; + } + } + } + + nextAlloc1stIndex = m_1stNullItemsBeginCount; + while (lastOffset < freeSpace1stTo2ndEnd) + { + // Find next non-null allocation or move nextAllocIndex to the end. + while (nextAlloc1stIndex < suballoc1stCount && + suballocations1st[nextAlloc1stIndex].userData == VMA_NULL) + { + ++nextAlloc1stIndex; + } + + // Found non-null allocation. + if (nextAlloc1stIndex < suballoc1stCount) + { + const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex]; + + // 1. Process free space before this allocation. + if (lastOffset < suballoc.offset) + { + // There is free space from lastOffset to suballoc.offset. + const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset; + PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize); + } + + // 2. Process this allocation. + // There is allocation with suballoc.offset, suballoc.size. + PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.size, suballoc.userData); + + // 3. Prepare for next iteration. + lastOffset = suballoc.offset + suballoc.size; + ++nextAlloc1stIndex; + } + // We are at the end. + else + { + if (lastOffset < freeSpace1stTo2ndEnd) + { + // There is free space from lastOffset to freeSpace1stTo2ndEnd. + const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset; + PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize); + } + + // End of loop. + lastOffset = freeSpace1stTo2ndEnd; + } + } + + if (m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK) + { + size_t nextAlloc2ndIndex = suballocations2nd.size() - 1; + while (lastOffset < size) + { + // Find next non-null allocation or move nextAlloc2ndIndex to the end. + while (nextAlloc2ndIndex != SIZE_MAX && + suballocations2nd[nextAlloc2ndIndex].userData == VMA_NULL) + { + --nextAlloc2ndIndex; + } + + // Found non-null allocation. + if (nextAlloc2ndIndex != SIZE_MAX) + { + const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex]; + + // 1. Process free space before this allocation. + if (lastOffset < suballoc.offset) + { + // There is free space from lastOffset to suballoc.offset. + const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset; + PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize); + } + + // 2. Process this allocation. + // There is allocation with suballoc.offset, suballoc.size. + PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.size, suballoc.userData); + + // 3. Prepare for next iteration. + lastOffset = suballoc.offset + suballoc.size; + --nextAlloc2ndIndex; + } + // We are at the end. + else + { + if (lastOffset < size) + { + // There is free space from lastOffset to size. 
+ const VkDeviceSize unusedRangeSize = size - lastOffset; + PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize); + } + + // End of loop. + lastOffset = size; + } + } + } + + PrintDetailedMap_End(json); +} +#endif // VMA_STATS_STRING_ENABLED + +bool VmaBlockMetadata_Linear::CreateAllocationRequest( + VkDeviceSize allocSize, + VkDeviceSize allocAlignment, + bool upperAddress, + VmaSuballocationType allocType, + uint32_t strategy, + VmaAllocationRequest* pAllocationRequest) +{ + VMA_ASSERT(allocSize > 0); + VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE); + VMA_ASSERT(pAllocationRequest != VMA_NULL); + VMA_HEAVY_ASSERT(Validate()); + + if(allocSize > GetSize()) + return false; + + pAllocationRequest->size = allocSize; + return upperAddress ? + CreateAllocationRequest_UpperAddress( + allocSize, allocAlignment, allocType, strategy, pAllocationRequest) : + CreateAllocationRequest_LowerAddress( + allocSize, allocAlignment, allocType, strategy, pAllocationRequest); +} + +VkResult VmaBlockMetadata_Linear::CheckCorruption(const void* pBlockData) +{ + VMA_ASSERT(!IsVirtual()); + SuballocationVectorType& suballocations1st = AccessSuballocations1st(); + for (size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i) + { + const VmaSuballocation& suballoc = suballocations1st[i]; + if (suballoc.type != VMA_SUBALLOCATION_TYPE_FREE) + { + if (!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size)) + { + VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!"); + return VK_ERROR_UNKNOWN_COPY; + } + } + } + + SuballocationVectorType& suballocations2nd = AccessSuballocations2nd(); + for (size_t i = 0, count = suballocations2nd.size(); i < count; ++i) + { + const VmaSuballocation& suballoc = suballocations2nd[i]; + if (suballoc.type != VMA_SUBALLOCATION_TYPE_FREE) + { + if (!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size)) + { + VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!"); + return VK_ERROR_UNKNOWN_COPY; + } + } + } + + return VK_SUCCESS; +} + +void VmaBlockMetadata_Linear::Alloc( + const VmaAllocationRequest& request, + VmaSuballocationType type, + void* userData) +{ + const VkDeviceSize offset = (VkDeviceSize)request.allocHandle - 1; + const VmaSuballocation newSuballoc = { offset, request.size, userData, type }; + + switch (request.type) + { + case VmaAllocationRequestType::UpperAddress: + { + VMA_ASSERT(m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER && + "CRITICAL ERROR: Trying to use linear allocator as double stack while it was already used as ring buffer."); + SuballocationVectorType& suballocations2nd = AccessSuballocations2nd(); + suballocations2nd.push_back(newSuballoc); + m_2ndVectorMode = SECOND_VECTOR_DOUBLE_STACK; + } + break; + case VmaAllocationRequestType::EndOf1st: + { + SuballocationVectorType& suballocations1st = AccessSuballocations1st(); + + VMA_ASSERT(suballocations1st.empty() || + offset >= suballocations1st.back().offset + suballocations1st.back().size); + // Check if it fits before the end of the block. + VMA_ASSERT(offset + request.size <= GetSize()); + + suballocations1st.push_back(newSuballoc); + } + break; + case VmaAllocationRequestType::EndOf2nd: + { + SuballocationVectorType& suballocations1st = AccessSuballocations1st(); + // New allocation at the end of 2-part ring buffer, so before first allocation from 1st vector. 
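+        // (Illustratively, the block then looks like [2nd: low offsets][free][1st:
+        // high offsets]; the assert below checks that the new suballocation still ends
+        // at or before suballocations1st[m_1stNullItemsBeginCount].offset, the first
+        // used item of the 1st vector.)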
+ VMA_ASSERT(!suballocations1st.empty() && + offset + request.size <= suballocations1st[m_1stNullItemsBeginCount].offset); + SuballocationVectorType& suballocations2nd = AccessSuballocations2nd(); + + switch (m_2ndVectorMode) + { + case SECOND_VECTOR_EMPTY: + // First allocation from second part ring buffer. + VMA_ASSERT(suballocations2nd.empty()); + m_2ndVectorMode = SECOND_VECTOR_RING_BUFFER; + break; + case SECOND_VECTOR_RING_BUFFER: + // 2-part ring buffer is already started. + VMA_ASSERT(!suballocations2nd.empty()); + break; + case SECOND_VECTOR_DOUBLE_STACK: + VMA_ASSERT(0 && "CRITICAL ERROR: Trying to use linear allocator as ring buffer while it was already used as double stack."); + break; + default: + VMA_ASSERT(0); + } + + suballocations2nd.push_back(newSuballoc); + } + break; + default: + VMA_ASSERT(0 && "CRITICAL INTERNAL ERROR."); + } + + m_SumFreeSize -= newSuballoc.size; +} + +void VmaBlockMetadata_Linear::Free(VmaAllocHandle allocHandle) +{ + SuballocationVectorType& suballocations1st = AccessSuballocations1st(); + SuballocationVectorType& suballocations2nd = AccessSuballocations2nd(); + VkDeviceSize offset = (VkDeviceSize)allocHandle - 1; + + if (!suballocations1st.empty()) + { + // First allocation: Mark it as next empty at the beginning. + VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount]; + if (firstSuballoc.offset == offset) + { + firstSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE; + firstSuballoc.userData = VMA_NULL; + m_SumFreeSize += firstSuballoc.size; + ++m_1stNullItemsBeginCount; + CleanupAfterFree(); + return; + } + } + + // Last allocation in 2-part ring buffer or top of upper stack (same logic). + if (m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER || + m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK) + { + VmaSuballocation& lastSuballoc = suballocations2nd.back(); + if (lastSuballoc.offset == offset) + { + m_SumFreeSize += lastSuballoc.size; + suballocations2nd.pop_back(); + CleanupAfterFree(); + return; + } + } + // Last allocation in 1st vector. + else if (m_2ndVectorMode == SECOND_VECTOR_EMPTY) + { + VmaSuballocation& lastSuballoc = suballocations1st.back(); + if (lastSuballoc.offset == offset) + { + m_SumFreeSize += lastSuballoc.size; + suballocations1st.pop_back(); + CleanupAfterFree(); + return; + } + } + + VmaSuballocation refSuballoc; + refSuballoc.offset = offset; + // Rest of members stays uninitialized intentionally for better performance. + + // Item from the middle of 1st vector. + { + const SuballocationVectorType::iterator it = VmaBinaryFindSorted( + suballocations1st.begin() + m_1stNullItemsBeginCount, + suballocations1st.end(), + refSuballoc, + VmaSuballocationOffsetLess()); + if (it != suballocations1st.end()) + { + it->type = VMA_SUBALLOCATION_TYPE_FREE; + it->userData = VMA_NULL; + ++m_1stNullItemsMiddleCount; + m_SumFreeSize += it->size; + CleanupAfterFree(); + return; + } + } + + if (m_2ndVectorMode != SECOND_VECTOR_EMPTY) + { + // Item from the middle of 2nd vector. + const SuballocationVectorType::iterator it = m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ? 
+ VmaBinaryFindSorted(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc, VmaSuballocationOffsetLess()) : + VmaBinaryFindSorted(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc, VmaSuballocationOffsetGreater()); + if (it != suballocations2nd.end()) + { + it->type = VMA_SUBALLOCATION_TYPE_FREE; + it->userData = VMA_NULL; + ++m_2ndNullItemsCount; + m_SumFreeSize += it->size; + CleanupAfterFree(); + return; + } + } + + VMA_ASSERT(0 && "Allocation to free not found in linear allocator!"); +} + +void VmaBlockMetadata_Linear::GetAllocationInfo(VmaAllocHandle allocHandle, VmaVirtualAllocationInfo& outInfo) +{ + outInfo.offset = (VkDeviceSize)allocHandle - 1; + VmaSuballocation& suballoc = FindSuballocation(outInfo.offset); + outInfo.size = suballoc.size; + outInfo.pUserData = suballoc.userData; +} + +void* VmaBlockMetadata_Linear::GetAllocationUserData(VmaAllocHandle allocHandle) const +{ + return FindSuballocation((VkDeviceSize)allocHandle - 1).userData; +} + +VmaAllocHandle VmaBlockMetadata_Linear::GetAllocationListBegin() const +{ + // Function only used for defragmentation, which is disabled for this algorithm + VMA_ASSERT(0); + return VK_NULL_HANDLE; +} + +VmaAllocHandle VmaBlockMetadata_Linear::GetNextAllocation(VmaAllocHandle prevAlloc) const +{ + // Function only used for defragmentation, which is disabled for this algorithm + VMA_ASSERT(0); + return VK_NULL_HANDLE; +} + +VkDeviceSize VmaBlockMetadata_Linear::GetNextFreeRegionSize(VmaAllocHandle alloc) const +{ + // Function only used for defragmentation, which is disabled for this algorithm + VMA_ASSERT(0); + return 0; +} + +void VmaBlockMetadata_Linear::Clear() +{ + m_SumFreeSize = GetSize(); + m_Suballocations0.clear(); + m_Suballocations1.clear(); + // Leaving m_1stVectorIndex unchanged - it doesn't matter. + m_2ndVectorMode = SECOND_VECTOR_EMPTY; + m_1stNullItemsBeginCount = 0; + m_1stNullItemsMiddleCount = 0; + m_2ndNullItemsCount = 0; +} + +void VmaBlockMetadata_Linear::SetAllocationUserData(VmaAllocHandle allocHandle, void* userData) +{ + VmaSuballocation& suballoc = FindSuballocation((VkDeviceSize)allocHandle - 1); + suballoc.userData = userData; +} + +void VmaBlockMetadata_Linear::DebugLogAllAllocations() const +{ + const SuballocationVectorType& suballocations1st = AccessSuballocations1st(); + for (auto it = suballocations1st.begin() + m_1stNullItemsBeginCount; it != suballocations1st.end(); ++it) + if (it->type != VMA_SUBALLOCATION_TYPE_FREE) + DebugLogAllocation(it->offset, it->size, it->userData); + + const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd(); + for (auto it = suballocations2nd.begin(); it != suballocations2nd.end(); ++it) + if (it->type != VMA_SUBALLOCATION_TYPE_FREE) + DebugLogAllocation(it->offset, it->size, it->userData); +} + +VmaSuballocation& VmaBlockMetadata_Linear::FindSuballocation(VkDeviceSize offset) const +{ + const SuballocationVectorType& suballocations1st = AccessSuballocations1st(); + const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd(); + + VmaSuballocation refSuballoc; + refSuballoc.offset = offset; + // Rest of members stays uninitialized intentionally for better performance. + + // Item from the 1st vector. 
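+    // (The search below starts at begin() + m_1stNullItemsBeginCount so the freed,
+    // still-present null items at the front of the 1st vector are skipped, mirroring
+    // the lookup in Free() above.)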
+ { + SuballocationVectorType::const_iterator it = VmaBinaryFindSorted( + suballocations1st.begin() + m_1stNullItemsBeginCount, + suballocations1st.end(), + refSuballoc, + VmaSuballocationOffsetLess()); + if (it != suballocations1st.end()) + { + return const_cast(*it); + } + } + + if (m_2ndVectorMode != SECOND_VECTOR_EMPTY) + { + // Rest of members stays uninitialized intentionally for better performance. + SuballocationVectorType::const_iterator it = m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ? + VmaBinaryFindSorted(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc, VmaSuballocationOffsetLess()) : + VmaBinaryFindSorted(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc, VmaSuballocationOffsetGreater()); + if (it != suballocations2nd.end()) + { + return const_cast(*it); + } + } + + VMA_ASSERT(0 && "Allocation not found in linear allocator!"); + return const_cast(suballocations1st.back()); // Should never occur. +} + +bool VmaBlockMetadata_Linear::ShouldCompact1st() const +{ + const size_t nullItemCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount; + const size_t suballocCount = AccessSuballocations1st().size(); + return suballocCount > 32 && nullItemCount * 2 >= (suballocCount - nullItemCount) * 3; +} + +void VmaBlockMetadata_Linear::CleanupAfterFree() +{ + SuballocationVectorType& suballocations1st = AccessSuballocations1st(); + SuballocationVectorType& suballocations2nd = AccessSuballocations2nd(); + + if (IsEmpty()) + { + suballocations1st.clear(); + suballocations2nd.clear(); + m_1stNullItemsBeginCount = 0; + m_1stNullItemsMiddleCount = 0; + m_2ndNullItemsCount = 0; + m_2ndVectorMode = SECOND_VECTOR_EMPTY; + } + else + { + const size_t suballoc1stCount = suballocations1st.size(); + const size_t nullItem1stCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount; + VMA_ASSERT(nullItem1stCount <= suballoc1stCount); + + // Find more null items at the beginning of 1st vector. + while (m_1stNullItemsBeginCount < suballoc1stCount && + suballocations1st[m_1stNullItemsBeginCount].type == VMA_SUBALLOCATION_TYPE_FREE) + { + ++m_1stNullItemsBeginCount; + --m_1stNullItemsMiddleCount; + } + + // Find more null items at the end of 1st vector. + while (m_1stNullItemsMiddleCount > 0 && + suballocations1st.back().type == VMA_SUBALLOCATION_TYPE_FREE) + { + --m_1stNullItemsMiddleCount; + suballocations1st.pop_back(); + } + + // Find more null items at the end of 2nd vector. + while (m_2ndNullItemsCount > 0 && + suballocations2nd.back().type == VMA_SUBALLOCATION_TYPE_FREE) + { + --m_2ndNullItemsCount; + suballocations2nd.pop_back(); + } + + // Find more null items at the beginning of 2nd vector. + while (m_2ndNullItemsCount > 0 && + suballocations2nd[0].type == VMA_SUBALLOCATION_TYPE_FREE) + { + --m_2ndNullItemsCount; + VmaVectorRemove(suballocations2nd, 0); + } + + if (ShouldCompact1st()) + { + const size_t nonNullItemCount = suballoc1stCount - nullItem1stCount; + size_t srcIndex = m_1stNullItemsBeginCount; + for (size_t dstIndex = 0; dstIndex < nonNullItemCount; ++dstIndex) + { + while (suballocations1st[srcIndex].type == VMA_SUBALLOCATION_TYPE_FREE) + { + ++srcIndex; + } + if (dstIndex != srcIndex) + { + suballocations1st[dstIndex] = suballocations1st[srcIndex]; + } + ++srcIndex; + } + suballocations1st.resize(nonNullItemCount); + m_1stNullItemsBeginCount = 0; + m_1stNullItemsMiddleCount = 0; + } + + // 2nd vector became empty. + if (suballocations2nd.empty()) + { + m_2ndVectorMode = SECOND_VECTOR_EMPTY; + } + + // 1st vector became empty. 
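+        // (If the ring buffer still has items in the 2nd vector, the two vectors swap
+        // roles below simply by toggling m_1stVectorIndex - no elements are copied,
+        // only the null-item counters are carried over.)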
+ if (suballocations1st.size() - m_1stNullItemsBeginCount == 0) + { + suballocations1st.clear(); + m_1stNullItemsBeginCount = 0; + + if (!suballocations2nd.empty() && m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER) + { + // Swap 1st with 2nd. Now 2nd is empty. + m_2ndVectorMode = SECOND_VECTOR_EMPTY; + m_1stNullItemsMiddleCount = m_2ndNullItemsCount; + while (m_1stNullItemsBeginCount < suballocations2nd.size() && + suballocations2nd[m_1stNullItemsBeginCount].type == VMA_SUBALLOCATION_TYPE_FREE) + { + ++m_1stNullItemsBeginCount; + --m_1stNullItemsMiddleCount; + } + m_2ndNullItemsCount = 0; + m_1stVectorIndex ^= 1; + } + } + } + + VMA_HEAVY_ASSERT(Validate()); +} + +bool VmaBlockMetadata_Linear::CreateAllocationRequest_LowerAddress( + VkDeviceSize allocSize, + VkDeviceSize allocAlignment, + VmaSuballocationType allocType, + uint32_t strategy, + VmaAllocationRequest* pAllocationRequest) +{ + const VkDeviceSize blockSize = GetSize(); + const VkDeviceSize debugMargin = GetDebugMargin(); + const VkDeviceSize bufferImageGranularity = GetBufferImageGranularity(); + SuballocationVectorType& suballocations1st = AccessSuballocations1st(); + SuballocationVectorType& suballocations2nd = AccessSuballocations2nd(); + + if (m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK) + { + // Try to allocate at the end of 1st vector. + + VkDeviceSize resultBaseOffset = 0; + if (!suballocations1st.empty()) + { + const VmaSuballocation& lastSuballoc = suballocations1st.back(); + resultBaseOffset = lastSuballoc.offset + lastSuballoc.size + debugMargin; + } + + // Start from offset equal to beginning of free space. + VkDeviceSize resultOffset = resultBaseOffset; + + // Apply alignment. + resultOffset = VmaAlignUp(resultOffset, allocAlignment); + + // Check previous suballocations for BufferImageGranularity conflicts. + // Make bigger alignment if necessary. + if (bufferImageGranularity > 1 && bufferImageGranularity != allocAlignment && !suballocations1st.empty()) + { + bool bufferImageGranularityConflict = false; + for (size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; ) + { + const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex]; + if (VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity)) + { + if (VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType)) + { + bufferImageGranularityConflict = true; + break; + } + } + else + // Already on previous page. + break; + } + if (bufferImageGranularityConflict) + { + resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity); + } + } + + const VkDeviceSize freeSpaceEnd = m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? + suballocations2nd.back().offset : blockSize; + + // There is enough free space at the end after alignment. + if (resultOffset + allocSize + debugMargin <= freeSpaceEnd) + { + // Check next suballocations for BufferImageGranularity conflicts. + // If conflict exists, allocation cannot be made here. 
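+ // Background (added comment, an assumption about intent rather than upstream VMA text): Vulkan's bufferImageGranularity requires that linear and non-linear (optimal-tiling) resources never share the same granularity "page" of a memory block, which is what the page/conflict checks below enforce.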
+ if ((allocSize % bufferImageGranularity || resultOffset % bufferImageGranularity) && m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK) + { + for (size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; ) + { + const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex]; + if (VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity)) + { + if (VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type)) + { + return false; + } + } + else + { + // Already on previous page. + break; + } + } + } + + // All tests passed: Success. + pAllocationRequest->allocHandle = (VmaAllocHandle)(resultOffset + 1); + // pAllocationRequest->item, customData unused. + pAllocationRequest->type = VmaAllocationRequestType::EndOf1st; + return true; + } + } + + // Wrap-around to end of 2nd vector. Try to allocate there, watching for the + // beginning of 1st vector as the end of free space. + if (m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER) + { + VMA_ASSERT(!suballocations1st.empty()); + + VkDeviceSize resultBaseOffset = 0; + if (!suballocations2nd.empty()) + { + const VmaSuballocation& lastSuballoc = suballocations2nd.back(); + resultBaseOffset = lastSuballoc.offset + lastSuballoc.size + debugMargin; + } + + // Start from offset equal to beginning of free space. + VkDeviceSize resultOffset = resultBaseOffset; + + // Apply alignment. + resultOffset = VmaAlignUp(resultOffset, allocAlignment); + + // Check previous suballocations for BufferImageGranularity conflicts. + // Make bigger alignment if necessary. + if (bufferImageGranularity > 1 && bufferImageGranularity != allocAlignment && !suballocations2nd.empty()) + { + bool bufferImageGranularityConflict = false; + for (size_t prevSuballocIndex = suballocations2nd.size(); prevSuballocIndex--; ) + { + const VmaSuballocation& prevSuballoc = suballocations2nd[prevSuballocIndex]; + if (VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity)) + { + if (VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType)) + { + bufferImageGranularityConflict = true; + break; + } + } + else + // Already on previous page. + break; + } + if (bufferImageGranularityConflict) + { + resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity); + } + } + + size_t index1st = m_1stNullItemsBeginCount; + + // There is enough free space at the end after alignment. + if ((index1st == suballocations1st.size() && resultOffset + allocSize + debugMargin <= blockSize) || + (index1st < suballocations1st.size() && resultOffset + allocSize + debugMargin <= suballocations1st[index1st].offset)) + { + // Check next suballocations for BufferImageGranularity conflicts. + // If conflict exists, allocation cannot be made here. + if (allocSize % bufferImageGranularity || resultOffset % bufferImageGranularity) + { + for (size_t nextSuballocIndex = index1st; + nextSuballocIndex < suballocations1st.size(); + nextSuballocIndex++) + { + const VmaSuballocation& nextSuballoc = suballocations1st[nextSuballocIndex]; + if (VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity)) + { + if (VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type)) + { + return false; + } + } + else + { + // Already on next page. + break; + } + } + } + + // All tests passed: Success. 
+ pAllocationRequest->allocHandle = (VmaAllocHandle)(resultOffset + 1); + pAllocationRequest->type = VmaAllocationRequestType::EndOf2nd; + // pAllocationRequest->item, customData unused. + return true; + } + } + + return false; +} + +bool VmaBlockMetadata_Linear::CreateAllocationRequest_UpperAddress( + VkDeviceSize allocSize, + VkDeviceSize allocAlignment, + VmaSuballocationType allocType, + uint32_t strategy, + VmaAllocationRequest* pAllocationRequest) +{ + const VkDeviceSize blockSize = GetSize(); + const VkDeviceSize bufferImageGranularity = GetBufferImageGranularity(); + SuballocationVectorType& suballocations1st = AccessSuballocations1st(); + SuballocationVectorType& suballocations2nd = AccessSuballocations2nd(); + + if (m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER) + { + VMA_ASSERT(0 && "Trying to use pool with linear algorithm as double stack, while it is already being used as ring buffer."); + return false; + } + + // Try to allocate before 2nd.back(), or end of block if 2nd.empty(). + if (allocSize > blockSize) + { + return false; + } + VkDeviceSize resultBaseOffset = blockSize - allocSize; + if (!suballocations2nd.empty()) + { + const VmaSuballocation& lastSuballoc = suballocations2nd.back(); + resultBaseOffset = lastSuballoc.offset - allocSize; + if (allocSize > lastSuballoc.offset) + { + return false; + } + } + + // Start from offset equal to end of free space. + VkDeviceSize resultOffset = resultBaseOffset; + + const VkDeviceSize debugMargin = GetDebugMargin(); + + // Apply debugMargin at the end. + if (debugMargin > 0) + { + if (resultOffset < debugMargin) + { + return false; + } + resultOffset -= debugMargin; + } + + // Apply alignment. + resultOffset = VmaAlignDown(resultOffset, allocAlignment); + + // Check next suballocations from 2nd for BufferImageGranularity conflicts. + // Make bigger alignment if necessary. + if (bufferImageGranularity > 1 && bufferImageGranularity != allocAlignment && !suballocations2nd.empty()) + { + bool bufferImageGranularityConflict = false; + for (size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; ) + { + const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex]; + if (VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity)) + { + if (VmaIsBufferImageGranularityConflict(nextSuballoc.type, allocType)) + { + bufferImageGranularityConflict = true; + break; + } + } + else + // Already on previous page. + break; + } + if (bufferImageGranularityConflict) + { + resultOffset = VmaAlignDown(resultOffset, bufferImageGranularity); + } + } + + // There is enough free space. + const VkDeviceSize endOf1st = !suballocations1st.empty() ? + suballocations1st.back().offset + suballocations1st.back().size : + 0; + if (endOf1st + debugMargin <= resultOffset) + { + // Check previous suballocations for BufferImageGranularity conflicts. + // If conflict exists, allocation cannot be made here. + if (bufferImageGranularity > 1) + { + for (size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; ) + { + const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex]; + if (VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity)) + { + if (VmaIsBufferImageGranularityConflict(allocType, prevSuballoc.type)) + { + return false; + } + } + else + { + // Already on next page. + break; + } + } + } + + // All tests passed: Success. 
+ pAllocationRequest->allocHandle = (VmaAllocHandle)(resultOffset + 1); + // pAllocationRequest->item unused. + pAllocationRequest->type = VmaAllocationRequestType::UpperAddress; + return true; + } + + return false; +} +#endif // _VMA_BLOCK_METADATA_LINEAR_FUNCTIONS +#endif // _VMA_BLOCK_METADATA_LINEAR + +#ifndef _VMA_BLOCK_METADATA_TLSF +// To not search current larger region if first allocation won't succeed and skip to smaller range +// use with VMA_ALLOCATION_CREATE_STRATEGY_MIN_MEMORY_BIT as strategy in CreateAllocationRequest(). +// When fragmentation and reusal of previous blocks doesn't matter then use with +// VMA_ALLOCATION_CREATE_STRATEGY_MIN_TIME_BIT for fastest alloc time possible. +class VmaBlockMetadata_TLSF : public VmaBlockMetadata +{ + VMA_CLASS_NO_COPY_NO_MOVE(VmaBlockMetadata_TLSF) +public: + VmaBlockMetadata_TLSF(const VkAllocationCallbacks* pAllocationCallbacks, + VkDeviceSize bufferImageGranularity, bool isVirtual); + virtual ~VmaBlockMetadata_TLSF(); + + size_t GetAllocationCount() const override { return m_AllocCount; } + size_t GetFreeRegionsCount() const override { return m_BlocksFreeCount + 1; } + VkDeviceSize GetSumFreeSize() const override { return m_BlocksFreeSize + m_NullBlock->size; } + bool IsEmpty() const override { return m_NullBlock->offset == 0; } + VkDeviceSize GetAllocationOffset(VmaAllocHandle allocHandle) const override { return ((Block*)allocHandle)->offset; } + + void Init(VkDeviceSize size) override; + bool Validate() const override; + + void AddDetailedStatistics(VmaDetailedStatistics& inoutStats) const override; + void AddStatistics(VmaStatistics& inoutStats) const override; + +#if VMA_STATS_STRING_ENABLED + void PrintDetailedMap(class VmaJsonWriter& json) const override; +#endif + + bool CreateAllocationRequest( + VkDeviceSize allocSize, + VkDeviceSize allocAlignment, + bool upperAddress, + VmaSuballocationType allocType, + uint32_t strategy, + VmaAllocationRequest* pAllocationRequest) override; + + VkResult CheckCorruption(const void* pBlockData) override; + void Alloc( + const VmaAllocationRequest& request, + VmaSuballocationType type, + void* userData) override; + + void Free(VmaAllocHandle allocHandle) override; + void GetAllocationInfo(VmaAllocHandle allocHandle, VmaVirtualAllocationInfo& outInfo) override; + void* GetAllocationUserData(VmaAllocHandle allocHandle) const override; + VmaAllocHandle GetAllocationListBegin() const override; + VmaAllocHandle GetNextAllocation(VmaAllocHandle prevAlloc) const override; + VkDeviceSize GetNextFreeRegionSize(VmaAllocHandle alloc) const override; + void Clear() override; + void SetAllocationUserData(VmaAllocHandle allocHandle, void* userData) override; + void DebugLogAllAllocations() const override; + +private: + // According to original paper it should be preferable 4 or 5: + // M. Masmano, I. Ripoll, A. Crespo, and J. 
Real "TLSF: a New Dynamic Memory Allocator for Real-Time Systems" + // http://www.gii.upv.es/tlsf/files/ecrts04_tlsf.pdf + static const uint8_t SECOND_LEVEL_INDEX = 5; + static const uint16_t SMALL_BUFFER_SIZE = 256; + static const uint32_t INITIAL_BLOCK_ALLOC_COUNT = 16; + static const uint8_t MEMORY_CLASS_SHIFT = 7; + static const uint8_t MAX_MEMORY_CLASSES = 65 - MEMORY_CLASS_SHIFT; + + class Block + { + public: + VkDeviceSize offset; + VkDeviceSize size; + Block* prevPhysical; + Block* nextPhysical; + + void MarkFree() { prevFree = VMA_NULL; } + void MarkTaken() { prevFree = this; } + bool IsFree() const { return prevFree != this; } + void*& UserData() { VMA_HEAVY_ASSERT(!IsFree()); return userData; } + Block*& PrevFree() { return prevFree; } + Block*& NextFree() { VMA_HEAVY_ASSERT(IsFree()); return nextFree; } + + private: + Block* prevFree; // Address of the same block here indicates that block is taken + union + { + Block* nextFree; + void* userData; + }; + }; + + size_t m_AllocCount; + // Total number of free blocks besides null block + size_t m_BlocksFreeCount; + // Total size of free blocks excluding null block + VkDeviceSize m_BlocksFreeSize; + uint32_t m_IsFreeBitmap; + uint8_t m_MemoryClasses; + uint32_t m_InnerIsFreeBitmap[MAX_MEMORY_CLASSES]; + uint32_t m_ListsCount; + /* + * 0: 0-3 lists for small buffers + * 1+: 0-(2^SLI-1) lists for normal buffers + */ + Block** m_FreeList; + VmaPoolAllocator m_BlockAllocator; + Block* m_NullBlock; + VmaBlockBufferImageGranularity m_GranularityHandler; + + uint8_t SizeToMemoryClass(VkDeviceSize size) const; + uint16_t SizeToSecondIndex(VkDeviceSize size, uint8_t memoryClass) const; + uint32_t GetListIndex(uint8_t memoryClass, uint16_t secondIndex) const; + uint32_t GetListIndex(VkDeviceSize size) const; + + void RemoveFreeBlock(Block* block); + void InsertFreeBlock(Block* block); + void MergeBlock(Block* block, Block* prev); + + Block* FindFreeBlock(VkDeviceSize size, uint32_t& listIndex) const; + bool CheckBlock( + Block& block, + uint32_t listIndex, + VkDeviceSize allocSize, + VkDeviceSize allocAlignment, + VmaSuballocationType allocType, + VmaAllocationRequest* pAllocationRequest); +}; + +#ifndef _VMA_BLOCK_METADATA_TLSF_FUNCTIONS +VmaBlockMetadata_TLSF::VmaBlockMetadata_TLSF(const VkAllocationCallbacks* pAllocationCallbacks, + VkDeviceSize bufferImageGranularity, bool isVirtual) + : VmaBlockMetadata(pAllocationCallbacks, bufferImageGranularity, isVirtual), + m_AllocCount(0), + m_BlocksFreeCount(0), + m_BlocksFreeSize(0), + m_IsFreeBitmap(0), + m_MemoryClasses(0), + m_ListsCount(0), + m_FreeList(VMA_NULL), + m_BlockAllocator(pAllocationCallbacks, INITIAL_BLOCK_ALLOC_COUNT), + m_NullBlock(VMA_NULL), + m_GranularityHandler(bufferImageGranularity) {} + +VmaBlockMetadata_TLSF::~VmaBlockMetadata_TLSF() +{ + if (m_FreeList) + vma_delete_array(GetAllocationCallbacks(), m_FreeList, m_ListsCount); + m_GranularityHandler.Destroy(GetAllocationCallbacks()); +} + +void VmaBlockMetadata_TLSF::Init(VkDeviceSize size) +{ + VmaBlockMetadata::Init(size); + + if (!IsVirtual()) + m_GranularityHandler.Init(GetAllocationCallbacks(), size); + + m_NullBlock = m_BlockAllocator.Alloc(); + m_NullBlock->size = size; + m_NullBlock->offset = 0; + m_NullBlock->prevPhysical = VMA_NULL; + m_NullBlock->nextPhysical = VMA_NULL; + m_NullBlock->MarkFree(); + m_NullBlock->NextFree() = VMA_NULL; + m_NullBlock->PrevFree() = VMA_NULL; + uint8_t memoryClass = SizeToMemoryClass(size); + uint16_t sli = SizeToSecondIndex(size, memoryClass); + m_ListsCount = (memoryClass == 0 ? 
0 : (memoryClass - 1) * (1UL << SECOND_LEVEL_INDEX) + sli) + 1; + if (IsVirtual()) + m_ListsCount += 1UL << SECOND_LEVEL_INDEX; + else + m_ListsCount += 4; + + m_MemoryClasses = memoryClass + uint8_t(2); + memset(m_InnerIsFreeBitmap, 0, MAX_MEMORY_CLASSES * sizeof(uint32_t)); + + m_FreeList = vma_new_array(GetAllocationCallbacks(), Block*, m_ListsCount); + memset(m_FreeList, 0, m_ListsCount * sizeof(Block*)); +} + +bool VmaBlockMetadata_TLSF::Validate() const +{ + VMA_VALIDATE(GetSumFreeSize() <= GetSize()); + + VkDeviceSize calculatedSize = m_NullBlock->size; + VkDeviceSize calculatedFreeSize = m_NullBlock->size; + size_t allocCount = 0; + size_t freeCount = 0; + + // Check integrity of free lists + for (uint32_t list = 0; list < m_ListsCount; ++list) + { + Block* block = m_FreeList[list]; + if (block != VMA_NULL) + { + VMA_VALIDATE(block->IsFree()); + VMA_VALIDATE(block->PrevFree() == VMA_NULL); + while (block->NextFree()) + { + VMA_VALIDATE(block->NextFree()->IsFree()); + VMA_VALIDATE(block->NextFree()->PrevFree() == block); + block = block->NextFree(); + } + } + } + + VkDeviceSize nextOffset = m_NullBlock->offset; + auto validateCtx = m_GranularityHandler.StartValidation(GetAllocationCallbacks(), IsVirtual()); + + VMA_VALIDATE(m_NullBlock->nextPhysical == VMA_NULL); + if (m_NullBlock->prevPhysical) + { + VMA_VALIDATE(m_NullBlock->prevPhysical->nextPhysical == m_NullBlock); + } + // Check all blocks + for (Block* prev = m_NullBlock->prevPhysical; prev != VMA_NULL; prev = prev->prevPhysical) + { + VMA_VALIDATE(prev->offset + prev->size == nextOffset); + nextOffset = prev->offset; + calculatedSize += prev->size; + + uint32_t listIndex = GetListIndex(prev->size); + if (prev->IsFree()) + { + ++freeCount; + // Check if free block belongs to free list + Block* freeBlock = m_FreeList[listIndex]; + VMA_VALIDATE(freeBlock != VMA_NULL); + + bool found = false; + do + { + if (freeBlock == prev) + found = true; + + freeBlock = freeBlock->NextFree(); + } while (!found && freeBlock != VMA_NULL); + + VMA_VALIDATE(found); + calculatedFreeSize += prev->size; + } + else + { + ++allocCount; + // Check if taken block is not on a free list + Block* freeBlock = m_FreeList[listIndex]; + while (freeBlock) + { + VMA_VALIDATE(freeBlock != prev); + freeBlock = freeBlock->NextFree(); + } + + if (!IsVirtual()) + { + VMA_VALIDATE(m_GranularityHandler.Validate(validateCtx, prev->offset, prev->size)); + } + } + + if (prev->prevPhysical) + { + VMA_VALIDATE(prev->prevPhysical->nextPhysical == prev); + } + } + + if (!IsVirtual()) + { + VMA_VALIDATE(m_GranularityHandler.FinishValidation(validateCtx)); + } + + VMA_VALIDATE(nextOffset == 0); + VMA_VALIDATE(calculatedSize == GetSize()); + VMA_VALIDATE(calculatedFreeSize == GetSumFreeSize()); + VMA_VALIDATE(allocCount == m_AllocCount); + VMA_VALIDATE(freeCount == m_BlocksFreeCount); + + return true; +} + +void VmaBlockMetadata_TLSF::AddDetailedStatistics(VmaDetailedStatistics& inoutStats) const +{ + inoutStats.statistics.blockCount++; + inoutStats.statistics.blockBytes += GetSize(); + if (m_NullBlock->size > 0) + VmaAddDetailedStatisticsUnusedRange(inoutStats, m_NullBlock->size); + + for (Block* block = m_NullBlock->prevPhysical; block != VMA_NULL; block = block->prevPhysical) + { + if (block->IsFree()) + VmaAddDetailedStatisticsUnusedRange(inoutStats, block->size); + else + VmaAddDetailedStatisticsAllocation(inoutStats, block->size); + } +} + +void VmaBlockMetadata_TLSF::AddStatistics(VmaStatistics& inoutStats) const +{ + inoutStats.blockCount++; + inoutStats.allocationCount 
+= (uint32_t)m_AllocCount; + inoutStats.blockBytes += GetSize(); + inoutStats.allocationBytes += GetSize() - GetSumFreeSize(); +} + +#if VMA_STATS_STRING_ENABLED +void VmaBlockMetadata_TLSF::PrintDetailedMap(class VmaJsonWriter& json) const +{ + size_t blockCount = m_AllocCount + m_BlocksFreeCount; + VmaStlAllocator<Block*> allocator(GetAllocationCallbacks()); + VmaVector<Block*, VmaStlAllocator<Block*>> blockList(blockCount, allocator); + + size_t i = blockCount; + for (Block* block = m_NullBlock->prevPhysical; block != VMA_NULL; block = block->prevPhysical) + { + blockList[--i] = block; + } + VMA_ASSERT(i == 0); + + VmaDetailedStatistics stats; + VmaClearDetailedStatistics(stats); + AddDetailedStatistics(stats); + + PrintDetailedMap_Begin(json, + stats.statistics.blockBytes - stats.statistics.allocationBytes, + stats.statistics.allocationCount, + stats.unusedRangeCount); + + for (; i < blockCount; ++i) + { + Block* block = blockList[i]; + if (block->IsFree()) + PrintDetailedMap_UnusedRange(json, block->offset, block->size); + else + PrintDetailedMap_Allocation(json, block->offset, block->size, block->UserData()); + } + if (m_NullBlock->size > 0) + PrintDetailedMap_UnusedRange(json, m_NullBlock->offset, m_NullBlock->size); + + PrintDetailedMap_End(json); +} +#endif + +bool VmaBlockMetadata_TLSF::CreateAllocationRequest( + VkDeviceSize allocSize, + VkDeviceSize allocAlignment, + bool upperAddress, + VmaSuballocationType allocType, + uint32_t strategy, + VmaAllocationRequest* pAllocationRequest) +{ + VMA_ASSERT(allocSize > 0 && "Cannot allocate empty block!"); + VMA_ASSERT(!upperAddress && "VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT can be used only with linear algorithm."); + + // For small granularity round up + if (!IsVirtual()) + m_GranularityHandler.RoundupAllocRequest(allocType, allocSize, allocAlignment); + + allocSize += GetDebugMargin(); + // Quick check for too small pool + if (allocSize > GetSumFreeSize()) + return false; + + // If no free blocks in pool then check only null block + if (m_BlocksFreeCount == 0) + return CheckBlock(*m_NullBlock, m_ListsCount, allocSize, allocAlignment, allocType, pAllocationRequest); + + // Round up to the next block + VkDeviceSize sizeForNextList = allocSize; + VkDeviceSize smallSizeStep = VkDeviceSize(SMALL_BUFFER_SIZE / (IsVirtual() ?
1 << SECOND_LEVEL_INDEX : 4)); + if (allocSize > SMALL_BUFFER_SIZE) + { + sizeForNextList += (1ULL << (VMA_BITSCAN_MSB(allocSize) - SECOND_LEVEL_INDEX)); + } + else if (allocSize > SMALL_BUFFER_SIZE - smallSizeStep) + sizeForNextList = SMALL_BUFFER_SIZE + 1; + else + sizeForNextList += smallSizeStep; + + uint32_t nextListIndex = m_ListsCount; + uint32_t prevListIndex = m_ListsCount; + Block* nextListBlock = VMA_NULL; + Block* prevListBlock = VMA_NULL; + + // Check blocks according to strategies + if (strategy & VMA_ALLOCATION_CREATE_STRATEGY_MIN_TIME_BIT) + { + // Quick check for larger block first + nextListBlock = FindFreeBlock(sizeForNextList, nextListIndex); + if (nextListBlock != VMA_NULL && CheckBlock(*nextListBlock, nextListIndex, allocSize, allocAlignment, allocType, pAllocationRequest)) + return true; + + // If not fitted then null block + if (CheckBlock(*m_NullBlock, m_ListsCount, allocSize, allocAlignment, allocType, pAllocationRequest)) + return true; + + // Null block failed, search larger bucket + while (nextListBlock) + { + if (CheckBlock(*nextListBlock, nextListIndex, allocSize, allocAlignment, allocType, pAllocationRequest)) + return true; + nextListBlock = nextListBlock->NextFree(); + } + + // Failed again, check best fit bucket + prevListBlock = FindFreeBlock(allocSize, prevListIndex); + while (prevListBlock) + { + if (CheckBlock(*prevListBlock, prevListIndex, allocSize, allocAlignment, allocType, pAllocationRequest)) + return true; + prevListBlock = prevListBlock->NextFree(); + } + } + else if (strategy & VMA_ALLOCATION_CREATE_STRATEGY_MIN_MEMORY_BIT) + { + // Check best fit bucket + prevListBlock = FindFreeBlock(allocSize, prevListIndex); + while (prevListBlock) + { + if (CheckBlock(*prevListBlock, prevListIndex, allocSize, allocAlignment, allocType, pAllocationRequest)) + return true; + prevListBlock = prevListBlock->NextFree(); + } + + // If failed check null block + if (CheckBlock(*m_NullBlock, m_ListsCount, allocSize, allocAlignment, allocType, pAllocationRequest)) + return true; + + // Check larger bucket + nextListBlock = FindFreeBlock(sizeForNextList, nextListIndex); + while (nextListBlock) + { + if (CheckBlock(*nextListBlock, nextListIndex, allocSize, allocAlignment, allocType, pAllocationRequest)) + return true; + nextListBlock = nextListBlock->NextFree(); + } + } + else if (strategy & VMA_ALLOCATION_CREATE_STRATEGY_MIN_OFFSET_BIT ) + { + // Perform search from the start + VmaStlAllocator allocator(GetAllocationCallbacks()); + VmaVector> blockList(m_BlocksFreeCount, allocator); + + size_t i = m_BlocksFreeCount; + for (Block* block = m_NullBlock->prevPhysical; block != VMA_NULL; block = block->prevPhysical) + { + if (block->IsFree() && block->size >= allocSize) + blockList[--i] = block; + } + + for (; i < m_BlocksFreeCount; ++i) + { + Block& block = *blockList[i]; + if (CheckBlock(block, GetListIndex(block.size), allocSize, allocAlignment, allocType, pAllocationRequest)) + return true; + } + + // If failed check null block + if (CheckBlock(*m_NullBlock, m_ListsCount, allocSize, allocAlignment, allocType, pAllocationRequest)) + return true; + + // Whole range searched, no more memory + return false; + } + else + { + // Check larger bucket + nextListBlock = FindFreeBlock(sizeForNextList, nextListIndex); + while (nextListBlock) + { + if (CheckBlock(*nextListBlock, nextListIndex, allocSize, allocAlignment, allocType, pAllocationRequest)) + return true; + nextListBlock = nextListBlock->NextFree(); + } + + // If failed check null block + if (CheckBlock(*m_NullBlock, 
m_ListsCount, allocSize, allocAlignment, allocType, pAllocationRequest)) + return true; + + // Check best fit bucket + prevListBlock = FindFreeBlock(allocSize, prevListIndex); + while (prevListBlock) + { + if (CheckBlock(*prevListBlock, prevListIndex, allocSize, allocAlignment, allocType, pAllocationRequest)) + return true; + prevListBlock = prevListBlock->NextFree(); + } + } + + // Worst case, full search has to be done + while (++nextListIndex < m_ListsCount) + { + nextListBlock = m_FreeList[nextListIndex]; + while (nextListBlock) + { + if (CheckBlock(*nextListBlock, nextListIndex, allocSize, allocAlignment, allocType, pAllocationRequest)) + return true; + nextListBlock = nextListBlock->NextFree(); + } + } + + // No more memory sadly + return false; +} + +VkResult VmaBlockMetadata_TLSF::CheckCorruption(const void* pBlockData) +{ + for (Block* block = m_NullBlock->prevPhysical; block != VMA_NULL; block = block->prevPhysical) + { + if (!block->IsFree()) + { + if (!VmaValidateMagicValue(pBlockData, block->offset + block->size)) + { + VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!"); + return VK_ERROR_UNKNOWN_COPY; + } + } + } + + return VK_SUCCESS; +} + +void VmaBlockMetadata_TLSF::Alloc( + const VmaAllocationRequest& request, + VmaSuballocationType type, + void* userData) +{ + VMA_ASSERT(request.type == VmaAllocationRequestType::TLSF); + + // Get block and pop it from the free list + Block* currentBlock = (Block*)request.allocHandle; + VkDeviceSize offset = request.algorithmData; + VMA_ASSERT(currentBlock != VMA_NULL); + VMA_ASSERT(currentBlock->offset <= offset); + + if (currentBlock != m_NullBlock) + RemoveFreeBlock(currentBlock); + + VkDeviceSize debugMargin = GetDebugMargin(); + VkDeviceSize misssingAlignment = offset - currentBlock->offset; + + // Append missing alignment to prev block or create new one + if (misssingAlignment) + { + Block* prevBlock = currentBlock->prevPhysical; + VMA_ASSERT(prevBlock != VMA_NULL && "There should be no missing alignment at offset 0!"); + + if (prevBlock->IsFree() && prevBlock->size != debugMargin) + { + uint32_t oldList = GetListIndex(prevBlock->size); + prevBlock->size += misssingAlignment; + // Check if new size crosses list bucket + if (oldList != GetListIndex(prevBlock->size)) + { + prevBlock->size -= misssingAlignment; + RemoveFreeBlock(prevBlock); + prevBlock->size += misssingAlignment; + InsertFreeBlock(prevBlock); + } + else + m_BlocksFreeSize += misssingAlignment; + } + else + { + Block* newBlock = m_BlockAllocator.Alloc(); + currentBlock->prevPhysical = newBlock; + prevBlock->nextPhysical = newBlock; + newBlock->prevPhysical = prevBlock; + newBlock->nextPhysical = currentBlock; + newBlock->size = misssingAlignment; + newBlock->offset = currentBlock->offset; + newBlock->MarkTaken(); + + InsertFreeBlock(newBlock); + } + + currentBlock->size -= misssingAlignment; + currentBlock->offset += misssingAlignment; + } + + VkDeviceSize size = request.size + debugMargin; + if (currentBlock->size == size) + { + if (currentBlock == m_NullBlock) + { + // Setup new null block + m_NullBlock = m_BlockAllocator.Alloc(); + m_NullBlock->size = 0; + m_NullBlock->offset = currentBlock->offset + size; + m_NullBlock->prevPhysical = currentBlock; + m_NullBlock->nextPhysical = VMA_NULL; + m_NullBlock->MarkFree(); + m_NullBlock->PrevFree() = VMA_NULL; + m_NullBlock->NextFree() = VMA_NULL; + currentBlock->nextPhysical = m_NullBlock; + currentBlock->MarkTaken(); + } + } + else + { + VMA_ASSERT(currentBlock->size > size && "Proper block already 
found, shouldn't find smaller one!"); + + // Create new free block + Block* newBlock = m_BlockAllocator.Alloc(); + newBlock->size = currentBlock->size - size; + newBlock->offset = currentBlock->offset + size; + newBlock->prevPhysical = currentBlock; + newBlock->nextPhysical = currentBlock->nextPhysical; + currentBlock->nextPhysical = newBlock; + currentBlock->size = size; + + if (currentBlock == m_NullBlock) + { + m_NullBlock = newBlock; + m_NullBlock->MarkFree(); + m_NullBlock->NextFree() = VMA_NULL; + m_NullBlock->PrevFree() = VMA_NULL; + currentBlock->MarkTaken(); + } + else + { + newBlock->nextPhysical->prevPhysical = newBlock; + newBlock->MarkTaken(); + InsertFreeBlock(newBlock); + } + } + currentBlock->UserData() = userData; + + if (debugMargin > 0) + { + currentBlock->size -= debugMargin; + Block* newBlock = m_BlockAllocator.Alloc(); + newBlock->size = debugMargin; + newBlock->offset = currentBlock->offset + currentBlock->size; + newBlock->prevPhysical = currentBlock; + newBlock->nextPhysical = currentBlock->nextPhysical; + newBlock->MarkTaken(); + currentBlock->nextPhysical->prevPhysical = newBlock; + currentBlock->nextPhysical = newBlock; + InsertFreeBlock(newBlock); + } + + if (!IsVirtual()) + m_GranularityHandler.AllocPages((uint8_t)(uintptr_t)request.customData, + currentBlock->offset, currentBlock->size); + ++m_AllocCount; +} + +void VmaBlockMetadata_TLSF::Free(VmaAllocHandle allocHandle) +{ + Block* block = (Block*)allocHandle; + Block* next = block->nextPhysical; + VMA_ASSERT(!block->IsFree() && "Block is already free!"); + + if (!IsVirtual()) + m_GranularityHandler.FreePages(block->offset, block->size); + --m_AllocCount; + + VkDeviceSize debugMargin = GetDebugMargin(); + if (debugMargin > 0) + { + RemoveFreeBlock(next); + MergeBlock(next, block); + block = next; + next = next->nextPhysical; + } + + // Try merging + Block* prev = block->prevPhysical; + if (prev != VMA_NULL && prev->IsFree() && prev->size != debugMargin) + { + RemoveFreeBlock(prev); + MergeBlock(block, prev); + } + + if (!next->IsFree()) + InsertFreeBlock(block); + else if (next == m_NullBlock) + MergeBlock(m_NullBlock, block); + else + { + RemoveFreeBlock(next); + MergeBlock(next, block); + InsertFreeBlock(next); + } +} + +void VmaBlockMetadata_TLSF::GetAllocationInfo(VmaAllocHandle allocHandle, VmaVirtualAllocationInfo& outInfo) +{ + Block* block = (Block*)allocHandle; + VMA_ASSERT(!block->IsFree() && "Cannot get allocation info for free block!"); + outInfo.offset = block->offset; + outInfo.size = block->size; + outInfo.pUserData = block->UserData(); +} + +void* VmaBlockMetadata_TLSF::GetAllocationUserData(VmaAllocHandle allocHandle) const +{ + Block* block = (Block*)allocHandle; + VMA_ASSERT(!block->IsFree() && "Cannot get user data for free block!"); + return block->UserData(); +} + +VmaAllocHandle VmaBlockMetadata_TLSF::GetAllocationListBegin() const +{ + if (m_AllocCount == 0) + return VK_NULL_HANDLE; + + for (Block* block = m_NullBlock->prevPhysical; block; block = block->prevPhysical) + { + if (!block->IsFree()) + return (VmaAllocHandle)block; + } + VMA_ASSERT(false && "If m_AllocCount > 0 then should find any allocation!"); + return VK_NULL_HANDLE; +} + +VmaAllocHandle VmaBlockMetadata_TLSF::GetNextAllocation(VmaAllocHandle prevAlloc) const +{ + Block* startBlock = (Block*)prevAlloc; + VMA_ASSERT(!startBlock->IsFree() && "Incorrect block!"); + + for (Block* block = startBlock->prevPhysical; block; block = block->prevPhysical) + { + if (!block->IsFree()) + return (VmaAllocHandle)block; + } + return 
VK_NULL_HANDLE; +} + +VkDeviceSize VmaBlockMetadata_TLSF::GetNextFreeRegionSize(VmaAllocHandle alloc) const +{ + Block* block = (Block*)alloc; + VMA_ASSERT(!block->IsFree() && "Incorrect block!"); + + if (block->prevPhysical) + return block->prevPhysical->IsFree() ? block->prevPhysical->size : 0; + return 0; +} + +void VmaBlockMetadata_TLSF::Clear() +{ + m_AllocCount = 0; + m_BlocksFreeCount = 0; + m_BlocksFreeSize = 0; + m_IsFreeBitmap = 0; + m_NullBlock->offset = 0; + m_NullBlock->size = GetSize(); + Block* block = m_NullBlock->prevPhysical; + m_NullBlock->prevPhysical = VMA_NULL; + while (block) + { + Block* prev = block->prevPhysical; + m_BlockAllocator.Free(block); + block = prev; + } + memset(m_FreeList, 0, m_ListsCount * sizeof(Block*)); + memset(m_InnerIsFreeBitmap, 0, m_MemoryClasses * sizeof(uint32_t)); + m_GranularityHandler.Clear(); +} + +void VmaBlockMetadata_TLSF::SetAllocationUserData(VmaAllocHandle allocHandle, void* userData) +{ + Block* block = (Block*)allocHandle; + VMA_ASSERT(!block->IsFree() && "Trying to set user data for not allocated block!"); + block->UserData() = userData; +} + +void VmaBlockMetadata_TLSF::DebugLogAllAllocations() const +{ + for (Block* block = m_NullBlock->prevPhysical; block != VMA_NULL; block = block->prevPhysical) + if (!block->IsFree()) + DebugLogAllocation(block->offset, block->size, block->UserData()); +} + +uint8_t VmaBlockMetadata_TLSF::SizeToMemoryClass(VkDeviceSize size) const +{ + if (size > SMALL_BUFFER_SIZE) + return uint8_t(VMA_BITSCAN_MSB(size) - MEMORY_CLASS_SHIFT); + return 0; +} + +uint16_t VmaBlockMetadata_TLSF::SizeToSecondIndex(VkDeviceSize size, uint8_t memoryClass) const +{ + if (memoryClass == 0) + { + if (IsVirtual()) + return static_cast<uint16_t>((size - 1) / 8); + else + return static_cast<uint16_t>((size - 1) / 64); + } + return static_cast<uint16_t>((size >> (memoryClass + MEMORY_CLASS_SHIFT - SECOND_LEVEL_INDEX)) ^ (1U << SECOND_LEVEL_INDEX)); +} + +uint32_t VmaBlockMetadata_TLSF::GetListIndex(uint8_t memoryClass, uint16_t secondIndex) const +{ + if (memoryClass == 0) + return secondIndex; + + const uint32_t index = static_cast<uint32_t>(memoryClass - 1) * (1 << SECOND_LEVEL_INDEX) + secondIndex; + if (IsVirtual()) + return index + (1 << SECOND_LEVEL_INDEX); + else + return index + 4; +} + +uint32_t VmaBlockMetadata_TLSF::GetListIndex(VkDeviceSize size) const +{ + uint8_t memoryClass = SizeToMemoryClass(size); + return GetListIndex(memoryClass, SizeToSecondIndex(size, memoryClass)); +} + +void VmaBlockMetadata_TLSF::RemoveFreeBlock(Block* block) +{ + VMA_ASSERT(block != m_NullBlock); + VMA_ASSERT(block->IsFree()); + + if (block->NextFree() != VMA_NULL) + block->NextFree()->PrevFree() = block->PrevFree(); + if (block->PrevFree() != VMA_NULL) + block->PrevFree()->NextFree() = block->NextFree(); + else + { + uint8_t memClass = SizeToMemoryClass(block->size); + uint16_t secondIndex = SizeToSecondIndex(block->size, memClass); + uint32_t index = GetListIndex(memClass, secondIndex); + VMA_ASSERT(m_FreeList[index] == block); + m_FreeList[index] = block->NextFree(); + if (block->NextFree() == VMA_NULL) + { + m_InnerIsFreeBitmap[memClass] &= ~(1U << secondIndex); + if (m_InnerIsFreeBitmap[memClass] == 0) + m_IsFreeBitmap &= ~(1UL << memClass); + } + } + block->MarkTaken(); + block->UserData() = VMA_NULL; + --m_BlocksFreeCount; + m_BlocksFreeSize -= block->size; +} + +void VmaBlockMetadata_TLSF::InsertFreeBlock(Block* block) +{ + VMA_ASSERT(block != m_NullBlock); + VMA_ASSERT(!block->IsFree() && "Cannot insert block twice!"); + + uint8_t memClass =
SizeToMemoryClass(block->size); + uint16_t secondIndex = SizeToSecondIndex(block->size, memClass); + uint32_t index = GetListIndex(memClass, secondIndex); + VMA_ASSERT(index < m_ListsCount); + block->PrevFree() = VMA_NULL; + block->NextFree() = m_FreeList[index]; + m_FreeList[index] = block; + if (block->NextFree() != VMA_NULL) + block->NextFree()->PrevFree() = block; + else + { + m_InnerIsFreeBitmap[memClass] |= 1U << secondIndex; + m_IsFreeBitmap |= 1UL << memClass; + } + ++m_BlocksFreeCount; + m_BlocksFreeSize += block->size; +} + +void VmaBlockMetadata_TLSF::MergeBlock(Block* block, Block* prev) +{ + VMA_ASSERT(block->prevPhysical == prev && "Cannot merge separate physical regions!"); + VMA_ASSERT(!prev->IsFree() && "Cannot merge block that belongs to free list!"); + + block->offset = prev->offset; + block->size += prev->size; + block->prevPhysical = prev->prevPhysical; + if (block->prevPhysical) + block->prevPhysical->nextPhysical = block; + m_BlockAllocator.Free(prev); +} + +VmaBlockMetadata_TLSF::Block* VmaBlockMetadata_TLSF::FindFreeBlock(VkDeviceSize size, uint32_t& listIndex) const +{ + uint8_t memoryClass = SizeToMemoryClass(size); + uint32_t innerFreeMap = m_InnerIsFreeBitmap[memoryClass] & (~0U << SizeToSecondIndex(size, memoryClass)); + if (!innerFreeMap) + { + // Check higher levels for available blocks + uint32_t freeMap = m_IsFreeBitmap & (~0UL << (memoryClass + 1)); + if (!freeMap) + return VMA_NULL; // No more memory available + + // Find lowest free region + memoryClass = VMA_BITSCAN_LSB(freeMap); + innerFreeMap = m_InnerIsFreeBitmap[memoryClass]; + VMA_ASSERT(innerFreeMap != 0); + } + // Find lowest free subregion + listIndex = GetListIndex(memoryClass, VMA_BITSCAN_LSB(innerFreeMap)); + VMA_ASSERT(m_FreeList[listIndex]); + return m_FreeList[listIndex]; +} + +bool VmaBlockMetadata_TLSF::CheckBlock( + Block& block, + uint32_t listIndex, + VkDeviceSize allocSize, + VkDeviceSize allocAlignment, + VmaSuballocationType allocType, + VmaAllocationRequest* pAllocationRequest) +{ + VMA_ASSERT(block.IsFree() && "Block is already taken!"); + + VkDeviceSize alignedOffset = VmaAlignUp(block.offset, allocAlignment); + if (block.size < allocSize + alignedOffset - block.offset) + return false; + + // Check for granularity conflicts + if (!IsVirtual() && + m_GranularityHandler.CheckConflictAndAlignUp(alignedOffset, allocSize, block.offset, block.size, allocType)) + return false; + + // Alloc successful + pAllocationRequest->type = VmaAllocationRequestType::TLSF; + pAllocationRequest->allocHandle = (VmaAllocHandle)█ + pAllocationRequest->size = allocSize - GetDebugMargin(); + pAllocationRequest->customData = (void*)allocType; + pAllocationRequest->algorithmData = alignedOffset; + + // Place block at the start of list if it's normal block + if (listIndex != m_ListsCount && block.PrevFree()) + { + block.PrevFree()->NextFree() = block.NextFree(); + if (block.NextFree()) + block.NextFree()->PrevFree() = block.PrevFree(); + block.PrevFree() = VMA_NULL; + block.NextFree() = m_FreeList[listIndex]; + m_FreeList[listIndex] = █ + if (block.NextFree()) + block.NextFree()->PrevFree() = █ + } + + return true; +} +#endif // _VMA_BLOCK_METADATA_TLSF_FUNCTIONS +#endif // _VMA_BLOCK_METADATA_TLSF + +#ifndef _VMA_BLOCK_VECTOR +/* +Sequence of VmaDeviceMemoryBlock. Represents memory blocks allocated for a specific +Vulkan memory type. + +Synchronized internally with a mutex. 
+*/ +class VmaBlockVector +{ + friend struct VmaDefragmentationContext_T; + VMA_CLASS_NO_COPY_NO_MOVE(VmaBlockVector) +public: + VmaBlockVector( + VmaAllocator hAllocator, + VmaPool hParentPool, + uint32_t memoryTypeIndex, + VkDeviceSize preferredBlockSize, + size_t minBlockCount, + size_t maxBlockCount, + VkDeviceSize bufferImageGranularity, + bool explicitBlockSize, + uint32_t algorithm, + float priority, + VkDeviceSize minAllocationAlignment, + void* pMemoryAllocateNext); + ~VmaBlockVector(); + + VmaAllocator GetAllocator() const { return m_hAllocator; } + VmaPool GetParentPool() const { return m_hParentPool; } + bool IsCustomPool() const { return m_hParentPool != VMA_NULL; } + uint32_t GetMemoryTypeIndex() const { return m_MemoryTypeIndex; } + VkDeviceSize GetPreferredBlockSize() const { return m_PreferredBlockSize; } + VkDeviceSize GetBufferImageGranularity() const { return m_BufferImageGranularity; } + uint32_t GetAlgorithm() const { return m_Algorithm; } + bool HasExplicitBlockSize() const { return m_ExplicitBlockSize; } + float GetPriority() const { return m_Priority; } + const void* GetAllocationNextPtr() const { return m_pMemoryAllocateNext; } + // To be used only while the m_Mutex is locked. Used during defragmentation. + size_t GetBlockCount() const { return m_Blocks.size(); } + // To be used only while the m_Mutex is locked. Used during defragmentation. + VmaDeviceMemoryBlock* GetBlock(size_t index) const { return m_Blocks[index]; } + VMA_RW_MUTEX &GetMutex() { return m_Mutex; } + + VkResult CreateMinBlocks(); + void AddStatistics(VmaStatistics& inoutStats); + void AddDetailedStatistics(VmaDetailedStatistics& inoutStats); + bool IsEmpty(); + bool IsCorruptionDetectionEnabled() const; + + VkResult Allocate( + VkDeviceSize size, + VkDeviceSize alignment, + const VmaAllocationCreateInfo& createInfo, + VmaSuballocationType suballocType, + size_t allocationCount, + VmaAllocation* pAllocations); + + void Free(const VmaAllocation hAllocation); + +#if VMA_STATS_STRING_ENABLED + void PrintDetailedMap(class VmaJsonWriter& json); +#endif + + VkResult CheckCorruption(); + +private: + const VmaAllocator m_hAllocator; + const VmaPool m_hParentPool; + const uint32_t m_MemoryTypeIndex; + const VkDeviceSize m_PreferredBlockSize; + const size_t m_MinBlockCount; + const size_t m_MaxBlockCount; + const VkDeviceSize m_BufferImageGranularity; + const bool m_ExplicitBlockSize; + const uint32_t m_Algorithm; + const float m_Priority; + const VkDeviceSize m_MinAllocationAlignment; + + void* const m_pMemoryAllocateNext; + VMA_RW_MUTEX m_Mutex; + // Incrementally sorted by sumFreeSize, ascending. + VmaVector> m_Blocks; + uint32_t m_NextBlockId; + bool m_IncrementalSort = true; + + void SetIncrementalSort(bool val) { m_IncrementalSort = val; } + + VkDeviceSize CalcMaxBlockSize() const; + // Finds and removes given block from vector. + void Remove(VmaDeviceMemoryBlock* pBlock); + // Performs single step in sorting m_Blocks. They may not be fully sorted + // after this call. 
+ void IncrementallySortBlocks(); + void SortByFreeSize(); + + VkResult AllocatePage( + VkDeviceSize size, + VkDeviceSize alignment, + const VmaAllocationCreateInfo& createInfo, + VmaSuballocationType suballocType, + VmaAllocation* pAllocation); + + VkResult AllocateFromBlock( + VmaDeviceMemoryBlock* pBlock, + VkDeviceSize size, + VkDeviceSize alignment, + VmaAllocationCreateFlags allocFlags, + void* pUserData, + VmaSuballocationType suballocType, + uint32_t strategy, + VmaAllocation* pAllocation); + + VkResult CommitAllocationRequest( + VmaAllocationRequest& allocRequest, + VmaDeviceMemoryBlock* pBlock, + VkDeviceSize alignment, + VmaAllocationCreateFlags allocFlags, + void* pUserData, + VmaSuballocationType suballocType, + VmaAllocation* pAllocation); + + VkResult CreateBlock(VkDeviceSize blockSize, size_t* pNewBlockIndex); + bool HasEmptyBlock(); +}; +#endif // _VMA_BLOCK_VECTOR + +#ifndef _VMA_DEFRAGMENTATION_CONTEXT +struct VmaDefragmentationContext_T +{ + VMA_CLASS_NO_COPY_NO_MOVE(VmaDefragmentationContext_T) +public: + VmaDefragmentationContext_T( + VmaAllocator hAllocator, + const VmaDefragmentationInfo& info); + ~VmaDefragmentationContext_T(); + + void GetStats(VmaDefragmentationStats& outStats) { outStats = m_GlobalStats; } + + VkResult DefragmentPassBegin(VmaDefragmentationPassMoveInfo& moveInfo); + VkResult DefragmentPassEnd(VmaDefragmentationPassMoveInfo& moveInfo); + +private: + // Max number of allocations to ignore due to size constraints before ending single pass + static const uint8_t MAX_ALLOCS_TO_IGNORE = 16; + enum class CounterStatus { Pass, Ignore, End }; + + struct FragmentedBlock + { + uint32_t data; + VmaDeviceMemoryBlock* block; + }; + struct StateBalanced + { + VkDeviceSize avgFreeSize = 0; + VkDeviceSize avgAllocSize = UINT64_MAX; + }; + struct StateExtensive + { + enum class Operation : uint8_t + { + FindFreeBlockBuffer, FindFreeBlockTexture, FindFreeBlockAll, + MoveBuffers, MoveTextures, MoveAll, + Cleanup, Done + }; + + Operation operation = Operation::FindFreeBlockTexture; + size_t firstFreeBlock = SIZE_MAX; + }; + struct MoveAllocationData + { + VkDeviceSize size; + VkDeviceSize alignment; + VmaSuballocationType type; + VmaAllocationCreateFlags flags; + VmaDefragmentationMove move = {}; + }; + + const VkDeviceSize m_MaxPassBytes; + const uint32_t m_MaxPassAllocations; + const PFN_vmaCheckDefragmentationBreakFunction m_BreakCallback; + void* m_BreakCallbackUserData; + + VmaStlAllocator m_MoveAllocator; + VmaVector> m_Moves; + + uint8_t m_IgnoredAllocs = 0; + uint32_t m_Algorithm; + uint32_t m_BlockVectorCount; + VmaBlockVector* m_PoolBlockVector; + VmaBlockVector** m_pBlockVectors; + size_t m_ImmovableBlockCount = 0; + VmaDefragmentationStats m_GlobalStats = { 0 }; + VmaDefragmentationStats m_PassStats = { 0 }; + void* m_AlgorithmState = VMA_NULL; + + static MoveAllocationData GetMoveData(VmaAllocHandle handle, VmaBlockMetadata* metadata); + CounterStatus CheckCounters(VkDeviceSize bytes); + bool IncrementCounters(VkDeviceSize bytes); + bool ReallocWithinBlock(VmaBlockVector& vector, VmaDeviceMemoryBlock* block); + bool AllocInOtherBlock(size_t start, size_t end, MoveAllocationData& data, VmaBlockVector& vector); + + bool ComputeDefragmentation(VmaBlockVector& vector, size_t index); + bool ComputeDefragmentation_Fast(VmaBlockVector& vector); + bool ComputeDefragmentation_Balanced(VmaBlockVector& vector, size_t index, bool update); + bool ComputeDefragmentation_Full(VmaBlockVector& vector); + bool ComputeDefragmentation_Extensive(VmaBlockVector& vector, 
size_t index); + + void UpdateVectorStatistics(VmaBlockVector& vector, StateBalanced& state); + bool MoveDataToFreeBlocks(VmaSuballocationType currentType, + VmaBlockVector& vector, size_t firstFreeBlock, + bool& texturePresent, bool& bufferPresent, bool& otherPresent); +}; +#endif // _VMA_DEFRAGMENTATION_CONTEXT + +#ifndef _VMA_POOL_T +struct VmaPool_T +{ + friend struct VmaPoolListItemTraits; + VMA_CLASS_NO_COPY_NO_MOVE(VmaPool_T) +public: + VmaBlockVector m_BlockVector; + VmaDedicatedAllocationList m_DedicatedAllocations; + + VmaPool_T( + VmaAllocator hAllocator, + const VmaPoolCreateInfo& createInfo, + VkDeviceSize preferredBlockSize); + ~VmaPool_T(); + + uint32_t GetId() const { return m_Id; } + void SetId(uint32_t id) { VMA_ASSERT(m_Id == 0); m_Id = id; } + + const char* GetName() const { return m_Name; } + void SetName(const char* pName); + +#if VMA_STATS_STRING_ENABLED + //void PrintDetailedMap(class VmaStringBuilder& sb); +#endif + +private: + uint32_t m_Id; + char* m_Name; + VmaPool_T* m_PrevPool = VMA_NULL; + VmaPool_T* m_NextPool = VMA_NULL; +}; + +struct VmaPoolListItemTraits +{ + typedef VmaPool_T ItemType; + + static ItemType* GetPrev(const ItemType* item) { return item->m_PrevPool; } + static ItemType* GetNext(const ItemType* item) { return item->m_NextPool; } + static ItemType*& AccessPrev(ItemType* item) { return item->m_PrevPool; } + static ItemType*& AccessNext(ItemType* item) { return item->m_NextPool; } +}; +#endif // _VMA_POOL_T + +#ifndef _VMA_CURRENT_BUDGET_DATA +struct VmaCurrentBudgetData +{ + VMA_CLASS_NO_COPY_NO_MOVE(VmaCurrentBudgetData) +public: + + VMA_ATOMIC_UINT32 m_BlockCount[VK_MAX_MEMORY_HEAPS]; + VMA_ATOMIC_UINT32 m_AllocationCount[VK_MAX_MEMORY_HEAPS]; + VMA_ATOMIC_UINT64 m_BlockBytes[VK_MAX_MEMORY_HEAPS]; + VMA_ATOMIC_UINT64 m_AllocationBytes[VK_MAX_MEMORY_HEAPS]; + +#if VMA_MEMORY_BUDGET + VMA_ATOMIC_UINT32 m_OperationsSinceBudgetFetch; + VMA_RW_MUTEX m_BudgetMutex; + uint64_t m_VulkanUsage[VK_MAX_MEMORY_HEAPS]; + uint64_t m_VulkanBudget[VK_MAX_MEMORY_HEAPS]; + uint64_t m_BlockBytesAtBudgetFetch[VK_MAX_MEMORY_HEAPS]; +#endif // VMA_MEMORY_BUDGET + + VmaCurrentBudgetData(); + + void AddAllocation(uint32_t heapIndex, VkDeviceSize allocationSize); + void RemoveAllocation(uint32_t heapIndex, VkDeviceSize allocationSize); +}; + +#ifndef _VMA_CURRENT_BUDGET_DATA_FUNCTIONS +VmaCurrentBudgetData::VmaCurrentBudgetData() +{ + for (uint32_t heapIndex = 0; heapIndex < VK_MAX_MEMORY_HEAPS; ++heapIndex) + { + m_BlockCount[heapIndex] = 0; + m_AllocationCount[heapIndex] = 0; + m_BlockBytes[heapIndex] = 0; + m_AllocationBytes[heapIndex] = 0; +#if VMA_MEMORY_BUDGET + m_VulkanUsage[heapIndex] = 0; + m_VulkanBudget[heapIndex] = 0; + m_BlockBytesAtBudgetFetch[heapIndex] = 0; +#endif + } + +#if VMA_MEMORY_BUDGET + m_OperationsSinceBudgetFetch = 0; +#endif +} + +void VmaCurrentBudgetData::AddAllocation(uint32_t heapIndex, VkDeviceSize allocationSize) +{ + m_AllocationBytes[heapIndex] += allocationSize; + ++m_AllocationCount[heapIndex]; +#if VMA_MEMORY_BUDGET + ++m_OperationsSinceBudgetFetch; +#endif +} + +void VmaCurrentBudgetData::RemoveAllocation(uint32_t heapIndex, VkDeviceSize allocationSize) +{ + VMA_ASSERT(m_AllocationBytes[heapIndex] >= allocationSize); + m_AllocationBytes[heapIndex] -= allocationSize; + VMA_ASSERT(m_AllocationCount[heapIndex] > 0); + --m_AllocationCount[heapIndex]; +#if VMA_MEMORY_BUDGET + ++m_OperationsSinceBudgetFetch; +#endif +} +#endif // _VMA_CURRENT_BUDGET_DATA_FUNCTIONS +#endif // _VMA_CURRENT_BUDGET_DATA + +#ifndef 
_VMA_ALLOCATION_OBJECT_ALLOCATOR +/* +Thread-safe wrapper over VmaPoolAllocator free list, for allocation of VmaAllocation_T objects. +*/ +class VmaAllocationObjectAllocator +{ + VMA_CLASS_NO_COPY_NO_MOVE(VmaAllocationObjectAllocator) +public: + VmaAllocationObjectAllocator(const VkAllocationCallbacks* pAllocationCallbacks) + : m_Allocator(pAllocationCallbacks, 1024) {} + + template<typename... Types> VmaAllocation Allocate(Types&&... args); + void Free(VmaAllocation hAlloc); + +private: + VMA_MUTEX m_Mutex; + VmaPoolAllocator<VmaAllocation_T> m_Allocator; +}; + +template<typename... Types> +VmaAllocation VmaAllocationObjectAllocator::Allocate(Types&&... args) +{ + VmaMutexLock mutexLock(m_Mutex); + return m_Allocator.Alloc<Types...>(std::forward<Types>(args)...); +} + +void VmaAllocationObjectAllocator::Free(VmaAllocation hAlloc) +{ + VmaMutexLock mutexLock(m_Mutex); + m_Allocator.Free(hAlloc); +} +#endif // _VMA_ALLOCATION_OBJECT_ALLOCATOR + +#ifndef _VMA_VIRTUAL_BLOCK_T +struct VmaVirtualBlock_T +{ + VMA_CLASS_NO_COPY_NO_MOVE(VmaVirtualBlock_T) +public: + const bool m_AllocationCallbacksSpecified; + const VkAllocationCallbacks m_AllocationCallbacks; + + VmaVirtualBlock_T(const VmaVirtualBlockCreateInfo& createInfo); + ~VmaVirtualBlock_T(); + + VkResult Init() { return VK_SUCCESS; } + bool IsEmpty() const { return m_Metadata->IsEmpty(); } + void Free(VmaVirtualAllocation allocation) { m_Metadata->Free((VmaAllocHandle)allocation); } + void SetAllocationUserData(VmaVirtualAllocation allocation, void* userData) { m_Metadata->SetAllocationUserData((VmaAllocHandle)allocation, userData); } + void Clear() { m_Metadata->Clear(); } + + const VkAllocationCallbacks* GetAllocationCallbacks() const; + void GetAllocationInfo(VmaVirtualAllocation allocation, VmaVirtualAllocationInfo& outInfo); + VkResult Allocate(const VmaVirtualAllocationCreateInfo& createInfo, VmaVirtualAllocation& outAllocation, + VkDeviceSize* outOffset); + void GetStatistics(VmaStatistics& outStats) const; + void CalculateDetailedStatistics(VmaDetailedStatistics& outStats) const; +#if VMA_STATS_STRING_ENABLED + void BuildStatsString(bool detailedMap, VmaStringBuilder& sb) const; +#endif + +private: + VmaBlockMetadata* m_Metadata; +}; + +#ifndef _VMA_VIRTUAL_BLOCK_T_FUNCTIONS +VmaVirtualBlock_T::VmaVirtualBlock_T(const VmaVirtualBlockCreateInfo& createInfo) + : m_AllocationCallbacksSpecified(createInfo.pAllocationCallbacks != VMA_NULL), + m_AllocationCallbacks(createInfo.pAllocationCallbacks != VMA_NULL ? *createInfo.pAllocationCallbacks : VmaEmptyAllocationCallbacks) +{ + const uint32_t algorithm = createInfo.flags & VMA_VIRTUAL_BLOCK_CREATE_ALGORITHM_MASK; + switch (algorithm) + { + case 0: + m_Metadata = vma_new(GetAllocationCallbacks(), VmaBlockMetadata_TLSF)(VK_NULL_HANDLE, 1, true); + break; + case VMA_VIRTUAL_BLOCK_CREATE_LINEAR_ALGORITHM_BIT: + m_Metadata = vma_new(GetAllocationCallbacks(), VmaBlockMetadata_Linear)(VK_NULL_HANDLE, 1, true); + break; + default: + VMA_ASSERT(0); + m_Metadata = vma_new(GetAllocationCallbacks(), VmaBlockMetadata_TLSF)(VK_NULL_HANDLE, 1, true); + } + + m_Metadata->Init(createInfo.size); +} + +VmaVirtualBlock_T::~VmaVirtualBlock_T() +{ + // Define macro VMA_DEBUG_LOG_FORMAT or more specialized VMA_LEAK_LOG_FORMAT + // to receive the list of the unfreed allocations. + if (!m_Metadata->IsEmpty()) + m_Metadata->DebugLogAllAllocations(); + // This is the most important assert in the entire library. + // Hitting it means you have some memory leak - unreleased virtual allocations.
+ VMA_ASSERT_LEAK(m_Metadata->IsEmpty() && "Some virtual allocations were not freed before destruction of this virtual block!"); + + vma_delete(GetAllocationCallbacks(), m_Metadata); +} + +const VkAllocationCallbacks* VmaVirtualBlock_T::GetAllocationCallbacks() const +{ + return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : VMA_NULL; +} + +void VmaVirtualBlock_T::GetAllocationInfo(VmaVirtualAllocation allocation, VmaVirtualAllocationInfo& outInfo) +{ + m_Metadata->GetAllocationInfo((VmaAllocHandle)allocation, outInfo); +} + +VkResult VmaVirtualBlock_T::Allocate(const VmaVirtualAllocationCreateInfo& createInfo, VmaVirtualAllocation& outAllocation, + VkDeviceSize* outOffset) +{ + VmaAllocationRequest request = {}; + if (m_Metadata->CreateAllocationRequest( + createInfo.size, // allocSize + VMA_MAX(createInfo.alignment, (VkDeviceSize)1), // allocAlignment + (createInfo.flags & VMA_VIRTUAL_ALLOCATION_CREATE_UPPER_ADDRESS_BIT) != 0, // upperAddress + VMA_SUBALLOCATION_TYPE_UNKNOWN, // allocType - unimportant + createInfo.flags & VMA_VIRTUAL_ALLOCATION_CREATE_STRATEGY_MASK, // strategy + &request)) + { + m_Metadata->Alloc(request, + VMA_SUBALLOCATION_TYPE_UNKNOWN, // type - unimportant + createInfo.pUserData); + outAllocation = (VmaVirtualAllocation)request.allocHandle; + if(outOffset) + *outOffset = m_Metadata->GetAllocationOffset(request.allocHandle); + return VK_SUCCESS; + } + outAllocation = (VmaVirtualAllocation)VK_NULL_HANDLE; + if (outOffset) + *outOffset = UINT64_MAX; + return VK_ERROR_OUT_OF_DEVICE_MEMORY; +} + +void VmaVirtualBlock_T::GetStatistics(VmaStatistics& outStats) const +{ + VmaClearStatistics(outStats); + m_Metadata->AddStatistics(outStats); +} + +void VmaVirtualBlock_T::CalculateDetailedStatistics(VmaDetailedStatistics& outStats) const +{ + VmaClearDetailedStatistics(outStats); + m_Metadata->AddDetailedStatistics(outStats); +} + +#if VMA_STATS_STRING_ENABLED +void VmaVirtualBlock_T::BuildStatsString(bool detailedMap, VmaStringBuilder& sb) const +{ + VmaJsonWriter json(GetAllocationCallbacks(), sb); + json.BeginObject(); + + VmaDetailedStatistics stats; + CalculateDetailedStatistics(stats); + + json.WriteString("Stats"); + VmaPrintDetailedStatistics(json, stats); + + if (detailedMap) + { + json.WriteString("Details"); + json.BeginObject(); + m_Metadata->PrintDetailedMap(json); + json.EndObject(); + } + + json.EndObject(); +} +#endif // VMA_STATS_STRING_ENABLED +#endif // _VMA_VIRTUAL_BLOCK_T_FUNCTIONS +#endif // _VMA_VIRTUAL_BLOCK_T + + +// Main allocator object. +struct VmaAllocator_T +{ + VMA_CLASS_NO_COPY_NO_MOVE(VmaAllocator_T) +public: + const bool m_UseMutex; + const uint32_t m_VulkanApiVersion; + bool m_UseKhrDedicatedAllocation; // Can be set only if m_VulkanApiVersion < VK_MAKE_VERSION(1, 1, 0). + bool m_UseKhrBindMemory2; // Can be set only if m_VulkanApiVersion < VK_MAKE_VERSION(1, 1, 0). + bool m_UseExtMemoryBudget; + bool m_UseAmdDeviceCoherentMemory; + bool m_UseKhrBufferDeviceAddress; + bool m_UseExtMemoryPriority; + bool m_UseKhrMaintenance4; + bool m_UseKhrMaintenance5; + const VkDevice m_hDevice; + const VkInstance m_hInstance; + const bool m_AllocationCallbacksSpecified; + const VkAllocationCallbacks m_AllocationCallbacks; + VmaDeviceMemoryCallbacks m_DeviceMemoryCallbacks; + VmaAllocationObjectAllocator m_AllocationObjectAllocator; + + // Each bit (1 << i) is set if HeapSizeLimit is enabled for that heap, so cannot allocate more than the heap size. 
+ uint32_t m_HeapSizeLimitMask; + + VkPhysicalDeviceProperties m_PhysicalDeviceProperties; + VkPhysicalDeviceMemoryProperties m_MemProps; + + // Default pools. + VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES]; + VmaDedicatedAllocationList m_DedicatedAllocations[VK_MAX_MEMORY_TYPES]; + + VmaCurrentBudgetData m_Budget; + VMA_ATOMIC_UINT32 m_DeviceMemoryCount; // Total number of VkDeviceMemory objects. + + VmaAllocator_T(const VmaAllocatorCreateInfo* pCreateInfo); + VkResult Init(const VmaAllocatorCreateInfo* pCreateInfo); + ~VmaAllocator_T(); + + const VkAllocationCallbacks* GetAllocationCallbacks() const + { + return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : VMA_NULL; + } + const VmaVulkanFunctions& GetVulkanFunctions() const + { + return m_VulkanFunctions; + } + + VkPhysicalDevice GetPhysicalDevice() const { return m_PhysicalDevice; } + + VkDeviceSize GetBufferImageGranularity() const + { + return VMA_MAX( + static_cast(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY), + m_PhysicalDeviceProperties.limits.bufferImageGranularity); + } + + uint32_t GetMemoryHeapCount() const { return m_MemProps.memoryHeapCount; } + uint32_t GetMemoryTypeCount() const { return m_MemProps.memoryTypeCount; } + + uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex) const + { + VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount); + return m_MemProps.memoryTypes[memTypeIndex].heapIndex; + } + // True when specific memory type is HOST_VISIBLE but not HOST_COHERENT. + bool IsMemoryTypeNonCoherent(uint32_t memTypeIndex) const + { + return (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & (VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)) == + VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT; + } + // Minimum alignment for all allocations in specific memory type. + VkDeviceSize GetMemoryTypeMinAlignment(uint32_t memTypeIndex) const + { + return IsMemoryTypeNonCoherent(memTypeIndex) ? + VMA_MAX((VkDeviceSize)VMA_MIN_ALIGNMENT, m_PhysicalDeviceProperties.limits.nonCoherentAtomSize) : + (VkDeviceSize)VMA_MIN_ALIGNMENT; + } + + bool IsIntegratedGpu() const + { + return m_PhysicalDeviceProperties.deviceType == VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU; + } + + uint32_t GetGlobalMemoryTypeBits() const { return m_GlobalMemoryTypeBits; } + + void GetBufferMemoryRequirements( + VkBuffer hBuffer, + VkMemoryRequirements& memReq, + bool& requiresDedicatedAllocation, + bool& prefersDedicatedAllocation) const; + void GetImageMemoryRequirements( + VkImage hImage, + VkMemoryRequirements& memReq, + bool& requiresDedicatedAllocation, + bool& prefersDedicatedAllocation) const; + VkResult FindMemoryTypeIndex( + uint32_t memoryTypeBits, + const VmaAllocationCreateInfo* pAllocationCreateInfo, + VmaBufferImageUsage bufImgUsage, + uint32_t* pMemoryTypeIndex) const; + + // Main allocation function. + VkResult AllocateMemory( + const VkMemoryRequirements& vkMemReq, + bool requiresDedicatedAllocation, + bool prefersDedicatedAllocation, + VkBuffer dedicatedBuffer, + VkImage dedicatedImage, + VmaBufferImageUsage dedicatedBufferImageUsage, + const VmaAllocationCreateInfo& createInfo, + VmaSuballocationType suballocType, + size_t allocationCount, + VmaAllocation* pAllocations); + + // Main deallocation function. 
+ void FreeMemory( + size_t allocationCount, + const VmaAllocation* pAllocations); + + void CalculateStatistics(VmaTotalStatistics* pStats); + + void GetHeapBudgets( + VmaBudget* outBudgets, uint32_t firstHeap, uint32_t heapCount); + +#if VMA_STATS_STRING_ENABLED + void PrintDetailedMap(class VmaJsonWriter& json); +#endif + + void GetAllocationInfo(VmaAllocation hAllocation, VmaAllocationInfo* pAllocationInfo); + void GetAllocationInfo2(VmaAllocation hAllocation, VmaAllocationInfo2* pAllocationInfo); + + VkResult CreatePool(const VmaPoolCreateInfo* pCreateInfo, VmaPool* pPool); + void DestroyPool(VmaPool pool); + void GetPoolStatistics(VmaPool pool, VmaStatistics* pPoolStats); + void CalculatePoolStatistics(VmaPool pool, VmaDetailedStatistics* pPoolStats); + + void SetCurrentFrameIndex(uint32_t frameIndex); + uint32_t GetCurrentFrameIndex() const { return m_CurrentFrameIndex.load(); } + + VkResult CheckPoolCorruption(VmaPool hPool); + VkResult CheckCorruption(uint32_t memoryTypeBits); + + // Call to Vulkan function vkAllocateMemory with accompanying bookkeeping. + VkResult AllocateVulkanMemory(const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory); + // Call to Vulkan function vkFreeMemory with accompanying bookkeeping. + void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory); + // Call to Vulkan function vkBindBufferMemory or vkBindBufferMemory2KHR. + VkResult BindVulkanBuffer( + VkDeviceMemory memory, + VkDeviceSize memoryOffset, + VkBuffer buffer, + const void* pNext); + // Call to Vulkan function vkBindImageMemory or vkBindImageMemory2KHR. + VkResult BindVulkanImage( + VkDeviceMemory memory, + VkDeviceSize memoryOffset, + VkImage image, + const void* pNext); + + VkResult Map(VmaAllocation hAllocation, void** ppData); + void Unmap(VmaAllocation hAllocation); + + VkResult BindBufferMemory( + VmaAllocation hAllocation, + VkDeviceSize allocationLocalOffset, + VkBuffer hBuffer, + const void* pNext); + VkResult BindImageMemory( + VmaAllocation hAllocation, + VkDeviceSize allocationLocalOffset, + VkImage hImage, + const void* pNext); + + VkResult FlushOrInvalidateAllocation( + VmaAllocation hAllocation, + VkDeviceSize offset, VkDeviceSize size, + VMA_CACHE_OPERATION op); + VkResult FlushOrInvalidateAllocations( + uint32_t allocationCount, + const VmaAllocation* allocations, + const VkDeviceSize* offsets, const VkDeviceSize* sizes, + VMA_CACHE_OPERATION op); + + VkResult CopyMemoryToAllocation( + const void* pSrcHostPointer, + VmaAllocation dstAllocation, + VkDeviceSize dstAllocationLocalOffset, + VkDeviceSize size); + VkResult CopyAllocationToMemory( + VmaAllocation srcAllocation, + VkDeviceSize srcAllocationLocalOffset, + void* pDstHostPointer, + VkDeviceSize size); + + void FillAllocation(const VmaAllocation hAllocation, uint8_t pattern); + + /* + Returns bit mask of memory types that can support defragmentation on GPU as + they support creation of required buffer for copy operations. + */ + uint32_t GetGpuDefragmentationMemoryTypeBits(); + +#if VMA_EXTERNAL_MEMORY + VkExternalMemoryHandleTypeFlagsKHR GetExternalMemoryHandleTypeFlags(uint32_t memTypeIndex) const + { + return m_TypeExternalMemoryHandleTypes[memTypeIndex]; + } +#endif // #if VMA_EXTERNAL_MEMORY + +private: + VkDeviceSize m_PreferredLargeHeapBlockSize; + + VkPhysicalDevice m_PhysicalDevice; + VMA_ATOMIC_UINT32 m_CurrentFrameIndex; + VMA_ATOMIC_UINT32 m_GpuDefragmentationMemoryTypeBits; // UINT32_MAX means uninitialized. 
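[Annotation, not part of the patch: VmaAllocator_T is the internal side of the VmaAllocator handle; the AllocateMemory/FreeMemory, Map/Unmap and Bind* members declared above are what the public vma* entry points dispatch into. A minimal sketch of typical client usage, assuming valid physicalDevice, device and instance handles.]

    VmaVulkanFunctions vulkanFunctions = {};
    vulkanFunctions.vkGetInstanceProcAddr = vkGetInstanceProcAddr;
    vulkanFunctions.vkGetDeviceProcAddr = vkGetDeviceProcAddr;

    VmaAllocatorCreateInfo allocatorCreateInfo = {};
    allocatorCreateInfo.vulkanApiVersion = VK_API_VERSION_1_2;
    allocatorCreateInfo.physicalDevice = physicalDevice;
    allocatorCreateInfo.device = device;
    allocatorCreateInfo.instance = instance;
    allocatorCreateInfo.pVulkanFunctions = &vulkanFunctions;

    VmaAllocator allocator;
    vmaCreateAllocator(&allocatorCreateInfo, &allocator);

    VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufferInfo.size = 65536;
    bufferInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_AUTO; // memory type chosen via FindMemoryTypeIndex()

    VkBuffer buffer;
    VmaAllocation allocation;
    vmaCreateBuffer(allocator, &bufferInfo, &allocCreateInfo, &buffer, &allocation, nullptr);

    // ... use the buffer ...

    vmaDestroyBuffer(allocator, buffer, allocation);
    vmaDestroyAllocator(allocator);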
+#if VMA_EXTERNAL_MEMORY + VkExternalMemoryHandleTypeFlagsKHR m_TypeExternalMemoryHandleTypes[VK_MAX_MEMORY_TYPES]; +#endif // #if VMA_EXTERNAL_MEMORY + + VMA_RW_MUTEX m_PoolsMutex; + typedef VmaIntrusiveLinkedList PoolList; + // Protected by m_PoolsMutex. + PoolList m_Pools; + uint32_t m_NextPoolId; + + VmaVulkanFunctions m_VulkanFunctions; + + // Global bit mask AND-ed with any memoryTypeBits to disallow certain memory types. + uint32_t m_GlobalMemoryTypeBits; + + void ImportVulkanFunctions(const VmaVulkanFunctions* pVulkanFunctions); + +#if VMA_STATIC_VULKAN_FUNCTIONS == 1 + void ImportVulkanFunctions_Static(); +#endif + + void ImportVulkanFunctions_Custom(const VmaVulkanFunctions* pVulkanFunctions); + +#if VMA_DYNAMIC_VULKAN_FUNCTIONS == 1 + void ImportVulkanFunctions_Dynamic(); +#endif + + void ValidateVulkanFunctions(); + + VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex); + + VkResult AllocateMemoryOfType( + VmaPool pool, + VkDeviceSize size, + VkDeviceSize alignment, + bool dedicatedPreferred, + VkBuffer dedicatedBuffer, + VkImage dedicatedImage, + VmaBufferImageUsage dedicatedBufferImageUsage, + const VmaAllocationCreateInfo& createInfo, + uint32_t memTypeIndex, + VmaSuballocationType suballocType, + VmaDedicatedAllocationList& dedicatedAllocations, + VmaBlockVector& blockVector, + size_t allocationCount, + VmaAllocation* pAllocations); + + // Helper function only to be used inside AllocateDedicatedMemory. + VkResult AllocateDedicatedMemoryPage( + VmaPool pool, + VkDeviceSize size, + VmaSuballocationType suballocType, + uint32_t memTypeIndex, + const VkMemoryAllocateInfo& allocInfo, + bool map, + bool isUserDataString, + bool isMappingAllowed, + void* pUserData, + VmaAllocation* pAllocation); + + // Allocates and registers new VkDeviceMemory specifically for dedicated allocations. + VkResult AllocateDedicatedMemory( + VmaPool pool, + VkDeviceSize size, + VmaSuballocationType suballocType, + VmaDedicatedAllocationList& dedicatedAllocations, + uint32_t memTypeIndex, + bool map, + bool isUserDataString, + bool isMappingAllowed, + bool canAliasMemory, + void* pUserData, + float priority, + VkBuffer dedicatedBuffer, + VkImage dedicatedImage, + VmaBufferImageUsage dedicatedBufferImageUsage, + size_t allocationCount, + VmaAllocation* pAllocations, + const void* pNextChain = VMA_NULL); + + void FreeDedicatedMemory(const VmaAllocation allocation); + + VkResult CalcMemTypeParams( + VmaAllocationCreateInfo& outCreateInfo, + uint32_t memTypeIndex, + VkDeviceSize size, + size_t allocationCount); + VkResult CalcAllocationParams( + VmaAllocationCreateInfo& outCreateInfo, + bool dedicatedRequired, + bool dedicatedPreferred); + + /* + Calculates and returns bit mask of memory types that can support defragmentation + on GPU as they support creation of required buffer for copy operations. 
+ */ + uint32_t CalculateGpuDefragmentationMemoryTypeBits() const; + uint32_t CalculateGlobalMemoryTypeBits() const; + + bool GetFlushOrInvalidateRange( + VmaAllocation allocation, + VkDeviceSize offset, VkDeviceSize size, + VkMappedMemoryRange& outRange) const; + +#if VMA_MEMORY_BUDGET + void UpdateVulkanBudget(); +#endif // #if VMA_MEMORY_BUDGET +}; + + +#ifndef _VMA_MEMORY_FUNCTIONS +static void* VmaMalloc(VmaAllocator hAllocator, size_t size, size_t alignment) +{ + return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment); +} + +static void VmaFree(VmaAllocator hAllocator, void* ptr) +{ + VmaFree(&hAllocator->m_AllocationCallbacks, ptr); +} + +template +static T* VmaAllocate(VmaAllocator hAllocator) +{ + return (T*)VmaMalloc(hAllocator, sizeof(T), VMA_ALIGN_OF(T)); +} + +template +static T* VmaAllocateArray(VmaAllocator hAllocator, size_t count) +{ + return (T*)VmaMalloc(hAllocator, sizeof(T) * count, VMA_ALIGN_OF(T)); +} + +template +static void vma_delete(VmaAllocator hAllocator, T* ptr) +{ + if(ptr != VMA_NULL) + { + ptr->~T(); + VmaFree(hAllocator, ptr); + } +} + +template +static void vma_delete_array(VmaAllocator hAllocator, T* ptr, size_t count) +{ + if(ptr != VMA_NULL) + { + for(size_t i = count; i--; ) + ptr[i].~T(); + VmaFree(hAllocator, ptr); + } +} +#endif // _VMA_MEMORY_FUNCTIONS + +#ifndef _VMA_DEVICE_MEMORY_BLOCK_FUNCTIONS +VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(VmaAllocator hAllocator) + : m_pMetadata(VMA_NULL), + m_MemoryTypeIndex(UINT32_MAX), + m_Id(0), + m_hMemory(VK_NULL_HANDLE), + m_MapCount(0), + m_pMappedData(VMA_NULL) {} + +VmaDeviceMemoryBlock::~VmaDeviceMemoryBlock() +{ + VMA_ASSERT_LEAK(m_MapCount == 0 && "VkDeviceMemory block is being destroyed while it is still mapped."); + VMA_ASSERT_LEAK(m_hMemory == VK_NULL_HANDLE); +} + +void VmaDeviceMemoryBlock::Init( + VmaAllocator hAllocator, + VmaPool hParentPool, + uint32_t newMemoryTypeIndex, + VkDeviceMemory newMemory, + VkDeviceSize newSize, + uint32_t id, + uint32_t algorithm, + VkDeviceSize bufferImageGranularity) +{ + VMA_ASSERT(m_hMemory == VK_NULL_HANDLE); + + m_hParentPool = hParentPool; + m_MemoryTypeIndex = newMemoryTypeIndex; + m_Id = id; + m_hMemory = newMemory; + + switch (algorithm) + { + case 0: + m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_TLSF)(hAllocator->GetAllocationCallbacks(), + bufferImageGranularity, false); // isVirtual + break; + case VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT: + m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Linear)(hAllocator->GetAllocationCallbacks(), + bufferImageGranularity, false); // isVirtual + break; + default: + VMA_ASSERT(0); + m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_TLSF)(hAllocator->GetAllocationCallbacks(), + bufferImageGranularity, false); // isVirtual + } + m_pMetadata->Init(newSize); +} + +void VmaDeviceMemoryBlock::Destroy(VmaAllocator allocator) +{ + // Define macro VMA_DEBUG_LOG_FORMAT or more specialized VMA_LEAK_LOG_FORMAT + // to receive the list of the unfreed allocations. + if (!m_pMetadata->IsEmpty()) + m_pMetadata->DebugLogAllAllocations(); + // This is the most important assert in the entire library. + // Hitting it means you have some memory leak - unreleased VmaAllocation objects. 
+ VMA_ASSERT_LEAK(m_pMetadata->IsEmpty() && "Some allocations were not freed before destruction of this memory block!"); + + VMA_ASSERT_LEAK(m_hMemory != VK_NULL_HANDLE); + allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_pMetadata->GetSize(), m_hMemory); + m_hMemory = VK_NULL_HANDLE; + + vma_delete(allocator, m_pMetadata); + m_pMetadata = VMA_NULL; +} + +void VmaDeviceMemoryBlock::PostAlloc(VmaAllocator hAllocator) +{ + VmaMutexLock lock(m_MapAndBindMutex, hAllocator->m_UseMutex); + m_MappingHysteresis.PostAlloc(); +} + +void VmaDeviceMemoryBlock::PostFree(VmaAllocator hAllocator) +{ + VmaMutexLock lock(m_MapAndBindMutex, hAllocator->m_UseMutex); + if(m_MappingHysteresis.PostFree()) + { + VMA_ASSERT(m_MappingHysteresis.GetExtraMapping() == 0); + if (m_MapCount == 0) + { + m_pMappedData = VMA_NULL; + (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_hMemory); + } + } +} + +bool VmaDeviceMemoryBlock::Validate() const +{ + VMA_VALIDATE((m_hMemory != VK_NULL_HANDLE) && + (m_pMetadata->GetSize() != 0)); + + return m_pMetadata->Validate(); +} + +VkResult VmaDeviceMemoryBlock::CheckCorruption(VmaAllocator hAllocator) +{ + void* pData = VMA_NULL; + VkResult res = Map(hAllocator, 1, &pData); + if (res != VK_SUCCESS) + { + return res; + } + + res = m_pMetadata->CheckCorruption(pData); + + Unmap(hAllocator, 1); + + return res; +} + +VkResult VmaDeviceMemoryBlock::Map(VmaAllocator hAllocator, uint32_t count, void** ppData) +{ + if (count == 0) + { + return VK_SUCCESS; + } + + VmaMutexLock lock(m_MapAndBindMutex, hAllocator->m_UseMutex); + const uint32_t oldTotalMapCount = m_MapCount + m_MappingHysteresis.GetExtraMapping(); + if (oldTotalMapCount != 0) + { + VMA_ASSERT(m_pMappedData != VMA_NULL); + m_MappingHysteresis.PostMap(); + m_MapCount += count; + if (ppData != VMA_NULL) + { + *ppData = m_pMappedData; + } + return VK_SUCCESS; + } + else + { + VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)( + hAllocator->m_hDevice, + m_hMemory, + 0, // offset + VK_WHOLE_SIZE, + 0, // flags + &m_pMappedData); + if (result == VK_SUCCESS) + { + VMA_ASSERT(m_pMappedData != VMA_NULL); + m_MappingHysteresis.PostMap(); + m_MapCount = count; + if (ppData != VMA_NULL) + { + *ppData = m_pMappedData; + } + } + return result; + } +} + +void VmaDeviceMemoryBlock::Unmap(VmaAllocator hAllocator, uint32_t count) +{ + if (count == 0) + { + return; + } + + VmaMutexLock lock(m_MapAndBindMutex, hAllocator->m_UseMutex); + if (m_MapCount >= count) + { + m_MapCount -= count; + const uint32_t totalMapCount = m_MapCount + m_MappingHysteresis.GetExtraMapping(); + if (totalMapCount == 0) + { + m_pMappedData = VMA_NULL; + (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_hMemory); + } + m_MappingHysteresis.PostUnmap(); + } + else + { + VMA_ASSERT(0 && "VkDeviceMemory block is being unmapped while it was not previously mapped."); + } +} + +VkResult VmaDeviceMemoryBlock::WriteMagicValueAfterAllocation(VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize) +{ + VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION); + + void* pData; + VkResult res = Map(hAllocator, 1, &pData); + if (res != VK_SUCCESS) + { + return res; + } + + VmaWriteMagicValue(pData, allocOffset + allocSize); + + Unmap(hAllocator, 1); + return VK_SUCCESS; +} + +VkResult VmaDeviceMemoryBlock::ValidateMagicValueAfterAllocation(VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize) +{ + VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && 
VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION); + + void* pData; + VkResult res = Map(hAllocator, 1, &pData); + if (res != VK_SUCCESS) + { + return res; + } + + if (!VmaValidateMagicValue(pData, allocOffset + allocSize)) + { + VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER FREED ALLOCATION!"); + } + + Unmap(hAllocator, 1); + return VK_SUCCESS; +} + +VkResult VmaDeviceMemoryBlock::BindBufferMemory( + const VmaAllocator hAllocator, + const VmaAllocation hAllocation, + VkDeviceSize allocationLocalOffset, + VkBuffer hBuffer, + const void* pNext) +{ + VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK && + hAllocation->GetBlock() == this); + VMA_ASSERT(allocationLocalOffset < hAllocation->GetSize() && + "Invalid allocationLocalOffset. Did you forget that this offset is relative to the beginning of the allocation, not the whole memory block?"); + const VkDeviceSize memoryOffset = hAllocation->GetOffset() + allocationLocalOffset; + // This lock is important so that we don't call vkBind... and/or vkMap... simultaneously on the same VkDeviceMemory from multiple threads. + VmaMutexLock lock(m_MapAndBindMutex, hAllocator->m_UseMutex); + return hAllocator->BindVulkanBuffer(m_hMemory, memoryOffset, hBuffer, pNext); +} + +VkResult VmaDeviceMemoryBlock::BindImageMemory( + const VmaAllocator hAllocator, + const VmaAllocation hAllocation, + VkDeviceSize allocationLocalOffset, + VkImage hImage, + const void* pNext) +{ + VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK && + hAllocation->GetBlock() == this); + VMA_ASSERT(allocationLocalOffset < hAllocation->GetSize() && + "Invalid allocationLocalOffset. Did you forget that this offset is relative to the beginning of the allocation, not the whole memory block?"); + const VkDeviceSize memoryOffset = hAllocation->GetOffset() + allocationLocalOffset; + // This lock is important so that we don't call vkBind... and/or vkMap... simultaneously on the same VkDeviceMemory from multiple threads. + VmaMutexLock lock(m_MapAndBindMutex, hAllocator->m_UseMutex); + return hAllocator->BindVulkanImage(m_hMemory, memoryOffset, hImage, pNext); +} +#endif // _VMA_DEVICE_MEMORY_BLOCK_FUNCTIONS + +#ifndef _VMA_ALLOCATION_T_FUNCTIONS +VmaAllocation_T::VmaAllocation_T(bool mappingAllowed) + : m_Alignment{ 1 }, + m_Size{ 0 }, + m_pUserData{ VMA_NULL }, + m_pName{ VMA_NULL }, + m_MemoryTypeIndex{ 0 }, + m_Type{ (uint8_t)ALLOCATION_TYPE_NONE }, + m_SuballocationType{ (uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN }, + m_MapCount{ 0 }, + m_Flags{ 0 } +{ + if(mappingAllowed) + m_Flags |= (uint8_t)FLAG_MAPPING_ALLOWED; +} + +VmaAllocation_T::~VmaAllocation_T() +{ + VMA_ASSERT_LEAK(m_MapCount == 0 && "Allocation was not unmapped before destruction."); + + // Check if owned string was freed. + VMA_ASSERT(m_pName == VMA_NULL); +} + +void VmaAllocation_T::InitBlockAllocation( + VmaDeviceMemoryBlock* block, + VmaAllocHandle allocHandle, + VkDeviceSize alignment, + VkDeviceSize size, + uint32_t memoryTypeIndex, + VmaSuballocationType suballocationType, + bool mapped) +{ + VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE); + VMA_ASSERT(block != VMA_NULL); + m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK; + m_Alignment = alignment; + m_Size = size; + m_MemoryTypeIndex = memoryTypeIndex; + if(mapped) + { + VMA_ASSERT(IsMappingAllowed() && "Mapping is not allowed on this allocation! 
Please use one of the new VMA_ALLOCATION_CREATE_HOST_ACCESS_* flags when creating it."); + m_Flags |= (uint8_t)FLAG_PERSISTENT_MAP; + } + m_SuballocationType = (uint8_t)suballocationType; + m_BlockAllocation.m_Block = block; + m_BlockAllocation.m_AllocHandle = allocHandle; +} + +void VmaAllocation_T::InitDedicatedAllocation( + VmaPool hParentPool, + uint32_t memoryTypeIndex, + VkDeviceMemory hMemory, + VmaSuballocationType suballocationType, + void* pMappedData, + VkDeviceSize size) +{ + VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE); + VMA_ASSERT(hMemory != VK_NULL_HANDLE); + m_Type = (uint8_t)ALLOCATION_TYPE_DEDICATED; + m_Alignment = 0; + m_Size = size; + m_MemoryTypeIndex = memoryTypeIndex; + m_SuballocationType = (uint8_t)suballocationType; + if(pMappedData != VMA_NULL) + { + VMA_ASSERT(IsMappingAllowed() && "Mapping is not allowed on this allocation! Please use one of the new VMA_ALLOCATION_CREATE_HOST_ACCESS_* flags when creating it."); + m_Flags |= (uint8_t)FLAG_PERSISTENT_MAP; + } + m_DedicatedAllocation.m_hParentPool = hParentPool; + m_DedicatedAllocation.m_hMemory = hMemory; + m_DedicatedAllocation.m_pMappedData = pMappedData; + m_DedicatedAllocation.m_Prev = VMA_NULL; + m_DedicatedAllocation.m_Next = VMA_NULL; +} + +void VmaAllocation_T::SetName(VmaAllocator hAllocator, const char* pName) +{ + VMA_ASSERT(pName == VMA_NULL || pName != m_pName); + + FreeName(hAllocator); + + if (pName != VMA_NULL) + m_pName = VmaCreateStringCopy(hAllocator->GetAllocationCallbacks(), pName); +} + +uint8_t VmaAllocation_T::SwapBlockAllocation(VmaAllocator hAllocator, VmaAllocation allocation) +{ + VMA_ASSERT(allocation != VMA_NULL); + VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK); + VMA_ASSERT(allocation->m_Type == ALLOCATION_TYPE_BLOCK); + + if (m_MapCount != 0) + m_BlockAllocation.m_Block->Unmap(hAllocator, m_MapCount); + + m_BlockAllocation.m_Block->m_pMetadata->SetAllocationUserData(m_BlockAllocation.m_AllocHandle, allocation); + std::swap(m_BlockAllocation, allocation->m_BlockAllocation); + m_BlockAllocation.m_Block->m_pMetadata->SetAllocationUserData(m_BlockAllocation.m_AllocHandle, this); + +#if VMA_STATS_STRING_ENABLED + std::swap(m_BufferImageUsage, allocation->m_BufferImageUsage); +#endif + return m_MapCount; +} + +VmaAllocHandle VmaAllocation_T::GetAllocHandle() const +{ + switch (m_Type) + { + case ALLOCATION_TYPE_BLOCK: + return m_BlockAllocation.m_AllocHandle; + case ALLOCATION_TYPE_DEDICATED: + return VK_NULL_HANDLE; + default: + VMA_ASSERT(0); + return VK_NULL_HANDLE; + } +} + +VkDeviceSize VmaAllocation_T::GetOffset() const +{ + switch (m_Type) + { + case ALLOCATION_TYPE_BLOCK: + return m_BlockAllocation.m_Block->m_pMetadata->GetAllocationOffset(m_BlockAllocation.m_AllocHandle); + case ALLOCATION_TYPE_DEDICATED: + return 0; + default: + VMA_ASSERT(0); + return 0; + } +} + +VmaPool VmaAllocation_T::GetParentPool() const +{ + switch (m_Type) + { + case ALLOCATION_TYPE_BLOCK: + return m_BlockAllocation.m_Block->GetParentPool(); + case ALLOCATION_TYPE_DEDICATED: + return m_DedicatedAllocation.m_hParentPool; + default: + VMA_ASSERT(0); + return VK_NULL_HANDLE; + } +} + +VkDeviceMemory VmaAllocation_T::GetMemory() const +{ + switch (m_Type) + { + case ALLOCATION_TYPE_BLOCK: + return m_BlockAllocation.m_Block->GetDeviceMemory(); + case ALLOCATION_TYPE_DEDICATED: + return m_DedicatedAllocation.m_hMemory; + default: + VMA_ASSERT(0); + return VK_NULL_HANDLE; + } +} + +void* VmaAllocation_T::GetMappedData() const +{ + switch (m_Type) + { + case ALLOCATION_TYPE_BLOCK: + if (m_MapCount != 0 || 
IsPersistentMap()) + { + void* pBlockData = m_BlockAllocation.m_Block->GetMappedData(); + VMA_ASSERT(pBlockData != VMA_NULL); + return (char*)pBlockData + GetOffset(); + } + else + { + return VMA_NULL; + } + break; + case ALLOCATION_TYPE_DEDICATED: + VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0 || IsPersistentMap())); + return m_DedicatedAllocation.m_pMappedData; + default: + VMA_ASSERT(0); + return VMA_NULL; + } +} + +void VmaAllocation_T::BlockAllocMap() +{ + VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK); + VMA_ASSERT(IsMappingAllowed() && "Mapping is not allowed on this allocation! Please use one of the new VMA_ALLOCATION_CREATE_HOST_ACCESS_* flags when creating it."); + + if (m_MapCount < 0xFF) + { + ++m_MapCount; + } + else + { + VMA_ASSERT(0 && "Allocation mapped too many times simultaneously."); + } +} + +void VmaAllocation_T::BlockAllocUnmap() +{ + VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK); + + if (m_MapCount > 0) + { + --m_MapCount; + } + else + { + VMA_ASSERT(0 && "Unmapping allocation not previously mapped."); + } +} + +VkResult VmaAllocation_T::DedicatedAllocMap(VmaAllocator hAllocator, void** ppData) +{ + VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED); + VMA_ASSERT(IsMappingAllowed() && "Mapping is not allowed on this allocation! Please use one of the new VMA_ALLOCATION_CREATE_HOST_ACCESS_* flags when creating it."); + + if (m_MapCount != 0 || IsPersistentMap()) + { + if (m_MapCount < 0xFF) + { + VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL); + *ppData = m_DedicatedAllocation.m_pMappedData; + ++m_MapCount; + return VK_SUCCESS; + } + else + { + VMA_ASSERT(0 && "Dedicated allocation mapped too many times simultaneously."); + return VK_ERROR_MEMORY_MAP_FAILED; + } + } + else + { + VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)( + hAllocator->m_hDevice, + m_DedicatedAllocation.m_hMemory, + 0, // offset + VK_WHOLE_SIZE, + 0, // flags + ppData); + if (result == VK_SUCCESS) + { + m_DedicatedAllocation.m_pMappedData = *ppData; + m_MapCount = 1; + } + return result; + } +} + +void VmaAllocation_T::DedicatedAllocUnmap(VmaAllocator hAllocator) +{ + VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED); + + if (m_MapCount > 0) + { + --m_MapCount; + if (m_MapCount == 0 && !IsPersistentMap()) + { + m_DedicatedAllocation.m_pMappedData = VMA_NULL; + (*hAllocator->GetVulkanFunctions().vkUnmapMemory)( + hAllocator->m_hDevice, + m_DedicatedAllocation.m_hMemory); + } + } + else + { + VMA_ASSERT(0 && "Unmapping dedicated allocation not previously mapped."); + } +} + +#if VMA_STATS_STRING_ENABLED +void VmaAllocation_T::PrintParameters(class VmaJsonWriter& json) const +{ + json.WriteString("Type"); + json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[m_SuballocationType]); + + json.WriteString("Size"); + json.WriteNumber(m_Size); + json.WriteString("Usage"); + json.WriteNumber(m_BufferImageUsage.Value); // It may be uint32_t or uint64_t. 
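[Annotation, not part of the patch: BlockAllocMap/DedicatedAllocMap and their Unmap counterparts above are reached through vmaMapMemory()/vmaUnmapMemory() or the MAPPED creation flag; as the assertion messages state, host access must be requested with VMA_ALLOCATION_CREATE_HOST_ACCESS_* at allocation time. A small sketch of a persistently mapped, host-written staging buffer, assuming `allocator`, `data` and `dataSize` exist.]

    VkBufferCreateInfo stagingInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    stagingInfo.size = dataSize;
    stagingInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_AUTO;
    allocCreateInfo.flags = VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT |
                            VMA_ALLOCATION_CREATE_MAPPED_BIT; // persistent map: pMappedData stays valid

    VkBuffer stagingBuffer;
    VmaAllocation stagingAlloc;
    VmaAllocationInfo allocInfo;
    vmaCreateBuffer(allocator, &stagingInfo, &allocCreateInfo, &stagingBuffer, &stagingAlloc, &allocInfo);

    memcpy(allocInfo.pMappedData, data, dataSize);
    vmaFlushAllocation(allocator, stagingAlloc, 0, VK_WHOLE_SIZE); // no-op on HOST_COHERENT memory types

    vmaDestroyBuffer(allocator, stagingBuffer, stagingAlloc);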
+ + if (m_pUserData != VMA_NULL) + { + json.WriteString("CustomData"); + json.BeginString(); + json.ContinueString_Pointer(m_pUserData); + json.EndString(); + } + if (m_pName != VMA_NULL) + { + json.WriteString("Name"); + json.WriteString(m_pName); + } +} +#endif // VMA_STATS_STRING_ENABLED + +void VmaAllocation_T::FreeName(VmaAllocator hAllocator) +{ + if(m_pName) + { + VmaFreeString(hAllocator->GetAllocationCallbacks(), m_pName); + m_pName = VMA_NULL; + } +} +#endif // _VMA_ALLOCATION_T_FUNCTIONS + +#ifndef _VMA_BLOCK_VECTOR_FUNCTIONS +VmaBlockVector::VmaBlockVector( + VmaAllocator hAllocator, + VmaPool hParentPool, + uint32_t memoryTypeIndex, + VkDeviceSize preferredBlockSize, + size_t minBlockCount, + size_t maxBlockCount, + VkDeviceSize bufferImageGranularity, + bool explicitBlockSize, + uint32_t algorithm, + float priority, + VkDeviceSize minAllocationAlignment, + void* pMemoryAllocateNext) + : m_hAllocator(hAllocator), + m_hParentPool(hParentPool), + m_MemoryTypeIndex(memoryTypeIndex), + m_PreferredBlockSize(preferredBlockSize), + m_MinBlockCount(minBlockCount), + m_MaxBlockCount(maxBlockCount), + m_BufferImageGranularity(bufferImageGranularity), + m_ExplicitBlockSize(explicitBlockSize), + m_Algorithm(algorithm), + m_Priority(priority), + m_MinAllocationAlignment(minAllocationAlignment), + m_pMemoryAllocateNext(pMemoryAllocateNext), + m_Blocks(VmaStlAllocator(hAllocator->GetAllocationCallbacks())), + m_NextBlockId(0) {} + +VmaBlockVector::~VmaBlockVector() +{ + for (size_t i = m_Blocks.size(); i--; ) + { + m_Blocks[i]->Destroy(m_hAllocator); + vma_delete(m_hAllocator, m_Blocks[i]); + } +} + +VkResult VmaBlockVector::CreateMinBlocks() +{ + for (size_t i = 0; i < m_MinBlockCount; ++i) + { + VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL); + if (res != VK_SUCCESS) + { + return res; + } + } + return VK_SUCCESS; +} + +void VmaBlockVector::AddStatistics(VmaStatistics& inoutStats) +{ + VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex); + + const size_t blockCount = m_Blocks.size(); + for (uint32_t blockIndex = 0; blockIndex < blockCount; ++blockIndex) + { + const VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex]; + VMA_ASSERT(pBlock); + VMA_HEAVY_ASSERT(pBlock->Validate()); + pBlock->m_pMetadata->AddStatistics(inoutStats); + } +} + +void VmaBlockVector::AddDetailedStatistics(VmaDetailedStatistics& inoutStats) +{ + VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex); + + const size_t blockCount = m_Blocks.size(); + for (uint32_t blockIndex = 0; blockIndex < blockCount; ++blockIndex) + { + const VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex]; + VMA_ASSERT(pBlock); + VMA_HEAVY_ASSERT(pBlock->Validate()); + pBlock->m_pMetadata->AddDetailedStatistics(inoutStats); + } +} + +bool VmaBlockVector::IsEmpty() +{ + VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex); + return m_Blocks.empty(); +} + +bool VmaBlockVector::IsCorruptionDetectionEnabled() const +{ + const uint32_t requiredMemFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT; + return (VMA_DEBUG_DETECT_CORRUPTION != 0) && + (VMA_DEBUG_MARGIN > 0) && + (m_Algorithm == 0 || m_Algorithm == VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT) && + (m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags & requiredMemFlags) == requiredMemFlags; +} + +VkResult VmaBlockVector::Allocate( + VkDeviceSize size, + VkDeviceSize alignment, + const VmaAllocationCreateInfo& createInfo, + VmaSuballocationType suballocType, + size_t allocationCount, + VmaAllocation* pAllocations) 
+{ + size_t allocIndex; + VkResult res = VK_SUCCESS; + + alignment = VMA_MAX(alignment, m_MinAllocationAlignment); + + if (IsCorruptionDetectionEnabled()) + { + size = VmaAlignUp(size, sizeof(VMA_CORRUPTION_DETECTION_MAGIC_VALUE)); + alignment = VmaAlignUp(alignment, sizeof(VMA_CORRUPTION_DETECTION_MAGIC_VALUE)); + } + + { + VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex); + for (allocIndex = 0; allocIndex < allocationCount; ++allocIndex) + { + res = AllocatePage( + size, + alignment, + createInfo, + suballocType, + pAllocations + allocIndex); + if (res != VK_SUCCESS) + { + break; + } + } + } + + if (res != VK_SUCCESS) + { + // Free all already created allocations. + while (allocIndex--) + Free(pAllocations[allocIndex]); + memset(pAllocations, 0, sizeof(VmaAllocation) * allocationCount); + } + + return res; +} + +VkResult VmaBlockVector::AllocatePage( + VkDeviceSize size, + VkDeviceSize alignment, + const VmaAllocationCreateInfo& createInfo, + VmaSuballocationType suballocType, + VmaAllocation* pAllocation) +{ + const bool isUpperAddress = (createInfo.flags & VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT) != 0; + + VkDeviceSize freeMemory; + { + const uint32_t heapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex); + VmaBudget heapBudget = {}; + m_hAllocator->GetHeapBudgets(&heapBudget, heapIndex, 1); + freeMemory = (heapBudget.usage < heapBudget.budget) ? (heapBudget.budget - heapBudget.usage) : 0; + } + + const bool canFallbackToDedicated = !HasExplicitBlockSize() && + (createInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) == 0; + const bool canCreateNewBlock = + ((createInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) == 0) && + (m_Blocks.size() < m_MaxBlockCount) && + (freeMemory >= size || !canFallbackToDedicated); + uint32_t strategy = createInfo.flags & VMA_ALLOCATION_CREATE_STRATEGY_MASK; + + // Upper address can only be used with linear allocator and within single memory block. + if (isUpperAddress && + (m_Algorithm != VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT || m_MaxBlockCount > 1)) + { + return VK_ERROR_FEATURE_NOT_PRESENT; + } + + // Early reject: requested allocation size is larger that maximum block size for this block vector. + if (size + VMA_DEBUG_MARGIN > m_PreferredBlockSize) + { + return VK_ERROR_OUT_OF_DEVICE_MEMORY; + } + + // 1. Search existing allocations. Try to allocate. + if (m_Algorithm == VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT) + { + // Use only last block. + if (!m_Blocks.empty()) + { + VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks.back(); + VMA_ASSERT(pCurrBlock); + VkResult res = AllocateFromBlock( + pCurrBlock, size, alignment, createInfo.flags, createInfo.pUserData, suballocType, strategy, pAllocation); + if (res == VK_SUCCESS) + { + VMA_DEBUG_LOG_FORMAT(" Returned from last block #%" PRIu32, pCurrBlock->GetId()); + IncrementallySortBlocks(); + return VK_SUCCESS; + } + } + } + else + { + if (strategy != VMA_ALLOCATION_CREATE_STRATEGY_MIN_TIME_BIT) // MIN_MEMORY or default + { + const bool isHostVisible = + (m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0; + if(isHostVisible) + { + const bool isMappingAllowed = (createInfo.flags & + (VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT | VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT)) != 0; + /* + For non-mappable allocations, check blocks that are not mapped first. + For mappable allocations, check blocks that are already mapped first. 
+ This way, having many blocks, we will separate mappable and non-mappable allocations, + hopefully limiting the number of blocks that are mapped, which will help tools like RenderDoc. + */ + for(size_t mappingI = 0; mappingI < 2; ++mappingI) + { + // Forward order in m_Blocks - prefer blocks with smallest amount of free space. + for (size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex) + { + VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex]; + VMA_ASSERT(pCurrBlock); + const bool isBlockMapped = pCurrBlock->GetMappedData() != VMA_NULL; + if((mappingI == 0) == (isMappingAllowed == isBlockMapped)) + { + VkResult res = AllocateFromBlock( + pCurrBlock, size, alignment, createInfo.flags, createInfo.pUserData, suballocType, strategy, pAllocation); + if (res == VK_SUCCESS) + { + VMA_DEBUG_LOG_FORMAT(" Returned from existing block #%" PRIu32, pCurrBlock->GetId()); + IncrementallySortBlocks(); + return VK_SUCCESS; + } + } + } + } + } + else + { + // Forward order in m_Blocks - prefer blocks with smallest amount of free space. + for (size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex) + { + VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex]; + VMA_ASSERT(pCurrBlock); + VkResult res = AllocateFromBlock( + pCurrBlock, size, alignment, createInfo.flags, createInfo.pUserData, suballocType, strategy, pAllocation); + if (res == VK_SUCCESS) + { + VMA_DEBUG_LOG_FORMAT(" Returned from existing block #%" PRIu32, pCurrBlock->GetId()); + IncrementallySortBlocks(); + return VK_SUCCESS; + } + } + } + } + else // VMA_ALLOCATION_CREATE_STRATEGY_MIN_TIME_BIT + { + // Backward order in m_Blocks - prefer blocks with largest amount of free space. + for (size_t blockIndex = m_Blocks.size(); blockIndex--; ) + { + VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex]; + VMA_ASSERT(pCurrBlock); + VkResult res = AllocateFromBlock(pCurrBlock, size, alignment, createInfo.flags, createInfo.pUserData, suballocType, strategy, pAllocation); + if (res == VK_SUCCESS) + { + VMA_DEBUG_LOG_FORMAT(" Returned from existing block #%" PRIu32, pCurrBlock->GetId()); + IncrementallySortBlocks(); + return VK_SUCCESS; + } + } + } + } + + // 2. Try to create new block. + if (canCreateNewBlock) + { + // Calculate optimal size for new block. + VkDeviceSize newBlockSize = m_PreferredBlockSize; + uint32_t newBlockSizeShift = 0; + const uint32_t NEW_BLOCK_SIZE_SHIFT_MAX = 3; + + if (!m_ExplicitBlockSize) + { + // Allocate 1/8, 1/4, 1/2 as first blocks. + const VkDeviceSize maxExistingBlockSize = CalcMaxBlockSize(); + for (uint32_t i = 0; i < NEW_BLOCK_SIZE_SHIFT_MAX; ++i) + { + const VkDeviceSize smallerNewBlockSize = newBlockSize / 2; + if (smallerNewBlockSize > maxExistingBlockSize && smallerNewBlockSize >= size * 2) + { + newBlockSize = smallerNewBlockSize; + ++newBlockSizeShift; + } + else + { + break; + } + } + } + + size_t newBlockIndex = 0; + VkResult res = (newBlockSize <= freeMemory || !canFallbackToDedicated) ? + CreateBlock(newBlockSize, &newBlockIndex) : VK_ERROR_OUT_OF_DEVICE_MEMORY; + // Allocation of this size failed? Try 1/2, 1/4, 1/8 of m_PreferredBlockSize. + if (!m_ExplicitBlockSize) + { + while (res < 0 && newBlockSizeShift < NEW_BLOCK_SIZE_SHIFT_MAX) + { + const VkDeviceSize smallerNewBlockSize = newBlockSize / 2; + if (smallerNewBlockSize >= size) + { + newBlockSize = smallerNewBlockSize; + ++newBlockSizeShift; + res = (newBlockSize <= freeMemory || !canFallbackToDedicated) ? 
+ CreateBlock(newBlockSize, &newBlockIndex) : VK_ERROR_OUT_OF_DEVICE_MEMORY; + } + else + { + break; + } + } + } + + if (res == VK_SUCCESS) + { + VmaDeviceMemoryBlock* const pBlock = m_Blocks[newBlockIndex]; + VMA_ASSERT(pBlock->m_pMetadata->GetSize() >= size); + + res = AllocateFromBlock( + pBlock, size, alignment, createInfo.flags, createInfo.pUserData, suballocType, strategy, pAllocation); + if (res == VK_SUCCESS) + { + VMA_DEBUG_LOG_FORMAT(" Created new block #%" PRIu32 " Size=%" PRIu64, pBlock->GetId(), newBlockSize); + IncrementallySortBlocks(); + return VK_SUCCESS; + } + else + { + // Allocation from new block failed, possibly due to VMA_DEBUG_MARGIN or alignment. + return VK_ERROR_OUT_OF_DEVICE_MEMORY; + } + } + } + + return VK_ERROR_OUT_OF_DEVICE_MEMORY; +} + +void VmaBlockVector::Free(const VmaAllocation hAllocation) +{ + VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL; + + bool budgetExceeded = false; + { + const uint32_t heapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex); + VmaBudget heapBudget = {}; + m_hAllocator->GetHeapBudgets(&heapBudget, heapIndex, 1); + budgetExceeded = heapBudget.usage >= heapBudget.budget; + } + + // Scope for lock. + { + VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex); + + VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock(); + + if (IsCorruptionDetectionEnabled()) + { + VkResult res = pBlock->ValidateMagicValueAfterAllocation(m_hAllocator, hAllocation->GetOffset(), hAllocation->GetSize()); + VMA_ASSERT(res == VK_SUCCESS && "Couldn't map block memory to validate magic value."); + } + + if (hAllocation->IsPersistentMap()) + { + pBlock->Unmap(m_hAllocator, 1); + } + + const bool hadEmptyBlockBeforeFree = HasEmptyBlock(); + pBlock->m_pMetadata->Free(hAllocation->GetAllocHandle()); + pBlock->PostFree(m_hAllocator); + VMA_HEAVY_ASSERT(pBlock->Validate()); + + VMA_DEBUG_LOG_FORMAT(" Freed from MemoryTypeIndex=%" PRIu32, m_MemoryTypeIndex); + + const bool canDeleteBlock = m_Blocks.size() > m_MinBlockCount; + // pBlock became empty after this deallocation. + if (pBlock->m_pMetadata->IsEmpty()) + { + // Already had empty block. We don't want to have two, so delete this one. + if ((hadEmptyBlockBeforeFree || budgetExceeded) && canDeleteBlock) + { + pBlockToDelete = pBlock; + Remove(pBlock); + } + // else: We now have one empty block - leave it. A hysteresis to avoid allocating whole block back and forth. + } + // pBlock didn't become empty, but we have another empty block - find and free that one. + // (This is optional, heuristics.) + else if (hadEmptyBlockBeforeFree && canDeleteBlock) + { + VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back(); + if (pLastBlock->m_pMetadata->IsEmpty()) + { + pBlockToDelete = pLastBlock; + m_Blocks.pop_back(); + } + } + + IncrementallySortBlocks(); + } + + // Destruction of a free block. Deferred until this point, outside of mutex + // lock, for performance reason. 
+ if (pBlockToDelete != VMA_NULL) + { + VMA_DEBUG_LOG_FORMAT(" Deleted empty block #%" PRIu32, pBlockToDelete->GetId()); + pBlockToDelete->Destroy(m_hAllocator); + vma_delete(m_hAllocator, pBlockToDelete); + } + + m_hAllocator->m_Budget.RemoveAllocation(m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex), hAllocation->GetSize()); + m_hAllocator->m_AllocationObjectAllocator.Free(hAllocation); +} + +VkDeviceSize VmaBlockVector::CalcMaxBlockSize() const +{ + VkDeviceSize result = 0; + for (size_t i = m_Blocks.size(); i--; ) + { + result = VMA_MAX(result, m_Blocks[i]->m_pMetadata->GetSize()); + if (result >= m_PreferredBlockSize) + { + break; + } + } + return result; +} + +void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock) +{ + for (uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex) + { + if (m_Blocks[blockIndex] == pBlock) + { + VmaVectorRemove(m_Blocks, blockIndex); + return; + } + } + VMA_ASSERT(0); +} + +void VmaBlockVector::IncrementallySortBlocks() +{ + if (!m_IncrementalSort) + return; + if (m_Algorithm != VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT) + { + // Bubble sort only until first swap. + for (size_t i = 1; i < m_Blocks.size(); ++i) + { + if (m_Blocks[i - 1]->m_pMetadata->GetSumFreeSize() > m_Blocks[i]->m_pMetadata->GetSumFreeSize()) + { + std::swap(m_Blocks[i - 1], m_Blocks[i]); + return; + } + } + } +} + +void VmaBlockVector::SortByFreeSize() +{ + VMA_SORT(m_Blocks.begin(), m_Blocks.end(), + [](VmaDeviceMemoryBlock* b1, VmaDeviceMemoryBlock* b2) -> bool + { + return b1->m_pMetadata->GetSumFreeSize() < b2->m_pMetadata->GetSumFreeSize(); + }); +} + +VkResult VmaBlockVector::AllocateFromBlock( + VmaDeviceMemoryBlock* pBlock, + VkDeviceSize size, + VkDeviceSize alignment, + VmaAllocationCreateFlags allocFlags, + void* pUserData, + VmaSuballocationType suballocType, + uint32_t strategy, + VmaAllocation* pAllocation) +{ + const bool isUpperAddress = (allocFlags & VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT) != 0; + + VmaAllocationRequest currRequest = {}; + if (pBlock->m_pMetadata->CreateAllocationRequest( + size, + alignment, + isUpperAddress, + suballocType, + strategy, + &currRequest)) + { + return CommitAllocationRequest(currRequest, pBlock, alignment, allocFlags, pUserData, suballocType, pAllocation); + } + return VK_ERROR_OUT_OF_DEVICE_MEMORY; +} + +VkResult VmaBlockVector::CommitAllocationRequest( + VmaAllocationRequest& allocRequest, + VmaDeviceMemoryBlock* pBlock, + VkDeviceSize alignment, + VmaAllocationCreateFlags allocFlags, + void* pUserData, + VmaSuballocationType suballocType, + VmaAllocation* pAllocation) +{ + const bool mapped = (allocFlags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0; + const bool isUserDataString = (allocFlags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0; + const bool isMappingAllowed = (allocFlags & + (VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT | VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT)) != 0; + + pBlock->PostAlloc(m_hAllocator); + // Allocate from pCurrBlock. + if (mapped) + { + VkResult res = pBlock->Map(m_hAllocator, 1, VMA_NULL); + if (res != VK_SUCCESS) + { + return res; + } + } + + *pAllocation = m_hAllocator->m_AllocationObjectAllocator.Allocate(isMappingAllowed); + pBlock->m_pMetadata->Alloc(allocRequest, suballocType, *pAllocation); + (*pAllocation)->InitBlockAllocation( + pBlock, + allocRequest.allocHandle, + alignment, + allocRequest.size, // Not size, as actual allocation size may be larger than requested! 
+ m_MemoryTypeIndex, + suballocType, + mapped); + VMA_HEAVY_ASSERT(pBlock->Validate()); + if (isUserDataString) + (*pAllocation)->SetName(m_hAllocator, (const char*)pUserData); + else + (*pAllocation)->SetUserData(m_hAllocator, pUserData); + m_hAllocator->m_Budget.AddAllocation(m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex), allocRequest.size); + if (VMA_DEBUG_INITIALIZE_ALLOCATIONS) + { + m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED); + } + if (IsCorruptionDetectionEnabled()) + { + VkResult res = pBlock->WriteMagicValueAfterAllocation(m_hAllocator, (*pAllocation)->GetOffset(), allocRequest.size); + VMA_ASSERT(res == VK_SUCCESS && "Couldn't map block memory to write magic value."); + } + return VK_SUCCESS; +} + +VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize, size_t* pNewBlockIndex) +{ + VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO }; + allocInfo.pNext = m_pMemoryAllocateNext; + allocInfo.memoryTypeIndex = m_MemoryTypeIndex; + allocInfo.allocationSize = blockSize; + +#if VMA_BUFFER_DEVICE_ADDRESS + // Every standalone block can potentially contain a buffer with VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT - always enable the feature. + VkMemoryAllocateFlagsInfoKHR allocFlagsInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO_KHR }; + if (m_hAllocator->m_UseKhrBufferDeviceAddress) + { + allocFlagsInfo.flags = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT_KHR; + VmaPnextChainPushFront(&allocInfo, &allocFlagsInfo); + } +#endif // VMA_BUFFER_DEVICE_ADDRESS + +#if VMA_MEMORY_PRIORITY + VkMemoryPriorityAllocateInfoEXT priorityInfo = { VK_STRUCTURE_TYPE_MEMORY_PRIORITY_ALLOCATE_INFO_EXT }; + if (m_hAllocator->m_UseExtMemoryPriority) + { + VMA_ASSERT(m_Priority >= 0.f && m_Priority <= 1.f); + priorityInfo.priority = m_Priority; + VmaPnextChainPushFront(&allocInfo, &priorityInfo); + } +#endif // VMA_MEMORY_PRIORITY + +#if VMA_EXTERNAL_MEMORY + // Attach VkExportMemoryAllocateInfoKHR if necessary. + VkExportMemoryAllocateInfoKHR exportMemoryAllocInfo = { VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_KHR }; + exportMemoryAllocInfo.handleTypes = m_hAllocator->GetExternalMemoryHandleTypeFlags(m_MemoryTypeIndex); + if (exportMemoryAllocInfo.handleTypes != 0) + { + VmaPnextChainPushFront(&allocInfo, &exportMemoryAllocInfo); + } +#endif // VMA_EXTERNAL_MEMORY + + VkDeviceMemory mem = VK_NULL_HANDLE; + VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem); + if (res < 0) + { + return res; + } + + // New VkDeviceMemory successfully created. + + // Create new Allocation for it. 
+ VmaDeviceMemoryBlock* const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator); + pBlock->Init( + m_hAllocator, + m_hParentPool, + m_MemoryTypeIndex, + mem, + allocInfo.allocationSize, + m_NextBlockId++, + m_Algorithm, + m_BufferImageGranularity); + + m_Blocks.push_back(pBlock); + if (pNewBlockIndex != VMA_NULL) + { + *pNewBlockIndex = m_Blocks.size() - 1; + } + + return VK_SUCCESS; +} + +bool VmaBlockVector::HasEmptyBlock() +{ + for (size_t index = 0, count = m_Blocks.size(); index < count; ++index) + { + VmaDeviceMemoryBlock* const pBlock = m_Blocks[index]; + if (pBlock->m_pMetadata->IsEmpty()) + { + return true; + } + } + return false; +} + +#if VMA_STATS_STRING_ENABLED +void VmaBlockVector::PrintDetailedMap(class VmaJsonWriter& json) +{ + VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex); + + + json.BeginObject(); + for (size_t i = 0; i < m_Blocks.size(); ++i) + { + json.BeginString(); + json.ContinueString(m_Blocks[i]->GetId()); + json.EndString(); + + json.BeginObject(); + json.WriteString("MapRefCount"); + json.WriteNumber(m_Blocks[i]->GetMapRefCount()); + + m_Blocks[i]->m_pMetadata->PrintDetailedMap(json); + json.EndObject(); + } + json.EndObject(); +} +#endif // VMA_STATS_STRING_ENABLED + +VkResult VmaBlockVector::CheckCorruption() +{ + if (!IsCorruptionDetectionEnabled()) + { + return VK_ERROR_FEATURE_NOT_PRESENT; + } + + VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex); + for (uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex) + { + VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex]; + VMA_ASSERT(pBlock); + VkResult res = pBlock->CheckCorruption(m_hAllocator); + if (res != VK_SUCCESS) + { + return res; + } + } + return VK_SUCCESS; +} + +#endif // _VMA_BLOCK_VECTOR_FUNCTIONS + +#ifndef _VMA_DEFRAGMENTATION_CONTEXT_FUNCTIONS +VmaDefragmentationContext_T::VmaDefragmentationContext_T( + VmaAllocator hAllocator, + const VmaDefragmentationInfo& info) + : m_MaxPassBytes(info.maxBytesPerPass == 0 ? VK_WHOLE_SIZE : info.maxBytesPerPass), + m_MaxPassAllocations(info.maxAllocationsPerPass == 0 ? 
UINT32_MAX : info.maxAllocationsPerPass), + m_BreakCallback(info.pfnBreakCallback), + m_BreakCallbackUserData(info.pBreakCallbackUserData), + m_MoveAllocator(hAllocator->GetAllocationCallbacks()), + m_Moves(m_MoveAllocator) +{ + m_Algorithm = info.flags & VMA_DEFRAGMENTATION_FLAG_ALGORITHM_MASK; + + if (info.pool != VMA_NULL) + { + m_BlockVectorCount = 1; + m_PoolBlockVector = &info.pool->m_BlockVector; + m_pBlockVectors = &m_PoolBlockVector; + m_PoolBlockVector->SetIncrementalSort(false); + m_PoolBlockVector->SortByFreeSize(); + } + else + { + m_BlockVectorCount = hAllocator->GetMemoryTypeCount(); + m_PoolBlockVector = VMA_NULL; + m_pBlockVectors = hAllocator->m_pBlockVectors; + for (uint32_t i = 0; i < m_BlockVectorCount; ++i) + { + VmaBlockVector* vector = m_pBlockVectors[i]; + if (vector != VMA_NULL) + { + vector->SetIncrementalSort(false); + vector->SortByFreeSize(); + } + } + } + + switch (m_Algorithm) + { + case 0: // Default algorithm + m_Algorithm = VMA_DEFRAGMENTATION_FLAG_ALGORITHM_BALANCED_BIT; + m_AlgorithmState = vma_new_array(hAllocator, StateBalanced, m_BlockVectorCount); + break; + case VMA_DEFRAGMENTATION_FLAG_ALGORITHM_BALANCED_BIT: + m_AlgorithmState = vma_new_array(hAllocator, StateBalanced, m_BlockVectorCount); + break; + case VMA_DEFRAGMENTATION_FLAG_ALGORITHM_EXTENSIVE_BIT: + if (hAllocator->GetBufferImageGranularity() > 1) + { + m_AlgorithmState = vma_new_array(hAllocator, StateExtensive, m_BlockVectorCount); + } + break; + } +} + +VmaDefragmentationContext_T::~VmaDefragmentationContext_T() +{ + if (m_PoolBlockVector != VMA_NULL) + { + m_PoolBlockVector->SetIncrementalSort(true); + } + else + { + for (uint32_t i = 0; i < m_BlockVectorCount; ++i) + { + VmaBlockVector* vector = m_pBlockVectors[i]; + if (vector != VMA_NULL) + vector->SetIncrementalSort(true); + } + } + + if (m_AlgorithmState) + { + switch (m_Algorithm) + { + case VMA_DEFRAGMENTATION_FLAG_ALGORITHM_BALANCED_BIT: + vma_delete_array(m_MoveAllocator.m_pCallbacks, reinterpret_cast(m_AlgorithmState), m_BlockVectorCount); + break; + case VMA_DEFRAGMENTATION_FLAG_ALGORITHM_EXTENSIVE_BIT: + vma_delete_array(m_MoveAllocator.m_pCallbacks, reinterpret_cast(m_AlgorithmState), m_BlockVectorCount); + break; + default: + VMA_ASSERT(0); + } + } +} + +VkResult VmaDefragmentationContext_T::DefragmentPassBegin(VmaDefragmentationPassMoveInfo& moveInfo) +{ + if (m_PoolBlockVector != VMA_NULL) + { + VmaMutexLockWrite lock(m_PoolBlockVector->GetMutex(), m_PoolBlockVector->GetAllocator()->m_UseMutex); + + if (m_PoolBlockVector->GetBlockCount() > 1) + ComputeDefragmentation(*m_PoolBlockVector, 0); + else if (m_PoolBlockVector->GetBlockCount() == 1) + ReallocWithinBlock(*m_PoolBlockVector, m_PoolBlockVector->GetBlock(0)); + } + else + { + for (uint32_t i = 0; i < m_BlockVectorCount; ++i) + { + if (m_pBlockVectors[i] != VMA_NULL) + { + VmaMutexLockWrite lock(m_pBlockVectors[i]->GetMutex(), m_pBlockVectors[i]->GetAllocator()->m_UseMutex); + + if (m_pBlockVectors[i]->GetBlockCount() > 1) + { + if (ComputeDefragmentation(*m_pBlockVectors[i], i)) + break; + } + else if (m_pBlockVectors[i]->GetBlockCount() == 1) + { + if (ReallocWithinBlock(*m_pBlockVectors[i], m_pBlockVectors[i]->GetBlock(0))) + break; + } + } + } + } + + moveInfo.moveCount = static_cast(m_Moves.size()); + if (moveInfo.moveCount > 0) + { + moveInfo.pMoves = m_Moves.data(); + return VK_INCOMPLETE; + } + + moveInfo.pMoves = VMA_NULL; + return VK_SUCCESS; +} + +VkResult VmaDefragmentationContext_T::DefragmentPassEnd(VmaDefragmentationPassMoveInfo& moveInfo) +{ + 
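[Annotation, not part of the patch: DefragmentPassBegin()/DefragmentPassEnd() above implement the incremental defragmentation loop exposed as vmaBeginDefragmentationPass()/vmaEndDefragmentationPass(). A minimal sketch of the driving loop; the actual data copy between srcAllocation and dstTmpAllocation (e.g. vkCmdCopyBuffer plus submit/wait) is elided, and `allocator` is assumed to exist.]

    VmaDefragmentationInfo defragInfo = {};
    defragInfo.flags = VMA_DEFRAGMENTATION_FLAG_ALGORITHM_BALANCED_BIT;

    VmaDefragmentationContext defragCtx;
    vmaBeginDefragmentation(allocator, &defragInfo, &defragCtx);

    for (;;)
    {
        VmaDefragmentationPassMoveInfo pass = {};
        VkResult res = vmaBeginDefragmentationPass(allocator, defragCtx, &pass);
        if (res == VK_SUCCESS)
            break; // nothing left to move
        // res == VK_INCOMPLETE: record copies for pass.pMoves[0..moveCount), or mark individual
        // moves as IGNORE/DESTROY, then submit and wait before ending the pass.

        res = vmaEndDefragmentationPass(allocator, defragCtx, &pass);
        if (res == VK_SUCCESS)
            break;
    }

    VmaDefragmentationStats stats = {};
    vmaEndDefragmentation(allocator, defragCtx, &stats);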
VMA_ASSERT(moveInfo.moveCount > 0 ? moveInfo.pMoves != VMA_NULL : true); + + VkResult result = VK_SUCCESS; + VmaStlAllocator blockAllocator(m_MoveAllocator.m_pCallbacks); + VmaVector> immovableBlocks(blockAllocator); + VmaVector> mappedBlocks(blockAllocator); + + VmaAllocator allocator = VMA_NULL; + for (uint32_t i = 0; i < moveInfo.moveCount; ++i) + { + VmaDefragmentationMove& move = moveInfo.pMoves[i]; + size_t prevCount = 0, currentCount = 0; + VkDeviceSize freedBlockSize = 0; + + uint32_t vectorIndex; + VmaBlockVector* vector; + if (m_PoolBlockVector != VMA_NULL) + { + vectorIndex = 0; + vector = m_PoolBlockVector; + } + else + { + vectorIndex = move.srcAllocation->GetMemoryTypeIndex(); + vector = m_pBlockVectors[vectorIndex]; + VMA_ASSERT(vector != VMA_NULL); + } + + switch (move.operation) + { + case VMA_DEFRAGMENTATION_MOVE_OPERATION_COPY: + { + uint8_t mapCount = move.srcAllocation->SwapBlockAllocation(vector->m_hAllocator, move.dstTmpAllocation); + if (mapCount > 0) + { + allocator = vector->m_hAllocator; + VmaDeviceMemoryBlock* newMapBlock = move.srcAllocation->GetBlock(); + bool notPresent = true; + for (FragmentedBlock& block : mappedBlocks) + { + if (block.block == newMapBlock) + { + notPresent = false; + block.data += mapCount; + break; + } + } + if (notPresent) + mappedBlocks.push_back({ mapCount, newMapBlock }); + } + + // Scope for locks, Free have it's own lock + { + VmaMutexLockRead lock(vector->GetMutex(), vector->GetAllocator()->m_UseMutex); + prevCount = vector->GetBlockCount(); + freedBlockSize = move.dstTmpAllocation->GetBlock()->m_pMetadata->GetSize(); + } + vector->Free(move.dstTmpAllocation); + { + VmaMutexLockRead lock(vector->GetMutex(), vector->GetAllocator()->m_UseMutex); + currentCount = vector->GetBlockCount(); + } + + result = VK_INCOMPLETE; + break; + } + case VMA_DEFRAGMENTATION_MOVE_OPERATION_IGNORE: + { + m_PassStats.bytesMoved -= move.srcAllocation->GetSize(); + --m_PassStats.allocationsMoved; + vector->Free(move.dstTmpAllocation); + + VmaDeviceMemoryBlock* newBlock = move.srcAllocation->GetBlock(); + bool notPresent = true; + for (const FragmentedBlock& block : immovableBlocks) + { + if (block.block == newBlock) + { + notPresent = false; + break; + } + } + if (notPresent) + immovableBlocks.push_back({ vectorIndex, newBlock }); + break; + } + case VMA_DEFRAGMENTATION_MOVE_OPERATION_DESTROY: + { + m_PassStats.bytesMoved -= move.srcAllocation->GetSize(); + --m_PassStats.allocationsMoved; + // Scope for locks, Free have it's own lock + { + VmaMutexLockRead lock(vector->GetMutex(), vector->GetAllocator()->m_UseMutex); + prevCount = vector->GetBlockCount(); + freedBlockSize = move.srcAllocation->GetBlock()->m_pMetadata->GetSize(); + } + vector->Free(move.srcAllocation); + { + VmaMutexLockRead lock(vector->GetMutex(), vector->GetAllocator()->m_UseMutex); + currentCount = vector->GetBlockCount(); + } + freedBlockSize *= prevCount - currentCount; + + VkDeviceSize dstBlockSize; + { + VmaMutexLockRead lock(vector->GetMutex(), vector->GetAllocator()->m_UseMutex); + dstBlockSize = move.dstTmpAllocation->GetBlock()->m_pMetadata->GetSize(); + } + vector->Free(move.dstTmpAllocation); + { + VmaMutexLockRead lock(vector->GetMutex(), vector->GetAllocator()->m_UseMutex); + freedBlockSize += dstBlockSize * (currentCount - vector->GetBlockCount()); + currentCount = vector->GetBlockCount(); + } + + result = VK_INCOMPLETE; + break; + } + default: + VMA_ASSERT(0); + } + + if (prevCount > currentCount) + { + size_t freedBlocks = prevCount - currentCount; + 
m_PassStats.deviceMemoryBlocksFreed += static_cast(freedBlocks); + m_PassStats.bytesFreed += freedBlockSize; + } + + if(m_Algorithm == VMA_DEFRAGMENTATION_FLAG_ALGORITHM_EXTENSIVE_BIT && + m_AlgorithmState != VMA_NULL) + { + // Avoid unnecessary tries to allocate when new free block is available + StateExtensive& state = reinterpret_cast(m_AlgorithmState)[vectorIndex]; + if (state.firstFreeBlock != SIZE_MAX) + { + const size_t diff = prevCount - currentCount; + if (state.firstFreeBlock >= diff) + { + state.firstFreeBlock -= diff; + if (state.firstFreeBlock != 0) + state.firstFreeBlock -= vector->GetBlock(state.firstFreeBlock - 1)->m_pMetadata->IsEmpty(); + } + else + state.firstFreeBlock = 0; + } + } + } + moveInfo.moveCount = 0; + moveInfo.pMoves = VMA_NULL; + m_Moves.clear(); + + // Update stats + m_GlobalStats.allocationsMoved += m_PassStats.allocationsMoved; + m_GlobalStats.bytesFreed += m_PassStats.bytesFreed; + m_GlobalStats.bytesMoved += m_PassStats.bytesMoved; + m_GlobalStats.deviceMemoryBlocksFreed += m_PassStats.deviceMemoryBlocksFreed; + m_PassStats = { 0 }; + + // Move blocks with immovable allocations according to algorithm + if (immovableBlocks.size() > 0) + { + do + { + if(m_Algorithm == VMA_DEFRAGMENTATION_FLAG_ALGORITHM_EXTENSIVE_BIT) + { + if (m_AlgorithmState != VMA_NULL) + { + bool swapped = false; + // Move to the start of free blocks range + for (const FragmentedBlock& block : immovableBlocks) + { + StateExtensive& state = reinterpret_cast(m_AlgorithmState)[block.data]; + if (state.operation != StateExtensive::Operation::Cleanup) + { + VmaBlockVector* vector = m_pBlockVectors[block.data]; + VmaMutexLockWrite lock(vector->GetMutex(), vector->GetAllocator()->m_UseMutex); + + for (size_t i = 0, count = vector->GetBlockCount() - m_ImmovableBlockCount; i < count; ++i) + { + if (vector->GetBlock(i) == block.block) + { + std::swap(vector->m_Blocks[i], vector->m_Blocks[vector->GetBlockCount() - ++m_ImmovableBlockCount]); + if (state.firstFreeBlock != SIZE_MAX) + { + if (i + 1 < state.firstFreeBlock) + { + if (state.firstFreeBlock > 1) + std::swap(vector->m_Blocks[i], vector->m_Blocks[--state.firstFreeBlock]); + else + --state.firstFreeBlock; + } + } + swapped = true; + break; + } + } + } + } + if (swapped) + result = VK_INCOMPLETE; + break; + } + } + + // Move to the beginning + for (const FragmentedBlock& block : immovableBlocks) + { + VmaBlockVector* vector = m_pBlockVectors[block.data]; + VmaMutexLockWrite lock(vector->GetMutex(), vector->GetAllocator()->m_UseMutex); + + for (size_t i = m_ImmovableBlockCount; i < vector->GetBlockCount(); ++i) + { + if (vector->GetBlock(i) == block.block) + { + std::swap(vector->m_Blocks[i], vector->m_Blocks[m_ImmovableBlockCount++]); + break; + } + } + } + } while (false); + } + + // Bulk-map destination blocks + for (const FragmentedBlock& block : mappedBlocks) + { + VkResult res = block.block->Map(allocator, block.data, VMA_NULL); + VMA_ASSERT(res == VK_SUCCESS); + } + return result; +} + +bool VmaDefragmentationContext_T::ComputeDefragmentation(VmaBlockVector& vector, size_t index) +{ + switch (m_Algorithm) + { + case VMA_DEFRAGMENTATION_FLAG_ALGORITHM_FAST_BIT: + return ComputeDefragmentation_Fast(vector); + case VMA_DEFRAGMENTATION_FLAG_ALGORITHM_BALANCED_BIT: + return ComputeDefragmentation_Balanced(vector, index, true); + case VMA_DEFRAGMENTATION_FLAG_ALGORITHM_FULL_BIT: + return ComputeDefragmentation_Full(vector); + case VMA_DEFRAGMENTATION_FLAG_ALGORITHM_EXTENSIVE_BIT: + return ComputeDefragmentation_Extensive(vector, index); + 
default: + VMA_ASSERT(0); + return ComputeDefragmentation_Balanced(vector, index, true); + } +} + +VmaDefragmentationContext_T::MoveAllocationData VmaDefragmentationContext_T::GetMoveData( + VmaAllocHandle handle, VmaBlockMetadata* metadata) +{ + MoveAllocationData moveData; + moveData.move.srcAllocation = (VmaAllocation)metadata->GetAllocationUserData(handle); + moveData.size = moveData.move.srcAllocation->GetSize(); + moveData.alignment = moveData.move.srcAllocation->GetAlignment(); + moveData.type = moveData.move.srcAllocation->GetSuballocationType(); + moveData.flags = 0; + + if (moveData.move.srcAllocation->IsPersistentMap()) + moveData.flags |= VMA_ALLOCATION_CREATE_MAPPED_BIT; + if (moveData.move.srcAllocation->IsMappingAllowed()) + moveData.flags |= VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT | VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT; + + return moveData; +} + +VmaDefragmentationContext_T::CounterStatus VmaDefragmentationContext_T::CheckCounters(VkDeviceSize bytes) +{ + // Check custom criteria if exists + if (m_BreakCallback && m_BreakCallback(m_BreakCallbackUserData)) + return CounterStatus::End; + + // Ignore allocation if will exceed max size for copy + if (m_PassStats.bytesMoved + bytes > m_MaxPassBytes) + { + if (++m_IgnoredAllocs < MAX_ALLOCS_TO_IGNORE) + return CounterStatus::Ignore; + else + return CounterStatus::End; + } + else + m_IgnoredAllocs = 0; + return CounterStatus::Pass; +} + +bool VmaDefragmentationContext_T::IncrementCounters(VkDeviceSize bytes) +{ + m_PassStats.bytesMoved += bytes; + // Early return when max found + if (++m_PassStats.allocationsMoved >= m_MaxPassAllocations || m_PassStats.bytesMoved >= m_MaxPassBytes) + { + VMA_ASSERT((m_PassStats.allocationsMoved == m_MaxPassAllocations || + m_PassStats.bytesMoved == m_MaxPassBytes) && "Exceeded maximal pass threshold!"); + return true; + } + return false; +} + +bool VmaDefragmentationContext_T::ReallocWithinBlock(VmaBlockVector& vector, VmaDeviceMemoryBlock* block) +{ + VmaBlockMetadata* metadata = block->m_pMetadata; + + for (VmaAllocHandle handle = metadata->GetAllocationListBegin(); + handle != VK_NULL_HANDLE; + handle = metadata->GetNextAllocation(handle)) + { + MoveAllocationData moveData = GetMoveData(handle, metadata); + // Ignore newly created allocations by defragmentation algorithm + if (moveData.move.srcAllocation->GetUserData() == this) + continue; + switch (CheckCounters(moveData.move.srcAllocation->GetSize())) + { + case CounterStatus::Ignore: + continue; + case CounterStatus::End: + return true; + case CounterStatus::Pass: + break; + default: + VMA_ASSERT(0); + } + + VkDeviceSize offset = moveData.move.srcAllocation->GetOffset(); + if (offset != 0 && metadata->GetSumFreeSize() >= moveData.size) + { + VmaAllocationRequest request = {}; + if (metadata->CreateAllocationRequest( + moveData.size, + moveData.alignment, + false, + moveData.type, + VMA_ALLOCATION_CREATE_STRATEGY_MIN_OFFSET_BIT, + &request)) + { + if (metadata->GetAllocationOffset(request.allocHandle) < offset) + { + if (vector.CommitAllocationRequest( + request, + block, + moveData.alignment, + moveData.flags, + this, + moveData.type, + &moveData.move.dstTmpAllocation) == VK_SUCCESS) + { + m_Moves.push_back(moveData.move); + if (IncrementCounters(moveData.size)) + return true; + } + } + } + } + } + return false; +} + +bool VmaDefragmentationContext_T::AllocInOtherBlock(size_t start, size_t end, MoveAllocationData& data, VmaBlockVector& vector) +{ + for (; start < end; ++start) + { + VmaDeviceMemoryBlock* dstBlock = 
vector.GetBlock(start); + if (dstBlock->m_pMetadata->GetSumFreeSize() >= data.size) + { + if (vector.AllocateFromBlock(dstBlock, + data.size, + data.alignment, + data.flags, + this, + data.type, + 0, + &data.move.dstTmpAllocation) == VK_SUCCESS) + { + m_Moves.push_back(data.move); + if (IncrementCounters(data.size)) + return true; + break; + } + } + } + return false; +} + +bool VmaDefragmentationContext_T::ComputeDefragmentation_Fast(VmaBlockVector& vector) +{ + // Move only between blocks + + // Go through allocations in last blocks and try to fit them inside first ones + for (size_t i = vector.GetBlockCount() - 1; i > m_ImmovableBlockCount; --i) + { + VmaBlockMetadata* metadata = vector.GetBlock(i)->m_pMetadata; + + for (VmaAllocHandle handle = metadata->GetAllocationListBegin(); + handle != VK_NULL_HANDLE; + handle = metadata->GetNextAllocation(handle)) + { + MoveAllocationData moveData = GetMoveData(handle, metadata); + // Ignore newly created allocations by defragmentation algorithm + if (moveData.move.srcAllocation->GetUserData() == this) + continue; + switch (CheckCounters(moveData.move.srcAllocation->GetSize())) + { + case CounterStatus::Ignore: + continue; + case CounterStatus::End: + return true; + case CounterStatus::Pass: + break; + default: + VMA_ASSERT(0); + } + + // Check all previous blocks for free space + if (AllocInOtherBlock(0, i, moveData, vector)) + return true; + } + } + return false; +} + +bool VmaDefragmentationContext_T::ComputeDefragmentation_Balanced(VmaBlockVector& vector, size_t index, bool update) +{ + // Go over every allocation and try to fit it in previous blocks at lowest offsets, + // if not possible: realloc within single block to minimize offset (exclude offset == 0), + // but only if there are noticeable gaps between them (some heuristic, ex. 
average size of allocation in block) + VMA_ASSERT(m_AlgorithmState != VMA_NULL); + + StateBalanced& vectorState = reinterpret_cast(m_AlgorithmState)[index]; + if (update && vectorState.avgAllocSize == UINT64_MAX) + UpdateVectorStatistics(vector, vectorState); + + const size_t startMoveCount = m_Moves.size(); + VkDeviceSize minimalFreeRegion = vectorState.avgFreeSize / 2; + for (size_t i = vector.GetBlockCount() - 1; i > m_ImmovableBlockCount; --i) + { + VmaDeviceMemoryBlock* block = vector.GetBlock(i); + VmaBlockMetadata* metadata = block->m_pMetadata; + VkDeviceSize prevFreeRegionSize = 0; + + for (VmaAllocHandle handle = metadata->GetAllocationListBegin(); + handle != VK_NULL_HANDLE; + handle = metadata->GetNextAllocation(handle)) + { + MoveAllocationData moveData = GetMoveData(handle, metadata); + // Ignore newly created allocations by defragmentation algorithm + if (moveData.move.srcAllocation->GetUserData() == this) + continue; + switch (CheckCounters(moveData.move.srcAllocation->GetSize())) + { + case CounterStatus::Ignore: + continue; + case CounterStatus::End: + return true; + case CounterStatus::Pass: + break; + default: + VMA_ASSERT(0); + } + + // Check all previous blocks for free space + const size_t prevMoveCount = m_Moves.size(); + if (AllocInOtherBlock(0, i, moveData, vector)) + return true; + + VkDeviceSize nextFreeRegionSize = metadata->GetNextFreeRegionSize(handle); + // If no room found then realloc within block for lower offset + VkDeviceSize offset = moveData.move.srcAllocation->GetOffset(); + if (prevMoveCount == m_Moves.size() && offset != 0 && metadata->GetSumFreeSize() >= moveData.size) + { + // Check if realloc will make sense + if (prevFreeRegionSize >= minimalFreeRegion || + nextFreeRegionSize >= minimalFreeRegion || + moveData.size <= vectorState.avgFreeSize || + moveData.size <= vectorState.avgAllocSize) + { + VmaAllocationRequest request = {}; + if (metadata->CreateAllocationRequest( + moveData.size, + moveData.alignment, + false, + moveData.type, + VMA_ALLOCATION_CREATE_STRATEGY_MIN_OFFSET_BIT, + &request)) + { + if (metadata->GetAllocationOffset(request.allocHandle) < offset) + { + if (vector.CommitAllocationRequest( + request, + block, + moveData.alignment, + moveData.flags, + this, + moveData.type, + &moveData.move.dstTmpAllocation) == VK_SUCCESS) + { + m_Moves.push_back(moveData.move); + if (IncrementCounters(moveData.size)) + return true; + } + } + } + } + } + prevFreeRegionSize = nextFreeRegionSize; + } + } + + // No moves performed, update statistics to current vector state + if (startMoveCount == m_Moves.size() && !update) + { + vectorState.avgAllocSize = UINT64_MAX; + return ComputeDefragmentation_Balanced(vector, index, false); + } + return false; +} + +bool VmaDefragmentationContext_T::ComputeDefragmentation_Full(VmaBlockVector& vector) +{ + // Go over every allocation and try to fit it in previous blocks at lowest offsets, + // if not possible: realloc within single block to minimize offset (exclude offset == 0) + + for (size_t i = vector.GetBlockCount() - 1; i > m_ImmovableBlockCount; --i) + { + VmaDeviceMemoryBlock* block = vector.GetBlock(i); + VmaBlockMetadata* metadata = block->m_pMetadata; + + for (VmaAllocHandle handle = metadata->GetAllocationListBegin(); + handle != VK_NULL_HANDLE; + handle = metadata->GetNextAllocation(handle)) + { + MoveAllocationData moveData = GetMoveData(handle, metadata); + // Ignore newly created allocations by defragmentation algorithm + if (moveData.move.srcAllocation->GetUserData() == this) + continue; + switch 
(CheckCounters(moveData.move.srcAllocation->GetSize())) + { + case CounterStatus::Ignore: + continue; + case CounterStatus::End: + return true; + case CounterStatus::Pass: + break; + default: + VMA_ASSERT(0); + } + + // Check all previous blocks for free space + const size_t prevMoveCount = m_Moves.size(); + if (AllocInOtherBlock(0, i, moveData, vector)) + return true; + + // If no room found then realloc within block for lower offset + VkDeviceSize offset = moveData.move.srcAllocation->GetOffset(); + if (prevMoveCount == m_Moves.size() && offset != 0 && metadata->GetSumFreeSize() >= moveData.size) + { + VmaAllocationRequest request = {}; + if (metadata->CreateAllocationRequest( + moveData.size, + moveData.alignment, + false, + moveData.type, + VMA_ALLOCATION_CREATE_STRATEGY_MIN_OFFSET_BIT, + &request)) + { + if (metadata->GetAllocationOffset(request.allocHandle) < offset) + { + if (vector.CommitAllocationRequest( + request, + block, + moveData.alignment, + moveData.flags, + this, + moveData.type, + &moveData.move.dstTmpAllocation) == VK_SUCCESS) + { + m_Moves.push_back(moveData.move); + if (IncrementCounters(moveData.size)) + return true; + } + } + } + } + } + } + return false; +} + +bool VmaDefragmentationContext_T::ComputeDefragmentation_Extensive(VmaBlockVector& vector, size_t index) +{ + // First free single block, then populate it to the brim, then free another block, and so on + + // Fallback to previous algorithm since without granularity conflicts it can achieve max packing + if (vector.m_BufferImageGranularity == 1) + return ComputeDefragmentation_Full(vector); + + VMA_ASSERT(m_AlgorithmState != VMA_NULL); + + StateExtensive& vectorState = reinterpret_cast(m_AlgorithmState)[index]; + + bool texturePresent = false, bufferPresent = false, otherPresent = false; + switch (vectorState.operation) + { + case StateExtensive::Operation::Done: // Vector defragmented + return false; + case StateExtensive::Operation::FindFreeBlockBuffer: + case StateExtensive::Operation::FindFreeBlockTexture: + case StateExtensive::Operation::FindFreeBlockAll: + { + // No more blocks to free, just perform fast realloc and move to cleanup + if (vectorState.firstFreeBlock == 0) + { + vectorState.operation = StateExtensive::Operation::Cleanup; + return ComputeDefragmentation_Fast(vector); + } + + // No free blocks, have to clear last one + size_t last = (vectorState.firstFreeBlock == SIZE_MAX ? 
vector.GetBlockCount() : vectorState.firstFreeBlock) - 1; + VmaBlockMetadata* freeMetadata = vector.GetBlock(last)->m_pMetadata; + + const size_t prevMoveCount = m_Moves.size(); + for (VmaAllocHandle handle = freeMetadata->GetAllocationListBegin(); + handle != VK_NULL_HANDLE; + handle = freeMetadata->GetNextAllocation(handle)) + { + MoveAllocationData moveData = GetMoveData(handle, freeMetadata); + switch (CheckCounters(moveData.move.srcAllocation->GetSize())) + { + case CounterStatus::Ignore: + continue; + case CounterStatus::End: + return true; + case CounterStatus::Pass: + break; + default: + VMA_ASSERT(0); + } + + // Check all previous blocks for free space + if (AllocInOtherBlock(0, last, moveData, vector)) + { + // Full clear performed already + if (prevMoveCount != m_Moves.size() && freeMetadata->GetNextAllocation(handle) == VK_NULL_HANDLE) + vectorState.firstFreeBlock = last; + return true; + } + } + + if (prevMoveCount == m_Moves.size()) + { + // Cannot perform full clear, have to move data in other blocks around + if (last != 0) + { + for (size_t i = last - 1; i; --i) + { + if (ReallocWithinBlock(vector, vector.GetBlock(i))) + return true; + } + } + + if (prevMoveCount == m_Moves.size()) + { + // No possible reallocs within blocks, try to move them around fast + return ComputeDefragmentation_Fast(vector); + } + } + else + { + switch (vectorState.operation) + { + case StateExtensive::Operation::FindFreeBlockBuffer: + vectorState.operation = StateExtensive::Operation::MoveBuffers; + break; + case StateExtensive::Operation::FindFreeBlockTexture: + vectorState.operation = StateExtensive::Operation::MoveTextures; + break; + case StateExtensive::Operation::FindFreeBlockAll: + vectorState.operation = StateExtensive::Operation::MoveAll; + break; + default: + VMA_ASSERT(0); + vectorState.operation = StateExtensive::Operation::MoveTextures; + } + vectorState.firstFreeBlock = last; + // Nothing done, block found without reallocations, can perform another reallocs in same pass + return ComputeDefragmentation_Extensive(vector, index); + } + break; + } + case StateExtensive::Operation::MoveTextures: + { + if (MoveDataToFreeBlocks(VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL, vector, + vectorState.firstFreeBlock, texturePresent, bufferPresent, otherPresent)) + { + if (texturePresent) + { + vectorState.operation = StateExtensive::Operation::FindFreeBlockTexture; + return ComputeDefragmentation_Extensive(vector, index); + } + + if (!bufferPresent && !otherPresent) + { + vectorState.operation = StateExtensive::Operation::Cleanup; + break; + } + + // No more textures to move, check buffers + vectorState.operation = StateExtensive::Operation::MoveBuffers; + bufferPresent = false; + otherPresent = false; + } + else + break; + VMA_FALLTHROUGH; // Fallthrough + } + case StateExtensive::Operation::MoveBuffers: + { + if (MoveDataToFreeBlocks(VMA_SUBALLOCATION_TYPE_BUFFER, vector, + vectorState.firstFreeBlock, texturePresent, bufferPresent, otherPresent)) + { + if (bufferPresent) + { + vectorState.operation = StateExtensive::Operation::FindFreeBlockBuffer; + return ComputeDefragmentation_Extensive(vector, index); + } + + if (!otherPresent) + { + vectorState.operation = StateExtensive::Operation::Cleanup; + break; + } + + // No more buffers to move, check all others + vectorState.operation = StateExtensive::Operation::MoveAll; + otherPresent = false; + } + else + break; + VMA_FALLTHROUGH; // Fallthrough + } + case StateExtensive::Operation::MoveAll: + { + if (MoveDataToFreeBlocks(VMA_SUBALLOCATION_TYPE_FREE, 
vector, + vectorState.firstFreeBlock, texturePresent, bufferPresent, otherPresent)) + { + if (otherPresent) + { + vectorState.operation = StateExtensive::Operation::FindFreeBlockBuffer; + return ComputeDefragmentation_Extensive(vector, index); + } + // Everything moved + vectorState.operation = StateExtensive::Operation::Cleanup; + } + break; + } + case StateExtensive::Operation::Cleanup: + // Cleanup is handled below so that other operations may reuse the cleanup code. This case is here to prevent the unhandled enum value warning (C4062). + break; + } + + if (vectorState.operation == StateExtensive::Operation::Cleanup) + { + // All other work done, pack data in blocks even tighter if possible + const size_t prevMoveCount = m_Moves.size(); + for (size_t i = 0; i < vector.GetBlockCount(); ++i) + { + if (ReallocWithinBlock(vector, vector.GetBlock(i))) + return true; + } + + if (prevMoveCount == m_Moves.size()) + vectorState.operation = StateExtensive::Operation::Done; + } + return false; +} + +void VmaDefragmentationContext_T::UpdateVectorStatistics(VmaBlockVector& vector, StateBalanced& state) +{ + size_t allocCount = 0; + size_t freeCount = 0; + state.avgFreeSize = 0; + state.avgAllocSize = 0; + + for (size_t i = 0; i < vector.GetBlockCount(); ++i) + { + VmaBlockMetadata* metadata = vector.GetBlock(i)->m_pMetadata; + + allocCount += metadata->GetAllocationCount(); + freeCount += metadata->GetFreeRegionsCount(); + state.avgFreeSize += metadata->GetSumFreeSize(); + state.avgAllocSize += metadata->GetSize(); + } + + state.avgAllocSize = (state.avgAllocSize - state.avgFreeSize) / allocCount; + state.avgFreeSize /= freeCount; +} + +bool VmaDefragmentationContext_T::MoveDataToFreeBlocks(VmaSuballocationType currentType, + VmaBlockVector& vector, size_t firstFreeBlock, + bool& texturePresent, bool& bufferPresent, bool& otherPresent) +{ + const size_t prevMoveCount = m_Moves.size(); + for (size_t i = firstFreeBlock ; i;) + { + VmaDeviceMemoryBlock* block = vector.GetBlock(--i); + VmaBlockMetadata* metadata = block->m_pMetadata; + + for (VmaAllocHandle handle = metadata->GetAllocationListBegin(); + handle != VK_NULL_HANDLE; + handle = metadata->GetNextAllocation(handle)) + { + MoveAllocationData moveData = GetMoveData(handle, metadata); + // Ignore newly created allocations by defragmentation algorithm + if (moveData.move.srcAllocation->GetUserData() == this) + continue; + switch (CheckCounters(moveData.move.srcAllocation->GetSize())) + { + case CounterStatus::Ignore: + continue; + case CounterStatus::End: + return true; + case CounterStatus::Pass: + break; + default: + VMA_ASSERT(0); + } + + // Move only single type of resources at once + if (!VmaIsBufferImageGranularityConflict(moveData.type, currentType)) + { + // Try to fit allocation into free blocks + if (AllocInOtherBlock(firstFreeBlock, vector.GetBlockCount(), moveData, vector)) + return false; + } + + if (!VmaIsBufferImageGranularityConflict(moveData.type, VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL)) + texturePresent = true; + else if (!VmaIsBufferImageGranularityConflict(moveData.type, VMA_SUBALLOCATION_TYPE_BUFFER)) + bufferPresent = true; + else + otherPresent = true; + } + } + return prevMoveCount == m_Moves.size(); +} +#endif // _VMA_DEFRAGMENTATION_CONTEXT_FUNCTIONS + +#ifndef _VMA_POOL_T_FUNCTIONS +VmaPool_T::VmaPool_T( + VmaAllocator hAllocator, + const VmaPoolCreateInfo& createInfo, + VkDeviceSize preferredBlockSize) + : m_BlockVector( + hAllocator, + this, // hParentPool + createInfo.memoryTypeIndex, + createInfo.blockSize != 0 ? 
createInfo.blockSize : preferredBlockSize, + createInfo.minBlockCount, + createInfo.maxBlockCount, + (createInfo.flags& VMA_POOL_CREATE_IGNORE_BUFFER_IMAGE_GRANULARITY_BIT) != 0 ? 1 : hAllocator->GetBufferImageGranularity(), + createInfo.blockSize != 0, // explicitBlockSize + createInfo.flags & VMA_POOL_CREATE_ALGORITHM_MASK, // algorithm + createInfo.priority, + VMA_MAX(hAllocator->GetMemoryTypeMinAlignment(createInfo.memoryTypeIndex), createInfo.minAllocationAlignment), + createInfo.pMemoryAllocateNext), + m_Id(0), + m_Name(VMA_NULL) {} + +VmaPool_T::~VmaPool_T() +{ + VMA_ASSERT(m_PrevPool == VMA_NULL && m_NextPool == VMA_NULL); + + const VkAllocationCallbacks* allocs = m_BlockVector.GetAllocator()->GetAllocationCallbacks(); + VmaFreeString(allocs, m_Name); +} + +void VmaPool_T::SetName(const char* pName) +{ + const VkAllocationCallbacks* allocs = m_BlockVector.GetAllocator()->GetAllocationCallbacks(); + VmaFreeString(allocs, m_Name); + + if (pName != VMA_NULL) + { + m_Name = VmaCreateStringCopy(allocs, pName); + } + else + { + m_Name = VMA_NULL; + } +} +#endif // _VMA_POOL_T_FUNCTIONS + +#ifndef _VMA_ALLOCATOR_T_FUNCTIONS +VmaAllocator_T::VmaAllocator_T(const VmaAllocatorCreateInfo* pCreateInfo) : + m_UseMutex((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_EXTERNALLY_SYNCHRONIZED_BIT) == 0), + m_VulkanApiVersion(pCreateInfo->vulkanApiVersion != 0 ? pCreateInfo->vulkanApiVersion : VK_API_VERSION_1_0), + m_UseKhrDedicatedAllocation((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT) != 0), + m_UseKhrBindMemory2((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT) != 0), + m_UseExtMemoryBudget((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT) != 0), + m_UseAmdDeviceCoherentMemory((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_AMD_DEVICE_COHERENT_MEMORY_BIT) != 0), + m_UseKhrBufferDeviceAddress((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT) != 0), + m_UseExtMemoryPriority((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_EXT_MEMORY_PRIORITY_BIT) != 0), + m_UseKhrMaintenance4((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_KHR_MAINTENANCE4_BIT) != 0), + m_UseKhrMaintenance5((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_KHR_MAINTENANCE5_BIT) != 0), + m_hDevice(pCreateInfo->device), + m_hInstance(pCreateInfo->instance), + m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL), + m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ? + *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks), + m_AllocationObjectAllocator(&m_AllocationCallbacks), + m_HeapSizeLimitMask(0), + m_DeviceMemoryCount(0), + m_PreferredLargeHeapBlockSize(0), + m_PhysicalDevice(pCreateInfo->physicalDevice), + m_GpuDefragmentationMemoryTypeBits(UINT32_MAX), + m_NextPoolId(0), + m_GlobalMemoryTypeBits(UINT32_MAX) +{ + if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0)) + { + m_UseKhrDedicatedAllocation = false; + m_UseKhrBindMemory2 = false; + } + + if(VMA_DEBUG_DETECT_CORRUPTION) + { + // Needs to be multiply of uint32_t size because we are going to write VMA_CORRUPTION_DETECTION_MAGIC_VALUE to it. 
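+    // These compile-time checks belong to the optional corruption-detection feature. A minimal
+    // sketch of how it is enabled from user code before this header is included (the margin value
+    // below is only an example; it must stay a multiple of sizeof(uint32_t), as asserted underneath):
+    //
+    //     #define VMA_DEBUG_MARGIN 16
+    //     #define VMA_DEBUG_DETECT_CORRUPTION 1
+    //     #define VMA_IMPLEMENTATION
+    //     #include "vk_mem_alloc.h"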
+ VMA_ASSERT(VMA_DEBUG_MARGIN % sizeof(uint32_t) == 0); + } + + VMA_ASSERT(pCreateInfo->physicalDevice && pCreateInfo->device && pCreateInfo->instance); + + if(m_VulkanApiVersion < VK_MAKE_VERSION(1, 1, 0)) + { +#if !(VMA_DEDICATED_ALLOCATION) + if((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT) != 0) + { + VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT set but required extensions are disabled by preprocessor macros."); + } +#endif +#if !(VMA_BIND_MEMORY2) + if((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT) != 0) + { + VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT set but required extension is disabled by preprocessor macros."); + } +#endif + } +#if !(VMA_MEMORY_BUDGET) + if((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT) != 0) + { + VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT set but required extension is disabled by preprocessor macros."); + } +#endif +#if !(VMA_BUFFER_DEVICE_ADDRESS) + if(m_UseKhrBufferDeviceAddress) + { + VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT is set but required extension or Vulkan 1.2 is not available in your Vulkan header or its support in VMA has been disabled by a preprocessor macro."); + } +#endif +#if VMA_VULKAN_VERSION < 1003000 + if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 3, 0)) + { + VMA_ASSERT(0 && "vulkanApiVersion >= VK_API_VERSION_1_3 but required Vulkan version is disabled by preprocessor macros."); + } +#endif +#if VMA_VULKAN_VERSION < 1002000 + if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 2, 0)) + { + VMA_ASSERT(0 && "vulkanApiVersion >= VK_API_VERSION_1_2 but required Vulkan version is disabled by preprocessor macros."); + } +#endif +#if VMA_VULKAN_VERSION < 1001000 + if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0)) + { + VMA_ASSERT(0 && "vulkanApiVersion >= VK_API_VERSION_1_1 but required Vulkan version is disabled by preprocessor macros."); + } +#endif +#if !(VMA_MEMORY_PRIORITY) + if(m_UseExtMemoryPriority) + { + VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_EXT_MEMORY_PRIORITY_BIT is set but required extension is not available in your Vulkan header or its support in VMA has been disabled by a preprocessor macro."); + } +#endif +#if !(VMA_KHR_MAINTENANCE4) + if(m_UseKhrMaintenance4) + { + VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_KHR_MAINTENANCE4_BIT is set but required extension is not available in your Vulkan header or its support in VMA has been disabled by a preprocessor macro."); + } +#endif +#if !(VMA_KHR_MAINTENANCE5) + if(m_UseKhrMaintenance5) + { + VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_KHR_MAINTENANCE5_BIT is set but required extension is not available in your Vulkan header or its support in VMA has been disabled by a preprocessor macro."); + } +#endif + + memset(&m_DeviceMemoryCallbacks, 0 ,sizeof(m_DeviceMemoryCallbacks)); + memset(&m_PhysicalDeviceProperties, 0, sizeof(m_PhysicalDeviceProperties)); + memset(&m_MemProps, 0, sizeof(m_MemProps)); + + memset(&m_pBlockVectors, 0, sizeof(m_pBlockVectors)); + memset(&m_VulkanFunctions, 0, sizeof(m_VulkanFunctions)); + +#if VMA_EXTERNAL_MEMORY + memset(&m_TypeExternalMemoryHandleTypes, 0, sizeof(m_TypeExternalMemoryHandleTypes)); +#endif // #if VMA_EXTERNAL_MEMORY + + if(pCreateInfo->pDeviceMemoryCallbacks != VMA_NULL) + { + m_DeviceMemoryCallbacks.pUserData = pCreateInfo->pDeviceMemoryCallbacks->pUserData; + m_DeviceMemoryCallbacks.pfnAllocate = pCreateInfo->pDeviceMemoryCallbacks->pfnAllocate; + m_DeviceMemoryCallbacks.pfnFree = 
pCreateInfo->pDeviceMemoryCallbacks->pfnFree; + } + + ImportVulkanFunctions(pCreateInfo->pVulkanFunctions); + + (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties); + (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps); + + VMA_ASSERT(VmaIsPow2(VMA_MIN_ALIGNMENT)); + VMA_ASSERT(VmaIsPow2(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY)); + VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.bufferImageGranularity)); + VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.nonCoherentAtomSize)); + + m_PreferredLargeHeapBlockSize = (pCreateInfo->preferredLargeHeapBlockSize != 0) ? + pCreateInfo->preferredLargeHeapBlockSize : static_cast(VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE); + + m_GlobalMemoryTypeBits = CalculateGlobalMemoryTypeBits(); + +#if VMA_EXTERNAL_MEMORY + if(pCreateInfo->pTypeExternalMemoryHandleTypes != VMA_NULL) + { + memcpy(m_TypeExternalMemoryHandleTypes, pCreateInfo->pTypeExternalMemoryHandleTypes, + sizeof(VkExternalMemoryHandleTypeFlagsKHR) * GetMemoryTypeCount()); + } +#endif // #if VMA_EXTERNAL_MEMORY + + if(pCreateInfo->pHeapSizeLimit != VMA_NULL) + { + for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex) + { + const VkDeviceSize limit = pCreateInfo->pHeapSizeLimit[heapIndex]; + if(limit != VK_WHOLE_SIZE) + { + m_HeapSizeLimitMask |= 1u << heapIndex; + if(limit < m_MemProps.memoryHeaps[heapIndex].size) + { + m_MemProps.memoryHeaps[heapIndex].size = limit; + } + } + } + } + + for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex) + { + // Create only supported types + if((m_GlobalMemoryTypeBits & (1u << memTypeIndex)) != 0) + { + const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex); + m_pBlockVectors[memTypeIndex] = vma_new(this, VmaBlockVector)( + this, + VK_NULL_HANDLE, // hParentPool + memTypeIndex, + preferredBlockSize, + 0, + SIZE_MAX, + GetBufferImageGranularity(), + false, // explicitBlockSize + 0, // algorithm + 0.5f, // priority (0.5 is the default per Vulkan spec) + GetMemoryTypeMinAlignment(memTypeIndex), // minAllocationAlignment + VMA_NULL); // // pMemoryAllocateNext + // No need to call m_pBlockVectors[memTypeIndex][blockVectorTypeIndex]->CreateMinBlocks here, + // because minBlockCount is 0. 
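+    // The preferred block size fed into these default block vectors can be steered from user code
+    // when the allocator is created. A minimal sketch, assuming valid `instance`, `physicalDevice`
+    // and `device` handles:
+    //
+    //     VmaAllocatorCreateInfo allocatorInfo = {};
+    //     allocatorInfo.vulkanApiVersion = VK_API_VERSION_1_1;
+    //     allocatorInfo.instance = instance;
+    //     allocatorInfo.physicalDevice = physicalDevice;
+    //     allocatorInfo.device = device;
+    //     allocatorInfo.preferredLargeHeapBlockSize = 64ull * 1024 * 1024; // override the built-in default
+    //     VmaAllocator allocator = VK_NULL_HANDLE;
+    //     VkResult res = vmaCreateAllocator(&allocatorInfo, &allocator);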
+ } + } +} + +VkResult VmaAllocator_T::Init(const VmaAllocatorCreateInfo* pCreateInfo) +{ + VkResult res = VK_SUCCESS; + +#if VMA_MEMORY_BUDGET + if(m_UseExtMemoryBudget) + { + UpdateVulkanBudget(); + } +#endif // #if VMA_MEMORY_BUDGET + + return res; +} + +VmaAllocator_T::~VmaAllocator_T() +{ + VMA_ASSERT(m_Pools.IsEmpty()); + + for(size_t memTypeIndex = GetMemoryTypeCount(); memTypeIndex--; ) + { + vma_delete(this, m_pBlockVectors[memTypeIndex]); + } +} + +void VmaAllocator_T::ImportVulkanFunctions(const VmaVulkanFunctions* pVulkanFunctions) +{ +#if VMA_STATIC_VULKAN_FUNCTIONS == 1 + ImportVulkanFunctions_Static(); +#endif + + if(pVulkanFunctions != VMA_NULL) + { + ImportVulkanFunctions_Custom(pVulkanFunctions); + } + +#if VMA_DYNAMIC_VULKAN_FUNCTIONS == 1 + ImportVulkanFunctions_Dynamic(); +#endif + + ValidateVulkanFunctions(); +} + +#if VMA_STATIC_VULKAN_FUNCTIONS == 1 + +void VmaAllocator_T::ImportVulkanFunctions_Static() +{ + // Vulkan 1.0 + m_VulkanFunctions.vkGetInstanceProcAddr = (PFN_vkGetInstanceProcAddr)vkGetInstanceProcAddr; + m_VulkanFunctions.vkGetDeviceProcAddr = (PFN_vkGetDeviceProcAddr)vkGetDeviceProcAddr; + m_VulkanFunctions.vkGetPhysicalDeviceProperties = (PFN_vkGetPhysicalDeviceProperties)vkGetPhysicalDeviceProperties; + m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = (PFN_vkGetPhysicalDeviceMemoryProperties)vkGetPhysicalDeviceMemoryProperties; + m_VulkanFunctions.vkAllocateMemory = (PFN_vkAllocateMemory)vkAllocateMemory; + m_VulkanFunctions.vkFreeMemory = (PFN_vkFreeMemory)vkFreeMemory; + m_VulkanFunctions.vkMapMemory = (PFN_vkMapMemory)vkMapMemory; + m_VulkanFunctions.vkUnmapMemory = (PFN_vkUnmapMemory)vkUnmapMemory; + m_VulkanFunctions.vkFlushMappedMemoryRanges = (PFN_vkFlushMappedMemoryRanges)vkFlushMappedMemoryRanges; + m_VulkanFunctions.vkInvalidateMappedMemoryRanges = (PFN_vkInvalidateMappedMemoryRanges)vkInvalidateMappedMemoryRanges; + m_VulkanFunctions.vkBindBufferMemory = (PFN_vkBindBufferMemory)vkBindBufferMemory; + m_VulkanFunctions.vkBindImageMemory = (PFN_vkBindImageMemory)vkBindImageMemory; + m_VulkanFunctions.vkGetBufferMemoryRequirements = (PFN_vkGetBufferMemoryRequirements)vkGetBufferMemoryRequirements; + m_VulkanFunctions.vkGetImageMemoryRequirements = (PFN_vkGetImageMemoryRequirements)vkGetImageMemoryRequirements; + m_VulkanFunctions.vkCreateBuffer = (PFN_vkCreateBuffer)vkCreateBuffer; + m_VulkanFunctions.vkDestroyBuffer = (PFN_vkDestroyBuffer)vkDestroyBuffer; + m_VulkanFunctions.vkCreateImage = (PFN_vkCreateImage)vkCreateImage; + m_VulkanFunctions.vkDestroyImage = (PFN_vkDestroyImage)vkDestroyImage; + m_VulkanFunctions.vkCmdCopyBuffer = (PFN_vkCmdCopyBuffer)vkCmdCopyBuffer; + + // Vulkan 1.1 +#if VMA_VULKAN_VERSION >= 1001000 + if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0)) + { + m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR = (PFN_vkGetBufferMemoryRequirements2)vkGetBufferMemoryRequirements2; + m_VulkanFunctions.vkGetImageMemoryRequirements2KHR = (PFN_vkGetImageMemoryRequirements2)vkGetImageMemoryRequirements2; + m_VulkanFunctions.vkBindBufferMemory2KHR = (PFN_vkBindBufferMemory2)vkBindBufferMemory2; + m_VulkanFunctions.vkBindImageMemory2KHR = (PFN_vkBindImageMemory2)vkBindImageMemory2; + } +#endif + +#if VMA_VULKAN_VERSION >= 1001000 + if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0)) + { + m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties2KHR = (PFN_vkGetPhysicalDeviceMemoryProperties2)vkGetPhysicalDeviceMemoryProperties2; + } +#endif + +#if VMA_VULKAN_VERSION >= 1003000 + if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 3, 0)) 
+ { + m_VulkanFunctions.vkGetDeviceBufferMemoryRequirements = (PFN_vkGetDeviceBufferMemoryRequirements)vkGetDeviceBufferMemoryRequirements; + m_VulkanFunctions.vkGetDeviceImageMemoryRequirements = (PFN_vkGetDeviceImageMemoryRequirements)vkGetDeviceImageMemoryRequirements; + } +#endif +} + +#endif // VMA_STATIC_VULKAN_FUNCTIONS == 1 + +void VmaAllocator_T::ImportVulkanFunctions_Custom(const VmaVulkanFunctions* pVulkanFunctions) +{ + VMA_ASSERT(pVulkanFunctions != VMA_NULL); + +#define VMA_COPY_IF_NOT_NULL(funcName) \ + if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName; + + VMA_COPY_IF_NOT_NULL(vkGetInstanceProcAddr); + VMA_COPY_IF_NOT_NULL(vkGetDeviceProcAddr); + VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties); + VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties); + VMA_COPY_IF_NOT_NULL(vkAllocateMemory); + VMA_COPY_IF_NOT_NULL(vkFreeMemory); + VMA_COPY_IF_NOT_NULL(vkMapMemory); + VMA_COPY_IF_NOT_NULL(vkUnmapMemory); + VMA_COPY_IF_NOT_NULL(vkFlushMappedMemoryRanges); + VMA_COPY_IF_NOT_NULL(vkInvalidateMappedMemoryRanges); + VMA_COPY_IF_NOT_NULL(vkBindBufferMemory); + VMA_COPY_IF_NOT_NULL(vkBindImageMemory); + VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements); + VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements); + VMA_COPY_IF_NOT_NULL(vkCreateBuffer); + VMA_COPY_IF_NOT_NULL(vkDestroyBuffer); + VMA_COPY_IF_NOT_NULL(vkCreateImage); + VMA_COPY_IF_NOT_NULL(vkDestroyImage); + VMA_COPY_IF_NOT_NULL(vkCmdCopyBuffer); + +#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000 + VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR); + VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR); +#endif + +#if VMA_BIND_MEMORY2 || VMA_VULKAN_VERSION >= 1001000 + VMA_COPY_IF_NOT_NULL(vkBindBufferMemory2KHR); + VMA_COPY_IF_NOT_NULL(vkBindImageMemory2KHR); +#endif + +#if VMA_MEMORY_BUDGET || VMA_VULKAN_VERSION >= 1001000 + VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties2KHR); +#endif + +#if VMA_KHR_MAINTENANCE4 || VMA_VULKAN_VERSION >= 1003000 + VMA_COPY_IF_NOT_NULL(vkGetDeviceBufferMemoryRequirements); + VMA_COPY_IF_NOT_NULL(vkGetDeviceImageMemoryRequirements); +#endif + +#undef VMA_COPY_IF_NOT_NULL +} + +#if VMA_DYNAMIC_VULKAN_FUNCTIONS == 1 + +void VmaAllocator_T::ImportVulkanFunctions_Dynamic() +{ + VMA_ASSERT(m_VulkanFunctions.vkGetInstanceProcAddr && m_VulkanFunctions.vkGetDeviceProcAddr && + "To use VMA_DYNAMIC_VULKAN_FUNCTIONS in new versions of VMA you now have to pass " + "VmaVulkanFunctions::vkGetInstanceProcAddr and vkGetDeviceProcAddr as VmaAllocatorCreateInfo::pVulkanFunctions. 
" + "Other members can be null."); + +#define VMA_FETCH_INSTANCE_FUNC(memberName, functionPointerType, functionNameString) \ + if(m_VulkanFunctions.memberName == VMA_NULL) \ + m_VulkanFunctions.memberName = \ + (functionPointerType)m_VulkanFunctions.vkGetInstanceProcAddr(m_hInstance, functionNameString); +#define VMA_FETCH_DEVICE_FUNC(memberName, functionPointerType, functionNameString) \ + if(m_VulkanFunctions.memberName == VMA_NULL) \ + m_VulkanFunctions.memberName = \ + (functionPointerType)m_VulkanFunctions.vkGetDeviceProcAddr(m_hDevice, functionNameString); + + VMA_FETCH_INSTANCE_FUNC(vkGetPhysicalDeviceProperties, PFN_vkGetPhysicalDeviceProperties, "vkGetPhysicalDeviceProperties"); + VMA_FETCH_INSTANCE_FUNC(vkGetPhysicalDeviceMemoryProperties, PFN_vkGetPhysicalDeviceMemoryProperties, "vkGetPhysicalDeviceMemoryProperties"); + VMA_FETCH_DEVICE_FUNC(vkAllocateMemory, PFN_vkAllocateMemory, "vkAllocateMemory"); + VMA_FETCH_DEVICE_FUNC(vkFreeMemory, PFN_vkFreeMemory, "vkFreeMemory"); + VMA_FETCH_DEVICE_FUNC(vkMapMemory, PFN_vkMapMemory, "vkMapMemory"); + VMA_FETCH_DEVICE_FUNC(vkUnmapMemory, PFN_vkUnmapMemory, "vkUnmapMemory"); + VMA_FETCH_DEVICE_FUNC(vkFlushMappedMemoryRanges, PFN_vkFlushMappedMemoryRanges, "vkFlushMappedMemoryRanges"); + VMA_FETCH_DEVICE_FUNC(vkInvalidateMappedMemoryRanges, PFN_vkInvalidateMappedMemoryRanges, "vkInvalidateMappedMemoryRanges"); + VMA_FETCH_DEVICE_FUNC(vkBindBufferMemory, PFN_vkBindBufferMemory, "vkBindBufferMemory"); + VMA_FETCH_DEVICE_FUNC(vkBindImageMemory, PFN_vkBindImageMemory, "vkBindImageMemory"); + VMA_FETCH_DEVICE_FUNC(vkGetBufferMemoryRequirements, PFN_vkGetBufferMemoryRequirements, "vkGetBufferMemoryRequirements"); + VMA_FETCH_DEVICE_FUNC(vkGetImageMemoryRequirements, PFN_vkGetImageMemoryRequirements, "vkGetImageMemoryRequirements"); + VMA_FETCH_DEVICE_FUNC(vkCreateBuffer, PFN_vkCreateBuffer, "vkCreateBuffer"); + VMA_FETCH_DEVICE_FUNC(vkDestroyBuffer, PFN_vkDestroyBuffer, "vkDestroyBuffer"); + VMA_FETCH_DEVICE_FUNC(vkCreateImage, PFN_vkCreateImage, "vkCreateImage"); + VMA_FETCH_DEVICE_FUNC(vkDestroyImage, PFN_vkDestroyImage, "vkDestroyImage"); + VMA_FETCH_DEVICE_FUNC(vkCmdCopyBuffer, PFN_vkCmdCopyBuffer, "vkCmdCopyBuffer"); + +#if VMA_VULKAN_VERSION >= 1001000 + if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0)) + { + VMA_FETCH_DEVICE_FUNC(vkGetBufferMemoryRequirements2KHR, PFN_vkGetBufferMemoryRequirements2, "vkGetBufferMemoryRequirements2"); + VMA_FETCH_DEVICE_FUNC(vkGetImageMemoryRequirements2KHR, PFN_vkGetImageMemoryRequirements2, "vkGetImageMemoryRequirements2"); + VMA_FETCH_DEVICE_FUNC(vkBindBufferMemory2KHR, PFN_vkBindBufferMemory2, "vkBindBufferMemory2"); + VMA_FETCH_DEVICE_FUNC(vkBindImageMemory2KHR, PFN_vkBindImageMemory2, "vkBindImageMemory2"); + } +#endif + +#if VMA_MEMORY_BUDGET || VMA_VULKAN_VERSION >= 1001000 + if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0)) + { + VMA_FETCH_INSTANCE_FUNC(vkGetPhysicalDeviceMemoryProperties2KHR, PFN_vkGetPhysicalDeviceMemoryProperties2KHR, "vkGetPhysicalDeviceMemoryProperties2"); + } + else if(m_UseExtMemoryBudget) + { + VMA_FETCH_INSTANCE_FUNC(vkGetPhysicalDeviceMemoryProperties2KHR, PFN_vkGetPhysicalDeviceMemoryProperties2KHR, "vkGetPhysicalDeviceMemoryProperties2KHR"); + } +#endif + +#if VMA_DEDICATED_ALLOCATION + if(m_UseKhrDedicatedAllocation) + { + VMA_FETCH_DEVICE_FUNC(vkGetBufferMemoryRequirements2KHR, PFN_vkGetBufferMemoryRequirements2KHR, "vkGetBufferMemoryRequirements2KHR"); + VMA_FETCH_DEVICE_FUNC(vkGetImageMemoryRequirements2KHR, PFN_vkGetImageMemoryRequirements2KHR, 
"vkGetImageMemoryRequirements2KHR"); + } +#endif + +#if VMA_BIND_MEMORY2 + if(m_UseKhrBindMemory2) + { + VMA_FETCH_DEVICE_FUNC(vkBindBufferMemory2KHR, PFN_vkBindBufferMemory2KHR, "vkBindBufferMemory2KHR"); + VMA_FETCH_DEVICE_FUNC(vkBindImageMemory2KHR, PFN_vkBindImageMemory2KHR, "vkBindImageMemory2KHR"); + } +#endif // #if VMA_BIND_MEMORY2 + +#if VMA_MEMORY_BUDGET || VMA_VULKAN_VERSION >= 1001000 + if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0)) + { + VMA_FETCH_INSTANCE_FUNC(vkGetPhysicalDeviceMemoryProperties2KHR, PFN_vkGetPhysicalDeviceMemoryProperties2KHR, "vkGetPhysicalDeviceMemoryProperties2"); + } + else if(m_UseExtMemoryBudget) + { + VMA_FETCH_INSTANCE_FUNC(vkGetPhysicalDeviceMemoryProperties2KHR, PFN_vkGetPhysicalDeviceMemoryProperties2KHR, "vkGetPhysicalDeviceMemoryProperties2KHR"); + } +#endif // #if VMA_MEMORY_BUDGET + +#if VMA_VULKAN_VERSION >= 1003000 + if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 3, 0)) + { + VMA_FETCH_DEVICE_FUNC(vkGetDeviceBufferMemoryRequirements, PFN_vkGetDeviceBufferMemoryRequirements, "vkGetDeviceBufferMemoryRequirements"); + VMA_FETCH_DEVICE_FUNC(vkGetDeviceImageMemoryRequirements, PFN_vkGetDeviceImageMemoryRequirements, "vkGetDeviceImageMemoryRequirements"); + } +#endif +#if VMA_KHR_MAINTENANCE4 + if(m_UseKhrMaintenance4) + { + VMA_FETCH_DEVICE_FUNC(vkGetDeviceBufferMemoryRequirements, PFN_vkGetDeviceBufferMemoryRequirementsKHR, "vkGetDeviceBufferMemoryRequirementsKHR"); + VMA_FETCH_DEVICE_FUNC(vkGetDeviceImageMemoryRequirements, PFN_vkGetDeviceImageMemoryRequirementsKHR, "vkGetDeviceImageMemoryRequirementsKHR"); + } +#endif + +#undef VMA_FETCH_DEVICE_FUNC +#undef VMA_FETCH_INSTANCE_FUNC +} + +#endif // VMA_DYNAMIC_VULKAN_FUNCTIONS == 1 + +void VmaAllocator_T::ValidateVulkanFunctions() +{ + VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL); + VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL); + VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL); + VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL); + VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL); + VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL); + VMA_ASSERT(m_VulkanFunctions.vkFlushMappedMemoryRanges != VMA_NULL); + VMA_ASSERT(m_VulkanFunctions.vkInvalidateMappedMemoryRanges != VMA_NULL); + VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL); + VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL); + VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL); + VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL); + VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL); + VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL); + VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL); + VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL); + VMA_ASSERT(m_VulkanFunctions.vkCmdCopyBuffer != VMA_NULL); + +#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000 + if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0) || m_UseKhrDedicatedAllocation) + { + VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL); + VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL); + } +#endif + +#if VMA_BIND_MEMORY2 || VMA_VULKAN_VERSION >= 1001000 + if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0) || m_UseKhrBindMemory2) + { + VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory2KHR != VMA_NULL); + VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory2KHR != VMA_NULL); + } +#endif + +#if VMA_MEMORY_BUDGET || VMA_VULKAN_VERSION >= 1001000 + 
if(m_UseExtMemoryBudget || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0)) + { + VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties2KHR != VMA_NULL); + } +#endif + + // Not validating these due to suspected driver bugs with these function + // pointers being null despite correct extension or Vulkan version is enabled. + // See issue #397. Their usage in VMA is optional anyway. + // + // VMA_ASSERT(m_VulkanFunctions.vkGetDeviceBufferMemoryRequirements != VMA_NULL); + // VMA_ASSERT(m_VulkanFunctions.vkGetDeviceImageMemoryRequirements != VMA_NULL); +} + +VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex) +{ + const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex); + const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size; + const bool isSmallHeap = heapSize <= VMA_SMALL_HEAP_MAX_SIZE; + return VmaAlignUp(isSmallHeap ? (heapSize / 8) : m_PreferredLargeHeapBlockSize, (VkDeviceSize)32); +} + +VkResult VmaAllocator_T::AllocateMemoryOfType( + VmaPool pool, + VkDeviceSize size, + VkDeviceSize alignment, + bool dedicatedPreferred, + VkBuffer dedicatedBuffer, + VkImage dedicatedImage, + VmaBufferImageUsage dedicatedBufferImageUsage, + const VmaAllocationCreateInfo& createInfo, + uint32_t memTypeIndex, + VmaSuballocationType suballocType, + VmaDedicatedAllocationList& dedicatedAllocations, + VmaBlockVector& blockVector, + size_t allocationCount, + VmaAllocation* pAllocations) +{ + VMA_ASSERT(pAllocations != VMA_NULL); + VMA_DEBUG_LOG_FORMAT(" AllocateMemory: MemoryTypeIndex=%" PRIu32 ", AllocationCount=%zu, Size=%" PRIu64, memTypeIndex, allocationCount, size); + + VmaAllocationCreateInfo finalCreateInfo = createInfo; + VkResult res = CalcMemTypeParams( + finalCreateInfo, + memTypeIndex, + size, + allocationCount); + if(res != VK_SUCCESS) + return res; + + if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT) != 0) + { + return AllocateDedicatedMemory( + pool, + size, + suballocType, + dedicatedAllocations, + memTypeIndex, + (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0, + (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0, + (finalCreateInfo.flags & + (VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT | VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT)) != 0, + (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_CAN_ALIAS_BIT) != 0, + finalCreateInfo.pUserData, + finalCreateInfo.priority, + dedicatedBuffer, + dedicatedImage, + dedicatedBufferImageUsage, + allocationCount, + pAllocations, + blockVector.GetAllocationNextPtr()); + } + else + { + const bool canAllocateDedicated = + (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) == 0 && + (pool == VK_NULL_HANDLE || !blockVector.HasExplicitBlockSize()); + + if(canAllocateDedicated) + { + // Heuristics: Allocate dedicated memory if requested size if greater than half of preferred block size. + if(size > blockVector.GetPreferredBlockSize() / 2) + { + dedicatedPreferred = true; + } + // Protection against creating each allocation as dedicated when we reach or exceed heap size/budget, + // which can quickly deplete maxMemoryAllocationCount: Don't prefer dedicated allocations when above + // 3/4 of the maximum allocation count. 
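+            // This preference can also be forced or forbidden per allocation from user code. A
+            // minimal sketch, assuming an existing `allocator` and a filled VkBufferCreateInfo
+            // `bufferInfo`:
+            //
+            //     VmaAllocationCreateInfo allocCreateInfo = {};
+            //     allocCreateInfo.usage = VMA_MEMORY_USAGE_AUTO;
+            //     allocCreateInfo.flags = VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT; // own VkDeviceMemory, no suballocation
+            //     VkBuffer buffer = VK_NULL_HANDLE;
+            //     VmaAllocation allocation = VK_NULL_HANDLE;
+            //     vmaCreateBuffer(allocator, &bufferInfo, &allocCreateInfo, &buffer, &allocation, nullptr);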
+ if(m_PhysicalDeviceProperties.limits.maxMemoryAllocationCount < UINT32_MAX / 4 && + m_DeviceMemoryCount.load() > m_PhysicalDeviceProperties.limits.maxMemoryAllocationCount * 3 / 4) + { + dedicatedPreferred = false; + } + + if(dedicatedPreferred) + { + res = AllocateDedicatedMemory( + pool, + size, + suballocType, + dedicatedAllocations, + memTypeIndex, + (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0, + (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0, + (finalCreateInfo.flags & + (VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT | VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT)) != 0, + (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_CAN_ALIAS_BIT) != 0, + finalCreateInfo.pUserData, + finalCreateInfo.priority, + dedicatedBuffer, + dedicatedImage, + dedicatedBufferImageUsage, + allocationCount, + pAllocations, + blockVector.GetAllocationNextPtr()); + if(res == VK_SUCCESS) + { + // Succeeded: AllocateDedicatedMemory function already filled pMemory, nothing more to do here. + VMA_DEBUG_LOG(" Allocated as DedicatedMemory"); + return VK_SUCCESS; + } + } + } + + res = blockVector.Allocate( + size, + alignment, + finalCreateInfo, + suballocType, + allocationCount, + pAllocations); + if(res == VK_SUCCESS) + return VK_SUCCESS; + + // Try dedicated memory. + if(canAllocateDedicated && !dedicatedPreferred) + { + res = AllocateDedicatedMemory( + pool, + size, + suballocType, + dedicatedAllocations, + memTypeIndex, + (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0, + (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0, + (finalCreateInfo.flags & + (VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT | VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT)) != 0, + (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_CAN_ALIAS_BIT) != 0, + finalCreateInfo.pUserData, + finalCreateInfo.priority, + dedicatedBuffer, + dedicatedImage, + dedicatedBufferImageUsage, + allocationCount, + pAllocations, + blockVector.GetAllocationNextPtr()); + if(res == VK_SUCCESS) + { + // Succeeded: AllocateDedicatedMemory function already filled pMemory, nothing more to do here. + VMA_DEBUG_LOG(" Allocated as DedicatedMemory"); + return VK_SUCCESS; + } + } + // Everything failed: Return error code. 
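+        // On the calling side such a failure usually surfaces as VK_ERROR_OUT_OF_DEVICE_MEMORY.
+        // One hedged way to fail early instead of oversubscribing, assuming `allocator` and a
+        // filled VkBufferCreateInfo `bufferInfo`:
+        //
+        //     VmaAllocationCreateInfo allocCreateInfo = {};
+        //     allocCreateInfo.usage = VMA_MEMORY_USAGE_AUTO;
+        //     allocCreateInfo.flags = VMA_ALLOCATION_CREATE_WITHIN_BUDGET_BIT;
+        //     VkBuffer buffer; VmaAllocation allocation;
+        //     if(vmaCreateBuffer(allocator, &bufferInfo, &allocCreateInfo, &buffer, &allocation, nullptr) != VK_SUCCESS)
+        //     {
+        //         // release cached resources and retry, or fall back to a host-visible heap
+        //     }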
+ VMA_DEBUG_LOG(" vkAllocateMemory FAILED"); + return res; + } +} + +VkResult VmaAllocator_T::AllocateDedicatedMemory( + VmaPool pool, + VkDeviceSize size, + VmaSuballocationType suballocType, + VmaDedicatedAllocationList& dedicatedAllocations, + uint32_t memTypeIndex, + bool map, + bool isUserDataString, + bool isMappingAllowed, + bool canAliasMemory, + void* pUserData, + float priority, + VkBuffer dedicatedBuffer, + VkImage dedicatedImage, + VmaBufferImageUsage dedicatedBufferImageUsage, + size_t allocationCount, + VmaAllocation* pAllocations, + const void* pNextChain) +{ + VMA_ASSERT(allocationCount > 0 && pAllocations); + + VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO }; + allocInfo.memoryTypeIndex = memTypeIndex; + allocInfo.allocationSize = size; + allocInfo.pNext = pNextChain; + +#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000 + VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR }; + if(!canAliasMemory) + { + if(m_UseKhrDedicatedAllocation || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0)) + { + if(dedicatedBuffer != VK_NULL_HANDLE) + { + VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE); + dedicatedAllocInfo.buffer = dedicatedBuffer; + VmaPnextChainPushFront(&allocInfo, &dedicatedAllocInfo); + } + else if(dedicatedImage != VK_NULL_HANDLE) + { + dedicatedAllocInfo.image = dedicatedImage; + VmaPnextChainPushFront(&allocInfo, &dedicatedAllocInfo); + } + } + } +#endif // #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000 + +#if VMA_BUFFER_DEVICE_ADDRESS + VkMemoryAllocateFlagsInfoKHR allocFlagsInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO_KHR }; + if(m_UseKhrBufferDeviceAddress) + { + bool canContainBufferWithDeviceAddress = true; + if(dedicatedBuffer != VK_NULL_HANDLE) + { + canContainBufferWithDeviceAddress = dedicatedBufferImageUsage == VmaBufferImageUsage::UNKNOWN || + dedicatedBufferImageUsage.Contains(VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_EXT); + } + else if(dedicatedImage != VK_NULL_HANDLE) + { + canContainBufferWithDeviceAddress = false; + } + if(canContainBufferWithDeviceAddress) + { + allocFlagsInfo.flags = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT_KHR; + VmaPnextChainPushFront(&allocInfo, &allocFlagsInfo); + } + } +#endif // #if VMA_BUFFER_DEVICE_ADDRESS + +#if VMA_MEMORY_PRIORITY + VkMemoryPriorityAllocateInfoEXT priorityInfo = { VK_STRUCTURE_TYPE_MEMORY_PRIORITY_ALLOCATE_INFO_EXT }; + if(m_UseExtMemoryPriority) + { + VMA_ASSERT(priority >= 0.f && priority <= 1.f); + priorityInfo.priority = priority; + VmaPnextChainPushFront(&allocInfo, &priorityInfo); + } +#endif // #if VMA_MEMORY_PRIORITY + +#if VMA_EXTERNAL_MEMORY + // Attach VkExportMemoryAllocateInfoKHR if necessary. 
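+    // The handle types attached here come from VmaAllocatorCreateInfo::pTypeExternalMemoryHandleTypes,
+    // one entry per memory type. A minimal sketch of filling it, assuming `memProps` holds the
+    // VkPhysicalDeviceMemoryProperties and `allocatorInfo` is the VmaAllocatorCreateInfo being built:
+    //
+    //     VkExternalMemoryHandleTypeFlagsKHR handleTypes[VK_MAX_MEMORY_TYPES] = {};
+    //     for(uint32_t i = 0; i < memProps.memoryTypeCount; ++i)
+    //         handleTypes[i] = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT;
+    //     allocatorInfo.pTypeExternalMemoryHandleTypes = handleTypes;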
+ VkExportMemoryAllocateInfoKHR exportMemoryAllocInfo = { VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_KHR }; + exportMemoryAllocInfo.handleTypes = GetExternalMemoryHandleTypeFlags(memTypeIndex); + if(exportMemoryAllocInfo.handleTypes != 0) + { + VmaPnextChainPushFront(&allocInfo, &exportMemoryAllocInfo); + } +#endif // #if VMA_EXTERNAL_MEMORY + + size_t allocIndex; + VkResult res = VK_SUCCESS; + for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex) + { + res = AllocateDedicatedMemoryPage( + pool, + size, + suballocType, + memTypeIndex, + allocInfo, + map, + isUserDataString, + isMappingAllowed, + pUserData, + pAllocations + allocIndex); + if(res != VK_SUCCESS) + { + break; + } + } + + if(res == VK_SUCCESS) + { + for (allocIndex = 0; allocIndex < allocationCount; ++allocIndex) + { + dedicatedAllocations.Register(pAllocations[allocIndex]); + } + VMA_DEBUG_LOG_FORMAT(" Allocated DedicatedMemory Count=%zu, MemoryTypeIndex=#%" PRIu32, allocationCount, memTypeIndex); + } + else + { + // Free all already created allocations. + while(allocIndex--) + { + VmaAllocation currAlloc = pAllocations[allocIndex]; + VkDeviceMemory hMemory = currAlloc->GetMemory(); + + /* + There is no need to call this, because Vulkan spec allows to skip vkUnmapMemory + before vkFreeMemory. + + if(currAlloc->GetMappedData() != VMA_NULL) + { + (*m_VulkanFunctions.vkUnmapMemory)(m_hDevice, hMemory); + } + */ + + FreeVulkanMemory(memTypeIndex, currAlloc->GetSize(), hMemory); + m_Budget.RemoveAllocation(MemoryTypeIndexToHeapIndex(memTypeIndex), currAlloc->GetSize()); + m_AllocationObjectAllocator.Free(currAlloc); + } + + memset(pAllocations, 0, sizeof(VmaAllocation) * allocationCount); + } + + return res; +} + +VkResult VmaAllocator_T::AllocateDedicatedMemoryPage( + VmaPool pool, + VkDeviceSize size, + VmaSuballocationType suballocType, + uint32_t memTypeIndex, + const VkMemoryAllocateInfo& allocInfo, + bool map, + bool isUserDataString, + bool isMappingAllowed, + void* pUserData, + VmaAllocation* pAllocation) +{ + VkDeviceMemory hMemory = VK_NULL_HANDLE; + VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory); + if(res < 0) + { + VMA_DEBUG_LOG(" vkAllocateMemory FAILED"); + return res; + } + + void* pMappedData = VMA_NULL; + if(map) + { + res = (*m_VulkanFunctions.vkMapMemory)( + m_hDevice, + hMemory, + 0, + VK_WHOLE_SIZE, + 0, + &pMappedData); + if(res < 0) + { + VMA_DEBUG_LOG(" vkMapMemory FAILED"); + FreeVulkanMemory(memTypeIndex, size, hMemory); + return res; + } + } + + *pAllocation = m_AllocationObjectAllocator.Allocate(isMappingAllowed); + (*pAllocation)->InitDedicatedAllocation(pool, memTypeIndex, hMemory, suballocType, pMappedData, size); + if (isUserDataString) + (*pAllocation)->SetName(this, (const char*)pUserData); + else + (*pAllocation)->SetUserData(this, pUserData); + m_Budget.AddAllocation(MemoryTypeIndexToHeapIndex(memTypeIndex), size); + if(VMA_DEBUG_INITIALIZE_ALLOCATIONS) + { + FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED); + } + + return VK_SUCCESS; +} + +void VmaAllocator_T::GetBufferMemoryRequirements( + VkBuffer hBuffer, + VkMemoryRequirements& memReq, + bool& requiresDedicatedAllocation, + bool& prefersDedicatedAllocation) const +{ +#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000 + if(m_UseKhrDedicatedAllocation || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0)) + { + VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR }; + memReqInfo.buffer = hBuffer; + + VkMemoryDedicatedRequirementsKHR 
memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR }; + + VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR }; + VmaPnextChainPushFront(&memReq2, &memDedicatedReq); + + (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2); + + memReq = memReq2.memoryRequirements; + requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE); + prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE); + } + else +#endif // #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000 + { + (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq); + requiresDedicatedAllocation = false; + prefersDedicatedAllocation = false; + } +} + +void VmaAllocator_T::GetImageMemoryRequirements( + VkImage hImage, + VkMemoryRequirements& memReq, + bool& requiresDedicatedAllocation, + bool& prefersDedicatedAllocation) const +{ +#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000 + if(m_UseKhrDedicatedAllocation || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0)) + { + VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR }; + memReqInfo.image = hImage; + + VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR }; + + VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR }; + VmaPnextChainPushFront(&memReq2, &memDedicatedReq); + + (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2); + + memReq = memReq2.memoryRequirements; + requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE); + prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE); + } + else +#endif // #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000 + { + (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq); + requiresDedicatedAllocation = false; + prefersDedicatedAllocation = false; + } +} + +VkResult VmaAllocator_T::FindMemoryTypeIndex( + uint32_t memoryTypeBits, + const VmaAllocationCreateInfo* pAllocationCreateInfo, + VmaBufferImageUsage bufImgUsage, + uint32_t* pMemoryTypeIndex) const +{ + memoryTypeBits &= GetGlobalMemoryTypeBits(); + + if(pAllocationCreateInfo->memoryTypeBits != 0) + { + memoryTypeBits &= pAllocationCreateInfo->memoryTypeBits; + } + + VkMemoryPropertyFlags requiredFlags = 0, preferredFlags = 0, notPreferredFlags = 0; + if(!FindMemoryPreferences( + IsIntegratedGpu(), + *pAllocationCreateInfo, + bufImgUsage, + requiredFlags, preferredFlags, notPreferredFlags)) + { + return VK_ERROR_FEATURE_NOT_PRESENT; + } + + *pMemoryTypeIndex = UINT32_MAX; + uint32_t minCost = UINT32_MAX; + for(uint32_t memTypeIndex = 0, memTypeBit = 1; + memTypeIndex < GetMemoryTypeCount(); + ++memTypeIndex, memTypeBit <<= 1) + { + // This memory type is acceptable according to memoryTypeBits bitmask. + if((memTypeBit & memoryTypeBits) != 0) + { + const VkMemoryPropertyFlags currFlags = + m_MemProps.memoryTypes[memTypeIndex].propertyFlags; + // This memory type contains requiredFlags. + if((requiredFlags & ~currFlags) == 0) + { + // Calculate cost as number of bits from preferredFlags not present in this memory type. + uint32_t currCost = VMA_COUNT_BITS_SET(preferredFlags & ~currFlags) + + VMA_COUNT_BITS_SET(currFlags & notPreferredFlags); + // Remember memory type with lowest cost. 
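+            // The same lowest-cost search backs the public helpers. A minimal sketch, assuming
+            // `allocator` and a filled VkBufferCreateInfo `bufferInfo`:
+            //
+            //     VmaAllocationCreateInfo allocCreateInfo = {};
+            //     allocCreateInfo.usage = VMA_MEMORY_USAGE_AUTO_PREFER_DEVICE;
+            //     uint32_t memTypeIndex = 0;
+            //     VkResult r = vmaFindMemoryTypeIndexForBufferInfo(allocator, &bufferInfo, &allocCreateInfo, &memTypeIndex);
+            //     // r == VK_ERROR_FEATURE_NOT_PRESENT means no memory type satisfies the required flags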
+ if(currCost < minCost) + { + *pMemoryTypeIndex = memTypeIndex; + if(currCost == 0) + { + return VK_SUCCESS; + } + minCost = currCost; + } + } + } + } + return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT; +} + +VkResult VmaAllocator_T::CalcMemTypeParams( + VmaAllocationCreateInfo& inoutCreateInfo, + uint32_t memTypeIndex, + VkDeviceSize size, + size_t allocationCount) +{ + // If memory type is not HOST_VISIBLE, disable MAPPED. + if((inoutCreateInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0 && + (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0) + { + inoutCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_MAPPED_BIT; + } + + if((inoutCreateInfo.flags & VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT) != 0 && + (inoutCreateInfo.flags & VMA_ALLOCATION_CREATE_WITHIN_BUDGET_BIT) != 0) + { + const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex); + VmaBudget heapBudget = {}; + GetHeapBudgets(&heapBudget, heapIndex, 1); + if(heapBudget.usage + size * allocationCount > heapBudget.budget) + { + return VK_ERROR_OUT_OF_DEVICE_MEMORY; + } + } + return VK_SUCCESS; +} + +VkResult VmaAllocator_T::CalcAllocationParams( + VmaAllocationCreateInfo& inoutCreateInfo, + bool dedicatedRequired, + bool dedicatedPreferred) +{ + VMA_ASSERT((inoutCreateInfo.flags & + (VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT | VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT)) != + (VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT | VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT) && + "Specifying both flags VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT and VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT is incorrect."); + VMA_ASSERT((((inoutCreateInfo.flags & VMA_ALLOCATION_CREATE_HOST_ACCESS_ALLOW_TRANSFER_INSTEAD_BIT) == 0 || + (inoutCreateInfo.flags & (VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT | VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT)) != 0)) && + "Specifying VMA_ALLOCATION_CREATE_HOST_ACCESS_ALLOW_TRANSFER_INSTEAD_BIT requires also VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT or VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT."); + if(inoutCreateInfo.usage == VMA_MEMORY_USAGE_AUTO || inoutCreateInfo.usage == VMA_MEMORY_USAGE_AUTO_PREFER_DEVICE || inoutCreateInfo.usage == VMA_MEMORY_USAGE_AUTO_PREFER_HOST) + { + if((inoutCreateInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0) + { + VMA_ASSERT((inoutCreateInfo.flags & (VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT | VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT)) != 0 && + "When using VMA_ALLOCATION_CREATE_MAPPED_BIT and usage = VMA_MEMORY_USAGE_AUTO*, you must also specify VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT or VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT."); + } + } + + // If memory is lazily allocated, it should be always dedicated. 
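+    // A lazily allocated attachment is requested like this from user code; a sketch, assuming
+    // `allocator` and a VkImageCreateInfo `imageInfo` whose usage includes
+    // VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT:
+    //
+    //     VmaAllocationCreateInfo allocCreateInfo = {};
+    //     allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_LAZILY_ALLOCATED;
+    //     VkImage image = VK_NULL_HANDLE;
+    //     VmaAllocation allocation = VK_NULL_HANDLE;
+    //     vmaCreateImage(allocator, &imageInfo, &allocCreateInfo, &image, &allocation, nullptr);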
+ if(dedicatedRequired || + inoutCreateInfo.usage == VMA_MEMORY_USAGE_GPU_LAZILY_ALLOCATED) + { + inoutCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT; + } + + if(inoutCreateInfo.pool != VK_NULL_HANDLE) + { + if(inoutCreateInfo.pool->m_BlockVector.HasExplicitBlockSize() && + (inoutCreateInfo.flags & VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT) != 0) + { + VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT while current custom pool doesn't support dedicated allocations."); + return VK_ERROR_FEATURE_NOT_PRESENT; + } + inoutCreateInfo.priority = inoutCreateInfo.pool->m_BlockVector.GetPriority(); + } + + if((inoutCreateInfo.flags & VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT) != 0 && + (inoutCreateInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0) + { + VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense."); + return VK_ERROR_FEATURE_NOT_PRESENT; + } + + if(VMA_DEBUG_ALWAYS_DEDICATED_MEMORY && + (inoutCreateInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0) + { + inoutCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT; + } + + // Non-auto USAGE values imply HOST_ACCESS flags. + // And so does VMA_MEMORY_USAGE_UNKNOWN because it is used with custom pools. + // Which specific flag is used doesn't matter. They change things only when used with VMA_MEMORY_USAGE_AUTO*. + // Otherwise they just protect from assert on mapping. + if(inoutCreateInfo.usage != VMA_MEMORY_USAGE_AUTO && + inoutCreateInfo.usage != VMA_MEMORY_USAGE_AUTO_PREFER_DEVICE && + inoutCreateInfo.usage != VMA_MEMORY_USAGE_AUTO_PREFER_HOST) + { + if((inoutCreateInfo.flags & (VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT | VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT)) == 0) + { + inoutCreateInfo.flags |= VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT; + } + } + + return VK_SUCCESS; +} + +VkResult VmaAllocator_T::AllocateMemory( + const VkMemoryRequirements& vkMemReq, + bool requiresDedicatedAllocation, + bool prefersDedicatedAllocation, + VkBuffer dedicatedBuffer, + VkImage dedicatedImage, + VmaBufferImageUsage dedicatedBufferImageUsage, + const VmaAllocationCreateInfo& createInfo, + VmaSuballocationType suballocType, + size_t allocationCount, + VmaAllocation* pAllocations) +{ + memset(pAllocations, 0, sizeof(VmaAllocation) * allocationCount); + + VMA_ASSERT(VmaIsPow2(vkMemReq.alignment)); + + if(vkMemReq.size == 0) + { + return VK_ERROR_INITIALIZATION_FAILED; + } + + VmaAllocationCreateInfo createInfoFinal = createInfo; + VkResult res = CalcAllocationParams(createInfoFinal, requiresDedicatedAllocation, prefersDedicatedAllocation); + if(res != VK_SUCCESS) + return res; + + if(createInfoFinal.pool != VK_NULL_HANDLE) + { + VmaBlockVector& blockVector = createInfoFinal.pool->m_BlockVector; + return AllocateMemoryOfType( + createInfoFinal.pool, + vkMemReq.size, + vkMemReq.alignment, + prefersDedicatedAllocation, + dedicatedBuffer, + dedicatedImage, + dedicatedBufferImageUsage, + createInfoFinal, + blockVector.GetMemoryTypeIndex(), + suballocType, + createInfoFinal.pool->m_DedicatedAllocations, + blockVector, + allocationCount, + pAllocations); + } + else + { + // Bit mask of memory Vulkan types acceptable for this allocation. + uint32_t memoryTypeBits = vkMemReq.memoryTypeBits; + uint32_t memTypeIndex = UINT32_MAX; + res = FindMemoryTypeIndex(memoryTypeBits, &createInfoFinal, dedicatedBufferImageUsage, &memTypeIndex); + // Can't find any single memory type matching requirements. 
res is VK_ERROR_FEATURE_NOT_PRESENT. + if(res != VK_SUCCESS) + return res; + do + { + VmaBlockVector* blockVector = m_pBlockVectors[memTypeIndex]; + VMA_ASSERT(blockVector && "Trying to use unsupported memory type!"); + res = AllocateMemoryOfType( + VK_NULL_HANDLE, + vkMemReq.size, + vkMemReq.alignment, + requiresDedicatedAllocation || prefersDedicatedAllocation, + dedicatedBuffer, + dedicatedImage, + dedicatedBufferImageUsage, + createInfoFinal, + memTypeIndex, + suballocType, + m_DedicatedAllocations[memTypeIndex], + *blockVector, + allocationCount, + pAllocations); + // Allocation succeeded + if(res == VK_SUCCESS) + return VK_SUCCESS; + + // Remove old memTypeIndex from list of possibilities. + memoryTypeBits &= ~(1u << memTypeIndex); + // Find alternative memTypeIndex. + res = FindMemoryTypeIndex(memoryTypeBits, &createInfoFinal, dedicatedBufferImageUsage, &memTypeIndex); + } while(res == VK_SUCCESS); + + // No other matching memory type index could be found. + // Not returning res, which is VK_ERROR_FEATURE_NOT_PRESENT, because we already failed to allocate once. + return VK_ERROR_OUT_OF_DEVICE_MEMORY; + } +} + +void VmaAllocator_T::FreeMemory( + size_t allocationCount, + const VmaAllocation* pAllocations) +{ + VMA_ASSERT(pAllocations); + + for(size_t allocIndex = allocationCount; allocIndex--; ) + { + VmaAllocation allocation = pAllocations[allocIndex]; + + if(allocation != VK_NULL_HANDLE) + { + if(VMA_DEBUG_INITIALIZE_ALLOCATIONS) + { + FillAllocation(allocation, VMA_ALLOCATION_FILL_PATTERN_DESTROYED); + } + + allocation->FreeName(this); + + switch(allocation->GetType()) + { + case VmaAllocation_T::ALLOCATION_TYPE_BLOCK: + { + VmaBlockVector* pBlockVector = VMA_NULL; + VmaPool hPool = allocation->GetParentPool(); + if(hPool != VK_NULL_HANDLE) + { + pBlockVector = &hPool->m_BlockVector; + } + else + { + const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex(); + pBlockVector = m_pBlockVectors[memTypeIndex]; + VMA_ASSERT(pBlockVector && "Trying to free memory of unsupported type!"); + } + pBlockVector->Free(allocation); + } + break; + case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED: + FreeDedicatedMemory(allocation); + break; + default: + VMA_ASSERT(0); + } + } + } +} + +void VmaAllocator_T::CalculateStatistics(VmaTotalStatistics* pStats) +{ + // Initialize. + VmaClearDetailedStatistics(pStats->total); + for(uint32_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i) + VmaClearDetailedStatistics(pStats->memoryType[i]); + for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i) + VmaClearDetailedStatistics(pStats->memoryHeap[i]); + + // Process default pools. + for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex) + { + VmaBlockVector* const pBlockVector = m_pBlockVectors[memTypeIndex]; + if (pBlockVector != VMA_NULL) + pBlockVector->AddDetailedStatistics(pStats->memoryType[memTypeIndex]); + } + + // Process custom pools. + { + VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex); + for(VmaPool pool = m_Pools.Front(); pool != VMA_NULL; pool = m_Pools.GetNext(pool)) + { + VmaBlockVector& blockVector = pool->m_BlockVector; + const uint32_t memTypeIndex = blockVector.GetMemoryTypeIndex(); + blockVector.AddDetailedStatistics(pStats->memoryType[memTypeIndex]); + pool->m_DedicatedAllocations.AddDetailedStatistics(pStats->memoryType[memTypeIndex]); + } + } + + // Process dedicated allocations. 
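+ // (Dedicated allocations are not owned by any VmaBlockVector, so their sizes are
+ // accumulated separately here before the per-type totals are folded into heap and
+ // grand totals below.)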
+ for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex) + { + m_DedicatedAllocations[memTypeIndex].AddDetailedStatistics(pStats->memoryType[memTypeIndex]); + } + + // Sum from memory types to memory heaps. + for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex) + { + const uint32_t memHeapIndex = m_MemProps.memoryTypes[memTypeIndex].heapIndex; + VmaAddDetailedStatistics(pStats->memoryHeap[memHeapIndex], pStats->memoryType[memTypeIndex]); + } + + // Sum from memory heaps to total. + for(uint32_t memHeapIndex = 0; memHeapIndex < GetMemoryHeapCount(); ++memHeapIndex) + VmaAddDetailedStatistics(pStats->total, pStats->memoryHeap[memHeapIndex]); + + VMA_ASSERT(pStats->total.statistics.allocationCount == 0 || + pStats->total.allocationSizeMax >= pStats->total.allocationSizeMin); + VMA_ASSERT(pStats->total.unusedRangeCount == 0 || + pStats->total.unusedRangeSizeMax >= pStats->total.unusedRangeSizeMin); +} + +void VmaAllocator_T::GetHeapBudgets(VmaBudget* outBudgets, uint32_t firstHeap, uint32_t heapCount) +{ +#if VMA_MEMORY_BUDGET + if(m_UseExtMemoryBudget) + { + if(m_Budget.m_OperationsSinceBudgetFetch < 30) + { + VmaMutexLockRead lockRead(m_Budget.m_BudgetMutex, m_UseMutex); + for(uint32_t i = 0; i < heapCount; ++i, ++outBudgets) + { + const uint32_t heapIndex = firstHeap + i; + + outBudgets->statistics.blockCount = m_Budget.m_BlockCount[heapIndex]; + outBudgets->statistics.allocationCount = m_Budget.m_AllocationCount[heapIndex]; + outBudgets->statistics.blockBytes = m_Budget.m_BlockBytes[heapIndex]; + outBudgets->statistics.allocationBytes = m_Budget.m_AllocationBytes[heapIndex]; + + if(m_Budget.m_VulkanUsage[heapIndex] + outBudgets->statistics.blockBytes > m_Budget.m_BlockBytesAtBudgetFetch[heapIndex]) + { + outBudgets->usage = m_Budget.m_VulkanUsage[heapIndex] + + outBudgets->statistics.blockBytes - m_Budget.m_BlockBytesAtBudgetFetch[heapIndex]; + } + else + { + outBudgets->usage = 0; + } + + // Have to take MIN with heap size because explicit HeapSizeLimit is included in it. + outBudgets->budget = VMA_MIN( + m_Budget.m_VulkanBudget[heapIndex], m_MemProps.memoryHeaps[heapIndex].size); + } + } + else + { + UpdateVulkanBudget(); // Outside of mutex lock + GetHeapBudgets(outBudgets, firstHeap, heapCount); // Recursion + } + } + else +#endif + { + for(uint32_t i = 0; i < heapCount; ++i, ++outBudgets) + { + const uint32_t heapIndex = firstHeap + i; + + outBudgets->statistics.blockCount = m_Budget.m_BlockCount[heapIndex]; + outBudgets->statistics.allocationCount = m_Budget.m_AllocationCount[heapIndex]; + outBudgets->statistics.blockBytes = m_Budget.m_BlockBytes[heapIndex]; + outBudgets->statistics.allocationBytes = m_Budget.m_AllocationBytes[heapIndex]; + + outBudgets->usage = outBudgets->statistics.blockBytes; + outBudgets->budget = m_MemProps.memoryHeaps[heapIndex].size * 8 / 10; // 80% heuristics. 
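+ // (Without VK_EXT_memory_budget only VMA's own bookkeeping is available: "usage"
+ // then covers just the blocks allocated through this allocator, and the budget is a
+ // conservative 80% of the heap size rather than a driver-reported value.)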
+ } + } +} + +void VmaAllocator_T::GetAllocationInfo(VmaAllocation hAllocation, VmaAllocationInfo* pAllocationInfo) +{ + pAllocationInfo->memoryType = hAllocation->GetMemoryTypeIndex(); + pAllocationInfo->deviceMemory = hAllocation->GetMemory(); + pAllocationInfo->offset = hAllocation->GetOffset(); + pAllocationInfo->size = hAllocation->GetSize(); + pAllocationInfo->pMappedData = hAllocation->GetMappedData(); + pAllocationInfo->pUserData = hAllocation->GetUserData(); + pAllocationInfo->pName = hAllocation->GetName(); +} + +void VmaAllocator_T::GetAllocationInfo2(VmaAllocation hAllocation, VmaAllocationInfo2* pAllocationInfo) +{ + GetAllocationInfo(hAllocation, &pAllocationInfo->allocationInfo); + + switch (hAllocation->GetType()) + { + case VmaAllocation_T::ALLOCATION_TYPE_BLOCK: + pAllocationInfo->blockSize = hAllocation->GetBlock()->m_pMetadata->GetSize(); + pAllocationInfo->dedicatedMemory = VK_FALSE; + break; + case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED: + pAllocationInfo->blockSize = pAllocationInfo->allocationInfo.size; + pAllocationInfo->dedicatedMemory = VK_TRUE; + break; + default: + VMA_ASSERT(0); + } +} + +VkResult VmaAllocator_T::CreatePool(const VmaPoolCreateInfo* pCreateInfo, VmaPool* pPool) +{ + VMA_DEBUG_LOG_FORMAT(" CreatePool: MemoryTypeIndex=%" PRIu32 ", flags=%" PRIu32, pCreateInfo->memoryTypeIndex, pCreateInfo->flags); + + VmaPoolCreateInfo newCreateInfo = *pCreateInfo; + + // Protection against uninitialized new structure member. If garbage data are left there, this pointer dereference would crash. + if(pCreateInfo->pMemoryAllocateNext) + { + VMA_ASSERT(((const VkBaseInStructure*)pCreateInfo->pMemoryAllocateNext)->sType != 0); + } + + if(newCreateInfo.maxBlockCount == 0) + { + newCreateInfo.maxBlockCount = SIZE_MAX; + } + if(newCreateInfo.minBlockCount > newCreateInfo.maxBlockCount) + { + return VK_ERROR_INITIALIZATION_FAILED; + } + // Memory type index out of range or forbidden. + if(pCreateInfo->memoryTypeIndex >= GetMemoryTypeCount() || + ((1u << pCreateInfo->memoryTypeIndex) & m_GlobalMemoryTypeBits) == 0) + { + return VK_ERROR_FEATURE_NOT_PRESENT; + } + if(newCreateInfo.minAllocationAlignment > 0) + { + VMA_ASSERT(VmaIsPow2(newCreateInfo.minAllocationAlignment)); + } + + const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(newCreateInfo.memoryTypeIndex); + + *pPool = vma_new(this, VmaPool_T)(this, newCreateInfo, preferredBlockSize); + + VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks(); + if(res != VK_SUCCESS) + { + vma_delete(this, *pPool); + *pPool = VMA_NULL; + return res; + } + + // Add to m_Pools. + { + VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex); + (*pPool)->SetId(m_NextPoolId++); + m_Pools.PushBack(*pPool); + } + + return VK_SUCCESS; +} + +void VmaAllocator_T::DestroyPool(VmaPool pool) +{ + // Remove from m_Pools. 
+ { + VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex); + m_Pools.Remove(pool); + } + + vma_delete(this, pool); +} + +void VmaAllocator_T::GetPoolStatistics(VmaPool pool, VmaStatistics* pPoolStats) +{ + VmaClearStatistics(*pPoolStats); + pool->m_BlockVector.AddStatistics(*pPoolStats); + pool->m_DedicatedAllocations.AddStatistics(*pPoolStats); +} + +void VmaAllocator_T::CalculatePoolStatistics(VmaPool pool, VmaDetailedStatistics* pPoolStats) +{ + VmaClearDetailedStatistics(*pPoolStats); + pool->m_BlockVector.AddDetailedStatistics(*pPoolStats); + pool->m_DedicatedAllocations.AddDetailedStatistics(*pPoolStats); +} + +void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex) +{ + m_CurrentFrameIndex.store(frameIndex); + +#if VMA_MEMORY_BUDGET + if(m_UseExtMemoryBudget) + { + UpdateVulkanBudget(); + } +#endif // #if VMA_MEMORY_BUDGET +} + +VkResult VmaAllocator_T::CheckPoolCorruption(VmaPool hPool) +{ + return hPool->m_BlockVector.CheckCorruption(); +} + +VkResult VmaAllocator_T::CheckCorruption(uint32_t memoryTypeBits) +{ + VkResult finalRes = VK_ERROR_FEATURE_NOT_PRESENT; + + // Process default pools. + for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex) + { + VmaBlockVector* const pBlockVector = m_pBlockVectors[memTypeIndex]; + if(pBlockVector != VMA_NULL) + { + VkResult localRes = pBlockVector->CheckCorruption(); + switch(localRes) + { + case VK_ERROR_FEATURE_NOT_PRESENT: + break; + case VK_SUCCESS: + finalRes = VK_SUCCESS; + break; + default: + return localRes; + } + } + } + + // Process custom pools. + { + VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex); + for(VmaPool pool = m_Pools.Front(); pool != VMA_NULL; pool = m_Pools.GetNext(pool)) + { + if(((1u << pool->m_BlockVector.GetMemoryTypeIndex()) & memoryTypeBits) != 0) + { + VkResult localRes = pool->m_BlockVector.CheckCorruption(); + switch(localRes) + { + case VK_ERROR_FEATURE_NOT_PRESENT: + break; + case VK_SUCCESS: + finalRes = VK_SUCCESS; + break; + default: + return localRes; + } + } + } + } + + return finalRes; +} + +VkResult VmaAllocator_T::AllocateVulkanMemory(const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory) +{ + AtomicTransactionalIncrement deviceMemoryCountIncrement; + const uint64_t prevDeviceMemoryCount = deviceMemoryCountIncrement.Increment(&m_DeviceMemoryCount); +#if VMA_DEBUG_DONT_EXCEED_MAX_MEMORY_ALLOCATION_COUNT + if(prevDeviceMemoryCount >= m_PhysicalDeviceProperties.limits.maxMemoryAllocationCount) + { + return VK_ERROR_TOO_MANY_OBJECTS; + } +#endif + + const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex); + + // HeapSizeLimit is in effect for this heap. + if((m_HeapSizeLimitMask & (1u << heapIndex)) != 0) + { + const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size; + VkDeviceSize blockBytes = m_Budget.m_BlockBytes[heapIndex]; + for(;;) + { + const VkDeviceSize blockBytesAfterAllocation = blockBytes + pAllocateInfo->allocationSize; + if(blockBytesAfterAllocation > heapSize) + { + return VK_ERROR_OUT_OF_DEVICE_MEMORY; + } + if(m_Budget.m_BlockBytes[heapIndex].compare_exchange_strong(blockBytes, blockBytesAfterAllocation)) + { + break; + } + } + } + else + { + m_Budget.m_BlockBytes[heapIndex] += pAllocateInfo->allocationSize; + } + ++m_Budget.m_BlockCount[heapIndex]; + + // VULKAN CALL vkAllocateMemory. 
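+ // (If the call below fails, the m_BlockCount / m_BlockBytes increments made above
+ // are undone in the error path, so the budget bookkeeping stays consistent.)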
+ VkResult res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory); + + if(res == VK_SUCCESS) + { +#if VMA_MEMORY_BUDGET + ++m_Budget.m_OperationsSinceBudgetFetch; +#endif + + // Informative callback. + if(m_DeviceMemoryCallbacks.pfnAllocate != VMA_NULL) + { + (*m_DeviceMemoryCallbacks.pfnAllocate)(this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize, m_DeviceMemoryCallbacks.pUserData); + } + + deviceMemoryCountIncrement.Commit(); + } + else + { + --m_Budget.m_BlockCount[heapIndex]; + m_Budget.m_BlockBytes[heapIndex] -= pAllocateInfo->allocationSize; + } + + return res; +} + +void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory) +{ + // Informative callback. + if(m_DeviceMemoryCallbacks.pfnFree != VMA_NULL) + { + (*m_DeviceMemoryCallbacks.pfnFree)(this, memoryType, hMemory, size, m_DeviceMemoryCallbacks.pUserData); + } + + // VULKAN CALL vkFreeMemory. + (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks()); + + const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memoryType); + --m_Budget.m_BlockCount[heapIndex]; + m_Budget.m_BlockBytes[heapIndex] -= size; + + --m_DeviceMemoryCount; +} + +VkResult VmaAllocator_T::BindVulkanBuffer( + VkDeviceMemory memory, + VkDeviceSize memoryOffset, + VkBuffer buffer, + const void* pNext) +{ + if(pNext != VMA_NULL) + { +#if VMA_VULKAN_VERSION >= 1001000 || VMA_BIND_MEMORY2 + if((m_UseKhrBindMemory2 || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0)) && + m_VulkanFunctions.vkBindBufferMemory2KHR != VMA_NULL) + { + VkBindBufferMemoryInfoKHR bindBufferMemoryInfo = { VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO_KHR }; + bindBufferMemoryInfo.pNext = pNext; + bindBufferMemoryInfo.buffer = buffer; + bindBufferMemoryInfo.memory = memory; + bindBufferMemoryInfo.memoryOffset = memoryOffset; + return (*m_VulkanFunctions.vkBindBufferMemory2KHR)(m_hDevice, 1, &bindBufferMemoryInfo); + } + else +#endif // #if VMA_VULKAN_VERSION >= 1001000 || VMA_BIND_MEMORY2 + { + return VK_ERROR_EXTENSION_NOT_PRESENT; + } + } + else + { + return (*m_VulkanFunctions.vkBindBufferMemory)(m_hDevice, buffer, memory, memoryOffset); + } +} + +VkResult VmaAllocator_T::BindVulkanImage( + VkDeviceMemory memory, + VkDeviceSize memoryOffset, + VkImage image, + const void* pNext) +{ + if(pNext != VMA_NULL) + { +#if VMA_VULKAN_VERSION >= 1001000 || VMA_BIND_MEMORY2 + if((m_UseKhrBindMemory2 || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0)) && + m_VulkanFunctions.vkBindImageMemory2KHR != VMA_NULL) + { + VkBindImageMemoryInfoKHR bindBufferMemoryInfo = { VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO_KHR }; + bindBufferMemoryInfo.pNext = pNext; + bindBufferMemoryInfo.image = image; + bindBufferMemoryInfo.memory = memory; + bindBufferMemoryInfo.memoryOffset = memoryOffset; + return (*m_VulkanFunctions.vkBindImageMemory2KHR)(m_hDevice, 1, &bindBufferMemoryInfo); + } + else +#endif // #if VMA_BIND_MEMORY2 + { + return VK_ERROR_EXTENSION_NOT_PRESENT; + } + } + else + { + return (*m_VulkanFunctions.vkBindImageMemory)(m_hDevice, image, memory, memoryOffset); + } +} + +VkResult VmaAllocator_T::Map(VmaAllocation hAllocation, void** ppData) +{ + switch(hAllocation->GetType()) + { + case VmaAllocation_T::ALLOCATION_TYPE_BLOCK: + { + VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock(); + char *pBytes = VMA_NULL; + VkResult res = pBlock->Map(this, 1, (void**)&pBytes); + if(res == VK_SUCCESS) + { + *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset(); + 
hAllocation->BlockAllocMap();
+ }
+ return res;
+ }
+ VMA_FALLTHROUGH; // Fallthrough
+ case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
+ return hAllocation->DedicatedAllocMap(this, ppData);
+ default:
+ VMA_ASSERT(0);
+ return VK_ERROR_MEMORY_MAP_FAILED;
+ }
+}
+
+void VmaAllocator_T::Unmap(VmaAllocation hAllocation)
+{
+ switch(hAllocation->GetType())
+ {
+ case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
+ {
+ VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
+ hAllocation->BlockAllocUnmap();
+ pBlock->Unmap(this, 1);
+ }
+ break;
+ case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
+ hAllocation->DedicatedAllocUnmap(this);
+ break;
+ default:
+ VMA_ASSERT(0);
+ }
+}
+
+VkResult VmaAllocator_T::BindBufferMemory(
+ VmaAllocation hAllocation,
+ VkDeviceSize allocationLocalOffset,
+ VkBuffer hBuffer,
+ const void* pNext)
+{
+ VkResult res = VK_ERROR_UNKNOWN_COPY;
+ switch(hAllocation->GetType())
+ {
+ case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
+ res = BindVulkanBuffer(hAllocation->GetMemory(), allocationLocalOffset, hBuffer, pNext);
+ break;
+ case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
+ {
+ VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
+ VMA_ASSERT(pBlock && "Binding buffer to allocation that doesn't belong to any block.");
+ res = pBlock->BindBufferMemory(this, hAllocation, allocationLocalOffset, hBuffer, pNext);
+ break;
+ }
+ default:
+ VMA_ASSERT(0);
+ }
+ return res;
+}
+
+VkResult VmaAllocator_T::BindImageMemory(
+ VmaAllocation hAllocation,
+ VkDeviceSize allocationLocalOffset,
+ VkImage hImage,
+ const void* pNext)
+{
+ VkResult res = VK_ERROR_UNKNOWN_COPY;
+ switch(hAllocation->GetType())
+ {
+ case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
+ res = BindVulkanImage(hAllocation->GetMemory(), allocationLocalOffset, hImage, pNext);
+ break;
+ case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
+ {
+ VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
+ VMA_ASSERT(pBlock && "Binding image to allocation that doesn't belong to any block.");
+ res = pBlock->BindImageMemory(this, hAllocation, allocationLocalOffset, hImage, pNext);
+ break;
+ }
+ default:
+ VMA_ASSERT(0);
+ }
+ return res;
+}
+
+VkResult VmaAllocator_T::FlushOrInvalidateAllocation(
+ VmaAllocation hAllocation,
+ VkDeviceSize offset, VkDeviceSize size,
+ VMA_CACHE_OPERATION op)
+{
+ VkResult res = VK_SUCCESS;
+
+ VkMappedMemoryRange memRange = {};
+ if(GetFlushOrInvalidateRange(hAllocation, offset, size, memRange))
+ {
+ switch(op)
+ {
+ case VMA_CACHE_FLUSH:
+ res = (*GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hDevice, 1, &memRange);
+ break;
+ case VMA_CACHE_INVALIDATE:
+ res = (*GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hDevice, 1, &memRange);
+ break;
+ default:
+ VMA_ASSERT(0);
+ }
+ }
+ // else: Just ignore this call.
+ return res;
+}
+
+VkResult VmaAllocator_T::FlushOrInvalidateAllocations(
+ uint32_t allocationCount,
+ const VmaAllocation* allocations,
+ const VkDeviceSize* offsets, const VkDeviceSize* sizes,
+ VMA_CACHE_OPERATION op)
+{
+ typedef VmaStlAllocator<VkMappedMemoryRange> RangeAllocator;
+ typedef VmaSmallVector<VkMappedMemoryRange, RangeAllocator, 16> RangeVector;
+ RangeVector ranges = RangeVector(RangeAllocator(GetAllocationCallbacks()));
+
+ for(uint32_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
+ {
+ const VmaAllocation alloc = allocations[allocIndex];
+ const VkDeviceSize offset = offsets != VMA_NULL ? offsets[allocIndex] : 0;
+ const VkDeviceSize size = sizes != VMA_NULL ?
sizes[allocIndex] : VK_WHOLE_SIZE; + VkMappedMemoryRange newRange; + if(GetFlushOrInvalidateRange(alloc, offset, size, newRange)) + { + ranges.push_back(newRange); + } + } + + VkResult res = VK_SUCCESS; + if(!ranges.empty()) + { + switch(op) + { + case VMA_CACHE_FLUSH: + res = (*GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hDevice, (uint32_t)ranges.size(), ranges.data()); + break; + case VMA_CACHE_INVALIDATE: + res = (*GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hDevice, (uint32_t)ranges.size(), ranges.data()); + break; + default: + VMA_ASSERT(0); + } + } + // else: Just ignore this call. + return res; +} + +VkResult VmaAllocator_T::CopyMemoryToAllocation( + const void* pSrcHostPointer, + VmaAllocation dstAllocation, + VkDeviceSize dstAllocationLocalOffset, + VkDeviceSize size) +{ + void* dstMappedData = VMA_NULL; + VkResult res = Map(dstAllocation, &dstMappedData); + if(res == VK_SUCCESS) + { + memcpy((char*)dstMappedData + dstAllocationLocalOffset, pSrcHostPointer, (size_t)size); + Unmap(dstAllocation); + res = FlushOrInvalidateAllocation(dstAllocation, dstAllocationLocalOffset, size, VMA_CACHE_FLUSH); + } + return res; +} + +VkResult VmaAllocator_T::CopyAllocationToMemory( + VmaAllocation srcAllocation, + VkDeviceSize srcAllocationLocalOffset, + void* pDstHostPointer, + VkDeviceSize size) +{ + void* srcMappedData = VMA_NULL; + VkResult res = Map(srcAllocation, &srcMappedData); + if(res == VK_SUCCESS) + { + res = FlushOrInvalidateAllocation(srcAllocation, srcAllocationLocalOffset, size, VMA_CACHE_INVALIDATE); + if(res == VK_SUCCESS) + { + memcpy(pDstHostPointer, (const char*)srcMappedData + srcAllocationLocalOffset, (size_t)size); + Unmap(srcAllocation); + } + } + return res; +} + +void VmaAllocator_T::FreeDedicatedMemory(const VmaAllocation allocation) +{ + VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED); + + const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex(); + VmaPool parentPool = allocation->GetParentPool(); + if(parentPool == VK_NULL_HANDLE) + { + // Default pool + m_DedicatedAllocations[memTypeIndex].Unregister(allocation); + } + else + { + // Custom pool + parentPool->m_DedicatedAllocations.Unregister(allocation); + } + + VkDeviceMemory hMemory = allocation->GetMemory(); + + /* + There is no need to call this, because Vulkan spec allows to skip vkUnmapMemory + before vkFreeMemory. + + if(allocation->GetMappedData() != VMA_NULL) + { + (*m_VulkanFunctions.vkUnmapMemory)(m_hDevice, hMemory); + } + */ + + FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory); + + m_Budget.RemoveAllocation(MemoryTypeIndexToHeapIndex(allocation->GetMemoryTypeIndex()), allocation->GetSize()); + m_AllocationObjectAllocator.Free(allocation); + + VMA_DEBUG_LOG_FORMAT(" Freed DedicatedMemory MemoryTypeIndex=%" PRIu32, memTypeIndex); +} + +uint32_t VmaAllocator_T::CalculateGpuDefragmentationMemoryTypeBits() const +{ + VkBufferCreateInfo dummyBufCreateInfo; + VmaFillGpuDefragmentationBufferCreateInfo(dummyBufCreateInfo); + + uint32_t memoryTypeBits = 0; + + // Create buffer. + VkBuffer buf = VK_NULL_HANDLE; + VkResult res = (*GetVulkanFunctions().vkCreateBuffer)( + m_hDevice, &dummyBufCreateInfo, GetAllocationCallbacks(), &buf); + if(res == VK_SUCCESS) + { + // Query for supported memory types. + VkMemoryRequirements memReq; + (*GetVulkanFunctions().vkGetBufferMemoryRequirements)(m_hDevice, buf, &memReq); + memoryTypeBits = memReq.memoryTypeBits; + + // Destroy buffer. 
+ (*GetVulkanFunctions().vkDestroyBuffer)(m_hDevice, buf, GetAllocationCallbacks()); + } + + return memoryTypeBits; +} + +uint32_t VmaAllocator_T::CalculateGlobalMemoryTypeBits() const +{ + // Make sure memory information is already fetched. + VMA_ASSERT(GetMemoryTypeCount() > 0); + + uint32_t memoryTypeBits = UINT32_MAX; + + if(!m_UseAmdDeviceCoherentMemory) + { + // Exclude memory types that have VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD. + for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex) + { + if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY) != 0) + { + memoryTypeBits &= ~(1u << memTypeIndex); + } + } + } + + return memoryTypeBits; +} + +bool VmaAllocator_T::GetFlushOrInvalidateRange( + VmaAllocation allocation, + VkDeviceSize offset, VkDeviceSize size, + VkMappedMemoryRange& outRange) const +{ + const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex(); + if(size > 0 && IsMemoryTypeNonCoherent(memTypeIndex)) + { + const VkDeviceSize nonCoherentAtomSize = m_PhysicalDeviceProperties.limits.nonCoherentAtomSize; + const VkDeviceSize allocationSize = allocation->GetSize(); + VMA_ASSERT(offset <= allocationSize); + + outRange.sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE; + outRange.pNext = VMA_NULL; + outRange.memory = allocation->GetMemory(); + + switch(allocation->GetType()) + { + case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED: + outRange.offset = VmaAlignDown(offset, nonCoherentAtomSize); + if(size == VK_WHOLE_SIZE) + { + outRange.size = allocationSize - outRange.offset; + } + else + { + VMA_ASSERT(offset + size <= allocationSize); + outRange.size = VMA_MIN( + VmaAlignUp(size + (offset - outRange.offset), nonCoherentAtomSize), + allocationSize - outRange.offset); + } + break; + case VmaAllocation_T::ALLOCATION_TYPE_BLOCK: + { + // 1. Still within this allocation. + outRange.offset = VmaAlignDown(offset, nonCoherentAtomSize); + if(size == VK_WHOLE_SIZE) + { + size = allocationSize - offset; + } + else + { + VMA_ASSERT(offset + size <= allocationSize); + } + outRange.size = VmaAlignUp(size + (offset - outRange.offset), nonCoherentAtomSize); + + // 2. Adjust to whole block. 
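+ // (Illustrative numbers only: with nonCoherentAtomSize = 64, offset = 100 and
+ // size = 50, step 1 above yields outRange.offset = VmaAlignDown(100, 64) = 64 and
+ // outRange.size = VmaAlignUp(50 + 36, 64) = 128, relative to the allocation; the
+ // code below then shifts the offset by the allocation's position inside the block
+ // and clamps the size to the block end.)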
+ const VkDeviceSize allocationOffset = allocation->GetOffset(); + VMA_ASSERT(allocationOffset % nonCoherentAtomSize == 0); + const VkDeviceSize blockSize = allocation->GetBlock()->m_pMetadata->GetSize(); + outRange.offset += allocationOffset; + outRange.size = VMA_MIN(outRange.size, blockSize - outRange.offset); + + break; + } + default: + VMA_ASSERT(0); + } + return true; + } + return false; +} + +#if VMA_MEMORY_BUDGET +void VmaAllocator_T::UpdateVulkanBudget() +{ + VMA_ASSERT(m_UseExtMemoryBudget); + + VkPhysicalDeviceMemoryProperties2KHR memProps = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2_KHR }; + + VkPhysicalDeviceMemoryBudgetPropertiesEXT budgetProps = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT }; + VmaPnextChainPushFront(&memProps, &budgetProps); + + GetVulkanFunctions().vkGetPhysicalDeviceMemoryProperties2KHR(m_PhysicalDevice, &memProps); + + { + VmaMutexLockWrite lockWrite(m_Budget.m_BudgetMutex, m_UseMutex); + + for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex) + { + m_Budget.m_VulkanUsage[heapIndex] = budgetProps.heapUsage[heapIndex]; + m_Budget.m_VulkanBudget[heapIndex] = budgetProps.heapBudget[heapIndex]; + m_Budget.m_BlockBytesAtBudgetFetch[heapIndex] = m_Budget.m_BlockBytes[heapIndex].load(); + + // Some bugged drivers return the budget incorrectly, e.g. 0 or much bigger than heap size. + if(m_Budget.m_VulkanBudget[heapIndex] == 0) + { + m_Budget.m_VulkanBudget[heapIndex] = m_MemProps.memoryHeaps[heapIndex].size * 8 / 10; // 80% heuristics. + } + else if(m_Budget.m_VulkanBudget[heapIndex] > m_MemProps.memoryHeaps[heapIndex].size) + { + m_Budget.m_VulkanBudget[heapIndex] = m_MemProps.memoryHeaps[heapIndex].size; + } + if(m_Budget.m_VulkanUsage[heapIndex] == 0 && m_Budget.m_BlockBytesAtBudgetFetch[heapIndex] > 0) + { + m_Budget.m_VulkanUsage[heapIndex] = m_Budget.m_BlockBytesAtBudgetFetch[heapIndex]; + } + } + m_Budget.m_OperationsSinceBudgetFetch = 0; + } +} +#endif // VMA_MEMORY_BUDGET + +void VmaAllocator_T::FillAllocation(const VmaAllocation hAllocation, uint8_t pattern) +{ + if(VMA_DEBUG_INITIALIZE_ALLOCATIONS && + hAllocation->IsMappingAllowed() && + (m_MemProps.memoryTypes[hAllocation->GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0) + { + void* pData = VMA_NULL; + VkResult res = Map(hAllocation, &pData); + if(res == VK_SUCCESS) + { + memset(pData, (int)pattern, (size_t)hAllocation->GetSize()); + FlushOrInvalidateAllocation(hAllocation, 0, VK_WHOLE_SIZE, VMA_CACHE_FLUSH); + Unmap(hAllocation); + } + else + { + VMA_ASSERT(0 && "VMA_DEBUG_INITIALIZE_ALLOCATIONS is enabled, but couldn't map memory to fill allocation."); + } + } +} + +uint32_t VmaAllocator_T::GetGpuDefragmentationMemoryTypeBits() +{ + uint32_t memoryTypeBits = m_GpuDefragmentationMemoryTypeBits.load(); + if(memoryTypeBits == UINT32_MAX) + { + memoryTypeBits = CalculateGpuDefragmentationMemoryTypeBits(); + m_GpuDefragmentationMemoryTypeBits.store(memoryTypeBits); + } + return memoryTypeBits; +} + +#if VMA_STATS_STRING_ENABLED +void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json) +{ + json.WriteString("DefaultPools"); + json.BeginObject(); + { + for (uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex) + { + VmaBlockVector* pBlockVector = m_pBlockVectors[memTypeIndex]; + VmaDedicatedAllocationList& dedicatedAllocList = m_DedicatedAllocations[memTypeIndex]; + if (pBlockVector != VMA_NULL) + { + json.BeginString("Type "); + json.ContinueString(memTypeIndex); + json.EndString(); + 
json.BeginObject(); + { + json.WriteString("PreferredBlockSize"); + json.WriteNumber(pBlockVector->GetPreferredBlockSize()); + + json.WriteString("Blocks"); + pBlockVector->PrintDetailedMap(json); + + json.WriteString("DedicatedAllocations"); + dedicatedAllocList.BuildStatsString(json); + } + json.EndObject(); + } + } + } + json.EndObject(); + + json.WriteString("CustomPools"); + json.BeginObject(); + { + VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex); + if (!m_Pools.IsEmpty()) + { + for (uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex) + { + bool displayType = true; + size_t index = 0; + for (VmaPool pool = m_Pools.Front(); pool != VMA_NULL; pool = m_Pools.GetNext(pool)) + { + VmaBlockVector& blockVector = pool->m_BlockVector; + if (blockVector.GetMemoryTypeIndex() == memTypeIndex) + { + if (displayType) + { + json.BeginString("Type "); + json.ContinueString(memTypeIndex); + json.EndString(); + json.BeginArray(); + displayType = false; + } + + json.BeginObject(); + { + json.WriteString("Name"); + json.BeginString(); + json.ContinueString((uint64_t)index++); + if (pool->GetName()) + { + json.ContinueString(" - "); + json.ContinueString(pool->GetName()); + } + json.EndString(); + + json.WriteString("PreferredBlockSize"); + json.WriteNumber(blockVector.GetPreferredBlockSize()); + + json.WriteString("Blocks"); + blockVector.PrintDetailedMap(json); + + json.WriteString("DedicatedAllocations"); + pool->m_DedicatedAllocations.BuildStatsString(json); + } + json.EndObject(); + } + } + + if (!displayType) + json.EndArray(); + } + } + } + json.EndObject(); +} +#endif // VMA_STATS_STRING_ENABLED +#endif // _VMA_ALLOCATOR_T_FUNCTIONS + + +#ifndef _VMA_PUBLIC_INTERFACE +VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateAllocator( + const VmaAllocatorCreateInfo* pCreateInfo, + VmaAllocator* pAllocator) +{ + VMA_ASSERT(pCreateInfo && pAllocator); + VMA_ASSERT(pCreateInfo->vulkanApiVersion == 0 || + (VK_VERSION_MAJOR(pCreateInfo->vulkanApiVersion) == 1 && VK_VERSION_MINOR(pCreateInfo->vulkanApiVersion) <= 3)); + VMA_DEBUG_LOG("vmaCreateAllocator"); + *pAllocator = vma_new(pCreateInfo->pAllocationCallbacks, VmaAllocator_T)(pCreateInfo); + VkResult result = (*pAllocator)->Init(pCreateInfo); + if(result < 0) + { + vma_delete(pCreateInfo->pAllocationCallbacks, *pAllocator); + *pAllocator = VK_NULL_HANDLE; + } + return result; +} + +VMA_CALL_PRE void VMA_CALL_POST vmaDestroyAllocator( + VmaAllocator allocator) +{ + if(allocator != VK_NULL_HANDLE) + { + VMA_DEBUG_LOG("vmaDestroyAllocator"); + VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks; // Have to copy the callbacks when destroying. 
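+ // (The copy matters because m_AllocationCallbacks lives inside the allocator object
+ // that vma_delete is about to destroy, so the callbacks must be kept alive on the
+ // stack for the final deallocation.)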
+ vma_delete(&allocationCallbacks, allocator);
+ }
+}
+
+VMA_CALL_PRE void VMA_CALL_POST vmaGetAllocatorInfo(VmaAllocator allocator, VmaAllocatorInfo* pAllocatorInfo)
+{
+ VMA_ASSERT(allocator && pAllocatorInfo);
+ pAllocatorInfo->instance = allocator->m_hInstance;
+ pAllocatorInfo->physicalDevice = allocator->GetPhysicalDevice();
+ pAllocatorInfo->device = allocator->m_hDevice;
+}
+
+VMA_CALL_PRE void VMA_CALL_POST vmaGetPhysicalDeviceProperties(
+ VmaAllocator allocator,
+ const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
+{
+ VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
+ *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
+}
+
+VMA_CALL_PRE void VMA_CALL_POST vmaGetMemoryProperties(
+ VmaAllocator allocator,
+ const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
+{
+ VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
+ *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
+}
+
+VMA_CALL_PRE void VMA_CALL_POST vmaGetMemoryTypeProperties(
+ VmaAllocator allocator,
+ uint32_t memoryTypeIndex,
+ VkMemoryPropertyFlags* pFlags)
+{
+ VMA_ASSERT(allocator && pFlags);
+ VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
+ *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
+}
+
+VMA_CALL_PRE void VMA_CALL_POST vmaSetCurrentFrameIndex(
+ VmaAllocator allocator,
+ uint32_t frameIndex)
+{
+ VMA_ASSERT(allocator);
+
+ VMA_DEBUG_GLOBAL_MUTEX_LOCK
+
+ allocator->SetCurrentFrameIndex(frameIndex);
+}
+
+VMA_CALL_PRE void VMA_CALL_POST vmaCalculateStatistics(
+ VmaAllocator allocator,
+ VmaTotalStatistics* pStats)
+{
+ VMA_ASSERT(allocator && pStats);
+ VMA_DEBUG_GLOBAL_MUTEX_LOCK
+ allocator->CalculateStatistics(pStats);
+}
+
+VMA_CALL_PRE void VMA_CALL_POST vmaGetHeapBudgets(
+ VmaAllocator allocator,
+ VmaBudget* pBudgets)
+{
+ VMA_ASSERT(allocator && pBudgets);
+ VMA_DEBUG_GLOBAL_MUTEX_LOCK
+ allocator->GetHeapBudgets(pBudgets, 0, allocator->GetMemoryHeapCount());
+}
+
+#if VMA_STATS_STRING_ENABLED
+
+VMA_CALL_PRE void VMA_CALL_POST vmaBuildStatsString(
+ VmaAllocator allocator,
+ char** ppStatsString,
+ VkBool32 detailedMap)
+{
+ VMA_ASSERT(allocator && ppStatsString);
+ VMA_DEBUG_GLOBAL_MUTEX_LOCK
+
+ VmaStringBuilder sb(allocator->GetAllocationCallbacks());
+ {
+ VmaBudget budgets[VK_MAX_MEMORY_HEAPS];
+ allocator->GetHeapBudgets(budgets, 0, allocator->GetMemoryHeapCount());
+
+ VmaTotalStatistics stats;
+ allocator->CalculateStatistics(&stats);
+
+ VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
+ json.BeginObject();
+ {
+ json.WriteString("General");
+ json.BeginObject();
+ {
+ const VkPhysicalDeviceProperties& deviceProperties = allocator->m_PhysicalDeviceProperties;
+ const VkPhysicalDeviceMemoryProperties& memoryProperties = allocator->m_MemProps;
+
+ json.WriteString("API");
+ json.WriteString("Vulkan");
+
+ json.WriteString("apiVersion");
+ json.BeginString();
+ json.ContinueString(VK_VERSION_MAJOR(deviceProperties.apiVersion));
+ json.ContinueString(".");
+ json.ContinueString(VK_VERSION_MINOR(deviceProperties.apiVersion));
+ json.ContinueString(".");
+ json.ContinueString(VK_VERSION_PATCH(deviceProperties.apiVersion));
+ json.EndString();
+
+ json.WriteString("GPU");
+ json.WriteString(deviceProperties.deviceName);
+ json.WriteString("deviceType");
+ json.WriteNumber(static_cast<uint32_t>(deviceProperties.deviceType));
+
+ json.WriteString("maxMemoryAllocationCount");
+ json.WriteNumber(deviceProperties.limits.maxMemoryAllocationCount);
+ 
json.WriteString("bufferImageGranularity"); + json.WriteNumber(deviceProperties.limits.bufferImageGranularity); + json.WriteString("nonCoherentAtomSize"); + json.WriteNumber(deviceProperties.limits.nonCoherentAtomSize); + + json.WriteString("memoryHeapCount"); + json.WriteNumber(memoryProperties.memoryHeapCount); + json.WriteString("memoryTypeCount"); + json.WriteNumber(memoryProperties.memoryTypeCount); + } + json.EndObject(); + } + { + json.WriteString("Total"); + VmaPrintDetailedStatistics(json, stats.total); + } + { + json.WriteString("MemoryInfo"); + json.BeginObject(); + { + for (uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex) + { + json.BeginString("Heap "); + json.ContinueString(heapIndex); + json.EndString(); + json.BeginObject(); + { + const VkMemoryHeap& heapInfo = allocator->m_MemProps.memoryHeaps[heapIndex]; + json.WriteString("Flags"); + json.BeginArray(true); + { + if (heapInfo.flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) + json.WriteString("DEVICE_LOCAL"); + #if VMA_VULKAN_VERSION >= 1001000 + if (heapInfo.flags & VK_MEMORY_HEAP_MULTI_INSTANCE_BIT) + json.WriteString("MULTI_INSTANCE"); + #endif + + VkMemoryHeapFlags flags = heapInfo.flags & + ~(VK_MEMORY_HEAP_DEVICE_LOCAL_BIT + #if VMA_VULKAN_VERSION >= 1001000 + | VK_MEMORY_HEAP_MULTI_INSTANCE_BIT + #endif + ); + if (flags != 0) + json.WriteNumber(flags); + } + json.EndArray(); + + json.WriteString("Size"); + json.WriteNumber(heapInfo.size); + + json.WriteString("Budget"); + json.BeginObject(); + { + json.WriteString("BudgetBytes"); + json.WriteNumber(budgets[heapIndex].budget); + json.WriteString("UsageBytes"); + json.WriteNumber(budgets[heapIndex].usage); + } + json.EndObject(); + + json.WriteString("Stats"); + VmaPrintDetailedStatistics(json, stats.memoryHeap[heapIndex]); + + json.WriteString("MemoryPools"); + json.BeginObject(); + { + for (uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex) + { + if (allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex) + { + json.BeginString("Type "); + json.ContinueString(typeIndex); + json.EndString(); + json.BeginObject(); + { + json.WriteString("Flags"); + json.BeginArray(true); + { + VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags; + if (flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) + json.WriteString("DEVICE_LOCAL"); + if (flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) + json.WriteString("HOST_VISIBLE"); + if (flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) + json.WriteString("HOST_COHERENT"); + if (flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) + json.WriteString("HOST_CACHED"); + if (flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) + json.WriteString("LAZILY_ALLOCATED"); + #if VMA_VULKAN_VERSION >= 1001000 + if (flags & VK_MEMORY_PROPERTY_PROTECTED_BIT) + json.WriteString("PROTECTED"); + #endif + #if VK_AMD_device_coherent_memory + if (flags & VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY) + json.WriteString("DEVICE_COHERENT_AMD"); + if (flags & VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD_COPY) + json.WriteString("DEVICE_UNCACHED_AMD"); + #endif + + flags &= ~(VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT + #if VMA_VULKAN_VERSION >= 1001000 + | VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT + #endif + #if VK_AMD_device_coherent_memory + | VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY + | VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD_COPY + #endif + | VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT + | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT + | VK_MEMORY_PROPERTY_HOST_CACHED_BIT); + if (flags != 0) + 
json.WriteNumber(flags); + } + json.EndArray(); + + json.WriteString("Stats"); + VmaPrintDetailedStatistics(json, stats.memoryType[typeIndex]); + } + json.EndObject(); + } + } + + } + json.EndObject(); + } + json.EndObject(); + } + } + json.EndObject(); + } + + if (detailedMap == VK_TRUE) + allocator->PrintDetailedMap(json); + + json.EndObject(); + } + + *ppStatsString = VmaCreateStringCopy(allocator->GetAllocationCallbacks(), sb.GetData(), sb.GetLength()); +} + +VMA_CALL_PRE void VMA_CALL_POST vmaFreeStatsString( + VmaAllocator allocator, + char* pStatsString) +{ + if(pStatsString != VMA_NULL) + { + VMA_ASSERT(allocator); + VmaFreeString(allocator->GetAllocationCallbacks(), pStatsString); + } +} + +#endif // VMA_STATS_STRING_ENABLED + +/* +This function is not protected by any mutex because it just reads immutable data. +*/ +VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndex( + VmaAllocator allocator, + uint32_t memoryTypeBits, + const VmaAllocationCreateInfo* pAllocationCreateInfo, + uint32_t* pMemoryTypeIndex) +{ + VMA_ASSERT(allocator != VK_NULL_HANDLE); + VMA_ASSERT(pAllocationCreateInfo != VMA_NULL); + VMA_ASSERT(pMemoryTypeIndex != VMA_NULL); + + return allocator->FindMemoryTypeIndex(memoryTypeBits, pAllocationCreateInfo, VmaBufferImageUsage::UNKNOWN, pMemoryTypeIndex); +} + +VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndexForBufferInfo( + VmaAllocator allocator, + const VkBufferCreateInfo* pBufferCreateInfo, + const VmaAllocationCreateInfo* pAllocationCreateInfo, + uint32_t* pMemoryTypeIndex) +{ + VMA_ASSERT(allocator != VK_NULL_HANDLE); + VMA_ASSERT(pBufferCreateInfo != VMA_NULL); + VMA_ASSERT(pAllocationCreateInfo != VMA_NULL); + VMA_ASSERT(pMemoryTypeIndex != VMA_NULL); + + const VkDevice hDev = allocator->m_hDevice; + const VmaVulkanFunctions* funcs = &allocator->GetVulkanFunctions(); + VkResult res; + +#if VMA_KHR_MAINTENANCE4 || VMA_VULKAN_VERSION >= 1003000 + if(funcs->vkGetDeviceBufferMemoryRequirements) + { + // Can query straight from VkBufferCreateInfo :) + VkDeviceBufferMemoryRequirementsKHR devBufMemReq = {VK_STRUCTURE_TYPE_DEVICE_BUFFER_MEMORY_REQUIREMENTS_KHR}; + devBufMemReq.pCreateInfo = pBufferCreateInfo; + + VkMemoryRequirements2 memReq = {VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2}; + (*funcs->vkGetDeviceBufferMemoryRequirements)(hDev, &devBufMemReq, &memReq); + + res = allocator->FindMemoryTypeIndex( + memReq.memoryRequirements.memoryTypeBits, pAllocationCreateInfo, + VmaBufferImageUsage(*pBufferCreateInfo, allocator->m_UseKhrMaintenance5), pMemoryTypeIndex); + } + else +#endif // VMA_KHR_MAINTENANCE4 || VMA_VULKAN_VERSION >= 1003000 + { + // Must create a dummy buffer to query :( + VkBuffer hBuffer = VK_NULL_HANDLE; + res = funcs->vkCreateBuffer( + hDev, pBufferCreateInfo, allocator->GetAllocationCallbacks(), &hBuffer); + if(res == VK_SUCCESS) + { + VkMemoryRequirements memReq = {}; + funcs->vkGetBufferMemoryRequirements(hDev, hBuffer, &memReq); + + res = allocator->FindMemoryTypeIndex( + memReq.memoryTypeBits, pAllocationCreateInfo, + VmaBufferImageUsage(*pBufferCreateInfo, allocator->m_UseKhrMaintenance5), pMemoryTypeIndex); + + funcs->vkDestroyBuffer( + hDev, hBuffer, allocator->GetAllocationCallbacks()); + } + } + return res; +} + +VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndexForImageInfo( + VmaAllocator allocator, + const VkImageCreateInfo* pImageCreateInfo, + const VmaAllocationCreateInfo* pAllocationCreateInfo, + uint32_t* pMemoryTypeIndex) +{ + VMA_ASSERT(allocator != VK_NULL_HANDLE); + VMA_ASSERT(pImageCreateInfo != 
VMA_NULL); + VMA_ASSERT(pAllocationCreateInfo != VMA_NULL); + VMA_ASSERT(pMemoryTypeIndex != VMA_NULL); + + const VkDevice hDev = allocator->m_hDevice; + const VmaVulkanFunctions* funcs = &allocator->GetVulkanFunctions(); + VkResult res; + +#if VMA_KHR_MAINTENANCE4 || VMA_VULKAN_VERSION >= 1003000 + if(funcs->vkGetDeviceImageMemoryRequirements) + { + // Can query straight from VkImageCreateInfo :) + VkDeviceImageMemoryRequirementsKHR devImgMemReq = {VK_STRUCTURE_TYPE_DEVICE_IMAGE_MEMORY_REQUIREMENTS_KHR}; + devImgMemReq.pCreateInfo = pImageCreateInfo; + VMA_ASSERT(pImageCreateInfo->tiling != VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT_COPY && (pImageCreateInfo->flags & VK_IMAGE_CREATE_DISJOINT_BIT_COPY) == 0 && + "Cannot use this VkImageCreateInfo with vmaFindMemoryTypeIndexForImageInfo as I don't know what to pass as VkDeviceImageMemoryRequirements::planeAspect."); + + VkMemoryRequirements2 memReq = {VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2}; + (*funcs->vkGetDeviceImageMemoryRequirements)(hDev, &devImgMemReq, &memReq); + + res = allocator->FindMemoryTypeIndex( + memReq.memoryRequirements.memoryTypeBits, pAllocationCreateInfo, + VmaBufferImageUsage(*pImageCreateInfo), pMemoryTypeIndex); + } + else +#endif // VMA_KHR_MAINTENANCE4 || VMA_VULKAN_VERSION >= 1003000 + { + // Must create a dummy image to query :( + VkImage hImage = VK_NULL_HANDLE; + res = funcs->vkCreateImage( + hDev, pImageCreateInfo, allocator->GetAllocationCallbacks(), &hImage); + if(res == VK_SUCCESS) + { + VkMemoryRequirements memReq = {}; + funcs->vkGetImageMemoryRequirements(hDev, hImage, &memReq); + + res = allocator->FindMemoryTypeIndex( + memReq.memoryTypeBits, pAllocationCreateInfo, + VmaBufferImageUsage(*pImageCreateInfo), pMemoryTypeIndex); + + funcs->vkDestroyImage( + hDev, hImage, allocator->GetAllocationCallbacks()); + } + } + return res; +} + +VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreatePool( + VmaAllocator allocator, + const VmaPoolCreateInfo* pCreateInfo, + VmaPool* pPool) +{ + VMA_ASSERT(allocator && pCreateInfo && pPool); + + VMA_DEBUG_LOG("vmaCreatePool"); + + VMA_DEBUG_GLOBAL_MUTEX_LOCK + + return allocator->CreatePool(pCreateInfo, pPool); +} + +VMA_CALL_PRE void VMA_CALL_POST vmaDestroyPool( + VmaAllocator allocator, + VmaPool pool) +{ + VMA_ASSERT(allocator); + + if(pool == VK_NULL_HANDLE) + { + return; + } + + VMA_DEBUG_LOG("vmaDestroyPool"); + + VMA_DEBUG_GLOBAL_MUTEX_LOCK + + allocator->DestroyPool(pool); +} + +VMA_CALL_PRE void VMA_CALL_POST vmaGetPoolStatistics( + VmaAllocator allocator, + VmaPool pool, + VmaStatistics* pPoolStats) +{ + VMA_ASSERT(allocator && pool && pPoolStats); + + VMA_DEBUG_GLOBAL_MUTEX_LOCK + + allocator->GetPoolStatistics(pool, pPoolStats); +} + +VMA_CALL_PRE void VMA_CALL_POST vmaCalculatePoolStatistics( + VmaAllocator allocator, + VmaPool pool, + VmaDetailedStatistics* pPoolStats) +{ + VMA_ASSERT(allocator && pool && pPoolStats); + + VMA_DEBUG_GLOBAL_MUTEX_LOCK + + allocator->CalculatePoolStatistics(pool, pPoolStats); +} + +VMA_CALL_PRE VkResult VMA_CALL_POST vmaCheckPoolCorruption(VmaAllocator allocator, VmaPool pool) +{ + VMA_ASSERT(allocator && pool); + + VMA_DEBUG_GLOBAL_MUTEX_LOCK + + VMA_DEBUG_LOG("vmaCheckPoolCorruption"); + + return allocator->CheckPoolCorruption(pool); +} + +VMA_CALL_PRE void VMA_CALL_POST vmaGetPoolName( + VmaAllocator allocator, + VmaPool pool, + const char** ppName) +{ + VMA_ASSERT(allocator && pool && ppName); + + VMA_DEBUG_LOG("vmaGetPoolName"); + + VMA_DEBUG_GLOBAL_MUTEX_LOCK + + *ppName = pool->GetName(); +} + +VMA_CALL_PRE void VMA_CALL_POST 
vmaSetPoolName( + VmaAllocator allocator, + VmaPool pool, + const char* pName) +{ + VMA_ASSERT(allocator && pool); + + VMA_DEBUG_LOG("vmaSetPoolName"); + + VMA_DEBUG_GLOBAL_MUTEX_LOCK + + pool->SetName(pName); +} + +VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemory( + VmaAllocator allocator, + const VkMemoryRequirements* pVkMemoryRequirements, + const VmaAllocationCreateInfo* pCreateInfo, + VmaAllocation* pAllocation, + VmaAllocationInfo* pAllocationInfo) +{ + VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation); + + VMA_DEBUG_LOG("vmaAllocateMemory"); + + VMA_DEBUG_GLOBAL_MUTEX_LOCK + + VkResult result = allocator->AllocateMemory( + *pVkMemoryRequirements, + false, // requiresDedicatedAllocation + false, // prefersDedicatedAllocation + VK_NULL_HANDLE, // dedicatedBuffer + VK_NULL_HANDLE, // dedicatedImage + VmaBufferImageUsage::UNKNOWN, // dedicatedBufferImageUsage + *pCreateInfo, + VMA_SUBALLOCATION_TYPE_UNKNOWN, + 1, // allocationCount + pAllocation); + + if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS) + { + allocator->GetAllocationInfo(*pAllocation, pAllocationInfo); + } + + return result; +} + +VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryPages( + VmaAllocator allocator, + const VkMemoryRequirements* pVkMemoryRequirements, + const VmaAllocationCreateInfo* pCreateInfo, + size_t allocationCount, + VmaAllocation* pAllocations, + VmaAllocationInfo* pAllocationInfo) +{ + if(allocationCount == 0) + { + return VK_SUCCESS; + } + + VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocations); + + VMA_DEBUG_LOG("vmaAllocateMemoryPages"); + + VMA_DEBUG_GLOBAL_MUTEX_LOCK + + VkResult result = allocator->AllocateMemory( + *pVkMemoryRequirements, + false, // requiresDedicatedAllocation + false, // prefersDedicatedAllocation + VK_NULL_HANDLE, // dedicatedBuffer + VK_NULL_HANDLE, // dedicatedImage + VmaBufferImageUsage::UNKNOWN, // dedicatedBufferImageUsage + *pCreateInfo, + VMA_SUBALLOCATION_TYPE_UNKNOWN, + allocationCount, + pAllocations); + + if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS) + { + for(size_t i = 0; i < allocationCount; ++i) + { + allocator->GetAllocationInfo(pAllocations[i], pAllocationInfo + i); + } + } + + return result; +} + +VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryForBuffer( + VmaAllocator allocator, + VkBuffer buffer, + const VmaAllocationCreateInfo* pCreateInfo, + VmaAllocation* pAllocation, + VmaAllocationInfo* pAllocationInfo) +{ + VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation); + + VMA_DEBUG_LOG("vmaAllocateMemoryForBuffer"); + + VMA_DEBUG_GLOBAL_MUTEX_LOCK + + VkMemoryRequirements vkMemReq = {}; + bool requiresDedicatedAllocation = false; + bool prefersDedicatedAllocation = false; + allocator->GetBufferMemoryRequirements(buffer, vkMemReq, + requiresDedicatedAllocation, + prefersDedicatedAllocation); + + VkResult result = allocator->AllocateMemory( + vkMemReq, + requiresDedicatedAllocation, + prefersDedicatedAllocation, + buffer, // dedicatedBuffer + VK_NULL_HANDLE, // dedicatedImage + VmaBufferImageUsage::UNKNOWN, // dedicatedBufferImageUsage + *pCreateInfo, + VMA_SUBALLOCATION_TYPE_BUFFER, + 1, // allocationCount + pAllocation); + + if(pAllocationInfo && result == VK_SUCCESS) + { + allocator->GetAllocationInfo(*pAllocation, pAllocationInfo); + } + + return result; +} + +VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryForImage( + VmaAllocator allocator, + VkImage image, + const VmaAllocationCreateInfo* pCreateInfo, + VmaAllocation* pAllocation, + 
VmaAllocationInfo* pAllocationInfo) +{ + VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation); + + VMA_DEBUG_LOG("vmaAllocateMemoryForImage"); + + VMA_DEBUG_GLOBAL_MUTEX_LOCK + + VkMemoryRequirements vkMemReq = {}; + bool requiresDedicatedAllocation = false; + bool prefersDedicatedAllocation = false; + allocator->GetImageMemoryRequirements(image, vkMemReq, + requiresDedicatedAllocation, prefersDedicatedAllocation); + + VkResult result = allocator->AllocateMemory( + vkMemReq, + requiresDedicatedAllocation, + prefersDedicatedAllocation, + VK_NULL_HANDLE, // dedicatedBuffer + image, // dedicatedImage + VmaBufferImageUsage::UNKNOWN, // dedicatedBufferImageUsage + *pCreateInfo, + VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN, + 1, // allocationCount + pAllocation); + + if(pAllocationInfo && result == VK_SUCCESS) + { + allocator->GetAllocationInfo(*pAllocation, pAllocationInfo); + } + + return result; +} + +VMA_CALL_PRE void VMA_CALL_POST vmaFreeMemory( + VmaAllocator allocator, + VmaAllocation allocation) +{ + VMA_ASSERT(allocator); + + if(allocation == VK_NULL_HANDLE) + { + return; + } + + VMA_DEBUG_LOG("vmaFreeMemory"); + + VMA_DEBUG_GLOBAL_MUTEX_LOCK + + allocator->FreeMemory( + 1, // allocationCount + &allocation); +} + +VMA_CALL_PRE void VMA_CALL_POST vmaFreeMemoryPages( + VmaAllocator allocator, + size_t allocationCount, + const VmaAllocation* pAllocations) +{ + if(allocationCount == 0) + { + return; + } + + VMA_ASSERT(allocator); + + VMA_DEBUG_LOG("vmaFreeMemoryPages"); + + VMA_DEBUG_GLOBAL_MUTEX_LOCK + + allocator->FreeMemory(allocationCount, pAllocations); +} + +VMA_CALL_PRE void VMA_CALL_POST vmaGetAllocationInfo( + VmaAllocator allocator, + VmaAllocation allocation, + VmaAllocationInfo* pAllocationInfo) +{ + VMA_ASSERT(allocator && allocation && pAllocationInfo); + + VMA_DEBUG_GLOBAL_MUTEX_LOCK + + allocator->GetAllocationInfo(allocation, pAllocationInfo); +} + +VMA_CALL_PRE void VMA_CALL_POST vmaGetAllocationInfo2( + VmaAllocator allocator, + VmaAllocation allocation, + VmaAllocationInfo2* pAllocationInfo) +{ + VMA_ASSERT(allocator && allocation && pAllocationInfo); + + VMA_DEBUG_GLOBAL_MUTEX_LOCK + + allocator->GetAllocationInfo2(allocation, pAllocationInfo); +} + +VMA_CALL_PRE void VMA_CALL_POST vmaSetAllocationUserData( + VmaAllocator allocator, + VmaAllocation allocation, + void* pUserData) +{ + VMA_ASSERT(allocator && allocation); + + VMA_DEBUG_GLOBAL_MUTEX_LOCK + + allocation->SetUserData(allocator, pUserData); +} + +VMA_CALL_PRE void VMA_CALL_POST vmaSetAllocationName( + VmaAllocator VMA_NOT_NULL allocator, + VmaAllocation VMA_NOT_NULL allocation, + const char* VMA_NULLABLE pName) +{ + allocation->SetName(allocator, pName); +} + +VMA_CALL_PRE void VMA_CALL_POST vmaGetAllocationMemoryProperties( + VmaAllocator VMA_NOT_NULL allocator, + VmaAllocation VMA_NOT_NULL allocation, + VkMemoryPropertyFlags* VMA_NOT_NULL pFlags) +{ + VMA_ASSERT(allocator && allocation && pFlags); + const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex(); + *pFlags = allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags; +} + +VMA_CALL_PRE VkResult VMA_CALL_POST vmaMapMemory( + VmaAllocator allocator, + VmaAllocation allocation, + void** ppData) +{ + VMA_ASSERT(allocator && allocation && ppData); + + VMA_DEBUG_GLOBAL_MUTEX_LOCK + + return allocator->Map(allocation, ppData); +} + +VMA_CALL_PRE void VMA_CALL_POST vmaUnmapMemory( + VmaAllocator allocator, + VmaAllocation allocation) +{ + VMA_ASSERT(allocator && allocation); + + VMA_DEBUG_GLOBAL_MUTEX_LOCK + + 
allocator->Unmap(allocation); +} + +VMA_CALL_PRE VkResult VMA_CALL_POST vmaFlushAllocation( + VmaAllocator allocator, + VmaAllocation allocation, + VkDeviceSize offset, + VkDeviceSize size) +{ + VMA_ASSERT(allocator && allocation); + + VMA_DEBUG_LOG("vmaFlushAllocation"); + + VMA_DEBUG_GLOBAL_MUTEX_LOCK + + return allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_FLUSH); +} + +VMA_CALL_PRE VkResult VMA_CALL_POST vmaInvalidateAllocation( + VmaAllocator allocator, + VmaAllocation allocation, + VkDeviceSize offset, + VkDeviceSize size) +{ + VMA_ASSERT(allocator && allocation); + + VMA_DEBUG_LOG("vmaInvalidateAllocation"); + + VMA_DEBUG_GLOBAL_MUTEX_LOCK + + return allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_INVALIDATE); +} + +VMA_CALL_PRE VkResult VMA_CALL_POST vmaFlushAllocations( + VmaAllocator allocator, + uint32_t allocationCount, + const VmaAllocation* allocations, + const VkDeviceSize* offsets, + const VkDeviceSize* sizes) +{ + VMA_ASSERT(allocator); + + if(allocationCount == 0) + { + return VK_SUCCESS; + } + + VMA_ASSERT(allocations); + + VMA_DEBUG_LOG("vmaFlushAllocations"); + + VMA_DEBUG_GLOBAL_MUTEX_LOCK + + return allocator->FlushOrInvalidateAllocations(allocationCount, allocations, offsets, sizes, VMA_CACHE_FLUSH); +} + +VMA_CALL_PRE VkResult VMA_CALL_POST vmaInvalidateAllocations( + VmaAllocator allocator, + uint32_t allocationCount, + const VmaAllocation* allocations, + const VkDeviceSize* offsets, + const VkDeviceSize* sizes) +{ + VMA_ASSERT(allocator); + + if(allocationCount == 0) + { + return VK_SUCCESS; + } + + VMA_ASSERT(allocations); + + VMA_DEBUG_LOG("vmaInvalidateAllocations"); + + VMA_DEBUG_GLOBAL_MUTEX_LOCK + + return allocator->FlushOrInvalidateAllocations(allocationCount, allocations, offsets, sizes, VMA_CACHE_INVALIDATE); +} + +VMA_CALL_PRE VkResult VMA_CALL_POST vmaCopyMemoryToAllocation( + VmaAllocator allocator, + const void* pSrcHostPointer, + VmaAllocation dstAllocation, + VkDeviceSize dstAllocationLocalOffset, + VkDeviceSize size) +{ + VMA_ASSERT(allocator && pSrcHostPointer && dstAllocation); + + if(size == 0) + { + return VK_SUCCESS; + } + + VMA_DEBUG_LOG("vmaCopyMemoryToAllocation"); + + VMA_DEBUG_GLOBAL_MUTEX_LOCK + + return allocator->CopyMemoryToAllocation(pSrcHostPointer, dstAllocation, dstAllocationLocalOffset, size); +} + +VMA_CALL_PRE VkResult VMA_CALL_POST vmaCopyAllocationToMemory( + VmaAllocator allocator, + VmaAllocation srcAllocation, + VkDeviceSize srcAllocationLocalOffset, + void* pDstHostPointer, + VkDeviceSize size) +{ + VMA_ASSERT(allocator && srcAllocation && pDstHostPointer); + + if(size == 0) + { + return VK_SUCCESS; + } + + VMA_DEBUG_LOG("vmaCopyAllocationToMemory"); + + VMA_DEBUG_GLOBAL_MUTEX_LOCK + + return allocator->CopyAllocationToMemory(srcAllocation, srcAllocationLocalOffset, pDstHostPointer, size); +} + +VMA_CALL_PRE VkResult VMA_CALL_POST vmaCheckCorruption( + VmaAllocator allocator, + uint32_t memoryTypeBits) +{ + VMA_ASSERT(allocator); + + VMA_DEBUG_LOG("vmaCheckCorruption"); + + VMA_DEBUG_GLOBAL_MUTEX_LOCK + + return allocator->CheckCorruption(memoryTypeBits); +} + +VMA_CALL_PRE VkResult VMA_CALL_POST vmaBeginDefragmentation( + VmaAllocator allocator, + const VmaDefragmentationInfo* pInfo, + VmaDefragmentationContext* pContext) +{ + VMA_ASSERT(allocator && pInfo && pContext); + + VMA_DEBUG_LOG("vmaBeginDefragmentation"); + + if (pInfo->pool != VMA_NULL) + { + // Check if run on supported algorithms + if (pInfo->pool->m_BlockVector.GetAlgorithm() & 
VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT) + return VK_ERROR_FEATURE_NOT_PRESENT; + } + + VMA_DEBUG_GLOBAL_MUTEX_LOCK + + *pContext = vma_new(allocator, VmaDefragmentationContext_T)(allocator, *pInfo); + return VK_SUCCESS; +} + +VMA_CALL_PRE void VMA_CALL_POST vmaEndDefragmentation( + VmaAllocator allocator, + VmaDefragmentationContext context, + VmaDefragmentationStats* pStats) +{ + VMA_ASSERT(allocator && context); + + VMA_DEBUG_LOG("vmaEndDefragmentation"); + + VMA_DEBUG_GLOBAL_MUTEX_LOCK + + if (pStats) + context->GetStats(*pStats); + vma_delete(allocator, context); +} + +VMA_CALL_PRE VkResult VMA_CALL_POST vmaBeginDefragmentationPass( + VmaAllocator VMA_NOT_NULL allocator, + VmaDefragmentationContext VMA_NOT_NULL context, + VmaDefragmentationPassMoveInfo* VMA_NOT_NULL pPassInfo) +{ + VMA_ASSERT(context && pPassInfo); + + VMA_DEBUG_LOG("vmaBeginDefragmentationPass"); + + VMA_DEBUG_GLOBAL_MUTEX_LOCK + + return context->DefragmentPassBegin(*pPassInfo); +} + +VMA_CALL_PRE VkResult VMA_CALL_POST vmaEndDefragmentationPass( + VmaAllocator VMA_NOT_NULL allocator, + VmaDefragmentationContext VMA_NOT_NULL context, + VmaDefragmentationPassMoveInfo* VMA_NOT_NULL pPassInfo) +{ + VMA_ASSERT(context && pPassInfo); + + VMA_DEBUG_LOG("vmaEndDefragmentationPass"); + + VMA_DEBUG_GLOBAL_MUTEX_LOCK + + return context->DefragmentPassEnd(*pPassInfo); +} + +VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindBufferMemory( + VmaAllocator allocator, + VmaAllocation allocation, + VkBuffer buffer) +{ + VMA_ASSERT(allocator && allocation && buffer); + + VMA_DEBUG_LOG("vmaBindBufferMemory"); + + VMA_DEBUG_GLOBAL_MUTEX_LOCK + + return allocator->BindBufferMemory(allocation, 0, buffer, VMA_NULL); +} + +VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindBufferMemory2( + VmaAllocator allocator, + VmaAllocation allocation, + VkDeviceSize allocationLocalOffset, + VkBuffer buffer, + const void* pNext) +{ + VMA_ASSERT(allocator && allocation && buffer); + + VMA_DEBUG_LOG("vmaBindBufferMemory2"); + + VMA_DEBUG_GLOBAL_MUTEX_LOCK + + return allocator->BindBufferMemory(allocation, allocationLocalOffset, buffer, pNext); +} + +VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindImageMemory( + VmaAllocator allocator, + VmaAllocation allocation, + VkImage image) +{ + VMA_ASSERT(allocator && allocation && image); + + VMA_DEBUG_LOG("vmaBindImageMemory"); + + VMA_DEBUG_GLOBAL_MUTEX_LOCK + + return allocator->BindImageMemory(allocation, 0, image, VMA_NULL); +} + +VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindImageMemory2( + VmaAllocator allocator, + VmaAllocation allocation, + VkDeviceSize allocationLocalOffset, + VkImage image, + const void* pNext) +{ + VMA_ASSERT(allocator && allocation && image); + + VMA_DEBUG_LOG("vmaBindImageMemory2"); + + VMA_DEBUG_GLOBAL_MUTEX_LOCK + + return allocator->BindImageMemory(allocation, allocationLocalOffset, image, pNext); +} + +VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateBuffer( + VmaAllocator allocator, + const VkBufferCreateInfo* pBufferCreateInfo, + const VmaAllocationCreateInfo* pAllocationCreateInfo, + VkBuffer* pBuffer, + VmaAllocation* pAllocation, + VmaAllocationInfo* pAllocationInfo) +{ + VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation); + + if(pBufferCreateInfo->size == 0) + { + return VK_ERROR_INITIALIZATION_FAILED; + } + if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_COPY) != 0 && + !allocator->m_UseKhrBufferDeviceAddress) + { + VMA_ASSERT(0 && "Creating a buffer with VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT is not valid if 
VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT was not used."); + return VK_ERROR_INITIALIZATION_FAILED; + } + + VMA_DEBUG_LOG("vmaCreateBuffer"); + + VMA_DEBUG_GLOBAL_MUTEX_LOCK + + *pBuffer = VK_NULL_HANDLE; + *pAllocation = VK_NULL_HANDLE; + + // 1. Create VkBuffer. + VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)( + allocator->m_hDevice, + pBufferCreateInfo, + allocator->GetAllocationCallbacks(), + pBuffer); + if(res >= 0) + { + // 2. vkGetBufferMemoryRequirements. + VkMemoryRequirements vkMemReq = {}; + bool requiresDedicatedAllocation = false; + bool prefersDedicatedAllocation = false; + allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq, + requiresDedicatedAllocation, prefersDedicatedAllocation); + + // 3. Allocate memory using allocator. + res = allocator->AllocateMemory( + vkMemReq, + requiresDedicatedAllocation, + prefersDedicatedAllocation, + *pBuffer, // dedicatedBuffer + VK_NULL_HANDLE, // dedicatedImage + VmaBufferImageUsage(*pBufferCreateInfo, allocator->m_UseKhrMaintenance5), // dedicatedBufferImageUsage + *pAllocationCreateInfo, + VMA_SUBALLOCATION_TYPE_BUFFER, + 1, // allocationCount + pAllocation); + + if(res >= 0) + { + // 3. Bind buffer with memory. + if((pAllocationCreateInfo->flags & VMA_ALLOCATION_CREATE_DONT_BIND_BIT) == 0) + { + res = allocator->BindBufferMemory(*pAllocation, 0, *pBuffer, VMA_NULL); + } + if(res >= 0) + { + // All steps succeeded. + #if VMA_STATS_STRING_ENABLED + (*pAllocation)->InitBufferUsage(*pBufferCreateInfo, allocator->m_UseKhrMaintenance5); + #endif + if(pAllocationInfo != VMA_NULL) + { + allocator->GetAllocationInfo(*pAllocation, pAllocationInfo); + } + + return VK_SUCCESS; + } + allocator->FreeMemory( + 1, // allocationCount + pAllocation); + *pAllocation = VK_NULL_HANDLE; + (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks()); + *pBuffer = VK_NULL_HANDLE; + return res; + } + (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks()); + *pBuffer = VK_NULL_HANDLE; + return res; + } + return res; +} + +VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateBufferWithAlignment( + VmaAllocator allocator, + const VkBufferCreateInfo* pBufferCreateInfo, + const VmaAllocationCreateInfo* pAllocationCreateInfo, + VkDeviceSize minAlignment, + VkBuffer* pBuffer, + VmaAllocation* pAllocation, + VmaAllocationInfo* pAllocationInfo) +{ + VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && VmaIsPow2(minAlignment) && pBuffer && pAllocation); + + if(pBufferCreateInfo->size == 0) + { + return VK_ERROR_INITIALIZATION_FAILED; + } + if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_COPY) != 0 && + !allocator->m_UseKhrBufferDeviceAddress) + { + VMA_ASSERT(0 && "Creating a buffer with VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT is not valid if VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT was not used."); + return VK_ERROR_INITIALIZATION_FAILED; + } + + VMA_DEBUG_LOG("vmaCreateBufferWithAlignment"); + + VMA_DEBUG_GLOBAL_MUTEX_LOCK + + *pBuffer = VK_NULL_HANDLE; + *pAllocation = VK_NULL_HANDLE; + + // 1. Create VkBuffer. + VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)( + allocator->m_hDevice, + pBufferCreateInfo, + allocator->GetAllocationCallbacks(), + pBuffer); + if(res >= 0) + { + // 2. vkGetBufferMemoryRequirements. 
+ VkMemoryRequirements vkMemReq = {}; + bool requiresDedicatedAllocation = false; + bool prefersDedicatedAllocation = false; + allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq, + requiresDedicatedAllocation, prefersDedicatedAllocation); + + // 2a. Include minAlignment + vkMemReq.alignment = VMA_MAX(vkMemReq.alignment, minAlignment); + + // 3. Allocate memory using allocator. + res = allocator->AllocateMemory( + vkMemReq, + requiresDedicatedAllocation, + prefersDedicatedAllocation, + *pBuffer, // dedicatedBuffer + VK_NULL_HANDLE, // dedicatedImage + VmaBufferImageUsage(*pBufferCreateInfo, allocator->m_UseKhrMaintenance5), // dedicatedBufferImageUsage + *pAllocationCreateInfo, + VMA_SUBALLOCATION_TYPE_BUFFER, + 1, // allocationCount + pAllocation); + + if(res >= 0) + { + // 3. Bind buffer with memory. + if((pAllocationCreateInfo->flags & VMA_ALLOCATION_CREATE_DONT_BIND_BIT) == 0) + { + res = allocator->BindBufferMemory(*pAllocation, 0, *pBuffer, VMA_NULL); + } + if(res >= 0) + { + // All steps succeeded. + #if VMA_STATS_STRING_ENABLED + (*pAllocation)->InitBufferUsage(*pBufferCreateInfo, allocator->m_UseKhrMaintenance5); + #endif + if(pAllocationInfo != VMA_NULL) + { + allocator->GetAllocationInfo(*pAllocation, pAllocationInfo); + } + + return VK_SUCCESS; + } + allocator->FreeMemory( + 1, // allocationCount + pAllocation); + *pAllocation = VK_NULL_HANDLE; + (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks()); + *pBuffer = VK_NULL_HANDLE; + return res; + } + (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks()); + *pBuffer = VK_NULL_HANDLE; + return res; + } + return res; +} + +VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateAliasingBuffer( + VmaAllocator VMA_NOT_NULL allocator, + VmaAllocation VMA_NOT_NULL allocation, + const VkBufferCreateInfo* VMA_NOT_NULL pBufferCreateInfo, + VkBuffer VMA_NULLABLE_NON_DISPATCHABLE* VMA_NOT_NULL pBuffer) +{ + return vmaCreateAliasingBuffer2(allocator, allocation, 0, pBufferCreateInfo, pBuffer); +} + +VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateAliasingBuffer2( + VmaAllocator VMA_NOT_NULL allocator, + VmaAllocation VMA_NOT_NULL allocation, + VkDeviceSize allocationLocalOffset, + const VkBufferCreateInfo* VMA_NOT_NULL pBufferCreateInfo, + VkBuffer VMA_NULLABLE_NON_DISPATCHABLE* VMA_NOT_NULL pBuffer) +{ + VMA_ASSERT(allocator && pBufferCreateInfo && pBuffer && allocation); + VMA_ASSERT(allocationLocalOffset + pBufferCreateInfo->size <= allocation->GetSize()); + + VMA_DEBUG_LOG("vmaCreateAliasingBuffer2"); + + *pBuffer = VK_NULL_HANDLE; + + if (pBufferCreateInfo->size == 0) + { + return VK_ERROR_INITIALIZATION_FAILED; + } + if ((pBufferCreateInfo->usage & VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_COPY) != 0 && + !allocator->m_UseKhrBufferDeviceAddress) + { + VMA_ASSERT(0 && "Creating a buffer with VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT is not valid if VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT was not used."); + return VK_ERROR_INITIALIZATION_FAILED; + } + + VMA_DEBUG_GLOBAL_MUTEX_LOCK + + // 1. Create VkBuffer. + VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)( + allocator->m_hDevice, + pBufferCreateInfo, + allocator->GetAllocationCallbacks(), + pBuffer); + if (res >= 0) + { + // 2. Bind buffer with memory. 
+ res = allocator->BindBufferMemory(allocation, allocationLocalOffset, *pBuffer, VMA_NULL); + if (res >= 0) + { + return VK_SUCCESS; + } + (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks()); + } + return res; +} + +VMA_CALL_PRE void VMA_CALL_POST vmaDestroyBuffer( + VmaAllocator allocator, + VkBuffer buffer, + VmaAllocation allocation) +{ + VMA_ASSERT(allocator); + + if(buffer == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE) + { + return; + } + + VMA_DEBUG_LOG("vmaDestroyBuffer"); + + VMA_DEBUG_GLOBAL_MUTEX_LOCK + + if(buffer != VK_NULL_HANDLE) + { + (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks()); + } + + if(allocation != VK_NULL_HANDLE) + { + allocator->FreeMemory( + 1, // allocationCount + &allocation); + } +} + +VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateImage( + VmaAllocator allocator, + const VkImageCreateInfo* pImageCreateInfo, + const VmaAllocationCreateInfo* pAllocationCreateInfo, + VkImage* pImage, + VmaAllocation* pAllocation, + VmaAllocationInfo* pAllocationInfo) +{ + VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation); + + if(pImageCreateInfo->extent.width == 0 || + pImageCreateInfo->extent.height == 0 || + pImageCreateInfo->extent.depth == 0 || + pImageCreateInfo->mipLevels == 0 || + pImageCreateInfo->arrayLayers == 0) + { + return VK_ERROR_INITIALIZATION_FAILED; + } + + VMA_DEBUG_LOG("vmaCreateImage"); + + VMA_DEBUG_GLOBAL_MUTEX_LOCK + + *pImage = VK_NULL_HANDLE; + *pAllocation = VK_NULL_HANDLE; + + // 1. Create VkImage. + VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)( + allocator->m_hDevice, + pImageCreateInfo, + allocator->GetAllocationCallbacks(), + pImage); + if(res >= 0) + { + VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ? + VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL : + VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR; + + // 2. Allocate memory using allocator. + VkMemoryRequirements vkMemReq = {}; + bool requiresDedicatedAllocation = false; + bool prefersDedicatedAllocation = false; + allocator->GetImageMemoryRequirements(*pImage, vkMemReq, + requiresDedicatedAllocation, prefersDedicatedAllocation); + + res = allocator->AllocateMemory( + vkMemReq, + requiresDedicatedAllocation, + prefersDedicatedAllocation, + VK_NULL_HANDLE, // dedicatedBuffer + *pImage, // dedicatedImage + VmaBufferImageUsage(*pImageCreateInfo), // dedicatedBufferImageUsage + *pAllocationCreateInfo, + suballocType, + 1, // allocationCount + pAllocation); + + if(res >= 0) + { + // 3. Bind image with memory. + if((pAllocationCreateInfo->flags & VMA_ALLOCATION_CREATE_DONT_BIND_BIT) == 0) + { + res = allocator->BindImageMemory(*pAllocation, 0, *pImage, VMA_NULL); + } + if(res >= 0) + { + // All steps succeeded. 
+ #if VMA_STATS_STRING_ENABLED + (*pAllocation)->InitImageUsage(*pImageCreateInfo); + #endif + if(pAllocationInfo != VMA_NULL) + { + allocator->GetAllocationInfo(*pAllocation, pAllocationInfo); + } + + return VK_SUCCESS; + } + allocator->FreeMemory( + 1, // allocationCount + pAllocation); + *pAllocation = VK_NULL_HANDLE; + (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks()); + *pImage = VK_NULL_HANDLE; + return res; + } + (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks()); + *pImage = VK_NULL_HANDLE; + return res; + } + return res; +} + +VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateAliasingImage( + VmaAllocator VMA_NOT_NULL allocator, + VmaAllocation VMA_NOT_NULL allocation, + const VkImageCreateInfo* VMA_NOT_NULL pImageCreateInfo, + VkImage VMA_NULLABLE_NON_DISPATCHABLE* VMA_NOT_NULL pImage) +{ + return vmaCreateAliasingImage2(allocator, allocation, 0, pImageCreateInfo, pImage); +} + +VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateAliasingImage2( + VmaAllocator VMA_NOT_NULL allocator, + VmaAllocation VMA_NOT_NULL allocation, + VkDeviceSize allocationLocalOffset, + const VkImageCreateInfo* VMA_NOT_NULL pImageCreateInfo, + VkImage VMA_NULLABLE_NON_DISPATCHABLE* VMA_NOT_NULL pImage) +{ + VMA_ASSERT(allocator && pImageCreateInfo && pImage && allocation); + + *pImage = VK_NULL_HANDLE; + + VMA_DEBUG_LOG("vmaCreateImage2"); + + if (pImageCreateInfo->extent.width == 0 || + pImageCreateInfo->extent.height == 0 || + pImageCreateInfo->extent.depth == 0 || + pImageCreateInfo->mipLevels == 0 || + pImageCreateInfo->arrayLayers == 0) + { + return VK_ERROR_INITIALIZATION_FAILED; + } + + VMA_DEBUG_GLOBAL_MUTEX_LOCK + + // 1. Create VkImage. + VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)( + allocator->m_hDevice, + pImageCreateInfo, + allocator->GetAllocationCallbacks(), + pImage); + if (res >= 0) + { + // 2. Bind image with memory. 
+ res = allocator->BindImageMemory(allocation, allocationLocalOffset, *pImage, VMA_NULL); + if (res >= 0) + { + return VK_SUCCESS; + } + (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks()); + } + return res; +} + +VMA_CALL_PRE void VMA_CALL_POST vmaDestroyImage( + VmaAllocator VMA_NOT_NULL allocator, + VkImage VMA_NULLABLE_NON_DISPATCHABLE image, + VmaAllocation VMA_NULLABLE allocation) +{ + VMA_ASSERT(allocator); + + if(image == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE) + { + return; + } + + VMA_DEBUG_LOG("vmaDestroyImage"); + + VMA_DEBUG_GLOBAL_MUTEX_LOCK + + if(image != VK_NULL_HANDLE) + { + (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks()); + } + if(allocation != VK_NULL_HANDLE) + { + allocator->FreeMemory( + 1, // allocationCount + &allocation); + } +} + +VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateVirtualBlock( + const VmaVirtualBlockCreateInfo* VMA_NOT_NULL pCreateInfo, + VmaVirtualBlock VMA_NULLABLE * VMA_NOT_NULL pVirtualBlock) +{ + VMA_ASSERT(pCreateInfo && pVirtualBlock); + VMA_ASSERT(pCreateInfo->size > 0); + VMA_DEBUG_LOG("vmaCreateVirtualBlock"); + VMA_DEBUG_GLOBAL_MUTEX_LOCK; + *pVirtualBlock = vma_new(pCreateInfo->pAllocationCallbacks, VmaVirtualBlock_T)(*pCreateInfo); + VkResult res = (*pVirtualBlock)->Init(); + if(res < 0) + { + vma_delete(pCreateInfo->pAllocationCallbacks, *pVirtualBlock); + *pVirtualBlock = VK_NULL_HANDLE; + } + return res; +} + +VMA_CALL_PRE void VMA_CALL_POST vmaDestroyVirtualBlock(VmaVirtualBlock VMA_NULLABLE virtualBlock) +{ + if(virtualBlock != VK_NULL_HANDLE) + { + VMA_DEBUG_LOG("vmaDestroyVirtualBlock"); + VMA_DEBUG_GLOBAL_MUTEX_LOCK; + VkAllocationCallbacks allocationCallbacks = virtualBlock->m_AllocationCallbacks; // Have to copy the callbacks when destroying. + vma_delete(&allocationCallbacks, virtualBlock); + } +} + +VMA_CALL_PRE VkBool32 VMA_CALL_POST vmaIsVirtualBlockEmpty(VmaVirtualBlock VMA_NOT_NULL virtualBlock) +{ + VMA_ASSERT(virtualBlock != VK_NULL_HANDLE); + VMA_DEBUG_LOG("vmaIsVirtualBlockEmpty"); + VMA_DEBUG_GLOBAL_MUTEX_LOCK; + return virtualBlock->IsEmpty() ? 
VK_TRUE : VK_FALSE; +} + +VMA_CALL_PRE void VMA_CALL_POST vmaGetVirtualAllocationInfo(VmaVirtualBlock VMA_NOT_NULL virtualBlock, + VmaVirtualAllocation VMA_NOT_NULL_NON_DISPATCHABLE allocation, VmaVirtualAllocationInfo* VMA_NOT_NULL pVirtualAllocInfo) +{ + VMA_ASSERT(virtualBlock != VK_NULL_HANDLE && pVirtualAllocInfo != VMA_NULL); + VMA_DEBUG_LOG("vmaGetVirtualAllocationInfo"); + VMA_DEBUG_GLOBAL_MUTEX_LOCK; + virtualBlock->GetAllocationInfo(allocation, *pVirtualAllocInfo); +} + +VMA_CALL_PRE VkResult VMA_CALL_POST vmaVirtualAllocate(VmaVirtualBlock VMA_NOT_NULL virtualBlock, + const VmaVirtualAllocationCreateInfo* VMA_NOT_NULL pCreateInfo, VmaVirtualAllocation VMA_NULLABLE_NON_DISPATCHABLE* VMA_NOT_NULL pAllocation, + VkDeviceSize* VMA_NULLABLE pOffset) +{ + VMA_ASSERT(virtualBlock != VK_NULL_HANDLE && pCreateInfo != VMA_NULL && pAllocation != VMA_NULL); + VMA_DEBUG_LOG("vmaVirtualAllocate"); + VMA_DEBUG_GLOBAL_MUTEX_LOCK; + return virtualBlock->Allocate(*pCreateInfo, *pAllocation, pOffset); +} + +VMA_CALL_PRE void VMA_CALL_POST vmaVirtualFree(VmaVirtualBlock VMA_NOT_NULL virtualBlock, VmaVirtualAllocation VMA_NULLABLE_NON_DISPATCHABLE allocation) +{ + if(allocation != VK_NULL_HANDLE) + { + VMA_ASSERT(virtualBlock != VK_NULL_HANDLE); + VMA_DEBUG_LOG("vmaVirtualFree"); + VMA_DEBUG_GLOBAL_MUTEX_LOCK; + virtualBlock->Free(allocation); + } +} + +VMA_CALL_PRE void VMA_CALL_POST vmaClearVirtualBlock(VmaVirtualBlock VMA_NOT_NULL virtualBlock) +{ + VMA_ASSERT(virtualBlock != VK_NULL_HANDLE); + VMA_DEBUG_LOG("vmaClearVirtualBlock"); + VMA_DEBUG_GLOBAL_MUTEX_LOCK; + virtualBlock->Clear(); +} + +VMA_CALL_PRE void VMA_CALL_POST vmaSetVirtualAllocationUserData(VmaVirtualBlock VMA_NOT_NULL virtualBlock, + VmaVirtualAllocation VMA_NOT_NULL_NON_DISPATCHABLE allocation, void* VMA_NULLABLE pUserData) +{ + VMA_ASSERT(virtualBlock != VK_NULL_HANDLE); + VMA_DEBUG_LOG("vmaSetVirtualAllocationUserData"); + VMA_DEBUG_GLOBAL_MUTEX_LOCK; + virtualBlock->SetAllocationUserData(allocation, pUserData); +} + +VMA_CALL_PRE void VMA_CALL_POST vmaGetVirtualBlockStatistics(VmaVirtualBlock VMA_NOT_NULL virtualBlock, + VmaStatistics* VMA_NOT_NULL pStats) +{ + VMA_ASSERT(virtualBlock != VK_NULL_HANDLE && pStats != VMA_NULL); + VMA_DEBUG_LOG("vmaGetVirtualBlockStatistics"); + VMA_DEBUG_GLOBAL_MUTEX_LOCK; + virtualBlock->GetStatistics(*pStats); +} + +VMA_CALL_PRE void VMA_CALL_POST vmaCalculateVirtualBlockStatistics(VmaVirtualBlock VMA_NOT_NULL virtualBlock, + VmaDetailedStatistics* VMA_NOT_NULL pStats) +{ + VMA_ASSERT(virtualBlock != VK_NULL_HANDLE && pStats != VMA_NULL); + VMA_DEBUG_LOG("vmaCalculateVirtualBlockStatistics"); + VMA_DEBUG_GLOBAL_MUTEX_LOCK; + virtualBlock->CalculateDetailedStatistics(*pStats); +} + +#if VMA_STATS_STRING_ENABLED + +VMA_CALL_PRE void VMA_CALL_POST vmaBuildVirtualBlockStatsString(VmaVirtualBlock VMA_NOT_NULL virtualBlock, + char* VMA_NULLABLE * VMA_NOT_NULL ppStatsString, VkBool32 detailedMap) +{ + VMA_ASSERT(virtualBlock != VK_NULL_HANDLE && ppStatsString != VMA_NULL); + VMA_DEBUG_GLOBAL_MUTEX_LOCK; + const VkAllocationCallbacks* allocationCallbacks = virtualBlock->GetAllocationCallbacks(); + VmaStringBuilder sb(allocationCallbacks); + virtualBlock->BuildStatsString(detailedMap != VK_FALSE, sb); + *ppStatsString = VmaCreateStringCopy(allocationCallbacks, sb.GetData(), sb.GetLength()); +} + +VMA_CALL_PRE void VMA_CALL_POST vmaFreeVirtualBlockStatsString(VmaVirtualBlock VMA_NOT_NULL virtualBlock, + char* VMA_NULLABLE pStatsString) +{ + if(pStatsString != VMA_NULL) + { + VMA_ASSERT(virtualBlock != 
VK_NULL_HANDLE);
+        VMA_DEBUG_GLOBAL_MUTEX_LOCK;
+        VmaFreeString(virtualBlock->GetAllocationCallbacks(), pStatsString);
+    }
+}
+#endif // VMA_STATS_STRING_ENABLED
+#endif // _VMA_PUBLIC_INTERFACE
+#endif // VMA_IMPLEMENTATION
+
+/**
+\page quick_start Quick start
+
+\section quick_start_project_setup Project setup
+
+Vulkan Memory Allocator comes in the form of a "stb-style" single header file.
+While you can pull the entire repository, e.g. as a Git submodule, and there is also a CMake script provided,
+you don't need to build it as a separate library project.
+You can add the file "vk_mem_alloc.h" directly to your project and submit it to your code repository next to your other source files.
+
+"Single header" doesn't mean that everything is contained in C/C++ declarations,
+as it tends to be in the case of inline functions or C++ templates.
+It means that the implementation is bundled with the interface in a single file and needs to be extracted using a preprocessor macro.
+If you don't do it properly, it will result in linker errors.
+
+To do it properly:
+
+-# Include the "vk_mem_alloc.h" file in each CPP file where you want to use the library.
+   This includes declarations of all members of the library.
+-# In exactly one CPP file define the following macro before this include.
+   It also enables internal definitions.
+
+\code
+#define VMA_IMPLEMENTATION
+#include "vk_mem_alloc.h"
+\endcode
+
+It may be a good idea to create a dedicated CPP file just for this purpose, e.g. "VmaUsage.cpp".
+
+This library includes header `<vulkan/vulkan.h>`, which in turn
+includes `<windows.h>` on Windows. If you need some specific macros defined
+before including these headers (like `WIN32_LEAN_AND_MEAN` or
+`WINVER` for Windows, `VK_USE_PLATFORM_WIN32_KHR` for Vulkan), you must define
+them before every `#include` of this library.
+It may be a good idea to create a dedicated header file for this purpose, e.g. "VmaUsage.h",
+that will be included in other source files instead of the VMA header directly.
+
+This library is written in C++, but has a C-compatible interface.
+Thus, you can include and use "vk_mem_alloc.h" in C or C++ code, but the full
+implementation with the `VMA_IMPLEMENTATION` macro must be compiled as C++, NOT as C.
+Some features of C++14 are used and required. Features of C++20 are used optionally when available.
+Some headers of the standard C and C++ library are used, but STL containers, RTTI, and C++ exceptions are not used.
+
+
+\section quick_start_initialization Initialization
+
+VMA offers a library interface in a style similar to Vulkan, with object handles like #VmaAllocation,
+structures describing parameters of objects to be created like #VmaAllocationCreateInfo,
+and error codes returned from functions using the `VkResult` type.
+
+The first and the main object that needs to be created is #VmaAllocator.
+It represents the initialization of the entire library.
+Only one such object should be created per `VkDevice`.
+You should create it at program startup, after the `VkDevice` is created, and before any device memory allocation needs to be made.
+It must be destroyed before `VkDevice` is destroyed.
+
+At program startup:
+
+-# Initialize Vulkan to have `VkInstance`, `VkPhysicalDevice`, and `VkDevice` objects.
+-# Fill the VmaAllocatorCreateInfo structure and call vmaCreateAllocator() to create the #VmaAllocator object.
+
+Only the members `physicalDevice`, `device`, and `instance` are required.
+However, you should inform the library which Vulkan version you use by setting
+VmaAllocatorCreateInfo::vulkanApiVersion and which extensions you enabled
+by setting VmaAllocatorCreateInfo::flags.
+Otherwise, VMA would use only features of Vulkan 1.0 core with no extensions. +See below for details. + +\subsection quick_start_initialization_selecting_vulkan_version Selecting Vulkan version + +VMA supports Vulkan version down to 1.0, for backward compatibility. +If you want to use higher version, you need to inform the library about it. +This is a two-step process. + +Step 1: Compile time. By default, VMA compiles with code supporting the highest +Vulkan version found in the included `` that is also supported by the library. +If this is OK, you don't need to do anything. +However, if you want to compile VMA as if only some lower Vulkan version was available, +define macro `VMA_VULKAN_VERSION` before every `#include "vk_mem_alloc.h"`. +It should have decimal numeric value in form of ABBBCCC, where A = major, BBB = minor, CCC = patch Vulkan version. +For example, to compile against Vulkan 1.2: + +\code +#define VMA_VULKAN_VERSION 1002000 // Vulkan 1.2 +#include "vk_mem_alloc.h" +\endcode + +Step 2: Runtime. Even when compiled with higher Vulkan version available, +VMA can use only features of a lower version, which is configurable during creation of the #VmaAllocator object. +By default, only Vulkan 1.0 is used. +To initialize the allocator with support for higher Vulkan version, you need to set member +VmaAllocatorCreateInfo::vulkanApiVersion to an appropriate value, e.g. using constants like `VK_API_VERSION_1_2`. +See code sample below. + +\subsection quick_start_initialization_importing_vulkan_functions Importing Vulkan functions + +You may need to configure importing Vulkan functions. There are 3 ways to do this: + +-# **If you link with Vulkan static library** (e.g. "vulkan-1.lib" on Windows): + - You don't need to do anything. + - VMA will use these, as macro `VMA_STATIC_VULKAN_FUNCTIONS` is defined to 1 by default. +-# **If you want VMA to fetch pointers to Vulkan functions dynamically** using `vkGetInstanceProcAddr`, + `vkGetDeviceProcAddr` (this is the option presented in the example below): + - Define `VMA_STATIC_VULKAN_FUNCTIONS` to 0, `VMA_DYNAMIC_VULKAN_FUNCTIONS` to 1. + - Provide pointers to these two functions via VmaVulkanFunctions::vkGetInstanceProcAddr, + VmaVulkanFunctions::vkGetDeviceProcAddr. + - The library will fetch pointers to all other functions it needs internally. +-# **If you fetch pointers to all Vulkan functions in a custom way**, e.g. using some loader like + [Volk](https://github.com/zeux/volk): + - Define `VMA_STATIC_VULKAN_FUNCTIONS` and `VMA_DYNAMIC_VULKAN_FUNCTIONS` to 0. + - Pass these pointers via structure #VmaVulkanFunctions. + +\subsection quick_start_initialization_enabling_extensions Enabling extensions + +VMA can automatically use following Vulkan extensions. +If you found them available on the selected physical device and you enabled them +while creating `VkInstance` / `VkDevice` object, inform VMA about their availability +by setting appropriate flags in VmaAllocatorCreateInfo::flags. 
+ +Vulkan extension | VMA flag +------------------------------|----------------------------------------------------- +VK_KHR_dedicated_allocation | #VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT +VK_KHR_bind_memory2 | #VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT +VK_KHR_maintenance4 | #VMA_ALLOCATOR_CREATE_KHR_MAINTENANCE4_BIT +VK_KHR_maintenance5 | #VMA_ALLOCATOR_CREATE_KHR_MAINTENANCE5_BIT +VK_EXT_memory_budget | #VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT +VK_KHR_buffer_device_address | #VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT +VK_EXT_memory_priority | #VMA_ALLOCATOR_CREATE_EXT_MEMORY_PRIORITY_BIT +VK_AMD_device_coherent_memory | #VMA_ALLOCATOR_CREATE_AMD_DEVICE_COHERENT_MEMORY_BIT + +Example with fetching pointers to Vulkan functions dynamically: + +\code +#define VMA_STATIC_VULKAN_FUNCTIONS 0 +#define VMA_DYNAMIC_VULKAN_FUNCTIONS 1 +#include "vk_mem_alloc.h" + +... + +VmaVulkanFunctions vulkanFunctions = {}; +vulkanFunctions.vkGetInstanceProcAddr = &vkGetInstanceProcAddr; +vulkanFunctions.vkGetDeviceProcAddr = &vkGetDeviceProcAddr; + +VmaAllocatorCreateInfo allocatorCreateInfo = {}; +allocatorCreateInfo.flags = VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT; +allocatorCreateInfo.vulkanApiVersion = VK_API_VERSION_1_2; +allocatorCreateInfo.physicalDevice = physicalDevice; +allocatorCreateInfo.device = device; +allocatorCreateInfo.instance = instance; +allocatorCreateInfo.pVulkanFunctions = &vulkanFunctions; + +VmaAllocator allocator; +vmaCreateAllocator(&allocatorCreateInfo, &allocator); + +// Entire program... + +// At the end, don't forget to: +vmaDestroyAllocator(allocator); +\endcode + + +\subsection quick_start_initialization_other_config Other configuration options + +There are additional configuration options available through preprocessor macros that you can define +before including VMA header and through parameters passed in #VmaAllocatorCreateInfo. +They include a possibility to use your own callbacks for host memory allocations (`VkAllocationCallbacks`), +callbacks for device memory allocations (instead of `vkAllocateMemory`, `vkFreeMemory`), +or your custom `VMA_ASSERT` macro, among others. +For more information, see: @ref configuration. + + +\section quick_start_resource_allocation Resource allocation + +When you want to create a buffer or image: + +-# Fill `VkBufferCreateInfo` / `VkImageCreateInfo` structure. +-# Fill VmaAllocationCreateInfo structure. +-# Call vmaCreateBuffer() / vmaCreateImage() to get `VkBuffer`/`VkImage` with memory + already allocated and bound to it, plus #VmaAllocation objects that represents its underlying memory. + +\code +VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO }; +bufferInfo.size = 65536; +bufferInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT; + +VmaAllocationCreateInfo allocInfo = {}; +allocInfo.usage = VMA_MEMORY_USAGE_AUTO; + +VkBuffer buffer; +VmaAllocation allocation; +vmaCreateBuffer(allocator, &bufferInfo, &allocInfo, &buffer, &allocation, nullptr); +\endcode + +Don't forget to destroy your buffer and allocation objects when no longer needed: + +\code +vmaDestroyBuffer(allocator, buffer, allocation); +\endcode + +If you need to map the buffer, you must set flag +#VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT or #VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT +in VmaAllocationCreateInfo::flags. +There are many additional parameters that can control the choice of memory type to be used for the allocation +and other features. 
+For more information, see documentation chapters: @ref choosing_memory_type, @ref memory_mapping. + + +\page choosing_memory_type Choosing memory type + +Physical devices in Vulkan support various combinations of memory heaps and +types. Help with choosing correct and optimal memory type for your specific +resource is one of the key features of this library. You can use it by filling +appropriate members of VmaAllocationCreateInfo structure, as described below. +You can also combine multiple methods. + +-# If you just want to find memory type index that meets your requirements, you + can use function: vmaFindMemoryTypeIndexForBufferInfo(), + vmaFindMemoryTypeIndexForImageInfo(), vmaFindMemoryTypeIndex(). +-# If you want to allocate a region of device memory without association with any + specific image or buffer, you can use function vmaAllocateMemory(). Usage of + this function is not recommended and usually not needed. + vmaAllocateMemoryPages() function is also provided for creating multiple allocations at once, + which may be useful for sparse binding. +-# If you already have a buffer or an image created, you want to allocate memory + for it and then you will bind it yourself, you can use function + vmaAllocateMemoryForBuffer(), vmaAllocateMemoryForImage(). + For binding you should use functions: vmaBindBufferMemory(), vmaBindImageMemory() + or their extended versions: vmaBindBufferMemory2(), vmaBindImageMemory2(). +-# If you want to create a buffer or an image, allocate memory for it, and bind + them together, all in one call, you can use function vmaCreateBuffer(), + vmaCreateImage(). + This is the easiest and recommended way to use this library! + +When using 3. or 4., the library internally queries Vulkan for memory types +supported for that buffer or image (function `vkGetBufferMemoryRequirements()`) +and uses only one of these types. + +If no memory type can be found that meets all the requirements, these functions +return `VK_ERROR_FEATURE_NOT_PRESENT`. + +You can leave VmaAllocationCreateInfo structure completely filled with zeros. +It means no requirements are specified for memory type. +It is valid, although not very useful. + +\section choosing_memory_type_usage Usage + +The easiest way to specify memory requirements is to fill member +VmaAllocationCreateInfo::usage using one of the values of enum #VmaMemoryUsage. +It defines high level, common usage types. +Since version 3 of the library, it is recommended to use #VMA_MEMORY_USAGE_AUTO to let it select best memory type for your resource automatically. + +For example, if you want to create a uniform buffer that will be filled using +transfer only once or infrequently and then used for rendering every frame as a uniform buffer, you can +do it using following code. The buffer will most likely end up in a memory type with +`VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT` to be fast to access by the GPU device. 
+ +\code +VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO }; +bufferInfo.size = 65536; +bufferInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT; + +VmaAllocationCreateInfo allocInfo = {}; +allocInfo.usage = VMA_MEMORY_USAGE_AUTO; + +VkBuffer buffer; +VmaAllocation allocation; +vmaCreateBuffer(allocator, &bufferInfo, &allocInfo, &buffer, &allocation, nullptr); +\endcode + +If you have a preference for putting the resource in GPU (device) memory or CPU (host) memory +on systems with discrete graphics card that have the memories separate, you can use +#VMA_MEMORY_USAGE_AUTO_PREFER_DEVICE or #VMA_MEMORY_USAGE_AUTO_PREFER_HOST. + +When using `VMA_MEMORY_USAGE_AUTO*` while you want to map the allocated memory, +you also need to specify one of the host access flags: +#VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT or #VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT. +This will help the library decide about preferred memory type to ensure it has `VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT` +so you can map it. + +For example, a staging buffer that will be filled via mapped pointer and then +used as a source of transfer to the buffer described previously can be created like this. +It will likely end up in a memory type that is `HOST_VISIBLE` and `HOST_COHERENT` +but not `HOST_CACHED` (meaning uncached, write-combined) and not `DEVICE_LOCAL` (meaning system RAM). + +\code +VkBufferCreateInfo stagingBufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO }; +stagingBufferInfo.size = 65536; +stagingBufferInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT; + +VmaAllocationCreateInfo stagingAllocInfo = {}; +stagingAllocInfo.usage = VMA_MEMORY_USAGE_AUTO; +stagingAllocInfo.flags = VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT; + +VkBuffer stagingBuffer; +VmaAllocation stagingAllocation; +vmaCreateBuffer(allocator, &stagingBufferInfo, &stagingAllocInfo, &stagingBuffer, &stagingAllocation, nullptr); +\endcode + +For more examples of creating different kinds of resources, see chapter \ref usage_patterns. +See also: @ref memory_mapping. + +Usage values `VMA_MEMORY_USAGE_AUTO*` are legal to use only when the library knows +about the resource being created by having `VkBufferCreateInfo` / `VkImageCreateInfo` passed, +so they work with functions like: vmaCreateBuffer(), vmaCreateImage(), vmaFindMemoryTypeIndexForBufferInfo() etc. +If you allocate raw memory using function vmaAllocateMemory(), you have to use other means of selecting +memory type, as described below. + +\note +Old usage values (`VMA_MEMORY_USAGE_GPU_ONLY`, `VMA_MEMORY_USAGE_CPU_ONLY`, +`VMA_MEMORY_USAGE_CPU_TO_GPU`, `VMA_MEMORY_USAGE_GPU_TO_CPU`, `VMA_MEMORY_USAGE_CPU_COPY`) +are still available and work same way as in previous versions of the library +for backward compatibility, but they are deprecated. + +\section choosing_memory_type_required_preferred_flags Required and preferred flags + +You can specify more detailed requirements by filling members +VmaAllocationCreateInfo::requiredFlags and VmaAllocationCreateInfo::preferredFlags +with a combination of bits from enum `VkMemoryPropertyFlags`. 
For example, +if you want to create a buffer that will be persistently mapped on host (so it +must be `HOST_VISIBLE`) and preferably will also be `HOST_COHERENT` and `HOST_CACHED`, +use following code: + +\code +VmaAllocationCreateInfo allocInfo = {}; +allocInfo.requiredFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT; +allocInfo.preferredFlags = VK_MEMORY_PROPERTY_HOST_COHERENT_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT; +allocInfo.flags = VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT | VMA_ALLOCATION_CREATE_MAPPED_BIT; + +VkBuffer buffer; +VmaAllocation allocation; +vmaCreateBuffer(allocator, &bufferInfo, &allocInfo, &buffer, &allocation, nullptr); +\endcode + +A memory type is chosen that has all the required flags and as many preferred +flags set as possible. + +Value passed in VmaAllocationCreateInfo::usage is internally converted to a set of required and preferred flags, +plus some extra "magic" (heuristics). + +\section choosing_memory_type_explicit_memory_types Explicit memory types + +If you inspected memory types available on the physical device and you have +a preference for memory types that you want to use, you can fill member +VmaAllocationCreateInfo::memoryTypeBits. It is a bit mask, where each bit set +means that a memory type with that index is allowed to be used for the +allocation. Special value 0, just like `UINT32_MAX`, means there are no +restrictions to memory type index. + +Please note that this member is NOT just a memory type index. +Still you can use it to choose just one, specific memory type. +For example, if you already determined that your buffer should be created in +memory type 2, use following code: + +\code +uint32_t memoryTypeIndex = 2; + +VmaAllocationCreateInfo allocInfo = {}; +allocInfo.memoryTypeBits = 1u << memoryTypeIndex; + +VkBuffer buffer; +VmaAllocation allocation; +vmaCreateBuffer(allocator, &bufferInfo, &allocInfo, &buffer, &allocation, nullptr); +\endcode + +You can also use this parameter to exclude some memory types. +If you inspect memory heaps and types available on the current physical device and +you determine that for some reason you don't want to use a specific memory type for the allocation, +you can enable automatic memory type selection but exclude certain memory type or types +by setting all bits of `memoryTypeBits` to 1 except the ones you choose. + +\code +// ... +uint32_t excludedMemoryTypeIndex = 2; +VmaAllocationCreateInfo allocInfo = {}; +allocInfo.usage = VMA_MEMORY_USAGE_AUTO; +allocInfo.memoryTypeBits = ~(1u << excludedMemoryTypeIndex); +// ... +\endcode + + +\section choosing_memory_type_custom_memory_pools Custom memory pools + +If you allocate from custom memory pool, all the ways of specifying memory +requirements described above are not applicable and the aforementioned members +of VmaAllocationCreateInfo structure are ignored. Memory type is selected +explicitly when creating the pool and then used to make all the allocations from +that pool. For further details, see \ref custom_memory_pools. + +\section choosing_memory_type_dedicated_allocations Dedicated allocations + +Memory for allocations is reserved out of larger block of `VkDeviceMemory` +allocated from Vulkan internally. That is the main feature of this whole library. +You can still request a separate memory block to be created for an allocation, +just like you would do in a trivial solution without using any allocator. +In that case, a buffer or image is always bound to that memory at offset 0. +This is called a "dedicated allocation". 
+You can explicitly request it by using flag #VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT. +The library can also internally decide to use dedicated allocation in some cases, e.g.: + +- When the size of the allocation is large. +- When [VK_KHR_dedicated_allocation](@ref vk_khr_dedicated_allocation) extension is enabled + and it reports that dedicated allocation is required or recommended for the resource. +- When allocation of next big memory block fails due to not enough device memory, + but allocation with the exact requested size succeeds. + + +\page memory_mapping Memory mapping + +To "map memory" in Vulkan means to obtain a CPU pointer to `VkDeviceMemory`, +to be able to read from it or write to it in CPU code. +Mapping is possible only of memory allocated from a memory type that has +`VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT` flag. +Functions `vkMapMemory()`, `vkUnmapMemory()` are designed for this purpose. +You can use them directly with memory allocated by this library, +but it is not recommended because of following issue: +Mapping the same `VkDeviceMemory` block multiple times is illegal - only one mapping at a time is allowed. +This includes mapping disjoint regions. Mapping is not reference-counted internally by Vulkan. +It is also not thread-safe. +Because of this, Vulkan Memory Allocator provides following facilities: + +\note If you want to be able to map an allocation, you need to specify one of the flags +#VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT or #VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT +in VmaAllocationCreateInfo::flags. These flags are required for an allocation to be mappable +when using #VMA_MEMORY_USAGE_AUTO or other `VMA_MEMORY_USAGE_AUTO*` enum values. +For other usage values they are ignored and every such allocation made in `HOST_VISIBLE` memory type is mappable, +but these flags can still be used for consistency. + +\section memory_mapping_copy_functions Copy functions + +The easiest way to copy data from a host pointer to an allocation is to use convenience function vmaCopyMemoryToAllocation(). +It automatically maps the Vulkan memory temporarily (if not already mapped), performs `memcpy`, +and calls `vkFlushMappedMemoryRanges` (if required - if memory type is not `HOST_COHERENT`). + +It is also the safest one, because using `memcpy` avoids a risk of accidentally introducing memory reads +(e.g. by doing `pMappedVectors[i] += v`), which may be very slow on memory types that are not `HOST_CACHED`. + +\code +struct ConstantBuffer +{ + ... +}; +ConstantBuffer constantBufferData = ... + +VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO }; +bufCreateInfo.size = sizeof(ConstantBuffer); +bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT; + +VmaAllocationCreateInfo allocCreateInfo = {}; +allocCreateInfo.usage = VMA_MEMORY_USAGE_AUTO; +allocCreateInfo.flags = VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT; + +VkBuffer buf; +VmaAllocation alloc; +vmaCreateBuffer(allocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, nullptr); + +vmaCopyMemoryToAllocation(allocator, &constantBufferData, alloc, 0, sizeof(ConstantBuffer)); +\endcode + +Copy in the other direction - from an allocation to a host pointer can be performed the same way using function vmaCopyAllocationToMemory(). + +\section memory_mapping_mapping_functions Mapping functions + +The library provides following functions for mapping of a specific allocation: vmaMapMemory(), vmaUnmapMemory(). 
+They are safer and more convenient to use than standard Vulkan functions. +You can map an allocation multiple times simultaneously - mapping is reference-counted internally. +You can also map different allocations simultaneously regardless of whether they use the same `VkDeviceMemory` block. +The way it is implemented is that the library always maps entire memory block, not just region of the allocation. +For further details, see description of vmaMapMemory() function. +Example: + +\code +// Having these objects initialized: +struct ConstantBuffer +{ + ... +}; +ConstantBuffer constantBufferData = ... + +VmaAllocator allocator = ... +VkBuffer constantBuffer = ... +VmaAllocation constantBufferAllocation = ... + +// You can map and fill your buffer using following code: + +void* mappedData; +vmaMapMemory(allocator, constantBufferAllocation, &mappedData); +memcpy(mappedData, &constantBufferData, sizeof(constantBufferData)); +vmaUnmapMemory(allocator, constantBufferAllocation); +\endcode + +When mapping, you may see a warning from Vulkan validation layer similar to this one: + +Mapping an image with layout VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL can result in undefined behavior if this memory is used by the device. Only GENERAL or PREINITIALIZED should be used. + +It happens because the library maps entire `VkDeviceMemory` block, where different +types of images and buffers may end up together, especially on GPUs with unified memory like Intel. +You can safely ignore it if you are sure you access only memory of the intended +object that you wanted to map. + + +\section memory_mapping_persistently_mapped_memory Persistently mapped memory + +Keeping your memory persistently mapped is generally OK in Vulkan. +You don't need to unmap it before using its data on the GPU. +The library provides a special feature designed for that: +Allocations made with #VMA_ALLOCATION_CREATE_MAPPED_BIT flag set in +VmaAllocationCreateInfo::flags stay mapped all the time, +so you can just access CPU pointer to it any time +without a need to call any "map" or "unmap" function. +Example: + +\code +VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO }; +bufCreateInfo.size = sizeof(ConstantBuffer); +bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT; + +VmaAllocationCreateInfo allocCreateInfo = {}; +allocCreateInfo.usage = VMA_MEMORY_USAGE_AUTO; +allocCreateInfo.flags = VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT | + VMA_ALLOCATION_CREATE_MAPPED_BIT; + +VkBuffer buf; +VmaAllocation alloc; +VmaAllocationInfo allocInfo; +vmaCreateBuffer(allocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo); + +// Buffer is already mapped. You can access its memory. +memcpy(allocInfo.pMappedData, &constantBufferData, sizeof(constantBufferData)); +\endcode + +\note #VMA_ALLOCATION_CREATE_MAPPED_BIT by itself doesn't guarantee that the allocation will end up +in a mappable memory type. +For this, you need to also specify #VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT or +#VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT. +#VMA_ALLOCATION_CREATE_MAPPED_BIT only guarantees that if the memory is `HOST_VISIBLE`, the allocation will be mapped on creation. +For an example of how to make use of this fact, see section \ref usage_patterns_advanced_data_uploading. 
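+
+For illustration only, here is a minimal, hypothetical sketch of such a check, assuming an allocation
+`alloc` with info `allocInfo` that was created with #VMA_ALLOCATION_CREATE_MAPPED_BIT but without a
+guarantee of ending up in `HOST_VISIBLE` memory (names reused from the example above for brevity;
+the fallback path is only indicated, not implemented):
+
+\code
+VkMemoryPropertyFlags memPropFlags;
+vmaGetAllocationMemoryProperties(allocator, alloc, &memPropFlags);
+
+if((memPropFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
+{
+    // The allocation is HOST_VISIBLE and, thanks to VMA_ALLOCATION_CREATE_MAPPED_BIT,
+    // already persistently mapped - write directly through the returned pointer.
+    memcpy(allocInfo.pMappedData, &constantBufferData, sizeof(constantBufferData));
+}
+else
+{
+    // Not mappable on this device - upload via a staging buffer and a transfer instead (not shown here).
+}
+\endcode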
+
+\section memory_mapping_cache_control Cache flush and invalidate
+
+Memory in Vulkan doesn't need to be unmapped before using it on the GPU,
+but unless a memory type has the `VK_MEMORY_PROPERTY_HOST_COHERENT_BIT` flag set,
+you need to manually **invalidate** the cache before reading a mapped pointer
+and **flush** the cache after writing to a mapped pointer.
+Map/unmap operations don't do that automatically.
+Vulkan provides the functions `vkFlushMappedMemoryRanges()` and
+`vkInvalidateMappedMemoryRanges()` for this purpose, but this library provides more convenient
+functions that refer to a given allocation object: vmaFlushAllocation(),
+vmaInvalidateAllocation(),
+or multiple objects at once: vmaFlushAllocations(), vmaInvalidateAllocations().
+
+Regions of memory specified for flush/invalidate must be aligned to
+`VkPhysicalDeviceLimits::nonCoherentAtomSize`. This is automatically ensured by the library.
+In any memory type that is `HOST_VISIBLE` but not `HOST_COHERENT`, all allocations
+within blocks are aligned to this value, so their offsets are always a multiple of
+`nonCoherentAtomSize` and two different allocations never share the same "line" of this size.
+
+Also, Windows drivers from all 3 PC GPU vendors (AMD, Intel, NVIDIA)
+currently provide the `HOST_COHERENT` flag on all memory types that are
+`HOST_VISIBLE`, so on PC you may not need to bother.
+
+
+\page staying_within_budget Staying within budget
+
+When developing a graphics-intensive game or program, it is important to avoid allocating
+more GPU memory than is physically available. When memory is over-committed,
+various bad things can happen, depending on the specific GPU, graphics driver, and
+operating system:
+
+- It may just work without any problems.
+- The application may slow down because some memory blocks are moved to system RAM
+  and the GPU has to access them through the PCI Express bus.
+- A new allocation may take a very long time to complete, even a few seconds, and possibly
+  freeze the entire system.
+- The new allocation may fail with `VK_ERROR_OUT_OF_DEVICE_MEMORY`.
+- It may even result in a GPU crash (TDR), observed as `VK_ERROR_DEVICE_LOST`
+  returned somewhere later.
+
+\section staying_within_budget_querying_for_budget Querying for budget
+
+To query for current memory usage and available budget, use function vmaGetHeapBudgets().
+The returned structure #VmaBudget contains quantities expressed in bytes, per Vulkan memory heap.
+
+Please note that this function returns different information and works faster than
+vmaCalculateStatistics(). vmaGetHeapBudgets() can be called every frame or even before every
+allocation, while vmaCalculateStatistics() is intended to be used rarely,
+only to obtain statistical information, e.g. for debugging purposes.
+
+It is recommended to use the VK_EXT_memory_budget device extension to obtain information
+about the budget from the Vulkan device. VMA is able to use this extension automatically.
+When it is not enabled, the allocator behaves the same way, but then it estimates current usage
+and available budget based on its internal information and Vulkan memory heap sizes,
+which may be less precise. In order to use this extension:
+
+1. Make sure the extensions VK_EXT_memory_budget and VK_KHR_get_physical_device_properties2
+   required by it are available and enable them. Please note that the first is a device
+   extension and the second is an instance extension!
+2. Use flag #VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT when creating the #VmaAllocator object.
+3. Make sure to call vmaSetCurrentFrameIndex() every frame.
Budget is queried from
+   Vulkan inside of it to avoid the overhead of querying it with every allocation.
+
+\section staying_within_budget_controlling_memory_usage Controlling memory usage
+
+There are many ways in which you can try to stay within the budget.
+
+First, when making a new allocation requires allocating a new memory block, the library
+tries not to exceed the budget automatically. If a block with the default recommended size
+(e.g. 256 MB) would go over budget, a smaller block is allocated, possibly even
+dedicated memory for just this resource.
+
+If the size of the requested resource plus current memory usage is more than the
+budget, by default the library still tries to create it, leaving it to the Vulkan
+implementation whether the allocation succeeds or fails. You can change this behavior
+by using the #VMA_ALLOCATION_CREATE_WITHIN_BUDGET_BIT flag. With it, the allocation is
+not made if it would exceed the budget or if the budget is already exceeded.
+VMA then tries to make the allocation from the next eligible Vulkan memory type.
+If all of them fail, the call then fails with `VK_ERROR_OUT_OF_DEVICE_MEMORY`.
+An example usage pattern may be to pass the #VMA_ALLOCATION_CREATE_WITHIN_BUDGET_BIT flag
+when creating resources that are not essential for the application (e.g. the texture
+of a specific object) and not to pass it when creating critically important resources
+(e.g. render targets).
+
+On AMD graphics cards there is a custom vendor extension available: VK_AMD_memory_overallocation_behavior
+that allows controlling the behavior of the Vulkan implementation in out-of-memory cases -
+whether it should fail with an error code or still allow the allocation.
+Usage of this extension involves only passing an extra structure on Vulkan device creation,
+so it is out of the scope of this library.
+
+Finally, you can also use the #VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT flag to make sure
+a new allocation is created only when it fits inside one of the existing memory blocks.
+If it would require allocating a new block, it fails instead with `VK_ERROR_OUT_OF_DEVICE_MEMORY`.
+This also ensures that the function call is very fast because it never goes to Vulkan
+to obtain a new block.
+
+\note Creating \ref custom_memory_pools with VmaPoolCreateInfo::minBlockCount
+set to more than 0 will currently try to allocate memory blocks without checking whether they
+fit within budget.
+
+
+\page resource_aliasing Resource aliasing (overlap)
+
+New explicit graphics APIs (Vulkan and Direct3D 12), thanks to manual memory
+management, give an opportunity to alias (overlap) multiple resources in the
+same region of memory - a feature not available in the old APIs (Direct3D 11, OpenGL).
+It can be useful to save video memory, but it must be used with caution.
+
+For example, if you know the flow of your whole render frame in advance, you
+are going to use some intermediate textures or buffers only during a small range of render passes,
+and you know these ranges don't overlap in time, you can bind these resources to
+the same place in memory, even if they have completely different parameters (width, height, format, etc.).
+
+![Resource aliasing (overlap)](../gfx/Aliasing.png)
+
+Such a scenario is possible using VMA, but you need to create your images manually.
+Then you need to calculate parameters of an allocation to be made using formula: + +- allocation size = max(size of each image) +- allocation alignment = max(alignment of each image) +- allocation memoryTypeBits = bitwise AND(memoryTypeBits of each image) + +Following example shows two different images bound to the same place in memory, +allocated to fit largest of them. + +\code +// A 512x512 texture to be sampled. +VkImageCreateInfo img1CreateInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO }; +img1CreateInfo.imageType = VK_IMAGE_TYPE_2D; +img1CreateInfo.extent.width = 512; +img1CreateInfo.extent.height = 512; +img1CreateInfo.extent.depth = 1; +img1CreateInfo.mipLevels = 10; +img1CreateInfo.arrayLayers = 1; +img1CreateInfo.format = VK_FORMAT_R8G8B8A8_SRGB; +img1CreateInfo.tiling = VK_IMAGE_TILING_OPTIMAL; +img1CreateInfo.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED; +img1CreateInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT; +img1CreateInfo.samples = VK_SAMPLE_COUNT_1_BIT; + +// A full screen texture to be used as color attachment. +VkImageCreateInfo img2CreateInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO }; +img2CreateInfo.imageType = VK_IMAGE_TYPE_2D; +img2CreateInfo.extent.width = 1920; +img2CreateInfo.extent.height = 1080; +img2CreateInfo.extent.depth = 1; +img2CreateInfo.mipLevels = 1; +img2CreateInfo.arrayLayers = 1; +img2CreateInfo.format = VK_FORMAT_R8G8B8A8_UNORM; +img2CreateInfo.tiling = VK_IMAGE_TILING_OPTIMAL; +img2CreateInfo.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED; +img2CreateInfo.usage = VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT; +img2CreateInfo.samples = VK_SAMPLE_COUNT_1_BIT; + +VkImage img1; +res = vkCreateImage(device, &img1CreateInfo, nullptr, &img1); +VkImage img2; +res = vkCreateImage(device, &img2CreateInfo, nullptr, &img2); + +VkMemoryRequirements img1MemReq; +vkGetImageMemoryRequirements(device, img1, &img1MemReq); +VkMemoryRequirements img2MemReq; +vkGetImageMemoryRequirements(device, img2, &img2MemReq); + +VkMemoryRequirements finalMemReq = {}; +finalMemReq.size = std::max(img1MemReq.size, img2MemReq.size); +finalMemReq.alignment = std::max(img1MemReq.alignment, img2MemReq.alignment); +finalMemReq.memoryTypeBits = img1MemReq.memoryTypeBits & img2MemReq.memoryTypeBits; +// Validate if(finalMemReq.memoryTypeBits != 0) + +VmaAllocationCreateInfo allocCreateInfo = {}; +allocCreateInfo.preferredFlags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT; + +VmaAllocation alloc; +res = vmaAllocateMemory(allocator, &finalMemReq, &allocCreateInfo, &alloc, nullptr); + +res = vmaBindImageMemory(allocator, alloc, img1); +res = vmaBindImageMemory(allocator, alloc, img2); + +// You can use img1, img2 here, but not at the same time! + +vmaFreeMemory(allocator, alloc); +vkDestroyImage(allocator, img2, nullptr); +vkDestroyImage(allocator, img1, nullptr); +\endcode + +VMA also provides convenience functions that create a buffer or image and bind it to memory +represented by an existing #VmaAllocation: +vmaCreateAliasingBuffer(), vmaCreateAliasingBuffer2(), +vmaCreateAliasingImage(), vmaCreateAliasingImage2(). +Versions with "2" offer additional parameter `allocationLocalOffset`. + +Remember that using resources that alias in memory requires proper synchronization. +You need to issue a memory barrier to make sure commands that use `img1` and `img2` +don't overlap on GPU timeline. +You also need to treat a resource after aliasing as uninitialized - containing garbage data. 
+For example, if you use `img1` and then want to use `img2`, you need to issue +an image memory barrier for `img2` with `oldLayout` = `VK_IMAGE_LAYOUT_UNDEFINED`. + +Additional considerations: + +- Vulkan also allows to interpret contents of memory between aliasing resources consistently in some cases. +See chapter 11.8. "Memory Aliasing" of Vulkan specification or `VK_IMAGE_CREATE_ALIAS_BIT` flag. +- You can create more complex layout where different images and buffers are bound +at different offsets inside one large allocation. For example, one can imagine +a big texture used in some render passes, aliasing with a set of many small buffers +used between in some further passes. To bind a resource at non-zero offset in an allocation, +use vmaBindBufferMemory2() / vmaBindImageMemory2(). +- Before allocating memory for the resources you want to alias, check `memoryTypeBits` +returned in memory requirements of each resource to make sure the bits overlap. +Some GPUs may expose multiple memory types suitable e.g. only for buffers or +images with `COLOR_ATTACHMENT` usage, so the sets of memory types supported by your +resources may be disjoint. Aliasing them is not possible in that case. + + +\page custom_memory_pools Custom memory pools + +A memory pool contains a number of `VkDeviceMemory` blocks. +The library automatically creates and manages default pool for each memory type available on the device. +Default memory pool automatically grows in size. +Size of allocated blocks is also variable and managed automatically. +You are using default pools whenever you leave VmaAllocationCreateInfo::pool = null. + +You can create custom pool and allocate memory out of it. +It can be useful if you want to: + +- Keep certain kind of allocations separate from others. +- Enforce particular, fixed size of Vulkan memory blocks. +- Limit maximum amount of Vulkan memory allocated for that pool. +- Reserve minimum or fixed amount of Vulkan memory always preallocated for that pool. +- Use extra parameters for a set of your allocations that are available in #VmaPoolCreateInfo but not in + #VmaAllocationCreateInfo - e.g., custom minimum alignment, custom `pNext` chain. +- Perform defragmentation on a specific subset of your allocations. + +To use custom memory pools: + +-# Fill VmaPoolCreateInfo structure. +-# Call vmaCreatePool() to obtain #VmaPool handle. +-# When making an allocation, set VmaAllocationCreateInfo::pool to this handle. + You don't need to specify any other parameters of this structure, like `usage`. + +Example: + +\code +// Find memoryTypeIndex for the pool. +VkBufferCreateInfo sampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO }; +sampleBufCreateInfo.size = 0x10000; // Doesn't matter. +sampleBufCreateInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT; + +VmaAllocationCreateInfo sampleAllocCreateInfo = {}; +sampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_AUTO; + +uint32_t memTypeIndex; +VkResult res = vmaFindMemoryTypeIndexForBufferInfo(allocator, + &sampleBufCreateInfo, &sampleAllocCreateInfo, &memTypeIndex); +// Check res... + +// Create a pool that can have at most 2 blocks, 128 MiB each. +VmaPoolCreateInfo poolCreateInfo = {}; +poolCreateInfo.memoryTypeIndex = memTypeIndex; +poolCreateInfo.blockSize = 128ull * 1024 * 1024; +poolCreateInfo.maxBlockCount = 2; + +VmaPool pool; +res = vmaCreatePool(allocator, &poolCreateInfo, &pool); +// Check res... + +// Allocate a buffer out of it. 
+VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO }; +bufCreateInfo.size = 1024; +bufCreateInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT; + +VmaAllocationCreateInfo allocCreateInfo = {}; +allocCreateInfo.pool = pool; + +VkBuffer buf; +VmaAllocation alloc; +res = vmaCreateBuffer(allocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, nullptr); +// Check res... +\endcode + +You have to free all allocations made from this pool before destroying it. + +\code +vmaDestroyBuffer(allocator, buf, alloc); +vmaDestroyPool(allocator, pool); +\endcode + +New versions of this library support creating dedicated allocations in custom pools. +It is supported only when VmaPoolCreateInfo::blockSize = 0. +To use this feature, set VmaAllocationCreateInfo::pool to the pointer to your custom pool and +VmaAllocationCreateInfo::flags to #VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT. + + +\section custom_memory_pools_MemTypeIndex Choosing memory type index + +When creating a pool, you must explicitly specify memory type index. +To find the one suitable for your buffers or images, you can use helper functions +vmaFindMemoryTypeIndexForBufferInfo(), vmaFindMemoryTypeIndexForImageInfo(). +You need to provide structures with example parameters of buffers or images +that you are going to create in that pool. + +\code +VkBufferCreateInfo exampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO }; +exampleBufCreateInfo.size = 1024; // Doesn't matter +exampleBufCreateInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT; + +VmaAllocationCreateInfo allocCreateInfo = {}; +allocCreateInfo.usage = VMA_MEMORY_USAGE_AUTO; + +uint32_t memTypeIndex; +vmaFindMemoryTypeIndexForBufferInfo(allocator, &exampleBufCreateInfo, &allocCreateInfo, &memTypeIndex); + +VmaPoolCreateInfo poolCreateInfo = {}; +poolCreateInfo.memoryTypeIndex = memTypeIndex; +// ... +\endcode + +When creating buffers/images allocated in that pool, provide following parameters: + +- `VkBufferCreateInfo`: Prefer to pass same parameters as above. + Otherwise you risk creating resources in a memory type that is not suitable for them, which may result in undefined behavior. + Using different `VK_BUFFER_USAGE_` flags may work, but you shouldn't create images in a pool intended for buffers + or the other way around. +- VmaAllocationCreateInfo: You don't need to pass same parameters. Fill only `pool` member. + Other members are ignored anyway. + + +\section custom_memory_pools_when_not_use When not to use custom pools + +Custom pools are commonly overused by VMA users. +While it may feel natural to keep some logical groups of resources separate in memory, +in most cases it does more harm than good. +Using custom pool shouldn't be your first choice. +Instead, please make all allocations from default pools first and only use custom pools +if you can prove and measure that it is beneficial in some way, +e.g. it results in lower memory usage, better performance, etc. + +Using custom pools has disadvantages: + +- Each pool has its own collection of `VkDeviceMemory` blocks. + Some of them may be partially or even completely empty. + Spreading allocations across multiple pools increases the amount of wasted (allocated but unbound) memory. +- You must manually choose specific memory type to be used by a custom pool (set as VmaPoolCreateInfo::memoryTypeIndex). 
+  When using default pools, the best memory type for each of your allocations can be selected automatically
+  using a carefully designed algorithm that works across all kinds of GPUs.
+- If an allocation from a custom pool at a specific memory type fails, the entire allocation operation returns failure.
+  When using default pools, VMA tries another compatible memory type.
+- If you set VmaPoolCreateInfo::blockSize != 0, each memory block has the same size,
+  while default pools start from small blocks and allocate each next block larger and larger,
+  up to the preferred block size.
+
+Many of the common concerns can be addressed in a different way than using custom pools:
+
+- If you want to keep your allocations of a certain size (small versus large) or certain lifetime (transient versus long lived)
+  separate, you likely don't need to.
+  VMA uses a high quality allocation algorithm that manages memory well in various cases.
+  Please measure and check if using custom pools provides a benefit.
+- If you want to keep your images and buffers separate, you don't need to.
+  VMA respects the `bufferImageGranularity` limit automatically.
+- If you want to keep your mapped and not mapped allocations separate, you don't need to.
+  VMA respects the `nonCoherentAtomSize` limit automatically.
+  It also maps only those `VkDeviceMemory` blocks that need to map any allocation.
+  It even tries to keep mappable and non-mappable allocations in separate blocks to minimize the amount of mapped memory.
+- If you want to choose a custom size for the default memory block, you can set it globally instead
+  using VmaAllocatorCreateInfo::preferredLargeHeapBlockSize.
+- If you want to select a specific memory type for your allocation,
+  you can set VmaAllocationCreateInfo::memoryTypeBits to `(1u << myMemoryTypeIndex)` instead.
+- If you need to create a buffer with a certain minimum alignment, you can still do it
+  using default pools with the dedicated function vmaCreateBufferWithAlignment().
+
+
+\section linear_algorithm Linear allocation algorithm
+
+Each Vulkan memory block managed by this library has accompanying metadata that
+keeps track of used and unused regions. By default, the metadata structure and
+algorithm try to find the best place for new allocations among free regions to
+optimize memory usage. This way you can allocate and free objects in any order.
+
+![Default allocation algorithm](../gfx/Linear_allocator_1_algo_default.png)
+
+Sometimes there is a need for a simpler, linear allocation algorithm. You can
+create a custom pool that uses such an algorithm by adding the flag
+#VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT to VmaPoolCreateInfo::flags while creating the
+#VmaPool object. Then an alternative metadata management is used. It always
+creates new allocations after the last one and doesn't reuse free regions left by
+allocations freed in the middle. It results in better allocation performance and
+less memory consumed by metadata.
+
+![Linear allocation algorithm](../gfx/Linear_allocator_2_algo_linear.png)
+
+With this one flag, you can create a custom pool that can be used in many ways:
+free-at-once, stack, double stack, and ring buffer. See below for details.
+You don't need to specify explicitly which of these options you are going to use - it is detected automatically.
+
+\subsection linear_algorithm_free_at_once Free-at-once
+
+In a pool that uses the linear algorithm, you still need to free all the allocations
+individually, e.g. by using vmaFreeMemory() or vmaDestroyBuffer(). You can free
+them in any order. New allocations are always made after the last one - free space
+in the middle is not reused. However, when you release all the allocations and
+the pool becomes empty, allocation starts from the beginning again. This way you
+can use the linear algorithm to speed up creation of allocations that you are going
+to release all at once.
+
+![Free-at-once](../gfx/Linear_allocator_3_free_at_once.png)
+
+This mode is also available for pools created with a VmaPoolCreateInfo::maxBlockCount
+value that allows multiple memory blocks.
+
+\subsection linear_algorithm_stack Stack
+
+When you free an allocation that was created last, its space can be reused.
+Thanks to this, if you always release allocations in the order opposite to their
+creation (LIFO - Last In First Out), you can achieve the behavior of a stack.
+
+![Stack](../gfx/Linear_allocator_4_stack.png)
+
+This mode is also available for pools created with a VmaPoolCreateInfo::maxBlockCount
+value that allows multiple memory blocks.
+
+\subsection linear_algorithm_double_stack Double stack
+
+The space reserved by a custom pool with the linear algorithm may be used by two
+stacks:
+
+- First, default one, growing up from offset 0.
+- Second, "upper" one, growing down from the end towards lower offsets.
+
+To make an allocation from the upper stack, add the flag #VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT
+to VmaAllocationCreateInfo::flags.
+
+![Double stack](../gfx/Linear_allocator_7_double_stack.png)
+
+Double stack is available only in pools with one memory block -
+VmaPoolCreateInfo::maxBlockCount must be 1. Otherwise behavior is undefined.
+
+When the two stacks' ends meet so there is not enough space between them for a
+new allocation, such an allocation fails with the usual
+`VK_ERROR_OUT_OF_DEVICE_MEMORY` error.
+
+\subsection linear_algorithm_ring_buffer Ring buffer
+
+When you free some allocations from the beginning and there is not enough free space
+for a new one at the end of a pool, the allocator's "cursor" wraps around to the
+beginning and starts allocation there. Thanks to this, if you always release
+allocations in the same order as you created them (FIFO - First In First Out),
+you can achieve the behavior of a ring buffer / queue.
+
+![Ring buffer](../gfx/Linear_allocator_5_ring_buffer.png)
+
+Ring buffer is available only in pools with one memory block -
+VmaPoolCreateInfo::maxBlockCount must be 1. Otherwise behavior is undefined.
+
+\note \ref defragmentation is not supported in custom pools created with #VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT.
+
+
+\page defragmentation Defragmentation
+
+Interleaved allocations and deallocations of many objects of varying size can
+cause fragmentation over time, which can lead to a situation where the library is unable
+to find a continuous range of free memory for a new allocation even though there is
+enough free space, just scattered across many small free ranges between existing
+allocations.
+
+To mitigate this problem, you can use the defragmentation feature.
+It doesn't happen automatically though and needs your cooperation,
+because VMA is a low level library that only allocates memory.
+It cannot recreate buffers and images in a new place as it doesn't remember the contents of `VkBufferCreateInfo` / `VkImageCreateInfo` structures.
+It cannot copy their contents as it doesn't record any commands to a command buffer.
+
+Example:
+
+\code
+VmaDefragmentationInfo defragInfo = {};
+defragInfo.pool = myPool;
+defragInfo.flags = VMA_DEFRAGMENTATION_FLAG_ALGORITHM_FAST_BIT;
+
+VmaDefragmentationContext defragCtx;
+VkResult res = vmaBeginDefragmentation(allocator, &defragInfo, &defragCtx);
+// Check res...
+
+for(;;)
+{
+    VmaDefragmentationPassMoveInfo pass;
+    res = vmaBeginDefragmentationPass(allocator, defragCtx, &pass);
+    if(res == VK_SUCCESS)
+        break;
+    else if(res != VK_INCOMPLETE)
+    {
+        // Handle error...
+    }
+
+    for(uint32_t i = 0; i < pass.moveCount; ++i)
+    {
+        // Inspect pass.pMoves[i].srcAllocation, identify what buffer/image it represents.
+        VmaAllocationInfo allocInfo;
+        vmaGetAllocationInfo(allocator, pass.pMoves[i].srcAllocation, &allocInfo);
+        MyEngineResourceData* resData = (MyEngineResourceData*)allocInfo.pUserData;
+
+        // Recreate and bind this buffer/image at: pass.pMoves[i].dstMemory, pass.pMoves[i].dstOffset.
+        VkImageCreateInfo imgCreateInfo = ...
+        VkImage newImg;
+        res = vkCreateImage(device, &imgCreateInfo, nullptr, &newImg);
+        // Check res...
+        res = vmaBindImageMemory(allocator, pass.pMoves[i].dstTmpAllocation, newImg);
+        // Check res...
+
+        // Issue a vkCmdCopyBuffer/vkCmdCopyImage to copy its content to the new place.
+        vkCmdCopyImage(cmdBuf, resData->img, ..., newImg, ...);
+    }
+
+    // Make sure the copy commands finished executing.
+    vkWaitForFences(...);
+
+    // Destroy old buffers/images bound with pass.pMoves[i].srcAllocation.
+    for(uint32_t i = 0; i < pass.moveCount; ++i)
+    {
+        // ...
+        vkDestroyImage(device, resData->img, nullptr);
+    }
+
+    // Update appropriate descriptors to point to the new places...
+
+    res = vmaEndDefragmentationPass(allocator, defragCtx, &pass);
+    if(res == VK_SUCCESS)
+        break;
+    else if(res != VK_INCOMPLETE)
+    {
+        // Handle error...
+    }
+}
+
+vmaEndDefragmentation(allocator, defragCtx, nullptr);
+\endcode
+
+Although functions like vmaCreateBuffer(), vmaCreateImage(), vmaDestroyBuffer(), vmaDestroyImage()
+create/destroy an allocation and a buffer/image at once, they are just a shortcut for
+creating the resource, allocating memory, and binding them together.
+Defragmentation works on memory allocations only. You must handle the rest manually.
+Defragmentation is an iterative process that should repeat "passes" as long as the related functions
+return `VK_INCOMPLETE`, not `VK_SUCCESS`.
+In each pass:
+
+1. vmaBeginDefragmentationPass() function call:
+   - Calculates and returns the list of allocations to be moved in this pass.
+     Note this can be a time-consuming process.
+   - Reserves destination memory for them by creating temporary destination allocations
+     that you can query for their `VkDeviceMemory` + offset using vmaGetAllocationInfo().
+2. Inside the pass, **you should**:
+   - Inspect the returned list of allocations to be moved.
+   - Create new buffers/images and bind them at the returned destination temporary allocations.
+   - Copy data from source to destination resources if necessary.
+   - Destroy the source buffers/images, but NOT their allocations.
+3. vmaEndDefragmentationPass() function call:
+   - Frees the source memory reserved for the allocations that are moved.
+   - Modifies source #VmaAllocation objects that are moved to point to the destination reserved memory.
+   - Frees `VkDeviceMemory` blocks that became empty.
+
+Unlike in previous iterations of the defragmentation API, there is no list of "movable" allocations passed as a parameter.
+The defragmentation algorithm tries to move all suitable allocations.
+You can, however, refuse to move some of them inside a defragmentation pass, by setting +`pass.pMoves[i].operation` to #VMA_DEFRAGMENTATION_MOVE_OPERATION_IGNORE. +This is not recommended and may result in suboptimal packing of the allocations after defragmentation. +If you cannot ensure any allocation can be moved, it is better to keep movable allocations separate in a custom pool. + +Inside a pass, for each allocation that should be moved: + +- You should copy its data from the source to the destination place by calling e.g. `vkCmdCopyBuffer()`, `vkCmdCopyImage()`. + - You need to make sure these commands finished executing before destroying the source buffers/images and before calling vmaEndDefragmentationPass(). +- If a resource doesn't contain any meaningful data, e.g. it is a transient color attachment image to be cleared, + filled, and used temporarily in each rendering frame, you can just recreate this image + without copying its data. +- If the resource is in `HOST_VISIBLE` and `HOST_CACHED` memory, you can copy its data on the CPU + using `memcpy()`. +- If you cannot move the allocation, you can set `pass.pMoves[i].operation` to #VMA_DEFRAGMENTATION_MOVE_OPERATION_IGNORE. + This will cancel the move. + - vmaEndDefragmentationPass() will then free the destination memory + not the source memory of the allocation, leaving it unchanged. +- If you decide the allocation is unimportant and can be destroyed instead of moved (e.g. it wasn't used for long time), + you can set `pass.pMoves[i].operation` to #VMA_DEFRAGMENTATION_MOVE_OPERATION_DESTROY. + - vmaEndDefragmentationPass() will then free both source and destination memory, and will destroy the source #VmaAllocation object. + +You can defragment a specific custom pool by setting VmaDefragmentationInfo::pool +(like in the example above) or all the default pools by setting this member to null. + +Defragmentation is always performed in each pool separately. +Allocations are never moved between different Vulkan memory types. +The size of the destination memory reserved for a moved allocation is the same as the original one. +Alignment of an allocation as it was determined using `vkGetBufferMemoryRequirements()` etc. is also respected after defragmentation. +Buffers/images should be recreated with the same `VkBufferCreateInfo` / `VkImageCreateInfo` parameters as the original ones. + +You can perform the defragmentation incrementally to limit the number of allocations and bytes to be moved +in each pass, e.g. to call it in sync with render frames and not to experience too big hitches. +See members: VmaDefragmentationInfo::maxBytesPerPass, VmaDefragmentationInfo::maxAllocationsPerPass. + +It is also safe to perform the defragmentation asynchronously to render frames and other Vulkan and VMA +usage, possibly from multiple threads, with the exception that allocations +returned in VmaDefragmentationPassMoveInfo::pMoves shouldn't be destroyed until the defragmentation pass is ended. + +Mapping is preserved on allocations that are moved during defragmentation. +Whether through #VMA_ALLOCATION_CREATE_MAPPED_BIT or vmaMapMemory(), the allocations +are mapped at their new place. Of course, pointer to the mapped data changes, so it needs to be queried +using VmaAllocationInfo::pMappedData. + +\note Defragmentation is not supported in custom pools created with #VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT. 
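+
+For illustration, here is a minimal sketch of how such per-move decisions could look inside a pass,
+following the example above. `IsExpendable()` and `CanBeMoved()` are hypothetical helpers of your
+engine, not part of this library; the `operation` values are the ones described in this chapter.
+
+\code
+for(uint32_t i = 0; i < pass.moveCount; ++i)
+{
+    VmaAllocationInfo allocInfo;
+    vmaGetAllocationInfo(allocator, pass.pMoves[i].srcAllocation, &allocInfo);
+    MyEngineResourceData* resData = (MyEngineResourceData*)allocInfo.pUserData;
+
+    if(IsExpendable(resData))
+    {
+        // Not worth keeping - let VMA free both source and destination memory
+        // and destroy the source VmaAllocation object.
+        pass.pMoves[i].operation = VMA_DEFRAGMENTATION_MOVE_OPERATION_DESTROY;
+    }
+    else if(!CanBeMoved(resData))
+    {
+        // Leave this allocation where it is - only the reserved destination memory is freed.
+        pass.pMoves[i].operation = VMA_DEFRAGMENTATION_MOVE_OPERATION_IGNORE;
+    }
+    // Otherwise keep the default VMA_DEFRAGMENTATION_MOVE_OPERATION_COPY and
+    // recreate + copy the resource as shown in the example earlier in this chapter.
+}
+\endcode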
+
+
+\page statistics Statistics
+
+This library contains several functions that return information about its internal state,
+especially the amount of memory allocated from Vulkan.
+
+\section statistics_numeric_statistics Numeric statistics
+
+If you need to obtain basic statistics about memory usage per heap, together with current budget,
+you can call function vmaGetHeapBudgets() and inspect structure #VmaBudget.
+This is useful to keep track of memory usage and stay within budget
+(see also \ref staying_within_budget).
+Example:
+
+\code
+uint32_t heapIndex = ...
+
+VmaBudget budgets[VK_MAX_MEMORY_HEAPS];
+vmaGetHeapBudgets(allocator, budgets);
+
+printf("My heap currently has %u allocations taking %llu B,\n",
+    budgets[heapIndex].statistics.allocationCount,
+    budgets[heapIndex].statistics.allocationBytes);
+printf("allocated out of %u Vulkan device memory blocks taking %llu B,\n",
+    budgets[heapIndex].statistics.blockCount,
+    budgets[heapIndex].statistics.blockBytes);
+printf("Vulkan reports total usage %llu B with budget %llu B.\n",
+    budgets[heapIndex].usage,
+    budgets[heapIndex].budget);
+\endcode
+
+You can query for more detailed statistics per memory heap, type, and totals,
+including minimum and maximum allocation size and unused range size,
+by calling function vmaCalculateStatistics() and inspecting structure #VmaTotalStatistics.
+This function is slower though, as it has to traverse all the internal data structures,
+so it should be used only for debugging purposes.
+
+You can query for statistics of a custom pool using function vmaGetPoolStatistics()
+or vmaCalculatePoolStatistics().
+
+You can query for information about a specific allocation using function vmaGetAllocationInfo().
+It fills the structure #VmaAllocationInfo.
+
+\section statistics_json_dump JSON dump
+
+You can dump the internal state of the allocator to a string in JSON format using function vmaBuildStatsString().
+The result is guaranteed to be correct JSON.
+It uses ANSI encoding.
+Any strings provided by the user (see [Allocation names](@ref allocation_names))
+are copied as-is and properly escaped for JSON, so if they use UTF-8, ISO-8859-2 or any other encoding,
+this JSON string can be treated as using this encoding.
+It must be freed using function vmaFreeStatsString().
+
+The format of this JSON string is not part of the official documentation of the library,
+but it will not change in a backward-incompatible way without increasing the library's major version number
+and an appropriate mention in the changelog.
+
+The JSON string contains all the data that can be obtained using vmaCalculateStatistics().
+It can also contain a detailed map of allocated memory blocks and their regions -
+free and occupied by allocations.
+This allows you e.g. to visualize the memory or assess fragmentation.
+
+
+\page allocation_annotation Allocation names and user data
+
+\section allocation_user_data Allocation user data
+
+You can annotate allocations with your own information, e.g. for debugging purposes.
+To do that, fill the VmaAllocationCreateInfo::pUserData field when creating
+an allocation. It is an opaque `void*` pointer. You can use it e.g. as a pointer,
+some handle, index, key, ordinal number or any other value that would associate
+the allocation with your custom metadata.
+It is useful to identify appropriate data structures in your engine given a #VmaAllocation,
+e.g. when doing \ref defragmentation.
+
+\code
+VkBufferCreateInfo bufCreateInfo = ...
+ +MyBufferMetadata* pMetadata = CreateBufferMetadata(); + +VmaAllocationCreateInfo allocCreateInfo = {}; +allocCreateInfo.usage = VMA_MEMORY_USAGE_AUTO; +allocCreateInfo.pUserData = pMetadata; + +VkBuffer buffer; +VmaAllocation allocation; +vmaCreateBuffer(allocator, &bufCreateInfo, &allocCreateInfo, &buffer, &allocation, nullptr); +\endcode + +The pointer may be later retrieved as VmaAllocationInfo::pUserData: + +\code +VmaAllocationInfo allocInfo; +vmaGetAllocationInfo(allocator, allocation, &allocInfo); +MyBufferMetadata* pMetadata = (MyBufferMetadata*)allocInfo.pUserData; +\endcode + +It can also be changed using function vmaSetAllocationUserData(). + +Values of (non-zero) allocations' `pUserData` are printed in JSON report created by +vmaBuildStatsString() in hexadecimal form. + +\section allocation_names Allocation names + +An allocation can also carry a null-terminated string, giving a name to the allocation. +To set it, call vmaSetAllocationName(). +The library creates internal copy of the string, so the pointer you pass doesn't need +to be valid for whole lifetime of the allocation. You can free it after the call. + +\code +std::string imageName = "Texture: "; +imageName += fileName; +vmaSetAllocationName(allocator, allocation, imageName.c_str()); +\endcode + +The string can be later retrieved by inspecting VmaAllocationInfo::pName. +It is also printed in JSON report created by vmaBuildStatsString(). + +\note Setting string name to VMA allocation doesn't automatically set it to the Vulkan buffer or image created with it. +You must do it manually using an extension like VK_EXT_debug_utils, which is independent of this library. + + +\page virtual_allocator Virtual allocator + +As an extra feature, the core allocation algorithm of the library is exposed through a simple and convenient API of "virtual allocator". +It doesn't allocate any real GPU memory. It just keeps track of used and free regions of a "virtual block". +You can use it to allocate your own memory or other objects, even completely unrelated to Vulkan. +A common use case is sub-allocation of pieces of one large GPU buffer. + +\section virtual_allocator_creating_virtual_block Creating virtual block + +To use this functionality, there is no main "allocator" object. +You don't need to have #VmaAllocator object created. +All you need to do is to create a separate #VmaVirtualBlock object for each block of memory you want to be managed by the allocator: + +-# Fill in #VmaVirtualBlockCreateInfo structure. +-# Call vmaCreateVirtualBlock(). Get new #VmaVirtualBlock object. + +Example: + +\code +VmaVirtualBlockCreateInfo blockCreateInfo = {}; +blockCreateInfo.size = 1048576; // 1 MB + +VmaVirtualBlock block; +VkResult res = vmaCreateVirtualBlock(&blockCreateInfo, &block); +\endcode + +\section virtual_allocator_making_virtual_allocations Making virtual allocations + +#VmaVirtualBlock object contains internal data structure that keeps track of free and occupied regions +using the same code as the main Vulkan memory allocator. +Similarly to #VmaAllocation for standard GPU allocations, there is #VmaVirtualAllocation type +that represents an opaque handle to an allocation within the virtual block. + +In order to make such allocation: + +-# Fill in #VmaVirtualAllocationCreateInfo structure. +-# Call vmaVirtualAllocate(). Get new #VmaVirtualAllocation object that represents the allocation. + You can also receive `VkDeviceSize offset` that was assigned to the allocation. 
+
+Example:
+
+\code
+VmaVirtualAllocationCreateInfo allocCreateInfo = {};
+allocCreateInfo.size = 4096; // 4 KB
+
+VmaVirtualAllocation alloc;
+VkDeviceSize offset;
+res = vmaVirtualAllocate(block, &allocCreateInfo, &alloc, &offset);
+if(res == VK_SUCCESS)
+{
+    // Use the 4 KB of your memory starting at offset.
+}
+else
+{
+    // Allocation failed - no space for it could be found. Handle this error!
+}
+\endcode
+
+\section virtual_allocator_deallocation Deallocation
+
+When no longer needed, an allocation can be freed by calling vmaVirtualFree().
+You can only pass to this function an allocation that was previously returned by vmaVirtualAllocate()
+called for the same #VmaVirtualBlock.
+
+When the whole block is no longer needed, the block object can be released by calling vmaDestroyVirtualBlock().
+All allocations must be freed before the block is destroyed, which is checked internally by an assert.
+However, if you don't want to call vmaVirtualFree() for each allocation, you can use vmaClearVirtualBlock() to free them all at once -
+a feature not available in the normal Vulkan memory allocator. Example:
+
+\code
+vmaVirtualFree(block, alloc);
+vmaDestroyVirtualBlock(block);
+\endcode
+
+\section virtual_allocator_allocation_parameters Allocation parameters
+
+You can attach a custom pointer to each allocation by using vmaSetVirtualAllocationUserData().
+Its default value is null.
+It can be used to store any data that needs to be associated with that allocation - e.g. an index, a handle, or a pointer to some
+larger data structure containing more information. Example:
+
+\code
+struct CustomAllocData
+{
+    std::string m_AllocName;
+};
+CustomAllocData* allocData = new CustomAllocData();
+allocData->m_AllocName = "My allocation 1";
+vmaSetVirtualAllocationUserData(block, alloc, allocData);
+\endcode
+
+The pointer can later be fetched, along with allocation offset and size, by passing the allocation handle to the function
+vmaGetVirtualAllocationInfo() and inspecting the returned structure #VmaVirtualAllocationInfo.
+If you allocated a new object to be used as the custom pointer, don't forget to delete that object before freeing the allocation!
+Example:
+
+\code
+VmaVirtualAllocationInfo allocInfo;
+vmaGetVirtualAllocationInfo(block, alloc, &allocInfo);
+delete (CustomAllocData*)allocInfo.pUserData;
+
+vmaVirtualFree(block, alloc);
+\endcode
+
+\section virtual_allocator_alignment_and_units Alignment and units
+
+It feels natural to express sizes and offsets in bytes.
+If an offset of an allocation needs to be aligned to a multiple of some number (e.g. 4 bytes), you can fill the optional member
+VmaVirtualAllocationCreateInfo::alignment to request it. Example:
+
+\code
+VmaVirtualAllocationCreateInfo allocCreateInfo = {};
+allocCreateInfo.size = 4096; // 4 KB
+allocCreateInfo.alignment = 4; // Returned offset must be a multiple of 4 B
+
+VmaVirtualAllocation alloc;
+res = vmaVirtualAllocate(block, &allocCreateInfo, &alloc, nullptr);
+\endcode
+
+Alignments of different allocations made from one block may vary.
+However, if all alignments and sizes are always a multiple of some size e.g. 4 B or `sizeof(MyDataStruct)`,
+you can express all sizes, alignments, and offsets in multiples of that size instead of individual bytes.
+It might be more convenient, but you need to make sure to use this new unit consistently in all the places: + +- VmaVirtualBlockCreateInfo::size +- VmaVirtualAllocationCreateInfo::size and VmaVirtualAllocationCreateInfo::alignment +- Using offset returned by vmaVirtualAllocate() or in VmaVirtualAllocationInfo::offset + +\section virtual_allocator_statistics Statistics + +You can obtain statistics of a virtual block using vmaGetVirtualBlockStatistics() +(to get brief statistics that are fast to calculate) +or vmaCalculateVirtualBlockStatistics() (to get more detailed statistics, slower to calculate). +The functions fill structures #VmaStatistics, #VmaDetailedStatistics respectively - same as used by the normal Vulkan memory allocator. +Example: + +\code +VmaStatistics stats; +vmaGetVirtualBlockStatistics(block, &stats); +printf("My virtual block has %llu bytes used by %u virtual allocations\n", + stats.allocationBytes, stats.allocationCount); +\endcode + +You can also request a full list of allocations and free regions as a string in JSON format by calling +vmaBuildVirtualBlockStatsString(). +Returned string must be later freed using vmaFreeVirtualBlockStatsString(). +The format of this string differs from the one returned by the main Vulkan allocator, but it is similar. + +\section virtual_allocator_additional_considerations Additional considerations + +The "virtual allocator" functionality is implemented on a level of individual memory blocks. +Keeping track of a whole collection of blocks, allocating new ones when out of free space, +deleting empty ones, and deciding which one to try first for a new allocation must be implemented by the user. + +Alternative allocation algorithms are supported, just like in custom pools of the real GPU memory. +See enum #VmaVirtualBlockCreateFlagBits to learn how to specify them (e.g. #VMA_VIRTUAL_BLOCK_CREATE_LINEAR_ALGORITHM_BIT). +You can find their description in chapter \ref custom_memory_pools. +Allocation strategies are also supported. +See enum #VmaVirtualAllocationCreateFlagBits to learn how to specify them (e.g. #VMA_VIRTUAL_ALLOCATION_CREATE_STRATEGY_MIN_TIME_BIT). + +Following features are supported only by the allocator of the real GPU memory and not by virtual allocations: +buffer-image granularity, `VMA_DEBUG_MARGIN`, `VMA_MIN_ALIGNMENT`. + + +\page debugging_memory_usage Debugging incorrect memory usage + +If you suspect a bug with memory usage, like usage of uninitialized memory or +memory being overwritten out of bounds of an allocation, +you can use debug features of this library to verify this. + +\section debugging_memory_usage_initialization Memory initialization + +If you experience a bug with incorrect and nondeterministic data in your program and you suspect uninitialized memory to be used, +you can enable automatic memory initialization to verify this. +To do it, define macro `VMA_DEBUG_INITIALIZE_ALLOCATIONS` to 1. + +\code +#define VMA_DEBUG_INITIALIZE_ALLOCATIONS 1 +#include "vk_mem_alloc.h" +\endcode + +It makes memory of new allocations initialized to bit pattern `0xDCDCDCDC`. +Before an allocation is destroyed, its memory is filled with bit pattern `0xEFEFEFEF`. +Memory is automatically mapped and unmapped if necessary. + +If you find these values while debugging your program, good chances are that you incorrectly +read Vulkan memory that is allocated but not initialized, or already freed, respectively. + +Memory initialization works only with memory types that are `HOST_VISIBLE` and with allocations that can be mapped. 
+It works also with dedicated allocations.
+
+\section debugging_memory_usage_margins Margins
+
+By default, allocations are laid out in memory blocks next to each other if possible
+(considering required alignment, `bufferImageGranularity`, and `nonCoherentAtomSize`).
+
+![Allocations without margin](../gfx/Margins_1.png)
+
+Define macro `VMA_DEBUG_MARGIN` to some non-zero value (e.g. 16) to enforce the specified
+number of bytes as a margin after every allocation.
+
+\code
+#define VMA_DEBUG_MARGIN 16
+#include "vk_mem_alloc.h"
+\endcode
+
+![Allocations with margin](../gfx/Margins_2.png)
+
+If your bug goes away after enabling margins, it means it may be caused by memory
+being overwritten outside of allocation boundaries. It is not 100% certain though.
+A change in application behavior may also be caused by different order and distribution
+of allocations across memory blocks after margins are applied.
+
+Margins work with all types of memory.
+
+The margin is applied only to allocations made out of memory blocks and not to dedicated
+allocations, which have their own memory block of a specific size.
+It is thus not applied to allocations made using the #VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT flag
+or those automatically placed in dedicated allocations, e.g. due to their
+large size or because the VK_KHR_dedicated_allocation extension recommended it.
+
+Margins appear in [JSON dump](@ref statistics_json_dump) as part of free space.
+
+Note that enabling margins increases memory usage and fragmentation.
+
+Margins do not apply to \ref virtual_allocator.
+
+\section debugging_memory_usage_corruption_detection Corruption detection
+
+You can additionally define macro `VMA_DEBUG_DETECT_CORRUPTION` to 1 to enable validation
+of contents of the margins.
+
+\code
+#define VMA_DEBUG_MARGIN 16
+#define VMA_DEBUG_DETECT_CORRUPTION 1
+#include "vk_mem_alloc.h"
+\endcode
+
+When this feature is enabled, the number of bytes specified as `VMA_DEBUG_MARGIN`
+(it must be a multiple of 4) after every allocation is filled with a magic number.
+This idea is also known as a "canary".
+Memory is automatically mapped and unmapped if necessary.
+
+This number is validated automatically when the allocation is destroyed.
+If it is not equal to the expected value, `VMA_ASSERT()` is executed.
+It clearly means that either the CPU or the GPU overwrote the memory outside the boundaries of the allocation,
+which indicates a serious bug.
+
+You can also explicitly request checking margins of all allocations in all memory blocks
+that belong to specified memory types by using function vmaCheckCorruption(),
+or in memory blocks that belong to a specified custom pool, by using function
+vmaCheckPoolCorruption().
+
+Margin validation (corruption detection) works only for memory types that are
+`HOST_VISIBLE` and `HOST_COHERENT`.
+
+
+\section debugging_memory_usage_leak_detection Leak detection features
+
+At allocation and allocator destruction time VMA checks for unfreed and unmapped blocks using
+`VMA_ASSERT_LEAK()`. This macro defaults to an assertion, triggering a typically fatal error in Debug
+builds, and doing nothing in Release builds. You can provide your own definition of `VMA_ASSERT_LEAK()`
+to change this behavior.
+
+At memory block destruction time VMA lists out all unfreed allocations using the `VMA_LEAK_LOG_FORMAT()`
+macro, which defaults to `VMA_DEBUG_LOG_FORMAT`, which in turn defaults to a no-op.
+If you're having trouble with leaks - for example, the aforementioned assertion triggers, but you don't
+quite know \em why - overriding this macro to print out the leaking blocks, combined with assigning
+individual names to allocations using vmaSetAllocationName(), can greatly aid in fixing them.
+
+\page other_api_interop Interop with other graphics APIs
+
+VMA provides some features that help with interoperability with other graphics APIs, e.g. OpenGL.
+
+\section opengl_interop_exporting_memory Exporting memory
+
+If you want to attach `VkExportMemoryAllocateInfoKHR` or another structure to the `pNext` chain of memory allocations made by the library:
+
+You can create \ref custom_memory_pools for such allocations.
+Define and fill in your `VkExportMemoryAllocateInfoKHR` structure and attach it to VmaPoolCreateInfo::pMemoryAllocateNext
+while creating the custom pool.
+Please note that the structure must remain alive and unchanged for the whole lifetime of the #VmaPool,
+not only while creating it, as no copy of the structure is made,
+but its original pointer is used for each allocation instead.
+
+If you want to export all memory allocated by VMA from certain memory types,
+including dedicated allocations and other allocations made from default pools,
+an alternative solution is to fill in VmaAllocatorCreateInfo::pTypeExternalMemoryHandleTypes.
+It should point to an array with `VkExternalMemoryHandleTypeFlagsKHR` to be automatically passed by the library
+through `VkExportMemoryAllocateInfoKHR` on each allocation made from a specific memory type.
+Please note that new versions of the library also support dedicated allocations created in custom pools.
+
+You should not mix these two methods in a way that would apply both to the same memory type.
+Otherwise, the `VkExportMemoryAllocateInfoKHR` structure would be attached twice to the `pNext` chain of `VkMemoryAllocateInfo`.
+
+
+\section opengl_interop_custom_alignment Custom alignment
+
+Buffers or images exported to a different API like OpenGL may require a different alignment,
+higher than the one used by the library automatically, queried from functions like `vkGetBufferMemoryRequirements`.
+To impose such alignment:
+
+You can create \ref custom_memory_pools for such allocations.
+Set the VmaPoolCreateInfo::minAllocationAlignment member to the minimum alignment required for each allocation
+to be made out of this pool.
+The alignment actually used will be the maximum of this member and the alignment returned for the specific buffer or image
+from a function like `vkGetBufferMemoryRequirements`, which is called by VMA automatically.
+
+If you want to create a buffer with a specific minimum alignment out of default pools,
+use the special function vmaCreateBufferWithAlignment(), which takes an additional parameter `minAlignment`.
+
+Note that the problem of alignment affects only resources placed inside bigger `VkDeviceMemory` blocks and not dedicated
+allocations, as these, by definition, always have alignment = 0 because the resource is bound to the beginning of its dedicated block.
+You can ensure that an allocation is created as dedicated by using #VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT.
+Contrary to Direct3D 12, Vulkan doesn't have a concept of alignment of the entire memory block passed on its allocation.
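+
+For illustration, a minimal sketch of the vmaCreateBufferWithAlignment() approach described above.
+The 4096-byte alignment and the buffer parameters are only example assumptions for an exported
+staging buffer, not requirements of VMA or of any particular external API:
+
+\code
+VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
+bufCreateInfo.size = 65536;
+bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
+
+VmaAllocationCreateInfo allocCreateInfo = {};
+allocCreateInfo.usage = VMA_MEMORY_USAGE_AUTO;
+
+// The allocation is placed at an offset aligned to at least 4096 bytes,
+// or to the alignment required by Vulkan for this buffer, whichever is larger.
+VkBuffer buf;
+VmaAllocation alloc;
+VkResult res = vmaCreateBufferWithAlignment(allocator, &bufCreateInfo, &allocCreateInfo,
+    4096, &buf, &alloc, nullptr);
+// Check res...
+\endcode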
+
+\section opengl_interop_extended_allocation_information Extended allocation information
+
+If you want to rely on VMA to allocate your buffers and images inside larger memory blocks,
+but you need to know the size of the entire block and whether the allocation was made
+with its own dedicated memory, use the function vmaGetAllocationInfo2() to retrieve
+extended allocation information in the structure #VmaAllocationInfo2.
+
+
+
+\page usage_patterns Recommended usage patterns
+
+Vulkan gives great flexibility in memory allocation.
+This chapter shows the most common patterns.
+
+See also slides from the talk:
+[Sawicki, Adam. Advanced Graphics Techniques Tutorial: Memory management in Vulkan and DX12. Game Developers Conference, 2018](https://www.gdcvault.com/play/1025458/Advanced-Graphics-Techniques-Tutorial-New)
+
+
+\section usage_patterns_gpu_only GPU-only resource
+
+When:
+Any resources that you frequently write and read on the GPU,
+e.g. images used as color attachments (aka "render targets"), depth-stencil attachments,
+images/buffers used as storage image/buffer (aka "Unordered Access View (UAV)").
+
+What to do:
+Let the library select the optimal memory type, which will likely have `VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT`.
+
+\code
+VkImageCreateInfo imgCreateInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
+imgCreateInfo.imageType = VK_IMAGE_TYPE_2D;
+imgCreateInfo.extent.width = 3840;
+imgCreateInfo.extent.height = 2160;
+imgCreateInfo.extent.depth = 1;
+imgCreateInfo.mipLevels = 1;
+imgCreateInfo.arrayLayers = 1;
+imgCreateInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
+imgCreateInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
+imgCreateInfo.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
+imgCreateInfo.usage = VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
+imgCreateInfo.samples = VK_SAMPLE_COUNT_1_BIT;
+
+VmaAllocationCreateInfo allocCreateInfo = {};
+allocCreateInfo.usage = VMA_MEMORY_USAGE_AUTO;
+allocCreateInfo.flags = VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
+allocCreateInfo.priority = 1.0f;
+
+VkImage img;
+VmaAllocation alloc;
+vmaCreateImage(allocator, &imgCreateInfo, &allocCreateInfo, &img, &alloc, nullptr);
+\endcode
+
+Also consider:
+Creating such resources as dedicated allocations using #VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT,
+especially if they are large or if you plan to destroy and recreate them with different sizes,
+e.g. when display resolution changes.
+Prefer to create such resources first and all other GPU resources (like textures and vertex buffers) later.
+When the VK_EXT_memory_priority extension is enabled, it is also worth setting a high priority on such allocations
+to decrease their chances of being evicted to system memory by the operating system.
+
+\section usage_patterns_staging_copy_upload Staging copy for upload
+
+When:
+A "staging" buffer that you want to map and fill from CPU code, then use as a source of transfer
+to some GPU resource.
+
+What to do:
+Use the flag #VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT.
+Let the library select the optimal memory type, which will always have `VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT`.
+
+\code
+VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
+bufCreateInfo.size = 65536;
+bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
+
+VmaAllocationCreateInfo allocCreateInfo = {};
+allocCreateInfo.usage = VMA_MEMORY_USAGE_AUTO;
+allocCreateInfo.flags = VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT |
+    VMA_ALLOCATION_CREATE_MAPPED_BIT;
+
+VkBuffer buf;
+VmaAllocation alloc;
+VmaAllocationInfo allocInfo;
+vmaCreateBuffer(allocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
+
+...
+
+memcpy(allocInfo.pMappedData, myData, myDataSize);
+\endcode
+
+Also consider:
+You can map the allocation using vmaMapMemory() or you can create it as persistently mapped
+using #VMA_ALLOCATION_CREATE_MAPPED_BIT, as in the example above.
+
+
+\section usage_patterns_readback Readback
+
+When:
+Buffers for data written by or transferred from the GPU that you want to read back on the CPU,
+e.g. results of some computations.
+
+What to do:
+Use the flag #VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT.
+Let the library select the optimal memory type, which will always have `VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT`
+and `VK_MEMORY_PROPERTY_HOST_CACHED_BIT`.
+
+\code
+VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
+bufCreateInfo.size = 65536;
+bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_DST_BIT;
+
+VmaAllocationCreateInfo allocCreateInfo = {};
+allocCreateInfo.usage = VMA_MEMORY_USAGE_AUTO;
+allocCreateInfo.flags = VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT |
+    VMA_ALLOCATION_CREATE_MAPPED_BIT;
+
+VkBuffer buf;
+VmaAllocation alloc;
+VmaAllocationInfo allocInfo;
+vmaCreateBuffer(allocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
+
+...
+
+const float* downloadedData = (const float*)allocInfo.pMappedData;
+\endcode
+
+
+\section usage_patterns_advanced_data_uploading Advanced data uploading
+
+For resources that you frequently write on the CPU via a mapped pointer and
+frequently read on the GPU e.g. as a uniform buffer (also called "dynamic"), multiple options are possible:
+
+-# The easiest solution is to have one copy of the resource in `HOST_VISIBLE` memory,
+   even if it means system RAM (not `DEVICE_LOCAL`) on systems with a discrete graphics card,
+   and make the device reach out to that resource directly.
+   - Reads performed by the device will then go through the PCI Express bus.
+     The performance of this access may be limited, but it may be fine depending on the size
+     of this resource (whether it is small enough to quickly end up in the GPU cache) and the sparsity
+     of access.
+-# On systems with unified memory (e.g. AMD APU or Intel integrated graphics, mobile chips),
+   a memory type may be available that is both `HOST_VISIBLE` (available for mapping) and `DEVICE_LOCAL`
+   (fast to access from the GPU). Then, it is likely the best choice for this type of resource.
+-# Systems with a discrete graphics card and separate video memory may or may not expose
+   a memory type that is both `HOST_VISIBLE` and `DEVICE_LOCAL`, also known as Base Address Register (BAR).
+   If they do, it represents a piece of VRAM (or entire VRAM, if ReBAR is enabled in the motherboard BIOS)
+   that is available to the CPU for mapping.
+   - Writes performed by the host to that memory go through the PCI Express bus.
+     The performance of these writes may be limited, but it may be fine, especially on PCIe 4.0,
+     as long as the rules of using uncached and write-combined memory are followed - only sequential writes and no reads.
+-# Finally, you may need or prefer to create a separate copy of the resource in `DEVICE_LOCAL` memory,
+   a separate "staging" copy in `HOST_VISIBLE` memory and perform an explicit transfer command between them.
+
+Thankfully, VMA offers an aid to create and use such resources in the way optimal
+for the current Vulkan device. To help the library make the best choice,
+use flag #VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT together with
+#VMA_ALLOCATION_CREATE_HOST_ACCESS_ALLOW_TRANSFER_INSTEAD_BIT.
+It will then prefer a memory type that is both `DEVICE_LOCAL` and `HOST_VISIBLE` (integrated memory or BAR),
+but if no such memory type is available or allocation from it fails
+(PC graphics cards have only 256 MB of BAR by default, unless ReBAR is supported and enabled in BIOS),
+it will fall back to `DEVICE_LOCAL` memory for fast GPU access.
+It is then up to you to detect that the allocation ended up in a memory type that is not `HOST_VISIBLE`,
+so you need to create another "staging" allocation and perform explicit transfers.
+
+\code
+VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
+bufCreateInfo.size = 65536;
+bufCreateInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
+
+VmaAllocationCreateInfo allocCreateInfo = {};
+allocCreateInfo.usage = VMA_MEMORY_USAGE_AUTO;
+allocCreateInfo.flags = VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT |
+    VMA_ALLOCATION_CREATE_HOST_ACCESS_ALLOW_TRANSFER_INSTEAD_BIT |
+    VMA_ALLOCATION_CREATE_MAPPED_BIT;
+
+VkBuffer buf;
+VmaAllocation alloc;
+VmaAllocationInfo allocInfo;
+vmaCreateBuffer(allocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo);
+
+VkMemoryPropertyFlags memPropFlags;
+vmaGetAllocationMemoryProperties(allocator, alloc, &memPropFlags);
+
+if(memPropFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT)
+{
+    // Allocation ended up in a mappable memory and is already mapped - write to it directly.
+
+    // [Executed in runtime]:
+    memcpy(allocInfo.pMappedData, myData, myDataSize);
+}
+else
+{
+    // Allocation ended up in a non-mappable memory - need to transfer.
+    VkBufferCreateInfo stagingBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
+    stagingBufCreateInfo.size = 65536;
+    stagingBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
+
+    VmaAllocationCreateInfo stagingAllocCreateInfo = {};
+    stagingAllocCreateInfo.usage = VMA_MEMORY_USAGE_AUTO;
+    stagingAllocCreateInfo.flags = VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT |
+        VMA_ALLOCATION_CREATE_MAPPED_BIT;
+
+    VkBuffer stagingBuf;
+    VmaAllocation stagingAlloc;
+    VmaAllocationInfo stagingAllocInfo;
+    vmaCreateBuffer(allocator, &stagingBufCreateInfo, &stagingAllocCreateInfo,
+        &stagingBuf, &stagingAlloc, &stagingAllocInfo);
+
+    // [Executed in runtime]:
+    memcpy(stagingAllocInfo.pMappedData, myData, myDataSize);
+    vmaFlushAllocation(allocator, stagingAlloc, 0, VK_WHOLE_SIZE);
+    //vkCmdPipelineBarrier: VK_ACCESS_HOST_WRITE_BIT --> VK_ACCESS_TRANSFER_READ_BIT
+    VkBufferCopy bufCopy = {
+        0, // srcOffset
+        0, // dstOffset
+        myDataSize }; // size
+    vkCmdCopyBuffer(cmdBuf, stagingBuf, buf, 1, &bufCopy);
+}
+\endcode
+
+\section usage_patterns_other_use_cases Other use cases
+
+Here are some other, less obvious use cases and their recommended settings:
+
+- An image that is used only as a transfer source and destination, but it should stay on the device,
+  as it is used to temporarily store a copy of some texture, e.g. 
from the current to the next frame, + for temporal antialiasing or other temporal effects. + - Use `VkImageCreateInfo::usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT` + - Use VmaAllocationCreateInfo::usage = #VMA_MEMORY_USAGE_AUTO +- An image that is used only as transfer source and destination, but it should be placed + in the system RAM despite it doesn't need to be mapped, because it serves as a "swap" copy to evict + least recently used textures from VRAM. + - Use `VkImageCreateInfo::usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT` + - Use VmaAllocationCreateInfo::usage = #VMA_MEMORY_USAGE_AUTO_PREFER_HOST, + as VMA needs a hint here to differentiate from the previous case. +- A buffer that you want to map and write from the CPU, directly read from the GPU + (e.g. as a uniform or vertex buffer), but you have a clear preference to place it in device or + host memory due to its large size. + - Use `VkBufferCreateInfo::usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT` + - Use VmaAllocationCreateInfo::usage = #VMA_MEMORY_USAGE_AUTO_PREFER_DEVICE or #VMA_MEMORY_USAGE_AUTO_PREFER_HOST + - Use VmaAllocationCreateInfo::flags = #VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT + + +\page configuration Configuration + +Please check "CONFIGURATION SECTION" in the code to find macros that you can define +before each include of this file or change directly in this file to provide +your own implementation of basic facilities like assert, `min()` and `max()` functions, +mutex, atomic etc. +The library uses its own implementation of containers by default, but you can switch to using +STL containers instead. + +For example, define `VMA_ASSERT(expr)` before including the library to provide +custom implementation of the assertion, compatible with your project. +By default it is defined to standard C `assert(expr)` in `_DEBUG` configuration +and empty otherwise. + +\section config_Vulkan_functions Pointers to Vulkan functions + +There are multiple ways to import pointers to Vulkan functions in the library. +In the simplest case you don't need to do anything. +If the compilation or linking of your program or the initialization of the #VmaAllocator +doesn't work for you, you can try to reconfigure it. + +First, the allocator tries to fetch pointers to Vulkan functions linked statically, +like this: + +\code +m_VulkanFunctions.vkAllocateMemory = (PFN_vkAllocateMemory)vkAllocateMemory; +\endcode + +If you want to disable this feature, set configuration macro: `#define VMA_STATIC_VULKAN_FUNCTIONS 0`. + +Second, you can provide the pointers yourself by setting member VmaAllocatorCreateInfo::pVulkanFunctions. +You can fetch them e.g. using functions `vkGetInstanceProcAddr` and `vkGetDeviceProcAddr` or +by using a helper library like [volk](https://github.com/zeux/volk). + +Third, VMA tries to fetch remaining pointers that are still null by calling +`vkGetInstanceProcAddr` and `vkGetDeviceProcAddr` on its own. +You need to only fill in VmaVulkanFunctions::vkGetInstanceProcAddr and VmaVulkanFunctions::vkGetDeviceProcAddr. +Other pointers will be fetched automatically. +If you want to disable this feature, set configuration macro: `#define VMA_DYNAMIC_VULKAN_FUNCTIONS 0`. + +Finally, all the function pointers required by the library (considering selected +Vulkan version and enabled extensions) are checked with `VMA_ASSERT` if they are not null. 
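+
+For illustration, a minimal sketch of this setup, assuming `instance`, `physicalDevice`, and `device`
+are Vulkan handles your application has already created; the rest uses only the members described above:
+
+\code
+VmaVulkanFunctions vulkanFunctions = {};
+vulkanFunctions.vkGetInstanceProcAddr = vkGetInstanceProcAddr;
+vulkanFunctions.vkGetDeviceProcAddr = vkGetDeviceProcAddr;
+
+VmaAllocatorCreateInfo allocatorCreateInfo = {};
+allocatorCreateInfo.vulkanApiVersion = VK_API_VERSION_1_2;
+allocatorCreateInfo.instance = instance;
+allocatorCreateInfo.physicalDevice = physicalDevice;
+allocatorCreateInfo.device = device;
+allocatorCreateInfo.pVulkanFunctions = &vulkanFunctions;
+
+// Remaining function pointers are fetched by VMA itself through
+// vkGetInstanceProcAddr / vkGetDeviceProcAddr.
+VmaAllocator allocator;
+vmaCreateAllocator(&allocatorCreateInfo, &allocator);
+\endcode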
+
+
+\section custom_memory_allocator Custom host memory allocator
+
+If you use a custom allocator for CPU memory rather than the default `new`
+and `delete` operators from C++, you can make this library use your allocator as well
+by filling in the optional member VmaAllocatorCreateInfo::pAllocationCallbacks. These
+functions will be passed to Vulkan, as well as used by the library itself to
+make any CPU-side allocations.
+
+\section allocation_callbacks Device memory allocation callbacks
+
+The library makes calls to `vkAllocateMemory()` and `vkFreeMemory()` internally.
+You can set up callbacks to be informed about these calls, e.g. for the purpose
+of gathering some statistics. To do it, fill in the optional member
+VmaAllocatorCreateInfo::pDeviceMemoryCallbacks.
+
+\section heap_memory_limit Device heap memory limit
+
+When device memory of a certain heap runs out of free space, new allocations may
+fail (returning an error code) or they may succeed, silently pushing some existing
+memory blocks from GPU VRAM to system RAM (which degrades performance). This
+behavior is implementation-dependent - it depends on the GPU vendor and graphics
+driver.
+
+On AMD cards it can be controlled while creating the Vulkan device object by using the
+VK_AMD_memory_overallocation_behavior extension, if available.
+
+Alternatively, if you want to test how your program behaves with a limited amount of Vulkan device
+memory available without switching your graphics card to one that really has
+smaller VRAM, you can use a feature of this library intended for this purpose.
+To do it, fill in the optional member VmaAllocatorCreateInfo::pHeapSizeLimit.
+
+
+
+\page vk_khr_dedicated_allocation VK_KHR_dedicated_allocation
+
+VK_KHR_dedicated_allocation is a Vulkan extension which can be used to improve
+performance on some GPUs. It augments the Vulkan API with the possibility to query the
+driver whether it prefers a particular buffer or image to have its own, dedicated
+allocation (separate `VkDeviceMemory` block) for better efficiency - to be able
+to do some internal optimizations. The extension is supported by this library.
+It will be used automatically when enabled.
+
+It has been promoted to core Vulkan 1.1, so if you use an eligible Vulkan version
+and inform VMA about it by setting VmaAllocatorCreateInfo::vulkanApiVersion,
+you are all set.
+
+Otherwise, if you want to use it as an extension:
+
+1 . When creating the Vulkan device, check if the following 2 device extensions are
+supported (call `vkEnumerateDeviceExtensionProperties()`).
+If yes, enable them (fill `VkDeviceCreateInfo::ppEnabledExtensionNames`).
+
+- VK_KHR_get_memory_requirements2
+- VK_KHR_dedicated_allocation
+
+If you enabled these extensions:
+
+2 . Use the #VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT flag when creating
+your #VmaAllocator to inform the library that you enabled the required extensions
+and you want the library to use them.
+
+\code
+allocatorInfo.flags |= VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT;
+
+vmaCreateAllocator(&allocatorInfo, &allocator);
+\endcode
+
+That is all. The extension will be automatically used whenever you create a
+buffer using vmaCreateBuffer() or an image using vmaCreateImage().
+
+When using the extension together with the Vulkan Validation Layer, you will receive
+warnings like this:
+
+_vkBindBufferMemory(): Binding memory to buffer 0x33 but vkGetBufferMemoryRequirements() has not been called on that buffer._
+
+It is OK, you should just ignore it.
 It happens because you use the function
+`vkGetBufferMemoryRequirements2KHR()` instead of the standard
+`vkGetBufferMemoryRequirements()`, while the validation layer seems to be
+unaware of it.
+
+To learn more about this extension, see:
+
+- [VK_KHR_dedicated_allocation in Vulkan specification](https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/chap50.html#VK_KHR_dedicated_allocation)
+- [VK_KHR_dedicated_allocation unofficial manual](http://asawicki.info/articles/VK_KHR_dedicated_allocation.php5)
+
+
+
+\page vk_ext_memory_priority VK_EXT_memory_priority
+
+VK_EXT_memory_priority is a device extension that allows passing an additional "priority"
+value to Vulkan memory allocations, which the implementation may use to prefer certain
+buffers and images that are critical for performance to stay in device-local memory
+in cases when the memory is over-subscribed, while some others may be moved to the system memory.
+
+VMA offers convenient usage of this extension.
+If you enable it, you can pass the "priority" parameter when creating allocations or custom pools
+and the library automatically passes the value to Vulkan using this extension.
+
+If you want to use this extension in connection with VMA, follow these steps:
+
+\section vk_ext_memory_priority_initialization Initialization
+
+1) Call `vkEnumerateDeviceExtensionProperties` for the physical device.
+Check if the extension is supported - if the returned array of `VkExtensionProperties` contains "VK_EXT_memory_priority".
+
+2) Call `vkGetPhysicalDeviceFeatures2` for the physical device instead of the old `vkGetPhysicalDeviceFeatures`.
+Attach the additional structure `VkPhysicalDeviceMemoryPriorityFeaturesEXT` to `VkPhysicalDeviceFeatures2::pNext` to be returned.
+Check if the device feature is really supported - check if `VkPhysicalDeviceMemoryPriorityFeaturesEXT::memoryPriority` is true.
+
+3) While creating the device with `vkCreateDevice`, enable this extension - add "VK_EXT_memory_priority"
+to the list passed as `VkDeviceCreateInfo::ppEnabledExtensionNames`.
+
+4) While creating the device, also don't set `VkDeviceCreateInfo::pEnabledFeatures`.
+Fill in the `VkPhysicalDeviceFeatures2` structure instead and pass it as `VkDeviceCreateInfo::pNext`.
+Enable this device feature - attach the additional structure `VkPhysicalDeviceMemoryPriorityFeaturesEXT` to the
+`VkPhysicalDeviceFeatures2::pNext` chain and set its member `memoryPriority` to `VK_TRUE`.
+
+5) While creating the #VmaAllocator with vmaCreateAllocator(), inform VMA that you
+have enabled this extension and feature - add #VMA_ALLOCATOR_CREATE_EXT_MEMORY_PRIORITY_BIT
+to VmaAllocatorCreateInfo::flags.
+
+\section vk_ext_memory_priority_usage Usage
+
+When using this extension, you should initialize the following members:
+
+- VmaAllocationCreateInfo::priority when creating a dedicated allocation with #VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT.
+- VmaPoolCreateInfo::priority when creating a custom pool.
+
+It should be a floating-point value between `0.0f` and `1.0f`, where the recommended default is `0.5f`.
+Memory allocated with a higher value can be treated by the Vulkan implementation as higher priority,
+so it has a lower chance of being pushed out to system memory and experiencing degraded performance.
+
+It might be a good idea to create performance-critical resources like color-attachment or depth-stencil images
+as dedicated and set a high priority to them. 
For example: + +\code +VkImageCreateInfo imgCreateInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO }; +imgCreateInfo.imageType = VK_IMAGE_TYPE_2D; +imgCreateInfo.extent.width = 3840; +imgCreateInfo.extent.height = 2160; +imgCreateInfo.extent.depth = 1; +imgCreateInfo.mipLevels = 1; +imgCreateInfo.arrayLayers = 1; +imgCreateInfo.format = VK_FORMAT_R8G8B8A8_UNORM; +imgCreateInfo.tiling = VK_IMAGE_TILING_OPTIMAL; +imgCreateInfo.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED; +imgCreateInfo.usage = VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT; +imgCreateInfo.samples = VK_SAMPLE_COUNT_1_BIT; + +VmaAllocationCreateInfo allocCreateInfo = {}; +allocCreateInfo.usage = VMA_MEMORY_USAGE_AUTO; +allocCreateInfo.flags = VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT; +allocCreateInfo.priority = 1.0f; + +VkImage img; +VmaAllocation alloc; +vmaCreateImage(allocator, &imgCreateInfo, &allocCreateInfo, &img, &alloc, nullptr); +\endcode + +`priority` member is ignored in the following situations: + +- Allocations created in custom pools: They inherit the priority, along with all other allocation parameters + from the parameters passed in #VmaPoolCreateInfo when the pool was created. +- Allocations created in default pools: They inherit the priority from the parameters + VMA used when creating default pools, which means `priority == 0.5f`. + + +\page vk_amd_device_coherent_memory VK_AMD_device_coherent_memory + +VK_AMD_device_coherent_memory is a device extension that enables access to +additional memory types with `VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD` and +`VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD` flag. It is useful mostly for +allocation of buffers intended for writing "breadcrumb markers" in between passes +or draw calls, which in turn are useful for debugging GPU crash/hang/TDR cases. + +When the extension is available but has not been enabled, Vulkan physical device +still exposes those memory types, but their usage is forbidden. VMA automatically +takes care of that - it returns `VK_ERROR_FEATURE_NOT_PRESENT` when an attempt +to allocate memory of such type is made. + +If you want to use this extension in connection with VMA, follow these steps: + +\section vk_amd_device_coherent_memory_initialization Initialization + +1) Call `vkEnumerateDeviceExtensionProperties` for the physical device. +Check if the extension is supported - if returned array of `VkExtensionProperties` contains "VK_AMD_device_coherent_memory". + +2) Call `vkGetPhysicalDeviceFeatures2` for the physical device instead of old `vkGetPhysicalDeviceFeatures`. +Attach additional structure `VkPhysicalDeviceCoherentMemoryFeaturesAMD` to `VkPhysicalDeviceFeatures2::pNext` to be returned. +Check if the device feature is really supported - check if `VkPhysicalDeviceCoherentMemoryFeaturesAMD::deviceCoherentMemory` is true. + +3) While creating device with `vkCreateDevice`, enable this extension - add "VK_AMD_device_coherent_memory" +to the list passed as `VkDeviceCreateInfo::ppEnabledExtensionNames`. + +4) While creating the device, also don't set `VkDeviceCreateInfo::pEnabledFeatures`. +Fill in `VkPhysicalDeviceFeatures2` structure instead and pass it as `VkDeviceCreateInfo::pNext`. +Enable this device feature - attach additional structure `VkPhysicalDeviceCoherentMemoryFeaturesAMD` to +`VkPhysicalDeviceFeatures2::pNext` and set its member `deviceCoherentMemory` to `VK_TRUE`. 
+ +5) While creating the #VmaAllocator with vmaCreateAllocator(), inform VMA that you +have enabled this extension and feature - add #VMA_ALLOCATOR_CREATE_AMD_DEVICE_COHERENT_MEMORY_BIT +to VmaAllocatorCreateInfo::flags. + +\section vk_amd_device_coherent_memory_usage Usage + +After following the steps described above, you can create VMA allocations and custom pools +out of the special `DEVICE_COHERENT` and `DEVICE_UNCACHED` memory types on eligible +devices. There are multiple ways to do it, for example: + +- You can request or prefer to allocate out of such memory types by adding + `VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD` to VmaAllocationCreateInfo::requiredFlags + or VmaAllocationCreateInfo::preferredFlags. Those flags can be freely mixed with + other ways of \ref choosing_memory_type, like setting VmaAllocationCreateInfo::usage. +- If you manually found a memory type index to use for this purpose, force allocation + from this specific index by setting VmaAllocationCreateInfo::memoryTypeBits `= 1u << index`. + +\section vk_amd_device_coherent_memory_more_information More information + +To learn more about this extension, see [VK_AMD_device_coherent_memory in Vulkan specification](https://www.khronos.org/registry/vulkan/specs/1.2-extensions/man/html/VK_AMD_device_coherent_memory.html) + +Example use of this extension can be found in the code of the sample and test suite +accompanying this library. + + +\page enabling_buffer_device_address Enabling buffer device address + +The device extension VK_KHR_buffer_device_address +allows fetching a raw GPU pointer to a buffer and passing it for use in shader code. +It has been promoted to core Vulkan 1.2. + +If you want to use this feature in connection with VMA, follow these steps: + +\section enabling_buffer_device_address_initialization Initialization + +1) (For Vulkan version < 1.2) Call `vkEnumerateDeviceExtensionProperties` for the physical device. +Check if the extension is supported - if the returned array of `VkExtensionProperties` contains +"VK_KHR_buffer_device_address". + +2) Call `vkGetPhysicalDeviceFeatures2` for the physical device instead of the old `vkGetPhysicalDeviceFeatures`. +Attach the additional structure `VkPhysicalDeviceBufferDeviceAddressFeatures*` to `VkPhysicalDeviceFeatures2::pNext` to be returned. +Check if the device feature is really supported - check if `VkPhysicalDeviceBufferDeviceAddressFeatures::bufferDeviceAddress` is true. + +3) (For Vulkan version < 1.2) While creating the device with `vkCreateDevice`, enable this extension - add +"VK_KHR_buffer_device_address" to the list passed as `VkDeviceCreateInfo::ppEnabledExtensionNames`. + +4) While creating the device, also don't set `VkDeviceCreateInfo::pEnabledFeatures`. +Fill in the `VkPhysicalDeviceFeatures2` structure instead and pass it as `VkDeviceCreateInfo::pNext`. +Enable this device feature - attach the additional structure `VkPhysicalDeviceBufferDeviceAddressFeatures*` to +`VkPhysicalDeviceFeatures2::pNext` and set its member `bufferDeviceAddress` to `VK_TRUE`. + +5) While creating the #VmaAllocator with vmaCreateAllocator(), inform VMA that you +have enabled this feature - add #VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT +to VmaAllocatorCreateInfo::flags. + +\section enabling_buffer_device_address_usage Usage + +After following the steps described above, you can create buffers with `VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT*` using VMA. +The library automatically adds `VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT*` to +allocated memory blocks wherever it might be needed. 
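+
+As a minimal sketch (assuming the allocator was created with
+#VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT as described above, and that `device` and
+`allocator` are valid handles), such a buffer could be created and its device address
+fetched like this:
+
+\code
+VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
+bufCreateInfo.size = 65536;
+bufCreateInfo.usage = VK_BUFFER_USAGE_STORAGE_BUFFER_BIT | VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT;
+
+VmaAllocationCreateInfo allocCreateInfo = {};
+allocCreateInfo.usage = VMA_MEMORY_USAGE_AUTO;
+
+VkBuffer buf;
+VmaAllocation alloc;
+vmaCreateBuffer(allocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, nullptr);
+
+// With the bufferDeviceAddress feature enabled, the raw GPU address can be queried (core Vulkan 1.2).
+VkBufferDeviceAddressInfo addressInfo = { VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO };
+addressInfo.buffer = buf;
+VkDeviceAddress gpuAddress = vkGetBufferDeviceAddress(device, &addressInfo);
+\endcode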
+ +Please note that the library supports only `VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT*`. +The second part of this functionality related to "capture and replay" is not supported, +as it is intended for usage in debugging tools like RenderDoc, not in everyday Vulkan usage. + +\section enabling_buffer_device_address_more_information More information + +To learn more about this extension, see [VK_KHR_buffer_device_address in Vulkan specification](https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/chap46.html#VK_KHR_buffer_device_address) + +Example use of this extension can be found in the code of the sample and test suite +accompanying this library. + +\page general_considerations General considerations + +\section general_considerations_thread_safety Thread safety + +- The library has no global state, so separate #VmaAllocator objects can be used + independently. + There should be no need to create multiple such objects though - one per `VkDevice` is enough. +- By default, all calls to functions that take #VmaAllocator as first parameter + are safe to call from multiple threads simultaneously because they are + synchronized internally when needed. + This includes allocation and deallocation from default memory pool, as well as custom #VmaPool. +- When the allocator is created with #VMA_ALLOCATOR_CREATE_EXTERNALLY_SYNCHRONIZED_BIT + flag, calls to functions that take such #VmaAllocator object must be + synchronized externally. +- Access to a #VmaAllocation object must be externally synchronized. For example, + you must not call vmaGetAllocationInfo() and vmaMapMemory() from different + threads at the same time if you pass the same #VmaAllocation object to these + functions. +- #VmaVirtualBlock is not safe to be used from multiple threads simultaneously. + +\section general_considerations_versioning_and_compatibility Versioning and compatibility + +The library uses [**Semantic Versioning**](https://semver.org/), +which means version numbers follow convention: Major.Minor.Patch (e.g. 2.3.0), where: + +- Incremented Patch version means a release is backward- and forward-compatible, + introducing only some internal improvements, bug fixes, optimizations etc. + or changes that are out of scope of the official API described in this documentation. +- Incremented Minor version means a release is backward-compatible, + so existing code that uses the library should continue to work, while some new + symbols could have been added: new structures, functions, new values in existing + enums and bit flags, new structure members, but not new function parameters. +- Incrementing Major version means a release could break some backward compatibility. + +All changes between official releases are documented in file "CHANGELOG.md". + +\warning Backward compatibility is considered on the level of C++ source code, not binary linkage. +Adding new members to existing structures is treated as backward compatible if initializing +the new members to binary zero results in the old behavior. +You should always fully initialize all library structures to zeros and not rely on their +exact binary size. + +\section general_considerations_validation_layer_warnings Validation layer warnings + +When using this library, you can meet following types of warnings issued by +Vulkan validation layer. They don't necessarily indicate a bug, so you may need +to just ignore them. 
+ +- *vkBindBufferMemory(): Binding memory to buffer 0xeb8e4 but vkGetBufferMemoryRequirements() has not been called on that buffer.* + - It happens when VK_KHR_dedicated_allocation extension is enabled. + `vkGetBufferMemoryRequirements2KHR` function is used instead, while validation layer seems to be unaware of it. +- *Mapping an image with layout VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL can result in undefined behavior if this memory is used by the device. Only GENERAL or PREINITIALIZED should be used.* + - It happens when you map a buffer or image, because the library maps entire + `VkDeviceMemory` block, where different types of images and buffers may end + up together, especially on GPUs with unified memory like Intel. +- *Non-linear image 0xebc91 is aliased with linear buffer 0xeb8e4 which may indicate a bug.* + - It may happen when you use [defragmentation](@ref defragmentation). + +\section general_considerations_allocation_algorithm Allocation algorithm + +The library uses following algorithm for allocation, in order: + +-# Try to find free range of memory in existing blocks. +-# If failed, try to create a new block of `VkDeviceMemory`, with preferred block size. +-# If failed, try to create such block with size / 2, size / 4, size / 8. +-# If failed, try to allocate separate `VkDeviceMemory` for this allocation, + just like when you use #VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT. +-# If failed, choose other memory type that meets the requirements specified in + VmaAllocationCreateInfo and go to point 1. +-# If failed, return `VK_ERROR_OUT_OF_DEVICE_MEMORY`. + +\section general_considerations_features_not_supported Features not supported + +Features deliberately excluded from the scope of this library: + +-# **Data transfer.** Uploading (streaming) and downloading data of buffers and images + between CPU and GPU memory and related synchronization is responsibility of the user. + Defining some "texture" object that would automatically stream its data from a + staging copy in CPU memory to GPU memory would rather be a feature of another, + higher-level library implemented on top of VMA. + VMA doesn't record any commands to a `VkCommandBuffer`. It just allocates memory. +-# **Recreation of buffers and images.** Although the library has functions for + buffer and image creation: vmaCreateBuffer(), vmaCreateImage(), you need to + recreate these objects yourself after defragmentation. That is because the big + structures `VkBufferCreateInfo`, `VkImageCreateInfo` are not stored in + #VmaAllocation object. +-# **Handling CPU memory allocation failures.** When dynamically creating small C++ + objects in CPU memory (not Vulkan memory), allocation failures are not checked + and handled gracefully, because that would complicate code significantly and + is usually not needed in desktop PC applications anyway. + Success of an allocation is just checked with an assert. +-# **Code free of any compiler warnings.** Maintaining the library to compile and + work correctly on so many different platforms is hard enough. Being free of + any warnings, on any version of any compiler, is simply not feasible. + There are many preprocessor macros that make some variables unused, function parameters unreferenced, + or conditional expressions constant in some configurations. + The code of this library should not be bigger or more complicated just to silence these warnings. + It is recommended to disable such warnings instead. +-# This is a C++ library with C interface. 
**Bindings or ports to any other programming languages** are welcome as external projects but + are not going to be included into this repository. +*/ diff --git a/third_party/volk.c b/third_party/volk.c deleted file mode 100644 index 03b6486..0000000 --- a/third_party/volk.c +++ /dev/null @@ -1,3041 +0,0 @@ -/* This file is part of volk library; see volk.h for version/license details */ -/* clang-format off */ -#include "volk.h" - -#ifdef _WIN32 - typedef const char* LPCSTR; - typedef struct HINSTANCE__* HINSTANCE; - typedef HINSTANCE HMODULE; - #if defined(_MINWINDEF_) - /* minwindef.h defines FARPROC, and attempting to redefine it may conflict with -Wstrict-prototypes */ - #elif defined(_WIN64) - typedef __int64 (__stdcall* FARPROC)(void); - #else - typedef int (__stdcall* FARPROC)(void); - #endif -#else -# include -#endif - -#ifdef __cplusplus -extern "C" { -#endif - -#ifdef _WIN32 -__declspec(dllimport) HMODULE __stdcall LoadLibraryA(LPCSTR); -__declspec(dllimport) FARPROC __stdcall GetProcAddress(HMODULE, LPCSTR); -__declspec(dllimport) int __stdcall FreeLibrary(HMODULE); -#endif - -static void* loadedModule = NULL; -static VkInstance loadedInstance = VK_NULL_HANDLE; -static VkDevice loadedDevice = VK_NULL_HANDLE; - -static void volkGenLoadLoader(void* context, PFN_vkVoidFunction (*load)(void*, const char*)); -static void volkGenLoadInstance(void* context, PFN_vkVoidFunction (*load)(void*, const char*)); -static void volkGenLoadDevice(void* context, PFN_vkVoidFunction (*load)(void*, const char*)); -static void volkGenLoadDeviceTable(struct VolkDeviceTable* table, void* context, PFN_vkVoidFunction (*load)(void*, const char*)); - -static PFN_vkVoidFunction vkGetInstanceProcAddrStub(void* context, const char* name) -{ - return vkGetInstanceProcAddr((VkInstance)context, name); -} - -static PFN_vkVoidFunction vkGetDeviceProcAddrStub(void* context, const char* name) -{ - return vkGetDeviceProcAddr((VkDevice)context, name); -} - -static PFN_vkVoidFunction nullProcAddrStub(void* context, const char* name) -{ - (void)context; - (void)name; - return NULL; -} - -VkResult volkInitialize(void) -{ -#if defined(_WIN32) - HMODULE module = LoadLibraryA("vulkan-1.dll"); - if (!module) - return VK_ERROR_INITIALIZATION_FAILED; - - // note: function pointer is cast through void function pointer to silence cast-function-type warning on gcc8 - vkGetInstanceProcAddr = (PFN_vkGetInstanceProcAddr)(void(*)(void))GetProcAddress(module, "vkGetInstanceProcAddr"); -#elif defined(__APPLE__) - void* module = dlopen("libvulkan.dylib", RTLD_NOW | RTLD_LOCAL); - if (!module) - module = dlopen("libvulkan.1.dylib", RTLD_NOW | RTLD_LOCAL); - if (!module) - module = dlopen("libMoltenVK.dylib", RTLD_NOW | RTLD_LOCAL); - if (!module) - return VK_ERROR_INITIALIZATION_FAILED; - - vkGetInstanceProcAddr = (PFN_vkGetInstanceProcAddr)dlsym(module, "vkGetInstanceProcAddr"); -#else - void* module = dlopen("libvulkan.so.1", RTLD_NOW | RTLD_LOCAL); - if (!module) - module = dlopen("libvulkan.so", RTLD_NOW | RTLD_LOCAL); - if (!module) - return VK_ERROR_INITIALIZATION_FAILED; - - vkGetInstanceProcAddr = (PFN_vkGetInstanceProcAddr)dlsym(module, "vkGetInstanceProcAddr"); -#endif - - loadedModule = module; - volkGenLoadLoader(NULL, vkGetInstanceProcAddrStub); - - return VK_SUCCESS; -} - -void volkInitializeCustom(PFN_vkGetInstanceProcAddr handler) -{ - vkGetInstanceProcAddr = handler; - - loadedModule = NULL; - volkGenLoadLoader(NULL, vkGetInstanceProcAddrStub); -} - -void volkFinalize(void) -{ - if (loadedModule) - { -#if defined(_WIN32) 
- FreeLibrary((HMODULE)loadedModule); -#else - dlclose(loadedModule); -#endif - } - - vkGetInstanceProcAddr = NULL; - volkGenLoadLoader(NULL, nullProcAddrStub); - volkGenLoadInstance(NULL, nullProcAddrStub); - volkGenLoadDevice(NULL, nullProcAddrStub); - - loadedModule = NULL; - loadedInstance = VK_NULL_HANDLE; - loadedDevice = VK_NULL_HANDLE; -} - -uint32_t volkGetInstanceVersion(void) -{ -#if defined(VK_VERSION_1_1) - uint32_t apiVersion = 0; - if (vkEnumerateInstanceVersion && vkEnumerateInstanceVersion(&apiVersion) == VK_SUCCESS) - return apiVersion; -#endif - - if (vkCreateInstance) - return VK_API_VERSION_1_0; - - return 0; -} - -void volkLoadInstance(VkInstance instance) -{ - loadedInstance = instance; - volkGenLoadInstance(instance, vkGetInstanceProcAddrStub); - volkGenLoadDevice(instance, vkGetInstanceProcAddrStub); -} - -void volkLoadInstanceOnly(VkInstance instance) -{ - loadedInstance = instance; - volkGenLoadInstance(instance, vkGetInstanceProcAddrStub); -} - -VkInstance volkGetLoadedInstance(void) -{ - return loadedInstance; -} - -void volkLoadDevice(VkDevice device) -{ - loadedDevice = device; - volkGenLoadDevice(device, vkGetDeviceProcAddrStub); -} - -VkDevice volkGetLoadedDevice(void) -{ - return loadedDevice; -} - -void volkLoadDeviceTable(struct VolkDeviceTable* table, VkDevice device) -{ - volkGenLoadDeviceTable(table, device, vkGetDeviceProcAddrStub); -} - -static void volkGenLoadLoader(void* context, PFN_vkVoidFunction (*load)(void*, const char*)) -{ - /* VOLK_GENERATE_LOAD_LOADER */ -#if defined(VK_VERSION_1_0) - vkCreateInstance = (PFN_vkCreateInstance)load(context, "vkCreateInstance"); - vkEnumerateInstanceExtensionProperties = (PFN_vkEnumerateInstanceExtensionProperties)load(context, "vkEnumerateInstanceExtensionProperties"); - vkEnumerateInstanceLayerProperties = (PFN_vkEnumerateInstanceLayerProperties)load(context, "vkEnumerateInstanceLayerProperties"); -#endif /* defined(VK_VERSION_1_0) */ -#if defined(VK_VERSION_1_1) - vkEnumerateInstanceVersion = (PFN_vkEnumerateInstanceVersion)load(context, "vkEnumerateInstanceVersion"); -#endif /* defined(VK_VERSION_1_1) */ - /* VOLK_GENERATE_LOAD_LOADER */ -} - -static void volkGenLoadInstance(void* context, PFN_vkVoidFunction (*load)(void*, const char*)) -{ - /* VOLK_GENERATE_LOAD_INSTANCE */ -#if defined(VK_VERSION_1_0) - vkCreateDevice = (PFN_vkCreateDevice)load(context, "vkCreateDevice"); - vkDestroyInstance = (PFN_vkDestroyInstance)load(context, "vkDestroyInstance"); - vkEnumerateDeviceExtensionProperties = (PFN_vkEnumerateDeviceExtensionProperties)load(context, "vkEnumerateDeviceExtensionProperties"); - vkEnumerateDeviceLayerProperties = (PFN_vkEnumerateDeviceLayerProperties)load(context, "vkEnumerateDeviceLayerProperties"); - vkEnumeratePhysicalDevices = (PFN_vkEnumeratePhysicalDevices)load(context, "vkEnumeratePhysicalDevices"); - vkGetDeviceProcAddr = (PFN_vkGetDeviceProcAddr)load(context, "vkGetDeviceProcAddr"); - vkGetPhysicalDeviceFeatures = (PFN_vkGetPhysicalDeviceFeatures)load(context, "vkGetPhysicalDeviceFeatures"); - vkGetPhysicalDeviceFormatProperties = (PFN_vkGetPhysicalDeviceFormatProperties)load(context, "vkGetPhysicalDeviceFormatProperties"); - vkGetPhysicalDeviceImageFormatProperties = (PFN_vkGetPhysicalDeviceImageFormatProperties)load(context, "vkGetPhysicalDeviceImageFormatProperties"); - vkGetPhysicalDeviceMemoryProperties = (PFN_vkGetPhysicalDeviceMemoryProperties)load(context, "vkGetPhysicalDeviceMemoryProperties"); - vkGetPhysicalDeviceProperties = 
(PFN_vkGetPhysicalDeviceProperties)load(context, "vkGetPhysicalDeviceProperties"); - vkGetPhysicalDeviceQueueFamilyProperties = (PFN_vkGetPhysicalDeviceQueueFamilyProperties)load(context, "vkGetPhysicalDeviceQueueFamilyProperties"); - vkGetPhysicalDeviceSparseImageFormatProperties = (PFN_vkGetPhysicalDeviceSparseImageFormatProperties)load(context, "vkGetPhysicalDeviceSparseImageFormatProperties"); -#endif /* defined(VK_VERSION_1_0) */ -#if defined(VK_VERSION_1_1) - vkEnumeratePhysicalDeviceGroups = (PFN_vkEnumeratePhysicalDeviceGroups)load(context, "vkEnumeratePhysicalDeviceGroups"); - vkGetPhysicalDeviceExternalBufferProperties = (PFN_vkGetPhysicalDeviceExternalBufferProperties)load(context, "vkGetPhysicalDeviceExternalBufferProperties"); - vkGetPhysicalDeviceExternalFenceProperties = (PFN_vkGetPhysicalDeviceExternalFenceProperties)load(context, "vkGetPhysicalDeviceExternalFenceProperties"); - vkGetPhysicalDeviceExternalSemaphoreProperties = (PFN_vkGetPhysicalDeviceExternalSemaphoreProperties)load(context, "vkGetPhysicalDeviceExternalSemaphoreProperties"); - vkGetPhysicalDeviceFeatures2 = (PFN_vkGetPhysicalDeviceFeatures2)load(context, "vkGetPhysicalDeviceFeatures2"); - vkGetPhysicalDeviceFormatProperties2 = (PFN_vkGetPhysicalDeviceFormatProperties2)load(context, "vkGetPhysicalDeviceFormatProperties2"); - vkGetPhysicalDeviceImageFormatProperties2 = (PFN_vkGetPhysicalDeviceImageFormatProperties2)load(context, "vkGetPhysicalDeviceImageFormatProperties2"); - vkGetPhysicalDeviceMemoryProperties2 = (PFN_vkGetPhysicalDeviceMemoryProperties2)load(context, "vkGetPhysicalDeviceMemoryProperties2"); - vkGetPhysicalDeviceProperties2 = (PFN_vkGetPhysicalDeviceProperties2)load(context, "vkGetPhysicalDeviceProperties2"); - vkGetPhysicalDeviceQueueFamilyProperties2 = (PFN_vkGetPhysicalDeviceQueueFamilyProperties2)load(context, "vkGetPhysicalDeviceQueueFamilyProperties2"); - vkGetPhysicalDeviceSparseImageFormatProperties2 = (PFN_vkGetPhysicalDeviceSparseImageFormatProperties2)load(context, "vkGetPhysicalDeviceSparseImageFormatProperties2"); -#endif /* defined(VK_VERSION_1_1) */ -#if defined(VK_VERSION_1_3) - vkGetPhysicalDeviceToolProperties = (PFN_vkGetPhysicalDeviceToolProperties)load(context, "vkGetPhysicalDeviceToolProperties"); -#endif /* defined(VK_VERSION_1_3) */ -#if defined(VK_EXT_acquire_drm_display) - vkAcquireDrmDisplayEXT = (PFN_vkAcquireDrmDisplayEXT)load(context, "vkAcquireDrmDisplayEXT"); - vkGetDrmDisplayEXT = (PFN_vkGetDrmDisplayEXT)load(context, "vkGetDrmDisplayEXT"); -#endif /* defined(VK_EXT_acquire_drm_display) */ -#if defined(VK_EXT_acquire_xlib_display) - vkAcquireXlibDisplayEXT = (PFN_vkAcquireXlibDisplayEXT)load(context, "vkAcquireXlibDisplayEXT"); - vkGetRandROutputDisplayEXT = (PFN_vkGetRandROutputDisplayEXT)load(context, "vkGetRandROutputDisplayEXT"); -#endif /* defined(VK_EXT_acquire_xlib_display) */ -#if defined(VK_EXT_calibrated_timestamps) - vkGetPhysicalDeviceCalibrateableTimeDomainsEXT = (PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT)load(context, "vkGetPhysicalDeviceCalibrateableTimeDomainsEXT"); -#endif /* defined(VK_EXT_calibrated_timestamps) */ -#if defined(VK_EXT_debug_report) - vkCreateDebugReportCallbackEXT = (PFN_vkCreateDebugReportCallbackEXT)load(context, "vkCreateDebugReportCallbackEXT"); - vkDebugReportMessageEXT = (PFN_vkDebugReportMessageEXT)load(context, "vkDebugReportMessageEXT"); - vkDestroyDebugReportCallbackEXT = (PFN_vkDestroyDebugReportCallbackEXT)load(context, "vkDestroyDebugReportCallbackEXT"); -#endif /* defined(VK_EXT_debug_report) */ -#if 
defined(VK_EXT_debug_utils) - vkCmdBeginDebugUtilsLabelEXT = (PFN_vkCmdBeginDebugUtilsLabelEXT)load(context, "vkCmdBeginDebugUtilsLabelEXT"); - vkCmdEndDebugUtilsLabelEXT = (PFN_vkCmdEndDebugUtilsLabelEXT)load(context, "vkCmdEndDebugUtilsLabelEXT"); - vkCmdInsertDebugUtilsLabelEXT = (PFN_vkCmdInsertDebugUtilsLabelEXT)load(context, "vkCmdInsertDebugUtilsLabelEXT"); - vkCreateDebugUtilsMessengerEXT = (PFN_vkCreateDebugUtilsMessengerEXT)load(context, "vkCreateDebugUtilsMessengerEXT"); - vkDestroyDebugUtilsMessengerEXT = (PFN_vkDestroyDebugUtilsMessengerEXT)load(context, "vkDestroyDebugUtilsMessengerEXT"); - vkQueueBeginDebugUtilsLabelEXT = (PFN_vkQueueBeginDebugUtilsLabelEXT)load(context, "vkQueueBeginDebugUtilsLabelEXT"); - vkQueueEndDebugUtilsLabelEXT = (PFN_vkQueueEndDebugUtilsLabelEXT)load(context, "vkQueueEndDebugUtilsLabelEXT"); - vkQueueInsertDebugUtilsLabelEXT = (PFN_vkQueueInsertDebugUtilsLabelEXT)load(context, "vkQueueInsertDebugUtilsLabelEXT"); - vkSetDebugUtilsObjectNameEXT = (PFN_vkSetDebugUtilsObjectNameEXT)load(context, "vkSetDebugUtilsObjectNameEXT"); - vkSetDebugUtilsObjectTagEXT = (PFN_vkSetDebugUtilsObjectTagEXT)load(context, "vkSetDebugUtilsObjectTagEXT"); - vkSubmitDebugUtilsMessageEXT = (PFN_vkSubmitDebugUtilsMessageEXT)load(context, "vkSubmitDebugUtilsMessageEXT"); -#endif /* defined(VK_EXT_debug_utils) */ -#if defined(VK_EXT_direct_mode_display) - vkReleaseDisplayEXT = (PFN_vkReleaseDisplayEXT)load(context, "vkReleaseDisplayEXT"); -#endif /* defined(VK_EXT_direct_mode_display) */ -#if defined(VK_EXT_directfb_surface) - vkCreateDirectFBSurfaceEXT = (PFN_vkCreateDirectFBSurfaceEXT)load(context, "vkCreateDirectFBSurfaceEXT"); - vkGetPhysicalDeviceDirectFBPresentationSupportEXT = (PFN_vkGetPhysicalDeviceDirectFBPresentationSupportEXT)load(context, "vkGetPhysicalDeviceDirectFBPresentationSupportEXT"); -#endif /* defined(VK_EXT_directfb_surface) */ -#if defined(VK_EXT_display_surface_counter) - vkGetPhysicalDeviceSurfaceCapabilities2EXT = (PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT)load(context, "vkGetPhysicalDeviceSurfaceCapabilities2EXT"); -#endif /* defined(VK_EXT_display_surface_counter) */ -#if defined(VK_EXT_full_screen_exclusive) - vkGetPhysicalDeviceSurfacePresentModes2EXT = (PFN_vkGetPhysicalDeviceSurfacePresentModes2EXT)load(context, "vkGetPhysicalDeviceSurfacePresentModes2EXT"); -#endif /* defined(VK_EXT_full_screen_exclusive) */ -#if defined(VK_EXT_headless_surface) - vkCreateHeadlessSurfaceEXT = (PFN_vkCreateHeadlessSurfaceEXT)load(context, "vkCreateHeadlessSurfaceEXT"); -#endif /* defined(VK_EXT_headless_surface) */ -#if defined(VK_EXT_metal_surface) - vkCreateMetalSurfaceEXT = (PFN_vkCreateMetalSurfaceEXT)load(context, "vkCreateMetalSurfaceEXT"); -#endif /* defined(VK_EXT_metal_surface) */ -#if defined(VK_EXT_sample_locations) - vkGetPhysicalDeviceMultisamplePropertiesEXT = (PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT)load(context, "vkGetPhysicalDeviceMultisamplePropertiesEXT"); -#endif /* defined(VK_EXT_sample_locations) */ -#if defined(VK_EXT_tooling_info) - vkGetPhysicalDeviceToolPropertiesEXT = (PFN_vkGetPhysicalDeviceToolPropertiesEXT)load(context, "vkGetPhysicalDeviceToolPropertiesEXT"); -#endif /* defined(VK_EXT_tooling_info) */ -#if defined(VK_FUCHSIA_imagepipe_surface) - vkCreateImagePipeSurfaceFUCHSIA = (PFN_vkCreateImagePipeSurfaceFUCHSIA)load(context, "vkCreateImagePipeSurfaceFUCHSIA"); -#endif /* defined(VK_FUCHSIA_imagepipe_surface) */ -#if defined(VK_GGP_stream_descriptor_surface) - vkCreateStreamDescriptorSurfaceGGP = 
(PFN_vkCreateStreamDescriptorSurfaceGGP)load(context, "vkCreateStreamDescriptorSurfaceGGP"); -#endif /* defined(VK_GGP_stream_descriptor_surface) */ -#if defined(VK_KHR_android_surface) - vkCreateAndroidSurfaceKHR = (PFN_vkCreateAndroidSurfaceKHR)load(context, "vkCreateAndroidSurfaceKHR"); -#endif /* defined(VK_KHR_android_surface) */ -#if defined(VK_KHR_cooperative_matrix) - vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR = (PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR)load(context, "vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR"); -#endif /* defined(VK_KHR_cooperative_matrix) */ -#if defined(VK_KHR_device_group_creation) - vkEnumeratePhysicalDeviceGroupsKHR = (PFN_vkEnumeratePhysicalDeviceGroupsKHR)load(context, "vkEnumeratePhysicalDeviceGroupsKHR"); -#endif /* defined(VK_KHR_device_group_creation) */ -#if defined(VK_KHR_display) - vkCreateDisplayModeKHR = (PFN_vkCreateDisplayModeKHR)load(context, "vkCreateDisplayModeKHR"); - vkCreateDisplayPlaneSurfaceKHR = (PFN_vkCreateDisplayPlaneSurfaceKHR)load(context, "vkCreateDisplayPlaneSurfaceKHR"); - vkGetDisplayModePropertiesKHR = (PFN_vkGetDisplayModePropertiesKHR)load(context, "vkGetDisplayModePropertiesKHR"); - vkGetDisplayPlaneCapabilitiesKHR = (PFN_vkGetDisplayPlaneCapabilitiesKHR)load(context, "vkGetDisplayPlaneCapabilitiesKHR"); - vkGetDisplayPlaneSupportedDisplaysKHR = (PFN_vkGetDisplayPlaneSupportedDisplaysKHR)load(context, "vkGetDisplayPlaneSupportedDisplaysKHR"); - vkGetPhysicalDeviceDisplayPlanePropertiesKHR = (PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR)load(context, "vkGetPhysicalDeviceDisplayPlanePropertiesKHR"); - vkGetPhysicalDeviceDisplayPropertiesKHR = (PFN_vkGetPhysicalDeviceDisplayPropertiesKHR)load(context, "vkGetPhysicalDeviceDisplayPropertiesKHR"); -#endif /* defined(VK_KHR_display) */ -#if defined(VK_KHR_external_fence_capabilities) - vkGetPhysicalDeviceExternalFencePropertiesKHR = (PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR)load(context, "vkGetPhysicalDeviceExternalFencePropertiesKHR"); -#endif /* defined(VK_KHR_external_fence_capabilities) */ -#if defined(VK_KHR_external_memory_capabilities) - vkGetPhysicalDeviceExternalBufferPropertiesKHR = (PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR)load(context, "vkGetPhysicalDeviceExternalBufferPropertiesKHR"); -#endif /* defined(VK_KHR_external_memory_capabilities) */ -#if defined(VK_KHR_external_semaphore_capabilities) - vkGetPhysicalDeviceExternalSemaphorePropertiesKHR = (PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR)load(context, "vkGetPhysicalDeviceExternalSemaphorePropertiesKHR"); -#endif /* defined(VK_KHR_external_semaphore_capabilities) */ -#if defined(VK_KHR_fragment_shading_rate) - vkGetPhysicalDeviceFragmentShadingRatesKHR = (PFN_vkGetPhysicalDeviceFragmentShadingRatesKHR)load(context, "vkGetPhysicalDeviceFragmentShadingRatesKHR"); -#endif /* defined(VK_KHR_fragment_shading_rate) */ -#if defined(VK_KHR_get_display_properties2) - vkGetDisplayModeProperties2KHR = (PFN_vkGetDisplayModeProperties2KHR)load(context, "vkGetDisplayModeProperties2KHR"); - vkGetDisplayPlaneCapabilities2KHR = (PFN_vkGetDisplayPlaneCapabilities2KHR)load(context, "vkGetDisplayPlaneCapabilities2KHR"); - vkGetPhysicalDeviceDisplayPlaneProperties2KHR = (PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR)load(context, "vkGetPhysicalDeviceDisplayPlaneProperties2KHR"); - vkGetPhysicalDeviceDisplayProperties2KHR = (PFN_vkGetPhysicalDeviceDisplayProperties2KHR)load(context, "vkGetPhysicalDeviceDisplayProperties2KHR"); -#endif /* 
defined(VK_KHR_get_display_properties2) */ -#if defined(VK_KHR_get_physical_device_properties2) - vkGetPhysicalDeviceFeatures2KHR = (PFN_vkGetPhysicalDeviceFeatures2KHR)load(context, "vkGetPhysicalDeviceFeatures2KHR"); - vkGetPhysicalDeviceFormatProperties2KHR = (PFN_vkGetPhysicalDeviceFormatProperties2KHR)load(context, "vkGetPhysicalDeviceFormatProperties2KHR"); - vkGetPhysicalDeviceImageFormatProperties2KHR = (PFN_vkGetPhysicalDeviceImageFormatProperties2KHR)load(context, "vkGetPhysicalDeviceImageFormatProperties2KHR"); - vkGetPhysicalDeviceMemoryProperties2KHR = (PFN_vkGetPhysicalDeviceMemoryProperties2KHR)load(context, "vkGetPhysicalDeviceMemoryProperties2KHR"); - vkGetPhysicalDeviceProperties2KHR = (PFN_vkGetPhysicalDeviceProperties2KHR)load(context, "vkGetPhysicalDeviceProperties2KHR"); - vkGetPhysicalDeviceQueueFamilyProperties2KHR = (PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR)load(context, "vkGetPhysicalDeviceQueueFamilyProperties2KHR"); - vkGetPhysicalDeviceSparseImageFormatProperties2KHR = (PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR)load(context, "vkGetPhysicalDeviceSparseImageFormatProperties2KHR"); -#endif /* defined(VK_KHR_get_physical_device_properties2) */ -#if defined(VK_KHR_get_surface_capabilities2) - vkGetPhysicalDeviceSurfaceCapabilities2KHR = (PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR)load(context, "vkGetPhysicalDeviceSurfaceCapabilities2KHR"); - vkGetPhysicalDeviceSurfaceFormats2KHR = (PFN_vkGetPhysicalDeviceSurfaceFormats2KHR)load(context, "vkGetPhysicalDeviceSurfaceFormats2KHR"); -#endif /* defined(VK_KHR_get_surface_capabilities2) */ -#if defined(VK_KHR_performance_query) - vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR = (PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR)load(context, "vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR"); - vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR = (PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR)load(context, "vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR"); -#endif /* defined(VK_KHR_performance_query) */ -#if defined(VK_KHR_surface) - vkDestroySurfaceKHR = (PFN_vkDestroySurfaceKHR)load(context, "vkDestroySurfaceKHR"); - vkGetPhysicalDeviceSurfaceCapabilitiesKHR = (PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR)load(context, "vkGetPhysicalDeviceSurfaceCapabilitiesKHR"); - vkGetPhysicalDeviceSurfaceFormatsKHR = (PFN_vkGetPhysicalDeviceSurfaceFormatsKHR)load(context, "vkGetPhysicalDeviceSurfaceFormatsKHR"); - vkGetPhysicalDeviceSurfacePresentModesKHR = (PFN_vkGetPhysicalDeviceSurfacePresentModesKHR)load(context, "vkGetPhysicalDeviceSurfacePresentModesKHR"); - vkGetPhysicalDeviceSurfaceSupportKHR = (PFN_vkGetPhysicalDeviceSurfaceSupportKHR)load(context, "vkGetPhysicalDeviceSurfaceSupportKHR"); -#endif /* defined(VK_KHR_surface) */ -#if defined(VK_KHR_video_encode_queue) - vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR = (PFN_vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR)load(context, "vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR"); -#endif /* defined(VK_KHR_video_encode_queue) */ -#if defined(VK_KHR_video_queue) - vkGetPhysicalDeviceVideoCapabilitiesKHR = (PFN_vkGetPhysicalDeviceVideoCapabilitiesKHR)load(context, "vkGetPhysicalDeviceVideoCapabilitiesKHR"); - vkGetPhysicalDeviceVideoFormatPropertiesKHR = (PFN_vkGetPhysicalDeviceVideoFormatPropertiesKHR)load(context, "vkGetPhysicalDeviceVideoFormatPropertiesKHR"); -#endif /* defined(VK_KHR_video_queue) */ -#if defined(VK_KHR_wayland_surface) - 
vkCreateWaylandSurfaceKHR = (PFN_vkCreateWaylandSurfaceKHR)load(context, "vkCreateWaylandSurfaceKHR"); - vkGetPhysicalDeviceWaylandPresentationSupportKHR = (PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR)load(context, "vkGetPhysicalDeviceWaylandPresentationSupportKHR"); -#endif /* defined(VK_KHR_wayland_surface) */ -#if defined(VK_KHR_win32_surface) - vkCreateWin32SurfaceKHR = (PFN_vkCreateWin32SurfaceKHR)load(context, "vkCreateWin32SurfaceKHR"); - vkGetPhysicalDeviceWin32PresentationSupportKHR = (PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR)load(context, "vkGetPhysicalDeviceWin32PresentationSupportKHR"); -#endif /* defined(VK_KHR_win32_surface) */ -#if defined(VK_KHR_xcb_surface) - vkCreateXcbSurfaceKHR = (PFN_vkCreateXcbSurfaceKHR)load(context, "vkCreateXcbSurfaceKHR"); - vkGetPhysicalDeviceXcbPresentationSupportKHR = (PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR)load(context, "vkGetPhysicalDeviceXcbPresentationSupportKHR"); -#endif /* defined(VK_KHR_xcb_surface) */ -#if defined(VK_KHR_xlib_surface) - vkCreateXlibSurfaceKHR = (PFN_vkCreateXlibSurfaceKHR)load(context, "vkCreateXlibSurfaceKHR"); - vkGetPhysicalDeviceXlibPresentationSupportKHR = (PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR)load(context, "vkGetPhysicalDeviceXlibPresentationSupportKHR"); -#endif /* defined(VK_KHR_xlib_surface) */ -#if defined(VK_MVK_ios_surface) - vkCreateIOSSurfaceMVK = (PFN_vkCreateIOSSurfaceMVK)load(context, "vkCreateIOSSurfaceMVK"); -#endif /* defined(VK_MVK_ios_surface) */ -#if defined(VK_MVK_macos_surface) - vkCreateMacOSSurfaceMVK = (PFN_vkCreateMacOSSurfaceMVK)load(context, "vkCreateMacOSSurfaceMVK"); -#endif /* defined(VK_MVK_macos_surface) */ -#if defined(VK_NN_vi_surface) - vkCreateViSurfaceNN = (PFN_vkCreateViSurfaceNN)load(context, "vkCreateViSurfaceNN"); -#endif /* defined(VK_NN_vi_surface) */ -#if defined(VK_NV_acquire_winrt_display) - vkAcquireWinrtDisplayNV = (PFN_vkAcquireWinrtDisplayNV)load(context, "vkAcquireWinrtDisplayNV"); - vkGetWinrtDisplayNV = (PFN_vkGetWinrtDisplayNV)load(context, "vkGetWinrtDisplayNV"); -#endif /* defined(VK_NV_acquire_winrt_display) */ -#if defined(VK_NV_cooperative_matrix) - vkGetPhysicalDeviceCooperativeMatrixPropertiesNV = (PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV)load(context, "vkGetPhysicalDeviceCooperativeMatrixPropertiesNV"); -#endif /* defined(VK_NV_cooperative_matrix) */ -#if defined(VK_NV_coverage_reduction_mode) - vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV = (PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV)load(context, "vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV"); -#endif /* defined(VK_NV_coverage_reduction_mode) */ -#if defined(VK_NV_external_memory_capabilities) - vkGetPhysicalDeviceExternalImageFormatPropertiesNV = (PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV)load(context, "vkGetPhysicalDeviceExternalImageFormatPropertiesNV"); -#endif /* defined(VK_NV_external_memory_capabilities) */ -#if defined(VK_NV_optical_flow) - vkGetPhysicalDeviceOpticalFlowImageFormatsNV = (PFN_vkGetPhysicalDeviceOpticalFlowImageFormatsNV)load(context, "vkGetPhysicalDeviceOpticalFlowImageFormatsNV"); -#endif /* defined(VK_NV_optical_flow) */ -#if defined(VK_QNX_screen_surface) - vkCreateScreenSurfaceQNX = (PFN_vkCreateScreenSurfaceQNX)load(context, "vkCreateScreenSurfaceQNX"); - vkGetPhysicalDeviceScreenPresentationSupportQNX = (PFN_vkGetPhysicalDeviceScreenPresentationSupportQNX)load(context, "vkGetPhysicalDeviceScreenPresentationSupportQNX"); -#endif /* 
defined(VK_QNX_screen_surface) */ -#if (defined(VK_KHR_device_group) && defined(VK_KHR_surface)) || (defined(VK_KHR_swapchain) && defined(VK_VERSION_1_1)) - vkGetPhysicalDevicePresentRectanglesKHR = (PFN_vkGetPhysicalDevicePresentRectanglesKHR)load(context, "vkGetPhysicalDevicePresentRectanglesKHR"); -#endif /* (defined(VK_KHR_device_group) && defined(VK_KHR_surface)) || (defined(VK_KHR_swapchain) && defined(VK_VERSION_1_1)) */ - /* VOLK_GENERATE_LOAD_INSTANCE */ -} - -static void volkGenLoadDevice(void* context, PFN_vkVoidFunction (*load)(void*, const char*)) -{ - /* VOLK_GENERATE_LOAD_DEVICE */ -#if defined(VK_VERSION_1_0) - vkAllocateCommandBuffers = (PFN_vkAllocateCommandBuffers)load(context, "vkAllocateCommandBuffers"); - vkAllocateDescriptorSets = (PFN_vkAllocateDescriptorSets)load(context, "vkAllocateDescriptorSets"); - vkAllocateMemory = (PFN_vkAllocateMemory)load(context, "vkAllocateMemory"); - vkBeginCommandBuffer = (PFN_vkBeginCommandBuffer)load(context, "vkBeginCommandBuffer"); - vkBindBufferMemory = (PFN_vkBindBufferMemory)load(context, "vkBindBufferMemory"); - vkBindImageMemory = (PFN_vkBindImageMemory)load(context, "vkBindImageMemory"); - vkCmdBeginQuery = (PFN_vkCmdBeginQuery)load(context, "vkCmdBeginQuery"); - vkCmdBeginRenderPass = (PFN_vkCmdBeginRenderPass)load(context, "vkCmdBeginRenderPass"); - vkCmdBindDescriptorSets = (PFN_vkCmdBindDescriptorSets)load(context, "vkCmdBindDescriptorSets"); - vkCmdBindIndexBuffer = (PFN_vkCmdBindIndexBuffer)load(context, "vkCmdBindIndexBuffer"); - vkCmdBindPipeline = (PFN_vkCmdBindPipeline)load(context, "vkCmdBindPipeline"); - vkCmdBindVertexBuffers = (PFN_vkCmdBindVertexBuffers)load(context, "vkCmdBindVertexBuffers"); - vkCmdBlitImage = (PFN_vkCmdBlitImage)load(context, "vkCmdBlitImage"); - vkCmdClearAttachments = (PFN_vkCmdClearAttachments)load(context, "vkCmdClearAttachments"); - vkCmdClearColorImage = (PFN_vkCmdClearColorImage)load(context, "vkCmdClearColorImage"); - vkCmdClearDepthStencilImage = (PFN_vkCmdClearDepthStencilImage)load(context, "vkCmdClearDepthStencilImage"); - vkCmdCopyBuffer = (PFN_vkCmdCopyBuffer)load(context, "vkCmdCopyBuffer"); - vkCmdCopyBufferToImage = (PFN_vkCmdCopyBufferToImage)load(context, "vkCmdCopyBufferToImage"); - vkCmdCopyImage = (PFN_vkCmdCopyImage)load(context, "vkCmdCopyImage"); - vkCmdCopyImageToBuffer = (PFN_vkCmdCopyImageToBuffer)load(context, "vkCmdCopyImageToBuffer"); - vkCmdCopyQueryPoolResults = (PFN_vkCmdCopyQueryPoolResults)load(context, "vkCmdCopyQueryPoolResults"); - vkCmdDispatch = (PFN_vkCmdDispatch)load(context, "vkCmdDispatch"); - vkCmdDispatchIndirect = (PFN_vkCmdDispatchIndirect)load(context, "vkCmdDispatchIndirect"); - vkCmdDraw = (PFN_vkCmdDraw)load(context, "vkCmdDraw"); - vkCmdDrawIndexed = (PFN_vkCmdDrawIndexed)load(context, "vkCmdDrawIndexed"); - vkCmdDrawIndexedIndirect = (PFN_vkCmdDrawIndexedIndirect)load(context, "vkCmdDrawIndexedIndirect"); - vkCmdDrawIndirect = (PFN_vkCmdDrawIndirect)load(context, "vkCmdDrawIndirect"); - vkCmdEndQuery = (PFN_vkCmdEndQuery)load(context, "vkCmdEndQuery"); - vkCmdEndRenderPass = (PFN_vkCmdEndRenderPass)load(context, "vkCmdEndRenderPass"); - vkCmdExecuteCommands = (PFN_vkCmdExecuteCommands)load(context, "vkCmdExecuteCommands"); - vkCmdFillBuffer = (PFN_vkCmdFillBuffer)load(context, "vkCmdFillBuffer"); - vkCmdNextSubpass = (PFN_vkCmdNextSubpass)load(context, "vkCmdNextSubpass"); - vkCmdPipelineBarrier = (PFN_vkCmdPipelineBarrier)load(context, "vkCmdPipelineBarrier"); - vkCmdPushConstants = (PFN_vkCmdPushConstants)load(context, 
"vkCmdPushConstants"); - vkCmdResetEvent = (PFN_vkCmdResetEvent)load(context, "vkCmdResetEvent"); - vkCmdResetQueryPool = (PFN_vkCmdResetQueryPool)load(context, "vkCmdResetQueryPool"); - vkCmdResolveImage = (PFN_vkCmdResolveImage)load(context, "vkCmdResolveImage"); - vkCmdSetBlendConstants = (PFN_vkCmdSetBlendConstants)load(context, "vkCmdSetBlendConstants"); - vkCmdSetDepthBias = (PFN_vkCmdSetDepthBias)load(context, "vkCmdSetDepthBias"); - vkCmdSetDepthBounds = (PFN_vkCmdSetDepthBounds)load(context, "vkCmdSetDepthBounds"); - vkCmdSetEvent = (PFN_vkCmdSetEvent)load(context, "vkCmdSetEvent"); - vkCmdSetLineWidth = (PFN_vkCmdSetLineWidth)load(context, "vkCmdSetLineWidth"); - vkCmdSetScissor = (PFN_vkCmdSetScissor)load(context, "vkCmdSetScissor"); - vkCmdSetStencilCompareMask = (PFN_vkCmdSetStencilCompareMask)load(context, "vkCmdSetStencilCompareMask"); - vkCmdSetStencilReference = (PFN_vkCmdSetStencilReference)load(context, "vkCmdSetStencilReference"); - vkCmdSetStencilWriteMask = (PFN_vkCmdSetStencilWriteMask)load(context, "vkCmdSetStencilWriteMask"); - vkCmdSetViewport = (PFN_vkCmdSetViewport)load(context, "vkCmdSetViewport"); - vkCmdUpdateBuffer = (PFN_vkCmdUpdateBuffer)load(context, "vkCmdUpdateBuffer"); - vkCmdWaitEvents = (PFN_vkCmdWaitEvents)load(context, "vkCmdWaitEvents"); - vkCmdWriteTimestamp = (PFN_vkCmdWriteTimestamp)load(context, "vkCmdWriteTimestamp"); - vkCreateBuffer = (PFN_vkCreateBuffer)load(context, "vkCreateBuffer"); - vkCreateBufferView = (PFN_vkCreateBufferView)load(context, "vkCreateBufferView"); - vkCreateCommandPool = (PFN_vkCreateCommandPool)load(context, "vkCreateCommandPool"); - vkCreateComputePipelines = (PFN_vkCreateComputePipelines)load(context, "vkCreateComputePipelines"); - vkCreateDescriptorPool = (PFN_vkCreateDescriptorPool)load(context, "vkCreateDescriptorPool"); - vkCreateDescriptorSetLayout = (PFN_vkCreateDescriptorSetLayout)load(context, "vkCreateDescriptorSetLayout"); - vkCreateEvent = (PFN_vkCreateEvent)load(context, "vkCreateEvent"); - vkCreateFence = (PFN_vkCreateFence)load(context, "vkCreateFence"); - vkCreateFramebuffer = (PFN_vkCreateFramebuffer)load(context, "vkCreateFramebuffer"); - vkCreateGraphicsPipelines = (PFN_vkCreateGraphicsPipelines)load(context, "vkCreateGraphicsPipelines"); - vkCreateImage = (PFN_vkCreateImage)load(context, "vkCreateImage"); - vkCreateImageView = (PFN_vkCreateImageView)load(context, "vkCreateImageView"); - vkCreatePipelineCache = (PFN_vkCreatePipelineCache)load(context, "vkCreatePipelineCache"); - vkCreatePipelineLayout = (PFN_vkCreatePipelineLayout)load(context, "vkCreatePipelineLayout"); - vkCreateQueryPool = (PFN_vkCreateQueryPool)load(context, "vkCreateQueryPool"); - vkCreateRenderPass = (PFN_vkCreateRenderPass)load(context, "vkCreateRenderPass"); - vkCreateSampler = (PFN_vkCreateSampler)load(context, "vkCreateSampler"); - vkCreateSemaphore = (PFN_vkCreateSemaphore)load(context, "vkCreateSemaphore"); - vkCreateShaderModule = (PFN_vkCreateShaderModule)load(context, "vkCreateShaderModule"); - vkDestroyBuffer = (PFN_vkDestroyBuffer)load(context, "vkDestroyBuffer"); - vkDestroyBufferView = (PFN_vkDestroyBufferView)load(context, "vkDestroyBufferView"); - vkDestroyCommandPool = (PFN_vkDestroyCommandPool)load(context, "vkDestroyCommandPool"); - vkDestroyDescriptorPool = (PFN_vkDestroyDescriptorPool)load(context, "vkDestroyDescriptorPool"); - vkDestroyDescriptorSetLayout = (PFN_vkDestroyDescriptorSetLayout)load(context, "vkDestroyDescriptorSetLayout"); - vkDestroyDevice = (PFN_vkDestroyDevice)load(context, 
"vkDestroyDevice"); - vkDestroyEvent = (PFN_vkDestroyEvent)load(context, "vkDestroyEvent"); - vkDestroyFence = (PFN_vkDestroyFence)load(context, "vkDestroyFence"); - vkDestroyFramebuffer = (PFN_vkDestroyFramebuffer)load(context, "vkDestroyFramebuffer"); - vkDestroyImage = (PFN_vkDestroyImage)load(context, "vkDestroyImage"); - vkDestroyImageView = (PFN_vkDestroyImageView)load(context, "vkDestroyImageView"); - vkDestroyPipeline = (PFN_vkDestroyPipeline)load(context, "vkDestroyPipeline"); - vkDestroyPipelineCache = (PFN_vkDestroyPipelineCache)load(context, "vkDestroyPipelineCache"); - vkDestroyPipelineLayout = (PFN_vkDestroyPipelineLayout)load(context, "vkDestroyPipelineLayout"); - vkDestroyQueryPool = (PFN_vkDestroyQueryPool)load(context, "vkDestroyQueryPool"); - vkDestroyRenderPass = (PFN_vkDestroyRenderPass)load(context, "vkDestroyRenderPass"); - vkDestroySampler = (PFN_vkDestroySampler)load(context, "vkDestroySampler"); - vkDestroySemaphore = (PFN_vkDestroySemaphore)load(context, "vkDestroySemaphore"); - vkDestroyShaderModule = (PFN_vkDestroyShaderModule)load(context, "vkDestroyShaderModule"); - vkDeviceWaitIdle = (PFN_vkDeviceWaitIdle)load(context, "vkDeviceWaitIdle"); - vkEndCommandBuffer = (PFN_vkEndCommandBuffer)load(context, "vkEndCommandBuffer"); - vkFlushMappedMemoryRanges = (PFN_vkFlushMappedMemoryRanges)load(context, "vkFlushMappedMemoryRanges"); - vkFreeCommandBuffers = (PFN_vkFreeCommandBuffers)load(context, "vkFreeCommandBuffers"); - vkFreeDescriptorSets = (PFN_vkFreeDescriptorSets)load(context, "vkFreeDescriptorSets"); - vkFreeMemory = (PFN_vkFreeMemory)load(context, "vkFreeMemory"); - vkGetBufferMemoryRequirements = (PFN_vkGetBufferMemoryRequirements)load(context, "vkGetBufferMemoryRequirements"); - vkGetDeviceMemoryCommitment = (PFN_vkGetDeviceMemoryCommitment)load(context, "vkGetDeviceMemoryCommitment"); - vkGetDeviceQueue = (PFN_vkGetDeviceQueue)load(context, "vkGetDeviceQueue"); - vkGetEventStatus = (PFN_vkGetEventStatus)load(context, "vkGetEventStatus"); - vkGetFenceStatus = (PFN_vkGetFenceStatus)load(context, "vkGetFenceStatus"); - vkGetImageMemoryRequirements = (PFN_vkGetImageMemoryRequirements)load(context, "vkGetImageMemoryRequirements"); - vkGetImageSparseMemoryRequirements = (PFN_vkGetImageSparseMemoryRequirements)load(context, "vkGetImageSparseMemoryRequirements"); - vkGetImageSubresourceLayout = (PFN_vkGetImageSubresourceLayout)load(context, "vkGetImageSubresourceLayout"); - vkGetPipelineCacheData = (PFN_vkGetPipelineCacheData)load(context, "vkGetPipelineCacheData"); - vkGetQueryPoolResults = (PFN_vkGetQueryPoolResults)load(context, "vkGetQueryPoolResults"); - vkGetRenderAreaGranularity = (PFN_vkGetRenderAreaGranularity)load(context, "vkGetRenderAreaGranularity"); - vkInvalidateMappedMemoryRanges = (PFN_vkInvalidateMappedMemoryRanges)load(context, "vkInvalidateMappedMemoryRanges"); - vkMapMemory = (PFN_vkMapMemory)load(context, "vkMapMemory"); - vkMergePipelineCaches = (PFN_vkMergePipelineCaches)load(context, "vkMergePipelineCaches"); - vkQueueBindSparse = (PFN_vkQueueBindSparse)load(context, "vkQueueBindSparse"); - vkQueueSubmit = (PFN_vkQueueSubmit)load(context, "vkQueueSubmit"); - vkQueueWaitIdle = (PFN_vkQueueWaitIdle)load(context, "vkQueueWaitIdle"); - vkResetCommandBuffer = (PFN_vkResetCommandBuffer)load(context, "vkResetCommandBuffer"); - vkResetCommandPool = (PFN_vkResetCommandPool)load(context, "vkResetCommandPool"); - vkResetDescriptorPool = (PFN_vkResetDescriptorPool)load(context, "vkResetDescriptorPool"); - vkResetEvent = 
(PFN_vkResetEvent)load(context, "vkResetEvent"); - vkResetFences = (PFN_vkResetFences)load(context, "vkResetFences"); - vkSetEvent = (PFN_vkSetEvent)load(context, "vkSetEvent"); - vkUnmapMemory = (PFN_vkUnmapMemory)load(context, "vkUnmapMemory"); - vkUpdateDescriptorSets = (PFN_vkUpdateDescriptorSets)load(context, "vkUpdateDescriptorSets"); - vkWaitForFences = (PFN_vkWaitForFences)load(context, "vkWaitForFences"); -#endif /* defined(VK_VERSION_1_0) */ -#if defined(VK_VERSION_1_1) - vkBindBufferMemory2 = (PFN_vkBindBufferMemory2)load(context, "vkBindBufferMemory2"); - vkBindImageMemory2 = (PFN_vkBindImageMemory2)load(context, "vkBindImageMemory2"); - vkCmdDispatchBase = (PFN_vkCmdDispatchBase)load(context, "vkCmdDispatchBase"); - vkCmdSetDeviceMask = (PFN_vkCmdSetDeviceMask)load(context, "vkCmdSetDeviceMask"); - vkCreateDescriptorUpdateTemplate = (PFN_vkCreateDescriptorUpdateTemplate)load(context, "vkCreateDescriptorUpdateTemplate"); - vkCreateSamplerYcbcrConversion = (PFN_vkCreateSamplerYcbcrConversion)load(context, "vkCreateSamplerYcbcrConversion"); - vkDestroyDescriptorUpdateTemplate = (PFN_vkDestroyDescriptorUpdateTemplate)load(context, "vkDestroyDescriptorUpdateTemplate"); - vkDestroySamplerYcbcrConversion = (PFN_vkDestroySamplerYcbcrConversion)load(context, "vkDestroySamplerYcbcrConversion"); - vkGetBufferMemoryRequirements2 = (PFN_vkGetBufferMemoryRequirements2)load(context, "vkGetBufferMemoryRequirements2"); - vkGetDescriptorSetLayoutSupport = (PFN_vkGetDescriptorSetLayoutSupport)load(context, "vkGetDescriptorSetLayoutSupport"); - vkGetDeviceGroupPeerMemoryFeatures = (PFN_vkGetDeviceGroupPeerMemoryFeatures)load(context, "vkGetDeviceGroupPeerMemoryFeatures"); - vkGetDeviceQueue2 = (PFN_vkGetDeviceQueue2)load(context, "vkGetDeviceQueue2"); - vkGetImageMemoryRequirements2 = (PFN_vkGetImageMemoryRequirements2)load(context, "vkGetImageMemoryRequirements2"); - vkGetImageSparseMemoryRequirements2 = (PFN_vkGetImageSparseMemoryRequirements2)load(context, "vkGetImageSparseMemoryRequirements2"); - vkTrimCommandPool = (PFN_vkTrimCommandPool)load(context, "vkTrimCommandPool"); - vkUpdateDescriptorSetWithTemplate = (PFN_vkUpdateDescriptorSetWithTemplate)load(context, "vkUpdateDescriptorSetWithTemplate"); -#endif /* defined(VK_VERSION_1_1) */ -#if defined(VK_VERSION_1_2) - vkCmdBeginRenderPass2 = (PFN_vkCmdBeginRenderPass2)load(context, "vkCmdBeginRenderPass2"); - vkCmdDrawIndexedIndirectCount = (PFN_vkCmdDrawIndexedIndirectCount)load(context, "vkCmdDrawIndexedIndirectCount"); - vkCmdDrawIndirectCount = (PFN_vkCmdDrawIndirectCount)load(context, "vkCmdDrawIndirectCount"); - vkCmdEndRenderPass2 = (PFN_vkCmdEndRenderPass2)load(context, "vkCmdEndRenderPass2"); - vkCmdNextSubpass2 = (PFN_vkCmdNextSubpass2)load(context, "vkCmdNextSubpass2"); - vkCreateRenderPass2 = (PFN_vkCreateRenderPass2)load(context, "vkCreateRenderPass2"); - vkGetBufferDeviceAddress = (PFN_vkGetBufferDeviceAddress)load(context, "vkGetBufferDeviceAddress"); - vkGetBufferOpaqueCaptureAddress = (PFN_vkGetBufferOpaqueCaptureAddress)load(context, "vkGetBufferOpaqueCaptureAddress"); - vkGetDeviceMemoryOpaqueCaptureAddress = (PFN_vkGetDeviceMemoryOpaqueCaptureAddress)load(context, "vkGetDeviceMemoryOpaqueCaptureAddress"); - vkGetSemaphoreCounterValue = (PFN_vkGetSemaphoreCounterValue)load(context, "vkGetSemaphoreCounterValue"); - vkResetQueryPool = (PFN_vkResetQueryPool)load(context, "vkResetQueryPool"); - vkSignalSemaphore = (PFN_vkSignalSemaphore)load(context, "vkSignalSemaphore"); - vkWaitSemaphores = (PFN_vkWaitSemaphores)load(context, 
"vkWaitSemaphores"); -#endif /* defined(VK_VERSION_1_2) */ -#if defined(VK_VERSION_1_3) - vkCmdBeginRendering = (PFN_vkCmdBeginRendering)load(context, "vkCmdBeginRendering"); - vkCmdBindVertexBuffers2 = (PFN_vkCmdBindVertexBuffers2)load(context, "vkCmdBindVertexBuffers2"); - vkCmdBlitImage2 = (PFN_vkCmdBlitImage2)load(context, "vkCmdBlitImage2"); - vkCmdCopyBuffer2 = (PFN_vkCmdCopyBuffer2)load(context, "vkCmdCopyBuffer2"); - vkCmdCopyBufferToImage2 = (PFN_vkCmdCopyBufferToImage2)load(context, "vkCmdCopyBufferToImage2"); - vkCmdCopyImage2 = (PFN_vkCmdCopyImage2)load(context, "vkCmdCopyImage2"); - vkCmdCopyImageToBuffer2 = (PFN_vkCmdCopyImageToBuffer2)load(context, "vkCmdCopyImageToBuffer2"); - vkCmdEndRendering = (PFN_vkCmdEndRendering)load(context, "vkCmdEndRendering"); - vkCmdPipelineBarrier2 = (PFN_vkCmdPipelineBarrier2)load(context, "vkCmdPipelineBarrier2"); - vkCmdResetEvent2 = (PFN_vkCmdResetEvent2)load(context, "vkCmdResetEvent2"); - vkCmdResolveImage2 = (PFN_vkCmdResolveImage2)load(context, "vkCmdResolveImage2"); - vkCmdSetCullMode = (PFN_vkCmdSetCullMode)load(context, "vkCmdSetCullMode"); - vkCmdSetDepthBiasEnable = (PFN_vkCmdSetDepthBiasEnable)load(context, "vkCmdSetDepthBiasEnable"); - vkCmdSetDepthBoundsTestEnable = (PFN_vkCmdSetDepthBoundsTestEnable)load(context, "vkCmdSetDepthBoundsTestEnable"); - vkCmdSetDepthCompareOp = (PFN_vkCmdSetDepthCompareOp)load(context, "vkCmdSetDepthCompareOp"); - vkCmdSetDepthTestEnable = (PFN_vkCmdSetDepthTestEnable)load(context, "vkCmdSetDepthTestEnable"); - vkCmdSetDepthWriteEnable = (PFN_vkCmdSetDepthWriteEnable)load(context, "vkCmdSetDepthWriteEnable"); - vkCmdSetEvent2 = (PFN_vkCmdSetEvent2)load(context, "vkCmdSetEvent2"); - vkCmdSetFrontFace = (PFN_vkCmdSetFrontFace)load(context, "vkCmdSetFrontFace"); - vkCmdSetPrimitiveRestartEnable = (PFN_vkCmdSetPrimitiveRestartEnable)load(context, "vkCmdSetPrimitiveRestartEnable"); - vkCmdSetPrimitiveTopology = (PFN_vkCmdSetPrimitiveTopology)load(context, "vkCmdSetPrimitiveTopology"); - vkCmdSetRasterizerDiscardEnable = (PFN_vkCmdSetRasterizerDiscardEnable)load(context, "vkCmdSetRasterizerDiscardEnable"); - vkCmdSetScissorWithCount = (PFN_vkCmdSetScissorWithCount)load(context, "vkCmdSetScissorWithCount"); - vkCmdSetStencilOp = (PFN_vkCmdSetStencilOp)load(context, "vkCmdSetStencilOp"); - vkCmdSetStencilTestEnable = (PFN_vkCmdSetStencilTestEnable)load(context, "vkCmdSetStencilTestEnable"); - vkCmdSetViewportWithCount = (PFN_vkCmdSetViewportWithCount)load(context, "vkCmdSetViewportWithCount"); - vkCmdWaitEvents2 = (PFN_vkCmdWaitEvents2)load(context, "vkCmdWaitEvents2"); - vkCmdWriteTimestamp2 = (PFN_vkCmdWriteTimestamp2)load(context, "vkCmdWriteTimestamp2"); - vkCreatePrivateDataSlot = (PFN_vkCreatePrivateDataSlot)load(context, "vkCreatePrivateDataSlot"); - vkDestroyPrivateDataSlot = (PFN_vkDestroyPrivateDataSlot)load(context, "vkDestroyPrivateDataSlot"); - vkGetDeviceBufferMemoryRequirements = (PFN_vkGetDeviceBufferMemoryRequirements)load(context, "vkGetDeviceBufferMemoryRequirements"); - vkGetDeviceImageMemoryRequirements = (PFN_vkGetDeviceImageMemoryRequirements)load(context, "vkGetDeviceImageMemoryRequirements"); - vkGetDeviceImageSparseMemoryRequirements = (PFN_vkGetDeviceImageSparseMemoryRequirements)load(context, "vkGetDeviceImageSparseMemoryRequirements"); - vkGetPrivateData = (PFN_vkGetPrivateData)load(context, "vkGetPrivateData"); - vkQueueSubmit2 = (PFN_vkQueueSubmit2)load(context, "vkQueueSubmit2"); - vkSetPrivateData = (PFN_vkSetPrivateData)load(context, "vkSetPrivateData"); -#endif /* 
defined(VK_VERSION_1_3) */ -#if defined(VK_AMDX_shader_enqueue) - vkCmdDispatchGraphAMDX = (PFN_vkCmdDispatchGraphAMDX)load(context, "vkCmdDispatchGraphAMDX"); - vkCmdDispatchGraphIndirectAMDX = (PFN_vkCmdDispatchGraphIndirectAMDX)load(context, "vkCmdDispatchGraphIndirectAMDX"); - vkCmdDispatchGraphIndirectCountAMDX = (PFN_vkCmdDispatchGraphIndirectCountAMDX)load(context, "vkCmdDispatchGraphIndirectCountAMDX"); - vkCmdInitializeGraphScratchMemoryAMDX = (PFN_vkCmdInitializeGraphScratchMemoryAMDX)load(context, "vkCmdInitializeGraphScratchMemoryAMDX"); - vkCreateExecutionGraphPipelinesAMDX = (PFN_vkCreateExecutionGraphPipelinesAMDX)load(context, "vkCreateExecutionGraphPipelinesAMDX"); - vkGetExecutionGraphPipelineNodeIndexAMDX = (PFN_vkGetExecutionGraphPipelineNodeIndexAMDX)load(context, "vkGetExecutionGraphPipelineNodeIndexAMDX"); - vkGetExecutionGraphPipelineScratchSizeAMDX = (PFN_vkGetExecutionGraphPipelineScratchSizeAMDX)load(context, "vkGetExecutionGraphPipelineScratchSizeAMDX"); -#endif /* defined(VK_AMDX_shader_enqueue) */ -#if defined(VK_AMD_buffer_marker) - vkCmdWriteBufferMarkerAMD = (PFN_vkCmdWriteBufferMarkerAMD)load(context, "vkCmdWriteBufferMarkerAMD"); -#endif /* defined(VK_AMD_buffer_marker) */ -#if defined(VK_AMD_display_native_hdr) - vkSetLocalDimmingAMD = (PFN_vkSetLocalDimmingAMD)load(context, "vkSetLocalDimmingAMD"); -#endif /* defined(VK_AMD_display_native_hdr) */ -#if defined(VK_AMD_draw_indirect_count) - vkCmdDrawIndexedIndirectCountAMD = (PFN_vkCmdDrawIndexedIndirectCountAMD)load(context, "vkCmdDrawIndexedIndirectCountAMD"); - vkCmdDrawIndirectCountAMD = (PFN_vkCmdDrawIndirectCountAMD)load(context, "vkCmdDrawIndirectCountAMD"); -#endif /* defined(VK_AMD_draw_indirect_count) */ -#if defined(VK_AMD_shader_info) - vkGetShaderInfoAMD = (PFN_vkGetShaderInfoAMD)load(context, "vkGetShaderInfoAMD"); -#endif /* defined(VK_AMD_shader_info) */ -#if defined(VK_ANDROID_external_memory_android_hardware_buffer) - vkGetAndroidHardwareBufferPropertiesANDROID = (PFN_vkGetAndroidHardwareBufferPropertiesANDROID)load(context, "vkGetAndroidHardwareBufferPropertiesANDROID"); - vkGetMemoryAndroidHardwareBufferANDROID = (PFN_vkGetMemoryAndroidHardwareBufferANDROID)load(context, "vkGetMemoryAndroidHardwareBufferANDROID"); -#endif /* defined(VK_ANDROID_external_memory_android_hardware_buffer) */ -#if defined(VK_EXT_attachment_feedback_loop_dynamic_state) - vkCmdSetAttachmentFeedbackLoopEnableEXT = (PFN_vkCmdSetAttachmentFeedbackLoopEnableEXT)load(context, "vkCmdSetAttachmentFeedbackLoopEnableEXT"); -#endif /* defined(VK_EXT_attachment_feedback_loop_dynamic_state) */ -#if defined(VK_EXT_buffer_device_address) - vkGetBufferDeviceAddressEXT = (PFN_vkGetBufferDeviceAddressEXT)load(context, "vkGetBufferDeviceAddressEXT"); -#endif /* defined(VK_EXT_buffer_device_address) */ -#if defined(VK_EXT_calibrated_timestamps) - vkGetCalibratedTimestampsEXT = (PFN_vkGetCalibratedTimestampsEXT)load(context, "vkGetCalibratedTimestampsEXT"); -#endif /* defined(VK_EXT_calibrated_timestamps) */ -#if defined(VK_EXT_color_write_enable) - vkCmdSetColorWriteEnableEXT = (PFN_vkCmdSetColorWriteEnableEXT)load(context, "vkCmdSetColorWriteEnableEXT"); -#endif /* defined(VK_EXT_color_write_enable) */ -#if defined(VK_EXT_conditional_rendering) - vkCmdBeginConditionalRenderingEXT = (PFN_vkCmdBeginConditionalRenderingEXT)load(context, "vkCmdBeginConditionalRenderingEXT"); - vkCmdEndConditionalRenderingEXT = (PFN_vkCmdEndConditionalRenderingEXT)load(context, "vkCmdEndConditionalRenderingEXT"); -#endif /* 
defined(VK_EXT_conditional_rendering) */ -#if defined(VK_EXT_debug_marker) - vkCmdDebugMarkerBeginEXT = (PFN_vkCmdDebugMarkerBeginEXT)load(context, "vkCmdDebugMarkerBeginEXT"); - vkCmdDebugMarkerEndEXT = (PFN_vkCmdDebugMarkerEndEXT)load(context, "vkCmdDebugMarkerEndEXT"); - vkCmdDebugMarkerInsertEXT = (PFN_vkCmdDebugMarkerInsertEXT)load(context, "vkCmdDebugMarkerInsertEXT"); - vkDebugMarkerSetObjectNameEXT = (PFN_vkDebugMarkerSetObjectNameEXT)load(context, "vkDebugMarkerSetObjectNameEXT"); - vkDebugMarkerSetObjectTagEXT = (PFN_vkDebugMarkerSetObjectTagEXT)load(context, "vkDebugMarkerSetObjectTagEXT"); -#endif /* defined(VK_EXT_debug_marker) */ -#if defined(VK_EXT_depth_bias_control) - vkCmdSetDepthBias2EXT = (PFN_vkCmdSetDepthBias2EXT)load(context, "vkCmdSetDepthBias2EXT"); -#endif /* defined(VK_EXT_depth_bias_control) */ -#if defined(VK_EXT_descriptor_buffer) - vkCmdBindDescriptorBufferEmbeddedSamplersEXT = (PFN_vkCmdBindDescriptorBufferEmbeddedSamplersEXT)load(context, "vkCmdBindDescriptorBufferEmbeddedSamplersEXT"); - vkCmdBindDescriptorBuffersEXT = (PFN_vkCmdBindDescriptorBuffersEXT)load(context, "vkCmdBindDescriptorBuffersEXT"); - vkCmdSetDescriptorBufferOffsetsEXT = (PFN_vkCmdSetDescriptorBufferOffsetsEXT)load(context, "vkCmdSetDescriptorBufferOffsetsEXT"); - vkGetBufferOpaqueCaptureDescriptorDataEXT = (PFN_vkGetBufferOpaqueCaptureDescriptorDataEXT)load(context, "vkGetBufferOpaqueCaptureDescriptorDataEXT"); - vkGetDescriptorEXT = (PFN_vkGetDescriptorEXT)load(context, "vkGetDescriptorEXT"); - vkGetDescriptorSetLayoutBindingOffsetEXT = (PFN_vkGetDescriptorSetLayoutBindingOffsetEXT)load(context, "vkGetDescriptorSetLayoutBindingOffsetEXT"); - vkGetDescriptorSetLayoutSizeEXT = (PFN_vkGetDescriptorSetLayoutSizeEXT)load(context, "vkGetDescriptorSetLayoutSizeEXT"); - vkGetImageOpaqueCaptureDescriptorDataEXT = (PFN_vkGetImageOpaqueCaptureDescriptorDataEXT)load(context, "vkGetImageOpaqueCaptureDescriptorDataEXT"); - vkGetImageViewOpaqueCaptureDescriptorDataEXT = (PFN_vkGetImageViewOpaqueCaptureDescriptorDataEXT)load(context, "vkGetImageViewOpaqueCaptureDescriptorDataEXT"); - vkGetSamplerOpaqueCaptureDescriptorDataEXT = (PFN_vkGetSamplerOpaqueCaptureDescriptorDataEXT)load(context, "vkGetSamplerOpaqueCaptureDescriptorDataEXT"); -#endif /* defined(VK_EXT_descriptor_buffer) */ -#if defined(VK_EXT_descriptor_buffer) && (defined(VK_KHR_acceleration_structure) || defined(VK_NV_ray_tracing)) - vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT = (PFN_vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT)load(context, "vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT"); -#endif /* defined(VK_EXT_descriptor_buffer) && (defined(VK_KHR_acceleration_structure) || defined(VK_NV_ray_tracing)) */ -#if defined(VK_EXT_device_fault) - vkGetDeviceFaultInfoEXT = (PFN_vkGetDeviceFaultInfoEXT)load(context, "vkGetDeviceFaultInfoEXT"); -#endif /* defined(VK_EXT_device_fault) */ -#if defined(VK_EXT_discard_rectangles) - vkCmdSetDiscardRectangleEXT = (PFN_vkCmdSetDiscardRectangleEXT)load(context, "vkCmdSetDiscardRectangleEXT"); -#endif /* defined(VK_EXT_discard_rectangles) */ -#if defined(VK_EXT_discard_rectangles) && VK_EXT_DISCARD_RECTANGLES_SPEC_VERSION >= 2 - vkCmdSetDiscardRectangleEnableEXT = (PFN_vkCmdSetDiscardRectangleEnableEXT)load(context, "vkCmdSetDiscardRectangleEnableEXT"); - vkCmdSetDiscardRectangleModeEXT = (PFN_vkCmdSetDiscardRectangleModeEXT)load(context, "vkCmdSetDiscardRectangleModeEXT"); -#endif /* defined(VK_EXT_discard_rectangles) && VK_EXT_DISCARD_RECTANGLES_SPEC_VERSION >= 2 */ 
-#if defined(VK_EXT_display_control) - vkDisplayPowerControlEXT = (PFN_vkDisplayPowerControlEXT)load(context, "vkDisplayPowerControlEXT"); - vkGetSwapchainCounterEXT = (PFN_vkGetSwapchainCounterEXT)load(context, "vkGetSwapchainCounterEXT"); - vkRegisterDeviceEventEXT = (PFN_vkRegisterDeviceEventEXT)load(context, "vkRegisterDeviceEventEXT"); - vkRegisterDisplayEventEXT = (PFN_vkRegisterDisplayEventEXT)load(context, "vkRegisterDisplayEventEXT"); -#endif /* defined(VK_EXT_display_control) */ -#if defined(VK_EXT_external_memory_host) - vkGetMemoryHostPointerPropertiesEXT = (PFN_vkGetMemoryHostPointerPropertiesEXT)load(context, "vkGetMemoryHostPointerPropertiesEXT"); -#endif /* defined(VK_EXT_external_memory_host) */ -#if defined(VK_EXT_full_screen_exclusive) - vkAcquireFullScreenExclusiveModeEXT = (PFN_vkAcquireFullScreenExclusiveModeEXT)load(context, "vkAcquireFullScreenExclusiveModeEXT"); - vkReleaseFullScreenExclusiveModeEXT = (PFN_vkReleaseFullScreenExclusiveModeEXT)load(context, "vkReleaseFullScreenExclusiveModeEXT"); -#endif /* defined(VK_EXT_full_screen_exclusive) */ -#if defined(VK_EXT_hdr_metadata) - vkSetHdrMetadataEXT = (PFN_vkSetHdrMetadataEXT)load(context, "vkSetHdrMetadataEXT"); -#endif /* defined(VK_EXT_hdr_metadata) */ -#if defined(VK_EXT_host_image_copy) - vkCopyImageToImageEXT = (PFN_vkCopyImageToImageEXT)load(context, "vkCopyImageToImageEXT"); - vkCopyImageToMemoryEXT = (PFN_vkCopyImageToMemoryEXT)load(context, "vkCopyImageToMemoryEXT"); - vkCopyMemoryToImageEXT = (PFN_vkCopyMemoryToImageEXT)load(context, "vkCopyMemoryToImageEXT"); - vkTransitionImageLayoutEXT = (PFN_vkTransitionImageLayoutEXT)load(context, "vkTransitionImageLayoutEXT"); -#endif /* defined(VK_EXT_host_image_copy) */ -#if defined(VK_EXT_host_query_reset) - vkResetQueryPoolEXT = (PFN_vkResetQueryPoolEXT)load(context, "vkResetQueryPoolEXT"); -#endif /* defined(VK_EXT_host_query_reset) */ -#if defined(VK_EXT_image_drm_format_modifier) - vkGetImageDrmFormatModifierPropertiesEXT = (PFN_vkGetImageDrmFormatModifierPropertiesEXT)load(context, "vkGetImageDrmFormatModifierPropertiesEXT"); -#endif /* defined(VK_EXT_image_drm_format_modifier) */ -#if defined(VK_EXT_line_rasterization) - vkCmdSetLineStippleEXT = (PFN_vkCmdSetLineStippleEXT)load(context, "vkCmdSetLineStippleEXT"); -#endif /* defined(VK_EXT_line_rasterization) */ -#if defined(VK_EXT_mesh_shader) - vkCmdDrawMeshTasksEXT = (PFN_vkCmdDrawMeshTasksEXT)load(context, "vkCmdDrawMeshTasksEXT"); - vkCmdDrawMeshTasksIndirectCountEXT = (PFN_vkCmdDrawMeshTasksIndirectCountEXT)load(context, "vkCmdDrawMeshTasksIndirectCountEXT"); - vkCmdDrawMeshTasksIndirectEXT = (PFN_vkCmdDrawMeshTasksIndirectEXT)load(context, "vkCmdDrawMeshTasksIndirectEXT"); -#endif /* defined(VK_EXT_mesh_shader) */ -#if defined(VK_EXT_metal_objects) - vkExportMetalObjectsEXT = (PFN_vkExportMetalObjectsEXT)load(context, "vkExportMetalObjectsEXT"); -#endif /* defined(VK_EXT_metal_objects) */ -#if defined(VK_EXT_multi_draw) - vkCmdDrawMultiEXT = (PFN_vkCmdDrawMultiEXT)load(context, "vkCmdDrawMultiEXT"); - vkCmdDrawMultiIndexedEXT = (PFN_vkCmdDrawMultiIndexedEXT)load(context, "vkCmdDrawMultiIndexedEXT"); -#endif /* defined(VK_EXT_multi_draw) */ -#if defined(VK_EXT_opacity_micromap) - vkBuildMicromapsEXT = (PFN_vkBuildMicromapsEXT)load(context, "vkBuildMicromapsEXT"); - vkCmdBuildMicromapsEXT = (PFN_vkCmdBuildMicromapsEXT)load(context, "vkCmdBuildMicromapsEXT"); - vkCmdCopyMemoryToMicromapEXT = (PFN_vkCmdCopyMemoryToMicromapEXT)load(context, "vkCmdCopyMemoryToMicromapEXT"); - vkCmdCopyMicromapEXT = 
(PFN_vkCmdCopyMicromapEXT)load(context, "vkCmdCopyMicromapEXT"); - vkCmdCopyMicromapToMemoryEXT = (PFN_vkCmdCopyMicromapToMemoryEXT)load(context, "vkCmdCopyMicromapToMemoryEXT"); - vkCmdWriteMicromapsPropertiesEXT = (PFN_vkCmdWriteMicromapsPropertiesEXT)load(context, "vkCmdWriteMicromapsPropertiesEXT"); - vkCopyMemoryToMicromapEXT = (PFN_vkCopyMemoryToMicromapEXT)load(context, "vkCopyMemoryToMicromapEXT"); - vkCopyMicromapEXT = (PFN_vkCopyMicromapEXT)load(context, "vkCopyMicromapEXT"); - vkCopyMicromapToMemoryEXT = (PFN_vkCopyMicromapToMemoryEXT)load(context, "vkCopyMicromapToMemoryEXT"); - vkCreateMicromapEXT = (PFN_vkCreateMicromapEXT)load(context, "vkCreateMicromapEXT"); - vkDestroyMicromapEXT = (PFN_vkDestroyMicromapEXT)load(context, "vkDestroyMicromapEXT"); - vkGetDeviceMicromapCompatibilityEXT = (PFN_vkGetDeviceMicromapCompatibilityEXT)load(context, "vkGetDeviceMicromapCompatibilityEXT"); - vkGetMicromapBuildSizesEXT = (PFN_vkGetMicromapBuildSizesEXT)load(context, "vkGetMicromapBuildSizesEXT"); - vkWriteMicromapsPropertiesEXT = (PFN_vkWriteMicromapsPropertiesEXT)load(context, "vkWriteMicromapsPropertiesEXT"); -#endif /* defined(VK_EXT_opacity_micromap) */ -#if defined(VK_EXT_pageable_device_local_memory) - vkSetDeviceMemoryPriorityEXT = (PFN_vkSetDeviceMemoryPriorityEXT)load(context, "vkSetDeviceMemoryPriorityEXT"); -#endif /* defined(VK_EXT_pageable_device_local_memory) */ -#if defined(VK_EXT_pipeline_properties) - vkGetPipelinePropertiesEXT = (PFN_vkGetPipelinePropertiesEXT)load(context, "vkGetPipelinePropertiesEXT"); -#endif /* defined(VK_EXT_pipeline_properties) */ -#if defined(VK_EXT_private_data) - vkCreatePrivateDataSlotEXT = (PFN_vkCreatePrivateDataSlotEXT)load(context, "vkCreatePrivateDataSlotEXT"); - vkDestroyPrivateDataSlotEXT = (PFN_vkDestroyPrivateDataSlotEXT)load(context, "vkDestroyPrivateDataSlotEXT"); - vkGetPrivateDataEXT = (PFN_vkGetPrivateDataEXT)load(context, "vkGetPrivateDataEXT"); - vkSetPrivateDataEXT = (PFN_vkSetPrivateDataEXT)load(context, "vkSetPrivateDataEXT"); -#endif /* defined(VK_EXT_private_data) */ -#if defined(VK_EXT_sample_locations) - vkCmdSetSampleLocationsEXT = (PFN_vkCmdSetSampleLocationsEXT)load(context, "vkCmdSetSampleLocationsEXT"); -#endif /* defined(VK_EXT_sample_locations) */ -#if defined(VK_EXT_shader_module_identifier) - vkGetShaderModuleCreateInfoIdentifierEXT = (PFN_vkGetShaderModuleCreateInfoIdentifierEXT)load(context, "vkGetShaderModuleCreateInfoIdentifierEXT"); - vkGetShaderModuleIdentifierEXT = (PFN_vkGetShaderModuleIdentifierEXT)load(context, "vkGetShaderModuleIdentifierEXT"); -#endif /* defined(VK_EXT_shader_module_identifier) */ -#if defined(VK_EXT_shader_object) - vkCmdBindShadersEXT = (PFN_vkCmdBindShadersEXT)load(context, "vkCmdBindShadersEXT"); - vkCreateShadersEXT = (PFN_vkCreateShadersEXT)load(context, "vkCreateShadersEXT"); - vkDestroyShaderEXT = (PFN_vkDestroyShaderEXT)load(context, "vkDestroyShaderEXT"); - vkGetShaderBinaryDataEXT = (PFN_vkGetShaderBinaryDataEXT)load(context, "vkGetShaderBinaryDataEXT"); -#endif /* defined(VK_EXT_shader_object) */ -#if defined(VK_EXT_swapchain_maintenance1) - vkReleaseSwapchainImagesEXT = (PFN_vkReleaseSwapchainImagesEXT)load(context, "vkReleaseSwapchainImagesEXT"); -#endif /* defined(VK_EXT_swapchain_maintenance1) */ -#if defined(VK_EXT_transform_feedback) - vkCmdBeginQueryIndexedEXT = (PFN_vkCmdBeginQueryIndexedEXT)load(context, "vkCmdBeginQueryIndexedEXT"); - vkCmdBeginTransformFeedbackEXT = (PFN_vkCmdBeginTransformFeedbackEXT)load(context, "vkCmdBeginTransformFeedbackEXT"); - 
vkCmdBindTransformFeedbackBuffersEXT = (PFN_vkCmdBindTransformFeedbackBuffersEXT)load(context, "vkCmdBindTransformFeedbackBuffersEXT"); - vkCmdDrawIndirectByteCountEXT = (PFN_vkCmdDrawIndirectByteCountEXT)load(context, "vkCmdDrawIndirectByteCountEXT"); - vkCmdEndQueryIndexedEXT = (PFN_vkCmdEndQueryIndexedEXT)load(context, "vkCmdEndQueryIndexedEXT"); - vkCmdEndTransformFeedbackEXT = (PFN_vkCmdEndTransformFeedbackEXT)load(context, "vkCmdEndTransformFeedbackEXT"); -#endif /* defined(VK_EXT_transform_feedback) */ -#if defined(VK_EXT_validation_cache) - vkCreateValidationCacheEXT = (PFN_vkCreateValidationCacheEXT)load(context, "vkCreateValidationCacheEXT"); - vkDestroyValidationCacheEXT = (PFN_vkDestroyValidationCacheEXT)load(context, "vkDestroyValidationCacheEXT"); - vkGetValidationCacheDataEXT = (PFN_vkGetValidationCacheDataEXT)load(context, "vkGetValidationCacheDataEXT"); - vkMergeValidationCachesEXT = (PFN_vkMergeValidationCachesEXT)load(context, "vkMergeValidationCachesEXT"); -#endif /* defined(VK_EXT_validation_cache) */ -#if defined(VK_FUCHSIA_buffer_collection) - vkCreateBufferCollectionFUCHSIA = (PFN_vkCreateBufferCollectionFUCHSIA)load(context, "vkCreateBufferCollectionFUCHSIA"); - vkDestroyBufferCollectionFUCHSIA = (PFN_vkDestroyBufferCollectionFUCHSIA)load(context, "vkDestroyBufferCollectionFUCHSIA"); - vkGetBufferCollectionPropertiesFUCHSIA = (PFN_vkGetBufferCollectionPropertiesFUCHSIA)load(context, "vkGetBufferCollectionPropertiesFUCHSIA"); - vkSetBufferCollectionBufferConstraintsFUCHSIA = (PFN_vkSetBufferCollectionBufferConstraintsFUCHSIA)load(context, "vkSetBufferCollectionBufferConstraintsFUCHSIA"); - vkSetBufferCollectionImageConstraintsFUCHSIA = (PFN_vkSetBufferCollectionImageConstraintsFUCHSIA)load(context, "vkSetBufferCollectionImageConstraintsFUCHSIA"); -#endif /* defined(VK_FUCHSIA_buffer_collection) */ -#if defined(VK_FUCHSIA_external_memory) - vkGetMemoryZirconHandleFUCHSIA = (PFN_vkGetMemoryZirconHandleFUCHSIA)load(context, "vkGetMemoryZirconHandleFUCHSIA"); - vkGetMemoryZirconHandlePropertiesFUCHSIA = (PFN_vkGetMemoryZirconHandlePropertiesFUCHSIA)load(context, "vkGetMemoryZirconHandlePropertiesFUCHSIA"); -#endif /* defined(VK_FUCHSIA_external_memory) */ -#if defined(VK_FUCHSIA_external_semaphore) - vkGetSemaphoreZirconHandleFUCHSIA = (PFN_vkGetSemaphoreZirconHandleFUCHSIA)load(context, "vkGetSemaphoreZirconHandleFUCHSIA"); - vkImportSemaphoreZirconHandleFUCHSIA = (PFN_vkImportSemaphoreZirconHandleFUCHSIA)load(context, "vkImportSemaphoreZirconHandleFUCHSIA"); -#endif /* defined(VK_FUCHSIA_external_semaphore) */ -#if defined(VK_GOOGLE_display_timing) - vkGetPastPresentationTimingGOOGLE = (PFN_vkGetPastPresentationTimingGOOGLE)load(context, "vkGetPastPresentationTimingGOOGLE"); - vkGetRefreshCycleDurationGOOGLE = (PFN_vkGetRefreshCycleDurationGOOGLE)load(context, "vkGetRefreshCycleDurationGOOGLE"); -#endif /* defined(VK_GOOGLE_display_timing) */ -#if defined(VK_HUAWEI_cluster_culling_shader) - vkCmdDrawClusterHUAWEI = (PFN_vkCmdDrawClusterHUAWEI)load(context, "vkCmdDrawClusterHUAWEI"); - vkCmdDrawClusterIndirectHUAWEI = (PFN_vkCmdDrawClusterIndirectHUAWEI)load(context, "vkCmdDrawClusterIndirectHUAWEI"); -#endif /* defined(VK_HUAWEI_cluster_culling_shader) */ -#if defined(VK_HUAWEI_invocation_mask) - vkCmdBindInvocationMaskHUAWEI = (PFN_vkCmdBindInvocationMaskHUAWEI)load(context, "vkCmdBindInvocationMaskHUAWEI"); -#endif /* defined(VK_HUAWEI_invocation_mask) */ -#if defined(VK_HUAWEI_subpass_shading) - vkCmdSubpassShadingHUAWEI = 
(PFN_vkCmdSubpassShadingHUAWEI)load(context, "vkCmdSubpassShadingHUAWEI"); - vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI = (PFN_vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI)load(context, "vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI"); -#endif /* defined(VK_HUAWEI_subpass_shading) */ -#if defined(VK_INTEL_performance_query) - vkAcquirePerformanceConfigurationINTEL = (PFN_vkAcquirePerformanceConfigurationINTEL)load(context, "vkAcquirePerformanceConfigurationINTEL"); - vkCmdSetPerformanceMarkerINTEL = (PFN_vkCmdSetPerformanceMarkerINTEL)load(context, "vkCmdSetPerformanceMarkerINTEL"); - vkCmdSetPerformanceOverrideINTEL = (PFN_vkCmdSetPerformanceOverrideINTEL)load(context, "vkCmdSetPerformanceOverrideINTEL"); - vkCmdSetPerformanceStreamMarkerINTEL = (PFN_vkCmdSetPerformanceStreamMarkerINTEL)load(context, "vkCmdSetPerformanceStreamMarkerINTEL"); - vkGetPerformanceParameterINTEL = (PFN_vkGetPerformanceParameterINTEL)load(context, "vkGetPerformanceParameterINTEL"); - vkInitializePerformanceApiINTEL = (PFN_vkInitializePerformanceApiINTEL)load(context, "vkInitializePerformanceApiINTEL"); - vkQueueSetPerformanceConfigurationINTEL = (PFN_vkQueueSetPerformanceConfigurationINTEL)load(context, "vkQueueSetPerformanceConfigurationINTEL"); - vkReleasePerformanceConfigurationINTEL = (PFN_vkReleasePerformanceConfigurationINTEL)load(context, "vkReleasePerformanceConfigurationINTEL"); - vkUninitializePerformanceApiINTEL = (PFN_vkUninitializePerformanceApiINTEL)load(context, "vkUninitializePerformanceApiINTEL"); -#endif /* defined(VK_INTEL_performance_query) */ -#if defined(VK_KHR_acceleration_structure) - vkBuildAccelerationStructuresKHR = (PFN_vkBuildAccelerationStructuresKHR)load(context, "vkBuildAccelerationStructuresKHR"); - vkCmdBuildAccelerationStructuresIndirectKHR = (PFN_vkCmdBuildAccelerationStructuresIndirectKHR)load(context, "vkCmdBuildAccelerationStructuresIndirectKHR"); - vkCmdBuildAccelerationStructuresKHR = (PFN_vkCmdBuildAccelerationStructuresKHR)load(context, "vkCmdBuildAccelerationStructuresKHR"); - vkCmdCopyAccelerationStructureKHR = (PFN_vkCmdCopyAccelerationStructureKHR)load(context, "vkCmdCopyAccelerationStructureKHR"); - vkCmdCopyAccelerationStructureToMemoryKHR = (PFN_vkCmdCopyAccelerationStructureToMemoryKHR)load(context, "vkCmdCopyAccelerationStructureToMemoryKHR"); - vkCmdCopyMemoryToAccelerationStructureKHR = (PFN_vkCmdCopyMemoryToAccelerationStructureKHR)load(context, "vkCmdCopyMemoryToAccelerationStructureKHR"); - vkCmdWriteAccelerationStructuresPropertiesKHR = (PFN_vkCmdWriteAccelerationStructuresPropertiesKHR)load(context, "vkCmdWriteAccelerationStructuresPropertiesKHR"); - vkCopyAccelerationStructureKHR = (PFN_vkCopyAccelerationStructureKHR)load(context, "vkCopyAccelerationStructureKHR"); - vkCopyAccelerationStructureToMemoryKHR = (PFN_vkCopyAccelerationStructureToMemoryKHR)load(context, "vkCopyAccelerationStructureToMemoryKHR"); - vkCopyMemoryToAccelerationStructureKHR = (PFN_vkCopyMemoryToAccelerationStructureKHR)load(context, "vkCopyMemoryToAccelerationStructureKHR"); - vkCreateAccelerationStructureKHR = (PFN_vkCreateAccelerationStructureKHR)load(context, "vkCreateAccelerationStructureKHR"); - vkDestroyAccelerationStructureKHR = (PFN_vkDestroyAccelerationStructureKHR)load(context, "vkDestroyAccelerationStructureKHR"); - vkGetAccelerationStructureBuildSizesKHR = (PFN_vkGetAccelerationStructureBuildSizesKHR)load(context, "vkGetAccelerationStructureBuildSizesKHR"); - vkGetAccelerationStructureDeviceAddressKHR = 
(PFN_vkGetAccelerationStructureDeviceAddressKHR)load(context, "vkGetAccelerationStructureDeviceAddressKHR"); - vkGetDeviceAccelerationStructureCompatibilityKHR = (PFN_vkGetDeviceAccelerationStructureCompatibilityKHR)load(context, "vkGetDeviceAccelerationStructureCompatibilityKHR"); - vkWriteAccelerationStructuresPropertiesKHR = (PFN_vkWriteAccelerationStructuresPropertiesKHR)load(context, "vkWriteAccelerationStructuresPropertiesKHR"); -#endif /* defined(VK_KHR_acceleration_structure) */ -#if defined(VK_KHR_bind_memory2) - vkBindBufferMemory2KHR = (PFN_vkBindBufferMemory2KHR)load(context, "vkBindBufferMemory2KHR"); - vkBindImageMemory2KHR = (PFN_vkBindImageMemory2KHR)load(context, "vkBindImageMemory2KHR"); -#endif /* defined(VK_KHR_bind_memory2) */ -#if defined(VK_KHR_buffer_device_address) - vkGetBufferDeviceAddressKHR = (PFN_vkGetBufferDeviceAddressKHR)load(context, "vkGetBufferDeviceAddressKHR"); - vkGetBufferOpaqueCaptureAddressKHR = (PFN_vkGetBufferOpaqueCaptureAddressKHR)load(context, "vkGetBufferOpaqueCaptureAddressKHR"); - vkGetDeviceMemoryOpaqueCaptureAddressKHR = (PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR)load(context, "vkGetDeviceMemoryOpaqueCaptureAddressKHR"); -#endif /* defined(VK_KHR_buffer_device_address) */ -#if defined(VK_KHR_copy_commands2) - vkCmdBlitImage2KHR = (PFN_vkCmdBlitImage2KHR)load(context, "vkCmdBlitImage2KHR"); - vkCmdCopyBuffer2KHR = (PFN_vkCmdCopyBuffer2KHR)load(context, "vkCmdCopyBuffer2KHR"); - vkCmdCopyBufferToImage2KHR = (PFN_vkCmdCopyBufferToImage2KHR)load(context, "vkCmdCopyBufferToImage2KHR"); - vkCmdCopyImage2KHR = (PFN_vkCmdCopyImage2KHR)load(context, "vkCmdCopyImage2KHR"); - vkCmdCopyImageToBuffer2KHR = (PFN_vkCmdCopyImageToBuffer2KHR)load(context, "vkCmdCopyImageToBuffer2KHR"); - vkCmdResolveImage2KHR = (PFN_vkCmdResolveImage2KHR)load(context, "vkCmdResolveImage2KHR"); -#endif /* defined(VK_KHR_copy_commands2) */ -#if defined(VK_KHR_create_renderpass2) - vkCmdBeginRenderPass2KHR = (PFN_vkCmdBeginRenderPass2KHR)load(context, "vkCmdBeginRenderPass2KHR"); - vkCmdEndRenderPass2KHR = (PFN_vkCmdEndRenderPass2KHR)load(context, "vkCmdEndRenderPass2KHR"); - vkCmdNextSubpass2KHR = (PFN_vkCmdNextSubpass2KHR)load(context, "vkCmdNextSubpass2KHR"); - vkCreateRenderPass2KHR = (PFN_vkCreateRenderPass2KHR)load(context, "vkCreateRenderPass2KHR"); -#endif /* defined(VK_KHR_create_renderpass2) */ -#if defined(VK_KHR_deferred_host_operations) - vkCreateDeferredOperationKHR = (PFN_vkCreateDeferredOperationKHR)load(context, "vkCreateDeferredOperationKHR"); - vkDeferredOperationJoinKHR = (PFN_vkDeferredOperationJoinKHR)load(context, "vkDeferredOperationJoinKHR"); - vkDestroyDeferredOperationKHR = (PFN_vkDestroyDeferredOperationKHR)load(context, "vkDestroyDeferredOperationKHR"); - vkGetDeferredOperationMaxConcurrencyKHR = (PFN_vkGetDeferredOperationMaxConcurrencyKHR)load(context, "vkGetDeferredOperationMaxConcurrencyKHR"); - vkGetDeferredOperationResultKHR = (PFN_vkGetDeferredOperationResultKHR)load(context, "vkGetDeferredOperationResultKHR"); -#endif /* defined(VK_KHR_deferred_host_operations) */ -#if defined(VK_KHR_descriptor_update_template) - vkCreateDescriptorUpdateTemplateKHR = (PFN_vkCreateDescriptorUpdateTemplateKHR)load(context, "vkCreateDescriptorUpdateTemplateKHR"); - vkDestroyDescriptorUpdateTemplateKHR = (PFN_vkDestroyDescriptorUpdateTemplateKHR)load(context, "vkDestroyDescriptorUpdateTemplateKHR"); - vkUpdateDescriptorSetWithTemplateKHR = (PFN_vkUpdateDescriptorSetWithTemplateKHR)load(context, "vkUpdateDescriptorSetWithTemplateKHR"); -#endif /* 
defined(VK_KHR_descriptor_update_template) */ -#if defined(VK_KHR_device_group) - vkCmdDispatchBaseKHR = (PFN_vkCmdDispatchBaseKHR)load(context, "vkCmdDispatchBaseKHR"); - vkCmdSetDeviceMaskKHR = (PFN_vkCmdSetDeviceMaskKHR)load(context, "vkCmdSetDeviceMaskKHR"); - vkGetDeviceGroupPeerMemoryFeaturesKHR = (PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR)load(context, "vkGetDeviceGroupPeerMemoryFeaturesKHR"); -#endif /* defined(VK_KHR_device_group) */ -#if defined(VK_KHR_display_swapchain) - vkCreateSharedSwapchainsKHR = (PFN_vkCreateSharedSwapchainsKHR)load(context, "vkCreateSharedSwapchainsKHR"); -#endif /* defined(VK_KHR_display_swapchain) */ -#if defined(VK_KHR_draw_indirect_count) - vkCmdDrawIndexedIndirectCountKHR = (PFN_vkCmdDrawIndexedIndirectCountKHR)load(context, "vkCmdDrawIndexedIndirectCountKHR"); - vkCmdDrawIndirectCountKHR = (PFN_vkCmdDrawIndirectCountKHR)load(context, "vkCmdDrawIndirectCountKHR"); -#endif /* defined(VK_KHR_draw_indirect_count) */ -#if defined(VK_KHR_dynamic_rendering) - vkCmdBeginRenderingKHR = (PFN_vkCmdBeginRenderingKHR)load(context, "vkCmdBeginRenderingKHR"); - vkCmdEndRenderingKHR = (PFN_vkCmdEndRenderingKHR)load(context, "vkCmdEndRenderingKHR"); -#endif /* defined(VK_KHR_dynamic_rendering) */ -#if defined(VK_KHR_external_fence_fd) - vkGetFenceFdKHR = (PFN_vkGetFenceFdKHR)load(context, "vkGetFenceFdKHR"); - vkImportFenceFdKHR = (PFN_vkImportFenceFdKHR)load(context, "vkImportFenceFdKHR"); -#endif /* defined(VK_KHR_external_fence_fd) */ -#if defined(VK_KHR_external_fence_win32) - vkGetFenceWin32HandleKHR = (PFN_vkGetFenceWin32HandleKHR)load(context, "vkGetFenceWin32HandleKHR"); - vkImportFenceWin32HandleKHR = (PFN_vkImportFenceWin32HandleKHR)load(context, "vkImportFenceWin32HandleKHR"); -#endif /* defined(VK_KHR_external_fence_win32) */ -#if defined(VK_KHR_external_memory_fd) - vkGetMemoryFdKHR = (PFN_vkGetMemoryFdKHR)load(context, "vkGetMemoryFdKHR"); - vkGetMemoryFdPropertiesKHR = (PFN_vkGetMemoryFdPropertiesKHR)load(context, "vkGetMemoryFdPropertiesKHR"); -#endif /* defined(VK_KHR_external_memory_fd) */ -#if defined(VK_KHR_external_memory_win32) - vkGetMemoryWin32HandleKHR = (PFN_vkGetMemoryWin32HandleKHR)load(context, "vkGetMemoryWin32HandleKHR"); - vkGetMemoryWin32HandlePropertiesKHR = (PFN_vkGetMemoryWin32HandlePropertiesKHR)load(context, "vkGetMemoryWin32HandlePropertiesKHR"); -#endif /* defined(VK_KHR_external_memory_win32) */ -#if defined(VK_KHR_external_semaphore_fd) - vkGetSemaphoreFdKHR = (PFN_vkGetSemaphoreFdKHR)load(context, "vkGetSemaphoreFdKHR"); - vkImportSemaphoreFdKHR = (PFN_vkImportSemaphoreFdKHR)load(context, "vkImportSemaphoreFdKHR"); -#endif /* defined(VK_KHR_external_semaphore_fd) */ -#if defined(VK_KHR_external_semaphore_win32) - vkGetSemaphoreWin32HandleKHR = (PFN_vkGetSemaphoreWin32HandleKHR)load(context, "vkGetSemaphoreWin32HandleKHR"); - vkImportSemaphoreWin32HandleKHR = (PFN_vkImportSemaphoreWin32HandleKHR)load(context, "vkImportSemaphoreWin32HandleKHR"); -#endif /* defined(VK_KHR_external_semaphore_win32) */ -#if defined(VK_KHR_fragment_shading_rate) - vkCmdSetFragmentShadingRateKHR = (PFN_vkCmdSetFragmentShadingRateKHR)load(context, "vkCmdSetFragmentShadingRateKHR"); -#endif /* defined(VK_KHR_fragment_shading_rate) */ -#if defined(VK_KHR_get_memory_requirements2) - vkGetBufferMemoryRequirements2KHR = (PFN_vkGetBufferMemoryRequirements2KHR)load(context, "vkGetBufferMemoryRequirements2KHR"); - vkGetImageMemoryRequirements2KHR = (PFN_vkGetImageMemoryRequirements2KHR)load(context, "vkGetImageMemoryRequirements2KHR"); - 
vkGetImageSparseMemoryRequirements2KHR = (PFN_vkGetImageSparseMemoryRequirements2KHR)load(context, "vkGetImageSparseMemoryRequirements2KHR"); -#endif /* defined(VK_KHR_get_memory_requirements2) */ -#if defined(VK_KHR_maintenance1) - vkTrimCommandPoolKHR = (PFN_vkTrimCommandPoolKHR)load(context, "vkTrimCommandPoolKHR"); -#endif /* defined(VK_KHR_maintenance1) */ -#if defined(VK_KHR_maintenance3) - vkGetDescriptorSetLayoutSupportKHR = (PFN_vkGetDescriptorSetLayoutSupportKHR)load(context, "vkGetDescriptorSetLayoutSupportKHR"); -#endif /* defined(VK_KHR_maintenance3) */ -#if defined(VK_KHR_maintenance4) - vkGetDeviceBufferMemoryRequirementsKHR = (PFN_vkGetDeviceBufferMemoryRequirementsKHR)load(context, "vkGetDeviceBufferMemoryRequirementsKHR"); - vkGetDeviceImageMemoryRequirementsKHR = (PFN_vkGetDeviceImageMemoryRequirementsKHR)load(context, "vkGetDeviceImageMemoryRequirementsKHR"); - vkGetDeviceImageSparseMemoryRequirementsKHR = (PFN_vkGetDeviceImageSparseMemoryRequirementsKHR)load(context, "vkGetDeviceImageSparseMemoryRequirementsKHR"); -#endif /* defined(VK_KHR_maintenance4) */ -#if defined(VK_KHR_maintenance5) - vkCmdBindIndexBuffer2KHR = (PFN_vkCmdBindIndexBuffer2KHR)load(context, "vkCmdBindIndexBuffer2KHR"); - vkGetDeviceImageSubresourceLayoutKHR = (PFN_vkGetDeviceImageSubresourceLayoutKHR)load(context, "vkGetDeviceImageSubresourceLayoutKHR"); - vkGetImageSubresourceLayout2KHR = (PFN_vkGetImageSubresourceLayout2KHR)load(context, "vkGetImageSubresourceLayout2KHR"); - vkGetRenderingAreaGranularityKHR = (PFN_vkGetRenderingAreaGranularityKHR)load(context, "vkGetRenderingAreaGranularityKHR"); -#endif /* defined(VK_KHR_maintenance5) */ -#if defined(VK_KHR_map_memory2) - vkMapMemory2KHR = (PFN_vkMapMemory2KHR)load(context, "vkMapMemory2KHR"); - vkUnmapMemory2KHR = (PFN_vkUnmapMemory2KHR)load(context, "vkUnmapMemory2KHR"); -#endif /* defined(VK_KHR_map_memory2) */ -#if defined(VK_KHR_performance_query) - vkAcquireProfilingLockKHR = (PFN_vkAcquireProfilingLockKHR)load(context, "vkAcquireProfilingLockKHR"); - vkReleaseProfilingLockKHR = (PFN_vkReleaseProfilingLockKHR)load(context, "vkReleaseProfilingLockKHR"); -#endif /* defined(VK_KHR_performance_query) */ -#if defined(VK_KHR_pipeline_executable_properties) - vkGetPipelineExecutableInternalRepresentationsKHR = (PFN_vkGetPipelineExecutableInternalRepresentationsKHR)load(context, "vkGetPipelineExecutableInternalRepresentationsKHR"); - vkGetPipelineExecutablePropertiesKHR = (PFN_vkGetPipelineExecutablePropertiesKHR)load(context, "vkGetPipelineExecutablePropertiesKHR"); - vkGetPipelineExecutableStatisticsKHR = (PFN_vkGetPipelineExecutableStatisticsKHR)load(context, "vkGetPipelineExecutableStatisticsKHR"); -#endif /* defined(VK_KHR_pipeline_executable_properties) */ -#if defined(VK_KHR_present_wait) - vkWaitForPresentKHR = (PFN_vkWaitForPresentKHR)load(context, "vkWaitForPresentKHR"); -#endif /* defined(VK_KHR_present_wait) */ -#if defined(VK_KHR_push_descriptor) - vkCmdPushDescriptorSetKHR = (PFN_vkCmdPushDescriptorSetKHR)load(context, "vkCmdPushDescriptorSetKHR"); -#endif /* defined(VK_KHR_push_descriptor) */ -#if defined(VK_KHR_ray_tracing_maintenance1) && defined(VK_KHR_ray_tracing_pipeline) - vkCmdTraceRaysIndirect2KHR = (PFN_vkCmdTraceRaysIndirect2KHR)load(context, "vkCmdTraceRaysIndirect2KHR"); -#endif /* defined(VK_KHR_ray_tracing_maintenance1) && defined(VK_KHR_ray_tracing_pipeline) */ -#if defined(VK_KHR_ray_tracing_pipeline) - vkCmdSetRayTracingPipelineStackSizeKHR = (PFN_vkCmdSetRayTracingPipelineStackSizeKHR)load(context, 
"vkCmdSetRayTracingPipelineStackSizeKHR"); - vkCmdTraceRaysIndirectKHR = (PFN_vkCmdTraceRaysIndirectKHR)load(context, "vkCmdTraceRaysIndirectKHR"); - vkCmdTraceRaysKHR = (PFN_vkCmdTraceRaysKHR)load(context, "vkCmdTraceRaysKHR"); - vkCreateRayTracingPipelinesKHR = (PFN_vkCreateRayTracingPipelinesKHR)load(context, "vkCreateRayTracingPipelinesKHR"); - vkGetRayTracingCaptureReplayShaderGroupHandlesKHR = (PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR)load(context, "vkGetRayTracingCaptureReplayShaderGroupHandlesKHR"); - vkGetRayTracingShaderGroupHandlesKHR = (PFN_vkGetRayTracingShaderGroupHandlesKHR)load(context, "vkGetRayTracingShaderGroupHandlesKHR"); - vkGetRayTracingShaderGroupStackSizeKHR = (PFN_vkGetRayTracingShaderGroupStackSizeKHR)load(context, "vkGetRayTracingShaderGroupStackSizeKHR"); -#endif /* defined(VK_KHR_ray_tracing_pipeline) */ -#if defined(VK_KHR_sampler_ycbcr_conversion) - vkCreateSamplerYcbcrConversionKHR = (PFN_vkCreateSamplerYcbcrConversionKHR)load(context, "vkCreateSamplerYcbcrConversionKHR"); - vkDestroySamplerYcbcrConversionKHR = (PFN_vkDestroySamplerYcbcrConversionKHR)load(context, "vkDestroySamplerYcbcrConversionKHR"); -#endif /* defined(VK_KHR_sampler_ycbcr_conversion) */ -#if defined(VK_KHR_shared_presentable_image) - vkGetSwapchainStatusKHR = (PFN_vkGetSwapchainStatusKHR)load(context, "vkGetSwapchainStatusKHR"); -#endif /* defined(VK_KHR_shared_presentable_image) */ -#if defined(VK_KHR_swapchain) - vkAcquireNextImageKHR = (PFN_vkAcquireNextImageKHR)load(context, "vkAcquireNextImageKHR"); - vkCreateSwapchainKHR = (PFN_vkCreateSwapchainKHR)load(context, "vkCreateSwapchainKHR"); - vkDestroySwapchainKHR = (PFN_vkDestroySwapchainKHR)load(context, "vkDestroySwapchainKHR"); - vkGetSwapchainImagesKHR = (PFN_vkGetSwapchainImagesKHR)load(context, "vkGetSwapchainImagesKHR"); - vkQueuePresentKHR = (PFN_vkQueuePresentKHR)load(context, "vkQueuePresentKHR"); -#endif /* defined(VK_KHR_swapchain) */ -#if defined(VK_KHR_synchronization2) - vkCmdPipelineBarrier2KHR = (PFN_vkCmdPipelineBarrier2KHR)load(context, "vkCmdPipelineBarrier2KHR"); - vkCmdResetEvent2KHR = (PFN_vkCmdResetEvent2KHR)load(context, "vkCmdResetEvent2KHR"); - vkCmdSetEvent2KHR = (PFN_vkCmdSetEvent2KHR)load(context, "vkCmdSetEvent2KHR"); - vkCmdWaitEvents2KHR = (PFN_vkCmdWaitEvents2KHR)load(context, "vkCmdWaitEvents2KHR"); - vkCmdWriteTimestamp2KHR = (PFN_vkCmdWriteTimestamp2KHR)load(context, "vkCmdWriteTimestamp2KHR"); - vkQueueSubmit2KHR = (PFN_vkQueueSubmit2KHR)load(context, "vkQueueSubmit2KHR"); -#endif /* defined(VK_KHR_synchronization2) */ -#if defined(VK_KHR_synchronization2) && defined(VK_AMD_buffer_marker) - vkCmdWriteBufferMarker2AMD = (PFN_vkCmdWriteBufferMarker2AMD)load(context, "vkCmdWriteBufferMarker2AMD"); -#endif /* defined(VK_KHR_synchronization2) && defined(VK_AMD_buffer_marker) */ -#if defined(VK_KHR_synchronization2) && defined(VK_NV_device_diagnostic_checkpoints) - vkGetQueueCheckpointData2NV = (PFN_vkGetQueueCheckpointData2NV)load(context, "vkGetQueueCheckpointData2NV"); -#endif /* defined(VK_KHR_synchronization2) && defined(VK_NV_device_diagnostic_checkpoints) */ -#if defined(VK_KHR_timeline_semaphore) - vkGetSemaphoreCounterValueKHR = (PFN_vkGetSemaphoreCounterValueKHR)load(context, "vkGetSemaphoreCounterValueKHR"); - vkSignalSemaphoreKHR = (PFN_vkSignalSemaphoreKHR)load(context, "vkSignalSemaphoreKHR"); - vkWaitSemaphoresKHR = (PFN_vkWaitSemaphoresKHR)load(context, "vkWaitSemaphoresKHR"); -#endif /* defined(VK_KHR_timeline_semaphore) */ -#if defined(VK_KHR_video_decode_queue) - 
vkCmdDecodeVideoKHR = (PFN_vkCmdDecodeVideoKHR)load(context, "vkCmdDecodeVideoKHR"); -#endif /* defined(VK_KHR_video_decode_queue) */ -#if defined(VK_KHR_video_encode_queue) - vkCmdEncodeVideoKHR = (PFN_vkCmdEncodeVideoKHR)load(context, "vkCmdEncodeVideoKHR"); - vkGetEncodedVideoSessionParametersKHR = (PFN_vkGetEncodedVideoSessionParametersKHR)load(context, "vkGetEncodedVideoSessionParametersKHR"); -#endif /* defined(VK_KHR_video_encode_queue) */ -#if defined(VK_KHR_video_queue) - vkBindVideoSessionMemoryKHR = (PFN_vkBindVideoSessionMemoryKHR)load(context, "vkBindVideoSessionMemoryKHR"); - vkCmdBeginVideoCodingKHR = (PFN_vkCmdBeginVideoCodingKHR)load(context, "vkCmdBeginVideoCodingKHR"); - vkCmdControlVideoCodingKHR = (PFN_vkCmdControlVideoCodingKHR)load(context, "vkCmdControlVideoCodingKHR"); - vkCmdEndVideoCodingKHR = (PFN_vkCmdEndVideoCodingKHR)load(context, "vkCmdEndVideoCodingKHR"); - vkCreateVideoSessionKHR = (PFN_vkCreateVideoSessionKHR)load(context, "vkCreateVideoSessionKHR"); - vkCreateVideoSessionParametersKHR = (PFN_vkCreateVideoSessionParametersKHR)load(context, "vkCreateVideoSessionParametersKHR"); - vkDestroyVideoSessionKHR = (PFN_vkDestroyVideoSessionKHR)load(context, "vkDestroyVideoSessionKHR"); - vkDestroyVideoSessionParametersKHR = (PFN_vkDestroyVideoSessionParametersKHR)load(context, "vkDestroyVideoSessionParametersKHR"); - vkGetVideoSessionMemoryRequirementsKHR = (PFN_vkGetVideoSessionMemoryRequirementsKHR)load(context, "vkGetVideoSessionMemoryRequirementsKHR"); - vkUpdateVideoSessionParametersKHR = (PFN_vkUpdateVideoSessionParametersKHR)load(context, "vkUpdateVideoSessionParametersKHR"); -#endif /* defined(VK_KHR_video_queue) */ -#if defined(VK_NVX_binary_import) - vkCmdCuLaunchKernelNVX = (PFN_vkCmdCuLaunchKernelNVX)load(context, "vkCmdCuLaunchKernelNVX"); - vkCreateCuFunctionNVX = (PFN_vkCreateCuFunctionNVX)load(context, "vkCreateCuFunctionNVX"); - vkCreateCuModuleNVX = (PFN_vkCreateCuModuleNVX)load(context, "vkCreateCuModuleNVX"); - vkDestroyCuFunctionNVX = (PFN_vkDestroyCuFunctionNVX)load(context, "vkDestroyCuFunctionNVX"); - vkDestroyCuModuleNVX = (PFN_vkDestroyCuModuleNVX)load(context, "vkDestroyCuModuleNVX"); -#endif /* defined(VK_NVX_binary_import) */ -#if defined(VK_NVX_image_view_handle) - vkGetImageViewAddressNVX = (PFN_vkGetImageViewAddressNVX)load(context, "vkGetImageViewAddressNVX"); - vkGetImageViewHandleNVX = (PFN_vkGetImageViewHandleNVX)load(context, "vkGetImageViewHandleNVX"); -#endif /* defined(VK_NVX_image_view_handle) */ -#if defined(VK_NV_clip_space_w_scaling) - vkCmdSetViewportWScalingNV = (PFN_vkCmdSetViewportWScalingNV)load(context, "vkCmdSetViewportWScalingNV"); -#endif /* defined(VK_NV_clip_space_w_scaling) */ -#if defined(VK_NV_copy_memory_indirect) - vkCmdCopyMemoryIndirectNV = (PFN_vkCmdCopyMemoryIndirectNV)load(context, "vkCmdCopyMemoryIndirectNV"); - vkCmdCopyMemoryToImageIndirectNV = (PFN_vkCmdCopyMemoryToImageIndirectNV)load(context, "vkCmdCopyMemoryToImageIndirectNV"); -#endif /* defined(VK_NV_copy_memory_indirect) */ -#if defined(VK_NV_cuda_kernel_launch) - vkCmdCudaLaunchKernelNV = (PFN_vkCmdCudaLaunchKernelNV)load(context, "vkCmdCudaLaunchKernelNV"); - vkCreateCudaFunctionNV = (PFN_vkCreateCudaFunctionNV)load(context, "vkCreateCudaFunctionNV"); - vkCreateCudaModuleNV = (PFN_vkCreateCudaModuleNV)load(context, "vkCreateCudaModuleNV"); - vkDestroyCudaFunctionNV = (PFN_vkDestroyCudaFunctionNV)load(context, "vkDestroyCudaFunctionNV"); - vkDestroyCudaModuleNV = (PFN_vkDestroyCudaModuleNV)load(context, "vkDestroyCudaModuleNV"); - 
vkGetCudaModuleCacheNV = (PFN_vkGetCudaModuleCacheNV)load(context, "vkGetCudaModuleCacheNV"); -#endif /* defined(VK_NV_cuda_kernel_launch) */ -#if defined(VK_NV_device_diagnostic_checkpoints) - vkCmdSetCheckpointNV = (PFN_vkCmdSetCheckpointNV)load(context, "vkCmdSetCheckpointNV"); - vkGetQueueCheckpointDataNV = (PFN_vkGetQueueCheckpointDataNV)load(context, "vkGetQueueCheckpointDataNV"); -#endif /* defined(VK_NV_device_diagnostic_checkpoints) */ -#if defined(VK_NV_device_generated_commands) - vkCmdBindPipelineShaderGroupNV = (PFN_vkCmdBindPipelineShaderGroupNV)load(context, "vkCmdBindPipelineShaderGroupNV"); - vkCmdExecuteGeneratedCommandsNV = (PFN_vkCmdExecuteGeneratedCommandsNV)load(context, "vkCmdExecuteGeneratedCommandsNV"); - vkCmdPreprocessGeneratedCommandsNV = (PFN_vkCmdPreprocessGeneratedCommandsNV)load(context, "vkCmdPreprocessGeneratedCommandsNV"); - vkCreateIndirectCommandsLayoutNV = (PFN_vkCreateIndirectCommandsLayoutNV)load(context, "vkCreateIndirectCommandsLayoutNV"); - vkDestroyIndirectCommandsLayoutNV = (PFN_vkDestroyIndirectCommandsLayoutNV)load(context, "vkDestroyIndirectCommandsLayoutNV"); - vkGetGeneratedCommandsMemoryRequirementsNV = (PFN_vkGetGeneratedCommandsMemoryRequirementsNV)load(context, "vkGetGeneratedCommandsMemoryRequirementsNV"); -#endif /* defined(VK_NV_device_generated_commands) */ -#if defined(VK_NV_device_generated_commands_compute) - vkCmdUpdatePipelineIndirectBufferNV = (PFN_vkCmdUpdatePipelineIndirectBufferNV)load(context, "vkCmdUpdatePipelineIndirectBufferNV"); - vkGetPipelineIndirectDeviceAddressNV = (PFN_vkGetPipelineIndirectDeviceAddressNV)load(context, "vkGetPipelineIndirectDeviceAddressNV"); - vkGetPipelineIndirectMemoryRequirementsNV = (PFN_vkGetPipelineIndirectMemoryRequirementsNV)load(context, "vkGetPipelineIndirectMemoryRequirementsNV"); -#endif /* defined(VK_NV_device_generated_commands_compute) */ -#if defined(VK_NV_external_memory_rdma) - vkGetMemoryRemoteAddressNV = (PFN_vkGetMemoryRemoteAddressNV)load(context, "vkGetMemoryRemoteAddressNV"); -#endif /* defined(VK_NV_external_memory_rdma) */ -#if defined(VK_NV_external_memory_win32) - vkGetMemoryWin32HandleNV = (PFN_vkGetMemoryWin32HandleNV)load(context, "vkGetMemoryWin32HandleNV"); -#endif /* defined(VK_NV_external_memory_win32) */ -#if defined(VK_NV_fragment_shading_rate_enums) - vkCmdSetFragmentShadingRateEnumNV = (PFN_vkCmdSetFragmentShadingRateEnumNV)load(context, "vkCmdSetFragmentShadingRateEnumNV"); -#endif /* defined(VK_NV_fragment_shading_rate_enums) */ -#if defined(VK_NV_low_latency2) - vkGetLatencyTimingsNV = (PFN_vkGetLatencyTimingsNV)load(context, "vkGetLatencyTimingsNV"); - vkLatencySleepNV = (PFN_vkLatencySleepNV)load(context, "vkLatencySleepNV"); - vkQueueNotifyOutOfBandNV = (PFN_vkQueueNotifyOutOfBandNV)load(context, "vkQueueNotifyOutOfBandNV"); - vkSetLatencyMarkerNV = (PFN_vkSetLatencyMarkerNV)load(context, "vkSetLatencyMarkerNV"); - vkSetLatencySleepModeNV = (PFN_vkSetLatencySleepModeNV)load(context, "vkSetLatencySleepModeNV"); -#endif /* defined(VK_NV_low_latency2) */ -#if defined(VK_NV_memory_decompression) - vkCmdDecompressMemoryIndirectCountNV = (PFN_vkCmdDecompressMemoryIndirectCountNV)load(context, "vkCmdDecompressMemoryIndirectCountNV"); - vkCmdDecompressMemoryNV = (PFN_vkCmdDecompressMemoryNV)load(context, "vkCmdDecompressMemoryNV"); -#endif /* defined(VK_NV_memory_decompression) */ -#if defined(VK_NV_mesh_shader) - vkCmdDrawMeshTasksIndirectCountNV = (PFN_vkCmdDrawMeshTasksIndirectCountNV)load(context, "vkCmdDrawMeshTasksIndirectCountNV"); - 
vkCmdDrawMeshTasksIndirectNV = (PFN_vkCmdDrawMeshTasksIndirectNV)load(context, "vkCmdDrawMeshTasksIndirectNV"); - vkCmdDrawMeshTasksNV = (PFN_vkCmdDrawMeshTasksNV)load(context, "vkCmdDrawMeshTasksNV"); -#endif /* defined(VK_NV_mesh_shader) */ -#if defined(VK_NV_optical_flow) - vkBindOpticalFlowSessionImageNV = (PFN_vkBindOpticalFlowSessionImageNV)load(context, "vkBindOpticalFlowSessionImageNV"); - vkCmdOpticalFlowExecuteNV = (PFN_vkCmdOpticalFlowExecuteNV)load(context, "vkCmdOpticalFlowExecuteNV"); - vkCreateOpticalFlowSessionNV = (PFN_vkCreateOpticalFlowSessionNV)load(context, "vkCreateOpticalFlowSessionNV"); - vkDestroyOpticalFlowSessionNV = (PFN_vkDestroyOpticalFlowSessionNV)load(context, "vkDestroyOpticalFlowSessionNV"); -#endif /* defined(VK_NV_optical_flow) */ -#if defined(VK_NV_ray_tracing) - vkBindAccelerationStructureMemoryNV = (PFN_vkBindAccelerationStructureMemoryNV)load(context, "vkBindAccelerationStructureMemoryNV"); - vkCmdBuildAccelerationStructureNV = (PFN_vkCmdBuildAccelerationStructureNV)load(context, "vkCmdBuildAccelerationStructureNV"); - vkCmdCopyAccelerationStructureNV = (PFN_vkCmdCopyAccelerationStructureNV)load(context, "vkCmdCopyAccelerationStructureNV"); - vkCmdTraceRaysNV = (PFN_vkCmdTraceRaysNV)load(context, "vkCmdTraceRaysNV"); - vkCmdWriteAccelerationStructuresPropertiesNV = (PFN_vkCmdWriteAccelerationStructuresPropertiesNV)load(context, "vkCmdWriteAccelerationStructuresPropertiesNV"); - vkCompileDeferredNV = (PFN_vkCompileDeferredNV)load(context, "vkCompileDeferredNV"); - vkCreateAccelerationStructureNV = (PFN_vkCreateAccelerationStructureNV)load(context, "vkCreateAccelerationStructureNV"); - vkCreateRayTracingPipelinesNV = (PFN_vkCreateRayTracingPipelinesNV)load(context, "vkCreateRayTracingPipelinesNV"); - vkDestroyAccelerationStructureNV = (PFN_vkDestroyAccelerationStructureNV)load(context, "vkDestroyAccelerationStructureNV"); - vkGetAccelerationStructureHandleNV = (PFN_vkGetAccelerationStructureHandleNV)load(context, "vkGetAccelerationStructureHandleNV"); - vkGetAccelerationStructureMemoryRequirementsNV = (PFN_vkGetAccelerationStructureMemoryRequirementsNV)load(context, "vkGetAccelerationStructureMemoryRequirementsNV"); - vkGetRayTracingShaderGroupHandlesNV = (PFN_vkGetRayTracingShaderGroupHandlesNV)load(context, "vkGetRayTracingShaderGroupHandlesNV"); -#endif /* defined(VK_NV_ray_tracing) */ -#if defined(VK_NV_scissor_exclusive) && VK_NV_SCISSOR_EXCLUSIVE_SPEC_VERSION >= 2 - vkCmdSetExclusiveScissorEnableNV = (PFN_vkCmdSetExclusiveScissorEnableNV)load(context, "vkCmdSetExclusiveScissorEnableNV"); -#endif /* defined(VK_NV_scissor_exclusive) && VK_NV_SCISSOR_EXCLUSIVE_SPEC_VERSION >= 2 */ -#if defined(VK_NV_scissor_exclusive) - vkCmdSetExclusiveScissorNV = (PFN_vkCmdSetExclusiveScissorNV)load(context, "vkCmdSetExclusiveScissorNV"); -#endif /* defined(VK_NV_scissor_exclusive) */ -#if defined(VK_NV_shading_rate_image) - vkCmdBindShadingRateImageNV = (PFN_vkCmdBindShadingRateImageNV)load(context, "vkCmdBindShadingRateImageNV"); - vkCmdSetCoarseSampleOrderNV = (PFN_vkCmdSetCoarseSampleOrderNV)load(context, "vkCmdSetCoarseSampleOrderNV"); - vkCmdSetViewportShadingRatePaletteNV = (PFN_vkCmdSetViewportShadingRatePaletteNV)load(context, "vkCmdSetViewportShadingRatePaletteNV"); -#endif /* defined(VK_NV_shading_rate_image) */ -#if defined(VK_QCOM_tile_properties) - vkGetDynamicRenderingTilePropertiesQCOM = (PFN_vkGetDynamicRenderingTilePropertiesQCOM)load(context, "vkGetDynamicRenderingTilePropertiesQCOM"); - vkGetFramebufferTilePropertiesQCOM = 
(PFN_vkGetFramebufferTilePropertiesQCOM)load(context, "vkGetFramebufferTilePropertiesQCOM"); -#endif /* defined(VK_QCOM_tile_properties) */ -#if defined(VK_QNX_external_memory_screen_buffer) - vkGetScreenBufferPropertiesQNX = (PFN_vkGetScreenBufferPropertiesQNX)load(context, "vkGetScreenBufferPropertiesQNX"); -#endif /* defined(VK_QNX_external_memory_screen_buffer) */ -#if defined(VK_VALVE_descriptor_set_host_mapping) - vkGetDescriptorSetHostMappingVALVE = (PFN_vkGetDescriptorSetHostMappingVALVE)load(context, "vkGetDescriptorSetHostMappingVALVE"); - vkGetDescriptorSetLayoutHostMappingInfoVALVE = (PFN_vkGetDescriptorSetLayoutHostMappingInfoVALVE)load(context, "vkGetDescriptorSetLayoutHostMappingInfoVALVE"); -#endif /* defined(VK_VALVE_descriptor_set_host_mapping) */ -#if (defined(VK_EXT_extended_dynamic_state)) || (defined(VK_EXT_shader_object)) - vkCmdBindVertexBuffers2EXT = (PFN_vkCmdBindVertexBuffers2EXT)load(context, "vkCmdBindVertexBuffers2EXT"); - vkCmdSetCullModeEXT = (PFN_vkCmdSetCullModeEXT)load(context, "vkCmdSetCullModeEXT"); - vkCmdSetDepthBoundsTestEnableEXT = (PFN_vkCmdSetDepthBoundsTestEnableEXT)load(context, "vkCmdSetDepthBoundsTestEnableEXT"); - vkCmdSetDepthCompareOpEXT = (PFN_vkCmdSetDepthCompareOpEXT)load(context, "vkCmdSetDepthCompareOpEXT"); - vkCmdSetDepthTestEnableEXT = (PFN_vkCmdSetDepthTestEnableEXT)load(context, "vkCmdSetDepthTestEnableEXT"); - vkCmdSetDepthWriteEnableEXT = (PFN_vkCmdSetDepthWriteEnableEXT)load(context, "vkCmdSetDepthWriteEnableEXT"); - vkCmdSetFrontFaceEXT = (PFN_vkCmdSetFrontFaceEXT)load(context, "vkCmdSetFrontFaceEXT"); - vkCmdSetPrimitiveTopologyEXT = (PFN_vkCmdSetPrimitiveTopologyEXT)load(context, "vkCmdSetPrimitiveTopologyEXT"); - vkCmdSetScissorWithCountEXT = (PFN_vkCmdSetScissorWithCountEXT)load(context, "vkCmdSetScissorWithCountEXT"); - vkCmdSetStencilOpEXT = (PFN_vkCmdSetStencilOpEXT)load(context, "vkCmdSetStencilOpEXT"); - vkCmdSetStencilTestEnableEXT = (PFN_vkCmdSetStencilTestEnableEXT)load(context, "vkCmdSetStencilTestEnableEXT"); - vkCmdSetViewportWithCountEXT = (PFN_vkCmdSetViewportWithCountEXT)load(context, "vkCmdSetViewportWithCountEXT"); -#endif /* (defined(VK_EXT_extended_dynamic_state)) || (defined(VK_EXT_shader_object)) */ -#if (defined(VK_EXT_extended_dynamic_state2)) || (defined(VK_EXT_shader_object)) - vkCmdSetDepthBiasEnableEXT = (PFN_vkCmdSetDepthBiasEnableEXT)load(context, "vkCmdSetDepthBiasEnableEXT"); - vkCmdSetLogicOpEXT = (PFN_vkCmdSetLogicOpEXT)load(context, "vkCmdSetLogicOpEXT"); - vkCmdSetPatchControlPointsEXT = (PFN_vkCmdSetPatchControlPointsEXT)load(context, "vkCmdSetPatchControlPointsEXT"); - vkCmdSetPrimitiveRestartEnableEXT = (PFN_vkCmdSetPrimitiveRestartEnableEXT)load(context, "vkCmdSetPrimitiveRestartEnableEXT"); - vkCmdSetRasterizerDiscardEnableEXT = (PFN_vkCmdSetRasterizerDiscardEnableEXT)load(context, "vkCmdSetRasterizerDiscardEnableEXT"); -#endif /* (defined(VK_EXT_extended_dynamic_state2)) || (defined(VK_EXT_shader_object)) */ -#if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) - vkCmdSetAlphaToCoverageEnableEXT = (PFN_vkCmdSetAlphaToCoverageEnableEXT)load(context, "vkCmdSetAlphaToCoverageEnableEXT"); - vkCmdSetAlphaToOneEnableEXT = (PFN_vkCmdSetAlphaToOneEnableEXT)load(context, "vkCmdSetAlphaToOneEnableEXT"); - vkCmdSetColorBlendAdvancedEXT = (PFN_vkCmdSetColorBlendAdvancedEXT)load(context, "vkCmdSetColorBlendAdvancedEXT"); - vkCmdSetColorBlendEnableEXT = (PFN_vkCmdSetColorBlendEnableEXT)load(context, "vkCmdSetColorBlendEnableEXT"); - vkCmdSetColorBlendEquationEXT = 
(PFN_vkCmdSetColorBlendEquationEXT)load(context, "vkCmdSetColorBlendEquationEXT"); - vkCmdSetColorWriteMaskEXT = (PFN_vkCmdSetColorWriteMaskEXT)load(context, "vkCmdSetColorWriteMaskEXT"); - vkCmdSetConservativeRasterizationModeEXT = (PFN_vkCmdSetConservativeRasterizationModeEXT)load(context, "vkCmdSetConservativeRasterizationModeEXT"); - vkCmdSetDepthClampEnableEXT = (PFN_vkCmdSetDepthClampEnableEXT)load(context, "vkCmdSetDepthClampEnableEXT"); - vkCmdSetDepthClipEnableEXT = (PFN_vkCmdSetDepthClipEnableEXT)load(context, "vkCmdSetDepthClipEnableEXT"); - vkCmdSetDepthClipNegativeOneToOneEXT = (PFN_vkCmdSetDepthClipNegativeOneToOneEXT)load(context, "vkCmdSetDepthClipNegativeOneToOneEXT"); - vkCmdSetExtraPrimitiveOverestimationSizeEXT = (PFN_vkCmdSetExtraPrimitiveOverestimationSizeEXT)load(context, "vkCmdSetExtraPrimitiveOverestimationSizeEXT"); - vkCmdSetLineRasterizationModeEXT = (PFN_vkCmdSetLineRasterizationModeEXT)load(context, "vkCmdSetLineRasterizationModeEXT"); - vkCmdSetLineStippleEnableEXT = (PFN_vkCmdSetLineStippleEnableEXT)load(context, "vkCmdSetLineStippleEnableEXT"); - vkCmdSetLogicOpEnableEXT = (PFN_vkCmdSetLogicOpEnableEXT)load(context, "vkCmdSetLogicOpEnableEXT"); - vkCmdSetPolygonModeEXT = (PFN_vkCmdSetPolygonModeEXT)load(context, "vkCmdSetPolygonModeEXT"); - vkCmdSetProvokingVertexModeEXT = (PFN_vkCmdSetProvokingVertexModeEXT)load(context, "vkCmdSetProvokingVertexModeEXT"); - vkCmdSetRasterizationSamplesEXT = (PFN_vkCmdSetRasterizationSamplesEXT)load(context, "vkCmdSetRasterizationSamplesEXT"); - vkCmdSetRasterizationStreamEXT = (PFN_vkCmdSetRasterizationStreamEXT)load(context, "vkCmdSetRasterizationStreamEXT"); - vkCmdSetSampleLocationsEnableEXT = (PFN_vkCmdSetSampleLocationsEnableEXT)load(context, "vkCmdSetSampleLocationsEnableEXT"); - vkCmdSetSampleMaskEXT = (PFN_vkCmdSetSampleMaskEXT)load(context, "vkCmdSetSampleMaskEXT"); - vkCmdSetTessellationDomainOriginEXT = (PFN_vkCmdSetTessellationDomainOriginEXT)load(context, "vkCmdSetTessellationDomainOriginEXT"); -#endif /* (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) */ -#if (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_clip_space_w_scaling)) || (defined(VK_EXT_shader_object) && defined(VK_NV_clip_space_w_scaling)) - vkCmdSetViewportWScalingEnableNV = (PFN_vkCmdSetViewportWScalingEnableNV)load(context, "vkCmdSetViewportWScalingEnableNV"); -#endif /* (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_clip_space_w_scaling)) || (defined(VK_EXT_shader_object) && defined(VK_NV_clip_space_w_scaling)) */ -#if (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_viewport_swizzle)) || (defined(VK_EXT_shader_object) && defined(VK_NV_viewport_swizzle)) - vkCmdSetViewportSwizzleNV = (PFN_vkCmdSetViewportSwizzleNV)load(context, "vkCmdSetViewportSwizzleNV"); -#endif /* (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_viewport_swizzle)) || (defined(VK_EXT_shader_object) && defined(VK_NV_viewport_swizzle)) */ -#if (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_fragment_coverage_to_color)) || (defined(VK_EXT_shader_object) && defined(VK_NV_fragment_coverage_to_color)) - vkCmdSetCoverageToColorEnableNV = (PFN_vkCmdSetCoverageToColorEnableNV)load(context, "vkCmdSetCoverageToColorEnableNV"); - vkCmdSetCoverageToColorLocationNV = (PFN_vkCmdSetCoverageToColorLocationNV)load(context, "vkCmdSetCoverageToColorLocationNV"); -#endif /* (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_fragment_coverage_to_color)) || (defined(VK_EXT_shader_object) && 
defined(VK_NV_fragment_coverage_to_color)) */ -#if (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_framebuffer_mixed_samples)) || (defined(VK_EXT_shader_object) && defined(VK_NV_framebuffer_mixed_samples)) - vkCmdSetCoverageModulationModeNV = (PFN_vkCmdSetCoverageModulationModeNV)load(context, "vkCmdSetCoverageModulationModeNV"); - vkCmdSetCoverageModulationTableEnableNV = (PFN_vkCmdSetCoverageModulationTableEnableNV)load(context, "vkCmdSetCoverageModulationTableEnableNV"); - vkCmdSetCoverageModulationTableNV = (PFN_vkCmdSetCoverageModulationTableNV)load(context, "vkCmdSetCoverageModulationTableNV"); -#endif /* (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_framebuffer_mixed_samples)) || (defined(VK_EXT_shader_object) && defined(VK_NV_framebuffer_mixed_samples)) */ -#if (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_shading_rate_image)) || (defined(VK_EXT_shader_object) && defined(VK_NV_shading_rate_image)) - vkCmdSetShadingRateImageEnableNV = (PFN_vkCmdSetShadingRateImageEnableNV)load(context, "vkCmdSetShadingRateImageEnableNV"); -#endif /* (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_shading_rate_image)) || (defined(VK_EXT_shader_object) && defined(VK_NV_shading_rate_image)) */ -#if (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_representative_fragment_test)) || (defined(VK_EXT_shader_object) && defined(VK_NV_representative_fragment_test)) - vkCmdSetRepresentativeFragmentTestEnableNV = (PFN_vkCmdSetRepresentativeFragmentTestEnableNV)load(context, "vkCmdSetRepresentativeFragmentTestEnableNV"); -#endif /* (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_representative_fragment_test)) || (defined(VK_EXT_shader_object) && defined(VK_NV_representative_fragment_test)) */ -#if (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_coverage_reduction_mode)) || (defined(VK_EXT_shader_object) && defined(VK_NV_coverage_reduction_mode)) - vkCmdSetCoverageReductionModeNV = (PFN_vkCmdSetCoverageReductionModeNV)load(context, "vkCmdSetCoverageReductionModeNV"); -#endif /* (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_coverage_reduction_mode)) || (defined(VK_EXT_shader_object) && defined(VK_NV_coverage_reduction_mode)) */ -#if (defined(VK_EXT_full_screen_exclusive) && defined(VK_KHR_device_group)) || (defined(VK_EXT_full_screen_exclusive) && defined(VK_VERSION_1_1)) - vkGetDeviceGroupSurfacePresentModes2EXT = (PFN_vkGetDeviceGroupSurfacePresentModes2EXT)load(context, "vkGetDeviceGroupSurfacePresentModes2EXT"); -#endif /* (defined(VK_EXT_full_screen_exclusive) && defined(VK_KHR_device_group)) || (defined(VK_EXT_full_screen_exclusive) && defined(VK_VERSION_1_1)) */ -#if (defined(VK_EXT_host_image_copy)) || (defined(VK_EXT_image_compression_control)) - vkGetImageSubresourceLayout2EXT = (PFN_vkGetImageSubresourceLayout2EXT)load(context, "vkGetImageSubresourceLayout2EXT"); -#endif /* (defined(VK_EXT_host_image_copy)) || (defined(VK_EXT_image_compression_control)) */ -#if (defined(VK_EXT_shader_object)) || (defined(VK_EXT_vertex_input_dynamic_state)) - vkCmdSetVertexInputEXT = (PFN_vkCmdSetVertexInputEXT)load(context, "vkCmdSetVertexInputEXT"); -#endif /* (defined(VK_EXT_shader_object)) || (defined(VK_EXT_vertex_input_dynamic_state)) */ -#if (defined(VK_KHR_descriptor_update_template) && defined(VK_KHR_push_descriptor)) || (defined(VK_KHR_push_descriptor) && defined(VK_VERSION_1_1)) || (defined(VK_KHR_push_descriptor) && defined(VK_KHR_descriptor_update_template)) - vkCmdPushDescriptorSetWithTemplateKHR = 
(PFN_vkCmdPushDescriptorSetWithTemplateKHR)load(context, "vkCmdPushDescriptorSetWithTemplateKHR"); -#endif /* (defined(VK_KHR_descriptor_update_template) && defined(VK_KHR_push_descriptor)) || (defined(VK_KHR_push_descriptor) && defined(VK_VERSION_1_1)) || (defined(VK_KHR_push_descriptor) && defined(VK_KHR_descriptor_update_template)) */ -#if (defined(VK_KHR_device_group) && defined(VK_KHR_surface)) || (defined(VK_KHR_swapchain) && defined(VK_VERSION_1_1)) - vkGetDeviceGroupPresentCapabilitiesKHR = (PFN_vkGetDeviceGroupPresentCapabilitiesKHR)load(context, "vkGetDeviceGroupPresentCapabilitiesKHR"); - vkGetDeviceGroupSurfacePresentModesKHR = (PFN_vkGetDeviceGroupSurfacePresentModesKHR)load(context, "vkGetDeviceGroupSurfacePresentModesKHR"); -#endif /* (defined(VK_KHR_device_group) && defined(VK_KHR_surface)) || (defined(VK_KHR_swapchain) && defined(VK_VERSION_1_1)) */ -#if (defined(VK_KHR_device_group) && defined(VK_KHR_swapchain)) || (defined(VK_KHR_swapchain) && defined(VK_VERSION_1_1)) - vkAcquireNextImage2KHR = (PFN_vkAcquireNextImage2KHR)load(context, "vkAcquireNextImage2KHR"); -#endif /* (defined(VK_KHR_device_group) && defined(VK_KHR_swapchain)) || (defined(VK_KHR_swapchain) && defined(VK_VERSION_1_1)) */ - /* VOLK_GENERATE_LOAD_DEVICE */ -} - -static void volkGenLoadDeviceTable(struct VolkDeviceTable* table, void* context, PFN_vkVoidFunction (*load)(void*, const char*)) -{ - /* VOLK_GENERATE_LOAD_DEVICE_TABLE */ -#if defined(VK_VERSION_1_0) - table->vkAllocateCommandBuffers = (PFN_vkAllocateCommandBuffers)load(context, "vkAllocateCommandBuffers"); - table->vkAllocateDescriptorSets = (PFN_vkAllocateDescriptorSets)load(context, "vkAllocateDescriptorSets"); - table->vkAllocateMemory = (PFN_vkAllocateMemory)load(context, "vkAllocateMemory"); - table->vkBeginCommandBuffer = (PFN_vkBeginCommandBuffer)load(context, "vkBeginCommandBuffer"); - table->vkBindBufferMemory = (PFN_vkBindBufferMemory)load(context, "vkBindBufferMemory"); - table->vkBindImageMemory = (PFN_vkBindImageMemory)load(context, "vkBindImageMemory"); - table->vkCmdBeginQuery = (PFN_vkCmdBeginQuery)load(context, "vkCmdBeginQuery"); - table->vkCmdBeginRenderPass = (PFN_vkCmdBeginRenderPass)load(context, "vkCmdBeginRenderPass"); - table->vkCmdBindDescriptorSets = (PFN_vkCmdBindDescriptorSets)load(context, "vkCmdBindDescriptorSets"); - table->vkCmdBindIndexBuffer = (PFN_vkCmdBindIndexBuffer)load(context, "vkCmdBindIndexBuffer"); - table->vkCmdBindPipeline = (PFN_vkCmdBindPipeline)load(context, "vkCmdBindPipeline"); - table->vkCmdBindVertexBuffers = (PFN_vkCmdBindVertexBuffers)load(context, "vkCmdBindVertexBuffers"); - table->vkCmdBlitImage = (PFN_vkCmdBlitImage)load(context, "vkCmdBlitImage"); - table->vkCmdClearAttachments = (PFN_vkCmdClearAttachments)load(context, "vkCmdClearAttachments"); - table->vkCmdClearColorImage = (PFN_vkCmdClearColorImage)load(context, "vkCmdClearColorImage"); - table->vkCmdClearDepthStencilImage = (PFN_vkCmdClearDepthStencilImage)load(context, "vkCmdClearDepthStencilImage"); - table->vkCmdCopyBuffer = (PFN_vkCmdCopyBuffer)load(context, "vkCmdCopyBuffer"); - table->vkCmdCopyBufferToImage = (PFN_vkCmdCopyBufferToImage)load(context, "vkCmdCopyBufferToImage"); - table->vkCmdCopyImage = (PFN_vkCmdCopyImage)load(context, "vkCmdCopyImage"); - table->vkCmdCopyImageToBuffer = (PFN_vkCmdCopyImageToBuffer)load(context, "vkCmdCopyImageToBuffer"); - table->vkCmdCopyQueryPoolResults = (PFN_vkCmdCopyQueryPoolResults)load(context, "vkCmdCopyQueryPoolResults"); - table->vkCmdDispatch = (PFN_vkCmdDispatch)load(context, 
"vkCmdDispatch"); - table->vkCmdDispatchIndirect = (PFN_vkCmdDispatchIndirect)load(context, "vkCmdDispatchIndirect"); - table->vkCmdDraw = (PFN_vkCmdDraw)load(context, "vkCmdDraw"); - table->vkCmdDrawIndexed = (PFN_vkCmdDrawIndexed)load(context, "vkCmdDrawIndexed"); - table->vkCmdDrawIndexedIndirect = (PFN_vkCmdDrawIndexedIndirect)load(context, "vkCmdDrawIndexedIndirect"); - table->vkCmdDrawIndirect = (PFN_vkCmdDrawIndirect)load(context, "vkCmdDrawIndirect"); - table->vkCmdEndQuery = (PFN_vkCmdEndQuery)load(context, "vkCmdEndQuery"); - table->vkCmdEndRenderPass = (PFN_vkCmdEndRenderPass)load(context, "vkCmdEndRenderPass"); - table->vkCmdExecuteCommands = (PFN_vkCmdExecuteCommands)load(context, "vkCmdExecuteCommands"); - table->vkCmdFillBuffer = (PFN_vkCmdFillBuffer)load(context, "vkCmdFillBuffer"); - table->vkCmdNextSubpass = (PFN_vkCmdNextSubpass)load(context, "vkCmdNextSubpass"); - table->vkCmdPipelineBarrier = (PFN_vkCmdPipelineBarrier)load(context, "vkCmdPipelineBarrier"); - table->vkCmdPushConstants = (PFN_vkCmdPushConstants)load(context, "vkCmdPushConstants"); - table->vkCmdResetEvent = (PFN_vkCmdResetEvent)load(context, "vkCmdResetEvent"); - table->vkCmdResetQueryPool = (PFN_vkCmdResetQueryPool)load(context, "vkCmdResetQueryPool"); - table->vkCmdResolveImage = (PFN_vkCmdResolveImage)load(context, "vkCmdResolveImage"); - table->vkCmdSetBlendConstants = (PFN_vkCmdSetBlendConstants)load(context, "vkCmdSetBlendConstants"); - table->vkCmdSetDepthBias = (PFN_vkCmdSetDepthBias)load(context, "vkCmdSetDepthBias"); - table->vkCmdSetDepthBounds = (PFN_vkCmdSetDepthBounds)load(context, "vkCmdSetDepthBounds"); - table->vkCmdSetEvent = (PFN_vkCmdSetEvent)load(context, "vkCmdSetEvent"); - table->vkCmdSetLineWidth = (PFN_vkCmdSetLineWidth)load(context, "vkCmdSetLineWidth"); - table->vkCmdSetScissor = (PFN_vkCmdSetScissor)load(context, "vkCmdSetScissor"); - table->vkCmdSetStencilCompareMask = (PFN_vkCmdSetStencilCompareMask)load(context, "vkCmdSetStencilCompareMask"); - table->vkCmdSetStencilReference = (PFN_vkCmdSetStencilReference)load(context, "vkCmdSetStencilReference"); - table->vkCmdSetStencilWriteMask = (PFN_vkCmdSetStencilWriteMask)load(context, "vkCmdSetStencilWriteMask"); - table->vkCmdSetViewport = (PFN_vkCmdSetViewport)load(context, "vkCmdSetViewport"); - table->vkCmdUpdateBuffer = (PFN_vkCmdUpdateBuffer)load(context, "vkCmdUpdateBuffer"); - table->vkCmdWaitEvents = (PFN_vkCmdWaitEvents)load(context, "vkCmdWaitEvents"); - table->vkCmdWriteTimestamp = (PFN_vkCmdWriteTimestamp)load(context, "vkCmdWriteTimestamp"); - table->vkCreateBuffer = (PFN_vkCreateBuffer)load(context, "vkCreateBuffer"); - table->vkCreateBufferView = (PFN_vkCreateBufferView)load(context, "vkCreateBufferView"); - table->vkCreateCommandPool = (PFN_vkCreateCommandPool)load(context, "vkCreateCommandPool"); - table->vkCreateComputePipelines = (PFN_vkCreateComputePipelines)load(context, "vkCreateComputePipelines"); - table->vkCreateDescriptorPool = (PFN_vkCreateDescriptorPool)load(context, "vkCreateDescriptorPool"); - table->vkCreateDescriptorSetLayout = (PFN_vkCreateDescriptorSetLayout)load(context, "vkCreateDescriptorSetLayout"); - table->vkCreateEvent = (PFN_vkCreateEvent)load(context, "vkCreateEvent"); - table->vkCreateFence = (PFN_vkCreateFence)load(context, "vkCreateFence"); - table->vkCreateFramebuffer = (PFN_vkCreateFramebuffer)load(context, "vkCreateFramebuffer"); - table->vkCreateGraphicsPipelines = (PFN_vkCreateGraphicsPipelines)load(context, "vkCreateGraphicsPipelines"); - table->vkCreateImage = 
(PFN_vkCreateImage)load(context, "vkCreateImage"); - table->vkCreateImageView = (PFN_vkCreateImageView)load(context, "vkCreateImageView"); - table->vkCreatePipelineCache = (PFN_vkCreatePipelineCache)load(context, "vkCreatePipelineCache"); - table->vkCreatePipelineLayout = (PFN_vkCreatePipelineLayout)load(context, "vkCreatePipelineLayout"); - table->vkCreateQueryPool = (PFN_vkCreateQueryPool)load(context, "vkCreateQueryPool"); - table->vkCreateRenderPass = (PFN_vkCreateRenderPass)load(context, "vkCreateRenderPass"); - table->vkCreateSampler = (PFN_vkCreateSampler)load(context, "vkCreateSampler"); - table->vkCreateSemaphore = (PFN_vkCreateSemaphore)load(context, "vkCreateSemaphore"); - table->vkCreateShaderModule = (PFN_vkCreateShaderModule)load(context, "vkCreateShaderModule"); - table->vkDestroyBuffer = (PFN_vkDestroyBuffer)load(context, "vkDestroyBuffer"); - table->vkDestroyBufferView = (PFN_vkDestroyBufferView)load(context, "vkDestroyBufferView"); - table->vkDestroyCommandPool = (PFN_vkDestroyCommandPool)load(context, "vkDestroyCommandPool"); - table->vkDestroyDescriptorPool = (PFN_vkDestroyDescriptorPool)load(context, "vkDestroyDescriptorPool"); - table->vkDestroyDescriptorSetLayout = (PFN_vkDestroyDescriptorSetLayout)load(context, "vkDestroyDescriptorSetLayout"); - table->vkDestroyDevice = (PFN_vkDestroyDevice)load(context, "vkDestroyDevice"); - table->vkDestroyEvent = (PFN_vkDestroyEvent)load(context, "vkDestroyEvent"); - table->vkDestroyFence = (PFN_vkDestroyFence)load(context, "vkDestroyFence"); - table->vkDestroyFramebuffer = (PFN_vkDestroyFramebuffer)load(context, "vkDestroyFramebuffer"); - table->vkDestroyImage = (PFN_vkDestroyImage)load(context, "vkDestroyImage"); - table->vkDestroyImageView = (PFN_vkDestroyImageView)load(context, "vkDestroyImageView"); - table->vkDestroyPipeline = (PFN_vkDestroyPipeline)load(context, "vkDestroyPipeline"); - table->vkDestroyPipelineCache = (PFN_vkDestroyPipelineCache)load(context, "vkDestroyPipelineCache"); - table->vkDestroyPipelineLayout = (PFN_vkDestroyPipelineLayout)load(context, "vkDestroyPipelineLayout"); - table->vkDestroyQueryPool = (PFN_vkDestroyQueryPool)load(context, "vkDestroyQueryPool"); - table->vkDestroyRenderPass = (PFN_vkDestroyRenderPass)load(context, "vkDestroyRenderPass"); - table->vkDestroySampler = (PFN_vkDestroySampler)load(context, "vkDestroySampler"); - table->vkDestroySemaphore = (PFN_vkDestroySemaphore)load(context, "vkDestroySemaphore"); - table->vkDestroyShaderModule = (PFN_vkDestroyShaderModule)load(context, "vkDestroyShaderModule"); - table->vkDeviceWaitIdle = (PFN_vkDeviceWaitIdle)load(context, "vkDeviceWaitIdle"); - table->vkEndCommandBuffer = (PFN_vkEndCommandBuffer)load(context, "vkEndCommandBuffer"); - table->vkFlushMappedMemoryRanges = (PFN_vkFlushMappedMemoryRanges)load(context, "vkFlushMappedMemoryRanges"); - table->vkFreeCommandBuffers = (PFN_vkFreeCommandBuffers)load(context, "vkFreeCommandBuffers"); - table->vkFreeDescriptorSets = (PFN_vkFreeDescriptorSets)load(context, "vkFreeDescriptorSets"); - table->vkFreeMemory = (PFN_vkFreeMemory)load(context, "vkFreeMemory"); - table->vkGetBufferMemoryRequirements = (PFN_vkGetBufferMemoryRequirements)load(context, "vkGetBufferMemoryRequirements"); - table->vkGetDeviceMemoryCommitment = (PFN_vkGetDeviceMemoryCommitment)load(context, "vkGetDeviceMemoryCommitment"); - table->vkGetDeviceQueue = (PFN_vkGetDeviceQueue)load(context, "vkGetDeviceQueue"); - table->vkGetEventStatus = (PFN_vkGetEventStatus)load(context, "vkGetEventStatus"); - table->vkGetFenceStatus = 
(PFN_vkGetFenceStatus)load(context, "vkGetFenceStatus"); - table->vkGetImageMemoryRequirements = (PFN_vkGetImageMemoryRequirements)load(context, "vkGetImageMemoryRequirements"); - table->vkGetImageSparseMemoryRequirements = (PFN_vkGetImageSparseMemoryRequirements)load(context, "vkGetImageSparseMemoryRequirements"); - table->vkGetImageSubresourceLayout = (PFN_vkGetImageSubresourceLayout)load(context, "vkGetImageSubresourceLayout"); - table->vkGetPipelineCacheData = (PFN_vkGetPipelineCacheData)load(context, "vkGetPipelineCacheData"); - table->vkGetQueryPoolResults = (PFN_vkGetQueryPoolResults)load(context, "vkGetQueryPoolResults"); - table->vkGetRenderAreaGranularity = (PFN_vkGetRenderAreaGranularity)load(context, "vkGetRenderAreaGranularity"); - table->vkInvalidateMappedMemoryRanges = (PFN_vkInvalidateMappedMemoryRanges)load(context, "vkInvalidateMappedMemoryRanges"); - table->vkMapMemory = (PFN_vkMapMemory)load(context, "vkMapMemory"); - table->vkMergePipelineCaches = (PFN_vkMergePipelineCaches)load(context, "vkMergePipelineCaches"); - table->vkQueueBindSparse = (PFN_vkQueueBindSparse)load(context, "vkQueueBindSparse"); - table->vkQueueSubmit = (PFN_vkQueueSubmit)load(context, "vkQueueSubmit"); - table->vkQueueWaitIdle = (PFN_vkQueueWaitIdle)load(context, "vkQueueWaitIdle"); - table->vkResetCommandBuffer = (PFN_vkResetCommandBuffer)load(context, "vkResetCommandBuffer"); - table->vkResetCommandPool = (PFN_vkResetCommandPool)load(context, "vkResetCommandPool"); - table->vkResetDescriptorPool = (PFN_vkResetDescriptorPool)load(context, "vkResetDescriptorPool"); - table->vkResetEvent = (PFN_vkResetEvent)load(context, "vkResetEvent"); - table->vkResetFences = (PFN_vkResetFences)load(context, "vkResetFences"); - table->vkSetEvent = (PFN_vkSetEvent)load(context, "vkSetEvent"); - table->vkUnmapMemory = (PFN_vkUnmapMemory)load(context, "vkUnmapMemory"); - table->vkUpdateDescriptorSets = (PFN_vkUpdateDescriptorSets)load(context, "vkUpdateDescriptorSets"); - table->vkWaitForFences = (PFN_vkWaitForFences)load(context, "vkWaitForFences"); -#endif /* defined(VK_VERSION_1_0) */ -#if defined(VK_VERSION_1_1) - table->vkBindBufferMemory2 = (PFN_vkBindBufferMemory2)load(context, "vkBindBufferMemory2"); - table->vkBindImageMemory2 = (PFN_vkBindImageMemory2)load(context, "vkBindImageMemory2"); - table->vkCmdDispatchBase = (PFN_vkCmdDispatchBase)load(context, "vkCmdDispatchBase"); - table->vkCmdSetDeviceMask = (PFN_vkCmdSetDeviceMask)load(context, "vkCmdSetDeviceMask"); - table->vkCreateDescriptorUpdateTemplate = (PFN_vkCreateDescriptorUpdateTemplate)load(context, "vkCreateDescriptorUpdateTemplate"); - table->vkCreateSamplerYcbcrConversion = (PFN_vkCreateSamplerYcbcrConversion)load(context, "vkCreateSamplerYcbcrConversion"); - table->vkDestroyDescriptorUpdateTemplate = (PFN_vkDestroyDescriptorUpdateTemplate)load(context, "vkDestroyDescriptorUpdateTemplate"); - table->vkDestroySamplerYcbcrConversion = (PFN_vkDestroySamplerYcbcrConversion)load(context, "vkDestroySamplerYcbcrConversion"); - table->vkGetBufferMemoryRequirements2 = (PFN_vkGetBufferMemoryRequirements2)load(context, "vkGetBufferMemoryRequirements2"); - table->vkGetDescriptorSetLayoutSupport = (PFN_vkGetDescriptorSetLayoutSupport)load(context, "vkGetDescriptorSetLayoutSupport"); - table->vkGetDeviceGroupPeerMemoryFeatures = (PFN_vkGetDeviceGroupPeerMemoryFeatures)load(context, "vkGetDeviceGroupPeerMemoryFeatures"); - table->vkGetDeviceQueue2 = (PFN_vkGetDeviceQueue2)load(context, "vkGetDeviceQueue2"); - table->vkGetImageMemoryRequirements2 = 
(PFN_vkGetImageMemoryRequirements2)load(context, "vkGetImageMemoryRequirements2"); - table->vkGetImageSparseMemoryRequirements2 = (PFN_vkGetImageSparseMemoryRequirements2)load(context, "vkGetImageSparseMemoryRequirements2"); - table->vkTrimCommandPool = (PFN_vkTrimCommandPool)load(context, "vkTrimCommandPool"); - table->vkUpdateDescriptorSetWithTemplate = (PFN_vkUpdateDescriptorSetWithTemplate)load(context, "vkUpdateDescriptorSetWithTemplate"); -#endif /* defined(VK_VERSION_1_1) */ -#if defined(VK_VERSION_1_2) - table->vkCmdBeginRenderPass2 = (PFN_vkCmdBeginRenderPass2)load(context, "vkCmdBeginRenderPass2"); - table->vkCmdDrawIndexedIndirectCount = (PFN_vkCmdDrawIndexedIndirectCount)load(context, "vkCmdDrawIndexedIndirectCount"); - table->vkCmdDrawIndirectCount = (PFN_vkCmdDrawIndirectCount)load(context, "vkCmdDrawIndirectCount"); - table->vkCmdEndRenderPass2 = (PFN_vkCmdEndRenderPass2)load(context, "vkCmdEndRenderPass2"); - table->vkCmdNextSubpass2 = (PFN_vkCmdNextSubpass2)load(context, "vkCmdNextSubpass2"); - table->vkCreateRenderPass2 = (PFN_vkCreateRenderPass2)load(context, "vkCreateRenderPass2"); - table->vkGetBufferDeviceAddress = (PFN_vkGetBufferDeviceAddress)load(context, "vkGetBufferDeviceAddress"); - table->vkGetBufferOpaqueCaptureAddress = (PFN_vkGetBufferOpaqueCaptureAddress)load(context, "vkGetBufferOpaqueCaptureAddress"); - table->vkGetDeviceMemoryOpaqueCaptureAddress = (PFN_vkGetDeviceMemoryOpaqueCaptureAddress)load(context, "vkGetDeviceMemoryOpaqueCaptureAddress"); - table->vkGetSemaphoreCounterValue = (PFN_vkGetSemaphoreCounterValue)load(context, "vkGetSemaphoreCounterValue"); - table->vkResetQueryPool = (PFN_vkResetQueryPool)load(context, "vkResetQueryPool"); - table->vkSignalSemaphore = (PFN_vkSignalSemaphore)load(context, "vkSignalSemaphore"); - table->vkWaitSemaphores = (PFN_vkWaitSemaphores)load(context, "vkWaitSemaphores"); -#endif /* defined(VK_VERSION_1_2) */ -#if defined(VK_VERSION_1_3) - table->vkCmdBeginRendering = (PFN_vkCmdBeginRendering)load(context, "vkCmdBeginRendering"); - table->vkCmdBindVertexBuffers2 = (PFN_vkCmdBindVertexBuffers2)load(context, "vkCmdBindVertexBuffers2"); - table->vkCmdBlitImage2 = (PFN_vkCmdBlitImage2)load(context, "vkCmdBlitImage2"); - table->vkCmdCopyBuffer2 = (PFN_vkCmdCopyBuffer2)load(context, "vkCmdCopyBuffer2"); - table->vkCmdCopyBufferToImage2 = (PFN_vkCmdCopyBufferToImage2)load(context, "vkCmdCopyBufferToImage2"); - table->vkCmdCopyImage2 = (PFN_vkCmdCopyImage2)load(context, "vkCmdCopyImage2"); - table->vkCmdCopyImageToBuffer2 = (PFN_vkCmdCopyImageToBuffer2)load(context, "vkCmdCopyImageToBuffer2"); - table->vkCmdEndRendering = (PFN_vkCmdEndRendering)load(context, "vkCmdEndRendering"); - table->vkCmdPipelineBarrier2 = (PFN_vkCmdPipelineBarrier2)load(context, "vkCmdPipelineBarrier2"); - table->vkCmdResetEvent2 = (PFN_vkCmdResetEvent2)load(context, "vkCmdResetEvent2"); - table->vkCmdResolveImage2 = (PFN_vkCmdResolveImage2)load(context, "vkCmdResolveImage2"); - table->vkCmdSetCullMode = (PFN_vkCmdSetCullMode)load(context, "vkCmdSetCullMode"); - table->vkCmdSetDepthBiasEnable = (PFN_vkCmdSetDepthBiasEnable)load(context, "vkCmdSetDepthBiasEnable"); - table->vkCmdSetDepthBoundsTestEnable = (PFN_vkCmdSetDepthBoundsTestEnable)load(context, "vkCmdSetDepthBoundsTestEnable"); - table->vkCmdSetDepthCompareOp = (PFN_vkCmdSetDepthCompareOp)load(context, "vkCmdSetDepthCompareOp"); - table->vkCmdSetDepthTestEnable = (PFN_vkCmdSetDepthTestEnable)load(context, "vkCmdSetDepthTestEnable"); - table->vkCmdSetDepthWriteEnable = 
(PFN_vkCmdSetDepthWriteEnable)load(context, "vkCmdSetDepthWriteEnable"); - table->vkCmdSetEvent2 = (PFN_vkCmdSetEvent2)load(context, "vkCmdSetEvent2"); - table->vkCmdSetFrontFace = (PFN_vkCmdSetFrontFace)load(context, "vkCmdSetFrontFace"); - table->vkCmdSetPrimitiveRestartEnable = (PFN_vkCmdSetPrimitiveRestartEnable)load(context, "vkCmdSetPrimitiveRestartEnable"); - table->vkCmdSetPrimitiveTopology = (PFN_vkCmdSetPrimitiveTopology)load(context, "vkCmdSetPrimitiveTopology"); - table->vkCmdSetRasterizerDiscardEnable = (PFN_vkCmdSetRasterizerDiscardEnable)load(context, "vkCmdSetRasterizerDiscardEnable"); - table->vkCmdSetScissorWithCount = (PFN_vkCmdSetScissorWithCount)load(context, "vkCmdSetScissorWithCount"); - table->vkCmdSetStencilOp = (PFN_vkCmdSetStencilOp)load(context, "vkCmdSetStencilOp"); - table->vkCmdSetStencilTestEnable = (PFN_vkCmdSetStencilTestEnable)load(context, "vkCmdSetStencilTestEnable"); - table->vkCmdSetViewportWithCount = (PFN_vkCmdSetViewportWithCount)load(context, "vkCmdSetViewportWithCount"); - table->vkCmdWaitEvents2 = (PFN_vkCmdWaitEvents2)load(context, "vkCmdWaitEvents2"); - table->vkCmdWriteTimestamp2 = (PFN_vkCmdWriteTimestamp2)load(context, "vkCmdWriteTimestamp2"); - table->vkCreatePrivateDataSlot = (PFN_vkCreatePrivateDataSlot)load(context, "vkCreatePrivateDataSlot"); - table->vkDestroyPrivateDataSlot = (PFN_vkDestroyPrivateDataSlot)load(context, "vkDestroyPrivateDataSlot"); - table->vkGetDeviceBufferMemoryRequirements = (PFN_vkGetDeviceBufferMemoryRequirements)load(context, "vkGetDeviceBufferMemoryRequirements"); - table->vkGetDeviceImageMemoryRequirements = (PFN_vkGetDeviceImageMemoryRequirements)load(context, "vkGetDeviceImageMemoryRequirements"); - table->vkGetDeviceImageSparseMemoryRequirements = (PFN_vkGetDeviceImageSparseMemoryRequirements)load(context, "vkGetDeviceImageSparseMemoryRequirements"); - table->vkGetPrivateData = (PFN_vkGetPrivateData)load(context, "vkGetPrivateData"); - table->vkQueueSubmit2 = (PFN_vkQueueSubmit2)load(context, "vkQueueSubmit2"); - table->vkSetPrivateData = (PFN_vkSetPrivateData)load(context, "vkSetPrivateData"); -#endif /* defined(VK_VERSION_1_3) */ -#if defined(VK_AMDX_shader_enqueue) - table->vkCmdDispatchGraphAMDX = (PFN_vkCmdDispatchGraphAMDX)load(context, "vkCmdDispatchGraphAMDX"); - table->vkCmdDispatchGraphIndirectAMDX = (PFN_vkCmdDispatchGraphIndirectAMDX)load(context, "vkCmdDispatchGraphIndirectAMDX"); - table->vkCmdDispatchGraphIndirectCountAMDX = (PFN_vkCmdDispatchGraphIndirectCountAMDX)load(context, "vkCmdDispatchGraphIndirectCountAMDX"); - table->vkCmdInitializeGraphScratchMemoryAMDX = (PFN_vkCmdInitializeGraphScratchMemoryAMDX)load(context, "vkCmdInitializeGraphScratchMemoryAMDX"); - table->vkCreateExecutionGraphPipelinesAMDX = (PFN_vkCreateExecutionGraphPipelinesAMDX)load(context, "vkCreateExecutionGraphPipelinesAMDX"); - table->vkGetExecutionGraphPipelineNodeIndexAMDX = (PFN_vkGetExecutionGraphPipelineNodeIndexAMDX)load(context, "vkGetExecutionGraphPipelineNodeIndexAMDX"); - table->vkGetExecutionGraphPipelineScratchSizeAMDX = (PFN_vkGetExecutionGraphPipelineScratchSizeAMDX)load(context, "vkGetExecutionGraphPipelineScratchSizeAMDX"); -#endif /* defined(VK_AMDX_shader_enqueue) */ -#if defined(VK_AMD_buffer_marker) - table->vkCmdWriteBufferMarkerAMD = (PFN_vkCmdWriteBufferMarkerAMD)load(context, "vkCmdWriteBufferMarkerAMD"); -#endif /* defined(VK_AMD_buffer_marker) */ -#if defined(VK_AMD_display_native_hdr) - table->vkSetLocalDimmingAMD = (PFN_vkSetLocalDimmingAMD)load(context, "vkSetLocalDimmingAMD"); -#endif 
/* defined(VK_AMD_display_native_hdr) */ -#if defined(VK_AMD_draw_indirect_count) - table->vkCmdDrawIndexedIndirectCountAMD = (PFN_vkCmdDrawIndexedIndirectCountAMD)load(context, "vkCmdDrawIndexedIndirectCountAMD"); - table->vkCmdDrawIndirectCountAMD = (PFN_vkCmdDrawIndirectCountAMD)load(context, "vkCmdDrawIndirectCountAMD"); -#endif /* defined(VK_AMD_draw_indirect_count) */ -#if defined(VK_AMD_shader_info) - table->vkGetShaderInfoAMD = (PFN_vkGetShaderInfoAMD)load(context, "vkGetShaderInfoAMD"); -#endif /* defined(VK_AMD_shader_info) */ -#if defined(VK_ANDROID_external_memory_android_hardware_buffer) - table->vkGetAndroidHardwareBufferPropertiesANDROID = (PFN_vkGetAndroidHardwareBufferPropertiesANDROID)load(context, "vkGetAndroidHardwareBufferPropertiesANDROID"); - table->vkGetMemoryAndroidHardwareBufferANDROID = (PFN_vkGetMemoryAndroidHardwareBufferANDROID)load(context, "vkGetMemoryAndroidHardwareBufferANDROID"); -#endif /* defined(VK_ANDROID_external_memory_android_hardware_buffer) */ -#if defined(VK_EXT_attachment_feedback_loop_dynamic_state) - table->vkCmdSetAttachmentFeedbackLoopEnableEXT = (PFN_vkCmdSetAttachmentFeedbackLoopEnableEXT)load(context, "vkCmdSetAttachmentFeedbackLoopEnableEXT"); -#endif /* defined(VK_EXT_attachment_feedback_loop_dynamic_state) */ -#if defined(VK_EXT_buffer_device_address) - table->vkGetBufferDeviceAddressEXT = (PFN_vkGetBufferDeviceAddressEXT)load(context, "vkGetBufferDeviceAddressEXT"); -#endif /* defined(VK_EXT_buffer_device_address) */ -#if defined(VK_EXT_calibrated_timestamps) - table->vkGetCalibratedTimestampsEXT = (PFN_vkGetCalibratedTimestampsEXT)load(context, "vkGetCalibratedTimestampsEXT"); -#endif /* defined(VK_EXT_calibrated_timestamps) */ -#if defined(VK_EXT_color_write_enable) - table->vkCmdSetColorWriteEnableEXT = (PFN_vkCmdSetColorWriteEnableEXT)load(context, "vkCmdSetColorWriteEnableEXT"); -#endif /* defined(VK_EXT_color_write_enable) */ -#if defined(VK_EXT_conditional_rendering) - table->vkCmdBeginConditionalRenderingEXT = (PFN_vkCmdBeginConditionalRenderingEXT)load(context, "vkCmdBeginConditionalRenderingEXT"); - table->vkCmdEndConditionalRenderingEXT = (PFN_vkCmdEndConditionalRenderingEXT)load(context, "vkCmdEndConditionalRenderingEXT"); -#endif /* defined(VK_EXT_conditional_rendering) */ -#if defined(VK_EXT_debug_marker) - table->vkCmdDebugMarkerBeginEXT = (PFN_vkCmdDebugMarkerBeginEXT)load(context, "vkCmdDebugMarkerBeginEXT"); - table->vkCmdDebugMarkerEndEXT = (PFN_vkCmdDebugMarkerEndEXT)load(context, "vkCmdDebugMarkerEndEXT"); - table->vkCmdDebugMarkerInsertEXT = (PFN_vkCmdDebugMarkerInsertEXT)load(context, "vkCmdDebugMarkerInsertEXT"); - table->vkDebugMarkerSetObjectNameEXT = (PFN_vkDebugMarkerSetObjectNameEXT)load(context, "vkDebugMarkerSetObjectNameEXT"); - table->vkDebugMarkerSetObjectTagEXT = (PFN_vkDebugMarkerSetObjectTagEXT)load(context, "vkDebugMarkerSetObjectTagEXT"); -#endif /* defined(VK_EXT_debug_marker) */ -#if defined(VK_EXT_depth_bias_control) - table->vkCmdSetDepthBias2EXT = (PFN_vkCmdSetDepthBias2EXT)load(context, "vkCmdSetDepthBias2EXT"); -#endif /* defined(VK_EXT_depth_bias_control) */ -#if defined(VK_EXT_descriptor_buffer) - table->vkCmdBindDescriptorBufferEmbeddedSamplersEXT = (PFN_vkCmdBindDescriptorBufferEmbeddedSamplersEXT)load(context, "vkCmdBindDescriptorBufferEmbeddedSamplersEXT"); - table->vkCmdBindDescriptorBuffersEXT = (PFN_vkCmdBindDescriptorBuffersEXT)load(context, "vkCmdBindDescriptorBuffersEXT"); - table->vkCmdSetDescriptorBufferOffsetsEXT = (PFN_vkCmdSetDescriptorBufferOffsetsEXT)load(context, 
"vkCmdSetDescriptorBufferOffsetsEXT"); - table->vkGetBufferOpaqueCaptureDescriptorDataEXT = (PFN_vkGetBufferOpaqueCaptureDescriptorDataEXT)load(context, "vkGetBufferOpaqueCaptureDescriptorDataEXT"); - table->vkGetDescriptorEXT = (PFN_vkGetDescriptorEXT)load(context, "vkGetDescriptorEXT"); - table->vkGetDescriptorSetLayoutBindingOffsetEXT = (PFN_vkGetDescriptorSetLayoutBindingOffsetEXT)load(context, "vkGetDescriptorSetLayoutBindingOffsetEXT"); - table->vkGetDescriptorSetLayoutSizeEXT = (PFN_vkGetDescriptorSetLayoutSizeEXT)load(context, "vkGetDescriptorSetLayoutSizeEXT"); - table->vkGetImageOpaqueCaptureDescriptorDataEXT = (PFN_vkGetImageOpaqueCaptureDescriptorDataEXT)load(context, "vkGetImageOpaqueCaptureDescriptorDataEXT"); - table->vkGetImageViewOpaqueCaptureDescriptorDataEXT = (PFN_vkGetImageViewOpaqueCaptureDescriptorDataEXT)load(context, "vkGetImageViewOpaqueCaptureDescriptorDataEXT"); - table->vkGetSamplerOpaqueCaptureDescriptorDataEXT = (PFN_vkGetSamplerOpaqueCaptureDescriptorDataEXT)load(context, "vkGetSamplerOpaqueCaptureDescriptorDataEXT"); -#endif /* defined(VK_EXT_descriptor_buffer) */ -#if defined(VK_EXT_descriptor_buffer) && (defined(VK_KHR_acceleration_structure) || defined(VK_NV_ray_tracing)) - table->vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT = (PFN_vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT)load(context, "vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT"); -#endif /* defined(VK_EXT_descriptor_buffer) && (defined(VK_KHR_acceleration_structure) || defined(VK_NV_ray_tracing)) */ -#if defined(VK_EXT_device_fault) - table->vkGetDeviceFaultInfoEXT = (PFN_vkGetDeviceFaultInfoEXT)load(context, "vkGetDeviceFaultInfoEXT"); -#endif /* defined(VK_EXT_device_fault) */ -#if defined(VK_EXT_discard_rectangles) - table->vkCmdSetDiscardRectangleEXT = (PFN_vkCmdSetDiscardRectangleEXT)load(context, "vkCmdSetDiscardRectangleEXT"); -#endif /* defined(VK_EXT_discard_rectangles) */ -#if defined(VK_EXT_discard_rectangles) && VK_EXT_DISCARD_RECTANGLES_SPEC_VERSION >= 2 - table->vkCmdSetDiscardRectangleEnableEXT = (PFN_vkCmdSetDiscardRectangleEnableEXT)load(context, "vkCmdSetDiscardRectangleEnableEXT"); - table->vkCmdSetDiscardRectangleModeEXT = (PFN_vkCmdSetDiscardRectangleModeEXT)load(context, "vkCmdSetDiscardRectangleModeEXT"); -#endif /* defined(VK_EXT_discard_rectangles) && VK_EXT_DISCARD_RECTANGLES_SPEC_VERSION >= 2 */ -#if defined(VK_EXT_display_control) - table->vkDisplayPowerControlEXT = (PFN_vkDisplayPowerControlEXT)load(context, "vkDisplayPowerControlEXT"); - table->vkGetSwapchainCounterEXT = (PFN_vkGetSwapchainCounterEXT)load(context, "vkGetSwapchainCounterEXT"); - table->vkRegisterDeviceEventEXT = (PFN_vkRegisterDeviceEventEXT)load(context, "vkRegisterDeviceEventEXT"); - table->vkRegisterDisplayEventEXT = (PFN_vkRegisterDisplayEventEXT)load(context, "vkRegisterDisplayEventEXT"); -#endif /* defined(VK_EXT_display_control) */ -#if defined(VK_EXT_external_memory_host) - table->vkGetMemoryHostPointerPropertiesEXT = (PFN_vkGetMemoryHostPointerPropertiesEXT)load(context, "vkGetMemoryHostPointerPropertiesEXT"); -#endif /* defined(VK_EXT_external_memory_host) */ -#if defined(VK_EXT_full_screen_exclusive) - table->vkAcquireFullScreenExclusiveModeEXT = (PFN_vkAcquireFullScreenExclusiveModeEXT)load(context, "vkAcquireFullScreenExclusiveModeEXT"); - table->vkReleaseFullScreenExclusiveModeEXT = (PFN_vkReleaseFullScreenExclusiveModeEXT)load(context, "vkReleaseFullScreenExclusiveModeEXT"); -#endif /* defined(VK_EXT_full_screen_exclusive) */ -#if 
defined(VK_EXT_hdr_metadata) - table->vkSetHdrMetadataEXT = (PFN_vkSetHdrMetadataEXT)load(context, "vkSetHdrMetadataEXT"); -#endif /* defined(VK_EXT_hdr_metadata) */ -#if defined(VK_EXT_host_image_copy) - table->vkCopyImageToImageEXT = (PFN_vkCopyImageToImageEXT)load(context, "vkCopyImageToImageEXT"); - table->vkCopyImageToMemoryEXT = (PFN_vkCopyImageToMemoryEXT)load(context, "vkCopyImageToMemoryEXT"); - table->vkCopyMemoryToImageEXT = (PFN_vkCopyMemoryToImageEXT)load(context, "vkCopyMemoryToImageEXT"); - table->vkTransitionImageLayoutEXT = (PFN_vkTransitionImageLayoutEXT)load(context, "vkTransitionImageLayoutEXT"); -#endif /* defined(VK_EXT_host_image_copy) */ -#if defined(VK_EXT_host_query_reset) - table->vkResetQueryPoolEXT = (PFN_vkResetQueryPoolEXT)load(context, "vkResetQueryPoolEXT"); -#endif /* defined(VK_EXT_host_query_reset) */ -#if defined(VK_EXT_image_drm_format_modifier) - table->vkGetImageDrmFormatModifierPropertiesEXT = (PFN_vkGetImageDrmFormatModifierPropertiesEXT)load(context, "vkGetImageDrmFormatModifierPropertiesEXT"); -#endif /* defined(VK_EXT_image_drm_format_modifier) */ -#if defined(VK_EXT_line_rasterization) - table->vkCmdSetLineStippleEXT = (PFN_vkCmdSetLineStippleEXT)load(context, "vkCmdSetLineStippleEXT"); -#endif /* defined(VK_EXT_line_rasterization) */ -#if defined(VK_EXT_mesh_shader) - table->vkCmdDrawMeshTasksEXT = (PFN_vkCmdDrawMeshTasksEXT)load(context, "vkCmdDrawMeshTasksEXT"); - table->vkCmdDrawMeshTasksIndirectCountEXT = (PFN_vkCmdDrawMeshTasksIndirectCountEXT)load(context, "vkCmdDrawMeshTasksIndirectCountEXT"); - table->vkCmdDrawMeshTasksIndirectEXT = (PFN_vkCmdDrawMeshTasksIndirectEXT)load(context, "vkCmdDrawMeshTasksIndirectEXT"); -#endif /* defined(VK_EXT_mesh_shader) */ -#if defined(VK_EXT_metal_objects) - table->vkExportMetalObjectsEXT = (PFN_vkExportMetalObjectsEXT)load(context, "vkExportMetalObjectsEXT"); -#endif /* defined(VK_EXT_metal_objects) */ -#if defined(VK_EXT_multi_draw) - table->vkCmdDrawMultiEXT = (PFN_vkCmdDrawMultiEXT)load(context, "vkCmdDrawMultiEXT"); - table->vkCmdDrawMultiIndexedEXT = (PFN_vkCmdDrawMultiIndexedEXT)load(context, "vkCmdDrawMultiIndexedEXT"); -#endif /* defined(VK_EXT_multi_draw) */ -#if defined(VK_EXT_opacity_micromap) - table->vkBuildMicromapsEXT = (PFN_vkBuildMicromapsEXT)load(context, "vkBuildMicromapsEXT"); - table->vkCmdBuildMicromapsEXT = (PFN_vkCmdBuildMicromapsEXT)load(context, "vkCmdBuildMicromapsEXT"); - table->vkCmdCopyMemoryToMicromapEXT = (PFN_vkCmdCopyMemoryToMicromapEXT)load(context, "vkCmdCopyMemoryToMicromapEXT"); - table->vkCmdCopyMicromapEXT = (PFN_vkCmdCopyMicromapEXT)load(context, "vkCmdCopyMicromapEXT"); - table->vkCmdCopyMicromapToMemoryEXT = (PFN_vkCmdCopyMicromapToMemoryEXT)load(context, "vkCmdCopyMicromapToMemoryEXT"); - table->vkCmdWriteMicromapsPropertiesEXT = (PFN_vkCmdWriteMicromapsPropertiesEXT)load(context, "vkCmdWriteMicromapsPropertiesEXT"); - table->vkCopyMemoryToMicromapEXT = (PFN_vkCopyMemoryToMicromapEXT)load(context, "vkCopyMemoryToMicromapEXT"); - table->vkCopyMicromapEXT = (PFN_vkCopyMicromapEXT)load(context, "vkCopyMicromapEXT"); - table->vkCopyMicromapToMemoryEXT = (PFN_vkCopyMicromapToMemoryEXT)load(context, "vkCopyMicromapToMemoryEXT"); - table->vkCreateMicromapEXT = (PFN_vkCreateMicromapEXT)load(context, "vkCreateMicromapEXT"); - table->vkDestroyMicromapEXT = (PFN_vkDestroyMicromapEXT)load(context, "vkDestroyMicromapEXT"); - table->vkGetDeviceMicromapCompatibilityEXT = (PFN_vkGetDeviceMicromapCompatibilityEXT)load(context, "vkGetDeviceMicromapCompatibilityEXT"); - 
table->vkGetMicromapBuildSizesEXT = (PFN_vkGetMicromapBuildSizesEXT)load(context, "vkGetMicromapBuildSizesEXT"); - table->vkWriteMicromapsPropertiesEXT = (PFN_vkWriteMicromapsPropertiesEXT)load(context, "vkWriteMicromapsPropertiesEXT"); -#endif /* defined(VK_EXT_opacity_micromap) */ -#if defined(VK_EXT_pageable_device_local_memory) - table->vkSetDeviceMemoryPriorityEXT = (PFN_vkSetDeviceMemoryPriorityEXT)load(context, "vkSetDeviceMemoryPriorityEXT"); -#endif /* defined(VK_EXT_pageable_device_local_memory) */ -#if defined(VK_EXT_pipeline_properties) - table->vkGetPipelinePropertiesEXT = (PFN_vkGetPipelinePropertiesEXT)load(context, "vkGetPipelinePropertiesEXT"); -#endif /* defined(VK_EXT_pipeline_properties) */ -#if defined(VK_EXT_private_data) - table->vkCreatePrivateDataSlotEXT = (PFN_vkCreatePrivateDataSlotEXT)load(context, "vkCreatePrivateDataSlotEXT"); - table->vkDestroyPrivateDataSlotEXT = (PFN_vkDestroyPrivateDataSlotEXT)load(context, "vkDestroyPrivateDataSlotEXT"); - table->vkGetPrivateDataEXT = (PFN_vkGetPrivateDataEXT)load(context, "vkGetPrivateDataEXT"); - table->vkSetPrivateDataEXT = (PFN_vkSetPrivateDataEXT)load(context, "vkSetPrivateDataEXT"); -#endif /* defined(VK_EXT_private_data) */ -#if defined(VK_EXT_sample_locations) - table->vkCmdSetSampleLocationsEXT = (PFN_vkCmdSetSampleLocationsEXT)load(context, "vkCmdSetSampleLocationsEXT"); -#endif /* defined(VK_EXT_sample_locations) */ -#if defined(VK_EXT_shader_module_identifier) - table->vkGetShaderModuleCreateInfoIdentifierEXT = (PFN_vkGetShaderModuleCreateInfoIdentifierEXT)load(context, "vkGetShaderModuleCreateInfoIdentifierEXT"); - table->vkGetShaderModuleIdentifierEXT = (PFN_vkGetShaderModuleIdentifierEXT)load(context, "vkGetShaderModuleIdentifierEXT"); -#endif /* defined(VK_EXT_shader_module_identifier) */ -#if defined(VK_EXT_shader_object) - table->vkCmdBindShadersEXT = (PFN_vkCmdBindShadersEXT)load(context, "vkCmdBindShadersEXT"); - table->vkCreateShadersEXT = (PFN_vkCreateShadersEXT)load(context, "vkCreateShadersEXT"); - table->vkDestroyShaderEXT = (PFN_vkDestroyShaderEXT)load(context, "vkDestroyShaderEXT"); - table->vkGetShaderBinaryDataEXT = (PFN_vkGetShaderBinaryDataEXT)load(context, "vkGetShaderBinaryDataEXT"); -#endif /* defined(VK_EXT_shader_object) */ -#if defined(VK_EXT_swapchain_maintenance1) - table->vkReleaseSwapchainImagesEXT = (PFN_vkReleaseSwapchainImagesEXT)load(context, "vkReleaseSwapchainImagesEXT"); -#endif /* defined(VK_EXT_swapchain_maintenance1) */ -#if defined(VK_EXT_transform_feedback) - table->vkCmdBeginQueryIndexedEXT = (PFN_vkCmdBeginQueryIndexedEXT)load(context, "vkCmdBeginQueryIndexedEXT"); - table->vkCmdBeginTransformFeedbackEXT = (PFN_vkCmdBeginTransformFeedbackEXT)load(context, "vkCmdBeginTransformFeedbackEXT"); - table->vkCmdBindTransformFeedbackBuffersEXT = (PFN_vkCmdBindTransformFeedbackBuffersEXT)load(context, "vkCmdBindTransformFeedbackBuffersEXT"); - table->vkCmdDrawIndirectByteCountEXT = (PFN_vkCmdDrawIndirectByteCountEXT)load(context, "vkCmdDrawIndirectByteCountEXT"); - table->vkCmdEndQueryIndexedEXT = (PFN_vkCmdEndQueryIndexedEXT)load(context, "vkCmdEndQueryIndexedEXT"); - table->vkCmdEndTransformFeedbackEXT = (PFN_vkCmdEndTransformFeedbackEXT)load(context, "vkCmdEndTransformFeedbackEXT"); -#endif /* defined(VK_EXT_transform_feedback) */ -#if defined(VK_EXT_validation_cache) - table->vkCreateValidationCacheEXT = (PFN_vkCreateValidationCacheEXT)load(context, "vkCreateValidationCacheEXT"); - table->vkDestroyValidationCacheEXT = (PFN_vkDestroyValidationCacheEXT)load(context, 
"vkDestroyValidationCacheEXT"); - table->vkGetValidationCacheDataEXT = (PFN_vkGetValidationCacheDataEXT)load(context, "vkGetValidationCacheDataEXT"); - table->vkMergeValidationCachesEXT = (PFN_vkMergeValidationCachesEXT)load(context, "vkMergeValidationCachesEXT"); -#endif /* defined(VK_EXT_validation_cache) */ -#if defined(VK_FUCHSIA_buffer_collection) - table->vkCreateBufferCollectionFUCHSIA = (PFN_vkCreateBufferCollectionFUCHSIA)load(context, "vkCreateBufferCollectionFUCHSIA"); - table->vkDestroyBufferCollectionFUCHSIA = (PFN_vkDestroyBufferCollectionFUCHSIA)load(context, "vkDestroyBufferCollectionFUCHSIA"); - table->vkGetBufferCollectionPropertiesFUCHSIA = (PFN_vkGetBufferCollectionPropertiesFUCHSIA)load(context, "vkGetBufferCollectionPropertiesFUCHSIA"); - table->vkSetBufferCollectionBufferConstraintsFUCHSIA = (PFN_vkSetBufferCollectionBufferConstraintsFUCHSIA)load(context, "vkSetBufferCollectionBufferConstraintsFUCHSIA"); - table->vkSetBufferCollectionImageConstraintsFUCHSIA = (PFN_vkSetBufferCollectionImageConstraintsFUCHSIA)load(context, "vkSetBufferCollectionImageConstraintsFUCHSIA"); -#endif /* defined(VK_FUCHSIA_buffer_collection) */ -#if defined(VK_FUCHSIA_external_memory) - table->vkGetMemoryZirconHandleFUCHSIA = (PFN_vkGetMemoryZirconHandleFUCHSIA)load(context, "vkGetMemoryZirconHandleFUCHSIA"); - table->vkGetMemoryZirconHandlePropertiesFUCHSIA = (PFN_vkGetMemoryZirconHandlePropertiesFUCHSIA)load(context, "vkGetMemoryZirconHandlePropertiesFUCHSIA"); -#endif /* defined(VK_FUCHSIA_external_memory) */ -#if defined(VK_FUCHSIA_external_semaphore) - table->vkGetSemaphoreZirconHandleFUCHSIA = (PFN_vkGetSemaphoreZirconHandleFUCHSIA)load(context, "vkGetSemaphoreZirconHandleFUCHSIA"); - table->vkImportSemaphoreZirconHandleFUCHSIA = (PFN_vkImportSemaphoreZirconHandleFUCHSIA)load(context, "vkImportSemaphoreZirconHandleFUCHSIA"); -#endif /* defined(VK_FUCHSIA_external_semaphore) */ -#if defined(VK_GOOGLE_display_timing) - table->vkGetPastPresentationTimingGOOGLE = (PFN_vkGetPastPresentationTimingGOOGLE)load(context, "vkGetPastPresentationTimingGOOGLE"); - table->vkGetRefreshCycleDurationGOOGLE = (PFN_vkGetRefreshCycleDurationGOOGLE)load(context, "vkGetRefreshCycleDurationGOOGLE"); -#endif /* defined(VK_GOOGLE_display_timing) */ -#if defined(VK_HUAWEI_cluster_culling_shader) - table->vkCmdDrawClusterHUAWEI = (PFN_vkCmdDrawClusterHUAWEI)load(context, "vkCmdDrawClusterHUAWEI"); - table->vkCmdDrawClusterIndirectHUAWEI = (PFN_vkCmdDrawClusterIndirectHUAWEI)load(context, "vkCmdDrawClusterIndirectHUAWEI"); -#endif /* defined(VK_HUAWEI_cluster_culling_shader) */ -#if defined(VK_HUAWEI_invocation_mask) - table->vkCmdBindInvocationMaskHUAWEI = (PFN_vkCmdBindInvocationMaskHUAWEI)load(context, "vkCmdBindInvocationMaskHUAWEI"); -#endif /* defined(VK_HUAWEI_invocation_mask) */ -#if defined(VK_HUAWEI_subpass_shading) - table->vkCmdSubpassShadingHUAWEI = (PFN_vkCmdSubpassShadingHUAWEI)load(context, "vkCmdSubpassShadingHUAWEI"); - table->vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI = (PFN_vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI)load(context, "vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI"); -#endif /* defined(VK_HUAWEI_subpass_shading) */ -#if defined(VK_INTEL_performance_query) - table->vkAcquirePerformanceConfigurationINTEL = (PFN_vkAcquirePerformanceConfigurationINTEL)load(context, "vkAcquirePerformanceConfigurationINTEL"); - table->vkCmdSetPerformanceMarkerINTEL = (PFN_vkCmdSetPerformanceMarkerINTEL)load(context, "vkCmdSetPerformanceMarkerINTEL"); - table->vkCmdSetPerformanceOverrideINTEL 
= (PFN_vkCmdSetPerformanceOverrideINTEL)load(context, "vkCmdSetPerformanceOverrideINTEL"); - table->vkCmdSetPerformanceStreamMarkerINTEL = (PFN_vkCmdSetPerformanceStreamMarkerINTEL)load(context, "vkCmdSetPerformanceStreamMarkerINTEL"); - table->vkGetPerformanceParameterINTEL = (PFN_vkGetPerformanceParameterINTEL)load(context, "vkGetPerformanceParameterINTEL"); - table->vkInitializePerformanceApiINTEL = (PFN_vkInitializePerformanceApiINTEL)load(context, "vkInitializePerformanceApiINTEL"); - table->vkQueueSetPerformanceConfigurationINTEL = (PFN_vkQueueSetPerformanceConfigurationINTEL)load(context, "vkQueueSetPerformanceConfigurationINTEL"); - table->vkReleasePerformanceConfigurationINTEL = (PFN_vkReleasePerformanceConfigurationINTEL)load(context, "vkReleasePerformanceConfigurationINTEL"); - table->vkUninitializePerformanceApiINTEL = (PFN_vkUninitializePerformanceApiINTEL)load(context, "vkUninitializePerformanceApiINTEL"); -#endif /* defined(VK_INTEL_performance_query) */ -#if defined(VK_KHR_acceleration_structure) - table->vkBuildAccelerationStructuresKHR = (PFN_vkBuildAccelerationStructuresKHR)load(context, "vkBuildAccelerationStructuresKHR"); - table->vkCmdBuildAccelerationStructuresIndirectKHR = (PFN_vkCmdBuildAccelerationStructuresIndirectKHR)load(context, "vkCmdBuildAccelerationStructuresIndirectKHR"); - table->vkCmdBuildAccelerationStructuresKHR = (PFN_vkCmdBuildAccelerationStructuresKHR)load(context, "vkCmdBuildAccelerationStructuresKHR"); - table->vkCmdCopyAccelerationStructureKHR = (PFN_vkCmdCopyAccelerationStructureKHR)load(context, "vkCmdCopyAccelerationStructureKHR"); - table->vkCmdCopyAccelerationStructureToMemoryKHR = (PFN_vkCmdCopyAccelerationStructureToMemoryKHR)load(context, "vkCmdCopyAccelerationStructureToMemoryKHR"); - table->vkCmdCopyMemoryToAccelerationStructureKHR = (PFN_vkCmdCopyMemoryToAccelerationStructureKHR)load(context, "vkCmdCopyMemoryToAccelerationStructureKHR"); - table->vkCmdWriteAccelerationStructuresPropertiesKHR = (PFN_vkCmdWriteAccelerationStructuresPropertiesKHR)load(context, "vkCmdWriteAccelerationStructuresPropertiesKHR"); - table->vkCopyAccelerationStructureKHR = (PFN_vkCopyAccelerationStructureKHR)load(context, "vkCopyAccelerationStructureKHR"); - table->vkCopyAccelerationStructureToMemoryKHR = (PFN_vkCopyAccelerationStructureToMemoryKHR)load(context, "vkCopyAccelerationStructureToMemoryKHR"); - table->vkCopyMemoryToAccelerationStructureKHR = (PFN_vkCopyMemoryToAccelerationStructureKHR)load(context, "vkCopyMemoryToAccelerationStructureKHR"); - table->vkCreateAccelerationStructureKHR = (PFN_vkCreateAccelerationStructureKHR)load(context, "vkCreateAccelerationStructureKHR"); - table->vkDestroyAccelerationStructureKHR = (PFN_vkDestroyAccelerationStructureKHR)load(context, "vkDestroyAccelerationStructureKHR"); - table->vkGetAccelerationStructureBuildSizesKHR = (PFN_vkGetAccelerationStructureBuildSizesKHR)load(context, "vkGetAccelerationStructureBuildSizesKHR"); - table->vkGetAccelerationStructureDeviceAddressKHR = (PFN_vkGetAccelerationStructureDeviceAddressKHR)load(context, "vkGetAccelerationStructureDeviceAddressKHR"); - table->vkGetDeviceAccelerationStructureCompatibilityKHR = (PFN_vkGetDeviceAccelerationStructureCompatibilityKHR)load(context, "vkGetDeviceAccelerationStructureCompatibilityKHR"); - table->vkWriteAccelerationStructuresPropertiesKHR = (PFN_vkWriteAccelerationStructuresPropertiesKHR)load(context, "vkWriteAccelerationStructuresPropertiesKHR"); -#endif /* defined(VK_KHR_acceleration_structure) */ -#if defined(VK_KHR_bind_memory2) - 
table->vkBindBufferMemory2KHR = (PFN_vkBindBufferMemory2KHR)load(context, "vkBindBufferMemory2KHR"); - table->vkBindImageMemory2KHR = (PFN_vkBindImageMemory2KHR)load(context, "vkBindImageMemory2KHR"); -#endif /* defined(VK_KHR_bind_memory2) */ -#if defined(VK_KHR_buffer_device_address) - table->vkGetBufferDeviceAddressKHR = (PFN_vkGetBufferDeviceAddressKHR)load(context, "vkGetBufferDeviceAddressKHR"); - table->vkGetBufferOpaqueCaptureAddressKHR = (PFN_vkGetBufferOpaqueCaptureAddressKHR)load(context, "vkGetBufferOpaqueCaptureAddressKHR"); - table->vkGetDeviceMemoryOpaqueCaptureAddressKHR = (PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR)load(context, "vkGetDeviceMemoryOpaqueCaptureAddressKHR"); -#endif /* defined(VK_KHR_buffer_device_address) */ -#if defined(VK_KHR_copy_commands2) - table->vkCmdBlitImage2KHR = (PFN_vkCmdBlitImage2KHR)load(context, "vkCmdBlitImage2KHR"); - table->vkCmdCopyBuffer2KHR = (PFN_vkCmdCopyBuffer2KHR)load(context, "vkCmdCopyBuffer2KHR"); - table->vkCmdCopyBufferToImage2KHR = (PFN_vkCmdCopyBufferToImage2KHR)load(context, "vkCmdCopyBufferToImage2KHR"); - table->vkCmdCopyImage2KHR = (PFN_vkCmdCopyImage2KHR)load(context, "vkCmdCopyImage2KHR"); - table->vkCmdCopyImageToBuffer2KHR = (PFN_vkCmdCopyImageToBuffer2KHR)load(context, "vkCmdCopyImageToBuffer2KHR"); - table->vkCmdResolveImage2KHR = (PFN_vkCmdResolveImage2KHR)load(context, "vkCmdResolveImage2KHR"); -#endif /* defined(VK_KHR_copy_commands2) */ -#if defined(VK_KHR_create_renderpass2) - table->vkCmdBeginRenderPass2KHR = (PFN_vkCmdBeginRenderPass2KHR)load(context, "vkCmdBeginRenderPass2KHR"); - table->vkCmdEndRenderPass2KHR = (PFN_vkCmdEndRenderPass2KHR)load(context, "vkCmdEndRenderPass2KHR"); - table->vkCmdNextSubpass2KHR = (PFN_vkCmdNextSubpass2KHR)load(context, "vkCmdNextSubpass2KHR"); - table->vkCreateRenderPass2KHR = (PFN_vkCreateRenderPass2KHR)load(context, "vkCreateRenderPass2KHR"); -#endif /* defined(VK_KHR_create_renderpass2) */ -#if defined(VK_KHR_deferred_host_operations) - table->vkCreateDeferredOperationKHR = (PFN_vkCreateDeferredOperationKHR)load(context, "vkCreateDeferredOperationKHR"); - table->vkDeferredOperationJoinKHR = (PFN_vkDeferredOperationJoinKHR)load(context, "vkDeferredOperationJoinKHR"); - table->vkDestroyDeferredOperationKHR = (PFN_vkDestroyDeferredOperationKHR)load(context, "vkDestroyDeferredOperationKHR"); - table->vkGetDeferredOperationMaxConcurrencyKHR = (PFN_vkGetDeferredOperationMaxConcurrencyKHR)load(context, "vkGetDeferredOperationMaxConcurrencyKHR"); - table->vkGetDeferredOperationResultKHR = (PFN_vkGetDeferredOperationResultKHR)load(context, "vkGetDeferredOperationResultKHR"); -#endif /* defined(VK_KHR_deferred_host_operations) */ -#if defined(VK_KHR_descriptor_update_template) - table->vkCreateDescriptorUpdateTemplateKHR = (PFN_vkCreateDescriptorUpdateTemplateKHR)load(context, "vkCreateDescriptorUpdateTemplateKHR"); - table->vkDestroyDescriptorUpdateTemplateKHR = (PFN_vkDestroyDescriptorUpdateTemplateKHR)load(context, "vkDestroyDescriptorUpdateTemplateKHR"); - table->vkUpdateDescriptorSetWithTemplateKHR = (PFN_vkUpdateDescriptorSetWithTemplateKHR)load(context, "vkUpdateDescriptorSetWithTemplateKHR"); -#endif /* defined(VK_KHR_descriptor_update_template) */ -#if defined(VK_KHR_device_group) - table->vkCmdDispatchBaseKHR = (PFN_vkCmdDispatchBaseKHR)load(context, "vkCmdDispatchBaseKHR"); - table->vkCmdSetDeviceMaskKHR = (PFN_vkCmdSetDeviceMaskKHR)load(context, "vkCmdSetDeviceMaskKHR"); - table->vkGetDeviceGroupPeerMemoryFeaturesKHR = 
(PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR)load(context, "vkGetDeviceGroupPeerMemoryFeaturesKHR"); -#endif /* defined(VK_KHR_device_group) */ -#if defined(VK_KHR_display_swapchain) - table->vkCreateSharedSwapchainsKHR = (PFN_vkCreateSharedSwapchainsKHR)load(context, "vkCreateSharedSwapchainsKHR"); -#endif /* defined(VK_KHR_display_swapchain) */ -#if defined(VK_KHR_draw_indirect_count) - table->vkCmdDrawIndexedIndirectCountKHR = (PFN_vkCmdDrawIndexedIndirectCountKHR)load(context, "vkCmdDrawIndexedIndirectCountKHR"); - table->vkCmdDrawIndirectCountKHR = (PFN_vkCmdDrawIndirectCountKHR)load(context, "vkCmdDrawIndirectCountKHR"); -#endif /* defined(VK_KHR_draw_indirect_count) */ -#if defined(VK_KHR_dynamic_rendering) - table->vkCmdBeginRenderingKHR = (PFN_vkCmdBeginRenderingKHR)load(context, "vkCmdBeginRenderingKHR"); - table->vkCmdEndRenderingKHR = (PFN_vkCmdEndRenderingKHR)load(context, "vkCmdEndRenderingKHR"); -#endif /* defined(VK_KHR_dynamic_rendering) */ -#if defined(VK_KHR_external_fence_fd) - table->vkGetFenceFdKHR = (PFN_vkGetFenceFdKHR)load(context, "vkGetFenceFdKHR"); - table->vkImportFenceFdKHR = (PFN_vkImportFenceFdKHR)load(context, "vkImportFenceFdKHR"); -#endif /* defined(VK_KHR_external_fence_fd) */ -#if defined(VK_KHR_external_fence_win32) - table->vkGetFenceWin32HandleKHR = (PFN_vkGetFenceWin32HandleKHR)load(context, "vkGetFenceWin32HandleKHR"); - table->vkImportFenceWin32HandleKHR = (PFN_vkImportFenceWin32HandleKHR)load(context, "vkImportFenceWin32HandleKHR"); -#endif /* defined(VK_KHR_external_fence_win32) */ -#if defined(VK_KHR_external_memory_fd) - table->vkGetMemoryFdKHR = (PFN_vkGetMemoryFdKHR)load(context, "vkGetMemoryFdKHR"); - table->vkGetMemoryFdPropertiesKHR = (PFN_vkGetMemoryFdPropertiesKHR)load(context, "vkGetMemoryFdPropertiesKHR"); -#endif /* defined(VK_KHR_external_memory_fd) */ -#if defined(VK_KHR_external_memory_win32) - table->vkGetMemoryWin32HandleKHR = (PFN_vkGetMemoryWin32HandleKHR)load(context, "vkGetMemoryWin32HandleKHR"); - table->vkGetMemoryWin32HandlePropertiesKHR = (PFN_vkGetMemoryWin32HandlePropertiesKHR)load(context, "vkGetMemoryWin32HandlePropertiesKHR"); -#endif /* defined(VK_KHR_external_memory_win32) */ -#if defined(VK_KHR_external_semaphore_fd) - table->vkGetSemaphoreFdKHR = (PFN_vkGetSemaphoreFdKHR)load(context, "vkGetSemaphoreFdKHR"); - table->vkImportSemaphoreFdKHR = (PFN_vkImportSemaphoreFdKHR)load(context, "vkImportSemaphoreFdKHR"); -#endif /* defined(VK_KHR_external_semaphore_fd) */ -#if defined(VK_KHR_external_semaphore_win32) - table->vkGetSemaphoreWin32HandleKHR = (PFN_vkGetSemaphoreWin32HandleKHR)load(context, "vkGetSemaphoreWin32HandleKHR"); - table->vkImportSemaphoreWin32HandleKHR = (PFN_vkImportSemaphoreWin32HandleKHR)load(context, "vkImportSemaphoreWin32HandleKHR"); -#endif /* defined(VK_KHR_external_semaphore_win32) */ -#if defined(VK_KHR_fragment_shading_rate) - table->vkCmdSetFragmentShadingRateKHR = (PFN_vkCmdSetFragmentShadingRateKHR)load(context, "vkCmdSetFragmentShadingRateKHR"); -#endif /* defined(VK_KHR_fragment_shading_rate) */ -#if defined(VK_KHR_get_memory_requirements2) - table->vkGetBufferMemoryRequirements2KHR = (PFN_vkGetBufferMemoryRequirements2KHR)load(context, "vkGetBufferMemoryRequirements2KHR"); - table->vkGetImageMemoryRequirements2KHR = (PFN_vkGetImageMemoryRequirements2KHR)load(context, "vkGetImageMemoryRequirements2KHR"); - table->vkGetImageSparseMemoryRequirements2KHR = (PFN_vkGetImageSparseMemoryRequirements2KHR)load(context, "vkGetImageSparseMemoryRequirements2KHR"); -#endif /* 
defined(VK_KHR_get_memory_requirements2) */ -#if defined(VK_KHR_maintenance1) - table->vkTrimCommandPoolKHR = (PFN_vkTrimCommandPoolKHR)load(context, "vkTrimCommandPoolKHR"); -#endif /* defined(VK_KHR_maintenance1) */ -#if defined(VK_KHR_maintenance3) - table->vkGetDescriptorSetLayoutSupportKHR = (PFN_vkGetDescriptorSetLayoutSupportKHR)load(context, "vkGetDescriptorSetLayoutSupportKHR"); -#endif /* defined(VK_KHR_maintenance3) */ -#if defined(VK_KHR_maintenance4) - table->vkGetDeviceBufferMemoryRequirementsKHR = (PFN_vkGetDeviceBufferMemoryRequirementsKHR)load(context, "vkGetDeviceBufferMemoryRequirementsKHR"); - table->vkGetDeviceImageMemoryRequirementsKHR = (PFN_vkGetDeviceImageMemoryRequirementsKHR)load(context, "vkGetDeviceImageMemoryRequirementsKHR"); - table->vkGetDeviceImageSparseMemoryRequirementsKHR = (PFN_vkGetDeviceImageSparseMemoryRequirementsKHR)load(context, "vkGetDeviceImageSparseMemoryRequirementsKHR"); -#endif /* defined(VK_KHR_maintenance4) */ -#if defined(VK_KHR_maintenance5) - table->vkCmdBindIndexBuffer2KHR = (PFN_vkCmdBindIndexBuffer2KHR)load(context, "vkCmdBindIndexBuffer2KHR"); - table->vkGetDeviceImageSubresourceLayoutKHR = (PFN_vkGetDeviceImageSubresourceLayoutKHR)load(context, "vkGetDeviceImageSubresourceLayoutKHR"); - table->vkGetImageSubresourceLayout2KHR = (PFN_vkGetImageSubresourceLayout2KHR)load(context, "vkGetImageSubresourceLayout2KHR"); - table->vkGetRenderingAreaGranularityKHR = (PFN_vkGetRenderingAreaGranularityKHR)load(context, "vkGetRenderingAreaGranularityKHR"); -#endif /* defined(VK_KHR_maintenance5) */ -#if defined(VK_KHR_map_memory2) - table->vkMapMemory2KHR = (PFN_vkMapMemory2KHR)load(context, "vkMapMemory2KHR"); - table->vkUnmapMemory2KHR = (PFN_vkUnmapMemory2KHR)load(context, "vkUnmapMemory2KHR"); -#endif /* defined(VK_KHR_map_memory2) */ -#if defined(VK_KHR_performance_query) - table->vkAcquireProfilingLockKHR = (PFN_vkAcquireProfilingLockKHR)load(context, "vkAcquireProfilingLockKHR"); - table->vkReleaseProfilingLockKHR = (PFN_vkReleaseProfilingLockKHR)load(context, "vkReleaseProfilingLockKHR"); -#endif /* defined(VK_KHR_performance_query) */ -#if defined(VK_KHR_pipeline_executable_properties) - table->vkGetPipelineExecutableInternalRepresentationsKHR = (PFN_vkGetPipelineExecutableInternalRepresentationsKHR)load(context, "vkGetPipelineExecutableInternalRepresentationsKHR"); - table->vkGetPipelineExecutablePropertiesKHR = (PFN_vkGetPipelineExecutablePropertiesKHR)load(context, "vkGetPipelineExecutablePropertiesKHR"); - table->vkGetPipelineExecutableStatisticsKHR = (PFN_vkGetPipelineExecutableStatisticsKHR)load(context, "vkGetPipelineExecutableStatisticsKHR"); -#endif /* defined(VK_KHR_pipeline_executable_properties) */ -#if defined(VK_KHR_present_wait) - table->vkWaitForPresentKHR = (PFN_vkWaitForPresentKHR)load(context, "vkWaitForPresentKHR"); -#endif /* defined(VK_KHR_present_wait) */ -#if defined(VK_KHR_push_descriptor) - table->vkCmdPushDescriptorSetKHR = (PFN_vkCmdPushDescriptorSetKHR)load(context, "vkCmdPushDescriptorSetKHR"); -#endif /* defined(VK_KHR_push_descriptor) */ -#if defined(VK_KHR_ray_tracing_maintenance1) && defined(VK_KHR_ray_tracing_pipeline) - table->vkCmdTraceRaysIndirect2KHR = (PFN_vkCmdTraceRaysIndirect2KHR)load(context, "vkCmdTraceRaysIndirect2KHR"); -#endif /* defined(VK_KHR_ray_tracing_maintenance1) && defined(VK_KHR_ray_tracing_pipeline) */ -#if defined(VK_KHR_ray_tracing_pipeline) - table->vkCmdSetRayTracingPipelineStackSizeKHR = (PFN_vkCmdSetRayTracingPipelineStackSizeKHR)load(context, 
"vkCmdSetRayTracingPipelineStackSizeKHR"); - table->vkCmdTraceRaysIndirectKHR = (PFN_vkCmdTraceRaysIndirectKHR)load(context, "vkCmdTraceRaysIndirectKHR"); - table->vkCmdTraceRaysKHR = (PFN_vkCmdTraceRaysKHR)load(context, "vkCmdTraceRaysKHR"); - table->vkCreateRayTracingPipelinesKHR = (PFN_vkCreateRayTracingPipelinesKHR)load(context, "vkCreateRayTracingPipelinesKHR"); - table->vkGetRayTracingCaptureReplayShaderGroupHandlesKHR = (PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR)load(context, "vkGetRayTracingCaptureReplayShaderGroupHandlesKHR"); - table->vkGetRayTracingShaderGroupHandlesKHR = (PFN_vkGetRayTracingShaderGroupHandlesKHR)load(context, "vkGetRayTracingShaderGroupHandlesKHR"); - table->vkGetRayTracingShaderGroupStackSizeKHR = (PFN_vkGetRayTracingShaderGroupStackSizeKHR)load(context, "vkGetRayTracingShaderGroupStackSizeKHR"); -#endif /* defined(VK_KHR_ray_tracing_pipeline) */ -#if defined(VK_KHR_sampler_ycbcr_conversion) - table->vkCreateSamplerYcbcrConversionKHR = (PFN_vkCreateSamplerYcbcrConversionKHR)load(context, "vkCreateSamplerYcbcrConversionKHR"); - table->vkDestroySamplerYcbcrConversionKHR = (PFN_vkDestroySamplerYcbcrConversionKHR)load(context, "vkDestroySamplerYcbcrConversionKHR"); -#endif /* defined(VK_KHR_sampler_ycbcr_conversion) */ -#if defined(VK_KHR_shared_presentable_image) - table->vkGetSwapchainStatusKHR = (PFN_vkGetSwapchainStatusKHR)load(context, "vkGetSwapchainStatusKHR"); -#endif /* defined(VK_KHR_shared_presentable_image) */ -#if defined(VK_KHR_swapchain) - table->vkAcquireNextImageKHR = (PFN_vkAcquireNextImageKHR)load(context, "vkAcquireNextImageKHR"); - table->vkCreateSwapchainKHR = (PFN_vkCreateSwapchainKHR)load(context, "vkCreateSwapchainKHR"); - table->vkDestroySwapchainKHR = (PFN_vkDestroySwapchainKHR)load(context, "vkDestroySwapchainKHR"); - table->vkGetSwapchainImagesKHR = (PFN_vkGetSwapchainImagesKHR)load(context, "vkGetSwapchainImagesKHR"); - table->vkQueuePresentKHR = (PFN_vkQueuePresentKHR)load(context, "vkQueuePresentKHR"); -#endif /* defined(VK_KHR_swapchain) */ -#if defined(VK_KHR_synchronization2) - table->vkCmdPipelineBarrier2KHR = (PFN_vkCmdPipelineBarrier2KHR)load(context, "vkCmdPipelineBarrier2KHR"); - table->vkCmdResetEvent2KHR = (PFN_vkCmdResetEvent2KHR)load(context, "vkCmdResetEvent2KHR"); - table->vkCmdSetEvent2KHR = (PFN_vkCmdSetEvent2KHR)load(context, "vkCmdSetEvent2KHR"); - table->vkCmdWaitEvents2KHR = (PFN_vkCmdWaitEvents2KHR)load(context, "vkCmdWaitEvents2KHR"); - table->vkCmdWriteTimestamp2KHR = (PFN_vkCmdWriteTimestamp2KHR)load(context, "vkCmdWriteTimestamp2KHR"); - table->vkQueueSubmit2KHR = (PFN_vkQueueSubmit2KHR)load(context, "vkQueueSubmit2KHR"); -#endif /* defined(VK_KHR_synchronization2) */ -#if defined(VK_KHR_synchronization2) && defined(VK_AMD_buffer_marker) - table->vkCmdWriteBufferMarker2AMD = (PFN_vkCmdWriteBufferMarker2AMD)load(context, "vkCmdWriteBufferMarker2AMD"); -#endif /* defined(VK_KHR_synchronization2) && defined(VK_AMD_buffer_marker) */ -#if defined(VK_KHR_synchronization2) && defined(VK_NV_device_diagnostic_checkpoints) - table->vkGetQueueCheckpointData2NV = (PFN_vkGetQueueCheckpointData2NV)load(context, "vkGetQueueCheckpointData2NV"); -#endif /* defined(VK_KHR_synchronization2) && defined(VK_NV_device_diagnostic_checkpoints) */ -#if defined(VK_KHR_timeline_semaphore) - table->vkGetSemaphoreCounterValueKHR = (PFN_vkGetSemaphoreCounterValueKHR)load(context, "vkGetSemaphoreCounterValueKHR"); - table->vkSignalSemaphoreKHR = (PFN_vkSignalSemaphoreKHR)load(context, "vkSignalSemaphoreKHR"); - 
table->vkWaitSemaphoresKHR = (PFN_vkWaitSemaphoresKHR)load(context, "vkWaitSemaphoresKHR"); -#endif /* defined(VK_KHR_timeline_semaphore) */ -#if defined(VK_KHR_video_decode_queue) - table->vkCmdDecodeVideoKHR = (PFN_vkCmdDecodeVideoKHR)load(context, "vkCmdDecodeVideoKHR"); -#endif /* defined(VK_KHR_video_decode_queue) */ -#if defined(VK_KHR_video_encode_queue) - table->vkCmdEncodeVideoKHR = (PFN_vkCmdEncodeVideoKHR)load(context, "vkCmdEncodeVideoKHR"); - table->vkGetEncodedVideoSessionParametersKHR = (PFN_vkGetEncodedVideoSessionParametersKHR)load(context, "vkGetEncodedVideoSessionParametersKHR"); -#endif /* defined(VK_KHR_video_encode_queue) */ -#if defined(VK_KHR_video_queue) - table->vkBindVideoSessionMemoryKHR = (PFN_vkBindVideoSessionMemoryKHR)load(context, "vkBindVideoSessionMemoryKHR"); - table->vkCmdBeginVideoCodingKHR = (PFN_vkCmdBeginVideoCodingKHR)load(context, "vkCmdBeginVideoCodingKHR"); - table->vkCmdControlVideoCodingKHR = (PFN_vkCmdControlVideoCodingKHR)load(context, "vkCmdControlVideoCodingKHR"); - table->vkCmdEndVideoCodingKHR = (PFN_vkCmdEndVideoCodingKHR)load(context, "vkCmdEndVideoCodingKHR"); - table->vkCreateVideoSessionKHR = (PFN_vkCreateVideoSessionKHR)load(context, "vkCreateVideoSessionKHR"); - table->vkCreateVideoSessionParametersKHR = (PFN_vkCreateVideoSessionParametersKHR)load(context, "vkCreateVideoSessionParametersKHR"); - table->vkDestroyVideoSessionKHR = (PFN_vkDestroyVideoSessionKHR)load(context, "vkDestroyVideoSessionKHR"); - table->vkDestroyVideoSessionParametersKHR = (PFN_vkDestroyVideoSessionParametersKHR)load(context, "vkDestroyVideoSessionParametersKHR"); - table->vkGetVideoSessionMemoryRequirementsKHR = (PFN_vkGetVideoSessionMemoryRequirementsKHR)load(context, "vkGetVideoSessionMemoryRequirementsKHR"); - table->vkUpdateVideoSessionParametersKHR = (PFN_vkUpdateVideoSessionParametersKHR)load(context, "vkUpdateVideoSessionParametersKHR"); -#endif /* defined(VK_KHR_video_queue) */ -#if defined(VK_NVX_binary_import) - table->vkCmdCuLaunchKernelNVX = (PFN_vkCmdCuLaunchKernelNVX)load(context, "vkCmdCuLaunchKernelNVX"); - table->vkCreateCuFunctionNVX = (PFN_vkCreateCuFunctionNVX)load(context, "vkCreateCuFunctionNVX"); - table->vkCreateCuModuleNVX = (PFN_vkCreateCuModuleNVX)load(context, "vkCreateCuModuleNVX"); - table->vkDestroyCuFunctionNVX = (PFN_vkDestroyCuFunctionNVX)load(context, "vkDestroyCuFunctionNVX"); - table->vkDestroyCuModuleNVX = (PFN_vkDestroyCuModuleNVX)load(context, "vkDestroyCuModuleNVX"); -#endif /* defined(VK_NVX_binary_import) */ -#if defined(VK_NVX_image_view_handle) - table->vkGetImageViewAddressNVX = (PFN_vkGetImageViewAddressNVX)load(context, "vkGetImageViewAddressNVX"); - table->vkGetImageViewHandleNVX = (PFN_vkGetImageViewHandleNVX)load(context, "vkGetImageViewHandleNVX"); -#endif /* defined(VK_NVX_image_view_handle) */ -#if defined(VK_NV_clip_space_w_scaling) - table->vkCmdSetViewportWScalingNV = (PFN_vkCmdSetViewportWScalingNV)load(context, "vkCmdSetViewportWScalingNV"); -#endif /* defined(VK_NV_clip_space_w_scaling) */ -#if defined(VK_NV_copy_memory_indirect) - table->vkCmdCopyMemoryIndirectNV = (PFN_vkCmdCopyMemoryIndirectNV)load(context, "vkCmdCopyMemoryIndirectNV"); - table->vkCmdCopyMemoryToImageIndirectNV = (PFN_vkCmdCopyMemoryToImageIndirectNV)load(context, "vkCmdCopyMemoryToImageIndirectNV"); -#endif /* defined(VK_NV_copy_memory_indirect) */ -#if defined(VK_NV_cuda_kernel_launch) - table->vkCmdCudaLaunchKernelNV = (PFN_vkCmdCudaLaunchKernelNV)load(context, "vkCmdCudaLaunchKernelNV"); - table->vkCreateCudaFunctionNV = 
(PFN_vkCreateCudaFunctionNV)load(context, "vkCreateCudaFunctionNV"); - table->vkCreateCudaModuleNV = (PFN_vkCreateCudaModuleNV)load(context, "vkCreateCudaModuleNV"); - table->vkDestroyCudaFunctionNV = (PFN_vkDestroyCudaFunctionNV)load(context, "vkDestroyCudaFunctionNV"); - table->vkDestroyCudaModuleNV = (PFN_vkDestroyCudaModuleNV)load(context, "vkDestroyCudaModuleNV"); - table->vkGetCudaModuleCacheNV = (PFN_vkGetCudaModuleCacheNV)load(context, "vkGetCudaModuleCacheNV"); -#endif /* defined(VK_NV_cuda_kernel_launch) */ -#if defined(VK_NV_device_diagnostic_checkpoints) - table->vkCmdSetCheckpointNV = (PFN_vkCmdSetCheckpointNV)load(context, "vkCmdSetCheckpointNV"); - table->vkGetQueueCheckpointDataNV = (PFN_vkGetQueueCheckpointDataNV)load(context, "vkGetQueueCheckpointDataNV"); -#endif /* defined(VK_NV_device_diagnostic_checkpoints) */ -#if defined(VK_NV_device_generated_commands) - table->vkCmdBindPipelineShaderGroupNV = (PFN_vkCmdBindPipelineShaderGroupNV)load(context, "vkCmdBindPipelineShaderGroupNV"); - table->vkCmdExecuteGeneratedCommandsNV = (PFN_vkCmdExecuteGeneratedCommandsNV)load(context, "vkCmdExecuteGeneratedCommandsNV"); - table->vkCmdPreprocessGeneratedCommandsNV = (PFN_vkCmdPreprocessGeneratedCommandsNV)load(context, "vkCmdPreprocessGeneratedCommandsNV"); - table->vkCreateIndirectCommandsLayoutNV = (PFN_vkCreateIndirectCommandsLayoutNV)load(context, "vkCreateIndirectCommandsLayoutNV"); - table->vkDestroyIndirectCommandsLayoutNV = (PFN_vkDestroyIndirectCommandsLayoutNV)load(context, "vkDestroyIndirectCommandsLayoutNV"); - table->vkGetGeneratedCommandsMemoryRequirementsNV = (PFN_vkGetGeneratedCommandsMemoryRequirementsNV)load(context, "vkGetGeneratedCommandsMemoryRequirementsNV"); -#endif /* defined(VK_NV_device_generated_commands) */ -#if defined(VK_NV_device_generated_commands_compute) - table->vkCmdUpdatePipelineIndirectBufferNV = (PFN_vkCmdUpdatePipelineIndirectBufferNV)load(context, "vkCmdUpdatePipelineIndirectBufferNV"); - table->vkGetPipelineIndirectDeviceAddressNV = (PFN_vkGetPipelineIndirectDeviceAddressNV)load(context, "vkGetPipelineIndirectDeviceAddressNV"); - table->vkGetPipelineIndirectMemoryRequirementsNV = (PFN_vkGetPipelineIndirectMemoryRequirementsNV)load(context, "vkGetPipelineIndirectMemoryRequirementsNV"); -#endif /* defined(VK_NV_device_generated_commands_compute) */ -#if defined(VK_NV_external_memory_rdma) - table->vkGetMemoryRemoteAddressNV = (PFN_vkGetMemoryRemoteAddressNV)load(context, "vkGetMemoryRemoteAddressNV"); -#endif /* defined(VK_NV_external_memory_rdma) */ -#if defined(VK_NV_external_memory_win32) - table->vkGetMemoryWin32HandleNV = (PFN_vkGetMemoryWin32HandleNV)load(context, "vkGetMemoryWin32HandleNV"); -#endif /* defined(VK_NV_external_memory_win32) */ -#if defined(VK_NV_fragment_shading_rate_enums) - table->vkCmdSetFragmentShadingRateEnumNV = (PFN_vkCmdSetFragmentShadingRateEnumNV)load(context, "vkCmdSetFragmentShadingRateEnumNV"); -#endif /* defined(VK_NV_fragment_shading_rate_enums) */ -#if defined(VK_NV_low_latency2) - table->vkGetLatencyTimingsNV = (PFN_vkGetLatencyTimingsNV)load(context, "vkGetLatencyTimingsNV"); - table->vkLatencySleepNV = (PFN_vkLatencySleepNV)load(context, "vkLatencySleepNV"); - table->vkQueueNotifyOutOfBandNV = (PFN_vkQueueNotifyOutOfBandNV)load(context, "vkQueueNotifyOutOfBandNV"); - table->vkSetLatencyMarkerNV = (PFN_vkSetLatencyMarkerNV)load(context, "vkSetLatencyMarkerNV"); - table->vkSetLatencySleepModeNV = (PFN_vkSetLatencySleepModeNV)load(context, "vkSetLatencySleepModeNV"); -#endif /* defined(VK_NV_low_latency2) 
*/ -#if defined(VK_NV_memory_decompression) - table->vkCmdDecompressMemoryIndirectCountNV = (PFN_vkCmdDecompressMemoryIndirectCountNV)load(context, "vkCmdDecompressMemoryIndirectCountNV"); - table->vkCmdDecompressMemoryNV = (PFN_vkCmdDecompressMemoryNV)load(context, "vkCmdDecompressMemoryNV"); -#endif /* defined(VK_NV_memory_decompression) */ -#if defined(VK_NV_mesh_shader) - table->vkCmdDrawMeshTasksIndirectCountNV = (PFN_vkCmdDrawMeshTasksIndirectCountNV)load(context, "vkCmdDrawMeshTasksIndirectCountNV"); - table->vkCmdDrawMeshTasksIndirectNV = (PFN_vkCmdDrawMeshTasksIndirectNV)load(context, "vkCmdDrawMeshTasksIndirectNV"); - table->vkCmdDrawMeshTasksNV = (PFN_vkCmdDrawMeshTasksNV)load(context, "vkCmdDrawMeshTasksNV"); -#endif /* defined(VK_NV_mesh_shader) */ -#if defined(VK_NV_optical_flow) - table->vkBindOpticalFlowSessionImageNV = (PFN_vkBindOpticalFlowSessionImageNV)load(context, "vkBindOpticalFlowSessionImageNV"); - table->vkCmdOpticalFlowExecuteNV = (PFN_vkCmdOpticalFlowExecuteNV)load(context, "vkCmdOpticalFlowExecuteNV"); - table->vkCreateOpticalFlowSessionNV = (PFN_vkCreateOpticalFlowSessionNV)load(context, "vkCreateOpticalFlowSessionNV"); - table->vkDestroyOpticalFlowSessionNV = (PFN_vkDestroyOpticalFlowSessionNV)load(context, "vkDestroyOpticalFlowSessionNV"); -#endif /* defined(VK_NV_optical_flow) */ -#if defined(VK_NV_ray_tracing) - table->vkBindAccelerationStructureMemoryNV = (PFN_vkBindAccelerationStructureMemoryNV)load(context, "vkBindAccelerationStructureMemoryNV"); - table->vkCmdBuildAccelerationStructureNV = (PFN_vkCmdBuildAccelerationStructureNV)load(context, "vkCmdBuildAccelerationStructureNV"); - table->vkCmdCopyAccelerationStructureNV = (PFN_vkCmdCopyAccelerationStructureNV)load(context, "vkCmdCopyAccelerationStructureNV"); - table->vkCmdTraceRaysNV = (PFN_vkCmdTraceRaysNV)load(context, "vkCmdTraceRaysNV"); - table->vkCmdWriteAccelerationStructuresPropertiesNV = (PFN_vkCmdWriteAccelerationStructuresPropertiesNV)load(context, "vkCmdWriteAccelerationStructuresPropertiesNV"); - table->vkCompileDeferredNV = (PFN_vkCompileDeferredNV)load(context, "vkCompileDeferredNV"); - table->vkCreateAccelerationStructureNV = (PFN_vkCreateAccelerationStructureNV)load(context, "vkCreateAccelerationStructureNV"); - table->vkCreateRayTracingPipelinesNV = (PFN_vkCreateRayTracingPipelinesNV)load(context, "vkCreateRayTracingPipelinesNV"); - table->vkDestroyAccelerationStructureNV = (PFN_vkDestroyAccelerationStructureNV)load(context, "vkDestroyAccelerationStructureNV"); - table->vkGetAccelerationStructureHandleNV = (PFN_vkGetAccelerationStructureHandleNV)load(context, "vkGetAccelerationStructureHandleNV"); - table->vkGetAccelerationStructureMemoryRequirementsNV = (PFN_vkGetAccelerationStructureMemoryRequirementsNV)load(context, "vkGetAccelerationStructureMemoryRequirementsNV"); - table->vkGetRayTracingShaderGroupHandlesNV = (PFN_vkGetRayTracingShaderGroupHandlesNV)load(context, "vkGetRayTracingShaderGroupHandlesNV"); -#endif /* defined(VK_NV_ray_tracing) */ -#if defined(VK_NV_scissor_exclusive) && VK_NV_SCISSOR_EXCLUSIVE_SPEC_VERSION >= 2 - table->vkCmdSetExclusiveScissorEnableNV = (PFN_vkCmdSetExclusiveScissorEnableNV)load(context, "vkCmdSetExclusiveScissorEnableNV"); -#endif /* defined(VK_NV_scissor_exclusive) && VK_NV_SCISSOR_EXCLUSIVE_SPEC_VERSION >= 2 */ -#if defined(VK_NV_scissor_exclusive) - table->vkCmdSetExclusiveScissorNV = (PFN_vkCmdSetExclusiveScissorNV)load(context, "vkCmdSetExclusiveScissorNV"); -#endif /* defined(VK_NV_scissor_exclusive) */ -#if 
defined(VK_NV_shading_rate_image) - table->vkCmdBindShadingRateImageNV = (PFN_vkCmdBindShadingRateImageNV)load(context, "vkCmdBindShadingRateImageNV"); - table->vkCmdSetCoarseSampleOrderNV = (PFN_vkCmdSetCoarseSampleOrderNV)load(context, "vkCmdSetCoarseSampleOrderNV"); - table->vkCmdSetViewportShadingRatePaletteNV = (PFN_vkCmdSetViewportShadingRatePaletteNV)load(context, "vkCmdSetViewportShadingRatePaletteNV"); -#endif /* defined(VK_NV_shading_rate_image) */ -#if defined(VK_QCOM_tile_properties) - table->vkGetDynamicRenderingTilePropertiesQCOM = (PFN_vkGetDynamicRenderingTilePropertiesQCOM)load(context, "vkGetDynamicRenderingTilePropertiesQCOM"); - table->vkGetFramebufferTilePropertiesQCOM = (PFN_vkGetFramebufferTilePropertiesQCOM)load(context, "vkGetFramebufferTilePropertiesQCOM"); -#endif /* defined(VK_QCOM_tile_properties) */ -#if defined(VK_QNX_external_memory_screen_buffer) - table->vkGetScreenBufferPropertiesQNX = (PFN_vkGetScreenBufferPropertiesQNX)load(context, "vkGetScreenBufferPropertiesQNX"); -#endif /* defined(VK_QNX_external_memory_screen_buffer) */ -#if defined(VK_VALVE_descriptor_set_host_mapping) - table->vkGetDescriptorSetHostMappingVALVE = (PFN_vkGetDescriptorSetHostMappingVALVE)load(context, "vkGetDescriptorSetHostMappingVALVE"); - table->vkGetDescriptorSetLayoutHostMappingInfoVALVE = (PFN_vkGetDescriptorSetLayoutHostMappingInfoVALVE)load(context, "vkGetDescriptorSetLayoutHostMappingInfoVALVE"); -#endif /* defined(VK_VALVE_descriptor_set_host_mapping) */ -#if (defined(VK_EXT_extended_dynamic_state)) || (defined(VK_EXT_shader_object)) - table->vkCmdBindVertexBuffers2EXT = (PFN_vkCmdBindVertexBuffers2EXT)load(context, "vkCmdBindVertexBuffers2EXT"); - table->vkCmdSetCullModeEXT = (PFN_vkCmdSetCullModeEXT)load(context, "vkCmdSetCullModeEXT"); - table->vkCmdSetDepthBoundsTestEnableEXT = (PFN_vkCmdSetDepthBoundsTestEnableEXT)load(context, "vkCmdSetDepthBoundsTestEnableEXT"); - table->vkCmdSetDepthCompareOpEXT = (PFN_vkCmdSetDepthCompareOpEXT)load(context, "vkCmdSetDepthCompareOpEXT"); - table->vkCmdSetDepthTestEnableEXT = (PFN_vkCmdSetDepthTestEnableEXT)load(context, "vkCmdSetDepthTestEnableEXT"); - table->vkCmdSetDepthWriteEnableEXT = (PFN_vkCmdSetDepthWriteEnableEXT)load(context, "vkCmdSetDepthWriteEnableEXT"); - table->vkCmdSetFrontFaceEXT = (PFN_vkCmdSetFrontFaceEXT)load(context, "vkCmdSetFrontFaceEXT"); - table->vkCmdSetPrimitiveTopologyEXT = (PFN_vkCmdSetPrimitiveTopologyEXT)load(context, "vkCmdSetPrimitiveTopologyEXT"); - table->vkCmdSetScissorWithCountEXT = (PFN_vkCmdSetScissorWithCountEXT)load(context, "vkCmdSetScissorWithCountEXT"); - table->vkCmdSetStencilOpEXT = (PFN_vkCmdSetStencilOpEXT)load(context, "vkCmdSetStencilOpEXT"); - table->vkCmdSetStencilTestEnableEXT = (PFN_vkCmdSetStencilTestEnableEXT)load(context, "vkCmdSetStencilTestEnableEXT"); - table->vkCmdSetViewportWithCountEXT = (PFN_vkCmdSetViewportWithCountEXT)load(context, "vkCmdSetViewportWithCountEXT"); -#endif /* (defined(VK_EXT_extended_dynamic_state)) || (defined(VK_EXT_shader_object)) */ -#if (defined(VK_EXT_extended_dynamic_state2)) || (defined(VK_EXT_shader_object)) - table->vkCmdSetDepthBiasEnableEXT = (PFN_vkCmdSetDepthBiasEnableEXT)load(context, "vkCmdSetDepthBiasEnableEXT"); - table->vkCmdSetLogicOpEXT = (PFN_vkCmdSetLogicOpEXT)load(context, "vkCmdSetLogicOpEXT"); - table->vkCmdSetPatchControlPointsEXT = (PFN_vkCmdSetPatchControlPointsEXT)load(context, "vkCmdSetPatchControlPointsEXT"); - table->vkCmdSetPrimitiveRestartEnableEXT = (PFN_vkCmdSetPrimitiveRestartEnableEXT)load(context, 
"vkCmdSetPrimitiveRestartEnableEXT"); - table->vkCmdSetRasterizerDiscardEnableEXT = (PFN_vkCmdSetRasterizerDiscardEnableEXT)load(context, "vkCmdSetRasterizerDiscardEnableEXT"); -#endif /* (defined(VK_EXT_extended_dynamic_state2)) || (defined(VK_EXT_shader_object)) */ -#if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) - table->vkCmdSetAlphaToCoverageEnableEXT = (PFN_vkCmdSetAlphaToCoverageEnableEXT)load(context, "vkCmdSetAlphaToCoverageEnableEXT"); - table->vkCmdSetAlphaToOneEnableEXT = (PFN_vkCmdSetAlphaToOneEnableEXT)load(context, "vkCmdSetAlphaToOneEnableEXT"); - table->vkCmdSetColorBlendAdvancedEXT = (PFN_vkCmdSetColorBlendAdvancedEXT)load(context, "vkCmdSetColorBlendAdvancedEXT"); - table->vkCmdSetColorBlendEnableEXT = (PFN_vkCmdSetColorBlendEnableEXT)load(context, "vkCmdSetColorBlendEnableEXT"); - table->vkCmdSetColorBlendEquationEXT = (PFN_vkCmdSetColorBlendEquationEXT)load(context, "vkCmdSetColorBlendEquationEXT"); - table->vkCmdSetColorWriteMaskEXT = (PFN_vkCmdSetColorWriteMaskEXT)load(context, "vkCmdSetColorWriteMaskEXT"); - table->vkCmdSetConservativeRasterizationModeEXT = (PFN_vkCmdSetConservativeRasterizationModeEXT)load(context, "vkCmdSetConservativeRasterizationModeEXT"); - table->vkCmdSetDepthClampEnableEXT = (PFN_vkCmdSetDepthClampEnableEXT)load(context, "vkCmdSetDepthClampEnableEXT"); - table->vkCmdSetDepthClipEnableEXT = (PFN_vkCmdSetDepthClipEnableEXT)load(context, "vkCmdSetDepthClipEnableEXT"); - table->vkCmdSetDepthClipNegativeOneToOneEXT = (PFN_vkCmdSetDepthClipNegativeOneToOneEXT)load(context, "vkCmdSetDepthClipNegativeOneToOneEXT"); - table->vkCmdSetExtraPrimitiveOverestimationSizeEXT = (PFN_vkCmdSetExtraPrimitiveOverestimationSizeEXT)load(context, "vkCmdSetExtraPrimitiveOverestimationSizeEXT"); - table->vkCmdSetLineRasterizationModeEXT = (PFN_vkCmdSetLineRasterizationModeEXT)load(context, "vkCmdSetLineRasterizationModeEXT"); - table->vkCmdSetLineStippleEnableEXT = (PFN_vkCmdSetLineStippleEnableEXT)load(context, "vkCmdSetLineStippleEnableEXT"); - table->vkCmdSetLogicOpEnableEXT = (PFN_vkCmdSetLogicOpEnableEXT)load(context, "vkCmdSetLogicOpEnableEXT"); - table->vkCmdSetPolygonModeEXT = (PFN_vkCmdSetPolygonModeEXT)load(context, "vkCmdSetPolygonModeEXT"); - table->vkCmdSetProvokingVertexModeEXT = (PFN_vkCmdSetProvokingVertexModeEXT)load(context, "vkCmdSetProvokingVertexModeEXT"); - table->vkCmdSetRasterizationSamplesEXT = (PFN_vkCmdSetRasterizationSamplesEXT)load(context, "vkCmdSetRasterizationSamplesEXT"); - table->vkCmdSetRasterizationStreamEXT = (PFN_vkCmdSetRasterizationStreamEXT)load(context, "vkCmdSetRasterizationStreamEXT"); - table->vkCmdSetSampleLocationsEnableEXT = (PFN_vkCmdSetSampleLocationsEnableEXT)load(context, "vkCmdSetSampleLocationsEnableEXT"); - table->vkCmdSetSampleMaskEXT = (PFN_vkCmdSetSampleMaskEXT)load(context, "vkCmdSetSampleMaskEXT"); - table->vkCmdSetTessellationDomainOriginEXT = (PFN_vkCmdSetTessellationDomainOriginEXT)load(context, "vkCmdSetTessellationDomainOriginEXT"); -#endif /* (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) */ -#if (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_clip_space_w_scaling)) || (defined(VK_EXT_shader_object) && defined(VK_NV_clip_space_w_scaling)) - table->vkCmdSetViewportWScalingEnableNV = (PFN_vkCmdSetViewportWScalingEnableNV)load(context, "vkCmdSetViewportWScalingEnableNV"); -#endif /* (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_clip_space_w_scaling)) || (defined(VK_EXT_shader_object) && defined(VK_NV_clip_space_w_scaling)) 
*/ -#if (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_viewport_swizzle)) || (defined(VK_EXT_shader_object) && defined(VK_NV_viewport_swizzle)) - table->vkCmdSetViewportSwizzleNV = (PFN_vkCmdSetViewportSwizzleNV)load(context, "vkCmdSetViewportSwizzleNV"); -#endif /* (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_viewport_swizzle)) || (defined(VK_EXT_shader_object) && defined(VK_NV_viewport_swizzle)) */ -#if (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_fragment_coverage_to_color)) || (defined(VK_EXT_shader_object) && defined(VK_NV_fragment_coverage_to_color)) - table->vkCmdSetCoverageToColorEnableNV = (PFN_vkCmdSetCoverageToColorEnableNV)load(context, "vkCmdSetCoverageToColorEnableNV"); - table->vkCmdSetCoverageToColorLocationNV = (PFN_vkCmdSetCoverageToColorLocationNV)load(context, "vkCmdSetCoverageToColorLocationNV"); -#endif /* (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_fragment_coverage_to_color)) || (defined(VK_EXT_shader_object) && defined(VK_NV_fragment_coverage_to_color)) */ -#if (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_framebuffer_mixed_samples)) || (defined(VK_EXT_shader_object) && defined(VK_NV_framebuffer_mixed_samples)) - table->vkCmdSetCoverageModulationModeNV = (PFN_vkCmdSetCoverageModulationModeNV)load(context, "vkCmdSetCoverageModulationModeNV"); - table->vkCmdSetCoverageModulationTableEnableNV = (PFN_vkCmdSetCoverageModulationTableEnableNV)load(context, "vkCmdSetCoverageModulationTableEnableNV"); - table->vkCmdSetCoverageModulationTableNV = (PFN_vkCmdSetCoverageModulationTableNV)load(context, "vkCmdSetCoverageModulationTableNV"); -#endif /* (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_framebuffer_mixed_samples)) || (defined(VK_EXT_shader_object) && defined(VK_NV_framebuffer_mixed_samples)) */ -#if (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_shading_rate_image)) || (defined(VK_EXT_shader_object) && defined(VK_NV_shading_rate_image)) - table->vkCmdSetShadingRateImageEnableNV = (PFN_vkCmdSetShadingRateImageEnableNV)load(context, "vkCmdSetShadingRateImageEnableNV"); -#endif /* (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_shading_rate_image)) || (defined(VK_EXT_shader_object) && defined(VK_NV_shading_rate_image)) */ -#if (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_representative_fragment_test)) || (defined(VK_EXT_shader_object) && defined(VK_NV_representative_fragment_test)) - table->vkCmdSetRepresentativeFragmentTestEnableNV = (PFN_vkCmdSetRepresentativeFragmentTestEnableNV)load(context, "vkCmdSetRepresentativeFragmentTestEnableNV"); -#endif /* (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_representative_fragment_test)) || (defined(VK_EXT_shader_object) && defined(VK_NV_representative_fragment_test)) */ -#if (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_coverage_reduction_mode)) || (defined(VK_EXT_shader_object) && defined(VK_NV_coverage_reduction_mode)) - table->vkCmdSetCoverageReductionModeNV = (PFN_vkCmdSetCoverageReductionModeNV)load(context, "vkCmdSetCoverageReductionModeNV"); -#endif /* (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_coverage_reduction_mode)) || (defined(VK_EXT_shader_object) && defined(VK_NV_coverage_reduction_mode)) */ -#if (defined(VK_EXT_full_screen_exclusive) && defined(VK_KHR_device_group)) || (defined(VK_EXT_full_screen_exclusive) && defined(VK_VERSION_1_1)) - table->vkGetDeviceGroupSurfacePresentModes2EXT = (PFN_vkGetDeviceGroupSurfacePresentModes2EXT)load(context, 
"vkGetDeviceGroupSurfacePresentModes2EXT"); -#endif /* (defined(VK_EXT_full_screen_exclusive) && defined(VK_KHR_device_group)) || (defined(VK_EXT_full_screen_exclusive) && defined(VK_VERSION_1_1)) */ -#if (defined(VK_EXT_host_image_copy)) || (defined(VK_EXT_image_compression_control)) - table->vkGetImageSubresourceLayout2EXT = (PFN_vkGetImageSubresourceLayout2EXT)load(context, "vkGetImageSubresourceLayout2EXT"); -#endif /* (defined(VK_EXT_host_image_copy)) || (defined(VK_EXT_image_compression_control)) */ -#if (defined(VK_EXT_shader_object)) || (defined(VK_EXT_vertex_input_dynamic_state)) - table->vkCmdSetVertexInputEXT = (PFN_vkCmdSetVertexInputEXT)load(context, "vkCmdSetVertexInputEXT"); -#endif /* (defined(VK_EXT_shader_object)) || (defined(VK_EXT_vertex_input_dynamic_state)) */ -#if (defined(VK_KHR_descriptor_update_template) && defined(VK_KHR_push_descriptor)) || (defined(VK_KHR_push_descriptor) && defined(VK_VERSION_1_1)) || (defined(VK_KHR_push_descriptor) && defined(VK_KHR_descriptor_update_template)) - table->vkCmdPushDescriptorSetWithTemplateKHR = (PFN_vkCmdPushDescriptorSetWithTemplateKHR)load(context, "vkCmdPushDescriptorSetWithTemplateKHR"); -#endif /* (defined(VK_KHR_descriptor_update_template) && defined(VK_KHR_push_descriptor)) || (defined(VK_KHR_push_descriptor) && defined(VK_VERSION_1_1)) || (defined(VK_KHR_push_descriptor) && defined(VK_KHR_descriptor_update_template)) */ -#if (defined(VK_KHR_device_group) && defined(VK_KHR_surface)) || (defined(VK_KHR_swapchain) && defined(VK_VERSION_1_1)) - table->vkGetDeviceGroupPresentCapabilitiesKHR = (PFN_vkGetDeviceGroupPresentCapabilitiesKHR)load(context, "vkGetDeviceGroupPresentCapabilitiesKHR"); - table->vkGetDeviceGroupSurfacePresentModesKHR = (PFN_vkGetDeviceGroupSurfacePresentModesKHR)load(context, "vkGetDeviceGroupSurfacePresentModesKHR"); -#endif /* (defined(VK_KHR_device_group) && defined(VK_KHR_surface)) || (defined(VK_KHR_swapchain) && defined(VK_VERSION_1_1)) */ -#if (defined(VK_KHR_device_group) && defined(VK_KHR_swapchain)) || (defined(VK_KHR_swapchain) && defined(VK_VERSION_1_1)) - table->vkAcquireNextImage2KHR = (PFN_vkAcquireNextImage2KHR)load(context, "vkAcquireNextImage2KHR"); -#endif /* (defined(VK_KHR_device_group) && defined(VK_KHR_swapchain)) || (defined(VK_KHR_swapchain) && defined(VK_VERSION_1_1)) */ - /* VOLK_GENERATE_LOAD_DEVICE_TABLE */ -} - -#ifdef __GNUC__ -#ifdef VOLK_DEFAULT_VISIBILITY -# pragma GCC visibility push(default) -#else -# pragma GCC visibility push(hidden) -#endif -#endif - -/* VOLK_GENERATE_PROTOTYPES_C */ -#if defined(VK_VERSION_1_0) -PFN_vkAllocateCommandBuffers vkAllocateCommandBuffers; -PFN_vkAllocateDescriptorSets vkAllocateDescriptorSets; -PFN_vkAllocateMemory vkAllocateMemory; -PFN_vkBeginCommandBuffer vkBeginCommandBuffer; -PFN_vkBindBufferMemory vkBindBufferMemory; -PFN_vkBindImageMemory vkBindImageMemory; -PFN_vkCmdBeginQuery vkCmdBeginQuery; -PFN_vkCmdBeginRenderPass vkCmdBeginRenderPass; -PFN_vkCmdBindDescriptorSets vkCmdBindDescriptorSets; -PFN_vkCmdBindIndexBuffer vkCmdBindIndexBuffer; -PFN_vkCmdBindPipeline vkCmdBindPipeline; -PFN_vkCmdBindVertexBuffers vkCmdBindVertexBuffers; -PFN_vkCmdBlitImage vkCmdBlitImage; -PFN_vkCmdClearAttachments vkCmdClearAttachments; -PFN_vkCmdClearColorImage vkCmdClearColorImage; -PFN_vkCmdClearDepthStencilImage vkCmdClearDepthStencilImage; -PFN_vkCmdCopyBuffer vkCmdCopyBuffer; -PFN_vkCmdCopyBufferToImage vkCmdCopyBufferToImage; -PFN_vkCmdCopyImage vkCmdCopyImage; -PFN_vkCmdCopyImageToBuffer vkCmdCopyImageToBuffer; 
-PFN_vkCmdCopyQueryPoolResults vkCmdCopyQueryPoolResults; -PFN_vkCmdDispatch vkCmdDispatch; -PFN_vkCmdDispatchIndirect vkCmdDispatchIndirect; -PFN_vkCmdDraw vkCmdDraw; -PFN_vkCmdDrawIndexed vkCmdDrawIndexed; -PFN_vkCmdDrawIndexedIndirect vkCmdDrawIndexedIndirect; -PFN_vkCmdDrawIndirect vkCmdDrawIndirect; -PFN_vkCmdEndQuery vkCmdEndQuery; -PFN_vkCmdEndRenderPass vkCmdEndRenderPass; -PFN_vkCmdExecuteCommands vkCmdExecuteCommands; -PFN_vkCmdFillBuffer vkCmdFillBuffer; -PFN_vkCmdNextSubpass vkCmdNextSubpass; -PFN_vkCmdPipelineBarrier vkCmdPipelineBarrier; -PFN_vkCmdPushConstants vkCmdPushConstants; -PFN_vkCmdResetEvent vkCmdResetEvent; -PFN_vkCmdResetQueryPool vkCmdResetQueryPool; -PFN_vkCmdResolveImage vkCmdResolveImage; -PFN_vkCmdSetBlendConstants vkCmdSetBlendConstants; -PFN_vkCmdSetDepthBias vkCmdSetDepthBias; -PFN_vkCmdSetDepthBounds vkCmdSetDepthBounds; -PFN_vkCmdSetEvent vkCmdSetEvent; -PFN_vkCmdSetLineWidth vkCmdSetLineWidth; -PFN_vkCmdSetScissor vkCmdSetScissor; -PFN_vkCmdSetStencilCompareMask vkCmdSetStencilCompareMask; -PFN_vkCmdSetStencilReference vkCmdSetStencilReference; -PFN_vkCmdSetStencilWriteMask vkCmdSetStencilWriteMask; -PFN_vkCmdSetViewport vkCmdSetViewport; -PFN_vkCmdUpdateBuffer vkCmdUpdateBuffer; -PFN_vkCmdWaitEvents vkCmdWaitEvents; -PFN_vkCmdWriteTimestamp vkCmdWriteTimestamp; -PFN_vkCreateBuffer vkCreateBuffer; -PFN_vkCreateBufferView vkCreateBufferView; -PFN_vkCreateCommandPool vkCreateCommandPool; -PFN_vkCreateComputePipelines vkCreateComputePipelines; -PFN_vkCreateDescriptorPool vkCreateDescriptorPool; -PFN_vkCreateDescriptorSetLayout vkCreateDescriptorSetLayout; -PFN_vkCreateDevice vkCreateDevice; -PFN_vkCreateEvent vkCreateEvent; -PFN_vkCreateFence vkCreateFence; -PFN_vkCreateFramebuffer vkCreateFramebuffer; -PFN_vkCreateGraphicsPipelines vkCreateGraphicsPipelines; -PFN_vkCreateImage vkCreateImage; -PFN_vkCreateImageView vkCreateImageView; -PFN_vkCreateInstance vkCreateInstance; -PFN_vkCreatePipelineCache vkCreatePipelineCache; -PFN_vkCreatePipelineLayout vkCreatePipelineLayout; -PFN_vkCreateQueryPool vkCreateQueryPool; -PFN_vkCreateRenderPass vkCreateRenderPass; -PFN_vkCreateSampler vkCreateSampler; -PFN_vkCreateSemaphore vkCreateSemaphore; -PFN_vkCreateShaderModule vkCreateShaderModule; -PFN_vkDestroyBuffer vkDestroyBuffer; -PFN_vkDestroyBufferView vkDestroyBufferView; -PFN_vkDestroyCommandPool vkDestroyCommandPool; -PFN_vkDestroyDescriptorPool vkDestroyDescriptorPool; -PFN_vkDestroyDescriptorSetLayout vkDestroyDescriptorSetLayout; -PFN_vkDestroyDevice vkDestroyDevice; -PFN_vkDestroyEvent vkDestroyEvent; -PFN_vkDestroyFence vkDestroyFence; -PFN_vkDestroyFramebuffer vkDestroyFramebuffer; -PFN_vkDestroyImage vkDestroyImage; -PFN_vkDestroyImageView vkDestroyImageView; -PFN_vkDestroyInstance vkDestroyInstance; -PFN_vkDestroyPipeline vkDestroyPipeline; -PFN_vkDestroyPipelineCache vkDestroyPipelineCache; -PFN_vkDestroyPipelineLayout vkDestroyPipelineLayout; -PFN_vkDestroyQueryPool vkDestroyQueryPool; -PFN_vkDestroyRenderPass vkDestroyRenderPass; -PFN_vkDestroySampler vkDestroySampler; -PFN_vkDestroySemaphore vkDestroySemaphore; -PFN_vkDestroyShaderModule vkDestroyShaderModule; -PFN_vkDeviceWaitIdle vkDeviceWaitIdle; -PFN_vkEndCommandBuffer vkEndCommandBuffer; -PFN_vkEnumerateDeviceExtensionProperties vkEnumerateDeviceExtensionProperties; -PFN_vkEnumerateDeviceLayerProperties vkEnumerateDeviceLayerProperties; -PFN_vkEnumerateInstanceExtensionProperties vkEnumerateInstanceExtensionProperties; -PFN_vkEnumerateInstanceLayerProperties vkEnumerateInstanceLayerProperties; 
-PFN_vkEnumeratePhysicalDevices vkEnumeratePhysicalDevices; -PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges; -PFN_vkFreeCommandBuffers vkFreeCommandBuffers; -PFN_vkFreeDescriptorSets vkFreeDescriptorSets; -PFN_vkFreeMemory vkFreeMemory; -PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements; -PFN_vkGetDeviceMemoryCommitment vkGetDeviceMemoryCommitment; -PFN_vkGetDeviceProcAddr vkGetDeviceProcAddr; -PFN_vkGetDeviceQueue vkGetDeviceQueue; -PFN_vkGetEventStatus vkGetEventStatus; -PFN_vkGetFenceStatus vkGetFenceStatus; -PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements; -PFN_vkGetImageSparseMemoryRequirements vkGetImageSparseMemoryRequirements; -PFN_vkGetImageSubresourceLayout vkGetImageSubresourceLayout; -PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr; -PFN_vkGetPhysicalDeviceFeatures vkGetPhysicalDeviceFeatures; -PFN_vkGetPhysicalDeviceFormatProperties vkGetPhysicalDeviceFormatProperties; -PFN_vkGetPhysicalDeviceImageFormatProperties vkGetPhysicalDeviceImageFormatProperties; -PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties; -PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties; -PFN_vkGetPhysicalDeviceQueueFamilyProperties vkGetPhysicalDeviceQueueFamilyProperties; -PFN_vkGetPhysicalDeviceSparseImageFormatProperties vkGetPhysicalDeviceSparseImageFormatProperties; -PFN_vkGetPipelineCacheData vkGetPipelineCacheData; -PFN_vkGetQueryPoolResults vkGetQueryPoolResults; -PFN_vkGetRenderAreaGranularity vkGetRenderAreaGranularity; -PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges; -PFN_vkMapMemory vkMapMemory; -PFN_vkMergePipelineCaches vkMergePipelineCaches; -PFN_vkQueueBindSparse vkQueueBindSparse; -PFN_vkQueueSubmit vkQueueSubmit; -PFN_vkQueueWaitIdle vkQueueWaitIdle; -PFN_vkResetCommandBuffer vkResetCommandBuffer; -PFN_vkResetCommandPool vkResetCommandPool; -PFN_vkResetDescriptorPool vkResetDescriptorPool; -PFN_vkResetEvent vkResetEvent; -PFN_vkResetFences vkResetFences; -PFN_vkSetEvent vkSetEvent; -PFN_vkUnmapMemory vkUnmapMemory; -PFN_vkUpdateDescriptorSets vkUpdateDescriptorSets; -PFN_vkWaitForFences vkWaitForFences; -#endif /* defined(VK_VERSION_1_0) */ -#if defined(VK_VERSION_1_1) -PFN_vkBindBufferMemory2 vkBindBufferMemory2; -PFN_vkBindImageMemory2 vkBindImageMemory2; -PFN_vkCmdDispatchBase vkCmdDispatchBase; -PFN_vkCmdSetDeviceMask vkCmdSetDeviceMask; -PFN_vkCreateDescriptorUpdateTemplate vkCreateDescriptorUpdateTemplate; -PFN_vkCreateSamplerYcbcrConversion vkCreateSamplerYcbcrConversion; -PFN_vkDestroyDescriptorUpdateTemplate vkDestroyDescriptorUpdateTemplate; -PFN_vkDestroySamplerYcbcrConversion vkDestroySamplerYcbcrConversion; -PFN_vkEnumerateInstanceVersion vkEnumerateInstanceVersion; -PFN_vkEnumeratePhysicalDeviceGroups vkEnumeratePhysicalDeviceGroups; -PFN_vkGetBufferMemoryRequirements2 vkGetBufferMemoryRequirements2; -PFN_vkGetDescriptorSetLayoutSupport vkGetDescriptorSetLayoutSupport; -PFN_vkGetDeviceGroupPeerMemoryFeatures vkGetDeviceGroupPeerMemoryFeatures; -PFN_vkGetDeviceQueue2 vkGetDeviceQueue2; -PFN_vkGetImageMemoryRequirements2 vkGetImageMemoryRequirements2; -PFN_vkGetImageSparseMemoryRequirements2 vkGetImageSparseMemoryRequirements2; -PFN_vkGetPhysicalDeviceExternalBufferProperties vkGetPhysicalDeviceExternalBufferProperties; -PFN_vkGetPhysicalDeviceExternalFenceProperties vkGetPhysicalDeviceExternalFenceProperties; -PFN_vkGetPhysicalDeviceExternalSemaphoreProperties vkGetPhysicalDeviceExternalSemaphoreProperties; -PFN_vkGetPhysicalDeviceFeatures2 vkGetPhysicalDeviceFeatures2; 
-PFN_vkGetPhysicalDeviceFormatProperties2 vkGetPhysicalDeviceFormatProperties2; -PFN_vkGetPhysicalDeviceImageFormatProperties2 vkGetPhysicalDeviceImageFormatProperties2; -PFN_vkGetPhysicalDeviceMemoryProperties2 vkGetPhysicalDeviceMemoryProperties2; -PFN_vkGetPhysicalDeviceProperties2 vkGetPhysicalDeviceProperties2; -PFN_vkGetPhysicalDeviceQueueFamilyProperties2 vkGetPhysicalDeviceQueueFamilyProperties2; -PFN_vkGetPhysicalDeviceSparseImageFormatProperties2 vkGetPhysicalDeviceSparseImageFormatProperties2; -PFN_vkTrimCommandPool vkTrimCommandPool; -PFN_vkUpdateDescriptorSetWithTemplate vkUpdateDescriptorSetWithTemplate; -#endif /* defined(VK_VERSION_1_1) */ -#if defined(VK_VERSION_1_2) -PFN_vkCmdBeginRenderPass2 vkCmdBeginRenderPass2; -PFN_vkCmdDrawIndexedIndirectCount vkCmdDrawIndexedIndirectCount; -PFN_vkCmdDrawIndirectCount vkCmdDrawIndirectCount; -PFN_vkCmdEndRenderPass2 vkCmdEndRenderPass2; -PFN_vkCmdNextSubpass2 vkCmdNextSubpass2; -PFN_vkCreateRenderPass2 vkCreateRenderPass2; -PFN_vkGetBufferDeviceAddress vkGetBufferDeviceAddress; -PFN_vkGetBufferOpaqueCaptureAddress vkGetBufferOpaqueCaptureAddress; -PFN_vkGetDeviceMemoryOpaqueCaptureAddress vkGetDeviceMemoryOpaqueCaptureAddress; -PFN_vkGetSemaphoreCounterValue vkGetSemaphoreCounterValue; -PFN_vkResetQueryPool vkResetQueryPool; -PFN_vkSignalSemaphore vkSignalSemaphore; -PFN_vkWaitSemaphores vkWaitSemaphores; -#endif /* defined(VK_VERSION_1_2) */ -#if defined(VK_VERSION_1_3) -PFN_vkCmdBeginRendering vkCmdBeginRendering; -PFN_vkCmdBindVertexBuffers2 vkCmdBindVertexBuffers2; -PFN_vkCmdBlitImage2 vkCmdBlitImage2; -PFN_vkCmdCopyBuffer2 vkCmdCopyBuffer2; -PFN_vkCmdCopyBufferToImage2 vkCmdCopyBufferToImage2; -PFN_vkCmdCopyImage2 vkCmdCopyImage2; -PFN_vkCmdCopyImageToBuffer2 vkCmdCopyImageToBuffer2; -PFN_vkCmdEndRendering vkCmdEndRendering; -PFN_vkCmdPipelineBarrier2 vkCmdPipelineBarrier2; -PFN_vkCmdResetEvent2 vkCmdResetEvent2; -PFN_vkCmdResolveImage2 vkCmdResolveImage2; -PFN_vkCmdSetCullMode vkCmdSetCullMode; -PFN_vkCmdSetDepthBiasEnable vkCmdSetDepthBiasEnable; -PFN_vkCmdSetDepthBoundsTestEnable vkCmdSetDepthBoundsTestEnable; -PFN_vkCmdSetDepthCompareOp vkCmdSetDepthCompareOp; -PFN_vkCmdSetDepthTestEnable vkCmdSetDepthTestEnable; -PFN_vkCmdSetDepthWriteEnable vkCmdSetDepthWriteEnable; -PFN_vkCmdSetEvent2 vkCmdSetEvent2; -PFN_vkCmdSetFrontFace vkCmdSetFrontFace; -PFN_vkCmdSetPrimitiveRestartEnable vkCmdSetPrimitiveRestartEnable; -PFN_vkCmdSetPrimitiveTopology vkCmdSetPrimitiveTopology; -PFN_vkCmdSetRasterizerDiscardEnable vkCmdSetRasterizerDiscardEnable; -PFN_vkCmdSetScissorWithCount vkCmdSetScissorWithCount; -PFN_vkCmdSetStencilOp vkCmdSetStencilOp; -PFN_vkCmdSetStencilTestEnable vkCmdSetStencilTestEnable; -PFN_vkCmdSetViewportWithCount vkCmdSetViewportWithCount; -PFN_vkCmdWaitEvents2 vkCmdWaitEvents2; -PFN_vkCmdWriteTimestamp2 vkCmdWriteTimestamp2; -PFN_vkCreatePrivateDataSlot vkCreatePrivateDataSlot; -PFN_vkDestroyPrivateDataSlot vkDestroyPrivateDataSlot; -PFN_vkGetDeviceBufferMemoryRequirements vkGetDeviceBufferMemoryRequirements; -PFN_vkGetDeviceImageMemoryRequirements vkGetDeviceImageMemoryRequirements; -PFN_vkGetDeviceImageSparseMemoryRequirements vkGetDeviceImageSparseMemoryRequirements; -PFN_vkGetPhysicalDeviceToolProperties vkGetPhysicalDeviceToolProperties; -PFN_vkGetPrivateData vkGetPrivateData; -PFN_vkQueueSubmit2 vkQueueSubmit2; -PFN_vkSetPrivateData vkSetPrivateData; -#endif /* defined(VK_VERSION_1_3) */ -#if defined(VK_AMDX_shader_enqueue) -PFN_vkCmdDispatchGraphAMDX vkCmdDispatchGraphAMDX; -PFN_vkCmdDispatchGraphIndirectAMDX 
vkCmdDispatchGraphIndirectAMDX; -PFN_vkCmdDispatchGraphIndirectCountAMDX vkCmdDispatchGraphIndirectCountAMDX; -PFN_vkCmdInitializeGraphScratchMemoryAMDX vkCmdInitializeGraphScratchMemoryAMDX; -PFN_vkCreateExecutionGraphPipelinesAMDX vkCreateExecutionGraphPipelinesAMDX; -PFN_vkGetExecutionGraphPipelineNodeIndexAMDX vkGetExecutionGraphPipelineNodeIndexAMDX; -PFN_vkGetExecutionGraphPipelineScratchSizeAMDX vkGetExecutionGraphPipelineScratchSizeAMDX; -#endif /* defined(VK_AMDX_shader_enqueue) */ -#if defined(VK_AMD_buffer_marker) -PFN_vkCmdWriteBufferMarkerAMD vkCmdWriteBufferMarkerAMD; -#endif /* defined(VK_AMD_buffer_marker) */ -#if defined(VK_AMD_display_native_hdr) -PFN_vkSetLocalDimmingAMD vkSetLocalDimmingAMD; -#endif /* defined(VK_AMD_display_native_hdr) */ -#if defined(VK_AMD_draw_indirect_count) -PFN_vkCmdDrawIndexedIndirectCountAMD vkCmdDrawIndexedIndirectCountAMD; -PFN_vkCmdDrawIndirectCountAMD vkCmdDrawIndirectCountAMD; -#endif /* defined(VK_AMD_draw_indirect_count) */ -#if defined(VK_AMD_shader_info) -PFN_vkGetShaderInfoAMD vkGetShaderInfoAMD; -#endif /* defined(VK_AMD_shader_info) */ -#if defined(VK_ANDROID_external_memory_android_hardware_buffer) -PFN_vkGetAndroidHardwareBufferPropertiesANDROID vkGetAndroidHardwareBufferPropertiesANDROID; -PFN_vkGetMemoryAndroidHardwareBufferANDROID vkGetMemoryAndroidHardwareBufferANDROID; -#endif /* defined(VK_ANDROID_external_memory_android_hardware_buffer) */ -#if defined(VK_EXT_acquire_drm_display) -PFN_vkAcquireDrmDisplayEXT vkAcquireDrmDisplayEXT; -PFN_vkGetDrmDisplayEXT vkGetDrmDisplayEXT; -#endif /* defined(VK_EXT_acquire_drm_display) */ -#if defined(VK_EXT_acquire_xlib_display) -PFN_vkAcquireXlibDisplayEXT vkAcquireXlibDisplayEXT; -PFN_vkGetRandROutputDisplayEXT vkGetRandROutputDisplayEXT; -#endif /* defined(VK_EXT_acquire_xlib_display) */ -#if defined(VK_EXT_attachment_feedback_loop_dynamic_state) -PFN_vkCmdSetAttachmentFeedbackLoopEnableEXT vkCmdSetAttachmentFeedbackLoopEnableEXT; -#endif /* defined(VK_EXT_attachment_feedback_loop_dynamic_state) */ -#if defined(VK_EXT_buffer_device_address) -PFN_vkGetBufferDeviceAddressEXT vkGetBufferDeviceAddressEXT; -#endif /* defined(VK_EXT_buffer_device_address) */ -#if defined(VK_EXT_calibrated_timestamps) -PFN_vkGetCalibratedTimestampsEXT vkGetCalibratedTimestampsEXT; -PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT vkGetPhysicalDeviceCalibrateableTimeDomainsEXT; -#endif /* defined(VK_EXT_calibrated_timestamps) */ -#if defined(VK_EXT_color_write_enable) -PFN_vkCmdSetColorWriteEnableEXT vkCmdSetColorWriteEnableEXT; -#endif /* defined(VK_EXT_color_write_enable) */ -#if defined(VK_EXT_conditional_rendering) -PFN_vkCmdBeginConditionalRenderingEXT vkCmdBeginConditionalRenderingEXT; -PFN_vkCmdEndConditionalRenderingEXT vkCmdEndConditionalRenderingEXT; -#endif /* defined(VK_EXT_conditional_rendering) */ -#if defined(VK_EXT_debug_marker) -PFN_vkCmdDebugMarkerBeginEXT vkCmdDebugMarkerBeginEXT; -PFN_vkCmdDebugMarkerEndEXT vkCmdDebugMarkerEndEXT; -PFN_vkCmdDebugMarkerInsertEXT vkCmdDebugMarkerInsertEXT; -PFN_vkDebugMarkerSetObjectNameEXT vkDebugMarkerSetObjectNameEXT; -PFN_vkDebugMarkerSetObjectTagEXT vkDebugMarkerSetObjectTagEXT; -#endif /* defined(VK_EXT_debug_marker) */ -#if defined(VK_EXT_debug_report) -PFN_vkCreateDebugReportCallbackEXT vkCreateDebugReportCallbackEXT; -PFN_vkDebugReportMessageEXT vkDebugReportMessageEXT; -PFN_vkDestroyDebugReportCallbackEXT vkDestroyDebugReportCallbackEXT; -#endif /* defined(VK_EXT_debug_report) */ -#if defined(VK_EXT_debug_utils) -PFN_vkCmdBeginDebugUtilsLabelEXT 
vkCmdBeginDebugUtilsLabelEXT; -PFN_vkCmdEndDebugUtilsLabelEXT vkCmdEndDebugUtilsLabelEXT; -PFN_vkCmdInsertDebugUtilsLabelEXT vkCmdInsertDebugUtilsLabelEXT; -PFN_vkCreateDebugUtilsMessengerEXT vkCreateDebugUtilsMessengerEXT; -PFN_vkDestroyDebugUtilsMessengerEXT vkDestroyDebugUtilsMessengerEXT; -PFN_vkQueueBeginDebugUtilsLabelEXT vkQueueBeginDebugUtilsLabelEXT; -PFN_vkQueueEndDebugUtilsLabelEXT vkQueueEndDebugUtilsLabelEXT; -PFN_vkQueueInsertDebugUtilsLabelEXT vkQueueInsertDebugUtilsLabelEXT; -PFN_vkSetDebugUtilsObjectNameEXT vkSetDebugUtilsObjectNameEXT; -PFN_vkSetDebugUtilsObjectTagEXT vkSetDebugUtilsObjectTagEXT; -PFN_vkSubmitDebugUtilsMessageEXT vkSubmitDebugUtilsMessageEXT; -#endif /* defined(VK_EXT_debug_utils) */ -#if defined(VK_EXT_depth_bias_control) -PFN_vkCmdSetDepthBias2EXT vkCmdSetDepthBias2EXT; -#endif /* defined(VK_EXT_depth_bias_control) */ -#if defined(VK_EXT_descriptor_buffer) -PFN_vkCmdBindDescriptorBufferEmbeddedSamplersEXT vkCmdBindDescriptorBufferEmbeddedSamplersEXT; -PFN_vkCmdBindDescriptorBuffersEXT vkCmdBindDescriptorBuffersEXT; -PFN_vkCmdSetDescriptorBufferOffsetsEXT vkCmdSetDescriptorBufferOffsetsEXT; -PFN_vkGetBufferOpaqueCaptureDescriptorDataEXT vkGetBufferOpaqueCaptureDescriptorDataEXT; -PFN_vkGetDescriptorEXT vkGetDescriptorEXT; -PFN_vkGetDescriptorSetLayoutBindingOffsetEXT vkGetDescriptorSetLayoutBindingOffsetEXT; -PFN_vkGetDescriptorSetLayoutSizeEXT vkGetDescriptorSetLayoutSizeEXT; -PFN_vkGetImageOpaqueCaptureDescriptorDataEXT vkGetImageOpaqueCaptureDescriptorDataEXT; -PFN_vkGetImageViewOpaqueCaptureDescriptorDataEXT vkGetImageViewOpaqueCaptureDescriptorDataEXT; -PFN_vkGetSamplerOpaqueCaptureDescriptorDataEXT vkGetSamplerOpaqueCaptureDescriptorDataEXT; -#endif /* defined(VK_EXT_descriptor_buffer) */ -#if defined(VK_EXT_descriptor_buffer) && (defined(VK_KHR_acceleration_structure) || defined(VK_NV_ray_tracing)) -PFN_vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT; -#endif /* defined(VK_EXT_descriptor_buffer) && (defined(VK_KHR_acceleration_structure) || defined(VK_NV_ray_tracing)) */ -#if defined(VK_EXT_device_fault) -PFN_vkGetDeviceFaultInfoEXT vkGetDeviceFaultInfoEXT; -#endif /* defined(VK_EXT_device_fault) */ -#if defined(VK_EXT_direct_mode_display) -PFN_vkReleaseDisplayEXT vkReleaseDisplayEXT; -#endif /* defined(VK_EXT_direct_mode_display) */ -#if defined(VK_EXT_directfb_surface) -PFN_vkCreateDirectFBSurfaceEXT vkCreateDirectFBSurfaceEXT; -PFN_vkGetPhysicalDeviceDirectFBPresentationSupportEXT vkGetPhysicalDeviceDirectFBPresentationSupportEXT; -#endif /* defined(VK_EXT_directfb_surface) */ -#if defined(VK_EXT_discard_rectangles) -PFN_vkCmdSetDiscardRectangleEXT vkCmdSetDiscardRectangleEXT; -#endif /* defined(VK_EXT_discard_rectangles) */ -#if defined(VK_EXT_discard_rectangles) && VK_EXT_DISCARD_RECTANGLES_SPEC_VERSION >= 2 -PFN_vkCmdSetDiscardRectangleEnableEXT vkCmdSetDiscardRectangleEnableEXT; -PFN_vkCmdSetDiscardRectangleModeEXT vkCmdSetDiscardRectangleModeEXT; -#endif /* defined(VK_EXT_discard_rectangles) && VK_EXT_DISCARD_RECTANGLES_SPEC_VERSION >= 2 */ -#if defined(VK_EXT_display_control) -PFN_vkDisplayPowerControlEXT vkDisplayPowerControlEXT; -PFN_vkGetSwapchainCounterEXT vkGetSwapchainCounterEXT; -PFN_vkRegisterDeviceEventEXT vkRegisterDeviceEventEXT; -PFN_vkRegisterDisplayEventEXT vkRegisterDisplayEventEXT; -#endif /* defined(VK_EXT_display_control) */ -#if defined(VK_EXT_display_surface_counter) -PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT 
vkGetPhysicalDeviceSurfaceCapabilities2EXT; -#endif /* defined(VK_EXT_display_surface_counter) */ -#if defined(VK_EXT_external_memory_host) -PFN_vkGetMemoryHostPointerPropertiesEXT vkGetMemoryHostPointerPropertiesEXT; -#endif /* defined(VK_EXT_external_memory_host) */ -#if defined(VK_EXT_full_screen_exclusive) -PFN_vkAcquireFullScreenExclusiveModeEXT vkAcquireFullScreenExclusiveModeEXT; -PFN_vkGetPhysicalDeviceSurfacePresentModes2EXT vkGetPhysicalDeviceSurfacePresentModes2EXT; -PFN_vkReleaseFullScreenExclusiveModeEXT vkReleaseFullScreenExclusiveModeEXT; -#endif /* defined(VK_EXT_full_screen_exclusive) */ -#if defined(VK_EXT_hdr_metadata) -PFN_vkSetHdrMetadataEXT vkSetHdrMetadataEXT; -#endif /* defined(VK_EXT_hdr_metadata) */ -#if defined(VK_EXT_headless_surface) -PFN_vkCreateHeadlessSurfaceEXT vkCreateHeadlessSurfaceEXT; -#endif /* defined(VK_EXT_headless_surface) */ -#if defined(VK_EXT_host_image_copy) -PFN_vkCopyImageToImageEXT vkCopyImageToImageEXT; -PFN_vkCopyImageToMemoryEXT vkCopyImageToMemoryEXT; -PFN_vkCopyMemoryToImageEXT vkCopyMemoryToImageEXT; -PFN_vkTransitionImageLayoutEXT vkTransitionImageLayoutEXT; -#endif /* defined(VK_EXT_host_image_copy) */ -#if defined(VK_EXT_host_query_reset) -PFN_vkResetQueryPoolEXT vkResetQueryPoolEXT; -#endif /* defined(VK_EXT_host_query_reset) */ -#if defined(VK_EXT_image_drm_format_modifier) -PFN_vkGetImageDrmFormatModifierPropertiesEXT vkGetImageDrmFormatModifierPropertiesEXT; -#endif /* defined(VK_EXT_image_drm_format_modifier) */ -#if defined(VK_EXT_line_rasterization) -PFN_vkCmdSetLineStippleEXT vkCmdSetLineStippleEXT; -#endif /* defined(VK_EXT_line_rasterization) */ -#if defined(VK_EXT_mesh_shader) -PFN_vkCmdDrawMeshTasksEXT vkCmdDrawMeshTasksEXT; -PFN_vkCmdDrawMeshTasksIndirectCountEXT vkCmdDrawMeshTasksIndirectCountEXT; -PFN_vkCmdDrawMeshTasksIndirectEXT vkCmdDrawMeshTasksIndirectEXT; -#endif /* defined(VK_EXT_mesh_shader) */ -#if defined(VK_EXT_metal_objects) -PFN_vkExportMetalObjectsEXT vkExportMetalObjectsEXT; -#endif /* defined(VK_EXT_metal_objects) */ -#if defined(VK_EXT_metal_surface) -PFN_vkCreateMetalSurfaceEXT vkCreateMetalSurfaceEXT; -#endif /* defined(VK_EXT_metal_surface) */ -#if defined(VK_EXT_multi_draw) -PFN_vkCmdDrawMultiEXT vkCmdDrawMultiEXT; -PFN_vkCmdDrawMultiIndexedEXT vkCmdDrawMultiIndexedEXT; -#endif /* defined(VK_EXT_multi_draw) */ -#if defined(VK_EXT_opacity_micromap) -PFN_vkBuildMicromapsEXT vkBuildMicromapsEXT; -PFN_vkCmdBuildMicromapsEXT vkCmdBuildMicromapsEXT; -PFN_vkCmdCopyMemoryToMicromapEXT vkCmdCopyMemoryToMicromapEXT; -PFN_vkCmdCopyMicromapEXT vkCmdCopyMicromapEXT; -PFN_vkCmdCopyMicromapToMemoryEXT vkCmdCopyMicromapToMemoryEXT; -PFN_vkCmdWriteMicromapsPropertiesEXT vkCmdWriteMicromapsPropertiesEXT; -PFN_vkCopyMemoryToMicromapEXT vkCopyMemoryToMicromapEXT; -PFN_vkCopyMicromapEXT vkCopyMicromapEXT; -PFN_vkCopyMicromapToMemoryEXT vkCopyMicromapToMemoryEXT; -PFN_vkCreateMicromapEXT vkCreateMicromapEXT; -PFN_vkDestroyMicromapEXT vkDestroyMicromapEXT; -PFN_vkGetDeviceMicromapCompatibilityEXT vkGetDeviceMicromapCompatibilityEXT; -PFN_vkGetMicromapBuildSizesEXT vkGetMicromapBuildSizesEXT; -PFN_vkWriteMicromapsPropertiesEXT vkWriteMicromapsPropertiesEXT; -#endif /* defined(VK_EXT_opacity_micromap) */ -#if defined(VK_EXT_pageable_device_local_memory) -PFN_vkSetDeviceMemoryPriorityEXT vkSetDeviceMemoryPriorityEXT; -#endif /* defined(VK_EXT_pageable_device_local_memory) */ -#if defined(VK_EXT_pipeline_properties) -PFN_vkGetPipelinePropertiesEXT vkGetPipelinePropertiesEXT; -#endif /* defined(VK_EXT_pipeline_properties) */ 
-#if defined(VK_EXT_private_data) -PFN_vkCreatePrivateDataSlotEXT vkCreatePrivateDataSlotEXT; -PFN_vkDestroyPrivateDataSlotEXT vkDestroyPrivateDataSlotEXT; -PFN_vkGetPrivateDataEXT vkGetPrivateDataEXT; -PFN_vkSetPrivateDataEXT vkSetPrivateDataEXT; -#endif /* defined(VK_EXT_private_data) */ -#if defined(VK_EXT_sample_locations) -PFN_vkCmdSetSampleLocationsEXT vkCmdSetSampleLocationsEXT; -PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT vkGetPhysicalDeviceMultisamplePropertiesEXT; -#endif /* defined(VK_EXT_sample_locations) */ -#if defined(VK_EXT_shader_module_identifier) -PFN_vkGetShaderModuleCreateInfoIdentifierEXT vkGetShaderModuleCreateInfoIdentifierEXT; -PFN_vkGetShaderModuleIdentifierEXT vkGetShaderModuleIdentifierEXT; -#endif /* defined(VK_EXT_shader_module_identifier) */ -#if defined(VK_EXT_shader_object) -PFN_vkCmdBindShadersEXT vkCmdBindShadersEXT; -PFN_vkCreateShadersEXT vkCreateShadersEXT; -PFN_vkDestroyShaderEXT vkDestroyShaderEXT; -PFN_vkGetShaderBinaryDataEXT vkGetShaderBinaryDataEXT; -#endif /* defined(VK_EXT_shader_object) */ -#if defined(VK_EXT_swapchain_maintenance1) -PFN_vkReleaseSwapchainImagesEXT vkReleaseSwapchainImagesEXT; -#endif /* defined(VK_EXT_swapchain_maintenance1) */ -#if defined(VK_EXT_tooling_info) -PFN_vkGetPhysicalDeviceToolPropertiesEXT vkGetPhysicalDeviceToolPropertiesEXT; -#endif /* defined(VK_EXT_tooling_info) */ -#if defined(VK_EXT_transform_feedback) -PFN_vkCmdBeginQueryIndexedEXT vkCmdBeginQueryIndexedEXT; -PFN_vkCmdBeginTransformFeedbackEXT vkCmdBeginTransformFeedbackEXT; -PFN_vkCmdBindTransformFeedbackBuffersEXT vkCmdBindTransformFeedbackBuffersEXT; -PFN_vkCmdDrawIndirectByteCountEXT vkCmdDrawIndirectByteCountEXT; -PFN_vkCmdEndQueryIndexedEXT vkCmdEndQueryIndexedEXT; -PFN_vkCmdEndTransformFeedbackEXT vkCmdEndTransformFeedbackEXT; -#endif /* defined(VK_EXT_transform_feedback) */ -#if defined(VK_EXT_validation_cache) -PFN_vkCreateValidationCacheEXT vkCreateValidationCacheEXT; -PFN_vkDestroyValidationCacheEXT vkDestroyValidationCacheEXT; -PFN_vkGetValidationCacheDataEXT vkGetValidationCacheDataEXT; -PFN_vkMergeValidationCachesEXT vkMergeValidationCachesEXT; -#endif /* defined(VK_EXT_validation_cache) */ -#if defined(VK_FUCHSIA_buffer_collection) -PFN_vkCreateBufferCollectionFUCHSIA vkCreateBufferCollectionFUCHSIA; -PFN_vkDestroyBufferCollectionFUCHSIA vkDestroyBufferCollectionFUCHSIA; -PFN_vkGetBufferCollectionPropertiesFUCHSIA vkGetBufferCollectionPropertiesFUCHSIA; -PFN_vkSetBufferCollectionBufferConstraintsFUCHSIA vkSetBufferCollectionBufferConstraintsFUCHSIA; -PFN_vkSetBufferCollectionImageConstraintsFUCHSIA vkSetBufferCollectionImageConstraintsFUCHSIA; -#endif /* defined(VK_FUCHSIA_buffer_collection) */ -#if defined(VK_FUCHSIA_external_memory) -PFN_vkGetMemoryZirconHandleFUCHSIA vkGetMemoryZirconHandleFUCHSIA; -PFN_vkGetMemoryZirconHandlePropertiesFUCHSIA vkGetMemoryZirconHandlePropertiesFUCHSIA; -#endif /* defined(VK_FUCHSIA_external_memory) */ -#if defined(VK_FUCHSIA_external_semaphore) -PFN_vkGetSemaphoreZirconHandleFUCHSIA vkGetSemaphoreZirconHandleFUCHSIA; -PFN_vkImportSemaphoreZirconHandleFUCHSIA vkImportSemaphoreZirconHandleFUCHSIA; -#endif /* defined(VK_FUCHSIA_external_semaphore) */ -#if defined(VK_FUCHSIA_imagepipe_surface) -PFN_vkCreateImagePipeSurfaceFUCHSIA vkCreateImagePipeSurfaceFUCHSIA; -#endif /* defined(VK_FUCHSIA_imagepipe_surface) */ -#if defined(VK_GGP_stream_descriptor_surface) -PFN_vkCreateStreamDescriptorSurfaceGGP vkCreateStreamDescriptorSurfaceGGP; -#endif /* defined(VK_GGP_stream_descriptor_surface) */ -#if 
defined(VK_GOOGLE_display_timing) -PFN_vkGetPastPresentationTimingGOOGLE vkGetPastPresentationTimingGOOGLE; -PFN_vkGetRefreshCycleDurationGOOGLE vkGetRefreshCycleDurationGOOGLE; -#endif /* defined(VK_GOOGLE_display_timing) */ -#if defined(VK_HUAWEI_cluster_culling_shader) -PFN_vkCmdDrawClusterHUAWEI vkCmdDrawClusterHUAWEI; -PFN_vkCmdDrawClusterIndirectHUAWEI vkCmdDrawClusterIndirectHUAWEI; -#endif /* defined(VK_HUAWEI_cluster_culling_shader) */ -#if defined(VK_HUAWEI_invocation_mask) -PFN_vkCmdBindInvocationMaskHUAWEI vkCmdBindInvocationMaskHUAWEI; -#endif /* defined(VK_HUAWEI_invocation_mask) */ -#if defined(VK_HUAWEI_subpass_shading) -PFN_vkCmdSubpassShadingHUAWEI vkCmdSubpassShadingHUAWEI; -PFN_vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI; -#endif /* defined(VK_HUAWEI_subpass_shading) */ -#if defined(VK_INTEL_performance_query) -PFN_vkAcquirePerformanceConfigurationINTEL vkAcquirePerformanceConfigurationINTEL; -PFN_vkCmdSetPerformanceMarkerINTEL vkCmdSetPerformanceMarkerINTEL; -PFN_vkCmdSetPerformanceOverrideINTEL vkCmdSetPerformanceOverrideINTEL; -PFN_vkCmdSetPerformanceStreamMarkerINTEL vkCmdSetPerformanceStreamMarkerINTEL; -PFN_vkGetPerformanceParameterINTEL vkGetPerformanceParameterINTEL; -PFN_vkInitializePerformanceApiINTEL vkInitializePerformanceApiINTEL; -PFN_vkQueueSetPerformanceConfigurationINTEL vkQueueSetPerformanceConfigurationINTEL; -PFN_vkReleasePerformanceConfigurationINTEL vkReleasePerformanceConfigurationINTEL; -PFN_vkUninitializePerformanceApiINTEL vkUninitializePerformanceApiINTEL; -#endif /* defined(VK_INTEL_performance_query) */ -#if defined(VK_KHR_acceleration_structure) -PFN_vkBuildAccelerationStructuresKHR vkBuildAccelerationStructuresKHR; -PFN_vkCmdBuildAccelerationStructuresIndirectKHR vkCmdBuildAccelerationStructuresIndirectKHR; -PFN_vkCmdBuildAccelerationStructuresKHR vkCmdBuildAccelerationStructuresKHR; -PFN_vkCmdCopyAccelerationStructureKHR vkCmdCopyAccelerationStructureKHR; -PFN_vkCmdCopyAccelerationStructureToMemoryKHR vkCmdCopyAccelerationStructureToMemoryKHR; -PFN_vkCmdCopyMemoryToAccelerationStructureKHR vkCmdCopyMemoryToAccelerationStructureKHR; -PFN_vkCmdWriteAccelerationStructuresPropertiesKHR vkCmdWriteAccelerationStructuresPropertiesKHR; -PFN_vkCopyAccelerationStructureKHR vkCopyAccelerationStructureKHR; -PFN_vkCopyAccelerationStructureToMemoryKHR vkCopyAccelerationStructureToMemoryKHR; -PFN_vkCopyMemoryToAccelerationStructureKHR vkCopyMemoryToAccelerationStructureKHR; -PFN_vkCreateAccelerationStructureKHR vkCreateAccelerationStructureKHR; -PFN_vkDestroyAccelerationStructureKHR vkDestroyAccelerationStructureKHR; -PFN_vkGetAccelerationStructureBuildSizesKHR vkGetAccelerationStructureBuildSizesKHR; -PFN_vkGetAccelerationStructureDeviceAddressKHR vkGetAccelerationStructureDeviceAddressKHR; -PFN_vkGetDeviceAccelerationStructureCompatibilityKHR vkGetDeviceAccelerationStructureCompatibilityKHR; -PFN_vkWriteAccelerationStructuresPropertiesKHR vkWriteAccelerationStructuresPropertiesKHR; -#endif /* defined(VK_KHR_acceleration_structure) */ -#if defined(VK_KHR_android_surface) -PFN_vkCreateAndroidSurfaceKHR vkCreateAndroidSurfaceKHR; -#endif /* defined(VK_KHR_android_surface) */ -#if defined(VK_KHR_bind_memory2) -PFN_vkBindBufferMemory2KHR vkBindBufferMemory2KHR; -PFN_vkBindImageMemory2KHR vkBindImageMemory2KHR; -#endif /* defined(VK_KHR_bind_memory2) */ -#if defined(VK_KHR_buffer_device_address) -PFN_vkGetBufferDeviceAddressKHR vkGetBufferDeviceAddressKHR; -PFN_vkGetBufferOpaqueCaptureAddressKHR 
vkGetBufferOpaqueCaptureAddressKHR; -PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR vkGetDeviceMemoryOpaqueCaptureAddressKHR; -#endif /* defined(VK_KHR_buffer_device_address) */ -#if defined(VK_KHR_cooperative_matrix) -PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR; -#endif /* defined(VK_KHR_cooperative_matrix) */ -#if defined(VK_KHR_copy_commands2) -PFN_vkCmdBlitImage2KHR vkCmdBlitImage2KHR; -PFN_vkCmdCopyBuffer2KHR vkCmdCopyBuffer2KHR; -PFN_vkCmdCopyBufferToImage2KHR vkCmdCopyBufferToImage2KHR; -PFN_vkCmdCopyImage2KHR vkCmdCopyImage2KHR; -PFN_vkCmdCopyImageToBuffer2KHR vkCmdCopyImageToBuffer2KHR; -PFN_vkCmdResolveImage2KHR vkCmdResolveImage2KHR; -#endif /* defined(VK_KHR_copy_commands2) */ -#if defined(VK_KHR_create_renderpass2) -PFN_vkCmdBeginRenderPass2KHR vkCmdBeginRenderPass2KHR; -PFN_vkCmdEndRenderPass2KHR vkCmdEndRenderPass2KHR; -PFN_vkCmdNextSubpass2KHR vkCmdNextSubpass2KHR; -PFN_vkCreateRenderPass2KHR vkCreateRenderPass2KHR; -#endif /* defined(VK_KHR_create_renderpass2) */ -#if defined(VK_KHR_deferred_host_operations) -PFN_vkCreateDeferredOperationKHR vkCreateDeferredOperationKHR; -PFN_vkDeferredOperationJoinKHR vkDeferredOperationJoinKHR; -PFN_vkDestroyDeferredOperationKHR vkDestroyDeferredOperationKHR; -PFN_vkGetDeferredOperationMaxConcurrencyKHR vkGetDeferredOperationMaxConcurrencyKHR; -PFN_vkGetDeferredOperationResultKHR vkGetDeferredOperationResultKHR; -#endif /* defined(VK_KHR_deferred_host_operations) */ -#if defined(VK_KHR_descriptor_update_template) -PFN_vkCreateDescriptorUpdateTemplateKHR vkCreateDescriptorUpdateTemplateKHR; -PFN_vkDestroyDescriptorUpdateTemplateKHR vkDestroyDescriptorUpdateTemplateKHR; -PFN_vkUpdateDescriptorSetWithTemplateKHR vkUpdateDescriptorSetWithTemplateKHR; -#endif /* defined(VK_KHR_descriptor_update_template) */ -#if defined(VK_KHR_device_group) -PFN_vkCmdDispatchBaseKHR vkCmdDispatchBaseKHR; -PFN_vkCmdSetDeviceMaskKHR vkCmdSetDeviceMaskKHR; -PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR vkGetDeviceGroupPeerMemoryFeaturesKHR; -#endif /* defined(VK_KHR_device_group) */ -#if defined(VK_KHR_device_group_creation) -PFN_vkEnumeratePhysicalDeviceGroupsKHR vkEnumeratePhysicalDeviceGroupsKHR; -#endif /* defined(VK_KHR_device_group_creation) */ -#if defined(VK_KHR_display) -PFN_vkCreateDisplayModeKHR vkCreateDisplayModeKHR; -PFN_vkCreateDisplayPlaneSurfaceKHR vkCreateDisplayPlaneSurfaceKHR; -PFN_vkGetDisplayModePropertiesKHR vkGetDisplayModePropertiesKHR; -PFN_vkGetDisplayPlaneCapabilitiesKHR vkGetDisplayPlaneCapabilitiesKHR; -PFN_vkGetDisplayPlaneSupportedDisplaysKHR vkGetDisplayPlaneSupportedDisplaysKHR; -PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR vkGetPhysicalDeviceDisplayPlanePropertiesKHR; -PFN_vkGetPhysicalDeviceDisplayPropertiesKHR vkGetPhysicalDeviceDisplayPropertiesKHR; -#endif /* defined(VK_KHR_display) */ -#if defined(VK_KHR_display_swapchain) -PFN_vkCreateSharedSwapchainsKHR vkCreateSharedSwapchainsKHR; -#endif /* defined(VK_KHR_display_swapchain) */ -#if defined(VK_KHR_draw_indirect_count) -PFN_vkCmdDrawIndexedIndirectCountKHR vkCmdDrawIndexedIndirectCountKHR; -PFN_vkCmdDrawIndirectCountKHR vkCmdDrawIndirectCountKHR; -#endif /* defined(VK_KHR_draw_indirect_count) */ -#if defined(VK_KHR_dynamic_rendering) -PFN_vkCmdBeginRenderingKHR vkCmdBeginRenderingKHR; -PFN_vkCmdEndRenderingKHR vkCmdEndRenderingKHR; -#endif /* defined(VK_KHR_dynamic_rendering) */ -#if defined(VK_KHR_external_fence_capabilities) -PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR 
vkGetPhysicalDeviceExternalFencePropertiesKHR; -#endif /* defined(VK_KHR_external_fence_capabilities) */ -#if defined(VK_KHR_external_fence_fd) -PFN_vkGetFenceFdKHR vkGetFenceFdKHR; -PFN_vkImportFenceFdKHR vkImportFenceFdKHR; -#endif /* defined(VK_KHR_external_fence_fd) */ -#if defined(VK_KHR_external_fence_win32) -PFN_vkGetFenceWin32HandleKHR vkGetFenceWin32HandleKHR; -PFN_vkImportFenceWin32HandleKHR vkImportFenceWin32HandleKHR; -#endif /* defined(VK_KHR_external_fence_win32) */ -#if defined(VK_KHR_external_memory_capabilities) -PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR vkGetPhysicalDeviceExternalBufferPropertiesKHR; -#endif /* defined(VK_KHR_external_memory_capabilities) */ -#if defined(VK_KHR_external_memory_fd) -PFN_vkGetMemoryFdKHR vkGetMemoryFdKHR; -PFN_vkGetMemoryFdPropertiesKHR vkGetMemoryFdPropertiesKHR; -#endif /* defined(VK_KHR_external_memory_fd) */ -#if defined(VK_KHR_external_memory_win32) -PFN_vkGetMemoryWin32HandleKHR vkGetMemoryWin32HandleKHR; -PFN_vkGetMemoryWin32HandlePropertiesKHR vkGetMemoryWin32HandlePropertiesKHR; -#endif /* defined(VK_KHR_external_memory_win32) */ -#if defined(VK_KHR_external_semaphore_capabilities) -PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR vkGetPhysicalDeviceExternalSemaphorePropertiesKHR; -#endif /* defined(VK_KHR_external_semaphore_capabilities) */ -#if defined(VK_KHR_external_semaphore_fd) -PFN_vkGetSemaphoreFdKHR vkGetSemaphoreFdKHR; -PFN_vkImportSemaphoreFdKHR vkImportSemaphoreFdKHR; -#endif /* defined(VK_KHR_external_semaphore_fd) */ -#if defined(VK_KHR_external_semaphore_win32) -PFN_vkGetSemaphoreWin32HandleKHR vkGetSemaphoreWin32HandleKHR; -PFN_vkImportSemaphoreWin32HandleKHR vkImportSemaphoreWin32HandleKHR; -#endif /* defined(VK_KHR_external_semaphore_win32) */ -#if defined(VK_KHR_fragment_shading_rate) -PFN_vkCmdSetFragmentShadingRateKHR vkCmdSetFragmentShadingRateKHR; -PFN_vkGetPhysicalDeviceFragmentShadingRatesKHR vkGetPhysicalDeviceFragmentShadingRatesKHR; -#endif /* defined(VK_KHR_fragment_shading_rate) */ -#if defined(VK_KHR_get_display_properties2) -PFN_vkGetDisplayModeProperties2KHR vkGetDisplayModeProperties2KHR; -PFN_vkGetDisplayPlaneCapabilities2KHR vkGetDisplayPlaneCapabilities2KHR; -PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR vkGetPhysicalDeviceDisplayPlaneProperties2KHR; -PFN_vkGetPhysicalDeviceDisplayProperties2KHR vkGetPhysicalDeviceDisplayProperties2KHR; -#endif /* defined(VK_KHR_get_display_properties2) */ -#if defined(VK_KHR_get_memory_requirements2) -PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR; -PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR; -PFN_vkGetImageSparseMemoryRequirements2KHR vkGetImageSparseMemoryRequirements2KHR; -#endif /* defined(VK_KHR_get_memory_requirements2) */ -#if defined(VK_KHR_get_physical_device_properties2) -PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR; -PFN_vkGetPhysicalDeviceFormatProperties2KHR vkGetPhysicalDeviceFormatProperties2KHR; -PFN_vkGetPhysicalDeviceImageFormatProperties2KHR vkGetPhysicalDeviceImageFormatProperties2KHR; -PFN_vkGetPhysicalDeviceMemoryProperties2KHR vkGetPhysicalDeviceMemoryProperties2KHR; -PFN_vkGetPhysicalDeviceProperties2KHR vkGetPhysicalDeviceProperties2KHR; -PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR vkGetPhysicalDeviceQueueFamilyProperties2KHR; -PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR vkGetPhysicalDeviceSparseImageFormatProperties2KHR; -#endif /* defined(VK_KHR_get_physical_device_properties2) */ -#if defined(VK_KHR_get_surface_capabilities2) 
-PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR vkGetPhysicalDeviceSurfaceCapabilities2KHR; -PFN_vkGetPhysicalDeviceSurfaceFormats2KHR vkGetPhysicalDeviceSurfaceFormats2KHR; -#endif /* defined(VK_KHR_get_surface_capabilities2) */ -#if defined(VK_KHR_maintenance1) -PFN_vkTrimCommandPoolKHR vkTrimCommandPoolKHR; -#endif /* defined(VK_KHR_maintenance1) */ -#if defined(VK_KHR_maintenance3) -PFN_vkGetDescriptorSetLayoutSupportKHR vkGetDescriptorSetLayoutSupportKHR; -#endif /* defined(VK_KHR_maintenance3) */ -#if defined(VK_KHR_maintenance4) -PFN_vkGetDeviceBufferMemoryRequirementsKHR vkGetDeviceBufferMemoryRequirementsKHR; -PFN_vkGetDeviceImageMemoryRequirementsKHR vkGetDeviceImageMemoryRequirementsKHR; -PFN_vkGetDeviceImageSparseMemoryRequirementsKHR vkGetDeviceImageSparseMemoryRequirementsKHR; -#endif /* defined(VK_KHR_maintenance4) */ -#if defined(VK_KHR_maintenance5) -PFN_vkCmdBindIndexBuffer2KHR vkCmdBindIndexBuffer2KHR; -PFN_vkGetDeviceImageSubresourceLayoutKHR vkGetDeviceImageSubresourceLayoutKHR; -PFN_vkGetImageSubresourceLayout2KHR vkGetImageSubresourceLayout2KHR; -PFN_vkGetRenderingAreaGranularityKHR vkGetRenderingAreaGranularityKHR; -#endif /* defined(VK_KHR_maintenance5) */ -#if defined(VK_KHR_map_memory2) -PFN_vkMapMemory2KHR vkMapMemory2KHR; -PFN_vkUnmapMemory2KHR vkUnmapMemory2KHR; -#endif /* defined(VK_KHR_map_memory2) */ -#if defined(VK_KHR_performance_query) -PFN_vkAcquireProfilingLockKHR vkAcquireProfilingLockKHR; -PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR; -PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR; -PFN_vkReleaseProfilingLockKHR vkReleaseProfilingLockKHR; -#endif /* defined(VK_KHR_performance_query) */ -#if defined(VK_KHR_pipeline_executable_properties) -PFN_vkGetPipelineExecutableInternalRepresentationsKHR vkGetPipelineExecutableInternalRepresentationsKHR; -PFN_vkGetPipelineExecutablePropertiesKHR vkGetPipelineExecutablePropertiesKHR; -PFN_vkGetPipelineExecutableStatisticsKHR vkGetPipelineExecutableStatisticsKHR; -#endif /* defined(VK_KHR_pipeline_executable_properties) */ -#if defined(VK_KHR_present_wait) -PFN_vkWaitForPresentKHR vkWaitForPresentKHR; -#endif /* defined(VK_KHR_present_wait) */ -#if defined(VK_KHR_push_descriptor) -PFN_vkCmdPushDescriptorSetKHR vkCmdPushDescriptorSetKHR; -#endif /* defined(VK_KHR_push_descriptor) */ -#if defined(VK_KHR_ray_tracing_maintenance1) && defined(VK_KHR_ray_tracing_pipeline) -PFN_vkCmdTraceRaysIndirect2KHR vkCmdTraceRaysIndirect2KHR; -#endif /* defined(VK_KHR_ray_tracing_maintenance1) && defined(VK_KHR_ray_tracing_pipeline) */ -#if defined(VK_KHR_ray_tracing_pipeline) -PFN_vkCmdSetRayTracingPipelineStackSizeKHR vkCmdSetRayTracingPipelineStackSizeKHR; -PFN_vkCmdTraceRaysIndirectKHR vkCmdTraceRaysIndirectKHR; -PFN_vkCmdTraceRaysKHR vkCmdTraceRaysKHR; -PFN_vkCreateRayTracingPipelinesKHR vkCreateRayTracingPipelinesKHR; -PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR vkGetRayTracingCaptureReplayShaderGroupHandlesKHR; -PFN_vkGetRayTracingShaderGroupHandlesKHR vkGetRayTracingShaderGroupHandlesKHR; -PFN_vkGetRayTracingShaderGroupStackSizeKHR vkGetRayTracingShaderGroupStackSizeKHR; -#endif /* defined(VK_KHR_ray_tracing_pipeline) */ -#if defined(VK_KHR_sampler_ycbcr_conversion) -PFN_vkCreateSamplerYcbcrConversionKHR vkCreateSamplerYcbcrConversionKHR; -PFN_vkDestroySamplerYcbcrConversionKHR vkDestroySamplerYcbcrConversionKHR; -#endif /* 
defined(VK_KHR_sampler_ycbcr_conversion) */ -#if defined(VK_KHR_shared_presentable_image) -PFN_vkGetSwapchainStatusKHR vkGetSwapchainStatusKHR; -#endif /* defined(VK_KHR_shared_presentable_image) */ -#if defined(VK_KHR_surface) -PFN_vkDestroySurfaceKHR vkDestroySurfaceKHR; -PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR vkGetPhysicalDeviceSurfaceCapabilitiesKHR; -PFN_vkGetPhysicalDeviceSurfaceFormatsKHR vkGetPhysicalDeviceSurfaceFormatsKHR; -PFN_vkGetPhysicalDeviceSurfacePresentModesKHR vkGetPhysicalDeviceSurfacePresentModesKHR; -PFN_vkGetPhysicalDeviceSurfaceSupportKHR vkGetPhysicalDeviceSurfaceSupportKHR; -#endif /* defined(VK_KHR_surface) */ -#if defined(VK_KHR_swapchain) -PFN_vkAcquireNextImageKHR vkAcquireNextImageKHR; -PFN_vkCreateSwapchainKHR vkCreateSwapchainKHR; -PFN_vkDestroySwapchainKHR vkDestroySwapchainKHR; -PFN_vkGetSwapchainImagesKHR vkGetSwapchainImagesKHR; -PFN_vkQueuePresentKHR vkQueuePresentKHR; -#endif /* defined(VK_KHR_swapchain) */ -#if defined(VK_KHR_synchronization2) -PFN_vkCmdPipelineBarrier2KHR vkCmdPipelineBarrier2KHR; -PFN_vkCmdResetEvent2KHR vkCmdResetEvent2KHR; -PFN_vkCmdSetEvent2KHR vkCmdSetEvent2KHR; -PFN_vkCmdWaitEvents2KHR vkCmdWaitEvents2KHR; -PFN_vkCmdWriteTimestamp2KHR vkCmdWriteTimestamp2KHR; -PFN_vkQueueSubmit2KHR vkQueueSubmit2KHR; -#endif /* defined(VK_KHR_synchronization2) */ -#if defined(VK_KHR_synchronization2) && defined(VK_AMD_buffer_marker) -PFN_vkCmdWriteBufferMarker2AMD vkCmdWriteBufferMarker2AMD; -#endif /* defined(VK_KHR_synchronization2) && defined(VK_AMD_buffer_marker) */ -#if defined(VK_KHR_synchronization2) && defined(VK_NV_device_diagnostic_checkpoints) -PFN_vkGetQueueCheckpointData2NV vkGetQueueCheckpointData2NV; -#endif /* defined(VK_KHR_synchronization2) && defined(VK_NV_device_diagnostic_checkpoints) */ -#if defined(VK_KHR_timeline_semaphore) -PFN_vkGetSemaphoreCounterValueKHR vkGetSemaphoreCounterValueKHR; -PFN_vkSignalSemaphoreKHR vkSignalSemaphoreKHR; -PFN_vkWaitSemaphoresKHR vkWaitSemaphoresKHR; -#endif /* defined(VK_KHR_timeline_semaphore) */ -#if defined(VK_KHR_video_decode_queue) -PFN_vkCmdDecodeVideoKHR vkCmdDecodeVideoKHR; -#endif /* defined(VK_KHR_video_decode_queue) */ -#if defined(VK_KHR_video_encode_queue) -PFN_vkCmdEncodeVideoKHR vkCmdEncodeVideoKHR; -PFN_vkGetEncodedVideoSessionParametersKHR vkGetEncodedVideoSessionParametersKHR; -PFN_vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR; -#endif /* defined(VK_KHR_video_encode_queue) */ -#if defined(VK_KHR_video_queue) -PFN_vkBindVideoSessionMemoryKHR vkBindVideoSessionMemoryKHR; -PFN_vkCmdBeginVideoCodingKHR vkCmdBeginVideoCodingKHR; -PFN_vkCmdControlVideoCodingKHR vkCmdControlVideoCodingKHR; -PFN_vkCmdEndVideoCodingKHR vkCmdEndVideoCodingKHR; -PFN_vkCreateVideoSessionKHR vkCreateVideoSessionKHR; -PFN_vkCreateVideoSessionParametersKHR vkCreateVideoSessionParametersKHR; -PFN_vkDestroyVideoSessionKHR vkDestroyVideoSessionKHR; -PFN_vkDestroyVideoSessionParametersKHR vkDestroyVideoSessionParametersKHR; -PFN_vkGetPhysicalDeviceVideoCapabilitiesKHR vkGetPhysicalDeviceVideoCapabilitiesKHR; -PFN_vkGetPhysicalDeviceVideoFormatPropertiesKHR vkGetPhysicalDeviceVideoFormatPropertiesKHR; -PFN_vkGetVideoSessionMemoryRequirementsKHR vkGetVideoSessionMemoryRequirementsKHR; -PFN_vkUpdateVideoSessionParametersKHR vkUpdateVideoSessionParametersKHR; -#endif /* defined(VK_KHR_video_queue) */ -#if defined(VK_KHR_wayland_surface) -PFN_vkCreateWaylandSurfaceKHR vkCreateWaylandSurfaceKHR; 
-PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR vkGetPhysicalDeviceWaylandPresentationSupportKHR; -#endif /* defined(VK_KHR_wayland_surface) */ -#if defined(VK_KHR_win32_surface) -PFN_vkCreateWin32SurfaceKHR vkCreateWin32SurfaceKHR; -PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR vkGetPhysicalDeviceWin32PresentationSupportKHR; -#endif /* defined(VK_KHR_win32_surface) */ -#if defined(VK_KHR_xcb_surface) -PFN_vkCreateXcbSurfaceKHR vkCreateXcbSurfaceKHR; -PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR vkGetPhysicalDeviceXcbPresentationSupportKHR; -#endif /* defined(VK_KHR_xcb_surface) */ -#if defined(VK_KHR_xlib_surface) -PFN_vkCreateXlibSurfaceKHR vkCreateXlibSurfaceKHR; -PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR vkGetPhysicalDeviceXlibPresentationSupportKHR; -#endif /* defined(VK_KHR_xlib_surface) */ -#if defined(VK_MVK_ios_surface) -PFN_vkCreateIOSSurfaceMVK vkCreateIOSSurfaceMVK; -#endif /* defined(VK_MVK_ios_surface) */ -#if defined(VK_MVK_macos_surface) -PFN_vkCreateMacOSSurfaceMVK vkCreateMacOSSurfaceMVK; -#endif /* defined(VK_MVK_macos_surface) */ -#if defined(VK_NN_vi_surface) -PFN_vkCreateViSurfaceNN vkCreateViSurfaceNN; -#endif /* defined(VK_NN_vi_surface) */ -#if defined(VK_NVX_binary_import) -PFN_vkCmdCuLaunchKernelNVX vkCmdCuLaunchKernelNVX; -PFN_vkCreateCuFunctionNVX vkCreateCuFunctionNVX; -PFN_vkCreateCuModuleNVX vkCreateCuModuleNVX; -PFN_vkDestroyCuFunctionNVX vkDestroyCuFunctionNVX; -PFN_vkDestroyCuModuleNVX vkDestroyCuModuleNVX; -#endif /* defined(VK_NVX_binary_import) */ -#if defined(VK_NVX_image_view_handle) -PFN_vkGetImageViewAddressNVX vkGetImageViewAddressNVX; -PFN_vkGetImageViewHandleNVX vkGetImageViewHandleNVX; -#endif /* defined(VK_NVX_image_view_handle) */ -#if defined(VK_NV_acquire_winrt_display) -PFN_vkAcquireWinrtDisplayNV vkAcquireWinrtDisplayNV; -PFN_vkGetWinrtDisplayNV vkGetWinrtDisplayNV; -#endif /* defined(VK_NV_acquire_winrt_display) */ -#if defined(VK_NV_clip_space_w_scaling) -PFN_vkCmdSetViewportWScalingNV vkCmdSetViewportWScalingNV; -#endif /* defined(VK_NV_clip_space_w_scaling) */ -#if defined(VK_NV_cooperative_matrix) -PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV vkGetPhysicalDeviceCooperativeMatrixPropertiesNV; -#endif /* defined(VK_NV_cooperative_matrix) */ -#if defined(VK_NV_copy_memory_indirect) -PFN_vkCmdCopyMemoryIndirectNV vkCmdCopyMemoryIndirectNV; -PFN_vkCmdCopyMemoryToImageIndirectNV vkCmdCopyMemoryToImageIndirectNV; -#endif /* defined(VK_NV_copy_memory_indirect) */ -#if defined(VK_NV_coverage_reduction_mode) -PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV; -#endif /* defined(VK_NV_coverage_reduction_mode) */ -#if defined(VK_NV_cuda_kernel_launch) -PFN_vkCmdCudaLaunchKernelNV vkCmdCudaLaunchKernelNV; -PFN_vkCreateCudaFunctionNV vkCreateCudaFunctionNV; -PFN_vkCreateCudaModuleNV vkCreateCudaModuleNV; -PFN_vkDestroyCudaFunctionNV vkDestroyCudaFunctionNV; -PFN_vkDestroyCudaModuleNV vkDestroyCudaModuleNV; -PFN_vkGetCudaModuleCacheNV vkGetCudaModuleCacheNV; -#endif /* defined(VK_NV_cuda_kernel_launch) */ -#if defined(VK_NV_device_diagnostic_checkpoints) -PFN_vkCmdSetCheckpointNV vkCmdSetCheckpointNV; -PFN_vkGetQueueCheckpointDataNV vkGetQueueCheckpointDataNV; -#endif /* defined(VK_NV_device_diagnostic_checkpoints) */ -#if defined(VK_NV_device_generated_commands) -PFN_vkCmdBindPipelineShaderGroupNV vkCmdBindPipelineShaderGroupNV; -PFN_vkCmdExecuteGeneratedCommandsNV vkCmdExecuteGeneratedCommandsNV; 
-PFN_vkCmdPreprocessGeneratedCommandsNV vkCmdPreprocessGeneratedCommandsNV; -PFN_vkCreateIndirectCommandsLayoutNV vkCreateIndirectCommandsLayoutNV; -PFN_vkDestroyIndirectCommandsLayoutNV vkDestroyIndirectCommandsLayoutNV; -PFN_vkGetGeneratedCommandsMemoryRequirementsNV vkGetGeneratedCommandsMemoryRequirementsNV; -#endif /* defined(VK_NV_device_generated_commands) */ -#if defined(VK_NV_device_generated_commands_compute) -PFN_vkCmdUpdatePipelineIndirectBufferNV vkCmdUpdatePipelineIndirectBufferNV; -PFN_vkGetPipelineIndirectDeviceAddressNV vkGetPipelineIndirectDeviceAddressNV; -PFN_vkGetPipelineIndirectMemoryRequirementsNV vkGetPipelineIndirectMemoryRequirementsNV; -#endif /* defined(VK_NV_device_generated_commands_compute) */ -#if defined(VK_NV_external_memory_capabilities) -PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV vkGetPhysicalDeviceExternalImageFormatPropertiesNV; -#endif /* defined(VK_NV_external_memory_capabilities) */ -#if defined(VK_NV_external_memory_rdma) -PFN_vkGetMemoryRemoteAddressNV vkGetMemoryRemoteAddressNV; -#endif /* defined(VK_NV_external_memory_rdma) */ -#if defined(VK_NV_external_memory_win32) -PFN_vkGetMemoryWin32HandleNV vkGetMemoryWin32HandleNV; -#endif /* defined(VK_NV_external_memory_win32) */ -#if defined(VK_NV_fragment_shading_rate_enums) -PFN_vkCmdSetFragmentShadingRateEnumNV vkCmdSetFragmentShadingRateEnumNV; -#endif /* defined(VK_NV_fragment_shading_rate_enums) */ -#if defined(VK_NV_low_latency2) -PFN_vkGetLatencyTimingsNV vkGetLatencyTimingsNV; -PFN_vkLatencySleepNV vkLatencySleepNV; -PFN_vkQueueNotifyOutOfBandNV vkQueueNotifyOutOfBandNV; -PFN_vkSetLatencyMarkerNV vkSetLatencyMarkerNV; -PFN_vkSetLatencySleepModeNV vkSetLatencySleepModeNV; -#endif /* defined(VK_NV_low_latency2) */ -#if defined(VK_NV_memory_decompression) -PFN_vkCmdDecompressMemoryIndirectCountNV vkCmdDecompressMemoryIndirectCountNV; -PFN_vkCmdDecompressMemoryNV vkCmdDecompressMemoryNV; -#endif /* defined(VK_NV_memory_decompression) */ -#if defined(VK_NV_mesh_shader) -PFN_vkCmdDrawMeshTasksIndirectCountNV vkCmdDrawMeshTasksIndirectCountNV; -PFN_vkCmdDrawMeshTasksIndirectNV vkCmdDrawMeshTasksIndirectNV; -PFN_vkCmdDrawMeshTasksNV vkCmdDrawMeshTasksNV; -#endif /* defined(VK_NV_mesh_shader) */ -#if defined(VK_NV_optical_flow) -PFN_vkBindOpticalFlowSessionImageNV vkBindOpticalFlowSessionImageNV; -PFN_vkCmdOpticalFlowExecuteNV vkCmdOpticalFlowExecuteNV; -PFN_vkCreateOpticalFlowSessionNV vkCreateOpticalFlowSessionNV; -PFN_vkDestroyOpticalFlowSessionNV vkDestroyOpticalFlowSessionNV; -PFN_vkGetPhysicalDeviceOpticalFlowImageFormatsNV vkGetPhysicalDeviceOpticalFlowImageFormatsNV; -#endif /* defined(VK_NV_optical_flow) */ -#if defined(VK_NV_ray_tracing) -PFN_vkBindAccelerationStructureMemoryNV vkBindAccelerationStructureMemoryNV; -PFN_vkCmdBuildAccelerationStructureNV vkCmdBuildAccelerationStructureNV; -PFN_vkCmdCopyAccelerationStructureNV vkCmdCopyAccelerationStructureNV; -PFN_vkCmdTraceRaysNV vkCmdTraceRaysNV; -PFN_vkCmdWriteAccelerationStructuresPropertiesNV vkCmdWriteAccelerationStructuresPropertiesNV; -PFN_vkCompileDeferredNV vkCompileDeferredNV; -PFN_vkCreateAccelerationStructureNV vkCreateAccelerationStructureNV; -PFN_vkCreateRayTracingPipelinesNV vkCreateRayTracingPipelinesNV; -PFN_vkDestroyAccelerationStructureNV vkDestroyAccelerationStructureNV; -PFN_vkGetAccelerationStructureHandleNV vkGetAccelerationStructureHandleNV; -PFN_vkGetAccelerationStructureMemoryRequirementsNV vkGetAccelerationStructureMemoryRequirementsNV; -PFN_vkGetRayTracingShaderGroupHandlesNV 
vkGetRayTracingShaderGroupHandlesNV; -#endif /* defined(VK_NV_ray_tracing) */ -#if defined(VK_NV_scissor_exclusive) && VK_NV_SCISSOR_EXCLUSIVE_SPEC_VERSION >= 2 -PFN_vkCmdSetExclusiveScissorEnableNV vkCmdSetExclusiveScissorEnableNV; -#endif /* defined(VK_NV_scissor_exclusive) && VK_NV_SCISSOR_EXCLUSIVE_SPEC_VERSION >= 2 */ -#if defined(VK_NV_scissor_exclusive) -PFN_vkCmdSetExclusiveScissorNV vkCmdSetExclusiveScissorNV; -#endif /* defined(VK_NV_scissor_exclusive) */ -#if defined(VK_NV_shading_rate_image) -PFN_vkCmdBindShadingRateImageNV vkCmdBindShadingRateImageNV; -PFN_vkCmdSetCoarseSampleOrderNV vkCmdSetCoarseSampleOrderNV; -PFN_vkCmdSetViewportShadingRatePaletteNV vkCmdSetViewportShadingRatePaletteNV; -#endif /* defined(VK_NV_shading_rate_image) */ -#if defined(VK_QCOM_tile_properties) -PFN_vkGetDynamicRenderingTilePropertiesQCOM vkGetDynamicRenderingTilePropertiesQCOM; -PFN_vkGetFramebufferTilePropertiesQCOM vkGetFramebufferTilePropertiesQCOM; -#endif /* defined(VK_QCOM_tile_properties) */ -#if defined(VK_QNX_external_memory_screen_buffer) -PFN_vkGetScreenBufferPropertiesQNX vkGetScreenBufferPropertiesQNX; -#endif /* defined(VK_QNX_external_memory_screen_buffer) */ -#if defined(VK_QNX_screen_surface) -PFN_vkCreateScreenSurfaceQNX vkCreateScreenSurfaceQNX; -PFN_vkGetPhysicalDeviceScreenPresentationSupportQNX vkGetPhysicalDeviceScreenPresentationSupportQNX; -#endif /* defined(VK_QNX_screen_surface) */ -#if defined(VK_VALVE_descriptor_set_host_mapping) -PFN_vkGetDescriptorSetHostMappingVALVE vkGetDescriptorSetHostMappingVALVE; -PFN_vkGetDescriptorSetLayoutHostMappingInfoVALVE vkGetDescriptorSetLayoutHostMappingInfoVALVE; -#endif /* defined(VK_VALVE_descriptor_set_host_mapping) */ -#if (defined(VK_EXT_extended_dynamic_state)) || (defined(VK_EXT_shader_object)) -PFN_vkCmdBindVertexBuffers2EXT vkCmdBindVertexBuffers2EXT; -PFN_vkCmdSetCullModeEXT vkCmdSetCullModeEXT; -PFN_vkCmdSetDepthBoundsTestEnableEXT vkCmdSetDepthBoundsTestEnableEXT; -PFN_vkCmdSetDepthCompareOpEXT vkCmdSetDepthCompareOpEXT; -PFN_vkCmdSetDepthTestEnableEXT vkCmdSetDepthTestEnableEXT; -PFN_vkCmdSetDepthWriteEnableEXT vkCmdSetDepthWriteEnableEXT; -PFN_vkCmdSetFrontFaceEXT vkCmdSetFrontFaceEXT; -PFN_vkCmdSetPrimitiveTopologyEXT vkCmdSetPrimitiveTopologyEXT; -PFN_vkCmdSetScissorWithCountEXT vkCmdSetScissorWithCountEXT; -PFN_vkCmdSetStencilOpEXT vkCmdSetStencilOpEXT; -PFN_vkCmdSetStencilTestEnableEXT vkCmdSetStencilTestEnableEXT; -PFN_vkCmdSetViewportWithCountEXT vkCmdSetViewportWithCountEXT; -#endif /* (defined(VK_EXT_extended_dynamic_state)) || (defined(VK_EXT_shader_object)) */ -#if (defined(VK_EXT_extended_dynamic_state2)) || (defined(VK_EXT_shader_object)) -PFN_vkCmdSetDepthBiasEnableEXT vkCmdSetDepthBiasEnableEXT; -PFN_vkCmdSetLogicOpEXT vkCmdSetLogicOpEXT; -PFN_vkCmdSetPatchControlPointsEXT vkCmdSetPatchControlPointsEXT; -PFN_vkCmdSetPrimitiveRestartEnableEXT vkCmdSetPrimitiveRestartEnableEXT; -PFN_vkCmdSetRasterizerDiscardEnableEXT vkCmdSetRasterizerDiscardEnableEXT; -#endif /* (defined(VK_EXT_extended_dynamic_state2)) || (defined(VK_EXT_shader_object)) */ -#if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) -PFN_vkCmdSetAlphaToCoverageEnableEXT vkCmdSetAlphaToCoverageEnableEXT; -PFN_vkCmdSetAlphaToOneEnableEXT vkCmdSetAlphaToOneEnableEXT; -PFN_vkCmdSetColorBlendAdvancedEXT vkCmdSetColorBlendAdvancedEXT; -PFN_vkCmdSetColorBlendEnableEXT vkCmdSetColorBlendEnableEXT; -PFN_vkCmdSetColorBlendEquationEXT vkCmdSetColorBlendEquationEXT; -PFN_vkCmdSetColorWriteMaskEXT vkCmdSetColorWriteMaskEXT; 
-PFN_vkCmdSetConservativeRasterizationModeEXT vkCmdSetConservativeRasterizationModeEXT; -PFN_vkCmdSetDepthClampEnableEXT vkCmdSetDepthClampEnableEXT; -PFN_vkCmdSetDepthClipEnableEXT vkCmdSetDepthClipEnableEXT; -PFN_vkCmdSetDepthClipNegativeOneToOneEXT vkCmdSetDepthClipNegativeOneToOneEXT; -PFN_vkCmdSetExtraPrimitiveOverestimationSizeEXT vkCmdSetExtraPrimitiveOverestimationSizeEXT; -PFN_vkCmdSetLineRasterizationModeEXT vkCmdSetLineRasterizationModeEXT; -PFN_vkCmdSetLineStippleEnableEXT vkCmdSetLineStippleEnableEXT; -PFN_vkCmdSetLogicOpEnableEXT vkCmdSetLogicOpEnableEXT; -PFN_vkCmdSetPolygonModeEXT vkCmdSetPolygonModeEXT; -PFN_vkCmdSetProvokingVertexModeEXT vkCmdSetProvokingVertexModeEXT; -PFN_vkCmdSetRasterizationSamplesEXT vkCmdSetRasterizationSamplesEXT; -PFN_vkCmdSetRasterizationStreamEXT vkCmdSetRasterizationStreamEXT; -PFN_vkCmdSetSampleLocationsEnableEXT vkCmdSetSampleLocationsEnableEXT; -PFN_vkCmdSetSampleMaskEXT vkCmdSetSampleMaskEXT; -PFN_vkCmdSetTessellationDomainOriginEXT vkCmdSetTessellationDomainOriginEXT; -#endif /* (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) */ -#if (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_clip_space_w_scaling)) || (defined(VK_EXT_shader_object) && defined(VK_NV_clip_space_w_scaling)) -PFN_vkCmdSetViewportWScalingEnableNV vkCmdSetViewportWScalingEnableNV; -#endif /* (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_clip_space_w_scaling)) || (defined(VK_EXT_shader_object) && defined(VK_NV_clip_space_w_scaling)) */ -#if (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_viewport_swizzle)) || (defined(VK_EXT_shader_object) && defined(VK_NV_viewport_swizzle)) -PFN_vkCmdSetViewportSwizzleNV vkCmdSetViewportSwizzleNV; -#endif /* (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_viewport_swizzle)) || (defined(VK_EXT_shader_object) && defined(VK_NV_viewport_swizzle)) */ -#if (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_fragment_coverage_to_color)) || (defined(VK_EXT_shader_object) && defined(VK_NV_fragment_coverage_to_color)) -PFN_vkCmdSetCoverageToColorEnableNV vkCmdSetCoverageToColorEnableNV; -PFN_vkCmdSetCoverageToColorLocationNV vkCmdSetCoverageToColorLocationNV; -#endif /* (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_fragment_coverage_to_color)) || (defined(VK_EXT_shader_object) && defined(VK_NV_fragment_coverage_to_color)) */ -#if (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_framebuffer_mixed_samples)) || (defined(VK_EXT_shader_object) && defined(VK_NV_framebuffer_mixed_samples)) -PFN_vkCmdSetCoverageModulationModeNV vkCmdSetCoverageModulationModeNV; -PFN_vkCmdSetCoverageModulationTableEnableNV vkCmdSetCoverageModulationTableEnableNV; -PFN_vkCmdSetCoverageModulationTableNV vkCmdSetCoverageModulationTableNV; -#endif /* (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_framebuffer_mixed_samples)) || (defined(VK_EXT_shader_object) && defined(VK_NV_framebuffer_mixed_samples)) */ -#if (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_shading_rate_image)) || (defined(VK_EXT_shader_object) && defined(VK_NV_shading_rate_image)) -PFN_vkCmdSetShadingRateImageEnableNV vkCmdSetShadingRateImageEnableNV; -#endif /* (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_shading_rate_image)) || (defined(VK_EXT_shader_object) && defined(VK_NV_shading_rate_image)) */ -#if (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_representative_fragment_test)) || (defined(VK_EXT_shader_object) && defined(VK_NV_representative_fragment_test)) 
-PFN_vkCmdSetRepresentativeFragmentTestEnableNV vkCmdSetRepresentativeFragmentTestEnableNV;
-#endif /* (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_representative_fragment_test)) || (defined(VK_EXT_shader_object) && defined(VK_NV_representative_fragment_test)) */
-#if (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_coverage_reduction_mode)) || (defined(VK_EXT_shader_object) && defined(VK_NV_coverage_reduction_mode))
-PFN_vkCmdSetCoverageReductionModeNV vkCmdSetCoverageReductionModeNV;
-#endif /* (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_coverage_reduction_mode)) || (defined(VK_EXT_shader_object) && defined(VK_NV_coverage_reduction_mode)) */
-#if (defined(VK_EXT_full_screen_exclusive) && defined(VK_KHR_device_group)) || (defined(VK_EXT_full_screen_exclusive) && defined(VK_VERSION_1_1))
-PFN_vkGetDeviceGroupSurfacePresentModes2EXT vkGetDeviceGroupSurfacePresentModes2EXT;
-#endif /* (defined(VK_EXT_full_screen_exclusive) && defined(VK_KHR_device_group)) || (defined(VK_EXT_full_screen_exclusive) && defined(VK_VERSION_1_1)) */
-#if (defined(VK_EXT_host_image_copy)) || (defined(VK_EXT_image_compression_control))
-PFN_vkGetImageSubresourceLayout2EXT vkGetImageSubresourceLayout2EXT;
-#endif /* (defined(VK_EXT_host_image_copy)) || (defined(VK_EXT_image_compression_control)) */
-#if (defined(VK_EXT_shader_object)) || (defined(VK_EXT_vertex_input_dynamic_state))
-PFN_vkCmdSetVertexInputEXT vkCmdSetVertexInputEXT;
-#endif /* (defined(VK_EXT_shader_object)) || (defined(VK_EXT_vertex_input_dynamic_state)) */
-#if (defined(VK_KHR_descriptor_update_template) && defined(VK_KHR_push_descriptor)) || (defined(VK_KHR_push_descriptor) && defined(VK_VERSION_1_1)) || (defined(VK_KHR_push_descriptor) && defined(VK_KHR_descriptor_update_template))
-PFN_vkCmdPushDescriptorSetWithTemplateKHR vkCmdPushDescriptorSetWithTemplateKHR;
-#endif /* (defined(VK_KHR_descriptor_update_template) && defined(VK_KHR_push_descriptor)) || (defined(VK_KHR_push_descriptor) && defined(VK_VERSION_1_1)) || (defined(VK_KHR_push_descriptor) && defined(VK_KHR_descriptor_update_template)) */
-#if (defined(VK_KHR_device_group) && defined(VK_KHR_surface)) || (defined(VK_KHR_swapchain) && defined(VK_VERSION_1_1))
-PFN_vkGetDeviceGroupPresentCapabilitiesKHR vkGetDeviceGroupPresentCapabilitiesKHR;
-PFN_vkGetDeviceGroupSurfacePresentModesKHR vkGetDeviceGroupSurfacePresentModesKHR;
-PFN_vkGetPhysicalDevicePresentRectanglesKHR vkGetPhysicalDevicePresentRectanglesKHR;
-#endif /* (defined(VK_KHR_device_group) && defined(VK_KHR_surface)) || (defined(VK_KHR_swapchain) && defined(VK_VERSION_1_1)) */
-#if (defined(VK_KHR_device_group) && defined(VK_KHR_swapchain)) || (defined(VK_KHR_swapchain) && defined(VK_VERSION_1_1))
-PFN_vkAcquireNextImage2KHR vkAcquireNextImage2KHR;
-#endif /* (defined(VK_KHR_device_group) && defined(VK_KHR_swapchain)) || (defined(VK_KHR_swapchain) && defined(VK_VERSION_1_1)) */
-/* VOLK_GENERATE_PROTOTYPES_C */
-
-#ifdef __GNUC__
-# pragma GCC visibility pop
-#endif
-
-#ifdef __cplusplus
-}
-#endif
-/* clang-format on */
diff --git a/third_party/volk.h b/third_party/volk.h
deleted file mode 100644
index 47fbed2..0000000
--- a/third_party/volk.h
+++ /dev/null
@@ -1,1985 +0,0 @@
-/**
- * volk
- *
- * Copyright (C) 2018-2023, by Arseny Kapoulkine (arseny.kapoulkine@gmail.com)
- * Report bugs and download new versions at https://github.com/zeux/volk
- *
- * This library is distributed under the MIT License. See notice at the end of this file.
- */
-/* clang-format off */
-#ifndef VOLK_H_
-#define VOLK_H_
-
-#if defined(VULKAN_H_) && !defined(VK_NO_PROTOTYPES)
-# error To use volk, you need to define VK_NO_PROTOTYPES before including vulkan.h
-#endif
-
-/* VOLK_GENERATE_VERSION_DEFINE */
-#define VOLK_HEADER_VERSION 270
-/* VOLK_GENERATE_VERSION_DEFINE */
-
-#ifndef VK_NO_PROTOTYPES
-# define VK_NO_PROTOTYPES
-#endif
-
-#ifndef VULKAN_H_
-# ifdef VOLK_VULKAN_H_PATH
-# include VOLK_VULKAN_H_PATH
-# elif defined(VK_USE_PLATFORM_WIN32_KHR)
-# include <vulkan/vk_platform.h>
-# include <vulkan/vulkan_core.h>
-
- /* When VK_USE_PLATFORM_WIN32_KHR is defined, instead of including vulkan.h directly, we include individual parts of the SDK
- * This is necessary to avoid including <windows.h> which is very heavy - it takes 200ms to parse without WIN32_LEAN_AND_MEAN
- * and 100ms to parse with it. vulkan_win32.h only needs a few symbols that are easy to redefine ourselves.
- */
- typedef unsigned long DWORD;
- typedef const wchar_t* LPCWSTR;
- typedef void* HANDLE;
- typedef struct HINSTANCE__* HINSTANCE;
- typedef struct HWND__* HWND;
- typedef struct HMONITOR__* HMONITOR;
- typedef struct _SECURITY_ATTRIBUTES SECURITY_ATTRIBUTES;
-
-# include <vulkan/vulkan_win32.h>
-
-# ifdef VK_ENABLE_BETA_EXTENSIONS
-# include <vulkan/vulkan_beta.h>
-# endif
-# else
-# include <vulkan/vulkan.h>
-# endif
-#endif
-
-/* Disable several extensions on earlier SDKs because later SDKs introduce a backwards incompatible change to function signatures */
-#if VK_HEADER_VERSION < 140
-# undef VK_NVX_image_view_handle
-#endif
-#if VK_HEADER_VERSION < 184
-# undef VK_HUAWEI_subpass_shading
-#endif
-
-#ifdef __cplusplus
-extern "C" {
-#endif
-
-struct VolkDeviceTable;
-
-/**
- * Initialize library by loading Vulkan loader; call this function before creating the Vulkan instance.
- *
- * Returns VK_SUCCESS on success and VK_ERROR_INITIALIZATION_FAILED otherwise.
- */
-VkResult volkInitialize(void);
-
-/**
- * Initialize library by providing a custom handler to load global symbols.
- *
- * This function can be used instead of volkInitialize.
- * The handler function pointer will be asked to load global Vulkan symbols which require no instance
- * (such as vkCreateInstance, vkEnumerateInstance* and vkEnumerateInstanceVersion if available).
- */
-void volkInitializeCustom(PFN_vkGetInstanceProcAddr handler);
-
-/**
- * Finalize library by unloading Vulkan loader and resetting global symbols to NULL.
- */
-void volkFinalize(void);
-
-/**
- * Get Vulkan instance version supported by the Vulkan loader, or 0 if Vulkan isn't supported
- *
- * Returns 0 if volkInitialize wasn't called or failed.
- */
-uint32_t volkGetInstanceVersion(void);
-
-/**
- * Load global function pointers using application-created VkInstance; call this function after creating the Vulkan instance.
- */
-void volkLoadInstance(VkInstance instance);
-
-/**
- * Load global function pointers using application-created VkInstance; call this function after creating the Vulkan instance.
- * Skips loading device-based function pointers, requires usage of volkLoadDevice afterwards.
- */
-void volkLoadInstanceOnly(VkInstance instance);
-
-/**
- * Load global function pointers using application-created VkDevice; call this function after creating the Vulkan device.
- *
- * Note: this is not suitable for applications that want to use multiple VkDevice objects concurrently.
- */
-void volkLoadDevice(VkDevice device);
-
-/**
- * Return last VkInstance for which global function pointers have been loaded via volkLoadInstance(),
- * or VK_NULL_HANDLE if volkLoadInstance() has not been called.
- */ -VkInstance volkGetLoadedInstance(void); - -/** - * Return last VkDevice for which global function pointers have been loaded via volkLoadDevice(), - * or VK_NULL_HANDLE if volkLoadDevice() has not been called. - */ -VkDevice volkGetLoadedDevice(void); - -/** - * Load function pointers using application-created VkDevice into a table. - * Application should use function pointers from that table instead of using global function pointers. - */ -void volkLoadDeviceTable(struct VolkDeviceTable* table, VkDevice device); - -/** - * Device-specific function pointer table - */ -struct VolkDeviceTable -{ - /* VOLK_GENERATE_DEVICE_TABLE */ -#if defined(VK_VERSION_1_0) - PFN_vkAllocateCommandBuffers vkAllocateCommandBuffers; - PFN_vkAllocateDescriptorSets vkAllocateDescriptorSets; - PFN_vkAllocateMemory vkAllocateMemory; - PFN_vkBeginCommandBuffer vkBeginCommandBuffer; - PFN_vkBindBufferMemory vkBindBufferMemory; - PFN_vkBindImageMemory vkBindImageMemory; - PFN_vkCmdBeginQuery vkCmdBeginQuery; - PFN_vkCmdBeginRenderPass vkCmdBeginRenderPass; - PFN_vkCmdBindDescriptorSets vkCmdBindDescriptorSets; - PFN_vkCmdBindIndexBuffer vkCmdBindIndexBuffer; - PFN_vkCmdBindPipeline vkCmdBindPipeline; - PFN_vkCmdBindVertexBuffers vkCmdBindVertexBuffers; - PFN_vkCmdBlitImage vkCmdBlitImage; - PFN_vkCmdClearAttachments vkCmdClearAttachments; - PFN_vkCmdClearColorImage vkCmdClearColorImage; - PFN_vkCmdClearDepthStencilImage vkCmdClearDepthStencilImage; - PFN_vkCmdCopyBuffer vkCmdCopyBuffer; - PFN_vkCmdCopyBufferToImage vkCmdCopyBufferToImage; - PFN_vkCmdCopyImage vkCmdCopyImage; - PFN_vkCmdCopyImageToBuffer vkCmdCopyImageToBuffer; - PFN_vkCmdCopyQueryPoolResults vkCmdCopyQueryPoolResults; - PFN_vkCmdDispatch vkCmdDispatch; - PFN_vkCmdDispatchIndirect vkCmdDispatchIndirect; - PFN_vkCmdDraw vkCmdDraw; - PFN_vkCmdDrawIndexed vkCmdDrawIndexed; - PFN_vkCmdDrawIndexedIndirect vkCmdDrawIndexedIndirect; - PFN_vkCmdDrawIndirect vkCmdDrawIndirect; - PFN_vkCmdEndQuery vkCmdEndQuery; - PFN_vkCmdEndRenderPass vkCmdEndRenderPass; - PFN_vkCmdExecuteCommands vkCmdExecuteCommands; - PFN_vkCmdFillBuffer vkCmdFillBuffer; - PFN_vkCmdNextSubpass vkCmdNextSubpass; - PFN_vkCmdPipelineBarrier vkCmdPipelineBarrier; - PFN_vkCmdPushConstants vkCmdPushConstants; - PFN_vkCmdResetEvent vkCmdResetEvent; - PFN_vkCmdResetQueryPool vkCmdResetQueryPool; - PFN_vkCmdResolveImage vkCmdResolveImage; - PFN_vkCmdSetBlendConstants vkCmdSetBlendConstants; - PFN_vkCmdSetDepthBias vkCmdSetDepthBias; - PFN_vkCmdSetDepthBounds vkCmdSetDepthBounds; - PFN_vkCmdSetEvent vkCmdSetEvent; - PFN_vkCmdSetLineWidth vkCmdSetLineWidth; - PFN_vkCmdSetScissor vkCmdSetScissor; - PFN_vkCmdSetStencilCompareMask vkCmdSetStencilCompareMask; - PFN_vkCmdSetStencilReference vkCmdSetStencilReference; - PFN_vkCmdSetStencilWriteMask vkCmdSetStencilWriteMask; - PFN_vkCmdSetViewport vkCmdSetViewport; - PFN_vkCmdUpdateBuffer vkCmdUpdateBuffer; - PFN_vkCmdWaitEvents vkCmdWaitEvents; - PFN_vkCmdWriteTimestamp vkCmdWriteTimestamp; - PFN_vkCreateBuffer vkCreateBuffer; - PFN_vkCreateBufferView vkCreateBufferView; - PFN_vkCreateCommandPool vkCreateCommandPool; - PFN_vkCreateComputePipelines vkCreateComputePipelines; - PFN_vkCreateDescriptorPool vkCreateDescriptorPool; - PFN_vkCreateDescriptorSetLayout vkCreateDescriptorSetLayout; - PFN_vkCreateEvent vkCreateEvent; - PFN_vkCreateFence vkCreateFence; - PFN_vkCreateFramebuffer vkCreateFramebuffer; - PFN_vkCreateGraphicsPipelines vkCreateGraphicsPipelines; - PFN_vkCreateImage vkCreateImage; - PFN_vkCreateImageView vkCreateImageView; - 
PFN_vkCreatePipelineCache vkCreatePipelineCache; - PFN_vkCreatePipelineLayout vkCreatePipelineLayout; - PFN_vkCreateQueryPool vkCreateQueryPool; - PFN_vkCreateRenderPass vkCreateRenderPass; - PFN_vkCreateSampler vkCreateSampler; - PFN_vkCreateSemaphore vkCreateSemaphore; - PFN_vkCreateShaderModule vkCreateShaderModule; - PFN_vkDestroyBuffer vkDestroyBuffer; - PFN_vkDestroyBufferView vkDestroyBufferView; - PFN_vkDestroyCommandPool vkDestroyCommandPool; - PFN_vkDestroyDescriptorPool vkDestroyDescriptorPool; - PFN_vkDestroyDescriptorSetLayout vkDestroyDescriptorSetLayout; - PFN_vkDestroyDevice vkDestroyDevice; - PFN_vkDestroyEvent vkDestroyEvent; - PFN_vkDestroyFence vkDestroyFence; - PFN_vkDestroyFramebuffer vkDestroyFramebuffer; - PFN_vkDestroyImage vkDestroyImage; - PFN_vkDestroyImageView vkDestroyImageView; - PFN_vkDestroyPipeline vkDestroyPipeline; - PFN_vkDestroyPipelineCache vkDestroyPipelineCache; - PFN_vkDestroyPipelineLayout vkDestroyPipelineLayout; - PFN_vkDestroyQueryPool vkDestroyQueryPool; - PFN_vkDestroyRenderPass vkDestroyRenderPass; - PFN_vkDestroySampler vkDestroySampler; - PFN_vkDestroySemaphore vkDestroySemaphore; - PFN_vkDestroyShaderModule vkDestroyShaderModule; - PFN_vkDeviceWaitIdle vkDeviceWaitIdle; - PFN_vkEndCommandBuffer vkEndCommandBuffer; - PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges; - PFN_vkFreeCommandBuffers vkFreeCommandBuffers; - PFN_vkFreeDescriptorSets vkFreeDescriptorSets; - PFN_vkFreeMemory vkFreeMemory; - PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements; - PFN_vkGetDeviceMemoryCommitment vkGetDeviceMemoryCommitment; - PFN_vkGetDeviceQueue vkGetDeviceQueue; - PFN_vkGetEventStatus vkGetEventStatus; - PFN_vkGetFenceStatus vkGetFenceStatus; - PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements; - PFN_vkGetImageSparseMemoryRequirements vkGetImageSparseMemoryRequirements; - PFN_vkGetImageSubresourceLayout vkGetImageSubresourceLayout; - PFN_vkGetPipelineCacheData vkGetPipelineCacheData; - PFN_vkGetQueryPoolResults vkGetQueryPoolResults; - PFN_vkGetRenderAreaGranularity vkGetRenderAreaGranularity; - PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges; - PFN_vkMapMemory vkMapMemory; - PFN_vkMergePipelineCaches vkMergePipelineCaches; - PFN_vkQueueBindSparse vkQueueBindSparse; - PFN_vkQueueSubmit vkQueueSubmit; - PFN_vkQueueWaitIdle vkQueueWaitIdle; - PFN_vkResetCommandBuffer vkResetCommandBuffer; - PFN_vkResetCommandPool vkResetCommandPool; - PFN_vkResetDescriptorPool vkResetDescriptorPool; - PFN_vkResetEvent vkResetEvent; - PFN_vkResetFences vkResetFences; - PFN_vkSetEvent vkSetEvent; - PFN_vkUnmapMemory vkUnmapMemory; - PFN_vkUpdateDescriptorSets vkUpdateDescriptorSets; - PFN_vkWaitForFences vkWaitForFences; -#endif /* defined(VK_VERSION_1_0) */ -#if defined(VK_VERSION_1_1) - PFN_vkBindBufferMemory2 vkBindBufferMemory2; - PFN_vkBindImageMemory2 vkBindImageMemory2; - PFN_vkCmdDispatchBase vkCmdDispatchBase; - PFN_vkCmdSetDeviceMask vkCmdSetDeviceMask; - PFN_vkCreateDescriptorUpdateTemplate vkCreateDescriptorUpdateTemplate; - PFN_vkCreateSamplerYcbcrConversion vkCreateSamplerYcbcrConversion; - PFN_vkDestroyDescriptorUpdateTemplate vkDestroyDescriptorUpdateTemplate; - PFN_vkDestroySamplerYcbcrConversion vkDestroySamplerYcbcrConversion; - PFN_vkGetBufferMemoryRequirements2 vkGetBufferMemoryRequirements2; - PFN_vkGetDescriptorSetLayoutSupport vkGetDescriptorSetLayoutSupport; - PFN_vkGetDeviceGroupPeerMemoryFeatures vkGetDeviceGroupPeerMemoryFeatures; - PFN_vkGetDeviceQueue2 vkGetDeviceQueue2; - 
PFN_vkGetImageMemoryRequirements2 vkGetImageMemoryRequirements2; - PFN_vkGetImageSparseMemoryRequirements2 vkGetImageSparseMemoryRequirements2; - PFN_vkTrimCommandPool vkTrimCommandPool; - PFN_vkUpdateDescriptorSetWithTemplate vkUpdateDescriptorSetWithTemplate; -#endif /* defined(VK_VERSION_1_1) */ -#if defined(VK_VERSION_1_2) - PFN_vkCmdBeginRenderPass2 vkCmdBeginRenderPass2; - PFN_vkCmdDrawIndexedIndirectCount vkCmdDrawIndexedIndirectCount; - PFN_vkCmdDrawIndirectCount vkCmdDrawIndirectCount; - PFN_vkCmdEndRenderPass2 vkCmdEndRenderPass2; - PFN_vkCmdNextSubpass2 vkCmdNextSubpass2; - PFN_vkCreateRenderPass2 vkCreateRenderPass2; - PFN_vkGetBufferDeviceAddress vkGetBufferDeviceAddress; - PFN_vkGetBufferOpaqueCaptureAddress vkGetBufferOpaqueCaptureAddress; - PFN_vkGetDeviceMemoryOpaqueCaptureAddress vkGetDeviceMemoryOpaqueCaptureAddress; - PFN_vkGetSemaphoreCounterValue vkGetSemaphoreCounterValue; - PFN_vkResetQueryPool vkResetQueryPool; - PFN_vkSignalSemaphore vkSignalSemaphore; - PFN_vkWaitSemaphores vkWaitSemaphores; -#endif /* defined(VK_VERSION_1_2) */ -#if defined(VK_VERSION_1_3) - PFN_vkCmdBeginRendering vkCmdBeginRendering; - PFN_vkCmdBindVertexBuffers2 vkCmdBindVertexBuffers2; - PFN_vkCmdBlitImage2 vkCmdBlitImage2; - PFN_vkCmdCopyBuffer2 vkCmdCopyBuffer2; - PFN_vkCmdCopyBufferToImage2 vkCmdCopyBufferToImage2; - PFN_vkCmdCopyImage2 vkCmdCopyImage2; - PFN_vkCmdCopyImageToBuffer2 vkCmdCopyImageToBuffer2; - PFN_vkCmdEndRendering vkCmdEndRendering; - PFN_vkCmdPipelineBarrier2 vkCmdPipelineBarrier2; - PFN_vkCmdResetEvent2 vkCmdResetEvent2; - PFN_vkCmdResolveImage2 vkCmdResolveImage2; - PFN_vkCmdSetCullMode vkCmdSetCullMode; - PFN_vkCmdSetDepthBiasEnable vkCmdSetDepthBiasEnable; - PFN_vkCmdSetDepthBoundsTestEnable vkCmdSetDepthBoundsTestEnable; - PFN_vkCmdSetDepthCompareOp vkCmdSetDepthCompareOp; - PFN_vkCmdSetDepthTestEnable vkCmdSetDepthTestEnable; - PFN_vkCmdSetDepthWriteEnable vkCmdSetDepthWriteEnable; - PFN_vkCmdSetEvent2 vkCmdSetEvent2; - PFN_vkCmdSetFrontFace vkCmdSetFrontFace; - PFN_vkCmdSetPrimitiveRestartEnable vkCmdSetPrimitiveRestartEnable; - PFN_vkCmdSetPrimitiveTopology vkCmdSetPrimitiveTopology; - PFN_vkCmdSetRasterizerDiscardEnable vkCmdSetRasterizerDiscardEnable; - PFN_vkCmdSetScissorWithCount vkCmdSetScissorWithCount; - PFN_vkCmdSetStencilOp vkCmdSetStencilOp; - PFN_vkCmdSetStencilTestEnable vkCmdSetStencilTestEnable; - PFN_vkCmdSetViewportWithCount vkCmdSetViewportWithCount; - PFN_vkCmdWaitEvents2 vkCmdWaitEvents2; - PFN_vkCmdWriteTimestamp2 vkCmdWriteTimestamp2; - PFN_vkCreatePrivateDataSlot vkCreatePrivateDataSlot; - PFN_vkDestroyPrivateDataSlot vkDestroyPrivateDataSlot; - PFN_vkGetDeviceBufferMemoryRequirements vkGetDeviceBufferMemoryRequirements; - PFN_vkGetDeviceImageMemoryRequirements vkGetDeviceImageMemoryRequirements; - PFN_vkGetDeviceImageSparseMemoryRequirements vkGetDeviceImageSparseMemoryRequirements; - PFN_vkGetPrivateData vkGetPrivateData; - PFN_vkQueueSubmit2 vkQueueSubmit2; - PFN_vkSetPrivateData vkSetPrivateData; -#endif /* defined(VK_VERSION_1_3) */ -#if defined(VK_AMDX_shader_enqueue) - PFN_vkCmdDispatchGraphAMDX vkCmdDispatchGraphAMDX; - PFN_vkCmdDispatchGraphIndirectAMDX vkCmdDispatchGraphIndirectAMDX; - PFN_vkCmdDispatchGraphIndirectCountAMDX vkCmdDispatchGraphIndirectCountAMDX; - PFN_vkCmdInitializeGraphScratchMemoryAMDX vkCmdInitializeGraphScratchMemoryAMDX; - PFN_vkCreateExecutionGraphPipelinesAMDX vkCreateExecutionGraphPipelinesAMDX; - PFN_vkGetExecutionGraphPipelineNodeIndexAMDX vkGetExecutionGraphPipelineNodeIndexAMDX; - 
PFN_vkGetExecutionGraphPipelineScratchSizeAMDX vkGetExecutionGraphPipelineScratchSizeAMDX; -#endif /* defined(VK_AMDX_shader_enqueue) */ -#if defined(VK_AMD_buffer_marker) - PFN_vkCmdWriteBufferMarkerAMD vkCmdWriteBufferMarkerAMD; -#endif /* defined(VK_AMD_buffer_marker) */ -#if defined(VK_AMD_display_native_hdr) - PFN_vkSetLocalDimmingAMD vkSetLocalDimmingAMD; -#endif /* defined(VK_AMD_display_native_hdr) */ -#if defined(VK_AMD_draw_indirect_count) - PFN_vkCmdDrawIndexedIndirectCountAMD vkCmdDrawIndexedIndirectCountAMD; - PFN_vkCmdDrawIndirectCountAMD vkCmdDrawIndirectCountAMD; -#endif /* defined(VK_AMD_draw_indirect_count) */ -#if defined(VK_AMD_shader_info) - PFN_vkGetShaderInfoAMD vkGetShaderInfoAMD; -#endif /* defined(VK_AMD_shader_info) */ -#if defined(VK_ANDROID_external_memory_android_hardware_buffer) - PFN_vkGetAndroidHardwareBufferPropertiesANDROID vkGetAndroidHardwareBufferPropertiesANDROID; - PFN_vkGetMemoryAndroidHardwareBufferANDROID vkGetMemoryAndroidHardwareBufferANDROID; -#endif /* defined(VK_ANDROID_external_memory_android_hardware_buffer) */ -#if defined(VK_EXT_attachment_feedback_loop_dynamic_state) - PFN_vkCmdSetAttachmentFeedbackLoopEnableEXT vkCmdSetAttachmentFeedbackLoopEnableEXT; -#endif /* defined(VK_EXT_attachment_feedback_loop_dynamic_state) */ -#if defined(VK_EXT_buffer_device_address) - PFN_vkGetBufferDeviceAddressEXT vkGetBufferDeviceAddressEXT; -#endif /* defined(VK_EXT_buffer_device_address) */ -#if defined(VK_EXT_calibrated_timestamps) - PFN_vkGetCalibratedTimestampsEXT vkGetCalibratedTimestampsEXT; -#endif /* defined(VK_EXT_calibrated_timestamps) */ -#if defined(VK_EXT_color_write_enable) - PFN_vkCmdSetColorWriteEnableEXT vkCmdSetColorWriteEnableEXT; -#endif /* defined(VK_EXT_color_write_enable) */ -#if defined(VK_EXT_conditional_rendering) - PFN_vkCmdBeginConditionalRenderingEXT vkCmdBeginConditionalRenderingEXT; - PFN_vkCmdEndConditionalRenderingEXT vkCmdEndConditionalRenderingEXT; -#endif /* defined(VK_EXT_conditional_rendering) */ -#if defined(VK_EXT_debug_marker) - PFN_vkCmdDebugMarkerBeginEXT vkCmdDebugMarkerBeginEXT; - PFN_vkCmdDebugMarkerEndEXT vkCmdDebugMarkerEndEXT; - PFN_vkCmdDebugMarkerInsertEXT vkCmdDebugMarkerInsertEXT; - PFN_vkDebugMarkerSetObjectNameEXT vkDebugMarkerSetObjectNameEXT; - PFN_vkDebugMarkerSetObjectTagEXT vkDebugMarkerSetObjectTagEXT; -#endif /* defined(VK_EXT_debug_marker) */ -#if defined(VK_EXT_depth_bias_control) - PFN_vkCmdSetDepthBias2EXT vkCmdSetDepthBias2EXT; -#endif /* defined(VK_EXT_depth_bias_control) */ -#if defined(VK_EXT_descriptor_buffer) - PFN_vkCmdBindDescriptorBufferEmbeddedSamplersEXT vkCmdBindDescriptorBufferEmbeddedSamplersEXT; - PFN_vkCmdBindDescriptorBuffersEXT vkCmdBindDescriptorBuffersEXT; - PFN_vkCmdSetDescriptorBufferOffsetsEXT vkCmdSetDescriptorBufferOffsetsEXT; - PFN_vkGetBufferOpaqueCaptureDescriptorDataEXT vkGetBufferOpaqueCaptureDescriptorDataEXT; - PFN_vkGetDescriptorEXT vkGetDescriptorEXT; - PFN_vkGetDescriptorSetLayoutBindingOffsetEXT vkGetDescriptorSetLayoutBindingOffsetEXT; - PFN_vkGetDescriptorSetLayoutSizeEXT vkGetDescriptorSetLayoutSizeEXT; - PFN_vkGetImageOpaqueCaptureDescriptorDataEXT vkGetImageOpaqueCaptureDescriptorDataEXT; - PFN_vkGetImageViewOpaqueCaptureDescriptorDataEXT vkGetImageViewOpaqueCaptureDescriptorDataEXT; - PFN_vkGetSamplerOpaqueCaptureDescriptorDataEXT vkGetSamplerOpaqueCaptureDescriptorDataEXT; -#endif /* defined(VK_EXT_descriptor_buffer) */ -#if defined(VK_EXT_descriptor_buffer) && (defined(VK_KHR_acceleration_structure) || defined(VK_NV_ray_tracing)) - 
PFN_vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT; -#endif /* defined(VK_EXT_descriptor_buffer) && (defined(VK_KHR_acceleration_structure) || defined(VK_NV_ray_tracing)) */ -#if defined(VK_EXT_device_fault) - PFN_vkGetDeviceFaultInfoEXT vkGetDeviceFaultInfoEXT; -#endif /* defined(VK_EXT_device_fault) */ -#if defined(VK_EXT_discard_rectangles) - PFN_vkCmdSetDiscardRectangleEXT vkCmdSetDiscardRectangleEXT; -#endif /* defined(VK_EXT_discard_rectangles) */ -#if defined(VK_EXT_discard_rectangles) && VK_EXT_DISCARD_RECTANGLES_SPEC_VERSION >= 2 - PFN_vkCmdSetDiscardRectangleEnableEXT vkCmdSetDiscardRectangleEnableEXT; - PFN_vkCmdSetDiscardRectangleModeEXT vkCmdSetDiscardRectangleModeEXT; -#endif /* defined(VK_EXT_discard_rectangles) && VK_EXT_DISCARD_RECTANGLES_SPEC_VERSION >= 2 */ -#if defined(VK_EXT_display_control) - PFN_vkDisplayPowerControlEXT vkDisplayPowerControlEXT; - PFN_vkGetSwapchainCounterEXT vkGetSwapchainCounterEXT; - PFN_vkRegisterDeviceEventEXT vkRegisterDeviceEventEXT; - PFN_vkRegisterDisplayEventEXT vkRegisterDisplayEventEXT; -#endif /* defined(VK_EXT_display_control) */ -#if defined(VK_EXT_external_memory_host) - PFN_vkGetMemoryHostPointerPropertiesEXT vkGetMemoryHostPointerPropertiesEXT; -#endif /* defined(VK_EXT_external_memory_host) */ -#if defined(VK_EXT_full_screen_exclusive) - PFN_vkAcquireFullScreenExclusiveModeEXT vkAcquireFullScreenExclusiveModeEXT; - PFN_vkReleaseFullScreenExclusiveModeEXT vkReleaseFullScreenExclusiveModeEXT; -#endif /* defined(VK_EXT_full_screen_exclusive) */ -#if defined(VK_EXT_hdr_metadata) - PFN_vkSetHdrMetadataEXT vkSetHdrMetadataEXT; -#endif /* defined(VK_EXT_hdr_metadata) */ -#if defined(VK_EXT_host_image_copy) - PFN_vkCopyImageToImageEXT vkCopyImageToImageEXT; - PFN_vkCopyImageToMemoryEXT vkCopyImageToMemoryEXT; - PFN_vkCopyMemoryToImageEXT vkCopyMemoryToImageEXT; - PFN_vkTransitionImageLayoutEXT vkTransitionImageLayoutEXT; -#endif /* defined(VK_EXT_host_image_copy) */ -#if defined(VK_EXT_host_query_reset) - PFN_vkResetQueryPoolEXT vkResetQueryPoolEXT; -#endif /* defined(VK_EXT_host_query_reset) */ -#if defined(VK_EXT_image_drm_format_modifier) - PFN_vkGetImageDrmFormatModifierPropertiesEXT vkGetImageDrmFormatModifierPropertiesEXT; -#endif /* defined(VK_EXT_image_drm_format_modifier) */ -#if defined(VK_EXT_line_rasterization) - PFN_vkCmdSetLineStippleEXT vkCmdSetLineStippleEXT; -#endif /* defined(VK_EXT_line_rasterization) */ -#if defined(VK_EXT_mesh_shader) - PFN_vkCmdDrawMeshTasksEXT vkCmdDrawMeshTasksEXT; - PFN_vkCmdDrawMeshTasksIndirectCountEXT vkCmdDrawMeshTasksIndirectCountEXT; - PFN_vkCmdDrawMeshTasksIndirectEXT vkCmdDrawMeshTasksIndirectEXT; -#endif /* defined(VK_EXT_mesh_shader) */ -#if defined(VK_EXT_metal_objects) - PFN_vkExportMetalObjectsEXT vkExportMetalObjectsEXT; -#endif /* defined(VK_EXT_metal_objects) */ -#if defined(VK_EXT_multi_draw) - PFN_vkCmdDrawMultiEXT vkCmdDrawMultiEXT; - PFN_vkCmdDrawMultiIndexedEXT vkCmdDrawMultiIndexedEXT; -#endif /* defined(VK_EXT_multi_draw) */ -#if defined(VK_EXT_opacity_micromap) - PFN_vkBuildMicromapsEXT vkBuildMicromapsEXT; - PFN_vkCmdBuildMicromapsEXT vkCmdBuildMicromapsEXT; - PFN_vkCmdCopyMemoryToMicromapEXT vkCmdCopyMemoryToMicromapEXT; - PFN_vkCmdCopyMicromapEXT vkCmdCopyMicromapEXT; - PFN_vkCmdCopyMicromapToMemoryEXT vkCmdCopyMicromapToMemoryEXT; - PFN_vkCmdWriteMicromapsPropertiesEXT vkCmdWriteMicromapsPropertiesEXT; - PFN_vkCopyMemoryToMicromapEXT vkCopyMemoryToMicromapEXT; - PFN_vkCopyMicromapEXT vkCopyMicromapEXT; - 
PFN_vkCopyMicromapToMemoryEXT vkCopyMicromapToMemoryEXT; - PFN_vkCreateMicromapEXT vkCreateMicromapEXT; - PFN_vkDestroyMicromapEXT vkDestroyMicromapEXT; - PFN_vkGetDeviceMicromapCompatibilityEXT vkGetDeviceMicromapCompatibilityEXT; - PFN_vkGetMicromapBuildSizesEXT vkGetMicromapBuildSizesEXT; - PFN_vkWriteMicromapsPropertiesEXT vkWriteMicromapsPropertiesEXT; -#endif /* defined(VK_EXT_opacity_micromap) */ -#if defined(VK_EXT_pageable_device_local_memory) - PFN_vkSetDeviceMemoryPriorityEXT vkSetDeviceMemoryPriorityEXT; -#endif /* defined(VK_EXT_pageable_device_local_memory) */ -#if defined(VK_EXT_pipeline_properties) - PFN_vkGetPipelinePropertiesEXT vkGetPipelinePropertiesEXT; -#endif /* defined(VK_EXT_pipeline_properties) */ -#if defined(VK_EXT_private_data) - PFN_vkCreatePrivateDataSlotEXT vkCreatePrivateDataSlotEXT; - PFN_vkDestroyPrivateDataSlotEXT vkDestroyPrivateDataSlotEXT; - PFN_vkGetPrivateDataEXT vkGetPrivateDataEXT; - PFN_vkSetPrivateDataEXT vkSetPrivateDataEXT; -#endif /* defined(VK_EXT_private_data) */ -#if defined(VK_EXT_sample_locations) - PFN_vkCmdSetSampleLocationsEXT vkCmdSetSampleLocationsEXT; -#endif /* defined(VK_EXT_sample_locations) */ -#if defined(VK_EXT_shader_module_identifier) - PFN_vkGetShaderModuleCreateInfoIdentifierEXT vkGetShaderModuleCreateInfoIdentifierEXT; - PFN_vkGetShaderModuleIdentifierEXT vkGetShaderModuleIdentifierEXT; -#endif /* defined(VK_EXT_shader_module_identifier) */ -#if defined(VK_EXT_shader_object) - PFN_vkCmdBindShadersEXT vkCmdBindShadersEXT; - PFN_vkCreateShadersEXT vkCreateShadersEXT; - PFN_vkDestroyShaderEXT vkDestroyShaderEXT; - PFN_vkGetShaderBinaryDataEXT vkGetShaderBinaryDataEXT; -#endif /* defined(VK_EXT_shader_object) */ -#if defined(VK_EXT_swapchain_maintenance1) - PFN_vkReleaseSwapchainImagesEXT vkReleaseSwapchainImagesEXT; -#endif /* defined(VK_EXT_swapchain_maintenance1) */ -#if defined(VK_EXT_transform_feedback) - PFN_vkCmdBeginQueryIndexedEXT vkCmdBeginQueryIndexedEXT; - PFN_vkCmdBeginTransformFeedbackEXT vkCmdBeginTransformFeedbackEXT; - PFN_vkCmdBindTransformFeedbackBuffersEXT vkCmdBindTransformFeedbackBuffersEXT; - PFN_vkCmdDrawIndirectByteCountEXT vkCmdDrawIndirectByteCountEXT; - PFN_vkCmdEndQueryIndexedEXT vkCmdEndQueryIndexedEXT; - PFN_vkCmdEndTransformFeedbackEXT vkCmdEndTransformFeedbackEXT; -#endif /* defined(VK_EXT_transform_feedback) */ -#if defined(VK_EXT_validation_cache) - PFN_vkCreateValidationCacheEXT vkCreateValidationCacheEXT; - PFN_vkDestroyValidationCacheEXT vkDestroyValidationCacheEXT; - PFN_vkGetValidationCacheDataEXT vkGetValidationCacheDataEXT; - PFN_vkMergeValidationCachesEXT vkMergeValidationCachesEXT; -#endif /* defined(VK_EXT_validation_cache) */ -#if defined(VK_FUCHSIA_buffer_collection) - PFN_vkCreateBufferCollectionFUCHSIA vkCreateBufferCollectionFUCHSIA; - PFN_vkDestroyBufferCollectionFUCHSIA vkDestroyBufferCollectionFUCHSIA; - PFN_vkGetBufferCollectionPropertiesFUCHSIA vkGetBufferCollectionPropertiesFUCHSIA; - PFN_vkSetBufferCollectionBufferConstraintsFUCHSIA vkSetBufferCollectionBufferConstraintsFUCHSIA; - PFN_vkSetBufferCollectionImageConstraintsFUCHSIA vkSetBufferCollectionImageConstraintsFUCHSIA; -#endif /* defined(VK_FUCHSIA_buffer_collection) */ -#if defined(VK_FUCHSIA_external_memory) - PFN_vkGetMemoryZirconHandleFUCHSIA vkGetMemoryZirconHandleFUCHSIA; - PFN_vkGetMemoryZirconHandlePropertiesFUCHSIA vkGetMemoryZirconHandlePropertiesFUCHSIA; -#endif /* defined(VK_FUCHSIA_external_memory) */ -#if defined(VK_FUCHSIA_external_semaphore) - PFN_vkGetSemaphoreZirconHandleFUCHSIA 
vkGetSemaphoreZirconHandleFUCHSIA; - PFN_vkImportSemaphoreZirconHandleFUCHSIA vkImportSemaphoreZirconHandleFUCHSIA; -#endif /* defined(VK_FUCHSIA_external_semaphore) */ -#if defined(VK_GOOGLE_display_timing) - PFN_vkGetPastPresentationTimingGOOGLE vkGetPastPresentationTimingGOOGLE; - PFN_vkGetRefreshCycleDurationGOOGLE vkGetRefreshCycleDurationGOOGLE; -#endif /* defined(VK_GOOGLE_display_timing) */ -#if defined(VK_HUAWEI_cluster_culling_shader) - PFN_vkCmdDrawClusterHUAWEI vkCmdDrawClusterHUAWEI; - PFN_vkCmdDrawClusterIndirectHUAWEI vkCmdDrawClusterIndirectHUAWEI; -#endif /* defined(VK_HUAWEI_cluster_culling_shader) */ -#if defined(VK_HUAWEI_invocation_mask) - PFN_vkCmdBindInvocationMaskHUAWEI vkCmdBindInvocationMaskHUAWEI; -#endif /* defined(VK_HUAWEI_invocation_mask) */ -#if defined(VK_HUAWEI_subpass_shading) - PFN_vkCmdSubpassShadingHUAWEI vkCmdSubpassShadingHUAWEI; - PFN_vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI; -#endif /* defined(VK_HUAWEI_subpass_shading) */ -#if defined(VK_INTEL_performance_query) - PFN_vkAcquirePerformanceConfigurationINTEL vkAcquirePerformanceConfigurationINTEL; - PFN_vkCmdSetPerformanceMarkerINTEL vkCmdSetPerformanceMarkerINTEL; - PFN_vkCmdSetPerformanceOverrideINTEL vkCmdSetPerformanceOverrideINTEL; - PFN_vkCmdSetPerformanceStreamMarkerINTEL vkCmdSetPerformanceStreamMarkerINTEL; - PFN_vkGetPerformanceParameterINTEL vkGetPerformanceParameterINTEL; - PFN_vkInitializePerformanceApiINTEL vkInitializePerformanceApiINTEL; - PFN_vkQueueSetPerformanceConfigurationINTEL vkQueueSetPerformanceConfigurationINTEL; - PFN_vkReleasePerformanceConfigurationINTEL vkReleasePerformanceConfigurationINTEL; - PFN_vkUninitializePerformanceApiINTEL vkUninitializePerformanceApiINTEL; -#endif /* defined(VK_INTEL_performance_query) */ -#if defined(VK_KHR_acceleration_structure) - PFN_vkBuildAccelerationStructuresKHR vkBuildAccelerationStructuresKHR; - PFN_vkCmdBuildAccelerationStructuresIndirectKHR vkCmdBuildAccelerationStructuresIndirectKHR; - PFN_vkCmdBuildAccelerationStructuresKHR vkCmdBuildAccelerationStructuresKHR; - PFN_vkCmdCopyAccelerationStructureKHR vkCmdCopyAccelerationStructureKHR; - PFN_vkCmdCopyAccelerationStructureToMemoryKHR vkCmdCopyAccelerationStructureToMemoryKHR; - PFN_vkCmdCopyMemoryToAccelerationStructureKHR vkCmdCopyMemoryToAccelerationStructureKHR; - PFN_vkCmdWriteAccelerationStructuresPropertiesKHR vkCmdWriteAccelerationStructuresPropertiesKHR; - PFN_vkCopyAccelerationStructureKHR vkCopyAccelerationStructureKHR; - PFN_vkCopyAccelerationStructureToMemoryKHR vkCopyAccelerationStructureToMemoryKHR; - PFN_vkCopyMemoryToAccelerationStructureKHR vkCopyMemoryToAccelerationStructureKHR; - PFN_vkCreateAccelerationStructureKHR vkCreateAccelerationStructureKHR; - PFN_vkDestroyAccelerationStructureKHR vkDestroyAccelerationStructureKHR; - PFN_vkGetAccelerationStructureBuildSizesKHR vkGetAccelerationStructureBuildSizesKHR; - PFN_vkGetAccelerationStructureDeviceAddressKHR vkGetAccelerationStructureDeviceAddressKHR; - PFN_vkGetDeviceAccelerationStructureCompatibilityKHR vkGetDeviceAccelerationStructureCompatibilityKHR; - PFN_vkWriteAccelerationStructuresPropertiesKHR vkWriteAccelerationStructuresPropertiesKHR; -#endif /* defined(VK_KHR_acceleration_structure) */ -#if defined(VK_KHR_bind_memory2) - PFN_vkBindBufferMemory2KHR vkBindBufferMemory2KHR; - PFN_vkBindImageMemory2KHR vkBindImageMemory2KHR; -#endif /* defined(VK_KHR_bind_memory2) */ -#if defined(VK_KHR_buffer_device_address) - PFN_vkGetBufferDeviceAddressKHR 
vkGetBufferDeviceAddressKHR; - PFN_vkGetBufferOpaqueCaptureAddressKHR vkGetBufferOpaqueCaptureAddressKHR; - PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR vkGetDeviceMemoryOpaqueCaptureAddressKHR; -#endif /* defined(VK_KHR_buffer_device_address) */ -#if defined(VK_KHR_copy_commands2) - PFN_vkCmdBlitImage2KHR vkCmdBlitImage2KHR; - PFN_vkCmdCopyBuffer2KHR vkCmdCopyBuffer2KHR; - PFN_vkCmdCopyBufferToImage2KHR vkCmdCopyBufferToImage2KHR; - PFN_vkCmdCopyImage2KHR vkCmdCopyImage2KHR; - PFN_vkCmdCopyImageToBuffer2KHR vkCmdCopyImageToBuffer2KHR; - PFN_vkCmdResolveImage2KHR vkCmdResolveImage2KHR; -#endif /* defined(VK_KHR_copy_commands2) */ -#if defined(VK_KHR_create_renderpass2) - PFN_vkCmdBeginRenderPass2KHR vkCmdBeginRenderPass2KHR; - PFN_vkCmdEndRenderPass2KHR vkCmdEndRenderPass2KHR; - PFN_vkCmdNextSubpass2KHR vkCmdNextSubpass2KHR; - PFN_vkCreateRenderPass2KHR vkCreateRenderPass2KHR; -#endif /* defined(VK_KHR_create_renderpass2) */ -#if defined(VK_KHR_deferred_host_operations) - PFN_vkCreateDeferredOperationKHR vkCreateDeferredOperationKHR; - PFN_vkDeferredOperationJoinKHR vkDeferredOperationJoinKHR; - PFN_vkDestroyDeferredOperationKHR vkDestroyDeferredOperationKHR; - PFN_vkGetDeferredOperationMaxConcurrencyKHR vkGetDeferredOperationMaxConcurrencyKHR; - PFN_vkGetDeferredOperationResultKHR vkGetDeferredOperationResultKHR; -#endif /* defined(VK_KHR_deferred_host_operations) */ -#if defined(VK_KHR_descriptor_update_template) - PFN_vkCreateDescriptorUpdateTemplateKHR vkCreateDescriptorUpdateTemplateKHR; - PFN_vkDestroyDescriptorUpdateTemplateKHR vkDestroyDescriptorUpdateTemplateKHR; - PFN_vkUpdateDescriptorSetWithTemplateKHR vkUpdateDescriptorSetWithTemplateKHR; -#endif /* defined(VK_KHR_descriptor_update_template) */ -#if defined(VK_KHR_device_group) - PFN_vkCmdDispatchBaseKHR vkCmdDispatchBaseKHR; - PFN_vkCmdSetDeviceMaskKHR vkCmdSetDeviceMaskKHR; - PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR vkGetDeviceGroupPeerMemoryFeaturesKHR; -#endif /* defined(VK_KHR_device_group) */ -#if defined(VK_KHR_display_swapchain) - PFN_vkCreateSharedSwapchainsKHR vkCreateSharedSwapchainsKHR; -#endif /* defined(VK_KHR_display_swapchain) */ -#if defined(VK_KHR_draw_indirect_count) - PFN_vkCmdDrawIndexedIndirectCountKHR vkCmdDrawIndexedIndirectCountKHR; - PFN_vkCmdDrawIndirectCountKHR vkCmdDrawIndirectCountKHR; -#endif /* defined(VK_KHR_draw_indirect_count) */ -#if defined(VK_KHR_dynamic_rendering) - PFN_vkCmdBeginRenderingKHR vkCmdBeginRenderingKHR; - PFN_vkCmdEndRenderingKHR vkCmdEndRenderingKHR; -#endif /* defined(VK_KHR_dynamic_rendering) */ -#if defined(VK_KHR_external_fence_fd) - PFN_vkGetFenceFdKHR vkGetFenceFdKHR; - PFN_vkImportFenceFdKHR vkImportFenceFdKHR; -#endif /* defined(VK_KHR_external_fence_fd) */ -#if defined(VK_KHR_external_fence_win32) - PFN_vkGetFenceWin32HandleKHR vkGetFenceWin32HandleKHR; - PFN_vkImportFenceWin32HandleKHR vkImportFenceWin32HandleKHR; -#endif /* defined(VK_KHR_external_fence_win32) */ -#if defined(VK_KHR_external_memory_fd) - PFN_vkGetMemoryFdKHR vkGetMemoryFdKHR; - PFN_vkGetMemoryFdPropertiesKHR vkGetMemoryFdPropertiesKHR; -#endif /* defined(VK_KHR_external_memory_fd) */ -#if defined(VK_KHR_external_memory_win32) - PFN_vkGetMemoryWin32HandleKHR vkGetMemoryWin32HandleKHR; - PFN_vkGetMemoryWin32HandlePropertiesKHR vkGetMemoryWin32HandlePropertiesKHR; -#endif /* defined(VK_KHR_external_memory_win32) */ -#if defined(VK_KHR_external_semaphore_fd) - PFN_vkGetSemaphoreFdKHR vkGetSemaphoreFdKHR; - PFN_vkImportSemaphoreFdKHR vkImportSemaphoreFdKHR; -#endif /* 
defined(VK_KHR_external_semaphore_fd) */ -#if defined(VK_KHR_external_semaphore_win32) - PFN_vkGetSemaphoreWin32HandleKHR vkGetSemaphoreWin32HandleKHR; - PFN_vkImportSemaphoreWin32HandleKHR vkImportSemaphoreWin32HandleKHR; -#endif /* defined(VK_KHR_external_semaphore_win32) */ -#if defined(VK_KHR_fragment_shading_rate) - PFN_vkCmdSetFragmentShadingRateKHR vkCmdSetFragmentShadingRateKHR; -#endif /* defined(VK_KHR_fragment_shading_rate) */ -#if defined(VK_KHR_get_memory_requirements2) - PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR; - PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR; - PFN_vkGetImageSparseMemoryRequirements2KHR vkGetImageSparseMemoryRequirements2KHR; -#endif /* defined(VK_KHR_get_memory_requirements2) */ -#if defined(VK_KHR_maintenance1) - PFN_vkTrimCommandPoolKHR vkTrimCommandPoolKHR; -#endif /* defined(VK_KHR_maintenance1) */ -#if defined(VK_KHR_maintenance3) - PFN_vkGetDescriptorSetLayoutSupportKHR vkGetDescriptorSetLayoutSupportKHR; -#endif /* defined(VK_KHR_maintenance3) */ -#if defined(VK_KHR_maintenance4) - PFN_vkGetDeviceBufferMemoryRequirementsKHR vkGetDeviceBufferMemoryRequirementsKHR; - PFN_vkGetDeviceImageMemoryRequirementsKHR vkGetDeviceImageMemoryRequirementsKHR; - PFN_vkGetDeviceImageSparseMemoryRequirementsKHR vkGetDeviceImageSparseMemoryRequirementsKHR; -#endif /* defined(VK_KHR_maintenance4) */ -#if defined(VK_KHR_maintenance5) - PFN_vkCmdBindIndexBuffer2KHR vkCmdBindIndexBuffer2KHR; - PFN_vkGetDeviceImageSubresourceLayoutKHR vkGetDeviceImageSubresourceLayoutKHR; - PFN_vkGetImageSubresourceLayout2KHR vkGetImageSubresourceLayout2KHR; - PFN_vkGetRenderingAreaGranularityKHR vkGetRenderingAreaGranularityKHR; -#endif /* defined(VK_KHR_maintenance5) */ -#if defined(VK_KHR_map_memory2) - PFN_vkMapMemory2KHR vkMapMemory2KHR; - PFN_vkUnmapMemory2KHR vkUnmapMemory2KHR; -#endif /* defined(VK_KHR_map_memory2) */ -#if defined(VK_KHR_performance_query) - PFN_vkAcquireProfilingLockKHR vkAcquireProfilingLockKHR; - PFN_vkReleaseProfilingLockKHR vkReleaseProfilingLockKHR; -#endif /* defined(VK_KHR_performance_query) */ -#if defined(VK_KHR_pipeline_executable_properties) - PFN_vkGetPipelineExecutableInternalRepresentationsKHR vkGetPipelineExecutableInternalRepresentationsKHR; - PFN_vkGetPipelineExecutablePropertiesKHR vkGetPipelineExecutablePropertiesKHR; - PFN_vkGetPipelineExecutableStatisticsKHR vkGetPipelineExecutableStatisticsKHR; -#endif /* defined(VK_KHR_pipeline_executable_properties) */ -#if defined(VK_KHR_present_wait) - PFN_vkWaitForPresentKHR vkWaitForPresentKHR; -#endif /* defined(VK_KHR_present_wait) */ -#if defined(VK_KHR_push_descriptor) - PFN_vkCmdPushDescriptorSetKHR vkCmdPushDescriptorSetKHR; -#endif /* defined(VK_KHR_push_descriptor) */ -#if defined(VK_KHR_ray_tracing_maintenance1) && defined(VK_KHR_ray_tracing_pipeline) - PFN_vkCmdTraceRaysIndirect2KHR vkCmdTraceRaysIndirect2KHR; -#endif /* defined(VK_KHR_ray_tracing_maintenance1) && defined(VK_KHR_ray_tracing_pipeline) */ -#if defined(VK_KHR_ray_tracing_pipeline) - PFN_vkCmdSetRayTracingPipelineStackSizeKHR vkCmdSetRayTracingPipelineStackSizeKHR; - PFN_vkCmdTraceRaysIndirectKHR vkCmdTraceRaysIndirectKHR; - PFN_vkCmdTraceRaysKHR vkCmdTraceRaysKHR; - PFN_vkCreateRayTracingPipelinesKHR vkCreateRayTracingPipelinesKHR; - PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR vkGetRayTracingCaptureReplayShaderGroupHandlesKHR; - PFN_vkGetRayTracingShaderGroupHandlesKHR vkGetRayTracingShaderGroupHandlesKHR; - PFN_vkGetRayTracingShaderGroupStackSizeKHR 
vkGetRayTracingShaderGroupStackSizeKHR; -#endif /* defined(VK_KHR_ray_tracing_pipeline) */ -#if defined(VK_KHR_sampler_ycbcr_conversion) - PFN_vkCreateSamplerYcbcrConversionKHR vkCreateSamplerYcbcrConversionKHR; - PFN_vkDestroySamplerYcbcrConversionKHR vkDestroySamplerYcbcrConversionKHR; -#endif /* defined(VK_KHR_sampler_ycbcr_conversion) */ -#if defined(VK_KHR_shared_presentable_image) - PFN_vkGetSwapchainStatusKHR vkGetSwapchainStatusKHR; -#endif /* defined(VK_KHR_shared_presentable_image) */ -#if defined(VK_KHR_swapchain) - PFN_vkAcquireNextImageKHR vkAcquireNextImageKHR; - PFN_vkCreateSwapchainKHR vkCreateSwapchainKHR; - PFN_vkDestroySwapchainKHR vkDestroySwapchainKHR; - PFN_vkGetSwapchainImagesKHR vkGetSwapchainImagesKHR; - PFN_vkQueuePresentKHR vkQueuePresentKHR; -#endif /* defined(VK_KHR_swapchain) */ -#if defined(VK_KHR_synchronization2) - PFN_vkCmdPipelineBarrier2KHR vkCmdPipelineBarrier2KHR; - PFN_vkCmdResetEvent2KHR vkCmdResetEvent2KHR; - PFN_vkCmdSetEvent2KHR vkCmdSetEvent2KHR; - PFN_vkCmdWaitEvents2KHR vkCmdWaitEvents2KHR; - PFN_vkCmdWriteTimestamp2KHR vkCmdWriteTimestamp2KHR; - PFN_vkQueueSubmit2KHR vkQueueSubmit2KHR; -#endif /* defined(VK_KHR_synchronization2) */ -#if defined(VK_KHR_synchronization2) && defined(VK_AMD_buffer_marker) - PFN_vkCmdWriteBufferMarker2AMD vkCmdWriteBufferMarker2AMD; -#endif /* defined(VK_KHR_synchronization2) && defined(VK_AMD_buffer_marker) */ -#if defined(VK_KHR_synchronization2) && defined(VK_NV_device_diagnostic_checkpoints) - PFN_vkGetQueueCheckpointData2NV vkGetQueueCheckpointData2NV; -#endif /* defined(VK_KHR_synchronization2) && defined(VK_NV_device_diagnostic_checkpoints) */ -#if defined(VK_KHR_timeline_semaphore) - PFN_vkGetSemaphoreCounterValueKHR vkGetSemaphoreCounterValueKHR; - PFN_vkSignalSemaphoreKHR vkSignalSemaphoreKHR; - PFN_vkWaitSemaphoresKHR vkWaitSemaphoresKHR; -#endif /* defined(VK_KHR_timeline_semaphore) */ -#if defined(VK_KHR_video_decode_queue) - PFN_vkCmdDecodeVideoKHR vkCmdDecodeVideoKHR; -#endif /* defined(VK_KHR_video_decode_queue) */ -#if defined(VK_KHR_video_encode_queue) - PFN_vkCmdEncodeVideoKHR vkCmdEncodeVideoKHR; - PFN_vkGetEncodedVideoSessionParametersKHR vkGetEncodedVideoSessionParametersKHR; -#endif /* defined(VK_KHR_video_encode_queue) */ -#if defined(VK_KHR_video_queue) - PFN_vkBindVideoSessionMemoryKHR vkBindVideoSessionMemoryKHR; - PFN_vkCmdBeginVideoCodingKHR vkCmdBeginVideoCodingKHR; - PFN_vkCmdControlVideoCodingKHR vkCmdControlVideoCodingKHR; - PFN_vkCmdEndVideoCodingKHR vkCmdEndVideoCodingKHR; - PFN_vkCreateVideoSessionKHR vkCreateVideoSessionKHR; - PFN_vkCreateVideoSessionParametersKHR vkCreateVideoSessionParametersKHR; - PFN_vkDestroyVideoSessionKHR vkDestroyVideoSessionKHR; - PFN_vkDestroyVideoSessionParametersKHR vkDestroyVideoSessionParametersKHR; - PFN_vkGetVideoSessionMemoryRequirementsKHR vkGetVideoSessionMemoryRequirementsKHR; - PFN_vkUpdateVideoSessionParametersKHR vkUpdateVideoSessionParametersKHR; -#endif /* defined(VK_KHR_video_queue) */ -#if defined(VK_NVX_binary_import) - PFN_vkCmdCuLaunchKernelNVX vkCmdCuLaunchKernelNVX; - PFN_vkCreateCuFunctionNVX vkCreateCuFunctionNVX; - PFN_vkCreateCuModuleNVX vkCreateCuModuleNVX; - PFN_vkDestroyCuFunctionNVX vkDestroyCuFunctionNVX; - PFN_vkDestroyCuModuleNVX vkDestroyCuModuleNVX; -#endif /* defined(VK_NVX_binary_import) */ -#if defined(VK_NVX_image_view_handle) - PFN_vkGetImageViewAddressNVX vkGetImageViewAddressNVX; - PFN_vkGetImageViewHandleNVX vkGetImageViewHandleNVX; -#endif /* defined(VK_NVX_image_view_handle) */ -#if 
[Remainder of the deleted volk-generated declarations: the tail of the per-device function-pointer table (the VK_NV_*, VK_QCOM_*, VK_QNX_* and VK_VALVE_* entries plus the combined VK_EXT_extended_dynamic_state*/VK_EXT_shader_object groups, ending at the /* VOLK_GENERATE_DEVICE_TABLE */ marker and the closing "};"), followed by the /* VOLK_GENERATE_PROTOTYPES_H */ block of extern PFN_vk* prototypes covering Vulkan core 1.0-1.3 and the AMD/AMDX, ANDROID, EXT, FUCHSIA, GGP, GOOGLE, HUAWEI, INTEL, KHR, MVK, NN, NVX, NV, QCOM, QNX and VALVE extension entry points, each guarded by its #if defined(VK_...) / #endif pair.]
defined(VK_NV_clip_space_w_scaling)) */ -#if (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_viewport_swizzle)) || (defined(VK_EXT_shader_object) && defined(VK_NV_viewport_swizzle)) -extern PFN_vkCmdSetViewportSwizzleNV vkCmdSetViewportSwizzleNV; -#endif /* (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_viewport_swizzle)) || (defined(VK_EXT_shader_object) && defined(VK_NV_viewport_swizzle)) */ -#if (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_fragment_coverage_to_color)) || (defined(VK_EXT_shader_object) && defined(VK_NV_fragment_coverage_to_color)) -extern PFN_vkCmdSetCoverageToColorEnableNV vkCmdSetCoverageToColorEnableNV; -extern PFN_vkCmdSetCoverageToColorLocationNV vkCmdSetCoverageToColorLocationNV; -#endif /* (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_fragment_coverage_to_color)) || (defined(VK_EXT_shader_object) && defined(VK_NV_fragment_coverage_to_color)) */ -#if (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_framebuffer_mixed_samples)) || (defined(VK_EXT_shader_object) && defined(VK_NV_framebuffer_mixed_samples)) -extern PFN_vkCmdSetCoverageModulationModeNV vkCmdSetCoverageModulationModeNV; -extern PFN_vkCmdSetCoverageModulationTableEnableNV vkCmdSetCoverageModulationTableEnableNV; -extern PFN_vkCmdSetCoverageModulationTableNV vkCmdSetCoverageModulationTableNV; -#endif /* (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_framebuffer_mixed_samples)) || (defined(VK_EXT_shader_object) && defined(VK_NV_framebuffer_mixed_samples)) */ -#if (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_shading_rate_image)) || (defined(VK_EXT_shader_object) && defined(VK_NV_shading_rate_image)) -extern PFN_vkCmdSetShadingRateImageEnableNV vkCmdSetShadingRateImageEnableNV; -#endif /* (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_shading_rate_image)) || (defined(VK_EXT_shader_object) && defined(VK_NV_shading_rate_image)) */ -#if (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_representative_fragment_test)) || (defined(VK_EXT_shader_object) && defined(VK_NV_representative_fragment_test)) -extern PFN_vkCmdSetRepresentativeFragmentTestEnableNV vkCmdSetRepresentativeFragmentTestEnableNV; -#endif /* (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_representative_fragment_test)) || (defined(VK_EXT_shader_object) && defined(VK_NV_representative_fragment_test)) */ -#if (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_coverage_reduction_mode)) || (defined(VK_EXT_shader_object) && defined(VK_NV_coverage_reduction_mode)) -extern PFN_vkCmdSetCoverageReductionModeNV vkCmdSetCoverageReductionModeNV; -#endif /* (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_coverage_reduction_mode)) || (defined(VK_EXT_shader_object) && defined(VK_NV_coverage_reduction_mode)) */ -#if (defined(VK_EXT_full_screen_exclusive) && defined(VK_KHR_device_group)) || (defined(VK_EXT_full_screen_exclusive) && defined(VK_VERSION_1_1)) -extern PFN_vkGetDeviceGroupSurfacePresentModes2EXT vkGetDeviceGroupSurfacePresentModes2EXT; -#endif /* (defined(VK_EXT_full_screen_exclusive) && defined(VK_KHR_device_group)) || (defined(VK_EXT_full_screen_exclusive) && defined(VK_VERSION_1_1)) */ -#if (defined(VK_EXT_host_image_copy)) || (defined(VK_EXT_image_compression_control)) -extern PFN_vkGetImageSubresourceLayout2EXT vkGetImageSubresourceLayout2EXT; -#endif /* (defined(VK_EXT_host_image_copy)) || (defined(VK_EXT_image_compression_control)) */ -#if (defined(VK_EXT_shader_object)) || (defined(VK_EXT_vertex_input_dynamic_state)) 
-extern PFN_vkCmdSetVertexInputEXT vkCmdSetVertexInputEXT; -#endif /* (defined(VK_EXT_shader_object)) || (defined(VK_EXT_vertex_input_dynamic_state)) */ -#if (defined(VK_KHR_descriptor_update_template) && defined(VK_KHR_push_descriptor)) || (defined(VK_KHR_push_descriptor) && defined(VK_VERSION_1_1)) || (defined(VK_KHR_push_descriptor) && defined(VK_KHR_descriptor_update_template)) -extern PFN_vkCmdPushDescriptorSetWithTemplateKHR vkCmdPushDescriptorSetWithTemplateKHR; -#endif /* (defined(VK_KHR_descriptor_update_template) && defined(VK_KHR_push_descriptor)) || (defined(VK_KHR_push_descriptor) && defined(VK_VERSION_1_1)) || (defined(VK_KHR_push_descriptor) && defined(VK_KHR_descriptor_update_template)) */ -#if (defined(VK_KHR_device_group) && defined(VK_KHR_surface)) || (defined(VK_KHR_swapchain) && defined(VK_VERSION_1_1)) -extern PFN_vkGetDeviceGroupPresentCapabilitiesKHR vkGetDeviceGroupPresentCapabilitiesKHR; -extern PFN_vkGetDeviceGroupSurfacePresentModesKHR vkGetDeviceGroupSurfacePresentModesKHR; -extern PFN_vkGetPhysicalDevicePresentRectanglesKHR vkGetPhysicalDevicePresentRectanglesKHR; -#endif /* (defined(VK_KHR_device_group) && defined(VK_KHR_surface)) || (defined(VK_KHR_swapchain) && defined(VK_VERSION_1_1)) */ -#if (defined(VK_KHR_device_group) && defined(VK_KHR_swapchain)) || (defined(VK_KHR_swapchain) && defined(VK_VERSION_1_1)) -extern PFN_vkAcquireNextImage2KHR vkAcquireNextImage2KHR; -#endif /* (defined(VK_KHR_device_group) && defined(VK_KHR_swapchain)) || (defined(VK_KHR_swapchain) && defined(VK_VERSION_1_1)) */ -/* VOLK_GENERATE_PROTOTYPES_H */ - -#ifdef __cplusplus -} -#endif - -#endif - -#ifdef VOLK_IMPLEMENTATION -#undef VOLK_IMPLEMENTATION -/* Prevent tools like dependency checkers from detecting a cyclic dependency */ -#define VOLK_SOURCE "volk.c" -#include VOLK_SOURCE -#endif - -/** - * Copyright (c) 2018-2023 Arseny Kapoulkine - * - * Permission is hereby granted, free of charge, to any person obtaining a copy - * of this software and associated documentation files (the "Software"), to deal - * in the Software without restriction, including without limitation the rights - * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell - * copies of the Software, and to permit persons to whom the Software is - * furnished to do so, subject to the following conditions: - * - * The above copyright notice and this permission notice shall be included in all - * copies or substantial portions of the Software. - * - * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE - * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, - * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE - * SOFTWARE. 
-*/ -/* clang-format on */ diff --git a/third_party/vulkan/vulkan.cppm b/third_party/vulkan/vulkan.cppm index 0b7fafb..60b70f0 100644 --- a/third_party/vulkan/vulkan.cppm +++ b/third_party/vulkan/vulkan.cppm @@ -62,6 +62,8 @@ export namespace VULKAN_HPP_NAMESPACE using VULKAN_HPP_NAMESPACE::UniqueHandle; #endif /*VULKAN_HPP_NO_SMART_HANDLE*/ + using VULKAN_HPP_NAMESPACE::exchange; + //================== //=== BASE TYPEs === //================== @@ -236,37 +238,48 @@ export namespace VULKAN_HPP_NAMESPACE using VULKAN_HPP_NAMESPACE::ChromaLocationKHR; using VULKAN_HPP_NAMESPACE::CommandPoolTrimFlagBits; using VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags; + using VULKAN_HPP_NAMESPACE::CommandPoolTrimFlagsKHR; using VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateFlagBits; using VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateFlags; + using VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateFlagsKHR; using VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType; using VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateTypeKHR; using VULKAN_HPP_NAMESPACE::ExternalFenceFeatureFlagBits; using VULKAN_HPP_NAMESPACE::ExternalFenceFeatureFlagBitsKHR; using VULKAN_HPP_NAMESPACE::ExternalFenceFeatureFlags; + using VULKAN_HPP_NAMESPACE::ExternalFenceFeatureFlagsKHR; using VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits; using VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBitsKHR; using VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags; + using VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagsKHR; using VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlagBits; using VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlagBitsKHR; using VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlags; + using VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlagsKHR; using VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits; using VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBitsKHR; using VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags; + using VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsKHR; using VULKAN_HPP_NAMESPACE::ExternalSemaphoreFeatureFlagBits; using VULKAN_HPP_NAMESPACE::ExternalSemaphoreFeatureFlagBitsKHR; using VULKAN_HPP_NAMESPACE::ExternalSemaphoreFeatureFlags; + using VULKAN_HPP_NAMESPACE::ExternalSemaphoreFeatureFlagsKHR; using VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits; using VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBitsKHR; using VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags; + using VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagsKHR; using VULKAN_HPP_NAMESPACE::FenceImportFlagBits; using VULKAN_HPP_NAMESPACE::FenceImportFlagBitsKHR; using VULKAN_HPP_NAMESPACE::FenceImportFlags; + using VULKAN_HPP_NAMESPACE::FenceImportFlagsKHR; using VULKAN_HPP_NAMESPACE::MemoryAllocateFlagBits; using VULKAN_HPP_NAMESPACE::MemoryAllocateFlagBitsKHR; using VULKAN_HPP_NAMESPACE::MemoryAllocateFlags; + using VULKAN_HPP_NAMESPACE::MemoryAllocateFlagsKHR; using VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlagBits; using VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlagBitsKHR; using VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags; + using VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlagsKHR; using VULKAN_HPP_NAMESPACE::PointClippingBehavior; using VULKAN_HPP_NAMESPACE::PointClippingBehaviorKHR; using VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion; @@ -276,6 +289,7 @@ export namespace VULKAN_HPP_NAMESPACE using VULKAN_HPP_NAMESPACE::SemaphoreImportFlagBits; using VULKAN_HPP_NAMESPACE::SemaphoreImportFlagBitsKHR; using VULKAN_HPP_NAMESPACE::SemaphoreImportFlags; + using 
VULKAN_HPP_NAMESPACE::SemaphoreImportFlagsKHR; using VULKAN_HPP_NAMESPACE::SubgroupFeatureFlagBits; using VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags; using VULKAN_HPP_NAMESPACE::TessellationDomainOrigin; @@ -285,11 +299,13 @@ export namespace VULKAN_HPP_NAMESPACE using VULKAN_HPP_NAMESPACE::DescriptorBindingFlagBits; using VULKAN_HPP_NAMESPACE::DescriptorBindingFlagBitsEXT; using VULKAN_HPP_NAMESPACE::DescriptorBindingFlags; + using VULKAN_HPP_NAMESPACE::DescriptorBindingFlagsEXT; using VULKAN_HPP_NAMESPACE::DriverId; using VULKAN_HPP_NAMESPACE::DriverIdKHR; using VULKAN_HPP_NAMESPACE::ResolveModeFlagBits; using VULKAN_HPP_NAMESPACE::ResolveModeFlagBitsKHR; using VULKAN_HPP_NAMESPACE::ResolveModeFlags; + using VULKAN_HPP_NAMESPACE::ResolveModeFlagsKHR; using VULKAN_HPP_NAMESPACE::SamplerReductionMode; using VULKAN_HPP_NAMESPACE::SamplerReductionModeEXT; using VULKAN_HPP_NAMESPACE::SemaphoreType; @@ -297,6 +313,7 @@ export namespace VULKAN_HPP_NAMESPACE using VULKAN_HPP_NAMESPACE::SemaphoreWaitFlagBits; using VULKAN_HPP_NAMESPACE::SemaphoreWaitFlagBitsKHR; using VULKAN_HPP_NAMESPACE::SemaphoreWaitFlags; + using VULKAN_HPP_NAMESPACE::SemaphoreWaitFlagsKHR; using VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence; using VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependenceKHR; @@ -304,27 +321,35 @@ export namespace VULKAN_HPP_NAMESPACE using VULKAN_HPP_NAMESPACE::AccessFlagBits2; using VULKAN_HPP_NAMESPACE::AccessFlagBits2KHR; using VULKAN_HPP_NAMESPACE::AccessFlags2; + using VULKAN_HPP_NAMESPACE::AccessFlags2KHR; using VULKAN_HPP_NAMESPACE::FormatFeatureFlagBits2; using VULKAN_HPP_NAMESPACE::FormatFeatureFlagBits2KHR; using VULKAN_HPP_NAMESPACE::FormatFeatureFlags2; + using VULKAN_HPP_NAMESPACE::FormatFeatureFlags2KHR; using VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackFlagBits; using VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackFlagBitsEXT; using VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackFlags; + using VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackFlagsEXT; using VULKAN_HPP_NAMESPACE::PipelineStageFlagBits2; using VULKAN_HPP_NAMESPACE::PipelineStageFlagBits2KHR; using VULKAN_HPP_NAMESPACE::PipelineStageFlags2; + using VULKAN_HPP_NAMESPACE::PipelineStageFlags2KHR; using VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateFlagBits; using VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateFlagBitsEXT; using VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateFlags; + using VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateFlagsEXT; using VULKAN_HPP_NAMESPACE::RenderingFlagBits; using VULKAN_HPP_NAMESPACE::RenderingFlagBitsKHR; using VULKAN_HPP_NAMESPACE::RenderingFlags; + using VULKAN_HPP_NAMESPACE::RenderingFlagsKHR; using VULKAN_HPP_NAMESPACE::SubmitFlagBits; using VULKAN_HPP_NAMESPACE::SubmitFlagBitsKHR; using VULKAN_HPP_NAMESPACE::SubmitFlags; + using VULKAN_HPP_NAMESPACE::SubmitFlagsKHR; using VULKAN_HPP_NAMESPACE::ToolPurposeFlagBits; using VULKAN_HPP_NAMESPACE::ToolPurposeFlagBitsEXT; using VULKAN_HPP_NAMESPACE::ToolPurposeFlags; + using VULKAN_HPP_NAMESPACE::ToolPurposeFlagsEXT; //=== VK_KHR_surface === using VULKAN_HPP_NAMESPACE::ColorSpaceKHR; @@ -551,15 +576,18 @@ export namespace VULKAN_HPP_NAMESPACE using VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagBitsKHR; using VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagBitsNV; using VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR; + using VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsNV; using VULKAN_HPP_NAMESPACE::BuildAccelerationStructureModeKHR; using VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR; using 
VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeNV; using VULKAN_HPP_NAMESPACE::GeometryFlagBitsKHR; using VULKAN_HPP_NAMESPACE::GeometryFlagBitsNV; using VULKAN_HPP_NAMESPACE::GeometryFlagsKHR; + using VULKAN_HPP_NAMESPACE::GeometryFlagsNV; using VULKAN_HPP_NAMESPACE::GeometryInstanceFlagBitsKHR; using VULKAN_HPP_NAMESPACE::GeometryInstanceFlagBitsNV; using VULKAN_HPP_NAMESPACE::GeometryInstanceFlagsKHR; + using VULKAN_HPP_NAMESPACE::GeometryInstanceFlagsNV; using VULKAN_HPP_NAMESPACE::GeometryTypeKHR; using VULKAN_HPP_NAMESPACE::GeometryTypeNV; @@ -806,6 +834,10 @@ export namespace VULKAN_HPP_NAMESPACE using VULKAN_HPP_NAMESPACE::PipelineCreateFlagBits2KHR; using VULKAN_HPP_NAMESPACE::PipelineCreateFlags2KHR; + //=== VK_AMD_anti_lag === + using VULKAN_HPP_NAMESPACE::AntiLagModeAMD; + using VULKAN_HPP_NAMESPACE::AntiLagStageAMD; + //=== VK_EXT_shader_object === using VULKAN_HPP_NAMESPACE::ShaderCodeTypeEXT; using VULKAN_HPP_NAMESPACE::ShaderCreateFlagBitsEXT; @@ -844,6 +876,9 @@ export namespace VULKAN_HPP_NAMESPACE using VULKAN_HPP_NAMESPACE::TimeDomainEXT; using VULKAN_HPP_NAMESPACE::TimeDomainKHR; + //=== VK_KHR_maintenance7 === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceLayeredApiKHR; + //========================= //=== Index Type Traits === //========================= @@ -906,11 +941,9 @@ export namespace VULKAN_HPP_NAMESPACE using VULKAN_HPP_NAMESPACE::CompressionExhaustedEXTError; using VULKAN_HPP_NAMESPACE::InvalidVideoStdParametersKHRError; + using VULKAN_HPP_NAMESPACE::NotEnoughSpaceKHRError; #endif /*VULKAN_HPP_NO_EXCEPTIONS*/ - using VULKAN_HPP_NAMESPACE::createResultValueType; - using VULKAN_HPP_NAMESPACE::ignore; - using VULKAN_HPP_NAMESPACE::resultCheck; using VULKAN_HPP_NAMESPACE::ResultValue; using VULKAN_HPP_NAMESPACE::ResultValueType; @@ -2355,6 +2388,10 @@ export namespace VULKAN_HPP_NAMESPACE using VULKAN_HPP_NAMESPACE::KHRMaintenance5ExtensionName; using VULKAN_HPP_NAMESPACE::KHRMaintenance5SpecVersion; + //=== VK_AMD_anti_lag === + using VULKAN_HPP_NAMESPACE::AMDAntiLagExtensionName; + using VULKAN_HPP_NAMESPACE::AMDAntiLagSpecVersion; + //=== VK_KHR_ray_tracing_position_fetch === using VULKAN_HPP_NAMESPACE::KHRRayTracingPositionFetchExtensionName; using VULKAN_HPP_NAMESPACE::KHRRayTracingPositionFetchSpecVersion; @@ -2363,6 +2400,11 @@ export namespace VULKAN_HPP_NAMESPACE using VULKAN_HPP_NAMESPACE::EXTShaderObjectExtensionName; using VULKAN_HPP_NAMESPACE::EXTShaderObjectSpecVersion; + //=== VK_KHR_pipeline_binary === + using VULKAN_HPP_NAMESPACE::KHRPipelineBinaryExtensionName; + using VULKAN_HPP_NAMESPACE::KHRPipelineBinarySpecVersion; + using VULKAN_HPP_NAMESPACE::MaxPipelineBinaryKeySizeKHR; + //=== VK_QCOM_tile_properties === using VULKAN_HPP_NAMESPACE::QCOMTilePropertiesExtensionName; using VULKAN_HPP_NAMESPACE::QCOMTilePropertiesSpecVersion; @@ -2387,6 +2429,10 @@ export namespace VULKAN_HPP_NAMESPACE using VULKAN_HPP_NAMESPACE::EXTMutableDescriptorTypeExtensionName; using VULKAN_HPP_NAMESPACE::EXTMutableDescriptorTypeSpecVersion; + //=== VK_EXT_legacy_vertex_attributes === + using VULKAN_HPP_NAMESPACE::EXTLegacyVertexAttributesExtensionName; + using VULKAN_HPP_NAMESPACE::EXTLegacyVertexAttributesSpecVersion; + //=== VK_EXT_layer_settings === using VULKAN_HPP_NAMESPACE::EXTLayerSettingsExtensionName; using VULKAN_HPP_NAMESPACE::EXTLayerSettingsSpecVersion; @@ -2415,6 +2461,10 @@ export namespace VULKAN_HPP_NAMESPACE using VULKAN_HPP_NAMESPACE::QCOMMultiviewPerViewRenderAreasExtensionName; using 
VULKAN_HPP_NAMESPACE::QCOMMultiviewPerViewRenderAreasSpecVersion; + //=== VK_KHR_compute_shader_derivatives === + using VULKAN_HPP_NAMESPACE::KHRComputeShaderDerivativesExtensionName; + using VULKAN_HPP_NAMESPACE::KHRComputeShaderDerivativesSpecVersion; + //=== VK_KHR_video_decode_av1 === using VULKAN_HPP_NAMESPACE::KHRVideoDecodeAv1ExtensionName; using VULKAN_HPP_NAMESPACE::KHRVideoDecodeAv1SpecVersion; @@ -2498,14 +2548,34 @@ export namespace VULKAN_HPP_NAMESPACE using VULKAN_HPP_NAMESPACE::NVRawAccessChainsExtensionName; using VULKAN_HPP_NAMESPACE::NVRawAccessChainsSpecVersion; + //=== VK_KHR_shader_relaxed_extended_instruction === + using VULKAN_HPP_NAMESPACE::KHRShaderRelaxedExtendedInstructionExtensionName; + using VULKAN_HPP_NAMESPACE::KHRShaderRelaxedExtendedInstructionSpecVersion; + + //=== VK_NV_command_buffer_inheritance === + using VULKAN_HPP_NAMESPACE::NVCommandBufferInheritanceExtensionName; + using VULKAN_HPP_NAMESPACE::NVCommandBufferInheritanceSpecVersion; + + //=== VK_KHR_maintenance7 === + using VULKAN_HPP_NAMESPACE::KHRMaintenance7ExtensionName; + using VULKAN_HPP_NAMESPACE::KHRMaintenance7SpecVersion; + //=== VK_NV_shader_atomic_float16_vector === using VULKAN_HPP_NAMESPACE::NVShaderAtomicFloat16VectorExtensionName; using VULKAN_HPP_NAMESPACE::NVShaderAtomicFloat16VectorSpecVersion; + //=== VK_EXT_shader_replicated_composites === + using VULKAN_HPP_NAMESPACE::EXTShaderReplicatedCompositesExtensionName; + using VULKAN_HPP_NAMESPACE::EXTShaderReplicatedCompositesSpecVersion; + //=== VK_NV_ray_tracing_validation === using VULKAN_HPP_NAMESPACE::NVRayTracingValidationExtensionName; using VULKAN_HPP_NAMESPACE::NVRayTracingValidationSpecVersion; + //=== VK_MESA_image_alignment_control === + using VULKAN_HPP_NAMESPACE::MESAImageAlignmentControlExtensionName; + using VULKAN_HPP_NAMESPACE::MESAImageAlignmentControlSpecVersion; + //======================== //=== CONSTEXPR VALUEs === //======================== @@ -3520,9 +3590,6 @@ export namespace VULKAN_HPP_NAMESPACE using VULKAN_HPP_NAMESPACE::PresentFrameTokenGGP; #endif /*VK_USE_PLATFORM_GGP*/ - //=== VK_NV_compute_shader_derivatives === - using VULKAN_HPP_NAMESPACE::PhysicalDeviceComputeShaderDerivativesFeaturesNV; - //=== VK_NV_mesh_shader === using VULKAN_HPP_NAMESPACE::DrawMeshTasksIndirectCommandNV; using VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderFeaturesNV; @@ -4183,6 +4250,11 @@ export namespace VULKAN_HPP_NAMESPACE using VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT; using VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR; + //=== VK_AMD_anti_lag === + using VULKAN_HPP_NAMESPACE::AntiLagDataAMD; + using VULKAN_HPP_NAMESPACE::AntiLagPresentationInfoAMD; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceAntiLagFeaturesAMD; + //=== VK_KHR_ray_tracing_position_fetch === using VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPositionFetchFeaturesKHR; @@ -4191,6 +4263,20 @@ export namespace VULKAN_HPP_NAMESPACE using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderObjectPropertiesEXT; using VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT; + //=== VK_KHR_pipeline_binary === + using VULKAN_HPP_NAMESPACE::DevicePipelineBinaryInternalCacheControlKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineBinaryFeaturesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineBinaryPropertiesKHR; + using VULKAN_HPP_NAMESPACE::PipelineBinaryCreateInfoKHR; + using VULKAN_HPP_NAMESPACE::PipelineBinaryDataInfoKHR; + using VULKAN_HPP_NAMESPACE::PipelineBinaryDataKHR; + using VULKAN_HPP_NAMESPACE::PipelineBinaryHandlesInfoKHR; + using 
VULKAN_HPP_NAMESPACE::PipelineBinaryInfoKHR; + using VULKAN_HPP_NAMESPACE::PipelineBinaryKeyKHR; + using VULKAN_HPP_NAMESPACE::PipelineBinaryKeysAndDataKHR; + using VULKAN_HPP_NAMESPACE::PipelineCreateInfoKHR; + using VULKAN_HPP_NAMESPACE::ReleaseCapturedPipelineDataInfoKHR; + //=== VK_QCOM_tile_properties === using VULKAN_HPP_NAMESPACE::PhysicalDeviceTilePropertiesFeaturesQCOM; using VULKAN_HPP_NAMESPACE::TilePropertiesQCOM; @@ -4218,6 +4304,10 @@ export namespace VULKAN_HPP_NAMESPACE using VULKAN_HPP_NAMESPACE::PhysicalDeviceMutableDescriptorTypeFeaturesEXT; using VULKAN_HPP_NAMESPACE::PhysicalDeviceMutableDescriptorTypeFeaturesVALVE; + //=== VK_EXT_legacy_vertex_attributes === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceLegacyVertexAttributesFeaturesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceLegacyVertexAttributesPropertiesEXT; + //=== VK_EXT_layer_settings === using VULKAN_HPP_NAMESPACE::LayerSettingEXT; using VULKAN_HPP_NAMESPACE::LayerSettingsCreateInfoEXT; @@ -4252,6 +4342,11 @@ export namespace VULKAN_HPP_NAMESPACE using VULKAN_HPP_NAMESPACE::MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM; using VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM; + //=== VK_KHR_compute_shader_derivatives === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceComputeShaderDerivativesFeaturesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceComputeShaderDerivativesFeaturesNV; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceComputeShaderDerivativesPropertiesKHR; + //=== VK_KHR_video_decode_av1 === using VULKAN_HPP_NAMESPACE::VideoDecodeAV1CapabilitiesKHR; using VULKAN_HPP_NAMESPACE::VideoDecodeAV1DpbSlotInfoKHR; @@ -4346,12 +4441,33 @@ export namespace VULKAN_HPP_NAMESPACE //=== VK_NV_raw_access_chains === using VULKAN_HPP_NAMESPACE::PhysicalDeviceRawAccessChainsFeaturesNV; + //=== VK_KHR_shader_relaxed_extended_instruction === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR; + + //=== VK_NV_command_buffer_inheritance === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceCommandBufferInheritanceFeaturesNV; + + //=== VK_KHR_maintenance7 === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceLayeredApiPropertiesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceLayeredApiPropertiesListKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceLayeredApiVulkanPropertiesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance7FeaturesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance7PropertiesKHR; + //=== VK_NV_shader_atomic_float16_vector === using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV; + //=== VK_EXT_shader_replicated_composites === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderReplicatedCompositesFeaturesEXT; + //=== VK_NV_ray_tracing_validation === using VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingValidationFeaturesNV; + //=== VK_MESA_image_alignment_control === + using VULKAN_HPP_NAMESPACE::ImageAlignmentControlCreateInfoMESA; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceImageAlignmentControlFeaturesMESA; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceImageAlignmentControlPropertiesMESA; + //=============== //=== HANDLEs === //=============== @@ -4454,6 +4570,9 @@ export namespace VULKAN_HPP_NAMESPACE //=== VK_EXT_shader_object === using VULKAN_HPP_NAMESPACE::ShaderEXT; + //=== VK_KHR_pipeline_binary === + using VULKAN_HPP_NAMESPACE::PipelineBinaryKHR; + //====================== //=== UNIQUE HANDLEs === //====================== @@ -4551,8 +4670,11 @@ export namespace VULKAN_HPP_NAMESPACE using 
VULKAN_HPP_NAMESPACE::UniqueOpticalFlowSessionNV; //=== VK_EXT_shader_object === - using VULKAN_HPP_NAMESPACE::UniqueHandleTraits; using VULKAN_HPP_NAMESPACE::UniqueShaderEXT; + + //=== VK_KHR_pipeline_binary === + using VULKAN_HPP_NAMESPACE::UniqueHandleTraits; + using VULKAN_HPP_NAMESPACE::UniquePipelineBinaryKHR; #endif /*VULKAN_HPP_NO_SMART_HANDLE*/ //====================== @@ -4655,8 +4777,11 @@ export namespace VULKAN_HPP_NAMESPACE using VULKAN_HPP_NAMESPACE::SharedOpticalFlowSessionNV; //=== VK_EXT_shader_object === - using VULKAN_HPP_NAMESPACE::SharedHandleTraits; using VULKAN_HPP_NAMESPACE::SharedShaderEXT; + + //=== VK_KHR_pipeline_binary === + using VULKAN_HPP_NAMESPACE::SharedHandleTraits; + using VULKAN_HPP_NAMESPACE::SharedPipelineBinaryKHR; #endif /*VULKAN_HPP_NO_SMART_HANDLE*/ //=========================== @@ -4675,7 +4800,7 @@ export namespace VULKAN_HPP_NAMESPACE using VULKAN_HPP_NAMESPACE::StructExtends; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#if defined( VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL ) +#if VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL using VULKAN_HPP_NAMESPACE::DynamicLoader; #endif /*VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL*/ @@ -4728,7 +4853,6 @@ export namespace VULKAN_HPP_NAMESPACE using VULKAN_HPP_RAII_NAMESPACE::Context; using VULKAN_HPP_RAII_NAMESPACE::ContextDispatcher; using VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher; - using VULKAN_HPP_RAII_NAMESPACE::exchange; using VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher; //==================== @@ -4838,6 +4962,10 @@ export namespace VULKAN_HPP_NAMESPACE using VULKAN_HPP_RAII_NAMESPACE::ShaderEXT; using VULKAN_HPP_RAII_NAMESPACE::ShaderEXTs; + //=== VK_KHR_pipeline_binary === + using VULKAN_HPP_RAII_NAMESPACE::PipelineBinaryKHR; + using VULKAN_HPP_RAII_NAMESPACE::PipelineBinaryKHRs; + } // namespace VULKAN_HPP_RAII_NAMESPACE #endif } // namespace VULKAN_HPP_NAMESPACE diff --git a/third_party/vulkan/vulkan.hpp b/third_party/vulkan/vulkan.hpp index f456a7a..d5744c1 100644 --- a/third_party/vulkan/vulkan.hpp +++ b/third_party/vulkan/vulkan.hpp @@ -12,6 +12,7 @@ #include // ArrayWrapperND #include // strnlen #include // std::string +#include // std::exchange #include #include @@ -56,7 +57,7 @@ extern "C" __declspec( dllimport ) FARPROC __stdcall GetProcAddress( HINSTANCE h # include #endif -static_assert( VK_HEADER_VERSION == 281, "Wrong VK_HEADER_VERSION!" ); +static_assert( VK_HEADER_VERSION == 295, "Wrong VK_HEADER_VERSION!" 
); // includes through some other header // this results in major(x) being resolved to gnu_dev_major(x) @@ -146,66 +147,72 @@ namespace VULKAN_HPP_NAMESPACE } #endif -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - template ::value, int>::type = 0> - std::strong_ordering operator<=>( ArrayWrapper1D const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return *static_cast const *>( this ) <=> *static_cast const *>( &rhs ); - } -#else - template ::value, int>::type = 0> - bool operator<( ArrayWrapper1D const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return *static_cast const *>( this ) < *static_cast const *>( &rhs ); - } - - template ::value, int>::type = 0> - bool operator<=( ArrayWrapper1D const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return *static_cast const *>( this ) <= *static_cast const *>( &rhs ); - } - - template ::value, int>::type = 0> - bool operator>( ArrayWrapper1D const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return *static_cast const *>( this ) > *static_cast const *>( &rhs ); - } - - template ::value, int>::type = 0> - bool operator>=( ArrayWrapper1D const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return *static_cast const *>( this ) >= *static_cast const *>( &rhs ); - } -#endif - - template ::value, int>::type = 0> - bool operator==( ArrayWrapper1D const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return *static_cast const *>( this ) == *static_cast const *>( &rhs ); - } - - template ::value, int>::type = 0> - bool operator!=( ArrayWrapper1D const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return *static_cast const *>( this ) != *static_cast const *>( &rhs ); - } - private: VULKAN_HPP_CONSTEXPR_14 void copy( char const * data, size_t len ) VULKAN_HPP_NOEXCEPT { - size_t n = std::min( N, len ); + size_t n = ( std::min )( N - 1, len ); for ( size_t i = 0; i < n; ++i ) { ( *this )[i] = data[i]; } - for ( size_t i = n; i < N; ++i ) - { - ( *this )[i] = 0; - } + ( *this )[n] = 0; } }; - // specialization of relational operators between std::string and arrays of chars +// relational operators between ArrayWrapper1D of chars with potentially different sizes +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + template + std::strong_ordering operator<=>( ArrayWrapper1D const & lhs, ArrayWrapper1D const & rhs ) VULKAN_HPP_NOEXCEPT + { + int result = strcmp( lhs.data(), rhs.data() ); + return ( result < 0 ) ? std::strong_ordering::less : ( ( result > 0 ) ? 
std::strong_ordering::greater : std::strong_ordering::equal ); + } +#else + template + bool operator<( ArrayWrapper1D const & lhs, ArrayWrapper1D const & rhs ) VULKAN_HPP_NOEXCEPT + { + return strcmp( lhs.data(), rhs.data() ) < 0; + } + + template + bool operator<=( ArrayWrapper1D const & lhs, ArrayWrapper1D const & rhs ) VULKAN_HPP_NOEXCEPT + { + return strcmp( lhs.data(), rhs.data() ) <= 0; + } + + template + bool operator>( ArrayWrapper1D const & lhs, ArrayWrapper1D const & rhs ) VULKAN_HPP_NOEXCEPT + { + return strcmp( lhs.data(), rhs.data() ) > 0; + } + + template + bool operator>=( ArrayWrapper1D const & lhs, ArrayWrapper1D const & rhs ) VULKAN_HPP_NOEXCEPT + { + return strcmp( lhs.data(), rhs.data() ) >= 0; + } +#endif + + template + bool operator==( ArrayWrapper1D const & lhs, ArrayWrapper1D const & rhs ) VULKAN_HPP_NOEXCEPT + { + return strcmp( lhs.data(), rhs.data() ) == 0; + } + + template + bool operator!=( ArrayWrapper1D const & lhs, ArrayWrapper1D const & rhs ) VULKAN_HPP_NOEXCEPT + { + return strcmp( lhs.data(), rhs.data() ) != 0; + } + +// specialization of relational operators between std::string and arrays of chars +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + template + std::strong_ordering operator<=>( std::string const & lhs, ArrayWrapper1D const & rhs ) VULKAN_HPP_NOEXCEPT + { + return lhs <=> rhs.data(); + } +#else template bool operator<( std::string const & lhs, ArrayWrapper1D const & rhs ) VULKAN_HPP_NOEXCEPT { @@ -229,6 +236,7 @@ namespace VULKAN_HPP_NAMESPACE { return lhs >= rhs.data(); } +#endif template bool operator==( std::string const & lhs, ArrayWrapper1D const & rhs ) VULKAN_HPP_NOEXCEPT @@ -381,38 +389,19 @@ namespace VULKAN_HPP_NAMESPACE { } - ArrayProxyNoTemporaries( T & value ) VULKAN_HPP_NOEXCEPT + template ::value && std::is_lvalue_reference::value, int>::type = 0> + ArrayProxyNoTemporaries( B && value ) VULKAN_HPP_NOEXCEPT : m_count( 1 ) , m_ptr( &value ) { } - template - ArrayProxyNoTemporaries( V && value ) = delete; - - template ::value, int>::type = 0> - ArrayProxyNoTemporaries( typename std::remove_const::type & value ) VULKAN_HPP_NOEXCEPT - : m_count( 1 ) - , m_ptr( &value ) - { - } - - template ::value, int>::type = 0> - ArrayProxyNoTemporaries( typename std::remove_const::type && value ) = delete; - ArrayProxyNoTemporaries( uint32_t count, T * ptr ) VULKAN_HPP_NOEXCEPT : m_count( count ) , m_ptr( ptr ) { } - template ::value, int>::type = 0> - ArrayProxyNoTemporaries( uint32_t count, typename std::remove_const::type * ptr ) VULKAN_HPP_NOEXCEPT - : m_count( count ) - , m_ptr( ptr ) - { - } - template ArrayProxyNoTemporaries( T ( &ptr )[C] ) VULKAN_HPP_NOEXCEPT : m_count( C ) @@ -423,62 +412,29 @@ namespace VULKAN_HPP_NAMESPACE template ArrayProxyNoTemporaries( T ( &&ptr )[C] ) = delete; - template ::value, int>::type = 0> - ArrayProxyNoTemporaries( typename std::remove_const::type ( &ptr )[C] ) VULKAN_HPP_NOEXCEPT - : m_count( C ) - , m_ptr( ptr ) - { - } - - template ::value, int>::type = 0> - ArrayProxyNoTemporaries( typename std::remove_const::type ( &&ptr )[C] ) = delete; - - ArrayProxyNoTemporaries( std::initializer_list const & list ) VULKAN_HPP_NOEXCEPT - : m_count( static_cast( list.size() ) ) - , m_ptr( list.begin() ) - { - } - - ArrayProxyNoTemporaries( std::initializer_list const && list ) = delete; - - template ::value, int>::type = 0> - ArrayProxyNoTemporaries( std::initializer_list::type> const & list ) VULKAN_HPP_NOEXCEPT - : m_count( static_cast( list.size() ) ) - , m_ptr( list.begin() ) - { - } - - template ::value, 
int>::type = 0> - ArrayProxyNoTemporaries( std::initializer_list::type> const && list ) = delete; - - ArrayProxyNoTemporaries( std::initializer_list & list ) VULKAN_HPP_NOEXCEPT - : m_count( static_cast( list.size() ) ) - , m_ptr( list.begin() ) - { - } - - ArrayProxyNoTemporaries( std::initializer_list && list ) = delete; - - template ::value, int>::type = 0> - ArrayProxyNoTemporaries( std::initializer_list::type> & list ) VULKAN_HPP_NOEXCEPT - : m_count( static_cast( list.size() ) ) - , m_ptr( list.begin() ) - { - } - - template ::value, int>::type = 0> - ArrayProxyNoTemporaries( std::initializer_list::type> && list ) = delete; - - // Any type with a .data() return type implicitly convertible to T*, and a .size() return type implicitly convertible to size_t. + // Any l-value reference with a .data() return type implicitly convertible to T*, and a .size() return type implicitly convertible to size_t. template ().data() ), T *>::value && - std::is_convertible().size() ), std::size_t>::value>::type * = nullptr> - ArrayProxyNoTemporaries( V & v ) VULKAN_HPP_NOEXCEPT + typename std::enable_if().begin() ), T *>::value && + std::is_convertible().data() ), T *>::value && + std::is_convertible().size() ), std::size_t>::value && std::is_lvalue_reference::value, + int>::type = 0> + ArrayProxyNoTemporaries( V && v ) VULKAN_HPP_NOEXCEPT : m_count( static_cast( v.size() ) ) , m_ptr( v.data() ) { } + // Any l-value reference with a .begin() return type implicitly convertible to T*, and a .size() return type implicitly convertible to size_t. + template ().begin() ), T *>::value && + std::is_convertible().size() ), std::size_t>::value && std::is_lvalue_reference::value, + int>::type = 0> + ArrayProxyNoTemporaries( V && v ) VULKAN_HPP_NOEXCEPT + : m_count( static_cast( v.size() ) ) + , m_ptr( v.begin() ) + { + } + const T * begin() const VULKAN_HPP_NOEXCEPT { return m_ptr; @@ -652,6 +608,8 @@ namespace VULKAN_HPP_NAMESPACE template class StructureChain : public std::tuple { + // Note: StructureChain has no move constructor or move assignment operator, as it is not supposed to contain movable containers. + // In order to get a copy-operation on a move-operations, those functions are neither deleted nor defaulted. public: StructureChain() VULKAN_HPP_NOEXCEPT { @@ -668,15 +626,6 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &std::get<0>( rhs ) ) ); } - StructureChain( StructureChain && rhs ) VULKAN_HPP_NOEXCEPT : std::tuple( std::forward>( rhs ) ) - { - static_assert( StructureChainValidation::valid, "The structure chain is not valid!" ); - link( &std::get<0>( *this ), - &std::get<0>( rhs ), - reinterpret_cast( &std::get<0>( *this ) ), - reinterpret_cast( &std::get<0>( rhs ) ) ); - } - StructureChain( ChainElements const &... elems ) VULKAN_HPP_NOEXCEPT : std::tuple( elems... ) { static_assert( StructureChainValidation::valid, "The structure chain is not valid!" 
); @@ -693,8 +642,6 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - StructureChain & operator=( StructureChain && rhs ) = delete; - template >::type, size_t Which = 0> T & get() VULKAN_HPP_NOEXCEPT { @@ -4395,9 +4342,9 @@ namespace VULKAN_HPP_NAMESPACE } void vkCmdSetRenderingInputAttachmentIndicesKHR( VkCommandBuffer commandBuffer, - const VkRenderingInputAttachmentIndexInfoKHR * pLocationInfo ) const VULKAN_HPP_NOEXCEPT + const VkRenderingInputAttachmentIndexInfoKHR * pInputAttachmentIndexInfo ) const VULKAN_HPP_NOEXCEPT { - return ::vkCmdSetRenderingInputAttachmentIndicesKHR( commandBuffer, pLocationInfo ); + return ::vkCmdSetRenderingInputAttachmentIndicesKHR( commandBuffer, pInputAttachmentIndexInfo ); } //=== VK_EXT_buffer_device_address === @@ -5759,6 +5706,13 @@ namespace VULKAN_HPP_NAMESPACE return ::vkGetImageSubresourceLayout2KHR( device, image, pSubresource, pLayout ); } + //=== VK_AMD_anti_lag === + + void vkAntiLagUpdateAMD( VkDevice device, const VkAntiLagDataAMD * pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkAntiLagUpdateAMD( device, pData ); + } + //=== VK_EXT_shader_object === VkResult vkCreateShadersEXT( VkDevice device, @@ -5788,6 +5742,44 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCmdBindShadersEXT( commandBuffer, stageCount, pStages, pShaders ); } + //=== VK_KHR_pipeline_binary === + + VkResult vkCreatePipelineBinariesKHR( VkDevice device, + const VkPipelineBinaryCreateInfoKHR * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkPipelineBinaryHandlesInfoKHR * pBinaries ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreatePipelineBinariesKHR( device, pCreateInfo, pAllocator, pBinaries ); + } + + void vkDestroyPipelineBinaryKHR( VkDevice device, VkPipelineBinaryKHR pipelineBinary, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyPipelineBinaryKHR( device, pipelineBinary, pAllocator ); + } + + VkResult vkGetPipelineKeyKHR( VkDevice device, + const VkPipelineCreateInfoKHR * pPipelineCreateInfo, + VkPipelineBinaryKeyKHR * pPipelineKey ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPipelineKeyKHR( device, pPipelineCreateInfo, pPipelineKey ); + } + + VkResult vkGetPipelineBinaryDataKHR( VkDevice device, + const VkPipelineBinaryDataInfoKHR * pInfo, + VkPipelineBinaryKeyKHR * pPipelineBinaryKey, + size_t * pPipelineBinaryDataSize, + void * pPipelineBinaryData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPipelineBinaryDataKHR( device, pInfo, pPipelineBinaryKey, pPipelineBinaryDataSize, pPipelineBinaryData ); + } + + VkResult vkReleaseCapturedPipelineDataKHR( VkDevice device, + const VkReleaseCapturedPipelineDataInfoKHR * pInfo, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkReleaseCapturedPipelineDataKHR( device, pInfo, pAllocator ); + } + //=== VK_QCOM_tile_properties === VkResult vkGetFramebufferTilePropertiesQCOM( VkDevice device, @@ -5927,6 +5919,18 @@ namespace VULKAN_HPP_NAMESPACE } #endif +#if ( 14 <= VULKAN_HPP_CPP_VERSION ) + using std::exchange; +#else + template + VULKAN_HPP_CONSTEXPR_14 VULKAN_HPP_INLINE T exchange( T & obj, U && newValue ) + { + T oldValue = std::move( obj ); + obj = std::forward( newValue ); + return oldValue; + } +#endif + #if !defined( VULKAN_HPP_NO_SMART_HANDLE ) struct AllocationCallbacks; @@ -6144,7 +6148,7 @@ namespace VULKAN_HPP_NAMESPACE using RemoteAddressNV = void *; using SampleMask = uint32_t; - template + template struct CppType { }; @@ -6553,6 +6557,14 @@ namespace VULKAN_HPP_NAMESPACE CompressionExhaustedEXTError( char const 
* message ) : SystemError( make_error_code( Result::eErrorCompressionExhaustedEXT ), message ) {} }; + class NotEnoughSpaceKHRError : public SystemError + { + public: + NotEnoughSpaceKHRError( std::string const & message ) : SystemError( make_error_code( Result::eErrorNotEnoughSpaceKHR ), message ) {} + + NotEnoughSpaceKHRError( char const * message ) : SystemError( make_error_code( Result::eErrorNotEnoughSpaceKHR ), message ) {} + }; + namespace detail { [[noreturn]] VULKAN_HPP_INLINE void throwResultException( Result result, char const * message ) @@ -6595,17 +6607,13 @@ namespace VULKAN_HPP_NAMESPACE # endif /*VK_USE_PLATFORM_WIN32_KHR*/ case Result::eErrorInvalidVideoStdParametersKHR: throw InvalidVideoStdParametersKHRError( message ); case Result::eErrorCompressionExhaustedEXT: throw CompressionExhaustedEXTError( message ); + case Result::eErrorNotEnoughSpaceKHR: throw NotEnoughSpaceKHRError( message ); default: throw SystemError( make_error_code( result ), message ); } } } // namespace detail #endif - template - void ignore( T const & ) VULKAN_HPP_NOEXCEPT - { - } - template struct ResultValue { @@ -6718,65 +6726,76 @@ namespace VULKAN_HPP_NAMESPACE #endif }; - VULKAN_HPP_INLINE typename ResultValueType::type createResultValueType( Result result ) + namespace detail { -#ifdef VULKAN_HPP_NO_EXCEPTIONS - return result; -#else - ignore( result ); -#endif - } - - template - VULKAN_HPP_INLINE typename ResultValueType::type createResultValueType( Result result, T & data ) - { -#ifdef VULKAN_HPP_NO_EXCEPTIONS - return ResultValue( result, data ); -#else - ignore( result ); - return data; -#endif - } - - template - VULKAN_HPP_INLINE typename ResultValueType::type createResultValueType( Result result, T && data ) - { -#ifdef VULKAN_HPP_NO_EXCEPTIONS - return ResultValue( result, std::move( data ) ); -#else - ignore( result ); - return std::move( data ); -#endif - } - - VULKAN_HPP_INLINE void resultCheck( Result result, char const * message ) - { -#ifdef VULKAN_HPP_NO_EXCEPTIONS - ignore( result ); // just in case VULKAN_HPP_ASSERT_ON_RESULT is empty - ignore( message ); - VULKAN_HPP_ASSERT_ON_RESULT( result == Result::eSuccess ); -#else - if ( result != Result::eSuccess ) + template + void ignore( T const & ) VULKAN_HPP_NOEXCEPT { - detail::throwResultException( result, message ); } -#endif - } - VULKAN_HPP_INLINE void resultCheck( Result result, char const * message, std::initializer_list successCodes ) - { -#ifdef VULKAN_HPP_NO_EXCEPTIONS - ignore( result ); // just in case VULKAN_HPP_ASSERT_ON_RESULT is empty - ignore( message ); - ignore( successCodes ); // just in case VULKAN_HPP_ASSERT_ON_RESULT is empty - VULKAN_HPP_ASSERT_ON_RESULT( std::find( successCodes.begin(), successCodes.end(), result ) != successCodes.end() ); -#else - if ( std::find( successCodes.begin(), successCodes.end(), result ) == successCodes.end() ) + VULKAN_HPP_INLINE typename VULKAN_HPP_NAMESPACE::ResultValueType::type createResultValueType( VULKAN_HPP_NAMESPACE::Result result ) { - detail::throwResultException( result, message ); - } +#ifdef VULKAN_HPP_NO_EXCEPTIONS + return result; +#else + VULKAN_HPP_NAMESPACE::detail::ignore( result ); #endif - } + } + + template + VULKAN_HPP_INLINE typename VULKAN_HPP_NAMESPACE::ResultValueType::type createResultValueType( VULKAN_HPP_NAMESPACE::Result result, T & data ) + { +#ifdef VULKAN_HPP_NO_EXCEPTIONS + return ResultValue( result, data ); +#else + VULKAN_HPP_NAMESPACE::detail::ignore( result ); + return data; +#endif + } + + template + VULKAN_HPP_INLINE typename 
VULKAN_HPP_NAMESPACE::ResultValueType::type createResultValueType( VULKAN_HPP_NAMESPACE::Result result, T && data ) + { +#ifdef VULKAN_HPP_NO_EXCEPTIONS + return ResultValue( result, std::move( data ) ); +#else + VULKAN_HPP_NAMESPACE::detail::ignore( result ); + return std::move( data ); +#endif + } + } // namespace detail + + namespace detail + { + VULKAN_HPP_INLINE void resultCheck( Result result, char const * message ) + { +#ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_NAMESPACE::detail::ignore( result ); // just in case VULKAN_HPP_ASSERT_ON_RESULT is empty + VULKAN_HPP_NAMESPACE::detail::ignore( message ); + VULKAN_HPP_ASSERT_ON_RESULT( result == Result::eSuccess ); +#else + if ( result != Result::eSuccess ) + { + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, message ); + } +#endif + } + + VULKAN_HPP_INLINE void resultCheck( Result result, char const * message, std::initializer_list successCodes ) + { +#ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_NAMESPACE::detail::ignore( result ); // just in case VULKAN_HPP_ASSERT_ON_RESULT is empty + VULKAN_HPP_NAMESPACE::detail::ignore( message ); + VULKAN_HPP_NAMESPACE::detail::ignore( successCodes ); // just in case VULKAN_HPP_ASSERT_ON_RESULT is empty + VULKAN_HPP_ASSERT_ON_RESULT( std::find( successCodes.begin(), successCodes.end(), result ) != successCodes.end() ); +#else + if ( std::find( successCodes.begin(), successCodes.end(), result ) == successCodes.end() ) + { + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, message ); + } +#endif + } + } // namespace detail //=========================== //=== CONSTEXPR CONSTANTs === @@ -6847,6 +6866,9 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_EXT_shader_module_identifier === VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxShaderModuleIdentifierSizeEXT = VK_MAX_SHADER_MODULE_IDENTIFIER_SIZE_EXT; + //=== VK_KHR_pipeline_binary === + VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxPipelineBinaryKeySizeKHR = VK_MAX_PIPELINE_BINARY_KEY_SIZE_KHR; + //=== VK_KHR_video_decode_av1 === VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxVideoAv1ReferencesPerFrameKHR = VK_MAX_VIDEO_AV1_REFERENCES_PER_FRAME_KHR; @@ -6993,10 +7015,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR_INLINE auto EXTDepthRangeUnrestrictedSpecVersion = VK_EXT_DEPTH_RANGE_UNRESTRICTED_SPEC_VERSION; //=== VK_KHR_sampler_mirror_clamp_to_edge === - VULKAN_HPP_DEPRECATED( "The VK_KHR_sampler_mirror_clamp_to_edge extension has been promoted to core in version 1.2." ) VULKAN_HPP_CONSTEXPR_INLINE auto KHRSamplerMirrorClampToEdgeExtensionName = VK_KHR_SAMPLER_MIRROR_CLAMP_TO_EDGE_EXTENSION_NAME; - VULKAN_HPP_DEPRECATED( "The VK_KHR_sampler_mirror_clamp_to_edge extension has been promoted to core in version 1.2." ) - VULKAN_HPP_CONSTEXPR_INLINE auto KHRSamplerMirrorClampToEdgeSpecVersion = VK_KHR_SAMPLER_MIRROR_CLAMP_TO_EDGE_SPEC_VERSION; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRSamplerMirrorClampToEdgeSpecVersion = VK_KHR_SAMPLER_MIRROR_CLAMP_TO_EDGE_SPEC_VERSION; //=== VK_IMG_filter_cubic === VULKAN_HPP_CONSTEXPR_INLINE auto IMGFilterCubicExtensionName = VK_IMG_FILTER_CUBIC_EXTENSION_NAME; @@ -7015,10 +7035,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR_INLINE auto AMDShaderExplicitVertexParameterSpecVersion = VK_AMD_SHADER_EXPLICIT_VERTEX_PARAMETER_SPEC_VERSION; //=== VK_EXT_debug_marker === - VULKAN_HPP_DEPRECATED( "The VK_EXT_debug_marker extension has been promoted to VK_EXT_debug_utils." 
) VULKAN_HPP_CONSTEXPR_INLINE auto EXTDebugMarkerExtensionName = VK_EXT_DEBUG_MARKER_EXTENSION_NAME; - VULKAN_HPP_DEPRECATED( "The VK_EXT_debug_marker extension has been promoted to VK_EXT_debug_utils." ) - VULKAN_HPP_CONSTEXPR_INLINE auto EXTDebugMarkerSpecVersion = VK_EXT_DEBUG_MARKER_SPEC_VERSION; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDebugMarkerSpecVersion = VK_EXT_DEBUG_MARKER_SPEC_VERSION; //=== VK_KHR_video_queue === VULKAN_HPP_CONSTEXPR_INLINE auto KHRVideoQueueExtensionName = VK_KHR_VIDEO_QUEUE_EXTENSION_NAME; @@ -7051,10 +7069,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR_INLINE auto NVXImageViewHandleSpecVersion = VK_NVX_IMAGE_VIEW_HANDLE_SPEC_VERSION; //=== VK_AMD_draw_indirect_count === - VULKAN_HPP_DEPRECATED( "The VK_AMD_draw_indirect_count extension has been promoted to VK_KHR_draw_indirect_count." ) VULKAN_HPP_CONSTEXPR_INLINE auto AMDDrawIndirectCountExtensionName = VK_AMD_DRAW_INDIRECT_COUNT_EXTENSION_NAME; - VULKAN_HPP_DEPRECATED( "The VK_AMD_draw_indirect_count extension has been promoted to VK_KHR_draw_indirect_count." ) - VULKAN_HPP_CONSTEXPR_INLINE auto AMDDrawIndirectCountSpecVersion = VK_AMD_DRAW_INDIRECT_COUNT_SPEC_VERSION; + VULKAN_HPP_CONSTEXPR_INLINE auto AMDDrawIndirectCountSpecVersion = VK_AMD_DRAW_INDIRECT_COUNT_SPEC_VERSION; //=== VK_AMD_negative_viewport_height === VULKAN_HPP_DEPRECATED( "The VK_AMD_negative_viewport_height extension has been obsoleted by VK_KHR_maintenance1." ) @@ -7093,10 +7109,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR_INLINE auto AMDShaderInfoSpecVersion = VK_AMD_SHADER_INFO_SPEC_VERSION; //=== VK_KHR_dynamic_rendering === - VULKAN_HPP_DEPRECATED( "The VK_KHR_dynamic_rendering extension has been promoted to core in version 1.3." ) VULKAN_HPP_CONSTEXPR_INLINE auto KHRDynamicRenderingExtensionName = VK_KHR_DYNAMIC_RENDERING_EXTENSION_NAME; - VULKAN_HPP_DEPRECATED( "The VK_KHR_dynamic_rendering extension has been promoted to core in version 1.3." ) - VULKAN_HPP_CONSTEXPR_INLINE auto KHRDynamicRenderingSpecVersion = VK_KHR_DYNAMIC_RENDERING_SPEC_VERSION; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRDynamicRenderingSpecVersion = VK_KHR_DYNAMIC_RENDERING_SPEC_VERSION; //=== VK_AMD_shader_image_load_store_lod === VULKAN_HPP_CONSTEXPR_INLINE auto AMDShaderImageLoadStoreLodExtensionName = VK_AMD_SHADER_IMAGE_LOAD_STORE_LOD_EXTENSION_NAME; @@ -7113,10 +7127,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR_INLINE auto NVCornerSampledImageSpecVersion = VK_NV_CORNER_SAMPLED_IMAGE_SPEC_VERSION; //=== VK_KHR_multiview === - VULKAN_HPP_DEPRECATED( "The VK_KHR_multiview extension has been promoted to core in version 1.1." ) VULKAN_HPP_CONSTEXPR_INLINE auto KHRMultiviewExtensionName = VK_KHR_MULTIVIEW_EXTENSION_NAME; - VULKAN_HPP_DEPRECATED( "The VK_KHR_multiview extension has been promoted to core in version 1.1." ) - VULKAN_HPP_CONSTEXPR_INLINE auto KHRMultiviewSpecVersion = VK_KHR_MULTIVIEW_SPEC_VERSION; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRMultiviewSpecVersion = VK_KHR_MULTIVIEW_SPEC_VERSION; //=== VK_IMG_format_pvrtc === VULKAN_HPP_DEPRECATED( "The VK_IMG_format_pvrtc extension has been deprecated." ) @@ -7146,23 +7158,17 @@ namespace VULKAN_HPP_NAMESPACE #if defined( VK_USE_PLATFORM_WIN32_KHR ) //=== VK_NV_win32_keyed_mutex === - VULKAN_HPP_DEPRECATED( "The VK_NV_win32_keyed_mutex extension has been promoted to VK_KHR_win32_keyed_mutex." 
) VULKAN_HPP_CONSTEXPR_INLINE auto NVWin32KeyedMutexExtensionName = VK_NV_WIN32_KEYED_MUTEX_EXTENSION_NAME; - VULKAN_HPP_DEPRECATED( "The VK_NV_win32_keyed_mutex extension has been promoted to VK_KHR_win32_keyed_mutex." ) - VULKAN_HPP_CONSTEXPR_INLINE auto NVWin32KeyedMutexSpecVersion = VK_NV_WIN32_KEYED_MUTEX_SPEC_VERSION; + VULKAN_HPP_CONSTEXPR_INLINE auto NVWin32KeyedMutexSpecVersion = VK_NV_WIN32_KEYED_MUTEX_SPEC_VERSION; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ //=== VK_KHR_get_physical_device_properties2 === - VULKAN_HPP_DEPRECATED( "The VK_KHR_get_physical_device_properties2 extension has been promoted to core in version 1.1." ) VULKAN_HPP_CONSTEXPR_INLINE auto KHRGetPhysicalDeviceProperties2ExtensionName = VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME; - VULKAN_HPP_DEPRECATED( "The VK_KHR_get_physical_device_properties2 extension has been promoted to core in version 1.1." ) - VULKAN_HPP_CONSTEXPR_INLINE auto KHRGetPhysicalDeviceProperties2SpecVersion = VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_SPEC_VERSION; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRGetPhysicalDeviceProperties2SpecVersion = VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_SPEC_VERSION; //=== VK_KHR_device_group === - VULKAN_HPP_DEPRECATED( "The VK_KHR_device_group extension has been promoted to core in version 1.1." ) VULKAN_HPP_CONSTEXPR_INLINE auto KHRDeviceGroupExtensionName = VK_KHR_DEVICE_GROUP_EXTENSION_NAME; - VULKAN_HPP_DEPRECATED( "The VK_KHR_device_group extension has been promoted to core in version 1.1." ) - VULKAN_HPP_CONSTEXPR_INLINE auto KHRDeviceGroupSpecVersion = VK_KHR_DEVICE_GROUP_SPEC_VERSION; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRDeviceGroupSpecVersion = VK_KHR_DEVICE_GROUP_SPEC_VERSION; //=== VK_EXT_validation_flags === VULKAN_HPP_DEPRECATED( "The VK_EXT_validation_flags extension has been deprecated by VK_EXT_layer_settings." ) @@ -7177,10 +7183,8 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VK_USE_PLATFORM_VI_NN*/ //=== VK_KHR_shader_draw_parameters === - VULKAN_HPP_DEPRECATED( "The VK_KHR_shader_draw_parameters extension has been promoted to core in version 1.1." ) VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderDrawParametersExtensionName = VK_KHR_SHADER_DRAW_PARAMETERS_EXTENSION_NAME; - VULKAN_HPP_DEPRECATED( "The VK_KHR_shader_draw_parameters extension has been promoted to core in version 1.1." ) - VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderDrawParametersSpecVersion = VK_KHR_SHADER_DRAW_PARAMETERS_SPEC_VERSION; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderDrawParametersSpecVersion = VK_KHR_SHADER_DRAW_PARAMETERS_SPEC_VERSION; //=== VK_EXT_shader_subgroup_ballot === VULKAN_HPP_DEPRECATED( "The VK_EXT_shader_subgroup_ballot extension has been deprecated by VK_VERSION_1_2." ) @@ -7195,10 +7199,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR_INLINE auto EXTShaderSubgroupVoteSpecVersion = VK_EXT_SHADER_SUBGROUP_VOTE_SPEC_VERSION; //=== VK_EXT_texture_compression_astc_hdr === - VULKAN_HPP_DEPRECATED( "The VK_EXT_texture_compression_astc_hdr extension has been promoted to core in version 1.3." ) VULKAN_HPP_CONSTEXPR_INLINE auto EXTTextureCompressionAstcHdrExtensionName = VK_EXT_TEXTURE_COMPRESSION_ASTC_HDR_EXTENSION_NAME; - VULKAN_HPP_DEPRECATED( "The VK_EXT_texture_compression_astc_hdr extension has been promoted to core in version 1.3." 
) - VULKAN_HPP_CONSTEXPR_INLINE auto EXTTextureCompressionAstcHdrSpecVersion = VK_EXT_TEXTURE_COMPRESSION_ASTC_HDR_SPEC_VERSION; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTTextureCompressionAstcHdrSpecVersion = VK_EXT_TEXTURE_COMPRESSION_ASTC_HDR_SPEC_VERSION; //=== VK_EXT_astc_decode_mode === VULKAN_HPP_CONSTEXPR_INLINE auto EXTAstcDecodeModeExtensionName = VK_EXT_ASTC_DECODE_MODE_EXTENSION_NAME; @@ -7209,28 +7211,20 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR_INLINE auto EXTPipelineRobustnessSpecVersion = VK_EXT_PIPELINE_ROBUSTNESS_SPEC_VERSION; //=== VK_KHR_maintenance1 === - VULKAN_HPP_DEPRECATED( "The VK_KHR_maintenance1 extension has been promoted to core in version 1.1." ) VULKAN_HPP_CONSTEXPR_INLINE auto KHRMaintenance1ExtensionName = VK_KHR_MAINTENANCE_1_EXTENSION_NAME; - VULKAN_HPP_DEPRECATED( "The VK_KHR_maintenance1 extension has been promoted to core in version 1.1." ) - VULKAN_HPP_CONSTEXPR_INLINE auto KHRMaintenance1SpecVersion = VK_KHR_MAINTENANCE_1_SPEC_VERSION; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRMaintenance1SpecVersion = VK_KHR_MAINTENANCE_1_SPEC_VERSION; //=== VK_KHR_device_group_creation === - VULKAN_HPP_DEPRECATED( "The VK_KHR_device_group_creation extension has been promoted to core in version 1.1." ) VULKAN_HPP_CONSTEXPR_INLINE auto KHRDeviceGroupCreationExtensionName = VK_KHR_DEVICE_GROUP_CREATION_EXTENSION_NAME; - VULKAN_HPP_DEPRECATED( "The VK_KHR_device_group_creation extension has been promoted to core in version 1.1." ) - VULKAN_HPP_CONSTEXPR_INLINE auto KHRDeviceGroupCreationSpecVersion = VK_KHR_DEVICE_GROUP_CREATION_SPEC_VERSION; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRDeviceGroupCreationSpecVersion = VK_KHR_DEVICE_GROUP_CREATION_SPEC_VERSION; //=== VK_KHR_external_memory_capabilities === - VULKAN_HPP_DEPRECATED( "The VK_KHR_external_memory_capabilities extension has been promoted to core in version 1.1." ) VULKAN_HPP_CONSTEXPR_INLINE auto KHRExternalMemoryCapabilitiesExtensionName = VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME; - VULKAN_HPP_DEPRECATED( "The VK_KHR_external_memory_capabilities extension has been promoted to core in version 1.1." ) - VULKAN_HPP_CONSTEXPR_INLINE auto KHRExternalMemoryCapabilitiesSpecVersion = VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_SPEC_VERSION; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRExternalMemoryCapabilitiesSpecVersion = VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_SPEC_VERSION; //=== VK_KHR_external_memory === - VULKAN_HPP_DEPRECATED( "The VK_KHR_external_memory extension has been promoted to core in version 1.1." ) VULKAN_HPP_CONSTEXPR_INLINE auto KHRExternalMemoryExtensionName = VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME; - VULKAN_HPP_DEPRECATED( "The VK_KHR_external_memory extension has been promoted to core in version 1.1." ) - VULKAN_HPP_CONSTEXPR_INLINE auto KHRExternalMemorySpecVersion = VK_KHR_EXTERNAL_MEMORY_SPEC_VERSION; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRExternalMemorySpecVersion = VK_KHR_EXTERNAL_MEMORY_SPEC_VERSION; #if defined( VK_USE_PLATFORM_WIN32_KHR ) //=== VK_KHR_external_memory_win32 === @@ -7249,16 +7243,12 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VK_USE_PLATFORM_WIN32_KHR*/ //=== VK_KHR_external_semaphore_capabilities === - VULKAN_HPP_DEPRECATED( "The VK_KHR_external_semaphore_capabilities extension has been promoted to core in version 1.1." ) VULKAN_HPP_CONSTEXPR_INLINE auto KHRExternalSemaphoreCapabilitiesExtensionName = VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_EXTENSION_NAME; - VULKAN_HPP_DEPRECATED( "The VK_KHR_external_semaphore_capabilities extension has been promoted to core in version 1.1." 
) - VULKAN_HPP_CONSTEXPR_INLINE auto KHRExternalSemaphoreCapabilitiesSpecVersion = VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_SPEC_VERSION; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRExternalSemaphoreCapabilitiesSpecVersion = VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_SPEC_VERSION; //=== VK_KHR_external_semaphore === - VULKAN_HPP_DEPRECATED( "The VK_KHR_external_semaphore extension has been promoted to core in version 1.1." ) VULKAN_HPP_CONSTEXPR_INLINE auto KHRExternalSemaphoreExtensionName = VK_KHR_EXTERNAL_SEMAPHORE_EXTENSION_NAME; - VULKAN_HPP_DEPRECATED( "The VK_KHR_external_semaphore extension has been promoted to core in version 1.1." ) - VULKAN_HPP_CONSTEXPR_INLINE auto KHRExternalSemaphoreSpecVersion = VK_KHR_EXTERNAL_SEMAPHORE_SPEC_VERSION; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRExternalSemaphoreSpecVersion = VK_KHR_EXTERNAL_SEMAPHORE_SPEC_VERSION; #if defined( VK_USE_PLATFORM_WIN32_KHR ) //=== VK_KHR_external_semaphore_win32 === @@ -7279,26 +7269,20 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR_INLINE auto EXTConditionalRenderingSpecVersion = VK_EXT_CONDITIONAL_RENDERING_SPEC_VERSION; //=== VK_KHR_shader_float16_int8 === - VULKAN_HPP_DEPRECATED( "The VK_KHR_shader_float16_int8 extension has been promoted to core in version 1.2." ) VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderFloat16Int8ExtensionName = VK_KHR_SHADER_FLOAT16_INT8_EXTENSION_NAME; - VULKAN_HPP_DEPRECATED( "The VK_KHR_shader_float16_int8 extension has been promoted to core in version 1.2." ) - VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderFloat16Int8SpecVersion = VK_KHR_SHADER_FLOAT16_INT8_SPEC_VERSION; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderFloat16Int8SpecVersion = VK_KHR_SHADER_FLOAT16_INT8_SPEC_VERSION; //=== VK_KHR_16bit_storage === - VULKAN_HPP_DEPRECATED( "The VK_KHR_16bit_storage extension has been promoted to core in version 1.1." ) VULKAN_HPP_CONSTEXPR_INLINE auto KHR16BitStorageExtensionName = VK_KHR_16BIT_STORAGE_EXTENSION_NAME; - VULKAN_HPP_DEPRECATED( "The VK_KHR_16bit_storage extension has been promoted to core in version 1.1." ) - VULKAN_HPP_CONSTEXPR_INLINE auto KHR16BitStorageSpecVersion = VK_KHR_16BIT_STORAGE_SPEC_VERSION; + VULKAN_HPP_CONSTEXPR_INLINE auto KHR16BitStorageSpecVersion = VK_KHR_16BIT_STORAGE_SPEC_VERSION; //=== VK_KHR_incremental_present === VULKAN_HPP_CONSTEXPR_INLINE auto KHRIncrementalPresentExtensionName = VK_KHR_INCREMENTAL_PRESENT_EXTENSION_NAME; VULKAN_HPP_CONSTEXPR_INLINE auto KHRIncrementalPresentSpecVersion = VK_KHR_INCREMENTAL_PRESENT_SPEC_VERSION; //=== VK_KHR_descriptor_update_template === - VULKAN_HPP_DEPRECATED( "The VK_KHR_descriptor_update_template extension has been promoted to core in version 1.1." ) VULKAN_HPP_CONSTEXPR_INLINE auto KHRDescriptorUpdateTemplateExtensionName = VK_KHR_DESCRIPTOR_UPDATE_TEMPLATE_EXTENSION_NAME; - VULKAN_HPP_DEPRECATED( "The VK_KHR_descriptor_update_template extension has been promoted to core in version 1.1." 
) - VULKAN_HPP_CONSTEXPR_INLINE auto KHRDescriptorUpdateTemplateSpecVersion = VK_KHR_DESCRIPTOR_UPDATE_TEMPLATE_SPEC_VERSION; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRDescriptorUpdateTemplateSpecVersion = VK_KHR_DESCRIPTOR_UPDATE_TEMPLATE_SPEC_VERSION; //=== VK_NV_clip_space_w_scaling === VULKAN_HPP_CONSTEXPR_INLINE auto NVClipSpaceWScalingExtensionName = VK_NV_CLIP_SPACE_W_SCALING_EXTENSION_NAME; @@ -7367,16 +7351,12 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR_INLINE auto EXTHdrMetadataSpecVersion = VK_EXT_HDR_METADATA_SPEC_VERSION; //=== VK_KHR_imageless_framebuffer === - VULKAN_HPP_DEPRECATED( "The VK_KHR_imageless_framebuffer extension has been promoted to core in version 1.2." ) VULKAN_HPP_CONSTEXPR_INLINE auto KHRImagelessFramebufferExtensionName = VK_KHR_IMAGELESS_FRAMEBUFFER_EXTENSION_NAME; - VULKAN_HPP_DEPRECATED( "The VK_KHR_imageless_framebuffer extension has been promoted to core in version 1.2." ) - VULKAN_HPP_CONSTEXPR_INLINE auto KHRImagelessFramebufferSpecVersion = VK_KHR_IMAGELESS_FRAMEBUFFER_SPEC_VERSION; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRImagelessFramebufferSpecVersion = VK_KHR_IMAGELESS_FRAMEBUFFER_SPEC_VERSION; //=== VK_KHR_create_renderpass2 === - VULKAN_HPP_DEPRECATED( "The VK_KHR_create_renderpass2 extension has been promoted to core in version 1.2." ) VULKAN_HPP_CONSTEXPR_INLINE auto KHRCreateRenderpass2ExtensionName = VK_KHR_CREATE_RENDERPASS_2_EXTENSION_NAME; - VULKAN_HPP_DEPRECATED( "The VK_KHR_create_renderpass2 extension has been promoted to core in version 1.2." ) - VULKAN_HPP_CONSTEXPR_INLINE auto KHRCreateRenderpass2SpecVersion = VK_KHR_CREATE_RENDERPASS_2_SPEC_VERSION; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRCreateRenderpass2SpecVersion = VK_KHR_CREATE_RENDERPASS_2_SPEC_VERSION; //=== VK_IMG_relaxed_line_rasterization === VULKAN_HPP_CONSTEXPR_INLINE auto IMGRelaxedLineRasterizationExtensionName = VK_IMG_RELAXED_LINE_RASTERIZATION_EXTENSION_NAME; @@ -7387,16 +7367,12 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR_INLINE auto KHRSharedPresentableImageSpecVersion = VK_KHR_SHARED_PRESENTABLE_IMAGE_SPEC_VERSION; //=== VK_KHR_external_fence_capabilities === - VULKAN_HPP_DEPRECATED( "The VK_KHR_external_fence_capabilities extension has been promoted to core in version 1.1." ) VULKAN_HPP_CONSTEXPR_INLINE auto KHRExternalFenceCapabilitiesExtensionName = VK_KHR_EXTERNAL_FENCE_CAPABILITIES_EXTENSION_NAME; - VULKAN_HPP_DEPRECATED( "The VK_KHR_external_fence_capabilities extension has been promoted to core in version 1.1." ) - VULKAN_HPP_CONSTEXPR_INLINE auto KHRExternalFenceCapabilitiesSpecVersion = VK_KHR_EXTERNAL_FENCE_CAPABILITIES_SPEC_VERSION; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRExternalFenceCapabilitiesSpecVersion = VK_KHR_EXTERNAL_FENCE_CAPABILITIES_SPEC_VERSION; //=== VK_KHR_external_fence === - VULKAN_HPP_DEPRECATED( "The VK_KHR_external_fence extension has been promoted to core in version 1.1." ) VULKAN_HPP_CONSTEXPR_INLINE auto KHRExternalFenceExtensionName = VK_KHR_EXTERNAL_FENCE_EXTENSION_NAME; - VULKAN_HPP_DEPRECATED( "The VK_KHR_external_fence extension has been promoted to core in version 1.1." 
) - VULKAN_HPP_CONSTEXPR_INLINE auto KHRExternalFenceSpecVersion = VK_KHR_EXTERNAL_FENCE_SPEC_VERSION; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRExternalFenceSpecVersion = VK_KHR_EXTERNAL_FENCE_SPEC_VERSION; #if defined( VK_USE_PLATFORM_WIN32_KHR ) //=== VK_KHR_external_fence_win32 === @@ -7413,20 +7389,16 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR_INLINE auto KHRPerformanceQuerySpecVersion = VK_KHR_PERFORMANCE_QUERY_SPEC_VERSION; //=== VK_KHR_maintenance2 === - VULKAN_HPP_DEPRECATED( "The VK_KHR_maintenance2 extension has been promoted to core in version 1.1." ) VULKAN_HPP_CONSTEXPR_INLINE auto KHRMaintenance2ExtensionName = VK_KHR_MAINTENANCE_2_EXTENSION_NAME; - VULKAN_HPP_DEPRECATED( "The VK_KHR_maintenance2 extension has been promoted to core in version 1.1." ) - VULKAN_HPP_CONSTEXPR_INLINE auto KHRMaintenance2SpecVersion = VK_KHR_MAINTENANCE_2_SPEC_VERSION; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRMaintenance2SpecVersion = VK_KHR_MAINTENANCE_2_SPEC_VERSION; //=== VK_KHR_get_surface_capabilities2 === VULKAN_HPP_CONSTEXPR_INLINE auto KHRGetSurfaceCapabilities2ExtensionName = VK_KHR_GET_SURFACE_CAPABILITIES_2_EXTENSION_NAME; VULKAN_HPP_CONSTEXPR_INLINE auto KHRGetSurfaceCapabilities2SpecVersion = VK_KHR_GET_SURFACE_CAPABILITIES_2_SPEC_VERSION; //=== VK_KHR_variable_pointers === - VULKAN_HPP_DEPRECATED( "The VK_KHR_variable_pointers extension has been promoted to core in version 1.1." ) VULKAN_HPP_CONSTEXPR_INLINE auto KHRVariablePointersExtensionName = VK_KHR_VARIABLE_POINTERS_EXTENSION_NAME; - VULKAN_HPP_DEPRECATED( "The VK_KHR_variable_pointers extension has been promoted to core in version 1.1." ) - VULKAN_HPP_CONSTEXPR_INLINE auto KHRVariablePointersSpecVersion = VK_KHR_VARIABLE_POINTERS_SPEC_VERSION; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRVariablePointersSpecVersion = VK_KHR_VARIABLE_POINTERS_SPEC_VERSION; //=== VK_KHR_get_display_properties2 === VULKAN_HPP_CONSTEXPR_INLINE auto KHRGetDisplayProperties2ExtensionName = VK_KHR_GET_DISPLAY_PROPERTIES_2_EXTENSION_NAME; @@ -7457,10 +7429,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR_INLINE auto EXTQueueFamilyForeignSpecVersion = VK_EXT_QUEUE_FAMILY_FOREIGN_SPEC_VERSION; //=== VK_KHR_dedicated_allocation === - VULKAN_HPP_DEPRECATED( "The VK_KHR_dedicated_allocation extension has been promoted to core in version 1.1." ) VULKAN_HPP_CONSTEXPR_INLINE auto KHRDedicatedAllocationExtensionName = VK_KHR_DEDICATED_ALLOCATION_EXTENSION_NAME; - VULKAN_HPP_DEPRECATED( "The VK_KHR_dedicated_allocation extension has been promoted to core in version 1.1." ) - VULKAN_HPP_CONSTEXPR_INLINE auto KHRDedicatedAllocationSpecVersion = VK_KHR_DEDICATED_ALLOCATION_SPEC_VERSION; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRDedicatedAllocationSpecVersion = VK_KHR_DEDICATED_ALLOCATION_SPEC_VERSION; //=== VK_EXT_debug_utils === VULKAN_HPP_CONSTEXPR_INLINE auto EXTDebugUtilsExtensionName = VK_EXT_DEBUG_UTILS_EXTENSION_NAME; @@ -7473,16 +7443,12 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VK_USE_PLATFORM_ANDROID_KHR*/ //=== VK_EXT_sampler_filter_minmax === - VULKAN_HPP_DEPRECATED( "The VK_EXT_sampler_filter_minmax extension has been promoted to core in version 1.2." ) VULKAN_HPP_CONSTEXPR_INLINE auto EXTSamplerFilterMinmaxExtensionName = VK_EXT_SAMPLER_FILTER_MINMAX_EXTENSION_NAME; - VULKAN_HPP_DEPRECATED( "The VK_EXT_sampler_filter_minmax extension has been promoted to core in version 1.2." 
) - VULKAN_HPP_CONSTEXPR_INLINE auto EXTSamplerFilterMinmaxSpecVersion = VK_EXT_SAMPLER_FILTER_MINMAX_SPEC_VERSION; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTSamplerFilterMinmaxSpecVersion = VK_EXT_SAMPLER_FILTER_MINMAX_SPEC_VERSION; //=== VK_KHR_storage_buffer_storage_class === - VULKAN_HPP_DEPRECATED( "The VK_KHR_storage_buffer_storage_class extension has been promoted to core in version 1.1." ) VULKAN_HPP_CONSTEXPR_INLINE auto KHRStorageBufferStorageClassExtensionName = VK_KHR_STORAGE_BUFFER_STORAGE_CLASS_EXTENSION_NAME; - VULKAN_HPP_DEPRECATED( "The VK_KHR_storage_buffer_storage_class extension has been promoted to core in version 1.1." ) - VULKAN_HPP_CONSTEXPR_INLINE auto KHRStorageBufferStorageClassSpecVersion = VK_KHR_STORAGE_BUFFER_STORAGE_CLASS_SPEC_VERSION; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRStorageBufferStorageClassSpecVersion = VK_KHR_STORAGE_BUFFER_STORAGE_CLASS_SPEC_VERSION; //=== VK_AMD_gpu_shader_int16 === VULKAN_HPP_DEPRECATED( "The VK_AMD_gpu_shader_int16 extension has been deprecated by VK_KHR_shader_float16_int8." ) @@ -7505,10 +7471,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR_INLINE auto AMDShaderFragmentMaskSpecVersion = VK_AMD_SHADER_FRAGMENT_MASK_SPEC_VERSION; //=== VK_EXT_inline_uniform_block === - VULKAN_HPP_DEPRECATED( "The VK_EXT_inline_uniform_block extension has been promoted to core in version 1.3." ) VULKAN_HPP_CONSTEXPR_INLINE auto EXTInlineUniformBlockExtensionName = VK_EXT_INLINE_UNIFORM_BLOCK_EXTENSION_NAME; - VULKAN_HPP_DEPRECATED( "The VK_EXT_inline_uniform_block extension has been promoted to core in version 1.3." ) - VULKAN_HPP_CONSTEXPR_INLINE auto EXTInlineUniformBlockSpecVersion = VK_EXT_INLINE_UNIFORM_BLOCK_SPEC_VERSION; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTInlineUniformBlockSpecVersion = VK_EXT_INLINE_UNIFORM_BLOCK_SPEC_VERSION; //=== VK_EXT_shader_stencil_export === VULKAN_HPP_CONSTEXPR_INLINE auto EXTShaderStencilExportExtensionName = VK_EXT_SHADER_STENCIL_EXPORT_EXTENSION_NAME; @@ -7519,22 +7483,16 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR_INLINE auto EXTSampleLocationsSpecVersion = VK_EXT_SAMPLE_LOCATIONS_SPEC_VERSION; //=== VK_KHR_relaxed_block_layout === - VULKAN_HPP_DEPRECATED( "The VK_KHR_relaxed_block_layout extension has been promoted to core in version 1.1." ) VULKAN_HPP_CONSTEXPR_INLINE auto KHRRelaxedBlockLayoutExtensionName = VK_KHR_RELAXED_BLOCK_LAYOUT_EXTENSION_NAME; - VULKAN_HPP_DEPRECATED( "The VK_KHR_relaxed_block_layout extension has been promoted to core in version 1.1." ) - VULKAN_HPP_CONSTEXPR_INLINE auto KHRRelaxedBlockLayoutSpecVersion = VK_KHR_RELAXED_BLOCK_LAYOUT_SPEC_VERSION; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRRelaxedBlockLayoutSpecVersion = VK_KHR_RELAXED_BLOCK_LAYOUT_SPEC_VERSION; //=== VK_KHR_get_memory_requirements2 === - VULKAN_HPP_DEPRECATED( "The VK_KHR_get_memory_requirements2 extension has been promoted to core in version 1.1." ) VULKAN_HPP_CONSTEXPR_INLINE auto KHRGetMemoryRequirements2ExtensionName = VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME; - VULKAN_HPP_DEPRECATED( "The VK_KHR_get_memory_requirements2 extension has been promoted to core in version 1.1." ) - VULKAN_HPP_CONSTEXPR_INLINE auto KHRGetMemoryRequirements2SpecVersion = VK_KHR_GET_MEMORY_REQUIREMENTS_2_SPEC_VERSION; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRGetMemoryRequirements2SpecVersion = VK_KHR_GET_MEMORY_REQUIREMENTS_2_SPEC_VERSION; //=== VK_KHR_image_format_list === - VULKAN_HPP_DEPRECATED( "The VK_KHR_image_format_list extension has been promoted to core in version 1.2." 
) VULKAN_HPP_CONSTEXPR_INLINE auto KHRImageFormatListExtensionName = VK_KHR_IMAGE_FORMAT_LIST_EXTENSION_NAME; - VULKAN_HPP_DEPRECATED( "The VK_KHR_image_format_list extension has been promoted to core in version 1.2." ) - VULKAN_HPP_CONSTEXPR_INLINE auto KHRImageFormatListSpecVersion = VK_KHR_IMAGE_FORMAT_LIST_SPEC_VERSION; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRImageFormatListSpecVersion = VK_KHR_IMAGE_FORMAT_LIST_SPEC_VERSION; //=== VK_EXT_blend_operation_advanced === VULKAN_HPP_CONSTEXPR_INLINE auto EXTBlendOperationAdvancedExtensionName = VK_EXT_BLEND_OPERATION_ADVANCED_EXTENSION_NAME; @@ -7573,16 +7531,12 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR_INLINE auto EXTPostDepthCoverageSpecVersion = VK_EXT_POST_DEPTH_COVERAGE_SPEC_VERSION; //=== VK_KHR_sampler_ycbcr_conversion === - VULKAN_HPP_DEPRECATED( "The VK_KHR_sampler_ycbcr_conversion extension has been promoted to core in version 1.1." ) VULKAN_HPP_CONSTEXPR_INLINE auto KHRSamplerYcbcrConversionExtensionName = VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME; - VULKAN_HPP_DEPRECATED( "The VK_KHR_sampler_ycbcr_conversion extension has been promoted to core in version 1.1." ) - VULKAN_HPP_CONSTEXPR_INLINE auto KHRSamplerYcbcrConversionSpecVersion = VK_KHR_SAMPLER_YCBCR_CONVERSION_SPEC_VERSION; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRSamplerYcbcrConversionSpecVersion = VK_KHR_SAMPLER_YCBCR_CONVERSION_SPEC_VERSION; //=== VK_KHR_bind_memory2 === - VULKAN_HPP_DEPRECATED( "The VK_KHR_bind_memory2 extension has been promoted to core in version 1.1." ) VULKAN_HPP_CONSTEXPR_INLINE auto KHRBindMemory2ExtensionName = VK_KHR_BIND_MEMORY_2_EXTENSION_NAME; - VULKAN_HPP_DEPRECATED( "The VK_KHR_bind_memory2 extension has been promoted to core in version 1.1." ) - VULKAN_HPP_CONSTEXPR_INLINE auto KHRBindMemory2SpecVersion = VK_KHR_BIND_MEMORY_2_SPEC_VERSION; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRBindMemory2SpecVersion = VK_KHR_BIND_MEMORY_2_SPEC_VERSION; //=== VK_EXT_image_drm_format_modifier === VULKAN_HPP_CONSTEXPR_INLINE auto EXTImageDrmFormatModifierExtensionName = VK_EXT_IMAGE_DRM_FORMAT_MODIFIER_EXTENSION_NAME; @@ -7593,16 +7547,12 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR_INLINE auto EXTValidationCacheSpecVersion = VK_EXT_VALIDATION_CACHE_SPEC_VERSION; //=== VK_EXT_descriptor_indexing === - VULKAN_HPP_DEPRECATED( "The VK_EXT_descriptor_indexing extension has been promoted to core in version 1.2." ) VULKAN_HPP_CONSTEXPR_INLINE auto EXTDescriptorIndexingExtensionName = VK_EXT_DESCRIPTOR_INDEXING_EXTENSION_NAME; - VULKAN_HPP_DEPRECATED( "The VK_EXT_descriptor_indexing extension has been promoted to core in version 1.2." ) - VULKAN_HPP_CONSTEXPR_INLINE auto EXTDescriptorIndexingSpecVersion = VK_EXT_DESCRIPTOR_INDEXING_SPEC_VERSION; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDescriptorIndexingSpecVersion = VK_EXT_DESCRIPTOR_INDEXING_SPEC_VERSION; //=== VK_EXT_shader_viewport_index_layer === - VULKAN_HPP_DEPRECATED( "The VK_EXT_shader_viewport_index_layer extension has been promoted to core in version 1.2." ) VULKAN_HPP_CONSTEXPR_INLINE auto EXTShaderViewportIndexLayerExtensionName = VK_EXT_SHADER_VIEWPORT_INDEX_LAYER_EXTENSION_NAME; - VULKAN_HPP_DEPRECATED( "The VK_EXT_shader_viewport_index_layer extension has been promoted to core in version 1.2." 
) - VULKAN_HPP_CONSTEXPR_INLINE auto EXTShaderViewportIndexLayerSpecVersion = VK_EXT_SHADER_VIEWPORT_INDEX_LAYER_SPEC_VERSION; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTShaderViewportIndexLayerSpecVersion = VK_EXT_SHADER_VIEWPORT_INDEX_LAYER_SPEC_VERSION; #if defined( VK_ENABLE_BETA_EXTENSIONS ) //=== VK_KHR_portability_subset === @@ -7615,24 +7565,22 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR_INLINE auto NVShadingRateImageSpecVersion = VK_NV_SHADING_RATE_IMAGE_SPEC_VERSION; //=== VK_NV_ray_tracing === + VULKAN_HPP_DEPRECATED( "The VK_NV_ray_tracing extension has been deprecated by VK_KHR_ray_tracing_pipeline." ) VULKAN_HPP_CONSTEXPR_INLINE auto NVRayTracingExtensionName = VK_NV_RAY_TRACING_EXTENSION_NAME; - VULKAN_HPP_CONSTEXPR_INLINE auto NVRayTracingSpecVersion = VK_NV_RAY_TRACING_SPEC_VERSION; + VULKAN_HPP_DEPRECATED( "The VK_NV_ray_tracing extension has been deprecated by VK_KHR_ray_tracing_pipeline." ) + VULKAN_HPP_CONSTEXPR_INLINE auto NVRayTracingSpecVersion = VK_NV_RAY_TRACING_SPEC_VERSION; //=== VK_NV_representative_fragment_test === VULKAN_HPP_CONSTEXPR_INLINE auto NVRepresentativeFragmentTestExtensionName = VK_NV_REPRESENTATIVE_FRAGMENT_TEST_EXTENSION_NAME; VULKAN_HPP_CONSTEXPR_INLINE auto NVRepresentativeFragmentTestSpecVersion = VK_NV_REPRESENTATIVE_FRAGMENT_TEST_SPEC_VERSION; //=== VK_KHR_maintenance3 === - VULKAN_HPP_DEPRECATED( "The VK_KHR_maintenance3 extension has been promoted to core in version 1.1." ) VULKAN_HPP_CONSTEXPR_INLINE auto KHRMaintenance3ExtensionName = VK_KHR_MAINTENANCE_3_EXTENSION_NAME; - VULKAN_HPP_DEPRECATED( "The VK_KHR_maintenance3 extension has been promoted to core in version 1.1." ) - VULKAN_HPP_CONSTEXPR_INLINE auto KHRMaintenance3SpecVersion = VK_KHR_MAINTENANCE_3_SPEC_VERSION; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRMaintenance3SpecVersion = VK_KHR_MAINTENANCE_3_SPEC_VERSION; //=== VK_KHR_draw_indirect_count === - VULKAN_HPP_DEPRECATED( "The VK_KHR_draw_indirect_count extension has been promoted to core in version 1.2." ) VULKAN_HPP_CONSTEXPR_INLINE auto KHRDrawIndirectCountExtensionName = VK_KHR_DRAW_INDIRECT_COUNT_EXTENSION_NAME; - VULKAN_HPP_DEPRECATED( "The VK_KHR_draw_indirect_count extension has been promoted to core in version 1.2." ) - VULKAN_HPP_CONSTEXPR_INLINE auto KHRDrawIndirectCountSpecVersion = VK_KHR_DRAW_INDIRECT_COUNT_SPEC_VERSION; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRDrawIndirectCountSpecVersion = VK_KHR_DRAW_INDIRECT_COUNT_SPEC_VERSION; //=== VK_EXT_filter_cubic === VULKAN_HPP_CONSTEXPR_INLINE auto EXTFilterCubicExtensionName = VK_EXT_FILTER_CUBIC_EXTENSION_NAME; @@ -7643,22 +7591,16 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR_INLINE auto QCOMRenderPassShaderResolveSpecVersion = VK_QCOM_RENDER_PASS_SHADER_RESOLVE_SPEC_VERSION; //=== VK_EXT_global_priority === - VULKAN_HPP_DEPRECATED( "The VK_EXT_global_priority extension has been promoted to VK_KHR_global_priority." ) VULKAN_HPP_CONSTEXPR_INLINE auto EXTGlobalPriorityExtensionName = VK_EXT_GLOBAL_PRIORITY_EXTENSION_NAME; - VULKAN_HPP_DEPRECATED( "The VK_EXT_global_priority extension has been promoted to VK_KHR_global_priority." ) - VULKAN_HPP_CONSTEXPR_INLINE auto EXTGlobalPrioritySpecVersion = VK_EXT_GLOBAL_PRIORITY_SPEC_VERSION; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTGlobalPrioritySpecVersion = VK_EXT_GLOBAL_PRIORITY_SPEC_VERSION; //=== VK_KHR_shader_subgroup_extended_types === - VULKAN_HPP_DEPRECATED( "The VK_KHR_shader_subgroup_extended_types extension has been promoted to core in version 1.2." 
) VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderSubgroupExtendedTypesExtensionName = VK_KHR_SHADER_SUBGROUP_EXTENDED_TYPES_EXTENSION_NAME; - VULKAN_HPP_DEPRECATED( "The VK_KHR_shader_subgroup_extended_types extension has been promoted to core in version 1.2." ) - VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderSubgroupExtendedTypesSpecVersion = VK_KHR_SHADER_SUBGROUP_EXTENDED_TYPES_SPEC_VERSION; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderSubgroupExtendedTypesSpecVersion = VK_KHR_SHADER_SUBGROUP_EXTENDED_TYPES_SPEC_VERSION; //=== VK_KHR_8bit_storage === - VULKAN_HPP_DEPRECATED( "The VK_KHR_8bit_storage extension has been promoted to core in version 1.2." ) VULKAN_HPP_CONSTEXPR_INLINE auto KHR8BitStorageExtensionName = VK_KHR_8BIT_STORAGE_EXTENSION_NAME; - VULKAN_HPP_DEPRECATED( "The VK_KHR_8bit_storage extension has been promoted to core in version 1.2." ) - VULKAN_HPP_CONSTEXPR_INLINE auto KHR8BitStorageSpecVersion = VK_KHR_8BIT_STORAGE_SPEC_VERSION; + VULKAN_HPP_CONSTEXPR_INLINE auto KHR8BitStorageSpecVersion = VK_KHR_8BIT_STORAGE_SPEC_VERSION; //=== VK_EXT_external_memory_host === VULKAN_HPP_CONSTEXPR_INLINE auto EXTExternalMemoryHostExtensionName = VK_EXT_EXTERNAL_MEMORY_HOST_EXTENSION_NAME; @@ -7669,10 +7611,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR_INLINE auto AMDBufferMarkerSpecVersion = VK_AMD_BUFFER_MARKER_SPEC_VERSION; //=== VK_KHR_shader_atomic_int64 === - VULKAN_HPP_DEPRECATED( "The VK_KHR_shader_atomic_int64 extension has been promoted to core in version 1.2." ) VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderAtomicInt64ExtensionName = VK_KHR_SHADER_ATOMIC_INT64_EXTENSION_NAME; - VULKAN_HPP_DEPRECATED( "The VK_KHR_shader_atomic_int64 extension has been promoted to core in version 1.2." ) - VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderAtomicInt64SpecVersion = VK_KHR_SHADER_ATOMIC_INT64_SPEC_VERSION; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderAtomicInt64SpecVersion = VK_KHR_SHADER_ATOMIC_INT64_SPEC_VERSION; //=== VK_KHR_shader_clock === VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderClockExtensionName = VK_KHR_SHADER_CLOCK_EXTENSION_NAME; @@ -7683,10 +7623,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR_INLINE auto AMDPipelineCompilerControlSpecVersion = VK_AMD_PIPELINE_COMPILER_CONTROL_SPEC_VERSION; //=== VK_EXT_calibrated_timestamps === - VULKAN_HPP_DEPRECATED( "The VK_EXT_calibrated_timestamps extension has been promoted to VK_KHR_calibrated_timestamps." ) VULKAN_HPP_CONSTEXPR_INLINE auto EXTCalibratedTimestampsExtensionName = VK_EXT_CALIBRATED_TIMESTAMPS_EXTENSION_NAME; - VULKAN_HPP_DEPRECATED( "The VK_EXT_calibrated_timestamps extension has been promoted to VK_KHR_calibrated_timestamps." ) - VULKAN_HPP_CONSTEXPR_INLINE auto EXTCalibratedTimestampsSpecVersion = VK_EXT_CALIBRATED_TIMESTAMPS_SPEC_VERSION; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTCalibratedTimestampsSpecVersion = VK_EXT_CALIBRATED_TIMESTAMPS_SPEC_VERSION; //=== VK_AMD_shader_core_properties === VULKAN_HPP_CONSTEXPR_INLINE auto AMDShaderCorePropertiesExtensionName = VK_AMD_SHADER_CORE_PROPERTIES_EXTENSION_NAME; @@ -7705,10 +7643,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR_INLINE auto AMDMemoryOverallocationBehaviorSpecVersion = VK_AMD_MEMORY_OVERALLOCATION_BEHAVIOR_SPEC_VERSION; //=== VK_EXT_vertex_attribute_divisor === - VULKAN_HPP_DEPRECATED( "The VK_EXT_vertex_attribute_divisor extension has been promoted to VK_KHR_vertex_attribute_divisor." 
) VULKAN_HPP_CONSTEXPR_INLINE auto EXTVertexAttributeDivisorExtensionName = VK_EXT_VERTEX_ATTRIBUTE_DIVISOR_EXTENSION_NAME; - VULKAN_HPP_DEPRECATED( "The VK_EXT_vertex_attribute_divisor extension has been promoted to VK_KHR_vertex_attribute_divisor." ) - VULKAN_HPP_CONSTEXPR_INLINE auto EXTVertexAttributeDivisorSpecVersion = VK_EXT_VERTEX_ATTRIBUTE_DIVISOR_SPEC_VERSION; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTVertexAttributeDivisorSpecVersion = VK_EXT_VERTEX_ATTRIBUTE_DIVISOR_SPEC_VERSION; #if defined( VK_USE_PLATFORM_GGP ) //=== VK_GGP_frame_token === @@ -7717,32 +7653,24 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VK_USE_PLATFORM_GGP*/ //=== VK_EXT_pipeline_creation_feedback === - VULKAN_HPP_DEPRECATED( "The VK_EXT_pipeline_creation_feedback extension has been promoted to core in version 1.3." ) VULKAN_HPP_CONSTEXPR_INLINE auto EXTPipelineCreationFeedbackExtensionName = VK_EXT_PIPELINE_CREATION_FEEDBACK_EXTENSION_NAME; - VULKAN_HPP_DEPRECATED( "The VK_EXT_pipeline_creation_feedback extension has been promoted to core in version 1.3." ) - VULKAN_HPP_CONSTEXPR_INLINE auto EXTPipelineCreationFeedbackSpecVersion = VK_EXT_PIPELINE_CREATION_FEEDBACK_SPEC_VERSION; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTPipelineCreationFeedbackSpecVersion = VK_EXT_PIPELINE_CREATION_FEEDBACK_SPEC_VERSION; //=== VK_KHR_driver_properties === - VULKAN_HPP_DEPRECATED( "The VK_KHR_driver_properties extension has been promoted to core in version 1.2." ) VULKAN_HPP_CONSTEXPR_INLINE auto KHRDriverPropertiesExtensionName = VK_KHR_DRIVER_PROPERTIES_EXTENSION_NAME; - VULKAN_HPP_DEPRECATED( "The VK_KHR_driver_properties extension has been promoted to core in version 1.2." ) - VULKAN_HPP_CONSTEXPR_INLINE auto KHRDriverPropertiesSpecVersion = VK_KHR_DRIVER_PROPERTIES_SPEC_VERSION; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRDriverPropertiesSpecVersion = VK_KHR_DRIVER_PROPERTIES_SPEC_VERSION; //=== VK_KHR_shader_float_controls === - VULKAN_HPP_DEPRECATED( "The VK_KHR_shader_float_controls extension has been promoted to core in version 1.2." ) VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderFloatControlsExtensionName = VK_KHR_SHADER_FLOAT_CONTROLS_EXTENSION_NAME; - VULKAN_HPP_DEPRECATED( "The VK_KHR_shader_float_controls extension has been promoted to core in version 1.2." ) - VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderFloatControlsSpecVersion = VK_KHR_SHADER_FLOAT_CONTROLS_SPEC_VERSION; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderFloatControlsSpecVersion = VK_KHR_SHADER_FLOAT_CONTROLS_SPEC_VERSION; //=== VK_NV_shader_subgroup_partitioned === VULKAN_HPP_CONSTEXPR_INLINE auto NVShaderSubgroupPartitionedExtensionName = VK_NV_SHADER_SUBGROUP_PARTITIONED_EXTENSION_NAME; VULKAN_HPP_CONSTEXPR_INLINE auto NVShaderSubgroupPartitionedSpecVersion = VK_NV_SHADER_SUBGROUP_PARTITIONED_SPEC_VERSION; //=== VK_KHR_depth_stencil_resolve === - VULKAN_HPP_DEPRECATED( "The VK_KHR_depth_stencil_resolve extension has been promoted to core in version 1.2." ) VULKAN_HPP_CONSTEXPR_INLINE auto KHRDepthStencilResolveExtensionName = VK_KHR_DEPTH_STENCIL_RESOLVE_EXTENSION_NAME; - VULKAN_HPP_DEPRECATED( "The VK_KHR_depth_stencil_resolve extension has been promoted to core in version 1.2." 
) - VULKAN_HPP_CONSTEXPR_INLINE auto KHRDepthStencilResolveSpecVersion = VK_KHR_DEPTH_STENCIL_RESOLVE_SPEC_VERSION; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRDepthStencilResolveSpecVersion = VK_KHR_DEPTH_STENCIL_RESOLVE_SPEC_VERSION; //=== VK_KHR_swapchain_mutable_format === VULKAN_HPP_CONSTEXPR_INLINE auto KHRSwapchainMutableFormatExtensionName = VK_KHR_SWAPCHAIN_MUTABLE_FORMAT_EXTENSION_NAME; @@ -7757,10 +7685,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR_INLINE auto NVMeshShaderSpecVersion = VK_NV_MESH_SHADER_SPEC_VERSION; //=== VK_NV_fragment_shader_barycentric === - VULKAN_HPP_DEPRECATED( "The VK_NV_fragment_shader_barycentric extension has been promoted to VK_KHR_fragment_shader_barycentric." ) VULKAN_HPP_CONSTEXPR_INLINE auto NVFragmentShaderBarycentricExtensionName = VK_NV_FRAGMENT_SHADER_BARYCENTRIC_EXTENSION_NAME; - VULKAN_HPP_DEPRECATED( "The VK_NV_fragment_shader_barycentric extension has been promoted to VK_KHR_fragment_shader_barycentric." ) - VULKAN_HPP_CONSTEXPR_INLINE auto NVFragmentShaderBarycentricSpecVersion = VK_NV_FRAGMENT_SHADER_BARYCENTRIC_SPEC_VERSION; + VULKAN_HPP_CONSTEXPR_INLINE auto NVFragmentShaderBarycentricSpecVersion = VK_NV_FRAGMENT_SHADER_BARYCENTRIC_SPEC_VERSION; //=== VK_NV_shader_image_footprint === VULKAN_HPP_CONSTEXPR_INLINE auto NVShaderImageFootprintExtensionName = VK_NV_SHADER_IMAGE_FOOTPRINT_EXTENSION_NAME; @@ -7775,10 +7701,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR_INLINE auto NVDeviceDiagnosticCheckpointsSpecVersion = VK_NV_DEVICE_DIAGNOSTIC_CHECKPOINTS_SPEC_VERSION; //=== VK_KHR_timeline_semaphore === - VULKAN_HPP_DEPRECATED( "The VK_KHR_timeline_semaphore extension has been promoted to core in version 1.2." ) VULKAN_HPP_CONSTEXPR_INLINE auto KHRTimelineSemaphoreExtensionName = VK_KHR_TIMELINE_SEMAPHORE_EXTENSION_NAME; - VULKAN_HPP_DEPRECATED( "The VK_KHR_timeline_semaphore extension has been promoted to core in version 1.2." ) - VULKAN_HPP_CONSTEXPR_INLINE auto KHRTimelineSemaphoreSpecVersion = VK_KHR_TIMELINE_SEMAPHORE_SPEC_VERSION; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRTimelineSemaphoreSpecVersion = VK_KHR_TIMELINE_SEMAPHORE_SPEC_VERSION; //=== VK_INTEL_shader_integer_functions2 === VULKAN_HPP_CONSTEXPR_INLINE auto INTELShaderIntegerFunctions2ExtensionName = VK_INTEL_SHADER_INTEGER_FUNCTIONS_2_EXTENSION_NAME; @@ -7789,10 +7713,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR_INLINE auto INTELPerformanceQuerySpecVersion = VK_INTEL_PERFORMANCE_QUERY_SPEC_VERSION; //=== VK_KHR_vulkan_memory_model === - VULKAN_HPP_DEPRECATED( "The VK_KHR_vulkan_memory_model extension has been promoted to core in version 1.2." ) VULKAN_HPP_CONSTEXPR_INLINE auto KHRVulkanMemoryModelExtensionName = VK_KHR_VULKAN_MEMORY_MODEL_EXTENSION_NAME; - VULKAN_HPP_DEPRECATED( "The VK_KHR_vulkan_memory_model extension has been promoted to core in version 1.2." ) - VULKAN_HPP_CONSTEXPR_INLINE auto KHRVulkanMemoryModelSpecVersion = VK_KHR_VULKAN_MEMORY_MODEL_SPEC_VERSION; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRVulkanMemoryModelSpecVersion = VK_KHR_VULKAN_MEMORY_MODEL_SPEC_VERSION; //=== VK_EXT_pci_bus_info === VULKAN_HPP_CONSTEXPR_INLINE auto EXTPciBusInfoExtensionName = VK_EXT_PCI_BUS_INFO_EXTENSION_NAME; @@ -7809,10 +7731,8 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VK_USE_PLATFORM_FUCHSIA*/ //=== VK_KHR_shader_terminate_invocation === - VULKAN_HPP_DEPRECATED( "The VK_KHR_shader_terminate_invocation extension has been promoted to core in version 1.3." 
) VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderTerminateInvocationExtensionName = VK_KHR_SHADER_TERMINATE_INVOCATION_EXTENSION_NAME; - VULKAN_HPP_DEPRECATED( "The VK_KHR_shader_terminate_invocation extension has been promoted to core in version 1.3." ) - VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderTerminateInvocationSpecVersion = VK_KHR_SHADER_TERMINATE_INVOCATION_SPEC_VERSION; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderTerminateInvocationSpecVersion = VK_KHR_SHADER_TERMINATE_INVOCATION_SPEC_VERSION; #if defined( VK_USE_PLATFORM_METAL_EXT ) //=== VK_EXT_metal_surface === @@ -7825,10 +7745,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR_INLINE auto EXTFragmentDensityMapSpecVersion = VK_EXT_FRAGMENT_DENSITY_MAP_SPEC_VERSION; //=== VK_EXT_scalar_block_layout === - VULKAN_HPP_DEPRECATED( "The VK_EXT_scalar_block_layout extension has been promoted to core in version 1.2." ) VULKAN_HPP_CONSTEXPR_INLINE auto EXTScalarBlockLayoutExtensionName = VK_EXT_SCALAR_BLOCK_LAYOUT_EXTENSION_NAME; - VULKAN_HPP_DEPRECATED( "The VK_EXT_scalar_block_layout extension has been promoted to core in version 1.2." ) - VULKAN_HPP_CONSTEXPR_INLINE auto EXTScalarBlockLayoutSpecVersion = VK_EXT_SCALAR_BLOCK_LAYOUT_SPEC_VERSION; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTScalarBlockLayoutSpecVersion = VK_EXT_SCALAR_BLOCK_LAYOUT_SPEC_VERSION; //=== VK_GOOGLE_hlsl_functionality1 === VULKAN_HPP_CONSTEXPR_INLINE auto GOOGLEHlslFunctionality1ExtensionName = VK_GOOGLE_HLSL_FUNCTIONALITY_1_EXTENSION_NAME; @@ -7839,10 +7757,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR_INLINE auto GOOGLEDecorateStringSpecVersion = VK_GOOGLE_DECORATE_STRING_SPEC_VERSION; //=== VK_EXT_subgroup_size_control === - VULKAN_HPP_DEPRECATED( "The VK_EXT_subgroup_size_control extension has been promoted to core in version 1.3." ) VULKAN_HPP_CONSTEXPR_INLINE auto EXTSubgroupSizeControlExtensionName = VK_EXT_SUBGROUP_SIZE_CONTROL_EXTENSION_NAME; - VULKAN_HPP_DEPRECATED( "The VK_EXT_subgroup_size_control extension has been promoted to core in version 1.3." ) - VULKAN_HPP_CONSTEXPR_INLINE auto EXTSubgroupSizeControlSpecVersion = VK_EXT_SUBGROUP_SIZE_CONTROL_SPEC_VERSION; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTSubgroupSizeControlSpecVersion = VK_EXT_SUBGROUP_SIZE_CONTROL_SPEC_VERSION; //=== VK_KHR_fragment_shading_rate === VULKAN_HPP_CONSTEXPR_INLINE auto KHRFragmentShadingRateExtensionName = VK_KHR_FRAGMENT_SHADING_RATE_EXTENSION_NAME; @@ -7869,10 +7785,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderQuadControlSpecVersion = VK_KHR_SHADER_QUAD_CONTROL_SPEC_VERSION; //=== VK_KHR_spirv_1_4 === - VULKAN_HPP_DEPRECATED( "The VK_KHR_spirv_1_4 extension has been promoted to core in version 1.2." ) VULKAN_HPP_CONSTEXPR_INLINE auto KHRSpirv14ExtensionName = VK_KHR_SPIRV_1_4_EXTENSION_NAME; - VULKAN_HPP_DEPRECATED( "The VK_KHR_spirv_1_4 extension has been promoted to core in version 1.2." 
) - VULKAN_HPP_CONSTEXPR_INLINE auto KHRSpirv14SpecVersion = VK_KHR_SPIRV_1_4_SPEC_VERSION; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRSpirv14SpecVersion = VK_KHR_SPIRV_1_4_SPEC_VERSION; //=== VK_EXT_memory_budget === VULKAN_HPP_CONSTEXPR_INLINE auto EXTMemoryBudgetExtensionName = VK_EXT_MEMORY_BUDGET_EXTENSION_NAME; @@ -7891,10 +7805,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR_INLINE auto NVDedicatedAllocationImageAliasingSpecVersion = VK_NV_DEDICATED_ALLOCATION_IMAGE_ALIASING_SPEC_VERSION; //=== VK_KHR_separate_depth_stencil_layouts === - VULKAN_HPP_DEPRECATED( "The VK_KHR_separate_depth_stencil_layouts extension has been promoted to core in version 1.2." ) VULKAN_HPP_CONSTEXPR_INLINE auto KHRSeparateDepthStencilLayoutsExtensionName = VK_KHR_SEPARATE_DEPTH_STENCIL_LAYOUTS_EXTENSION_NAME; - VULKAN_HPP_DEPRECATED( "The VK_KHR_separate_depth_stencil_layouts extension has been promoted to core in version 1.2." ) - VULKAN_HPP_CONSTEXPR_INLINE auto KHRSeparateDepthStencilLayoutsSpecVersion = VK_KHR_SEPARATE_DEPTH_STENCIL_LAYOUTS_SPEC_VERSION; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRSeparateDepthStencilLayoutsSpecVersion = VK_KHR_SEPARATE_DEPTH_STENCIL_LAYOUTS_SPEC_VERSION; //=== VK_EXT_buffer_device_address === VULKAN_HPP_DEPRECATED( "The VK_EXT_buffer_device_address extension has been deprecated by VK_KHR_buffer_device_address." ) @@ -7903,16 +7815,12 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR_INLINE auto EXTBufferDeviceAddressSpecVersion = VK_EXT_BUFFER_DEVICE_ADDRESS_SPEC_VERSION; //=== VK_EXT_tooling_info === - VULKAN_HPP_DEPRECATED( "The VK_EXT_tooling_info extension has been promoted to core in version 1.3." ) VULKAN_HPP_CONSTEXPR_INLINE auto EXTToolingInfoExtensionName = VK_EXT_TOOLING_INFO_EXTENSION_NAME; - VULKAN_HPP_DEPRECATED( "The VK_EXT_tooling_info extension has been promoted to core in version 1.3." ) - VULKAN_HPP_CONSTEXPR_INLINE auto EXTToolingInfoSpecVersion = VK_EXT_TOOLING_INFO_SPEC_VERSION; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTToolingInfoSpecVersion = VK_EXT_TOOLING_INFO_SPEC_VERSION; //=== VK_EXT_separate_stencil_usage === - VULKAN_HPP_DEPRECATED( "The VK_EXT_separate_stencil_usage extension has been promoted to core in version 1.2." ) VULKAN_HPP_CONSTEXPR_INLINE auto EXTSeparateStencilUsageExtensionName = VK_EXT_SEPARATE_STENCIL_USAGE_EXTENSION_NAME; - VULKAN_HPP_DEPRECATED( "The VK_EXT_separate_stencil_usage extension has been promoted to core in version 1.2." ) - VULKAN_HPP_CONSTEXPR_INLINE auto EXTSeparateStencilUsageSpecVersion = VK_EXT_SEPARATE_STENCIL_USAGE_SPEC_VERSION; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTSeparateStencilUsageSpecVersion = VK_EXT_SEPARATE_STENCIL_USAGE_SPEC_VERSION; //=== VK_EXT_validation_features === VULKAN_HPP_DEPRECATED( "The VK_EXT_validation_features extension has been deprecated by VK_EXT_layer_settings." ) @@ -7941,10 +7849,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR_INLINE auto EXTYcbcrImageArraysSpecVersion = VK_EXT_YCBCR_IMAGE_ARRAYS_SPEC_VERSION; //=== VK_KHR_uniform_buffer_standard_layout === - VULKAN_HPP_DEPRECATED( "The VK_KHR_uniform_buffer_standard_layout extension has been promoted to core in version 1.2." ) VULKAN_HPP_CONSTEXPR_INLINE auto KHRUniformBufferStandardLayoutExtensionName = VK_KHR_UNIFORM_BUFFER_STANDARD_LAYOUT_EXTENSION_NAME; - VULKAN_HPP_DEPRECATED( "The VK_KHR_uniform_buffer_standard_layout extension has been promoted to core in version 1.2." 
) - VULKAN_HPP_CONSTEXPR_INLINE auto KHRUniformBufferStandardLayoutSpecVersion = VK_KHR_UNIFORM_BUFFER_STANDARD_LAYOUT_SPEC_VERSION; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRUniformBufferStandardLayoutSpecVersion = VK_KHR_UNIFORM_BUFFER_STANDARD_LAYOUT_SPEC_VERSION; //=== VK_EXT_provoking_vertex === VULKAN_HPP_CONSTEXPR_INLINE auto EXTProvokingVertexExtensionName = VK_EXT_PROVOKING_VERTEX_EXTENSION_NAME; @@ -7961,38 +7867,28 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR_INLINE auto EXTHeadlessSurfaceSpecVersion = VK_EXT_HEADLESS_SURFACE_SPEC_VERSION; //=== VK_KHR_buffer_device_address === - VULKAN_HPP_DEPRECATED( "The VK_KHR_buffer_device_address extension has been promoted to core in version 1.2." ) VULKAN_HPP_CONSTEXPR_INLINE auto KHRBufferDeviceAddressExtensionName = VK_KHR_BUFFER_DEVICE_ADDRESS_EXTENSION_NAME; - VULKAN_HPP_DEPRECATED( "The VK_KHR_buffer_device_address extension has been promoted to core in version 1.2." ) - VULKAN_HPP_CONSTEXPR_INLINE auto KHRBufferDeviceAddressSpecVersion = VK_KHR_BUFFER_DEVICE_ADDRESS_SPEC_VERSION; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRBufferDeviceAddressSpecVersion = VK_KHR_BUFFER_DEVICE_ADDRESS_SPEC_VERSION; //=== VK_EXT_line_rasterization === - VULKAN_HPP_DEPRECATED( "The VK_EXT_line_rasterization extension has been promoted to VK_KHR_line_rasterization." ) VULKAN_HPP_CONSTEXPR_INLINE auto EXTLineRasterizationExtensionName = VK_EXT_LINE_RASTERIZATION_EXTENSION_NAME; - VULKAN_HPP_DEPRECATED( "The VK_EXT_line_rasterization extension has been promoted to VK_KHR_line_rasterization." ) - VULKAN_HPP_CONSTEXPR_INLINE auto EXTLineRasterizationSpecVersion = VK_EXT_LINE_RASTERIZATION_SPEC_VERSION; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTLineRasterizationSpecVersion = VK_EXT_LINE_RASTERIZATION_SPEC_VERSION; //=== VK_EXT_shader_atomic_float === VULKAN_HPP_CONSTEXPR_INLINE auto EXTShaderAtomicFloatExtensionName = VK_EXT_SHADER_ATOMIC_FLOAT_EXTENSION_NAME; VULKAN_HPP_CONSTEXPR_INLINE auto EXTShaderAtomicFloatSpecVersion = VK_EXT_SHADER_ATOMIC_FLOAT_SPEC_VERSION; //=== VK_EXT_host_query_reset === - VULKAN_HPP_DEPRECATED( "The VK_EXT_host_query_reset extension has been promoted to core in version 1.2." ) VULKAN_HPP_CONSTEXPR_INLINE auto EXTHostQueryResetExtensionName = VK_EXT_HOST_QUERY_RESET_EXTENSION_NAME; - VULKAN_HPP_DEPRECATED( "The VK_EXT_host_query_reset extension has been promoted to core in version 1.2." ) - VULKAN_HPP_CONSTEXPR_INLINE auto EXTHostQueryResetSpecVersion = VK_EXT_HOST_QUERY_RESET_SPEC_VERSION; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTHostQueryResetSpecVersion = VK_EXT_HOST_QUERY_RESET_SPEC_VERSION; //=== VK_EXT_index_type_uint8 === - VULKAN_HPP_DEPRECATED( "The VK_EXT_index_type_uint8 extension has been promoted to VK_KHR_index_type_uint8." ) VULKAN_HPP_CONSTEXPR_INLINE auto EXTIndexTypeUint8ExtensionName = VK_EXT_INDEX_TYPE_UINT8_EXTENSION_NAME; - VULKAN_HPP_DEPRECATED( "The VK_EXT_index_type_uint8 extension has been promoted to VK_KHR_index_type_uint8." ) - VULKAN_HPP_CONSTEXPR_INLINE auto EXTIndexTypeUint8SpecVersion = VK_EXT_INDEX_TYPE_UINT8_SPEC_VERSION; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTIndexTypeUint8SpecVersion = VK_EXT_INDEX_TYPE_UINT8_SPEC_VERSION; //=== VK_EXT_extended_dynamic_state === - VULKAN_HPP_DEPRECATED( "The VK_EXT_extended_dynamic_state extension has been promoted to core in version 1.3." 
) VULKAN_HPP_CONSTEXPR_INLINE auto EXTExtendedDynamicStateExtensionName = VK_EXT_EXTENDED_DYNAMIC_STATE_EXTENSION_NAME; - VULKAN_HPP_DEPRECATED( "The VK_EXT_extended_dynamic_state extension has been promoted to core in version 1.3." ) - VULKAN_HPP_CONSTEXPR_INLINE auto EXTExtendedDynamicStateSpecVersion = VK_EXT_EXTENDED_DYNAMIC_STATE_SPEC_VERSION; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTExtendedDynamicStateSpecVersion = VK_EXT_EXTENDED_DYNAMIC_STATE_SPEC_VERSION; //=== VK_KHR_deferred_host_operations === VULKAN_HPP_CONSTEXPR_INLINE auto KHRDeferredHostOperationsExtensionName = VK_KHR_DEFERRED_HOST_OPERATIONS_EXTENSION_NAME; @@ -8027,10 +7923,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR_INLINE auto EXTSwapchainMaintenance1SpecVersion = VK_EXT_SWAPCHAIN_MAINTENANCE_1_SPEC_VERSION; //=== VK_EXT_shader_demote_to_helper_invocation === - VULKAN_HPP_DEPRECATED( "The VK_EXT_shader_demote_to_helper_invocation extension has been promoted to core in version 1.3." ) VULKAN_HPP_CONSTEXPR_INLINE auto EXTShaderDemoteToHelperInvocationExtensionName = VK_EXT_SHADER_DEMOTE_TO_HELPER_INVOCATION_EXTENSION_NAME; - VULKAN_HPP_DEPRECATED( "The VK_EXT_shader_demote_to_helper_invocation extension has been promoted to core in version 1.3." ) - VULKAN_HPP_CONSTEXPR_INLINE auto EXTShaderDemoteToHelperInvocationSpecVersion = VK_EXT_SHADER_DEMOTE_TO_HELPER_INVOCATION_SPEC_VERSION; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTShaderDemoteToHelperInvocationSpecVersion = VK_EXT_SHADER_DEMOTE_TO_HELPER_INVOCATION_SPEC_VERSION; //=== VK_NV_device_generated_commands === VULKAN_HPP_CONSTEXPR_INLINE auto NVDeviceGeneratedCommandsExtensionName = VK_NV_DEVICE_GENERATED_COMMANDS_EXTENSION_NAME; @@ -8041,16 +7935,12 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR_INLINE auto NVInheritedViewportScissorSpecVersion = VK_NV_INHERITED_VIEWPORT_SCISSOR_SPEC_VERSION; //=== VK_KHR_shader_integer_dot_product === - VULKAN_HPP_DEPRECATED( "The VK_KHR_shader_integer_dot_product extension has been promoted to core in version 1.3." ) VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderIntegerDotProductExtensionName = VK_KHR_SHADER_INTEGER_DOT_PRODUCT_EXTENSION_NAME; - VULKAN_HPP_DEPRECATED( "The VK_KHR_shader_integer_dot_product extension has been promoted to core in version 1.3." ) - VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderIntegerDotProductSpecVersion = VK_KHR_SHADER_INTEGER_DOT_PRODUCT_SPEC_VERSION; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderIntegerDotProductSpecVersion = VK_KHR_SHADER_INTEGER_DOT_PRODUCT_SPEC_VERSION; //=== VK_EXT_texel_buffer_alignment === - VULKAN_HPP_DEPRECATED( "The VK_EXT_texel_buffer_alignment extension has been promoted to core in version 1.3." ) VULKAN_HPP_CONSTEXPR_INLINE auto EXTTexelBufferAlignmentExtensionName = VK_EXT_TEXEL_BUFFER_ALIGNMENT_EXTENSION_NAME; - VULKAN_HPP_DEPRECATED( "The VK_EXT_texel_buffer_alignment extension has been promoted to core in version 1.3." 
) - VULKAN_HPP_CONSTEXPR_INLINE auto EXTTexelBufferAlignmentSpecVersion = VK_EXT_TEXEL_BUFFER_ALIGNMENT_SPEC_VERSION; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTTexelBufferAlignmentSpecVersion = VK_EXT_TEXEL_BUFFER_ALIGNMENT_SPEC_VERSION; //=== VK_QCOM_render_pass_transform === VULKAN_HPP_CONSTEXPR_INLINE auto QCOMRenderPassTransformExtensionName = VK_QCOM_RENDER_PASS_TRANSFORM_EXTENSION_NAME; @@ -8089,26 +7979,20 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR_INLINE auto NVPresentBarrierSpecVersion = VK_NV_PRESENT_BARRIER_SPEC_VERSION; //=== VK_KHR_shader_non_semantic_info === - VULKAN_HPP_DEPRECATED( "The VK_KHR_shader_non_semantic_info extension has been promoted to core in version 1.3." ) VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderNonSemanticInfoExtensionName = VK_KHR_SHADER_NON_SEMANTIC_INFO_EXTENSION_NAME; - VULKAN_HPP_DEPRECATED( "The VK_KHR_shader_non_semantic_info extension has been promoted to core in version 1.3." ) - VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderNonSemanticInfoSpecVersion = VK_KHR_SHADER_NON_SEMANTIC_INFO_SPEC_VERSION; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderNonSemanticInfoSpecVersion = VK_KHR_SHADER_NON_SEMANTIC_INFO_SPEC_VERSION; //=== VK_KHR_present_id === VULKAN_HPP_CONSTEXPR_INLINE auto KHRPresentIdExtensionName = VK_KHR_PRESENT_ID_EXTENSION_NAME; VULKAN_HPP_CONSTEXPR_INLINE auto KHRPresentIdSpecVersion = VK_KHR_PRESENT_ID_SPEC_VERSION; //=== VK_EXT_private_data === - VULKAN_HPP_DEPRECATED( "The VK_EXT_private_data extension has been promoted to core in version 1.3." ) VULKAN_HPP_CONSTEXPR_INLINE auto EXTPrivateDataExtensionName = VK_EXT_PRIVATE_DATA_EXTENSION_NAME; - VULKAN_HPP_DEPRECATED( "The VK_EXT_private_data extension has been promoted to core in version 1.3." ) - VULKAN_HPP_CONSTEXPR_INLINE auto EXTPrivateDataSpecVersion = VK_EXT_PRIVATE_DATA_SPEC_VERSION; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTPrivateDataSpecVersion = VK_EXT_PRIVATE_DATA_SPEC_VERSION; //=== VK_EXT_pipeline_creation_cache_control === - VULKAN_HPP_DEPRECATED( "The VK_EXT_pipeline_creation_cache_control extension has been promoted to core in version 1.3." ) VULKAN_HPP_CONSTEXPR_INLINE auto EXTPipelineCreationCacheControlExtensionName = VK_EXT_PIPELINE_CREATION_CACHE_CONTROL_EXTENSION_NAME; - VULKAN_HPP_DEPRECATED( "The VK_EXT_pipeline_creation_cache_control extension has been promoted to core in version 1.3." ) - VULKAN_HPP_CONSTEXPR_INLINE auto EXTPipelineCreationCacheControlSpecVersion = VK_EXT_PIPELINE_CREATION_CACHE_CONTROL_SPEC_VERSION; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTPipelineCreationCacheControlSpecVersion = VK_EXT_PIPELINE_CREATION_CACHE_CONTROL_SPEC_VERSION; //=== VK_KHR_video_encode_queue === VULKAN_HPP_CONSTEXPR_INLINE auto KHRVideoEncodeQueueExtensionName = VK_KHR_VIDEO_ENCODE_QUEUE_EXTENSION_NAME; @@ -8139,10 +8023,8 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VK_USE_PLATFORM_METAL_EXT*/ //=== VK_KHR_synchronization2 === - VULKAN_HPP_DEPRECATED( "The VK_KHR_synchronization2 extension has been promoted to core in version 1.3." ) VULKAN_HPP_CONSTEXPR_INLINE auto KHRSynchronization2ExtensionName = VK_KHR_SYNCHRONIZATION_2_EXTENSION_NAME; - VULKAN_HPP_DEPRECATED( "The VK_KHR_synchronization2 extension has been promoted to core in version 1.3." 
) - VULKAN_HPP_CONSTEXPR_INLINE auto KHRSynchronization2SpecVersion = VK_KHR_SYNCHRONIZATION_2_SPEC_VERSION; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRSynchronization2SpecVersion = VK_KHR_SYNCHRONIZATION_2_SPEC_VERSION; //=== VK_EXT_descriptor_buffer === VULKAN_HPP_CONSTEXPR_INLINE auto EXTDescriptorBufferExtensionName = VK_EXT_DESCRIPTOR_BUFFER_EXTENSION_NAME; @@ -8165,10 +8047,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderSubgroupUniformControlFlowSpecVersion = VK_KHR_SHADER_SUBGROUP_UNIFORM_CONTROL_FLOW_SPEC_VERSION; //=== VK_KHR_zero_initialize_workgroup_memory === - VULKAN_HPP_DEPRECATED( "The VK_KHR_zero_initialize_workgroup_memory extension has been promoted to core in version 1.3." ) VULKAN_HPP_CONSTEXPR_INLINE auto KHRZeroInitializeWorkgroupMemoryExtensionName = VK_KHR_ZERO_INITIALIZE_WORKGROUP_MEMORY_EXTENSION_NAME; - VULKAN_HPP_DEPRECATED( "The VK_KHR_zero_initialize_workgroup_memory extension has been promoted to core in version 1.3." ) - VULKAN_HPP_CONSTEXPR_INLINE auto KHRZeroInitializeWorkgroupMemorySpecVersion = VK_KHR_ZERO_INITIALIZE_WORKGROUP_MEMORY_SPEC_VERSION; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRZeroInitializeWorkgroupMemorySpecVersion = VK_KHR_ZERO_INITIALIZE_WORKGROUP_MEMORY_SPEC_VERSION; //=== VK_NV_fragment_shading_rate_enums === VULKAN_HPP_CONSTEXPR_INLINE auto NVFragmentShadingRateEnumsExtensionName = VK_NV_FRAGMENT_SHADING_RATE_ENUMS_EXTENSION_NAME; @@ -8183,10 +8063,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR_INLINE auto EXTMeshShaderSpecVersion = VK_EXT_MESH_SHADER_SPEC_VERSION; //=== VK_EXT_ycbcr_2plane_444_formats === - VULKAN_HPP_DEPRECATED( "The VK_EXT_ycbcr_2plane_444_formats extension has been promoted to core in version 1.3." ) VULKAN_HPP_CONSTEXPR_INLINE auto EXTYcbcr2Plane444FormatsExtensionName = VK_EXT_YCBCR_2PLANE_444_FORMATS_EXTENSION_NAME; - VULKAN_HPP_DEPRECATED( "The VK_EXT_ycbcr_2plane_444_formats extension has been promoted to core in version 1.3." ) - VULKAN_HPP_CONSTEXPR_INLINE auto EXTYcbcr2Plane444FormatsSpecVersion = VK_EXT_YCBCR_2PLANE_444_FORMATS_SPEC_VERSION; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTYcbcr2Plane444FormatsSpecVersion = VK_EXT_YCBCR_2PLANE_444_FORMATS_SPEC_VERSION; //=== VK_EXT_fragment_density_map2 === VULKAN_HPP_CONSTEXPR_INLINE auto EXTFragmentDensityMap2ExtensionName = VK_EXT_FRAGMENT_DENSITY_MAP_2_EXTENSION_NAME; @@ -8197,20 +8075,16 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR_INLINE auto QCOMRotatedCopyCommandsSpecVersion = VK_QCOM_ROTATED_COPY_COMMANDS_SPEC_VERSION; //=== VK_EXT_image_robustness === - VULKAN_HPP_DEPRECATED( "The VK_EXT_image_robustness extension has been promoted to core in version 1.3." ) VULKAN_HPP_CONSTEXPR_INLINE auto EXTImageRobustnessExtensionName = VK_EXT_IMAGE_ROBUSTNESS_EXTENSION_NAME; - VULKAN_HPP_DEPRECATED( "The VK_EXT_image_robustness extension has been promoted to core in version 1.3." 
) - VULKAN_HPP_CONSTEXPR_INLINE auto EXTImageRobustnessSpecVersion = VK_EXT_IMAGE_ROBUSTNESS_SPEC_VERSION; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTImageRobustnessSpecVersion = VK_EXT_IMAGE_ROBUSTNESS_SPEC_VERSION; //=== VK_KHR_workgroup_memory_explicit_layout === VULKAN_HPP_CONSTEXPR_INLINE auto KHRWorkgroupMemoryExplicitLayoutExtensionName = VK_KHR_WORKGROUP_MEMORY_EXPLICIT_LAYOUT_EXTENSION_NAME; VULKAN_HPP_CONSTEXPR_INLINE auto KHRWorkgroupMemoryExplicitLayoutSpecVersion = VK_KHR_WORKGROUP_MEMORY_EXPLICIT_LAYOUT_SPEC_VERSION; //=== VK_KHR_copy_commands2 === - VULKAN_HPP_DEPRECATED( "The VK_KHR_copy_commands2 extension has been promoted to core in version 1.3." ) VULKAN_HPP_CONSTEXPR_INLINE auto KHRCopyCommands2ExtensionName = VK_KHR_COPY_COMMANDS_2_EXTENSION_NAME; - VULKAN_HPP_DEPRECATED( "The VK_KHR_copy_commands2 extension has been promoted to core in version 1.3." ) - VULKAN_HPP_CONSTEXPR_INLINE auto KHRCopyCommands2SpecVersion = VK_KHR_COPY_COMMANDS_2_SPEC_VERSION; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRCopyCommands2SpecVersion = VK_KHR_COPY_COMMANDS_2_SPEC_VERSION; //=== VK_EXT_image_compression_control === VULKAN_HPP_CONSTEXPR_INLINE auto EXTImageCompressionControlExtensionName = VK_EXT_IMAGE_COMPRESSION_CONTROL_EXTENSION_NAME; @@ -8221,20 +8095,16 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR_INLINE auto EXTAttachmentFeedbackLoopLayoutSpecVersion = VK_EXT_ATTACHMENT_FEEDBACK_LOOP_LAYOUT_SPEC_VERSION; //=== VK_EXT_4444_formats === - VULKAN_HPP_DEPRECATED( "The VK_EXT_4444_formats extension has been promoted to core in version 1.3." ) VULKAN_HPP_CONSTEXPR_INLINE auto EXT4444FormatsExtensionName = VK_EXT_4444_FORMATS_EXTENSION_NAME; - VULKAN_HPP_DEPRECATED( "The VK_EXT_4444_formats extension has been promoted to core in version 1.3." ) - VULKAN_HPP_CONSTEXPR_INLINE auto EXT4444FormatsSpecVersion = VK_EXT_4444_FORMATS_SPEC_VERSION; + VULKAN_HPP_CONSTEXPR_INLINE auto EXT4444FormatsSpecVersion = VK_EXT_4444_FORMATS_SPEC_VERSION; //=== VK_EXT_device_fault === VULKAN_HPP_CONSTEXPR_INLINE auto EXTDeviceFaultExtensionName = VK_EXT_DEVICE_FAULT_EXTENSION_NAME; VULKAN_HPP_CONSTEXPR_INLINE auto EXTDeviceFaultSpecVersion = VK_EXT_DEVICE_FAULT_SPEC_VERSION; //=== VK_ARM_rasterization_order_attachment_access === - VULKAN_HPP_DEPRECATED( "The VK_ARM_rasterization_order_attachment_access extension has been promoted to VK_EXT_rasterization_order_attachment_access." ) VULKAN_HPP_CONSTEXPR_INLINE auto ARMRasterizationOrderAttachmentAccessExtensionName = VK_ARM_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_EXTENSION_NAME; - VULKAN_HPP_DEPRECATED( "The VK_ARM_rasterization_order_attachment_access extension has been promoted to VK_EXT_rasterization_order_attachment_access." ) - VULKAN_HPP_CONSTEXPR_INLINE auto ARMRasterizationOrderAttachmentAccessSpecVersion = VK_ARM_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_SPEC_VERSION; + VULKAN_HPP_CONSTEXPR_INLINE auto ARMRasterizationOrderAttachmentAccessSpecVersion = VK_ARM_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_SPEC_VERSION; //=== VK_EXT_rgba10x6_formats === VULKAN_HPP_CONSTEXPR_INLINE auto EXTRgba10X6FormatsExtensionName = VK_EXT_RGBA10X6_FORMATS_EXTENSION_NAME; @@ -8253,10 +8123,8 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ //=== VK_VALVE_mutable_descriptor_type === - VULKAN_HPP_DEPRECATED( "The VK_VALVE_mutable_descriptor_type extension has been promoted to VK_EXT_mutable_descriptor_type." 
) VULKAN_HPP_CONSTEXPR_INLINE auto VALVEMutableDescriptorTypeExtensionName = VK_VALVE_MUTABLE_DESCRIPTOR_TYPE_EXTENSION_NAME; - VULKAN_HPP_DEPRECATED( "The VK_VALVE_mutable_descriptor_type extension has been promoted to VK_EXT_mutable_descriptor_type." ) - VULKAN_HPP_CONSTEXPR_INLINE auto VALVEMutableDescriptorTypeSpecVersion = VK_VALVE_MUTABLE_DESCRIPTOR_TYPE_SPEC_VERSION; + VULKAN_HPP_CONSTEXPR_INLINE auto VALVEMutableDescriptorTypeSpecVersion = VK_VALVE_MUTABLE_DESCRIPTOR_TYPE_SPEC_VERSION; //=== VK_EXT_vertex_input_dynamic_state === VULKAN_HPP_CONSTEXPR_INLINE auto EXTVertexInputDynamicStateExtensionName = VK_EXT_VERTEX_INPUT_DYNAMIC_STATE_EXTENSION_NAME; @@ -8279,10 +8147,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR_INLINE auto EXTPrimitiveTopologyListRestartSpecVersion = VK_EXT_PRIMITIVE_TOPOLOGY_LIST_RESTART_SPEC_VERSION; //=== VK_KHR_format_feature_flags2 === - VULKAN_HPP_DEPRECATED( "The VK_KHR_format_feature_flags2 extension has been promoted to core in version 1.3." ) VULKAN_HPP_CONSTEXPR_INLINE auto KHRFormatFeatureFlags2ExtensionName = VK_KHR_FORMAT_FEATURE_FLAGS_2_EXTENSION_NAME; - VULKAN_HPP_DEPRECATED( "The VK_KHR_format_feature_flags2 extension has been promoted to core in version 1.3." ) - VULKAN_HPP_CONSTEXPR_INLINE auto KHRFormatFeatureFlags2SpecVersion = VK_KHR_FORMAT_FEATURE_FLAGS_2_SPEC_VERSION; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRFormatFeatureFlags2SpecVersion = VK_KHR_FORMAT_FEATURE_FLAGS_2_SPEC_VERSION; #if defined( VK_USE_PLATFORM_FUCHSIA ) //=== VK_FUCHSIA_external_memory === @@ -8327,10 +8193,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR_INLINE auto EXTMultisampledRenderToSingleSampledSpecVersion = VK_EXT_MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_SPEC_VERSION; //=== VK_EXT_extended_dynamic_state2 === - VULKAN_HPP_DEPRECATED( "The VK_EXT_extended_dynamic_state2 extension has been promoted to core in version 1.3." ) VULKAN_HPP_CONSTEXPR_INLINE auto EXTExtendedDynamicState2ExtensionName = VK_EXT_EXTENDED_DYNAMIC_STATE_2_EXTENSION_NAME; - VULKAN_HPP_DEPRECATED( "The VK_EXT_extended_dynamic_state2 extension has been promoted to core in version 1.3." ) - VULKAN_HPP_CONSTEXPR_INLINE auto EXTExtendedDynamicState2SpecVersion = VK_EXT_EXTENDED_DYNAMIC_STATE_2_SPEC_VERSION; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTExtendedDynamicState2SpecVersion = VK_EXT_EXTENDED_DYNAMIC_STATE_2_SPEC_VERSION; #if defined( VK_USE_PLATFORM_SCREEN_QNX ) //=== VK_QNX_screen_surface === @@ -8351,10 +8215,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR_INLINE auto KHRRayTracingMaintenance1SpecVersion = VK_KHR_RAY_TRACING_MAINTENANCE_1_SPEC_VERSION; //=== VK_EXT_global_priority_query === - VULKAN_HPP_DEPRECATED( "The VK_EXT_global_priority_query extension has been promoted to VK_KHR_global_priority." ) VULKAN_HPP_CONSTEXPR_INLINE auto EXTGlobalPriorityQueryExtensionName = VK_EXT_GLOBAL_PRIORITY_QUERY_EXTENSION_NAME; - VULKAN_HPP_DEPRECATED( "The VK_EXT_global_priority_query extension has been promoted to VK_KHR_global_priority." 
) - VULKAN_HPP_CONSTEXPR_INLINE auto EXTGlobalPriorityQuerySpecVersion = VK_EXT_GLOBAL_PRIORITY_QUERY_SPEC_VERSION; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTGlobalPriorityQuerySpecVersion = VK_EXT_GLOBAL_PRIORITY_QUERY_SPEC_VERSION; //=== VK_EXT_image_view_min_lod === VULKAN_HPP_CONSTEXPR_INLINE auto EXTImageViewMinLodExtensionName = VK_EXT_IMAGE_VIEW_MIN_LOD_EXTENSION_NAME; @@ -8387,10 +8249,8 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VK_ENABLE_BETA_EXTENSIONS*/ //=== VK_EXT_load_store_op_none === - VULKAN_HPP_DEPRECATED( "The VK_EXT_load_store_op_none extension has been promoted to VK_KHR_load_store_op_none." ) VULKAN_HPP_CONSTEXPR_INLINE auto EXTLoadStoreOpNoneExtensionName = VK_EXT_LOAD_STORE_OP_NONE_EXTENSION_NAME; - VULKAN_HPP_DEPRECATED( "The VK_EXT_load_store_op_none extension has been promoted to VK_KHR_load_store_op_none." ) - VULKAN_HPP_CONSTEXPR_INLINE auto EXTLoadStoreOpNoneSpecVersion = VK_EXT_LOAD_STORE_OP_NONE_SPEC_VERSION; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTLoadStoreOpNoneSpecVersion = VK_EXT_LOAD_STORE_OP_NONE_SPEC_VERSION; //=== VK_HUAWEI_cluster_culling_shader === VULKAN_HPP_CONSTEXPR_INLINE auto HUAWEIClusterCullingShaderExtensionName = VK_HUAWEI_CLUSTER_CULLING_SHADER_EXTENSION_NAME; @@ -8405,10 +8265,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR_INLINE auto EXTPageableDeviceLocalMemorySpecVersion = VK_EXT_PAGEABLE_DEVICE_LOCAL_MEMORY_SPEC_VERSION; //=== VK_KHR_maintenance4 === - VULKAN_HPP_DEPRECATED( "The VK_KHR_maintenance4 extension has been promoted to core in version 1.3." ) VULKAN_HPP_CONSTEXPR_INLINE auto KHRMaintenance4ExtensionName = VK_KHR_MAINTENANCE_4_EXTENSION_NAME; - VULKAN_HPP_DEPRECATED( "The VK_KHR_maintenance4 extension has been promoted to core in version 1.3." ) - VULKAN_HPP_CONSTEXPR_INLINE auto KHRMaintenance4SpecVersion = VK_KHR_MAINTENANCE_4_SPEC_VERSION; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRMaintenance4SpecVersion = VK_KHR_MAINTENANCE_4_SPEC_VERSION; //=== VK_ARM_shader_core_properties === VULKAN_HPP_CONSTEXPR_INLINE auto ARMShaderCorePropertiesExtensionName = VK_ARM_SHADER_CORE_PROPERTIES_EXTENSION_NAME; @@ -8528,6 +8386,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR_INLINE auto KHRMaintenance5ExtensionName = VK_KHR_MAINTENANCE_5_EXTENSION_NAME; VULKAN_HPP_CONSTEXPR_INLINE auto KHRMaintenance5SpecVersion = VK_KHR_MAINTENANCE_5_SPEC_VERSION; + //=== VK_AMD_anti_lag === + VULKAN_HPP_CONSTEXPR_INLINE auto AMDAntiLagExtensionName = VK_AMD_ANTI_LAG_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto AMDAntiLagSpecVersion = VK_AMD_ANTI_LAG_SPEC_VERSION; + //=== VK_KHR_ray_tracing_position_fetch === VULKAN_HPP_CONSTEXPR_INLINE auto KHRRayTracingPositionFetchExtensionName = VK_KHR_RAY_TRACING_POSITION_FETCH_EXTENSION_NAME; VULKAN_HPP_CONSTEXPR_INLINE auto KHRRayTracingPositionFetchSpecVersion = VK_KHR_RAY_TRACING_POSITION_FETCH_SPEC_VERSION; @@ -8536,6 +8398,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR_INLINE auto EXTShaderObjectExtensionName = VK_EXT_SHADER_OBJECT_EXTENSION_NAME; VULKAN_HPP_CONSTEXPR_INLINE auto EXTShaderObjectSpecVersion = VK_EXT_SHADER_OBJECT_SPEC_VERSION; + //=== VK_KHR_pipeline_binary === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRPipelineBinaryExtensionName = VK_KHR_PIPELINE_BINARY_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRPipelineBinarySpecVersion = VK_KHR_PIPELINE_BINARY_SPEC_VERSION; + //=== VK_QCOM_tile_properties === VULKAN_HPP_CONSTEXPR_INLINE auto QCOMTilePropertiesExtensionName = VK_QCOM_TILE_PROPERTIES_EXTENSION_NAME; VULKAN_HPP_CONSTEXPR_INLINE auto 
QCOMTilePropertiesSpecVersion = VK_QCOM_TILE_PROPERTIES_SPEC_VERSION; @@ -8560,6 +8426,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR_INLINE auto EXTMutableDescriptorTypeExtensionName = VK_EXT_MUTABLE_DESCRIPTOR_TYPE_EXTENSION_NAME; VULKAN_HPP_CONSTEXPR_INLINE auto EXTMutableDescriptorTypeSpecVersion = VK_EXT_MUTABLE_DESCRIPTOR_TYPE_SPEC_VERSION; + //=== VK_EXT_legacy_vertex_attributes === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTLegacyVertexAttributesExtensionName = VK_EXT_LEGACY_VERTEX_ATTRIBUTES_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTLegacyVertexAttributesSpecVersion = VK_EXT_LEGACY_VERTEX_ATTRIBUTES_SPEC_VERSION; + //=== VK_EXT_layer_settings === VULKAN_HPP_CONSTEXPR_INLINE auto EXTLayerSettingsExtensionName = VK_EXT_LAYER_SETTINGS_EXTENSION_NAME; VULKAN_HPP_CONSTEXPR_INLINE auto EXTLayerSettingsSpecVersion = VK_EXT_LAYER_SETTINGS_SPEC_VERSION; @@ -8588,6 +8458,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR_INLINE auto QCOMMultiviewPerViewRenderAreasExtensionName = VK_QCOM_MULTIVIEW_PER_VIEW_RENDER_AREAS_EXTENSION_NAME; VULKAN_HPP_CONSTEXPR_INLINE auto QCOMMultiviewPerViewRenderAreasSpecVersion = VK_QCOM_MULTIVIEW_PER_VIEW_RENDER_AREAS_SPEC_VERSION; + //=== VK_KHR_compute_shader_derivatives === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRComputeShaderDerivativesExtensionName = VK_KHR_COMPUTE_SHADER_DERIVATIVES_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRComputeShaderDerivativesSpecVersion = VK_KHR_COMPUTE_SHADER_DERIVATIVES_SPEC_VERSION; + //=== VK_KHR_video_decode_av1 === VULKAN_HPP_CONSTEXPR_INLINE auto KHRVideoDecodeAv1ExtensionName = VK_KHR_VIDEO_DECODE_AV1_EXTENSION_NAME; VULKAN_HPP_CONSTEXPR_INLINE auto KHRVideoDecodeAv1SpecVersion = VK_KHR_VIDEO_DECODE_AV1_SPEC_VERSION; @@ -8670,14 +8544,34 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR_INLINE auto NVRawAccessChainsExtensionName = VK_NV_RAW_ACCESS_CHAINS_EXTENSION_NAME; VULKAN_HPP_CONSTEXPR_INLINE auto NVRawAccessChainsSpecVersion = VK_NV_RAW_ACCESS_CHAINS_SPEC_VERSION; + //=== VK_KHR_shader_relaxed_extended_instruction === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderRelaxedExtendedInstructionExtensionName = VK_KHR_SHADER_RELAXED_EXTENDED_INSTRUCTION_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRShaderRelaxedExtendedInstructionSpecVersion = VK_KHR_SHADER_RELAXED_EXTENDED_INSTRUCTION_SPEC_VERSION; + + //=== VK_NV_command_buffer_inheritance === + VULKAN_HPP_CONSTEXPR_INLINE auto NVCommandBufferInheritanceExtensionName = VK_NV_COMMAND_BUFFER_INHERITANCE_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVCommandBufferInheritanceSpecVersion = VK_NV_COMMAND_BUFFER_INHERITANCE_SPEC_VERSION; + + //=== VK_KHR_maintenance7 === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRMaintenance7ExtensionName = VK_KHR_MAINTENANCE_7_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRMaintenance7SpecVersion = VK_KHR_MAINTENANCE_7_SPEC_VERSION; + //=== VK_NV_shader_atomic_float16_vector === VULKAN_HPP_CONSTEXPR_INLINE auto NVShaderAtomicFloat16VectorExtensionName = VK_NV_SHADER_ATOMIC_FLOAT16_VECTOR_EXTENSION_NAME; VULKAN_HPP_CONSTEXPR_INLINE auto NVShaderAtomicFloat16VectorSpecVersion = VK_NV_SHADER_ATOMIC_FLOAT16_VECTOR_SPEC_VERSION; + //=== VK_EXT_shader_replicated_composites === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTShaderReplicatedCompositesExtensionName = VK_EXT_SHADER_REPLICATED_COMPOSITES_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTShaderReplicatedCompositesSpecVersion = VK_EXT_SHADER_REPLICATED_COMPOSITES_SPEC_VERSION; + //=== VK_NV_ray_tracing_validation === 
VULKAN_HPP_CONSTEXPR_INLINE auto NVRayTracingValidationExtensionName = VK_NV_RAY_TRACING_VALIDATION_EXTENSION_NAME; VULKAN_HPP_CONSTEXPR_INLINE auto NVRayTracingValidationSpecVersion = VK_NV_RAY_TRACING_VALIDATION_SPEC_VERSION; + //=== VK_MESA_image_alignment_control === + VULKAN_HPP_CONSTEXPR_INLINE auto MESAImageAlignmentControlExtensionName = VK_MESA_IMAGE_ALIGNMENT_CONTROL_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto MESAImageAlignmentControlSpecVersion = VK_MESA_IMAGE_ALIGNMENT_CONTROL_SPEC_VERSION; + } // namespace VULKAN_HPP_NAMESPACE // clang-format off @@ -12062,25 +11956,6 @@ namespace VULKAN_HPP_NAMESPACE }; # endif /*VK_USE_PLATFORM_GGP*/ - //=== VK_NV_compute_shader_derivatives === - template <> - struct StructExtends - { - enum - { - value = true - }; - }; - - template <> - struct StructExtends - { - enum - { - value = true - }; - }; - //=== VK_NV_mesh_shader === template <> struct StructExtends @@ -12543,6 +12418,24 @@ namespace VULKAN_HPP_NAMESPACE }; }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + //=== VK_KHR_present_wait === template <> struct StructExtends @@ -15711,6 +15604,25 @@ namespace VULKAN_HPP_NAMESPACE }; }; + //=== VK_AMD_anti_lag === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + //=== VK_KHR_ray_tracing_position_fetch === template <> struct StructExtends @@ -15758,6 +15670,70 @@ namespace VULKAN_HPP_NAMESPACE }; }; + //=== VK_KHR_pipeline_binary === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + //=== VK_QCOM_tile_properties === template <> struct StructExtends @@ -15917,6 +15893,34 @@ namespace VULKAN_HPP_NAMESPACE }; }; + //=== VK_EXT_legacy_vertex_attributes === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + //=== VK_EXT_layer_settings === template <> struct StructExtends @@ -16095,6 +16099,34 @@ namespace VULKAN_HPP_NAMESPACE }; }; + //=== VK_KHR_compute_shader_derivatives === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + //=== VK_KHR_video_decode_av1 === template <> struct StructExtends @@ -16628,6 +16660,90 @@ namespace VULKAN_HPP_NAMESPACE }; }; + //=== VK_KHR_shader_relaxed_extended_instruction === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + //=== VK_NV_command_buffer_inheritance === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + 
//=== VK_KHR_maintenance7 === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + //=== VK_NV_shader_atomic_float16_vector === template <> struct StructExtends @@ -16647,6 +16763,25 @@ namespace VULKAN_HPP_NAMESPACE }; }; + //=== VK_EXT_shader_replicated_composites === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + //=== VK_NV_ray_tracing_validation === template <> struct StructExtends @@ -16666,6 +16801,43 @@ namespace VULKAN_HPP_NAMESPACE }; }; + //=== VK_MESA_image_alignment_control === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + #endif // VULKAN_HPP_DISABLE_ENHANCED_MODE #if VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL @@ -17861,12 +18033,22 @@ namespace VULKAN_HPP_NAMESPACE PFN_vkGetDeviceImageSubresourceLayoutKHR vkGetDeviceImageSubresourceLayoutKHR = 0; PFN_vkGetImageSubresourceLayout2KHR vkGetImageSubresourceLayout2KHR = 0; + //=== VK_AMD_anti_lag === + PFN_vkAntiLagUpdateAMD vkAntiLagUpdateAMD = 0; + //=== VK_EXT_shader_object === PFN_vkCreateShadersEXT vkCreateShadersEXT = 0; PFN_vkDestroyShaderEXT vkDestroyShaderEXT = 0; PFN_vkGetShaderBinaryDataEXT vkGetShaderBinaryDataEXT = 0; PFN_vkCmdBindShadersEXT vkCmdBindShadersEXT = 0; + //=== VK_KHR_pipeline_binary === + PFN_vkCreatePipelineBinariesKHR vkCreatePipelineBinariesKHR = 0; + PFN_vkDestroyPipelineBinaryKHR vkDestroyPipelineBinaryKHR = 0; + PFN_vkGetPipelineKeyKHR vkGetPipelineKeyKHR = 0; + PFN_vkGetPipelineBinaryDataKHR vkGetPipelineBinaryDataKHR = 0; + PFN_vkReleaseCapturedPipelineDataKHR vkReleaseCapturedPipelineDataKHR = 0; + //=== VK_QCOM_tile_properties === PFN_vkGetFramebufferTilePropertiesQCOM vkGetFramebufferTilePropertiesQCOM = 0; PFN_vkGetDynamicRenderingTilePropertiesQCOM vkGetDynamicRenderingTilePropertiesQCOM = 0; @@ -19263,12 +19445,22 @@ namespace VULKAN_HPP_NAMESPACE PFN_vkGetDeviceImageSubresourceLayoutKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceImageSubresourceLayoutKHR" ) ); vkGetImageSubresourceLayout2KHR = PFN_vkGetImageSubresourceLayout2KHR( vkGetInstanceProcAddr( instance, "vkGetImageSubresourceLayout2KHR" ) ); + //=== VK_AMD_anti_lag === + vkAntiLagUpdateAMD = PFN_vkAntiLagUpdateAMD( vkGetInstanceProcAddr( instance, "vkAntiLagUpdateAMD" ) ); + //=== VK_EXT_shader_object === vkCreateShadersEXT = PFN_vkCreateShadersEXT( vkGetInstanceProcAddr( instance, "vkCreateShadersEXT" ) ); vkDestroyShaderEXT = PFN_vkDestroyShaderEXT( vkGetInstanceProcAddr( instance, "vkDestroyShaderEXT" ) ); vkGetShaderBinaryDataEXT = PFN_vkGetShaderBinaryDataEXT( vkGetInstanceProcAddr( instance, "vkGetShaderBinaryDataEXT" ) ); vkCmdBindShadersEXT = PFN_vkCmdBindShadersEXT( vkGetInstanceProcAddr( instance, "vkCmdBindShadersEXT" ) ); + //=== VK_KHR_pipeline_binary === + vkCreatePipelineBinariesKHR = PFN_vkCreatePipelineBinariesKHR( vkGetInstanceProcAddr( instance, "vkCreatePipelineBinariesKHR" ) ); + 
vkDestroyPipelineBinaryKHR = PFN_vkDestroyPipelineBinaryKHR( vkGetInstanceProcAddr( instance, "vkDestroyPipelineBinaryKHR" ) ); + vkGetPipelineKeyKHR = PFN_vkGetPipelineKeyKHR( vkGetInstanceProcAddr( instance, "vkGetPipelineKeyKHR" ) ); + vkGetPipelineBinaryDataKHR = PFN_vkGetPipelineBinaryDataKHR( vkGetInstanceProcAddr( instance, "vkGetPipelineBinaryDataKHR" ) ); + vkReleaseCapturedPipelineDataKHR = PFN_vkReleaseCapturedPipelineDataKHR( vkGetInstanceProcAddr( instance, "vkReleaseCapturedPipelineDataKHR" ) ); + //=== VK_QCOM_tile_properties === vkGetFramebufferTilePropertiesQCOM = PFN_vkGetFramebufferTilePropertiesQCOM( vkGetInstanceProcAddr( instance, "vkGetFramebufferTilePropertiesQCOM" ) ); vkGetDynamicRenderingTilePropertiesQCOM = @@ -20313,12 +20505,22 @@ namespace VULKAN_HPP_NAMESPACE vkGetDeviceImageSubresourceLayoutKHR = PFN_vkGetDeviceImageSubresourceLayoutKHR( vkGetDeviceProcAddr( device, "vkGetDeviceImageSubresourceLayoutKHR" ) ); vkGetImageSubresourceLayout2KHR = PFN_vkGetImageSubresourceLayout2KHR( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout2KHR" ) ); + //=== VK_AMD_anti_lag === + vkAntiLagUpdateAMD = PFN_vkAntiLagUpdateAMD( vkGetDeviceProcAddr( device, "vkAntiLagUpdateAMD" ) ); + //=== VK_EXT_shader_object === vkCreateShadersEXT = PFN_vkCreateShadersEXT( vkGetDeviceProcAddr( device, "vkCreateShadersEXT" ) ); vkDestroyShaderEXT = PFN_vkDestroyShaderEXT( vkGetDeviceProcAddr( device, "vkDestroyShaderEXT" ) ); vkGetShaderBinaryDataEXT = PFN_vkGetShaderBinaryDataEXT( vkGetDeviceProcAddr( device, "vkGetShaderBinaryDataEXT" ) ); vkCmdBindShadersEXT = PFN_vkCmdBindShadersEXT( vkGetDeviceProcAddr( device, "vkCmdBindShadersEXT" ) ); + //=== VK_KHR_pipeline_binary === + vkCreatePipelineBinariesKHR = PFN_vkCreatePipelineBinariesKHR( vkGetDeviceProcAddr( device, "vkCreatePipelineBinariesKHR" ) ); + vkDestroyPipelineBinaryKHR = PFN_vkDestroyPipelineBinaryKHR( vkGetDeviceProcAddr( device, "vkDestroyPipelineBinaryKHR" ) ); + vkGetPipelineKeyKHR = PFN_vkGetPipelineKeyKHR( vkGetDeviceProcAddr( device, "vkGetPipelineKeyKHR" ) ); + vkGetPipelineBinaryDataKHR = PFN_vkGetPipelineBinaryDataKHR( vkGetDeviceProcAddr( device, "vkGetPipelineBinaryDataKHR" ) ); + vkReleaseCapturedPipelineDataKHR = PFN_vkReleaseCapturedPipelineDataKHR( vkGetDeviceProcAddr( device, "vkReleaseCapturedPipelineDataKHR" ) ); + //=== VK_QCOM_tile_properties === vkGetFramebufferTilePropertiesQCOM = PFN_vkGetFramebufferTilePropertiesQCOM( vkGetDeviceProcAddr( device, "vkGetFramebufferTilePropertiesQCOM" ) ); vkGetDynamicRenderingTilePropertiesQCOM = diff --git a/third_party/vulkan/vulkan_core.h b/third_party/vulkan/vulkan_core.h index 1f30669..6f1c17f 100644 --- a/third_party/vulkan/vulkan_core.h +++ b/third_party/vulkan/vulkan_core.h @@ -69,21 +69,25 @@ extern "C" { #define VK_API_VERSION_1_0 VK_MAKE_API_VERSION(0, 1, 0, 0)// Patch version should always be set to 0 // Version of this file -#define VK_HEADER_VERSION 281 +#define VK_HEADER_VERSION 295 // Complete version of this file #define VK_HEADER_VERSION_COMPLETE VK_MAKE_API_VERSION(0, 1, 3, VK_HEADER_VERSION) +// VK_MAKE_VERSION is deprecated, but no reason was given in the API XML // DEPRECATED: This define is deprecated. VK_MAKE_API_VERSION should be used instead. #define VK_MAKE_VERSION(major, minor, patch) \ ((((uint32_t)(major)) << 22U) | (((uint32_t)(minor)) << 12U) | ((uint32_t)(patch))) +// VK_VERSION_MAJOR is deprecated, but no reason was given in the API XML // DEPRECATED: This define is deprecated. VK_API_VERSION_MAJOR should be used instead. 
#define VK_VERSION_MAJOR(version) ((uint32_t)(version) >> 22U) +// VK_VERSION_MINOR is deprecated, but no reason was given in the API XML // DEPRECATED: This define is deprecated. VK_API_VERSION_MINOR should be used instead. #define VK_VERSION_MINOR(version) (((uint32_t)(version) >> 12U) & 0x3FFU) +// VK_VERSION_PATCH is deprecated, but no reason was given in the API XML // DEPRECATED: This define is deprecated. VK_API_VERSION_PATCH should be used instead. #define VK_VERSION_PATCH(version) ((uint32_t)(version) & 0xFFFU) @@ -185,6 +189,8 @@ typedef enum VkResult { VK_ERROR_INVALID_VIDEO_STD_PARAMETERS_KHR = -1000299000, VK_ERROR_COMPRESSION_EXHAUSTED_EXT = -1000338000, VK_INCOMPATIBLE_SHADER_BINARY_EXT = 1000482000, + VK_PIPELINE_BINARY_MISSING_KHR = 1000483000, + VK_ERROR_NOT_ENOUGH_SPACE_KHR = -1000483000, VK_ERROR_OUT_OF_POOL_MEMORY_KHR = VK_ERROR_OUT_OF_POOL_MEMORY, VK_ERROR_INVALID_EXTERNAL_HANDLE_KHR = VK_ERROR_INVALID_EXTERNAL_HANDLE, VK_ERROR_FRAGMENTATION_EXT = VK_ERROR_FRAGMENTATION, @@ -193,6 +199,7 @@ typedef enum VkResult { VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS_KHR = VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS, VK_PIPELINE_COMPILE_REQUIRED_EXT = VK_PIPELINE_COMPILE_REQUIRED, VK_ERROR_PIPELINE_COMPILE_REQUIRED_EXT = VK_PIPELINE_COMPILE_REQUIRED, + // VK_ERROR_INCOMPATIBLE_SHADER_BINARY_EXT is a deprecated alias VK_ERROR_INCOMPATIBLE_SHADER_BINARY_EXT = VK_INCOMPATIBLE_SHADER_BINARY_EXT, VK_RESULT_MAX_ENUM = 0x7FFFFFFF } VkResult; @@ -689,7 +696,6 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_DEVICE_MEMORY_OVERALLOCATION_CREATE_INFO_AMD = 1000189000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_EXT = 1000190000, VK_STRUCTURE_TYPE_PRESENT_FRAME_TOKEN_GGP = 1000191000, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_FEATURES_NV = 1000201000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_FEATURES_NV = 1000202000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_PROPERTIES_NV = 1000202001, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_IMAGE_FOOTPRINT_FEATURES_NV = 1000204000, @@ -1031,10 +1037,23 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_IMAGE_SUBRESOURCE_2_KHR = 1000338003, VK_STRUCTURE_TYPE_PIPELINE_CREATE_FLAGS_2_CREATE_INFO_KHR = 1000470005, VK_STRUCTURE_TYPE_BUFFER_USAGE_FLAGS_2_CREATE_INFO_KHR = 1000470006, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ANTI_LAG_FEATURES_AMD = 1000476000, + VK_STRUCTURE_TYPE_ANTI_LAG_DATA_AMD = 1000476001, + VK_STRUCTURE_TYPE_ANTI_LAG_PRESENTATION_INFO_AMD = 1000476002, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_POSITION_FETCH_FEATURES_KHR = 1000481000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_OBJECT_FEATURES_EXT = 1000482000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_OBJECT_PROPERTIES_EXT = 1000482001, VK_STRUCTURE_TYPE_SHADER_CREATE_INFO_EXT = 1000482002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_BINARY_FEATURES_KHR = 1000483000, + VK_STRUCTURE_TYPE_PIPELINE_BINARY_CREATE_INFO_KHR = 1000483001, + VK_STRUCTURE_TYPE_PIPELINE_BINARY_INFO_KHR = 1000483002, + VK_STRUCTURE_TYPE_PIPELINE_BINARY_KEY_KHR = 1000483003, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_BINARY_PROPERTIES_KHR = 1000483004, + VK_STRUCTURE_TYPE_RELEASE_CAPTURED_PIPELINE_DATA_INFO_KHR = 1000483005, + VK_STRUCTURE_TYPE_PIPELINE_BINARY_DATA_INFO_KHR = 1000483006, + VK_STRUCTURE_TYPE_PIPELINE_CREATE_INFO_KHR = 1000483007, + VK_STRUCTURE_TYPE_DEVICE_PIPELINE_BINARY_INTERNAL_CACHE_CONTROL_KHR = 1000483008, + VK_STRUCTURE_TYPE_PIPELINE_BINARY_HANDLES_INFO_KHR = 1000483009, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TILE_PROPERTIES_FEATURES_QCOM = 
1000484000, VK_STRUCTURE_TYPE_TILE_PROPERTIES_QCOM = 1000484001, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_AMIGO_PROFILING_FEATURES_SEC = 1000485000, @@ -1046,6 +1065,8 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_SPARSE_ADDRESS_SPACE_PROPERTIES_NV = 1000492001, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MUTABLE_DESCRIPTOR_TYPE_FEATURES_EXT = 1000351000, VK_STRUCTURE_TYPE_MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_EXT = 1000351002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LEGACY_VERTEX_ATTRIBUTES_FEATURES_EXT = 1000495000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LEGACY_VERTEX_ATTRIBUTES_PROPERTIES_EXT = 1000495001, VK_STRUCTURE_TYPE_LAYER_SETTINGS_CREATE_INFO_EXT = 1000496000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_BUILTINS_FEATURES_ARM = 1000497000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_BUILTINS_PROPERTIES_ARM = 1000497001, @@ -1065,6 +1086,8 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_PROPERTIES_KHR = 1000506002, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_RENDER_AREAS_FEATURES_QCOM = 1000510000, VK_STRUCTURE_TYPE_MULTIVIEW_PER_VIEW_RENDER_AREAS_RENDER_PASS_BEGIN_INFO_QCOM = 1000510001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_FEATURES_KHR = 1000201000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_PROPERTIES_KHR = 1000511000, VK_STRUCTURE_TYPE_VIDEO_DECODE_AV1_CAPABILITIES_KHR = 1000512000, VK_STRUCTURE_TYPE_VIDEO_DECODE_AV1_PICTURE_INFO_KHR = 1000512001, VK_STRUCTURE_TYPE_VIDEO_DECODE_AV1_PROFILE_INFO_KHR = 1000512003, @@ -1110,10 +1133,22 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_BIND_DESCRIPTOR_BUFFER_EMBEDDED_SAMPLERS_INFO_EXT = 1000545008, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_POOL_OVERALLOCATION_FEATURES_NV = 1000546000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAW_ACCESS_CHAINS_FEATURES_NV = 1000555000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_RELAXED_EXTENDED_INSTRUCTION_FEATURES_KHR = 1000558000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMMAND_BUFFER_INHERITANCE_FEATURES_NV = 1000559000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_7_FEATURES_KHR = 1000562000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_7_PROPERTIES_KHR = 1000562001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LAYERED_API_PROPERTIES_LIST_KHR = 1000562002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LAYERED_API_PROPERTIES_KHR = 1000562003, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LAYERED_API_VULKAN_PROPERTIES_KHR = 1000562004, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_FLOAT16_VECTOR_FEATURES_NV = 1000563000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_REPLICATED_COMPOSITES_FEATURES_EXT = 1000564000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_VALIDATION_FEATURES_NV = 1000568000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_ALIGNMENT_CONTROL_FEATURES_MESA = 1000575000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_ALIGNMENT_CONTROL_PROPERTIES_MESA = 1000575001, + VK_STRUCTURE_TYPE_IMAGE_ALIGNMENT_CONTROL_CREATE_INFO_MESA = 1000575002, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETER_FEATURES = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES, + // VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT is a deprecated alias VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT = VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT, VK_STRUCTURE_TYPE_RENDERING_INFO_KHR = VK_STRUCTURE_TYPE_RENDERING_INFO, VK_STRUCTURE_TYPE_RENDERING_ATTACHMENT_INFO_KHR = 
VK_STRUCTURE_TYPE_RENDERING_ATTACHMENT_INFO, @@ -1158,6 +1193,7 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT16_INT8_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES, VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO, + // VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES2_EXT is a deprecated alias VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES2_EXT = VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_EXT, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES, VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO, @@ -1220,6 +1256,7 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES, VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE_KHR = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_FEATURES_NV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_FEATURES_KHR, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_NV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_KHR, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES, @@ -1227,6 +1264,7 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO_KHR = VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO, VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO_KHR = VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO, VK_STRUCTURE_TYPE_SEMAPHORE_SIGNAL_INFO_KHR = VK_STRUCTURE_TYPE_SEMAPHORE_SIGNAL_INFO, + // VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO_INTEL is a deprecated alias VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO_INTEL = VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_QUERY_CREATE_INFO_INTEL, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_TERMINATE_INVOCATION_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_TERMINATE_INVOCATION_FEATURES, @@ -1397,6 +1435,7 @@ typedef enum VkObjectType { VK_OBJECT_TYPE_MICROMAP_EXT = 1000396000, VK_OBJECT_TYPE_OPTICAL_FLOW_SESSION_NV = 1000464000, VK_OBJECT_TYPE_SHADER_EXT = 1000482000, + VK_OBJECT_TYPE_PIPELINE_BINARY_KHR = 1000483000, VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR = VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE, VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR = VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION, VK_OBJECT_TYPE_PRIVATE_DATA_SLOT_EXT = VK_OBJECT_TYPE_PRIVATE_DATA_SLOT, @@ -1404,6 +1443,7 @@ typedef enum VkObjectType { } VkObjectType; typedef enum VkVendorId { + VK_VENDOR_ID_KHRONOS = 0x10000, VK_VENDOR_ID_VIV = 0x10001, VK_VENDOR_ID_VSI = 0x10002, VK_VENDOR_ID_KAZAN = 0x10003, @@ -1676,7 +1716,7 @@ typedef enum VkFormat { VK_FORMAT_PVRTC1_4BPP_SRGB_BLOCK_IMG = 1000054005, VK_FORMAT_PVRTC2_2BPP_SRGB_BLOCK_IMG = 1000054006, 
VK_FORMAT_PVRTC2_4BPP_SRGB_BLOCK_IMG = 1000054007, - VK_FORMAT_R16G16_S10_5_NV = 1000464000, + VK_FORMAT_R16G16_SFIXED5_NV = 1000464000, VK_FORMAT_A1B5G5R5_UNORM_PACK16_KHR = 1000470000, VK_FORMAT_A8_UNORM_KHR = 1000470001, VK_FORMAT_ASTC_4x4_SFLOAT_BLOCK_EXT = VK_FORMAT_ASTC_4x4_SFLOAT_BLOCK, @@ -1733,6 +1773,8 @@ typedef enum VkFormat { VK_FORMAT_G16_B16R16_2PLANE_444_UNORM_EXT = VK_FORMAT_G16_B16R16_2PLANE_444_UNORM, VK_FORMAT_A4R4G4B4_UNORM_PACK16_EXT = VK_FORMAT_A4R4G4B4_UNORM_PACK16, VK_FORMAT_A4B4G4R4_UNORM_PACK16_EXT = VK_FORMAT_A4B4G4R4_UNORM_PACK16, + // VK_FORMAT_R16G16_S10_5_NV is a deprecated alias + VK_FORMAT_R16G16_S10_5_NV = VK_FORMAT_R16G16_SFIXED5_NV, VK_FORMAT_MAX_ENUM = 0x7FFFFFFF } VkFormat; @@ -2083,6 +2125,7 @@ typedef enum VkSamplerAddressMode { VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE = 2, VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER = 3, VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE = 4, + // VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE_KHR is a deprecated alias VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE_KHR = VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE, VK_SAMPLER_ADDRESS_MODE_MAX_ENUM = 0x7FFFFFFF } VkSamplerAddressMode; @@ -2166,7 +2209,8 @@ typedef enum VkIndexType { typedef enum VkSubpassContents { VK_SUBPASS_CONTENTS_INLINE = 0, VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS = 1, - VK_SUBPASS_CONTENTS_INLINE_AND_SECONDARY_COMMAND_BUFFERS_EXT = 1000451000, + VK_SUBPASS_CONTENTS_INLINE_AND_SECONDARY_COMMAND_BUFFERS_KHR = 1000451000, + VK_SUBPASS_CONTENTS_INLINE_AND_SECONDARY_COMMAND_BUFFERS_EXT = VK_SUBPASS_CONTENTS_INLINE_AND_SECONDARY_COMMAND_BUFFERS_KHR, VK_SUBPASS_CONTENTS_MAX_ENUM = 0x7FFFFFFF } VkSubpassContents; @@ -2607,7 +2651,9 @@ typedef enum VkPipelineCreateFlagBits { VK_PIPELINE_CREATE_NO_PROTECTED_ACCESS_BIT_EXT = 0x08000000, VK_PIPELINE_CREATE_PROTECTED_ACCESS_ONLY_BIT_EXT = 0x40000000, VK_PIPELINE_CREATE_DISPATCH_BASE = VK_PIPELINE_CREATE_DISPATCH_BASE_BIT, + // VK_PIPELINE_RASTERIZATION_STATE_CREATE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR is a deprecated alias VK_PIPELINE_RASTERIZATION_STATE_CREATE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR = VK_PIPELINE_CREATE_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR, + // VK_PIPELINE_RASTERIZATION_STATE_CREATE_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT is a deprecated alias VK_PIPELINE_RASTERIZATION_STATE_CREATE_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT = VK_PIPELINE_CREATE_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT, VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT_KHR = VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT, VK_PIPELINE_CREATE_DISPATCH_BASE_KHR = VK_PIPELINE_CREATE_DISPATCH_BASE, @@ -2816,6 +2862,7 @@ typedef enum VkStencilFaceFlagBits { VK_STENCIL_FACE_FRONT_BIT = 0x00000001, VK_STENCIL_FACE_BACK_BIT = 0x00000002, VK_STENCIL_FACE_FRONT_AND_BACK = 0x00000003, + // VK_STENCIL_FRONT_AND_BACK is a deprecated alias VK_STENCIL_FRONT_AND_BACK = VK_STENCIL_FACE_FRONT_AND_BACK, VK_STENCIL_FACE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF } VkStencilFaceFlagBits; @@ -3226,7 +3273,9 @@ typedef struct VkDeviceCreateInfo { VkDeviceCreateFlags flags; uint32_t queueCreateInfoCount; const VkDeviceQueueCreateInfo* pQueueCreateInfos; + // enabledLayerCount is deprecated and should not be used uint32_t enabledLayerCount; + // ppEnabledLayerNames is deprecated and should not be used const char* const* ppEnabledLayerNames; uint32_t enabledExtensionCount; const char* const* ppEnabledExtensionNames; @@ -5784,7 +5833,8 @@ typedef enum VkDriverId { VK_DRIVER_ID_MESA_DOZEN = 23, VK_DRIVER_ID_MESA_NVK = 24, 
VK_DRIVER_ID_IMAGINATION_OPEN_SOURCE_MESA = 25, - VK_DRIVER_ID_MESA_AGXV = 26, + VK_DRIVER_ID_MESA_HONEYKRISP = 26, + VK_DRIVER_ID_RESERVED_27 = 27, VK_DRIVER_ID_AMD_PROPRIETARY_KHR = VK_DRIVER_ID_AMD_PROPRIETARY, VK_DRIVER_ID_AMD_OPEN_SOURCE_KHR = VK_DRIVER_ID_AMD_OPEN_SOURCE, VK_DRIVER_ID_MESA_RADV_KHR = VK_DRIVER_ID_MESA_RADV, @@ -6617,6 +6667,7 @@ static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_MESH_SHADER_BIT_NV = 0 static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_TASK_SHADER_BIT_EXT = 0x00080000ULL; static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_MESH_SHADER_BIT_EXT = 0x00100000ULL; static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_SUBPASS_SHADER_BIT_HUAWEI = 0x8000000000ULL; +// VK_PIPELINE_STAGE_2_SUBPASS_SHADING_BIT_HUAWEI is a deprecated alias static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_SUBPASS_SHADING_BIT_HUAWEI = 0x8000000000ULL; static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_INVOCATION_MASK_BIT_HUAWEI = 0x10000000000ULL; static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_COPY_BIT_KHR = 0x10000000ULL; @@ -6708,11 +6759,12 @@ typedef enum VkRenderingFlagBits { VK_RENDERING_CONTENTS_SECONDARY_COMMAND_BUFFERS_BIT = 0x00000001, VK_RENDERING_SUSPENDING_BIT = 0x00000002, VK_RENDERING_RESUMING_BIT = 0x00000004, - VK_RENDERING_CONTENTS_INLINE_BIT_EXT = 0x00000010, VK_RENDERING_ENABLE_LEGACY_DITHERING_BIT_EXT = 0x00000008, + VK_RENDERING_CONTENTS_INLINE_BIT_KHR = 0x00000010, VK_RENDERING_CONTENTS_SECONDARY_COMMAND_BUFFERS_BIT_KHR = VK_RENDERING_CONTENTS_SECONDARY_COMMAND_BUFFERS_BIT, VK_RENDERING_SUSPENDING_BIT_KHR = VK_RENDERING_SUSPENDING_BIT, VK_RENDERING_RESUMING_BIT_KHR = VK_RENDERING_RESUMING_BIT, + VK_RENDERING_CONTENTS_INLINE_BIT_EXT = VK_RENDERING_CONTENTS_INLINE_BIT_KHR, VK_RENDERING_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF } VkRenderingFlagBits; typedef VkFlags VkRenderingFlags; @@ -7569,6 +7621,7 @@ typedef enum VkColorSpaceKHR { VK_COLOR_SPACE_BT709_NONLINEAR_EXT = 1000104006, VK_COLOR_SPACE_BT2020_LINEAR_EXT = 1000104007, VK_COLOR_SPACE_HDR10_ST2084_EXT = 1000104008, + // VK_COLOR_SPACE_DOLBYVISION_EXT is deprecated, but no reason was given in the API XML VK_COLOR_SPACE_DOLBYVISION_EXT = 1000104009, VK_COLOR_SPACE_HDR10_HLG_EXT = 1000104010, VK_COLOR_SPACE_ADOBERGB_LINEAR_EXT = 1000104011, @@ -7576,7 +7629,9 @@ typedef enum VkColorSpaceKHR { VK_COLOR_SPACE_PASS_THROUGH_EXT = 1000104013, VK_COLOR_SPACE_EXTENDED_SRGB_NONLINEAR_EXT = 1000104014, VK_COLOR_SPACE_DISPLAY_NATIVE_AMD = 1000213000, + // VK_COLORSPACE_SRGB_NONLINEAR_KHR is a deprecated alias VK_COLORSPACE_SRGB_NONLINEAR_KHR = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR, + // VK_COLOR_SPACE_DCI_P3_LINEAR_EXT is a deprecated alias VK_COLOR_SPACE_DCI_P3_LINEAR_EXT = VK_COLOR_SPACE_DISPLAY_P3_LINEAR_EXT, VK_COLOR_SPACE_MAX_ENUM_KHR = 0x7FFFFFFF } VkColorSpaceKHR; @@ -9007,7 +9062,9 @@ VKAPI_ATTR void VKAPI_CALL vkCmdDispatchBaseKHR( #define VK_KHR_maintenance1 1 #define VK_KHR_MAINTENANCE_1_SPEC_VERSION 2 #define VK_KHR_MAINTENANCE_1_EXTENSION_NAME "VK_KHR_maintenance1" +// VK_KHR_MAINTENANCE1_SPEC_VERSION is a deprecated alias #define VK_KHR_MAINTENANCE1_SPEC_VERSION VK_KHR_MAINTENANCE_1_SPEC_VERSION +// VK_KHR_MAINTENANCE1_EXTENSION_NAME is a deprecated alias #define VK_KHR_MAINTENANCE1_EXTENSION_NAME VK_KHR_MAINTENANCE_1_EXTENSION_NAME typedef VkCommandPoolTrimFlags VkCommandPoolTrimFlagsKHR; @@ -9489,8 +9546,11 @@ typedef enum VkPerformanceCounterScopeKHR { VK_PERFORMANCE_COUNTER_SCOPE_COMMAND_BUFFER_KHR = 0, 
VK_PERFORMANCE_COUNTER_SCOPE_RENDER_PASS_KHR = 1, VK_PERFORMANCE_COUNTER_SCOPE_COMMAND_KHR = 2, + // VK_QUERY_SCOPE_COMMAND_BUFFER_KHR is a deprecated alias VK_QUERY_SCOPE_COMMAND_BUFFER_KHR = VK_PERFORMANCE_COUNTER_SCOPE_COMMAND_BUFFER_KHR, + // VK_QUERY_SCOPE_RENDER_PASS_KHR is a deprecated alias VK_QUERY_SCOPE_RENDER_PASS_KHR = VK_PERFORMANCE_COUNTER_SCOPE_RENDER_PASS_KHR, + // VK_QUERY_SCOPE_COMMAND_KHR is a deprecated alias VK_QUERY_SCOPE_COMMAND_KHR = VK_PERFORMANCE_COUNTER_SCOPE_COMMAND_KHR, VK_PERFORMANCE_COUNTER_SCOPE_MAX_ENUM_KHR = 0x7FFFFFFF } VkPerformanceCounterScopeKHR; @@ -9508,7 +9568,9 @@ typedef enum VkPerformanceCounterStorageKHR { typedef enum VkPerformanceCounterDescriptionFlagBitsKHR { VK_PERFORMANCE_COUNTER_DESCRIPTION_PERFORMANCE_IMPACTING_BIT_KHR = 0x00000001, VK_PERFORMANCE_COUNTER_DESCRIPTION_CONCURRENTLY_IMPACTED_BIT_KHR = 0x00000002, + // VK_PERFORMANCE_COUNTER_DESCRIPTION_PERFORMANCE_IMPACTING_KHR is a deprecated alias VK_PERFORMANCE_COUNTER_DESCRIPTION_PERFORMANCE_IMPACTING_KHR = VK_PERFORMANCE_COUNTER_DESCRIPTION_PERFORMANCE_IMPACTING_BIT_KHR, + // VK_PERFORMANCE_COUNTER_DESCRIPTION_CONCURRENTLY_IMPACTED_KHR is a deprecated alias VK_PERFORMANCE_COUNTER_DESCRIPTION_CONCURRENTLY_IMPACTED_KHR = VK_PERFORMANCE_COUNTER_DESCRIPTION_CONCURRENTLY_IMPACTED_BIT_KHR, VK_PERFORMANCE_COUNTER_DESCRIPTION_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF } VkPerformanceCounterDescriptionFlagBitsKHR; @@ -9610,7 +9672,9 @@ VKAPI_ATTR void VKAPI_CALL vkReleaseProfilingLockKHR( #define VK_KHR_maintenance2 1 #define VK_KHR_MAINTENANCE_2_SPEC_VERSION 1 #define VK_KHR_MAINTENANCE_2_EXTENSION_NAME "VK_KHR_maintenance2" +// VK_KHR_MAINTENANCE2_SPEC_VERSION is a deprecated alias #define VK_KHR_MAINTENANCE2_SPEC_VERSION VK_KHR_MAINTENANCE_2_SPEC_VERSION +// VK_KHR_MAINTENANCE2_EXTENSION_NAME is a deprecated alias #define VK_KHR_MAINTENANCE2_EXTENSION_NAME VK_KHR_MAINTENANCE_2_EXTENSION_NAME typedef VkPointClippingBehavior VkPointClippingBehaviorKHR; @@ -9877,7 +9941,9 @@ VKAPI_ATTR VkResult VKAPI_CALL vkBindImageMemory2KHR( #define VK_KHR_maintenance3 1 #define VK_KHR_MAINTENANCE_3_SPEC_VERSION 1 #define VK_KHR_MAINTENANCE_3_EXTENSION_NAME "VK_KHR_maintenance3" +// VK_KHR_MAINTENANCE3_SPEC_VERSION is a deprecated alias #define VK_KHR_MAINTENANCE3_SPEC_VERSION VK_KHR_MAINTENANCE_3_SPEC_VERSION +// VK_KHR_MAINTENANCE3_EXTENSION_NAME is a deprecated alias #define VK_KHR_MAINTENANCE3_EXTENSION_NAME VK_KHR_MAINTENANCE_3_EXTENSION_NAME typedef VkPhysicalDeviceMaintenance3Properties VkPhysicalDeviceMaintenance3PropertiesKHR; @@ -10259,7 +10325,7 @@ typedef struct VkRenderingInputAttachmentIndexInfoKHR { } VkRenderingInputAttachmentIndexInfoKHR; typedef void (VKAPI_PTR *PFN_vkCmdSetRenderingAttachmentLocationsKHR)(VkCommandBuffer commandBuffer, const VkRenderingAttachmentLocationInfoKHR* pLocationInfo); -typedef void (VKAPI_PTR *PFN_vkCmdSetRenderingInputAttachmentIndicesKHR)(VkCommandBuffer commandBuffer, const VkRenderingInputAttachmentIndexInfoKHR* pLocationInfo); +typedef void (VKAPI_PTR *PFN_vkCmdSetRenderingInputAttachmentIndicesKHR)(VkCommandBuffer commandBuffer, const VkRenderingInputAttachmentIndexInfoKHR* pInputAttachmentIndexInfo); #ifndef VK_NO_PROTOTYPES VKAPI_ATTR void VKAPI_CALL vkCmdSetRenderingAttachmentLocationsKHR( @@ -10268,7 +10334,7 @@ VKAPI_ATTR void VKAPI_CALL vkCmdSetRenderingAttachmentLocationsKHR( VKAPI_ATTR void VKAPI_CALL vkCmdSetRenderingInputAttachmentIndicesKHR( VkCommandBuffer commandBuffer, - const VkRenderingInputAttachmentIndexInfoKHR* pLocationInfo); + const 
VkRenderingInputAttachmentIndexInfoKHR* pInputAttachmentIndexInfo); #endif @@ -10605,10 +10671,6 @@ typedef enum VkVideoEncodeTuningModeKHR { VK_VIDEO_ENCODE_TUNING_MODE_LOSSLESS_KHR = 4, VK_VIDEO_ENCODE_TUNING_MODE_MAX_ENUM_KHR = 0x7FFFFFFF } VkVideoEncodeTuningModeKHR; - -typedef enum VkVideoEncodeFlagBitsKHR { - VK_VIDEO_ENCODE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF -} VkVideoEncodeFlagBitsKHR; typedef VkFlags VkVideoEncodeFlagsKHR; typedef enum VkVideoEncodeCapabilityFlagBitsKHR { @@ -11109,6 +11171,7 @@ typedef VkFlags64 VkPipelineCreateFlagBits2KHR; static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_DISABLE_OPTIMIZATION_BIT_KHR = 0x00000001ULL; static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_ALLOW_DERIVATIVES_BIT_KHR = 0x00000002ULL; static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_DERIVATIVE_BIT_KHR = 0x00000004ULL; +static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_ENABLE_LEGACY_DITHERING_BIT_EXT = 0x400000000ULL; static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_VIEW_INDEX_FROM_DEVICE_INDEX_BIT_KHR = 0x00000008ULL; static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_DISPATCH_BASE_BIT_KHR = 0x00000010ULL; static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_DEFER_COMPILE_BIT_NV = 0x00000020ULL; @@ -11137,6 +11200,7 @@ static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_NO_PROTECTED_ACCE static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_PROTECTED_ACCESS_ONLY_BIT_EXT = 0x40000000ULL; static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_RAY_TRACING_DISPLACEMENT_MICROMAP_BIT_NV = 0x10000000ULL; static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_DESCRIPTOR_BUFFER_BIT_EXT = 0x20000000ULL; +static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_CAPTURE_DATA_BIT_KHR = 0x80000000ULL; typedef VkFlags64 VkBufferUsageFlags2KHR; @@ -11271,6 +11335,128 @@ typedef struct VkPhysicalDeviceRayTracingPositionFetchFeaturesKHR { +// VK_KHR_pipeline_binary is a preprocessor guard. Do not pass it to API calls. 
+#define VK_KHR_pipeline_binary 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkPipelineBinaryKHR) +#define VK_MAX_PIPELINE_BINARY_KEY_SIZE_KHR 32U +#define VK_KHR_PIPELINE_BINARY_SPEC_VERSION 1 +#define VK_KHR_PIPELINE_BINARY_EXTENSION_NAME "VK_KHR_pipeline_binary" +typedef struct VkPhysicalDevicePipelineBinaryFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 pipelineBinaries; +} VkPhysicalDevicePipelineBinaryFeaturesKHR; + +typedef struct VkPhysicalDevicePipelineBinaryPropertiesKHR { + VkStructureType sType; + void* pNext; + VkBool32 pipelineBinaryInternalCache; + VkBool32 pipelineBinaryInternalCacheControl; + VkBool32 pipelineBinaryPrefersInternalCache; + VkBool32 pipelineBinaryPrecompiledInternalCache; + VkBool32 pipelineBinaryCompressedData; +} VkPhysicalDevicePipelineBinaryPropertiesKHR; + +typedef struct VkDevicePipelineBinaryInternalCacheControlKHR { + VkStructureType sType; + const void* pNext; + VkBool32 disableInternalCache; +} VkDevicePipelineBinaryInternalCacheControlKHR; + +typedef struct VkPipelineBinaryKeyKHR { + VkStructureType sType; + void* pNext; + uint32_t keySize; + uint8_t key[VK_MAX_PIPELINE_BINARY_KEY_SIZE_KHR]; +} VkPipelineBinaryKeyKHR; + +typedef struct VkPipelineBinaryDataKHR { + size_t dataSize; + void* pData; +} VkPipelineBinaryDataKHR; + +typedef struct VkPipelineBinaryKeysAndDataKHR { + uint32_t binaryCount; + const VkPipelineBinaryKeyKHR* pPipelineBinaryKeys; + const VkPipelineBinaryDataKHR* pPipelineBinaryData; +} VkPipelineBinaryKeysAndDataKHR; + +typedef struct VkPipelineCreateInfoKHR { + VkStructureType sType; + void* pNext; +} VkPipelineCreateInfoKHR; + +typedef struct VkPipelineBinaryCreateInfoKHR { + VkStructureType sType; + const void* pNext; + const VkPipelineBinaryKeysAndDataKHR* pKeysAndDataInfo; + VkPipeline pipeline; + const VkPipelineCreateInfoKHR* pPipelineCreateInfo; +} VkPipelineBinaryCreateInfoKHR; + +typedef struct VkPipelineBinaryInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t binaryCount; + const VkPipelineBinaryKHR* pPipelineBinaries; +} VkPipelineBinaryInfoKHR; + +typedef struct VkReleaseCapturedPipelineDataInfoKHR { + VkStructureType sType; + void* pNext; + VkPipeline pipeline; +} VkReleaseCapturedPipelineDataInfoKHR; + +typedef struct VkPipelineBinaryDataInfoKHR { + VkStructureType sType; + void* pNext; + VkPipelineBinaryKHR pipelineBinary; +} VkPipelineBinaryDataInfoKHR; + +typedef struct VkPipelineBinaryHandlesInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t pipelineBinaryCount; + VkPipelineBinaryKHR* pPipelineBinaries; +} VkPipelineBinaryHandlesInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkCreatePipelineBinariesKHR)(VkDevice device, const VkPipelineBinaryCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPipelineBinaryHandlesInfoKHR* pBinaries); +typedef void (VKAPI_PTR *PFN_vkDestroyPipelineBinaryKHR)(VkDevice device, VkPipelineBinaryKHR pipelineBinary, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkGetPipelineKeyKHR)(VkDevice device, const VkPipelineCreateInfoKHR* pPipelineCreateInfo, VkPipelineBinaryKeyKHR* pPipelineKey); +typedef VkResult (VKAPI_PTR *PFN_vkGetPipelineBinaryDataKHR)(VkDevice device, const VkPipelineBinaryDataInfoKHR* pInfo, VkPipelineBinaryKeyKHR* pPipelineBinaryKey, size_t* pPipelineBinaryDataSize, void* pPipelineBinaryData); +typedef VkResult (VKAPI_PTR *PFN_vkReleaseCapturedPipelineDataKHR)(VkDevice device, const VkReleaseCapturedPipelineDataInfoKHR* pInfo, const VkAllocationCallbacks* pAllocator); + +#ifndef 
VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreatePipelineBinariesKHR( + VkDevice device, + const VkPipelineBinaryCreateInfoKHR* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkPipelineBinaryHandlesInfoKHR* pBinaries); + +VKAPI_ATTR void VKAPI_CALL vkDestroyPipelineBinaryKHR( + VkDevice device, + VkPipelineBinaryKHR pipelineBinary, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPipelineKeyKHR( + VkDevice device, + const VkPipelineCreateInfoKHR* pPipelineCreateInfo, + VkPipelineBinaryKeyKHR* pPipelineKey); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPipelineBinaryDataKHR( + VkDevice device, + const VkPipelineBinaryDataInfoKHR* pInfo, + VkPipelineBinaryKeyKHR* pPipelineBinaryKey, + size_t* pPipelineBinaryDataSize, + void* pPipelineBinaryData); + +VKAPI_ATTR VkResult VKAPI_CALL vkReleaseCapturedPipelineDataKHR( + VkDevice device, + const VkReleaseCapturedPipelineDataInfoKHR* pInfo, + const VkAllocationCallbacks* pAllocator); +#endif + + // VK_KHR_cooperative_matrix is a preprocessor guard. Do not pass it to API calls. #define VK_KHR_cooperative_matrix 1 #define VK_KHR_COOPERATIVE_MATRIX_SPEC_VERSION 2 @@ -11350,6 +11536,25 @@ VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR #endif +// VK_KHR_compute_shader_derivatives is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_compute_shader_derivatives 1 +#define VK_KHR_COMPUTE_SHADER_DERIVATIVES_SPEC_VERSION 1 +#define VK_KHR_COMPUTE_SHADER_DERIVATIVES_EXTENSION_NAME "VK_KHR_compute_shader_derivatives" +typedef struct VkPhysicalDeviceComputeShaderDerivativesFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 computeDerivativeGroupQuads; + VkBool32 computeDerivativeGroupLinear; +} VkPhysicalDeviceComputeShaderDerivativesFeaturesKHR; + +typedef struct VkPhysicalDeviceComputeShaderDerivativesPropertiesKHR { + VkStructureType sType; + void* pNext; + VkBool32 meshAndTaskShaderDerivatives; +} VkPhysicalDeviceComputeShaderDerivativesPropertiesKHR; + + + // VK_KHR_video_decode_av1 is a preprocessor guard. Do not pass it to API calls. #define VK_KHR_video_decode_av1 1 #include "vk_video/vulkan_video_codec_av1std.h" @@ -11699,6 +11904,74 @@ VKAPI_ATTR void VKAPI_CALL vkCmdBindDescriptorBufferEmbeddedSamplers2EXT( #endif +// VK_KHR_shader_relaxed_extended_instruction is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_shader_relaxed_extended_instruction 1 +#define VK_KHR_SHADER_RELAXED_EXTENDED_INSTRUCTION_SPEC_VERSION 1 +#define VK_KHR_SHADER_RELAXED_EXTENDED_INSTRUCTION_EXTENSION_NAME "VK_KHR_shader_relaxed_extended_instruction" +typedef struct VkPhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 shaderRelaxedExtendedInstruction; +} VkPhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR; + + + +// VK_KHR_maintenance7 is a preprocessor guard. Do not pass it to API calls. 
+#define VK_KHR_maintenance7 1 +#define VK_KHR_MAINTENANCE_7_SPEC_VERSION 1 +#define VK_KHR_MAINTENANCE_7_EXTENSION_NAME "VK_KHR_maintenance7" + +typedef enum VkPhysicalDeviceLayeredApiKHR { + VK_PHYSICAL_DEVICE_LAYERED_API_VULKAN_KHR = 0, + VK_PHYSICAL_DEVICE_LAYERED_API_D3D12_KHR = 1, + VK_PHYSICAL_DEVICE_LAYERED_API_METAL_KHR = 2, + VK_PHYSICAL_DEVICE_LAYERED_API_OPENGL_KHR = 3, + VK_PHYSICAL_DEVICE_LAYERED_API_OPENGLES_KHR = 4, + VK_PHYSICAL_DEVICE_LAYERED_API_MAX_ENUM_KHR = 0x7FFFFFFF +} VkPhysicalDeviceLayeredApiKHR; +typedef struct VkPhysicalDeviceMaintenance7FeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 maintenance7; +} VkPhysicalDeviceMaintenance7FeaturesKHR; + +typedef struct VkPhysicalDeviceMaintenance7PropertiesKHR { + VkStructureType sType; + void* pNext; + VkBool32 robustFragmentShadingRateAttachmentAccess; + VkBool32 separateDepthStencilAttachmentAccess; + uint32_t maxDescriptorSetTotalUniformBuffersDynamic; + uint32_t maxDescriptorSetTotalStorageBuffersDynamic; + uint32_t maxDescriptorSetTotalBuffersDynamic; + uint32_t maxDescriptorSetUpdateAfterBindTotalUniformBuffersDynamic; + uint32_t maxDescriptorSetUpdateAfterBindTotalStorageBuffersDynamic; + uint32_t maxDescriptorSetUpdateAfterBindTotalBuffersDynamic; +} VkPhysicalDeviceMaintenance7PropertiesKHR; + +typedef struct VkPhysicalDeviceLayeredApiPropertiesKHR { + VkStructureType sType; + void* pNext; + uint32_t vendorID; + uint32_t deviceID; + VkPhysicalDeviceLayeredApiKHR layeredAPI; + char deviceName[VK_MAX_PHYSICAL_DEVICE_NAME_SIZE]; +} VkPhysicalDeviceLayeredApiPropertiesKHR; + +typedef struct VkPhysicalDeviceLayeredApiPropertiesListKHR { + VkStructureType sType; + void* pNext; + uint32_t layeredApiCount; + VkPhysicalDeviceLayeredApiPropertiesKHR* pLayeredApis; +} VkPhysicalDeviceLayeredApiPropertiesListKHR; + +typedef struct VkPhysicalDeviceLayeredApiVulkanPropertiesKHR { + VkStructureType sType; + void* pNext; + VkPhysicalDeviceProperties2 properties; +} VkPhysicalDeviceLayeredApiVulkanPropertiesKHR; + + + // VK_EXT_debug_report is a preprocessor guard. Do not pass it to API calls. 
#define VK_EXT_debug_report 1 VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDebugReportCallbackEXT) @@ -11747,7 +12020,9 @@ typedef enum VkDebugReportObjectTypeEXT { VK_DEBUG_REPORT_OBJECT_TYPE_CUDA_MODULE_NV_EXT = 1000307000, VK_DEBUG_REPORT_OBJECT_TYPE_CUDA_FUNCTION_NV_EXT = 1000307001, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_COLLECTION_FUCHSIA_EXT = 1000366000, + // VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_EXT is a deprecated alias VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_EXT = VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT_EXT, + // VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT is a deprecated alias VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT = VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR_EXT = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR_EXT = VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT, @@ -12519,6 +12794,7 @@ VKAPI_ATTR VkResult VKAPI_CALL vkReleaseDisplayEXT( typedef enum VkSurfaceCounterFlagBitsEXT { VK_SURFACE_COUNTER_VBLANK_BIT_EXT = 0x00000001, + // VK_SURFACE_COUNTER_VBLANK_EXT is a deprecated alias VK_SURFACE_COUNTER_VBLANK_EXT = VK_SURFACE_COUNTER_VBLANK_BIT_EXT, VK_SURFACE_COUNTER_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF } VkSurfaceCounterFlagBitsEXT; @@ -12687,7 +12963,9 @@ VKAPI_ATTR VkResult VKAPI_CALL vkGetPastPresentationTimingGOOGLE( #define VK_NV_viewport_array2 1 #define VK_NV_VIEWPORT_ARRAY_2_SPEC_VERSION 1 #define VK_NV_VIEWPORT_ARRAY_2_EXTENSION_NAME "VK_NV_viewport_array2" +// VK_NV_VIEWPORT_ARRAY2_SPEC_VERSION is a deprecated alias #define VK_NV_VIEWPORT_ARRAY2_SPEC_VERSION VK_NV_VIEWPORT_ARRAY_2_SPEC_VERSION +// VK_NV_VIEWPORT_ARRAY2_EXTENSION_NAME is a deprecated alias #define VK_NV_VIEWPORT_ARRAY2_EXTENSION_NAME VK_NV_VIEWPORT_ARRAY_2_EXTENSION_NAME @@ -12842,13 +13120,13 @@ typedef struct VkPipelineRasterizationDepthClipStateCreateInfoEXT { // VK_EXT_swapchain_colorspace is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_swapchain_colorspace 1 -#define VK_EXT_SWAPCHAIN_COLOR_SPACE_SPEC_VERSION 4 +#define VK_EXT_SWAPCHAIN_COLOR_SPACE_SPEC_VERSION 5 #define VK_EXT_SWAPCHAIN_COLOR_SPACE_EXTENSION_NAME "VK_EXT_swapchain_colorspace" // VK_EXT_hdr_metadata is a preprocessor guard. Do not pass it to API calls. 
#define VK_EXT_hdr_metadata 1 -#define VK_EXT_HDR_METADATA_SPEC_VERSION 2 +#define VK_EXT_HDR_METADATA_SPEC_VERSION 3 #define VK_EXT_HDR_METADATA_EXTENSION_NAME "VK_EXT_hdr_metadata" typedef struct VkXYColorEXT { float x; @@ -14145,12 +14423,7 @@ typedef VkPipelineCreationFeedback VkPipelineCreationFeedbackEXT; #define VK_NV_compute_shader_derivatives 1 #define VK_NV_COMPUTE_SHADER_DERIVATIVES_SPEC_VERSION 1 #define VK_NV_COMPUTE_SHADER_DERIVATIVES_EXTENSION_NAME "VK_NV_compute_shader_derivatives" -typedef struct VkPhysicalDeviceComputeShaderDerivativesFeaturesNV { - VkStructureType sType; - void* pNext; - VkBool32 computeDerivativeGroupQuads; - VkBool32 computeDerivativeGroupLinear; -} VkPhysicalDeviceComputeShaderDerivativesFeaturesNV; +typedef VkPhysicalDeviceComputeShaderDerivativesFeaturesKHR VkPhysicalDeviceComputeShaderDerivativesFeaturesNV; @@ -14535,7 +14808,9 @@ typedef VkPhysicalDeviceScalarBlockLayoutFeatures VkPhysicalDeviceScalarBlockLay #define VK_GOOGLE_hlsl_functionality1 1 #define VK_GOOGLE_HLSL_FUNCTIONALITY_1_SPEC_VERSION 1 #define VK_GOOGLE_HLSL_FUNCTIONALITY_1_EXTENSION_NAME "VK_GOOGLE_hlsl_functionality1" +// VK_GOOGLE_HLSL_FUNCTIONALITY1_SPEC_VERSION is a deprecated alias #define VK_GOOGLE_HLSL_FUNCTIONALITY1_SPEC_VERSION VK_GOOGLE_HLSL_FUNCTIONALITY_1_SPEC_VERSION +// VK_GOOGLE_HLSL_FUNCTIONALITY1_EXTENSION_NAME is a deprecated alias #define VK_GOOGLE_HLSL_FUNCTIONALITY1_EXTENSION_NAME VK_GOOGLE_HLSL_FUNCTIONALITY_1_EXTENSION_NAME @@ -16052,14 +16327,14 @@ typedef struct VkDescriptorAddressInfoEXT { typedef struct VkDescriptorBufferBindingInfoEXT { VkStructureType sType; - void* pNext; + const void* pNext; VkDeviceAddress address; VkBufferUsageFlags usage; } VkDescriptorBufferBindingInfoEXT; typedef struct VkDescriptorBufferBindingPushDescriptorBufferHandleEXT { VkStructureType sType; - void* pNext; + const void* pNext; VkBuffer buffer; } VkDescriptorBufferBindingPushDescriptorBufferHandleEXT; @@ -17691,7 +17966,7 @@ typedef struct VkRenderPassStripeSubmitInfoARM { // VK_QCOM_fragment_density_map_offset is a preprocessor guard. Do not pass it to API calls. #define VK_QCOM_fragment_density_map_offset 1 -#define VK_QCOM_FRAGMENT_DENSITY_MAP_OFFSET_SPEC_VERSION 1 +#define VK_QCOM_FRAGMENT_DENSITY_MAP_OFFSET_SPEC_VERSION 2 #define VK_QCOM_FRAGMENT_DENSITY_MAP_OFFSET_EXTENSION_NAME "VK_QCOM_fragment_density_map_offset" typedef struct VkPhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM { VkStructureType sType; @@ -18502,7 +18777,7 @@ VKAPI_ATTR void VKAPI_CALL vkCmdOpticalFlowExecuteNV( // VK_EXT_legacy_dithering is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_legacy_dithering 1 -#define VK_EXT_LEGACY_DITHERING_SPEC_VERSION 1 +#define VK_EXT_LEGACY_DITHERING_SPEC_VERSION 2 #define VK_EXT_LEGACY_DITHERING_EXTENSION_NAME "VK_EXT_legacy_dithering" typedef struct VkPhysicalDeviceLegacyDitheringFeaturesEXT { VkStructureType sType; @@ -18524,6 +18799,53 @@ typedef struct VkPhysicalDevicePipelineProtectedAccessFeaturesEXT { +// VK_AMD_anti_lag is a preprocessor guard. Do not pass it to API calls. 
+#define VK_AMD_anti_lag 1 +#define VK_AMD_ANTI_LAG_SPEC_VERSION 1 +#define VK_AMD_ANTI_LAG_EXTENSION_NAME "VK_AMD_anti_lag" + +typedef enum VkAntiLagModeAMD { + VK_ANTI_LAG_MODE_DRIVER_CONTROL_AMD = 0, + VK_ANTI_LAG_MODE_ON_AMD = 1, + VK_ANTI_LAG_MODE_OFF_AMD = 2, + VK_ANTI_LAG_MODE_MAX_ENUM_AMD = 0x7FFFFFFF +} VkAntiLagModeAMD; + +typedef enum VkAntiLagStageAMD { + VK_ANTI_LAG_STAGE_INPUT_AMD = 0, + VK_ANTI_LAG_STAGE_PRESENT_AMD = 1, + VK_ANTI_LAG_STAGE_MAX_ENUM_AMD = 0x7FFFFFFF +} VkAntiLagStageAMD; +typedef struct VkPhysicalDeviceAntiLagFeaturesAMD { + VkStructureType sType; + void* pNext; + VkBool32 antiLag; +} VkPhysicalDeviceAntiLagFeaturesAMD; + +typedef struct VkAntiLagPresentationInfoAMD { + VkStructureType sType; + void* pNext; + VkAntiLagStageAMD stage; + uint64_t frameIndex; +} VkAntiLagPresentationInfoAMD; + +typedef struct VkAntiLagDataAMD { + VkStructureType sType; + const void* pNext; + VkAntiLagModeAMD mode; + uint32_t maxFPS; + const VkAntiLagPresentationInfoAMD* pPresentationInfo; +} VkAntiLagDataAMD; + +typedef void (VKAPI_PTR *PFN_vkAntiLagUpdateAMD)(VkDevice device, const VkAntiLagDataAMD* pData); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkAntiLagUpdateAMD( + VkDevice device, + const VkAntiLagDataAMD* pData); +#endif + + // VK_EXT_shader_object is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_shader_object 1 VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkShaderEXT) @@ -18727,6 +19049,24 @@ typedef struct VkPhysicalDeviceExtendedSparseAddressSpacePropertiesNV { #define VK_EXT_MUTABLE_DESCRIPTOR_TYPE_EXTENSION_NAME "VK_EXT_mutable_descriptor_type" +// VK_EXT_legacy_vertex_attributes is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_legacy_vertex_attributes 1 +#define VK_EXT_LEGACY_VERTEX_ATTRIBUTES_SPEC_VERSION 1 +#define VK_EXT_LEGACY_VERTEX_ATTRIBUTES_EXTENSION_NAME "VK_EXT_legacy_vertex_attributes" +typedef struct VkPhysicalDeviceLegacyVertexAttributesFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 legacyVertexAttributes; +} VkPhysicalDeviceLegacyVertexAttributesFeaturesEXT; + +typedef struct VkPhysicalDeviceLegacyVertexAttributesPropertiesEXT { + VkStructureType sType; + void* pNext; + VkBool32 nativeUnalignedPerformance; +} VkPhysicalDeviceLegacyVertexAttributesPropertiesEXT; + + + // VK_EXT_layer_settings is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_layer_settings 1 #define VK_EXT_LAYER_SETTINGS_SPEC_VERSION 2 @@ -19123,6 +19463,18 @@ typedef struct VkPhysicalDeviceRawAccessChainsFeaturesNV { +// VK_NV_command_buffer_inheritance is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_command_buffer_inheritance 1 +#define VK_NV_COMMAND_BUFFER_INHERITANCE_SPEC_VERSION 1 +#define VK_NV_COMMAND_BUFFER_INHERITANCE_EXTENSION_NAME "VK_NV_command_buffer_inheritance" +typedef struct VkPhysicalDeviceCommandBufferInheritanceFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 commandBufferInheritance; +} VkPhysicalDeviceCommandBufferInheritanceFeaturesNV; + + + // VK_NV_shader_atomic_float16_vector is a preprocessor guard. Do not pass it to API calls. #define VK_NV_shader_atomic_float16_vector 1 #define VK_NV_SHADER_ATOMIC_FLOAT16_VECTOR_SPEC_VERSION 1 @@ -19135,6 +19487,18 @@ typedef struct VkPhysicalDeviceShaderAtomicFloat16VectorFeaturesNV { +// VK_EXT_shader_replicated_composites is a preprocessor guard. Do not pass it to API calls. 
+#define VK_EXT_shader_replicated_composites 1 +#define VK_EXT_SHADER_REPLICATED_COMPOSITES_SPEC_VERSION 1 +#define VK_EXT_SHADER_REPLICATED_COMPOSITES_EXTENSION_NAME "VK_EXT_shader_replicated_composites" +typedef struct VkPhysicalDeviceShaderReplicatedCompositesFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 shaderReplicatedComposites; +} VkPhysicalDeviceShaderReplicatedCompositesFeaturesEXT; + + + // VK_NV_ray_tracing_validation is a preprocessor guard. Do not pass it to API calls. #define VK_NV_ray_tracing_validation 1 #define VK_NV_RAY_TRACING_VALIDATION_SPEC_VERSION 1 @@ -19147,6 +19511,30 @@ typedef struct VkPhysicalDeviceRayTracingValidationFeaturesNV { +// VK_MESA_image_alignment_control is a preprocessor guard. Do not pass it to API calls. +#define VK_MESA_image_alignment_control 1 +#define VK_MESA_IMAGE_ALIGNMENT_CONTROL_SPEC_VERSION 1 +#define VK_MESA_IMAGE_ALIGNMENT_CONTROL_EXTENSION_NAME "VK_MESA_image_alignment_control" +typedef struct VkPhysicalDeviceImageAlignmentControlFeaturesMESA { + VkStructureType sType; + void* pNext; + VkBool32 imageAlignmentControl; +} VkPhysicalDeviceImageAlignmentControlFeaturesMESA; + +typedef struct VkPhysicalDeviceImageAlignmentControlPropertiesMESA { + VkStructureType sType; + void* pNext; + uint32_t supportedImageAlignmentMask; +} VkPhysicalDeviceImageAlignmentControlPropertiesMESA; + +typedef struct VkImageAlignmentControlCreateInfoMESA { + VkStructureType sType; + const void* pNext; + uint32_t maximumRequestedAlignment; +} VkImageAlignmentControlCreateInfoMESA; + + + // VK_KHR_acceleration_structure is a preprocessor guard. Do not pass it to API calls. #define VK_KHR_acceleration_structure 1 #define VK_KHR_ACCELERATION_STRUCTURE_SPEC_VERSION 13 diff --git a/third_party/vulkan/vulkan_enums.hpp b/third_party/vulkan/vulkan_enums.hpp index 222a796..fe4f89d 100644 --- a/third_party/vulkan/vulkan_enums.hpp +++ b/third_party/vulkan/vulkan_enums.hpp @@ -8,6 +8,9 @@ #ifndef VULKAN_ENUMS_HPP #define VULKAN_ENUMS_HPP +// include-what-you-use: make sure, vulkan.hpp is used by code-completers +// IWYU pragma: private; include "vulkan.hpp" + namespace VULKAN_HPP_NAMESPACE { template @@ -281,7 +284,9 @@ namespace VULKAN_HPP_NAMESPACE eErrorInvalidVideoStdParametersKHR = VK_ERROR_INVALID_VIDEO_STD_PARAMETERS_KHR, eErrorCompressionExhaustedEXT = VK_ERROR_COMPRESSION_EXHAUSTED_EXT, eIncompatibleShaderBinaryEXT = VK_INCOMPATIBLE_SHADER_BINARY_EXT, - eErrorIncompatibleShaderBinaryEXT = VK_ERROR_INCOMPATIBLE_SHADER_BINARY_EXT + eErrorIncompatibleShaderBinaryEXT = VK_ERROR_INCOMPATIBLE_SHADER_BINARY_EXT, + ePipelineBinaryMissingKHR = VK_PIPELINE_BINARY_MISSING_KHR, + eErrorNotEnoughSpaceKHR = VK_ERROR_NOT_ENOUGH_SPACE_KHR }; enum class StructureType @@ -940,15 +945,11 @@ namespace VULKAN_HPP_NAMESPACE ePipelineRepresentativeFragmentTestStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_REPRESENTATIVE_FRAGMENT_TEST_STATE_CREATE_INFO_NV, ePhysicalDeviceImageViewImageFormatInfoEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_VIEW_IMAGE_FORMAT_INFO_EXT, eFilterCubicImageViewImageFormatPropertiesEXT = VK_STRUCTURE_TYPE_FILTER_CUBIC_IMAGE_VIEW_IMAGE_FORMAT_PROPERTIES_EXT, - eDeviceQueueGlobalPriorityCreateInfoKHR = VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_KHR, - eDeviceQueueGlobalPriorityCreateInfoEXT = VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_EXT, eImportMemoryHostPointerInfoEXT = VK_STRUCTURE_TYPE_IMPORT_MEMORY_HOST_POINTER_INFO_EXT, eMemoryHostPointerPropertiesEXT = VK_STRUCTURE_TYPE_MEMORY_HOST_POINTER_PROPERTIES_EXT, 
ePhysicalDeviceExternalMemoryHostPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_HOST_PROPERTIES_EXT, ePhysicalDeviceShaderClockFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CLOCK_FEATURES_KHR, ePipelineCompilerControlCreateInfoAMD = VK_STRUCTURE_TYPE_PIPELINE_COMPILER_CONTROL_CREATE_INFO_AMD, - eCalibratedTimestampInfoKHR = VK_STRUCTURE_TYPE_CALIBRATED_TIMESTAMP_INFO_KHR, - eCalibratedTimestampInfoEXT = VK_STRUCTURE_TYPE_CALIBRATED_TIMESTAMP_INFO_EXT, ePhysicalDeviceShaderCorePropertiesAMD = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_AMD, eVideoDecodeH265CapabilitiesKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_CAPABILITIES_KHR, eVideoDecodeH265SessionParametersCreateInfoKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_SESSION_PARAMETERS_CREATE_INFO_KHR, @@ -956,24 +957,19 @@ namespace VULKAN_HPP_NAMESPACE eVideoDecodeH265ProfileInfoKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_PROFILE_INFO_KHR, eVideoDecodeH265PictureInfoKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_PICTURE_INFO_KHR, eVideoDecodeH265DpbSlotInfoKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_DPB_SLOT_INFO_KHR, + eDeviceQueueGlobalPriorityCreateInfoKHR = VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_KHR, + eDeviceQueueGlobalPriorityCreateInfoEXT = VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_EXT, ePhysicalDeviceGlobalPriorityQueryFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GLOBAL_PRIORITY_QUERY_FEATURES_KHR, ePhysicalDeviceGlobalPriorityQueryFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GLOBAL_PRIORITY_QUERY_FEATURES_EXT, eQueueFamilyGlobalPriorityPropertiesKHR = VK_STRUCTURE_TYPE_QUEUE_FAMILY_GLOBAL_PRIORITY_PROPERTIES_KHR, eQueueFamilyGlobalPriorityPropertiesEXT = VK_STRUCTURE_TYPE_QUEUE_FAMILY_GLOBAL_PRIORITY_PROPERTIES_EXT, eDeviceMemoryOverallocationCreateInfoAMD = VK_STRUCTURE_TYPE_DEVICE_MEMORY_OVERALLOCATION_CREATE_INFO_AMD, ePhysicalDeviceVertexAttributeDivisorPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_EXT, - ePipelineVertexInputDivisorStateCreateInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_KHR, - ePipelineVertexInputDivisorStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_EXT, - ePhysicalDeviceVertexAttributeDivisorFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_KHR, - ePhysicalDeviceVertexAttributeDivisorFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_EXT, #if defined( VK_USE_PLATFORM_GGP ) ePresentFrameTokenGGP = VK_STRUCTURE_TYPE_PRESENT_FRAME_TOKEN_GGP, #endif /*VK_USE_PLATFORM_GGP*/ - ePhysicalDeviceComputeShaderDerivativesFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_FEATURES_NV, ePhysicalDeviceMeshShaderFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_FEATURES_NV, ePhysicalDeviceMeshShaderPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_PROPERTIES_NV, - ePhysicalDeviceFragmentShaderBarycentricFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_KHR, - ePhysicalDeviceFragmentShaderBarycentricFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_NV, ePhysicalDeviceShaderImageFootprintFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_IMAGE_FOOTPRINT_FEATURES_NV, ePipelineViewportExclusiveScissorStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_EXCLUSIVE_SCISSOR_STATE_CREATE_INFO_NV, ePhysicalDeviceExclusiveScissorFeaturesNV = 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXCLUSIVE_SCISSOR_FEATURES_NV, @@ -1038,15 +1034,7 @@ namespace VULKAN_HPP_NAMESPACE eSurfaceFullScreenExclusiveWin32InfoEXT = VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_WIN32_INFO_EXT, #endif /*VK_USE_PLATFORM_WIN32_KHR*/ eHeadlessSurfaceCreateInfoEXT = VK_STRUCTURE_TYPE_HEADLESS_SURFACE_CREATE_INFO_EXT, - ePhysicalDeviceLineRasterizationFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_KHR, - ePhysicalDeviceLineRasterizationFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_EXT, - ePipelineRasterizationLineStateCreateInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_KHR, - ePipelineRasterizationLineStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_EXT, - ePhysicalDeviceLineRasterizationPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES_KHR, - ePhysicalDeviceLineRasterizationPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES_EXT, ePhysicalDeviceShaderAtomicFloatFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_FLOAT_FEATURES_EXT, - ePhysicalDeviceIndexTypeUint8FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_KHR, - ePhysicalDeviceIndexTypeUint8FeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_EXT, ePhysicalDeviceExtendedDynamicStateFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_FEATURES_EXT, ePhysicalDevicePipelineExecutablePropertiesFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_EXECUTABLE_PROPERTIES_FEATURES_KHR, ePipelineInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_INFO_KHR, @@ -1145,62 +1133,54 @@ namespace VULKAN_HPP_NAMESPACE eExportMetalSharedEventInfoEXT = VK_STRUCTURE_TYPE_EXPORT_METAL_SHARED_EVENT_INFO_EXT, eImportMetalSharedEventInfoEXT = VK_STRUCTURE_TYPE_IMPORT_METAL_SHARED_EVENT_INFO_EXT, #endif /*VK_USE_PLATFORM_METAL_EXT*/ - eQueueFamilyCheckpointProperties2NV = VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_2_NV, - eCheckpointData2NV = VK_STRUCTURE_TYPE_CHECKPOINT_DATA_2_NV, - ePhysicalDeviceDescriptorBufferPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_BUFFER_PROPERTIES_EXT, - ePhysicalDeviceDescriptorBufferDensityMapPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_BUFFER_DENSITY_MAP_PROPERTIES_EXT, - ePhysicalDeviceDescriptorBufferFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_BUFFER_FEATURES_EXT, - eDescriptorAddressInfoEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_ADDRESS_INFO_EXT, - eDescriptorGetInfoEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_GET_INFO_EXT, - eBufferCaptureDescriptorDataInfoEXT = VK_STRUCTURE_TYPE_BUFFER_CAPTURE_DESCRIPTOR_DATA_INFO_EXT, - eImageCaptureDescriptorDataInfoEXT = VK_STRUCTURE_TYPE_IMAGE_CAPTURE_DESCRIPTOR_DATA_INFO_EXT, - eImageViewCaptureDescriptorDataInfoEXT = VK_STRUCTURE_TYPE_IMAGE_VIEW_CAPTURE_DESCRIPTOR_DATA_INFO_EXT, - eSamplerCaptureDescriptorDataInfoEXT = VK_STRUCTURE_TYPE_SAMPLER_CAPTURE_DESCRIPTOR_DATA_INFO_EXT, - eOpaqueCaptureDescriptorDataCreateInfoEXT = VK_STRUCTURE_TYPE_OPAQUE_CAPTURE_DESCRIPTOR_DATA_CREATE_INFO_EXT, - eDescriptorBufferBindingInfoEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_BUFFER_BINDING_INFO_EXT, - eDescriptorBufferBindingPushDescriptorBufferHandleEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_BUFFER_BINDING_PUSH_DESCRIPTOR_BUFFER_HANDLE_EXT, - eAccelerationStructureCaptureDescriptorDataInfoEXT = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CAPTURE_DESCRIPTOR_DATA_INFO_EXT, - 
ePhysicalDeviceGraphicsPipelineLibraryFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GRAPHICS_PIPELINE_LIBRARY_FEATURES_EXT, - ePhysicalDeviceGraphicsPipelineLibraryPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GRAPHICS_PIPELINE_LIBRARY_PROPERTIES_EXT, - eGraphicsPipelineLibraryCreateInfoEXT = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_LIBRARY_CREATE_INFO_EXT, - ePhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_EARLY_AND_LATE_FRAGMENT_TESTS_FEATURES_AMD, - ePhysicalDeviceFragmentShaderBarycentricPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_PROPERTIES_KHR, - ePhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_UNIFORM_CONTROL_FLOW_FEATURES_KHR, - ePhysicalDeviceFragmentShadingRateEnumsPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_ENUMS_PROPERTIES_NV, - ePhysicalDeviceFragmentShadingRateEnumsFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_ENUMS_FEATURES_NV, - ePipelineFragmentShadingRateEnumStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_FRAGMENT_SHADING_RATE_ENUM_STATE_CREATE_INFO_NV, - eAccelerationStructureGeometryMotionTrianglesDataNV = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_MOTION_TRIANGLES_DATA_NV, - ePhysicalDeviceRayTracingMotionBlurFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_MOTION_BLUR_FEATURES_NV, - eAccelerationStructureMotionInfoNV = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MOTION_INFO_NV, - ePhysicalDeviceMeshShaderFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_FEATURES_EXT, - ePhysicalDeviceMeshShaderPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_PROPERTIES_EXT, - ePhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_YCBCR_2_PLANE_444_FORMATS_FEATURES_EXT, - ePhysicalDeviceFragmentDensityMap2FeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_2_FEATURES_EXT, - ePhysicalDeviceFragmentDensityMap2PropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_2_PROPERTIES_EXT, - eCopyCommandTransformInfoQCOM = VK_STRUCTURE_TYPE_COPY_COMMAND_TRANSFORM_INFO_QCOM, - ePhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_WORKGROUP_MEMORY_EXPLICIT_LAYOUT_FEATURES_KHR, - ePhysicalDeviceImageCompressionControlFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_COMPRESSION_CONTROL_FEATURES_EXT, - eImageCompressionControlEXT = VK_STRUCTURE_TYPE_IMAGE_COMPRESSION_CONTROL_EXT, - eSubresourceLayout2KHR = VK_STRUCTURE_TYPE_SUBRESOURCE_LAYOUT_2_KHR, - eSubresourceLayout2EXT = VK_STRUCTURE_TYPE_SUBRESOURCE_LAYOUT_2_EXT, - eImageSubresource2KHR = VK_STRUCTURE_TYPE_IMAGE_SUBRESOURCE_2_KHR, - eImageSubresource2EXT = VK_STRUCTURE_TYPE_IMAGE_SUBRESOURCE_2_EXT, - eImageCompressionPropertiesEXT = VK_STRUCTURE_TYPE_IMAGE_COMPRESSION_PROPERTIES_EXT, - ePhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ATTACHMENT_FEEDBACK_LOOP_LAYOUT_FEATURES_EXT, - ePhysicalDevice4444FormatsFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_4444_FORMATS_FEATURES_EXT, - ePhysicalDeviceFaultFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FAULT_FEATURES_EXT, - eDeviceFaultCountsEXT = VK_STRUCTURE_TYPE_DEVICE_FAULT_COUNTS_EXT, - eDeviceFaultInfoEXT = VK_STRUCTURE_TYPE_DEVICE_FAULT_INFO_EXT, - ePhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_FEATURES_EXT, - 
ePhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_FEATURES_ARM, - ePhysicalDeviceRgba10X6FormatsFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RGBA10X6_FORMATS_FEATURES_EXT, + eQueueFamilyCheckpointProperties2NV = VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_2_NV, + eCheckpointData2NV = VK_STRUCTURE_TYPE_CHECKPOINT_DATA_2_NV, + ePhysicalDeviceDescriptorBufferPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_BUFFER_PROPERTIES_EXT, + ePhysicalDeviceDescriptorBufferDensityMapPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_BUFFER_DENSITY_MAP_PROPERTIES_EXT, + ePhysicalDeviceDescriptorBufferFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_BUFFER_FEATURES_EXT, + eDescriptorAddressInfoEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_ADDRESS_INFO_EXT, + eDescriptorGetInfoEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_GET_INFO_EXT, + eBufferCaptureDescriptorDataInfoEXT = VK_STRUCTURE_TYPE_BUFFER_CAPTURE_DESCRIPTOR_DATA_INFO_EXT, + eImageCaptureDescriptorDataInfoEXT = VK_STRUCTURE_TYPE_IMAGE_CAPTURE_DESCRIPTOR_DATA_INFO_EXT, + eImageViewCaptureDescriptorDataInfoEXT = VK_STRUCTURE_TYPE_IMAGE_VIEW_CAPTURE_DESCRIPTOR_DATA_INFO_EXT, + eSamplerCaptureDescriptorDataInfoEXT = VK_STRUCTURE_TYPE_SAMPLER_CAPTURE_DESCRIPTOR_DATA_INFO_EXT, + eOpaqueCaptureDescriptorDataCreateInfoEXT = VK_STRUCTURE_TYPE_OPAQUE_CAPTURE_DESCRIPTOR_DATA_CREATE_INFO_EXT, + eDescriptorBufferBindingInfoEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_BUFFER_BINDING_INFO_EXT, + eDescriptorBufferBindingPushDescriptorBufferHandleEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_BUFFER_BINDING_PUSH_DESCRIPTOR_BUFFER_HANDLE_EXT, + eAccelerationStructureCaptureDescriptorDataInfoEXT = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CAPTURE_DESCRIPTOR_DATA_INFO_EXT, + ePhysicalDeviceGraphicsPipelineLibraryFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GRAPHICS_PIPELINE_LIBRARY_FEATURES_EXT, + ePhysicalDeviceGraphicsPipelineLibraryPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GRAPHICS_PIPELINE_LIBRARY_PROPERTIES_EXT, + eGraphicsPipelineLibraryCreateInfoEXT = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_LIBRARY_CREATE_INFO_EXT, + ePhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_EARLY_AND_LATE_FRAGMENT_TESTS_FEATURES_AMD, + ePhysicalDeviceFragmentShaderBarycentricFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_KHR, + ePhysicalDeviceFragmentShaderBarycentricFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_NV, + ePhysicalDeviceFragmentShaderBarycentricPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_PROPERTIES_KHR, + ePhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_UNIFORM_CONTROL_FLOW_FEATURES_KHR, + ePhysicalDeviceFragmentShadingRateEnumsPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_ENUMS_PROPERTIES_NV, + ePhysicalDeviceFragmentShadingRateEnumsFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_ENUMS_FEATURES_NV, + ePipelineFragmentShadingRateEnumStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_FRAGMENT_SHADING_RATE_ENUM_STATE_CREATE_INFO_NV, + eAccelerationStructureGeometryMotionTrianglesDataNV = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_MOTION_TRIANGLES_DATA_NV, + ePhysicalDeviceRayTracingMotionBlurFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_MOTION_BLUR_FEATURES_NV, + eAccelerationStructureMotionInfoNV = 
VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MOTION_INFO_NV, + ePhysicalDeviceMeshShaderFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_FEATURES_EXT, + ePhysicalDeviceMeshShaderPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_PROPERTIES_EXT, + ePhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_YCBCR_2_PLANE_444_FORMATS_FEATURES_EXT, + ePhysicalDeviceFragmentDensityMap2FeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_2_FEATURES_EXT, + ePhysicalDeviceFragmentDensityMap2PropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_2_PROPERTIES_EXT, + eCopyCommandTransformInfoQCOM = VK_STRUCTURE_TYPE_COPY_COMMAND_TRANSFORM_INFO_QCOM, + ePhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_WORKGROUP_MEMORY_EXPLICIT_LAYOUT_FEATURES_KHR, + ePhysicalDeviceImageCompressionControlFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_COMPRESSION_CONTROL_FEATURES_EXT, + eImageCompressionControlEXT = VK_STRUCTURE_TYPE_IMAGE_COMPRESSION_CONTROL_EXT, + eImageCompressionPropertiesEXT = VK_STRUCTURE_TYPE_IMAGE_COMPRESSION_PROPERTIES_EXT, + ePhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ATTACHMENT_FEEDBACK_LOOP_LAYOUT_FEATURES_EXT, + ePhysicalDevice4444FormatsFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_4444_FORMATS_FEATURES_EXT, + ePhysicalDeviceFaultFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FAULT_FEATURES_EXT, + eDeviceFaultCountsEXT = VK_STRUCTURE_TYPE_DEVICE_FAULT_COUNTS_EXT, + eDeviceFaultInfoEXT = VK_STRUCTURE_TYPE_DEVICE_FAULT_INFO_EXT, + ePhysicalDeviceRgba10X6FormatsFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RGBA10X6_FORMATS_FEATURES_EXT, #if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) eDirectfbSurfaceCreateInfoEXT = VK_STRUCTURE_TYPE_DIRECTFB_SURFACE_CREATE_INFO_EXT, #endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ - ePhysicalDeviceMutableDescriptorTypeFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MUTABLE_DESCRIPTOR_TYPE_FEATURES_EXT, - ePhysicalDeviceMutableDescriptorTypeFeaturesVALVE = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MUTABLE_DESCRIPTOR_TYPE_FEATURES_VALVE, - eMutableDescriptorTypeCreateInfoEXT = VK_STRUCTURE_TYPE_MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_EXT, - eMutableDescriptorTypeCreateInfoVALVE = VK_STRUCTURE_TYPE_MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_VALVE, ePhysicalDeviceVertexInputDynamicStateFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_INPUT_DYNAMIC_STATE_FEATURES_EXT, eVertexInputBindingDescription2EXT = VK_STRUCTURE_TYPE_VERTEX_INPUT_BINDING_DESCRIPTION_2_EXT, eVertexInputAttributeDescription2EXT = VK_STRUCTURE_TYPE_VERTEX_INPUT_ATTRIBUTE_DESCRIPTION_2_EXT, @@ -1270,69 +1250,71 @@ namespace VULKAN_HPP_NAMESPACE ePhysicalDeviceDisplacementMicromapPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DISPLACEMENT_MICROMAP_PROPERTIES_NV, eAccelerationStructureTrianglesDisplacementMicromapNV = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_TRIANGLES_DISPLACEMENT_MICROMAP_NV, #endif /*VK_ENABLE_BETA_EXTENSIONS*/ - ePhysicalDeviceClusterCullingShaderFeaturesHUAWEI = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CLUSTER_CULLING_SHADER_FEATURES_HUAWEI, - ePhysicalDeviceClusterCullingShaderPropertiesHUAWEI = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CLUSTER_CULLING_SHADER_PROPERTIES_HUAWEI, - ePhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CLUSTER_CULLING_SHADER_VRS_FEATURES_HUAWEI, - ePhysicalDeviceBorderColorSwizzleFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BORDER_COLOR_SWIZZLE_FEATURES_EXT, - 
eSamplerBorderColorComponentMappingCreateInfoEXT = VK_STRUCTURE_TYPE_SAMPLER_BORDER_COLOR_COMPONENT_MAPPING_CREATE_INFO_EXT, - ePhysicalDevicePageableDeviceLocalMemoryFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PAGEABLE_DEVICE_LOCAL_MEMORY_FEATURES_EXT, - ePhysicalDeviceShaderCorePropertiesARM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_ARM, - ePhysicalDeviceShaderSubgroupRotateFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_ROTATE_FEATURES_KHR, - eDeviceQueueShaderCoreControlCreateInfoARM = VK_STRUCTURE_TYPE_DEVICE_QUEUE_SHADER_CORE_CONTROL_CREATE_INFO_ARM, - ePhysicalDeviceSchedulingControlsFeaturesARM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCHEDULING_CONTROLS_FEATURES_ARM, - ePhysicalDeviceSchedulingControlsPropertiesARM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCHEDULING_CONTROLS_PROPERTIES_ARM, - ePhysicalDeviceImageSlicedViewOf3DFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_SLICED_VIEW_OF_3D_FEATURES_EXT, - eImageViewSlicedCreateInfoEXT = VK_STRUCTURE_TYPE_IMAGE_VIEW_SLICED_CREATE_INFO_EXT, - ePhysicalDeviceDescriptorSetHostMappingFeaturesVALVE = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_SET_HOST_MAPPING_FEATURES_VALVE, - eDescriptorSetBindingReferenceVALVE = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_BINDING_REFERENCE_VALVE, - eDescriptorSetLayoutHostMappingInfoVALVE = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_HOST_MAPPING_INFO_VALVE, - ePhysicalDeviceDepthClampZeroOneFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLAMP_ZERO_ONE_FEATURES_EXT, - ePhysicalDeviceNonSeamlessCubeMapFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_NON_SEAMLESS_CUBE_MAP_FEATURES_EXT, - ePhysicalDeviceRenderPassStripedFeaturesARM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RENDER_PASS_STRIPED_FEATURES_ARM, - ePhysicalDeviceRenderPassStripedPropertiesARM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RENDER_PASS_STRIPED_PROPERTIES_ARM, - eRenderPassStripeBeginInfoARM = VK_STRUCTURE_TYPE_RENDER_PASS_STRIPE_BEGIN_INFO_ARM, - eRenderPassStripeInfoARM = VK_STRUCTURE_TYPE_RENDER_PASS_STRIPE_INFO_ARM, - eRenderPassStripeSubmitInfoARM = VK_STRUCTURE_TYPE_RENDER_PASS_STRIPE_SUBMIT_INFO_ARM, - ePhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_OFFSET_FEATURES_QCOM, - ePhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_OFFSET_PROPERTIES_QCOM, - eSubpassFragmentDensityMapOffsetEndInfoQCOM = VK_STRUCTURE_TYPE_SUBPASS_FRAGMENT_DENSITY_MAP_OFFSET_END_INFO_QCOM, - ePhysicalDeviceCopyMemoryIndirectFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COPY_MEMORY_INDIRECT_FEATURES_NV, - ePhysicalDeviceCopyMemoryIndirectPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COPY_MEMORY_INDIRECT_PROPERTIES_NV, - ePhysicalDeviceMemoryDecompressionFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_DECOMPRESSION_FEATURES_NV, - ePhysicalDeviceMemoryDecompressionPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_DECOMPRESSION_PROPERTIES_NV, - ePhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_COMPUTE_FEATURES_NV, - eComputePipelineIndirectBufferInfoNV = VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_INDIRECT_BUFFER_INFO_NV, - ePipelineIndirectDeviceAddressInfoNV = VK_STRUCTURE_TYPE_PIPELINE_INDIRECT_DEVICE_ADDRESS_INFO_NV, - ePhysicalDeviceLinearColorAttachmentFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINEAR_COLOR_ATTACHMENT_FEATURES_NV, - ePhysicalDeviceShaderMaximalReconvergenceFeaturesKHR = 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_MAXIMAL_RECONVERGENCE_FEATURES_KHR, - ePhysicalDeviceImageCompressionControlSwapchainFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_COMPRESSION_CONTROL_SWAPCHAIN_FEATURES_EXT, - ePhysicalDeviceImageProcessingFeaturesQCOM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_PROCESSING_FEATURES_QCOM, - ePhysicalDeviceImageProcessingPropertiesQCOM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_PROCESSING_PROPERTIES_QCOM, - eImageViewSampleWeightCreateInfoQCOM = VK_STRUCTURE_TYPE_IMAGE_VIEW_SAMPLE_WEIGHT_CREATE_INFO_QCOM, - ePhysicalDeviceNestedCommandBufferFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_NESTED_COMMAND_BUFFER_FEATURES_EXT, - ePhysicalDeviceNestedCommandBufferPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_NESTED_COMMAND_BUFFER_PROPERTIES_EXT, - eExternalMemoryAcquireUnmodifiedEXT = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_ACQUIRE_UNMODIFIED_EXT, - ePhysicalDeviceExtendedDynamicState3FeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_3_FEATURES_EXT, - ePhysicalDeviceExtendedDynamicState3PropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_3_PROPERTIES_EXT, - ePhysicalDeviceSubpassMergeFeedbackFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBPASS_MERGE_FEEDBACK_FEATURES_EXT, - eRenderPassCreationControlEXT = VK_STRUCTURE_TYPE_RENDER_PASS_CREATION_CONTROL_EXT, - eRenderPassCreationFeedbackCreateInfoEXT = VK_STRUCTURE_TYPE_RENDER_PASS_CREATION_FEEDBACK_CREATE_INFO_EXT, - eRenderPassSubpassFeedbackCreateInfoEXT = VK_STRUCTURE_TYPE_RENDER_PASS_SUBPASS_FEEDBACK_CREATE_INFO_EXT, - eDirectDriverLoadingInfoLUNARG = VK_STRUCTURE_TYPE_DIRECT_DRIVER_LOADING_INFO_LUNARG, - eDirectDriverLoadingListLUNARG = VK_STRUCTURE_TYPE_DIRECT_DRIVER_LOADING_LIST_LUNARG, - ePhysicalDeviceShaderModuleIdentifierFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_MODULE_IDENTIFIER_FEATURES_EXT, - ePhysicalDeviceShaderModuleIdentifierPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_MODULE_IDENTIFIER_PROPERTIES_EXT, - ePipelineShaderStageModuleIdentifierCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_MODULE_IDENTIFIER_CREATE_INFO_EXT, - eShaderModuleIdentifierEXT = VK_STRUCTURE_TYPE_SHADER_MODULE_IDENTIFIER_EXT, - ePhysicalDeviceOpticalFlowFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_OPTICAL_FLOW_FEATURES_NV, - ePhysicalDeviceOpticalFlowPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_OPTICAL_FLOW_PROPERTIES_NV, - eOpticalFlowImageFormatInfoNV = VK_STRUCTURE_TYPE_OPTICAL_FLOW_IMAGE_FORMAT_INFO_NV, - eOpticalFlowImageFormatPropertiesNV = VK_STRUCTURE_TYPE_OPTICAL_FLOW_IMAGE_FORMAT_PROPERTIES_NV, - eOpticalFlowSessionCreateInfoNV = VK_STRUCTURE_TYPE_OPTICAL_FLOW_SESSION_CREATE_INFO_NV, - eOpticalFlowExecuteInfoNV = VK_STRUCTURE_TYPE_OPTICAL_FLOW_EXECUTE_INFO_NV, - eOpticalFlowSessionCreatePrivateDataInfoNV = VK_STRUCTURE_TYPE_OPTICAL_FLOW_SESSION_CREATE_PRIVATE_DATA_INFO_NV, - ePhysicalDeviceLegacyDitheringFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LEGACY_DITHERING_FEATURES_EXT, - ePhysicalDevicePipelineProtectedAccessFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_PROTECTED_ACCESS_FEATURES_EXT, + ePhysicalDeviceClusterCullingShaderFeaturesHUAWEI = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CLUSTER_CULLING_SHADER_FEATURES_HUAWEI, + ePhysicalDeviceClusterCullingShaderPropertiesHUAWEI = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CLUSTER_CULLING_SHADER_PROPERTIES_HUAWEI, + ePhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CLUSTER_CULLING_SHADER_VRS_FEATURES_HUAWEI, + 
ePhysicalDeviceBorderColorSwizzleFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BORDER_COLOR_SWIZZLE_FEATURES_EXT, + eSamplerBorderColorComponentMappingCreateInfoEXT = VK_STRUCTURE_TYPE_SAMPLER_BORDER_COLOR_COMPONENT_MAPPING_CREATE_INFO_EXT, + ePhysicalDevicePageableDeviceLocalMemoryFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PAGEABLE_DEVICE_LOCAL_MEMORY_FEATURES_EXT, + ePhysicalDeviceShaderCorePropertiesARM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_ARM, + ePhysicalDeviceShaderSubgroupRotateFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_ROTATE_FEATURES_KHR, + eDeviceQueueShaderCoreControlCreateInfoARM = VK_STRUCTURE_TYPE_DEVICE_QUEUE_SHADER_CORE_CONTROL_CREATE_INFO_ARM, + ePhysicalDeviceSchedulingControlsFeaturesARM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCHEDULING_CONTROLS_FEATURES_ARM, + ePhysicalDeviceSchedulingControlsPropertiesARM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCHEDULING_CONTROLS_PROPERTIES_ARM, + ePhysicalDeviceImageSlicedViewOf3DFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_SLICED_VIEW_OF_3D_FEATURES_EXT, + eImageViewSlicedCreateInfoEXT = VK_STRUCTURE_TYPE_IMAGE_VIEW_SLICED_CREATE_INFO_EXT, + ePhysicalDeviceDescriptorSetHostMappingFeaturesVALVE = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_SET_HOST_MAPPING_FEATURES_VALVE, + eDescriptorSetBindingReferenceVALVE = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_BINDING_REFERENCE_VALVE, + eDescriptorSetLayoutHostMappingInfoVALVE = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_HOST_MAPPING_INFO_VALVE, + ePhysicalDeviceDepthClampZeroOneFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLAMP_ZERO_ONE_FEATURES_EXT, + ePhysicalDeviceNonSeamlessCubeMapFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_NON_SEAMLESS_CUBE_MAP_FEATURES_EXT, + ePhysicalDeviceRenderPassStripedFeaturesARM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RENDER_PASS_STRIPED_FEATURES_ARM, + ePhysicalDeviceRenderPassStripedPropertiesARM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RENDER_PASS_STRIPED_PROPERTIES_ARM, + eRenderPassStripeBeginInfoARM = VK_STRUCTURE_TYPE_RENDER_PASS_STRIPE_BEGIN_INFO_ARM, + eRenderPassStripeInfoARM = VK_STRUCTURE_TYPE_RENDER_PASS_STRIPE_INFO_ARM, + eRenderPassStripeSubmitInfoARM = VK_STRUCTURE_TYPE_RENDER_PASS_STRIPE_SUBMIT_INFO_ARM, + ePhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_OFFSET_FEATURES_QCOM, + ePhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_OFFSET_PROPERTIES_QCOM, + eSubpassFragmentDensityMapOffsetEndInfoQCOM = VK_STRUCTURE_TYPE_SUBPASS_FRAGMENT_DENSITY_MAP_OFFSET_END_INFO_QCOM, + ePhysicalDeviceCopyMemoryIndirectFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COPY_MEMORY_INDIRECT_FEATURES_NV, + ePhysicalDeviceCopyMemoryIndirectPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COPY_MEMORY_INDIRECT_PROPERTIES_NV, + ePhysicalDeviceMemoryDecompressionFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_DECOMPRESSION_FEATURES_NV, + ePhysicalDeviceMemoryDecompressionPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_DECOMPRESSION_PROPERTIES_NV, + ePhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_COMPUTE_FEATURES_NV, + eComputePipelineIndirectBufferInfoNV = VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_INDIRECT_BUFFER_INFO_NV, + ePipelineIndirectDeviceAddressInfoNV = VK_STRUCTURE_TYPE_PIPELINE_INDIRECT_DEVICE_ADDRESS_INFO_NV, + ePhysicalDeviceLinearColorAttachmentFeaturesNV = 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINEAR_COLOR_ATTACHMENT_FEATURES_NV, + ePhysicalDeviceShaderMaximalReconvergenceFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_MAXIMAL_RECONVERGENCE_FEATURES_KHR, + ePhysicalDeviceImageCompressionControlSwapchainFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_COMPRESSION_CONTROL_SWAPCHAIN_FEATURES_EXT, + ePhysicalDeviceImageProcessingFeaturesQCOM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_PROCESSING_FEATURES_QCOM, + ePhysicalDeviceImageProcessingPropertiesQCOM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_PROCESSING_PROPERTIES_QCOM, + eImageViewSampleWeightCreateInfoQCOM = VK_STRUCTURE_TYPE_IMAGE_VIEW_SAMPLE_WEIGHT_CREATE_INFO_QCOM, + ePhysicalDeviceNestedCommandBufferFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_NESTED_COMMAND_BUFFER_FEATURES_EXT, + ePhysicalDeviceNestedCommandBufferPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_NESTED_COMMAND_BUFFER_PROPERTIES_EXT, + eExternalMemoryAcquireUnmodifiedEXT = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_ACQUIRE_UNMODIFIED_EXT, + ePhysicalDeviceExtendedDynamicState3FeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_3_FEATURES_EXT, + ePhysicalDeviceExtendedDynamicState3PropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_3_PROPERTIES_EXT, + ePhysicalDeviceSubpassMergeFeedbackFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBPASS_MERGE_FEEDBACK_FEATURES_EXT, + eRenderPassCreationControlEXT = VK_STRUCTURE_TYPE_RENDER_PASS_CREATION_CONTROL_EXT, + eRenderPassCreationFeedbackCreateInfoEXT = VK_STRUCTURE_TYPE_RENDER_PASS_CREATION_FEEDBACK_CREATE_INFO_EXT, + eRenderPassSubpassFeedbackCreateInfoEXT = VK_STRUCTURE_TYPE_RENDER_PASS_SUBPASS_FEEDBACK_CREATE_INFO_EXT, + eDirectDriverLoadingInfoLUNARG = VK_STRUCTURE_TYPE_DIRECT_DRIVER_LOADING_INFO_LUNARG, + eDirectDriverLoadingListLUNARG = VK_STRUCTURE_TYPE_DIRECT_DRIVER_LOADING_LIST_LUNARG, + ePhysicalDeviceShaderModuleIdentifierFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_MODULE_IDENTIFIER_FEATURES_EXT, + ePhysicalDeviceShaderModuleIdentifierPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_MODULE_IDENTIFIER_PROPERTIES_EXT, + ePipelineShaderStageModuleIdentifierCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_MODULE_IDENTIFIER_CREATE_INFO_EXT, + eShaderModuleIdentifierEXT = VK_STRUCTURE_TYPE_SHADER_MODULE_IDENTIFIER_EXT, + ePhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_FEATURES_EXT, + ePhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_FEATURES_ARM, + ePhysicalDeviceOpticalFlowFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_OPTICAL_FLOW_FEATURES_NV, + ePhysicalDeviceOpticalFlowPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_OPTICAL_FLOW_PROPERTIES_NV, + eOpticalFlowImageFormatInfoNV = VK_STRUCTURE_TYPE_OPTICAL_FLOW_IMAGE_FORMAT_INFO_NV, + eOpticalFlowImageFormatPropertiesNV = VK_STRUCTURE_TYPE_OPTICAL_FLOW_IMAGE_FORMAT_PROPERTIES_NV, + eOpticalFlowSessionCreateInfoNV = VK_STRUCTURE_TYPE_OPTICAL_FLOW_SESSION_CREATE_INFO_NV, + eOpticalFlowExecuteInfoNV = VK_STRUCTURE_TYPE_OPTICAL_FLOW_EXECUTE_INFO_NV, + eOpticalFlowSessionCreatePrivateDataInfoNV = VK_STRUCTURE_TYPE_OPTICAL_FLOW_SESSION_CREATE_PRIVATE_DATA_INFO_NV, + ePhysicalDeviceLegacyDitheringFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LEGACY_DITHERING_FEATURES_EXT, + ePhysicalDevicePipelineProtectedAccessFeaturesEXT = 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_PROTECTED_ACCESS_FEATURES_EXT, #if defined( VK_USE_PLATFORM_ANDROID_KHR ) ePhysicalDeviceExternalFormatResolveFeaturesANDROID = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FORMAT_RESOLVE_FEATURES_ANDROID, ePhysicalDeviceExternalFormatResolvePropertiesANDROID = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FORMAT_RESOLVE_PROPERTIES_ANDROID, @@ -1342,12 +1324,29 @@ namespace VULKAN_HPP_NAMESPACE ePhysicalDeviceMaintenance5PropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_5_PROPERTIES_KHR, eRenderingAreaInfoKHR = VK_STRUCTURE_TYPE_RENDERING_AREA_INFO_KHR, eDeviceImageSubresourceInfoKHR = VK_STRUCTURE_TYPE_DEVICE_IMAGE_SUBRESOURCE_INFO_KHR, + eSubresourceLayout2KHR = VK_STRUCTURE_TYPE_SUBRESOURCE_LAYOUT_2_KHR, + eSubresourceLayout2EXT = VK_STRUCTURE_TYPE_SUBRESOURCE_LAYOUT_2_EXT, + eImageSubresource2KHR = VK_STRUCTURE_TYPE_IMAGE_SUBRESOURCE_2_KHR, + eImageSubresource2EXT = VK_STRUCTURE_TYPE_IMAGE_SUBRESOURCE_2_EXT, ePipelineCreateFlags2CreateInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_CREATE_FLAGS_2_CREATE_INFO_KHR, eBufferUsageFlags2CreateInfoKHR = VK_STRUCTURE_TYPE_BUFFER_USAGE_FLAGS_2_CREATE_INFO_KHR, + ePhysicalDeviceAntiLagFeaturesAMD = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ANTI_LAG_FEATURES_AMD, + eAntiLagDataAMD = VK_STRUCTURE_TYPE_ANTI_LAG_DATA_AMD, + eAntiLagPresentationInfoAMD = VK_STRUCTURE_TYPE_ANTI_LAG_PRESENTATION_INFO_AMD, ePhysicalDeviceRayTracingPositionFetchFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_POSITION_FETCH_FEATURES_KHR, ePhysicalDeviceShaderObjectFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_OBJECT_FEATURES_EXT, ePhysicalDeviceShaderObjectPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_OBJECT_PROPERTIES_EXT, eShaderCreateInfoEXT = VK_STRUCTURE_TYPE_SHADER_CREATE_INFO_EXT, + ePhysicalDevicePipelineBinaryFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_BINARY_FEATURES_KHR, + ePipelineBinaryCreateInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_BINARY_CREATE_INFO_KHR, + ePipelineBinaryInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_BINARY_INFO_KHR, + ePipelineBinaryKeyKHR = VK_STRUCTURE_TYPE_PIPELINE_BINARY_KEY_KHR, + ePhysicalDevicePipelineBinaryPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_BINARY_PROPERTIES_KHR, + eReleaseCapturedPipelineDataInfoKHR = VK_STRUCTURE_TYPE_RELEASE_CAPTURED_PIPELINE_DATA_INFO_KHR, + ePipelineBinaryDataInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_BINARY_DATA_INFO_KHR, + ePipelineCreateInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_CREATE_INFO_KHR, + eDevicePipelineBinaryInternalCacheControlKHR = VK_STRUCTURE_TYPE_DEVICE_PIPELINE_BINARY_INTERNAL_CACHE_CONTROL_KHR, + ePipelineBinaryHandlesInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_BINARY_HANDLES_INFO_KHR, ePhysicalDeviceTilePropertiesFeaturesQCOM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TILE_PROPERTIES_FEATURES_QCOM, eTilePropertiesQCOM = VK_STRUCTURE_TYPE_TILE_PROPERTIES_QCOM, ePhysicalDeviceAmigoProfilingFeaturesSEC = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_AMIGO_PROFILING_FEATURES_SEC, @@ -1357,6 +1356,12 @@ namespace VULKAN_HPP_NAMESPACE ePhysicalDeviceRayTracingInvocationReorderPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_INVOCATION_REORDER_PROPERTIES_NV, ePhysicalDeviceExtendedSparseAddressSpaceFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_SPARSE_ADDRESS_SPACE_FEATURES_NV, ePhysicalDeviceExtendedSparseAddressSpacePropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_SPARSE_ADDRESS_SPACE_PROPERTIES_NV, + ePhysicalDeviceMutableDescriptorTypeFeaturesEXT = 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MUTABLE_DESCRIPTOR_TYPE_FEATURES_EXT, + ePhysicalDeviceMutableDescriptorTypeFeaturesVALVE = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MUTABLE_DESCRIPTOR_TYPE_FEATURES_VALVE, + eMutableDescriptorTypeCreateInfoEXT = VK_STRUCTURE_TYPE_MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_EXT, + eMutableDescriptorTypeCreateInfoVALVE = VK_STRUCTURE_TYPE_MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_VALVE, + ePhysicalDeviceLegacyVertexAttributesFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LEGACY_VERTEX_ATTRIBUTES_FEATURES_EXT, + ePhysicalDeviceLegacyVertexAttributesPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LEGACY_VERTEX_ATTRIBUTES_PROPERTIES_EXT, eLayerSettingsCreateInfoEXT = VK_STRUCTURE_TYPE_LAYER_SETTINGS_CREATE_INFO_EXT, ePhysicalDeviceShaderCoreBuiltinsFeaturesARM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_BUILTINS_FEATURES_ARM, ePhysicalDeviceShaderCoreBuiltinsPropertiesARM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_BUILTINS_PROPERTIES_ARM, @@ -1376,6 +1381,9 @@ namespace VULKAN_HPP_NAMESPACE ePhysicalDeviceCooperativeMatrixPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_PROPERTIES_KHR, ePhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_RENDER_AREAS_FEATURES_QCOM, eMultiviewPerViewRenderAreasRenderPassBeginInfoQCOM = VK_STRUCTURE_TYPE_MULTIVIEW_PER_VIEW_RENDER_AREAS_RENDER_PASS_BEGIN_INFO_QCOM, + ePhysicalDeviceComputeShaderDerivativesFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_FEATURES_KHR, + ePhysicalDeviceComputeShaderDerivativesFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_FEATURES_NV, + ePhysicalDeviceComputeShaderDerivativesPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_PROPERTIES_KHR, eVideoDecodeAv1CapabilitiesKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_AV1_CAPABILITIES_KHR, eVideoDecodeAv1PictureInfoKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_AV1_PICTURE_INFO_KHR, eVideoDecodeAv1ProfileInfoKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_AV1_PROFILE_INFO_KHR, @@ -1395,6 +1403,10 @@ namespace VULKAN_HPP_NAMESPACE ePhysicalDeviceCubicClampFeaturesQCOM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUBIC_CLAMP_FEATURES_QCOM, ePhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ATTACHMENT_FEEDBACK_LOOP_DYNAMIC_STATE_FEATURES_EXT, ePhysicalDeviceVertexAttributeDivisorPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_KHR, + ePipelineVertexInputDivisorStateCreateInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_KHR, + ePipelineVertexInputDivisorStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_EXT, + ePhysicalDeviceVertexAttributeDivisorFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_KHR, + ePhysicalDeviceVertexAttributeDivisorFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_EXT, ePhysicalDeviceShaderFloatControls2FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT_CONTROLS_2_FEATURES_KHR, #if defined( VK_USE_PLATFORM_SCREEN_QNX ) eScreenBufferPropertiesQNX = VK_STRUCTURE_TYPE_SCREEN_BUFFER_PROPERTIES_QNX, @@ -1403,21 +1415,42 @@ namespace VULKAN_HPP_NAMESPACE eExternalFormatQNX = VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_QNX, ePhysicalDeviceExternalMemoryScreenBufferFeaturesQNX = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_SCREEN_BUFFER_FEATURES_QNX, #endif /*VK_USE_PLATFORM_SCREEN_QNX*/ - 
ePhysicalDeviceLayeredDriverPropertiesMSFT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LAYERED_DRIVER_PROPERTIES_MSFT, - ePhysicalDeviceShaderExpectAssumeFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_EXPECT_ASSUME_FEATURES_KHR, - ePhysicalDeviceMaintenance6FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_6_FEATURES_KHR, - ePhysicalDeviceMaintenance6PropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_6_PROPERTIES_KHR, - eBindMemoryStatusKHR = VK_STRUCTURE_TYPE_BIND_MEMORY_STATUS_KHR, - eBindDescriptorSetsInfoKHR = VK_STRUCTURE_TYPE_BIND_DESCRIPTOR_SETS_INFO_KHR, - ePushConstantsInfoKHR = VK_STRUCTURE_TYPE_PUSH_CONSTANTS_INFO_KHR, - ePushDescriptorSetInfoKHR = VK_STRUCTURE_TYPE_PUSH_DESCRIPTOR_SET_INFO_KHR, - ePushDescriptorSetWithTemplateInfoKHR = VK_STRUCTURE_TYPE_PUSH_DESCRIPTOR_SET_WITH_TEMPLATE_INFO_KHR, - eSetDescriptorBufferOffsetsInfoEXT = VK_STRUCTURE_TYPE_SET_DESCRIPTOR_BUFFER_OFFSETS_INFO_EXT, - eBindDescriptorBufferEmbeddedSamplersInfoEXT = VK_STRUCTURE_TYPE_BIND_DESCRIPTOR_BUFFER_EMBEDDED_SAMPLERS_INFO_EXT, - ePhysicalDeviceDescriptorPoolOverallocationFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_POOL_OVERALLOCATION_FEATURES_NV, - ePhysicalDeviceRawAccessChainsFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAW_ACCESS_CHAINS_FEATURES_NV, - ePhysicalDeviceShaderAtomicFloat16VectorFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_FLOAT16_VECTOR_FEATURES_NV, - ePhysicalDeviceRayTracingValidationFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_VALIDATION_FEATURES_NV + ePhysicalDeviceLayeredDriverPropertiesMSFT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LAYERED_DRIVER_PROPERTIES_MSFT, + ePhysicalDeviceIndexTypeUint8FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_KHR, + ePhysicalDeviceIndexTypeUint8FeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_EXT, + ePhysicalDeviceLineRasterizationFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_KHR, + ePhysicalDeviceLineRasterizationFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_EXT, + ePipelineRasterizationLineStateCreateInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_KHR, + ePipelineRasterizationLineStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_EXT, + ePhysicalDeviceLineRasterizationPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES_KHR, + ePhysicalDeviceLineRasterizationPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES_EXT, + eCalibratedTimestampInfoKHR = VK_STRUCTURE_TYPE_CALIBRATED_TIMESTAMP_INFO_KHR, + eCalibratedTimestampInfoEXT = VK_STRUCTURE_TYPE_CALIBRATED_TIMESTAMP_INFO_EXT, + ePhysicalDeviceShaderExpectAssumeFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_EXPECT_ASSUME_FEATURES_KHR, + ePhysicalDeviceMaintenance6FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_6_FEATURES_KHR, + ePhysicalDeviceMaintenance6PropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_6_PROPERTIES_KHR, + eBindMemoryStatusKHR = VK_STRUCTURE_TYPE_BIND_MEMORY_STATUS_KHR, + eBindDescriptorSetsInfoKHR = VK_STRUCTURE_TYPE_BIND_DESCRIPTOR_SETS_INFO_KHR, + ePushConstantsInfoKHR = VK_STRUCTURE_TYPE_PUSH_CONSTANTS_INFO_KHR, + ePushDescriptorSetInfoKHR = VK_STRUCTURE_TYPE_PUSH_DESCRIPTOR_SET_INFO_KHR, + ePushDescriptorSetWithTemplateInfoKHR = VK_STRUCTURE_TYPE_PUSH_DESCRIPTOR_SET_WITH_TEMPLATE_INFO_KHR, + eSetDescriptorBufferOffsetsInfoEXT = 
VK_STRUCTURE_TYPE_SET_DESCRIPTOR_BUFFER_OFFSETS_INFO_EXT, + eBindDescriptorBufferEmbeddedSamplersInfoEXT = VK_STRUCTURE_TYPE_BIND_DESCRIPTOR_BUFFER_EMBEDDED_SAMPLERS_INFO_EXT, + ePhysicalDeviceDescriptorPoolOverallocationFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_POOL_OVERALLOCATION_FEATURES_NV, + ePhysicalDeviceRawAccessChainsFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAW_ACCESS_CHAINS_FEATURES_NV, + ePhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_RELAXED_EXTENDED_INSTRUCTION_FEATURES_KHR, + ePhysicalDeviceCommandBufferInheritanceFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMMAND_BUFFER_INHERITANCE_FEATURES_NV, + ePhysicalDeviceMaintenance7FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_7_FEATURES_KHR, + ePhysicalDeviceMaintenance7PropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_7_PROPERTIES_KHR, + ePhysicalDeviceLayeredApiPropertiesListKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LAYERED_API_PROPERTIES_LIST_KHR, + ePhysicalDeviceLayeredApiPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LAYERED_API_PROPERTIES_KHR, + ePhysicalDeviceLayeredApiVulkanPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LAYERED_API_VULKAN_PROPERTIES_KHR, + ePhysicalDeviceShaderAtomicFloat16VectorFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_FLOAT16_VECTOR_FEATURES_NV, + ePhysicalDeviceShaderReplicatedCompositesFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_REPLICATED_COMPOSITES_FEATURES_EXT, + ePhysicalDeviceRayTracingValidationFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_VALIDATION_FEATURES_NV, + ePhysicalDeviceImageAlignmentControlFeaturesMESA = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_ALIGNMENT_CONTROL_FEATURES_MESA, + ePhysicalDeviceImageAlignmentControlPropertiesMESA = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_ALIGNMENT_CONTROL_PROPERTIES_MESA, + eImageAlignmentControlCreateInfoMESA = VK_STRUCTURE_TYPE_IMAGE_ALIGNMENT_CONTROL_CREATE_INFO_MESA }; enum class PipelineCacheHeaderVersion @@ -1484,11 +1517,13 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VK_USE_PLATFORM_FUCHSIA*/ eMicromapEXT = VK_OBJECT_TYPE_MICROMAP_EXT, eOpticalFlowSessionNV = VK_OBJECT_TYPE_OPTICAL_FLOW_SESSION_NV, - eShaderEXT = VK_OBJECT_TYPE_SHADER_EXT + eShaderEXT = VK_OBJECT_TYPE_SHADER_EXT, + ePipelineBinaryKHR = VK_OBJECT_TYPE_PIPELINE_BINARY_KHR }; enum class VendorId { + eKhronos = VK_VENDOR_ID_KHRONOS, eVIV = VK_VENDOR_ID_VIV, eVSI = VK_VENDOR_ID_VSI, eKazan = VK_VENDOR_ID_KAZAN, @@ -1801,6 +1836,7 @@ namespace VULKAN_HPP_NAMESPACE ePvrtc14BppSrgbBlockIMG = VK_FORMAT_PVRTC1_4BPP_SRGB_BLOCK_IMG, ePvrtc22BppSrgbBlockIMG = VK_FORMAT_PVRTC2_2BPP_SRGB_BLOCK_IMG, ePvrtc24BppSrgbBlockIMG = VK_FORMAT_PVRTC2_4BPP_SRGB_BLOCK_IMG, + eR16G16Sfixed5NV = VK_FORMAT_R16G16_SFIXED5_NV, eR16G16S105NV = VK_FORMAT_R16G16_S10_5_NV, eA1B5G5R5UnormPack16KHR = VK_FORMAT_A1B5G5R5_UNORM_PACK16_KHR, eA8UnormKHR = VK_FORMAT_A8_UNORM_KHR @@ -1843,11 +1879,11 @@ namespace VULKAN_HPP_NAMESPACE eCositedChromaSamplesKHR = VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT_KHR, eSampledImageFilterMinmax = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT, eSampledImageFilterMinmaxEXT = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT_EXT, - eSampledImageFilterCubicEXT = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT, - eSampledImageFilterCubicIMG = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_IMG, eVideoDecodeOutputKHR = VK_FORMAT_FEATURE_VIDEO_DECODE_OUTPUT_BIT_KHR, eVideoDecodeDpbKHR = VK_FORMAT_FEATURE_VIDEO_DECODE_DPB_BIT_KHR, 
eAccelerationStructureVertexBufferKHR = VK_FORMAT_FEATURE_ACCELERATION_STRUCTURE_VERTEX_BUFFER_BIT_KHR, + eSampledImageFilterCubicEXT = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT, + eSampledImageFilterCubicIMG = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_IMG, eFragmentDensityMapEXT = VK_FORMAT_FEATURE_FRAGMENT_DENSITY_MAP_BIT_EXT, eFragmentShadingRateAttachmentKHR = VK_FORMAT_FEATURE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR, eVideoEncodeInputKHR = VK_FORMAT_FEATURE_VIDEO_ENCODE_INPUT_BIT_KHR, @@ -1870,10 +1906,10 @@ namespace VULKAN_HPP_NAMESPACE FormatFeatureFlagBits::eSampledImageYcbcrConversionSeparateReconstructionFilter | FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicit | FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicitForceable | FormatFeatureFlagBits::eDisjoint | - FormatFeatureFlagBits::eCositedChromaSamples | FormatFeatureFlagBits::eSampledImageFilterMinmax | FormatFeatureFlagBits::eSampledImageFilterCubicEXT | - FormatFeatureFlagBits::eVideoDecodeOutputKHR | FormatFeatureFlagBits::eVideoDecodeDpbKHR | FormatFeatureFlagBits::eAccelerationStructureVertexBufferKHR | - FormatFeatureFlagBits::eFragmentDensityMapEXT | FormatFeatureFlagBits::eFragmentShadingRateAttachmentKHR | FormatFeatureFlagBits::eVideoEncodeInputKHR | - FormatFeatureFlagBits::eVideoEncodeDpbKHR; + FormatFeatureFlagBits::eCositedChromaSamples | FormatFeatureFlagBits::eSampledImageFilterMinmax | FormatFeatureFlagBits::eVideoDecodeOutputKHR | + FormatFeatureFlagBits::eVideoDecodeDpbKHR | FormatFeatureFlagBits::eAccelerationStructureVertexBufferKHR | + FormatFeatureFlagBits::eSampledImageFilterCubicEXT | FormatFeatureFlagBits::eFragmentDensityMapEXT | + FormatFeatureFlagBits::eFragmentShadingRateAttachmentKHR | FormatFeatureFlagBits::eVideoEncodeInputKHR | FormatFeatureFlagBits::eVideoEncodeDpbKHR; }; enum class ImageCreateFlagBits : VkImageCreateFlags @@ -1949,9 +1985,9 @@ namespace VULKAN_HPP_NAMESPACE eVideoDecodeDstKHR = VK_IMAGE_USAGE_VIDEO_DECODE_DST_BIT_KHR, eVideoDecodeSrcKHR = VK_IMAGE_USAGE_VIDEO_DECODE_SRC_BIT_KHR, eVideoDecodeDpbKHR = VK_IMAGE_USAGE_VIDEO_DECODE_DPB_BIT_KHR, + eFragmentDensityMapEXT = VK_IMAGE_USAGE_FRAGMENT_DENSITY_MAP_BIT_EXT, eFragmentShadingRateAttachmentKHR = VK_IMAGE_USAGE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR, eShadingRateImageNV = VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV, - eFragmentDensityMapEXT = VK_IMAGE_USAGE_FRAGMENT_DENSITY_MAP_BIT_EXT, eHostTransferEXT = VK_IMAGE_USAGE_HOST_TRANSFER_BIT_EXT, eVideoEncodeDstKHR = VK_IMAGE_USAGE_VIDEO_ENCODE_DST_BIT_KHR, eVideoEncodeSrcKHR = VK_IMAGE_USAGE_VIDEO_ENCODE_SRC_BIT_KHR, @@ -1972,7 +2008,7 @@ namespace VULKAN_HPP_NAMESPACE ImageUsageFlagBits::eTransferSrc | ImageUsageFlagBits::eTransferDst | ImageUsageFlagBits::eSampled | ImageUsageFlagBits::eStorage | ImageUsageFlagBits::eColorAttachment | ImageUsageFlagBits::eDepthStencilAttachment | ImageUsageFlagBits::eTransientAttachment | ImageUsageFlagBits::eInputAttachment | ImageUsageFlagBits::eVideoDecodeDstKHR | ImageUsageFlagBits::eVideoDecodeSrcKHR | - ImageUsageFlagBits::eVideoDecodeDpbKHR | ImageUsageFlagBits::eFragmentShadingRateAttachmentKHR | ImageUsageFlagBits::eFragmentDensityMapEXT | + ImageUsageFlagBits::eVideoDecodeDpbKHR | ImageUsageFlagBits::eFragmentDensityMapEXT | ImageUsageFlagBits::eFragmentShadingRateAttachmentKHR | ImageUsageFlagBits::eHostTransferEXT | ImageUsageFlagBits::eVideoEncodeDstKHR | ImageUsageFlagBits::eVideoEncodeSrcKHR | ImageUsageFlagBits::eVideoEncodeDpbKHR | 
ImageUsageFlagBits::eAttachmentFeedbackLoopEXT | ImageUsageFlagBits::eInvocationMaskHUAWEI | ImageUsageFlagBits::eSampleWeightQCOM | ImageUsageFlagBits::eSampleBlockMatchQCOM; @@ -2155,14 +2191,14 @@ namespace VULKAN_HPP_NAMESPACE eAccelerationStructureBuildNV = VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_NV, eRayTracingShaderKHR = VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_KHR, eRayTracingShaderNV = VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_NV, + eFragmentDensityProcessEXT = VK_PIPELINE_STAGE_FRAGMENT_DENSITY_PROCESS_BIT_EXT, eFragmentShadingRateAttachmentKHR = VK_PIPELINE_STAGE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR, eShadingRateImageNV = VK_PIPELINE_STAGE_SHADING_RATE_IMAGE_BIT_NV, + eCommandPreprocessNV = VK_PIPELINE_STAGE_COMMAND_PREPROCESS_BIT_NV, eTaskShaderEXT = VK_PIPELINE_STAGE_TASK_SHADER_BIT_EXT, eTaskShaderNV = VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV, eMeshShaderEXT = VK_PIPELINE_STAGE_MESH_SHADER_BIT_EXT, - eMeshShaderNV = VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV, - eFragmentDensityProcessEXT = VK_PIPELINE_STAGE_FRAGMENT_DENSITY_PROCESS_BIT_EXT, - eCommandPreprocessNV = VK_PIPELINE_STAGE_COMMAND_PREPROCESS_BIT_NV + eMeshShaderNV = VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV }; using PipelineStageFlags = Flags; @@ -2178,9 +2214,9 @@ namespace VULKAN_HPP_NAMESPACE PipelineStageFlagBits::eColorAttachmentOutput | PipelineStageFlagBits::eComputeShader | PipelineStageFlagBits::eTransfer | PipelineStageFlagBits::eBottomOfPipe | PipelineStageFlagBits::eHost | PipelineStageFlagBits::eAllGraphics | PipelineStageFlagBits::eAllCommands | PipelineStageFlagBits::eNone | PipelineStageFlagBits::eTransformFeedbackEXT | PipelineStageFlagBits::eConditionalRenderingEXT | - PipelineStageFlagBits::eAccelerationStructureBuildKHR | PipelineStageFlagBits::eRayTracingShaderKHR | - PipelineStageFlagBits::eFragmentShadingRateAttachmentKHR | PipelineStageFlagBits::eTaskShaderEXT | PipelineStageFlagBits::eMeshShaderEXT | - PipelineStageFlagBits::eFragmentDensityProcessEXT | PipelineStageFlagBits::eCommandPreprocessNV; + PipelineStageFlagBits::eAccelerationStructureBuildKHR | PipelineStageFlagBits::eRayTracingShaderKHR | PipelineStageFlagBits::eFragmentDensityProcessEXT | + PipelineStageFlagBits::eFragmentShadingRateAttachmentKHR | PipelineStageFlagBits::eCommandPreprocessNV | PipelineStageFlagBits::eTaskShaderEXT | + PipelineStageFlagBits::eMeshShaderEXT; }; enum class MemoryMapFlagBits : VkMemoryMapFlags @@ -2523,9 +2559,9 @@ namespace VULKAN_HPP_NAMESPACE eVideoDecodeSrcKHR = VK_IMAGE_LAYOUT_VIDEO_DECODE_SRC_KHR, eVideoDecodeDpbKHR = VK_IMAGE_LAYOUT_VIDEO_DECODE_DPB_KHR, eSharedPresentKHR = VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR, + eFragmentDensityMapOptimalEXT = VK_IMAGE_LAYOUT_FRAGMENT_DENSITY_MAP_OPTIMAL_EXT, eFragmentShadingRateAttachmentOptimalKHR = VK_IMAGE_LAYOUT_FRAGMENT_SHADING_RATE_ATTACHMENT_OPTIMAL_KHR, eShadingRateOptimalNV = VK_IMAGE_LAYOUT_SHADING_RATE_OPTIMAL_NV, - eFragmentDensityMapOptimalEXT = VK_IMAGE_LAYOUT_FRAGMENT_DENSITY_MAP_OPTIMAL_EXT, eRenderingLocalReadKHR = VK_IMAGE_LAYOUT_RENDERING_LOCAL_READ_KHR, eVideoEncodeDstKHR = VK_IMAGE_LAYOUT_VIDEO_ENCODE_DST_KHR, eVideoEncodeSrcKHR = VK_IMAGE_LAYOUT_VIDEO_ENCODE_SRC_KHR, @@ -2779,8 +2815,6 @@ namespace VULKAN_HPP_NAMESPACE eExclusiveScissorEnableNV = VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_ENABLE_NV, eExclusiveScissorNV = VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_NV, eFragmentShadingRateKHR = VK_DYNAMIC_STATE_FRAGMENT_SHADING_RATE_KHR, - eLineStippleKHR = VK_DYNAMIC_STATE_LINE_STIPPLE_KHR, - eLineStippleEXT = VK_DYNAMIC_STATE_LINE_STIPPLE_EXT, 
eVertexInputEXT = VK_DYNAMIC_STATE_VERTEX_INPUT_EXT, ePatchControlPointsEXT = VK_DYNAMIC_STATE_PATCH_CONTROL_POINTS_EXT, eLogicOpEXT = VK_DYNAMIC_STATE_LOGIC_OP_EXT, @@ -2816,7 +2850,9 @@ namespace VULKAN_HPP_NAMESPACE eShadingRateImageEnableNV = VK_DYNAMIC_STATE_SHADING_RATE_IMAGE_ENABLE_NV, eRepresentativeFragmentTestEnableNV = VK_DYNAMIC_STATE_REPRESENTATIVE_FRAGMENT_TEST_ENABLE_NV, eCoverageReductionModeNV = VK_DYNAMIC_STATE_COVERAGE_REDUCTION_MODE_NV, - eAttachmentFeedbackLoopEnableEXT = VK_DYNAMIC_STATE_ATTACHMENT_FEEDBACK_LOOP_ENABLE_EXT + eAttachmentFeedbackLoopEnableEXT = VK_DYNAMIC_STATE_ATTACHMENT_FEEDBACK_LOOP_ENABLE_EXT, + eLineStippleKHR = VK_DYNAMIC_STATE_LINE_STIPPLE_KHR, + eLineStippleEXT = VK_DYNAMIC_STATE_LINE_STIPPLE_EXT }; enum class FrontFace @@ -3241,9 +3277,9 @@ namespace VULKAN_HPP_NAMESPACE ePushDescriptorKHR = VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR, eDescriptorBufferEXT = VK_DESCRIPTOR_SET_LAYOUT_CREATE_DESCRIPTOR_BUFFER_BIT_EXT, eEmbeddedImmutableSamplersEXT = VK_DESCRIPTOR_SET_LAYOUT_CREATE_EMBEDDED_IMMUTABLE_SAMPLERS_BIT_EXT, + eIndirectBindableNV = VK_DESCRIPTOR_SET_LAYOUT_CREATE_INDIRECT_BINDABLE_BIT_NV, eHostOnlyPoolEXT = VK_DESCRIPTOR_SET_LAYOUT_CREATE_HOST_ONLY_POOL_BIT_EXT, eHostOnlyPoolVALVE = VK_DESCRIPTOR_SET_LAYOUT_CREATE_HOST_ONLY_POOL_BIT_VALVE, - eIndirectBindableNV = VK_DESCRIPTOR_SET_LAYOUT_CREATE_INDIRECT_BINDABLE_BIT_NV, ePerStageNV = VK_DESCRIPTOR_SET_LAYOUT_CREATE_PER_STAGE_BIT_NV }; @@ -3256,7 +3292,7 @@ namespace VULKAN_HPP_NAMESPACE static VULKAN_HPP_CONST_OR_CONSTEXPR DescriptorSetLayoutCreateFlags allFlags = DescriptorSetLayoutCreateFlagBits::eUpdateAfterBindPool | DescriptorSetLayoutCreateFlagBits::ePushDescriptorKHR | DescriptorSetLayoutCreateFlagBits::eDescriptorBufferEXT | DescriptorSetLayoutCreateFlagBits::eEmbeddedImmutableSamplersEXT | - DescriptorSetLayoutCreateFlagBits::eHostOnlyPoolEXT | DescriptorSetLayoutCreateFlagBits::eIndirectBindableNV | + DescriptorSetLayoutCreateFlagBits::eIndirectBindableNV | DescriptorSetLayoutCreateFlagBits::eHostOnlyPoolEXT | DescriptorSetLayoutCreateFlagBits::ePerStageNV; }; @@ -3277,10 +3313,10 @@ namespace VULKAN_HPP_NAMESPACE eInlineUniformBlockEXT = VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT, eAccelerationStructureKHR = VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR, eAccelerationStructureNV = VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV, - eMutableEXT = VK_DESCRIPTOR_TYPE_MUTABLE_EXT, - eMutableVALVE = VK_DESCRIPTOR_TYPE_MUTABLE_VALVE, eSampleWeightImageQCOM = VK_DESCRIPTOR_TYPE_SAMPLE_WEIGHT_IMAGE_QCOM, - eBlockMatchImageQCOM = VK_DESCRIPTOR_TYPE_BLOCK_MATCH_IMAGE_QCOM + eBlockMatchImageQCOM = VK_DESCRIPTOR_TYPE_BLOCK_MATCH_IMAGE_QCOM, + eMutableEXT = VK_DESCRIPTOR_TYPE_MUTABLE_EXT, + eMutableVALVE = VK_DESCRIPTOR_TYPE_MUTABLE_VALVE }; enum class DescriptorPoolResetFlagBits : VkDescriptorPoolResetFlags @@ -3326,9 +3362,9 @@ namespace VULKAN_HPP_NAMESPACE eAccelerationStructureReadNV = VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_NV, eAccelerationStructureWriteKHR = VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_KHR, eAccelerationStructureWriteNV = VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_NV, + eFragmentDensityMapReadEXT = VK_ACCESS_FRAGMENT_DENSITY_MAP_READ_BIT_EXT, eFragmentShadingRateAttachmentReadKHR = VK_ACCESS_FRAGMENT_SHADING_RATE_ATTACHMENT_READ_BIT_KHR, eShadingRateImageReadNV = VK_ACCESS_SHADING_RATE_IMAGE_READ_BIT_NV, - eFragmentDensityMapReadEXT = VK_ACCESS_FRAGMENT_DENSITY_MAP_READ_BIT_EXT, eCommandPreprocessReadNV = VK_ACCESS_COMMAND_PREPROCESS_READ_BIT_NV, 
eCommandPreprocessWriteNV = VK_ACCESS_COMMAND_PREPROCESS_WRITE_BIT_NV }; @@ -3346,8 +3382,8 @@ namespace VULKAN_HPP_NAMESPACE AccessFlagBits::eTransferRead | AccessFlagBits::eTransferWrite | AccessFlagBits::eHostRead | AccessFlagBits::eHostWrite | AccessFlagBits::eMemoryRead | AccessFlagBits::eMemoryWrite | AccessFlagBits::eNone | AccessFlagBits::eTransformFeedbackWriteEXT | AccessFlagBits::eTransformFeedbackCounterReadEXT | AccessFlagBits::eTransformFeedbackCounterWriteEXT | AccessFlagBits::eConditionalRenderingReadEXT | AccessFlagBits::eColorAttachmentReadNoncoherentEXT | - AccessFlagBits::eAccelerationStructureReadKHR | AccessFlagBits::eAccelerationStructureWriteKHR | AccessFlagBits::eFragmentShadingRateAttachmentReadKHR | - AccessFlagBits::eFragmentDensityMapReadEXT | AccessFlagBits::eCommandPreprocessReadNV | AccessFlagBits::eCommandPreprocessWriteNV; + AccessFlagBits::eAccelerationStructureReadKHR | AccessFlagBits::eAccelerationStructureWriteKHR | AccessFlagBits::eFragmentDensityMapReadEXT | + AccessFlagBits::eFragmentShadingRateAttachmentReadKHR | AccessFlagBits::eCommandPreprocessReadNV | AccessFlagBits::eCommandPreprocessWriteNV; }; enum class AttachmentDescriptionFlagBits : VkAttachmentDescriptionFlags @@ -3564,6 +3600,51 @@ namespace VULKAN_HPP_NAMESPACE eUint8EXT = VK_INDEX_TYPE_UINT8_EXT }; + //========================= + //=== Index Type Traits === + //========================= + + template <typename T> + struct IndexTypeValue + { + }; + + template <> + struct IndexTypeValue<uint16_t> + { + static VULKAN_HPP_CONST_OR_CONSTEXPR IndexType value = IndexType::eUint16; + }; + + template <> + struct CppType<IndexType, IndexType::eUint16> + { + using Type = uint16_t; + }; + + template <> + struct IndexTypeValue<uint32_t> + { + static VULKAN_HPP_CONST_OR_CONSTEXPR IndexType value = IndexType::eUint32; + }; + + template <> + struct CppType<IndexType, IndexType::eUint32> + { + using Type = uint32_t; + }; + + template <> + struct IndexTypeValue<uint8_t> + { + static VULKAN_HPP_CONST_OR_CONSTEXPR IndexType value = IndexType::eUint8KHR; + }; + + template <> + struct CppType<IndexType, IndexType::eUint8KHR> + { + using Type = uint8_t; + }; + enum class StencilFaceFlagBits : VkStencilFaceFlags { eFront = VK_STENCIL_FACE_FRONT_BIT, @@ -3586,6 +3667,7 @@ namespace VULKAN_HPP_NAMESPACE { eInline = VK_SUBPASS_CONTENTS_INLINE, eSecondaryCommandBuffers = VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS, + eInlineAndSecondaryCommandBuffersKHR = VK_SUBPASS_CONTENTS_INLINE_AND_SECONDARY_COMMAND_BUFFERS_KHR, eInlineAndSecondaryCommandBuffersEXT = VK_SUBPASS_CONTENTS_INLINE_AND_SECONDARY_COMMAND_BUFFERS_EXT }; @@ -3947,7 +4029,8 @@ namespace VULKAN_HPP_NAMESPACE eMesaDozen = VK_DRIVER_ID_MESA_DOZEN, eMesaNvk = VK_DRIVER_ID_MESA_NVK, eImaginationOpenSourceMESA = VK_DRIVER_ID_IMAGINATION_OPEN_SOURCE_MESA, - eMesaAgxv = VK_DRIVER_ID_MESA_AGXV + eMesaHoneykrisp = VK_DRIVER_ID_MESA_HONEYKRISP, + eReserved27 = VK_DRIVER_ID_RESERVED_27 }; using DriverIdKHR = DriverId; @@ -4274,8 +4357,9 @@ namespace VULKAN_HPP_NAMESPACE eContentsSecondaryCommandBuffers = VK_RENDERING_CONTENTS_SECONDARY_COMMAND_BUFFERS_BIT, eSuspending = VK_RENDERING_SUSPENDING_BIT, eResuming = VK_RENDERING_RESUMING_BIT, - eContentsInlineEXT = VK_RENDERING_CONTENTS_INLINE_BIT_EXT, - eEnableLegacyDitheringEXT = VK_RENDERING_ENABLE_LEGACY_DITHERING_BIT_EXT + eEnableLegacyDitheringEXT = VK_RENDERING_ENABLE_LEGACY_DITHERING_BIT_EXT, + eContentsInlineKHR = VK_RENDERING_CONTENTS_INLINE_BIT_KHR, + eContentsInlineEXT = VK_RENDERING_CONTENTS_INLINE_BIT_EXT }; using RenderingFlagBitsKHR = RenderingFlagBits; @@ -4287,8 +4371,8 @@ namespace VULKAN_HPP_NAMESPACE { static
VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; static VULKAN_HPP_CONST_OR_CONSTEXPR RenderingFlags allFlags = RenderingFlagBits::eContentsSecondaryCommandBuffers | RenderingFlagBits::eSuspending | - RenderingFlagBits::eResuming | RenderingFlagBits::eContentsInlineEXT | - RenderingFlagBits::eEnableLegacyDitheringEXT; + RenderingFlagBits::eResuming | RenderingFlagBits::eEnableLegacyDitheringEXT | + RenderingFlagBits::eContentsInlineKHR; }; enum class FormatFeatureFlagBits2 : VkFormatFeatureFlags2 @@ -6213,7 +6297,7 @@ namespace VULKAN_HPP_NAMESPACE enum class VideoEncodeCapabilityFlagBitsKHR : VkVideoEncodeCapabilityFlagsKHR { ePrecedingExternallyEncodedBytes = VK_VIDEO_ENCODE_CAPABILITY_PRECEDING_EXTERNALLY_ENCODED_BYTES_BIT_KHR, - eInsufficientstreamBufferRangeDetectionBit = VK_VIDEO_ENCODE_CAPABILITY_INSUFFICIENT_BITSTREAM_BUFFER_RANGE_DETECTION_BIT_KHR + eInsufficientBitstreamBufferRangeDetection = VK_VIDEO_ENCODE_CAPABILITY_INSUFFICIENT_BITSTREAM_BUFFER_RANGE_DETECTION_BIT_KHR }; using VideoEncodeCapabilityFlagsKHR = Flags; @@ -6223,14 +6307,14 @@ namespace VULKAN_HPP_NAMESPACE { static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; static VULKAN_HPP_CONST_OR_CONSTEXPR VideoEncodeCapabilityFlagsKHR allFlags = - VideoEncodeCapabilityFlagBitsKHR::ePrecedingExternallyEncodedBytes | VideoEncodeCapabilityFlagBitsKHR::eInsufficientstreamBufferRangeDetectionBit; + VideoEncodeCapabilityFlagBitsKHR::ePrecedingExternallyEncodedBytes | VideoEncodeCapabilityFlagBitsKHR::eInsufficientBitstreamBufferRangeDetection; }; enum class VideoEncodeFeedbackFlagBitsKHR : VkVideoEncodeFeedbackFlagsKHR { - estreamBufferOffsetBit = VK_VIDEO_ENCODE_FEEDBACK_BITSTREAM_BUFFER_OFFSET_BIT_KHR, - estreamBytesWrittenBit = VK_VIDEO_ENCODE_FEEDBACK_BITSTREAM_BYTES_WRITTEN_BIT_KHR, - estreamHasOverridesBit = VK_VIDEO_ENCODE_FEEDBACK_BITSTREAM_HAS_OVERRIDES_BIT_KHR + eBitstreamBufferOffset = VK_VIDEO_ENCODE_FEEDBACK_BITSTREAM_BUFFER_OFFSET_BIT_KHR, + eBitstreamBytesWritten = VK_VIDEO_ENCODE_FEEDBACK_BITSTREAM_BYTES_WRITTEN_BIT_KHR, + eBitstreamHasOverrides = VK_VIDEO_ENCODE_FEEDBACK_BITSTREAM_HAS_OVERRIDES_BIT_KHR }; using VideoEncodeFeedbackFlagsKHR = Flags; @@ -6239,9 +6323,9 @@ namespace VULKAN_HPP_NAMESPACE struct FlagTraits { static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; - static VULKAN_HPP_CONST_OR_CONSTEXPR VideoEncodeFeedbackFlagsKHR allFlags = VideoEncodeFeedbackFlagBitsKHR::estreamBufferOffsetBit | - VideoEncodeFeedbackFlagBitsKHR::estreamBytesWrittenBit | - VideoEncodeFeedbackFlagBitsKHR::estreamHasOverridesBit; + static VULKAN_HPP_CONST_OR_CONSTEXPR VideoEncodeFeedbackFlagsKHR allFlags = VideoEncodeFeedbackFlagBitsKHR::eBitstreamBufferOffset | + VideoEncodeFeedbackFlagBitsKHR::eBitstreamBytesWritten | + VideoEncodeFeedbackFlagBitsKHR::eBitstreamHasOverrides; }; enum class VideoEncodeUsageFlagBitsKHR : VkVideoEncodeUsageFlagsKHR @@ -6917,6 +7001,7 @@ namespace VULKAN_HPP_NAMESPACE eDisableOptimization = VK_PIPELINE_CREATE_2_DISABLE_OPTIMIZATION_BIT_KHR, eAllowDerivatives = VK_PIPELINE_CREATE_2_ALLOW_DERIVATIVES_BIT_KHR, eDerivative = VK_PIPELINE_CREATE_2_DERIVATIVE_BIT_KHR, + eEnableLegacyDitheringEXT = VK_PIPELINE_CREATE_2_ENABLE_LEGACY_DITHERING_BIT_EXT, eViewIndexFromDeviceIndex = VK_PIPELINE_CREATE_2_VIEW_INDEX_FROM_DEVICE_INDEX_BIT_KHR, eDispatchBase = VK_PIPELINE_CREATE_2_DISPATCH_BASE_BIT_KHR, eDeferCompileNV = VK_PIPELINE_CREATE_2_DEFER_COMPILE_BIT_NV, @@ -6944,7 +7029,8 @@ namespace VULKAN_HPP_NAMESPACE eNoProtectedAccessEXT = VK_PIPELINE_CREATE_2_NO_PROTECTED_ACCESS_BIT_EXT, 
eProtectedAccessOnlyEXT = VK_PIPELINE_CREATE_2_PROTECTED_ACCESS_ONLY_BIT_EXT, eRayTracingDisplacementMicromapNV = VK_PIPELINE_CREATE_2_RAY_TRACING_DISPLACEMENT_MICROMAP_BIT_NV, - eDescriptorBufferEXT = VK_PIPELINE_CREATE_2_DESCRIPTOR_BUFFER_BIT_EXT + eDescriptorBufferEXT = VK_PIPELINE_CREATE_2_DESCRIPTOR_BUFFER_BIT_EXT, + eCaptureData = VK_PIPELINE_CREATE_2_CAPTURE_DATA_BIT_KHR }; using PipelineCreateFlags2KHR = Flags; @@ -6955,11 +7041,12 @@ namespace VULKAN_HPP_NAMESPACE static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineCreateFlags2KHR allFlags = PipelineCreateFlagBits2KHR::eDisableOptimization | PipelineCreateFlagBits2KHR::eAllowDerivatives | PipelineCreateFlagBits2KHR::eDerivative | - PipelineCreateFlagBits2KHR::eViewIndexFromDeviceIndex | PipelineCreateFlagBits2KHR::eDispatchBase | PipelineCreateFlagBits2KHR::eDeferCompileNV | - PipelineCreateFlagBits2KHR::eCaptureStatistics | PipelineCreateFlagBits2KHR::eCaptureInternalRepresentations | - PipelineCreateFlagBits2KHR::eFailOnPipelineCompileRequired | PipelineCreateFlagBits2KHR::eEarlyReturnOnFailure | - PipelineCreateFlagBits2KHR::eLinkTimeOptimizationEXT | PipelineCreateFlagBits2KHR::eRetainLinkTimeOptimizationInfoEXT | - PipelineCreateFlagBits2KHR::eLibrary | PipelineCreateFlagBits2KHR::eRayTracingSkipTriangles | PipelineCreateFlagBits2KHR::eRayTracingSkipAabbs | + PipelineCreateFlagBits2KHR::eEnableLegacyDitheringEXT | PipelineCreateFlagBits2KHR::eViewIndexFromDeviceIndex | + PipelineCreateFlagBits2KHR::eDispatchBase | PipelineCreateFlagBits2KHR::eDeferCompileNV | PipelineCreateFlagBits2KHR::eCaptureStatistics | + PipelineCreateFlagBits2KHR::eCaptureInternalRepresentations | PipelineCreateFlagBits2KHR::eFailOnPipelineCompileRequired | + PipelineCreateFlagBits2KHR::eEarlyReturnOnFailure | PipelineCreateFlagBits2KHR::eLinkTimeOptimizationEXT | + PipelineCreateFlagBits2KHR::eRetainLinkTimeOptimizationInfoEXT | PipelineCreateFlagBits2KHR::eLibrary | + PipelineCreateFlagBits2KHR::eRayTracingSkipTriangles | PipelineCreateFlagBits2KHR::eRayTracingSkipAabbs | PipelineCreateFlagBits2KHR::eRayTracingNoNullAnyHitShaders | PipelineCreateFlagBits2KHR::eRayTracingNoNullClosestHitShaders | PipelineCreateFlagBits2KHR::eRayTracingNoNullMissShaders | PipelineCreateFlagBits2KHR::eRayTracingNoNullIntersectionShaders | PipelineCreateFlagBits2KHR::eRayTracingShaderGroupHandleCaptureReplay | PipelineCreateFlagBits2KHR::eIndirectBindableNV | @@ -6967,7 +7054,8 @@ namespace VULKAN_HPP_NAMESPACE PipelineCreateFlagBits2KHR::eRenderingFragmentDensityMapAttachmentEXT | PipelineCreateFlagBits2KHR::eRayTracingOpacityMicromapEXT | PipelineCreateFlagBits2KHR::eColorAttachmentFeedbackLoopEXT | PipelineCreateFlagBits2KHR::eDepthStencilAttachmentFeedbackLoopEXT | PipelineCreateFlagBits2KHR::eNoProtectedAccessEXT | PipelineCreateFlagBits2KHR::eProtectedAccessOnlyEXT | - PipelineCreateFlagBits2KHR::eRayTracingDisplacementMicromapNV | PipelineCreateFlagBits2KHR::eDescriptorBufferEXT; + PipelineCreateFlagBits2KHR::eRayTracingDisplacementMicromapNV | PipelineCreateFlagBits2KHR::eDescriptorBufferEXT | + PipelineCreateFlagBits2KHR::eCaptureData; }; enum class BufferUsageFlagBits2KHR : VkBufferUsageFlags2KHR @@ -7026,6 +7114,21 @@ namespace VULKAN_HPP_NAMESPACE BufferUsageFlagBits2KHR::eMicromapStorageEXT; }; + //=== VK_AMD_anti_lag === + + enum class AntiLagModeAMD + { + eDriverControl = VK_ANTI_LAG_MODE_DRIVER_CONTROL_AMD, + eOn = VK_ANTI_LAG_MODE_ON_AMD, + eOff = VK_ANTI_LAG_MODE_OFF_AMD + }; + + enum class 
AntiLagStageAMD + { + eInput = VK_ANTI_LAG_STAGE_INPUT_AMD, + ePresent = VK_ANTI_LAG_STAGE_PRESENT_AMD + }; + //=== VK_EXT_shader_object === enum class ShaderCreateFlagBitsEXT : VkShaderCreateFlagsEXT @@ -7079,6 +7182,75 @@ namespace VULKAN_HPP_NAMESPACE eString = VK_LAYER_SETTING_TYPE_STRING_EXT }; + //================================= + //=== Layer Setting Type Traits === + //================================= + + template <> + struct CppType<LayerSettingTypeEXT, LayerSettingTypeEXT::eBool32> + { + using Type = vk::Bool32; + }; + + template <> + struct CppType<LayerSettingTypeEXT, LayerSettingTypeEXT::eInt32> + { + using Type = int32_t; + }; + + template <> + struct CppType<LayerSettingTypeEXT, LayerSettingTypeEXT::eInt64> + { + using Type = int64_t; + }; + + template <> + struct CppType<LayerSettingTypeEXT, LayerSettingTypeEXT::eUint32> + { + using Type = uint32_t; + }; + + template <> + struct CppType<LayerSettingTypeEXT, LayerSettingTypeEXT::eUint64> + { + using Type = uint64_t; + }; + + template <> + struct CppType<LayerSettingTypeEXT, LayerSettingTypeEXT::eFloat32> + { + using Type = float; + }; + + template <> + struct CppType<LayerSettingTypeEXT, LayerSettingTypeEXT::eFloat64> + { + using Type = double; + }; + + template <> + struct CppType<LayerSettingTypeEXT, LayerSettingTypeEXT::eString> + { + using Type = char *; + }; + + template <typename T> + bool isSameType( LayerSettingTypeEXT layerSettingType ) + { + switch ( layerSettingType ) + { + case LayerSettingTypeEXT::eBool32: return std::is_same<T, vk::Bool32>::value; + case LayerSettingTypeEXT::eInt32: return std::is_same<T, int32_t>::value; + case LayerSettingTypeEXT::eInt64: return std::is_same<T, int64_t>::value; + case LayerSettingTypeEXT::eUint32: return std::is_same<T, uint32_t>::value; + case LayerSettingTypeEXT::eUint64: return std::is_same<T, uint64_t>::value; + case LayerSettingTypeEXT::eFloat32: return std::is_same<T, float>::value; + case LayerSettingTypeEXT::eFloat64: return std::is_same<T, double>::value; + case LayerSettingTypeEXT::eString: return std::is_same<T, char *>::value; + default: return false; + } + } + //=== VK_NV_low_latency2 === enum class LatencyMarkerNV @@ -7178,49 +7350,15 @@ namespace VULKAN_HPP_NAMESPACE }; using TimeDomainEXT = TimeDomainKHR; - //========================= - //=== Index Type Traits === - //========================= + //=== VK_KHR_maintenance7 === - template <typename T> - struct IndexTypeValue + enum class PhysicalDeviceLayeredApiKHR { - }; - - template <> - struct IndexTypeValue<uint16_t> - { - static VULKAN_HPP_CONST_OR_CONSTEXPR IndexType value = IndexType::eUint16; - }; - - template <> - struct CppType<IndexType, IndexType::eUint16> - { - using Type = uint16_t; - }; - - template <> - struct IndexTypeValue<uint32_t> - { - static VULKAN_HPP_CONST_OR_CONSTEXPR IndexType value = IndexType::eUint32; - }; - - template <> - struct CppType<IndexType, IndexType::eUint32> - { - using Type = uint32_t; - }; - - template <> - struct IndexTypeValue<uint8_t> - { - static VULKAN_HPP_CONST_OR_CONSTEXPR IndexType value = IndexType::eUint8KHR; - }; - - template <> - struct CppType<IndexType, IndexType::eUint8KHR> - { - using Type = uint8_t; + eVulkan = VK_PHYSICAL_DEVICE_LAYERED_API_VULKAN_KHR, + eD3D12 = VK_PHYSICAL_DEVICE_LAYERED_API_D3D12_KHR, + eMetal = VK_PHYSICAL_DEVICE_LAYERED_API_METAL_KHR, + eOpengl = VK_PHYSICAL_DEVICE_LAYERED_API_OPENGL_KHR, + eOpengles = VK_PHYSICAL_DEVICE_LAYERED_API_OPENGLES_KHR }; //=========================================================== @@ -7342,7 +7480,11 @@ namespace VULKAN_HPP_NAMESPACE return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; //=== VK_EXT_shader_object === - case VULKAN_HPP_NAMESPACE::ObjectType::eShaderEXT: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + case VULKAN_HPP_NAMESPACE::ObjectType::eShaderEXT: + return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + //=== VK_KHR_pipeline_binary === + case VULKAN_HPP_NAMESPACE::ObjectType::ePipelineBinaryKHR: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; default: VULKAN_HPP_ASSERT( false && "unknown ObjectType" ); return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
} diff --git a/third_party/vulkan/vulkan_extension_inspection.hpp b/third_party/vulkan/vulkan_extension_inspection.hpp index d9df2f6..da405d5 100644 --- a/third_party/vulkan/vulkan_extension_inspection.hpp +++ b/third_party/vulkan/vulkan_extension_inspection.hpp @@ -63,6 +63,7 @@ namespace VULKAN_HPP_NAMESPACE { "VK_MVK_macos_surface", "VK_EXT_metal_surface" }, #endif /*VK_USE_PLATFORM_MACOS_MVK*/ { "VK_AMD_gpu_shader_int16", "VK_KHR_shader_float16_int8" }, + { "VK_NV_ray_tracing", "VK_KHR_ray_tracing_pipeline" }, { "VK_EXT_buffer_device_address", "VK_KHR_buffer_device_address" }, { "VK_EXT_validation_features", "VK_EXT_layer_settings" } }; @@ -399,20 +400,24 @@ namespace VULKAN_HPP_NAMESPACE "VK_ANDROID_external_format_resolve", #endif /*VK_USE_PLATFORM_ANDROID_KHR*/ "VK_KHR_maintenance5", + "VK_AMD_anti_lag", "VK_KHR_ray_tracing_position_fetch", "VK_EXT_shader_object", + "VK_KHR_pipeline_binary", "VK_QCOM_tile_properties", "VK_SEC_amigo_profiling", "VK_QCOM_multiview_per_view_viewports", "VK_NV_ray_tracing_invocation_reorder", "VK_NV_extended_sparse_address_space", "VK_EXT_mutable_descriptor_type", + "VK_EXT_legacy_vertex_attributes", "VK_ARM_shader_core_builtins", "VK_EXT_pipeline_library_group_handles", "VK_EXT_dynamic_rendering_unused_attachments", "VK_NV_low_latency2", "VK_KHR_cooperative_matrix", "VK_QCOM_multiview_per_view_render_areas", + "VK_KHR_compute_shader_derivatives", "VK_KHR_video_decode_av1", "VK_KHR_video_maintenance1", "VK_NV_per_stage_descriptor_set", @@ -435,8 +440,13 @@ namespace VULKAN_HPP_NAMESPACE "VK_KHR_maintenance6", "VK_NV_descriptor_pool_overallocation", "VK_NV_raw_access_chains", + "VK_KHR_shader_relaxed_extended_instruction", + "VK_NV_command_buffer_inheritance", + "VK_KHR_maintenance7", "VK_NV_shader_atomic_float16_vector", - "VK_NV_ray_tracing_validation" + "VK_EXT_shader_replicated_composites", + "VK_NV_ray_tracing_validation", + "VK_MESA_image_alignment_control" }; return deviceExtensions; } @@ -2145,6 +2155,11 @@ namespace VULKAN_HPP_NAMESPACE "VK_KHR_dynamic_rendering", } } }, { "VK_VERSION_1_3", { {} } } } }, + { "VK_KHR_pipeline_binary", + { { "VK_VERSION_1_0", + { { + "VK_KHR_maintenance5", + } } } } }, { "VK_QCOM_tile_properties", { { "VK_VERSION_1_0", { { @@ -2173,6 +2188,11 @@ namespace VULKAN_HPP_NAMESPACE { { "VK_KHR_maintenance3", } } } } }, + { "VK_EXT_legacy_vertex_attributes", + { { "VK_VERSION_1_0", + { { + "VK_EXT_vertex_input_dynamic_state", + } } } } }, { "VK_ARM_shader_core_builtins", { { "VK_VERSION_1_0", { { @@ -2207,6 +2227,11 @@ namespace VULKAN_HPP_NAMESPACE "VK_KHR_get_physical_device_properties2", } } }, { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_compute_shader_derivatives", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } } } }, { "VK_KHR_video_decode_av1", { { "VK_VERSION_1_0", { { @@ -2306,7 +2331,14 @@ namespace VULKAN_HPP_NAMESPACE } } }, { "VK_VERSION_1_1", { {} } } } }, { "VK_KHR_maintenance6", { { "VK_VERSION_1_1", { {} } } } }, - { "VK_NV_descriptor_pool_overallocation", { { "VK_VERSION_1_1", { {} } } } } + { "VK_NV_descriptor_pool_overallocation", { { "VK_VERSION_1_1", { {} } } } }, + { "VK_KHR_maintenance7", { { "VK_VERSION_1_1", { {} } } } }, + { "VK_MESA_image_alignment_control", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } } }; auto depIt = dependencies.find( extension ); return ( depIt != dependencies.end() ) ? 
depIt->second : noDependencies; @@ -2400,6 +2432,7 @@ namespace VULKAN_HPP_NAMESPACE { "VK_KHR_driver_properties", "VK_VERSION_1_2" }, { "VK_KHR_shader_float_controls", "VK_VERSION_1_2" }, { "VK_KHR_depth_stencil_resolve", "VK_VERSION_1_2" }, + { "VK_NV_compute_shader_derivatives", "VK_KHR_compute_shader_derivatives" }, { "VK_NV_fragment_shader_barycentric", "VK_KHR_fragment_shader_barycentric" }, { "VK_KHR_timeline_semaphore", "VK_VERSION_1_2" }, { "VK_KHR_vulkan_memory_model", "VK_VERSION_1_2" }, @@ -2503,6 +2536,10 @@ namespace VULKAN_HPP_NAMESPACE { return "VK_KHR_shader_float16_int8"; } + if ( extension == "VK_NV_ray_tracing" ) + { + return "VK_KHR_ray_tracing_pipeline"; + } if ( extension == "VK_EXT_buffer_device_address" ) { return "VK_KHR_buffer_device_address"; @@ -2719,6 +2756,10 @@ namespace VULKAN_HPP_NAMESPACE { return "VK_VERSION_1_2"; } + if ( extension == "VK_NV_compute_shader_derivatives" ) + { + return "VK_KHR_compute_shader_derivatives"; + } if ( extension == "VK_NV_fragment_shader_barycentric" ) { return "VK_KHR_fragment_shader_barycentric"; @@ -2877,7 +2918,8 @@ namespace VULKAN_HPP_NAMESPACE #if defined( VK_USE_PLATFORM_MACOS_MVK ) ( extension == "VK_MVK_macos_surface" ) || #endif /*VK_USE_PLATFORM_MACOS_MVK*/ - ( extension == "VK_AMD_gpu_shader_int16" ) || ( extension == "VK_EXT_buffer_device_address" ) || ( extension == "VK_EXT_validation_features" ); + ( extension == "VK_AMD_gpu_shader_int16" ) || ( extension == "VK_NV_ray_tracing" ) || ( extension == "VK_EXT_buffer_device_address" ) || + ( extension == "VK_EXT_validation_features" ); } VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 bool isDeviceExtension( std::string const & extension ) @@ -3052,14 +3094,16 @@ namespace VULKAN_HPP_NAMESPACE #if defined( VK_USE_PLATFORM_ANDROID_KHR ) || ( extension == "VK_ANDROID_external_format_resolve" ) #endif /*VK_USE_PLATFORM_ANDROID_KHR*/ - || ( extension == "VK_KHR_maintenance5" ) || ( extension == "VK_KHR_ray_tracing_position_fetch" ) || ( extension == "VK_EXT_shader_object" ) || - ( extension == "VK_QCOM_tile_properties" ) || ( extension == "VK_SEC_amigo_profiling" ) || ( extension == "VK_QCOM_multiview_per_view_viewports" ) || + || ( extension == "VK_KHR_maintenance5" ) || ( extension == "VK_AMD_anti_lag" ) || ( extension == "VK_KHR_ray_tracing_position_fetch" ) || + ( extension == "VK_EXT_shader_object" ) || ( extension == "VK_KHR_pipeline_binary" ) || ( extension == "VK_QCOM_tile_properties" ) || + ( extension == "VK_SEC_amigo_profiling" ) || ( extension == "VK_QCOM_multiview_per_view_viewports" ) || ( extension == "VK_NV_ray_tracing_invocation_reorder" ) || ( extension == "VK_NV_extended_sparse_address_space" ) || - ( extension == "VK_EXT_mutable_descriptor_type" ) || ( extension == "VK_ARM_shader_core_builtins" ) || - ( extension == "VK_EXT_pipeline_library_group_handles" ) || ( extension == "VK_EXT_dynamic_rendering_unused_attachments" ) || - ( extension == "VK_NV_low_latency2" ) || ( extension == "VK_KHR_cooperative_matrix" ) || - ( extension == "VK_QCOM_multiview_per_view_render_areas" ) || ( extension == "VK_KHR_video_decode_av1" ) || - ( extension == "VK_KHR_video_maintenance1" ) || ( extension == "VK_NV_per_stage_descriptor_set" ) || ( extension == "VK_QCOM_image_processing2" ) || + ( extension == "VK_EXT_mutable_descriptor_type" ) || ( extension == "VK_EXT_legacy_vertex_attributes" ) || + ( extension == "VK_ARM_shader_core_builtins" ) || ( extension == "VK_EXT_pipeline_library_group_handles" ) || + ( extension == "VK_EXT_dynamic_rendering_unused_attachments" ) || ( 
extension == "VK_NV_low_latency2" ) || + ( extension == "VK_KHR_cooperative_matrix" ) || ( extension == "VK_QCOM_multiview_per_view_render_areas" ) || + ( extension == "VK_KHR_compute_shader_derivatives" ) || ( extension == "VK_KHR_video_decode_av1" ) || ( extension == "VK_KHR_video_maintenance1" ) || + ( extension == "VK_NV_per_stage_descriptor_set" ) || ( extension == "VK_QCOM_image_processing2" ) || ( extension == "VK_QCOM_filter_cubic_weights" ) || ( extension == "VK_QCOM_ycbcr_degamma" ) || ( extension == "VK_QCOM_filter_cubic_clamp" ) || ( extension == "VK_EXT_attachment_feedback_loop_dynamic_state" ) || ( extension == "VK_KHR_vertex_attribute_divisor" ) || ( extension == "VK_KHR_load_store_op_none" ) || ( extension == "VK_KHR_shader_float_controls2" ) @@ -3069,7 +3113,10 @@ namespace VULKAN_HPP_NAMESPACE || ( extension == "VK_MSFT_layered_driver" ) || ( extension == "VK_KHR_index_type_uint8" ) || ( extension == "VK_KHR_line_rasterization" ) || ( extension == "VK_KHR_calibrated_timestamps" ) || ( extension == "VK_KHR_shader_expect_assume" ) || ( extension == "VK_KHR_maintenance6" ) || ( extension == "VK_NV_descriptor_pool_overallocation" ) || ( extension == "VK_NV_raw_access_chains" ) || - ( extension == "VK_NV_shader_atomic_float16_vector" ) || ( extension == "VK_NV_ray_tracing_validation" ); + ( extension == "VK_KHR_shader_relaxed_extended_instruction" ) || ( extension == "VK_NV_command_buffer_inheritance" ) || + ( extension == "VK_KHR_maintenance7" ) || ( extension == "VK_NV_shader_atomic_float16_vector" ) || + ( extension == "VK_EXT_shader_replicated_composites" ) || ( extension == "VK_NV_ray_tracing_validation" ) || + ( extension == "VK_MESA_image_alignment_control" ); } VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 bool isInstanceExtension( std::string const & extension ) @@ -3162,15 +3209,16 @@ namespace VULKAN_HPP_NAMESPACE ( extension == "VK_KHR_shader_atomic_int64" ) || ( extension == "VK_EXT_calibrated_timestamps" ) || ( extension == "VK_EXT_vertex_attribute_divisor" ) || ( extension == "VK_EXT_pipeline_creation_feedback" ) || ( extension == "VK_KHR_driver_properties" ) || ( extension == "VK_KHR_shader_float_controls" ) || ( extension == "VK_KHR_depth_stencil_resolve" ) || - ( extension == "VK_NV_fragment_shader_barycentric" ) || ( extension == "VK_KHR_timeline_semaphore" ) || - ( extension == "VK_KHR_vulkan_memory_model" ) || ( extension == "VK_KHR_shader_terminate_invocation" ) || - ( extension == "VK_EXT_scalar_block_layout" ) || ( extension == "VK_EXT_subgroup_size_control" ) || ( extension == "VK_KHR_spirv_1_4" ) || - ( extension == "VK_KHR_separate_depth_stencil_layouts" ) || ( extension == "VK_EXT_tooling_info" ) || - ( extension == "VK_EXT_separate_stencil_usage" ) || ( extension == "VK_KHR_uniform_buffer_standard_layout" ) || - ( extension == "VK_KHR_buffer_device_address" ) || ( extension == "VK_EXT_line_rasterization" ) || ( extension == "VK_EXT_host_query_reset" ) || - ( extension == "VK_EXT_index_type_uint8" ) || ( extension == "VK_EXT_extended_dynamic_state" ) || - ( extension == "VK_EXT_shader_demote_to_helper_invocation" ) || ( extension == "VK_KHR_shader_integer_dot_product" ) || - ( extension == "VK_EXT_texel_buffer_alignment" ) || ( extension == "VK_KHR_shader_non_semantic_info" ) || ( extension == "VK_EXT_private_data" ) || + ( extension == "VK_NV_compute_shader_derivatives" ) || ( extension == "VK_NV_fragment_shader_barycentric" ) || + ( extension == "VK_KHR_timeline_semaphore" ) || ( extension == "VK_KHR_vulkan_memory_model" ) || + ( extension == 
"VK_KHR_shader_terminate_invocation" ) || ( extension == "VK_EXT_scalar_block_layout" ) || + ( extension == "VK_EXT_subgroup_size_control" ) || ( extension == "VK_KHR_spirv_1_4" ) || ( extension == "VK_KHR_separate_depth_stencil_layouts" ) || + ( extension == "VK_EXT_tooling_info" ) || ( extension == "VK_EXT_separate_stencil_usage" ) || + ( extension == "VK_KHR_uniform_buffer_standard_layout" ) || ( extension == "VK_KHR_buffer_device_address" ) || + ( extension == "VK_EXT_line_rasterization" ) || ( extension == "VK_EXT_host_query_reset" ) || ( extension == "VK_EXT_index_type_uint8" ) || + ( extension == "VK_EXT_extended_dynamic_state" ) || ( extension == "VK_EXT_shader_demote_to_helper_invocation" ) || + ( extension == "VK_KHR_shader_integer_dot_product" ) || ( extension == "VK_EXT_texel_buffer_alignment" ) || + ( extension == "VK_KHR_shader_non_semantic_info" ) || ( extension == "VK_EXT_private_data" ) || ( extension == "VK_EXT_pipeline_creation_cache_control" ) || ( extension == "VK_KHR_synchronization2" ) || ( extension == "VK_KHR_zero_initialize_workgroup_memory" ) || ( extension == "VK_EXT_ycbcr_2plane_444_formats" ) || ( extension == "VK_EXT_image_robustness" ) || ( extension == "VK_KHR_copy_commands2" ) || ( extension == "VK_EXT_4444_formats" ) || diff --git a/third_party/vulkan/vulkan_format_traits.hpp b/third_party/vulkan/vulkan_format_traits.hpp index 8f26981..25790fd 100644 --- a/third_party/vulkan/vulkan_format_traits.hpp +++ b/third_party/vulkan/vulkan_format_traits.hpp @@ -362,7 +362,7 @@ namespace VULKAN_HPP_NAMESPACE case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppSrgbBlockIMG: return 8; case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppSrgbBlockIMG: return 8; case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppSrgbBlockIMG: return 8; - case VULKAN_HPP_NAMESPACE::Format::eR16G16S105NV: return 4; + case VULKAN_HPP_NAMESPACE::Format::eR16G16Sfixed5NV: return 4; case VULKAN_HPP_NAMESPACE::Format::eA1B5G5R5UnormPack16KHR: return 2; case VULKAN_HPP_NAMESPACE::Format::eA8UnormKHR: return 1; @@ -621,7 +621,7 @@ namespace VULKAN_HPP_NAMESPACE case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppSrgbBlockIMG: return "PVRTC1_4BPP"; case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppSrgbBlockIMG: return "PVRTC2_2BPP"; case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppSrgbBlockIMG: return "PVRTC2_4BPP"; - case VULKAN_HPP_NAMESPACE::Format::eR16G16S105NV: return "32-bit"; + case VULKAN_HPP_NAMESPACE::Format::eR16G16Sfixed5NV: return "32-bit"; case VULKAN_HPP_NAMESPACE::Format::eA1B5G5R5UnormPack16KHR: return "16-bit"; case VULKAN_HPP_NAMESPACE::Format::eA8UnormKHR: return "8-bit alpha"; @@ -2005,7 +2005,7 @@ namespace VULKAN_HPP_NAMESPACE case 3: return 4; default: VULKAN_HPP_ASSERT( false ); return 0; } - case VULKAN_HPP_NAMESPACE::Format::eR16G16S105NV: + case VULKAN_HPP_NAMESPACE::Format::eR16G16Sfixed5NV: switch ( component ) { case 0: return 16; @@ -2283,7 +2283,7 @@ namespace VULKAN_HPP_NAMESPACE case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppSrgbBlockIMG: return 4; case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppSrgbBlockIMG: return 4; case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppSrgbBlockIMG: return 4; - case VULKAN_HPP_NAMESPACE::Format::eR16G16S105NV: return 2; + case VULKAN_HPP_NAMESPACE::Format::eR16G16Sfixed5NV: return 2; case VULKAN_HPP_NAMESPACE::Format::eA1B5G5R5UnormPack16KHR: return 4; case VULKAN_HPP_NAMESPACE::Format::eA8UnormKHR: return 1; @@ -4299,7 +4299,7 @@ namespace VULKAN_HPP_NAMESPACE case 3: return "A"; default: VULKAN_HPP_ASSERT( false ); return ""; } - case 
VULKAN_HPP_NAMESPACE::Format::eR16G16S105NV: + case VULKAN_HPP_NAMESPACE::Format::eR16G16Sfixed5NV: switch ( component ) { case 0: return "R"; @@ -6334,11 +6334,11 @@ namespace VULKAN_HPP_NAMESPACE case 3: return "SRGB"; default: VULKAN_HPP_ASSERT( false ); return ""; } - case VULKAN_HPP_NAMESPACE::Format::eR16G16S105NV: + case VULKAN_HPP_NAMESPACE::Format::eR16G16Sfixed5NV: switch ( component ) { - case 0: return "SINT"; - case 1: return "SINT"; + case 0: return "SFIXED5"; + case 1: return "SFIXED5"; default: VULKAN_HPP_ASSERT( false ); return ""; } case VULKAN_HPP_NAMESPACE::Format::eA1B5G5R5UnormPack16KHR: @@ -7657,7 +7657,7 @@ namespace VULKAN_HPP_NAMESPACE case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppSrgbBlockIMG: return 1; case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppSrgbBlockIMG: return 1; case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppSrgbBlockIMG: return 1; - case VULKAN_HPP_NAMESPACE::Format::eR16G16S105NV: return 1; + case VULKAN_HPP_NAMESPACE::Format::eR16G16Sfixed5NV: return 1; case VULKAN_HPP_NAMESPACE::Format::eA1B5G5R5UnormPack16KHR: return 1; case VULKAN_HPP_NAMESPACE::Format::eA8UnormKHR: return 1; diff --git a/third_party/vulkan/vulkan_funcs.hpp b/third_party/vulkan/vulkan_funcs.hpp index 86a4863..f91d6a4 100644 --- a/third_party/vulkan/vulkan_funcs.hpp +++ b/third_party/vulkan/vulkan_funcs.hpp @@ -8,6 +8,9 @@ #ifndef VULKAN_FUNCS_HPP #define VULKAN_FUNCS_HPP +// include-what-you-use: make sure, vulkan.hpp is used by code-completers +// IWYU pragma: private; include "vulkan.hpp" + namespace VULKAN_HPP_NAMESPACE { @@ -44,9 +47,9 @@ namespace VULKAN_HPP_NAMESPACE d.vkCreateInstance( reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &instance ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::createInstance" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::createInstance" ); - return createResultValueType( result, std::move( instance ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( instance ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -64,10 +67,10 @@ namespace VULKAN_HPP_NAMESPACE d.vkCreateInstance( reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &instance ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::createInstanceUnique" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::createInstanceUnique" ); - return createResultValueType( result, - UniqueHandle( instance, ObjectDestroy( allocator, d ) ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( instance, ObjectDestroy( allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -125,13 +128,13 @@ namespace VULKAN_HPP_NAMESPACE d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, reinterpret_cast( physicalDevices.data() ) ) ); } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDevices" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDevices" ); VULKAN_HPP_ASSERT( physicalDeviceCount <= physicalDevices.size() ); if ( physicalDeviceCount < physicalDevices.size() ) { physicalDevices.resize( physicalDeviceCount ); } - return createResultValueType( result, std::move( physicalDevices ) ); + return 
VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( physicalDevices ) ); } template ( physicalDevices.data() ) ) ); } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDevices" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDevices" ); VULKAN_HPP_ASSERT( physicalDeviceCount <= physicalDevices.size() ); if ( physicalDeviceCount < physicalDevices.size() ) { physicalDevices.resize( physicalDeviceCount ); } - return createResultValueType( result, std::move( physicalDevices ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( physicalDevices ) ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -261,9 +264,9 @@ namespace VULKAN_HPP_NAMESPACE static_cast( usage ), static_cast( flags ), reinterpret_cast( &imageFormatProperties ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties" ); - return createResultValueType( result, std::move( imageFormatProperties ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( imageFormatProperties ) ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -453,9 +456,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &device ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDevice" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDevice" ); - return createResultValueType( result, std::move( device ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( device ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -476,9 +479,10 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &device ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDeviceUnique" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDeviceUnique" ); - return createResultValueType( result, UniqueHandle( device, ObjectDestroy( allocator, d ) ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( device, ObjectDestroy( allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -539,13 +543,13 @@ namespace VULKAN_HPP_NAMESPACE layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast( properties.data() ) ) ); } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceExtensionProperties" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceExtensionProperties" ); VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); if ( propertyCount < properties.size() ) { properties.resize( propertyCount ); } - return createResultValueType( result, std::move( properties ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); } template < @@ -576,13 +580,13 @@ namespace VULKAN_HPP_NAMESPACE layerName ? 
layerName->c_str() : nullptr, &propertyCount, reinterpret_cast( properties.data() ) ) ); } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceExtensionProperties" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceExtensionProperties" ); VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); if ( propertyCount < properties.size() ) { properties.resize( propertyCount ); } - return createResultValueType( result, std::move( properties ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -621,13 +625,13 @@ namespace VULKAN_HPP_NAMESPACE m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast( properties.data() ) ) ); } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceExtensionProperties" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceExtensionProperties" ); VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); if ( propertyCount < properties.size() ) { properties.resize( propertyCount ); } - return createResultValueType( result, std::move( properties ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); } template < @@ -658,13 +662,13 @@ namespace VULKAN_HPP_NAMESPACE m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast( properties.data() ) ) ); } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceExtensionProperties" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceExtensionProperties" ); VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); if ( propertyCount < properties.size() ) { properties.resize( propertyCount ); } - return createResultValueType( result, std::move( properties ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -700,13 +704,13 @@ namespace VULKAN_HPP_NAMESPACE d.vkEnumerateInstanceLayerProperties( &propertyCount, reinterpret_cast( properties.data() ) ) ); } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceLayerProperties" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceLayerProperties" ); VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); if ( propertyCount < properties.size() ) { properties.resize( propertyCount ); } - return createResultValueType( result, std::move( properties ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); } template ( properties.data() ) ) ); } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceLayerProperties" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceLayerProperties" ); VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); if ( propertyCount < properties.size() ) { properties.resize( propertyCount ); } - 
return createResultValueType( result, std::move( properties ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -775,13 +779,13 @@ namespace VULKAN_HPP_NAMESPACE d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceLayerProperties" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceLayerProperties" ); VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); if ( propertyCount < properties.size() ) { properties.resize( propertyCount ); } - return createResultValueType( result, std::move( properties ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); } template ( properties.data() ) ) ); } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceLayerProperties" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceLayerProperties" ); VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); if ( propertyCount < properties.size() ) { properties.resize( propertyCount ); } - return createResultValueType( result, std::move( properties ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -865,9 +869,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkQueueSubmit( m_queue, submits.size(), reinterpret_cast( submits.data() ), static_cast( fence ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::submit" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::submit" ); - return createResultValueType( result ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -888,9 +892,9 @@ namespace VULKAN_HPP_NAMESPACE # endif VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkQueueWaitIdle( m_queue ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::waitIdle" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::waitIdle" ); - return createResultValueType( result ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ @@ -911,9 +915,9 @@ namespace VULKAN_HPP_NAMESPACE # endif VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkDeviceWaitIdle( m_device ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::waitIdle" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::waitIdle" ); - return createResultValueType( result ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ @@ -948,9 +952,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &allocateInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &memory ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateMemory" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateMemory" ); - return 
createResultValueType( result, std::move( memory ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( memory ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -971,10 +975,10 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &allocateInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &memory ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateMemoryUnique" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateMemoryUnique" ); - return createResultValueType( result, - UniqueHandle( memory, ObjectFree( *this, allocator, d ) ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( memory, ObjectFree( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -1068,9 +1072,9 @@ namespace VULKAN_HPP_NAMESPACE static_cast( size ), static_cast( flags ), &pData ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::mapMemory" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::mapMemory" ); - return createResultValueType( result, std::move( pData ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( pData ) ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -1103,9 +1107,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkFlushMappedMemoryRanges( m_device, memoryRanges.size(), reinterpret_cast( memoryRanges.data() ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::flushMappedMemoryRanges" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::flushMappedMemoryRanges" ); - return createResultValueType( result ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -1132,9 +1136,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkInvalidateMappedMemoryRanges( m_device, memoryRanges.size(), reinterpret_cast( memoryRanges.data() ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::invalidateMappedMemoryRanges" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::invalidateMappedMemoryRanges" ); - return createResultValueType( result ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -1187,9 +1191,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkBindBufferMemory( m_device, static_cast( buffer ), static_cast( memory ), static_cast( memoryOffset ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory" ); - return createResultValueType( result ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ @@ -1216,9 +1220,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkBindImageMemory( m_device, static_cast( image ), static_cast( memory ), static_cast( memoryOffset ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory" ); - return 
createResultValueType( result ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ @@ -1485,9 +1489,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkQueueBindSparse( m_queue, bindInfo.size(), reinterpret_cast( bindInfo.data() ), static_cast( fence ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::bindSparse" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::bindSparse" ); - return createResultValueType( result ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -1520,9 +1524,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &fence ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createFence" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createFence" ); - return createResultValueType( result, std::move( fence ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( fence ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -1541,10 +1545,10 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &fence ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createFenceUnique" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createFenceUnique" ); - return createResultValueType( result, - UniqueHandle( fence, ObjectDestroy( *this, allocator, d ) ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( fence, ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -1622,9 +1626,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkResetFences( m_device, fences.size(), reinterpret_cast( fences.data() ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::resetFences" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::resetFences" ); - return createResultValueType( result ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -1645,7 +1649,7 @@ namespace VULKAN_HPP_NAMESPACE # endif VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetFenceStatus( m_device, static_cast( fence ) ) ); - resultCheck( + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceStatus", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } ); return static_cast( result ); @@ -1679,7 +1683,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkWaitForFences( m_device, fences.size(), reinterpret_cast( fences.data() ), static_cast( waitAll ), timeout ) ); - resultCheck( + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::waitForFences", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout } ); return static_cast( result ); @@ -1717,9 +1721,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &semaphore ) ) ); - resultCheck( result, 
VULKAN_HPP_NAMESPACE_STRING "::Device::createSemaphore" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSemaphore" ); - return createResultValueType( result, std::move( semaphore ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( semaphore ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -1740,9 +1744,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &semaphore ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSemaphoreUnique" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSemaphoreUnique" ); - return createResultValueType( + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, UniqueHandle( semaphore, ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ @@ -1829,9 +1833,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &event ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createEvent" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createEvent" ); - return createResultValueType( result, std::move( event ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( event ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -1850,10 +1854,10 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &event ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createEventUnique" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createEventUnique" ); - return createResultValueType( result, - UniqueHandle( event, ObjectDestroy( *this, allocator, d ) ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( event, ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -1927,7 +1931,7 @@ namespace VULKAN_HPP_NAMESPACE # endif VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetEventStatus( m_device, static_cast( event ) ) ); - resultCheck( + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getEventStatus", { VULKAN_HPP_NAMESPACE::Result::eEventSet, VULKAN_HPP_NAMESPACE::Result::eEventReset } ); return static_cast( result ); @@ -1952,9 +1956,9 @@ namespace VULKAN_HPP_NAMESPACE # endif VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkSetEvent( m_device, static_cast( event ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setEvent" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setEvent" ); - return createResultValueType( result ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ @@ -1975,9 +1979,9 @@ namespace VULKAN_HPP_NAMESPACE # endif VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkResetEvent( m_device, static_cast( event ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::resetEvent" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::resetEvent" ); - return createResultValueType( result ); + return 
VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ @@ -2012,9 +2016,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &queryPool ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createQueryPool" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createQueryPool" ); - return createResultValueType( result, std::move( queryPool ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( queryPool ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -2035,9 +2039,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &queryPool ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createQueryPoolUnique" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createQueryPoolUnique" ); - return createResultValueType( + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, UniqueHandle( queryPool, ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ @@ -2142,9 +2146,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( data.data() ), static_cast( stride ), static_cast( flags ) ) ); - resultCheck( result, - VULKAN_HPP_NAMESPACE_STRING "::Device::getQueryPoolResults", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::getQueryPoolResults", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } ); return ResultValue>( result, std::move( data ) ); } @@ -2171,7 +2175,7 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &data ), static_cast( stride ), static_cast( flags ) ) ); - resultCheck( + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getQueryPoolResult", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } ); return ResultValue( result, std::move( data ) ); @@ -2207,9 +2211,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &buffer ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createBuffer" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createBuffer" ); - return createResultValueType( result, std::move( buffer ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( buffer ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -2228,10 +2232,10 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &buffer ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferUnique" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferUnique" ); - return createResultValueType( result, - UniqueHandle( buffer, ObjectDestroy( *this, allocator, d ) ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( buffer, ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -2319,9 +2323,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &createInfo ), 
reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &view ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferView" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferView" ); - return createResultValueType( result, std::move( view ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( view ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -2342,10 +2346,10 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &view ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferViewUnique" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferViewUnique" ); - return createResultValueType( result, - UniqueHandle( view, ObjectDestroy( *this, allocator, d ) ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( view, ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -2431,9 +2435,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &image ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createImage" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createImage" ); - return createResultValueType( result, std::move( image ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( image ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -2452,10 +2456,10 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &image ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createImageUnique" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createImageUnique" ); - return createResultValueType( result, - UniqueHandle( image, ObjectDestroy( *this, allocator, d ) ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( image, ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -2576,9 +2580,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &view ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createImageView" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createImageView" ); - return createResultValueType( result, std::move( view ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( view ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -2599,10 +2603,10 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &view ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createImageViewUnique" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createImageViewUnique" ); - return createResultValueType( result, - UniqueHandle( view, ObjectDestroy( *this, allocator, d ) ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( view, ObjectDestroy( *this, allocator, d ) ) ); 
} # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -2690,9 +2694,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &shaderModule ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createShaderModule" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createShaderModule" ); - return createResultValueType( result, std::move( shaderModule ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( shaderModule ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -2713,9 +2717,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &shaderModule ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createShaderModuleUnique" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createShaderModuleUnique" ); - return createResultValueType( + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, UniqueHandle( shaderModule, ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ @@ -2804,9 +2808,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &pipelineCache ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineCache" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineCache" ); - return createResultValueType( result, std::move( pipelineCache ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( pipelineCache ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -2827,9 +2831,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &pipelineCache ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineCacheUnique" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineCacheUnique" ); - return createResultValueType( + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, UniqueHandle( pipelineCache, ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ @@ -2921,13 +2925,13 @@ namespace VULKAN_HPP_NAMESPACE d.vkGetPipelineCacheData( m_device, static_cast( pipelineCache ), &dataSize, reinterpret_cast( data.data() ) ) ); } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineCacheData" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineCacheData" ); VULKAN_HPP_ASSERT( dataSize <= data.size() ); if ( dataSize < data.size() ) { data.resize( dataSize ); } - return createResultValueType( result, std::move( data ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); } template ( pipelineCache ), &dataSize, reinterpret_cast( data.data() ) ) ); } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineCacheData" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineCacheData" ); VULKAN_HPP_ASSERT( dataSize <= data.size() ); if ( dataSize 
< data.size() ) { data.resize( dataSize ); } - return createResultValueType( result, std::move( data ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -2990,9 +2994,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkMergePipelineCaches( m_device, static_cast( dstCache ), srcCaches.size(), reinterpret_cast( srcCaches.data() ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::mergePipelineCaches" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::mergePipelineCaches" ); - return createResultValueType( result ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -3034,9 +3038,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( pipelines.data() ) ) ); - resultCheck( result, - VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelines", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelines", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); return ResultValue>( result, std::move( pipelines ) ); } @@ -3064,9 +3068,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( pipelines.data() ) ) ); - resultCheck( result, - VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelines", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelines", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); return ResultValue>( result, std::move( pipelines ) ); } @@ -3091,9 +3095,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &pipeline ) ) ); - resultCheck( result, - VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipeline", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipeline", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); return ResultValue( result, std::move( pipeline ) ); } @@ -3119,9 +3123,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( pipelines.data() ) ) ); - resultCheck( result, - VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelinesUnique", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelinesUnique", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); std::vector, PipelineAllocator> uniquePipelines; uniquePipelines.reserve( createInfos.size() ); ObjectDestroy deleter( *this, allocator, 
d ); @@ -3156,9 +3160,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( pipelines.data() ) ) ); - resultCheck( result, - VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelinesUnique", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelinesUnique", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); std::vector, PipelineAllocator> uniquePipelines( pipelineAllocator ); uniquePipelines.reserve( createInfos.size() ); ObjectDestroy deleter( *this, allocator, d ); @@ -3189,9 +3193,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &pipeline ) ) ); - resultCheck( result, - VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelineUnique", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelineUnique", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); return ResultValue>( result, UniqueHandle( pipeline, ObjectDestroy( *this, allocator, d ) ) ); @@ -3237,9 +3241,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( pipelines.data() ) ) ); - resultCheck( result, - VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelines", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelines", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); return ResultValue>( result, std::move( pipelines ) ); } @@ -3267,9 +3271,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( pipelines.data() ) ) ); - resultCheck( result, - VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelines", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelines", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); return ResultValue>( result, std::move( pipelines ) ); } @@ -3294,9 +3298,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &pipeline ) ) ); - resultCheck( result, - VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipeline", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipeline", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); return ResultValue( result, std::move( pipeline ) ); } @@ -3322,9 +3326,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( createInfos.data() ), reinterpret_cast( 
static_cast( allocator ) ), reinterpret_cast( pipelines.data() ) ) ); - resultCheck( result, - VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelinesUnique", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelinesUnique", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); std::vector, PipelineAllocator> uniquePipelines; uniquePipelines.reserve( createInfos.size() ); ObjectDestroy deleter( *this, allocator, d ); @@ -3359,9 +3363,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( pipelines.data() ) ) ); - resultCheck( result, - VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelinesUnique", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelinesUnique", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); std::vector, PipelineAllocator> uniquePipelines( pipelineAllocator ); uniquePipelines.reserve( createInfos.size() ); ObjectDestroy deleter( *this, allocator, d ); @@ -3392,9 +3396,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &pipeline ) ) ); - resultCheck( result, - VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelineUnique", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelineUnique", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); return ResultValue>( result, UniqueHandle( pipeline, ObjectDestroy( *this, allocator, d ) ) ); @@ -3485,9 +3489,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &pipelineLayout ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineLayout" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineLayout" ); - return createResultValueType( result, std::move( pipelineLayout ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( pipelineLayout ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -3508,9 +3512,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &pipelineLayout ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineLayoutUnique" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineLayoutUnique" ); - return createResultValueType( + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, UniqueHandle( pipelineLayout, ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ @@ -3597,9 +3601,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &sampler ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSampler" 
); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSampler" ); - return createResultValueType( result, std::move( sampler ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( sampler ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -3618,10 +3622,10 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &sampler ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerUnique" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerUnique" ); - return createResultValueType( result, - UniqueHandle( sampler, ObjectDestroy( *this, allocator, d ) ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( sampler, ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -3709,9 +3713,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &setLayout ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorSetLayout" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorSetLayout" ); - return createResultValueType( result, std::move( setLayout ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( setLayout ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -3732,9 +3736,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &setLayout ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorSetLayoutUnique" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorSetLayoutUnique" ); - return createResultValueType( + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, UniqueHandle( setLayout, ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ @@ -3827,9 +3831,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &descriptorPool ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorPool" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorPool" ); - return createResultValueType( result, std::move( descriptorPool ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( descriptorPool ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -3850,9 +3854,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &descriptorPool ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorPoolUnique" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorPoolUnique" ); - return createResultValueType( + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, UniqueHandle( descriptorPool, ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ @@ -3958,9 +3962,9 @@ namespace VULKAN_HPP_NAMESPACE std::vector descriptorSets( allocateInfo.descriptorSetCount ); VULKAN_HPP_NAMESPACE::Result result = 
static_cast( d.vkAllocateDescriptorSets( m_device, reinterpret_cast( &allocateInfo ), reinterpret_cast( descriptorSets.data() ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSets" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSets" ); - return createResultValueType( result, std::move( descriptorSets ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( descriptorSets ) ); } template descriptorSets( allocateInfo.descriptorSetCount, descriptorSetAllocator ); VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkAllocateDescriptorSets( m_device, reinterpret_cast( &allocateInfo ), reinterpret_cast( descriptorSets.data() ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSets" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSets" ); - return createResultValueType( result, std::move( descriptorSets ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( descriptorSets ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -3998,7 +4002,7 @@ namespace VULKAN_HPP_NAMESPACE std::vector descriptorSets( allocateInfo.descriptorSetCount ); VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkAllocateDescriptorSets( m_device, reinterpret_cast( &allocateInfo ), reinterpret_cast( descriptorSets.data() ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSetsUnique" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSetsUnique" ); std::vector, DescriptorSetAllocator> uniqueDescriptorSets; uniqueDescriptorSets.reserve( allocateInfo.descriptorSetCount ); PoolFree deleter( *this, allocateInfo.descriptorPool, d ); @@ -4006,7 +4010,7 @@ namespace VULKAN_HPP_NAMESPACE { uniqueDescriptorSets.push_back( UniqueHandle( descriptorSet, deleter ) ); } - return createResultValueType( result, std::move( uniqueDescriptorSets ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( uniqueDescriptorSets ) ); } template < @@ -4028,7 +4032,7 @@ namespace VULKAN_HPP_NAMESPACE std::vector descriptorSets( allocateInfo.descriptorSetCount ); VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkAllocateDescriptorSets( m_device, reinterpret_cast( &allocateInfo ), reinterpret_cast( descriptorSets.data() ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSetsUnique" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSetsUnique" ); std::vector, DescriptorSetAllocator> uniqueDescriptorSets( descriptorSetAllocator ); uniqueDescriptorSets.reserve( allocateInfo.descriptorSetCount ); PoolFree deleter( *this, allocateInfo.descriptorPool, d ); @@ -4036,7 +4040,7 @@ namespace VULKAN_HPP_NAMESPACE { uniqueDescriptorSets.push_back( UniqueHandle( descriptorSet, deleter ) ); } - return createResultValueType( result, std::move( uniqueDescriptorSets ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( uniqueDescriptorSets ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -4161,9 +4165,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &framebuffer ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING 
"::Device::createFramebuffer" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createFramebuffer" ); - return createResultValueType( result, std::move( framebuffer ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( framebuffer ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -4184,9 +4188,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &framebuffer ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createFramebufferUnique" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createFramebufferUnique" ); - return createResultValueType( + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, UniqueHandle( framebuffer, ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ @@ -4275,9 +4279,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &renderPass ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass" ); - return createResultValueType( result, std::move( renderPass ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( renderPass ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -4298,9 +4302,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &renderPass ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPassUnique" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPassUnique" ); - return createResultValueType( + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, UniqueHandle( renderPass, ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ @@ -4415,9 +4419,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &commandPool ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCommandPool" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCommandPool" ); - return createResultValueType( result, std::move( commandPool ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( commandPool ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -4438,9 +4442,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &commandPool ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCommandPoolUnique" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCommandPoolUnique" ); - return createResultValueType( + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, UniqueHandle( commandPool, ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ @@ -4519,9 +4523,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkResetCommandPool( m_device, static_cast( commandPool ), static_cast( flags ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::resetCommandPool" ); + 
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::resetCommandPool" ); - return createResultValueType( result ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ @@ -4548,9 +4552,9 @@ namespace VULKAN_HPP_NAMESPACE std::vector commandBuffers( allocateInfo.commandBufferCount ); VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkAllocateCommandBuffers( m_device, reinterpret_cast( &allocateInfo ), reinterpret_cast( commandBuffers.data() ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffers" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffers" ); - return createResultValueType( result, std::move( commandBuffers ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( commandBuffers ) ); } template commandBuffers( allocateInfo.commandBufferCount, commandBufferAllocator ); VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkAllocateCommandBuffers( m_device, reinterpret_cast( &allocateInfo ), reinterpret_cast( commandBuffers.data() ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffers" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffers" ); - return createResultValueType( result, std::move( commandBuffers ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( commandBuffers ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -4588,7 +4592,7 @@ namespace VULKAN_HPP_NAMESPACE std::vector commandBuffers( allocateInfo.commandBufferCount ); VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkAllocateCommandBuffers( m_device, reinterpret_cast( &allocateInfo ), reinterpret_cast( commandBuffers.data() ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffersUnique" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffersUnique" ); std::vector, CommandBufferAllocator> uniqueCommandBuffers; uniqueCommandBuffers.reserve( allocateInfo.commandBufferCount ); PoolFree deleter( *this, allocateInfo.commandPool, d ); @@ -4596,7 +4600,7 @@ namespace VULKAN_HPP_NAMESPACE { uniqueCommandBuffers.push_back( UniqueHandle( commandBuffer, deleter ) ); } - return createResultValueType( result, std::move( uniqueCommandBuffers ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( uniqueCommandBuffers ) ); } template < @@ -4618,7 +4622,7 @@ namespace VULKAN_HPP_NAMESPACE std::vector commandBuffers( allocateInfo.commandBufferCount ); VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkAllocateCommandBuffers( m_device, reinterpret_cast( &allocateInfo ), reinterpret_cast( commandBuffers.data() ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffersUnique" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffersUnique" ); std::vector, CommandBufferAllocator> uniqueCommandBuffers( commandBufferAllocator ); uniqueCommandBuffers.reserve( allocateInfo.commandBufferCount ); PoolFree deleter( *this, allocateInfo.commandPool, d ); @@ -4626,7 +4630,7 @@ namespace VULKAN_HPP_NAMESPACE { uniqueCommandBuffers.push_back( UniqueHandle( commandBuffer, deleter ) ); } - return createResultValueType( result, std::move( uniqueCommandBuffers ) ); + 
return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( uniqueCommandBuffers ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -4705,9 +4709,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkBeginCommandBuffer( m_commandBuffer, reinterpret_cast( &beginInfo ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::begin" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::begin" ); - return createResultValueType( result ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -4728,9 +4732,9 @@ namespace VULKAN_HPP_NAMESPACE # endif VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkEndCommandBuffer( m_commandBuffer ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::end" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::end" ); - return createResultValueType( result ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ @@ -4753,9 +4757,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkResetCommandBuffer( m_commandBuffer, static_cast( flags ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::reset" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::reset" ); - return createResultValueType( result ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ @@ -5730,9 +5734,9 @@ namespace VULKAN_HPP_NAMESPACE uint32_t apiVersion; VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkEnumerateInstanceVersion( &apiVersion ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceVersion" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceVersion" ); - return createResultValueType( result, std::move( apiVersion ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( apiVersion ) ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -5757,9 +5761,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkBindBufferMemory2( m_device, bindInfos.size(), reinterpret_cast( bindInfos.data() ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory2" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory2" ); - return createResultValueType( result ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -5784,9 +5788,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkBindImageMemory2( m_device, bindInfos.size(), reinterpret_cast( bindInfos.data() ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory2" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory2" ); - return createResultValueType( result ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -5877,13 +5881,13 @@ namespace VULKAN_HPP_NAMESPACE m_instance, &physicalDeviceGroupCount, reinterpret_cast( 
physicalDeviceGroupProperties.data() ) ) ); } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroups" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroups" ); VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() ); if ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() ) { physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); } - return createResultValueType( result, std::move( physicalDeviceGroupProperties ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( physicalDeviceGroupProperties ) ); } template ( physicalDeviceGroupProperties.data() ) ) ); } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroups" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroups" ); VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() ); if ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() ) { physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); } - return createResultValueType( result, std::move( physicalDeviceGroupProperties ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( physicalDeviceGroupProperties ) ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -6254,9 +6258,9 @@ namespace VULKAN_HPP_NAMESPACE d.vkGetPhysicalDeviceImageFormatProperties2( m_physicalDevice, reinterpret_cast( &imageFormatInfo ), reinterpret_cast( &imageFormatProperties ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2" ); - return createResultValueType( result, std::move( imageFormatProperties ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( imageFormatProperties ) ); } template @@ -6275,9 +6279,9 @@ namespace VULKAN_HPP_NAMESPACE d.vkGetPhysicalDeviceImageFormatProperties2( m_physicalDevice, reinterpret_cast( &imageFormatInfo ), reinterpret_cast( &imageFormatProperties ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2" ); - return createResultValueType( result, std::move( structureChain ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( structureChain ) ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -6608,9 +6612,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &ycbcrConversion ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversion" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversion" ); - return createResultValueType( result, std::move( ycbcrConversion ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( ycbcrConversion ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -6632,9 +6636,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( 
&createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &ycbcrConversion ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversionUnique" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversionUnique" ); - return createResultValueType( + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, UniqueHandle( ycbcrConversion, ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ @@ -6731,9 +6735,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &descriptorUpdateTemplate ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplate" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplate" ); - return createResultValueType( result, std::move( descriptorUpdateTemplate ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( descriptorUpdateTemplate ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -6755,11 +6759,11 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &descriptorUpdateTemplate ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplateUnique" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplateUnique" ); - return createResultValueType( result, - UniqueHandle( - descriptorUpdateTemplate, ObjectDestroy( *this, allocator, d ) ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, + UniqueHandle( + descriptorUpdateTemplate, ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -7068,9 +7072,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &renderPass ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2" ); - return createResultValueType( result, std::move( renderPass ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( renderPass ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -7091,9 +7095,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &renderPass ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2Unique" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2Unique" ); - return createResultValueType( + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, UniqueHandle( renderPass, ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ @@ -7203,9 +7207,9 @@ namespace VULKAN_HPP_NAMESPACE uint64_t value; VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetSemaphoreCounterValue( m_device, static_cast( semaphore ), &value ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreCounterValue" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreCounterValue" ); - 
return createResultValueType( result, std::move( value ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( value ) ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -7230,7 +7234,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkWaitSemaphores( m_device, reinterpret_cast( &waitInfo ), timeout ) ); - resultCheck( + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::waitSemaphores", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout } ); return static_cast( result ); @@ -7257,9 +7261,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkSignalSemaphore( m_device, reinterpret_cast( &signalInfo ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::signalSemaphore" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::signalSemaphore" ); - return createResultValueType( result ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -7375,13 +7379,13 @@ namespace VULKAN_HPP_NAMESPACE d.vkGetPhysicalDeviceToolProperties( m_physicalDevice, &toolCount, reinterpret_cast( toolProperties.data() ) ) ); } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolProperties" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolProperties" ); VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() ); if ( toolCount < toolProperties.size() ) { toolProperties.resize( toolCount ); } - return createResultValueType( result, std::move( toolProperties ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( toolProperties ) ); } template < @@ -7413,13 +7417,13 @@ namespace VULKAN_HPP_NAMESPACE d.vkGetPhysicalDeviceToolProperties( m_physicalDevice, &toolCount, reinterpret_cast( toolProperties.data() ) ) ); } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolProperties" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolProperties" ); VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() ); if ( toolCount < toolProperties.size() ) { toolProperties.resize( toolCount ); } - return createResultValueType( result, std::move( toolProperties ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( toolProperties ) ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -7454,9 +7458,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &privateDataSlot ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createPrivateDataSlot" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createPrivateDataSlot" ); - return createResultValueType( result, std::move( privateDataSlot ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( privateDataSlot ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -7477,9 +7481,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &privateDataSlot ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING 
"::Device::createPrivateDataSlotUnique" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createPrivateDataSlotUnique" ); - return createResultValueType( + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, UniqueHandle( privateDataSlot, ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ @@ -7566,9 +7570,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkSetPrivateData( m_device, static_cast( objectType_ ), objectHandle, static_cast( privateDataSlot ), data ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setPrivateData" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setPrivateData" ); - return createResultValueType( result ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ @@ -7726,9 +7730,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkQueueSubmit2( m_queue, submits.size(), reinterpret_cast( submits.data() ), static_cast( fence ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::submit2" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::submit2" ); - return createResultValueType( result ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -8346,9 +8350,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Bool32 supported; VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetPhysicalDeviceSurfaceSupportKHR( m_physicalDevice, queueFamilyIndex, static_cast( surface ), reinterpret_cast( &supported ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceSupportKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceSupportKHR" ); - return createResultValueType( result, std::move( supported ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( supported ) ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -8375,9 +8379,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR surfaceCapabilities; VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetPhysicalDeviceSurfaceCapabilitiesKHR( m_physicalDevice, static_cast( surface ), reinterpret_cast( &surfaceCapabilities ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilitiesKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilitiesKHR" ); - return createResultValueType( result, std::move( surfaceCapabilities ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surfaceCapabilities ) ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -8416,13 +8420,13 @@ namespace VULKAN_HPP_NAMESPACE m_physicalDevice, static_cast( surface ), &surfaceFormatCount, reinterpret_cast( surfaceFormats.data() ) ) ); } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormatsKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormatsKHR" ); VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() ); if ( surfaceFormatCount < surfaceFormats.size() ) { 
surfaceFormats.resize( surfaceFormatCount ); } - return createResultValueType( result, std::move( surfaceFormats ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surfaceFormats ) ); } template ( surface ), &surfaceFormatCount, reinterpret_cast( surfaceFormats.data() ) ) ); } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormatsKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormatsKHR" ); VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() ); if ( surfaceFormatCount < surfaceFormats.size() ) { surfaceFormats.resize( surfaceFormatCount ); } - return createResultValueType( result, std::move( surfaceFormats ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surfaceFormats ) ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -8497,13 +8501,13 @@ namespace VULKAN_HPP_NAMESPACE m_physicalDevice, static_cast( surface ), &presentModeCount, reinterpret_cast( presentModes.data() ) ) ); } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModesKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModesKHR" ); VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() ); if ( presentModeCount < presentModes.size() ) { presentModes.resize( presentModeCount ); } - return createResultValueType( result, std::move( presentModes ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( presentModes ) ); } template ( surface ), &presentModeCount, reinterpret_cast( presentModes.data() ) ) ); } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModesKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModesKHR" ); VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() ); if ( presentModeCount < presentModes.size() ) { presentModes.resize( presentModeCount ); } - return createResultValueType( result, std::move( presentModes ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( presentModes ) ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -8576,9 +8580,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &swapchain ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSwapchainKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSwapchainKHR" ); - return createResultValueType( result, std::move( swapchain ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( swapchain ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -8599,9 +8603,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &swapchain ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSwapchainKHRUnique" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSwapchainKHRUnique" ); - return createResultValueType( + return 
VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, UniqueHandle( swapchain, ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ @@ -8694,13 +8698,13 @@ namespace VULKAN_HPP_NAMESPACE m_device, static_cast( swapchain ), &swapchainImageCount, reinterpret_cast( swapchainImages.data() ) ) ); } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainImagesKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainImagesKHR" ); VULKAN_HPP_ASSERT( swapchainImageCount <= swapchainImages.size() ); if ( swapchainImageCount < swapchainImages.size() ) { swapchainImages.resize( swapchainImageCount ); } - return createResultValueType( result, std::move( swapchainImages ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( swapchainImages ) ); } template ( swapchain ), &swapchainImageCount, reinterpret_cast( swapchainImages.data() ) ) ); } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainImagesKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainImagesKHR" ); VULKAN_HPP_ASSERT( swapchainImageCount <= swapchainImages.size() ); if ( swapchainImageCount < swapchainImages.size() ) { swapchainImages.resize( swapchainImageCount ); } - return createResultValueType( result, std::move( swapchainImages ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( swapchainImages ) ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -8767,12 +8771,12 @@ namespace VULKAN_HPP_NAMESPACE uint32_t imageIndex; VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkAcquireNextImageKHR( m_device, static_cast( swapchain ), timeout, static_cast( semaphore ), static_cast( fence ), &imageIndex ) ); - resultCheck( result, - VULKAN_HPP_NAMESPACE_STRING "::Device::acquireNextImageKHR", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, - VULKAN_HPP_NAMESPACE::Result::eTimeout, - VULKAN_HPP_NAMESPACE::Result::eNotReady, - VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::acquireNextImageKHR", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, + VULKAN_HPP_NAMESPACE::Result::eTimeout, + VULKAN_HPP_NAMESPACE::Result::eNotReady, + VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } ); return ResultValue( result, std::move( imageIndex ) ); } @@ -8798,7 +8802,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkQueuePresentKHR( m_queue, reinterpret_cast( &presentInfo ) ) ); - resultCheck( + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::presentKHR", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } ); return static_cast( result ); @@ -8828,9 +8832,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR deviceGroupPresentCapabilities; VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetDeviceGroupPresentCapabilitiesKHR( m_device, reinterpret_cast( &deviceGroupPresentCapabilities ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupPresentCapabilitiesKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupPresentCapabilitiesKHR" ); - return 
createResultValueType( result, std::move( deviceGroupPresentCapabilities ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( deviceGroupPresentCapabilities ) ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -8858,9 +8862,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes; VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetDeviceGroupSurfacePresentModesKHR( m_device, static_cast( surface ), reinterpret_cast( &modes ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupSurfacePresentModesKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupSurfacePresentModesKHR" ); - return createResultValueType( result, std::move( modes ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( modes ) ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -8900,13 +8904,13 @@ namespace VULKAN_HPP_NAMESPACE m_physicalDevice, static_cast( surface ), &rectCount, reinterpret_cast( rects.data() ) ) ); } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getPresentRectanglesKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getPresentRectanglesKHR" ); VULKAN_HPP_ASSERT( rectCount <= rects.size() ); if ( rectCount < rects.size() ) { rects.resize( rectCount ); } - return createResultValueType( result, std::move( rects ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( rects ) ); } template ( surface ), &rectCount, reinterpret_cast( rects.data() ) ) ); } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getPresentRectanglesKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getPresentRectanglesKHR" ); VULKAN_HPP_ASSERT( rectCount <= rects.size() ); if ( rectCount < rects.size() ) { rects.resize( rectCount ); } - return createResultValueType( result, std::move( rects ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( rects ) ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -8967,12 +8971,12 @@ namespace VULKAN_HPP_NAMESPACE uint32_t imageIndex; VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkAcquireNextImage2KHR( m_device, reinterpret_cast( &acquireInfo ), &imageIndex ) ); - resultCheck( result, - VULKAN_HPP_NAMESPACE_STRING "::Device::acquireNextImage2KHR", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, - VULKAN_HPP_NAMESPACE::Result::eTimeout, - VULKAN_HPP_NAMESPACE::Result::eNotReady, - VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::acquireNextImage2KHR", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, + VULKAN_HPP_NAMESPACE::Result::eTimeout, + VULKAN_HPP_NAMESPACE::Result::eNotReady, + VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } ); return ResultValue( result, std::move( imageIndex ) ); } @@ -9013,13 +9017,13 @@ namespace VULKAN_HPP_NAMESPACE d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPropertiesKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( 
result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPropertiesKHR" ); VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); if ( propertyCount < properties.size() ) { properties.resize( propertyCount ); } - return createResultValueType( result, std::move( properties ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); } template < @@ -9047,13 +9051,13 @@ namespace VULKAN_HPP_NAMESPACE d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPropertiesKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPropertiesKHR" ); VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); if ( propertyCount < properties.size() ) { properties.resize( propertyCount ); } - return createResultValueType( result, std::move( properties ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -9091,13 +9095,13 @@ namespace VULKAN_HPP_NAMESPACE m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlanePropertiesKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlanePropertiesKHR" ); VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); if ( propertyCount < properties.size() ) { properties.resize( propertyCount ); } - return createResultValueType( result, std::move( properties ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); } template < @@ -9127,13 +9131,13 @@ namespace VULKAN_HPP_NAMESPACE m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlanePropertiesKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlanePropertiesKHR" ); VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); if ( propertyCount < properties.size() ) { properties.resize( propertyCount ); } - return createResultValueType( result, std::move( properties ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -9171,13 +9175,13 @@ namespace VULKAN_HPP_NAMESPACE d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, reinterpret_cast( displays.data() ) ) ); } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR" ); VULKAN_HPP_ASSERT( displayCount <= displays.size() ); if ( displayCount < displays.size() ) { displays.resize( displayCount ); } - return createResultValueType( result, std::move( displays ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( displays ) ); } template ( 
displays.data() ) ) ); } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR" ); VULKAN_HPP_ASSERT( displayCount <= displays.size() ); if ( displayCount < displays.size() ) { displays.resize( displayCount ); } - return createResultValueType( result, std::move( displays ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( displays ) ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -9250,13 +9254,13 @@ namespace VULKAN_HPP_NAMESPACE m_physicalDevice, static_cast( display ), &propertyCount, reinterpret_cast( properties.data() ) ) ); } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModePropertiesKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModePropertiesKHR" ); VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); if ( propertyCount < properties.size() ) { properties.resize( propertyCount ); } - return createResultValueType( result, std::move( properties ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); } template ( display ), &propertyCount, reinterpret_cast( properties.data() ) ) ); } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModePropertiesKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModePropertiesKHR" ); VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); if ( propertyCount < properties.size() ) { properties.resize( propertyCount ); } - return createResultValueType( result, std::move( properties ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -9333,9 +9337,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &mode ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDisplayModeKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDisplayModeKHR" ); - return createResultValueType( result, std::move( mode ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( mode ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -9358,9 +9362,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &mode ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDisplayModeKHRUnique" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDisplayModeKHRUnique" ); - return createResultValueType( + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, UniqueHandle( mode, ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ @@ -9391,9 +9395,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR capabilities; VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetDisplayPlaneCapabilitiesKHR( 
m_physicalDevice, static_cast( mode ), planeIndex, reinterpret_cast( &capabilities ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneCapabilitiesKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneCapabilitiesKHR" ); - return createResultValueType( result, std::move( capabilities ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( capabilities ) ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -9428,9 +9432,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDisplayPlaneSurfaceKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDisplayPlaneSurfaceKHR" ); - return createResultValueType( result, std::move( surface ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surface ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -9451,9 +9455,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDisplayPlaneSurfaceKHRUnique" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDisplayPlaneSurfaceKHRUnique" ); - return createResultValueType( + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, UniqueHandle( surface, ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ @@ -9495,9 +9499,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( swapchains.data() ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHR" ); - return createResultValueType( result, std::move( swapchains ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( swapchains ) ); } template ( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( swapchains.data() ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHR" ); - return createResultValueType( result, std::move( swapchains ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( swapchains ) ); } template @@ -9544,9 +9548,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &swapchain ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainKHR" ); - return createResultValueType( result, std::move( swapchain ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( swapchain ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -9569,7 +9573,7 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( 
swapchains.data() ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHRUnique" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHRUnique" ); std::vector, SwapchainKHRAllocator> uniqueSwapchains; uniqueSwapchains.reserve( createInfos.size() ); ObjectDestroy deleter( *this, allocator, d ); @@ -9577,7 +9581,7 @@ namespace VULKAN_HPP_NAMESPACE { uniqueSwapchains.push_back( UniqueHandle( swapchain, deleter ) ); } - return createResultValueType( result, std::move( uniqueSwapchains ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( uniqueSwapchains ) ); } template ( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( swapchains.data() ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHRUnique" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHRUnique" ); std::vector, SwapchainKHRAllocator> uniqueSwapchains( swapchainKHRAllocator ); uniqueSwapchains.reserve( createInfos.size() ); ObjectDestroy deleter( *this, allocator, d ); @@ -9611,7 +9615,7 @@ namespace VULKAN_HPP_NAMESPACE { uniqueSwapchains.push_back( UniqueHandle( swapchain, deleter ) ); } - return createResultValueType( result, std::move( uniqueSwapchains ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( uniqueSwapchains ) ); } template @@ -9632,9 +9636,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &swapchain ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainKHRUnique" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainKHRUnique" ); - return createResultValueType( + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, UniqueHandle( swapchain, ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ @@ -9674,9 +9678,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createXlibSurfaceKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createXlibSurfaceKHR" ); - return createResultValueType( result, std::move( surface ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surface ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -9697,9 +9701,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createXlibSurfaceKHRUnique" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createXlibSurfaceKHRUnique" ); - return createResultValueType( + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, UniqueHandle( surface, ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ @@ -9765,9 +9769,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createXcbSurfaceKHR" ); 
+ VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createXcbSurfaceKHR" ); - return createResultValueType( result, std::move( surface ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surface ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -9788,9 +9792,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createXcbSurfaceKHRUnique" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createXcbSurfaceKHRUnique" ); - return createResultValueType( + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, UniqueHandle( surface, ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ @@ -9860,9 +9864,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createWaylandSurfaceKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createWaylandSurfaceKHR" ); - return createResultValueType( result, std::move( surface ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surface ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -9883,9 +9887,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createWaylandSurfaceKHRUnique" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createWaylandSurfaceKHRUnique" ); - return createResultValueType( + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, UniqueHandle( surface, ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ @@ -9952,9 +9956,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createAndroidSurfaceKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createAndroidSurfaceKHR" ); - return createResultValueType( result, std::move( surface ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surface ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -9975,9 +9979,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createAndroidSurfaceKHRUnique" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createAndroidSurfaceKHRUnique" ); - return createResultValueType( + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, UniqueHandle( surface, ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ @@ -10018,9 +10022,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createWin32SurfaceKHR" ); + 
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createWin32SurfaceKHR" ); - return createResultValueType( result, std::move( surface ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surface ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -10041,9 +10045,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createWin32SurfaceKHRUnique" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createWin32SurfaceKHRUnique" ); - return createResultValueType( + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, UniqueHandle( surface, ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ @@ -10091,9 +10095,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &callback ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugReportCallbackEXT" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugReportCallbackEXT" ); - return createResultValueType( result, std::move( callback ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( callback ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -10114,9 +10118,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &callback ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugReportCallbackEXTUnique" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugReportCallbackEXTUnique" ); - return createResultValueType( + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, UniqueHandle( callback, ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ @@ -10248,9 +10252,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkDebugMarkerSetObjectTagEXT( m_device, reinterpret_cast( &tagInfo ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::debugMarkerSetObjectTagEXT" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::debugMarkerSetObjectTagEXT" ); - return createResultValueType( result ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -10274,9 +10278,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkDebugMarkerSetObjectNameEXT( m_device, reinterpret_cast( &nameInfo ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::debugMarkerSetObjectNameEXT" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::debugMarkerSetObjectNameEXT" ); - return createResultValueType( result ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -10356,9 +10360,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR capabilities; VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetPhysicalDeviceVideoCapabilitiesKHR( m_physicalDevice, reinterpret_cast( &videoProfile ), reinterpret_cast( &capabilities ) ) ); - 
resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoCapabilitiesKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoCapabilitiesKHR" ); - return createResultValueType( result, std::move( capabilities ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( capabilities ) ); } template @@ -10374,9 +10378,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR & capabilities = structureChain.template get(); VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetPhysicalDeviceVideoCapabilitiesKHR( m_physicalDevice, reinterpret_cast( &videoProfile ), reinterpret_cast( &capabilities ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoCapabilitiesKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoCapabilitiesKHR" ); - return createResultValueType( result, std::move( structureChain ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( structureChain ) ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -10423,13 +10427,13 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( videoFormatProperties.data() ) ) ); } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoFormatPropertiesKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoFormatPropertiesKHR" ); VULKAN_HPP_ASSERT( videoFormatPropertyCount <= videoFormatProperties.size() ); if ( videoFormatPropertyCount < videoFormatProperties.size() ) { videoFormatProperties.resize( videoFormatPropertyCount ); } - return createResultValueType( result, std::move( videoFormatProperties ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( videoFormatProperties ) ); } template ( videoFormatProperties.data() ) ) ); } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoFormatPropertiesKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoFormatPropertiesKHR" ); VULKAN_HPP_ASSERT( videoFormatPropertyCount <= videoFormatProperties.size() ); if ( videoFormatPropertyCount < videoFormatProperties.size() ) { videoFormatProperties.resize( videoFormatPropertyCount ); } - return createResultValueType( result, std::move( videoFormatProperties ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( videoFormatProperties ) ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -10506,9 +10510,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &videoSession ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createVideoSessionKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createVideoSessionKHR" ); - return createResultValueType( result, std::move( videoSession ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( videoSession ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -10529,9 +10533,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &videoSession ) ) ); - resultCheck( result, 
VULKAN_HPP_NAMESPACE_STRING "::Device::createVideoSessionKHRUnique" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createVideoSessionKHRUnique" ); - return createResultValueType( + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, UniqueHandle( videoSession, ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ @@ -10717,9 +10721,9 @@ namespace VULKAN_HPP_NAMESPACE static_cast( videoSession ), bindSessionMemoryInfos.size(), reinterpret_cast( bindSessionMemoryInfos.data() ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindVideoSessionMemoryKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindVideoSessionMemoryKHR" ); - return createResultValueType( result ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -10755,9 +10759,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &videoSessionParameters ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createVideoSessionParametersKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createVideoSessionParametersKHR" ); - return createResultValueType( result, std::move( videoSessionParameters ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( videoSessionParameters ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -10778,11 +10782,11 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &videoSessionParameters ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createVideoSessionParametersKHRUnique" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createVideoSessionParametersKHRUnique" ); - return createResultValueType( result, - UniqueHandle( - videoSessionParameters, ObjectDestroy( *this, allocator, d ) ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, + UniqueHandle( + videoSessionParameters, ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -10815,9 +10819,9 @@ namespace VULKAN_HPP_NAMESPACE d.vkUpdateVideoSessionParametersKHR( m_device, static_cast( videoSessionParameters ), reinterpret_cast( &updateInfo ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::updateVideoSessionParametersKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::updateVideoSessionParametersKHR" ); - return createResultValueType( result ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -11181,9 +11185,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &module ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCuModuleNVX" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCuModuleNVX" ); - return createResultValueType( result, std::move( module ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( module ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -11204,10 +11208,10 @@ namespace 
VULKAN_HPP_NAMESPACE reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &module ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCuModuleNVXUnique" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCuModuleNVXUnique" ); - return createResultValueType( result, - UniqueHandle( module, ObjectDestroy( *this, allocator, d ) ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( module, ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -11243,9 +11247,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &function ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCuFunctionNVX" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCuFunctionNVX" ); - return createResultValueType( result, std::move( function ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( function ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -11266,9 +11270,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &function ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCuFunctionNVXUnique" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCuFunctionNVXUnique" ); - return createResultValueType( + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, UniqueHandle( function, ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ @@ -11449,9 +11453,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX properties; VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetImageViewAddressNVX( m_device, static_cast( imageView ), reinterpret_cast( &properties ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getImageViewAddressNVX" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getImageViewAddressNVX" ); - return createResultValueType( result, std::move( properties ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -11549,13 +11553,13 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( info.data() ) ) ); } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getShaderInfoAMD" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getShaderInfoAMD" ); VULKAN_HPP_ASSERT( infoSize <= info.size() ); if ( infoSize < info.size() ) { info.resize( infoSize ); } - return createResultValueType( result, std::move( info ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( info ) ); } template ( info.data() ) ) ); } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getShaderInfoAMD" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getShaderInfoAMD" ); VULKAN_HPP_ASSERT( infoSize <= info.size() ); if ( infoSize < info.size() ) { info.resize( infoSize ); } - return 
createResultValueType( result, std::move( info ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( info ) ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -11671,9 +11675,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createStreamDescriptorSurfaceGGP" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createStreamDescriptorSurfaceGGP" ); - return createResultValueType( result, std::move( surface ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surface ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -11694,9 +11698,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createStreamDescriptorSurfaceGGPUnique" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createStreamDescriptorSurfaceGGPUnique" ); - return createResultValueType( + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, UniqueHandle( surface, ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ @@ -11755,9 +11759,9 @@ namespace VULKAN_HPP_NAMESPACE static_cast( flags ), static_cast( externalHandleType ), reinterpret_cast( &externalImageFormatProperties ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getExternalImageFormatPropertiesNV" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getExternalImageFormatPropertiesNV" ); - return createResultValueType( result, std::move( externalImageFormatProperties ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( externalImageFormatProperties ) ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -11788,9 +11792,9 @@ namespace VULKAN_HPP_NAMESPACE HANDLE handle; VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetMemoryWin32HandleNV( m_device, static_cast( memory ), static_cast( handleType ), &handle ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandleNV" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandleNV" ); - return createResultValueType( result, std::move( handle ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( handle ) ); } # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ #endif /*VK_USE_PLATFORM_WIN32_KHR*/ @@ -11958,9 +11962,9 @@ namespace VULKAN_HPP_NAMESPACE d.vkGetPhysicalDeviceImageFormatProperties2KHR( m_physicalDevice, reinterpret_cast( &imageFormatInfo ), reinterpret_cast( &imageFormatProperties ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2KHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2KHR" ); - return createResultValueType( result, std::move( imageFormatProperties ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( imageFormatProperties ) ); } template @@ -11979,9 +11983,9 @@ namespace VULKAN_HPP_NAMESPACE d.vkGetPhysicalDeviceImageFormatProperties2KHR( m_physicalDevice, reinterpret_cast( &imageFormatInfo ), 
reinterpret_cast( &imageFormatProperties ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2KHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2KHR" ); - return createResultValueType( result, std::move( structureChain ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( structureChain ) ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -12332,9 +12336,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createViSurfaceNN" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createViSurfaceNN" ); - return createResultValueType( result, std::move( surface ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surface ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -12355,9 +12359,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createViSurfaceNNUnique" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createViSurfaceNNUnique" ); - return createResultValueType( + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, UniqueHandle( surface, ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ @@ -12413,13 +12417,13 @@ namespace VULKAN_HPP_NAMESPACE m_instance, &physicalDeviceGroupCount, reinterpret_cast( physicalDeviceGroupProperties.data() ) ) ); } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroupsKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroupsKHR" ); VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() ); if ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() ) { physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); } - return createResultValueType( result, std::move( physicalDeviceGroupProperties ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( physicalDeviceGroupProperties ) ); } template ( physicalDeviceGroupProperties.data() ) ) ); } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroupsKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroupsKHR" ); VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() ); if ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() ) { physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); } - return createResultValueType( result, std::move( physicalDeviceGroupProperties ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( physicalDeviceGroupProperties ) ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -12521,9 +12525,9 @@ namespace VULKAN_HPP_NAMESPACE HANDLE handle; VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetMemoryWin32HandleKHR( m_device, 
reinterpret_cast( &getWin32HandleInfo ), &handle ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandleKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandleKHR" ); - return createResultValueType( result, std::move( handle ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( handle ) ); } # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -12557,9 +12561,9 @@ namespace VULKAN_HPP_NAMESPACE static_cast( handleType ), handle, reinterpret_cast( &memoryWin32HandleProperties ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandlePropertiesKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandlePropertiesKHR" ); - return createResultValueType( result, std::move( memoryWin32HandleProperties ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( memoryWin32HandleProperties ) ); } # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ #endif /*VK_USE_PLATFORM_WIN32_KHR*/ @@ -12588,9 +12592,9 @@ namespace VULKAN_HPP_NAMESPACE int fd; VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetMemoryFdKHR( m_device, reinterpret_cast( &getFdInfo ), &fd ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryFdKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryFdKHR" ); - return createResultValueType( result, std::move( fd ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( fd ) ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -12618,9 +12622,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR memoryFdProperties; VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetMemoryFdPropertiesKHR( m_device, static_cast( handleType ), fd, reinterpret_cast( &memoryFdProperties ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryFdPropertiesKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryFdPropertiesKHR" ); - return createResultValueType( result, std::move( memoryFdProperties ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( memoryFdProperties ) ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -12684,9 +12688,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkImportSemaphoreWin32HandleKHR( m_device, reinterpret_cast( &importSemaphoreWin32HandleInfo ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreWin32HandleKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreWin32HandleKHR" ); - return createResultValueType( result ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -12712,9 +12716,9 @@ namespace VULKAN_HPP_NAMESPACE HANDLE handle; VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetSemaphoreWin32HandleKHR( m_device, reinterpret_cast( &getWin32HandleInfo ), &handle ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreWin32HandleKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreWin32HandleKHR" ); - return createResultValueType( result, std::move( handle ) ); + return 
VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( handle ) ); } # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ #endif /*VK_USE_PLATFORM_WIN32_KHR*/ @@ -12741,9 +12745,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkImportSemaphoreFdKHR( m_device, reinterpret_cast( &importSemaphoreFdInfo ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreFdKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreFdKHR" ); - return createResultValueType( result ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -12769,9 +12773,9 @@ namespace VULKAN_HPP_NAMESPACE int fd; VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetSemaphoreFdKHR( m_device, reinterpret_cast( &getFdInfo ), &fd ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreFdKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreFdKHR" ); - return createResultValueType( result, std::move( fd ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( fd ) ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -12917,9 +12921,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &descriptorUpdateTemplate ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplateKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplateKHR" ); - return createResultValueType( result, std::move( descriptorUpdateTemplate ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( descriptorUpdateTemplate ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -12941,11 +12945,11 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &descriptorUpdateTemplate ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplateKHRUnique" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplateKHRUnique" ); - return createResultValueType( result, - UniqueHandle( - descriptorUpdateTemplate, ObjectDestroy( *this, allocator, d ) ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, + UniqueHandle( + descriptorUpdateTemplate, ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -13085,9 +13089,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkAcquireXlibDisplayEXT( m_physicalDevice, &dpy, static_cast( display ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::acquireXlibDisplayEXT" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::acquireXlibDisplayEXT" ); - return createResultValueType( result ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -13114,9 +13118,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DisplayKHR display; VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetRandROutputDisplayEXT( m_physicalDevice, &dpy, rrOutput, reinterpret_cast( 
&display ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getRandROutputDisplayEXT" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getRandROutputDisplayEXT" ); - return createResultValueType( result, std::move( display ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( display ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -13132,10 +13136,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DisplayKHR display; VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetRandROutputDisplayEXT( m_physicalDevice, &dpy, rrOutput, reinterpret_cast( &display ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getRandROutputDisplayEXTUnique" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getRandROutputDisplayEXTUnique" ); - return createResultValueType( result, - UniqueHandle( display, ObjectRelease( *this, d ) ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( display, ObjectRelease( *this, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -13168,9 +13172,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT surfaceCapabilities; VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetPhysicalDeviceSurfaceCapabilities2EXT( m_physicalDevice, static_cast( surface ), reinterpret_cast( &surfaceCapabilities ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2EXT" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2EXT" ); - return createResultValueType( result, std::move( surfaceCapabilities ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surfaceCapabilities ) ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -13199,9 +13203,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkDisplayPowerControlEXT( m_device, static_cast( display ), reinterpret_cast( &displayPowerInfo ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::displayPowerControlEXT" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::displayPowerControlEXT" ); - return createResultValueType( result ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -13236,9 +13240,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &deviceEventInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &fence ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::registerEventEXT" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::registerEventEXT" ); - return createResultValueType( result, std::move( fence ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( fence ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -13259,10 +13263,10 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &deviceEventInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &fence ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::registerEventEXTUnique" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::registerEventEXTUnique" ); - return 
createResultValueType( result, - UniqueHandle( fence, ObjectDestroy( *this, allocator, d ) ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( fence, ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -13302,9 +13306,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &displayEventInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &fence ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::registerDisplayEventEXT" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::registerDisplayEventEXT" ); - return createResultValueType( result, std::move( fence ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( fence ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -13327,10 +13331,10 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &displayEventInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &fence ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::registerDisplayEventEXTUnique" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::registerDisplayEventEXTUnique" ); - return createResultValueType( result, - UniqueHandle( fence, ObjectDestroy( *this, allocator, d ) ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( fence, ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -13359,9 +13363,9 @@ namespace VULKAN_HPP_NAMESPACE uint64_t counterValue; VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetSwapchainCounterEXT( m_device, static_cast( swapchain ), static_cast( counter ), &counterValue ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainCounterEXT" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainCounterEXT" ); - return createResultValueType( result, std::move( counterValue ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( counterValue ) ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -13391,9 +13395,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE displayTimingProperties; VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetRefreshCycleDurationGOOGLE( m_device, static_cast( swapchain ), reinterpret_cast( &displayTimingProperties ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getRefreshCycleDurationGOOGLE" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getRefreshCycleDurationGOOGLE" ); - return createResultValueType( result, std::move( displayTimingProperties ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( displayTimingProperties ) ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -13439,13 +13443,13 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( presentationTimings.data() ) ) ); } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPastPresentationTimingGOOGLE" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPastPresentationTimingGOOGLE" ); VULKAN_HPP_ASSERT( presentationTimingCount <= presentationTimings.size() ); if ( presentationTimingCount < 
presentationTimings.size() ) { presentationTimings.resize( presentationTimingCount ); } - return createResultValueType( result, std::move( presentationTimings ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( presentationTimings ) ); } template < @@ -13482,13 +13486,13 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( presentationTimings.data() ) ) ); } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPastPresentationTimingGOOGLE" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPastPresentationTimingGOOGLE" ); VULKAN_HPP_ASSERT( presentationTimingCount <= presentationTimings.size() ); if ( presentationTimingCount < presentationTimings.size() ) { presentationTimings.resize( presentationTimingCount ); } - return createResultValueType( result, std::move( presentationTimings ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( presentationTimings ) ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -13608,9 +13612,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &renderPass ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2KHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2KHR" ); - return createResultValueType( result, std::move( renderPass ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( renderPass ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -13631,9 +13635,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &renderPass ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2KHRUnique" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2KHRUnique" ); - return createResultValueType( + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, UniqueHandle( renderPass, ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ @@ -13735,9 +13739,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetSwapchainStatusKHR( m_device, static_cast( swapchain ) ) ); - resultCheck( result, - VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainStatusKHR", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainStatusKHR", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } ); return static_cast( result ); } @@ -13801,9 +13805,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkImportFenceWin32HandleKHR( m_device, reinterpret_cast( &importFenceWin32HandleInfo ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importFenceWin32HandleKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importFenceWin32HandleKHR" ); - return createResultValueType( result ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -13830,9 +13834,9 @@ namespace VULKAN_HPP_NAMESPACE HANDLE handle; 
    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      d.vkGetFenceWin32HandleKHR( m_device, reinterpret_cast<const VkFenceGetWin32HandleInfoKHR *>( &getWin32HandleInfo ), &handle ) );
-    resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceWin32HandleKHR" );
+    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceWin32HandleKHR" );
 
-    return createResultValueType( result, std::move( handle ) );
+    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( handle ) );
   }
 # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
 #endif /*VK_USE_PLATFORM_WIN32_KHR*/
@@ -13859,9 +13863,9 @@ namespace VULKAN_HPP_NAMESPACE
 
     VULKAN_HPP_NAMESPACE::Result result =
       static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkImportFenceFdKHR( m_device, reinterpret_cast<const VkImportFenceFdInfoKHR *>( &importFenceFdInfo ) ) );
-    resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importFenceFdKHR" );
+    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importFenceFdKHR" );
 
-    return createResultValueType( result );
+    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result );
   }
 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
@@ -13948,14 +13952,14 @@ namespace VULKAN_HPP_NAMESPACE
                                                                                  reinterpret_cast<VkPerformanceCounterDescriptionKHR *>( counterDescriptions.data() ) ) );
       }
     } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
-    resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR" );
+    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR" );
     VULKAN_HPP_ASSERT( counterCount <= counters.size() );
     if ( counterCount < counters.size() )
     {
       counters.resize( counterCount );
       counterDescriptions.resize( counterCount );
     }
-    return createResultValueType( result, std::move( data_ ) );
+    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data_ ) );
   }
@@ -15172,9 +15176,9 @@ namespace VULKAN_HPP_NAMESPACE
                                                   reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                                   reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
-    resultCheck( result,
-                 VULKAN_HPP_NAMESPACE_STRING "::Device::createExecutionGraphPipelinesAMDX",
-                 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+    VULKAN_HPP_NAMESPACE::detail::resultCheck( result,
+                                               VULKAN_HPP_NAMESPACE_STRING "::Device::createExecutionGraphPipelinesAMDX",
+                                               { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
 
     return ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>( result, std::move( pipelines ) );
   }
@@ -16464,12 +16468,12 @@ namespace VULKAN_HPP_NAMESPACE
                                                reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                                reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
-    resultCheck( result,
-                 VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHR",
-                 { VULKAN_HPP_NAMESPACE::Result::eSuccess,
-                   VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR,
-                   VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR,
-                   VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+    VULKAN_HPP_NAMESPACE::detail::resultCheck( result,
+                                               VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHR",
+                                               { VULKAN_HPP_NAMESPACE::Result::eSuccess,
+                                                 VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR,
+                                                 VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR,
+                                                 VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
 
     return ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>( result, std::move( pipelines ) );
   }
@@ -18488,9 +18492,9 @@ namespace VULKAN_HPP_NAMESPACE
     uint64_t value;
     VULKAN_HPP_NAMESPACE::Result result =
      static_cast<VULKAN_HPP_NAMESPACE::Result>( d.vkGetSemaphoreCounterValueKHR( m_device, static_cast<VkSemaphore>( semaphore ), &value ) );
-    resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreCounterValueKHR" );
+    VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreCounterValueKHR" );
 
-    return createResultValueType( result, std::move( value ) );
+    return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( value ) );
   }
 #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
createResultValueType( + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, UniqueHandle( surface, ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ @@ -18940,9 +18944,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createMetalSurfaceEXT" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createMetalSurfaceEXT" ); - return createResultValueType( result, std::move( surface ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surface ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -18963,9 +18967,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createMetalSurfaceEXTUnique" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createMetalSurfaceEXTUnique" ); - return createResultValueType( + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, UniqueHandle( surface, ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ @@ -19011,13 +19015,13 @@ namespace VULKAN_HPP_NAMESPACE m_physicalDevice, &fragmentShadingRateCount, reinterpret_cast( fragmentShadingRates.data() ) ) ); } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getFragmentShadingRatesKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getFragmentShadingRatesKHR" ); VULKAN_HPP_ASSERT( fragmentShadingRateCount <= fragmentShadingRates.size() ); if ( fragmentShadingRateCount < fragmentShadingRates.size() ) { fragmentShadingRates.resize( fragmentShadingRateCount ); } - return createResultValueType( result, std::move( fragmentShadingRates ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( fragmentShadingRates ) ); } template ( fragmentShadingRates.data() ) ) ); } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getFragmentShadingRatesKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getFragmentShadingRatesKHR" ); VULKAN_HPP_ASSERT( fragmentShadingRateCount <= fragmentShadingRates.size() ); if ( fragmentShadingRateCount < fragmentShadingRates.size() ) { fragmentShadingRates.resize( fragmentShadingRateCount ); } - return createResultValueType( result, std::move( fragmentShadingRates ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( fragmentShadingRates ) ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -19113,17 +19117,20 @@ namespace VULKAN_HPP_NAMESPACE #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_INLINE void CommandBuffer::setRenderingInputAttachmentIndicesKHR( const VULKAN_HPP_NAMESPACE::RenderingInputAttachmentIndexInfoKHR * pLocationInfo, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void + CommandBuffer::setRenderingInputAttachmentIndicesKHR( const VULKAN_HPP_NAMESPACE::RenderingInputAttachmentIndexInfoKHR * pInputAttachmentIndexInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { 
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetRenderingInputAttachmentIndicesKHR( m_commandBuffer, reinterpret_cast( pLocationInfo ) ); + d.vkCmdSetRenderingInputAttachmentIndicesKHR( m_commandBuffer, + reinterpret_cast( pInputAttachmentIndexInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void CommandBuffer::setRenderingInputAttachmentIndicesKHR( const VULKAN_HPP_NAMESPACE::RenderingInputAttachmentIndexInfoKHR & locationInfo, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void + CommandBuffer::setRenderingInputAttachmentIndicesKHR( const VULKAN_HPP_NAMESPACE::RenderingInputAttachmentIndexInfoKHR & inputAttachmentIndexInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) @@ -19131,7 +19138,8 @@ namespace VULKAN_HPP_NAMESPACE "Function requires " ); # endif - d.vkCmdSetRenderingInputAttachmentIndicesKHR( m_commandBuffer, reinterpret_cast( &locationInfo ) ); + d.vkCmdSetRenderingInputAttachmentIndicesKHR( m_commandBuffer, + reinterpret_cast( &inputAttachmentIndexInfo ) ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -19199,13 +19207,13 @@ namespace VULKAN_HPP_NAMESPACE d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, &toolCount, reinterpret_cast( toolProperties.data() ) ) ); } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolPropertiesEXT" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolPropertiesEXT" ); VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() ); if ( toolCount < toolProperties.size() ) { toolProperties.resize( toolCount ); } - return createResultValueType( result, std::move( toolProperties ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( toolProperties ) ); } template < @@ -19237,13 +19245,13 @@ namespace VULKAN_HPP_NAMESPACE d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, &toolCount, reinterpret_cast( toolProperties.data() ) ) ); } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolPropertiesEXT" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolPropertiesEXT" ); VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() ); if ( toolCount < toolProperties.size() ) { toolProperties.resize( toolCount ); } - return createResultValueType( result, std::move( toolProperties ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( toolProperties ) ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -19271,9 +19279,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkWaitForPresentKHR( m_device, static_cast( swapchain ), presentId, timeout ) ); - resultCheck( result, - VULKAN_HPP_NAMESPACE_STRING "::Device::waitForPresentKHR", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( + result, + VULKAN_HPP_NAMESPACE_STRING "::Device::waitForPresentKHR", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } ); return static_cast( result ); } @@ -19315,13 +19324,13 
@@ namespace VULKAN_HPP_NAMESPACE m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixPropertiesNV" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixPropertiesNV" ); VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); if ( propertyCount < properties.size() ) { properties.resize( propertyCount ); } - return createResultValueType( result, std::move( properties ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); } template ( properties.data() ) ) ); } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixPropertiesNV" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixPropertiesNV" ); VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); if ( propertyCount < properties.size() ) { properties.resize( propertyCount ); } - return createResultValueType( result, std::move( properties ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -19401,13 +19410,13 @@ namespace VULKAN_HPP_NAMESPACE m_physicalDevice, &combinationCount, reinterpret_cast( combinations.data() ) ) ); } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV" ); VULKAN_HPP_ASSERT( combinationCount <= combinations.size() ); if ( combinationCount < combinations.size() ) { combinations.resize( combinationCount ); } - return createResultValueType( result, std::move( combinations ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( combinations ) ); } template ( combinations.data() ) ) ); } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV" ); VULKAN_HPP_ASSERT( combinationCount <= combinations.size() ); if ( combinationCount < combinations.size() ) { combinations.resize( combinationCount ); } - return createResultValueType( result, std::move( combinations ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( combinations ) ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -19496,13 +19505,13 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( presentModes.data() ) ) ); } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModes2EXT" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModes2EXT" ); VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() ); if ( presentModeCount < presentModes.size() ) { presentModes.resize( presentModeCount ); } - return 
createResultValueType( result, std::move( presentModes ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( presentModes ) ); } template ( presentModes.data() ) ) ); } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModes2EXT" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModes2EXT" ); VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() ); if ( presentModeCount < presentModes.size() ) { presentModes.resize( presentModeCount ); } - return createResultValueType( result, std::move( presentModes ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( presentModes ) ); } # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -19566,9 +19575,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkAcquireFullScreenExclusiveModeEXT( m_device, static_cast( swapchain ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::acquireFullScreenExclusiveModeEXT" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::acquireFullScreenExclusiveModeEXT" ); - return createResultValueType( result ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ @@ -19592,9 +19601,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkReleaseFullScreenExclusiveModeEXT( m_device, static_cast( swapchain ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::releaseFullScreenExclusiveModeEXT" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::releaseFullScreenExclusiveModeEXT" ); - return createResultValueType( result ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ @@ -19623,9 +19632,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes; VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetDeviceGroupSurfacePresentModes2EXT( m_device, reinterpret_cast( &surfaceInfo ), reinterpret_cast( &modes ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupSurfacePresentModes2EXT" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupSurfacePresentModes2EXT" ); - return createResultValueType( result, std::move( modes ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( modes ) ); } # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ #endif /*VK_USE_PLATFORM_WIN32_KHR*/ @@ -19663,9 +19672,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createHeadlessSurfaceEXT" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createHeadlessSurfaceEXT" ); - return createResultValueType( result, std::move( surface ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surface ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -19686,9 +19695,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); - resultCheck( result, 
VULKAN_HPP_NAMESPACE_STRING "::Instance::createHeadlessSurfaceEXTUnique" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createHeadlessSurfaceEXTUnique" ); - return createResultValueType( + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, UniqueHandle( surface, ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ @@ -20006,9 +20015,9 @@ namespace VULKAN_HPP_NAMESPACE m_device, reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &deferredOperation ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDeferredOperationKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDeferredOperationKHR" ); - return createResultValueType( result, std::move( deferredOperation ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( deferredOperation ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -20026,9 +20035,9 @@ namespace VULKAN_HPP_NAMESPACE m_device, reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &deferredOperation ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDeferredOperationKHRUnique" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDeferredOperationKHRUnique" ); - return createResultValueType( + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, UniqueHandle( deferredOperation, ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ @@ -20143,9 +20152,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkDeferredOperationJoinKHR( m_device, static_cast( operation ) ) ); - resultCheck( result, - VULKAN_HPP_NAMESPACE_STRING "::Device::deferredOperationJoinKHR", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eThreadDoneKHR, VULKAN_HPP_NAMESPACE::Result::eThreadIdleKHR } ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( + result, + VULKAN_HPP_NAMESPACE_STRING "::Device::deferredOperationJoinKHR", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eThreadDoneKHR, VULKAN_HPP_NAMESPACE::Result::eThreadIdleKHR } ); return static_cast( result ); } @@ -20195,13 +20205,13 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( properties.data() ) ) ); } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutablePropertiesKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutablePropertiesKHR" ); VULKAN_HPP_ASSERT( executableCount <= properties.size() ); if ( executableCount < properties.size() ) { properties.resize( executableCount ); } - return createResultValueType( result, std::move( properties ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); } template ( properties.data() ) ) ); } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutablePropertiesKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutablePropertiesKHR" ); VULKAN_HPP_ASSERT( executableCount <= properties.size() ); if ( executableCount < properties.size() ) { properties.resize( executableCount ); } - return createResultValueType( result, std::move( 
properties ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -20292,13 +20302,13 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( statistics.data() ) ) ); } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableStatisticsKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableStatisticsKHR" ); VULKAN_HPP_ASSERT( statisticCount <= statistics.size() ); if ( statisticCount < statistics.size() ) { statistics.resize( statisticCount ); } - return createResultValueType( result, std::move( statistics ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( statistics ) ); } template ( statistics.data() ) ) ); } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableStatisticsKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableStatisticsKHR" ); VULKAN_HPP_ASSERT( statisticCount <= statistics.size() ); if ( statisticCount < statistics.size() ) { statistics.resize( statisticCount ); } - return createResultValueType( result, std::move( statistics ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( statistics ) ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -20391,13 +20401,13 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( internalRepresentations.data() ) ) ); } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableInternalRepresentationsKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableInternalRepresentationsKHR" ); VULKAN_HPP_ASSERT( internalRepresentationCount <= internalRepresentations.size() ); if ( internalRepresentationCount < internalRepresentations.size() ) { internalRepresentations.resize( internalRepresentationCount ); } - return createResultValueType( result, std::move( internalRepresentations ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( internalRepresentations ) ); } template ( internalRepresentations.data() ) ) ); } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableInternalRepresentationsKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableInternalRepresentationsKHR" ); VULKAN_HPP_ASSERT( internalRepresentationCount <= internalRepresentations.size() ); if ( internalRepresentationCount < internalRepresentations.size() ) { internalRepresentations.resize( internalRepresentationCount ); } - return createResultValueType( result, std::move( internalRepresentations ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( internalRepresentations ) ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -20468,9 +20478,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCopyMemoryToImageEXT( m_device, reinterpret_cast( ©MemoryToImageInfo ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::copyMemoryToImageEXT" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( 
result, VULKAN_HPP_NAMESPACE_STRING "::Device::copyMemoryToImageEXT" ); - return createResultValueType( result ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -20494,9 +20504,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCopyImageToMemoryEXT( m_device, reinterpret_cast( ©ImageToMemoryInfo ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::copyImageToMemoryEXT" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::copyImageToMemoryEXT" ); - return createResultValueType( result ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -20520,9 +20530,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCopyImageToImageEXT( m_device, reinterpret_cast( ©ImageToImageInfo ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::copyImageToImageEXT" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::copyImageToImageEXT" ); - return createResultValueType( result ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -20549,9 +20559,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkTransitionImageLayoutEXT( m_device, transitions.size(), reinterpret_cast( transitions.data() ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::transitionImageLayoutEXT" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::transitionImageLayoutEXT" ); - return createResultValueType( result ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -20635,9 +20645,9 @@ namespace VULKAN_HPP_NAMESPACE void * pData; VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkMapMemory2KHR( m_device, reinterpret_cast( &memoryMapInfo ), &pData ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::mapMemory2KHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::mapMemory2KHR" ); - return createResultValueType( result, std::move( pData ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( pData ) ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -20661,9 +20671,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkUnmapMemory2KHR( m_device, reinterpret_cast( &memoryUnmapInfo ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::unmapMemory2KHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::unmapMemory2KHR" ); - return createResultValueType( result ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -20689,9 +20699,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkReleaseSwapchainImagesEXT( m_device, reinterpret_cast( &releaseInfo ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::releaseSwapchainImagesEXT" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::releaseSwapchainImagesEXT" ); - return createResultValueType( result ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); 
} #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -20839,9 +20849,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &indirectCommandsLayout ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createIndirectCommandsLayoutNV" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createIndirectCommandsLayoutNV" ); - return createResultValueType( result, std::move( indirectCommandsLayout ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( indirectCommandsLayout ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -20862,11 +20872,11 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &indirectCommandsLayout ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createIndirectCommandsLayoutNVUnique" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createIndirectCommandsLayoutNVUnique" ); - return createResultValueType( result, - UniqueHandle( - indirectCommandsLayout, ObjectDestroy( *this, allocator, d ) ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, + UniqueHandle( + indirectCommandsLayout, ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -20974,9 +20984,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkAcquireDrmDisplayEXT( m_physicalDevice, drmFd, static_cast( display ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::acquireDrmDisplayEXT" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::acquireDrmDisplayEXT" ); - return createResultValueType( result ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ @@ -21003,9 +21013,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DisplayKHR display; VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetDrmDisplayEXT( m_physicalDevice, drmFd, connectorId, reinterpret_cast( &display ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDrmDisplayEXT" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDrmDisplayEXT" ); - return createResultValueType( result, std::move( display ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( display ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -21021,10 +21031,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DisplayKHR display; VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetDrmDisplayEXT( m_physicalDevice, drmFd, connectorId, reinterpret_cast( &display ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDrmDisplayEXTUnique" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDrmDisplayEXTUnique" ); - return createResultValueType( result, - UniqueHandle( display, ObjectRelease( *this, d ) ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( display, ObjectRelease( *this, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -21062,9 +21072,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &createInfo ), 
reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &privateDataSlot ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createPrivateDataSlotEXT" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createPrivateDataSlotEXT" ); - return createResultValueType( result, std::move( privateDataSlot ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( privateDataSlot ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -21085,9 +21095,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &privateDataSlot ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createPrivateDataSlotEXTUnique" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createPrivateDataSlotEXTUnique" ); - return createResultValueType( + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, UniqueHandle( privateDataSlot, ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ @@ -21147,9 +21157,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkSetPrivateDataEXT( m_device, static_cast( objectType_ ), objectHandle, static_cast( privateDataSlot ), data ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setPrivateDataEXT" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setPrivateDataEXT" ); - return createResultValueType( result ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ @@ -21215,9 +21225,9 @@ namespace VULKAN_HPP_NAMESPACE d.vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR( m_physicalDevice, reinterpret_cast( &qualityLevelInfo ), reinterpret_cast( &qualityLevelProperties ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoEncodeQualityLevelPropertiesKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoEncodeQualityLevelPropertiesKHR" ); - return createResultValueType( result, std::move( qualityLevelProperties ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( qualityLevelProperties ) ); } template @@ -21238,9 +21248,9 @@ namespace VULKAN_HPP_NAMESPACE d.vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR( m_physicalDevice, reinterpret_cast( &qualityLevelInfo ), reinterpret_cast( &qualityLevelProperties ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoEncodeQualityLevelPropertiesKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoEncodeQualityLevelPropertiesKHR" ); - return createResultValueType( result, std::move( structureChain ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( structureChain ) ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -21297,9 +21307,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( data.data() ) ) ); } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getEncodedVideoSessionParametersKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getEncodedVideoSessionParametersKHR" ); - return createResultValueType( result, std::move( data_ ) ); + return 
VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data_ ) ); } template ( data.data() ) ) ); } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getEncodedVideoSessionParametersKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getEncodedVideoSessionParametersKHR" ); - return createResultValueType( result, std::move( data_ ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data_ ) ); } template @@ -21382,9 +21392,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( data.data() ) ) ); } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getEncodedVideoSessionParametersKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getEncodedVideoSessionParametersKHR" ); - return createResultValueType( result, std::move( data_ ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data_ ) ); } template ( data.data() ) ) ); } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getEncodedVideoSessionParametersKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getEncodedVideoSessionParametersKHR" ); - return createResultValueType( result, std::move( data_ ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data_ ) ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -21492,9 +21502,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &module ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCudaModuleNV" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCudaModuleNV" ); - return createResultValueType( result, std::move( module ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( module ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -21515,9 +21525,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &module ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCudaModuleNVUnique" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCudaModuleNVUnique" ); - return createResultValueType( + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, UniqueHandle( module, ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ @@ -21556,13 +21566,13 @@ namespace VULKAN_HPP_NAMESPACE d.vkGetCudaModuleCacheNV( m_device, static_cast( module ), &cacheSize, reinterpret_cast( cacheData.data() ) ) ); } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getCudaModuleCacheNV" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getCudaModuleCacheNV" ); VULKAN_HPP_ASSERT( cacheSize <= cacheData.size() ); if ( cacheSize < cacheData.size() ) { cacheData.resize( cacheSize ); } - return createResultValueType( result, std::move( cacheData ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( cacheData ) ); } template ( 
module ), &cacheSize, reinterpret_cast( cacheData.data() ) ) ); } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getCudaModuleCacheNV" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getCudaModuleCacheNV" ); VULKAN_HPP_ASSERT( cacheSize <= cacheData.size() ); if ( cacheSize < cacheData.size() ) { cacheData.resize( cacheSize ); } - return createResultValueType( result, std::move( cacheData ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( cacheData ) ); } # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -21630,9 +21640,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &function ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCudaFunctionNV" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCudaFunctionNV" ); - return createResultValueType( result, std::move( function ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( function ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -21653,9 +21663,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &function ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCudaFunctionNVUnique" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCudaFunctionNVUnique" ); - return createResultValueType( + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, UniqueHandle( function, ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ @@ -21960,9 +21970,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkQueueSubmit2KHR( m_queue, submits.size(), reinterpret_cast( submits.data() ), static_cast( fence ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::submit2KHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::submit2KHR" ); - return createResultValueType( result ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -22244,9 +22254,9 @@ namespace VULKAN_HPP_NAMESPACE DataType data; VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetBufferOpaqueCaptureDescriptorDataEXT( m_device, reinterpret_cast( &info ), &data ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getBufferOpaqueCaptureDescriptorDataEXT" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getBufferOpaqueCaptureDescriptorDataEXT" ); - return createResultValueType( result, std::move( data ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -22273,9 +22283,9 @@ namespace VULKAN_HPP_NAMESPACE DataType data; VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetImageOpaqueCaptureDescriptorDataEXT( m_device, reinterpret_cast( &info ), &data ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getImageOpaqueCaptureDescriptorDataEXT" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getImageOpaqueCaptureDescriptorDataEXT" ); - return createResultValueType( result, std::move( 
data ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -22302,9 +22312,9 @@ namespace VULKAN_HPP_NAMESPACE DataType data; VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetImageViewOpaqueCaptureDescriptorDataEXT( m_device, reinterpret_cast( &info ), &data ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getImageViewOpaqueCaptureDescriptorDataEXT" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getImageViewOpaqueCaptureDescriptorDataEXT" ); - return createResultValueType( result, std::move( data ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -22331,9 +22341,9 @@ namespace VULKAN_HPP_NAMESPACE DataType data; VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetSamplerOpaqueCaptureDescriptorDataEXT( m_device, reinterpret_cast( &info ), &data ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSamplerOpaqueCaptureDescriptorDataEXT" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSamplerOpaqueCaptureDescriptorDataEXT" ); - return createResultValueType( result, std::move( data ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -22361,9 +22371,9 @@ namespace VULKAN_HPP_NAMESPACE DataType data; VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT( m_device, reinterpret_cast( &info ), &data ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureOpaqueCaptureDescriptorDataEXT" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureOpaqueCaptureDescriptorDataEXT" ); - return createResultValueType( result, std::move( data ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -22564,56 +22574,6 @@ namespace VULKAN_HPP_NAMESPACE return static_cast( d.vkGetDeviceFaultInfoEXT( m_device, reinterpret_cast( pFaultCounts ), reinterpret_cast( pFaultInfo ) ) ); } - -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE - typename ResultValueType>::type - Device::getFaultInfoEXT( Dispatch const & d ) const - { - VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); -# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) - VULKAN_HPP_ASSERT( d.vkGetDeviceFaultInfoEXT && "Function requires " ); -# endif - - std::pair data_; - VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT & faultCounts = data_.first; - VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT & faultInfo = data_.second; - VULKAN_HPP_NAMESPACE::Result result; - do - { - result = - static_cast( d.vkGetDeviceFaultInfoEXT( m_device, reinterpret_cast( &faultCounts ), nullptr ) ); - if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) - { - std::free( faultInfo.pAddressInfos ); - if ( faultCounts.addressInfoCount ) - { - faultInfo.pAddressInfos = reinterpret_cast( - std::malloc( faultCounts.addressInfoCount * sizeof( VULKAN_HPP_NAMESPACE::DeviceFaultAddressInfoEXT ) ) ); - } - std::free( faultInfo.pVendorInfos ); - if ( faultCounts.vendorInfoCount ) - { - faultInfo.pVendorInfos = reinterpret_cast( - std::malloc( faultCounts.vendorInfoCount * sizeof( 
VULKAN_HPP_NAMESPACE::DeviceFaultVendorInfoEXT ) ) ); - } - std::free( faultInfo.pVendorBinaryData ); - if ( faultCounts.vendorBinarySize ) - { - faultInfo.pVendorBinaryData = std::malloc( faultCounts.vendorBinarySize ); - } - result = static_cast( d.vkGetDeviceFaultInfoEXT( - m_device, reinterpret_cast( &faultCounts ), reinterpret_cast( &faultInfo ) ) ); - } - } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( - result, VULKAN_HPP_NAMESPACE_STRING "::Device::getFaultInfoEXT", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncomplete } ); - - return createResultValueType( result, std::move( data_ ) ); - } -#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - #if defined( VK_USE_PLATFORM_WIN32_KHR ) //=== VK_NV_acquire_winrt_display === @@ -22637,9 +22597,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkAcquireWinrtDisplayNV( m_physicalDevice, static_cast( display ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::acquireWinrtDisplayNV" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::acquireWinrtDisplayNV" ); - return createResultValueType( result ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ @@ -22665,9 +22625,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DisplayKHR display; VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetWinrtDisplayNV( m_physicalDevice, deviceRelativeId, reinterpret_cast( &display ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getWinrtDisplayNV" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getWinrtDisplayNV" ); - return createResultValueType( result, std::move( display ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( display ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -22683,10 +22643,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DisplayKHR display; VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetWinrtDisplayNV( m_physicalDevice, deviceRelativeId, reinterpret_cast( &display ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getWinrtDisplayNVUnique" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getWinrtDisplayNVUnique" ); - return createResultValueType( result, - UniqueHandle( display, ObjectRelease( *this, d ) ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( + result, UniqueHandle( display, ObjectRelease( *this, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -22726,9 +22686,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDirectFBSurfaceEXT" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDirectFBSurfaceEXT" ); - return createResultValueType( result, std::move( surface ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surface ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -22749,9 +22709,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); - resultCheck( 
result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDirectFBSurfaceEXTUnique" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDirectFBSurfaceEXTUnique" ); - return createResultValueType( + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, UniqueHandle( surface, ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ @@ -22848,9 +22808,9 @@ namespace VULKAN_HPP_NAMESPACE zx_handle_t zirconHandle; VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetMemoryZirconHandleFUCHSIA( m_device, reinterpret_cast( &getZirconHandleInfo ), &zirconHandle ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryZirconHandleFUCHSIA" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryZirconHandleFUCHSIA" ); - return createResultValueType( result, std::move( zirconHandle ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( zirconHandle ) ); } # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -22888,9 +22848,9 @@ namespace VULKAN_HPP_NAMESPACE static_cast( handleType ), zirconHandle, reinterpret_cast( &memoryZirconHandleProperties ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryZirconHandlePropertiesFUCHSIA" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryZirconHandlePropertiesFUCHSIA" ); - return createResultValueType( result, std::move( memoryZirconHandleProperties ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( memoryZirconHandleProperties ) ); } # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ #endif /*VK_USE_PLATFORM_FUCHSIA*/ @@ -22920,9 +22880,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkImportSemaphoreZirconHandleFUCHSIA( m_device, reinterpret_cast( &importSemaphoreZirconHandleInfo ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreZirconHandleFUCHSIA" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreZirconHandleFUCHSIA" ); - return createResultValueType( result ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -22950,9 +22910,9 @@ namespace VULKAN_HPP_NAMESPACE zx_handle_t zirconHandle; VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetSemaphoreZirconHandleFUCHSIA( m_device, reinterpret_cast( &getZirconHandleInfo ), &zirconHandle ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreZirconHandleFUCHSIA" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreZirconHandleFUCHSIA" ); - return createResultValueType( result, std::move( zirconHandle ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( zirconHandle ) ); } # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ #endif /*VK_USE_PLATFORM_FUCHSIA*/ @@ -22992,9 +22952,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &collection ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferCollectionFUCHSIA" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferCollectionFUCHSIA" ); - return createResultValueType( result, std::move( collection 
) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( collection ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -23015,9 +22975,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &collection ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferCollectionFUCHSIAUnique" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferCollectionFUCHSIAUnique" ); - return createResultValueType( + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, UniqueHandle( collection, ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ @@ -23049,9 +23009,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkSetBufferCollectionImageConstraintsFUCHSIA( m_device, static_cast( collection ), reinterpret_cast( &imageConstraintsInfo ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setBufferCollectionImageConstraintsFUCHSIA" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setBufferCollectionImageConstraintsFUCHSIA" ); - return createResultValueType( result ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -23081,9 +23041,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkSetBufferCollectionBufferConstraintsFUCHSIA( m_device, static_cast( collection ), reinterpret_cast( &bufferConstraintsInfo ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setBufferCollectionBufferConstraintsFUCHSIA" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setBufferCollectionBufferConstraintsFUCHSIA" ); - return createResultValueType( result ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -23168,9 +23128,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA properties; VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetBufferCollectionPropertiesFUCHSIA( m_device, static_cast( collection ), reinterpret_cast( &properties ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getBufferCollectionPropertiesFUCHSIA" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getBufferCollectionPropertiesFUCHSIA" ); - return createResultValueType( result, std::move( properties ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); } # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ #endif /*VK_USE_PLATFORM_FUCHSIA*/ @@ -23201,9 +23161,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Extent2D maxWorkgroupSize; VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI( m_device, static_cast( renderpass ), reinterpret_cast( &maxWorkgroupSize ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSubpassShadingMaxWorkgroupSizeHUAWEI" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSubpassShadingMaxWorkgroupSizeHUAWEI" ); - return createResultValueType( result, std::move( maxWorkgroupSize ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( maxWorkgroupSize ) ); } #endif /* 
VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -23251,9 +23211,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::RemoteAddressNV address; VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetMemoryRemoteAddressNV( m_device, reinterpret_cast( &memoryGetRemoteAddressInfo ), reinterpret_cast( &address ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryRemoteAddressNV" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryRemoteAddressNV" ); - return createResultValueType( result, std::move( address ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( address ) ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -23282,9 +23242,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::BaseOutStructure pipelineProperties; VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetPipelinePropertiesEXT( m_device, reinterpret_cast( &pipelineInfo ), reinterpret_cast( &pipelineProperties ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelinePropertiesEXT" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelinePropertiesEXT" ); - return createResultValueType( result, std::move( pipelineProperties ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( pipelineProperties ) ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -23361,9 +23321,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createScreenSurfaceQNX" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createScreenSurfaceQNX" ); - return createResultValueType( result, std::move( surface ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surface ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -23384,9 +23344,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createScreenSurfaceQNXUnique" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createScreenSurfaceQNXUnique" ); - return createResultValueType( + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, UniqueHandle( surface, ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ @@ -23560,9 +23520,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( µmap ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createMicromapEXT" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createMicromapEXT" ); - return createResultValueType( result, std::move( micromap ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( micromap ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -23583,9 +23543,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( µmap ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createMicromapEXTUnique" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING 
"::Device::createMicromapEXTUnique" ); - return createResultValueType( + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, UniqueHandle( micromap, ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ @@ -23691,7 +23651,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkBuildMicromapsEXT( m_device, static_cast( deferredOperation ), infos.size(), reinterpret_cast( infos.data() ) ) ); - resultCheck( + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::buildMicromapsEXT", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } ); @@ -23723,7 +23683,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCopyMicromapEXT( m_device, static_cast( deferredOperation ), reinterpret_cast( &info ) ) ); - resultCheck( + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::copyMicromapEXT", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } ); @@ -23754,7 +23714,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCopyMicromapToMemoryEXT( m_device, static_cast( deferredOperation ), reinterpret_cast( &info ) ) ); - resultCheck( + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::copyMicromapToMemoryEXT", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } ); @@ -23785,7 +23745,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCopyMemoryToMicromapEXT( m_device, static_cast( deferredOperation ), reinterpret_cast( &info ) ) ); - resultCheck( + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::copyMemoryToMicromapEXT", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } ); @@ -23832,9 +23792,9 @@ namespace VULKAN_HPP_NAMESPACE data.size() * sizeof( DataType ), reinterpret_cast( data.data() ), stride ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::writeMicromapsPropertiesEXT" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::writeMicromapsPropertiesEXT" ); - return createResultValueType( result, std::move( data ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); } template @@ -23858,9 +23818,9 @@ namespace VULKAN_HPP_NAMESPACE sizeof( DataType ), reinterpret_cast( &data ), stride ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::writeMicromapsPropertyEXT" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::writeMicromapsPropertyEXT" ); - return createResultValueType( result, std::move( data ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -24963,13 +24923,13 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( imageFormatProperties.data() ) ) ); } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING 
"::PhysicalDevice::getOpticalFlowImageFormatsNV" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getOpticalFlowImageFormatsNV" ); VULKAN_HPP_ASSERT( formatCount <= imageFormatProperties.size() ); if ( formatCount < imageFormatProperties.size() ) { imageFormatProperties.resize( formatCount ); } - return createResultValueType( result, std::move( imageFormatProperties ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( imageFormatProperties ) ); } template ( imageFormatProperties.data() ) ) ); } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getOpticalFlowImageFormatsNV" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getOpticalFlowImageFormatsNV" ); VULKAN_HPP_ASSERT( formatCount <= imageFormatProperties.size() ); if ( formatCount < imageFormatProperties.size() ) { imageFormatProperties.resize( formatCount ); } - return createResultValueType( result, std::move( imageFormatProperties ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( imageFormatProperties ) ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -25048,9 +25008,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &session ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createOpticalFlowSessionNV" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createOpticalFlowSessionNV" ); - return createResultValueType( result, std::move( session ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( session ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -25071,9 +25031,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &session ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createOpticalFlowSessionNVUnique" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createOpticalFlowSessionNVUnique" ); - return createResultValueType( + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, UniqueHandle( session, ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ @@ -25168,9 +25128,9 @@ namespace VULKAN_HPP_NAMESPACE static_cast( bindingPoint ), static_cast( view ), static_cast( layout ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindOpticalFlowSessionImageNV" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindOpticalFlowSessionImageNV" ); - return createResultValueType( result ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ @@ -25346,6 +25306,28 @@ namespace VULKAN_HPP_NAMESPACE } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + //=== VK_AMD_anti_lag === + + template + VULKAN_HPP_INLINE void Device::antiLagUpdateAMD( const VULKAN_HPP_NAMESPACE::AntiLagDataAMD * pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkAntiLagUpdateAMD( m_device, reinterpret_cast( pData ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::antiLagUpdateAMD( const 
VULKAN_HPP_NAMESPACE::AntiLagDataAMD & data, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkAntiLagUpdateAMD && "Function requires " ); +# endif + + d.vkAntiLagUpdateAMD( m_device, reinterpret_cast( &data ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + //=== VK_EXT_shader_object === template @@ -25382,9 +25364,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( shaders.data() ) ) ); - resultCheck( result, - VULKAN_HPP_NAMESPACE_STRING "::Device::createShadersEXT", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncompatibleShaderBinaryEXT } ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::createShadersEXT", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncompatibleShaderBinaryEXT } ); return ResultValue>( result, std::move( shaders ) ); } @@ -25410,9 +25392,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( shaders.data() ) ) ); - resultCheck( result, - VULKAN_HPP_NAMESPACE_STRING "::Device::createShadersEXT", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncompatibleShaderBinaryEXT } ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::createShadersEXT", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncompatibleShaderBinaryEXT } ); return ResultValue>( result, std::move( shaders ) ); } @@ -25435,9 +25417,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &shader ) ) ); - resultCheck( result, - VULKAN_HPP_NAMESPACE_STRING "::Device::createShaderEXT", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncompatibleShaderBinaryEXT } ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::createShaderEXT", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncompatibleShaderBinaryEXT } ); return ResultValue( result, std::move( shader ) ); } @@ -25461,9 +25443,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( shaders.data() ) ) ); - resultCheck( result, - VULKAN_HPP_NAMESPACE_STRING "::Device::createShadersEXTUnique", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncompatibleShaderBinaryEXT } ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::createShadersEXTUnique", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncompatibleShaderBinaryEXT } ); std::vector, ShaderEXTAllocator> uniqueShaders; uniqueShaders.reserve( createInfos.size() ); ObjectDestroy deleter( *this, allocator, d ); @@ -25496,9 +25478,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( createInfos.data() ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( shaders.data() ) ) ); - resultCheck( result, - VULKAN_HPP_NAMESPACE_STRING "::Device::createShadersEXTUnique", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncompatibleShaderBinaryEXT } ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + 
VULKAN_HPP_NAMESPACE_STRING "::Device::createShadersEXTUnique", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncompatibleShaderBinaryEXT } ); std::vector, ShaderEXTAllocator> uniqueShaders( shaderEXTAllocator ); uniqueShaders.reserve( createInfos.size() ); ObjectDestroy deleter( *this, allocator, d ); @@ -25527,9 +25509,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &createInfo ), reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &shader ) ) ); - resultCheck( result, - VULKAN_HPP_NAMESPACE_STRING "::Device::createShaderEXTUnique", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncompatibleShaderBinaryEXT } ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::createShaderEXTUnique", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncompatibleShaderBinaryEXT } ); return ResultValue>( result, UniqueHandle( shader, ObjectDestroy( *this, allocator, d ) ) ); @@ -25620,13 +25602,13 @@ namespace VULKAN_HPP_NAMESPACE d.vkGetShaderBinaryDataEXT( m_device, static_cast( shader ), &dataSize, reinterpret_cast( data.data() ) ) ); } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getShaderBinaryDataEXT" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getShaderBinaryDataEXT" ); VULKAN_HPP_ASSERT( dataSize <= data.size() ); if ( dataSize < data.size() ) { data.resize( dataSize ); } - return createResultValueType( result, std::move( data ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); } template ( shader ), &dataSize, reinterpret_cast( data.data() ) ) ); } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getShaderBinaryDataEXT" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getShaderBinaryDataEXT" ); VULKAN_HPP_ASSERT( dataSize <= data.size() ); if ( dataSize < data.size() ) { data.resize( dataSize ); } - return createResultValueType( result, std::move( data ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -25700,6 +25682,469 @@ namespace VULKAN_HPP_NAMESPACE } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + //=== VK_KHR_pipeline_binary === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createPipelineBinariesKHR( const VULKAN_HPP_NAMESPACE::PipelineBinaryCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::PipelineBinaryHandlesInfoKHR * pBinaries, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreatePipelineBinariesKHR( m_device, + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pBinaries ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> + Device::createPipelineBinariesKHR( const VULKAN_HPP_NAMESPACE::PipelineBinaryCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreatePipelineBinariesKHR && "Function 
requires " ); +# endif + + std::vector pipelineBinaries; + VULKAN_HPP_NAMESPACE::PipelineBinaryHandlesInfoKHR binaries; + VULKAN_HPP_NAMESPACE::Result result; + if ( createInfo.pKeysAndDataInfo ) + { + VULKAN_HPP_ASSERT( !createInfo.pipeline && !createInfo.pPipelineCreateInfo ); + pipelineBinaries.resize( createInfo.pKeysAndDataInfo->binaryCount ); + binaries.pipelineBinaryCount = createInfo.pKeysAndDataInfo->binaryCount; + binaries.pPipelineBinaries = pipelineBinaries.data(); + result = static_cast( d.vkCreatePipelineBinariesKHR( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &binaries ) ) ); + } + else + { + VULKAN_HPP_ASSERT( !createInfo.pipeline ^ !createInfo.pPipelineCreateInfo ); + result = static_cast( d.vkCreatePipelineBinariesKHR( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &binaries ) ) ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + pipelineBinaries.resize( binaries.pipelineBinaryCount ); + binaries.pPipelineBinaries = pipelineBinaries.data(); + result = static_cast( d.vkCreatePipelineBinariesKHR( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &binaries ) ) ); + } + } + + VULKAN_HPP_NAMESPACE::detail::resultCheck( + result, + VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineBinariesKHR", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncomplete, VULKAN_HPP_NAMESPACE::Result::ePipelineBinaryMissingKHR } ); + + return ResultValue>( result, std::move( pipelineBinaries ) ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> + Device::createPipelineBinariesKHR( const VULKAN_HPP_NAMESPACE::PipelineBinaryCreateInfoKHR & createInfo, + Optional allocator, + PipelineBinaryKHRAllocator & pipelineBinaryKHRAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreatePipelineBinariesKHR && "Function requires " ); +# endif + + std::vector pipelineBinaries( pipelineBinaryKHRAllocator ); + VULKAN_HPP_NAMESPACE::PipelineBinaryHandlesInfoKHR binaries; + VULKAN_HPP_NAMESPACE::Result result; + if ( createInfo.pKeysAndDataInfo ) + { + VULKAN_HPP_ASSERT( !createInfo.pipeline && !createInfo.pPipelineCreateInfo ); + pipelineBinaries.resize( createInfo.pKeysAndDataInfo->binaryCount ); + binaries.pipelineBinaryCount = createInfo.pKeysAndDataInfo->binaryCount; + binaries.pPipelineBinaries = pipelineBinaries.data(); + result = static_cast( d.vkCreatePipelineBinariesKHR( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &binaries ) ) ); + } + else + { + VULKAN_HPP_ASSERT( !createInfo.pipeline ^ !createInfo.pPipelineCreateInfo ); + result = static_cast( d.vkCreatePipelineBinariesKHR( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &binaries ) ) ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + pipelineBinaries.resize( binaries.pipelineBinaryCount ); + binaries.pPipelineBinaries = pipelineBinaries.data(); + result = static_cast( d.vkCreatePipelineBinariesKHR( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &binaries ) ) ); + } + } + + 
VULKAN_HPP_NAMESPACE::detail::resultCheck( + result, + VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineBinariesKHR", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncomplete, VULKAN_HPP_NAMESPACE::Result::ePipelineBinaryMissingKHR } ); + + return ResultValue>( result, std::move( pipelineBinaries ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue, PipelineBinaryKHRAllocator>> + Device::createPipelineBinariesKHRUnique( const VULKAN_HPP_NAMESPACE::PipelineBinaryCreateInfoKHR & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreatePipelineBinariesKHR && "Function requires " ); +# endif + + std::vector pipelineBinaries; + VULKAN_HPP_NAMESPACE::PipelineBinaryHandlesInfoKHR binaries; + VULKAN_HPP_NAMESPACE::Result result; + if ( createInfo.pKeysAndDataInfo ) + { + VULKAN_HPP_ASSERT( !createInfo.pipeline && !createInfo.pPipelineCreateInfo ); + pipelineBinaries.resize( createInfo.pKeysAndDataInfo->binaryCount ); + binaries.pipelineBinaryCount = createInfo.pKeysAndDataInfo->binaryCount; + binaries.pPipelineBinaries = pipelineBinaries.data(); + result = static_cast( d.vkCreatePipelineBinariesKHR( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &binaries ) ) ); + } + else + { + VULKAN_HPP_ASSERT( !createInfo.pipeline ^ !createInfo.pPipelineCreateInfo ); + result = static_cast( d.vkCreatePipelineBinariesKHR( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &binaries ) ) ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + pipelineBinaries.resize( binaries.pipelineBinaryCount ); + binaries.pPipelineBinaries = pipelineBinaries.data(); + result = static_cast( d.vkCreatePipelineBinariesKHR( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &binaries ) ) ); + } + } + + VULKAN_HPP_NAMESPACE::detail::resultCheck( + result, + VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineBinariesKHRUnique", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncomplete, VULKAN_HPP_NAMESPACE::Result::ePipelineBinaryMissingKHR } ); + std::vector, PipelineBinaryKHRAllocator> uniquePipelineBinaries; + uniquePipelineBinaries.reserve( pipelineBinaries.size() ); + ObjectDestroy deleter( *this, allocator, d ); + for ( auto const & pipelineBinary : pipelineBinaries ) + { + uniquePipelineBinaries.push_back( UniqueHandle( pipelineBinary, deleter ) ); + } + return ResultValue, PipelineBinaryKHRAllocator>>( + result, std::move( uniquePipelineBinaries ) ); + } + + template >::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue, PipelineBinaryKHRAllocator>> + Device::createPipelineBinariesKHRUnique( const VULKAN_HPP_NAMESPACE::PipelineBinaryCreateInfoKHR & createInfo, + Optional allocator, + PipelineBinaryKHRAllocator & pipelineBinaryKHRAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreatePipelineBinariesKHR && "Function requires " ); +# endif + + std::vector pipelineBinaries; + VULKAN_HPP_NAMESPACE::PipelineBinaryHandlesInfoKHR binaries; + VULKAN_HPP_NAMESPACE::Result result; + if ( 
createInfo.pKeysAndDataInfo ) + { + VULKAN_HPP_ASSERT( !createInfo.pipeline && !createInfo.pPipelineCreateInfo ); + pipelineBinaries.resize( createInfo.pKeysAndDataInfo->binaryCount ); + binaries.pipelineBinaryCount = createInfo.pKeysAndDataInfo->binaryCount; + binaries.pPipelineBinaries = pipelineBinaries.data(); + result = static_cast( d.vkCreatePipelineBinariesKHR( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &binaries ) ) ); + } + else + { + VULKAN_HPP_ASSERT( !createInfo.pipeline ^ !createInfo.pPipelineCreateInfo ); + result = static_cast( d.vkCreatePipelineBinariesKHR( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &binaries ) ) ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + pipelineBinaries.resize( binaries.pipelineBinaryCount ); + binaries.pPipelineBinaries = pipelineBinaries.data(); + result = static_cast( d.vkCreatePipelineBinariesKHR( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &binaries ) ) ); + } + } + + VULKAN_HPP_NAMESPACE::detail::resultCheck( + result, + VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineBinariesKHRUnique", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncomplete, VULKAN_HPP_NAMESPACE::Result::ePipelineBinaryMissingKHR } ); + std::vector, PipelineBinaryKHRAllocator> uniquePipelineBinaries( + pipelineBinaryKHRAllocator ); + uniquePipelineBinaries.reserve( pipelineBinaries.size() ); + ObjectDestroy deleter( *this, allocator, d ); + for ( auto const & pipelineBinary : pipelineBinaries ) + { + uniquePipelineBinaries.push_back( UniqueHandle( pipelineBinary, deleter ) ); + } + return ResultValue, PipelineBinaryKHRAllocator>>( + result, std::move( uniquePipelineBinaries ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void Device::destroyPipelineBinaryKHR( VULKAN_HPP_NAMESPACE::PipelineBinaryKHR pipelineBinary, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyPipelineBinaryKHR( m_device, static_cast( pipelineBinary ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyPipelineBinaryKHR( VULKAN_HPP_NAMESPACE::PipelineBinaryKHR pipelineBinary, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyPipelineBinaryKHR && "Function requires " ); +# endif + + d.vkDestroyPipelineBinaryKHR( + m_device, + static_cast( pipelineBinary ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineBinaryKHR pipelineBinary, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyPipelineBinaryKHR( m_device, static_cast( pipelineBinary ), reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( 
VULKAN_HPP_NAMESPACE::PipelineBinaryKHR pipelineBinary, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyPipelineBinaryKHR && "Function requires " ); +# endif + + d.vkDestroyPipelineBinaryKHR( + m_device, + static_cast( pipelineBinary ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getPipelineKeyKHR( const VULKAN_HPP_NAMESPACE::PipelineCreateInfoKHR * pPipelineCreateInfo, + VULKAN_HPP_NAMESPACE::PipelineBinaryKeyKHR * pPipelineKey, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetPipelineKeyKHR( + m_device, reinterpret_cast( pPipelineCreateInfo ), reinterpret_cast( pPipelineKey ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::getPipelineKeyKHR( Optional pipelineCreateInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPipelineKeyKHR && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::PipelineBinaryKeyKHR pipelineKey; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetPipelineKeyKHR( + m_device, + reinterpret_cast( static_cast( pipelineCreateInfo ) ), + reinterpret_cast( &pipelineKey ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineKeyKHR" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( pipelineKey ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getPipelineBinaryDataKHR( const VULKAN_HPP_NAMESPACE::PipelineBinaryDataInfoKHR * pInfo, + VULKAN_HPP_NAMESPACE::PipelineBinaryKeyKHR * pPipelineBinaryKey, + size_t * pPipelineBinaryDataSize, + void * pPipelineBinaryData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetPipelineBinaryDataKHR( m_device, + reinterpret_cast( pInfo ), + reinterpret_cast( pPipelineBinaryKey ), + pPipelineBinaryDataSize, + pPipelineBinaryData ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>>::type + Device::getPipelineBinaryDataKHR( const VULKAN_HPP_NAMESPACE::PipelineBinaryDataInfoKHR & info, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPipelineBinaryDataKHR && "Function requires " ); +# endif + + std::pair> data_; + VULKAN_HPP_NAMESPACE::PipelineBinaryKeyKHR & pipelineBinaryKey = data_.first; + std::vector & pipelineBinaryData = data_.second; + size_t pipelineBinaryDataSize; + VULKAN_HPP_NAMESPACE::Result result = + static_cast( d.vkGetPipelineBinaryDataKHR( m_device, + reinterpret_cast( &info ), + reinterpret_cast( &pipelineBinaryKey ), + &pipelineBinaryDataSize, + nullptr ) ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + pipelineBinaryData.resize( pipelineBinaryDataSize ); + result = static_cast( d.vkGetPipelineBinaryDataKHR( 
m_device, + reinterpret_cast( &info ), + reinterpret_cast( &pipelineBinaryKey ), + &pipelineBinaryDataSize, + reinterpret_cast( pipelineBinaryData.data() ) ) ); + } + + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineBinaryDataKHR" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data_ ) ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + typename ResultValueType>>::type + Device::getPipelineBinaryDataKHR( const VULKAN_HPP_NAMESPACE::PipelineBinaryDataInfoKHR & info, + Uint8_tAllocator & uint8_tAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPipelineBinaryDataKHR && "Function requires " ); +# endif + + std::pair> data_( + std::piecewise_construct, std::forward_as_tuple(), std::forward_as_tuple( uint8_tAllocator ) ); + VULKAN_HPP_NAMESPACE::PipelineBinaryKeyKHR & pipelineBinaryKey = data_.first; + std::vector & pipelineBinaryData = data_.second; + size_t pipelineBinaryDataSize; + VULKAN_HPP_NAMESPACE::Result result = + static_cast( d.vkGetPipelineBinaryDataKHR( m_device, + reinterpret_cast( &info ), + reinterpret_cast( &pipelineBinaryKey ), + &pipelineBinaryDataSize, + nullptr ) ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + pipelineBinaryData.resize( pipelineBinaryDataSize ); + result = static_cast( d.vkGetPipelineBinaryDataKHR( m_device, + reinterpret_cast( &info ), + reinterpret_cast( &pipelineBinaryKey ), + &pipelineBinaryDataSize, + reinterpret_cast( pipelineBinaryData.data() ) ) ); + } + + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineBinaryDataKHR" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data_ ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE Result Device::releaseCapturedPipelineDataKHR( const VULKAN_HPP_NAMESPACE::ReleaseCapturedPipelineDataInfoKHR * pInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkReleaseCapturedPipelineDataKHR( + m_device, reinterpret_cast( pInfo ), reinterpret_cast( pAllocator ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::releaseCapturedPipelineDataKHR( const VULKAN_HPP_NAMESPACE::ReleaseCapturedPipelineDataInfoKHR & info, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkReleaseCapturedPipelineDataKHR && "Function requires " ); +# endif + + d.vkReleaseCapturedPipelineDataKHR( + m_device, + reinterpret_cast( &info ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + //=== VK_QCOM_tile_properties === template @@ -25836,9 +26281,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkSetLatencySleepModeNV( m_device, static_cast( swapchain ), reinterpret_cast( &sleepModeInfo ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setLatencySleepModeNV" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setLatencySleepModeNV" 
); - return createResultValueType( result ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -25901,19 +26346,48 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::GetLatencyMarkerInfoNV - Device::getLatencyTimingsNV( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + Device::getLatencyTimingsNV( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) VULKAN_HPP_ASSERT( d.vkGetLatencyTimingsNV && "Function requires " ); # endif - VULKAN_HPP_NAMESPACE::GetLatencyMarkerInfoNV latencyMarkerInfo; + std::vector timings; + VULKAN_HPP_NAMESPACE::GetLatencyMarkerInfoNV latencyMarkerInfo; + d.vkGetLatencyTimingsNV( m_device, static_cast( swapchain ), reinterpret_cast( &latencyMarkerInfo ) ); + timings.resize( latencyMarkerInfo.timingCount ); + latencyMarkerInfo.pTimings = timings.data(); d.vkGetLatencyTimingsNV( m_device, static_cast( swapchain ), reinterpret_cast( &latencyMarkerInfo ) ); - return latencyMarkerInfo; + return timings; + } + + template < + typename LatencyTimingsFrameReportNVAllocator, + typename Dispatch, + typename std::enable_if::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + Device::getLatencyTimingsNV( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + LatencyTimingsFrameReportNVAllocator & latencyTimingsFrameReportNVAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetLatencyTimingsNV && "Function requires " ); +# endif + + std::vector timings( latencyTimingsFrameReportNVAllocator ); + VULKAN_HPP_NAMESPACE::GetLatencyMarkerInfoNV latencyMarkerInfo; + d.vkGetLatencyTimingsNV( m_device, static_cast( swapchain ), reinterpret_cast( &latencyMarkerInfo ) ); + timings.resize( latencyMarkerInfo.timingCount ); + latencyMarkerInfo.pTimings = timings.data(); + d.vkGetLatencyTimingsNV( m_device, static_cast( swapchain ), reinterpret_cast( &latencyMarkerInfo ) ); + + return timings; } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -25975,13 +26449,13 @@ namespace VULKAN_HPP_NAMESPACE m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixPropertiesKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixPropertiesKHR" ); VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); if ( propertyCount < properties.size() ) { properties.resize( propertyCount ); } - return createResultValueType( result, std::move( properties ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); } template ( properties.data() ) ) ); } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixPropertiesKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixPropertiesKHR" ); 
VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); if ( propertyCount < properties.size() ) { properties.resize( propertyCount ); } - return createResultValueType( result, std::move( properties ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -26059,9 +26533,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::ScreenBufferPropertiesQNX properties; VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetScreenBufferPropertiesQNX( m_device, &buffer, reinterpret_cast( &properties ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getScreenBufferPropertiesQNX" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getScreenBufferPropertiesQNX" ); - return createResultValueType( result, std::move( properties ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); } template @@ -26077,9 +26551,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::ScreenBufferPropertiesQNX & properties = structureChain.template get(); VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetScreenBufferPropertiesQNX( m_device, &buffer, reinterpret_cast( &properties ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getScreenBufferPropertiesQNX" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getScreenBufferPropertiesQNX" ); - return createResultValueType( result, std::move( structureChain ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( structureChain ) ); } # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ #endif /*VK_USE_PLATFORM_SCREEN_QNX*/ @@ -26130,13 +26604,13 @@ namespace VULKAN_HPP_NAMESPACE d.vkGetPhysicalDeviceCalibrateableTimeDomainsKHR( m_physicalDevice, &timeDomainCount, reinterpret_cast( timeDomains.data() ) ) ); } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCalibrateableTimeDomainsKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCalibrateableTimeDomainsKHR" ); VULKAN_HPP_ASSERT( timeDomainCount <= timeDomains.size() ); if ( timeDomainCount < timeDomains.size() ) { timeDomains.resize( timeDomainCount ); } - return createResultValueType( result, std::move( timeDomains ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( timeDomains ) ); } template ( timeDomains.data() ) ) ); } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCalibrateableTimeDomainsKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCalibrateableTimeDomainsKHR" ); VULKAN_HPP_ASSERT( timeDomainCount <= timeDomains.size() ); if ( timeDomainCount < timeDomains.size() ) { timeDomains.resize( timeDomainCount ); } - return createResultValueType( result, std::move( timeDomains ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( timeDomains ) ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -26204,9 +26678,9 @@ namespace VULKAN_HPP_NAMESPACE uint64_t & maxDeviation = data_.second; VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetCalibratedTimestampsKHR( m_device, timestampInfos.size(), reinterpret_cast( timestampInfos.data() ), 
timestamps.data(), &maxDeviation ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampsKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampsKHR" ); - return createResultValueType( result, std::move( data_ ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data_ ) ); } template ( d.vkGetCalibratedTimestampsKHR( m_device, timestampInfos.size(), reinterpret_cast( timestampInfos.data() ), timestamps.data(), &maxDeviation ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampsKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampsKHR" ); - return createResultValueType( result, std::move( data_ ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data_ ) ); } template @@ -26249,9 +26723,9 @@ namespace VULKAN_HPP_NAMESPACE uint64_t & maxDeviation = data_.second; VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetCalibratedTimestampsKHR( m_device, 1, reinterpret_cast( ×tampInfo ), ×tamp, &maxDeviation ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampKHR" ); - return createResultValueType( result, std::move( data_ ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data_ ) ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ diff --git a/third_party/vulkan/vulkan_handles.hpp b/third_party/vulkan/vulkan_handles.hpp index 4208b8c..4670c46 100644 --- a/third_party/vulkan/vulkan_handles.hpp +++ b/third_party/vulkan/vulkan_handles.hpp @@ -8,6 +8,9 @@ #ifndef VULKAN_HANDLES_HPP #define VULKAN_HANDLES_HPP +// include-what-you-use: make sure, vulkan.hpp is used by code-completers +// IWYU pragma: private; include "vulkan.hpp" + namespace VULKAN_HPP_NAMESPACE { @@ -997,9 +1000,6 @@ namespace VULKAN_HPP_NAMESPACE struct PresentFrameTokenGGP; #endif /*VK_USE_PLATFORM_GGP*/ - //=== VK_NV_compute_shader_derivatives === - struct PhysicalDeviceComputeShaderDerivativesFeaturesNV; - //=== VK_NV_mesh_shader === struct PhysicalDeviceMeshShaderFeaturesNV; struct PhysicalDeviceMeshShaderPropertiesNV; @@ -1660,6 +1660,11 @@ namespace VULKAN_HPP_NAMESPACE struct PipelineCreateFlags2CreateInfoKHR; struct BufferUsageFlags2CreateInfoKHR; + //=== VK_AMD_anti_lag === + struct PhysicalDeviceAntiLagFeaturesAMD; + struct AntiLagDataAMD; + struct AntiLagPresentationInfoAMD; + //=== VK_KHR_ray_tracing_position_fetch === struct PhysicalDeviceRayTracingPositionFetchFeaturesKHR; @@ -1668,6 +1673,20 @@ namespace VULKAN_HPP_NAMESPACE struct PhysicalDeviceShaderObjectPropertiesEXT; struct ShaderCreateInfoEXT; + //=== VK_KHR_pipeline_binary === + struct PhysicalDevicePipelineBinaryFeaturesKHR; + struct PhysicalDevicePipelineBinaryPropertiesKHR; + struct DevicePipelineBinaryInternalCacheControlKHR; + struct PipelineBinaryKeyKHR; + struct PipelineBinaryDataKHR; + struct PipelineBinaryKeysAndDataKHR; + struct PipelineBinaryCreateInfoKHR; + struct PipelineBinaryInfoKHR; + struct ReleaseCapturedPipelineDataInfoKHR; + struct PipelineBinaryDataInfoKHR; + struct PipelineCreateInfoKHR; + struct PipelineBinaryHandlesInfoKHR; + //=== VK_QCOM_tile_properties === struct PhysicalDeviceTilePropertiesFeaturesQCOM; struct TilePropertiesQCOM; @@ -1695,6 +1714,10 @@ namespace 
VULKAN_HPP_NAMESPACE struct MutableDescriptorTypeCreateInfoEXT; using MutableDescriptorTypeCreateInfoVALVE = MutableDescriptorTypeCreateInfoEXT; + //=== VK_EXT_legacy_vertex_attributes === + struct PhysicalDeviceLegacyVertexAttributesFeaturesEXT; + struct PhysicalDeviceLegacyVertexAttributesPropertiesEXT; + //=== VK_EXT_layer_settings === struct LayerSettingsCreateInfoEXT; struct LayerSettingEXT; @@ -1729,6 +1752,11 @@ namespace VULKAN_HPP_NAMESPACE struct PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM; struct MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM; + //=== VK_KHR_compute_shader_derivatives === + struct PhysicalDeviceComputeShaderDerivativesFeaturesKHR; + using PhysicalDeviceComputeShaderDerivativesFeaturesNV = PhysicalDeviceComputeShaderDerivativesFeaturesKHR; + struct PhysicalDeviceComputeShaderDerivativesPropertiesKHR; + //=== VK_KHR_video_decode_av1 === struct VideoDecodeAV1ProfileInfoKHR; struct VideoDecodeAV1CapabilitiesKHR; @@ -1823,12 +1851,33 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_NV_raw_access_chains === struct PhysicalDeviceRawAccessChainsFeaturesNV; + //=== VK_KHR_shader_relaxed_extended_instruction === + struct PhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR; + + //=== VK_NV_command_buffer_inheritance === + struct PhysicalDeviceCommandBufferInheritanceFeaturesNV; + + //=== VK_KHR_maintenance7 === + struct PhysicalDeviceMaintenance7FeaturesKHR; + struct PhysicalDeviceMaintenance7PropertiesKHR; + struct PhysicalDeviceLayeredApiPropertiesListKHR; + struct PhysicalDeviceLayeredApiPropertiesKHR; + struct PhysicalDeviceLayeredApiVulkanPropertiesKHR; + //=== VK_NV_shader_atomic_float16_vector === struct PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV; + //=== VK_EXT_shader_replicated_composites === + struct PhysicalDeviceShaderReplicatedCompositesFeaturesEXT; + //=== VK_NV_ray_tracing_validation === struct PhysicalDeviceRayTracingValidationFeaturesNV; + //=== VK_MESA_image_alignment_control === + struct PhysicalDeviceImageAlignmentControlFeaturesMESA; + struct PhysicalDeviceImageAlignmentControlPropertiesMESA; + struct ImageAlignmentControlCreateInfoMESA; + //=================================== //=== HANDLE forward declarations === //=================================== @@ -1929,6 +1978,9 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_EXT_shader_object === class ShaderEXT; + //=== VK_KHR_pipeline_binary === + class PipelineBinaryKHR; + #ifndef VULKAN_HPP_NO_SMART_HANDLE //====================== //=== UNIQUE HANDLEs === @@ -2384,6 +2436,16 @@ namespace VULKAN_HPP_NAMESPACE }; using UniqueShaderEXT = UniqueHandle; + + //=== VK_KHR_pipeline_binary === + template + class UniqueHandleTraits + { + public: + using deleter = ObjectDestroy; + }; + + using UniquePipelineBinaryKHR = UniqueHandle; #endif /*VULKAN_HPP_NO_SMART_HANDLE*/ //=============== @@ -2407,7 +2469,22 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSurfaceKHR; public: - VULKAN_HPP_CONSTEXPR SurfaceKHR() = default; + SurfaceKHR() VULKAN_HPP_NOEXCEPT{}; // = default - try to workaround a compiler issue + SurfaceKHR( SurfaceKHR const & rhs ) = default; + SurfaceKHR & operator=( SurfaceKHR const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + SurfaceKHR( SurfaceKHR && rhs ) = default; + SurfaceKHR & operator=( SurfaceKHR && rhs ) = default; +#else + SurfaceKHR( SurfaceKHR && rhs ) VULKAN_HPP_NOEXCEPT : m_surfaceKHR( VULKAN_HPP_NAMESPACE::exchange( rhs.m_surfaceKHR, {} ) ) {} + + SurfaceKHR & operator=( SurfaceKHR && rhs ) 
VULKAN_HPP_NOEXCEPT + { + m_surfaceKHR = VULKAN_HPP_NAMESPACE::exchange( rhs.m_surfaceKHR, {} ); + return *this; + } +#endif VULKAN_HPP_CONSTEXPR SurfaceKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} @@ -2477,11 +2554,13 @@ namespace VULKAN_HPP_NAMESPACE using Type = VULKAN_HPP_NAMESPACE::SurfaceKHR; }; +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) template <> struct CppType { using Type = VULKAN_HPP_NAMESPACE::SurfaceKHR; }; +#endif template <> struct isVulkanHandleType @@ -2500,7 +2579,25 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDebugReportCallbackEXT; public: - VULKAN_HPP_CONSTEXPR DebugReportCallbackEXT() = default; + DebugReportCallbackEXT() VULKAN_HPP_NOEXCEPT{}; // = default - try to workaround a compiler issue + DebugReportCallbackEXT( DebugReportCallbackEXT const & rhs ) = default; + DebugReportCallbackEXT & operator=( DebugReportCallbackEXT const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + DebugReportCallbackEXT( DebugReportCallbackEXT && rhs ) = default; + DebugReportCallbackEXT & operator=( DebugReportCallbackEXT && rhs ) = default; +#else + DebugReportCallbackEXT( DebugReportCallbackEXT && rhs ) VULKAN_HPP_NOEXCEPT + : m_debugReportCallbackEXT( VULKAN_HPP_NAMESPACE::exchange( rhs.m_debugReportCallbackEXT, {} ) ) + { + } + + DebugReportCallbackEXT & operator=( DebugReportCallbackEXT && rhs ) VULKAN_HPP_NOEXCEPT + { + m_debugReportCallbackEXT = VULKAN_HPP_NAMESPACE::exchange( rhs.m_debugReportCallbackEXT, {} ); + return *this; + } +#endif VULKAN_HPP_CONSTEXPR DebugReportCallbackEXT( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} @@ -2573,11 +2670,13 @@ namespace VULKAN_HPP_NAMESPACE using Type = VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT; }; +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) template <> struct CppType { using Type = VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT; }; +#endif template <> struct isVulkanHandleType @@ -2596,7 +2695,25 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; public: - VULKAN_HPP_CONSTEXPR DebugUtilsMessengerEXT() = default; + DebugUtilsMessengerEXT() VULKAN_HPP_NOEXCEPT{}; // = default - try to workaround a compiler issue + DebugUtilsMessengerEXT( DebugUtilsMessengerEXT const & rhs ) = default; + DebugUtilsMessengerEXT & operator=( DebugUtilsMessengerEXT const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + DebugUtilsMessengerEXT( DebugUtilsMessengerEXT && rhs ) = default; + DebugUtilsMessengerEXT & operator=( DebugUtilsMessengerEXT && rhs ) = default; +#else + DebugUtilsMessengerEXT( DebugUtilsMessengerEXT && rhs ) VULKAN_HPP_NOEXCEPT + : m_debugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::exchange( rhs.m_debugUtilsMessengerEXT, {} ) ) + { + } + + DebugUtilsMessengerEXT & operator=( DebugUtilsMessengerEXT && rhs ) VULKAN_HPP_NOEXCEPT + { + m_debugUtilsMessengerEXT = VULKAN_HPP_NAMESPACE::exchange( rhs.m_debugUtilsMessengerEXT, {} ); + return *this; + } +#endif VULKAN_HPP_CONSTEXPR DebugUtilsMessengerEXT( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} @@ -2663,11 +2780,13 @@ namespace VULKAN_HPP_NAMESPACE using Type = VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT; }; +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) template <> struct CppType { using Type = VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT; }; +#endif template <> struct isVulkanHandleType @@ -2686,7 +2805,22 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDisplayKHR; public: - VULKAN_HPP_CONSTEXPR DisplayKHR() = default; + DisplayKHR() 
VULKAN_HPP_NOEXCEPT{}; // = default - try to workaround a compiler issue + DisplayKHR( DisplayKHR const & rhs ) = default; + DisplayKHR & operator=( DisplayKHR const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + DisplayKHR( DisplayKHR && rhs ) = default; + DisplayKHR & operator=( DisplayKHR && rhs ) = default; +#else + DisplayKHR( DisplayKHR && rhs ) VULKAN_HPP_NOEXCEPT : m_displayKHR( VULKAN_HPP_NAMESPACE::exchange( rhs.m_displayKHR, {} ) ) {} + + DisplayKHR & operator=( DisplayKHR && rhs ) VULKAN_HPP_NOEXCEPT + { + m_displayKHR = VULKAN_HPP_NAMESPACE::exchange( rhs.m_displayKHR, {} ); + return *this; + } +#endif VULKAN_HPP_CONSTEXPR DisplayKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} @@ -2756,11 +2890,13 @@ namespace VULKAN_HPP_NAMESPACE using Type = VULKAN_HPP_NAMESPACE::DisplayKHR; }; +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) template <> struct CppType { using Type = VULKAN_HPP_NAMESPACE::DisplayKHR; }; +#endif template <> struct isVulkanHandleType @@ -2779,7 +2915,22 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSwapchainKHR; public: - VULKAN_HPP_CONSTEXPR SwapchainKHR() = default; + SwapchainKHR() VULKAN_HPP_NOEXCEPT{}; // = default - try to workaround a compiler issue + SwapchainKHR( SwapchainKHR const & rhs ) = default; + SwapchainKHR & operator=( SwapchainKHR const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + SwapchainKHR( SwapchainKHR && rhs ) = default; + SwapchainKHR & operator=( SwapchainKHR && rhs ) = default; +#else + SwapchainKHR( SwapchainKHR && rhs ) VULKAN_HPP_NOEXCEPT : m_swapchainKHR( VULKAN_HPP_NAMESPACE::exchange( rhs.m_swapchainKHR, {} ) ) {} + + SwapchainKHR & operator=( SwapchainKHR && rhs ) VULKAN_HPP_NOEXCEPT + { + m_swapchainKHR = VULKAN_HPP_NAMESPACE::exchange( rhs.m_swapchainKHR, {} ); + return *this; + } +#endif VULKAN_HPP_CONSTEXPR SwapchainKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} @@ -2849,11 +3000,13 @@ namespace VULKAN_HPP_NAMESPACE using Type = VULKAN_HPP_NAMESPACE::SwapchainKHR; }; +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) template <> struct CppType { using Type = VULKAN_HPP_NAMESPACE::SwapchainKHR; }; +#endif template <> struct isVulkanHandleType @@ -2872,7 +3025,22 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSemaphore; public: - VULKAN_HPP_CONSTEXPR Semaphore() = default; + Semaphore() VULKAN_HPP_NOEXCEPT{}; // = default - try to workaround a compiler issue + Semaphore( Semaphore const & rhs ) = default; + Semaphore & operator=( Semaphore const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + Semaphore( Semaphore && rhs ) = default; + Semaphore & operator=( Semaphore && rhs ) = default; +#else + Semaphore( Semaphore && rhs ) VULKAN_HPP_NOEXCEPT : m_semaphore( VULKAN_HPP_NAMESPACE::exchange( rhs.m_semaphore, {} ) ) {} + + Semaphore & operator=( Semaphore && rhs ) VULKAN_HPP_NOEXCEPT + { + m_semaphore = VULKAN_HPP_NAMESPACE::exchange( rhs.m_semaphore, {} ); + return *this; + } +#endif VULKAN_HPP_CONSTEXPR Semaphore( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} @@ -2942,11 +3110,13 @@ namespace VULKAN_HPP_NAMESPACE using Type = VULKAN_HPP_NAMESPACE::Semaphore; }; +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) template <> struct CppType { using Type = VULKAN_HPP_NAMESPACE::Semaphore; }; +#endif template <> struct isVulkanHandleType @@ -2965,7 +3135,22 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eFence; public: - VULKAN_HPP_CONSTEXPR Fence() = default; + Fence() 
VULKAN_HPP_NOEXCEPT{}; // = default - try to workaround a compiler issue + Fence( Fence const & rhs ) = default; + Fence & operator=( Fence const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + Fence( Fence && rhs ) = default; + Fence & operator=( Fence && rhs ) = default; +#else + Fence( Fence && rhs ) VULKAN_HPP_NOEXCEPT : m_fence( VULKAN_HPP_NAMESPACE::exchange( rhs.m_fence, {} ) ) {} + + Fence & operator=( Fence && rhs ) VULKAN_HPP_NOEXCEPT + { + m_fence = VULKAN_HPP_NAMESPACE::exchange( rhs.m_fence, {} ); + return *this; + } +#endif VULKAN_HPP_CONSTEXPR Fence( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} @@ -3035,11 +3220,13 @@ namespace VULKAN_HPP_NAMESPACE using Type = VULKAN_HPP_NAMESPACE::Fence; }; +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) template <> struct CppType { using Type = VULKAN_HPP_NAMESPACE::Fence; }; +#endif template <> struct isVulkanHandleType @@ -3058,7 +3245,25 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; public: - VULKAN_HPP_CONSTEXPR PerformanceConfigurationINTEL() = default; + PerformanceConfigurationINTEL() VULKAN_HPP_NOEXCEPT{}; // = default - try to workaround a compiler issue + PerformanceConfigurationINTEL( PerformanceConfigurationINTEL const & rhs ) = default; + PerformanceConfigurationINTEL & operator=( PerformanceConfigurationINTEL const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + PerformanceConfigurationINTEL( PerformanceConfigurationINTEL && rhs ) = default; + PerformanceConfigurationINTEL & operator=( PerformanceConfigurationINTEL && rhs ) = default; +#else + PerformanceConfigurationINTEL( PerformanceConfigurationINTEL && rhs ) VULKAN_HPP_NOEXCEPT + : m_performanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::exchange( rhs.m_performanceConfigurationINTEL, {} ) ) + { + } + + PerformanceConfigurationINTEL & operator=( PerformanceConfigurationINTEL && rhs ) VULKAN_HPP_NOEXCEPT + { + m_performanceConfigurationINTEL = VULKAN_HPP_NAMESPACE::exchange( rhs.m_performanceConfigurationINTEL, {} ); + return *this; + } +#endif VULKAN_HPP_CONSTEXPR PerformanceConfigurationINTEL( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} @@ -3125,11 +3330,13 @@ namespace VULKAN_HPP_NAMESPACE using Type = VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL; }; +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) template <> struct CppType { using Type = VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL; }; +#endif template <> struct isVulkanHandleType @@ -3148,7 +3355,22 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eQueryPool; public: - VULKAN_HPP_CONSTEXPR QueryPool() = default; + QueryPool() VULKAN_HPP_NOEXCEPT{}; // = default - try to workaround a compiler issue + QueryPool( QueryPool const & rhs ) = default; + QueryPool & operator=( QueryPool const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + QueryPool( QueryPool && rhs ) = default; + QueryPool & operator=( QueryPool && rhs ) = default; +#else + QueryPool( QueryPool && rhs ) VULKAN_HPP_NOEXCEPT : m_queryPool( VULKAN_HPP_NAMESPACE::exchange( rhs.m_queryPool, {} ) ) {} + + QueryPool & operator=( QueryPool && rhs ) VULKAN_HPP_NOEXCEPT + { + m_queryPool = VULKAN_HPP_NAMESPACE::exchange( rhs.m_queryPool, {} ); + return *this; + } +#endif VULKAN_HPP_CONSTEXPR QueryPool( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} @@ -3218,11 +3440,13 @@ namespace VULKAN_HPP_NAMESPACE using Type = VULKAN_HPP_NAMESPACE::QueryPool; }; +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) template <> struct CppType { using Type = 
VULKAN_HPP_NAMESPACE::QueryPool; }; +#endif template <> struct isVulkanHandleType @@ -3241,7 +3465,22 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eBuffer; public: - VULKAN_HPP_CONSTEXPR Buffer() = default; + Buffer() VULKAN_HPP_NOEXCEPT{}; // = default - try to workaround a compiler issue + Buffer( Buffer const & rhs ) = default; + Buffer & operator=( Buffer const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + Buffer( Buffer && rhs ) = default; + Buffer & operator=( Buffer && rhs ) = default; +#else + Buffer( Buffer && rhs ) VULKAN_HPP_NOEXCEPT : m_buffer( VULKAN_HPP_NAMESPACE::exchange( rhs.m_buffer, {} ) ) {} + + Buffer & operator=( Buffer && rhs ) VULKAN_HPP_NOEXCEPT + { + m_buffer = VULKAN_HPP_NAMESPACE::exchange( rhs.m_buffer, {} ); + return *this; + } +#endif VULKAN_HPP_CONSTEXPR Buffer( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} @@ -3311,11 +3550,13 @@ namespace VULKAN_HPP_NAMESPACE using Type = VULKAN_HPP_NAMESPACE::Buffer; }; +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) template <> struct CppType { using Type = VULKAN_HPP_NAMESPACE::Buffer; }; +#endif template <> struct isVulkanHandleType @@ -3334,7 +3575,22 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePipelineLayout; public: - VULKAN_HPP_CONSTEXPR PipelineLayout() = default; + PipelineLayout() VULKAN_HPP_NOEXCEPT{}; // = default - try to workaround a compiler issue + PipelineLayout( PipelineLayout const & rhs ) = default; + PipelineLayout & operator=( PipelineLayout const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + PipelineLayout( PipelineLayout && rhs ) = default; + PipelineLayout & operator=( PipelineLayout && rhs ) = default; +#else + PipelineLayout( PipelineLayout && rhs ) VULKAN_HPP_NOEXCEPT : m_pipelineLayout( VULKAN_HPP_NAMESPACE::exchange( rhs.m_pipelineLayout, {} ) ) {} + + PipelineLayout & operator=( PipelineLayout && rhs ) VULKAN_HPP_NOEXCEPT + { + m_pipelineLayout = VULKAN_HPP_NAMESPACE::exchange( rhs.m_pipelineLayout, {} ); + return *this; + } +#endif VULKAN_HPP_CONSTEXPR PipelineLayout( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} @@ -3404,11 +3660,13 @@ namespace VULKAN_HPP_NAMESPACE using Type = VULKAN_HPP_NAMESPACE::PipelineLayout; }; +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) template <> struct CppType { using Type = VULKAN_HPP_NAMESPACE::PipelineLayout; }; +#endif template <> struct isVulkanHandleType @@ -3427,7 +3685,22 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorSet; public: - VULKAN_HPP_CONSTEXPR DescriptorSet() = default; + DescriptorSet() VULKAN_HPP_NOEXCEPT{}; // = default - try to workaround a compiler issue + DescriptorSet( DescriptorSet const & rhs ) = default; + DescriptorSet & operator=( DescriptorSet const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + DescriptorSet( DescriptorSet && rhs ) = default; + DescriptorSet & operator=( DescriptorSet && rhs ) = default; +#else + DescriptorSet( DescriptorSet && rhs ) VULKAN_HPP_NOEXCEPT : m_descriptorSet( VULKAN_HPP_NAMESPACE::exchange( rhs.m_descriptorSet, {} ) ) {} + + DescriptorSet & operator=( DescriptorSet && rhs ) VULKAN_HPP_NOEXCEPT + { + m_descriptorSet = VULKAN_HPP_NAMESPACE::exchange( rhs.m_descriptorSet, {} ); + return *this; + } +#endif VULKAN_HPP_CONSTEXPR DescriptorSet( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} @@ -3497,11 +3770,13 @@ namespace VULKAN_HPP_NAMESPACE using Type = VULKAN_HPP_NAMESPACE::DescriptorSet; }; +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) 
template <> struct CppType { using Type = VULKAN_HPP_NAMESPACE::DescriptorSet; }; +#endif template <> struct isVulkanHandleType @@ -3520,7 +3795,22 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eImageView; public: - VULKAN_HPP_CONSTEXPR ImageView() = default; + ImageView() VULKAN_HPP_NOEXCEPT{}; // = default - try to workaround a compiler issue + ImageView( ImageView const & rhs ) = default; + ImageView & operator=( ImageView const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + ImageView( ImageView && rhs ) = default; + ImageView & operator=( ImageView && rhs ) = default; +#else + ImageView( ImageView && rhs ) VULKAN_HPP_NOEXCEPT : m_imageView( VULKAN_HPP_NAMESPACE::exchange( rhs.m_imageView, {} ) ) {} + + ImageView & operator=( ImageView && rhs ) VULKAN_HPP_NOEXCEPT + { + m_imageView = VULKAN_HPP_NAMESPACE::exchange( rhs.m_imageView, {} ); + return *this; + } +#endif VULKAN_HPP_CONSTEXPR ImageView( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} @@ -3590,11 +3880,13 @@ namespace VULKAN_HPP_NAMESPACE using Type = VULKAN_HPP_NAMESPACE::ImageView; }; +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) template <> struct CppType { using Type = VULKAN_HPP_NAMESPACE::ImageView; }; +#endif template <> struct isVulkanHandleType @@ -3613,7 +3905,22 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePipeline; public: - VULKAN_HPP_CONSTEXPR Pipeline() = default; + Pipeline() VULKAN_HPP_NOEXCEPT{}; // = default - try to workaround a compiler issue + Pipeline( Pipeline const & rhs ) = default; + Pipeline & operator=( Pipeline const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + Pipeline( Pipeline && rhs ) = default; + Pipeline & operator=( Pipeline && rhs ) = default; +#else + Pipeline( Pipeline && rhs ) VULKAN_HPP_NOEXCEPT : m_pipeline( VULKAN_HPP_NAMESPACE::exchange( rhs.m_pipeline, {} ) ) {} + + Pipeline & operator=( Pipeline && rhs ) VULKAN_HPP_NOEXCEPT + { + m_pipeline = VULKAN_HPP_NAMESPACE::exchange( rhs.m_pipeline, {} ); + return *this; + } +#endif VULKAN_HPP_CONSTEXPR Pipeline( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} @@ -3683,11 +3990,13 @@ namespace VULKAN_HPP_NAMESPACE using Type = VULKAN_HPP_NAMESPACE::Pipeline; }; +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) template <> struct CppType { using Type = VULKAN_HPP_NAMESPACE::Pipeline; }; +#endif template <> struct isVulkanHandleType @@ -3706,7 +4015,22 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; public: - VULKAN_HPP_CONSTEXPR ShaderEXT() = default; + ShaderEXT() VULKAN_HPP_NOEXCEPT{}; // = default - try to workaround a compiler issue + ShaderEXT( ShaderEXT const & rhs ) = default; + ShaderEXT & operator=( ShaderEXT const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + ShaderEXT( ShaderEXT && rhs ) = default; + ShaderEXT & operator=( ShaderEXT && rhs ) = default; +#else + ShaderEXT( ShaderEXT && rhs ) VULKAN_HPP_NOEXCEPT : m_shaderEXT( VULKAN_HPP_NAMESPACE::exchange( rhs.m_shaderEXT, {} ) ) {} + + ShaderEXT & operator=( ShaderEXT && rhs ) VULKAN_HPP_NOEXCEPT + { + m_shaderEXT = VULKAN_HPP_NAMESPACE::exchange( rhs.m_shaderEXT, {} ); + return *this; + } +#endif VULKAN_HPP_CONSTEXPR ShaderEXT( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} @@ -3770,11 +4094,13 @@ namespace VULKAN_HPP_NAMESPACE using Type = VULKAN_HPP_NAMESPACE::ShaderEXT; }; +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) template <> struct CppType { using Type = VULKAN_HPP_NAMESPACE::ShaderEXT; }; +#endif template <> struct 
isVulkanHandleType @@ -3793,7 +4119,22 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eImage; public: - VULKAN_HPP_CONSTEXPR Image() = default; + Image() VULKAN_HPP_NOEXCEPT{}; // = default - try to workaround a compiler issue + Image( Image const & rhs ) = default; + Image & operator=( Image const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + Image( Image && rhs ) = default; + Image & operator=( Image && rhs ) = default; +#else + Image( Image && rhs ) VULKAN_HPP_NOEXCEPT : m_image( VULKAN_HPP_NAMESPACE::exchange( rhs.m_image, {} ) ) {} + + Image & operator=( Image && rhs ) VULKAN_HPP_NOEXCEPT + { + m_image = VULKAN_HPP_NAMESPACE::exchange( rhs.m_image, {} ); + return *this; + } +#endif VULKAN_HPP_CONSTEXPR Image( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} @@ -3863,11 +4204,13 @@ namespace VULKAN_HPP_NAMESPACE using Type = VULKAN_HPP_NAMESPACE::Image; }; +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) template <> struct CppType { using Type = VULKAN_HPP_NAMESPACE::Image; }; +#endif template <> struct isVulkanHandleType @@ -3886,7 +4229,25 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eAccelerationStructureNV; public: - VULKAN_HPP_CONSTEXPR AccelerationStructureNV() = default; + AccelerationStructureNV() VULKAN_HPP_NOEXCEPT{}; // = default - try to workaround a compiler issue + AccelerationStructureNV( AccelerationStructureNV const & rhs ) = default; + AccelerationStructureNV & operator=( AccelerationStructureNV const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + AccelerationStructureNV( AccelerationStructureNV && rhs ) = default; + AccelerationStructureNV & operator=( AccelerationStructureNV && rhs ) = default; +#else + AccelerationStructureNV( AccelerationStructureNV && rhs ) VULKAN_HPP_NOEXCEPT + : m_accelerationStructureNV( VULKAN_HPP_NAMESPACE::exchange( rhs.m_accelerationStructureNV, {} ) ) + { + } + + AccelerationStructureNV & operator=( AccelerationStructureNV && rhs ) VULKAN_HPP_NOEXCEPT + { + m_accelerationStructureNV = VULKAN_HPP_NAMESPACE::exchange( rhs.m_accelerationStructureNV, {} ); + return *this; + } +#endif VULKAN_HPP_CONSTEXPR AccelerationStructureNV( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} @@ -3959,11 +4320,13 @@ namespace VULKAN_HPP_NAMESPACE using Type = VULKAN_HPP_NAMESPACE::AccelerationStructureNV; }; +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) template <> struct CppType { using Type = VULKAN_HPP_NAMESPACE::AccelerationStructureNV; }; +#endif template <> struct isVulkanHandleType @@ -3982,7 +4345,25 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; public: - VULKAN_HPP_CONSTEXPR OpticalFlowSessionNV() = default; + OpticalFlowSessionNV() VULKAN_HPP_NOEXCEPT{}; // = default - try to workaround a compiler issue + OpticalFlowSessionNV( OpticalFlowSessionNV const & rhs ) = default; + OpticalFlowSessionNV & operator=( OpticalFlowSessionNV const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + OpticalFlowSessionNV( OpticalFlowSessionNV && rhs ) = default; + OpticalFlowSessionNV & operator=( OpticalFlowSessionNV && rhs ) = default; +#else + OpticalFlowSessionNV( OpticalFlowSessionNV && rhs ) VULKAN_HPP_NOEXCEPT + : m_opticalFlowSessionNV( VULKAN_HPP_NAMESPACE::exchange( rhs.m_opticalFlowSessionNV, {} ) ) + { + } + + OpticalFlowSessionNV & operator=( OpticalFlowSessionNV && rhs ) VULKAN_HPP_NOEXCEPT + { + m_opticalFlowSessionNV = VULKAN_HPP_NAMESPACE::exchange( rhs.m_opticalFlowSessionNV, {} ); + return 
*this; + } +#endif VULKAN_HPP_CONSTEXPR OpticalFlowSessionNV( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} @@ -4049,11 +4430,13 @@ namespace VULKAN_HPP_NAMESPACE using Type = VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV; }; +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) template <> struct CppType { using Type = VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV; }; +#endif template <> struct isVulkanHandleType @@ -4072,7 +4455,25 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorUpdateTemplate; public: - VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplate() = default; + DescriptorUpdateTemplate() VULKAN_HPP_NOEXCEPT{}; // = default - try to workaround a compiler issue + DescriptorUpdateTemplate( DescriptorUpdateTemplate const & rhs ) = default; + DescriptorUpdateTemplate & operator=( DescriptorUpdateTemplate const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + DescriptorUpdateTemplate( DescriptorUpdateTemplate && rhs ) = default; + DescriptorUpdateTemplate & operator=( DescriptorUpdateTemplate && rhs ) = default; +#else + DescriptorUpdateTemplate( DescriptorUpdateTemplate && rhs ) VULKAN_HPP_NOEXCEPT + : m_descriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::exchange( rhs.m_descriptorUpdateTemplate, {} ) ) + { + } + + DescriptorUpdateTemplate & operator=( DescriptorUpdateTemplate && rhs ) VULKAN_HPP_NOEXCEPT + { + m_descriptorUpdateTemplate = VULKAN_HPP_NAMESPACE::exchange( rhs.m_descriptorUpdateTemplate, {} ); + return *this; + } +#endif VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplate( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} @@ -4145,11 +4546,13 @@ namespace VULKAN_HPP_NAMESPACE using Type = VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate; }; +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) template <> struct CppType { using Type = VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate; }; +#endif template <> struct isVulkanHandleType @@ -4170,7 +4573,22 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eEvent; public: - VULKAN_HPP_CONSTEXPR Event() = default; + Event() VULKAN_HPP_NOEXCEPT{}; // = default - try to workaround a compiler issue + Event( Event const & rhs ) = default; + Event & operator=( Event const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + Event( Event && rhs ) = default; + Event & operator=( Event && rhs ) = default; +#else + Event( Event && rhs ) VULKAN_HPP_NOEXCEPT : m_event( VULKAN_HPP_NAMESPACE::exchange( rhs.m_event, {} ) ) {} + + Event & operator=( Event && rhs ) VULKAN_HPP_NOEXCEPT + { + m_event = VULKAN_HPP_NAMESPACE::exchange( rhs.m_event, {} ); + return *this; + } +#endif VULKAN_HPP_CONSTEXPR Event( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} @@ -4240,11 +4658,13 @@ namespace VULKAN_HPP_NAMESPACE using Type = VULKAN_HPP_NAMESPACE::Event; }; +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) template <> struct CppType { using Type = VULKAN_HPP_NAMESPACE::Event; }; +#endif template <> struct isVulkanHandleType @@ -4263,7 +4683,25 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eAccelerationStructureKHR; public: - VULKAN_HPP_CONSTEXPR AccelerationStructureKHR() = default; + AccelerationStructureKHR() VULKAN_HPP_NOEXCEPT{}; // = default - try to workaround a compiler issue + AccelerationStructureKHR( AccelerationStructureKHR const & rhs ) = default; + AccelerationStructureKHR & operator=( AccelerationStructureKHR const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + AccelerationStructureKHR( AccelerationStructureKHR && rhs ) = default; + 
AccelerationStructureKHR & operator=( AccelerationStructureKHR && rhs ) = default; +#else + AccelerationStructureKHR( AccelerationStructureKHR && rhs ) VULKAN_HPP_NOEXCEPT + : m_accelerationStructureKHR( VULKAN_HPP_NAMESPACE::exchange( rhs.m_accelerationStructureKHR, {} ) ) + { + } + + AccelerationStructureKHR & operator=( AccelerationStructureKHR && rhs ) VULKAN_HPP_NOEXCEPT + { + m_accelerationStructureKHR = VULKAN_HPP_NAMESPACE::exchange( rhs.m_accelerationStructureKHR, {} ); + return *this; + } +#endif VULKAN_HPP_CONSTEXPR AccelerationStructureKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} @@ -4336,11 +4774,13 @@ namespace VULKAN_HPP_NAMESPACE using Type = VULKAN_HPP_NAMESPACE::AccelerationStructureKHR; }; +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) template <> struct CppType { using Type = VULKAN_HPP_NAMESPACE::AccelerationStructureKHR; }; +#endif template <> struct isVulkanHandleType @@ -4359,7 +4799,22 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; public: - VULKAN_HPP_CONSTEXPR MicromapEXT() = default; + MicromapEXT() VULKAN_HPP_NOEXCEPT{}; // = default - try to workaround a compiler issue + MicromapEXT( MicromapEXT const & rhs ) = default; + MicromapEXT & operator=( MicromapEXT const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + MicromapEXT( MicromapEXT && rhs ) = default; + MicromapEXT & operator=( MicromapEXT && rhs ) = default; +#else + MicromapEXT( MicromapEXT && rhs ) VULKAN_HPP_NOEXCEPT : m_micromapEXT( VULKAN_HPP_NAMESPACE::exchange( rhs.m_micromapEXT, {} ) ) {} + + MicromapEXT & operator=( MicromapEXT && rhs ) VULKAN_HPP_NOEXCEPT + { + m_micromapEXT = VULKAN_HPP_NAMESPACE::exchange( rhs.m_micromapEXT, {} ); + return *this; + } +#endif VULKAN_HPP_CONSTEXPR MicromapEXT( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} @@ -4423,11 +4878,13 @@ namespace VULKAN_HPP_NAMESPACE using Type = VULKAN_HPP_NAMESPACE::MicromapEXT; }; +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) template <> struct CppType { using Type = VULKAN_HPP_NAMESPACE::MicromapEXT; }; +#endif template <> struct isVulkanHandleType @@ -4446,7 +4903,22 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCommandBuffer; public: - VULKAN_HPP_CONSTEXPR CommandBuffer() = default; + CommandBuffer() VULKAN_HPP_NOEXCEPT{}; // = default - try to workaround a compiler issue + CommandBuffer( CommandBuffer const & rhs ) = default; + CommandBuffer & operator=( CommandBuffer const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + CommandBuffer( CommandBuffer && rhs ) = default; + CommandBuffer & operator=( CommandBuffer && rhs ) = default; +#else + CommandBuffer( CommandBuffer && rhs ) VULKAN_HPP_NOEXCEPT : m_commandBuffer( VULKAN_HPP_NAMESPACE::exchange( rhs.m_commandBuffer, {} ) ) {} + + CommandBuffer & operator=( CommandBuffer && rhs ) VULKAN_HPP_NOEXCEPT + { + m_commandBuffer = VULKAN_HPP_NAMESPACE::exchange( rhs.m_commandBuffer, {} ); + return *this; + } +#endif VULKAN_HPP_CONSTEXPR CommandBuffer( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} @@ -5950,11 +6422,11 @@ namespace VULKAN_HPP_NAMESPACE #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - void setRenderingInputAttachmentIndicesKHR( const VULKAN_HPP_NAMESPACE::RenderingInputAttachmentIndexInfoKHR * pLocationInfo, + void setRenderingInputAttachmentIndicesKHR( const VULKAN_HPP_NAMESPACE::RenderingInputAttachmentIndexInfoKHR * pInputAttachmentIndexInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE template - void setRenderingInputAttachmentIndicesKHR( const VULKAN_HPP_NAMESPACE::RenderingInputAttachmentIndexInfoKHR & locationInfo, + void setRenderingInputAttachmentIndicesKHR( const VULKAN_HPP_NAMESPACE::RenderingInputAttachmentIndexInfoKHR & inputAttachmentIndexInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -6817,11 +7289,13 @@ namespace VULKAN_HPP_NAMESPACE using Type = VULKAN_HPP_NAMESPACE::CommandBuffer; }; +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) template <> struct CppType { using Type = VULKAN_HPP_NAMESPACE::CommandBuffer; }; +#endif template <> struct isVulkanHandleType @@ -6840,7 +7314,22 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDeviceMemory; public: - VULKAN_HPP_CONSTEXPR DeviceMemory() = default; + DeviceMemory() VULKAN_HPP_NOEXCEPT{}; // = default - try to workaround a compiler issue + DeviceMemory( DeviceMemory const & rhs ) = default; + DeviceMemory & operator=( DeviceMemory const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + DeviceMemory( DeviceMemory && rhs ) = default; + DeviceMemory & operator=( DeviceMemory && rhs ) = default; +#else + DeviceMemory( DeviceMemory && rhs ) VULKAN_HPP_NOEXCEPT : m_deviceMemory( VULKAN_HPP_NAMESPACE::exchange( rhs.m_deviceMemory, {} ) ) {} + + DeviceMemory & operator=( DeviceMemory && rhs ) VULKAN_HPP_NOEXCEPT + { + m_deviceMemory = VULKAN_HPP_NAMESPACE::exchange( rhs.m_deviceMemory, {} ); + return *this; + } +#endif VULKAN_HPP_CONSTEXPR DeviceMemory( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} @@ -6863,7 +7352,7 @@ namespace VULKAN_HPP_NAMESPACE #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( DeviceMemory const & ) const = default; #else - bool operator==( DeviceMemory const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DeviceMemory const & rhs ) const VULKAN_HPP_NOEXCEPT { return m_deviceMemory == rhs.m_deviceMemory; } @@ -6910,11 +7399,13 @@ namespace VULKAN_HPP_NAMESPACE using Type = VULKAN_HPP_NAMESPACE::DeviceMemory; }; +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) template <> struct CppType { using Type = VULKAN_HPP_NAMESPACE::DeviceMemory; }; +#endif template <> struct isVulkanHandleType @@ -6933,7 +7424,22 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; public: - VULKAN_HPP_CONSTEXPR VideoSessionKHR() = default; + VideoSessionKHR() VULKAN_HPP_NOEXCEPT{}; // = default - try to workaround a compiler issue + VideoSessionKHR( VideoSessionKHR const & rhs ) = default; + VideoSessionKHR & operator=( VideoSessionKHR const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + VideoSessionKHR( VideoSessionKHR && rhs ) = default; + VideoSessionKHR & operator=( VideoSessionKHR && rhs ) = default; +#else + VideoSessionKHR( VideoSessionKHR && rhs ) VULKAN_HPP_NOEXCEPT : m_videoSessionKHR( VULKAN_HPP_NAMESPACE::exchange( rhs.m_videoSessionKHR, {} ) ) {} + + VideoSessionKHR & operator=( VideoSessionKHR && rhs ) VULKAN_HPP_NOEXCEPT + { + m_videoSessionKHR = VULKAN_HPP_NAMESPACE::exchange( rhs.m_videoSessionKHR, {} ); + return *this; + } +#endif VULKAN_HPP_CONSTEXPR VideoSessionKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} @@ -6997,11 +7503,13 @@ namespace VULKAN_HPP_NAMESPACE using Type = VULKAN_HPP_NAMESPACE::VideoSessionKHR; }; +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) template <> struct CppType { using Type = VULKAN_HPP_NAMESPACE::VideoSessionKHR; }; +#endif template <> 
struct isVulkanHandleType @@ -7020,7 +7528,25 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; public: - VULKAN_HPP_CONSTEXPR DeferredOperationKHR() = default; + DeferredOperationKHR() VULKAN_HPP_NOEXCEPT{}; // = default - try to workaround a compiler issue + DeferredOperationKHR( DeferredOperationKHR const & rhs ) = default; + DeferredOperationKHR & operator=( DeferredOperationKHR const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + DeferredOperationKHR( DeferredOperationKHR && rhs ) = default; + DeferredOperationKHR & operator=( DeferredOperationKHR && rhs ) = default; +#else + DeferredOperationKHR( DeferredOperationKHR && rhs ) VULKAN_HPP_NOEXCEPT + : m_deferredOperationKHR( VULKAN_HPP_NAMESPACE::exchange( rhs.m_deferredOperationKHR, {} ) ) + { + } + + DeferredOperationKHR & operator=( DeferredOperationKHR && rhs ) VULKAN_HPP_NOEXCEPT + { + m_deferredOperationKHR = VULKAN_HPP_NAMESPACE::exchange( rhs.m_deferredOperationKHR, {} ); + return *this; + } +#endif VULKAN_HPP_CONSTEXPR DeferredOperationKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} @@ -7087,11 +7613,13 @@ namespace VULKAN_HPP_NAMESPACE using Type = VULKAN_HPP_NAMESPACE::DeferredOperationKHR; }; +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) template <> struct CppType { using Type = VULKAN_HPP_NAMESPACE::DeferredOperationKHR; }; +#endif template <> struct isVulkanHandleType @@ -7111,7 +7639,25 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eBufferCollectionFUCHSIA; public: - VULKAN_HPP_CONSTEXPR BufferCollectionFUCHSIA() = default; + BufferCollectionFUCHSIA() VULKAN_HPP_NOEXCEPT{}; // = default - try to workaround a compiler issue + BufferCollectionFUCHSIA( BufferCollectionFUCHSIA const & rhs ) = default; + BufferCollectionFUCHSIA & operator=( BufferCollectionFUCHSIA const & rhs ) = default; + +# if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + BufferCollectionFUCHSIA( BufferCollectionFUCHSIA && rhs ) = default; + BufferCollectionFUCHSIA & operator=( BufferCollectionFUCHSIA && rhs ) = default; +# else + BufferCollectionFUCHSIA( BufferCollectionFUCHSIA && rhs ) VULKAN_HPP_NOEXCEPT + : m_bufferCollectionFUCHSIA( VULKAN_HPP_NAMESPACE::exchange( rhs.m_bufferCollectionFUCHSIA, {} ) ) + { + } + + BufferCollectionFUCHSIA & operator=( BufferCollectionFUCHSIA && rhs ) VULKAN_HPP_NOEXCEPT + { + m_bufferCollectionFUCHSIA = VULKAN_HPP_NAMESPACE::exchange( rhs.m_bufferCollectionFUCHSIA, {} ); + return *this; + } +# endif VULKAN_HPP_CONSTEXPR BufferCollectionFUCHSIA( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} @@ -7184,11 +7730,13 @@ namespace VULKAN_HPP_NAMESPACE using Type = VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA; }; +# if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) template <> struct CppType { using Type = VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA; }; +# endif template <> struct isVulkanHandleType @@ -7208,7 +7756,22 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eBufferView; public: - VULKAN_HPP_CONSTEXPR BufferView() = default; + BufferView() VULKAN_HPP_NOEXCEPT{}; // = default - try to workaround a compiler issue + BufferView( BufferView const & rhs ) = default; + BufferView & operator=( BufferView const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + BufferView( BufferView && rhs ) = default; + BufferView & operator=( BufferView && rhs ) = default; +#else + BufferView( BufferView && rhs ) VULKAN_HPP_NOEXCEPT : m_bufferView( VULKAN_HPP_NAMESPACE::exchange( rhs.m_bufferView, {} ) ) {} + 
+ BufferView & operator=( BufferView && rhs ) VULKAN_HPP_NOEXCEPT + { + m_bufferView = VULKAN_HPP_NAMESPACE::exchange( rhs.m_bufferView, {} ); + return *this; + } +#endif VULKAN_HPP_CONSTEXPR BufferView( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} @@ -7278,11 +7841,13 @@ namespace VULKAN_HPP_NAMESPACE using Type = VULKAN_HPP_NAMESPACE::BufferView; }; +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) template <> struct CppType { using Type = VULKAN_HPP_NAMESPACE::BufferView; }; +#endif template <> struct isVulkanHandleType @@ -7301,7 +7866,22 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCommandPool; public: - VULKAN_HPP_CONSTEXPR CommandPool() = default; + CommandPool() VULKAN_HPP_NOEXCEPT{}; // = default - try to workaround a compiler issue + CommandPool( CommandPool const & rhs ) = default; + CommandPool & operator=( CommandPool const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + CommandPool( CommandPool && rhs ) = default; + CommandPool & operator=( CommandPool && rhs ) = default; +#else + CommandPool( CommandPool && rhs ) VULKAN_HPP_NOEXCEPT : m_commandPool( VULKAN_HPP_NAMESPACE::exchange( rhs.m_commandPool, {} ) ) {} + + CommandPool & operator=( CommandPool && rhs ) VULKAN_HPP_NOEXCEPT + { + m_commandPool = VULKAN_HPP_NAMESPACE::exchange( rhs.m_commandPool, {} ); + return *this; + } +#endif VULKAN_HPP_CONSTEXPR CommandPool( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} @@ -7371,11 +7951,13 @@ namespace VULKAN_HPP_NAMESPACE using Type = VULKAN_HPP_NAMESPACE::CommandPool; }; +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) template <> struct CppType { using Type = VULKAN_HPP_NAMESPACE::CommandPool; }; +#endif template <> struct isVulkanHandleType @@ -7394,7 +7976,22 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePipelineCache; public: - VULKAN_HPP_CONSTEXPR PipelineCache() = default; + PipelineCache() VULKAN_HPP_NOEXCEPT{}; // = default - try to workaround a compiler issue + PipelineCache( PipelineCache const & rhs ) = default; + PipelineCache & operator=( PipelineCache const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + PipelineCache( PipelineCache && rhs ) = default; + PipelineCache & operator=( PipelineCache && rhs ) = default; +#else + PipelineCache( PipelineCache && rhs ) VULKAN_HPP_NOEXCEPT : m_pipelineCache( VULKAN_HPP_NAMESPACE::exchange( rhs.m_pipelineCache, {} ) ) {} + + PipelineCache & operator=( PipelineCache && rhs ) VULKAN_HPP_NOEXCEPT + { + m_pipelineCache = VULKAN_HPP_NAMESPACE::exchange( rhs.m_pipelineCache, {} ); + return *this; + } +#endif VULKAN_HPP_CONSTEXPR PipelineCache( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} @@ -7464,11 +8061,13 @@ namespace VULKAN_HPP_NAMESPACE using Type = VULKAN_HPP_NAMESPACE::PipelineCache; }; +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) template <> struct CppType { using Type = VULKAN_HPP_NAMESPACE::PipelineCache; }; +#endif template <> struct isVulkanHandleType @@ -7487,7 +8086,22 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCuFunctionNVX; public: - VULKAN_HPP_CONSTEXPR CuFunctionNVX() = default; + CuFunctionNVX() VULKAN_HPP_NOEXCEPT{}; // = default - try to workaround a compiler issue + CuFunctionNVX( CuFunctionNVX const & rhs ) = default; + CuFunctionNVX & operator=( CuFunctionNVX const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + CuFunctionNVX( CuFunctionNVX && rhs ) = default; + CuFunctionNVX & operator=( CuFunctionNVX && rhs ) = default; +#else + CuFunctionNVX( CuFunctionNVX && 
rhs ) VULKAN_HPP_NOEXCEPT : m_cuFunctionNVX( VULKAN_HPP_NAMESPACE::exchange( rhs.m_cuFunctionNVX, {} ) ) {} + + CuFunctionNVX & operator=( CuFunctionNVX && rhs ) VULKAN_HPP_NOEXCEPT + { + m_cuFunctionNVX = VULKAN_HPP_NAMESPACE::exchange( rhs.m_cuFunctionNVX, {} ); + return *this; + } +#endif VULKAN_HPP_CONSTEXPR CuFunctionNVX( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} @@ -7557,11 +8171,13 @@ namespace VULKAN_HPP_NAMESPACE using Type = VULKAN_HPP_NAMESPACE::CuFunctionNVX; }; +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) template <> struct CppType { using Type = VULKAN_HPP_NAMESPACE::CuFunctionNVX; }; +#endif template <> struct isVulkanHandleType @@ -7580,7 +8196,22 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCuModuleNVX; public: - VULKAN_HPP_CONSTEXPR CuModuleNVX() = default; + CuModuleNVX() VULKAN_HPP_NOEXCEPT{}; // = default - try to workaround a compiler issue + CuModuleNVX( CuModuleNVX const & rhs ) = default; + CuModuleNVX & operator=( CuModuleNVX const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + CuModuleNVX( CuModuleNVX && rhs ) = default; + CuModuleNVX & operator=( CuModuleNVX && rhs ) = default; +#else + CuModuleNVX( CuModuleNVX && rhs ) VULKAN_HPP_NOEXCEPT : m_cuModuleNVX( VULKAN_HPP_NAMESPACE::exchange( rhs.m_cuModuleNVX, {} ) ) {} + + CuModuleNVX & operator=( CuModuleNVX && rhs ) VULKAN_HPP_NOEXCEPT + { + m_cuModuleNVX = VULKAN_HPP_NAMESPACE::exchange( rhs.m_cuModuleNVX, {} ); + return *this; + } +#endif VULKAN_HPP_CONSTEXPR CuModuleNVX( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} @@ -7650,11 +8281,13 @@ namespace VULKAN_HPP_NAMESPACE using Type = VULKAN_HPP_NAMESPACE::CuModuleNVX; }; +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) template <> struct CppType { using Type = VULKAN_HPP_NAMESPACE::CuModuleNVX; }; +#endif template <> struct isVulkanHandleType @@ -7674,7 +8307,22 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCudaFunctionNV; public: - VULKAN_HPP_CONSTEXPR CudaFunctionNV() = default; + CudaFunctionNV() VULKAN_HPP_NOEXCEPT{}; // = default - try to workaround a compiler issue + CudaFunctionNV( CudaFunctionNV const & rhs ) = default; + CudaFunctionNV & operator=( CudaFunctionNV const & rhs ) = default; + +# if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + CudaFunctionNV( CudaFunctionNV && rhs ) = default; + CudaFunctionNV & operator=( CudaFunctionNV && rhs ) = default; +# else + CudaFunctionNV( CudaFunctionNV && rhs ) VULKAN_HPP_NOEXCEPT : m_cudaFunctionNV( VULKAN_HPP_NAMESPACE::exchange( rhs.m_cudaFunctionNV, {} ) ) {} + + CudaFunctionNV & operator=( CudaFunctionNV && rhs ) VULKAN_HPP_NOEXCEPT + { + m_cudaFunctionNV = VULKAN_HPP_NAMESPACE::exchange( rhs.m_cudaFunctionNV, {} ); + return *this; + } +# endif VULKAN_HPP_CONSTEXPR CudaFunctionNV( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} @@ -7744,11 +8392,13 @@ namespace VULKAN_HPP_NAMESPACE using Type = VULKAN_HPP_NAMESPACE::CudaFunctionNV; }; +# if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) template <> struct CppType { using Type = VULKAN_HPP_NAMESPACE::CudaFunctionNV; }; +# endif template <> struct isVulkanHandleType @@ -7769,7 +8419,22 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCudaModuleNV; public: - VULKAN_HPP_CONSTEXPR CudaModuleNV() = default; + CudaModuleNV() VULKAN_HPP_NOEXCEPT{}; // = default - try to workaround a compiler issue + CudaModuleNV( CudaModuleNV const & rhs ) = default; + CudaModuleNV & operator=( CudaModuleNV const & rhs ) = default; + +# if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + 
CudaModuleNV( CudaModuleNV && rhs ) = default; + CudaModuleNV & operator=( CudaModuleNV && rhs ) = default; +# else + CudaModuleNV( CudaModuleNV && rhs ) VULKAN_HPP_NOEXCEPT : m_cudaModuleNV( VULKAN_HPP_NAMESPACE::exchange( rhs.m_cudaModuleNV, {} ) ) {} + + CudaModuleNV & operator=( CudaModuleNV && rhs ) VULKAN_HPP_NOEXCEPT + { + m_cudaModuleNV = VULKAN_HPP_NAMESPACE::exchange( rhs.m_cudaModuleNV, {} ); + return *this; + } +# endif VULKAN_HPP_CONSTEXPR CudaModuleNV( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} @@ -7839,11 +8504,13 @@ namespace VULKAN_HPP_NAMESPACE using Type = VULKAN_HPP_NAMESPACE::CudaModuleNV; }; +# if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) template <> struct CppType { using Type = VULKAN_HPP_NAMESPACE::CudaModuleNV; }; +# endif template <> struct isVulkanHandleType @@ -7863,7 +8530,22 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorPool; public: - VULKAN_HPP_CONSTEXPR DescriptorPool() = default; + DescriptorPool() VULKAN_HPP_NOEXCEPT{}; // = default - try to workaround a compiler issue + DescriptorPool( DescriptorPool const & rhs ) = default; + DescriptorPool & operator=( DescriptorPool const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + DescriptorPool( DescriptorPool && rhs ) = default; + DescriptorPool & operator=( DescriptorPool && rhs ) = default; +#else + DescriptorPool( DescriptorPool && rhs ) VULKAN_HPP_NOEXCEPT : m_descriptorPool( VULKAN_HPP_NAMESPACE::exchange( rhs.m_descriptorPool, {} ) ) {} + + DescriptorPool & operator=( DescriptorPool && rhs ) VULKAN_HPP_NOEXCEPT + { + m_descriptorPool = VULKAN_HPP_NAMESPACE::exchange( rhs.m_descriptorPool, {} ); + return *this; + } +#endif VULKAN_HPP_CONSTEXPR DescriptorPool( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} @@ -7933,11 +8615,13 @@ namespace VULKAN_HPP_NAMESPACE using Type = VULKAN_HPP_NAMESPACE::DescriptorPool; }; +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) template <> struct CppType { using Type = VULKAN_HPP_NAMESPACE::DescriptorPool; }; +#endif template <> struct isVulkanHandleType @@ -7956,7 +8640,25 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorSetLayout; public: - VULKAN_HPP_CONSTEXPR DescriptorSetLayout() = default; + DescriptorSetLayout() VULKAN_HPP_NOEXCEPT{}; // = default - try to workaround a compiler issue + DescriptorSetLayout( DescriptorSetLayout const & rhs ) = default; + DescriptorSetLayout & operator=( DescriptorSetLayout const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + DescriptorSetLayout( DescriptorSetLayout && rhs ) = default; + DescriptorSetLayout & operator=( DescriptorSetLayout && rhs ) = default; +#else + DescriptorSetLayout( DescriptorSetLayout && rhs ) VULKAN_HPP_NOEXCEPT + : m_descriptorSetLayout( VULKAN_HPP_NAMESPACE::exchange( rhs.m_descriptorSetLayout, {} ) ) + { + } + + DescriptorSetLayout & operator=( DescriptorSetLayout && rhs ) VULKAN_HPP_NOEXCEPT + { + m_descriptorSetLayout = VULKAN_HPP_NAMESPACE::exchange( rhs.m_descriptorSetLayout, {} ); + return *this; + } +#endif VULKAN_HPP_CONSTEXPR DescriptorSetLayout( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} @@ -8029,11 +8731,13 @@ namespace VULKAN_HPP_NAMESPACE using Type = VULKAN_HPP_NAMESPACE::DescriptorSetLayout; }; +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) template <> struct CppType { using Type = VULKAN_HPP_NAMESPACE::DescriptorSetLayout; }; +#endif template <> struct isVulkanHandleType @@ -8052,7 +8756,22 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eFramebuffer; 
public: - VULKAN_HPP_CONSTEXPR Framebuffer() = default; + Framebuffer() VULKAN_HPP_NOEXCEPT{}; // = default - try to workaround a compiler issue + Framebuffer( Framebuffer const & rhs ) = default; + Framebuffer & operator=( Framebuffer const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + Framebuffer( Framebuffer && rhs ) = default; + Framebuffer & operator=( Framebuffer && rhs ) = default; +#else + Framebuffer( Framebuffer && rhs ) VULKAN_HPP_NOEXCEPT : m_framebuffer( VULKAN_HPP_NAMESPACE::exchange( rhs.m_framebuffer, {} ) ) {} + + Framebuffer & operator=( Framebuffer && rhs ) VULKAN_HPP_NOEXCEPT + { + m_framebuffer = VULKAN_HPP_NAMESPACE::exchange( rhs.m_framebuffer, {} ); + return *this; + } +#endif VULKAN_HPP_CONSTEXPR Framebuffer( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} @@ -8122,11 +8841,13 @@ namespace VULKAN_HPP_NAMESPACE using Type = VULKAN_HPP_NAMESPACE::Framebuffer; }; +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) template <> struct CppType { using Type = VULKAN_HPP_NAMESPACE::Framebuffer; }; +#endif template <> struct isVulkanHandleType @@ -8145,7 +8866,25 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; public: - VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutNV() = default; + IndirectCommandsLayoutNV() VULKAN_HPP_NOEXCEPT{}; // = default - try to workaround a compiler issue + IndirectCommandsLayoutNV( IndirectCommandsLayoutNV const & rhs ) = default; + IndirectCommandsLayoutNV & operator=( IndirectCommandsLayoutNV const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + IndirectCommandsLayoutNV( IndirectCommandsLayoutNV && rhs ) = default; + IndirectCommandsLayoutNV & operator=( IndirectCommandsLayoutNV && rhs ) = default; +#else + IndirectCommandsLayoutNV( IndirectCommandsLayoutNV && rhs ) VULKAN_HPP_NOEXCEPT + : m_indirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::exchange( rhs.m_indirectCommandsLayoutNV, {} ) ) + { + } + + IndirectCommandsLayoutNV & operator=( IndirectCommandsLayoutNV && rhs ) VULKAN_HPP_NOEXCEPT + { + m_indirectCommandsLayoutNV = VULKAN_HPP_NAMESPACE::exchange( rhs.m_indirectCommandsLayoutNV, {} ); + return *this; + } +#endif VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutNV( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} @@ -8212,11 +8951,13 @@ namespace VULKAN_HPP_NAMESPACE using Type = VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV; }; +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) template <> struct CppType { using Type = VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV; }; +#endif template <> struct isVulkanHandleType @@ -8235,7 +8976,22 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; public: - VULKAN_HPP_CONSTEXPR PrivateDataSlot() = default; + PrivateDataSlot() VULKAN_HPP_NOEXCEPT{}; // = default - try to workaround a compiler issue + PrivateDataSlot( PrivateDataSlot const & rhs ) = default; + PrivateDataSlot & operator=( PrivateDataSlot const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + PrivateDataSlot( PrivateDataSlot && rhs ) = default; + PrivateDataSlot & operator=( PrivateDataSlot && rhs ) = default; +#else + PrivateDataSlot( PrivateDataSlot && rhs ) VULKAN_HPP_NOEXCEPT : m_privateDataSlot( VULKAN_HPP_NAMESPACE::exchange( rhs.m_privateDataSlot, {} ) ) {} + + PrivateDataSlot & operator=( PrivateDataSlot && rhs ) VULKAN_HPP_NOEXCEPT + { + m_privateDataSlot = VULKAN_HPP_NAMESPACE::exchange( rhs.m_privateDataSlot, {} ); + return *this; + } +#endif VULKAN_HPP_CONSTEXPR PrivateDataSlot( std::nullptr_t ) VULKAN_HPP_NOEXCEPT 
{} @@ -8299,11 +9055,13 @@ namespace VULKAN_HPP_NAMESPACE using Type = VULKAN_HPP_NAMESPACE::PrivateDataSlot; }; +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) template <> struct CppType { using Type = VULKAN_HPP_NAMESPACE::PrivateDataSlot; }; +#endif template <> struct isVulkanHandleType @@ -8324,7 +9082,22 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eRenderPass; public: - VULKAN_HPP_CONSTEXPR RenderPass() = default; + RenderPass() VULKAN_HPP_NOEXCEPT{}; // = default - try to workaround a compiler issue + RenderPass( RenderPass const & rhs ) = default; + RenderPass & operator=( RenderPass const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + RenderPass( RenderPass && rhs ) = default; + RenderPass & operator=( RenderPass && rhs ) = default; +#else + RenderPass( RenderPass && rhs ) VULKAN_HPP_NOEXCEPT : m_renderPass( VULKAN_HPP_NAMESPACE::exchange( rhs.m_renderPass, {} ) ) {} + + RenderPass & operator=( RenderPass && rhs ) VULKAN_HPP_NOEXCEPT + { + m_renderPass = VULKAN_HPP_NAMESPACE::exchange( rhs.m_renderPass, {} ); + return *this; + } +#endif VULKAN_HPP_CONSTEXPR RenderPass( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} @@ -8394,11 +9167,13 @@ namespace VULKAN_HPP_NAMESPACE using Type = VULKAN_HPP_NAMESPACE::RenderPass; }; +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) template <> struct CppType { using Type = VULKAN_HPP_NAMESPACE::RenderPass; }; +#endif template <> struct isVulkanHandleType @@ -8417,7 +9192,22 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSampler; public: - VULKAN_HPP_CONSTEXPR Sampler() = default; + Sampler() VULKAN_HPP_NOEXCEPT{}; // = default - try to workaround a compiler issue + Sampler( Sampler const & rhs ) = default; + Sampler & operator=( Sampler const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + Sampler( Sampler && rhs ) = default; + Sampler & operator=( Sampler && rhs ) = default; +#else + Sampler( Sampler && rhs ) VULKAN_HPP_NOEXCEPT : m_sampler( VULKAN_HPP_NAMESPACE::exchange( rhs.m_sampler, {} ) ) {} + + Sampler & operator=( Sampler && rhs ) VULKAN_HPP_NOEXCEPT + { + m_sampler = VULKAN_HPP_NAMESPACE::exchange( rhs.m_sampler, {} ); + return *this; + } +#endif VULKAN_HPP_CONSTEXPR Sampler( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} @@ -8487,11 +9277,13 @@ namespace VULKAN_HPP_NAMESPACE using Type = VULKAN_HPP_NAMESPACE::Sampler; }; +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) template <> struct CppType { using Type = VULKAN_HPP_NAMESPACE::Sampler; }; +#endif template <> struct isVulkanHandleType @@ -8510,7 +9302,25 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSamplerYcbcrConversion; public: - VULKAN_HPP_CONSTEXPR SamplerYcbcrConversion() = default; + SamplerYcbcrConversion() VULKAN_HPP_NOEXCEPT{}; // = default - try to workaround a compiler issue + SamplerYcbcrConversion( SamplerYcbcrConversion const & rhs ) = default; + SamplerYcbcrConversion & operator=( SamplerYcbcrConversion const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + SamplerYcbcrConversion( SamplerYcbcrConversion && rhs ) = default; + SamplerYcbcrConversion & operator=( SamplerYcbcrConversion && rhs ) = default; +#else + SamplerYcbcrConversion( SamplerYcbcrConversion && rhs ) VULKAN_HPP_NOEXCEPT + : m_samplerYcbcrConversion( VULKAN_HPP_NAMESPACE::exchange( rhs.m_samplerYcbcrConversion, {} ) ) + { + } + + SamplerYcbcrConversion & operator=( SamplerYcbcrConversion && rhs ) VULKAN_HPP_NOEXCEPT + { + m_samplerYcbcrConversion = 
VULKAN_HPP_NAMESPACE::exchange( rhs.m_samplerYcbcrConversion, {} ); + return *this; + } +#endif VULKAN_HPP_CONSTEXPR SamplerYcbcrConversion( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} @@ -8583,11 +9393,13 @@ namespace VULKAN_HPP_NAMESPACE using Type = VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion; }; +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) template <> struct CppType { using Type = VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion; }; +#endif template <> struct isVulkanHandleType @@ -8608,7 +9420,22 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eShaderModule; public: - VULKAN_HPP_CONSTEXPR ShaderModule() = default; + ShaderModule() VULKAN_HPP_NOEXCEPT{}; // = default - try to workaround a compiler issue + ShaderModule( ShaderModule const & rhs ) = default; + ShaderModule & operator=( ShaderModule const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + ShaderModule( ShaderModule && rhs ) = default; + ShaderModule & operator=( ShaderModule && rhs ) = default; +#else + ShaderModule( ShaderModule && rhs ) VULKAN_HPP_NOEXCEPT : m_shaderModule( VULKAN_HPP_NAMESPACE::exchange( rhs.m_shaderModule, {} ) ) {} + + ShaderModule & operator=( ShaderModule && rhs ) VULKAN_HPP_NOEXCEPT + { + m_shaderModule = VULKAN_HPP_NAMESPACE::exchange( rhs.m_shaderModule, {} ); + return *this; + } +#endif VULKAN_HPP_CONSTEXPR ShaderModule( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} @@ -8678,11 +9505,13 @@ namespace VULKAN_HPP_NAMESPACE using Type = VULKAN_HPP_NAMESPACE::ShaderModule; }; +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) template <> struct CppType { using Type = VULKAN_HPP_NAMESPACE::ShaderModule; }; +#endif template <> struct isVulkanHandleType @@ -8701,7 +9530,24 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eValidationCacheEXT; public: - VULKAN_HPP_CONSTEXPR ValidationCacheEXT() = default; + ValidationCacheEXT() VULKAN_HPP_NOEXCEPT{}; // = default - try to workaround a compiler issue + ValidationCacheEXT( ValidationCacheEXT const & rhs ) = default; + ValidationCacheEXT & operator=( ValidationCacheEXT const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + ValidationCacheEXT( ValidationCacheEXT && rhs ) = default; + ValidationCacheEXT & operator=( ValidationCacheEXT && rhs ) = default; +#else + ValidationCacheEXT( ValidationCacheEXT && rhs ) VULKAN_HPP_NOEXCEPT : m_validationCacheEXT( VULKAN_HPP_NAMESPACE::exchange( rhs.m_validationCacheEXT, {} ) ) + { + } + + ValidationCacheEXT & operator=( ValidationCacheEXT && rhs ) VULKAN_HPP_NOEXCEPT + { + m_validationCacheEXT = VULKAN_HPP_NAMESPACE::exchange( rhs.m_validationCacheEXT, {} ); + return *this; + } +#endif VULKAN_HPP_CONSTEXPR ValidationCacheEXT( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} @@ -8773,11 +9619,13 @@ namespace VULKAN_HPP_NAMESPACE using Type = VULKAN_HPP_NAMESPACE::ValidationCacheEXT; }; +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) template <> struct CppType { using Type = VULKAN_HPP_NAMESPACE::ValidationCacheEXT; }; +#endif template <> struct isVulkanHandleType @@ -8796,7 +9644,25 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; public: - VULKAN_HPP_CONSTEXPR VideoSessionParametersKHR() = default; + VideoSessionParametersKHR() VULKAN_HPP_NOEXCEPT{}; // = default - try to workaround a compiler issue + VideoSessionParametersKHR( VideoSessionParametersKHR const & rhs ) = default; + VideoSessionParametersKHR & operator=( VideoSessionParametersKHR const & rhs ) = default; + +#if !defined( 
VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + VideoSessionParametersKHR( VideoSessionParametersKHR && rhs ) = default; + VideoSessionParametersKHR & operator=( VideoSessionParametersKHR && rhs ) = default; +#else + VideoSessionParametersKHR( VideoSessionParametersKHR && rhs ) VULKAN_HPP_NOEXCEPT + : m_videoSessionParametersKHR( VULKAN_HPP_NAMESPACE::exchange( rhs.m_videoSessionParametersKHR, {} ) ) + { + } + + VideoSessionParametersKHR & operator=( VideoSessionParametersKHR && rhs ) VULKAN_HPP_NOEXCEPT + { + m_videoSessionParametersKHR = VULKAN_HPP_NAMESPACE::exchange( rhs.m_videoSessionParametersKHR, {} ); + return *this; + } +#endif VULKAN_HPP_CONSTEXPR VideoSessionParametersKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} @@ -8863,11 +9729,13 @@ namespace VULKAN_HPP_NAMESPACE using Type = VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR; }; +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) template <> struct CppType { using Type = VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR; }; +#endif template <> struct isVulkanHandleType @@ -8875,6 +9743,110 @@ namespace VULKAN_HPP_NAMESPACE static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; }; + class PipelineBinaryKHR + { + public: + using CType = VkPipelineBinaryKHR; + using NativeType = VkPipelineBinaryKHR; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePipelineBinaryKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + public: + PipelineBinaryKHR() VULKAN_HPP_NOEXCEPT{}; // = default - try to workaround a compiler issue + PipelineBinaryKHR( PipelineBinaryKHR const & rhs ) = default; + PipelineBinaryKHR & operator=( PipelineBinaryKHR const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + PipelineBinaryKHR( PipelineBinaryKHR && rhs ) = default; + PipelineBinaryKHR & operator=( PipelineBinaryKHR && rhs ) = default; +#else + PipelineBinaryKHR( PipelineBinaryKHR && rhs ) VULKAN_HPP_NOEXCEPT : m_pipelineBinaryKHR( VULKAN_HPP_NAMESPACE::exchange( rhs.m_pipelineBinaryKHR, {} ) ) {} + + PipelineBinaryKHR & operator=( PipelineBinaryKHR && rhs ) VULKAN_HPP_NOEXCEPT + { + m_pipelineBinaryKHR = VULKAN_HPP_NAMESPACE::exchange( rhs.m_pipelineBinaryKHR, {} ); + return *this; + } +#endif + + VULKAN_HPP_CONSTEXPR PipelineBinaryKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT PipelineBinaryKHR( VkPipelineBinaryKHR pipelineBinaryKHR ) VULKAN_HPP_NOEXCEPT : m_pipelineBinaryKHR( pipelineBinaryKHR ) {} + +#if ( VULKAN_HPP_TYPESAFE_CONVERSION == 1 ) + PipelineBinaryKHR & operator=( VkPipelineBinaryKHR pipelineBinaryKHR ) VULKAN_HPP_NOEXCEPT + { + m_pipelineBinaryKHR = pipelineBinaryKHR; + return *this; + } +#endif + + PipelineBinaryKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_pipelineBinaryKHR = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineBinaryKHR const & ) const = default; +#else + bool operator==( PipelineBinaryKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_pipelineBinaryKHR == rhs.m_pipelineBinaryKHR; + } + + bool operator!=( PipelineBinaryKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_pipelineBinaryKHR != rhs.m_pipelineBinaryKHR; + } + + bool operator<( PipelineBinaryKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_pipelineBinaryKHR < rhs.m_pipelineBinaryKHR; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator 
VkPipelineBinaryKHR() const VULKAN_HPP_NOEXCEPT + { + return m_pipelineBinaryKHR; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_pipelineBinaryKHR != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_pipelineBinaryKHR == VK_NULL_HANDLE; + } + + private: + VkPipelineBinaryKHR m_pipelineBinaryKHR = {}; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::PipelineBinaryKHR; + }; + +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::PipelineBinaryKHR; + }; +#endif + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + class Queue { public: @@ -8886,7 +9858,22 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eQueue; public: - VULKAN_HPP_CONSTEXPR Queue() = default; + Queue() VULKAN_HPP_NOEXCEPT{}; // = default - try to workaround a compiler issue + Queue( Queue const & rhs ) = default; + Queue & operator=( Queue const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + Queue( Queue && rhs ) = default; + Queue & operator=( Queue && rhs ) = default; +#else + Queue( Queue && rhs ) VULKAN_HPP_NOEXCEPT : m_queue( VULKAN_HPP_NAMESPACE::exchange( rhs.m_queue, {} ) ) {} + + Queue & operator=( Queue && rhs ) VULKAN_HPP_NOEXCEPT + { + m_queue = VULKAN_HPP_NAMESPACE::exchange( rhs.m_queue, {} ); + return *this; + } +#endif VULKAN_HPP_CONSTEXPR Queue( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} @@ -9036,8 +10023,8 @@ namespace VULKAN_HPP_NAMESPACE #else template VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type - setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ //=== VK_KHR_synchronization2 === @@ -9114,11 +10101,13 @@ namespace VULKAN_HPP_NAMESPACE using Type = VULKAN_HPP_NAMESPACE::Queue; }; +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) template <> struct CppType { using Type = VULKAN_HPP_NAMESPACE::Queue; }; +#endif template <> struct isVulkanHandleType @@ -9137,7 +10126,22 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDevice; public: - VULKAN_HPP_CONSTEXPR Device() = default; + Device() VULKAN_HPP_NOEXCEPT{}; // = default - try to workaround a compiler issue + Device( Device const & rhs ) = default; + Device & operator=( Device const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + Device( Device && rhs ) = default; + Device & operator=( Device && rhs ) = default; +#else + Device( Device && rhs ) VULKAN_HPP_NOEXCEPT : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) {} + + Device & operator=( Device && rhs ) VULKAN_HPP_NOEXCEPT + { + m_device = VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ); + return *this; + } +#endif VULKAN_HPP_CONSTEXPR Device( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} @@ -13455,12 +14459,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD Result getFaultInfoEXT( VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT * pFaultCounts, VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT * pFaultInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - 
template - VULKAN_HPP_NODISCARD typename ResultValueType>::type - getFaultInfoEXT( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - #if defined( VK_USE_PLATFORM_FUCHSIA ) //=== VK_FUCHSIA_external_memory === @@ -13963,11 +14961,11 @@ namespace VULKAN_HPP_NAMESPACE #else template VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type - bindOpticalFlowSessionImageNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session, - VULKAN_HPP_NAMESPACE::OpticalFlowSessionBindingPointNV bindingPoint, - VULKAN_HPP_NAMESPACE::ImageView view, - VULKAN_HPP_NAMESPACE::ImageLayout layout, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + bindOpticalFlowSessionImageNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session, + VULKAN_HPP_NAMESPACE::OpticalFlowSessionBindingPointNV bindingPoint, + VULKAN_HPP_NAMESPACE::ImageView view, + VULKAN_HPP_NAMESPACE::ImageLayout layout, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ //=== VK_KHR_maintenance5 === @@ -14016,6 +15014,17 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + //=== VK_AMD_anti_lag === + + template + void antiLagUpdateAMD( const VULKAN_HPP_NAMESPACE::AntiLagDataAMD * pData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void antiLagUpdateAMD( const VULKAN_HPP_NAMESPACE::AntiLagDataAMD & data, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + //=== VK_EXT_shader_object === template @@ -14105,6 +15114,113 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::ShaderEXT shader, Uint8_tAllocator & uint8_tAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + //=== VK_KHR_pipeline_binary === + + template + VULKAN_HPP_NODISCARD Result createPipelineBinariesKHR( const VULKAN_HPP_NAMESPACE::PipelineBinaryCreateInfoKHR * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::PipelineBinaryHandlesInfoKHR * pBinaries, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD ResultValue> + createPipelineBinariesKHR( const VULKAN_HPP_NAMESPACE::PipelineBinaryCreateInfoKHR & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template < + typename PipelineBinaryKHRAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD ResultValue> + createPipelineBinariesKHR( const VULKAN_HPP_NAMESPACE::PipelineBinaryCreateInfoKHR & createInfo, + Optional allocator, + PipelineBinaryKHRAllocator & pipelineBinaryKHRAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template >> + VULKAN_HPP_NODISCARD ResultValue, PipelineBinaryKHRAllocator>> + createPipelineBinariesKHRUnique( const VULKAN_HPP_NAMESPACE::PipelineBinaryCreateInfoKHR & createInfo, + Optional 
allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template >, + typename std::enable_if< + std::is_same>::value, + int>::type = 0> + VULKAN_HPP_NODISCARD ResultValue, PipelineBinaryKHRAllocator>> + createPipelineBinariesKHRUnique( const VULKAN_HPP_NAMESPACE::PipelineBinaryCreateInfoKHR & createInfo, + Optional allocator, + PipelineBinaryKHRAllocator & pipelineBinaryKHRAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void destroyPipelineBinaryKHR( VULKAN_HPP_NAMESPACE::PipelineBinaryKHR pipelineBinary, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyPipelineBinaryKHR( VULKAN_HPP_NAMESPACE::PipelineBinaryKHR pipelineBinary VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void destroy( VULKAN_HPP_NAMESPACE::PipelineBinaryKHR pipelineBinary, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::PipelineBinaryKHR pipelineBinary, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD Result getPipelineKeyKHR( const VULKAN_HPP_NAMESPACE::PipelineCreateInfoKHR * pPipelineCreateInfo, + VULKAN_HPP_NAMESPACE::PipelineBinaryKeyKHR * pPipelineKey, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + getPipelineKeyKHR( Optional pipelineCreateInfo VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD Result getPipelineBinaryDataKHR( const VULKAN_HPP_NAMESPACE::PipelineBinaryDataInfoKHR * pInfo, + VULKAN_HPP_NAMESPACE::PipelineBinaryKeyKHR * pPipelineBinaryKey, + size_t * pPipelineBinaryDataSize, + void * pPipelineBinaryData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD typename ResultValueType>>::type + getPipelineBinaryDataKHR( const VULKAN_HPP_NAMESPACE::PipelineBinaryDataInfoKHR & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>>::type + getPipelineBinaryDataKHR( const VULKAN_HPP_NAMESPACE::PipelineBinaryDataInfoKHR & info, + Uint8_tAllocator & uint8_tAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE 
*/ + + template + Result releaseCapturedPipelineDataKHR( const VULKAN_HPP_NAMESPACE::ReleaseCapturedPipelineDataInfoKHR * pInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void releaseCapturedPipelineDataKHR( const VULKAN_HPP_NAMESPACE::ReleaseCapturedPipelineDataInfoKHR & info, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + //=== VK_QCOM_tile_properties === template @@ -14178,10 +15294,19 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::GetLatencyMarkerInfoNV * pLatencyMarkerInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::GetLatencyMarkerInfoNV - getLatencyTimingsNV( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD std::vector + getLatencyTimingsNV( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template < + typename LatencyTimingsFrameReportNVAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, + int>::type = 0> + VULKAN_HPP_NODISCARD std::vector + getLatencyTimingsNV( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, + LatencyTimingsFrameReportNVAllocator & latencyTimingsFrameReportNVAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ #if defined( VK_USE_PLATFORM_SCREEN_QNX ) @@ -14258,11 +15383,13 @@ namespace VULKAN_HPP_NAMESPACE using Type = VULKAN_HPP_NAMESPACE::Device; }; +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) template <> struct CppType { using Type = VULKAN_HPP_NAMESPACE::Device; }; +#endif template <> struct isVulkanHandleType @@ -14281,7 +15408,22 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDisplayModeKHR; public: - VULKAN_HPP_CONSTEXPR DisplayModeKHR() = default; + DisplayModeKHR() VULKAN_HPP_NOEXCEPT{}; // = default - try to workaround a compiler issue + DisplayModeKHR( DisplayModeKHR const & rhs ) = default; + DisplayModeKHR & operator=( DisplayModeKHR const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + DisplayModeKHR( DisplayModeKHR && rhs ) = default; + DisplayModeKHR & operator=( DisplayModeKHR && rhs ) = default; +#else + DisplayModeKHR( DisplayModeKHR && rhs ) VULKAN_HPP_NOEXCEPT : m_displayModeKHR( VULKAN_HPP_NAMESPACE::exchange( rhs.m_displayModeKHR, {} ) ) {} + + DisplayModeKHR & operator=( DisplayModeKHR && rhs ) VULKAN_HPP_NOEXCEPT + { + m_displayModeKHR = VULKAN_HPP_NAMESPACE::exchange( rhs.m_displayModeKHR, {} ); + return *this; + } +#endif VULKAN_HPP_CONSTEXPR DisplayModeKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} @@ -14351,11 +15493,13 @@ namespace VULKAN_HPP_NAMESPACE using Type = VULKAN_HPP_NAMESPACE::DisplayModeKHR; }; +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) template <> struct CppType { using Type = VULKAN_HPP_NAMESPACE::DisplayModeKHR; }; +#endif template <> struct isVulkanHandleType @@ -14374,7 +15518,22 @@ namespace 
VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePhysicalDevice; public: - VULKAN_HPP_CONSTEXPR PhysicalDevice() = default; + PhysicalDevice() VULKAN_HPP_NOEXCEPT{}; // = default - try to workaround a compiler issue + PhysicalDevice( PhysicalDevice const & rhs ) = default; + PhysicalDevice & operator=( PhysicalDevice const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + PhysicalDevice( PhysicalDevice && rhs ) = default; + PhysicalDevice & operator=( PhysicalDevice && rhs ) = default; +#else + PhysicalDevice( PhysicalDevice && rhs ) VULKAN_HPP_NOEXCEPT : m_physicalDevice( VULKAN_HPP_NAMESPACE::exchange( rhs.m_physicalDevice, {} ) ) {} + + PhysicalDevice & operator=( PhysicalDevice && rhs ) VULKAN_HPP_NOEXCEPT + { + m_physicalDevice = VULKAN_HPP_NAMESPACE::exchange( rhs.m_physicalDevice, {} ); + return *this; + } +#endif VULKAN_HPP_CONSTEXPR PhysicalDevice( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} @@ -15586,7 +16745,7 @@ namespace VULKAN_HPP_NAMESPACE #else template typename ResultValueType::type - acquireDrmDisplayEXT( int32_t drmFd, VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + acquireDrmDisplayEXT( int32_t drmFd, VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ template @@ -15779,11 +16938,13 @@ namespace VULKAN_HPP_NAMESPACE using Type = VULKAN_HPP_NAMESPACE::PhysicalDevice; }; +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) template <> struct CppType { using Type = VULKAN_HPP_NAMESPACE::PhysicalDevice; }; +#endif template <> struct isVulkanHandleType @@ -15802,7 +16963,22 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eInstance; public: - VULKAN_HPP_CONSTEXPR Instance() = default; + Instance() VULKAN_HPP_NOEXCEPT{}; // = default - try to workaround a compiler issue + Instance( Instance const & rhs ) = default; + Instance & operator=( Instance const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + Instance( Instance && rhs ) = default; + Instance & operator=( Instance && rhs ) = default; +#else + Instance( Instance && rhs ) VULKAN_HPP_NOEXCEPT : m_instance( VULKAN_HPP_NAMESPACE::exchange( rhs.m_instance, {} ) ) {} + + Instance & operator=( Instance && rhs ) VULKAN_HPP_NOEXCEPT + { + m_instance = VULKAN_HPP_NAMESPACE::exchange( rhs.m_instance, {} ); + return *this; + } +#endif VULKAN_HPP_CONSTEXPR Instance( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} @@ -16451,11 +17627,13 @@ namespace VULKAN_HPP_NAMESPACE using Type = VULKAN_HPP_NAMESPACE::Instance; }; +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) template <> struct CppType { using Type = VULKAN_HPP_NAMESPACE::Instance; }; +#endif template <> struct isVulkanHandleType diff --git a/third_party/vulkan/vulkan_hash.hpp b/third_party/vulkan/vulkan_hash.hpp index 8adf257..1894033 100644 --- a/third_party/vulkan/vulkan_hash.hpp +++ b/third_party/vulkan/vulkan_hash.hpp @@ -525,6 +525,17 @@ namespace std } }; + //=== VK_KHR_pipeline_binary === + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineBinaryKHR const & pipelineBinaryKHR ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( pipelineBinaryKHR ) ); + } + }; + #if 14 <= VULKAN_HPP_CPP_VERSION //====================================== //=== HASH structures for structures === @@ -1062,6 +1073,35 @@ namespace std }; # endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + template <> + struct hash + { + 
std::size_t operator()( VULKAN_HPP_NAMESPACE::AntiLagPresentationInfoAMD const & antiLagPresentationInfoAMD ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, antiLagPresentationInfoAMD.sType ); + VULKAN_HPP_HASH_COMBINE( seed, antiLagPresentationInfoAMD.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, antiLagPresentationInfoAMD.stage ); + VULKAN_HPP_HASH_COMBINE( seed, antiLagPresentationInfoAMD.frameIndex ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::AntiLagDataAMD const & antiLagDataAMD ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, antiLagDataAMD.sType ); + VULKAN_HPP_HASH_COMBINE( seed, antiLagDataAMD.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, antiLagDataAMD.mode ); + VULKAN_HPP_HASH_COMBINE( seed, antiLagDataAMD.maxFPS ); + VULKAN_HPP_HASH_COMBINE( seed, antiLagDataAMD.pPresentationInfo ); + return seed; + } + }; + template <> struct hash { @@ -3990,6 +4030,20 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DevicePipelineBinaryInternalCacheControlKHR const & devicePipelineBinaryInternalCacheControlKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, devicePipelineBinaryInternalCacheControlKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, devicePipelineBinaryInternalCacheControlKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, devicePipelineBinaryInternalCacheControlKHR.disableInternalCache ); + return seed; + } + }; + template <> struct hash { @@ -5637,6 +5691,19 @@ namespace std }; # endif /*VK_USE_PLATFORM_IOS_MVK*/ + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageAlignmentControlCreateInfoMESA const & imageAlignmentControlCreateInfoMESA ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, imageAlignmentControlCreateInfoMESA.sType ); + VULKAN_HPP_HASH_COMBINE( seed, imageAlignmentControlCreateInfoMESA.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, imageAlignmentControlCreateInfoMESA.maximumRequestedAlignment ); + return seed; + } + }; + template <> struct hash { @@ -7478,6 +7545,19 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceAntiLagFeaturesAMD const & physicalDeviceAntiLagFeaturesAMD ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAntiLagFeaturesAMD.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAntiLagFeaturesAMD.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceAntiLagFeaturesAMD.antiLag ); + return seed; + } + }; + template <> struct hash { @@ -7669,16 +7749,45 @@ namespace std }; template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceComputeShaderDerivativesFeaturesNV const & physicalDeviceComputeShaderDerivativesFeaturesNV ) + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceCommandBufferInheritanceFeaturesNV const & physicalDeviceCommandBufferInheritanceFeaturesNV ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceComputeShaderDerivativesFeaturesNV.sType ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceComputeShaderDerivativesFeaturesNV.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceComputeShaderDerivativesFeaturesNV.computeDerivativeGroupQuads ); - VULKAN_HPP_HASH_COMBINE( seed, 
physicalDeviceComputeShaderDerivativesFeaturesNV.computeDerivativeGroupLinear ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCommandBufferInheritanceFeaturesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCommandBufferInheritanceFeaturesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCommandBufferInheritanceFeaturesNV.commandBufferInheritance ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceComputeShaderDerivativesFeaturesKHR const & physicalDeviceComputeShaderDerivativesFeaturesKHR ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceComputeShaderDerivativesFeaturesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceComputeShaderDerivativesFeaturesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceComputeShaderDerivativesFeaturesKHR.computeDerivativeGroupQuads ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceComputeShaderDerivativesFeaturesKHR.computeDerivativeGroupLinear ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceComputeShaderDerivativesPropertiesKHR const & physicalDeviceComputeShaderDerivativesPropertiesKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceComputeShaderDerivativesPropertiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceComputeShaderDerivativesPropertiesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceComputeShaderDerivativesPropertiesKHR.meshAndTaskShaderDerivatives ); return seed; } }; @@ -9116,6 +9225,34 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( + VULKAN_HPP_NAMESPACE::PhysicalDeviceImageAlignmentControlFeaturesMESA const & physicalDeviceImageAlignmentControlFeaturesMESA ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageAlignmentControlFeaturesMESA.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageAlignmentControlFeaturesMESA.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageAlignmentControlFeaturesMESA.imageAlignmentControl ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageAlignmentControlPropertiesMESA const & physicalDeviceImageAlignmentControlPropertiesMESA ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageAlignmentControlPropertiesMESA.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageAlignmentControlPropertiesMESA.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceImageAlignmentControlPropertiesMESA.supportedImageAlignmentMask ); + return seed; + } + }; + template <> struct hash { @@ -9385,29 +9522,36 @@ namespace std }; template <> - struct hash + struct hash { std::size_t - operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceLayeredDriverPropertiesMSFT const & physicalDeviceLayeredDriverPropertiesMSFT ) const VULKAN_HPP_NOEXCEPT + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceLayeredApiPropertiesKHR const & physicalDeviceLayeredApiPropertiesKHR ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLayeredDriverPropertiesMSFT.sType ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLayeredDriverPropertiesMSFT.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLayeredDriverPropertiesMSFT.underlyingAPI ); + 
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLayeredApiPropertiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLayeredApiPropertiesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLayeredApiPropertiesKHR.vendorID ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLayeredApiPropertiesKHR.deviceID ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLayeredApiPropertiesKHR.layeredAPI ); + for ( size_t i = 0; i < VK_MAX_PHYSICAL_DEVICE_NAME_SIZE; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLayeredApiPropertiesKHR.deviceName[i] ); + } return seed; } }; template <> - struct hash + struct hash { std::size_t - operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceLegacyDitheringFeaturesEXT const & physicalDeviceLegacyDitheringFeaturesEXT ) const VULKAN_HPP_NOEXCEPT + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceLayeredApiPropertiesListKHR const & physicalDeviceLayeredApiPropertiesListKHR ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLegacyDitheringFeaturesEXT.sType ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLegacyDitheringFeaturesEXT.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLegacyDitheringFeaturesEXT.legacyDithering ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLayeredApiPropertiesListKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLayeredApiPropertiesListKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLayeredApiPropertiesListKHR.layeredApiCount ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLayeredApiPropertiesListKHR.pLayeredApis ); return seed; } }; @@ -9546,6 +9690,129 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties const & physicalDeviceSparseProperties ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSparseProperties.residencyStandard2DBlockShape ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSparseProperties.residencyStandard2DMultisampleBlockShape ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSparseProperties.residencyStandard3DBlockShape ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSparseProperties.residencyAlignedMipSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSparseProperties.residencyNonResidentStrict ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties const & physicalDeviceProperties ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProperties.apiVersion ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProperties.driverVersion ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProperties.vendorID ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProperties.deviceID ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProperties.deviceType ); + for ( size_t i = 0; i < VK_MAX_PHYSICAL_DEVICE_NAME_SIZE; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProperties.deviceName[i] ); + } + for ( size_t i = 0; i < VK_UUID_SIZE; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProperties.pipelineCacheUUID[i] ); + } + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProperties.limits ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProperties.sparseProperties ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 const & physicalDeviceProperties2 ) const VULKAN_HPP_NOEXCEPT + { + 
std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProperties2.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProperties2.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProperties2.properties ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceLayeredApiVulkanPropertiesKHR const & physicalDeviceLayeredApiVulkanPropertiesKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLayeredApiVulkanPropertiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLayeredApiVulkanPropertiesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLayeredApiVulkanPropertiesKHR.properties ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceLayeredDriverPropertiesMSFT const & physicalDeviceLayeredDriverPropertiesMSFT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLayeredDriverPropertiesMSFT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLayeredDriverPropertiesMSFT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLayeredDriverPropertiesMSFT.underlyingAPI ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceLegacyDitheringFeaturesEXT const & physicalDeviceLegacyDitheringFeaturesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLegacyDitheringFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLegacyDitheringFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLegacyDitheringFeaturesEXT.legacyDithering ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( + VULKAN_HPP_NAMESPACE::PhysicalDeviceLegacyVertexAttributesFeaturesEXT const & physicalDeviceLegacyVertexAttributesFeaturesEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLegacyVertexAttributesFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLegacyVertexAttributesFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLegacyVertexAttributesFeaturesEXT.legacyVertexAttributes ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceLegacyVertexAttributesPropertiesEXT const & physicalDeviceLegacyVertexAttributesPropertiesEXT ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLegacyVertexAttributesPropertiesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLegacyVertexAttributesPropertiesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLegacyVertexAttributesPropertiesEXT.nativeUnalignedPerformance ); + return seed; + } + }; + template <> struct hash { @@ -9696,6 +9963,41 @@ namespace std } }; + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance7FeaturesKHR const & physicalDeviceMaintenance7FeaturesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance7FeaturesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance7FeaturesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance7FeaturesKHR.maintenance7 ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + 
operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance7PropertiesKHR const & physicalDeviceMaintenance7PropertiesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance7PropertiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance7PropertiesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance7PropertiesKHR.robustFragmentShadingRateAttachmentAccess ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance7PropertiesKHR.separateDepthStencilAttachmentAccess ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance7PropertiesKHR.maxDescriptorSetTotalUniformBuffersDynamic ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance7PropertiesKHR.maxDescriptorSetTotalStorageBuffersDynamic ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance7PropertiesKHR.maxDescriptorSetTotalBuffersDynamic ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance7PropertiesKHR.maxDescriptorSetUpdateAfterBindTotalUniformBuffersDynamic ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance7PropertiesKHR.maxDescriptorSetUpdateAfterBindTotalStorageBuffersDynamic ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance7PropertiesKHR.maxDescriptorSetUpdateAfterBindTotalBuffersDynamic ); + return seed; + } + }; + template <> struct hash { @@ -10252,6 +10554,38 @@ namespace std } }; + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineBinaryFeaturesKHR const & physicalDevicePipelineBinaryFeaturesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineBinaryFeaturesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineBinaryFeaturesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineBinaryFeaturesKHR.pipelineBinaries ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineBinaryPropertiesKHR const & physicalDevicePipelineBinaryPropertiesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineBinaryPropertiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineBinaryPropertiesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineBinaryPropertiesKHR.pipelineBinaryInternalCache ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineBinaryPropertiesKHR.pipelineBinaryInternalCacheControl ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineBinaryPropertiesKHR.pipelineBinaryPrefersInternalCache ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineBinaryPropertiesKHR.pipelineBinaryPrecompiledInternalCache ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineBinaryPropertiesKHR.pipelineBinaryCompressedData ); + return seed; + } + }; + template <> struct hash { @@ -10498,59 +10832,6 @@ namespace std } }; - template <> - struct hash - { - std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties const & physicalDeviceSparseProperties ) const VULKAN_HPP_NOEXCEPT - { - std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSparseProperties.residencyStandard2DBlockShape ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSparseProperties.residencyStandard2DMultisampleBlockShape ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSparseProperties.residencyStandard3DBlockShape ); - VULKAN_HPP_HASH_COMBINE( seed, 
physicalDeviceSparseProperties.residencyAlignedMipSize ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceSparseProperties.residencyNonResidentStrict ); - return seed; - } - }; - - template <> - struct hash - { - std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties const & physicalDeviceProperties ) const VULKAN_HPP_NOEXCEPT - { - std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProperties.apiVersion ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProperties.driverVersion ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProperties.vendorID ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProperties.deviceID ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProperties.deviceType ); - for ( size_t i = 0; i < VK_MAX_PHYSICAL_DEVICE_NAME_SIZE; ++i ) - { - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProperties.deviceName[i] ); - } - for ( size_t i = 0; i < VK_UUID_SIZE; ++i ) - { - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProperties.pipelineCacheUUID[i] ); - } - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProperties.limits ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProperties.sparseProperties ); - return seed; - } - }; - - template <> - struct hash - { - std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 const & physicalDeviceProperties2 ) const VULKAN_HPP_NOEXCEPT - { - std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProperties2.sType ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProperties2.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceProperties2.properties ); - return seed; - } - }; - template <> struct hash { @@ -11516,6 +11797,35 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR const & + physicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR.shaderRelaxedExtendedInstruction ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderReplicatedCompositesFeaturesEXT const & physicalDeviceShaderReplicatedCompositesFeaturesEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderReplicatedCompositesFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderReplicatedCompositesFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderReplicatedCompositesFeaturesEXT.shaderReplicatedComposites ); + return seed; + } + }; + template <> struct hash { @@ -12475,6 +12785,116 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineBinaryKeyKHR const & pipelineBinaryKeyKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineBinaryKeyKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineBinaryKeyKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineBinaryKeyKHR.keySize ); + for ( size_t i = 0; i < VK_MAX_PIPELINE_BINARY_KEY_SIZE_KHR; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, pipelineBinaryKeyKHR.key[i] ); + } + return seed; + } + }; + + template <> + struct hash + { + std::size_t 
operator()( VULKAN_HPP_NAMESPACE::PipelineBinaryDataKHR const & pipelineBinaryDataKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineBinaryDataKHR.dataSize ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineBinaryDataKHR.pData ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineBinaryKeysAndDataKHR const & pipelineBinaryKeysAndDataKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineBinaryKeysAndDataKHR.binaryCount ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineBinaryKeysAndDataKHR.pPipelineBinaryKeys ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineBinaryKeysAndDataKHR.pPipelineBinaryData ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineCreateInfoKHR const & pipelineCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineCreateInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineCreateInfoKHR.pNext ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineBinaryCreateInfoKHR const & pipelineBinaryCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineBinaryCreateInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineBinaryCreateInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineBinaryCreateInfoKHR.pKeysAndDataInfo ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineBinaryCreateInfoKHR.pipeline ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineBinaryCreateInfoKHR.pPipelineCreateInfo ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineBinaryDataInfoKHR const & pipelineBinaryDataInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineBinaryDataInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineBinaryDataInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineBinaryDataInfoKHR.pipelineBinary ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineBinaryHandlesInfoKHR const & pipelineBinaryHandlesInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineBinaryHandlesInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineBinaryHandlesInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineBinaryHandlesInfoKHR.pipelineBinaryCount ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineBinaryHandlesInfoKHR.pPipelineBinaries ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineBinaryInfoKHR const & pipelineBinaryInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineBinaryInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineBinaryInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineBinaryInfoKHR.binaryCount ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineBinaryInfoKHR.pPipelineBinaries ); + return seed; + } + }; + template <> struct hash { @@ -13689,6 +14109,19 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::ReleaseCapturedPipelineDataInfoKHR const & releaseCapturedPipelineDataInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, releaseCapturedPipelineDataInfoKHR.sType ); + 
VULKAN_HPP_HASH_COMBINE( seed, releaseCapturedPipelineDataInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, releaseCapturedPipelineDataInfoKHR.pipeline ); + return seed; + } + }; + template <> struct hash { diff --git a/third_party/vulkan/vulkan_metal.h b/third_party/vulkan/vulkan_metal.h index e6f7bf7..89a5574 100644 --- a/third_party/vulkan/vulkan_metal.h +++ b/third_party/vulkan/vulkan_metal.h @@ -52,28 +52,28 @@ VKAPI_ATTR VkResult VKAPI_CALL vkCreateMetalSurfaceEXT( #define VK_EXT_metal_objects 1 #ifdef __OBJC__ @protocol MTLDevice; -typedef id MTLDevice_id; +typedef __unsafe_unretained id MTLDevice_id; #else typedef void* MTLDevice_id; #endif #ifdef __OBJC__ @protocol MTLCommandQueue; -typedef id MTLCommandQueue_id; +typedef __unsafe_unretained id MTLCommandQueue_id; #else typedef void* MTLCommandQueue_id; #endif #ifdef __OBJC__ @protocol MTLBuffer; -typedef id MTLBuffer_id; +typedef __unsafe_unretained id MTLBuffer_id; #else typedef void* MTLBuffer_id; #endif #ifdef __OBJC__ @protocol MTLTexture; -typedef id MTLTexture_id; +typedef __unsafe_unretained id MTLTexture_id; #else typedef void* MTLTexture_id; #endif @@ -81,12 +81,12 @@ typedef void* MTLTexture_id; typedef struct __IOSurface* IOSurfaceRef; #ifdef __OBJC__ @protocol MTLSharedEvent; -typedef id MTLSharedEvent_id; +typedef __unsafe_unretained id MTLSharedEvent_id; #else typedef void* MTLSharedEvent_id; #endif -#define VK_EXT_METAL_OBJECTS_SPEC_VERSION 1 +#define VK_EXT_METAL_OBJECTS_SPEC_VERSION 2 #define VK_EXT_METAL_OBJECTS_EXTENSION_NAME "VK_EXT_metal_objects" typedef enum VkExportMetalObjectTypeFlagBitsEXT { diff --git a/third_party/vulkan/vulkan_raii.hpp b/third_party/vulkan/vulkan_raii.hpp index 0149b00..ac672bf 100644 --- a/third_party/vulkan/vulkan_raii.hpp +++ b/third_party/vulkan/vulkan_raii.hpp @@ -9,7 +9,7 @@ #define VULKAN_RAII_HPP #include // std::unique_ptr -#include // std::exchange, std::forward +#include // std::forward #include #if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) @@ -17,18 +17,6 @@ namespace VULKAN_HPP_NAMESPACE { namespace VULKAN_HPP_RAII_NAMESPACE { -# if ( 14 <= VULKAN_HPP_CPP_VERSION ) - using std::exchange; -# else - template - VULKAN_HPP_CONSTEXPR_14 VULKAN_HPP_INLINE T exchange( T & obj, U && newValue ) - { - T oldValue = std::move( obj ); - obj = std::forward( newValue ); - return oldValue; - } -# endif - template class CreateReturnType { @@ -1680,12 +1668,22 @@ namespace VULKAN_HPP_NAMESPACE PFN_vkGetDeviceImageSubresourceLayoutKHR( vkGetDeviceProcAddr( device, "vkGetDeviceImageSubresourceLayoutKHR" ) ); vkGetImageSubresourceLayout2KHR = PFN_vkGetImageSubresourceLayout2KHR( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout2KHR" ) ); + //=== VK_AMD_anti_lag === + vkAntiLagUpdateAMD = PFN_vkAntiLagUpdateAMD( vkGetDeviceProcAddr( device, "vkAntiLagUpdateAMD" ) ); + //=== VK_EXT_shader_object === vkCreateShadersEXT = PFN_vkCreateShadersEXT( vkGetDeviceProcAddr( device, "vkCreateShadersEXT" ) ); vkDestroyShaderEXT = PFN_vkDestroyShaderEXT( vkGetDeviceProcAddr( device, "vkDestroyShaderEXT" ) ); vkGetShaderBinaryDataEXT = PFN_vkGetShaderBinaryDataEXT( vkGetDeviceProcAddr( device, "vkGetShaderBinaryDataEXT" ) ); vkCmdBindShadersEXT = PFN_vkCmdBindShadersEXT( vkGetDeviceProcAddr( device, "vkCmdBindShadersEXT" ) ); + //=== VK_KHR_pipeline_binary === + vkCreatePipelineBinariesKHR = PFN_vkCreatePipelineBinariesKHR( vkGetDeviceProcAddr( device, "vkCreatePipelineBinariesKHR" ) ); + vkDestroyPipelineBinaryKHR = PFN_vkDestroyPipelineBinaryKHR( vkGetDeviceProcAddr( device, 
"vkDestroyPipelineBinaryKHR" ) ); + vkGetPipelineKeyKHR = PFN_vkGetPipelineKeyKHR( vkGetDeviceProcAddr( device, "vkGetPipelineKeyKHR" ) ); + vkGetPipelineBinaryDataKHR = PFN_vkGetPipelineBinaryDataKHR( vkGetDeviceProcAddr( device, "vkGetPipelineBinaryDataKHR" ) ); + vkReleaseCapturedPipelineDataKHR = PFN_vkReleaseCapturedPipelineDataKHR( vkGetDeviceProcAddr( device, "vkReleaseCapturedPipelineDataKHR" ) ); + //=== VK_QCOM_tile_properties === vkGetFramebufferTilePropertiesQCOM = PFN_vkGetFramebufferTilePropertiesQCOM( vkGetDeviceProcAddr( device, "vkGetFramebufferTilePropertiesQCOM" ) ); vkGetDynamicRenderingTilePropertiesQCOM = @@ -2564,12 +2562,22 @@ namespace VULKAN_HPP_NAMESPACE PFN_vkGetDeviceImageSubresourceLayoutKHR vkGetDeviceImageSubresourceLayoutKHR = 0; PFN_vkGetImageSubresourceLayout2KHR vkGetImageSubresourceLayout2KHR = 0; + //=== VK_AMD_anti_lag === + PFN_vkAntiLagUpdateAMD vkAntiLagUpdateAMD = 0; + //=== VK_EXT_shader_object === PFN_vkCreateShadersEXT vkCreateShadersEXT = 0; PFN_vkDestroyShaderEXT vkDestroyShaderEXT = 0; PFN_vkGetShaderBinaryDataEXT vkGetShaderBinaryDataEXT = 0; PFN_vkCmdBindShadersEXT vkCmdBindShadersEXT = 0; + //=== VK_KHR_pipeline_binary === + PFN_vkCreatePipelineBinariesKHR vkCreatePipelineBinariesKHR = 0; + PFN_vkDestroyPipelineBinaryKHR vkDestroyPipelineBinaryKHR = 0; + PFN_vkGetPipelineKeyKHR vkGetPipelineKeyKHR = 0; + PFN_vkGetPipelineBinaryDataKHR vkGetPipelineBinaryDataKHR = 0; + PFN_vkReleaseCapturedPipelineDataKHR vkReleaseCapturedPipelineDataKHR = 0; + //=== VK_QCOM_tile_properties === PFN_vkGetFramebufferTilePropertiesQCOM vkGetFramebufferTilePropertiesQCOM = 0; PFN_vkGetDynamicRenderingTilePropertiesQCOM vkGetDynamicRenderingTilePropertiesQCOM = 0; @@ -2706,6 +2714,9 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_EXT_shader_object === class ShaderEXT; + //=== VK_KHR_pipeline_binary === + class PipelineBinaryKHR; + //==================== //=== RAII HANDLES === //==================== @@ -2827,8 +2838,8 @@ namespace VULKAN_HPP_NAMESPACE Instance( Instance const & ) = delete; Instance( Instance && rhs ) VULKAN_HPP_NOEXCEPT - : m_instance( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_instance, {} ) ) - , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + : m_instance( VULKAN_HPP_NAMESPACE::exchange( rhs.m_instance, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) , m_dispatcher( rhs.m_dispatcher.release() ) { } @@ -2871,7 +2882,7 @@ namespace VULKAN_HPP_NAMESPACE { m_allocator = nullptr; m_dispatcher = nullptr; - return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_instance, nullptr ); + return VULKAN_HPP_NAMESPACE::exchange( m_instance, nullptr ); } VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * getDispatcher() const @@ -3096,8 +3107,8 @@ namespace VULKAN_HPP_NAMESPACE PhysicalDevice( PhysicalDevice const & rhs ) : m_physicalDevice( rhs.m_physicalDevice ), m_dispatcher( rhs.m_dispatcher ) {} PhysicalDevice( PhysicalDevice && rhs ) VULKAN_HPP_NOEXCEPT - : m_physicalDevice( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_physicalDevice, {} ) ) - , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + : m_physicalDevice( VULKAN_HPP_NAMESPACE::exchange( rhs.m_physicalDevice, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) { } @@ -3137,7 +3148,7 @@ namespace VULKAN_HPP_NAMESPACE 
VULKAN_HPP_NAMESPACE::PhysicalDevice release() { m_dispatcher = nullptr; - return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_physicalDevice, nullptr ); + return VULKAN_HPP_NAMESPACE::exchange( m_physicalDevice, nullptr ); } VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * getDispatcher() const @@ -3565,8 +3576,8 @@ namespace VULKAN_HPP_NAMESPACE Device( Device const & ) = delete; Device( Device && rhs ) VULKAN_HPP_NOEXCEPT - : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) - , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) , m_dispatcher( rhs.m_dispatcher.release() ) { } @@ -3609,7 +3620,7 @@ namespace VULKAN_HPP_NAMESPACE { m_allocator = nullptr; m_dispatcher = nullptr; - return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_device, nullptr ); + return VULKAN_HPP_NAMESPACE::exchange( m_device, nullptr ); } VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const @@ -4412,9 +4423,10 @@ namespace VULKAN_HPP_NAMESPACE getAccelerationStructureOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::AccelerationStructureCaptureDescriptorDataInfoEXT & info ) const; //=== VK_EXT_device_fault === - - VULKAN_HPP_NODISCARD std::pair getFaultInfoEXT() const; - + template + VULKAN_HPP_NODISCARD Result getFaultInfoEXT( VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT * pFaultCounts, + VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT * pFaultInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; # if defined( VK_USE_PLATFORM_FUCHSIA ) //=== VK_FUCHSIA_external_memory === @@ -4552,6 +4564,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain getImageSubresourceLayoutKHR( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfoKHR & info ) const VULKAN_HPP_NOEXCEPT; + //=== VK_AMD_anti_lag === + + void antiLagUpdateAMD( const VULKAN_HPP_NAMESPACE::AntiLagDataAMD & data ) const VULKAN_HPP_NOEXCEPT; + //=== VK_EXT_shader_object === VULKAN_HPP_NODISCARD @@ -4564,6 +4580,23 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; + //=== VK_KHR_pipeline_binary === + + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType>::Type + createPipelineBinariesKHR( VULKAN_HPP_NAMESPACE::PipelineBinaryCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PipelineBinaryKeyKHR getPipelineKeyKHR( + Optional pipelineCreateInfo VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const; + + VULKAN_HPP_NODISCARD std::pair> + getPipelineBinaryDataKHR( const VULKAN_HPP_NAMESPACE::PipelineBinaryDataInfoKHR & info ) const; + + void releaseCapturedPipelineDataKHR( const VULKAN_HPP_NAMESPACE::ReleaseCapturedPipelineDataInfoKHR & info, + Optional allocator + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + //=== VK_QCOM_tile_properties === VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::TilePropertiesQCOM @@ -4633,10 +4666,10 @@ namespace VULKAN_HPP_NAMESPACE AccelerationStructureKHR( AccelerationStructureKHR const & ) = delete; AccelerationStructureKHR( AccelerationStructureKHR && rhs ) VULKAN_HPP_NOEXCEPT - : m_device( 
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) - , m_accelerationStructure( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_accelerationStructure, {} ) ) - , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) - , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_accelerationStructure( VULKAN_HPP_NAMESPACE::exchange( rhs.m_accelerationStructure, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) { } @@ -4683,7 +4716,7 @@ namespace VULKAN_HPP_NAMESPACE m_device = nullptr; m_allocator = nullptr; m_dispatcher = nullptr; - return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_accelerationStructure, nullptr ); + return VULKAN_HPP_NAMESPACE::exchange( m_accelerationStructure, nullptr ); } VULKAN_HPP_NAMESPACE::Device getDevice() const @@ -4753,10 +4786,10 @@ namespace VULKAN_HPP_NAMESPACE AccelerationStructureNV( AccelerationStructureNV const & ) = delete; AccelerationStructureNV( AccelerationStructureNV && rhs ) VULKAN_HPP_NOEXCEPT - : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) - , m_accelerationStructure( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_accelerationStructure, {} ) ) - , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) - , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_accelerationStructure( VULKAN_HPP_NAMESPACE::exchange( rhs.m_accelerationStructure, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) { } @@ -4803,7 +4836,7 @@ namespace VULKAN_HPP_NAMESPACE m_device = nullptr; m_allocator = nullptr; m_dispatcher = nullptr; - return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_accelerationStructure, nullptr ); + return VULKAN_HPP_NAMESPACE::exchange( m_accelerationStructure, nullptr ); } VULKAN_HPP_NAMESPACE::Device getDevice() const @@ -4881,10 +4914,10 @@ namespace VULKAN_HPP_NAMESPACE Buffer( Buffer const & ) = delete; Buffer( Buffer && rhs ) VULKAN_HPP_NOEXCEPT - : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) - , m_buffer( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_buffer, {} ) ) - , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) - , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_buffer( VULKAN_HPP_NAMESPACE::exchange( rhs.m_buffer, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) { } @@ -4930,7 +4963,7 @@ namespace VULKAN_HPP_NAMESPACE m_device = nullptr; m_allocator = nullptr; m_dispatcher = nullptr; - return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_buffer, nullptr ); + return VULKAN_HPP_NAMESPACE::exchange( m_buffer, nullptr ); } VULKAN_HPP_NAMESPACE::Device getDevice() const @@ -5007,10 +5040,10 @@ namespace 
VULKAN_HPP_NAMESPACE BufferCollectionFUCHSIA( BufferCollectionFUCHSIA const & ) = delete; BufferCollectionFUCHSIA( BufferCollectionFUCHSIA && rhs ) VULKAN_HPP_NOEXCEPT - : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) - , m_collection( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_collection, {} ) ) - , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) - , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_collection( VULKAN_HPP_NAMESPACE::exchange( rhs.m_collection, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) { } @@ -5057,7 +5090,7 @@ namespace VULKAN_HPP_NAMESPACE m_device = nullptr; m_allocator = nullptr; m_dispatcher = nullptr; - return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_collection, nullptr ); + return VULKAN_HPP_NAMESPACE::exchange( m_collection, nullptr ); } VULKAN_HPP_NAMESPACE::Device getDevice() const @@ -5136,10 +5169,10 @@ namespace VULKAN_HPP_NAMESPACE BufferView( BufferView const & ) = delete; BufferView( BufferView && rhs ) VULKAN_HPP_NOEXCEPT - : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) - , m_bufferView( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_bufferView, {} ) ) - , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) - , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_bufferView( VULKAN_HPP_NAMESPACE::exchange( rhs.m_bufferView, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) { } @@ -5185,7 +5218,7 @@ namespace VULKAN_HPP_NAMESPACE m_device = nullptr; m_allocator = nullptr; m_dispatcher = nullptr; - return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_bufferView, nullptr ); + return VULKAN_HPP_NAMESPACE::exchange( m_bufferView, nullptr ); } VULKAN_HPP_NAMESPACE::Device getDevice() const @@ -5255,10 +5288,10 @@ namespace VULKAN_HPP_NAMESPACE CommandPool( CommandPool const & ) = delete; CommandPool( CommandPool && rhs ) VULKAN_HPP_NOEXCEPT - : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) - , m_commandPool( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_commandPool, {} ) ) - , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) - , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_commandPool( VULKAN_HPP_NAMESPACE::exchange( rhs.m_commandPool, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) { } @@ -5304,7 +5337,7 @@ namespace VULKAN_HPP_NAMESPACE m_device = nullptr; m_allocator = nullptr; m_dispatcher = nullptr; - return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_commandPool, nullptr ); + return VULKAN_HPP_NAMESPACE::exchange( m_commandPool, nullptr ); } VULKAN_HPP_NAMESPACE::Device 
getDevice() const @@ -5372,10 +5405,10 @@ namespace VULKAN_HPP_NAMESPACE CommandBuffer( CommandBuffer const & ) = delete; CommandBuffer( CommandBuffer && rhs ) VULKAN_HPP_NOEXCEPT - : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) - , m_commandPool( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_commandPool, {} ) ) - , m_commandBuffer( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_commandBuffer, {} ) ) - , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_commandPool( VULKAN_HPP_NAMESPACE::exchange( rhs.m_commandPool, {} ) ) + , m_commandBuffer( VULKAN_HPP_NAMESPACE::exchange( rhs.m_commandBuffer, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) { } @@ -5421,7 +5454,7 @@ namespace VULKAN_HPP_NAMESPACE m_device = nullptr; m_commandPool = nullptr; m_dispatcher = nullptr; - return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_commandBuffer, nullptr ); + return VULKAN_HPP_NAMESPACE::exchange( m_commandBuffer, nullptr ); } VULKAN_HPP_NAMESPACE::Device getDevice() const @@ -6036,7 +6069,8 @@ namespace VULKAN_HPP_NAMESPACE void setRenderingAttachmentLocationsKHR( const VULKAN_HPP_NAMESPACE::RenderingAttachmentLocationInfoKHR & locationInfo ) const VULKAN_HPP_NOEXCEPT; - void setRenderingInputAttachmentIndicesKHR( const VULKAN_HPP_NAMESPACE::RenderingInputAttachmentIndexInfoKHR & locationInfo ) const VULKAN_HPP_NOEXCEPT; + void setRenderingInputAttachmentIndicesKHR( const VULKAN_HPP_NAMESPACE::RenderingInputAttachmentIndexInfoKHR & inputAttachmentIndexInfo ) const + VULKAN_HPP_NOEXCEPT; //=== VK_EXT_line_rasterization === @@ -6456,10 +6490,10 @@ namespace VULKAN_HPP_NAMESPACE CuFunctionNVX( CuFunctionNVX const & ) = delete; CuFunctionNVX( CuFunctionNVX && rhs ) VULKAN_HPP_NOEXCEPT - : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) - , m_function( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_function, {} ) ) - , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) - , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_function( VULKAN_HPP_NAMESPACE::exchange( rhs.m_function, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) { } @@ -6505,7 +6539,7 @@ namespace VULKAN_HPP_NAMESPACE m_device = nullptr; m_allocator = nullptr; m_dispatcher = nullptr; - return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_function, nullptr ); + return VULKAN_HPP_NAMESPACE::exchange( m_function, nullptr ); } VULKAN_HPP_NAMESPACE::Device getDevice() const @@ -6575,10 +6609,10 @@ namespace VULKAN_HPP_NAMESPACE CuModuleNVX( CuModuleNVX const & ) = delete; CuModuleNVX( CuModuleNVX && rhs ) VULKAN_HPP_NOEXCEPT - : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) - , m_module( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_module, {} ) ) - , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) - , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + : m_device( 
VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_module( VULKAN_HPP_NAMESPACE::exchange( rhs.m_module, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) { } @@ -6624,7 +6658,7 @@ namespace VULKAN_HPP_NAMESPACE m_device = nullptr; m_allocator = nullptr; m_dispatcher = nullptr; - return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_module, nullptr ); + return VULKAN_HPP_NAMESPACE::exchange( m_module, nullptr ); } VULKAN_HPP_NAMESPACE::Device getDevice() const @@ -6695,10 +6729,10 @@ namespace VULKAN_HPP_NAMESPACE CudaFunctionNV( CudaFunctionNV const & ) = delete; CudaFunctionNV( CudaFunctionNV && rhs ) VULKAN_HPP_NOEXCEPT - : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) - , m_function( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_function, {} ) ) - , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) - , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_function( VULKAN_HPP_NAMESPACE::exchange( rhs.m_function, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) { } @@ -6744,7 +6778,7 @@ namespace VULKAN_HPP_NAMESPACE m_device = nullptr; m_allocator = nullptr; m_dispatcher = nullptr; - return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_function, nullptr ); + return VULKAN_HPP_NAMESPACE::exchange( m_function, nullptr ); } VULKAN_HPP_NAMESPACE::Device getDevice() const @@ -6816,10 +6850,10 @@ namespace VULKAN_HPP_NAMESPACE CudaModuleNV( CudaModuleNV const & ) = delete; CudaModuleNV( CudaModuleNV && rhs ) VULKAN_HPP_NOEXCEPT - : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) - , m_module( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_module, {} ) ) - , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) - , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_module( VULKAN_HPP_NAMESPACE::exchange( rhs.m_module, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) { } @@ -6865,7 +6899,7 @@ namespace VULKAN_HPP_NAMESPACE m_device = nullptr; m_allocator = nullptr; m_dispatcher = nullptr; - return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_module, nullptr ); + return VULKAN_HPP_NAMESPACE::exchange( m_module, nullptr ); } VULKAN_HPP_NAMESPACE::Device getDevice() const @@ -6940,10 +6974,10 @@ namespace VULKAN_HPP_NAMESPACE DebugReportCallbackEXT( DebugReportCallbackEXT const & ) = delete; DebugReportCallbackEXT( DebugReportCallbackEXT && rhs ) VULKAN_HPP_NOEXCEPT - : m_instance( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_instance, {} ) ) - , m_callback( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_callback, {} ) ) - , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) - , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + : 
m_instance( VULKAN_HPP_NAMESPACE::exchange( rhs.m_instance, {} ) ) + , m_callback( VULKAN_HPP_NAMESPACE::exchange( rhs.m_callback, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) { } @@ -6990,7 +7024,7 @@ namespace VULKAN_HPP_NAMESPACE m_instance = nullptr; m_allocator = nullptr; m_dispatcher = nullptr; - return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_callback, nullptr ); + return VULKAN_HPP_NAMESPACE::exchange( m_callback, nullptr ); } VULKAN_HPP_NAMESPACE::Instance getInstance() const @@ -7060,10 +7094,10 @@ namespace VULKAN_HPP_NAMESPACE DebugUtilsMessengerEXT( DebugUtilsMessengerEXT const & ) = delete; DebugUtilsMessengerEXT( DebugUtilsMessengerEXT && rhs ) VULKAN_HPP_NOEXCEPT - : m_instance( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_instance, {} ) ) - , m_messenger( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_messenger, {} ) ) - , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) - , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + : m_instance( VULKAN_HPP_NAMESPACE::exchange( rhs.m_instance, {} ) ) + , m_messenger( VULKAN_HPP_NAMESPACE::exchange( rhs.m_messenger, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) { } @@ -7110,7 +7144,7 @@ namespace VULKAN_HPP_NAMESPACE m_instance = nullptr; m_allocator = nullptr; m_dispatcher = nullptr; - return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_messenger, nullptr ); + return VULKAN_HPP_NAMESPACE::exchange( m_messenger, nullptr ); } VULKAN_HPP_NAMESPACE::Instance getInstance() const @@ -7179,10 +7213,10 @@ namespace VULKAN_HPP_NAMESPACE DeferredOperationKHR( DeferredOperationKHR const & ) = delete; DeferredOperationKHR( DeferredOperationKHR && rhs ) VULKAN_HPP_NOEXCEPT - : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) - , m_operation( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_operation, {} ) ) - , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) - , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_operation( VULKAN_HPP_NAMESPACE::exchange( rhs.m_operation, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) { } @@ -7229,7 +7263,7 @@ namespace VULKAN_HPP_NAMESPACE m_device = nullptr; m_allocator = nullptr; m_dispatcher = nullptr; - return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_operation, nullptr ); + return VULKAN_HPP_NAMESPACE::exchange( m_operation, nullptr ); } VULKAN_HPP_NAMESPACE::Device getDevice() const @@ -7307,10 +7341,10 @@ namespace VULKAN_HPP_NAMESPACE DescriptorPool( DescriptorPool const & ) = delete; DescriptorPool( DescriptorPool && rhs ) VULKAN_HPP_NOEXCEPT - : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) - , m_descriptorPool( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_descriptorPool, {} ) ) - , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) - , 
m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_descriptorPool( VULKAN_HPP_NAMESPACE::exchange( rhs.m_descriptorPool, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) { } @@ -7357,7 +7391,7 @@ namespace VULKAN_HPP_NAMESPACE m_device = nullptr; m_allocator = nullptr; m_dispatcher = nullptr; - return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_descriptorPool, nullptr ); + return VULKAN_HPP_NAMESPACE::exchange( m_descriptorPool, nullptr ); } VULKAN_HPP_NAMESPACE::Device getDevice() const @@ -7417,10 +7451,10 @@ namespace VULKAN_HPP_NAMESPACE DescriptorSet( DescriptorSet const & ) = delete; DescriptorSet( DescriptorSet && rhs ) VULKAN_HPP_NOEXCEPT - : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) - , m_descriptorPool( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_descriptorPool, {} ) ) - , m_descriptorSet( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_descriptorSet, {} ) ) - , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_descriptorPool( VULKAN_HPP_NAMESPACE::exchange( rhs.m_descriptorPool, {} ) ) + , m_descriptorSet( VULKAN_HPP_NAMESPACE::exchange( rhs.m_descriptorSet, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) { } @@ -7468,7 +7502,7 @@ namespace VULKAN_HPP_NAMESPACE m_device = nullptr; m_descriptorPool = nullptr; m_dispatcher = nullptr; - return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_descriptorSet, nullptr ); + return VULKAN_HPP_NAMESPACE::exchange( m_descriptorSet, nullptr ); } VULKAN_HPP_NAMESPACE::Device getDevice() const @@ -7578,10 +7612,10 @@ namespace VULKAN_HPP_NAMESPACE DescriptorSetLayout( DescriptorSetLayout const & ) = delete; DescriptorSetLayout( DescriptorSetLayout && rhs ) VULKAN_HPP_NOEXCEPT - : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) - , m_descriptorSetLayout( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_descriptorSetLayout, {} ) ) - , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) - , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_descriptorSetLayout( VULKAN_HPP_NAMESPACE::exchange( rhs.m_descriptorSetLayout, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) { } @@ -7628,7 +7662,7 @@ namespace VULKAN_HPP_NAMESPACE m_device = nullptr; m_allocator = nullptr; m_dispatcher = nullptr; - return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_descriptorSetLayout, nullptr ); + return VULKAN_HPP_NAMESPACE::exchange( m_descriptorSetLayout, nullptr ); } VULKAN_HPP_NAMESPACE::Device getDevice() const @@ -7704,10 +7738,10 @@ namespace VULKAN_HPP_NAMESPACE DescriptorUpdateTemplate( DescriptorUpdateTemplate const & ) = delete; DescriptorUpdateTemplate( DescriptorUpdateTemplate && rhs ) VULKAN_HPP_NOEXCEPT - : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) - , 
m_descriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_descriptorUpdateTemplate, {} ) ) - , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) - , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_descriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::exchange( rhs.m_descriptorUpdateTemplate, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) { } @@ -7754,7 +7788,7 @@ namespace VULKAN_HPP_NAMESPACE m_device = nullptr; m_allocator = nullptr; m_dispatcher = nullptr; - return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_descriptorUpdateTemplate, nullptr ); + return VULKAN_HPP_NAMESPACE::exchange( m_descriptorUpdateTemplate, nullptr ); } VULKAN_HPP_NAMESPACE::Device getDevice() const @@ -7824,10 +7858,10 @@ namespace VULKAN_HPP_NAMESPACE DeviceMemory( DeviceMemory const & ) = delete; DeviceMemory( DeviceMemory && rhs ) VULKAN_HPP_NOEXCEPT - : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) - , m_memory( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_memory, {} ) ) - , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) - , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_memory( VULKAN_HPP_NAMESPACE::exchange( rhs.m_memory, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) { } @@ -7873,7 +7907,7 @@ namespace VULKAN_HPP_NAMESPACE m_device = nullptr; m_allocator = nullptr; m_dispatcher = nullptr; - return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_memory, nullptr ); + return VULKAN_HPP_NAMESPACE::exchange( m_memory, nullptr ); } VULKAN_HPP_NAMESPACE::Device getDevice() const @@ -7974,9 +8008,9 @@ namespace VULKAN_HPP_NAMESPACE DisplayKHR( DisplayKHR const & ) = delete; DisplayKHR( DisplayKHR && rhs ) VULKAN_HPP_NOEXCEPT - : m_physicalDevice( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_physicalDevice, {} ) ) - , m_display( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_display, {} ) ) - , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + : m_physicalDevice( VULKAN_HPP_NAMESPACE::exchange( rhs.m_physicalDevice, {} ) ) + , m_display( VULKAN_HPP_NAMESPACE::exchange( rhs.m_display, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) { } @@ -8018,7 +8052,7 @@ namespace VULKAN_HPP_NAMESPACE { m_physicalDevice = nullptr; m_dispatcher = nullptr; - return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_display, nullptr ); + return VULKAN_HPP_NAMESPACE::exchange( m_display, nullptr ); } VULKAN_HPP_NAMESPACE::PhysicalDevice getPhysicalDevice() const @@ -8126,9 +8160,9 @@ namespace VULKAN_HPP_NAMESPACE DisplayModeKHR( DisplayModeKHR const & rhs ) : m_displayModeKHR( rhs.m_displayModeKHR ), m_dispatcher( rhs.m_dispatcher ) {} DisplayModeKHR( DisplayModeKHR && rhs ) VULKAN_HPP_NOEXCEPT - : m_physicalDevice( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_physicalDevice, {} ) ) - , 
m_displayModeKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_displayModeKHR, {} ) ) - , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + : m_physicalDevice( VULKAN_HPP_NAMESPACE::exchange( rhs.m_physicalDevice, {} ) ) + , m_displayModeKHR( VULKAN_HPP_NAMESPACE::exchange( rhs.m_displayModeKHR, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) { } @@ -8171,7 +8205,7 @@ namespace VULKAN_HPP_NAMESPACE { m_physicalDevice = nullptr; m_dispatcher = nullptr; - return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_displayModeKHR, nullptr ); + return VULKAN_HPP_NAMESPACE::exchange( m_displayModeKHR, nullptr ); } VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * getDispatcher() const @@ -8238,10 +8272,10 @@ namespace VULKAN_HPP_NAMESPACE Event( Event const & ) = delete; Event( Event && rhs ) VULKAN_HPP_NOEXCEPT - : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) - , m_event( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_event, {} ) ) - , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) - , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_event( VULKAN_HPP_NAMESPACE::exchange( rhs.m_event, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) { } @@ -8287,7 +8321,7 @@ namespace VULKAN_HPP_NAMESPACE m_device = nullptr; m_allocator = nullptr; m_dispatcher = nullptr; - return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_event, nullptr ); + return VULKAN_HPP_NAMESPACE::exchange( m_event, nullptr ); } VULKAN_HPP_NAMESPACE::Device getDevice() const @@ -8384,10 +8418,10 @@ namespace VULKAN_HPP_NAMESPACE Fence( Fence const & ) = delete; Fence( Fence && rhs ) VULKAN_HPP_NOEXCEPT - : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) - , m_fence( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_fence, {} ) ) - , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) - , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_fence( VULKAN_HPP_NAMESPACE::exchange( rhs.m_fence, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) { } @@ -8433,7 +8467,7 @@ namespace VULKAN_HPP_NAMESPACE m_device = nullptr; m_allocator = nullptr; m_dispatcher = nullptr; - return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_fence, nullptr ); + return VULKAN_HPP_NAMESPACE::exchange( m_fence, nullptr ); } VULKAN_HPP_NAMESPACE::Device getDevice() const @@ -8507,10 +8541,10 @@ namespace VULKAN_HPP_NAMESPACE Framebuffer( Framebuffer const & ) = delete; Framebuffer( Framebuffer && rhs ) VULKAN_HPP_NOEXCEPT - : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) - , m_framebuffer( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_framebuffer, {} ) ) - , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) - , 
m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_framebuffer( VULKAN_HPP_NAMESPACE::exchange( rhs.m_framebuffer, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) { } @@ -8556,7 +8590,7 @@ namespace VULKAN_HPP_NAMESPACE m_device = nullptr; m_allocator = nullptr; m_dispatcher = nullptr; - return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_framebuffer, nullptr ); + return VULKAN_HPP_NAMESPACE::exchange( m_framebuffer, nullptr ); } VULKAN_HPP_NAMESPACE::Device getDevice() const @@ -8630,10 +8664,10 @@ namespace VULKAN_HPP_NAMESPACE Image( Image const & ) = delete; Image( Image && rhs ) VULKAN_HPP_NOEXCEPT - : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) - , m_image( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_image, {} ) ) - , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) - , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_image( VULKAN_HPP_NAMESPACE::exchange( rhs.m_image, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) { } @@ -8679,7 +8713,7 @@ namespace VULKAN_HPP_NAMESPACE m_device = nullptr; m_allocator = nullptr; m_dispatcher = nullptr; - return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_image, nullptr ); + return VULKAN_HPP_NAMESPACE::exchange( m_image, nullptr ); } VULKAN_HPP_NAMESPACE::Device getDevice() const @@ -8782,10 +8816,10 @@ namespace VULKAN_HPP_NAMESPACE ImageView( ImageView const & ) = delete; ImageView( ImageView && rhs ) VULKAN_HPP_NOEXCEPT - : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) - , m_imageView( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_imageView, {} ) ) - , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) - , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_imageView( VULKAN_HPP_NAMESPACE::exchange( rhs.m_imageView, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) { } @@ -8831,7 +8865,7 @@ namespace VULKAN_HPP_NAMESPACE m_device = nullptr; m_allocator = nullptr; m_dispatcher = nullptr; - return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_imageView, nullptr ); + return VULKAN_HPP_NAMESPACE::exchange( m_imageView, nullptr ); } VULKAN_HPP_NAMESPACE::Device getDevice() const @@ -8905,10 +8939,10 @@ namespace VULKAN_HPP_NAMESPACE IndirectCommandsLayoutNV( IndirectCommandsLayoutNV const & ) = delete; IndirectCommandsLayoutNV( IndirectCommandsLayoutNV && rhs ) VULKAN_HPP_NOEXCEPT - : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) - , m_indirectCommandsLayout( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_indirectCommandsLayout, {} ) ) - , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, 
{} ) ) - , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_indirectCommandsLayout( VULKAN_HPP_NAMESPACE::exchange( rhs.m_indirectCommandsLayout, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) { } @@ -8955,7 +8989,7 @@ namespace VULKAN_HPP_NAMESPACE m_device = nullptr; m_allocator = nullptr; m_dispatcher = nullptr; - return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_indirectCommandsLayout, nullptr ); + return VULKAN_HPP_NAMESPACE::exchange( m_indirectCommandsLayout, nullptr ); } VULKAN_HPP_NAMESPACE::Device getDevice() const @@ -9025,10 +9059,10 @@ namespace VULKAN_HPP_NAMESPACE MicromapEXT( MicromapEXT const & ) = delete; MicromapEXT( MicromapEXT && rhs ) VULKAN_HPP_NOEXCEPT - : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) - , m_micromap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_micromap, {} ) ) - , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) - , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_micromap( VULKAN_HPP_NAMESPACE::exchange( rhs.m_micromap, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) { } @@ -9074,7 +9108,7 @@ namespace VULKAN_HPP_NAMESPACE m_device = nullptr; m_allocator = nullptr; m_dispatcher = nullptr; - return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_micromap, nullptr ); + return VULKAN_HPP_NAMESPACE::exchange( m_micromap, nullptr ); } VULKAN_HPP_NAMESPACE::Device getDevice() const @@ -9144,10 +9178,10 @@ namespace VULKAN_HPP_NAMESPACE OpticalFlowSessionNV( OpticalFlowSessionNV const & ) = delete; OpticalFlowSessionNV( OpticalFlowSessionNV && rhs ) VULKAN_HPP_NOEXCEPT - : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) - , m_session( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_session, {} ) ) - , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) - , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_session( VULKAN_HPP_NAMESPACE::exchange( rhs.m_session, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) { } @@ -9194,7 +9228,7 @@ namespace VULKAN_HPP_NAMESPACE m_device = nullptr; m_allocator = nullptr; m_dispatcher = nullptr; - return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_session, nullptr ); + return VULKAN_HPP_NAMESPACE::exchange( m_session, nullptr ); } VULKAN_HPP_NAMESPACE::Device getDevice() const @@ -9264,9 +9298,9 @@ namespace VULKAN_HPP_NAMESPACE PerformanceConfigurationINTEL( PerformanceConfigurationINTEL const & ) = delete; PerformanceConfigurationINTEL( PerformanceConfigurationINTEL && rhs ) VULKAN_HPP_NOEXCEPT - : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) - , m_configuration( 
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_configuration, {} ) ) - , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_configuration( VULKAN_HPP_NAMESPACE::exchange( rhs.m_configuration, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) { } @@ -9309,7 +9343,7 @@ namespace VULKAN_HPP_NAMESPACE { m_device = nullptr; m_dispatcher = nullptr; - return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_configuration, nullptr ); + return VULKAN_HPP_NAMESPACE::exchange( m_configuration, nullptr ); } VULKAN_HPP_NAMESPACE::Device getDevice() const @@ -9377,10 +9411,10 @@ namespace VULKAN_HPP_NAMESPACE PipelineCache( PipelineCache const & ) = delete; PipelineCache( PipelineCache && rhs ) VULKAN_HPP_NOEXCEPT - : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) - , m_pipelineCache( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_pipelineCache, {} ) ) - , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) - , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_pipelineCache( VULKAN_HPP_NAMESPACE::exchange( rhs.m_pipelineCache, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) { } @@ -9427,7 +9461,7 @@ namespace VULKAN_HPP_NAMESPACE m_device = nullptr; m_allocator = nullptr; m_dispatcher = nullptr; - return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_pipelineCache, nullptr ); + return VULKAN_HPP_NAMESPACE::exchange( m_pipelineCache, nullptr ); } VULKAN_HPP_NAMESPACE::Device getDevice() const @@ -9549,11 +9583,11 @@ namespace VULKAN_HPP_NAMESPACE Pipeline( Pipeline const & ) = delete; Pipeline( Pipeline && rhs ) VULKAN_HPP_NOEXCEPT - : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) - , m_pipeline( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_pipeline, {} ) ) - , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) - , m_constructorSuccessCode( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_constructorSuccessCode, {} ) ) - , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_pipeline( VULKAN_HPP_NAMESPACE::exchange( rhs.m_pipeline, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_constructorSuccessCode( VULKAN_HPP_NAMESPACE::exchange( rhs.m_constructorSuccessCode, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) { } @@ -9602,7 +9636,7 @@ namespace VULKAN_HPP_NAMESPACE m_allocator = nullptr; m_constructorSuccessCode = VULKAN_HPP_NAMESPACE::Result::eErrorUnknown; m_dispatcher = nullptr; - return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_pipeline, nullptr ); + return VULKAN_HPP_NAMESPACE::exchange( m_pipeline, nullptr ); } VULKAN_HPP_NAMESPACE::Result getConstructorSuccessCode() const @@ -9750,6 +9784,157 @@ namespace VULKAN_HPP_NAMESPACE } }; + class PipelineBinaryKHR + { + public: + using CType = VkPipelineBinaryKHR; + using CppType 
= VULKAN_HPP_NAMESPACE::PipelineBinaryKHR; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePipelineBinaryKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + public: + PipelineBinaryKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkPipelineBinaryKHR pipelineBinary, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr, + VULKAN_HPP_NAMESPACE::Result successCode = VULKAN_HPP_NAMESPACE::Result::eSuccess ) + : m_device( device ) + , m_pipelineBinary( pipelineBinary ) + , m_allocator( static_cast( allocator ) ) + , m_constructorSuccessCode( successCode ) + , m_dispatcher( device.getDispatcher() ) + { + } + + PipelineBinaryKHR( std::nullptr_t ) {} + + ~PipelineBinaryKHR() + { + clear(); + } + + PipelineBinaryKHR() = delete; + PipelineBinaryKHR( PipelineBinaryKHR const & ) = delete; + + PipelineBinaryKHR( PipelineBinaryKHR && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_pipelineBinary( VULKAN_HPP_NAMESPACE::exchange( rhs.m_pipelineBinary, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_constructorSuccessCode( VULKAN_HPP_NAMESPACE::exchange( rhs.m_constructorSuccessCode, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + { + } + + PipelineBinaryKHR & operator=( PipelineBinaryKHR const & ) = delete; + + PipelineBinaryKHR & operator=( PipelineBinaryKHR && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + std::swap( m_device, rhs.m_device ); + std::swap( m_pipelineBinary, rhs.m_pipelineBinary ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_constructorSuccessCode, rhs.m_constructorSuccessCode ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::PipelineBinaryKHR const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_pipelineBinary; + } + + operator VULKAN_HPP_NAMESPACE::PipelineBinaryKHR() const VULKAN_HPP_NOEXCEPT + { + return m_pipelineBinary; + } + + void clear() VULKAN_HPP_NOEXCEPT + { + if ( m_pipelineBinary ) + { + getDispatcher()->vkDestroyPipelineBinaryKHR( static_cast( m_device ), + static_cast( m_pipelineBinary ), + reinterpret_cast( m_allocator ) ); + } + m_device = nullptr; + m_pipelineBinary = nullptr; + m_allocator = nullptr; + m_constructorSuccessCode = VULKAN_HPP_NAMESPACE::Result::eErrorUnknown; + m_dispatcher = nullptr; + } + + VULKAN_HPP_NAMESPACE::PipelineBinaryKHR release() + { + m_device = nullptr; + m_allocator = nullptr; + m_constructorSuccessCode = VULKAN_HPP_NAMESPACE::Result::eErrorUnknown; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::exchange( m_pipelineBinary, nullptr ); + } + + VULKAN_HPP_NAMESPACE::Result getConstructorSuccessCode() const + { + return m_constructorSuccessCode; + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineBinaryKHR & rhs ) VULKAN_HPP_NOEXCEPT + { + std::swap( m_device, rhs.m_device ); + std::swap( m_pipelineBinary, rhs.m_pipelineBinary ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( 
m_constructorSuccessCode, rhs.m_constructorSuccessCode ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::PipelineBinaryKHR m_pipelineBinary = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::Result m_constructorSuccessCode = VULKAN_HPP_NAMESPACE::Result::eErrorUnknown; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + class PipelineBinaryKHRs : public std::vector + { + public: +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + PipelineBinaryKHRs( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::PipelineBinaryCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + { + *this = device.createPipelineBinariesKHR( createInfo, allocator ); + } +# endif + + PipelineBinaryKHRs( std::nullptr_t ) {} + + PipelineBinaryKHRs() = delete; + PipelineBinaryKHRs( PipelineBinaryKHRs const & ) = delete; + PipelineBinaryKHRs( PipelineBinaryKHRs && rhs ) = default; + PipelineBinaryKHRs & operator=( PipelineBinaryKHRs const & ) = delete; + PipelineBinaryKHRs & operator=( PipelineBinaryKHRs && rhs ) = default; + + private: + PipelineBinaryKHRs( std::vector && rhs ) + { + std::swap( *this, rhs ); + } + }; + class PipelineLayout { public: @@ -9791,10 +9976,10 @@ namespace VULKAN_HPP_NAMESPACE PipelineLayout( PipelineLayout const & ) = delete; PipelineLayout( PipelineLayout && rhs ) VULKAN_HPP_NOEXCEPT - : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) - , m_pipelineLayout( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_pipelineLayout, {} ) ) - , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) - , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_pipelineLayout( VULKAN_HPP_NAMESPACE::exchange( rhs.m_pipelineLayout, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) { } @@ -9841,7 +10026,7 @@ namespace VULKAN_HPP_NAMESPACE m_device = nullptr; m_allocator = nullptr; m_dispatcher = nullptr; - return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_pipelineLayout, nullptr ); + return VULKAN_HPP_NAMESPACE::exchange( m_pipelineLayout, nullptr ); } VULKAN_HPP_NAMESPACE::Device getDevice() const @@ -9911,10 +10096,10 @@ namespace VULKAN_HPP_NAMESPACE PrivateDataSlot( PrivateDataSlot const & ) = delete; PrivateDataSlot( PrivateDataSlot && rhs ) VULKAN_HPP_NOEXCEPT - : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) - , m_privateDataSlot( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_privateDataSlot, {} ) ) - , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) - , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_privateDataSlot( VULKAN_HPP_NAMESPACE::exchange( rhs.m_privateDataSlot, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) { } @@ -9961,7 +10146,7 @@ namespace 
VULKAN_HPP_NAMESPACE m_device = nullptr; m_allocator = nullptr; m_dispatcher = nullptr; - return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_privateDataSlot, nullptr ); + return VULKAN_HPP_NAMESPACE::exchange( m_privateDataSlot, nullptr ); } VULKAN_HPP_NAMESPACE::Device getDevice() const @@ -10031,10 +10216,10 @@ namespace VULKAN_HPP_NAMESPACE QueryPool( QueryPool const & ) = delete; QueryPool( QueryPool && rhs ) VULKAN_HPP_NOEXCEPT - : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) - , m_queryPool( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_queryPool, {} ) ) - , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) - , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_queryPool( VULKAN_HPP_NAMESPACE::exchange( rhs.m_queryPool, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) { } @@ -10080,7 +10265,7 @@ namespace VULKAN_HPP_NAMESPACE m_device = nullptr; m_allocator = nullptr; m_dispatcher = nullptr; - return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_queryPool, nullptr ); + return VULKAN_HPP_NAMESPACE::exchange( m_queryPool, nullptr ); } VULKAN_HPP_NAMESPACE::Device getDevice() const @@ -10175,8 +10360,8 @@ namespace VULKAN_HPP_NAMESPACE Queue( Queue const & rhs ) : m_queue( rhs.m_queue ), m_dispatcher( rhs.m_dispatcher ) {} Queue( Queue && rhs ) VULKAN_HPP_NOEXCEPT - : m_queue( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_queue, {} ) ) - , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + : m_queue( VULKAN_HPP_NAMESPACE::exchange( rhs.m_queue, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) { } @@ -10216,7 +10401,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Queue release() { m_dispatcher = nullptr; - return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_queue, nullptr ); + return VULKAN_HPP_NAMESPACE::exchange( m_queue, nullptr ); } VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const @@ -10332,10 +10517,10 @@ namespace VULKAN_HPP_NAMESPACE RenderPass( RenderPass const & ) = delete; RenderPass( RenderPass && rhs ) VULKAN_HPP_NOEXCEPT - : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) - , m_renderPass( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_renderPass, {} ) ) - , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) - , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_renderPass( VULKAN_HPP_NAMESPACE::exchange( rhs.m_renderPass, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) { } @@ -10381,7 +10566,7 @@ namespace VULKAN_HPP_NAMESPACE m_device = nullptr; m_allocator = nullptr; m_dispatcher = nullptr; - return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_renderPass, nullptr ); + return VULKAN_HPP_NAMESPACE::exchange( m_renderPass, nullptr ); } VULKAN_HPP_NAMESPACE::Device 
getDevice() const @@ -10459,10 +10644,10 @@ namespace VULKAN_HPP_NAMESPACE Sampler( Sampler const & ) = delete; Sampler( Sampler && rhs ) VULKAN_HPP_NOEXCEPT - : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) - , m_sampler( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_sampler, {} ) ) - , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) - , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_sampler( VULKAN_HPP_NAMESPACE::exchange( rhs.m_sampler, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) { } @@ -10508,7 +10693,7 @@ namespace VULKAN_HPP_NAMESPACE m_device = nullptr; m_allocator = nullptr; m_dispatcher = nullptr; - return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_sampler, nullptr ); + return VULKAN_HPP_NAMESPACE::exchange( m_sampler, nullptr ); } VULKAN_HPP_NAMESPACE::Device getDevice() const @@ -10578,10 +10763,10 @@ namespace VULKAN_HPP_NAMESPACE SamplerYcbcrConversion( SamplerYcbcrConversion const & ) = delete; SamplerYcbcrConversion( SamplerYcbcrConversion && rhs ) VULKAN_HPP_NOEXCEPT - : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) - , m_ycbcrConversion( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_ycbcrConversion, {} ) ) - , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) - , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_ycbcrConversion( VULKAN_HPP_NAMESPACE::exchange( rhs.m_ycbcrConversion, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) { } @@ -10628,7 +10813,7 @@ namespace VULKAN_HPP_NAMESPACE m_device = nullptr; m_allocator = nullptr; m_dispatcher = nullptr; - return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_ycbcrConversion, nullptr ); + return VULKAN_HPP_NAMESPACE::exchange( m_ycbcrConversion, nullptr ); } VULKAN_HPP_NAMESPACE::Device getDevice() const @@ -10698,10 +10883,10 @@ namespace VULKAN_HPP_NAMESPACE Semaphore( Semaphore const & ) = delete; Semaphore( Semaphore && rhs ) VULKAN_HPP_NOEXCEPT - : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) - , m_semaphore( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_semaphore, {} ) ) - , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) - , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_semaphore( VULKAN_HPP_NAMESPACE::exchange( rhs.m_semaphore, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) { } @@ -10747,7 +10932,7 @@ namespace VULKAN_HPP_NAMESPACE m_device = nullptr; m_allocator = nullptr; m_dispatcher = nullptr; - return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_semaphore, nullptr ); + return VULKAN_HPP_NAMESPACE::exchange( m_semaphore, 
nullptr ); } VULKAN_HPP_NAMESPACE::Device getDevice() const @@ -10827,11 +11012,11 @@ namespace VULKAN_HPP_NAMESPACE ShaderEXT( ShaderEXT const & ) = delete; ShaderEXT( ShaderEXT && rhs ) VULKAN_HPP_NOEXCEPT - : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) - , m_shader( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_shader, {} ) ) - , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) - , m_constructorSuccessCode( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_constructorSuccessCode, {} ) ) - , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_shader( VULKAN_HPP_NAMESPACE::exchange( rhs.m_shader, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_constructorSuccessCode( VULKAN_HPP_NAMESPACE::exchange( rhs.m_constructorSuccessCode, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) { } @@ -10880,7 +11065,7 @@ namespace VULKAN_HPP_NAMESPACE m_allocator = nullptr; m_constructorSuccessCode = VULKAN_HPP_NAMESPACE::Result::eErrorUnknown; m_dispatcher = nullptr; - return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_shader, nullptr ); + return VULKAN_HPP_NAMESPACE::exchange( m_shader, nullptr ); } VULKAN_HPP_NAMESPACE::Result getConstructorSuccessCode() const @@ -10988,10 +11173,10 @@ namespace VULKAN_HPP_NAMESPACE ShaderModule( ShaderModule const & ) = delete; ShaderModule( ShaderModule && rhs ) VULKAN_HPP_NOEXCEPT - : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) - , m_shaderModule( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_shaderModule, {} ) ) - , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) - , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_shaderModule( VULKAN_HPP_NAMESPACE::exchange( rhs.m_shaderModule, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) { } @@ -11037,7 +11222,7 @@ namespace VULKAN_HPP_NAMESPACE m_device = nullptr; m_allocator = nullptr; m_dispatcher = nullptr; - return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_shaderModule, nullptr ); + return VULKAN_HPP_NAMESPACE::exchange( m_shaderModule, nullptr ); } VULKAN_HPP_NAMESPACE::Device getDevice() const @@ -11263,10 +11448,10 @@ namespace VULKAN_HPP_NAMESPACE SurfaceKHR( SurfaceKHR const & ) = delete; SurfaceKHR( SurfaceKHR && rhs ) VULKAN_HPP_NOEXCEPT - : m_instance( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_instance, {} ) ) - , m_surface( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_surface, {} ) ) - , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) - , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + : m_instance( VULKAN_HPP_NAMESPACE::exchange( rhs.m_instance, {} ) ) + , m_surface( VULKAN_HPP_NAMESPACE::exchange( rhs.m_surface, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr 
) ) { } @@ -11312,7 +11497,7 @@ namespace VULKAN_HPP_NAMESPACE m_instance = nullptr; m_allocator = nullptr; m_dispatcher = nullptr; - return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_surface, nullptr ); + return VULKAN_HPP_NAMESPACE::exchange( m_surface, nullptr ); } VULKAN_HPP_NAMESPACE::Instance getInstance() const @@ -11382,10 +11567,10 @@ namespace VULKAN_HPP_NAMESPACE SwapchainKHR( SwapchainKHR const & ) = delete; SwapchainKHR( SwapchainKHR && rhs ) VULKAN_HPP_NOEXCEPT - : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) - , m_swapchain( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_swapchain, {} ) ) - , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) - , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_swapchain( VULKAN_HPP_NAMESPACE::exchange( rhs.m_swapchain, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) { } @@ -11431,7 +11616,7 @@ namespace VULKAN_HPP_NAMESPACE m_device = nullptr; m_allocator = nullptr; m_dispatcher = nullptr; - return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_swapchain, nullptr ); + return VULKAN_HPP_NAMESPACE::exchange( m_swapchain, nullptr ); } VULKAN_HPP_NAMESPACE::Device getDevice() const @@ -11500,7 +11685,7 @@ namespace VULKAN_HPP_NAMESPACE void setLatencyMarkerNV( const VULKAN_HPP_NAMESPACE::SetLatencyMarkerInfoNV & latencyMarkerInfo ) const VULKAN_HPP_NOEXCEPT; - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::GetLatencyMarkerInfoNV getLatencyTimingsNV() const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD std::vector getLatencyTimingsNV() const; private: VULKAN_HPP_NAMESPACE::Device m_device = {}; @@ -11577,10 +11762,10 @@ namespace VULKAN_HPP_NAMESPACE ValidationCacheEXT( ValidationCacheEXT const & ) = delete; ValidationCacheEXT( ValidationCacheEXT && rhs ) VULKAN_HPP_NOEXCEPT - : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) - , m_validationCache( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_validationCache, {} ) ) - , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) - , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_validationCache( VULKAN_HPP_NAMESPACE::exchange( rhs.m_validationCache, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) { } @@ -11627,7 +11812,7 @@ namespace VULKAN_HPP_NAMESPACE m_device = nullptr; m_allocator = nullptr; m_dispatcher = nullptr; - return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_validationCache, nullptr ); + return VULKAN_HPP_NAMESPACE::exchange( m_validationCache, nullptr ); } VULKAN_HPP_NAMESPACE::Device getDevice() const @@ -11703,10 +11888,10 @@ namespace VULKAN_HPP_NAMESPACE VideoSessionKHR( VideoSessionKHR const & ) = delete; VideoSessionKHR( VideoSessionKHR && rhs ) VULKAN_HPP_NOEXCEPT - : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) - , m_videoSession( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_videoSession, {} ) 
) - , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) - , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_videoSession( VULKAN_HPP_NAMESPACE::exchange( rhs.m_videoSession, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) { } @@ -11753,7 +11938,7 @@ namespace VULKAN_HPP_NAMESPACE m_device = nullptr; m_allocator = nullptr; m_dispatcher = nullptr; - return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_videoSession, nullptr ); + return VULKAN_HPP_NAMESPACE::exchange( m_videoSession, nullptr ); } VULKAN_HPP_NAMESPACE::Device getDevice() const @@ -11829,10 +12014,10 @@ namespace VULKAN_HPP_NAMESPACE VideoSessionParametersKHR( VideoSessionParametersKHR const & ) = delete; VideoSessionParametersKHR( VideoSessionParametersKHR && rhs ) VULKAN_HPP_NOEXCEPT - : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) ) - , m_videoSessionParameters( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_videoSessionParameters, {} ) ) - , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) ) - , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_videoSessionParameters( VULKAN_HPP_NAMESPACE::exchange( rhs.m_videoSessionParameters, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) { } @@ -11879,7 +12064,7 @@ namespace VULKAN_HPP_NAMESPACE m_device = nullptr; m_allocator = nullptr; m_dispatcher = nullptr; - return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_videoSessionParameters, nullptr ); + return VULKAN_HPP_NAMESPACE::exchange( m_videoSessionParameters, nullptr ); } VULKAN_HPP_NAMESPACE::Device getDevice() const @@ -12018,7 +12203,7 @@ namespace VULKAN_HPP_NAMESPACE static_cast( usage ), static_cast( flags ), reinterpret_cast( &imageFormatProperties ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties" ); return imageFormatProperties; } @@ -12128,7 +12313,7 @@ namespace VULKAN_HPP_NAMESPACE layerName ? 
layerName->c_str() : nullptr, &propertyCount, reinterpret_cast( properties.data() ) ) ); } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Context::enumerateInstanceExtensionProperties" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Context::enumerateInstanceExtensionProperties" ); VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); if ( propertyCount < properties.size() ) { @@ -12159,7 +12344,7 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( properties.data() ) ) ); } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceExtensionProperties" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceExtensionProperties" ); VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); if ( propertyCount < properties.size() ) { @@ -12185,7 +12370,7 @@ namespace VULKAN_HPP_NAMESPACE getDispatcher()->vkEnumerateInstanceLayerProperties( &propertyCount, reinterpret_cast( properties.data() ) ) ); } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Context::enumerateInstanceLayerProperties" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Context::enumerateInstanceLayerProperties" ); VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); if ( propertyCount < properties.size() ) { @@ -12212,7 +12397,7 @@ namespace VULKAN_HPP_NAMESPACE static_cast( m_physicalDevice ), &propertyCount, reinterpret_cast( properties.data() ) ) ); } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceLayerProperties" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceLayerProperties" ); VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); if ( propertyCount < properties.size() ) { @@ -12238,7 +12423,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkQueueSubmit( static_cast( m_queue ), submits.size(), reinterpret_cast( submits.data() ), static_cast( fence ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::submit" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::submit" ); } VULKAN_HPP_INLINE void Queue::waitIdle() const @@ -12246,7 +12431,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( getDispatcher()->vkQueueWaitIdle && "Function requires " ); VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkQueueWaitIdle( static_cast( m_queue ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::waitIdle" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::waitIdle" ); } VULKAN_HPP_INLINE void Device::waitIdle() const @@ -12254,7 +12439,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( getDispatcher()->vkDeviceWaitIdle && "Function requires " ); VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkDeviceWaitIdle( static_cast( m_device ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::waitIdle" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::waitIdle" ); } VULKAN_HPP_NODISCARD @@ -12293,7 +12478,7 @@ namespace 
VULKAN_HPP_NAMESPACE static_cast( size ), static_cast( flags ), &pData ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::DeviceMemory::mapMemory" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::DeviceMemory::mapMemory" ); return pData; } @@ -12312,7 +12497,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkFlushMappedMemoryRanges( static_cast( m_device ), memoryRanges.size(), reinterpret_cast( memoryRanges.data() ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::flushMappedMemoryRanges" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::flushMappedMemoryRanges" ); } VULKAN_HPP_INLINE void @@ -12322,7 +12507,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkInvalidateMappedMemoryRanges( static_cast( m_device ), memoryRanges.size(), reinterpret_cast( memoryRanges.data() ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::invalidateMappedMemoryRanges" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::invalidateMappedMemoryRanges" ); } VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceSize DeviceMemory::getCommitment() const VULKAN_HPP_NOEXCEPT @@ -12345,7 +12530,7 @@ namespace VULKAN_HPP_NAMESPACE static_cast( m_buffer ), static_cast( memory ), static_cast( memoryOffset ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Buffer::bindMemory" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Buffer::bindMemory" ); } VULKAN_HPP_INLINE void Image::bindMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset ) const @@ -12357,7 +12542,7 @@ namespace VULKAN_HPP_NAMESPACE static_cast( m_image ), static_cast( memory ), static_cast( memoryOffset ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Image::bindMemory" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Image::bindMemory" ); } VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements Buffer::getMemoryRequirements() const VULKAN_HPP_NOEXCEPT @@ -12449,7 +12634,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkQueueBindSparse( static_cast( m_queue ), bindInfo.size(), reinterpret_cast( bindInfo.data() ), static_cast( fence ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::bindSparse" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::bindSparse" ); } VULKAN_HPP_NODISCARD @@ -12481,7 +12666,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkResetFences( static_cast( m_device ), fences.size(), reinterpret_cast( fences.data() ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::resetFences" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::resetFences" ); } VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Fence::getStatus() const @@ -12490,7 +12675,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkGetFenceStatus( static_cast( m_device ), static_cast( m_fence ) ) ); - resultCheck( + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Fence::getStatus", { 
VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } ); return static_cast( result ); @@ -12503,7 +12688,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkWaitForFences( static_cast( m_device ), fences.size(), reinterpret_cast( fences.data() ), static_cast( waitAll ), timeout ) ); - resultCheck( + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::waitForFences", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout } ); return static_cast( result ); @@ -12561,7 +12746,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkGetEventStatus( static_cast( m_device ), static_cast( m_event ) ) ); - resultCheck( + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Event::getStatus", { VULKAN_HPP_NAMESPACE::Result::eEventSet, VULKAN_HPP_NAMESPACE::Result::eEventReset } ); return static_cast( result ); @@ -12573,7 +12758,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkSetEvent( static_cast( m_device ), static_cast( m_event ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Event::set" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Event::set" ); } VULKAN_HPP_INLINE void Event::reset() const @@ -12582,7 +12767,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkResetEvent( static_cast( m_device ), static_cast( m_event ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Event::reset" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Event::reset" ); } VULKAN_HPP_NODISCARD @@ -12625,7 +12810,7 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( data.data() ), static_cast( stride ), static_cast( flags ) ) ); - resultCheck( + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::QueryPool::getResults", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } ); return std::make_pair( result, std::move( data ) ); @@ -12647,7 +12832,7 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &data ), static_cast( stride ), static_cast( flags ) ) ); - resultCheck( + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::QueryPool::getResult", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } ); return std::make_pair( result, std::move( data ) ); @@ -12826,7 +13011,7 @@ namespace VULKAN_HPP_NAMESPACE static_cast( m_device ), static_cast( m_pipelineCache ), &dataSize, reinterpret_cast( data.data() ) ) ); } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PipelineCache::getData" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PipelineCache::getData" ); VULKAN_HPP_ASSERT( dataSize <= data.size() ); if ( dataSize < data.size() ) { @@ -12844,7 +13029,7 @@ namespace VULKAN_HPP_NAMESPACE static_cast( m_pipelineCache ), srcCaches.size(), reinterpret_cast( srcCaches.data() ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PipelineCache::merge" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PipelineCache::merge" ); } VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE @@ -12904,7 +13089,7 @@ namespace VULKAN_HPP_NAMESPACE # endif } 
- return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Pipeline( *this, *reinterpret_cast( &pipeline ), allocator ); + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Pipeline( *this, *reinterpret_cast( &pipeline ), allocator, result ); } VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE @@ -12964,7 +13149,7 @@ namespace VULKAN_HPP_NAMESPACE # endif } - return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Pipeline( *this, *reinterpret_cast( &pipeline ), allocator ); + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Pipeline( *this, *reinterpret_cast( &pipeline ), allocator, result ); } VULKAN_HPP_NODISCARD @@ -13200,7 +13385,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkResetCommandPool( static_cast( m_device ), static_cast( m_commandPool ), static_cast( flags ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::CommandPool::reset" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::CommandPool::reset" ); } VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE @@ -13237,7 +13422,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkBeginCommandBuffer( static_cast( m_commandBuffer ), reinterpret_cast( &beginInfo ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::begin" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::begin" ); } VULKAN_HPP_INLINE void CommandBuffer::end() const @@ -13246,7 +13431,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkEndCommandBuffer( static_cast( m_commandBuffer ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::end" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::end" ); } VULKAN_HPP_INLINE void CommandBuffer::reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags ) const @@ -13255,7 +13440,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkResetCommandBuffer( static_cast( m_commandBuffer ), static_cast( flags ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::reset" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::reset" ); } VULKAN_HPP_INLINE void CommandBuffer::bindPipeline( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, @@ -13793,7 +13978,7 @@ namespace VULKAN_HPP_NAMESPACE uint32_t apiVersion; VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkEnumerateInstanceVersion( &apiVersion ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Context::enumerateInstanceVersion" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Context::enumerateInstanceVersion" ); return apiVersion; } @@ -13805,7 +13990,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkBindBufferMemory2( static_cast( m_device ), bindInfos.size(), reinterpret_cast( bindInfos.data() ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory2" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory2" ); } VULKAN_HPP_INLINE void Device::bindImageMemory2( VULKAN_HPP_NAMESPACE::ArrayProxy const & bindInfos ) const @@ -13814,7 +13999,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = 
static_cast( getDispatcher()->vkBindImageMemory2( static_cast( m_device ), bindInfos.size(), reinterpret_cast( bindInfos.data() ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory2" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory2" ); } VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags @@ -13874,7 +14059,7 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( physicalDeviceGroupProperties.data() ) ) ); } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroups" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroups" ); VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() ); if ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() ) { @@ -14058,7 +14243,7 @@ namespace VULKAN_HPP_NAMESPACE getDispatcher()->vkGetPhysicalDeviceImageFormatProperties2( static_cast( m_physicalDevice ), reinterpret_cast( &imageFormatInfo ), reinterpret_cast( &imageFormatProperties ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2" ); return imageFormatProperties; } @@ -14076,7 +14261,7 @@ namespace VULKAN_HPP_NAMESPACE getDispatcher()->vkGetPhysicalDeviceImageFormatProperties2( static_cast( m_physicalDevice ), reinterpret_cast( &imageFormatInfo ), reinterpret_cast( &imageFormatProperties ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2" ); return structureChain; } @@ -14450,7 +14635,7 @@ namespace VULKAN_HPP_NAMESPACE uint64_t value; VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkGetSemaphoreCounterValue( static_cast( m_device ), static_cast( m_semaphore ), &value ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Semaphore::getCounterValue" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Semaphore::getCounterValue" ); return value; } @@ -14462,7 +14647,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkWaitSemaphores( static_cast( m_device ), reinterpret_cast( &waitInfo ), timeout ) ); - resultCheck( + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::waitSemaphores", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout } ); return static_cast( result ); @@ -14474,7 +14659,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkSignalSemaphore( static_cast( m_device ), reinterpret_cast( &signalInfo ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::signalSemaphore" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::signalSemaphore" ); } VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceAddress @@ -14534,7 +14719,7 @@ namespace VULKAN_HPP_NAMESPACE static_cast( m_physicalDevice ), &toolCount, reinterpret_cast( toolProperties.data() ) ) ); } } while ( result == 
VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolProperties" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolProperties" ); VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() ); if ( toolCount < toolProperties.size() ) { @@ -14576,7 +14761,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkSetPrivateData( static_cast( m_device ), static_cast( objectType_ ), objectHandle, static_cast( privateDataSlot ), data ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setPrivateData" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setPrivateData" ); } VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t Device::getPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType_, @@ -14654,7 +14839,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkQueueSubmit2( static_cast( m_queue ), submits.size(), reinterpret_cast( submits.data() ), static_cast( fence ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::submit2" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::submit2" ); } VULKAN_HPP_INLINE void CommandBuffer::copyBuffer2( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 & copyBufferInfo ) const VULKAN_HPP_NOEXCEPT @@ -14978,7 +15163,7 @@ namespace VULKAN_HPP_NAMESPACE queueFamilyIndex, static_cast( surface ), reinterpret_cast( &supported ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceSupportKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceSupportKHR" ); return supported; } @@ -14994,7 +15179,7 @@ namespace VULKAN_HPP_NAMESPACE getDispatcher()->vkGetPhysicalDeviceSurfaceCapabilitiesKHR( static_cast( m_physicalDevice ), static_cast( surface ), reinterpret_cast( &surfaceCapabilities ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilitiesKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilitiesKHR" ); return surfaceCapabilities; } @@ -15021,7 +15206,7 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( surfaceFormats.data() ) ) ); } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormatsKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormatsKHR" ); VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() ); if ( surfaceFormatCount < surfaceFormats.size() ) { @@ -15053,7 +15238,7 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( presentModes.data() ) ) ); } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModesKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModesKHR" ); VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() ); if ( presentModeCount < presentModes.size() ) { @@ -15109,7 +15294,7 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( swapchainImages.data() ) ) ); } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( result, 
VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::getImages" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::getImages" ); VULKAN_HPP_ASSERT( swapchainImageCount <= swapchainImages.size() ); if ( swapchainImageCount < swapchainImages.size() ) { @@ -15131,12 +15316,12 @@ namespace VULKAN_HPP_NAMESPACE static_cast( semaphore ), static_cast( fence ), &imageIndex ) ); - resultCheck( result, - VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::acquireNextImage", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, - VULKAN_HPP_NAMESPACE::Result::eTimeout, - VULKAN_HPP_NAMESPACE::Result::eNotReady, - VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::acquireNextImage", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, + VULKAN_HPP_NAMESPACE::Result::eTimeout, + VULKAN_HPP_NAMESPACE::Result::eNotReady, + VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } ); return std::make_pair( result, std::move( imageIndex ) ); } @@ -15147,7 +15332,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkQueuePresentKHR( static_cast( m_queue ), reinterpret_cast( &presentInfo ) ) ); - resultCheck( + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::presentKHR", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } ); return static_cast( result ); @@ -15161,7 +15346,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR deviceGroupPresentCapabilities; VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkGetDeviceGroupPresentCapabilitiesKHR( static_cast( m_device ), reinterpret_cast( &deviceGroupPresentCapabilities ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupPresentCapabilitiesKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupPresentCapabilitiesKHR" ); return deviceGroupPresentCapabilities; } @@ -15175,7 +15360,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes; VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkGetDeviceGroupSurfacePresentModesKHR( static_cast( m_device ), static_cast( surface ), reinterpret_cast( &modes ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupSurfacePresentModesKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupSurfacePresentModesKHR" ); return modes; } @@ -15203,7 +15388,7 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( rects.data() ) ) ); } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getPresentRectanglesKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getPresentRectanglesKHR" ); VULKAN_HPP_ASSERT( rectCount <= rects.size() ); if ( rectCount < rects.size() ) { @@ -15220,12 +15405,12 @@ namespace VULKAN_HPP_NAMESPACE uint32_t imageIndex; VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkAcquireNextImage2KHR( static_cast( m_device ), reinterpret_cast( &acquireInfo ), &imageIndex ) ); - resultCheck( result, - VULKAN_HPP_NAMESPACE_STRING "::Device::acquireNextImage2KHR", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, - VULKAN_HPP_NAMESPACE::Result::eTimeout, - 
VULKAN_HPP_NAMESPACE::Result::eNotReady, - VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::acquireNextImage2KHR", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, + VULKAN_HPP_NAMESPACE::Result::eTimeout, + VULKAN_HPP_NAMESPACE::Result::eNotReady, + VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } ); return std::make_pair( result, std::move( imageIndex ) ); } @@ -15251,7 +15436,7 @@ namespace VULKAN_HPP_NAMESPACE static_cast( m_physicalDevice ), &propertyCount, reinterpret_cast( properties.data() ) ) ); } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPropertiesKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPropertiesKHR" ); VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); if ( propertyCount < properties.size() ) { @@ -15279,7 +15464,7 @@ namespace VULKAN_HPP_NAMESPACE static_cast( m_physicalDevice ), &propertyCount, reinterpret_cast( properties.data() ) ) ); } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlanePropertiesKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlanePropertiesKHR" ); VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); if ( propertyCount < properties.size() ) { @@ -15345,7 +15530,7 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( properties.data() ) ) ); } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::DisplayKHR::getModeProperties" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::DisplayKHR::getModeProperties" ); VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); if ( propertyCount < properties.size() ) { @@ -15389,7 +15574,7 @@ namespace VULKAN_HPP_NAMESPACE static_cast( m_displayModeKHR ), planeIndex, reinterpret_cast( &capabilities ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::DisplayModeKHR::getDisplayPlaneCapabilities" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::DisplayModeKHR::getDisplayPlaneCapabilities" ); return capabilities; } @@ -15717,7 +15902,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkDebugMarkerSetObjectTagEXT( static_cast( m_device ), reinterpret_cast( &tagInfo ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::debugMarkerSetObjectTagEXT" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::debugMarkerSetObjectTagEXT" ); } VULKAN_HPP_INLINE void Device::debugMarkerSetObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT & nameInfo ) const @@ -15726,7 +15911,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkDebugMarkerSetObjectNameEXT( static_cast( m_device ), reinterpret_cast( &nameInfo ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::debugMarkerSetObjectNameEXT" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::debugMarkerSetObjectNameEXT" ); } VULKAN_HPP_INLINE void CommandBuffer::debugMarkerBeginEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT & markerInfo ) const 
VULKAN_HPP_NOEXCEPT @@ -15765,7 +15950,7 @@ namespace VULKAN_HPP_NAMESPACE getDispatcher()->vkGetPhysicalDeviceVideoCapabilitiesKHR( static_cast( m_physicalDevice ), reinterpret_cast( &videoProfile ), reinterpret_cast( &capabilities ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoCapabilitiesKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoCapabilitiesKHR" ); return capabilities; } @@ -15783,7 +15968,7 @@ namespace VULKAN_HPP_NAMESPACE getDispatcher()->vkGetPhysicalDeviceVideoCapabilitiesKHR( static_cast( m_physicalDevice ), reinterpret_cast( &videoProfile ), reinterpret_cast( &capabilities ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoCapabilitiesKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoCapabilitiesKHR" ); return structureChain; } @@ -15814,7 +15999,7 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( videoFormatProperties.data() ) ) ); } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoFormatPropertiesKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoFormatPropertiesKHR" ); VULKAN_HPP_ASSERT( videoFormatPropertyCount <= videoFormatProperties.size() ); if ( videoFormatPropertyCount < videoFormatProperties.size() ) { @@ -15888,7 +16073,7 @@ namespace VULKAN_HPP_NAMESPACE static_cast( m_videoSession ), bindSessionMemoryInfos.size(), reinterpret_cast( bindSessionMemoryInfos.data() ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::VideoSessionKHR::bindMemory" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::VideoSessionKHR::bindMemory" ); } VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE @@ -15924,7 +16109,7 @@ namespace VULKAN_HPP_NAMESPACE getDispatcher()->vkUpdateVideoSessionParametersKHR( static_cast( m_device ), static_cast( m_videoSessionParameters ), reinterpret_cast( &updateInfo ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::VideoSessionParametersKHR::update" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::VideoSessionParametersKHR::update" ); } VULKAN_HPP_INLINE void CommandBuffer::beginVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR & beginInfo ) const VULKAN_HPP_NOEXCEPT @@ -16151,7 +16336,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX properties; VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkGetImageViewAddressNVX( static_cast( m_device ), static_cast( m_imageView ), reinterpret_cast( &properties ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::ImageView::getAddressNVX" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::ImageView::getAddressNVX" ); return properties; } @@ -16226,7 +16411,7 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( info.data() ) ) ); } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Pipeline::getShaderInfoAMD" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Pipeline::getShaderInfoAMD" ); VULKAN_HPP_ASSERT( infoSize <= info.size() ); if ( infoSize < info.size() ) { @@ -16303,7 +16488,7 @@ namespace VULKAN_HPP_NAMESPACE static_cast( flags ), static_cast( 
externalHandleType ), reinterpret_cast( &externalImageFormatProperties ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getExternalImageFormatPropertiesNV" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getExternalImageFormatPropertiesNV" ); return externalImageFormatProperties; } @@ -16318,7 +16503,7 @@ namespace VULKAN_HPP_NAMESPACE HANDLE handle; VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkGetMemoryWin32HandleNV( static_cast( m_device ), static_cast( m_memory ), static_cast( handleType ), &handle ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::DeviceMemory::getMemoryWin32HandleNV" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::DeviceMemory::getMemoryWin32HandleNV" ); return handle; } @@ -16417,7 +16602,7 @@ namespace VULKAN_HPP_NAMESPACE getDispatcher()->vkGetPhysicalDeviceImageFormatProperties2KHR( static_cast( m_physicalDevice ), reinterpret_cast( &imageFormatInfo ), reinterpret_cast( &imageFormatProperties ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2KHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2KHR" ); return imageFormatProperties; } @@ -16435,7 +16620,7 @@ namespace VULKAN_HPP_NAMESPACE getDispatcher()->vkGetPhysicalDeviceImageFormatProperties2KHR( static_cast( m_physicalDevice ), reinterpret_cast( &imageFormatInfo ), reinterpret_cast( &imageFormatProperties ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2KHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2KHR" ); return structureChain; } @@ -16647,7 +16832,7 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( physicalDeviceGroupProperties.data() ) ) ); } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroupsKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroupsKHR" ); VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() ); if ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() ) { @@ -16683,7 +16868,7 @@ namespace VULKAN_HPP_NAMESPACE HANDLE handle; VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkGetMemoryWin32HandleKHR( static_cast( m_device ), reinterpret_cast( &getWin32HandleInfo ), &handle ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandleKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandleKHR" ); return handle; } @@ -16700,7 +16885,7 @@ namespace VULKAN_HPP_NAMESPACE static_cast( handleType ), handle, reinterpret_cast( &memoryWin32HandleProperties ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandlePropertiesKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandlePropertiesKHR" ); return memoryWin32HandleProperties; } @@ -16715,7 +16900,7 @@ namespace VULKAN_HPP_NAMESPACE int fd; VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkGetMemoryFdKHR( static_cast( m_device ), reinterpret_cast( &getFdInfo ), &fd ) ); - resultCheck( 
result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryFdKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryFdKHR" ); return fd; } @@ -16731,7 +16916,7 @@ namespace VULKAN_HPP_NAMESPACE static_cast( handleType ), fd, reinterpret_cast( &memoryFdProperties ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryFdPropertiesKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryFdPropertiesKHR" ); return memoryFdProperties; } @@ -16764,7 +16949,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkImportSemaphoreWin32HandleKHR( static_cast( m_device ), reinterpret_cast( &importSemaphoreWin32HandleInfo ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreWin32HandleKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreWin32HandleKHR" ); } VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE HANDLE @@ -16776,7 +16961,7 @@ namespace VULKAN_HPP_NAMESPACE HANDLE handle; VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkGetSemaphoreWin32HandleKHR( static_cast( m_device ), reinterpret_cast( &getWin32HandleInfo ), &handle ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreWin32HandleKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreWin32HandleKHR" ); return handle; } @@ -16790,7 +16975,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkImportSemaphoreFdKHR( static_cast( m_device ), reinterpret_cast( &importSemaphoreFdInfo ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreFdKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreFdKHR" ); } VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE int Device::getSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR & getFdInfo ) const @@ -16800,7 +16985,7 @@ namespace VULKAN_HPP_NAMESPACE int fd; VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkGetSemaphoreFdKHR( static_cast( m_device ), reinterpret_cast( &getFdInfo ), &fd ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreFdKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreFdKHR" ); return fd; } @@ -16935,7 +17120,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkAcquireXlibDisplayEXT( static_cast( m_physicalDevice ), &dpy, static_cast( display ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::acquireXlibDisplayEXT" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::acquireXlibDisplayEXT" ); } VULKAN_HPP_NODISCARD @@ -16971,7 +17156,7 @@ namespace VULKAN_HPP_NAMESPACE getDispatcher()->vkGetPhysicalDeviceSurfaceCapabilities2EXT( static_cast( m_physicalDevice ), static_cast( surface ), reinterpret_cast( &surfaceCapabilities ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2EXT" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2EXT" ); return surfaceCapabilities; } @@ -16985,7 +17170,7 @@ namespace VULKAN_HPP_NAMESPACE 
VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkDisplayPowerControlEXT( static_cast( m_device ), static_cast( display ), reinterpret_cast( &displayPowerInfo ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::displayPowerControlEXT" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::displayPowerControlEXT" ); } VULKAN_HPP_NODISCARD @@ -17045,7 +17230,7 @@ namespace VULKAN_HPP_NAMESPACE uint64_t counterValue; VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkGetSwapchainCounterEXT( static_cast( m_device ), static_cast( m_swapchain ), static_cast( counter ), &counterValue ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::getCounterEXT" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::getCounterEXT" ); return counterValue; } @@ -17061,7 +17246,7 @@ namespace VULKAN_HPP_NAMESPACE getDispatcher()->vkGetRefreshCycleDurationGOOGLE( static_cast( m_device ), static_cast( m_swapchain ), reinterpret_cast( &displayTimingProperties ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::getRefreshCycleDurationGOOGLE" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::getRefreshCycleDurationGOOGLE" ); return displayTimingProperties; } @@ -17088,7 +17273,7 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( presentationTimings.data() ) ) ); } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::getPastPresentationTimingGOOGLE" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::getPastPresentationTimingGOOGLE" ); VULKAN_HPP_ASSERT( presentationTimingCount <= presentationTimings.size() ); if ( presentationTimingCount < presentationTimings.size() ) { @@ -17212,9 +17397,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkGetSwapchainStatusKHR( static_cast( m_device ), static_cast( m_swapchain ) ) ); - resultCheck( result, - VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::getStatus", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::getStatus", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } ); return static_cast( result ); } @@ -17244,7 +17429,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkImportFenceWin32HandleKHR( static_cast( m_device ), reinterpret_cast( &importFenceWin32HandleInfo ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importFenceWin32HandleKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importFenceWin32HandleKHR" ); } VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE HANDLE @@ -17255,7 +17440,7 @@ namespace VULKAN_HPP_NAMESPACE HANDLE handle; VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkGetFenceWin32HandleKHR( static_cast( m_device ), reinterpret_cast( &getWin32HandleInfo ), &handle ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceWin32HandleKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceWin32HandleKHR" ); return handle; } @@ -17269,7 +17454,7 @@ namespace 
VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkImportFenceFdKHR( static_cast( m_device ), reinterpret_cast( &importFenceFdInfo ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importFenceFdKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importFenceFdKHR" ); } VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE int Device::getFenceFdKHR( const VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR & getFdInfo ) const @@ -17279,7 +17464,7 @@ namespace VULKAN_HPP_NAMESPACE int fd; VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkGetFenceFdKHR( static_cast( m_device ), reinterpret_cast( &getFdInfo ), &fd ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceFdKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceFdKHR" ); return fd; } @@ -17314,7 +17499,7 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( counterDescriptions.data() ) ) ); } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR" ); VULKAN_HPP_ASSERT( counterCount <= counters.size() ); if ( counterCount < counters.size() ) { @@ -17345,7 +17530,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkAcquireProfilingLockKHR( static_cast( m_device ), reinterpret_cast( &info ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::acquireProfilingLockKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::acquireProfilingLockKHR" ); } VULKAN_HPP_INLINE void Device::releaseProfilingLockKHR() const VULKAN_HPP_NOEXCEPT @@ -17368,7 +17553,7 @@ namespace VULKAN_HPP_NAMESPACE getDispatcher()->vkGetPhysicalDeviceSurfaceCapabilities2KHR( static_cast( m_physicalDevice ), reinterpret_cast( &surfaceInfo ), reinterpret_cast( &surfaceCapabilities ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2KHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2KHR" ); return surfaceCapabilities; } @@ -17386,7 +17571,7 @@ namespace VULKAN_HPP_NAMESPACE getDispatcher()->vkGetPhysicalDeviceSurfaceCapabilities2KHR( static_cast( m_physicalDevice ), reinterpret_cast( &surfaceInfo ), reinterpret_cast( &surfaceCapabilities ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2KHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2KHR" ); return structureChain; } @@ -17417,7 +17602,7 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( surfaceFormats.data() ) ) ); } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormats2KHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormats2KHR" ); VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() ); if ( surfaceFormatCount < surfaceFormats.size() ) { @@ -17459,7 +17644,7 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( surfaceFormats.data() ) ) 
); } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormats2KHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormats2KHR" ); VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() ); if ( surfaceFormatCount < surfaceFormats.size() ) { @@ -17493,7 +17678,7 @@ namespace VULKAN_HPP_NAMESPACE static_cast( m_physicalDevice ), &propertyCount, reinterpret_cast( properties.data() ) ) ); } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayProperties2KHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayProperties2KHR" ); VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); if ( propertyCount < properties.size() ) { @@ -17521,7 +17706,7 @@ namespace VULKAN_HPP_NAMESPACE static_cast( m_physicalDevice ), &propertyCount, reinterpret_cast( properties.data() ) ) ); } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneProperties2KHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneProperties2KHR" ); VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); if ( propertyCount < properties.size() ) { @@ -17552,7 +17737,7 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( properties.data() ) ) ); } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::DisplayKHR::getModeProperties2" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::DisplayKHR::getModeProperties2" ); VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); if ( propertyCount < properties.size() ) { @@ -17572,7 +17757,7 @@ namespace VULKAN_HPP_NAMESPACE getDispatcher()->vkGetDisplayPlaneCapabilities2KHR( static_cast( m_physicalDevice ), reinterpret_cast( &displayPlaneInfo ), reinterpret_cast( &capabilities ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneCapabilities2KHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneCapabilities2KHR" ); return capabilities; } @@ -17641,7 +17826,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkSetDebugUtilsObjectNameEXT( static_cast( m_device ), reinterpret_cast( &nameInfo ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setDebugUtilsObjectNameEXT" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setDebugUtilsObjectNameEXT" ); } VULKAN_HPP_INLINE void Device::setDebugUtilsObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT & tagInfo ) const @@ -17650,7 +17835,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkSetDebugUtilsObjectTagEXT( static_cast( m_device ), reinterpret_cast( &tagInfo ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setDebugUtilsObjectTagEXT" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setDebugUtilsObjectTagEXT" ); } VULKAN_HPP_INLINE void Queue::beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & 
labelInfo ) const VULKAN_HPP_NOEXCEPT @@ -17747,7 +17932,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID properties; VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkGetAndroidHardwareBufferPropertiesANDROID( static_cast( m_device ), &buffer, reinterpret_cast( &properties ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getAndroidHardwareBufferPropertiesANDROID" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getAndroidHardwareBufferPropertiesANDROID" ); return properties; } @@ -17764,7 +17949,7 @@ namespace VULKAN_HPP_NAMESPACE structureChain.template get(); VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkGetAndroidHardwareBufferPropertiesANDROID( static_cast( m_device ), &buffer, reinterpret_cast( &properties ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getAndroidHardwareBufferPropertiesANDROID" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getAndroidHardwareBufferPropertiesANDROID" ); return structureChain; } @@ -17778,7 +17963,7 @@ namespace VULKAN_HPP_NAMESPACE struct AHardwareBuffer * buffer; VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkGetMemoryAndroidHardwareBufferANDROID( static_cast( m_device ), reinterpret_cast( &info ), &buffer ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryAndroidHardwareBufferANDROID" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryAndroidHardwareBufferANDROID" ); return buffer; } @@ -17844,7 +18029,7 @@ namespace VULKAN_HPP_NAMESPACE # endif } - return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Pipeline( *this, *reinterpret_cast( &pipeline ), allocator ); + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Pipeline( *this, *reinterpret_cast( &pipeline ), allocator, result ); } VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineScratchSizeAMDX Pipeline::getExecutionGraphScratchSizeAMDX() const @@ -17855,7 +18040,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineScratchSizeAMDX sizeInfo; VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkGetExecutionGraphPipelineScratchSizeAMDX( static_cast( m_device ), static_cast( m_pipeline ), reinterpret_cast( &sizeInfo ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Pipeline::getExecutionGraphScratchSizeAMDX" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Pipeline::getExecutionGraphScratchSizeAMDX" ); return sizeInfo; } @@ -17872,7 +18057,7 @@ namespace VULKAN_HPP_NAMESPACE static_cast( m_pipeline ), reinterpret_cast( &nodeInfo ), &nodeIndex ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Pipeline::getExecutionGraphNodeIndexAMDX" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Pipeline::getExecutionGraphNodeIndexAMDX" ); return nodeIndex; } @@ -18135,11 +18320,11 @@ namespace VULKAN_HPP_NAMESPACE infos.size(), reinterpret_cast( infos.data() ), reinterpret_cast( pBuildRangeInfos.data() ) ) ); - resultCheck( result, - VULKAN_HPP_NAMESPACE_STRING "::Device::buildAccelerationStructuresKHR", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, - VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, - VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( 
result, + VULKAN_HPP_NAMESPACE_STRING "::Device::buildAccelerationStructuresKHR", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, + VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } ); return static_cast( result ); } @@ -18155,11 +18340,11 @@ namespace VULKAN_HPP_NAMESPACE getDispatcher()->vkCopyAccelerationStructureKHR( static_cast( m_device ), static_cast( deferredOperation ), reinterpret_cast( &info ) ) ); - resultCheck( result, - VULKAN_HPP_NAMESPACE_STRING "::Device::copyAccelerationStructureKHR", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, - VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, - VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::copyAccelerationStructureKHR", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, + VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } ); return static_cast( result ); } @@ -18175,11 +18360,11 @@ namespace VULKAN_HPP_NAMESPACE getDispatcher()->vkCopyAccelerationStructureToMemoryKHR( static_cast( m_device ), static_cast( deferredOperation ), reinterpret_cast( &info ) ) ); - resultCheck( result, - VULKAN_HPP_NAMESPACE_STRING "::Device::copyAccelerationStructureToMemoryKHR", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, - VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, - VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::copyAccelerationStructureToMemoryKHR", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, + VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } ); return static_cast( result ); } @@ -18195,11 +18380,11 @@ namespace VULKAN_HPP_NAMESPACE getDispatcher()->vkCopyMemoryToAccelerationStructureKHR( static_cast( m_device ), static_cast( deferredOperation ), reinterpret_cast( &info ) ) ); - resultCheck( result, - VULKAN_HPP_NAMESPACE_STRING "::Device::copyMemoryToAccelerationStructureKHR", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, - VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, - VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::copyMemoryToAccelerationStructureKHR", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, + VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } ); return static_cast( result ); } @@ -18224,7 +18409,7 @@ namespace VULKAN_HPP_NAMESPACE data.size() * sizeof( DataType ), reinterpret_cast( data.data() ), stride ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::writeAccelerationStructuresPropertiesKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::writeAccelerationStructuresPropertiesKHR" ); return data; } @@ -18247,7 +18432,7 @@ namespace VULKAN_HPP_NAMESPACE sizeof( DataType ), reinterpret_cast( &data ), stride ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::writeAccelerationStructuresPropertyKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::writeAccelerationStructuresPropertyKHR" ); return data; } @@ -18436,7 +18621,7 @@ namespace VULKAN_HPP_NAMESPACE # endif } - return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Pipeline( *this, *reinterpret_cast( 
&pipeline ), allocator ); + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Pipeline( *this, *reinterpret_cast( &pipeline ), allocator, result ); } template @@ -18455,7 +18640,7 @@ namespace VULKAN_HPP_NAMESPACE groupCount, data.size() * sizeof( DataType ), reinterpret_cast( data.data() ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Pipeline::getRayTracingShaderGroupHandlesKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Pipeline::getRayTracingShaderGroupHandlesKHR" ); return data; } @@ -18474,7 +18659,7 @@ namespace VULKAN_HPP_NAMESPACE groupCount, sizeof( DataType ), reinterpret_cast( &data ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Pipeline::getRayTracingShaderGroupHandleKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Pipeline::getRayTracingShaderGroupHandleKHR" ); return data; } @@ -18495,7 +18680,7 @@ namespace VULKAN_HPP_NAMESPACE groupCount, data.size() * sizeof( DataType ), reinterpret_cast( data.data() ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Pipeline::getRayTracingCaptureReplayShaderGroupHandlesKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Pipeline::getRayTracingCaptureReplayShaderGroupHandlesKHR" ); return data; } @@ -18514,7 +18699,7 @@ namespace VULKAN_HPP_NAMESPACE groupCount, sizeof( DataType ), reinterpret_cast( &data ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Pipeline::getRayTracingCaptureReplayShaderGroupHandleKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Pipeline::getRayTracingCaptureReplayShaderGroupHandleKHR" ); return data; } @@ -18604,7 +18789,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkBindBufferMemory2KHR( static_cast( m_device ), bindInfos.size(), reinterpret_cast( bindInfos.data() ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory2KHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory2KHR" ); } VULKAN_HPP_INLINE void @@ -18614,7 +18799,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkBindImageMemory2KHR( static_cast( m_device ), bindInfos.size(), reinterpret_cast( bindInfos.data() ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory2KHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory2KHR" ); } //=== VK_EXT_image_drm_format_modifier === @@ -18627,7 +18812,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT properties; VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkGetImageDrmFormatModifierPropertiesEXT( static_cast( m_device ), static_cast( m_image ), reinterpret_cast( &properties ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Image::getDrmFormatModifierPropertiesEXT" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Image::getDrmFormatModifierPropertiesEXT" ); return properties; } @@ -18668,7 +18853,7 @@ namespace VULKAN_HPP_NAMESPACE static_cast( m_validationCache ), srcCaches.size(), reinterpret_cast( srcCaches.data() ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::ValidationCacheEXT::merge" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING 
"::ValidationCacheEXT::merge" ); } VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector ValidationCacheEXT::getData() const @@ -18689,7 +18874,7 @@ namespace VULKAN_HPP_NAMESPACE static_cast( m_device ), static_cast( m_validationCache ), &dataSize, reinterpret_cast( data.data() ) ) ); } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::ValidationCacheEXT::getData" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::ValidationCacheEXT::getData" ); VULKAN_HPP_ASSERT( dataSize <= data.size() ); if ( dataSize < data.size() ) { @@ -18799,7 +18984,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkBindAccelerationStructureMemoryNV( static_cast( m_device ), bindInfos.size(), reinterpret_cast( bindInfos.data() ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindAccelerationStructureMemoryNV" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindAccelerationStructureMemoryNV" ); } VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV & info, @@ -18927,7 +19112,7 @@ namespace VULKAN_HPP_NAMESPACE # endif } - return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Pipeline( *this, *reinterpret_cast( &pipeline ), allocator ); + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Pipeline( *this, *reinterpret_cast( &pipeline ), allocator, result ); } template @@ -18946,7 +19131,7 @@ namespace VULKAN_HPP_NAMESPACE groupCount, data.size() * sizeof( DataType ), reinterpret_cast( data.data() ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Pipeline::getRayTracingShaderGroupHandlesNV" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Pipeline::getRayTracingShaderGroupHandlesNV" ); return data; } @@ -18965,7 +19150,7 @@ namespace VULKAN_HPP_NAMESPACE groupCount, sizeof( DataType ), reinterpret_cast( &data ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Pipeline::getRayTracingShaderGroupHandleNV" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Pipeline::getRayTracingShaderGroupHandleNV" ); return data; } @@ -18982,7 +19167,7 @@ namespace VULKAN_HPP_NAMESPACE static_cast( m_accelerationStructure ), data.size() * sizeof( DataType ), reinterpret_cast( data.data() ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::AccelerationStructureNV::getHandle" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::AccelerationStructureNV::getHandle" ); return data; } @@ -18998,7 +19183,7 @@ namespace VULKAN_HPP_NAMESPACE static_cast( m_accelerationStructure ), sizeof( DataType ), reinterpret_cast( &data ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::AccelerationStructureNV::getHandle" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::AccelerationStructureNV::getHandle" ); return data; } @@ -19026,7 +19211,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCompileDeferredNV( static_cast( m_device ), static_cast( m_pipeline ), shader ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Pipeline::compileDeferredNV" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Pipeline::compileDeferredNV" ); } //=== VK_KHR_maintenance3 === @@ -19116,7 +19301,7 
@@ namespace VULKAN_HPP_NAMESPACE static_cast( handleType ), pHostPointer, reinterpret_cast( &memoryHostPointerProperties ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryHostPointerPropertiesEXT" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryHostPointerPropertiesEXT" ); return memoryHostPointerProperties; } @@ -19159,7 +19344,7 @@ namespace VULKAN_HPP_NAMESPACE static_cast( m_physicalDevice ), &timeDomainCount, reinterpret_cast( timeDomains.data() ) ) ); } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCalibrateableTimeDomainsEXT" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCalibrateableTimeDomainsEXT" ); VULKAN_HPP_ASSERT( timeDomainCount <= timeDomains.size() ); if ( timeDomainCount < timeDomains.size() ) { @@ -19183,7 +19368,7 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( timestampInfos.data() ), timestamps.data(), &maxDeviation ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampsEXT" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampsEXT" ); return data_; } @@ -19199,7 +19384,7 @@ namespace VULKAN_HPP_NAMESPACE uint64_t & maxDeviation = data_.second; VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkGetCalibratedTimestampsEXT( static_cast( m_device ), 1, reinterpret_cast( ×tampInfo ), ×tamp, &maxDeviation ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampEXT" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampEXT" ); return data_; } @@ -19308,7 +19493,7 @@ namespace VULKAN_HPP_NAMESPACE uint64_t value; VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkGetSemaphoreCounterValueKHR( static_cast( m_device ), static_cast( m_semaphore ), &value ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Semaphore::getCounterValueKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Semaphore::getCounterValueKHR" ); return value; } @@ -19320,7 +19505,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkWaitSemaphoresKHR( static_cast( m_device ), reinterpret_cast( &waitInfo ), timeout ) ); - resultCheck( + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::waitSemaphoresKHR", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout } ); return static_cast( result ); @@ -19332,7 +19517,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkSignalSemaphoreKHR( static_cast( m_device ), reinterpret_cast( &signalInfo ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::signalSemaphoreKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::signalSemaphoreKHR" ); } //=== VK_INTEL_performance_query === @@ -19344,7 +19529,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkInitializePerformanceApiINTEL( static_cast( m_device ), reinterpret_cast( &initializeInfo ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::initializePerformanceApiINTEL" ); + 
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::initializePerformanceApiINTEL" ); } VULKAN_HPP_INLINE void Device::uninitializePerformanceApiINTEL() const VULKAN_HPP_NOEXCEPT @@ -19361,7 +19546,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCmdSetPerformanceMarkerINTEL( static_cast( m_commandBuffer ), reinterpret_cast( &markerInfo ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceMarkerINTEL" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceMarkerINTEL" ); } VULKAN_HPP_INLINE void CommandBuffer::setPerformanceStreamMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL & markerInfo ) const @@ -19371,7 +19556,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCmdSetPerformanceStreamMarkerINTEL( static_cast( m_commandBuffer ), reinterpret_cast( &markerInfo ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceStreamMarkerINTEL" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceStreamMarkerINTEL" ); } VULKAN_HPP_INLINE void CommandBuffer::setPerformanceOverrideINTEL( const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL & overrideInfo ) const @@ -19381,7 +19566,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCmdSetPerformanceOverrideINTEL( static_cast( m_commandBuffer ), reinterpret_cast( &overrideInfo ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceOverrideINTEL" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceOverrideINTEL" ); } VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE @@ -19414,7 +19599,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkQueueSetPerformanceConfigurationINTEL( static_cast( m_queue ), static_cast( configuration ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::setPerformanceConfigurationINTEL" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::setPerformanceConfigurationINTEL" ); } VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PerformanceValueINTEL @@ -19425,7 +19610,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::PerformanceValueINTEL value; VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkGetPerformanceParameterINTEL( static_cast( m_device ), static_cast( parameter ), reinterpret_cast( &value ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPerformanceParameterINTEL" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPerformanceParameterINTEL" ); return value; } @@ -19520,7 +19705,7 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( fragmentShadingRates.data() ) ) ); } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getFragmentShadingRatesKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getFragmentShadingRatesKHR" ); VULKAN_HPP_ASSERT( fragmentShadingRateCount <= fragmentShadingRates.size() ); if ( fragmentShadingRateCount < fragmentShadingRates.size() ) { @@ 
-19554,13 +19739,13 @@ namespace VULKAN_HPP_NAMESPACE } VULKAN_HPP_INLINE void CommandBuffer::setRenderingInputAttachmentIndicesKHR( - const VULKAN_HPP_NAMESPACE::RenderingInputAttachmentIndexInfoKHR & locationInfo ) const VULKAN_HPP_NOEXCEPT + const VULKAN_HPP_NAMESPACE::RenderingInputAttachmentIndexInfoKHR & inputAttachmentIndexInfo ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetRenderingInputAttachmentIndicesKHR && "Function requires " ); - getDispatcher()->vkCmdSetRenderingInputAttachmentIndicesKHR( static_cast( m_commandBuffer ), - reinterpret_cast( &locationInfo ) ); + getDispatcher()->vkCmdSetRenderingInputAttachmentIndicesKHR( + static_cast( m_commandBuffer ), reinterpret_cast( &inputAttachmentIndexInfo ) ); } //=== VK_EXT_buffer_device_address === @@ -19599,7 +19784,7 @@ namespace VULKAN_HPP_NAMESPACE static_cast( m_physicalDevice ), &toolCount, reinterpret_cast( toolProperties.data() ) ) ); } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolPropertiesEXT" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolPropertiesEXT" ); VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() ); if ( toolCount < toolProperties.size() ) { @@ -19616,9 +19801,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkWaitForPresentKHR( static_cast( m_device ), static_cast( m_swapchain ), presentId, timeout ) ); - resultCheck( result, - VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::waitForPresent", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( + result, + VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::waitForPresent", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } ); return static_cast( result ); } @@ -19645,7 +19831,7 @@ namespace VULKAN_HPP_NAMESPACE static_cast( m_physicalDevice ), &propertyCount, reinterpret_cast( properties.data() ) ) ); } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixPropertiesNV" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixPropertiesNV" ); VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); if ( propertyCount < properties.size() ) { @@ -19678,7 +19864,7 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( combinations.data() ) ) ); } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV" ); VULKAN_HPP_ASSERT( combinationCount <= combinations.size() ); if ( combinationCount < combinations.size() ) { @@ -19716,7 +19902,7 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( presentModes.data() ) ) ); } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModes2EXT" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING 
"::PhysicalDevice::getSurfacePresentModes2EXT" ); VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() ); if ( presentModeCount < presentModes.size() ) { @@ -19732,7 +19918,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkAcquireFullScreenExclusiveModeEXT( static_cast( m_device ), static_cast( m_swapchain ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::acquireFullScreenExclusiveModeEXT" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::acquireFullScreenExclusiveModeEXT" ); } VULKAN_HPP_INLINE void SwapchainKHR::releaseFullScreenExclusiveModeEXT() const @@ -19742,7 +19928,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkReleaseFullScreenExclusiveModeEXT( static_cast( m_device ), static_cast( m_swapchain ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::releaseFullScreenExclusiveModeEXT" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::releaseFullScreenExclusiveModeEXT" ); } VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR @@ -19756,7 +19942,7 @@ namespace VULKAN_HPP_NAMESPACE getDispatcher()->vkGetDeviceGroupSurfacePresentModes2EXT( static_cast( m_device ), reinterpret_cast( &surfaceInfo ), reinterpret_cast( &modes ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupSurfacePresentModes2EXT" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupSurfacePresentModes2EXT" ); return modes; } @@ -20039,9 +20225,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkDeferredOperationJoinKHR( static_cast( m_device ), static_cast( m_operation ) ) ); - resultCheck( result, - VULKAN_HPP_NAMESPACE_STRING "::DeferredOperationKHR::join", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eThreadDoneKHR, VULKAN_HPP_NAMESPACE::Result::eThreadIdleKHR } ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( + result, + VULKAN_HPP_NAMESPACE_STRING "::DeferredOperationKHR::join", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eThreadDoneKHR, VULKAN_HPP_NAMESPACE::Result::eThreadIdleKHR } ); return static_cast( result ); } @@ -20071,7 +20258,7 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( properties.data() ) ) ); } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutablePropertiesKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutablePropertiesKHR" ); VULKAN_HPP_ASSERT( executableCount <= properties.size() ); if ( executableCount < properties.size() ) { @@ -20103,7 +20290,7 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( statistics.data() ) ) ); } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableStatisticsKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableStatisticsKHR" ); VULKAN_HPP_ASSERT( statisticCount <= statistics.size() ); if ( statisticCount < statistics.size() ) { @@ -20138,7 +20325,7 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( internalRepresentations.data() ) ) ); } } while ( result == 
VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableInternalRepresentationsKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableInternalRepresentationsKHR" ); VULKAN_HPP_ASSERT( internalRepresentationCount <= internalRepresentations.size() ); if ( internalRepresentationCount < internalRepresentations.size() ) { @@ -20155,7 +20342,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCopyMemoryToImageEXT( static_cast( m_device ), reinterpret_cast( ©MemoryToImageInfo ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::copyMemoryToImageEXT" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::copyMemoryToImageEXT" ); } VULKAN_HPP_INLINE void Device::copyImageToMemoryEXT( const VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfoEXT & copyImageToMemoryInfo ) const @@ -20164,7 +20351,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCopyImageToMemoryEXT( static_cast( m_device ), reinterpret_cast( ©ImageToMemoryInfo ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::copyImageToMemoryEXT" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::copyImageToMemoryEXT" ); } VULKAN_HPP_INLINE void Device::copyImageToImageEXT( const VULKAN_HPP_NAMESPACE::CopyImageToImageInfoEXT & copyImageToImageInfo ) const @@ -20173,7 +20360,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCopyImageToImageEXT( static_cast( m_device ), reinterpret_cast( ©ImageToImageInfo ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::copyImageToImageEXT" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::copyImageToImageEXT" ); } VULKAN_HPP_INLINE void Device::transitionImageLayoutEXT( @@ -20183,7 +20370,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkTransitionImageLayoutEXT( static_cast( m_device ), transitions.size(), reinterpret_cast( transitions.data() ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::transitionImageLayoutEXT" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::transitionImageLayoutEXT" ); } VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR @@ -20229,7 +20416,7 @@ namespace VULKAN_HPP_NAMESPACE void * pData; VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkMapMemory2KHR( static_cast( m_device ), reinterpret_cast( &memoryMapInfo ), &pData ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::mapMemory2KHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::mapMemory2KHR" ); return pData; } @@ -20240,7 +20427,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkUnmapMemory2KHR( static_cast( m_device ), reinterpret_cast( &memoryUnmapInfo ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::unmapMemory2KHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::unmapMemory2KHR" ); } //=== VK_EXT_swapchain_maintenance1 === @@ -20251,7 +20438,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result 
= static_cast( getDispatcher()->vkReleaseSwapchainImagesEXT( static_cast( m_device ), reinterpret_cast( &releaseInfo ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::releaseSwapchainImagesEXT" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::releaseSwapchainImagesEXT" ); } //=== VK_NV_device_generated_commands === @@ -20363,7 +20550,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkAcquireDrmDisplayEXT( static_cast( m_physicalDevice ), drmFd, static_cast( display ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::acquireDrmDisplayEXT" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::acquireDrmDisplayEXT" ); } VULKAN_HPP_NODISCARD @@ -20432,7 +20619,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkSetPrivateDataEXT( static_cast( m_device ), static_cast( objectType_ ), objectHandle, static_cast( privateDataSlot ), data ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setPrivateDataEXT" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setPrivateDataEXT" ); } VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t Device::getPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_, @@ -20461,7 +20648,7 @@ namespace VULKAN_HPP_NAMESPACE static_cast( m_physicalDevice ), reinterpret_cast( &qualityLevelInfo ), reinterpret_cast( &qualityLevelProperties ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoEncodeQualityLevelPropertiesKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoEncodeQualityLevelPropertiesKHR" ); return qualityLevelProperties; } @@ -20480,7 +20667,7 @@ namespace VULKAN_HPP_NAMESPACE static_cast( m_physicalDevice ), reinterpret_cast( &qualityLevelInfo ), reinterpret_cast( &qualityLevelProperties ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoEncodeQualityLevelPropertiesKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoEncodeQualityLevelPropertiesKHR" ); return structureChain; } @@ -20515,7 +20702,7 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( data.data() ) ) ); } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getEncodedVideoSessionParametersKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getEncodedVideoSessionParametersKHR" ); return data_; } @@ -20552,7 +20739,7 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( data.data() ) ) ); } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getEncodedVideoSessionParametersKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getEncodedVideoSessionParametersKHR" ); return data_; } @@ -20609,7 +20796,7 @@ namespace VULKAN_HPP_NAMESPACE static_cast( m_device ), static_cast( m_module ), &cacheSize, reinterpret_cast( cacheData.data() ) ) ); } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::CudaModuleNV::getCache" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING 
"::CudaModuleNV::getCache" ); VULKAN_HPP_ASSERT( cacheSize <= cacheData.size() ); if ( cacheSize < cacheData.size() ) { @@ -20743,7 +20930,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkQueueSubmit2KHR( static_cast( m_queue ), submits.size(), reinterpret_cast( submits.data() ), static_cast( fence ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::submit2KHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::submit2KHR" ); } VULKAN_HPP_INLINE void CommandBuffer::writeBufferMarker2AMD( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage, @@ -20889,7 +21076,7 @@ namespace VULKAN_HPP_NAMESPACE DataType data; VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkGetBufferOpaqueCaptureDescriptorDataEXT( static_cast( m_device ), reinterpret_cast( &info ), &data ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getBufferOpaqueCaptureDescriptorDataEXT" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getBufferOpaqueCaptureDescriptorDataEXT" ); return data; } @@ -20904,7 +21091,7 @@ namespace VULKAN_HPP_NAMESPACE DataType data; VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkGetImageOpaqueCaptureDescriptorDataEXT( static_cast( m_device ), reinterpret_cast( &info ), &data ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getImageOpaqueCaptureDescriptorDataEXT" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getImageOpaqueCaptureDescriptorDataEXT" ); return data; } @@ -20919,7 +21106,7 @@ namespace VULKAN_HPP_NAMESPACE DataType data; VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkGetImageViewOpaqueCaptureDescriptorDataEXT( static_cast( m_device ), reinterpret_cast( &info ), &data ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getImageViewOpaqueCaptureDescriptorDataEXT" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getImageViewOpaqueCaptureDescriptorDataEXT" ); return data; } @@ -20934,7 +21121,7 @@ namespace VULKAN_HPP_NAMESPACE DataType data; VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkGetSamplerOpaqueCaptureDescriptorDataEXT( static_cast( m_device ), reinterpret_cast( &info ), &data ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSamplerOpaqueCaptureDescriptorDataEXT" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSamplerOpaqueCaptureDescriptorDataEXT" ); return data; } @@ -20950,7 +21137,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT( static_cast( m_device ), reinterpret_cast( &info ), &data ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureOpaqueCaptureDescriptorDataEXT" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureOpaqueCaptureDescriptorDataEXT" ); return data; } @@ -21059,50 +21246,15 @@ namespace VULKAN_HPP_NAMESPACE } //=== VK_EXT_device_fault === - - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::pair - Device::getFaultInfoEXT() const + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFaultInfoEXT( VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT * pFaultCounts, + 
VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT * pFaultInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceFaultInfoEXT && "Function requires " ); - - std::pair data_; - VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT & faultCounts = data_.first; - VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT & faultInfo = data_.second; - VULKAN_HPP_NAMESPACE::Result result; - do - { - result = static_cast( - getDispatcher()->vkGetDeviceFaultInfoEXT( m_device, reinterpret_cast( &faultCounts ), nullptr ) ); - if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) - { - std::free( faultInfo.pAddressInfos ); - if ( faultCounts.addressInfoCount ) - { - faultInfo.pAddressInfos = reinterpret_cast( - std::malloc( faultCounts.addressInfoCount * sizeof( VULKAN_HPP_NAMESPACE::DeviceFaultAddressInfoEXT ) ) ); - } - std::free( faultInfo.pVendorInfos ); - if ( faultCounts.vendorInfoCount ) - { - faultInfo.pVendorInfos = reinterpret_cast( - std::malloc( faultCounts.vendorInfoCount * sizeof( VULKAN_HPP_NAMESPACE::DeviceFaultVendorInfoEXT ) ) ); - } - std::free( faultInfo.pVendorBinaryData ); - if ( faultCounts.vendorBinarySize ) - { - faultInfo.pVendorBinaryData = std::malloc( faultCounts.vendorBinarySize ); - } - result = static_cast( getDispatcher()->vkGetDeviceFaultInfoEXT( - m_device, reinterpret_cast( &faultCounts ), reinterpret_cast( &faultInfo ) ) ); - } - } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( result, - VULKAN_HPP_NAMESPACE_STRING "::Device::getFaultInfoEXT", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncomplete } ); - - return data_; + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetDeviceFaultInfoEXT( + m_device, reinterpret_cast( pFaultCounts ), reinterpret_cast( pFaultInfo ) ) ); } - # if defined( VK_USE_PLATFORM_WIN32_KHR ) //=== VK_NV_acquire_winrt_display === @@ -21112,7 +21264,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkAcquireWinrtDisplayNV( static_cast( m_physicalDevice ), static_cast( m_display ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::DisplayKHR::acquireWinrtNV" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::DisplayKHR::acquireWinrtNV" ); } VULKAN_HPP_NODISCARD @@ -21203,7 +21355,7 @@ namespace VULKAN_HPP_NAMESPACE zx_handle_t zirconHandle; VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkGetMemoryZirconHandleFUCHSIA( static_cast( m_device ), reinterpret_cast( &getZirconHandleInfo ), &zirconHandle ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryZirconHandleFUCHSIA" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryZirconHandleFUCHSIA" ); return zirconHandle; } @@ -21220,7 +21372,7 @@ namespace VULKAN_HPP_NAMESPACE static_cast( handleType ), zirconHandle, reinterpret_cast( &memoryZirconHandleProperties ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryZirconHandlePropertiesFUCHSIA" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryZirconHandlePropertiesFUCHSIA" ); return memoryZirconHandleProperties; } @@ -21237,7 +21389,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkImportSemaphoreZirconHandleFUCHSIA( static_cast( m_device ), reinterpret_cast( &importSemaphoreZirconHandleInfo ) ) ); - 
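// Illustrative sketch, not part of the vendored diff: the VK_EXT_device_fault hunk above removes the
// RAII convenience overload that returned std::pair<DeviceFaultCountsEXT, DeviceFaultInfoEXT> and
// allocated the address/vendor arrays internally, replacing it with a thin pointer-based wrapper over
// vkGetDeviceFaultInfoEXT. The caller now drives the usual two-call pattern itself. A rough fragment
// under that assumption (`device` is a placeholder vk::Device with the extension enabled, and the
// dispatcher argument is assumed to keep its usual default):
vk::DeviceFaultCountsEXT faultCounts{};
vk::Result r = device.getFaultInfoEXT( &faultCounts, nullptr );               // first call: query the counts only
std::vector<vk::DeviceFaultAddressInfoEXT> addressInfos( faultCounts.addressInfoCount );
std::vector<vk::DeviceFaultVendorInfoEXT>  vendorInfos( faultCounts.vendorInfoCount );
std::vector<uint8_t>                       vendorBinary( static_cast<size_t>( faultCounts.vendorBinarySize ) );
vk::DeviceFaultInfoEXT faultInfo{};
faultInfo.pAddressInfos     = addressInfos.data();
faultInfo.pVendorInfos      = vendorInfos.data();
faultInfo.pVendorBinaryData = vendorBinary.data();
r = device.getFaultInfoEXT( &faultCounts, &faultInfo );                       // second call: fill the arrays (eIncomplete means counts changed in between)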
resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreZirconHandleFUCHSIA" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreZirconHandleFUCHSIA" ); } VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE zx_handle_t @@ -21249,7 +21401,7 @@ namespace VULKAN_HPP_NAMESPACE zx_handle_t zirconHandle; VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkGetSemaphoreZirconHandleFUCHSIA( static_cast( m_device ), reinterpret_cast( &getZirconHandleInfo ), &zirconHandle ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreZirconHandleFUCHSIA" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreZirconHandleFUCHSIA" ); return zirconHandle; } @@ -21292,7 +21444,7 @@ namespace VULKAN_HPP_NAMESPACE getDispatcher()->vkSetBufferCollectionImageConstraintsFUCHSIA( static_cast( m_device ), static_cast( m_collection ), reinterpret_cast( &imageConstraintsInfo ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::BufferCollectionFUCHSIA::setImageConstraints" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::BufferCollectionFUCHSIA::setImageConstraints" ); } VULKAN_HPP_INLINE void @@ -21305,7 +21457,7 @@ namespace VULKAN_HPP_NAMESPACE getDispatcher()->vkSetBufferCollectionBufferConstraintsFUCHSIA( static_cast( m_device ), static_cast( m_collection ), reinterpret_cast( &bufferConstraintsInfo ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::BufferCollectionFUCHSIA::setBufferConstraints" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::BufferCollectionFUCHSIA::setBufferConstraints" ); } VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA BufferCollectionFUCHSIA::getProperties() const @@ -21318,7 +21470,7 @@ namespace VULKAN_HPP_NAMESPACE getDispatcher()->vkGetBufferCollectionPropertiesFUCHSIA( static_cast( m_device ), static_cast( m_collection ), reinterpret_cast( &properties ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::BufferCollectionFUCHSIA::getProperties" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::BufferCollectionFUCHSIA::getProperties" ); return properties; } @@ -21334,7 +21486,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Extent2D maxWorkgroupSize; VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI( static_cast( m_device ), static_cast( m_renderPass ), reinterpret_cast( &maxWorkgroupSize ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::RenderPass::getSubpassShadingMaxWorkgroupSizeHUAWEI" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::RenderPass::getSubpassShadingMaxWorkgroupSizeHUAWEI" ); return maxWorkgroupSize; } @@ -21369,7 +21521,7 @@ namespace VULKAN_HPP_NAMESPACE getDispatcher()->vkGetMemoryRemoteAddressNV( static_cast( m_device ), reinterpret_cast( &memoryGetRemoteAddressInfo ), reinterpret_cast( &address ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryRemoteAddressNV" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryRemoteAddressNV" ); return address; } @@ -21386,7 +21538,7 @@ namespace VULKAN_HPP_NAMESPACE getDispatcher()->vkGetPipelinePropertiesEXT( static_cast( m_device ), reinterpret_cast( &pipelineInfo ), reinterpret_cast( 
&pipelineProperties ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelinePropertiesEXT" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelinePropertiesEXT" ); return pipelineProperties; } @@ -21575,11 +21727,11 @@ namespace VULKAN_HPP_NAMESPACE static_cast( deferredOperation ), infos.size(), reinterpret_cast( infos.data() ) ) ); - resultCheck( result, - VULKAN_HPP_NAMESPACE_STRING "::Device::buildMicromapsEXT", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, - VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, - VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::buildMicromapsEXT", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, + VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } ); return static_cast( result ); } @@ -21593,11 +21745,11 @@ namespace VULKAN_HPP_NAMESPACE static_cast( getDispatcher()->vkCopyMicromapEXT( static_cast( m_device ), static_cast( deferredOperation ), reinterpret_cast( &info ) ) ); - resultCheck( result, - VULKAN_HPP_NAMESPACE_STRING "::Device::copyMicromapEXT", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, - VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, - VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::copyMicromapEXT", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, + VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } ); return static_cast( result ); } @@ -21612,11 +21764,11 @@ namespace VULKAN_HPP_NAMESPACE getDispatcher()->vkCopyMicromapToMemoryEXT( static_cast( m_device ), static_cast( deferredOperation ), reinterpret_cast( &info ) ) ); - resultCheck( result, - VULKAN_HPP_NAMESPACE_STRING "::Device::copyMicromapToMemoryEXT", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, - VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, - VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::copyMicromapToMemoryEXT", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, + VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } ); return static_cast( result ); } @@ -21631,11 +21783,11 @@ namespace VULKAN_HPP_NAMESPACE getDispatcher()->vkCopyMemoryToMicromapEXT( static_cast( m_device ), static_cast( deferredOperation ), reinterpret_cast( &info ) ) ); - resultCheck( result, - VULKAN_HPP_NAMESPACE_STRING "::Device::copyMemoryToMicromapEXT", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, - VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, - VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, + VULKAN_HPP_NAMESPACE_STRING "::Device::copyMemoryToMicromapEXT", + { VULKAN_HPP_NAMESPACE::Result::eSuccess, + VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, + VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } ); return static_cast( result ); } @@ -21659,7 +21811,7 @@ namespace VULKAN_HPP_NAMESPACE data.size() * sizeof( DataType ), reinterpret_cast( data.data() ), stride ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::writeMicromapsPropertiesEXT" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING 
"::Device::writeMicromapsPropertiesEXT" ); return data; } @@ -21681,7 +21833,7 @@ namespace VULKAN_HPP_NAMESPACE sizeof( DataType ), reinterpret_cast( &data ), stride ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::writeMicromapsPropertyEXT" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::writeMicromapsPropertyEXT" ); return data; } @@ -22368,7 +22520,7 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( imageFormatProperties.data() ) ) ); } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getOpticalFlowImageFormatsNV" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getOpticalFlowImageFormatsNV" ); VULKAN_HPP_ASSERT( formatCount <= imageFormatProperties.size() ); if ( formatCount < imageFormatProperties.size() ) { @@ -22413,7 +22565,7 @@ namespace VULKAN_HPP_NAMESPACE static_cast( bindingPoint ), static_cast( view ), static_cast( layout ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::OpticalFlowSessionNV::bindImage" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::OpticalFlowSessionNV::bindImage" ); } VULKAN_HPP_INLINE void CommandBuffer::opticalFlowExecuteNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session, @@ -22519,6 +22671,15 @@ namespace VULKAN_HPP_NAMESPACE return structureChain; } + //=== VK_AMD_anti_lag === + + VULKAN_HPP_INLINE void Device::antiLagUpdateAMD( const VULKAN_HPP_NAMESPACE::AntiLagDataAMD & data ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkAntiLagUpdateAMD && "Function requires " ); + + getDispatcher()->vkAntiLagUpdateAMD( static_cast( m_device ), reinterpret_cast( &data ) ); + } + //=== VK_EXT_shader_object === VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE @@ -22572,7 +22733,7 @@ namespace VULKAN_HPP_NAMESPACE # endif } - return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::ShaderEXT( *this, *reinterpret_cast( &shader ), allocator ); + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::ShaderEXT( *this, *reinterpret_cast( &shader ), allocator, result ); } VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector ShaderEXT::getBinaryData() const @@ -22593,7 +22754,7 @@ namespace VULKAN_HPP_NAMESPACE static_cast( m_device ), static_cast( m_shader ), &dataSize, reinterpret_cast( data.data() ) ) ); } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::ShaderEXT::getBinaryData" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::ShaderEXT::getBinaryData" ); VULKAN_HPP_ASSERT( dataSize <= data.size() ); if ( dataSize < data.size() ) { @@ -22621,6 +22782,125 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( shaders.data() ) ); } + //=== VK_KHR_pipeline_binary === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType>::Type + Device::createPipelineBinariesKHR( VULKAN_HPP_NAMESPACE::PipelineBinaryCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + { + std::vector pipelineBinaries; + VULKAN_HPP_NAMESPACE::PipelineBinaryHandlesInfoKHR binaries; + VULKAN_HPP_NAMESPACE::Result result; + if ( createInfo.pKeysAndDataInfo ) + { + VULKAN_HPP_ASSERT( !createInfo.pipeline && !createInfo.pPipelineCreateInfo ); + pipelineBinaries.resize( createInfo.pKeysAndDataInfo->binaryCount ); + binaries.pipelineBinaryCount = 
createInfo.pKeysAndDataInfo->binaryCount; + binaries.pPipelineBinaries = pipelineBinaries.data(); + result = static_cast( getDispatcher()->vkCreatePipelineBinariesKHR( + static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &binaries ) ) ); + } + else + { + VULKAN_HPP_ASSERT( !createInfo.pipeline ^ !createInfo.pPipelineCreateInfo ); + result = static_cast( getDispatcher()->vkCreatePipelineBinariesKHR( + static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &binaries ) ) ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + pipelineBinaries.resize( binaries.pipelineBinaryCount ); + binaries.pPipelineBinaries = pipelineBinaries.data(); + result = static_cast( getDispatcher()->vkCreatePipelineBinariesKHR( + static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &binaries ) ) ); + } + } + + if ( ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) && ( result != VULKAN_HPP_NAMESPACE::Result::eIncomplete ) && + ( result != VULKAN_HPP_NAMESPACE::Result::ePipelineBinaryMissingKHR ) ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createPipelineBinariesKHR" ); +# endif + } + + std::vector pipelineBinariesRAII; + pipelineBinariesRAII.reserve( pipelineBinaries.size() ); + for ( auto & pipelineBinary : pipelineBinaries ) + { + pipelineBinariesRAII.emplace_back( *this, *reinterpret_cast( &pipelineBinary ), allocator, result ); + } + return pipelineBinariesRAII; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PipelineBinaryKeyKHR + Device::getPipelineKeyKHR( Optional pipelineCreateInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPipelineKeyKHR && "Function requires " ); + + VULKAN_HPP_NAMESPACE::PipelineBinaryKeyKHR pipelineKey; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkGetPipelineKeyKHR( + static_cast( m_device ), + reinterpret_cast( static_cast( pipelineCreateInfo ) ), + reinterpret_cast( &pipelineKey ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineKeyKHR" ); + + return pipelineKey; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::pair> + Device::getPipelineBinaryDataKHR( const VULKAN_HPP_NAMESPACE::PipelineBinaryDataInfoKHR & info ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPipelineBinaryDataKHR && "Function requires " ); + + std::pair> data_; + VULKAN_HPP_NAMESPACE::PipelineBinaryKeyKHR & pipelineBinaryKey = data_.first; + std::vector & pipelineBinaryData = data_.second; + size_t pipelineBinaryDataSize; + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkGetPipelineBinaryDataKHR( static_cast( m_device ), + reinterpret_cast( &info ), + reinterpret_cast( &pipelineBinaryKey ), + &pipelineBinaryDataSize, + nullptr ) ); + if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { + pipelineBinaryData.resize( pipelineBinaryDataSize ); + result = static_cast( + getDispatcher()->vkGetPipelineBinaryDataKHR( static_cast( m_device ), + reinterpret_cast( &info ), + reinterpret_cast( &pipelineBinaryKey ), + &pipelineBinaryDataSize, + reinterpret_cast( pipelineBinaryData.data() ) ) ); + } + + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING 
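// Illustrative sketch, not part of the vendored diff: the VK_KHR_pipeline_binary entry points added
// around this hunk (createPipelineBinariesKHR, getPipelineKeyKHR, getPipelineBinaryDataKHR,
// releaseCapturedPipelineDataKHR) follow the usual RAII pattern. A rough usage fragment, assuming the
// extension is enabled, exceptions are enabled, `device` is a vk::raii::Device, and `pipeline` is a
// vk::raii::Pipeline created with the extension's capture-data flag; struct member names follow the
// extension spec:
vk::PipelineBinaryCreateInfoKHR binaryCreateInfo{};
binaryCreateInfo.pipeline = *pipeline;                                        // create binaries from a live pipeline
std::vector<vk::raii::PipelineBinaryKHR> binaries = device.createPipelineBinariesKHR( binaryCreateInfo );
for ( auto const & binary : binaries )
{
  vk::PipelineBinaryDataInfoKHR dataInfo{};
  dataInfo.pipelineBinary = *binary;
  auto [key, bytes] = device.getPipelineBinaryDataKHR( dataInfo );            // key + raw blob, e.g. for an on-disk cache
}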
"::Device::getPipelineBinaryDataKHR" ); + + return data_; + } + + VULKAN_HPP_INLINE void + Device::releaseCapturedPipelineDataKHR( const VULKAN_HPP_NAMESPACE::ReleaseCapturedPipelineDataInfoKHR & info, + Optional allocator ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkReleaseCapturedPipelineDataKHR && "Function requires " ); + + getDispatcher()->vkReleaseCapturedPipelineDataKHR( + static_cast( m_device ), + reinterpret_cast( &info ), + reinterpret_cast( static_cast( allocator ) ) ); + } + //=== VK_QCOM_tile_properties === VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector Framebuffer::getTilePropertiesQCOM() const @@ -22676,7 +22956,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkSetLatencySleepModeNV( static_cast( m_device ), static_cast( m_swapchain ), reinterpret_cast( &sleepModeInfo ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::setLatencySleepModeNV" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::setLatencySleepModeNV" ); } VULKAN_HPP_INLINE void SwapchainKHR::latencySleepNV( const VULKAN_HPP_NAMESPACE::LatencySleepInfoNV & sleepInfo ) const VULKAN_HPP_NOEXCEPT @@ -22696,15 +22976,20 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &latencyMarkerInfo ) ); } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::GetLatencyMarkerInfoNV SwapchainKHR::getLatencyTimingsNV() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector SwapchainKHR::getLatencyTimingsNV() const { VULKAN_HPP_ASSERT( getDispatcher()->vkGetLatencyTimingsNV && "Function requires " ); - VULKAN_HPP_NAMESPACE::GetLatencyMarkerInfoNV latencyMarkerInfo; + std::vector timings; + VULKAN_HPP_NAMESPACE::GetLatencyMarkerInfoNV latencyMarkerInfo; + getDispatcher()->vkGetLatencyTimingsNV( + static_cast( m_device ), static_cast( m_swapchain ), reinterpret_cast( &latencyMarkerInfo ) ); + timings.resize( latencyMarkerInfo.timingCount ); + latencyMarkerInfo.pTimings = timings.data(); getDispatcher()->vkGetLatencyTimingsNV( static_cast( m_device ), static_cast( m_swapchain ), reinterpret_cast( &latencyMarkerInfo ) ); - return latencyMarkerInfo; + return timings; } VULKAN_HPP_INLINE void Queue::notifyOutOfBandNV( const VULKAN_HPP_NAMESPACE::OutOfBandQueueTypeInfoNV & queueTypeInfo ) const VULKAN_HPP_NOEXCEPT @@ -22736,7 +23021,7 @@ namespace VULKAN_HPP_NAMESPACE static_cast( m_physicalDevice ), &propertyCount, reinterpret_cast( properties.data() ) ) ); } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixPropertiesKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixPropertiesKHR" ); VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); if ( propertyCount < properties.size() ) { @@ -22768,7 +23053,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::ScreenBufferPropertiesQNX properties; VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkGetScreenBufferPropertiesQNX( static_cast( m_device ), &buffer, reinterpret_cast( &properties ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getScreenBufferPropertiesQNX" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getScreenBufferPropertiesQNX" ); return properties; } @@ -22784,7 +23069,7 @@ namespace VULKAN_HPP_NAMESPACE 
VULKAN_HPP_NAMESPACE::ScreenBufferPropertiesQNX & properties = structureChain.template get(); VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkGetScreenBufferPropertiesQNX( static_cast( m_device ), &buffer, reinterpret_cast( &properties ) ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getScreenBufferPropertiesQNX" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getScreenBufferPropertiesQNX" ); return structureChain; } @@ -22822,7 +23107,7 @@ namespace VULKAN_HPP_NAMESPACE static_cast( m_physicalDevice ), &timeDomainCount, reinterpret_cast( timeDomains.data() ) ) ); } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCalibrateableTimeDomainsKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCalibrateableTimeDomainsKHR" ); VULKAN_HPP_ASSERT( timeDomainCount <= timeDomains.size() ); if ( timeDomainCount < timeDomains.size() ) { @@ -22846,7 +23131,7 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( timestampInfos.data() ), timestamps.data(), &maxDeviation ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampsKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampsKHR" ); return data_; } @@ -22862,7 +23147,7 @@ namespace VULKAN_HPP_NAMESPACE uint64_t & maxDeviation = data_.second; VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkGetCalibratedTimestampsKHR( static_cast( m_device ), 1, reinterpret_cast( ×tampInfo ), ×tamp, &maxDeviation ) ); - resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampKHR" ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampKHR" ); return data_; } diff --git a/third_party/vulkan/vulkan_shared.hpp b/third_party/vulkan/vulkan_shared.hpp index 8b2697a..1bff517 100644 --- a/third_party/vulkan/vulkan_shared.hpp +++ b/third_party/vulkan/vulkan_shared.hpp @@ -52,6 +52,28 @@ namespace VULKAN_HPP_NAMESPACE { }; + template + struct HasPoolType : std::false_type + { + }; + + template + struct HasPoolType::deleter::PoolTypeExport() )> : std::true_type + { + }; + + template + struct GetPoolType + { + using type = NoDestructor; + }; + + template + struct GetPoolType::value>::type> + { + using type = typename SharedHandleTraits::deleter::PoolTypeExport; + }; + //===================================================================================================================== template @@ -257,12 +279,23 @@ namespace VULKAN_HPP_NAMESPACE public: SharedHandle() = default; - template ::value>::type> + template ::value && !HasPoolType::value>::type> explicit SharedHandle( HandleType handle, SharedHandle> parent, DeleterType deleter = DeleterType() ) VULKAN_HPP_NOEXCEPT : BaseType( handle, std::move( parent ), std::move( deleter ) ) { } + template ::value && HasPoolType::value>::type> + explicit SharedHandle( HandleType handle, + SharedHandle> parent, + SharedHandle::type> pool, + const Dispatcher & dispatch VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) VULKAN_HPP_NOEXCEPT + : BaseType( handle, std::move( parent ), DeleterType{ std::move( pool ), dispatch } ) + { + } + template ::value>::type> explicit SharedHandle( HandleType handle, DeleterType deleter = DeleterType() ) VULKAN_HPP_NOEXCEPT : BaseType( handle, std::move( deleter ) ) { @@ 
-390,6 +423,8 @@ namespace VULKAN_HPP_NAMESPACE public: using DestructorType = typename SharedHandleTraits::DestructorType; + using PoolTypeExport = PoolType; + template using ReturnType = decltype( std::declval().free( PoolType(), 0u, nullptr, Dispatcher() ) ); @@ -409,7 +444,7 @@ namespace VULKAN_HPP_NAMESPACE public: void destroy( DestructorType parent, HandleType handle ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( m_destroy && m_dispatch ); + VULKAN_HPP_ASSERT( m_destroy && m_dispatch && m_pool ); ( parent.*m_destroy )( m_pool.get(), 1u, &handle, *m_dispatch ); } @@ -925,6 +960,17 @@ namespace VULKAN_HPP_NAMESPACE using SharedShaderEXT = SharedHandle; + //=== VK_KHR_pipeline_binary === + template <> + class SharedHandleTraits + { + public: + using DestructorType = Device; + using deleter = ObjectDestroyShared; + }; + + using SharedPipelineBinaryKHR = SharedHandle; + enum class SwapchainOwns { no, diff --git a/third_party/vulkan/vulkan_static_assertions.hpp b/third_party/vulkan/vulkan_static_assertions.hpp index 693977c..1d0e054 100644 --- a/third_party/vulkan/vulkan_static_assertions.hpp +++ b/third_party/vulkan/vulkan_static_assertions.hpp @@ -105,6 +105,7 @@ VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "Instance is not copy_constructible!" ); VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "Instance is not nothrow_move_constructible!" ); VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::InstanceCreateInfo ) == sizeof( VkInstanceCreateInfo ), "struct and wrapper have different size!" ); @@ -121,6 +122,7 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "MemoryType is not nothrow_move_constructible!" ); VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevice ) == sizeof( VkPhysicalDevice ), "handle and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_copy_constructible::value, "PhysicalDevice is not copy_constructible!" ); VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "PhysicalDevice is not nothrow_move_constructible!" ); @@ -160,6 +162,7 @@ VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "Device is not copy_constructible!" ); VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "Device is not nothrow_move_constructible!" ); VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceCreateInfo ) == sizeof( VkDeviceCreateInfo ), "struct and wrapper have different size!" ); @@ -184,6 +187,7 @@ VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "Queue is not copy_constructible!" ); VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "Queue is not nothrow_move_constructible!" ); VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubmitInfo ) == sizeof( VkSubmitInfo ), "struct and wrapper have different size!" ); @@ -201,6 +205,7 @@ VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "DeviceMemory is not copy_constructible!" ); VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "DeviceMemory is not nothrow_move_constructible!" ); VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryRequirements ) == sizeof( VkMemoryRequirements ), "struct and wrapper have different size!" ); @@ -260,6 +265,7 @@ VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "Fence is not copy_constructible!" ); VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "Fence is not nothrow_move_constructible!" 
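// Illustrative sketch, not part of the vendored diff: the vulkan_shared.hpp hunks above add the
// HasPoolType/GetPoolType traits and a SharedHandle constructor that also takes the owning pool, so
// pool-allocated handles can keep both their pool and their device alive. A rough fragment under that
// assumption, with `device`, `pool` and `set` standing in for existing vk::Device, vk::DescriptorPool
// and vk::DescriptorSet handles and the default dispatcher in use:
vk::SharedDevice         sharedDevice( device );
vk::SharedDescriptorPool sharedPool( pool, sharedDevice );
vk::SharedDescriptorSet  sharedSet( set, sharedDevice, sharedPool );          // the pool now outlives the set it allocated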
); VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::FenceCreateInfo ) == sizeof( VkFenceCreateInfo ), "struct and wrapper have different size!" ); @@ -268,6 +274,7 @@ VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "Semaphore is not copy_constructible!" ); VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "Semaphore is not nothrow_move_constructible!" ); VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo ) == sizeof( VkSemaphoreCreateInfo ), "struct and wrapper have different size!" ); @@ -276,6 +283,7 @@ VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "Event is not copy_constructible!" ); VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "Event is not nothrow_move_constructible!" ); VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::EventCreateInfo ) == sizeof( VkEventCreateInfo ), "struct and wrapper have different size!" ); @@ -284,6 +292,7 @@ VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "QueryPool is not copy_constructible!" ); VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "QueryPool is not nothrow_move_constructible!" ); VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo ) == sizeof( VkQueryPoolCreateInfo ), "struct and wrapper have different size!" ); @@ -292,6 +301,7 @@ VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "Buffer is not copy_constructible!" ); VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "Buffer is not nothrow_move_constructible!" ); VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferCreateInfo ) == sizeof( VkBufferCreateInfo ), "struct and wrapper have different size!" ); @@ -300,6 +310,7 @@ VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "BufferView is not copy_constructible!" ); VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "BufferView is not nothrow_move_constructible!" ); VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferViewCreateInfo ) == sizeof( VkBufferViewCreateInfo ), "struct and wrapper have different size!" ); @@ -308,6 +319,7 @@ VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "Image is not copy_constructible!" ); VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "Image is not nothrow_move_constructible!" ); VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageCreateInfo ) == sizeof( VkImageCreateInfo ), "struct and wrapper have different size!" ); @@ -332,6 +344,7 @@ VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "ImageView is not copy_constructible!" ); VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "ImageView is not nothrow_move_constructible!" ); VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageViewCreateInfo ) == sizeof( VkImageViewCreateInfo ), "struct and wrapper have different size!" ); @@ -340,6 +353,7 @@ VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "ShaderModule is not copy_constructible!" ); VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "ShaderModule is not nothrow_move_constructible!" ); VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo ) == sizeof( VkShaderModuleCreateInfo ), @@ -349,6 +363,7 @@ VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "PipelineCache is not copy_constructible!" 
); VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "PipelineCache is not nothrow_move_constructible!" ); VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo ) == sizeof( VkPipelineCacheCreateInfo ), @@ -370,6 +385,7 @@ VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "Pipeline is not copy_constructible!" ); VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "Pipeline is not nothrow_move_constructible!" ); VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState ) == sizeof( VkPipelineColorBlendAttachmentState ), @@ -477,6 +493,7 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "Viewport is not nothrow_move_constructible!" ); VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineLayout ) == sizeof( VkPipelineLayout ), "handle and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_copy_constructible::value, "PipelineLayout is not copy_constructible!" ); VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "PipelineLayout is not nothrow_move_constructible!" ); @@ -492,6 +509,7 @@ VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "Sampler is not copy_constructible!" ); VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "Sampler is not nothrow_move_constructible!" ); VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SamplerCreateInfo ) == sizeof( VkSamplerCreateInfo ), "struct and wrapper have different size!" ); @@ -515,6 +533,7 @@ VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "DescriptorPool is not copy_constructible!" ); VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "DescriptorPool is not nothrow_move_constructible!" ); @@ -530,6 +549,7 @@ VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "DescriptorSet is not copy_constructible!" ); VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "DescriptorSet is not nothrow_move_constructible!" ); VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo ) == sizeof( VkDescriptorSetAllocateInfo ), @@ -539,6 +559,7 @@ VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "DescriptorSetLayout is not copy_constructible!" ); VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "DescriptorSetLayout is not nothrow_move_constructible!" ); @@ -571,6 +592,7 @@ VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "Framebuffer is not copy_constructible!" ); VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "Framebuffer is not nothrow_move_constructible!" ); VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::FramebufferCreateInfo ) == sizeof( VkFramebufferCreateInfo ), @@ -580,6 +602,7 @@ VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "RenderPass is not copy_constructible!" ); VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "RenderPass is not nothrow_move_constructible!" ); VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo ) == sizeof( VkRenderPassCreateInfo ), "struct and wrapper have different size!" ); @@ -598,6 +621,7 @@ VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "CommandPool is not copy_constructible!" ); VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "CommandPool is not nothrow_move_constructible!" 
); VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo ) == sizeof( VkCommandPoolCreateInfo ), @@ -607,6 +631,7 @@ VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "CommandBuffer is not copy_constructible!" ); VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "CommandBuffer is not nothrow_move_constructible!" ); VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo ) == sizeof( VkCommandBufferAllocateInfo ), @@ -983,11 +1008,15 @@ VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SamplerYcbcrConversion is not copy_constructible!" ); VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "SamplerYcbcrConversion is not nothrow_move_constructible!" ); VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate ) == sizeof( VkDescriptorUpdateTemplate ), "handle and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_copy_constructible::value, + "DescriptorUpdateTemplate is not copy_constructible!" ); VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "DescriptorUpdateTemplate is not nothrow_move_constructible!" ); @@ -1516,6 +1545,7 @@ VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "PrivateDataSlot is not copy_constructible!" ); VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "PrivateDataSlot is not nothrow_move_constructible!" ); @@ -1784,6 +1814,7 @@ VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "SurfaceKHR is not copy_constructible!" ); VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "SurfaceKHR is not nothrow_move_constructible!" ); VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR ) == sizeof( VkSurfaceCapabilitiesKHR ), @@ -1806,6 +1837,7 @@ VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "SwapchainKHR is not copy_constructible!" ); VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "SwapchainKHR is not nothrow_move_constructible!" ); VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PresentInfoKHR ) == sizeof( VkPresentInfoKHR ), "struct and wrapper have different size!" ); @@ -1852,6 +1884,7 @@ VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "DisplayKHR is not copy_constructible!" ); VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "DisplayKHR is not nothrow_move_constructible!" ); VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR ) == sizeof( VkDisplayModeCreateInfoKHR ), @@ -1861,6 +1894,7 @@ VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "DisplayModeKHR is not copy_constructible!" ); VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "DisplayModeKHR is not nothrow_move_constructible!" ); @@ -1961,6 +1995,8 @@ VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DebugReportCallbackEXT is not copy_constructible!" ); VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "DebugReportCallbackEXT is not nothrow_move_constructible!" ); @@ -2003,11 +2039,14 @@ VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "VideoSessionKHR is not copy_constructible!" ); VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "VideoSessionKHR is not nothrow_move_constructible!" 
); VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR ) == sizeof( VkVideoSessionParametersKHR ), "handle and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_copy_constructible::value, + "VideoSessionParametersKHR is not copy_constructible!" ); VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "VideoSessionParametersKHR is not nothrow_move_constructible!" ); @@ -2184,9 +2223,11 @@ VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "CuModuleNVX is not copy_constructible!" ); VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "CuModuleNVX is not nothrow_move_constructible!" ); VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CuFunctionNVX ) == sizeof( VkCuFunctionNVX ), "handle and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_copy_constructible::value, "CuFunctionNVX is not copy_constructible!" ); VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "CuFunctionNVX is not nothrow_move_constructible!" ); VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX ) == sizeof( VkCuModuleCreateInfoNVX ), @@ -3158,6 +3199,8 @@ VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DebugUtilsMessengerEXT is not copy_constructible!" ); VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "DebugUtilsMessengerEXT is not nothrow_move_constructible!" ); @@ -3459,6 +3502,8 @@ VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "AccelerationStructureKHR is not copy_constructible!" ); VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "AccelerationStructureKHR is not nothrow_move_constructible!" ); @@ -3671,6 +3716,7 @@ VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "ValidationCacheEXT is not copy_constructible!" ); VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "ValidationCacheEXT is not nothrow_move_constructible!" ); @@ -3802,6 +3848,8 @@ VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "AccelerationStructureNV is not copy_constructible!" ); VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "AccelerationStructureNV is not nothrow_move_constructible!" ); @@ -4011,16 +4059,6 @@ VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "PhysicalDeviceComputeShaderDerivativesFeaturesNV is not nothrow_move_constructible!" ); - //=== VK_NV_mesh_shader === VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderFeaturesNV ) == sizeof( VkPhysicalDeviceMeshShaderFeaturesNV ), @@ -4147,6 +4185,8 @@ VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PerformanceConfigurationINTEL is not copy_constructible!" ); VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "PerformanceConfigurationINTEL is not nothrow_move_constructible!" ); @@ -4540,6 +4580,7 @@ VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "DeferredOperationKHR is not copy_constructible!" ); VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "DeferredOperationKHR is not nothrow_move_constructible!" ); @@ -4819,6 +4860,8 @@ VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "IndirectCommandsLayoutNV is not copy_constructible!" 
); VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "IndirectCommandsLayoutNV is not nothrow_move_constructible!" ); @@ -5120,9 +5163,11 @@ VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "CudaModuleNV is not copy_constructible!" ); VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "CudaModuleNV is not nothrow_move_constructible!" ); VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CudaFunctionNV ) == sizeof( VkCudaFunctionNV ), "handle and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_copy_constructible::value, "CudaFunctionNV is not copy_constructible!" ); VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "CudaFunctionNV is not nothrow_move_constructible!" ); @@ -5786,6 +5831,8 @@ VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "BufferCollectionFUCHSIA is not copy_constructible!" ); VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "BufferCollectionFUCHSIA is not nothrow_move_constructible!" ); @@ -6102,6 +6149,7 @@ VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "MicromapEXT is not copy_constructible!" ); VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "MicromapEXT is not nothrow_move_constructible!" ); VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceOpacityMicromapFeaturesEXT ) == sizeof( VkPhysicalDeviceOpacityMicromapFeaturesEXT ), @@ -6728,6 +6776,7 @@ VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "OpticalFlowSessionNV is not copy_constructible!" ); VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "OpticalFlowSessionNV is not nothrow_move_constructible!" ); @@ -6847,6 +6896,25 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "BufferUsageFlags2CreateInfoKHR is not nothrow_move_constructible!" ); +//=== VK_AMD_anti_lag === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceAntiLagFeaturesAMD ) == sizeof( VkPhysicalDeviceAntiLagFeaturesAMD ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceAntiLagFeaturesAMD is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AntiLagDataAMD ) == sizeof( VkAntiLagDataAMD ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "AntiLagDataAMD is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AntiLagPresentationInfoAMD ) == sizeof( VkAntiLagPresentationInfoAMD ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "AntiLagPresentationInfoAMD is not nothrow_move_constructible!" ); + //=== VK_KHR_ray_tracing_position_fetch === VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPositionFetchFeaturesKHR ) == @@ -6860,6 +6928,7 @@ VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "ShaderEXT is not copy_constructible!" 
); VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "ShaderEXT is not nothrow_move_constructible!" ); VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderObjectFeaturesEXT ) == sizeof( VkPhysicalDeviceShaderObjectFeaturesEXT ), @@ -6881,6 +6950,89 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "ShaderCreateInfoEXT is not nothrow_move_constructible!" ); +//=== VK_KHR_pipeline_binary === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineBinaryFeaturesKHR ) == sizeof( VkPhysicalDevicePipelineBinaryFeaturesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDevicePipelineBinaryFeaturesKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineBinaryPropertiesKHR ) == sizeof( VkPhysicalDevicePipelineBinaryPropertiesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDevicePipelineBinaryPropertiesKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DevicePipelineBinaryInternalCacheControlKHR ) == + sizeof( VkDevicePipelineBinaryInternalCacheControlKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DevicePipelineBinaryInternalCacheControlKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineBinaryKHR ) == sizeof( VkPipelineBinaryKHR ), "handle and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_copy_constructible::value, "PipelineBinaryKHR is not copy_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PipelineBinaryKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineBinaryKeyKHR ) == sizeof( VkPipelineBinaryKeyKHR ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PipelineBinaryKeyKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineBinaryDataKHR ) == sizeof( VkPipelineBinaryDataKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PipelineBinaryDataKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineBinaryKeysAndDataKHR ) == sizeof( VkPipelineBinaryKeysAndDataKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PipelineBinaryKeysAndDataKHR is not nothrow_move_constructible!" 
); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineBinaryCreateInfoKHR ) == sizeof( VkPipelineBinaryCreateInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PipelineBinaryCreateInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineBinaryInfoKHR ) == sizeof( VkPipelineBinaryInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PipelineBinaryInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ReleaseCapturedPipelineDataInfoKHR ) == sizeof( VkReleaseCapturedPipelineDataInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ReleaseCapturedPipelineDataInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineBinaryDataInfoKHR ) == sizeof( VkPipelineBinaryDataInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PipelineBinaryDataInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineCreateInfoKHR ) == sizeof( VkPipelineCreateInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PipelineCreateInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineBinaryHandlesInfoKHR ) == sizeof( VkPipelineBinaryHandlesInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PipelineBinaryHandlesInfoKHR is not nothrow_move_constructible!" ); + //=== VK_QCOM_tile_properties === VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceTilePropertiesFeaturesQCOM ) == sizeof( VkPhysicalDeviceTilePropertiesFeaturesQCOM ), @@ -6979,6 +7131,24 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "MutableDescriptorTypeCreateInfoEXT is not nothrow_move_constructible!" ); +//=== VK_EXT_legacy_vertex_attributes === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceLegacyVertexAttributesFeaturesEXT ) == + sizeof( VkPhysicalDeviceLegacyVertexAttributesFeaturesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceLegacyVertexAttributesFeaturesEXT is not nothrow_move_constructible!" 
); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceLegacyVertexAttributesPropertiesEXT ) == + sizeof( VkPhysicalDeviceLegacyVertexAttributesPropertiesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceLegacyVertexAttributesPropertiesEXT is not nothrow_move_constructible!" ); + //=== VK_EXT_layer_settings === VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::LayerSettingsCreateInfoEXT ) == sizeof( VkLayerSettingsCreateInfoEXT ), @@ -7126,6 +7296,24 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM is not nothrow_move_constructible!" ); +//=== VK_KHR_compute_shader_derivatives === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceComputeShaderDerivativesFeaturesKHR ) == + sizeof( VkPhysicalDeviceComputeShaderDerivativesFeaturesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceComputeShaderDerivativesFeaturesKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceComputeShaderDerivativesPropertiesKHR ) == + sizeof( VkPhysicalDeviceComputeShaderDerivativesPropertiesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceComputeShaderDerivativesPropertiesKHR is not nothrow_move_constructible!" ); + //=== VK_KHR_video_decode_av1 === VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeAV1ProfileInfoKHR ) == sizeof( VkVideoDecodeAV1ProfileInfoKHR ), @@ -7483,6 +7671,64 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "PhysicalDeviceRawAccessChainsFeaturesNV is not nothrow_move_constructible!" ); +//=== VK_KHR_shader_relaxed_extended_instruction === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR ) == + sizeof( VkPhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR is not nothrow_move_constructible!" ); + +//=== VK_NV_command_buffer_inheritance === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCommandBufferInheritanceFeaturesNV ) == + sizeof( VkPhysicalDeviceCommandBufferInheritanceFeaturesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceCommandBufferInheritanceFeaturesNV is not nothrow_move_constructible!" ); + +//=== VK_KHR_maintenance7 === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance7FeaturesKHR ) == sizeof( VkPhysicalDeviceMaintenance7FeaturesKHR ), + "struct and wrapper have different size!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceMaintenance7FeaturesKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance7PropertiesKHR ) == sizeof( VkPhysicalDeviceMaintenance7PropertiesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceMaintenance7PropertiesKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceLayeredApiPropertiesListKHR ) == sizeof( VkPhysicalDeviceLayeredApiPropertiesListKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceLayeredApiPropertiesListKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceLayeredApiPropertiesKHR ) == sizeof( VkPhysicalDeviceLayeredApiPropertiesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceLayeredApiPropertiesKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceLayeredApiVulkanPropertiesKHR ) == + sizeof( VkPhysicalDeviceLayeredApiVulkanPropertiesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceLayeredApiVulkanPropertiesKHR is not nothrow_move_constructible!" ); + //=== VK_NV_shader_atomic_float16_vector === VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV ) == @@ -7493,6 +7739,16 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV is not nothrow_move_constructible!" ); +//=== VK_EXT_shader_replicated_composites === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderReplicatedCompositesFeaturesEXT ) == + sizeof( VkPhysicalDeviceShaderReplicatedCompositesFeaturesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceShaderReplicatedCompositesFeaturesEXT is not nothrow_move_constructible!" ); + //=== VK_NV_ray_tracing_validation === VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingValidationFeaturesNV ) == @@ -7503,4 +7759,29 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "PhysicalDeviceRayTracingValidationFeaturesNV is not nothrow_move_constructible!" 
); +//=== VK_MESA_image_alignment_control === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageAlignmentControlFeaturesMESA ) == + sizeof( VkPhysicalDeviceImageAlignmentControlFeaturesMESA ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceImageAlignmentControlFeaturesMESA is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageAlignmentControlPropertiesMESA ) == + sizeof( VkPhysicalDeviceImageAlignmentControlPropertiesMESA ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceImageAlignmentControlPropertiesMESA is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageAlignmentControlCreateInfoMESA ) == sizeof( VkImageAlignmentControlCreateInfoMESA ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ImageAlignmentControlCreateInfoMESA is not nothrow_move_constructible!" ); + #endif diff --git a/third_party/vulkan/vulkan_structs.hpp b/third_party/vulkan/vulkan_structs.hpp index 45d9c2b..5f6391c 100644 --- a/third_party/vulkan/vulkan_structs.hpp +++ b/third_party/vulkan/vulkan_structs.hpp @@ -8,6 +8,9 @@ #ifndef VULKAN_STRUCTS_HPP #define VULKAN_STRUCTS_HPP +// include-what-you-use: make sure, vulkan.hpp is used by code-completers +// IWYU pragma: private; include "vulkan.hpp" + #include // strcmp namespace VULKAN_HPP_NAMESPACE @@ -24,12 +27,12 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR AabbPositionsKHR( float minX_ = {}, float minY_ = {}, float minZ_ = {}, float maxX_ = {}, float maxY_ = {}, float maxZ_ = {} ) VULKAN_HPP_NOEXCEPT - : minX( minX_ ) - , minY( minY_ ) - , minZ( minZ_ ) - , maxX( maxX_ ) - , maxY( maxY_ ) - , maxZ( maxZ_ ) + : minX{ minX_ } + , minY{ minY_ } + , minZ{ minZ_ } + , maxX{ maxX_ } + , maxY{ maxY_ } + , maxZ{ maxZ_ } { } @@ -195,14 +198,14 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR indexData_ = {}, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR transformData_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , vertexFormat( vertexFormat_ ) - , vertexData( vertexData_ ) - , vertexStride( vertexStride_ ) - , maxVertex( maxVertex_ ) - , indexType( indexType_ ) - , indexData( indexData_ ) - , transformData( transformData_ ) + : pNext{ pNext_ } + , vertexFormat{ vertexFormat_ } + , vertexData{ vertexData_ } + , vertexStride{ vertexStride_ } + , maxVertex{ maxVertex_ } + , indexType{ indexType_ } + , indexData{ indexData_ } + , transformData{ transformData_ } { } @@ -336,9 +339,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryAabbsDataKHR( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR data_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize stride_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , data( data_ ) - , stride( stride_ ) + : pNext{ pNext_ } + , data{ data_ } + , stride{ stride_ } 
{ } @@ -428,9 +431,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryInstancesDataKHR( VULKAN_HPP_NAMESPACE::Bool32 arrayOfPointers_ = {}, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR data_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , arrayOfPointers( arrayOfPointers_ ) - , data( data_ ) + : pNext{ pNext_ } + , arrayOfPointers{ arrayOfPointers_ } + , data{ data_ } { } @@ -586,10 +589,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR geometry_ = {}, VULKAN_HPP_NAMESPACE::GeometryFlagsKHR flags_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , geometryType( geometryType_ ) - , geometry( geometry_ ) - , flags( flags_ ) + : pNext{ pNext_ } + , geometryType{ geometryType_ } + , geometry{ geometry_ } + , flags{ flags_ } { } @@ -738,16 +741,16 @@ namespace VULKAN_HPP_NAMESPACE const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR * const * ppGeometries_ = {}, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR scratchData_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , type( type_ ) - , flags( flags_ ) - , mode( mode_ ) - , srcAccelerationStructure( srcAccelerationStructure_ ) - , dstAccelerationStructure( dstAccelerationStructure_ ) - , geometryCount( geometryCount_ ) - , pGeometries( pGeometries_ ) - , ppGeometries( ppGeometries_ ) - , scratchData( scratchData_ ) + : pNext{ pNext_ } + , type{ type_ } + , flags{ flags_ } + , mode{ mode_ } + , srcAccelerationStructure{ srcAccelerationStructure_ } + , dstAccelerationStructure{ dstAccelerationStructure_ } + , geometryCount{ geometryCount_ } + , pGeometries{ pGeometries_ } + , ppGeometries{ ppGeometries_ } + , scratchData{ scratchData_ } { } @@ -954,10 +957,10 @@ namespace VULKAN_HPP_NAMESPACE uint32_t primitiveOffset_ = {}, uint32_t firstVertex_ = {}, uint32_t transformOffset_ = {} ) VULKAN_HPP_NOEXCEPT - : primitiveCount( primitiveCount_ ) - , primitiveOffset( primitiveOffset_ ) - , firstVertex( firstVertex_ ) - , transformOffset( transformOffset_ ) + : primitiveCount{ primitiveCount_ } + , primitiveOffset{ primitiveOffset_ } + , firstVertex{ firstVertex_ } + , transformOffset{ transformOffset_ } { } @@ -1063,10 +1066,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DeviceSize updateScratchSize_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize buildScratchSize_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , accelerationStructureSize( accelerationStructureSize_ ) - , updateScratchSize( updateScratchSize_ ) - , buildScratchSize( buildScratchSize_ ) + : pNext{ pNext_ } + , accelerationStructureSize{ accelerationStructureSize_ } + , updateScratchSize{ updateScratchSize_ } + , buildScratchSize{ buildScratchSize_ } { } @@ -1156,9 +1159,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR AccelerationStructureCaptureDescriptorDataInfoEXT( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure_ = {}, VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructureNV_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , accelerationStructure( accelerationStructure_ ) - , accelerationStructureNV( accelerationStructureNV_ ) + : pNext{ pNext_ } + , accelerationStructure{ accelerationStructure_ } + , accelerationStructureNV{ accelerationStructureNV_ } { } @@ -1275,13 +1278,13 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type_ = 
VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR::eTopLevel, VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , createFlags( createFlags_ ) - , buffer( buffer_ ) - , offset( offset_ ) - , size( size_ ) - , type( type_ ) - , deviceAddress( deviceAddress_ ) + : pNext{ pNext_ } + , createFlags{ createFlags_ } + , buffer{ buffer_ } + , offset{ offset_ } + , size{ size_ } + , type{ type_ } + , deviceAddress{ deviceAddress_ } { } @@ -1431,18 +1434,18 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Buffer transformData_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize transformOffset_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , vertexData( vertexData_ ) - , vertexOffset( vertexOffset_ ) - , vertexCount( vertexCount_ ) - , vertexStride( vertexStride_ ) - , vertexFormat( vertexFormat_ ) - , indexData( indexData_ ) - , indexOffset( indexOffset_ ) - , indexCount( indexCount_ ) - , indexType( indexType_ ) - , transformData( transformData_ ) - , transformOffset( transformOffset_ ) + : pNext{ pNext_ } + , vertexData{ vertexData_ } + , vertexOffset{ vertexOffset_ } + , vertexCount{ vertexCount_ } + , vertexStride{ vertexStride_ } + , vertexFormat{ vertexFormat_ } + , indexData{ indexData_ } + , indexOffset{ indexOffset_ } + , indexCount{ indexCount_ } + , indexType{ indexType_ } + , transformData{ transformData_ } + , transformOffset{ transformOffset_ } { } @@ -1637,11 +1640,11 @@ namespace VULKAN_HPP_NAMESPACE uint32_t stride_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , aabbData( aabbData_ ) - , numAABBs( numAABBs_ ) - , stride( stride_ ) - , offset( offset_ ) + : pNext{ pNext_ } + , aabbData{ aabbData_ } + , numAABBs{ numAABBs_ } + , stride{ stride_ } + , offset{ offset_ } { } @@ -1758,8 +1761,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR GeometryDataNV( VULKAN_HPP_NAMESPACE::GeometryTrianglesNV triangles_ = {}, VULKAN_HPP_NAMESPACE::GeometryAABBNV aabbs_ = {} ) VULKAN_HPP_NOEXCEPT - : triangles( triangles_ ) - , aabbs( aabbs_ ) + : triangles{ triangles_ } + , aabbs{ aabbs_ } { } @@ -1847,10 +1850,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::GeometryDataNV geometry_ = {}, VULKAN_HPP_NAMESPACE::GeometryFlagsKHR flags_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , geometryType( geometryType_ ) - , geometry( geometry_ ) - , flags( flags_ ) + : pNext{ pNext_ } + , geometryType{ geometryType_ } + , geometry{ geometry_ } + , flags{ flags_ } { } @@ -1965,12 +1968,12 @@ namespace VULKAN_HPP_NAMESPACE uint32_t geometryCount_ = {}, const VULKAN_HPP_NAMESPACE::GeometryNV * pGeometries_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , type( type_ ) - , flags( flags_ ) - , instanceCount( instanceCount_ ) - , geometryCount( geometryCount_ ) - , pGeometries( pGeometries_ ) + : pNext{ pNext_ } + , type{ type_ } + , flags{ flags_ } + , instanceCount{ instanceCount_ } + , geometryCount{ geometryCount_ } + , pGeometries{ pGeometries_ } { } @@ -2128,9 +2131,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR AccelerationStructureCreateInfoNV( VULKAN_HPP_NAMESPACE::DeviceSize compactedSize_ = {}, VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV info_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , compactedSize( compactedSize_ ) - , info( 
info_ ) + : pNext{ pNext_ } + , compactedSize{ compactedSize_ } + , info{ info_ } { } @@ -2236,8 +2239,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR AccelerationStructureDeviceAddressInfoKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , accelerationStructure( accelerationStructure_ ) + : pNext{ pNext_ } + , accelerationStructure{ accelerationStructure_ } { } @@ -2334,8 +2337,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR_14 AccelerationStructureGeometryMotionTrianglesDataNV( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR vertexData_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , vertexData( vertexData_ ) + : pNext{ pNext_ } + , vertexData{ vertexData_ } { } @@ -2411,7 +2414,7 @@ namespace VULKAN_HPP_NAMESPACE using NativeType = VkTransformMatrixKHR; #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR_14 TransformMatrixKHR( std::array, 3> const & matrix_ = {} ) VULKAN_HPP_NOEXCEPT : matrix( matrix_ ) {} + VULKAN_HPP_CONSTEXPR_14 TransformMatrixKHR( std::array, 3> const & matrix_ = {} ) VULKAN_HPP_NOEXCEPT : matrix{ matrix_ } {} VULKAN_HPP_CONSTEXPR_14 TransformMatrixKHR( TransformMatrixKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; @@ -2491,12 +2494,12 @@ namespace VULKAN_HPP_NAMESPACE uint32_t instanceShaderBindingTableRecordOffset_ = {}, VULKAN_HPP_NAMESPACE::GeometryInstanceFlagsKHR flags_ = {}, uint64_t accelerationStructureReference_ = {} ) VULKAN_HPP_NOEXCEPT - : transform( transform_ ) - , instanceCustomIndex( instanceCustomIndex_ ) - , mask( mask_ ) - , instanceShaderBindingTableRecordOffset( instanceShaderBindingTableRecordOffset_ ) - , flags( flags_ ) - , accelerationStructureReference( accelerationStructureReference_ ) + : transform{ transform_ } + , instanceCustomIndex{ instanceCustomIndex_ } + , mask{ mask_ } + , instanceShaderBindingTableRecordOffset{ instanceShaderBindingTableRecordOffset_ } + , flags{ flags_ } + , accelerationStructureReference{ accelerationStructureReference_ } { } @@ -2625,13 +2628,13 @@ namespace VULKAN_HPP_NAMESPACE uint32_t instanceShaderBindingTableRecordOffset_ = {}, VULKAN_HPP_NAMESPACE::GeometryInstanceFlagsKHR flags_ = {}, uint64_t accelerationStructureReference_ = {} ) VULKAN_HPP_NOEXCEPT - : transformT0( transformT0_ ) - , transformT1( transformT1_ ) - , instanceCustomIndex( instanceCustomIndex_ ) - , mask( mask_ ) - , instanceShaderBindingTableRecordOffset( instanceShaderBindingTableRecordOffset_ ) - , flags( flags_ ) - , accelerationStructureReference( accelerationStructureReference_ ) + : transformT0{ transformT0_ } + , transformT1{ transformT1_ } + , instanceCustomIndex{ instanceCustomIndex_ } + , mask{ mask_ } + , instanceShaderBindingTableRecordOffset{ instanceShaderBindingTableRecordOffset_ } + , flags{ flags_ } + , accelerationStructureReference{ accelerationStructureReference_ } { } @@ -2770,9 +2773,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV type_ = VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV::eObject, VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , type( type_ ) - , accelerationStructure( accelerationStructure_ ) + : pNext{ pNext_ } + , type{ type_ } + , 
accelerationStructure{ accelerationStructure_ } { } @@ -2882,9 +2885,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR AccelerationStructureMotionInfoNV( uint32_t maxInstances_ = {}, VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInfoFlagsNV flags_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , maxInstances( maxInstances_ ) - , flags( flags_ ) + : pNext{ pNext_ } + , maxInstances{ maxInstances_ } + , flags{ flags_ } { } @@ -3002,22 +3005,22 @@ namespace VULKAN_HPP_NAMESPACE float tx_ = {}, float ty_ = {}, float tz_ = {} ) VULKAN_HPP_NOEXCEPT - : sx( sx_ ) - , a( a_ ) - , b( b_ ) - , pvx( pvx_ ) - , sy( sy_ ) - , c( c_ ) - , pvy( pvy_ ) - , sz( sz_ ) - , pvz( pvz_ ) - , qx( qx_ ) - , qy( qy_ ) - , qz( qz_ ) - , qw( qw_ ) - , tx( tx_ ) - , ty( ty_ ) - , tz( tz_ ) + : sx{ sx_ } + , a{ a_ } + , b{ b_ } + , pvx{ pvx_ } + , sy{ sy_ } + , c{ c_ } + , pvy{ pvy_ } + , sz{ sz_ } + , pvz{ pvz_ } + , qx{ qx_ } + , qy{ qy_ } + , qz{ qz_ } + , qw{ qw_ } + , tx{ tx_ } + , ty{ ty_ } + , tz{ tz_ } { } @@ -3220,13 +3223,13 @@ namespace VULKAN_HPP_NAMESPACE uint32_t instanceShaderBindingTableRecordOffset_ = {}, VULKAN_HPP_NAMESPACE::GeometryInstanceFlagsKHR flags_ = {}, uint64_t accelerationStructureReference_ = {} ) VULKAN_HPP_NOEXCEPT - : transformT0( transformT0_ ) - , transformT1( transformT1_ ) - , instanceCustomIndex( instanceCustomIndex_ ) - , mask( mask_ ) - , instanceShaderBindingTableRecordOffset( instanceShaderBindingTableRecordOffset_ ) - , flags( flags_ ) - , accelerationStructureReference( accelerationStructureReference_ ) + : transformT0{ transformT0_ } + , transformT1{ transformT1_ } + , instanceCustomIndex{ instanceCustomIndex_ } + , mask{ mask_ } + , instanceShaderBindingTableRecordOffset{ instanceShaderBindingTableRecordOffset_ } + , flags{ flags_ } + , accelerationStructureReference{ accelerationStructureReference_ } { } @@ -3426,9 +3429,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceTypeNV type_ = VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceTypeNV::eStatic, VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceFlagsNV flags_ = {}, VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceDataNV data_ = {} ) VULKAN_HPP_NOEXCEPT - : type( type_ ) - , flags( flags_ ) - , data( data_ ) + : type{ type_ } + , flags{ flags_ } + , data{ data_ } { } @@ -3507,9 +3510,9 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR MicromapUsageEXT( uint32_t count_ = {}, uint32_t subdivisionLevel_ = {}, uint32_t format_ = {} ) VULKAN_HPP_NOEXCEPT - : count( count_ ) - , subdivisionLevel( subdivisionLevel_ ) - , format( format_ ) + : count{ count_ } + , subdivisionLevel{ subdivisionLevel_ } + , format{ format_ } { } @@ -3619,23 +3622,23 @@ namespace VULKAN_HPP_NAMESPACE const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * const * ppUsageCounts_ = {}, VULKAN_HPP_NAMESPACE::MicromapEXT micromap_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , displacementBiasAndScaleFormat( displacementBiasAndScaleFormat_ ) - , displacementVectorFormat( displacementVectorFormat_ ) - , displacementBiasAndScaleBuffer( displacementBiasAndScaleBuffer_ ) - , displacementBiasAndScaleStride( displacementBiasAndScaleStride_ ) - , displacementVectorBuffer( displacementVectorBuffer_ ) - , displacementVectorStride( displacementVectorStride_ ) - , displacedMicromapPrimitiveFlags( displacedMicromapPrimitiveFlags_ ) - , displacedMicromapPrimitiveFlagsStride( 
displacedMicromapPrimitiveFlagsStride_ ) - , indexType( indexType_ ) - , indexBuffer( indexBuffer_ ) - , indexStride( indexStride_ ) - , baseTriangle( baseTriangle_ ) - , usageCountsCount( usageCountsCount_ ) - , pUsageCounts( pUsageCounts_ ) - , ppUsageCounts( ppUsageCounts_ ) - , micromap( micromap_ ) + : pNext{ pNext_ } + , displacementBiasAndScaleFormat{ displacementBiasAndScaleFormat_ } + , displacementVectorFormat{ displacementVectorFormat_ } + , displacementBiasAndScaleBuffer{ displacementBiasAndScaleBuffer_ } + , displacementBiasAndScaleStride{ displacementBiasAndScaleStride_ } + , displacementVectorBuffer{ displacementVectorBuffer_ } + , displacementVectorStride{ displacementVectorStride_ } + , displacedMicromapPrimitiveFlags{ displacedMicromapPrimitiveFlags_ } + , displacedMicromapPrimitiveFlagsStride{ displacedMicromapPrimitiveFlagsStride_ } + , indexType{ indexType_ } + , indexBuffer{ indexBuffer_ } + , indexStride{ indexStride_ } + , baseTriangle{ baseTriangle_ } + , usageCountsCount{ usageCountsCount_ } + , pUsageCounts{ pUsageCounts_ } + , ppUsageCounts{ ppUsageCounts_ } + , micromap{ micromap_ } { } @@ -3946,15 +3949,15 @@ namespace VULKAN_HPP_NAMESPACE const VULKAN_HPP_NAMESPACE::MicromapUsageEXT * const * ppUsageCounts_ = {}, VULKAN_HPP_NAMESPACE::MicromapEXT micromap_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , indexType( indexType_ ) - , indexBuffer( indexBuffer_ ) - , indexStride( indexStride_ ) - , baseTriangle( baseTriangle_ ) - , usageCountsCount( usageCountsCount_ ) - , pUsageCounts( pUsageCounts_ ) - , ppUsageCounts( ppUsageCounts_ ) - , micromap( micromap_ ) + : pNext{ pNext_ } + , indexType{ indexType_ } + , indexBuffer{ indexBuffer_ } + , indexStride{ indexStride_ } + , baseTriangle{ baseTriangle_ } + , usageCountsCount{ usageCountsCount_ } + , pUsageCounts{ pUsageCounts_ } + , ppUsageCounts{ ppUsageCounts_ } + , micromap{ micromap_ } { } @@ -4147,8 +4150,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR AccelerationStructureVersionInfoKHR( const uint8_t * pVersionData_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , pVersionData( pVersionData_ ) + : pNext{ pNext_ } + , pVersionData{ pVersionData_ } { } @@ -4248,12 +4251,12 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Fence fence_ = {}, uint32_t deviceMask_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , swapchain( swapchain_ ) - , timeout( timeout_ ) - , semaphore( semaphore_ ) - , fence( fence_ ) - , deviceMask( deviceMask_ ) + : pNext{ pNext_ } + , swapchain{ swapchain_ } + , timeout{ timeout_ } + , semaphore{ semaphore_ } + , fence{ fence_ } + , deviceMask{ deviceMask_ } { } @@ -4385,9 +4388,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR AcquireProfilingLockInfoKHR( VULKAN_HPP_NAMESPACE::AcquireProfilingLockFlagsKHR flags_ = {}, uint64_t timeout_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , timeout( timeout_ ) + : pNext{ pNext_ } + , flags{ flags_ } + , timeout{ timeout_ } { } @@ -4491,12 +4494,12 @@ namespace VULKAN_HPP_NAMESPACE PFN_vkFreeFunction pfnFree_ = {}, PFN_vkInternalAllocationNotification pfnInternalAllocation_ = {}, PFN_vkInternalFreeNotification pfnInternalFree_ = {} ) VULKAN_HPP_NOEXCEPT - : pUserData( pUserData_ ) - , pfnAllocation( pfnAllocation_ ) - , pfnReallocation( pfnReallocation_ ) - , pfnFree( pfnFree_ ) - , pfnInternalAllocation( pfnInternalAllocation_ 
) - , pfnInternalFree( pfnInternalFree_ ) + : pUserData{ pUserData_ } + , pfnAllocation{ pfnAllocation_ } + , pfnReallocation{ pfnReallocation_ } + , pfnFree{ pfnFree_ } + , pfnInternalAllocation{ pfnInternalAllocation_ } + , pfnInternalFree{ pfnInternalFree_ } { } @@ -4614,9 +4617,9 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR AmigoProfilingSubmitInfoSEC( uint64_t firstDrawTimestamp_ = {}, uint64_t swapBufferTimestamp_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , firstDrawTimestamp( firstDrawTimestamp_ ) - , swapBufferTimestamp( swapBufferTimestamp_ ) + : pNext{ pNext_ } + , firstDrawTimestamp{ firstDrawTimestamp_ } + , swapBufferTimestamp{ swapBufferTimestamp_ } { } @@ -4719,10 +4722,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::ComponentSwizzle g_ = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity, VULKAN_HPP_NAMESPACE::ComponentSwizzle b_ = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity, VULKAN_HPP_NAMESPACE::ComponentSwizzle a_ = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity ) VULKAN_HPP_NOEXCEPT - : r( r_ ) - , g( g_ ) - , b( b_ ) - , a( a_ ) + : r{ r_ } + , g{ g_ } + , b{ b_ } + , a{ a_ } { } @@ -4834,15 +4837,15 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::ChromaLocation suggestedXChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven, VULKAN_HPP_NAMESPACE::ChromaLocation suggestedYChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , format( format_ ) - , externalFormat( externalFormat_ ) - , formatFeatures( formatFeatures_ ) - , samplerYcbcrConversionComponents( samplerYcbcrConversionComponents_ ) - , suggestedYcbcrModel( suggestedYcbcrModel_ ) - , suggestedYcbcrRange( suggestedYcbcrRange_ ) - , suggestedXChromaOffset( suggestedXChromaOffset_ ) - , suggestedYChromaOffset( suggestedYChromaOffset_ ) + : pNext{ pNext_ } + , format{ format_ } + , externalFormat{ externalFormat_ } + , formatFeatures{ formatFeatures_ } + , samplerYcbcrConversionComponents{ samplerYcbcrConversionComponents_ } + , suggestedYcbcrModel{ suggestedYcbcrModel_ } + , suggestedYcbcrRange{ suggestedYcbcrRange_ } + , suggestedXChromaOffset{ suggestedXChromaOffset_ } + , suggestedYChromaOffset{ suggestedYChromaOffset_ } { } @@ -4963,15 +4966,15 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::ChromaLocation suggestedXChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven, VULKAN_HPP_NAMESPACE::ChromaLocation suggestedYChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , format( format_ ) - , externalFormat( externalFormat_ ) - , formatFeatures( formatFeatures_ ) - , samplerYcbcrConversionComponents( samplerYcbcrConversionComponents_ ) - , suggestedYcbcrModel( suggestedYcbcrModel_ ) - , suggestedYcbcrRange( suggestedYcbcrRange_ ) - , suggestedXChromaOffset( suggestedXChromaOffset_ ) - , suggestedYChromaOffset( suggestedYChromaOffset_ ) + : pNext{ pNext_ } + , format{ format_ } + , externalFormat{ externalFormat_ } + , formatFeatures{ formatFeatures_ } + , samplerYcbcrConversionComponents{ samplerYcbcrConversionComponents_ } + , suggestedYcbcrModel{ suggestedYcbcrModel_ } + , suggestedYcbcrRange{ suggestedYcbcrRange_ } + , suggestedXChromaOffset{ suggestedXChromaOffset_ } + , suggestedYChromaOffset{ suggestedYChromaOffset_ } { } @@ -5084,8 +5087,8 @@ namespace VULKAN_HPP_NAMESPACE 
VULKAN_HPP_CONSTEXPR AndroidHardwareBufferFormatResolvePropertiesANDROID( VULKAN_HPP_NAMESPACE::Format colorAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , colorAttachmentFormat( colorAttachmentFormat_ ) + : pNext{ pNext_ } + , colorAttachmentFormat{ colorAttachmentFormat_ } { } @@ -5172,9 +5175,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR AndroidHardwareBufferPropertiesANDROID( VULKAN_HPP_NAMESPACE::DeviceSize allocationSize_ = {}, uint32_t memoryTypeBits_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , allocationSize( allocationSize_ ) - , memoryTypeBits( memoryTypeBits_ ) + : pNext{ pNext_ } + , allocationSize{ allocationSize_ } + , memoryTypeBits{ memoryTypeBits_ } { } @@ -5258,8 +5261,8 @@ namespace VULKAN_HPP_NAMESPACE # if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR AndroidHardwareBufferUsageANDROID( uint64_t androidHardwareBufferUsage_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , androidHardwareBufferUsage( androidHardwareBufferUsage_ ) + : pNext{ pNext_ } + , androidHardwareBufferUsage{ androidHardwareBufferUsage_ } { } @@ -5344,9 +5347,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR AndroidSurfaceCreateInfoKHR( VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateFlagsKHR flags_ = {}, struct ANativeWindow * window_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , window( window_ ) + : pNext{ pNext_ } + , flags{ flags_ } + , window{ window_ } { } @@ -5443,6 +5446,230 @@ namespace VULKAN_HPP_NAMESPACE }; #endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + struct AntiLagPresentationInfoAMD + { + using NativeType = VkAntiLagPresentationInfoAMD; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAntiLagPresentationInfoAMD; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AntiLagPresentationInfoAMD( VULKAN_HPP_NAMESPACE::AntiLagStageAMD stage_ = VULKAN_HPP_NAMESPACE::AntiLagStageAMD::eInput, + uint64_t frameIndex_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , stage{ stage_ } + , frameIndex{ frameIndex_ } + { + } + + VULKAN_HPP_CONSTEXPR AntiLagPresentationInfoAMD( AntiLagPresentationInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AntiLagPresentationInfoAMD( VkAntiLagPresentationInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT + : AntiLagPresentationInfoAMD( *reinterpret_cast( &rhs ) ) + { + } + + AntiLagPresentationInfoAMD & operator=( AntiLagPresentationInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + AntiLagPresentationInfoAMD & operator=( VkAntiLagPresentationInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 AntiLagPresentationInfoAMD & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AntiLagPresentationInfoAMD & setStage( VULKAN_HPP_NAMESPACE::AntiLagStageAMD stage_ ) VULKAN_HPP_NOEXCEPT + { + stage = stage_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AntiLagPresentationInfoAMD & setFrameIndex( uint64_t frameIndex_ ) VULKAN_HPP_NOEXCEPT + { + frameIndex = frameIndex_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkAntiLagPresentationInfoAMD const &() 
const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAntiLagPresentationInfoAMD &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, stage, frameIndex ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AntiLagPresentationInfoAMD const & ) const = default; +#else + bool operator==( AntiLagPresentationInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stage == rhs.stage ) && ( frameIndex == rhs.frameIndex ); +# endif + } + + bool operator!=( AntiLagPresentationInfoAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAntiLagPresentationInfoAMD; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::AntiLagStageAMD stage = VULKAN_HPP_NAMESPACE::AntiLagStageAMD::eInput; + uint64_t frameIndex = {}; + }; + + template <> + struct CppType + { + using Type = AntiLagPresentationInfoAMD; + }; + + struct AntiLagDataAMD + { + using NativeType = VkAntiLagDataAMD; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAntiLagDataAMD; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR AntiLagDataAMD( VULKAN_HPP_NAMESPACE::AntiLagModeAMD mode_ = VULKAN_HPP_NAMESPACE::AntiLagModeAMD::eDriverControl, + uint32_t maxFPS_ = {}, + const VULKAN_HPP_NAMESPACE::AntiLagPresentationInfoAMD * pPresentationInfo_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , mode{ mode_ } + , maxFPS{ maxFPS_ } + , pPresentationInfo{ pPresentationInfo_ } + { + } + + VULKAN_HPP_CONSTEXPR AntiLagDataAMD( AntiLagDataAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + AntiLagDataAMD( VkAntiLagDataAMD const & rhs ) VULKAN_HPP_NOEXCEPT : AntiLagDataAMD( *reinterpret_cast( &rhs ) ) {} + + AntiLagDataAMD & operator=( AntiLagDataAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + AntiLagDataAMD & operator=( VkAntiLagDataAMD const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 AntiLagDataAMD & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AntiLagDataAMD & setMode( VULKAN_HPP_NAMESPACE::AntiLagModeAMD mode_ ) VULKAN_HPP_NOEXCEPT + { + mode = mode_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AntiLagDataAMD & setMaxFPS( uint32_t maxFPS_ ) VULKAN_HPP_NOEXCEPT + { + maxFPS = maxFPS_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 AntiLagDataAMD & + setPPresentationInfo( const VULKAN_HPP_NAMESPACE::AntiLagPresentationInfoAMD * pPresentationInfo_ ) VULKAN_HPP_NOEXCEPT + { + pPresentationInfo = pPresentationInfo_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkAntiLagDataAMD const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkAntiLagDataAMD &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# 
else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, mode, maxFPS, pPresentationInfo ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( AntiLagDataAMD const & ) const = default; +#else + bool operator==( AntiLagDataAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( mode == rhs.mode ) && ( maxFPS == rhs.maxFPS ) && + ( pPresentationInfo == rhs.pPresentationInfo ); +# endif + } + + bool operator!=( AntiLagDataAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAntiLagDataAMD; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::AntiLagModeAMD mode = VULKAN_HPP_NAMESPACE::AntiLagModeAMD::eDriverControl; + uint32_t maxFPS = {}; + const VULKAN_HPP_NAMESPACE::AntiLagPresentationInfoAMD * pPresentationInfo = {}; + }; + + template <> + struct CppType + { + using Type = AntiLagDataAMD; + }; + struct ApplicationInfo { using NativeType = VkApplicationInfo; @@ -5457,12 +5684,12 @@ namespace VULKAN_HPP_NAMESPACE uint32_t engineVersion_ = {}, uint32_t apiVersion_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , pApplicationName( pApplicationName_ ) - , applicationVersion( applicationVersion_ ) - , pEngineName( pEngineName_ ) - , engineVersion( engineVersion_ ) - , apiVersion( apiVersion_ ) + : pNext{ pNext_ } + , pApplicationName{ pApplicationName_ } + , applicationVersion{ applicationVersion_ } + , pEngineName{ pEngineName_ } + , engineVersion{ engineVersion_ } + , apiVersion{ apiVersion_ } { } @@ -5613,15 +5840,15 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp_ = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore, VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, VULKAN_HPP_NAMESPACE::ImageLayout finalLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined ) VULKAN_HPP_NOEXCEPT - : flags( flags_ ) - , format( format_ ) - , samples( samples_ ) - , loadOp( loadOp_ ) - , storeOp( storeOp_ ) - , stencilLoadOp( stencilLoadOp_ ) - , stencilStoreOp( stencilStoreOp_ ) - , initialLayout( initialLayout_ ) - , finalLayout( finalLayout_ ) + : flags{ flags_ } + , format{ format_ } + , samples{ samples_ } + , loadOp{ loadOp_ } + , storeOp{ storeOp_ } + , stencilLoadOp{ stencilLoadOp_ } + , stencilStoreOp{ stencilStoreOp_ } + , initialLayout{ initialLayout_ } + , finalLayout{ finalLayout_ } { } @@ -5777,16 +6004,16 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, VULKAN_HPP_NAMESPACE::ImageLayout finalLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , format( format_ ) - , samples( samples_ ) - , loadOp( loadOp_ ) - , storeOp( storeOp_ ) - , stencilLoadOp( stencilLoadOp_ ) - , stencilStoreOp( stencilStoreOp_ ) - , initialLayout( initialLayout_ ) - , finalLayout( finalLayout_ ) + : pNext{ pNext_ } + , flags{ flags_ } + , format{ format_ } + , samples{ samples_ } + , loadOp{ loadOp_ } + , storeOp{ storeOp_ } + , stencilLoadOp{ stencilLoadOp_ } + , stencilStoreOp{ stencilStoreOp_ } + , initialLayout{ initialLayout_ } + , finalLayout{ finalLayout_ } { } @@ -5954,9 +6181,9 @@ 
namespace VULKAN_HPP_NAMESPACE AttachmentDescriptionStencilLayout( VULKAN_HPP_NAMESPACE::ImageLayout stencilInitialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, VULKAN_HPP_NAMESPACE::ImageLayout stencilFinalLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , stencilInitialLayout( stencilInitialLayout_ ) - , stencilFinalLayout( stencilFinalLayout_ ) + : pNext{ pNext_ } + , stencilInitialLayout{ stencilInitialLayout_ } + , stencilFinalLayout{ stencilFinalLayout_ } { } @@ -6062,8 +6289,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR AttachmentReference( uint32_t attachment_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout layout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined ) VULKAN_HPP_NOEXCEPT - : attachment( attachment_ ) - , layout( layout_ ) + : attachment{ attachment_ } + , layout{ layout_ } { } @@ -6153,10 +6380,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::ImageLayout layout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , attachment( attachment_ ) - , layout( layout_ ) - , aspectMask( aspectMask_ ) + : pNext{ pNext_ } + , attachment{ attachment_ } + , layout{ layout_ } + , aspectMask{ aspectMask_ } { } @@ -6273,8 +6500,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR AttachmentReferenceStencilLayout( VULKAN_HPP_NAMESPACE::ImageLayout stencilLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , stencilLayout( stencilLayout_ ) + : pNext{ pNext_ } + , stencilLayout{ stencilLayout_ } { } @@ -6375,10 +6602,10 @@ namespace VULKAN_HPP_NAMESPACE const VULKAN_HPP_NAMESPACE::SampleCountFlagBits * pColorAttachmentSamples_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlagBits depthStencilAttachmentSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , colorAttachmentCount( colorAttachmentCount_ ) - , pColorAttachmentSamples( pColorAttachmentSamples_ ) - , depthStencilAttachmentSamples( depthStencilAttachmentSamples_ ) + : pNext{ pNext_ } + , colorAttachmentCount{ colorAttachmentCount_ } + , pColorAttachmentSamples{ pColorAttachmentSamples_ } + , depthStencilAttachmentSamples{ depthStencilAttachmentSamples_ } { } @@ -6516,8 +6743,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR Extent2D( uint32_t width_ = {}, uint32_t height_ = {} ) VULKAN_HPP_NOEXCEPT - : width( width_ ) - , height( height_ ) + : width{ width_ } + , height{ height_ } { } @@ -6599,8 +6826,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR SampleLocationEXT( float x_ = {}, float y_ = {} ) VULKAN_HPP_NOEXCEPT - : x( x_ ) - , y( y_ ) + : x{ x_ } + , y{ y_ } { } @@ -6690,11 +6917,11 @@ namespace VULKAN_HPP_NAMESPACE uint32_t sampleLocationsCount_ = {}, const VULKAN_HPP_NAMESPACE::SampleLocationEXT * pSampleLocations_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , sampleLocationsPerPixel( sampleLocationsPerPixel_ ) - , sampleLocationGridSize( sampleLocationGridSize_ ) - , sampleLocationsCount( sampleLocationsCount_ ) - , pSampleLocations( pSampleLocations_ ) + : pNext{ pNext_ } + , sampleLocationsPerPixel{ 
sampleLocationsPerPixel_ } + , sampleLocationGridSize{ sampleLocationGridSize_ } + , sampleLocationsCount{ sampleLocationsCount_ } + , pSampleLocations{ pSampleLocations_ } { } @@ -6842,8 +7069,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR AttachmentSampleLocationsEXT( uint32_t attachmentIndex_ = {}, VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo_ = {} ) VULKAN_HPP_NOEXCEPT - : attachmentIndex( attachmentIndex_ ) - , sampleLocationsInfo( sampleLocationsInfo_ ) + : attachmentIndex{ attachmentIndex_ } + , sampleLocationsInfo{ sampleLocationsInfo_ } { } @@ -6930,8 +7157,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) BaseInStructure( VULKAN_HPP_NAMESPACE::StructureType sType_ = VULKAN_HPP_NAMESPACE::StructureType::eApplicationInfo, const struct VULKAN_HPP_NAMESPACE::BaseInStructure * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : sType( sType_ ) - , pNext( pNext_ ) + : sType{ sType_ } + , pNext{ pNext_ } { } @@ -7008,8 +7235,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) BaseOutStructure( VULKAN_HPP_NAMESPACE::StructureType sType_ = VULKAN_HPP_NAMESPACE::StructureType::eApplicationInfo, struct VULKAN_HPP_NAMESPACE::BaseOutStructure * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : sType( sType_ ) - , pNext( pNext_ ) + : sType{ sType_ } + , pNext{ pNext_ } { } @@ -7093,12 +7320,12 @@ namespace VULKAN_HPP_NAMESPACE uint32_t deviceIndexCount_ = {}, const uint32_t * pDeviceIndices_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , accelerationStructure( accelerationStructure_ ) - , memory( memory_ ) - , memoryOffset( memoryOffset_ ) - , deviceIndexCount( deviceIndexCount_ ) - , pDeviceIndices( pDeviceIndices_ ) + : pNext{ pNext_ } + , accelerationStructure{ accelerationStructure_ } + , memory{ memory_ } + , memoryOffset{ memoryOffset_ } + , deviceIndexCount{ deviceIndexCount_ } + , pDeviceIndices{ pDeviceIndices_ } { } @@ -7257,9 +7484,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR BindBufferMemoryDeviceGroupInfo( uint32_t deviceIndexCount_ = {}, const uint32_t * pDeviceIndices_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , deviceIndexCount( deviceIndexCount_ ) - , pDeviceIndices( pDeviceIndices_ ) + : pNext{ pNext_ } + , deviceIndexCount{ deviceIndexCount_ } + , pDeviceIndices{ pDeviceIndices_ } { } @@ -7383,10 +7610,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , buffer( buffer_ ) - , memory( memory_ ) - , memoryOffset( memoryOffset_ ) + : pNext{ pNext_ } + , buffer{ buffer_ } + , memory{ memory_ } + , memoryOffset{ memoryOffset_ } { } @@ -7504,10 +7731,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, uint32_t set_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , stageFlags( stageFlags_ ) - , layout( layout_ ) - , set( set_ ) + : pNext{ pNext_ } + , stageFlags{ stageFlags_ } + , layout{ layout_ } + , set{ set_ } { } @@ -7628,14 +7855,14 @@ namespace VULKAN_HPP_NAMESPACE uint32_t dynamicOffsetCount_ = {}, const uint32_t * pDynamicOffsets_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , stageFlags( stageFlags_ ) - , layout( layout_ ) - , firstSet( firstSet_ ) - , descriptorSetCount( descriptorSetCount_ ) - , 
pDescriptorSets( pDescriptorSets_ ) - , dynamicOffsetCount( dynamicOffsetCount_ ) - , pDynamicOffsets( pDynamicOffsets_ ) + : pNext{ pNext_ } + , stageFlags{ stageFlags_ } + , layout{ layout_ } + , firstSet{ firstSet_ } + , descriptorSetCount{ descriptorSetCount_ } + , pDescriptorSets{ pDescriptorSets_ } + , dynamicOffsetCount{ dynamicOffsetCount_ } + , pDynamicOffsets{ pDynamicOffsets_ } { } @@ -7817,8 +8044,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR Offset2D( int32_t x_ = {}, int32_t y_ = {} ) VULKAN_HPP_NOEXCEPT - : x( x_ ) - , y( y_ ) + : x{ x_ } + , y{ y_ } { } @@ -7900,8 +8127,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR Rect2D( VULKAN_HPP_NAMESPACE::Offset2D offset_ = {}, VULKAN_HPP_NAMESPACE::Extent2D extent_ = {} ) VULKAN_HPP_NOEXCEPT - : offset( offset_ ) - , extent( extent_ ) + : offset{ offset_ } + , extent{ extent_ } { } @@ -7990,11 +8217,11 @@ namespace VULKAN_HPP_NAMESPACE uint32_t splitInstanceBindRegionCount_ = {}, const VULKAN_HPP_NAMESPACE::Rect2D * pSplitInstanceBindRegions_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , deviceIndexCount( deviceIndexCount_ ) - , pDeviceIndices( pDeviceIndices_ ) - , splitInstanceBindRegionCount( splitInstanceBindRegionCount_ ) - , pSplitInstanceBindRegions( pSplitInstanceBindRegions_ ) + : pNext{ pNext_ } + , deviceIndexCount{ deviceIndexCount_ } + , pDeviceIndices{ pDeviceIndices_ } + , splitInstanceBindRegionCount{ splitInstanceBindRegionCount_ } + , pSplitInstanceBindRegions{ pSplitInstanceBindRegions_ } { } @@ -8155,10 +8382,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , image( image_ ) - , memory( memory_ ) - , memoryOffset( memoryOffset_ ) + : pNext{ pNext_ } + , image{ image_ } + , memory{ memory_ } + , memoryOffset{ memoryOffset_ } { } @@ -8274,9 +8501,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR BindImageMemorySwapchainInfoKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ = {}, uint32_t imageIndex_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , swapchain( swapchain_ ) - , imageIndex( imageIndex_ ) + : pNext{ pNext_ } + , swapchain{ swapchain_ } + , imageIndex{ imageIndex_ } { } @@ -8379,8 +8606,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR BindImagePlaneMemoryInfo( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , planeAspect( planeAspect_ ) + : pNext{ pNext_ } + , planeAspect{ planeAspect_ } { } @@ -8477,9 +8704,9 @@ namespace VULKAN_HPP_NAMESPACE BindIndexBufferIndirectCommandNV( VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress_ = {}, uint32_t size_ = {}, VULKAN_HPP_NAMESPACE::IndexType indexType_ = VULKAN_HPP_NAMESPACE::IndexType::eUint16 ) VULKAN_HPP_NOEXCEPT - : bufferAddress( bufferAddress_ ) - , size( size_ ) - , indexType( indexType_ ) + : bufferAddress{ bufferAddress_ } + , size{ size_ } + , indexType{ indexType_ } { } @@ -8574,8 +8801,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR BindMemoryStatusKHR( VULKAN_HPP_NAMESPACE::Result * pResult_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT 
- : pNext( pNext_ ) - , pResult( pResult_ ) + : pNext{ pNext_ } + , pResult{ pResult_ } { } @@ -8666,7 +8893,7 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR BindPipelineIndirectCommandNV( VULKAN_HPP_NAMESPACE::DeviceAddress pipelineAddress_ = {} ) VULKAN_HPP_NOEXCEPT - : pipelineAddress( pipelineAddress_ ) + : pipelineAddress{ pipelineAddress_ } { } @@ -8743,7 +8970,7 @@ namespace VULKAN_HPP_NAMESPACE using NativeType = VkBindShaderGroupIndirectCommandNV; #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR BindShaderGroupIndirectCommandNV( uint32_t groupIndex_ = {} ) VULKAN_HPP_NOEXCEPT : groupIndex( groupIndex_ ) {} + VULKAN_HPP_CONSTEXPR BindShaderGroupIndirectCommandNV( uint32_t groupIndex_ = {} ) VULKAN_HPP_NOEXCEPT : groupIndex{ groupIndex_ } {} VULKAN_HPP_CONSTEXPR BindShaderGroupIndirectCommandNV( BindShaderGroupIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; @@ -8823,11 +9050,11 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {}, VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT - : resourceOffset( resourceOffset_ ) - , size( size_ ) - , memory( memory_ ) - , memoryOffset( memoryOffset_ ) - , flags( flags_ ) + : resourceOffset{ resourceOffset_ } + , size{ size_ } + , memory{ memory_ } + , memoryOffset{ memoryOffset_ } + , flags{ flags_ } { } @@ -8937,9 +9164,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR SparseBufferMemoryBindInfo( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, uint32_t bindCount_ = {}, const VULKAN_HPP_NAMESPACE::SparseMemoryBind * pBinds_ = {} ) VULKAN_HPP_NOEXCEPT - : buffer( buffer_ ) - , bindCount( bindCount_ ) - , pBinds( pBinds_ ) + : buffer{ buffer_ } + , bindCount{ bindCount_ } + , pBinds{ pBinds_ } { } @@ -9051,9 +9278,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR SparseImageOpaqueMemoryBindInfo( VULKAN_HPP_NAMESPACE::Image image_ = {}, uint32_t bindCount_ = {}, const VULKAN_HPP_NAMESPACE::SparseMemoryBind * pBinds_ = {} ) VULKAN_HPP_NOEXCEPT - : image( image_ ) - , bindCount( bindCount_ ) - , pBinds( pBinds_ ) + : image{ image_ } + , bindCount{ bindCount_ } + , pBinds{ pBinds_ } { } @@ -9164,9 +9391,9 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ImageSubresource( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}, uint32_t mipLevel_ = {}, uint32_t arrayLayer_ = {} ) VULKAN_HPP_NOEXCEPT - : aspectMask( aspectMask_ ) - , mipLevel( mipLevel_ ) - , arrayLayer( arrayLayer_ ) + : aspectMask{ aspectMask_ } + , mipLevel{ mipLevel_ } + , arrayLayer{ arrayLayer_ } { } @@ -9255,9 +9482,9 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR Offset3D( int32_t x_ = {}, int32_t y_ = {}, int32_t z_ = {} ) VULKAN_HPP_NOEXCEPT - : x( x_ ) - , y( y_ ) - , z( z_ ) + : x{ x_ } + , y{ y_ } + , z{ z_ } { } @@ -9348,9 +9575,9 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR Extent3D( uint32_t width_ = {}, uint32_t height_ = {}, uint32_t depth_ = {} ) VULKAN_HPP_NOEXCEPT - : width( width_ ) - , height( height_ ) - , depth( depth_ ) + : width{ width_ } + , height{ height_ } + , depth{ depth_ } { } @@ -9446,12 +9673,12 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {}, 
VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT - : subresource( subresource_ ) - , offset( offset_ ) - , extent( extent_ ) - , memory( memory_ ) - , memoryOffset( memoryOffset_ ) - , flags( flags_ ) + : subresource{ subresource_ } + , offset{ offset_ } + , extent{ extent_ } + , memory{ memory_ } + , memoryOffset{ memoryOffset_ } + , flags{ flags_ } { } @@ -9572,9 +9799,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR SparseImageMemoryBindInfo( VULKAN_HPP_NAMESPACE::Image image_ = {}, uint32_t bindCount_ = {}, const VULKAN_HPP_NAMESPACE::SparseImageMemoryBind * pBinds_ = {} ) VULKAN_HPP_NOEXCEPT - : image( image_ ) - , bindCount( bindCount_ ) - , pBinds( pBinds_ ) + : image{ image_ } + , bindCount{ bindCount_ } + , pBinds{ pBinds_ } { } @@ -9697,17 +9924,17 @@ namespace VULKAN_HPP_NAMESPACE uint32_t signalSemaphoreCount_ = {}, const VULKAN_HPP_NAMESPACE::Semaphore * pSignalSemaphores_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , waitSemaphoreCount( waitSemaphoreCount_ ) - , pWaitSemaphores( pWaitSemaphores_ ) - , bufferBindCount( bufferBindCount_ ) - , pBufferBinds( pBufferBinds_ ) - , imageOpaqueBindCount( imageOpaqueBindCount_ ) - , pImageOpaqueBinds( pImageOpaqueBinds_ ) - , imageBindCount( imageBindCount_ ) - , pImageBinds( pImageBinds_ ) - , signalSemaphoreCount( signalSemaphoreCount_ ) - , pSignalSemaphores( pSignalSemaphores_ ) + : pNext{ pNext_ } + , waitSemaphoreCount{ waitSemaphoreCount_ } + , pWaitSemaphores{ pWaitSemaphores_ } + , bufferBindCount{ bufferBindCount_ } + , pBufferBinds{ pBufferBinds_ } + , imageOpaqueBindCount{ imageOpaqueBindCount_ } + , pImageOpaqueBinds{ pImageOpaqueBinds_ } + , imageBindCount{ imageBindCount_ } + , pImageBinds{ pImageBinds_ } + , signalSemaphoreCount{ signalSemaphoreCount_ } + , pSignalSemaphores{ pSignalSemaphores_ } { } @@ -9960,9 +10187,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR BindVertexBufferIndirectCommandNV( VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress_ = {}, uint32_t size_ = {}, uint32_t stride_ = {} ) VULKAN_HPP_NOEXCEPT - : bufferAddress( bufferAddress_ ) - , size( size_ ) - , stride( stride_ ) + : bufferAddress{ bufferAddress_ } + , size{ size_ } + , stride{ stride_ } { } @@ -10061,11 +10288,11 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize memorySize_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , memoryBindIndex( memoryBindIndex_ ) - , memory( memory_ ) - , memoryOffset( memoryOffset_ ) - , memorySize( memorySize_ ) + : pNext{ pNext_ } + , memoryBindIndex{ memoryBindIndex_ } + , memory{ memory_ } + , memoryOffset{ memoryOffset_ } + , memorySize{ memorySize_ } { } @@ -10189,8 +10416,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR BlitImageCubicWeightsInfoQCOM( VULKAN_HPP_NAMESPACE::CubicFilterWeightsQCOM cubicWeights_ = VULKAN_HPP_NAMESPACE::CubicFilterWeightsQCOM::eCatmullRom, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , cubicWeights( cubicWeights_ ) + : pNext{ pNext_ } + , cubicWeights{ cubicWeights_ } { } @@ -10285,10 +10512,10 @@ namespace VULKAN_HPP_NAMESPACE uint32_t mipLevel_ = {}, uint32_t baseArrayLayer_ = {}, uint32_t layerCount_ = {} ) VULKAN_HPP_NOEXCEPT - : aspectMask( aspectMask_ ) - , mipLevel( mipLevel_ ) - , baseArrayLayer( baseArrayLayer_ ) - , layerCount( layerCount_ ) + : aspectMask{ aspectMask_ } + , mipLevel{ mipLevel_ } + , baseArrayLayer{ baseArrayLayer_ } + , 
layerCount{ layerCount_ } { } @@ -10394,11 +10621,11 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, std::array const & dstOffsets_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , srcSubresource( srcSubresource_ ) - , srcOffsets( srcOffsets_ ) - , dstSubresource( dstSubresource_ ) - , dstOffsets( dstOffsets_ ) + : pNext{ pNext_ } + , srcSubresource{ srcSubresource_ } + , srcOffsets{ srcOffsets_ } + , dstSubresource{ dstSubresource_ } + , dstOffsets{ dstOffsets_ } { } @@ -10526,14 +10753,14 @@ namespace VULKAN_HPP_NAMESPACE const VULKAN_HPP_NAMESPACE::ImageBlit2 * pRegions_ = {}, VULKAN_HPP_NAMESPACE::Filter filter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , srcImage( srcImage_ ) - , srcImageLayout( srcImageLayout_ ) - , dstImage( dstImage_ ) - , dstImageLayout( dstImageLayout_ ) - , regionCount( regionCount_ ) - , pRegions( pRegions_ ) - , filter( filter_ ) + : pNext{ pNext_ } + , srcImage{ srcImage_ } + , srcImageLayout{ srcImageLayout_ } + , dstImage{ dstImage_ } + , dstImageLayout{ dstImageLayout_ } + , regionCount{ regionCount_ } + , pRegions{ pRegions_ } + , filter{ filter_ } { } @@ -10708,8 +10935,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR BufferCaptureDescriptorDataInfoEXT( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , buffer( buffer_ ) + : pNext{ pNext_ } + , buffer{ buffer_ } { } @@ -10807,9 +11034,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR BufferCollectionBufferCreateInfoFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection_ = {}, uint32_t index_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , collection( collection_ ) - , index( index_ ) + : pNext{ pNext_ } + , collection{ collection_ } + , index{ index_ } { } @@ -10919,12 +11146,12 @@ namespace VULKAN_HPP_NAMESPACE uint32_t minBufferCountForDedicatedSlack_ = {}, uint32_t minBufferCountForSharedSlack_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , minBufferCount( minBufferCount_ ) - , maxBufferCount( maxBufferCount_ ) - , minBufferCountForCamping( minBufferCountForCamping_ ) - , minBufferCountForDedicatedSlack( minBufferCountForDedicatedSlack_ ) - , minBufferCountForSharedSlack( minBufferCountForSharedSlack_ ) + : pNext{ pNext_ } + , minBufferCount{ minBufferCount_ } + , maxBufferCount{ maxBufferCount_ } + , minBufferCountForCamping{ minBufferCountForCamping_ } + , minBufferCountForDedicatedSlack{ minBufferCountForDedicatedSlack_ } + , minBufferCountForSharedSlack{ minBufferCountForSharedSlack_ } { } @@ -11059,8 +11286,8 @@ namespace VULKAN_HPP_NAMESPACE # if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR BufferCollectionCreateInfoFUCHSIA( zx_handle_t collectionToken_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , collectionToken( collectionToken_ ) + : pNext{ pNext_ } + , collectionToken{ collectionToken_ } { } @@ -11165,9 +11392,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR BufferCollectionImageCreateInfoFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection_ = {}, uint32_t index_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , collection( collection_ ) - , index( index_ ) + : pNext{ pNext_ } + , collection{ collection_ } + , 
index{ index_ } { } @@ -11272,8 +11499,8 @@ namespace VULKAN_HPP_NAMESPACE # if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR SysmemColorSpaceFUCHSIA( uint32_t colorSpace_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , colorSpace( colorSpace_ ) + : pNext{ pNext_ } + , colorSpace{ colorSpace_ } { } @@ -11382,18 +11609,18 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::ChromaLocation suggestedXChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven, VULKAN_HPP_NAMESPACE::ChromaLocation suggestedYChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , memoryTypeBits( memoryTypeBits_ ) - , bufferCount( bufferCount_ ) - , createInfoIndex( createInfoIndex_ ) - , sysmemPixelFormat( sysmemPixelFormat_ ) - , formatFeatures( formatFeatures_ ) - , sysmemColorSpaceIndex( sysmemColorSpaceIndex_ ) - , samplerYcbcrConversionComponents( samplerYcbcrConversionComponents_ ) - , suggestedYcbcrModel( suggestedYcbcrModel_ ) - , suggestedYcbcrRange( suggestedYcbcrRange_ ) - , suggestedXChromaOffset( suggestedXChromaOffset_ ) - , suggestedYChromaOffset( suggestedYChromaOffset_ ) + : pNext{ pNext_ } + , memoryTypeBits{ memoryTypeBits_ } + , bufferCount{ bufferCount_ } + , createInfoIndex{ createInfoIndex_ } + , sysmemPixelFormat{ sysmemPixelFormat_ } + , formatFeatures{ formatFeatures_ } + , sysmemColorSpaceIndex{ sysmemColorSpaceIndex_ } + , samplerYcbcrConversionComponents{ samplerYcbcrConversionComponents_ } + , suggestedYcbcrModel{ suggestedYcbcrModel_ } + , suggestedYcbcrRange{ suggestedYcbcrRange_ } + , suggestedXChromaOffset{ suggestedXChromaOffset_ } + , suggestedYChromaOffset{ suggestedYChromaOffset_ } { } @@ -11519,13 +11746,13 @@ namespace VULKAN_HPP_NAMESPACE uint32_t queueFamilyIndexCount_ = {}, const uint32_t * pQueueFamilyIndices_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , size( size_ ) - , usage( usage_ ) - , sharingMode( sharingMode_ ) - , queueFamilyIndexCount( queueFamilyIndexCount_ ) - , pQueueFamilyIndices( pQueueFamilyIndices_ ) + : pNext{ pNext_ } + , flags{ flags_ } + , size{ size_ } + , usage{ usage_ } + , sharingMode{ sharingMode_ } + , queueFamilyIndexCount{ queueFamilyIndexCount_ } + , pQueueFamilyIndices{ pQueueFamilyIndices_ } { } @@ -11691,10 +11918,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::FormatFeatureFlags requiredFormatFeatures_ = {}, VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA bufferCollectionConstraints_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , createInfo( createInfo_ ) - , requiredFormatFeatures( requiredFormatFeatures_ ) - , bufferCollectionConstraints( bufferCollectionConstraints_ ) + : pNext{ pNext_ } + , createInfo{ createInfo_ } + , requiredFormatFeatures{ requiredFormatFeatures_ } + , bufferCollectionConstraints{ bufferCollectionConstraints_ } { } @@ -11810,9 +12037,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR BufferCopy( VULKAN_HPP_NAMESPACE::DeviceSize srcOffset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {} ) VULKAN_HPP_NOEXCEPT - : srcOffset( srcOffset_ ) - , dstOffset( dstOffset_ ) - , size( size_ ) + : srcOffset{ srcOffset_ } + , dstOffset{ dstOffset_ } + , size{ size_ } { } @@ -11907,10 +12134,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DeviceSize dstOffset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize 
size_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , srcOffset( srcOffset_ ) - , dstOffset( dstOffset_ ) - , size( size_ ) + : pNext{ pNext_ } + , srcOffset{ srcOffset_ } + , dstOffset{ dstOffset_ } + , size{ size_ } { } @@ -12023,8 +12250,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR BufferDeviceAddressCreateInfoEXT( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , deviceAddress( deviceAddress_ ) + : pNext{ pNext_ } + , deviceAddress{ deviceAddress_ } { } @@ -12119,8 +12346,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR BufferDeviceAddressInfo( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , buffer( buffer_ ) + : pNext{ pNext_ } + , buffer{ buffer_ } { } @@ -12220,12 +12447,12 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D imageOffset_ = {}, VULKAN_HPP_NAMESPACE::Extent3D imageExtent_ = {} ) VULKAN_HPP_NOEXCEPT - : bufferOffset( bufferOffset_ ) - , bufferRowLength( bufferRowLength_ ) - , bufferImageHeight( bufferImageHeight_ ) - , imageSubresource( imageSubresource_ ) - , imageOffset( imageOffset_ ) - , imageExtent( imageExtent_ ) + : bufferOffset{ bufferOffset_ } + , bufferRowLength{ bufferRowLength_ } + , bufferImageHeight{ bufferImageHeight_ } + , imageSubresource{ imageSubresource_ } + , imageOffset{ imageOffset_ } + , imageExtent{ imageExtent_ } { } @@ -12350,13 +12577,13 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Offset3D imageOffset_ = {}, VULKAN_HPP_NAMESPACE::Extent3D imageExtent_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , bufferOffset( bufferOffset_ ) - , bufferRowLength( bufferRowLength_ ) - , bufferImageHeight( bufferImageHeight_ ) - , imageSubresource( imageSubresource_ ) - , imageOffset( imageOffset_ ) - , imageExtent( imageExtent_ ) + : pNext{ pNext_ } + , bufferOffset{ bufferOffset_ } + , bufferRowLength{ bufferRowLength_ } + , bufferImageHeight{ bufferImageHeight_ } + , imageSubresource{ imageSubresource_ } + , imageOffset{ imageOffset_ } + , imageExtent{ imageExtent_ } { } @@ -12501,14 +12728,14 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , srcAccessMask( srcAccessMask_ ) - , dstAccessMask( dstAccessMask_ ) - , srcQueueFamilyIndex( srcQueueFamilyIndex_ ) - , dstQueueFamilyIndex( dstQueueFamilyIndex_ ) - , buffer( buffer_ ) - , offset( offset_ ) - , size( size_ ) + : pNext{ pNext_ } + , srcAccessMask{ srcAccessMask_ } + , dstAccessMask{ dstAccessMask_ } + , srcQueueFamilyIndex{ srcQueueFamilyIndex_ } + , dstQueueFamilyIndex{ dstQueueFamilyIndex_ } + , buffer{ buffer_ } + , offset{ offset_ } + , size{ size_ } { } @@ -12663,16 +12890,16 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , srcStageMask( srcStageMask_ ) - , srcAccessMask( srcAccessMask_ ) - , dstStageMask( dstStageMask_ ) - , dstAccessMask( dstAccessMask_ ) - , srcQueueFamilyIndex( srcQueueFamilyIndex_ ) - , dstQueueFamilyIndex( dstQueueFamilyIndex_ ) - , buffer( 
buffer_ ) - , offset( offset_ ) - , size( size_ ) + : pNext{ pNext_ } + , srcStageMask{ srcStageMask_ } + , srcAccessMask{ srcAccessMask_ } + , dstStageMask{ dstStageMask_ } + , dstAccessMask{ dstAccessMask_ } + , srcQueueFamilyIndex{ srcQueueFamilyIndex_ } + , dstQueueFamilyIndex{ dstQueueFamilyIndex_ } + , buffer{ buffer_ } + , offset{ offset_ } + , size{ size_ } { } @@ -12837,8 +13064,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR BufferMemoryRequirementsInfo2( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , buffer( buffer_ ) + : pNext{ pNext_ } + , buffer{ buffer_ } { } @@ -12935,8 +13162,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR BufferOpaqueCaptureAddressCreateInfo( uint64_t opaqueCaptureAddress_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , opaqueCaptureAddress( opaqueCaptureAddress_ ) + : pNext{ pNext_ } + , opaqueCaptureAddress{ opaqueCaptureAddress_ } { } @@ -13034,8 +13261,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR BufferUsageFlags2CreateInfoKHR( VULKAN_HPP_NAMESPACE::BufferUsageFlags2KHR usage_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , usage( usage_ ) + : pNext{ pNext_ } + , usage{ usage_ } { } @@ -13135,12 +13362,12 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize range_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , buffer( buffer_ ) - , format( format_ ) - , offset( offset_ ) - , range( range_ ) + : pNext{ pNext_ } + , flags{ flags_ } + , buffer{ buffer_ } + , format{ format_ } + , offset{ offset_ } + , range{ range_ } { } @@ -13271,8 +13498,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR CalibratedTimestampInfoKHR( VULKAN_HPP_NAMESPACE::TimeDomainKHR timeDomain_ = VULKAN_HPP_NAMESPACE::TimeDomainKHR::eDevice, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , timeDomain( timeDomain_ ) + : pNext{ pNext_ } + , timeDomain{ timeDomain_ } { } @@ -13370,9 +13597,9 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR CheckpointData2NV( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage_ = {}, void * pCheckpointMarker_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , stage( stage_ ) - , pCheckpointMarker( pCheckpointMarker_ ) + : pNext{ pNext_ } + , stage{ stage_ } + , pCheckpointMarker{ pCheckpointMarker_ } { } @@ -13453,9 +13680,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR CheckpointDataNV( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits stage_ = VULKAN_HPP_NAMESPACE::PipelineStageFlagBits::eTopOfPipe, void * pCheckpointMarker_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , stage( stage_ ) - , pCheckpointMarker( pCheckpointMarker_ ) + : pNext{ pNext_ } + , stage{ stage_ } + , pCheckpointMarker{ pCheckpointMarker_ } { } @@ -13593,8 +13820,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ClearDepthStencilValue( float depth_ = {}, uint32_t stencil_ = {} ) VULKAN_HPP_NOEXCEPT - : depth( depth_ ) - , stencil( stencil_ ) + : depth{ depth_ } + , stencil{ stencil_ } { } @@ -13724,9 +13951,9 
@@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR_14 ClearAttachment( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}, uint32_t colorAttachment_ = {}, VULKAN_HPP_NAMESPACE::ClearValue clearValue_ = {} ) VULKAN_HPP_NOEXCEPT - : aspectMask( aspectMask_ ) - , colorAttachment( colorAttachment_ ) - , clearValue( clearValue_ ) + : aspectMask{ aspectMask_ } + , colorAttachment{ colorAttachment_ } + , clearValue{ clearValue_ } { } @@ -13797,9 +14024,9 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ClearRect( VULKAN_HPP_NAMESPACE::Rect2D rect_ = {}, uint32_t baseArrayLayer_ = {}, uint32_t layerCount_ = {} ) VULKAN_HPP_NOEXCEPT - : rect( rect_ ) - , baseArrayLayer( baseArrayLayer_ ) - , layerCount( layerCount_ ) + : rect{ rect_ } + , baseArrayLayer{ baseArrayLayer_ } + , layerCount{ layerCount_ } { } @@ -13888,9 +14115,9 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR CoarseSampleLocationNV( uint32_t pixelX_ = {}, uint32_t pixelY_ = {}, uint32_t sample_ = {} ) VULKAN_HPP_NOEXCEPT - : pixelX( pixelX_ ) - , pixelY( pixelY_ ) - , sample( sample_ ) + : pixelX{ pixelX_ } + , pixelY{ pixelY_ } + , sample{ sample_ } { } @@ -13986,10 +14213,10 @@ namespace VULKAN_HPP_NAMESPACE uint32_t sampleCount_ = {}, uint32_t sampleLocationCount_ = {}, const VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV * pSampleLocations_ = {} ) VULKAN_HPP_NOEXCEPT - : shadingRate( shadingRate_ ) - , sampleCount( sampleCount_ ) - , sampleLocationCount( sampleLocationCount_ ) - , pSampleLocations( pSampleLocations_ ) + : shadingRate{ shadingRate_ } + , sampleCount{ sampleCount_ } + , sampleLocationCount{ sampleLocationCount_ } + , pSampleLocations{ pSampleLocations_ } { } @@ -14119,11 +14346,11 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Bool32 dstPremultiplied_ = {}, VULKAN_HPP_NAMESPACE::BlendOverlapEXT blendOverlap_ = VULKAN_HPP_NAMESPACE::BlendOverlapEXT::eUncorrelated, VULKAN_HPP_NAMESPACE::Bool32 clampResults_ = {} ) VULKAN_HPP_NOEXCEPT - : advancedBlendOp( advancedBlendOp_ ) - , srcPremultiplied( srcPremultiplied_ ) - , dstPremultiplied( dstPremultiplied_ ) - , blendOverlap( blendOverlap_ ) - , clampResults( clampResults_ ) + : advancedBlendOp{ advancedBlendOp_ } + , srcPremultiplied{ srcPremultiplied_ } + , dstPremultiplied{ dstPremultiplied_ } + , blendOverlap{ blendOverlap_ } + , clampResults{ clampResults_ } { } @@ -14239,12 +14466,12 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::BlendFactor srcAlphaBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero, VULKAN_HPP_NAMESPACE::BlendFactor dstAlphaBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero, VULKAN_HPP_NAMESPACE::BlendOp alphaBlendOp_ = VULKAN_HPP_NAMESPACE::BlendOp::eAdd ) VULKAN_HPP_NOEXCEPT - : srcColorBlendFactor( srcColorBlendFactor_ ) - , dstColorBlendFactor( dstColorBlendFactor_ ) - , colorBlendOp( colorBlendOp_ ) - , srcAlphaBlendFactor( srcAlphaBlendFactor_ ) - , dstAlphaBlendFactor( dstAlphaBlendFactor_ ) - , alphaBlendOp( alphaBlendOp_ ) + : srcColorBlendFactor{ srcColorBlendFactor_ } + , dstColorBlendFactor{ dstColorBlendFactor_ } + , colorBlendOp{ colorBlendOp_ } + , srcAlphaBlendFactor{ srcAlphaBlendFactor_ } + , dstAlphaBlendFactor{ dstAlphaBlendFactor_ } + , alphaBlendOp{ alphaBlendOp_ } { } @@ -14369,10 +14596,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::CommandBufferLevel level_ = VULKAN_HPP_NAMESPACE::CommandBufferLevel::ePrimary, uint32_t commandBufferCount_ = {}, const void * pNext_ = 
nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , commandPool( commandPool_ ) - , level( level_ ) - , commandBufferCount( commandBufferCount_ ) + : pNext{ pNext_ } + , commandPool{ commandPool_ } + , level{ level_ } + , commandBufferCount{ commandBufferCount_ } { } @@ -14492,13 +14719,13 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::QueryControlFlags queryFlags_ = {}, VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , renderPass( renderPass_ ) - , subpass( subpass_ ) - , framebuffer( framebuffer_ ) - , occlusionQueryEnable( occlusionQueryEnable_ ) - , queryFlags( queryFlags_ ) - , pipelineStatistics( pipelineStatistics_ ) + : pNext{ pNext_ } + , renderPass{ renderPass_ } + , subpass{ subpass_ } + , framebuffer{ framebuffer_ } + , occlusionQueryEnable{ occlusionQueryEnable_ } + , queryFlags{ queryFlags_ } + , pipelineStatistics{ pipelineStatistics_ } { } @@ -14640,9 +14867,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR CommandBufferBeginInfo( VULKAN_HPP_NAMESPACE::CommandBufferUsageFlags flags_ = {}, const VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo * pInheritanceInfo_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , pInheritanceInfo( pInheritanceInfo_ ) + : pNext{ pNext_ } + , flags{ flags_ } + , pInheritanceInfo{ pInheritanceInfo_ } { } @@ -14749,8 +14976,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR CommandBufferInheritanceConditionalRenderingInfoEXT( VULKAN_HPP_NAMESPACE::Bool32 conditionalRenderingEnable_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , conditionalRenderingEnable( conditionalRenderingEnable_ ) + : pNext{ pNext_ } + , conditionalRenderingEnable{ conditionalRenderingEnable_ } { } @@ -14851,9 +15078,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, VULKAN_HPP_NAMESPACE::Rect2D renderArea_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , transform( transform_ ) - , renderArea( renderArea_ ) + : pNext{ pNext_ } + , transform{ transform_ } + , renderArea{ renderArea_ } { } @@ -14970,14 +15197,14 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , viewMask( viewMask_ ) - , colorAttachmentCount( colorAttachmentCount_ ) - , pColorAttachmentFormats( pColorAttachmentFormats_ ) - , depthAttachmentFormat( depthAttachmentFormat_ ) - , stencilAttachmentFormat( stencilAttachmentFormat_ ) - , rasterizationSamples( rasterizationSamples_ ) + : pNext{ pNext_ } + , flags{ flags_ } + , viewMask{ viewMask_ } + , colorAttachmentCount{ colorAttachmentCount_ } + , pColorAttachmentFormats{ pColorAttachmentFormats_ } + , depthAttachmentFormat{ depthAttachmentFormat_ } + , stencilAttachmentFormat{ stencilAttachmentFormat_ } + , rasterizationSamples{ rasterizationSamples_ } { } @@ -15160,12 +15387,12 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR Viewport( float x_ = {}, float y_ = {}, float width_ = {}, float height_ = {}, float minDepth_ = {}, 
float maxDepth_ = {} ) VULKAN_HPP_NOEXCEPT - : x( x_ ) - , y( y_ ) - , width( width_ ) - , height( height_ ) - , minDepth( minDepth_ ) - , maxDepth( maxDepth_ ) + : x{ x_ } + , y{ y_ } + , width{ width_ } + , height{ height_ } + , minDepth{ minDepth_ } + , maxDepth{ maxDepth_ } { } @@ -15282,10 +15509,10 @@ namespace VULKAN_HPP_NAMESPACE uint32_t viewportDepthCount_ = {}, const VULKAN_HPP_NAMESPACE::Viewport * pViewportDepths_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , viewportScissor2D( viewportScissor2D_ ) - , viewportDepthCount( viewportDepthCount_ ) - , pViewportDepths( pViewportDepths_ ) + : pNext{ pNext_ } + , viewportScissor2D{ viewportScissor2D_ } + , viewportDepthCount{ viewportDepthCount_ } + , pViewportDepths{ pViewportDepths_ } { } @@ -15404,9 +15631,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR CommandBufferSubmitInfo( VULKAN_HPP_NAMESPACE::CommandBuffer commandBuffer_ = {}, uint32_t deviceMask_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , commandBuffer( commandBuffer_ ) - , deviceMask( deviceMask_ ) + : pNext{ pNext_ } + , commandBuffer{ commandBuffer_ } + , deviceMask{ deviceMask_ } { } @@ -15512,9 +15739,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR CommandPoolCreateInfo( VULKAN_HPP_NAMESPACE::CommandPoolCreateFlags flags_ = {}, uint32_t queueFamilyIndex_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , queueFamilyIndex( queueFamilyIndex_ ) + : pNext{ pNext_ } + , flags{ flags_ } + , queueFamilyIndex{ queueFamilyIndex_ } { } @@ -15613,9 +15840,9 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR SpecializationMapEntry( uint32_t constantID_ = {}, uint32_t offset_ = {}, size_t size_ = {} ) VULKAN_HPP_NOEXCEPT - : constantID( constantID_ ) - , offset( offset_ ) - , size( size_ ) + : constantID{ constantID_ } + , offset{ offset_ } + , size{ size_ } { } @@ -15710,10 +15937,10 @@ namespace VULKAN_HPP_NAMESPACE const VULKAN_HPP_NAMESPACE::SpecializationMapEntry * pMapEntries_ = {}, size_t dataSize_ = {}, const void * pData_ = {} ) VULKAN_HPP_NOEXCEPT - : mapEntryCount( mapEntryCount_ ) - , pMapEntries( pMapEntries_ ) - , dataSize( dataSize_ ) - , pData( pData_ ) + : mapEntryCount{ mapEntryCount_ } + , pMapEntries{ pMapEntries_ } + , dataSize{ dataSize_ } + , pData{ pData_ } { } @@ -15849,12 +16076,12 @@ namespace VULKAN_HPP_NAMESPACE const char * pName_ = {}, const VULKAN_HPP_NAMESPACE::SpecializationInfo * pSpecializationInfo_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , stage( stage_ ) - , module( module_ ) - , pName( pName_ ) - , pSpecializationInfo( pSpecializationInfo_ ) + : pNext{ pNext_ } + , flags{ flags_ } + , stage{ stage_ } + , module{ module_ } + , pName{ pName_ } + , pSpecializationInfo{ pSpecializationInfo_ } { } @@ -16005,12 +16232,12 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, int32_t basePipelineIndex_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , stage( stage_ ) - , layout( layout_ ) - , basePipelineHandle( basePipelineHandle_ ) - , basePipelineIndex( basePipelineIndex_ ) + : pNext{ pNext_ } + , flags{ flags_ } + , stage{ stage_ } + , layout{ layout_ } + , basePipelineHandle{ basePipelineHandle_ } + , basePipelineIndex{ basePipelineIndex_ } { } @@ -16143,10 +16370,10 @@ namespace VULKAN_HPP_NAMESPACE 
VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, VULKAN_HPP_NAMESPACE::DeviceAddress pipelineDeviceAddressCaptureReplay_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , deviceAddress( deviceAddress_ ) - , size( size_ ) - , pipelineDeviceAddressCaptureReplay( pipelineDeviceAddressCaptureReplay_ ) + : pNext{ pNext_ } + , deviceAddress{ deviceAddress_ } + , size{ size_ } + , pipelineDeviceAddressCaptureReplay{ pipelineDeviceAddressCaptureReplay_ } { } @@ -16264,10 +16491,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, VULKAN_HPP_NAMESPACE::ConditionalRenderingFlagsEXT flags_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , buffer( buffer_ ) - , offset( offset_ ) - , flags( flags_ ) + : pNext{ pNext_ } + , buffer{ buffer_ } + , offset{ offset_ } + , flags{ flags_ } { } @@ -16377,10 +16604,10 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ConformanceVersion( uint8_t major_ = {}, uint8_t minor_ = {}, uint8_t subminor_ = {}, uint8_t patch_ = {} ) VULKAN_HPP_NOEXCEPT - : major( major_ ) - , minor( minor_ ) - , subminor( subminor_ ) - , patch( patch_ ) + : major{ major_ } + , minor{ minor_ } + , subminor{ subminor_ } + , patch{ patch_ } { } @@ -16490,16 +16717,16 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Bool32 saturatingAccumulation_ = {}, VULKAN_HPP_NAMESPACE::ScopeKHR scope_ = VULKAN_HPP_NAMESPACE::ScopeKHR::eDevice, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , MSize( MSize_ ) - , NSize( NSize_ ) - , KSize( KSize_ ) - , AType( AType_ ) - , BType( BType_ ) - , CType( CType_ ) - , ResultType( ResultType_ ) - , saturatingAccumulation( saturatingAccumulation_ ) - , scope( scope_ ) + : pNext{ pNext_ } + , MSize{ MSize_ } + , NSize{ NSize_ } + , KSize{ KSize_ } + , AType{ AType_ } + , BType{ BType_ } + , CType{ CType_ } + , ResultType{ ResultType_ } + , saturatingAccumulation{ saturatingAccumulation_ } + , scope{ scope_ } { } @@ -16608,15 +16835,15 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::ComponentTypeNV DType_ = {}, VULKAN_HPP_NAMESPACE::ScopeNV scope_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , MSize( MSize_ ) - , NSize( NSize_ ) - , KSize( KSize_ ) - , AType( AType_ ) - , BType( BType_ ) - , CType( CType_ ) - , DType( DType_ ) - , scope( scope_ ) + : pNext{ pNext_ } + , MSize{ MSize_ } + , NSize{ NSize_ } + , KSize{ KSize_ } + , AType{ AType_ } + , BType{ BType_ } + , CType{ CType_ } + , DType{ DType_ } + , scope{ scope_ } { } @@ -16718,10 +16945,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst_ = {}, VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ = VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , src( src_ ) - , dst( dst_ ) - , mode( mode_ ) + : pNext{ pNext_ } + , src{ src_ } + , dst{ dst_ } + , mode{ mode_ } { } @@ -16838,10 +17065,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR dst_ = {}, VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ = VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , src( src_ ) - , dst( dst_ ) - , mode( mode_ ) + : pNext{ pNext_ } + , src{ src_ } + , dst{ dst_ } + , mode{ mode_ } { } @@ -16941,11 +17168,11 @@ namespace VULKAN_HPP_NAMESPACE uint32_t 
regionCount_ = {}, const VULKAN_HPP_NAMESPACE::BufferCopy2 * pRegions_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , srcBuffer( srcBuffer_ ) - , dstBuffer( dstBuffer_ ) - , regionCount( regionCount_ ) - , pRegions( pRegions_ ) + : pNext{ pNext_ } + , srcBuffer{ srcBuffer_ } + , dstBuffer{ dstBuffer_ } + , regionCount{ regionCount_ } + , pRegions{ pRegions_ } { } @@ -17090,12 +17317,12 @@ namespace VULKAN_HPP_NAMESPACE uint32_t regionCount_ = {}, const VULKAN_HPP_NAMESPACE::BufferImageCopy2 * pRegions_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , srcBuffer( srcBuffer_ ) - , dstImage( dstImage_ ) - , dstImageLayout( dstImageLayout_ ) - , regionCount( regionCount_ ) - , pRegions( pRegions_ ) + : pNext{ pNext_ } + , srcBuffer{ srcBuffer_ } + , dstImage{ dstImage_ } + , dstImageLayout{ dstImageLayout_ } + , regionCount{ regionCount_ } + , pRegions{ pRegions_ } { } @@ -17255,8 +17482,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR CopyCommandTransformInfoQCOM( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , transform( transform_ ) + : pNext{ pNext_ } + , transform{ transform_ } { } @@ -17358,14 +17585,14 @@ namespace VULKAN_HPP_NAMESPACE uint32_t dstArrayElement_ = {}, uint32_t descriptorCount_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , srcSet( srcSet_ ) - , srcBinding( srcBinding_ ) - , srcArrayElement( srcArrayElement_ ) - , dstSet( dstSet_ ) - , dstBinding( dstBinding_ ) - , dstArrayElement( dstArrayElement_ ) - , descriptorCount( descriptorCount_ ) + : pNext{ pNext_ } + , srcSet{ srcSet_ } + , srcBinding{ srcBinding_ } + , srcArrayElement{ srcArrayElement_ } + , dstSet{ dstSet_ } + , dstBinding{ dstBinding_ } + , dstArrayElement{ dstArrayElement_ } + , descriptorCount{ descriptorCount_ } { } @@ -17514,12 +17741,12 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Offset3D dstOffset_ = {}, VULKAN_HPP_NAMESPACE::Extent3D extent_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , srcSubresource( srcSubresource_ ) - , srcOffset( srcOffset_ ) - , dstSubresource( dstSubresource_ ) - , dstOffset( dstOffset_ ) - , extent( extent_ ) + : pNext{ pNext_ } + , srcSubresource{ srcSubresource_ } + , srcOffset{ srcOffset_ } + , dstSubresource{ dstSubresource_ } + , dstOffset{ dstOffset_ } + , extent{ extent_ } { } @@ -17654,13 +17881,13 @@ namespace VULKAN_HPP_NAMESPACE uint32_t regionCount_ = {}, const VULKAN_HPP_NAMESPACE::ImageCopy2 * pRegions_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , srcImage( srcImage_ ) - , srcImageLayout( srcImageLayout_ ) - , dstImage( dstImage_ ) - , dstImageLayout( dstImageLayout_ ) - , regionCount( regionCount_ ) - , pRegions( pRegions_ ) + : pNext{ pNext_ } + , srcImage{ srcImage_ } + , srcImageLayout{ srcImageLayout_ } + , dstImage{ dstImage_ } + , dstImageLayout{ dstImageLayout_ } + , regionCount{ regionCount_ } + , pRegions{ pRegions_ } { } @@ -17829,12 +18056,12 @@ namespace VULKAN_HPP_NAMESPACE uint32_t regionCount_ = {}, const VULKAN_HPP_NAMESPACE::BufferImageCopy2 * pRegions_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , srcImage( srcImage_ ) - , srcImageLayout( srcImageLayout_ ) - , dstBuffer( dstBuffer_ ) - , regionCount( regionCount_ ) - , pRegions( pRegions_ ) + : pNext{ pNext_ } + , 
srcImage{ srcImage_ } + , srcImageLayout{ srcImageLayout_ } + , dstBuffer{ dstBuffer_ } + , regionCount{ regionCount_ } + , pRegions{ pRegions_ } { } @@ -17999,14 +18226,14 @@ namespace VULKAN_HPP_NAMESPACE uint32_t regionCount_ = {}, const VULKAN_HPP_NAMESPACE::ImageCopy2 * pRegions_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , srcImage( srcImage_ ) - , srcImageLayout( srcImageLayout_ ) - , dstImage( dstImage_ ) - , dstImageLayout( dstImageLayout_ ) - , regionCount( regionCount_ ) - , pRegions( pRegions_ ) + : pNext{ pNext_ } + , flags{ flags_ } + , srcImage{ srcImage_ } + , srcImageLayout{ srcImageLayout_ } + , dstImage{ dstImage_ } + , dstImageLayout{ dstImageLayout_ } + , regionCount{ regionCount_ } + , pRegions{ pRegions_ } { } @@ -18189,13 +18416,13 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Offset3D imageOffset_ = {}, VULKAN_HPP_NAMESPACE::Extent3D imageExtent_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , pHostPointer( pHostPointer_ ) - , memoryRowLength( memoryRowLength_ ) - , memoryImageHeight( memoryImageHeight_ ) - , imageSubresource( imageSubresource_ ) - , imageOffset( imageOffset_ ) - , imageExtent( imageExtent_ ) + : pNext{ pNext_ } + , pHostPointer{ pHostPointer_ } + , memoryRowLength{ memoryRowLength_ } + , memoryImageHeight{ memoryImageHeight_ } + , imageSubresource{ imageSubresource_ } + , imageOffset{ imageOffset_ } + , imageExtent{ imageExtent_ } { } @@ -18340,12 +18567,12 @@ namespace VULKAN_HPP_NAMESPACE uint32_t regionCount_ = {}, const VULKAN_HPP_NAMESPACE::ImageToMemoryCopyEXT * pRegions_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , srcImage( srcImage_ ) - , srcImageLayout( srcImageLayout_ ) - , regionCount( regionCount_ ) - , pRegions( pRegions_ ) + : pNext{ pNext_ } + , flags{ flags_ } + , srcImage{ srcImage_ } + , srcImageLayout{ srcImageLayout_ } + , regionCount{ regionCount_ } + , pRegions{ pRegions_ } { } @@ -18500,9 +18727,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR CopyMemoryIndirectCommandNV( VULKAN_HPP_NAMESPACE::DeviceAddress srcAddress_ = {}, VULKAN_HPP_NAMESPACE::DeviceAddress dstAddress_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {} ) VULKAN_HPP_NOEXCEPT - : srcAddress( srcAddress_ ) - , dstAddress( dstAddress_ ) - , size( size_ ) + : srcAddress{ srcAddress_ } + , dstAddress{ dstAddress_ } + , size{ size_ } { } @@ -18601,10 +18828,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst_ = {}, VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ = VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , src( src_ ) - , dst( dst_ ) - , mode( mode_ ) + : pNext{ pNext_ } + , src{ src_ } + , dst{ dst_ } + , mode{ mode_ } { } @@ -18703,12 +18930,12 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D imageOffset_ = {}, VULKAN_HPP_NAMESPACE::Extent3D imageExtent_ = {} ) VULKAN_HPP_NOEXCEPT - : srcAddress( srcAddress_ ) - , bufferRowLength( bufferRowLength_ ) - , bufferImageHeight( bufferImageHeight_ ) - , imageSubresource( imageSubresource_ ) - , imageOffset( imageOffset_ ) - , imageExtent( imageExtent_ ) + : srcAddress{ srcAddress_ } + , bufferRowLength{ bufferRowLength_ } + , bufferImageHeight{ bufferImageHeight_ } + , imageSubresource{ imageSubresource_ } + , imageOffset{ 
imageOffset_ } + , imageExtent{ imageExtent_ } { } @@ -18837,13 +19064,13 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Offset3D imageOffset_ = {}, VULKAN_HPP_NAMESPACE::Extent3D imageExtent_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , pHostPointer( pHostPointer_ ) - , memoryRowLength( memoryRowLength_ ) - , memoryImageHeight( memoryImageHeight_ ) - , imageSubresource( imageSubresource_ ) - , imageOffset( imageOffset_ ) - , imageExtent( imageExtent_ ) + : pNext{ pNext_ } + , pHostPointer{ pHostPointer_ } + , memoryRowLength{ memoryRowLength_ } + , memoryImageHeight{ memoryImageHeight_ } + , imageSubresource{ imageSubresource_ } + , imageOffset{ imageOffset_ } + , imageExtent{ imageExtent_ } { } @@ -18988,12 +19215,12 @@ namespace VULKAN_HPP_NAMESPACE uint32_t regionCount_ = {}, const VULKAN_HPP_NAMESPACE::MemoryToImageCopyEXT * pRegions_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , dstImage( dstImage_ ) - , dstImageLayout( dstImageLayout_ ) - , regionCount( regionCount_ ) - , pRegions( pRegions_ ) + : pNext{ pNext_ } + , flags{ flags_ } + , dstImage{ dstImage_ } + , dstImageLayout{ dstImageLayout_ } + , regionCount{ regionCount_ } + , pRegions{ pRegions_ } { } @@ -19152,10 +19379,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::MicromapEXT dst_ = {}, VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT mode_ = VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT::eClone, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , src( src_ ) - , dst( dst_ ) - , mode( mode_ ) + : pNext{ pNext_ } + , src{ src_ } + , dst{ dst_ } + , mode{ mode_ } { } @@ -19253,10 +19480,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::MicromapEXT dst_ = {}, VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT mode_ = VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT::eClone, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , src( src_ ) - , dst( dst_ ) - , mode( mode_ ) + : pNext{ pNext_ } + , src{ src_ } + , dst{ dst_ } + , mode{ mode_ } { } @@ -19371,10 +19598,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR dst_ = {}, VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT mode_ = VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT::eClone, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , src( src_ ) - , dst( dst_ ) - , mode( mode_ ) + : pNext{ pNext_ } + , src{ src_ } + , dst{ dst_ } + , mode{ mode_ } { } @@ -19470,9 +19697,9 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR CuFunctionCreateInfoNVX( VULKAN_HPP_NAMESPACE::CuModuleNVX module_ = {}, const char * pName_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , module( module_ ) - , pName( pName_ ) + : pNext{ pNext_ } + , module{ module_ } + , pName{ pName_ } { } @@ -19595,19 +19822,19 @@ namespace VULKAN_HPP_NAMESPACE size_t extraCount_ = {}, const void * const * pExtras_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , function( function_ ) - , gridDimX( gridDimX_ ) - , gridDimY( gridDimY_ ) - , gridDimZ( gridDimZ_ ) - , blockDimX( blockDimX_ ) - , blockDimY( blockDimY_ ) - , blockDimZ( blockDimZ_ ) - , sharedMemBytes( sharedMemBytes_ ) - , paramCount( paramCount_ ) - , pParams( pParams_ ) - , extraCount( extraCount_ ) - , pExtras( pExtras_ ) + : pNext{ pNext_ } + , function{ function_ } + , gridDimX{ gridDimX_ } + , gridDimY{ gridDimY_ } + , gridDimZ{ gridDimZ_ } + , 
blockDimX{ blockDimX_ } + , blockDimY{ blockDimY_ } + , blockDimZ{ blockDimZ_ } + , sharedMemBytes{ sharedMemBytes_ } + , paramCount{ paramCount_ } + , pParams{ pParams_ } + , extraCount{ extraCount_ } + , pExtras{ pExtras_ } { } @@ -19840,9 +20067,9 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR CuModuleCreateInfoNVX( size_t dataSize_ = {}, const void * pData_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , dataSize( dataSize_ ) - , pData( pData_ ) + : pNext{ pNext_ } + , dataSize{ dataSize_ } + , pData{ pData_ } { } @@ -19964,9 +20191,9 @@ namespace VULKAN_HPP_NAMESPACE # if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR CudaFunctionCreateInfoNV( VULKAN_HPP_NAMESPACE::CudaModuleNV module_ = {}, const char * pName_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , module( module_ ) - , pName( pName_ ) + : pNext{ pNext_ } + , module{ module_ } + , pName{ pName_ } { } @@ -20091,19 +20318,19 @@ namespace VULKAN_HPP_NAMESPACE size_t extraCount_ = {}, const void * const * pExtras_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , function( function_ ) - , gridDimX( gridDimX_ ) - , gridDimY( gridDimY_ ) - , gridDimZ( gridDimZ_ ) - , blockDimX( blockDimX_ ) - , blockDimY( blockDimY_ ) - , blockDimZ( blockDimZ_ ) - , sharedMemBytes( sharedMemBytes_ ) - , paramCount( paramCount_ ) - , pParams( pParams_ ) - , extraCount( extraCount_ ) - , pExtras( pExtras_ ) + : pNext{ pNext_ } + , function{ function_ } + , gridDimX{ gridDimX_ } + , gridDimY{ gridDimY_ } + , gridDimZ{ gridDimZ_ } + , blockDimX{ blockDimX_ } + , blockDimY{ blockDimY_ } + , blockDimZ{ blockDimZ_ } + , sharedMemBytes{ sharedMemBytes_ } + , paramCount{ paramCount_ } + , pParams{ pParams_ } + , extraCount{ extraCount_ } + , pExtras{ pExtras_ } { } @@ -20338,9 +20565,9 @@ namespace VULKAN_HPP_NAMESPACE # if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR CudaModuleCreateInfoNV( size_t dataSize_ = {}, const void * pData_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , dataSize( dataSize_ ) - , pData( pData_ ) + : pNext{ pNext_ } + , dataSize{ dataSize_ } + , pData{ pData_ } { } @@ -20466,11 +20693,11 @@ namespace VULKAN_HPP_NAMESPACE uint32_t signalSemaphoreValuesCount_ = {}, const uint64_t * pSignalSemaphoreValues_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , waitSemaphoreValuesCount( waitSemaphoreValuesCount_ ) - , pWaitSemaphoreValues( pWaitSemaphoreValues_ ) - , signalSemaphoreValuesCount( signalSemaphoreValuesCount_ ) - , pSignalSemaphoreValues( pSignalSemaphoreValues_ ) + : pNext{ pNext_ } + , waitSemaphoreValuesCount{ waitSemaphoreValuesCount_ } + , pWaitSemaphoreValues{ pWaitSemaphoreValues_ } + , signalSemaphoreValuesCount{ signalSemaphoreValuesCount_ } + , pSignalSemaphoreValues{ pSignalSemaphoreValues_ } { } @@ -20628,9 +20855,9 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR_14 DebugMarkerMarkerInfoEXT( const char * pMarkerName_ = {}, std::array const & color_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , pMarkerName( pMarkerName_ ) - , color( color_ ) + : pNext{ pNext_ } + , pMarkerName{ pMarkerName_ } + , color{ color_ } { } @@ -20746,10 +20973,10 @@ namespace VULKAN_HPP_NAMESPACE uint64_t object_ = {}, const char * pObjectName_ = {}, const void * pNext_ = nullptr ) 
VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , objectType( objectType_ ) - , object( object_ ) - , pObjectName( pObjectName_ ) + : pNext{ pNext_ } + , objectType{ objectType_ } + , object{ object_ } + , pObjectName{ pObjectName_ } { } @@ -20880,12 +21107,12 @@ namespace VULKAN_HPP_NAMESPACE size_t tagSize_ = {}, const void * pTag_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , objectType( objectType_ ) - , object( object_ ) - , tagName( tagName_ ) - , tagSize( tagSize_ ) - , pTag( pTag_ ) + : pNext{ pNext_ } + , objectType{ objectType_ } + , object{ object_ } + , tagName{ tagName_ } + , tagSize{ tagSize_ } + , pTag{ pTag_ } { } @@ -21040,10 +21267,10 @@ namespace VULKAN_HPP_NAMESPACE PFN_vkDebugReportCallbackEXT pfnCallback_ = {}, void * pUserData_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , pfnCallback( pfnCallback_ ) - , pUserData( pUserData_ ) + : pNext{ pNext_ } + , flags{ flags_ } + , pfnCallback{ pfnCallback_ } + , pUserData{ pUserData_ } { } @@ -21153,9 +21380,9 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR_14 DebugUtilsLabelEXT( const char * pLabelName_ = {}, std::array const & color_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , pLabelName( pLabelName_ ) - , color( color_ ) + : pNext{ pNext_ } + , pLabelName{ pLabelName_ } + , color{ color_ } { } @@ -21267,10 +21494,10 @@ namespace VULKAN_HPP_NAMESPACE uint64_t objectHandle_ = {}, const char * pObjectName_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , objectType( objectType_ ) - , objectHandle( objectHandle_ ) - , pObjectName( pObjectName_ ) + : pNext{ pNext_ } + , objectType{ objectType_ } + , objectHandle{ objectHandle_ } + , pObjectName{ pObjectName_ } { } @@ -21402,17 +21629,17 @@ namespace VULKAN_HPP_NAMESPACE uint32_t objectCount_ = {}, const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT * pObjects_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , pMessageIdName( pMessageIdName_ ) - , messageIdNumber( messageIdNumber_ ) - , pMessage( pMessage_ ) - , queueLabelCount( queueLabelCount_ ) - , pQueueLabels( pQueueLabels_ ) - , cmdBufLabelCount( cmdBufLabelCount_ ) - , pCmdBufLabels( pCmdBufLabels_ ) - , objectCount( objectCount_ ) - , pObjects( pObjects_ ) + : pNext{ pNext_ } + , flags{ flags_ } + , pMessageIdName{ pMessageIdName_ } + , messageIdNumber{ messageIdNumber_ } + , pMessage{ pMessage_ } + , queueLabelCount{ queueLabelCount_ } + , pQueueLabels{ pQueueLabels_ } + , cmdBufLabelCount{ cmdBufLabelCount_ } + , pCmdBufLabels{ pCmdBufLabels_ } + , objectCount{ objectCount_ } + , pObjects{ pObjects_ } { } @@ -21676,12 +21903,12 @@ namespace VULKAN_HPP_NAMESPACE PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback_ = {}, void * pUserData_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , messageSeverity( messageSeverity_ ) - , messageType( messageType_ ) - , pfnUserCallback( pfnUserCallback_ ) - , pUserData( pUserData_ ) + : pNext{ pNext_ } + , flags{ flags_ } + , messageSeverity{ messageSeverity_ } + , messageType{ messageType_ } + , pfnUserCallback{ pfnUserCallback_ } + , pUserData{ pUserData_ } { } @@ -21814,12 +22041,12 @@ namespace VULKAN_HPP_NAMESPACE size_t tagSize_ = {}, const void * pTag_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , objectType( objectType_ ) - 
, objectHandle( objectHandle_ ) - , tagName( tagName_ ) - , tagSize( tagSize_ ) - , pTag( pTag_ ) + : pNext{ pNext_ } + , objectType{ objectType_ } + , objectHandle{ objectHandle_ } + , tagName{ tagName_ } + , tagSize{ tagSize_ } + , pTag{ pTag_ } { } @@ -21977,11 +22204,11 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DeviceSize compressedSize_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize decompressedSize_ = {}, VULKAN_HPP_NAMESPACE::MemoryDecompressionMethodFlagsNV decompressionMethod_ = {} ) VULKAN_HPP_NOEXCEPT - : srcAddress( srcAddress_ ) - , dstAddress( dstAddress_ ) - , compressedSize( compressedSize_ ) - , decompressedSize( decompressedSize_ ) - , decompressionMethod( decompressionMethod_ ) + : srcAddress{ srcAddress_ } + , dstAddress{ dstAddress_ } + , compressedSize{ compressedSize_ } + , decompressedSize{ decompressedSize_ } + , decompressionMethod{ decompressionMethod_ } { } @@ -22097,8 +22324,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DedicatedAllocationBufferCreateInfoNV( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , dedicatedAllocation( dedicatedAllocation_ ) + : pNext{ pNext_ } + , dedicatedAllocation{ dedicatedAllocation_ } { } @@ -22195,8 +22422,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DedicatedAllocationImageCreateInfoNV( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , dedicatedAllocation( dedicatedAllocation_ ) + : pNext{ pNext_ } + , dedicatedAllocation{ dedicatedAllocation_ } { } @@ -22294,9 +22521,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR DedicatedAllocationMemoryAllocateInfoNV( VULKAN_HPP_NAMESPACE::Image image_ = {}, VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , image( image_ ) - , buffer( buffer_ ) + : pNext{ pNext_ } + , image{ image_ } + , buffer{ buffer_ } { } @@ -22402,11 +22629,11 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::PipelineStageFlags2 dstStageMask_ = {}, VULKAN_HPP_NAMESPACE::AccessFlags2 dstAccessMask_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , srcStageMask( srcStageMask_ ) - , srcAccessMask( srcAccessMask_ ) - , dstStageMask( dstStageMask_ ) - , dstAccessMask( dstAccessMask_ ) + : pNext{ pNext_ } + , srcStageMask{ srcStageMask_ } + , srcAccessMask{ srcAccessMask_ } + , dstStageMask{ dstStageMask_ } + , dstAccessMask{ dstAccessMask_ } { } @@ -22528,11 +22755,11 @@ namespace VULKAN_HPP_NAMESPACE uint32_t levelCount_ = {}, uint32_t baseArrayLayer_ = {}, uint32_t layerCount_ = {} ) VULKAN_HPP_NOEXCEPT - : aspectMask( aspectMask_ ) - , baseMipLevel( baseMipLevel_ ) - , levelCount( levelCount_ ) - , baseArrayLayer( baseArrayLayer_ ) - , layerCount( layerCount_ ) + : aspectMask{ aspectMask_ } + , baseMipLevel{ baseMipLevel_ } + , levelCount{ levelCount_ } + , baseArrayLayer{ baseArrayLayer_ } + , layerCount{ layerCount_ } { } @@ -22652,17 +22879,17 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Image image_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , srcStageMask( srcStageMask_ ) - , srcAccessMask( srcAccessMask_ ) - , dstStageMask( dstStageMask_ ) - , dstAccessMask( dstAccessMask_ ) - , oldLayout( 
oldLayout_ ) - , newLayout( newLayout_ ) - , srcQueueFamilyIndex( srcQueueFamilyIndex_ ) - , dstQueueFamilyIndex( dstQueueFamilyIndex_ ) - , image( image_ ) - , subresourceRange( subresourceRange_ ) + : pNext{ pNext_ } + , srcStageMask{ srcStageMask_ } + , srcAccessMask{ srcAccessMask_ } + , dstStageMask{ dstStageMask_ } + , dstAccessMask{ dstAccessMask_ } + , oldLayout{ oldLayout_ } + , newLayout{ newLayout_ } + , srcQueueFamilyIndex{ srcQueueFamilyIndex_ } + , dstQueueFamilyIndex{ dstQueueFamilyIndex_ } + , image{ image_ } + , subresourceRange{ subresourceRange_ } { } @@ -22854,14 +23081,14 @@ namespace VULKAN_HPP_NAMESPACE uint32_t imageMemoryBarrierCount_ = {}, const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2 * pImageMemoryBarriers_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , dependencyFlags( dependencyFlags_ ) - , memoryBarrierCount( memoryBarrierCount_ ) - , pMemoryBarriers( pMemoryBarriers_ ) - , bufferMemoryBarrierCount( bufferMemoryBarrierCount_ ) - , pBufferMemoryBarriers( pBufferMemoryBarriers_ ) - , imageMemoryBarrierCount( imageMemoryBarrierCount_ ) - , pImageMemoryBarriers( pImageMemoryBarriers_ ) + : pNext{ pNext_ } + , dependencyFlags{ dependencyFlags_ } + , memoryBarrierCount{ memoryBarrierCount_ } + , pMemoryBarriers{ pMemoryBarriers_ } + , bufferMemoryBarrierCount{ bufferMemoryBarrierCount_ } + , pBufferMemoryBarriers{ pBufferMemoryBarriers_ } + , imageMemoryBarrierCount{ imageMemoryBarrierCount_ } + , pImageMemoryBarriers{ pImageMemoryBarriers_ } { } @@ -23069,10 +23296,10 @@ namespace VULKAN_HPP_NAMESPACE float depthBiasClamp_ = {}, float depthBiasSlopeFactor_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , depthBiasConstantFactor( depthBiasConstantFactor_ ) - , depthBiasClamp( depthBiasClamp_ ) - , depthBiasSlopeFactor( depthBiasSlopeFactor_ ) + : pNext{ pNext_ } + , depthBiasConstantFactor{ depthBiasConstantFactor_ } + , depthBiasClamp{ depthBiasClamp_ } + , depthBiasSlopeFactor{ depthBiasSlopeFactor_ } { } @@ -23182,9 +23409,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DepthBiasRepresentationEXT::eLeastRepresentableValueFormat, VULKAN_HPP_NAMESPACE::Bool32 depthBiasExact_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , depthBiasRepresentation( depthBiasRepresentation_ ) - , depthBiasExact( depthBiasExact_ ) + : pNext{ pNext_ } + , depthBiasRepresentation{ depthBiasRepresentation_ } + , depthBiasExact{ depthBiasExact_ } { } @@ -23294,10 +23521,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DeviceSize range_ = {}, VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , address( address_ ) - , range( range_ ) - , format( format_ ) + : pNext{ pNext_ } + , address{ address_ } + , range{ range_ } + , format{ format_ } { } @@ -23411,10 +23638,10 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DescriptorBufferBindingInfoEXT( VULKAN_HPP_NAMESPACE::DeviceAddress address_ = {}, VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , address( address_ ) - , usage( usage_ ) + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , address{ address_ } + , usage{ usage_ } { } @@ -23435,7 +23662,7 @@ namespace VULKAN_HPP_NAMESPACE } #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 
DescriptorBufferBindingInfoEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DescriptorBufferBindingInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -23469,7 +23696,7 @@ namespace VULKAN_HPP_NAMESPACE auto # else std::tuple # endif @@ -23499,7 +23726,7 @@ namespace VULKAN_HPP_NAMESPACE public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorBufferBindingInfoEXT; - void * pNext = {}; + const void * pNext = {}; VULKAN_HPP_NAMESPACE::DeviceAddress address = {}; VULKAN_HPP_NAMESPACE::BufferUsageFlags usage = {}; }; @@ -23519,9 +23746,9 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DescriptorBufferBindingPushDescriptorBufferHandleEXT( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , buffer( buffer_ ) + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , buffer{ buffer_ } { } @@ -23544,7 +23771,7 @@ namespace VULKAN_HPP_NAMESPACE } #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 DescriptorBufferBindingPushDescriptorBufferHandleEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DescriptorBufferBindingPushDescriptorBufferHandleEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; @@ -23571,7 +23798,7 @@ namespace VULKAN_HPP_NAMESPACE # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { @@ -23599,7 +23826,7 @@ namespace VULKAN_HPP_NAMESPACE public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorBufferBindingPushDescriptorBufferHandleEXT; - void * pNext = {}; + const void * pNext = {}; VULKAN_HPP_NAMESPACE::Buffer buffer = {}; }; @@ -23617,9 +23844,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR DescriptorBufferInfo( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize range_ = {} ) VULKAN_HPP_NOEXCEPT - : buffer( buffer_ ) - , offset( offset_ ) - , range( range_ ) + : buffer{ buffer_ } + , offset{ offset_ } + , range{ range_ } { } @@ -23714,9 +23941,9 @@ namespace VULKAN_HPP_NAMESPACE DescriptorImageInfo( VULKAN_HPP_NAMESPACE::Sampler sampler_ = {}, VULKAN_HPP_NAMESPACE::ImageView imageView_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined ) VULKAN_HPP_NOEXCEPT - : sampler( sampler_ ) - , imageView( imageView_ ) - , imageLayout( imageLayout_ ) + : sampler{ sampler_ } + , imageView{ imageView_ } + , imageLayout{ imageLayout_ } { } @@ -23933,9 +24160,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR_14 DescriptorGetInfoEXT( VULKAN_HPP_NAMESPACE::DescriptorType type_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler, VULKAN_HPP_NAMESPACE::DescriptorDataEXT data_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , type( type_ ) - , data( data_ ) + : pNext{ pNext_ } + , type{ type_ } + , data{ data_ } { } @@ -24020,8 +24247,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DescriptorPoolSize( VULKAN_HPP_NAMESPACE::DescriptorType type_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler, uint32_t descriptorCount_ = {} ) VULKAN_HPP_NOEXCEPT - : type( type_ ) - , descriptorCount( descriptorCount_ ) + : type{ type_ } + , descriptorCount{ descriptorCount_ } { } @@ -24110,11 
+24337,11 @@ namespace VULKAN_HPP_NAMESPACE uint32_t poolSizeCount_ = {}, const VULKAN_HPP_NAMESPACE::DescriptorPoolSize * pPoolSizes_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , maxSets( maxSets_ ) - , poolSizeCount( poolSizeCount_ ) - , pPoolSizes( pPoolSizes_ ) + : pNext{ pNext_ } + , flags{ flags_ } + , maxSets{ maxSets_ } + , poolSizeCount{ poolSizeCount_ } + , pPoolSizes{ pPoolSizes_ } { } @@ -24257,8 +24484,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DescriptorPoolInlineUniformBlockCreateInfo( uint32_t maxInlineUniformBlockBindings_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , maxInlineUniformBlockBindings( maxInlineUniformBlockBindings_ ) + : pNext{ pNext_ } + , maxInlineUniformBlockBindings{ maxInlineUniformBlockBindings_ } { } @@ -24359,10 +24586,10 @@ namespace VULKAN_HPP_NAMESPACE uint32_t descriptorSetCount_ = {}, const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , descriptorPool( descriptorPool_ ) - , descriptorSetCount( descriptorSetCount_ ) - , pSetLayouts( pSetLayouts_ ) + : pNext{ pNext_ } + , descriptorPool{ descriptorPool_ } + , descriptorSetCount{ descriptorSetCount_ } + , pSetLayouts{ pSetLayouts_ } { } @@ -24497,9 +24724,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR DescriptorSetBindingReferenceVALVE( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout_ = {}, uint32_t binding_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , descriptorSetLayout( descriptorSetLayout_ ) - , binding( binding_ ) + : pNext{ pNext_ } + , descriptorSetLayout{ descriptorSetLayout_ } + , binding{ binding_ } { } @@ -24603,11 +24830,11 @@ namespace VULKAN_HPP_NAMESPACE uint32_t descriptorCount_ = {}, VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ = {}, const VULKAN_HPP_NAMESPACE::Sampler * pImmutableSamplers_ = {} ) VULKAN_HPP_NOEXCEPT - : binding( binding_ ) - , descriptorType( descriptorType_ ) - , descriptorCount( descriptorCount_ ) - , stageFlags( stageFlags_ ) - , pImmutableSamplers( pImmutableSamplers_ ) + : binding{ binding_ } + , descriptorType{ descriptorType_ } + , descriptorCount{ descriptorCount_ } + , stageFlags{ stageFlags_ } + , pImmutableSamplers{ pImmutableSamplers_ } { } @@ -24747,9 +24974,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR DescriptorSetLayoutBindingFlagsCreateInfo( uint32_t bindingCount_ = {}, const VULKAN_HPP_NAMESPACE::DescriptorBindingFlags * pBindingFlags_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , bindingCount( bindingCount_ ) - , pBindingFlags( pBindingFlags_ ) + : pNext{ pNext_ } + , bindingCount{ bindingCount_ } + , pBindingFlags{ pBindingFlags_ } { } @@ -24876,10 +25103,10 @@ namespace VULKAN_HPP_NAMESPACE uint32_t bindingCount_ = {}, const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding * pBindings_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , bindingCount( bindingCount_ ) - , pBindings( pBindings_ ) + : pNext{ pNext_ } + , flags{ flags_ } + , bindingCount{ bindingCount_ } + , pBindings{ pBindings_ } { } @@ -25014,9 +25241,9 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DescriptorSetLayoutHostMappingInfoVALVE( size_t descriptorOffset_ = {}, uint32_t descriptorSize_ = {}, void * pNext_ = 
nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , descriptorOffset( descriptorOffset_ ) - , descriptorSize( descriptorSize_ ) + : pNext{ pNext_ } + , descriptorOffset{ descriptorOffset_ } + , descriptorSize{ descriptorSize_ } { } @@ -25118,8 +25345,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DescriptorSetLayoutSupport( VULKAN_HPP_NAMESPACE::Bool32 supported_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , supported( supported_ ) + : pNext{ pNext_ } + , supported{ supported_ } { } @@ -25204,9 +25431,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR DescriptorSetVariableDescriptorCountAllocateInfo( uint32_t descriptorSetCount_ = {}, const uint32_t * pDescriptorCounts_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , descriptorSetCount( descriptorSetCount_ ) - , pDescriptorCounts( pDescriptorCounts_ ) + : pNext{ pNext_ } + , descriptorSetCount{ descriptorSetCount_ } + , pDescriptorCounts{ pDescriptorCounts_ } { } @@ -25331,8 +25558,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DescriptorSetVariableDescriptorCountLayoutSupport( uint32_t maxVariableDescriptorCount_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , maxVariableDescriptorCount( maxVariableDescriptorCount_ ) + : pNext{ pNext_ } + , maxVariableDescriptorCount{ maxVariableDescriptorCount_ } { } @@ -25419,12 +25646,12 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler, size_t offset_ = {}, size_t stride_ = {} ) VULKAN_HPP_NOEXCEPT - : dstBinding( dstBinding_ ) - , dstArrayElement( dstArrayElement_ ) - , descriptorCount( descriptorCount_ ) - , descriptorType( descriptorType_ ) - , offset( offset_ ) - , stride( stride_ ) + : dstBinding{ dstBinding_ } + , dstArrayElement{ dstArrayElement_ } + , descriptorCount{ descriptorCount_ } + , descriptorType{ descriptorType_ } + , offset{ offset_ } + , stride{ stride_ } { } @@ -25552,15 +25779,15 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ = {}, uint32_t set_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , descriptorUpdateEntryCount( descriptorUpdateEntryCount_ ) - , pDescriptorUpdateEntries( pDescriptorUpdateEntries_ ) - , templateType( templateType_ ) - , descriptorSetLayout( descriptorSetLayout_ ) - , pipelineBindPoint( pipelineBindPoint_ ) - , pipelineLayout( pipelineLayout_ ) - , set( set_ ) + : pNext{ pNext_ } + , flags{ flags_ } + , descriptorUpdateEntryCount{ descriptorUpdateEntryCount_ } + , pDescriptorUpdateEntries{ pDescriptorUpdateEntries_ } + , templateType{ templateType_ } + , descriptorSetLayout{ descriptorSetLayout_ } + , pipelineBindPoint{ pipelineBindPoint_ } + , pipelineLayout{ pipelineLayout_ } + , set{ set_ } { } @@ -25763,11 +25990,11 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, VULKAN_HPP_NAMESPACE::DeviceAddressBindingTypeEXT bindingType_ = VULKAN_HPP_NAMESPACE::DeviceAddressBindingTypeEXT::eBind, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , baseAddress( baseAddress_ ) - , size( size_ ) - , bindingType( bindingType_ ) + : pNext{ pNext_ } + , flags{ flags_ } + , baseAddress{ baseAddress_ } + , size{ size_ } + , bindingType{ bindingType_ } { } @@ -25891,8 +26118,8 @@ namespace VULKAN_HPP_NAMESPACE #if 
!defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DeviceBufferMemoryRequirements( const VULKAN_HPP_NAMESPACE::BufferCreateInfo * pCreateInfo_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , pCreateInfo( pCreateInfo_ ) + : pNext{ pNext_ } + , pCreateInfo{ pCreateInfo_ } { } @@ -25993,11 +26220,11 @@ namespace VULKAN_HPP_NAMESPACE uint32_t queueCount_ = {}, const float * pQueuePriorities_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , queueFamilyIndex( queueFamilyIndex_ ) - , queueCount( queueCount_ ) - , pQueuePriorities( pQueuePriorities_ ) + : pNext{ pNext_ } + , flags{ flags_ } + , queueFamilyIndex{ queueFamilyIndex_ } + , queueCount{ queueCount_ } + , pQueuePriorities{ pQueuePriorities_ } { } @@ -26193,61 +26420,61 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyAliased_ = {}, VULKAN_HPP_NAMESPACE::Bool32 variableMultisampleRate_ = {}, VULKAN_HPP_NAMESPACE::Bool32 inheritedQueries_ = {} ) VULKAN_HPP_NOEXCEPT - : robustBufferAccess( robustBufferAccess_ ) - , fullDrawIndexUint32( fullDrawIndexUint32_ ) - , imageCubeArray( imageCubeArray_ ) - , independentBlend( independentBlend_ ) - , geometryShader( geometryShader_ ) - , tessellationShader( tessellationShader_ ) - , sampleRateShading( sampleRateShading_ ) - , dualSrcBlend( dualSrcBlend_ ) - , logicOp( logicOp_ ) - , multiDrawIndirect( multiDrawIndirect_ ) - , drawIndirectFirstInstance( drawIndirectFirstInstance_ ) - , depthClamp( depthClamp_ ) - , depthBiasClamp( depthBiasClamp_ ) - , fillModeNonSolid( fillModeNonSolid_ ) - , depthBounds( depthBounds_ ) - , wideLines( wideLines_ ) - , largePoints( largePoints_ ) - , alphaToOne( alphaToOne_ ) - , multiViewport( multiViewport_ ) - , samplerAnisotropy( samplerAnisotropy_ ) - , textureCompressionETC2( textureCompressionETC2_ ) - , textureCompressionASTC_LDR( textureCompressionASTC_LDR_ ) - , textureCompressionBC( textureCompressionBC_ ) - , occlusionQueryPrecise( occlusionQueryPrecise_ ) - , pipelineStatisticsQuery( pipelineStatisticsQuery_ ) - , vertexPipelineStoresAndAtomics( vertexPipelineStoresAndAtomics_ ) - , fragmentStoresAndAtomics( fragmentStoresAndAtomics_ ) - , shaderTessellationAndGeometryPointSize( shaderTessellationAndGeometryPointSize_ ) - , shaderImageGatherExtended( shaderImageGatherExtended_ ) - , shaderStorageImageExtendedFormats( shaderStorageImageExtendedFormats_ ) - , shaderStorageImageMultisample( shaderStorageImageMultisample_ ) - , shaderStorageImageReadWithoutFormat( shaderStorageImageReadWithoutFormat_ ) - , shaderStorageImageWriteWithoutFormat( shaderStorageImageWriteWithoutFormat_ ) - , shaderUniformBufferArrayDynamicIndexing( shaderUniformBufferArrayDynamicIndexing_ ) - , shaderSampledImageArrayDynamicIndexing( shaderSampledImageArrayDynamicIndexing_ ) - , shaderStorageBufferArrayDynamicIndexing( shaderStorageBufferArrayDynamicIndexing_ ) - , shaderStorageImageArrayDynamicIndexing( shaderStorageImageArrayDynamicIndexing_ ) - , shaderClipDistance( shaderClipDistance_ ) - , shaderCullDistance( shaderCullDistance_ ) - , shaderFloat64( shaderFloat64_ ) - , shaderInt64( shaderInt64_ ) - , shaderInt16( shaderInt16_ ) - , shaderResourceResidency( shaderResourceResidency_ ) - , shaderResourceMinLod( shaderResourceMinLod_ ) - , sparseBinding( sparseBinding_ ) - , sparseResidencyBuffer( sparseResidencyBuffer_ ) - , sparseResidencyImage2D( sparseResidencyImage2D_ ) - , sparseResidencyImage3D( sparseResidencyImage3D_ ) - , 
sparseResidency2Samples( sparseResidency2Samples_ ) - , sparseResidency4Samples( sparseResidency4Samples_ ) - , sparseResidency8Samples( sparseResidency8Samples_ ) - , sparseResidency16Samples( sparseResidency16Samples_ ) - , sparseResidencyAliased( sparseResidencyAliased_ ) - , variableMultisampleRate( variableMultisampleRate_ ) - , inheritedQueries( inheritedQueries_ ) + : robustBufferAccess{ robustBufferAccess_ } + , fullDrawIndexUint32{ fullDrawIndexUint32_ } + , imageCubeArray{ imageCubeArray_ } + , independentBlend{ independentBlend_ } + , geometryShader{ geometryShader_ } + , tessellationShader{ tessellationShader_ } + , sampleRateShading{ sampleRateShading_ } + , dualSrcBlend{ dualSrcBlend_ } + , logicOp{ logicOp_ } + , multiDrawIndirect{ multiDrawIndirect_ } + , drawIndirectFirstInstance{ drawIndirectFirstInstance_ } + , depthClamp{ depthClamp_ } + , depthBiasClamp{ depthBiasClamp_ } + , fillModeNonSolid{ fillModeNonSolid_ } + , depthBounds{ depthBounds_ } + , wideLines{ wideLines_ } + , largePoints{ largePoints_ } + , alphaToOne{ alphaToOne_ } + , multiViewport{ multiViewport_ } + , samplerAnisotropy{ samplerAnisotropy_ } + , textureCompressionETC2{ textureCompressionETC2_ } + , textureCompressionASTC_LDR{ textureCompressionASTC_LDR_ } + , textureCompressionBC{ textureCompressionBC_ } + , occlusionQueryPrecise{ occlusionQueryPrecise_ } + , pipelineStatisticsQuery{ pipelineStatisticsQuery_ } + , vertexPipelineStoresAndAtomics{ vertexPipelineStoresAndAtomics_ } + , fragmentStoresAndAtomics{ fragmentStoresAndAtomics_ } + , shaderTessellationAndGeometryPointSize{ shaderTessellationAndGeometryPointSize_ } + , shaderImageGatherExtended{ shaderImageGatherExtended_ } + , shaderStorageImageExtendedFormats{ shaderStorageImageExtendedFormats_ } + , shaderStorageImageMultisample{ shaderStorageImageMultisample_ } + , shaderStorageImageReadWithoutFormat{ shaderStorageImageReadWithoutFormat_ } + , shaderStorageImageWriteWithoutFormat{ shaderStorageImageWriteWithoutFormat_ } + , shaderUniformBufferArrayDynamicIndexing{ shaderUniformBufferArrayDynamicIndexing_ } + , shaderSampledImageArrayDynamicIndexing{ shaderSampledImageArrayDynamicIndexing_ } + , shaderStorageBufferArrayDynamicIndexing{ shaderStorageBufferArrayDynamicIndexing_ } + , shaderStorageImageArrayDynamicIndexing{ shaderStorageImageArrayDynamicIndexing_ } + , shaderClipDistance{ shaderClipDistance_ } + , shaderCullDistance{ shaderCullDistance_ } + , shaderFloat64{ shaderFloat64_ } + , shaderInt64{ shaderInt64_ } + , shaderInt16{ shaderInt16_ } + , shaderResourceResidency{ shaderResourceResidency_ } + , shaderResourceMinLod{ shaderResourceMinLod_ } + , sparseBinding{ sparseBinding_ } + , sparseResidencyBuffer{ sparseResidencyBuffer_ } + , sparseResidencyImage2D{ sparseResidencyImage2D_ } + , sparseResidencyImage3D{ sparseResidencyImage3D_ } + , sparseResidency2Samples{ sparseResidency2Samples_ } + , sparseResidency4Samples{ sparseResidency4Samples_ } + , sparseResidency8Samples{ sparseResidency8Samples_ } + , sparseResidency16Samples{ sparseResidency16Samples_ } + , sparseResidencyAliased{ sparseResidencyAliased_ } + , variableMultisampleRate{ variableMultisampleRate_ } + , inheritedQueries{ inheritedQueries_ } { } @@ -26860,15 +27087,15 @@ namespace VULKAN_HPP_NAMESPACE const char * const * ppEnabledExtensionNames_ = {}, const VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures * pEnabledFeatures_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , queueCreateInfoCount( queueCreateInfoCount_ 
) - , pQueueCreateInfos( pQueueCreateInfos_ ) - , enabledLayerCount( enabledLayerCount_ ) - , ppEnabledLayerNames( ppEnabledLayerNames_ ) - , enabledExtensionCount( enabledExtensionCount_ ) - , ppEnabledExtensionNames( ppEnabledExtensionNames_ ) - , pEnabledFeatures( pEnabledFeatures_ ) + : pNext{ pNext_ } + , flags{ flags_ } + , queueCreateInfoCount{ queueCreateInfoCount_ } + , pQueueCreateInfos{ pQueueCreateInfos_ } + , enabledLayerCount{ enabledLayerCount_ } + , ppEnabledLayerNames{ ppEnabledLayerNames_ } + , enabledExtensionCount{ enabledExtensionCount_ } + , ppEnabledExtensionNames{ ppEnabledExtensionNames_ } + , pEnabledFeatures{ pEnabledFeatures_ } { } @@ -27120,10 +27347,10 @@ namespace VULKAN_HPP_NAMESPACE PFN_vkDeviceMemoryReportCallbackEXT pfnUserCallback_ = {}, void * pUserData_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , pfnUserCallback( pfnUserCallback_ ) - , pUserData( pUserData_ ) + : pNext{ pNext_ } + , flags{ flags_ } + , pfnUserCallback{ pfnUserCallback_ } + , pUserData{ pUserData_ } { } @@ -27235,8 +27462,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DeviceDiagnosticsConfigCreateInfoNV( VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigFlagsNV flags_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) + : pNext{ pNext_ } + , flags{ flags_ } { } @@ -27332,8 +27559,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DeviceEventInfoEXT( VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT deviceEvent_ = VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT::eDisplayHotplug, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , deviceEvent( deviceEvent_ ) + : pNext{ pNext_ } + , deviceEvent{ deviceEvent_ } { } @@ -27425,9 +27652,9 @@ namespace VULKAN_HPP_NAMESPACE DeviceFaultAddressInfoEXT( VULKAN_HPP_NAMESPACE::DeviceFaultAddressTypeEXT addressType_ = VULKAN_HPP_NAMESPACE::DeviceFaultAddressTypeEXT::eNone, VULKAN_HPP_NAMESPACE::DeviceAddress reportedAddress_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize addressPrecision_ = {} ) VULKAN_HPP_NOEXCEPT - : addressType( addressType_ ) - , reportedAddress( reportedAddress_ ) - , addressPrecision( addressPrecision_ ) + : addressType{ addressType_ } + , reportedAddress{ reportedAddress_ } + , addressPrecision{ addressPrecision_ } { } @@ -27525,10 +27752,10 @@ namespace VULKAN_HPP_NAMESPACE uint32_t vendorInfoCount_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize vendorBinarySize_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , addressInfoCount( addressInfoCount_ ) - , vendorInfoCount( vendorInfoCount_ ) - , vendorBinarySize( vendorBinarySize_ ) + : pNext{ pNext_ } + , addressInfoCount{ addressInfoCount_ } + , vendorInfoCount{ vendorInfoCount_ } + , vendorBinarySize{ vendorBinarySize_ } { } @@ -27637,9 +27864,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR_14 DeviceFaultVendorInfoEXT( std::array const & description_ = {}, uint64_t vendorFaultCode_ = {}, uint64_t vendorFaultData_ = {} ) VULKAN_HPP_NOEXCEPT - : description( description_ ) - , vendorFaultCode( vendorFaultCode_ ) - , vendorFaultData( vendorFaultData_ ) + : description{ description_ } + , vendorFaultCode{ vendorFaultCode_ } + , vendorFaultData{ vendorFaultData_ } { } @@ -27770,11 +27997,11 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DeviceFaultVendorInfoEXT * pVendorInfos_ = {}, void * pVendorBinaryData_ = {}, void * pNext_ = 
nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , description( description_ ) - , pAddressInfos( pAddressInfos_ ) - , pVendorInfos( pVendorInfos_ ) - , pVendorBinaryData( pVendorBinaryData_ ) + : pNext{ pNext_ } + , description{ description_ } + , pAddressInfos{ pAddressInfos_ } + , pVendorInfos{ pVendorInfos_ } + , pVendorBinaryData{ pVendorBinaryData_ } { } @@ -27789,10 +28016,10 @@ namespace VULKAN_HPP_NAMESPACE DeviceFaultInfoEXT & operator=( DeviceFaultInfoEXT const & ) = delete; DeviceFaultInfoEXT( DeviceFaultInfoEXT && rhs ) VULKAN_HPP_NOEXCEPT - : pNext( rhs.pNext ) - , pAddressInfos( rhs.pAddressInfos ) - , pVendorInfos( rhs.pVendorInfos ) - , pVendorBinaryData( rhs.pVendorBinaryData ) + : pNext{ rhs.pNext } + , pAddressInfos{ rhs.pAddressInfos } + , pVendorInfos{ rhs.pVendorInfos } + , pVendorBinaryData{ rhs.pVendorBinaryData } { memcpy( description, rhs.description, VK_MAX_DESCRIPTION_SIZE ); @@ -27923,17 +28150,17 @@ namespace VULKAN_HPP_NAMESPACE uint32_t engineNameOffset_ = {}, uint32_t engineVersion_ = {}, uint32_t apiVersion_ = {} ) VULKAN_HPP_NOEXCEPT - : headerSize( headerSize_ ) - , headerVersion( headerVersion_ ) - , vendorID( vendorID_ ) - , deviceID( deviceID_ ) - , driverVersion( driverVersion_ ) - , pipelineCacheUUID( pipelineCacheUUID_ ) - , applicationNameOffset( applicationNameOffset_ ) - , applicationVersion( applicationVersion_ ) - , engineNameOffset( engineNameOffset_ ) - , engineVersion( engineVersion_ ) - , apiVersion( apiVersion_ ) + : headerSize{ headerSize_ } + , headerVersion{ headerVersion_ } + , vendorID{ vendorID_ } + , deviceID{ deviceID_ } + , driverVersion{ driverVersion_ } + , pipelineCacheUUID{ pipelineCacheUUID_ } + , applicationNameOffset{ applicationNameOffset_ } + , applicationVersion{ applicationVersion_ } + , engineNameOffset{ engineNameOffset_ } + , engineVersion{ engineVersion_ } + , apiVersion{ apiVersion_ } { } @@ -28110,9 +28337,9 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DeviceGroupBindSparseInfo( uint32_t resourceDeviceIndex_ = {}, uint32_t memoryDeviceIndex_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , resourceDeviceIndex( resourceDeviceIndex_ ) - , memoryDeviceIndex( memoryDeviceIndex_ ) + : pNext{ pNext_ } + , resourceDeviceIndex{ resourceDeviceIndex_ } + , memoryDeviceIndex{ memoryDeviceIndex_ } { } @@ -28217,8 +28444,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DeviceGroupCommandBufferBeginInfo( uint32_t deviceMask_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , deviceMask( deviceMask_ ) + : pNext{ pNext_ } + , deviceMask{ deviceMask_ } { } @@ -28317,9 +28544,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR DeviceGroupDeviceCreateInfo( uint32_t physicalDeviceCount_ = {}, const VULKAN_HPP_NAMESPACE::PhysicalDevice * pPhysicalDevices_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , physicalDeviceCount( physicalDeviceCount_ ) - , pPhysicalDevices( pPhysicalDevices_ ) + : pNext{ pNext_ } + , physicalDeviceCount{ physicalDeviceCount_ } + , pPhysicalDevices{ pPhysicalDevices_ } { } @@ -28445,9 +28672,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR_14 DeviceGroupPresentCapabilitiesKHR( std::array const & presentMask_ = {}, VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , presentMask( presentMask_ ) - , 
modes( modes_ ) + : pNext{ pNext_ } + , presentMask{ presentMask_ } + , modes{ modes_ } { } @@ -28536,10 +28763,10 @@ namespace VULKAN_HPP_NAMESPACE const uint32_t * pDeviceMasks_ = {}, VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR mode_ = VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR::eLocal, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , swapchainCount( swapchainCount_ ) - , pDeviceMasks( pDeviceMasks_ ) - , mode( mode_ ) + : pNext{ pNext_ } + , swapchainCount{ swapchainCount_ } + , pDeviceMasks{ pDeviceMasks_ } + , mode{ mode_ } { } @@ -28674,10 +28901,10 @@ namespace VULKAN_HPP_NAMESPACE uint32_t deviceRenderAreaCount_ = {}, const VULKAN_HPP_NAMESPACE::Rect2D * pDeviceRenderAreas_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , deviceMask( deviceMask_ ) - , deviceRenderAreaCount( deviceRenderAreaCount_ ) - , pDeviceRenderAreas( pDeviceRenderAreas_ ) + : pNext{ pNext_ } + , deviceMask{ deviceMask_ } + , deviceRenderAreaCount{ deviceRenderAreaCount_ } + , pDeviceRenderAreas{ pDeviceRenderAreas_ } { } @@ -28819,13 +29046,13 @@ namespace VULKAN_HPP_NAMESPACE uint32_t signalSemaphoreCount_ = {}, const uint32_t * pSignalSemaphoreDeviceIndices_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , waitSemaphoreCount( waitSemaphoreCount_ ) - , pWaitSemaphoreDeviceIndices( pWaitSemaphoreDeviceIndices_ ) - , commandBufferCount( commandBufferCount_ ) - , pCommandBufferDeviceMasks( pCommandBufferDeviceMasks_ ) - , signalSemaphoreCount( signalSemaphoreCount_ ) - , pSignalSemaphoreDeviceIndices( pSignalSemaphoreDeviceIndices_ ) + : pNext{ pNext_ } + , waitSemaphoreCount{ waitSemaphoreCount_ } + , pWaitSemaphoreDeviceIndices{ pWaitSemaphoreDeviceIndices_ } + , commandBufferCount{ commandBufferCount_ } + , pCommandBufferDeviceMasks{ pCommandBufferDeviceMasks_ } + , signalSemaphoreCount{ signalSemaphoreCount_ } + , pSignalSemaphoreDeviceIndices{ pSignalSemaphoreDeviceIndices_ } { } @@ -29021,8 +29248,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DeviceGroupSwapchainCreateInfoKHR( VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , modes( modes_ ) + : pNext{ pNext_ } + , modes{ modes_ } { } @@ -29130,20 +29357,20 @@ namespace VULKAN_HPP_NAMESPACE const uint32_t * pQueueFamilyIndices_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , imageType( imageType_ ) - , format( format_ ) - , extent( extent_ ) - , mipLevels( mipLevels_ ) - , arrayLayers( arrayLayers_ ) - , samples( samples_ ) - , tiling( tiling_ ) - , usage( usage_ ) - , sharingMode( sharingMode_ ) - , queueFamilyIndexCount( queueFamilyIndexCount_ ) - , pQueueFamilyIndices( pQueueFamilyIndices_ ) - , initialLayout( initialLayout_ ) + : pNext{ pNext_ } + , flags{ flags_ } + , imageType{ imageType_ } + , format{ format_ } + , extent{ extent_ } + , mipLevels{ mipLevels_ } + , arrayLayers{ arrayLayers_ } + , samples{ samples_ } + , tiling{ tiling_ } + , usage{ usage_ } + , sharingMode{ sharingMode_ } + , queueFamilyIndexCount{ queueFamilyIndexCount_ } + , pQueueFamilyIndices{ pQueueFamilyIndices_ } + , initialLayout{ initialLayout_ } { } @@ -29395,9 +29622,9 @@ namespace VULKAN_HPP_NAMESPACE DeviceImageMemoryRequirements( const 
VULKAN_HPP_NAMESPACE::ImageCreateInfo * pCreateInfo_ = {}, VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , pCreateInfo( pCreateInfo_ ) - , planeAspect( planeAspect_ ) + : pNext{ pNext_ } + , pCreateInfo{ pCreateInfo_ } + , planeAspect{ planeAspect_ } { } @@ -29504,8 +29731,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ImageSubresource2KHR( VULKAN_HPP_NAMESPACE::ImageSubresource imageSubresource_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , imageSubresource( imageSubresource_ ) + : pNext{ pNext_ } + , imageSubresource{ imageSubresource_ } { } @@ -29604,9 +29831,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR DeviceImageSubresourceInfoKHR( const VULKAN_HPP_NAMESPACE::ImageCreateInfo * pCreateInfo_ = {}, const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR * pSubresource_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , pCreateInfo( pCreateInfo_ ) - , pSubresource( pSubresource_ ) + : pNext{ pNext_ } + , pCreateInfo{ pCreateInfo_ } + , pSubresource{ pSubresource_ } { } @@ -29713,8 +29940,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DeviceMemoryOpaqueCaptureAddressInfo( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , memory( memory_ ) + : pNext{ pNext_ } + , memory{ memory_ } { } @@ -29813,8 +30040,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR DeviceMemoryOverallocationCreateInfoAMD( VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD overallocationBehavior_ = VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD::eDefault, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , overallocationBehavior( overallocationBehavior_ ) + : pNext{ pNext_ } + , overallocationBehavior{ overallocationBehavior_ } { } @@ -29918,14 +30145,14 @@ namespace VULKAN_HPP_NAMESPACE uint64_t objectHandle_ = {}, uint32_t heapIndex_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , type( type_ ) - , memoryObjectId( memoryObjectId_ ) - , size( size_ ) - , objectType( objectType_ ) - , objectHandle( objectHandle_ ) - , heapIndex( heapIndex_ ) + : pNext{ pNext_ } + , flags{ flags_ } + , type{ type_ } + , memoryObjectId{ memoryObjectId_ } + , size{ size_ } + , objectType{ objectType_ } + , objectHandle{ objectHandle_ } + , heapIndex{ heapIndex_ } { } @@ -30057,6 +30284,104 @@ namespace VULKAN_HPP_NAMESPACE }; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ + struct DevicePipelineBinaryInternalCacheControlKHR + { + using NativeType = VkDevicePipelineBinaryInternalCacheControlKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDevicePipelineBinaryInternalCacheControlKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DevicePipelineBinaryInternalCacheControlKHR( VULKAN_HPP_NAMESPACE::Bool32 disableInternalCache_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , disableInternalCache{ disableInternalCache_ } + { + } + + VULKAN_HPP_CONSTEXPR DevicePipelineBinaryInternalCacheControlKHR( DevicePipelineBinaryInternalCacheControlKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + 
DevicePipelineBinaryInternalCacheControlKHR( VkDevicePipelineBinaryInternalCacheControlKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : DevicePipelineBinaryInternalCacheControlKHR( *reinterpret_cast( &rhs ) ) + { + } + + DevicePipelineBinaryInternalCacheControlKHR & operator=( DevicePipelineBinaryInternalCacheControlKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DevicePipelineBinaryInternalCacheControlKHR & operator=( VkDevicePipelineBinaryInternalCacheControlKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DevicePipelineBinaryInternalCacheControlKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DevicePipelineBinaryInternalCacheControlKHR & + setDisableInternalCache( VULKAN_HPP_NAMESPACE::Bool32 disableInternalCache_ ) VULKAN_HPP_NOEXCEPT + { + disableInternalCache = disableInternalCache_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkDevicePipelineBinaryInternalCacheControlKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDevicePipelineBinaryInternalCacheControlKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, disableInternalCache ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DevicePipelineBinaryInternalCacheControlKHR const & ) const = default; +#else + bool operator==( DevicePipelineBinaryInternalCacheControlKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( disableInternalCache == rhs.disableInternalCache ); +# endif + } + + bool operator!=( DevicePipelineBinaryInternalCacheControlKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDevicePipelineBinaryInternalCacheControlKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 disableInternalCache = {}; + }; + + template <> + struct CppType + { + using Type = DevicePipelineBinaryInternalCacheControlKHR; + }; + struct DevicePrivateDataCreateInfo { using NativeType = VkDevicePrivateDataCreateInfo; @@ -30066,8 +30391,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DevicePrivateDataCreateInfo( uint32_t privateDataSlotRequestCount_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , privateDataSlotRequestCount( privateDataSlotRequestCount_ ) + : pNext{ pNext_ } + , privateDataSlotRequestCount{ privateDataSlotRequestCount_ } { } @@ -30166,8 +30491,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR DeviceQueueGlobalPriorityCreateInfoKHR( VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR globalPriority_ = VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , globalPriority( globalPriority_ ) + : pNext{ pNext_ } + , globalPriority{ globalPriority_ } { } @@ -30268,10 +30593,10 @@ namespace VULKAN_HPP_NAMESPACE uint32_t queueFamilyIndex_ = {}, uint32_t 
queueIndex_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , queueFamilyIndex( queueFamilyIndex_ ) - , queueIndex( queueIndex_ ) + : pNext{ pNext_ } + , flags{ flags_ } + , queueFamilyIndex{ queueFamilyIndex_ } + , queueIndex{ queueIndex_ } { } @@ -30382,8 +30707,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DeviceQueueShaderCoreControlCreateInfoARM( uint32_t shaderCoreCount_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , shaderCoreCount( shaderCoreCount_ ) + : pNext{ pNext_ } + , shaderCoreCount{ shaderCoreCount_ } { } @@ -30480,9 +30805,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR DirectDriverLoadingInfoLUNARG( VULKAN_HPP_NAMESPACE::DirectDriverLoadingFlagsLUNARG flags_ = {}, PFN_vkGetInstanceProcAddrLUNARG pfnGetInstanceProcAddr_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , pfnGetInstanceProcAddr( pfnGetInstanceProcAddr_ ) + : pNext{ pNext_ } + , flags{ flags_ } + , pfnGetInstanceProcAddr{ pfnGetInstanceProcAddr_ } { } @@ -30588,10 +30913,10 @@ namespace VULKAN_HPP_NAMESPACE uint32_t driverCount_ = {}, const VULKAN_HPP_NAMESPACE::DirectDriverLoadingInfoLUNARG * pDrivers_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , mode( mode_ ) - , driverCount( driverCount_ ) - , pDrivers( pDrivers_ ) + : pNext{ pNext_ } + , mode{ mode_ } + , driverCount{ driverCount_ } + , pDrivers{ pDrivers_ } { } @@ -30724,10 +31049,10 @@ namespace VULKAN_HPP_NAMESPACE IDirectFB * dfb_ = {}, IDirectFBSurface * surface_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , dfb( dfb_ ) - , surface( surface_ ) + : pNext{ pNext_ } + , flags{ flags_ } + , dfb{ dfb_ } + , surface{ surface_ } { } @@ -30841,9 +31166,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR_14 DispatchGraphCountInfoAMDX( uint32_t count_ = {}, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstAMDX infos_ = {}, uint64_t stride_ = {} ) VULKAN_HPP_NOEXCEPT - : count( count_ ) - , infos( infos_ ) - , stride( stride_ ) + : count{ count_ } + , infos{ infos_ } + , stride{ stride_ } { } @@ -30922,10 +31247,10 @@ namespace VULKAN_HPP_NAMESPACE uint32_t payloadCount_ = {}, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstAMDX payloads_ = {}, uint64_t payloadStride_ = {} ) VULKAN_HPP_NOEXCEPT - : nodeIndex( nodeIndex_ ) - , payloadCount( payloadCount_ ) - , payloads( payloads_ ) - , payloadStride( payloadStride_ ) + : nodeIndex{ nodeIndex_ } + , payloadCount{ payloadCount_ } + , payloads{ payloads_ } + , payloadStride{ payloadStride_ } { } @@ -31007,9 +31332,9 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DispatchIndirectCommand( uint32_t x_ = {}, uint32_t y_ = {}, uint32_t z_ = {} ) VULKAN_HPP_NOEXCEPT - : x( x_ ) - , y( y_ ) - , z( z_ ) + : x{ x_ } + , y{ y_ } + , z{ z_ } { } @@ -31106,8 +31431,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR DisplayEventInfoEXT( VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT displayEvent_ = VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT::eFirstPixelOut, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , displayEvent( displayEvent_ ) + : pNext{ pNext_ } + , displayEvent{ displayEvent_ } { } @@ -31198,8 +31523,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DisplayModeParametersKHR( 
VULKAN_HPP_NAMESPACE::Extent2D visibleRegion_ = {}, uint32_t refreshRate_ = {} ) VULKAN_HPP_NOEXCEPT - : visibleRegion( visibleRegion_ ) - , refreshRate( refreshRate_ ) + : visibleRegion{ visibleRegion_ } + , refreshRate{ refreshRate_ } { } @@ -31289,9 +31614,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR DisplayModeCreateInfoKHR( VULKAN_HPP_NAMESPACE::DisplayModeCreateFlagsKHR flags_ = {}, VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR parameters_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , parameters( parameters_ ) + : pNext{ pNext_ } + , flags{ flags_ } + , parameters{ parameters_ } { } @@ -31394,8 +31719,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode_ = {}, VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR parameters_ = {} ) VULKAN_HPP_NOEXCEPT - : displayMode( displayMode_ ) - , parameters( parameters_ ) + : displayMode{ displayMode_ } + , parameters{ parameters_ } { } @@ -31470,8 +31795,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR displayModeProperties_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , displayModeProperties( displayModeProperties_ ) + : pNext{ pNext_ } + , displayModeProperties{ displayModeProperties_ } { } @@ -31553,8 +31878,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DisplayNativeHdrSurfaceCapabilitiesAMD( VULKAN_HPP_NAMESPACE::Bool32 localDimmingSupport_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , localDimmingSupport( localDimmingSupport_ ) + : pNext{ pNext_ } + , localDimmingSupport{ localDimmingSupport_ } { } @@ -31640,15 +31965,15 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Offset2D maxDstPosition_ = {}, VULKAN_HPP_NAMESPACE::Extent2D minDstExtent_ = {}, VULKAN_HPP_NAMESPACE::Extent2D maxDstExtent_ = {} ) VULKAN_HPP_NOEXCEPT - : supportedAlpha( supportedAlpha_ ) - , minSrcPosition( minSrcPosition_ ) - , maxSrcPosition( maxSrcPosition_ ) - , minSrcExtent( minSrcExtent_ ) - , maxSrcExtent( maxSrcExtent_ ) - , minDstPosition( minDstPosition_ ) - , maxDstPosition( maxDstPosition_ ) - , minDstExtent( minDstExtent_ ) - , maxDstExtent( maxDstExtent_ ) + : supportedAlpha{ supportedAlpha_ } + , minSrcPosition{ minSrcPosition_ } + , maxSrcPosition{ maxSrcPosition_ } + , minSrcExtent{ minSrcExtent_ } + , maxSrcExtent{ maxSrcExtent_ } + , minDstPosition{ minDstPosition_ } + , maxDstPosition{ maxDstPosition_ } + , minDstExtent{ minDstExtent_ } + , maxDstExtent{ maxDstExtent_ } { } @@ -31740,8 +32065,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DisplayPlaneCapabilities2KHR( VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR capabilities_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , capabilities( capabilities_ ) + : pNext{ pNext_ } + , capabilities{ capabilities_ } { } @@ -31823,9 +32148,9 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DisplayPlaneInfo2KHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode_ = {}, uint32_t planeIndex_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , mode( mode_ ) - , planeIndex( planeIndex_ ) + : pNext{ pNext_ } 
+ , mode{ mode_ } + , planeIndex{ planeIndex_ } { } @@ -31925,8 +32250,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DisplayPlanePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR currentDisplay_ = {}, uint32_t currentStackIndex_ = {} ) VULKAN_HPP_NOEXCEPT - : currentDisplay( currentDisplay_ ) - , currentStackIndex( currentStackIndex_ ) + : currentDisplay{ currentDisplay_ } + , currentStackIndex{ currentStackIndex_ } { } @@ -32001,8 +32326,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DisplayPlaneProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR displayPlaneProperties_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , displayPlaneProperties( displayPlaneProperties_ ) + : pNext{ pNext_ } + , displayPlaneProperties{ displayPlaneProperties_ } { } @@ -32084,8 +32409,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DisplayPowerInfoEXT( VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT powerState_ = VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT::eOff, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , powerState( powerState_ ) + : pNext{ pNext_ } + , powerState{ powerState_ } { } @@ -32182,10 +32507,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Rect2D dstRect_ = {}, VULKAN_HPP_NAMESPACE::Bool32 persistent_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , srcRect( srcRect_ ) - , dstRect( dstRect_ ) - , persistent( persistent_ ) + : pNext{ pNext_ } + , srcRect{ srcRect_ } + , dstRect{ dstRect_ } + , persistent{ persistent_ } { } @@ -32301,13 +32626,13 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms_ = {}, VULKAN_HPP_NAMESPACE::Bool32 planeReorderPossible_ = {}, VULKAN_HPP_NAMESPACE::Bool32 persistentContent_ = {} ) VULKAN_HPP_NOEXCEPT - : display( display_ ) - , displayName( displayName_ ) - , physicalDimensions( physicalDimensions_ ) - , physicalResolution( physicalResolution_ ) - , supportedTransforms( supportedTransforms_ ) - , planeReorderPossible( planeReorderPossible_ ) - , persistentContent( persistentContent_ ) + : display{ display_ } + , displayName{ displayName_ } + , physicalDimensions{ physicalDimensions_ } + , physicalResolution{ physicalResolution_ } + , supportedTransforms{ supportedTransforms_ } + , planeReorderPossible{ planeReorderPossible_ } + , persistentContent{ persistentContent_ } { } @@ -32411,8 +32736,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DisplayProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR displayProperties_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , displayProperties( displayProperties_ ) + : pNext{ pNext_ } + , displayProperties{ displayProperties_ } { } @@ -32502,15 +32827,15 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR alphaMode_ = VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR::eOpaque, VULKAN_HPP_NAMESPACE::Extent2D imageExtent_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , displayMode( displayMode_ ) - , planeIndex( planeIndex_ ) - , planeStackIndex( planeStackIndex_ ) - , transform( transform_ ) - , globalAlpha( globalAlpha_ ) - , alphaMode( alphaMode_ ) - , imageExtent( imageExtent_ ) + : pNext{ pNext_ } + , flags{ flags_ } + , 
displayMode{ displayMode_ } + , planeIndex{ planeIndex_ } + , planeStackIndex{ planeStackIndex_ } + , transform{ transform_ } + , globalAlpha{ globalAlpha_ } + , alphaMode{ alphaMode_ } + , imageExtent{ imageExtent_ } { } @@ -32666,11 +32991,11 @@ namespace VULKAN_HPP_NAMESPACE uint32_t firstIndex_ = {}, int32_t vertexOffset_ = {}, uint32_t firstInstance_ = {} ) VULKAN_HPP_NOEXCEPT - : indexCount( indexCount_ ) - , instanceCount( instanceCount_ ) - , firstIndex( firstIndex_ ) - , vertexOffset( vertexOffset_ ) - , firstInstance( firstInstance_ ) + : indexCount{ indexCount_ } + , instanceCount{ instanceCount_ } + , firstIndex{ firstIndex_ } + , vertexOffset{ vertexOffset_ } + , firstInstance{ firstInstance_ } { } @@ -32780,10 +33105,10 @@ namespace VULKAN_HPP_NAMESPACE uint32_t instanceCount_ = {}, uint32_t firstVertex_ = {}, uint32_t firstInstance_ = {} ) VULKAN_HPP_NOEXCEPT - : vertexCount( vertexCount_ ) - , instanceCount( instanceCount_ ) - , firstVertex( firstVertex_ ) - , firstInstance( firstInstance_ ) + : vertexCount{ vertexCount_ } + , instanceCount{ instanceCount_ } + , firstVertex{ firstVertex_ } + , firstInstance{ firstInstance_ } { } @@ -32883,9 +33208,9 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DrawMeshTasksIndirectCommandEXT( uint32_t groupCountX_ = {}, uint32_t groupCountY_ = {}, uint32_t groupCountZ_ = {} ) VULKAN_HPP_NOEXCEPT - : groupCountX( groupCountX_ ) - , groupCountY( groupCountY_ ) - , groupCountZ( groupCountZ_ ) + : groupCountX{ groupCountX_ } + , groupCountY{ groupCountY_ } + , groupCountZ{ groupCountZ_ } { } @@ -32977,8 +33302,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR DrawMeshTasksIndirectCommandNV( uint32_t taskCount_ = {}, uint32_t firstTask_ = {} ) VULKAN_HPP_NOEXCEPT - : taskCount( taskCount_ ) - , firstTask( firstTask_ ) + : taskCount{ taskCount_ } + , firstTask{ firstTask_ } { } @@ -33065,9 +33390,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR DrmFormatModifierProperties2EXT( uint64_t drmFormatModifier_ = {}, uint32_t drmFormatModifierPlaneCount_ = {}, VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 drmFormatModifierTilingFeatures_ = {} ) VULKAN_HPP_NOEXCEPT - : drmFormatModifier( drmFormatModifier_ ) - , drmFormatModifierPlaneCount( drmFormatModifierPlaneCount_ ) - , drmFormatModifierTilingFeatures( drmFormatModifierTilingFeatures_ ) + : drmFormatModifier{ drmFormatModifier_ } + , drmFormatModifierPlaneCount{ drmFormatModifierPlaneCount_ } + , drmFormatModifierTilingFeatures{ drmFormatModifierTilingFeatures_ } { } @@ -33142,9 +33467,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesEXT( uint64_t drmFormatModifier_ = {}, uint32_t drmFormatModifierPlaneCount_ = {}, VULKAN_HPP_NAMESPACE::FormatFeatureFlags drmFormatModifierTilingFeatures_ = {} ) VULKAN_HPP_NOEXCEPT - : drmFormatModifier( drmFormatModifier_ ) - , drmFormatModifierPlaneCount( drmFormatModifierPlaneCount_ ) - , drmFormatModifierTilingFeatures( drmFormatModifierTilingFeatures_ ) + : drmFormatModifier{ drmFormatModifier_ } + , drmFormatModifierPlaneCount{ drmFormatModifierPlaneCount_ } + , drmFormatModifierTilingFeatures{ drmFormatModifierTilingFeatures_ } { } @@ -33222,9 +33547,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesList2EXT( uint32_t drmFormatModifierCount_ = {}, VULKAN_HPP_NAMESPACE::DrmFormatModifierProperties2EXT * pDrmFormatModifierProperties_ = {}, void * pNext_ = nullptr ) 
VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , drmFormatModifierCount( drmFormatModifierCount_ ) - , pDrmFormatModifierProperties( pDrmFormatModifierProperties_ ) + : pNext{ pNext_ } + , drmFormatModifierCount{ drmFormatModifierCount_ } + , pDrmFormatModifierProperties{ pDrmFormatModifierProperties_ } { } @@ -33235,17 +33560,6 @@ namespace VULKAN_HPP_NAMESPACE { } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - DrmFormatModifierPropertiesList2EXT( - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & drmFormatModifierProperties_, - void * pNext_ = nullptr ) - : pNext( pNext_ ) - , drmFormatModifierCount( static_cast( drmFormatModifierProperties_.size() ) ) - , pDrmFormatModifierProperties( drmFormatModifierProperties_.data() ) - { - } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - DrmFormatModifierPropertiesList2EXT & operator=( DrmFormatModifierPropertiesList2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ @@ -33320,9 +33634,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesListEXT( uint32_t drmFormatModifierCount_ = {}, VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT * pDrmFormatModifierProperties_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , drmFormatModifierCount( drmFormatModifierCount_ ) - , pDrmFormatModifierProperties( pDrmFormatModifierProperties_ ) + : pNext{ pNext_ } + , drmFormatModifierCount{ drmFormatModifierCount_ } + , pDrmFormatModifierProperties{ pDrmFormatModifierProperties_ } { } @@ -33333,17 +33647,6 @@ namespace VULKAN_HPP_NAMESPACE { } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - DrmFormatModifierPropertiesListEXT( - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & drmFormatModifierProperties_, - void * pNext_ = nullptr ) - : pNext( pNext_ ) - , drmFormatModifierCount( static_cast( drmFormatModifierProperties_.size() ) ) - , pDrmFormatModifierProperties( drmFormatModifierProperties_.data() ) - { - } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - DrmFormatModifierPropertiesListEXT & operator=( DrmFormatModifierPropertiesListEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ @@ -33416,8 +33719,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR EventCreateInfo( VULKAN_HPP_NAMESPACE::EventCreateFlags flags_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) + : pNext{ pNext_ } + , flags{ flags_ } { } @@ -33511,9 +33814,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR PipelineLibraryCreateInfoKHR( uint32_t libraryCount_ = {}, const VULKAN_HPP_NAMESPACE::Pipeline * pLibraries_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , libraryCount( libraryCount_ ) - , pLibraries( pLibraries_ ) + : pNext{ pNext_ } + , libraryCount{ libraryCount_ } + , pLibraries{ pLibraries_ } { } @@ -33641,14 +33944,14 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, int32_t basePipelineIndex_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , stageCount( stageCount_ ) - , pStages( pStages_ ) - , pLibraryInfo( pLibraryInfo_ ) - , layout( layout_ ) - , basePipelineHandle( basePipelineHandle_ ) - , basePipelineIndex( basePipelineIndex_ ) + : pNext{ pNext_ } + , flags{ flags_ } + , stageCount{ stageCount_ } + , pStages{ pStages_ } + , pLibraryInfo{ pLibraryInfo_ } + , layout{ layout_ } + , 
basePipelineHandle{ basePipelineHandle_ } + , basePipelineIndex{ basePipelineIndex_ } { } @@ -33831,8 +34134,8 @@ namespace VULKAN_HPP_NAMESPACE # if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ExecutionGraphPipelineScratchSizeAMDX( VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , size( size_ ) + : pNext{ pNext_ } + , size{ size_ } { } @@ -33929,8 +34232,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ExportFenceCreateInfo( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags handleTypes_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , handleTypes( handleTypes_ ) + : pNext{ pNext_ } + , handleTypes{ handleTypes_ } { } @@ -34031,10 +34334,10 @@ namespace VULKAN_HPP_NAMESPACE DWORD dwAccess_ = {}, LPCWSTR name_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , pAttributes( pAttributes_ ) - , dwAccess( dwAccess_ ) - , name( name_ ) + : pNext{ pNext_ } + , pAttributes{ pAttributes_ } + , dwAccess{ dwAccess_ } + , name{ name_ } { } @@ -34145,8 +34448,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ExportMemoryAllocateInfo( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , handleTypes( handleTypes_ ) + : pNext{ pNext_ } + , handleTypes{ handleTypes_ } { } @@ -34244,8 +34547,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ExportMemoryAllocateInfoNV( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , handleTypes( handleTypes_ ) + : pNext{ pNext_ } + , handleTypes{ handleTypes_ } { } @@ -34345,10 +34648,10 @@ namespace VULKAN_HPP_NAMESPACE DWORD dwAccess_ = {}, LPCWSTR name_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , pAttributes( pAttributes_ ) - , dwAccess( dwAccess_ ) - , name( name_ ) + : pNext{ pNext_ } + , pAttributes{ pAttributes_ } + , dwAccess{ dwAccess_ } + , name{ name_ } { } @@ -34460,9 +34763,9 @@ namespace VULKAN_HPP_NAMESPACE # if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ExportMemoryWin32HandleInfoNV( const SECURITY_ATTRIBUTES * pAttributes_ = {}, DWORD dwAccess_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , pAttributes( pAttributes_ ) - , dwAccess( dwAccess_ ) + : pNext{ pNext_ } + , pAttributes{ pAttributes_ } + , dwAccess{ dwAccess_ } { } @@ -34568,9 +34871,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR ExportMetalBufferInfoEXT( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, MTLBuffer_id mtlBuffer_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , memory( memory_ ) - , mtlBuffer( mtlBuffer_ ) + : pNext{ pNext_ } + , memory{ memory_ } + , mtlBuffer{ mtlBuffer_ } { } @@ -34676,9 +34979,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR ExportMetalCommandQueueInfoEXT( VULKAN_HPP_NAMESPACE::Queue queue_ = {}, MTLCommandQueue_id mtlCommandQueue_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , queue( queue_ ) - , mtlCommandQueue( mtlCommandQueue_ ) + : pNext{ pNext_ } + , queue{ queue_ } + , mtlCommandQueue{ mtlCommandQueue_ } { } @@ -34782,8 +35085,8 @@ namespace VULKAN_HPP_NAMESPACE # if 
!defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ExportMetalDeviceInfoEXT( MTLDevice_id mtlDevice_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , mtlDevice( mtlDevice_ ) + : pNext{ pNext_ } + , mtlDevice{ mtlDevice_ } { } @@ -34881,9 +35184,9 @@ namespace VULKAN_HPP_NAMESPACE # if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ExportMetalIOSurfaceInfoEXT( VULKAN_HPP_NAMESPACE::Image image_ = {}, IOSurfaceRef ioSurface_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , image( image_ ) - , ioSurface( ioSurface_ ) + : pNext{ pNext_ } + , image{ image_ } + , ioSurface{ ioSurface_ } { } @@ -34989,8 +35292,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR ExportMetalObjectCreateInfoEXT( VULKAN_HPP_NAMESPACE::ExportMetalObjectTypeFlagBitsEXT exportObjectType_ = VULKAN_HPP_NAMESPACE::ExportMetalObjectTypeFlagBitsEXT::eMetalDevice, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , exportObjectType( exportObjectType_ ) + : pNext{ pNext_ } + , exportObjectType{ exportObjectType_ } { } @@ -35087,7 +35390,7 @@ namespace VULKAN_HPP_NAMESPACE static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMetalObjectsInfoEXT; # if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR ExportMetalObjectsInfoEXT( const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext( pNext_ ) {} + VULKAN_HPP_CONSTEXPR ExportMetalObjectsInfoEXT( const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } {} VULKAN_HPP_CONSTEXPR ExportMetalObjectsInfoEXT( ExportMetalObjectsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; @@ -35178,10 +35481,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Event event_ = {}, MTLSharedEvent_id mtlSharedEvent_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , semaphore( semaphore_ ) - , event( event_ ) - , mtlSharedEvent( mtlSharedEvent_ ) + : pNext{ pNext_ } + , semaphore{ semaphore_ } + , event{ event_ } + , mtlSharedEvent{ mtlSharedEvent_ } { } @@ -35302,12 +35605,12 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::ImageAspectFlagBits plane_ = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor, MTLTexture_id mtlTexture_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , image( image_ ) - , imageView( imageView_ ) - , bufferView( bufferView_ ) - , plane( plane_ ) - , mtlTexture( mtlTexture_ ) + : pNext{ pNext_ } + , image{ image_ } + , imageView{ imageView_ } + , bufferView{ bufferView_ } + , plane{ plane_ } + , mtlTexture{ mtlTexture_ } { } @@ -35439,8 +35742,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ExportSemaphoreCreateInfo( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags handleTypes_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , handleTypes( handleTypes_ ) + : pNext{ pNext_ } + , handleTypes{ handleTypes_ } { } @@ -35542,10 +35845,10 @@ namespace VULKAN_HPP_NAMESPACE DWORD dwAccess_ = {}, LPCWSTR name_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , pAttributes( pAttributes_ ) - , dwAccess( dwAccess_ ) - , name( name_ ) + : pNext{ pNext_ } + , pAttributes{ pAttributes_ } + , dwAccess{ dwAccess_ } + , name{ name_ } { } @@ -35653,8 +35956,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR_14 ExtensionProperties( std::array 
const & extensionName_ = {}, uint32_t specVersion_ = {} ) VULKAN_HPP_NOEXCEPT - : extensionName( extensionName_ ) - , specVersion( specVersion_ ) + : extensionName{ extensionName_ } + , specVersion{ specVersion_ } { } @@ -35664,18 +35967,6 @@ namespace VULKAN_HPP_NAMESPACE { } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - ExtensionProperties( std::string const & extensionName_, uint32_t specVersion_ = {} ) : specVersion( specVersion_ ) - { - VULKAN_HPP_ASSERT( extensionName_.size() < VK_MAX_EXTENSION_NAME_SIZE ); -# if defined( WIN32 ) - strncpy_s( extensionName, VK_MAX_EXTENSION_NAME_SIZE, extensionName_.data(), extensionName_.size() ); -# else - strncpy( extensionName, extensionName_.data(), std::min( VK_MAX_EXTENSION_NAME_SIZE, extensionName_.size() ) ); -# endif - } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - ExtensionProperties & operator=( ExtensionProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ @@ -35742,9 +36033,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR ExternalMemoryProperties( VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlags externalMemoryFeatures_ = {}, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags exportFromImportedHandleTypes_ = {}, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags compatibleHandleTypes_ = {} ) VULKAN_HPP_NOEXCEPT - : externalMemoryFeatures( externalMemoryFeatures_ ) - , exportFromImportedHandleTypes( exportFromImportedHandleTypes_ ) - , compatibleHandleTypes( compatibleHandleTypes_ ) + : externalMemoryFeatures{ externalMemoryFeatures_ } + , exportFromImportedHandleTypes{ exportFromImportedHandleTypes_ } + , compatibleHandleTypes{ compatibleHandleTypes_ } { } @@ -35825,8 +36116,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ExternalBufferProperties( VULKAN_HPP_NAMESPACE::ExternalMemoryProperties externalMemoryProperties_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , externalMemoryProperties( externalMemoryProperties_ ) + : pNext{ pNext_ } + , externalMemoryProperties{ externalMemoryProperties_ } { } @@ -35912,10 +36203,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags compatibleHandleTypes_ = {}, VULKAN_HPP_NAMESPACE::ExternalFenceFeatureFlags externalFenceFeatures_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , exportFromImportedHandleTypes( exportFromImportedHandleTypes_ ) - , compatibleHandleTypes( compatibleHandleTypes_ ) - , externalFenceFeatures( externalFenceFeatures_ ) + : pNext{ pNext_ } + , exportFromImportedHandleTypes{ exportFromImportedHandleTypes_ } + , compatibleHandleTypes{ compatibleHandleTypes_ } + , externalFenceFeatures{ externalFenceFeatures_ } { } @@ -36006,8 +36297,8 @@ namespace VULKAN_HPP_NAMESPACE # if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ExternalFormatANDROID( uint64_t externalFormat_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , externalFormat( externalFormat_ ) + : pNext{ pNext_ } + , externalFormat{ externalFormat_ } { } @@ -36104,8 +36395,8 @@ namespace VULKAN_HPP_NAMESPACE # if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ExternalFormatQNX( uint64_t externalFormat_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , externalFormat( externalFormat_ ) + : pNext{ pNext_ } + , externalFormat{ externalFormat_ } { } @@ -36199,8 +36490,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( 
VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ExternalImageFormatProperties( VULKAN_HPP_NAMESPACE::ExternalMemoryProperties externalMemoryProperties_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , externalMemoryProperties( externalMemoryProperties_ ) + : pNext{ pNext_ } + , externalMemoryProperties{ externalMemoryProperties_ } { } @@ -36284,11 +36575,11 @@ namespace VULKAN_HPP_NAMESPACE uint32_t maxArrayLayers_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlags sampleCounts_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize maxResourceSize_ = {} ) VULKAN_HPP_NOEXCEPT - : maxExtent( maxExtent_ ) - , maxMipLevels( maxMipLevels_ ) - , maxArrayLayers( maxArrayLayers_ ) - , sampleCounts( sampleCounts_ ) - , maxResourceSize( maxResourceSize_ ) + : maxExtent{ maxExtent_ } + , maxMipLevels{ maxMipLevels_ } + , maxArrayLayers{ maxArrayLayers_ } + , sampleCounts{ sampleCounts_ } + , maxResourceSize{ maxResourceSize_ } { } @@ -36371,10 +36662,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlagsNV externalMemoryFeatures_ = {}, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV exportFromImportedHandleTypes_ = {}, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV compatibleHandleTypes_ = {} ) VULKAN_HPP_NOEXCEPT - : imageFormatProperties( imageFormatProperties_ ) - , externalMemoryFeatures( externalMemoryFeatures_ ) - , exportFromImportedHandleTypes( exportFromImportedHandleTypes_ ) - , compatibleHandleTypes( compatibleHandleTypes_ ) + : imageFormatProperties{ imageFormatProperties_ } + , externalMemoryFeatures{ externalMemoryFeatures_ } + , exportFromImportedHandleTypes{ exportFromImportedHandleTypes_ } + , compatibleHandleTypes{ compatibleHandleTypes_ } { } @@ -36455,8 +36746,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ExternalMemoryAcquireUnmodifiedEXT( VULKAN_HPP_NAMESPACE::Bool32 acquireUnmodifiedMemory_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , acquireUnmodifiedMemory( acquireUnmodifiedMemory_ ) + : pNext{ pNext_ } + , acquireUnmodifiedMemory{ acquireUnmodifiedMemory_ } { } @@ -36553,8 +36844,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ExternalMemoryBufferCreateInfo( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , handleTypes( handleTypes_ ) + : pNext{ pNext_ } + , handleTypes{ handleTypes_ } { } @@ -36653,8 +36944,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ExternalMemoryImageCreateInfo( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , handleTypes( handleTypes_ ) + : pNext{ pNext_ } + , handleTypes{ handleTypes_ } { } @@ -36753,8 +37044,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ExternalMemoryImageCreateInfoNV( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , handleTypes( handleTypes_ ) + : pNext{ pNext_ } + , handleTypes{ handleTypes_ } { } @@ -36853,10 +37144,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags compatibleHandleTypes_ = {}, VULKAN_HPP_NAMESPACE::ExternalSemaphoreFeatureFlags 
externalSemaphoreFeatures_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , exportFromImportedHandleTypes( exportFromImportedHandleTypes_ ) - , compatibleHandleTypes( compatibleHandleTypes_ ) - , externalSemaphoreFeatures( externalSemaphoreFeatures_ ) + : pNext{ pNext_ } + , exportFromImportedHandleTypes{ exportFromImportedHandleTypes_ } + , compatibleHandleTypes{ compatibleHandleTypes_ } + , externalSemaphoreFeatures{ externalSemaphoreFeatures_ } { } @@ -36946,8 +37237,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR FenceCreateInfo( VULKAN_HPP_NAMESPACE::FenceCreateFlags flags_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) + : pNext{ pNext_ } + , flags{ flags_ } { } @@ -37042,9 +37333,9 @@ namespace VULKAN_HPP_NAMESPACE FenceGetFdInfoKHR( VULKAN_HPP_NAMESPACE::Fence fence_ = {}, VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , fence( fence_ ) - , handleType( handleType_ ) + : pNext{ pNext_ } + , fence{ fence_ } + , handleType{ handleType_ } { } @@ -37150,9 +37441,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Fence fence_ = {}, VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , fence( fence_ ) - , handleType( handleType_ ) + : pNext{ pNext_ } + , fence{ fence_ } + , handleType{ handleType_ } { } @@ -37260,9 +37551,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR FilterCubicImageViewImageFormatPropertiesEXT( VULKAN_HPP_NAMESPACE::Bool32 filterCubic_ = {}, VULKAN_HPP_NAMESPACE::Bool32 filterCubicMinmax_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , filterCubic( filterCubic_ ) - , filterCubicMinmax( filterCubicMinmax_ ) + : pNext{ pNext_ } + , filterCubic{ filterCubic_ } + , filterCubicMinmax{ filterCubicMinmax_ } { } @@ -37343,9 +37634,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR FormatProperties( VULKAN_HPP_NAMESPACE::FormatFeatureFlags linearTilingFeatures_ = {}, VULKAN_HPP_NAMESPACE::FormatFeatureFlags optimalTilingFeatures_ = {}, VULKAN_HPP_NAMESPACE::FormatFeatureFlags bufferFeatures_ = {} ) VULKAN_HPP_NOEXCEPT - : linearTilingFeatures( linearTilingFeatures_ ) - , optimalTilingFeatures( optimalTilingFeatures_ ) - , bufferFeatures( bufferFeatures_ ) + : linearTilingFeatures{ linearTilingFeatures_ } + , optimalTilingFeatures{ optimalTilingFeatures_ } + , bufferFeatures{ bufferFeatures_ } { } @@ -37420,8 +37711,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR FormatProperties2( VULKAN_HPP_NAMESPACE::FormatProperties formatProperties_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , formatProperties( formatProperties_ ) + : pNext{ pNext_ } + , formatProperties{ formatProperties_ } { } @@ -37504,10 +37795,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 optimalTilingFeatures_ = {}, VULKAN_HPP_NAMESPACE::FormatFeatureFlags2 bufferFeatures_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , linearTilingFeatures( linearTilingFeatures_ ) - , optimalTilingFeatures( optimalTilingFeatures_ ) - , bufferFeatures( bufferFeatures_ ) + : pNext{ pNext_ } + 
, linearTilingFeatures{ linearTilingFeatures_ } + , optimalTilingFeatures{ optimalTilingFeatures_ } + , bufferFeatures{ bufferFeatures_ } { } @@ -37596,9 +37887,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR FragmentShadingRateAttachmentInfoKHR( const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pFragmentShadingRateAttachment_ = {}, VULKAN_HPP_NAMESPACE::Extent2D shadingRateAttachmentTexelSize_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , pFragmentShadingRateAttachment( pFragmentShadingRateAttachment_ ) - , shadingRateAttachmentTexelSize( shadingRateAttachmentTexelSize_ ) + : pNext{ pNext_ } + , pFragmentShadingRateAttachment{ pFragmentShadingRateAttachment_ } + , shadingRateAttachmentTexelSize{ shadingRateAttachmentTexelSize_ } { } @@ -37715,16 +38006,16 @@ namespace VULKAN_HPP_NAMESPACE size_t tagSize_ = {}, const void * pTag_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , frameID( frameID_ ) - , imageCount( imageCount_ ) - , pImages( pImages_ ) - , bufferCount( bufferCount_ ) - , pBuffers( pBuffers_ ) - , tagName( tagName_ ) - , tagSize( tagSize_ ) - , pTag( pTag_ ) + : pNext{ pNext_ } + , flags{ flags_ } + , frameID{ frameID_ } + , imageCount{ imageCount_ } + , pImages{ pImages_ } + , bufferCount{ bufferCount_ } + , pBuffers{ pBuffers_ } + , tagName{ tagName_ } + , tagSize{ tagSize_ } + , pTag{ pTag_ } { } @@ -37942,14 +38233,14 @@ namespace VULKAN_HPP_NAMESPACE uint32_t viewFormatCount_ = {}, const VULKAN_HPP_NAMESPACE::Format * pViewFormats_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , usage( usage_ ) - , width( width_ ) - , height( height_ ) - , layerCount( layerCount_ ) - , viewFormatCount( viewFormatCount_ ) - , pViewFormats( pViewFormats_ ) + : pNext{ pNext_ } + , flags{ flags_ } + , usage{ usage_ } + , width{ width_ } + , height{ height_ } + , layerCount{ layerCount_ } + , viewFormatCount{ viewFormatCount_ } + , pViewFormats{ pViewFormats_ } { } @@ -38129,9 +38420,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR FramebufferAttachmentsCreateInfo( uint32_t attachmentImageInfoCount_ = {}, const VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo * pAttachmentImageInfos_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , attachmentImageInfoCount( attachmentImageInfoCount_ ) - , pAttachmentImageInfos( pAttachmentImageInfos_ ) + : pNext{ pNext_ } + , attachmentImageInfoCount{ attachmentImageInfoCount_ } + , pAttachmentImageInfos{ pAttachmentImageInfos_ } { } @@ -38269,14 +38560,14 @@ namespace VULKAN_HPP_NAMESPACE uint32_t height_ = {}, uint32_t layers_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , renderPass( renderPass_ ) - , attachmentCount( attachmentCount_ ) - , pAttachments( pAttachments_ ) - , width( width_ ) - , height( height_ ) - , layers( layers_ ) + : pNext{ pNext_ } + , flags{ flags_ } + , renderPass{ renderPass_ } + , attachmentCount{ attachmentCount_ } + , pAttachments{ pAttachments_ } + , width{ width_ } + , height{ height_ } + , layers{ layers_ } { } @@ -38458,11 +38749,11 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::SampleCountFlags depthStencilSamples_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlags colorSamples_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , coverageReductionMode( coverageReductionMode_ ) - , rasterizationSamples( rasterizationSamples_ ) - , depthStencilSamples( 
depthStencilSamples_ ) - , colorSamples( colorSamples_ ) + : pNext{ pNext_ } + , coverageReductionMode{ coverageReductionMode_ } + , rasterizationSamples{ rasterizationSamples_ } + , depthStencilSamples{ depthStencilSamples_ } + , colorSamples{ colorSamples_ } { } @@ -38550,8 +38841,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR IndirectCommandsStreamNV( VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {} ) VULKAN_HPP_NOEXCEPT - : buffer( buffer_ ) - , offset( offset_ ) + : buffer{ buffer_ } + , offset{ offset_ } { } @@ -38653,20 +38944,20 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , pipelineBindPoint( pipelineBindPoint_ ) - , pipeline( pipeline_ ) - , indirectCommandsLayout( indirectCommandsLayout_ ) - , streamCount( streamCount_ ) - , pStreams( pStreams_ ) - , sequencesCount( sequencesCount_ ) - , preprocessBuffer( preprocessBuffer_ ) - , preprocessOffset( preprocessOffset_ ) - , preprocessSize( preprocessSize_ ) - , sequencesCountBuffer( sequencesCountBuffer_ ) - , sequencesCountOffset( sequencesCountOffset_ ) - , sequencesIndexBuffer( sequencesIndexBuffer_ ) - , sequencesIndexOffset( sequencesIndexOffset_ ) + : pNext{ pNext_ } + , pipelineBindPoint{ pipelineBindPoint_ } + , pipeline{ pipeline_ } + , indirectCommandsLayout{ indirectCommandsLayout_ } + , streamCount{ streamCount_ } + , pStreams{ pStreams_ } + , sequencesCount{ sequencesCount_ } + , preprocessBuffer{ preprocessBuffer_ } + , preprocessOffset{ preprocessOffset_ } + , preprocessSize{ preprocessSize_ } + , sequencesCountBuffer{ sequencesCountBuffer_ } + , sequencesCountOffset{ sequencesCountOffset_ } + , sequencesIndexBuffer{ sequencesIndexBuffer_ } + , sequencesIndexOffset{ sequencesIndexOffset_ } { } @@ -38926,11 +39217,11 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_ = {}, uint32_t maxSequencesCount_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , pipelineBindPoint( pipelineBindPoint_ ) - , pipeline( pipeline_ ) - , indirectCommandsLayout( indirectCommandsLayout_ ) - , maxSequencesCount( maxSequencesCount_ ) + : pNext{ pNext_ } + , pipelineBindPoint{ pipelineBindPoint_ } + , pipeline{ pipeline_ } + , indirectCommandsLayout{ indirectCommandsLayout_ } + , maxSequencesCount{ maxSequencesCount_ } { } @@ -39068,21 +39359,21 @@ namespace VULKAN_HPP_NAMESPACE uint64_t gpuRenderStartTimeUs_ = {}, uint64_t gpuRenderEndTimeUs_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , presentID( presentID_ ) - , inputSampleTimeUs( inputSampleTimeUs_ ) - , simStartTimeUs( simStartTimeUs_ ) - , simEndTimeUs( simEndTimeUs_ ) - , renderSubmitStartTimeUs( renderSubmitStartTimeUs_ ) - , renderSubmitEndTimeUs( renderSubmitEndTimeUs_ ) - , presentStartTimeUs( presentStartTimeUs_ ) - , presentEndTimeUs( presentEndTimeUs_ ) - , driverStartTimeUs( driverStartTimeUs_ ) - , driverEndTimeUs( driverEndTimeUs_ ) - , osRenderQueueStartTimeUs( osRenderQueueStartTimeUs_ ) - , osRenderQueueEndTimeUs( osRenderQueueEndTimeUs_ ) - , gpuRenderStartTimeUs( gpuRenderStartTimeUs_ ) - , gpuRenderEndTimeUs( gpuRenderEndTimeUs_ ) + : pNext{ pNext_ } + , presentID{ presentID_ } + , inputSampleTimeUs{ inputSampleTimeUs_ } + , simStartTimeUs{ simStartTimeUs_ } + , 
simEndTimeUs{ simEndTimeUs_ } + , renderSubmitStartTimeUs{ renderSubmitStartTimeUs_ } + , renderSubmitEndTimeUs{ renderSubmitEndTimeUs_ } + , presentStartTimeUs{ presentStartTimeUs_ } + , presentEndTimeUs{ presentEndTimeUs_ } + , driverStartTimeUs{ driverStartTimeUs_ } + , driverEndTimeUs{ driverEndTimeUs_ } + , osRenderQueueStartTimeUs{ osRenderQueueStartTimeUs_ } + , osRenderQueueEndTimeUs{ osRenderQueueEndTimeUs_ } + , gpuRenderStartTimeUs{ gpuRenderStartTimeUs_ } + , gpuRenderEndTimeUs{ gpuRenderEndTimeUs_ } { } @@ -39213,9 +39504,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR GetLatencyMarkerInfoNV( uint32_t timingCount_ = {}, VULKAN_HPP_NAMESPACE::LatencyTimingsFrameReportNV * pTimings_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , timingCount( timingCount_ ) - , pTimings( pTimings_ ) + : pNext{ pNext_ } + , timingCount{ timingCount_ } + , pTimings{ pTimings_ } { } @@ -39335,9 +39626,9 @@ namespace VULKAN_HPP_NAMESPACE VertexInputBindingDescription( uint32_t binding_ = {}, uint32_t stride_ = {}, VULKAN_HPP_NAMESPACE::VertexInputRate inputRate_ = VULKAN_HPP_NAMESPACE::VertexInputRate::eVertex ) VULKAN_HPP_NOEXCEPT - : binding( binding_ ) - , stride( stride_ ) - , inputRate( inputRate_ ) + : binding{ binding_ } + , stride{ stride_ } + , inputRate{ inputRate_ } { } @@ -39432,10 +39723,10 @@ namespace VULKAN_HPP_NAMESPACE uint32_t binding_ = {}, VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, uint32_t offset_ = {} ) VULKAN_HPP_NOEXCEPT - : location( location_ ) - , binding( binding_ ) - , format( format_ ) - , offset( offset_ ) + : location{ location_ } + , binding{ binding_ } + , format{ format_ } + , offset{ offset_ } { } @@ -39542,12 +39833,12 @@ namespace VULKAN_HPP_NAMESPACE uint32_t vertexAttributeDescriptionCount_ = {}, const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription * pVertexAttributeDescriptions_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , vertexBindingDescriptionCount( vertexBindingDescriptionCount_ ) - , pVertexBindingDescriptions( pVertexBindingDescriptions_ ) - , vertexAttributeDescriptionCount( vertexAttributeDescriptionCount_ ) - , pVertexAttributeDescriptions( pVertexAttributeDescriptions_ ) + : pNext{ pNext_ } + , flags{ flags_ } + , vertexBindingDescriptionCount{ vertexBindingDescriptionCount_ } + , pVertexBindingDescriptions{ pVertexBindingDescriptions_ } + , vertexAttributeDescriptionCount{ vertexAttributeDescriptionCount_ } + , pVertexAttributeDescriptions{ pVertexAttributeDescriptions_ } { } @@ -39725,10 +40016,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::PrimitiveTopology topology_ = VULKAN_HPP_NAMESPACE::PrimitiveTopology::ePointList, VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , topology( topology_ ) - , primitiveRestartEnable( primitiveRestartEnable_ ) + : pNext{ pNext_ } + , flags{ flags_ } + , topology{ topology_ } + , primitiveRestartEnable{ primitiveRestartEnable_ } { } @@ -39846,9 +40137,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR PipelineTessellationStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateFlags flags_ = {}, uint32_t patchControlPoints_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , patchControlPoints( patchControlPoints_ ) + : pNext{ pNext_ } + , flags{ flags_ } + , patchControlPoints{ 
patchControlPoints_ } { } @@ -39959,12 +40250,12 @@ namespace VULKAN_HPP_NAMESPACE uint32_t scissorCount_ = {}, const VULKAN_HPP_NAMESPACE::Rect2D * pScissors_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , viewportCount( viewportCount_ ) - , pViewports( pViewports_ ) - , scissorCount( scissorCount_ ) - , pScissors( pScissors_ ) + : pNext{ pNext_ } + , flags{ flags_ } + , viewportCount{ viewportCount_ } + , pViewports{ pViewports_ } + , scissorCount{ scissorCount_ } + , pScissors{ pScissors_ } { } @@ -40140,18 +40431,18 @@ namespace VULKAN_HPP_NAMESPACE float depthBiasSlopeFactor_ = {}, float lineWidth_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , depthClampEnable( depthClampEnable_ ) - , rasterizerDiscardEnable( rasterizerDiscardEnable_ ) - , polygonMode( polygonMode_ ) - , cullMode( cullMode_ ) - , frontFace( frontFace_ ) - , depthBiasEnable( depthBiasEnable_ ) - , depthBiasConstantFactor( depthBiasConstantFactor_ ) - , depthBiasClamp( depthBiasClamp_ ) - , depthBiasSlopeFactor( depthBiasSlopeFactor_ ) - , lineWidth( lineWidth_ ) + : pNext{ pNext_ } + , flags{ flags_ } + , depthClampEnable{ depthClampEnable_ } + , rasterizerDiscardEnable{ rasterizerDiscardEnable_ } + , polygonMode{ polygonMode_ } + , cullMode{ cullMode_ } + , frontFace{ frontFace_ } + , depthBiasEnable{ depthBiasEnable_ } + , depthBiasConstantFactor{ depthBiasConstantFactor_ } + , depthBiasClamp{ depthBiasClamp_ } + , depthBiasSlopeFactor{ depthBiasSlopeFactor_ } + , lineWidth{ lineWidth_ } { } @@ -40353,14 +40644,14 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable_ = {}, VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , rasterizationSamples( rasterizationSamples_ ) - , sampleShadingEnable( sampleShadingEnable_ ) - , minSampleShading( minSampleShading_ ) - , pSampleMask( pSampleMask_ ) - , alphaToCoverageEnable( alphaToCoverageEnable_ ) - , alphaToOneEnable( alphaToOneEnable_ ) + : pNext{ pNext_ } + , flags{ flags_ } + , rasterizationSamples{ rasterizationSamples_ } + , sampleShadingEnable{ sampleShadingEnable_ } + , minSampleShading{ minSampleShading_ } + , pSampleMask{ pSampleMask_ } + , alphaToCoverageEnable{ alphaToCoverageEnable_ } + , alphaToOneEnable{ alphaToOneEnable_ } { } @@ -40513,13 +40804,13 @@ namespace VULKAN_HPP_NAMESPACE uint32_t compareMask_ = {}, uint32_t writeMask_ = {}, uint32_t reference_ = {} ) VULKAN_HPP_NOEXCEPT - : failOp( failOp_ ) - , passOp( passOp_ ) - , depthFailOp( depthFailOp_ ) - , compareOp( compareOp_ ) - , compareMask( compareMask_ ) - , writeMask( writeMask_ ) - , reference( reference_ ) + : failOp{ failOp_ } + , passOp{ passOp_ } + , depthFailOp{ depthFailOp_ } + , compareOp{ compareOp_ } + , compareMask{ compareMask_ } + , writeMask{ writeMask_ } + , reference{ reference_ } { } @@ -40656,17 +40947,17 @@ namespace VULKAN_HPP_NAMESPACE float minDepthBounds_ = {}, float maxDepthBounds_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , depthTestEnable( depthTestEnable_ ) - , depthWriteEnable( depthWriteEnable_ ) - , depthCompareOp( depthCompareOp_ ) - , depthBoundsTestEnable( depthBoundsTestEnable_ ) - , stencilTestEnable( stencilTestEnable_ ) - , front( front_ ) - , back( back_ ) - , minDepthBounds( minDepthBounds_ ) - , maxDepthBounds( maxDepthBounds_ ) + : pNext{ pNext_ } + , flags{ flags_ 
} + , depthTestEnable{ depthTestEnable_ } + , depthWriteEnable{ depthWriteEnable_ } + , depthCompareOp{ depthCompareOp_ } + , depthBoundsTestEnable{ depthBoundsTestEnable_ } + , stencilTestEnable{ stencilTestEnable_ } + , front{ front_ } + , back{ back_ } + , minDepthBounds{ minDepthBounds_ } + , maxDepthBounds{ maxDepthBounds_ } { } @@ -40855,14 +41146,14 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::BlendFactor dstAlphaBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero, VULKAN_HPP_NAMESPACE::BlendOp alphaBlendOp_ = VULKAN_HPP_NAMESPACE::BlendOp::eAdd, VULKAN_HPP_NAMESPACE::ColorComponentFlags colorWriteMask_ = {} ) VULKAN_HPP_NOEXCEPT - : blendEnable( blendEnable_ ) - , srcColorBlendFactor( srcColorBlendFactor_ ) - , dstColorBlendFactor( dstColorBlendFactor_ ) - , colorBlendOp( colorBlendOp_ ) - , srcAlphaBlendFactor( srcAlphaBlendFactor_ ) - , dstAlphaBlendFactor( dstAlphaBlendFactor_ ) - , alphaBlendOp( alphaBlendOp_ ) - , colorWriteMask( colorWriteMask_ ) + : blendEnable{ blendEnable_ } + , srcColorBlendFactor{ srcColorBlendFactor_ } + , dstColorBlendFactor{ dstColorBlendFactor_ } + , colorBlendOp{ colorBlendOp_ } + , srcAlphaBlendFactor{ srcAlphaBlendFactor_ } + , dstAlphaBlendFactor{ dstAlphaBlendFactor_ } + , alphaBlendOp{ alphaBlendOp_ } + , colorWriteMask{ colorWriteMask_ } { } @@ -41013,13 +41304,13 @@ namespace VULKAN_HPP_NAMESPACE const VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState * pAttachments_ = {}, std::array const & blendConstants_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , logicOpEnable( logicOpEnable_ ) - , logicOp( logicOp_ ) - , attachmentCount( attachmentCount_ ) - , pAttachments( pAttachments_ ) - , blendConstants( blendConstants_ ) + : pNext{ pNext_ } + , flags{ flags_ } + , logicOpEnable{ logicOpEnable_ } + , logicOp{ logicOp_ } + , attachmentCount{ attachmentCount_ } + , pAttachments{ pAttachments_ } + , blendConstants{ blendConstants_ } { } @@ -41191,10 +41482,10 @@ namespace VULKAN_HPP_NAMESPACE uint32_t dynamicStateCount_ = {}, const VULKAN_HPP_NAMESPACE::DynamicState * pDynamicStates_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , dynamicStateCount( dynamicStateCount_ ) - , pDynamicStates( pDynamicStates_ ) + : pNext{ pNext_ } + , flags{ flags_ } + , dynamicStateCount{ dynamicStateCount_ } + , pDynamicStates{ pDynamicStates_ } { } @@ -41344,24 +41635,24 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, int32_t basePipelineIndex_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , stageCount( stageCount_ ) - , pStages( pStages_ ) - , pVertexInputState( pVertexInputState_ ) - , pInputAssemblyState( pInputAssemblyState_ ) - , pTessellationState( pTessellationState_ ) - , pViewportState( pViewportState_ ) - , pRasterizationState( pRasterizationState_ ) - , pMultisampleState( pMultisampleState_ ) - , pDepthStencilState( pDepthStencilState_ ) - , pColorBlendState( pColorBlendState_ ) - , pDynamicState( pDynamicState_ ) - , layout( layout_ ) - , renderPass( renderPass_ ) - , subpass( subpass_ ) - , basePipelineHandle( basePipelineHandle_ ) - , basePipelineIndex( basePipelineIndex_ ) + : pNext{ pNext_ } + , flags{ flags_ } + , stageCount{ stageCount_ } + , pStages{ pStages_ } + , pVertexInputState{ pVertexInputState_ } + , pInputAssemblyState{ pInputAssemblyState_ } + , pTessellationState{ pTessellationState_ } + , pViewportState{ 
pViewportState_ } + , pRasterizationState{ pRasterizationState_ } + , pMultisampleState{ pMultisampleState_ } + , pDepthStencilState{ pDepthStencilState_ } + , pColorBlendState{ pColorBlendState_ } + , pDynamicState{ pDynamicState_ } + , layout{ layout_ } + , renderPass{ renderPass_ } + , subpass{ subpass_ } + , basePipelineHandle{ basePipelineHandle_ } + , basePipelineIndex{ basePipelineIndex_ } { } @@ -41670,8 +41961,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR GraphicsPipelineLibraryCreateInfoEXT( VULKAN_HPP_NAMESPACE::GraphicsPipelineLibraryFlagsEXT flags_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) + : pNext{ pNext_ } + , flags{ flags_ } { } @@ -41770,11 +42061,11 @@ namespace VULKAN_HPP_NAMESPACE const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo * pVertexInputState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo * pTessellationState_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , stageCount( stageCount_ ) - , pStages( pStages_ ) - , pVertexInputState( pVertexInputState_ ) - , pTessellationState( pTessellationState_ ) + : pNext{ pNext_ } + , stageCount{ stageCount_ } + , pStages{ pStages_ } + , pVertexInputState{ pVertexInputState_ } + , pTessellationState{ pTessellationState_ } { } @@ -41927,11 +42218,11 @@ namespace VULKAN_HPP_NAMESPACE uint32_t pipelineCount_ = {}, const VULKAN_HPP_NAMESPACE::Pipeline * pPipelines_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , groupCount( groupCount_ ) - , pGroups( pGroups_ ) - , pipelineCount( pipelineCount_ ) - , pPipelines( pPipelines_ ) + : pNext{ pNext_ } + , groupCount{ groupCount_ } + , pGroups{ pGroups_ } + , pipelineCount{ pipelineCount_ } + , pPipelines{ pPipelines_ } { } @@ -42085,8 +42376,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR XYColorEXT( float x_ = {}, float y_ = {} ) VULKAN_HPP_NOEXCEPT - : x( x_ ) - , y( y_ ) + : x{ x_ } + , y{ y_ } { } @@ -42179,15 +42470,15 @@ namespace VULKAN_HPP_NAMESPACE float maxContentLightLevel_ = {}, float maxFrameAverageLightLevel_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , displayPrimaryRed( displayPrimaryRed_ ) - , displayPrimaryGreen( displayPrimaryGreen_ ) - , displayPrimaryBlue( displayPrimaryBlue_ ) - , whitePoint( whitePoint_ ) - , maxLuminance( maxLuminance_ ) - , minLuminance( minLuminance_ ) - , maxContentLightLevel( maxContentLightLevel_ ) - , maxFrameAverageLightLevel( maxFrameAverageLightLevel_ ) + : pNext{ pNext_ } + , displayPrimaryRed{ displayPrimaryRed_ } + , displayPrimaryGreen{ displayPrimaryGreen_ } + , displayPrimaryBlue{ displayPrimaryBlue_ } + , whitePoint{ whitePoint_ } + , maxLuminance{ maxLuminance_ } + , minLuminance{ minLuminance_ } + , maxContentLightLevel{ maxContentLightLevel_ } + , maxFrameAverageLightLevel{ maxFrameAverageLightLevel_ } { } @@ -42350,8 +42641,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR HeadlessSurfaceCreateInfoEXT( VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateFlagsEXT flags_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) + : pNext{ pNext_ } + , flags{ flags_ } { } @@ -42448,9 +42739,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR HostImageCopyDevicePerformanceQueryEXT( VULKAN_HPP_NAMESPACE::Bool32 
optimalDeviceAccess_ = {},
                                                                   VULKAN_HPP_NAMESPACE::Bool32 identicalMemoryLayout_ = {},
                                                                   void *                       pNext_                = nullptr ) VULKAN_HPP_NOEXCEPT
-      : pNext( pNext_ )
-      , optimalDeviceAccess( optimalDeviceAccess_ )
-      , identicalMemoryLayout( identicalMemoryLayout_ )
+      : pNext{ pNext_ }
+      , optimalDeviceAccess{ optimalDeviceAccess_ }
+      , identicalMemoryLayout{ identicalMemoryLayout_ }
     {
     }
@@ -42537,11 +42828,11 @@ namespace VULKAN_HPP_NAMESPACE
                                                  VULKAN_HPP_NAMESPACE::ImageLayout           newLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined,
                                                  VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange_ = {},
                                                  const void *                                pNext_            = nullptr ) VULKAN_HPP_NOEXCEPT
-      : pNext( pNext_ )
-      , image( image_ )
-      , oldLayout( oldLayout_ )
-      , newLayout( newLayout_ )
-      , subresourceRange( subresourceRange_ )
+      : pNext{ pNext_ }
+      , image{ image_ }
+      , oldLayout{ oldLayout_ }
+      , newLayout{ newLayout_ }
+      , subresourceRange{ subresourceRange_ }
     {
     }
@@ -42667,9 +42958,9 @@ namespace VULKAN_HPP_NAMESPACE
     VULKAN_HPP_CONSTEXPR IOSSurfaceCreateInfoMVK( VULKAN_HPP_NAMESPACE::IOSSurfaceCreateFlagsMVK flags_ = {},
                                                   const void *                                  pView_ = {},
                                                   const void *                                  pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
-      : pNext( pNext_ )
-      , flags( flags_ )
-      , pView( pView_ )
+      : pNext{ pNext_ }
+      , flags{ flags_ }
+      , pView{ pView_ }
     {
     }
@@ -42763,6 +43054,102 @@ namespace VULKAN_HPP_NAMESPACE
   };
 #endif /*VK_USE_PLATFORM_IOS_MVK*/
 
+  struct ImageAlignmentControlCreateInfoMESA
+  {
+    using NativeType = VkImageAlignmentControlCreateInfoMESA;
+
+    static const bool                                  allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType  = StructureType::eImageAlignmentControlCreateInfoMESA;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR ImageAlignmentControlCreateInfoMESA( uint32_t maximumRequestedAlignment_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext{ pNext_ }
+      , maximumRequestedAlignment{ maximumRequestedAlignment_ }
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR ImageAlignmentControlCreateInfoMESA( ImageAlignmentControlCreateInfoMESA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    ImageAlignmentControlCreateInfoMESA( VkImageAlignmentControlCreateInfoMESA const & rhs ) VULKAN_HPP_NOEXCEPT
+      : ImageAlignmentControlCreateInfoMESA( *reinterpret_cast<ImageAlignmentControlCreateInfoMESA const *>( &rhs ) )
+    {
+    }
+
+    ImageAlignmentControlCreateInfoMESA & operator=( ImageAlignmentControlCreateInfoMESA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    ImageAlignmentControlCreateInfoMESA & operator=( VkImageAlignmentControlCreateInfoMESA const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageAlignmentControlCreateInfoMESA const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 ImageAlignmentControlCreateInfoMESA & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 ImageAlignmentControlCreateInfoMESA & setMaximumRequestedAlignment( uint32_t maximumRequestedAlignment_ ) VULKAN_HPP_NOEXCEPT
+    {
+      maximumRequestedAlignment = maximumRequestedAlignment_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkImageAlignmentControlCreateInfoMESA const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkImageAlignmentControlCreateInfoMESA *>( this );
+    }
+
+    operator VkImageAlignmentControlCreateInfoMESA &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkImageAlignmentControlCreateInfoMESA *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::StructureType const &, const void * const &, uint32_t const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return
std::tie( sType, pNext, maximumRequestedAlignment ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ImageAlignmentControlCreateInfoMESA const & ) const = default; +#else + bool operator==( ImageAlignmentControlCreateInfoMESA const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maximumRequestedAlignment == rhs.maximumRequestedAlignment ); +# endif + } + + bool operator!=( ImageAlignmentControlCreateInfoMESA const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageAlignmentControlCreateInfoMESA; + const void * pNext = {}; + uint32_t maximumRequestedAlignment = {}; + }; + + template <> + struct CppType + { + using Type = ImageAlignmentControlCreateInfoMESA; + }; + struct ImageBlit { using NativeType = VkImageBlit; @@ -42772,10 +43159,10 @@ namespace VULKAN_HPP_NAMESPACE std::array const & srcOffsets_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, std::array const & dstOffsets_ = {} ) VULKAN_HPP_NOEXCEPT - : srcSubresource( srcSubresource_ ) - , srcOffsets( srcOffsets_ ) - , dstSubresource( dstSubresource_ ) - , dstOffsets( dstOffsets_ ) + : srcSubresource{ srcSubresource_ } + , srcOffsets{ srcOffsets_ } + , dstSubresource{ dstSubresource_ } + , dstOffsets{ dstOffsets_ } { } @@ -42878,8 +43265,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ImageCaptureDescriptorDataInfoEXT( VULKAN_HPP_NAMESPACE::Image image_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , image( image_ ) + : pNext{ pNext_ } + , image{ image_ } { } @@ -42977,10 +43364,10 @@ namespace VULKAN_HPP_NAMESPACE uint32_t compressionControlPlaneCount_ = {}, VULKAN_HPP_NAMESPACE::ImageCompressionFixedRateFlagsEXT * pFixedRateFlags_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , compressionControlPlaneCount( compressionControlPlaneCount_ ) - , pFixedRateFlags( pFixedRateFlags_ ) + : pNext{ pNext_ } + , flags{ flags_ } + , compressionControlPlaneCount{ compressionControlPlaneCount_ } + , pFixedRateFlags{ pFixedRateFlags_ } { } @@ -43119,9 +43506,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR ImageCompressionPropertiesEXT( VULKAN_HPP_NAMESPACE::ImageCompressionFlagsEXT imageCompressionFlags_ = {}, VULKAN_HPP_NAMESPACE::ImageCompressionFixedRateFlagsEXT imageCompressionFixedRateFlags_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , imageCompressionFlags( imageCompressionFlags_ ) - , imageCompressionFixedRateFlags( imageCompressionFixedRateFlags_ ) + : pNext{ pNext_ } + , imageCompressionFlags{ imageCompressionFlags_ } + , imageCompressionFixedRateFlags{ imageCompressionFixedRateFlags_ } { } @@ -43214,13 +43601,13 @@ namespace VULKAN_HPP_NAMESPACE uint32_t colorSpaceCount_ = {}, const VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA * pColorSpaces_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , imageCreateInfo( imageCreateInfo_ ) - , requiredFormatFeatures( requiredFormatFeatures_ ) - , flags( flags_ ) - , sysmemPixelFormat( sysmemPixelFormat_ ) - , colorSpaceCount( colorSpaceCount_ ) - , pColorSpaces( pColorSpaces_ ) + : pNext{ pNext_ } + , imageCreateInfo{ imageCreateInfo_ } + , requiredFormatFeatures{ 
requiredFormatFeatures_ } + , flags{ flags_ } + , sysmemPixelFormat{ sysmemPixelFormat_ } + , colorSpaceCount{ colorSpaceCount_ } + , pColorSpaces{ pColorSpaces_ } { } @@ -43396,11 +43783,11 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA bufferCollectionConstraints_ = {}, VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFlagsFUCHSIA flags_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , formatConstraintsCount( formatConstraintsCount_ ) - , pFormatConstraints( pFormatConstraints_ ) - , bufferCollectionConstraints( bufferCollectionConstraints_ ) - , flags( flags_ ) + : pNext{ pNext_ } + , formatConstraintsCount{ formatConstraintsCount_ } + , pFormatConstraints{ pFormatConstraints_ } + , bufferCollectionConstraints{ bufferCollectionConstraints_ } + , flags{ flags_ } { } @@ -43552,11 +43939,11 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D dstOffset_ = {}, VULKAN_HPP_NAMESPACE::Extent3D extent_ = {} ) VULKAN_HPP_NOEXCEPT - : srcSubresource( srcSubresource_ ) - , srcOffset( srcOffset_ ) - , dstSubresource( dstSubresource_ ) - , dstOffset( dstOffset_ ) - , extent( extent_ ) + : srcSubresource{ srcSubresource_ } + , srcOffset{ srcOffset_ } + , dstSubresource{ dstSubresource_ } + , dstOffset{ dstOffset_ } + , extent{ extent_ } { } @@ -43668,11 +44055,11 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DeviceSize rowPitch_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize arrayPitch_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize depthPitch_ = {} ) VULKAN_HPP_NOEXCEPT - : offset( offset_ ) - , size( size_ ) - , rowPitch( rowPitch_ ) - , arrayPitch( arrayPitch_ ) - , depthPitch( depthPitch_ ) + : offset{ offset_ } + , size{ size_ } + , rowPitch{ rowPitch_ } + , arrayPitch{ arrayPitch_ } + , depthPitch{ depthPitch_ } { } @@ -43786,10 +44173,10 @@ namespace VULKAN_HPP_NAMESPACE uint32_t drmFormatModifierPlaneCount_ = {}, const VULKAN_HPP_NAMESPACE::SubresourceLayout * pPlaneLayouts_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , drmFormatModifier( drmFormatModifier_ ) - , drmFormatModifierPlaneCount( drmFormatModifierPlaneCount_ ) - , pPlaneLayouts( pPlaneLayouts_ ) + : pNext{ pNext_ } + , drmFormatModifier{ drmFormatModifier_ } + , drmFormatModifierPlaneCount{ drmFormatModifierPlaneCount_ } + , pPlaneLayouts{ pPlaneLayouts_ } { } @@ -43930,9 +44317,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierListCreateInfoEXT( uint32_t drmFormatModifierCount_ = {}, const uint64_t * pDrmFormatModifiers_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , drmFormatModifierCount( drmFormatModifierCount_ ) - , pDrmFormatModifiers( pDrmFormatModifiers_ ) + : pNext{ pNext_ } + , drmFormatModifierCount{ drmFormatModifierCount_ } + , pDrmFormatModifiers{ pDrmFormatModifiers_ } { } @@ -44053,8 +44440,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierPropertiesEXT( uint64_t drmFormatModifier_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , drmFormatModifier( drmFormatModifier_ ) + : pNext{ pNext_ } + , drmFormatModifier{ drmFormatModifier_ } { } @@ -44137,9 +44524,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR ImageFormatListCreateInfo( uint32_t viewFormatCount_ = {}, const VULKAN_HPP_NAMESPACE::Format * pViewFormats_ = {}, const void * pNext_ = nullptr ) 
VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , viewFormatCount( viewFormatCount_ ) - , pViewFormats( pViewFormats_ ) + : pNext{ pNext_ } + , viewFormatCount{ viewFormatCount_ } + , pViewFormats{ pViewFormats_ } { } @@ -44262,8 +44649,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ImageFormatProperties2( VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , imageFormatProperties( imageFormatProperties_ ) + : pNext{ pNext_ } + , imageFormatProperties{ imageFormatProperties_ } { } @@ -44354,15 +44741,15 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Image image_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , srcAccessMask( srcAccessMask_ ) - , dstAccessMask( dstAccessMask_ ) - , oldLayout( oldLayout_ ) - , newLayout( newLayout_ ) - , srcQueueFamilyIndex( srcQueueFamilyIndex_ ) - , dstQueueFamilyIndex( dstQueueFamilyIndex_ ) - , image( image_ ) - , subresourceRange( subresourceRange_ ) + : pNext{ pNext_ } + , srcAccessMask{ srcAccessMask_ } + , dstAccessMask{ dstAccessMask_ } + , oldLayout{ oldLayout_ } + , newLayout{ newLayout_ } + , srcQueueFamilyIndex{ srcQueueFamilyIndex_ } + , dstQueueFamilyIndex{ dstQueueFamilyIndex_ } + , image{ image_ } + , subresourceRange{ subresourceRange_ } { } @@ -44515,8 +44902,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ImageMemoryRequirementsInfo2( VULKAN_HPP_NAMESPACE::Image image_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , image( image_ ) + : pNext{ pNext_ } + , image{ image_ } { } @@ -44616,9 +45003,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR ImagePipeSurfaceCreateInfoFUCHSIA( VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateFlagsFUCHSIA flags_ = {}, zx_handle_t imagePipeHandle_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , imagePipeHandle( imagePipeHandle_ ) + : pNext{ pNext_ } + , flags{ flags_ } + , imagePipeHandle{ imagePipeHandle_ } { } @@ -44735,8 +45122,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR ImagePlaneMemoryRequirementsInfo( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , planeAspect( planeAspect_ ) + : pNext{ pNext_ } + , planeAspect{ planeAspect_ } { } @@ -44834,11 +45221,11 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D dstOffset_ = {}, VULKAN_HPP_NAMESPACE::Extent3D extent_ = {} ) VULKAN_HPP_NOEXCEPT - : srcSubresource( srcSubresource_ ) - , srcOffset( srcOffset_ ) - , dstSubresource( dstSubresource_ ) - , dstOffset( dstOffset_ ) - , extent( extent_ ) + : srcSubresource{ srcSubresource_ } + , srcOffset{ srcOffset_ } + , dstSubresource{ dstSubresource_ } + , dstOffset{ dstOffset_ } + , extent{ extent_ } { } @@ -44954,12 +45341,12 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Offset3D dstOffset_ = {}, VULKAN_HPP_NAMESPACE::Extent3D extent_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , srcSubresource( srcSubresource_ ) - , srcOffset( srcOffset_ ) - , dstSubresource( dstSubresource_ ) - , dstOffset( dstOffset_ ) - , extent( extent_ ) + : pNext{ pNext_ } + 
, srcSubresource{ srcSubresource_ } + , srcOffset{ srcOffset_ } + , dstSubresource{ dstSubresource_ } + , dstOffset{ dstOffset_ } + , extent{ extent_ } { } @@ -45088,8 +45475,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ImageSparseMemoryRequirementsInfo2( VULKAN_HPP_NAMESPACE::Image image_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , image( image_ ) + : pNext{ pNext_ } + , image{ image_ } { } @@ -45187,8 +45574,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ImageStencilUsageCreateInfo( VULKAN_HPP_NAMESPACE::ImageUsageFlags stencilUsage_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , stencilUsage( stencilUsage_ ) + : pNext{ pNext_ } + , stencilUsage{ stencilUsage_ } { } @@ -45285,8 +45672,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ImageSwapchainCreateInfoKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , swapchain( swapchain_ ) + : pNext{ pNext_ } + , swapchain{ swapchain_ } { } @@ -45382,8 +45769,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ImageViewASTCDecodeModeEXT( VULKAN_HPP_NAMESPACE::Format decodeMode_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , decodeMode( decodeMode_ ) + : pNext{ pNext_ } + , decodeMode{ decodeMode_ } { } @@ -45480,9 +45867,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR ImageViewAddressPropertiesNVX( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , deviceAddress( deviceAddress_ ) - , size( size_ ) + : pNext{ pNext_ } + , deviceAddress{ deviceAddress_ } + , size{ size_ } { } @@ -45566,8 +45953,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ImageViewCaptureDescriptorDataInfoEXT( VULKAN_HPP_NAMESPACE::ImageView imageView_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , imageView( imageView_ ) + : pNext{ pNext_ } + , imageView{ imageView_ } { } @@ -45668,13 +46055,13 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::ComponentMapping components_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , image( image_ ) - , viewType( viewType_ ) - , format( format_ ) - , components( components_ ) - , subresourceRange( subresourceRange_ ) + : pNext{ pNext_ } + , flags{ flags_ } + , image{ image_ } + , viewType{ viewType_ } + , format{ format_ } + , components{ components_ } + , subresourceRange{ subresourceRange_ } { } @@ -45815,10 +46202,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler, VULKAN_HPP_NAMESPACE::Sampler sampler_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , imageView( imageView_ ) - , descriptorType( descriptorType_ ) - , sampler( sampler_ ) + : pNext{ pNext_ } + , imageView{ imageView_ } + , descriptorType{ descriptorType_ } + , sampler{ sampler_ } { } @@ -45932,8 +46319,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( 
VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ImageViewMinLodCreateInfoEXT( float minLod_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , minLod( minLod_ ) + : pNext{ pNext_ } + , minLod{ minLod_ } { } @@ -46031,10 +46418,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Extent2D filterSize_ = {}, uint32_t numPhases_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , filterCenter( filterCenter_ ) - , filterSize( filterSize_ ) - , numPhases( numPhases_ ) + : pNext{ pNext_ } + , filterCenter{ filterCenter_ } + , filterSize{ filterSize_ } + , numPhases{ numPhases_ } { } @@ -46149,9 +46536,9 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ImageViewSlicedCreateInfoEXT( uint32_t sliceOffset_ = {}, uint32_t sliceCount_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , sliceOffset( sliceOffset_ ) - , sliceCount( sliceCount_ ) + : pNext{ pNext_ } + , sliceOffset{ sliceOffset_ } + , sliceCount{ sliceCount_ } { } @@ -46253,8 +46640,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ImageViewUsageCreateInfo( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , usage( usage_ ) + : pNext{ pNext_ } + , usage{ usage_ } { } @@ -46352,8 +46739,8 @@ namespace VULKAN_HPP_NAMESPACE # if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ImportAndroidHardwareBufferInfoANDROID( struct AHardwareBuffer * buffer_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , buffer( buffer_ ) + : pNext{ pNext_ } + , buffer{ buffer_ } { } @@ -46454,11 +46841,11 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd, int fd_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , fence( fence_ ) - , flags( flags_ ) - , handleType( handleType_ ) - , fd( fd_ ) + : pNext{ pNext_ } + , fence{ fence_ } + , flags{ flags_ } + , handleType{ handleType_ } + , fd{ fd_ } { } @@ -46587,12 +46974,12 @@ namespace VULKAN_HPP_NAMESPACE HANDLE handle_ = {}, LPCWSTR name_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , fence( fence_ ) - , flags( flags_ ) - , handleType( handleType_ ) - , handle( handle_ ) - , name( name_ ) + : pNext{ pNext_ } + , fence{ fence_ } + , flags{ flags_ } + , handleType{ handleType_ } + , handle{ handle_ } + , name{ name_ } { } @@ -46727,9 +47114,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR ImportMemoryBufferCollectionFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection_ = {}, uint32_t index_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , collection( collection_ ) - , index( index_ ) + : pNext{ pNext_ } + , collection{ collection_ } + , index{ index_ } { } @@ -46835,9 +47222,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, int fd_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , handleType( handleType_ ) - , fd( fd_ ) + : pNext{ pNext_ } + , handleType{ handleType_ } + , fd{ fd_ } { } @@ -46942,9 +47329,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits 
handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, void * pHostPointer_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , handleType( handleType_ ) - , pHostPointer( pHostPointer_ ) + : pNext{ pNext_ } + , handleType{ handleType_ } + , pHostPointer{ pHostPointer_ } { } @@ -47053,10 +47440,10 @@ namespace VULKAN_HPP_NAMESPACE HANDLE handle_ = {}, LPCWSTR name_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , handleType( handleType_ ) - , handle( handle_ ) - , name( name_ ) + : pNext{ pNext_ } + , handleType{ handleType_ } + , handle{ handle_ } + , name{ name_ } { } @@ -47174,9 +47561,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR ImportMemoryWin32HandleInfoNV( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType_ = {}, HANDLE handle_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , handleType( handleType_ ) - , handle( handle_ ) + : pNext{ pNext_ } + , handleType{ handleType_ } + , handle{ handle_ } { } @@ -47284,9 +47671,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, zx_handle_t handle_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , handleType( handleType_ ) - , handle( handle_ ) + : pNext{ pNext_ } + , handleType{ handleType_ } + , handle{ handle_ } { } @@ -47403,8 +47790,8 @@ namespace VULKAN_HPP_NAMESPACE # if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ImportMetalBufferInfoEXT( MTLBuffer_id mtlBuffer_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , mtlBuffer( mtlBuffer_ ) + : pNext{ pNext_ } + , mtlBuffer{ mtlBuffer_ } { } @@ -47501,8 +47888,8 @@ namespace VULKAN_HPP_NAMESPACE # if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ImportMetalIOSurfaceInfoEXT( IOSurfaceRef ioSurface_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , ioSurface( ioSurface_ ) + : pNext{ pNext_ } + , ioSurface{ ioSurface_ } { } @@ -47599,8 +47986,8 @@ namespace VULKAN_HPP_NAMESPACE # if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ImportMetalSharedEventInfoEXT( MTLSharedEvent_id mtlSharedEvent_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , mtlSharedEvent( mtlSharedEvent_ ) + : pNext{ pNext_ } + , mtlSharedEvent{ mtlSharedEvent_ } { } @@ -47699,9 +48086,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR ImportMetalTextureInfoEXT( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits plane_ = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor, MTLTexture_id mtlTexture_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , plane( plane_ ) - , mtlTexture( mtlTexture_ ) + : pNext{ pNext_ } + , plane{ plane_ } + , mtlTexture{ mtlTexture_ } { } @@ -47805,8 +48192,8 @@ namespace VULKAN_HPP_NAMESPACE # if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ImportScreenBufferInfoQNX( struct _screen_buffer * buffer_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , buffer( buffer_ ) + : pNext{ pNext_ } + , buffer{ buffer_ } { } @@ -47907,11 +48294,11 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd, int fd_ = {}, const void * pNext_ = nullptr ) 
VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , semaphore( semaphore_ ) - , flags( flags_ ) - , handleType( handleType_ ) - , fd( fd_ ) + : pNext{ pNext_ } + , semaphore{ semaphore_ } + , flags{ flags_ } + , handleType{ handleType_ } + , fd{ fd_ } { } @@ -48041,12 +48428,12 @@ namespace VULKAN_HPP_NAMESPACE HANDLE handle_ = {}, LPCWSTR name_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , semaphore( semaphore_ ) - , flags( flags_ ) - , handleType( handleType_ ) - , handle( handle_ ) - , name( name_ ) + : pNext{ pNext_ } + , semaphore{ semaphore_ } + , flags{ flags_ } + , handleType{ handleType_ } + , handle{ handle_ } + , name{ name_ } { } @@ -48184,11 +48571,11 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd, zx_handle_t zirconHandle_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , semaphore( semaphore_ ) - , flags( flags_ ) - , handleType( handleType_ ) - , zirconHandle( zirconHandle_ ) + : pNext{ pNext_ } + , semaphore{ semaphore_ } + , flags{ flags_ } + , handleType{ handleType_ } + , zirconHandle{ zirconHandle_ } { } @@ -48338,20 +48725,20 @@ namespace VULKAN_HPP_NAMESPACE const VULKAN_HPP_NAMESPACE::IndexType * pIndexTypes_ = {}, const uint32_t * pIndexTypeValues_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , tokenType( tokenType_ ) - , stream( stream_ ) - , offset( offset_ ) - , vertexBindingUnit( vertexBindingUnit_ ) - , vertexDynamicStride( vertexDynamicStride_ ) - , pushconstantPipelineLayout( pushconstantPipelineLayout_ ) - , pushconstantShaderStageFlags( pushconstantShaderStageFlags_ ) - , pushconstantOffset( pushconstantOffset_ ) - , pushconstantSize( pushconstantSize_ ) - , indirectStateFlags( indirectStateFlags_ ) - , indexTypeCount( indexTypeCount_ ) - , pIndexTypes( pIndexTypes_ ) - , pIndexTypeValues( pIndexTypeValues_ ) + : pNext{ pNext_ } + , tokenType{ tokenType_ } + , stream{ stream_ } + , offset{ offset_ } + , vertexBindingUnit{ vertexBindingUnit_ } + , vertexDynamicStride{ vertexDynamicStride_ } + , pushconstantPipelineLayout{ pushconstantPipelineLayout_ } + , pushconstantShaderStageFlags{ pushconstantShaderStageFlags_ } + , pushconstantOffset{ pushconstantOffset_ } + , pushconstantSize{ pushconstantSize_ } + , indirectStateFlags{ indirectStateFlags_ } + , indexTypeCount{ indexTypeCount_ } + , pIndexTypes{ pIndexTypes_ } + , pIndexTypeValues{ pIndexTypeValues_ } { } @@ -48634,13 +49021,13 @@ namespace VULKAN_HPP_NAMESPACE uint32_t streamCount_ = {}, const uint32_t * pStreamStrides_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , pipelineBindPoint( pipelineBindPoint_ ) - , tokenCount( tokenCount_ ) - , pTokens( pTokens_ ) - , streamCount( streamCount_ ) - , pStreamStrides( pStreamStrides_ ) + : pNext{ pNext_ } + , flags{ flags_ } + , pipelineBindPoint{ pipelineBindPoint_ } + , tokenCount{ tokenCount_ } + , pTokens{ pTokens_ } + , streamCount{ streamCount_ } + , pStreamStrides{ pStreamStrides_ } { } @@ -48818,8 +49205,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR InitializePerformanceApiInfoINTEL( void * pUserData_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , pUserData( pUserData_ ) + : pNext{ pNext_ } + , pUserData{ pUserData_ } { } @@ -48913,9 +49300,9 @@ namespace VULKAN_HPP_NAMESPACE 
VULKAN_HPP_CONSTEXPR InputAttachmentAspectReference( uint32_t subpass_ = {}, uint32_t inputAttachmentIndex_ = {}, VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {} ) VULKAN_HPP_NOEXCEPT - : subpass( subpass_ ) - , inputAttachmentIndex( inputAttachmentIndex_ ) - , aspectMask( aspectMask_ ) + : subpass{ subpass_ } + , inputAttachmentIndex{ inputAttachmentIndex_ } + , aspectMask{ aspectMask_ } { } @@ -49018,13 +49405,13 @@ namespace VULKAN_HPP_NAMESPACE uint32_t enabledExtensionCount_ = {}, const char * const * ppEnabledExtensionNames_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , pApplicationInfo( pApplicationInfo_ ) - , enabledLayerCount( enabledLayerCount_ ) - , ppEnabledLayerNames( ppEnabledLayerNames_ ) - , enabledExtensionCount( enabledExtensionCount_ ) - , ppEnabledExtensionNames( ppEnabledExtensionNames_ ) + : pNext{ pNext_ } + , flags{ flags_ } + , pApplicationInfo{ pApplicationInfo_ } + , enabledLayerCount{ enabledLayerCount_ } + , ppEnabledLayerNames{ ppEnabledLayerNames_ } + , enabledExtensionCount{ enabledExtensionCount_ } + , ppEnabledExtensionNames{ ppEnabledExtensionNames_ } { } @@ -49230,9 +49617,9 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR LatencySleepInfoNV( VULKAN_HPP_NAMESPACE::Semaphore signalSemaphore_ = {}, uint64_t value_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , signalSemaphore( signalSemaphore_ ) - , value( value_ ) + : pNext{ pNext_ } + , signalSemaphore{ signalSemaphore_ } + , value{ value_ } { } @@ -49334,10 +49721,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Bool32 lowLatencyBoost_ = {}, uint32_t minimumIntervalUs_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , lowLatencyMode( lowLatencyMode_ ) - , lowLatencyBoost( lowLatencyBoost_ ) - , minimumIntervalUs( minimumIntervalUs_ ) + : pNext{ pNext_ } + , lowLatencyMode{ lowLatencyMode_ } + , lowLatencyBoost{ lowLatencyBoost_ } + , minimumIntervalUs{ minimumIntervalUs_ } { } @@ -49451,8 +49838,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR LatencySubmissionPresentIdNV( uint64_t presentID_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , presentID( presentID_ ) + : pNext{ pNext_ } + , presentID{ presentID_ } { } @@ -49549,9 +49936,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR LatencySurfaceCapabilitiesNV( uint32_t presentModeCount_ = {}, VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , presentModeCount( presentModeCount_ ) - , pPresentModes( pPresentModes_ ) + : pNext{ pNext_ } + , presentModeCount{ presentModeCount_ } + , pPresentModes{ pPresentModes_ } { } @@ -49671,10 +50058,10 @@ namespace VULKAN_HPP_NAMESPACE uint32_t specVersion_ = {}, uint32_t implementationVersion_ = {}, std::array const & description_ = {} ) VULKAN_HPP_NOEXCEPT - : layerName( layerName_ ) - , specVersion( specVersion_ ) - , implementationVersion( implementationVersion_ ) - , description( description_ ) + : layerName{ layerName_ } + , specVersion{ specVersion_ } + , implementationVersion{ implementationVersion_ } + , description{ description_ } { } @@ -49682,26 +50069,6 @@ namespace VULKAN_HPP_NAMESPACE LayerProperties( VkLayerProperties const & rhs ) VULKAN_HPP_NOEXCEPT : LayerProperties( *reinterpret_cast( &rhs ) ) {} -# if !defined( 
VULKAN_HPP_DISABLE_ENHANCED_MODE )
-    LayerProperties( std::string const & layerName_,
-                     uint32_t            specVersion_           = {},
-                     uint32_t            implementationVersion_ = {},
-                     std::string const & description_           = {} )
-      : specVersion( specVersion_ ), implementationVersion( implementationVersion_ )
-    {
-      VULKAN_HPP_ASSERT( layerName_.size() < VK_MAX_EXTENSION_NAME_SIZE );
-#  if defined( WIN32 )
-      strncpy_s( layerName, VK_MAX_EXTENSION_NAME_SIZE, layerName_.data(), layerName_.size() );
-#  else
-      strncpy( layerName, layerName_.data(), std::min<size_t>( VK_MAX_EXTENSION_NAME_SIZE, layerName_.size() ) );
-#  endif
-
-      VULKAN_HPP_ASSERT( description_.size() < VK_MAX_DESCRIPTION_SIZE );
-#  if defined( WIN32 )
-      strncpy_s( description, VK_MAX_DESCRIPTION_SIZE, description_.data(), description_.size() );
-#  else
-      strncpy( description, description_.data(), std::min<size_t>( VK_MAX_DESCRIPTION_SIZE, description_.size() ) );
-#  endif
-    }
-#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
-
     LayerProperties & operator=( LayerProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
 #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
@@ -49780,11 +50147,11 @@ namespace VULKAN_HPP_NAMESPACE
                                        VULKAN_HPP_NAMESPACE::LayerSettingTypeEXT type_         = VULKAN_HPP_NAMESPACE::LayerSettingTypeEXT::eBool32,
                                        uint32_t                                  valueCount_   = {},
                                        const void *                              pValues_      = {} ) VULKAN_HPP_NOEXCEPT
-      : pLayerName( pLayerName_ )
-      , pSettingName( pSettingName_ )
-      , type( type_ )
-      , valueCount( valueCount_ )
-      , pValues( pValues_ )
+      : pLayerName{ pLayerName_ }
+      , pSettingName{ pSettingName_ }
+      , type{ type_ }
+      , valueCount{ valueCount_ }
+      , pValues{ pValues_ }
     {
     }
@@ -49793,17 +50160,96 @@ namespace VULKAN_HPP_NAMESPACE
     LayerSettingEXT( VkLayerSettingEXT const & rhs ) VULKAN_HPP_NOEXCEPT : LayerSettingEXT( *reinterpret_cast<LayerSettingEXT const *>( &rhs ) ) {}
 
 # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
-    template <typename T>
-    LayerSettingEXT( const char * pLayerName_,
-                     const char * pSettingName_,
-                     VULKAN_HPP_NAMESPACE::LayerSettingTypeEXT type_,
-                     VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<T> const & values_ )
+    // NOTE: you need to provide the type because vk::Bool32 and uint32_t are indistinguishable! 
+ LayerSettingEXT( char const * pLayerName_, + char const * pSettingName_, + VULKAN_HPP_NAMESPACE::LayerSettingTypeEXT type_, + vk::ArrayProxyNoTemporaries const & values_ ) : pLayerName( pLayerName_ ) , pSettingName( pSettingName_ ) , type( type_ ) - , valueCount( static_cast( values_.size() * sizeof( T ) ) ) + , valueCount( static_cast( values_.size() ) ) , pValues( values_.data() ) { + VULKAN_HPP_ASSERT( VULKAN_HPP_NAMESPACE::isSameType( type ) ); + } + + LayerSettingEXT( char const * pLayerName_, + char const * pSettingName_, + VULKAN_HPP_NAMESPACE::LayerSettingTypeEXT type_, + vk::ArrayProxyNoTemporaries const & values_ ) + : pLayerName( pLayerName_ ) + , pSettingName( pSettingName_ ) + , type( type_ ) + , valueCount( static_cast( values_.size() ) ) + , pValues( values_.data() ) + { + VULKAN_HPP_ASSERT( VULKAN_HPP_NAMESPACE::isSameType( type ) ); + } + + LayerSettingEXT( char const * pLayerName_, + char const * pSettingName_, + VULKAN_HPP_NAMESPACE::LayerSettingTypeEXT type_, + vk::ArrayProxyNoTemporaries const & values_ ) + : pLayerName( pLayerName_ ) + , pSettingName( pSettingName_ ) + , type( type_ ) + , valueCount( static_cast( values_.size() ) ) + , pValues( values_.data() ) + { + VULKAN_HPP_ASSERT( VULKAN_HPP_NAMESPACE::isSameType( type ) ); + } + + LayerSettingEXT( char const * pLayerName_, + char const * pSettingName_, + VULKAN_HPP_NAMESPACE::LayerSettingTypeEXT type_, + vk::ArrayProxyNoTemporaries const & values_ ) + : pLayerName( pLayerName_ ) + , pSettingName( pSettingName_ ) + , type( type_ ) + , valueCount( static_cast( values_.size() ) ) + , pValues( values_.data() ) + { + VULKAN_HPP_ASSERT( VULKAN_HPP_NAMESPACE::isSameType( type ) ); + } + + LayerSettingEXT( char const * pLayerName_, + char const * pSettingName_, + VULKAN_HPP_NAMESPACE::LayerSettingTypeEXT type_, + vk::ArrayProxyNoTemporaries const & values_ ) + : pLayerName( pLayerName_ ) + , pSettingName( pSettingName_ ) + , type( type_ ) + , valueCount( static_cast( values_.size() ) ) + , pValues( values_.data() ) + { + VULKAN_HPP_ASSERT( VULKAN_HPP_NAMESPACE::isSameType( type ) ); + } + + LayerSettingEXT( char const * pLayerName_, + char const * pSettingName_, + VULKAN_HPP_NAMESPACE::LayerSettingTypeEXT type_, + vk::ArrayProxyNoTemporaries const & values_ ) + : pLayerName( pLayerName_ ) + , pSettingName( pSettingName_ ) + , type( type_ ) + , valueCount( static_cast( values_.size() ) ) + , pValues( values_.data() ) + { + VULKAN_HPP_ASSERT( VULKAN_HPP_NAMESPACE::isSameType( type ) ); + } + + LayerSettingEXT( char const * pLayerName_, + char const * pSettingName_, + VULKAN_HPP_NAMESPACE::LayerSettingTypeEXT type_, + vk::ArrayProxyNoTemporaries const & values_ ) + : pLayerName( pLayerName_ ) + , pSettingName( pSettingName_ ) + , type( type_ ) + , valueCount( static_cast( values_.size() ) ) + , pValues( values_.data() ) + { + VULKAN_HPP_ASSERT( VULKAN_HPP_NAMESPACE::isSameType( type ) ); } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ @@ -49841,17 +50287,52 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - VULKAN_HPP_CONSTEXPR_14 LayerSettingEXT & setPValues( const void * pValues_ ) VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + LayerSettingEXT & setValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & values_ ) VULKAN_HPP_NOEXCEPT { - pValues = pValues_; + valueCount = static_cast( values_.size() ); + pValues = values_.data(); return *this; } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - template - LayerSettingEXT & setValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const 
& values_ ) VULKAN_HPP_NOEXCEPT + LayerSettingEXT & setValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & values_ ) VULKAN_HPP_NOEXCEPT { - valueCount = static_cast( values_.size() * sizeof( T ) ); + valueCount = static_cast( values_.size() ); + pValues = values_.data(); + return *this; + } + + LayerSettingEXT & setValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & values_ ) VULKAN_HPP_NOEXCEPT + { + valueCount = static_cast( values_.size() ); + pValues = values_.data(); + return *this; + } + + LayerSettingEXT & setValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & values_ ) VULKAN_HPP_NOEXCEPT + { + valueCount = static_cast( values_.size() ); + pValues = values_.data(); + return *this; + } + + LayerSettingEXT & setValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & values_ ) VULKAN_HPP_NOEXCEPT + { + valueCount = static_cast( values_.size() ); + pValues = values_.data(); + return *this; + } + + LayerSettingEXT & setValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & values_ ) VULKAN_HPP_NOEXCEPT + { + valueCount = static_cast( values_.size() ); + pValues = values_.data(); + return *this; + } + + LayerSettingEXT & setValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & values_ ) VULKAN_HPP_NOEXCEPT + { + valueCount = static_cast( values_.size() ); pValues = values_.data(); return *this; } @@ -49931,9 +50412,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR LayerSettingsCreateInfoEXT( uint32_t settingCount_ = {}, const VULKAN_HPP_NAMESPACE::LayerSettingEXT * pSettings_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , settingCount( settingCount_ ) - , pSettings( pSettings_ ) + : pNext{ pNext_ } + , settingCount{ settingCount_ } + , pSettings{ pSettings_ } { } @@ -50056,9 +50537,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR MacOSSurfaceCreateInfoMVK( VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateFlagsMVK flags_ = {}, const void * pView_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , pView( pView_ ) + : pNext{ pNext_ } + , flags{ flags_ } + , pView{ pView_ } { } @@ -50165,10 +50646,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , memory( memory_ ) - , offset( offset_ ) - , size( size_ ) + : pNext{ pNext_ } + , memory{ memory_ } + , offset{ offset_ } + , size{ size_ } { } @@ -50280,9 +50761,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR MemoryAllocateFlagsInfo( VULKAN_HPP_NAMESPACE::MemoryAllocateFlags flags_ = {}, uint32_t deviceMask_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , deviceMask( deviceMask_ ) + : pNext{ pNext_ } + , flags{ flags_ } + , deviceMask{ deviceMask_ } { } @@ -50388,9 +50869,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR MemoryAllocateInfo( VULKAN_HPP_NAMESPACE::DeviceSize allocationSize_ = {}, uint32_t memoryTypeIndex_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , allocationSize( allocationSize_ ) - , memoryTypeIndex( memoryTypeIndex_ ) + : pNext{ pNext_ } + , allocationSize{ allocationSize_ } + , memoryTypeIndex{ memoryTypeIndex_ } { } @@ -50491,9 +50972,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR MemoryBarrier( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {}, VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {}, const void * 
pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , srcAccessMask( srcAccessMask_ ) - , dstAccessMask( dstAccessMask_ ) + : pNext{ pNext_ } + , srcAccessMask{ srcAccessMask_ } + , dstAccessMask{ dstAccessMask_ } { } @@ -50597,9 +51078,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR MemoryDedicatedAllocateInfo( VULKAN_HPP_NAMESPACE::Image image_ = {}, VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , image( image_ ) - , buffer( buffer_ ) + : pNext{ pNext_ } + , image{ image_ } + , buffer{ buffer_ } { } @@ -50705,9 +51186,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR MemoryDedicatedRequirements( VULKAN_HPP_NAMESPACE::Bool32 prefersDedicatedAllocation_ = {}, VULKAN_HPP_NAMESPACE::Bool32 requiresDedicatedAllocation_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , prefersDedicatedAllocation( prefersDedicatedAllocation_ ) - , requiresDedicatedAllocation( requiresDedicatedAllocation_ ) + : pNext{ pNext_ } + , prefersDedicatedAllocation{ prefersDedicatedAllocation_ } + , requiresDedicatedAllocation{ requiresDedicatedAllocation_ } { } @@ -50792,8 +51273,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR MemoryFdPropertiesKHR( uint32_t memoryTypeBits_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , memoryTypeBits( memoryTypeBits_ ) + : pNext{ pNext_ } + , memoryTypeBits{ memoryTypeBits_ } { } @@ -50876,8 +51357,8 @@ namespace VULKAN_HPP_NAMESPACE # if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR MemoryGetAndroidHardwareBufferInfoANDROID( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , memory( memory_ ) + : pNext{ pNext_ } + , memory{ memory_ } { } @@ -50976,9 +51457,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , memory( memory_ ) - , handleType( handleType_ ) + : pNext{ pNext_ } + , memory{ memory_ } + , handleType{ handleType_ } { } @@ -51083,9 +51564,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , memory( memory_ ) - , handleType( handleType_ ) + : pNext{ pNext_ } + , memory{ memory_ } + , handleType{ handleType_ } { } @@ -51195,9 +51676,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , memory( memory_ ) - , handleType( handleType_ ) + : pNext{ pNext_ } + , memory{ memory_ } + , handleType{ handleType_ } { } @@ -51308,9 +51789,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , memory( memory_ ) - , handleType( 
handleType_ ) + : pNext{ pNext_ } + , memory{ memory_ } + , handleType{ handleType_ } { } @@ -51414,8 +51895,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR MemoryHeap( VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, VULKAN_HPP_NAMESPACE::MemoryHeapFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT - : size( size_ ) - , flags( flags_ ) + : size{ size_ } + , flags{ flags_ } { } @@ -51486,8 +51967,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR MemoryHostPointerPropertiesEXT( uint32_t memoryTypeBits_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , memoryTypeBits( memoryTypeBits_ ) + : pNext{ pNext_ } + , memoryTypeBits{ memoryTypeBits_ } { } @@ -51572,11 +52053,11 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , memory( memory_ ) - , offset( offset_ ) - , size( size_ ) + : pNext{ pNext_ } + , flags{ flags_ } + , memory{ memory_ } + , offset{ offset_ } + , size{ size_ } { } @@ -51695,8 +52176,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR MemoryMapPlacedInfoEXT( void * pPlacedAddress_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , pPlacedAddress( pPlacedAddress_ ) + : pNext{ pNext_ } + , pPlacedAddress{ pPlacedAddress_ } { } @@ -51791,8 +52272,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR MemoryOpaqueCaptureAddressAllocateInfo( uint64_t opaqueCaptureAddress_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , opaqueCaptureAddress( opaqueCaptureAddress_ ) + : pNext{ pNext_ } + , opaqueCaptureAddress{ opaqueCaptureAddress_ } { } @@ -51889,8 +52370,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR MemoryPriorityAllocateInfoEXT( float priority_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , priority( priority_ ) + : pNext{ pNext_ } + , priority{ priority_ } { } @@ -51984,9 +52465,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR MemoryRequirements( VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize alignment_ = {}, uint32_t memoryTypeBits_ = {} ) VULKAN_HPP_NOEXCEPT - : size( size_ ) - , alignment( alignment_ ) - , memoryTypeBits( memoryTypeBits_ ) + : size{ size_ } + , alignment{ alignment_ } + , memoryTypeBits{ memoryTypeBits_ } { } @@ -52058,8 +52539,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR MemoryRequirements2( VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , memoryRequirements( memoryRequirements_ ) + : pNext{ pNext_ } + , memoryRequirements{ memoryRequirements_ } { } @@ -52138,8 +52619,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR MemoryType( VULKAN_HPP_NAMESPACE::MemoryPropertyFlags propertyFlags_ = {}, uint32_t heapIndex_ = {} ) VULKAN_HPP_NOEXCEPT - : propertyFlags( propertyFlags_ ) - , heapIndex( heapIndex_ ) + : propertyFlags{ propertyFlags_ } + , heapIndex{ heapIndex_ } { } @@ -52212,9 +52693,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR MemoryUnmapInfoKHR( 
VULKAN_HPP_NAMESPACE::MemoryUnmapFlagsKHR flags_ = {}, VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , memory( memory_ ) + : pNext{ pNext_ } + , flags{ flags_ } + , memory{ memory_ } { } @@ -52317,8 +52798,8 @@ namespace VULKAN_HPP_NAMESPACE # if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR MemoryWin32HandlePropertiesKHR( uint32_t memoryTypeBits_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , memoryTypeBits( memoryTypeBits_ ) + : pNext{ pNext_ } + , memoryTypeBits{ memoryTypeBits_ } { } @@ -52401,8 +52882,8 @@ namespace VULKAN_HPP_NAMESPACE # if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR MemoryZirconHandlePropertiesFUCHSIA( uint32_t memoryTypeBits_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , memoryTypeBits( memoryTypeBits_ ) + : pNext{ pNext_ } + , memoryTypeBits{ memoryTypeBits_ } { } @@ -52487,9 +52968,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR MetalSurfaceCreateInfoEXT( VULKAN_HPP_NAMESPACE::MetalSurfaceCreateFlagsEXT flags_ = {}, const CAMetalLayer * pLayer_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , pLayer( pLayer_ ) + : pNext{ pNext_ } + , flags{ flags_ } + , pLayer{ pLayer_ } { } @@ -52606,18 +53087,18 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR triangleArray_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize triangleArrayStride_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , type( type_ ) - , flags( flags_ ) - , mode( mode_ ) - , dstMicromap( dstMicromap_ ) - , usageCountsCount( usageCountsCount_ ) - , pUsageCounts( pUsageCounts_ ) - , ppUsageCounts( ppUsageCounts_ ) - , data( data_ ) - , scratchData( scratchData_ ) - , triangleArray( triangleArray_ ) - , triangleArrayStride( triangleArrayStride_ ) + : pNext{ pNext_ } + , type{ type_ } + , flags{ flags_ } + , mode{ mode_ } + , dstMicromap{ dstMicromap_ } + , usageCountsCount{ usageCountsCount_ } + , pUsageCounts{ pUsageCounts_ } + , ppUsageCounts{ ppUsageCounts_ } + , data{ data_ } + , scratchData{ scratchData_ } + , triangleArray{ triangleArray_ } + , triangleArrayStride{ triangleArrayStride_ } { } @@ -52837,10 +53318,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DeviceSize buildScratchSize_ = {}, VULKAN_HPP_NAMESPACE::Bool32 discardable_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , micromapSize( micromapSize_ ) - , buildScratchSize( buildScratchSize_ ) - , discardable( discardable_ ) + : pNext{ pNext_ } + , micromapSize{ micromapSize_ } + , buildScratchSize{ buildScratchSize_ } + , discardable{ discardable_ } { } @@ -52960,13 +53441,13 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::MicromapTypeEXT type_ = VULKAN_HPP_NAMESPACE::MicromapTypeEXT::eOpacityMicromap, VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , createFlags( createFlags_ ) - , buffer( buffer_ ) - , offset( offset_ ) - , size( size_ ) - , type( type_ ) - , deviceAddress( deviceAddress_ ) + : pNext{ pNext_ } + , createFlags{ createFlags_ } + , buffer{ buffer_ } + , offset{ offset_ } + , size{ size_ } + , type{ type_ } + , deviceAddress{ deviceAddress_ } { } @@ -53101,9 +53582,9 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR 
MicromapTriangleEXT( uint32_t dataOffset_ = {}, uint16_t subdivisionLevel_ = {}, uint16_t format_ = {} ) VULKAN_HPP_NOEXCEPT - : dataOffset( dataOffset_ ) - , subdivisionLevel( subdivisionLevel_ ) - , format( format_ ) + : dataOffset{ dataOffset_ } + , subdivisionLevel{ subdivisionLevel_ } + , format{ format_ } { } @@ -53197,8 +53678,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR MicromapVersionInfoEXT( const uint8_t * pVersionData_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , pVersionData( pVersionData_ ) + : pNext{ pNext_ } + , pVersionData{ pVersionData_ } { } @@ -53290,9 +53771,9 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR MultiDrawIndexedInfoEXT( uint32_t firstIndex_ = {}, uint32_t indexCount_ = {}, int32_t vertexOffset_ = {} ) VULKAN_HPP_NOEXCEPT - : firstIndex( firstIndex_ ) - , indexCount( indexCount_ ) - , vertexOffset( vertexOffset_ ) + : firstIndex{ firstIndex_ } + , indexCount{ indexCount_ } + , vertexOffset{ vertexOffset_ } { } @@ -53384,8 +53865,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR MultiDrawInfoEXT( uint32_t firstVertex_ = {}, uint32_t vertexCount_ = {} ) VULKAN_HPP_NOEXCEPT - : firstVertex( firstVertex_ ) - , vertexCount( vertexCount_ ) + : firstVertex{ firstVertex_ } + , vertexCount{ vertexCount_ } { } @@ -53470,8 +53951,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR MultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::Extent2D maxSampleLocationGridSize_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , maxSampleLocationGridSize( maxSampleLocationGridSize_ ) + : pNext{ pNext_ } + , maxSampleLocationGridSize{ maxSampleLocationGridSize_ } { } @@ -53555,9 +54036,9 @@ namespace VULKAN_HPP_NAMESPACE MultisampledRenderToSingleSampledInfoEXT( VULKAN_HPP_NAMESPACE::Bool32 multisampledRenderToSingleSampledEnable_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , multisampledRenderToSingleSampledEnable( multisampledRenderToSingleSampledEnable_ ) - , rasterizationSamples( rasterizationSamples_ ) + : pNext{ pNext_ } + , multisampledRenderToSingleSampledEnable{ multisampledRenderToSingleSampledEnable_ } + , rasterizationSamples{ rasterizationSamples_ } { } @@ -53667,9 +54148,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR MultiviewPerViewAttributesInfoNVX( VULKAN_HPP_NAMESPACE::Bool32 perViewAttributes_ = {}, VULKAN_HPP_NAMESPACE::Bool32 perViewAttributesPositionXOnly_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , perViewAttributes( perViewAttributes_ ) - , perViewAttributesPositionXOnly( perViewAttributesPositionXOnly_ ) + : pNext{ pNext_ } + , perViewAttributes{ perViewAttributes_ } + , perViewAttributesPositionXOnly{ perViewAttributesPositionXOnly_ } { } @@ -53775,9 +54256,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM( uint32_t perViewRenderAreaCount_ = {}, const VULKAN_HPP_NAMESPACE::Rect2D * pPerViewRenderAreas_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , perViewRenderAreaCount( perViewRenderAreaCount_ ) - , pPerViewRenderAreas( pPerViewRenderAreas_ ) + : pNext{ pNext_ } + , 
perViewRenderAreaCount{ perViewRenderAreaCount_ } + , pPerViewRenderAreas{ pPerViewRenderAreas_ } { } @@ -53900,8 +54381,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR MutableDescriptorTypeListEXT( uint32_t descriptorTypeCount_ = {}, const VULKAN_HPP_NAMESPACE::DescriptorType * pDescriptorTypes_ = {} ) VULKAN_HPP_NOEXCEPT - : descriptorTypeCount( descriptorTypeCount_ ) - , pDescriptorTypes( pDescriptorTypes_ ) + : descriptorTypeCount{ descriptorTypeCount_ } + , pDescriptorTypes{ pDescriptorTypes_ } { } @@ -54011,9 +54492,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR MutableDescriptorTypeCreateInfoEXT( uint32_t mutableDescriptorTypeListCount_ = {}, const VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListEXT * pMutableDescriptorTypeLists_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , mutableDescriptorTypeListCount( mutableDescriptorTypeListCount_ ) - , pMutableDescriptorTypeLists( pMutableDescriptorTypeLists_ ) + : pNext{ pNext_ } + , mutableDescriptorTypeListCount{ mutableDescriptorTypeListCount_ } + , pMutableDescriptorTypeLists{ pMutableDescriptorTypeLists_ } { } @@ -54146,8 +54627,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR OpaqueCaptureDescriptorDataCreateInfoEXT( const void * opaqueCaptureDescriptorData_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , opaqueCaptureDescriptorData( opaqueCaptureDescriptorData_ ) + : pNext{ pNext_ } + , opaqueCaptureDescriptorData{ opaqueCaptureDescriptorData_ } { } @@ -54246,10 +54727,10 @@ namespace VULKAN_HPP_NAMESPACE uint32_t regionCount_ = {}, const VULKAN_HPP_NAMESPACE::Rect2D * pRegions_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , regionCount( regionCount_ ) - , pRegions( pRegions_ ) + : pNext{ pNext_ } + , flags{ flags_ } + , regionCount{ regionCount_ } + , pRegions{ pRegions_ } { } @@ -54382,8 +54863,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR OpticalFlowImageFormatInfoNV( VULKAN_HPP_NAMESPACE::OpticalFlowUsageFlagsNV usage_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , usage( usage_ ) + : pNext{ pNext_ } + , usage{ usage_ } { } @@ -54479,8 +54960,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR OpticalFlowImageFormatPropertiesNV( VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , format( format_ ) + : pNext{ pNext_ } + , format{ format_ } { } @@ -54571,16 +55052,16 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::OpticalFlowPerformanceLevelNV performanceLevel_ = VULKAN_HPP_NAMESPACE::OpticalFlowPerformanceLevelNV::eUnknown, VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateFlagsNV flags_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , width( width_ ) - , height( height_ ) - , imageFormat( imageFormat_ ) - , flowVectorFormat( flowVectorFormat_ ) - , costFormat( costFormat_ ) - , outputGridSize( outputGridSize_ ) - , hintGridSize( hintGridSize_ ) - , performanceLevel( performanceLevel_ ) - , flags( flags_ ) + : pNext{ pNext_ } + , width{ width_ } + , height{ height_ } + , imageFormat{ imageFormat_ } + , flowVectorFormat{ flowVectorFormat_ } + , costFormat{ costFormat_ } + , outputGridSize{ outputGridSize_ } + 
, hintGridSize{ hintGridSize_ } + , performanceLevel{ performanceLevel_ } + , flags{ flags_ } { } @@ -54749,10 +55230,10 @@ namespace VULKAN_HPP_NAMESPACE uint32_t size_ = {}, const void * pPrivateData_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , id( id_ ) - , size( size_ ) - , pPrivateData( pPrivateData_ ) + : pNext{ pNext_ } + , id{ id_ } + , size{ size_ } + , pPrivateData{ pPrivateData_ } { } @@ -54862,8 +55343,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR OutOfBandQueueTypeInfoNV( VULKAN_HPP_NAMESPACE::OutOfBandQueueTypeNV queueType_ = VULKAN_HPP_NAMESPACE::OutOfBandQueueTypeNV::eRender, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , queueType( queueType_ ) + : pNext{ pNext_ } + , queueType{ queueType_ } { } @@ -54959,11 +55440,11 @@ namespace VULKAN_HPP_NAMESPACE uint64_t actualPresentTime_ = {}, uint64_t earliestPresentTime_ = {}, uint64_t presentMargin_ = {} ) VULKAN_HPP_NOEXCEPT - : presentID( presentID_ ) - , desiredPresentTime( desiredPresentTime_ ) - , actualPresentTime( actualPresentTime_ ) - , earliestPresentTime( earliestPresentTime_ ) - , presentMargin( presentMargin_ ) + : presentID{ presentID_ } + , desiredPresentTime{ desiredPresentTime_ } + , actualPresentTime{ actualPresentTime_ } + , earliestPresentTime{ earliestPresentTime_ } + , presentMargin{ presentMargin_ } { } @@ -55044,8 +55525,8 @@ namespace VULKAN_HPP_NAMESPACE PerformanceConfigurationAcquireInfoINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL type_ = VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL::eCommandQueueMetricsDiscoveryActivated, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , type( type_ ) + : pNext{ pNext_ } + , type{ type_ } { } @@ -55146,11 +55627,11 @@ namespace VULKAN_HPP_NAMESPACE std::array const & category_ = {}, std::array const & description_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , name( name_ ) - , category( category_ ) - , description( description_ ) + : pNext{ pNext_ } + , flags{ flags_ } + , name{ name_ } + , category{ category_ } + , description{ description_ } { } @@ -55161,37 +55642,6 @@ namespace VULKAN_HPP_NAMESPACE { } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - PerformanceCounterDescriptionKHR( VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionFlagsKHR flags_, - std::string const & name_, - std::string const & category_ = {}, - std::string const & description_ = {}, - void * pNext_ = nullptr ) - : pNext( pNext_ ), flags( flags_ ) - { - VULKAN_HPP_ASSERT( name_.size() < VK_MAX_DESCRIPTION_SIZE ); -# if defined( WIN32 ) - strncpy_s( name, VK_MAX_DESCRIPTION_SIZE, name_.data(), name_.size() ); -# else - strncpy( name, name_.data(), std::min( VK_MAX_DESCRIPTION_SIZE, name_.size() ) ); -# endif - - VULKAN_HPP_ASSERT( category_.size() < VK_MAX_DESCRIPTION_SIZE ); -# if defined( WIN32 ) - strncpy_s( category, VK_MAX_DESCRIPTION_SIZE, category_.data(), category_.size() ); -# else - strncpy( category, category_.data(), std::min( VK_MAX_DESCRIPTION_SIZE, category_.size() ) ); -# endif - - VULKAN_HPP_ASSERT( description_.size() < VK_MAX_DESCRIPTION_SIZE ); -# if defined( WIN32 ) - strncpy_s( description, VK_MAX_DESCRIPTION_SIZE, description_.data(), description_.size() ); -# else - strncpy( description, description_.data(), std::min( VK_MAX_DESCRIPTION_SIZE, description_.size() ) ); -# endif - } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - 
PerformanceCounterDescriptionKHR & operator=( PerformanceCounterDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ @@ -55288,11 +55738,11 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR storage_ = VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR::eInt32, std::array const & uuid_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , unit( unit_ ) - , scope( scope_ ) - , storage( storage_ ) - , uuid( uuid_ ) + : pNext{ pNext_ } + , unit{ unit_ } + , scope{ scope_ } + , storage{ storage_ } + , uuid{ uuid_ } { } @@ -55456,8 +55906,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PerformanceMarkerInfoINTEL( uint64_t marker_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , marker( marker_ ) + : pNext{ pNext_ } + , marker{ marker_ } { } @@ -55556,10 +56006,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Bool32 enable_ = {}, uint64_t parameter_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , type( type_ ) - , enable( enable_ ) - , parameter( parameter_ ) + : pNext{ pNext_ } + , type{ type_ } + , enable{ enable_ } + , parameter{ parameter_ } { } @@ -55672,8 +56122,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PerformanceQuerySubmitInfoKHR( uint32_t counterPassIndex_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , counterPassIndex( counterPassIndex_ ) + : pNext{ pNext_ } + , counterPassIndex{ counterPassIndex_ } { } @@ -55768,8 +56218,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PerformanceStreamMarkerInfoINTEL( uint32_t marker_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , marker( marker_ ) + : pNext{ pNext_ } + , marker{ marker_ } { } @@ -55934,8 +56384,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR_14 PerformanceValueINTEL( VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL type_ = VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL::eUint32, VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL data_ = {} ) VULKAN_HPP_NOEXCEPT - : type( type_ ) - , data( data_ ) + : type{ type_ } + , data{ data_ } { } @@ -55995,11 +56445,11 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , storageBuffer16BitAccess( storageBuffer16BitAccess_ ) - , uniformAndStorageBuffer16BitAccess( uniformAndStorageBuffer16BitAccess_ ) - , storagePushConstant16( storagePushConstant16_ ) - , storageInputOutput16( storageInputOutput16_ ) + : pNext{ pNext_ } + , storageBuffer16BitAccess{ storageBuffer16BitAccess_ } + , uniformAndStorageBuffer16BitAccess{ uniformAndStorageBuffer16BitAccess_ } + , storagePushConstant16{ storagePushConstant16_ } + , storageInputOutput16{ storageInputOutput16_ } { } @@ -56130,9 +56580,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR PhysicalDevice4444FormatsFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 formatA4R4G4B4_ = {}, VULKAN_HPP_NAMESPACE::Bool32 formatA4B4G4R4_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , formatA4R4G4B4( formatA4R4G4B4_ ) - , formatA4B4G4R4( formatA4B4G4R4_ ) + : pNext{ pNext_ } + , formatA4R4G4B4{ formatA4R4G4B4_ } + , formatA4B4G4R4{ 
formatA4B4G4R4_ } { } @@ -56237,10 +56687,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess_ = {}, VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , storageBuffer8BitAccess( storageBuffer8BitAccess_ ) - , uniformAndStorageBuffer8BitAccess( uniformAndStorageBuffer8BitAccess_ ) - , storagePushConstant8( storagePushConstant8_ ) + : pNext{ pNext_ } + , storageBuffer8BitAccess{ storageBuffer8BitAccess_ } + , uniformAndStorageBuffer8BitAccess{ uniformAndStorageBuffer8BitAccess_ } + , storagePushConstant8{ storagePushConstant8_ } { } @@ -56360,8 +56810,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceASTCDecodeFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 decodeModeSharedExponent_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , decodeModeSharedExponent( decodeModeSharedExponent_ ) + : pNext{ pNext_ } + , decodeModeSharedExponent{ decodeModeSharedExponent_ } { } @@ -56463,12 +56913,12 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureHostCommands_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingAccelerationStructureUpdateAfterBind_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , accelerationStructure( accelerationStructure_ ) - , accelerationStructureCaptureReplay( accelerationStructureCaptureReplay_ ) - , accelerationStructureIndirectBuild( accelerationStructureIndirectBuild_ ) - , accelerationStructureHostCommands( accelerationStructureHostCommands_ ) - , descriptorBindingAccelerationStructureUpdateAfterBind( descriptorBindingAccelerationStructureUpdateAfterBind_ ) + : pNext{ pNext_ } + , accelerationStructure{ accelerationStructure_ } + , accelerationStructureCaptureReplay{ accelerationStructureCaptureReplay_ } + , accelerationStructureIndirectBuild{ accelerationStructureIndirectBuild_ } + , accelerationStructureHostCommands{ accelerationStructureHostCommands_ } + , descriptorBindingAccelerationStructureUpdateAfterBind{ descriptorBindingAccelerationStructureUpdateAfterBind_ } { } @@ -56621,15 +57071,15 @@ namespace VULKAN_HPP_NAMESPACE uint32_t maxDescriptorSetUpdateAfterBindAccelerationStructures_ = {}, uint32_t minAccelerationStructureScratchOffsetAlignment_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , maxGeometryCount( maxGeometryCount_ ) - , maxInstanceCount( maxInstanceCount_ ) - , maxPrimitiveCount( maxPrimitiveCount_ ) - , maxPerStageDescriptorAccelerationStructures( maxPerStageDescriptorAccelerationStructures_ ) - , maxPerStageDescriptorUpdateAfterBindAccelerationStructures( maxPerStageDescriptorUpdateAfterBindAccelerationStructures_ ) - , maxDescriptorSetAccelerationStructures( maxDescriptorSetAccelerationStructures_ ) - , maxDescriptorSetUpdateAfterBindAccelerationStructures( maxDescriptorSetUpdateAfterBindAccelerationStructures_ ) - , minAccelerationStructureScratchOffsetAlignment( minAccelerationStructureScratchOffsetAlignment_ ) + : pNext{ pNext_ } + , maxGeometryCount{ maxGeometryCount_ } + , maxInstanceCount{ maxInstanceCount_ } + , maxPrimitiveCount{ maxPrimitiveCount_ } + , maxPerStageDescriptorAccelerationStructures{ maxPerStageDescriptorAccelerationStructures_ } + , maxPerStageDescriptorUpdateAfterBindAccelerationStructures{ maxPerStageDescriptorUpdateAfterBindAccelerationStructures_ } + , maxDescriptorSetAccelerationStructures{ 
maxDescriptorSetAccelerationStructures_ } + , maxDescriptorSetUpdateAfterBindAccelerationStructures{ maxDescriptorSetUpdateAfterBindAccelerationStructures_ } + , minAccelerationStructureScratchOffsetAlignment{ minAccelerationStructureScratchOffsetAlignment_ } { } @@ -56743,8 +57193,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceAddressBindingReportFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 reportAddressBinding_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , reportAddressBinding( reportAddressBinding_ ) + : pNext{ pNext_ } + , reportAddressBinding{ reportAddressBinding_ } { } @@ -56842,8 +57292,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceAmigoProfilingFeaturesSEC( VULKAN_HPP_NAMESPACE::Bool32 amigoProfiling_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , amigoProfiling( amigoProfiling_ ) + : pNext{ pNext_ } + , amigoProfiling{ amigoProfiling_ } { } @@ -56929,6 +57379,102 @@ namespace VULKAN_HPP_NAMESPACE using Type = PhysicalDeviceAmigoProfilingFeaturesSEC; }; + struct PhysicalDeviceAntiLagFeaturesAMD + { + using NativeType = VkPhysicalDeviceAntiLagFeaturesAMD; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceAntiLagFeaturesAMD; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceAntiLagFeaturesAMD( VULKAN_HPP_NAMESPACE::Bool32 antiLag_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , antiLag{ antiLag_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceAntiLagFeaturesAMD( PhysicalDeviceAntiLagFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceAntiLagFeaturesAMD( VkPhysicalDeviceAntiLagFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceAntiLagFeaturesAMD( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceAntiLagFeaturesAMD & operator=( PhysicalDeviceAntiLagFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceAntiLagFeaturesAMD & operator=( VkPhysicalDeviceAntiLagFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAntiLagFeaturesAMD & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAntiLagFeaturesAMD & setAntiLag( VULKAN_HPP_NAMESPACE::Bool32 antiLag_ ) VULKAN_HPP_NOEXCEPT + { + antiLag = antiLag_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceAntiLagFeaturesAMD const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceAntiLagFeaturesAMD &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, antiLag ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceAntiLagFeaturesAMD const & ) const = default; +#else + bool operator==( PhysicalDeviceAntiLagFeaturesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == 
rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( antiLag == rhs.antiLag ); +# endif + } + + bool operator!=( PhysicalDeviceAntiLagFeaturesAMD const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceAntiLagFeaturesAMD; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 antiLag = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceAntiLagFeaturesAMD; + }; + struct PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT { using NativeType = VkPhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT; @@ -56939,8 +57485,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 attachmentFeedbackLoopDynamicState_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , attachmentFeedbackLoopDynamicState( attachmentFeedbackLoopDynamicState_ ) + : pNext{ pNext_ } + , attachmentFeedbackLoopDynamicState{ attachmentFeedbackLoopDynamicState_ } { } @@ -57041,8 +57587,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 attachmentFeedbackLoopLayout_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , attachmentFeedbackLoopLayout( attachmentFeedbackLoopLayout_ ) + : pNext{ pNext_ } + , attachmentFeedbackLoopLayout{ attachmentFeedbackLoopLayout_ } { } @@ -57141,8 +57687,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceBlendOperationAdvancedFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCoherentOperations_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , advancedBlendCoherentOperations( advancedBlendCoherentOperations_ ) + : pNext{ pNext_ } + , advancedBlendCoherentOperations{ advancedBlendCoherentOperations_ } { } @@ -57245,13 +57791,13 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCorrelatedOverlap_ = {}, VULKAN_HPP_NAMESPACE::Bool32 advancedBlendAllOperations_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , advancedBlendMaxColorAttachments( advancedBlendMaxColorAttachments_ ) - , advancedBlendIndependentBlend( advancedBlendIndependentBlend_ ) - , advancedBlendNonPremultipliedSrcColor( advancedBlendNonPremultipliedSrcColor_ ) - , advancedBlendNonPremultipliedDstColor( advancedBlendNonPremultipliedDstColor_ ) - , advancedBlendCorrelatedOverlap( advancedBlendCorrelatedOverlap_ ) - , advancedBlendAllOperations( advancedBlendAllOperations_ ) + : pNext{ pNext_ } + , advancedBlendMaxColorAttachments{ advancedBlendMaxColorAttachments_ } + , advancedBlendIndependentBlend{ advancedBlendIndependentBlend_ } + , advancedBlendNonPremultipliedSrcColor{ advancedBlendNonPremultipliedSrcColor_ } + , advancedBlendNonPremultipliedDstColor{ advancedBlendNonPremultipliedDstColor_ } + , advancedBlendCorrelatedOverlap{ advancedBlendCorrelatedOverlap_ } + , advancedBlendAllOperations{ advancedBlendAllOperations_ } { } @@ -57359,9 +57905,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR PhysicalDeviceBorderColorSwizzleFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 borderColorSwizzle_ = {}, VULKAN_HPP_NAMESPACE::Bool32 borderColorSwizzleFromImage_ = {}, void * pNext_ = 
nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , borderColorSwizzle( borderColorSwizzle_ ) - , borderColorSwizzleFromImage( borderColorSwizzleFromImage_ ) + : pNext{ pNext_ } + , borderColorSwizzle{ borderColorSwizzle_ } + , borderColorSwizzleFromImage{ borderColorSwizzleFromImage_ } { } @@ -57469,10 +58015,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ = {}, VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , bufferDeviceAddress( bufferDeviceAddress_ ) - , bufferDeviceAddressCaptureReplay( bufferDeviceAddressCaptureReplay_ ) - , bufferDeviceAddressMultiDevice( bufferDeviceAddressMultiDevice_ ) + : pNext{ pNext_ } + , bufferDeviceAddress{ bufferDeviceAddress_ } + , bufferDeviceAddressCaptureReplay{ bufferDeviceAddressCaptureReplay_ } + , bufferDeviceAddressMultiDevice{ bufferDeviceAddressMultiDevice_ } { } @@ -57595,10 +58141,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ = {}, VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , bufferDeviceAddress( bufferDeviceAddress_ ) - , bufferDeviceAddressCaptureReplay( bufferDeviceAddressCaptureReplay_ ) - , bufferDeviceAddressMultiDevice( bufferDeviceAddressMultiDevice_ ) + : pNext{ pNext_ } + , bufferDeviceAddress{ bufferDeviceAddress_ } + , bufferDeviceAddressCaptureReplay{ bufferDeviceAddressCaptureReplay_ } + , bufferDeviceAddressMultiDevice{ bufferDeviceAddressMultiDevice_ } { } @@ -57720,9 +58266,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR PhysicalDeviceClusterCullingShaderFeaturesHUAWEI( VULKAN_HPP_NAMESPACE::Bool32 clustercullingShader_ = {}, VULKAN_HPP_NAMESPACE::Bool32 multiviewClusterCullingShader_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , clustercullingShader( clustercullingShader_ ) - , multiviewClusterCullingShader( multiviewClusterCullingShader_ ) + : pNext{ pNext_ } + , clustercullingShader{ clustercullingShader_ } + , multiviewClusterCullingShader{ multiviewClusterCullingShader_ } { } @@ -57832,11 +58378,11 @@ namespace VULKAN_HPP_NAMESPACE uint32_t maxOutputClusterCount_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize indirectBufferOffsetAlignment_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , maxWorkGroupCount( maxWorkGroupCount_ ) - , maxWorkGroupSize( maxWorkGroupSize_ ) - , maxOutputClusterCount( maxOutputClusterCount_ ) - , indirectBufferOffsetAlignment( indirectBufferOffsetAlignment_ ) + : pNext{ pNext_ } + , maxWorkGroupCount{ maxWorkGroupCount_ } + , maxWorkGroupSize{ maxWorkGroupSize_ } + , maxOutputClusterCount{ maxOutputClusterCount_ } + , indirectBufferOffsetAlignment{ indirectBufferOffsetAlignment_ } { } @@ -57930,8 +58476,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI( VULKAN_HPP_NAMESPACE::Bool32 clusterShadingRate_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , clusterShadingRate( clusterShadingRate_ ) + : pNext{ pNext_ } + , clusterShadingRate{ clusterShadingRate_ } { } @@ -58030,8 +58576,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceCoherentMemoryFeaturesAMD( VULKAN_HPP_NAMESPACE::Bool32 deviceCoherentMemory_ = {}, void * pNext_ = nullptr ) 
VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , deviceCoherentMemory( deviceCoherentMemory_ ) + : pNext{ pNext_ } + , deviceCoherentMemory{ deviceCoherentMemory_ } { } @@ -58128,8 +58674,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceColorWriteEnableFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 colorWriteEnable_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , colorWriteEnable( colorWriteEnable_ ) + : pNext{ pNext_ } + , colorWriteEnable{ colorWriteEnable_ } { } @@ -58216,55 +58762,155 @@ namespace VULKAN_HPP_NAMESPACE using Type = PhysicalDeviceColorWriteEnableFeaturesEXT; }; - struct PhysicalDeviceComputeShaderDerivativesFeaturesNV + struct PhysicalDeviceCommandBufferInheritanceFeaturesNV { - using NativeType = VkPhysicalDeviceComputeShaderDerivativesFeaturesNV; + using NativeType = VkPhysicalDeviceCommandBufferInheritanceFeaturesNV; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceComputeShaderDerivativesFeaturesNV; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCommandBufferInheritanceFeaturesNV; #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceComputeShaderDerivativesFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupQuads_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupLinear_ = {}, + VULKAN_HPP_CONSTEXPR PhysicalDeviceCommandBufferInheritanceFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 commandBufferInheritance_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , computeDerivativeGroupQuads( computeDerivativeGroupQuads_ ) - , computeDerivativeGroupLinear( computeDerivativeGroupLinear_ ) + : pNext{ pNext_ } + , commandBufferInheritance{ commandBufferInheritance_ } { } VULKAN_HPP_CONSTEXPR - PhysicalDeviceComputeShaderDerivativesFeaturesNV( PhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PhysicalDeviceCommandBufferInheritanceFeaturesNV( PhysicalDeviceCommandBufferInheritanceFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceComputeShaderDerivativesFeaturesNV( VkPhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceComputeShaderDerivativesFeaturesNV( *reinterpret_cast( &rhs ) ) + PhysicalDeviceCommandBufferInheritanceFeaturesNV( VkPhysicalDeviceCommandBufferInheritanceFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceCommandBufferInheritanceFeaturesNV( *reinterpret_cast( &rhs ) ) { } - PhysicalDeviceComputeShaderDerivativesFeaturesNV & operator=( PhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PhysicalDeviceCommandBufferInheritanceFeaturesNV & operator=( PhysicalDeviceCommandBufferInheritanceFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceComputeShaderDerivativesFeaturesNV & operator=( VkPhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceCommandBufferInheritanceFeaturesNV & operator=( VkPhysicalDeviceCommandBufferInheritanceFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceComputeShaderDerivativesFeaturesNV & 
setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCommandBufferInheritanceFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceComputeShaderDerivativesFeaturesNV & + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCommandBufferInheritanceFeaturesNV & + setCommandBufferInheritance( VULKAN_HPP_NAMESPACE::Bool32 commandBufferInheritance_ ) VULKAN_HPP_NOEXCEPT + { + commandBufferInheritance = commandBufferInheritance_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceCommandBufferInheritanceFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCommandBufferInheritanceFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, commandBufferInheritance ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceCommandBufferInheritanceFeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceCommandBufferInheritanceFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( commandBufferInheritance == rhs.commandBufferInheritance ); +# endif + } + + bool operator!=( PhysicalDeviceCommandBufferInheritanceFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCommandBufferInheritanceFeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 commandBufferInheritance = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceCommandBufferInheritanceFeaturesNV; + }; + + struct PhysicalDeviceComputeShaderDerivativesFeaturesKHR + { + using NativeType = VkPhysicalDeviceComputeShaderDerivativesFeaturesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceComputeShaderDerivativesFeaturesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceComputeShaderDerivativesFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupQuads_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupLinear_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , computeDerivativeGroupQuads{ computeDerivativeGroupQuads_ } + , computeDerivativeGroupLinear{ computeDerivativeGroupLinear_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceComputeShaderDerivativesFeaturesKHR( PhysicalDeviceComputeShaderDerivativesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceComputeShaderDerivativesFeaturesKHR( VkPhysicalDeviceComputeShaderDerivativesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceComputeShaderDerivativesFeaturesKHR( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceComputeShaderDerivativesFeaturesKHR & + operator=( PhysicalDeviceComputeShaderDerivativesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceComputeShaderDerivativesFeaturesKHR & operator=( 
VkPhysicalDeviceComputeShaderDerivativesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceComputeShaderDerivativesFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceComputeShaderDerivativesFeaturesKHR & setComputeDerivativeGroupQuads( VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupQuads_ ) VULKAN_HPP_NOEXCEPT { computeDerivativeGroupQuads = computeDerivativeGroupQuads_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceComputeShaderDerivativesFeaturesNV & + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceComputeShaderDerivativesFeaturesKHR & setComputeDerivativeGroupLinear( VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupLinear_ ) VULKAN_HPP_NOEXCEPT { computeDerivativeGroupLinear = computeDerivativeGroupLinear_; @@ -58272,14 +58918,14 @@ namespace VULKAN_HPP_NAMESPACE } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPhysicalDeviceComputeShaderDerivativesFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceComputeShaderDerivativesFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDeviceComputeShaderDerivativesFeaturesNV &() VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceComputeShaderDerivativesFeaturesKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -58295,9 +58941,9 @@ namespace VULKAN_HPP_NAMESPACE #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceComputeShaderDerivativesFeaturesNV const & ) const = default; + auto operator<=>( PhysicalDeviceComputeShaderDerivativesFeaturesKHR const & ) const = default; #else - bool operator==( PhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceComputeShaderDerivativesFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); @@ -58307,23 +58953,110 @@ namespace VULKAN_HPP_NAMESPACE # endif } - bool operator!=( PhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceComputeShaderDerivativesFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceComputeShaderDerivativesFeaturesNV; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceComputeShaderDerivativesFeaturesKHR; void * pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupQuads = {}; VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupLinear = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceComputeShaderDerivativesFeaturesNV; + using Type = PhysicalDeviceComputeShaderDerivativesFeaturesKHR; + }; + + using PhysicalDeviceComputeShaderDerivativesFeaturesNV = PhysicalDeviceComputeShaderDerivativesFeaturesKHR; + + struct PhysicalDeviceComputeShaderDerivativesPropertiesKHR + { + using NativeType = VkPhysicalDeviceComputeShaderDerivativesPropertiesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = 
StructureType::ePhysicalDeviceComputeShaderDerivativesPropertiesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceComputeShaderDerivativesPropertiesKHR( VULKAN_HPP_NAMESPACE::Bool32 meshAndTaskShaderDerivatives_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , meshAndTaskShaderDerivatives{ meshAndTaskShaderDerivatives_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceComputeShaderDerivativesPropertiesKHR( PhysicalDeviceComputeShaderDerivativesPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceComputeShaderDerivativesPropertiesKHR( VkPhysicalDeviceComputeShaderDerivativesPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceComputeShaderDerivativesPropertiesKHR( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceComputeShaderDerivativesPropertiesKHR & + operator=( PhysicalDeviceComputeShaderDerivativesPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceComputeShaderDerivativesPropertiesKHR & operator=( VkPhysicalDeviceComputeShaderDerivativesPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceComputeShaderDerivativesPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceComputeShaderDerivativesPropertiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, meshAndTaskShaderDerivatives ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceComputeShaderDerivativesPropertiesKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceComputeShaderDerivativesPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( meshAndTaskShaderDerivatives == rhs.meshAndTaskShaderDerivatives ); +# endif + } + + bool operator!=( PhysicalDeviceComputeShaderDerivativesPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceComputeShaderDerivativesPropertiesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 meshAndTaskShaderDerivatives = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceComputeShaderDerivativesPropertiesKHR; }; struct PhysicalDeviceConditionalRenderingFeaturesEXT @@ -58337,9 +59070,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR PhysicalDeviceConditionalRenderingFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 conditionalRendering_ = {}, VULKAN_HPP_NAMESPACE::Bool32 inheritedConditionalRendering_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , conditionalRendering( conditionalRendering_ ) - , inheritedConditionalRendering( inheritedConditionalRendering_ ) + : pNext{ pNext_ } + , conditionalRendering{ conditionalRendering_ } + , inheritedConditionalRendering{ inheritedConditionalRendering_ } { } @@ -58454,16 +59187,16 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Bool32 fullyCoveredFragmentShaderInputVariable_ = {}, VULKAN_HPP_NAMESPACE::Bool32 
conservativeRasterizationPostDepthCoverage_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , primitiveOverestimationSize( primitiveOverestimationSize_ ) - , maxExtraPrimitiveOverestimationSize( maxExtraPrimitiveOverestimationSize_ ) - , extraPrimitiveOverestimationSizeGranularity( extraPrimitiveOverestimationSizeGranularity_ ) - , primitiveUnderestimation( primitiveUnderestimation_ ) - , conservativePointAndLineRasterization( conservativePointAndLineRasterization_ ) - , degenerateTrianglesRasterized( degenerateTrianglesRasterized_ ) - , degenerateLinesRasterized( degenerateLinesRasterized_ ) - , fullyCoveredFragmentShaderInputVariable( fullyCoveredFragmentShaderInputVariable_ ) - , conservativeRasterizationPostDepthCoverage( conservativeRasterizationPostDepthCoverage_ ) + : pNext{ pNext_ } + , primitiveOverestimationSize{ primitiveOverestimationSize_ } + , maxExtraPrimitiveOverestimationSize{ maxExtraPrimitiveOverestimationSize_ } + , extraPrimitiveOverestimationSizeGranularity{ extraPrimitiveOverestimationSizeGranularity_ } + , primitiveUnderestimation{ primitiveUnderestimation_ } + , conservativePointAndLineRasterization{ conservativePointAndLineRasterization_ } + , degenerateTrianglesRasterized{ degenerateTrianglesRasterized_ } + , degenerateLinesRasterized{ degenerateLinesRasterized_ } + , fullyCoveredFragmentShaderInputVariable{ fullyCoveredFragmentShaderInputVariable_ } + , conservativeRasterizationPostDepthCoverage{ conservativeRasterizationPostDepthCoverage_ } { } @@ -58583,9 +59316,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrix_ = {}, VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixRobustBufferAccess_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , cooperativeMatrix( cooperativeMatrix_ ) - , cooperativeMatrixRobustBufferAccess( cooperativeMatrixRobustBufferAccess_ ) + : pNext{ pNext_ } + , cooperativeMatrix{ cooperativeMatrix_ } + , cooperativeMatrixRobustBufferAccess{ cooperativeMatrixRobustBufferAccess_ } { } @@ -58692,9 +59425,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrix_ = {}, VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixRobustBufferAccess_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , cooperativeMatrix( cooperativeMatrix_ ) - , cooperativeMatrixRobustBufferAccess( cooperativeMatrixRobustBufferAccess_ ) + : pNext{ pNext_ } + , cooperativeMatrix{ cooperativeMatrix_ } + , cooperativeMatrixRobustBufferAccess{ cooperativeMatrixRobustBufferAccess_ } { } @@ -58800,8 +59533,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixPropertiesKHR( VULKAN_HPP_NAMESPACE::ShaderStageFlags cooperativeMatrixSupportedStages_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , cooperativeMatrixSupportedStages( cooperativeMatrixSupportedStages_ ) + : pNext{ pNext_ } + , cooperativeMatrixSupportedStages{ cooperativeMatrixSupportedStages_ } { } @@ -58883,8 +59616,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixPropertiesNV( VULKAN_HPP_NAMESPACE::ShaderStageFlags cooperativeMatrixSupportedStages_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , cooperativeMatrixSupportedStages( 
cooperativeMatrixSupportedStages_ ) + : pNext{ pNext_ } + , cooperativeMatrixSupportedStages{ cooperativeMatrixSupportedStages_ } { } @@ -58966,8 +59699,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceCopyMemoryIndirectFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 indirectCopy_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , indirectCopy( indirectCopy_ ) + : pNext{ pNext_ } + , indirectCopy{ indirectCopy_ } { } @@ -59063,8 +59796,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceCopyMemoryIndirectPropertiesNV( VULKAN_HPP_NAMESPACE::QueueFlags supportedQueues_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , supportedQueues( supportedQueues_ ) + : pNext{ pNext_ } + , supportedQueues{ supportedQueues_ } { } @@ -59146,8 +59879,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceCornerSampledImageFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 cornerSampledImage_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , cornerSampledImage( cornerSampledImage_ ) + : pNext{ pNext_ } + , cornerSampledImage{ cornerSampledImage_ } { } @@ -59244,8 +59977,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceCoverageReductionModeFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 coverageReductionMode_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , coverageReductionMode( coverageReductionMode_ ) + : pNext{ pNext_ } + , coverageReductionMode{ coverageReductionMode_ } { } @@ -59342,8 +60075,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceCubicClampFeaturesQCOM( VULKAN_HPP_NAMESPACE::Bool32 cubicRangeClamp_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , cubicRangeClamp( cubicRangeClamp_ ) + : pNext{ pNext_ } + , cubicRangeClamp{ cubicRangeClamp_ } { } @@ -59439,8 +60172,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceCubicWeightsFeaturesQCOM( VULKAN_HPP_NAMESPACE::Bool32 selectableCubicWeights_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , selectableCubicWeights( selectableCubicWeights_ ) + : pNext{ pNext_ } + , selectableCubicWeights{ selectableCubicWeights_ } { } @@ -59538,8 +60271,8 @@ namespace VULKAN_HPP_NAMESPACE # if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceCudaKernelLaunchFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 cudaKernelLaunchFeatures_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , cudaKernelLaunchFeatures( cudaKernelLaunchFeatures_ ) + : pNext{ pNext_ } + , cudaKernelLaunchFeatures{ cudaKernelLaunchFeatures_ } { } @@ -59639,9 +60372,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR PhysicalDeviceCudaKernelLaunchPropertiesNV( uint32_t computeCapabilityMinor_ = {}, uint32_t computeCapabilityMajor_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , computeCapabilityMinor( computeCapabilityMinor_ ) - , computeCapabilityMajor( computeCapabilityMajor_ ) + : pNext{ pNext_ } + , computeCapabilityMinor{ computeCapabilityMinor_ } + , computeCapabilityMajor{ computeCapabilityMajor_ } { } @@ -59727,9 +60460,9 @@ namespace VULKAN_HPP_NAMESPACE 
VULKAN_HPP_CONSTEXPR PhysicalDeviceCustomBorderColorFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 customBorderColors_ = {}, VULKAN_HPP_NAMESPACE::Bool32 customBorderColorWithoutFormat_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , customBorderColors( customBorderColors_ ) - , customBorderColorWithoutFormat( customBorderColorWithoutFormat_ ) + : pNext{ pNext_ } + , customBorderColors{ customBorderColors_ } + , customBorderColorWithoutFormat{ customBorderColorWithoutFormat_ } { } @@ -59835,8 +60568,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceCustomBorderColorPropertiesEXT( uint32_t maxCustomBorderColorSamplers_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , maxCustomBorderColorSamplers( maxCustomBorderColorSamplers_ ) + : pNext{ pNext_ } + , maxCustomBorderColorSamplers{ maxCustomBorderColorSamplers_ } { } @@ -59918,8 +60651,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocationImageAliasing_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , dedicatedAllocationImageAliasing( dedicatedAllocationImageAliasing_ ) + : pNext{ pNext_ } + , dedicatedAllocationImageAliasing{ dedicatedAllocationImageAliasing_ } { } @@ -60022,11 +60755,11 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Bool32 floatRepresentation_ = {}, VULKAN_HPP_NAMESPACE::Bool32 depthBiasExact_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , depthBiasControl( depthBiasControl_ ) - , leastRepresentableValueForceUnormRepresentation( leastRepresentableValueForceUnormRepresentation_ ) - , floatRepresentation( floatRepresentation_ ) - , depthBiasExact( depthBiasExact_ ) + : pNext{ pNext_ } + , depthBiasControl{ depthBiasControl_ } + , leastRepresentableValueForceUnormRepresentation{ leastRepresentableValueForceUnormRepresentation_ } + , floatRepresentation{ floatRepresentation_ } + , depthBiasExact{ depthBiasExact_ } { } @@ -60153,8 +60886,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthClampZeroOneFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClampZeroOne_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , depthClampZeroOne( depthClampZeroOne_ ) + : pNext{ pNext_ } + , depthClampZeroOne{ depthClampZeroOne_ } { } @@ -60251,8 +60984,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthClipControlFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClipControl_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , depthClipControl( depthClipControl_ ) + : pNext{ pNext_ } + , depthClipControl{ depthClipControl_ } { } @@ -60349,8 +61082,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthClipEnableFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , depthClipEnable( depthClipEnable_ ) + : pNext{ pNext_ } + , depthClipEnable{ depthClipEnable_ } { } @@ -60449,11 +61182,11 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Bool32 independentResolveNone_ = {}, VULKAN_HPP_NAMESPACE::Bool32 independentResolve_ = {}, void * pNext_ = nullptr ) 
VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , supportedDepthResolveModes( supportedDepthResolveModes_ ) - , supportedStencilResolveModes( supportedStencilResolveModes_ ) - , independentResolveNone( independentResolveNone_ ) - , independentResolve( independentResolve_ ) + : pNext{ pNext_ } + , supportedDepthResolveModes{ supportedDepthResolveModes_ } + , supportedStencilResolveModes{ supportedStencilResolveModes_ } + , independentResolveNone{ independentResolveNone_ } + , independentResolve{ independentResolve_ } { } @@ -60547,8 +61280,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT( size_t combinedImageSamplerDensityMapDescriptorSize_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , combinedImageSamplerDensityMapDescriptorSize( combinedImageSamplerDensityMapDescriptorSize_ ) + : pNext{ pNext_ } + , combinedImageSamplerDensityMapDescriptorSize{ combinedImageSamplerDensityMapDescriptorSize_ } { } @@ -60636,11 +61369,11 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Bool32 descriptorBufferImageLayoutIgnored_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBufferPushDescriptors_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , descriptorBuffer( descriptorBuffer_ ) - , descriptorBufferCaptureReplay( descriptorBufferCaptureReplay_ ) - , descriptorBufferImageLayoutIgnored( descriptorBufferImageLayoutIgnored_ ) - , descriptorBufferPushDescriptors( descriptorBufferPushDescriptors_ ) + : pNext{ pNext_ } + , descriptorBuffer{ descriptorBuffer_ } + , descriptorBufferCaptureReplay{ descriptorBufferCaptureReplay_ } + , descriptorBufferImageLayoutIgnored{ descriptorBufferImageLayoutIgnored_ } + , descriptorBufferPushDescriptors{ descriptorBufferPushDescriptors_ } { } @@ -60801,40 +61534,40 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DeviceSize resourceDescriptorBufferAddressSpaceSize_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize descriptorBufferAddressSpaceSize_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , combinedImageSamplerDescriptorSingleArray( combinedImageSamplerDescriptorSingleArray_ ) - , bufferlessPushDescriptors( bufferlessPushDescriptors_ ) - , allowSamplerImageViewPostSubmitCreation( allowSamplerImageViewPostSubmitCreation_ ) - , descriptorBufferOffsetAlignment( descriptorBufferOffsetAlignment_ ) - , maxDescriptorBufferBindings( maxDescriptorBufferBindings_ ) - , maxResourceDescriptorBufferBindings( maxResourceDescriptorBufferBindings_ ) - , maxSamplerDescriptorBufferBindings( maxSamplerDescriptorBufferBindings_ ) - , maxEmbeddedImmutableSamplerBindings( maxEmbeddedImmutableSamplerBindings_ ) - , maxEmbeddedImmutableSamplers( maxEmbeddedImmutableSamplers_ ) - , bufferCaptureReplayDescriptorDataSize( bufferCaptureReplayDescriptorDataSize_ ) - , imageCaptureReplayDescriptorDataSize( imageCaptureReplayDescriptorDataSize_ ) - , imageViewCaptureReplayDescriptorDataSize( imageViewCaptureReplayDescriptorDataSize_ ) - , samplerCaptureReplayDescriptorDataSize( samplerCaptureReplayDescriptorDataSize_ ) - , accelerationStructureCaptureReplayDescriptorDataSize( accelerationStructureCaptureReplayDescriptorDataSize_ ) - , samplerDescriptorSize( samplerDescriptorSize_ ) - , combinedImageSamplerDescriptorSize( combinedImageSamplerDescriptorSize_ ) - , sampledImageDescriptorSize( sampledImageDescriptorSize_ ) - , storageImageDescriptorSize( storageImageDescriptorSize_ ) - , 
uniformTexelBufferDescriptorSize( uniformTexelBufferDescriptorSize_ ) - , robustUniformTexelBufferDescriptorSize( robustUniformTexelBufferDescriptorSize_ ) - , storageTexelBufferDescriptorSize( storageTexelBufferDescriptorSize_ ) - , robustStorageTexelBufferDescriptorSize( robustStorageTexelBufferDescriptorSize_ ) - , uniformBufferDescriptorSize( uniformBufferDescriptorSize_ ) - , robustUniformBufferDescriptorSize( robustUniformBufferDescriptorSize_ ) - , storageBufferDescriptorSize( storageBufferDescriptorSize_ ) - , robustStorageBufferDescriptorSize( robustStorageBufferDescriptorSize_ ) - , inputAttachmentDescriptorSize( inputAttachmentDescriptorSize_ ) - , accelerationStructureDescriptorSize( accelerationStructureDescriptorSize_ ) - , maxSamplerDescriptorBufferRange( maxSamplerDescriptorBufferRange_ ) - , maxResourceDescriptorBufferRange( maxResourceDescriptorBufferRange_ ) - , samplerDescriptorBufferAddressSpaceSize( samplerDescriptorBufferAddressSpaceSize_ ) - , resourceDescriptorBufferAddressSpaceSize( resourceDescriptorBufferAddressSpaceSize_ ) - , descriptorBufferAddressSpaceSize( descriptorBufferAddressSpaceSize_ ) + : pNext{ pNext_ } + , combinedImageSamplerDescriptorSingleArray{ combinedImageSamplerDescriptorSingleArray_ } + , bufferlessPushDescriptors{ bufferlessPushDescriptors_ } + , allowSamplerImageViewPostSubmitCreation{ allowSamplerImageViewPostSubmitCreation_ } + , descriptorBufferOffsetAlignment{ descriptorBufferOffsetAlignment_ } + , maxDescriptorBufferBindings{ maxDescriptorBufferBindings_ } + , maxResourceDescriptorBufferBindings{ maxResourceDescriptorBufferBindings_ } + , maxSamplerDescriptorBufferBindings{ maxSamplerDescriptorBufferBindings_ } + , maxEmbeddedImmutableSamplerBindings{ maxEmbeddedImmutableSamplerBindings_ } + , maxEmbeddedImmutableSamplers{ maxEmbeddedImmutableSamplers_ } + , bufferCaptureReplayDescriptorDataSize{ bufferCaptureReplayDescriptorDataSize_ } + , imageCaptureReplayDescriptorDataSize{ imageCaptureReplayDescriptorDataSize_ } + , imageViewCaptureReplayDescriptorDataSize{ imageViewCaptureReplayDescriptorDataSize_ } + , samplerCaptureReplayDescriptorDataSize{ samplerCaptureReplayDescriptorDataSize_ } + , accelerationStructureCaptureReplayDescriptorDataSize{ accelerationStructureCaptureReplayDescriptorDataSize_ } + , samplerDescriptorSize{ samplerDescriptorSize_ } + , combinedImageSamplerDescriptorSize{ combinedImageSamplerDescriptorSize_ } + , sampledImageDescriptorSize{ sampledImageDescriptorSize_ } + , storageImageDescriptorSize{ storageImageDescriptorSize_ } + , uniformTexelBufferDescriptorSize{ uniformTexelBufferDescriptorSize_ } + , robustUniformTexelBufferDescriptorSize{ robustUniformTexelBufferDescriptorSize_ } + , storageTexelBufferDescriptorSize{ storageTexelBufferDescriptorSize_ } + , robustStorageTexelBufferDescriptorSize{ robustStorageTexelBufferDescriptorSize_ } + , uniformBufferDescriptorSize{ uniformBufferDescriptorSize_ } + , robustUniformBufferDescriptorSize{ robustUniformBufferDescriptorSize_ } + , storageBufferDescriptorSize{ storageBufferDescriptorSize_ } + , robustStorageBufferDescriptorSize{ robustStorageBufferDescriptorSize_ } + , inputAttachmentDescriptorSize{ inputAttachmentDescriptorSize_ } + , accelerationStructureDescriptorSize{ accelerationStructureDescriptorSize_ } + , maxSamplerDescriptorBufferRange{ maxSamplerDescriptorBufferRange_ } + , maxResourceDescriptorBufferRange{ maxResourceDescriptorBufferRange_ } + , samplerDescriptorBufferAddressSpaceSize{ samplerDescriptorBufferAddressSpaceSize_ } + , 
resourceDescriptorBufferAddressSpaceSize{ resourceDescriptorBufferAddressSpaceSize_ } + , descriptorBufferAddressSpaceSize{ descriptorBufferAddressSpaceSize_ } { } @@ -61065,27 +61798,27 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount_ = {}, VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , shaderInputAttachmentArrayDynamicIndexing( shaderInputAttachmentArrayDynamicIndexing_ ) - , shaderUniformTexelBufferArrayDynamicIndexing( shaderUniformTexelBufferArrayDynamicIndexing_ ) - , shaderStorageTexelBufferArrayDynamicIndexing( shaderStorageTexelBufferArrayDynamicIndexing_ ) - , shaderUniformBufferArrayNonUniformIndexing( shaderUniformBufferArrayNonUniformIndexing_ ) - , shaderSampledImageArrayNonUniformIndexing( shaderSampledImageArrayNonUniformIndexing_ ) - , shaderStorageBufferArrayNonUniformIndexing( shaderStorageBufferArrayNonUniformIndexing_ ) - , shaderStorageImageArrayNonUniformIndexing( shaderStorageImageArrayNonUniformIndexing_ ) - , shaderInputAttachmentArrayNonUniformIndexing( shaderInputAttachmentArrayNonUniformIndexing_ ) - , shaderUniformTexelBufferArrayNonUniformIndexing( shaderUniformTexelBufferArrayNonUniformIndexing_ ) - , shaderStorageTexelBufferArrayNonUniformIndexing( shaderStorageTexelBufferArrayNonUniformIndexing_ ) - , descriptorBindingUniformBufferUpdateAfterBind( descriptorBindingUniformBufferUpdateAfterBind_ ) - , descriptorBindingSampledImageUpdateAfterBind( descriptorBindingSampledImageUpdateAfterBind_ ) - , descriptorBindingStorageImageUpdateAfterBind( descriptorBindingStorageImageUpdateAfterBind_ ) - , descriptorBindingStorageBufferUpdateAfterBind( descriptorBindingStorageBufferUpdateAfterBind_ ) - , descriptorBindingUniformTexelBufferUpdateAfterBind( descriptorBindingUniformTexelBufferUpdateAfterBind_ ) - , descriptorBindingStorageTexelBufferUpdateAfterBind( descriptorBindingStorageTexelBufferUpdateAfterBind_ ) - , descriptorBindingUpdateUnusedWhilePending( descriptorBindingUpdateUnusedWhilePending_ ) - , descriptorBindingPartiallyBound( descriptorBindingPartiallyBound_ ) - , descriptorBindingVariableDescriptorCount( descriptorBindingVariableDescriptorCount_ ) - , runtimeDescriptorArray( runtimeDescriptorArray_ ) + : pNext{ pNext_ } + , shaderInputAttachmentArrayDynamicIndexing{ shaderInputAttachmentArrayDynamicIndexing_ } + , shaderUniformTexelBufferArrayDynamicIndexing{ shaderUniformTexelBufferArrayDynamicIndexing_ } + , shaderStorageTexelBufferArrayDynamicIndexing{ shaderStorageTexelBufferArrayDynamicIndexing_ } + , shaderUniformBufferArrayNonUniformIndexing{ shaderUniformBufferArrayNonUniformIndexing_ } + , shaderSampledImageArrayNonUniformIndexing{ shaderSampledImageArrayNonUniformIndexing_ } + , shaderStorageBufferArrayNonUniformIndexing{ shaderStorageBufferArrayNonUniformIndexing_ } + , shaderStorageImageArrayNonUniformIndexing{ shaderStorageImageArrayNonUniformIndexing_ } + , shaderInputAttachmentArrayNonUniformIndexing{ shaderInputAttachmentArrayNonUniformIndexing_ } + , shaderUniformTexelBufferArrayNonUniformIndexing{ shaderUniformTexelBufferArrayNonUniformIndexing_ } + , shaderStorageTexelBufferArrayNonUniformIndexing{ shaderStorageTexelBufferArrayNonUniformIndexing_ } + , descriptorBindingUniformBufferUpdateAfterBind{ descriptorBindingUniformBufferUpdateAfterBind_ } + , descriptorBindingSampledImageUpdateAfterBind{ descriptorBindingSampledImageUpdateAfterBind_ } + , descriptorBindingStorageImageUpdateAfterBind{ 
descriptorBindingStorageImageUpdateAfterBind_ } + , descriptorBindingStorageBufferUpdateAfterBind{ descriptorBindingStorageBufferUpdateAfterBind_ } + , descriptorBindingUniformTexelBufferUpdateAfterBind{ descriptorBindingUniformTexelBufferUpdateAfterBind_ } + , descriptorBindingStorageTexelBufferUpdateAfterBind{ descriptorBindingStorageTexelBufferUpdateAfterBind_ } + , descriptorBindingUpdateUnusedWhilePending{ descriptorBindingUpdateUnusedWhilePending_ } + , descriptorBindingPartiallyBound{ descriptorBindingPartiallyBound_ } + , descriptorBindingVariableDescriptorCount{ descriptorBindingVariableDescriptorCount_ } + , runtimeDescriptorArray{ runtimeDescriptorArray_ } { } @@ -61420,30 +62153,30 @@ namespace VULKAN_HPP_NAMESPACE uint32_t maxDescriptorSetUpdateAfterBindStorageImages_ = {}, uint32_t maxDescriptorSetUpdateAfterBindInputAttachments_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , maxUpdateAfterBindDescriptorsInAllPools( maxUpdateAfterBindDescriptorsInAllPools_ ) - , shaderUniformBufferArrayNonUniformIndexingNative( shaderUniformBufferArrayNonUniformIndexingNative_ ) - , shaderSampledImageArrayNonUniformIndexingNative( shaderSampledImageArrayNonUniformIndexingNative_ ) - , shaderStorageBufferArrayNonUniformIndexingNative( shaderStorageBufferArrayNonUniformIndexingNative_ ) - , shaderStorageImageArrayNonUniformIndexingNative( shaderStorageImageArrayNonUniformIndexingNative_ ) - , shaderInputAttachmentArrayNonUniformIndexingNative( shaderInputAttachmentArrayNonUniformIndexingNative_ ) - , robustBufferAccessUpdateAfterBind( robustBufferAccessUpdateAfterBind_ ) - , quadDivergentImplicitLod( quadDivergentImplicitLod_ ) - , maxPerStageDescriptorUpdateAfterBindSamplers( maxPerStageDescriptorUpdateAfterBindSamplers_ ) - , maxPerStageDescriptorUpdateAfterBindUniformBuffers( maxPerStageDescriptorUpdateAfterBindUniformBuffers_ ) - , maxPerStageDescriptorUpdateAfterBindStorageBuffers( maxPerStageDescriptorUpdateAfterBindStorageBuffers_ ) - , maxPerStageDescriptorUpdateAfterBindSampledImages( maxPerStageDescriptorUpdateAfterBindSampledImages_ ) - , maxPerStageDescriptorUpdateAfterBindStorageImages( maxPerStageDescriptorUpdateAfterBindStorageImages_ ) - , maxPerStageDescriptorUpdateAfterBindInputAttachments( maxPerStageDescriptorUpdateAfterBindInputAttachments_ ) - , maxPerStageUpdateAfterBindResources( maxPerStageUpdateAfterBindResources_ ) - , maxDescriptorSetUpdateAfterBindSamplers( maxDescriptorSetUpdateAfterBindSamplers_ ) - , maxDescriptorSetUpdateAfterBindUniformBuffers( maxDescriptorSetUpdateAfterBindUniformBuffers_ ) - , maxDescriptorSetUpdateAfterBindUniformBuffersDynamic( maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ ) - , maxDescriptorSetUpdateAfterBindStorageBuffers( maxDescriptorSetUpdateAfterBindStorageBuffers_ ) - , maxDescriptorSetUpdateAfterBindStorageBuffersDynamic( maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ ) - , maxDescriptorSetUpdateAfterBindSampledImages( maxDescriptorSetUpdateAfterBindSampledImages_ ) - , maxDescriptorSetUpdateAfterBindStorageImages( maxDescriptorSetUpdateAfterBindStorageImages_ ) - , maxDescriptorSetUpdateAfterBindInputAttachments( maxDescriptorSetUpdateAfterBindInputAttachments_ ) + : pNext{ pNext_ } + , maxUpdateAfterBindDescriptorsInAllPools{ maxUpdateAfterBindDescriptorsInAllPools_ } + , shaderUniformBufferArrayNonUniformIndexingNative{ shaderUniformBufferArrayNonUniformIndexingNative_ } + , shaderSampledImageArrayNonUniformIndexingNative{ shaderSampledImageArrayNonUniformIndexingNative_ } + , 
shaderStorageBufferArrayNonUniformIndexingNative{ shaderStorageBufferArrayNonUniformIndexingNative_ } + , shaderStorageImageArrayNonUniformIndexingNative{ shaderStorageImageArrayNonUniformIndexingNative_ } + , shaderInputAttachmentArrayNonUniformIndexingNative{ shaderInputAttachmentArrayNonUniformIndexingNative_ } + , robustBufferAccessUpdateAfterBind{ robustBufferAccessUpdateAfterBind_ } + , quadDivergentImplicitLod{ quadDivergentImplicitLod_ } + , maxPerStageDescriptorUpdateAfterBindSamplers{ maxPerStageDescriptorUpdateAfterBindSamplers_ } + , maxPerStageDescriptorUpdateAfterBindUniformBuffers{ maxPerStageDescriptorUpdateAfterBindUniformBuffers_ } + , maxPerStageDescriptorUpdateAfterBindStorageBuffers{ maxPerStageDescriptorUpdateAfterBindStorageBuffers_ } + , maxPerStageDescriptorUpdateAfterBindSampledImages{ maxPerStageDescriptorUpdateAfterBindSampledImages_ } + , maxPerStageDescriptorUpdateAfterBindStorageImages{ maxPerStageDescriptorUpdateAfterBindStorageImages_ } + , maxPerStageDescriptorUpdateAfterBindInputAttachments{ maxPerStageDescriptorUpdateAfterBindInputAttachments_ } + , maxPerStageUpdateAfterBindResources{ maxPerStageUpdateAfterBindResources_ } + , maxDescriptorSetUpdateAfterBindSamplers{ maxDescriptorSetUpdateAfterBindSamplers_ } + , maxDescriptorSetUpdateAfterBindUniformBuffers{ maxDescriptorSetUpdateAfterBindUniformBuffers_ } + , maxDescriptorSetUpdateAfterBindUniformBuffersDynamic{ maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ } + , maxDescriptorSetUpdateAfterBindStorageBuffers{ maxDescriptorSetUpdateAfterBindStorageBuffers_ } + , maxDescriptorSetUpdateAfterBindStorageBuffersDynamic{ maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ } + , maxDescriptorSetUpdateAfterBindSampledImages{ maxDescriptorSetUpdateAfterBindSampledImages_ } + , maxDescriptorSetUpdateAfterBindStorageImages{ maxDescriptorSetUpdateAfterBindStorageImages_ } + , maxDescriptorSetUpdateAfterBindInputAttachments{ maxDescriptorSetUpdateAfterBindInputAttachments_ } { } @@ -61618,8 +62351,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorPoolOverallocationFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 descriptorPoolOverallocation_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , descriptorPoolOverallocation( descriptorPoolOverallocation_ ) + : pNext{ pNext_ } + , descriptorPoolOverallocation{ descriptorPoolOverallocation_ } { } @@ -61718,8 +62451,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE( VULKAN_HPP_NAMESPACE::Bool32 descriptorSetHostMapping_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , descriptorSetHostMapping( descriptorSetHostMapping_ ) + : pNext{ pNext_ } + , descriptorSetHostMapping{ descriptorSetHostMapping_ } { } @@ -61820,10 +62553,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedComputePipelines_ = {}, VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedComputeCaptureReplay_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , deviceGeneratedCompute( deviceGeneratedCompute_ ) - , deviceGeneratedComputePipelines( deviceGeneratedComputePipelines_ ) - , deviceGeneratedComputeCaptureReplay( deviceGeneratedComputeCaptureReplay_ ) + : pNext{ pNext_ } + , deviceGeneratedCompute{ deviceGeneratedCompute_ } + , deviceGeneratedComputePipelines{ deviceGeneratedComputePipelines_ } + , 
deviceGeneratedComputeCaptureReplay{ deviceGeneratedComputeCaptureReplay_ } { } @@ -61945,8 +62678,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceGeneratedCommandsFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedCommands_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , deviceGeneratedCommands( deviceGeneratedCommands_ ) + : pNext{ pNext_ } + , deviceGeneratedCommands{ deviceGeneratedCommands_ } { } @@ -62052,16 +62785,16 @@ namespace VULKAN_HPP_NAMESPACE uint32_t minSequencesIndexBufferOffsetAlignment_ = {}, uint32_t minIndirectCommandsBufferOffsetAlignment_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , maxGraphicsShaderGroupCount( maxGraphicsShaderGroupCount_ ) - , maxIndirectSequenceCount( maxIndirectSequenceCount_ ) - , maxIndirectCommandsTokenCount( maxIndirectCommandsTokenCount_ ) - , maxIndirectCommandsStreamCount( maxIndirectCommandsStreamCount_ ) - , maxIndirectCommandsTokenOffset( maxIndirectCommandsTokenOffset_ ) - , maxIndirectCommandsStreamStride( maxIndirectCommandsStreamStride_ ) - , minSequencesCountBufferOffsetAlignment( minSequencesCountBufferOffsetAlignment_ ) - , minSequencesIndexBufferOffsetAlignment( minSequencesIndexBufferOffsetAlignment_ ) - , minIndirectCommandsBufferOffsetAlignment( minIndirectCommandsBufferOffsetAlignment_ ) + : pNext{ pNext_ } + , maxGraphicsShaderGroupCount{ maxGraphicsShaderGroupCount_ } + , maxIndirectSequenceCount{ maxIndirectSequenceCount_ } + , maxIndirectCommandsTokenCount{ maxIndirectCommandsTokenCount_ } + , maxIndirectCommandsStreamCount{ maxIndirectCommandsStreamCount_ } + , maxIndirectCommandsTokenOffset{ maxIndirectCommandsTokenOffset_ } + , maxIndirectCommandsStreamStride{ maxIndirectCommandsStreamStride_ } + , minSequencesCountBufferOffsetAlignment{ minSequencesCountBufferOffsetAlignment_ } + , minSequencesIndexBufferOffsetAlignment{ minSequencesIndexBufferOffsetAlignment_ } + , minIndirectCommandsBufferOffsetAlignment{ minIndirectCommandsBufferOffsetAlignment_ } { } @@ -62180,8 +62913,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceMemoryReportFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 deviceMemoryReport_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , deviceMemoryReport( deviceMemoryReport_ ) + : pNext{ pNext_ } + , deviceMemoryReport{ deviceMemoryReport_ } { } @@ -62278,8 +63011,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceDiagnosticsConfigFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 diagnosticsConfig_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , diagnosticsConfig( diagnosticsConfig_ ) + : pNext{ pNext_ } + , diagnosticsConfig{ diagnosticsConfig_ } { } @@ -62375,8 +63108,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceDiscardRectanglePropertiesEXT( uint32_t maxDiscardRectangles_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , maxDiscardRectangles( maxDiscardRectangles_ ) + : pNext{ pNext_ } + , maxDiscardRectangles{ maxDiscardRectangles_ } { } @@ -62459,8 +63192,8 @@ namespace VULKAN_HPP_NAMESPACE # if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceDisplacementMicromapFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 displacementMicromap_ = {}, void * pNext_ 
= nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , displacementMicromap( displacementMicromap_ ) + : pNext{ pNext_ } + , displacementMicromap{ displacementMicromap_ } { } @@ -62559,8 +63292,8 @@ namespace VULKAN_HPP_NAMESPACE # if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceDisplacementMicromapPropertiesNV( uint32_t maxDisplacementMicromapSubdivisionLevel_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , maxDisplacementMicromapSubdivisionLevel( maxDisplacementMicromapSubdivisionLevel_ ) + : pNext{ pNext_ } + , maxDisplacementMicromapSubdivisionLevel{ maxDisplacementMicromapSubdivisionLevel_ } { } @@ -62647,11 +63380,11 @@ namespace VULKAN_HPP_NAMESPACE std::array const & driverInfo_ = {}, VULKAN_HPP_NAMESPACE::ConformanceVersion conformanceVersion_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , driverID( driverID_ ) - , driverName( driverName_ ) - , driverInfo( driverInfo_ ) - , conformanceVersion( conformanceVersion_ ) + : pNext{ pNext_ } + , driverID{ driverID_ } + , driverName{ driverName_ } + , driverInfo{ driverInfo_ } + , conformanceVersion{ conformanceVersion_ } { } @@ -62662,30 +63395,6 @@ namespace VULKAN_HPP_NAMESPACE { } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - PhysicalDeviceDriverProperties( VULKAN_HPP_NAMESPACE::DriverId driverID_, - std::string const & driverName_, - std::string const & driverInfo_ = {}, - VULKAN_HPP_NAMESPACE::ConformanceVersion conformanceVersion_ = {}, - void * pNext_ = nullptr ) - : pNext( pNext_ ), driverID( driverID_ ), conformanceVersion( conformanceVersion_ ) - { - VULKAN_HPP_ASSERT( driverName_.size() < VK_MAX_DRIVER_NAME_SIZE ); -# if defined( WIN32 ) - strncpy_s( driverName, VK_MAX_DRIVER_NAME_SIZE, driverName_.data(), driverName_.size() ); -# else - strncpy( driverName, driverName_.data(), std::min( VK_MAX_DRIVER_NAME_SIZE, driverName_.size() ) ); -# endif - - VULKAN_HPP_ASSERT( driverInfo_.size() < VK_MAX_DRIVER_INFO_SIZE ); -# if defined( WIN32 ) - strncpy_s( driverInfo, VK_MAX_DRIVER_INFO_SIZE, driverInfo_.data(), driverInfo_.size() ); -# else - strncpy( driverInfo, driverInfo_.data(), std::min( VK_MAX_DRIVER_INFO_SIZE, driverInfo_.size() ) ); -# endif - } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - PhysicalDeviceDriverProperties & operator=( PhysicalDeviceDriverProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ @@ -62785,13 +63494,13 @@ namespace VULKAN_HPP_NAMESPACE int64_t renderMajor_ = {}, int64_t renderMinor_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , hasPrimary( hasPrimary_ ) - , hasRender( hasRender_ ) - , primaryMajor( primaryMajor_ ) - , primaryMinor( primaryMinor_ ) - , renderMajor( renderMajor_ ) - , renderMinor( renderMinor_ ) + : pNext{ pNext_ } + , hasPrimary{ hasPrimary_ } + , hasRender{ hasRender_ } + , primaryMajor{ primaryMajor_ } + , primaryMinor{ primaryMinor_ } + , renderMajor{ renderMajor_ } + , renderMinor{ renderMinor_ } { } @@ -62887,8 +63596,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceDynamicRenderingFeatures( VULKAN_HPP_NAMESPACE::Bool32 dynamicRendering_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , dynamicRendering( dynamicRendering_ ) + : pNext{ pNext_ } + , dynamicRendering{ dynamicRendering_ } { } @@ -62986,8 +63695,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) 
VULKAN_HPP_CONSTEXPR PhysicalDeviceDynamicRenderingLocalReadFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 dynamicRenderingLocalRead_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , dynamicRenderingLocalRead( dynamicRenderingLocalRead_ ) + : pNext{ pNext_ } + , dynamicRenderingLocalRead{ dynamicRenderingLocalRead_ } { } @@ -63086,8 +63795,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 dynamicRenderingUnusedAttachments_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , dynamicRenderingUnusedAttachments( dynamicRenderingUnusedAttachments_ ) + : pNext{ pNext_ } + , dynamicRenderingUnusedAttachments{ dynamicRenderingUnusedAttachments_ } { } @@ -63188,8 +63897,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceExclusiveScissorFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 exclusiveScissor_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , exclusiveScissor( exclusiveScissor_ ) + : pNext{ pNext_ } + , exclusiveScissor{ exclusiveScissor_ } { } @@ -63287,10 +63996,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2LogicOp_ = {}, VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState2PatchControlPoints_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , extendedDynamicState2( extendedDynamicState2_ ) - , extendedDynamicState2LogicOp( extendedDynamicState2LogicOp_ ) - , extendedDynamicState2PatchControlPoints( extendedDynamicState2PatchControlPoints_ ) + : pNext{ pNext_ } + , extendedDynamicState2{ extendedDynamicState2_ } + , extendedDynamicState2LogicOp{ extendedDynamicState2LogicOp_ } + , extendedDynamicState2PatchControlPoints{ extendedDynamicState2PatchControlPoints_ } { } @@ -63441,38 +64150,38 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3RepresentativeFragmentTestEnable_ = {}, VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ShadingRateImageEnable_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , extendedDynamicState3TessellationDomainOrigin( extendedDynamicState3TessellationDomainOrigin_ ) - , extendedDynamicState3DepthClampEnable( extendedDynamicState3DepthClampEnable_ ) - , extendedDynamicState3PolygonMode( extendedDynamicState3PolygonMode_ ) - , extendedDynamicState3RasterizationSamples( extendedDynamicState3RasterizationSamples_ ) - , extendedDynamicState3SampleMask( extendedDynamicState3SampleMask_ ) - , extendedDynamicState3AlphaToCoverageEnable( extendedDynamicState3AlphaToCoverageEnable_ ) - , extendedDynamicState3AlphaToOneEnable( extendedDynamicState3AlphaToOneEnable_ ) - , extendedDynamicState3LogicOpEnable( extendedDynamicState3LogicOpEnable_ ) - , extendedDynamicState3ColorBlendEnable( extendedDynamicState3ColorBlendEnable_ ) - , extendedDynamicState3ColorBlendEquation( extendedDynamicState3ColorBlendEquation_ ) - , extendedDynamicState3ColorWriteMask( extendedDynamicState3ColorWriteMask_ ) - , extendedDynamicState3RasterizationStream( extendedDynamicState3RasterizationStream_ ) - , extendedDynamicState3ConservativeRasterizationMode( extendedDynamicState3ConservativeRasterizationMode_ ) - , extendedDynamicState3ExtraPrimitiveOverestimationSize( extendedDynamicState3ExtraPrimitiveOverestimationSize_ ) - , extendedDynamicState3DepthClipEnable( 
extendedDynamicState3DepthClipEnable_ ) - , extendedDynamicState3SampleLocationsEnable( extendedDynamicState3SampleLocationsEnable_ ) - , extendedDynamicState3ColorBlendAdvanced( extendedDynamicState3ColorBlendAdvanced_ ) - , extendedDynamicState3ProvokingVertexMode( extendedDynamicState3ProvokingVertexMode_ ) - , extendedDynamicState3LineRasterizationMode( extendedDynamicState3LineRasterizationMode_ ) - , extendedDynamicState3LineStippleEnable( extendedDynamicState3LineStippleEnable_ ) - , extendedDynamicState3DepthClipNegativeOneToOne( extendedDynamicState3DepthClipNegativeOneToOne_ ) - , extendedDynamicState3ViewportWScalingEnable( extendedDynamicState3ViewportWScalingEnable_ ) - , extendedDynamicState3ViewportSwizzle( extendedDynamicState3ViewportSwizzle_ ) - , extendedDynamicState3CoverageToColorEnable( extendedDynamicState3CoverageToColorEnable_ ) - , extendedDynamicState3CoverageToColorLocation( extendedDynamicState3CoverageToColorLocation_ ) - , extendedDynamicState3CoverageModulationMode( extendedDynamicState3CoverageModulationMode_ ) - , extendedDynamicState3CoverageModulationTableEnable( extendedDynamicState3CoverageModulationTableEnable_ ) - , extendedDynamicState3CoverageModulationTable( extendedDynamicState3CoverageModulationTable_ ) - , extendedDynamicState3CoverageReductionMode( extendedDynamicState3CoverageReductionMode_ ) - , extendedDynamicState3RepresentativeFragmentTestEnable( extendedDynamicState3RepresentativeFragmentTestEnable_ ) - , extendedDynamicState3ShadingRateImageEnable( extendedDynamicState3ShadingRateImageEnable_ ) + : pNext{ pNext_ } + , extendedDynamicState3TessellationDomainOrigin{ extendedDynamicState3TessellationDomainOrigin_ } + , extendedDynamicState3DepthClampEnable{ extendedDynamicState3DepthClampEnable_ } + , extendedDynamicState3PolygonMode{ extendedDynamicState3PolygonMode_ } + , extendedDynamicState3RasterizationSamples{ extendedDynamicState3RasterizationSamples_ } + , extendedDynamicState3SampleMask{ extendedDynamicState3SampleMask_ } + , extendedDynamicState3AlphaToCoverageEnable{ extendedDynamicState3AlphaToCoverageEnable_ } + , extendedDynamicState3AlphaToOneEnable{ extendedDynamicState3AlphaToOneEnable_ } + , extendedDynamicState3LogicOpEnable{ extendedDynamicState3LogicOpEnable_ } + , extendedDynamicState3ColorBlendEnable{ extendedDynamicState3ColorBlendEnable_ } + , extendedDynamicState3ColorBlendEquation{ extendedDynamicState3ColorBlendEquation_ } + , extendedDynamicState3ColorWriteMask{ extendedDynamicState3ColorWriteMask_ } + , extendedDynamicState3RasterizationStream{ extendedDynamicState3RasterizationStream_ } + , extendedDynamicState3ConservativeRasterizationMode{ extendedDynamicState3ConservativeRasterizationMode_ } + , extendedDynamicState3ExtraPrimitiveOverestimationSize{ extendedDynamicState3ExtraPrimitiveOverestimationSize_ } + , extendedDynamicState3DepthClipEnable{ extendedDynamicState3DepthClipEnable_ } + , extendedDynamicState3SampleLocationsEnable{ extendedDynamicState3SampleLocationsEnable_ } + , extendedDynamicState3ColorBlendAdvanced{ extendedDynamicState3ColorBlendAdvanced_ } + , extendedDynamicState3ProvokingVertexMode{ extendedDynamicState3ProvokingVertexMode_ } + , extendedDynamicState3LineRasterizationMode{ extendedDynamicState3LineRasterizationMode_ } + , extendedDynamicState3LineStippleEnable{ extendedDynamicState3LineStippleEnable_ } + , extendedDynamicState3DepthClipNegativeOneToOne{ extendedDynamicState3DepthClipNegativeOneToOne_ } + , extendedDynamicState3ViewportWScalingEnable{ 
extendedDynamicState3ViewportWScalingEnable_ } + , extendedDynamicState3ViewportSwizzle{ extendedDynamicState3ViewportSwizzle_ } + , extendedDynamicState3CoverageToColorEnable{ extendedDynamicState3CoverageToColorEnable_ } + , extendedDynamicState3CoverageToColorLocation{ extendedDynamicState3CoverageToColorLocation_ } + , extendedDynamicState3CoverageModulationMode{ extendedDynamicState3CoverageModulationMode_ } + , extendedDynamicState3CoverageModulationTableEnable{ extendedDynamicState3CoverageModulationTableEnable_ } + , extendedDynamicState3CoverageModulationTable{ extendedDynamicState3CoverageModulationTable_ } + , extendedDynamicState3CoverageReductionMode{ extendedDynamicState3CoverageReductionMode_ } + , extendedDynamicState3RepresentativeFragmentTestEnable{ extendedDynamicState3RepresentativeFragmentTestEnable_ } + , extendedDynamicState3ShadingRateImageEnable{ extendedDynamicState3ShadingRateImageEnable_ } { } @@ -63905,8 +64614,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceExtendedDynamicState3PropertiesEXT( VULKAN_HPP_NAMESPACE::Bool32 dynamicPrimitiveTopologyUnrestricted_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , dynamicPrimitiveTopologyUnrestricted( dynamicPrimitiveTopologyUnrestricted_ ) + : pNext{ pNext_ } + , dynamicPrimitiveTopologyUnrestricted{ dynamicPrimitiveTopologyUnrestricted_ } { } @@ -64004,8 +64713,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceExtendedDynamicStateFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , extendedDynamicState( extendedDynamicState_ ) + : pNext{ pNext_ } + , extendedDynamicState{ extendedDynamicState_ } { } @@ -64103,8 +64812,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 extendedSparseAddressSpace_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , extendedSparseAddressSpace( extendedSparseAddressSpace_ ) + : pNext{ pNext_ } + , extendedSparseAddressSpace{ extendedSparseAddressSpace_ } { } @@ -64205,10 +64914,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::ImageUsageFlags extendedSparseImageUsageFlags_ = {}, VULKAN_HPP_NAMESPACE::BufferUsageFlags extendedSparseBufferUsageFlags_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , extendedSparseAddressSpaceSize( extendedSparseAddressSpaceSize_ ) - , extendedSparseImageUsageFlags( extendedSparseImageUsageFlags_ ) - , extendedSparseBufferUsageFlags( extendedSparseBufferUsageFlags_ ) + : pNext{ pNext_ } + , extendedSparseAddressSpaceSize{ extendedSparseAddressSpaceSize_ } + , extendedSparseImageUsageFlags{ extendedSparseImageUsageFlags_ } + , extendedSparseBufferUsageFlags{ extendedSparseBufferUsageFlags_ } { } @@ -64302,10 +65011,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_ = {}, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , usage( usage_ ) - , handleType( handleType_ ) + : pNext{ pNext_ } + , flags{ flags_ } + , usage{ usage_ } + , handleType{ handleType_ } { } @@ -64423,8 +65132,8 @@ namespace 
VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalFenceInfo( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , handleType( handleType_ ) + : pNext{ pNext_ } + , handleType{ handleType_ } { } @@ -64524,8 +65233,8 @@ namespace VULKAN_HPP_NAMESPACE # if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalFormatResolveFeaturesANDROID( VULKAN_HPP_NAMESPACE::Bool32 externalFormatResolve_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , externalFormatResolve( externalFormatResolve_ ) + : pNext{ pNext_ } + , externalFormatResolve{ externalFormatResolve_ } { } @@ -64629,10 +65338,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::ChromaLocation externalFormatResolveChromaOffsetX_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven, VULKAN_HPP_NAMESPACE::ChromaLocation externalFormatResolveChromaOffsetY_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , nullColorAttachmentWithExternalFormatResolve( nullColorAttachmentWithExternalFormatResolve_ ) - , externalFormatResolveChromaOffsetX( externalFormatResolveChromaOffsetX_ ) - , externalFormatResolveChromaOffsetY( externalFormatResolveChromaOffsetY_ ) + : pNext{ pNext_ } + , nullColorAttachmentWithExternalFormatResolve{ nullColorAttachmentWithExternalFormatResolve_ } + , externalFormatResolveChromaOffsetX{ externalFormatResolveChromaOffsetX_ } + , externalFormatResolveChromaOffsetY{ externalFormatResolveChromaOffsetY_ } { } @@ -64727,8 +65436,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalImageFormatInfo( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , handleType( handleType_ ) + : pNext{ pNext_ } + , handleType{ handleType_ } { } @@ -64827,8 +65536,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalMemoryHostPropertiesEXT( VULKAN_HPP_NAMESPACE::DeviceSize minImportedHostPointerAlignment_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , minImportedHostPointerAlignment( minImportedHostPointerAlignment_ ) + : pNext{ pNext_ } + , minImportedHostPointerAlignment{ minImportedHostPointerAlignment_ } { } @@ -64911,8 +65620,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalMemoryRDMAFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 externalMemoryRDMA_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , externalMemoryRDMA( externalMemoryRDMA_ ) + : pNext{ pNext_ } + , externalMemoryRDMA{ externalMemoryRDMA_ } { } @@ -65010,8 +65719,8 @@ namespace VULKAN_HPP_NAMESPACE # if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX( VULKAN_HPP_NAMESPACE::Bool32 screenBufferImport_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , screenBufferImport( screenBufferImport_ ) + : pNext{ pNext_ } + , screenBufferImport{ screenBufferImport_ } { } @@ -65112,8 +65821,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalSemaphoreInfo( 
VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , handleType( handleType_ ) + : pNext{ pNext_ } + , handleType{ handleType_ } { } @@ -65213,9 +65922,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR PhysicalDeviceFaultFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 deviceFault_ = {}, VULKAN_HPP_NAMESPACE::Bool32 deviceFaultVendorBinary_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , deviceFault( deviceFault_ ) - , deviceFaultVendorBinary( deviceFaultVendorBinary_ ) + : pNext{ pNext_ } + , deviceFault{ deviceFault_ } + , deviceFaultVendorBinary{ deviceFaultVendorBinary_ } { } @@ -65319,8 +66028,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceFeatures2( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures features_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , features( features_ ) + : pNext{ pNext_ } + , features{ features_ } { } @@ -65435,24 +66144,24 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat32_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat64_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , denormBehaviorIndependence( denormBehaviorIndependence_ ) - , roundingModeIndependence( roundingModeIndependence_ ) - , shaderSignedZeroInfNanPreserveFloat16( shaderSignedZeroInfNanPreserveFloat16_ ) - , shaderSignedZeroInfNanPreserveFloat32( shaderSignedZeroInfNanPreserveFloat32_ ) - , shaderSignedZeroInfNanPreserveFloat64( shaderSignedZeroInfNanPreserveFloat64_ ) - , shaderDenormPreserveFloat16( shaderDenormPreserveFloat16_ ) - , shaderDenormPreserveFloat32( shaderDenormPreserveFloat32_ ) - , shaderDenormPreserveFloat64( shaderDenormPreserveFloat64_ ) - , shaderDenormFlushToZeroFloat16( shaderDenormFlushToZeroFloat16_ ) - , shaderDenormFlushToZeroFloat32( shaderDenormFlushToZeroFloat32_ ) - , shaderDenormFlushToZeroFloat64( shaderDenormFlushToZeroFloat64_ ) - , shaderRoundingModeRTEFloat16( shaderRoundingModeRTEFloat16_ ) - , shaderRoundingModeRTEFloat32( shaderRoundingModeRTEFloat32_ ) - , shaderRoundingModeRTEFloat64( shaderRoundingModeRTEFloat64_ ) - , shaderRoundingModeRTZFloat16( shaderRoundingModeRTZFloat16_ ) - , shaderRoundingModeRTZFloat32( shaderRoundingModeRTZFloat32_ ) - , shaderRoundingModeRTZFloat64( shaderRoundingModeRTZFloat64_ ) + : pNext{ pNext_ } + , denormBehaviorIndependence{ denormBehaviorIndependence_ } + , roundingModeIndependence{ roundingModeIndependence_ } + , shaderSignedZeroInfNanPreserveFloat16{ shaderSignedZeroInfNanPreserveFloat16_ } + , shaderSignedZeroInfNanPreserveFloat32{ shaderSignedZeroInfNanPreserveFloat32_ } + , shaderSignedZeroInfNanPreserveFloat64{ shaderSignedZeroInfNanPreserveFloat64_ } + , shaderDenormPreserveFloat16{ shaderDenormPreserveFloat16_ } + , shaderDenormPreserveFloat32{ shaderDenormPreserveFloat32_ } + , shaderDenormPreserveFloat64{ shaderDenormPreserveFloat64_ } + , shaderDenormFlushToZeroFloat16{ shaderDenormFlushToZeroFloat16_ } + , shaderDenormFlushToZeroFloat32{ shaderDenormFlushToZeroFloat32_ } + , shaderDenormFlushToZeroFloat64{ shaderDenormFlushToZeroFloat64_ } + , shaderRoundingModeRTEFloat16{ shaderRoundingModeRTEFloat16_ } + , shaderRoundingModeRTEFloat32{ shaderRoundingModeRTEFloat32_ } + , shaderRoundingModeRTEFloat64{ shaderRoundingModeRTEFloat64_ } + , 
shaderRoundingModeRTZFloat16{ shaderRoundingModeRTZFloat16_ } + , shaderRoundingModeRTZFloat32{ shaderRoundingModeRTZFloat32_ } + , shaderRoundingModeRTZFloat64{ shaderRoundingModeRTZFloat64_ } { } @@ -65599,8 +66308,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMap2FeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDeferred_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , fragmentDensityMapDeferred( fragmentDensityMapDeferred_ ) + : pNext{ pNext_ } + , fragmentDensityMapDeferred{ fragmentDensityMapDeferred_ } { } @@ -65700,11 +66409,11 @@ namespace VULKAN_HPP_NAMESPACE uint32_t maxSubsampledArrayLayers_ = {}, uint32_t maxDescriptorSetSubsampledSamplers_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , subsampledLoads( subsampledLoads_ ) - , subsampledCoarseReconstructionEarlyAccess( subsampledCoarseReconstructionEarlyAccess_ ) - , maxSubsampledArrayLayers( maxSubsampledArrayLayers_ ) - , maxDescriptorSetSubsampledSamplers( maxDescriptorSetSubsampledSamplers_ ) + : pNext{ pNext_ } + , subsampledLoads{ subsampledLoads_ } + , subsampledCoarseReconstructionEarlyAccess{ subsampledCoarseReconstructionEarlyAccess_ } + , maxSubsampledArrayLayers{ maxSubsampledArrayLayers_ } + , maxDescriptorSetSubsampledSamplers{ maxDescriptorSetSubsampledSamplers_ } { } @@ -65799,10 +66508,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDynamic_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapNonSubsampledImages_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , fragmentDensityMap( fragmentDensityMap_ ) - , fragmentDensityMapDynamic( fragmentDensityMapDynamic_ ) - , fragmentDensityMapNonSubsampledImages( fragmentDensityMapNonSubsampledImages_ ) + : pNext{ pNext_ } + , fragmentDensityMap{ fragmentDensityMap_ } + , fragmentDensityMapDynamic{ fragmentDensityMapDynamic_ } + , fragmentDensityMapNonSubsampledImages{ fragmentDensityMapNonSubsampledImages_ } { } @@ -65921,8 +66630,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM( VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapOffset_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , fragmentDensityMapOffset( fragmentDensityMapOffset_ ) + : pNext{ pNext_ } + , fragmentDensityMapOffset{ fragmentDensityMapOffset_ } { } @@ -66021,8 +66730,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM( VULKAN_HPP_NAMESPACE::Extent2D fragmentDensityOffsetGranularity_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , fragmentDensityOffsetGranularity( fragmentDensityOffsetGranularity_ ) + : pNext{ pNext_ } + , fragmentDensityOffsetGranularity{ fragmentDensityOffsetGranularity_ } { } @@ -66108,10 +66817,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Extent2D maxFragmentDensityTexelSize_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityInvocations_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , minFragmentDensityTexelSize( minFragmentDensityTexelSize_ ) - , maxFragmentDensityTexelSize( maxFragmentDensityTexelSize_ ) - , fragmentDensityInvocations( fragmentDensityInvocations_ ) + : pNext{ pNext_ } + , minFragmentDensityTexelSize{ minFragmentDensityTexelSize_ } + , 
maxFragmentDensityTexelSize{ maxFragmentDensityTexelSize_ } + , fragmentDensityInvocations{ fragmentDensityInvocations_ } { } @@ -66201,8 +66910,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShaderBarycentricFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderBarycentric_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , fragmentShaderBarycentric( fragmentShaderBarycentric_ ) + : pNext{ pNext_ } + , fragmentShaderBarycentric{ fragmentShaderBarycentric_ } { } @@ -66304,8 +67013,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShaderBarycentricPropertiesKHR( VULKAN_HPP_NAMESPACE::Bool32 triStripVertexOrderIndependentOfProvokingVertex_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , triStripVertexOrderIndependentOfProvokingVertex( triStripVertexOrderIndependentOfProvokingVertex_ ) + : pNext{ pNext_ } + , triStripVertexOrderIndependentOfProvokingVertex{ triStripVertexOrderIndependentOfProvokingVertex_ } { } @@ -66392,10 +67101,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderPixelInterlock_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderShadingRateInterlock_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , fragmentShaderSampleInterlock( fragmentShaderSampleInterlock_ ) - , fragmentShaderPixelInterlock( fragmentShaderPixelInterlock_ ) - , fragmentShaderShadingRateInterlock( fragmentShaderShadingRateInterlock_ ) + : pNext{ pNext_ } + , fragmentShaderSampleInterlock{ fragmentShaderSampleInterlock_ } + , fragmentShaderPixelInterlock{ fragmentShaderPixelInterlock_ } + , fragmentShaderShadingRateInterlock{ fragmentShaderShadingRateInterlock_ } { } @@ -66517,10 +67226,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Bool32 supersampleFragmentShadingRates_ = {}, VULKAN_HPP_NAMESPACE::Bool32 noInvocationFragmentShadingRates_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , fragmentShadingRateEnums( fragmentShadingRateEnums_ ) - , supersampleFragmentShadingRates( supersampleFragmentShadingRates_ ) - , noInvocationFragmentShadingRates( noInvocationFragmentShadingRates_ ) + : pNext{ pNext_ } + , fragmentShadingRateEnums{ fragmentShadingRateEnums_ } + , supersampleFragmentShadingRates{ supersampleFragmentShadingRates_ } + , noInvocationFragmentShadingRates{ noInvocationFragmentShadingRates_ } { } @@ -66641,8 +67350,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateEnumsPropertiesNV( VULKAN_HPP_NAMESPACE::SampleCountFlagBits maxFragmentShadingRateInvocationCount_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , maxFragmentShadingRateInvocationCount( maxFragmentShadingRateInvocationCount_ ) + : pNext{ pNext_ } + , maxFragmentShadingRateInvocationCount{ maxFragmentShadingRateInvocationCount_ } { } @@ -66743,10 +67452,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Bool32 primitiveFragmentShadingRate_ = {}, VULKAN_HPP_NAMESPACE::Bool32 attachmentFragmentShadingRate_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , pipelineFragmentShadingRate( pipelineFragmentShadingRate_ ) - , primitiveFragmentShadingRate( primitiveFragmentShadingRate_ ) - , attachmentFragmentShadingRate( attachmentFragmentShadingRate_ ) + : pNext{ pNext_ } + , pipelineFragmentShadingRate{ pipelineFragmentShadingRate_ } + , 
primitiveFragmentShadingRate{ primitiveFragmentShadingRate_ } + , attachmentFragmentShadingRate{ attachmentFragmentShadingRate_ } { } @@ -66865,9 +67574,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateKHR( VULKAN_HPP_NAMESPACE::SampleCountFlags sampleCounts_ = {}, VULKAN_HPP_NAMESPACE::Extent2D fragmentSize_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , sampleCounts( sampleCounts_ ) - , fragmentSize( fragmentSize_ ) + : pNext{ pNext_ } + , sampleCounts{ sampleCounts_ } + , fragmentSize{ fragmentSize_ } { } @@ -66968,24 +67677,24 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithCustomSampleLocations_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateStrictMultiplyCombiner_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , minFragmentShadingRateAttachmentTexelSize( minFragmentShadingRateAttachmentTexelSize_ ) - , maxFragmentShadingRateAttachmentTexelSize( maxFragmentShadingRateAttachmentTexelSize_ ) - , maxFragmentShadingRateAttachmentTexelSizeAspectRatio( maxFragmentShadingRateAttachmentTexelSizeAspectRatio_ ) - , primitiveFragmentShadingRateWithMultipleViewports( primitiveFragmentShadingRateWithMultipleViewports_ ) - , layeredShadingRateAttachments( layeredShadingRateAttachments_ ) - , fragmentShadingRateNonTrivialCombinerOps( fragmentShadingRateNonTrivialCombinerOps_ ) - , maxFragmentSize( maxFragmentSize_ ) - , maxFragmentSizeAspectRatio( maxFragmentSizeAspectRatio_ ) - , maxFragmentShadingRateCoverageSamples( maxFragmentShadingRateCoverageSamples_ ) - , maxFragmentShadingRateRasterizationSamples( maxFragmentShadingRateRasterizationSamples_ ) - , fragmentShadingRateWithShaderDepthStencilWrites( fragmentShadingRateWithShaderDepthStencilWrites_ ) - , fragmentShadingRateWithSampleMask( fragmentShadingRateWithSampleMask_ ) - , fragmentShadingRateWithShaderSampleMask( fragmentShadingRateWithShaderSampleMask_ ) - , fragmentShadingRateWithConservativeRasterization( fragmentShadingRateWithConservativeRasterization_ ) - , fragmentShadingRateWithFragmentShaderInterlock( fragmentShadingRateWithFragmentShaderInterlock_ ) - , fragmentShadingRateWithCustomSampleLocations( fragmentShadingRateWithCustomSampleLocations_ ) - , fragmentShadingRateStrictMultiplyCombiner( fragmentShadingRateStrictMultiplyCombiner_ ) + : pNext{ pNext_ } + , minFragmentShadingRateAttachmentTexelSize{ minFragmentShadingRateAttachmentTexelSize_ } + , maxFragmentShadingRateAttachmentTexelSize{ maxFragmentShadingRateAttachmentTexelSize_ } + , maxFragmentShadingRateAttachmentTexelSizeAspectRatio{ maxFragmentShadingRateAttachmentTexelSizeAspectRatio_ } + , primitiveFragmentShadingRateWithMultipleViewports{ primitiveFragmentShadingRateWithMultipleViewports_ } + , layeredShadingRateAttachments{ layeredShadingRateAttachments_ } + , fragmentShadingRateNonTrivialCombinerOps{ fragmentShadingRateNonTrivialCombinerOps_ } + , maxFragmentSize{ maxFragmentSize_ } + , maxFragmentSizeAspectRatio{ maxFragmentSizeAspectRatio_ } + , maxFragmentShadingRateCoverageSamples{ maxFragmentShadingRateCoverageSamples_ } + , maxFragmentShadingRateRasterizationSamples{ maxFragmentShadingRateRasterizationSamples_ } + , fragmentShadingRateWithShaderDepthStencilWrites{ fragmentShadingRateWithShaderDepthStencilWrites_ } + , fragmentShadingRateWithSampleMask{ fragmentShadingRateWithSampleMask_ } + , fragmentShadingRateWithShaderSampleMask{ fragmentShadingRateWithShaderSampleMask_ } + , 
fragmentShadingRateWithConservativeRasterization{ fragmentShadingRateWithConservativeRasterization_ } + , fragmentShadingRateWithFragmentShaderInterlock{ fragmentShadingRateWithFragmentShaderInterlock_ } + , fragmentShadingRateWithCustomSampleLocations{ fragmentShadingRateWithCustomSampleLocations_ } + , fragmentShadingRateStrictMultiplyCombiner{ fragmentShadingRateStrictMultiplyCombiner_ } { } @@ -67135,8 +67844,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceFrameBoundaryFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 frameBoundary_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , frameBoundary( frameBoundary_ ) + : pNext{ pNext_ } + , frameBoundary{ frameBoundary_ } { } @@ -67232,8 +67941,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceGlobalPriorityQueryFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 globalPriorityQuery_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , globalPriorityQuery( globalPriorityQuery_ ) + : pNext{ pNext_ } + , globalPriorityQuery{ globalPriorityQuery_ } { } @@ -67332,8 +68041,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 graphicsPipelineLibrary_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , graphicsPipelineLibrary( graphicsPipelineLibrary_ ) + : pNext{ pNext_ } + , graphicsPipelineLibrary{ graphicsPipelineLibrary_ } { } @@ -67433,9 +68142,9 @@ namespace VULKAN_HPP_NAMESPACE PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT( VULKAN_HPP_NAMESPACE::Bool32 graphicsPipelineLibraryFastLinking_ = {}, VULKAN_HPP_NAMESPACE::Bool32 graphicsPipelineLibraryIndependentInterpolationDecoration_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , graphicsPipelineLibraryFastLinking( graphicsPipelineLibraryFastLinking_ ) - , graphicsPipelineLibraryIndependentInterpolationDecoration( graphicsPipelineLibraryIndependentInterpolationDecoration_ ) + : pNext{ pNext_ } + , graphicsPipelineLibraryFastLinking{ graphicsPipelineLibraryFastLinking_ } + , graphicsPipelineLibraryIndependentInterpolationDecoration{ graphicsPipelineLibraryIndependentInterpolationDecoration_ } { } @@ -67546,10 +68255,10 @@ namespace VULKAN_HPP_NAMESPACE std::array const & physicalDevices_ = {}, VULKAN_HPP_NAMESPACE::Bool32 subsetAllocation_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , physicalDeviceCount( physicalDeviceCount_ ) - , physicalDevices( physicalDevices_ ) - , subsetAllocation( subsetAllocation_ ) + : pNext{ pNext_ } + , physicalDeviceCount{ physicalDeviceCount_ } + , physicalDevices{ physicalDevices_ } + , subsetAllocation{ subsetAllocation_ } { } @@ -67560,19 +68269,6 @@ namespace VULKAN_HPP_NAMESPACE { } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - PhysicalDeviceGroupProperties( VULKAN_HPP_NAMESPACE::ArrayProxy const & physicalDevices_, - VULKAN_HPP_NAMESPACE::Bool32 subsetAllocation_ = {}, - void * pNext_ = nullptr ) - : pNext( pNext_ ) - , physicalDeviceCount( std::min( static_cast( physicalDevices_.size() ), VK_MAX_DEVICE_GROUP_SIZE ) ) - , subsetAllocation( subsetAllocation_ ) - { - VULKAN_HPP_ASSERT( physicalDevices_.size() < VK_MAX_DEVICE_GROUP_SIZE ); - memcpy( physicalDevices, physicalDevices_.data(), physicalDeviceCount * sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevice ) ); - } 
-# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - PhysicalDeviceGroupProperties & operator=( PhysicalDeviceGroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ @@ -67666,8 +68362,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceHostImageCopyFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 hostImageCopy_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , hostImageCopy( hostImageCopy_ ) + : pNext{ pNext_ } + , hostImageCopy{ hostImageCopy_ } { } @@ -67768,13 +68464,13 @@ namespace VULKAN_HPP_NAMESPACE std::array const & optimalTilingLayoutUUID_ = {}, VULKAN_HPP_NAMESPACE::Bool32 identicalMemoryTypeRequirements_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , copySrcLayoutCount( copySrcLayoutCount_ ) - , pCopySrcLayouts( pCopySrcLayouts_ ) - , copyDstLayoutCount( copyDstLayoutCount_ ) - , pCopyDstLayouts( pCopyDstLayouts_ ) - , optimalTilingLayoutUUID( optimalTilingLayoutUUID_ ) - , identicalMemoryTypeRequirements( identicalMemoryTypeRequirements_ ) + : pNext{ pNext_ } + , copySrcLayoutCount{ copySrcLayoutCount_ } + , pCopySrcLayouts{ pCopySrcLayouts_ } + , copyDstLayoutCount{ copyDstLayoutCount_ } + , pCopyDstLayouts{ pCopyDstLayouts_ } + , optimalTilingLayoutUUID{ optimalTilingLayoutUUID_ } + , identicalMemoryTypeRequirements{ identicalMemoryTypeRequirements_ } { } @@ -67955,8 +68651,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceHostQueryResetFeatures( VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , hostQueryReset( hostQueryReset_ ) + : pNext{ pNext_ } + , hostQueryReset{ hostQueryReset_ } { } @@ -68058,12 +68754,12 @@ namespace VULKAN_HPP_NAMESPACE uint32_t deviceNodeMask_ = {}, VULKAN_HPP_NAMESPACE::Bool32 deviceLUIDValid_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , deviceUUID( deviceUUID_ ) - , driverUUID( driverUUID_ ) - , deviceLUID( deviceLUID_ ) - , deviceNodeMask( deviceNodeMask_ ) - , deviceLUIDValid( deviceLUIDValid_ ) + : pNext{ pNext_ } + , deviceUUID{ deviceUUID_ } + , driverUUID{ driverUUID_ } + , deviceLUID{ deviceLUID_ } + , deviceNodeMask{ deviceNodeMask_ } + , deviceLUIDValid{ deviceLUIDValid_ } { } @@ -68159,9 +68855,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR PhysicalDeviceImage2DViewOf3DFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 image2DViewOf3D_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sampler2DViewOf3D_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , image2DViewOf3D( image2DViewOf3D_ ) - , sampler2DViewOf3D( sampler2DViewOf3D_ ) + : pNext{ pNext_ } + , image2DViewOf3D{ image2DViewOf3D_ } + , sampler2DViewOf3D{ sampler2DViewOf3D_ } { } @@ -68255,6 +68951,205 @@ namespace VULKAN_HPP_NAMESPACE using Type = PhysicalDeviceImage2DViewOf3DFeaturesEXT; }; + struct PhysicalDeviceImageAlignmentControlFeaturesMESA + { + using NativeType = VkPhysicalDeviceImageAlignmentControlFeaturesMESA; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageAlignmentControlFeaturesMESA; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceImageAlignmentControlFeaturesMESA( VULKAN_HPP_NAMESPACE::Bool32 imageAlignmentControl_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ 
pNext_ } + , imageAlignmentControl{ imageAlignmentControl_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceImageAlignmentControlFeaturesMESA( PhysicalDeviceImageAlignmentControlFeaturesMESA const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceImageAlignmentControlFeaturesMESA( VkPhysicalDeviceImageAlignmentControlFeaturesMESA const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceImageAlignmentControlFeaturesMESA( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceImageAlignmentControlFeaturesMESA & operator=( PhysicalDeviceImageAlignmentControlFeaturesMESA const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceImageAlignmentControlFeaturesMESA & operator=( VkPhysicalDeviceImageAlignmentControlFeaturesMESA const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageAlignmentControlFeaturesMESA & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageAlignmentControlFeaturesMESA & + setImageAlignmentControl( VULKAN_HPP_NAMESPACE::Bool32 imageAlignmentControl_ ) VULKAN_HPP_NOEXCEPT + { + imageAlignmentControl = imageAlignmentControl_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceImageAlignmentControlFeaturesMESA const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceImageAlignmentControlFeaturesMESA &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, imageAlignmentControl ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceImageAlignmentControlFeaturesMESA const & ) const = default; +#else + bool operator==( PhysicalDeviceImageAlignmentControlFeaturesMESA const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageAlignmentControl == rhs.imageAlignmentControl ); +# endif + } + + bool operator!=( PhysicalDeviceImageAlignmentControlFeaturesMESA const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageAlignmentControlFeaturesMESA; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 imageAlignmentControl = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceImageAlignmentControlFeaturesMESA; + }; + + struct PhysicalDeviceImageAlignmentControlPropertiesMESA + { + using NativeType = VkPhysicalDeviceImageAlignmentControlPropertiesMESA; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageAlignmentControlPropertiesMESA; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceImageAlignmentControlPropertiesMESA( uint32_t supportedImageAlignmentMask_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , supportedImageAlignmentMask{ supportedImageAlignmentMask_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceImageAlignmentControlPropertiesMESA( 
PhysicalDeviceImageAlignmentControlPropertiesMESA const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceImageAlignmentControlPropertiesMESA( VkPhysicalDeviceImageAlignmentControlPropertiesMESA const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceImageAlignmentControlPropertiesMESA( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceImageAlignmentControlPropertiesMESA & + operator=( PhysicalDeviceImageAlignmentControlPropertiesMESA const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceImageAlignmentControlPropertiesMESA & operator=( VkPhysicalDeviceImageAlignmentControlPropertiesMESA const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageAlignmentControlPropertiesMESA & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageAlignmentControlPropertiesMESA & + setSupportedImageAlignmentMask( uint32_t supportedImageAlignmentMask_ ) VULKAN_HPP_NOEXCEPT + { + supportedImageAlignmentMask = supportedImageAlignmentMask_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceImageAlignmentControlPropertiesMESA const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceImageAlignmentControlPropertiesMESA &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, supportedImageAlignmentMask ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceImageAlignmentControlPropertiesMESA const & ) const = default; +#else + bool operator==( PhysicalDeviceImageAlignmentControlPropertiesMESA const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( supportedImageAlignmentMask == rhs.supportedImageAlignmentMask ); +# endif + } + + bool operator!=( PhysicalDeviceImageAlignmentControlPropertiesMESA const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageAlignmentControlPropertiesMESA; + void * pNext = {}; + uint32_t supportedImageAlignmentMask = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceImageAlignmentControlPropertiesMESA; + }; + struct PhysicalDeviceImageCompressionControlFeaturesEXT { using NativeType = VkPhysicalDeviceImageCompressionControlFeaturesEXT; @@ -68265,8 +69160,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceImageCompressionControlFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 imageCompressionControl_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , imageCompressionControl( imageCompressionControl_ ) + : pNext{ pNext_ } + , imageCompressionControl{ imageCompressionControl_ } { } @@ -68364,8 +69259,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 imageCompressionControlSwapchain_ 
= {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , imageCompressionControlSwapchain( imageCompressionControlSwapchain_ ) + : pNext{ pNext_ } + , imageCompressionControlSwapchain{ imageCompressionControlSwapchain_ } { } @@ -68470,11 +69365,11 @@ namespace VULKAN_HPP_NAMESPACE uint32_t queueFamilyIndexCount_ = {}, const uint32_t * pQueueFamilyIndices_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , drmFormatModifier( drmFormatModifier_ ) - , sharingMode( sharingMode_ ) - , queueFamilyIndexCount( queueFamilyIndexCount_ ) - , pQueueFamilyIndices( pQueueFamilyIndices_ ) + : pNext{ pNext_ } + , drmFormatModifier{ drmFormatModifier_ } + , sharingMode{ sharingMode_ } + , queueFamilyIndexCount{ queueFamilyIndexCount_ } + , pQueueFamilyIndices{ pQueueFamilyIndices_ } { } @@ -68625,12 +69520,12 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {}, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , format( format_ ) - , type( type_ ) - , tiling( tiling_ ) - , usage( usage_ ) - , flags( flags_ ) + : pNext{ pNext_ } + , format{ format_ } + , type{ type_ } + , tiling{ tiling_ } + , usage{ usage_ } + , flags{ flags_ } { } @@ -68763,8 +69658,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceImageProcessing2FeaturesQCOM( VULKAN_HPP_NAMESPACE::Bool32 textureBlockMatch2_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , textureBlockMatch2( textureBlockMatch2_ ) + : pNext{ pNext_ } + , textureBlockMatch2{ textureBlockMatch2_ } { } @@ -68861,8 +69756,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceImageProcessing2PropertiesQCOM( VULKAN_HPP_NAMESPACE::Extent2D maxBlockMatchWindow_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , maxBlockMatchWindow( maxBlockMatchWindow_ ) + : pNext{ pNext_ } + , maxBlockMatchWindow{ maxBlockMatchWindow_ } { } @@ -68946,10 +69841,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Bool32 textureBoxFilter_ = {}, VULKAN_HPP_NAMESPACE::Bool32 textureBlockMatch_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , textureSampleWeighted( textureSampleWeighted_ ) - , textureBoxFilter( textureBoxFilter_ ) - , textureBlockMatch( textureBlockMatch_ ) + : pNext{ pNext_ } + , textureSampleWeighted{ textureSampleWeighted_ } + , textureBoxFilter{ textureBoxFilter_ } + , textureBlockMatch{ textureBlockMatch_ } { } @@ -69070,11 +69965,11 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Extent2D maxBlockMatchRegion_ = {}, VULKAN_HPP_NAMESPACE::Extent2D maxBoxFilterBlockSize_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , maxWeightFilterPhases( maxWeightFilterPhases_ ) - , maxWeightFilterDimension( maxWeightFilterDimension_ ) - , maxBlockMatchRegion( maxBlockMatchRegion_ ) - , maxBoxFilterBlockSize( maxBoxFilterBlockSize_ ) + : pNext{ pNext_ } + , maxWeightFilterPhases{ maxWeightFilterPhases_ } + , maxWeightFilterDimension{ maxWeightFilterDimension_ } + , maxBlockMatchRegion{ maxBlockMatchRegion_ } + , maxBoxFilterBlockSize{ maxBoxFilterBlockSize_ } { } @@ -69166,8 +70061,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceImageRobustnessFeatures( VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess_ = {}, void * 
pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , robustImageAccess( robustImageAccess_ ) + : pNext{ pNext_ } + , robustImageAccess{ robustImageAccess_ } { } @@ -69265,8 +70160,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceImageSlicedViewOf3DFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 imageSlicedViewOf3D_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , imageSlicedViewOf3D( imageSlicedViewOf3D_ ) + : pNext{ pNext_ } + , imageSlicedViewOf3D{ imageSlicedViewOf3D_ } { } @@ -69364,8 +70259,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR PhysicalDeviceImageViewImageFormatInfoEXT( VULKAN_HPP_NAMESPACE::ImageViewType imageViewType_ = VULKAN_HPP_NAMESPACE::ImageViewType::e1D, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , imageViewType( imageViewType_ ) + : pNext{ pNext_ } + , imageViewType{ imageViewType_ } { } @@ -69461,8 +70356,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceImageViewMinLodFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 minLod_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , minLod( minLod_ ) + : pNext{ pNext_ } + , minLod{ minLod_ } { } @@ -69558,8 +70453,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceImagelessFramebufferFeatures( VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , imagelessFramebuffer( imagelessFramebuffer_ ) + : pNext{ pNext_ } + , imagelessFramebuffer{ imagelessFramebuffer_ } { } @@ -69658,8 +70553,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceIndexTypeUint8FeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 indexTypeUint8_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , indexTypeUint8( indexTypeUint8_ ) + : pNext{ pNext_ } + , indexTypeUint8{ indexTypeUint8_ } { } @@ -69757,8 +70652,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceInheritedViewportScissorFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 inheritedViewportScissor2D_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , inheritedViewportScissor2D( inheritedViewportScissor2D_ ) + : pNext{ pNext_ } + , inheritedViewportScissor2D{ inheritedViewportScissor2D_ } { } @@ -69857,9 +70752,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR PhysicalDeviceInlineUniformBlockFeatures( VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , inlineUniformBlock( inlineUniformBlock_ ) - , descriptorBindingInlineUniformBlockUpdateAfterBind( descriptorBindingInlineUniformBlockUpdateAfterBind_ ) + : pNext{ pNext_ } + , inlineUniformBlock{ inlineUniformBlock_ } + , descriptorBindingInlineUniformBlockUpdateAfterBind{ descriptorBindingInlineUniformBlockUpdateAfterBind_ } { } @@ -69971,12 +70866,12 @@ namespace VULKAN_HPP_NAMESPACE uint32_t maxDescriptorSetInlineUniformBlocks_ = {}, uint32_t maxDescriptorSetUpdateAfterBindInlineUniformBlocks_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , maxInlineUniformBlockSize( maxInlineUniformBlockSize_ ) - , 
maxPerStageDescriptorInlineUniformBlocks( maxPerStageDescriptorInlineUniformBlocks_ ) - , maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks( maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks_ ) - , maxDescriptorSetInlineUniformBlocks( maxDescriptorSetInlineUniformBlocks_ ) - , maxDescriptorSetUpdateAfterBindInlineUniformBlocks( maxDescriptorSetUpdateAfterBindInlineUniformBlocks_ ) + : pNext{ pNext_ } + , maxInlineUniformBlockSize{ maxInlineUniformBlockSize_ } + , maxPerStageDescriptorInlineUniformBlocks{ maxPerStageDescriptorInlineUniformBlocks_ } + , maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks{ maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks_ } + , maxDescriptorSetInlineUniformBlocks{ maxDescriptorSetInlineUniformBlocks_ } + , maxDescriptorSetUpdateAfterBindInlineUniformBlocks{ maxDescriptorSetUpdateAfterBindInlineUniformBlocks_ } { } @@ -70080,8 +70975,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceInvocationMaskFeaturesHUAWEI( VULKAN_HPP_NAMESPACE::Bool32 invocationMask_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , invocationMask( invocationMask_ ) + : pNext{ pNext_ } + , invocationMask{ invocationMask_ } { } @@ -70167,185 +71062,231 @@ namespace VULKAN_HPP_NAMESPACE using Type = PhysicalDeviceInvocationMaskFeaturesHUAWEI; }; - struct PhysicalDeviceLayeredDriverPropertiesMSFT + struct PhysicalDeviceLayeredApiPropertiesKHR { - using NativeType = VkPhysicalDeviceLayeredDriverPropertiesMSFT; + using NativeType = VkPhysicalDeviceLayeredApiPropertiesKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceLayeredDriverPropertiesMSFT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceLayeredApiPropertiesKHR; #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceLayeredDriverPropertiesMSFT( - VULKAN_HPP_NAMESPACE::LayeredDriverUnderlyingApiMSFT underlyingAPI_ = VULKAN_HPP_NAMESPACE::LayeredDriverUnderlyingApiMSFT::eNone, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , underlyingAPI( underlyingAPI_ ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLayeredApiPropertiesKHR( + uint32_t vendorID_ = {}, + uint32_t deviceID_ = {}, + VULKAN_HPP_NAMESPACE::PhysicalDeviceLayeredApiKHR layeredAPI_ = VULKAN_HPP_NAMESPACE::PhysicalDeviceLayeredApiKHR::eVulkan, + std::array const & deviceName_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , vendorID{ vendorID_ } + , deviceID{ deviceID_ } + , layeredAPI{ layeredAPI_ } + , deviceName{ deviceName_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceLayeredDriverPropertiesMSFT( PhysicalDeviceLayeredDriverPropertiesMSFT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLayeredApiPropertiesKHR( PhysicalDeviceLayeredApiPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceLayeredDriverPropertiesMSFT( VkPhysicalDeviceLayeredDriverPropertiesMSFT const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceLayeredDriverPropertiesMSFT( *reinterpret_cast( &rhs ) ) + PhysicalDeviceLayeredApiPropertiesKHR( VkPhysicalDeviceLayeredApiPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceLayeredApiPropertiesKHR( *reinterpret_cast( &rhs ) ) { } - PhysicalDeviceLayeredDriverPropertiesMSFT & operator=( PhysicalDeviceLayeredDriverPropertiesMSFT const & rhs ) VULKAN_HPP_NOEXCEPT = 
default; + PhysicalDeviceLayeredApiPropertiesKHR & operator=( PhysicalDeviceLayeredApiPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceLayeredDriverPropertiesMSFT & operator=( VkPhysicalDeviceLayeredDriverPropertiesMSFT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceLayeredApiPropertiesKHR & operator=( VkPhysicalDeviceLayeredApiPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkPhysicalDeviceLayeredDriverPropertiesMSFT const &() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceLayeredApiPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDeviceLayeredDriverPropertiesMSFT &() VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceLayeredApiPropertiesKHR &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, underlyingAPI ); + return std::tie( sType, pNext, vendorID, deviceID, layeredAPI, deviceName ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceLayeredDriverPropertiesMSFT const & ) const = default; + auto operator<=>( PhysicalDeviceLayeredApiPropertiesKHR const & ) const = default; #else - bool operator==( PhysicalDeviceLayeredDriverPropertiesMSFT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceLayeredApiPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( underlyingAPI == rhs.underlyingAPI ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( vendorID == rhs.vendorID ) && ( deviceID == rhs.deviceID ) && + ( layeredAPI == rhs.layeredAPI ) && ( deviceName == rhs.deviceName ); # endif } - bool operator!=( PhysicalDeviceLayeredDriverPropertiesMSFT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceLayeredApiPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceLayeredDriverPropertiesMSFT; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::LayeredDriverUnderlyingApiMSFT underlyingAPI = VULKAN_HPP_NAMESPACE::LayeredDriverUnderlyingApiMSFT::eNone; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceLayeredApiPropertiesKHR; + void * pNext = {}; + uint32_t vendorID = {}; + uint32_t deviceID = {}; + VULKAN_HPP_NAMESPACE::PhysicalDeviceLayeredApiKHR layeredAPI = VULKAN_HPP_NAMESPACE::PhysicalDeviceLayeredApiKHR::eVulkan; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D deviceName = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceLayeredDriverPropertiesMSFT; + using Type = PhysicalDeviceLayeredApiPropertiesKHR; }; - struct PhysicalDeviceLegacyDitheringFeaturesEXT + struct PhysicalDeviceLayeredApiPropertiesListKHR { - using NativeType = VkPhysicalDeviceLegacyDitheringFeaturesEXT; + using NativeType = VkPhysicalDeviceLayeredApiPropertiesListKHR; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = 
StructureType::ePhysicalDeviceLegacyDitheringFeaturesEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceLayeredApiPropertiesListKHR; #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceLegacyDitheringFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 legacyDithering_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , legacyDithering( legacyDithering_ ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLayeredApiPropertiesListKHR( uint32_t layeredApiCount_ = {}, + VULKAN_HPP_NAMESPACE::PhysicalDeviceLayeredApiPropertiesKHR * pLayeredApis_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , layeredApiCount{ layeredApiCount_ } + , pLayeredApis{ pLayeredApis_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceLegacyDitheringFeaturesEXT( PhysicalDeviceLegacyDitheringFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLayeredApiPropertiesListKHR( PhysicalDeviceLayeredApiPropertiesListKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceLegacyDitheringFeaturesEXT( VkPhysicalDeviceLegacyDitheringFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceLegacyDitheringFeaturesEXT( *reinterpret_cast( &rhs ) ) + PhysicalDeviceLayeredApiPropertiesListKHR( VkPhysicalDeviceLayeredApiPropertiesListKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceLayeredApiPropertiesListKHR( *reinterpret_cast( &rhs ) ) { } - PhysicalDeviceLegacyDitheringFeaturesEXT & operator=( PhysicalDeviceLegacyDitheringFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PhysicalDeviceLayeredApiPropertiesListKHR( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & layeredApis_, void * pNext_ = nullptr ) + : pNext( pNext_ ), layeredApiCount( static_cast( layeredApis_.size() ) ), pLayeredApis( layeredApis_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + PhysicalDeviceLayeredApiPropertiesListKHR & operator=( PhysicalDeviceLayeredApiPropertiesListKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceLegacyDitheringFeaturesEXT & operator=( VkPhysicalDeviceLegacyDitheringFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceLayeredApiPropertiesListKHR & operator=( VkPhysicalDeviceLayeredApiPropertiesListKHR const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLegacyDitheringFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLayeredApiPropertiesListKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLegacyDitheringFeaturesEXT & setLegacyDithering( VULKAN_HPP_NAMESPACE::Bool32 legacyDithering_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLayeredApiPropertiesListKHR & setLayeredApiCount( uint32_t layeredApiCount_ ) VULKAN_HPP_NOEXCEPT { - legacyDithering = legacyDithering_; + layeredApiCount = layeredApiCount_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPhysicalDeviceLegacyDitheringFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLayeredApiPropertiesListKHR & + setPLayeredApis( VULKAN_HPP_NAMESPACE::PhysicalDeviceLayeredApiPropertiesKHR * pLayeredApis_ ) 
VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pLayeredApis = pLayeredApis_; + return *this; } - operator VkPhysicalDeviceLegacyDitheringFeaturesEXT &() VULKAN_HPP_NOEXCEPT +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PhysicalDeviceLayeredApiPropertiesListKHR & setLayeredApis( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & layeredApis_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + layeredApiCount = static_cast( layeredApis_.size() ); + pLayeredApis = layeredApis_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceLayeredApiPropertiesListKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceLayeredApiPropertiesListKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, legacyDithering ); + return std::tie( sType, pNext, layeredApiCount, pLayeredApis ); } #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceLegacyDitheringFeaturesEXT const & ) const = default; + auto operator<=>( PhysicalDeviceLayeredApiPropertiesListKHR const & ) const = default; #else - bool operator==( PhysicalDeviceLegacyDitheringFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceLayeredApiPropertiesListKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( legacyDithering == rhs.legacyDithering ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( layeredApiCount == rhs.layeredApiCount ) && ( pLayeredApis == rhs.pLayeredApis ); # endif } - bool operator!=( PhysicalDeviceLegacyDitheringFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceLayeredApiPropertiesListKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceLegacyDitheringFeaturesEXT; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 legacyDithering = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceLayeredApiPropertiesListKHR; + void * pNext = {}; + uint32_t layeredApiCount = {}; + VULKAN_HPP_NAMESPACE::PhysicalDeviceLayeredApiPropertiesKHR * pLayeredApis = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceLegacyDitheringFeaturesEXT; + using Type = PhysicalDeviceLayeredApiPropertiesListKHR; }; struct PhysicalDeviceLimits @@ -70459,112 +71400,112 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DeviceSize optimalBufferCopyOffsetAlignment_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize optimalBufferCopyRowPitchAlignment_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize nonCoherentAtomSize_ = {} ) VULKAN_HPP_NOEXCEPT - : maxImageDimension1D( maxImageDimension1D_ ) - , maxImageDimension2D( maxImageDimension2D_ ) - , maxImageDimension3D( maxImageDimension3D_ ) - , maxImageDimensionCube( maxImageDimensionCube_ ) - , maxImageArrayLayers( maxImageArrayLayers_ ) - , maxTexelBufferElements( maxTexelBufferElements_ ) - , maxUniformBufferRange( maxUniformBufferRange_ ) - , maxStorageBufferRange( maxStorageBufferRange_ ) - , maxPushConstantsSize( 
maxPushConstantsSize_ ) - , maxMemoryAllocationCount( maxMemoryAllocationCount_ ) - , maxSamplerAllocationCount( maxSamplerAllocationCount_ ) - , bufferImageGranularity( bufferImageGranularity_ ) - , sparseAddressSpaceSize( sparseAddressSpaceSize_ ) - , maxBoundDescriptorSets( maxBoundDescriptorSets_ ) - , maxPerStageDescriptorSamplers( maxPerStageDescriptorSamplers_ ) - , maxPerStageDescriptorUniformBuffers( maxPerStageDescriptorUniformBuffers_ ) - , maxPerStageDescriptorStorageBuffers( maxPerStageDescriptorStorageBuffers_ ) - , maxPerStageDescriptorSampledImages( maxPerStageDescriptorSampledImages_ ) - , maxPerStageDescriptorStorageImages( maxPerStageDescriptorStorageImages_ ) - , maxPerStageDescriptorInputAttachments( maxPerStageDescriptorInputAttachments_ ) - , maxPerStageResources( maxPerStageResources_ ) - , maxDescriptorSetSamplers( maxDescriptorSetSamplers_ ) - , maxDescriptorSetUniformBuffers( maxDescriptorSetUniformBuffers_ ) - , maxDescriptorSetUniformBuffersDynamic( maxDescriptorSetUniformBuffersDynamic_ ) - , maxDescriptorSetStorageBuffers( maxDescriptorSetStorageBuffers_ ) - , maxDescriptorSetStorageBuffersDynamic( maxDescriptorSetStorageBuffersDynamic_ ) - , maxDescriptorSetSampledImages( maxDescriptorSetSampledImages_ ) - , maxDescriptorSetStorageImages( maxDescriptorSetStorageImages_ ) - , maxDescriptorSetInputAttachments( maxDescriptorSetInputAttachments_ ) - , maxVertexInputAttributes( maxVertexInputAttributes_ ) - , maxVertexInputBindings( maxVertexInputBindings_ ) - , maxVertexInputAttributeOffset( maxVertexInputAttributeOffset_ ) - , maxVertexInputBindingStride( maxVertexInputBindingStride_ ) - , maxVertexOutputComponents( maxVertexOutputComponents_ ) - , maxTessellationGenerationLevel( maxTessellationGenerationLevel_ ) - , maxTessellationPatchSize( maxTessellationPatchSize_ ) - , maxTessellationControlPerVertexInputComponents( maxTessellationControlPerVertexInputComponents_ ) - , maxTessellationControlPerVertexOutputComponents( maxTessellationControlPerVertexOutputComponents_ ) - , maxTessellationControlPerPatchOutputComponents( maxTessellationControlPerPatchOutputComponents_ ) - , maxTessellationControlTotalOutputComponents( maxTessellationControlTotalOutputComponents_ ) - , maxTessellationEvaluationInputComponents( maxTessellationEvaluationInputComponents_ ) - , maxTessellationEvaluationOutputComponents( maxTessellationEvaluationOutputComponents_ ) - , maxGeometryShaderInvocations( maxGeometryShaderInvocations_ ) - , maxGeometryInputComponents( maxGeometryInputComponents_ ) - , maxGeometryOutputComponents( maxGeometryOutputComponents_ ) - , maxGeometryOutputVertices( maxGeometryOutputVertices_ ) - , maxGeometryTotalOutputComponents( maxGeometryTotalOutputComponents_ ) - , maxFragmentInputComponents( maxFragmentInputComponents_ ) - , maxFragmentOutputAttachments( maxFragmentOutputAttachments_ ) - , maxFragmentDualSrcAttachments( maxFragmentDualSrcAttachments_ ) - , maxFragmentCombinedOutputResources( maxFragmentCombinedOutputResources_ ) - , maxComputeSharedMemorySize( maxComputeSharedMemorySize_ ) - , maxComputeWorkGroupCount( maxComputeWorkGroupCount_ ) - , maxComputeWorkGroupInvocations( maxComputeWorkGroupInvocations_ ) - , maxComputeWorkGroupSize( maxComputeWorkGroupSize_ ) - , subPixelPrecisionBits( subPixelPrecisionBits_ ) - , subTexelPrecisionBits( subTexelPrecisionBits_ ) - , mipmapPrecisionBits( mipmapPrecisionBits_ ) - , maxDrawIndexedIndexValue( maxDrawIndexedIndexValue_ ) - , maxDrawIndirectCount( maxDrawIndirectCount_ ) - , maxSamplerLodBias( 
maxSamplerLodBias_ ) - , maxSamplerAnisotropy( maxSamplerAnisotropy_ ) - , maxViewports( maxViewports_ ) - , maxViewportDimensions( maxViewportDimensions_ ) - , viewportBoundsRange( viewportBoundsRange_ ) - , viewportSubPixelBits( viewportSubPixelBits_ ) - , minMemoryMapAlignment( minMemoryMapAlignment_ ) - , minTexelBufferOffsetAlignment( minTexelBufferOffsetAlignment_ ) - , minUniformBufferOffsetAlignment( minUniformBufferOffsetAlignment_ ) - , minStorageBufferOffsetAlignment( minStorageBufferOffsetAlignment_ ) - , minTexelOffset( minTexelOffset_ ) - , maxTexelOffset( maxTexelOffset_ ) - , minTexelGatherOffset( minTexelGatherOffset_ ) - , maxTexelGatherOffset( maxTexelGatherOffset_ ) - , minInterpolationOffset( minInterpolationOffset_ ) - , maxInterpolationOffset( maxInterpolationOffset_ ) - , subPixelInterpolationOffsetBits( subPixelInterpolationOffsetBits_ ) - , maxFramebufferWidth( maxFramebufferWidth_ ) - , maxFramebufferHeight( maxFramebufferHeight_ ) - , maxFramebufferLayers( maxFramebufferLayers_ ) - , framebufferColorSampleCounts( framebufferColorSampleCounts_ ) - , framebufferDepthSampleCounts( framebufferDepthSampleCounts_ ) - , framebufferStencilSampleCounts( framebufferStencilSampleCounts_ ) - , framebufferNoAttachmentsSampleCounts( framebufferNoAttachmentsSampleCounts_ ) - , maxColorAttachments( maxColorAttachments_ ) - , sampledImageColorSampleCounts( sampledImageColorSampleCounts_ ) - , sampledImageIntegerSampleCounts( sampledImageIntegerSampleCounts_ ) - , sampledImageDepthSampleCounts( sampledImageDepthSampleCounts_ ) - , sampledImageStencilSampleCounts( sampledImageStencilSampleCounts_ ) - , storageImageSampleCounts( storageImageSampleCounts_ ) - , maxSampleMaskWords( maxSampleMaskWords_ ) - , timestampComputeAndGraphics( timestampComputeAndGraphics_ ) - , timestampPeriod( timestampPeriod_ ) - , maxClipDistances( maxClipDistances_ ) - , maxCullDistances( maxCullDistances_ ) - , maxCombinedClipAndCullDistances( maxCombinedClipAndCullDistances_ ) - , discreteQueuePriorities( discreteQueuePriorities_ ) - , pointSizeRange( pointSizeRange_ ) - , lineWidthRange( lineWidthRange_ ) - , pointSizeGranularity( pointSizeGranularity_ ) - , lineWidthGranularity( lineWidthGranularity_ ) - , strictLines( strictLines_ ) - , standardSampleLocations( standardSampleLocations_ ) - , optimalBufferCopyOffsetAlignment( optimalBufferCopyOffsetAlignment_ ) - , optimalBufferCopyRowPitchAlignment( optimalBufferCopyRowPitchAlignment_ ) - , nonCoherentAtomSize( nonCoherentAtomSize_ ) + : maxImageDimension1D{ maxImageDimension1D_ } + , maxImageDimension2D{ maxImageDimension2D_ } + , maxImageDimension3D{ maxImageDimension3D_ } + , maxImageDimensionCube{ maxImageDimensionCube_ } + , maxImageArrayLayers{ maxImageArrayLayers_ } + , maxTexelBufferElements{ maxTexelBufferElements_ } + , maxUniformBufferRange{ maxUniformBufferRange_ } + , maxStorageBufferRange{ maxStorageBufferRange_ } + , maxPushConstantsSize{ maxPushConstantsSize_ } + , maxMemoryAllocationCount{ maxMemoryAllocationCount_ } + , maxSamplerAllocationCount{ maxSamplerAllocationCount_ } + , bufferImageGranularity{ bufferImageGranularity_ } + , sparseAddressSpaceSize{ sparseAddressSpaceSize_ } + , maxBoundDescriptorSets{ maxBoundDescriptorSets_ } + , maxPerStageDescriptorSamplers{ maxPerStageDescriptorSamplers_ } + , maxPerStageDescriptorUniformBuffers{ maxPerStageDescriptorUniformBuffers_ } + , maxPerStageDescriptorStorageBuffers{ maxPerStageDescriptorStorageBuffers_ } + , maxPerStageDescriptorSampledImages{ maxPerStageDescriptorSampledImages_ 
} + , maxPerStageDescriptorStorageImages{ maxPerStageDescriptorStorageImages_ } + , maxPerStageDescriptorInputAttachments{ maxPerStageDescriptorInputAttachments_ } + , maxPerStageResources{ maxPerStageResources_ } + , maxDescriptorSetSamplers{ maxDescriptorSetSamplers_ } + , maxDescriptorSetUniformBuffers{ maxDescriptorSetUniformBuffers_ } + , maxDescriptorSetUniformBuffersDynamic{ maxDescriptorSetUniformBuffersDynamic_ } + , maxDescriptorSetStorageBuffers{ maxDescriptorSetStorageBuffers_ } + , maxDescriptorSetStorageBuffersDynamic{ maxDescriptorSetStorageBuffersDynamic_ } + , maxDescriptorSetSampledImages{ maxDescriptorSetSampledImages_ } + , maxDescriptorSetStorageImages{ maxDescriptorSetStorageImages_ } + , maxDescriptorSetInputAttachments{ maxDescriptorSetInputAttachments_ } + , maxVertexInputAttributes{ maxVertexInputAttributes_ } + , maxVertexInputBindings{ maxVertexInputBindings_ } + , maxVertexInputAttributeOffset{ maxVertexInputAttributeOffset_ } + , maxVertexInputBindingStride{ maxVertexInputBindingStride_ } + , maxVertexOutputComponents{ maxVertexOutputComponents_ } + , maxTessellationGenerationLevel{ maxTessellationGenerationLevel_ } + , maxTessellationPatchSize{ maxTessellationPatchSize_ } + , maxTessellationControlPerVertexInputComponents{ maxTessellationControlPerVertexInputComponents_ } + , maxTessellationControlPerVertexOutputComponents{ maxTessellationControlPerVertexOutputComponents_ } + , maxTessellationControlPerPatchOutputComponents{ maxTessellationControlPerPatchOutputComponents_ } + , maxTessellationControlTotalOutputComponents{ maxTessellationControlTotalOutputComponents_ } + , maxTessellationEvaluationInputComponents{ maxTessellationEvaluationInputComponents_ } + , maxTessellationEvaluationOutputComponents{ maxTessellationEvaluationOutputComponents_ } + , maxGeometryShaderInvocations{ maxGeometryShaderInvocations_ } + , maxGeometryInputComponents{ maxGeometryInputComponents_ } + , maxGeometryOutputComponents{ maxGeometryOutputComponents_ } + , maxGeometryOutputVertices{ maxGeometryOutputVertices_ } + , maxGeometryTotalOutputComponents{ maxGeometryTotalOutputComponents_ } + , maxFragmentInputComponents{ maxFragmentInputComponents_ } + , maxFragmentOutputAttachments{ maxFragmentOutputAttachments_ } + , maxFragmentDualSrcAttachments{ maxFragmentDualSrcAttachments_ } + , maxFragmentCombinedOutputResources{ maxFragmentCombinedOutputResources_ } + , maxComputeSharedMemorySize{ maxComputeSharedMemorySize_ } + , maxComputeWorkGroupCount{ maxComputeWorkGroupCount_ } + , maxComputeWorkGroupInvocations{ maxComputeWorkGroupInvocations_ } + , maxComputeWorkGroupSize{ maxComputeWorkGroupSize_ } + , subPixelPrecisionBits{ subPixelPrecisionBits_ } + , subTexelPrecisionBits{ subTexelPrecisionBits_ } + , mipmapPrecisionBits{ mipmapPrecisionBits_ } + , maxDrawIndexedIndexValue{ maxDrawIndexedIndexValue_ } + , maxDrawIndirectCount{ maxDrawIndirectCount_ } + , maxSamplerLodBias{ maxSamplerLodBias_ } + , maxSamplerAnisotropy{ maxSamplerAnisotropy_ } + , maxViewports{ maxViewports_ } + , maxViewportDimensions{ maxViewportDimensions_ } + , viewportBoundsRange{ viewportBoundsRange_ } + , viewportSubPixelBits{ viewportSubPixelBits_ } + , minMemoryMapAlignment{ minMemoryMapAlignment_ } + , minTexelBufferOffsetAlignment{ minTexelBufferOffsetAlignment_ } + , minUniformBufferOffsetAlignment{ minUniformBufferOffsetAlignment_ } + , minStorageBufferOffsetAlignment{ minStorageBufferOffsetAlignment_ } + , minTexelOffset{ minTexelOffset_ } + , maxTexelOffset{ maxTexelOffset_ } + , 
minTexelGatherOffset{ minTexelGatherOffset_ } + , maxTexelGatherOffset{ maxTexelGatherOffset_ } + , minInterpolationOffset{ minInterpolationOffset_ } + , maxInterpolationOffset{ maxInterpolationOffset_ } + , subPixelInterpolationOffsetBits{ subPixelInterpolationOffsetBits_ } + , maxFramebufferWidth{ maxFramebufferWidth_ } + , maxFramebufferHeight{ maxFramebufferHeight_ } + , maxFramebufferLayers{ maxFramebufferLayers_ } + , framebufferColorSampleCounts{ framebufferColorSampleCounts_ } + , framebufferDepthSampleCounts{ framebufferDepthSampleCounts_ } + , framebufferStencilSampleCounts{ framebufferStencilSampleCounts_ } + , framebufferNoAttachmentsSampleCounts{ framebufferNoAttachmentsSampleCounts_ } + , maxColorAttachments{ maxColorAttachments_ } + , sampledImageColorSampleCounts{ sampledImageColorSampleCounts_ } + , sampledImageIntegerSampleCounts{ sampledImageIntegerSampleCounts_ } + , sampledImageDepthSampleCounts{ sampledImageDepthSampleCounts_ } + , sampledImageStencilSampleCounts{ sampledImageStencilSampleCounts_ } + , storageImageSampleCounts{ storageImageSampleCounts_ } + , maxSampleMaskWords{ maxSampleMaskWords_ } + , timestampComputeAndGraphics{ timestampComputeAndGraphics_ } + , timestampPeriod{ timestampPeriod_ } + , maxClipDistances{ maxClipDistances_ } + , maxCullDistances{ maxCullDistances_ } + , maxCombinedClipAndCullDistances{ maxCombinedClipAndCullDistances_ } + , discreteQueuePriorities{ discreteQueuePriorities_ } + , pointSizeRange{ pointSizeRange_ } + , lineWidthRange{ lineWidthRange_ } + , pointSizeGranularity{ pointSizeGranularity_ } + , lineWidthGranularity{ lineWidthGranularity_ } + , strictLines{ strictLines_ } + , standardSampleLocations{ standardSampleLocations_ } + , optimalBufferCopyOffsetAlignment{ optimalBufferCopyOffsetAlignment_ } + , optimalBufferCopyRowPitchAlignment{ optimalBufferCopyRowPitchAlignment_ } + , nonCoherentAtomSize{ nonCoherentAtomSize_ } { } @@ -71004,6 +71945,770 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DeviceSize nonCoherentAtomSize = {}; }; + struct PhysicalDeviceSparseProperties + { + using NativeType = VkPhysicalDeviceSparseProperties; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceSparseProperties( VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DBlockShape_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DMultisampleBlockShape_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 residencyStandard3DBlockShape_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 residencyAlignedMipSize_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 residencyNonResidentStrict_ = {} ) VULKAN_HPP_NOEXCEPT + : residencyStandard2DBlockShape{ residencyStandard2DBlockShape_ } + , residencyStandard2DMultisampleBlockShape{ residencyStandard2DMultisampleBlockShape_ } + , residencyStandard3DBlockShape{ residencyStandard3DBlockShape_ } + , residencyAlignedMipSize{ residencyAlignedMipSize_ } + , residencyNonResidentStrict{ residencyNonResidentStrict_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceSparseProperties( PhysicalDeviceSparseProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceSparseProperties( VkPhysicalDeviceSparseProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceSparseProperties( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceSparseProperties & operator=( PhysicalDeviceSparseProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceSparseProperties & operator=( VkPhysicalDeviceSparseProperties const & rhs ) 
VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceSparseProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceSparseProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( residencyStandard2DBlockShape, + residencyStandard2DMultisampleBlockShape, + residencyStandard3DBlockShape, + residencyAlignedMipSize, + residencyNonResidentStrict ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceSparseProperties const & ) const = default; +#else + bool operator==( PhysicalDeviceSparseProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( residencyStandard2DBlockShape == rhs.residencyStandard2DBlockShape ) && + ( residencyStandard2DMultisampleBlockShape == rhs.residencyStandard2DMultisampleBlockShape ) && + ( residencyStandard3DBlockShape == rhs.residencyStandard3DBlockShape ) && ( residencyAlignedMipSize == rhs.residencyAlignedMipSize ) && + ( residencyNonResidentStrict == rhs.residencyNonResidentStrict ); +# endif + } + + bool operator!=( PhysicalDeviceSparseProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DBlockShape = {}; + VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DMultisampleBlockShape = {}; + VULKAN_HPP_NAMESPACE::Bool32 residencyStandard3DBlockShape = {}; + VULKAN_HPP_NAMESPACE::Bool32 residencyAlignedMipSize = {}; + VULKAN_HPP_NAMESPACE::Bool32 residencyNonResidentStrict = {}; + }; + + struct PhysicalDeviceProperties + { + using NativeType = VkPhysicalDeviceProperties; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProperties( uint32_t apiVersion_ = {}, + uint32_t driverVersion_ = {}, + uint32_t vendorID_ = {}, + uint32_t deviceID_ = {}, + VULKAN_HPP_NAMESPACE::PhysicalDeviceType deviceType_ = VULKAN_HPP_NAMESPACE::PhysicalDeviceType::eOther, + std::array const & deviceName_ = {}, + std::array const & pipelineCacheUUID_ = {}, + VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits limits_ = {}, + VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties sparseProperties_ = {} ) VULKAN_HPP_NOEXCEPT + : apiVersion{ apiVersion_ } + , driverVersion{ driverVersion_ } + , vendorID{ vendorID_ } + , deviceID{ deviceID_ } + , deviceType{ deviceType_ } + , deviceName{ deviceName_ } + , pipelineCacheUUID{ pipelineCacheUUID_ } + , limits{ limits_ } + , sparseProperties{ sparseProperties_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProperties( PhysicalDeviceProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceProperties( VkPhysicalDeviceProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceProperties( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceProperties & operator=( PhysicalDeviceProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceProperties & operator=( VkPhysicalDeviceProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this 
); + } + + operator VkPhysicalDeviceProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple const &, + VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, + VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits const &, + VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( apiVersion, driverVersion, vendorID, deviceID, deviceType, deviceName, pipelineCacheUUID, limits, sparseProperties ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::partial_ordering operator<=>( PhysicalDeviceProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = apiVersion <=> rhs.apiVersion; cmp != 0 ) + return cmp; + if ( auto cmp = driverVersion <=> rhs.driverVersion; cmp != 0 ) + return cmp; + if ( auto cmp = vendorID <=> rhs.vendorID; cmp != 0 ) + return cmp; + if ( auto cmp = deviceID <=> rhs.deviceID; cmp != 0 ) + return cmp; + if ( auto cmp = deviceType <=> rhs.deviceType; cmp != 0 ) + return cmp; + if ( auto cmp = strcmp( deviceName, rhs.deviceName ); cmp != 0 ) + return ( cmp < 0 ) ? std::partial_ordering::less : std::partial_ordering::greater; + if ( auto cmp = pipelineCacheUUID <=> rhs.pipelineCacheUUID; cmp != 0 ) + return cmp; + if ( auto cmp = limits <=> rhs.limits; cmp != 0 ) + return cmp; + if ( auto cmp = sparseProperties <=> rhs.sparseProperties; cmp != 0 ) + return cmp; + + return std::partial_ordering::equivalent; + } +#endif + + bool operator==( PhysicalDeviceProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( apiVersion == rhs.apiVersion ) && ( driverVersion == rhs.driverVersion ) && ( vendorID == rhs.vendorID ) && ( deviceID == rhs.deviceID ) && + ( deviceType == rhs.deviceType ) && ( strcmp( deviceName, rhs.deviceName ) == 0 ) && ( pipelineCacheUUID == rhs.pipelineCacheUUID ) && + ( limits == rhs.limits ) && ( sparseProperties == rhs.sparseProperties ); + } + + bool operator!=( PhysicalDeviceProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t apiVersion = {}; + uint32_t driverVersion = {}; + uint32_t vendorID = {}; + uint32_t deviceID = {}; + VULKAN_HPP_NAMESPACE::PhysicalDeviceType deviceType = VULKAN_HPP_NAMESPACE::PhysicalDeviceType::eOther; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D deviceName = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D pipelineCacheUUID = {}; + VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits limits = {}; + VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties sparseProperties = {}; + }; + + struct PhysicalDeviceProperties2 + { + using NativeType = VkPhysicalDeviceProperties2; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceProperties2; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties properties_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , properties{ properties_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProperties2( PhysicalDeviceProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceProperties2( VkPhysicalDeviceProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceProperties2( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceProperties2 & operator=( PhysicalDeviceProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT 
= default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceProperties2 & operator=( VkPhysicalDeviceProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceProperties2 const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceProperties2 &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, properties ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceProperties2 const & ) const = default; +#else + bool operator==( PhysicalDeviceProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( properties == rhs.properties ); +# endif + } + + bool operator!=( PhysicalDeviceProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceProperties2; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties properties = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceProperties2; + }; + + using PhysicalDeviceProperties2KHR = PhysicalDeviceProperties2; + + struct PhysicalDeviceLayeredApiVulkanPropertiesKHR + { + using NativeType = VkPhysicalDeviceLayeredApiVulkanPropertiesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceLayeredApiVulkanPropertiesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLayeredApiVulkanPropertiesKHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 properties_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , properties{ properties_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 + PhysicalDeviceLayeredApiVulkanPropertiesKHR( PhysicalDeviceLayeredApiVulkanPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceLayeredApiVulkanPropertiesKHR( VkPhysicalDeviceLayeredApiVulkanPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceLayeredApiVulkanPropertiesKHR( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceLayeredApiVulkanPropertiesKHR & operator=( PhysicalDeviceLayeredApiVulkanPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceLayeredApiVulkanPropertiesKHR & operator=( VkPhysicalDeviceLayeredApiVulkanPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceLayeredApiVulkanPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceLayeredApiVulkanPropertiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, properties ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceLayeredApiVulkanPropertiesKHR const & ) const = 
default; +#else + bool operator==( PhysicalDeviceLayeredApiVulkanPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( properties == rhs.properties ); +# endif + } + + bool operator!=( PhysicalDeviceLayeredApiVulkanPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceLayeredApiVulkanPropertiesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 properties = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceLayeredApiVulkanPropertiesKHR; + }; + + struct PhysicalDeviceLayeredDriverPropertiesMSFT + { + using NativeType = VkPhysicalDeviceLayeredDriverPropertiesMSFT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceLayeredDriverPropertiesMSFT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceLayeredDriverPropertiesMSFT( + VULKAN_HPP_NAMESPACE::LayeredDriverUnderlyingApiMSFT underlyingAPI_ = VULKAN_HPP_NAMESPACE::LayeredDriverUnderlyingApiMSFT::eNone, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , underlyingAPI{ underlyingAPI_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceLayeredDriverPropertiesMSFT( PhysicalDeviceLayeredDriverPropertiesMSFT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceLayeredDriverPropertiesMSFT( VkPhysicalDeviceLayeredDriverPropertiesMSFT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceLayeredDriverPropertiesMSFT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceLayeredDriverPropertiesMSFT & operator=( PhysicalDeviceLayeredDriverPropertiesMSFT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceLayeredDriverPropertiesMSFT & operator=( VkPhysicalDeviceLayeredDriverPropertiesMSFT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceLayeredDriverPropertiesMSFT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceLayeredDriverPropertiesMSFT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, underlyingAPI ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceLayeredDriverPropertiesMSFT const & ) const = default; +#else + bool operator==( PhysicalDeviceLayeredDriverPropertiesMSFT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( underlyingAPI == rhs.underlyingAPI ); +# endif + } + + bool operator!=( PhysicalDeviceLayeredDriverPropertiesMSFT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceLayeredDriverPropertiesMSFT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::LayeredDriverUnderlyingApiMSFT underlyingAPI = VULKAN_HPP_NAMESPACE::LayeredDriverUnderlyingApiMSFT::eNone; 
+ }; + + template <> + struct CppType + { + using Type = PhysicalDeviceLayeredDriverPropertiesMSFT; + }; + + struct PhysicalDeviceLegacyDitheringFeaturesEXT + { + using NativeType = VkPhysicalDeviceLegacyDitheringFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceLegacyDitheringFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceLegacyDitheringFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 legacyDithering_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , legacyDithering{ legacyDithering_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceLegacyDitheringFeaturesEXT( PhysicalDeviceLegacyDitheringFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceLegacyDitheringFeaturesEXT( VkPhysicalDeviceLegacyDitheringFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceLegacyDitheringFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceLegacyDitheringFeaturesEXT & operator=( PhysicalDeviceLegacyDitheringFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceLegacyDitheringFeaturesEXT & operator=( VkPhysicalDeviceLegacyDitheringFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLegacyDitheringFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLegacyDitheringFeaturesEXT & setLegacyDithering( VULKAN_HPP_NAMESPACE::Bool32 legacyDithering_ ) VULKAN_HPP_NOEXCEPT + { + legacyDithering = legacyDithering_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceLegacyDitheringFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceLegacyDitheringFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, legacyDithering ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceLegacyDitheringFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceLegacyDitheringFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( legacyDithering == rhs.legacyDithering ); +# endif + } + + bool operator!=( PhysicalDeviceLegacyDitheringFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceLegacyDitheringFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 legacyDithering = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceLegacyDitheringFeaturesEXT; + }; + + struct PhysicalDeviceLegacyVertexAttributesFeaturesEXT + { + using NativeType = VkPhysicalDeviceLegacyVertexAttributesFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = 
StructureType::ePhysicalDeviceLegacyVertexAttributesFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceLegacyVertexAttributesFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 legacyVertexAttributes_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , legacyVertexAttributes{ legacyVertexAttributes_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceLegacyVertexAttributesFeaturesEXT( PhysicalDeviceLegacyVertexAttributesFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceLegacyVertexAttributesFeaturesEXT( VkPhysicalDeviceLegacyVertexAttributesFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceLegacyVertexAttributesFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceLegacyVertexAttributesFeaturesEXT & operator=( PhysicalDeviceLegacyVertexAttributesFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceLegacyVertexAttributesFeaturesEXT & operator=( VkPhysicalDeviceLegacyVertexAttributesFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLegacyVertexAttributesFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLegacyVertexAttributesFeaturesEXT & + setLegacyVertexAttributes( VULKAN_HPP_NAMESPACE::Bool32 legacyVertexAttributes_ ) VULKAN_HPP_NOEXCEPT + { + legacyVertexAttributes = legacyVertexAttributes_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceLegacyVertexAttributesFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceLegacyVertexAttributesFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, legacyVertexAttributes ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceLegacyVertexAttributesFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceLegacyVertexAttributesFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( legacyVertexAttributes == rhs.legacyVertexAttributes ); +# endif + } + + bool operator!=( PhysicalDeviceLegacyVertexAttributesFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceLegacyVertexAttributesFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 legacyVertexAttributes = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceLegacyVertexAttributesFeaturesEXT; + }; + + struct PhysicalDeviceLegacyVertexAttributesPropertiesEXT + { + using NativeType = VkPhysicalDeviceLegacyVertexAttributesPropertiesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceLegacyVertexAttributesPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR 
PhysicalDeviceLegacyVertexAttributesPropertiesEXT( VULKAN_HPP_NAMESPACE::Bool32 nativeUnalignedPerformance_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , nativeUnalignedPerformance{ nativeUnalignedPerformance_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceLegacyVertexAttributesPropertiesEXT( PhysicalDeviceLegacyVertexAttributesPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceLegacyVertexAttributesPropertiesEXT( VkPhysicalDeviceLegacyVertexAttributesPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceLegacyVertexAttributesPropertiesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceLegacyVertexAttributesPropertiesEXT & + operator=( PhysicalDeviceLegacyVertexAttributesPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceLegacyVertexAttributesPropertiesEXT & operator=( VkPhysicalDeviceLegacyVertexAttributesPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLegacyVertexAttributesPropertiesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLegacyVertexAttributesPropertiesEXT & + setNativeUnalignedPerformance( VULKAN_HPP_NAMESPACE::Bool32 nativeUnalignedPerformance_ ) VULKAN_HPP_NOEXCEPT + { + nativeUnalignedPerformance = nativeUnalignedPerformance_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceLegacyVertexAttributesPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceLegacyVertexAttributesPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, nativeUnalignedPerformance ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceLegacyVertexAttributesPropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceLegacyVertexAttributesPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( nativeUnalignedPerformance == rhs.nativeUnalignedPerformance ); +# endif + } + + bool operator!=( PhysicalDeviceLegacyVertexAttributesPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceLegacyVertexAttributesPropertiesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 nativeUnalignedPerformance = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceLegacyVertexAttributesPropertiesEXT; + }; + struct PhysicalDeviceLineRasterizationFeaturesKHR { using NativeType = VkPhysicalDeviceLineRasterizationFeaturesKHR; @@ -71019,13 +72724,13 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Bool32 stippledBresenhamLines_ = {}, VULKAN_HPP_NAMESPACE::Bool32 stippledSmoothLines_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , rectangularLines( rectangularLines_ ) - , bresenhamLines( bresenhamLines_ ) - , smoothLines( 
smoothLines_ ) - , stippledRectangularLines( stippledRectangularLines_ ) - , stippledBresenhamLines( stippledBresenhamLines_ ) - , stippledSmoothLines( stippledSmoothLines_ ) + : pNext{ pNext_ } + , rectangularLines{ rectangularLines_ } + , bresenhamLines{ bresenhamLines_ } + , smoothLines{ smoothLines_ } + , stippledRectangularLines{ stippledRectangularLines_ } + , stippledBresenhamLines{ stippledBresenhamLines_ } + , stippledSmoothLines{ stippledSmoothLines_ } { } @@ -71170,8 +72875,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceLineRasterizationPropertiesKHR( uint32_t lineSubPixelPrecisionBits_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , lineSubPixelPrecisionBits( lineSubPixelPrecisionBits_ ) + : pNext{ pNext_ } + , lineSubPixelPrecisionBits{ lineSubPixelPrecisionBits_ } { } @@ -71255,8 +72960,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceLinearColorAttachmentFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 linearColorAttachment_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , linearColorAttachment( linearColorAttachment_ ) + : pNext{ pNext_ } + , linearColorAttachment{ linearColorAttachment_ } { } @@ -71355,9 +73060,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance3Properties( uint32_t maxPerSetDescriptors_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize maxMemoryAllocationSize_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , maxPerSetDescriptors( maxPerSetDescriptors_ ) - , maxMemoryAllocationSize( maxMemoryAllocationSize_ ) + : pNext{ pNext_ } + , maxPerSetDescriptors{ maxPerSetDescriptors_ } + , maxMemoryAllocationSize{ maxMemoryAllocationSize_ } { } @@ -71442,8 +73147,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance4Features( VULKAN_HPP_NAMESPACE::Bool32 maintenance4_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , maintenance4( maintenance4_ ) + : pNext{ pNext_ } + , maintenance4{ maintenance4_ } { } @@ -71541,8 +73246,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance4Properties( VULKAN_HPP_NAMESPACE::DeviceSize maxBufferSize_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , maxBufferSize( maxBufferSize_ ) + : pNext{ pNext_ } + , maxBufferSize{ maxBufferSize_ } { } @@ -71625,8 +73330,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance5FeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 maintenance5_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , maintenance5( maintenance5_ ) + : pNext{ pNext_ } + , maintenance5{ maintenance5_ } { } @@ -71727,13 +73432,13 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Bool32 nonStrictSinglePixelWideLinesUseParallelogram_ = {}, VULKAN_HPP_NAMESPACE::Bool32 nonStrictWideLinesUseParallelogram_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , earlyFragmentMultisampleCoverageAfterSampleCounting( earlyFragmentMultisampleCoverageAfterSampleCounting_ ) - , earlyFragmentSampleMaskTestBeforeSampleCounting( earlyFragmentSampleMaskTestBeforeSampleCounting_ ) - , depthStencilSwizzleOneSupport( depthStencilSwizzleOneSupport_ ) - , polygonModePointSize( 
polygonModePointSize_ ) - , nonStrictSinglePixelWideLinesUseParallelogram( nonStrictSinglePixelWideLinesUseParallelogram_ ) - , nonStrictWideLinesUseParallelogram( nonStrictWideLinesUseParallelogram_ ) + : pNext{ pNext_ } + , earlyFragmentMultisampleCoverageAfterSampleCounting{ earlyFragmentMultisampleCoverageAfterSampleCounting_ } + , earlyFragmentSampleMaskTestBeforeSampleCounting{ earlyFragmentSampleMaskTestBeforeSampleCounting_ } + , depthStencilSwizzleOneSupport{ depthStencilSwizzleOneSupport_ } + , polygonModePointSize{ polygonModePointSize_ } + , nonStrictSinglePixelWideLinesUseParallelogram{ nonStrictSinglePixelWideLinesUseParallelogram_ } + , nonStrictWideLinesUseParallelogram{ nonStrictWideLinesUseParallelogram_ } { } @@ -71838,8 +73543,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance6FeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 maintenance6_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , maintenance6( maintenance6_ ) + : pNext{ pNext_ } + , maintenance6{ maintenance6_ } { } @@ -71937,10 +73642,10 @@ namespace VULKAN_HPP_NAMESPACE uint32_t maxCombinedImageSamplerDescriptorCount_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateClampCombinerInputs_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , blockTexelViewCompatibleMultipleLayers( blockTexelViewCompatibleMultipleLayers_ ) - , maxCombinedImageSamplerDescriptorCount( maxCombinedImageSamplerDescriptorCount_ ) - , fragmentShadingRateClampCombinerInputs( fragmentShadingRateClampCombinerInputs_ ) + : pNext{ pNext_ } + , blockTexelViewCompatibleMultipleLayers{ blockTexelViewCompatibleMultipleLayers_ } + , maxCombinedImageSamplerDescriptorCount{ maxCombinedImageSamplerDescriptorCount_ } + , fragmentShadingRateClampCombinerInputs{ fragmentShadingRateClampCombinerInputs_ } { } @@ -72020,6 +73725,232 @@ namespace VULKAN_HPP_NAMESPACE using Type = PhysicalDeviceMaintenance6PropertiesKHR; }; + struct PhysicalDeviceMaintenance7FeaturesKHR + { + using NativeType = VkPhysicalDeviceMaintenance7FeaturesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMaintenance7FeaturesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance7FeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 maintenance7_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , maintenance7{ maintenance7_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance7FeaturesKHR( PhysicalDeviceMaintenance7FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceMaintenance7FeaturesKHR( VkPhysicalDeviceMaintenance7FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMaintenance7FeaturesKHR( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceMaintenance7FeaturesKHR & operator=( PhysicalDeviceMaintenance7FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceMaintenance7FeaturesKHR & operator=( VkPhysicalDeviceMaintenance7FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMaintenance7FeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 
PhysicalDeviceMaintenance7FeaturesKHR & setMaintenance7( VULKAN_HPP_NAMESPACE::Bool32 maintenance7_ ) VULKAN_HPP_NOEXCEPT + { + maintenance7 = maintenance7_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceMaintenance7FeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMaintenance7FeaturesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, maintenance7 ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMaintenance7FeaturesKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceMaintenance7FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maintenance7 == rhs.maintenance7 ); +# endif + } + + bool operator!=( PhysicalDeviceMaintenance7FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMaintenance7FeaturesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 maintenance7 = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceMaintenance7FeaturesKHR; + }; + + struct PhysicalDeviceMaintenance7PropertiesKHR + { + using NativeType = VkPhysicalDeviceMaintenance7PropertiesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMaintenance7PropertiesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance7PropertiesKHR( VULKAN_HPP_NAMESPACE::Bool32 robustFragmentShadingRateAttachmentAccess_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilAttachmentAccess_ = {}, + uint32_t maxDescriptorSetTotalUniformBuffersDynamic_ = {}, + uint32_t maxDescriptorSetTotalStorageBuffersDynamic_ = {}, + uint32_t maxDescriptorSetTotalBuffersDynamic_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindTotalUniformBuffersDynamic_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindTotalStorageBuffersDynamic_ = {}, + uint32_t maxDescriptorSetUpdateAfterBindTotalBuffersDynamic_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , robustFragmentShadingRateAttachmentAccess{ robustFragmentShadingRateAttachmentAccess_ } + , separateDepthStencilAttachmentAccess{ separateDepthStencilAttachmentAccess_ } + , maxDescriptorSetTotalUniformBuffersDynamic{ maxDescriptorSetTotalUniformBuffersDynamic_ } + , maxDescriptorSetTotalStorageBuffersDynamic{ maxDescriptorSetTotalStorageBuffersDynamic_ } + , maxDescriptorSetTotalBuffersDynamic{ maxDescriptorSetTotalBuffersDynamic_ } + , maxDescriptorSetUpdateAfterBindTotalUniformBuffersDynamic{ maxDescriptorSetUpdateAfterBindTotalUniformBuffersDynamic_ } + , maxDescriptorSetUpdateAfterBindTotalStorageBuffersDynamic{ maxDescriptorSetUpdateAfterBindTotalStorageBuffersDynamic_ } + , maxDescriptorSetUpdateAfterBindTotalBuffersDynamic{ maxDescriptorSetUpdateAfterBindTotalBuffersDynamic_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance7PropertiesKHR( PhysicalDeviceMaintenance7PropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + 
PhysicalDeviceMaintenance7PropertiesKHR( VkPhysicalDeviceMaintenance7PropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMaintenance7PropertiesKHR( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceMaintenance7PropertiesKHR & operator=( PhysicalDeviceMaintenance7PropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceMaintenance7PropertiesKHR & operator=( VkPhysicalDeviceMaintenance7PropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceMaintenance7PropertiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMaintenance7PropertiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + robustFragmentShadingRateAttachmentAccess, + separateDepthStencilAttachmentAccess, + maxDescriptorSetTotalUniformBuffersDynamic, + maxDescriptorSetTotalStorageBuffersDynamic, + maxDescriptorSetTotalBuffersDynamic, + maxDescriptorSetUpdateAfterBindTotalUniformBuffersDynamic, + maxDescriptorSetUpdateAfterBindTotalStorageBuffersDynamic, + maxDescriptorSetUpdateAfterBindTotalBuffersDynamic ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceMaintenance7PropertiesKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceMaintenance7PropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( robustFragmentShadingRateAttachmentAccess == rhs.robustFragmentShadingRateAttachmentAccess ) && + ( separateDepthStencilAttachmentAccess == rhs.separateDepthStencilAttachmentAccess ) && + ( maxDescriptorSetTotalUniformBuffersDynamic == rhs.maxDescriptorSetTotalUniformBuffersDynamic ) && + ( maxDescriptorSetTotalStorageBuffersDynamic == rhs.maxDescriptorSetTotalStorageBuffersDynamic ) && + ( maxDescriptorSetTotalBuffersDynamic == rhs.maxDescriptorSetTotalBuffersDynamic ) && + ( maxDescriptorSetUpdateAfterBindTotalUniformBuffersDynamic == rhs.maxDescriptorSetUpdateAfterBindTotalUniformBuffersDynamic ) && + ( maxDescriptorSetUpdateAfterBindTotalStorageBuffersDynamic == rhs.maxDescriptorSetUpdateAfterBindTotalStorageBuffersDynamic ) && + ( maxDescriptorSetUpdateAfterBindTotalBuffersDynamic == rhs.maxDescriptorSetUpdateAfterBindTotalBuffersDynamic ); +# endif + } + + bool operator!=( PhysicalDeviceMaintenance7PropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMaintenance7PropertiesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 robustFragmentShadingRateAttachmentAccess = {}; + VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilAttachmentAccess = {}; + uint32_t maxDescriptorSetTotalUniformBuffersDynamic = {}; + uint32_t maxDescriptorSetTotalStorageBuffersDynamic = {}; + uint32_t maxDescriptorSetTotalBuffersDynamic = {}; + uint32_t maxDescriptorSetUpdateAfterBindTotalUniformBuffersDynamic = {}; + uint32_t maxDescriptorSetUpdateAfterBindTotalStorageBuffersDynamic = {}; + uint32_t maxDescriptorSetUpdateAfterBindTotalBuffersDynamic = {}; + }; + + template 
<> + struct CppType + { + using Type = PhysicalDeviceMaintenance7PropertiesKHR; + }; + struct PhysicalDeviceMapMemoryPlacedFeaturesEXT { using NativeType = VkPhysicalDeviceMapMemoryPlacedFeaturesEXT; @@ -72032,10 +73963,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Bool32 memoryMapRangePlaced_ = {}, VULKAN_HPP_NAMESPACE::Bool32 memoryUnmapReserve_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , memoryMapPlaced( memoryMapPlaced_ ) - , memoryMapRangePlaced( memoryMapRangePlaced_ ) - , memoryUnmapReserve( memoryUnmapReserve_ ) + : pNext{ pNext_ } + , memoryMapPlaced{ memoryMapPlaced_ } + , memoryMapRangePlaced{ memoryMapRangePlaced_ } + , memoryUnmapReserve{ memoryUnmapReserve_ } { } @@ -72152,8 +74083,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceMapMemoryPlacedPropertiesEXT( VULKAN_HPP_NAMESPACE::DeviceSize minPlacedMemoryMapAlignment_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , minPlacedMemoryMapAlignment( minPlacedMemoryMapAlignment_ ) + : pNext{ pNext_ } + , minPlacedMemoryMapAlignment{ minPlacedMemoryMapAlignment_ } { } @@ -72236,9 +74167,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryBudgetPropertiesEXT( std::array const & heapBudget_ = {}, std::array const & heapUsage_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , heapBudget( heapBudget_ ) - , heapUsage( heapUsage_ ) + : pNext{ pNext_ } + , heapBudget{ heapBudget_ } + , heapUsage{ heapUsage_ } { } @@ -72324,8 +74255,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceMemoryDecompressionFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 memoryDecompression_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , memoryDecompression( memoryDecompression_ ) + : pNext{ pNext_ } + , memoryDecompression{ memoryDecompression_ } { } @@ -72423,9 +74354,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR PhysicalDeviceMemoryDecompressionPropertiesNV( VULKAN_HPP_NAMESPACE::MemoryDecompressionMethodFlagsNV decompressionMethods_ = {}, uint64_t maxDecompressionIndirectCount_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , decompressionMethods( decompressionMethods_ ) - , maxDecompressionIndirectCount( maxDecompressionIndirectCount_ ) + : pNext{ pNext_ } + , decompressionMethods{ decompressionMethods_ } + , maxDecompressionIndirectCount{ maxDecompressionIndirectCount_ } { } @@ -72510,8 +74441,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceMemoryPriorityFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 memoryPriority_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , memoryPriority( memoryPriority_ ) + : pNext{ pNext_ } + , memoryPriority{ memoryPriority_ } { } @@ -72607,10 +74538,10 @@ namespace VULKAN_HPP_NAMESPACE std::array const & memoryTypes_ = {}, uint32_t memoryHeapCount_ = {}, std::array const & memoryHeaps_ = {} ) VULKAN_HPP_NOEXCEPT - : memoryTypeCount( memoryTypeCount_ ) - , memoryTypes( memoryTypes_ ) - , memoryHeapCount( memoryHeapCount_ ) - , memoryHeaps( memoryHeaps_ ) + : memoryTypeCount{ memoryTypeCount_ } + , memoryTypes{ memoryTypes_ } + , memoryHeapCount{ memoryHeapCount_ } + , memoryHeaps{ memoryHeaps_ } { } @@ -72621,19 +74552,6 @@ namespace VULKAN_HPP_NAMESPACE { } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - 
PhysicalDeviceMemoryProperties( VULKAN_HPP_NAMESPACE::ArrayProxy const & memoryTypes_, - VULKAN_HPP_NAMESPACE::ArrayProxy const & memoryHeaps_ = {} ) - : memoryTypeCount( std::min( static_cast( memoryTypes_.size() ), VK_MAX_MEMORY_TYPES ) ) - , memoryHeapCount( std::min( static_cast( memoryHeaps_.size() ), VK_MAX_MEMORY_HEAPS ) ) - { - VULKAN_HPP_ASSERT( memoryTypes_.size() < VK_MAX_MEMORY_TYPES ); - memcpy( memoryTypes, memoryTypes_.data(), memoryTypeCount * sizeof( VULKAN_HPP_NAMESPACE::MemoryType ) ); - VULKAN_HPP_ASSERT( memoryHeaps_.size() < VK_MAX_MEMORY_HEAPS ); - memcpy( memoryHeaps, memoryHeaps_.data(), memoryHeapCount * sizeof( VULKAN_HPP_NAMESPACE::MemoryHeap ) ); - } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - PhysicalDeviceMemoryProperties & operator=( PhysicalDeviceMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ @@ -72720,8 +74638,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties memoryProperties_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , memoryProperties( memoryProperties_ ) + : pNext{ pNext_ } + , memoryProperties{ memoryProperties_ } { } @@ -72809,12 +74727,12 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Bool32 primitiveFragmentShadingRateMeshShader_ = {}, VULKAN_HPP_NAMESPACE::Bool32 meshShaderQueries_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , taskShader( taskShader_ ) - , meshShader( meshShader_ ) - , multiviewMeshShader( multiviewMeshShader_ ) - , primitiveFragmentShadingRateMeshShader( primitiveFragmentShadingRateMeshShader_ ) - , meshShaderQueries( meshShaderQueries_ ) + : pNext{ pNext_ } + , taskShader{ taskShader_ } + , meshShader{ meshShader_ } + , multiviewMeshShader{ multiviewMeshShader_ } + , primitiveFragmentShadingRateMeshShader{ primitiveFragmentShadingRateMeshShader_ } + , meshShaderQueries{ meshShaderQueries_ } { } @@ -72949,9 +74867,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR PhysicalDeviceMeshShaderFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 taskShader_ = {}, VULKAN_HPP_NAMESPACE::Bool32 meshShader_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , taskShader( taskShader_ ) - , meshShader( meshShader_ ) + : pNext{ pNext_ } + , taskShader{ taskShader_ } + , meshShader{ meshShader_ } { } @@ -73081,35 +74999,35 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Bool32 prefersCompactVertexOutput_ = {}, VULKAN_HPP_NAMESPACE::Bool32 prefersCompactPrimitiveOutput_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , maxTaskWorkGroupTotalCount( maxTaskWorkGroupTotalCount_ ) - , maxTaskWorkGroupCount( maxTaskWorkGroupCount_ ) - , maxTaskWorkGroupInvocations( maxTaskWorkGroupInvocations_ ) - , maxTaskWorkGroupSize( maxTaskWorkGroupSize_ ) - , maxTaskPayloadSize( maxTaskPayloadSize_ ) - , maxTaskSharedMemorySize( maxTaskSharedMemorySize_ ) - , maxTaskPayloadAndSharedMemorySize( maxTaskPayloadAndSharedMemorySize_ ) - , maxMeshWorkGroupTotalCount( maxMeshWorkGroupTotalCount_ ) - , maxMeshWorkGroupCount( maxMeshWorkGroupCount_ ) - , maxMeshWorkGroupInvocations( maxMeshWorkGroupInvocations_ ) - , maxMeshWorkGroupSize( maxMeshWorkGroupSize_ ) - , maxMeshSharedMemorySize( maxMeshSharedMemorySize_ ) - , maxMeshPayloadAndSharedMemorySize( maxMeshPayloadAndSharedMemorySize_ ) - , maxMeshOutputMemorySize( maxMeshOutputMemorySize_ 
) - , maxMeshPayloadAndOutputMemorySize( maxMeshPayloadAndOutputMemorySize_ ) - , maxMeshOutputComponents( maxMeshOutputComponents_ ) - , maxMeshOutputVertices( maxMeshOutputVertices_ ) - , maxMeshOutputPrimitives( maxMeshOutputPrimitives_ ) - , maxMeshOutputLayers( maxMeshOutputLayers_ ) - , maxMeshMultiviewViewCount( maxMeshMultiviewViewCount_ ) - , meshOutputPerVertexGranularity( meshOutputPerVertexGranularity_ ) - , meshOutputPerPrimitiveGranularity( meshOutputPerPrimitiveGranularity_ ) - , maxPreferredTaskWorkGroupInvocations( maxPreferredTaskWorkGroupInvocations_ ) - , maxPreferredMeshWorkGroupInvocations( maxPreferredMeshWorkGroupInvocations_ ) - , prefersLocalInvocationVertexOutput( prefersLocalInvocationVertexOutput_ ) - , prefersLocalInvocationPrimitiveOutput( prefersLocalInvocationPrimitiveOutput_ ) - , prefersCompactVertexOutput( prefersCompactVertexOutput_ ) - , prefersCompactPrimitiveOutput( prefersCompactPrimitiveOutput_ ) + : pNext{ pNext_ } + , maxTaskWorkGroupTotalCount{ maxTaskWorkGroupTotalCount_ } + , maxTaskWorkGroupCount{ maxTaskWorkGroupCount_ } + , maxTaskWorkGroupInvocations{ maxTaskWorkGroupInvocations_ } + , maxTaskWorkGroupSize{ maxTaskWorkGroupSize_ } + , maxTaskPayloadSize{ maxTaskPayloadSize_ } + , maxTaskSharedMemorySize{ maxTaskSharedMemorySize_ } + , maxTaskPayloadAndSharedMemorySize{ maxTaskPayloadAndSharedMemorySize_ } + , maxMeshWorkGroupTotalCount{ maxMeshWorkGroupTotalCount_ } + , maxMeshWorkGroupCount{ maxMeshWorkGroupCount_ } + , maxMeshWorkGroupInvocations{ maxMeshWorkGroupInvocations_ } + , maxMeshWorkGroupSize{ maxMeshWorkGroupSize_ } + , maxMeshSharedMemorySize{ maxMeshSharedMemorySize_ } + , maxMeshPayloadAndSharedMemorySize{ maxMeshPayloadAndSharedMemorySize_ } + , maxMeshOutputMemorySize{ maxMeshOutputMemorySize_ } + , maxMeshPayloadAndOutputMemorySize{ maxMeshPayloadAndOutputMemorySize_ } + , maxMeshOutputComponents{ maxMeshOutputComponents_ } + , maxMeshOutputVertices{ maxMeshOutputVertices_ } + , maxMeshOutputPrimitives{ maxMeshOutputPrimitives_ } + , maxMeshOutputLayers{ maxMeshOutputLayers_ } + , maxMeshMultiviewViewCount{ maxMeshMultiviewViewCount_ } + , meshOutputPerVertexGranularity{ meshOutputPerVertexGranularity_ } + , meshOutputPerPrimitiveGranularity{ meshOutputPerPrimitiveGranularity_ } + , maxPreferredTaskWorkGroupInvocations{ maxPreferredTaskWorkGroupInvocations_ } + , maxPreferredMeshWorkGroupInvocations{ maxPreferredMeshWorkGroupInvocations_ } + , prefersLocalInvocationVertexOutput{ prefersLocalInvocationVertexOutput_ } + , prefersLocalInvocationPrimitiveOutput{ prefersLocalInvocationPrimitiveOutput_ } + , prefersCompactVertexOutput{ prefersCompactVertexOutput_ } + , prefersCompactPrimitiveOutput{ prefersCompactPrimitiveOutput_ } { } @@ -73304,20 +75222,20 @@ namespace VULKAN_HPP_NAMESPACE uint32_t meshOutputPerVertexGranularity_ = {}, uint32_t meshOutputPerPrimitiveGranularity_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , maxDrawMeshTasksCount( maxDrawMeshTasksCount_ ) - , maxTaskWorkGroupInvocations( maxTaskWorkGroupInvocations_ ) - , maxTaskWorkGroupSize( maxTaskWorkGroupSize_ ) - , maxTaskTotalMemorySize( maxTaskTotalMemorySize_ ) - , maxTaskOutputCount( maxTaskOutputCount_ ) - , maxMeshWorkGroupInvocations( maxMeshWorkGroupInvocations_ ) - , maxMeshWorkGroupSize( maxMeshWorkGroupSize_ ) - , maxMeshTotalMemorySize( maxMeshTotalMemorySize_ ) - , maxMeshOutputVertices( maxMeshOutputVertices_ ) - , maxMeshOutputPrimitives( maxMeshOutputPrimitives_ ) - , maxMeshMultiviewViewCount( 
maxMeshMultiviewViewCount_ ) - , meshOutputPerVertexGranularity( meshOutputPerVertexGranularity_ ) - , meshOutputPerPrimitiveGranularity( meshOutputPerPrimitiveGranularity_ ) + : pNext{ pNext_ } + , maxDrawMeshTasksCount{ maxDrawMeshTasksCount_ } + , maxTaskWorkGroupInvocations{ maxTaskWorkGroupInvocations_ } + , maxTaskWorkGroupSize{ maxTaskWorkGroupSize_ } + , maxTaskTotalMemorySize{ maxTaskTotalMemorySize_ } + , maxTaskOutputCount{ maxTaskOutputCount_ } + , maxMeshWorkGroupInvocations{ maxMeshWorkGroupInvocations_ } + , maxMeshWorkGroupSize{ maxMeshWorkGroupSize_ } + , maxMeshTotalMemorySize{ maxMeshTotalMemorySize_ } + , maxMeshOutputVertices{ maxMeshOutputVertices_ } + , maxMeshOutputPrimitives{ maxMeshOutputPrimitives_ } + , maxMeshMultiviewViewCount{ maxMeshMultiviewViewCount_ } + , meshOutputPerVertexGranularity{ meshOutputPerVertexGranularity_ } + , meshOutputPerPrimitiveGranularity{ meshOutputPerPrimitiveGranularity_ } { } @@ -73445,8 +75363,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiDrawFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 multiDraw_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , multiDraw( multiDraw_ ) + : pNext{ pNext_ } + , multiDraw{ multiDraw_ } { } @@ -73541,8 +75459,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiDrawPropertiesEXT( uint32_t maxMultiDrawCount_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , maxMultiDrawCount( maxMultiDrawCount_ ) + : pNext{ pNext_ } + , maxMultiDrawCount{ maxMultiDrawCount_ } { } @@ -73624,8 +75542,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 multisampledRenderToSingleSampled_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , multisampledRenderToSingleSampled( multisampledRenderToSingleSampled_ ) + : pNext{ pNext_ } + , multisampledRenderToSingleSampled{ multisampledRenderToSingleSampled_ } { } @@ -73728,10 +75646,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader_ = {}, VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , multiview( multiview_ ) - , multiviewGeometryShader( multiviewGeometryShader_ ) - , multiviewTessellationShader( multiviewTessellationShader_ ) + : pNext{ pNext_ } + , multiview{ multiview_ } + , multiviewGeometryShader{ multiviewGeometryShader_ } + , multiviewTessellationShader{ multiviewTessellationShader_ } { } @@ -73850,8 +75768,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX( VULKAN_HPP_NAMESPACE::Bool32 perViewPositionAllComponents_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , perViewPositionAllComponents( perViewPositionAllComponents_ ) + : pNext{ pNext_ } + , perViewPositionAllComponents{ perViewPositionAllComponents_ } { } @@ -73935,8 +75853,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM( VULKAN_HPP_NAMESPACE::Bool32 multiviewPerViewRenderAreas_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , multiviewPerViewRenderAreas( 
multiviewPerViewRenderAreas_ ) + : pNext{ pNext_ } + , multiviewPerViewRenderAreas{ multiviewPerViewRenderAreas_ } { } @@ -74035,8 +75953,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM( VULKAN_HPP_NAMESPACE::Bool32 multiviewPerViewViewports_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , multiviewPerViewViewports( multiviewPerViewViewports_ ) + : pNext{ pNext_ } + , multiviewPerViewViewports{ multiviewPerViewViewports_ } { } @@ -74136,9 +76054,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewProperties( uint32_t maxMultiviewViewCount_ = {}, uint32_t maxMultiviewInstanceIndex_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , maxMultiviewViewCount( maxMultiviewViewCount_ ) - , maxMultiviewInstanceIndex( maxMultiviewInstanceIndex_ ) + : pNext{ pNext_ } + , maxMultiviewViewCount{ maxMultiviewViewCount_ } + , maxMultiviewInstanceIndex{ maxMultiviewInstanceIndex_ } { } @@ -74224,8 +76142,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceMutableDescriptorTypeFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 mutableDescriptorType_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , mutableDescriptorType( mutableDescriptorType_ ) + : pNext{ pNext_ } + , mutableDescriptorType{ mutableDescriptorType_ } { } @@ -74327,10 +76245,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Bool32 nestedCommandBufferRendering_ = {}, VULKAN_HPP_NAMESPACE::Bool32 nestedCommandBufferSimultaneousUse_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , nestedCommandBuffer( nestedCommandBuffer_ ) - , nestedCommandBufferRendering( nestedCommandBufferRendering_ ) - , nestedCommandBufferSimultaneousUse( nestedCommandBufferSimultaneousUse_ ) + : pNext{ pNext_ } + , nestedCommandBuffer{ nestedCommandBuffer_ } + , nestedCommandBufferRendering{ nestedCommandBufferRendering_ } + , nestedCommandBufferSimultaneousUse{ nestedCommandBufferSimultaneousUse_ } { } @@ -74449,8 +76367,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceNestedCommandBufferPropertiesEXT( uint32_t maxCommandBufferNestingLevel_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , maxCommandBufferNestingLevel( maxCommandBufferNestingLevel_ ) + : pNext{ pNext_ } + , maxCommandBufferNestingLevel{ maxCommandBufferNestingLevel_ } { } @@ -74548,8 +76466,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceNonSeamlessCubeMapFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 nonSeamlessCubeMap_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , nonSeamlessCubeMap( nonSeamlessCubeMap_ ) + : pNext{ pNext_ } + , nonSeamlessCubeMap{ nonSeamlessCubeMap_ } { } @@ -74648,10 +76566,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Bool32 micromapCaptureReplay_ = {}, VULKAN_HPP_NAMESPACE::Bool32 micromapHostCommands_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , micromap( micromap_ ) - , micromapCaptureReplay( micromapCaptureReplay_ ) - , micromapHostCommands( micromapHostCommands_ ) + : pNext{ pNext_ } + , micromap{ micromap_ } + , micromapCaptureReplay{ micromapCaptureReplay_ } + , micromapHostCommands{ micromapHostCommands_ } { } @@ -74769,9 
+76687,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR PhysicalDeviceOpacityMicromapPropertiesEXT( uint32_t maxOpacity2StateSubdivisionLevel_ = {}, uint32_t maxOpacity4StateSubdivisionLevel_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , maxOpacity2StateSubdivisionLevel( maxOpacity2StateSubdivisionLevel_ ) - , maxOpacity4StateSubdivisionLevel( maxOpacity4StateSubdivisionLevel_ ) + : pNext{ pNext_ } + , maxOpacity2StateSubdivisionLevel{ maxOpacity2StateSubdivisionLevel_ } + , maxOpacity4StateSubdivisionLevel{ maxOpacity4StateSubdivisionLevel_ } { } @@ -74854,8 +76772,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceOpticalFlowFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 opticalFlow_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , opticalFlow( opticalFlow_ ) + : pNext{ pNext_ } + , opticalFlow{ opticalFlow_ } { } @@ -74961,18 +76879,18 @@ namespace VULKAN_HPP_NAMESPACE uint32_t maxHeight_ = {}, uint32_t maxNumRegionsOfInterest_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , supportedOutputGridSizes( supportedOutputGridSizes_ ) - , supportedHintGridSizes( supportedHintGridSizes_ ) - , hintSupported( hintSupported_ ) - , costSupported( costSupported_ ) - , bidirectionalFlowSupported( bidirectionalFlowSupported_ ) - , globalFlowSupported( globalFlowSupported_ ) - , minWidth( minWidth_ ) - , minHeight( minHeight_ ) - , maxWidth( maxWidth_ ) - , maxHeight( maxHeight_ ) - , maxNumRegionsOfInterest( maxNumRegionsOfInterest_ ) + : pNext{ pNext_ } + , supportedOutputGridSizes{ supportedOutputGridSizes_ } + , supportedHintGridSizes{ supportedHintGridSizes_ } + , hintSupported{ hintSupported_ } + , costSupported{ costSupported_ } + , bidirectionalFlowSupported{ bidirectionalFlowSupported_ } + , globalFlowSupported{ globalFlowSupported_ } + , minWidth{ minWidth_ } + , minHeight{ minHeight_ } + , maxWidth{ maxWidth_ } + , maxHeight{ maxHeight_ } + , maxNumRegionsOfInterest{ maxNumRegionsOfInterest_ } { } @@ -75092,11 +77010,11 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDevicePCIBusInfoPropertiesEXT( uint32_t pciDomain_ = {}, uint32_t pciBus_ = {}, uint32_t pciDevice_ = {}, uint32_t pciFunction_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , pciDomain( pciDomain_ ) - , pciBus( pciBus_ ) - , pciDevice( pciDevice_ ) - , pciFunction( pciFunction_ ) + : pNext{ pNext_ } + , pciDomain{ pciDomain_ } + , pciBus{ pciBus_ } + , pciDevice{ pciDevice_ } + , pciFunction{ pciFunction_ } { } @@ -75182,8 +77100,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 pageableDeviceLocalMemory_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , pageableDeviceLocalMemory( pageableDeviceLocalMemory_ ) + : pNext{ pNext_ } + , pageableDeviceLocalMemory{ pageableDeviceLocalMemory_ } { } @@ -75283,9 +77201,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR PhysicalDevicePerStageDescriptorSetFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 perStageDescriptorSet_ = {}, VULKAN_HPP_NAMESPACE::Bool32 dynamicPipelineLayout_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , perStageDescriptorSet( perStageDescriptorSet_ ) - , dynamicPipelineLayout( dynamicPipelineLayout_ ) + : pNext{ pNext_ } + , 
perStageDescriptorSet{ perStageDescriptorSet_ } + , dynamicPipelineLayout{ dynamicPipelineLayout_ } { } @@ -75393,9 +77311,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR PhysicalDevicePerformanceQueryFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 performanceCounterQueryPools_ = {}, VULKAN_HPP_NAMESPACE::Bool32 performanceCounterMultipleQueryPools_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , performanceCounterQueryPools( performanceCounterQueryPools_ ) - , performanceCounterMultipleQueryPools( performanceCounterMultipleQueryPools_ ) + : pNext{ pNext_ } + , performanceCounterQueryPools{ performanceCounterQueryPools_ } + , performanceCounterMultipleQueryPools{ performanceCounterMultipleQueryPools_ } { } @@ -75501,8 +77419,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDevicePerformanceQueryPropertiesKHR( VULKAN_HPP_NAMESPACE::Bool32 allowCommandBufferQueryCopies_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , allowCommandBufferQueryCopies( allowCommandBufferQueryCopies_ ) + : pNext{ pNext_ } + , allowCommandBufferQueryCopies{ allowCommandBufferQueryCopies_ } { } @@ -75574,6 +77492,257 @@ namespace VULKAN_HPP_NAMESPACE using Type = PhysicalDevicePerformanceQueryPropertiesKHR; }; + struct PhysicalDevicePipelineBinaryFeaturesKHR + { + using NativeType = VkPhysicalDevicePipelineBinaryFeaturesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePipelineBinaryFeaturesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineBinaryFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 pipelineBinaries_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pipelineBinaries{ pipelineBinaries_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineBinaryFeaturesKHR( PhysicalDevicePipelineBinaryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePipelineBinaryFeaturesKHR( VkPhysicalDevicePipelineBinaryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDevicePipelineBinaryFeaturesKHR( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDevicePipelineBinaryFeaturesKHR & operator=( PhysicalDevicePipelineBinaryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDevicePipelineBinaryFeaturesKHR & operator=( VkPhysicalDevicePipelineBinaryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineBinaryFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineBinaryFeaturesKHR & setPipelineBinaries( VULKAN_HPP_NAMESPACE::Bool32 pipelineBinaries_ ) VULKAN_HPP_NOEXCEPT + { + pipelineBinaries = pipelineBinaries_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDevicePipelineBinaryFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevicePipelineBinaryFeaturesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, 
pipelineBinaries ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevicePipelineBinaryFeaturesKHR const & ) const = default; +#else + bool operator==( PhysicalDevicePipelineBinaryFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipelineBinaries == rhs.pipelineBinaries ); +# endif + } + + bool operator!=( PhysicalDevicePipelineBinaryFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePipelineBinaryFeaturesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 pipelineBinaries = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDevicePipelineBinaryFeaturesKHR; + }; + + struct PhysicalDevicePipelineBinaryPropertiesKHR + { + using NativeType = VkPhysicalDevicePipelineBinaryPropertiesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePipelineBinaryPropertiesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineBinaryPropertiesKHR( VULKAN_HPP_NAMESPACE::Bool32 pipelineBinaryInternalCache_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 pipelineBinaryInternalCacheControl_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 pipelineBinaryPrefersInternalCache_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 pipelineBinaryPrecompiledInternalCache_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 pipelineBinaryCompressedData_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pipelineBinaryInternalCache{ pipelineBinaryInternalCache_ } + , pipelineBinaryInternalCacheControl{ pipelineBinaryInternalCacheControl_ } + , pipelineBinaryPrefersInternalCache{ pipelineBinaryPrefersInternalCache_ } + , pipelineBinaryPrecompiledInternalCache{ pipelineBinaryPrecompiledInternalCache_ } + , pipelineBinaryCompressedData{ pipelineBinaryCompressedData_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineBinaryPropertiesKHR( PhysicalDevicePipelineBinaryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePipelineBinaryPropertiesKHR( VkPhysicalDevicePipelineBinaryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDevicePipelineBinaryPropertiesKHR( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDevicePipelineBinaryPropertiesKHR & operator=( PhysicalDevicePipelineBinaryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDevicePipelineBinaryPropertiesKHR & operator=( VkPhysicalDevicePipelineBinaryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineBinaryPropertiesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineBinaryPropertiesKHR & + setPipelineBinaryInternalCache( VULKAN_HPP_NAMESPACE::Bool32 pipelineBinaryInternalCache_ ) VULKAN_HPP_NOEXCEPT + { + pipelineBinaryInternalCache = pipelineBinaryInternalCache_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineBinaryPropertiesKHR & + setPipelineBinaryInternalCacheControl( VULKAN_HPP_NAMESPACE::Bool32 
pipelineBinaryInternalCacheControl_ ) VULKAN_HPP_NOEXCEPT + { + pipelineBinaryInternalCacheControl = pipelineBinaryInternalCacheControl_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineBinaryPropertiesKHR & + setPipelineBinaryPrefersInternalCache( VULKAN_HPP_NAMESPACE::Bool32 pipelineBinaryPrefersInternalCache_ ) VULKAN_HPP_NOEXCEPT + { + pipelineBinaryPrefersInternalCache = pipelineBinaryPrefersInternalCache_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineBinaryPropertiesKHR & + setPipelineBinaryPrecompiledInternalCache( VULKAN_HPP_NAMESPACE::Bool32 pipelineBinaryPrecompiledInternalCache_ ) VULKAN_HPP_NOEXCEPT + { + pipelineBinaryPrecompiledInternalCache = pipelineBinaryPrecompiledInternalCache_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineBinaryPropertiesKHR & + setPipelineBinaryCompressedData( VULKAN_HPP_NAMESPACE::Bool32 pipelineBinaryCompressedData_ ) VULKAN_HPP_NOEXCEPT + { + pipelineBinaryCompressedData = pipelineBinaryCompressedData_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDevicePipelineBinaryPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevicePipelineBinaryPropertiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + pipelineBinaryInternalCache, + pipelineBinaryInternalCacheControl, + pipelineBinaryPrefersInternalCache, + pipelineBinaryPrecompiledInternalCache, + pipelineBinaryCompressedData ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevicePipelineBinaryPropertiesKHR const & ) const = default; +#else + bool operator==( PhysicalDevicePipelineBinaryPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipelineBinaryInternalCache == rhs.pipelineBinaryInternalCache ) && + ( pipelineBinaryInternalCacheControl == rhs.pipelineBinaryInternalCacheControl ) && + ( pipelineBinaryPrefersInternalCache == rhs.pipelineBinaryPrefersInternalCache ) && + ( pipelineBinaryPrecompiledInternalCache == rhs.pipelineBinaryPrecompiledInternalCache ) && + ( pipelineBinaryCompressedData == rhs.pipelineBinaryCompressedData ); +# endif + } + + bool operator!=( PhysicalDevicePipelineBinaryPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePipelineBinaryPropertiesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 pipelineBinaryInternalCache = {}; + VULKAN_HPP_NAMESPACE::Bool32 pipelineBinaryInternalCacheControl = {}; + VULKAN_HPP_NAMESPACE::Bool32 pipelineBinaryPrefersInternalCache = {}; + VULKAN_HPP_NAMESPACE::Bool32 pipelineBinaryPrecompiledInternalCache = {}; + VULKAN_HPP_NAMESPACE::Bool32 pipelineBinaryCompressedData = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDevicePipelineBinaryPropertiesKHR; + }; + struct PhysicalDevicePipelineCreationCacheControlFeatures { using NativeType = VkPhysicalDevicePipelineCreationCacheControlFeatures; @@ -75584,8 +77753,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) 
VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineCreationCacheControlFeatures( VULKAN_HPP_NAMESPACE::Bool32 pipelineCreationCacheControl_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , pipelineCreationCacheControl( pipelineCreationCacheControl_ ) + : pNext{ pNext_ } + , pipelineCreationCacheControl{ pipelineCreationCacheControl_ } { } @@ -75686,8 +77855,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineExecutablePropertiesFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 pipelineExecutableInfo_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , pipelineExecutableInfo( pipelineExecutableInfo_ ) + : pNext{ pNext_ } + , pipelineExecutableInfo{ pipelineExecutableInfo_ } { } @@ -75786,8 +77955,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 pipelineLibraryGroupHandles_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , pipelineLibraryGroupHandles( pipelineLibraryGroupHandles_ ) + : pNext{ pNext_ } + , pipelineLibraryGroupHandles{ pipelineLibraryGroupHandles_ } { } @@ -75886,8 +78055,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDevicePipelinePropertiesFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 pipelinePropertiesIdentifier_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , pipelinePropertiesIdentifier( pipelinePropertiesIdentifier_ ) + : pNext{ pNext_ } + , pipelinePropertiesIdentifier{ pipelinePropertiesIdentifier_ } { } @@ -75984,8 +78153,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineProtectedAccessFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 pipelineProtectedAccess_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , pipelineProtectedAccess( pipelineProtectedAccess_ ) + : pNext{ pNext_ } + , pipelineProtectedAccess{ pipelineProtectedAccess_ } { } @@ -76083,8 +78252,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineRobustnessFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 pipelineRobustness_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , pipelineRobustness( pipelineRobustness_ ) + : pNext{ pNext_ } + , pipelineRobustness{ pipelineRobustness_ } { } @@ -76189,11 +78358,11 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehaviorEXT defaultRobustnessImages_ = VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehaviorEXT::eDeviceDefault, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , defaultRobustnessStorageBuffers( defaultRobustnessStorageBuffers_ ) - , defaultRobustnessUniformBuffers( defaultRobustnessUniformBuffers_ ) - , defaultRobustnessVertexInputs( defaultRobustnessVertexInputs_ ) - , defaultRobustnessImages( defaultRobustnessImages_ ) + : pNext{ pNext_ } + , defaultRobustnessStorageBuffers{ defaultRobustnessStorageBuffers_ } + , defaultRobustnessUniformBuffers{ defaultRobustnessUniformBuffers_ } + , defaultRobustnessVertexInputs{ defaultRobustnessVertexInputs_ } + , defaultRobustnessImages{ defaultRobustnessImages_ } { } @@ -76290,8 +78459,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR PhysicalDevicePointClippingProperties( 
VULKAN_HPP_NAMESPACE::PointClippingBehavior pointClippingBehavior_ = VULKAN_HPP_NAMESPACE::PointClippingBehavior::eAllClipPlanes, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , pointClippingBehavior( pointClippingBehavior_ ) + : pNext{ pNext_ } + , pointClippingBehavior{ pointClippingBehavior_ } { } @@ -76390,22 +78559,22 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Bool32 triangleFans_ = {}, VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeAccessBeyondStride_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , constantAlphaColorBlendFactors( constantAlphaColorBlendFactors_ ) - , events( events_ ) - , imageViewFormatReinterpretation( imageViewFormatReinterpretation_ ) - , imageViewFormatSwizzle( imageViewFormatSwizzle_ ) - , imageView2DOn3DImage( imageView2DOn3DImage_ ) - , multisampleArrayImage( multisampleArrayImage_ ) - , mutableComparisonSamplers( mutableComparisonSamplers_ ) - , pointPolygons( pointPolygons_ ) - , samplerMipLodBias( samplerMipLodBias_ ) - , separateStencilMaskRef( separateStencilMaskRef_ ) - , shaderSampleRateInterpolationFunctions( shaderSampleRateInterpolationFunctions_ ) - , tessellationIsolines( tessellationIsolines_ ) - , tessellationPointMode( tessellationPointMode_ ) - , triangleFans( triangleFans_ ) - , vertexAttributeAccessBeyondStride( vertexAttributeAccessBeyondStride_ ) + : pNext{ pNext_ } + , constantAlphaColorBlendFactors{ constantAlphaColorBlendFactors_ } + , events{ events_ } + , imageViewFormatReinterpretation{ imageViewFormatReinterpretation_ } + , imageViewFormatSwizzle{ imageViewFormatSwizzle_ } + , imageView2DOn3DImage{ imageView2DOn3DImage_ } + , multisampleArrayImage{ multisampleArrayImage_ } + , mutableComparisonSamplers{ mutableComparisonSamplers_ } + , pointPolygons{ pointPolygons_ } + , samplerMipLodBias{ samplerMipLodBias_ } + , separateStencilMaskRef{ separateStencilMaskRef_ } + , shaderSampleRateInterpolationFunctions{ shaderSampleRateInterpolationFunctions_ } + , tessellationIsolines{ tessellationIsolines_ } + , tessellationPointMode{ tessellationPointMode_ } + , triangleFans{ triangleFans_ } + , vertexAttributeAccessBeyondStride{ vertexAttributeAccessBeyondStride_ } { } @@ -76653,8 +78822,8 @@ namespace VULKAN_HPP_NAMESPACE # if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDevicePortabilitySubsetPropertiesKHR( uint32_t minVertexInputBindingStrideAlignment_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , minVertexInputBindingStrideAlignment( minVertexInputBindingStrideAlignment_ ) + : pNext{ pNext_ } + , minVertexInputBindingStrideAlignment{ minVertexInputBindingStrideAlignment_ } { } @@ -76752,8 +78921,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDevicePresentBarrierFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 presentBarrier_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , presentBarrier( presentBarrier_ ) + : pNext{ pNext_ } + , presentBarrier{ presentBarrier_ } { } @@ -76848,8 +79017,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDevicePresentIdFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 presentId_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , presentId( presentId_ ) + : pNext{ pNext_ } + , presentId{ presentId_ } { } @@ -76944,8 +79113,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) 
VULKAN_HPP_CONSTEXPR PhysicalDevicePresentWaitFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 presentWait_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , presentWait( presentWait_ ) + : pNext{ pNext_ } + , presentWait{ presentWait_ } { } @@ -77042,9 +79211,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 primitiveTopologyListRestart_ = {}, VULKAN_HPP_NAMESPACE::Bool32 primitiveTopologyPatchListRestart_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , primitiveTopologyListRestart( primitiveTopologyListRestart_ ) - , primitiveTopologyPatchListRestart( primitiveTopologyPatchListRestart_ ) + : pNext{ pNext_ } + , primitiveTopologyListRestart{ primitiveTopologyListRestart_ } + , primitiveTopologyPatchListRestart{ primitiveTopologyPatchListRestart_ } { } @@ -77154,10 +79323,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Bool32 primitivesGeneratedQueryWithRasterizerDiscard_ = {}, VULKAN_HPP_NAMESPACE::Bool32 primitivesGeneratedQueryWithNonZeroStreams_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , primitivesGeneratedQuery( primitivesGeneratedQuery_ ) - , primitivesGeneratedQueryWithRasterizerDiscard( primitivesGeneratedQueryWithRasterizerDiscard_ ) - , primitivesGeneratedQueryWithNonZeroStreams( primitivesGeneratedQueryWithNonZeroStreams_ ) + : pNext{ pNext_ } + , primitivesGeneratedQuery{ primitivesGeneratedQuery_ } + , primitivesGeneratedQueryWithRasterizerDiscard{ primitivesGeneratedQueryWithRasterizerDiscard_ } + , primitivesGeneratedQueryWithNonZeroStreams{ primitivesGeneratedQueryWithNonZeroStreams_ } { } @@ -77277,8 +79446,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDevicePrivateDataFeatures( VULKAN_HPP_NAMESPACE::Bool32 privateData_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , privateData( privateData_ ) + : pNext{ pNext_ } + , privateData{ privateData_ } { } @@ -77366,334 +79535,6 @@ namespace VULKAN_HPP_NAMESPACE using PhysicalDevicePrivateDataFeaturesEXT = PhysicalDevicePrivateDataFeatures; - struct PhysicalDeviceSparseProperties - { - using NativeType = VkPhysicalDeviceSparseProperties; - -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceSparseProperties( VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DBlockShape_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DMultisampleBlockShape_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 residencyStandard3DBlockShape_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 residencyAlignedMipSize_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 residencyNonResidentStrict_ = {} ) VULKAN_HPP_NOEXCEPT - : residencyStandard2DBlockShape( residencyStandard2DBlockShape_ ) - , residencyStandard2DMultisampleBlockShape( residencyStandard2DMultisampleBlockShape_ ) - , residencyStandard3DBlockShape( residencyStandard3DBlockShape_ ) - , residencyAlignedMipSize( residencyAlignedMipSize_ ) - , residencyNonResidentStrict( residencyNonResidentStrict_ ) - { - } - - VULKAN_HPP_CONSTEXPR PhysicalDeviceSparseProperties( PhysicalDeviceSparseProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - PhysicalDeviceSparseProperties( VkPhysicalDeviceSparseProperties const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceSparseProperties( *reinterpret_cast( &rhs ) ) - { - } - - PhysicalDeviceSparseProperties & operator=( PhysicalDeviceSparseProperties const & rhs ) VULKAN_HPP_NOEXCEPT 
= default; -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - PhysicalDeviceSparseProperties & operator=( VkPhysicalDeviceSparseProperties const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast( &rhs ); - return *this; - } - - operator VkPhysicalDeviceSparseProperties const &() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDeviceSparseProperties &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION - auto -# else - std::tuple -# endif - reflect() const VULKAN_HPP_NOEXCEPT - { - return std::tie( residencyStandard2DBlockShape, - residencyStandard2DMultisampleBlockShape, - residencyStandard3DBlockShape, - residencyAlignedMipSize, - residencyNonResidentStrict ); - } -#endif - -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceSparseProperties const & ) const = default; -#else - bool operator==( PhysicalDeviceSparseProperties const & rhs ) const VULKAN_HPP_NOEXCEPT - { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( residencyStandard2DBlockShape == rhs.residencyStandard2DBlockShape ) && - ( residencyStandard2DMultisampleBlockShape == rhs.residencyStandard2DMultisampleBlockShape ) && - ( residencyStandard3DBlockShape == rhs.residencyStandard3DBlockShape ) && ( residencyAlignedMipSize == rhs.residencyAlignedMipSize ) && - ( residencyNonResidentStrict == rhs.residencyNonResidentStrict ); -# endif - } - - bool operator!=( PhysicalDeviceSparseProperties const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - public: - VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DBlockShape = {}; - VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DMultisampleBlockShape = {}; - VULKAN_HPP_NAMESPACE::Bool32 residencyStandard3DBlockShape = {}; - VULKAN_HPP_NAMESPACE::Bool32 residencyAlignedMipSize = {}; - VULKAN_HPP_NAMESPACE::Bool32 residencyNonResidentStrict = {}; - }; - - struct PhysicalDeviceProperties - { - using NativeType = VkPhysicalDeviceProperties; - -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProperties( uint32_t apiVersion_ = {}, - uint32_t driverVersion_ = {}, - uint32_t vendorID_ = {}, - uint32_t deviceID_ = {}, - VULKAN_HPP_NAMESPACE::PhysicalDeviceType deviceType_ = VULKAN_HPP_NAMESPACE::PhysicalDeviceType::eOther, - std::array const & deviceName_ = {}, - std::array const & pipelineCacheUUID_ = {}, - VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits limits_ = {}, - VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties sparseProperties_ = {} ) VULKAN_HPP_NOEXCEPT - : apiVersion( apiVersion_ ) - , driverVersion( driverVersion_ ) - , vendorID( vendorID_ ) - , deviceID( deviceID_ ) - , deviceType( deviceType_ ) - , deviceName( deviceName_ ) - , pipelineCacheUUID( pipelineCacheUUID_ ) - , limits( limits_ ) - , sparseProperties( sparseProperties_ ) - { - } - - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProperties( PhysicalDeviceProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - PhysicalDeviceProperties( VkPhysicalDeviceProperties const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceProperties( *reinterpret_cast( &rhs ) ) - { - } - -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - PhysicalDeviceProperties( uint32_t apiVersion_, - uint32_t driverVersion_, - uint32_t vendorID_, - uint32_t deviceID_, - VULKAN_HPP_NAMESPACE::PhysicalDeviceType deviceType_, - std::string const & deviceName_, - std::array 
const & pipelineCacheUUID_ = {}, - VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits limits_ = {}, - VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties sparseProperties_ = {} ) - : apiVersion( apiVersion_ ) - , driverVersion( driverVersion_ ) - , vendorID( vendorID_ ) - , deviceID( deviceID_ ) - , deviceType( deviceType_ ) - , pipelineCacheUUID( pipelineCacheUUID_ ) - , limits( limits_ ) - , sparseProperties( sparseProperties_ ) - { - VULKAN_HPP_ASSERT( deviceName_.size() < VK_MAX_PHYSICAL_DEVICE_NAME_SIZE ); -# if defined( WIN32 ) - strncpy_s( deviceName, VK_MAX_PHYSICAL_DEVICE_NAME_SIZE, deviceName_.data(), deviceName_.size() ); -# else - strncpy( deviceName, deviceName_.data(), std::min( VK_MAX_PHYSICAL_DEVICE_NAME_SIZE, deviceName_.size() ) ); -# endif - } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - PhysicalDeviceProperties & operator=( PhysicalDeviceProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - PhysicalDeviceProperties & operator=( VkPhysicalDeviceProperties const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast( &rhs ); - return *this; - } - - operator VkPhysicalDeviceProperties const &() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDeviceProperties &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION - auto -# else - std::tuple const &, - VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &, - VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits const &, - VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties const &> -# endif - reflect() const VULKAN_HPP_NOEXCEPT - { - return std::tie( apiVersion, driverVersion, vendorID, deviceID, deviceType, deviceName, pipelineCacheUUID, limits, sparseProperties ); - } -#endif - -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - std::partial_ordering operator<=>( PhysicalDeviceProperties const & rhs ) const VULKAN_HPP_NOEXCEPT - { - if ( auto cmp = apiVersion <=> rhs.apiVersion; cmp != 0 ) - return cmp; - if ( auto cmp = driverVersion <=> rhs.driverVersion; cmp != 0 ) - return cmp; - if ( auto cmp = vendorID <=> rhs.vendorID; cmp != 0 ) - return cmp; - if ( auto cmp = deviceID <=> rhs.deviceID; cmp != 0 ) - return cmp; - if ( auto cmp = deviceType <=> rhs.deviceType; cmp != 0 ) - return cmp; - if ( auto cmp = strcmp( deviceName, rhs.deviceName ); cmp != 0 ) - return ( cmp < 0 ) ? 
std::partial_ordering::less : std::partial_ordering::greater; - if ( auto cmp = pipelineCacheUUID <=> rhs.pipelineCacheUUID; cmp != 0 ) - return cmp; - if ( auto cmp = limits <=> rhs.limits; cmp != 0 ) - return cmp; - if ( auto cmp = sparseProperties <=> rhs.sparseProperties; cmp != 0 ) - return cmp; - - return std::partial_ordering::equivalent; - } -#endif - - bool operator==( PhysicalDeviceProperties const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return ( apiVersion == rhs.apiVersion ) && ( driverVersion == rhs.driverVersion ) && ( vendorID == rhs.vendorID ) && ( deviceID == rhs.deviceID ) && - ( deviceType == rhs.deviceType ) && ( strcmp( deviceName, rhs.deviceName ) == 0 ) && ( pipelineCacheUUID == rhs.pipelineCacheUUID ) && - ( limits == rhs.limits ) && ( sparseProperties == rhs.sparseProperties ); - } - - bool operator!=( PhysicalDeviceProperties const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } - - public: - uint32_t apiVersion = {}; - uint32_t driverVersion = {}; - uint32_t vendorID = {}; - uint32_t deviceID = {}; - VULKAN_HPP_NAMESPACE::PhysicalDeviceType deviceType = VULKAN_HPP_NAMESPACE::PhysicalDeviceType::eOther; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D deviceName = {}; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D pipelineCacheUUID = {}; - VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits limits = {}; - VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties sparseProperties = {}; - }; - - struct PhysicalDeviceProperties2 - { - using NativeType = VkPhysicalDeviceProperties2; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceProperties2; - -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties properties_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , properties( properties_ ) - { - } - - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProperties2( PhysicalDeviceProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - PhysicalDeviceProperties2( VkPhysicalDeviceProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceProperties2( *reinterpret_cast( &rhs ) ) - { - } - - PhysicalDeviceProperties2 & operator=( PhysicalDeviceProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - PhysicalDeviceProperties2 & operator=( VkPhysicalDeviceProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast( &rhs ); - return *this; - } - - operator VkPhysicalDeviceProperties2 const &() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDeviceProperties2 &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION - auto -# else - std::tuple -# endif - reflect() const VULKAN_HPP_NOEXCEPT - { - return std::tie( sType, pNext, properties ); - } -#endif - -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceProperties2 const & ) const = default; -#else - bool operator==( PhysicalDeviceProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT - { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( properties == rhs.properties ); -# endif - } - - bool operator!=( PhysicalDeviceProperties2 const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - 
} -#endif - - public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceProperties2; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties properties = {}; - }; - - template <> - struct CppType - { - using Type = PhysicalDeviceProperties2; - }; - - using PhysicalDeviceProperties2KHR = PhysicalDeviceProperties2; - struct PhysicalDeviceProtectedMemoryFeatures { using NativeType = VkPhysicalDeviceProtectedMemoryFeatures; @@ -77704,8 +79545,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceProtectedMemoryFeatures( VULKAN_HPP_NAMESPACE::Bool32 protectedMemory_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , protectedMemory( protectedMemory_ ) + : pNext{ pNext_ } + , protectedMemory{ protectedMemory_ } { } @@ -77801,8 +79642,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceProtectedMemoryProperties( VULKAN_HPP_NAMESPACE::Bool32 protectedNoFault_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , protectedNoFault( protectedNoFault_ ) + : pNext{ pNext_ } + , protectedNoFault{ protectedNoFault_ } { } @@ -77885,9 +79726,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR PhysicalDeviceProvokingVertexFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 provokingVertexLast_ = {}, VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackPreservesProvokingVertex_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , provokingVertexLast( provokingVertexLast_ ) - , transformFeedbackPreservesProvokingVertex( transformFeedbackPreservesProvokingVertex_ ) + : pNext{ pNext_ } + , provokingVertexLast{ provokingVertexLast_ } + , transformFeedbackPreservesProvokingVertex{ transformFeedbackPreservesProvokingVertex_ } { } @@ -77994,9 +79835,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR PhysicalDeviceProvokingVertexPropertiesEXT( VULKAN_HPP_NAMESPACE::Bool32 provokingVertexModePerPipeline_ = {}, VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackPreservesTriangleFanProvokingVertex_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , provokingVertexModePerPipeline( provokingVertexModePerPipeline_ ) - , transformFeedbackPreservesTriangleFanProvokingVertex( transformFeedbackPreservesTriangleFanProvokingVertex_ ) + : pNext{ pNext_ } + , provokingVertexModePerPipeline{ provokingVertexModePerPipeline_ } + , transformFeedbackPreservesTriangleFanProvokingVertex{ transformFeedbackPreservesTriangleFanProvokingVertex_ } { } @@ -78079,8 +79920,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDevicePushDescriptorPropertiesKHR( uint32_t maxPushDescriptors_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , maxPushDescriptors( maxPushDescriptors_ ) + : pNext{ pNext_ } + , maxPushDescriptors{ maxPushDescriptors_ } { } @@ -78162,8 +80003,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceRGBA10X6FormatsFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 formatRgba10x6WithoutYCbCrSampler_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , formatRgba10x6WithoutYCbCrSampler( formatRgba10x6WithoutYCbCrSampler_ ) + : pNext{ pNext_ } + , formatRgba10x6WithoutYCbCrSampler{ formatRgba10x6WithoutYCbCrSampler_ } { } @@ -78263,10 +80104,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Bool32 
rasterizationOrderDepthAttachmentAccess_ = {}, VULKAN_HPP_NAMESPACE::Bool32 rasterizationOrderStencilAttachmentAccess_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , rasterizationOrderColorAttachmentAccess( rasterizationOrderColorAttachmentAccess_ ) - , rasterizationOrderDepthAttachmentAccess( rasterizationOrderDepthAttachmentAccess_ ) - , rasterizationOrderStencilAttachmentAccess( rasterizationOrderStencilAttachmentAccess_ ) + : pNext{ pNext_ } + , rasterizationOrderColorAttachmentAccess{ rasterizationOrderColorAttachmentAccess_ } + , rasterizationOrderDepthAttachmentAccess{ rasterizationOrderDepthAttachmentAccess_ } + , rasterizationOrderStencilAttachmentAccess{ rasterizationOrderStencilAttachmentAccess_ } { } @@ -78392,8 +80233,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceRawAccessChainsFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 shaderRawAccessChains_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , shaderRawAccessChains( shaderRawAccessChains_ ) + : pNext{ pNext_ } + , shaderRawAccessChains{ shaderRawAccessChains_ } { } @@ -78489,8 +80330,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceRayQueryFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 rayQuery_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , rayQuery( rayQuery_ ) + : pNext{ pNext_ } + , rayQuery{ rayQuery_ } { } @@ -78586,8 +80427,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingInvocationReorderFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 rayTracingInvocationReorder_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , rayTracingInvocationReorder( rayTracingInvocationReorder_ ) + : pNext{ pNext_ } + , rayTracingInvocationReorder{ rayTracingInvocationReorder_ } { } @@ -78688,8 +80529,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::RayTracingInvocationReorderModeNV rayTracingInvocationReorderReorderingHint_ = VULKAN_HPP_NAMESPACE::RayTracingInvocationReorderModeNV::eNone, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , rayTracingInvocationReorderReorderingHint( rayTracingInvocationReorderReorderingHint_ ) + : pNext{ pNext_ } + , rayTracingInvocationReorderReorderingHint{ rayTracingInvocationReorderReorderingHint_ } { } @@ -78775,9 +80616,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingMaintenance1FeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 rayTracingMaintenance1_ = {}, VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineTraceRaysIndirect2_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , rayTracingMaintenance1( rayTracingMaintenance1_ ) - , rayTracingPipelineTraceRaysIndirect2( rayTracingPipelineTraceRaysIndirect2_ ) + : pNext{ pNext_ } + , rayTracingMaintenance1{ rayTracingMaintenance1_ } + , rayTracingPipelineTraceRaysIndirect2{ rayTracingPipelineTraceRaysIndirect2_ } { } @@ -78885,9 +80726,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingMotionBlurFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 rayTracingMotionBlur_ = {}, VULKAN_HPP_NAMESPACE::Bool32 rayTracingMotionBlurPipelineTraceRaysIndirect_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , rayTracingMotionBlur( rayTracingMotionBlur_ ) - , rayTracingMotionBlurPipelineTraceRaysIndirect( 
rayTracingMotionBlurPipelineTraceRaysIndirect_ ) + : pNext{ pNext_ } + , rayTracingMotionBlur{ rayTracingMotionBlur_ } + , rayTracingMotionBlurPipelineTraceRaysIndirect{ rayTracingMotionBlurPipelineTraceRaysIndirect_ } { } @@ -78997,12 +80838,12 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineTraceRaysIndirect_ = {}, VULKAN_HPP_NAMESPACE::Bool32 rayTraversalPrimitiveCulling_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , rayTracingPipeline( rayTracingPipeline_ ) - , rayTracingPipelineShaderGroupHandleCaptureReplay( rayTracingPipelineShaderGroupHandleCaptureReplay_ ) - , rayTracingPipelineShaderGroupHandleCaptureReplayMixed( rayTracingPipelineShaderGroupHandleCaptureReplayMixed_ ) - , rayTracingPipelineTraceRaysIndirect( rayTracingPipelineTraceRaysIndirect_ ) - , rayTraversalPrimitiveCulling( rayTraversalPrimitiveCulling_ ) + : pNext{ pNext_ } + , rayTracingPipeline{ rayTracingPipeline_ } + , rayTracingPipelineShaderGroupHandleCaptureReplay{ rayTracingPipelineShaderGroupHandleCaptureReplay_ } + , rayTracingPipelineShaderGroupHandleCaptureReplayMixed{ rayTracingPipelineShaderGroupHandleCaptureReplayMixed_ } + , rayTracingPipelineTraceRaysIndirect{ rayTracingPipelineTraceRaysIndirect_ } + , rayTraversalPrimitiveCulling{ rayTraversalPrimitiveCulling_ } { } @@ -79154,15 +80995,15 @@ namespace VULKAN_HPP_NAMESPACE uint32_t shaderGroupHandleAlignment_ = {}, uint32_t maxRayHitAttributeSize_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , shaderGroupHandleSize( shaderGroupHandleSize_ ) - , maxRayRecursionDepth( maxRayRecursionDepth_ ) - , maxShaderGroupStride( maxShaderGroupStride_ ) - , shaderGroupBaseAlignment( shaderGroupBaseAlignment_ ) - , shaderGroupHandleCaptureReplaySize( shaderGroupHandleCaptureReplaySize_ ) - , maxRayDispatchInvocationCount( maxRayDispatchInvocationCount_ ) - , shaderGroupHandleAlignment( shaderGroupHandleAlignment_ ) - , maxRayHitAttributeSize( maxRayHitAttributeSize_ ) + : pNext{ pNext_ } + , shaderGroupHandleSize{ shaderGroupHandleSize_ } + , maxRayRecursionDepth{ maxRayRecursionDepth_ } + , maxShaderGroupStride{ maxShaderGroupStride_ } + , shaderGroupBaseAlignment{ shaderGroupBaseAlignment_ } + , shaderGroupHandleCaptureReplaySize{ shaderGroupHandleCaptureReplaySize_ } + , maxRayDispatchInvocationCount{ maxRayDispatchInvocationCount_ } + , shaderGroupHandleAlignment{ shaderGroupHandleAlignment_ } + , maxRayHitAttributeSize{ maxRayHitAttributeSize_ } { } @@ -79274,8 +81115,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingPositionFetchFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 rayTracingPositionFetch_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , rayTracingPositionFetch( rayTracingPositionFetch_ ) + : pNext{ pNext_ } + , rayTracingPositionFetch{ rayTracingPositionFetch_ } { } @@ -79380,15 +81221,15 @@ namespace VULKAN_HPP_NAMESPACE uint64_t maxTriangleCount_ = {}, uint32_t maxDescriptorSetAccelerationStructures_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , shaderGroupHandleSize( shaderGroupHandleSize_ ) - , maxRecursionDepth( maxRecursionDepth_ ) - , maxShaderGroupStride( maxShaderGroupStride_ ) - , shaderGroupBaseAlignment( shaderGroupBaseAlignment_ ) - , maxGeometryCount( maxGeometryCount_ ) - , maxInstanceCount( maxInstanceCount_ ) - , maxTriangleCount( maxTriangleCount_ ) - , maxDescriptorSetAccelerationStructures( 
maxDescriptorSetAccelerationStructures_ ) + : pNext{ pNext_ } + , shaderGroupHandleSize{ shaderGroupHandleSize_ } + , maxRecursionDepth{ maxRecursionDepth_ } + , maxShaderGroupStride{ maxShaderGroupStride_ } + , shaderGroupBaseAlignment{ shaderGroupBaseAlignment_ } + , maxGeometryCount{ maxGeometryCount_ } + , maxInstanceCount{ maxInstanceCount_ } + , maxTriangleCount{ maxTriangleCount_ } + , maxDescriptorSetAccelerationStructures{ maxDescriptorSetAccelerationStructures_ } { } @@ -79499,8 +81340,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingValidationFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 rayTracingValidation_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , rayTracingValidation( rayTracingValidation_ ) + : pNext{ pNext_ } + , rayTracingValidation{ rayTracingValidation_ } { } @@ -79597,8 +81438,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceRelaxedLineRasterizationFeaturesIMG( VULKAN_HPP_NAMESPACE::Bool32 relaxedLineRasterization_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , relaxedLineRasterization( relaxedLineRasterization_ ) + : pNext{ pNext_ } + , relaxedLineRasterization{ relaxedLineRasterization_ } { } @@ -79697,8 +81538,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceRenderPassStripedFeaturesARM( VULKAN_HPP_NAMESPACE::Bool32 renderPassStriped_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , renderPassStriped( renderPassStriped_ ) + : pNext{ pNext_ } + , renderPassStriped{ renderPassStriped_ } { } @@ -79796,9 +81637,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR PhysicalDeviceRenderPassStripedPropertiesARM( VULKAN_HPP_NAMESPACE::Extent2D renderPassStripeGranularity_ = {}, uint32_t maxRenderPassStripes_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , renderPassStripeGranularity( renderPassStripeGranularity_ ) - , maxRenderPassStripes( maxRenderPassStripes_ ) + : pNext{ pNext_ } + , renderPassStripeGranularity{ renderPassStripeGranularity_ } + , maxRenderPassStripes{ maxRenderPassStripes_ } { } @@ -79882,8 +81723,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceRepresentativeFragmentTestFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTest_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , representativeFragmentTest( representativeFragmentTest_ ) + : pNext{ pNext_ } + , representativeFragmentTest{ representativeFragmentTest_ } { } @@ -79984,10 +81825,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess2_ = {}, VULKAN_HPP_NAMESPACE::Bool32 nullDescriptor_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , robustBufferAccess2( robustBufferAccess2_ ) - , robustImageAccess2( robustImageAccess2_ ) - , nullDescriptor( nullDescriptor_ ) + : pNext{ pNext_ } + , robustBufferAccess2{ robustBufferAccess2_ } + , robustImageAccess2{ robustImageAccess2_ } + , nullDescriptor{ nullDescriptor_ } { } @@ -80104,9 +81945,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR PhysicalDeviceRobustness2PropertiesEXT( VULKAN_HPP_NAMESPACE::DeviceSize robustStorageBufferAccessSizeAlignment_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize robustUniformBufferAccessSizeAlignment_ = {}, void * pNext_ = 
nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , robustStorageBufferAccessSizeAlignment( robustStorageBufferAccessSizeAlignment_ ) - , robustUniformBufferAccessSizeAlignment( robustUniformBufferAccessSizeAlignment_ ) + : pNext{ pNext_ } + , robustStorageBufferAccessSizeAlignment{ robustStorageBufferAccessSizeAlignment_ } + , robustUniformBufferAccessSizeAlignment{ robustUniformBufferAccessSizeAlignment_ } { } @@ -80194,12 +82035,12 @@ namespace VULKAN_HPP_NAMESPACE uint32_t sampleLocationSubPixelBits_ = {}, VULKAN_HPP_NAMESPACE::Bool32 variableSampleLocations_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , sampleLocationSampleCounts( sampleLocationSampleCounts_ ) - , maxSampleLocationGridSize( maxSampleLocationGridSize_ ) - , sampleLocationCoordinateRange( sampleLocationCoordinateRange_ ) - , sampleLocationSubPixelBits( sampleLocationSubPixelBits_ ) - , variableSampleLocations( variableSampleLocations_ ) + : pNext{ pNext_ } + , sampleLocationSampleCounts{ sampleLocationSampleCounts_ } + , maxSampleLocationGridSize{ maxSampleLocationGridSize_ } + , sampleLocationCoordinateRange{ sampleLocationCoordinateRange_ } + , sampleLocationSubPixelBits{ sampleLocationSubPixelBits_ } + , variableSampleLocations{ variableSampleLocations_ } { } @@ -80300,9 +82141,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR PhysicalDeviceSamplerFilterMinmaxProperties( VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxSingleComponentFormats_ = {}, VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxImageComponentMapping_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , filterMinmaxSingleComponentFormats( filterMinmaxSingleComponentFormats_ ) - , filterMinmaxImageComponentMapping( filterMinmaxImageComponentMapping_ ) + : pNext{ pNext_ } + , filterMinmaxSingleComponentFormats{ filterMinmaxSingleComponentFormats_ } + , filterMinmaxImageComponentMapping{ filterMinmaxImageComponentMapping_ } { } @@ -80388,8 +82229,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceSamplerYcbcrConversionFeatures( VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , samplerYcbcrConversion( samplerYcbcrConversion_ ) + : pNext{ pNext_ } + , samplerYcbcrConversion{ samplerYcbcrConversion_ } { } @@ -80488,8 +82329,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceScalarBlockLayoutFeatures( VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , scalarBlockLayout( scalarBlockLayout_ ) + : pNext{ pNext_ } + , scalarBlockLayout{ scalarBlockLayout_ } { } @@ -80588,8 +82429,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceSchedulingControlsFeaturesARM( VULKAN_HPP_NAMESPACE::Bool32 schedulingControls_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , schedulingControls( schedulingControls_ ) + : pNext{ pNext_ } + , schedulingControls{ schedulingControls_ } { } @@ -80687,8 +82528,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR PhysicalDeviceSchedulingControlsPropertiesARM( VULKAN_HPP_NAMESPACE::PhysicalDeviceSchedulingControlsFlagsARM schedulingControlsFlags_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , schedulingControlsFlags( schedulingControlsFlags_ ) + : pNext{ pNext_ } + , 
schedulingControlsFlags{ schedulingControlsFlags_ } { } @@ -80786,8 +82627,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceSeparateDepthStencilLayoutsFeatures( VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , separateDepthStencilLayouts( separateDepthStencilLayouts_ ) + : pNext{ pNext_ } + , separateDepthStencilLayouts{ separateDepthStencilLayouts_ } { } @@ -80888,8 +82729,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16VectorAtomics_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , shaderFloat16VectorAtomics( shaderFloat16VectorAtomics_ ) + : pNext{ pNext_ } + , shaderFloat16VectorAtomics{ shaderFloat16VectorAtomics_ } { } @@ -80999,19 +82840,19 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32AtomicMinMax_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32AtomicMinMax_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , shaderBufferFloat16Atomics( shaderBufferFloat16Atomics_ ) - , shaderBufferFloat16AtomicAdd( shaderBufferFloat16AtomicAdd_ ) - , shaderBufferFloat16AtomicMinMax( shaderBufferFloat16AtomicMinMax_ ) - , shaderBufferFloat32AtomicMinMax( shaderBufferFloat32AtomicMinMax_ ) - , shaderBufferFloat64AtomicMinMax( shaderBufferFloat64AtomicMinMax_ ) - , shaderSharedFloat16Atomics( shaderSharedFloat16Atomics_ ) - , shaderSharedFloat16AtomicAdd( shaderSharedFloat16AtomicAdd_ ) - , shaderSharedFloat16AtomicMinMax( shaderSharedFloat16AtomicMinMax_ ) - , shaderSharedFloat32AtomicMinMax( shaderSharedFloat32AtomicMinMax_ ) - , shaderSharedFloat64AtomicMinMax( shaderSharedFloat64AtomicMinMax_ ) - , shaderImageFloat32AtomicMinMax( shaderImageFloat32AtomicMinMax_ ) - , sparseImageFloat32AtomicMinMax( sparseImageFloat32AtomicMinMax_ ) + : pNext{ pNext_ } + , shaderBufferFloat16Atomics{ shaderBufferFloat16Atomics_ } + , shaderBufferFloat16AtomicAdd{ shaderBufferFloat16AtomicAdd_ } + , shaderBufferFloat16AtomicMinMax{ shaderBufferFloat16AtomicMinMax_ } + , shaderBufferFloat32AtomicMinMax{ shaderBufferFloat32AtomicMinMax_ } + , shaderBufferFloat64AtomicMinMax{ shaderBufferFloat64AtomicMinMax_ } + , shaderSharedFloat16Atomics{ shaderSharedFloat16Atomics_ } + , shaderSharedFloat16AtomicAdd{ shaderSharedFloat16AtomicAdd_ } + , shaderSharedFloat16AtomicMinMax{ shaderSharedFloat16AtomicMinMax_ } + , shaderSharedFloat32AtomicMinMax{ shaderSharedFloat32AtomicMinMax_ } + , shaderSharedFloat64AtomicMinMax{ shaderSharedFloat64AtomicMinMax_ } + , shaderImageFloat32AtomicMinMax{ shaderImageFloat32AtomicMinMax_ } + , sparseImageFloat32AtomicMinMax{ sparseImageFloat32AtomicMinMax_ } { } @@ -81243,19 +83084,19 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32Atomics_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32AtomicAdd_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , shaderBufferFloat32Atomics( shaderBufferFloat32Atomics_ ) - , shaderBufferFloat32AtomicAdd( shaderBufferFloat32AtomicAdd_ ) - , shaderBufferFloat64Atomics( shaderBufferFloat64Atomics_ ) - , shaderBufferFloat64AtomicAdd( shaderBufferFloat64AtomicAdd_ ) - , shaderSharedFloat32Atomics( shaderSharedFloat32Atomics_ ) - , shaderSharedFloat32AtomicAdd( shaderSharedFloat32AtomicAdd_ ) - , 
shaderSharedFloat64Atomics( shaderSharedFloat64Atomics_ ) - , shaderSharedFloat64AtomicAdd( shaderSharedFloat64AtomicAdd_ ) - , shaderImageFloat32Atomics( shaderImageFloat32Atomics_ ) - , shaderImageFloat32AtomicAdd( shaderImageFloat32AtomicAdd_ ) - , sparseImageFloat32Atomics( sparseImageFloat32Atomics_ ) - , sparseImageFloat32AtomicAdd( sparseImageFloat32AtomicAdd_ ) + : pNext{ pNext_ } + , shaderBufferFloat32Atomics{ shaderBufferFloat32Atomics_ } + , shaderBufferFloat32AtomicAdd{ shaderBufferFloat32AtomicAdd_ } + , shaderBufferFloat64Atomics{ shaderBufferFloat64Atomics_ } + , shaderBufferFloat64AtomicAdd{ shaderBufferFloat64AtomicAdd_ } + , shaderSharedFloat32Atomics{ shaderSharedFloat32Atomics_ } + , shaderSharedFloat32AtomicAdd{ shaderSharedFloat32AtomicAdd_ } + , shaderSharedFloat64Atomics{ shaderSharedFloat64Atomics_ } + , shaderSharedFloat64AtomicAdd{ shaderSharedFloat64AtomicAdd_ } + , shaderImageFloat32Atomics{ shaderImageFloat32Atomics_ } + , shaderImageFloat32AtomicAdd{ shaderImageFloat32AtomicAdd_ } + , sparseImageFloat32Atomics{ sparseImageFloat32Atomics_ } + , sparseImageFloat32AtomicAdd{ sparseImageFloat32AtomicAdd_ } { } @@ -81473,9 +83314,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicInt64Features( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , shaderBufferInt64Atomics( shaderBufferInt64Atomics_ ) - , shaderSharedInt64Atomics( shaderSharedInt64Atomics_ ) + : pNext{ pNext_ } + , shaderBufferInt64Atomics{ shaderBufferInt64Atomics_ } + , shaderSharedInt64Atomics{ shaderSharedInt64Atomics_ } { } @@ -81584,9 +83425,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderClockFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupClock_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDeviceClock_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , shaderSubgroupClock( shaderSubgroupClock_ ) - , shaderDeviceClock( shaderDeviceClock_ ) + : pNext{ pNext_ } + , shaderSubgroupClock{ shaderSubgroupClock_ } + , shaderDeviceClock{ shaderDeviceClock_ } { } @@ -81691,8 +83532,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCoreBuiltinsFeaturesARM( VULKAN_HPP_NAMESPACE::Bool32 shaderCoreBuiltins_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , shaderCoreBuiltins( shaderCoreBuiltins_ ) + : pNext{ pNext_ } + , shaderCoreBuiltins{ shaderCoreBuiltins_ } { } @@ -81791,10 +83632,10 @@ namespace VULKAN_HPP_NAMESPACE uint32_t shaderCoreCount_ = {}, uint32_t shaderWarpsPerCore_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , shaderCoreMask( shaderCoreMask_ ) - , shaderCoreCount( shaderCoreCount_ ) - , shaderWarpsPerCore( shaderWarpsPerCore_ ) + : pNext{ pNext_ } + , shaderCoreMask{ shaderCoreMask_ } + , shaderCoreCount{ shaderCoreCount_ } + , shaderWarpsPerCore{ shaderWarpsPerCore_ } { } @@ -81881,9 +83722,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCoreProperties2AMD( VULKAN_HPP_NAMESPACE::ShaderCorePropertiesFlagsAMD shaderCoreFeatures_ = {}, uint32_t activeComputeUnitCount_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , shaderCoreFeatures( shaderCoreFeatures_ ) - , activeComputeUnitCount( activeComputeUnitCount_ ) + : pNext{ pNext_ } + , shaderCoreFeatures{ shaderCoreFeatures_ } + , 
activeComputeUnitCount{ activeComputeUnitCount_ } { } @@ -81980,21 +83821,21 @@ namespace VULKAN_HPP_NAMESPACE uint32_t maxVgprAllocation_ = {}, uint32_t vgprAllocationGranularity_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , shaderEngineCount( shaderEngineCount_ ) - , shaderArraysPerEngineCount( shaderArraysPerEngineCount_ ) - , computeUnitsPerShaderArray( computeUnitsPerShaderArray_ ) - , simdPerComputeUnit( simdPerComputeUnit_ ) - , wavefrontsPerSimd( wavefrontsPerSimd_ ) - , wavefrontSize( wavefrontSize_ ) - , sgprsPerSimd( sgprsPerSimd_ ) - , minSgprAllocation( minSgprAllocation_ ) - , maxSgprAllocation( maxSgprAllocation_ ) - , sgprAllocationGranularity( sgprAllocationGranularity_ ) - , vgprsPerSimd( vgprsPerSimd_ ) - , minVgprAllocation( minVgprAllocation_ ) - , maxVgprAllocation( maxVgprAllocation_ ) - , vgprAllocationGranularity( vgprAllocationGranularity_ ) + : pNext{ pNext_ } + , shaderEngineCount{ shaderEngineCount_ } + , shaderArraysPerEngineCount{ shaderArraysPerEngineCount_ } + , computeUnitsPerShaderArray{ computeUnitsPerShaderArray_ } + , simdPerComputeUnit{ simdPerComputeUnit_ } + , wavefrontsPerSimd{ wavefrontsPerSimd_ } + , wavefrontSize{ wavefrontSize_ } + , sgprsPerSimd{ sgprsPerSimd_ } + , minSgprAllocation{ minSgprAllocation_ } + , maxSgprAllocation{ maxSgprAllocation_ } + , sgprAllocationGranularity{ sgprAllocationGranularity_ } + , vgprsPerSimd{ vgprsPerSimd_ } + , minVgprAllocation{ minVgprAllocation_ } + , maxVgprAllocation{ maxVgprAllocation_ } + , vgprAllocationGranularity{ vgprAllocationGranularity_ } { } @@ -82127,10 +83968,10 @@ namespace VULKAN_HPP_NAMESPACE uint32_t texelRate_ = {}, uint32_t fmaRate_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , pixelRate( pixelRate_ ) - , texelRate( texelRate_ ) - , fmaRate( fmaRate_ ) + : pNext{ pNext_ } + , pixelRate{ pixelRate_ } + , texelRate{ texelRate_ } + , fmaRate{ fmaRate_ } { } @@ -82214,8 +84055,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderDemoteToHelperInvocationFeatures( VULKAN_HPP_NAMESPACE::Bool32 shaderDemoteToHelperInvocation_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , shaderDemoteToHelperInvocation( shaderDemoteToHelperInvocation_ ) + : pNext{ pNext_ } + , shaderDemoteToHelperInvocation{ shaderDemoteToHelperInvocation_ } { } @@ -82316,8 +84157,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderDrawParametersFeatures( VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , shaderDrawParameters( shaderDrawParameters_ ) + : pNext{ pNext_ } + , shaderDrawParameters{ shaderDrawParameters_ } { } @@ -82416,8 +84257,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD( VULKAN_HPP_NAMESPACE::Bool32 shaderEarlyAndLateFragmentTests_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , shaderEarlyAndLateFragmentTests( shaderEarlyAndLateFragmentTests_ ) + : pNext{ pNext_ } + , shaderEarlyAndLateFragmentTests{ shaderEarlyAndLateFragmentTests_ } { } @@ -82518,8 +84359,8 @@ namespace VULKAN_HPP_NAMESPACE # if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderEnqueueFeaturesAMDX( VULKAN_HPP_NAMESPACE::Bool32 shaderEnqueue_ = {}, void * 
pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , shaderEnqueue( shaderEnqueue_ ) + : pNext{ pNext_ } + , shaderEnqueue{ shaderEnqueue_ } { } @@ -82621,12 +84462,12 @@ namespace VULKAN_HPP_NAMESPACE uint32_t maxExecutionGraphShaderPayloadCount_ = {}, uint32_t executionGraphDispatchAddressAlignment_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , maxExecutionGraphDepth( maxExecutionGraphDepth_ ) - , maxExecutionGraphShaderOutputNodes( maxExecutionGraphShaderOutputNodes_ ) - , maxExecutionGraphShaderPayloadSize( maxExecutionGraphShaderPayloadSize_ ) - , maxExecutionGraphShaderPayloadCount( maxExecutionGraphShaderPayloadCount_ ) - , executionGraphDispatchAddressAlignment( executionGraphDispatchAddressAlignment_ ) + : pNext{ pNext_ } + , maxExecutionGraphDepth{ maxExecutionGraphDepth_ } + , maxExecutionGraphShaderOutputNodes{ maxExecutionGraphShaderOutputNodes_ } + , maxExecutionGraphShaderPayloadSize{ maxExecutionGraphShaderPayloadSize_ } + , maxExecutionGraphShaderPayloadCount{ maxExecutionGraphShaderPayloadCount_ } + , executionGraphDispatchAddressAlignment{ executionGraphDispatchAddressAlignment_ } { } @@ -82771,8 +84612,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderExpectAssumeFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 shaderExpectAssume_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , shaderExpectAssume( shaderExpectAssume_ ) + : pNext{ pNext_ } + , shaderExpectAssume{ shaderExpectAssume_ } { } @@ -82870,9 +84711,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderFloat16Int8Features( VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderInt8_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , shaderFloat16( shaderFloat16_ ) - , shaderInt8( shaderInt8_ ) + : pNext{ pNext_ } + , shaderFloat16{ shaderFloat16_ } + , shaderInt8{ shaderInt8_ } { } @@ -82978,8 +84819,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderFloatControls2FeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 shaderFloatControls2_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , shaderFloatControls2( shaderFloatControls2_ ) + : pNext{ pNext_ } + , shaderFloatControls2{ shaderFloatControls2_ } { } @@ -83078,9 +84919,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderImageAtomicInt64FeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 shaderImageInt64Atomics_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseImageInt64Atomics_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , shaderImageInt64Atomics( shaderImageInt64Atomics_ ) - , sparseImageInt64Atomics( sparseImageInt64Atomics_ ) + : pNext{ pNext_ } + , shaderImageInt64Atomics{ shaderImageInt64Atomics_ } + , sparseImageInt64Atomics{ sparseImageInt64Atomics_ } { } @@ -83187,8 +85028,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderImageFootprintFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 imageFootprint_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , imageFootprint( imageFootprint_ ) + : pNext{ pNext_ } + , imageFootprint{ imageFootprint_ } { } @@ -83284,8 +85125,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderIntegerDotProductFeatures( 
VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerDotProduct_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , shaderIntegerDotProduct( shaderIntegerDotProduct_ ) + : pNext{ pNext_ } + , shaderIntegerDotProduct{ shaderIntegerDotProduct_ } { } @@ -83415,38 +85256,37 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitSignedAccelerated_ = {}, VULKAN_HPP_NAMESPACE::Bool32 integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , integerDotProduct8BitUnsignedAccelerated( integerDotProduct8BitUnsignedAccelerated_ ) - , integerDotProduct8BitSignedAccelerated( integerDotProduct8BitSignedAccelerated_ ) - , integerDotProduct8BitMixedSignednessAccelerated( integerDotProduct8BitMixedSignednessAccelerated_ ) - , integerDotProduct4x8BitPackedUnsignedAccelerated( integerDotProduct4x8BitPackedUnsignedAccelerated_ ) - , integerDotProduct4x8BitPackedSignedAccelerated( integerDotProduct4x8BitPackedSignedAccelerated_ ) - , integerDotProduct4x8BitPackedMixedSignednessAccelerated( integerDotProduct4x8BitPackedMixedSignednessAccelerated_ ) - , integerDotProduct16BitUnsignedAccelerated( integerDotProduct16BitUnsignedAccelerated_ ) - , integerDotProduct16BitSignedAccelerated( integerDotProduct16BitSignedAccelerated_ ) - , integerDotProduct16BitMixedSignednessAccelerated( integerDotProduct16BitMixedSignednessAccelerated_ ) - , integerDotProduct32BitUnsignedAccelerated( integerDotProduct32BitUnsignedAccelerated_ ) - , integerDotProduct32BitSignedAccelerated( integerDotProduct32BitSignedAccelerated_ ) - , integerDotProduct32BitMixedSignednessAccelerated( integerDotProduct32BitMixedSignednessAccelerated_ ) - , integerDotProduct64BitUnsignedAccelerated( integerDotProduct64BitUnsignedAccelerated_ ) - , integerDotProduct64BitSignedAccelerated( integerDotProduct64BitSignedAccelerated_ ) - , integerDotProduct64BitMixedSignednessAccelerated( integerDotProduct64BitMixedSignednessAccelerated_ ) - , integerDotProductAccumulatingSaturating8BitUnsignedAccelerated( integerDotProductAccumulatingSaturating8BitUnsignedAccelerated_ ) - , integerDotProductAccumulatingSaturating8BitSignedAccelerated( integerDotProductAccumulatingSaturating8BitSignedAccelerated_ ) - , integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated( integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated_ ) - , integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated( integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated_ ) - , integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated( integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated_ ) - , integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated( - integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated_ ) - , integerDotProductAccumulatingSaturating16BitUnsignedAccelerated( integerDotProductAccumulatingSaturating16BitUnsignedAccelerated_ ) - , integerDotProductAccumulatingSaturating16BitSignedAccelerated( integerDotProductAccumulatingSaturating16BitSignedAccelerated_ ) - , integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated( integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated_ ) - , integerDotProductAccumulatingSaturating32BitUnsignedAccelerated( integerDotProductAccumulatingSaturating32BitUnsignedAccelerated_ ) - , integerDotProductAccumulatingSaturating32BitSignedAccelerated( 
integerDotProductAccumulatingSaturating32BitSignedAccelerated_ ) - , integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated( integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated_ ) - , integerDotProductAccumulatingSaturating64BitUnsignedAccelerated( integerDotProductAccumulatingSaturating64BitUnsignedAccelerated_ ) - , integerDotProductAccumulatingSaturating64BitSignedAccelerated( integerDotProductAccumulatingSaturating64BitSignedAccelerated_ ) - , integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated( integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated_ ) + : pNext{ pNext_ } + , integerDotProduct8BitUnsignedAccelerated{ integerDotProduct8BitUnsignedAccelerated_ } + , integerDotProduct8BitSignedAccelerated{ integerDotProduct8BitSignedAccelerated_ } + , integerDotProduct8BitMixedSignednessAccelerated{ integerDotProduct8BitMixedSignednessAccelerated_ } + , integerDotProduct4x8BitPackedUnsignedAccelerated{ integerDotProduct4x8BitPackedUnsignedAccelerated_ } + , integerDotProduct4x8BitPackedSignedAccelerated{ integerDotProduct4x8BitPackedSignedAccelerated_ } + , integerDotProduct4x8BitPackedMixedSignednessAccelerated{ integerDotProduct4x8BitPackedMixedSignednessAccelerated_ } + , integerDotProduct16BitUnsignedAccelerated{ integerDotProduct16BitUnsignedAccelerated_ } + , integerDotProduct16BitSignedAccelerated{ integerDotProduct16BitSignedAccelerated_ } + , integerDotProduct16BitMixedSignednessAccelerated{ integerDotProduct16BitMixedSignednessAccelerated_ } + , integerDotProduct32BitUnsignedAccelerated{ integerDotProduct32BitUnsignedAccelerated_ } + , integerDotProduct32BitSignedAccelerated{ integerDotProduct32BitSignedAccelerated_ } + , integerDotProduct32BitMixedSignednessAccelerated{ integerDotProduct32BitMixedSignednessAccelerated_ } + , integerDotProduct64BitUnsignedAccelerated{ integerDotProduct64BitUnsignedAccelerated_ } + , integerDotProduct64BitSignedAccelerated{ integerDotProduct64BitSignedAccelerated_ } + , integerDotProduct64BitMixedSignednessAccelerated{ integerDotProduct64BitMixedSignednessAccelerated_ } + , integerDotProductAccumulatingSaturating8BitUnsignedAccelerated{ integerDotProductAccumulatingSaturating8BitUnsignedAccelerated_ } + , integerDotProductAccumulatingSaturating8BitSignedAccelerated{ integerDotProductAccumulatingSaturating8BitSignedAccelerated_ } + , integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated{ integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated_ } + , integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated{ integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated_ } + , integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated{ integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated_ } + , integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated{ integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated_ } + , integerDotProductAccumulatingSaturating16BitUnsignedAccelerated{ integerDotProductAccumulatingSaturating16BitUnsignedAccelerated_ } + , integerDotProductAccumulatingSaturating16BitSignedAccelerated{ integerDotProductAccumulatingSaturating16BitSignedAccelerated_ } + , integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated{ integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated_ } + , integerDotProductAccumulatingSaturating32BitUnsignedAccelerated{ integerDotProductAccumulatingSaturating32BitUnsignedAccelerated_ } + , 
integerDotProductAccumulatingSaturating32BitSignedAccelerated{ integerDotProductAccumulatingSaturating32BitSignedAccelerated_ } + , integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated{ integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated_ } + , integerDotProductAccumulatingSaturating64BitUnsignedAccelerated{ integerDotProductAccumulatingSaturating64BitUnsignedAccelerated_ } + , integerDotProductAccumulatingSaturating64BitSignedAccelerated{ integerDotProductAccumulatingSaturating64BitSignedAccelerated_ } + , integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated{ integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated_ } { } @@ -83658,8 +85498,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL( VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerFunctions2_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , shaderIntegerFunctions2( shaderIntegerFunctions2_ ) + : pNext{ pNext_ } + , shaderIntegerFunctions2{ shaderIntegerFunctions2_ } { } @@ -83758,8 +85598,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderMaximalReconvergenceFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 shaderMaximalReconvergence_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , shaderMaximalReconvergence( shaderMaximalReconvergence_ ) + : pNext{ pNext_ } + , shaderMaximalReconvergence{ shaderMaximalReconvergence_ } { } @@ -83858,8 +85698,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderModuleIdentifierFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 shaderModuleIdentifier_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , shaderModuleIdentifier( shaderModuleIdentifier_ ) + : pNext{ pNext_ } + , shaderModuleIdentifier{ shaderModuleIdentifier_ } { } @@ -83958,8 +85798,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderModuleIdentifierPropertiesEXT( std::array const & shaderModuleIdentifierAlgorithmUUID_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , shaderModuleIdentifierAlgorithmUUID( shaderModuleIdentifierAlgorithmUUID_ ) + : pNext{ pNext_ } + , shaderModuleIdentifierAlgorithmUUID{ shaderModuleIdentifierAlgorithmUUID_ } { } @@ -84042,8 +85882,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderObjectFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 shaderObject_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , shaderObject( shaderObject_ ) + : pNext{ pNext_ } + , shaderObject{ shaderObject_ } { } @@ -84140,9 +85980,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderObjectPropertiesEXT( std::array const & shaderBinaryUUID_ = {}, uint32_t shaderBinaryVersion_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , shaderBinaryUUID( shaderBinaryUUID_ ) - , shaderBinaryVersion( shaderBinaryVersion_ ) + : pNext{ pNext_ } + , shaderBinaryUUID{ shaderBinaryUUID_ } + , shaderBinaryVersion{ shaderBinaryVersion_ } { } @@ -84227,8 +86067,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderQuadControlFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 shaderQuadControl_ = {}, void * pNext_ = 
nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , shaderQuadControl( shaderQuadControl_ ) + : pNext{ pNext_ } + , shaderQuadControl{ shaderQuadControl_ } { } @@ -84315,6 +86155,208 @@ namespace VULKAN_HPP_NAMESPACE using Type = PhysicalDeviceShaderQuadControlFeaturesKHR; }; + struct PhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR + { + using NativeType = VkPhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 shaderRelaxedExtendedInstruction_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , shaderRelaxedExtendedInstruction{ shaderRelaxedExtendedInstruction_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR( PhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR( VkPhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR( + *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR & + operator=( PhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR & + operator=( VkPhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR & + setShaderRelaxedExtendedInstruction( VULKAN_HPP_NAMESPACE::Bool32 shaderRelaxedExtendedInstruction_ ) VULKAN_HPP_NOEXCEPT + { + shaderRelaxedExtendedInstruction = shaderRelaxedExtendedInstruction_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, shaderRelaxedExtendedInstruction ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderRelaxedExtendedInstruction == rhs.shaderRelaxedExtendedInstruction ); +# endif + } + + bool operator!=( 
PhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderRelaxedExtendedInstruction = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR; + }; + + struct PhysicalDeviceShaderReplicatedCompositesFeaturesEXT + { + using NativeType = VkPhysicalDeviceShaderReplicatedCompositesFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderReplicatedCompositesFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderReplicatedCompositesFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 shaderReplicatedComposites_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , shaderReplicatedComposites{ shaderReplicatedComposites_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceShaderReplicatedCompositesFeaturesEXT( PhysicalDeviceShaderReplicatedCompositesFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceShaderReplicatedCompositesFeaturesEXT( VkPhysicalDeviceShaderReplicatedCompositesFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderReplicatedCompositesFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceShaderReplicatedCompositesFeaturesEXT & + operator=( PhysicalDeviceShaderReplicatedCompositesFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceShaderReplicatedCompositesFeaturesEXT & operator=( VkPhysicalDeviceShaderReplicatedCompositesFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderReplicatedCompositesFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderReplicatedCompositesFeaturesEXT & + setShaderReplicatedComposites( VULKAN_HPP_NAMESPACE::Bool32 shaderReplicatedComposites_ ) VULKAN_HPP_NOEXCEPT + { + shaderReplicatedComposites = shaderReplicatedComposites_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceShaderReplicatedCompositesFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderReplicatedCompositesFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, shaderReplicatedComposites ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceShaderReplicatedCompositesFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceShaderReplicatedCompositesFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderReplicatedComposites == rhs.shaderReplicatedComposites ); +# endif + } + + bool 
operator!=( PhysicalDeviceShaderReplicatedCompositesFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderReplicatedCompositesFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderReplicatedComposites = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceShaderReplicatedCompositesFeaturesEXT; + }; + struct PhysicalDeviceShaderSMBuiltinsFeaturesNV { using NativeType = VkPhysicalDeviceShaderSMBuiltinsFeaturesNV; @@ -84325,8 +86367,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSMBuiltinsFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 shaderSMBuiltins_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , shaderSMBuiltins( shaderSMBuiltins_ ) + : pNext{ pNext_ } + , shaderSMBuiltins{ shaderSMBuiltins_ } { } @@ -84422,9 +86464,9 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSMBuiltinsPropertiesNV( uint32_t shaderSMCount_ = {}, uint32_t shaderWarpsPerSM_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , shaderSMCount( shaderSMCount_ ) - , shaderWarpsPerSM( shaderWarpsPerSM_ ) + : pNext{ pNext_ } + , shaderSMCount{ shaderSMCount_ } + , shaderWarpsPerSM{ shaderWarpsPerSM_ } { } @@ -84507,8 +86549,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSubgroupExtendedTypesFeatures( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , shaderSubgroupExtendedTypes( shaderSubgroupExtendedTypes_ ) + : pNext{ pNext_ } + , shaderSubgroupExtendedTypes{ shaderSubgroupExtendedTypes_ } { } @@ -84610,9 +86652,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSubgroupRotateFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupRotate_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupRotateClustered_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , shaderSubgroupRotate( shaderSubgroupRotate_ ) - , shaderSubgroupRotateClustered( shaderSubgroupRotateClustered_ ) + : pNext{ pNext_ } + , shaderSubgroupRotate{ shaderSubgroupRotate_ } + , shaderSubgroupRotateClustered{ shaderSubgroupRotateClustered_ } { } @@ -84719,8 +86761,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupUniformControlFlow_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , shaderSubgroupUniformControlFlow( shaderSubgroupUniformControlFlow_ ) + : pNext{ pNext_ } + , shaderSubgroupUniformControlFlow{ shaderSubgroupUniformControlFlow_ } { } @@ -84821,8 +86863,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderTerminateInvocationFeatures( VULKAN_HPP_NAMESPACE::Bool32 shaderTerminateInvocation_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , shaderTerminateInvocation( shaderTerminateInvocation_ ) + : pNext{ pNext_ } + , shaderTerminateInvocation{ shaderTerminateInvocation_ } { } @@ -84924,10 +86966,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Bool32 shaderTileImageDepthReadAccess_ = 
{}, VULKAN_HPP_NAMESPACE::Bool32 shaderTileImageStencilReadAccess_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , shaderTileImageColorReadAccess( shaderTileImageColorReadAccess_ ) - , shaderTileImageDepthReadAccess( shaderTileImageDepthReadAccess_ ) - , shaderTileImageStencilReadAccess( shaderTileImageStencilReadAccess_ ) + : pNext{ pNext_ } + , shaderTileImageColorReadAccess{ shaderTileImageColorReadAccess_ } + , shaderTileImageDepthReadAccess{ shaderTileImageDepthReadAccess_ } + , shaderTileImageStencilReadAccess{ shaderTileImageStencilReadAccess_ } { } @@ -85048,10 +87090,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Bool32 shaderTileImageReadSampleFromPixelRateInvocation_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderTileImageReadFromHelperInvocation_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , shaderTileImageCoherentReadAccelerated( shaderTileImageCoherentReadAccelerated_ ) - , shaderTileImageReadSampleFromPixelRateInvocation( shaderTileImageReadSampleFromPixelRateInvocation_ ) - , shaderTileImageReadFromHelperInvocation( shaderTileImageReadFromHelperInvocation_ ) + : pNext{ pNext_ } + , shaderTileImageCoherentReadAccelerated{ shaderTileImageCoherentReadAccelerated_ } + , shaderTileImageReadSampleFromPixelRateInvocation{ shaderTileImageReadSampleFromPixelRateInvocation_ } + , shaderTileImageReadFromHelperInvocation{ shaderTileImageReadFromHelperInvocation_ } { } @@ -85143,9 +87185,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR PhysicalDeviceShadingRateImageFeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImage_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shadingRateCoarseSampleOrder_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , shadingRateImage( shadingRateImage_ ) - , shadingRateCoarseSampleOrder( shadingRateCoarseSampleOrder_ ) + : pNext{ pNext_ } + , shadingRateImage{ shadingRateImage_ } + , shadingRateCoarseSampleOrder{ shadingRateCoarseSampleOrder_ } { } @@ -85252,10 +87294,10 @@ namespace VULKAN_HPP_NAMESPACE uint32_t shadingRatePaletteSize_ = {}, uint32_t shadingRateMaxCoarseSamples_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , shadingRateTexelSize( shadingRateTexelSize_ ) - , shadingRatePaletteSize( shadingRatePaletteSize_ ) - , shadingRateMaxCoarseSamples( shadingRateMaxCoarseSamples_ ) + : pNext{ pNext_ } + , shadingRateTexelSize{ shadingRateTexelSize_ } + , shadingRatePaletteSize{ shadingRatePaletteSize_ } + , shadingRateMaxCoarseSamples{ shadingRateMaxCoarseSamples_ } { } @@ -85345,12 +87387,12 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {}, VULKAN_HPP_NAMESPACE::ImageTiling tiling_ = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , format( format_ ) - , type( type_ ) - , samples( samples_ ) - , usage( usage_ ) - , tiling( tiling_ ) + : pNext{ pNext_ } + , format{ format_ } + , type{ type_ } + , samples{ samples_ } + , usage{ usage_ } + , tiling{ tiling_ } { } @@ -85486,11 +87528,11 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags supportedOperations_ = {}, VULKAN_HPP_NAMESPACE::Bool32 quadOperationsInAllStages_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , subgroupSize( subgroupSize_ ) - , supportedStages( supportedStages_ ) - , supportedOperations( supportedOperations_ ) - , quadOperationsInAllStages( quadOperationsInAllStages_ ) + : pNext{ pNext_ } + , subgroupSize{ 
subgroupSize_ } + , supportedStages{ supportedStages_ } + , supportedOperations{ supportedOperations_ } + , quadOperationsInAllStages{ quadOperationsInAllStages_ } { } @@ -85582,9 +87624,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupSizeControlFeatures( VULKAN_HPP_NAMESPACE::Bool32 subgroupSizeControl_ = {}, VULKAN_HPP_NAMESPACE::Bool32 computeFullSubgroups_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , subgroupSizeControl( subgroupSizeControl_ ) - , computeFullSubgroups( computeFullSubgroups_ ) + : pNext{ pNext_ } + , subgroupSizeControl{ subgroupSizeControl_ } + , computeFullSubgroups{ computeFullSubgroups_ } { } @@ -85695,11 +87737,11 @@ namespace VULKAN_HPP_NAMESPACE uint32_t maxComputeWorkgroupSubgroups_ = {}, VULKAN_HPP_NAMESPACE::ShaderStageFlags requiredSubgroupSizeStages_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , minSubgroupSize( minSubgroupSize_ ) - , maxSubgroupSize( maxSubgroupSize_ ) - , maxComputeWorkgroupSubgroups( maxComputeWorkgroupSubgroups_ ) - , requiredSubgroupSizeStages( requiredSubgroupSizeStages_ ) + : pNext{ pNext_ } + , minSubgroupSize{ minSubgroupSize_ } + , maxSubgroupSize{ maxSubgroupSize_ } + , maxComputeWorkgroupSubgroups{ maxComputeWorkgroupSubgroups_ } + , requiredSubgroupSizeStages{ requiredSubgroupSizeStages_ } { } @@ -85792,8 +87834,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceSubpassMergeFeedbackFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 subpassMergeFeedback_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , subpassMergeFeedback( subpassMergeFeedback_ ) + : pNext{ pNext_ } + , subpassMergeFeedback{ subpassMergeFeedback_ } { } @@ -85891,8 +87933,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceSubpassShadingFeaturesHUAWEI( VULKAN_HPP_NAMESPACE::Bool32 subpassShading_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , subpassShading( subpassShading_ ) + : pNext{ pNext_ } + , subpassShading{ subpassShading_ } { } @@ -85988,8 +88030,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceSubpassShadingPropertiesHUAWEI( uint32_t maxSubpassShadingWorkgroupSizeAspectRatio_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , maxSubpassShadingWorkgroupSizeAspectRatio( maxSubpassShadingWorkgroupSizeAspectRatio_ ) + : pNext{ pNext_ } + , maxSubpassShadingWorkgroupSizeAspectRatio{ maxSubpassShadingWorkgroupSizeAspectRatio_ } { } @@ -86070,8 +88112,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceSurfaceInfo2KHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , surface( surface_ ) + : pNext{ pNext_ } + , surface{ surface_ } { } @@ -86167,8 +88209,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceSwapchainMaintenance1FeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 swapchainMaintenance1_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , swapchainMaintenance1( swapchainMaintenance1_ ) + : pNext{ pNext_ } + , swapchainMaintenance1{ swapchainMaintenance1_ } { } @@ -86266,8 +88308,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( 
VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceSynchronization2Features( VULKAN_HPP_NAMESPACE::Bool32 synchronization2_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , synchronization2( synchronization2_ ) + : pNext{ pNext_ } + , synchronization2{ synchronization2_ } { } @@ -86365,8 +88407,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceTexelBufferAlignmentFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 texelBufferAlignment_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , texelBufferAlignment( texelBufferAlignment_ ) + : pNext{ pNext_ } + , texelBufferAlignment{ texelBufferAlignment_ } { } @@ -86467,11 +88509,11 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DeviceSize uniformTexelBufferOffsetAlignmentBytes_ = {}, VULKAN_HPP_NAMESPACE::Bool32 uniformTexelBufferOffsetSingleTexelAlignment_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , storageTexelBufferOffsetAlignmentBytes( storageTexelBufferOffsetAlignmentBytes_ ) - , storageTexelBufferOffsetSingleTexelAlignment( storageTexelBufferOffsetSingleTexelAlignment_ ) - , uniformTexelBufferOffsetAlignmentBytes( uniformTexelBufferOffsetAlignmentBytes_ ) - , uniformTexelBufferOffsetSingleTexelAlignment( uniformTexelBufferOffsetSingleTexelAlignment_ ) + : pNext{ pNext_ } + , storageTexelBufferOffsetAlignmentBytes{ storageTexelBufferOffsetAlignmentBytes_ } + , storageTexelBufferOffsetSingleTexelAlignment{ storageTexelBufferOffsetSingleTexelAlignment_ } + , uniformTexelBufferOffsetAlignmentBytes{ uniformTexelBufferOffsetAlignmentBytes_ } + , uniformTexelBufferOffsetSingleTexelAlignment{ uniformTexelBufferOffsetSingleTexelAlignment_ } { } @@ -86571,8 +88613,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceTextureCompressionASTCHDRFeatures( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_HDR_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , textureCompressionASTC_HDR( textureCompressionASTC_HDR_ ) + : pNext{ pNext_ } + , textureCompressionASTC_HDR{ textureCompressionASTC_HDR_ } { } @@ -86672,8 +88714,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceTilePropertiesFeaturesQCOM( VULKAN_HPP_NAMESPACE::Bool32 tileProperties_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , tileProperties( tileProperties_ ) + : pNext{ pNext_ } + , tileProperties{ tileProperties_ } { } @@ -86769,8 +88811,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceTimelineSemaphoreFeatures( VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , timelineSemaphore( timelineSemaphore_ ) + : pNext{ pNext_ } + , timelineSemaphore{ timelineSemaphore_ } { } @@ -86869,8 +88911,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceTimelineSemaphoreProperties( uint64_t maxTimelineSemaphoreValueDifference_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , maxTimelineSemaphoreValueDifference( maxTimelineSemaphoreValueDifference_ ) + : pNext{ pNext_ } + , maxTimelineSemaphoreValueDifference{ maxTimelineSemaphoreValueDifference_ } { } @@ -86958,12 +89000,12 @@ namespace VULKAN_HPP_NAMESPACE 
std::array const & description_ = {}, std::array const & layer_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , name( name_ ) - , version( version_ ) - , purposes( purposes_ ) - , description( description_ ) - , layer( layer_ ) + : pNext{ pNext_ } + , name{ name_ } + , version{ version_ } + , purposes{ purposes_ } + , description{ description_ } + , layer{ layer_ } { } @@ -86974,45 +89016,6 @@ namespace VULKAN_HPP_NAMESPACE { } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - PhysicalDeviceToolProperties( std::string const & name_, - std::string const & version_ = {}, - VULKAN_HPP_NAMESPACE::ToolPurposeFlags purposes_ = {}, - std::string const & description_ = {}, - std::string const & layer_ = {}, - void * pNext_ = nullptr ) - : pNext( pNext_ ), purposes( purposes_ ) - { - VULKAN_HPP_ASSERT( name_.size() < VK_MAX_EXTENSION_NAME_SIZE ); -# if defined( WIN32 ) - strncpy_s( name, VK_MAX_EXTENSION_NAME_SIZE, name_.data(), name_.size() ); -# else - strncpy( name, name_.data(), std::min( VK_MAX_EXTENSION_NAME_SIZE, name_.size() ) ); -# endif - - VULKAN_HPP_ASSERT( version_.size() < VK_MAX_EXTENSION_NAME_SIZE ); -# if defined( WIN32 ) - strncpy_s( version, VK_MAX_EXTENSION_NAME_SIZE, version_.data(), version_.size() ); -# else - strncpy( version, version_.data(), std::min( VK_MAX_EXTENSION_NAME_SIZE, version_.size() ) ); -# endif - - VULKAN_HPP_ASSERT( description_.size() < VK_MAX_DESCRIPTION_SIZE ); -# if defined( WIN32 ) - strncpy_s( description, VK_MAX_DESCRIPTION_SIZE, description_.data(), description_.size() ); -# else - strncpy( description, description_.data(), std::min( VK_MAX_DESCRIPTION_SIZE, description_.size() ) ); -# endif - - VULKAN_HPP_ASSERT( layer_.size() < VK_MAX_EXTENSION_NAME_SIZE ); -# if defined( WIN32 ) - strncpy_s( layer, VK_MAX_EXTENSION_NAME_SIZE, layer_.data(), layer_.size() ); -# else - strncpy( layer, layer_.data(), std::min( VK_MAX_EXTENSION_NAME_SIZE, layer_.size() ) ); -# endif - } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - PhysicalDeviceToolProperties & operator=( PhysicalDeviceToolProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ @@ -87112,9 +89115,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR PhysicalDeviceTransformFeedbackFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 transformFeedback_ = {}, VULKAN_HPP_NAMESPACE::Bool32 geometryStreams_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , transformFeedback( transformFeedback_ ) - , geometryStreams( geometryStreams_ ) + : pNext{ pNext_ } + , transformFeedback{ transformFeedback_ } + , geometryStreams{ geometryStreams_ } { } @@ -87227,17 +89230,17 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackRasterizationStreamSelect_ = {}, VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackDraw_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , maxTransformFeedbackStreams( maxTransformFeedbackStreams_ ) - , maxTransformFeedbackBuffers( maxTransformFeedbackBuffers_ ) - , maxTransformFeedbackBufferSize( maxTransformFeedbackBufferSize_ ) - , maxTransformFeedbackStreamDataSize( maxTransformFeedbackStreamDataSize_ ) - , maxTransformFeedbackBufferDataSize( maxTransformFeedbackBufferDataSize_ ) - , maxTransformFeedbackBufferDataStride( maxTransformFeedbackBufferDataStride_ ) - , transformFeedbackQueries( transformFeedbackQueries_ ) - , transformFeedbackStreamsLinesTriangles( transformFeedbackStreamsLinesTriangles_ ) - , transformFeedbackRasterizationStreamSelect( 
transformFeedbackRasterizationStreamSelect_ ) - , transformFeedbackDraw( transformFeedbackDraw_ ) + : pNext{ pNext_ } + , maxTransformFeedbackStreams{ maxTransformFeedbackStreams_ } + , maxTransformFeedbackBuffers{ maxTransformFeedbackBuffers_ } + , maxTransformFeedbackBufferSize{ maxTransformFeedbackBufferSize_ } + , maxTransformFeedbackStreamDataSize{ maxTransformFeedbackStreamDataSize_ } + , maxTransformFeedbackBufferDataSize{ maxTransformFeedbackBufferDataSize_ } + , maxTransformFeedbackBufferDataStride{ maxTransformFeedbackBufferDataStride_ } + , transformFeedbackQueries{ transformFeedbackQueries_ } + , transformFeedbackStreamsLinesTriangles{ transformFeedbackStreamsLinesTriangles_ } + , transformFeedbackRasterizationStreamSelect{ transformFeedbackRasterizationStreamSelect_ } + , transformFeedbackDraw{ transformFeedbackDraw_ } { } @@ -87358,8 +89361,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceUniformBufferStandardLayoutFeatures( VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , uniformBufferStandardLayout( uniformBufferStandardLayout_ ) + : pNext{ pNext_ } + , uniformBufferStandardLayout{ uniformBufferStandardLayout_ } { } @@ -87461,9 +89464,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR PhysicalDeviceVariablePointersFeatures( VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer_ = {}, VULKAN_HPP_NAMESPACE::Bool32 variablePointers_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , variablePointersStorageBuffer( variablePointersStorageBuffer_ ) - , variablePointers( variablePointers_ ) + : pNext{ pNext_ } + , variablePointersStorageBuffer{ variablePointersStorageBuffer_ } + , variablePointers{ variablePointers_ } { } @@ -87573,9 +89576,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexAttributeDivisorFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateDivisor_ = {}, VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateZeroDivisor_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , vertexAttributeInstanceRateDivisor( vertexAttributeInstanceRateDivisor_ ) - , vertexAttributeInstanceRateZeroDivisor( vertexAttributeInstanceRateZeroDivisor_ ) + : pNext{ pNext_ } + , vertexAttributeInstanceRateDivisor{ vertexAttributeInstanceRateDivisor_ } + , vertexAttributeInstanceRateZeroDivisor{ vertexAttributeInstanceRateZeroDivisor_ } { } @@ -87683,8 +89686,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexAttributeDivisorPropertiesEXT( uint32_t maxVertexAttribDivisor_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , maxVertexAttribDivisor( maxVertexAttribDivisor_ ) + : pNext{ pNext_ } + , maxVertexAttribDivisor{ maxVertexAttribDivisor_ } { } @@ -87769,9 +89772,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexAttributeDivisorPropertiesKHR( uint32_t maxVertexAttribDivisor_ = {}, VULKAN_HPP_NAMESPACE::Bool32 supportsNonZeroFirstInstance_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , maxVertexAttribDivisor( maxVertexAttribDivisor_ ) - , supportsNonZeroFirstInstance( supportsNonZeroFirstInstance_ ) + : pNext{ pNext_ } + , maxVertexAttribDivisor{ maxVertexAttribDivisor_ } + , supportsNonZeroFirstInstance{ supportsNonZeroFirstInstance_ } { } @@ -87857,8 +89860,8 @@ namespace 
VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexInputDynamicStateFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 vertexInputDynamicState_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , vertexInputDynamicState( vertexInputDynamicState_ ) + : pNext{ pNext_ } + , vertexInputDynamicState{ vertexInputDynamicState_ } { } @@ -87960,11 +89963,11 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::VideoComponentBitDepthFlagsKHR lumaBitDepth_ = {}, VULKAN_HPP_NAMESPACE::VideoComponentBitDepthFlagsKHR chromaBitDepth_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , videoCodecOperation( videoCodecOperation_ ) - , chromaSubsampling( chromaSubsampling_ ) - , lumaBitDepth( lumaBitDepth_ ) - , chromaBitDepth( chromaBitDepth_ ) + : pNext{ pNext_ } + , videoCodecOperation{ videoCodecOperation_ } + , chromaSubsampling{ chromaSubsampling_ } + , lumaBitDepth{ lumaBitDepth_ } + , chromaBitDepth{ chromaBitDepth_ } { } @@ -88089,9 +90092,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR PhysicalDeviceVideoEncodeQualityLevelInfoKHR( const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR * pVideoProfile_ = {}, uint32_t qualityLevel_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , pVideoProfile( pVideoProfile_ ) - , qualityLevel( qualityLevel_ ) + : pNext{ pNext_ } + , pVideoProfile{ pVideoProfile_ } + , qualityLevel{ qualityLevel_ } { } @@ -88195,8 +90198,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceVideoFormatInfoKHR( VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , imageUsage( imageUsage_ ) + : pNext{ pNext_ } + , imageUsage{ imageUsage_ } { } @@ -88292,8 +90295,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceVideoMaintenance1FeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 videoMaintenance1_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , videoMaintenance1( videoMaintenance1_ ) + : pNext{ pNext_ } + , videoMaintenance1{ videoMaintenance1_ } { } @@ -88401,19 +90404,19 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , storageBuffer16BitAccess( storageBuffer16BitAccess_ ) - , uniformAndStorageBuffer16BitAccess( uniformAndStorageBuffer16BitAccess_ ) - , storagePushConstant16( storagePushConstant16_ ) - , storageInputOutput16( storageInputOutput16_ ) - , multiview( multiview_ ) - , multiviewGeometryShader( multiviewGeometryShader_ ) - , multiviewTessellationShader( multiviewTessellationShader_ ) - , variablePointersStorageBuffer( variablePointersStorageBuffer_ ) - , variablePointers( variablePointers_ ) - , protectedMemory( protectedMemory_ ) - , samplerYcbcrConversion( samplerYcbcrConversion_ ) - , shaderDrawParameters( shaderDrawParameters_ ) + : pNext{ pNext_ } + , storageBuffer16BitAccess{ storageBuffer16BitAccess_ } + , uniformAndStorageBuffer16BitAccess{ uniformAndStorageBuffer16BitAccess_ } + , storagePushConstant16{ storagePushConstant16_ } + , storageInputOutput16{ storageInputOutput16_ } + , multiview{ multiview_ } + , multiviewGeometryShader{ multiviewGeometryShader_ } + , multiviewTessellationShader{ multiviewTessellationShader_ } + , 
variablePointersStorageBuffer{ variablePointersStorageBuffer_ } + , variablePointers{ variablePointers_ } + , protectedMemory{ protectedMemory_ } + , samplerYcbcrConversion{ samplerYcbcrConversion_ } + , shaderDrawParameters{ shaderDrawParameters_ } { } @@ -88639,22 +90642,22 @@ namespace VULKAN_HPP_NAMESPACE uint32_t maxPerSetDescriptors_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize maxMemoryAllocationSize_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , deviceUUID( deviceUUID_ ) - , driverUUID( driverUUID_ ) - , deviceLUID( deviceLUID_ ) - , deviceNodeMask( deviceNodeMask_ ) - , deviceLUIDValid( deviceLUIDValid_ ) - , subgroupSize( subgroupSize_ ) - , subgroupSupportedStages( subgroupSupportedStages_ ) - , subgroupSupportedOperations( subgroupSupportedOperations_ ) - , subgroupQuadOperationsInAllStages( subgroupQuadOperationsInAllStages_ ) - , pointClippingBehavior( pointClippingBehavior_ ) - , maxMultiviewViewCount( maxMultiviewViewCount_ ) - , maxMultiviewInstanceIndex( maxMultiviewInstanceIndex_ ) - , protectedNoFault( protectedNoFault_ ) - , maxPerSetDescriptors( maxPerSetDescriptors_ ) - , maxMemoryAllocationSize( maxMemoryAllocationSize_ ) + : pNext{ pNext_ } + , deviceUUID{ deviceUUID_ } + , driverUUID{ driverUUID_ } + , deviceLUID{ deviceLUID_ } + , deviceNodeMask{ deviceNodeMask_ } + , deviceLUIDValid{ deviceLUIDValid_ } + , subgroupSize{ subgroupSize_ } + , subgroupSupportedStages{ subgroupSupportedStages_ } + , subgroupSupportedOperations{ subgroupSupportedOperations_ } + , subgroupQuadOperationsInAllStages{ subgroupQuadOperationsInAllStages_ } + , pointClippingBehavior{ pointClippingBehavior_ } + , maxMultiviewViewCount{ maxMultiviewViewCount_ } + , maxMultiviewInstanceIndex{ maxMultiviewInstanceIndex_ } + , protectedNoFault{ protectedNoFault_ } + , maxPerSetDescriptors{ maxPerSetDescriptors_ } + , maxMemoryAllocationSize{ maxMemoryAllocationSize_ } { } @@ -88835,54 +90838,54 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Bool32 shaderOutputLayer_ = {}, VULKAN_HPP_NAMESPACE::Bool32 subgroupBroadcastDynamicId_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , samplerMirrorClampToEdge( samplerMirrorClampToEdge_ ) - , drawIndirectCount( drawIndirectCount_ ) - , storageBuffer8BitAccess( storageBuffer8BitAccess_ ) - , uniformAndStorageBuffer8BitAccess( uniformAndStorageBuffer8BitAccess_ ) - , storagePushConstant8( storagePushConstant8_ ) - , shaderBufferInt64Atomics( shaderBufferInt64Atomics_ ) - , shaderSharedInt64Atomics( shaderSharedInt64Atomics_ ) - , shaderFloat16( shaderFloat16_ ) - , shaderInt8( shaderInt8_ ) - , descriptorIndexing( descriptorIndexing_ ) - , shaderInputAttachmentArrayDynamicIndexing( shaderInputAttachmentArrayDynamicIndexing_ ) - , shaderUniformTexelBufferArrayDynamicIndexing( shaderUniformTexelBufferArrayDynamicIndexing_ ) - , shaderStorageTexelBufferArrayDynamicIndexing( shaderStorageTexelBufferArrayDynamicIndexing_ ) - , shaderUniformBufferArrayNonUniformIndexing( shaderUniformBufferArrayNonUniformIndexing_ ) - , shaderSampledImageArrayNonUniformIndexing( shaderSampledImageArrayNonUniformIndexing_ ) - , shaderStorageBufferArrayNonUniformIndexing( shaderStorageBufferArrayNonUniformIndexing_ ) - , shaderStorageImageArrayNonUniformIndexing( shaderStorageImageArrayNonUniformIndexing_ ) - , shaderInputAttachmentArrayNonUniformIndexing( shaderInputAttachmentArrayNonUniformIndexing_ ) - , shaderUniformTexelBufferArrayNonUniformIndexing( shaderUniformTexelBufferArrayNonUniformIndexing_ ) - , 
shaderStorageTexelBufferArrayNonUniformIndexing( shaderStorageTexelBufferArrayNonUniformIndexing_ ) - , descriptorBindingUniformBufferUpdateAfterBind( descriptorBindingUniformBufferUpdateAfterBind_ ) - , descriptorBindingSampledImageUpdateAfterBind( descriptorBindingSampledImageUpdateAfterBind_ ) - , descriptorBindingStorageImageUpdateAfterBind( descriptorBindingStorageImageUpdateAfterBind_ ) - , descriptorBindingStorageBufferUpdateAfterBind( descriptorBindingStorageBufferUpdateAfterBind_ ) - , descriptorBindingUniformTexelBufferUpdateAfterBind( descriptorBindingUniformTexelBufferUpdateAfterBind_ ) - , descriptorBindingStorageTexelBufferUpdateAfterBind( descriptorBindingStorageTexelBufferUpdateAfterBind_ ) - , descriptorBindingUpdateUnusedWhilePending( descriptorBindingUpdateUnusedWhilePending_ ) - , descriptorBindingPartiallyBound( descriptorBindingPartiallyBound_ ) - , descriptorBindingVariableDescriptorCount( descriptorBindingVariableDescriptorCount_ ) - , runtimeDescriptorArray( runtimeDescriptorArray_ ) - , samplerFilterMinmax( samplerFilterMinmax_ ) - , scalarBlockLayout( scalarBlockLayout_ ) - , imagelessFramebuffer( imagelessFramebuffer_ ) - , uniformBufferStandardLayout( uniformBufferStandardLayout_ ) - , shaderSubgroupExtendedTypes( shaderSubgroupExtendedTypes_ ) - , separateDepthStencilLayouts( separateDepthStencilLayouts_ ) - , hostQueryReset( hostQueryReset_ ) - , timelineSemaphore( timelineSemaphore_ ) - , bufferDeviceAddress( bufferDeviceAddress_ ) - , bufferDeviceAddressCaptureReplay( bufferDeviceAddressCaptureReplay_ ) - , bufferDeviceAddressMultiDevice( bufferDeviceAddressMultiDevice_ ) - , vulkanMemoryModel( vulkanMemoryModel_ ) - , vulkanMemoryModelDeviceScope( vulkanMemoryModelDeviceScope_ ) - , vulkanMemoryModelAvailabilityVisibilityChains( vulkanMemoryModelAvailabilityVisibilityChains_ ) - , shaderOutputViewportIndex( shaderOutputViewportIndex_ ) - , shaderOutputLayer( shaderOutputLayer_ ) - , subgroupBroadcastDynamicId( subgroupBroadcastDynamicId_ ) + : pNext{ pNext_ } + , samplerMirrorClampToEdge{ samplerMirrorClampToEdge_ } + , drawIndirectCount{ drawIndirectCount_ } + , storageBuffer8BitAccess{ storageBuffer8BitAccess_ } + , uniformAndStorageBuffer8BitAccess{ uniformAndStorageBuffer8BitAccess_ } + , storagePushConstant8{ storagePushConstant8_ } + , shaderBufferInt64Atomics{ shaderBufferInt64Atomics_ } + , shaderSharedInt64Atomics{ shaderSharedInt64Atomics_ } + , shaderFloat16{ shaderFloat16_ } + , shaderInt8{ shaderInt8_ } + , descriptorIndexing{ descriptorIndexing_ } + , shaderInputAttachmentArrayDynamicIndexing{ shaderInputAttachmentArrayDynamicIndexing_ } + , shaderUniformTexelBufferArrayDynamicIndexing{ shaderUniformTexelBufferArrayDynamicIndexing_ } + , shaderStorageTexelBufferArrayDynamicIndexing{ shaderStorageTexelBufferArrayDynamicIndexing_ } + , shaderUniformBufferArrayNonUniformIndexing{ shaderUniformBufferArrayNonUniformIndexing_ } + , shaderSampledImageArrayNonUniformIndexing{ shaderSampledImageArrayNonUniformIndexing_ } + , shaderStorageBufferArrayNonUniformIndexing{ shaderStorageBufferArrayNonUniformIndexing_ } + , shaderStorageImageArrayNonUniformIndexing{ shaderStorageImageArrayNonUniformIndexing_ } + , shaderInputAttachmentArrayNonUniformIndexing{ shaderInputAttachmentArrayNonUniformIndexing_ } + , shaderUniformTexelBufferArrayNonUniformIndexing{ shaderUniformTexelBufferArrayNonUniformIndexing_ } + , shaderStorageTexelBufferArrayNonUniformIndexing{ shaderStorageTexelBufferArrayNonUniformIndexing_ } + , 
descriptorBindingUniformBufferUpdateAfterBind{ descriptorBindingUniformBufferUpdateAfterBind_ } + , descriptorBindingSampledImageUpdateAfterBind{ descriptorBindingSampledImageUpdateAfterBind_ } + , descriptorBindingStorageImageUpdateAfterBind{ descriptorBindingStorageImageUpdateAfterBind_ } + , descriptorBindingStorageBufferUpdateAfterBind{ descriptorBindingStorageBufferUpdateAfterBind_ } + , descriptorBindingUniformTexelBufferUpdateAfterBind{ descriptorBindingUniformTexelBufferUpdateAfterBind_ } + , descriptorBindingStorageTexelBufferUpdateAfterBind{ descriptorBindingStorageTexelBufferUpdateAfterBind_ } + , descriptorBindingUpdateUnusedWhilePending{ descriptorBindingUpdateUnusedWhilePending_ } + , descriptorBindingPartiallyBound{ descriptorBindingPartiallyBound_ } + , descriptorBindingVariableDescriptorCount{ descriptorBindingVariableDescriptorCount_ } + , runtimeDescriptorArray{ runtimeDescriptorArray_ } + , samplerFilterMinmax{ samplerFilterMinmax_ } + , scalarBlockLayout{ scalarBlockLayout_ } + , imagelessFramebuffer{ imagelessFramebuffer_ } + , uniformBufferStandardLayout{ uniformBufferStandardLayout_ } + , shaderSubgroupExtendedTypes{ shaderSubgroupExtendedTypes_ } + , separateDepthStencilLayouts{ separateDepthStencilLayouts_ } + , hostQueryReset{ hostQueryReset_ } + , timelineSemaphore{ timelineSemaphore_ } + , bufferDeviceAddress{ bufferDeviceAddress_ } + , bufferDeviceAddressCaptureReplay{ bufferDeviceAddressCaptureReplay_ } + , bufferDeviceAddressMultiDevice{ bufferDeviceAddressMultiDevice_ } + , vulkanMemoryModel{ vulkanMemoryModel_ } + , vulkanMemoryModelDeviceScope{ vulkanMemoryModelDeviceScope_ } + , vulkanMemoryModelAvailabilityVisibilityChains{ vulkanMemoryModelAvailabilityVisibilityChains_ } + , shaderOutputViewportIndex{ shaderOutputViewportIndex_ } + , shaderOutputLayer{ shaderOutputLayer_ } + , subgroupBroadcastDynamicId{ subgroupBroadcastDynamicId_ } { } @@ -89516,59 +91519,59 @@ namespace VULKAN_HPP_NAMESPACE uint64_t maxTimelineSemaphoreValueDifference_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferIntegerColorSampleCounts_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , driverID( driverID_ ) - , driverName( driverName_ ) - , driverInfo( driverInfo_ ) - , conformanceVersion( conformanceVersion_ ) - , denormBehaviorIndependence( denormBehaviorIndependence_ ) - , roundingModeIndependence( roundingModeIndependence_ ) - , shaderSignedZeroInfNanPreserveFloat16( shaderSignedZeroInfNanPreserveFloat16_ ) - , shaderSignedZeroInfNanPreserveFloat32( shaderSignedZeroInfNanPreserveFloat32_ ) - , shaderSignedZeroInfNanPreserveFloat64( shaderSignedZeroInfNanPreserveFloat64_ ) - , shaderDenormPreserveFloat16( shaderDenormPreserveFloat16_ ) - , shaderDenormPreserveFloat32( shaderDenormPreserveFloat32_ ) - , shaderDenormPreserveFloat64( shaderDenormPreserveFloat64_ ) - , shaderDenormFlushToZeroFloat16( shaderDenormFlushToZeroFloat16_ ) - , shaderDenormFlushToZeroFloat32( shaderDenormFlushToZeroFloat32_ ) - , shaderDenormFlushToZeroFloat64( shaderDenormFlushToZeroFloat64_ ) - , shaderRoundingModeRTEFloat16( shaderRoundingModeRTEFloat16_ ) - , shaderRoundingModeRTEFloat32( shaderRoundingModeRTEFloat32_ ) - , shaderRoundingModeRTEFloat64( shaderRoundingModeRTEFloat64_ ) - , shaderRoundingModeRTZFloat16( shaderRoundingModeRTZFloat16_ ) - , shaderRoundingModeRTZFloat32( shaderRoundingModeRTZFloat32_ ) - , shaderRoundingModeRTZFloat64( shaderRoundingModeRTZFloat64_ ) - , maxUpdateAfterBindDescriptorsInAllPools( maxUpdateAfterBindDescriptorsInAllPools_ ) 
- , shaderUniformBufferArrayNonUniformIndexingNative( shaderUniformBufferArrayNonUniformIndexingNative_ ) - , shaderSampledImageArrayNonUniformIndexingNative( shaderSampledImageArrayNonUniformIndexingNative_ ) - , shaderStorageBufferArrayNonUniformIndexingNative( shaderStorageBufferArrayNonUniformIndexingNative_ ) - , shaderStorageImageArrayNonUniformIndexingNative( shaderStorageImageArrayNonUniformIndexingNative_ ) - , shaderInputAttachmentArrayNonUniformIndexingNative( shaderInputAttachmentArrayNonUniformIndexingNative_ ) - , robustBufferAccessUpdateAfterBind( robustBufferAccessUpdateAfterBind_ ) - , quadDivergentImplicitLod( quadDivergentImplicitLod_ ) - , maxPerStageDescriptorUpdateAfterBindSamplers( maxPerStageDescriptorUpdateAfterBindSamplers_ ) - , maxPerStageDescriptorUpdateAfterBindUniformBuffers( maxPerStageDescriptorUpdateAfterBindUniformBuffers_ ) - , maxPerStageDescriptorUpdateAfterBindStorageBuffers( maxPerStageDescriptorUpdateAfterBindStorageBuffers_ ) - , maxPerStageDescriptorUpdateAfterBindSampledImages( maxPerStageDescriptorUpdateAfterBindSampledImages_ ) - , maxPerStageDescriptorUpdateAfterBindStorageImages( maxPerStageDescriptorUpdateAfterBindStorageImages_ ) - , maxPerStageDescriptorUpdateAfterBindInputAttachments( maxPerStageDescriptorUpdateAfterBindInputAttachments_ ) - , maxPerStageUpdateAfterBindResources( maxPerStageUpdateAfterBindResources_ ) - , maxDescriptorSetUpdateAfterBindSamplers( maxDescriptorSetUpdateAfterBindSamplers_ ) - , maxDescriptorSetUpdateAfterBindUniformBuffers( maxDescriptorSetUpdateAfterBindUniformBuffers_ ) - , maxDescriptorSetUpdateAfterBindUniformBuffersDynamic( maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ ) - , maxDescriptorSetUpdateAfterBindStorageBuffers( maxDescriptorSetUpdateAfterBindStorageBuffers_ ) - , maxDescriptorSetUpdateAfterBindStorageBuffersDynamic( maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ ) - , maxDescriptorSetUpdateAfterBindSampledImages( maxDescriptorSetUpdateAfterBindSampledImages_ ) - , maxDescriptorSetUpdateAfterBindStorageImages( maxDescriptorSetUpdateAfterBindStorageImages_ ) - , maxDescriptorSetUpdateAfterBindInputAttachments( maxDescriptorSetUpdateAfterBindInputAttachments_ ) - , supportedDepthResolveModes( supportedDepthResolveModes_ ) - , supportedStencilResolveModes( supportedStencilResolveModes_ ) - , independentResolveNone( independentResolveNone_ ) - , independentResolve( independentResolve_ ) - , filterMinmaxSingleComponentFormats( filterMinmaxSingleComponentFormats_ ) - , filterMinmaxImageComponentMapping( filterMinmaxImageComponentMapping_ ) - , maxTimelineSemaphoreValueDifference( maxTimelineSemaphoreValueDifference_ ) - , framebufferIntegerColorSampleCounts( framebufferIntegerColorSampleCounts_ ) + : pNext{ pNext_ } + , driverID{ driverID_ } + , driverName{ driverName_ } + , driverInfo{ driverInfo_ } + , conformanceVersion{ conformanceVersion_ } + , denormBehaviorIndependence{ denormBehaviorIndependence_ } + , roundingModeIndependence{ roundingModeIndependence_ } + , shaderSignedZeroInfNanPreserveFloat16{ shaderSignedZeroInfNanPreserveFloat16_ } + , shaderSignedZeroInfNanPreserveFloat32{ shaderSignedZeroInfNanPreserveFloat32_ } + , shaderSignedZeroInfNanPreserveFloat64{ shaderSignedZeroInfNanPreserveFloat64_ } + , shaderDenormPreserveFloat16{ shaderDenormPreserveFloat16_ } + , shaderDenormPreserveFloat32{ shaderDenormPreserveFloat32_ } + , shaderDenormPreserveFloat64{ shaderDenormPreserveFloat64_ } + , shaderDenormFlushToZeroFloat16{ shaderDenormFlushToZeroFloat16_ } + , 
shaderDenormFlushToZeroFloat32{ shaderDenormFlushToZeroFloat32_ } + , shaderDenormFlushToZeroFloat64{ shaderDenormFlushToZeroFloat64_ } + , shaderRoundingModeRTEFloat16{ shaderRoundingModeRTEFloat16_ } + , shaderRoundingModeRTEFloat32{ shaderRoundingModeRTEFloat32_ } + , shaderRoundingModeRTEFloat64{ shaderRoundingModeRTEFloat64_ } + , shaderRoundingModeRTZFloat16{ shaderRoundingModeRTZFloat16_ } + , shaderRoundingModeRTZFloat32{ shaderRoundingModeRTZFloat32_ } + , shaderRoundingModeRTZFloat64{ shaderRoundingModeRTZFloat64_ } + , maxUpdateAfterBindDescriptorsInAllPools{ maxUpdateAfterBindDescriptorsInAllPools_ } + , shaderUniformBufferArrayNonUniformIndexingNative{ shaderUniformBufferArrayNonUniformIndexingNative_ } + , shaderSampledImageArrayNonUniformIndexingNative{ shaderSampledImageArrayNonUniformIndexingNative_ } + , shaderStorageBufferArrayNonUniformIndexingNative{ shaderStorageBufferArrayNonUniformIndexingNative_ } + , shaderStorageImageArrayNonUniformIndexingNative{ shaderStorageImageArrayNonUniformIndexingNative_ } + , shaderInputAttachmentArrayNonUniformIndexingNative{ shaderInputAttachmentArrayNonUniformIndexingNative_ } + , robustBufferAccessUpdateAfterBind{ robustBufferAccessUpdateAfterBind_ } + , quadDivergentImplicitLod{ quadDivergentImplicitLod_ } + , maxPerStageDescriptorUpdateAfterBindSamplers{ maxPerStageDescriptorUpdateAfterBindSamplers_ } + , maxPerStageDescriptorUpdateAfterBindUniformBuffers{ maxPerStageDescriptorUpdateAfterBindUniformBuffers_ } + , maxPerStageDescriptorUpdateAfterBindStorageBuffers{ maxPerStageDescriptorUpdateAfterBindStorageBuffers_ } + , maxPerStageDescriptorUpdateAfterBindSampledImages{ maxPerStageDescriptorUpdateAfterBindSampledImages_ } + , maxPerStageDescriptorUpdateAfterBindStorageImages{ maxPerStageDescriptorUpdateAfterBindStorageImages_ } + , maxPerStageDescriptorUpdateAfterBindInputAttachments{ maxPerStageDescriptorUpdateAfterBindInputAttachments_ } + , maxPerStageUpdateAfterBindResources{ maxPerStageUpdateAfterBindResources_ } + , maxDescriptorSetUpdateAfterBindSamplers{ maxDescriptorSetUpdateAfterBindSamplers_ } + , maxDescriptorSetUpdateAfterBindUniformBuffers{ maxDescriptorSetUpdateAfterBindUniformBuffers_ } + , maxDescriptorSetUpdateAfterBindUniformBuffersDynamic{ maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ } + , maxDescriptorSetUpdateAfterBindStorageBuffers{ maxDescriptorSetUpdateAfterBindStorageBuffers_ } + , maxDescriptorSetUpdateAfterBindStorageBuffersDynamic{ maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ } + , maxDescriptorSetUpdateAfterBindSampledImages{ maxDescriptorSetUpdateAfterBindSampledImages_ } + , maxDescriptorSetUpdateAfterBindStorageImages{ maxDescriptorSetUpdateAfterBindStorageImages_ } + , maxDescriptorSetUpdateAfterBindInputAttachments{ maxDescriptorSetUpdateAfterBindInputAttachments_ } + , supportedDepthResolveModes{ supportedDepthResolveModes_ } + , supportedStencilResolveModes{ supportedStencilResolveModes_ } + , independentResolveNone{ independentResolveNone_ } + , independentResolve{ independentResolve_ } + , filterMinmaxSingleComponentFormats{ filterMinmaxSingleComponentFormats_ } + , filterMinmaxImageComponentMapping{ filterMinmaxImageComponentMapping_ } + , maxTimelineSemaphoreValueDifference{ maxTimelineSemaphoreValueDifference_ } + , framebufferIntegerColorSampleCounts{ framebufferIntegerColorSampleCounts_ } { } @@ -89579,129 +91582,6 @@ namespace VULKAN_HPP_NAMESPACE { } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - PhysicalDeviceVulkan12Properties( - VULKAN_HPP_NAMESPACE::DriverId 
driverID_, - std::string const & driverName_, - std::string const & driverInfo_ = {}, - VULKAN_HPP_NAMESPACE::ConformanceVersion conformanceVersion_ = {}, - VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence denormBehaviorIndependence_ = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly, - VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence roundingModeIndependence_ = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly, - VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat16_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat32_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat64_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat16_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat32_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat64_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat16_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat32_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat64_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat16_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat32_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat64_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat16_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat32_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat64_ = {}, - uint32_t maxUpdateAfterBindDescriptorsInAllPools_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexingNative_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexingNative_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexingNative_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexingNative_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexingNative_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccessUpdateAfterBind_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 quadDivergentImplicitLod_ = {}, - uint32_t maxPerStageDescriptorUpdateAfterBindSamplers_ = {}, - uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers_ = {}, - uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers_ = {}, - uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages_ = {}, - uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages_ = {}, - uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments_ = {}, - uint32_t maxPerStageUpdateAfterBindResources_ = {}, - uint32_t maxDescriptorSetUpdateAfterBindSamplers_ = {}, - uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers_ = {}, - uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ = {}, - uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers_ = {}, - uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ = {}, - uint32_t maxDescriptorSetUpdateAfterBindSampledImages_ = {}, - uint32_t maxDescriptorSetUpdateAfterBindStorageImages_ = {}, - uint32_t maxDescriptorSetUpdateAfterBindInputAttachments_ = {}, - VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedDepthResolveModes_ = {}, - VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedStencilResolveModes_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 independentResolveNone_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 independentResolve_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxSingleComponentFormats_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxImageComponentMapping_ = {}, - uint64_t 
maxTimelineSemaphoreValueDifference_ = {}, - VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferIntegerColorSampleCounts_ = {}, - void * pNext_ = nullptr ) - : pNext( pNext_ ) - , driverID( driverID_ ) - , conformanceVersion( conformanceVersion_ ) - , denormBehaviorIndependence( denormBehaviorIndependence_ ) - , roundingModeIndependence( roundingModeIndependence_ ) - , shaderSignedZeroInfNanPreserveFloat16( shaderSignedZeroInfNanPreserveFloat16_ ) - , shaderSignedZeroInfNanPreserveFloat32( shaderSignedZeroInfNanPreserveFloat32_ ) - , shaderSignedZeroInfNanPreserveFloat64( shaderSignedZeroInfNanPreserveFloat64_ ) - , shaderDenormPreserveFloat16( shaderDenormPreserveFloat16_ ) - , shaderDenormPreserveFloat32( shaderDenormPreserveFloat32_ ) - , shaderDenormPreserveFloat64( shaderDenormPreserveFloat64_ ) - , shaderDenormFlushToZeroFloat16( shaderDenormFlushToZeroFloat16_ ) - , shaderDenormFlushToZeroFloat32( shaderDenormFlushToZeroFloat32_ ) - , shaderDenormFlushToZeroFloat64( shaderDenormFlushToZeroFloat64_ ) - , shaderRoundingModeRTEFloat16( shaderRoundingModeRTEFloat16_ ) - , shaderRoundingModeRTEFloat32( shaderRoundingModeRTEFloat32_ ) - , shaderRoundingModeRTEFloat64( shaderRoundingModeRTEFloat64_ ) - , shaderRoundingModeRTZFloat16( shaderRoundingModeRTZFloat16_ ) - , shaderRoundingModeRTZFloat32( shaderRoundingModeRTZFloat32_ ) - , shaderRoundingModeRTZFloat64( shaderRoundingModeRTZFloat64_ ) - , maxUpdateAfterBindDescriptorsInAllPools( maxUpdateAfterBindDescriptorsInAllPools_ ) - , shaderUniformBufferArrayNonUniformIndexingNative( shaderUniformBufferArrayNonUniformIndexingNative_ ) - , shaderSampledImageArrayNonUniformIndexingNative( shaderSampledImageArrayNonUniformIndexingNative_ ) - , shaderStorageBufferArrayNonUniformIndexingNative( shaderStorageBufferArrayNonUniformIndexingNative_ ) - , shaderStorageImageArrayNonUniformIndexingNative( shaderStorageImageArrayNonUniformIndexingNative_ ) - , shaderInputAttachmentArrayNonUniformIndexingNative( shaderInputAttachmentArrayNonUniformIndexingNative_ ) - , robustBufferAccessUpdateAfterBind( robustBufferAccessUpdateAfterBind_ ) - , quadDivergentImplicitLod( quadDivergentImplicitLod_ ) - , maxPerStageDescriptorUpdateAfterBindSamplers( maxPerStageDescriptorUpdateAfterBindSamplers_ ) - , maxPerStageDescriptorUpdateAfterBindUniformBuffers( maxPerStageDescriptorUpdateAfterBindUniformBuffers_ ) - , maxPerStageDescriptorUpdateAfterBindStorageBuffers( maxPerStageDescriptorUpdateAfterBindStorageBuffers_ ) - , maxPerStageDescriptorUpdateAfterBindSampledImages( maxPerStageDescriptorUpdateAfterBindSampledImages_ ) - , maxPerStageDescriptorUpdateAfterBindStorageImages( maxPerStageDescriptorUpdateAfterBindStorageImages_ ) - , maxPerStageDescriptorUpdateAfterBindInputAttachments( maxPerStageDescriptorUpdateAfterBindInputAttachments_ ) - , maxPerStageUpdateAfterBindResources( maxPerStageUpdateAfterBindResources_ ) - , maxDescriptorSetUpdateAfterBindSamplers( maxDescriptorSetUpdateAfterBindSamplers_ ) - , maxDescriptorSetUpdateAfterBindUniformBuffers( maxDescriptorSetUpdateAfterBindUniformBuffers_ ) - , maxDescriptorSetUpdateAfterBindUniformBuffersDynamic( maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ ) - , maxDescriptorSetUpdateAfterBindStorageBuffers( maxDescriptorSetUpdateAfterBindStorageBuffers_ ) - , maxDescriptorSetUpdateAfterBindStorageBuffersDynamic( maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ ) - , maxDescriptorSetUpdateAfterBindSampledImages( maxDescriptorSetUpdateAfterBindSampledImages_ ) - , 
maxDescriptorSetUpdateAfterBindStorageImages( maxDescriptorSetUpdateAfterBindStorageImages_ ) - , maxDescriptorSetUpdateAfterBindInputAttachments( maxDescriptorSetUpdateAfterBindInputAttachments_ ) - , supportedDepthResolveModes( supportedDepthResolveModes_ ) - , supportedStencilResolveModes( supportedStencilResolveModes_ ) - , independentResolveNone( independentResolveNone_ ) - , independentResolve( independentResolve_ ) - , filterMinmaxSingleComponentFormats( filterMinmaxSingleComponentFormats_ ) - , filterMinmaxImageComponentMapping( filterMinmaxImageComponentMapping_ ) - , maxTimelineSemaphoreValueDifference( maxTimelineSemaphoreValueDifference_ ) - , framebufferIntegerColorSampleCounts( framebufferIntegerColorSampleCounts_ ) - { - VULKAN_HPP_ASSERT( driverName_.size() < VK_MAX_DRIVER_NAME_SIZE ); -# if defined( WIN32 ) - strncpy_s( driverName, VK_MAX_DRIVER_NAME_SIZE, driverName_.data(), driverName_.size() ); -# else - strncpy( driverName, driverName_.data(), std::min( VK_MAX_DRIVER_NAME_SIZE, driverName_.size() ) ); -# endif - - VULKAN_HPP_ASSERT( driverInfo_.size() < VK_MAX_DRIVER_INFO_SIZE ); -# if defined( WIN32 ) - strncpy_s( driverInfo, VK_MAX_DRIVER_INFO_SIZE, driverInfo_.data(), driverInfo_.size() ); -# else - strncpy( driverInfo, driverInfo_.data(), std::min( VK_MAX_DRIVER_INFO_SIZE, driverInfo_.size() ) ); -# endif - } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - PhysicalDeviceVulkan12Properties & operator=( PhysicalDeviceVulkan12Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ @@ -90092,22 +91972,22 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerDotProduct_ = {}, VULKAN_HPP_NAMESPACE::Bool32 maintenance4_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , robustImageAccess( robustImageAccess_ ) - , inlineUniformBlock( inlineUniformBlock_ ) - , descriptorBindingInlineUniformBlockUpdateAfterBind( descriptorBindingInlineUniformBlockUpdateAfterBind_ ) - , pipelineCreationCacheControl( pipelineCreationCacheControl_ ) - , privateData( privateData_ ) - , shaderDemoteToHelperInvocation( shaderDemoteToHelperInvocation_ ) - , shaderTerminateInvocation( shaderTerminateInvocation_ ) - , subgroupSizeControl( subgroupSizeControl_ ) - , computeFullSubgroups( computeFullSubgroups_ ) - , synchronization2( synchronization2_ ) - , textureCompressionASTC_HDR( textureCompressionASTC_HDR_ ) - , shaderZeroInitializeWorkgroupMemory( shaderZeroInitializeWorkgroupMemory_ ) - , dynamicRendering( dynamicRendering_ ) - , shaderIntegerDotProduct( shaderIntegerDotProduct_ ) - , maintenance4( maintenance4_ ) + : pNext{ pNext_ } + , robustImageAccess{ robustImageAccess_ } + , inlineUniformBlock{ inlineUniformBlock_ } + , descriptorBindingInlineUniformBlockUpdateAfterBind{ descriptorBindingInlineUniformBlockUpdateAfterBind_ } + , pipelineCreationCacheControl{ pipelineCreationCacheControl_ } + , privateData{ privateData_ } + , shaderDemoteToHelperInvocation{ shaderDemoteToHelperInvocation_ } + , shaderTerminateInvocation{ shaderTerminateInvocation_ } + , subgroupSizeControl{ subgroupSizeControl_ } + , computeFullSubgroups{ computeFullSubgroups_ } + , synchronization2{ synchronization2_ } + , textureCompressionASTC_HDR{ textureCompressionASTC_HDR_ } + , shaderZeroInitializeWorkgroupMemory{ shaderZeroInitializeWorkgroupMemory_ } + , dynamicRendering{ dynamicRendering_ } + , shaderIntegerDotProduct{ shaderIntegerDotProduct_ } + , maintenance4{ maintenance4_ } { } @@ -90393,53 +92273,52 @@ namespace 
VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Bool32 uniformTexelBufferOffsetSingleTexelAlignment_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize maxBufferSize_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , minSubgroupSize( minSubgroupSize_ ) - , maxSubgroupSize( maxSubgroupSize_ ) - , maxComputeWorkgroupSubgroups( maxComputeWorkgroupSubgroups_ ) - , requiredSubgroupSizeStages( requiredSubgroupSizeStages_ ) - , maxInlineUniformBlockSize( maxInlineUniformBlockSize_ ) - , maxPerStageDescriptorInlineUniformBlocks( maxPerStageDescriptorInlineUniformBlocks_ ) - , maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks( maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks_ ) - , maxDescriptorSetInlineUniformBlocks( maxDescriptorSetInlineUniformBlocks_ ) - , maxDescriptorSetUpdateAfterBindInlineUniformBlocks( maxDescriptorSetUpdateAfterBindInlineUniformBlocks_ ) - , maxInlineUniformTotalSize( maxInlineUniformTotalSize_ ) - , integerDotProduct8BitUnsignedAccelerated( integerDotProduct8BitUnsignedAccelerated_ ) - , integerDotProduct8BitSignedAccelerated( integerDotProduct8BitSignedAccelerated_ ) - , integerDotProduct8BitMixedSignednessAccelerated( integerDotProduct8BitMixedSignednessAccelerated_ ) - , integerDotProduct4x8BitPackedUnsignedAccelerated( integerDotProduct4x8BitPackedUnsignedAccelerated_ ) - , integerDotProduct4x8BitPackedSignedAccelerated( integerDotProduct4x8BitPackedSignedAccelerated_ ) - , integerDotProduct4x8BitPackedMixedSignednessAccelerated( integerDotProduct4x8BitPackedMixedSignednessAccelerated_ ) - , integerDotProduct16BitUnsignedAccelerated( integerDotProduct16BitUnsignedAccelerated_ ) - , integerDotProduct16BitSignedAccelerated( integerDotProduct16BitSignedAccelerated_ ) - , integerDotProduct16BitMixedSignednessAccelerated( integerDotProduct16BitMixedSignednessAccelerated_ ) - , integerDotProduct32BitUnsignedAccelerated( integerDotProduct32BitUnsignedAccelerated_ ) - , integerDotProduct32BitSignedAccelerated( integerDotProduct32BitSignedAccelerated_ ) - , integerDotProduct32BitMixedSignednessAccelerated( integerDotProduct32BitMixedSignednessAccelerated_ ) - , integerDotProduct64BitUnsignedAccelerated( integerDotProduct64BitUnsignedAccelerated_ ) - , integerDotProduct64BitSignedAccelerated( integerDotProduct64BitSignedAccelerated_ ) - , integerDotProduct64BitMixedSignednessAccelerated( integerDotProduct64BitMixedSignednessAccelerated_ ) - , integerDotProductAccumulatingSaturating8BitUnsignedAccelerated( integerDotProductAccumulatingSaturating8BitUnsignedAccelerated_ ) - , integerDotProductAccumulatingSaturating8BitSignedAccelerated( integerDotProductAccumulatingSaturating8BitSignedAccelerated_ ) - , integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated( integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated_ ) - , integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated( integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated_ ) - , integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated( integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated_ ) - , integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated( - integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated_ ) - , integerDotProductAccumulatingSaturating16BitUnsignedAccelerated( integerDotProductAccumulatingSaturating16BitUnsignedAccelerated_ ) - , integerDotProductAccumulatingSaturating16BitSignedAccelerated( 
integerDotProductAccumulatingSaturating16BitSignedAccelerated_ ) - , integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated( integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated_ ) - , integerDotProductAccumulatingSaturating32BitUnsignedAccelerated( integerDotProductAccumulatingSaturating32BitUnsignedAccelerated_ ) - , integerDotProductAccumulatingSaturating32BitSignedAccelerated( integerDotProductAccumulatingSaturating32BitSignedAccelerated_ ) - , integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated( integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated_ ) - , integerDotProductAccumulatingSaturating64BitUnsignedAccelerated( integerDotProductAccumulatingSaturating64BitUnsignedAccelerated_ ) - , integerDotProductAccumulatingSaturating64BitSignedAccelerated( integerDotProductAccumulatingSaturating64BitSignedAccelerated_ ) - , integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated( integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated_ ) - , storageTexelBufferOffsetAlignmentBytes( storageTexelBufferOffsetAlignmentBytes_ ) - , storageTexelBufferOffsetSingleTexelAlignment( storageTexelBufferOffsetSingleTexelAlignment_ ) - , uniformTexelBufferOffsetAlignmentBytes( uniformTexelBufferOffsetAlignmentBytes_ ) - , uniformTexelBufferOffsetSingleTexelAlignment( uniformTexelBufferOffsetSingleTexelAlignment_ ) - , maxBufferSize( maxBufferSize_ ) + : pNext{ pNext_ } + , minSubgroupSize{ minSubgroupSize_ } + , maxSubgroupSize{ maxSubgroupSize_ } + , maxComputeWorkgroupSubgroups{ maxComputeWorkgroupSubgroups_ } + , requiredSubgroupSizeStages{ requiredSubgroupSizeStages_ } + , maxInlineUniformBlockSize{ maxInlineUniformBlockSize_ } + , maxPerStageDescriptorInlineUniformBlocks{ maxPerStageDescriptorInlineUniformBlocks_ } + , maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks{ maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks_ } + , maxDescriptorSetInlineUniformBlocks{ maxDescriptorSetInlineUniformBlocks_ } + , maxDescriptorSetUpdateAfterBindInlineUniformBlocks{ maxDescriptorSetUpdateAfterBindInlineUniformBlocks_ } + , maxInlineUniformTotalSize{ maxInlineUniformTotalSize_ } + , integerDotProduct8BitUnsignedAccelerated{ integerDotProduct8BitUnsignedAccelerated_ } + , integerDotProduct8BitSignedAccelerated{ integerDotProduct8BitSignedAccelerated_ } + , integerDotProduct8BitMixedSignednessAccelerated{ integerDotProduct8BitMixedSignednessAccelerated_ } + , integerDotProduct4x8BitPackedUnsignedAccelerated{ integerDotProduct4x8BitPackedUnsignedAccelerated_ } + , integerDotProduct4x8BitPackedSignedAccelerated{ integerDotProduct4x8BitPackedSignedAccelerated_ } + , integerDotProduct4x8BitPackedMixedSignednessAccelerated{ integerDotProduct4x8BitPackedMixedSignednessAccelerated_ } + , integerDotProduct16BitUnsignedAccelerated{ integerDotProduct16BitUnsignedAccelerated_ } + , integerDotProduct16BitSignedAccelerated{ integerDotProduct16BitSignedAccelerated_ } + , integerDotProduct16BitMixedSignednessAccelerated{ integerDotProduct16BitMixedSignednessAccelerated_ } + , integerDotProduct32BitUnsignedAccelerated{ integerDotProduct32BitUnsignedAccelerated_ } + , integerDotProduct32BitSignedAccelerated{ integerDotProduct32BitSignedAccelerated_ } + , integerDotProduct32BitMixedSignednessAccelerated{ integerDotProduct32BitMixedSignednessAccelerated_ } + , integerDotProduct64BitUnsignedAccelerated{ integerDotProduct64BitUnsignedAccelerated_ } + , integerDotProduct64BitSignedAccelerated{ 
integerDotProduct64BitSignedAccelerated_ } + , integerDotProduct64BitMixedSignednessAccelerated{ integerDotProduct64BitMixedSignednessAccelerated_ } + , integerDotProductAccumulatingSaturating8BitUnsignedAccelerated{ integerDotProductAccumulatingSaturating8BitUnsignedAccelerated_ } + , integerDotProductAccumulatingSaturating8BitSignedAccelerated{ integerDotProductAccumulatingSaturating8BitSignedAccelerated_ } + , integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated{ integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated_ } + , integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated{ integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated_ } + , integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated{ integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated_ } + , integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated{ integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated_ } + , integerDotProductAccumulatingSaturating16BitUnsignedAccelerated{ integerDotProductAccumulatingSaturating16BitUnsignedAccelerated_ } + , integerDotProductAccumulatingSaturating16BitSignedAccelerated{ integerDotProductAccumulatingSaturating16BitSignedAccelerated_ } + , integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated{ integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated_ } + , integerDotProductAccumulatingSaturating32BitUnsignedAccelerated{ integerDotProductAccumulatingSaturating32BitUnsignedAccelerated_ } + , integerDotProductAccumulatingSaturating32BitSignedAccelerated{ integerDotProductAccumulatingSaturating32BitSignedAccelerated_ } + , integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated{ integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated_ } + , integerDotProductAccumulatingSaturating64BitUnsignedAccelerated{ integerDotProductAccumulatingSaturating64BitUnsignedAccelerated_ } + , integerDotProductAccumulatingSaturating64BitSignedAccelerated{ integerDotProductAccumulatingSaturating64BitSignedAccelerated_ } + , integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated{ integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated_ } + , storageTexelBufferOffsetAlignmentBytes{ storageTexelBufferOffsetAlignmentBytes_ } + , storageTexelBufferOffsetSingleTexelAlignment{ storageTexelBufferOffsetSingleTexelAlignment_ } + , uniformTexelBufferOffsetAlignmentBytes{ uniformTexelBufferOffsetAlignmentBytes_ } + , uniformTexelBufferOffsetSingleTexelAlignment{ uniformTexelBufferOffsetSingleTexelAlignment_ } + , maxBufferSize{ maxBufferSize_ } { } @@ -90707,10 +92586,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope_ = {}, VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , vulkanMemoryModel( vulkanMemoryModel_ ) - , vulkanMemoryModelDeviceScope( vulkanMemoryModelDeviceScope_ ) - , vulkanMemoryModelAvailabilityVisibilityChains( vulkanMemoryModelAvailabilityVisibilityChains_ ) + : pNext{ pNext_ } + , vulkanMemoryModel{ vulkanMemoryModel_ } + , vulkanMemoryModelDeviceScope{ vulkanMemoryModelDeviceScope_ } + , vulkanMemoryModelAvailabilityVisibilityChains{ vulkanMemoryModelAvailabilityVisibilityChains_ } { } @@ -90835,11 +92714,11 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout8BitAccess_ = {}, 
VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout16BitAccess_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , workgroupMemoryExplicitLayout( workgroupMemoryExplicitLayout_ ) - , workgroupMemoryExplicitLayoutScalarBlockLayout( workgroupMemoryExplicitLayoutScalarBlockLayout_ ) - , workgroupMemoryExplicitLayout8BitAccess( workgroupMemoryExplicitLayout8BitAccess_ ) - , workgroupMemoryExplicitLayout16BitAccess( workgroupMemoryExplicitLayout16BitAccess_ ) + : pNext{ pNext_ } + , workgroupMemoryExplicitLayout{ workgroupMemoryExplicitLayout_ } + , workgroupMemoryExplicitLayoutScalarBlockLayout{ workgroupMemoryExplicitLayoutScalarBlockLayout_ } + , workgroupMemoryExplicitLayout8BitAccess{ workgroupMemoryExplicitLayout8BitAccess_ } + , workgroupMemoryExplicitLayout16BitAccess{ workgroupMemoryExplicitLayout16BitAccess_ } { } @@ -90976,8 +92855,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 ycbcr2plane444Formats_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , ycbcr2plane444Formats( ycbcr2plane444Formats_ ) + : pNext{ pNext_ } + , ycbcr2plane444Formats{ ycbcr2plane444Formats_ } { } @@ -91074,8 +92953,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceYcbcrDegammaFeaturesQCOM( VULKAN_HPP_NAMESPACE::Bool32 ycbcrDegamma_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , ycbcrDegamma( ycbcrDegamma_ ) + : pNext{ pNext_ } + , ycbcrDegamma{ ycbcrDegamma_ } { } @@ -91171,8 +93050,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceYcbcrImageArraysFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 ycbcrImageArrays_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , ycbcrImageArrays( ycbcrImageArrays_ ) + : pNext{ pNext_ } + , ycbcrImageArrays{ ycbcrImageArrays_ } { } @@ -91269,8 +93148,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures( VULKAN_HPP_NAMESPACE::Bool32 shaderZeroInitializeWorkgroupMemory_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , shaderZeroInitializeWorkgroupMemory( shaderZeroInitializeWorkgroupMemory_ ) + : pNext{ pNext_ } + , shaderZeroInitializeWorkgroupMemory{ shaderZeroInitializeWorkgroupMemory_ } { } @@ -91361,6 +93240,910 @@ namespace VULKAN_HPP_NAMESPACE using PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR = PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures; + struct PipelineBinaryKeyKHR + { + using NativeType = VkPipelineBinaryKeyKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineBinaryKeyKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PipelineBinaryKeyKHR( uint32_t keySize_ = {}, + std::array const & key_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , keySize{ keySize_ } + , key{ key_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 PipelineBinaryKeyKHR( PipelineBinaryKeyKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineBinaryKeyKHR( VkPipelineBinaryKeyKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineBinaryKeyKHR( *reinterpret_cast( &rhs ) ) + { + } + + PipelineBinaryKeyKHR & operator=( 
PipelineBinaryKeyKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PipelineBinaryKeyKHR & operator=( VkPipelineBinaryKeyKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineBinaryKeyKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineBinaryKeyKHR & setKeySize( uint32_t keySize_ ) VULKAN_HPP_NOEXCEPT + { + keySize = keySize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineBinaryKeyKHR & setKey( std::array key_ ) VULKAN_HPP_NOEXCEPT + { + key = key_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPipelineBinaryKeyKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineBinaryKeyKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, keySize, key ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineBinaryKeyKHR const & ) const = default; +#else + bool operator==( PipelineBinaryKeyKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( keySize == rhs.keySize ) && ( key == rhs.key ); +# endif + } + + bool operator!=( PipelineBinaryKeyKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineBinaryKeyKHR; + void * pNext = {}; + uint32_t keySize = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D key = {}; + }; + + template <> + struct CppType + { + using Type = PipelineBinaryKeyKHR; + }; + + struct PipelineBinaryDataKHR + { + using NativeType = VkPipelineBinaryDataKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineBinaryDataKHR( size_t dataSize_ = {}, void * pData_ = {} ) VULKAN_HPP_NOEXCEPT + : dataSize{ dataSize_ } + , pData{ pData_ } + { + } + + VULKAN_HPP_CONSTEXPR PipelineBinaryDataKHR( PipelineBinaryDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineBinaryDataKHR( VkPipelineBinaryDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineBinaryDataKHR( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + template + PipelineBinaryDataKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & data_ ) : dataSize( data_.size() * sizeof( T ) ), pData( data_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + PipelineBinaryDataKHR & operator=( PipelineBinaryDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PipelineBinaryDataKHR & operator=( VkPipelineBinaryDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineBinaryDataKHR & setDataSize( size_t dataSize_ ) VULKAN_HPP_NOEXCEPT + { + dataSize = dataSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineBinaryDataKHR & setPData( void * pData_ ) VULKAN_HPP_NOEXCEPT + { + pData = pData_; + return *this; + } + +# if !defined( 
VULKAN_HPP_DISABLE_ENHANCED_MODE ) + template + PipelineBinaryDataKHR & setData( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & data_ ) VULKAN_HPP_NOEXCEPT + { + dataSize = data_.size() * sizeof( T ); + pData = data_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPipelineBinaryDataKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineBinaryDataKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( dataSize, pData ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineBinaryDataKHR const & ) const = default; +#else + bool operator==( PipelineBinaryDataKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( dataSize == rhs.dataSize ) && ( pData == rhs.pData ); +# endif + } + + bool operator!=( PipelineBinaryDataKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + size_t dataSize = {}; + void * pData = {}; + }; + + struct PipelineBinaryKeysAndDataKHR + { + using NativeType = VkPipelineBinaryKeysAndDataKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PipelineBinaryKeysAndDataKHR( uint32_t binaryCount_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineBinaryKeyKHR * pPipelineBinaryKeys_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineBinaryDataKHR * pPipelineBinaryData_ = {} ) VULKAN_HPP_NOEXCEPT + : binaryCount{ binaryCount_ } + , pPipelineBinaryKeys{ pPipelineBinaryKeys_ } + , pPipelineBinaryData{ pPipelineBinaryData_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 PipelineBinaryKeysAndDataKHR( PipelineBinaryKeysAndDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineBinaryKeysAndDataKHR( VkPipelineBinaryKeysAndDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineBinaryKeysAndDataKHR( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineBinaryKeysAndDataKHR( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pipelineBinaryKeys_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pipelineBinaryData_ = {} ) + : binaryCount( static_cast( pipelineBinaryKeys_.size() ) ) + , pPipelineBinaryKeys( pipelineBinaryKeys_.data() ) + , pPipelineBinaryData( pipelineBinaryData_.data() ) + { +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( pipelineBinaryKeys_.size() == pipelineBinaryData_.size() ); +# else + if ( pipelineBinaryKeys_.size() != pipelineBinaryData_.size() ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING + "::PipelineBinaryKeysAndDataKHR::PipelineBinaryKeysAndDataKHR: pipelineBinaryKeys_.size() != pipelineBinaryData_.size()" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + PipelineBinaryKeysAndDataKHR & operator=( PipelineBinaryKeysAndDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PipelineBinaryKeysAndDataKHR & operator=( VkPipelineBinaryKeysAndDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineBinaryKeysAndDataKHR & setBinaryCount( uint32_t binaryCount_ ) 
VULKAN_HPP_NOEXCEPT + { + binaryCount = binaryCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineBinaryKeysAndDataKHR & + setPPipelineBinaryKeys( const VULKAN_HPP_NAMESPACE::PipelineBinaryKeyKHR * pPipelineBinaryKeys_ ) VULKAN_HPP_NOEXCEPT + { + pPipelineBinaryKeys = pPipelineBinaryKeys_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineBinaryKeysAndDataKHR & setPipelineBinaryKeys( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pipelineBinaryKeys_ ) VULKAN_HPP_NOEXCEPT + { + binaryCount = static_cast( pipelineBinaryKeys_.size() ); + pPipelineBinaryKeys = pipelineBinaryKeys_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 PipelineBinaryKeysAndDataKHR & + setPPipelineBinaryData( const VULKAN_HPP_NAMESPACE::PipelineBinaryDataKHR * pPipelineBinaryData_ ) VULKAN_HPP_NOEXCEPT + { + pPipelineBinaryData = pPipelineBinaryData_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineBinaryKeysAndDataKHR & setPipelineBinaryData( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pipelineBinaryData_ ) VULKAN_HPP_NOEXCEPT + { + binaryCount = static_cast( pipelineBinaryData_.size() ); + pPipelineBinaryData = pipelineBinaryData_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPipelineBinaryKeysAndDataKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineBinaryKeysAndDataKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( binaryCount, pPipelineBinaryKeys, pPipelineBinaryData ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineBinaryKeysAndDataKHR const & ) const = default; +#else + bool operator==( PipelineBinaryKeysAndDataKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( binaryCount == rhs.binaryCount ) && ( pPipelineBinaryKeys == rhs.pPipelineBinaryKeys ) && ( pPipelineBinaryData == rhs.pPipelineBinaryData ); +# endif + } + + bool operator!=( PipelineBinaryKeysAndDataKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t binaryCount = {}; + const VULKAN_HPP_NAMESPACE::PipelineBinaryKeyKHR * pPipelineBinaryKeys = {}; + const VULKAN_HPP_NAMESPACE::PipelineBinaryDataKHR * pPipelineBinaryData = {}; + }; + + struct PipelineCreateInfoKHR + { + using NativeType = VkPipelineCreateInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineCreateInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineCreateInfoKHR( void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } {} + + VULKAN_HPP_CONSTEXPR PipelineCreateInfoKHR( PipelineCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineCreateInfoKHR( VkPipelineCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineCreateInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + PipelineCreateInfoKHR & operator=( PipelineCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PipelineCreateInfoKHR & operator=( 
VkPipelineCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineCreateInfoKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPipelineCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineCreateInfoKHR const & ) const = default; +#else + bool operator==( PipelineCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ); +# endif + } + + bool operator!=( PipelineCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCreateInfoKHR; + void * pNext = {}; + }; + + template <> + struct CppType + { + using Type = PipelineCreateInfoKHR; + }; + + struct PipelineBinaryCreateInfoKHR + { + using NativeType = VkPipelineBinaryCreateInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineBinaryCreateInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PipelineBinaryCreateInfoKHR( const VULKAN_HPP_NAMESPACE::PipelineBinaryKeysAndDataKHR * pKeysAndDataInfo_ = {}, + VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineCreateInfoKHR * pPipelineCreateInfo_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pKeysAndDataInfo{ pKeysAndDataInfo_ } + , pipeline{ pipeline_ } + , pPipelineCreateInfo{ pPipelineCreateInfo_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 PipelineBinaryCreateInfoKHR( PipelineBinaryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineBinaryCreateInfoKHR( VkPipelineBinaryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineBinaryCreateInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + PipelineBinaryCreateInfoKHR & operator=( PipelineBinaryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PipelineBinaryCreateInfoKHR & operator=( VkPipelineBinaryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineBinaryCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineBinaryCreateInfoKHR & + setPKeysAndDataInfo( const VULKAN_HPP_NAMESPACE::PipelineBinaryKeysAndDataKHR * pKeysAndDataInfo_ ) VULKAN_HPP_NOEXCEPT + { + pKeysAndDataInfo = pKeysAndDataInfo_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineBinaryCreateInfoKHR & setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT + { + pipeline = pipeline_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 
PipelineBinaryCreateInfoKHR & + setPPipelineCreateInfo( const VULKAN_HPP_NAMESPACE::PipelineCreateInfoKHR * pPipelineCreateInfo_ ) VULKAN_HPP_NOEXCEPT + { + pPipelineCreateInfo = pPipelineCreateInfo_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPipelineBinaryCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineBinaryCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pKeysAndDataInfo, pipeline, pPipelineCreateInfo ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineBinaryCreateInfoKHR const & ) const = default; +#else + bool operator==( PipelineBinaryCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pKeysAndDataInfo == rhs.pKeysAndDataInfo ) && ( pipeline == rhs.pipeline ) && + ( pPipelineCreateInfo == rhs.pPipelineCreateInfo ); +# endif + } + + bool operator!=( PipelineBinaryCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineBinaryCreateInfoKHR; + const void * pNext = {}; + const VULKAN_HPP_NAMESPACE::PipelineBinaryKeysAndDataKHR * pKeysAndDataInfo = {}; + VULKAN_HPP_NAMESPACE::Pipeline pipeline = {}; + const VULKAN_HPP_NAMESPACE::PipelineCreateInfoKHR * pPipelineCreateInfo = {}; + }; + + template <> + struct CppType + { + using Type = PipelineBinaryCreateInfoKHR; + }; + + struct PipelineBinaryDataInfoKHR + { + using NativeType = VkPipelineBinaryDataInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineBinaryDataInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineBinaryDataInfoKHR( VULKAN_HPP_NAMESPACE::PipelineBinaryKHR pipelineBinary_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pipelineBinary{ pipelineBinary_ } + { + } + + VULKAN_HPP_CONSTEXPR PipelineBinaryDataInfoKHR( PipelineBinaryDataInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineBinaryDataInfoKHR( VkPipelineBinaryDataInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineBinaryDataInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + PipelineBinaryDataInfoKHR & operator=( PipelineBinaryDataInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PipelineBinaryDataInfoKHR & operator=( VkPipelineBinaryDataInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineBinaryDataInfoKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineBinaryDataInfoKHR & setPipelineBinary( VULKAN_HPP_NAMESPACE::PipelineBinaryKHR pipelineBinary_ ) VULKAN_HPP_NOEXCEPT + { + pipelineBinary = pipelineBinary_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPipelineBinaryDataInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator 
VkPipelineBinaryDataInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pipelineBinary ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineBinaryDataInfoKHR const & ) const = default; +#else + bool operator==( PipelineBinaryDataInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipelineBinary == rhs.pipelineBinary ); +# endif + } + + bool operator!=( PipelineBinaryDataInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineBinaryDataInfoKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineBinaryKHR pipelineBinary = {}; + }; + + template <> + struct CppType + { + using Type = PipelineBinaryDataInfoKHR; + }; + + struct PipelineBinaryHandlesInfoKHR + { + using NativeType = VkPipelineBinaryHandlesInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineBinaryHandlesInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineBinaryHandlesInfoKHR( uint32_t pipelineBinaryCount_ = {}, + VULKAN_HPP_NAMESPACE::PipelineBinaryKHR * pPipelineBinaries_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pipelineBinaryCount{ pipelineBinaryCount_ } + , pPipelineBinaries{ pPipelineBinaries_ } + { + } + + VULKAN_HPP_CONSTEXPR PipelineBinaryHandlesInfoKHR( PipelineBinaryHandlesInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineBinaryHandlesInfoKHR( VkPipelineBinaryHandlesInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineBinaryHandlesInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineBinaryHandlesInfoKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pipelineBinaries_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ), pipelineBinaryCount( static_cast( pipelineBinaries_.size() ) ), pPipelineBinaries( pipelineBinaries_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + PipelineBinaryHandlesInfoKHR & operator=( PipelineBinaryHandlesInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PipelineBinaryHandlesInfoKHR & operator=( VkPipelineBinaryHandlesInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineBinaryHandlesInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineBinaryHandlesInfoKHR & setPipelineBinaryCount( uint32_t pipelineBinaryCount_ ) VULKAN_HPP_NOEXCEPT + { + pipelineBinaryCount = pipelineBinaryCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineBinaryHandlesInfoKHR & + setPPipelineBinaries( VULKAN_HPP_NAMESPACE::PipelineBinaryKHR * pPipelineBinaries_ ) VULKAN_HPP_NOEXCEPT + { + pPipelineBinaries = pPipelineBinaries_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineBinaryHandlesInfoKHR & setPipelineBinaries( + 
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pipelineBinaries_ ) VULKAN_HPP_NOEXCEPT + { + pipelineBinaryCount = static_cast( pipelineBinaries_.size() ); + pPipelineBinaries = pipelineBinaries_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPipelineBinaryHandlesInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineBinaryHandlesInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pipelineBinaryCount, pPipelineBinaries ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineBinaryHandlesInfoKHR const & ) const = default; +#else + bool operator==( PipelineBinaryHandlesInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipelineBinaryCount == rhs.pipelineBinaryCount ) && + ( pPipelineBinaries == rhs.pPipelineBinaries ); +# endif + } + + bool operator!=( PipelineBinaryHandlesInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineBinaryHandlesInfoKHR; + const void * pNext = {}; + uint32_t pipelineBinaryCount = {}; + VULKAN_HPP_NAMESPACE::PipelineBinaryKHR * pPipelineBinaries = {}; + }; + + template <> + struct CppType + { + using Type = PipelineBinaryHandlesInfoKHR; + }; + + struct PipelineBinaryInfoKHR + { + using NativeType = VkPipelineBinaryInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineBinaryInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineBinaryInfoKHR( uint32_t binaryCount_ = {}, + const VULKAN_HPP_NAMESPACE::PipelineBinaryKHR * pPipelineBinaries_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , binaryCount{ binaryCount_ } + , pPipelineBinaries{ pPipelineBinaries_ } + { + } + + VULKAN_HPP_CONSTEXPR PipelineBinaryInfoKHR( PipelineBinaryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineBinaryInfoKHR( VkPipelineBinaryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineBinaryInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineBinaryInfoKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pipelineBinaries_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ), binaryCount( static_cast( pipelineBinaries_.size() ) ), pPipelineBinaries( pipelineBinaries_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + PipelineBinaryInfoKHR & operator=( PipelineBinaryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PipelineBinaryInfoKHR & operator=( VkPipelineBinaryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineBinaryInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineBinaryInfoKHR & setBinaryCount( 
uint32_t binaryCount_ ) VULKAN_HPP_NOEXCEPT + { + binaryCount = binaryCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineBinaryInfoKHR & + setPPipelineBinaries( const VULKAN_HPP_NAMESPACE::PipelineBinaryKHR * pPipelineBinaries_ ) VULKAN_HPP_NOEXCEPT + { + pPipelineBinaries = pPipelineBinaries_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + PipelineBinaryInfoKHR & setPipelineBinaries( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pipelineBinaries_ ) VULKAN_HPP_NOEXCEPT + { + binaryCount = static_cast( pipelineBinaries_.size() ); + pPipelineBinaries = pipelineBinaries_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPipelineBinaryInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineBinaryInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, binaryCount, pPipelineBinaries ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineBinaryInfoKHR const & ) const = default; +#else + bool operator==( PipelineBinaryInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( binaryCount == rhs.binaryCount ) && ( pPipelineBinaries == rhs.pPipelineBinaries ); +# endif + } + + bool operator!=( PipelineBinaryInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineBinaryInfoKHR; + const void * pNext = {}; + uint32_t binaryCount = {}; + const VULKAN_HPP_NAMESPACE::PipelineBinaryKHR * pPipelineBinaries = {}; + }; + + template <> + struct CppType + { + using Type = PipelineBinaryInfoKHR; + }; + struct PipelineCacheCreateInfo { using NativeType = VkPipelineCacheCreateInfo; @@ -91373,10 +94156,10 @@ namespace VULKAN_HPP_NAMESPACE size_t initialDataSize_ = {}, const void * pInitialData_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , initialDataSize( initialDataSize_ ) - , pInitialData( pInitialData_ ) + : pNext{ pNext_ } + , flags{ flags_ } + , initialDataSize{ initialDataSize_ } + , pInitialData{ pInitialData_ } { } @@ -91512,11 +94295,11 @@ namespace VULKAN_HPP_NAMESPACE uint32_t vendorID_ = {}, uint32_t deviceID_ = {}, std::array const & pipelineCacheUUID_ = {} ) VULKAN_HPP_NOEXCEPT - : headerSize( headerSize_ ) - , headerVersion( headerVersion_ ) - , vendorID( vendorID_ ) - , deviceID( deviceID_ ) - , pipelineCacheUUID( pipelineCacheUUID_ ) + : headerSize{ headerSize_ } + , headerVersion{ headerVersion_ } + , vendorID{ vendorID_ } + , deviceID{ deviceID_ } + , pipelineCacheUUID{ pipelineCacheUUID_ } { } @@ -91635,10 +94418,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Bool32 dstPremultiplied_ = {}, VULKAN_HPP_NAMESPACE::BlendOverlapEXT blendOverlap_ = VULKAN_HPP_NAMESPACE::BlendOverlapEXT::eUncorrelated, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , srcPremultiplied( srcPremultiplied_ ) - , dstPremultiplied( dstPremultiplied_ ) - , blendOverlap( blendOverlap_ ) + : pNext{ pNext_ } + , srcPremultiplied{ srcPremultiplied_ 
} + , dstPremultiplied{ dstPremultiplied_ } + , blendOverlap{ blendOverlap_ } { } @@ -91757,9 +94540,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR PipelineColorWriteCreateInfoEXT( uint32_t attachmentCount_ = {}, const VULKAN_HPP_NAMESPACE::Bool32 * pColorWriteEnables_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , attachmentCount( attachmentCount_ ) - , pColorWriteEnables( pColorWriteEnables_ ) + : pNext{ pNext_ } + , attachmentCount{ attachmentCount_ } + , pColorWriteEnables{ pColorWriteEnables_ } { } @@ -91881,8 +94664,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PipelineCompilerControlCreateInfoAMD( VULKAN_HPP_NAMESPACE::PipelineCompilerControlFlagsAMD compilerControlFlags_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , compilerControlFlags( compilerControlFlags_ ) + : pNext{ pNext_ } + , compilerControlFlags{ compilerControlFlags_ } { } @@ -91984,12 +94767,12 @@ namespace VULKAN_HPP_NAMESPACE uint32_t coverageModulationTableCount_ = {}, const float * pCoverageModulationTable_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , coverageModulationMode( coverageModulationMode_ ) - , coverageModulationTableEnable( coverageModulationTableEnable_ ) - , coverageModulationTableCount( coverageModulationTableCount_ ) - , pCoverageModulationTable( pCoverageModulationTable_ ) + : pNext{ pNext_ } + , flags{ flags_ } + , coverageModulationMode{ coverageModulationMode_ } + , coverageModulationTableEnable{ coverageModulationTableEnable_ } + , coverageModulationTableCount{ coverageModulationTableCount_ } + , pCoverageModulationTable{ pCoverageModulationTable_ } { } @@ -92154,9 +94937,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateFlagsNV flags_ = {}, VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode_ = VULKAN_HPP_NAMESPACE::CoverageReductionModeNV::eMerge, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , coverageReductionMode( coverageReductionMode_ ) + : pNext{ pNext_ } + , flags{ flags_ } + , coverageReductionMode{ coverageReductionMode_ } { } @@ -92266,10 +95049,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Bool32 coverageToColorEnable_ = {}, uint32_t coverageToColorLocation_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , coverageToColorEnable( coverageToColorEnable_ ) - , coverageToColorLocation( coverageToColorLocation_ ) + : pNext{ pNext_ } + , flags{ flags_ } + , coverageToColorEnable{ coverageToColorEnable_ } + , coverageToColorLocation{ coverageToColorLocation_ } { } @@ -92386,8 +95169,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PipelineCreateFlags2CreateInfoKHR( VULKAN_HPP_NAMESPACE::PipelineCreateFlags2KHR flags_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) + : pNext{ pNext_ } + , flags{ flags_ } { } @@ -92480,8 +95263,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PipelineCreationFeedback( VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackFlags flags_ = {}, uint64_t duration_ = {} ) VULKAN_HPP_NOEXCEPT - : flags( flags_ ) - , duration( duration_ ) + : flags{ flags_ } + , duration{ duration_ } { } @@ -92560,10 +95343,10 @@ namespace VULKAN_HPP_NAMESPACE 
uint32_t pipelineStageCreationFeedbackCount_ = {}, VULKAN_HPP_NAMESPACE::PipelineCreationFeedback * pPipelineStageCreationFeedbacks_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , pPipelineCreationFeedback( pPipelineCreationFeedback_ ) - , pipelineStageCreationFeedbackCount( pipelineStageCreationFeedbackCount_ ) - , pPipelineStageCreationFeedbacks( pPipelineStageCreationFeedbacks_ ) + : pNext{ pNext_ } + , pPipelineCreationFeedback{ pPipelineCreationFeedback_ } + , pipelineStageCreationFeedbackCount{ pipelineStageCreationFeedbackCount_ } + , pPipelineStageCreationFeedbacks{ pPipelineStageCreationFeedbacks_ } { } @@ -92712,11 +95495,11 @@ namespace VULKAN_HPP_NAMESPACE uint32_t discardRectangleCount_ = {}, const VULKAN_HPP_NAMESPACE::Rect2D * pDiscardRectangles_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , discardRectangleMode( discardRectangleMode_ ) - , discardRectangleCount( discardRectangleCount_ ) - , pDiscardRectangles( pDiscardRectangles_ ) + : pNext{ pNext_ } + , flags{ flags_ } + , discardRectangleMode{ discardRectangleMode_ } + , discardRectangleCount{ discardRectangleCount_ } + , pDiscardRectangles{ pDiscardRectangles_ } { } @@ -92867,9 +95650,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR PipelineExecutableInfoKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {}, uint32_t executableIndex_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , pipeline( pipeline_ ) - , executableIndex( executableIndex_ ) + : pNext{ pNext_ } + , pipeline{ pipeline_ } + , executableIndex{ executableIndex_ } { } @@ -92976,12 +95759,12 @@ namespace VULKAN_HPP_NAMESPACE size_t dataSize_ = {}, void * pData_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , name( name_ ) - , description( description_ ) - , isText( isText_ ) - , dataSize( dataSize_ ) - , pData( pData_ ) + : pNext{ pNext_ } + , name{ name_ } + , description{ description_ } + , isText{ isText_ } + , dataSize{ dataSize_ } + , pData{ pData_ } { } @@ -92993,31 +95776,6 @@ namespace VULKAN_HPP_NAMESPACE { } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - template - PipelineExecutableInternalRepresentationKHR( std::string const & name_, - std::string const & description_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 isText_ = {}, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & data_ = {}, - void * pNext_ = nullptr ) - : pNext( pNext_ ), isText( isText_ ), dataSize( data_.size() * sizeof( T ) ), pData( data_.data() ) - { - VULKAN_HPP_ASSERT( name_.size() < VK_MAX_DESCRIPTION_SIZE ); -# if defined( WIN32 ) - strncpy_s( name, VK_MAX_DESCRIPTION_SIZE, name_.data(), name_.size() ); -# else - strncpy( name, name_.data(), std::min( VK_MAX_DESCRIPTION_SIZE, name_.size() ) ); -# endif - - VULKAN_HPP_ASSERT( description_.size() < VK_MAX_DESCRIPTION_SIZE ); -# if defined( WIN32 ) - strncpy_s( description, VK_MAX_DESCRIPTION_SIZE, description_.data(), description_.size() ); -# else - strncpy( description, description_.data(), std::min( VK_MAX_DESCRIPTION_SIZE, description_.size() ) ); -# endif - } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - PipelineExecutableInternalRepresentationKHR & operator=( PipelineExecutableInternalRepresentationKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ @@ -93117,11 +95875,11 @@ namespace VULKAN_HPP_NAMESPACE std::array const & description_ = {}, uint32_t subgroupSize_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : 
pNext( pNext_ ) - , stages( stages_ ) - , name( name_ ) - , description( description_ ) - , subgroupSize( subgroupSize_ ) + : pNext{ pNext_ } + , stages{ stages_ } + , name{ name_ } + , description{ description_ } + , subgroupSize{ subgroupSize_ } { } @@ -93132,30 +95890,6 @@ namespace VULKAN_HPP_NAMESPACE { } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - PipelineExecutablePropertiesKHR( VULKAN_HPP_NAMESPACE::ShaderStageFlags stages_, - std::string const & name_, - std::string const & description_ = {}, - uint32_t subgroupSize_ = {}, - void * pNext_ = nullptr ) - : pNext( pNext_ ), stages( stages_ ), subgroupSize( subgroupSize_ ) - { - VULKAN_HPP_ASSERT( name_.size() < VK_MAX_DESCRIPTION_SIZE ); -# if defined( WIN32 ) - strncpy_s( name, VK_MAX_DESCRIPTION_SIZE, name_.data(), name_.size() ); -# else - strncpy( name, name_.data(), std::min( VK_MAX_DESCRIPTION_SIZE, name_.size() ) ); -# endif - - VULKAN_HPP_ASSERT( description_.size() < VK_MAX_DESCRIPTION_SIZE ); -# if defined( WIN32 ) - strncpy_s( description, VK_MAX_DESCRIPTION_SIZE, description_.data(), description_.size() ); -# else - strncpy( description, description_.data(), std::min( VK_MAX_DESCRIPTION_SIZE, description_.size() ) ); -# endif - } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - PipelineExecutablePropertiesKHR & operator=( PipelineExecutablePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ @@ -93315,11 +96049,11 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR format_ = VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR::eBool32, VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR value_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , name( name_ ) - , description( description_ ) - , format( format_ ) - , value( value_ ) + : pNext{ pNext_ } + , name{ name_ } + , description{ description_ } + , format{ format_ } + , value{ value_ } { } @@ -93330,31 +96064,6 @@ namespace VULKAN_HPP_NAMESPACE { } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - PipelineExecutableStatisticKHR( - std::string const & name_, - std::string const & description_ = {}, - VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR format_ = VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR::eBool32, - VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR value_ = {}, - void * pNext_ = nullptr ) - : pNext( pNext_ ), format( format_ ), value( value_ ) - { - VULKAN_HPP_ASSERT( name_.size() < VK_MAX_DESCRIPTION_SIZE ); -# if defined( WIN32 ) - strncpy_s( name, VK_MAX_DESCRIPTION_SIZE, name_.data(), name_.size() ); -# else - strncpy( name, name_.data(), std::min( VK_MAX_DESCRIPTION_SIZE, name_.size() ) ); -# endif - - VULKAN_HPP_ASSERT( description_.size() < VK_MAX_DESCRIPTION_SIZE ); -# if defined( WIN32 ) - strncpy_s( description, VK_MAX_DESCRIPTION_SIZE, description_.data(), description_.size() ); -# else - strncpy( description, description_.data(), std::min( VK_MAX_DESCRIPTION_SIZE, description_.size() ) ); -# endif - } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - PipelineExecutableStatisticKHR & operator=( PipelineExecutableStatisticKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ @@ -93420,10 +96129,10 @@ namespace VULKAN_HPP_NAMESPACE std::array const & combinerOps_ = { { VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR::eKeep, VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR::eKeep } }, const void * pNext_ = nullptr ) 
VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , shadingRateType( shadingRateType_ ) - , shadingRate( shadingRate_ ) - , combinerOps( combinerOps_ ) + : pNext{ pNext_ } + , shadingRateType{ shadingRateType_ } + , shadingRate{ shadingRate_ } + , combinerOps{ combinerOps_ } { } @@ -93545,9 +96254,9 @@ namespace VULKAN_HPP_NAMESPACE std::array const & combinerOps_ = { { VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR::eKeep, VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR::eKeep } }, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , fragmentSize( fragmentSize_ ) - , combinerOps( combinerOps_ ) + : pNext{ pNext_ } + , fragmentSize{ fragmentSize_ } + , combinerOps{ combinerOps_ } { } @@ -93658,9 +96367,9 @@ namespace VULKAN_HPP_NAMESPACE PipelineIndirectDeviceAddressInfoNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , pipelineBindPoint( pipelineBindPoint_ ) - , pipeline( pipeline_ ) + : pNext{ pNext_ } + , pipelineBindPoint{ pipelineBindPoint_ } + , pipeline{ pipeline_ } { } @@ -93766,8 +96475,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PipelineInfoKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , pipeline( pipeline_ ) + : pNext{ pNext_ } + , pipeline{ pipeline_ } { } @@ -93859,9 +96568,9 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PushConstantRange( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ = {}, uint32_t offset_ = {}, uint32_t size_ = {} ) VULKAN_HPP_NOEXCEPT - : stageFlags( stageFlags_ ) - , offset( offset_ ) - , size( size_ ) + : stageFlags{ stageFlags_ } + , offset{ offset_ } + , size{ size_ } { } @@ -93958,12 +96667,12 @@ namespace VULKAN_HPP_NAMESPACE uint32_t pushConstantRangeCount_ = {}, const VULKAN_HPP_NAMESPACE::PushConstantRange * pPushConstantRanges_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , setLayoutCount( setLayoutCount_ ) - , pSetLayouts( pSetLayouts_ ) - , pushConstantRangeCount( pushConstantRangeCount_ ) - , pPushConstantRanges( pPushConstantRanges_ ) + : pNext{ pNext_ } + , flags{ flags_ } + , setLayoutCount{ setLayoutCount_ } + , pSetLayouts{ pSetLayouts_ } + , pushConstantRangeCount{ pushConstantRangeCount_ } + , pPushConstantRanges{ pPushConstantRanges_ } { } @@ -94131,8 +96840,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR_14 PipelinePropertiesIdentifierEXT( std::array const & pipelineIdentifier_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , pipelineIdentifier( pipelineIdentifier_ ) + : pNext{ pNext_ } + , pipelineIdentifier{ pipelineIdentifier_ } { } @@ -94217,10 +96926,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode_ = VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT::eDisabled, float extraPrimitiveOverestimationSize_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , conservativeRasterizationMode( conservativeRasterizationMode_ ) - , extraPrimitiveOverestimationSize( extraPrimitiveOverestimationSize_ ) + : pNext{ pNext_ } + , flags{ flags_ } + , 
conservativeRasterizationMode{ conservativeRasterizationMode_ } + , extraPrimitiveOverestimationSize{ extraPrimitiveOverestimationSize_ } { } @@ -94342,9 +97051,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR PipelineRasterizationDepthClipStateCreateInfoEXT( VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateFlagsEXT flags_ = {}, VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , depthClipEnable( depthClipEnable_ ) + : pNext{ pNext_ } + , flags{ flags_ } + , depthClipEnable{ depthClipEnable_ } { } @@ -94457,11 +97166,11 @@ namespace VULKAN_HPP_NAMESPACE uint32_t lineStippleFactor_ = {}, uint16_t lineStipplePattern_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , lineRasterizationMode( lineRasterizationMode_ ) - , stippledLineEnable( stippledLineEnable_ ) - , lineStippleFactor( lineStippleFactor_ ) - , lineStipplePattern( lineStipplePattern_ ) + : pNext{ pNext_ } + , lineRasterizationMode{ lineRasterizationMode_ } + , stippledLineEnable{ stippledLineEnable_ } + , lineStippleFactor{ lineStippleFactor_ } + , lineStipplePattern{ lineStipplePattern_ } { } @@ -94590,8 +97299,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR PipelineRasterizationProvokingVertexStateCreateInfoEXT( VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT provokingVertexMode_ = VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT::eFirstVertex, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , provokingVertexMode( provokingVertexMode_ ) + : pNext{ pNext_ } + , provokingVertexMode{ provokingVertexMode_ } { } @@ -94692,8 +97401,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR PipelineRasterizationStateRasterizationOrderAMD( VULKAN_HPP_NAMESPACE::RasterizationOrderAMD rasterizationOrder_ = VULKAN_HPP_NAMESPACE::RasterizationOrderAMD::eStrict, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , rasterizationOrder( rasterizationOrder_ ) + : pNext{ pNext_ } + , rasterizationOrder{ rasterizationOrder_ } { } @@ -94792,9 +97501,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR PipelineRasterizationStateStreamCreateInfoEXT( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateFlagsEXT flags_ = {}, uint32_t rasterizationStream_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , rasterizationStream( rasterizationStream_ ) + : pNext{ pNext_ } + , flags{ flags_ } + , rasterizationStream{ rasterizationStream_ } { } @@ -94906,12 +97615,12 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , viewMask( viewMask_ ) - , colorAttachmentCount( colorAttachmentCount_ ) - , pColorAttachmentFormats( pColorAttachmentFormats_ ) - , depthAttachmentFormat( depthAttachmentFormat_ ) - , stencilAttachmentFormat( stencilAttachmentFormat_ ) + : pNext{ pNext_ } + , viewMask{ viewMask_ } + , colorAttachmentCount{ colorAttachmentCount_ } + , pColorAttachmentFormats{ pColorAttachmentFormats_ } + , depthAttachmentFormat{ depthAttachmentFormat_ } + , stencilAttachmentFormat{ stencilAttachmentFormat_ } { } @@ -95073,8 +97782,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR 
PipelineRepresentativeFragmentTestStateCreateInfoNV( VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTestEnable_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , representativeFragmentTestEnable( representativeFragmentTestEnable_ ) + : pNext{ pNext_ } + , representativeFragmentTestEnable{ representativeFragmentTestEnable_ } { } @@ -95177,11 +97886,11 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT vertexInputs_ = VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT::eDeviceDefault, VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehaviorEXT images_ = VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehaviorEXT::eDeviceDefault, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , storageBuffers( storageBuffers_ ) - , uniformBuffers( uniformBuffers_ ) - , vertexInputs( vertexInputs_ ) - , images( images_ ) + : pNext{ pNext_ } + , storageBuffers{ storageBuffers_ } + , uniformBuffers{ uniformBuffers_ } + , vertexInputs{ vertexInputs_ } + , images{ images_ } { } @@ -95308,9 +98017,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR PipelineSampleLocationsStateCreateInfoEXT( VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable_ = {}, VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , sampleLocationsEnable( sampleLocationsEnable_ ) - , sampleLocationsInfo( sampleLocationsInfo_ ) + : pNext{ pNext_ } + , sampleLocationsEnable{ sampleLocationsEnable_ } + , sampleLocationsInfo{ sampleLocationsInfo_ } { } @@ -95420,9 +98129,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR PipelineShaderStageModuleIdentifierCreateInfoEXT( uint32_t identifierSize_ = {}, const uint8_t * pIdentifier_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , identifierSize( identifierSize_ ) - , pIdentifier( pIdentifier_ ) + : pNext{ pNext_ } + , identifierSize{ identifierSize_ } + , pIdentifier{ pIdentifier_ } { } @@ -95545,9 +98254,9 @@ namespace VULKAN_HPP_NAMESPACE # if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PipelineShaderStageNodeCreateInfoAMDX( const char * pName_ = {}, uint32_t index_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , pName( pName_ ) - , index( index_ ) + : pNext{ pNext_ } + , pName{ pName_ } + , index{ index_ } { } @@ -95659,8 +98368,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PipelineShaderStageRequiredSubgroupSizeCreateInfo( uint32_t requiredSubgroupSize_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , requiredSubgroupSize( requiredSubgroupSize_ ) + : pNext{ pNext_ } + , requiredSubgroupSize{ requiredSubgroupSize_ } { } @@ -95748,8 +98457,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR PipelineTessellationDomainOriginStateCreateInfo( VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin_ = VULKAN_HPP_NAMESPACE::TessellationDomainOrigin::eUpperLeft, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , domainOrigin( domainOrigin_ ) + : pNext{ pNext_ } + , domainOrigin{ domainOrigin_ } { } @@ -95845,8 +98554,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR VertexInputBindingDivisorDescriptionKHR( uint32_t binding_ = {}, uint32_t divisor_ = {} ) VULKAN_HPP_NOEXCEPT - : binding( binding_ ) - , divisor( divisor_ ) + : binding{ 
binding_ } + , divisor{ divisor_ } { } @@ -95939,9 +98648,9 @@ namespace VULKAN_HPP_NAMESPACE PipelineVertexInputDivisorStateCreateInfoKHR( uint32_t vertexBindingDivisorCount_ = {}, const VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionKHR * pVertexBindingDivisors_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , vertexBindingDivisorCount( vertexBindingDivisorCount_ ) - , pVertexBindingDivisors( pVertexBindingDivisors_ ) + : pNext{ pNext_ } + , vertexBindingDivisorCount{ vertexBindingDivisorCount_ } + , pVertexBindingDivisors{ pVertexBindingDivisors_ } { } @@ -96077,10 +98786,10 @@ namespace VULKAN_HPP_NAMESPACE uint32_t customSampleOrderCount_ = {}, const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV * pCustomSampleOrders_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , sampleOrderType( sampleOrderType_ ) - , customSampleOrderCount( customSampleOrderCount_ ) - , pCustomSampleOrders( pCustomSampleOrders_ ) + : pNext{ pNext_ } + , sampleOrderType{ sampleOrderType_ } + , customSampleOrderCount{ customSampleOrderCount_ } + , pCustomSampleOrders{ pCustomSampleOrders_ } { } @@ -96223,8 +98932,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PipelineViewportDepthClipControlCreateInfoEXT( VULKAN_HPP_NAMESPACE::Bool32 negativeOneToOne_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , negativeOneToOne( negativeOneToOne_ ) + : pNext{ pNext_ } + , negativeOneToOne{ negativeOneToOne_ } { } @@ -96323,9 +99032,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR PipelineViewportExclusiveScissorStateCreateInfoNV( uint32_t exclusiveScissorCount_ = {}, const VULKAN_HPP_NAMESPACE::Rect2D * pExclusiveScissors_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , exclusiveScissorCount( exclusiveScissorCount_ ) - , pExclusiveScissors( pExclusiveScissors_ ) + : pNext{ pNext_ } + , exclusiveScissorCount{ exclusiveScissorCount_ } + , pExclusiveScissors{ pExclusiveScissors_ } { } @@ -96447,8 +99156,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ShadingRatePaletteNV( uint32_t shadingRatePaletteEntryCount_ = {}, const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV * pShadingRatePaletteEntries_ = {} ) VULKAN_HPP_NOEXCEPT - : shadingRatePaletteEntryCount( shadingRatePaletteEntryCount_ ) - , pShadingRatePaletteEntries( pShadingRatePaletteEntries_ ) + : shadingRatePaletteEntryCount{ shadingRatePaletteEntryCount_ } + , pShadingRatePaletteEntries{ pShadingRatePaletteEntries_ } { } @@ -96560,10 +99269,10 @@ namespace VULKAN_HPP_NAMESPACE uint32_t viewportCount_ = {}, const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV * pShadingRatePalettes_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , shadingRateImageEnable( shadingRateImageEnable_ ) - , viewportCount( viewportCount_ ) - , pShadingRatePalettes( pShadingRatePalettes_ ) + : pNext{ pNext_ } + , shadingRateImageEnable{ shadingRateImageEnable_ } + , viewportCount{ viewportCount_ } + , pShadingRatePalettes{ pShadingRatePalettes_ } { } @@ -96705,10 +99414,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV y_ = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX, VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV z_ = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX, VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV w_ = 
VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX ) VULKAN_HPP_NOEXCEPT - : x( x_ ) - , y( y_ ) - , z( z_ ) - , w( w_ ) + : x{ x_ } + , y{ y_ } + , z{ z_ } + , w{ w_ } { } @@ -96813,10 +99522,10 @@ namespace VULKAN_HPP_NAMESPACE uint32_t viewportCount_ = {}, const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV * pViewportSwizzles_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , viewportCount( viewportCount_ ) - , pViewportSwizzles( pViewportSwizzles_ ) + : pNext{ pNext_ } + , flags{ flags_ } + , viewportCount{ viewportCount_ } + , pViewportSwizzles{ pViewportSwizzles_ } { } @@ -96949,8 +99658,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ViewportWScalingNV( float xcoeff_ = {}, float ycoeff_ = {} ) VULKAN_HPP_NOEXCEPT - : xcoeff( xcoeff_ ) - , ycoeff( ycoeff_ ) + : xcoeff{ xcoeff_ } + , ycoeff{ ycoeff_ } { } @@ -97038,10 +99747,10 @@ namespace VULKAN_HPP_NAMESPACE uint32_t viewportCount_ = {}, const VULKAN_HPP_NAMESPACE::ViewportWScalingNV * pViewportWScalings_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , viewportWScalingEnable( viewportWScalingEnable_ ) - , viewportCount( viewportCount_ ) - , pViewportWScalings( pViewportWScalings_ ) + : pNext{ pNext_ } + , viewportWScalingEnable{ viewportWScalingEnable_ } + , viewportCount{ viewportCount_ } + , pViewportWScalings{ pViewportWScalings_ } { } @@ -97181,8 +99890,8 @@ namespace VULKAN_HPP_NAMESPACE # if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PresentFrameTokenGGP( GgpFrameToken frameToken_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , frameToken( frameToken_ ) + : pNext{ pNext_ } + , frameToken{ frameToken_ } { } @@ -97284,9 +99993,9 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PresentIdKHR( uint32_t swapchainCount_ = {}, const uint64_t * pPresentIds_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , swapchainCount( swapchainCount_ ) - , pPresentIds( pPresentIds_ ) + : pNext{ pNext_ } + , swapchainCount{ swapchainCount_ } + , pPresentIds{ pPresentIds_ } { } @@ -97407,13 +100116,13 @@ namespace VULKAN_HPP_NAMESPACE const uint32_t * pImageIndices_ = {}, VULKAN_HPP_NAMESPACE::Result * pResults_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , waitSemaphoreCount( waitSemaphoreCount_ ) - , pWaitSemaphores( pWaitSemaphores_ ) - , swapchainCount( swapchainCount_ ) - , pSwapchains( pSwapchains_ ) - , pImageIndices( pImageIndices_ ) - , pResults( pResults_ ) + : pNext{ pNext_ } + , waitSemaphoreCount{ waitSemaphoreCount_ } + , pWaitSemaphores{ pWaitSemaphores_ } + , swapchainCount{ swapchainCount_ } + , pSwapchains{ pSwapchains_ } + , pImageIndices{ pImageIndices_ } + , pResults{ pResults_ } { } @@ -97620,9 +100329,9 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR RectLayerKHR( VULKAN_HPP_NAMESPACE::Offset2D offset_ = {}, VULKAN_HPP_NAMESPACE::Extent2D extent_ = {}, uint32_t layer_ = {} ) VULKAN_HPP_NOEXCEPT - : offset( offset_ ) - , extent( extent_ ) - , layer( layer_ ) + : offset{ offset_ } + , extent{ extent_ } + , layer{ layer_ } { } @@ -97713,8 +100422,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PresentRegionKHR( uint32_t rectangleCount_ = {}, const VULKAN_HPP_NAMESPACE::RectLayerKHR * 
pRectangles_ = {} ) VULKAN_HPP_NOEXCEPT - : rectangleCount( rectangleCount_ ) - , pRectangles( pRectangles_ ) + : rectangleCount{ rectangleCount_ } + , pRectangles{ pRectangles_ } { } @@ -97818,9 +100527,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR PresentRegionsKHR( uint32_t swapchainCount_ = {}, const VULKAN_HPP_NAMESPACE::PresentRegionKHR * pRegions_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , swapchainCount( swapchainCount_ ) - , pRegions( pRegions_ ) + : pNext{ pNext_ } + , swapchainCount{ swapchainCount_ } + , pRegions{ pRegions_ } { } @@ -97934,8 +100643,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PresentTimeGOOGLE( uint32_t presentID_ = {}, uint64_t desiredPresentTime_ = {} ) VULKAN_HPP_NOEXCEPT - : presentID( presentID_ ) - , desiredPresentTime( desiredPresentTime_ ) + : presentID{ presentID_ } + , desiredPresentTime{ desiredPresentTime_ } { } @@ -98022,9 +100731,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR PresentTimesInfoGOOGLE( uint32_t swapchainCount_ = {}, const VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE * pTimes_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , swapchainCount( swapchainCount_ ) - , pTimes( pTimes_ ) + : pNext{ pNext_ } + , swapchainCount{ swapchainCount_ } + , pTimes{ pTimes_ } { } @@ -98145,8 +100854,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR PrivateDataSlotCreateInfo( VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateFlags flags_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) + : pNext{ pNext_ } + , flags{ flags_ } { } @@ -98243,8 +100952,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ProtectedSubmitInfo( VULKAN_HPP_NAMESPACE::Bool32 protectedSubmit_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , protectedSubmit( protectedSubmit_ ) + : pNext{ pNext_ } + , protectedSubmit{ protectedSubmit_ } { } @@ -98343,12 +101052,12 @@ namespace VULKAN_HPP_NAMESPACE uint32_t size_ = {}, const void * pValues_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , layout( layout_ ) - , stageFlags( stageFlags_ ) - , offset( offset_ ) - , size( size_ ) - , pValues( pValues_ ) + : pNext{ pNext_ } + , layout{ layout_ } + , stageFlags{ stageFlags_ } + , offset{ offset_ } + , size{ size_ } + , pValues{ pValues_ } { } @@ -98513,15 +101222,15 @@ namespace VULKAN_HPP_NAMESPACE const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo * pBufferInfo_ = {}, const VULKAN_HPP_NAMESPACE::BufferView * pTexelBufferView_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , dstSet( dstSet_ ) - , dstBinding( dstBinding_ ) - , dstArrayElement( dstArrayElement_ ) - , descriptorCount( descriptorCount_ ) - , descriptorType( descriptorType_ ) - , pImageInfo( pImageInfo_ ) - , pBufferInfo( pBufferInfo_ ) - , pTexelBufferView( pTexelBufferView_ ) + : pNext{ pNext_ } + , dstSet{ dstSet_ } + , dstBinding{ dstBinding_ } + , dstArrayElement{ dstArrayElement_ } + , descriptorCount{ descriptorCount_ } + , descriptorType{ descriptorType_ } + , pImageInfo{ pImageInfo_ } + , pBufferInfo{ pBufferInfo_ } + , pTexelBufferView{ pTexelBufferView_ } { } @@ -98741,12 +101450,12 @@ namespace VULKAN_HPP_NAMESPACE uint32_t descriptorWriteCount_ = {}, const VULKAN_HPP_NAMESPACE::WriteDescriptorSet * pDescriptorWrites_ = 
{}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , stageFlags( stageFlags_ ) - , layout( layout_ ) - , set( set_ ) - , descriptorWriteCount( descriptorWriteCount_ ) - , pDescriptorWrites( pDescriptorWrites_ ) + : pNext{ pNext_ } + , stageFlags{ stageFlags_ } + , layout{ layout_ } + , set{ set_ } + , descriptorWriteCount{ descriptorWriteCount_ } + , pDescriptorWrites{ pDescriptorWrites_ } { } @@ -98907,11 +101616,11 @@ namespace VULKAN_HPP_NAMESPACE uint32_t set_ = {}, const void * pData_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , descriptorUpdateTemplate( descriptorUpdateTemplate_ ) - , layout( layout_ ) - , set( set_ ) - , pData( pData_ ) + : pNext{ pNext_ } + , descriptorUpdateTemplate{ descriptorUpdateTemplate_ } + , layout{ layout_ } + , set{ set_ } + , pData{ pData_ } { } @@ -99034,8 +101743,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR QueryLowLatencySupportNV( void * pQueriedLowLatencyData_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , pQueriedLowLatencyData( pQueriedLowLatencyData_ ) + : pNext{ pNext_ } + , pQueriedLowLatencyData{ pQueriedLowLatencyData_ } { } @@ -99134,11 +101843,11 @@ namespace VULKAN_HPP_NAMESPACE uint32_t queryCount_ = {}, VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , queryType( queryType_ ) - , queryCount( queryCount_ ) - , pipelineStatistics( pipelineStatistics_ ) + : pNext{ pNext_ } + , flags{ flags_ } + , queryType{ queryType_ } + , queryCount{ queryCount_ } + , pipelineStatistics{ pipelineStatistics_ } { } @@ -99263,10 +101972,10 @@ namespace VULKAN_HPP_NAMESPACE uint32_t counterIndexCount_ = {}, const uint32_t * pCounterIndices_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , queueFamilyIndex( queueFamilyIndex_ ) - , counterIndexCount( counterIndexCount_ ) - , pCounterIndices( pCounterIndices_ ) + : pNext{ pNext_ } + , queueFamilyIndex{ queueFamilyIndex_ } + , counterIndexCount{ counterIndexCount_ } + , pCounterIndices{ pCounterIndices_ } { } @@ -99400,8 +102109,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR QueryPoolPerformanceQueryCreateInfoINTEL( VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL performanceCountersSampling_ = VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL::eManual, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , performanceCountersSampling( performanceCountersSampling_ ) + : pNext{ pNext_ } + , performanceCountersSampling{ performanceCountersSampling_ } { } @@ -99500,8 +102209,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR QueryPoolVideoEncodeFeedbackCreateInfoKHR( VULKAN_HPP_NAMESPACE::VideoEncodeFeedbackFlagsKHR encodeFeedbackFlags_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , encodeFeedbackFlags( encodeFeedbackFlags_ ) + : pNext{ pNext_ } + , encodeFeedbackFlags{ encodeFeedbackFlags_ } { } @@ -99598,8 +102307,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR QueueFamilyCheckpointProperties2NV( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 checkpointExecutionStageMask_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , checkpointExecutionStageMask( checkpointExecutionStageMask_ ) + : pNext{ pNext_ } + , 
checkpointExecutionStageMask{ checkpointExecutionStageMask_ } { } @@ -99681,8 +102390,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR QueueFamilyCheckpointPropertiesNV( VULKAN_HPP_NAMESPACE::PipelineStageFlags checkpointExecutionStageMask_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , checkpointExecutionStageMask( checkpointExecutionStageMask_ ) + : pNext{ pNext_ } + , checkpointExecutionStageMask{ checkpointExecutionStageMask_ } { } @@ -99782,9 +102491,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow, VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow } }, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , priorityCount( priorityCount_ ) - , priorities( priorities_ ) + : pNext{ pNext_ } + , priorityCount{ priorityCount_ } + , priorities{ priorities_ } { } @@ -99795,16 +102504,6 @@ namespace VULKAN_HPP_NAMESPACE { } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - QueueFamilyGlobalPriorityPropertiesKHR( VULKAN_HPP_NAMESPACE::ArrayProxy const & priorities_, - void * pNext_ = nullptr ) - : pNext( pNext_ ), priorityCount( std::min( static_cast( priorities_.size() ), VK_MAX_GLOBAL_PRIORITY_SIZE_KHR ) ) - { - VULKAN_HPP_ASSERT( priorities_.size() < VK_MAX_GLOBAL_PRIORITY_SIZE_KHR ); - memcpy( priorities, priorities_.data(), priorityCount * sizeof( VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR ) ); - } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - QueueFamilyGlobalPriorityPropertiesKHR & operator=( QueueFamilyGlobalPriorityPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ @@ -99893,10 +102592,10 @@ namespace VULKAN_HPP_NAMESPACE uint32_t queueCount_ = {}, uint32_t timestampValidBits_ = {}, VULKAN_HPP_NAMESPACE::Extent3D minImageTransferGranularity_ = {} ) VULKAN_HPP_NOEXCEPT - : queueFlags( queueFlags_ ) - , queueCount( queueCount_ ) - , timestampValidBits( timestampValidBits_ ) - , minImageTransferGranularity( minImageTransferGranularity_ ) + : queueFlags{ queueFlags_ } + , queueCount{ queueCount_ } + , timestampValidBits{ timestampValidBits_ } + , minImageTransferGranularity{ minImageTransferGranularity_ } { } @@ -99974,8 +102673,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR QueueFamilyProperties2( VULKAN_HPP_NAMESPACE::QueueFamilyProperties queueFamilyProperties_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , queueFamilyProperties( queueFamilyProperties_ ) + : pNext{ pNext_ } + , queueFamilyProperties{ queueFamilyProperties_ } { } @@ -100059,8 +102758,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR QueueFamilyQueryResultStatusPropertiesKHR( VULKAN_HPP_NAMESPACE::Bool32 queryResultStatusSupport_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , queryResultStatusSupport( queryResultStatusSupport_ ) + : pNext{ pNext_ } + , queryResultStatusSupport{ queryResultStatusSupport_ } { } @@ -100142,8 +102841,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR QueueFamilyVideoPropertiesKHR( VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagsKHR videoCodecOperations_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , videoCodecOperations( videoCodecOperations_ ) + : pNext{ pNext_ } + , videoCodecOperations{ videoCodecOperations_ } { } @@ -100231,13 +102930,13 
@@ namespace VULKAN_HPP_NAMESPACE uint32_t intersectionShader_ = VULKAN_HPP_NAMESPACE::ShaderUnusedKHR, const void * pShaderGroupCaptureReplayHandle_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , type( type_ ) - , generalShader( generalShader_ ) - , closestHitShader( closestHitShader_ ) - , anyHitShader( anyHitShader_ ) - , intersectionShader( intersectionShader_ ) - , pShaderGroupCaptureReplayHandle( pShaderGroupCaptureReplayHandle_ ) + : pNext{ pNext_ } + , type{ type_ } + , generalShader{ generalShader_ } + , closestHitShader{ closestHitShader_ } + , anyHitShader{ anyHitShader_ } + , intersectionShader{ intersectionShader_ } + , pShaderGroupCaptureReplayHandle{ pShaderGroupCaptureReplayHandle_ } { } @@ -100379,9 +103078,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR RayTracingPipelineInterfaceCreateInfoKHR( uint32_t maxPipelineRayPayloadSize_ = {}, uint32_t maxPipelineRayHitAttributeSize_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , maxPipelineRayPayloadSize( maxPipelineRayPayloadSize_ ) - , maxPipelineRayHitAttributeSize( maxPipelineRayHitAttributeSize_ ) + : pNext{ pNext_ } + , maxPipelineRayPayloadSize{ maxPipelineRayPayloadSize_ } + , maxPipelineRayHitAttributeSize{ maxPipelineRayHitAttributeSize_ } { } @@ -100497,19 +103196,19 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, int32_t basePipelineIndex_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , stageCount( stageCount_ ) - , pStages( pStages_ ) - , groupCount( groupCount_ ) - , pGroups( pGroups_ ) - , maxPipelineRayRecursionDepth( maxPipelineRayRecursionDepth_ ) - , pLibraryInfo( pLibraryInfo_ ) - , pLibraryInterface( pLibraryInterface_ ) - , pDynamicState( pDynamicState_ ) - , layout( layout_ ) - , basePipelineHandle( basePipelineHandle_ ) - , basePipelineIndex( basePipelineIndex_ ) + : pNext{ pNext_ } + , flags{ flags_ } + , stageCount{ stageCount_ } + , pStages{ pStages_ } + , groupCount{ groupCount_ } + , pGroups{ pGroups_ } + , maxPipelineRayRecursionDepth{ maxPipelineRayRecursionDepth_ } + , pLibraryInfo{ pLibraryInfo_ } + , pLibraryInterface{ pLibraryInterface_ } + , pDynamicState{ pDynamicState_ } + , layout{ layout_ } + , basePipelineHandle{ basePipelineHandle_ } + , basePipelineIndex{ basePipelineIndex_ } { } @@ -100771,12 +103470,12 @@ namespace VULKAN_HPP_NAMESPACE uint32_t anyHitShader_ = VULKAN_HPP_NAMESPACE::ShaderUnusedNV, uint32_t intersectionShader_ = VULKAN_HPP_NAMESPACE::ShaderUnusedNV, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , type( type_ ) - , generalShader( generalShader_ ) - , closestHitShader( closestHitShader_ ) - , anyHitShader( anyHitShader_ ) - , intersectionShader( intersectionShader_ ) + : pNext{ pNext_ } + , type{ type_ } + , generalShader{ generalShader_ } + , closestHitShader{ closestHitShader_ } + , anyHitShader{ anyHitShader_ } + , intersectionShader{ intersectionShader_ } { } @@ -100915,16 +103614,16 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, int32_t basePipelineIndex_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , stageCount( stageCount_ ) - , pStages( pStages_ ) - , groupCount( groupCount_ ) - , pGroups( pGroups_ ) - , maxRecursionDepth( maxRecursionDepth_ ) - , layout( layout_ ) - , basePipelineHandle( basePipelineHandle_ ) - , basePipelineIndex( basePipelineIndex_ ) + : pNext{ 
pNext_ } + , flags{ flags_ } + , stageCount{ stageCount_ } + , pStages{ pStages_ } + , groupCount{ groupCount_ } + , pGroups{ pGroups_ } + , maxRecursionDepth{ maxRecursionDepth_ } + , layout{ layout_ } + , basePipelineHandle{ basePipelineHandle_ } + , basePipelineIndex{ basePipelineIndex_ } { } @@ -101129,7 +103828,7 @@ namespace VULKAN_HPP_NAMESPACE using NativeType = VkRefreshCycleDurationGOOGLE; #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR RefreshCycleDurationGOOGLE( uint64_t refreshDuration_ = {} ) VULKAN_HPP_NOEXCEPT : refreshDuration( refreshDuration_ ) {} + VULKAN_HPP_CONSTEXPR RefreshCycleDurationGOOGLE( uint64_t refreshDuration_ = {} ) VULKAN_HPP_NOEXCEPT : refreshDuration{ refreshDuration_ } {} VULKAN_HPP_CONSTEXPR RefreshCycleDurationGOOGLE( RefreshCycleDurationGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default; @@ -101191,6 +103890,102 @@ namespace VULKAN_HPP_NAMESPACE uint64_t refreshDuration = {}; }; + struct ReleaseCapturedPipelineDataInfoKHR + { + using NativeType = VkReleaseCapturedPipelineDataInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eReleaseCapturedPipelineDataInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR ReleaseCapturedPipelineDataInfoKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pipeline{ pipeline_ } + { + } + + VULKAN_HPP_CONSTEXPR ReleaseCapturedPipelineDataInfoKHR( ReleaseCapturedPipelineDataInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + ReleaseCapturedPipelineDataInfoKHR( VkReleaseCapturedPipelineDataInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : ReleaseCapturedPipelineDataInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + ReleaseCapturedPipelineDataInfoKHR & operator=( ReleaseCapturedPipelineDataInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + ReleaseCapturedPipelineDataInfoKHR & operator=( VkReleaseCapturedPipelineDataInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 ReleaseCapturedPipelineDataInfoKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ReleaseCapturedPipelineDataInfoKHR & setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT + { + pipeline = pipeline_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkReleaseCapturedPipelineDataInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkReleaseCapturedPipelineDataInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pipeline ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( ReleaseCapturedPipelineDataInfoKHR const & ) const = default; +#else + bool operator==( ReleaseCapturedPipelineDataInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipeline == rhs.pipeline ); +# endif + } + + bool operator!=( 
ReleaseCapturedPipelineDataInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eReleaseCapturedPipelineDataInfoKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Pipeline pipeline = {}; + }; + + template <> + struct CppType + { + using Type = ReleaseCapturedPipelineDataInfoKHR; + }; + struct ReleaseSwapchainImagesInfoEXT { using NativeType = VkReleaseSwapchainImagesInfoEXT; @@ -101203,10 +103998,10 @@ namespace VULKAN_HPP_NAMESPACE uint32_t imageIndexCount_ = {}, const uint32_t * pImageIndices_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , swapchain( swapchain_ ) - , imageIndexCount( imageIndexCount_ ) - , pImageIndices( pImageIndices_ ) + : pNext{ pNext_ } + , swapchain{ swapchain_ } + , imageIndexCount{ imageIndexCount_ } + , pImageIndices{ pImageIndices_ } { } @@ -101340,9 +104135,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR RenderPassAttachmentBeginInfo( uint32_t attachmentCount_ = {}, const VULKAN_HPP_NAMESPACE::ImageView * pAttachments_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , attachmentCount( attachmentCount_ ) - , pAttachments( pAttachments_ ) + : pNext{ pNext_ } + , attachmentCount{ attachmentCount_ } + , pAttachments{ pAttachments_ } { } @@ -101469,12 +104264,12 @@ namespace VULKAN_HPP_NAMESPACE uint32_t clearValueCount_ = {}, const VULKAN_HPP_NAMESPACE::ClearValue * pClearValues_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , renderPass( renderPass_ ) - , framebuffer( framebuffer_ ) - , renderArea( renderArea_ ) - , clearValueCount( clearValueCount_ ) - , pClearValues( pClearValues_ ) + : pNext{ pNext_ } + , renderPass{ renderPass_ } + , framebuffer{ framebuffer_ } + , renderArea{ renderArea_ } + , clearValueCount{ clearValueCount_ } + , pClearValues{ pClearValues_ } { } @@ -101635,16 +104430,16 @@ namespace VULKAN_HPP_NAMESPACE const VULKAN_HPP_NAMESPACE::AttachmentReference * pDepthStencilAttachment_ = {}, uint32_t preserveAttachmentCount_ = {}, const uint32_t * pPreserveAttachments_ = {} ) VULKAN_HPP_NOEXCEPT - : flags( flags_ ) - , pipelineBindPoint( pipelineBindPoint_ ) - , inputAttachmentCount( inputAttachmentCount_ ) - , pInputAttachments( pInputAttachments_ ) - , colorAttachmentCount( colorAttachmentCount_ ) - , pColorAttachments( pColorAttachments_ ) - , pResolveAttachments( pResolveAttachments_ ) - , pDepthStencilAttachment( pDepthStencilAttachment_ ) - , preserveAttachmentCount( preserveAttachmentCount_ ) - , pPreserveAttachments( pPreserveAttachments_ ) + : flags{ flags_ } + , pipelineBindPoint{ pipelineBindPoint_ } + , inputAttachmentCount{ inputAttachmentCount_ } + , pInputAttachments{ pInputAttachments_ } + , colorAttachmentCount{ colorAttachmentCount_ } + , pColorAttachments{ pColorAttachments_ } + , pResolveAttachments{ pResolveAttachments_ } + , pDepthStencilAttachment{ pDepthStencilAttachment_ } + , preserveAttachmentCount{ preserveAttachmentCount_ } + , pPreserveAttachments{ pPreserveAttachments_ } { } @@ -101886,13 +104681,13 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {}, VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {}, VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ = {} ) VULKAN_HPP_NOEXCEPT - : srcSubpass( srcSubpass_ ) - , dstSubpass( dstSubpass_ ) - , srcStageMask( srcStageMask_ ) - , dstStageMask( dstStageMask_ ) - , srcAccessMask( srcAccessMask_ ) - , 
dstAccessMask( dstAccessMask_ ) - , dependencyFlags( dependencyFlags_ ) + : srcSubpass{ srcSubpass_ } + , dstSubpass{ dstSubpass_ } + , srcStageMask{ srcStageMask_ } + , dstStageMask{ dstStageMask_ } + , srcAccessMask{ srcAccessMask_ } + , dstAccessMask{ dstAccessMask_ } + , dependencyFlags{ dependencyFlags_ } { } @@ -102027,14 +104822,14 @@ namespace VULKAN_HPP_NAMESPACE uint32_t dependencyCount_ = {}, const VULKAN_HPP_NAMESPACE::SubpassDependency * pDependencies_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , attachmentCount( attachmentCount_ ) - , pAttachments( pAttachments_ ) - , subpassCount( subpassCount_ ) - , pSubpasses( pSubpasses_ ) - , dependencyCount( dependencyCount_ ) - , pDependencies( pDependencies_ ) + : pNext{ pNext_ } + , flags{ flags_ } + , attachmentCount{ attachmentCount_ } + , pAttachments{ pAttachments_ } + , subpassCount{ subpassCount_ } + , pSubpasses{ pSubpasses_ } + , dependencyCount{ dependencyCount_ } + , pDependencies{ pDependencies_ } { } @@ -102240,18 +105035,18 @@ namespace VULKAN_HPP_NAMESPACE uint32_t preserveAttachmentCount_ = {}, const uint32_t * pPreserveAttachments_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , pipelineBindPoint( pipelineBindPoint_ ) - , viewMask( viewMask_ ) - , inputAttachmentCount( inputAttachmentCount_ ) - , pInputAttachments( pInputAttachments_ ) - , colorAttachmentCount( colorAttachmentCount_ ) - , pColorAttachments( pColorAttachments_ ) - , pResolveAttachments( pResolveAttachments_ ) - , pDepthStencilAttachment( pDepthStencilAttachment_ ) - , preserveAttachmentCount( preserveAttachmentCount_ ) - , pPreserveAttachments( pPreserveAttachments_ ) + : pNext{ pNext_ } + , flags{ flags_ } + , pipelineBindPoint{ pipelineBindPoint_ } + , viewMask{ viewMask_ } + , inputAttachmentCount{ inputAttachmentCount_ } + , pInputAttachments{ pInputAttachments_ } + , colorAttachmentCount{ colorAttachmentCount_ } + , pColorAttachments{ pColorAttachments_ } + , pResolveAttachments{ pResolveAttachments_ } + , pDepthStencilAttachment{ pDepthStencilAttachment_ } + , preserveAttachmentCount{ preserveAttachmentCount_ } + , pPreserveAttachments{ pPreserveAttachments_ } { } @@ -102533,15 +105328,15 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ = {}, int32_t viewOffset_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , srcSubpass( srcSubpass_ ) - , dstSubpass( dstSubpass_ ) - , srcStageMask( srcStageMask_ ) - , dstStageMask( dstStageMask_ ) - , srcAccessMask( srcAccessMask_ ) - , dstAccessMask( dstAccessMask_ ) - , dependencyFlags( dependencyFlags_ ) - , viewOffset( viewOffset_ ) + : pNext{ pNext_ } + , srcSubpass{ srcSubpass_ } + , dstSubpass{ dstSubpass_ } + , srcStageMask{ srcStageMask_ } + , dstStageMask{ dstStageMask_ } + , srcAccessMask{ srcAccessMask_ } + , dstAccessMask{ dstAccessMask_ } + , dependencyFlags{ dependencyFlags_ } + , viewOffset{ viewOffset_ } { } @@ -102704,16 +105499,16 @@ namespace VULKAN_HPP_NAMESPACE uint32_t correlatedViewMaskCount_ = {}, const uint32_t * pCorrelatedViewMasks_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , attachmentCount( attachmentCount_ ) - , pAttachments( pAttachments_ ) - , subpassCount( subpassCount_ ) - , pSubpasses( pSubpasses_ ) - , dependencyCount( dependencyCount_ ) - , pDependencies( pDependencies_ ) - , correlatedViewMaskCount( correlatedViewMaskCount_ ) - , 
pCorrelatedViewMasks( pCorrelatedViewMasks_ ) + : pNext{ pNext_ } + , flags{ flags_ } + , attachmentCount{ attachmentCount_ } + , pAttachments{ pAttachments_ } + , subpassCount{ subpassCount_ } + , pSubpasses{ pSubpasses_ } + , dependencyCount{ dependencyCount_ } + , pDependencies{ pDependencies_ } + , correlatedViewMaskCount{ correlatedViewMaskCount_ } + , pCorrelatedViewMasks{ pCorrelatedViewMasks_ } { } @@ -102950,8 +105745,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR RenderPassCreationControlEXT( VULKAN_HPP_NAMESPACE::Bool32 disallowMerging_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , disallowMerging( disallowMerging_ ) + : pNext{ pNext_ } + , disallowMerging{ disallowMerging_ } { } @@ -103043,7 +105838,7 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR RenderPassCreationFeedbackInfoEXT( uint32_t postMergeSubpassCount_ = {} ) VULKAN_HPP_NOEXCEPT - : postMergeSubpassCount( postMergeSubpassCount_ ) + : postMergeSubpassCount{ postMergeSubpassCount_ } { } @@ -103117,8 +105912,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR RenderPassCreationFeedbackCreateInfoEXT( VULKAN_HPP_NAMESPACE::RenderPassCreationFeedbackInfoEXT * pRenderPassFeedback_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , pRenderPassFeedback( pRenderPassFeedback_ ) + : pNext{ pNext_ } + , pRenderPassFeedback{ pRenderPassFeedback_ } { } @@ -103215,8 +106010,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR RenderPassFragmentDensityMapCreateInfoEXT( VULKAN_HPP_NAMESPACE::AttachmentReference fragmentDensityMapAttachment_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , fragmentDensityMapAttachment( fragmentDensityMapAttachment_ ) + : pNext{ pNext_ } + , fragmentDensityMapAttachment{ fragmentDensityMapAttachment_ } { } @@ -103314,9 +106109,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR RenderPassInputAttachmentAspectCreateInfo( uint32_t aspectReferenceCount_ = {}, const VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference * pAspectReferences_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , aspectReferenceCount( aspectReferenceCount_ ) - , pAspectReferences( pAspectReferences_ ) + : pNext{ pNext_ } + , aspectReferenceCount{ aspectReferenceCount_ } + , pAspectReferences{ pAspectReferences_ } { } @@ -103450,13 +106245,13 @@ namespace VULKAN_HPP_NAMESPACE uint32_t correlationMaskCount_ = {}, const uint32_t * pCorrelationMasks_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , subpassCount( subpassCount_ ) - , pViewMasks( pViewMasks_ ) - , dependencyCount( dependencyCount_ ) - , pViewOffsets( pViewOffsets_ ) - , correlationMaskCount( correlationMaskCount_ ) - , pCorrelationMasks( pCorrelationMasks_ ) + : pNext{ pNext_ } + , subpassCount{ subpassCount_ } + , pViewMasks{ pViewMasks_ } + , dependencyCount{ dependencyCount_ } + , pViewOffsets{ pViewOffsets_ } + , correlationMaskCount{ correlationMaskCount_ } + , pCorrelationMasks{ pCorrelationMasks_ } { } @@ -103639,8 +106434,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR SubpassSampleLocationsEXT( uint32_t subpassIndex_ = {}, VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo_ = {} ) VULKAN_HPP_NOEXCEPT - 
: subpassIndex( subpassIndex_ ) - , sampleLocationsInfo( sampleLocationsInfo_ ) + : subpassIndex{ subpassIndex_ } + , sampleLocationsInfo{ sampleLocationsInfo_ } { } @@ -103734,11 +106529,11 @@ namespace VULKAN_HPP_NAMESPACE uint32_t postSubpassSampleLocationsCount_ = {}, const VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT * pPostSubpassSampleLocations_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , attachmentInitialSampleLocationsCount( attachmentInitialSampleLocationsCount_ ) - , pAttachmentInitialSampleLocations( pAttachmentInitialSampleLocations_ ) - , postSubpassSampleLocationsCount( postSubpassSampleLocationsCount_ ) - , pPostSubpassSampleLocations( pPostSubpassSampleLocations_ ) + : pNext{ pNext_ } + , attachmentInitialSampleLocationsCount{ attachmentInitialSampleLocationsCount_ } + , pAttachmentInitialSampleLocations{ pAttachmentInitialSampleLocations_ } + , postSubpassSampleLocationsCount{ postSubpassSampleLocationsCount_ } + , pPostSubpassSampleLocations{ pPostSubpassSampleLocations_ } { } @@ -103902,8 +106697,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR RenderPassStripeInfoARM( VULKAN_HPP_NAMESPACE::Rect2D stripeArea_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , stripeArea( stripeArea_ ) + : pNext{ pNext_ } + , stripeArea{ stripeArea_ } { } @@ -104000,9 +106795,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR RenderPassStripeBeginInfoARM( uint32_t stripeInfoCount_ = {}, const VULKAN_HPP_NAMESPACE::RenderPassStripeInfoARM * pStripeInfos_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , stripeInfoCount( stripeInfoCount_ ) - , pStripeInfos( pStripeInfos_ ) + : pNext{ pNext_ } + , stripeInfoCount{ stripeInfoCount_ } + , pStripeInfos{ pStripeInfos_ } { } @@ -104128,11 +106923,11 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask_ = {}, uint32_t deviceIndex_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , semaphore( semaphore_ ) - , value( value_ ) - , stageMask( stageMask_ ) - , deviceIndex( deviceIndex_ ) + : pNext{ pNext_ } + , semaphore{ semaphore_ } + , value{ value_ } + , stageMask{ stageMask_ } + , deviceIndex{ deviceIndex_ } { } @@ -104257,9 +107052,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR RenderPassStripeSubmitInfoARM( uint32_t stripeSemaphoreInfoCount_ = {}, const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo * pStripeSemaphoreInfos_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , stripeSemaphoreInfoCount( stripeSemaphoreInfoCount_ ) - , pStripeSemaphoreInfos( pStripeSemaphoreInfos_ ) + : pNext{ pNext_ } + , stripeSemaphoreInfoCount{ stripeSemaphoreInfoCount_ } + , pStripeSemaphoreInfos{ pStripeSemaphoreInfos_ } { } @@ -104383,9 +107178,9 @@ namespace VULKAN_HPP_NAMESPACE RenderPassSubpassFeedbackInfoEXT( VULKAN_HPP_NAMESPACE::SubpassMergeStatusEXT subpassMergeStatus_ = VULKAN_HPP_NAMESPACE::SubpassMergeStatusEXT::eMerged, std::array const & description_ = {}, uint32_t postMergeIndex_ = {} ) VULKAN_HPP_NOEXCEPT - : subpassMergeStatus( subpassMergeStatus_ ) - , description( description_ ) - , postMergeIndex( postMergeIndex_ ) + : subpassMergeStatus{ subpassMergeStatus_ } + , description{ description_ } + , postMergeIndex{ postMergeIndex_ } { } @@ -104396,21 +107191,6 @@ namespace VULKAN_HPP_NAMESPACE { } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - RenderPassSubpassFeedbackInfoEXT( 
VULKAN_HPP_NAMESPACE::SubpassMergeStatusEXT subpassMergeStatus_, - std::string const & description_, - uint32_t postMergeIndex_ = {} ) - : subpassMergeStatus( subpassMergeStatus_ ), postMergeIndex( postMergeIndex_ ) - { - VULKAN_HPP_ASSERT( description_.size() < VK_MAX_DESCRIPTION_SIZE ); -# if defined( WIN32 ) - strncpy_s( description, VK_MAX_DESCRIPTION_SIZE, description_.data(), description_.size() ); -# else - strncpy( description, description_.data(), std::min( VK_MAX_DESCRIPTION_SIZE, description_.size() ) ); -# endif - } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - RenderPassSubpassFeedbackInfoEXT & operator=( RenderPassSubpassFeedbackInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ @@ -104483,8 +107263,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR_14 RenderPassSubpassFeedbackCreateInfoEXT( VULKAN_HPP_NAMESPACE::RenderPassSubpassFeedbackInfoEXT * pSubpassFeedback_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , pSubpassFeedback( pSubpassFeedback_ ) + : pNext{ pNext_ } + , pSubpassFeedback{ pSubpassFeedback_ } { } @@ -104582,8 +107362,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR RenderPassTransformBeginInfoQCOM( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , transform( transform_ ) + : pNext{ pNext_ } + , transform{ transform_ } { } @@ -104683,12 +107463,12 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , viewMask( viewMask_ ) - , colorAttachmentCount( colorAttachmentCount_ ) - , pColorAttachmentFormats( pColorAttachmentFormats_ ) - , depthAttachmentFormat( depthAttachmentFormat_ ) - , stencilAttachmentFormat( stencilAttachmentFormat_ ) + : pNext{ pNext_ } + , viewMask{ viewMask_ } + , colorAttachmentCount{ colorAttachmentCount_ } + , pColorAttachmentFormats{ pColorAttachmentFormats_ } + , depthAttachmentFormat{ depthAttachmentFormat_ } + , stencilAttachmentFormat{ stencilAttachmentFormat_ } { } @@ -104854,15 +107634,15 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp_ = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore, VULKAN_HPP_NAMESPACE::ClearValue clearValue_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , imageView( imageView_ ) - , imageLayout( imageLayout_ ) - , resolveMode( resolveMode_ ) - , resolveImageView( resolveImageView_ ) - , resolveImageLayout( resolveImageLayout_ ) - , loadOp( loadOp_ ) - , storeOp( storeOp_ ) - , clearValue( clearValue_ ) + : pNext{ pNext_ } + , imageView{ imageView_ } + , imageLayout{ imageLayout_ } + , resolveMode{ resolveMode_ } + , resolveImageView{ resolveImageView_ } + , resolveImageLayout{ resolveImageLayout_ } + , loadOp{ loadOp_ } + , storeOp{ storeOp_ } + , clearValue{ clearValue_ } { } @@ -105001,9 +107781,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR RenderingAttachmentLocationInfoKHR( uint32_t colorAttachmentCount_ = {}, const uint32_t * pColorAttachmentLocations_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , colorAttachmentCount( colorAttachmentCount_ ) - , 
pColorAttachmentLocations( pColorAttachmentLocations_ ) + : pNext{ pNext_ } + , colorAttachmentCount{ colorAttachmentCount_ } + , pColorAttachmentLocations{ pColorAttachmentLocations_ } { } @@ -105129,9 +107909,9 @@ namespace VULKAN_HPP_NAMESPACE RenderingFragmentDensityMapAttachmentInfoEXT( VULKAN_HPP_NAMESPACE::ImageView imageView_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , imageView( imageView_ ) - , imageLayout( imageLayout_ ) + : pNext{ pNext_ } + , imageView{ imageView_ } + , imageLayout{ imageLayout_ } { } @@ -105240,10 +108020,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, VULKAN_HPP_NAMESPACE::Extent2D shadingRateAttachmentTexelSize_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , imageView( imageView_ ) - , imageLayout( imageLayout_ ) - , shadingRateAttachmentTexelSize( shadingRateAttachmentTexelSize_ ) + : pNext{ pNext_ } + , imageView{ imageView_ } + , imageLayout{ imageLayout_ } + , shadingRateAttachmentTexelSize{ shadingRateAttachmentTexelSize_ } { } @@ -105367,15 +108147,15 @@ namespace VULKAN_HPP_NAMESPACE const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pDepthAttachment_ = {}, const VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo * pStencilAttachment_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , renderArea( renderArea_ ) - , layerCount( layerCount_ ) - , viewMask( viewMask_ ) - , colorAttachmentCount( colorAttachmentCount_ ) - , pColorAttachments( pColorAttachments_ ) - , pDepthAttachment( pDepthAttachment_ ) - , pStencilAttachment( pStencilAttachment_ ) + : pNext{ pNext_ } + , flags{ flags_ } + , renderArea{ renderArea_ } + , layerCount{ layerCount_ } + , viewMask{ viewMask_ } + , colorAttachmentCount{ colorAttachmentCount_ } + , pColorAttachments{ pColorAttachments_ } + , pDepthAttachment{ pDepthAttachment_ } + , pStencilAttachment{ pStencilAttachment_ } { } @@ -105566,11 +108346,11 @@ namespace VULKAN_HPP_NAMESPACE const uint32_t * pDepthInputAttachmentIndex_ = {}, const uint32_t * pStencilInputAttachmentIndex_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , colorAttachmentCount( colorAttachmentCount_ ) - , pColorAttachmentInputIndices( pColorAttachmentInputIndices_ ) - , pDepthInputAttachmentIndex( pDepthInputAttachmentIndex_ ) - , pStencilInputAttachmentIndex( pStencilInputAttachmentIndex_ ) + : pNext{ pNext_ } + , colorAttachmentCount{ colorAttachmentCount_ } + , pColorAttachmentInputIndices{ pColorAttachmentInputIndices_ } + , pDepthInputAttachmentIndex{ pDepthInputAttachmentIndex_ } + , pStencilInputAttachmentIndex{ pStencilInputAttachmentIndex_ } { } @@ -105726,13 +108506,13 @@ namespace VULKAN_HPP_NAMESPACE uint32_t regionCount_ = {}, const VULKAN_HPP_NAMESPACE::ImageResolve2 * pRegions_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , srcImage( srcImage_ ) - , srcImageLayout( srcImageLayout_ ) - , dstImage( dstImage_ ) - , dstImageLayout( dstImageLayout_ ) - , regionCount( regionCount_ ) - , pRegions( pRegions_ ) + : pNext{ pNext_ } + , srcImage{ srcImage_ } + , srcImageLayout{ srcImageLayout_ } + , dstImage{ dstImage_ } + , dstImageLayout{ dstImageLayout_ } + , regionCount{ regionCount_ } + , pRegions{ pRegions_ } { } @@ -105900,9 +108680,9 @@ namespace VULKAN_HPP_NAMESPACE 
VULKAN_HPP_NAMESPACE::Extent2D windowExtent_ = {}, VULKAN_HPP_NAMESPACE::BlockMatchWindowCompareModeQCOM windowCompareMode_ = VULKAN_HPP_NAMESPACE::BlockMatchWindowCompareModeQCOM::eMin, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , windowExtent( windowExtent_ ) - , windowCompareMode( windowCompareMode_ ) + : pNext{ pNext_ } + , windowExtent{ windowExtent_ } + , windowCompareMode{ windowCompareMode_ } { } @@ -106010,9 +108790,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR SamplerBorderColorComponentMappingCreateInfoEXT( VULKAN_HPP_NAMESPACE::ComponentMapping components_ = {}, VULKAN_HPP_NAMESPACE::Bool32 srgb_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , components( components_ ) - , srgb( srgb_ ) + : pNext{ pNext_ } + , components{ components_ } + , srgb{ srgb_ } { } @@ -106119,8 +108899,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR SamplerCaptureDescriptorDataInfoEXT( VULKAN_HPP_NAMESPACE::Sampler sampler_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , sampler( sampler_ ) + : pNext{ pNext_ } + , sampler{ sampler_ } { } @@ -106231,23 +109011,23 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::BorderColor borderColor_ = VULKAN_HPP_NAMESPACE::BorderColor::eFloatTransparentBlack, VULKAN_HPP_NAMESPACE::Bool32 unnormalizedCoordinates_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , magFilter( magFilter_ ) - , minFilter( minFilter_ ) - , mipmapMode( mipmapMode_ ) - , addressModeU( addressModeU_ ) - , addressModeV( addressModeV_ ) - , addressModeW( addressModeW_ ) - , mipLodBias( mipLodBias_ ) - , anisotropyEnable( anisotropyEnable_ ) - , maxAnisotropy( maxAnisotropy_ ) - , compareEnable( compareEnable_ ) - , compareOp( compareOp_ ) - , minLod( minLod_ ) - , maxLod( maxLod_ ) - , borderColor( borderColor_ ) - , unnormalizedCoordinates( unnormalizedCoordinates_ ) + : pNext{ pNext_ } + , flags{ flags_ } + , magFilter{ magFilter_ } + , minFilter{ minFilter_ } + , mipmapMode{ mipmapMode_ } + , addressModeU{ addressModeU_ } + , addressModeV{ addressModeV_ } + , addressModeW{ addressModeW_ } + , mipLodBias{ mipLodBias_ } + , anisotropyEnable{ anisotropyEnable_ } + , maxAnisotropy{ maxAnisotropy_ } + , compareEnable{ compareEnable_ } + , compareOp{ compareOp_ } + , minLod{ minLod_ } + , maxLod{ maxLod_ } + , borderColor{ borderColor_ } + , unnormalizedCoordinates{ unnormalizedCoordinates_ } { } @@ -106484,8 +109264,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR SamplerCubicWeightsCreateInfoQCOM( VULKAN_HPP_NAMESPACE::CubicFilterWeightsQCOM cubicWeights_ = VULKAN_HPP_NAMESPACE::CubicFilterWeightsQCOM::eCatmullRom, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , cubicWeights( cubicWeights_ ) + : pNext{ pNext_ } + , cubicWeights{ cubicWeights_ } { } @@ -106583,9 +109363,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR_14 SamplerCustomBorderColorCreateInfoEXT( VULKAN_HPP_NAMESPACE::ClearColorValue customBorderColor_ = {}, VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , customBorderColor( customBorderColor_ ) - , format( format_ ) + : pNext{ pNext_ } + , customBorderColor{ customBorderColor_ } + , format{ format_ } { } @@ -106675,8 +109455,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR 
SamplerReductionModeCreateInfo( VULKAN_HPP_NAMESPACE::SamplerReductionMode reductionMode_ = VULKAN_HPP_NAMESPACE::SamplerReductionMode::eWeightedAverage, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , reductionMode( reductionMode_ ) + : pNext{ pNext_ } + , reductionMode{ reductionMode_ } { } @@ -106782,15 +109562,15 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Filter chromaFilter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest, VULKAN_HPP_NAMESPACE::Bool32 forceExplicitReconstruction_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , format( format_ ) - , ycbcrModel( ycbcrModel_ ) - , ycbcrRange( ycbcrRange_ ) - , components( components_ ) - , xChromaOffset( xChromaOffset_ ) - , yChromaOffset( yChromaOffset_ ) - , chromaFilter( chromaFilter_ ) - , forceExplicitReconstruction( forceExplicitReconstruction_ ) + : pNext{ pNext_ } + , format{ format_ } + , ycbcrModel{ ycbcrModel_ } + , ycbcrRange{ ycbcrRange_ } + , components{ components_ } + , xChromaOffset{ xChromaOffset_ } + , yChromaOffset{ yChromaOffset_ } + , chromaFilter{ chromaFilter_ } + , forceExplicitReconstruction{ forceExplicitReconstruction_ } { } @@ -106951,8 +109731,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionImageFormatProperties( uint32_t combinedImageSamplerDescriptorCount_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , combinedImageSamplerDescriptorCount( combinedImageSamplerDescriptorCount_ ) + : pNext{ pNext_ } + , combinedImageSamplerDescriptorCount{ combinedImageSamplerDescriptorCount_ } { } @@ -107036,8 +109816,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionInfo( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion conversion_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , conversion( conversion_ ) + : pNext{ pNext_ } + , conversion{ conversion_ } { } @@ -107136,9 +109916,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM( VULKAN_HPP_NAMESPACE::Bool32 enableYDegamma_ = {}, VULKAN_HPP_NAMESPACE::Bool32 enableCbCrDegamma_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , enableYDegamma( enableYDegamma_ ) - , enableCbCrDegamma( enableCbCrDegamma_ ) + : pNext{ pNext_ } + , enableYDegamma{ enableYDegamma_ } + , enableCbCrDegamma{ enableCbCrDegamma_ } { } @@ -107254,16 +110034,16 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::ChromaLocation suggestedXChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven, VULKAN_HPP_NAMESPACE::ChromaLocation suggestedYChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , format( format_ ) - , externalFormat( externalFormat_ ) - , screenUsage( screenUsage_ ) - , formatFeatures( formatFeatures_ ) - , samplerYcbcrConversionComponents( samplerYcbcrConversionComponents_ ) - , suggestedYcbcrModel( suggestedYcbcrModel_ ) - , suggestedYcbcrRange( suggestedYcbcrRange_ ) - , suggestedXChromaOffset( suggestedXChromaOffset_ ) - , suggestedYChromaOffset( suggestedYChromaOffset_ ) + : pNext{ pNext_ } + , format{ format_ } + , externalFormat{ externalFormat_ } + , screenUsage{ screenUsage_ } + , formatFeatures{ formatFeatures_ } + , samplerYcbcrConversionComponents{ samplerYcbcrConversionComponents_ } + , suggestedYcbcrModel{ 
suggestedYcbcrModel_ } + , suggestedYcbcrRange{ suggestedYcbcrRange_ } + , suggestedXChromaOffset{ suggestedXChromaOffset_ } + , suggestedYChromaOffset{ suggestedYChromaOffset_ } { } @@ -107380,9 +110160,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR ScreenBufferPropertiesQNX( VULKAN_HPP_NAMESPACE::DeviceSize allocationSize_ = {}, uint32_t memoryTypeBits_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , allocationSize( allocationSize_ ) - , memoryTypeBits( memoryTypeBits_ ) + : pNext{ pNext_ } + , allocationSize{ allocationSize_ } + , memoryTypeBits{ memoryTypeBits_ } { } @@ -107469,10 +110249,10 @@ namespace VULKAN_HPP_NAMESPACE struct _screen_context * context_ = {}, struct _screen_window * window_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , context( context_ ) - , window( window_ ) + : pNext{ pNext_ } + , flags{ flags_ } + , context{ context_ } + , window{ window_ } { } @@ -107586,8 +110366,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR SemaphoreCreateInfo( VULKAN_HPP_NAMESPACE::SemaphoreCreateFlags flags_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) + : pNext{ pNext_ } + , flags{ flags_ } { } @@ -107684,9 +110464,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , semaphore( semaphore_ ) - , handleType( handleType_ ) + : pNext{ pNext_ } + , semaphore{ semaphore_ } + , handleType{ handleType_ } { } @@ -107795,9 +110575,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , semaphore( semaphore_ ) - , handleType( handleType_ ) + : pNext{ pNext_ } + , semaphore{ semaphore_ } + , handleType{ handleType_ } { } @@ -107908,9 +110688,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , semaphore( semaphore_ ) - , handleType( handleType_ ) + : pNext{ pNext_ } + , semaphore{ semaphore_ } + , handleType{ handleType_ } { } @@ -108018,9 +110798,9 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR SemaphoreSignalInfo( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, uint64_t value_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , semaphore( semaphore_ ) - , value( value_ ) + : pNext{ pNext_ } + , semaphore{ semaphore_ } + , value{ value_ } { } @@ -108125,9 +110905,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR SemaphoreTypeCreateInfo( VULKAN_HPP_NAMESPACE::SemaphoreType semaphoreType_ = VULKAN_HPP_NAMESPACE::SemaphoreType::eBinary, uint64_t initialValue_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , semaphoreType( semaphoreType_ ) - , initialValue( initialValue_ ) + : pNext{ pNext_ } + , semaphoreType{ semaphoreType_ } + , 
initialValue{ initialValue_ } { } @@ -108235,11 +111015,11 @@ namespace VULKAN_HPP_NAMESPACE const VULKAN_HPP_NAMESPACE::Semaphore * pSemaphores_ = {}, const uint64_t * pValues_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , semaphoreCount( semaphoreCount_ ) - , pSemaphores( pSemaphores_ ) - , pValues( pValues_ ) + : pNext{ pNext_ } + , flags{ flags_ } + , semaphoreCount{ semaphoreCount_ } + , pSemaphores{ pSemaphores_ } + , pValues{ pValues_ } { } @@ -108407,13 +111187,13 @@ namespace VULKAN_HPP_NAMESPACE const uint32_t * pBufferIndices_ = {}, const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , stageFlags( stageFlags_ ) - , layout( layout_ ) - , firstSet( firstSet_ ) - , setCount( setCount_ ) - , pBufferIndices( pBufferIndices_ ) - , pOffsets( pOffsets_ ) + : pNext{ pNext_ } + , stageFlags{ stageFlags_ } + , layout{ layout_ } + , firstSet{ firstSet_ } + , setCount{ setCount_ } + , pBufferIndices{ pBufferIndices_ } + , pOffsets{ pOffsets_ } { } @@ -108600,9 +111380,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR SetLatencyMarkerInfoNV( uint64_t presentID_ = {}, VULKAN_HPP_NAMESPACE::LatencyMarkerNV marker_ = VULKAN_HPP_NAMESPACE::LatencyMarkerNV::eSimulationStart, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , presentID( presentID_ ) - , marker( marker_ ) + : pNext{ pNext_ } + , presentID{ presentID_ } + , marker{ marker_ } { } @@ -108700,7 +111480,7 @@ namespace VULKAN_HPP_NAMESPACE using NativeType = VkSetStateFlagsIndirectCommandNV; #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR SetStateFlagsIndirectCommandNV( uint32_t data_ = {} ) VULKAN_HPP_NOEXCEPT : data( data_ ) {} + VULKAN_HPP_CONSTEXPR SetStateFlagsIndirectCommandNV( uint32_t data_ = {} ) VULKAN_HPP_NOEXCEPT : data{ data_ } {} VULKAN_HPP_CONSTEXPR SetStateFlagsIndirectCommandNV( SetStateFlagsIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; @@ -108791,19 +111571,19 @@ namespace VULKAN_HPP_NAMESPACE const VULKAN_HPP_NAMESPACE::PushConstantRange * pPushConstantRanges_ = {}, const VULKAN_HPP_NAMESPACE::SpecializationInfo * pSpecializationInfo_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , stage( stage_ ) - , nextStage( nextStage_ ) - , codeType( codeType_ ) - , codeSize( codeSize_ ) - , pCode( pCode_ ) - , pName( pName_ ) - , setLayoutCount( setLayoutCount_ ) - , pSetLayouts( pSetLayouts_ ) - , pushConstantRangeCount( pushConstantRangeCount_ ) - , pPushConstantRanges( pPushConstantRanges_ ) - , pSpecializationInfo( pSpecializationInfo_ ) + : pNext{ pNext_ } + , flags{ flags_ } + , stage{ stage_ } + , nextStage{ nextStage_ } + , codeType{ codeType_ } + , codeSize{ codeSize_ } + , pCode{ pCode_ } + , pName{ pName_ } + , setLayoutCount{ setLayoutCount_ } + , pSetLayouts{ pSetLayouts_ } + , pushConstantRangeCount{ pushConstantRangeCount_ } + , pPushConstantRanges{ pPushConstantRanges_ } + , pSpecializationInfo{ pSpecializationInfo_ } { } @@ -109097,10 +111877,10 @@ namespace VULKAN_HPP_NAMESPACE size_t codeSize_ = {}, const uint32_t * pCode_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , codeSize( codeSize_ ) - , pCode( pCode_ ) + : pNext{ pNext_ } + , flags{ flags_ } + , codeSize{ codeSize_ } + , pCode{ pCode_ } { } @@ -109233,9 +112013,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR_14 
ShaderModuleIdentifierEXT( uint32_t identifierSize_ = {}, std::array const & identifier_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , identifierSize( identifierSize_ ) - , identifier( identifier_ ) + : pNext{ pNext_ } + , identifierSize{ identifierSize_ } + , identifier{ identifier_ } { } @@ -109246,15 +112026,6 @@ namespace VULKAN_HPP_NAMESPACE { } -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - ShaderModuleIdentifierEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & identifier_, void * pNext_ = nullptr ) - : pNext( pNext_ ), identifierSize( std::min( static_cast( identifier_.size() ), VK_MAX_SHADER_MODULE_IDENTIFIER_SIZE_EXT ) ) - { - VULKAN_HPP_ASSERT( identifier_.size() < VK_MAX_SHADER_MODULE_IDENTIFIER_SIZE_EXT ); - memcpy( identifier, identifier_.data(), identifierSize * sizeof( uint8_t ) ); - } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - ShaderModuleIdentifierEXT & operator=( ShaderModuleIdentifierEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ @@ -109342,8 +112113,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ShaderModuleValidationCacheCreateInfoEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , validationCache( validationCache_ ) + : pNext{ pNext_ } + , validationCache{ validationCache_ } { } @@ -109440,11 +112211,11 @@ namespace VULKAN_HPP_NAMESPACE uint32_t ldsSizePerLocalWorkGroup_ = {}, size_t ldsUsageSizeInBytes_ = {}, size_t scratchMemUsageInBytes_ = {} ) VULKAN_HPP_NOEXCEPT - : numUsedVgprs( numUsedVgprs_ ) - , numUsedSgprs( numUsedSgprs_ ) - , ldsSizePerLocalWorkGroup( ldsSizePerLocalWorkGroup_ ) - , ldsUsageSizeInBytes( ldsUsageSizeInBytes_ ) - , scratchMemUsageInBytes( scratchMemUsageInBytes_ ) + : numUsedVgprs{ numUsedVgprs_ } + , numUsedSgprs{ numUsedSgprs_ } + , ldsSizePerLocalWorkGroup{ ldsSizePerLocalWorkGroup_ } + , ldsUsageSizeInBytes{ ldsUsageSizeInBytes_ } + , scratchMemUsageInBytes{ scratchMemUsageInBytes_ } { } @@ -109525,13 +112296,13 @@ namespace VULKAN_HPP_NAMESPACE uint32_t numAvailableVgprs_ = {}, uint32_t numAvailableSgprs_ = {}, std::array const & computeWorkGroupSize_ = {} ) VULKAN_HPP_NOEXCEPT - : shaderStageMask( shaderStageMask_ ) - , resourceUsage( resourceUsage_ ) - , numPhysicalVgprs( numPhysicalVgprs_ ) - , numPhysicalSgprs( numPhysicalSgprs_ ) - , numAvailableVgprs( numAvailableVgprs_ ) - , numAvailableSgprs( numAvailableSgprs_ ) - , computeWorkGroupSize( computeWorkGroupSize_ ) + : shaderStageMask{ shaderStageMask_ } + , resourceUsage{ resourceUsage_ } + , numPhysicalVgprs{ numPhysicalVgprs_ } + , numPhysicalSgprs{ numPhysicalSgprs_ } + , numAvailableVgprs{ numAvailableVgprs_ } + , numAvailableSgprs{ numAvailableSgprs_ } + , computeWorkGroupSize{ computeWorkGroupSize_ } { } @@ -109619,8 +112390,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR SharedPresentSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::ImageUsageFlags sharedPresentSupportedUsageFlags_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , sharedPresentSupportedUsageFlags( sharedPresentSupportedUsageFlags_ ) + : pNext{ pNext_ } + , sharedPresentSupportedUsageFlags{ sharedPresentSupportedUsageFlags_ } { } @@ -109700,9 +112471,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR SparseImageFormatProperties( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}, 
VULKAN_HPP_NAMESPACE::Extent3D imageGranularity_ = {}, VULKAN_HPP_NAMESPACE::SparseImageFormatFlags flags_ = {} ) VULKAN_HPP_NOEXCEPT - : aspectMask( aspectMask_ ) - , imageGranularity( imageGranularity_ ) - , flags( flags_ ) + : aspectMask{ aspectMask_ } + , imageGranularity{ imageGranularity_ } + , flags{ flags_ } { } @@ -109778,8 +112549,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR SparseImageFormatProperties2( VULKAN_HPP_NAMESPACE::SparseImageFormatProperties properties_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , properties( properties_ ) + : pNext{ pNext_ } + , properties{ properties_ } { } @@ -109863,11 +112634,11 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailSize_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailOffset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailStride_ = {} ) VULKAN_HPP_NOEXCEPT - : formatProperties( formatProperties_ ) - , imageMipTailFirstLod( imageMipTailFirstLod_ ) - , imageMipTailSize( imageMipTailSize_ ) - , imageMipTailOffset( imageMipTailOffset_ ) - , imageMipTailStride( imageMipTailStride_ ) + : formatProperties{ formatProperties_ } + , imageMipTailFirstLod{ imageMipTailFirstLod_ } + , imageMipTailSize{ imageMipTailSize_ } + , imageMipTailOffset{ imageMipTailOffset_ } + , imageMipTailStride{ imageMipTailStride_ } { } @@ -109951,8 +112722,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR SparseImageMemoryRequirements2( VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements memoryRequirements_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , memoryRequirements( memoryRequirements_ ) + : pNext{ pNext_ } + , memoryRequirements{ memoryRequirements_ } { } @@ -110038,9 +112809,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR StreamDescriptorSurfaceCreateInfoGGP( VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateFlagsGGP flags_ = {}, GgpStreamDescriptor streamDescriptor_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , streamDescriptor( streamDescriptor_ ) + : pNext{ pNext_ } + , flags{ flags_ } + , streamDescriptor{ streamDescriptor_ } { } @@ -110155,9 +112926,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR StridedDeviceAddressRegionKHR( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize stride_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {} ) VULKAN_HPP_NOEXCEPT - : deviceAddress( deviceAddress_ ) - , stride( stride_ ) - , size( size_ ) + : deviceAddress{ deviceAddress_ } + , stride{ stride_ } + , size{ size_ } { } @@ -110259,14 +113030,14 @@ namespace VULKAN_HPP_NAMESPACE uint32_t signalSemaphoreCount_ = {}, const VULKAN_HPP_NAMESPACE::Semaphore * pSignalSemaphores_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , waitSemaphoreCount( waitSemaphoreCount_ ) - , pWaitSemaphores( pWaitSemaphores_ ) - , pWaitDstStageMask( pWaitDstStageMask_ ) - , commandBufferCount( commandBufferCount_ ) - , pCommandBuffers( pCommandBuffers_ ) - , signalSemaphoreCount( signalSemaphoreCount_ ) - , pSignalSemaphores( pSignalSemaphores_ ) + : pNext{ pNext_ } + , waitSemaphoreCount{ waitSemaphoreCount_ } + , pWaitSemaphores{ pWaitSemaphores_ } + , pWaitDstStageMask{ pWaitDstStageMask_ } + , commandBufferCount{ commandBufferCount_ } + , pCommandBuffers{ pCommandBuffers_ } + , signalSemaphoreCount{ signalSemaphoreCount_ } + , 
pSignalSemaphores{ pSignalSemaphores_ } { } @@ -110485,14 +113256,14 @@ namespace VULKAN_HPP_NAMESPACE uint32_t signalSemaphoreInfoCount_ = {}, const VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo * pSignalSemaphoreInfos_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , waitSemaphoreInfoCount( waitSemaphoreInfoCount_ ) - , pWaitSemaphoreInfos( pWaitSemaphoreInfos_ ) - , commandBufferInfoCount( commandBufferInfoCount_ ) - , pCommandBufferInfos( pCommandBufferInfos_ ) - , signalSemaphoreInfoCount( signalSemaphoreInfoCount_ ) - , pSignalSemaphoreInfos( pSignalSemaphoreInfos_ ) + : pNext{ pNext_ } + , flags{ flags_ } + , waitSemaphoreInfoCount{ waitSemaphoreInfoCount_ } + , pWaitSemaphoreInfos{ pWaitSemaphoreInfos_ } + , commandBufferInfoCount{ commandBufferInfoCount_ } + , pCommandBufferInfos{ pCommandBufferInfos_ } + , signalSemaphoreInfoCount{ signalSemaphoreInfoCount_ } + , pSignalSemaphoreInfos{ pSignalSemaphoreInfos_ } { } @@ -110698,8 +113469,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR SubpassBeginInfo( VULKAN_HPP_NAMESPACE::SubpassContents contents_ = VULKAN_HPP_NAMESPACE::SubpassContents::eInline, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , contents( contents_ ) + : pNext{ pNext_ } + , contents{ contents_ } { } @@ -110797,10 +113568,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::ResolveModeFlagBits stencilResolveMode_ = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone, const VULKAN_HPP_NAMESPACE::AttachmentReference2 * pDepthStencilResolveAttachment_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , depthResolveMode( depthResolveMode_ ) - , stencilResolveMode( stencilResolveMode_ ) - , pDepthStencilResolveAttachment( pDepthStencilResolveAttachment_ ) + : pNext{ pNext_ } + , depthResolveMode{ depthResolveMode_ } + , stencilResolveMode{ stencilResolveMode_ } + , pDepthStencilResolveAttachment{ pDepthStencilResolveAttachment_ } { } @@ -110918,7 +113689,7 @@ namespace VULKAN_HPP_NAMESPACE static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassEndInfo; #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR SubpassEndInfo( const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext( pNext_ ) {} + VULKAN_HPP_CONSTEXPR SubpassEndInfo( const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } {} VULKAN_HPP_CONSTEXPR SubpassEndInfo( SubpassEndInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; @@ -111005,9 +113776,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR SubpassFragmentDensityMapOffsetEndInfoQCOM( uint32_t fragmentDensityOffsetCount_ = {}, const VULKAN_HPP_NAMESPACE::Offset2D * pFragmentDensityOffsets_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , fragmentDensityOffsetCount( fragmentDensityOffsetCount_ ) - , pFragmentDensityOffsets( pFragmentDensityOffsets_ ) + : pNext{ pNext_ } + , fragmentDensityOffsetCount{ fragmentDensityOffsetCount_ } + , pFragmentDensityOffsets{ pFragmentDensityOffsets_ } { } @@ -111132,8 +113903,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR SubpassResolvePerformanceQueryEXT( VULKAN_HPP_NAMESPACE::Bool32 optimal_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , optimal( optimal_ ) + : pNext{ pNext_ } + , optimal{ optimal_ } { } @@ -111216,9 +113987,9 @@ namespace VULKAN_HPP_NAMESPACE 
VULKAN_HPP_CONSTEXPR SubpassShadingPipelineCreateInfoHUAWEI( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {}, uint32_t subpass_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , renderPass( renderPass_ ) - , subpass( subpass_ ) + : pNext{ pNext_ } + , renderPass{ renderPass_ } + , subpass{ subpass_ } { } @@ -111320,8 +114091,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR SubresourceHostMemcpySizeEXT( VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , size( size_ ) + : pNext{ pNext_ } + , size{ size_ } { } @@ -111402,8 +114173,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR SubresourceLayout2KHR( VULKAN_HPP_NAMESPACE::SubresourceLayout subresourceLayout_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , subresourceLayout( subresourceLayout_ ) + : pNext{ pNext_ } + , subresourceLayout{ subresourceLayout_ } { } @@ -111498,18 +114269,18 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::ImageUsageFlags supportedUsageFlags_ = {}, VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT supportedSurfaceCounters_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , minImageCount( minImageCount_ ) - , maxImageCount( maxImageCount_ ) - , currentExtent( currentExtent_ ) - , minImageExtent( minImageExtent_ ) - , maxImageExtent( maxImageExtent_ ) - , maxImageArrayLayers( maxImageArrayLayers_ ) - , supportedTransforms( supportedTransforms_ ) - , currentTransform( currentTransform_ ) - , supportedCompositeAlpha( supportedCompositeAlpha_ ) - , supportedUsageFlags( supportedUsageFlags_ ) - , supportedSurfaceCounters( supportedSurfaceCounters_ ) + : pNext{ pNext_ } + , minImageCount{ minImageCount_ } + , maxImageCount{ maxImageCount_ } + , currentExtent{ currentExtent_ } + , minImageExtent{ minImageExtent_ } + , maxImageExtent{ maxImageExtent_ } + , maxImageArrayLayers{ maxImageArrayLayers_ } + , supportedTransforms{ supportedTransforms_ } + , currentTransform{ currentTransform_ } + , supportedCompositeAlpha{ supportedCompositeAlpha_ } + , supportedUsageFlags{ supportedUsageFlags_ } + , supportedSurfaceCounters{ supportedSurfaceCounters_ } { } @@ -111635,16 +114406,16 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR currentTransform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR supportedCompositeAlpha_ = {}, VULKAN_HPP_NAMESPACE::ImageUsageFlags supportedUsageFlags_ = {} ) VULKAN_HPP_NOEXCEPT - : minImageCount( minImageCount_ ) - , maxImageCount( maxImageCount_ ) - , currentExtent( currentExtent_ ) - , minImageExtent( minImageExtent_ ) - , maxImageExtent( maxImageExtent_ ) - , maxImageArrayLayers( maxImageArrayLayers_ ) - , supportedTransforms( supportedTransforms_ ) - , currentTransform( currentTransform_ ) - , supportedCompositeAlpha( supportedCompositeAlpha_ ) - , supportedUsageFlags( supportedUsageFlags_ ) + : minImageCount{ minImageCount_ } + , maxImageCount{ maxImageCount_ } + , currentExtent{ currentExtent_ } + , minImageExtent{ minImageExtent_ } + , maxImageExtent{ maxImageExtent_ } + , maxImageArrayLayers{ maxImageArrayLayers_ } + , supportedTransforms{ supportedTransforms_ } + , currentTransform{ currentTransform_ } + , supportedCompositeAlpha{ supportedCompositeAlpha_ } + , supportedUsageFlags{ supportedUsageFlags_ } { } @@ -111748,8 +114519,8 @@ 
namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR SurfaceCapabilities2KHR( VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR surfaceCapabilities_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , surfaceCapabilities( surfaceCapabilities_ ) + : pNext{ pNext_ } + , surfaceCapabilities{ surfaceCapabilities_ } { } @@ -111832,8 +114603,8 @@ namespace VULKAN_HPP_NAMESPACE # if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesFullScreenExclusiveEXT( VULKAN_HPP_NAMESPACE::Bool32 fullScreenExclusiveSupported_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , fullScreenExclusiveSupported( fullScreenExclusiveSupported_ ) + : pNext{ pNext_ } + , fullScreenExclusiveSupported{ fullScreenExclusiveSupported_ } { } @@ -111853,21 +114624,6 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -# if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 SurfaceCapabilitiesFullScreenExclusiveEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 SurfaceCapabilitiesFullScreenExclusiveEXT & - setFullScreenExclusiveSupported( VULKAN_HPP_NAMESPACE::Bool32 fullScreenExclusiveSupported_ ) VULKAN_HPP_NOEXCEPT - { - fullScreenExclusiveSupported = fullScreenExclusiveSupported_; - return *this; - } -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkSurfaceCapabilitiesFullScreenExclusiveEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -111931,8 +114687,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesPresentBarrierNV( VULKAN_HPP_NAMESPACE::Bool32 presentBarrierSupported_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , presentBarrierSupported( presentBarrierSupported_ ) + : pNext{ pNext_ } + , presentBarrierSupported{ presentBarrierSupported_ } { } @@ -111952,21 +114708,6 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 SurfaceCapabilitiesPresentBarrierNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 SurfaceCapabilitiesPresentBarrierNV & - setPresentBarrierSupported( VULKAN_HPP_NAMESPACE::Bool32 presentBarrierSupported_ ) VULKAN_HPP_NOEXCEPT - { - presentBarrierSupported = presentBarrierSupported_; - return *this; - } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkSurfaceCapabilitiesPresentBarrierNV const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -112027,8 +114768,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR SurfaceFormatKHR( VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::ColorSpaceKHR colorSpace_ = VULKAN_HPP_NAMESPACE::ColorSpaceKHR::eSrgbNonlinear ) VULKAN_HPP_NOEXCEPT - : format( format_ ) - , colorSpace( colorSpace_ ) + : format{ format_ } + , colorSpace{ colorSpace_ } { } @@ -112099,8 +114840,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR SurfaceFormat2KHR( VULKAN_HPP_NAMESPACE::SurfaceFormatKHR surfaceFormat_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , surfaceFormat( surfaceFormat_ ) + : pNext{ pNext_ } + , surfaceFormat{ surfaceFormat_ } { } @@ -112181,8 +114922,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR 
SurfaceFullScreenExclusiveInfoEXT( VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT fullScreenExclusive_ = VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT::eDefault, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , fullScreenExclusive( fullScreenExclusive_ ) + : pNext{ pNext_ } + , fullScreenExclusive{ fullScreenExclusive_ } { } @@ -112280,8 +115021,8 @@ namespace VULKAN_HPP_NAMESPACE # if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR SurfaceFullScreenExclusiveWin32InfoEXT( HMONITOR hmonitor_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , hmonitor( hmonitor_ ) + : pNext{ pNext_ } + , hmonitor{ hmonitor_ } { } @@ -112379,9 +115120,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR SurfacePresentModeCompatibilityEXT( uint32_t presentModeCount_ = {}, VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , presentModeCount( presentModeCount_ ) - , pPresentModes( pPresentModes_ ) + : pNext{ pNext_ } + , presentModeCount{ presentModeCount_ } + , pPresentModes{ pPresentModes_ } { } @@ -112502,8 +115243,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR SurfacePresentModeEXT( VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode_ = VULKAN_HPP_NAMESPACE::PresentModeKHR::eImmediate, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , presentMode( presentMode_ ) + : pNext{ pNext_ } + , presentMode{ presentMode_ } { } @@ -112603,12 +115344,12 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Extent2D minScaledImageExtent_ = {}, VULKAN_HPP_NAMESPACE::Extent2D maxScaledImageExtent_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , supportedPresentScaling( supportedPresentScaling_ ) - , supportedPresentGravityX( supportedPresentGravityX_ ) - , supportedPresentGravityY( supportedPresentGravityY_ ) - , minScaledImageExtent( minScaledImageExtent_ ) - , maxScaledImageExtent( maxScaledImageExtent_ ) + : pNext{ pNext_ } + , supportedPresentScaling{ supportedPresentScaling_ } + , supportedPresentGravityX{ supportedPresentGravityX_ } + , supportedPresentGravityY{ supportedPresentGravityY_ } + , minScaledImageExtent{ minScaledImageExtent_ } + , maxScaledImageExtent{ maxScaledImageExtent_ } { } @@ -112628,49 +115369,6 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 SurfacePresentScalingCapabilitiesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 SurfacePresentScalingCapabilitiesEXT & - setSupportedPresentScaling( VULKAN_HPP_NAMESPACE::PresentScalingFlagsEXT supportedPresentScaling_ ) VULKAN_HPP_NOEXCEPT - { - supportedPresentScaling = supportedPresentScaling_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 SurfacePresentScalingCapabilitiesEXT & - setSupportedPresentGravityX( VULKAN_HPP_NAMESPACE::PresentGravityFlagsEXT supportedPresentGravityX_ ) VULKAN_HPP_NOEXCEPT - { - supportedPresentGravityX = supportedPresentGravityX_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 SurfacePresentScalingCapabilitiesEXT & - setSupportedPresentGravityY( VULKAN_HPP_NAMESPACE::PresentGravityFlagsEXT supportedPresentGravityY_ ) VULKAN_HPP_NOEXCEPT - { - supportedPresentGravityY = supportedPresentGravityY_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 SurfacePresentScalingCapabilitiesEXT & - setMinScaledImageExtent( 
VULKAN_HPP_NAMESPACE::Extent2D const & minScaledImageExtent_ ) VULKAN_HPP_NOEXCEPT - { - minScaledImageExtent = minScaledImageExtent_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 SurfacePresentScalingCapabilitiesEXT & - setMaxScaledImageExtent( VULKAN_HPP_NAMESPACE::Extent2D const & maxScaledImageExtent_ ) VULKAN_HPP_NOEXCEPT - { - maxScaledImageExtent = maxScaledImageExtent_; - return *this; - } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkSurfacePresentScalingCapabilitiesEXT const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -112745,8 +115443,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR SurfaceProtectedCapabilitiesKHR( VULKAN_HPP_NAMESPACE::Bool32 supportsProtected_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , supportsProtected( supportsProtected_ ) + : pNext{ pNext_ } + , supportsProtected{ supportsProtected_ } { } @@ -112766,20 +115464,6 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 SurfaceProtectedCapabilitiesKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 SurfaceProtectedCapabilitiesKHR & setSupportsProtected( VULKAN_HPP_NAMESPACE::Bool32 supportsProtected_ ) VULKAN_HPP_NOEXCEPT - { - supportsProtected = supportsProtected_; - return *this; - } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkSurfaceProtectedCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); @@ -112842,8 +115526,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR SwapchainCounterCreateInfoEXT( VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT surfaceCounters_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , surfaceCounters( surfaceCounters_ ) + : pNext{ pNext_ } + , surfaceCounters{ surfaceCounters_ } { } @@ -112956,23 +115640,23 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Bool32 clipped_ = {}, VULKAN_HPP_NAMESPACE::SwapchainKHR oldSwapchain_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , surface( surface_ ) - , minImageCount( minImageCount_ ) - , imageFormat( imageFormat_ ) - , imageColorSpace( imageColorSpace_ ) - , imageExtent( imageExtent_ ) - , imageArrayLayers( imageArrayLayers_ ) - , imageUsage( imageUsage_ ) - , imageSharingMode( imageSharingMode_ ) - , queueFamilyIndexCount( queueFamilyIndexCount_ ) - , pQueueFamilyIndices( pQueueFamilyIndices_ ) - , preTransform( preTransform_ ) - , compositeAlpha( compositeAlpha_ ) - , presentMode( presentMode_ ) - , clipped( clipped_ ) - , oldSwapchain( oldSwapchain_ ) + : pNext{ pNext_ } + , flags{ flags_ } + , surface{ surface_ } + , minImageCount{ minImageCount_ } + , imageFormat{ imageFormat_ } + , imageColorSpace{ imageColorSpace_ } + , imageExtent{ imageExtent_ } + , imageArrayLayers{ imageArrayLayers_ } + , imageUsage{ imageUsage_ } + , imageSharingMode{ imageSharingMode_ } + , queueFamilyIndexCount{ queueFamilyIndexCount_ } + , pQueueFamilyIndices{ pQueueFamilyIndices_ } + , preTransform{ preTransform_ } + , compositeAlpha{ compositeAlpha_ } + , presentMode{ presentMode_ } + , clipped{ clipped_ } + , oldSwapchain{ oldSwapchain_ } { } @@ -113260,8 +115944,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR 
SwapchainDisplayNativeHdrCreateInfoAMD( VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , localDimmingEnable( localDimmingEnable_ ) + : pNext{ pNext_ } + , localDimmingEnable{ localDimmingEnable_ } { } @@ -113357,8 +116041,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR SwapchainLatencyCreateInfoNV( VULKAN_HPP_NAMESPACE::Bool32 latencyModeEnable_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , latencyModeEnable( latencyModeEnable_ ) + : pNext{ pNext_ } + , latencyModeEnable{ latencyModeEnable_ } { } @@ -113454,8 +116138,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR SwapchainPresentBarrierCreateInfoNV( VULKAN_HPP_NAMESPACE::Bool32 presentBarrierEnable_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , presentBarrierEnable( presentBarrierEnable_ ) + : pNext{ pNext_ } + , presentBarrierEnable{ presentBarrierEnable_ } { } @@ -113553,9 +116237,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR SwapchainPresentFenceInfoEXT( uint32_t swapchainCount_ = {}, const VULKAN_HPP_NAMESPACE::Fence * pFences_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , swapchainCount( swapchainCount_ ) - , pFences( pFences_ ) + : pNext{ pNext_ } + , swapchainCount{ swapchainCount_ } + , pFences{ pFences_ } { } @@ -113677,9 +116361,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR SwapchainPresentModeInfoEXT( uint32_t swapchainCount_ = {}, const VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , swapchainCount( swapchainCount_ ) - , pPresentModes( pPresentModes_ ) + : pNext{ pNext_ } + , swapchainCount{ swapchainCount_ } + , pPresentModes{ pPresentModes_ } { } @@ -113801,9 +116485,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR SwapchainPresentModesCreateInfoEXT( uint32_t presentModeCount_ = {}, const VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , presentModeCount( presentModeCount_ ) - , pPresentModes( pPresentModes_ ) + : pNext{ pNext_ } + , presentModeCount{ presentModeCount_ } + , pPresentModes{ pPresentModes_ } { } @@ -113927,10 +116611,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::PresentGravityFlagsEXT presentGravityX_ = {}, VULKAN_HPP_NAMESPACE::PresentGravityFlagsEXT presentGravityY_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , scalingBehavior( scalingBehavior_ ) - , presentGravityX( presentGravityX_ ) - , presentGravityY( presentGravityY_ ) + : pNext{ pNext_ } + , scalingBehavior{ scalingBehavior_ } + , presentGravityX{ presentGravityX_ } + , presentGravityY{ presentGravityY_ } { } @@ -114048,8 +116732,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR TextureLODGatherFormatPropertiesAMD( VULKAN_HPP_NAMESPACE::Bool32 supportsTextureGatherLODBiasAMD_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , supportsTextureGatherLODBiasAMD( supportsTextureGatherLODBiasAMD_ ) + : pNext{ pNext_ } + , supportsTextureGatherLODBiasAMD{ supportsTextureGatherLODBiasAMD_ } { } @@ -114133,10 +116817,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Extent2D apronSize_ = {}, VULKAN_HPP_NAMESPACE::Offset2D 
origin_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , tileSize( tileSize_ ) - , apronSize( apronSize_ ) - , origin( origin_ ) + : pNext{ pNext_ } + , tileSize{ tileSize_ } + , apronSize{ apronSize_ } + , origin{ origin_ } { } @@ -114250,11 +116934,11 @@ namespace VULKAN_HPP_NAMESPACE uint32_t signalSemaphoreValueCount_ = {}, const uint64_t * pSignalSemaphoreValues_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , waitSemaphoreValueCount( waitSemaphoreValueCount_ ) - , pWaitSemaphoreValues( pWaitSemaphoreValues_ ) - , signalSemaphoreValueCount( signalSemaphoreValueCount_ ) - , pSignalSemaphoreValues( pSignalSemaphoreValues_ ) + : pNext{ pNext_ } + , waitSemaphoreValueCount{ waitSemaphoreValueCount_ } + , pWaitSemaphoreValues{ pWaitSemaphoreValues_ } + , signalSemaphoreValueCount{ signalSemaphoreValueCount_ } + , pSignalSemaphoreValues{ pSignalSemaphoreValues_ } { } @@ -114422,20 +117106,20 @@ namespace VULKAN_HPP_NAMESPACE uint32_t width_ = {}, uint32_t height_ = {}, uint32_t depth_ = {} ) VULKAN_HPP_NOEXCEPT - : raygenShaderRecordAddress( raygenShaderRecordAddress_ ) - , raygenShaderRecordSize( raygenShaderRecordSize_ ) - , missShaderBindingTableAddress( missShaderBindingTableAddress_ ) - , missShaderBindingTableSize( missShaderBindingTableSize_ ) - , missShaderBindingTableStride( missShaderBindingTableStride_ ) - , hitShaderBindingTableAddress( hitShaderBindingTableAddress_ ) - , hitShaderBindingTableSize( hitShaderBindingTableSize_ ) - , hitShaderBindingTableStride( hitShaderBindingTableStride_ ) - , callableShaderBindingTableAddress( callableShaderBindingTableAddress_ ) - , callableShaderBindingTableSize( callableShaderBindingTableSize_ ) - , callableShaderBindingTableStride( callableShaderBindingTableStride_ ) - , width( width_ ) - , height( height_ ) - , depth( depth_ ) + : raygenShaderRecordAddress{ raygenShaderRecordAddress_ } + , raygenShaderRecordSize{ raygenShaderRecordSize_ } + , missShaderBindingTableAddress{ missShaderBindingTableAddress_ } + , missShaderBindingTableSize{ missShaderBindingTableSize_ } + , missShaderBindingTableStride{ missShaderBindingTableStride_ } + , hitShaderBindingTableAddress{ hitShaderBindingTableAddress_ } + , hitShaderBindingTableSize{ hitShaderBindingTableSize_ } + , hitShaderBindingTableStride{ hitShaderBindingTableStride_ } + , callableShaderBindingTableAddress{ callableShaderBindingTableAddress_ } + , callableShaderBindingTableSize{ callableShaderBindingTableSize_ } + , callableShaderBindingTableStride{ callableShaderBindingTableStride_ } + , width{ width_ } + , height{ height_ } + , depth{ depth_ } { } @@ -114648,9 +117332,9 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR TraceRaysIndirectCommandKHR( uint32_t width_ = {}, uint32_t height_ = {}, uint32_t depth_ = {} ) VULKAN_HPP_NOEXCEPT - : width( width_ ) - , height( height_ ) - , depth( depth_ ) + : width{ width_ } + , height{ height_ } + , depth{ depth_ } { } @@ -114753,10 +117437,10 @@ namespace VULKAN_HPP_NAMESPACE size_t initialDataSize_ = {}, const void * pInitialData_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , initialDataSize( initialDataSize_ ) - , pInitialData( pInitialData_ ) + : pNext{ pNext_ } + , flags{ flags_ } + , initialDataSize{ initialDataSize_ } + , pInitialData{ pInitialData_ } { } @@ -114894,11 +117578,11 @@ namespace VULKAN_HPP_NAMESPACE uint32_t disabledValidationFeatureCount_ = {}, const 
VULKAN_HPP_NAMESPACE::ValidationFeatureDisableEXT * pDisabledValidationFeatures_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , enabledValidationFeatureCount( enabledValidationFeatureCount_ ) - , pEnabledValidationFeatures( pEnabledValidationFeatures_ ) - , disabledValidationFeatureCount( disabledValidationFeatureCount_ ) - , pDisabledValidationFeatures( pDisabledValidationFeatures_ ) + : pNext{ pNext_ } + , enabledValidationFeatureCount{ enabledValidationFeatureCount_ } + , pEnabledValidationFeatures{ pEnabledValidationFeatures_ } + , disabledValidationFeatureCount{ disabledValidationFeatureCount_ } + , pDisabledValidationFeatures{ pDisabledValidationFeatures_ } { } @@ -115061,9 +117745,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR ValidationFlagsEXT( uint32_t disabledValidationCheckCount_ = {}, const VULKAN_HPP_NAMESPACE::ValidationCheckEXT * pDisabledValidationChecks_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , disabledValidationCheckCount( disabledValidationCheckCount_ ) - , pDisabledValidationChecks( pDisabledValidationChecks_ ) + : pNext{ pNext_ } + , disabledValidationCheckCount{ disabledValidationCheckCount_ } + , pDisabledValidationChecks{ pDisabledValidationChecks_ } { } @@ -115188,11 +117872,11 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, uint32_t offset_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , location( location_ ) - , binding( binding_ ) - , format( format_ ) - , offset( offset_ ) + : pNext{ pNext_ } + , location{ location_ } + , binding{ binding_ } + , format{ format_ } + , offset{ offset_ } { } @@ -115318,11 +118002,11 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::VertexInputRate inputRate_ = VULKAN_HPP_NAMESPACE::VertexInputRate::eVertex, uint32_t divisor_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , binding( binding_ ) - , stride( stride_ ) - , inputRate( inputRate_ ) - , divisor( divisor_ ) + : pNext{ pNext_ } + , binding{ binding_ } + , stride{ stride_ } + , inputRate{ inputRate_ } + , divisor{ divisor_ } { } @@ -115446,9 +118130,9 @@ namespace VULKAN_HPP_NAMESPACE # if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR ViSurfaceCreateInfoNN( VULKAN_HPP_NAMESPACE::ViSurfaceCreateFlagsNN flags_ = {}, void * window_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , window( window_ ) + : pNext{ pNext_ } + , flags{ flags_ } + , window{ window_ } { } @@ -115555,11 +118239,11 @@ namespace VULKAN_HPP_NAMESPACE uint32_t baseArrayLayer_ = {}, VULKAN_HPP_NAMESPACE::ImageView imageViewBinding_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , codedOffset( codedOffset_ ) - , codedExtent( codedExtent_ ) - , baseArrayLayer( baseArrayLayer_ ) - , imageViewBinding( imageViewBinding_ ) + : pNext{ pNext_ } + , codedOffset{ codedOffset_ } + , codedExtent{ codedExtent_ } + , baseArrayLayer{ baseArrayLayer_ } + , imageViewBinding{ imageViewBinding_ } { } @@ -115683,9 +118367,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR VideoReferenceSlotInfoKHR( int32_t slotIndex_ = {}, const VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR * pPictureResource_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , slotIndex( slotIndex_ ) - , pPictureResource( pPictureResource_ ) + : pNext{ pNext_ } + , slotIndex{ slotIndex_ } + , 
pPictureResource{ pPictureResource_ } { } @@ -115796,12 +118480,12 @@ namespace VULKAN_HPP_NAMESPACE uint32_t referenceSlotCount_ = {}, const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pReferenceSlots_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , videoSession( videoSession_ ) - , videoSessionParameters( videoSessionParameters_ ) - , referenceSlotCount( referenceSlotCount_ ) - , pReferenceSlots( pReferenceSlots_ ) + : pNext{ pNext_ } + , flags{ flags_ } + , videoSession{ videoSession_ } + , videoSessionParameters{ videoSessionParameters_ } + , referenceSlotCount{ referenceSlotCount_ } + , pReferenceSlots{ pReferenceSlots_ } { } @@ -115969,16 +118653,16 @@ namespace VULKAN_HPP_NAMESPACE uint32_t maxActiveReferencePictures_ = {}, VULKAN_HPP_NAMESPACE::ExtensionProperties stdHeaderVersion_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , minBitstreamBufferOffsetAlignment( minBitstreamBufferOffsetAlignment_ ) - , minBitstreamBufferSizeAlignment( minBitstreamBufferSizeAlignment_ ) - , pictureAccessGranularity( pictureAccessGranularity_ ) - , minCodedExtent( minCodedExtent_ ) - , maxCodedExtent( maxCodedExtent_ ) - , maxDpbSlots( maxDpbSlots_ ) - , maxActiveReferencePictures( maxActiveReferencePictures_ ) - , stdHeaderVersion( stdHeaderVersion_ ) + : pNext{ pNext_ } + , flags{ flags_ } + , minBitstreamBufferOffsetAlignment{ minBitstreamBufferOffsetAlignment_ } + , minBitstreamBufferSizeAlignment{ minBitstreamBufferSizeAlignment_ } + , pictureAccessGranularity{ pictureAccessGranularity_ } + , minCodedExtent{ minCodedExtent_ } + , maxCodedExtent{ maxCodedExtent_ } + , maxDpbSlots{ maxDpbSlots_ } + , maxActiveReferencePictures{ maxActiveReferencePictures_ } + , stdHeaderVersion{ stdHeaderVersion_ } { } @@ -116092,8 +118776,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR VideoCodingControlInfoKHR( VULKAN_HPP_NAMESPACE::VideoCodingControlFlagsKHR flags_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) + : pNext{ pNext_ } + , flags{ flags_ } { } @@ -116188,8 +118872,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR VideoDecodeAV1CapabilitiesKHR( StdVideoAV1Level maxLevel_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , maxLevel( maxLevel_ ) + : pNext{ pNext_ } + , maxLevel{ maxLevel_ } { } @@ -116277,8 +118961,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR VideoDecodeAV1DpbSlotInfoKHR( const StdVideoDecodeAV1ReferenceInfo * pStdReferenceInfo_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , pStdReferenceInfo( pStdReferenceInfo_ ) + : pNext{ pNext_ } + , pStdReferenceInfo{ pStdReferenceInfo_ } { } @@ -116379,13 +119063,13 @@ namespace VULKAN_HPP_NAMESPACE const uint32_t * pTileOffsets_ = {}, const uint32_t * pTileSizes_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , pStdPictureInfo( pStdPictureInfo_ ) - , referenceNameSlotIndices( referenceNameSlotIndices_ ) - , frameHeaderOffset( frameHeaderOffset_ ) - , tileCount( tileCount_ ) - , pTileOffsets( pTileOffsets_ ) - , pTileSizes( pTileSizes_ ) + : pNext{ pNext_ } + , pStdPictureInfo{ pStdPictureInfo_ } + , referenceNameSlotIndices{ referenceNameSlotIndices_ } + , frameHeaderOffset{ frameHeaderOffset_ } + , tileCount{ tileCount_ } + 
, pTileOffsets{ pTileOffsets_ } + , pTileSizes{ pTileSizes_ } { } @@ -116572,9 +119256,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR VideoDecodeAV1ProfileInfoKHR( StdVideoAV1Profile stdProfile_ = {}, VULKAN_HPP_NAMESPACE::Bool32 filmGrainSupport_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , stdProfile( stdProfile_ ) - , filmGrainSupport( filmGrainSupport_ ) + : pNext{ pNext_ } + , stdProfile{ stdProfile_ } + , filmGrainSupport{ filmGrainSupport_ } { } @@ -116686,8 +119370,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR VideoDecodeAV1SessionParametersCreateInfoKHR( const StdVideoAV1SequenceHeader * pStdSequenceHeader_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , pStdSequenceHeader( pStdSequenceHeader_ ) + : pNext{ pNext_ } + , pStdSequenceHeader{ pStdSequenceHeader_ } { } @@ -116784,8 +119468,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR VideoDecodeCapabilitiesKHR( VULKAN_HPP_NAMESPACE::VideoDecodeCapabilityFlagsKHR flags_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) + : pNext{ pNext_ } + , flags{ flags_ } { } @@ -116868,9 +119552,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR VideoDecodeH264CapabilitiesKHR( StdVideoH264LevelIdc maxLevelIdc_ = {}, VULKAN_HPP_NAMESPACE::Offset2D fieldOffsetGranularity_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , maxLevelIdc( maxLevelIdc_ ) - , fieldOffsetGranularity( fieldOffsetGranularity_ ) + : pNext{ pNext_ } + , maxLevelIdc{ maxLevelIdc_ } + , fieldOffsetGranularity{ fieldOffsetGranularity_ } { } @@ -116962,8 +119646,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR VideoDecodeH264DpbSlotInfoKHR( const StdVideoDecodeH264ReferenceInfo * pStdReferenceInfo_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , pStdReferenceInfo( pStdReferenceInfo_ ) + : pNext{ pNext_ } + , pStdReferenceInfo{ pStdReferenceInfo_ } { } @@ -117062,10 +119746,10 @@ namespace VULKAN_HPP_NAMESPACE uint32_t sliceCount_ = {}, const uint32_t * pSliceOffsets_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , pStdPictureInfo( pStdPictureInfo_ ) - , sliceCount( sliceCount_ ) - , pSliceOffsets( pSliceOffsets_ ) + : pNext{ pNext_ } + , pStdPictureInfo{ pStdPictureInfo_ } + , sliceCount{ sliceCount_ } + , pSliceOffsets{ pSliceOffsets_ } { } @@ -117200,9 +119884,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::VideoDecodeH264PictureLayoutFlagBitsKHR pictureLayout_ = VULKAN_HPP_NAMESPACE::VideoDecodeH264PictureLayoutFlagBitsKHR::eProgressive, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , stdProfileIdc( stdProfileIdc_ ) - , pictureLayout( pictureLayout_ ) + : pNext{ pNext_ } + , stdProfileIdc{ stdProfileIdc_ } + , pictureLayout{ pictureLayout_ } { } @@ -117321,11 +120005,11 @@ namespace VULKAN_HPP_NAMESPACE uint32_t stdPPSCount_ = {}, const StdVideoH264PictureParameterSet * pStdPPSs_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , stdSPSCount( stdSPSCount_ ) - , pStdSPSs( pStdSPSs_ ) - , stdPPSCount( stdPPSCount_ ) - , pStdPPSs( pStdPPSs_ ) + : pNext{ pNext_ } + , stdSPSCount{ stdSPSCount_ } + , pStdSPSs{ pStdSPSs_ } + , stdPPSCount{ stdPPSCount_ } + , pStdPPSs{ pStdPPSs_ } { } @@ -117484,10 +120168,10 @@ namespace 
VULKAN_HPP_NAMESPACE uint32_t maxStdPPSCount_ = {}, const VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersAddInfoKHR * pParametersAddInfo_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , maxStdSPSCount( maxStdSPSCount_ ) - , maxStdPPSCount( maxStdPPSCount_ ) - , pParametersAddInfo( pParametersAddInfo_ ) + : pNext{ pNext_ } + , maxStdSPSCount{ maxStdSPSCount_ } + , maxStdPPSCount{ maxStdPPSCount_ } + , pParametersAddInfo{ pParametersAddInfo_ } { } @@ -117603,8 +120287,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR VideoDecodeH265CapabilitiesKHR( StdVideoH265LevelIdc maxLevelIdc_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , maxLevelIdc( maxLevelIdc_ ) + : pNext{ pNext_ } + , maxLevelIdc{ maxLevelIdc_ } { } @@ -117692,8 +120376,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR VideoDecodeH265DpbSlotInfoKHR( const StdVideoDecodeH265ReferenceInfo * pStdReferenceInfo_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , pStdReferenceInfo( pStdReferenceInfo_ ) + : pNext{ pNext_ } + , pStdReferenceInfo{ pStdReferenceInfo_ } { } @@ -117792,10 +120476,10 @@ namespace VULKAN_HPP_NAMESPACE uint32_t sliceSegmentCount_ = {}, const uint32_t * pSliceSegmentOffsets_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , pStdPictureInfo( pStdPictureInfo_ ) - , sliceSegmentCount( sliceSegmentCount_ ) - , pSliceSegmentOffsets( pSliceSegmentOffsets_ ) + : pNext{ pNext_ } + , pStdPictureInfo{ pStdPictureInfo_ } + , sliceSegmentCount{ sliceSegmentCount_ } + , pSliceSegmentOffsets{ pSliceSegmentOffsets_ } { } @@ -117931,8 +120615,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR VideoDecodeH265ProfileInfoKHR( StdVideoH265ProfileIdc stdProfileIdc_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , stdProfileIdc( stdProfileIdc_ ) + : pNext{ pNext_ } + , stdProfileIdc{ stdProfileIdc_ } { } @@ -118039,13 +120723,13 @@ namespace VULKAN_HPP_NAMESPACE uint32_t stdPPSCount_ = {}, const StdVideoH265PictureParameterSet * pStdPPSs_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , stdVPSCount( stdVPSCount_ ) - , pStdVPSs( pStdVPSs_ ) - , stdSPSCount( stdSPSCount_ ) - , pStdSPSs( pStdSPSs_ ) - , stdPPSCount( stdPPSCount_ ) - , pStdPPSs( pStdPPSs_ ) + : pNext{ pNext_ } + , stdVPSCount{ stdVPSCount_ } + , pStdVPSs{ pStdVPSs_ } + , stdSPSCount{ stdSPSCount_ } + , pStdSPSs{ pStdSPSs_ } + , stdPPSCount{ stdPPSCount_ } + , pStdPPSs{ pStdPPSs_ } { } @@ -118234,11 +120918,11 @@ namespace VULKAN_HPP_NAMESPACE uint32_t maxStdPPSCount_ = {}, const VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersAddInfoKHR * pParametersAddInfo_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , maxStdVPSCount( maxStdVPSCount_ ) - , maxStdSPSCount( maxStdSPSCount_ ) - , maxStdPPSCount( maxStdPPSCount_ ) - , pParametersAddInfo( pParametersAddInfo_ ) + : pNext{ pNext_ } + , maxStdVPSCount{ maxStdVPSCount_ } + , maxStdSPSCount{ maxStdSPSCount_ } + , maxStdPPSCount{ maxStdPPSCount_ } + , pParametersAddInfo{ pParametersAddInfo_ } { } @@ -118370,15 +121054,15 @@ namespace VULKAN_HPP_NAMESPACE uint32_t referenceSlotCount_ = {}, const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pReferenceSlots_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( 
pNext_ ) - , flags( flags_ ) - , srcBuffer( srcBuffer_ ) - , srcBufferOffset( srcBufferOffset_ ) - , srcBufferRange( srcBufferRange_ ) - , dstPictureResource( dstPictureResource_ ) - , pSetupReferenceSlot( pSetupReferenceSlot_ ) - , referenceSlotCount( referenceSlotCount_ ) - , pReferenceSlots( pReferenceSlots_ ) + : pNext{ pNext_ } + , flags{ flags_ } + , srcBuffer{ srcBuffer_ } + , srcBufferOffset{ srcBufferOffset_ } + , srcBufferRange{ srcBufferRange_ } + , dstPictureResource{ dstPictureResource_ } + , pSetupReferenceSlot{ pSetupReferenceSlot_ } + , referenceSlotCount{ referenceSlotCount_ } + , pReferenceSlots{ pReferenceSlots_ } { } @@ -118568,8 +121252,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR VideoDecodeUsageInfoKHR( VULKAN_HPP_NAMESPACE::VideoDecodeUsageFlagsKHR videoUsageHints_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , videoUsageHints( videoUsageHints_ ) + : pNext{ pNext_ } + , videoUsageHints{ videoUsageHints_ } { } @@ -118671,14 +121355,14 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Extent2D encodeInputPictureGranularity_ = {}, VULKAN_HPP_NAMESPACE::VideoEncodeFeedbackFlagsKHR supportedEncodeFeedbackFlags_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , rateControlModes( rateControlModes_ ) - , maxRateControlLayers( maxRateControlLayers_ ) - , maxBitrate( maxBitrate_ ) - , maxQualityLevels( maxQualityLevels_ ) - , encodeInputPictureGranularity( encodeInputPictureGranularity_ ) - , supportedEncodeFeedbackFlags( supportedEncodeFeedbackFlags_ ) + : pNext{ pNext_ } + , flags{ flags_ } + , rateControlModes{ rateControlModes_ } + , maxRateControlLayers{ maxRateControlLayers_ } + , maxBitrate{ maxBitrate_ } + , maxQualityLevels{ maxQualityLevels_ } + , encodeInputPictureGranularity{ encodeInputPictureGranularity_ } + , supportedEncodeFeedbackFlags{ supportedEncodeFeedbackFlags_ } { } @@ -118796,20 +121480,20 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Bool32 requiresGopRemainingFrames_ = {}, VULKAN_HPP_NAMESPACE::VideoEncodeH264StdFlagsKHR stdSyntaxFlags_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , maxLevelIdc( maxLevelIdc_ ) - , maxSliceCount( maxSliceCount_ ) - , maxPPictureL0ReferenceCount( maxPPictureL0ReferenceCount_ ) - , maxBPictureL0ReferenceCount( maxBPictureL0ReferenceCount_ ) - , maxL1ReferenceCount( maxL1ReferenceCount_ ) - , maxTemporalLayerCount( maxTemporalLayerCount_ ) - , expectDyadicTemporalLayerPattern( expectDyadicTemporalLayerPattern_ ) - , minQp( minQp_ ) - , maxQp( maxQp_ ) - , prefersGopRemainingFrames( prefersGopRemainingFrames_ ) - , requiresGopRemainingFrames( requiresGopRemainingFrames_ ) - , stdSyntaxFlags( stdSyntaxFlags_ ) + : pNext{ pNext_ } + , flags{ flags_ } + , maxLevelIdc{ maxLevelIdc_ } + , maxSliceCount{ maxSliceCount_ } + , maxPPictureL0ReferenceCount{ maxPPictureL0ReferenceCount_ } + , maxBPictureL0ReferenceCount{ maxBPictureL0ReferenceCount_ } + , maxL1ReferenceCount{ maxL1ReferenceCount_ } + , maxTemporalLayerCount{ maxTemporalLayerCount_ } + , expectDyadicTemporalLayerPattern{ expectDyadicTemporalLayerPattern_ } + , minQp{ minQp_ } + , maxQp{ maxQp_ } + , prefersGopRemainingFrames{ prefersGopRemainingFrames_ } + , requiresGopRemainingFrames{ requiresGopRemainingFrames_ } + , stdSyntaxFlags{ stdSyntaxFlags_ } { } @@ -118967,8 +121651,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) 
VULKAN_HPP_CONSTEXPR VideoEncodeH264DpbSlotInfoKHR( const StdVideoEncodeH264ReferenceInfo * pStdReferenceInfo_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , pStdReferenceInfo( pStdReferenceInfo_ ) + : pNext{ pNext_ } + , pStdReferenceInfo{ pStdReferenceInfo_ } { } @@ -119061,9 +121745,9 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR VideoEncodeH264FrameSizeKHR( uint32_t frameISize_ = {}, uint32_t framePSize_ = {}, uint32_t frameBSize_ = {} ) VULKAN_HPP_NOEXCEPT - : frameISize( frameISize_ ) - , framePSize( framePSize_ ) - , frameBSize( frameBSize_ ) + : frameISize{ frameISize_ } + , framePSize{ framePSize_ } + , frameBSize{ frameBSize_ } { } @@ -119162,11 +121846,11 @@ namespace VULKAN_HPP_NAMESPACE uint32_t gopRemainingP_ = {}, uint32_t gopRemainingB_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , useGopRemainingFrames( useGopRemainingFrames_ ) - , gopRemainingI( gopRemainingI_ ) - , gopRemainingP( gopRemainingP_ ) - , gopRemainingB( gopRemainingB_ ) + : pNext{ pNext_ } + , useGopRemainingFrames{ useGopRemainingFrames_ } + , gopRemainingI{ gopRemainingI_ } + , gopRemainingP{ gopRemainingP_ } + , gopRemainingB{ gopRemainingB_ } { } @@ -119291,9 +121975,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR VideoEncodeH264NaluSliceInfoKHR( int32_t constantQp_ = {}, const StdVideoEncodeH264SliceHeader * pStdSliceHeader_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , constantQp( constantQp_ ) - , pStdSliceHeader( pStdSliceHeader_ ) + : pNext{ pNext_ } + , constantQp{ constantQp_ } + , pStdSliceHeader{ pStdSliceHeader_ } { } @@ -119399,11 +122083,11 @@ namespace VULKAN_HPP_NAMESPACE const StdVideoEncodeH264PictureInfo * pStdPictureInfo_ = {}, VULKAN_HPP_NAMESPACE::Bool32 generatePrefixNalu_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , naluSliceEntryCount( naluSliceEntryCount_ ) - , pNaluSliceEntries( pNaluSliceEntries_ ) - , pStdPictureInfo( pStdPictureInfo_ ) - , generatePrefixNalu( generatePrefixNalu_ ) + : pNext{ pNext_ } + , naluSliceEntryCount{ naluSliceEntryCount_ } + , pNaluSliceEntries{ pNaluSliceEntries_ } + , pStdPictureInfo{ pStdPictureInfo_ } + , generatePrefixNalu{ generatePrefixNalu_ } { } @@ -119551,8 +122235,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR VideoEncodeH264ProfileInfoKHR( StdVideoH264ProfileIdc stdProfileIdc_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , stdProfileIdc( stdProfileIdc_ ) + : pNext{ pNext_ } + , stdProfileIdc{ stdProfileIdc_ } { } @@ -119650,9 +122334,9 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR VideoEncodeH264QpKHR( int32_t qpI_ = {}, int32_t qpP_ = {}, int32_t qpB_ = {} ) VULKAN_HPP_NOEXCEPT - : qpI( qpI_ ) - , qpP( qpP_ ) - , qpB( qpB_ ) + : qpI{ qpI_ } + , qpP{ qpP_ } + , qpB{ qpB_ } { } @@ -119756,16 +122440,16 @@ namespace VULKAN_HPP_NAMESPACE uint32_t preferredMaxL1ReferenceCount_ = {}, VULKAN_HPP_NAMESPACE::Bool32 preferredStdEntropyCodingModeFlag_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , preferredRateControlFlags( preferredRateControlFlags_ ) - , preferredGopFrameCount( preferredGopFrameCount_ ) - , preferredIdrPeriod( preferredIdrPeriod_ ) - , preferredConsecutiveBFrameCount( preferredConsecutiveBFrameCount_ ) - , preferredTemporalLayerCount( 
preferredTemporalLayerCount_ ) - , preferredConstantQp( preferredConstantQp_ ) - , preferredMaxL0ReferenceCount( preferredMaxL0ReferenceCount_ ) - , preferredMaxL1ReferenceCount( preferredMaxL1ReferenceCount_ ) - , preferredStdEntropyCodingModeFlag( preferredStdEntropyCodingModeFlag_ ) + : pNext{ pNext_ } + , preferredRateControlFlags{ preferredRateControlFlags_ } + , preferredGopFrameCount{ preferredGopFrameCount_ } + , preferredIdrPeriod{ preferredIdrPeriod_ } + , preferredConsecutiveBFrameCount{ preferredConsecutiveBFrameCount_ } + , preferredTemporalLayerCount{ preferredTemporalLayerCount_ } + , preferredConstantQp{ preferredConstantQp_ } + , preferredMaxL0ReferenceCount{ preferredMaxL0ReferenceCount_ } + , preferredMaxL1ReferenceCount{ preferredMaxL1ReferenceCount_ } + , preferredStdEntropyCodingModeFlag{ preferredStdEntropyCodingModeFlag_ } { } @@ -119884,12 +122568,12 @@ namespace VULKAN_HPP_NAMESPACE uint32_t consecutiveBFrameCount_ = {}, uint32_t temporalLayerCount_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , gopFrameCount( gopFrameCount_ ) - , idrPeriod( idrPeriod_ ) - , consecutiveBFrameCount( consecutiveBFrameCount_ ) - , temporalLayerCount( temporalLayerCount_ ) + : pNext{ pNext_ } + , flags{ flags_ } + , gopFrameCount{ gopFrameCount_ } + , idrPeriod{ idrPeriod_ } + , consecutiveBFrameCount{ consecutiveBFrameCount_ } + , temporalLayerCount{ temporalLayerCount_ } { } @@ -120025,13 +122709,13 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Bool32 useMaxFrameSize_ = {}, VULKAN_HPP_NAMESPACE::VideoEncodeH264FrameSizeKHR maxFrameSize_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , useMinQp( useMinQp_ ) - , minQp( minQp_ ) - , useMaxQp( useMaxQp_ ) - , maxQp( maxQp_ ) - , useMaxFrameSize( useMaxFrameSize_ ) - , maxFrameSize( maxFrameSize_ ) + : pNext{ pNext_ } + , useMinQp{ useMinQp_ } + , minQp{ minQp_ } + , useMaxQp{ useMaxQp_ } + , maxQp{ maxQp_ } + , useMaxFrameSize{ useMaxFrameSize_ } + , maxFrameSize{ maxFrameSize_ } { } @@ -120172,9 +122856,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR VideoEncodeH264SessionCreateInfoKHR( VULKAN_HPP_NAMESPACE::Bool32 useMaxLevelIdc_ = {}, StdVideoH264LevelIdc maxLevelIdc_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , useMaxLevelIdc( useMaxLevelIdc_ ) - , maxLevelIdc( maxLevelIdc_ ) + : pNext{ pNext_ } + , useMaxLevelIdc{ useMaxLevelIdc_ } + , maxLevelIdc{ maxLevelIdc_ } { } @@ -120289,11 +122973,11 @@ namespace VULKAN_HPP_NAMESPACE uint32_t stdPPSCount_ = {}, const StdVideoH264PictureParameterSet * pStdPPSs_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , stdSPSCount( stdSPSCount_ ) - , pStdSPSs( pStdSPSs_ ) - , stdPPSCount( stdPPSCount_ ) - , pStdPPSs( pStdPPSs_ ) + : pNext{ pNext_ } + , stdSPSCount{ stdSPSCount_ } + , pStdSPSs{ pStdSPSs_ } + , stdPPSCount{ stdPPSCount_ } + , pStdPPSs{ pStdPPSs_ } { } @@ -120452,10 +123136,10 @@ namespace VULKAN_HPP_NAMESPACE uint32_t maxStdPPSCount_ = {}, const VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersAddInfoKHR * pParametersAddInfo_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , maxStdSPSCount( maxStdSPSCount_ ) - , maxStdPPSCount( maxStdPPSCount_ ) - , pParametersAddInfo( pParametersAddInfo_ ) + : pNext{ pNext_ } + , maxStdSPSCount{ maxStdSPSCount_ } + , maxStdPPSCount{ maxStdPPSCount_ } + , pParametersAddInfo{ pParametersAddInfo_ } { } @@ -120573,9 +123257,9 @@ namespace 
VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR VideoEncodeH264SessionParametersFeedbackInfoKHR( VULKAN_HPP_NAMESPACE::Bool32 hasStdSPSOverrides_ = {}, VULKAN_HPP_NAMESPACE::Bool32 hasStdPPSOverrides_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , hasStdSPSOverrides( hasStdSPSOverrides_ ) - , hasStdPPSOverrides( hasStdPPSOverrides_ ) + : pNext{ pNext_ } + , hasStdSPSOverrides{ hasStdSPSOverrides_ } + , hasStdPPSOverrides{ hasStdPPSOverrides_ } { } @@ -120663,11 +123347,11 @@ namespace VULKAN_HPP_NAMESPACE uint32_t stdSPSId_ = {}, uint32_t stdPPSId_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , writeStdSPS( writeStdSPS_ ) - , writeStdPPS( writeStdPPS_ ) - , stdSPSId( stdSPSId_ ) - , stdPPSId( stdPPSId_ ) + : pNext{ pNext_ } + , writeStdSPS{ writeStdSPS_ } + , writeStdPPS{ writeStdPPS_ } + , stdSPSId{ stdSPSId_ } + , stdPPSId{ stdPPSId_ } { } @@ -120805,23 +123489,23 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Bool32 requiresGopRemainingFrames_ = {}, VULKAN_HPP_NAMESPACE::VideoEncodeH265StdFlagsKHR stdSyntaxFlags_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , maxLevelIdc( maxLevelIdc_ ) - , maxSliceSegmentCount( maxSliceSegmentCount_ ) - , maxTiles( maxTiles_ ) - , ctbSizes( ctbSizes_ ) - , transformBlockSizes( transformBlockSizes_ ) - , maxPPictureL0ReferenceCount( maxPPictureL0ReferenceCount_ ) - , maxBPictureL0ReferenceCount( maxBPictureL0ReferenceCount_ ) - , maxL1ReferenceCount( maxL1ReferenceCount_ ) - , maxSubLayerCount( maxSubLayerCount_ ) - , expectDyadicTemporalSubLayerPattern( expectDyadicTemporalSubLayerPattern_ ) - , minQp( minQp_ ) - , maxQp( maxQp_ ) - , prefersGopRemainingFrames( prefersGopRemainingFrames_ ) - , requiresGopRemainingFrames( requiresGopRemainingFrames_ ) - , stdSyntaxFlags( stdSyntaxFlags_ ) + : pNext{ pNext_ } + , flags{ flags_ } + , maxLevelIdc{ maxLevelIdc_ } + , maxSliceSegmentCount{ maxSliceSegmentCount_ } + , maxTiles{ maxTiles_ } + , ctbSizes{ ctbSizes_ } + , transformBlockSizes{ transformBlockSizes_ } + , maxPPictureL0ReferenceCount{ maxPPictureL0ReferenceCount_ } + , maxBPictureL0ReferenceCount{ maxBPictureL0ReferenceCount_ } + , maxL1ReferenceCount{ maxL1ReferenceCount_ } + , maxSubLayerCount{ maxSubLayerCount_ } + , expectDyadicTemporalSubLayerPattern{ expectDyadicTemporalSubLayerPattern_ } + , minQp{ minQp_ } + , maxQp{ maxQp_ } + , prefersGopRemainingFrames{ prefersGopRemainingFrames_ } + , requiresGopRemainingFrames{ requiresGopRemainingFrames_ } + , stdSyntaxFlags{ stdSyntaxFlags_ } { } @@ -120995,8 +123679,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR VideoEncodeH265DpbSlotInfoKHR( const StdVideoEncodeH265ReferenceInfo * pStdReferenceInfo_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , pStdReferenceInfo( pStdReferenceInfo_ ) + : pNext{ pNext_ } + , pStdReferenceInfo{ pStdReferenceInfo_ } { } @@ -121089,9 +123773,9 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR VideoEncodeH265FrameSizeKHR( uint32_t frameISize_ = {}, uint32_t framePSize_ = {}, uint32_t frameBSize_ = {} ) VULKAN_HPP_NOEXCEPT - : frameISize( frameISize_ ) - , framePSize( framePSize_ ) - , frameBSize( frameBSize_ ) + : frameISize{ frameISize_ } + , framePSize{ framePSize_ } + , frameBSize{ frameBSize_ } { } @@ -121190,11 +123874,11 @@ namespace VULKAN_HPP_NAMESPACE uint32_t gopRemainingP_ = {}, uint32_t 
gopRemainingB_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , useGopRemainingFrames( useGopRemainingFrames_ ) - , gopRemainingI( gopRemainingI_ ) - , gopRemainingP( gopRemainingP_ ) - , gopRemainingB( gopRemainingB_ ) + : pNext{ pNext_ } + , useGopRemainingFrames{ useGopRemainingFrames_ } + , gopRemainingI{ gopRemainingI_ } + , gopRemainingP{ gopRemainingP_ } + , gopRemainingB{ gopRemainingB_ } { } @@ -121319,9 +124003,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR VideoEncodeH265NaluSliceSegmentInfoKHR( int32_t constantQp_ = {}, const StdVideoEncodeH265SliceSegmentHeader * pStdSliceSegmentHeader_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , constantQp( constantQp_ ) - , pStdSliceSegmentHeader( pStdSliceSegmentHeader_ ) + : pNext{ pNext_ } + , constantQp{ constantQp_ } + , pStdSliceSegmentHeader{ pStdSliceSegmentHeader_ } { } @@ -121427,10 +124111,10 @@ namespace VULKAN_HPP_NAMESPACE const VULKAN_HPP_NAMESPACE::VideoEncodeH265NaluSliceSegmentInfoKHR * pNaluSliceSegmentEntries_ = {}, const StdVideoEncodeH265PictureInfo * pStdPictureInfo_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , naluSliceSegmentEntryCount( naluSliceSegmentEntryCount_ ) - , pNaluSliceSegmentEntries( pNaluSliceSegmentEntries_ ) - , pStdPictureInfo( pStdPictureInfo_ ) + : pNext{ pNext_ } + , naluSliceSegmentEntryCount{ naluSliceSegmentEntryCount_ } + , pNaluSliceSegmentEntries{ pNaluSliceSegmentEntries_ } + , pStdPictureInfo{ pStdPictureInfo_ } { } @@ -121569,8 +124253,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR VideoEncodeH265ProfileInfoKHR( StdVideoH265ProfileIdc stdProfileIdc_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , stdProfileIdc( stdProfileIdc_ ) + : pNext{ pNext_ } + , stdProfileIdc{ stdProfileIdc_ } { } @@ -121668,9 +124352,9 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR VideoEncodeH265QpKHR( int32_t qpI_ = {}, int32_t qpP_ = {}, int32_t qpB_ = {} ) VULKAN_HPP_NOEXCEPT - : qpI( qpI_ ) - , qpP( qpP_ ) - , qpB( qpB_ ) + : qpI{ qpI_ } + , qpP{ qpP_ } + , qpB{ qpB_ } { } @@ -121773,15 +124457,15 @@ namespace VULKAN_HPP_NAMESPACE uint32_t preferredMaxL0ReferenceCount_ = {}, uint32_t preferredMaxL1ReferenceCount_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , preferredRateControlFlags( preferredRateControlFlags_ ) - , preferredGopFrameCount( preferredGopFrameCount_ ) - , preferredIdrPeriod( preferredIdrPeriod_ ) - , preferredConsecutiveBFrameCount( preferredConsecutiveBFrameCount_ ) - , preferredSubLayerCount( preferredSubLayerCount_ ) - , preferredConstantQp( preferredConstantQp_ ) - , preferredMaxL0ReferenceCount( preferredMaxL0ReferenceCount_ ) - , preferredMaxL1ReferenceCount( preferredMaxL1ReferenceCount_ ) + : pNext{ pNext_ } + , preferredRateControlFlags{ preferredRateControlFlags_ } + , preferredGopFrameCount{ preferredGopFrameCount_ } + , preferredIdrPeriod{ preferredIdrPeriod_ } + , preferredConsecutiveBFrameCount{ preferredConsecutiveBFrameCount_ } + , preferredSubLayerCount{ preferredSubLayerCount_ } + , preferredConstantQp{ preferredConstantQp_ } + , preferredMaxL0ReferenceCount{ preferredMaxL0ReferenceCount_ } + , preferredMaxL1ReferenceCount{ preferredMaxL1ReferenceCount_ } { } @@ -121896,12 +124580,12 @@ namespace VULKAN_HPP_NAMESPACE uint32_t consecutiveBFrameCount_ = {}, uint32_t subLayerCount_ = {}, 
const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , gopFrameCount( gopFrameCount_ ) - , idrPeriod( idrPeriod_ ) - , consecutiveBFrameCount( consecutiveBFrameCount_ ) - , subLayerCount( subLayerCount_ ) + : pNext{ pNext_ } + , flags{ flags_ } + , gopFrameCount{ gopFrameCount_ } + , idrPeriod{ idrPeriod_ } + , consecutiveBFrameCount{ consecutiveBFrameCount_ } + , subLayerCount{ subLayerCount_ } { } @@ -122037,13 +124721,13 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Bool32 useMaxFrameSize_ = {}, VULKAN_HPP_NAMESPACE::VideoEncodeH265FrameSizeKHR maxFrameSize_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , useMinQp( useMinQp_ ) - , minQp( minQp_ ) - , useMaxQp( useMaxQp_ ) - , maxQp( maxQp_ ) - , useMaxFrameSize( useMaxFrameSize_ ) - , maxFrameSize( maxFrameSize_ ) + : pNext{ pNext_ } + , useMinQp{ useMinQp_ } + , minQp{ minQp_ } + , useMaxQp{ useMaxQp_ } + , maxQp{ maxQp_ } + , useMaxFrameSize{ useMaxFrameSize_ } + , maxFrameSize{ maxFrameSize_ } { } @@ -122184,9 +124868,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR VideoEncodeH265SessionCreateInfoKHR( VULKAN_HPP_NAMESPACE::Bool32 useMaxLevelIdc_ = {}, StdVideoH265LevelIdc maxLevelIdc_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , useMaxLevelIdc( useMaxLevelIdc_ ) - , maxLevelIdc( maxLevelIdc_ ) + : pNext{ pNext_ } + , useMaxLevelIdc{ useMaxLevelIdc_ } + , maxLevelIdc{ maxLevelIdc_ } { } @@ -122303,13 +124987,13 @@ namespace VULKAN_HPP_NAMESPACE uint32_t stdPPSCount_ = {}, const StdVideoH265PictureParameterSet * pStdPPSs_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , stdVPSCount( stdVPSCount_ ) - , pStdVPSs( pStdVPSs_ ) - , stdSPSCount( stdSPSCount_ ) - , pStdSPSs( pStdSPSs_ ) - , stdPPSCount( stdPPSCount_ ) - , pStdPPSs( pStdPPSs_ ) + : pNext{ pNext_ } + , stdVPSCount{ stdVPSCount_ } + , pStdVPSs{ pStdVPSs_ } + , stdSPSCount{ stdSPSCount_ } + , pStdSPSs{ pStdSPSs_ } + , stdPPSCount{ stdPPSCount_ } + , pStdPPSs{ pStdPPSs_ } { } @@ -122498,11 +125182,11 @@ namespace VULKAN_HPP_NAMESPACE uint32_t maxStdPPSCount_ = {}, const VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersAddInfoKHR * pParametersAddInfo_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , maxStdVPSCount( maxStdVPSCount_ ) - , maxStdSPSCount( maxStdSPSCount_ ) - , maxStdPPSCount( maxStdPPSCount_ ) - , pParametersAddInfo( pParametersAddInfo_ ) + : pNext{ pNext_ } + , maxStdVPSCount{ maxStdVPSCount_ } + , maxStdSPSCount{ maxStdSPSCount_ } + , maxStdPPSCount{ maxStdPPSCount_ } + , pParametersAddInfo{ pParametersAddInfo_ } { } @@ -122629,10 +125313,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Bool32 hasStdSPSOverrides_ = {}, VULKAN_HPP_NAMESPACE::Bool32 hasStdPPSOverrides_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , hasStdVPSOverrides( hasStdVPSOverrides_ ) - , hasStdSPSOverrides( hasStdSPSOverrides_ ) - , hasStdPPSOverrides( hasStdPPSOverrides_ ) + : pNext{ pNext_ } + , hasStdVPSOverrides{ hasStdVPSOverrides_ } + , hasStdSPSOverrides{ hasStdSPSOverrides_ } + , hasStdPPSOverrides{ hasStdPPSOverrides_ } { } @@ -122727,13 +125411,13 @@ namespace VULKAN_HPP_NAMESPACE uint32_t stdSPSId_ = {}, uint32_t stdPPSId_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , writeStdVPS( writeStdVPS_ ) - , writeStdSPS( writeStdSPS_ ) - , writeStdPPS( writeStdPPS_ ) - , stdVPSId( stdVPSId_ ) - , stdSPSId( stdSPSId_ ) - , 
stdPPSId( stdPPSId_ ) + : pNext{ pNext_ } + , writeStdVPS{ writeStdVPS_ } + , writeStdSPS{ writeStdSPS_ } + , writeStdPPS{ writeStdPPS_ } + , stdVPSId{ stdVPSId_ } + , stdSPSId{ stdSPSId_ } + , stdPPSId{ stdPPSId_ } { } @@ -122880,16 +125564,16 @@ namespace VULKAN_HPP_NAMESPACE const VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR * pReferenceSlots_ = {}, uint32_t precedingExternallyEncodedBytes_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , dstBuffer( dstBuffer_ ) - , dstBufferOffset( dstBufferOffset_ ) - , dstBufferRange( dstBufferRange_ ) - , srcPictureResource( srcPictureResource_ ) - , pSetupReferenceSlot( pSetupReferenceSlot_ ) - , referenceSlotCount( referenceSlotCount_ ) - , pReferenceSlots( pReferenceSlots_ ) - , precedingExternallyEncodedBytes( precedingExternallyEncodedBytes_ ) + : pNext{ pNext_ } + , flags{ flags_ } + , dstBuffer{ dstBuffer_ } + , dstBufferOffset{ dstBufferOffset_ } + , dstBufferRange{ dstBufferRange_ } + , srcPictureResource{ srcPictureResource_ } + , pSetupReferenceSlot{ pSetupReferenceSlot_ } + , referenceSlotCount{ referenceSlotCount_ } + , pReferenceSlots{ pReferenceSlots_ } + , precedingExternallyEncodedBytes{ precedingExternallyEncodedBytes_ } { } @@ -123097,8 +125781,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR VideoEncodeQualityLevelInfoKHR( uint32_t qualityLevel_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , qualityLevel( qualityLevel_ ) + : pNext{ pNext_ } + , qualityLevel{ qualityLevel_ } { } @@ -123196,9 +125880,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagBitsKHR::eDefault, uint32_t preferredRateControlLayerCount_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , preferredRateControlMode( preferredRateControlMode_ ) - , preferredRateControlLayerCount( preferredRateControlLayerCount_ ) + : pNext{ pNext_ } + , preferredRateControlMode{ preferredRateControlMode_ } + , preferredRateControlLayerCount{ preferredRateControlLayerCount_ } { } @@ -123287,11 +125971,11 @@ namespace VULKAN_HPP_NAMESPACE uint32_t frameRateNumerator_ = {}, uint32_t frameRateDenominator_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , averageBitrate( averageBitrate_ ) - , maxBitrate( maxBitrate_ ) - , frameRateNumerator( frameRateNumerator_ ) - , frameRateDenominator( frameRateDenominator_ ) + : pNext{ pNext_ } + , averageBitrate{ averageBitrate_ } + , maxBitrate{ maxBitrate_ } + , frameRateNumerator{ frameRateNumerator_ } + , frameRateDenominator{ frameRateDenominator_ } { } @@ -123415,13 +126099,13 @@ namespace VULKAN_HPP_NAMESPACE uint32_t virtualBufferSizeInMs_ = {}, uint32_t initialVirtualBufferSizeInMs_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , rateControlMode( rateControlMode_ ) - , layerCount( layerCount_ ) - , pLayers( pLayers_ ) - , virtualBufferSizeInMs( virtualBufferSizeInMs_ ) - , initialVirtualBufferSizeInMs( initialVirtualBufferSizeInMs_ ) + : pNext{ pNext_ } + , flags{ flags_ } + , rateControlMode{ rateControlMode_ } + , layerCount{ layerCount_ } + , pLayers{ pLayers_ } + , virtualBufferSizeInMs{ virtualBufferSizeInMs_ } + , initialVirtualBufferSizeInMs{ initialVirtualBufferSizeInMs_ } { } @@ -123592,8 +126276,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR 
VideoEncodeSessionParametersFeedbackInfoKHR( VULKAN_HPP_NAMESPACE::Bool32 hasOverrides_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , hasOverrides( hasOverrides_ ) + : pNext{ pNext_ } + , hasOverrides{ hasOverrides_ } { } @@ -123675,8 +126359,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR VideoEncodeSessionParametersGetInfoKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , videoSessionParameters( videoSessionParameters_ ) + : pNext{ pNext_ } + , videoSessionParameters{ videoSessionParameters_ } { } @@ -123776,10 +126460,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::VideoEncodeContentFlagsKHR videoContentHints_ = {}, VULKAN_HPP_NAMESPACE::VideoEncodeTuningModeKHR tuningMode_ = VULKAN_HPP_NAMESPACE::VideoEncodeTuningModeKHR::eDefault, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , videoUsageHints( videoUsageHints_ ) - , videoContentHints( videoContentHints_ ) - , tuningMode( tuningMode_ ) + : pNext{ pNext_ } + , videoUsageHints{ videoUsageHints_ } + , videoContentHints{ videoContentHints_ } + , tuningMode{ tuningMode_ } { } @@ -123894,8 +126578,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR VideoEndCodingInfoKHR( VULKAN_HPP_NAMESPACE::VideoEndCodingFlagsKHR flags_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) + : pNext{ pNext_ } + , flags{ flags_ } { } @@ -123996,13 +126680,13 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::ImageTiling imageTiling_ = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal, VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsageFlags_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , format( format_ ) - , componentMapping( componentMapping_ ) - , imageCreateFlags( imageCreateFlags_ ) - , imageType( imageType_ ) - , imageTiling( imageTiling_ ) - , imageUsageFlags( imageUsageFlags_ ) + : pNext{ pNext_ } + , format{ format_ } + , componentMapping{ componentMapping_ } + , imageCreateFlags{ imageCreateFlags_ } + , imageType{ imageType_ } + , imageTiling{ imageTiling_ } + , imageUsageFlags{ imageUsageFlags_ } { } @@ -124100,10 +126784,10 @@ namespace VULKAN_HPP_NAMESPACE uint32_t firstQuery_ = {}, uint32_t queryCount_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , queryPool( queryPool_ ) - , firstQuery( firstQuery_ ) - , queryCount( queryCount_ ) + : pNext{ pNext_ } + , queryPool{ queryPool_ } + , firstQuery{ firstQuery_ } + , queryCount{ queryCount_ } { } @@ -124215,9 +126899,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR VideoProfileListInfoKHR( uint32_t profileCount_ = {}, const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR * pProfiles_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , profileCount( profileCount_ ) - , pProfiles( pProfiles_ ) + : pNext{ pNext_ } + , profileCount{ profileCount_ } + , pProfiles{ pProfiles_ } { } @@ -124346,16 +127030,16 @@ namespace VULKAN_HPP_NAMESPACE uint32_t maxActiveReferencePictures_ = {}, const VULKAN_HPP_NAMESPACE::ExtensionProperties * pStdHeaderVersion_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , queueFamilyIndex( queueFamilyIndex_ ) - , flags( flags_ ) - , pVideoProfile( pVideoProfile_ ) - , pictureFormat( pictureFormat_ ) - , maxCodedExtent( 
maxCodedExtent_ ) - , referencePictureFormat( referencePictureFormat_ ) - , maxDpbSlots( maxDpbSlots_ ) - , maxActiveReferencePictures( maxActiveReferencePictures_ ) - , pStdHeaderVersion( pStdHeaderVersion_ ) + : pNext{ pNext_ } + , queueFamilyIndex{ queueFamilyIndex_ } + , flags{ flags_ } + , pVideoProfile{ pVideoProfile_ } + , pictureFormat{ pictureFormat_ } + , maxCodedExtent{ maxCodedExtent_ } + , referencePictureFormat{ referencePictureFormat_ } + , maxDpbSlots{ maxDpbSlots_ } + , maxActiveReferencePictures{ maxActiveReferencePictures_ } + , pStdHeaderVersion{ pStdHeaderVersion_ } { } @@ -124532,9 +127216,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR VideoSessionMemoryRequirementsKHR( uint32_t memoryBindIndex_ = {}, VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , memoryBindIndex( memoryBindIndex_ ) - , memoryRequirements( memoryRequirements_ ) + : pNext{ pNext_ } + , memoryBindIndex{ memoryBindIndex_ } + , memoryRequirements{ memoryRequirements_ } { } @@ -124619,10 +127303,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParametersTemplate_ = {}, VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , videoSessionParametersTemplate( videoSessionParametersTemplate_ ) - , videoSession( videoSession_ ) + : pNext{ pNext_ } + , flags{ flags_ } + , videoSessionParametersTemplate{ videoSessionParametersTemplate_ } + , videoSession{ videoSession_ } { } @@ -124738,8 +127422,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR VideoSessionParametersUpdateInfoKHR( uint32_t updateSequenceCount_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , updateSequenceCount( updateSequenceCount_ ) + : pNext{ pNext_ } + , updateSequenceCount{ updateSequenceCount_ } { } @@ -124838,10 +127522,10 @@ namespace VULKAN_HPP_NAMESPACE struct wl_display * display_ = {}, struct wl_surface * surface_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , display( display_ ) - , surface( surface_ ) + : pNext{ pNext_ } + , flags{ flags_ } + , display{ display_ } + , surface{ surface_ } { } @@ -124963,14 +127647,14 @@ namespace VULKAN_HPP_NAMESPACE const VULKAN_HPP_NAMESPACE::DeviceMemory * pReleaseSyncs_ = {}, const uint64_t * pReleaseKeys_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , acquireCount( acquireCount_ ) - , pAcquireSyncs( pAcquireSyncs_ ) - , pAcquireKeys( pAcquireKeys_ ) - , pAcquireTimeouts( pAcquireTimeouts_ ) - , releaseCount( releaseCount_ ) - , pReleaseSyncs( pReleaseSyncs_ ) - , pReleaseKeys( pReleaseKeys_ ) + : pNext{ pNext_ } + , acquireCount{ acquireCount_ } + , pAcquireSyncs{ pAcquireSyncs_ } + , pAcquireKeys{ pAcquireKeys_ } + , pAcquireTimeouts{ pAcquireTimeouts_ } + , releaseCount{ releaseCount_ } + , pReleaseSyncs{ pReleaseSyncs_ } + , pReleaseKeys{ pReleaseKeys_ } { } @@ -125228,14 +127912,14 @@ namespace VULKAN_HPP_NAMESPACE const VULKAN_HPP_NAMESPACE::DeviceMemory * pReleaseSyncs_ = {}, const uint64_t * pReleaseKeys_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , acquireCount( acquireCount_ ) - , pAcquireSyncs( pAcquireSyncs_ ) - , pAcquireKeys( pAcquireKeys_ ) - , pAcquireTimeoutMilliseconds( pAcquireTimeoutMilliseconds_ ) - , releaseCount( 
releaseCount_ ) - , pReleaseSyncs( pReleaseSyncs_ ) - , pReleaseKeys( pReleaseKeys_ ) + : pNext{ pNext_ } + , acquireCount{ acquireCount_ } + , pAcquireSyncs{ pAcquireSyncs_ } + , pAcquireKeys{ pAcquireKeys_ } + , pAcquireTimeoutMilliseconds{ pAcquireTimeoutMilliseconds_ } + , releaseCount{ releaseCount_ } + , pReleaseSyncs{ pReleaseSyncs_ } + , pReleaseKeys{ pReleaseKeys_ } { } @@ -125492,10 +128176,10 @@ namespace VULKAN_HPP_NAMESPACE HINSTANCE hinstance_ = {}, HWND hwnd_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , hinstance( hinstance_ ) - , hwnd( hwnd_ ) + : pNext{ pNext_ } + , flags{ flags_ } + , hinstance{ hinstance_ } + , hwnd{ hwnd_ } { } @@ -125611,9 +128295,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR WriteDescriptorSetAccelerationStructureKHR( uint32_t accelerationStructureCount_ = {}, const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , accelerationStructureCount( accelerationStructureCount_ ) - , pAccelerationStructures( pAccelerationStructures_ ) + : pNext{ pNext_ } + , accelerationStructureCount{ accelerationStructureCount_ } + , pAccelerationStructures{ pAccelerationStructures_ } { } @@ -125742,9 +128426,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR WriteDescriptorSetAccelerationStructureNV( uint32_t accelerationStructureCount_ = {}, const VULKAN_HPP_NAMESPACE::AccelerationStructureNV * pAccelerationStructures_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , accelerationStructureCount( accelerationStructureCount_ ) - , pAccelerationStructures( pAccelerationStructures_ ) + : pNext{ pNext_ } + , accelerationStructureCount{ accelerationStructureCount_ } + , pAccelerationStructures{ pAccelerationStructures_ } { } @@ -125872,9 +128556,9 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR WriteDescriptorSetInlineUniformBlock( uint32_t dataSize_ = {}, const void * pData_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , dataSize( dataSize_ ) - , pData( pData_ ) + : pNext{ pNext_ } + , dataSize{ dataSize_ } + , pData{ pData_ } { } @@ -126000,10 +128684,10 @@ namespace VULKAN_HPP_NAMESPACE xcb_connection_t * connection_ = {}, xcb_window_t window_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , connection( connection_ ) - , window( window_ ) + : pNext{ pNext_ } + , flags{ flags_ } + , connection{ connection_ } + , window{ window_ } { } @@ -126132,10 +128816,10 @@ namespace VULKAN_HPP_NAMESPACE Display * dpy_ = {}, Window window_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext( pNext_ ) - , flags( flags_ ) - , dpy( dpy_ ) - , window( window_ ) + : pNext{ pNext_ } + , flags{ flags_ } + , dpy{ dpy_ } + , window{ window_ } { } diff --git a/third_party/vulkan/vulkan_to_string.hpp b/third_party/vulkan/vulkan_to_string.hpp index f32d156..8417c14 100644 --- a/third_party/vulkan/vulkan_to_string.hpp +++ b/third_party/vulkan/vulkan_to_string.hpp @@ -77,14 +77,14 @@ namespace VULKAN_HPP_NAMESPACE result += "CositedChromaSamples | "; if ( value & FormatFeatureFlagBits::eSampledImageFilterMinmax ) result += "SampledImageFilterMinmax | "; - if ( value & FormatFeatureFlagBits::eSampledImageFilterCubicEXT ) - result += "SampledImageFilterCubicEXT | "; if ( value & FormatFeatureFlagBits::eVideoDecodeOutputKHR ) result += 
"VideoDecodeOutputKHR | "; if ( value & FormatFeatureFlagBits::eVideoDecodeDpbKHR ) result += "VideoDecodeDpbKHR | "; if ( value & FormatFeatureFlagBits::eAccelerationStructureVertexBufferKHR ) result += "AccelerationStructureVertexBufferKHR | "; + if ( value & FormatFeatureFlagBits::eSampledImageFilterCubicEXT ) + result += "SampledImageFilterCubicEXT | "; if ( value & FormatFeatureFlagBits::eFragmentDensityMapEXT ) result += "FragmentDensityMapEXT | "; if ( value & FormatFeatureFlagBits::eFragmentShadingRateAttachmentKHR ) @@ -175,10 +175,10 @@ namespace VULKAN_HPP_NAMESPACE result += "VideoDecodeSrcKHR | "; if ( value & ImageUsageFlagBits::eVideoDecodeDpbKHR ) result += "VideoDecodeDpbKHR | "; - if ( value & ImageUsageFlagBits::eFragmentShadingRateAttachmentKHR ) - result += "FragmentShadingRateAttachmentKHR | "; if ( value & ImageUsageFlagBits::eFragmentDensityMapEXT ) result += "FragmentDensityMapEXT | "; + if ( value & ImageUsageFlagBits::eFragmentShadingRateAttachmentKHR ) + result += "FragmentShadingRateAttachmentKHR | "; if ( value & ImageUsageFlagBits::eHostTransferEXT ) result += "HostTransferEXT | "; if ( value & ImageUsageFlagBits::eVideoEncodeDstKHR ) @@ -368,16 +368,16 @@ namespace VULKAN_HPP_NAMESPACE result += "AccelerationStructureBuildKHR | "; if ( value & PipelineStageFlagBits::eRayTracingShaderKHR ) result += "RayTracingShaderKHR | "; + if ( value & PipelineStageFlagBits::eFragmentDensityProcessEXT ) + result += "FragmentDensityProcessEXT | "; if ( value & PipelineStageFlagBits::eFragmentShadingRateAttachmentKHR ) result += "FragmentShadingRateAttachmentKHR | "; + if ( value & PipelineStageFlagBits::eCommandPreprocessNV ) + result += "CommandPreprocessNV | "; if ( value & PipelineStageFlagBits::eTaskShaderEXT ) result += "TaskShaderEXT | "; if ( value & PipelineStageFlagBits::eMeshShaderEXT ) result += "MeshShaderEXT | "; - if ( value & PipelineStageFlagBits::eFragmentDensityProcessEXT ) - result += "FragmentDensityProcessEXT | "; - if ( value & PipelineStageFlagBits::eCommandPreprocessNV ) - result += "CommandPreprocessNV | "; return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } @@ -966,10 +966,10 @@ namespace VULKAN_HPP_NAMESPACE result += "DescriptorBufferEXT | "; if ( value & DescriptorSetLayoutCreateFlagBits::eEmbeddedImmutableSamplersEXT ) result += "EmbeddedImmutableSamplersEXT | "; - if ( value & DescriptorSetLayoutCreateFlagBits::eHostOnlyPoolEXT ) - result += "HostOnlyPoolEXT | "; if ( value & DescriptorSetLayoutCreateFlagBits::eIndirectBindableNV ) result += "IndirectBindableNV | "; + if ( value & DescriptorSetLayoutCreateFlagBits::eHostOnlyPoolEXT ) + result += "HostOnlyPoolEXT | "; if ( value & DescriptorSetLayoutCreateFlagBits::ePerStageNV ) result += "PerStageNV | "; @@ -1030,10 +1030,10 @@ namespace VULKAN_HPP_NAMESPACE result += "AccelerationStructureReadKHR | "; if ( value & AccessFlagBits::eAccelerationStructureWriteKHR ) result += "AccelerationStructureWriteKHR | "; - if ( value & AccessFlagBits::eFragmentShadingRateAttachmentReadKHR ) - result += "FragmentShadingRateAttachmentReadKHR | "; if ( value & AccessFlagBits::eFragmentDensityMapReadEXT ) result += "FragmentDensityMapReadEXT | "; + if ( value & AccessFlagBits::eFragmentShadingRateAttachmentReadKHR ) + result += "FragmentShadingRateAttachmentReadKHR | "; if ( value & AccessFlagBits::eCommandPreprocessReadNV ) result += "CommandPreprocessReadNV | "; if ( value & AccessFlagBits::eCommandPreprocessWriteNV ) @@ -1747,10 +1747,10 @@ namespace VULKAN_HPP_NAMESPACE result += "Suspending | 
"; if ( value & RenderingFlagBits::eResuming ) result += "Resuming | "; - if ( value & RenderingFlagBits::eContentsInlineEXT ) - result += "ContentsInlineEXT | "; if ( value & RenderingFlagBits::eEnableLegacyDitheringEXT ) result += "EnableLegacyDitheringEXT | "; + if ( value & RenderingFlagBits::eContentsInlineKHR ) + result += "ContentsInlineKHR | "; return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } @@ -2915,8 +2915,8 @@ namespace VULKAN_HPP_NAMESPACE std::string result; if ( value & VideoEncodeCapabilityFlagBitsKHR::ePrecedingExternallyEncodedBytes ) result += "PrecedingExternallyEncodedBytes | "; - if ( value & VideoEncodeCapabilityFlagBitsKHR::eInsufficientstreamBufferRangeDetectionBit ) - result += "InsufficientstreamBufferRangeDetectionBit | "; + if ( value & VideoEncodeCapabilityFlagBitsKHR::eInsufficientBitstreamBufferRangeDetection ) + result += "InsufficientBitstreamBufferRangeDetection | "; return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } @@ -2927,12 +2927,12 @@ namespace VULKAN_HPP_NAMESPACE return "{}"; std::string result; - if ( value & VideoEncodeFeedbackFlagBitsKHR::estreamBufferOffsetBit ) - result += "streamBufferOffsetBit | "; - if ( value & VideoEncodeFeedbackFlagBitsKHR::estreamBytesWrittenBit ) - result += "streamBytesWrittenBit | "; - if ( value & VideoEncodeFeedbackFlagBitsKHR::estreamHasOverridesBit ) - result += "streamHasOverridesBit | "; + if ( value & VideoEncodeFeedbackFlagBitsKHR::eBitstreamBufferOffset ) + result += "BitstreamBufferOffset | "; + if ( value & VideoEncodeFeedbackFlagBitsKHR::eBitstreamBytesWritten ) + result += "BitstreamBytesWritten | "; + if ( value & VideoEncodeFeedbackFlagBitsKHR::eBitstreamHasOverrides ) + result += "BitstreamHasOverrides | "; return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } @@ -3372,6 +3372,8 @@ namespace VULKAN_HPP_NAMESPACE result += "AllowDerivatives | "; if ( value & PipelineCreateFlagBits2KHR::eDerivative ) result += "Derivative | "; + if ( value & PipelineCreateFlagBits2KHR::eEnableLegacyDitheringEXT ) + result += "EnableLegacyDitheringEXT | "; if ( value & PipelineCreateFlagBits2KHR::eViewIndexFromDeviceIndex ) result += "ViewIndexFromDeviceIndex | "; if ( value & PipelineCreateFlagBits2KHR::eDispatchBase ) @@ -3428,6 +3430,8 @@ namespace VULKAN_HPP_NAMESPACE result += "RayTracingDisplacementMicromapNV | "; if ( value & PipelineCreateFlagBits2KHR::eDescriptorBufferEXT ) result += "DescriptorBufferEXT | "; + if ( value & PipelineCreateFlagBits2KHR::eCaptureData ) + result += "CaptureData | "; return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } @@ -3592,6 +3596,8 @@ namespace VULKAN_HPP_NAMESPACE case Result::eErrorInvalidVideoStdParametersKHR: return "ErrorInvalidVideoStdParametersKHR"; case Result::eErrorCompressionExhaustedEXT: return "ErrorCompressionExhaustedEXT"; case Result::eIncompatibleShaderBinaryEXT: return "IncompatibleShaderBinaryEXT"; + case Result::ePipelineBinaryMissingKHR: return "PipelineBinaryMissingKHR"; + case Result::eErrorNotEnoughSpaceKHR: return "ErrorNotEnoughSpaceKHR"; default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } @@ -4091,13 +4097,11 @@ namespace VULKAN_HPP_NAMESPACE case StructureType::ePipelineRepresentativeFragmentTestStateCreateInfoNV: return "PipelineRepresentativeFragmentTestStateCreateInfoNV"; case StructureType::ePhysicalDeviceImageViewImageFormatInfoEXT: return "PhysicalDeviceImageViewImageFormatInfoEXT"; case StructureType::eFilterCubicImageViewImageFormatPropertiesEXT: return 
"FilterCubicImageViewImageFormatPropertiesEXT"; - case StructureType::eDeviceQueueGlobalPriorityCreateInfoKHR: return "DeviceQueueGlobalPriorityCreateInfoKHR"; case StructureType::eImportMemoryHostPointerInfoEXT: return "ImportMemoryHostPointerInfoEXT"; case StructureType::eMemoryHostPointerPropertiesEXT: return "MemoryHostPointerPropertiesEXT"; case StructureType::ePhysicalDeviceExternalMemoryHostPropertiesEXT: return "PhysicalDeviceExternalMemoryHostPropertiesEXT"; case StructureType::ePhysicalDeviceShaderClockFeaturesKHR: return "PhysicalDeviceShaderClockFeaturesKHR"; case StructureType::ePipelineCompilerControlCreateInfoAMD: return "PipelineCompilerControlCreateInfoAMD"; - case StructureType::eCalibratedTimestampInfoKHR: return "CalibratedTimestampInfoKHR"; case StructureType::ePhysicalDeviceShaderCorePropertiesAMD: return "PhysicalDeviceShaderCorePropertiesAMD"; case StructureType::eVideoDecodeH265CapabilitiesKHR: return "VideoDecodeH265CapabilitiesKHR"; case StructureType::eVideoDecodeH265SessionParametersCreateInfoKHR: return "VideoDecodeH265SessionParametersCreateInfoKHR"; @@ -4105,19 +4109,16 @@ namespace VULKAN_HPP_NAMESPACE case StructureType::eVideoDecodeH265ProfileInfoKHR: return "VideoDecodeH265ProfileInfoKHR"; case StructureType::eVideoDecodeH265PictureInfoKHR: return "VideoDecodeH265PictureInfoKHR"; case StructureType::eVideoDecodeH265DpbSlotInfoKHR: return "VideoDecodeH265DpbSlotInfoKHR"; + case StructureType::eDeviceQueueGlobalPriorityCreateInfoKHR: return "DeviceQueueGlobalPriorityCreateInfoKHR"; case StructureType::ePhysicalDeviceGlobalPriorityQueryFeaturesKHR: return "PhysicalDeviceGlobalPriorityQueryFeaturesKHR"; case StructureType::eQueueFamilyGlobalPriorityPropertiesKHR: return "QueueFamilyGlobalPriorityPropertiesKHR"; case StructureType::eDeviceMemoryOverallocationCreateInfoAMD: return "DeviceMemoryOverallocationCreateInfoAMD"; case StructureType::ePhysicalDeviceVertexAttributeDivisorPropertiesEXT: return "PhysicalDeviceVertexAttributeDivisorPropertiesEXT"; - case StructureType::ePipelineVertexInputDivisorStateCreateInfoKHR: return "PipelineVertexInputDivisorStateCreateInfoKHR"; - case StructureType::ePhysicalDeviceVertexAttributeDivisorFeaturesKHR: return "PhysicalDeviceVertexAttributeDivisorFeaturesKHR"; #if defined( VK_USE_PLATFORM_GGP ) case StructureType::ePresentFrameTokenGGP: return "PresentFrameTokenGGP"; #endif /*VK_USE_PLATFORM_GGP*/ - case StructureType::ePhysicalDeviceComputeShaderDerivativesFeaturesNV: return "PhysicalDeviceComputeShaderDerivativesFeaturesNV"; case StructureType::ePhysicalDeviceMeshShaderFeaturesNV: return "PhysicalDeviceMeshShaderFeaturesNV"; case StructureType::ePhysicalDeviceMeshShaderPropertiesNV: return "PhysicalDeviceMeshShaderPropertiesNV"; - case StructureType::ePhysicalDeviceFragmentShaderBarycentricFeaturesKHR: return "PhysicalDeviceFragmentShaderBarycentricFeaturesKHR"; case StructureType::ePhysicalDeviceShaderImageFootprintFeaturesNV: return "PhysicalDeviceShaderImageFootprintFeaturesNV"; case StructureType::ePipelineViewportExclusiveScissorStateCreateInfoNV: return "PipelineViewportExclusiveScissorStateCreateInfoNV"; case StructureType::ePhysicalDeviceExclusiveScissorFeaturesNV: return "PhysicalDeviceExclusiveScissorFeaturesNV"; @@ -4180,11 +4181,7 @@ namespace VULKAN_HPP_NAMESPACE case StructureType::eSurfaceFullScreenExclusiveWin32InfoEXT: return "SurfaceFullScreenExclusiveWin32InfoEXT"; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ case StructureType::eHeadlessSurfaceCreateInfoEXT: return "HeadlessSurfaceCreateInfoEXT"; - case 
StructureType::ePhysicalDeviceLineRasterizationFeaturesKHR: return "PhysicalDeviceLineRasterizationFeaturesKHR"; - case StructureType::ePipelineRasterizationLineStateCreateInfoKHR: return "PipelineRasterizationLineStateCreateInfoKHR"; - case StructureType::ePhysicalDeviceLineRasterizationPropertiesKHR: return "PhysicalDeviceLineRasterizationPropertiesKHR"; case StructureType::ePhysicalDeviceShaderAtomicFloatFeaturesEXT: return "PhysicalDeviceShaderAtomicFloatFeaturesEXT"; - case StructureType::ePhysicalDeviceIndexTypeUint8FeaturesKHR: return "PhysicalDeviceIndexTypeUint8FeaturesKHR"; case StructureType::ePhysicalDeviceExtendedDynamicStateFeaturesEXT: return "PhysicalDeviceExtendedDynamicStateFeaturesEXT"; case StructureType::ePhysicalDevicePipelineExecutablePropertiesFeaturesKHR: return "PhysicalDevicePipelineExecutablePropertiesFeaturesKHR"; case StructureType::ePipelineInfoKHR: return "PipelineInfoKHR"; @@ -4301,6 +4298,7 @@ namespace VULKAN_HPP_NAMESPACE case StructureType::ePhysicalDeviceGraphicsPipelineLibraryPropertiesEXT: return "PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT"; case StructureType::eGraphicsPipelineLibraryCreateInfoEXT: return "GraphicsPipelineLibraryCreateInfoEXT"; case StructureType::ePhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD: return "PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD"; + case StructureType::ePhysicalDeviceFragmentShaderBarycentricFeaturesKHR: return "PhysicalDeviceFragmentShaderBarycentricFeaturesKHR"; case StructureType::ePhysicalDeviceFragmentShaderBarycentricPropertiesKHR: return "PhysicalDeviceFragmentShaderBarycentricPropertiesKHR"; case StructureType::ePhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR: return "PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR"; case StructureType::ePhysicalDeviceFragmentShadingRateEnumsPropertiesNV: return "PhysicalDeviceFragmentShadingRateEnumsPropertiesNV"; @@ -4318,21 +4316,16 @@ namespace VULKAN_HPP_NAMESPACE case StructureType::ePhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR: return "PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR"; case StructureType::ePhysicalDeviceImageCompressionControlFeaturesEXT: return "PhysicalDeviceImageCompressionControlFeaturesEXT"; case StructureType::eImageCompressionControlEXT: return "ImageCompressionControlEXT"; - case StructureType::eSubresourceLayout2KHR: return "SubresourceLayout2KHR"; - case StructureType::eImageSubresource2KHR: return "ImageSubresource2KHR"; case StructureType::eImageCompressionPropertiesEXT: return "ImageCompressionPropertiesEXT"; case StructureType::ePhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT: return "PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT"; case StructureType::ePhysicalDevice4444FormatsFeaturesEXT: return "PhysicalDevice4444FormatsFeaturesEXT"; case StructureType::ePhysicalDeviceFaultFeaturesEXT: return "PhysicalDeviceFaultFeaturesEXT"; case StructureType::eDeviceFaultCountsEXT: return "DeviceFaultCountsEXT"; case StructureType::eDeviceFaultInfoEXT: return "DeviceFaultInfoEXT"; - case StructureType::ePhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT: return "PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT"; case StructureType::ePhysicalDeviceRgba10X6FormatsFeaturesEXT: return "PhysicalDeviceRgba10X6FormatsFeaturesEXT"; #if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) case StructureType::eDirectfbSurfaceCreateInfoEXT: return "DirectfbSurfaceCreateInfoEXT"; #endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ - case 
StructureType::ePhysicalDeviceMutableDescriptorTypeFeaturesEXT: return "PhysicalDeviceMutableDescriptorTypeFeaturesEXT"; - case StructureType::eMutableDescriptorTypeCreateInfoEXT: return "MutableDescriptorTypeCreateInfoEXT"; case StructureType::ePhysicalDeviceVertexInputDynamicStateFeaturesEXT: return "PhysicalDeviceVertexInputDynamicStateFeaturesEXT"; case StructureType::eVertexInputBindingDescription2EXT: return "VertexInputBindingDescription2EXT"; case StructureType::eVertexInputAttributeDescription2EXT: return "VertexInputAttributeDescription2EXT"; @@ -4456,6 +4449,7 @@ namespace VULKAN_HPP_NAMESPACE case StructureType::ePhysicalDeviceShaderModuleIdentifierPropertiesEXT: return "PhysicalDeviceShaderModuleIdentifierPropertiesEXT"; case StructureType::ePipelineShaderStageModuleIdentifierCreateInfoEXT: return "PipelineShaderStageModuleIdentifierCreateInfoEXT"; case StructureType::eShaderModuleIdentifierEXT: return "ShaderModuleIdentifierEXT"; + case StructureType::ePhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT: return "PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT"; case StructureType::ePhysicalDeviceOpticalFlowFeaturesNV: return "PhysicalDeviceOpticalFlowFeaturesNV"; case StructureType::ePhysicalDeviceOpticalFlowPropertiesNV: return "PhysicalDeviceOpticalFlowPropertiesNV"; case StructureType::eOpticalFlowImageFormatInfoNV: return "OpticalFlowImageFormatInfoNV"; @@ -4474,12 +4468,27 @@ namespace VULKAN_HPP_NAMESPACE case StructureType::ePhysicalDeviceMaintenance5PropertiesKHR: return "PhysicalDeviceMaintenance5PropertiesKHR"; case StructureType::eRenderingAreaInfoKHR: return "RenderingAreaInfoKHR"; case StructureType::eDeviceImageSubresourceInfoKHR: return "DeviceImageSubresourceInfoKHR"; + case StructureType::eSubresourceLayout2KHR: return "SubresourceLayout2KHR"; + case StructureType::eImageSubresource2KHR: return "ImageSubresource2KHR"; case StructureType::ePipelineCreateFlags2CreateInfoKHR: return "PipelineCreateFlags2CreateInfoKHR"; case StructureType::eBufferUsageFlags2CreateInfoKHR: return "BufferUsageFlags2CreateInfoKHR"; + case StructureType::ePhysicalDeviceAntiLagFeaturesAMD: return "PhysicalDeviceAntiLagFeaturesAMD"; + case StructureType::eAntiLagDataAMD: return "AntiLagDataAMD"; + case StructureType::eAntiLagPresentationInfoAMD: return "AntiLagPresentationInfoAMD"; case StructureType::ePhysicalDeviceRayTracingPositionFetchFeaturesKHR: return "PhysicalDeviceRayTracingPositionFetchFeaturesKHR"; case StructureType::ePhysicalDeviceShaderObjectFeaturesEXT: return "PhysicalDeviceShaderObjectFeaturesEXT"; case StructureType::ePhysicalDeviceShaderObjectPropertiesEXT: return "PhysicalDeviceShaderObjectPropertiesEXT"; case StructureType::eShaderCreateInfoEXT: return "ShaderCreateInfoEXT"; + case StructureType::ePhysicalDevicePipelineBinaryFeaturesKHR: return "PhysicalDevicePipelineBinaryFeaturesKHR"; + case StructureType::ePipelineBinaryCreateInfoKHR: return "PipelineBinaryCreateInfoKHR"; + case StructureType::ePipelineBinaryInfoKHR: return "PipelineBinaryInfoKHR"; + case StructureType::ePipelineBinaryKeyKHR: return "PipelineBinaryKeyKHR"; + case StructureType::ePhysicalDevicePipelineBinaryPropertiesKHR: return "PhysicalDevicePipelineBinaryPropertiesKHR"; + case StructureType::eReleaseCapturedPipelineDataInfoKHR: return "ReleaseCapturedPipelineDataInfoKHR"; + case StructureType::ePipelineBinaryDataInfoKHR: return "PipelineBinaryDataInfoKHR"; + case StructureType::ePipelineCreateInfoKHR: return "PipelineCreateInfoKHR"; + case 
StructureType::eDevicePipelineBinaryInternalCacheControlKHR: return "DevicePipelineBinaryInternalCacheControlKHR"; + case StructureType::ePipelineBinaryHandlesInfoKHR: return "PipelineBinaryHandlesInfoKHR"; case StructureType::ePhysicalDeviceTilePropertiesFeaturesQCOM: return "PhysicalDeviceTilePropertiesFeaturesQCOM"; case StructureType::eTilePropertiesQCOM: return "TilePropertiesQCOM"; case StructureType::ePhysicalDeviceAmigoProfilingFeaturesSEC: return "PhysicalDeviceAmigoProfilingFeaturesSEC"; @@ -4489,6 +4498,10 @@ namespace VULKAN_HPP_NAMESPACE case StructureType::ePhysicalDeviceRayTracingInvocationReorderPropertiesNV: return "PhysicalDeviceRayTracingInvocationReorderPropertiesNV"; case StructureType::ePhysicalDeviceExtendedSparseAddressSpaceFeaturesNV: return "PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV"; case StructureType::ePhysicalDeviceExtendedSparseAddressSpacePropertiesNV: return "PhysicalDeviceExtendedSparseAddressSpacePropertiesNV"; + case StructureType::ePhysicalDeviceMutableDescriptorTypeFeaturesEXT: return "PhysicalDeviceMutableDescriptorTypeFeaturesEXT"; + case StructureType::eMutableDescriptorTypeCreateInfoEXT: return "MutableDescriptorTypeCreateInfoEXT"; + case StructureType::ePhysicalDeviceLegacyVertexAttributesFeaturesEXT: return "PhysicalDeviceLegacyVertexAttributesFeaturesEXT"; + case StructureType::ePhysicalDeviceLegacyVertexAttributesPropertiesEXT: return "PhysicalDeviceLegacyVertexAttributesPropertiesEXT"; case StructureType::eLayerSettingsCreateInfoEXT: return "LayerSettingsCreateInfoEXT"; case StructureType::ePhysicalDeviceShaderCoreBuiltinsFeaturesARM: return "PhysicalDeviceShaderCoreBuiltinsFeaturesARM"; case StructureType::ePhysicalDeviceShaderCoreBuiltinsPropertiesARM: return "PhysicalDeviceShaderCoreBuiltinsPropertiesARM"; @@ -4508,6 +4521,8 @@ namespace VULKAN_HPP_NAMESPACE case StructureType::ePhysicalDeviceCooperativeMatrixPropertiesKHR: return "PhysicalDeviceCooperativeMatrixPropertiesKHR"; case StructureType::ePhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM: return "PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM"; case StructureType::eMultiviewPerViewRenderAreasRenderPassBeginInfoQCOM: return "MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM"; + case StructureType::ePhysicalDeviceComputeShaderDerivativesFeaturesKHR: return "PhysicalDeviceComputeShaderDerivativesFeaturesKHR"; + case StructureType::ePhysicalDeviceComputeShaderDerivativesPropertiesKHR: return "PhysicalDeviceComputeShaderDerivativesPropertiesKHR"; case StructureType::eVideoDecodeAv1CapabilitiesKHR: return "VideoDecodeAv1CapabilitiesKHR"; case StructureType::eVideoDecodeAv1PictureInfoKHR: return "VideoDecodeAv1PictureInfoKHR"; case StructureType::eVideoDecodeAv1ProfileInfoKHR: return "VideoDecodeAv1ProfileInfoKHR"; @@ -4527,6 +4542,8 @@ namespace VULKAN_HPP_NAMESPACE case StructureType::ePhysicalDeviceCubicClampFeaturesQCOM: return "PhysicalDeviceCubicClampFeaturesQCOM"; case StructureType::ePhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT: return "PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT"; case StructureType::ePhysicalDeviceVertexAttributeDivisorPropertiesKHR: return "PhysicalDeviceVertexAttributeDivisorPropertiesKHR"; + case StructureType::ePipelineVertexInputDivisorStateCreateInfoKHR: return "PipelineVertexInputDivisorStateCreateInfoKHR"; + case StructureType::ePhysicalDeviceVertexAttributeDivisorFeaturesKHR: return "PhysicalDeviceVertexAttributeDivisorFeaturesKHR"; case StructureType::ePhysicalDeviceShaderFloatControls2FeaturesKHR: 
return "PhysicalDeviceShaderFloatControls2FeaturesKHR"; #if defined( VK_USE_PLATFORM_SCREEN_QNX ) case StructureType::eScreenBufferPropertiesQNX: return "ScreenBufferPropertiesQNX"; @@ -4536,6 +4553,11 @@ namespace VULKAN_HPP_NAMESPACE case StructureType::ePhysicalDeviceExternalMemoryScreenBufferFeaturesQNX: return "PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX"; #endif /*VK_USE_PLATFORM_SCREEN_QNX*/ case StructureType::ePhysicalDeviceLayeredDriverPropertiesMSFT: return "PhysicalDeviceLayeredDriverPropertiesMSFT"; + case StructureType::ePhysicalDeviceIndexTypeUint8FeaturesKHR: return "PhysicalDeviceIndexTypeUint8FeaturesKHR"; + case StructureType::ePhysicalDeviceLineRasterizationFeaturesKHR: return "PhysicalDeviceLineRasterizationFeaturesKHR"; + case StructureType::ePipelineRasterizationLineStateCreateInfoKHR: return "PipelineRasterizationLineStateCreateInfoKHR"; + case StructureType::ePhysicalDeviceLineRasterizationPropertiesKHR: return "PhysicalDeviceLineRasterizationPropertiesKHR"; + case StructureType::eCalibratedTimestampInfoKHR: return "CalibratedTimestampInfoKHR"; case StructureType::ePhysicalDeviceShaderExpectAssumeFeaturesKHR: return "PhysicalDeviceShaderExpectAssumeFeaturesKHR"; case StructureType::ePhysicalDeviceMaintenance6FeaturesKHR: return "PhysicalDeviceMaintenance6FeaturesKHR"; case StructureType::ePhysicalDeviceMaintenance6PropertiesKHR: return "PhysicalDeviceMaintenance6PropertiesKHR"; @@ -4548,8 +4570,19 @@ namespace VULKAN_HPP_NAMESPACE case StructureType::eBindDescriptorBufferEmbeddedSamplersInfoEXT: return "BindDescriptorBufferEmbeddedSamplersInfoEXT"; case StructureType::ePhysicalDeviceDescriptorPoolOverallocationFeaturesNV: return "PhysicalDeviceDescriptorPoolOverallocationFeaturesNV"; case StructureType::ePhysicalDeviceRawAccessChainsFeaturesNV: return "PhysicalDeviceRawAccessChainsFeaturesNV"; + case StructureType::ePhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR: return "PhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR"; + case StructureType::ePhysicalDeviceCommandBufferInheritanceFeaturesNV: return "PhysicalDeviceCommandBufferInheritanceFeaturesNV"; + case StructureType::ePhysicalDeviceMaintenance7FeaturesKHR: return "PhysicalDeviceMaintenance7FeaturesKHR"; + case StructureType::ePhysicalDeviceMaintenance7PropertiesKHR: return "PhysicalDeviceMaintenance7PropertiesKHR"; + case StructureType::ePhysicalDeviceLayeredApiPropertiesListKHR: return "PhysicalDeviceLayeredApiPropertiesListKHR"; + case StructureType::ePhysicalDeviceLayeredApiPropertiesKHR: return "PhysicalDeviceLayeredApiPropertiesKHR"; + case StructureType::ePhysicalDeviceLayeredApiVulkanPropertiesKHR: return "PhysicalDeviceLayeredApiVulkanPropertiesKHR"; case StructureType::ePhysicalDeviceShaderAtomicFloat16VectorFeaturesNV: return "PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV"; + case StructureType::ePhysicalDeviceShaderReplicatedCompositesFeaturesEXT: return "PhysicalDeviceShaderReplicatedCompositesFeaturesEXT"; case StructureType::ePhysicalDeviceRayTracingValidationFeaturesNV: return "PhysicalDeviceRayTracingValidationFeaturesNV"; + case StructureType::ePhysicalDeviceImageAlignmentControlFeaturesMESA: return "PhysicalDeviceImageAlignmentControlFeaturesMESA"; + case StructureType::ePhysicalDeviceImageAlignmentControlPropertiesMESA: return "PhysicalDeviceImageAlignmentControlPropertiesMESA"; + case StructureType::eImageAlignmentControlCreateInfoMESA: return "ImageAlignmentControlCreateInfoMESA"; default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( 
value ) ) + " )"; } } @@ -4622,6 +4655,7 @@ namespace VULKAN_HPP_NAMESPACE case ObjectType::eMicromapEXT: return "MicromapEXT"; case ObjectType::eOpticalFlowSessionNV: return "OpticalFlowSessionNV"; case ObjectType::eShaderEXT: return "ShaderEXT"; + case ObjectType::ePipelineBinaryKHR: return "PipelineBinaryKHR"; default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } @@ -4630,6 +4664,7 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { + case VendorId::eKhronos: return "Khronos"; case VendorId::eVIV: return "VIV"; case VendorId::eVSI: return "VSI"; case VendorId::eKazan: return "Kazan"; @@ -4892,7 +4927,7 @@ namespace VULKAN_HPP_NAMESPACE case Format::ePvrtc14BppSrgbBlockIMG: return "Pvrtc14BppSrgbBlockIMG"; case Format::ePvrtc22BppSrgbBlockIMG: return "Pvrtc22BppSrgbBlockIMG"; case Format::ePvrtc24BppSrgbBlockIMG: return "Pvrtc24BppSrgbBlockIMG"; - case Format::eR16G16S105NV: return "R16G16S105NV"; + case Format::eR16G16Sfixed5NV: return "R16G16Sfixed5NV"; case Format::eA1B5G5R5UnormPack16KHR: return "A1B5G5R5UnormPack16KHR"; case Format::eA8UnormKHR: return "A8UnormKHR"; default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; @@ -4927,10 +4962,10 @@ namespace VULKAN_HPP_NAMESPACE case FormatFeatureFlagBits::eDisjoint: return "Disjoint"; case FormatFeatureFlagBits::eCositedChromaSamples: return "CositedChromaSamples"; case FormatFeatureFlagBits::eSampledImageFilterMinmax: return "SampledImageFilterMinmax"; - case FormatFeatureFlagBits::eSampledImageFilterCubicEXT: return "SampledImageFilterCubicEXT"; case FormatFeatureFlagBits::eVideoDecodeOutputKHR: return "VideoDecodeOutputKHR"; case FormatFeatureFlagBits::eVideoDecodeDpbKHR: return "VideoDecodeDpbKHR"; case FormatFeatureFlagBits::eAccelerationStructureVertexBufferKHR: return "AccelerationStructureVertexBufferKHR"; + case FormatFeatureFlagBits::eSampledImageFilterCubicEXT: return "SampledImageFilterCubicEXT"; case FormatFeatureFlagBits::eFragmentDensityMapEXT: return "FragmentDensityMapEXT"; case FormatFeatureFlagBits::eFragmentShadingRateAttachmentKHR: return "FragmentShadingRateAttachmentKHR"; case FormatFeatureFlagBits::eVideoEncodeInputKHR: return "VideoEncodeInputKHR"; @@ -5004,8 +5039,8 @@ namespace VULKAN_HPP_NAMESPACE case ImageUsageFlagBits::eVideoDecodeDstKHR: return "VideoDecodeDstKHR"; case ImageUsageFlagBits::eVideoDecodeSrcKHR: return "VideoDecodeSrcKHR"; case ImageUsageFlagBits::eVideoDecodeDpbKHR: return "VideoDecodeDpbKHR"; - case ImageUsageFlagBits::eFragmentShadingRateAttachmentKHR: return "FragmentShadingRateAttachmentKHR"; case ImageUsageFlagBits::eFragmentDensityMapEXT: return "FragmentDensityMapEXT"; + case ImageUsageFlagBits::eFragmentShadingRateAttachmentKHR: return "FragmentShadingRateAttachmentKHR"; case ImageUsageFlagBits::eHostTransferEXT: return "HostTransferEXT"; case ImageUsageFlagBits::eVideoEncodeDstKHR: return "VideoEncodeDstKHR"; case ImageUsageFlagBits::eVideoEncodeSrcKHR: return "VideoEncodeSrcKHR"; @@ -5160,11 +5195,11 @@ namespace VULKAN_HPP_NAMESPACE case PipelineStageFlagBits::eConditionalRenderingEXT: return "ConditionalRenderingEXT"; case PipelineStageFlagBits::eAccelerationStructureBuildKHR: return "AccelerationStructureBuildKHR"; case PipelineStageFlagBits::eRayTracingShaderKHR: return "RayTracingShaderKHR"; + case PipelineStageFlagBits::eFragmentDensityProcessEXT: return "FragmentDensityProcessEXT"; case PipelineStageFlagBits::eFragmentShadingRateAttachmentKHR: return 
"FragmentShadingRateAttachmentKHR"; + case PipelineStageFlagBits::eCommandPreprocessNV: return "CommandPreprocessNV"; case PipelineStageFlagBits::eTaskShaderEXT: return "TaskShaderEXT"; case PipelineStageFlagBits::eMeshShaderEXT: return "MeshShaderEXT"; - case PipelineStageFlagBits::eFragmentDensityProcessEXT: return "FragmentDensityProcessEXT"; - case PipelineStageFlagBits::eCommandPreprocessNV: return "CommandPreprocessNV"; default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } @@ -5398,8 +5433,8 @@ namespace VULKAN_HPP_NAMESPACE case ImageLayout::eVideoDecodeSrcKHR: return "VideoDecodeSrcKHR"; case ImageLayout::eVideoDecodeDpbKHR: return "VideoDecodeDpbKHR"; case ImageLayout::eSharedPresentKHR: return "SharedPresentKHR"; - case ImageLayout::eFragmentShadingRateAttachmentOptimalKHR: return "FragmentShadingRateAttachmentOptimalKHR"; case ImageLayout::eFragmentDensityMapOptimalEXT: return "FragmentDensityMapOptimalEXT"; + case ImageLayout::eFragmentShadingRateAttachmentOptimalKHR: return "FragmentShadingRateAttachmentOptimalKHR"; case ImageLayout::eRenderingLocalReadKHR: return "RenderingLocalReadKHR"; case ImageLayout::eVideoEncodeDstKHR: return "VideoEncodeDstKHR"; case ImageLayout::eVideoEncodeSrcKHR: return "VideoEncodeSrcKHR"; @@ -5629,7 +5664,6 @@ namespace VULKAN_HPP_NAMESPACE case DynamicState::eExclusiveScissorEnableNV: return "ExclusiveScissorEnableNV"; case DynamicState::eExclusiveScissorNV: return "ExclusiveScissorNV"; case DynamicState::eFragmentShadingRateKHR: return "FragmentShadingRateKHR"; - case DynamicState::eLineStippleKHR: return "LineStippleKHR"; case DynamicState::eVertexInputEXT: return "VertexInputEXT"; case DynamicState::ePatchControlPointsEXT: return "PatchControlPointsEXT"; case DynamicState::eLogicOpEXT: return "LogicOpEXT"; @@ -5666,6 +5700,7 @@ namespace VULKAN_HPP_NAMESPACE case DynamicState::eRepresentativeFragmentTestEnableNV: return "RepresentativeFragmentTestEnableNV"; case DynamicState::eCoverageReductionModeNV: return "CoverageReductionModeNV"; case DynamicState::eAttachmentFeedbackLoopEnableEXT: return "AttachmentFeedbackLoopEnableEXT"; + case DynamicState::eLineStippleKHR: return "LineStippleKHR"; default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } @@ -5985,8 +6020,8 @@ namespace VULKAN_HPP_NAMESPACE case DescriptorSetLayoutCreateFlagBits::ePushDescriptorKHR: return "PushDescriptorKHR"; case DescriptorSetLayoutCreateFlagBits::eDescriptorBufferEXT: return "DescriptorBufferEXT"; case DescriptorSetLayoutCreateFlagBits::eEmbeddedImmutableSamplersEXT: return "EmbeddedImmutableSamplersEXT"; - case DescriptorSetLayoutCreateFlagBits::eHostOnlyPoolEXT: return "HostOnlyPoolEXT"; case DescriptorSetLayoutCreateFlagBits::eIndirectBindableNV: return "IndirectBindableNV"; + case DescriptorSetLayoutCreateFlagBits::eHostOnlyPoolEXT: return "HostOnlyPoolEXT"; case DescriptorSetLayoutCreateFlagBits::ePerStageNV: return "PerStageNV"; default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } @@ -6010,9 +6045,9 @@ namespace VULKAN_HPP_NAMESPACE case DescriptorType::eInlineUniformBlock: return "InlineUniformBlock"; case DescriptorType::eAccelerationStructureKHR: return "AccelerationStructureKHR"; case DescriptorType::eAccelerationStructureNV: return "AccelerationStructureNV"; - case DescriptorType::eMutableEXT: return "MutableEXT"; case DescriptorType::eSampleWeightImageQCOM: return "SampleWeightImageQCOM"; case 
DescriptorType::eBlockMatchImageQCOM: return "BlockMatchImageQCOM"; + case DescriptorType::eMutableEXT: return "MutableEXT"; default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } @@ -6051,8 +6086,8 @@ namespace VULKAN_HPP_NAMESPACE case AccessFlagBits::eColorAttachmentReadNoncoherentEXT: return "ColorAttachmentReadNoncoherentEXT"; case AccessFlagBits::eAccelerationStructureReadKHR: return "AccelerationStructureReadKHR"; case AccessFlagBits::eAccelerationStructureWriteKHR: return "AccelerationStructureWriteKHR"; - case AccessFlagBits::eFragmentShadingRateAttachmentReadKHR: return "FragmentShadingRateAttachmentReadKHR"; case AccessFlagBits::eFragmentDensityMapReadEXT: return "FragmentDensityMapReadEXT"; + case AccessFlagBits::eFragmentShadingRateAttachmentReadKHR: return "FragmentShadingRateAttachmentReadKHR"; case AccessFlagBits::eCommandPreprocessReadNV: return "CommandPreprocessReadNV"; case AccessFlagBits::eCommandPreprocessWriteNV: return "CommandPreprocessWriteNV"; default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; @@ -6240,7 +6275,7 @@ namespace VULKAN_HPP_NAMESPACE { case SubpassContents::eInline: return "Inline"; case SubpassContents::eSecondaryCommandBuffers: return "SecondaryCommandBuffers"; - case SubpassContents::eInlineAndSecondaryCommandBuffersEXT: return "InlineAndSecondaryCommandBuffersEXT"; + case SubpassContents::eInlineAndSecondaryCommandBuffersKHR: return "InlineAndSecondaryCommandBuffersKHR"; default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } @@ -6498,7 +6533,8 @@ namespace VULKAN_HPP_NAMESPACE case DriverId::eMesaDozen: return "MesaDozen"; case DriverId::eMesaNvk: return "MesaNvk"; case DriverId::eImaginationOpenSourceMESA: return "ImaginationOpenSourceMESA"; - case DriverId::eMesaAgxv: return "MesaAgxv"; + case DriverId::eMesaHoneykrisp: return "MesaHoneykrisp"; + case DriverId::eReserved27: return "Reserved27"; default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } @@ -6723,8 +6759,8 @@ namespace VULKAN_HPP_NAMESPACE case RenderingFlagBits::eContentsSecondaryCommandBuffers: return "ContentsSecondaryCommandBuffers"; case RenderingFlagBits::eSuspending: return "Suspending"; case RenderingFlagBits::eResuming: return "Resuming"; - case RenderingFlagBits::eContentsInlineEXT: return "ContentsInlineEXT"; case RenderingFlagBits::eEnableLegacyDitheringEXT: return "EnableLegacyDitheringEXT"; + case RenderingFlagBits::eContentsInlineKHR: return "ContentsInlineKHR"; default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } @@ -8247,7 +8283,7 @@ namespace VULKAN_HPP_NAMESPACE switch ( value ) { case VideoEncodeCapabilityFlagBitsKHR::ePrecedingExternallyEncodedBytes: return "PrecedingExternallyEncodedBytes"; - case VideoEncodeCapabilityFlagBitsKHR::eInsufficientstreamBufferRangeDetectionBit: return "InsufficientstreamBufferRangeDetectionBit"; + case VideoEncodeCapabilityFlagBitsKHR::eInsufficientBitstreamBufferRangeDetection: return "InsufficientBitstreamBufferRangeDetection"; default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } @@ -8256,9 +8292,9 @@ namespace VULKAN_HPP_NAMESPACE { switch ( value ) { - case VideoEncodeFeedbackFlagBitsKHR::estreamBufferOffsetBit: return "streamBufferOffsetBit"; - case VideoEncodeFeedbackFlagBitsKHR::estreamBytesWrittenBit: return "streamBytesWrittenBit"; - case 
VideoEncodeFeedbackFlagBitsKHR::estreamHasOverridesBit: return "streamHasOverridesBit"; + case VideoEncodeFeedbackFlagBitsKHR::eBitstreamBufferOffset: return "BitstreamBufferOffset"; + case VideoEncodeFeedbackFlagBitsKHR::eBitstreamBytesWritten: return "BitstreamBytesWritten"; + case VideoEncodeFeedbackFlagBitsKHR::eBitstreamHasOverrides: return "BitstreamHasOverrides"; default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } @@ -8813,6 +8849,7 @@ namespace VULKAN_HPP_NAMESPACE case PipelineCreateFlagBits2KHR::eDisableOptimization: return "DisableOptimization"; case PipelineCreateFlagBits2KHR::eAllowDerivatives: return "AllowDerivatives"; case PipelineCreateFlagBits2KHR::eDerivative: return "Derivative"; + case PipelineCreateFlagBits2KHR::eEnableLegacyDitheringEXT: return "EnableLegacyDitheringEXT"; case PipelineCreateFlagBits2KHR::eViewIndexFromDeviceIndex: return "ViewIndexFromDeviceIndex"; case PipelineCreateFlagBits2KHR::eDispatchBase: return "DispatchBase"; case PipelineCreateFlagBits2KHR::eDeferCompileNV: return "DeferCompileNV"; @@ -8841,6 +8878,7 @@ namespace VULKAN_HPP_NAMESPACE case PipelineCreateFlagBits2KHR::eProtectedAccessOnlyEXT: return "ProtectedAccessOnlyEXT"; case PipelineCreateFlagBits2KHR::eRayTracingDisplacementMicromapNV: return "RayTracingDisplacementMicromapNV"; case PipelineCreateFlagBits2KHR::eDescriptorBufferEXT: return "DescriptorBufferEXT"; + case PipelineCreateFlagBits2KHR::eCaptureData: return "CaptureData"; default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } @@ -8881,6 +8919,29 @@ namespace VULKAN_HPP_NAMESPACE } } + //=== VK_AMD_anti_lag === + + VULKAN_HPP_INLINE std::string to_string( AntiLagModeAMD value ) + { + switch ( value ) + { + case AntiLagModeAMD::eDriverControl: return "DriverControl"; + case AntiLagModeAMD::eOn: return "On"; + case AntiLagModeAMD::eOff: return "Off"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( AntiLagStageAMD value ) + { + switch ( value ) + { + case AntiLagStageAMD::eInput: return "Input"; + case AntiLagStageAMD::ePresent: return "Present"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + //=== VK_EXT_shader_object === VULKAN_HPP_INLINE std::string to_string( ShaderCreateFlagBitsEXT value ) @@ -9069,5 +9130,20 @@ namespace VULKAN_HPP_NAMESPACE } } + //=== VK_KHR_maintenance7 === + + VULKAN_HPP_INLINE std::string to_string( PhysicalDeviceLayeredApiKHR value ) + { + switch ( value ) + { + case PhysicalDeviceLayeredApiKHR::eVulkan: return "Vulkan"; + case PhysicalDeviceLayeredApiKHR::eD3D12: return "D3D12"; + case PhysicalDeviceLayeredApiKHR::eMetal: return "Metal"; + case PhysicalDeviceLayeredApiKHR::eOpengl: return "Opengl"; + case PhysicalDeviceLayeredApiKHR::eOpengles: return "Opengles"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + } // namespace VULKAN_HPP_NAMESPACE #endif From b7d554553b26fbe50342cdd08ab8e56eb7850db2 Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Mon, 2 Sep 2024 13:02:32 +0200 Subject: [PATCH 015/131] adding depth buffer, ci skip --- runtime/Includes/Core/Application.inl | 24 ++-- runtime/Includes/Core/Enums.h | 2 +- runtime/Includes/Core/EventBase.h | 2 +- runtime/Includes/Core/Graphics.h | 35 +++--- runtime/Includes/Core/Graphics.inl | 39 +----- runtime/Includes/Embedded/2DVertex.nzsl | 10 
+- runtime/Includes/Embedded/2DVertex.spv.h | 115 +++++++++--------- runtime/Includes/Graphics/Scene.h | 11 +- runtime/Includes/Graphics/Sprite.h | 8 +- runtime/Includes/Renderer/Descriptor.h | 1 + runtime/Includes/Renderer/Enums.h | 1 + runtime/Includes/Renderer/Image.h | 20 ++- .../Includes/Renderer/Pipelines/Graphics.h | 2 + runtime/Includes/Renderer/Vertex.h | 7 +- runtime/Includes/Renderer/Vertex.inl | 13 +- runtime/Sources/Core/Graphics.cpp | 42 +++---- runtime/Sources/Core/SDLManager.cpp | 4 +- runtime/Sources/Graphics/Scene.cpp | 4 +- runtime/Sources/Graphics/Sprite.cpp | 7 ++ runtime/Sources/Renderer/Descriptor.cpp | 6 + runtime/Sources/Renderer/Image.cpp | 4 +- .../Sources/Renderer/Pipelines/Graphics.cpp | 20 ++- .../Sources/Renderer/RenderPasses/2DPass.cpp | 17 ++- .../Renderer/RenderPasses/FinalPass.cpp | 4 +- .../Sources/Renderer/RenderPasses/Passes.cpp | 3 +- runtime/Sources/Renderer/Renderer.cpp | 8 +- 26 files changed, 217 insertions(+), 192 deletions(-) diff --git a/runtime/Includes/Core/Application.inl b/runtime/Includes/Core/Application.inl index 3193aee..f00c206 100644 --- a/runtime/Includes/Core/Application.inl +++ b/runtime/Includes/Core/Application.inl @@ -7,7 +7,7 @@ Error("invalid window ptr (NULL)"); \ return; \ } \ - else if(*static_cast(win) < 0 || *static_cast(win) > static_cast(_graphics.size()))\ + else if(*static_cast(win) < 0 || *static_cast(win) > static_cast(m_graphics.size()))\ { \ Error("invalid window ptr"); \ return; \ @@ -33,7 +33,7 @@ namespace mlx *y = p_in->GetY(); } - void Application::MouseMove(void* win, int x, int y) noexcept + void Application::MouseMove(Handle win, int x, int y) noexcept { CHECK_WINDOW_PTR(win); if(!m_graphics[*static_cast(win)]->HasWindow()) @@ -43,7 +43,7 @@ namespace mlx } } - void Application::OnEvent(void* win, int event, int (*funct_ptr)(int, void*), void* param) noexcept + void Application::OnEvent(Handle win, int event, int (*funct_ptr)(int, void*), void* param) noexcept { CHECK_WINDOW_PTR(win); if(!m_graphics[*static_cast(win)]->HasWindow()) @@ -54,7 +54,7 @@ namespace mlx m_in.OnEvent(m_graphics[*static_cast(win)]->GetWindow()->GetID(), event, funct_ptr, param); } - void Application::GetScreenSize(void* win, int* w, int* h) noexcept + void Application::GetScreenSize(Handle win, int* w, int* h) noexcept { CHECK_WINDOW_PTR(win); *w = 0; @@ -88,28 +88,28 @@ namespace mlx return static_cast(&m_graphics.back()->GetID()); } - void Application::ClearGraphicsSupport(void* win) + void Application::ClearGraphicsSupport(Handle win) { MLX_PROFILE_FUNCTION(); CHECK_WINDOW_PTR(win); m_graphics[*static_cast(win)]->ClearRenderData(); } - void Application::DestroyGraphicsSupport(void* win) + void Application::DestroyGraphicsSupport(Handle win) { MLX_PROFILE_FUNCTION(); CHECK_WINDOW_PTR(win); m_graphics[*static_cast(win)].reset(); } - void Application::PixelPut(void* win, int x, int y, std::uint32_t color) const noexcept + void Application::PixelPut(Handle win, int x, int y, std::uint32_t color) const noexcept { MLX_PROFILE_FUNCTION(); CHECK_WINDOW_PTR(win); m_graphics[*static_cast(win)]->PixelPut(x, y, color); } - void Application::StringPut(void* win, int x, int y, std::uint32_t color, char* str) + void Application::StringPut(Handle win, int x, int y, std::uint32_t color, char* str) { MLX_PROFILE_FUNCTION(); CHECK_WINDOW_PTR(win); @@ -126,14 +126,14 @@ namespace mlx m_graphics[*static_cast(win)]->StringPut(x, y, color, str); } - void Application::LoadFont(void* win, const std::filesystem::path& filepath, float scale) + void 
Application::LoadFont(Handle win, const std::filesystem::path& filepath, float scale) { MLX_PROFILE_FUNCTION(); CHECK_WINDOW_PTR(win); m_graphics[*static_cast(win)]->LoadFont(filepath, scale); } - void Application::TexturePut(void* win, void* img, int x, int y) + void Application::TexturePut(Handle win, Handle img, int x, int y) { MLX_PROFILE_FUNCTION(); CHECK_WINDOW_PTR(win); @@ -145,7 +145,7 @@ namespace mlx m_graphics[*static_cast(win)]->TexturePut(texture, x, y); } - int Application::GetTexturePixel(void* img, int x, int y) + int Application::GetTexturePixel(Handle img, int x, int y) { MLX_PROFILE_FUNCTION(); CHECK_IMAGE_PTR(img, return 0); @@ -158,7 +158,7 @@ namespace mlx return texture->GetPixel(x, y); } - void Application::setTexturePixel(void* img, int x, int y, std::uint32_t color) + void Application::SetTexturePixel(Handle img, int x, int y, std::uint32_t color) { MLX_PROFILE_FUNCTION(); CHECK_IMAGE_PTR(img, return); diff --git a/runtime/Includes/Core/Enums.h b/runtime/Includes/Core/Enums.h index 082373e..7eb6984 100644 --- a/runtime/Includes/Core/Enums.h +++ b/runtime/Includes/Core/Enums.h @@ -19,10 +19,10 @@ namespace mlx enum class Event { + DescriptorPoolResetEventCode = 55, ResizeEventCode = 56, FrameBeginEventCode = 57, FatalErrorEventCode = 168, - QuitEventCode = 168, EndEnum }; diff --git a/runtime/Includes/Core/EventBase.h b/runtime/Includes/Core/EventBase.h index 7f4464a..5e9aa02 100644 --- a/runtime/Includes/Core/EventBase.h +++ b/runtime/Includes/Core/EventBase.h @@ -5,7 +5,7 @@ namespace mlx { struct EventBase { - virtual std::uint32_t What() const = 0; + virtual Event What() const = 0; }; } diff --git a/runtime/Includes/Core/Graphics.h b/runtime/Includes/Core/Graphics.h index 842c2c9..e97ea9d 100644 --- a/runtime/Includes/Core/Graphics.h +++ b/runtime/Includes/Core/Graphics.h @@ -3,12 +3,10 @@ #include #include -#include -#include -#include -#include -#include -#include +#include +#include +#include +#include namespace mlx { @@ -18,36 +16,31 @@ namespace mlx GraphicsSupport(std::size_t w, std::size_t h, NonOwningPtr render_target, int id); GraphicsSupport(std::size_t w, std::size_t h, std::string title, int id); - inline int& GetID() noexcept; - inline std::shared_ptr GetWindow(); + [[nodiscard]] MLX_FORCEINLINE int& GetID() noexcept { return m_id; } + [[nodiscard]] inline std::shared_ptr GetWindow() { return p_window; } void Render() noexcept; - inline void ClearRenderData() noexcept; + inline void ResetRenderData() noexcept; + inline void PixelPut(int x, int y, std::uint32_t color) noexcept; inline void StringPut(int x, int y, std::uint32_t color, std::string str); inline void TexturePut(NonOwningPtr texture, int x, int y); + inline void LoadFont(const std::filesystem::path& filepath, float scale); - inline void TryEraseTextureFromManager(NonOwningPtr texture) noexcept; - inline bool HasWindow() const noexcept { return m_has_window; } + inline void TryEraseTextureFromRegistry(NonOwningPtr texture) noexcept; - inline Renderer& GetRenderer() { return m_renderer; } + [[nodiscard]] MLX_FORCEINLINE bool HasWindow() const noexcept { return m_has_window; } + [[nodiscard]] MLX_FORCEINLINE Renderer& GetRenderer() { return m_renderer; } ~GraphicsSupport(); private: Renderer m_renderer; - PixelPutPipeline m_pixel_put_pipeline; - - std::vector> m_drawlist; - - TextManager m_text_manager; - TextureRegistry m_texture_registry; - - glm::mat4 m_proj = glm::mat4(1.0); - + SceneRenderer m_scene_renderer; std::shared_ptr p_window; + std::unique_ptr p_scene; std::size_t m_width = 0; 
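With this commit GraphicsSupport stops keeping its own draw list, pixel-put pipeline and text manager: every draw call is now meant to end up as a Sprite inside the per-window Scene, which the SceneRenderer traverses each frame. A minimal usage sketch of that Scene/Sprite API, based only on the declarations in the Scene.h and Sprite.h hunks of this patch; the include paths, the mlx::Texture argument and the concrete values are assumptions, not something the patch shows:

    // Hedged sketch, not part of the patch: driving the new Scene/Sprite API.
    #include <Graphics/Scene.h>   // assumed include paths, following the repository layout
    #include <Graphics/Sprite.h>

    void DrawTexturedQuad(mlx::Scene& scene, std::shared_ptr<mlx::Texture> texture)
    {
        // The Scene owns the sprite; CreateSprite builds a quad mesh sized to the texture.
        mlx::Sprite& sprite = scene.CreateSprite(texture);
        sprite.SetPosition(mlx::Vec3f{ 64.0f, 32.0f, 0.0f }); // z now feeds the new depth buffer
        sprite.SetColor(mlx::Vec4f{ 1.0f, 1.0f, 1.0f, 1.0f });
        // The SceneRenderer picks the sprite up on the next GraphicsSupport::Render() call.
    }
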
std::size_t m_height = 0; diff --git a/runtime/Includes/Core/Graphics.inl b/runtime/Includes/Core/Graphics.inl index 10286cc..7d866bb 100644 --- a/runtime/Includes/Core/Graphics.inl +++ b/runtime/Includes/Core/Graphics.inl @@ -3,66 +3,35 @@ namespace mlx { - int& GraphicsSupport::GetID() noexcept { return m_id; } - std::shared_ptr GraphicsSupport::GetWindow() { return p_window; } - - void GraphicsSupport::ClearRenderData() noexcept + void GraphicsSupport::ResetRenderData() noexcept { MLX_PROFILE_FUNCTION(); - m_drawlist.clear(); - m_pixel_put_pipeline.Clear(); - m_text_manager.Clear(); - m_texture_registry.Clear(); + p_scene->ResetSprites(); + m_images_registry.Clear(); } void GraphicsSupport::PixelPut(int x, int y, std::uint32_t color) noexcept { MLX_PROFILE_FUNCTION(); - m_pixel_put_pipeline.SetPixel(x, y, color); } void GraphicsSupport::StringPut(int x, int y, std::uint32_t color, std::string str) { MLX_PROFILE_FUNCTION(); - std::pair, bool> res = m_text_manager.RegisterText(x, y, color, str); - if(!res.second) // if this is not a completly new text draw - { - auto it = std::find(m_drawlist.begin(), m_drawlist.end(), res.first); - if(it != m_drawlist.end()) - m_drawlist.erase(it); - } - m_drawlist.push_back(res.first); } void GraphicsSupport::TexturePut(NonOwningPtr texture, int x, int y) { MLX_PROFILE_FUNCTION(); - auto res = m_texture_registry.RegisterTexture(texture, x, y); - if(!res.second) // if this is not a completly new texture draw - { - auto it = std::find(m_drawlist.begin(), m_drawlist.end(), res.first); - if(it != m_drawlist.end()) - m_drawlist.erase(it); - } - m_drawlist.push_back(res.first); } void GraphicsSupport::LoadFont(const std::filesystem::path& filepath, float scale) { MLX_PROFILE_FUNCTION(); - m_text_manager.LoadFont(m_renderer, filepath, scale); } - void GraphicsSupport::TryEraseTextureFromManager(NonOwningPtr texture) noexcept + void GraphicsSupport::TryEraseTextureFromRegistry(NonOwningPtr texture) noexcept { MLX_PROFILE_FUNCTION(); - for(auto it = m_drawlist.begin(); it != m_drawlist.end();) - { - if(m_texture_registry.IsTextureKnown(texture)) - it = m_drawlist.erase(it); - else - ++it; - } - m_texture_registry.EraseTextures(texture); } } diff --git a/runtime/Includes/Embedded/2DVertex.nzsl b/runtime/Includes/Embedded/2DVertex.nzsl index ec65918..67f7e09 100644 --- a/runtime/Includes/Embedded/2DVertex.nzsl +++ b/runtime/Includes/Embedded/2DVertex.nzsl @@ -4,9 +4,7 @@ module; struct VertIn { [location(0)] pos: vec4[f32], - [location(1)] color: vec4[f32], // unused - [location(2)] normal: vec4[f32], // unused - [location(3)] uv: vec2[f32] + [location(1)] uv: vec2[f32] } struct VertOut @@ -24,13 +22,13 @@ struct ViewerData struct SpriteData { color: vec4[f32], - position: vec2[f32] + position: vec4[f32] } external { [set(0), binding(0)] viewer_data: uniform[ViewerData], - model : push_constant[SpriteData] + model: push_constant[SpriteData] } [entry(vert)] @@ -40,6 +38,6 @@ fn main(input: VertIn) -> VertOut let output: VertOut; output.uv = input.uv; output.color = model.color; - output.pos = viewer_data.projection_matrix * vec4[f32](input.pos.xy + model.position, 0.0, 1.0); + output.pos = viewer_data.projection_matrix * (input.pos + model.position); return output; } diff --git a/runtime/Includes/Embedded/2DVertex.spv.h b/runtime/Includes/Embedded/2DVertex.spv.h index 94cf250..5a4f7f8 100644 --- a/runtime/Includes/Embedded/2DVertex.spv.h +++ b/runtime/Includes/Embedded/2DVertex.spv.h @@ -1,29 +1,29 @@ 
-3,2,35,7,0,0,1,0,39,0,0,0,77,0,0,0,0,0,0,0,17,0,2,0,1,0,0,0,14,0, -3,0,0,0,0,0,1,0,0,0,15,0,12,0,0,0,0,0,37,0,0,0,109,97,105,110,0,0,0,0, -14,0,0,0,18,0,0,0,20,0,0,0,23,0,0,0,29,0,0,0,31,0,0,0,32,0,0,0,3,0, +3,2,35,7,0,0,1,0,39,0,0,0,73,0,0,0,0,0,0,0,17,0,2,0,1,0,0,0,14,0, +3,0,0,0,0,0,1,0,0,0,15,0,12,0,0,0,0,0,36,0,0,0,109,97,105,110,0,0,0,0, +13,0,0,0,17,0,0,0,19,0,0,0,23,0,0,0,29,0,0,0,31,0,0,0,32,0,0,0,3,0, 3,0,0,0,0,0,100,0,0,0,5,0,5,0,4,0,0,0,86,105,101,119,101,114,68,97,116,97,0,0, 6,0,8,0,4,0,0,0,0,0,0,0,112,114,111,106,101,99,116,105,111,110,95,109,97,116,114,105,120,0, -0,0,5,0,5,0,8,0,0,0,83,112,114,105,116,101,68,97,116,97,0,0,6,0,5,0,8,0,0,0, -0,0,0,0,99,111,108,111,114,0,0,0,6,0,6,0,8,0,0,0,1,0,0,0,112,111,115,105,116,105, +0,0,5,0,5,0,7,0,0,0,83,112,114,105,116,101,68,97,116,97,0,0,6,0,5,0,7,0,0,0, +0,0,0,0,99,111,108,111,114,0,0,0,6,0,6,0,7,0,0,0,1,0,0,0,112,111,115,105,116,105, 111,110,0,0,0,0,5,0,4,0,26,0,0,0,86,101,114,116,73,110,0,0,6,0,4,0,26,0,0,0, 0,0,0,0,112,111,115,0,6,0,5,0,26,0,0,0,1,0,0,0,99,111,108,111,114,0,0,0,6,0, 5,0,26,0,0,0,2,0,0,0,110,111,114,109,97,108,0,0,6,0,4,0,26,0,0,0,3,0,0,0, 117,118,0,0,5,0,4,0,33,0,0,0,86,101,114,116,79,117,116,0,6,0,5,0,33,0,0,0,0,0, 0,0,99,111,108,111,114,0,0,0,6,0,4,0,33,0,0,0,1,0,0,0,117,118,0,0,6,0,4,0, 33,0,0,0,2,0,0,0,112,111,115,0,5,0,5,0,6,0,0,0,118,105,101,119,101,114,95,100,97,116, -97,0,5,0,4,0,10,0,0,0,109,111,100,101,108,0,0,0,5,0,3,0,14,0,0,0,112,111,115,0, -5,0,4,0,18,0,0,0,99,111,108,111,114,0,0,0,5,0,4,0,20,0,0,0,110,111,114,109,97,108, +97,0,5,0,4,0,9,0,0,0,109,111,100,101,108,0,0,0,5,0,3,0,13,0,0,0,112,111,115,0, +5,0,4,0,17,0,0,0,99,111,108,111,114,0,0,0,5,0,4,0,19,0,0,0,110,111,114,109,97,108, 0,0,5,0,3,0,23,0,0,0,117,118,0,0,5,0,4,0,29,0,0,0,99,111,108,111,114,0,0,0, 5,0,3,0,31,0,0,0,117,118,0,0,5,0,5,0,32,0,0,0,112,111,115,105,116,105,111,110,0,0, -0,0,5,0,4,0,37,0,0,0,109,97,105,110,0,0,0,0,71,0,4,0,6,0,0,0,33,0,0,0, +0,0,5,0,4,0,36,0,0,0,109,97,105,110,0,0,0,0,71,0,4,0,6,0,0,0,33,0,0,0, 0,0,0,0,71,0,4,0,6,0,0,0,34,0,0,0,0,0,0,0,71,0,4,0,32,0,0,0,11,0, -0,0,0,0,0,0,71,0,4,0,14,0,0,0,30,0,0,0,0,0,0,0,71,0,4,0,18,0,0,0, -30,0,0,0,1,0,0,0,71,0,4,0,20,0,0,0,30,0,0,0,2,0,0,0,71,0,4,0,23,0, +0,0,0,0,0,0,71,0,4,0,13,0,0,0,30,0,0,0,0,0,0,0,71,0,4,0,17,0,0,0, +30,0,0,0,1,0,0,0,71,0,4,0,19,0,0,0,30,0,0,0,2,0,0,0,71,0,4,0,23,0, 0,0,30,0,0,0,3,0,0,0,71,0,4,0,29,0,0,0,30,0,0,0,0,0,0,0,71,0,4,0, 31,0,0,0,30,0,0,0,1,0,0,0,71,0,3,0,4,0,0,0,2,0,0,0,72,0,4,0,4,0, 0,0,0,0,0,0,5,0,0,0,72,0,5,0,4,0,0,0,0,0,0,0,7,0,0,0,16,0,0,0, -72,0,5,0,4,0,0,0,0,0,0,0,35,0,0,0,0,0,0,0,71,0,3,0,8,0,0,0,2,0, -0,0,72,0,5,0,8,0,0,0,0,0,0,0,35,0,0,0,0,0,0,0,72,0,5,0,8,0,0,0, +72,0,5,0,4,0,0,0,0,0,0,0,35,0,0,0,0,0,0,0,71,0,3,0,7,0,0,0,2,0, +0,0,72,0,5,0,7,0,0,0,0,0,0,0,35,0,0,0,0,0,0,0,72,0,5,0,7,0,0,0, 1,0,0,0,35,0,0,0,16,0,0,0,72,0,5,0,26,0,0,0,0,0,0,0,35,0,0,0,0,0, 0,0,72,0,5,0,26,0,0,0,1,0,0,0,35,0,0,0,16,0,0,0,72,0,5,0,26,0,0,0, 2,0,0,0,35,0,0,0,32,0,0,0,72,0,5,0,26,0,0,0,3,0,0,0,35,0,0,0,48,0, @@ -31,50 +31,47 @@ 1,0,0,0,35,0,0,0,16,0,0,0,72,0,5,0,33,0,0,0,2,0,0,0,35,0,0,0,32,0, 0,0,22,0,3,0,1,0,0,0,32,0,0,0,23,0,4,0,2,0,0,0,1,0,0,0,4,0,0,0, 24,0,4,0,3,0,0,0,2,0,0,0,4,0,0,0,30,0,3,0,4,0,0,0,3,0,0,0,32,0, -4,0,5,0,0,0,2,0,0,0,4,0,0,0,23,0,4,0,7,0,0,0,1,0,0,0,2,0,0,0, -30,0,4,0,8,0,0,0,2,0,0,0,7,0,0,0,32,0,4,0,9,0,0,0,9,0,0,0,8,0, -0,0,19,0,2,0,11,0,0,0,33,0,3,0,12,0,0,0,11,0,0,0,32,0,4,0,13,0,0,0, -1,0,0,0,2,0,0,0,21,0,4,0,15,0,0,0,32,0,0,0,1,0,0,0,43,0,4,0,15,0, -0,0,16,0,0,0,0,0,0,0,32,0,4,0,17,0,0,0,7,0,0,0,2,0,0,0,43,0,4,0, 
-15,0,0,0,19,0,0,0,1,0,0,0,43,0,4,0,15,0,0,0,21,0,0,0,2,0,0,0,32,0, -4,0,22,0,0,0,1,0,0,0,7,0,0,0,43,0,4,0,15,0,0,0,24,0,0,0,3,0,0,0, -32,0,4,0,25,0,0,0,7,0,0,0,7,0,0,0,30,0,6,0,26,0,0,0,2,0,0,0,2,0, -0,0,2,0,0,0,7,0,0,0,32,0,4,0,27,0,0,0,7,0,0,0,26,0,0,0,32,0,4,0, -28,0,0,0,3,0,0,0,2,0,0,0,32,0,4,0,30,0,0,0,3,0,0,0,7,0,0,0,30,0, -5,0,33,0,0,0,2,0,0,0,7,0,0,0,2,0,0,0,43,0,4,0,1,0,0,0,34,0,0,0, -0,0,128,63,32,0,4,0,35,0,0,0,7,0,0,0,33,0,0,0,43,0,4,0,1,0,0,0,36,0, -0,0,0,0,0,0,32,0,4,0,51,0,0,0,7,0,0,0,1,0,0,0,32,0,4,0,56,0,0,0, -9,0,0,0,2,0,0,0,32,0,4,0,60,0,0,0,2,0,0,0,3,0,0,0,32,0,4,0,66,0, -0,0,9,0,0,0,7,0,0,0,59,0,4,0,5,0,0,0,6,0,0,0,2,0,0,0,59,0,4,0, -9,0,0,0,10,0,0,0,9,0,0,0,59,0,4,0,13,0,0,0,14,0,0,0,1,0,0,0,59,0, -4,0,13,0,0,0,18,0,0,0,1,0,0,0,59,0,4,0,13,0,0,0,20,0,0,0,1,0,0,0, -59,0,4,0,22,0,0,0,23,0,0,0,1,0,0,0,59,0,4,0,28,0,0,0,29,0,0,0,3,0, -0,0,59,0,4,0,30,0,0,0,31,0,0,0,3,0,0,0,59,0,4,0,28,0,0,0,32,0,0,0, -3,0,0,0,54,0,5,0,11,0,0,0,37,0,0,0,0,0,0,0,12,0,0,0,248,0,2,0,38,0, -0,0,59,0,4,0,35,0,0,0,39,0,0,0,7,0,0,0,59,0,4,0,27,0,0,0,40,0,0,0, -7,0,0,0,65,0,5,0,17,0,0,0,41,0,0,0,40,0,0,0,16,0,0,0,63,0,3,0,41,0, -0,0,14,0,0,0,65,0,5,0,17,0,0,0,42,0,0,0,40,0,0,0,19,0,0,0,63,0,3,0, -42,0,0,0,18,0,0,0,65,0,5,0,17,0,0,0,43,0,0,0,40,0,0,0,21,0,0,0,63,0, -3,0,43,0,0,0,20,0,0,0,65,0,5,0,25,0,0,0,44,0,0,0,40,0,0,0,24,0,0,0, -63,0,3,0,44,0,0,0,23,0,0,0,65,0,5,0,25,0,0,0,45,0,0,0,40,0,0,0,24,0, -0,0,61,0,4,0,7,0,0,0,46,0,0,0,45,0,0,0,81,0,5,0,1,0,0,0,47,0,0,0, -46,0,0,0,0,0,0,0,127,0,4,0,1,0,0,0,48,0,0,0,34,0,0,0,133,0,5,0,1,0, -0,0,49,0,0,0,47,0,0,0,48,0,0,0,65,0,5,0,25,0,0,0,50,0,0,0,40,0,0,0, -24,0,0,0,65,0,5,0,51,0,0,0,52,0,0,0,50,0,0,0,16,0,0,0,62,0,3,0,52,0, -0,0,49,0,0,0,65,0,5,0,25,0,0,0,53,0,0,0,40,0,0,0,24,0,0,0,61,0,4,0, -7,0,0,0,54,0,0,0,53,0,0,0,65,0,5,0,25,0,0,0,55,0,0,0,39,0,0,0,19,0, -0,0,62,0,3,0,55,0,0,0,54,0,0,0,65,0,5,0,56,0,0,0,57,0,0,0,10,0,0,0, -16,0,0,0,61,0,4,0,2,0,0,0,58,0,0,0,57,0,0,0,65,0,5,0,17,0,0,0,59,0, -0,0,39,0,0,0,16,0,0,0,62,0,3,0,59,0,0,0,58,0,0,0,65,0,5,0,60,0,0,0, -61,0,0,0,6,0,0,0,16,0,0,0,61,0,4,0,3,0,0,0,62,0,0,0,61,0,0,0,65,0, -5,0,17,0,0,0,63,0,0,0,40,0,0,0,16,0,0,0,61,0,4,0,2,0,0,0,64,0,0,0, -63,0,0,0,79,0,7,0,7,0,0,0,65,0,0,0,64,0,0,0,64,0,0,0,0,0,0,0,1,0, -0,0,65,0,5,0,66,0,0,0,67,0,0,0,10,0,0,0,19,0,0,0,61,0,4,0,7,0,0,0, -68,0,0,0,67,0,0,0,129,0,5,0,7,0,0,0,69,0,0,0,65,0,0,0,68,0,0,0,80,0, -6,0,2,0,0,0,70,0,0,0,69,0,0,0,36,0,0,0,34,0,0,0,145,0,5,0,2,0,0,0, -71,0,0,0,62,0,0,0,70,0,0,0,65,0,5,0,17,0,0,0,72,0,0,0,39,0,0,0,21,0, -0,0,62,0,3,0,72,0,0,0,71,0,0,0,61,0,4,0,33,0,0,0,73,0,0,0,39,0,0,0, -81,0,5,0,2,0,0,0,74,0,0,0,73,0,0,0,0,0,0,0,62,0,3,0,29,0,0,0,74,0, -0,0,81,0,5,0,7,0,0,0,75,0,0,0,73,0,0,0,1,0,0,0,62,0,3,0,31,0,0,0, -75,0,0,0,81,0,5,0,2,0,0,0,76,0,0,0,73,0,0,0,2,0,0,0,62,0,3,0,32,0, -0,0,76,0,0,0,253,0,1,0,56,0,1,0 +4,0,5,0,0,0,2,0,0,0,4,0,0,0,30,0,4,0,7,0,0,0,2,0,0,0,2,0,0,0, +32,0,4,0,8,0,0,0,9,0,0,0,7,0,0,0,19,0,2,0,10,0,0,0,33,0,3,0,11,0, +0,0,10,0,0,0,32,0,4,0,12,0,0,0,1,0,0,0,2,0,0,0,21,0,4,0,14,0,0,0, +32,0,0,0,1,0,0,0,43,0,4,0,14,0,0,0,15,0,0,0,0,0,0,0,32,0,4,0,16,0, +0,0,7,0,0,0,2,0,0,0,43,0,4,0,14,0,0,0,18,0,0,0,1,0,0,0,43,0,4,0, +14,0,0,0,20,0,0,0,2,0,0,0,23,0,4,0,21,0,0,0,1,0,0,0,2,0,0,0,32,0, +4,0,22,0,0,0,1,0,0,0,21,0,0,0,43,0,4,0,14,0,0,0,24,0,0,0,3,0,0,0, +32,0,4,0,25,0,0,0,7,0,0,0,21,0,0,0,30,0,6,0,26,0,0,0,2,0,0,0,2,0, +0,0,2,0,0,0,21,0,0,0,32,0,4,0,27,0,0,0,7,0,0,0,26,0,0,0,32,0,4,0, +28,0,0,0,3,0,0,0,2,0,0,0,32,0,4,0,30,0,0,0,3,0,0,0,21,0,0,0,30,0, +5,0,33,0,0,0,2,0,0,0,21,0,0,0,2,0,0,0,43,0,4,0,1,0,0,0,34,0,0,0, 
+0,0,128,63,32,0,4,0,35,0,0,0,7,0,0,0,33,0,0,0,32,0,4,0,50,0,0,0,7,0, +0,0,1,0,0,0,32,0,4,0,55,0,0,0,9,0,0,0,2,0,0,0,32,0,4,0,59,0,0,0, +2,0,0,0,3,0,0,0,59,0,4,0,5,0,0,0,6,0,0,0,2,0,0,0,59,0,4,0,8,0, +0,0,9,0,0,0,9,0,0,0,59,0,4,0,12,0,0,0,13,0,0,0,1,0,0,0,59,0,4,0, +12,0,0,0,17,0,0,0,1,0,0,0,59,0,4,0,12,0,0,0,19,0,0,0,1,0,0,0,59,0, +4,0,22,0,0,0,23,0,0,0,1,0,0,0,59,0,4,0,28,0,0,0,29,0,0,0,3,0,0,0, +59,0,4,0,30,0,0,0,31,0,0,0,3,0,0,0,59,0,4,0,28,0,0,0,32,0,0,0,3,0, +0,0,54,0,5,0,10,0,0,0,36,0,0,0,0,0,0,0,11,0,0,0,248,0,2,0,37,0,0,0, +59,0,4,0,35,0,0,0,38,0,0,0,7,0,0,0,59,0,4,0,27,0,0,0,39,0,0,0,7,0, +0,0,65,0,5,0,16,0,0,0,40,0,0,0,39,0,0,0,15,0,0,0,63,0,3,0,40,0,0,0, +13,0,0,0,65,0,5,0,16,0,0,0,41,0,0,0,39,0,0,0,18,0,0,0,63,0,3,0,41,0, +0,0,17,0,0,0,65,0,5,0,16,0,0,0,42,0,0,0,39,0,0,0,20,0,0,0,63,0,3,0, +42,0,0,0,19,0,0,0,65,0,5,0,25,0,0,0,43,0,0,0,39,0,0,0,24,0,0,0,63,0, +3,0,43,0,0,0,23,0,0,0,65,0,5,0,25,0,0,0,44,0,0,0,39,0,0,0,24,0,0,0, +61,0,4,0,21,0,0,0,45,0,0,0,44,0,0,0,81,0,5,0,1,0,0,0,46,0,0,0,45,0, +0,0,0,0,0,0,127,0,4,0,1,0,0,0,47,0,0,0,34,0,0,0,133,0,5,0,1,0,0,0, +48,0,0,0,46,0,0,0,47,0,0,0,65,0,5,0,25,0,0,0,49,0,0,0,39,0,0,0,24,0, +0,0,65,0,5,0,50,0,0,0,51,0,0,0,49,0,0,0,15,0,0,0,62,0,3,0,51,0,0,0, +48,0,0,0,65,0,5,0,25,0,0,0,52,0,0,0,39,0,0,0,24,0,0,0,61,0,4,0,21,0, +0,0,53,0,0,0,52,0,0,0,65,0,5,0,25,0,0,0,54,0,0,0,38,0,0,0,18,0,0,0, +62,0,3,0,54,0,0,0,53,0,0,0,65,0,5,0,55,0,0,0,56,0,0,0,9,0,0,0,15,0, +0,0,61,0,4,0,2,0,0,0,57,0,0,0,56,0,0,0,65,0,5,0,16,0,0,0,58,0,0,0, +38,0,0,0,15,0,0,0,62,0,3,0,58,0,0,0,57,0,0,0,65,0,5,0,59,0,0,0,60,0, +0,0,6,0,0,0,15,0,0,0,61,0,4,0,3,0,0,0,61,0,0,0,60,0,0,0,65,0,5,0, +16,0,0,0,62,0,0,0,39,0,0,0,15,0,0,0,61,0,4,0,2,0,0,0,63,0,0,0,62,0, +0,0,65,0,5,0,55,0,0,0,64,0,0,0,9,0,0,0,18,0,0,0,61,0,4,0,2,0,0,0, +65,0,0,0,64,0,0,0,129,0,5,0,2,0,0,0,66,0,0,0,63,0,0,0,65,0,0,0,145,0, +5,0,2,0,0,0,67,0,0,0,61,0,0,0,66,0,0,0,65,0,5,0,16,0,0,0,68,0,0,0, +38,0,0,0,20,0,0,0,62,0,3,0,68,0,0,0,67,0,0,0,61,0,4,0,33,0,0,0,69,0, +0,0,38,0,0,0,81,0,5,0,2,0,0,0,70,0,0,0,69,0,0,0,0,0,0,0,62,0,3,0, +29,0,0,0,70,0,0,0,81,0,5,0,21,0,0,0,71,0,0,0,69,0,0,0,1,0,0,0,62,0, +3,0,31,0,0,0,71,0,0,0,81,0,5,0,2,0,0,0,72,0,0,0,69,0,0,0,2,0,0,0, +62,0,3,0,32,0,0,0,72,0,0,0,253,0,1,0,56,0,1,0 diff --git a/runtime/Includes/Graphics/Scene.h b/runtime/Includes/Graphics/Scene.h index 32f06bf..760c6fb 100644 --- a/runtime/Includes/Graphics/Scene.h +++ b/runtime/Includes/Graphics/Scene.h @@ -2,6 +2,7 @@ #define __MLX_SCENE__ #include +#include namespace mlx { @@ -18,14 +19,20 @@ namespace mlx Sprite& CreateSprite(std::shared_ptr texture) noexcept; - [[nodiscard]] inline const std::vector>& GetSprites() const noexcept { return m_sprites; } - [[nodiscard]] inline const SceneDescriptor& GetDescription() const noexcept { return m_descriptor; } + inline void ResetSprites() { m_sprites.clear(); } + + [[nodiscard]] MLX_FORCEINLINE const std::vector>& GetSprites() const noexcept { return m_sprites; } + [[nodiscard]] MLX_FORCEINLINE const SceneDescriptor& GetDescription() const noexcept { return m_descriptor; } + [[nodiscard]] MLX_FORCEINLINE DepthImage& GetDepth() noexcept { return m_depth; } + [[nodiscard]] MLX_FORCEINLINE ViewerData& GetViewerData() noexcept { return m_viewer_data; } ~Scene() = default; private: SceneDescriptor m_descriptor; std::vector> m_sprites; + DepthImage m_depth; + ViewerData m_viewer_data; }; } diff --git a/runtime/Includes/Graphics/Sprite.h b/runtime/Includes/Graphics/Sprite.h index 1720f6a..3f4b426 100644 --- a/runtime/Includes/Graphics/Sprite.h +++ 
b/runtime/Includes/Graphics/Sprite.h @@ -1,7 +1,7 @@ #ifndef __MLX_SPRITE__ #define __MLX_SPRITE__ -#include +#include #include #include #include @@ -17,10 +17,10 @@ namespace mlx Sprite(std::shared_ptr texture); inline void SetColor(Vec4f color) noexcept { m_color = color; } - inline void SetPosition(Vec2ui position) noexcept { m_position = position; } + inline void SetPosition(Vec3f position) noexcept { m_position = position; } [[nodiscard]] inline const Vec4f& GetColor() const noexcept { return m_color; } - [[nodiscard]] inline const Vec2ui& GetPosition() const noexcept { return m_position; } + [[nodiscard]] inline const Vec3f& GetPosition() const noexcept { return m_position; } [[nodiscard]] inline std::shared_ptr GetMesh() const { return p_mesh; } [[nodiscard]] inline std::shared_ptr GetTexture() const { return p_texture; } @@ -46,7 +46,7 @@ namespace mlx std::shared_ptr p_texture; std::shared_ptr p_mesh; Vec4f m_color = Vec4f{ 1.0f, 1.0f, 1.0f, 1.0f }; - Vec2ui m_position = Vec2ui{ 0, 0 }; + Vec3f m_position = Vec4f{ 0.0f, 0.0f, 0.0f }; }; } diff --git a/runtime/Includes/Renderer/Descriptor.h b/runtime/Includes/Renderer/Descriptor.h index 9a28ee3..b99fbfa 100644 --- a/runtime/Includes/Renderer/Descriptor.h +++ b/runtime/Includes/Renderer/Descriptor.h @@ -28,6 +28,7 @@ namespace mlx void SetStorageBuffer(std::size_t i, std::uint32_t binding, class GPUBuffer& buffer); void SetUniformBuffer(std::size_t i, std::uint32_t binding, class GPUBuffer& buffer); void Update(std::size_t i, VkCommandBuffer cmd = VK_NULL_HANDLE) noexcept; + void Reallocate() noexcept; [[nodiscard]] inline VkDescriptorSet GetSet(std::size_t i) const noexcept { return m_set[i]; } [[nodiscard]] inline DescriptorSet Duplicate() const { return DescriptorSet{ m_set_layout, m_descriptors }; } diff --git a/runtime/Includes/Renderer/Enums.h b/runtime/Includes/Renderer/Enums.h index 684f4e0..ddc0487 100644 --- a/runtime/Includes/Renderer/Enums.h +++ b/runtime/Includes/Renderer/Enums.h @@ -17,6 +17,7 @@ namespace mlx enum class ImageType { Color = 0, + Depth, EndEnum }; diff --git a/runtime/Includes/Renderer/Image.h b/runtime/Includes/Renderer/Image.h index 2c3809e..fb9952e 100644 --- a/runtime/Includes/Renderer/Image.h +++ b/runtime/Includes/Renderer/Image.h @@ -23,7 +23,7 @@ namespace mlx m_layout = layout; } - void Init(ImageType type, std::uint32_t width, std::uint32_t height, VkFormat format, VkImageTiling tiling, VkImageUsageFlags usage, VkMemoryPropertyFlags properties, bool is_multisampled = false); + void Init(ImageType type, std::uint32_t width, std::uint32_t height, VkFormat format, VkImageTiling tiling, VkImageUsageFlags usage, bool is_multisampled = false); void CreateImageView(VkImageViewType type, VkImageAspectFlags aspectFlags, int layer_count = 1) noexcept; void CreateSampler() noexcept; void TransitionLayout(VkImageLayout new_layout, VkCommandBuffer cmd = VK_NULL_HANDLE); @@ -34,7 +34,6 @@ namespace mlx virtual void Destroy() noexcept; [[nodiscard]] MLX_FORCEINLINE VkImage Get() const noexcept { return m_image; } - [[nodiscard]] MLX_FORCEINLINE VkImage operator()() const noexcept { return m_image; } [[nodiscard]] MLX_FORCEINLINE VkDeviceMemory GetDeviceMemory() const noexcept { return m_memory.memory; } [[nodiscard]] MLX_FORCEINLINE VkImageView GetImageView() const noexcept { return m_image_view; } [[nodiscard]] MLX_FORCEINLINE VkFormat GetFormat() const noexcept { return m_format; } @@ -62,6 +61,21 @@ namespace mlx bool m_is_multisampled = false; }; + class DepthImage : public Image + { + public: + 
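The DepthImage helper introduced here picks the first depth format the device supports from a D32/D24 candidate list, creates a depth-only image view and transitions the image to VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL, as its Init body just below shows. The rest of the patch wires it up: the Scene owns one DepthImage, Passes.cpp clears it before the 2D pass, and the graphics pipeline attaches it and enables VK_COMPARE_OP_LESS depth testing by default. A rough sketch of that flow, assuming an active command buffer and a pipeline descriptor in scope (the extent values are placeholders):

    // Hedged sketch of how this patch drives the depth attachment; not committed code.
    mlx::DepthImage depth;
    depth.Init(1280, 720);              // finds a supported D32/D24 depth format internally
    depth.Clear(cmd, {});               // cleared once per frame before the 2D pass (cmd is assumed)
    pipeline_descriptor.depth = &depth; // the pipeline then enables VK_COMPARE_OP_LESS testing
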
DepthImage() = default; + inline void Init(std::uint32_t width, std::uint32_t height, bool is_multisampled = false) + { + std::vector candidates = { VK_FORMAT_D32_SFLOAT, VK_FORMAT_D32_SFLOAT_S8_UINT, VK_FORMAT_D24_UNORM_S8_UINT }; + VkFormat format = kvfFindSupportFormatInCandidates(RenderCore::Get().GetDevice(), candidates.data(), candidates.size(), VK_IMAGE_TILING_OPTIMAL, VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT); + Image::Init(ImageType::Depth, width, height, format, VK_IMAGE_TILING_OPTIMAL, VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT, is_multisampled); + Image::CreateImageView(VK_IMAGE_VIEW_TYPE_2D, VK_IMAGE_ASPECT_DEPTH_BIT); + Image::TransitionLayout(VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL); + } + ~DepthImage() = default; + }; + class Texture : public Image { public: @@ -72,7 +86,7 @@ namespace mlx } inline void Init(CPUBuffer pixels, std::uint32_t width, std::uint32_t height, VkFormat format = VK_FORMAT_R8G8B8A8_SRGB, bool is_multisampled = false) { - Image::Init(ImageType::Color, width, height, format, VK_IMAGE_TILING_OPTIMAL, VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT, is_multisampled); + Image::Init(ImageType::Color, width, height, format, VK_IMAGE_TILING_OPTIMAL, VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, is_multisampled); Image::CreateImageView(VK_IMAGE_VIEW_TYPE_2D, VK_IMAGE_ASPECT_COLOR_BIT); Image::CreateSampler(); if(pixels) diff --git a/runtime/Includes/Renderer/Pipelines/Graphics.h b/runtime/Includes/Renderer/Pipelines/Graphics.h index 027aaf4..46304bf 100644 --- a/runtime/Includes/Renderer/Pipelines/Graphics.h +++ b/runtime/Includes/Renderer/Pipelines/Graphics.h @@ -14,6 +14,7 @@ namespace mlx std::shared_ptr fragment_shader; std::vector> color_attachments; NonOwningPtr renderer = nullptr; + NonOwningPtr depth = nullptr; bool clear_color_attachments = true; bool no_vertex_inputs = false; }; @@ -51,6 +52,7 @@ namespace mlx VkPipeline m_pipeline = VK_NULL_HANDLE; VkPipelineLayout m_pipeline_layout = VK_NULL_HANDLE; NonOwningPtr p_renderer; + NonOwningPtr p_depth; }; } diff --git a/runtime/Includes/Renderer/Vertex.h b/runtime/Includes/Renderer/Vertex.h index bcc501e..d815a3e 100644 --- a/runtime/Includes/Renderer/Vertex.h +++ b/runtime/Includes/Renderer/Vertex.h @@ -8,15 +8,14 @@ namespace mlx { struct Vertex { - alignas(16) Vec2f position = Vec4f{ 0.0f, 0.0f }; - alignas(16) Vec4f color = Vec4f{ 1.0f, 1.0f, 1.0f, 1.0f }; + alignas(16) Vec4f position = Vec4f{ 0.0f, 0.0f, 0.0f, 1.0f }; alignas(16) Vec2f uv = Vec2f{ 0.0f, 0.0f }; Vertex() = default; - Vertex(Vec2f p, Vec4f c, Vec2f u) : position(std::move(p)), color(std::move(c)), uv(std::move(u)) {} + Vertex(Vec4f p, Vec2f u) : position(std::move(p)), uv(std::move(u)) {} [[nodiscard]] inline static VkVertexInputBindingDescription GetBindingDescription(); - [[nodiscard]] inline static std::array GetAttributeDescriptions(); + [[nodiscard]] inline static std::array GetAttributeDescriptions(); }; } diff --git a/runtime/Includes/Renderer/Vertex.inl b/runtime/Includes/Renderer/Vertex.inl index 0c6b9ea..f23eb77 100644 --- a/runtime/Includes/Renderer/Vertex.inl +++ b/runtime/Includes/Renderer/Vertex.inl @@ -12,24 +12,19 @@ namespace mlx return binding_description; } - std::array Vertex::GetAttributeDescriptions() + std::array Vertex::GetAttributeDescriptions() { std::array attribute_descriptions; attribute_descriptions[0].binding = 0; attribute_descriptions[0].location 
= 0; - attribute_descriptions[0].format = VK_FORMAT_R32G32_SFLOAT; + attribute_descriptions[0].format = VK_FORMAT_R32G32B32A32_SFLOAT; attribute_descriptions[0].offset = offsetof(Vertex, position); attribute_descriptions[1].binding = 0; attribute_descriptions[1].location = 1; - attribute_descriptions[1].format = VK_FORMAT_R32G32B32A32_SFLOAT; - attribute_descriptions[1].offset = offsetof(Vertex, color); - - attribute_descriptions[2].binding = 0; - attribute_descriptions[2].location = 2; - attribute_descriptions[2].format = VK_FORMAT_R32G32_SFLOAT; - attribute_descriptions[2].offset = offsetof(Vertex, uv); + attribute_descriptions[1].format = VK_FORMAT_R32G32_SFLOAT; + attribute_descriptions[1].offset = offsetof(Vertex, uv); return attribute_descriptions; } diff --git a/runtime/Sources/Core/Graphics.cpp b/runtime/Sources/Core/Graphics.cpp index 52b4419..2f9c720 100644 --- a/runtime/Sources/Core/Graphics.cpp +++ b/runtime/Sources/Core/Graphics.cpp @@ -1,5 +1,4 @@ #include - #include namespace mlx @@ -14,8 +13,11 @@ namespace mlx MLX_PROFILE_FUNCTION(); m_renderer.SetWindow(nullptr); m_renderer.Init(render_target); - m_pixel_put_pipeline.Init(w, h, m_renderer); - m_text_manager.Init(m_renderer); + m_scene_renderer.Init(); + + SceneDescriptor descriptor{}; + descriptor.renderer = &m_renderer; + p_scene = std::make_unique(std::move(descriptor)); } GraphicsSupport::GraphicsSupport(std::size_t w, std::size_t h, std::string title, int id) : @@ -28,29 +30,21 @@ namespace mlx MLX_PROFILE_FUNCTION(); m_renderer.SetWindow(p_window.get()); m_renderer.Init(nullptr); - m_pixel_put_pipeline.Init(w, h, m_renderer); - m_text_manager.Init(m_renderer); + m_scene_renderer.Init(); + + SceneDescriptor descriptor{}; + descriptor.renderer = &m_renderer; + p_scene = std::make_unique(std::move(descriptor)); } void GraphicsSupport::Render() noexcept { MLX_PROFILE_FUNCTION(); - if(!m_renderer.BeginFrame()) - return; - m_proj = glm::ortho(0, m_width, 0, m_height); - m_renderer.GetUniformBuffer()->SetData(sizeof(m_proj), &m_proj); - - m_renderer.getVertDescriptorSet().Bind(); - - for(auto& data : m_drawlist) - data->Render(m_renderer); - - m_pixel_put_pipeline.Render(m_renderer); - - m_renderer.EndFrame(); - - for(auto& data : _drawlist) - data->ResetUpdate(); + if(m_renderer.BeginFrame()) + { + m_scene_renderer.Render(*p_scene, m_renderer); + m_renderer.EndFrame(); + } #ifdef GRAPHICS_MEMORY_DUMP // dump memory to file every two seconds @@ -67,9 +61,9 @@ namespace mlx GraphicsSupport::~GraphicsSupport() { MLX_PROFILE_FUNCTION(); - vkDeviceWaitIdle(RenderCore::Get().GetDevice().Get()); - m_text_manager.Destroy(); - m_pixel_put_pipeline.Destroy(); + RenderCore::Get().WaitDeviceIdle(); + p_scene.reset(); + m_scene_renderer.Destroy(); m_renderer->Destroy(); if(p_window) p_window->Destroy(); diff --git a/runtime/Sources/Core/SDLManager.cpp b/runtime/Sources/Core/SDLManager.cpp index c4ce300..f397e82 100644 --- a/runtime/Sources/Core/SDLManager.cpp +++ b/runtime/Sources/Core/SDLManager.cpp @@ -1,7 +1,7 @@ #include #include #include -#include +#include namespace mlx { @@ -101,6 +101,8 @@ namespace mlx default: break; } + + return 0; }, &watcher_data); } diff --git a/runtime/Sources/Graphics/Scene.cpp b/runtime/Sources/Graphics/Scene.cpp index 547e641..c95fee7 100644 --- a/runtime/Sources/Graphics/Scene.cpp +++ b/runtime/Sources/Graphics/Scene.cpp @@ -3,11 +3,13 @@ #include #include -namespace Scop +namespace mlx { Scene::Scene(SceneDescriptor desc) : m_descriptor(std::move(desc)) { + Verify((bool)m_descriptor.renderer, 
"invalid renderer"); + m_depth.Init(m_descriptor.renderer->GetSwapchainImages().back().GetWidth(), m_descriptor.renderer->GetSwapchainImages().back().GetHeight()); } Sprite& Scene::CreateSprite(std::shared_ptr texture) noexcept diff --git a/runtime/Sources/Graphics/Sprite.cpp b/runtime/Sources/Graphics/Sprite.cpp index 8a65924..dbc5e3d 100644 --- a/runtime/Sources/Graphics/Sprite.cpp +++ b/runtime/Sources/Graphics/Sprite.cpp @@ -40,5 +40,12 @@ namespace mlx Verify((bool)texture, "Sprite: invalid texture"); p_mesh = CreateQuad(0, 0, texture->GetWidth(), texture->GetHeight()); p_texture = texture; + + func::function functor = [this](const EventBase& event) + { + if(event.What() == Event::DescriptorPoolResetEventCode) + m_set.Reallocate(); + }; + EventBus::RegisterListener({ functor, "__Sprite" + std::to_string(reinterpret_cast(this)) }); } } diff --git a/runtime/Sources/Renderer/Descriptor.cpp b/runtime/Sources/Renderer/Descriptor.cpp index 8d4ee93..2fa6d56 100644 --- a/runtime/Sources/Renderer/Descriptor.cpp +++ b/runtime/Sources/Renderer/Descriptor.cpp @@ -138,4 +138,10 @@ namespace mlx } vkUpdateDescriptorSets(RenderCore::Get().GetDevice(), writes.size(), writes.data(), 0, nullptr); } + + void Descriptor::Reallocate() noexcept + { + for(std::size_t i = 0; i < MAX_FRAMES_IN_FLIGHT; i++) + m_set[i] = kvfAllocateDescriptorSet(RenderCore::Get().GetDevice(), m_set_layout); + } } diff --git a/runtime/Sources/Renderer/Image.cpp b/runtime/Sources/Renderer/Image.cpp index 5ee95d0..971e7df 100644 --- a/runtime/Sources/Renderer/Image.cpp +++ b/runtime/Sources/Renderer/Image.cpp @@ -5,7 +5,7 @@ namespace mlx { - void Image::Init(ImageType type, std::uint32_t width, std::uint32_t height, VkFormat format, VkImageTiling tiling, VkImageUsageFlags usage, VkMemoryPropertyFlags properties, bool is_multisampled) + void Image::Init(ImageType type, std::uint32_t width, std::uint32_t height, VkFormat format, VkImageTiling tiling, VkImageUsageFlags usage, bool is_multisampled) { m_type = type; m_width = width; @@ -55,6 +55,8 @@ namespace mlx switch(m_type) { case ImageType::Color: kvf_type = KVF_IMAGE_COLOR; break; + case ImageType::Depth: kvf_type = KVF_IMAGE_DEPTH; break; + default: break; } kvfTransitionImageLayout(RenderCore::Get().GetDevice(), m_image, kvf_type, cmd, m_format, m_layout, new_layout, is_single_time_cmd_buffer); diff --git a/runtime/Sources/Renderer/Pipelines/Graphics.cpp b/runtime/Sources/Renderer/Pipelines/Graphics.cpp index 81bee86..e85d7f5 100644 --- a/runtime/Sources/Renderer/Pipelines/Graphics.cpp +++ b/runtime/Sources/Renderer/Pipelines/Graphics.cpp @@ -16,6 +16,7 @@ namespace Scop p_vertex_shader = descriptor.vertex_shader; p_fragment_shader = descriptor.fragment_shader; p_renderer = descriptor.renderer; + p_depth = descriptor.depth; std::vector push_constants; std::vector set_layouts; @@ -37,7 +38,10 @@ namespace Scop kvfGPipelineBuilderSetInputTopology(builder, VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST); kvfGPipelineBuilderSetCullMode(builder, VK_CULL_MODE_NONE, VK_FRONT_FACE_CLOCKWISE); kvfGPipelineBuilderEnableAlphaBlending(builder); - kvfGPipelineBuilderDisableDepthTest(builder); + if(p_depth) + kvfGPipelineBuilderEnableDepthTest(builder, (descriptor.depth_test_equal ? 
VK_COMPARE_OP_EQUAL : VK_COMPARE_OP_LESS), true); + else + kvfGPipelineBuilderDisableDepthTest(builder); kvfGPipelineBuilderSetPolygonMode(builder, VK_POLYGON_MODE_FILL, 1.0f); if(features.sampleRateShading) kvfGPipelineBuilderSetMultisamplingShading(builder, VK_SAMPLE_COUNT_1_BIT, 0.25f); @@ -84,6 +88,9 @@ namespace Scop m_clears[i].color.float32[3] = clear[3]; } + if(p_depth) + m_clears.back().depthStencil = VkClearDepthStencilValue{ 1.0f, 0 }; + kvfBeginRenderPass(m_renderpass, command_buffer, fb, fb_extent, m_clears.data(), m_clears.size()); vkCmdBindPipeline(command_buffer, GetPipelineBindPoint(), GetPipeline()); return true; @@ -127,10 +134,16 @@ namespace Scop for(NonOwningPtr image : render_targets) { - attachments.push_back(kvfBuildAttachmentDescription(KVF_IMAGE_COLOR, image->GetFormat(), image->GetLayout(), image->GetLayout(), clear_attachments, VK_SAMPLE_COUNT_1_BIT)); + attachments.push_back(kvfBuildAttachmentDescription((kvfIsDepthFormat(image->GetFormat()) ? KVF_IMAGE_DEPTH : KVF_IMAGE_COLOR), image->GetFormat(), image->GetLayout(), image->GetLayout(), clear_attachments, VK_SAMPLE_COUNT_1_BIT)); attachment_views.push_back(image->GetImageView()); } + if(p_depth) + { + attachments.push_back(kvfBuildAttachmentDescription((kvfIsDepthFormat(p_depth->GetFormat()) ? KVF_IMAGE_DEPTH : KVF_IMAGE_COLOR), p_depth->GetFormat(), p_depth->GetLayout(), p_depth->GetLayout(), clear_attachments, VK_SAMPLE_COUNT_1_BIT)); + attachment_views.push_back(p_depth->GetImageView()); + } + m_renderpass = kvfCreateRenderPass(RenderCore::Get().GetDevice(), attachments.data(), attachments.size(), GetPipelineBindPoint()); m_clears.clear(); m_clears.resize(attachments.size()); @@ -154,6 +167,9 @@ namespace Scop void GraphicPipeline::TransitionAttachments(VkCommandBuffer cmd) { + if(p_depth) + p_depth->TransitionLayout(VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL, cmd); + for(NonOwningPtr image : m_attachments) { if(!image->IsInit()) diff --git a/runtime/Sources/Renderer/RenderPasses/2DPass.cpp b/runtime/Sources/Renderer/RenderPasses/2DPass.cpp index 28f199e..bea3c70 100644 --- a/runtime/Sources/Renderer/RenderPasses/2DPass.cpp +++ b/runtime/Sources/Renderer/RenderPasses/2DPass.cpp @@ -11,7 +11,7 @@ namespace mlx struct SpriteData { Vec4f color; - Vec2f position; + Vec4f position; }; void Render2DPass::Init() @@ -43,10 +43,20 @@ namespace mlx }; p_fragment_shader = std::make_shared(fragment_shader, ShaderType::Fragment, std::move(fragment_shader_layout)); - std::function functor = [this](const EventBase& event) + func::function functor = [this](const EventBase& event) { if(event.What() == Event::ResizeEventCode) m_pipeline.Destroy(); + if(event.What() == Event::DescriptorPoolResetEventCode) + { + p_texture_set->Reallocate(); + p_viewer_data_set.Reallocate(); + for(std::size_t i = 0; i < MAX_FRAMES_IN_FLIGHT; i++) + { + p_viewer_data_set->SetUniformBuffer(i, 0, p_viewer_data_buffer->Get(i)); + p_viewer_data_set->Update(i); + } + } }; EventBus::RegisterListener({ functor, "__ScopRender2DPass" }); @@ -70,6 +80,7 @@ namespace mlx pipeline_descriptor.vertex_shader = p_vertex_shader; pipeline_descriptor.fragment_shader = p_fragment_shader; pipeline_descriptor.color_attachments = { &render_target }; + pipeline_descriptor.depth = scene.GetDepth(); pipeline_descriptor.clear_color_attachments = false; m_pipeline.Init(pipeline_descriptor); } @@ -87,7 +98,7 @@ namespace mlx for(auto sprite : scene.GetSprites()) { SpriteData sprite_data; - sprite_data.position = Vec2f{ static_cast(sprite->GetPosition().x), 
static_cast(sprite->GetPosition().y) }; + sprite_data.position = Vec4f{ sprite->GetPosition(), 1.0f }; sprite_data.color = sprite->GetColor(); if(!sprite->IsSetInit()) sprite->UpdateDescriptorSet(*p_texture_set); diff --git a/runtime/Sources/Renderer/RenderPasses/FinalPass.cpp b/runtime/Sources/Renderer/RenderPasses/FinalPass.cpp index d3b5cbe..5c2c410 100644 --- a/runtime/Sources/Renderer/RenderPasses/FinalPass.cpp +++ b/runtime/Sources/Renderer/RenderPasses/FinalPass.cpp @@ -30,10 +30,12 @@ namespace mlx }; p_fragment_shader = std::make_shared(fragment_shader_code, ShaderType::Fragment, std::move(fragment_shader_layout)); - std::function functor = [this](const EventBase& event) + func::function functor = [this](const EventBase& event) { if(event.What() == Event::ResizeEventCode) m_pipeline.Destroy(); + if(event.What() == Event::DescriptorPoolResetEventCode) + p_set->Reallocate(); }; EventBus::RegisterListener({ functor, "__ScopFinalPass" }); diff --git a/runtime/Sources/Renderer/RenderPasses/Passes.cpp b/runtime/Sources/Renderer/RenderPasses/Passes.cpp index a2a3e45..e905280 100644 --- a/runtime/Sources/Renderer/RenderPasses/Passes.cpp +++ b/runtime/Sources/Renderer/RenderPasses/Passes.cpp @@ -15,7 +15,7 @@ namespace mlx { if(!m_main_render_texture.IsInit()) { - std::function functor = [this, renderer](const EventBase& event) + func::function functor = [this, renderer](const EventBase& event) { if(event.What() == Event::ResizeEventCode) { @@ -31,6 +31,7 @@ namespace mlx } m_main_render_texture.Clear(renderer.GetActiveCommandBuffer(), Vec4f{ 0.0f, 0.0f, 0.0f, 1.0f }); + scene.GetDepth().Clear(renderer.GetActiveCommandBuffer(), {}); m_2Dpass.Pass(scene, renderer, m_main_render_texture); m_final.Pass(scene, renderer, m_main_render_texture); diff --git a/runtime/Sources/Renderer/Renderer.cpp b/runtime/Sources/Renderer/Renderer.cpp index 00f09b2..8aab412 100644 --- a/runtime/Sources/Renderer/Renderer.cpp +++ b/runtime/Sources/Renderer/Renderer.cpp @@ -18,11 +18,16 @@ namespace mlx { Event What() const override { return Event::FrameBeginEventCode; } }; + + struct DescriptorPoolResetEventBroadcast : public EventBase + { + Event What() const override { return Event::DescriptorPoolResetEventCode; } + }; } void Renderer::Init(NonOwningPtr window) { - std::function functor = [this](const EventBase& event) + func::function functor = [this](const EventBase& event) { if(event.What() == Event::ResizeEventCode) this->RequireFramebufferResize(); @@ -86,6 +91,7 @@ namespace mlx } m_current_frame_index = (m_current_frame_index + 1) % MAX_FRAMES_IN_FLIGHT; kvfResetDeviceDescriptorPools(RenderCore::Get().GetDevice()); + EventBus::SendBroadcast(Internal::DescriptorPoolResetEventBroadcast{}); } void Renderer::CreateSwapchain() From 1b996af83ff5c0c7491faa810ce98d3811e67189 Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Mon, 2 Sep 2024 13:36:02 +0200 Subject: [PATCH 016/131] starting to fix compilation issues --- Makefile | 2 +- runtime/Includes/Core/Application.inl | 16 ++++++---------- runtime/Includes/Core/Graphics.inl | 1 - runtime/Includes/Graphics/Sprite.h | 2 +- runtime/Includes/Maths/Angles.h | 8 +++----- runtime/Includes/Maths/Angles.inl | 5 +---- runtime/Includes/Maths/Constants.h | 10 +++------- runtime/Includes/Maths/Enums.h | 8 +++----- runtime/Includes/Maths/EulerAngles.h | 8 +++----- runtime/Includes/Maths/EulerAngles.inl | 2 +- runtime/Includes/Maths/Mat4.h | 10 +++------- runtime/Includes/Maths/Mat4.inl | 6 +----- runtime/Includes/Maths/MathsUtils.h | 8 +++----- runtime/Includes/Maths/MathsUtils.inl 
| 5 +---- runtime/Includes/Maths/Quaternions.h | 6 +++--- runtime/Includes/Maths/Quaternions.inl | 12 ++++++------ runtime/Includes/Maths/Vec2.h | 17 +++++------------ runtime/Includes/Maths/Vec2.inl | 7 +++---- runtime/Includes/Maths/Vec3.h | 17 +++++------------ runtime/Includes/Maths/Vec3.inl | 7 +++---- runtime/Includes/Maths/Vec4.h | 17 +++++------------ runtime/Includes/Maths/Vec4.inl | 7 +++---- runtime/Includes/Platform/Window.h | 1 + runtime/Includes/PreCompiled.h | 2 ++ runtime/Includes/Renderer/Buffer.h | 8 ++++---- runtime/Includes/Renderer/Image.h | 2 +- runtime/Includes/Renderer/Renderer.h | 2 +- runtime/Includes/Renderer/Vertex.inl | 2 +- runtime/Sources/Core/Application.cpp | 11 ++++------- 29 files changed, 77 insertions(+), 132 deletions(-) diff --git a/Makefile b/Makefile index cb43f27..93a6343 100644 --- a/Makefile +++ b/Makefile @@ -27,7 +27,7 @@ MODE = "release" CXX = clang++ -CXXFLAGS = -std=c++17 -O3 -fPIC -Wall -Wextra -Wno-deprecated -DSDL_MAIN_HANDLED +CXXFLAGS = -std=c++20 -O3 -fPIC -Wall -Wextra -Wno-deprecated -DSDL_MAIN_HANDLED INCLUDES = -I./includes -I./runtime/Includes -I./runtime/Sources -I./third_party LDLIBS = diff --git a/runtime/Includes/Core/Application.inl b/runtime/Includes/Core/Application.inl index f00c206..41d4006 100644 --- a/runtime/Includes/Core/Application.inl +++ b/runtime/Includes/Core/Application.inl @@ -19,7 +19,7 @@ Error("invalid image ptr (NULL)"); \ retval; \ } \ - else if(m_image_registry.Find(img)) \ + else if(!m_image_registry.IsTextureKnown(img)) \ { \ Error("invalid image ptr"); \ retval; \ @@ -29,8 +29,8 @@ namespace mlx { void Application::GetMousePos(int* x, int* y) noexcept { - *x = p_in->GetX(); - *y = p_in->GetY(); + *x = m_in.GetX(); + *y = m_in.GetY(); } void Application::MouseMove(Handle win, int x, int y) noexcept @@ -69,11 +69,7 @@ namespace mlx void* Application::NewGraphicsSuport(std::size_t w, std::size_t h, const char* title) { MLX_PROFILE_FUNCTION(); - auto it = std::find_if(m_textures.begin(), m_textures.end(), [=](const Texture& texture) - { - return &texture == reinterpret_cast(const_cast(title)); - }); - if(it != _textures.end()) + if(m_image_registry.IsTextureKnown(reinterpret_cast(const_cast(title)))) m_graphics.emplace_back(std::make_unique(w, h, reinterpret_cast(const_cast(title)), m_graphics.size())); else { @@ -92,7 +88,7 @@ namespace mlx { MLX_PROFILE_FUNCTION(); CHECK_WINDOW_PTR(win); - m_graphics[*static_cast(win)]->ClearRenderData(); + m_graphics[*static_cast(win)]->ResetRenderData(); } void Application::DestroyGraphicsSupport(Handle win) @@ -177,6 +173,6 @@ namespace mlx void Application::LoopEnd() noexcept { - p_in->Finish(); + m_in.Finish(); } } diff --git a/runtime/Includes/Core/Graphics.inl b/runtime/Includes/Core/Graphics.inl index 7d866bb..e025e03 100644 --- a/runtime/Includes/Core/Graphics.inl +++ b/runtime/Includes/Core/Graphics.inl @@ -7,7 +7,6 @@ namespace mlx { MLX_PROFILE_FUNCTION(); p_scene->ResetSprites(); - m_images_registry.Clear(); } void GraphicsSupport::PixelPut(int x, int y, std::uint32_t color) noexcept diff --git a/runtime/Includes/Graphics/Sprite.h b/runtime/Includes/Graphics/Sprite.h index 3f4b426..ce7c7bd 100644 --- a/runtime/Includes/Graphics/Sprite.h +++ b/runtime/Includes/Graphics/Sprite.h @@ -46,7 +46,7 @@ namespace mlx std::shared_ptr p_texture; std::shared_ptr p_mesh; Vec4f m_color = Vec4f{ 1.0f, 1.0f, 1.0f, 1.0f }; - Vec3f m_position = Vec4f{ 0.0f, 0.0f, 0.0f }; + Vec3f m_position = Vec3f{ 0.0f, 0.0f, 0.0f }; }; } diff --git a/runtime/Includes/Maths/Angles.h 
b/runtime/Includes/Maths/Angles.h index 801d71e..1aa9b3c 100644 --- a/runtime/Includes/Maths/Angles.h +++ b/runtime/Includes/Maths/Angles.h @@ -1,11 +1,9 @@ -#ifndef __SCOP_ANGLES__ -#define __SCOP_ANGLES__ +#ifndef __MLX_ANGLES__ +#define __MLX_ANGLES__ #include -#include -#include -namespace Scop +namespace mlx { template struct EulerAngles; template struct Quat; diff --git a/runtime/Includes/Maths/Angles.inl b/runtime/Includes/Maths/Angles.inl index 5c2631e..7b1cceb 100644 --- a/runtime/Includes/Maths/Angles.inl +++ b/runtime/Includes/Maths/Angles.inl @@ -1,13 +1,10 @@ #pragma once #include -#include -#include - #include #include -namespace Scop +namespace mlx { namespace Internal { diff --git a/runtime/Includes/Maths/Constants.h b/runtime/Includes/Maths/Constants.h index 015346f..29c9a64 100644 --- a/runtime/Includes/Maths/Constants.h +++ b/runtime/Includes/Maths/Constants.h @@ -1,11 +1,7 @@ -#ifndef __SCOP_MATHS_CONSTANTS__ -#define __SCOP_MATHS_CONSTANTS__ +#ifndef __MLX_MATHS_CONSTANTS__ +#define __MLX_MATHS_CONSTANTS__ -#include -#include -#include - -namespace Scop +namespace mlx { template constexpr std::size_t BitCount = CHAR_BIT * sizeof(T); diff --git a/runtime/Includes/Maths/Enums.h b/runtime/Includes/Maths/Enums.h index df12fbd..b74e833 100644 --- a/runtime/Includes/Maths/Enums.h +++ b/runtime/Includes/Maths/Enums.h @@ -1,9 +1,7 @@ -#ifndef __SCOPE_MATHS_ENUMS__ -#define __SCOPE_MATHS_ENUMS__ +#ifndef __MLX_MATHS_ENUMS__ +#define __MLX_MATHS_ENUMS__ -#include - -namespace Scop +namespace mlx { enum class AngleUnit { diff --git a/runtime/Includes/Maths/EulerAngles.h b/runtime/Includes/Maths/EulerAngles.h index 8a873cb..54171c9 100644 --- a/runtime/Includes/Maths/EulerAngles.h +++ b/runtime/Includes/Maths/EulerAngles.h @@ -1,11 +1,9 @@ -#ifndef __SCOP_EULER_ANGLES__ -#define __SCOP_EULER_ANGLES__ - -#include +#ifndef __MLX_EULER_ANGLES__ +#define __MLX_EULER_ANGLES__ #include -namespace Scop +namespace mlx { template struct EulerAngles diff --git a/runtime/Includes/Maths/EulerAngles.inl b/runtime/Includes/Maths/EulerAngles.inl index 97fab12..23a9a57 100644 --- a/runtime/Includes/Maths/EulerAngles.inl +++ b/runtime/Includes/Maths/EulerAngles.inl @@ -1,7 +1,7 @@ #pragma once #include -namespace Scop +namespace mlx { template constexpr EulerAngles::EulerAngles(DegreeAngle P, DegreeAngle Y, DegreeAngle R) : diff --git a/runtime/Includes/Maths/Mat4.h b/runtime/Includes/Maths/Mat4.h index 4151ba9..722f5cd 100644 --- a/runtime/Includes/Maths/Mat4.h +++ b/runtime/Includes/Maths/Mat4.h @@ -1,13 +1,9 @@ -#ifndef __SCOP_MAT4__ -#define __SCOP_MAT4__ - -#include -#include -#include +#ifndef __MLX_MAT4__ +#define __MLX_MAT4__ #include -namespace Scop +namespace mlx { template struct Vec2; template struct Vec3; diff --git a/runtime/Includes/Maths/Mat4.inl b/runtime/Includes/Maths/Mat4.inl index 7f67930..b15e0d4 100644 --- a/runtime/Includes/Maths/Mat4.inl +++ b/runtime/Includes/Maths/Mat4.inl @@ -1,7 +1,6 @@ #pragma once #include -#include #include #include #include @@ -9,10 +8,7 @@ #include #include -#include -#include - -namespace Scop +namespace mlx { template constexpr Mat4::Mat4(T r11, T r12, T r13, T r14, diff --git a/runtime/Includes/Maths/MathsUtils.h b/runtime/Includes/Maths/MathsUtils.h index c17d0da..fb211d9 100644 --- a/runtime/Includes/Maths/MathsUtils.h +++ b/runtime/Includes/Maths/MathsUtils.h @@ -1,9 +1,7 @@ -#ifndef __SCOP_MATHS_UTILS__ -#define __SCOP_MATHS_UTILS__ +#ifndef __MLX_MATHS_UTILS__ +#define __MLX_MATHS_UTILS__ -#include - -namespace Scop +namespace mlx { 
template [[nodiscard]] constexpr T Mod(T x, T y) noexcept; diff --git a/runtime/Includes/Maths/MathsUtils.inl b/runtime/Includes/Maths/MathsUtils.inl index d47a421..509d2ad 100644 --- a/runtime/Includes/Maths/MathsUtils.inl +++ b/runtime/Includes/Maths/MathsUtils.inl @@ -1,12 +1,9 @@ #pragma once #include -#include -#include - #include -namespace Scop +namespace mlx { template [[nodiscard]] constexpr T Mod(T x, T y) noexcept diff --git a/runtime/Includes/Maths/Quaternions.h b/runtime/Includes/Maths/Quaternions.h index 5937268..dfa1809 100644 --- a/runtime/Includes/Maths/Quaternions.h +++ b/runtime/Includes/Maths/Quaternions.h @@ -1,10 +1,10 @@ -#ifndef __SCOP_QUATERNIONS__ -#define __SCOP_QUATERNIONS__ +#ifndef __MLX_QUATERNIONS__ +#define __MLX_QUATERNIONS__ #include #include -namespace Scop +namespace mlx { template struct Quat diff --git a/runtime/Includes/Maths/Quaternions.inl b/runtime/Includes/Maths/Quaternions.inl index bea9a95..3905510 100644 --- a/runtime/Includes/Maths/Quaternions.inl +++ b/runtime/Includes/Maths/Quaternions.inl @@ -1,7 +1,7 @@ #pragma once #include -namespace Scop +namespace mlx { template constexpr Quat::Quat(T W, T X, T Y, T Z) : w(W), x(X), y(Y), z(Z) @@ -46,7 +46,7 @@ namespace Scop RadianAngle Quat::AngleBetween(const Quat& quat) const { T alpha = Vec3::DotProduct(Vec3(x, y, z), Vec3(quat.x, quat.y, quat.z)); - return std::acos(Scop::Clamp(alpha, T(-1.0), T(1.0))); + return std::acos(mlx::Clamp(alpha, T(-1.0), T(1.0))); } template @@ -366,10 +366,10 @@ namespace Scop constexpr Quat Quat::Lerp(const Quat& from, const Quat& to, T interpolation) { Quat interpolated; - interpolated.w = Scop::Lerp(from.w, to.w, interpolation); - interpolated.x = Scop::Lerp(from.x, to.x, interpolation); - interpolated.y = Scop::Lerp(from.y, to.y, interpolation); - interpolated.z = Scop::Lerp(from.z, to.z, interpolation); + interpolated.w = mlx::Lerp(from.w, to.w, interpolation); + interpolated.x = mlx::Lerp(from.x, to.x, interpolation); + interpolated.y = mlx::Lerp(from.y, to.y, interpolation); + interpolated.z = mlx::Lerp(from.z, to.z, interpolation); return interpolated; } diff --git a/runtime/Includes/Maths/Vec2.h b/runtime/Includes/Maths/Vec2.h index ec6f7a5..a08706d 100755 --- a/runtime/Includes/Maths/Vec2.h +++ b/runtime/Includes/Maths/Vec2.h @@ -1,17 +1,10 @@ -#ifndef __SCOP_VEC2__ -#define __SCOP_VEC2__ +#ifndef __MLX_VEC2__ +#define __MLX_VEC2__ -#include -#include -#include -#include - -#include - -namespace Scop +namespace mlx { - template class Vec3; - template class Vec4; + template struct Vec3; + template struct Vec4; template struct Vec2 diff --git a/runtime/Includes/Maths/Vec2.inl b/runtime/Includes/Maths/Vec2.inl index 1671da8..f31c2f8 100755 --- a/runtime/Includes/Maths/Vec2.inl +++ b/runtime/Includes/Maths/Vec2.inl @@ -1,8 +1,7 @@ #pragma once - #include -namespace Scop +namespace mlx { template constexpr Vec2::Vec2(T X, T Y) : x(X), y(Y) {} @@ -116,14 +115,14 @@ namespace Scop template constexpr T& Vec2::operator[](std::size_t i) { - Scop::Assert(i < 2, "index out of range"); + mlx::Assert(i < 2, "index out of range"); return *(&x + i); } template constexpr T Vec2::operator[](std::size_t i) const { - Scop::Assert(i < 2, "index out of range"); + mlx::Assert(i < 2, "index out of range"); return *(&x + i); } diff --git a/runtime/Includes/Maths/Vec3.h b/runtime/Includes/Maths/Vec3.h index 775431f..ef7fdd5 100755 --- a/runtime/Includes/Maths/Vec3.h +++ b/runtime/Includes/Maths/Vec3.h @@ -1,17 +1,10 @@ -#ifndef __SCOP_VEC3__ -#define __SCOP_VEC3__ +#ifndef 
__MLX_VEC3__ +#define __MLX_VEC3__ -#include -#include -#include -#include - -#include - -namespace Scop +namespace mlx { - template class Vec2; - template class Vec4; + template struct Vec2; + template struct Vec4; template struct Vec3 diff --git a/runtime/Includes/Maths/Vec3.inl b/runtime/Includes/Maths/Vec3.inl index 73c73ff..158c1d0 100755 --- a/runtime/Includes/Maths/Vec3.inl +++ b/runtime/Includes/Maths/Vec3.inl @@ -1,8 +1,7 @@ #pragma once - #include -namespace Scop +namespace mlx { template constexpr Vec3::Vec3(T X, T Y, T Z) : x(X), y(Y), z(Z) {} @@ -145,14 +144,14 @@ namespace Scop template constexpr T& Vec3::operator[](std::size_t i) { - Scop::Assert(i < 3, "index out of range"); + mlx::Assert(i < 3, "index out of range"); return *(&x + i); } template constexpr const T& Vec3::operator[](std::size_t i) const { - Scop::Assert(i < 3, "index out of range"); + mlx::Assert(i < 3, "index out of range"); return *(&x + i); } diff --git a/runtime/Includes/Maths/Vec4.h b/runtime/Includes/Maths/Vec4.h index e8bff3a..a7e40ee 100755 --- a/runtime/Includes/Maths/Vec4.h +++ b/runtime/Includes/Maths/Vec4.h @@ -1,17 +1,10 @@ -#ifndef __SCOP_VEC4__ -#define __SCOP_VEC4__ +#ifndef __MLX_VEC4__ +#define __MLX_VEC4__ -#include -#include -#include -#include - -#include - -namespace Scop +namespace mlx { - template class Vec2; - template class Vec3; + template struct Vec2; + template struct Vec3; template struct Vec4 diff --git a/runtime/Includes/Maths/Vec4.inl b/runtime/Includes/Maths/Vec4.inl index 7f016d7..deeb5c6 100755 --- a/runtime/Includes/Maths/Vec4.inl +++ b/runtime/Includes/Maths/Vec4.inl @@ -1,8 +1,7 @@ #pragma once - #include -namespace Scop +namespace mlx { template constexpr Vec4::Vec4(T X, T Y, T Z, T W) : x(X), y(Y), z(Z), w(W) {} @@ -120,14 +119,14 @@ namespace Scop template constexpr T& Vec4::operator[](std::size_t i) { - Scop::Assert(i < 4, "index out of range"); + mlx::Assert(i < 4, "index out of range"); return *(&x + i); } template constexpr const T& Vec4::operator[](std::size_t i) const { - Scop::Assert(i < 4, "index out of range"); + mlx::Assert(i < 4, "index out of range"); return *(&x + i); } diff --git a/runtime/Includes/Platform/Window.h b/runtime/Includes/Platform/Window.h index 04ef60e..24adacf 100644 --- a/runtime/Includes/Platform/Window.h +++ b/runtime/Includes/Platform/Window.h @@ -2,6 +2,7 @@ #define __MLX_WINDOW__ #include +#include namespace mlx { diff --git a/runtime/Includes/PreCompiled.h b/runtime/Includes/PreCompiled.h index 0284a51..231ca2f 100644 --- a/runtime/Includes/PreCompiled.h +++ b/runtime/Includes/PreCompiled.h @@ -6,6 +6,7 @@ #define Window X11Window // f*ck X11 #include +#include #include #include #include @@ -38,6 +39,7 @@ #include #include #include +#include #include #include #include diff --git a/runtime/Includes/Renderer/Buffer.h b/runtime/Includes/Renderer/Buffer.h index 24d7f31..8d0b489 100644 --- a/runtime/Includes/Renderer/Buffer.h +++ b/runtime/Includes/Renderer/Buffer.h @@ -19,11 +19,11 @@ namespace mlx void Swap(GPUBuffer& buffer) noexcept; - [[nodiscard]] MLX_FORCEINLINE void* GetMap() const noexcept { return m_memory.map; } + [[nodiscard]] MLX_FORCEINLINE void* GetMap() const noexcept { return p_map; } [[nodiscard]] MLX_FORCEINLINE VkBuffer Get() const noexcept { return m_buffer; } - [[nodiscard]] MLX_FORCEINLINE VkDeviceMemory GetMemory() const noexcept { return m_memory.memory; } - [[nodiscard]] MLX_FORCEINLINE VkDeviceSize GetSize() const noexcept { return m_memory.size; } - [[nodiscard]] MLX_FORCEINLINE VkDeviceSize GetOffset() const 
noexcept { return 0; } + [[nodiscard]] MLX_FORCEINLINE VmaAllocation GetAllocation() const noexcept { return m_allocation; } + [[nodiscard]] MLX_FORCEINLINE VkDeviceSize GetSize() const noexcept { return m_size; } + [[nodiscard]] MLX_FORCEINLINE VkDeviceSize GetOffset() const noexcept { return m_offset; } [[nodiscard]] inline bool IsInit() const noexcept { return m_buffer != VK_NULL_HANDLE; } diff --git a/runtime/Includes/Renderer/Image.h b/runtime/Includes/Renderer/Image.h index fb9952e..86bff31 100644 --- a/runtime/Includes/Renderer/Image.h +++ b/runtime/Includes/Renderer/Image.h @@ -34,7 +34,7 @@ namespace mlx virtual void Destroy() noexcept; [[nodiscard]] MLX_FORCEINLINE VkImage Get() const noexcept { return m_image; } - [[nodiscard]] MLX_FORCEINLINE VkDeviceMemory GetDeviceMemory() const noexcept { return m_memory.memory; } + [[nodiscard]] MLX_FORCEINLINE VmaAllocation GetAllocation() const noexcept { return m_allocation; } [[nodiscard]] MLX_FORCEINLINE VkImageView GetImageView() const noexcept { return m_image_view; } [[nodiscard]] MLX_FORCEINLINE VkFormat GetFormat() const noexcept { return m_format; } [[nodiscard]] MLX_FORCEINLINE VkImageTiling GetTiling() const noexcept { return m_tiling; } diff --git a/runtime/Includes/Renderer/Renderer.h b/runtime/Includes/Renderer/Renderer.h index 9502fbe..a2df8cd 100644 --- a/runtime/Includes/Renderer/Renderer.h +++ b/runtime/Includes/Renderer/Renderer.h @@ -30,7 +30,7 @@ namespace mlx [[nodiscard]] inline std::size_t& GetPolygonDrawnCounterRef() noexcept { return m_polygons_drawn; } [[nodiscard]] inline std::size_t GetSwapchainImageIndex() const noexcept { return m_swapchain_image_index; } [[nodiscard]] inline std::size_t GetCurrentFrameIndex() const noexcept { return m_current_frame_index; } - [[nodiscard]] inline NonOwningPtr GetWindow() const noexcept { return m_window_ptr; } + [[nodiscard]] inline NonOwningPtr GetWindow() const noexcept { return p_window; } MLX_FORCEINLINE constexpr void RequireFramebufferResize() noexcept { m_framebuffers_resize = true; } diff --git a/runtime/Includes/Renderer/Vertex.inl b/runtime/Includes/Renderer/Vertex.inl index f23eb77..fb09f95 100644 --- a/runtime/Includes/Renderer/Vertex.inl +++ b/runtime/Includes/Renderer/Vertex.inl @@ -14,7 +14,7 @@ namespace mlx std::array Vertex::GetAttributeDescriptions() { - std::array attribute_descriptions; + std::array attribute_descriptions; attribute_descriptions[0].binding = 0; attribute_descriptions[0].location = 0; diff --git a/runtime/Sources/Core/Application.cpp b/runtime/Sources/Core/Application.cpp index 3becaf4..9ed05fa 100644 --- a/runtime/Sources/Core/Application.cpp +++ b/runtime/Sources/Core/Application.cpp @@ -1,13 +1,10 @@ #include #include -#include -#include -#include -#include +#include +#include #include #include -#include namespace mlx { @@ -15,7 +12,7 @@ namespace mlx { EventBus::RegisterListener({[](const EventBase& event) { - }, "__internal_application" }); + }, "__Application" }); m_fps.init(); SDLManager::Get().Init(); @@ -76,7 +73,7 @@ namespace mlx void Application::DestroyTexture(void* ptr) { MLX_PROFILE_FUNCTION(); - vkDeviceWaitIdle(RenderCore::Get().GetDevice().Get()); // TODO : synchronize with another method than waiting for GPU to be idle + RenderCore::Get().WaitDeviceIdle(); // TODO : synchronize with another method than waiting for GPU to be idle if(!m_image_registry.Find(ptr)) { Error("invalid image ptr"); From 0a84ea6a1891e55a64be840a806ea297bf5fed4c Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Wed, 4 Sep 2024 02:35:01 +0200 Subject: 
[PATCH 017/131] fixing compilation issues, working on textures --- runtime/Includes/Core/Application.inl | 2 +- runtime/Includes/Core/Graphics.h | 4 +- runtime/Includes/Core/Graphics.inl | 13 +- runtime/Includes/Graphics/Scene.h | 4 +- runtime/Includes/Graphics/Sprite.h | 6 +- runtime/Includes/Renderer/Image.h | 26 ++- runtime/Sources/Core/Application.cpp | 33 +--- runtime/Sources/Core/Bridge.cpp | 4 +- runtime/Sources/Core/Graphics.cpp | 2 +- runtime/Sources/Graphics/Scene.cpp | 23 ++- runtime/Sources/Graphics/Sprite.cpp | 2 +- runtime/Sources/Platform/Inputs.cpp | 18 -- runtime/Sources/Renderer/Image.cpp | 111 +++++++++++ .../Sources/Renderer/RenderPasses/2DPass.cpp | 2 + third_party/kvf.h | 180 ++++++++++++------ 15 files changed, 315 insertions(+), 115 deletions(-) diff --git a/runtime/Includes/Core/Application.inl b/runtime/Includes/Core/Application.inl index 41d4006..7e6cfd1 100644 --- a/runtime/Includes/Core/Application.inl +++ b/runtime/Includes/Core/Application.inl @@ -19,7 +19,7 @@ Error("invalid image ptr (NULL)"); \ retval; \ } \ - else if(!m_image_registry.IsTextureKnown(img)) \ + else if(!m_image_registry.IsTextureKnown(static_cast(img))) \ { \ Error("invalid image ptr"); \ retval; \ diff --git a/runtime/Includes/Core/Graphics.h b/runtime/Includes/Core/Graphics.h index e97ea9d..406a39c 100644 --- a/runtime/Includes/Core/Graphics.h +++ b/runtime/Includes/Core/Graphics.h @@ -29,7 +29,7 @@ namespace mlx inline void LoadFont(const std::filesystem::path& filepath, float scale); - inline void TryEraseTextureFromRegistry(NonOwningPtr texture) noexcept; + inline void TryEraseSpritesInScene(NonOwningPtr texture) noexcept; [[nodiscard]] MLX_FORCEINLINE bool HasWindow() const noexcept { return m_has_window; } [[nodiscard]] MLX_FORCEINLINE Renderer& GetRenderer() { return m_renderer; } @@ -42,6 +42,8 @@ namespace mlx std::shared_ptr p_window; std::unique_ptr p_scene; + std::uint64_t m_current_depth = 0; + std::size_t m_width = 0; std::size_t m_height = 0; diff --git a/runtime/Includes/Core/Graphics.inl b/runtime/Includes/Core/Graphics.inl index e025e03..8e5a0cd 100644 --- a/runtime/Includes/Core/Graphics.inl +++ b/runtime/Includes/Core/Graphics.inl @@ -7,6 +7,7 @@ namespace mlx { MLX_PROFILE_FUNCTION(); p_scene->ResetSprites(); + m_current_depth = 0; } void GraphicsSupport::PixelPut(int x, int y, std::uint32_t color) noexcept @@ -22,6 +23,15 @@ namespace mlx void GraphicsSupport::TexturePut(NonOwningPtr texture, int x, int y) { MLX_PROFILE_FUNCTION(); + NonOwningPtr sprite = p_scene->GetSpriteFromTextureAndPosition(texture, Vec2f{ static_cast(x), static_cast(y) }); + if(!sprite) + { + Sprite& new_sprite = p_scene->CreateSprite(texture); + new_sprite.SetPosition(Vec3f{ static_cast(x), static_cast(y), static_cast(m_current_depth) }); + } + else + sprite->SetPosition(Vec3f{ static_cast(x), static_cast(y), static_cast(m_current_depth) }); + m_current_depth++; } void GraphicsSupport::LoadFont(const std::filesystem::path& filepath, float scale) @@ -29,8 +39,9 @@ namespace mlx MLX_PROFILE_FUNCTION(); } - void GraphicsSupport::TryEraseTextureFromRegistry(NonOwningPtr texture) noexcept + void GraphicsSupport::TryEraseSpritesInScene(NonOwningPtr texture) noexcept { MLX_PROFILE_FUNCTION(); + p_scene->TryEraseSpriteFromTexture(texture); } } diff --git a/runtime/Includes/Graphics/Scene.h b/runtime/Includes/Graphics/Scene.h index 760c6fb..7f1d0ec 100644 --- a/runtime/Includes/Graphics/Scene.h +++ b/runtime/Includes/Graphics/Scene.h @@ -17,7 +17,9 @@ namespace mlx public: Scene(SceneDescriptor desc); - 
Sprite& CreateSprite(std::shared_ptr<Texture> texture) noexcept; + Sprite& CreateSprite(NonOwningPtr<Texture> texture) noexcept; + NonOwningPtr<Sprite> GetSpriteFromTextureAndPosition(NonOwningPtr<Texture> texture, const Vec2f& position) const; + void TryEraseSpriteFromTexture(NonOwningPtr<Texture> texture); inline void ResetSprites() { m_sprites.clear(); } diff --git a/runtime/Includes/Graphics/Sprite.h b/runtime/Includes/Graphics/Sprite.h index ce7c7bd..68a1bfb 100644 --- a/runtime/Includes/Graphics/Sprite.h +++ b/runtime/Includes/Graphics/Sprite.h @@ -14,7 +14,7 @@ namespace mlx friend class Render2DPass; public: - Sprite(std::shared_ptr<Texture> texture); + Sprite(NonOwningPtr<Texture> texture); inline void SetColor(Vec4f color) noexcept { m_color = color; } inline void SetPosition(Vec3f position) noexcept { m_position = position; } @@ -22,7 +22,7 @@ namespace mlx [[nodiscard]] inline const Vec4f& GetColor() const noexcept { return m_color; } [[nodiscard]] inline const Vec3f& GetPosition() const noexcept { return m_position; } [[nodiscard]] inline std::shared_ptr<Mesh> GetMesh() const { return p_mesh; } - [[nodiscard]] inline std::shared_ptr<Texture> GetTexture() const { return p_texture; } + [[nodiscard]] inline NonOwningPtr<Texture> GetTexture() const { return p_texture; } ~Sprite() = default; @@ -43,7 +43,7 @@ namespace mlx private: DescriptorSet m_set; - std::shared_ptr<Texture> p_texture; + NonOwningPtr<Texture> p_texture; std::shared_ptr<Mesh> p_mesh; Vec4f m_color = Vec4f{ 1.0f, 1.0f, 1.0f, 1.0f }; Vec3f m_position = Vec3f{ 0.0f, 0.0f, 0.0f }; diff --git a/runtime/Includes/Renderer/Image.h b/runtime/Includes/Renderer/Image.h index 86bff31..31b8921 100644 --- a/runtime/Includes/Renderer/Image.h +++ b/runtime/Includes/Renderer/Image.h @@ -47,7 +47,7 @@ namespace mlx virtual ~Image() = default; - private: + protected: VmaAllocation m_allocation; VkImage m_image = VK_NULL_HANDLE; VkImageView m_image_view = VK_NULL_HANDLE; @@ -84,6 +84,7 @@ namespace mlx { Init(std::move(pixels), width, height, format, is_multisampled); } + inline void Init(CPUBuffer pixels, std::uint32_t width, std::uint32_t height, VkFormat format = VK_FORMAT_R8G8B8A8_SRGB, bool is_multisampled = false) { Image::Init(ImageType::Color, width, height, format, VK_IMAGE_TILING_OPTIMAL, VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, is_multisampled); @@ -91,12 +92,12 @@ namespace mlx Image::CreateSampler(); if(pixels) { - TransitionLayout(VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL); GPUBuffer staging_buffer; std::size_t size = width * height * kvfFormatSize(format); staging_buffer.Init(BufferType::Staging, size, VK_BUFFER_USAGE_TRANSFER_SRC_BIT, pixels); VkCommandBuffer cmd = kvfCreateCommandBuffer(RenderCore::Get().GetDevice()); kvfBeginCommandBuffer(cmd, VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT); + TransitionLayout(VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, cmd); kvfCopyBufferToImage(cmd, Image::Get(), staging_buffer.Get(), staging_buffer.GetOffset(), VK_IMAGE_ASPECT_COLOR_BIT, { width, height, 1 }); vkEndCommandBuffer(cmd); VkFence fence = kvfCreateFence(RenderCore::Get().GetDevice()); @@ -104,13 +105,26 @@ namespace mlx kvfDestroyFence(RenderCore::Get().GetDevice(), fence); staging_buffer.Destroy(); } - if(!pixels) - TransitionLayout(VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL); - else - TransitionLayout(VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL); + TransitionLayout(VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL); } + + void SetPixel(int x, int y, std::uint32_t color) noexcept; + int GetPixel(int x, int y) noexcept; + + void Update(VkCommandBuffer cmd) const; + ~Texture() override {
Destroy(); } + + private: + void OpenCPUBuffer(); + + private: + std::vector m_cpu_buffer; + std::optional m_staging_buffer; + bool m_has_been_modified = false; }; + + Texture* StbTextureLoad(const std::filesystem::path& file, int* w, int* h); } #endif diff --git a/runtime/Sources/Core/Application.cpp b/runtime/Sources/Core/Application.cpp index 9ed05fa..b82cb4f 100644 --- a/runtime/Sources/Core/Application.cpp +++ b/runtime/Sources/Core/Application.cpp @@ -14,7 +14,7 @@ namespace mlx { }, "__Application" }); - m_fps.init(); + m_fps.Init(); SDLManager::Get().Init(); } @@ -26,7 +26,6 @@ namespace mlx { if(!m_fps.Update()) continue; - m_in.Update(); if(f_loop_hook) f_loop_hook(p_param); @@ -37,27 +36,15 @@ namespace mlx gs->Render(); } } - - RenderCore::Get().GetSingleTimeCmdManager().UpdateSingleTimesCmdBuffersSubmitState(); - - for(auto& gs : m_graphics) - { - if(!gs) - continue; - for(int i = 0; i < MAX_FRAMES_IN_FLIGHT; i++) - gs->GetRenderer().GetCmdBuffer(i).WaitForExecution(); - } + RenderCore::Get().WaitDeviceIdle(); } void* Application::NewTexture(int w, int h) { MLX_PROFILE_FUNCTION(); - Texture* texture = new Texture; - #ifdef DEBUG - texture->Create(nullptr, w, h, VK_FORMAT_R8G8B8A8_UNORM, "__mlx_unamed_user_texture"); - #else - texture->Create(nullptr, w, h, VK_FORMAT_R8G8B8A8_UNORM, nullptr); - #endif + Texture* texture; + try { texture = new Texture({}, w, h); } + catch(...) { return NULL; } m_image_registry.RegisterTexture(texture); return texture; } @@ -66,6 +53,8 @@ namespace mlx { MLX_PROFILE_FUNCTION(); Texture* texture = StbTextureLoad(file, w, h); + if(texture == nullptr) + return NULL; // NULL for C compatibility m_image_registry.RegisterTexture(texture); return texture; } @@ -74,7 +63,7 @@ namespace mlx { MLX_PROFILE_FUNCTION(); RenderCore::Get().WaitDeviceIdle(); // TODO : synchronize with another method than waiting for GPU to be idle - if(!m_image_registry.Find(ptr)) + if(!m_image_registry.IsTextureKnown(static_cast(ptr))) { Error("invalid image ptr"); return; @@ -85,10 +74,10 @@ namespace mlx Error("trying to destroy a texture that has already been destroyed"); else texture->Destroy(); - for(auto& gs : _graphics) + for(auto& gs : m_graphics) { if(gs) - gs->TryEraseTextureFromManager(texture); + gs->TryEraseSpritesInScene(texture); } m_image_registry.UnregisterTexture(texture); delete texture; @@ -96,8 +85,6 @@ namespace mlx Application::~Application() { - TextLibrary::Get().ClearLibrary(); - FontLibrary::Get().ClearLibrary(); SDLManager::Get().Shutdown(); } } diff --git a/runtime/Sources/Core/Bridge.cpp b/runtime/Sources/Core/Bridge.cpp index ca0ecef..89d741d 100644 --- a/runtime/Sources/Core/Bridge.cpp +++ b/runtime/Sources/Core/Bridge.cpp @@ -1,7 +1,7 @@ #include #include -#include +#include #include #include @@ -18,7 +18,7 @@ extern "C" { if(__mlx_ptr != nullptr) { - Error("MLX cannot be initialized multiple times"); + mlx::Error("MLX cannot be initialized multiple times"); return nullptr; } mlx::MemManager::Get(); // just to initialize the C garbage collector diff --git a/runtime/Sources/Core/Graphics.cpp b/runtime/Sources/Core/Graphics.cpp index 2f9c720..15d578b 100644 --- a/runtime/Sources/Core/Graphics.cpp +++ b/runtime/Sources/Core/Graphics.cpp @@ -64,7 +64,7 @@ namespace mlx RenderCore::Get().WaitDeviceIdle(); p_scene.reset(); m_scene_renderer.Destroy(); - m_renderer->Destroy(); + m_renderer.Destroy(); if(p_window) p_window->Destroy(); } diff --git a/runtime/Sources/Graphics/Scene.cpp b/runtime/Sources/Graphics/Scene.cpp index c95fee7..46821a9 100644 --- 
a/runtime/Sources/Graphics/Scene.cpp +++ b/runtime/Sources/Graphics/Scene.cpp @@ -12,10 +12,31 @@ namespace mlx m_depth.Init(m_descriptor.renderer->GetSwapchainImages().back().GetWidth(), m_descriptor.renderer->GetSwapchainImages().back().GetHeight()); } - Sprite& Scene::CreateSprite(std::shared_ptr texture) noexcept + Sprite& Scene::CreateSprite(NonOwningPtr texture) noexcept { std::shared_ptr sprite = std::make_shared(texture); m_sprites.push_back(sprite); return *sprite; } + + NonOwningPtr Scene::GetSpriteFromTextureAndPosition(NonOwningPtr texture, const Vec2f& position) const + { + auto it = std::find_if(m_sprites.begin(), m_sprites.end(), [texture, position](const Sprite& sprite) + { + return sprite.GetPosition().x == position.x && sprite.GetPosition().y == position.y && sprite.GetTexture() == texture; + }); + return (it != m_sprites.end() ? &(*it) : nullptr); + } + + void Scene::TryEraseSpriteFromTexture(NonOwningPtr texture) + { + do + { + auto it = std::find_if(m_sprites.begin(), m_sprites.end(), [texture, position](const Sprite& sprite) + { + return sprite.GetPosition().x == position.x && sprite.GetPosition().y == position.y && sprite.GetTexture() == texture; + }); + m_sprites.erase(it); + } while(it != m_sprites.end()); + } } diff --git a/runtime/Sources/Graphics/Sprite.cpp b/runtime/Sources/Graphics/Sprite.cpp index dbc5e3d..e6792ae 100644 --- a/runtime/Sources/Graphics/Sprite.cpp +++ b/runtime/Sources/Graphics/Sprite.cpp @@ -35,7 +35,7 @@ namespace mlx return mesh; } - Sprite::Sprite(std::shared_ptr texture) + Sprite::Sprite(NonOwningPtr texture) { Verify((bool)texture, "Sprite: invalid texture"); p_mesh = CreateQuad(0, 0, texture->GetWidth(), texture->GetHeight()); diff --git a/runtime/Sources/Platform/Inputs.cpp b/runtime/Sources/Platform/Inputs.cpp index 770c59b..7dc8377 100644 --- a/runtime/Sources/Platform/Inputs.cpp +++ b/runtime/Sources/Platform/Inputs.cpp @@ -4,22 +4,4 @@ namespace mlx { - void Input::update() - { - MLX_PROFILE_FUNCTION(); - _xRel = 0; - _yRel = 0; - - glfwPollEvents(); - - static int i = 0; - i++; - if(i >= 500) - { - auto& hooks = _events_hooks[0]; - auto& win_hook = hooks[MLX_WINDOW_EVENT]; - if(win_hook.hook) - win_hook.hook(0, win_hook.param); - } - } } diff --git a/runtime/Sources/Renderer/Image.cpp b/runtime/Sources/Renderer/Image.cpp index 971e7df..591bb96 100644 --- a/runtime/Sources/Renderer/Image.cpp +++ b/runtime/Sources/Renderer/Image.cpp @@ -50,7 +50,10 @@ namespace mlx return; bool is_single_time_cmd_buffer = (cmd == VK_NULL_HANDLE); if(is_single_time_cmd_buffer) + { cmd = kvfCreateCommandBuffer(RenderCore::Get().GetDevice()); + kvfBeginCommandBuffer(cmd, VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT); + } KvfImageType kvf_type = KVF_IMAGE_OTHER; switch(m_type) { @@ -61,6 +64,13 @@ namespace mlx } kvfTransitionImageLayout(RenderCore::Get().GetDevice(), m_image, kvf_type, cmd, m_format, m_layout, new_layout, is_single_time_cmd_buffer); m_layout = new_layout; + if(is_single_time_cmd_buffer) + { + vkEndCommandBuffer(cmd); + VkFence fence = kvfCreateFence(RenderCore::Get().GetDevice()); + kvfSubmitSingleTimeCommandBuffer(RenderCore::Get().GetDevice(), cmd, KVF_GRAPHICS_QUEUE, fence); + kvfDestroyFence(RenderCore::Get().GetDevice(), fence); + } } void Image::Clear(VkCommandBuffer cmd, Vec4f color) @@ -112,4 +122,105 @@ namespace mlx RenderCore::Get().GetAllocator().DestroyImage(m_allocation, m_image); m_image = VK_NULL_HANDLE; } + + void Texture::SetPixel(int x, int y, std::uint32_t color) noexcept + { + MLX_PROFILE_FUNCTION(); + if(x < 0 || y 
< 0 || static_cast<std::uint32_t>(x) > m_width || static_cast<std::uint32_t>(y) > m_height) + return; + if(!m_staging_buffer.has_value()) + OpenCPUBuffer(); + m_cpu_buffer[(y * m_width) + x] = color; + m_has_been_modified = true; + } + + int Texture::GetPixel(int x, int y) noexcept + { + MLX_PROFILE_FUNCTION(); + if(x < 0 || y < 0 || static_cast<std::uint32_t>(x) > GetWidth() || static_cast<std::uint32_t>(y) > GetHeight()) + return 0; + if(!m_staging_buffer.has_value()) + OpenCPUBuffer(); + std::uint32_t color = m_cpu_buffer[(y * m_width) + x]; + std::uint8_t* bytes = reinterpret_cast<std::uint8_t*>(&color); + std::uint8_t tmp = bytes[0]; + bytes[0] = bytes[2]; + bytes[2] = tmp; + return *reinterpret_cast<int*>(bytes); + } + + void Texture::Update(VkCommandBuffer cmd) const + { + if(!m_has_been_modified) + return; + std::memcpy(m_staging_buffer->GetMap(), m_cpu_buffer.data(), m_cpu_buffer.size() * kvfGetFormatSize(m_format)); + + VkImageLayout old_layout = m_layout; + VkCommandBuffer cmd = kvfCreateCommandBuffer(RenderCore::Get().GetDevice()); + kvfBeginCommandBuffer(cmd, VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT); + TransitionLayout(VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, cmd); + kvfCopyBufferToImage(cmd, Image::Get(), m_staging_buffer->Get(), m_staging_buffer->GetOffset(), VK_IMAGE_ASPECT_COLOR_BIT, { m_width, m_height, 1 }); + TransitionLayout(old_layout, cmd); + vkEndCommandBuffer(cmd); + VkFence fence = kvfCreateFence(RenderCore::Get().GetDevice()); + kvfSubmitSingleTimeCommandBuffer(RenderCore::Get().GetDevice(), cmd, KVF_GRAPHICS_QUEUE, fence); + kvfDestroyFence(RenderCore::Get().GetDevice(), fence); + + m_has_been_modified = false; + } + + void Texture::OpenCPUBuffer() + { + MLX_PROFILE_FUNCTION(); + if(m_staging_buffer.has_value()) + return; + DebugLog("Texture : enabling CPU mapping"); + m_staging_buffer.emplace(); + std::size_t size = m_width * m_height * kvfGetFormatSize(m_format); + m_staging_buffer->Init(BufferType::Staging, size, VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT, {}); + + VkImageLayout old_layout = m_layout; + VkCommandBuffer cmd = kvfCreateCommandBuffer(RenderCore::Get().GetDevice()); + kvfBeginCommandBuffer(cmd, VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT); + TransitionLayout(VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, cmd); + kvfImageToBuffer(cmd, m_image, m_staging_buffer->Get(), m_staging_buffer->GetOffset(), VK_IMAGE_ASPECT_COLOR_BIT, { m_width, m_height, 1 }); + TransitionLayout(old_layout, cmd); + vkEndCommandBuffer(cmd); + VkFence fence = kvfCreateFence(RenderCore::Get().GetDevice()); + kvfSubmitSingleTimeCommandBuffer(RenderCore::Get().GetDevice(), cmd, KVF_GRAPHICS_QUEUE, fence); + kvfDestroyFence(RenderCore::Get().GetDevice(), fence); + + m_cpu_buffer.resize(m_width * m_height); + std::memcpy(m_cpu_buffer.data(), m_staging_buffer->GetMap(), m_cpu_buffer.size() * kvfGetFormatSize(m_format)); + } + + Texture* StbTextureLoad(const std::filesystem::path& file, int* w, int* h) + { + MLX_PROFILE_FUNCTION(); + std::string filename = file.string(); + + if(!std::filesystem::exists(file)) + { + Error("Image : file not found %", file); + return nullptr; + } + if(stbi_is_hdr(filename.c_str())) + { + Error("Texture : unsupported image format %", file); + return nullptr; + } + int dummy_w; + int dummy_h; + int channels; + std::uint8_t* data = stbi_load(filename.c_str(), (w == nullptr ? &dummy_w : w), (h == nullptr ? &dummy_h : h), &channels, 4); + CPUBuffer buffer((w == nullptr ? dummy_w : *w) * (h == nullptr ? dummy_h : *h) * 4); + std::memcpy(buffer.GetData(), data, buffer.GetSize()); + Texture* texture; + + try { texture = new Texture(buffer, (w == nullptr ?
dummy_w : *w), (h == nullptr ? dummy_h : *h)); } + catch(...) { return NULL; } + + stbi_image_free(data); + return texture; + } } diff --git a/runtime/Sources/Renderer/RenderPasses/2DPass.cpp b/runtime/Sources/Renderer/RenderPasses/2DPass.cpp index bea3c70..57cff33 100644 --- a/runtime/Sources/Renderer/RenderPasses/2DPass.cpp +++ b/runtime/Sources/Renderer/RenderPasses/2DPass.cpp @@ -102,6 +102,8 @@ namespace mlx sprite_data.color = sprite->GetColor(); if(!sprite->IsSetInit()) sprite->UpdateDescriptorSet(*p_texture_set); + Verify((bool)sprite->GetTexture(), "a sprite has no texture attached"); + sprite->GetTexture()->Update(cmd); sprite->Bind(frame_index, cmd); std::array sets = { p_viewer_data_set->GetSet(frame_index), sprite->GetSet(frame_index) }; vkCmdBindDescriptorSets(cmd, m_pipeline.GetPipelineBindPoint(), m_pipeline.GetPipelineLayout(), 0, sets.size(), sets.data(), 0, nullptr); diff --git a/third_party/kvf.h b/third_party/kvf.h index a8b6008..4878ef9 100755 --- a/third_party/kvf.h +++ b/third_party/kvf.h @@ -92,7 +92,8 @@ void kvfAddLayer(const char* layer); VkInstance kvfCreateInstance(const char** extensionsEnabled, uint32_t extensionsCount); void kvfDestroyInstance(VkInstance instance); -VkPhysicalDevice kvfPickFirstPhysicalDevice(VkInstance instance); +// If surfaces given to theses functions are VK_NULL_HANDLE no present queues will be searched and thus kvfQueuePresentKHR will not work +VkPhysicalDevice kvfPickFirstPhysicalDevice(VkInstance instance, VkSurfaceKHR surface); VkPhysicalDevice kvfPickGoodDefaultPhysicalDevice(VkInstance instance, VkSurfaceKHR surface); VkPhysicalDevice kvfPickGoodPhysicalDevice(VkInstance instance, VkSurfaceKHR surface, const char** deviceExtensions, uint32_t deviceExtensionsCount); @@ -101,7 +102,7 @@ uint32_t kvfGetDeviceQueueFamily(VkDevice device, KvfQueueType queue); bool kvfQueuePresentKHR(VkDevice device, VkSemaphore wait, VkSwapchainKHR swapchain, uint32_t image_index); // return false when the swapchain must be recreated VkDevice kvfCreateDefaultDevice(VkPhysicalDevice physical); -VkDevice kvfCreateDevice(VkPhysicalDevice physical, const char** extensions, uint32_t extensions_count); +VkDevice kvfCreateDevice(VkPhysicalDevice physical, const char** extensions, uint32_t extensions_count, VkPhysicalDeviceFeatures* features); void kvfDestroyDevice(VkDevice device); VkFence kvfCreateFence(VkDevice device); @@ -118,12 +119,12 @@ uint32_t kvfGetSwapchainMinImagesCount(VkSwapchainKHR swapchain); VkExtent2D kvfGetSwapchainImagesSize(VkSwapchainKHR swapchain); void kvfDestroySwapchainKHR(VkDevice device, VkSwapchainKHR swapchain); -VkImage kvfCreateImage(VkDevice device, uint32_t width, uint32_t height, VkFormat format, VkImageTiling tiling, VkImageUsageFlags usage); -void kvfImageBufferToBuffer(VkCommandBuffer cmd, VkBuffer dst, VkImage src, size_t size); +VkImage kvfCreateImage(VkDevice device, uint32_t width, uint32_t height, VkFormat format, VkImageTiling tiling, VkImageUsageFlags usage, KvfImageType type); +void kvfImageToBuffer(VkCommandBuffer cmd, VkBuffer dst, VkImage src, size_t size); void kvfDestroyImage(VkDevice device, VkImage image); -VkImageView kvfCreateImageView(VkDevice device, VkImage image, VkFormat format, VkImageViewType type, VkImageAspectFlags aspect); +VkImageView kvfCreateImageView(VkDevice device, VkImage image, VkFormat format, VkImageViewType type, VkImageAspectFlags aspect, int layer_count); void kvfDestroyImageView(VkDevice device, VkImageView image_view); -void kvfTransitionImageLayout(VkDevice device, VkImage 
image, VkCommandBuffer cmd, VkFormat format, VkImageLayout old_layout, VkImageLayout new_layout, bool is_single_time_cmd_buffer); +void kvfTransitionImageLayout(VkDevice device, VkImage image, KvfImageType type, VkCommandBuffer cmd, VkFormat format, VkImageLayout old_layout, VkImageLayout new_layout, bool is_single_time_cmd_buffer); VkSampler kvfCreateSampler(VkDevice device, VkFilter filters, VkSamplerAddressMode address_modes, VkSamplerMipmapMode mipmap_mode); void kvfDestroySampler(VkDevice device, VkSampler sampler); @@ -143,10 +144,11 @@ void kvfEndCommandBuffer(VkCommandBuffer buffer); void kvfSubmitCommandBuffer(VkDevice device, VkCommandBuffer buffer, KvfQueueType queue, VkSemaphore signal, VkSemaphore wait, VkFence fence, VkPipelineStageFlags* stages); void kvfSubmitSingleTimeCommandBuffer(VkDevice device, VkCommandBuffer buffer, KvfQueueType queue, VkFence fence); -VkAttachmentDescription kvfBuildAttachmentDescription(KvfImageType type, VkFormat format, VkImageLayout initial, VkImageLayout final, bool clear); +VkAttachmentDescription kvfBuildAttachmentDescription(KvfImageType type, VkFormat format, VkImageLayout initial, VkImageLayout final, bool clear, VkSampleCountFlagBits samples); VkAttachmentDescription kvfBuildSwapchainAttachmentDescription(VkSwapchainKHR swapchain, bool clear); VkRenderPass kvfCreateRenderPass(VkDevice device, VkAttachmentDescription* attachments, size_t attachments_count, VkPipelineBindPoint bind_point); +VkRenderPass kvfCreateRenderPassWithSubpassDependencies(VkDevice device, VkAttachmentDescription* attachments, size_t attachments_count, VkPipelineBindPoint bind_point, VkSubpassDependency* dependencies, size_t dependencies_count); void kvfDestroyRenderPass(VkDevice device, VkRenderPass renderpass); void kvfBeginRenderPass(VkRenderPass pass, VkCommandBuffer cmd, VkFramebuffer framebuffer, VkExtent2D framebuffer_extent, VkClearValue* clears, size_t clears_count); @@ -185,6 +187,8 @@ void kvfGPipelineBuilderReset(KvfGraphicsPipelineBuilder* builder); void kvfGPipelineBuilderSetInputTopology(KvfGraphicsPipelineBuilder* builder, VkPrimitiveTopology topology); void kvfGPipelineBuilderSetPolygonMode(KvfGraphicsPipelineBuilder* builder, VkPolygonMode polygon, float line_width); void kvfGPipelineBuilderSetCullMode(KvfGraphicsPipelineBuilder* builder, VkCullModeFlags cull, VkFrontFace face); +void kvfGPipelineBuilderSetMultisampling(KvfGraphicsPipelineBuilder* builder, VkSampleCountFlagBits count); +void kvfGPipelineBuilderSetMultisamplingShading(KvfGraphicsPipelineBuilder* builder, VkSampleCountFlagBits count, float min_sampling_shading); void kvfGPipelineBuilderDisableBlending(KvfGraphicsPipelineBuilder* builder); void kvfGPipelineBuilderEnableAdditiveBlending(KvfGraphicsPipelineBuilder* builder); void kvfGPipelineBuilderEnableAlphaBlending(KvfGraphicsPipelineBuilder* builder); @@ -197,6 +201,8 @@ void kvfGPipelineBuilderResetShaderStages(KvfGraphicsPipelineBuilder* builder); VkPipeline kvfCreateGraphicsPipeline(VkDevice device, VkPipelineLayout layout, KvfGraphicsPipelineBuilder* builder, VkRenderPass pass); void kvfDestroyPipeline(VkDevice device, VkPipeline pipeline); +void kvfCheckVk(VkResult result); + #ifdef __cplusplus } #endif @@ -287,6 +293,7 @@ struct KvfGraphicsPipelineBuilder VkPipelineRasterizationStateCreateInfo rasterization_state; VkPipelineDepthStencilStateCreateInfo depth_stencil_state; VkPipelineColorBlendAttachmentState color_blend_attachment_state; + VkPipelineMultisampleStateCreateInfo multisampling; size_t shader_stages_count; }; @@ 
-334,6 +341,11 @@ void __kvfCheckVk(VkResult result, const char* function) #undef __kvfCheckVk #define __kvfCheckVk(res) __kvfCheckVk(res, __FUNCTION__) +void kvfCheckVk(VkResult result) +{ + __kvfCheckVk(result); +} + void __kvfAddDeviceToArray(VkPhysicalDevice device, int32_t graphics_queue, int32_t present_queue) { KVF_ASSERT(device != VK_NULL_HANDLE); @@ -518,7 +530,7 @@ void __kvfDestroyFramebuffer(VkDevice device, VkFramebuffer framebuffer) } } -__KvfFramebuffer* __kvfGetKvfSwapchainFromVkFramebuffer(VkFramebuffer framebuffer) +__KvfFramebuffer* __kvfGetKvfFramebufferFromVkFramebuffer(VkFramebuffer framebuffer) { KVF_ASSERT(framebuffer != VK_NULL_HANDLE); for(size_t i = 0; i < __kvf_internal_framebuffers_size; i++) @@ -1061,22 +1073,6 @@ void kvfDestroyInstance(VkInstance instance) vkDestroyInstance(instance, NULL); } -VkPhysicalDevice kvfPickFirstPhysicalDevice(VkInstance instance) -{ - uint32_t device_count; - VkPhysicalDevice* devices = NULL; - VkPhysicalDevice chosen_one = VK_NULL_HANDLE; - - KVF_ASSERT(instance != VK_NULL_HANDLE); - - vkEnumeratePhysicalDevices(instance, &device_count, NULL); - devices = (VkPhysicalDevice*)KVF_MALLOC(sizeof(VkPhysicalDevice) * device_count + 1); - vkEnumeratePhysicalDevices(instance, &device_count, devices); - chosen_one = devices[0]; - KVF_FREE(devices); - return chosen_one; -} - __KvfQueueFamilies __kvfFindQueueFamilies(VkPhysicalDevice physical, VkSurfaceKHR surface) { __KvfQueueFamilies queues = { -1, -1, -1 }; @@ -1095,17 +1091,39 @@ __KvfQueueFamilies __kvfFindQueueFamilies(VkPhysicalDevice physical, VkSurfaceKH if(queue_families[i].queueFlags & VK_QUEUE_GRAPHICS_BIT) queues.graphics = i; VkBool32 present_support = false; - vkGetPhysicalDeviceSurfaceSupportKHR(physical, i, surface, &present_support); - if(present_support) - queues.present = i; - - if(queues.graphics != -1 && queues.present != -1 && queues.compute != -1) + if(surface != VK_NULL_HANDLE) + { + vkGetPhysicalDeviceSurfaceSupportKHR(physical, i, surface, &present_support); + if(present_support) + queues.present = i; + if(queues.graphics != -1 && queues.present != -1 && queues.compute != -1) + break; + } + else if(queues.graphics != -1 && queues.compute != -1) break; } KVF_FREE(queue_families); return queues; } +VkPhysicalDevice kvfPickFirstPhysicalDevice(VkInstance instance, VkSurfaceKHR surface) +{ + uint32_t device_count; + VkPhysicalDevice* devices = NULL; + VkPhysicalDevice chosen_one = VK_NULL_HANDLE; + + KVF_ASSERT(instance != VK_NULL_HANDLE); + + vkEnumeratePhysicalDevices(instance, &device_count, NULL); + devices = (VkPhysicalDevice*)KVF_MALLOC(sizeof(VkPhysicalDevice) * device_count + 1); + vkEnumeratePhysicalDevices(instance, &device_count, devices); + chosen_one = devices[0]; + KVF_FREE(devices); + __KvfQueueFamilies queues = __kvfFindQueueFamilies(chosen_one, surface); + __kvfAddDeviceToArray(chosen_one, queues.graphics, queues.present); + return chosen_one; +} + VkPhysicalDevice kvfPickGoodDefaultPhysicalDevice(VkInstance instance, VkSurfaceKHR surface) { const char* extensions[] = { VK_KHR_SWAPCHAIN_EXTENSION_NAME }; @@ -1208,10 +1226,11 @@ VkPhysicalDevice kvfPickGoodPhysicalDevice(VkInstance instance, VkSurfaceKHR sur VkDevice kvfCreateDefaultDevice(VkPhysicalDevice physical) { const char* extensions[] = { VK_KHR_SWAPCHAIN_EXTENSION_NAME }; - return kvfCreateDevice(physical, extensions, sizeof(extensions) / sizeof(extensions[0])); + VkPhysicalDeviceFeatures device_features = { VK_FALSE }; + return kvfCreateDevice(physical, extensions, sizeof(extensions) / 
sizeof(extensions[0]), &device_features); } -VkDevice kvfCreateDevice(VkPhysicalDevice physical, const char** extensions, uint32_t extensions_count) +VkDevice kvfCreateDevice(VkPhysicalDevice physical, const char** extensions, uint32_t extensions_count, VkPhysicalDeviceFeatures* features) { const float queue_priority = 1.0f; @@ -1235,13 +1254,11 @@ VkDevice kvfCreateDevice(VkPhysicalDevice physical, const char** extensions, uin queue_create_info[1].flags = 0; queue_create_info[1].pNext = NULL; - VkPhysicalDeviceFeatures device_features = { VK_FALSE }; - VkDeviceCreateInfo createInfo; createInfo.sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO; createInfo.queueCreateInfoCount = (kvfdevice->queues.graphics == kvfdevice->queues.present ? 1 : 2); createInfo.pQueueCreateInfos = queue_create_info; - createInfo.pEnabledFeatures = &device_features; + createInfo.pEnabledFeatures = features; createInfo.enabledExtensionCount = extensions_count; createInfo.ppEnabledExtensionNames = extensions; createInfo.enabledLayerCount = 0; @@ -1502,7 +1519,7 @@ void kvfDestroySwapchainKHR(VkDevice device, VkSwapchainKHR swapchain) __kvfDestroySwapchain(device, swapchain); } -VkImage kvfCreateImage(VkDevice device, uint32_t width, uint32_t height, VkFormat format, VkImageTiling tiling, VkImageUsageFlags usage) +VkImage kvfCreateImage(VkDevice device, uint32_t width, uint32_t height, VkFormat format, VkImageTiling tiling, VkImageUsageFlags usage, KvfImageType type) { KVF_ASSERT(device != VK_NULL_HANDLE); VkImageCreateInfo image_info = {}; @@ -1519,11 +1536,37 @@ VkImage kvfCreateImage(VkDevice device, uint32_t width, uint32_t height, VkForma image_info.usage = usage; image_info.samples = VK_SAMPLE_COUNT_1_BIT; image_info.sharingMode = VK_SHARING_MODE_EXCLUSIVE; + + switch(type) + { + case KVF_IMAGE_CUBE: image_info.flags = VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT; image_info.arrayLayers = 6; break; + default: break; + } + VkImage image; __kvfCheckVk(vkCreateImage(device, &image_info, NULL, &image)); return image; } +void kvfImageToBuffer(VkCommandBuffer cmd, VkBuffer dst, VkImage src, size_t buffer_offset, VkImageAspectFlagBits aspect, VkExtent3D extent) +{ + KVF_ASSERT(cmd != VK_NULL_HANDLE); + KVF_ASSERT(dst != VK_NULL_HANDLE); + KVF_ASSERT(src != VK_NULL_HANDLE); + VkOffset3D offset = { 0, 0, 0 }; + VkBufferImageCopy region = {}; + region.bufferOffset = buffer_offset; + region.bufferRowLength = 0; + region.bufferImageHeight = 0; + region.imageSubresource.aspectMask = aspect; + region.imageSubresource.mipLevel = 0; + region.imageSubresource.baseArrayLayer = 0; + region.imageSubresource.layerCount = 1; + region.imageOffset = offset; + region.imageExtent = extent; + vkCmdCopyImageToBuffer(cmd, src, dst, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, 1, ®ion); +} + void kvfDestroyImage(VkDevice device, VkImage image) { if(image == VK_NULL_HANDLE) @@ -1532,7 +1575,7 @@ void kvfDestroyImage(VkDevice device, VkImage image) vkDestroyImage(device, image, NULL); } -VkImageView kvfCreateImageView(VkDevice device, VkImage image, VkFormat format, VkImageViewType type, VkImageAspectFlags aspect) +VkImageView kvfCreateImageView(VkDevice device, VkImage image, VkFormat format, VkImageViewType type, VkImageAspectFlags aspect, int layer_count) { KVF_ASSERT(device != VK_NULL_HANDLE); VkImageViewCreateInfo create_info = {}; @@ -1548,7 +1591,7 @@ VkImageView kvfCreateImageView(VkDevice device, VkImage image, VkFormat format, create_info.subresourceRange.baseMipLevel = 0; create_info.subresourceRange.levelCount = 1; 
create_info.subresourceRange.baseArrayLayer = 0; - create_info.subresourceRange.layerCount = 1; + create_info.subresourceRange.layerCount = layer_count; VkImageView view; __kvfCheckVk(vkCreateImageView(device, &create_info, NULL, &view)); return view; @@ -1561,7 +1604,7 @@ void kvfDestroyImageView(VkDevice device, VkImageView image_view) vkDestroyImageView(device, image_view, NULL); } -void kvfTransitionImageLayout(VkDevice device, VkImage image, VkCommandBuffer cmd, VkFormat format, VkImageLayout old_layout, VkImageLayout new_layout, bool is_single_time_cmd_buffer) +void kvfTransitionImageLayout(VkDevice device, VkImage image, KvfImageType type, VkCommandBuffer cmd, VkFormat format, VkImageLayout old_layout, VkImageLayout new_layout, bool is_single_time_cmd_buffer) { KVF_ASSERT(device != VK_NULL_HANDLE); @@ -1582,7 +1625,7 @@ void kvfTransitionImageLayout(VkDevice device, VkImage image, VkCommandBuffer cm barrier.subresourceRange.baseMipLevel = 0; barrier.subresourceRange.levelCount = 1; barrier.subresourceRange.baseArrayLayer = 0; - barrier.subresourceRange.layerCount = 1; + barrier.subresourceRange.layerCount = (type == KVF_IMAGE_CUBE ? 6 : 1); barrier.srcAccessMask = kvfLayoutToAccessMask(old_layout, false); barrier.dstAccessMask = kvfLayoutToAccessMask(new_layout, true); if(kvfIsStencilFormat(format)) @@ -1590,7 +1633,7 @@ void kvfTransitionImageLayout(VkDevice device, VkImage image, VkCommandBuffer cm VkPipelineStageFlags source_stage = 0; if(barrier.oldLayout == VK_IMAGE_LAYOUT_PRESENT_SRC_KHR) - source_stage = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT; + source_stage = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT; else if(barrier.srcAccessMask != 0) source_stage = kvfAccessFlagsToPipelineStage(barrier.srcAccessMask, VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT); else @@ -1598,7 +1641,7 @@ void kvfTransitionImageLayout(VkDevice device, VkImage image, VkCommandBuffer cm VkPipelineStageFlags destination_stage = 0; if(barrier.newLayout == VK_IMAGE_LAYOUT_PRESENT_SRC_KHR) - destination_stage = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT; + destination_stage = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT; else if(barrier.dstAccessMask != 0) destination_stage = kvfAccessFlagsToPipelineStage(barrier.dstAccessMask, VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT); else @@ -1714,7 +1757,7 @@ VkFramebuffer kvfCreateFramebuffer(VkDevice device, VkRenderPass render_pass, Vk VkExtent2D kvfGetFramebufferSize(VkFramebuffer buffer) { - __KvfFramebuffer* kvf_framebuffer = __kvfGetKvfSwapchainFromVkFramebuffer(buffer); + __KvfFramebuffer* kvf_framebuffer = __kvfGetKvfFramebufferFromVkFramebuffer(buffer); KVF_ASSERT(kvf_framebuffer != NULL); return kvf_framebuffer->extent; } @@ -1805,7 +1848,7 @@ void kvfSubmitSingleTimeCommandBuffer(VkDevice device, VkCommandBuffer buffer, K vkWaitForFences(device, 1, &fence, VK_TRUE, UINT64_MAX); } -VkAttachmentDescription kvfBuildAttachmentDescription(KvfImageType type, VkFormat format, VkImageLayout initial, VkImageLayout final, bool clear) +VkAttachmentDescription kvfBuildAttachmentDescription(KvfImageType type, VkFormat format, VkImageLayout initial, VkImageLayout final, bool clear, VkSampleCountFlagBits samples) { VkAttachmentDescription attachment = {}; @@ -1833,12 +1876,23 @@ VkAttachmentDescription kvfBuildAttachmentDescription(KvfImageType type, VkForma } else { - attachment.loadOp = VK_ATTACHMENT_LOAD_OP_LOAD; - 
attachment.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_LOAD; + if(samples != VK_SAMPLE_COUNT_1_BIT) + { + attachment.loadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE; + attachment.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE; + } + else + { + attachment.loadOp = VK_ATTACHMENT_LOAD_OP_LOAD; + attachment.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_LOAD; + } } - attachment.samples = VK_SAMPLE_COUNT_1_BIT; - attachment.storeOp = VK_ATTACHMENT_STORE_OP_STORE; + attachment.samples = samples; + if(samples != VK_SAMPLE_COUNT_1_BIT) + attachment.storeOp = VK_ATTACHMENT_STORE_OP_DONT_CARE; + else + attachment.storeOp = VK_ATTACHMENT_STORE_OP_STORE; attachment.stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE; attachment.flags = 0; @@ -1850,13 +1904,17 @@ VkAttachmentDescription kvfBuildSwapchainAttachmentDescription(VkSwapchainKHR sw __KvfSwapchain* kvf_swapchain = __kvfGetKvfSwapchainFromVkSwapchainKHR(swapchain); KVF_ASSERT(kvf_swapchain != NULL); KVF_ASSERT(kvf_swapchain->images_count != 0); - return kvfBuildAttachmentDescription(KVF_IMAGE_COLOR, kvf_swapchain->images_format, VK_IMAGE_LAYOUT_UNDEFINED,VK_IMAGE_LAYOUT_PRESENT_SRC_KHR, clear); + return kvfBuildAttachmentDescription(KVF_IMAGE_COLOR, kvf_swapchain->images_format, VK_IMAGE_LAYOUT_UNDEFINED,VK_IMAGE_LAYOUT_PRESENT_SRC_KHR, clear, VK_SAMPLE_COUNT_1_BIT); } VkRenderPass kvfCreateRenderPass(VkDevice device, VkAttachmentDescription* attachments, size_t attachments_count, VkPipelineBindPoint bind_point) { KVF_ASSERT(device != VK_NULL_HANDLE); + return kvfCreateRenderPassWithSubpassDependencies(device, attachments, attachments_count, bind_point, NULL, 0); +} +VkRenderPass kvfCreateRenderPassWithSubpassDependencies(VkDevice device, VkAttachmentDescription* attachments, size_t attachments_count, VkPipelineBindPoint bind_point, VkSubpassDependency* dependencies, size_t dependencies_count) +{ size_t color_attachment_count = 0; size_t depth_attachment_count = 0; @@ -1911,8 +1969,8 @@ VkRenderPass kvfCreateRenderPass(VkDevice device, VkAttachmentDescription* attac renderpass_create_info.pAttachments = attachments; renderpass_create_info.subpassCount = 1; renderpass_create_info.pSubpasses = &subpass; - renderpass_create_info.dependencyCount = 0; - renderpass_create_info.pDependencies = NULL; + renderpass_create_info.dependencyCount = dependencies_count; + renderpass_create_info.pDependencies = dependencies; VkRenderPass render_pass = VK_NULL_HANDLE; __kvfCheckVk(vkCreateRenderPass(device, &renderpass_create_info, NULL, &render_pass)); @@ -2144,6 +2202,7 @@ void kvfGPipelineBuilderReset(KvfGraphicsPipelineBuilder* builder) builder->tessellation_state.sType = VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO; builder->rasterization_state.sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO; builder->depth_stencil_state.sType = VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO; + builder->multisampling.sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO; } void kvfGPipelineBuilderSetInputTopology(KvfGraphicsPipelineBuilder* builder, VkPrimitiveTopology topology) @@ -2167,6 +2226,20 @@ void kvfGPipelineBuilderSetCullMode(KvfGraphicsPipelineBuilder* builder, VkCullM builder->rasterization_state.frontFace = face; } +void kvfGPipelineBuilderSetMultisampling(KvfGraphicsPipelineBuilder* builder, VkSampleCountFlagBits count) +{ + KVF_ASSERT(builder != NULL); + builder->multisampling.rasterizationSamples = count; +} + +void kvfGPipelineBuilderSetMultisamplingShading(KvfGraphicsPipelineBuilder* builder, VkSampleCountFlagBits count, 
float min_sampling_shading) +{ + KVF_ASSERT(builder != NULL); + builder->multisampling.rasterizationSamples = count; + builder->multisampling.sampleShadingEnable = VK_TRUE; + builder->multisampling.minSampleShading = min_sampling_shading; +} + void kvfGPipelineBuilderDisableBlending(KvfGraphicsPipelineBuilder* builder) { KVF_ASSERT(builder != NULL); @@ -2298,11 +2371,6 @@ VkPipeline kvfCreateGraphicsPipeline(VkDevice device, VkPipelineLayout layout, K viewport_state.scissorCount = 1; viewport_state.pScissors = NULL; - VkPipelineMultisampleStateCreateInfo multisampling = {}; - multisampling.sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO; - multisampling.sampleShadingEnable = VK_FALSE; - multisampling.rasterizationSamples = VK_SAMPLE_COUNT_1_BIT; - VkGraphicsPipelineCreateInfo pipeline_info = {}; pipeline_info.sType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO; pipeline_info.stageCount = builder->shader_stages_count; @@ -2311,7 +2379,7 @@ VkPipeline kvfCreateGraphicsPipeline(VkDevice device, VkPipelineLayout layout, K pipeline_info.pInputAssemblyState = &builder->input_assembly_state; pipeline_info.pViewportState = &viewport_state; pipeline_info.pRasterizationState = &builder->rasterization_state; - pipeline_info.pMultisampleState = &multisampling; + pipeline_info.pMultisampleState = &builder->multisampling; pipeline_info.pColorBlendState = &color_blending; pipeline_info.pDynamicState = &dynamic_states; pipeline_info.layout = layout; From 6eaf871d4587b8ed1c078519c551ac59844a0dbd Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Thu, 5 Sep 2024 00:41:52 +0200 Subject: [PATCH 018/131] fixing compilation issues --- runtime/Includes/Core/Graphics.h | 2 + runtime/Includes/Core/Graphics.inl | 7 +++ runtime/Includes/Core/Logs.h | 2 +- runtime/Includes/Core/Memory.h | 2 - runtime/Includes/Core/SDLManager.h | 4 +- runtime/Includes/Graphics/PutPixelManager.h | 25 ++++++++++ runtime/Includes/Graphics/Scene.h | 1 + runtime/Includes/Platform/Inputs.h | 4 +- runtime/Sources/Core/Application.cpp | 6 ++- runtime/Sources/Core/Graphics.cpp | 9 ++-- runtime/Sources/Core/Logs.cpp | 5 +- runtime/Sources/Core/Profiler.cpp | 2 +- runtime/Sources/Core/SDLManager.cpp | 48 +++++++++---------- runtime/Sources/Core/UUID.cpp | 2 +- runtime/Sources/Graphics/PixelPutManager.cpp | 32 +++++++++++++ runtime/Sources/Graphics/Scene.cpp | 11 +++-- runtime/Sources/Platform/Inputs.cpp | 11 +++++ runtime/Sources/Renderer/Buffer.cpp | 16 +++---- runtime/Sources/Renderer/Descriptor.cpp | 2 +- .../Sources/Renderer/RenderPasses/2DPass.cpp | 2 +- .../Renderer/RenderPasses/FinalPass.cpp | 2 +- .../Sources/Renderer/RenderPasses/Passes.cpp | 2 +- runtime/Sources/Renderer/Renderer.cpp | 2 +- 23 files changed, 140 insertions(+), 59 deletions(-) create mode 100644 runtime/Includes/Graphics/PutPixelManager.h create mode 100644 runtime/Sources/Graphics/PixelPutManager.cpp diff --git a/runtime/Includes/Core/Graphics.h b/runtime/Includes/Core/Graphics.h index 406a39c..28c4653 100644 --- a/runtime/Includes/Core/Graphics.h +++ b/runtime/Includes/Core/Graphics.h @@ -5,6 +5,7 @@ #include #include #include +#include #include #include @@ -39,6 +40,7 @@ namespace mlx private: Renderer m_renderer; SceneRenderer m_scene_renderer; + PutPixelManager m_put_pixel_manager; std::shared_ptr p_window; std::unique_ptr p_scene; diff --git a/runtime/Includes/Core/Graphics.inl b/runtime/Includes/Core/Graphics.inl index 8e5a0cd..84a1e96 100644 --- a/runtime/Includes/Core/Graphics.inl +++ b/runtime/Includes/Core/Graphics.inl @@ -7,12 +7,19 @@ namespace 
mlx { MLX_PROFILE_FUNCTION(); p_scene->ResetSprites(); + m_put_pixel_manager.ResetRenderData(); m_current_depth = 0; } void GraphicsSupport::PixelPut(int x, int y, std::uint32_t color) noexcept { MLX_PROFILE_FUNCTION(); + NonOwningPtr texture = m_put_pixel_manager.DrawPixel(x, y, m_current_depth, color); + if(texture) + { + Sprite& new_sprite = p_scene->CreateSprite(texture); + new_sprite.SetPosition(Vec3f{ 0.0f, 0.0f, static_cast(m_current_depth) }); + } } void GraphicsSupport::StringPut(int x, int y, std::uint32_t color, std::string str) diff --git a/runtime/Includes/Core/Logs.h b/runtime/Includes/Core/Logs.h index 906f969..42bfe7c 100644 --- a/runtime/Includes/Core/Logs.h +++ b/runtime/Includes/Core/Logs.h @@ -48,7 +48,7 @@ namespace mlx namespace mlx { #undef DebugLog - #define DebugLog(...) DebugLog(__LINE__, __FILE__, AK_FUNC_SIG, __VA_ARGS__) + #define DebugLog(...) DebugLog(__LINE__, __FILE__, __func__, __VA_ARGS__) #undef Message #define Message(...) Message(__LINE__, __FILE__, __func__, __VA_ARGS__) diff --git a/runtime/Includes/Core/Memory.h b/runtime/Includes/Core/Memory.h index 4a51eb5..ad0bff9 100644 --- a/runtime/Includes/Core/Memory.h +++ b/runtime/Includes/Core/Memory.h @@ -1,8 +1,6 @@ #ifndef __MLX_MEMORY__ #define __MLX_MEMORY__ -#include - namespace mlx { class MemManager : public Singleton diff --git a/runtime/Includes/Core/SDLManager.h b/runtime/Includes/Core/SDLManager.h index 5fcac81..ddb7b21 100644 --- a/runtime/Includes/Core/SDLManager.h +++ b/runtime/Includes/Core/SDLManager.h @@ -20,7 +20,7 @@ namespace mlx std::vector GetRequiredVulkanInstanceExtentions(Handle window) const noexcept; Vec2ui GetVulkanDrawableSize(Handle window) const noexcept; - inline void SetEventCallback(func::function functor, void* userdata) { f_callback = std::move(functor); p_callback_data = userdata; } + inline void SetEventCallback(func::function functor, void* userdata) { f_callback = std::move(functor); p_callback_data = userdata; } private: SDLManager() = default; @@ -28,7 +28,7 @@ namespace mlx private: std::unordered_set m_windows_registry; - func::function f_callback; + func::function f_callback; void* p_callback_data = nullptr; std::int32_t m_x; std::int32_t m_y; diff --git a/runtime/Includes/Graphics/PutPixelManager.h b/runtime/Includes/Graphics/PutPixelManager.h new file mode 100644 index 0000000..70a9039 --- /dev/null +++ b/runtime/Includes/Graphics/PutPixelManager.h @@ -0,0 +1,25 @@ +#ifndef __MLX_PUT_PIXEL_MANAGER__ +#define __MLX_PUT_PIXEL_MANAGER__ + +#include + +namespace mlx +{ + class PutPixelManager + { + public: + PutPixelManager(NonOwningPtr renderer) : p_renderer(renderer) {} + + // Return a valid pointer when a new texture has been created + NonOwningPtr DrawPixel(int x, int y, std::uint64_t z, std::uint32_t color); + void ResetRenderData(); + + ~PutPixelManager(); + + private: + std::map m_textures; + NonOwningPtr p_renderer; + }; +} + +#endif diff --git a/runtime/Includes/Graphics/Scene.h b/runtime/Includes/Graphics/Scene.h index 7f1d0ec..53bd5b9 100644 --- a/runtime/Includes/Graphics/Scene.h +++ b/runtime/Includes/Graphics/Scene.h @@ -1,6 +1,7 @@ #ifndef __MLX_SCENE__ #define __MLX_SCENE__ +#include #include #include diff --git a/runtime/Includes/Platform/Inputs.h b/runtime/Includes/Platform/Inputs.h index 6e1929a..bd1c91d 100644 --- a/runtime/Includes/Platform/Inputs.h +++ b/runtime/Includes/Platform/Inputs.h @@ -17,7 +17,7 @@ namespace mlx }; public: - Inputs() = default; + Inputs(); void RegisterWindow(std::shared_ptr window); @@ -39,7 +39,7 @@ namespace mlx 
~Inputs() = default; - protected: + private: std::unordered_map> m_windows; std::unordered_map> m_events_hooks; bool m_run = false; diff --git a/runtime/Sources/Core/Application.cpp b/runtime/Sources/Core/Application.cpp index b82cb4f..6398826 100644 --- a/runtime/Sources/Core/Application.cpp +++ b/runtime/Sources/Core/Application.cpp @@ -12,7 +12,11 @@ namespace mlx { EventBus::RegisterListener({[](const EventBase& event) { - }, "__Application" }); + if(event.What() == Event::FatalErrorEventCode) + { + std::abort(); + } + }, "__MlxApplication" }); m_fps.Init(); SDLManager::Get().Init(); diff --git a/runtime/Sources/Core/Graphics.cpp b/runtime/Sources/Core/Graphics.cpp index 15d578b..52f6137 100644 --- a/runtime/Sources/Core/Graphics.cpp +++ b/runtime/Sources/Core/Graphics.cpp @@ -4,6 +4,7 @@ namespace mlx { GraphicsSupport::GraphicsSupport(std::size_t w, std::size_t h, NonOwningPtr render_target, int id) : + m_put_pixel_manager(&m_renderer), p_window(nullptr), m_width(w), m_height(h), @@ -11,8 +12,8 @@ namespace mlx m_has_window(false) { MLX_PROFILE_FUNCTION(); - m_renderer.SetWindow(nullptr); - m_renderer.Init(render_target); + // TODO : re-enable render targets + m_renderer.Init(nullptr); m_scene_renderer.Init(); SceneDescriptor descriptor{}; @@ -21,6 +22,7 @@ namespace mlx } GraphicsSupport::GraphicsSupport(std::size_t w, std::size_t h, std::string title, int id) : + m_put_pixel_manager(&m_renderer), p_window(std::make_shared(w, h, title)), m_width(w), m_height(h), @@ -28,8 +30,7 @@ namespace mlx m_has_window(true) { MLX_PROFILE_FUNCTION(); - m_renderer.SetWindow(p_window.get()); - m_renderer.Init(nullptr); + m_renderer.Init(p_window.get()); m_scene_renderer.Init(); SceneDescriptor descriptor{}; diff --git a/runtime/Sources/Core/Logs.cpp b/runtime/Sources/Core/Logs.cpp index ed15367..88c063a 100644 --- a/runtime/Sources/Core/Logs.cpp +++ b/runtime/Sources/Core/Logs.cpp @@ -1,4 +1,5 @@ #include +#include #include namespace mlx @@ -7,7 +8,7 @@ namespace mlx { struct FatalErrorEvent : public EventBase { - std::uint32_t What() const override { return 167; } + Event What() const override { return Event::FatalErrorEventCode; } }; } @@ -48,7 +49,7 @@ namespace mlx if(type == LogType::FatalError) { std::cout << Ansi::bg_red << "Fatal Error: emergency exit" << Ansi::bg_def << std::endl; - EventBus::Send("__internal_application", Internal::FatalErrorEvent{}); + EventBus::Send("__MlxApplication", Internal::FatalErrorEvent{}); } } } diff --git a/runtime/Sources/Core/Profiler.cpp b/runtime/Sources/Core/Profiler.cpp index 886668a..a3649f7 100644 --- a/runtime/Sources/Core/Profiler.cpp +++ b/runtime/Sources/Core/Profiler.cpp @@ -20,7 +20,7 @@ namespace mlx void Profiler::AppendProfileData(ProfileResult&& result) { - std::lock_guard lock(_mutex); + std::lock_guard lock(m_mutex); auto it = m_profile_data.find(result.name); if(it != m_profile_data.end()) { diff --git a/runtime/Sources/Core/SDLManager.cpp b/runtime/Sources/Core/SDLManager.cpp index f397e82..cdfe5c6 100644 --- a/runtime/Sources/Core/SDLManager.cpp +++ b/runtime/Sources/Core/SDLManager.cpp @@ -32,7 +32,7 @@ namespace mlx m_drop_sdl_responsability = SDL_WasInit(SDL_INIT_VIDEO); if(m_drop_sdl_responsability) // is case the mlx is running in a sandbox like MacroUnitTester where SDL is already init return; - SDL_SetMemoryFunctions(MemManager::malloc, MemManager::calloc, MemManager::realloc, MemManager::free); + SDL_SetMemoryFunctions(MemManager::Get().Malloc, MemManager::Get().Calloc, MemManager::Get().Realloc, MemManager::Get().Free); #ifdef 
FORCE_WAYLAND SDL_SetHint(SDL_HINT_VIDEODRIVER, "wayland,x11"); @@ -43,7 +43,7 @@ namespace mlx struct WatcherData { - func::function callback; + func::function callback; NonOwningPtr manager; void* userdata; }; @@ -61,38 +61,36 @@ namespace mlx } std::uint32_t id = event->window.windowID; - if(events_hooks.find(id) == events_hooks.end()) - continue; switch(event->type) { - case SDL_KEYUP: data->callback(MLX_KEYUP, event->key.keysym.scancode, data->userdata); break; - case SDL_KEYDOWN: data->callback(MLX_KEYDOWN, event->key.keysym.scancode, data->userdata); break; - case SDL_MOUSEBUTTONUP: data->callback(MLX_MOUSEUP, event->button.button, data->userdata); break; - case SDL_MOUSEBUTTONDOWN: data->callback(MLX_MOUSEDOWN, event->button.button, data->userdata); break; + case SDL_KEYUP: data->callback(MLX_KEYUP, id, event->key.keysym.scancode, data->userdata); break; + case SDL_KEYDOWN: data->callback(MLX_KEYDOWN, id, event->key.keysym.scancode, data->userdata); break; + case SDL_MOUSEBUTTONUP: data->callback(MLX_MOUSEUP, id, event->button.button, data->userdata); break; + case SDL_MOUSEBUTTONDOWN: data->callback(MLX_MOUSEDOWN, id, event->button.button, data->userdata); break; case SDL_MOUSEWHEEL: { if(event->wheel.y > 0) // scroll up - data->callback(MLX_MOUSEWHEEL, 1, data->userdata); + data->callback(MLX_MOUSEWHEEL, id, 1, data->userdata); else if(event->wheel.y < 0) // scroll down - data->callback(MLX_MOUSEWHEEL, 2, data->userdata); + data->callback(MLX_MOUSEWHEEL, id, 2, data->userdata); if(event->wheel.x > 0) // scroll right - data->callback(MLX_MOUSEWHEEL, 3, data->userdata); + data->callback(MLX_MOUSEWHEEL, id, 3, data->userdata); else if(event->wheel.x < 0) // scroll left - data->callback(MLX_MOUSEWHEEL, 4, data->userdata); + data->callback(MLX_MOUSEWHEEL, id, 4, data->userdata); break; } case SDL_WINDOWEVENT: { - switch(event.window.event) + switch(event->window.event) { - case SDL_WINDOWEVENT_CLOSE: data->callback(MLX_WINDOW_EVENT, 0, data->userdata); break; - case SDL_WINDOWEVENT_MOVED: data->callback(MLX_WINDOW_EVENT, 1, data->userdata); break; - case SDL_WINDOWEVENT_MINIMIZED: data->callback(MLX_WINDOW_EVENT, 2, data->userdata); break; - case SDL_WINDOWEVENT_MAXIMIZED: data->callback(MLX_WINDOW_EVENT, 3, data->userdata); break; - case SDL_WINDOWEVENT_ENTER: data->callback(MLX_WINDOW_EVENT, 4, data->userdata); break; - case SDL_WINDOWEVENT_FOCUS_GAINED: data->callback(MLX_WINDOW_EVENT, 5, data->userdata); break; - case SDL_WINDOWEVENT_LEAVE: data->callback(MLX_WINDOW_EVENT, 6, data->userdata); break; - case SDL_WINDOWEVENT_FOCUS_LOST: data->callback(MLX_WINDOW_EVENT, 7, data->userdata); break; + case SDL_WINDOWEVENT_CLOSE: data->callback(MLX_WINDOW_EVENT, id, 0, data->userdata); break; + case SDL_WINDOWEVENT_MOVED: data->callback(MLX_WINDOW_EVENT, id, 1, data->userdata); break; + case SDL_WINDOWEVENT_MINIMIZED: data->callback(MLX_WINDOW_EVENT, id, 2, data->userdata); break; + case SDL_WINDOWEVENT_MAXIMIZED: data->callback(MLX_WINDOW_EVENT, id, 3, data->userdata); break; + case SDL_WINDOWEVENT_ENTER: data->callback(MLX_WINDOW_EVENT, id, 4, data->userdata); break; + case SDL_WINDOWEVENT_FOCUS_GAINED: data->callback(MLX_WINDOW_EVENT, id, 5, data->userdata); break; + case SDL_WINDOWEVENT_LEAVE: data->callback(MLX_WINDOW_EVENT, id, 6, data->userdata); break; + case SDL_WINDOWEVENT_FOCUS_LOST: data->callback(MLX_WINDOW_EVENT, id, 7, data->userdata); break; default : break; } @@ -154,14 +152,14 @@ namespace mlx if(!SDL_Vulkan_GetInstanceExtensions(static_cast(window), &count, 
extensions.data())) FatalError("Vulkan : cannot get instance extentions from window : %", SDL_GetError()); - return extentions; + return extensions; } Vec2ui SDLManager::GetVulkanDrawableSize(Handle window) const noexcept { - Vec2ui extent; - SDL_Vulkan_GetDrawableSize(window, &extent.x, &extent.y); - return extent; + Vec2i extent; + SDL_Vulkan_GetDrawableSize(static_cast(window), &extent.x, &extent.y); + return Vec2ui{ extent }; } void SDLManager::Shutdown() noexcept diff --git a/runtime/Sources/Core/UUID.cpp b/runtime/Sources/Core/UUID.cpp index f2da9c2..cf2855a 100644 --- a/runtime/Sources/Core/UUID.cpp +++ b/runtime/Sources/Core/UUID.cpp @@ -1,4 +1,4 @@ -#include +#include #include diff --git a/runtime/Sources/Graphics/PixelPutManager.cpp b/runtime/Sources/Graphics/PixelPutManager.cpp new file mode 100644 index 0000000..81c0c6f --- /dev/null +++ b/runtime/Sources/Graphics/PixelPutManager.cpp @@ -0,0 +1,32 @@ +#include + +#include +#include + +namespace mlx +{ + NonOwningPtr PutPixelManager::DrawPixel(int x, int y, std::uint64_t z, std::uint32_t color) + { + Verify((bool)p_renderer, "invalid renderer pointer"); + auto it = m_textures.find(z); + if(it == m_textures.end()) + { + VkExtent2D swapchain_extent = kvfGetSwapchainImagesSize(p_renderer->GetSwapchain()); + Texture& texture = m_textures[z] = Texture({}, swapchain_extent.width, swapchain_extent.height); + texture.SetPixel(x, y, color); + return &texture; + } + it->second.SetPixel(x, y, color); + return nullptr; + } + + void PutPixelManager::ResetRenderData() + { + m_textures.clear(); + } + + PutPixelManager::~PutPixelManager() + { + ResetRenderData(); + } +} diff --git a/runtime/Sources/Graphics/Scene.cpp b/runtime/Sources/Graphics/Scene.cpp index 46821a9..94eac69 100644 --- a/runtime/Sources/Graphics/Scene.cpp +++ b/runtime/Sources/Graphics/Scene.cpp @@ -21,20 +21,21 @@ namespace mlx NonOwningPtr Scene::GetSpriteFromTextureAndPosition(NonOwningPtr texture, const Vec2f& position) const { - auto it = std::find_if(m_sprites.begin(), m_sprites.end(), [texture, position](const Sprite& sprite) + auto it = std::find_if(m_sprites.begin(), m_sprites.end(), [texture, position](std::shared_ptr sprite) { - return sprite.GetPosition().x == position.x && sprite.GetPosition().y == position.y && sprite.GetTexture() == texture; + return sprite->GetPosition().x == position.x && sprite->GetPosition().y == position.y && sprite->GetTexture() == texture; }); - return (it != m_sprites.end() ? &(*it) : nullptr); + return (it != m_sprites.end() ? 
it->get() : nullptr); } void Scene::TryEraseSpriteFromTexture(NonOwningPtr texture) { + auto it = m_sprites.begin(); do { - auto it = std::find_if(m_sprites.begin(), m_sprites.end(), [texture, position](const Sprite& sprite) + it = std::find_if(m_sprites.begin(), m_sprites.end(), [texture](std::shared_ptr sprite) { - return sprite.GetPosition().x == position.x && sprite.GetPosition().y == position.y && sprite.GetTexture() == texture; + return sprite->GetTexture() == texture; }); m_sprites.erase(it); } while(it != m_sprites.end()); diff --git a/runtime/Sources/Platform/Inputs.cpp b/runtime/Sources/Platform/Inputs.cpp index 7dc8377..03943e0 100644 --- a/runtime/Sources/Platform/Inputs.cpp +++ b/runtime/Sources/Platform/Inputs.cpp @@ -1,7 +1,18 @@ #include +#include #include +#include namespace mlx { + Inputs::Inputs() + { + SDLManager::Get().SetEventCallback([this](mlx_event_type event, int window_id, int code, [[maybe_unused]] void* userdata) + { + if(m_windows.find(window_id) == m_windows.end()) + return; + m_events_hooks[window_id][event].hook(code, m_events_hooks[window_id][event].param); + }, nullptr); + } } diff --git a/runtime/Sources/Renderer/Buffer.cpp b/runtime/Sources/Renderer/Buffer.cpp index f6feb3a..b60233d 100644 --- a/runtime/Sources/Renderer/Buffer.cpp +++ b/runtime/Sources/Renderer/Buffer.cpp @@ -32,7 +32,7 @@ namespace mlx if(!data.Empty()) { if(p_map != nullptr) - std::memcpy(m_memory.map, data.GetData(), data.GetSize()); + std::memcpy(p_map, data.GetData(), data.GetSize()); } if(type == BufferType::Constant || type == BufferType::LowDynamic) PushToGPU(); @@ -46,7 +46,7 @@ namespace mlx bufferInfo.usage = usage; bufferInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE; - m_allocation = RenderCore::Get().GetAllocator().CreateBuffer(&bufferInfo, &info, m_buffer, nullptr); + m_allocation = RenderCore::Get().GetAllocator().CreateBuffer(&bufferInfo, &alloc_info, m_buffer, nullptr); if(alloc_info.flags != 0) RenderCore::Get().GetAllocator().MapMemory(m_allocation, &p_map); } @@ -66,7 +66,7 @@ namespace mlx VkCommandBuffer cmd = kvfCreateCommandBuffer(RenderCore::Get().GetDevice()); kvfBeginCommandBuffer(cmd, VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT); - kvfCopyBufferToBuffer(cmd, m_buffer, buffer.Get(), m_memory.size); + kvfCopyBufferToBuffer(cmd, m_buffer, buffer.Get(), m_size); kvfEndCommandBuffer(cmd); VkFence fence = kvfCreateFence(RenderCore::Get().GetDevice()); kvfSubmitSingleTimeCommandBuffer(RenderCore::Get().GetDevice(), cmd, KVF_GRAPHICS_QUEUE, fence); @@ -82,7 +82,7 @@ namespace mlx GPUBuffer new_buffer; new_buffer.m_usage = (this->m_usage & 0xFFFFFFFC) | VK_BUFFER_USAGE_TRANSFER_DST_BIT; - new_buffer.CreateBuffer(m_memory.size, new_buffer.m_usage, alloc_info); + new_buffer.CreateBuffer(m_size, new_buffer.m_usage, alloc_info); if(new_buffer.CopyFrom(*this)) Swap(new_buffer); @@ -111,9 +111,9 @@ namespace mlx void VertexBuffer::SetData(CPUBuffer data) { - if(data.GetSize() > m_memory.size) + if(data.GetSize() > m_size) { - Error("Vulkan : trying to store to much data in a vertex buffer (% bytes in % bytes)", data.GetSize(), m_memory.size); + Error("Vulkan : trying to store to much data in a vertex buffer (% bytes in % bytes)", data.GetSize(), m_size); return; } if(data.Empty()) @@ -129,9 +129,9 @@ namespace mlx void IndexBuffer::SetData(CPUBuffer data) { - if(data.GetSize() > m_memory.size) + if(data.GetSize() > m_size) { - Error("Vulkan : trying to store to much data in an index buffer (% bytes in % bytes)", data.GetSize(), m_memory.size); + Error("Vulkan : trying to 
store to much data in an index buffer (% bytes in % bytes)", data.GetSize(), m_size); return; } if(data.Empty()) diff --git a/runtime/Sources/Renderer/Descriptor.cpp b/runtime/Sources/Renderer/Descriptor.cpp index 2fa6d56..4ccbced 100644 --- a/runtime/Sources/Renderer/Descriptor.cpp +++ b/runtime/Sources/Renderer/Descriptor.cpp @@ -139,7 +139,7 @@ namespace mlx vkUpdateDescriptorSets(RenderCore::Get().GetDevice(), writes.size(), writes.data(), 0, nullptr); } - void Descriptor::Reallocate() noexcept + void DescriptorSet::Reallocate() noexcept { for(std::size_t i = 0; i < MAX_FRAMES_IN_FLIGHT; i++) m_set[i] = kvfAllocateDescriptorSet(RenderCore::Get().GetDevice(), m_set_layout); diff --git a/runtime/Sources/Renderer/RenderPasses/2DPass.cpp b/runtime/Sources/Renderer/RenderPasses/2DPass.cpp index 57cff33..a694b8c 100644 --- a/runtime/Sources/Renderer/RenderPasses/2DPass.cpp +++ b/runtime/Sources/Renderer/RenderPasses/2DPass.cpp @@ -58,7 +58,7 @@ namespace mlx } } }; - EventBus::RegisterListener({ functor, "__ScopRender2DPass" }); + EventBus::RegisterListener({ functor, "__MlxRender2DPass" }); p_viewer_data_set = std::make_shared(p_vertex_shader->GetShaderLayout().set_layouts[0].second, p_vertex_shader->GetPipelineLayout().set_layouts[0], ShaderType::Vertex); p_texture_set = std::make_shared(p_fragment_shader->GetShaderLayout().set_layouts[0].second, p_fragment_shader->GetPipelineLayout().set_layouts[0], ShaderType::Fragment); diff --git a/runtime/Sources/Renderer/RenderPasses/FinalPass.cpp b/runtime/Sources/Renderer/RenderPasses/FinalPass.cpp index 5c2c410..5a057e4 100644 --- a/runtime/Sources/Renderer/RenderPasses/FinalPass.cpp +++ b/runtime/Sources/Renderer/RenderPasses/FinalPass.cpp @@ -37,7 +37,7 @@ namespace mlx if(event.What() == Event::DescriptorPoolResetEventCode) p_set->Reallocate(); }; - EventBus::RegisterListener({ functor, "__ScopFinalPass" }); + EventBus::RegisterListener({ functor, "__MlxFinalPass" }); p_set = std::make_shared(p_fragment_shader->GetShaderLayout().set_layouts[0].second, p_fragment_shader->GetPipelineLayout().set_layouts[0], ShaderType::Fragment); } diff --git a/runtime/Sources/Renderer/RenderPasses/Passes.cpp b/runtime/Sources/Renderer/RenderPasses/Passes.cpp index e905280..83024f1 100644 --- a/runtime/Sources/Renderer/RenderPasses/Passes.cpp +++ b/runtime/Sources/Renderer/RenderPasses/Passes.cpp @@ -24,7 +24,7 @@ namespace mlx m_main_render_texture.Init({}, extent.width, extent.height); } }; - EventBus::RegisterListener({ functor, "__ScopRenderPasses" }); + EventBus::RegisterListener({ functor, "__MlxRenderPasses" }); auto extent = kvfGetSwapchainImagesSize(renderer.GetSwapchain()); m_main_render_texture.Init({}, extent.width, extent.height); diff --git a/runtime/Sources/Renderer/Renderer.cpp b/runtime/Sources/Renderer/Renderer.cpp index 8aab412..d544e2a 100644 --- a/runtime/Sources/Renderer/Renderer.cpp +++ b/runtime/Sources/Renderer/Renderer.cpp @@ -32,7 +32,7 @@ namespace mlx if(event.What() == Event::ResizeEventCode) this->RequireFramebufferResize(); }; - EventBus::RegisterListener({ functor, "__ScopRenderer" }); + EventBus::RegisterListener({ functor, "__MlxRenderer" + std::to_string(reinterpret_cast(this)) }); p_window = window; From 77bfe4ff1168d91c2b7d03ffc4f1ff58ce7d49cd Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Fri, 13 Sep 2024 23:51:01 +0200 Subject: [PATCH 019/131] yes --- runtime/Sources/Renderer/RenderCore.cpp | 3 + third_party/kvf.h | 783 +++++++++++++++--------- 2 files changed, 512 insertions(+), 274 deletions(-) diff --git 
a/runtime/Sources/Renderer/RenderCore.cpp b/runtime/Sources/Renderer/RenderCore.cpp index 6623ae3..ebcb6e6 100644 --- a/runtime/Sources/Renderer/RenderCore.cpp +++ b/runtime/Sources/Renderer/RenderCore.cpp @@ -42,6 +42,9 @@ namespace mlx Window window(1, 1, "", true); std::vector instance_extentions = window.GetRequiredVulkanInstanceExtentions(); + #ifdef MLX_PLAT_MACOS + instance_extentions.push_back(VK_KHR_PORTABILITY_ENUMERATION_EXTENSION_NAME); + #endif m_instance = kvfCreateInstance(instance_extensions.data(), instance_extensions.size()); DebugLog("Vulkan : instance created"); diff --git a/third_party/kvf.h b/third_party/kvf.h index 4878ef9..2fc82ba 100755 --- a/third_party/kvf.h +++ b/third_party/kvf.h @@ -43,6 +43,8 @@ * or VK_NO_PROTOTYPES before including this file to avoid conflicts with Vulkan prototypes. * * You can also #define KVF_ENABLE_VALIDATION_LAYERS to enable validation layers. + * + * Use #define KVF_NO_KHR to remove all functions that use KHR calls. */ #ifndef KBZ_8_VULKAN_FRAMEWORK_H @@ -89,20 +91,30 @@ void kvfSetValidationWarningCallback(KvfErrorCallback callback); void kvfAddLayer(const char* layer); -VkInstance kvfCreateInstance(const char** extensionsEnabled, uint32_t extensionsCount); +VkInstance kvfCreateInstance(const char** extensions_enabled, uint32_t extensions_count); void kvfDestroyInstance(VkInstance instance); // If surfaces given to theses functions are VK_NULL_HANDLE no present queues will be searched and thus kvfQueuePresentKHR will not work VkPhysicalDevice kvfPickFirstPhysicalDevice(VkInstance instance, VkSurfaceKHR surface); VkPhysicalDevice kvfPickGoodDefaultPhysicalDevice(VkInstance instance, VkSurfaceKHR surface); -VkPhysicalDevice kvfPickGoodPhysicalDevice(VkInstance instance, VkSurfaceKHR surface, const char** deviceExtensions, uint32_t deviceExtensionsCount); +VkPhysicalDevice kvfPickGoodPhysicalDevice(VkInstance instance, VkSurfaceKHR surface, const char** device_extensions, uint32_t device_extensions_count); VkQueue kvfGetDeviceQueue(VkDevice device, KvfQueueType queue); uint32_t kvfGetDeviceQueueFamily(VkDevice device, KvfQueueType queue); -bool kvfQueuePresentKHR(VkDevice device, VkSemaphore wait, VkSwapchainKHR swapchain, uint32_t image_index); // return false when the swapchain must be recreated +#ifndef KVF_NO_KHR + bool kvfQueuePresentKHR(VkDevice device, VkSemaphore wait, VkSwapchainKHR swapchain, uint32_t image_index); // return false when the swapchain must be recreated +#endif + +// Meant to be used when creating a VkDevice with a custom VkPhysicalDevice +int32_t kvfFindDeviceQueueFamily(VkPhysicalDevice physical, KvfQueueType type); // This function cannot find present queue +#ifndef KVF_NO_KHR + int32_t kvfFindDeviceQueueFamilyKHR(VkPhysicalDevice physical, VkSurfaceKHR surface, KvfQueueType type); // This one can find present queue +#endif VkDevice kvfCreateDefaultDevice(VkPhysicalDevice physical); VkDevice kvfCreateDevice(VkPhysicalDevice physical, const char** extensions, uint32_t extensions_count, VkPhysicalDeviceFeatures* features); +VkDevice kvfCreateDefaultDevicePhysicalDeviceAndCustomQueues(VkPhysicalDevice physical, int32_t graphics_queue, int32_t present_queue, int32_t compute_queue); +VkDevice kvfCreateDeviceCustomPhysicalDeviceAndQueues(VkPhysicalDevice physical, const char** extensions, uint32_t extensions_count, VkPhysicalDeviceFeatures* features, int32_t graphics_queue, int32_t present_queue, int32_t compute_queue); void kvfDestroyDevice(VkDevice device); VkFence kvfCreateFence(VkDevice device); @@ -112,15 
+124,17 @@ void kvfDestroyFence(VkDevice device, VkFence fence); VkSemaphore kvfCreateSemaphore(VkDevice device); void kvfDestroySemaphore(VkDevice device, VkSemaphore semaphore); -VkSwapchainKHR kvfCreateSwapchainKHR(VkDevice device, VkPhysicalDevice physical, VkSurfaceKHR surface, VkExtent2D extent, bool tryVsync); -VkFormat kvfGetSwapchainImagesFormat(VkSwapchainKHR swapchain); -uint32_t kvfGetSwapchainImagesCount(VkSwapchainKHR swapchain); -uint32_t kvfGetSwapchainMinImagesCount(VkSwapchainKHR swapchain); -VkExtent2D kvfGetSwapchainImagesSize(VkSwapchainKHR swapchain); -void kvfDestroySwapchainKHR(VkDevice device, VkSwapchainKHR swapchain); +#ifndef KVF_NO_KHR + VkSwapchainKHR kvfCreateSwapchainKHR(VkDevice device, VkPhysicalDevice physical, VkSurfaceKHR surface, VkExtent2D extent, bool try_vsync); + VkFormat kvfGetSwapchainImagesFormat(VkSwapchainKHR swapchain); + uint32_t kvfGetSwapchainImagesCount(VkSwapchainKHR swapchain); + uint32_t kvfGetSwapchainMinImagesCount(VkSwapchainKHR swapchain); + VkExtent2D kvfGetSwapchainImagesSize(VkSwapchainKHR swapchain); + void kvfDestroySwapchainKHR(VkDevice device, VkSwapchainKHR swapchain); +#endif VkImage kvfCreateImage(VkDevice device, uint32_t width, uint32_t height, VkFormat format, VkImageTiling tiling, VkImageUsageFlags usage, KvfImageType type); -void kvfImageToBuffer(VkCommandBuffer cmd, VkBuffer dst, VkImage src, size_t size); +void kvfImageToBuffer(VkCommandBuffer cmd, VkBuffer dst, VkImage src, size_t buffer_offset, VkImageAspectFlagBits aspect, VkExtent3D extent); void kvfDestroyImage(VkDevice device, VkImage image); VkImageView kvfCreateImageView(VkDevice device, VkImage image, VkFormat format, VkImageViewType type, VkImageAspectFlags aspect, int layer_count); void kvfDestroyImageView(VkDevice device, VkImageView image_view); @@ -145,7 +159,9 @@ void kvfSubmitCommandBuffer(VkDevice device, VkCommandBuffer buffer, KvfQueueTyp void kvfSubmitSingleTimeCommandBuffer(VkDevice device, VkCommandBuffer buffer, KvfQueueType queue, VkFence fence); VkAttachmentDescription kvfBuildAttachmentDescription(KvfImageType type, VkFormat format, VkImageLayout initial, VkImageLayout final, bool clear, VkSampleCountFlagBits samples); -VkAttachmentDescription kvfBuildSwapchainAttachmentDescription(VkSwapchainKHR swapchain, bool clear); +#ifndef KVF_NO_KHR + VkAttachmentDescription kvfBuildSwapchainAttachmentDescription(VkSwapchainKHR swapchain, bool clear); +#endif VkRenderPass kvfCreateRenderPass(VkDevice device, VkAttachmentDescription* attachments, size_t attachments_count, VkPipelineBindPoint bind_point); VkRenderPass kvfCreateRenderPassWithSubpassDependencies(VkDevice device, VkAttachmentDescription* attachments, size_t attachments_count, VkPipelineBindPoint bind_point, VkSubpassDependency* dependencies, size_t dependencies_count); @@ -260,23 +276,25 @@ typedef struct size_t sets_pools_size; } __KvfDevice; -typedef struct -{ - VkSurfaceCapabilitiesKHR capabilities; - VkSurfaceFormatKHR* formats; - VkPresentModeKHR* presentModes; - uint32_t formatsCount; - uint32_t presentModesCount; -} __KvfSwapchainSupportInternal; +#ifndef KVF_NO_KHR + typedef struct + { + VkSurfaceCapabilitiesKHR capabilities; + VkSurfaceFormatKHR* formats; + VkPresentModeKHR* presentModes; + uint32_t formats_count; + uint32_t presentModes_count; + } __KvfSwapchainSupportInternal; -typedef struct -{ - __KvfSwapchainSupportInternal support; - VkSwapchainKHR swapchain; - VkExtent2D images_extent; - VkFormat images_format; - uint32_t images_count; -} __KvfSwapchain; + typedef struct 
+ { + __KvfSwapchainSupportInternal support; + VkSwapchainKHR swapchain; + VkExtent2D images_extent; + VkFormat images_format; + uint32_t images_count; + } __KvfSwapchain; +#endif typedef struct { @@ -302,9 +320,11 @@ __KvfDevice* __kvf_internal_devices = NULL; size_t __kvf_internal_devices_size = 0; size_t __kvf_internal_devices_capacity = 0; -__KvfSwapchain* __kvf_internal_swapchains = NULL; -size_t __kvf_internal_swapchains_size = 0; -size_t __kvf_internal_swapchains_capacity = 0; +#ifndef KVF_NO_KHR + __KvfSwapchain* __kvf_internal_swapchains = NULL; + size_t __kvf_internal_swapchains_size = 0; + size_t __kvf_internal_swapchains_capacity = 0; +#endif __KvfFramebuffer* __kvf_internal_framebuffers = NULL; size_t __kvf_internal_framebuffers_size = 0; @@ -346,7 +366,7 @@ void kvfCheckVk(VkResult result) __kvfCheckVk(result); } -void __kvfAddDeviceToArray(VkPhysicalDevice device, int32_t graphics_queue, int32_t present_queue) +void __kvfAddDeviceToArray(VkPhysicalDevice device, int32_t graphics_queue, int32_t present_queue, int32_t compute_queue) { KVF_ASSERT(device != VK_NULL_HANDLE); if(__kvf_internal_devices_size == __kvf_internal_devices_capacity) @@ -358,6 +378,7 @@ void __kvfAddDeviceToArray(VkPhysicalDevice device, int32_t graphics_queue, int3 __kvf_internal_devices[__kvf_internal_devices_size].physical = device; __kvf_internal_devices[__kvf_internal_devices_size].queues.graphics = graphics_queue; + __kvf_internal_devices[__kvf_internal_devices_size].queues.compute = compute_queue; __kvf_internal_devices[__kvf_internal_devices_size].queues.present = present_queue; __kvf_internal_devices_size++; } @@ -390,6 +411,36 @@ void __kvfCompleteDevice(VkPhysicalDevice physical, VkDevice device) kvf_device->sets_pools_size = 0; } +void __kvfCompleteDeviceCustomPhysicalDeviceAndQueues(VkPhysicalDevice physical, VkDevice device, int32_t graphics_queue, int32_t present_queue, int32_t compute_queue) +{ + KVF_ASSERT(device != VK_NULL_HANDLE); + KVF_ASSERT(physical != VK_NULL_HANDLE); + + __kvfAddDeviceToArray(physical, graphics_queue, present_queue, compute_queue); + + __KvfDevice* kvf_device = NULL; + + for(size_t i = 0; i < __kvf_internal_devices_size; i++) + { + if(__kvf_internal_devices[i].physical == physical) + kvf_device = &__kvf_internal_devices[i]; + } + + KVF_ASSERT(kvf_device != NULL); + + VkCommandPool pool; + VkCommandPoolCreateInfo pool_info = {}; + pool_info.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO; + pool_info.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT; + pool_info.queueFamilyIndex = kvf_device->queues.graphics; + __kvfCheckVk(vkCreateCommandPool(device, &pool_info, NULL, &pool)); + + kvf_device->device = device; + kvf_device->cmd_pool = pool; + kvf_device->sets_pools = NULL; + kvf_device->sets_pools_size = 0; +} + void __kvfDestroyDescriptorPools(VkDevice device); void __kvfDestroyDevice(VkDevice device) @@ -438,58 +489,60 @@ __KvfDevice* __kvfGetKvfDeviceFromVkDevice(VkDevice device) return NULL; } -void __kvfAddSwapchainToArray(VkSwapchainKHR swapchain, __KvfSwapchainSupportInternal support, VkFormat format, uint32_t images_count, VkExtent2D extent) -{ - KVF_ASSERT(swapchain != VK_NULL_HANDLE); - if(__kvf_internal_swapchains_size == __kvf_internal_swapchains_capacity) +#ifndef KVF_NO_KHR + void __kvfAddSwapchainToArray(VkSwapchainKHR swapchain, __KvfSwapchainSupportInternal support, VkFormat format, uint32_t images_count, VkExtent2D extent) { - // Resize the dynamic array if necessary - __kvf_internal_swapchains_capacity += 2; - __kvf_internal_swapchains = 
(__KvfSwapchain*)KVF_REALLOC(__kvf_internal_swapchains, __kvf_internal_swapchains_capacity * sizeof(__KvfSwapchain)); - } - - __kvf_internal_swapchains[__kvf_internal_swapchains_size].swapchain = swapchain; - __kvf_internal_swapchains[__kvf_internal_swapchains_size].support = support; - __kvf_internal_swapchains[__kvf_internal_swapchains_size].images_format = format; - __kvf_internal_swapchains[__kvf_internal_swapchains_size].images_count = images_count; - __kvf_internal_swapchains[__kvf_internal_swapchains_size].images_extent = extent; - __kvf_internal_swapchains_size++; -} - -void __kvfDestroySwapchain(VkDevice device, VkSwapchainKHR swapchain) -{ - KVF_ASSERT(swapchain != VK_NULL_HANDLE); - KVF_ASSERT(device != VK_NULL_HANDLE); - - for(size_t i = 0; i < __kvf_internal_swapchains_size; i++) - { - if(__kvf_internal_swapchains[i].swapchain == swapchain) + KVF_ASSERT(swapchain != VK_NULL_HANDLE); + if(__kvf_internal_swapchains_size == __kvf_internal_swapchains_capacity) { - vkDestroySwapchainKHR(device, swapchain, NULL); - // Shift the elements to fill the gap - for(size_t j = i; j < __kvf_internal_swapchains_size - 1; j++) - __kvf_internal_swapchains[j] = __kvf_internal_swapchains[j + 1]; - __kvf_internal_swapchains_size--; - if(__kvf_internal_swapchains_size == 0) - { - KVF_FREE(__kvf_internal_swapchains); - __kvf_internal_swapchains_capacity = 0; - } - return; + // Resize the dynamic array if necessary + __kvf_internal_swapchains_capacity += 2; + __kvf_internal_swapchains = (__KvfSwapchain*)KVF_REALLOC(__kvf_internal_swapchains, __kvf_internal_swapchains_capacity * sizeof(__KvfSwapchain)); } - } -} -__KvfSwapchain* __kvfGetKvfSwapchainFromVkSwapchainKHR(VkSwapchainKHR swapchain) -{ - KVF_ASSERT(swapchain != VK_NULL_HANDLE); - for(size_t i = 0; i < __kvf_internal_swapchains_size; i++) - { - if(__kvf_internal_swapchains[i].swapchain == swapchain) - return &__kvf_internal_swapchains[i]; + __kvf_internal_swapchains[__kvf_internal_swapchains_size].swapchain = swapchain; + __kvf_internal_swapchains[__kvf_internal_swapchains_size].support = support; + __kvf_internal_swapchains[__kvf_internal_swapchains_size].images_format = format; + __kvf_internal_swapchains[__kvf_internal_swapchains_size].images_count = images_count; + __kvf_internal_swapchains[__kvf_internal_swapchains_size].images_extent = extent; + __kvf_internal_swapchains_size++; } - return NULL; -} + + void __kvfDestroySwapchain(VkDevice device, VkSwapchainKHR swapchain) + { + KVF_ASSERT(swapchain != VK_NULL_HANDLE); + KVF_ASSERT(device != VK_NULL_HANDLE); + + for(size_t i = 0; i < __kvf_internal_swapchains_size; i++) + { + if(__kvf_internal_swapchains[i].swapchain == swapchain) + { + vkDestroySwapchainKHR(device, swapchain, NULL); + // Shift the elements to fill the gap + for(size_t j = i; j < __kvf_internal_swapchains_size - 1; j++) + __kvf_internal_swapchains[j] = __kvf_internal_swapchains[j + 1]; + __kvf_internal_swapchains_size--; + if(__kvf_internal_swapchains_size == 0) + { + KVF_FREE(__kvf_internal_swapchains); + __kvf_internal_swapchains_capacity = 0; + } + return; + } + } + } + + __KvfSwapchain* __kvfGetKvfSwapchainFromVkSwapchainKHR(VkSwapchainKHR swapchain) + { + KVF_ASSERT(swapchain != VK_NULL_HANDLE); + for(size_t i = 0; i < __kvf_internal_swapchains_size; i++) + { + if(__kvf_internal_swapchains[i].swapchain == swapchain) + return &__kvf_internal_swapchains[i]; + } + return NULL; + } +#endif void __kvfAddFramebufferToArray(VkFramebuffer framebuffer, VkExtent2D extent) { @@ -1025,19 +1078,23 @@ VkInstance 
kvfCreateInstance(const char** extensions_enabled, uint32_t extension VkInstanceCreateInfo create_info = {}; create_info.sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO; create_info.pApplicationInfo = NULL; - create_info.flags = 0; create_info.enabledExtensionCount = extensions_count; create_info.ppEnabledExtensionNames = extensions_enabled; create_info.enabledLayerCount = 0; create_info.ppEnabledLayerNames = NULL; create_info.pNext = NULL; + #if defined(VK_USE_PLATFORM_MACOS_MVK) || defined(VK_USE_PLATFORM_METAL_EXT) + create_info.flags = VK_INSTANCE_CREATE_ENUMERATE_PORTABILITY_BIT_KHR; + #else + create_info.flags = 0; + #endif #ifdef KVF_ENABLE_VALIDATION_LAYERS kvfAddLayer("VK_LAYER_KHRONOS_validation"); const char** new_extension_set = NULL; + VkDebugUtilsMessengerCreateInfoEXT debug_create_info = {}; if(__kvfCheckValidationLayerSupport()) { - VkDebugUtilsMessengerCreateInfoEXT debug_create_info = {}; __kvfPopulateDebugMessengerCreateInfo(&debug_create_info); new_extension_set = (const char**)KVF_MALLOC(sizeof(char*) * (extensions_count + 1)); memcpy(new_extension_set, extensions_enabled, sizeof(char*) * extensions_count); @@ -1090,17 +1147,22 @@ __KvfQueueFamilies __kvfFindQueueFamilies(VkPhysicalDevice physical, VkSurfaceKH queues.compute = i; if(queue_families[i].queueFlags & VK_QUEUE_GRAPHICS_BIT) queues.graphics = i; - VkBool32 present_support = false; - if(surface != VK_NULL_HANDLE) - { - vkGetPhysicalDeviceSurfaceSupportKHR(physical, i, surface, &present_support); - if(present_support) - queues.present = i; - if(queues.graphics != -1 && queues.present != -1 && queues.compute != -1) + #ifndef KVF_NO_KHR + VkBool32 present_support = false; + if(surface != VK_NULL_HANDLE) + { + vkGetPhysicalDeviceSurfaceSupportKHR(physical, i, surface, &present_support); + if(present_support) + queues.present = i; + if(queues.graphics != -1 && queues.present != -1 && queues.compute != -1) + break; + } + else if(queues.graphics != -1 && queues.compute != -1) break; - } - else if(queues.graphics != -1 && queues.compute != -1) - break; + #else + if(queues.graphics != -1 && queues.compute != -1) + break; + #endif } KVF_FREE(queue_families); return queues; @@ -1113,14 +1175,14 @@ VkPhysicalDevice kvfPickFirstPhysicalDevice(VkInstance instance, VkSurfaceKHR su VkPhysicalDevice chosen_one = VK_NULL_HANDLE; KVF_ASSERT(instance != VK_NULL_HANDLE); - + vkEnumeratePhysicalDevices(instance, &device_count, NULL); devices = (VkPhysicalDevice*)KVF_MALLOC(sizeof(VkPhysicalDevice) * device_count + 1); vkEnumeratePhysicalDevices(instance, &device_count, devices); chosen_one = devices[0]; KVF_FREE(devices); __KvfQueueFamilies queues = __kvfFindQueueFamilies(chosen_one, surface); - __kvfAddDeviceToArray(chosen_one, queues.graphics, queues.present); + __kvfAddDeviceToArray(chosen_one, queues.graphics, queues.present, queues.present); return chosen_one; } @@ -1130,21 +1192,21 @@ VkPhysicalDevice kvfPickGoodDefaultPhysicalDevice(VkInstance instance, VkSurface return kvfPickGoodPhysicalDevice(instance, surface, extensions, sizeof(extensions) / sizeof(extensions[0])); } -int32_t __kvfScorePhysicalDevice(VkPhysicalDevice device, VkSurfaceKHR surface, const char** deviceExtensions, uint32_t deviceExtensionsCount) +int32_t __kvfScorePhysicalDevice(VkPhysicalDevice device, VkSurfaceKHR surface, const char** device_extensions, uint32_t device_extensions_count) { - /* Check Extensions Support */ + /* Check extensions support */ uint32_t extension_count; vkEnumerateDeviceExtensionProperties(device, NULL, &extension_count, NULL); 
VkExtensionProperties* props = (VkExtensionProperties*)KVF_MALLOC(sizeof(VkExtensionProperties) * extension_count + 1); vkEnumerateDeviceExtensionProperties(device, NULL, &extension_count, props); bool are_there_required_device_extensions = true; - for(int j = 0; j < deviceExtensionsCount; j++) + for(int j = 0; j < device_extensions_count; j++) { bool is_there_extension = false; for(int k = 0; k < extension_count; k++) { - if(strcmp(deviceExtensions[j], props[k].extensionName) == 0) + if(strcmp(device_extensions[j], props[k].extensionName) == 0) { is_there_extension = true; break; @@ -1162,14 +1224,19 @@ int32_t __kvfScorePhysicalDevice(VkPhysicalDevice device, VkSurfaceKHR surface, /* Check Queue Families Support */ __KvfQueueFamilies queues = __kvfFindQueueFamilies(device, surface); - if(queues.graphics == -1 || queues.present == -1) + if(queues.graphics == -1 || (surface != VK_NULL_HANDLE && queues.present == -1)) return -1; - /* Check Surface Formats Counts */ - uint32_t format_count; - vkGetPhysicalDeviceSurfaceFormatsKHR(device, surface, &format_count, NULL); - if(format_count == 0) - return -1; + #ifndef KVF_NO_KHR + if(surface != VK_NULL_HANDLE) + { + /* Check surface formats counts */ + uint32_t format_count; + vkGetPhysicalDeviceSurfaceFormatsKHR(device, surface, &format_count, NULL); + if(format_count == 0) + return -1; + } + #endif VkPhysicalDeviceProperties device_props; vkGetPhysicalDeviceProperties(device, &device_props); @@ -1190,7 +1257,7 @@ int32_t __kvfScorePhysicalDevice(VkPhysicalDevice device, VkSurfaceKHR surface, return score; } -VkPhysicalDevice kvfPickGoodPhysicalDevice(VkInstance instance, VkSurfaceKHR surface, const char** deviceExtensions, uint32_t deviceExtensionsCount) +VkPhysicalDevice kvfPickGoodPhysicalDevice(VkInstance instance, VkSurfaceKHR surface, const char** device_extensions, uint32_t device_extensions_count) { VkPhysicalDevice* devices = NULL; VkPhysicalDevice chosen_one = VK_NULL_HANDLE; @@ -1198,7 +1265,6 @@ VkPhysicalDevice kvfPickGoodPhysicalDevice(VkInstance instance, VkSurfaceKHR sur int32_t best_device_score = -1; KVF_ASSERT(instance != VK_NULL_HANDLE); - KVF_ASSERT(surface != VK_NULL_HANDLE); vkEnumeratePhysicalDevices(instance, &device_count, NULL); devices = (VkPhysicalDevice*)KVF_MALLOC(sizeof(VkPhysicalDevice) * device_count + 1); @@ -1206,7 +1272,7 @@ VkPhysicalDevice kvfPickGoodPhysicalDevice(VkInstance instance, VkSurfaceKHR sur for(int i = 0; i < device_count; i++) { - int32_t current_device_score = __kvfScorePhysicalDevice(devices[i], surface, deviceExtensions, deviceExtensionsCount); + int32_t current_device_score = __kvfScorePhysicalDevice(devices[i], surface, device_extensions, device_extensions_count); if(current_device_score > best_device_score) { best_device_score = current_device_score; @@ -1217,7 +1283,7 @@ VkPhysicalDevice kvfPickGoodPhysicalDevice(VkInstance instance, VkSurfaceKHR sur if(chosen_one != VK_NULL_HANDLE) { __KvfQueueFamilies queues = __kvfFindQueueFamilies(chosen_one, surface); - __kvfAddDeviceToArray(chosen_one, queues.graphics, queues.present); + __kvfAddDeviceToArray(chosen_one, queues.graphics, queues.present, queues.compute); return chosen_one; } return VK_NULL_HANDLE; @@ -1237,27 +1303,49 @@ VkDevice kvfCreateDevice(VkPhysicalDevice physical, const char** extensions, uin __KvfDevice* kvfdevice = __kvfGetKvfDeviceFromVkPhysicalDevice(physical); KVF_ASSERT(kvfdevice != NULL); - KVF_ASSERT(kvfdevice->queues.graphics != -1); - KVF_ASSERT(kvfdevice->queues.present != -1); - VkDeviceQueueCreateInfo 
queue_create_info[2]; - queue_create_info[0].sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO; - queue_create_info[0].queueFamilyIndex = kvfdevice->queues.graphics; - queue_create_info[0].queueCount = 1; - queue_create_info[0].pQueuePriorities = &queue_priority; - queue_create_info[0].flags = 0; - queue_create_info[0].pNext = NULL; - queue_create_info[1].sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO; - queue_create_info[1].queueFamilyIndex = kvfdevice->queues.present; - queue_create_info[1].queueCount = 1; - queue_create_info[1].pQueuePriorities = &queue_priority; - queue_create_info[1].flags = 0; - queue_create_info[1].pNext = NULL; + uint32_t queue_count = 0; + queue_count += (kvfdevice->queues.graphics != -1); + queue_count += (kvfdevice->queues.present != -1); + queue_count += (kvfdevice->queues.compute != -1); + + VkDeviceQueueCreateInfo* queue_create_infos = (VkDeviceQueueCreateInfo*)KVF_MALLOC(queue_count * sizeof(VkDeviceQueueCreateInfo)); + size_t i = 0; + if(kvfdevice->queues.graphics != -1) + { + queue_create_infos[i].sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO; + queue_create_infos[i].queueFamilyIndex = kvfdevice->queues.graphics; + queue_create_infos[i].queueCount = 1; + queue_create_infos[i].pQueuePriorities = &queue_priority; + queue_create_infos[i].flags = 0; + queue_create_infos[i].pNext = NULL; + i++; + } + if(kvfdevice->queues.present != -1 && kvfdevice->queues.present != kvfdevice->queues.graphics) + { + queue_create_infos[i].sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO; + queue_create_infos[i].queueFamilyIndex = kvfdevice->queues.present; + queue_create_infos[i].queueCount = 1; + queue_create_infos[i].pQueuePriorities = &queue_priority; + queue_create_infos[i].flags = 0; + queue_create_infos[i].pNext = NULL; + i++; + } + if(kvfdevice->queues.compute != -1 && kvfdevice->queues.present != kvfdevice->queues.compute && kvfdevice->queues.graphics != kvfdevice->queues.compute) + { + queue_create_infos[i].sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO; + queue_create_infos[i].queueFamilyIndex = kvfdevice->queues.compute; + queue_create_infos[i].queueCount = 1; + queue_create_infos[i].pQueuePriorities = &queue_priority; + queue_create_infos[i].flags = 0; + queue_create_infos[i].pNext = NULL; + i++; + } VkDeviceCreateInfo createInfo; createInfo.sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO; - createInfo.queueCreateInfoCount = (kvfdevice->queues.graphics == kvfdevice->queues.present ? 
1 : 2); - createInfo.pQueueCreateInfos = queue_create_info; + createInfo.queueCreateInfoCount = i; + createInfo.pQueueCreateInfos = queue_create_infos; createInfo.pEnabledFeatures = features; createInfo.enabledExtensionCount = extensions_count; createInfo.ppEnabledExtensionNames = extensions; @@ -1273,6 +1361,74 @@ VkDevice kvfCreateDevice(VkPhysicalDevice physical, const char** extensions, uin return device; } +VkDevice kvfCreateDefaultDevicePhysicalDeviceAndCustomQueues(VkPhysicalDevice physical, int32_t graphics_queue, int32_t present_queue, int32_t compute_queue) +{ + const char* extensions[] = { VK_KHR_SWAPCHAIN_EXTENSION_NAME }; + VkPhysicalDeviceFeatures device_features = { VK_FALSE }; + return kvfCreateDeviceCustomPhysicalDeviceAndQueues(physical, extensions, sizeof(extensions) / sizeof(extensions[0]), &device_features, graphics_queue, present_queue, compute_queue); +} + +VkDevice kvfCreateDeviceCustomPhysicalDeviceAndQueues(VkPhysicalDevice physical, const char** extensions, uint32_t extensions_count, VkPhysicalDeviceFeatures* features, int32_t graphics_queue, int32_t present_queue, int32_t compute_queue) +{ + const float queue_priority = 1.0f; + + uint32_t queue_count = 0; + queue_count += (graphics_queue != -1); + queue_count += (present_queue != -1); + queue_count += (compute_queue != -1); + + VkDeviceQueueCreateInfo* queue_create_infos = (VkDeviceQueueCreateInfo*)KVF_MALLOC(queue_count * sizeof(VkDeviceQueueCreateInfo)); + size_t i = 0; + if(graphics_queue != -1) + { + queue_create_infos[i].sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO; + queue_create_infos[i].queueFamilyIndex = graphics_queue; + queue_create_infos[i].queueCount = 1; + queue_create_infos[i].pQueuePriorities = &queue_priority; + queue_create_infos[i].flags = 0; + queue_create_infos[i].pNext = NULL; + i++; + } + if(present_queue != -1 && present_queue != graphics_queue) + { + queue_create_infos[i].sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO; + queue_create_infos[i].queueFamilyIndex = present_queue; + queue_create_infos[i].queueCount = 1; + queue_create_infos[i].pQueuePriorities = &queue_priority; + queue_create_infos[i].flags = 0; + queue_create_infos[i].pNext = NULL; + i++; + } + if(compute_queue != -1 && present_queue != compute_queue && graphics_queue != compute_queue) + { + queue_create_infos[i].sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO; + queue_create_infos[i].queueFamilyIndex = compute_queue; + queue_create_infos[i].queueCount = 1; + queue_create_infos[i].pQueuePriorities = &queue_priority; + queue_create_infos[i].flags = 0; + queue_create_infos[i].pNext = NULL; + i++; + } + + VkDeviceCreateInfo createInfo; + createInfo.sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO; + createInfo.queueCreateInfoCount = queue_count; + createInfo.pQueueCreateInfos = queue_create_infos; + createInfo.pEnabledFeatures = features; + createInfo.enabledExtensionCount = extensions_count; + createInfo.ppEnabledExtensionNames = extensions; + createInfo.enabledLayerCount = 0; + createInfo.ppEnabledLayerNames = NULL; + createInfo.flags = 0; + createInfo.pNext = NULL; + + VkDevice device; + __kvfCheckVk(vkCreateDevice(physical, &createInfo, NULL, &device)); + __kvfCompleteDeviceCustomPhysicalDeviceAndQueues(physical, device, graphics_queue, present_queue, compute_queue); + + return device; +} + void kvfDestroyDevice(VkDevice device) { if(device == VK_NULL_HANDLE) @@ -1287,11 +1443,20 @@ VkQueue kvfGetDeviceQueue(VkDevice device, KvfQueueType queue) KVF_ASSERT(kvfdevice != NULL); VkQueue vk_queue = VK_NULL_HANDLE; 
if(queue == KVF_GRAPHICS_QUEUE) + { + KVF_ASSERT(kvfdevice->queues.graphics != -1); vkGetDeviceQueue(device, kvfdevice->queues.graphics, 0, &vk_queue); + } else if(queue == KVF_PRESENT_QUEUE) + { + KVF_ASSERT(kvfdevice->queues.present != -1); vkGetDeviceQueue(device, kvfdevice->queues.present, 0, &vk_queue); + } else if(queue == KVF_COMPUTE_QUEUE) + { + KVF_ASSERT(kvfdevice->queues.compute != -1); vkGetDeviceQueue(device, kvfdevice->queues.compute, 0, &vk_queue); + } return vk_queue; } @@ -1311,24 +1476,90 @@ uint32_t kvfGetDeviceQueueFamily(VkDevice device, KvfQueueType queue) return 0; } -bool kvfQueuePresentKHR(VkDevice device, VkSemaphore wait, VkSwapchainKHR swapchain, uint32_t image_index) +#ifndef KVF_NO_KHR + bool kvfQueuePresentKHR(VkDevice device, VkSemaphore wait, VkSwapchainKHR swapchain, uint32_t image_index) + { + KVF_ASSERT(device != VK_NULL_HANDLE); + VkPresentInfoKHR present_info = {}; + present_info.sType = VK_STRUCTURE_TYPE_PRESENT_INFO_KHR; + present_info.waitSemaphoreCount = 1; + present_info.pWaitSemaphores = &wait; + present_info.swapchainCount = 1; + present_info.pSwapchains = &swapchain; + present_info.pImageIndices = &image_index; + VkResult result = vkQueuePresentKHR(kvfGetDeviceQueue(device, KVF_PRESENT_QUEUE), &present_info); + if(result == VK_ERROR_OUT_OF_DATE_KHR || result == VK_SUBOPTIMAL_KHR) + return false; + else + __kvfCheckVk(result); + return true; + } +#endif + +int32_t kvfFindDeviceQueueFamily(VkPhysicalDevice physical, KvfQueueType type) { - KVF_ASSERT(device != VK_NULL_HANDLE); - VkPresentInfoKHR present_info = {}; - present_info.sType = VK_STRUCTURE_TYPE_PRESENT_INFO_KHR; - present_info.waitSemaphoreCount = 1; - present_info.pWaitSemaphores = &wait; - present_info.swapchainCount = 1; - present_info.pSwapchains = &swapchain; - present_info.pImageIndices = &image_index; - VkResult result = vkQueuePresentKHR(kvfGetDeviceQueue(device, KVF_PRESENT_QUEUE), &present_info); - if(result == VK_ERROR_OUT_OF_DATE_KHR || result == VK_SUBOPTIMAL_KHR) - return false; - else - __kvfCheckVk(result); - return true; + KVF_ASSERT(physical != VK_NULL_HANDLE); + KVF_ASSERT(type != KVF_PRESENT_QUEUE && "Use kvfFindDeviceQueueFamilyKHR to find present queue"); + + uint32_t queue_family_count; + vkGetPhysicalDeviceQueueFamilyProperties(physical, &queue_family_count, NULL); + VkQueueFamilyProperties* queue_families = (VkQueueFamilyProperties*)KVF_MALLOC(sizeof(VkQueueFamilyProperties) * queue_family_count); + vkGetPhysicalDeviceQueueFamilyProperties(physical, &queue_family_count, queue_families); + + int32_t queue = -1; + + for(int i = 0; i < queue_family_count; i++) + { + if(type == KVF_COMPUTE_QUEUE) + { + if(queue_families[i].queueFlags & VK_QUEUE_COMPUTE_BIT && (queue_families[i].queueFlags & VK_QUEUE_GRAPHICS_BIT) == 0) + queue = i; + else if(queue != -1 && queue_families[i].queueFlags & VK_QUEUE_COMPUTE_BIT) // else just find a compute queue + queue = i; + } + else if(type == KVF_GRAPHICS_QUEUE) + { + if(queue_families[i].queueFlags & VK_QUEUE_GRAPHICS_BIT) + queue = i; + } + + if(queue != -1) + break; + } + KVF_FREE(queue_families); + return queue; } +#ifndef KVF_NO_KHR + int32_t kvfFindDeviceQueueFamilyKHR(VkPhysicalDevice physical, VkSurfaceKHR surface, KvfQueueType type) + { + KVF_ASSERT(physical != VK_NULL_HANDLE); + KVF_ASSERT(surface != VK_NULL_HANDLE); + + if(type != KVF_PRESENT_QUEUE) + return kvfFindDeviceQueueFamily(physical, type); + + uint32_t queue_family_count; + vkGetPhysicalDeviceQueueFamilyProperties(physical, &queue_family_count, NULL); + 
VkQueueFamilyProperties* queue_families = (VkQueueFamilyProperties*)KVF_MALLOC(sizeof(VkQueueFamilyProperties) * queue_family_count); + vkGetPhysicalDeviceQueueFamilyProperties(physical, &queue_family_count, queue_families); + + int32_t queue = -1; + + for(int i = 0; i < queue_family_count; i++) + { + VkBool32 present_support = false; + vkGetPhysicalDeviceSurfaceSupportKHR(physical, i, surface, &present_support); + if(present_support) + queue = i; + if(queue != -1) + break; + } + KVF_FREE(queue_families); + return queue; + } +#endif + VkFence kvfCreateFence(VkDevice device) { KVF_ASSERT(device != VK_NULL_HANDLE); @@ -1373,151 +1604,153 @@ void kvfDestroySemaphore(VkDevice device, VkSemaphore semaphore) vkDestroySemaphore(device, semaphore, NULL); } -__KvfSwapchainSupportInternal __kvfQuerySwapchainSupport(VkPhysicalDevice physical, VkSurfaceKHR surface) -{ - __KvfSwapchainSupportInternal support; - - __kvfCheckVk(vkGetPhysicalDeviceSurfaceCapabilitiesKHR(physical, surface, &support.capabilities)); - - vkGetPhysicalDeviceSurfaceFormatsKHR(physical, surface, &support.formatsCount, NULL); - if(support.formatsCount != 0) +#ifndef KVF_NO_KHR + __KvfSwapchainSupportInternal __kvfQuerySwapchainSupport(VkPhysicalDevice physical, VkSurfaceKHR surface) { - support.formats = (VkSurfaceFormatKHR*)KVF_MALLOC(sizeof(VkSurfaceFormatKHR) * support.formatsCount); - vkGetPhysicalDeviceSurfaceFormatsKHR(physical, surface, &support.formatsCount, support.formats); + __KvfSwapchainSupportInternal support; + + __kvfCheckVk(vkGetPhysicalDeviceSurfaceCapabilitiesKHR(physical, surface, &support.capabilities)); + + vkGetPhysicalDeviceSurfaceFormatsKHR(physical, surface, &support.formats_count, NULL); + if(support.formats_count != 0) + { + support.formats = (VkSurfaceFormatKHR*)KVF_MALLOC(sizeof(VkSurfaceFormatKHR) * support.formats_count); + vkGetPhysicalDeviceSurfaceFormatsKHR(physical, surface, &support.formats_count, support.formats); + } + + vkGetPhysicalDeviceSurfacePresentModesKHR(physical, surface, &support.presentModes_count, NULL); + if(support.presentModes_count != 0) + { + support.presentModes = (VkPresentModeKHR*)KVF_MALLOC(sizeof(VkPresentModeKHR) * support.presentModes_count); + vkGetPhysicalDeviceSurfacePresentModesKHR(physical, surface, &support.presentModes_count, support.presentModes); + } + return support; } - vkGetPhysicalDeviceSurfacePresentModesKHR(physical, surface, &support.presentModesCount, NULL); - if(support.presentModesCount != 0) + VkSurfaceFormatKHR __kvfChooseSwapSurfaceFormat(__KvfSwapchainSupportInternal* support) { - support.presentModes = (VkPresentModeKHR*)KVF_MALLOC(sizeof(VkPresentModeKHR) * support.presentModesCount); - vkGetPhysicalDeviceSurfacePresentModesKHR(physical, surface, &support.presentModesCount, support.presentModes); - } - return support; -} - -VkSurfaceFormatKHR __kvfChooseSwapSurfaceFormat(__KvfSwapchainSupportInternal* support) -{ - for(int i = 0; i < support->formatsCount; i++) - { - if(support->formats[i].format == VK_FORMAT_R8G8B8A8_SRGB && support->formats[i].colorSpace == VK_COLOR_SPACE_SRGB_NONLINEAR_KHR) - return support->formats[i]; - } - return support->formats[0]; -} - -VkPresentModeKHR __kvfChooseSwapPresentMode(__KvfSwapchainSupportInternal* support, bool tryVsync) -{ - if(tryVsync == false) - return VK_PRESENT_MODE_IMMEDIATE_KHR; - for(int i = 0; i < support->presentModesCount; i++) - { - if(support->presentModes[i] == VK_PRESENT_MODE_MAILBOX_KHR) - return support->presentModes[i]; - } - return VK_PRESENT_MODE_FIFO_KHR; -} - -uint32_t 
__kvfClamp(uint32_t i, uint32_t min, uint32_t max) -{ - const uint32_t t = i < min ? min : i; - return t > max ? max : t; -} - -VkSwapchainKHR kvfCreateSwapchainKHR(VkDevice device, VkPhysicalDevice physical, VkSurfaceKHR surface, VkExtent2D extent, bool tryVsync) -{ - KVF_ASSERT(device != VK_NULL_HANDLE); - VkSwapchainKHR swapchain; - __KvfSwapchainSupportInternal support = __kvfQuerySwapchainSupport(physical, surface); - - VkSurfaceFormatKHR surfaceFormat = __kvfChooseSwapSurfaceFormat(&support); - VkPresentModeKHR presentMode = __kvfChooseSwapPresentMode(&support, tryVsync); - - uint32_t imageCount = support.capabilities.minImageCount + 1; - if(support.capabilities.maxImageCount > 0 && imageCount > support.capabilities.maxImageCount) - imageCount = support.capabilities.maxImageCount; - - __KvfDevice* kvfdevice = __kvfGetKvfDeviceFromVkDevice(device); - KVF_ASSERT(kvfdevice != NULL); - - uint32_t queueFamilyIndices[] = { (uint32_t)kvfdevice->queues.graphics, (uint32_t)kvfdevice->queues.present }; - - if(support.capabilities.currentExtent.width != UINT32_MAX) - extent = support.capabilities.currentExtent; - else - { - extent.width = __kvfClamp(extent.width, support.capabilities.minImageExtent.width, support.capabilities.maxImageExtent.width); - extent.height = __kvfClamp(extent.height, support.capabilities.minImageExtent.height, support.capabilities.maxImageExtent.height); + for(int i = 0; i < support->formats_count; i++) + { + if(support->formats[i].format == VK_FORMAT_R8G8B8A8_SRGB && support->formats[i].colorSpace == VK_COLOR_SPACE_SRGB_NONLINEAR_KHR) + return support->formats[i]; + } + return support->formats[0]; } - VkSwapchainCreateInfoKHR createInfo = {}; - createInfo.sType = VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR; - createInfo.surface = surface; - createInfo.minImageCount = imageCount; - createInfo.imageFormat = surfaceFormat.format; - createInfo.imageColorSpace = surfaceFormat.colorSpace; - createInfo.imageExtent = extent; - createInfo.imageArrayLayers = 1; - createInfo.imageUsage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT; - createInfo.preTransform = support.capabilities.currentTransform; - createInfo.compositeAlpha = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR; - createInfo.presentMode = presentMode; - createInfo.clipped = VK_TRUE; - createInfo.oldSwapchain = VK_NULL_HANDLE; - - if(kvfdevice->queues.graphics != kvfdevice->queues.present) + VkPresentModeKHR __kvfChooseSwapPresentMode(__KvfSwapchainSupportInternal* support, bool try_vsync) { - createInfo.imageSharingMode = VK_SHARING_MODE_CONCURRENT; - createInfo.queueFamilyIndexCount = 2; - createInfo.pQueueFamilyIndices = queueFamilyIndices; + if(try_vsync == false) + return VK_PRESENT_MODE_IMMEDIATE_KHR; + for(int i = 0; i < support->presentModes_count; i++) + { + if(support->presentModes[i] == VK_PRESENT_MODE_MAILBOX_KHR) + return support->presentModes[i]; + } + return VK_PRESENT_MODE_FIFO_KHR; } - else - createInfo.imageSharingMode = VK_SHARING_MODE_EXCLUSIVE; - __kvfCheckVk(vkCreateSwapchainKHR(device, &createInfo, NULL, &swapchain)); + uint32_t __kvfClamp(uint32_t i, uint32_t min, uint32_t max) + { + const uint32_t t = i < min ? min : i; + return t > max ? 
max : t; + } - uint32_t images_count; - vkGetSwapchainImagesKHR(device, swapchain, (uint32_t*)&images_count, NULL); + VkSwapchainKHR kvfCreateSwapchainKHR(VkDevice device, VkPhysicalDevice physical, VkSurfaceKHR surface, VkExtent2D extent, bool try_vsync) + { + KVF_ASSERT(device != VK_NULL_HANDLE); + VkSwapchainKHR swapchain; + __KvfSwapchainSupportInternal support = __kvfQuerySwapchainSupport(physical, surface); - __kvfAddSwapchainToArray(swapchain, support, surfaceFormat.format, images_count, extent); + VkSurfaceFormatKHR surfaceFormat = __kvfChooseSwapSurfaceFormat(&support); + VkPresentModeKHR presentMode = __kvfChooseSwapPresentMode(&support, try_vsync); - return swapchain; -} + uint32_t image_count = support.capabilities.minImageCount + 1; + if(support.capabilities.maxImageCount > 0 && image_count > support.capabilities.maxImageCount) + image_count = support.capabilities.maxImageCount; -VkFormat kvfGetSwapchainImagesFormat(VkSwapchainKHR swapchain) -{ - __KvfSwapchain* kvf_swapchain = __kvfGetKvfSwapchainFromVkSwapchainKHR(swapchain); - KVF_ASSERT(kvf_swapchain != NULL); - return kvf_swapchain->images_format; -} + __KvfDevice* kvfdevice = __kvfGetKvfDeviceFromVkDevice(device); + KVF_ASSERT(kvfdevice != NULL); -uint32_t kvfGetSwapchainImagesCount(VkSwapchainKHR swapchain) -{ - __KvfSwapchain* kvf_swapchain = __kvfGetKvfSwapchainFromVkSwapchainKHR(swapchain); - KVF_ASSERT(kvf_swapchain != NULL); - return kvf_swapchain->images_count; -} + uint32_t queue_family_indices[] = { (uint32_t)kvfdevice->queues.graphics, (uint32_t)kvfdevice->queues.present }; -uint32_t kvfGetSwapchainMinImagesCount(VkSwapchainKHR swapchain) -{ - __KvfSwapchain* kvf_swapchain = __kvfGetKvfSwapchainFromVkSwapchainKHR(swapchain); - KVF_ASSERT(kvf_swapchain != NULL); - return kvf_swapchain->support.capabilities.minImageCount; -} + if(support.capabilities.currentExtent.width != UINT32_MAX) + extent = support.capabilities.currentExtent; + else + { + extent.width = __kvfClamp(extent.width, support.capabilities.minImageExtent.width, support.capabilities.maxImageExtent.width); + extent.height = __kvfClamp(extent.height, support.capabilities.minImageExtent.height, support.capabilities.maxImageExtent.height); + } -VkExtent2D kvfGetSwapchainImagesSize(VkSwapchainKHR swapchain) -{ - __KvfSwapchain* kvf_swapchain = __kvfGetKvfSwapchainFromVkSwapchainKHR(swapchain); - KVF_ASSERT(kvf_swapchain != NULL); - return kvf_swapchain->images_extent; -} + VkSwapchainCreateInfoKHR createInfo = {}; + createInfo.sType = VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR; + createInfo.surface = surface; + createInfo.minImageCount = image_count; + createInfo.imageFormat = surfaceFormat.format; + createInfo.imageColorSpace = surfaceFormat.colorSpace; + createInfo.imageExtent = extent; + createInfo.imageArrayLayers = 1; + createInfo.imageUsage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT; + createInfo.preTransform = support.capabilities.currentTransform; + createInfo.compositeAlpha = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR; + createInfo.presentMode = presentMode; + createInfo.clipped = VK_TRUE; + createInfo.oldSwapchain = VK_NULL_HANDLE; -void kvfDestroySwapchainKHR(VkDevice device, VkSwapchainKHR swapchain) -{ - if(swapchain == VK_NULL_HANDLE) - return; - KVF_ASSERT(device != VK_NULL_HANDLE); - __kvfDestroySwapchain(device, swapchain); -} + if(kvfdevice->queues.graphics != kvfdevice->queues.present) + { + createInfo.imageSharingMode = VK_SHARING_MODE_CONCURRENT; + createInfo.queueFamilyIndexCount = 2; + createInfo.pQueueFamilyIndices = queue_family_indices; + } + 
else + createInfo.imageSharingMode = VK_SHARING_MODE_EXCLUSIVE; + + __kvfCheckVk(vkCreateSwapchainKHR(device, &createInfo, NULL, &swapchain)); + + uint32_t images_count; + vkGetSwapchainImagesKHR(device, swapchain, (uint32_t*)&images_count, NULL); + + __kvfAddSwapchainToArray(swapchain, support, surfaceFormat.format, images_count, extent); + + return swapchain; + } + + VkFormat kvfGetSwapchainImagesFormat(VkSwapchainKHR swapchain) + { + __KvfSwapchain* kvf_swapchain = __kvfGetKvfSwapchainFromVkSwapchainKHR(swapchain); + KVF_ASSERT(kvf_swapchain != NULL); + return kvf_swapchain->images_format; + } + + uint32_t kvfGetSwapchainImagesCount(VkSwapchainKHR swapchain) + { + __KvfSwapchain* kvf_swapchain = __kvfGetKvfSwapchainFromVkSwapchainKHR(swapchain); + KVF_ASSERT(kvf_swapchain != NULL); + return kvf_swapchain->images_count; + } + + uint32_t kvfGetSwapchainMinImagesCount(VkSwapchainKHR swapchain) + { + __KvfSwapchain* kvf_swapchain = __kvfGetKvfSwapchainFromVkSwapchainKHR(swapchain); + KVF_ASSERT(kvf_swapchain != NULL); + return kvf_swapchain->support.capabilities.minImageCount; + } + + VkExtent2D kvfGetSwapchainImagesSize(VkSwapchainKHR swapchain) + { + __KvfSwapchain* kvf_swapchain = __kvfGetKvfSwapchainFromVkSwapchainKHR(swapchain); + KVF_ASSERT(kvf_swapchain != NULL); + return kvf_swapchain->images_extent; + } + + void kvfDestroySwapchainKHR(VkDevice device, VkSwapchainKHR swapchain) + { + if(swapchain == VK_NULL_HANDLE) + return; + KVF_ASSERT(device != VK_NULL_HANDLE); + __kvfDestroySwapchain(device, swapchain); + } +#endif VkImage kvfCreateImage(VkDevice device, uint32_t width, uint32_t height, VkFormat format, VkImageTiling tiling, VkImageUsageFlags usage, KvfImageType type) { @@ -1564,7 +1797,7 @@ void kvfImageToBuffer(VkCommandBuffer cmd, VkBuffer dst, VkImage src, size_t buf region.imageSubresource.layerCount = 1; region.imageOffset = offset; region.imageExtent = extent; - vkCmdCopyImageToBuffer(cmd, src, dst, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, 1, ®ion); + vkCmdCopyImageToBuffer(cmd, src, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, dst, 1, ®ion); } void kvfDestroyImage(VkDevice device, VkImage image) @@ -1899,13 +2132,15 @@ VkAttachmentDescription kvfBuildAttachmentDescription(KvfImageType type, VkForma return attachment; } -VkAttachmentDescription kvfBuildSwapchainAttachmentDescription(VkSwapchainKHR swapchain, bool clear) -{ - __KvfSwapchain* kvf_swapchain = __kvfGetKvfSwapchainFromVkSwapchainKHR(swapchain); - KVF_ASSERT(kvf_swapchain != NULL); - KVF_ASSERT(kvf_swapchain->images_count != 0); - return kvfBuildAttachmentDescription(KVF_IMAGE_COLOR, kvf_swapchain->images_format, VK_IMAGE_LAYOUT_UNDEFINED,VK_IMAGE_LAYOUT_PRESENT_SRC_KHR, clear, VK_SAMPLE_COUNT_1_BIT); -} +#ifndef KVF_NO_KHR + VkAttachmentDescription kvfBuildSwapchainAttachmentDescription(VkSwapchainKHR swapchain, bool clear) + { + __KvfSwapchain* kvf_swapchain = __kvfGetKvfSwapchainFromVkSwapchainKHR(swapchain); + KVF_ASSERT(kvf_swapchain != NULL); + KVF_ASSERT(kvf_swapchain->images_count != 0); + return kvfBuildAttachmentDescription(KVF_IMAGE_COLOR, kvf_swapchain->images_format, VK_IMAGE_LAYOUT_UNDEFINED,VK_IMAGE_LAYOUT_PRESENT_SRC_KHR, clear, VK_SAMPLE_COUNT_1_BIT); + } +#endif VkRenderPass kvfCreateRenderPass(VkDevice device, VkAttachmentDescription* attachments, size_t attachments_count, VkPipelineBindPoint bind_point) { From f50dd2d94641a68cf199d21ff5fa72a45e4823ff Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Sat, 14 Sep 2024 09:55:19 +0200 Subject: [PATCH 020/131] fixing compilation issues --- Makefile | 116 +- 
compile_commands.json | 1890 +++++++---------- example/build.sh | 4 +- runtime/Includes/Core/SDLManager.h | 5 + runtime/Includes/Platform/Inputs.h | 2 +- runtime/Includes/PreCompiled.h | 2 +- runtime/Includes/Renderer/Image.h | 2 +- runtime/Includes/Renderer/RenderCore.h | 1 - .../Renderer/Vulkan/VulkanPrototypes.h | 4 +- runtime/Includes/Utils/NonOwningPtr.inl | 2 + runtime/Sources/Core/Application.cpp | 2 - runtime/Sources/Core/Logs.cpp | 10 +- runtime/Sources/Core/SDLManager.cpp | 38 +- runtime/Sources/Platform/Inputs.cpp | 20 + runtime/Sources/Renderer/Descriptor.cpp | 2 +- runtime/Sources/Renderer/Image.cpp | 38 +- .../Sources/Renderer/Pipelines/Graphics.cpp | 6 +- runtime/Sources/Renderer/Pipelines/Shader.cpp | 23 +- runtime/Sources/Renderer/RenderCore.cpp | 17 +- .../Sources/Renderer/RenderPasses/2DPass.cpp | 12 +- .../Renderer/RenderPasses/FinalPass.cpp | 2 +- runtime/Sources/Renderer/Renderer.cpp | 22 +- runtime/Sources/Renderer/SceneRenderer.cpp | 2 +- .../Sources/Renderer/Vulkan/VulkanLoader.cpp | 22 +- .../Sources/Renderer/Vulkan/VulkanLoader.h | 6 +- third_party/kvf.h | 27 +- 26 files changed, 1036 insertions(+), 1241 deletions(-) diff --git a/Makefile b/Makefile index 93a6343..27f0102 100644 --- a/Makefile +++ b/Makefile @@ -1,102 +1,120 @@ NAME = libmlx.so +MAKE = make --no-print-directory -SRCS = $(wildcard $(addsuffix /*.cpp, ./runtime/Sources/Core)) -SRCS += $(wildcard $(addsuffix /*.cpp, ./runtime/Sources/Platform)) -SRCS += $(wildcard $(addsuffix /*.cpp, ./runtime/Sources/Graphics)) -SRCS += $(wildcard $(addsuffix /*.cpp, ./runtime/Sources/Renderer)) -SRCS += $(wildcard $(addsuffix /*.cpp, ./runtime/Sources/Renderer/Vulkan)) -SRCS += $(wildcard $(addsuffix /*.cpp, ./runtime/Sources/Renderer/Pipelines)) -SRCS += $(wildcard $(addsuffix /*.cpp, ./runtime/Sources/Renderer/RenderPasses)) - -OBJ_DIR = objs/makefile -OBJS = $(addprefix $(OBJ_DIR)/, $(SRCS:.cpp=.o)) - -PCH = ./runtime/Includes/PreCompiled.h -GCH = ./runtime/Includes/PreCompiled.h.gch - -OS = $(shell uname -s) +OS ?= $(shell uname -s) DEBUG ?= false TOOLCHAIN ?= clang IMAGES_OPTIMIZED ?= true FORCE_INTEGRATED_GPU ?= false GRAPHICS_MEMORY_DUMP ?= false PROFILER ?= false -FORCE_WAYLAND ?= false +_ENABLEDFLAGS = -MODE = "release" +SRCS = $(wildcard $(addsuffix /*.cpp, runtime/Sources/Core)) +SRCS += $(wildcard $(addsuffix /*.cpp, runtime/Sources/Graphics)) +SRCS += $(wildcard $(addsuffix /*.cpp, runtime/Sources/Platform)) +SRCS += $(wildcard $(addsuffix /*.cpp, runtime/Sources/Renderer)) +SRCS += $(wildcard $(addsuffix /*.cpp, runtime/Sources/Renderer/**)) + +OBJ_DIR = objs/make/$(shell echo $(OS) | tr '[:upper:]' '[:lower:]') +OBJS := $(addprefix $(OBJ_DIR)/, $(SRCS:.cpp=.o)) CXX = clang++ - -CXXFLAGS = -std=c++20 -O3 -fPIC -Wall -Wextra -Wno-deprecated -DSDL_MAIN_HANDLED +CXXFLAGS = -std=c++20 -O3 -fPIC -Wall -Wextra -DSDL_MAIN_HANDLED INCLUDES = -I./includes -I./runtime/Includes -I./runtime/Sources -I./third_party -LDLIBS = - ifeq ($(TOOLCHAIN), gcc) CXX = g++ + CXXFLAGS += -Wno-error=cpp +else + CXXFLAGS += -Wno-error=#warning endif ifeq ($(OS), Darwin) - LDLIBS += -L /opt/homebrew/lib -lSDL2 + LDFLAGS += -L /opt/homebrew/lib -lSDL2 CXXFLAGS += -I /opt/homebrew/include NAME = libmlx.dylib endif ifeq ($(DEBUG), true) - CXXFLAGS += -g -D DEBUG - MODE = "debug" + CXXFLAGS += -g3 -D DEBUG + LDFLAGS += -rdynamic endif ifeq ($(FORCE_INTEGRATED_GPU), true) - CXXFLAGS += -D FORCE_INTEGRATED_GPU + _ENABLEDFLAGS += FORCE_INTEGRATED_GPU endif ifeq ($(IMAGES_OPTIMIZED), true) - CXXFLAGS += -D IMAGE_OPTIMIZED + 
_ENABLEDFLAGS += IMAGE_OPTIMIZED endif ifeq ($(GRAPHICS_MEMORY_DUMP), true) - CXXFLAGS += -D GRAPHICS_MEMORY_DUMP + _ENABLEDFLAGS += GRAPHICS_MEMORY_DUMP endif ifeq ($(PROFILER), true) - CXXFLAGS += -D PROFILER + _ENABLEDFLAGS += PROFILER endif -ifeq ($(FORCE_WAYLAND), true) - CXXFLAGS += -D FORCE_WAYLAND -endif +CXXFLAGS += $(addprefix -D, $(_ENABLEDFLAGS)) RM = rm -rf -$(OBJ_DIR)/%.o: %.cpp $(GCH) - @printf "\033[1;32m[compiling... "$(MODE)" "$(CXX)"]\033[1;00m "$<"\n" +TPUT = tput -T xterm-256color +_RESET := $(shell $(TPUT) sgr0) +_BOLD := $(shell $(TPUT) bold) +_ITALIC := $(shell $(TPUT) sitm) +_UNDER := $(shell $(TPUT) smul) +_GREEN := $(shell $(TPUT) setaf 2) +_YELLOW := $(shell $(TPUT) setaf 3) +_RED := $(shell $(TPUT) setaf 1) +_GRAY := $(shell $(TPUT) setaf 8) +_PURPLE := $(shell $(TPUT) setaf 5) + +ifeq ($(DEBUG), true) + MODE := $(_RESET)$(_PURPLE)$(_BOLD)Debug$(_RESET)$(_PURPLE) + COLOR := $(_PURPLE) +else + MODE := $(_RESET)$(_GREEN)$(_BOLD)Release$(_RESET)$(_GREEN) + COLOR := $(_GREEN) +endif + +OBJS_TOTAL = $(words $(OBJS)) +N_OBJS := $(shell find $(OBJ_DIR) -type f -name '*.o' 2>/dev/null | wc -l) +OBJS_TOTAL := $(shell echo $$(( $(OBJS_TOTAL) - $(N_OBJS) ))) +CURR_OBJ = 0 + +$(OBJ_DIR)/%.o: %.cpp + @mkdir -p $(dir $@) + @$(eval CURR_OBJ=$(shell echo $$(( $(CURR_OBJ) + 1 )))) + @$(eval PERCENT=$(shell echo $$(( $(CURR_OBJ) * 100 / $(OBJS_TOTAL) )))) + @printf "$(COLOR)($(_BOLD)%3s%%$(_RESET)$(COLOR)) $(_RESET)Compiling $(_BOLD)$<$(_RESET)\n" "$(PERCENT)" @$(CXX) $(CXXFLAGS) $(INCLUDES) -c $< -o $@ -all: $(NAME) +all: _printbuildinfos + @$(MAKE) $(NAME) -$(GCH): - @printf "\033[1;32m[compiling... "$(MODE)" "$(CXX)"]\033[1;00m PreCompiled header\n" - @$(CXX) $(CXXFLAGS) $(INCLUDES) -c $(PCH) -o $(GCH) +$(NAME): $(OBJS) + @printf "Linking $(_BOLD)$(NAME)$(_RESET)\n" + @$(CXX) -shared -o $(NAME) $(OBJS) $(LDFLAGS) + @printf "$(_BOLD)$(NAME)$(_RESET) compiled $(COLOR)$(_BOLD)successfully$(_RESET)\n" -$(NAME): $(OBJ_DIR) $(GCH) $(OBJS) - @printf "\033[1;32m[linking ... 
"$(MODE)"]\033[1;00m "$@"\n" - @$(CXX) -shared -o $(NAME) $(OBJS) $(LDLIBS) - @printf "\033[1;32m[build finished]\033[1;00m\n" +_printbuildinfos: + @printf "$(_PURPLE)$(_BOLD)MacroLibX $(_RESET)Compiling in $(_BOLD)$(MODE)$(_RESET) mode on $(_BOLD)$(OS)$(_RESET) | Using $(_BOLD)$(CXX)$(_RESET), flags: $(_BOLD)$(_ENABLEDFLAGS)$(_RESET)\n" -$(OBJ_DIR): - @mkdir -p $(sort $(addprefix $(OBJ_DIR)/, $(dir $(SRCS)))) - @printf "\033[1;32m[created objs directory]\033[1;00m\n" +debug: + @$(MAKE) all DEBUG=true -j$(shell nproc) clean: @$(RM) $(OBJ_DIR) - @printf "\033[1;32m[object files removed]\033[1;00m\n" + @printf "Cleaned $(_BOLD)$(OBJ_DIR)$(_RESET)\n" fclean: clean @$(RM) $(NAME) - @$(RM) $(GCH) - @printf "\033[1;32m["$(NAME)" and gch removed]\033[1;00m\n" + @printf "Cleaned $(_BOLD)$(NAME)$(_RESET)\n" -re: fclean all +re: fclean _printbuildinfos + @$(MAKE) $(NAME) -.PHONY: all clean fclean re pch +.PHONY: all clean debug fclean re diff --git a/compile_commands.json b/compile_commands.json index 650f005..e9651ae 100644 --- a/compile_commands.json +++ b/compile_commands.json @@ -1,1082 +1,812 @@ [ - { - "directory": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX/MacroLibX", - "arguments": [ - "clang++", - "-std=c++17", - "-O3", - "-fPIC", - "-Wall", - "-Wextra", - "-Werror", - "-DSDL_MAIN_HANDLED", - "-Wno-error=", - "-D", - "IMAGE_OPTIMIZED", - "-I./includes", - "-I./src", - "-I./third_party", - "-c", - "src/core/application.cpp", - "-o", - "objs/makefile/./src/core/application.o" - ], - "file": "src/core/application.cpp" - }, - { - "directory": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX/MacroLibX", - "arguments": [ - "clang++", - "-std=c++17", - "-O3", - "-fPIC", - "-Wall", - "-Wextra", - "-Werror", - "-DSDL_MAIN_HANDLED", - "-Wno-error=", - "-D", - "IMAGE_OPTIMIZED", - "-I./includes", - "-I./src", - "-I./third_party", - "-c", - "src/core/bridge.cpp", - "-o", - "objs/makefile/./src/core/bridge.o" - ], - "file": "src/core/bridge.cpp" - }, - { - "directory": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX/MacroLibX", - "arguments": [ - "clang++", - "-std=c++17", - "-O3", - "-fPIC", - "-Wall", - "-Wextra", - "-Werror", - "-DSDL_MAIN_HANDLED", - "-Wno-error=", - "-D", - "IMAGE_OPTIMIZED", - "-I./includes", - "-I./src", - "-I./third_party", - "-c", - "src/core/errors.cpp", - "-o", - "objs/makefile/./src/core/errors.o" - ], - "file": "src/core/errors.cpp" - }, - { - "directory": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX/MacroLibX", - "arguments": [ - "clang++", - "-std=c++17", - "-O3", - "-fPIC", - "-Wall", - "-Wextra", - "-Werror", - "-DSDL_MAIN_HANDLED", - "-Wno-error=", - "-D", - "IMAGE_OPTIMIZED", - "-I./includes", - "-I./src", - "-I./third_party", - "-c", - "src/core/graphics.cpp", - "-o", - "objs/makefile/./src/core/graphics.o" - ], - "file": "src/core/graphics.cpp" - }, - { - "directory": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX/MacroLibX", - "arguments": [ - "clang++", - "-std=c++17", - "-O3", - "-fPIC", - "-Wall", - "-Wextra", - "-Werror", - "-DSDL_MAIN_HANDLED", - "-Wno-error=", - "-D", - "IMAGE_OPTIMIZED", - "-I./includes", - "-I./src", - "-I./third_party", - "-c", - "src/core/memory.cpp", - "-o", - "objs/makefile/./src/core/memory.o" - ], - "file": "src/core/memory.cpp" - }, - { - "directory": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX/MacroLibX", - "arguments": [ - "clang++", - "-std=c++17", - "-O3", - "-fPIC", - "-Wall", - "-Wextra", - "-Werror", - "-DSDL_MAIN_HANDLED", - "-Wno-error=", - "-D", - 
"IMAGE_OPTIMIZED", - "-I./includes", - "-I./src", - "-I./third_party", - "-c", - "src/core/UUID.cpp", - "-o", - "objs/makefile/./src/core/UUID.o" - ], - "file": "src/core/UUID.cpp" - }, - { - "directory": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX/MacroLibX", - "arguments": [ - "clang++", - "-std=c++17", - "-O3", - "-fPIC", - "-Wall", - "-Wextra", - "-Werror", - "-DSDL_MAIN_HANDLED", - "-Wno-error=", - "-D", - "IMAGE_OPTIMIZED", - "-I./includes", - "-I./src", - "-I./third_party", - "-c", - "src/platform/inputs.cpp", - "-o", - "objs/makefile/./src/platform/inputs.o" - ], - "file": "src/platform/inputs.cpp" - }, - { - "directory": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX/MacroLibX", - "arguments": [ - "clang++", - "-std=c++17", - "-O3", - "-fPIC", - "-Wall", - "-Wextra", - "-Werror", - "-DSDL_MAIN_HANDLED", - "-Wno-error=", - "-D", - "IMAGE_OPTIMIZED", - "-I./includes", - "-I./src", - "-I./third_party", - "-c", - "src/platform/window.cpp", - "-o", - "objs/makefile/./src/platform/window.o" - ], - "file": "src/platform/window.cpp" - }, - { - "directory": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX/MacroLibX", - "arguments": [ - "clang++", - "-std=c++17", - "-O3", - "-fPIC", - "-Wall", - "-Wextra", - "-Werror", - "-DSDL_MAIN_HANDLED", - "-Wno-error=", - "-D", - "IMAGE_OPTIMIZED", - "-I./includes", - "-I./src", - "-I./third_party", - "-c", - "src/renderer/pixel_put.cpp", - "-o", - "objs/makefile/./src/renderer/pixel_put.o" - ], - "file": "src/renderer/pixel_put.cpp" - }, - { - "directory": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX/MacroLibX", - "arguments": [ - "clang++", - "-std=c++17", - "-O3", - "-fPIC", - "-Wall", - "-Wextra", - "-Werror", - "-DSDL_MAIN_HANDLED", - "-Wno-error=", - "-D", - "IMAGE_OPTIMIZED", - "-I./includes", - "-I./src", - "-I./third_party", - "-c", - "src/renderer/renderer.cpp", - "-o", - "objs/makefile/./src/renderer/renderer.o" - ], - "file": "src/renderer/renderer.cpp" - }, - { - "directory": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX/MacroLibX", - "arguments": [ - "clang++", - "-std=c++17", - "-O3", - "-fPIC", - "-Wall", - "-Wextra", - "-Werror", - "-DSDL_MAIN_HANDLED", - "-Wno-error=", - "-D", - "IMAGE_OPTIMIZED", - "-I./includes", - "-I./src", - "-I./third_party", - "-c", - "src/renderer/buffers/vk_buffer.cpp", - "-o", - "objs/makefile/./src/renderer/buffers/vk_buffer.o" - ], - "file": "src/renderer/buffers/vk_buffer.cpp" - }, - { - "directory": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX/MacroLibX", - "arguments": [ - "clang++", - "-std=c++17", - "-O3", - "-fPIC", - "-Wall", - "-Wextra", - "-Werror", - "-DSDL_MAIN_HANDLED", - "-Wno-error=", - "-D", - "IMAGE_OPTIMIZED", - "-I./includes", - "-I./src", - "-I./third_party", - "-c", - "src/renderer/buffers/vk_ubo.cpp", - "-o", - "objs/makefile/./src/renderer/buffers/vk_ubo.o" - ], - "file": "src/renderer/buffers/vk_ubo.cpp" - }, - { - "directory": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX/MacroLibX", - "arguments": [ - "clang++", - "-std=c++17", - "-O3", - "-fPIC", - "-Wall", - "-Wextra", - "-Werror", - "-DSDL_MAIN_HANDLED", - "-Wno-error=", - "-D", - "IMAGE_OPTIMIZED", - "-I./includes", - "-I./src", - "-I./third_party", - "-c", - "src/renderer/buffers/vk_vbo.cpp", - "-o", - "objs/makefile/./src/renderer/buffers/vk_vbo.o" - ], - "file": "src/renderer/buffers/vk_vbo.cpp" - }, - { - "directory": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX/MacroLibX", - "arguments": [ - "clang++", - "-std=c++17", - "-O3", - "-fPIC", - 
"-Wall", - "-Wextra", - "-Werror", - "-DSDL_MAIN_HANDLED", - "-Wno-error=", - "-D", - "IMAGE_OPTIMIZED", - "-I./includes", - "-I./src", - "-I./third_party", - "-c", - "src/renderer/command/cmd_manager.cpp", - "-o", - "objs/makefile/./src/renderer/command/cmd_manager.o" - ], - "file": "src/renderer/command/cmd_manager.cpp" - }, - { - "directory": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX/MacroLibX", - "arguments": [ - "clang++", - "-std=c++17", - "-O3", - "-fPIC", - "-Wall", - "-Wextra", - "-Werror", - "-DSDL_MAIN_HANDLED", - "-Wno-error=", - "-D", - "IMAGE_OPTIMIZED", - "-I./includes", - "-I./src", - "-I./third_party", - "-c", - "src/renderer/command/single_time_cmd_manager.cpp", - "-o", - "objs/makefile/./src/renderer/command/single_time_cmd_manager.o" - ], - "file": "src/renderer/command/single_time_cmd_manager.cpp" - }, - { - "directory": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX/MacroLibX", - "arguments": [ - "clang++", - "-std=c++17", - "-O3", - "-fPIC", - "-Wall", - "-Wextra", - "-Werror", - "-DSDL_MAIN_HANDLED", - "-Wno-error=", - "-D", - "IMAGE_OPTIMIZED", - "-I./includes", - "-I./src", - "-I./third_party", - "-c", - "src/renderer/command/vk_cmd_buffer.cpp", - "-o", - "objs/makefile/./src/renderer/command/vk_cmd_buffer.o" - ], - "file": "src/renderer/command/vk_cmd_buffer.cpp" - }, - { - "directory": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX/MacroLibX", - "arguments": [ - "clang++", - "-std=c++17", - "-O3", - "-fPIC", - "-Wall", - "-Wextra", - "-Werror", - "-DSDL_MAIN_HANDLED", - "-Wno-error=", - "-D", - "IMAGE_OPTIMIZED", - "-I./includes", - "-I./src", - "-I./third_party", - "-c", - "src/renderer/command/vk_cmd_pool.cpp", - "-o", - "objs/makefile/./src/renderer/command/vk_cmd_pool.o" - ], - "file": "src/renderer/command/vk_cmd_pool.cpp" - }, - { - "directory": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX/MacroLibX", - "arguments": [ - "clang++", - "-std=c++17", - "-O3", - "-fPIC", - "-Wall", - "-Wextra", - "-Werror", - "-DSDL_MAIN_HANDLED", - "-Wno-error=", - "-D", - "IMAGE_OPTIMIZED", - "-I./includes", - "-I./src", - "-I./third_party", - "-c", - "src/renderer/core/memory.cpp", - "-o", - "objs/makefile/./src/renderer/core/memory.o" - ], - "file": "src/renderer/core/memory.cpp" - }, - { - "directory": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX/MacroLibX", - "arguments": [ - "clang++", - "-std=c++17", - "-O3", - "-fPIC", - "-Wall", - "-Wextra", - "-Werror", - "-DSDL_MAIN_HANDLED", - "-Wno-error=", - "-D", - "IMAGE_OPTIMIZED", - "-I./includes", - "-I./src", - "-I./third_party", - "-c", - "src/renderer/core/render_core.cpp", - "-o", - "objs/makefile/./src/renderer/core/render_core.o" - ], - "file": "src/renderer/core/render_core.cpp" - }, - { - "directory": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX/MacroLibX", - "arguments": [ - "clang++", - "-std=c++17", - "-O3", - "-fPIC", - "-Wall", - "-Wextra", - "-Werror", - "-DSDL_MAIN_HANDLED", - "-Wno-error=", - "-D", - "IMAGE_OPTIMIZED", - "-I./includes", - "-I./src", - "-I./third_party", - "-c", - "src/renderer/core/vk_device.cpp", - "-o", - "objs/makefile/./src/renderer/core/vk_device.o" - ], - "file": "src/renderer/core/vk_device.cpp" - }, - { - "directory": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX/MacroLibX", - "arguments": [ - "clang++", - "-std=c++17", - "-O3", - "-fPIC", - "-Wall", - "-Wextra", - "-Werror", - "-DSDL_MAIN_HANDLED", - "-Wno-error=", - "-D", - "IMAGE_OPTIMIZED", - "-I./includes", - "-I./src", - "-I./third_party", - "-c", - 
"src/renderer/core/vk_fence.cpp", - "-o", - "objs/makefile/./src/renderer/core/vk_fence.o" - ], - "file": "src/renderer/core/vk_fence.cpp" - }, - { - "directory": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX/MacroLibX", - "arguments": [ - "clang++", - "-std=c++17", - "-O3", - "-fPIC", - "-Wall", - "-Wextra", - "-Werror", - "-DSDL_MAIN_HANDLED", - "-Wno-error=", - "-D", - "IMAGE_OPTIMIZED", - "-I./includes", - "-I./src", - "-I./third_party", - "-c", - "src/renderer/core/vk_instance.cpp", - "-o", - "objs/makefile/./src/renderer/core/vk_instance.o" - ], - "file": "src/renderer/core/vk_instance.cpp" - }, - { - "directory": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX/MacroLibX", - "arguments": [ - "clang++", - "-std=c++17", - "-O3", - "-fPIC", - "-Wall", - "-Wextra", - "-Werror", - "-DSDL_MAIN_HANDLED", - "-Wno-error=", - "-D", - "IMAGE_OPTIMIZED", - "-I./includes", - "-I./src", - "-I./third_party", - "-c", - "src/renderer/core/vk_queues.cpp", - "-o", - "objs/makefile/./src/renderer/core/vk_queues.o" - ], - "file": "src/renderer/core/vk_queues.cpp" - }, - { - "directory": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX/MacroLibX", - "arguments": [ - "clang++", - "-std=c++17", - "-O3", - "-fPIC", - "-Wall", - "-Wextra", - "-Werror", - "-DSDL_MAIN_HANDLED", - "-Wno-error=", - "-D", - "IMAGE_OPTIMIZED", - "-I./includes", - "-I./src", - "-I./third_party", - "-c", - "src/renderer/core/vk_semaphore.cpp", - "-o", - "objs/makefile/./src/renderer/core/vk_semaphore.o" - ], - "file": "src/renderer/core/vk_semaphore.cpp" - }, - { - "directory": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX/MacroLibX", - "arguments": [ - "clang++", - "-std=c++17", - "-O3", - "-fPIC", - "-Wall", - "-Wextra", - "-Werror", - "-DSDL_MAIN_HANDLED", - "-Wno-error=", - "-D", - "IMAGE_OPTIMIZED", - "-I./includes", - "-I./src", - "-I./third_party", - "-c", - "src/renderer/core/vk_surface.cpp", - "-o", - "objs/makefile/./src/renderer/core/vk_surface.o" - ], - "file": "src/renderer/core/vk_surface.cpp" - }, - { - "directory": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX/MacroLibX", - "arguments": [ - "clang++", - "-std=c++17", - "-O3", - "-fPIC", - "-Wall", - "-Wextra", - "-Werror", - "-DSDL_MAIN_HANDLED", - "-Wno-error=", - "-D", - "IMAGE_OPTIMIZED", - "-I./includes", - "-I./src", - "-I./third_party", - "-c", - "src/renderer/core/vk_validation_layers.cpp", - "-o", - "objs/makefile/./src/renderer/core/vk_validation_layers.o" - ], - "file": "src/renderer/core/vk_validation_layers.cpp" - }, - { - "directory": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX/MacroLibX", - "arguments": [ - "clang++", - "-std=c++17", - "-O3", - "-fPIC", - "-Wall", - "-Wextra", - "-Werror", - "-DSDL_MAIN_HANDLED", - "-Wno-error=", - "-D", - "IMAGE_OPTIMIZED", - "-I./includes", - "-I./src", - "-I./third_party", - "-c", - "src/renderer/descriptors/vk_descriptor_pool.cpp", - "-o", - "objs/makefile/./src/renderer/descriptors/vk_descriptor_pool.o" - ], - "file": "src/renderer/descriptors/vk_descriptor_pool.cpp" - }, - { - "directory": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX/MacroLibX", - "arguments": [ - "clang++", - "-std=c++17", - "-O3", - "-fPIC", - "-Wall", - "-Wextra", - "-Werror", - "-DSDL_MAIN_HANDLED", - "-Wno-error=", - "-D", - "IMAGE_OPTIMIZED", - "-I./includes", - "-I./src", - "-I./third_party", - "-c", - "src/renderer/descriptors/vk_descriptor_set.cpp", - "-o", - "objs/makefile/./src/renderer/descriptors/vk_descriptor_set.o" - ], - "file": 
"src/renderer/descriptors/vk_descriptor_set.cpp" - }, - { - "directory": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX/MacroLibX", - "arguments": [ - "clang++", - "-std=c++17", - "-O3", - "-fPIC", - "-Wall", - "-Wextra", - "-Werror", - "-DSDL_MAIN_HANDLED", - "-Wno-error=", - "-D", - "IMAGE_OPTIMIZED", - "-I./includes", - "-I./src", - "-I./third_party", - "-c", - "src/renderer/descriptors/vk_descriptor_set_layout.cpp", - "-o", - "objs/makefile/./src/renderer/descriptors/vk_descriptor_set_layout.o" - ], - "file": "src/renderer/descriptors/vk_descriptor_set_layout.cpp" - }, - { - "directory": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX/MacroLibX", - "arguments": [ - "clang++", - "-std=c++17", - "-O3", - "-fPIC", - "-Wall", - "-Wextra", - "-Werror", - "-DSDL_MAIN_HANDLED", - "-Wno-error=", - "-D", - "IMAGE_OPTIMIZED", - "-I./includes", - "-I./src", - "-I./third_party", - "-c", - "src/renderer/images/texture_atlas.cpp", - "-o", - "objs/makefile/./src/renderer/images/texture_atlas.o" - ], - "file": "src/renderer/images/texture_atlas.cpp" - }, - { - "directory": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX/MacroLibX", - "arguments": [ - "clang++", - "-std=c++17", - "-O3", - "-fPIC", - "-Wall", - "-Wextra", - "-Werror", - "-DSDL_MAIN_HANDLED", - "-Wno-error=", - "-D", - "IMAGE_OPTIMIZED", - "-I./includes", - "-I./src", - "-I./third_party", - "-c", - "src/renderer/images/texture.cpp", - "-o", - "objs/makefile/./src/renderer/images/texture.o" - ], - "file": "src/renderer/images/texture.cpp" - }, - { - "directory": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX/MacroLibX", - "arguments": [ - "clang++", - "-std=c++17", - "-O3", - "-fPIC", - "-Wall", - "-Wextra", - "-Werror", - "-DSDL_MAIN_HANDLED", - "-Wno-error=", - "-D", - "IMAGE_OPTIMIZED", - "-I./includes", - "-I./src", - "-I./third_party", - "-c", - "src/renderer/images/vk_image.cpp", - "-o", - "objs/makefile/./src/renderer/images/vk_image.o" - ], - "file": "src/renderer/images/vk_image.cpp" - }, - { - "directory": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX/MacroLibX", - "arguments": [ - "clang++", - "-std=c++17", - "-O3", - "-fPIC", - "-Wall", - "-Wextra", - "-Werror", - "-DSDL_MAIN_HANDLED", - "-Wno-error=", - "-D", - "IMAGE_OPTIMIZED", - "-I./includes", - "-I./src", - "-I./third_party", - "-c", - "src/renderer/pipeline/pipeline.cpp", - "-o", - "objs/makefile/./src/renderer/pipeline/pipeline.o" - ], - "file": "src/renderer/pipeline/pipeline.cpp" - }, - { - "directory": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX/MacroLibX", - "arguments": [ - "clang++", - "-std=c++17", - "-O3", - "-fPIC", - "-Wall", - "-Wextra", - "-Werror", - "-DSDL_MAIN_HANDLED", - "-Wno-error=", - "-D", - "IMAGE_OPTIMIZED", - "-I./includes", - "-I./src", - "-I./third_party", - "-c", - "src/renderer/renderpass/vk_framebuffer.cpp", - "-o", - "objs/makefile/./src/renderer/renderpass/vk_framebuffer.o" - ], - "file": "src/renderer/renderpass/vk_framebuffer.cpp" - }, - { - "directory": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX/MacroLibX", - "arguments": [ - "clang++", - "-std=c++17", - "-O3", - "-fPIC", - "-Wall", - "-Wextra", - "-Werror", - "-DSDL_MAIN_HANDLED", - "-Wno-error=", - "-D", - "IMAGE_OPTIMIZED", - "-I./includes", - "-I./src", - "-I./third_party", - "-c", - "src/renderer/renderpass/vk_render_pass.cpp", - "-o", - "objs/makefile/./src/renderer/renderpass/vk_render_pass.o" - ], - "file": "src/renderer/renderpass/vk_render_pass.cpp" - }, - { - "directory": 
"/home/kbz_8/Documents/Programmation/42/other/MacroLibX/MacroLibX", - "arguments": [ - "clang++", - "-std=c++17", - "-O3", - "-fPIC", - "-Wall", - "-Wextra", - "-Werror", - "-DSDL_MAIN_HANDLED", - "-Wno-error=", - "-D", - "IMAGE_OPTIMIZED", - "-I./includes", - "-I./src", - "-I./third_party", - "-c", - "src/renderer/swapchain/vk_swapchain.cpp", - "-o", - "objs/makefile/./src/renderer/swapchain/vk_swapchain.o" - ], - "file": "src/renderer/swapchain/vk_swapchain.cpp" - }, - { - "directory": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX/MacroLibX", - "arguments": [ - "clang++", - "-std=c++17", - "-O3", - "-fPIC", - "-Wall", - "-Wextra", - "-Werror", - "-DSDL_MAIN_HANDLED", - "-Wno-error=", - "-D", - "IMAGE_OPTIMIZED", - "-I./includes", - "-I./src", - "-I./third_party", - "-c", - "src/core/fps.cpp", - "-o", - "objs/makefile/./src/core/fps.o" - ], - "file": "src/core/fps.cpp" - }, - { - "directory": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX/MacroLibX", - "arguments": [ - "clang++", - "-std=c++17", - "-O3", - "-fPIC", - "-Wall", - "-Wextra", - "-Werror", - "-DSDL_MAIN_HANDLED", - "-Wno-error=", - "-D", - "IMAGE_OPTIMIZED", - "-I./includes", - "-I./src", - "-I./third_party", - "-c", - "src/core/profiler.cpp", - "-o", - "objs/makefile/./src/core/profiler.o" - ], - "file": "src/core/profiler.cpp" - }, - { - "directory": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX/MacroLibX", - "arguments": [ - "clang++", - "-std=c++17", - "-O3", - "-fPIC", - "-Wall", - "-Wextra", - "-Werror", - "-DSDL_MAIN_HANDLED", - "-Wno-error=", - "-D", - "IMAGE_OPTIMIZED", - "-I./includes", - "-I./src", - "-I./third_party", - "-c", - "src/renderer/descriptors/descriptor_pool_manager.cpp", - "-o", - "objs/makefile/./src/renderer/descriptors/descriptor_pool_manager.o" - ], - "file": "src/renderer/descriptors/descriptor_pool_manager.cpp" - }, - { - "directory": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX/MacroLibX", - "arguments": [ - "clang++", - "-std=c++17", - "-O3", - "-fPIC", - "-Wall", - "-Wextra", - "-Werror", - "-DSDL_MAIN_HANDLED", - "-Wno-error=", - "-D", - "IMAGE_OPTIMIZED", - "-I./includes", - "-I./src", - "-I./third_party", - "-c", - "src/renderer/texts/font.cpp", - "-o", - "objs/makefile/./src/renderer/texts/font.o" - ], - "file": "src/renderer/texts/font.cpp" - }, - { - "directory": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX/MacroLibX", - "arguments": [ - "clang++", - "-std=c++17", - "-O3", - "-fPIC", - "-Wall", - "-Wextra", - "-Werror", - "-DSDL_MAIN_HANDLED", - "-Wno-error=", - "-D", - "IMAGE_OPTIMIZED", - "-I./includes", - "-I./src", - "-I./third_party", - "-c", - "src/renderer/texts/font_library.cpp", - "-o", - "objs/makefile/./src/renderer/texts/font_library.o" - ], - "file": "src/renderer/texts/font_library.cpp" - }, - { - "directory": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX/MacroLibX", - "arguments": [ - "clang++", - "-std=c++17", - "-O3", - "-fPIC", - "-Wall", - "-Wextra", - "-Werror", - "-DSDL_MAIN_HANDLED", - "-Wno-error=", - "-D", - "IMAGE_OPTIMIZED", - "-I./includes", - "-I./src", - "-I./third_party", - "-c", - "src/renderer/texts/text.cpp", - "-o", - "objs/makefile/./src/renderer/texts/text.o" - ], - "file": "src/renderer/texts/text.cpp" - }, - { - "directory": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX/MacroLibX", - "arguments": [ - "clang++", - "-std=c++17", - "-O3", - "-fPIC", - "-Wall", - "-Wextra", - "-Werror", - "-DSDL_MAIN_HANDLED", - "-Wno-error=", - "-D", - "IMAGE_OPTIMIZED", - "-I./includes", - "-I./src", 
- "-I./third_party", - "-c", - "src/renderer/texts/text_descriptor.cpp", - "-o", - "objs/makefile/./src/renderer/texts/text_descriptor.o" - ], - "file": "src/renderer/texts/text_descriptor.cpp" - }, - { - "directory": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX/MacroLibX", - "arguments": [ - "clang++", - "-std=c++17", - "-O3", - "-fPIC", - "-Wall", - "-Wextra", - "-Werror", - "-DSDL_MAIN_HANDLED", - "-Wno-error=", - "-D", - "IMAGE_OPTIMIZED", - "-I./includes", - "-I./src", - "-I./third_party", - "-c", - "src/renderer/texts/text_library.cpp", - "-o", - "objs/makefile/./src/renderer/texts/text_library.o" - ], - "file": "src/renderer/texts/text_library.cpp" - }, - { - "directory": "/home/kbz_8/Documents/Programmation/42/other/MacroLibX/MacroLibX", - "arguments": [ - "clang++", - "-std=c++17", - "-O3", - "-fPIC", - "-Wall", - "-Wextra", - "-Werror", - "-DSDL_MAIN_HANDLED", - "-Wno-error=", - "-D", - "IMAGE_OPTIMIZED", - "-I./includes", - "-I./src", - "-I./third_party", - "-c", - "src/renderer/texts/text_manager.cpp", - "-o", - "objs/makefile/./src/renderer/texts/text_manager.o" - ], - "file": "src/renderer/texts/text_manager.cpp" - } + { + "arguments": [ + "/usr/bin/clang++", + "-std=c++20", + "-O3", + "-fPIC", + "-Wall", + "-Wextra", + "-DSDL_MAIN_HANDLED", + "-Wno-error=", + "-g3", + "-D", + "DEBUG", + "-DIMAGE_OPTIMIZED", + "-I./includes", + "-I./runtime/Includes", + "-I./runtime/Sources", + "-I./third_party", + "-c", + "-o", + "objs/make/linux/runtime/Sources/Core/Application.o", + "runtime/Sources/Core/Application.cpp" + ], + "directory": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX", + "file": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/runtime/Sources/Core/Application.cpp", + "output": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/objs/make/linux/runtime/Sources/Core/Application.o" + }, + { + "arguments": [ + "/usr/bin/clang++", + "-std=c++20", + "-O3", + "-fPIC", + "-Wall", + "-Wextra", + "-DSDL_MAIN_HANDLED", + "-Wno-error=", + "-g3", + "-D", + "DEBUG", + "-DIMAGE_OPTIMIZED", + "-I./includes", + "-I./runtime/Includes", + "-I./runtime/Sources", + "-I./third_party", + "-c", + "-o", + "objs/make/linux/runtime/Sources/Core/Bridge.o", + "runtime/Sources/Core/Bridge.cpp" + ], + "directory": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX", + "file": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/runtime/Sources/Core/Bridge.cpp", + "output": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/objs/make/linux/runtime/Sources/Core/Bridge.o" + }, + { + "arguments": [ + "/usr/bin/clang++", + "-std=c++20", + "-O3", + "-fPIC", + "-Wall", + "-Wextra", + "-DSDL_MAIN_HANDLED", + "-Wno-error=", + "-g3", + "-D", + "DEBUG", + "-DIMAGE_OPTIMIZED", + "-I./includes", + "-I./runtime/Includes", + "-I./runtime/Sources", + "-I./third_party", + "-c", + "-o", + "objs/make/linux/runtime/Sources/Core/EventBus.o", + "runtime/Sources/Core/EventBus.cpp" + ], + "directory": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX", + "file": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/runtime/Sources/Core/EventBus.cpp", + "output": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/objs/make/linux/runtime/Sources/Core/EventBus.o" + }, + { + "arguments": [ + "/usr/bin/clang++", + "-std=c++20", + "-O3", + "-fPIC", + "-Wall", + "-Wextra", + "-DSDL_MAIN_HANDLED", + "-Wno-error=", + "-g3", + "-D", + "DEBUG", + "-DIMAGE_OPTIMIZED", + "-I./includes", + "-I./runtime/Includes", + "-I./runtime/Sources", + "-I./third_party", + "-c", + "-o", + 
"objs/make/linux/runtime/Sources/Core/EventListener.o", + "runtime/Sources/Core/EventListener.cpp" + ], + "directory": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX", + "file": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/runtime/Sources/Core/EventListener.cpp", + "output": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/objs/make/linux/runtime/Sources/Core/EventListener.o" + }, + { + "arguments": [ + "/usr/bin/clang++", + "-std=c++20", + "-O3", + "-fPIC", + "-Wall", + "-Wextra", + "-DSDL_MAIN_HANDLED", + "-Wno-error=", + "-g3", + "-D", + "DEBUG", + "-DIMAGE_OPTIMIZED", + "-I./includes", + "-I./runtime/Includes", + "-I./runtime/Sources", + "-I./third_party", + "-c", + "-o", + "objs/make/linux/runtime/Sources/Core/Fps.o", + "runtime/Sources/Core/Fps.cpp" + ], + "directory": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX", + "file": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/runtime/Sources/Core/Fps.cpp", + "output": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/objs/make/linux/runtime/Sources/Core/Fps.o" + }, + { + "arguments": [ + "/usr/bin/clang++", + "-std=c++20", + "-O3", + "-fPIC", + "-Wall", + "-Wextra", + "-DSDL_MAIN_HANDLED", + "-Wno-error=", + "-g3", + "-D", + "DEBUG", + "-DIMAGE_OPTIMIZED", + "-I./includes", + "-I./runtime/Includes", + "-I./runtime/Sources", + "-I./third_party", + "-c", + "-o", + "objs/make/linux/runtime/Sources/Core/Graphics.o", + "runtime/Sources/Core/Graphics.cpp" + ], + "directory": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX", + "file": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/runtime/Sources/Core/Graphics.cpp", + "output": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/objs/make/linux/runtime/Sources/Core/Graphics.o" + }, + { + "arguments": [ + "/usr/bin/clang++", + "-std=c++20", + "-O3", + "-fPIC", + "-Wall", + "-Wextra", + "-DSDL_MAIN_HANDLED", + "-Wno-error=", + "-g3", + "-D", + "DEBUG", + "-DIMAGE_OPTIMIZED", + "-I./includes", + "-I./runtime/Includes", + "-I./runtime/Sources", + "-I./third_party", + "-c", + "-o", + "objs/make/linux/runtime/Sources/Core/Logs.o", + "runtime/Sources/Core/Logs.cpp" + ], + "directory": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX", + "file": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/runtime/Sources/Core/Logs.cpp", + "output": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/objs/make/linux/runtime/Sources/Core/Logs.o" + }, + { + "arguments": [ + "/usr/bin/clang++", + "-std=c++20", + "-O3", + "-fPIC", + "-Wall", + "-Wextra", + "-DSDL_MAIN_HANDLED", + "-Wno-error=", + "-g3", + "-D", + "DEBUG", + "-DIMAGE_OPTIMIZED", + "-I./includes", + "-I./runtime/Includes", + "-I./runtime/Sources", + "-I./third_party", + "-c", + "-o", + "objs/make/linux/runtime/Sources/Core/Memory.o", + "runtime/Sources/Core/Memory.cpp" + ], + "directory": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX", + "file": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/runtime/Sources/Core/Memory.cpp", + "output": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/objs/make/linux/runtime/Sources/Core/Memory.o" + }, + { + "arguments": [ + "/usr/bin/clang++", + "-std=c++20", + "-O3", + "-fPIC", + "-Wall", + "-Wextra", + "-DSDL_MAIN_HANDLED", + "-Wno-error=", + "-g3", + "-D", + "DEBUG", + "-DIMAGE_OPTIMIZED", + "-I./includes", + "-I./runtime/Includes", + "-I./runtime/Sources", + "-I./third_party", + "-c", + "-o", + "objs/make/linux/runtime/Sources/Core/Profiler.o", + "runtime/Sources/Core/Profiler.cpp" + ], + "directory": 
"/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX", + "file": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/runtime/Sources/Core/Profiler.cpp", + "output": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/objs/make/linux/runtime/Sources/Core/Profiler.o" + }, + { + "arguments": [ + "/usr/bin/clang++", + "-std=c++20", + "-O3", + "-fPIC", + "-Wall", + "-Wextra", + "-DSDL_MAIN_HANDLED", + "-Wno-error=", + "-g3", + "-D", + "DEBUG", + "-DIMAGE_OPTIMIZED", + "-I./includes", + "-I./runtime/Includes", + "-I./runtime/Sources", + "-I./third_party", + "-c", + "-o", + "objs/make/linux/runtime/Sources/Core/SDLManager.o", + "runtime/Sources/Core/SDLManager.cpp" + ], + "directory": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX", + "file": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/runtime/Sources/Core/SDLManager.cpp", + "output": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/objs/make/linux/runtime/Sources/Core/SDLManager.o" + }, + { + "arguments": [ + "/usr/bin/clang++", + "-std=c++20", + "-O3", + "-fPIC", + "-Wall", + "-Wextra", + "-DSDL_MAIN_HANDLED", + "-Wno-error=", + "-g3", + "-D", + "DEBUG", + "-DIMAGE_OPTIMIZED", + "-I./includes", + "-I./runtime/Includes", + "-I./runtime/Sources", + "-I./third_party", + "-c", + "-o", + "objs/make/linux/runtime/Sources/Core/UUID.o", + "runtime/Sources/Core/UUID.cpp" + ], + "directory": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX", + "file": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/runtime/Sources/Core/UUID.cpp", + "output": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/objs/make/linux/runtime/Sources/Core/UUID.o" + }, + { + "arguments": [ + "/usr/bin/clang++", + "-std=c++20", + "-O3", + "-fPIC", + "-Wall", + "-Wextra", + "-DSDL_MAIN_HANDLED", + "-Wno-error=", + "-g3", + "-D", + "DEBUG", + "-DIMAGE_OPTIMIZED", + "-I./includes", + "-I./runtime/Includes", + "-I./runtime/Sources", + "-I./third_party", + "-c", + "-o", + "objs/make/linux/runtime/Sources/Graphics/Mesh.o", + "runtime/Sources/Graphics/Mesh.cpp" + ], + "directory": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX", + "file": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/runtime/Sources/Graphics/Mesh.cpp", + "output": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/objs/make/linux/runtime/Sources/Graphics/Mesh.o" + }, + { + "arguments": [ + "/usr/bin/clang++", + "-std=c++20", + "-O3", + "-fPIC", + "-Wall", + "-Wextra", + "-DSDL_MAIN_HANDLED", + "-Wno-error=", + "-g3", + "-D", + "DEBUG", + "-DIMAGE_OPTIMIZED", + "-I./includes", + "-I./runtime/Includes", + "-I./runtime/Sources", + "-I./third_party", + "-c", + "-o", + "objs/make/linux/runtime/Sources/Graphics/PixelPutManager.o", + "runtime/Sources/Graphics/PixelPutManager.cpp" + ], + "directory": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX", + "file": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/runtime/Sources/Graphics/PixelPutManager.cpp", + "output": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/objs/make/linux/runtime/Sources/Graphics/PixelPutManager.o" + }, + { + "arguments": [ + "/usr/bin/clang++", + "-std=c++20", + "-O3", + "-fPIC", + "-Wall", + "-Wextra", + "-DSDL_MAIN_HANDLED", + "-Wno-error=", + "-g3", + "-D", + "DEBUG", + "-DIMAGE_OPTIMIZED", + "-I./includes", + "-I./runtime/Includes", + "-I./runtime/Sources", + "-I./third_party", + "-c", + "-o", + "objs/make/linux/runtime/Sources/Graphics/Scene.o", + "runtime/Sources/Graphics/Scene.cpp" + ], + "directory": 
"/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX", + "file": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/runtime/Sources/Graphics/Scene.cpp", + "output": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/objs/make/linux/runtime/Sources/Graphics/Scene.o" + }, + { + "arguments": [ + "/usr/bin/clang++", + "-std=c++20", + "-O3", + "-fPIC", + "-Wall", + "-Wextra", + "-DSDL_MAIN_HANDLED", + "-Wno-error=", + "-g3", + "-D", + "DEBUG", + "-DIMAGE_OPTIMIZED", + "-I./includes", + "-I./runtime/Includes", + "-I./runtime/Sources", + "-I./third_party", + "-c", + "-o", + "objs/make/linux/runtime/Sources/Graphics/Sprite.o", + "runtime/Sources/Graphics/Sprite.cpp" + ], + "directory": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX", + "file": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/runtime/Sources/Graphics/Sprite.cpp", + "output": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/objs/make/linux/runtime/Sources/Graphics/Sprite.o" + }, + { + "arguments": [ + "/usr/bin/clang++", + "-std=c++20", + "-O3", + "-fPIC", + "-Wall", + "-Wextra", + "-DSDL_MAIN_HANDLED", + "-Wno-error=", + "-g3", + "-D", + "DEBUG", + "-DIMAGE_OPTIMIZED", + "-I./includes", + "-I./runtime/Includes", + "-I./runtime/Sources", + "-I./third_party", + "-c", + "-o", + "objs/make/linux/runtime/Sources/Platform/Inputs.o", + "runtime/Sources/Platform/Inputs.cpp" + ], + "directory": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX", + "file": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/runtime/Sources/Platform/Inputs.cpp", + "output": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/objs/make/linux/runtime/Sources/Platform/Inputs.o" + }, + { + "arguments": [ + "/usr/bin/clang++", + "-std=c++20", + "-O3", + "-fPIC", + "-Wall", + "-Wextra", + "-DSDL_MAIN_HANDLED", + "-Wno-error=", + "-g3", + "-D", + "DEBUG", + "-DIMAGE_OPTIMIZED", + "-I./includes", + "-I./runtime/Includes", + "-I./runtime/Sources", + "-I./third_party", + "-c", + "-o", + "objs/make/linux/runtime/Sources/Platform/Window.o", + "runtime/Sources/Platform/Window.cpp" + ], + "directory": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX", + "file": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/runtime/Sources/Platform/Window.cpp", + "output": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/objs/make/linux/runtime/Sources/Platform/Window.o" + }, + { + "arguments": [ + "/usr/bin/clang++", + "-std=c++20", + "-O3", + "-fPIC", + "-Wall", + "-Wextra", + "-DSDL_MAIN_HANDLED", + "-Wno-error=", + "-g3", + "-D", + "DEBUG", + "-DIMAGE_OPTIMIZED", + "-I./includes", + "-I./runtime/Includes", + "-I./runtime/Sources", + "-I./third_party", + "-c", + "-o", + "objs/make/linux/runtime/Sources/Renderer/Buffer.o", + "runtime/Sources/Renderer/Buffer.cpp" + ], + "directory": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX", + "file": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/runtime/Sources/Renderer/Buffer.cpp", + "output": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/objs/make/linux/runtime/Sources/Renderer/Buffer.o" + }, + { + "arguments": [ + "/usr/bin/clang++", + "-std=c++20", + "-O3", + "-fPIC", + "-Wall", + "-Wextra", + "-DSDL_MAIN_HANDLED", + "-Wno-error=", + "-g3", + "-D", + "DEBUG", + "-DIMAGE_OPTIMIZED", + "-I./includes", + "-I./runtime/Includes", + "-I./runtime/Sources", + "-I./third_party", + "-c", + "-o", + "objs/make/linux/runtime/Sources/Renderer/Descriptor.o", + "runtime/Sources/Renderer/Descriptor.cpp" + ], + "directory": 
"/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX", + "file": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/runtime/Sources/Renderer/Descriptor.cpp", + "output": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/objs/make/linux/runtime/Sources/Renderer/Descriptor.o" + }, + { + "arguments": [ + "/usr/bin/clang++", + "-std=c++20", + "-O3", + "-fPIC", + "-Wall", + "-Wextra", + "-DSDL_MAIN_HANDLED", + "-Wno-error=", + "-g3", + "-D", + "DEBUG", + "-DIMAGE_OPTIMIZED", + "-I./includes", + "-I./runtime/Includes", + "-I./runtime/Sources", + "-I./third_party", + "-c", + "-o", + "objs/make/linux/runtime/Sources/Renderer/Image.o", + "runtime/Sources/Renderer/Image.cpp" + ], + "directory": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX", + "file": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/runtime/Sources/Renderer/Image.cpp", + "output": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/objs/make/linux/runtime/Sources/Renderer/Image.o" + }, + { + "arguments": [ + "/usr/bin/clang++", + "-std=c++20", + "-O3", + "-fPIC", + "-Wall", + "-Wextra", + "-DSDL_MAIN_HANDLED", + "-Wno-error=", + "-g3", + "-D", + "DEBUG", + "-DIMAGE_OPTIMIZED", + "-I./includes", + "-I./runtime/Includes", + "-I./runtime/Sources", + "-I./third_party", + "-c", + "-o", + "objs/make/linux/runtime/Sources/Renderer/Memory.o", + "runtime/Sources/Renderer/Memory.cpp" + ], + "directory": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX", + "file": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/runtime/Sources/Renderer/Memory.cpp", + "output": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/objs/make/linux/runtime/Sources/Renderer/Memory.o" + }, + { + "arguments": [ + "/usr/bin/clang++", + "-std=c++20", + "-O3", + "-fPIC", + "-Wall", + "-Wextra", + "-DSDL_MAIN_HANDLED", + "-Wno-error=", + "-g3", + "-D", + "DEBUG", + "-DIMAGE_OPTIMIZED", + "-I./includes", + "-I./runtime/Includes", + "-I./runtime/Sources", + "-I./third_party", + "-c", + "-o", + "objs/make/linux/runtime/Sources/Renderer/RenderCore.o", + "runtime/Sources/Renderer/RenderCore.cpp" + ], + "directory": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX", + "file": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/runtime/Sources/Renderer/RenderCore.cpp", + "output": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/objs/make/linux/runtime/Sources/Renderer/RenderCore.o" + }, + { + "arguments": [ + "/usr/bin/clang++", + "-std=c++20", + "-O3", + "-fPIC", + "-Wall", + "-Wextra", + "-DSDL_MAIN_HANDLED", + "-Wno-error=", + "-g3", + "-D", + "DEBUG", + "-DIMAGE_OPTIMIZED", + "-I./includes", + "-I./runtime/Includes", + "-I./runtime/Sources", + "-I./third_party", + "-c", + "-o", + "objs/make/linux/runtime/Sources/Renderer/Renderer.o", + "runtime/Sources/Renderer/Renderer.cpp" + ], + "directory": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX", + "file": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/runtime/Sources/Renderer/Renderer.cpp", + "output": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/objs/make/linux/runtime/Sources/Renderer/Renderer.o" + }, + { + "arguments": [ + "/usr/bin/clang++", + "-std=c++20", + "-O3", + "-fPIC", + "-Wall", + "-Wextra", + "-DSDL_MAIN_HANDLED", + "-Wno-error=", + "-g3", + "-D", + "DEBUG", + "-DIMAGE_OPTIMIZED", + "-I./includes", + "-I./runtime/Includes", + "-I./runtime/Sources", + "-I./third_party", + "-c", + "-o", + "objs/make/linux/runtime/Sources/Renderer/SceneRenderer.o", + "runtime/Sources/Renderer/SceneRenderer.cpp" + ], + "directory": 
"/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX", + "file": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/runtime/Sources/Renderer/SceneRenderer.cpp", + "output": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/objs/make/linux/runtime/Sources/Renderer/SceneRenderer.o" + }, + { + "arguments": [ + "/usr/bin/clang++", + "-std=c++20", + "-O3", + "-fPIC", + "-Wall", + "-Wextra", + "-DSDL_MAIN_HANDLED", + "-Wno-error=", + "-g3", + "-D", + "DEBUG", + "-DIMAGE_OPTIMIZED", + "-I./includes", + "-I./runtime/Includes", + "-I./runtime/Sources", + "-I./third_party", + "-c", + "-o", + "objs/make/linux/runtime/Sources/Renderer/Pipelines/Graphics.o", + "runtime/Sources/Renderer/Pipelines/Graphics.cpp" + ], + "directory": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX", + "file": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/runtime/Sources/Renderer/Pipelines/Graphics.cpp", + "output": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/objs/make/linux/runtime/Sources/Renderer/Pipelines/Graphics.o" + }, + { + "arguments": [ + "/usr/bin/clang++", + "-std=c++20", + "-O3", + "-fPIC", + "-Wall", + "-Wextra", + "-DSDL_MAIN_HANDLED", + "-Wno-error=", + "-g3", + "-D", + "DEBUG", + "-DIMAGE_OPTIMIZED", + "-I./includes", + "-I./runtime/Includes", + "-I./runtime/Sources", + "-I./third_party", + "-c", + "-o", + "objs/make/linux/runtime/Sources/Renderer/Pipelines/Shader.o", + "runtime/Sources/Renderer/Pipelines/Shader.cpp" + ], + "directory": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX", + "file": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/runtime/Sources/Renderer/Pipelines/Shader.cpp", + "output": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/objs/make/linux/runtime/Sources/Renderer/Pipelines/Shader.o" + }, + { + "arguments": [ + "/usr/bin/clang++", + "-std=c++20", + "-O3", + "-fPIC", + "-Wall", + "-Wextra", + "-DSDL_MAIN_HANDLED", + "-Wno-error=", + "-g3", + "-D", + "DEBUG", + "-DIMAGE_OPTIMIZED", + "-I./includes", + "-I./runtime/Includes", + "-I./runtime/Sources", + "-I./third_party", + "-c", + "-o", + "objs/make/linux/runtime/Sources/Renderer/RenderPasses/2DPass.o", + "runtime/Sources/Renderer/RenderPasses/2DPass.cpp" + ], + "directory": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX", + "file": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/runtime/Sources/Renderer/RenderPasses/2DPass.cpp", + "output": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/objs/make/linux/runtime/Sources/Renderer/RenderPasses/2DPass.o" + }, + { + "arguments": [ + "/usr/bin/clang++", + "-std=c++20", + "-O3", + "-fPIC", + "-Wall", + "-Wextra", + "-DSDL_MAIN_HANDLED", + "-Wno-error=", + "-g3", + "-D", + "DEBUG", + "-DIMAGE_OPTIMIZED", + "-I./includes", + "-I./runtime/Includes", + "-I./runtime/Sources", + "-I./third_party", + "-c", + "-o", + "objs/make/linux/runtime/Sources/Renderer/RenderPasses/FinalPass.o", + "runtime/Sources/Renderer/RenderPasses/FinalPass.cpp" + ], + "directory": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX", + "file": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/runtime/Sources/Renderer/RenderPasses/FinalPass.cpp", + "output": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/objs/make/linux/runtime/Sources/Renderer/RenderPasses/FinalPass.o" + }, + { + "arguments": [ + "/usr/bin/clang++", + "-std=c++20", + "-O3", + "-fPIC", + "-Wall", + "-Wextra", + "-DSDL_MAIN_HANDLED", + "-Wno-error=", + "-g3", + "-D", + "DEBUG", + "-DIMAGE_OPTIMIZED", + "-I./includes", + "-I./runtime/Includes", + 
"-I./runtime/Sources", + "-I./third_party", + "-c", + "-o", + "objs/make/linux/runtime/Sources/Renderer/RenderPasses/Passes.o", + "runtime/Sources/Renderer/RenderPasses/Passes.cpp" + ], + "directory": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX", + "file": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/runtime/Sources/Renderer/RenderPasses/Passes.cpp", + "output": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/objs/make/linux/runtime/Sources/Renderer/RenderPasses/Passes.o" + }, + { + "arguments": [ + "/usr/bin/clang++", + "-std=c++20", + "-O3", + "-fPIC", + "-Wall", + "-Wextra", + "-DSDL_MAIN_HANDLED", + "-Wno-error=", + "-g3", + "-D", + "DEBUG", + "-DIMAGE_OPTIMIZED", + "-I./includes", + "-I./runtime/Includes", + "-I./runtime/Sources", + "-I./third_party", + "-c", + "-o", + "objs/make/linux/runtime/Sources/Renderer/Vulkan/VulkanLoader.o", + "runtime/Sources/Renderer/Vulkan/VulkanLoader.cpp" + ], + "directory": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX", + "file": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/runtime/Sources/Renderer/Vulkan/VulkanLoader.cpp", + "output": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/objs/make/linux/runtime/Sources/Renderer/Vulkan/VulkanLoader.o" + } ] diff --git a/example/build.sh b/example/build.sh index 1b95a5d..b332c18 100755 --- a/example/build.sh +++ b/example/build.sh @@ -5,8 +5,8 @@ if [ -e a.out ]; then fi if [ $(uname -s) = 'Darwin' ]; then - clang main.c ../libmlx.dylib -L /opt/homebrew/lib -lglfw -g; + clang main.c ../libmlx.dylib -L /opt/homebrew/lib -lSDL2 -g; else - clang main.c ../libmlx.so -lglfw -g -Wall -Wextra -Werror; + clang main.c ../libmlx.so -lSDL2 -g -Wall -Wextra -Werror; fi diff --git a/runtime/Includes/Core/SDLManager.h b/runtime/Includes/Core/SDLManager.h index ddb7b21..cdc514c 100644 --- a/runtime/Includes/Core/SDLManager.h +++ b/runtime/Includes/Core/SDLManager.h @@ -22,6 +22,11 @@ namespace mlx inline void SetEventCallback(func::function functor, void* userdata) { f_callback = std::move(functor); p_callback_data = userdata; } + std::int32_t GetX() const noexcept; + std::int32_t GetY() const noexcept; + std::int32_t GetXRel() const noexcept; + std::int32_t GetYRel() const noexcept; + private: SDLManager() = default; ~SDLManager() = default; diff --git a/runtime/Includes/Platform/Inputs.h b/runtime/Includes/Platform/Inputs.h index bd1c91d..1114058 100644 --- a/runtime/Includes/Platform/Inputs.h +++ b/runtime/Includes/Platform/Inputs.h @@ -19,7 +19,7 @@ namespace mlx public: Inputs(); - void RegisterWindow(std::shared_ptr window); + inline void RegisterWindow(std::shared_ptr window) { m_windows[window->GetID()] = window; } std::int32_t GetX() const noexcept; std::int32_t GetY() const noexcept; diff --git a/runtime/Includes/PreCompiled.h b/runtime/Includes/PreCompiled.h index 231ca2f..c50a067 100644 --- a/runtime/Includes/PreCompiled.h +++ b/runtime/Includes/PreCompiled.h @@ -14,6 +14,7 @@ #include #include +#include #include #include @@ -71,7 +72,6 @@ #include #include #include -#include using Handle = void*; diff --git a/runtime/Includes/Renderer/Image.h b/runtime/Includes/Renderer/Image.h index 31b8921..996a3ac 100644 --- a/runtime/Includes/Renderer/Image.h +++ b/runtime/Includes/Renderer/Image.h @@ -111,7 +111,7 @@ namespace mlx void SetPixel(int x, int y, std::uint32_t color) noexcept; int GetPixel(int x, int y) noexcept; - void Update(VkCommandBuffer cmd) const; + void Update(VkCommandBuffer cmd); ~Texture() override { Destroy(); } diff --git 
a/runtime/Includes/Renderer/RenderCore.h b/runtime/Includes/Renderer/RenderCore.h index 01218a3..04140d1 100644 --- a/runtime/Includes/Renderer/RenderCore.h +++ b/runtime/Includes/Renderer/RenderCore.h @@ -21,7 +21,6 @@ namespace mlx [[nodiscard]] MLX_FORCEINLINE VkPhysicalDevice GetPhysicalDevice() const noexcept { return m_physical_device; } [[nodiscard]] MLX_FORCEINLINE GPUAllocator& GetAllocator() noexcept { return m_allocator; } - inline void WaitDeviceIdle() const noexcept { vkDeviceWaitIdle(m_device); } private: diff --git a/runtime/Includes/Renderer/Vulkan/VulkanPrototypes.h b/runtime/Includes/Renderer/Vulkan/VulkanPrototypes.h index fd96c65..d65439f 100644 --- a/runtime/Includes/Renderer/Vulkan/VulkanPrototypes.h +++ b/runtime/Includes/Renderer/Vulkan/VulkanPrototypes.h @@ -1,5 +1,5 @@ -#ifndef __SCOP_VK_PROTOTYPES__ -#define __SCOP_VK_PROTOTYPES__ +#ifndef __MLX_VK_PROTOTYPES__ +#define __MLX_VK_PROTOTYPES__ #if defined(VULKAN_H_) && !defined(VK_NO_PROTOTYPES) #error "define VK_NO_PROTOTYPES needed" diff --git a/runtime/Includes/Utils/NonOwningPtr.inl b/runtime/Includes/Utils/NonOwningPtr.inl index b35bee5..579809e 100644 --- a/runtime/Includes/Utils/NonOwningPtr.inl +++ b/runtime/Includes/Utils/NonOwningPtr.inl @@ -16,6 +16,7 @@ namespace mlx NonOwningPtr& NonOwningPtr::operator=(T* ptr) { p_ptr = ptr; + return *this; } template @@ -23,6 +24,7 @@ namespace mlx { p_ptr = ptr.p_ptr; ptr.p_ptr = nullptr; + return *this; } template diff --git a/runtime/Sources/Core/Application.cpp b/runtime/Sources/Core/Application.cpp index 6398826..5da8017 100644 --- a/runtime/Sources/Core/Application.cpp +++ b/runtime/Sources/Core/Application.cpp @@ -13,9 +13,7 @@ namespace mlx EventBus::RegisterListener({[](const EventBase& event) { if(event.What() == Event::FatalErrorEventCode) - { std::abort(); - } }, "__MlxApplication" }); m_fps.Init(); diff --git a/runtime/Sources/Core/Logs.cpp b/runtime/Sources/Core/Logs.cpp index 88c063a..37a301b 100644 --- a/runtime/Sources/Core/Logs.cpp +++ b/runtime/Sources/Core/Logs.cpp @@ -38,11 +38,11 @@ namespace mlx switch(type) { - case LogType::Debug: std::cout << Ansi::blue << "[Akel Debug] " << Ansi::def << code_infos << message << '\n'; break; - case LogType::Message: std::cout << Ansi::blue << "[Akel Message] " << Ansi::def << code_infos << message << '\n'; break; - case LogType::Warning: std::cout << Ansi::magenta << "[Akel Warning] " << Ansi::def << code_infos << message << '\n'; break; - case LogType::Error: std::cerr << Ansi::red << "[Akel Error] " << Ansi::def << code_infos << message << '\n'; break; - case LogType::FatalError: std::cerr << Ansi::red << "[Akel Fatal Error] " << Ansi::def << code_infos << message << '\n'; break; + case LogType::Debug: std::cout << Ansi::blue << "[MLX Debug] " << Ansi::def << code_infos << message << '\n'; break; + case LogType::Message: std::cout << Ansi::blue << "[MLX Message] " << Ansi::def << code_infos << message << '\n'; break; + case LogType::Warning: std::cout << Ansi::magenta << "[MLX Warning] " << Ansi::def << code_infos << message << '\n'; break; + case LogType::Error: std::cerr << Ansi::red << "[MLX Error] " << Ansi::def << code_infos << message << '\n'; break; + case LogType::FatalError: std::cerr << Ansi::red << "[MLX Fatal Error] " << Ansi::def << code_infos << message << '\n'; break; default: break; } diff --git a/runtime/Sources/Core/SDLManager.cpp b/runtime/Sources/Core/SDLManager.cpp index cdfe5c6..44d9e44 100644 --- a/runtime/Sources/Core/SDLManager.cpp +++ b/runtime/Sources/Core/SDLManager.cpp @@ 
-56,10 +56,6 @@ namespace mlx { WatcherData* data = static_cast(userdata); - if(event->type == SDL_MOUSEMOTION) - { - } - std::uint32_t id = event->window.windowID; switch(event->type) { @@ -102,6 +98,7 @@ namespace mlx return 0; }, &watcher_data); + DebugLog("SDL Manager initialized"); } void* SDLManager::CreateWindow(const std::string& title, std::size_t w, std::size_t h, bool hidden) @@ -162,11 +159,44 @@ namespace mlx return Vec2ui{ extent }; } + std::int32_t SDLManager::GetX() const noexcept + { + int dummy; + int x; + SDL_GetMouseState(&x, &dummy); + return x; + } + + std::int32_t SDLManager::GetY() const noexcept + { + int dummy; + int y; + SDL_GetMouseState(&dummy, &y); + return y; + } + + std::int32_t SDLManager::GetXRel() const noexcept + { + int dummy; + int x; + SDL_GetRelativeMouseState(&x, &dummy); + return x; + } + + std::int32_t SDLManager::GetYRel() const noexcept + { + int dummy; + int y; + SDL_GetRelativeMouseState(&dummy, &y); + return y; + } + void SDLManager::Shutdown() noexcept { if(m_drop_sdl_responsability) return; SDL_QuitSubSystem(SDL_INIT_VIDEO | SDL_INIT_TIMER | SDL_INIT_EVENTS); SDL_Quit(); + DebugLog("SDL Manager uninitialized"); } } diff --git a/runtime/Sources/Platform/Inputs.cpp b/runtime/Sources/Platform/Inputs.cpp index 03943e0..b93561f 100644 --- a/runtime/Sources/Platform/Inputs.cpp +++ b/runtime/Sources/Platform/Inputs.cpp @@ -15,4 +15,24 @@ namespace mlx m_events_hooks[window_id][event].hook(code, m_events_hooks[window_id][event].param); }, nullptr); } + + std::int32_t Inputs::GetX() const noexcept + { + return SDLManager::Get().GetX(); + } + + std::int32_t Inputs::GetY() const noexcept + { + return SDLManager::Get().GetY(); + } + + std::int32_t Inputs::GetXRel() const noexcept + { + return SDLManager::Get().GetXRel(); + } + + std::int32_t Inputs::GetYRel() const noexcept + { + return SDLManager::Get().GetYRel(); + } } diff --git a/runtime/Sources/Renderer/Descriptor.cpp b/runtime/Sources/Renderer/Descriptor.cpp index 4ccbced..bbed657 100644 --- a/runtime/Sources/Renderer/Descriptor.cpp +++ b/runtime/Sources/Renderer/Descriptor.cpp @@ -33,7 +33,7 @@ namespace mlx } DescriptorSet::DescriptorSet(VkDescriptorSetLayout layout, const std::vector& descriptors) - : m_set_layout(layout), m_descriptors(descriptors) + : m_descriptors(descriptors), m_set_layout(layout) { for(std::size_t i = 0; i < MAX_FRAMES_IN_FLIGHT; i++) m_set[i] = kvfAllocateDescriptorSet(RenderCore::Get().GetDevice(), layout); diff --git a/runtime/Sources/Renderer/Image.cpp b/runtime/Sources/Renderer/Image.cpp index 591bb96..0310469 100644 --- a/runtime/Sources/Renderer/Image.cpp +++ b/runtime/Sources/Renderer/Image.cpp @@ -3,6 +3,16 @@ #include #include +#define STB_IMAGE_IMPLEMENTATION +#ifdef MLX_COMPILER_GCC + #pragma GCC diagnostic push + #pragma GCC diagnostic ignored "-Wstringop-overflow" + #include + #pragma GCC diagnostic pop +#else + #include +#endif + namespace mlx { void Image::Init(ImageType type, std::uint32_t width, std::uint32_t height, VkFormat format, VkImageTiling tiling, VkImageUsageFlags usage, bool is_multisampled) @@ -31,7 +41,7 @@ namespace mlx image_info.usage = usage; image_info.samples = (m_is_multisampled ? 
VK_SAMPLE_COUNT_4_BIT : VK_SAMPLE_COUNT_1_BIT); image_info.sharingMode = VK_SHARING_MODE_EXCLUSIVE; - m_allocation = RenderCore::Get().GetAllocator().CreateImage(&image_info, alloc_info, &m_image); + m_allocation = RenderCore::Get().GetAllocator().CreateImage(&image_info, &alloc_info, m_image); } void Image::CreateImageView(VkImageViewType type, VkImageAspectFlags aspect_flags, int layer_count) noexcept @@ -77,11 +87,11 @@ namespace mlx { VkImageSubresourceRange subresource_range{}; subresource_range.baseMipLevel = 0; - subresource_range.layerCount = (m_type == ImageType::Cube ? 6 : 1); + subresource_range.layerCount = 1; subresource_range.levelCount = 1; subresource_range.baseArrayLayer = 0; - if(m_type == ImageType::Color || m_type == ImageType::Cube) + if(m_type == ImageType::Color) { VkImageLayout old_layout = m_layout; TransitionLayout(VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, cmd); @@ -134,10 +144,10 @@ namespace mlx m_has_been_modified = true; } - int GetPixel(int x, int y) noexcept + int Texture::GetPixel(int x, int y) noexcept { MLX_PROFILE_FUNCTION(); - if(x < 0 || y < 0 || static_cast(x) > getWidth() || static_cast(y) > getHeight()) + if(x < 0 || y < 0 || static_cast(x) > m_width || static_cast(y) > m_height) return 0; if(!m_staging_buffer.has_value()) OpenCPUBuffer(); @@ -149,22 +159,16 @@ namespace mlx return *reinterpret_cast(bytes); } - void Update(VkCommandBuffer cmd) const + void Texture::Update(VkCommandBuffer cmd) { if(!m_has_been_modified) return; - std::memcpy(m_staging_buffer.GetMap(), m_cpu_buffer.data(), m_cpu_buffer.size() * kvfGetFormatSize(m_format)); + std::memcpy(m_staging_buffer->GetMap(), m_cpu_buffer.data(), m_cpu_buffer.size() * kvfFormatSize(m_format)); VkImageLayout old_layout = m_layout; - VkCommandBuffer cmd = kvfCreateCommandBuffer(RenderCore::Get().GetDevice()); - kvfBeginCommandBuffer(cmd, VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT); TransitionLayout(VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, cmd); - kvfCopyBufferToImage(cmd, Image::Get(), staging_buffer.Get(), staging_buffer.GetOffset(), VK_IMAGE_ASPECT_COLOR_BIT, { width, height, 1 }); + kvfCopyBufferToImage(cmd, Image::Get(), m_staging_buffer->Get(), m_staging_buffer->GetOffset(), VK_IMAGE_ASPECT_COLOR_BIT, { m_width, m_height, 1 }); TransitionLayout(old_layout, cmd); - vkEndCommandBuffer(cmd); - VkFence fence = kvfCreateFence(RenderCore::Get().GetDevice()); - kvfSubmitSingleTimeCommandBuffer(RenderCore::Get().GetDevice(), cmd, KVF_GRAPHICS_QUEUE, fence); - kvfDestroyFence(RenderCore::Get().GetDevice(), fence); m_has_been_modified = false; } @@ -176,14 +180,14 @@ namespace mlx return; DebugLog("Texture : enabling CPU mapping"); m_staging_buffer.emplace(); - std::size_t size = m_width * m_height * kvfGetFormatSize(m_format); + std::size_t size = m_width * m_height * kvfFormatSize(m_format); m_staging_buffer->Init(BufferType::Staging, size, VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT, {}); VkImageLayout old_layout = m_layout; VkCommandBuffer cmd = kvfCreateCommandBuffer(RenderCore::Get().GetDevice()); kvfBeginCommandBuffer(cmd, VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT); TransitionLayout(VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, cmd); - kvfImageToBuffer(cmd, m_image, m_staging_buffer.Get(), m_staging_buffer.GetOffset(), VK_IMAGE_ASPECT_COLOR_BIT, { m_width, m_height, 1 }); + kvfCopyImageToBuffer(cmd, m_staging_buffer->Get(), m_image, m_staging_buffer->GetOffset(), VK_IMAGE_ASPECT_COLOR_BIT, { m_width, m_height, 1 }); TransitionLayout(old_layout, cmd); vkEndCommandBuffer(cmd); VkFence 
fence = kvfCreateFence(RenderCore::Get().GetDevice()); @@ -191,7 +195,7 @@ namespace mlx kvfDestroyFence(RenderCore::Get().GetDevice(), fence); m_cpu_buffer.resize(m_width * m_height); - std::memcpy(m_cpu_buffer.data(), m_staging_buffer.GetMap(), m_cpu_buffer.size()); + std::memcpy(m_cpu_buffer.data(), m_staging_buffer->GetMap(), m_cpu_buffer.size()); } Texture* StbTextureLoad(const std::filesystem::path& file, int* w, int* h) diff --git a/runtime/Sources/Renderer/Pipelines/Graphics.cpp b/runtime/Sources/Renderer/Pipelines/Graphics.cpp index e85d7f5..7939b5e 100644 --- a/runtime/Sources/Renderer/Pipelines/Graphics.cpp +++ b/runtime/Sources/Renderer/Pipelines/Graphics.cpp @@ -5,7 +5,7 @@ #include #include -namespace Scop +namespace mlx { void GraphicPipeline::Init(const GraphicPipelineDescriptor& descriptor) { @@ -39,7 +39,7 @@ namespace Scop kvfGPipelineBuilderSetCullMode(builder, VK_CULL_MODE_NONE, VK_FRONT_FACE_CLOCKWISE); kvfGPipelineBuilderEnableAlphaBlending(builder); if(p_depth) - kvfGPipelineBuilderEnableDepthTest(builder, (descriptor.depth_test_equal ? VK_COMPARE_OP_EQUAL : VK_COMPARE_OP_LESS), true); + kvfGPipelineBuilderEnableDepthTest(builder, VK_COMPARE_OP_LESS, true); else kvfGPipelineBuilderDisableDepthTest(builder); kvfGPipelineBuilderSetPolygonMode(builder, VK_POLYGON_MODE_FILL, 1.0f); @@ -80,7 +80,7 @@ namespace Scop scissor.extent = fb_extent; vkCmdSetScissor(command_buffer, 0, 1, &scissor); - for(int i = 0; i < m_clears.size(); i++) + for(std::size_t i = 0; i < m_clears.size(); i++) { m_clears[i].color.float32[0] = clear[0]; m_clears[i].color.float32[1] = clear[1]; diff --git a/runtime/Sources/Renderer/Pipelines/Shader.cpp b/runtime/Sources/Renderer/Pipelines/Shader.cpp index 63d8873..fd65276 100644 --- a/runtime/Sources/Renderer/Pipelines/Shader.cpp +++ b/runtime/Sources/Renderer/Pipelines/Shader.cpp @@ -2,9 +2,9 @@ #include #include -namespace Scop +namespace mlx { - Shader::Shader(const std::vector& bytecode, ShaderType type, ShaderLayout layout) : m_bytecode(bytecode), m_layout(std::move(layout)) + Shader::Shader(const std::vector& bytecode, ShaderType type, ShaderLayout layout) : m_layout(std::move(layout)), m_bytecode(bytecode) { switch(type) { @@ -14,7 +14,7 @@ namespace Scop default : FatalError("wtf"); break; } - m_module = kvfCreateShaderModule(RenderCore::Get().GetDevice(), m_bytecode.data(), m_bytecode.size() * 4); + m_module = kvfCreateShaderModule(RenderCore::Get().GetDevice(), reinterpret_cast(m_bytecode.data()), m_bytecode.size() * 4); DebugLog("Vulkan : shader module created"); GeneratePipelineLayout(m_layout); @@ -63,21 +63,4 @@ namespace Scop DebugLog("Vulkan : descriptor set layout destroyed"); } } - - std::shared_ptr LoadShaderFromFile(const std::filesystem::path& filepath, ShaderType type, ShaderLayout layout) - { - std::ifstream stream(filepath, std::ios::binary); - if(!stream.is_open()) - FatalError("Renderer : unable to open a spirv shader file, %", filepath); - std::vector data; - stream.seekg(0); - std::uint32_t part = 0; - while(stream.read(reinterpret_cast(&part), sizeof(part))) - data.push_back(part); - stream.close(); - - std::shared_ptr shader = std::make_shared(data, type, layout); - DebugLog("Vulkan : shader loaded %", filepath); - return shader; - } } diff --git a/runtime/Sources/Renderer/RenderCore.cpp b/runtime/Sources/Renderer/RenderCore.cpp index ebcb6e6..80ca9da 100644 --- a/runtime/Sources/Renderer/RenderCore.cpp +++ b/runtime/Sources/Renderer/RenderCore.cpp @@ -12,28 +12,30 @@ namespace mlx { - static VulkanLoader loader; + 
static std::unique_ptr loader; void ErrorCallback(const char* message) noexcept { - FatalError(message); + FatalError(message, 0, "", ""); std::cout << std::endl; } void ValidationErrorCallback(const char* message) noexcept { - Error(message); + Error(message, 0, "", ""); std::cout << std::endl; } void ValidationWarningCallback(const char* message) noexcept { - Warning(message); + Warning(message, 0, "", ""); std::cout << std::endl; } void RenderCore::Init() noexcept { + loader = std::make_unique(); + kvfSetErrorCallback(&ErrorCallback); kvfSetValidationErrorCallback(&ValidationErrorCallback); kvfSetValidationWarningCallback(&ValidationWarningCallback); @@ -41,15 +43,15 @@ namespace mlx //kvfAddLayer("VK_LAYER_MESA_overlay"); Window window(1, 1, "", true); - std::vector instance_extentions = window.GetRequiredVulkanInstanceExtentions(); + std::vector instance_extensions = window.GetRequiredVulkanInstanceExtentions(); #ifdef MLX_PLAT_MACOS - instance_extentions.push_back(VK_KHR_PORTABILITY_ENUMERATION_EXTENSION_NAME); + instance_extensions.push_back(VK_KHR_PORTABILITY_ENUMERATION_EXTENSION_NAME); #endif m_instance = kvfCreateInstance(instance_extensions.data(), instance_extensions.size()); DebugLog("Vulkan : instance created"); - loader.LoadInstance(m_instance); + loader->LoadInstance(m_instance); VkSurfaceKHR surface = window.CreateVulkanSurface(m_instance); @@ -77,5 +79,6 @@ namespace mlx DebugLog("Vulkan : logical device destroyed"); kvfDestroyInstance(m_instance); DebugLog("Vulkan : instance destroyed"); + loader.reset(); } } diff --git a/runtime/Sources/Renderer/RenderPasses/2DPass.cpp b/runtime/Sources/Renderer/RenderPasses/2DPass.cpp index a694b8c..962ce99 100644 --- a/runtime/Sources/Renderer/RenderPasses/2DPass.cpp +++ b/runtime/Sources/Renderer/RenderPasses/2DPass.cpp @@ -41,7 +41,7 @@ namespace mlx std::vector fragment_shader_code = { #include }; - p_fragment_shader = std::make_shared(fragment_shader, ShaderType::Fragment, std::move(fragment_shader_layout)); + p_fragment_shader = std::make_shared(fragment_shader_code, ShaderType::Fragment, std::move(fragment_shader_layout)); func::function functor = [this](const EventBase& event) { @@ -50,7 +50,7 @@ namespace mlx if(event.What() == Event::DescriptorPoolResetEventCode) { p_texture_set->Reallocate(); - p_viewer_data_set.Reallocate(); + p_viewer_data_set->Reallocate(); for(std::size_t i = 0; i < MAX_FRAMES_IN_FLIGHT; i++) { p_viewer_data_set->SetUniformBuffer(i, 0, p_viewer_data_buffer->Get(i)); @@ -64,7 +64,7 @@ namespace mlx p_texture_set = std::make_shared(p_fragment_shader->GetShaderLayout().set_layouts[0].second, p_fragment_shader->GetPipelineLayout().set_layouts[0], ShaderType::Fragment); p_viewer_data_buffer = std::make_shared(); - p_viewer_data_buffer->Init(sizeof(ViewerData2D)); + p_viewer_data_buffer->Init(sizeof(ViewerData)); for(std::size_t i = 0; i < MAX_FRAMES_IN_FLIGHT; i++) { p_viewer_data_set->SetUniformBuffer(i, 0, p_viewer_data_buffer->Get(i)); @@ -80,7 +80,7 @@ namespace mlx pipeline_descriptor.vertex_shader = p_vertex_shader; pipeline_descriptor.fragment_shader = p_fragment_shader; pipeline_descriptor.color_attachments = { &render_target }; - pipeline_descriptor.depth = scene.GetDepth(); + pipeline_descriptor.depth = &scene.GetDepth(); pipeline_descriptor.clear_color_attachments = false; m_pipeline.Init(pipeline_descriptor); } @@ -88,8 +88,8 @@ namespace mlx std::uint32_t frame_index = renderer.GetCurrentFrameIndex(); ViewerData viewer_data; - viewer_data.projection = Mat4f::Ortho(0.0f, render_target.GetWidth(), 
render_target.GetHeight(), 0.0f); - static CPUBuffer buffer(sizeof(ViewerData2D)); + viewer_data.projection_matrix = Mat4f::Ortho(0.0f, render_target.GetWidth(), render_target.GetHeight(), 0.0f); + static CPUBuffer buffer(sizeof(ViewerData)); std::memcpy(buffer.GetData(), &viewer_data, buffer.GetSize()); p_viewer_data_buffer->SetData(buffer, frame_index); diff --git a/runtime/Sources/Renderer/RenderPasses/FinalPass.cpp b/runtime/Sources/Renderer/RenderPasses/FinalPass.cpp index 5a057e4..c36d703 100644 --- a/runtime/Sources/Renderer/RenderPasses/FinalPass.cpp +++ b/runtime/Sources/Renderer/RenderPasses/FinalPass.cpp @@ -42,7 +42,7 @@ namespace mlx p_set = std::make_shared(p_fragment_shader->GetShaderLayout().set_layouts[0].second, p_fragment_shader->GetPipelineLayout().set_layouts[0], ShaderType::Fragment); } - void FinalPass::Pass(Scene& scene, Renderer& renderer, Texture& render_target) + void FinalPass::Pass([[maybe_unused]] Scene& scene, Renderer& renderer, Texture& render_target) { if(m_pipeline.GetPipeline() == VK_NULL_HANDLE) { diff --git a/runtime/Sources/Renderer/Renderer.cpp b/runtime/Sources/Renderer/Renderer.cpp index d544e2a..e8bacc7 100644 --- a/runtime/Sources/Renderer/Renderer.cpp +++ b/runtime/Sources/Renderer/Renderer.cpp @@ -36,21 +36,20 @@ namespace mlx p_window = window; - auto& render_core = RenderCore::Get(); - m_surface = p_window->CreateVulkanSurface(render_core::GetInstance()); + m_surface = p_window->CreateVulkanSurface(RenderCore::Get().GetInstance()); DebugLog("Vulkan : surface created"); CreateSwapchain(); for(std::size_t i = 0; i < MAX_FRAMES_IN_FLIGHT; i++) { - m_image_available_semaphores[i] = kvfCreateSemaphore(render_core.GetDevice()); + m_image_available_semaphores[i] = kvfCreateSemaphore(RenderCore::Get().GetDevice()); DebugLog("Vulkan : image available semaphore created"); - m_render_finished_semaphores[i] = kvfCreateSemaphore(render_core.GetDevice()); + m_render_finished_semaphores[i] = kvfCreateSemaphore(RenderCore::Get().GetDevice()); DebugLog("Vulkan : render finished semaphore created"); - m_cmd_buffers[i] = kvfCreateCommandBuffer(render_core.GetDevice()); + m_cmd_buffers[i] = kvfCreateCommandBuffer(RenderCore::Get().GetDevice()); DebugLog("Vulkan : command buffer created"); - m_cmd_fences[i] = kvfCreateFence(render_core.GetDevice()); + m_cmd_fences[i] = kvfCreateFence(RenderCore::Get().GetDevice()); DebugLog("Vulkan : fence created"); } } @@ -124,21 +123,20 @@ namespace mlx void Renderer::Destroy() noexcept { - auto& render_core = RenderCore::Get(); - render_core.WaitDeviceIdle(); + RenderCore::Get().WaitDeviceIdle(); for(std::size_t i = 0; i < MAX_FRAMES_IN_FLIGHT; i++) { - kvfDestroySemaphore(render_core.GetDevice(), m_image_available_semaphores[i]); + kvfDestroySemaphore(RenderCore::Get().GetDevice(), m_image_available_semaphores[i]); DebugLog("Vulkan : image available semaphore destroyed"); - kvfDestroySemaphore(render_core.GetDevice(), m_render_finished_semaphores[i]); + kvfDestroySemaphore(RenderCore::Get().GetDevice(), m_render_finished_semaphores[i]); DebugLog("Vulkan : render finished semaphore destroyed"); - kvfDestroyFence(render_core.GetDevice(), m_cmd_fences[i]); + kvfDestroyFence(RenderCore::Get().GetDevice(), m_cmd_fences[i]); DebugLog("Vulkan : fence destroyed"); } DestroySwapchain(); - vkDestroySurfaceKHR(render_core.GetInstance(), m_surface, nullptr); + vkDestroySurfaceKHR(RenderCore::Get().GetInstance(), m_surface, nullptr); DebugLog("Vulkan : surface destroyed"); m_surface = VK_NULL_HANDLE; } diff --git 
a/runtime/Sources/Renderer/SceneRenderer.cpp b/runtime/Sources/Renderer/SceneRenderer.cpp index b6f7860..8c176a0 100644 --- a/runtime/Sources/Renderer/SceneRenderer.cpp +++ b/runtime/Sources/Renderer/SceneRenderer.cpp @@ -4,7 +4,7 @@ #include #include -namespacemlx +namespace mlx { void SceneRenderer::Init() { diff --git a/runtime/Sources/Renderer/Vulkan/VulkanLoader.cpp b/runtime/Sources/Renderer/Vulkan/VulkanLoader.cpp index df743d1..7a7bf81 100644 --- a/runtime/Sources/Renderer/Vulkan/VulkanLoader.cpp +++ b/runtime/Sources/Renderer/Vulkan/VulkanLoader.cpp @@ -1,7 +1,7 @@ #include #include -#ifdef _WIN32 +#ifdef MLX_PLAT_WINDOWS __declspec(dllimport) HMODULE __stdcall LoadLibraryA(LPCSTR); __declspec(dllimport) FARPROC __stdcall GetProcAddress(HMODULE, LPCSTR); __declspec(dllimport) int __stdcall FreeLibrary(HMODULE); @@ -22,20 +22,20 @@ namespace mlx { namespace Internal { - static PFN_vkVoidFunction vkGetInstanceProcAddrStub(Handle context, const char* name) + static inline PFN_vkVoidFunction vkGetInstanceProcAddrStub(Handle context, const char* name) { - return vkGetInstanceProcAddr((VkInstance)context, name); + return vkGetInstanceProcAddr(static_cast(context), name); } } VulkanLoader::VulkanLoader() { - #if defined(_WIN32) + #if defined(MLX_PLAT_WINDOWS) p_module = LoadLibraryA("vulkan-1.dll"); if(!p_module) FatalError("Vulkan loader : failed to load libvulkan"); vkGetInstanceProcAddr = (PFN_vkGetInstanceProcAddr)(void(*)(void))GetProcAddress(p_module, "vkGetInstanceProcAddr"); - #elif defined(__APPLE__) + #elif defined(MLX_PLAT_MACOS) p_module = dlopen("libvulkan.dylib", RTLD_NOW | RTLD_LOCAL); if(!p_module) p_module = dlopen("libvulkan.1.dylib", RTLD_NOW | RTLD_LOCAL); @@ -55,16 +55,18 @@ namespace mlx p_module = dlopen("/usr/local/lib/libvulkan.dylib", RTLD_NOW | RTLD_LOCAL); if(!p_module) FatalError("Vulkan loader : failed to load libvulkan"); - - vkGetInstanceProcAddr = (PFN_vkGetInstanceProcAddr)dlsym(p_module, "vkGetInstanceProcAddr"); + void* symbol_ptr = dlsym(p_module, "vkGetInstanceProcAddr"); + *(void**)(&vkGetInstanceProcAddr) = symbol_ptr; #else + dlerror(); p_module = dlopen("libvulkan.so.1", RTLD_NOW | RTLD_LOCAL); if(!p_module) p_module = dlopen("libvulkan.so", RTLD_NOW | RTLD_LOCAL); if(!p_module) - FatalError("Vulkan loader : failed to load libvulkan"); + FatalError("Vulkan loader : failed to load libvulkan due to %", dlerror()); DISABLE_GCC_PEDANTIC_WARNINGS - vkGetInstanceProcAddr = (PFN_vkGetInstanceProcAddr)dlsym(p_module, "vkGetInstanceProcAddr"); + void* symbol_ptr = dlsym(p_module, "vkGetInstanceProcAddr"); + *(void**)(&vkGetInstanceProcAddr) = symbol_ptr; RESTORE_GCC_PEDANTIC_WARNINGS #endif DebugLog("Vulkan loader : libvulkan loaded"); @@ -251,7 +253,7 @@ namespace mlx VulkanLoader::~VulkanLoader() { - #if defined(_WIN32) + #if defined(MLX_PLAT_WINDOWS) FreeLibrary((HMODULE)p_module); #else dlclose(p_module); diff --git a/runtime/Sources/Renderer/Vulkan/VulkanLoader.h b/runtime/Sources/Renderer/Vulkan/VulkanLoader.h index c6a7274..2d2f320 100644 --- a/runtime/Sources/Renderer/Vulkan/VulkanLoader.h +++ b/runtime/Sources/Renderer/Vulkan/VulkanLoader.h @@ -1,7 +1,9 @@ #ifndef __MLX_VULKAN_LOADER__ #define __MLX_VULKAN_LOADER__ -#ifdef _WIN32 +#include + +#ifdef MLX_PLAT_WINDOWS typedef const char* LPCSTR; typedef struct HINSTANCE__* HINSTANCE; typedef HINSTANCE HMODULE; @@ -31,7 +33,7 @@ namespace mlx void LoadDeviceFunctions(void* context, PFN_vkVoidFunction (*load)(void*, const char*)) noexcept; private: - #ifdef _WIN32 + #ifdef MLX_PLAT_WINDOWS HMODULE 
p_module = nullptr; #else Handle p_module = nullptr; diff --git a/third_party/kvf.h b/third_party/kvf.h index 2fc82ba..6a22d3e 100755 --- a/third_party/kvf.h +++ b/third_party/kvf.h @@ -134,7 +134,7 @@ void kvfDestroySemaphore(VkDevice device, VkSemaphore semaphore); #endif VkImage kvfCreateImage(VkDevice device, uint32_t width, uint32_t height, VkFormat format, VkImageTiling tiling, VkImageUsageFlags usage, KvfImageType type); -void kvfImageToBuffer(VkCommandBuffer cmd, VkBuffer dst, VkImage src, size_t buffer_offset, VkImageAspectFlagBits aspect, VkExtent3D extent); +void kvfCopyImageToBuffer(VkCommandBuffer cmd, VkBuffer dst, VkImage src, size_t buffer_offset, VkImageAspectFlagBits aspect, VkExtent3D extent); void kvfDestroyImage(VkDevice device, VkImage image); VkImageView kvfCreateImageView(VkDevice device, VkImage image, VkFormat format, VkImageViewType type, VkImageAspectFlags aspect, int layer_count); void kvfDestroyImageView(VkDevice device, VkImageView image_view); @@ -970,6 +970,8 @@ const char* kvfVerbaliseVkResult(VkResult result) VKAPI_ATTR VkBool32 VKAPI_CALL __kvfDebugCallback(VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VkDebugUtilsMessageTypeFlagsEXT messageType, const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData, void* pUserData) { + (void)messageType; + (void)pUserData; if(messageSeverity == VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT) { if(__kvf_validation_error_callback != NULL) @@ -1138,7 +1140,7 @@ __KvfQueueFamilies __kvfFindQueueFamilies(VkPhysicalDevice physical, VkSurfaceKH VkQueueFamilyProperties* queue_families = (VkQueueFamilyProperties*)KVF_MALLOC(sizeof(VkQueueFamilyProperties) * queue_family_count); vkGetPhysicalDeviceQueueFamilyProperties(physical, &queue_family_count, queue_families); - for(int i = 0; i < queue_family_count; i++) + for(uint32_t i = 0; i < queue_family_count; i++) { // try to find a queue family index that supports compute but not graphics if(queue_families[i].queueFlags & VK_QUEUE_COMPUTE_BIT && (queue_families[i].queueFlags & VK_QUEUE_GRAPHICS_BIT) == 0) @@ -1201,10 +1203,10 @@ int32_t __kvfScorePhysicalDevice(VkPhysicalDevice device, VkSurfaceKHR surface, vkEnumerateDeviceExtensionProperties(device, NULL, &extension_count, props); bool are_there_required_device_extensions = true; - for(int j = 0; j < device_extensions_count; j++) + for(uint32_t j = 0; j < device_extensions_count; j++) { bool is_there_extension = false; - for(int k = 0; k < extension_count; k++) + for(uint32_t k = 0; k < extension_count; k++) { if(strcmp(device_extensions[j], props[k].extensionName) == 0) { @@ -1270,7 +1272,7 @@ VkPhysicalDevice kvfPickGoodPhysicalDevice(VkInstance instance, VkSurfaceKHR sur devices = (VkPhysicalDevice*)KVF_MALLOC(sizeof(VkPhysicalDevice) * device_count + 1); vkEnumeratePhysicalDevices(instance, &device_count, devices); - for(int i = 0; i < device_count; i++) + for(uint32_t i = 0; i < device_count; i++) { int32_t current_device_score = __kvfScorePhysicalDevice(devices[i], surface, device_extensions, device_extensions_count); if(current_device_score > best_device_score) @@ -1465,7 +1467,6 @@ uint32_t kvfGetDeviceQueueFamily(VkDevice device, KvfQueueType queue) KVF_ASSERT(device != VK_NULL_HANDLE); __KvfDevice* kvfdevice = __kvfGetKvfDeviceFromVkDevice(device); KVF_ASSERT(kvfdevice != NULL); - VkQueue vk_queue = VK_NULL_HANDLE; if(queue == KVF_GRAPHICS_QUEUE) return kvfdevice->queues.graphics; else if(queue == KVF_PRESENT_QUEUE) @@ -1508,7 +1509,7 @@ int32_t kvfFindDeviceQueueFamily(VkPhysicalDevice physical, 
KvfQueueType type) int32_t queue = -1; - for(int i = 0; i < queue_family_count; i++) + for(uint32_t i = 0; i < queue_family_count; i++) { if(type == KVF_COMPUTE_QUEUE) { @@ -1546,7 +1547,7 @@ int32_t kvfFindDeviceQueueFamily(VkPhysicalDevice physical, KvfQueueType type) int32_t queue = -1; - for(int i = 0; i < queue_family_count; i++) + for(uint32_t i = 0; i < queue_family_count; i++) { VkBool32 present_support = false; vkGetPhysicalDeviceSurfaceSupportKHR(physical, i, surface, &present_support); @@ -1629,7 +1630,7 @@ void kvfDestroySemaphore(VkDevice device, VkSemaphore semaphore) VkSurfaceFormatKHR __kvfChooseSwapSurfaceFormat(__KvfSwapchainSupportInternal* support) { - for(int i = 0; i < support->formats_count; i++) + for(uint32_t i = 0; i < support->formats_count; i++) { if(support->formats[i].format == VK_FORMAT_R8G8B8A8_SRGB && support->formats[i].colorSpace == VK_COLOR_SPACE_SRGB_NONLINEAR_KHR) return support->formats[i]; @@ -1641,7 +1642,7 @@ void kvfDestroySemaphore(VkDevice device, VkSemaphore semaphore) { if(try_vsync == false) return VK_PRESENT_MODE_IMMEDIATE_KHR; - for(int i = 0; i < support->presentModes_count; i++) + for(uint32_t i = 0; i < support->presentModes_count; i++) { if(support->presentModes[i] == VK_PRESENT_MODE_MAILBOX_KHR) return support->presentModes[i]; @@ -1781,7 +1782,7 @@ VkImage kvfCreateImage(VkDevice device, uint32_t width, uint32_t height, VkForma return image; } -void kvfImageToBuffer(VkCommandBuffer cmd, VkBuffer dst, VkImage src, size_t buffer_offset, VkImageAspectFlagBits aspect, VkExtent3D extent) +void kvfCopyImageToBuffer(VkCommandBuffer cmd, VkBuffer dst, VkImage src, size_t buffer_offset, VkImageAspectFlagBits aspect, VkExtent3D extent) { KVF_ASSERT(cmd != VK_NULL_HANDLE); KVF_ASSERT(dst != VK_NULL_HANDLE); @@ -2286,7 +2287,7 @@ VkDescriptorSet kvfAllocateDescriptorSet(VkDevice device, VkDescriptorSetLayout __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); KVF_ASSERT(kvf_device != NULL); VkDescriptorPool pool = VK_NULL_HANDLE; - for(int i = 0; i < kvf_device->sets_pools_size; i++) + for(uint32_t i = 0; i < kvf_device->sets_pools_size; i++) { if(kvf_device->sets_pools[i].size < kvf_device->sets_pools[i].capacity) pool = kvf_device->sets_pools[i].pool; @@ -2397,7 +2398,7 @@ void kvfResetDeviceDescriptorPools(VkDevice device) KVF_ASSERT(device != VK_NULL_HANDLE); __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); KVF_ASSERT(kvf_device != NULL); - for(int i = 0; i < kvf_device->sets_pools_size; i++) + for(uint32_t i = 0; i < kvf_device->sets_pools_size; i++) { vkResetDescriptorPool(device, kvf_device->sets_pools[i].pool, 0); kvf_device->sets_pools[i].size = 0; From 14b42a7c9e74682a938df68ea4424382345719f6 Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Sat, 14 Sep 2024 12:44:25 +0200 Subject: [PATCH 021/131] fixing macos and windows CI ? 
--- .github/workflows/macos_x86.yml | 6 ------ .github/workflows/windows.yml | 7 ------- xmake.lua | 16 ++-------------- 3 files changed, 2 insertions(+), 27 deletions(-) diff --git a/.github/workflows/macos_x86.yml b/.github/workflows/macos_x86.yml index 42e8af3..18919d5 100644 --- a/.github/workflows/macos_x86.yml +++ b/.github/workflows/macos_x86.yml @@ -29,12 +29,6 @@ jobs: uses: actions/checkout@v4 # Install system dependencies - - name: Install Vulkan SDK - uses: humbletim/install-vulkan-sdk@v1.1.1 - with: - version: 1.3.204.1 - cache: true - - name: Install Dependancies run: | brew install SDL2 diff --git a/.github/workflows/windows.yml b/.github/workflows/windows.yml index 83c5cc3..ad4d479 100644 --- a/.github/workflows/windows.yml +++ b/.github/workflows/windows.yml @@ -28,13 +28,6 @@ jobs: - name: Checkout repository uses: actions/checkout@v4 - # Install system dependencies - - name: Install Vulkan SDK - uses: humbletim/install-vulkan-sdk@v1.1.1 - with: - version: 1.3.204.1 - cache: true - # Force xmake to a specific folder (for cache) - name: Set xmake env run: echo "XMAKE_GLOBALDIR=${{ runner.workspace }}/xmake-global" >> $GITHUB_ENV diff --git a/xmake.lua b/xmake.lua index d9f1b9b..13d7be0 100644 --- a/xmake.lua +++ b/xmake.lua @@ -1,21 +1,9 @@ --------------------------------------------------------------------------------- --- -- --- ::: :::::::: -- --- xmake.lua :+: :+: :+: -- --- +:+ +:+ +:+ -- --- By: maldavid +#+ +:+ +#+ -- --- +#+#+#+#+#+ +#+ -- --- Created: 2023/12/07 15:21:38 by kbz_8 #+# #+# -- --- Updated: 2024/01/02 23:40:20 by kbz_8 ### ########.fr -- --- -- --------------------------------------------------------------------------------- - -- Global settings -add_requires("libsdl", {configs = { sdlmain = false }}) +add_requires("libsdl", { configs = { sdlmain = false }}) add_rules("mode.debug", "mode.release") -set_languages("cxx17", "c99") +set_languages("cxx20", "c99") set_objectdir("objs/xmake/$(os)_$(arch)") set_targetdir("./") From 9631305e35049ebccb96f3113153c0bd60fd6fe6 Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Sat, 14 Sep 2024 12:53:29 +0200 Subject: [PATCH 022/131] fixing macos and windows CI ? --- runtime/Includes/PreCompiled.h | 3 +++ xmake.lua | 6 ++++-- 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/runtime/Includes/PreCompiled.h b/runtime/Includes/PreCompiled.h index c50a067..35311db 100644 --- a/runtime/Includes/PreCompiled.h +++ b/runtime/Includes/PreCompiled.h @@ -47,6 +47,9 @@ #include #include #include +#if defined(MLX_PLAT_MACOS) || defined(MLX_PLAT_LINUX) || defined(MLX_PLAT_UNIX) + #include // sincos +#endif #ifdef MLX_COMPILER_CLANG #pragma clang diagnostic push diff --git a/xmake.lua b/xmake.lua index 13d7be0..135f6b8 100644 --- a/xmake.lua +++ b/xmake.lua @@ -46,13 +46,15 @@ target("mlx") add_options("images_optimized") add_options("force_integrated_gpu") add_options("graphics_memory_dump") - add_includedirs("runtime/Includes", "runtime/Sources", "third_party") + add_options("profiler") + add_options("force_wayland") + add_includedirs("runtime/Includes", "runtime/Sources", "includes", "third_party") set_pcxxheader("runtime/Sources/PreCompiled.h") add_defines("MLX_BUILD", "SDL_MAIN_HANDLED") - add_files("src/**.cpp") + add_files("runtime/Sources/**.cpp") add_packages("libsdl") From 8191191fffc592d999b7c08e29d3e414de4d332c Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Sat, 14 Sep 2024 13:04:43 +0200 Subject: [PATCH 023/131] fixing macos and windows CI ? 
x3 --- runtime/Includes/Maths/Angles.inl | 46 +++++++++++++++++++------------ runtime/Includes/PreCompiled.h | 2 +- xmake.lua | 2 +- 3 files changed, 30 insertions(+), 20 deletions(-) diff --git a/runtime/Includes/Maths/Angles.inl b/runtime/Includes/Maths/Angles.inl index 7b1cceb..55091d0 100644 --- a/runtime/Includes/Maths/Angles.inl +++ b/runtime/Includes/Maths/Angles.inl @@ -147,27 +147,37 @@ namespace mlx } }; - template - void SinCos(T x, T* sin, T* cos) - { - double s, c; - ::sincos(x, &s, &c); + #ifdef MLX_PLAT_LINUX + template + void SinCos(T x, T* sin, T* cos) + { + double s, c; + ::sincos(x, &s, &c); - *sin = static_cast(s); - *cos = static_cast(c); - } - template<> - inline void SinCos(float x, float* s, float* c) - { - ::sincosf(x, s, c); - } + *sin = static_cast(s); + *cos = static_cast(c); + } - template<> - inline void SinCos(long double x, long double* s, long double* c) - { - ::sincosl(x, s, c); - } + template<> + inline void SinCos(float x, float* s, float* c) + { + ::sincosf(x, s, c); + } + + template<> + inline void SinCos(long double x, long double* s, long double* c) + { + ::sincosl(x, s, c); + } + #else + template + void SinCos(T x, T* sin, T* cos) + { + *sin = std::sin(x); + *cos = std::cos(x); + } + #endif } template diff --git a/runtime/Includes/PreCompiled.h b/runtime/Includes/PreCompiled.h index 35311db..02c724f 100644 --- a/runtime/Includes/PreCompiled.h +++ b/runtime/Includes/PreCompiled.h @@ -47,7 +47,7 @@ #include #include #include -#if defined(MLX_PLAT_MACOS) || defined(MLX_PLAT_LINUX) || defined(MLX_PLAT_UNIX) +#if defined(MLX_PLAT_LINUX) #include // sincos #endif diff --git a/xmake.lua b/xmake.lua index 135f6b8..59e4fe5 100644 --- a/xmake.lua +++ b/xmake.lua @@ -50,7 +50,7 @@ target("mlx") add_options("force_wayland") add_includedirs("runtime/Includes", "runtime/Sources", "includes", "third_party") - set_pcxxheader("runtime/Sources/PreCompiled.h") + set_pcxxheader("runtime/Includes/PreCompiled.h") add_defines("MLX_BUILD", "SDL_MAIN_HANDLED") From f743fc717f9d1365ecde27d848c455041183ab87 Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Sat, 14 Sep 2024 13:14:36 +0200 Subject: [PATCH 024/131] fixing windows build --- runtime/Includes/Maths/Angles.inl | 2 -- runtime/Includes/PreCompiled.h | 12 ++++++------ runtime/Includes/Utils/AntiWindows.h | 5 +++++ runtime/Includes/Utils/AntiX11.h | 5 +++++ 4 files changed, 16 insertions(+), 8 deletions(-) create mode 100644 runtime/Includes/Utils/AntiWindows.h create mode 100644 runtime/Includes/Utils/AntiX11.h diff --git a/runtime/Includes/Maths/Angles.inl b/runtime/Includes/Maths/Angles.inl index 55091d0..df460ff 100644 --- a/runtime/Includes/Maths/Angles.inl +++ b/runtime/Includes/Maths/Angles.inl @@ -153,8 +153,6 @@ namespace mlx { double s, c; ::sincos(x, &s, &c); - - *sin = static_cast(s); *cos = static_cast(c); } diff --git a/runtime/Includes/PreCompiled.h b/runtime/Includes/PreCompiled.h index 02c724f..71dfc71 100644 --- a/runtime/Includes/PreCompiled.h +++ b/runtime/Includes/PreCompiled.h @@ -3,8 +3,6 @@ #define VK_NO_PROTOTYPES -#define Window X11Window // f*ck X11 - #include #include #include @@ -14,9 +12,6 @@ #include #include -#include -#include - #include #include #include @@ -47,6 +42,7 @@ #include #include #include + #if defined(MLX_PLAT_LINUX) #include // sincos #endif @@ -69,7 +65,11 @@ #include #endif -#undef Window +#include +#include + +#include +#include #include #include diff --git a/runtime/Includes/Utils/AntiWindows.h b/runtime/Includes/Utils/AntiWindows.h new file mode 100644 index 0000000..a7e1b45 
--- /dev/null +++ b/runtime/Includes/Utils/AntiWindows.h @@ -0,0 +1,5 @@ +#undef CreateWindow +#undef GetEnvironmentVariable +#undef GetSystemDirectory +#undef MemoryBarrier +#undef RemoveDirectory diff --git a/runtime/Includes/Utils/AntiX11.h b/runtime/Includes/Utils/AntiX11.h new file mode 100644 index 0000000..2068855 --- /dev/null +++ b/runtime/Includes/Utils/AntiX11.h @@ -0,0 +1,5 @@ +#undef Always +#undef Bool +#undef False +#undef None +#undef True From 25b605d7d927795ce7c2a2e0d9c4241613aacde6 Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Sat, 14 Sep 2024 13:20:31 +0200 Subject: [PATCH 025/131] fixing windows build x2 --- runtime/Includes/Utils/AntiWindows.h | 2 ++ 1 file changed, 2 insertions(+) diff --git a/runtime/Includes/Utils/AntiWindows.h b/runtime/Includes/Utils/AntiWindows.h index a7e1b45..4f9f600 100644 --- a/runtime/Includes/Utils/AntiWindows.h +++ b/runtime/Includes/Utils/AntiWindows.h @@ -3,3 +3,5 @@ #undef GetSystemDirectory #undef MemoryBarrier #undef RemoveDirectory +#undef min +#undef max From 94873df8fe7b94492250957a15351c72736fb818 Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Sun, 15 Sep 2024 07:52:25 +0200 Subject: [PATCH 026/131] fixing windows build x3 | replacing compile commands by compile flags --- compile_commands.json | 812 ------------------ compile_flags.txt | 12 + runtime/Includes/PreCompiled.h | 8 + runtime/Sources/Renderer/Memory.cpp | 12 +- runtime/Sources/Renderer/RenderCore.cpp | 5 +- .../Sources/Renderer/Vulkan/VulkanLoader.h | 2 - xmake.lua | 19 +- 7 files changed, 45 insertions(+), 825 deletions(-) delete mode 100644 compile_commands.json create mode 100644 compile_flags.txt diff --git a/compile_commands.json b/compile_commands.json deleted file mode 100644 index e9651ae..0000000 --- a/compile_commands.json +++ /dev/null @@ -1,812 +0,0 @@ -[ - { - "arguments": [ - "/usr/bin/clang++", - "-std=c++20", - "-O3", - "-fPIC", - "-Wall", - "-Wextra", - "-DSDL_MAIN_HANDLED", - "-Wno-error=", - "-g3", - "-D", - "DEBUG", - "-DIMAGE_OPTIMIZED", - "-I./includes", - "-I./runtime/Includes", - "-I./runtime/Sources", - "-I./third_party", - "-c", - "-o", - "objs/make/linux/runtime/Sources/Core/Application.o", - "runtime/Sources/Core/Application.cpp" - ], - "directory": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX", - "file": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/runtime/Sources/Core/Application.cpp", - "output": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/objs/make/linux/runtime/Sources/Core/Application.o" - }, - { - "arguments": [ - "/usr/bin/clang++", - "-std=c++20", - "-O3", - "-fPIC", - "-Wall", - "-Wextra", - "-DSDL_MAIN_HANDLED", - "-Wno-error=", - "-g3", - "-D", - "DEBUG", - "-DIMAGE_OPTIMIZED", - "-I./includes", - "-I./runtime/Includes", - "-I./runtime/Sources", - "-I./third_party", - "-c", - "-o", - "objs/make/linux/runtime/Sources/Core/Bridge.o", - "runtime/Sources/Core/Bridge.cpp" - ], - "directory": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX", - "file": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/runtime/Sources/Core/Bridge.cpp", - "output": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/objs/make/linux/runtime/Sources/Core/Bridge.o" - }, - { - "arguments": [ - "/usr/bin/clang++", - "-std=c++20", - "-O3", - "-fPIC", - "-Wall", - "-Wextra", - "-DSDL_MAIN_HANDLED", - "-Wno-error=", - "-g3", - "-D", - "DEBUG", - "-DIMAGE_OPTIMIZED", - "-I./includes", - "-I./runtime/Includes", - "-I./runtime/Sources", - "-I./third_party", - "-c", - "-o", - 
"objs/make/linux/runtime/Sources/Core/EventBus.o", - "runtime/Sources/Core/EventBus.cpp" - ], - "directory": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX", - "file": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/runtime/Sources/Core/EventBus.cpp", - "output": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/objs/make/linux/runtime/Sources/Core/EventBus.o" - }, - { - "arguments": [ - "/usr/bin/clang++", - "-std=c++20", - "-O3", - "-fPIC", - "-Wall", - "-Wextra", - "-DSDL_MAIN_HANDLED", - "-Wno-error=", - "-g3", - "-D", - "DEBUG", - "-DIMAGE_OPTIMIZED", - "-I./includes", - "-I./runtime/Includes", - "-I./runtime/Sources", - "-I./third_party", - "-c", - "-o", - "objs/make/linux/runtime/Sources/Core/EventListener.o", - "runtime/Sources/Core/EventListener.cpp" - ], - "directory": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX", - "file": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/runtime/Sources/Core/EventListener.cpp", - "output": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/objs/make/linux/runtime/Sources/Core/EventListener.o" - }, - { - "arguments": [ - "/usr/bin/clang++", - "-std=c++20", - "-O3", - "-fPIC", - "-Wall", - "-Wextra", - "-DSDL_MAIN_HANDLED", - "-Wno-error=", - "-g3", - "-D", - "DEBUG", - "-DIMAGE_OPTIMIZED", - "-I./includes", - "-I./runtime/Includes", - "-I./runtime/Sources", - "-I./third_party", - "-c", - "-o", - "objs/make/linux/runtime/Sources/Core/Fps.o", - "runtime/Sources/Core/Fps.cpp" - ], - "directory": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX", - "file": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/runtime/Sources/Core/Fps.cpp", - "output": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/objs/make/linux/runtime/Sources/Core/Fps.o" - }, - { - "arguments": [ - "/usr/bin/clang++", - "-std=c++20", - "-O3", - "-fPIC", - "-Wall", - "-Wextra", - "-DSDL_MAIN_HANDLED", - "-Wno-error=", - "-g3", - "-D", - "DEBUG", - "-DIMAGE_OPTIMIZED", - "-I./includes", - "-I./runtime/Includes", - "-I./runtime/Sources", - "-I./third_party", - "-c", - "-o", - "objs/make/linux/runtime/Sources/Core/Graphics.o", - "runtime/Sources/Core/Graphics.cpp" - ], - "directory": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX", - "file": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/runtime/Sources/Core/Graphics.cpp", - "output": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/objs/make/linux/runtime/Sources/Core/Graphics.o" - }, - { - "arguments": [ - "/usr/bin/clang++", - "-std=c++20", - "-O3", - "-fPIC", - "-Wall", - "-Wextra", - "-DSDL_MAIN_HANDLED", - "-Wno-error=", - "-g3", - "-D", - "DEBUG", - "-DIMAGE_OPTIMIZED", - "-I./includes", - "-I./runtime/Includes", - "-I./runtime/Sources", - "-I./third_party", - "-c", - "-o", - "objs/make/linux/runtime/Sources/Core/Logs.o", - "runtime/Sources/Core/Logs.cpp" - ], - "directory": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX", - "file": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/runtime/Sources/Core/Logs.cpp", - "output": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/objs/make/linux/runtime/Sources/Core/Logs.o" - }, - { - "arguments": [ - "/usr/bin/clang++", - "-std=c++20", - "-O3", - "-fPIC", - "-Wall", - "-Wextra", - "-DSDL_MAIN_HANDLED", - "-Wno-error=", - "-g3", - "-D", - "DEBUG", - "-DIMAGE_OPTIMIZED", - "-I./includes", - "-I./runtime/Includes", - "-I./runtime/Sources", - "-I./third_party", - "-c", - "-o", - "objs/make/linux/runtime/Sources/Core/Memory.o", - "runtime/Sources/Core/Memory.cpp" - ], - 
"directory": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX", - "file": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/runtime/Sources/Core/Memory.cpp", - "output": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/objs/make/linux/runtime/Sources/Core/Memory.o" - }, - { - "arguments": [ - "/usr/bin/clang++", - "-std=c++20", - "-O3", - "-fPIC", - "-Wall", - "-Wextra", - "-DSDL_MAIN_HANDLED", - "-Wno-error=", - "-g3", - "-D", - "DEBUG", - "-DIMAGE_OPTIMIZED", - "-I./includes", - "-I./runtime/Includes", - "-I./runtime/Sources", - "-I./third_party", - "-c", - "-o", - "objs/make/linux/runtime/Sources/Core/Profiler.o", - "runtime/Sources/Core/Profiler.cpp" - ], - "directory": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX", - "file": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/runtime/Sources/Core/Profiler.cpp", - "output": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/objs/make/linux/runtime/Sources/Core/Profiler.o" - }, - { - "arguments": [ - "/usr/bin/clang++", - "-std=c++20", - "-O3", - "-fPIC", - "-Wall", - "-Wextra", - "-DSDL_MAIN_HANDLED", - "-Wno-error=", - "-g3", - "-D", - "DEBUG", - "-DIMAGE_OPTIMIZED", - "-I./includes", - "-I./runtime/Includes", - "-I./runtime/Sources", - "-I./third_party", - "-c", - "-o", - "objs/make/linux/runtime/Sources/Core/SDLManager.o", - "runtime/Sources/Core/SDLManager.cpp" - ], - "directory": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX", - "file": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/runtime/Sources/Core/SDLManager.cpp", - "output": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/objs/make/linux/runtime/Sources/Core/SDLManager.o" - }, - { - "arguments": [ - "/usr/bin/clang++", - "-std=c++20", - "-O3", - "-fPIC", - "-Wall", - "-Wextra", - "-DSDL_MAIN_HANDLED", - "-Wno-error=", - "-g3", - "-D", - "DEBUG", - "-DIMAGE_OPTIMIZED", - "-I./includes", - "-I./runtime/Includes", - "-I./runtime/Sources", - "-I./third_party", - "-c", - "-o", - "objs/make/linux/runtime/Sources/Core/UUID.o", - "runtime/Sources/Core/UUID.cpp" - ], - "directory": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX", - "file": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/runtime/Sources/Core/UUID.cpp", - "output": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/objs/make/linux/runtime/Sources/Core/UUID.o" - }, - { - "arguments": [ - "/usr/bin/clang++", - "-std=c++20", - "-O3", - "-fPIC", - "-Wall", - "-Wextra", - "-DSDL_MAIN_HANDLED", - "-Wno-error=", - "-g3", - "-D", - "DEBUG", - "-DIMAGE_OPTIMIZED", - "-I./includes", - "-I./runtime/Includes", - "-I./runtime/Sources", - "-I./third_party", - "-c", - "-o", - "objs/make/linux/runtime/Sources/Graphics/Mesh.o", - "runtime/Sources/Graphics/Mesh.cpp" - ], - "directory": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX", - "file": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/runtime/Sources/Graphics/Mesh.cpp", - "output": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/objs/make/linux/runtime/Sources/Graphics/Mesh.o" - }, - { - "arguments": [ - "/usr/bin/clang++", - "-std=c++20", - "-O3", - "-fPIC", - "-Wall", - "-Wextra", - "-DSDL_MAIN_HANDLED", - "-Wno-error=", - "-g3", - "-D", - "DEBUG", - "-DIMAGE_OPTIMIZED", - "-I./includes", - "-I./runtime/Includes", - "-I./runtime/Sources", - "-I./third_party", - "-c", - "-o", - "objs/make/linux/runtime/Sources/Graphics/PixelPutManager.o", - "runtime/Sources/Graphics/PixelPutManager.cpp" - ], - "directory": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX", - 
"file": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/runtime/Sources/Graphics/PixelPutManager.cpp", - "output": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/objs/make/linux/runtime/Sources/Graphics/PixelPutManager.o" - }, - { - "arguments": [ - "/usr/bin/clang++", - "-std=c++20", - "-O3", - "-fPIC", - "-Wall", - "-Wextra", - "-DSDL_MAIN_HANDLED", - "-Wno-error=", - "-g3", - "-D", - "DEBUG", - "-DIMAGE_OPTIMIZED", - "-I./includes", - "-I./runtime/Includes", - "-I./runtime/Sources", - "-I./third_party", - "-c", - "-o", - "objs/make/linux/runtime/Sources/Graphics/Scene.o", - "runtime/Sources/Graphics/Scene.cpp" - ], - "directory": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX", - "file": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/runtime/Sources/Graphics/Scene.cpp", - "output": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/objs/make/linux/runtime/Sources/Graphics/Scene.o" - }, - { - "arguments": [ - "/usr/bin/clang++", - "-std=c++20", - "-O3", - "-fPIC", - "-Wall", - "-Wextra", - "-DSDL_MAIN_HANDLED", - "-Wno-error=", - "-g3", - "-D", - "DEBUG", - "-DIMAGE_OPTIMIZED", - "-I./includes", - "-I./runtime/Includes", - "-I./runtime/Sources", - "-I./third_party", - "-c", - "-o", - "objs/make/linux/runtime/Sources/Graphics/Sprite.o", - "runtime/Sources/Graphics/Sprite.cpp" - ], - "directory": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX", - "file": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/runtime/Sources/Graphics/Sprite.cpp", - "output": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/objs/make/linux/runtime/Sources/Graphics/Sprite.o" - }, - { - "arguments": [ - "/usr/bin/clang++", - "-std=c++20", - "-O3", - "-fPIC", - "-Wall", - "-Wextra", - "-DSDL_MAIN_HANDLED", - "-Wno-error=", - "-g3", - "-D", - "DEBUG", - "-DIMAGE_OPTIMIZED", - "-I./includes", - "-I./runtime/Includes", - "-I./runtime/Sources", - "-I./third_party", - "-c", - "-o", - "objs/make/linux/runtime/Sources/Platform/Inputs.o", - "runtime/Sources/Platform/Inputs.cpp" - ], - "directory": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX", - "file": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/runtime/Sources/Platform/Inputs.cpp", - "output": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/objs/make/linux/runtime/Sources/Platform/Inputs.o" - }, - { - "arguments": [ - "/usr/bin/clang++", - "-std=c++20", - "-O3", - "-fPIC", - "-Wall", - "-Wextra", - "-DSDL_MAIN_HANDLED", - "-Wno-error=", - "-g3", - "-D", - "DEBUG", - "-DIMAGE_OPTIMIZED", - "-I./includes", - "-I./runtime/Includes", - "-I./runtime/Sources", - "-I./third_party", - "-c", - "-o", - "objs/make/linux/runtime/Sources/Platform/Window.o", - "runtime/Sources/Platform/Window.cpp" - ], - "directory": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX", - "file": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/runtime/Sources/Platform/Window.cpp", - "output": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/objs/make/linux/runtime/Sources/Platform/Window.o" - }, - { - "arguments": [ - "/usr/bin/clang++", - "-std=c++20", - "-O3", - "-fPIC", - "-Wall", - "-Wextra", - "-DSDL_MAIN_HANDLED", - "-Wno-error=", - "-g3", - "-D", - "DEBUG", - "-DIMAGE_OPTIMIZED", - "-I./includes", - "-I./runtime/Includes", - "-I./runtime/Sources", - "-I./third_party", - "-c", - "-o", - "objs/make/linux/runtime/Sources/Renderer/Buffer.o", - "runtime/Sources/Renderer/Buffer.cpp" - ], - "directory": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX", - "file": 
"/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/runtime/Sources/Renderer/Buffer.cpp", - "output": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/objs/make/linux/runtime/Sources/Renderer/Buffer.o" - }, - { - "arguments": [ - "/usr/bin/clang++", - "-std=c++20", - "-O3", - "-fPIC", - "-Wall", - "-Wextra", - "-DSDL_MAIN_HANDLED", - "-Wno-error=", - "-g3", - "-D", - "DEBUG", - "-DIMAGE_OPTIMIZED", - "-I./includes", - "-I./runtime/Includes", - "-I./runtime/Sources", - "-I./third_party", - "-c", - "-o", - "objs/make/linux/runtime/Sources/Renderer/Descriptor.o", - "runtime/Sources/Renderer/Descriptor.cpp" - ], - "directory": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX", - "file": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/runtime/Sources/Renderer/Descriptor.cpp", - "output": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/objs/make/linux/runtime/Sources/Renderer/Descriptor.o" - }, - { - "arguments": [ - "/usr/bin/clang++", - "-std=c++20", - "-O3", - "-fPIC", - "-Wall", - "-Wextra", - "-DSDL_MAIN_HANDLED", - "-Wno-error=", - "-g3", - "-D", - "DEBUG", - "-DIMAGE_OPTIMIZED", - "-I./includes", - "-I./runtime/Includes", - "-I./runtime/Sources", - "-I./third_party", - "-c", - "-o", - "objs/make/linux/runtime/Sources/Renderer/Image.o", - "runtime/Sources/Renderer/Image.cpp" - ], - "directory": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX", - "file": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/runtime/Sources/Renderer/Image.cpp", - "output": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/objs/make/linux/runtime/Sources/Renderer/Image.o" - }, - { - "arguments": [ - "/usr/bin/clang++", - "-std=c++20", - "-O3", - "-fPIC", - "-Wall", - "-Wextra", - "-DSDL_MAIN_HANDLED", - "-Wno-error=", - "-g3", - "-D", - "DEBUG", - "-DIMAGE_OPTIMIZED", - "-I./includes", - "-I./runtime/Includes", - "-I./runtime/Sources", - "-I./third_party", - "-c", - "-o", - "objs/make/linux/runtime/Sources/Renderer/Memory.o", - "runtime/Sources/Renderer/Memory.cpp" - ], - "directory": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX", - "file": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/runtime/Sources/Renderer/Memory.cpp", - "output": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/objs/make/linux/runtime/Sources/Renderer/Memory.o" - }, - { - "arguments": [ - "/usr/bin/clang++", - "-std=c++20", - "-O3", - "-fPIC", - "-Wall", - "-Wextra", - "-DSDL_MAIN_HANDLED", - "-Wno-error=", - "-g3", - "-D", - "DEBUG", - "-DIMAGE_OPTIMIZED", - "-I./includes", - "-I./runtime/Includes", - "-I./runtime/Sources", - "-I./third_party", - "-c", - "-o", - "objs/make/linux/runtime/Sources/Renderer/RenderCore.o", - "runtime/Sources/Renderer/RenderCore.cpp" - ], - "directory": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX", - "file": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/runtime/Sources/Renderer/RenderCore.cpp", - "output": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/objs/make/linux/runtime/Sources/Renderer/RenderCore.o" - }, - { - "arguments": [ - "/usr/bin/clang++", - "-std=c++20", - "-O3", - "-fPIC", - "-Wall", - "-Wextra", - "-DSDL_MAIN_HANDLED", - "-Wno-error=", - "-g3", - "-D", - "DEBUG", - "-DIMAGE_OPTIMIZED", - "-I./includes", - "-I./runtime/Includes", - "-I./runtime/Sources", - "-I./third_party", - "-c", - "-o", - "objs/make/linux/runtime/Sources/Renderer/Renderer.o", - "runtime/Sources/Renderer/Renderer.cpp" - ], - "directory": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX", - "file": 
"/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/runtime/Sources/Renderer/Renderer.cpp", - "output": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/objs/make/linux/runtime/Sources/Renderer/Renderer.o" - }, - { - "arguments": [ - "/usr/bin/clang++", - "-std=c++20", - "-O3", - "-fPIC", - "-Wall", - "-Wextra", - "-DSDL_MAIN_HANDLED", - "-Wno-error=", - "-g3", - "-D", - "DEBUG", - "-DIMAGE_OPTIMIZED", - "-I./includes", - "-I./runtime/Includes", - "-I./runtime/Sources", - "-I./third_party", - "-c", - "-o", - "objs/make/linux/runtime/Sources/Renderer/SceneRenderer.o", - "runtime/Sources/Renderer/SceneRenderer.cpp" - ], - "directory": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX", - "file": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/runtime/Sources/Renderer/SceneRenderer.cpp", - "output": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/objs/make/linux/runtime/Sources/Renderer/SceneRenderer.o" - }, - { - "arguments": [ - "/usr/bin/clang++", - "-std=c++20", - "-O3", - "-fPIC", - "-Wall", - "-Wextra", - "-DSDL_MAIN_HANDLED", - "-Wno-error=", - "-g3", - "-D", - "DEBUG", - "-DIMAGE_OPTIMIZED", - "-I./includes", - "-I./runtime/Includes", - "-I./runtime/Sources", - "-I./third_party", - "-c", - "-o", - "objs/make/linux/runtime/Sources/Renderer/Pipelines/Graphics.o", - "runtime/Sources/Renderer/Pipelines/Graphics.cpp" - ], - "directory": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX", - "file": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/runtime/Sources/Renderer/Pipelines/Graphics.cpp", - "output": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/objs/make/linux/runtime/Sources/Renderer/Pipelines/Graphics.o" - }, - { - "arguments": [ - "/usr/bin/clang++", - "-std=c++20", - "-O3", - "-fPIC", - "-Wall", - "-Wextra", - "-DSDL_MAIN_HANDLED", - "-Wno-error=", - "-g3", - "-D", - "DEBUG", - "-DIMAGE_OPTIMIZED", - "-I./includes", - "-I./runtime/Includes", - "-I./runtime/Sources", - "-I./third_party", - "-c", - "-o", - "objs/make/linux/runtime/Sources/Renderer/Pipelines/Shader.o", - "runtime/Sources/Renderer/Pipelines/Shader.cpp" - ], - "directory": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX", - "file": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/runtime/Sources/Renderer/Pipelines/Shader.cpp", - "output": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/objs/make/linux/runtime/Sources/Renderer/Pipelines/Shader.o" - }, - { - "arguments": [ - "/usr/bin/clang++", - "-std=c++20", - "-O3", - "-fPIC", - "-Wall", - "-Wextra", - "-DSDL_MAIN_HANDLED", - "-Wno-error=", - "-g3", - "-D", - "DEBUG", - "-DIMAGE_OPTIMIZED", - "-I./includes", - "-I./runtime/Includes", - "-I./runtime/Sources", - "-I./third_party", - "-c", - "-o", - "objs/make/linux/runtime/Sources/Renderer/RenderPasses/2DPass.o", - "runtime/Sources/Renderer/RenderPasses/2DPass.cpp" - ], - "directory": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX", - "file": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/runtime/Sources/Renderer/RenderPasses/2DPass.cpp", - "output": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/objs/make/linux/runtime/Sources/Renderer/RenderPasses/2DPass.o" - }, - { - "arguments": [ - "/usr/bin/clang++", - "-std=c++20", - "-O3", - "-fPIC", - "-Wall", - "-Wextra", - "-DSDL_MAIN_HANDLED", - "-Wno-error=", - "-g3", - "-D", - "DEBUG", - "-DIMAGE_OPTIMIZED", - "-I./includes", - "-I./runtime/Includes", - "-I./runtime/Sources", - "-I./third_party", - "-c", - "-o", - 
"objs/make/linux/runtime/Sources/Renderer/RenderPasses/FinalPass.o", - "runtime/Sources/Renderer/RenderPasses/FinalPass.cpp" - ], - "directory": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX", - "file": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/runtime/Sources/Renderer/RenderPasses/FinalPass.cpp", - "output": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/objs/make/linux/runtime/Sources/Renderer/RenderPasses/FinalPass.o" - }, - { - "arguments": [ - "/usr/bin/clang++", - "-std=c++20", - "-O3", - "-fPIC", - "-Wall", - "-Wextra", - "-DSDL_MAIN_HANDLED", - "-Wno-error=", - "-g3", - "-D", - "DEBUG", - "-DIMAGE_OPTIMIZED", - "-I./includes", - "-I./runtime/Includes", - "-I./runtime/Sources", - "-I./third_party", - "-c", - "-o", - "objs/make/linux/runtime/Sources/Renderer/RenderPasses/Passes.o", - "runtime/Sources/Renderer/RenderPasses/Passes.cpp" - ], - "directory": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX", - "file": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/runtime/Sources/Renderer/RenderPasses/Passes.cpp", - "output": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/objs/make/linux/runtime/Sources/Renderer/RenderPasses/Passes.o" - }, - { - "arguments": [ - "/usr/bin/clang++", - "-std=c++20", - "-O3", - "-fPIC", - "-Wall", - "-Wextra", - "-DSDL_MAIN_HANDLED", - "-Wno-error=", - "-g3", - "-D", - "DEBUG", - "-DIMAGE_OPTIMIZED", - "-I./includes", - "-I./runtime/Includes", - "-I./runtime/Sources", - "-I./third_party", - "-c", - "-o", - "objs/make/linux/runtime/Sources/Renderer/Vulkan/VulkanLoader.o", - "runtime/Sources/Renderer/Vulkan/VulkanLoader.cpp" - ], - "directory": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX", - "file": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/runtime/Sources/Renderer/Vulkan/VulkanLoader.cpp", - "output": "/home/kbz8/Documents/Code/42/Other/MacroLibX/MacroLibX/objs/make/linux/runtime/Sources/Renderer/Vulkan/VulkanLoader.o" - } -] diff --git a/compile_flags.txt b/compile_flags.txt new file mode 100644 index 0000000..00cbbea --- /dev/null +++ b/compile_flags.txt @@ -0,0 +1,12 @@ +-xc++ +-std=c++20 +-Iruntime/Includes +-Iruntime/Sources +-Iincludes +-Ithird_party +-DMLX_BUILD +-DSDL_MAIN_HANDLED +-DDEBUG +-DIMAGE_OPTIMIZED +-D_REENTRANT +-isystem/usr/include/SDL2 diff --git a/runtime/Includes/PreCompiled.h b/runtime/Includes/PreCompiled.h index 71dfc71..243e128 100644 --- a/runtime/Includes/PreCompiled.h +++ b/runtime/Includes/PreCompiled.h @@ -47,6 +47,11 @@ #include // sincos #endif +#define VMA_STATIC_VULKAN_FUNCTIONS 0 +#define VMA_DYNAMIC_VULKAN_FUNCTIONS 0 +#define VMA_VULKAN_VERSION 1000000 +#define VMA_ASSERT(expr) ((void)0) + #ifdef MLX_COMPILER_CLANG #pragma clang diagnostic push #pragma clang diagnostic ignored "-Weverything" @@ -69,6 +74,9 @@ #include #include +#ifdef DEBUG + #define KVF_ENABLE_VALIDATION_LAYERS +#endif #include #include diff --git a/runtime/Sources/Renderer/Memory.cpp b/runtime/Sources/Renderer/Memory.cpp index 4b22069..7812525 100644 --- a/runtime/Sources/Renderer/Memory.cpp +++ b/runtime/Sources/Renderer/Memory.cpp @@ -1,15 +1,11 @@ #include +#include -#define VMA_STATIC_VULKAN_FUNCTIONS 0 -#define VMA_DYNAMIC_VULKAN_FUNCTIONS 0 -#define VMA_VULKAN_VERSION 1000000 -#define VMA_ASSERT(expr) ((void)0) #define VMA_IMPLEMENTATION - #ifdef MLX_COMPILER_CLANG #pragma clang diagnostic push #pragma clang diagnostic ignored "-Weverything" - #include + #include #pragma clang diagnostic pop #elif defined(MLX_COMPILER_GCC) #pragma GCC diagnostic push @@ -18,10 
+14,10 @@ #pragma GCC diagnostic ignored "-Wunused-parameter" #pragma GCC diagnostic ignored "-Wunused-variable" #pragma GCC diagnostic ignored "-Wparentheses" - #include + #include #pragma GCC diagnostic pop #else - #include + #include #endif #include diff --git a/runtime/Sources/Renderer/RenderCore.cpp b/runtime/Sources/Renderer/RenderCore.cpp index 80ca9da..989f138 100644 --- a/runtime/Sources/Renderer/RenderCore.cpp +++ b/runtime/Sources/Renderer/RenderCore.cpp @@ -1,8 +1,11 @@ +#include + #define KVF_IMPLEMENTATION #ifdef DEBUG #define KVF_ENABLE_VALIDATION_LAYERS #endif -#include + +#include #include #include diff --git a/runtime/Sources/Renderer/Vulkan/VulkanLoader.h b/runtime/Sources/Renderer/Vulkan/VulkanLoader.h index 2d2f320..73e073b 100644 --- a/runtime/Sources/Renderer/Vulkan/VulkanLoader.h +++ b/runtime/Sources/Renderer/Vulkan/VulkanLoader.h @@ -14,8 +14,6 @@ #else typedef int (__stdcall* FARPROC)(void); #endif -#else - #include #endif namespace mlx diff --git a/xmake.lua b/xmake.lua index 59e4fe5..fb8e175 100644 --- a/xmake.lua +++ b/xmake.lua @@ -1,6 +1,6 @@ -- Global settings -add_requires("libsdl", { configs = { sdlmain = false }}) +add_requires("libsdl", { configs = { sdlmain = false } }) add_rules("mode.debug", "mode.release") set_languages("cxx20", "c99") @@ -61,7 +61,22 @@ target("mlx") if is_mode("debug") then add_defines("DEBUG") end -target_end() -- optional but I think the code is cleaner with this -- optional but I think the code is cleaner with this + + on_clean(function(target) + if target:objectfiles() then + for _, file in ipairs(target:objectfiles()) do + if os.exists(file) then + print("Removing " .. file) + os.rm(file) + end + end + end + if target:targetfile() and os.exists(target:targetfile()) then + print("Removing " .. 
target:targetfile()) + os.rm(target:targetfile()) + end + end) +target_end() target("Test") set_default(false) From 7243c67c3e7b46b8832b9c27e51f6ad0e1a217ee Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Sun, 15 Sep 2024 07:56:28 +0200 Subject: [PATCH 027/131] AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAHHHHHHHHHHHHH --- runtime/Includes/PreCompiled.h | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/runtime/Includes/PreCompiled.h b/runtime/Includes/PreCompiled.h index 243e128..59cf651 100644 --- a/runtime/Includes/PreCompiled.h +++ b/runtime/Includes/PreCompiled.h @@ -43,6 +43,10 @@ #include #include +#ifndef MLX_PLAT_WINDOWS + #include +#endif + #if defined(MLX_PLAT_LINUX) #include // sincos #endif From 7079e1e42d89c5afb1c07c9e89f6a6a1d7f9c10a Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Sun, 15 Sep 2024 08:56:22 +0200 Subject: [PATCH 028/131] implementing mouse move and get screen size, removing warnings --- Makefile | 10 +++++ runtime/Includes/Core/Application.inl | 54 +++++++++++++++------------ runtime/Includes/Core/Format.h | 3 -- runtime/Includes/Core/Format.inl | 2 - runtime/Includes/Core/Graphics.h | 3 -- runtime/Includes/Core/Graphics.inl | 6 +++ runtime/Includes/Core/Logs.h | 2 +- runtime/Includes/Core/SDLManager.h | 2 + runtime/Includes/Platform/Window.h | 3 ++ runtime/Includes/PreCompiled.h | 4 ++ runtime/Sources/Core/Bridge.cpp | 12 ++++-- runtime/Sources/Core/Graphics.cpp | 4 -- runtime/Sources/Core/SDLManager.cpp | 14 +++++++ xmake.lua | 7 ++++ 14 files changed, 85 insertions(+), 41 deletions(-) diff --git a/Makefile b/Makefile index 27f0102..890e9a4 100644 --- a/Makefile +++ b/Makefile @@ -8,6 +8,8 @@ IMAGES_OPTIMIZED ?= true FORCE_INTEGRATED_GPU ?= false GRAPHICS_MEMORY_DUMP ?= false PROFILER ?= false +FORCE_WAYLAND ?= false +DISABLE_ALL_SAFETIES ?= false _ENABLEDFLAGS = SRCS = $(wildcard $(addsuffix /*.cpp, runtime/Sources/Core)) @@ -57,6 +59,14 @@ ifeq ($(PROFILER), true) _ENABLEDFLAGS += PROFILER endif +ifeq ($(FORCE_WAYLAND), true) + _ENABLEDFLAGS += FORCE_WAYLAND +endif + +ifeq ($(DISABLE_ALL_SAFETIES), true) + _ENABLEDFLAGS += DISABLE_ALL_SAFETIES +endif + CXXFLAGS += $(addprefix -D, $(_ENABLEDFLAGS)) RM = rm -rf diff --git a/runtime/Includes/Core/Application.inl b/runtime/Includes/Core/Application.inl index 7e6cfd1..c76adfb 100644 --- a/runtime/Includes/Core/Application.inl +++ b/runtime/Includes/Core/Application.inl @@ -1,29 +1,34 @@ #pragma once #include -#define CHECK_WINDOW_PTR(win) \ - if(win == nullptr) \ - { \ - Error("invalid window ptr (NULL)"); \ - return; \ - } \ - else if(*static_cast(win) < 0 || *static_cast(win) > static_cast(m_graphics.size()))\ - { \ - Error("invalid window ptr"); \ - return; \ - } else {} +#ifndef DISABLE_ALL_SAFETIES + #define CHECK_WINDOW_PTR(win) \ + if(win == nullptr) \ + { \ + Error("invalid window ptr (NULL)"); \ + return; \ + } \ + else if(std::find_if(m_graphics.begin(), m_graphics.end(), [win](const std::unique_ptr& gs){ return *static_cast(win) == gs->GetID(); } != m_graphics.end())) \ + { \ + Error("invalid window ptr"); \ + return; \ + } else {} -#define CHECK_IMAGE_PTR(img, retval) \ - if(img == nullptr) \ - { \ - Error("invalid image ptr (NULL)"); \ - retval; \ - } \ - else if(!m_image_registry.IsTextureKnown(static_cast(img))) \ - { \ - Error("invalid image ptr"); \ - retval; \ - } else {} + #define CHECK_IMAGE_PTR(img, retval) \ + if(img == nullptr) \ + { \ + Error("invalid image ptr (NULL)"); \ + retval; \ + } \ + else if(!m_image_registry.IsTextureKnown(static_cast(img))) \ + { \ + Error("invalid image 
ptr"); \ + retval; \ + } else {} +#else + #define CHECK_WINDOW_PTR(win) + #define CHECK_IMAGE_PTR(img, retval) +#endif namespace mlx { @@ -41,6 +46,7 @@ namespace mlx Warning("trying to move the mouse relative to a window that is targeting an image and not a real window, this is not allowed (move ignored)"); return; } + m_graphics[*static_cast(win)]->GetWindow()->MoveMouse(x, y); } void Application::OnEvent(Handle win, int event, int (*funct_ptr)(int, void*), void* param) noexcept @@ -57,8 +63,7 @@ namespace mlx void Application::GetScreenSize(Handle win, int* w, int* h) noexcept { CHECK_WINDOW_PTR(win); - *w = 0; - *h = 0; + m_graphics[*static_cast(win)]->GetWindow()->GetScreenSizeWindowIsOn(x, y); } void Application::SetFPSCap(std::uint32_t fps) noexcept @@ -96,6 +101,7 @@ namespace mlx MLX_PROFILE_FUNCTION(); CHECK_WINDOW_PTR(win); m_graphics[*static_cast(win)].reset(); + m_graphics.erase(m_graphics.begin() + *static_cast(win)); } void Application::PixelPut(Handle win, int x, int y, std::uint32_t color) const noexcept diff --git a/runtime/Includes/Core/Format.h b/runtime/Includes/Core/Format.h index e948709..3fdc6bc 100644 --- a/runtime/Includes/Core/Format.h +++ b/runtime/Includes/Core/Format.h @@ -1,9 +1,6 @@ #ifndef __MLX_FORMAT__ #define __MLX_FORMAT__ -#include -#include - namespace mlx { template diff --git a/runtime/Includes/Core/Format.inl b/runtime/Includes/Core/Format.inl index 3bc490a..0b5b3d8 100644 --- a/runtime/Includes/Core/Format.inl +++ b/runtime/Includes/Core/Format.inl @@ -1,7 +1,5 @@ #pragma once #include -#include -#include namespace mlx { diff --git a/runtime/Includes/Core/Graphics.h b/runtime/Includes/Core/Graphics.h index 28c4653..5c8de28 100644 --- a/runtime/Includes/Core/Graphics.h +++ b/runtime/Includes/Core/Graphics.h @@ -46,9 +46,6 @@ namespace mlx std::uint64_t m_current_depth = 0; - std::size_t m_width = 0; - std::size_t m_height = 0; - int m_id; bool m_has_window; diff --git a/runtime/Includes/Core/Graphics.inl b/runtime/Includes/Core/Graphics.inl index 84a1e96..c0f4c02 100644 --- a/runtime/Includes/Core/Graphics.inl +++ b/runtime/Includes/Core/Graphics.inl @@ -25,6 +25,10 @@ namespace mlx void GraphicsSupport::StringPut(int x, int y, std::uint32_t color, std::string str) { MLX_PROFILE_FUNCTION(); + (void)x; + (void)y; + (void)color; + (void)str; } void GraphicsSupport::TexturePut(NonOwningPtr texture, int x, int y) @@ -44,6 +48,8 @@ namespace mlx void GraphicsSupport::LoadFont(const std::filesystem::path& filepath, float scale) { MLX_PROFILE_FUNCTION(); + (void)filepath; + (void)scale; } void GraphicsSupport::TryEraseSpritesInScene(NonOwningPtr texture) noexcept diff --git a/runtime/Includes/Core/Logs.h b/runtime/Includes/Core/Logs.h index 42bfe7c..4b3746b 100644 --- a/runtime/Includes/Core/Logs.h +++ b/runtime/Includes/Core/Logs.h @@ -39,7 +39,7 @@ namespace mlx void Assert(bool cond, unsigned int line, std::string_view file, std::string_view function, std::string message, const Args&... args); #else template - void Assert(bool cond, unsigned int line, std::string_view file, std::string_view function, std::string message, const Args&... args) {} + void Assert([[maybe_unused]] bool cond, [[maybe_unused]] unsigned int line, [[maybe_unused]] std::string_view file, [[maybe_unused]] std::string_view function, [[maybe_unused]] std::string message, [[maybe_unused]] const Args&... 
 args) {}
 #endif
 }
diff --git a/runtime/Includes/Core/SDLManager.h b/runtime/Includes/Core/SDLManager.h
index cdc514c..df49d13 100644
--- a/runtime/Includes/Core/SDLManager.h
+++ b/runtime/Includes/Core/SDLManager.h
@@ -19,6 +19,8 @@ namespace mlx
 			VkSurfaceKHR CreateVulkanSurface(Handle window, VkInstance instance) const noexcept;
 			std::vector GetRequiredVulkanInstanceExtentions(Handle window) const noexcept;
 			Vec2ui GetVulkanDrawableSize(Handle window) const noexcept;
+			void MoveMouseOnWindow(Handle window, int x, int y) const noexcept;
+			void GetScreenSizeWindowIsOn(Handle window, int* x, int* y) const noexcept;
 			inline void SetEventCallback(func::function functor, void* userdata) { f_callback = std::move(functor); p_callback_data = userdata; }
diff --git a/runtime/Includes/Platform/Window.h b/runtime/Includes/Platform/Window.h
index 24adacf..5895a70 100644
--- a/runtime/Includes/Platform/Window.h
+++ b/runtime/Includes/Platform/Window.h
@@ -16,6 +16,9 @@ namespace mlx
 			inline int GetHeight() const noexcept { return m_height; }
 			inline std::uint32_t GetID() const noexcept { return m_id; }
 
+			inline void MoveMouse(int x, int y) { SDLManager::Get().MoveMouseOnWindow(p_window, x, y); }
+			inline void GetScreenSizeWindowIsOn(int* x, int* y) { SDLManager::Get().GetScreenSizeWindowIsOn(p_window, x, y); }
+
 			inline VkSurfaceKHR CreateVulkanSurface(VkInstance instance) const noexcept { return SDLManager::Get().CreateVulkanSurface(p_window, instance); }
 			inline std::vector GetRequiredVulkanInstanceExtentions() const noexcept { return SDLManager::Get().GetRequiredVulkanInstanceExtentions(p_window); }
 			inline Vec2ui GetVulkanDrawableSize() const noexcept { return SDLManager::Get().GetVulkanDrawableSize(p_window); }
diff --git a/runtime/Includes/PreCompiled.h b/runtime/Includes/PreCompiled.h
index 59cf651..acaac11 100644
--- a/runtime/Includes/PreCompiled.h
+++ b/runtime/Includes/PreCompiled.h
@@ -42,6 +42,10 @@
 #include
 #include
 #include
+#include
+#include
+#include
+#include
 
 #ifndef MLX_PLAT_WINDOWS
 	#include
diff --git a/runtime/Sources/Core/Bridge.cpp b/runtime/Sources/Core/Bridge.cpp
index 89d741d..deff1a2 100644
--- a/runtime/Sources/Core/Bridge.cpp
+++ b/runtime/Sources/Core/Bridge.cpp
@@ -7,10 +7,14 @@
 static void* __mlx_ptr = nullptr;
 
-#define MLX_CHECK_APPLICATION_POINTER(ptr) \
-	if(ptr != __mlx_ptr || ptr == NULL) \
-		mlx::FatalError("invalid mlx pointer passed to '%'", MLX_FUNC_SIG); \
-	else {} // just to avoid issues with possible if-else statements outside this macro
+#ifndef DISABLE_ALL_SAFETIES
+	#define MLX_CHECK_APPLICATION_POINTER(ptr) \
+		if(ptr != __mlx_ptr || ptr == NULL) \
+			mlx::FatalError("invalid mlx pointer passed to '%'", MLX_FUNC_SIG); \
+		else {} // just to avoid issues with possible if-else statements outside this macro
+#else
+	#define MLX_CHECK_APPLICATION_POINTER(ptr)
+#endif
 
 extern "C"
 {
diff --git a/runtime/Sources/Core/Graphics.cpp b/runtime/Sources/Core/Graphics.cpp
index 52f6137..a2dbd33 100644
--- a/runtime/Sources/Core/Graphics.cpp
+++ b/runtime/Sources/Core/Graphics.cpp
@@ -6,8 +6,6 @@ namespace mlx
 	GraphicsSupport::GraphicsSupport(std::size_t w, std::size_t h, NonOwningPtr render_target, int id) :
 		m_put_pixel_manager(&m_renderer),
 		p_window(nullptr),
-		m_width(w),
-		m_height(h),
 		m_id(id),
 		m_has_window(false)
 	{
@@ -24,8 +22,6 @@ namespace mlx
 	GraphicsSupport::GraphicsSupport(std::size_t w, std::size_t h, std::string title, int id) :
 		m_put_pixel_manager(&m_renderer),
 		p_window(std::make_shared(w, h, title)),
-		m_width(w),
-		m_height(h),
 		m_id(id),
 		m_has_window(true)
 	{
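// A minimal, self-contained sketch of the build-time safety toggle this patch introduces.
// Hedged illustration only: CHECK_HANDLE and UseHandle below are made-up names; the real
// macros are MLX_CHECK_APPLICATION_POINTER in Bridge.cpp and CHECK_WINDOW_PTR / CHECK_IMAGE_PTR
// in Application.inl, and they validate handles against the library's own registries.
// Without DISABLE_ALL_SAFETIES the macro expands to a runtime check; with it defined (via the
// new DISABLE_ALL_SAFETIES Makefile variable or the xmake "disable_all_safeties" option added
// in this patch), every call site compiles down to nothing.

#include <cstdio>

#ifndef DISABLE_ALL_SAFETIES
	#define CHECK_HANDLE(ptr) \
		if((ptr) == nullptr) \
		{ \
			std::fprintf(stderr, "invalid handle (NULL)\n"); \
			return; \
		} else {} // same trick as the real macros: stays safe inside surrounding if-else chains
#else
	#define CHECK_HANDLE(ptr)
#endif

static void UseHandle(void* handle)
{
	CHECK_HANDLE(handle); // expands to a check in safe builds, to nothing otherwise
	std::printf("using handle %p\n", handle);
}

int main()
{
	UseHandle(nullptr); // rejected in safe builds, goes through unchecked when safeties are disabled
	int dummy = 0;
	UseHandle(&dummy);
	return 0;
}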
diff --git a/runtime/Sources/Core/SDLManager.cpp b/runtime/Sources/Core/SDLManager.cpp index 44d9e44..8619340 100644 --- a/runtime/Sources/Core/SDLManager.cpp +++ b/runtime/Sources/Core/SDLManager.cpp @@ -159,6 +159,20 @@ namespace mlx return Vec2ui{ extent }; } + void SDLManager::MoveMouseOnWindow(Handle window, int x, int y) const noexcept + { + SDL_WarpMouseInWindow(static_cast(window), x, y); + SDL_PumpEvents(); + } + + void SDLManager::GetScreenSizeWindowIsOn(Handle window, int* x, int* y) const noexcept + { + SDL_DisplayMode DM; + SDL_GetDesktopDisplayMode(SDL_GetWindowDisplayIndex(static_cast(window)), &DM); + *x = DM.w; + *y = DM.h; + } + std::int32_t SDLManager::GetX() const noexcept { int dummy; diff --git a/xmake.lua b/xmake.lua index fb8e175..857433a 100644 --- a/xmake.lua +++ b/xmake.lua @@ -37,6 +37,11 @@ option("force_wayland") add_defines("FORCE_WAYLAND") option_end() +option("disable_all_safeties") + set_default(false) + add_defines("DISABLE_ALL_SAFETIES") +option_end() + -- Targets target("mlx") @@ -48,6 +53,8 @@ target("mlx") add_options("graphics_memory_dump") add_options("profiler") add_options("force_wayland") + add_options("disable_all_safeties") + add_includedirs("runtime/Includes", "runtime/Sources", "includes", "third_party") set_pcxxheader("runtime/Includes/PreCompiled.h") From 527a88ebaa008b6d1da57b10ca3cdd4460cebc97 Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Sun, 15 Sep 2024 09:03:52 +0200 Subject: [PATCH 029/131] fixing compilation issues --- runtime/Includes/Core/Application.inl | 4 ++-- runtime/Sources/Core/Graphics.cpp | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/runtime/Includes/Core/Application.inl b/runtime/Includes/Core/Application.inl index c76adfb..d537068 100644 --- a/runtime/Includes/Core/Application.inl +++ b/runtime/Includes/Core/Application.inl @@ -8,7 +8,7 @@ Error("invalid window ptr (NULL)"); \ return; \ } \ - else if(std::find_if(m_graphics.begin(), m_graphics.end(), [win](const std::unique_ptr& gs){ return *static_cast(win) == gs->GetID(); } != m_graphics.end())) \ + else if(std::find_if(m_graphics.begin(), m_graphics.end(), [win](const std::unique_ptr& gs){ return *static_cast(win) == gs->GetID(); }) != m_graphics.end()) \ { \ Error("invalid window ptr"); \ return; \ @@ -63,7 +63,7 @@ namespace mlx void Application::GetScreenSize(Handle win, int* w, int* h) noexcept { CHECK_WINDOW_PTR(win); - m_graphics[*static_cast(win)]->GetWindow()->GetScreenSizeWindowIsOn(x, y); + m_graphics[*static_cast(win)]->GetWindow()->GetScreenSizeWindowIsOn(w, h); } void Application::SetFPSCap(std::uint32_t fps) noexcept diff --git a/runtime/Sources/Core/Graphics.cpp b/runtime/Sources/Core/Graphics.cpp index a2dbd33..7032816 100644 --- a/runtime/Sources/Core/Graphics.cpp +++ b/runtime/Sources/Core/Graphics.cpp @@ -3,7 +3,7 @@ namespace mlx { - GraphicsSupport::GraphicsSupport(std::size_t w, std::size_t h, NonOwningPtr render_target, int id) : + GraphicsSupport::GraphicsSupport([[maybe_unused]] std::size_t w, [[maybe_unused]] std::size_t h, NonOwningPtr render_target, int id) : m_put_pixel_manager(&m_renderer), p_window(nullptr), m_id(id), From 83432cb356f27202b9d871b8bc47dff2dfa528bd Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Sun, 15 Sep 2024 10:11:08 +0200 Subject: [PATCH 030/131] fixing warnings in kvf --- includes/mlx.h | 13 ++++++++++++- runtime/Includes/Core/Application.h | 1 + runtime/Includes/Core/Application.inl | 9 +++++++++ runtime/Includes/Core/SDLManager.h | 1 + runtime/Includes/Platform/Window.h | 1 + 
runtime/Sources/Core/Bridge.cpp | 6 ++++++ runtime/Sources/Core/SDLManager.cpp | 5 +++++ runtime/Sources/Renderer/RenderCore.cpp | 10 +++++++++- third_party/kvf.h | 1 + 9 files changed, 45 insertions(+), 2 deletions(-) diff --git a/includes/mlx.h b/includes/mlx.h index 14e4a0e..ef02780 100644 --- a/includes/mlx.h +++ b/includes/mlx.h @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/04 16:56:35 by maldavid #+# #+# */ -/* Updated: 2024/01/18 14:36:12 by maldavid ### ########.fr */ +/* Updated: 2024/09/15 09:23:48 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -54,6 +54,17 @@ MLX_API void* mlx_init(); MLX_API void* mlx_new_window(void* mlx, int w, int h, const char* title); +/** + * @brief Creates a new window + * + * @param mlx Internal MLX application + * @param win Internal window to move + * @param x New x position + * @param y New y position + * + */ +MLX_API void mlx_set_window_position(void *mlx, void *win, int x, int y); + /** * @brief Gives a function to be executed at each loop turn * diff --git a/runtime/Includes/Core/Application.h b/runtime/Includes/Core/Application.h index ec74102..213010b 100644 --- a/runtime/Includes/Core/Application.h +++ b/runtime/Includes/Core/Application.h @@ -25,6 +25,7 @@ namespace mlx inline Handle NewGraphicsSuport(std::size_t w, std::size_t h, const char* title); inline void ClearGraphicsSupport(Handle win); inline void DestroyGraphicsSupport(Handle win); + inline void SetGraphicsSupportPosition(Handle win, int x, int y); inline void PixelPut(Handle win, int x, int y, std::uint32_t color) const noexcept; inline void StringPut(Handle win, int x, int y, std::uint32_t color, char* str); diff --git a/runtime/Includes/Core/Application.inl b/runtime/Includes/Core/Application.inl index d537068..0296216 100644 --- a/runtime/Includes/Core/Application.inl +++ b/runtime/Includes/Core/Application.inl @@ -104,6 +104,15 @@ namespace mlx m_graphics.erase(m_graphics.begin() + *static_cast(win)); } + void Application::SetGraphicsSupportPosition(Handle win, int x, int y) + { + CHECK_WINDOW_PTR(win); + if(!m_graphics[*static_cast(win)]->HasWindow()) + Warning("trying to move a window that is targeting an image and not a real window, this is not allowed"); + else + m_graphics[*static_cast(win)]->GetWindow()->SetPosition(x, y); + } + void Application::PixelPut(Handle win, int x, int y, std::uint32_t color) const noexcept { MLX_PROFILE_FUNCTION(); diff --git a/runtime/Includes/Core/SDLManager.h b/runtime/Includes/Core/SDLManager.h index df49d13..8022a25 100644 --- a/runtime/Includes/Core/SDLManager.h +++ b/runtime/Includes/Core/SDLManager.h @@ -21,6 +21,7 @@ namespace mlx Vec2ui GetVulkanDrawableSize(Handle window) const noexcept; void MoveMouseOnWindow(Handle window, int x, int y) const noexcept; void GetScreenSizeWindowIsOn(Handle window, int* x, int* y) const noexcept; + void SetWindowPosition(Handle window, int x, int y) const noexcept; inline void SetEventCallback(func::function functor, void* userdata) { f_callback = std::move(functor); p_callback_data = userdata; } diff --git a/runtime/Includes/Platform/Window.h b/runtime/Includes/Platform/Window.h index 5895a70..427e52f 100644 --- a/runtime/Includes/Platform/Window.h +++ b/runtime/Includes/Platform/Window.h @@ -18,6 +18,7 @@ namespace mlx inline void MoveMouse(int x, int y) { SDLManager::Get().MoveMouseOnWindow(p_window, x, y); } inline void GetScreenSizeWindowIsOn(int* x, int* y) { 
SDLManager::Get().GetScreenSizeWindowIsOn(p_window, x, y); } + inline void SetPosition(int x, int y) { SDLManager::Get().SetWindowPosition(p_window, x, y); } inline VkSurfaceKHR CreateVulkanSurface(VkInstance instance) const noexcept { return SDLManager::Get().CreateVulkanSurface(p_window, instance); } inline std::vector GetRequiredVulkanInstanceExtentions() const noexcept { return SDLManager::Get().GetRequiredVulkanInstanceExtentions(p_window); } diff --git a/runtime/Sources/Core/Bridge.cpp b/runtime/Sources/Core/Bridge.cpp index deff1a2..3ba1a9e 100644 --- a/runtime/Sources/Core/Bridge.cpp +++ b/runtime/Sources/Core/Bridge.cpp @@ -45,6 +45,12 @@ extern "C" return static_cast(mlx)->NewGraphicsSuport(w, h, title); } + void mlx_set_window_position(void *mlx, void *win, int x, int y) + { + MLX_CHECK_APPLICATION_POINTER(mlx); + static_cast(mlx)->SetGraphicsSupportPosition(win, x, y); + } + int mlx_loop_hook(void* mlx, int (*f)(void*), void* param) { MLX_CHECK_APPLICATION_POINTER(mlx); diff --git a/runtime/Sources/Core/SDLManager.cpp b/runtime/Sources/Core/SDLManager.cpp index 8619340..36407bf 100644 --- a/runtime/Sources/Core/SDLManager.cpp +++ b/runtime/Sources/Core/SDLManager.cpp @@ -173,6 +173,11 @@ namespace mlx *y = DM.h; } + void SDLManager::SetWindowPosition(Handle window, int x, int y) const noexcept + { + SDL_SetWindowPosition(static_cast(window), x, y); + } + std::int32_t SDLManager::GetX() const noexcept { int dummy; diff --git a/runtime/Sources/Renderer/RenderCore.cpp b/runtime/Sources/Renderer/RenderCore.cpp index 989f138..0ab8904 100644 --- a/runtime/Sources/Renderer/RenderCore.cpp +++ b/runtime/Sources/Renderer/RenderCore.cpp @@ -1,3 +1,4 @@ +#include #include #define KVF_IMPLEMENTATION @@ -5,7 +6,14 @@ #define KVF_ENABLE_VALIDATION_LAYERS #endif -#include +#if defined(MLX_COMPILER_GCC) || defined(MLX_COMPILER_CLANG) + #pragma clang diagnostic push + #pragma clang diagnostic ignored "-Wmissing-field-initializers" + #include + #pragma clang diagnostic pop +#else + #include +#endif #include #include diff --git a/third_party/kvf.h b/third_party/kvf.h index 6a22d3e..5dd0758 100755 --- a/third_party/kvf.h +++ b/third_party/kvf.h @@ -1062,6 +1062,7 @@ void kvfAddLayer(const char* layer) strcpy(__kvf_extra_layers[__kvf_extra_layers_count], layer); __kvf_extra_layers_count++; #else + (void)layer; if(__kvf_validation_error_callback != NULL) { char buffer[4096]; From e9a8a0cb847a9d0a8f6c09f5da1f7bff5ab1c32a Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Mon, 16 Sep 2024 05:07:02 +0200 Subject: [PATCH 031/131] working on vulkan loader --- runtime/Includes/Core/SDLManager.h | 2 +- runtime/Includes/Platform/Window.h | 4 +- runtime/Includes/PreCompiled.h | 1 - runtime/Includes/Renderer/RenderCore.h | 1 + .../Renderer/Vulkan/VulkanPrototypes.h | 12 +- runtime/Sources/Core/SDLManager.cpp | 28 +- runtime/Sources/Renderer/RenderCore.cpp | 6 +- .../Sources/Renderer/Vulkan/VulkanLoader.cpp | 440 ++++++++++-------- .../Sources/Renderer/Vulkan/VulkanLoader.h | 1 + 9 files changed, 274 insertions(+), 221 deletions(-) diff --git a/runtime/Includes/Core/SDLManager.h b/runtime/Includes/Core/SDLManager.h index 8022a25..61f273d 100644 --- a/runtime/Includes/Core/SDLManager.h +++ b/runtime/Includes/Core/SDLManager.h @@ -17,7 +17,7 @@ namespace mlx void DestroyWindow(Handle window) noexcept; VkSurfaceKHR CreateVulkanSurface(Handle window, VkInstance instance) const noexcept; - std::vector GetRequiredVulkanInstanceExtentions(Handle window) const noexcept; + std::vector GetRequiredVulkanInstanceExtentions() const 
noexcept; Vec2ui GetVulkanDrawableSize(Handle window) const noexcept; void MoveMouseOnWindow(Handle window, int x, int y) const noexcept; void GetScreenSizeWindowIsOn(Handle window, int* x, int* y) const noexcept; diff --git a/runtime/Includes/Platform/Window.h b/runtime/Includes/Platform/Window.h index 427e52f..1a15f33 100644 --- a/runtime/Includes/Platform/Window.h +++ b/runtime/Includes/Platform/Window.h @@ -21,12 +21,12 @@ namespace mlx inline void SetPosition(int x, int y) { SDLManager::Get().SetWindowPosition(p_window, x, y); } inline VkSurfaceKHR CreateVulkanSurface(VkInstance instance) const noexcept { return SDLManager::Get().CreateVulkanSurface(p_window, instance); } - inline std::vector GetRequiredVulkanInstanceExtentions() const noexcept { return SDLManager::Get().GetRequiredVulkanInstanceExtentions(p_window); } + inline std::vector GetRequiredVulkanInstanceExtentions() const noexcept { return SDLManager::Get().GetRequiredVulkanInstanceExtentions(); } inline Vec2ui GetVulkanDrawableSize() const noexcept { return SDLManager::Get().GetVulkanDrawableSize(p_window); } void Destroy() noexcept; - ~Window() = default; + ~Window() { Destroy(); } private: Handle p_window = nullptr; diff --git a/runtime/Includes/PreCompiled.h b/runtime/Includes/PreCompiled.h index acaac11..f0041a9 100644 --- a/runtime/Includes/PreCompiled.h +++ b/runtime/Includes/PreCompiled.h @@ -81,7 +81,6 @@ #include #include -#include #ifdef DEBUG #define KVF_ENABLE_VALIDATION_LAYERS #endif diff --git a/runtime/Includes/Renderer/RenderCore.h b/runtime/Includes/Renderer/RenderCore.h index 04140d1..5885064 100644 --- a/runtime/Includes/Renderer/RenderCore.h +++ b/runtime/Includes/Renderer/RenderCore.h @@ -1,6 +1,7 @@ #ifndef __MLX_RENDER_CORE__ #define __MLX_RENDER_CORE__ +#include #include namespace mlx diff --git a/runtime/Includes/Renderer/Vulkan/VulkanPrototypes.h b/runtime/Includes/Renderer/Vulkan/VulkanPrototypes.h index d65439f..d3e9ad9 100644 --- a/runtime/Includes/Renderer/Vulkan/VulkanPrototypes.h +++ b/runtime/Includes/Renderer/Vulkan/VulkanPrototypes.h @@ -13,7 +13,7 @@ #include #endif -#if defined(VK_VERSION_1_0) +#ifdef VK_VERSION_1_0 extern PFN_vkAllocateCommandBuffers vkAllocateCommandBuffers; extern PFN_vkAllocateDescriptorSets vkAllocateDescriptorSets; extern PFN_vkAllocateMemory vkAllocateMemory; @@ -151,20 +151,20 @@ extern PFN_vkUnmapMemory vkUnmapMemory; extern PFN_vkUpdateDescriptorSets vkUpdateDescriptorSets; extern PFN_vkWaitForFences vkWaitForFences; -#endif /* defined(VK_VERSION_1_0) */ -#if defined(VK_KHR_swapchain) +#endif +#ifdef VK_KHR_swapchain extern PFN_vkAcquireNextImageKHR vkAcquireNextImageKHR; extern PFN_vkCreateSwapchainKHR vkCreateSwapchainKHR; extern PFN_vkDestroySwapchainKHR vkDestroySwapchainKHR; extern PFN_vkGetSwapchainImagesKHR vkGetSwapchainImagesKHR; extern PFN_vkQueuePresentKHR vkQueuePresentKHR; -#endif /* defined(VK_KHR_swapchain) */ -#if defined(VK_KHR_surface) +#endif +#ifdef VK_KHR_surface extern PFN_vkDestroySurfaceKHR vkDestroySurfaceKHR; extern PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR vkGetPhysicalDeviceSurfaceCapabilitiesKHR; extern PFN_vkGetPhysicalDeviceSurfaceFormatsKHR vkGetPhysicalDeviceSurfaceFormatsKHR; extern PFN_vkGetPhysicalDeviceSurfacePresentModesKHR vkGetPhysicalDeviceSurfacePresentModesKHR; extern PFN_vkGetPhysicalDeviceSurfaceSupportKHR vkGetPhysicalDeviceSurfaceSupportKHR; -#endif /* defined(VK_KHR_surface) */ +#endif #endif diff --git a/runtime/Sources/Core/SDLManager.cpp b/runtime/Sources/Core/SDLManager.cpp index 36407bf..fa0801f 100644 
--- a/runtime/Sources/Core/SDLManager.cpp +++ b/runtime/Sources/Core/SDLManager.cpp @@ -139,16 +139,30 @@ namespace mlx return surface; } - std::vector SDLManager::GetRequiredVulkanInstanceExtentions(Handle window) const noexcept + std::vector SDLManager::GetRequiredVulkanInstanceExtentions() const noexcept { - std::uint32_t count; - if(!SDL_Vulkan_GetInstanceExtensions(static_cast(window), &count, nullptr)) - FatalError("Vulkan : cannot get instance extentions from window : %", SDL_GetError()); + std::vector extensions; + extensions.push_back(VK_KHR_SURFACE_EXTENSION_NAME); - std::vector extensions(count); + #ifdef VK_USE_PLATFORM_XCB_KHR + extensions.push_back(VK_KHR_XCB_SURFACE_EXTENSION_NAME); + #endif - if(!SDL_Vulkan_GetInstanceExtensions(static_cast(window), &count, extensions.data())) - FatalError("Vulkan : cannot get instance extentions from window : %", SDL_GetError()); + #ifdef VK_USE_PLATFORM_XLIB_KHR + extensions.push_back(VK_KHR_XLIB_SURFACE_EXTENSION_NAME); + #endif + + #ifdef VK_USE_PLATFORM_WAYLAND_KHR + extensions.push_back(VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME); + #endif + + #ifdef VK_USE_PLATFORM_WIN32_KHR + extensions.push_back(VK_KHR_WIN32_SURFACE_EXTENSION_NAME); + #endif + + #ifdef VK_USE_PLATFORM_METAL_EXT + extensions.push_back(VK_EXT_METAL_SURFACE_EXTENSION_NAME); + #endif return extensions; } diff --git a/runtime/Sources/Renderer/RenderCore.cpp b/runtime/Sources/Renderer/RenderCore.cpp index 0ab8904..79ca02d 100644 --- a/runtime/Sources/Renderer/RenderCore.cpp +++ b/runtime/Sources/Renderer/RenderCore.cpp @@ -1,5 +1,6 @@ #include #include +#include #define KVF_IMPLEMENTATION #ifdef DEBUG @@ -15,7 +16,6 @@ #include #endif -#include #include #include #include @@ -79,8 +79,10 @@ namespace mlx m_device = kvfCreateDevice(m_physical_device, device_extensions, sizeof(device_extensions) / sizeof(device_extensions[0]), &features); DebugLog("Vulkan : logical device created"); + loader->LoadDevice(m_device); + vkDestroySurfaceKHR(m_instance, surface, nullptr); - window.Destroy(); + FatalError("caca"); } void RenderCore::Destroy() noexcept diff --git a/runtime/Sources/Renderer/Vulkan/VulkanLoader.cpp b/runtime/Sources/Renderer/Vulkan/VulkanLoader.cpp index 7a7bf81..273a902 100644 --- a/runtime/Sources/Renderer/Vulkan/VulkanLoader.cpp +++ b/runtime/Sources/Renderer/Vulkan/VulkanLoader.cpp @@ -1,3 +1,5 @@ +#include "Renderer/Vulkan/VulkanPrototypes.h" +#include "vulkan/vulkan_core.h" #include #include @@ -5,6 +7,9 @@ __declspec(dllimport) HMODULE __stdcall LoadLibraryA(LPCSTR); __declspec(dllimport) FARPROC __stdcall GetProcAddress(HMODULE, LPCSTR); __declspec(dllimport) int __stdcall FreeLibrary(HMODULE); + using LibModule = HMODULE; +#else + using LibModule = Handle; #endif #if defined(MLX_COMPILER_GCC) @@ -24,237 +29,268 @@ namespace mlx { static inline PFN_vkVoidFunction vkGetInstanceProcAddrStub(Handle context, const char* name) { - return vkGetInstanceProcAddr(static_cast(context), name); + PFN_vkVoidFunction function = vkGetInstanceProcAddr(static_cast(context), name); + if(!function) + FatalError("Vulkan loader : could not load '%'", name); + DebugLog("Vulkan loader : loaded %", name); + return function; + } + + static inline PFN_vkVoidFunction vkGetDeviceProcAddrStub(Handle context, const char* name) + { + PFN_vkVoidFunction function = vkGetDeviceProcAddr(static_cast(context), name); + if(!function) + FatalError("Vulkan loader : could not load '%'", name); + DebugLog("Vulkan loader : loaded %", name); + return function; + } + + static inline LibModule LoadLib(const 
char* libname) + { + #ifdef MLX_PLAT_WINDOWS + return LoadLibraryA(libname); + #else + return dlopen(libname, RTLD_NOW | RTLD_LOCAL); + #endif + } + + static inline void* GetSymbol(LibModule module, const char* name) + { + #ifdef MLX_PLAT_WINDOWS + return (void*)(void(*)(void))GetProcAddress(module, name); + #else + return dlsym(module, name); + #endif } } VulkanLoader::VulkanLoader() { #if defined(MLX_PLAT_WINDOWS) - p_module = LoadLibraryA("vulkan-1.dll"); - if(!p_module) - FatalError("Vulkan loader : failed to load libvulkan"); - vkGetInstanceProcAddr = (PFN_vkGetInstanceProcAddr)(void(*)(void))GetProcAddress(p_module, "vkGetInstanceProcAddr"); + std::array libnames{ + "vulkan-1.dll" + }; #elif defined(MLX_PLAT_MACOS) - p_module = dlopen("libvulkan.dylib", RTLD_NOW | RTLD_LOCAL); - if(!p_module) - p_module = dlopen("libvulkan.1.dylib", RTLD_NOW | RTLD_LOCAL); - if(!p_module) - p_module = dlopen("libMoltenVK.dylib", RTLD_NOW | RTLD_LOCAL); - - // Add support for using Vulkan and MoltenVK in a Framework. App store rules for iOS - // strictly enforce no .dylib's. If they aren't found it just falls through - if(!p_module) - p_module = dlopen("vulkan.framework/vulkan", RTLD_NOW | RTLD_LOCAL); - if(!p_module) - p_module = dlopen("MoltenVK.framework/MoltenVK", RTLD_NOW | RTLD_LOCAL); - - // modern versions of macOS don't search /usr/local/lib automatically contrary to what man dlopen says - // Vulkan SDK uses this as the system-wide installation location, so we're going to fallback to this if all else fails - if(!p_module && getenv("DYLD_FALLBACK_LIBRARY_PATH") == NULL) - p_module = dlopen("/usr/local/lib/libvulkan.dylib", RTLD_NOW | RTLD_LOCAL); - if(!p_module) - FatalError("Vulkan loader : failed to load libvulkan"); - void* symbol_ptr = dlsym(p_module, "vkGetInstanceProcAddr"); - *(void**)(&vkGetInstanceProcAddr) = symbol_ptr; + std::array libnames{ + "libvulkan.dylib", + "libvulkan.1.dylib", + "libMoltenVK.dylib", + "vulkan.framework/vulkan", + "MoltenVK.framework/MoltenVK", + "/usr/local/lib/libvulkan.dylib", + }; #else - dlerror(); - p_module = dlopen("libvulkan.so.1", RTLD_NOW | RTLD_LOCAL); - if(!p_module) - p_module = dlopen("libvulkan.so", RTLD_NOW | RTLD_LOCAL); - if(!p_module) - FatalError("Vulkan loader : failed to load libvulkan due to %", dlerror()); - DISABLE_GCC_PEDANTIC_WARNINGS - void* symbol_ptr = dlsym(p_module, "vkGetInstanceProcAddr"); - *(void**)(&vkGetInstanceProcAddr) = symbol_ptr; - RESTORE_GCC_PEDANTIC_WARNINGS + std::array libnames{ + "libvulkan.so.1", + "libvulkan.so" + }; #endif + + for(auto libname : libnames) + { + p_module = Internal::LoadLib(libname); + if(p_module != nullptr) + break; + } + if(!p_module) + FatalError("Vulkan loader : failed to load libvulkan"); + + DISABLE_GCC_PEDANTIC_WARNINGS + vkGetInstanceProcAddr = reinterpret_cast(Internal::GetSymbol(p_module, "vkGetInstanceProcAddr")); + RESTORE_GCC_PEDANTIC_WARNINGS + + if(!vkGetInstanceProcAddr) + FatalError("Vulkan loader : could not get symbol for 'vkGetInstanceProcAddr'"); DebugLog("Vulkan loader : libvulkan loaded"); - LoadGlobalFunctions(nullptr, Internal::vkGetInstanceProcAddrStub); + LoadGlobalFunctions(NULL, Internal::vkGetInstanceProcAddrStub); } void VulkanLoader::LoadInstance(VkInstance instance) { LoadInstanceFunctions(instance, Internal::vkGetInstanceProcAddrStub); - LoadDeviceFunctions(instance, Internal::vkGetInstanceProcAddrStub); + } + + void VulkanLoader::LoadDevice(VkDevice device) + { + LoadDeviceFunctions(device, Internal::vkGetDeviceProcAddrStub); } void 
VulkanLoader::LoadGlobalFunctions(void* context, PFN_vkVoidFunction (*load)(void*, const char*)) noexcept { - #if defined(VK_VERSION_1_0) - vkCreateInstance = (PFN_vkCreateInstance)load(context, "vkCreateInstance"); - vkEnumerateInstanceExtensionProperties = (PFN_vkEnumerateInstanceExtensionProperties)load(context, "vkEnumerateInstanceExtensionProperties"); - vkEnumerateInstanceLayerProperties = (PFN_vkEnumerateInstanceLayerProperties)load(context, "vkEnumerateInstanceLayerProperties"); - #endif /* defined(VK_VERSION_1_0) */ + #ifdef VK_VERSION_1_0 + vkCreateInstance = reinterpret_cast(load(context, "vkCreateInstance")); + vkEnumerateInstanceExtensionProperties = reinterpret_cast(load(context, "vkEnumerateInstanceExtensionProperties")); + vkEnumerateInstanceLayerProperties = reinterpret_cast(load(context, "vkEnumerateInstanceLayerProperties")); + #endif DebugLog("Vulkan loader : global functions loaded"); } void VulkanLoader::LoadInstanceFunctions(void* context, PFN_vkVoidFunction (*load)(void*, const char*)) noexcept { - #if defined(VK_VERSION_1_0) - vkCreateDevice = (PFN_vkCreateDevice)load(context, "vkCreateDevice"); - vkDestroyInstance = (PFN_vkDestroyInstance)load(context, "vkDestroyInstance"); - vkEnumerateDeviceExtensionProperties = (PFN_vkEnumerateDeviceExtensionProperties)load(context, "vkEnumerateDeviceExtensionProperties"); - vkEnumerateDeviceLayerProperties = (PFN_vkEnumerateDeviceLayerProperties)load(context, "vkEnumerateDeviceLayerProperties"); - vkEnumeratePhysicalDevices = (PFN_vkEnumeratePhysicalDevices)load(context, "vkEnumeratePhysicalDevices"); - vkGetDeviceProcAddr = (PFN_vkGetDeviceProcAddr)load(context, "vkGetDeviceProcAddr"); - vkGetPhysicalDeviceFeatures = (PFN_vkGetPhysicalDeviceFeatures)load(context, "vkGetPhysicalDeviceFeatures"); - vkGetPhysicalDeviceFormatProperties = (PFN_vkGetPhysicalDeviceFormatProperties)load(context, "vkGetPhysicalDeviceFormatProperties"); - vkGetPhysicalDeviceImageFormatProperties = (PFN_vkGetPhysicalDeviceImageFormatProperties)load(context, "vkGetPhysicalDeviceImageFormatProperties"); - vkGetPhysicalDeviceMemoryProperties = (PFN_vkGetPhysicalDeviceMemoryProperties)load(context, "vkGetPhysicalDeviceMemoryProperties"); - vkGetPhysicalDeviceProperties = (PFN_vkGetPhysicalDeviceProperties)load(context, "vkGetPhysicalDeviceProperties"); - vkGetPhysicalDeviceQueueFamilyProperties = (PFN_vkGetPhysicalDeviceQueueFamilyProperties)load(context, "vkGetPhysicalDeviceQueueFamilyProperties"); - vkGetPhysicalDeviceSparseImageFormatProperties = (PFN_vkGetPhysicalDeviceSparseImageFormatProperties)load(context, "vkGetPhysicalDeviceSparseImageFormatProperties"); - #endif /* defined(VK_VERSION_1_0) */ - #if defined(VK_KHR_surface) - vkDestroySurfaceKHR = (PFN_vkDestroySurfaceKHR)load(context, "vkDestroySurfaceKHR"); - vkGetPhysicalDeviceSurfaceCapabilitiesKHR = (PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR)load(context, "vkGetPhysicalDeviceSurfaceCapabilitiesKHR"); - vkGetPhysicalDeviceSurfaceFormatsKHR = (PFN_vkGetPhysicalDeviceSurfaceFormatsKHR)load(context, "vkGetPhysicalDeviceSurfaceFormatsKHR"); - vkGetPhysicalDeviceSurfacePresentModesKHR = (PFN_vkGetPhysicalDeviceSurfacePresentModesKHR)load(context, "vkGetPhysicalDeviceSurfacePresentModesKHR"); - vkGetPhysicalDeviceSurfaceSupportKHR = (PFN_vkGetPhysicalDeviceSurfaceSupportKHR)load(context, "vkGetPhysicalDeviceSurfaceSupportKHR"); - #endif /* defined(VK_KHR_surface) */ + #ifdef VK_VERSION_1_0 + vkCreateDevice = reinterpret_cast(load(context, "vkCreateDevice")); + vkDestroyInstance = 
reinterpret_cast(load(context, "vkDestroyInstance")); + vkEnumerateDeviceExtensionProperties = reinterpret_cast(load(context, "vkEnumerateDeviceExtensionProperties")); + vkEnumerateDeviceLayerProperties = reinterpret_cast(load(context, "vkEnumerateDeviceLayerProperties")); + vkEnumeratePhysicalDevices = reinterpret_cast(load(context, "vkEnumeratePhysicalDevices")); + vkGetDeviceProcAddr = reinterpret_cast(load(context, "vkGetDeviceProcAddr")); + vkGetPhysicalDeviceFeatures = reinterpret_cast(load(context, "vkGetPhysicalDeviceFeatures")); + vkGetPhysicalDeviceFormatProperties = reinterpret_cast(load(context, "vkGetPhysicalDeviceFormatProperties")); + vkGetPhysicalDeviceImageFormatProperties = reinterpret_cast(load(context, "vkGetPhysicalDeviceImageFormatProperties")); + vkGetPhysicalDeviceMemoryProperties = reinterpret_cast(load(context, "vkGetPhysicalDeviceMemoryProperties")); + vkGetPhysicalDeviceProperties = reinterpret_cast(load(context, "vkGetPhysicalDeviceProperties")); + vkGetPhysicalDeviceQueueFamilyProperties = reinterpret_cast(load(context, "vkGetPhysicalDeviceQueueFamilyProperties")); + vkGetPhysicalDeviceSparseImageFormatProperties = reinterpret_cast(load(context, "vkGetPhysicalDeviceSparseImageFormatProperties")); + #endif + #ifdef VK_KHR_surface + vkDestroySurfaceKHR = reinterpret_cast(load(context, "vkDestroySurfaceKHR")); + vkGetPhysicalDeviceSurfaceCapabilitiesKHR = reinterpret_cast(load(context, "vkGetPhysicalDeviceSurfaceCapabilitiesKHR")); + vkGetPhysicalDeviceSurfaceFormatsKHR = reinterpret_cast(load(context, "vkGetPhysicalDeviceSurfaceFormatsKHR")); + vkGetPhysicalDeviceSurfacePresentModesKHR = reinterpret_cast(load(context, "vkGetPhysicalDeviceSurfacePresentModesKHR")); + vkGetPhysicalDeviceSurfaceSupportKHR = reinterpret_cast(load(context, "vkGetPhysicalDeviceSurfaceSupportKHR")); + #endif DebugLog("Vulkan loader : instance functions loaded"); } void VulkanLoader::LoadDeviceFunctions(void* context, PFN_vkVoidFunction (*load)(void*, const char*)) noexcept { - #if defined(VK_VERSION_1_0) - vkAllocateCommandBuffers = (PFN_vkAllocateCommandBuffers)load(context, "vkAllocateCommandBuffers"); - vkAllocateDescriptorSets = (PFN_vkAllocateDescriptorSets)load(context, "vkAllocateDescriptorSets"); - vkAllocateMemory = (PFN_vkAllocateMemory)load(context, "vkAllocateMemory"); - vkBeginCommandBuffer = (PFN_vkBeginCommandBuffer)load(context, "vkBeginCommandBuffer"); - vkBindBufferMemory = (PFN_vkBindBufferMemory)load(context, "vkBindBufferMemory"); - vkBindImageMemory = (PFN_vkBindImageMemory)load(context, "vkBindImageMemory"); - vkCmdBeginQuery = (PFN_vkCmdBeginQuery)load(context, "vkCmdBeginQuery"); - vkCmdBeginRenderPass = (PFN_vkCmdBeginRenderPass)load(context, "vkCmdBeginRenderPass"); - vkCmdBindDescriptorSets = (PFN_vkCmdBindDescriptorSets)load(context, "vkCmdBindDescriptorSets"); - vkCmdBindIndexBuffer = (PFN_vkCmdBindIndexBuffer)load(context, "vkCmdBindIndexBuffer"); - vkCmdBindPipeline = (PFN_vkCmdBindPipeline)load(context, "vkCmdBindPipeline"); - vkCmdBindVertexBuffers = (PFN_vkCmdBindVertexBuffers)load(context, "vkCmdBindVertexBuffers"); - vkCmdBlitImage = (PFN_vkCmdBlitImage)load(context, "vkCmdBlitImage"); - vkCmdClearAttachments = (PFN_vkCmdClearAttachments)load(context, "vkCmdClearAttachments"); - vkCmdClearColorImage = (PFN_vkCmdClearColorImage)load(context, "vkCmdClearColorImage"); - vkCmdClearDepthStencilImage = (PFN_vkCmdClearDepthStencilImage)load(context, "vkCmdClearDepthStencilImage"); - vkCmdCopyBuffer = (PFN_vkCmdCopyBuffer)load(context, "vkCmdCopyBuffer"); - 
vkCmdCopyBufferToImage = (PFN_vkCmdCopyBufferToImage)load(context, "vkCmdCopyBufferToImage"); - vkCmdCopyImage = (PFN_vkCmdCopyImage)load(context, "vkCmdCopyImage"); - vkCmdCopyImageToBuffer = (PFN_vkCmdCopyImageToBuffer)load(context, "vkCmdCopyImageToBuffer"); - vkCmdCopyQueryPoolResults = (PFN_vkCmdCopyQueryPoolResults)load(context, "vkCmdCopyQueryPoolResults"); - vkCmdDispatch = (PFN_vkCmdDispatch)load(context, "vkCmdDispatch"); - vkCmdDispatchIndirect = (PFN_vkCmdDispatchIndirect)load(context, "vkCmdDispatchIndirect"); - vkCmdDraw = (PFN_vkCmdDraw)load(context, "vkCmdDraw"); - vkCmdDrawIndexed = (PFN_vkCmdDrawIndexed)load(context, "vkCmdDrawIndexed"); - vkCmdDrawIndexedIndirect = (PFN_vkCmdDrawIndexedIndirect)load(context, "vkCmdDrawIndexedIndirect"); - vkCmdDrawIndirect = (PFN_vkCmdDrawIndirect)load(context, "vkCmdDrawIndirect"); - vkCmdEndQuery = (PFN_vkCmdEndQuery)load(context, "vkCmdEndQuery"); - vkCmdEndRenderPass = (PFN_vkCmdEndRenderPass)load(context, "vkCmdEndRenderPass"); - vkCmdExecuteCommands = (PFN_vkCmdExecuteCommands)load(context, "vkCmdExecuteCommands"); - vkCmdFillBuffer = (PFN_vkCmdFillBuffer)load(context, "vkCmdFillBuffer"); - vkCmdNextSubpass = (PFN_vkCmdNextSubpass)load(context, "vkCmdNextSubpass"); - vkCmdPipelineBarrier = (PFN_vkCmdPipelineBarrier)load(context, "vkCmdPipelineBarrier"); - vkCmdPushConstants = (PFN_vkCmdPushConstants)load(context, "vkCmdPushConstants"); - vkCmdResetEvent = (PFN_vkCmdResetEvent)load(context, "vkCmdResetEvent"); - vkCmdResetQueryPool = (PFN_vkCmdResetQueryPool)load(context, "vkCmdResetQueryPool"); - vkCmdResolveImage = (PFN_vkCmdResolveImage)load(context, "vkCmdResolveImage"); - vkCmdSetBlendConstants = (PFN_vkCmdSetBlendConstants)load(context, "vkCmdSetBlendConstants"); - vkCmdSetDepthBias = (PFN_vkCmdSetDepthBias)load(context, "vkCmdSetDepthBias"); - vkCmdSetDepthBounds = (PFN_vkCmdSetDepthBounds)load(context, "vkCmdSetDepthBounds"); - vkCmdSetEvent = (PFN_vkCmdSetEvent)load(context, "vkCmdSetEvent"); - vkCmdSetLineWidth = (PFN_vkCmdSetLineWidth)load(context, "vkCmdSetLineWidth"); - vkCmdSetScissor = (PFN_vkCmdSetScissor)load(context, "vkCmdSetScissor"); - vkCmdSetStencilCompareMask = (PFN_vkCmdSetStencilCompareMask)load(context, "vkCmdSetStencilCompareMask"); - vkCmdSetStencilReference = (PFN_vkCmdSetStencilReference)load(context, "vkCmdSetStencilReference"); - vkCmdSetStencilWriteMask = (PFN_vkCmdSetStencilWriteMask)load(context, "vkCmdSetStencilWriteMask"); - vkCmdSetViewport = (PFN_vkCmdSetViewport)load(context, "vkCmdSetViewport"); - vkCmdUpdateBuffer = (PFN_vkCmdUpdateBuffer)load(context, "vkCmdUpdateBuffer"); - vkCmdWaitEvents = (PFN_vkCmdWaitEvents)load(context, "vkCmdWaitEvents"); - vkCmdWriteTimestamp = (PFN_vkCmdWriteTimestamp)load(context, "vkCmdWriteTimestamp"); - vkCreateBuffer = (PFN_vkCreateBuffer)load(context, "vkCreateBuffer"); - vkCreateBufferView = (PFN_vkCreateBufferView)load(context, "vkCreateBufferView"); - vkCreateCommandPool = (PFN_vkCreateCommandPool)load(context, "vkCreateCommandPool"); - vkCreateComputePipelines = (PFN_vkCreateComputePipelines)load(context, "vkCreateComputePipelines"); - vkCreateDescriptorPool = (PFN_vkCreateDescriptorPool)load(context, "vkCreateDescriptorPool"); - vkCreateDescriptorSetLayout = (PFN_vkCreateDescriptorSetLayout)load(context, "vkCreateDescriptorSetLayout"); - vkCreateEvent = (PFN_vkCreateEvent)load(context, "vkCreateEvent"); - vkCreateFence = (PFN_vkCreateFence)load(context, "vkCreateFence"); - vkCreateFramebuffer = (PFN_vkCreateFramebuffer)load(context, 
"vkCreateFramebuffer"); - vkCreateGraphicsPipelines = (PFN_vkCreateGraphicsPipelines)load(context, "vkCreateGraphicsPipelines"); - vkCreateImage = (PFN_vkCreateImage)load(context, "vkCreateImage"); - vkCreateImageView = (PFN_vkCreateImageView)load(context, "vkCreateImageView"); - vkCreatePipelineCache = (PFN_vkCreatePipelineCache)load(context, "vkCreatePipelineCache"); - vkCreatePipelineLayout = (PFN_vkCreatePipelineLayout)load(context, "vkCreatePipelineLayout"); - vkCreateQueryPool = (PFN_vkCreateQueryPool)load(context, "vkCreateQueryPool"); - vkCreateRenderPass = (PFN_vkCreateRenderPass)load(context, "vkCreateRenderPass"); - vkCreateSampler = (PFN_vkCreateSampler)load(context, "vkCreateSampler"); - vkCreateSemaphore = (PFN_vkCreateSemaphore)load(context, "vkCreateSemaphore"); - vkCreateShaderModule = (PFN_vkCreateShaderModule)load(context, "vkCreateShaderModule"); - vkDestroyBuffer = (PFN_vkDestroyBuffer)load(context, "vkDestroyBuffer"); - vkDestroyBufferView = (PFN_vkDestroyBufferView)load(context, "vkDestroyBufferView"); - vkDestroyCommandPool = (PFN_vkDestroyCommandPool)load(context, "vkDestroyCommandPool"); - vkDestroyDescriptorPool = (PFN_vkDestroyDescriptorPool)load(context, "vkDestroyDescriptorPool"); - vkDestroyDescriptorSetLayout = (PFN_vkDestroyDescriptorSetLayout)load(context, "vkDestroyDescriptorSetLayout"); - vkDestroyDevice = (PFN_vkDestroyDevice)load(context, "vkDestroyDevice"); - vkDestroyEvent = (PFN_vkDestroyEvent)load(context, "vkDestroyEvent"); - vkDestroyFence = (PFN_vkDestroyFence)load(context, "vkDestroyFence"); - vkDestroyFramebuffer = (PFN_vkDestroyFramebuffer)load(context, "vkDestroyFramebuffer"); - vkDestroyImage = (PFN_vkDestroyImage)load(context, "vkDestroyImage"); - vkDestroyImageView = (PFN_vkDestroyImageView)load(context, "vkDestroyImageView"); - vkDestroyPipeline = (PFN_vkDestroyPipeline)load(context, "vkDestroyPipeline"); - vkDestroyPipelineCache = (PFN_vkDestroyPipelineCache)load(context, "vkDestroyPipelineCache"); - vkDestroyPipelineLayout = (PFN_vkDestroyPipelineLayout)load(context, "vkDestroyPipelineLayout"); - vkDestroyQueryPool = (PFN_vkDestroyQueryPool)load(context, "vkDestroyQueryPool"); - vkDestroyRenderPass = (PFN_vkDestroyRenderPass)load(context, "vkDestroyRenderPass"); - vkDestroySampler = (PFN_vkDestroySampler)load(context, "vkDestroySampler"); - vkDestroySemaphore = (PFN_vkDestroySemaphore)load(context, "vkDestroySemaphore"); - vkDestroyShaderModule = (PFN_vkDestroyShaderModule)load(context, "vkDestroyShaderModule"); - vkDeviceWaitIdle = (PFN_vkDeviceWaitIdle)load(context, "vkDeviceWaitIdle"); - vkEndCommandBuffer = (PFN_vkEndCommandBuffer)load(context, "vkEndCommandBuffer"); - vkFlushMappedMemoryRanges = (PFN_vkFlushMappedMemoryRanges)load(context, "vkFlushMappedMemoryRanges"); - vkFreeCommandBuffers = (PFN_vkFreeCommandBuffers)load(context, "vkFreeCommandBuffers"); - vkFreeDescriptorSets = (PFN_vkFreeDescriptorSets)load(context, "vkFreeDescriptorSets"); - vkFreeMemory = (PFN_vkFreeMemory)load(context, "vkFreeMemory"); - vkGetBufferMemoryRequirements = (PFN_vkGetBufferMemoryRequirements)load(context, "vkGetBufferMemoryRequirements"); - vkGetDeviceMemoryCommitment = (PFN_vkGetDeviceMemoryCommitment)load(context, "vkGetDeviceMemoryCommitment"); - vkGetDeviceQueue = (PFN_vkGetDeviceQueue)load(context, "vkGetDeviceQueue"); - vkGetEventStatus = (PFN_vkGetEventStatus)load(context, "vkGetEventStatus"); - vkGetFenceStatus = (PFN_vkGetFenceStatus)load(context, "vkGetFenceStatus"); - vkGetImageMemoryRequirements = 
(PFN_vkGetImageMemoryRequirements)load(context, "vkGetImageMemoryRequirements"); - vkGetImageSparseMemoryRequirements = (PFN_vkGetImageSparseMemoryRequirements)load(context, "vkGetImageSparseMemoryRequirements"); - vkGetImageSubresourceLayout = (PFN_vkGetImageSubresourceLayout)load(context, "vkGetImageSubresourceLayout"); - vkGetPipelineCacheData = (PFN_vkGetPipelineCacheData)load(context, "vkGetPipelineCacheData"); - vkGetQueryPoolResults = (PFN_vkGetQueryPoolResults)load(context, "vkGetQueryPoolResults"); - vkGetRenderAreaGranularity = (PFN_vkGetRenderAreaGranularity)load(context, "vkGetRenderAreaGranularity"); - vkInvalidateMappedMemoryRanges = (PFN_vkInvalidateMappedMemoryRanges)load(context, "vkInvalidateMappedMemoryRanges"); - vkMapMemory = (PFN_vkMapMemory)load(context, "vkMapMemory"); - vkMergePipelineCaches = (PFN_vkMergePipelineCaches)load(context, "vkMergePipelineCaches"); - vkQueueBindSparse = (PFN_vkQueueBindSparse)load(context, "vkQueueBindSparse"); - vkQueueSubmit = (PFN_vkQueueSubmit)load(context, "vkQueueSubmit"); - vkQueueWaitIdle = (PFN_vkQueueWaitIdle)load(context, "vkQueueWaitIdle"); - vkResetCommandBuffer = (PFN_vkResetCommandBuffer)load(context, "vkResetCommandBuffer"); - vkResetCommandPool = (PFN_vkResetCommandPool)load(context, "vkResetCommandPool"); - vkResetDescriptorPool = (PFN_vkResetDescriptorPool)load(context, "vkResetDescriptorPool"); - vkResetEvent = (PFN_vkResetEvent)load(context, "vkResetEvent"); - vkResetFences = (PFN_vkResetFences)load(context, "vkResetFences"); - vkSetEvent = (PFN_vkSetEvent)load(context, "vkSetEvent"); - vkUnmapMemory = (PFN_vkUnmapMemory)load(context, "vkUnmapMemory"); - vkUpdateDescriptorSets = (PFN_vkUpdateDescriptorSets)load(context, "vkUpdateDescriptorSets"); - vkWaitForFences = (PFN_vkWaitForFences)load(context, "vkWaitForFences"); - #endif /* defined(VK_VERSION_1_0) */ - #if defined(VK_KHR_swapchain) - vkAcquireNextImageKHR = (PFN_vkAcquireNextImageKHR)load(context, "vkAcquireNextImageKHR"); - vkCreateSwapchainKHR = (PFN_vkCreateSwapchainKHR)load(context, "vkCreateSwapchainKHR"); - vkDestroySwapchainKHR = (PFN_vkDestroySwapchainKHR)load(context, "vkDestroySwapchainKHR"); - vkGetSwapchainImagesKHR = (PFN_vkGetSwapchainImagesKHR)load(context, "vkGetSwapchainImagesKHR"); - vkQueuePresentKHR = (PFN_vkQueuePresentKHR)load(context, "vkQueuePresentKHR"); - #endif /* defined(VK_KHR_swapchain) */ + #ifdef VK_VERSION_1_0 + vkAllocateCommandBuffers = reinterpret_cast(load(context, "vkAllocateCommandBuffers")); + vkAllocateDescriptorSets = reinterpret_cast(load(context, "vkAllocateDescriptorSets")); + vkAllocateMemory = reinterpret_cast(load(context, "vkAllocateMemory")); + vkBeginCommandBuffer = reinterpret_cast(load(context, "vkBeginCommandBuffer")); + vkBindBufferMemory = reinterpret_cast(load(context, "vkBindBufferMemory")); + vkBindImageMemory = reinterpret_cast(load(context, "vkBindImageMemory")); + vkCmdBeginQuery = reinterpret_cast(load(context, "vkCmdBeginQuery")); + vkCmdBeginRenderPass = reinterpret_cast(load(context, "vkCmdBeginRenderPass")); + vkCmdBindDescriptorSets = reinterpret_cast(load(context, "vkCmdBindDescriptorSets")); + vkCmdBindIndexBuffer = reinterpret_cast(load(context, "vkCmdBindIndexBuffer")); + vkCmdBindPipeline = reinterpret_cast(load(context, "vkCmdBindPipeline")); + vkCmdBindVertexBuffers = reinterpret_cast(load(context, "vkCmdBindVertexBuffers")); + vkCmdBlitImage = reinterpret_cast(load(context, "vkCmdBlitImage")); + vkCmdClearAttachments = reinterpret_cast(load(context, "vkCmdClearAttachments")); + 
vkCmdClearColorImage = reinterpret_cast(load(context, "vkCmdClearColorImage")); + vkCmdClearDepthStencilImage = reinterpret_cast(load(context, "vkCmdClearDepthStencilImage")); + vkCmdCopyBuffer = reinterpret_cast(load(context, "vkCmdCopyBuffer")); + vkCmdCopyBufferToImage = reinterpret_cast(load(context, "vkCmdCopyBufferToImage")); + vkCmdCopyImage = reinterpret_cast(load(context, "vkCmdCopyImage")); + vkCmdCopyImageToBuffer = reinterpret_cast(load(context, "vkCmdCopyImageToBuffer")); + vkCmdCopyQueryPoolResults = reinterpret_cast(load(context, "vkCmdCopyQueryPoolResults")); + vkCmdDispatch = reinterpret_cast(load(context, "vkCmdDispatch")); + vkCmdDispatchIndirect = reinterpret_cast(load(context, "vkCmdDispatchIndirect")); + vkCmdDraw = reinterpret_cast(load(context, "vkCmdDraw")); + vkCmdDrawIndexed = reinterpret_cast(load(context, "vkCmdDrawIndexed")); + vkCmdDrawIndexedIndirect = reinterpret_cast(load(context, "vkCmdDrawIndexedIndirect")); + vkCmdDrawIndirect = reinterpret_cast(load(context, "vkCmdDrawIndirect")); + vkCmdEndQuery = reinterpret_cast(load(context, "vkCmdEndQuery")); + vkCmdEndRenderPass = reinterpret_cast(load(context, "vkCmdEndRenderPass")); + vkCmdExecuteCommands = reinterpret_cast(load(context, "vkCmdExecuteCommands")); + vkCmdFillBuffer = reinterpret_cast(load(context, "vkCmdFillBuffer")); + vkCmdNextSubpass = reinterpret_cast(load(context, "vkCmdNextSubpass")); + vkCmdPipelineBarrier = reinterpret_cast(load(context, "vkCmdPipelineBarrier")); + vkCmdPushConstants = reinterpret_cast(load(context, "vkCmdPushConstants")); + vkCmdResetEvent = reinterpret_cast(load(context, "vkCmdResetEvent")); + vkCmdResetQueryPool = reinterpret_cast(load(context, "vkCmdResetQueryPool")); + vkCmdResolveImage = reinterpret_cast(load(context, "vkCmdResolveImage")); + vkCmdSetBlendConstants = reinterpret_cast(load(context, "vkCmdSetBlendConstants")); + vkCmdSetDepthBias = reinterpret_cast(load(context, "vkCmdSetDepthBias")); + vkCmdSetDepthBounds = reinterpret_cast(load(context, "vkCmdSetDepthBounds")); + vkCmdSetEvent = reinterpret_cast(load(context, "vkCmdSetEvent")); + vkCmdSetLineWidth = reinterpret_cast(load(context, "vkCmdSetLineWidth")); + vkCmdSetScissor = reinterpret_cast(load(context, "vkCmdSetScissor")); + vkCmdSetStencilCompareMask = reinterpret_cast(load(context, "vkCmdSetStencilCompareMask")); + vkCmdSetStencilReference = reinterpret_cast(load(context, "vkCmdSetStencilReference")); + vkCmdSetStencilWriteMask = reinterpret_cast(load(context, "vkCmdSetStencilWriteMask")); + vkCmdSetViewport = reinterpret_cast(load(context, "vkCmdSetViewport")); + vkCmdUpdateBuffer = reinterpret_cast(load(context, "vkCmdUpdateBuffer")); + vkCmdWaitEvents = reinterpret_cast(load(context, "vkCmdWaitEvents")); + vkCmdWriteTimestamp = reinterpret_cast(load(context, "vkCmdWriteTimestamp")); + vkCreateBuffer = reinterpret_cast(load(context, "vkCreateBuffer")); + vkCreateBufferView = reinterpret_cast(load(context, "vkCreateBufferView")); + vkCreateCommandPool = reinterpret_cast(load(context, "vkCreateCommandPool")); + vkCreateComputePipelines = reinterpret_cast(load(context, "vkCreateComputePipelines")); + vkCreateDescriptorPool = reinterpret_cast(load(context, "vkCreateDescriptorPool")); + vkCreateDescriptorSetLayout = reinterpret_cast(load(context, "vkCreateDescriptorSetLayout")); + vkCreateEvent = reinterpret_cast(load(context, "vkCreateEvent")); + vkCreateFence = reinterpret_cast(load(context, "vkCreateFence")); + vkCreateFramebuffer = reinterpret_cast(load(context, "vkCreateFramebuffer")); + 
vkCreateGraphicsPipelines = reinterpret_cast(load(context, "vkCreateGraphicsPipelines")); + vkCreateImage = reinterpret_cast(load(context, "vkCreateImage")); + vkCreateImageView = reinterpret_cast(load(context, "vkCreateImageView")); + vkCreatePipelineCache = reinterpret_cast(load(context, "vkCreatePipelineCache")); + vkCreatePipelineLayout = reinterpret_cast(load(context, "vkCreatePipelineLayout")); + vkCreateQueryPool = reinterpret_cast(load(context, "vkCreateQueryPool")); + vkCreateRenderPass = reinterpret_cast(load(context, "vkCreateRenderPass")); + vkCreateSampler = reinterpret_cast(load(context, "vkCreateSampler")); + vkCreateSemaphore = reinterpret_cast(load(context, "vkCreateSemaphore")); + vkCreateShaderModule = reinterpret_cast(load(context, "vkCreateShaderModule")); + vkDestroyBuffer = reinterpret_cast(load(context, "vkDestroyBuffer")); + vkDestroyBufferView = reinterpret_cast(load(context, "vkDestroyBufferView")); + vkDestroyCommandPool = reinterpret_cast(load(context, "vkDestroyCommandPool")); + vkDestroyDescriptorPool = reinterpret_cast(load(context, "vkDestroyDescriptorPool")); + vkDestroyDescriptorSetLayout = reinterpret_cast(load(context, "vkDestroyDescriptorSetLayout")); + vkDestroyDevice = reinterpret_cast(load(context, "vkDestroyDevice")); + vkDestroyEvent = reinterpret_cast(load(context, "vkDestroyEvent")); + vkDestroyFence = reinterpret_cast(load(context, "vkDestroyFence")); + vkDestroyFramebuffer = reinterpret_cast(load(context, "vkDestroyFramebuffer")); + vkDestroyImage = reinterpret_cast(load(context, "vkDestroyImage")); + vkDestroyImageView = reinterpret_cast(load(context, "vkDestroyImageView")); + vkDestroyPipeline = reinterpret_cast(load(context, "vkDestroyPipeline")); + vkDestroyPipelineCache = reinterpret_cast(load(context, "vkDestroyPipelineCache")); + vkDestroyPipelineLayout = reinterpret_cast(load(context, "vkDestroyPipelineLayout")); + vkDestroyQueryPool = reinterpret_cast(load(context, "vkDestroyQueryPool")); + vkDestroyRenderPass = reinterpret_cast(load(context, "vkDestroyRenderPass")); + vkDestroySampler = reinterpret_cast(load(context, "vkDestroySampler")); + vkDestroySemaphore = reinterpret_cast(load(context, "vkDestroySemaphore")); + vkDestroyShaderModule = reinterpret_cast(load(context, "vkDestroyShaderModule")); + vkDeviceWaitIdle = reinterpret_cast(load(context, "vkDeviceWaitIdle")); + vkEndCommandBuffer = reinterpret_cast(load(context, "vkEndCommandBuffer")); + vkFlushMappedMemoryRanges = reinterpret_cast(load(context, "vkFlushMappedMemoryRanges")); + vkFreeCommandBuffers = reinterpret_cast(load(context, "vkFreeCommandBuffers")); + vkFreeDescriptorSets = reinterpret_cast(load(context, "vkFreeDescriptorSets")); + vkFreeMemory = reinterpret_cast(load(context, "vkFreeMemory")); + vkGetBufferMemoryRequirements = reinterpret_cast(load(context, "vkGetBufferMemoryRequirements")); + vkGetDeviceMemoryCommitment = reinterpret_cast(load(context, "vkGetDeviceMemoryCommitment")); + vkGetDeviceQueue = reinterpret_cast(load(context, "vkGetDeviceQueue")); + vkGetEventStatus = reinterpret_cast(load(context, "vkGetEventStatus")); + vkGetFenceStatus = reinterpret_cast(load(context, "vkGetFenceStatus")); + vkGetImageMemoryRequirements = reinterpret_cast(load(context, "vkGetImageMemoryRequirements")); + vkGetImageSparseMemoryRequirements = reinterpret_cast(load(context, "vkGetImageSparseMemoryRequirements")); + vkGetImageSubresourceLayout = reinterpret_cast(load(context, "vkGetImageSubresourceLayout")); + vkGetPipelineCacheData = reinterpret_cast(load(context, 
"vkGetPipelineCacheData")); + vkGetQueryPoolResults = reinterpret_cast(load(context, "vkGetQueryPoolResults")); + vkGetRenderAreaGranularity = reinterpret_cast(load(context, "vkGetRenderAreaGranularity")); + vkInvalidateMappedMemoryRanges = reinterpret_cast(load(context, "vkInvalidateMappedMemoryRanges")); + vkMapMemory = reinterpret_cast(load(context, "vkMapMemory")); + vkMergePipelineCaches = reinterpret_cast(load(context, "vkMergePipelineCaches")); + vkQueueBindSparse = reinterpret_cast(load(context, "vkQueueBindSparse")); + vkQueueSubmit = reinterpret_cast(load(context, "vkQueueSubmit")); + vkQueueWaitIdle = reinterpret_cast(load(context, "vkQueueWaitIdle")); + vkResetCommandBuffer = reinterpret_cast(load(context, "vkResetCommandBuffer")); + vkResetCommandPool = reinterpret_cast(load(context, "vkResetCommandPool")); + vkResetDescriptorPool = reinterpret_cast(load(context, "vkResetDescriptorPool")); + vkResetEvent = reinterpret_cast(load(context, "vkResetEvent")); + vkResetFences = reinterpret_cast(load(context, "vkResetFences")); + vkSetEvent = reinterpret_cast(load(context, "vkSetEvent")); + vkUnmapMemory = reinterpret_cast(load(context, "vkUnmapMemory")); + vkUpdateDescriptorSets = reinterpret_cast(load(context, "vkUpdateDescriptorSets")); + vkWaitForFences = reinterpret_cast(load(context, "vkWaitForFences")); + #endif + #ifdef VK_KHR_swapchain + vkAcquireNextImageKHR = reinterpret_cast(load(context, "vkAcquireNextImageKHR")); + vkCreateSwapchainKHR = reinterpret_cast(load(context, "vkCreateSwapchainKHR")); + vkDestroySwapchainKHR = reinterpret_cast(load(context, "vkDestroySwapchainKHR")); + vkGetSwapchainImagesKHR = reinterpret_cast(load(context, "vkGetSwapchainImagesKHR")); + vkQueuePresentKHR = reinterpret_cast(load(context, "vkQueuePresentKHR")); + #endif DebugLog("Vulkan loader : device functions loaded"); } VulkanLoader::~VulkanLoader() { - #if defined(MLX_PLAT_WINDOWS) - FreeLibrary((HMODULE)p_module); + #ifdef MLX_PLAT_WINDOWS + FreeLibrary(p_module); #else dlclose(p_module); #endif @@ -263,7 +299,7 @@ namespace mlx } } -#if defined(VK_VERSION_1_0) +#ifdef VK_VERSION_1_0 PFN_vkAllocateCommandBuffers vkAllocateCommandBuffers; PFN_vkAllocateDescriptorSets vkAllocateDescriptorSets; PFN_vkAllocateMemory vkAllocateMemory; @@ -401,18 +437,18 @@ namespace mlx PFN_vkUnmapMemory vkUnmapMemory; PFN_vkUpdateDescriptorSets vkUpdateDescriptorSets; PFN_vkWaitForFences vkWaitForFences; -#endif /* defined(VK_VERSION_1_0) */ -#if defined(VK_KHR_swapchain) +#endif +#ifdef VK_KHR_swapchain PFN_vkAcquireNextImageKHR vkAcquireNextImageKHR; PFN_vkCreateSwapchainKHR vkCreateSwapchainKHR; PFN_vkDestroySwapchainKHR vkDestroySwapchainKHR; PFN_vkGetSwapchainImagesKHR vkGetSwapchainImagesKHR; PFN_vkQueuePresentKHR vkQueuePresentKHR; -#endif /* defined(VK_KHR_swapchain) */ -#if defined(VK_KHR_surface) +#endif +#ifdef VK_KHR_surface PFN_vkDestroySurfaceKHR vkDestroySurfaceKHR; PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR vkGetPhysicalDeviceSurfaceCapabilitiesKHR; PFN_vkGetPhysicalDeviceSurfaceFormatsKHR vkGetPhysicalDeviceSurfaceFormatsKHR; PFN_vkGetPhysicalDeviceSurfacePresentModesKHR vkGetPhysicalDeviceSurfacePresentModesKHR; PFN_vkGetPhysicalDeviceSurfaceSupportKHR vkGetPhysicalDeviceSurfaceSupportKHR; -#endif /* defined(VK_KHR_surface) */ +#endif diff --git a/runtime/Sources/Renderer/Vulkan/VulkanLoader.h b/runtime/Sources/Renderer/Vulkan/VulkanLoader.h index 73e073b..76ce045 100644 --- a/runtime/Sources/Renderer/Vulkan/VulkanLoader.h +++ b/runtime/Sources/Renderer/Vulkan/VulkanLoader.h @@ -23,6 
+23,7 @@ namespace mlx public: VulkanLoader(); void LoadInstance(VkInstance instance); + void LoadDevice(VkDevice device); ~VulkanLoader(); private: From 48801d1070b85293e8dfafb4b46262157da11e4b Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Fri, 20 Sep 2024 16:14:14 +0200 Subject: [PATCH 032/131] fixing vulkan loader, moving vulkan pfns to RenderCore --- runtime/Includes/Core/Application.h | 1 + runtime/Includes/PreCompiled.h | 3 + runtime/Includes/Renderer/Buffer.h | 4 +- runtime/Includes/Renderer/Descriptor.h | 1 - runtime/Includes/Renderer/Image.h | 2 +- .../Includes/Renderer/Pipelines/Pipeline.h | 4 +- runtime/Includes/Renderer/RenderCore.h | 30 +- runtime/Includes/Renderer/Vulkan/VulkanDefs.h | 121 ++++ .../Renderer/Vulkan/VulkanPrototypes.h | 170 ----- runtime/Sources/Core/Application.cpp | 2 +- runtime/Sources/Core/Bridge.cpp | 2 - runtime/Sources/Graphics/Mesh.cpp | 2 +- runtime/Sources/Renderer/Descriptor.cpp | 2 +- runtime/Sources/Renderer/Image.cpp | 8 +- runtime/Sources/Renderer/Memory.cpp | 34 +- .../Sources/Renderer/Pipelines/Graphics.cpp | 10 +- runtime/Sources/Renderer/RenderCore.cpp | 110 ++- .../Sources/Renderer/RenderPasses/2DPass.cpp | 4 +- .../Renderer/RenderPasses/FinalPass.cpp | 4 +- runtime/Sources/Renderer/Renderer.cpp | 8 +- .../Sources/Renderer/Vulkan/VulkanLoader.cpp | 354 +--------- third_party/kvf.h | 629 ++++++++++++++---- 22 files changed, 821 insertions(+), 684 deletions(-) create mode 100644 runtime/Includes/Renderer/Vulkan/VulkanDefs.h delete mode 100644 runtime/Includes/Renderer/Vulkan/VulkanPrototypes.h diff --git a/runtime/Includes/Core/Application.h b/runtime/Includes/Core/Application.h index 213010b..7521d77 100644 --- a/runtime/Includes/Core/Application.h +++ b/runtime/Includes/Core/Application.h @@ -47,6 +47,7 @@ namespace mlx ~Application(); private: + RenderCore m_render_core; FpsManager m_fps; Inputs m_in; ImageRegistry m_image_registry; diff --git a/runtime/Includes/PreCompiled.h b/runtime/Includes/PreCompiled.h index f0041a9..f948b11 100644 --- a/runtime/Includes/PreCompiled.h +++ b/runtime/Includes/PreCompiled.h @@ -9,6 +9,8 @@ #include #include +#include + #include #include @@ -81,6 +83,7 @@ #include #include +#define KVF_IMPL_VK_NO_PROTOTYPES #ifdef DEBUG #define KVF_ENABLE_VALIDATION_LAYERS #endif diff --git a/runtime/Includes/Renderer/Buffer.h b/runtime/Includes/Renderer/Buffer.h index 8d0b489..d6574fa 100644 --- a/runtime/Includes/Renderer/Buffer.h +++ b/runtime/Includes/Renderer/Buffer.h @@ -51,7 +51,7 @@ namespace mlx public: inline void Init(std::uint32_t size, VkBufferUsageFlags additional_flags = 0) { GPUBuffer::Init(BufferType::LowDynamic, size, VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | additional_flags, {}); } void SetData(CPUBuffer data); - inline void Bind(VkCommandBuffer cmd) const noexcept { VkDeviceSize offset = 0; vkCmdBindVertexBuffers(cmd, 0, 1, &m_buffer, &offset); } + inline void Bind(VkCommandBuffer cmd) const noexcept { VkDeviceSize offset = 0; RenderCore::Get().vkCmdBindVertexBuffers(cmd, 0, 1, &m_buffer, &offset); } }; class IndexBuffer : public GPUBuffer @@ -59,7 +59,7 @@ namespace mlx public: inline void Init(std::uint32_t size, VkBufferUsageFlags additional_flags = 0) { GPUBuffer::Init(BufferType::LowDynamic, size, VK_BUFFER_USAGE_INDEX_BUFFER_BIT | additional_flags, {}); } void SetData(CPUBuffer data); - inline void Bind(VkCommandBuffer cmd) const noexcept { vkCmdBindIndexBuffer(cmd, m_buffer, 0, VK_INDEX_TYPE_UINT32); } + inline void Bind(VkCommandBuffer cmd) const noexcept { RenderCore::Get().vkCmdBindIndexBuffer(cmd, 
m_buffer, 0, VK_INDEX_TYPE_UINT32); } }; class UniformBuffer diff --git a/runtime/Includes/Renderer/Descriptor.h b/runtime/Includes/Renderer/Descriptor.h index b99fbfa..ad37ae4 100644 --- a/runtime/Includes/Renderer/Descriptor.h +++ b/runtime/Includes/Renderer/Descriptor.h @@ -4,7 +4,6 @@ #include #include #include -#include namespace mlx { diff --git a/runtime/Includes/Renderer/Image.h b/runtime/Includes/Renderer/Image.h index 996a3ac..ebde1ad 100644 --- a/runtime/Includes/Renderer/Image.h +++ b/runtime/Includes/Renderer/Image.h @@ -99,7 +99,7 @@ namespace mlx kvfBeginCommandBuffer(cmd, VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT); TransitionLayout(VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, cmd); kvfCopyBufferToImage(cmd, Image::Get(), staging_buffer.Get(), staging_buffer.GetOffset(), VK_IMAGE_ASPECT_COLOR_BIT, { width, height, 1 }); - vkEndCommandBuffer(cmd); + RenderCore::Get().vkEndCommandBuffer(cmd); VkFence fence = kvfCreateFence(RenderCore::Get().GetDevice()); kvfSubmitSingleTimeCommandBuffer(RenderCore::Get().GetDevice(), cmd, KVF_GRAPHICS_QUEUE, fence); kvfDestroyFence(RenderCore::Get().GetDevice(), fence); diff --git a/runtime/Includes/Renderer/Pipelines/Pipeline.h b/runtime/Includes/Renderer/Pipelines/Pipeline.h index 126fb34..f92f1b9 100644 --- a/runtime/Includes/Renderer/Pipelines/Pipeline.h +++ b/runtime/Includes/Renderer/Pipelines/Pipeline.h @@ -1,8 +1,6 @@ #ifndef __MLX_PIPELINE__ #define __MLX_PIPELINE__ -#include - namespace mlx { class Pipeline @@ -10,7 +8,7 @@ namespace mlx public: Pipeline() = default; - inline virtual bool BindPipeline(VkCommandBuffer command_buffer) noexcept { vkCmdBindPipeline(command_buffer, GetPipelineBindPoint(), GetPipeline()); return true; } + inline virtual bool BindPipeline(VkCommandBuffer command_buffer) noexcept { RenderCore::Get().vkCmdBindPipeline(command_buffer, GetPipelineBindPoint(), GetPipeline()); return true; } inline virtual void EndPipeline([[maybe_unused]] VkCommandBuffer command_buffer) noexcept {} virtual VkPipeline GetPipeline() const = 0; diff --git a/runtime/Includes/Renderer/RenderCore.h b/runtime/Includes/Renderer/RenderCore.h index 5885064..36947a6 100644 --- a/runtime/Includes/Renderer/RenderCore.h +++ b/runtime/Includes/Renderer/RenderCore.h @@ -1,21 +1,17 @@ #ifndef __MLX_RENDER_CORE__ #define __MLX_RENDER_CORE__ -#include #include namespace mlx { constexpr const int MAX_FRAMES_IN_FLIGHT = 3; - class RenderCore : public Singleton + class RenderCore { - friend class Singleton; + friend class Application; public: - void Init() noexcept; - void Destroy() noexcept; - [[nodiscard]] MLX_FORCEINLINE VkInstance GetInstance() const noexcept { return m_instance; } [[nodiscard]] MLX_FORCEINLINE VkInstance& GetInstanceRef() noexcept { return m_instance; } [[nodiscard]] MLX_FORCEINLINE VkDevice GetDevice() const noexcept { return m_device; } @@ -24,11 +20,27 @@ namespace mlx inline void WaitDeviceIdle() const noexcept { vkDeviceWaitIdle(m_device); } - private: - RenderCore() = default; - ~RenderCore() = default; + inline static bool IsInit() noexcept { return s_instance != nullptr; } + inline static RenderCore& Get() noexcept { return *s_instance; } + + #define MLX_VULKAN_GLOBAL_FUNCTION(fn) PFN_##fn fn = nullptr; + #define MLX_VULKAN_INSTANCE_FUNCTION(fn) PFN_##fn fn = nullptr; + #define MLX_VULKAN_DEVICE_FUNCTION(fn) PFN_##fn fn = nullptr; + #include + #undef MLX_VULKAN_GLOBAL_FUNCTION + #undef MLX_VULKAN_INSTANCE_FUNCTION + #undef MLX_VULKAN_DEVICE_FUNCTION private: + RenderCore(); + void LoadKVFGlobalVulkanFunctionPointers() const 
noexcept; + void LoadKVFInstanceVulkanFunctionPointers() const noexcept; + void LoadKVFDeviceVulkanFunctionPointers() const noexcept; + ~RenderCore(); + + private: + static RenderCore* s_instance; + GPUAllocator m_allocator; VkInstance m_instance = VK_NULL_HANDLE; VkDevice m_device = VK_NULL_HANDLE; diff --git a/runtime/Includes/Renderer/Vulkan/VulkanDefs.h b/runtime/Includes/Renderer/Vulkan/VulkanDefs.h new file mode 100644 index 0000000..2c9972d --- /dev/null +++ b/runtime/Includes/Renderer/Vulkan/VulkanDefs.h @@ -0,0 +1,121 @@ +// No header guard + +#ifdef VK_VERSION_1_0 + #ifdef MLX_VULKAN_GLOBAL_FUNCTION + MLX_VULKAN_GLOBAL_FUNCTION(vkCreateInstance) + MLX_VULKAN_GLOBAL_FUNCTION(vkEnumerateInstanceExtensionProperties) + MLX_VULKAN_GLOBAL_FUNCTION(vkEnumerateInstanceLayerProperties) + MLX_VULKAN_GLOBAL_FUNCTION(vkGetInstanceProcAddr) + #endif + + #ifdef MLX_VULKAN_INSTANCE_FUNCTION + MLX_VULKAN_INSTANCE_FUNCTION(vkCreateDevice) + MLX_VULKAN_INSTANCE_FUNCTION(vkDestroyInstance) + MLX_VULKAN_INSTANCE_FUNCTION(vkEnumerateDeviceExtensionProperties) + MLX_VULKAN_INSTANCE_FUNCTION(vkEnumeratePhysicalDevices) + MLX_VULKAN_INSTANCE_FUNCTION(vkGetDeviceProcAddr) + MLX_VULKAN_INSTANCE_FUNCTION(vkGetPhysicalDeviceFeatures) + MLX_VULKAN_INSTANCE_FUNCTION(vkGetPhysicalDeviceFormatProperties) + MLX_VULKAN_INSTANCE_FUNCTION(vkGetPhysicalDeviceImageFormatProperties) + MLX_VULKAN_INSTANCE_FUNCTION(vkGetPhysicalDeviceMemoryProperties) + MLX_VULKAN_INSTANCE_FUNCTION(vkGetPhysicalDeviceProperties) + MLX_VULKAN_INSTANCE_FUNCTION(vkGetPhysicalDeviceQueueFamilyProperties) + #endif + + #ifdef MLX_VULKAN_DEVICE_FUNCTION + MLX_VULKAN_DEVICE_FUNCTION(vkAllocateCommandBuffers) + MLX_VULKAN_DEVICE_FUNCTION(vkAllocateDescriptorSets) + MLX_VULKAN_DEVICE_FUNCTION(vkAllocateMemory) + MLX_VULKAN_DEVICE_FUNCTION(vkBeginCommandBuffer) + MLX_VULKAN_DEVICE_FUNCTION(vkBindBufferMemory) + MLX_VULKAN_DEVICE_FUNCTION(vkBindImageMemory) + MLX_VULKAN_DEVICE_FUNCTION(vkCmdBeginRenderPass) + MLX_VULKAN_DEVICE_FUNCTION(vkCmdBindDescriptorSets) + MLX_VULKAN_DEVICE_FUNCTION(vkCmdBindIndexBuffer) + MLX_VULKAN_DEVICE_FUNCTION(vkCmdBindPipeline) + MLX_VULKAN_DEVICE_FUNCTION(vkCmdBindVertexBuffers) + MLX_VULKAN_DEVICE_FUNCTION(vkCmdClearAttachments) + MLX_VULKAN_DEVICE_FUNCTION(vkCmdClearColorImage) + MLX_VULKAN_DEVICE_FUNCTION(vkCmdClearDepthStencilImage) + MLX_VULKAN_DEVICE_FUNCTION(vkCmdCopyBuffer) + MLX_VULKAN_DEVICE_FUNCTION(vkCmdCopyBufferToImage) + MLX_VULKAN_DEVICE_FUNCTION(vkCmdCopyImage) + MLX_VULKAN_DEVICE_FUNCTION(vkCmdCopyImageToBuffer) + MLX_VULKAN_DEVICE_FUNCTION(vkCmdDraw) + MLX_VULKAN_DEVICE_FUNCTION(vkCmdDrawIndexed) + MLX_VULKAN_DEVICE_FUNCTION(vkCmdEndRenderPass) + MLX_VULKAN_DEVICE_FUNCTION(vkCmdPipelineBarrier) + MLX_VULKAN_DEVICE_FUNCTION(vkCmdPushConstants) + MLX_VULKAN_DEVICE_FUNCTION(vkCmdSetScissor) + MLX_VULKAN_DEVICE_FUNCTION(vkCmdSetViewport) + MLX_VULKAN_DEVICE_FUNCTION(vkCreateBuffer) + MLX_VULKAN_DEVICE_FUNCTION(vkCreateCommandPool) + MLX_VULKAN_DEVICE_FUNCTION(vkCreateDescriptorPool) + MLX_VULKAN_DEVICE_FUNCTION(vkCreateDescriptorSetLayout) + MLX_VULKAN_DEVICE_FUNCTION(vkCreateFence) + MLX_VULKAN_DEVICE_FUNCTION(vkCreateFramebuffer) + MLX_VULKAN_DEVICE_FUNCTION(vkCreateGraphicsPipelines) + MLX_VULKAN_DEVICE_FUNCTION(vkCreateImage) + MLX_VULKAN_DEVICE_FUNCTION(vkCreateImageView) + MLX_VULKAN_DEVICE_FUNCTION(vkCreatePipelineLayout) + MLX_VULKAN_DEVICE_FUNCTION(vkCreateRenderPass) + MLX_VULKAN_DEVICE_FUNCTION(vkCreateSampler) + MLX_VULKAN_DEVICE_FUNCTION(vkCreateSemaphore) + 
MLX_VULKAN_DEVICE_FUNCTION(vkCreateShaderModule) + MLX_VULKAN_DEVICE_FUNCTION(vkDestroyBuffer) + MLX_VULKAN_DEVICE_FUNCTION(vkDestroyCommandPool) + MLX_VULKAN_DEVICE_FUNCTION(vkDestroyDescriptorPool) + MLX_VULKAN_DEVICE_FUNCTION(vkDestroyDescriptorSetLayout) + MLX_VULKAN_DEVICE_FUNCTION(vkDestroyDevice) + MLX_VULKAN_DEVICE_FUNCTION(vkDestroyFence) + MLX_VULKAN_DEVICE_FUNCTION(vkDestroyFramebuffer) + MLX_VULKAN_DEVICE_FUNCTION(vkDestroyImage) + MLX_VULKAN_DEVICE_FUNCTION(vkDestroyImageView) + MLX_VULKAN_DEVICE_FUNCTION(vkDestroyPipeline) + MLX_VULKAN_DEVICE_FUNCTION(vkDestroyPipelineLayout) + MLX_VULKAN_DEVICE_FUNCTION(vkDestroyRenderPass) + MLX_VULKAN_DEVICE_FUNCTION(vkDestroySampler) + MLX_VULKAN_DEVICE_FUNCTION(vkDestroySemaphore) + MLX_VULKAN_DEVICE_FUNCTION(vkDestroyShaderModule) + MLX_VULKAN_DEVICE_FUNCTION(vkDeviceWaitIdle) + MLX_VULKAN_DEVICE_FUNCTION(vkEndCommandBuffer) + MLX_VULKAN_DEVICE_FUNCTION(vkFlushMappedMemoryRanges) + MLX_VULKAN_DEVICE_FUNCTION(vkFreeCommandBuffers) + MLX_VULKAN_DEVICE_FUNCTION(vkFreeMemory) + MLX_VULKAN_DEVICE_FUNCTION(vkGetBufferMemoryRequirements) + MLX_VULKAN_DEVICE_FUNCTION(vkGetDeviceMemoryCommitment) + MLX_VULKAN_DEVICE_FUNCTION(vkGetDeviceQueue) + MLX_VULKAN_DEVICE_FUNCTION(vkGetFenceStatus) + MLX_VULKAN_DEVICE_FUNCTION(vkGetImageMemoryRequirements) + MLX_VULKAN_DEVICE_FUNCTION(vkGetImageSubresourceLayout) + MLX_VULKAN_DEVICE_FUNCTION(vkInvalidateMappedMemoryRanges) + MLX_VULKAN_DEVICE_FUNCTION(vkMapMemory) + MLX_VULKAN_DEVICE_FUNCTION(vkQueueSubmit) + MLX_VULKAN_DEVICE_FUNCTION(vkQueueWaitIdle) + MLX_VULKAN_DEVICE_FUNCTION(vkResetCommandBuffer) + MLX_VULKAN_DEVICE_FUNCTION(vkResetDescriptorPool) + MLX_VULKAN_DEVICE_FUNCTION(vkResetEvent) + MLX_VULKAN_DEVICE_FUNCTION(vkResetFences) + MLX_VULKAN_DEVICE_FUNCTION(vkUnmapMemory) + MLX_VULKAN_DEVICE_FUNCTION(vkUpdateDescriptorSets) + MLX_VULKAN_DEVICE_FUNCTION(vkWaitForFences) + #endif +#endif +#ifdef VK_KHR_swapchain + #ifdef MLX_VULKAN_DEVICE_FUNCTION + MLX_VULKAN_DEVICE_FUNCTION(vkAcquireNextImageKHR) + MLX_VULKAN_DEVICE_FUNCTION(vkCreateSwapchainKHR) + MLX_VULKAN_DEVICE_FUNCTION(vkDestroySwapchainKHR) + MLX_VULKAN_DEVICE_FUNCTION(vkGetSwapchainImagesKHR) + MLX_VULKAN_DEVICE_FUNCTION(vkQueuePresentKHR) + #endif +#endif +#ifdef VK_KHR_surface + #ifdef MLX_VULKAN_INSTANCE_FUNCTION + MLX_VULKAN_INSTANCE_FUNCTION(vkDestroySurfaceKHR) + MLX_VULKAN_INSTANCE_FUNCTION(vkGetPhysicalDeviceSurfaceCapabilitiesKHR) + MLX_VULKAN_INSTANCE_FUNCTION(vkGetPhysicalDeviceSurfaceFormatsKHR) + MLX_VULKAN_INSTANCE_FUNCTION(vkGetPhysicalDeviceSurfacePresentModesKHR) + MLX_VULKAN_INSTANCE_FUNCTION(vkGetPhysicalDeviceSurfaceSupportKHR) + #endif +#endif diff --git a/runtime/Includes/Renderer/Vulkan/VulkanPrototypes.h b/runtime/Includes/Renderer/Vulkan/VulkanPrototypes.h deleted file mode 100644 index d3e9ad9..0000000 --- a/runtime/Includes/Renderer/Vulkan/VulkanPrototypes.h +++ /dev/null @@ -1,170 +0,0 @@ -#ifndef __MLX_VK_PROTOTYPES__ -#define __MLX_VK_PROTOTYPES__ - -#if defined(VULKAN_H_) && !defined(VK_NO_PROTOTYPES) - #error "define VK_NO_PROTOTYPES needed" -#endif - -#ifndef VK_NO_PROTOTYPES - #define VK_NO_PROTOTYPES -#endif - -#ifndef VULKAN_H_ - #include -#endif - -#ifdef VK_VERSION_1_0 - extern PFN_vkAllocateCommandBuffers vkAllocateCommandBuffers; - extern PFN_vkAllocateDescriptorSets vkAllocateDescriptorSets; - extern PFN_vkAllocateMemory vkAllocateMemory; - extern PFN_vkBeginCommandBuffer vkBeginCommandBuffer; - extern PFN_vkBindBufferMemory vkBindBufferMemory; - extern PFN_vkBindImageMemory vkBindImageMemory; - 
extern PFN_vkCmdBeginQuery vkCmdBeginQuery; - extern PFN_vkCmdBeginRenderPass vkCmdBeginRenderPass; - extern PFN_vkCmdBindDescriptorSets vkCmdBindDescriptorSets; - extern PFN_vkCmdBindIndexBuffer vkCmdBindIndexBuffer; - extern PFN_vkCmdBindPipeline vkCmdBindPipeline; - extern PFN_vkCmdBindVertexBuffers vkCmdBindVertexBuffers; - extern PFN_vkCmdBlitImage vkCmdBlitImage; - extern PFN_vkCmdClearAttachments vkCmdClearAttachments; - extern PFN_vkCmdClearColorImage vkCmdClearColorImage; - extern PFN_vkCmdClearDepthStencilImage vkCmdClearDepthStencilImage; - extern PFN_vkCmdCopyBuffer vkCmdCopyBuffer; - extern PFN_vkCmdCopyBufferToImage vkCmdCopyBufferToImage; - extern PFN_vkCmdCopyImage vkCmdCopyImage; - extern PFN_vkCmdCopyImageToBuffer vkCmdCopyImageToBuffer; - extern PFN_vkCmdCopyQueryPoolResults vkCmdCopyQueryPoolResults; - extern PFN_vkCmdDispatch vkCmdDispatch; - extern PFN_vkCmdDispatchIndirect vkCmdDispatchIndirect; - extern PFN_vkCmdDraw vkCmdDraw; - extern PFN_vkCmdDrawIndexed vkCmdDrawIndexed; - extern PFN_vkCmdDrawIndexedIndirect vkCmdDrawIndexedIndirect; - extern PFN_vkCmdDrawIndirect vkCmdDrawIndirect; - extern PFN_vkCmdEndQuery vkCmdEndQuery; - extern PFN_vkCmdEndRenderPass vkCmdEndRenderPass; - extern PFN_vkCmdExecuteCommands vkCmdExecuteCommands; - extern PFN_vkCmdFillBuffer vkCmdFillBuffer; - extern PFN_vkCmdNextSubpass vkCmdNextSubpass; - extern PFN_vkCmdPipelineBarrier vkCmdPipelineBarrier; - extern PFN_vkCmdPushConstants vkCmdPushConstants; - extern PFN_vkCmdResetEvent vkCmdResetEvent; - extern PFN_vkCmdResetQueryPool vkCmdResetQueryPool; - extern PFN_vkCmdResolveImage vkCmdResolveImage; - extern PFN_vkCmdSetBlendConstants vkCmdSetBlendConstants; - extern PFN_vkCmdSetDepthBias vkCmdSetDepthBias; - extern PFN_vkCmdSetDepthBounds vkCmdSetDepthBounds; - extern PFN_vkCmdSetEvent vkCmdSetEvent; - extern PFN_vkCmdSetLineWidth vkCmdSetLineWidth; - extern PFN_vkCmdSetScissor vkCmdSetScissor; - extern PFN_vkCmdSetStencilCompareMask vkCmdSetStencilCompareMask; - extern PFN_vkCmdSetStencilReference vkCmdSetStencilReference; - extern PFN_vkCmdSetStencilWriteMask vkCmdSetStencilWriteMask; - extern PFN_vkCmdSetViewport vkCmdSetViewport; - extern PFN_vkCmdUpdateBuffer vkCmdUpdateBuffer; - extern PFN_vkCmdWaitEvents vkCmdWaitEvents; - extern PFN_vkCmdWriteTimestamp vkCmdWriteTimestamp; - extern PFN_vkCreateBuffer vkCreateBuffer; - extern PFN_vkCreateBufferView vkCreateBufferView; - extern PFN_vkCreateCommandPool vkCreateCommandPool; - extern PFN_vkCreateComputePipelines vkCreateComputePipelines; - extern PFN_vkCreateDescriptorPool vkCreateDescriptorPool; - extern PFN_vkCreateDescriptorSetLayout vkCreateDescriptorSetLayout; - extern PFN_vkCreateDevice vkCreateDevice; - extern PFN_vkCreateEvent vkCreateEvent; - extern PFN_vkCreateFence vkCreateFence; - extern PFN_vkCreateFramebuffer vkCreateFramebuffer; - extern PFN_vkCreateGraphicsPipelines vkCreateGraphicsPipelines; - extern PFN_vkCreateImage vkCreateImage; - extern PFN_vkCreateImageView vkCreateImageView; - extern PFN_vkCreateInstance vkCreateInstance; - extern PFN_vkCreatePipelineCache vkCreatePipelineCache; - extern PFN_vkCreatePipelineLayout vkCreatePipelineLayout; - extern PFN_vkCreateQueryPool vkCreateQueryPool; - extern PFN_vkCreateRenderPass vkCreateRenderPass; - extern PFN_vkCreateSampler vkCreateSampler; - extern PFN_vkCreateSemaphore vkCreateSemaphore; - extern PFN_vkCreateShaderModule vkCreateShaderModule; - extern PFN_vkDestroyBuffer vkDestroyBuffer; - extern PFN_vkDestroyBufferView vkDestroyBufferView; - extern 
PFN_vkDestroyCommandPool vkDestroyCommandPool; - extern PFN_vkDestroyDescriptorPool vkDestroyDescriptorPool; - extern PFN_vkDestroyDescriptorSetLayout vkDestroyDescriptorSetLayout; - extern PFN_vkDestroyDevice vkDestroyDevice; - extern PFN_vkDestroyEvent vkDestroyEvent; - extern PFN_vkDestroyFence vkDestroyFence; - extern PFN_vkDestroyFramebuffer vkDestroyFramebuffer; - extern PFN_vkDestroyImage vkDestroyImage; - extern PFN_vkDestroyImageView vkDestroyImageView; - extern PFN_vkDestroyInstance vkDestroyInstance; - extern PFN_vkDestroyPipeline vkDestroyPipeline; - extern PFN_vkDestroyPipelineCache vkDestroyPipelineCache; - extern PFN_vkDestroyPipelineLayout vkDestroyPipelineLayout; - extern PFN_vkDestroyQueryPool vkDestroyQueryPool; - extern PFN_vkDestroyRenderPass vkDestroyRenderPass; - extern PFN_vkDestroySampler vkDestroySampler; - extern PFN_vkDestroySemaphore vkDestroySemaphore; - extern PFN_vkDestroyShaderModule vkDestroyShaderModule; - extern PFN_vkDeviceWaitIdle vkDeviceWaitIdle; - extern PFN_vkEndCommandBuffer vkEndCommandBuffer; - extern PFN_vkEnumerateDeviceExtensionProperties vkEnumerateDeviceExtensionProperties; - extern PFN_vkEnumerateDeviceLayerProperties vkEnumerateDeviceLayerProperties; - extern PFN_vkEnumerateInstanceExtensionProperties vkEnumerateInstanceExtensionProperties; - extern PFN_vkEnumerateInstanceLayerProperties vkEnumerateInstanceLayerProperties; - extern PFN_vkEnumeratePhysicalDevices vkEnumeratePhysicalDevices; - extern PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges; - extern PFN_vkFreeCommandBuffers vkFreeCommandBuffers; - extern PFN_vkFreeDescriptorSets vkFreeDescriptorSets; - extern PFN_vkFreeMemory vkFreeMemory; - extern PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements; - extern PFN_vkGetDeviceMemoryCommitment vkGetDeviceMemoryCommitment; - extern PFN_vkGetDeviceProcAddr vkGetDeviceProcAddr; - extern PFN_vkGetDeviceQueue vkGetDeviceQueue; - extern PFN_vkGetEventStatus vkGetEventStatus; - extern PFN_vkGetFenceStatus vkGetFenceStatus; - extern PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements; - extern PFN_vkGetImageSparseMemoryRequirements vkGetImageSparseMemoryRequirements; - extern PFN_vkGetImageSubresourceLayout vkGetImageSubresourceLayout; - extern PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr; - extern PFN_vkGetPhysicalDeviceFeatures vkGetPhysicalDeviceFeatures; - extern PFN_vkGetPhysicalDeviceFormatProperties vkGetPhysicalDeviceFormatProperties; - extern PFN_vkGetPhysicalDeviceImageFormatProperties vkGetPhysicalDeviceImageFormatProperties; - extern PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties; - extern PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties; - extern PFN_vkGetPhysicalDeviceQueueFamilyProperties vkGetPhysicalDeviceQueueFamilyProperties; - extern PFN_vkGetPhysicalDeviceSparseImageFormatProperties vkGetPhysicalDeviceSparseImageFormatProperties; - extern PFN_vkGetPipelineCacheData vkGetPipelineCacheData; - extern PFN_vkGetQueryPoolResults vkGetQueryPoolResults; - extern PFN_vkGetRenderAreaGranularity vkGetRenderAreaGranularity; - extern PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges; - extern PFN_vkMapMemory vkMapMemory; - extern PFN_vkMergePipelineCaches vkMergePipelineCaches; - extern PFN_vkQueueBindSparse vkQueueBindSparse; - extern PFN_vkQueueSubmit vkQueueSubmit; - extern PFN_vkQueueWaitIdle vkQueueWaitIdle; - extern PFN_vkResetCommandBuffer vkResetCommandBuffer; - extern PFN_vkResetCommandPool vkResetCommandPool; - extern 
PFN_vkResetDescriptorPool vkResetDescriptorPool; - extern PFN_vkResetEvent vkResetEvent; - extern PFN_vkResetFences vkResetFences; - extern PFN_vkSetEvent vkSetEvent; - extern PFN_vkUnmapMemory vkUnmapMemory; - extern PFN_vkUpdateDescriptorSets vkUpdateDescriptorSets; - extern PFN_vkWaitForFences vkWaitForFences; -#endif -#ifdef VK_KHR_swapchain - extern PFN_vkAcquireNextImageKHR vkAcquireNextImageKHR; - extern PFN_vkCreateSwapchainKHR vkCreateSwapchainKHR; - extern PFN_vkDestroySwapchainKHR vkDestroySwapchainKHR; - extern PFN_vkGetSwapchainImagesKHR vkGetSwapchainImagesKHR; - extern PFN_vkQueuePresentKHR vkQueuePresentKHR; -#endif -#ifdef VK_KHR_surface - extern PFN_vkDestroySurfaceKHR vkDestroySurfaceKHR; - extern PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR vkGetPhysicalDeviceSurfaceCapabilitiesKHR; - extern PFN_vkGetPhysicalDeviceSurfaceFormatsKHR vkGetPhysicalDeviceSurfaceFormatsKHR; - extern PFN_vkGetPhysicalDeviceSurfacePresentModesKHR vkGetPhysicalDeviceSurfacePresentModesKHR; - extern PFN_vkGetPhysicalDeviceSurfaceSupportKHR vkGetPhysicalDeviceSurfaceSupportKHR; -#endif - -#endif diff --git a/runtime/Sources/Core/Application.cpp b/runtime/Sources/Core/Application.cpp index 5da8017..29ffbbb 100644 --- a/runtime/Sources/Core/Application.cpp +++ b/runtime/Sources/Core/Application.cpp @@ -8,7 +8,7 @@ namespace mlx { - Application::Application() : m_fps(), m_in() + Application::Application() : m_render_core(), m_fps(), m_in() { EventBus::RegisterListener({[](const EventBase& event) { diff --git a/runtime/Sources/Core/Bridge.cpp b/runtime/Sources/Core/Bridge.cpp index 3ba1a9e..01ba894 100644 --- a/runtime/Sources/Core/Bridge.cpp +++ b/runtime/Sources/Core/Bridge.cpp @@ -29,7 +29,6 @@ extern "C" mlx::Application* app = new mlx::Application; if(app == nullptr) mlx::FatalError("Tout a pété"); - mlx::RenderCore::Get().Init(); __mlx_ptr = static_cast(app); return __mlx_ptr; } @@ -281,7 +280,6 @@ extern "C" { MLX_CHECK_APPLICATION_POINTER(mlx); delete static_cast(mlx); - mlx::RenderCore::Get().Destroy(); __mlx_ptr = nullptr; return 0; } diff --git a/runtime/Sources/Graphics/Mesh.cpp b/runtime/Sources/Graphics/Mesh.cpp index 3f8221f..3df2e95 100644 --- a/runtime/Sources/Graphics/Mesh.cpp +++ b/runtime/Sources/Graphics/Mesh.cpp @@ -15,7 +15,7 @@ namespace mlx Verify(submesh_index < m_sub_meshes.size(), "invalid submesh index"); m_sub_meshes[submesh_index].vbo.Bind(cmd); m_sub_meshes[submesh_index].ibo.Bind(cmd); - vkCmdDrawIndexed(cmd, static_cast(m_sub_meshes[submesh_index].ibo.GetSize() / sizeof(std::uint32_t)), 1, 0, 0, 0); + mlx::RenderCore::Get().vkCmdDrawIndexed(cmd, static_cast(m_sub_meshes[submesh_index].ibo.GetSize() / sizeof(std::uint32_t)), 1, 0, 0, 0); polygondrawn += m_sub_meshes[submesh_index].triangle_count; drawcalls++; } diff --git a/runtime/Sources/Renderer/Descriptor.cpp b/runtime/Sources/Renderer/Descriptor.cpp index bbed657..11a4411 100644 --- a/runtime/Sources/Renderer/Descriptor.cpp +++ b/runtime/Sources/Renderer/Descriptor.cpp @@ -136,7 +136,7 @@ namespace mlx writes.push_back(kvfWriteStorageBufferToDescriptorSet(RenderCore::Get().GetDevice(), m_set[i], &buffer_infos.back(), descriptor.binding)); } } - vkUpdateDescriptorSets(RenderCore::Get().GetDevice(), writes.size(), writes.data(), 0, nullptr); + RenderCore::Get().vkUpdateDescriptorSets(RenderCore::Get().GetDevice(), writes.size(), writes.data(), 0, nullptr); } void DescriptorSet::Reallocate() noexcept diff --git a/runtime/Sources/Renderer/Image.cpp b/runtime/Sources/Renderer/Image.cpp index 0310469..6eff769 100644 
--- a/runtime/Sources/Renderer/Image.cpp +++ b/runtime/Sources/Renderer/Image.cpp @@ -76,7 +76,7 @@ namespace mlx m_layout = new_layout; if(is_single_time_cmd_buffer) { - vkEndCommandBuffer(cmd); + RenderCore::Get().vkEndCommandBuffer(cmd); VkFence fence = kvfCreateFence(RenderCore::Get().GetDevice()); kvfSubmitSingleTimeCommandBuffer(RenderCore::Get().GetDevice(), cmd, KVF_GRAPHICS_QUEUE, fence); kvfDestroyFence(RenderCore::Get().GetDevice(), fence); @@ -97,7 +97,7 @@ namespace mlx TransitionLayout(VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, cmd); subresource_range.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT; VkClearColorValue clear_color = VkClearColorValue({ { color.x, color.y, color.z, color.w } }); - vkCmdClearColorImage(cmd, m_image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, &clear_color, 1, &subresource_range); + RenderCore::Get().vkCmdClearColorImage(cmd, m_image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, &clear_color, 1, &subresource_range); TransitionLayout(old_layout, cmd); } else if(m_type == ImageType::Depth) @@ -105,7 +105,7 @@ namespace mlx VkClearDepthStencilValue clear_depth_stencil = { 1.0f, 1 }; subresource_range.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT; TransitionLayout(VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, cmd); - vkCmdClearDepthStencilImage(cmd, m_image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, &clear_depth_stencil, 1, &subresource_range); + RenderCore::Get().vkCmdClearDepthStencilImage(cmd, m_image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, &clear_depth_stencil, 1, &subresource_range); } } @@ -189,7 +189,7 @@ namespace mlx TransitionLayout(VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, cmd); kvfCopyImageToBuffer(cmd, m_staging_buffer->Get(), m_image, m_staging_buffer->GetOffset(), VK_IMAGE_ASPECT_COLOR_BIT, { m_width, m_height, 1 }); TransitionLayout(old_layout, cmd); - vkEndCommandBuffer(cmd); + RenderCore::Get().vkEndCommandBuffer(cmd); VkFence fence = kvfCreateFence(RenderCore::Get().GetDevice()); kvfSubmitSingleTimeCommandBuffer(RenderCore::Get().GetDevice(), cmd, KVF_GRAPHICS_QUEUE, fence); kvfDestroyFence(RenderCore::Get().GetDevice(), fence); diff --git a/runtime/Sources/Renderer/Memory.cpp b/runtime/Sources/Renderer/Memory.cpp index 7812525..3587dca 100644 --- a/runtime/Sources/Renderer/Memory.cpp +++ b/runtime/Sources/Renderer/Memory.cpp @@ -27,23 +27,23 @@ namespace mlx void GPUAllocator::Init() noexcept { VmaVulkanFunctions vma_vulkan_func{}; - vma_vulkan_func.vkAllocateMemory = vkAllocateMemory; - vma_vulkan_func.vkBindBufferMemory = vkBindBufferMemory; - vma_vulkan_func.vkBindImageMemory = vkBindImageMemory; - vma_vulkan_func.vkCreateBuffer = vkCreateBuffer; - vma_vulkan_func.vkCreateImage = vkCreateImage; - vma_vulkan_func.vkDestroyBuffer = vkDestroyBuffer; - vma_vulkan_func.vkDestroyImage = vkDestroyImage; - vma_vulkan_func.vkFlushMappedMemoryRanges = vkFlushMappedMemoryRanges; - vma_vulkan_func.vkFreeMemory = vkFreeMemory; - vma_vulkan_func.vkGetBufferMemoryRequirements = vkGetBufferMemoryRequirements; - vma_vulkan_func.vkGetImageMemoryRequirements = vkGetImageMemoryRequirements; - vma_vulkan_func.vkGetPhysicalDeviceMemoryProperties = vkGetPhysicalDeviceMemoryProperties; - vma_vulkan_func.vkGetPhysicalDeviceProperties = vkGetPhysicalDeviceProperties; - vma_vulkan_func.vkInvalidateMappedMemoryRanges = vkInvalidateMappedMemoryRanges; - vma_vulkan_func.vkMapMemory = vkMapMemory; - vma_vulkan_func.vkUnmapMemory = vkUnmapMemory; - vma_vulkan_func.vkCmdCopyBuffer = vkCmdCopyBuffer; + vma_vulkan_func.vkAllocateMemory = RenderCore::Get().vkAllocateMemory; + vma_vulkan_func.vkBindBufferMemory = 
RenderCore::Get().vkBindBufferMemory; + vma_vulkan_func.vkBindImageMemory = RenderCore::Get().vkBindImageMemory; + vma_vulkan_func.vkCreateBuffer = RenderCore::Get().vkCreateBuffer; + vma_vulkan_func.vkCreateImage = RenderCore::Get().vkCreateImage; + vma_vulkan_func.vkDestroyBuffer = RenderCore::Get().vkDestroyBuffer; + vma_vulkan_func.vkDestroyImage = RenderCore::Get().vkDestroyImage; + vma_vulkan_func.vkFlushMappedMemoryRanges = RenderCore::Get().vkFlushMappedMemoryRanges; + vma_vulkan_func.vkFreeMemory = RenderCore::Get().vkFreeMemory; + vma_vulkan_func.vkGetBufferMemoryRequirements = RenderCore::Get().vkGetBufferMemoryRequirements; + vma_vulkan_func.vkGetImageMemoryRequirements = RenderCore::Get().vkGetImageMemoryRequirements; + vma_vulkan_func.vkGetPhysicalDeviceMemoryProperties = RenderCore::Get().vkGetPhysicalDeviceMemoryProperties; + vma_vulkan_func.vkGetPhysicalDeviceProperties = RenderCore::Get().vkGetPhysicalDeviceProperties; + vma_vulkan_func.vkInvalidateMappedMemoryRanges = RenderCore::Get().vkInvalidateMappedMemoryRanges; + vma_vulkan_func.vkMapMemory = RenderCore::Get().vkMapMemory; + vma_vulkan_func.vkUnmapMemory = RenderCore::Get().vkUnmapMemory; + vma_vulkan_func.vkCmdCopyBuffer = RenderCore::Get().vkCmdCopyBuffer; VmaAllocatorCreateInfo allocator_create_info{}; allocator_create_info.vulkanApiVersion = VK_API_VERSION_1_0; diff --git a/runtime/Sources/Renderer/Pipelines/Graphics.cpp b/runtime/Sources/Renderer/Pipelines/Graphics.cpp index 7939b5e..6fa92e0 100644 --- a/runtime/Sources/Renderer/Pipelines/Graphics.cpp +++ b/runtime/Sources/Renderer/Pipelines/Graphics.cpp @@ -30,7 +30,7 @@ namespace mlx CreateFramebuffers(m_attachments, descriptor.clear_color_attachments); VkPhysicalDeviceFeatures features{}; - vkGetPhysicalDeviceFeatures(RenderCore::Get().GetPhysicalDevice(), &features); + mlx::RenderCore::Get().vkGetPhysicalDeviceFeatures(RenderCore::Get().GetPhysicalDevice(), &features); KvfGraphicsPipelineBuilder* builder = kvfCreateGPipelineBuilder(); kvfGPipelineBuilderAddShaderStage(builder, p_vertex_shader->GetShaderStage(), p_vertex_shader->GetShaderModule(), "main"); @@ -73,12 +73,12 @@ namespace mlx viewport.height = fb_extent.height; viewport.minDepth = 0.0f; viewport.maxDepth = 1.0f; - vkCmdSetViewport(command_buffer, 0, 1, &viewport); + RenderCore::Get().vkCmdSetViewport(command_buffer, 0, 1, &viewport); VkRect2D scissor{}; scissor.offset = { 0, 0 }; scissor.extent = fb_extent; - vkCmdSetScissor(command_buffer, 0, 1, &scissor); + RenderCore::Get().vkCmdSetScissor(command_buffer, 0, 1, &scissor); for(std::size_t i = 0; i < m_clears.size(); i++) { @@ -92,13 +92,13 @@ namespace mlx m_clears.back().depthStencil = VkClearDepthStencilValue{ 1.0f, 0 }; kvfBeginRenderPass(m_renderpass, command_buffer, fb, fb_extent, m_clears.data(), m_clears.size()); - vkCmdBindPipeline(command_buffer, GetPipelineBindPoint(), GetPipeline()); + RenderCore::Get().vkCmdBindPipeline(command_buffer, GetPipelineBindPoint(), GetPipeline()); return true; } void GraphicPipeline::EndPipeline(VkCommandBuffer command_buffer) noexcept { - vkCmdEndRenderPass(command_buffer); + RenderCore::Get().vkCmdEndRenderPass(command_buffer); } void GraphicPipeline::Destroy() noexcept diff --git a/runtime/Sources/Renderer/RenderCore.cpp b/runtime/Sources/Renderer/RenderCore.cpp index 79ca02d..c04c52f 100644 --- a/runtime/Sources/Renderer/RenderCore.cpp +++ b/runtime/Sources/Renderer/RenderCore.cpp @@ -43,10 +43,16 @@ namespace mlx std::cout << std::endl; } - void RenderCore::Init() noexcept + RenderCore* 
RenderCore::s_instance = nullptr; + + RenderCore::RenderCore() { + s_instance = this; + loader = std::make_unique(); + LoadKVFGlobalVulkanFunctionPointers(); + kvfSetErrorCallback(&ErrorCallback); kvfSetValidationErrorCallback(&ValidationErrorCallback); kvfSetValidationWarningCallback(&ValidationWarningCallback); @@ -63,6 +69,7 @@ namespace mlx DebugLog("Vulkan : instance created"); loader->LoadInstance(m_instance); + LoadKVFInstanceVulkanFunctionPointers(); VkSurfaceKHR surface = window.CreateVulkanSurface(m_instance); @@ -80,12 +87,109 @@ namespace mlx DebugLog("Vulkan : logical device created"); loader->LoadDevice(m_device); + LoadKVFDeviceVulkanFunctionPointers(); vkDestroySurfaceKHR(m_instance, surface, nullptr); FatalError("caca"); } - void RenderCore::Destroy() noexcept +#undef MLX_LOAD_FUNCTION +#define MLX_LOAD_FUNCTION(fn) pfns.fn = this->fn + + void RenderCore::LoadKVFGlobalVulkanFunctionPointers() const noexcept + { + KvfGlobalVulkanFunctions pfns; + MLX_LOAD_FUNCTION(vkCreateInstance); + MLX_LOAD_FUNCTION(vkEnumerateInstanceExtensionProperties); + MLX_LOAD_FUNCTION(vkEnumerateInstanceLayerProperties); + MLX_LOAD_FUNCTION(vkGetInstanceProcAddr); + kvfPassGlobalVulkanFunctionPointers(&pfns); + } + + void RenderCore::LoadKVFInstanceVulkanFunctionPointers() const noexcept + { + KvfInstanceVulkanFunctions pfns; + MLX_LOAD_FUNCTION(vkCreateDevice); + MLX_LOAD_FUNCTION(vkDestroyInstance); + MLX_LOAD_FUNCTION(vkEnumerateDeviceExtensionProperties); + MLX_LOAD_FUNCTION(vkEnumeratePhysicalDevices); + MLX_LOAD_FUNCTION(vkGetPhysicalDeviceFeatures); + MLX_LOAD_FUNCTION(vkGetPhysicalDeviceFormatProperties); + MLX_LOAD_FUNCTION(vkGetPhysicalDeviceImageFormatProperties); + MLX_LOAD_FUNCTION(vkGetPhysicalDeviceMemoryProperties); + MLX_LOAD_FUNCTION(vkGetPhysicalDeviceProperties); + MLX_LOAD_FUNCTION(vkGetPhysicalDeviceQueueFamilyProperties); + MLX_LOAD_FUNCTION(vkDestroySurfaceKHR); + MLX_LOAD_FUNCTION(vkGetPhysicalDeviceSurfaceCapabilitiesKHR); + MLX_LOAD_FUNCTION(vkGetPhysicalDeviceSurfaceFormatsKHR); + MLX_LOAD_FUNCTION(vkGetPhysicalDeviceSurfacePresentModesKHR); + MLX_LOAD_FUNCTION(vkGetPhysicalDeviceSurfaceSupportKHR); + kvfPassInstanceVulkanFunctionPointers(&pfns); + } + + void RenderCore::LoadKVFDeviceVulkanFunctionPointers() const noexcept + { + KvfDeviceVulkanFunctions pfns; + MLX_LOAD_FUNCTION(vkAllocateCommandBuffers); + MLX_LOAD_FUNCTION(vkAllocateDescriptorSets); + MLX_LOAD_FUNCTION(vkBeginCommandBuffer); + MLX_LOAD_FUNCTION(vkCmdBeginRenderPass); + MLX_LOAD_FUNCTION(vkCmdCopyBuffer); + MLX_LOAD_FUNCTION(vkCmdCopyBufferToImage); + MLX_LOAD_FUNCTION(vkCmdCopyImage); + MLX_LOAD_FUNCTION(vkCmdCopyImageToBuffer); + MLX_LOAD_FUNCTION(vkCmdEndRenderPass); + MLX_LOAD_FUNCTION(vkCmdPipelineBarrier); + MLX_LOAD_FUNCTION(vkCreateBuffer); + MLX_LOAD_FUNCTION(vkCreateCommandPool); + MLX_LOAD_FUNCTION(vkCreateDescriptorPool); + MLX_LOAD_FUNCTION(vkCreateDescriptorSetLayout); + MLX_LOAD_FUNCTION(vkCreateFence); + MLX_LOAD_FUNCTION(vkCreateFramebuffer); + MLX_LOAD_FUNCTION(vkCreateGraphicsPipelines); + MLX_LOAD_FUNCTION(vkCreateImage); + MLX_LOAD_FUNCTION(vkCreateImageView); + MLX_LOAD_FUNCTION(vkCreatePipelineLayout); + MLX_LOAD_FUNCTION(vkCreateRenderPass); + MLX_LOAD_FUNCTION(vkCreateSampler); + MLX_LOAD_FUNCTION(vkCreateSemaphore); + MLX_LOAD_FUNCTION(vkCreateShaderModule); + MLX_LOAD_FUNCTION(vkDestroyBuffer); + MLX_LOAD_FUNCTION(vkDestroyCommandPool); + MLX_LOAD_FUNCTION(vkDestroyDescriptorPool); + MLX_LOAD_FUNCTION(vkDestroyDescriptorSetLayout); + MLX_LOAD_FUNCTION(vkDestroyDevice); + 
MLX_LOAD_FUNCTION(vkDestroyFence); + MLX_LOAD_FUNCTION(vkDestroyFramebuffer); + MLX_LOAD_FUNCTION(vkDestroyImage); + MLX_LOAD_FUNCTION(vkDestroyImageView); + MLX_LOAD_FUNCTION(vkDestroyPipeline); + MLX_LOAD_FUNCTION(vkDestroyPipelineLayout); + MLX_LOAD_FUNCTION(vkDestroyRenderPass); + MLX_LOAD_FUNCTION(vkDestroySampler); + MLX_LOAD_FUNCTION(vkDestroySemaphore); + MLX_LOAD_FUNCTION(vkDestroyShaderModule); + MLX_LOAD_FUNCTION(vkDeviceWaitIdle); + MLX_LOAD_FUNCTION(vkEndCommandBuffer); + MLX_LOAD_FUNCTION(vkGetDeviceQueue); + MLX_LOAD_FUNCTION(vkGetImageSubresourceLayout); + MLX_LOAD_FUNCTION(vkQueueSubmit); + MLX_LOAD_FUNCTION(vkResetCommandBuffer); + MLX_LOAD_FUNCTION(vkResetDescriptorPool); + MLX_LOAD_FUNCTION(vkResetEvent); + MLX_LOAD_FUNCTION(vkResetFences); + MLX_LOAD_FUNCTION(vkUpdateDescriptorSets); + MLX_LOAD_FUNCTION(vkWaitForFences); + MLX_LOAD_FUNCTION(vkCreateSwapchainKHR); + MLX_LOAD_FUNCTION(vkDestroySwapchainKHR); + MLX_LOAD_FUNCTION(vkGetSwapchainImagesKHR); + MLX_LOAD_FUNCTION(vkQueuePresentKHR); + kvfPassDeviceVulkanFunctionPointers(m_device, &pfns); + } + +#undef MLX_LOAD_FUNCTION + + RenderCore::~RenderCore() { WaitDeviceIdle(); kvfDestroyDevice(m_device); @@ -93,5 +197,7 @@ namespace mlx kvfDestroyInstance(m_instance); DebugLog("Vulkan : instance destroyed"); loader.reset(); + + s_instance = nullptr; } } diff --git a/runtime/Sources/Renderer/RenderPasses/2DPass.cpp b/runtime/Sources/Renderer/RenderPasses/2DPass.cpp index 962ce99..19ba2d7 100644 --- a/runtime/Sources/Renderer/RenderPasses/2DPass.cpp +++ b/runtime/Sources/Renderer/RenderPasses/2DPass.cpp @@ -106,8 +106,8 @@ namespace mlx sprite->GetTexture()->Update(cmd); sprite->Bind(frame_index, cmd); std::array sets = { p_viewer_data_set->GetSet(frame_index), sprite->GetSet(frame_index) }; - vkCmdBindDescriptorSets(cmd, m_pipeline.GetPipelineBindPoint(), m_pipeline.GetPipelineLayout(), 0, sets.size(), sets.data(), 0, nullptr); - vkCmdPushConstants(cmd, m_pipeline.GetPipelineLayout(), VK_SHADER_STAGE_VERTEX_BIT, 0, sizeof(SpriteData), &sprite_data); + mlx::RenderCore::Get().vkCmdBindDescriptorSets(cmd, m_pipeline.GetPipelineBindPoint(), m_pipeline.GetPipelineLayout(), 0, sets.size(), sets.data(), 0, nullptr); + mlx::RenderCore::Get().vkCmdPushConstants(cmd, m_pipeline.GetPipelineLayout(), VK_SHADER_STAGE_VERTEX_BIT, 0, sizeof(SpriteData), &sprite_data); sprite->GetMesh()->Draw(cmd, renderer.GetDrawCallsCounterRef(), renderer.GetPolygonDrawnCounterRef()); } m_pipeline.EndPipeline(cmd); diff --git a/runtime/Sources/Renderer/RenderPasses/FinalPass.cpp b/runtime/Sources/Renderer/RenderPasses/FinalPass.cpp index c36d703..5a4014b 100644 --- a/runtime/Sources/Renderer/RenderPasses/FinalPass.cpp +++ b/runtime/Sources/Renderer/RenderPasses/FinalPass.cpp @@ -61,8 +61,8 @@ namespace mlx m_pipeline.BindPipeline(cmd, renderer.GetSwapchainImageIndex(), { 0.0f, 0.0f, 0.0f, 1.0f }); VkDescriptorSet set = p_set->GetSet(renderer.GetCurrentFrameIndex()); - vkCmdBindDescriptorSets(cmd, m_pipeline.GetPipelineBindPoint(), m_pipeline.GetPipelineLayout(), 0, 1, &set, 0, nullptr); - vkCmdDraw(cmd, 3, 1, 0, 0); + RenderCore::Get().vkCmdBindDescriptorSets(cmd, m_pipeline.GetPipelineBindPoint(), m_pipeline.GetPipelineLayout(), 0, 1, &set, 0, nullptr); + RenderCore::Get().vkCmdDraw(cmd, 3, 1, 0, 0); renderer.GetDrawCallsCounterRef()++; renderer.GetPolygonDrawnCounterRef()++; m_pipeline.EndPipeline(cmd); diff --git a/runtime/Sources/Renderer/Renderer.cpp b/runtime/Sources/Renderer/Renderer.cpp index e8bacc7..b240290 100644 --- 
a/runtime/Sources/Renderer/Renderer.cpp +++ b/runtime/Sources/Renderer/Renderer.cpp @@ -57,7 +57,7 @@ namespace mlx bool Renderer::BeginFrame() { kvfWaitForFence(RenderCore::Get().GetDevice(), m_cmd_fences[m_current_frame_index]); - VkResult result = vkAcquireNextImageKHR(RenderCore::Get().GetDevice(), m_swapchain, UINT64_MAX, m_image_available_semaphores[m_current_frame_index], VK_NULL_HANDLE, &m_swapchain_image_index); + VkResult result = RenderCore::Get().vkAcquireNextImageKHR(RenderCore::Get().GetDevice(), m_swapchain, UINT64_MAX, m_image_available_semaphores[m_current_frame_index], VK_NULL_HANDLE, &m_swapchain_image_index); if(result == VK_ERROR_OUT_OF_DATE_KHR) { DestroySwapchain(); @@ -68,7 +68,7 @@ namespace mlx else if(result != VK_SUCCESS && result != VK_SUBOPTIMAL_KHR) FatalError("Vulkan error : failed to acquire swapchain image, %", kvfVerbaliseVkResult(result)); - vkResetCommandBuffer(m_cmd_buffers[m_current_frame_index], 0); + RenderCore::Get().vkResetCommandBuffer(m_cmd_buffers[m_current_frame_index], 0); kvfBeginCommandBuffer(m_cmd_buffers[m_current_frame_index], 0); m_drawcalls = 0; m_polygons_drawn = 0; @@ -102,7 +102,7 @@ namespace mlx std::uint32_t images_count = kvfGetSwapchainImagesCount(m_swapchain); std::vector tmp(images_count); m_swapchain_images.resize(images_count); - vkGetSwapchainImagesKHR(RenderCore::Get().GetDevice(), m_swapchain, &images_count, tmp.data()); + RenderCore::Get().vkGetSwapchainImagesKHR(RenderCore::Get().GetDevice(), m_swapchain, &images_count, tmp.data()); for(std::size_t i = 0; i < images_count; i++) { m_swapchain_images[i].Init(tmp[i], kvfGetSwapchainImagesFormat(m_swapchain), extent.width, extent.height); @@ -136,7 +136,7 @@ namespace mlx } DestroySwapchain(); - vkDestroySurfaceKHR(RenderCore::Get().GetInstance(), m_surface, nullptr); + RenderCore::Get().vkDestroySurfaceKHR(RenderCore::Get().GetInstance(), m_surface, nullptr); DebugLog("Vulkan : surface destroyed"); m_surface = VK_NULL_HANDLE; } diff --git a/runtime/Sources/Renderer/Vulkan/VulkanLoader.cpp b/runtime/Sources/Renderer/Vulkan/VulkanLoader.cpp index 273a902..cba9310 100644 --- a/runtime/Sources/Renderer/Vulkan/VulkanLoader.cpp +++ b/runtime/Sources/Renderer/Vulkan/VulkanLoader.cpp @@ -1,7 +1,6 @@ -#include "Renderer/Vulkan/VulkanPrototypes.h" -#include "vulkan/vulkan_core.h" #include #include +#include #ifdef MLX_PLAT_WINDOWS __declspec(dllimport) HMODULE __stdcall LoadLibraryA(LPCSTR); @@ -29,19 +28,19 @@ namespace mlx { static inline PFN_vkVoidFunction vkGetInstanceProcAddrStub(Handle context, const char* name) { - PFN_vkVoidFunction function = vkGetInstanceProcAddr(static_cast(context), name); + PFN_vkVoidFunction function = RenderCore::Get().vkGetInstanceProcAddr(static_cast(context), name); if(!function) FatalError("Vulkan loader : could not load '%'", name); - DebugLog("Vulkan loader : loaded %", name); + //DebugLog("Vulkan loader : loaded %", name); return function; } static inline PFN_vkVoidFunction vkGetDeviceProcAddrStub(Handle context, const char* name) { - PFN_vkVoidFunction function = vkGetDeviceProcAddr(static_cast(context), name); + PFN_vkVoidFunction function = RenderCore::Get().vkGetDeviceProcAddr(static_cast(context), name); if(!function) FatalError("Vulkan loader : could not load '%'", name); - DebugLog("Vulkan loader : loaded %", name); + //DebugLog("Vulkan loader : loaded %", name); return function; } @@ -90,19 +89,20 @@ namespace mlx { p_module = Internal::LoadLib(libname); if(p_module != nullptr) - break; + { + DISABLE_GCC_PEDANTIC_WARNINGS + 
RenderCore::Get().vkGetInstanceProcAddr = reinterpret_cast(Internal::GetSymbol(p_module, "vkGetInstanceProcAddr")); + RESTORE_GCC_PEDANTIC_WARNINGS + if(RenderCore::Get().vkGetInstanceProcAddr) + { + DebugLog("Vulkan loader : libvulkan loaded using '%'", libname); + break; + } + } } - if(!p_module) + if(!p_module || !RenderCore::Get().vkGetInstanceProcAddr) FatalError("Vulkan loader : failed to load libvulkan"); - - DISABLE_GCC_PEDANTIC_WARNINGS - vkGetInstanceProcAddr = reinterpret_cast(Internal::GetSymbol(p_module, "vkGetInstanceProcAddr")); - RESTORE_GCC_PEDANTIC_WARNINGS - - if(!vkGetInstanceProcAddr) - FatalError("Vulkan loader : could not get symbol for 'vkGetInstanceProcAddr'"); - DebugLog("Vulkan loader : libvulkan loaded"); - LoadGlobalFunctions(NULL, Internal::vkGetInstanceProcAddrStub); + LoadGlobalFunctions(nullptr, Internal::vkGetInstanceProcAddrStub); } void VulkanLoader::LoadInstance(VkInstance instance) @@ -117,173 +117,25 @@ namespace mlx void VulkanLoader::LoadGlobalFunctions(void* context, PFN_vkVoidFunction (*load)(void*, const char*)) noexcept { - #ifdef VK_VERSION_1_0 - vkCreateInstance = reinterpret_cast(load(context, "vkCreateInstance")); - vkEnumerateInstanceExtensionProperties = reinterpret_cast(load(context, "vkEnumerateInstanceExtensionProperties")); - vkEnumerateInstanceLayerProperties = reinterpret_cast(load(context, "vkEnumerateInstanceLayerProperties")); - #endif + #define MLX_VULKAN_GLOBAL_FUNCTION(fn) RenderCore::Get().fn = reinterpret_cast(load(context, #fn)); + #include + #undef MLX_VULKAN_GLOBAL_FUNCTION DebugLog("Vulkan loader : global functions loaded"); } void VulkanLoader::LoadInstanceFunctions(void* context, PFN_vkVoidFunction (*load)(void*, const char*)) noexcept { - #ifdef VK_VERSION_1_0 - vkCreateDevice = reinterpret_cast(load(context, "vkCreateDevice")); - vkDestroyInstance = reinterpret_cast(load(context, "vkDestroyInstance")); - vkEnumerateDeviceExtensionProperties = reinterpret_cast(load(context, "vkEnumerateDeviceExtensionProperties")); - vkEnumerateDeviceLayerProperties = reinterpret_cast(load(context, "vkEnumerateDeviceLayerProperties")); - vkEnumeratePhysicalDevices = reinterpret_cast(load(context, "vkEnumeratePhysicalDevices")); - vkGetDeviceProcAddr = reinterpret_cast(load(context, "vkGetDeviceProcAddr")); - vkGetPhysicalDeviceFeatures = reinterpret_cast(load(context, "vkGetPhysicalDeviceFeatures")); - vkGetPhysicalDeviceFormatProperties = reinterpret_cast(load(context, "vkGetPhysicalDeviceFormatProperties")); - vkGetPhysicalDeviceImageFormatProperties = reinterpret_cast(load(context, "vkGetPhysicalDeviceImageFormatProperties")); - vkGetPhysicalDeviceMemoryProperties = reinterpret_cast(load(context, "vkGetPhysicalDeviceMemoryProperties")); - vkGetPhysicalDeviceProperties = reinterpret_cast(load(context, "vkGetPhysicalDeviceProperties")); - vkGetPhysicalDeviceQueueFamilyProperties = reinterpret_cast(load(context, "vkGetPhysicalDeviceQueueFamilyProperties")); - vkGetPhysicalDeviceSparseImageFormatProperties = reinterpret_cast(load(context, "vkGetPhysicalDeviceSparseImageFormatProperties")); - #endif - #ifdef VK_KHR_surface - vkDestroySurfaceKHR = reinterpret_cast(load(context, "vkDestroySurfaceKHR")); - vkGetPhysicalDeviceSurfaceCapabilitiesKHR = reinterpret_cast(load(context, "vkGetPhysicalDeviceSurfaceCapabilitiesKHR")); - vkGetPhysicalDeviceSurfaceFormatsKHR = reinterpret_cast(load(context, "vkGetPhysicalDeviceSurfaceFormatsKHR")); - vkGetPhysicalDeviceSurfacePresentModesKHR = reinterpret_cast(load(context, 
"vkGetPhysicalDeviceSurfacePresentModesKHR")); - vkGetPhysicalDeviceSurfaceSupportKHR = reinterpret_cast(load(context, "vkGetPhysicalDeviceSurfaceSupportKHR")); - #endif + #define MLX_VULKAN_INSTANCE_FUNCTION(fn) RenderCore::Get().fn = reinterpret_cast(load(context, #fn)); + #include + #undef MLX_VULKAN_INSTANCE_FUNCTION DebugLog("Vulkan loader : instance functions loaded"); } void VulkanLoader::LoadDeviceFunctions(void* context, PFN_vkVoidFunction (*load)(void*, const char*)) noexcept { - #ifdef VK_VERSION_1_0 - vkAllocateCommandBuffers = reinterpret_cast(load(context, "vkAllocateCommandBuffers")); - vkAllocateDescriptorSets = reinterpret_cast(load(context, "vkAllocateDescriptorSets")); - vkAllocateMemory = reinterpret_cast(load(context, "vkAllocateMemory")); - vkBeginCommandBuffer = reinterpret_cast(load(context, "vkBeginCommandBuffer")); - vkBindBufferMemory = reinterpret_cast(load(context, "vkBindBufferMemory")); - vkBindImageMemory = reinterpret_cast(load(context, "vkBindImageMemory")); - vkCmdBeginQuery = reinterpret_cast(load(context, "vkCmdBeginQuery")); - vkCmdBeginRenderPass = reinterpret_cast(load(context, "vkCmdBeginRenderPass")); - vkCmdBindDescriptorSets = reinterpret_cast(load(context, "vkCmdBindDescriptorSets")); - vkCmdBindIndexBuffer = reinterpret_cast(load(context, "vkCmdBindIndexBuffer")); - vkCmdBindPipeline = reinterpret_cast(load(context, "vkCmdBindPipeline")); - vkCmdBindVertexBuffers = reinterpret_cast(load(context, "vkCmdBindVertexBuffers")); - vkCmdBlitImage = reinterpret_cast(load(context, "vkCmdBlitImage")); - vkCmdClearAttachments = reinterpret_cast(load(context, "vkCmdClearAttachments")); - vkCmdClearColorImage = reinterpret_cast(load(context, "vkCmdClearColorImage")); - vkCmdClearDepthStencilImage = reinterpret_cast(load(context, "vkCmdClearDepthStencilImage")); - vkCmdCopyBuffer = reinterpret_cast(load(context, "vkCmdCopyBuffer")); - vkCmdCopyBufferToImage = reinterpret_cast(load(context, "vkCmdCopyBufferToImage")); - vkCmdCopyImage = reinterpret_cast(load(context, "vkCmdCopyImage")); - vkCmdCopyImageToBuffer = reinterpret_cast(load(context, "vkCmdCopyImageToBuffer")); - vkCmdCopyQueryPoolResults = reinterpret_cast(load(context, "vkCmdCopyQueryPoolResults")); - vkCmdDispatch = reinterpret_cast(load(context, "vkCmdDispatch")); - vkCmdDispatchIndirect = reinterpret_cast(load(context, "vkCmdDispatchIndirect")); - vkCmdDraw = reinterpret_cast(load(context, "vkCmdDraw")); - vkCmdDrawIndexed = reinterpret_cast(load(context, "vkCmdDrawIndexed")); - vkCmdDrawIndexedIndirect = reinterpret_cast(load(context, "vkCmdDrawIndexedIndirect")); - vkCmdDrawIndirect = reinterpret_cast(load(context, "vkCmdDrawIndirect")); - vkCmdEndQuery = reinterpret_cast(load(context, "vkCmdEndQuery")); - vkCmdEndRenderPass = reinterpret_cast(load(context, "vkCmdEndRenderPass")); - vkCmdExecuteCommands = reinterpret_cast(load(context, "vkCmdExecuteCommands")); - vkCmdFillBuffer = reinterpret_cast(load(context, "vkCmdFillBuffer")); - vkCmdNextSubpass = reinterpret_cast(load(context, "vkCmdNextSubpass")); - vkCmdPipelineBarrier = reinterpret_cast(load(context, "vkCmdPipelineBarrier")); - vkCmdPushConstants = reinterpret_cast(load(context, "vkCmdPushConstants")); - vkCmdResetEvent = reinterpret_cast(load(context, "vkCmdResetEvent")); - vkCmdResetQueryPool = reinterpret_cast(load(context, "vkCmdResetQueryPool")); - vkCmdResolveImage = reinterpret_cast(load(context, "vkCmdResolveImage")); - vkCmdSetBlendConstants = reinterpret_cast(load(context, "vkCmdSetBlendConstants")); - vkCmdSetDepthBias = 
reinterpret_cast(load(context, "vkCmdSetDepthBias")); - vkCmdSetDepthBounds = reinterpret_cast(load(context, "vkCmdSetDepthBounds")); - vkCmdSetEvent = reinterpret_cast(load(context, "vkCmdSetEvent")); - vkCmdSetLineWidth = reinterpret_cast(load(context, "vkCmdSetLineWidth")); - vkCmdSetScissor = reinterpret_cast(load(context, "vkCmdSetScissor")); - vkCmdSetStencilCompareMask = reinterpret_cast(load(context, "vkCmdSetStencilCompareMask")); - vkCmdSetStencilReference = reinterpret_cast(load(context, "vkCmdSetStencilReference")); - vkCmdSetStencilWriteMask = reinterpret_cast(load(context, "vkCmdSetStencilWriteMask")); - vkCmdSetViewport = reinterpret_cast(load(context, "vkCmdSetViewport")); - vkCmdUpdateBuffer = reinterpret_cast(load(context, "vkCmdUpdateBuffer")); - vkCmdWaitEvents = reinterpret_cast(load(context, "vkCmdWaitEvents")); - vkCmdWriteTimestamp = reinterpret_cast(load(context, "vkCmdWriteTimestamp")); - vkCreateBuffer = reinterpret_cast(load(context, "vkCreateBuffer")); - vkCreateBufferView = reinterpret_cast(load(context, "vkCreateBufferView")); - vkCreateCommandPool = reinterpret_cast(load(context, "vkCreateCommandPool")); - vkCreateComputePipelines = reinterpret_cast(load(context, "vkCreateComputePipelines")); - vkCreateDescriptorPool = reinterpret_cast(load(context, "vkCreateDescriptorPool")); - vkCreateDescriptorSetLayout = reinterpret_cast(load(context, "vkCreateDescriptorSetLayout")); - vkCreateEvent = reinterpret_cast(load(context, "vkCreateEvent")); - vkCreateFence = reinterpret_cast(load(context, "vkCreateFence")); - vkCreateFramebuffer = reinterpret_cast(load(context, "vkCreateFramebuffer")); - vkCreateGraphicsPipelines = reinterpret_cast(load(context, "vkCreateGraphicsPipelines")); - vkCreateImage = reinterpret_cast(load(context, "vkCreateImage")); - vkCreateImageView = reinterpret_cast(load(context, "vkCreateImageView")); - vkCreatePipelineCache = reinterpret_cast(load(context, "vkCreatePipelineCache")); - vkCreatePipelineLayout = reinterpret_cast(load(context, "vkCreatePipelineLayout")); - vkCreateQueryPool = reinterpret_cast(load(context, "vkCreateQueryPool")); - vkCreateRenderPass = reinterpret_cast(load(context, "vkCreateRenderPass")); - vkCreateSampler = reinterpret_cast(load(context, "vkCreateSampler")); - vkCreateSemaphore = reinterpret_cast(load(context, "vkCreateSemaphore")); - vkCreateShaderModule = reinterpret_cast(load(context, "vkCreateShaderModule")); - vkDestroyBuffer = reinterpret_cast(load(context, "vkDestroyBuffer")); - vkDestroyBufferView = reinterpret_cast(load(context, "vkDestroyBufferView")); - vkDestroyCommandPool = reinterpret_cast(load(context, "vkDestroyCommandPool")); - vkDestroyDescriptorPool = reinterpret_cast(load(context, "vkDestroyDescriptorPool")); - vkDestroyDescriptorSetLayout = reinterpret_cast(load(context, "vkDestroyDescriptorSetLayout")); - vkDestroyDevice = reinterpret_cast(load(context, "vkDestroyDevice")); - vkDestroyEvent = reinterpret_cast(load(context, "vkDestroyEvent")); - vkDestroyFence = reinterpret_cast(load(context, "vkDestroyFence")); - vkDestroyFramebuffer = reinterpret_cast(load(context, "vkDestroyFramebuffer")); - vkDestroyImage = reinterpret_cast(load(context, "vkDestroyImage")); - vkDestroyImageView = reinterpret_cast(load(context, "vkDestroyImageView")); - vkDestroyPipeline = reinterpret_cast(load(context, "vkDestroyPipeline")); - vkDestroyPipelineCache = reinterpret_cast(load(context, "vkDestroyPipelineCache")); - vkDestroyPipelineLayout = reinterpret_cast(load(context, "vkDestroyPipelineLayout")); - 
vkDestroyQueryPool = reinterpret_cast(load(context, "vkDestroyQueryPool")); - vkDestroyRenderPass = reinterpret_cast(load(context, "vkDestroyRenderPass")); - vkDestroySampler = reinterpret_cast(load(context, "vkDestroySampler")); - vkDestroySemaphore = reinterpret_cast(load(context, "vkDestroySemaphore")); - vkDestroyShaderModule = reinterpret_cast(load(context, "vkDestroyShaderModule")); - vkDeviceWaitIdle = reinterpret_cast(load(context, "vkDeviceWaitIdle")); - vkEndCommandBuffer = reinterpret_cast(load(context, "vkEndCommandBuffer")); - vkFlushMappedMemoryRanges = reinterpret_cast(load(context, "vkFlushMappedMemoryRanges")); - vkFreeCommandBuffers = reinterpret_cast(load(context, "vkFreeCommandBuffers")); - vkFreeDescriptorSets = reinterpret_cast(load(context, "vkFreeDescriptorSets")); - vkFreeMemory = reinterpret_cast(load(context, "vkFreeMemory")); - vkGetBufferMemoryRequirements = reinterpret_cast(load(context, "vkGetBufferMemoryRequirements")); - vkGetDeviceMemoryCommitment = reinterpret_cast(load(context, "vkGetDeviceMemoryCommitment")); - vkGetDeviceQueue = reinterpret_cast(load(context, "vkGetDeviceQueue")); - vkGetEventStatus = reinterpret_cast(load(context, "vkGetEventStatus")); - vkGetFenceStatus = reinterpret_cast(load(context, "vkGetFenceStatus")); - vkGetImageMemoryRequirements = reinterpret_cast(load(context, "vkGetImageMemoryRequirements")); - vkGetImageSparseMemoryRequirements = reinterpret_cast(load(context, "vkGetImageSparseMemoryRequirements")); - vkGetImageSubresourceLayout = reinterpret_cast(load(context, "vkGetImageSubresourceLayout")); - vkGetPipelineCacheData = reinterpret_cast(load(context, "vkGetPipelineCacheData")); - vkGetQueryPoolResults = reinterpret_cast(load(context, "vkGetQueryPoolResults")); - vkGetRenderAreaGranularity = reinterpret_cast(load(context, "vkGetRenderAreaGranularity")); - vkInvalidateMappedMemoryRanges = reinterpret_cast(load(context, "vkInvalidateMappedMemoryRanges")); - vkMapMemory = reinterpret_cast(load(context, "vkMapMemory")); - vkMergePipelineCaches = reinterpret_cast(load(context, "vkMergePipelineCaches")); - vkQueueBindSparse = reinterpret_cast(load(context, "vkQueueBindSparse")); - vkQueueSubmit = reinterpret_cast(load(context, "vkQueueSubmit")); - vkQueueWaitIdle = reinterpret_cast(load(context, "vkQueueWaitIdle")); - vkResetCommandBuffer = reinterpret_cast(load(context, "vkResetCommandBuffer")); - vkResetCommandPool = reinterpret_cast(load(context, "vkResetCommandPool")); - vkResetDescriptorPool = reinterpret_cast(load(context, "vkResetDescriptorPool")); - vkResetEvent = reinterpret_cast(load(context, "vkResetEvent")); - vkResetFences = reinterpret_cast(load(context, "vkResetFences")); - vkSetEvent = reinterpret_cast(load(context, "vkSetEvent")); - vkUnmapMemory = reinterpret_cast(load(context, "vkUnmapMemory")); - vkUpdateDescriptorSets = reinterpret_cast(load(context, "vkUpdateDescriptorSets")); - vkWaitForFences = reinterpret_cast(load(context, "vkWaitForFences")); - #endif - #ifdef VK_KHR_swapchain - vkAcquireNextImageKHR = reinterpret_cast(load(context, "vkAcquireNextImageKHR")); - vkCreateSwapchainKHR = reinterpret_cast(load(context, "vkCreateSwapchainKHR")); - vkDestroySwapchainKHR = reinterpret_cast(load(context, "vkDestroySwapchainKHR")); - vkGetSwapchainImagesKHR = reinterpret_cast(load(context, "vkGetSwapchainImagesKHR")); - vkQueuePresentKHR = reinterpret_cast(load(context, "vkQueuePresentKHR")); - #endif - + #define MLX_VULKAN_DEVICE_FUNCTION(fn) RenderCore::Get().fn = reinterpret_cast(load(context, #fn)); + 
#include + #undef MLX_VULKAN_DEVICE_FUNCTION DebugLog("Vulkan loader : device functions loaded"); } @@ -298,157 +150,3 @@ namespace mlx DebugLog("Vulkan loader : libvulkan unloaded"); } } - -#ifdef VK_VERSION_1_0 - PFN_vkAllocateCommandBuffers vkAllocateCommandBuffers; - PFN_vkAllocateDescriptorSets vkAllocateDescriptorSets; - PFN_vkAllocateMemory vkAllocateMemory; - PFN_vkBeginCommandBuffer vkBeginCommandBuffer; - PFN_vkBindBufferMemory vkBindBufferMemory; - PFN_vkBindImageMemory vkBindImageMemory; - PFN_vkCmdBeginQuery vkCmdBeginQuery; - PFN_vkCmdBeginRenderPass vkCmdBeginRenderPass; - PFN_vkCmdBindDescriptorSets vkCmdBindDescriptorSets; - PFN_vkCmdBindIndexBuffer vkCmdBindIndexBuffer; - PFN_vkCmdBindPipeline vkCmdBindPipeline; - PFN_vkCmdBindVertexBuffers vkCmdBindVertexBuffers; - PFN_vkCmdBlitImage vkCmdBlitImage; - PFN_vkCmdClearAttachments vkCmdClearAttachments; - PFN_vkCmdClearColorImage vkCmdClearColorImage; - PFN_vkCmdClearDepthStencilImage vkCmdClearDepthStencilImage; - PFN_vkCmdCopyBuffer vkCmdCopyBuffer; - PFN_vkCmdCopyBufferToImage vkCmdCopyBufferToImage; - PFN_vkCmdCopyImage vkCmdCopyImage; - PFN_vkCmdCopyImageToBuffer vkCmdCopyImageToBuffer; - PFN_vkCmdCopyQueryPoolResults vkCmdCopyQueryPoolResults; - PFN_vkCmdDispatch vkCmdDispatch; - PFN_vkCmdDispatchIndirect vkCmdDispatchIndirect; - PFN_vkCmdDraw vkCmdDraw; - PFN_vkCmdDrawIndexed vkCmdDrawIndexed; - PFN_vkCmdDrawIndexedIndirect vkCmdDrawIndexedIndirect; - PFN_vkCmdDrawIndirect vkCmdDrawIndirect; - PFN_vkCmdEndQuery vkCmdEndQuery; - PFN_vkCmdEndRenderPass vkCmdEndRenderPass; - PFN_vkCmdExecuteCommands vkCmdExecuteCommands; - PFN_vkCmdFillBuffer vkCmdFillBuffer; - PFN_vkCmdNextSubpass vkCmdNextSubpass; - PFN_vkCmdPipelineBarrier vkCmdPipelineBarrier; - PFN_vkCmdPushConstants vkCmdPushConstants; - PFN_vkCmdResetEvent vkCmdResetEvent; - PFN_vkCmdResetQueryPool vkCmdResetQueryPool; - PFN_vkCmdResolveImage vkCmdResolveImage; - PFN_vkCmdSetBlendConstants vkCmdSetBlendConstants; - PFN_vkCmdSetDepthBias vkCmdSetDepthBias; - PFN_vkCmdSetDepthBounds vkCmdSetDepthBounds; - PFN_vkCmdSetEvent vkCmdSetEvent; - PFN_vkCmdSetLineWidth vkCmdSetLineWidth; - PFN_vkCmdSetScissor vkCmdSetScissor; - PFN_vkCmdSetStencilCompareMask vkCmdSetStencilCompareMask; - PFN_vkCmdSetStencilReference vkCmdSetStencilReference; - PFN_vkCmdSetStencilWriteMask vkCmdSetStencilWriteMask; - PFN_vkCmdSetViewport vkCmdSetViewport; - PFN_vkCmdUpdateBuffer vkCmdUpdateBuffer; - PFN_vkCmdWaitEvents vkCmdWaitEvents; - PFN_vkCmdWriteTimestamp vkCmdWriteTimestamp; - PFN_vkCreateBuffer vkCreateBuffer; - PFN_vkCreateBufferView vkCreateBufferView; - PFN_vkCreateCommandPool vkCreateCommandPool; - PFN_vkCreateComputePipelines vkCreateComputePipelines; - PFN_vkCreateDescriptorPool vkCreateDescriptorPool; - PFN_vkCreateDescriptorSetLayout vkCreateDescriptorSetLayout; - PFN_vkCreateDevice vkCreateDevice; - PFN_vkCreateEvent vkCreateEvent; - PFN_vkCreateFence vkCreateFence; - PFN_vkCreateFramebuffer vkCreateFramebuffer; - PFN_vkCreateGraphicsPipelines vkCreateGraphicsPipelines; - PFN_vkCreateImage vkCreateImage; - PFN_vkCreateImageView vkCreateImageView; - PFN_vkCreateInstance vkCreateInstance; - PFN_vkCreatePipelineCache vkCreatePipelineCache; - PFN_vkCreatePipelineLayout vkCreatePipelineLayout; - PFN_vkCreateQueryPool vkCreateQueryPool; - PFN_vkCreateRenderPass vkCreateRenderPass; - PFN_vkCreateSampler vkCreateSampler; - PFN_vkCreateSemaphore vkCreateSemaphore; - PFN_vkCreateShaderModule vkCreateShaderModule; - PFN_vkDestroyBuffer vkDestroyBuffer; - PFN_vkDestroyBufferView 
vkDestroyBufferView; - PFN_vkDestroyCommandPool vkDestroyCommandPool; - PFN_vkDestroyDescriptorPool vkDestroyDescriptorPool; - PFN_vkDestroyDescriptorSetLayout vkDestroyDescriptorSetLayout; - PFN_vkDestroyDevice vkDestroyDevice; - PFN_vkDestroyEvent vkDestroyEvent; - PFN_vkDestroyFence vkDestroyFence; - PFN_vkDestroyFramebuffer vkDestroyFramebuffer; - PFN_vkDestroyImage vkDestroyImage; - PFN_vkDestroyImageView vkDestroyImageView; - PFN_vkDestroyInstance vkDestroyInstance; - PFN_vkDestroyPipeline vkDestroyPipeline; - PFN_vkDestroyPipelineCache vkDestroyPipelineCache; - PFN_vkDestroyPipelineLayout vkDestroyPipelineLayout; - PFN_vkDestroyQueryPool vkDestroyQueryPool; - PFN_vkDestroyRenderPass vkDestroyRenderPass; - PFN_vkDestroySampler vkDestroySampler; - PFN_vkDestroySemaphore vkDestroySemaphore; - PFN_vkDestroyShaderModule vkDestroyShaderModule; - PFN_vkDeviceWaitIdle vkDeviceWaitIdle; - PFN_vkEndCommandBuffer vkEndCommandBuffer; - PFN_vkEnumerateDeviceExtensionProperties vkEnumerateDeviceExtensionProperties; - PFN_vkEnumerateDeviceLayerProperties vkEnumerateDeviceLayerProperties; - PFN_vkEnumerateInstanceExtensionProperties vkEnumerateInstanceExtensionProperties; - PFN_vkEnumerateInstanceLayerProperties vkEnumerateInstanceLayerProperties; - PFN_vkEnumeratePhysicalDevices vkEnumeratePhysicalDevices; - PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges; - PFN_vkFreeCommandBuffers vkFreeCommandBuffers; - PFN_vkFreeDescriptorSets vkFreeDescriptorSets; - PFN_vkFreeMemory vkFreeMemory; - PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements; - PFN_vkGetDeviceMemoryCommitment vkGetDeviceMemoryCommitment; - PFN_vkGetDeviceProcAddr vkGetDeviceProcAddr; - PFN_vkGetDeviceQueue vkGetDeviceQueue; - PFN_vkGetEventStatus vkGetEventStatus; - PFN_vkGetFenceStatus vkGetFenceStatus; - PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements; - PFN_vkGetImageSparseMemoryRequirements vkGetImageSparseMemoryRequirements; - PFN_vkGetImageSubresourceLayout vkGetImageSubresourceLayout; - PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr; - PFN_vkGetPhysicalDeviceFeatures vkGetPhysicalDeviceFeatures; - PFN_vkGetPhysicalDeviceFormatProperties vkGetPhysicalDeviceFormatProperties; - PFN_vkGetPhysicalDeviceImageFormatProperties vkGetPhysicalDeviceImageFormatProperties; - PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties; - PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties; - PFN_vkGetPhysicalDeviceQueueFamilyProperties vkGetPhysicalDeviceQueueFamilyProperties; - PFN_vkGetPhysicalDeviceSparseImageFormatProperties vkGetPhysicalDeviceSparseImageFormatProperties; - PFN_vkGetPipelineCacheData vkGetPipelineCacheData; - PFN_vkGetQueryPoolResults vkGetQueryPoolResults; - PFN_vkGetRenderAreaGranularity vkGetRenderAreaGranularity; - PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges; - PFN_vkMapMemory vkMapMemory; - PFN_vkMergePipelineCaches vkMergePipelineCaches; - PFN_vkQueueBindSparse vkQueueBindSparse; - PFN_vkQueueSubmit vkQueueSubmit; - PFN_vkQueueWaitIdle vkQueueWaitIdle; - PFN_vkResetCommandBuffer vkResetCommandBuffer; - PFN_vkResetCommandPool vkResetCommandPool; - PFN_vkResetDescriptorPool vkResetDescriptorPool; - PFN_vkResetEvent vkResetEvent; - PFN_vkResetFences vkResetFences; - PFN_vkSetEvent vkSetEvent; - PFN_vkUnmapMemory vkUnmapMemory; - PFN_vkUpdateDescriptorSets vkUpdateDescriptorSets; - PFN_vkWaitForFences vkWaitForFences; -#endif -#ifdef VK_KHR_swapchain - PFN_vkAcquireNextImageKHR vkAcquireNextImageKHR; - PFN_vkCreateSwapchainKHR 
vkCreateSwapchainKHR; - PFN_vkDestroySwapchainKHR vkDestroySwapchainKHR; - PFN_vkGetSwapchainImagesKHR vkGetSwapchainImagesKHR; - PFN_vkQueuePresentKHR vkQueuePresentKHR; -#endif -#ifdef VK_KHR_surface - PFN_vkDestroySurfaceKHR vkDestroySurfaceKHR; - PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR vkGetPhysicalDeviceSurfaceCapabilitiesKHR; - PFN_vkGetPhysicalDeviceSurfaceFormatsKHR vkGetPhysicalDeviceSurfaceFormatsKHR; - PFN_vkGetPhysicalDeviceSurfacePresentModesKHR vkGetPhysicalDeviceSurfacePresentModesKHR; - PFN_vkGetPhysicalDeviceSurfaceSupportKHR vkGetPhysicalDeviceSurfaceSupportKHR; -#endif diff --git a/third_party/kvf.h b/third_party/kvf.h index 5dd0758..acb29b2 100755 --- a/third_party/kvf.h +++ b/third_party/kvf.h @@ -41,6 +41,7 @@ * * If you are using Volk or any other meta loader you must define KVF_IMPL_VK_NO_PROTOTYPES * or VK_NO_PROTOTYPES before including this file to avoid conflicts with Vulkan prototypes. + * You will also need to pass the function pointers to kvf using dedicated functions. * * You can also #define KVF_ENABLE_VALIDATION_LAYERS to enable validation layers. * @@ -50,6 +51,7 @@ #ifndef KBZ_8_VULKAN_FRAMEWORK_H #define KBZ_8_VULKAN_FRAMEWORK_H +#include "vulkan/vulkan_core.h" #ifdef KVF_IMPL_VK_NO_PROTOTYPES #define VK_NO_PROTOTYPES #endif @@ -83,12 +85,22 @@ typedef enum typedef void (*KvfErrorCallback)(const char* message); +#ifdef KVF_IMPL_VK_NO_PROTOTYPES + typedef struct KvfGlobalVulkanFunctions KvfGlobalVulkanFunctions; + typedef struct KvfDeviceVulkanFunctions KvfDeviceVulkanFunctions; + typedef struct KvfInstanceVulkanFunctions KvfInstanceVulkanFunctions; +#endif typedef struct KvfGraphicsPipelineBuilder KvfGraphicsPipelineBuilder; void kvfSetErrorCallback(KvfErrorCallback callback); void kvfSetValidationErrorCallback(KvfErrorCallback callback); void kvfSetValidationWarningCallback(KvfErrorCallback callback); +#ifdef KVF_IMPL_VK_NO_PROTOTYPES + void kvfPassGlobalVulkanFunctionPointers(const KvfGlobalVulkanFunctions* fns); + void kvfPassInstanceVulkanFunctionPointers(const KvfInstanceVulkanFunctions* fns); +#endif + void kvfAddLayer(const char* layer); VkInstance kvfCreateInstance(const char** extensions_enabled, uint32_t extensions_count); @@ -115,6 +127,9 @@ VkDevice kvfCreateDefaultDevice(VkPhysicalDevice physical); VkDevice kvfCreateDevice(VkPhysicalDevice physical, const char** extensions, uint32_t extensions_count, VkPhysicalDeviceFeatures* features); VkDevice kvfCreateDefaultDevicePhysicalDeviceAndCustomQueues(VkPhysicalDevice physical, int32_t graphics_queue, int32_t present_queue, int32_t compute_queue); VkDevice kvfCreateDeviceCustomPhysicalDeviceAndQueues(VkPhysicalDevice physical, const char** extensions, uint32_t extensions_count, VkPhysicalDeviceFeatures* features, int32_t graphics_queue, int32_t present_queue, int32_t compute_queue); +#ifdef KVF_IMPL_VK_NO_PROTOTYPES + void kvfPassDeviceVulkanFunctionPointers(VkDevice device, const KvfDeviceVulkanFunctions* fns); +#endif void kvfDestroyDevice(VkDevice device); VkFence kvfCreateFence(VkDevice device); @@ -219,6 +234,102 @@ void kvfDestroyPipeline(VkDevice device, VkPipeline pipeline); void kvfCheckVk(VkResult result); +#ifdef KVF_IMPL_VK_NO_PROTOTYPES + #ifdef KVF_DEFINE_VULKAN_FUNCTION_PROTOTYPE + #undef KVF_DEFINE_VULKAN_FUNCTION_PROTOTYPE + #endif + #define KVF_DEFINE_VULKAN_FUNCTION_PROTOTYPE(fn) PFN_##fn fn + + struct KvfGlobalVulkanFunctions + { + KVF_DEFINE_VULKAN_FUNCTION_PROTOTYPE(vkCreateInstance); + KVF_DEFINE_VULKAN_FUNCTION_PROTOTYPE(vkEnumerateInstanceExtensionProperties); + 
KVF_DEFINE_VULKAN_FUNCTION_PROTOTYPE(vkEnumerateInstanceLayerProperties); + KVF_DEFINE_VULKAN_FUNCTION_PROTOTYPE(vkGetInstanceProcAddr); + }; + + struct KvfInstanceVulkanFunctions + { + KVF_DEFINE_VULKAN_FUNCTION_PROTOTYPE(vkCreateDevice); + KVF_DEFINE_VULKAN_FUNCTION_PROTOTYPE(vkDestroyInstance); + KVF_DEFINE_VULKAN_FUNCTION_PROTOTYPE(vkEnumerateDeviceExtensionProperties); + KVF_DEFINE_VULKAN_FUNCTION_PROTOTYPE(vkEnumeratePhysicalDevices); + KVF_DEFINE_VULKAN_FUNCTION_PROTOTYPE(vkGetPhysicalDeviceFeatures); + KVF_DEFINE_VULKAN_FUNCTION_PROTOTYPE(vkGetPhysicalDeviceFormatProperties); + KVF_DEFINE_VULKAN_FUNCTION_PROTOTYPE(vkGetPhysicalDeviceImageFormatProperties); + KVF_DEFINE_VULKAN_FUNCTION_PROTOTYPE(vkGetPhysicalDeviceMemoryProperties); + KVF_DEFINE_VULKAN_FUNCTION_PROTOTYPE(vkGetPhysicalDeviceProperties); + KVF_DEFINE_VULKAN_FUNCTION_PROTOTYPE(vkGetPhysicalDeviceQueueFamilyProperties); + #ifndef KVF_NO_KHR + KVF_DEFINE_VULKAN_FUNCTION_PROTOTYPE(vkDestroySurfaceKHR); + KVF_DEFINE_VULKAN_FUNCTION_PROTOTYPE(vkGetPhysicalDeviceSurfaceCapabilitiesKHR); + KVF_DEFINE_VULKAN_FUNCTION_PROTOTYPE(vkGetPhysicalDeviceSurfaceFormatsKHR); + KVF_DEFINE_VULKAN_FUNCTION_PROTOTYPE(vkGetPhysicalDeviceSurfacePresentModesKHR); + KVF_DEFINE_VULKAN_FUNCTION_PROTOTYPE(vkGetPhysicalDeviceSurfaceSupportKHR); + #endif + }; + + struct KvfDeviceVulkanFunctions + { + KVF_DEFINE_VULKAN_FUNCTION_PROTOTYPE(vkAllocateCommandBuffers); + KVF_DEFINE_VULKAN_FUNCTION_PROTOTYPE(vkAllocateDescriptorSets); + KVF_DEFINE_VULKAN_FUNCTION_PROTOTYPE(vkBeginCommandBuffer); + KVF_DEFINE_VULKAN_FUNCTION_PROTOTYPE(vkCmdBeginRenderPass); + KVF_DEFINE_VULKAN_FUNCTION_PROTOTYPE(vkCmdCopyBuffer); + KVF_DEFINE_VULKAN_FUNCTION_PROTOTYPE(vkCmdCopyBufferToImage); + KVF_DEFINE_VULKAN_FUNCTION_PROTOTYPE(vkCmdCopyImage); + KVF_DEFINE_VULKAN_FUNCTION_PROTOTYPE(vkCmdCopyImageToBuffer); + KVF_DEFINE_VULKAN_FUNCTION_PROTOTYPE(vkCmdEndRenderPass); + KVF_DEFINE_VULKAN_FUNCTION_PROTOTYPE(vkCmdPipelineBarrier); + KVF_DEFINE_VULKAN_FUNCTION_PROTOTYPE(vkCreateBuffer); + KVF_DEFINE_VULKAN_FUNCTION_PROTOTYPE(vkCreateCommandPool); + KVF_DEFINE_VULKAN_FUNCTION_PROTOTYPE(vkCreateDescriptorPool); + KVF_DEFINE_VULKAN_FUNCTION_PROTOTYPE(vkCreateDescriptorSetLayout); + KVF_DEFINE_VULKAN_FUNCTION_PROTOTYPE(vkCreateFence); + KVF_DEFINE_VULKAN_FUNCTION_PROTOTYPE(vkCreateFramebuffer); + KVF_DEFINE_VULKAN_FUNCTION_PROTOTYPE(vkCreateGraphicsPipelines); + KVF_DEFINE_VULKAN_FUNCTION_PROTOTYPE(vkCreateImage); + KVF_DEFINE_VULKAN_FUNCTION_PROTOTYPE(vkCreateImageView); + KVF_DEFINE_VULKAN_FUNCTION_PROTOTYPE(vkCreatePipelineLayout); + KVF_DEFINE_VULKAN_FUNCTION_PROTOTYPE(vkCreateRenderPass); + KVF_DEFINE_VULKAN_FUNCTION_PROTOTYPE(vkCreateSampler); + KVF_DEFINE_VULKAN_FUNCTION_PROTOTYPE(vkCreateSemaphore); + KVF_DEFINE_VULKAN_FUNCTION_PROTOTYPE(vkCreateShaderModule); + KVF_DEFINE_VULKAN_FUNCTION_PROTOTYPE(vkDestroyBuffer); + KVF_DEFINE_VULKAN_FUNCTION_PROTOTYPE(vkDestroyCommandPool); + KVF_DEFINE_VULKAN_FUNCTION_PROTOTYPE(vkDestroyDescriptorPool); + KVF_DEFINE_VULKAN_FUNCTION_PROTOTYPE(vkDestroyDescriptorSetLayout); + KVF_DEFINE_VULKAN_FUNCTION_PROTOTYPE(vkDestroyDevice); + KVF_DEFINE_VULKAN_FUNCTION_PROTOTYPE(vkDestroyFence); + KVF_DEFINE_VULKAN_FUNCTION_PROTOTYPE(vkDestroyFramebuffer); + KVF_DEFINE_VULKAN_FUNCTION_PROTOTYPE(vkDestroyImage); + KVF_DEFINE_VULKAN_FUNCTION_PROTOTYPE(vkDestroyImageView); + KVF_DEFINE_VULKAN_FUNCTION_PROTOTYPE(vkDestroyPipeline); + KVF_DEFINE_VULKAN_FUNCTION_PROTOTYPE(vkDestroyPipelineLayout); + 
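When KVF_IMPL_VK_NO_PROTOTYPES is defined, the structs declared above are how kvf receives its Vulkan entry points: the application fills them from its own loader and hands them over through the kvfPass*VulkanFunctionPointers functions before making the corresponding kvf calls. A minimal, hypothetical sketch of that hand-off (the get_proc helper and the "kvf.h" include path are assumptions, not part of kvf):

#define KVF_IMPL_VK_NO_PROTOTYPES
#include "kvf.h"

/* Stand-in for the application's real global loader (e.g. SDL_Vulkan_GetVkGetInstanceProcAddr
   or dlsym on libvulkan); returning NULL keeps the sketch self-contained. */
static PFN_vkVoidFunction get_proc(const char* name) { (void)name; return NULL; }

static void pass_global_pointers_to_kvf(void)
{
	KvfGlobalVulkanFunctions fns;
	fns.vkCreateInstance = (PFN_vkCreateInstance)get_proc("vkCreateInstance");
	fns.vkEnumerateInstanceExtensionProperties = (PFN_vkEnumerateInstanceExtensionProperties)get_proc("vkEnumerateInstanceExtensionProperties");
	fns.vkEnumerateInstanceLayerProperties = (PFN_vkEnumerateInstanceLayerProperties)get_proc("vkEnumerateInstanceLayerProperties");
	fns.vkGetInstanceProcAddr = (PFN_vkGetInstanceProcAddr)get_proc("vkGetInstanceProcAddr");
	kvfPassGlobalVulkanFunctionPointers(&fns); /* presumably before kvfCreateInstance */
}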
KVF_DEFINE_VULKAN_FUNCTION_PROTOTYPE(vkDestroyRenderPass); + KVF_DEFINE_VULKAN_FUNCTION_PROTOTYPE(vkDestroySampler); + KVF_DEFINE_VULKAN_FUNCTION_PROTOTYPE(vkDestroySemaphore); + KVF_DEFINE_VULKAN_FUNCTION_PROTOTYPE(vkDestroyShaderModule); + KVF_DEFINE_VULKAN_FUNCTION_PROTOTYPE(vkDeviceWaitIdle); + KVF_DEFINE_VULKAN_FUNCTION_PROTOTYPE(vkEndCommandBuffer); + KVF_DEFINE_VULKAN_FUNCTION_PROTOTYPE(vkGetDeviceQueue); + KVF_DEFINE_VULKAN_FUNCTION_PROTOTYPE(vkGetImageSubresourceLayout); + KVF_DEFINE_VULKAN_FUNCTION_PROTOTYPE(vkQueueSubmit); + KVF_DEFINE_VULKAN_FUNCTION_PROTOTYPE(vkResetCommandBuffer); + KVF_DEFINE_VULKAN_FUNCTION_PROTOTYPE(vkResetDescriptorPool); + KVF_DEFINE_VULKAN_FUNCTION_PROTOTYPE(vkResetEvent); + KVF_DEFINE_VULKAN_FUNCTION_PROTOTYPE(vkResetFences); + KVF_DEFINE_VULKAN_FUNCTION_PROTOTYPE(vkUpdateDescriptorSets); + KVF_DEFINE_VULKAN_FUNCTION_PROTOTYPE(vkWaitForFences); + #ifndef KVF_NO_KHR + KVF_DEFINE_VULKAN_FUNCTION_PROTOTYPE(vkCreateSwapchainKHR); + KVF_DEFINE_VULKAN_FUNCTION_PROTOTYPE(vkDestroySwapchainKHR); + KVF_DEFINE_VULKAN_FUNCTION_PROTOTYPE(vkGetSwapchainImagesKHR); + KVF_DEFINE_VULKAN_FUNCTION_PROTOTYPE(vkQueuePresentKHR); + #endif + }; +#endif + #ifdef __cplusplus } #endif @@ -243,6 +354,16 @@ void kvfCheckVk(VkResult result); #define KVF_ASSERT(x) assert(x) #endif +#ifdef KVF_IMPL_VK_NO_PROTOTYPES + #define KVF_GET_GLOBAL_FUNCTION(fn) __kvf_g_fns.fn + #define KVF_GET_INSTANCE_FUNCTION(fn) __kvf_i_fns.fn + #define KVF_GET_DEVICE_FUNCTION(fn) kvf_device->fns.fn +#else + #define KVF_GET_GLOBAL_FUNCTION(fn) fn + #define KVF_GET_INSTANCE_FUNCTION(fn) fn + #define KVF_GET_DEVICE_FUNCTION(fn) fn +#endif + #include #include #include @@ -252,6 +373,11 @@ void kvfCheckVk(VkResult result); #endif #define KVF_DESCRIPTOR_POOL_CAPACITY 512 +#ifdef KVF_COMMAND_POOL_CAPACITY + #undef KVF_COMMAND_POOL_CAPACITY +#endif +#define KVF_COMMAND_POOL_CAPACITY 512 + typedef struct { int32_t graphics; @@ -259,25 +385,31 @@ typedef struct int32_t compute; } __KvfQueueFamilies; -typedef struct +typedef struct __KvfDescriptorPool { VkDescriptorPool pool; size_t capacity; size_t size; } __KvfDescriptorPool; -typedef struct +typedef struct __KvfDevice { + __KvfQueueFamilies queues; + #ifdef KVF_IMPL_VK_NO_PROTOTYPES + KvfDeviceVulkanFunctions fns; + #endif VkDevice device; VkPhysicalDevice physical; VkCommandPool cmd_pool; - __KvfQueueFamilies queues; + VkCommandBuffer* cmd_buffers = NULL; __KvfDescriptorPool* sets_pools; + size_t cmd_buffers_size = 0; + size_t cmd_buffers_capacity = 0; size_t sets_pools_size; } __KvfDevice; #ifndef KVF_NO_KHR - typedef struct + typedef struct __KvfSwapchainSupportInternal { VkSurfaceCapabilitiesKHR capabilities; VkSurfaceFormatKHR* formats; @@ -286,7 +418,7 @@ typedef struct uint32_t presentModes_count; } __KvfSwapchainSupportInternal; - typedef struct + typedef struct __KvfSwapchain { __KvfSwapchainSupportInternal support; VkSwapchainKHR swapchain; @@ -296,7 +428,7 @@ typedef struct } __KvfSwapchain; #endif -typedef struct +typedef struct __KvfFramebuffer { VkFramebuffer framebuffer; VkExtent2D extent; @@ -340,6 +472,11 @@ KvfErrorCallback __kvf_error_callback = NULL; KvfErrorCallback __kvf_validation_error_callback = NULL; KvfErrorCallback __kvf_validation_warning_callback = NULL; +#ifdef KVF_IMPL_VK_NO_PROTOTYPES + KvfGlobalVulkanFunctions __kvf_g_fns; + KvfInstanceVulkanFunctions __kvf_i_fns; +#endif + void __kvfCheckVk(VkResult result, const char* function) { if(result != VK_SUCCESS) @@ -403,12 +540,16 @@ void __kvfCompleteDevice(VkPhysicalDevice 
physical, VkDevice device) pool_info.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO; pool_info.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT; pool_info.queueFamilyIndex = kvf_device->queues.graphics; - __kvfCheckVk(vkCreateCommandPool(device, &pool_info, NULL, &pool)); + __kvfCheckVk(KVF_GET_DEVICE_FUNCTION(vkCreateCommandPool)(device, &pool_info, NULL, &pool)); kvf_device->device = device; kvf_device->cmd_pool = pool; kvf_device->sets_pools = NULL; kvf_device->sets_pools_size = 0; + kvf_device->cmd_buffers_size = 0; + kvf_device->cmd_buffers_capacity = KVF_COMMAND_POOL_CAPACITY; + kvf_device->cmd_buffers = (VkCommandBuffer*)KVF_MALLOC(KVF_COMMAND_POOL_CAPACITY * sizeof(VkCommandBuffer)); + KVF_ASSERT(kvf_device->cmd_buffers != NULL && "allocation failed :("); } void __kvfCompleteDeviceCustomPhysicalDeviceAndQueues(VkPhysicalDevice physical, VkDevice device, int32_t graphics_queue, int32_t present_queue, int32_t compute_queue) @@ -433,12 +574,16 @@ void __kvfCompleteDeviceCustomPhysicalDeviceAndQueues(VkPhysicalDevice physical, pool_info.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO; pool_info.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT; pool_info.queueFamilyIndex = kvf_device->queues.graphics; - __kvfCheckVk(vkCreateCommandPool(device, &pool_info, NULL, &pool)); + __kvfCheckVk(KVF_GET_DEVICE_FUNCTION(vkCreateCommandPool)(device, &pool_info, NULL, &pool)); kvf_device->device = device; kvf_device->cmd_pool = pool; kvf_device->sets_pools = NULL; kvf_device->sets_pools_size = 0; + kvf_device->cmd_buffers_size = 0; + kvf_device->cmd_buffers_capacity = KVF_COMMAND_POOL_CAPACITY; + kvf_device->cmd_buffers = (VkCommandBuffer*)KVF_MALLOC(KVF_COMMAND_POOL_CAPACITY * sizeof(VkCommandBuffer)); + KVF_ASSERT(kvf_device->cmd_buffers != NULL && "allocation failed :("); } void __kvfDestroyDescriptorPools(VkDevice device); @@ -450,9 +595,11 @@ void __kvfDestroyDevice(VkDevice device) { if(__kvf_internal_devices[i].device == device) { - vkDestroyCommandPool(device, __kvf_internal_devices[i].cmd_pool, NULL); + __KvfDevice* kvf_device = &__kvf_internal_devices[i]; + KVF_FREE(kvf_device->cmd_buffers); + KVF_GET_DEVICE_FUNCTION(vkDestroyCommandPool)(device, kvf_device->cmd_pool, NULL); __kvfDestroyDescriptorPools(device); - vkDestroyDevice(device, NULL); + KVF_GET_DEVICE_FUNCTION(vkDestroyDevice)(device, NULL); // Shift the elements to fill the gap for(size_t j = i; j < __kvf_internal_devices_size - 1; j++) __kvf_internal_devices[j] = __kvf_internal_devices[j + 1]; @@ -489,6 +636,20 @@ __KvfDevice* __kvfGetKvfDeviceFromVkDevice(VkDevice device) return NULL; } +__KvfDevice* __kvfGetKvfDeviceFromVkCommandBuffer(VkCommandBuffer cmd) +{ + KVF_ASSERT(cmd != VK_NULL_HANDLE); + for(size_t i = 0; i < __kvf_internal_devices_size; i++) + { + for(size_t j = 0; j < __kvf_internal_devices[i].cmd_buffers_size; j++) + { + if(__kvf_internal_devices[i].cmd_buffers[j] == cmd) + return &__kvf_internal_devices[i]; + } + } + return NULL; +} + #ifndef KVF_NO_KHR void __kvfAddSwapchainToArray(VkSwapchainKHR swapchain, __KvfSwapchainSupportInternal support, VkFormat format, uint32_t images_count, VkExtent2D extent) { @@ -513,11 +674,16 @@ __KvfDevice* __kvfGetKvfDeviceFromVkDevice(VkDevice device) KVF_ASSERT(swapchain != VK_NULL_HANDLE); KVF_ASSERT(device != VK_NULL_HANDLE); + #ifdef KVF_IMPL_VK_NO_PROTOTYPES + __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); + KVF_ASSERT(kvf_device != NULL); + #endif + for(size_t i = 0; i < __kvf_internal_swapchains_size; i++) { 
if(__kvf_internal_swapchains[i].swapchain == swapchain) { - vkDestroySwapchainKHR(device, swapchain, NULL); + KVF_GET_DEVICE_FUNCTION(vkDestroySwapchainKHR)(device, swapchain, NULL); // Shift the elements to fill the gap for(size_t j = i; j < __kvf_internal_swapchains_size - 1; j++) __kvf_internal_swapchains[j] = __kvf_internal_swapchains[j + 1]; @@ -564,11 +730,16 @@ void __kvfDestroyFramebuffer(VkDevice device, VkFramebuffer framebuffer) KVF_ASSERT(framebuffer != VK_NULL_HANDLE); KVF_ASSERT(device != VK_NULL_HANDLE); + #ifdef KVF_IMPL_VK_NO_PROTOTYPES + __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); + KVF_ASSERT(kvf_device != NULL); + #endif + for(size_t i = 0; i < __kvf_internal_framebuffers_size; i++) { if(__kvf_internal_framebuffers[i].framebuffer == framebuffer) { - vkDestroyFramebuffer(device, framebuffer, NULL); + KVF_GET_DEVICE_FUNCTION(vkDestroyFramebuffer)(device, framebuffer, NULL); // Shift the elements to fill the gap for(size_t j = i; j < __kvf_internal_framebuffers_size - 1; j++) __kvf_internal_framebuffers[j] = __kvf_internal_framebuffers[j + 1]; @@ -624,7 +795,7 @@ VkDescriptorPool __kvfDeviceCreateDescriptorPool(VkDevice device) pool_info.maxSets = KVF_DESCRIPTOR_POOL_CAPACITY; pool_info.flags = VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT; - __kvfCheckVk(vkCreateDescriptorPool(device, &pool_info, NULL, &kvf_device->sets_pools[kvf_device->sets_pools_size - 1].pool)); + __kvfCheckVk(KVF_GET_DEVICE_FUNCTION(vkCreateDescriptorPool)(device, &pool_info, NULL, &kvf_device->sets_pools[kvf_device->sets_pools_size - 1].pool)); kvf_device->sets_pools[kvf_device->sets_pools_size - 1].capacity = KVF_DESCRIPTOR_POOL_CAPACITY; return kvf_device->sets_pools[kvf_device->sets_pools_size - 1].pool; } @@ -636,7 +807,7 @@ void __kvfDestroyDescriptorPools(VkDevice device) KVF_ASSERT(kvf_device != NULL); for(size_t i = 0; i < kvf_device->sets_pools_size; i++) - vkDestroyDescriptorPool(device, kvf_device->sets_pools[i].pool, NULL); + KVF_GET_DEVICE_FUNCTION(vkDestroyDescriptorPool)(device, kvf_device->sets_pools[i].pool, NULL); KVF_FREE(kvf_device->sets_pools); kvf_device->sets_pools_size = 0; } @@ -656,6 +827,20 @@ void kvfSetValidationWarningCallback(KvfErrorCallback callback) __kvf_validation_warning_callback = callback; } +#ifdef KVF_IMPL_VK_NO_PROTOTYPES + void kvfPassGlobalVulkanFunctionPointers(const KvfGlobalVulkanFunctions* fns) + { + KVF_ASSERT(fns != NULL); + __kvf_g_fns = *fns; + } + + void kvfPassInstanceVulkanFunctionPointers(const KvfInstanceVulkanFunctions* fns) + { + KVF_ASSERT(fns != NULL); + __kvf_i_fns = *fns; + } +#endif + bool kvfIsStencilFormat(VkFormat format) { switch(format) @@ -763,7 +948,7 @@ VkFormat kvfFindSupportFormatInCandidates(VkDevice device, VkFormat* candidates, for(size_t i = 0; i < candidates_count; i++) { VkFormatProperties props; - vkGetPhysicalDeviceFormatProperties(kvf_device->physical, candidates[i], &props); + KVF_GET_INSTANCE_FUNCTION(vkGetPhysicalDeviceFormatProperties)(kvf_device->physical, candidates[i], &props); if(tiling == VK_IMAGE_TILING_LINEAR && (props.linearTilingFeatures & flags) == flags) return candidates[i]; else if(tiling == VK_IMAGE_TILING_OPTIMAL && (props.optimalTilingFeatures & flags) == flags) @@ -944,9 +1129,10 @@ const char* kvfVerbaliseVkResult(VkResult result) bool __kvfCheckValidationLayerSupport() { uint32_t layer_count; - vkEnumerateInstanceLayerProperties(&layer_count, NULL); + KVF_GET_GLOBAL_FUNCTION(vkEnumerateInstanceLayerProperties)(&layer_count, NULL); VkLayerProperties* available_layers = 
(VkLayerProperties*)KVF_MALLOC(sizeof(VkLayerProperties) * layer_count); - vkEnumerateInstanceLayerProperties(&layer_count, available_layers); + KVF_ASSERT(available_layers != NULL && "allocation failed :("); + KVF_GET_GLOBAL_FUNCTION(vkEnumerateInstanceLayerProperties)(&layer_count, available_layers); for(size_t i = 0; i < __kvf_extra_layers_count; i++) { bool found = false; @@ -1007,16 +1193,17 @@ const char* kvfVerbaliseVkResult(VkResult result) VkResult __kvfCreateDebugUtilsMessengerEXT(VkInstance instance, const VkDebugUtilsMessengerCreateInfoEXT* create_info, VkDebugUtilsMessengerEXT* messenger) { - PFN_vkCreateDebugUtilsMessengerEXT func = (PFN_vkCreateDebugUtilsMessengerEXT)vkGetInstanceProcAddr(instance, "vkCreateDebugUtilsMessengerEXT"); + PFN_vkCreateDebugUtilsMessengerEXT func = (PFN_vkCreateDebugUtilsMessengerEXT)KVF_GET_GLOBAL_FUNCTION(vkGetInstanceProcAddr)(instance, "vkCreateDebugUtilsMessengerEXT"); return func ? func(instance, create_info, NULL, messenger) : VK_ERROR_EXTENSION_NOT_PRESENT; } void __kvfInitValidationLayers(VkInstance instance) { uint32_t extension_count; - vkEnumerateInstanceExtensionProperties(NULL, &extension_count, NULL); + KVF_GET_GLOBAL_FUNCTION(vkEnumerateInstanceExtensionProperties)(NULL, &extension_count, NULL); VkExtensionProperties* extensions = (VkExtensionProperties*)KVF_MALLOC(extension_count * sizeof(VkExtensionProperties)); - vkEnumerateInstanceExtensionProperties(NULL, &extension_count, extensions); + KVF_ASSERT(extensions != NULL && "allocation failed :("); + KVF_GET_GLOBAL_FUNCTION(vkEnumerateInstanceExtensionProperties)(NULL, &extension_count, extensions); bool extension_found = false; for(uint32_t i = 0; i < extension_count; i++) { @@ -1046,7 +1233,7 @@ const char* kvfVerbaliseVkResult(VkResult result) void __kvfDestroyDebugUtilsMessengerEXT(VkInstance instance) { - PFN_vkDestroyDebugUtilsMessengerEXT func = (PFN_vkDestroyDebugUtilsMessengerEXT)vkGetInstanceProcAddr(instance, "vkDestroyDebugUtilsMessengerEXT"); + PFN_vkDestroyDebugUtilsMessengerEXT func = (PFN_vkDestroyDebugUtilsMessengerEXT)KVF_GET_GLOBAL_FUNCTION(vkGetInstanceProcAddr)(instance, "vkDestroyDebugUtilsMessengerEXT"); if(func) func(instance, __kvf_debug_messenger, NULL); } @@ -1058,7 +1245,7 @@ void kvfAddLayer(const char* layer) __kvf_extra_layers = (char**)KVF_REALLOC(__kvf_extra_layers, sizeof(char*) * (__kvf_extra_layers_count + 1)); KVF_ASSERT(__kvf_extra_layers != NULL); __kvf_extra_layers[__kvf_extra_layers_count] = (char*)KVF_MALLOC(strlen(layer) + 1); - KVF_ASSERT(__kvf_extra_layers[__kvf_extra_layers_count] != NULL); + KVF_ASSERT(__kvf_extra_layers[__kvf_extra_layers_count] != NULL && "allocation failed :("); strcpy(__kvf_extra_layers[__kvf_extra_layers_count], layer); __kvf_extra_layers_count++; #else @@ -1100,6 +1287,7 @@ VkInstance kvfCreateInstance(const char** extensions_enabled, uint32_t extension { __kvfPopulateDebugMessengerCreateInfo(&debug_create_info); new_extension_set = (const char**)KVF_MALLOC(sizeof(char*) * (extensions_count + 1)); + KVF_ASSERT(new_extension_set != NULL && "allocation failed :("); memcpy(new_extension_set, extensions_enabled, sizeof(char*) * extensions_count); new_extension_set[extensions_count] = VK_EXT_DEBUG_UTILS_EXTENSION_NAME; @@ -1111,7 +1299,7 @@ VkInstance kvfCreateInstance(const char** extensions_enabled, uint32_t extension } #endif - __kvfCheckVk(vkCreateInstance(&create_info, NULL, &instance)); + __kvfCheckVk(KVF_GET_GLOBAL_FUNCTION(vkCreateInstance)(&create_info, NULL, &instance)); #ifdef KVF_ENABLE_VALIDATION_LAYERS 
KVF_FREE(new_extension_set); __kvfInitValidationLayers(instance); @@ -1130,16 +1318,17 @@ void kvfDestroyInstance(VkInstance instance) KVF_FREE(__kvf_extra_layers); __kvf_extra_layers_count = 0; #endif - vkDestroyInstance(instance, NULL); + KVF_GET_INSTANCE_FUNCTION(vkDestroyInstance)(instance, NULL); } __KvfQueueFamilies __kvfFindQueueFamilies(VkPhysicalDevice physical, VkSurfaceKHR surface) { __KvfQueueFamilies queues = { -1, -1, -1 }; uint32_t queue_family_count; - vkGetPhysicalDeviceQueueFamilyProperties(physical, &queue_family_count, NULL); + KVF_GET_INSTANCE_FUNCTION(vkGetPhysicalDeviceQueueFamilyProperties)(physical, &queue_family_count, NULL); VkQueueFamilyProperties* queue_families = (VkQueueFamilyProperties*)KVF_MALLOC(sizeof(VkQueueFamilyProperties) * queue_family_count); - vkGetPhysicalDeviceQueueFamilyProperties(physical, &queue_family_count, queue_families); + KVF_ASSERT(queue_families != NULL && "allocation failed :("); + KVF_GET_INSTANCE_FUNCTION(vkGetPhysicalDeviceQueueFamilyProperties)(physical, &queue_family_count, queue_families); for(uint32_t i = 0; i < queue_family_count; i++) { @@ -1154,7 +1343,7 @@ __KvfQueueFamilies __kvfFindQueueFamilies(VkPhysicalDevice physical, VkSurfaceKH VkBool32 present_support = false; if(surface != VK_NULL_HANDLE) { - vkGetPhysicalDeviceSurfaceSupportKHR(physical, i, surface, &present_support); + KVF_GET_INSTANCE_FUNCTION(vkGetPhysicalDeviceSurfaceSupportKHR)(physical, i, surface, &present_support); if(present_support) queues.present = i; if(queues.graphics != -1 && queues.present != -1 && queues.compute != -1) @@ -1179,9 +1368,10 @@ VkPhysicalDevice kvfPickFirstPhysicalDevice(VkInstance instance, VkSurfaceKHR su KVF_ASSERT(instance != VK_NULL_HANDLE); - vkEnumeratePhysicalDevices(instance, &device_count, NULL); + KVF_GET_INSTANCE_FUNCTION(vkEnumeratePhysicalDevices)(instance, &device_count, NULL); devices = (VkPhysicalDevice*)KVF_MALLOC(sizeof(VkPhysicalDevice) * device_count + 1); - vkEnumeratePhysicalDevices(instance, &device_count, devices); + KVF_ASSERT(devices != NULL && "allocation failed :("); + KVF_GET_INSTANCE_FUNCTION(vkEnumeratePhysicalDevices)(instance, &device_count, devices); chosen_one = devices[0]; KVF_FREE(devices); __KvfQueueFamilies queues = __kvfFindQueueFamilies(chosen_one, surface); @@ -1199,9 +1389,10 @@ int32_t __kvfScorePhysicalDevice(VkPhysicalDevice device, VkSurfaceKHR surface, { /* Check extensions support */ uint32_t extension_count; - vkEnumerateDeviceExtensionProperties(device, NULL, &extension_count, NULL); + KVF_GET_INSTANCE_FUNCTION(vkEnumerateDeviceExtensionProperties)(device, NULL, &extension_count, NULL); VkExtensionProperties* props = (VkExtensionProperties*)KVF_MALLOC(sizeof(VkExtensionProperties) * extension_count + 1); - vkEnumerateDeviceExtensionProperties(device, NULL, &extension_count, props); + KVF_ASSERT(props != NULL && "allocation failed :("); + KVF_GET_INSTANCE_FUNCTION(vkEnumerateDeviceExtensionProperties)(device, NULL, &extension_count, props); bool are_there_required_device_extensions = true; for(uint32_t j = 0; j < device_extensions_count; j++) @@ -1235,17 +1426,17 @@ int32_t __kvfScorePhysicalDevice(VkPhysicalDevice device, VkSurfaceKHR surface, { /* Check surface formats counts */ uint32_t format_count; - vkGetPhysicalDeviceSurfaceFormatsKHR(device, surface, &format_count, NULL); + KVF_GET_INSTANCE_FUNCTION(vkGetPhysicalDeviceSurfaceFormatsKHR)(device, surface, &format_count, NULL); if(format_count == 0) return -1; } #endif VkPhysicalDeviceProperties device_props; - 
vkGetPhysicalDeviceProperties(device, &device_props); + KVF_GET_INSTANCE_FUNCTION(vkGetPhysicalDeviceProperties)(device, &device_props); VkPhysicalDeviceFeatures device_features; - vkGetPhysicalDeviceFeatures(device, &device_features); + KVF_GET_INSTANCE_FUNCTION(vkGetPhysicalDeviceFeatures)(device, &device_features); int32_t score = -1; if(device_props.deviceType == VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU) @@ -1269,9 +1460,10 @@ VkPhysicalDevice kvfPickGoodPhysicalDevice(VkInstance instance, VkSurfaceKHR sur KVF_ASSERT(instance != VK_NULL_HANDLE); - vkEnumeratePhysicalDevices(instance, &device_count, NULL); + KVF_GET_INSTANCE_FUNCTION(vkEnumeratePhysicalDevices)(instance, &device_count, NULL); devices = (VkPhysicalDevice*)KVF_MALLOC(sizeof(VkPhysicalDevice) * device_count + 1); - vkEnumeratePhysicalDevices(instance, &device_count, devices); + KVF_ASSERT(devices != NULL && "allocation failed :("); + KVF_GET_INSTANCE_FUNCTION(vkEnumeratePhysicalDevices)(instance, &device_count, devices); for(uint32_t i = 0; i < device_count; i++) { @@ -1303,41 +1495,42 @@ VkDevice kvfCreateDevice(VkPhysicalDevice physical, const char** extensions, uin { const float queue_priority = 1.0f; - __KvfDevice* kvfdevice = __kvfGetKvfDeviceFromVkPhysicalDevice(physical); + __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkPhysicalDevice(physical); - KVF_ASSERT(kvfdevice != NULL); + KVF_ASSERT(kvf_device != NULL); uint32_t queue_count = 0; - queue_count += (kvfdevice->queues.graphics != -1); - queue_count += (kvfdevice->queues.present != -1); - queue_count += (kvfdevice->queues.compute != -1); + queue_count += (kvf_device->queues.graphics != -1); + queue_count += (kvf_device->queues.present != -1); + queue_count += (kvf_device->queues.compute != -1); VkDeviceQueueCreateInfo* queue_create_infos = (VkDeviceQueueCreateInfo*)KVF_MALLOC(queue_count * sizeof(VkDeviceQueueCreateInfo)); + KVF_ASSERT(queue_create_infos != NULL && "allocation failed :("); size_t i = 0; - if(kvfdevice->queues.graphics != -1) + if(kvf_device->queues.graphics != -1) { queue_create_infos[i].sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO; - queue_create_infos[i].queueFamilyIndex = kvfdevice->queues.graphics; + queue_create_infos[i].queueFamilyIndex = kvf_device->queues.graphics; queue_create_infos[i].queueCount = 1; queue_create_infos[i].pQueuePriorities = &queue_priority; queue_create_infos[i].flags = 0; queue_create_infos[i].pNext = NULL; i++; } - if(kvfdevice->queues.present != -1 && kvfdevice->queues.present != kvfdevice->queues.graphics) + if(kvf_device->queues.present != -1 && kvf_device->queues.present != kvf_device->queues.graphics) { queue_create_infos[i].sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO; - queue_create_infos[i].queueFamilyIndex = kvfdevice->queues.present; + queue_create_infos[i].queueFamilyIndex = kvf_device->queues.present; queue_create_infos[i].queueCount = 1; queue_create_infos[i].pQueuePriorities = &queue_priority; queue_create_infos[i].flags = 0; queue_create_infos[i].pNext = NULL; i++; } - if(kvfdevice->queues.compute != -1 && kvfdevice->queues.present != kvfdevice->queues.compute && kvfdevice->queues.graphics != kvfdevice->queues.compute) + if(kvf_device->queues.compute != -1 && kvf_device->queues.present != kvf_device->queues.compute && kvf_device->queues.graphics != kvf_device->queues.compute) { queue_create_infos[i].sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO; - queue_create_infos[i].queueFamilyIndex = kvfdevice->queues.compute; + queue_create_infos[i].queueFamilyIndex = kvf_device->queues.compute; 
queue_create_infos[i].queueCount = 1; queue_create_infos[i].pQueuePriorities = &queue_priority; queue_create_infos[i].flags = 0; @@ -1358,7 +1551,7 @@ VkDevice kvfCreateDevice(VkPhysicalDevice physical, const char** extensions, uin createInfo.pNext = NULL; VkDevice device; - __kvfCheckVk(vkCreateDevice(physical, &createInfo, NULL, &device)); + __kvfCheckVk(KVF_GET_INSTANCE_FUNCTION(vkCreateDevice)(physical, &createInfo, NULL, &device)); __kvfCompleteDevice(physical, device); return device; @@ -1381,6 +1574,7 @@ VkDevice kvfCreateDeviceCustomPhysicalDeviceAndQueues(VkPhysicalDevice physical, queue_count += (compute_queue != -1); VkDeviceQueueCreateInfo* queue_create_infos = (VkDeviceQueueCreateInfo*)KVF_MALLOC(queue_count * sizeof(VkDeviceQueueCreateInfo)); + KVF_ASSERT(queue_create_infos != NULL && "allocation failed :("); size_t i = 0; if(graphics_queue != -1) { @@ -1426,12 +1620,23 @@ VkDevice kvfCreateDeviceCustomPhysicalDeviceAndQueues(VkPhysicalDevice physical, createInfo.pNext = NULL; VkDevice device; - __kvfCheckVk(vkCreateDevice(physical, &createInfo, NULL, &device)); + __kvfCheckVk(KVF_GET_INSTANCE_FUNCTION(vkCreateDevice)(physical, &createInfo, NULL, &device)); __kvfCompleteDeviceCustomPhysicalDeviceAndQueues(physical, device, graphics_queue, present_queue, compute_queue); return device; } +#ifdef KVF_IMPL_VK_NO_PROTOTYPES + void kvfPassDeviceVulkanFunctionPointers(VkDevice device, const KvfDeviceVulkanFunctions* fns) + { + KVF_ASSERT(device != VK_NULL_HANDLE); + KVF_ASSERT(fns != NULL); + __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); + KVF_ASSERT(kvf_device != NULL); + kvf_device->fns = *fns; + } +#endif + void kvfDestroyDevice(VkDevice device) { if(device == VK_NULL_HANDLE) @@ -1442,23 +1647,23 @@ void kvfDestroyDevice(VkDevice device) VkQueue kvfGetDeviceQueue(VkDevice device, KvfQueueType queue) { KVF_ASSERT(device != VK_NULL_HANDLE); - __KvfDevice* kvfdevice = __kvfGetKvfDeviceFromVkDevice(device); - KVF_ASSERT(kvfdevice != NULL); + __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); + KVF_ASSERT(kvf_device != NULL); VkQueue vk_queue = VK_NULL_HANDLE; if(queue == KVF_GRAPHICS_QUEUE) { - KVF_ASSERT(kvfdevice->queues.graphics != -1); - vkGetDeviceQueue(device, kvfdevice->queues.graphics, 0, &vk_queue); + KVF_ASSERT(kvf_device->queues.graphics != -1); + KVF_GET_DEVICE_FUNCTION(vkGetDeviceQueue)(device, kvf_device->queues.graphics, 0, &vk_queue); } else if(queue == KVF_PRESENT_QUEUE) { - KVF_ASSERT(kvfdevice->queues.present != -1); - vkGetDeviceQueue(device, kvfdevice->queues.present, 0, &vk_queue); + KVF_ASSERT(kvf_device->queues.present != -1); + KVF_GET_DEVICE_FUNCTION(vkGetDeviceQueue)(device, kvf_device->queues.present, 0, &vk_queue); } else if(queue == KVF_COMPUTE_QUEUE) { - KVF_ASSERT(kvfdevice->queues.compute != -1); - vkGetDeviceQueue(device, kvfdevice->queues.compute, 0, &vk_queue); + KVF_ASSERT(kvf_device->queues.compute != -1); + KVF_GET_DEVICE_FUNCTION(vkGetDeviceQueue)(device, kvf_device->queues.compute, 0, &vk_queue); } return vk_queue; } @@ -1466,14 +1671,14 @@ VkQueue kvfGetDeviceQueue(VkDevice device, KvfQueueType queue) uint32_t kvfGetDeviceQueueFamily(VkDevice device, KvfQueueType queue) { KVF_ASSERT(device != VK_NULL_HANDLE); - __KvfDevice* kvfdevice = __kvfGetKvfDeviceFromVkDevice(device); - KVF_ASSERT(kvfdevice != NULL); + __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); + KVF_ASSERT(kvf_device != NULL); if(queue == KVF_GRAPHICS_QUEUE) - return kvfdevice->queues.graphics; + return kvf_device->queues.graphics; 
else if(queue == KVF_PRESENT_QUEUE) - return kvfdevice->queues.present; + return kvf_device->queues.present; else if(queue == KVF_COMPUTE_QUEUE) - return kvfdevice->queues.compute; + return kvf_device->queues.compute; KVF_ASSERT(false && "invalid queue"); return 0; } @@ -1482,6 +1687,10 @@ uint32_t kvfGetDeviceQueueFamily(VkDevice device, KvfQueueType queue) bool kvfQueuePresentKHR(VkDevice device, VkSemaphore wait, VkSwapchainKHR swapchain, uint32_t image_index) { KVF_ASSERT(device != VK_NULL_HANDLE); + #ifdef KVF_IMPL_VK_NO_PROTOTYPES + __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); + KVF_ASSERT(kvf_device != NULL); + #endif VkPresentInfoKHR present_info = {}; present_info.sType = VK_STRUCTURE_TYPE_PRESENT_INFO_KHR; present_info.waitSemaphoreCount = 1; @@ -1489,7 +1698,7 @@ uint32_t kvfGetDeviceQueueFamily(VkDevice device, KvfQueueType queue) present_info.swapchainCount = 1; present_info.pSwapchains = &swapchain; present_info.pImageIndices = &image_index; - VkResult result = vkQueuePresentKHR(kvfGetDeviceQueue(device, KVF_PRESENT_QUEUE), &present_info); + VkResult result = KVF_GET_DEVICE_FUNCTION(vkQueuePresentKHR)(kvfGetDeviceQueue(device, KVF_PRESENT_QUEUE), &present_info); if(result == VK_ERROR_OUT_OF_DATE_KHR || result == VK_SUBOPTIMAL_KHR) return false; else @@ -1504,9 +1713,10 @@ int32_t kvfFindDeviceQueueFamily(VkPhysicalDevice physical, KvfQueueType type) KVF_ASSERT(type != KVF_PRESENT_QUEUE && "Use kvfFindDeviceQueueFamilyKHR to find present queue"); uint32_t queue_family_count; - vkGetPhysicalDeviceQueueFamilyProperties(physical, &queue_family_count, NULL); + KVF_GET_INSTANCE_FUNCTION(vkGetPhysicalDeviceQueueFamilyProperties)(physical, &queue_family_count, NULL); VkQueueFamilyProperties* queue_families = (VkQueueFamilyProperties*)KVF_MALLOC(sizeof(VkQueueFamilyProperties) * queue_family_count); - vkGetPhysicalDeviceQueueFamilyProperties(physical, &queue_family_count, queue_families); + KVF_ASSERT(queue_families != NULL && "allocation failed :("); + KVF_GET_INSTANCE_FUNCTION(vkGetPhysicalDeviceQueueFamilyProperties)(physical, &queue_family_count, queue_families); int32_t queue = -1; @@ -1542,16 +1752,17 @@ int32_t kvfFindDeviceQueueFamily(VkPhysicalDevice physical, KvfQueueType type) return kvfFindDeviceQueueFamily(physical, type); uint32_t queue_family_count; - vkGetPhysicalDeviceQueueFamilyProperties(physical, &queue_family_count, NULL); + KVF_GET_INSTANCE_FUNCTION(vkGetPhysicalDeviceQueueFamilyProperties)(physical, &queue_family_count, NULL); VkQueueFamilyProperties* queue_families = (VkQueueFamilyProperties*)KVF_MALLOC(sizeof(VkQueueFamilyProperties) * queue_family_count); - vkGetPhysicalDeviceQueueFamilyProperties(physical, &queue_family_count, queue_families); + KVF_ASSERT(queue_families != NULL && "allocation failed :("); + KVF_GET_INSTANCE_FUNCTION(vkGetPhysicalDeviceQueueFamilyProperties)(physical, &queue_family_count, queue_families); int32_t queue = -1; for(uint32_t i = 0; i < queue_family_count; i++) { VkBool32 present_support = false; - vkGetPhysicalDeviceSurfaceSupportKHR(physical, i, surface, &present_support); + KVF_GET_INSTANCE_FUNCTION(vkGetPhysicalDeviceSurfaceSupportKHR)(physical, i, surface, &present_support); if(present_support) queue = i; if(queue != -1) @@ -1565,11 +1776,15 @@ int32_t kvfFindDeviceQueueFamily(VkPhysicalDevice physical, KvfQueueType type) VkFence kvfCreateFence(VkDevice device) { KVF_ASSERT(device != VK_NULL_HANDLE); + #ifdef KVF_IMPL_VK_NO_PROTOTYPES + __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); + 
KVF_ASSERT(kvf_device != NULL); + #endif VkFenceCreateInfo fence_info = {}; fence_info.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO; fence_info.flags = VK_FENCE_CREATE_SIGNALED_BIT; VkFence fence; - __kvfCheckVk(vkCreateFence(device, &fence_info, NULL, &fence)); + __kvfCheckVk(KVF_GET_DEVICE_FUNCTION(vkCreateFence)(device, &fence_info, NULL, &fence)); return fence; } @@ -1577,7 +1792,11 @@ void kvfWaitForFence(VkDevice device, VkFence fence) { KVF_ASSERT(device != VK_NULL_HANDLE); KVF_ASSERT(fence != VK_NULL_HANDLE); - vkWaitForFences(device, 1, &fence, VK_TRUE, UINT64_MAX); + #ifdef KVF_IMPL_VK_NO_PROTOTYPES + __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); + KVF_ASSERT(kvf_device != NULL); + #endif + KVF_GET_DEVICE_FUNCTION(vkWaitForFences)(device, 1, &fence, VK_TRUE, UINT64_MAX); } void kvfDestroyFence(VkDevice device, VkFence fence) @@ -1585,16 +1804,24 @@ void kvfDestroyFence(VkDevice device, VkFence fence) if(fence == VK_NULL_HANDLE) return; KVF_ASSERT(device != VK_NULL_HANDLE); - vkDestroyFence(device, fence, NULL); + #ifdef KVF_IMPL_VK_NO_PROTOTYPES + __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); + KVF_ASSERT(kvf_device != NULL); + #endif + KVF_GET_DEVICE_FUNCTION(vkDestroyFence)(device, fence, NULL); } VkSemaphore kvfCreateSemaphore(VkDevice device) { KVF_ASSERT(device != VK_NULL_HANDLE); + #ifdef KVF_IMPL_VK_NO_PROTOTYPES + __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); + KVF_ASSERT(kvf_device != NULL); + #endif VkSemaphoreCreateInfo semaphore_info = {}; semaphore_info.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO; VkSemaphore semaphore; - __kvfCheckVk(vkCreateSemaphore(device, &semaphore_info, NULL, &semaphore)); + __kvfCheckVk(KVF_GET_DEVICE_FUNCTION(vkCreateSemaphore)(device, &semaphore_info, NULL, &semaphore)); return semaphore; } @@ -1603,7 +1830,11 @@ void kvfDestroySemaphore(VkDevice device, VkSemaphore semaphore) if(semaphore == VK_NULL_HANDLE) return; KVF_ASSERT(device != VK_NULL_HANDLE); - vkDestroySemaphore(device, semaphore, NULL); + #ifdef KVF_IMPL_VK_NO_PROTOTYPES + __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); + KVF_ASSERT(kvf_device != NULL); + #endif + KVF_GET_DEVICE_FUNCTION(vkDestroySemaphore)(device, semaphore, NULL); } #ifndef KVF_NO_KHR @@ -1611,20 +1842,22 @@ void kvfDestroySemaphore(VkDevice device, VkSemaphore semaphore) { __KvfSwapchainSupportInternal support; - __kvfCheckVk(vkGetPhysicalDeviceSurfaceCapabilitiesKHR(physical, surface, &support.capabilities)); + __kvfCheckVk(KVF_GET_INSTANCE_FUNCTION(vkGetPhysicalDeviceSurfaceCapabilitiesKHR)(physical, surface, &support.capabilities)); - vkGetPhysicalDeviceSurfaceFormatsKHR(physical, surface, &support.formats_count, NULL); + KVF_GET_INSTANCE_FUNCTION(vkGetPhysicalDeviceSurfaceFormatsKHR)(physical, surface, &support.formats_count, NULL); if(support.formats_count != 0) { support.formats = (VkSurfaceFormatKHR*)KVF_MALLOC(sizeof(VkSurfaceFormatKHR) * support.formats_count); - vkGetPhysicalDeviceSurfaceFormatsKHR(physical, surface, &support.formats_count, support.formats); + KVF_ASSERT(support.formats != NULL && "allocation failed :("); + KVF_GET_INSTANCE_FUNCTION(vkGetPhysicalDeviceSurfaceFormatsKHR)(physical, surface, &support.formats_count, support.formats); } - vkGetPhysicalDeviceSurfacePresentModesKHR(physical, surface, &support.presentModes_count, NULL); + KVF_GET_INSTANCE_FUNCTION(vkGetPhysicalDeviceSurfacePresentModesKHR)(physical, surface, &support.presentModes_count, NULL); if(support.presentModes_count != 0) { 
support.presentModes = (VkPresentModeKHR*)KVF_MALLOC(sizeof(VkPresentModeKHR) * support.presentModes_count); - vkGetPhysicalDeviceSurfacePresentModesKHR(physical, surface, &support.presentModes_count, support.presentModes); + KVF_ASSERT(support.presentModes != NULL && "allocation failed :("); + KVF_GET_INSTANCE_FUNCTION(vkGetPhysicalDeviceSurfacePresentModesKHR)(physical, surface, &support.presentModes_count, support.presentModes); } return support; } @@ -1670,10 +1903,10 @@ void kvfDestroySemaphore(VkDevice device, VkSemaphore semaphore) if(support.capabilities.maxImageCount > 0 && image_count > support.capabilities.maxImageCount) image_count = support.capabilities.maxImageCount; - __KvfDevice* kvfdevice = __kvfGetKvfDeviceFromVkDevice(device); - KVF_ASSERT(kvfdevice != NULL); + __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); + KVF_ASSERT(kvf_device != NULL); - uint32_t queue_family_indices[] = { (uint32_t)kvfdevice->queues.graphics, (uint32_t)kvfdevice->queues.present }; + uint32_t queue_family_indices[] = { (uint32_t)kvf_device->queues.graphics, (uint32_t)kvf_device->queues.present }; if(support.capabilities.currentExtent.width != UINT32_MAX) extent = support.capabilities.currentExtent; @@ -1698,7 +1931,7 @@ void kvfDestroySemaphore(VkDevice device, VkSemaphore semaphore) createInfo.clipped = VK_TRUE; createInfo.oldSwapchain = VK_NULL_HANDLE; - if(kvfdevice->queues.graphics != kvfdevice->queues.present) + if(kvf_device->queues.graphics != kvf_device->queues.present) { createInfo.imageSharingMode = VK_SHARING_MODE_CONCURRENT; createInfo.queueFamilyIndexCount = 2; @@ -1707,10 +1940,10 @@ void kvfDestroySemaphore(VkDevice device, VkSemaphore semaphore) else createInfo.imageSharingMode = VK_SHARING_MODE_EXCLUSIVE; - __kvfCheckVk(vkCreateSwapchainKHR(device, &createInfo, NULL, &swapchain)); + __kvfCheckVk(KVF_GET_DEVICE_FUNCTION(vkCreateSwapchainKHR)(device, &createInfo, NULL, &swapchain)); uint32_t images_count; - vkGetSwapchainImagesKHR(device, swapchain, (uint32_t*)&images_count, NULL); + KVF_GET_DEVICE_FUNCTION(vkGetSwapchainImagesKHR)(device, swapchain, (uint32_t*)&images_count, NULL); __kvfAddSwapchainToArray(swapchain, support, surfaceFormat.format, images_count, extent); @@ -1757,6 +1990,10 @@ void kvfDestroySemaphore(VkDevice device, VkSemaphore semaphore) VkImage kvfCreateImage(VkDevice device, uint32_t width, uint32_t height, VkFormat format, VkImageTiling tiling, VkImageUsageFlags usage, KvfImageType type) { KVF_ASSERT(device != VK_NULL_HANDLE); + #ifdef KVF_IMPL_VK_NO_PROTOTYPES + __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); + KVF_ASSERT(kvf_device != NULL); + #endif VkImageCreateInfo image_info = {}; image_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO; image_info.imageType = VK_IMAGE_TYPE_2D; @@ -1779,7 +2016,7 @@ VkImage kvfCreateImage(VkDevice device, uint32_t width, uint32_t height, VkForma } VkImage image; - __kvfCheckVk(vkCreateImage(device, &image_info, NULL, &image)); + __kvfCheckVk(KVF_GET_DEVICE_FUNCTION(vkCreateImage)(device, &image_info, NULL, &image)); return image; } @@ -1788,6 +2025,10 @@ void kvfCopyImageToBuffer(VkCommandBuffer cmd, VkBuffer dst, VkImage src, size_t KVF_ASSERT(cmd != VK_NULL_HANDLE); KVF_ASSERT(dst != VK_NULL_HANDLE); KVF_ASSERT(src != VK_NULL_HANDLE); + #ifdef KVF_IMPL_VK_NO_PROTOTYPES + __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkCommandBuffer(cmd); + KVF_ASSERT(kvf_device != NULL); + #endif VkOffset3D offset = { 0, 0, 0 }; VkBufferImageCopy region = {}; region.bufferOffset = buffer_offset; @@ 
-1799,7 +2040,7 @@ void kvfCopyImageToBuffer(VkCommandBuffer cmd, VkBuffer dst, VkImage src, size_t region.imageSubresource.layerCount = 1; region.imageOffset = offset; region.imageExtent = extent; - vkCmdCopyImageToBuffer(cmd, src, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, dst, 1, &region); + KVF_GET_DEVICE_FUNCTION(vkCmdCopyImageToBuffer)(cmd, src, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, dst, 1, &region); } void kvfDestroyImage(VkDevice device, VkImage image) @@ -1807,12 +2048,20 @@ if(image == VK_NULL_HANDLE) return; KVF_ASSERT(device != VK_NULL_HANDLE); - vkDestroyImage(device, image, NULL); + #ifdef KVF_IMPL_VK_NO_PROTOTYPES + __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); + KVF_ASSERT(kvf_device != NULL); + #endif + KVF_GET_DEVICE_FUNCTION(vkDestroyImage)(device, image, NULL); } VkImageView kvfCreateImageView(VkDevice device, VkImage image, VkFormat format, VkImageViewType type, VkImageAspectFlags aspect, int layer_count) { KVF_ASSERT(device != VK_NULL_HANDLE); + #ifdef KVF_IMPL_VK_NO_PROTOTYPES + __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); + KVF_ASSERT(kvf_device != NULL); + #endif VkImageViewCreateInfo create_info = {}; create_info.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO; create_info.image = image; @@ -1828,7 +2077,7 @@ VkImageView kvfCreateImageView(VkDevice device, VkImage image, VkFormat format, create_info.subresourceRange.baseArrayLayer = 0; create_info.subresourceRange.layerCount = layer_count; VkImageView view; - __kvfCheckVk(vkCreateImageView(device, &create_info, NULL, &view)); + __kvfCheckVk(KVF_GET_DEVICE_FUNCTION(vkCreateImageView)(device, &create_info, NULL, &view)); return view; } @@ -1836,7 +2085,11 @@ void kvfDestroyImageView(VkDevice device, VkImageView image_view) { KVF_ASSERT(device != VK_NULL_HANDLE); KVF_ASSERT(image_view != VK_NULL_HANDLE); - vkDestroyImageView(device, image_view, NULL); + #ifdef KVF_IMPL_VK_NO_PROTOTYPES + __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); + KVF_ASSERT(kvf_device != NULL); + #endif + KVF_GET_DEVICE_FUNCTION(vkDestroyImageView)(device, image_view, NULL); } void kvfTransitionImageLayout(VkDevice device, VkImage image, KvfImageType type, VkCommandBuffer cmd, VkFormat format, VkImageLayout old_layout, VkImageLayout new_layout, bool is_single_time_cmd_buffer) @@ -1846,6 +2099,11 @@ void kvfTransitionImageLayout(VkDevice device, VkImage image, KvfImageType type, if(new_layout == old_layout) return; + #ifdef KVF_IMPL_VK_NO_PROTOTYPES + __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); + KVF_ASSERT(kvf_device != NULL); + #endif + if(is_single_time_cmd_buffer) kvfBeginCommandBuffer(cmd, VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT); @@ -1882,7 +2140,7 @@ void kvfTransitionImageLayout(VkDevice device, VkImage image, KvfImageType type, else destination_stage = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT; - vkCmdPipelineBarrier(cmd, source_stage, destination_stage, 0, 0, NULL, 0, NULL, 1, &barrier); + KVF_GET_DEVICE_FUNCTION(vkCmdPipelineBarrier)(cmd, source_stage, destination_stage, 0, 0, NULL, 0, NULL, 1, &barrier); if(is_single_time_cmd_buffer) { @@ -1896,6 +2154,10 @@ void kvfTransitionImageLayout(VkDevice device, VkImage image, KvfImageType type, VkSampler kvfCreateSampler(VkDevice device, VkFilter filters, VkSamplerAddressMode address_modes, VkSamplerMipmapMode mipmap_mode) { KVF_ASSERT(device != VK_NULL_HANDLE); + #ifdef KVF_IMPL_VK_NO_PROTOTYPES + __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); + 
KVF_ASSERT(kvf_device != NULL); + #endif VkSamplerCreateInfo info = {}; info.sType = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO; info.magFilter = filters; @@ -1909,7 +2171,7 @@ VkSampler kvfCreateSampler(VkDevice device, VkFilter filters, VkSamplerAddressMo info.anisotropyEnable = VK_FALSE; info.maxAnisotropy = 1.0f; VkSampler sampler; - __kvfCheckVk(vkCreateSampler(device, &info, NULL, &sampler)); + __kvfCheckVk(KVF_GET_DEVICE_FUNCTION(vkCreateSampler)(device, &info, NULL, &sampler)); return sampler; } @@ -1918,19 +2180,27 @@ void kvfDestroySampler(VkDevice device, VkSampler sampler) { if(sampler == VK_NULL_HANDLE) return; KVF_ASSERT(device != VK_NULL_HANDLE); - vkDestroySampler(device, sampler, NULL); + #ifdef KVF_IMPL_VK_NO_PROTOTYPES + __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); + KVF_ASSERT(kvf_device != NULL); + #endif + KVF_GET_DEVICE_FUNCTION(vkDestroySampler)(device, sampler, NULL); } VkBuffer kvfCreateBuffer(VkDevice device, VkBufferUsageFlags usage, VkDeviceSize size) { KVF_ASSERT(device != VK_NULL_HANDLE); + #ifdef KVF_IMPL_VK_NO_PROTOTYPES + __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); + KVF_ASSERT(kvf_device != NULL); + #endif VkBufferCreateInfo buffer_info = {}; buffer_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO; buffer_info.size = size; buffer_info.usage = usage; buffer_info.sharingMode = VK_SHARING_MODE_EXCLUSIVE; VkBuffer buffer; - __kvfCheckVk(vkCreateBuffer(device, &buffer_info, NULL, &buffer)); + __kvfCheckVk(KVF_GET_DEVICE_FUNCTION(vkCreateBuffer)(device, &buffer_info, NULL, &buffer)); return buffer; } @@ -1939,9 +2209,13 @@ void kvfCopyBufferToBuffer(VkCommandBuffer cmd, VkBuffer dst, VkBuffer src, size KVF_ASSERT(cmd != VK_NULL_HANDLE); KVF_ASSERT(dst != VK_NULL_HANDLE); KVF_ASSERT(src != VK_NULL_HANDLE); + #ifdef KVF_IMPL_VK_NO_PROTOTYPES + __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkCommandBuffer(cmd); + KVF_ASSERT(kvf_device != NULL); + #endif VkBufferCopy copy_region = {}; copy_region.size = size; - vkCmdCopyBuffer(cmd, src, dst, 1, &copy_region); + KVF_GET_DEVICE_FUNCTION(vkCmdCopyBuffer)(cmd, src, dst, 1, &copy_region); } void kvfCopyBufferToImage(VkCommandBuffer cmd, VkImage dst, VkBuffer src, size_t buffer_offset, VkImageAspectFlagBits aspect, VkExtent3D extent) @@ -1949,6 +2223,10 @@ KVF_ASSERT(cmd != VK_NULL_HANDLE); KVF_ASSERT(dst != VK_NULL_HANDLE); KVF_ASSERT(src != VK_NULL_HANDLE); + #ifdef KVF_IMPL_VK_NO_PROTOTYPES + __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkCommandBuffer(cmd); + KVF_ASSERT(kvf_device != NULL); + #endif VkOffset3D offset = { 0, 0, 0 }; VkBufferImageCopy region = {}; region.bufferOffset = buffer_offset; @@ -1960,7 +2238,7 @@ void kvfCopyBufferToImage(VkCommandBuffer cmd, VkImage dst, VkBuffer src, size_t region.imageSubresource.layerCount = 1; region.imageOffset = offset; region.imageExtent = extent; - vkCmdCopyBufferToImage(cmd, src, dst, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &region); + KVF_GET_DEVICE_FUNCTION(vkCmdCopyBufferToImage)(cmd, src, dst, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &region); } void kvfDestroyBuffer(VkDevice device, VkBuffer buffer) @@ -1968,13 +2246,21 @@ if(buffer != VK_NULL_HANDLE) return; KVF_ASSERT(device != VK_NULL_HANDLE); - vkDestroyBuffer(device, buffer, NULL); + #ifdef KVF_IMPL_VK_NO_PROTOTYPES + __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); + KVF_ASSERT(kvf_device != NULL); + #endif + 
KVF_GET_DEVICE_FUNCTION(vkDestroyBuffer)(device, buffer, NULL); } VkFramebuffer kvfCreateFramebuffer(VkDevice device, VkRenderPass render_pass, VkImageView* image_views, size_t image_views_count, VkExtent2D extent) { KVF_ASSERT(device != VK_NULL_HANDLE); KVF_ASSERT(image_views != NULL); + #ifdef KVF_IMPL_VK_NO_PROTOTYPES + __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); + KVF_ASSERT(kvf_device != NULL); + #endif VkFramebufferCreateInfo framebuffer_info = {}; framebuffer_info.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO; @@ -1985,7 +2271,7 @@ VkFramebuffer kvfCreateFramebuffer(VkDevice device, VkRenderPass render_pass, Vk framebuffer_info.height = extent.height; framebuffer_info.layers = 1; VkFramebuffer framebuffer = VK_NULL_HANDLE; - __kvfCheckVk(vkCreateFramebuffer(device, &framebuffer_info, NULL, &framebuffer)); + __kvfCheckVk(KVF_GET_DEVICE_FUNCTION(vkCreateFramebuffer)(device, &framebuffer_info, NULL, &framebuffer)); __kvfAddFramebufferToArray(framebuffer, extent); return framebuffer; } @@ -2014,46 +2300,69 @@ VkCommandBuffer kvfCreateCommandBuffer(VkDevice device) VkCommandBuffer kvfCreateCommandBufferLeveled(VkDevice device, VkCommandBufferLevel level) { KVF_ASSERT(device != VK_NULL_HANDLE); - __KvfDevice* kvfdevice = __kvfGetKvfDeviceFromVkDevice(device); - KVF_ASSERT(kvfdevice != NULL); + __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); + KVF_ASSERT(kvf_device != NULL); - VkCommandPool pool = kvfdevice->cmd_pool; + VkCommandPool pool = kvf_device->cmd_pool; VkCommandBuffer buffer; VkCommandBufferAllocateInfo alloc_info = {}; alloc_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO; alloc_info.commandPool = pool; alloc_info.level = level; alloc_info.commandBufferCount = 1; - __kvfCheckVk(vkAllocateCommandBuffers(device, &alloc_info, &buffer)); + __kvfCheckVk(KVF_GET_DEVICE_FUNCTION(vkAllocateCommandBuffers)(device, &alloc_info, &buffer)); + + if(kvf_device->cmd_buffers_size == kvf_device->cmd_buffers_capacity) + { + // Resize the dynamic array if necessary + kvf_device->cmd_buffers_capacity += KVF_COMMAND_POOL_CAPACITY; + kvf_device->cmd_buffers = (VkCommandBuffer*)KVF_REALLOC(kvf_device->cmd_buffers, kvf_device->cmd_buffers_capacity * sizeof(VkCommandBuffer)); + KVF_ASSERT(kvf_device->cmd_buffers != NULL && "allocation failed :("); + kvf_device->cmd_buffers[kvf_device->cmd_buffers_size] = buffer; + kvf_device->cmd_buffers_size++; + } + return buffer; } void kvfBeginCommandBuffer(VkCommandBuffer buffer, VkCommandBufferUsageFlags usage) { KVF_ASSERT(buffer != VK_NULL_HANDLE); + #ifdef KVF_IMPL_VK_NO_PROTOTYPES + __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkCommandBuffer(buffer); + KVF_ASSERT(kvf_device != NULL); + #endif VkCommandBufferBeginInfo begin_info = {}; begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO; begin_info.flags = usage; - __kvfCheckVk(vkBeginCommandBuffer(buffer, &begin_info)); + __kvfCheckVk(KVF_GET_DEVICE_FUNCTION(vkBeginCommandBuffer)(buffer, &begin_info)); } void kvfEndCommandBuffer(VkCommandBuffer buffer) { KVF_ASSERT(buffer != VK_NULL_HANDLE); - __kvfCheckVk(vkEndCommandBuffer(buffer)); + #ifdef KVF_IMPL_VK_NO_PROTOTYPES + __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkCommandBuffer(buffer); + KVF_ASSERT(kvf_device != NULL); + #endif + __kvfCheckVk(KVF_GET_DEVICE_FUNCTION(vkEndCommandBuffer)(buffer)); } void kvfSubmitCommandBuffer(VkDevice device, VkCommandBuffer buffer, KvfQueueType queue, VkSemaphore signal, VkSemaphore wait, VkFence fence, VkPipelineStageFlags* stages) { KVF_ASSERT(device != 
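The cmd_buffers / cmd_buffers_size / cmd_buffers_capacity fields added to __KvfDevice act as a simple grow-on-demand registry of allocated command buffers, which is what lets __kvfGetKvfDeviceFromVkCommandBuffer map a VkCommandBuffer back to its owning device. A generic sketch of that append-with-realloc pattern, using hypothetical names rather than kvf's internals:

#include <stdlib.h>
#include <assert.h>

#define BLOCK_SIZE 512 /* same growth step as KVF_COMMAND_POOL_CAPACITY in the patch */

typedef struct registry
{
	void** items;
	size_t size;
	size_t capacity;
} registry;

/* Append one item, growing the backing array by a fixed block when it is full. */
static void registry_push(registry* r, void* item)
{
	if(r->size == r->capacity)
	{
		r->capacity += BLOCK_SIZE;
		r->items = (void**)realloc(r->items, r->capacity * sizeof(void*));
		assert(r->items != NULL && "allocation failed");
	}
	r->items[r->size] = item;
	r->size++;
}

int main(void)
{
	registry r = { NULL, 0, 0 };
	int dummy;
	registry_push(&r, &dummy);
	free(r.items);
	return 0;
}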
VK_NULL_HANDLE); + #ifdef KVF_IMPL_VK_NO_PROTOTYPES + __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); + KVF_ASSERT(kvf_device != NULL); + #endif VkSemaphore signal_semaphores[1]; VkSemaphore wait_semaphores[1]; signal_semaphores[0] = signal; wait_semaphores[0] = wait; if(fence != VK_NULL_HANDLE) - vkResetFences(device, 1, &fence); + KVF_GET_DEVICE_FUNCTION(vkResetFences)(device, 1, &fence); VkSubmitInfo submit_info = {}; submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO; @@ -2064,23 +2373,27 @@ void kvfSubmitCommandBuffer(VkDevice device, VkCommandBuffer buffer, KvfQueueTyp submit_info.pCommandBuffers = &buffer; submit_info.signalSemaphoreCount = (signal == VK_NULL_HANDLE ? 0 : 1); submit_info.pSignalSemaphores = signal_semaphores; - __kvfCheckVk(vkQueueSubmit(kvfGetDeviceQueue(device, queue), 1, &submit_info, fence)); + __kvfCheckVk(KVF_GET_DEVICE_FUNCTION(vkQueueSubmit)(kvfGetDeviceQueue(device, queue), 1, &submit_info, fence)); } void kvfSubmitSingleTimeCommandBuffer(VkDevice device, VkCommandBuffer buffer, KvfQueueType queue, VkFence fence) { KVF_ASSERT(device != VK_NULL_HANDLE); + #ifdef KVF_IMPL_VK_NO_PROTOTYPES + __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); + KVF_ASSERT(kvf_device != NULL); + #endif if(fence != VK_NULL_HANDLE) - vkResetFences(device, 1, &fence); + KVF_GET_DEVICE_FUNCTION(vkResetFences)(device, 1, &fence); VkSubmitInfo submit_info = {}; submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO; submit_info.commandBufferCount = 1; submit_info.pCommandBuffers = &buffer; - __kvfCheckVk(vkQueueSubmit(kvfGetDeviceQueue(device, queue), 1, &submit_info, fence)); + __kvfCheckVk(KVF_GET_DEVICE_FUNCTION(vkQueueSubmit)(kvfGetDeviceQueue(device, queue), 1, &submit_info, fence)); if(fence != VK_NULL_HANDLE) - vkWaitForFences(device, 1, &fence, VK_TRUE, UINT64_MAX); + KVF_GET_DEVICE_FUNCTION(vkWaitForFences)(device, 1, &fence, VK_TRUE, UINT64_MAX); } VkAttachmentDescription kvfBuildAttachmentDescription(KvfImageType type, VkFormat format, VkImageLayout initial, VkImageLayout final, bool clear, VkSampleCountFlagBits samples) @@ -2169,12 +2482,12 @@ VkRenderPass kvfCreateRenderPassWithSubpassDependencies(VkDevice device, VkAttac if(color_attachment_count != 0) { color_references = (VkAttachmentReference*)KVF_MALLOC(color_attachment_count * sizeof(VkAttachmentReference)); - KVF_ASSERT(color_references != NULL); + KVF_ASSERT(color_references != NULL && "allocation failed :("); } if(depth_attachment_count != 0) { depth_references = (VkAttachmentReference*)KVF_MALLOC(depth_attachment_count * sizeof(VkAttachmentReference)); - KVF_ASSERT(depth_references != NULL); + KVF_ASSERT(depth_references != NULL && "allocation failed :("); } for(size_t i = 0, c = 0, d = 0; i < attachments_count; i++) @@ -2194,6 +2507,11 @@ VkRenderPass kvfCreateRenderPassWithSubpassDependencies(VkDevice device, VkAttac } } + #ifdef KVF_IMPL_VK_NO_PROTOTYPES + __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); + KVF_ASSERT(kvf_device != NULL); + #endif + VkSubpassDescription subpass = {}; subpass.pipelineBindPoint = bind_point; subpass.colorAttachmentCount = color_attachment_count; @@ -2210,7 +2528,7 @@ VkRenderPass kvfCreateRenderPassWithSubpassDependencies(VkDevice device, VkAttac renderpass_create_info.pDependencies = dependencies; VkRenderPass render_pass = VK_NULL_HANDLE; - __kvfCheckVk(vkCreateRenderPass(device, &renderpass_create_info, NULL, &render_pass)); + __kvfCheckVk(KVF_GET_DEVICE_FUNCTION(vkCreateRenderPass)(device, &renderpass_create_info, NULL, 
&render_pass)); KVF_FREE(color_references); KVF_FREE(depth_references); return render_pass; @@ -2221,13 +2539,21 @@ void kvfDestroyRenderPass(VkDevice device, VkRenderPass renderPass) if(renderPass == VK_NULL_HANDLE) return; KVF_ASSERT(device != VK_NULL_HANDLE); - vkDestroyRenderPass(device, renderPass, NULL); + #ifdef KVF_IMPL_VK_NO_PROTOTYPES + __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); + KVF_ASSERT(kvf_device != NULL); + #endif + KVF_GET_DEVICE_FUNCTION(vkDestroyRenderPass)(device, renderPass, NULL); } void kvfBeginRenderPass(VkRenderPass pass, VkCommandBuffer cmd, VkFramebuffer framebuffer, VkExtent2D framebuffer_extent, VkClearValue* clears, size_t clears_count) { KVF_ASSERT(pass != VK_NULL_HANDLE); KVF_ASSERT(framebuffer != VK_NULL_HANDLE); + #ifdef KVF_IMPL_VK_NO_PROTOTYPES + __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkCommandBuffer(cmd); + KVF_ASSERT(kvf_device != NULL); + #endif VkOffset2D offset = { 0, 0 }; VkRenderPassBeginInfo renderpass_info = {}; @@ -2238,18 +2564,22 @@ void kvfBeginRenderPass(VkRenderPass pass, VkCommandBuffer cmd, VkFramebuffer fr renderpass_info.renderArea.extent = framebuffer_extent; renderpass_info.clearValueCount = clears_count; renderpass_info.pClearValues = clears; - vkCmdBeginRenderPass(cmd, &renderpass_info, VK_SUBPASS_CONTENTS_INLINE); + KVF_GET_DEVICE_FUNCTION(vkCmdBeginRenderPass)(cmd, &renderpass_info, VK_SUBPASS_CONTENTS_INLINE); } VkShaderModule kvfCreateShaderModule(VkDevice device, uint32_t* code, size_t size) { KVF_ASSERT(device != VK_NULL_HANDLE); + #ifdef KVF_IMPL_VK_NO_PROTOTYPES + __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); + KVF_ASSERT(kvf_device != NULL); + #endif VkShaderModuleCreateInfo createInfo = {}; createInfo.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO; createInfo.codeSize = size * sizeof(uint32_t); createInfo.pCode = code; VkShaderModule shader = VK_NULL_HANDLE; - __kvfCheckVk(vkCreateShaderModule(device, &createInfo, NULL, &shader)); + __kvfCheckVk(KVF_GET_DEVICE_FUNCTION(vkCreateShaderModule)(device, &createInfo, NULL, &shader)); return shader; } @@ -2258,19 +2588,27 @@ void kvfDestroyShaderModule(VkDevice device, VkShaderModule shader) if(shader == VK_NULL_HANDLE) return; KVF_ASSERT(device != VK_NULL_HANDLE); - vkDestroyShaderModule(device, shader, NULL); + #ifdef KVF_IMPL_VK_NO_PROTOTYPES + __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); + KVF_ASSERT(kvf_device != NULL); + #endif + KVF_GET_DEVICE_FUNCTION(vkDestroyShaderModule)(device, shader, NULL); } VkDescriptorSetLayout kvfCreateDescriptorSetLayout(VkDevice device, VkDescriptorSetLayoutBinding* bindings, size_t bindings_count) { KVF_ASSERT(device != VK_NULL_HANDLE); + #ifdef KVF_IMPL_VK_NO_PROTOTYPES + __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); + KVF_ASSERT(kvf_device != NULL); + #endif VkDescriptorSetLayoutCreateInfo layout_info = {}; layout_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO; layout_info.bindingCount = bindings_count; layout_info.pBindings = bindings; VkDescriptorSetLayout layout; - __kvfCheckVk(vkCreateDescriptorSetLayout(device, &layout_info, NULL, &layout)); + __kvfCheckVk(KVF_GET_DEVICE_FUNCTION(vkCreateDescriptorSetLayout)(device, &layout_info, NULL, &layout)); return layout; } @@ -2279,7 +2617,11 @@ void kvfDestroyDescriptorSetLayout(VkDevice device, VkDescriptorSetLayout layout if(layout == VK_NULL_HANDLE) return; KVF_ASSERT(device != VK_NULL_HANDLE); - vkDestroyDescriptorSetLayout(device, layout, NULL); + #ifdef 
KVF_IMPL_VK_NO_PROTOTYPES + __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); + KVF_ASSERT(kvf_device != NULL); + #endif + KVF_GET_DEVICE_FUNCTION(vkDestroyDescriptorSetLayout)(device, layout, NULL); } VkDescriptorSet kvfAllocateDescriptorSet(VkDevice device, VkDescriptorSetLayout layout) @@ -2303,27 +2645,39 @@ VkDescriptorSet kvfAllocateDescriptorSet(VkDevice device, VkDescriptorSetLayout alloc_info.descriptorPool = pool; alloc_info.descriptorSetCount = 1; alloc_info.pSetLayouts = &layout; - __kvfCheckVk(vkAllocateDescriptorSets(device, &alloc_info, &set)); + __kvfCheckVk(KVF_GET_DEVICE_FUNCTION(vkAllocateDescriptorSets)(device, &alloc_info, &set)); KVF_ASSERT(set != VK_NULL_HANDLE); return set; } void kvfUpdateStorageBufferToDescriptorSet(VkDevice device, VkDescriptorSet set, const VkDescriptorBufferInfo* info, uint32_t binding) { + #ifdef KVF_IMPL_VK_NO_PROTOTYPES + __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); + KVF_ASSERT(kvf_device != NULL); + #endif VkWriteDescriptorSet write = kvfWriteStorageBufferToDescriptorSet(device, set, info, binding); - vkUpdateDescriptorSets(device, 1, &write, 0, NULL); + KVF_GET_DEVICE_FUNCTION(vkUpdateDescriptorSets)(device, 1, &write, 0, NULL); } void kvfUpdateUniformBufferToDescriptorSet(VkDevice device, VkDescriptorSet set, const VkDescriptorBufferInfo* info, uint32_t binding) { + #ifdef KVF_IMPL_VK_NO_PROTOTYPES + __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); + KVF_ASSERT(kvf_device != NULL); + #endif VkWriteDescriptorSet write = kvfWriteUniformBufferToDescriptorSet(device, set, info, binding); - vkUpdateDescriptorSets(device, 1, &write, 0, NULL); + KVF_GET_DEVICE_FUNCTION(vkUpdateDescriptorSets)(device, 1, &write, 0, NULL); } void kvfUpdateImageToDescriptorSet(VkDevice device, VkDescriptorSet set, const VkDescriptorImageInfo* info, uint32_t binding) { + #ifdef KVF_IMPL_VK_NO_PROTOTYPES + __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); + KVF_ASSERT(kvf_device != NULL); + #endif VkWriteDescriptorSet write = kvfWriteImageToDescriptorSet(device, set, info, binding); - vkUpdateDescriptorSets(device, 1, &write, 0, NULL); + KVF_GET_DEVICE_FUNCTION(vkUpdateDescriptorSets)(device, 1, &write, 0, NULL); } VkWriteDescriptorSet kvfWriteStorageBufferToDescriptorSet(VkDevice device, VkDescriptorSet set, const VkDescriptorBufferInfo* info, uint32_t binding) @@ -2374,6 +2728,10 @@ VkWriteDescriptorSet kvfWriteImageToDescriptorSet(VkDevice device, VkDescriptorS VkPipelineLayout kvfCreatePipelineLayout(VkDevice device, VkDescriptorSetLayout* set_layouts, size_t set_layouts_count, VkPushConstantRange* pc, size_t pc_count) { KVF_ASSERT(device != VK_NULL_HANDLE); + #ifdef KVF_IMPL_VK_NO_PROTOTYPES + __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); + KVF_ASSERT(kvf_device != NULL); + #endif VkPipelineLayoutCreateInfo pipeline_layout_info = {}; pipeline_layout_info.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO; pipeline_layout_info.setLayoutCount = set_layouts_count; @@ -2382,7 +2740,7 @@ VkPipelineLayout kvfCreatePipelineLayout(VkDevice device, VkDescriptorSetLayout* pipeline_layout_info.pPushConstantRanges = pc; VkPipelineLayout layout; - __kvfCheckVk(vkCreatePipelineLayout(device, &pipeline_layout_info, NULL, &layout)); + __kvfCheckVk(KVF_GET_DEVICE_FUNCTION(vkCreatePipelineLayout)(device, &pipeline_layout_info, NULL, &layout)); return layout; } @@ -2391,7 +2749,11 @@ void kvfDestroyPipelineLayout(VkDevice device, VkPipelineLayout layout) if(layout == VK_NULL_HANDLE) return; 
KVF_ASSERT(device != VK_NULL_HANDLE); - vkDestroyPipelineLayout(device, layout, NULL); + #ifdef KVF_IMPL_VK_NO_PROTOTYPES + __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); + KVF_ASSERT(kvf_device != NULL); + #endif + KVF_GET_DEVICE_FUNCTION(vkDestroyPipelineLayout)(device, layout, NULL); } void kvfResetDeviceDescriptorPools(VkDevice device) @@ -2401,7 +2763,7 @@ void kvfResetDeviceDescriptorPools(VkDevice device) KVF_ASSERT(kvf_device != NULL); for(uint32_t i = 0; i < kvf_device->sets_pools_size; i++) { - vkResetDescriptorPool(device, kvf_device->sets_pools[i].pool, 0); + KVF_GET_DEVICE_FUNCTION(vkResetDescriptorPool)(device, kvf_device->sets_pools[i].pool, 0); kvf_device->sets_pools[i].size = 0; } } @@ -2409,6 +2771,7 @@ void kvfResetDeviceDescriptorPools(VkDevice device) KvfGraphicsPipelineBuilder* kvfCreateGPipelineBuilder() { KvfGraphicsPipelineBuilder* builder = (KvfGraphicsPipelineBuilder*)KVF_MALLOC(sizeof(KvfGraphicsPipelineBuilder)); + KVF_ASSERT(builder != NULL && "allocation failed :("); memset(builder, 0, sizeof(KvfGraphicsPipelineBuilder)); kvfGPipelineBuilderReset(builder); return builder; @@ -2539,10 +2902,10 @@ void kvfGPipelineBuilderSetVertexInputs(KvfGraphicsPipelineBuilder* builder, VkV KVF_ASSERT(builder != NULL); KVF_ASSERT(attributes != NULL); VkVertexInputBindingDescription* binds_ptr = (VkVertexInputBindingDescription*)KVF_MALLOC(sizeof(VkVertexInputBindingDescription)); - KVF_ASSERT(binds_ptr != NULL); + KVF_ASSERT(binds_ptr != NULL && "allocation failed :("); *binds_ptr = binds; VkVertexInputAttributeDescription* attributes_descriptions = (VkVertexInputAttributeDescription*)KVF_MALLOC(sizeof(VkVertexInputAttributeDescription) * attributes_count); - KVF_ASSERT(attributes_descriptions != NULL); + KVF_ASSERT(attributes_descriptions != NULL && "allocation failed :("); memcpy(attributes_descriptions, attributes, sizeof(VkVertexInputAttributeDescription) * attributes_count); builder->vertex_input_state.vertexBindingDescriptionCount = 1; builder->vertex_input_state.pVertexBindingDescriptions = binds_ptr; @@ -2557,7 +2920,7 @@ void kvfGPipelineBuilderAddShaderStage(KvfGraphicsPipelineBuilder* builder, VkSh KVF_ASSERT(builder->shader_stages != NULL); memset(&builder->shader_stages[builder->shader_stages_count], 0, sizeof(VkPipelineShaderStageCreateInfo)); char* entry_ptr = (char*)KVF_MALLOC(strlen(entry)); - KVF_ASSERT(entry_ptr != NULL); + KVF_ASSERT(entry_ptr != NULL && "allocation failed :("); strcpy(entry_ptr, entry); builder->shader_stages[builder->shader_stages_count].sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO; builder->shader_stages[builder->shader_stages_count].stage = stage; @@ -2625,15 +2988,23 @@ VkPipeline kvfCreateGraphicsPipeline(VkDevice device, VkPipelineLayout layout, K pipeline_info.basePipelineHandle = VK_NULL_HANDLE; pipeline_info.pDepthStencilState = &builder->depth_stencil_state; + #ifdef KVF_IMPL_VK_NO_PROTOTYPES + __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); + KVF_ASSERT(kvf_device != NULL); + #endif VkPipeline pipeline; - __kvfCheckVk(vkCreateGraphicsPipelines(device, VK_NULL_HANDLE, 1, &pipeline_info, NULL, &pipeline)); + __kvfCheckVk(KVF_GET_DEVICE_FUNCTION(vkCreateGraphicsPipelines)(device, VK_NULL_HANDLE, 1, &pipeline_info, NULL, &pipeline)); return pipeline; } void kvfDestroyPipeline(VkDevice device, VkPipeline pipeline) { KVF_ASSERT(device != VK_NULL_HANDLE); - vkDestroyPipeline(device, pipeline, NULL); + #ifdef KVF_IMPL_VK_NO_PROTOTYPES + __KvfDevice* kvf_device = 
__kvfGetKvfDeviceFromVkDevice(device); + KVF_ASSERT(kvf_device != NULL); + #endif + KVF_GET_DEVICE_FUNCTION(vkDestroyPipeline)(device, pipeline, NULL); } #endif // KVF_IMPLEMENTATION From 4ed84d43c100db706beeee74059ed1c8113eb865 Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Fri, 20 Sep 2024 17:02:55 +0200 Subject: [PATCH 033/131] fixing SDLManager --- runtime/Includes/Core/Application.h | 2 +- runtime/Includes/Renderer/RenderCore.h | 8 ++++---- runtime/Sources/Core/Application.cpp | 4 +++- runtime/Sources/Core/SDLManager.cpp | 20 ++++++++++---------- runtime/Sources/Renderer/RenderCore.cpp | 6 +++--- 5 files changed, 21 insertions(+), 19 deletions(-) diff --git a/runtime/Includes/Core/Application.h b/runtime/Includes/Core/Application.h index 7521d77..f0b265d 100644 --- a/runtime/Includes/Core/Application.h +++ b/runtime/Includes/Core/Application.h @@ -47,12 +47,12 @@ namespace mlx ~Application(); private: - RenderCore m_render_core; FpsManager m_fps; Inputs m_in; ImageRegistry m_image_registry; std::vector> m_graphics; std::function f_loop_hook; + std::unique_ptr p_render_core; Handle p_param = nullptr; }; } diff --git a/runtime/Includes/Renderer/RenderCore.h b/runtime/Includes/Renderer/RenderCore.h index 36947a6..a119534 100644 --- a/runtime/Includes/Renderer/RenderCore.h +++ b/runtime/Includes/Renderer/RenderCore.h @@ -9,9 +9,9 @@ namespace mlx class RenderCore { - friend class Application; - public: + RenderCore(); + [[nodiscard]] MLX_FORCEINLINE VkInstance GetInstance() const noexcept { return m_instance; } [[nodiscard]] MLX_FORCEINLINE VkInstance& GetInstanceRef() noexcept { return m_instance; } [[nodiscard]] MLX_FORCEINLINE VkDevice GetDevice() const noexcept { return m_device; } @@ -31,12 +31,12 @@ namespace mlx #undef MLX_VULKAN_INSTANCE_FUNCTION #undef MLX_VULKAN_DEVICE_FUNCTION + ~RenderCore(); + private: - RenderCore(); void LoadKVFGlobalVulkanFunctionPointers() const noexcept; void LoadKVFInstanceVulkanFunctionPointers() const noexcept; void LoadKVFDeviceVulkanFunctionPointers() const noexcept; - ~RenderCore(); private: static RenderCore* s_instance; diff --git a/runtime/Sources/Core/Application.cpp b/runtime/Sources/Core/Application.cpp index 29ffbbb..b854f34 100644 --- a/runtime/Sources/Core/Application.cpp +++ b/runtime/Sources/Core/Application.cpp @@ -8,7 +8,7 @@ namespace mlx { - Application::Application() : m_render_core(), m_fps(), m_in() + Application::Application() : m_fps(), m_in() { EventBus::RegisterListener({[](const EventBase& event) { @@ -18,6 +18,7 @@ namespace mlx m_fps.Init(); SDLManager::Get().Init(); + p_render_core = std::make_unique(); } void Application::Run() noexcept @@ -87,6 +88,7 @@ namespace mlx Application::~Application() { + p_render_core.reset(); SDLManager::Get().Shutdown(); } } diff --git a/runtime/Sources/Core/SDLManager.cpp b/runtime/Sources/Core/SDLManager.cpp index fa0801f..f96fa58 100644 --- a/runtime/Sources/Core/SDLManager.cpp +++ b/runtime/Sources/Core/SDLManager.cpp @@ -17,7 +17,7 @@ namespace mlx constexpr const std::uint32_t amask = 0xff000000; #endif - namespace details + namespace Internal { struct WindowInfos { @@ -101,9 +101,9 @@ namespace mlx DebugLog("SDL Manager initialized"); } - void* SDLManager::CreateWindow(const std::string& title, std::size_t w, std::size_t h, bool hidden) + Handle SDLManager::CreateWindow(const std::string& title, std::size_t w, std::size_t h, bool hidden) { - details::WindowInfos* infos = new details::WindowInfos; + Internal::WindowInfos* infos = new Internal::WindowInfos; Verify(infos != nullptr, "SDL : 
window allocation failed"); infos->window = SDL_CreateWindow(title.c_str(), SDL_WINDOWPOS_CENTERED, SDL_WINDOWPOS_CENTERED, w, h, SDL_WINDOW_VULKAN | (hidden ? SDL_WINDOW_HIDDEN : SDL_WINDOW_SHOWN)); @@ -117,11 +117,11 @@ namespace mlx return infos; } - void SDLManager::DestroyWindow(void* window) noexcept + void SDLManager::DestroyWindow(Handle window) noexcept { Verify(m_windows_registry.find(window) != m_windows_registry.end(), "SDL : cannot destroy window; unknown window pointer"); - details::WindowInfos* infos = static_cast(window); + Internal::WindowInfos* infos = static_cast(window); if(infos->window != nullptr) SDL_DestroyWindow(infos->window); if(infos->icon != nullptr) @@ -134,7 +134,7 @@ namespace mlx VkSurfaceKHR SDLManager::CreateVulkanSurface(Handle window, VkInstance instance) const noexcept { VkSurfaceKHR surface; - if(!SDL_Vulkan_CreateSurface(static_cast(window), instance, &surface)) + if(!SDL_Vulkan_CreateSurface(static_cast(window)->window, instance, &surface)) FatalError("SDL : could not create a Vulkan surface; %", SDL_GetError()); return surface; } @@ -169,27 +169,27 @@ namespace mlx Vec2ui SDLManager::GetVulkanDrawableSize(Handle window) const noexcept { Vec2i extent; - SDL_Vulkan_GetDrawableSize(static_cast(window), &extent.x, &extent.y); + SDL_Vulkan_GetDrawableSize(static_cast(window)->window, &extent.x, &extent.y); return Vec2ui{ extent }; } void SDLManager::MoveMouseOnWindow(Handle window, int x, int y) const noexcept { - SDL_WarpMouseInWindow(static_cast(window), x, y); + SDL_WarpMouseInWindow(static_cast(window)->window, x, y); SDL_PumpEvents(); } void SDLManager::GetScreenSizeWindowIsOn(Handle window, int* x, int* y) const noexcept { SDL_DisplayMode DM; - SDL_GetDesktopDisplayMode(SDL_GetWindowDisplayIndex(static_cast(window)), &DM); + SDL_GetDesktopDisplayMode(SDL_GetWindowDisplayIndex(static_cast(window)->window), &DM); *x = DM.w; *y = DM.h; } void SDLManager::SetWindowPosition(Handle window, int x, int y) const noexcept { - SDL_SetWindowPosition(static_cast(window), x, y); + SDL_SetWindowPosition(static_cast(window)->window, x, y); } std::int32_t SDLManager::GetX() const noexcept diff --git a/runtime/Sources/Renderer/RenderCore.cpp b/runtime/Sources/Renderer/RenderCore.cpp index c04c52f..bb7bc80 100644 --- a/runtime/Sources/Renderer/RenderCore.cpp +++ b/runtime/Sources/Renderer/RenderCore.cpp @@ -27,19 +27,19 @@ namespace mlx void ErrorCallback(const char* message) noexcept { - FatalError(message, 0, "", ""); + Logs::Report(LogType::FatalError, 0, "", "", message); std::cout << std::endl; } void ValidationErrorCallback(const char* message) noexcept { - Error(message, 0, "", ""); + Logs::Report(LogType::Error, 0, "", "", message); std::cout << std::endl; } void ValidationWarningCallback(const char* message) noexcept { - Warning(message, 0, "", ""); + Logs::Report(LogType::Warning, 0, "", "", message); std::cout << std::endl; } From 4e1ae347a81bc262da24902c87c6e1a7cf48a2de Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Fri, 20 Sep 2024 17:29:42 +0200 Subject: [PATCH 034/131] removing all singletons --- runtime/Includes/Core/Application.h | 7 +++++++ runtime/Includes/Core/Memory.h | 12 +++++++----- runtime/Includes/Core/Profiler.h | 15 ++++++++------- runtime/Includes/Core/SDLManager.h | 20 ++++++++------------ runtime/Includes/PreCompiled.h | 1 + runtime/Includes/Utils/Singleton.h | 20 -------------------- runtime/Sources/Core/Application.cpp | 13 ++++++++++--- runtime/Sources/Core/Memory.cpp | 8 ++++++++ runtime/Sources/Core/Profiler.cpp | 3 +++ 
runtime/Sources/Core/SDLManager.cpp | 9 +++++++-- 10 files changed, 59 insertions(+), 49 deletions(-) delete mode 100644 runtime/Includes/Utils/Singleton.h diff --git a/runtime/Includes/Core/Application.h b/runtime/Includes/Core/Application.h index f0b265d..bc8c5c9 100644 --- a/runtime/Includes/Core/Application.h +++ b/runtime/Includes/Core/Application.h @@ -4,6 +4,8 @@ #include #include #include +#include +#include #include namespace mlx @@ -47,12 +49,17 @@ namespace mlx ~Application(); private: + std::unique_ptr p_mem_manager; // Putting ptr here to initialise them before inputs, even if it f*cks the padding + std::unique_ptr p_sdl_manager; FpsManager m_fps; Inputs m_in; ImageRegistry m_image_registry; std::vector> m_graphics; std::function f_loop_hook; std::unique_ptr p_render_core; + #ifdef PROFILER + std::unique_ptr p_profiler; + #endif Handle p_param = nullptr; }; } diff --git a/runtime/Includes/Core/Memory.h b/runtime/Includes/Core/Memory.h index ad0bff9..7597f01 100644 --- a/runtime/Includes/Core/Memory.h +++ b/runtime/Includes/Core/Memory.h @@ -3,21 +3,23 @@ namespace mlx { - class MemManager : public Singleton + class MemManager { - friend class Singleton; - public: + MemManager(); + static void* Malloc(std::size_t size); static void* Calloc(std::size_t n, std::size_t size); static void* Realloc(void* ptr, std::size_t size); static void Free(void* ptr); - private: - MemManager() = default; + inline static bool IsInit() noexcept { return s_instance != nullptr; } + inline static MemManager& Get() noexcept { return *s_instance; } + ~MemManager(); private: + static MemManager* s_instance; inline static std::list s_blocks; }; } diff --git a/runtime/Includes/Core/Profiler.h b/runtime/Includes/Core/Profiler.h index ec977bc..3d5e56d 100644 --- a/runtime/Includes/Core/Profiler.h +++ b/runtime/Includes/Core/Profiler.h @@ -1,8 +1,6 @@ #ifndef __MLX_PROFILER__ #define __MLX_PROFILER__ -#include - namespace mlx { using FloatingPointMilliseconds = std::chrono::duration; @@ -14,20 +12,21 @@ namespace mlx std::thread::id thread_id; }; - class Profiler : public Singleton + class Profiler { - friend class Singleton; - public: Profiler(const Profiler&) = delete; Profiler(Profiler&&) = delete; + Profiler() { BeginRuntimeSession(); s_instance = this; } void AppendProfileData(ProfileResult&& result); - private: - Profiler() { BeginRuntimeSession(); } + inline static bool IsInit() noexcept { return s_instance != nullptr; } + inline static Profiler& Get() noexcept { return *s_instance; } + ~Profiler(); + private: void BeginRuntimeSession(); void WriteProfile(const ProfileResult& result); void EndRuntimeSession(); @@ -44,6 +43,8 @@ namespace mlx } private: + static Profiler* s_instance; + std::unordered_map> m_profile_data; std::ofstream m_output_stream; std::mutex m_mutex; diff --git a/runtime/Includes/Core/SDLManager.h b/runtime/Includes/Core/SDLManager.h index 61f273d..e314f05 100644 --- a/runtime/Includes/Core/SDLManager.h +++ b/runtime/Includes/Core/SDLManager.h @@ -5,13 +5,10 @@ namespace mlx { - class SDLManager : public Singleton + class SDLManager { - friend class Singleton; - public: - void Init() noexcept; - void Shutdown() noexcept; + SDLManager(); Handle CreateWindow(const std::string& title, std::size_t w, std::size_t h, bool hidden); void DestroyWindow(Handle window) noexcept; @@ -30,18 +27,17 @@ namespace mlx std::int32_t GetXRel() const noexcept; std::int32_t GetYRel() const noexcept; - private: - SDLManager() = default; - ~SDLManager() = default; + inline static bool IsInit() noexcept { 
return s_instance != nullptr; } + inline static SDLManager& Get() noexcept { return *s_instance; } + + ~SDLManager(); private: + static SDLManager* s_instance; + std::unordered_set m_windows_registry; func::function f_callback; void* p_callback_data = nullptr; - std::int32_t m_x; - std::int32_t m_y; - std::int32_t m_rel_x; - std::int32_t m_rel_y; bool m_drop_sdl_responsability = false; }; } diff --git a/runtime/Includes/PreCompiled.h b/runtime/Includes/PreCompiled.h index f948b11..bc3efbc 100644 --- a/runtime/Includes/PreCompiled.h +++ b/runtime/Includes/PreCompiled.h @@ -93,6 +93,7 @@ #include #include #include +#include using Handle = void*; diff --git a/runtime/Includes/Utils/Singleton.h b/runtime/Includes/Utils/Singleton.h deleted file mode 100644 index e5e76ad..0000000 --- a/runtime/Includes/Utils/Singleton.h +++ /dev/null @@ -1,20 +0,0 @@ -#ifndef __MLX_SINGLETON__ -#define __MLX_SINGLETON__ - -#include - -namespace mlx -{ - template - class Singleton : public NonCopyable - { - public: - inline static T& Get() - { - static T instance; - return instance; - } - }; -} - -#endif // __MLX_SINGLETON__ diff --git a/runtime/Sources/Core/Application.cpp b/runtime/Sources/Core/Application.cpp index b854f34..cd53f58 100644 --- a/runtime/Sources/Core/Application.cpp +++ b/runtime/Sources/Core/Application.cpp @@ -8,7 +8,7 @@ namespace mlx { - Application::Application() : m_fps(), m_in() + Application::Application() : p_mem_manager(std::make_unique()), p_sdl_manager(std::make_unique()), m_fps(), m_in() { EventBus::RegisterListener({[](const EventBase& event) { @@ -16,8 +16,11 @@ namespace mlx std::abort(); }, "__MlxApplication" }); + #ifdef PROFILER + p_profiler = std::make_unique(); + #endif + m_fps.Init(); - SDLManager::Get().Init(); p_render_core = std::make_unique(); } @@ -89,6 +92,10 @@ namespace mlx Application::~Application() { p_render_core.reset(); - SDLManager::Get().Shutdown(); + p_sdl_manager.reset(); + #ifdef PROFILER + p_profiler.reset(); + #endif + p_mem_manager.reset(); } } diff --git a/runtime/Sources/Core/Memory.cpp b/runtime/Sources/Core/Memory.cpp index 36f7689..f5f8ce6 100644 --- a/runtime/Sources/Core/Memory.cpp +++ b/runtime/Sources/Core/Memory.cpp @@ -4,6 +4,13 @@ namespace mlx { + MemManager* MemManager::s_instance = nullptr; + + MemManager::MemManager() + { + s_instance = this; + } + void* MemManager::Malloc(std::size_t size) { void* ptr = std::malloc(size); @@ -49,5 +56,6 @@ namespace mlx { std::free(ptr); }); + s_instance = nullptr; } } diff --git a/runtime/Sources/Core/Profiler.cpp b/runtime/Sources/Core/Profiler.cpp index a3649f7..a4bb9d6 100644 --- a/runtime/Sources/Core/Profiler.cpp +++ b/runtime/Sources/Core/Profiler.cpp @@ -4,6 +4,8 @@ namespace mlx { + Profiler* Profiler::s_instance = nullptr; + void Profiler::BeginRuntimeSession() { std::lock_guard lock(m_mutex); @@ -63,5 +65,6 @@ namespace mlx if(!m_runtime_session_began) return; EndRuntimeSession(); + s_instance = nullptr; } } diff --git a/runtime/Sources/Core/SDLManager.cpp b/runtime/Sources/Core/SDLManager.cpp index f96fa58..13031aa 100644 --- a/runtime/Sources/Core/SDLManager.cpp +++ b/runtime/Sources/Core/SDLManager.cpp @@ -26,9 +26,13 @@ namespace mlx }; } - void SDLManager::Init() noexcept + SDLManager* SDLManager::s_instance = nullptr; + + SDLManager::SDLManager() { MLX_PROFILE_FUNCTION(); + s_instance = this; + m_drop_sdl_responsability = SDL_WasInit(SDL_INIT_VIDEO); if(m_drop_sdl_responsability) // is case the mlx is running in a sandbox like MacroUnitTester where SDL is already init return; @@ 
-224,12 +228,13 @@ namespace mlx return y; } - void SDLManager::Shutdown() noexcept + SDLManager::~SDLManager() { if(m_drop_sdl_responsability) return; SDL_QuitSubSystem(SDL_INIT_VIDEO | SDL_INIT_TIMER | SDL_INIT_EVENTS); SDL_Quit(); + s_instance = nullptr; DebugLog("SDL Manager uninitialized"); } } From 8dd6def84d3d215ff5052293891732c127a6b75d Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Fri, 20 Sep 2024 17:41:52 +0200 Subject: [PATCH 035/131] fixing kvf issues --- runtime/Includes/Core/Application.h | 1 + runtime/Sources/Renderer/RenderCore.cpp | 3 +-- third_party/kvf.h | 20 ++++++++++++-------- 3 files changed, 14 insertions(+), 10 deletions(-) diff --git a/runtime/Includes/Core/Application.h b/runtime/Includes/Core/Application.h index bc8c5c9..2233650 100644 --- a/runtime/Includes/Core/Application.h +++ b/runtime/Includes/Core/Application.h @@ -10,6 +10,7 @@ namespace mlx { + // TODO : FIX THIS DAMN GOD CLASS !!!!!!!!!!!!!!!! class Application { public: diff --git a/runtime/Sources/Renderer/RenderCore.cpp b/runtime/Sources/Renderer/RenderCore.cpp index bb7bc80..657f2b0 100644 --- a/runtime/Sources/Renderer/RenderCore.cpp +++ b/runtime/Sources/Renderer/RenderCore.cpp @@ -90,7 +90,6 @@ namespace mlx LoadKVFDeviceVulkanFunctionPointers(); vkDestroySurfaceKHR(m_instance, surface, nullptr); - FatalError("caca"); } #undef MLX_LOAD_FUNCTION @@ -184,7 +183,7 @@ namespace mlx MLX_LOAD_FUNCTION(vkDestroySwapchainKHR); MLX_LOAD_FUNCTION(vkGetSwapchainImagesKHR); MLX_LOAD_FUNCTION(vkQueuePresentKHR); - kvfPassDeviceVulkanFunctionPointers(m_device, &pfns); + kvfPassDeviceVulkanFunctionPointers(m_physical_device, m_device, &pfns); } #undef MLX_LOAD_FUNCTION diff --git a/third_party/kvf.h b/third_party/kvf.h index acb29b2..6b6fcc6 100755 --- a/third_party/kvf.h +++ b/third_party/kvf.h @@ -128,7 +128,7 @@ VkDevice kvfCreateDevice(VkPhysicalDevice physical, const char** extensions, uin VkDevice kvfCreateDefaultDevicePhysicalDeviceAndCustomQueues(VkPhysicalDevice physical, int32_t graphics_queue, int32_t present_queue, int32_t compute_queue); VkDevice kvfCreateDeviceCustomPhysicalDeviceAndQueues(VkPhysicalDevice physical, const char** extensions, uint32_t extensions_count, VkPhysicalDeviceFeatures* features, int32_t graphics_queue, int32_t present_queue, int32_t compute_queue); #ifdef KVF_IMPL_VK_NO_PROTOTYPES - void kvfPassDeviceVulkanFunctionPointers(VkDevice device, const KvfDeviceVulkanFunctions* fns); + void kvfPassDeviceVulkanFunctionPointers(VkPhysicalDevice physical, VkDevice device, const KvfDeviceVulkanFunctions* fns); #endif void kvfDestroyDevice(VkDevice device); @@ -1552,7 +1552,9 @@ VkDevice kvfCreateDevice(VkPhysicalDevice physical, const char** extensions, uin VkDevice device; __kvfCheckVk(KVF_GET_INSTANCE_FUNCTION(vkCreateDevice)(physical, &createInfo, NULL, &device)); - __kvfCompleteDevice(physical, device); + #ifndef KVF_IMPL_VK_NO_PROTOTYPES + __kvfCompleteDevice(physical, device); + #endif return device; } @@ -1621,19 +1623,22 @@ VkDevice kvfCreateDeviceCustomPhysicalDeviceAndQueues(VkPhysicalDevice physical, VkDevice device; __kvfCheckVk(KVF_GET_INSTANCE_FUNCTION(vkCreateDevice)(physical, &createInfo, NULL, &device)); - __kvfCompleteDeviceCustomPhysicalDeviceAndQueues(physical, device, graphics_queue, present_queue, compute_queue); + #ifndef KVF_IMPL_VK_NO_PROTOTYPES + __kvfCompleteDeviceCustomPhysicalDeviceAndQueues(physical, device, graphics_queue, present_queue, compute_queue); + #endif return device; } #ifdef KVF_IMPL_VK_NO_PROTOTYPES - void 
kvfPassDeviceVulkanFunctionPointers(VkDevice device, const KvfDeviceVulkanFunctions* fns) + void kvfPassDeviceVulkanFunctionPointers(VkPhysicalDevice physical, VkDevice device, const KvfDeviceVulkanFunctions* fns) { KVF_ASSERT(device != VK_NULL_HANDLE); KVF_ASSERT(fns != NULL); - __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); + __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkPhysicalDevice(physical); KVF_ASSERT(kvf_device != NULL); kvf_device->fns = *fns; + __kvfCompleteDevice(physical, device); } #endif @@ -2318,10 +2323,9 @@ VkCommandBuffer kvfCreateCommandBufferLeveled(VkDevice device, VkCommandBufferLe kvf_device->cmd_buffers_capacity += KVF_COMMAND_POOL_CAPACITY; kvf_device->cmd_buffers = (VkCommandBuffer*)KVF_REALLOC(kvf_device->cmd_buffers, kvf_device->cmd_buffers_capacity * sizeof(VkCommandBuffer)); KVF_ASSERT(kvf_device->cmd_buffers != NULL && "allocation failed :("); - kvf_device->cmd_buffers[kvf_device->cmd_buffers_size] = buffer; - kvf_device->cmd_buffers_size++; } - + kvf_device->cmd_buffers[kvf_device->cmd_buffers_size] = buffer; + kvf_device->cmd_buffers_size++; return buffer; } From 3f065bc77e801c002964f058363baa902a1097db Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Fri, 20 Sep 2024 21:43:23 +0200 Subject: [PATCH 036/131] fixing bugs --- runtime/Sources/Renderer/Image.cpp | 11 ----------- runtime/Sources/Renderer/Pipelines/Shader.cpp | 2 +- runtime/Sources/Renderer/RenderCore.cpp | 3 +++ 3 files changed, 4 insertions(+), 12 deletions(-) diff --git a/runtime/Sources/Renderer/Image.cpp b/runtime/Sources/Renderer/Image.cpp index 6eff769..03cf018 100644 --- a/runtime/Sources/Renderer/Image.cpp +++ b/runtime/Sources/Renderer/Image.cpp @@ -60,27 +60,16 @@ namespace mlx return; bool is_single_time_cmd_buffer = (cmd == VK_NULL_HANDLE); if(is_single_time_cmd_buffer) - { cmd = kvfCreateCommandBuffer(RenderCore::Get().GetDevice()); - kvfBeginCommandBuffer(cmd, VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT); - } KvfImageType kvf_type = KVF_IMAGE_OTHER; switch(m_type) { case ImageType::Color: kvf_type = KVF_IMAGE_COLOR; break; case ImageType::Depth: kvf_type = KVF_IMAGE_DEPTH; break; - default: break; } kvfTransitionImageLayout(RenderCore::Get().GetDevice(), m_image, kvf_type, cmd, m_format, m_layout, new_layout, is_single_time_cmd_buffer); m_layout = new_layout; - if(is_single_time_cmd_buffer) - { - RenderCore::Get().vkEndCommandBuffer(cmd); - VkFence fence = kvfCreateFence(RenderCore::Get().GetDevice()); - kvfSubmitSingleTimeCommandBuffer(RenderCore::Get().GetDevice(), cmd, KVF_GRAPHICS_QUEUE, fence); - kvfDestroyFence(RenderCore::Get().GetDevice(), fence); - } } void Image::Clear(VkCommandBuffer cmd, Vec4f color) diff --git a/runtime/Sources/Renderer/Pipelines/Shader.cpp b/runtime/Sources/Renderer/Pipelines/Shader.cpp index fd65276..651e560 100644 --- a/runtime/Sources/Renderer/Pipelines/Shader.cpp +++ b/runtime/Sources/Renderer/Pipelines/Shader.cpp @@ -14,7 +14,7 @@ namespace mlx default : FatalError("wtf"); break; } - m_module = kvfCreateShaderModule(RenderCore::Get().GetDevice(), reinterpret_cast(m_bytecode.data()), m_bytecode.size() * 4); + m_module = kvfCreateShaderModule(RenderCore::Get().GetDevice(), reinterpret_cast(m_bytecode.data()), m_bytecode.size() / 4); DebugLog("Vulkan : shader module created"); GeneratePipelineLayout(m_layout); diff --git a/runtime/Sources/Renderer/RenderCore.cpp b/runtime/Sources/Renderer/RenderCore.cpp index 657f2b0..cbb3be0 100644 --- a/runtime/Sources/Renderer/RenderCore.cpp +++ b/runtime/Sources/Renderer/RenderCore.cpp @@ -90,6 
+90,8 @@ namespace mlx LoadKVFDeviceVulkanFunctionPointers(); vkDestroySurfaceKHR(m_instance, surface, nullptr); + + m_allocator.Init(); } #undef MLX_LOAD_FUNCTION @@ -191,6 +193,7 @@ namespace mlx RenderCore::~RenderCore() { WaitDeviceIdle(); + m_allocator.Destroy(); kvfDestroyDevice(m_device); DebugLog("Vulkan : logical device destroyed"); kvfDestroyInstance(m_instance); From 0b3ca8ec0c6953016bdf24eb60b4e1e3aa53c5f2 Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Sat, 21 Sep 2024 12:18:42 +0200 Subject: [PATCH 037/131] adding debug vulkan resources names --- runtime/Includes/Core/Application.inl | 2 +- runtime/Includes/Core/Graphics.inl | 2 +- runtime/Includes/Graphics/Mesh.h | 4 +- runtime/Includes/Renderer/Buffer.h | 13 ++-- runtime/Includes/Renderer/Image.h | 42 ++++------- runtime/Includes/Renderer/Memory.h | 4 +- runtime/Includes/Renderer/RenderCore.h | 6 ++ runtime/Includes/Renderer/Vulkan/VulkanDefs.h | 6 ++ runtime/Sources/Core/Application.cpp | 2 +- runtime/Sources/Graphics/Mesh.cpp | 3 + runtime/Sources/Graphics/PixelPutManager.cpp | 32 -------- runtime/Sources/Graphics/PutPixelManager.cpp | 31 ++++++++ runtime/Sources/Graphics/Scene.cpp | 6 +- runtime/Sources/Graphics/Sprite.cpp | 2 + runtime/Sources/Renderer/Buffer.cpp | 71 ++++++++++++++---- runtime/Sources/Renderer/Descriptor.cpp | 8 ++ runtime/Sources/Renderer/Image.cpp | 73 +++++++++++++++++-- runtime/Sources/Renderer/Memory.cpp | 34 ++++++++- .../Sources/Renderer/Pipelines/Graphics.cpp | 6 ++ runtime/Sources/Renderer/Pipelines/Shader.cpp | 3 + runtime/Sources/Renderer/RenderCore.cpp | 5 +- .../Sources/Renderer/RenderPasses/2DPass.cpp | 5 +- .../Renderer/RenderPasses/FinalPass.cpp | 3 + .../Sources/Renderer/RenderPasses/Passes.cpp | 12 ++- runtime/Sources/Renderer/Renderer.cpp | 12 ++- runtime/Sources/Renderer/SceneRenderer.cpp | 3 + third_party/kvf.h | 20 ++++- 27 files changed, 306 insertions(+), 104 deletions(-) delete mode 100644 runtime/Sources/Graphics/PixelPutManager.cpp create mode 100644 runtime/Sources/Graphics/PutPixelManager.cpp diff --git a/runtime/Includes/Core/Application.inl b/runtime/Includes/Core/Application.inl index 0296216..915b9e9 100644 --- a/runtime/Includes/Core/Application.inl +++ b/runtime/Includes/Core/Application.inl @@ -8,7 +8,7 @@ Error("invalid window ptr (NULL)"); \ return; \ } \ - else if(std::find_if(m_graphics.begin(), m_graphics.end(), [win](const std::unique_ptr& gs){ return *static_cast(win) == gs->GetID(); }) != m_graphics.end()) \ + else if(std::find_if(m_graphics.begin(), m_graphics.end(), [win](const std::unique_ptr& gs){ return *static_cast(win) == gs->GetID(); }) == m_graphics.end()) \ { \ Error("invalid window ptr"); \ return; \ diff --git a/runtime/Includes/Core/Graphics.inl b/runtime/Includes/Core/Graphics.inl index c0f4c02..5ffa9f9 100644 --- a/runtime/Includes/Core/Graphics.inl +++ b/runtime/Includes/Core/Graphics.inl @@ -39,10 +39,10 @@ namespace mlx { Sprite& new_sprite = p_scene->CreateSprite(texture); new_sprite.SetPosition(Vec3f{ static_cast(x), static_cast(y), static_cast(m_current_depth) }); + m_current_depth++; } else sprite->SetPosition(Vec3f{ static_cast(x), static_cast(y), static_cast(m_current_depth) }); - m_current_depth++; } void GraphicsSupport::LoadFont(const std::filesystem::path& filepath, float scale) diff --git a/runtime/Includes/Graphics/Mesh.h b/runtime/Includes/Graphics/Mesh.h index 999213f..215736b 100644 --- a/runtime/Includes/Graphics/Mesh.h +++ b/runtime/Includes/Graphics/Mesh.h @@ -20,12 +20,12 @@ namespace mlx { CPUBuffer vb(vertices.size() * 
sizeof(Vertex)); std::memcpy(vb.GetData(), vertices.data(), vb.GetSize()); - vbo.Init(vb.GetSize()); + vbo.Init(vb.GetSize(), 0, "mlx_mesh"); vbo.SetData(std::move(vb)); CPUBuffer ib(indices.size() * sizeof(std::uint32_t)); std::memcpy(ib.GetData(), indices.data(), ib.GetSize()); - ibo.Init(ib.GetSize()); + ibo.Init(ib.GetSize(), 0, "mlx_mesh"); ibo.SetData(std::move(ib)); triangle_count = vertices.size() / 3; diff --git a/runtime/Includes/Renderer/Buffer.h b/runtime/Includes/Renderer/Buffer.h index d6574fa..27e7716 100644 --- a/runtime/Includes/Renderer/Buffer.h +++ b/runtime/Includes/Renderer/Buffer.h @@ -12,7 +12,7 @@ namespace mlx public: GPUBuffer() = default; - void Init(BufferType type, VkDeviceSize size, VkBufferUsageFlags usage, CPUBuffer data); + void Init(BufferType type, VkDeviceSize size, VkBufferUsageFlags usage, CPUBuffer data, std::string_view debug_name); void Destroy() noexcept; bool CopyFrom(const GPUBuffer& buffer) noexcept; @@ -33,6 +33,9 @@ namespace mlx void PushToGPU() noexcept; protected: + #ifdef DEBUG + std::string m_debug_name; + #endif VkBuffer m_buffer = VK_NULL_HANDLE; VmaAllocation m_allocation; VkDeviceSize m_offset = 0; @@ -40,7 +43,7 @@ namespace mlx void* p_map = nullptr; private: - void CreateBuffer(VkDeviceSize size, VkBufferUsageFlags usage, VmaAllocationCreateInfo alloc_info); + void CreateBuffer(VkDeviceSize size, VkBufferUsageFlags usage, VmaAllocationCreateInfo alloc_info, std::string_view debug_name); private: VkBufferUsageFlags m_usage = 0; @@ -49,7 +52,7 @@ namespace mlx class VertexBuffer : public GPUBuffer { public: - inline void Init(std::uint32_t size, VkBufferUsageFlags additional_flags = 0) { GPUBuffer::Init(BufferType::LowDynamic, size, VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | additional_flags, {}); } + inline void Init(std::uint32_t size, VkBufferUsageFlags additional_flags, std::string_view debug_name) { GPUBuffer::Init(BufferType::LowDynamic, size, VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | additional_flags, {}, std::move(debug_name)); } void SetData(CPUBuffer data); inline void Bind(VkCommandBuffer cmd) const noexcept { VkDeviceSize offset = 0; RenderCore::Get().vkCmdBindVertexBuffers(cmd, 0, 1, &m_buffer, &offset); } }; @@ -57,7 +60,7 @@ namespace mlx class IndexBuffer : public GPUBuffer { public: - inline void Init(std::uint32_t size, VkBufferUsageFlags additional_flags = 0) { GPUBuffer::Init(BufferType::LowDynamic, size, VK_BUFFER_USAGE_INDEX_BUFFER_BIT | additional_flags, {}); } + inline void Init(std::uint32_t size, VkBufferUsageFlags additional_flags, std::string_view debug_name) { GPUBuffer::Init(BufferType::LowDynamic, size, VK_BUFFER_USAGE_INDEX_BUFFER_BIT | additional_flags, {}, std::move(debug_name)); } void SetData(CPUBuffer data); inline void Bind(VkCommandBuffer cmd) const noexcept { RenderCore::Get().vkCmdBindIndexBuffer(cmd, m_buffer, 0, VK_INDEX_TYPE_UINT32); } }; @@ -65,7 +68,7 @@ namespace mlx class UniformBuffer { public: - void Init(std::uint32_t size); + void Init(std::uint32_t size, std::string_view debug_name); void SetData(CPUBuffer data, std::size_t frame_index); void Destroy() noexcept; diff --git a/runtime/Includes/Renderer/Image.h b/runtime/Includes/Renderer/Image.h index ebde1ad..ae070d7 100644 --- a/runtime/Includes/Renderer/Image.h +++ b/runtime/Includes/Renderer/Image.h @@ -14,16 +14,19 @@ namespace mlx public: Image() = default; - inline void Init(VkImage image, VkFormat format, std::uint32_t width, std::uint32_t height, VkImageLayout layout = VK_IMAGE_LAYOUT_UNDEFINED) noexcept + inline void Init(VkImage image, 
VkFormat format, std::uint32_t width, std::uint32_t height, VkImageLayout layout, std::string_view debug_name) noexcept { m_image = image; m_format = format; m_width = width; m_height = height; m_layout = layout; + #ifdef DEBUG + m_debug_name = std::move(debug_name); + #endif } - void Init(ImageType type, std::uint32_t width, std::uint32_t height, VkFormat format, VkImageTiling tiling, VkImageUsageFlags usage, bool is_multisampled = false); + void Init(ImageType type, std::uint32_t width, std::uint32_t height, VkFormat format, VkImageTiling tiling, VkImageUsageFlags usage, bool is_multisampled, std::string_view debug_name); void CreateImageView(VkImageViewType type, VkImageAspectFlags aspectFlags, int layer_count = 1) noexcept; void CreateSampler() noexcept; void TransitionLayout(VkImageLayout new_layout, VkCommandBuffer cmd = VK_NULL_HANDLE); @@ -48,6 +51,9 @@ namespace mlx virtual ~Image() = default; protected: + #ifdef DEBUG + std::string m_debug_name; + #endif VmaAllocation m_allocation; VkImage m_image = VK_NULL_HANDLE; VkImageView m_image_view = VK_NULL_HANDLE; @@ -65,11 +71,12 @@ namespace mlx { public: DepthImage() = default; - inline void Init(std::uint32_t width, std::uint32_t height, bool is_multisampled = false) + inline void Init(std::uint32_t width, std::uint32_t height, bool is_multisampled, std::string_view debug_name) { + MLX_PROFILE_FUNCTION(); std::vector candidates = { VK_FORMAT_D32_SFLOAT, VK_FORMAT_D32_SFLOAT_S8_UINT, VK_FORMAT_D24_UNORM_S8_UINT }; VkFormat format = kvfFindSupportFormatInCandidates(RenderCore::Get().GetDevice(), candidates.data(), candidates.size(), VK_IMAGE_TILING_OPTIMAL, VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT); - Image::Init(ImageType::Depth, width, height, format, VK_IMAGE_TILING_OPTIMAL, VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT, is_multisampled); + Image::Init(ImageType::Depth, width, height, format, VK_IMAGE_TILING_OPTIMAL, VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT, is_multisampled, std::move(debug_name)); Image::CreateImageView(VK_IMAGE_VIEW_TYPE_2D, VK_IMAGE_ASPECT_DEPTH_BIT); Image::TransitionLayout(VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL); } @@ -80,33 +87,12 @@ namespace mlx { public: Texture() = default; - Texture(CPUBuffer pixels, std::uint32_t width, std::uint32_t height, VkFormat format = VK_FORMAT_R8G8B8A8_SRGB, bool is_multisampled = false) + Texture(CPUBuffer pixels, std::uint32_t width, std::uint32_t height, VkFormat format, bool is_multisampled, std::string_view debug_name) { - Init(std::move(pixels), width, height, format, is_multisampled); + Init(std::move(pixels), width, height, format, is_multisampled, std::move(debug_name)); } - inline void Init(CPUBuffer pixels, std::uint32_t width, std::uint32_t height, VkFormat format = VK_FORMAT_R8G8B8A8_SRGB, bool is_multisampled = false) - { - Image::Init(ImageType::Color, width, height, format, VK_IMAGE_TILING_OPTIMAL, VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, is_multisampled); - Image::CreateImageView(VK_IMAGE_VIEW_TYPE_2D, VK_IMAGE_ASPECT_COLOR_BIT); - Image::CreateSampler(); - if(pixels) - { - GPUBuffer staging_buffer; - std::size_t size = width * height * kvfFormatSize(format); - staging_buffer.Init(BufferType::Staging, size, VK_BUFFER_USAGE_TRANSFER_SRC_BIT, pixels); - VkCommandBuffer cmd = kvfCreateCommandBuffer(RenderCore::Get().GetDevice()); - kvfBeginCommandBuffer(cmd, VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT); - TransitionLayout(VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, cmd); - 
kvfCopyBufferToImage(cmd, Image::Get(), staging_buffer.Get(), staging_buffer.GetOffset(), VK_IMAGE_ASPECT_COLOR_BIT, { width, height, 1 }); - RenderCore::Get().vkEndCommandBuffer(cmd); - VkFence fence = kvfCreateFence(RenderCore::Get().GetDevice()); - kvfSubmitSingleTimeCommandBuffer(RenderCore::Get().GetDevice(), cmd, KVF_GRAPHICS_QUEUE, fence); - kvfDestroyFence(RenderCore::Get().GetDevice(), fence); - staging_buffer.Destroy(); - } - TransitionLayout(VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL); - } + void Init(CPUBuffer pixels, std::uint32_t width, std::uint32_t height, VkFormat format, bool is_multisampled, std::string_view debug_name); void SetPixel(int x, int y, std::uint32_t color) noexcept; int GetPixel(int x, int y) noexcept; diff --git a/runtime/Includes/Renderer/Memory.h b/runtime/Includes/Renderer/Memory.h index 2470630..26f0b99 100644 --- a/runtime/Includes/Renderer/Memory.h +++ b/runtime/Includes/Renderer/Memory.h @@ -12,10 +12,10 @@ namespace mlx void Destroy() noexcept; VmaAllocation CreateBuffer(const VkBufferCreateInfo* binfo, const VmaAllocationCreateInfo* vinfo, VkBuffer& buffer, const char* name = nullptr) noexcept; - void DestroyBuffer(VmaAllocation allocation, VkBuffer buffer) noexcept; + void DestroyBuffer(VmaAllocation allocation, VkBuffer buffer, const char* name) noexcept; VmaAllocation CreateImage(const VkImageCreateInfo* iminfo, const VmaAllocationCreateInfo* vinfo, VkImage& image, const char* name = nullptr) noexcept; - void DestroyImage(VmaAllocation allocation, VkImage image) noexcept; + void DestroyImage(VmaAllocation allocation, VkImage image, const char* name) noexcept; void MapMemory(VmaAllocation allocation, void** data) noexcept; void UnmapMemory(VmaAllocation allocation) noexcept; diff --git a/runtime/Includes/Renderer/RenderCore.h b/runtime/Includes/Renderer/RenderCore.h index a119534..f17b8d6 100644 --- a/runtime/Includes/Renderer/RenderCore.h +++ b/runtime/Includes/Renderer/RenderCore.h @@ -31,6 +31,12 @@ namespace mlx #undef MLX_VULKAN_INSTANCE_FUNCTION #undef MLX_VULKAN_DEVICE_FUNCTION + #if defined(DEBUG) && defined(VK_EXT_debug_utils) + inline static constexpr bool HAS_DEBUG_UTILS_FUNCTIONS = true; + #else + inline static constexpr bool HAS_DEBUG_UTILS_FUNCTIONS = false; + #endif + ~RenderCore(); private: diff --git a/runtime/Includes/Renderer/Vulkan/VulkanDefs.h b/runtime/Includes/Renderer/Vulkan/VulkanDefs.h index 2c9972d..5ee802e 100644 --- a/runtime/Includes/Renderer/Vulkan/VulkanDefs.h +++ b/runtime/Includes/Renderer/Vulkan/VulkanDefs.h @@ -20,6 +20,12 @@ MLX_VULKAN_INSTANCE_FUNCTION(vkGetPhysicalDeviceMemoryProperties) MLX_VULKAN_INSTANCE_FUNCTION(vkGetPhysicalDeviceProperties) MLX_VULKAN_INSTANCE_FUNCTION(vkGetPhysicalDeviceQueueFamilyProperties) + #ifdef DEBUG + #ifdef VK_EXT_debug_utils + MLX_VULKAN_INSTANCE_FUNCTION(vkSetDebugUtilsObjectNameEXT) + //MLX_VULKAN_INSTANCE_FUNCTION(vkSetDebugUtilsObjectTagEXT) + #endif + #endif #endif #ifdef MLX_VULKAN_DEVICE_FUNCTION diff --git a/runtime/Sources/Core/Application.cpp b/runtime/Sources/Core/Application.cpp index cd53f58..066df7e 100644 --- a/runtime/Sources/Core/Application.cpp +++ b/runtime/Sources/Core/Application.cpp @@ -49,7 +49,7 @@ namespace mlx { MLX_PROFILE_FUNCTION(); Texture* texture; - try { texture = new Texture({}, w, h); } + try { texture = new Texture({}, w, h, VK_FORMAT_R8G8B8A8_SRGB, false, "mlx_user_image"); } catch(...) 
{ return NULL; } m_image_registry.RegisterTexture(texture); return texture; diff --git a/runtime/Sources/Graphics/Mesh.cpp b/runtime/Sources/Graphics/Mesh.cpp index 3df2e95..751d12e 100644 --- a/runtime/Sources/Graphics/Mesh.cpp +++ b/runtime/Sources/Graphics/Mesh.cpp @@ -6,12 +6,14 @@ namespace mlx { void Mesh::Draw(VkCommandBuffer cmd, std::size_t& drawcalls, std::size_t& polygondrawn) const noexcept { + MLX_PROFILE_FUNCTION(); for(std::size_t i = 0; i < m_sub_meshes.size(); i++) Draw(cmd, drawcalls, polygondrawn, i); } void Mesh::Draw(VkCommandBuffer cmd, std::size_t& drawcalls, std::size_t& polygondrawn, std::size_t submesh_index) const noexcept { + MLX_PROFILE_FUNCTION(); Verify(submesh_index < m_sub_meshes.size(), "invalid submesh index"); m_sub_meshes[submesh_index].vbo.Bind(cmd); m_sub_meshes[submesh_index].ibo.Bind(cmd); @@ -22,6 +24,7 @@ namespace mlx Mesh::~Mesh() { + MLX_PROFILE_FUNCTION(); for(auto& mesh : m_sub_meshes) { mesh.vbo.Destroy(); diff --git a/runtime/Sources/Graphics/PixelPutManager.cpp b/runtime/Sources/Graphics/PixelPutManager.cpp deleted file mode 100644 index 81c0c6f..0000000 --- a/runtime/Sources/Graphics/PixelPutManager.cpp +++ /dev/null @@ -1,32 +0,0 @@ -#include - -#include -#include - -namespace mlx -{ - NonOwningPtr PutPixelManager::DrawPixel(int x, int y, std::uint64_t z, std::uint32_t color) - { - Verify((bool)p_renderer, "invalid renderer pointer"); - auto it = m_textures.find(z); - if(it == m_textures.end()) - { - VkExtent2D swapchain_extent = kvfGetSwapchainImagesSize(p_renderer->GetSwapchain()); - Texture& texture = m_textures[z] = Texture({}, swapchain_extent.width, swapchain_extent.height); - texture.SetPixel(x, y, color); - return &texture; - } - it->second.SetPixel(x, y, color); - return nullptr; - } - - void PutPixelManager::ResetRenderData() - { - m_textures.clear(); - } - - PutPixelManager::~PutPixelManager() - { - ResetRenderData(); - } -} diff --git a/runtime/Sources/Graphics/PutPixelManager.cpp b/runtime/Sources/Graphics/PutPixelManager.cpp new file mode 100644 index 0000000..67f42dd --- /dev/null +++ b/runtime/Sources/Graphics/PutPixelManager.cpp @@ -0,0 +1,31 @@ +#include + +#include +#include + +namespace mlx +{ + NonOwningPtr PutPixelManager::DrawPixel(int x, int y, std::uint64_t z, std::uint32_t color) + { + Verify((bool)p_renderer, "invalid renderer pointer"); + + VkExtent2D swapchain_extent = kvfGetSwapchainImagesSize(p_renderer->GetSwapchain()); + #ifdef DEBUG + auto res = m_textures.try_emplace(z, CPUBuffer{}, swapchain_extent.width, swapchain_extent.height, VK_FORMAT_R8G8B8A8_SRGB, false, "mlx_put_pixel_layer_" + std::to_string(z)); + #else + auto res = m_textures.try_emplace(z, CPUBuffer{}, swapchain_extent.width, swapchain_extent.height, VK_FORMAT_R8G8B8A8_SRGB, false, {}); + #endif + res.first->second.SetPixel(x, y, color); + return (res.second ? 
&res.first->second : nullptr); + } + + void PutPixelManager::ResetRenderData() + { + m_textures.clear(); + } + + PutPixelManager::~PutPixelManager() + { + ResetRenderData(); + } +} diff --git a/runtime/Sources/Graphics/Scene.cpp b/runtime/Sources/Graphics/Scene.cpp index 94eac69..dd48850 100644 --- a/runtime/Sources/Graphics/Scene.cpp +++ b/runtime/Sources/Graphics/Scene.cpp @@ -8,12 +8,14 @@ namespace mlx Scene::Scene(SceneDescriptor desc) : m_descriptor(std::move(desc)) { + MLX_PROFILE_FUNCTION(); Verify((bool)m_descriptor.renderer, "invalid renderer"); - m_depth.Init(m_descriptor.renderer->GetSwapchainImages().back().GetWidth(), m_descriptor.renderer->GetSwapchainImages().back().GetHeight()); + m_depth.Init(m_descriptor.renderer->GetSwapchainImages().back().GetWidth(), m_descriptor.renderer->GetSwapchainImages().back().GetHeight(), false, "mlx_scene_depth"); } Sprite& Scene::CreateSprite(NonOwningPtr texture) noexcept { + MLX_PROFILE_FUNCTION(); std::shared_ptr sprite = std::make_shared(texture); m_sprites.push_back(sprite); return *sprite; @@ -21,6 +23,7 @@ namespace mlx NonOwningPtr Scene::GetSpriteFromTextureAndPosition(NonOwningPtr texture, const Vec2f& position) const { + MLX_PROFILE_FUNCTION(); auto it = std::find_if(m_sprites.begin(), m_sprites.end(), [texture, position](std::shared_ptr sprite) { return sprite->GetPosition().x == position.x && sprite->GetPosition().y == position.y && sprite->GetTexture() == texture; @@ -30,6 +33,7 @@ namespace mlx void Scene::TryEraseSpriteFromTexture(NonOwningPtr texture) { + MLX_PROFILE_FUNCTION(); auto it = m_sprites.begin(); do { diff --git a/runtime/Sources/Graphics/Sprite.cpp b/runtime/Sources/Graphics/Sprite.cpp index e6792ae..533270b 100644 --- a/runtime/Sources/Graphics/Sprite.cpp +++ b/runtime/Sources/Graphics/Sprite.cpp @@ -7,6 +7,7 @@ namespace mlx { std::shared_ptr CreateQuad(float x, float y, float width, float height) { + MLX_PROFILE_FUNCTION(); std::vector data(4); data[0].position = Vec4f(x, y, 0.0f, 1.0f); @@ -37,6 +38,7 @@ namespace mlx Sprite::Sprite(NonOwningPtr texture) { + MLX_PROFILE_FUNCTION(); Verify((bool)texture, "Sprite: invalid texture"); p_mesh = CreateQuad(0, 0, texture->GetWidth(), texture->GetHeight()); p_texture = texture; diff --git a/runtime/Sources/Renderer/Buffer.cpp b/runtime/Sources/Renderer/Buffer.cpp index b60233d..b1cf50a 100644 --- a/runtime/Sources/Renderer/Buffer.cpp +++ b/runtime/Sources/Renderer/Buffer.cpp @@ -4,8 +4,9 @@ namespace mlx { - void GPUBuffer::Init(BufferType type, VkDeviceSize size, VkBufferUsageFlags usage, CPUBuffer data) + void GPUBuffer::Init(BufferType type, VkDeviceSize size, VkBufferUsageFlags usage, CPUBuffer data, std::string_view debug_name) { + MLX_PROFILE_FUNCTION(); VmaAllocationCreateInfo alloc_info{}; alloc_info.flags = VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT; alloc_info.usage = VMA_MEMORY_USAGE_AUTO; @@ -24,10 +25,7 @@ namespace mlx else // LowDynamic or Staging m_usage = usage | VK_BUFFER_USAGE_TRANSFER_SRC_BIT; - if(type == BufferType::Staging && data.Empty()) - Warning("Vulkan : trying to create staging buffer without data (wtf?)"); - - CreateBuffer(size, m_usage, alloc_info); + CreateBuffer(size, m_usage, alloc_info, std::move(debug_name)); if(!data.Empty()) { @@ -38,21 +36,38 @@ namespace mlx PushToGPU(); } - void GPUBuffer::CreateBuffer(VkDeviceSize size, VkBufferUsageFlags usage, VmaAllocationCreateInfo alloc_info) + void GPUBuffer::CreateBuffer(VkDeviceSize size, VkBufferUsageFlags usage, VmaAllocationCreateInfo alloc_info, std::string_view 
debug_name) { + MLX_PROFILE_FUNCTION(); VkBufferCreateInfo bufferInfo{}; bufferInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO; bufferInfo.size = size; bufferInfo.usage = usage; bufferInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE; - m_allocation = RenderCore::Get().GetAllocator().CreateBuffer(&bufferInfo, &alloc_info, m_buffer, nullptr); + #ifdef DEBUG + std::string alloc_name{ debug_name }; + if(usage & VK_BUFFER_USAGE_INDEX_BUFFER_BIT) + alloc_name.append("_index_buffer"); + else if(usage & VK_BUFFER_USAGE_VERTEX_BUFFER_BIT) + alloc_name.append("_vertex_buffer"); + else if(usage & VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT) + alloc_name.append("_uniform_buffer"); + else + alloc_name.append("_buffer"); + m_allocation = RenderCore::Get().GetAllocator().CreateBuffer(&bufferInfo, &alloc_info, m_buffer, alloc_name.c_str()); + m_debug_name = std::move(alloc_name); + #else + m_allocation = RenderCore::Get().GetAllocator().CreateBuffer(&bufferInfo, &alloc_info, m_buffer, nullptr); + #endif if(alloc_info.flags != 0) RenderCore::Get().GetAllocator().MapMemory(m_allocation, &p_map); + m_size = size; } bool GPUBuffer::CopyFrom(const GPUBuffer& buffer) noexcept { + MLX_PROFILE_FUNCTION(); if(!(m_usage & VK_BUFFER_USAGE_TRANSFER_DST_BIT)) { Error("Vulkan : buffer cannot be the destination of a copy because it does not have the correct usage flag"); @@ -77,12 +92,19 @@ namespace mlx void GPUBuffer::PushToGPU() noexcept { + MLX_PROFILE_FUNCTION(); VmaAllocationCreateInfo alloc_info{}; alloc_info.usage = VMA_MEMORY_USAGE_AUTO_PREFER_DEVICE; GPUBuffer new_buffer; new_buffer.m_usage = (this->m_usage & 0xFFFFFFFC) | VK_BUFFER_USAGE_TRANSFER_DST_BIT; - new_buffer.CreateBuffer(m_size, new_buffer.m_usage, alloc_info); + + #ifdef DEBUG + std::string new_name = m_debug_name + "_gpu"; + new_buffer.CreateBuffer(m_size, new_buffer.m_usage, alloc_info, new_name); + #else + new_buffer.CreateBuffer(m_size, new_buffer.m_usage, alloc_info, {}); + #endif if(new_buffer.CopyFrom(*this)) Swap(new_buffer); @@ -92,15 +114,21 @@ namespace mlx void GPUBuffer::Destroy() noexcept { + MLX_PROFILE_FUNCTION(); if(m_buffer == VK_NULL_HANDLE) return; RenderCore::Get().GetAllocator().UnmapMemory(m_allocation); - RenderCore::Get().GetAllocator().DestroyBuffer(m_allocation, m_buffer); + #ifdef DEBUG + RenderCore::Get().GetAllocator().DestroyBuffer(m_allocation, m_buffer, m_debug_name.c_str()); + #else + RenderCore::Get().GetAllocator().DestroyBuffer(m_allocation, m_buffer, nullptr); + #endif m_buffer = VK_NULL_HANDLE; } void GPUBuffer::Swap(GPUBuffer& buffer) noexcept { + MLX_PROFILE_FUNCTION(); std::swap(m_buffer, buffer.m_buffer); std::swap(m_allocation, buffer.m_allocation); std::swap(m_size, buffer.m_size); @@ -111,6 +139,7 @@ namespace mlx void VertexBuffer::SetData(CPUBuffer data) { + MLX_PROFILE_FUNCTION(); if(data.GetSize() > m_size) { Error("Vulkan : trying to store to much data in a vertex buffer (% bytes in % bytes)", data.GetSize(), m_size); @@ -122,13 +151,18 @@ namespace mlx return; } GPUBuffer staging; - staging.Init(BufferType::Staging, data.GetSize(), VK_BUFFER_USAGE_VERTEX_BUFFER_BIT, data); + #ifdef DEBUG + staging.Init(BufferType::Staging, data.GetSize(), VK_BUFFER_USAGE_VERTEX_BUFFER_BIT, data, m_debug_name); + #else + staging.Init(BufferType::Staging, data.GetSize(), VK_BUFFER_USAGE_VERTEX_BUFFER_BIT, data, {}); + #endif CopyFrom(staging); staging.Destroy(); } void IndexBuffer::SetData(CPUBuffer data) { + MLX_PROFILE_FUNCTION(); if(data.GetSize() > m_size) { Error("Vulkan : trying to store to much data in an index buffer 
(% bytes in % bytes)", data.GetSize(), m_size); @@ -140,16 +174,25 @@ namespace mlx return; } GPUBuffer staging; - staging.Init(BufferType::Staging, data.GetSize(), VK_BUFFER_USAGE_INDEX_BUFFER_BIT, data); + #ifdef DEBUG + staging.Init(BufferType::Staging, data.GetSize(), VK_BUFFER_USAGE_INDEX_BUFFER_BIT, data, m_debug_name); + #else + staging.Init(BufferType::Staging, data.GetSize(), VK_BUFFER_USAGE_INDEX_BUFFER_BIT, data, {}); + #endif CopyFrom(staging); staging.Destroy(); } - void UniformBuffer::Init(std::uint32_t size) + void UniformBuffer::Init(std::uint32_t size, std::string_view debug_name) { + MLX_PROFILE_FUNCTION(); for(int i = 0; i < MAX_FRAMES_IN_FLIGHT; i++) { - m_buffers[i].Init(BufferType::HighDynamic, size, VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT, {}); + #ifdef DEBUG + m_buffers[i].Init(BufferType::HighDynamic, size, VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT, {}, std::string{ debug_name } + '_' + std::to_string(i)); + #else + m_buffers[i].Init(BufferType::HighDynamic, size, VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT, {}, {}); + #endif m_maps[i] = m_buffers[i].GetMap(); if(m_maps[i] == nullptr) FatalError("Vulkan : unable to map a uniform buffer"); @@ -158,6 +201,7 @@ namespace mlx void UniformBuffer::SetData(CPUBuffer data, std::size_t frame_index) { + MLX_PROFILE_FUNCTION(); if(data.GetSize() != m_buffers[frame_index].GetSize()) { Error("Vulkan : invalid data size to update to a uniform buffer, % != %", data.GetSize(), m_buffers[frame_index].GetSize()); @@ -169,6 +213,7 @@ namespace mlx void UniformBuffer::Destroy() noexcept { + MLX_PROFILE_FUNCTION(); for(int i = 0; i < MAX_FRAMES_IN_FLIGHT; i++) m_buffers[i].Destroy(); } diff --git a/runtime/Sources/Renderer/Descriptor.cpp b/runtime/Sources/Renderer/Descriptor.cpp index 11a4411..ac3b2ab 100644 --- a/runtime/Sources/Renderer/Descriptor.cpp +++ b/runtime/Sources/Renderer/Descriptor.cpp @@ -10,6 +10,7 @@ namespace mlx { void TransitionImageToCorrectLayout(Image& image, VkCommandBuffer cmd) { + MLX_PROFILE_FUNCTION(); if(!image.IsInit()) return; if(image.GetType() == ImageType::Color) @@ -21,6 +22,7 @@ namespace mlx DescriptorSet::DescriptorSet(const ShaderSetLayout& layout, VkDescriptorSetLayout vklayout, ShaderType shader_type) : m_set_layout(vklayout) { + MLX_PROFILE_FUNCTION(); for(auto& [binding, type] : layout.binds) { m_descriptors.emplace_back(); @@ -35,12 +37,14 @@ namespace mlx DescriptorSet::DescriptorSet(VkDescriptorSetLayout layout, const std::vector& descriptors) : m_descriptors(descriptors), m_set_layout(layout) { + MLX_PROFILE_FUNCTION(); for(std::size_t i = 0; i < MAX_FRAMES_IN_FLIGHT; i++) m_set[i] = kvfAllocateDescriptorSet(RenderCore::Get().GetDevice(), layout); } void DescriptorSet::SetImage(std::size_t i, std::uint32_t binding, class Image& image) { + MLX_PROFILE_FUNCTION(); Verify(m_set[i] != VK_NULL_HANDLE, "invalid descriptor"); auto it = std::find_if(m_descriptors.begin(), m_descriptors.end(), [=](Descriptor descriptor) { @@ -61,6 +65,7 @@ namespace mlx void DescriptorSet::SetStorageBuffer(std::size_t i, std::uint32_t binding, class GPUBuffer& buffer) { + MLX_PROFILE_FUNCTION(); Verify(m_set[i] != VK_NULL_HANDLE, "invalid descriptor"); auto it = std::find_if(m_descriptors.begin(), m_descriptors.end(), [=](Descriptor descriptor) { @@ -81,6 +86,7 @@ namespace mlx void DescriptorSet::SetUniformBuffer(std::size_t i, std::uint32_t binding, class GPUBuffer& buffer) { + MLX_PROFILE_FUNCTION(); Verify(m_set[i] != VK_NULL_HANDLE, "invalid descriptor"); auto it = std::find_if(m_descriptors.begin(), m_descriptors.end(), 
[=](Descriptor descriptor) { @@ -101,6 +107,7 @@ namespace mlx void DescriptorSet::Update(std::size_t i, VkCommandBuffer cmd) noexcept { + MLX_PROFILE_FUNCTION(); Verify(m_set[i] != VK_NULL_HANDLE, "invalid descriptor"); std::vector writes; std::vector buffer_infos; @@ -141,6 +148,7 @@ namespace mlx void DescriptorSet::Reallocate() noexcept { + MLX_PROFILE_FUNCTION(); for(std::size_t i = 0; i < MAX_FRAMES_IN_FLIGHT; i++) m_set[i] = kvfAllocateDescriptorSet(RenderCore::Get().GetDevice(), m_set_layout); } diff --git a/runtime/Sources/Renderer/Image.cpp b/runtime/Sources/Renderer/Image.cpp index 03cf018..61b778b 100644 --- a/runtime/Sources/Renderer/Image.cpp +++ b/runtime/Sources/Renderer/Image.cpp @@ -15,14 +15,18 @@ namespace mlx { - void Image::Init(ImageType type, std::uint32_t width, std::uint32_t height, VkFormat format, VkImageTiling tiling, VkImageUsageFlags usage, bool is_multisampled) + void Image::Init(ImageType type, std::uint32_t width, std::uint32_t height, VkFormat format, VkImageTiling tiling, VkImageUsageFlags usage, bool is_multisampled, std::string_view debug_name) { + MLX_PROFILE_FUNCTION(); m_type = type; m_width = width; m_height = height; m_format = format; m_tiling = tiling; m_is_multisampled = is_multisampled; + #ifdef DEBUG + m_debug_name = std::move(debug_name); + #endif VmaAllocationCreateInfo alloc_info{}; alloc_info.usage = VMA_MEMORY_USAGE_AUTO_PREFER_DEVICE; @@ -41,21 +45,39 @@ namespace mlx image_info.usage = usage; image_info.samples = (m_is_multisampled ? VK_SAMPLE_COUNT_4_BIT : VK_SAMPLE_COUNT_1_BIT); image_info.sharingMode = VK_SHARING_MODE_EXCLUSIVE; - m_allocation = RenderCore::Get().GetAllocator().CreateImage(&image_info, &alloc_info, m_image); + #ifdef DEBUG + m_allocation = RenderCore::Get().GetAllocator().CreateImage(&image_info, &alloc_info, m_image, m_debug_name.c_str()); + #else + m_allocation = RenderCore::Get().GetAllocator().CreateImage(&image_info, &alloc_info, m_image, nullptr); + #endif } void Image::CreateImageView(VkImageViewType type, VkImageAspectFlags aspect_flags, int layer_count) noexcept { + MLX_PROFILE_FUNCTION(); m_image_view = kvfCreateImageView(RenderCore::Get().GetDevice(), m_image, m_format, type, aspect_flags, layer_count); + #ifdef DEBUG + if constexpr(RenderCore::HAS_DEBUG_UTILS_FUNCTIONS) + { + VkDebugUtilsObjectNameInfoEXT name_info{}; + name_info.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT; + name_info.objectType = VK_OBJECT_TYPE_IMAGE_VIEW; + name_info.objectHandle = reinterpret_cast(m_image_view); + name_info.pObjectName = m_debug_name.c_str(); + RenderCore::Get().vkSetDebugUtilsObjectNameEXT(RenderCore::Get().GetDevice(), &name_info); + } + #endif } void Image::CreateSampler() noexcept { + MLX_PROFILE_FUNCTION(); m_sampler = kvfCreateSampler(RenderCore::Get().GetDevice(), VK_FILTER_NEAREST, VK_SAMPLER_ADDRESS_MODE_REPEAT, VK_SAMPLER_MIPMAP_MODE_NEAREST); } void Image::TransitionLayout(VkImageLayout new_layout, VkCommandBuffer cmd) { + MLX_PROFILE_FUNCTION(); if(new_layout == m_layout) return; bool is_single_time_cmd_buffer = (cmd == VK_NULL_HANDLE); @@ -74,6 +96,7 @@ namespace mlx void Image::Clear(VkCommandBuffer cmd, Vec4f color) { + MLX_PROFILE_FUNCTION(); VkImageSubresourceRange subresource_range{}; subresource_range.baseMipLevel = 0; subresource_range.layerCount = 1; @@ -100,6 +123,7 @@ namespace mlx void Image::DestroySampler() noexcept { + MLX_PROFILE_FUNCTION(); if(m_sampler != VK_NULL_HANDLE) kvfDestroySampler(RenderCore::Get().GetDevice(), m_sampler); m_sampler = VK_NULL_HANDLE; @@ -107,6 
+131,7 @@ namespace mlx void Image::DestroyImageView() noexcept { + MLX_PROFILE_FUNCTION(); if(m_image_view != VK_NULL_HANDLE) kvfDestroyImageView(RenderCore::Get().GetDevice(), m_image_view); m_image_view = VK_NULL_HANDLE; @@ -114,14 +139,45 @@ namespace mlx void Image::Destroy() noexcept { + MLX_PROFILE_FUNCTION(); DestroySampler(); DestroyImageView(); if(m_image != VK_NULL_HANDLE) - RenderCore::Get().GetAllocator().DestroyImage(m_allocation, m_image); + { + #ifdef DEBUG + RenderCore::Get().GetAllocator().DestroyImage(m_allocation, m_image, m_debug_name.c_str()); + #else + RenderCore::Get().GetAllocator().DestroyImage(m_allocation, m_image, nullptr); + #endif + } m_image = VK_NULL_HANDLE; } + void Texture::Init(CPUBuffer pixels, std::uint32_t width, std::uint32_t height, VkFormat format, bool is_multisampled, std::string_view debug_name) + { + MLX_PROFILE_FUNCTION(); + Image::Init(ImageType::Color, width, height, format, VK_IMAGE_TILING_OPTIMAL, VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, is_multisampled, std::move(debug_name)); + Image::CreateImageView(VK_IMAGE_VIEW_TYPE_2D, VK_IMAGE_ASPECT_COLOR_BIT); + Image::CreateSampler(); + if(pixels) + { + GPUBuffer staging_buffer; + std::size_t size = width * height * kvfFormatSize(format); + staging_buffer.Init(BufferType::Staging, size, VK_BUFFER_USAGE_TRANSFER_SRC_BIT, std::move(pixels), debug_name); + VkCommandBuffer cmd = kvfCreateCommandBuffer(RenderCore::Get().GetDevice()); + kvfBeginCommandBuffer(cmd, VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT); + TransitionLayout(VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, cmd); + kvfCopyBufferToImage(cmd, Image::Get(), staging_buffer.Get(), staging_buffer.GetOffset(), VK_IMAGE_ASPECT_COLOR_BIT, { width, height, 1 }); + RenderCore::Get().vkEndCommandBuffer(cmd); + VkFence fence = kvfCreateFence(RenderCore::Get().GetDevice()); + kvfSubmitSingleTimeCommandBuffer(RenderCore::Get().GetDevice(), cmd, KVF_GRAPHICS_QUEUE, fence); + kvfDestroyFence(RenderCore::Get().GetDevice(), fence); + staging_buffer.Destroy(); + } + TransitionLayout(VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL); + } + void Texture::SetPixel(int x, int y, std::uint32_t color) noexcept { MLX_PROFILE_FUNCTION(); @@ -150,6 +206,7 @@ namespace mlx void Texture::Update(VkCommandBuffer cmd) { + MLX_PROFILE_FUNCTION(); if(!m_has_been_modified) return; std::memcpy(m_staging_buffer->GetMap(), m_cpu_buffer.data(), m_cpu_buffer.size() * kvfFormatSize(m_format)); @@ -167,10 +224,12 @@ namespace mlx MLX_PROFILE_FUNCTION(); if(m_staging_buffer.has_value()) return; - DebugLog("Texture : enabling CPU mapping"); + #ifdef DEBUG + DebugLog("Texture : enabling CPU mapping for '%'", m_debug_name); + #endif m_staging_buffer.emplace(); std::size_t size = m_width * m_height * kvfFormatSize(m_format); - m_staging_buffer->Init(BufferType::Staging, size, VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT, {}); + m_staging_buffer->Init(BufferType::Staging, size, VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT, {}, m_debug_name); VkImageLayout old_layout = m_layout; VkCommandBuffer cmd = kvfCreateCommandBuffer(RenderCore::Get().GetDevice()); @@ -199,7 +258,7 @@ namespace mlx } if(stbi_is_hdr(filename.c_str())) { - Error("Texture : unsupported image format %", file); + Error("Texture : unsupported image format % (HDR image)", file); return nullptr; } int dummy_w; @@ -210,7 +269,7 @@ namespace mlx std::memcpy(buffer.GetData(), data, buffer.GetSize()); 
Texture* texture; - try { texture = new Texture(buffer, (w == nullptr ? dummy_w : *w), (h == nullptr ? dummy_h : *h)); } + try { texture = new Texture(buffer, (w == nullptr ? dummy_w : *w), (h == nullptr ? dummy_h : *h), VK_FORMAT_R8G8B8A8_SRGB, false, std::move(filename)); } catch(...) { return NULL; } stbi_image_free(data); diff --git a/runtime/Sources/Renderer/Memory.cpp b/runtime/Sources/Renderer/Memory.cpp index 3587dca..01e1a5f 100644 --- a/runtime/Sources/Renderer/Memory.cpp +++ b/runtime/Sources/Renderer/Memory.cpp @@ -26,6 +26,7 @@ namespace mlx { void GPUAllocator::Init() noexcept { + MLX_PROFILE_FUNCTION(); VmaVulkanFunctions vma_vulkan_func{}; vma_vulkan_func.vkAllocateMemory = RenderCore::Get().vkAllocateMemory; vma_vulkan_func.vkBindBufferMemory = RenderCore::Get().vkBindBufferMemory; @@ -63,6 +64,15 @@ namespace mlx kvfCheckVk(vmaCreateBuffer(m_allocator, binfo, vinfo, &buffer, &allocation, nullptr)); if(name != nullptr) { + if constexpr(RenderCore::HAS_DEBUG_UTILS_FUNCTIONS) + { + VkDebugUtilsObjectNameInfoEXT name_info{}; + name_info.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT; + name_info.objectType = VK_OBJECT_TYPE_BUFFER; + name_info.objectHandle = reinterpret_cast(buffer); + name_info.pObjectName = name; + RenderCore::Get().vkSetDebugUtilsObjectNameEXT(RenderCore::Get().GetDevice(), &name_info); + } vmaSetAllocationName(m_allocator, allocation, name); } DebugLog("Graphics Allocator : created new buffer '%'", name); @@ -70,12 +80,15 @@ namespace mlx return allocation; } - void GPUAllocator::DestroyBuffer(VmaAllocation allocation, VkBuffer buffer) noexcept + void GPUAllocator::DestroyBuffer(VmaAllocation allocation, VkBuffer buffer, const char* name) noexcept { MLX_PROFILE_FUNCTION(); RenderCore::Get().WaitDeviceIdle(); vmaDestroyBuffer(m_allocator, buffer, allocation); - DebugLog("Graphics Allocator : destroyed buffer"); + if(name != nullptr) + DebugLog("Graphics Allocator : destroyed buffer '%'", name); + else + DebugLog("Graphics Allocator : destroyed buffer"); m_active_buffers_allocations--; } @@ -86,6 +99,15 @@ namespace mlx kvfCheckVk(vmaCreateImage(m_allocator, iminfo, vinfo, &image, &allocation, nullptr)); if(name != nullptr) { + if constexpr(RenderCore::HAS_DEBUG_UTILS_FUNCTIONS) + { + VkDebugUtilsObjectNameInfoEXT name_info{}; + name_info.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT; + name_info.objectType = VK_OBJECT_TYPE_IMAGE; + name_info.objectHandle = reinterpret_cast(image); + name_info.pObjectName = name; + RenderCore::Get().vkSetDebugUtilsObjectNameEXT(RenderCore::Get().GetDevice(), &name_info); + } vmaSetAllocationName(m_allocator, allocation, name); } DebugLog("Graphics Allocator : created new image '%'", name); @@ -93,12 +115,15 @@ namespace mlx return allocation; } - void GPUAllocator::DestroyImage(VmaAllocation allocation, VkImage image) noexcept + void GPUAllocator::DestroyImage(VmaAllocation allocation, VkImage image, const char* name) noexcept { MLX_PROFILE_FUNCTION(); RenderCore::Get().WaitDeviceIdle(); vmaDestroyImage(m_allocator, image, allocation); - DebugLog("Graphics Allocator : destroyed image"); + if(name != nullptr) + DebugLog("Graphics Allocator : destroyed image '%'", name); + else + DebugLog("Graphics Allocator : destroyed image"); m_active_images_allocations--; } @@ -141,6 +166,7 @@ namespace mlx void GPUAllocator::Destroy() noexcept { + MLX_PROFILE_FUNCTION(); if(m_active_images_allocations != 0) Error("Graphics allocator : some user-dependant allocations were not freed before destroying the display (% 
active allocations). You may have not destroyed all the MLX resources you've created", m_active_images_allocations); else if(m_active_buffers_allocations != 0) diff --git a/runtime/Sources/Renderer/Pipelines/Graphics.cpp b/runtime/Sources/Renderer/Pipelines/Graphics.cpp index 6fa92e0..a466786 100644 --- a/runtime/Sources/Renderer/Pipelines/Graphics.cpp +++ b/runtime/Sources/Renderer/Pipelines/Graphics.cpp @@ -9,6 +9,7 @@ namespace mlx { void GraphicPipeline::Init(const GraphicPipelineDescriptor& descriptor) { + MLX_PROFILE_FUNCTION(); if(!descriptor.vertex_shader || !descriptor.fragment_shader) FatalError("Vulkan : invalid shaders"); @@ -62,6 +63,7 @@ namespace mlx bool GraphicPipeline::BindPipeline(VkCommandBuffer command_buffer, std::size_t framebuffer_index, std::array clear) noexcept { + MLX_PROFILE_FUNCTION(); TransitionAttachments(command_buffer); VkFramebuffer fb = m_framebuffers[framebuffer_index]; VkExtent2D fb_extent = kvfGetFramebufferSize(fb); @@ -98,11 +100,13 @@ namespace mlx void GraphicPipeline::EndPipeline(VkCommandBuffer command_buffer) noexcept { + MLX_PROFILE_FUNCTION(); RenderCore::Get().vkCmdEndRenderPass(command_buffer); } void GraphicPipeline::Destroy() noexcept { + MLX_PROFILE_FUNCTION(); p_vertex_shader.reset(); p_fragment_shader.reset(); for(auto& fb : m_framebuffers) @@ -124,6 +128,7 @@ namespace mlx void GraphicPipeline::CreateFramebuffers(const std::vector>& render_targets, bool clear_attachments) { + MLX_PROFILE_FUNCTION(); std::vector attachments; std::vector attachment_views; if(p_renderer) @@ -167,6 +172,7 @@ namespace mlx void GraphicPipeline::TransitionAttachments(VkCommandBuffer cmd) { + MLX_PROFILE_FUNCTION(); if(p_depth) p_depth->TransitionLayout(VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL, cmd); diff --git a/runtime/Sources/Renderer/Pipelines/Shader.cpp b/runtime/Sources/Renderer/Pipelines/Shader.cpp index 651e560..7dd4691 100644 --- a/runtime/Sources/Renderer/Pipelines/Shader.cpp +++ b/runtime/Sources/Renderer/Pipelines/Shader.cpp @@ -6,6 +6,7 @@ namespace mlx { Shader::Shader(const std::vector& bytecode, ShaderType type, ShaderLayout layout) : m_layout(std::move(layout)), m_bytecode(bytecode) { + MLX_PROFILE_FUNCTION(); switch(type) { case ShaderType::Vertex : m_stage = VK_SHADER_STAGE_VERTEX_BIT; break; @@ -22,6 +23,7 @@ namespace mlx void Shader::GeneratePipelineLayout(ShaderLayout layout) { + MLX_PROFILE_FUNCTION(); for(auto& [n, set] : layout.set_layouts) { std::vector bindings(set.binds.size()); @@ -55,6 +57,7 @@ namespace mlx Shader::~Shader() { + MLX_PROFILE_FUNCTION(); kvfDestroyShaderModule(RenderCore::Get().GetDevice(), m_module); DebugLog("Vulkan : shader module destroyed"); for(auto& layout : m_set_layouts) diff --git a/runtime/Sources/Renderer/RenderCore.cpp b/runtime/Sources/Renderer/RenderCore.cpp index cbb3be0..872406e 100644 --- a/runtime/Sources/Renderer/RenderCore.cpp +++ b/runtime/Sources/Renderer/RenderCore.cpp @@ -37,7 +37,7 @@ namespace mlx std::cout << std::endl; } - void ValidationWarningCallback(const char* message) noexcept + void WarningCallback(const char* message) noexcept { Logs::Report(LogType::Warning, 0, "", "", message); std::cout << std::endl; @@ -54,8 +54,9 @@ namespace mlx LoadKVFGlobalVulkanFunctionPointers(); kvfSetErrorCallback(&ErrorCallback); + kvfSetWarningCallback(&WarningCallback); kvfSetValidationErrorCallback(&ValidationErrorCallback); - kvfSetValidationWarningCallback(&ValidationWarningCallback); + kvfSetValidationWarningCallback(&WarningCallback); //kvfAddLayer("VK_LAYER_MESA_overlay"); diff 
--git a/runtime/Sources/Renderer/RenderPasses/2DPass.cpp b/runtime/Sources/Renderer/RenderPasses/2DPass.cpp index 19ba2d7..a93a2e7 100644 --- a/runtime/Sources/Renderer/RenderPasses/2DPass.cpp +++ b/runtime/Sources/Renderer/RenderPasses/2DPass.cpp @@ -16,6 +16,7 @@ namespace mlx void Render2DPass::Init() { + MLX_PROFILE_FUNCTION(); ShaderLayout vertex_shader_layout( { { 0, @@ -64,7 +65,7 @@ namespace mlx p_texture_set = std::make_shared(p_fragment_shader->GetShaderLayout().set_layouts[0].second, p_fragment_shader->GetPipelineLayout().set_layouts[0], ShaderType::Fragment); p_viewer_data_buffer = std::make_shared(); - p_viewer_data_buffer->Init(sizeof(ViewerData)); + p_viewer_data_buffer->Init(sizeof(ViewerData), "mlx_2d_pass_viewer_data"); for(std::size_t i = 0; i < MAX_FRAMES_IN_FLIGHT; i++) { p_viewer_data_set->SetUniformBuffer(i, 0, p_viewer_data_buffer->Get(i)); @@ -74,6 +75,7 @@ namespace mlx void Render2DPass::Pass(Scene& scene, Renderer& renderer, Texture& render_target) { + MLX_PROFILE_FUNCTION(); if(m_pipeline.GetPipeline() == VK_NULL_HANDLE) { GraphicPipelineDescriptor pipeline_descriptor; @@ -115,6 +117,7 @@ namespace mlx void Render2DPass::Destroy() { + MLX_PROFILE_FUNCTION(); m_pipeline.Destroy(); p_vertex_shader.reset(); p_fragment_shader.reset(); diff --git a/runtime/Sources/Renderer/RenderPasses/FinalPass.cpp b/runtime/Sources/Renderer/RenderPasses/FinalPass.cpp index 5a4014b..b292109 100644 --- a/runtime/Sources/Renderer/RenderPasses/FinalPass.cpp +++ b/runtime/Sources/Renderer/RenderPasses/FinalPass.cpp @@ -9,6 +9,7 @@ namespace mlx { void FinalPass::Init() { + MLX_PROFILE_FUNCTION(); ShaderLayout vertex_shader_layout( {}, {} ); @@ -44,6 +45,7 @@ namespace mlx void FinalPass::Pass([[maybe_unused]] Scene& scene, Renderer& renderer, Texture& render_target) { + MLX_PROFILE_FUNCTION(); if(m_pipeline.GetPipeline() == VK_NULL_HANDLE) { GraphicPipelineDescriptor pipeline_descriptor; @@ -70,6 +72,7 @@ namespace mlx void FinalPass::Destroy() { + MLX_PROFILE_FUNCTION(); m_pipeline.Destroy(); p_vertex_shader.reset(); p_fragment_shader.reset(); diff --git a/runtime/Sources/Renderer/RenderPasses/Passes.cpp b/runtime/Sources/Renderer/RenderPasses/Passes.cpp index 83024f1..0a4b2bc 100644 --- a/runtime/Sources/Renderer/RenderPasses/Passes.cpp +++ b/runtime/Sources/Renderer/RenderPasses/Passes.cpp @@ -21,13 +21,21 @@ namespace mlx { m_main_render_texture.Destroy(); auto extent = kvfGetSwapchainImagesSize(renderer.GetSwapchain()); - m_main_render_texture.Init({}, extent.width, extent.height); + #ifdef DEBUG + m_main_render_texture.Init({}, extent.width, extent.height, VK_FORMAT_R8G8B8A8_SRGB, false, "mlx_renderpasses_target"); + #else + m_main_render_texture.Init({}, extent.width, extent.height, VK_FORMAT_R8G8B8A8_SRGB, false, {}); + #endif } }; EventBus::RegisterListener({ functor, "__MlxRenderPasses" }); auto extent = kvfGetSwapchainImagesSize(renderer.GetSwapchain()); - m_main_render_texture.Init({}, extent.width, extent.height); + #ifdef DEBUG + m_main_render_texture.Init({}, extent.width, extent.height, VK_FORMAT_R8G8B8A8_SRGB, false, "mlx_renderpasses_target"); + #else + m_main_render_texture.Init({}, extent.width, extent.height, VK_FORMAT_R8G8B8A8_SRGB, false, {}); + #endif } m_main_render_texture.Clear(renderer.GetActiveCommandBuffer(), Vec4f{ 0.0f, 0.0f, 0.0f, 1.0f }); diff --git a/runtime/Sources/Renderer/Renderer.cpp b/runtime/Sources/Renderer/Renderer.cpp index b240290..a7f3fcf 100644 --- a/runtime/Sources/Renderer/Renderer.cpp +++ b/runtime/Sources/Renderer/Renderer.cpp @@ 
-27,6 +27,7 @@ namespace mlx void Renderer::Init(NonOwningPtr window) { + MLX_PROFILE_FUNCTION(); func::function functor = [this](const EventBase& event) { if(event.What() == Event::ResizeEventCode) @@ -56,6 +57,7 @@ namespace mlx bool Renderer::BeginFrame() { + MLX_PROFILE_FUNCTION(); kvfWaitForFence(RenderCore::Get().GetDevice(), m_cmd_fences[m_current_frame_index]); VkResult result = RenderCore::Get().vkAcquireNextImageKHR(RenderCore::Get().GetDevice(), m_swapchain, UINT64_MAX, m_image_available_semaphores[m_current_frame_index], VK_NULL_HANDLE, &m_swapchain_image_index); if(result == VK_ERROR_OUT_OF_DATE_KHR) @@ -78,6 +80,7 @@ namespace mlx void Renderer::EndFrame() { + MLX_PROFILE_FUNCTION(); VkPipelineStageFlags wait_stages[] = { VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT }; kvfEndCommandBuffer(m_cmd_buffers[m_current_frame_index]); kvfSubmitCommandBuffer(RenderCore::Get().GetDevice(), m_cmd_buffers[m_current_frame_index], KVF_GRAPHICS_QUEUE, m_render_finished_semaphores[m_current_frame_index], m_image_available_semaphores[m_current_frame_index], m_cmd_fences[m_current_frame_index], wait_stages); @@ -95,6 +98,7 @@ namespace mlx void Renderer::CreateSwapchain() { + MLX_PROFILE_FUNCTION(); Vec2ui drawable_size = p_window->GetVulkanDrawableSize(); VkExtent2D extent = { drawable_size.x, drawable_size.y }; m_swapchain = kvfCreateSwapchainKHR(RenderCore::Get().GetDevice(), RenderCore::Get().GetPhysicalDevice(), m_surface, extent, false); @@ -105,7 +109,11 @@ namespace mlx RenderCore::Get().vkGetSwapchainImagesKHR(RenderCore::Get().GetDevice(), m_swapchain, &images_count, tmp.data()); for(std::size_t i = 0; i < images_count; i++) { - m_swapchain_images[i].Init(tmp[i], kvfGetSwapchainImagesFormat(m_swapchain), extent.width, extent.height); + #ifdef DEBUG + m_swapchain_images[i].Init(tmp[i], kvfGetSwapchainImagesFormat(m_swapchain), extent.width, extent.height, VK_IMAGE_LAYOUT_UNDEFINED, "mlx_swapchain_image_" + std::to_string(i)); + #else + m_swapchain_images[i].Init(tmp[i], kvfGetSwapchainImagesFormat(m_swapchain), extent.width, extent.height, VK_IMAGE_LAYOUT_UNDEFINED, {}); + #endif m_swapchain_images[i].TransitionLayout(VK_IMAGE_LAYOUT_PRESENT_SRC_KHR); m_swapchain_images[i].CreateImageView(VK_IMAGE_VIEW_TYPE_2D, VK_IMAGE_ASPECT_COLOR_BIT); } @@ -114,6 +122,7 @@ namespace mlx void Renderer::DestroySwapchain() { + MLX_PROFILE_FUNCTION(); RenderCore::Get().WaitDeviceIdle(); for(Image& img : m_swapchain_images) img.DestroyImageView(); @@ -123,6 +132,7 @@ namespace mlx void Renderer::Destroy() noexcept { + MLX_PROFILE_FUNCTION(); RenderCore::Get().WaitDeviceIdle(); for(std::size_t i = 0; i < MAX_FRAMES_IN_FLIGHT; i++) diff --git a/runtime/Sources/Renderer/SceneRenderer.cpp b/runtime/Sources/Renderer/SceneRenderer.cpp index 8c176a0..09903cb 100644 --- a/runtime/Sources/Renderer/SceneRenderer.cpp +++ b/runtime/Sources/Renderer/SceneRenderer.cpp @@ -8,16 +8,19 @@ namespace mlx { void SceneRenderer::Init() { + MLX_PROFILE_FUNCTION(); m_passes.Init(); } void SceneRenderer::Render(Scene& scene, Renderer& renderer) { + MLX_PROFILE_FUNCTION(); m_passes.Pass(scene, renderer); } void SceneRenderer::Destroy() { + MLX_PROFILE_FUNCTION(); m_passes.Destroy(); } } diff --git a/third_party/kvf.h b/third_party/kvf.h index 6b6fcc6..164466e 100755 --- a/third_party/kvf.h +++ b/third_party/kvf.h @@ -93,6 +93,7 @@ typedef void (*KvfErrorCallback)(const char* message); typedef struct KvfGraphicsPipelineBuilder KvfGraphicsPipelineBuilder; void kvfSetErrorCallback(KvfErrorCallback callback); +void 
kvfSetWarningCallback(KvfErrorCallback callback); void kvfSetValidationErrorCallback(KvfErrorCallback callback); void kvfSetValidationWarningCallback(KvfErrorCallback callback); @@ -469,6 +470,7 @@ size_t __kvf_internal_framebuffers_capacity = 0; #endif KvfErrorCallback __kvf_error_callback = NULL; +KvfErrorCallback __kvf_warning_callback = NULL; KvfErrorCallback __kvf_validation_error_callback = NULL; KvfErrorCallback __kvf_validation_warning_callback = NULL; @@ -479,7 +481,7 @@ KvfErrorCallback __kvf_validation_warning_callback = NULL; void __kvfCheckVk(VkResult result, const char* function) { - if(result != VK_SUCCESS) + if(result < VK_SUCCESS) { if(__kvf_error_callback != NULL) { @@ -493,6 +495,17 @@ void __kvfCheckVk(VkResult result, const char* function) exit(EXIT_FAILURE); #endif } + else if(result > VK_SUCCESS) + { + if(__kvf_warning_callback != NULL) + { + char buffer[1024]; + snprintf(buffer, 1024, "KVF Vulkan warning in '%s': %s", function, kvfVerbaliseVkResult(result)); + __kvf_warning_callback(buffer); + return; + } + printf("KVF Vulkan warning in '%s': %s\n", function, kvfVerbaliseVkResult(result)); + } } #undef __kvfCheckVk @@ -817,6 +830,11 @@ void kvfSetErrorCallback(KvfErrorCallback callback) __kvf_error_callback = callback; } +void kvfSetWarningCallback(KvfErrorCallback callback) +{ + __kvf_warning_callback = callback; +} + void kvfSetValidationErrorCallback(KvfErrorCallback callback) { __kvf_validation_error_callback = callback; From c718915927523b52f3f21afc004cd8941766d144 Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Sat, 21 Sep 2024 12:56:21 +0200 Subject: [PATCH 038/131] fixing release compilation issue --- runtime/Includes/Core/Graphics.inl | 2 +- runtime/Includes/Renderer/Buffer.h | 10 ++++---- runtime/Includes/Renderer/Image.h | 8 +++--- runtime/Includes/Renderer/RenderCore.h | 10 +++----- runtime/Sources/Graphics/PutPixelManager.cpp | 2 +- runtime/Sources/Renderer/Buffer.cpp | 6 ++--- runtime/Sources/Renderer/Image.cpp | 27 ++++++++++---------- runtime/Sources/Renderer/Memory.cpp | 10 +++----- 8 files changed, 36 insertions(+), 39 deletions(-) diff --git a/runtime/Includes/Core/Graphics.inl b/runtime/Includes/Core/Graphics.inl index 5ffa9f9..c0f4c02 100644 --- a/runtime/Includes/Core/Graphics.inl +++ b/runtime/Includes/Core/Graphics.inl @@ -39,10 +39,10 @@ namespace mlx { Sprite& new_sprite = p_scene->CreateSprite(texture); new_sprite.SetPosition(Vec3f{ static_cast(x), static_cast(y), static_cast(m_current_depth) }); - m_current_depth++; } else sprite->SetPosition(Vec3f{ static_cast(x), static_cast(y), static_cast(m_current_depth) }); + m_current_depth++; } void GraphicsSupport::LoadFont(const std::filesystem::path& filepath, float scale) diff --git a/runtime/Includes/Renderer/Buffer.h b/runtime/Includes/Renderer/Buffer.h index 27e7716..0c4dcd1 100644 --- a/runtime/Includes/Renderer/Buffer.h +++ b/runtime/Includes/Renderer/Buffer.h @@ -12,7 +12,7 @@ namespace mlx public: GPUBuffer() = default; - void Init(BufferType type, VkDeviceSize size, VkBufferUsageFlags usage, CPUBuffer data, std::string_view debug_name); + void Init(BufferType type, VkDeviceSize size, VkBufferUsageFlags usage, CPUBuffer data, [[maybe_unused]] std::string_view debug_name); void Destroy() noexcept; bool CopyFrom(const GPUBuffer& buffer) noexcept; @@ -43,7 +43,7 @@ namespace mlx void* p_map = nullptr; private: - void CreateBuffer(VkDeviceSize size, VkBufferUsageFlags usage, VmaAllocationCreateInfo alloc_info, std::string_view debug_name); + void CreateBuffer(VkDeviceSize size, 
VkBufferUsageFlags usage, VmaAllocationCreateInfo alloc_info, [[maybe_unused]] std::string_view debug_name); private: VkBufferUsageFlags m_usage = 0; @@ -52,7 +52,7 @@ namespace mlx class VertexBuffer : public GPUBuffer { public: - inline void Init(std::uint32_t size, VkBufferUsageFlags additional_flags, std::string_view debug_name) { GPUBuffer::Init(BufferType::LowDynamic, size, VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | additional_flags, {}, std::move(debug_name)); } + inline void Init(std::uint32_t size, VkBufferUsageFlags additional_flags, [[maybe_unused]] std::string_view debug_name) { GPUBuffer::Init(BufferType::LowDynamic, size, VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | additional_flags, {}, std::move(debug_name)); } void SetData(CPUBuffer data); inline void Bind(VkCommandBuffer cmd) const noexcept { VkDeviceSize offset = 0; RenderCore::Get().vkCmdBindVertexBuffers(cmd, 0, 1, &m_buffer, &offset); } }; @@ -60,7 +60,7 @@ namespace mlx class IndexBuffer : public GPUBuffer { public: - inline void Init(std::uint32_t size, VkBufferUsageFlags additional_flags, std::string_view debug_name) { GPUBuffer::Init(BufferType::LowDynamic, size, VK_BUFFER_USAGE_INDEX_BUFFER_BIT | additional_flags, {}, std::move(debug_name)); } + inline void Init(std::uint32_t size, VkBufferUsageFlags additional_flags, [[maybe_unused]] std::string_view debug_name) { GPUBuffer::Init(BufferType::LowDynamic, size, VK_BUFFER_USAGE_INDEX_BUFFER_BIT | additional_flags, {}, std::move(debug_name)); } void SetData(CPUBuffer data); inline void Bind(VkCommandBuffer cmd) const noexcept { RenderCore::Get().vkCmdBindIndexBuffer(cmd, m_buffer, 0, VK_INDEX_TYPE_UINT32); } }; @@ -68,7 +68,7 @@ namespace mlx class UniformBuffer { public: - void Init(std::uint32_t size, std::string_view debug_name); + void Init(std::uint32_t size, [[maybe_unused]] std::string_view debug_name); void SetData(CPUBuffer data, std::size_t frame_index); void Destroy() noexcept; diff --git a/runtime/Includes/Renderer/Image.h b/runtime/Includes/Renderer/Image.h index ae070d7..fe4768e 100644 --- a/runtime/Includes/Renderer/Image.h +++ b/runtime/Includes/Renderer/Image.h @@ -14,7 +14,7 @@ namespace mlx public: Image() = default; - inline void Init(VkImage image, VkFormat format, std::uint32_t width, std::uint32_t height, VkImageLayout layout, std::string_view debug_name) noexcept + inline void Init(VkImage image, VkFormat format, std::uint32_t width, std::uint32_t height, VkImageLayout layout, [[maybe_unused]] std::string_view debug_name) noexcept { m_image = image; m_format = format; @@ -71,7 +71,7 @@ namespace mlx { public: DepthImage() = default; - inline void Init(std::uint32_t width, std::uint32_t height, bool is_multisampled, std::string_view debug_name) + inline void Init(std::uint32_t width, std::uint32_t height, bool is_multisampled, [[maybe_unused]] std::string_view debug_name) { MLX_PROFILE_FUNCTION(); std::vector candidates = { VK_FORMAT_D32_SFLOAT, VK_FORMAT_D32_SFLOAT_S8_UINT, VK_FORMAT_D24_UNORM_S8_UINT }; @@ -87,12 +87,12 @@ namespace mlx { public: Texture() = default; - Texture(CPUBuffer pixels, std::uint32_t width, std::uint32_t height, VkFormat format, bool is_multisampled, std::string_view debug_name) + Texture(CPUBuffer pixels, std::uint32_t width, std::uint32_t height, VkFormat format, bool is_multisampled, [[maybe_unused]] std::string_view debug_name) { Init(std::move(pixels), width, height, format, is_multisampled, std::move(debug_name)); } - void Init(CPUBuffer pixels, std::uint32_t width, std::uint32_t height, VkFormat format, bool is_multisampled, 
std::string_view debug_name); + void Init(CPUBuffer pixels, std::uint32_t width, std::uint32_t height, VkFormat format, bool is_multisampled, [[maybe_unused]] std::string_view debug_name); void SetPixel(int x, int y, std::uint32_t color) noexcept; int GetPixel(int x, int y) noexcept; diff --git a/runtime/Includes/Renderer/RenderCore.h b/runtime/Includes/Renderer/RenderCore.h index f17b8d6..97f764a 100644 --- a/runtime/Includes/Renderer/RenderCore.h +++ b/runtime/Includes/Renderer/RenderCore.h @@ -7,6 +7,10 @@ namespace mlx { constexpr const int MAX_FRAMES_IN_FLIGHT = 3; + #if defined(DEBUG) && defined(VK_EXT_debug_utils) + #define MLX_HAS_DEBUG_UTILS_FUNCTIONS + #endif + class RenderCore { public: @@ -31,12 +35,6 @@ namespace mlx #undef MLX_VULKAN_INSTANCE_FUNCTION #undef MLX_VULKAN_DEVICE_FUNCTION - #if defined(DEBUG) && defined(VK_EXT_debug_utils) - inline static constexpr bool HAS_DEBUG_UTILS_FUNCTIONS = true; - #else - inline static constexpr bool HAS_DEBUG_UTILS_FUNCTIONS = false; - #endif - ~RenderCore(); private: diff --git a/runtime/Sources/Graphics/PutPixelManager.cpp b/runtime/Sources/Graphics/PutPixelManager.cpp index 67f42dd..b4727cf 100644 --- a/runtime/Sources/Graphics/PutPixelManager.cpp +++ b/runtime/Sources/Graphics/PutPixelManager.cpp @@ -13,7 +13,7 @@ namespace mlx #ifdef DEBUG auto res = m_textures.try_emplace(z, CPUBuffer{}, swapchain_extent.width, swapchain_extent.height, VK_FORMAT_R8G8B8A8_SRGB, false, "mlx_put_pixel_layer_" + std::to_string(z)); #else - auto res = m_textures.try_emplace(z, CPUBuffer{}, swapchain_extent.width, swapchain_extent.height, VK_FORMAT_R8G8B8A8_SRGB, false, {}); + auto res = m_textures.try_emplace(z, CPUBuffer{}, swapchain_extent.width, swapchain_extent.height, VK_FORMAT_R8G8B8A8_SRGB, false, std::string_view{}); #endif res.first->second.SetPixel(x, y, color); return (res.second ? 
&res.first->second : nullptr); diff --git a/runtime/Sources/Renderer/Buffer.cpp b/runtime/Sources/Renderer/Buffer.cpp index b1cf50a..250bbd6 100644 --- a/runtime/Sources/Renderer/Buffer.cpp +++ b/runtime/Sources/Renderer/Buffer.cpp @@ -4,7 +4,7 @@ namespace mlx { - void GPUBuffer::Init(BufferType type, VkDeviceSize size, VkBufferUsageFlags usage, CPUBuffer data, std::string_view debug_name) + void GPUBuffer::Init(BufferType type, VkDeviceSize size, VkBufferUsageFlags usage, CPUBuffer data, [[maybe_unused]] std::string_view debug_name) { MLX_PROFILE_FUNCTION(); VmaAllocationCreateInfo alloc_info{}; @@ -36,7 +36,7 @@ namespace mlx PushToGPU(); } - void GPUBuffer::CreateBuffer(VkDeviceSize size, VkBufferUsageFlags usage, VmaAllocationCreateInfo alloc_info, std::string_view debug_name) + void GPUBuffer::CreateBuffer(VkDeviceSize size, VkBufferUsageFlags usage, VmaAllocationCreateInfo alloc_info, [[maybe_unused]] std::string_view debug_name) { MLX_PROFILE_FUNCTION(); VkBufferCreateInfo bufferInfo{}; @@ -183,7 +183,7 @@ namespace mlx staging.Destroy(); } - void UniformBuffer::Init(std::uint32_t size, std::string_view debug_name) + void UniformBuffer::Init(std::uint32_t size, [[maybe_unused]] std::string_view debug_name) { MLX_PROFILE_FUNCTION(); for(int i = 0; i < MAX_FRAMES_IN_FLIGHT; i++) diff --git a/runtime/Sources/Renderer/Image.cpp b/runtime/Sources/Renderer/Image.cpp index 61b778b..d74c643 100644 --- a/runtime/Sources/Renderer/Image.cpp +++ b/runtime/Sources/Renderer/Image.cpp @@ -15,7 +15,7 @@ namespace mlx { - void Image::Init(ImageType type, std::uint32_t width, std::uint32_t height, VkFormat format, VkImageTiling tiling, VkImageUsageFlags usage, bool is_multisampled, std::string_view debug_name) + void Image::Init(ImageType type, std::uint32_t width, std::uint32_t height, VkFormat format, VkImageTiling tiling, VkImageUsageFlags usage, bool is_multisampled, [[maybe_unused]] std::string_view debug_name) { MLX_PROFILE_FUNCTION(); m_type = type; @@ -56,16 +56,13 @@ namespace mlx { MLX_PROFILE_FUNCTION(); m_image_view = kvfCreateImageView(RenderCore::Get().GetDevice(), m_image, m_format, type, aspect_flags, layer_count); - #ifdef DEBUG - if constexpr(RenderCore::HAS_DEBUG_UTILS_FUNCTIONS) - { - VkDebugUtilsObjectNameInfoEXT name_info{}; - name_info.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT; - name_info.objectType = VK_OBJECT_TYPE_IMAGE_VIEW; - name_info.objectHandle = reinterpret_cast(m_image_view); - name_info.pObjectName = m_debug_name.c_str(); - RenderCore::Get().vkSetDebugUtilsObjectNameEXT(RenderCore::Get().GetDevice(), &name_info); - } + #ifdef MLX_HAS_DEBUG_UTILS_FUNCTIONS + VkDebugUtilsObjectNameInfoEXT name_info{}; + name_info.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT; + name_info.objectType = VK_OBJECT_TYPE_IMAGE_VIEW; + name_info.objectHandle = reinterpret_cast(m_image_view); + name_info.pObjectName = m_debug_name.c_str(); + RenderCore::Get().vkSetDebugUtilsObjectNameEXT(RenderCore::Get().GetDevice(), &name_info); #endif } @@ -154,7 +151,7 @@ namespace mlx m_image = VK_NULL_HANDLE; } - void Texture::Init(CPUBuffer pixels, std::uint32_t width, std::uint32_t height, VkFormat format, bool is_multisampled, std::string_view debug_name) + void Texture::Init(CPUBuffer pixels, std::uint32_t width, std::uint32_t height, VkFormat format, bool is_multisampled, [[maybe_unused]] std::string_view debug_name) { MLX_PROFILE_FUNCTION(); Image::Init(ImageType::Color, width, height, format, VK_IMAGE_TILING_OPTIMAL, VK_IMAGE_USAGE_TRANSFER_DST_BIT | 
VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, is_multisampled, std::move(debug_name)); @@ -229,7 +226,11 @@ namespace mlx #endif m_staging_buffer.emplace(); std::size_t size = m_width * m_height * kvfFormatSize(m_format); - m_staging_buffer->Init(BufferType::Staging, size, VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT, {}, m_debug_name); + #ifdef DEBUG + m_staging_buffer->Init(BufferType::Staging, size, VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT, {}, m_debug_name); + #else + m_staging_buffer->Init(BufferType::Staging, size, VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT, {}, {}); + #endif VkImageLayout old_layout = m_layout; VkCommandBuffer cmd = kvfCreateCommandBuffer(RenderCore::Get().GetDevice()); diff --git a/runtime/Sources/Renderer/Memory.cpp b/runtime/Sources/Renderer/Memory.cpp index 01e1a5f..92dcb55 100644 --- a/runtime/Sources/Renderer/Memory.cpp +++ b/runtime/Sources/Renderer/Memory.cpp @@ -64,15 +64,14 @@ namespace mlx kvfCheckVk(vmaCreateBuffer(m_allocator, binfo, vinfo, &buffer, &allocation, nullptr)); if(name != nullptr) { - if constexpr(RenderCore::HAS_DEBUG_UTILS_FUNCTIONS) - { + #ifdef MLX_HAS_DEBUG_UTILS_FUNCTIONS VkDebugUtilsObjectNameInfoEXT name_info{}; name_info.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT; name_info.objectType = VK_OBJECT_TYPE_BUFFER; name_info.objectHandle = reinterpret_cast(buffer); name_info.pObjectName = name; RenderCore::Get().vkSetDebugUtilsObjectNameEXT(RenderCore::Get().GetDevice(), &name_info); - } + #endif vmaSetAllocationName(m_allocator, allocation, name); } DebugLog("Graphics Allocator : created new buffer '%'", name); @@ -99,15 +98,14 @@ namespace mlx kvfCheckVk(vmaCreateImage(m_allocator, iminfo, vinfo, &image, &allocation, nullptr)); if(name != nullptr) { - if constexpr(RenderCore::HAS_DEBUG_UTILS_FUNCTIONS) - { + #ifdef MLX_HAS_DEBUG_UTILS_FUNCTIONS VkDebugUtilsObjectNameInfoEXT name_info{}; name_info.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT; name_info.objectType = VK_OBJECT_TYPE_IMAGE; name_info.objectHandle = reinterpret_cast(image); name_info.pObjectName = name; RenderCore::Get().vkSetDebugUtilsObjectNameEXT(RenderCore::Get().GetDevice(), &name_info); - } + #endif vmaSetAllocationName(m_allocator, allocation, name); } DebugLog("Graphics Allocator : created new image '%'", name); From cf773b586e7772ee6faa15a423478ebb86b162d6 Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Sun, 22 Sep 2024 21:39:20 +0200 Subject: [PATCH 039/131] adding shaders compilation in makefile --- Makefile | 34 +++- runtime/Includes/Core/Graphics.inl | 4 +- runtime/Includes/Embedded/2DVertex.spv.h | 145 ++++++++---------- .../Includes/Embedded/ScreenFragment.spv.h | 78 ++++------ runtime/Includes/Embedded/ScreenVertex.spv.h | 38 ++--- runtime/Includes/Graphics/Sprite.h | 10 +- runtime/Includes/Renderer/Descriptor.h | 46 +++++- runtime/Includes/Renderer/Image.h | 2 +- .../Includes/Renderer/Pipelines/Graphics.h | 5 +- .../Includes/Renderer/RenderPasses/2DPass.h | 2 +- .../Renderer/RenderPasses/FinalPass.h | 2 +- .../Includes/Renderer/RenderPasses/Passes.h | 2 +- runtime/Includes/Renderer/Renderer.h | 3 + runtime/Includes/Renderer/ScenesRenderer.h | 2 +- runtime/Sources/Core/Graphics.cpp | 4 +- runtime/Sources/Graphics/Scene.cpp | 8 +- runtime/Sources/Graphics/Sprite.cpp | 7 +- runtime/Sources/Renderer/Buffer.cpp | 3 +- runtime/Sources/Renderer/Descriptor.cpp | 101 ++++++++++-- runtime/Sources/Renderer/Image.cpp | 
7 +- .../Sources/Renderer/Pipelines/Graphics.cpp | 36 ++++- .../Sources/Renderer/RenderPasses/2DPass.cpp | 33 ++-- .../Renderer/RenderPasses/FinalPass.cpp | 14 +- .../Sources/Renderer/RenderPasses/Passes.cpp | 54 ++++--- runtime/Sources/Renderer/Renderer.cpp | 10 +- runtime/Sources/Renderer/SceneRenderer.cpp | 4 +- third_party/kvf.h | 2 +- 27 files changed, 415 insertions(+), 241 deletions(-) diff --git a/Makefile b/Makefile index 890e9a4..7617caa 100644 --- a/Makefile +++ b/Makefile @@ -16,15 +16,23 @@ SRCS = $(wildcard $(addsuffix /*.cpp, runtime/Sources/Core)) SRCS += $(wildcard $(addsuffix /*.cpp, runtime/Sources/Graphics)) SRCS += $(wildcard $(addsuffix /*.cpp, runtime/Sources/Platform)) SRCS += $(wildcard $(addsuffix /*.cpp, runtime/Sources/Renderer)) -SRCS += $(wildcard $(addsuffix /*.cpp, runtime/Sources/Renderer/**)) +SRCS += $(wildcard $(addsuffix /*.cpp, runtime/Sources/Renderer/Vulkan)) +SRCS += $(wildcard $(addsuffix /*.cpp, runtime/Sources/Renderer/Pipelines)) +SRCS += $(wildcard $(addsuffix /*.cpp, runtime/Sources/Renderer/RenderPasses)) OBJ_DIR = objs/make/$(shell echo $(OS) | tr '[:upper:]' '[:lower:]') OBJS := $(addprefix $(OBJ_DIR)/, $(SRCS:.cpp=.o)) +SHADERS_DIR = runtime/Includes/Embedded +SHADERS_SRCS = $(wildcard $(addsuffix /*.nzsl, $(SHADERS_DIR))) +SPVS = $(SHADERS_SRCS:.nzsl=.spv.h) + CXX = clang++ CXXFLAGS = -std=c++20 -O3 -fPIC -Wall -Wextra -DSDL_MAIN_HANDLED INCLUDES = -I./includes -I./runtime/Includes -I./runtime/Sources -I./third_party +NZSLC = nzslc + ifeq ($(TOOLCHAIN), gcc) CXX = g++ CXXFLAGS += -Wno-error=cpp @@ -93,6 +101,9 @@ endif OBJS_TOTAL = $(words $(OBJS)) N_OBJS := $(shell find $(OBJ_DIR) -type f -name '*.o' 2>/dev/null | wc -l) OBJS_TOTAL := $(shell echo $$(( $(OBJS_TOTAL) - $(N_OBJS) ))) +ifeq ($(OBJS_TOTAL), 0) # To avoid division per 0 + OBJS_TOTAL := 1 +endif CURR_OBJ = 0 $(OBJ_DIR)/%.o: %.cpp @@ -102,6 +113,20 @@ $(OBJ_DIR)/%.o: %.cpp @printf "$(COLOR)($(_BOLD)%3s%%$(_RESET)$(COLOR)) $(_RESET)Compiling $(_BOLD)$<$(_RESET)\n" "$(PERCENT)" @$(CXX) $(CXXFLAGS) $(INCLUDES) -c $< -o $@ +SPVS_TOTAL = $(words $(SPVS)) +N_SPVS := $(shell find $(SHADERS_DIR) -type f -name '*.spv.h' 2>/dev/null | wc -l) +SPVS_TOTAL := $(shell echo $$(( $(SPVS_TOTAL) - $(N_SPVS) ))) +ifeq ($(SPVS_TOTAL), 0) # Same + SPVS_TOTAL := 1 +endif +CURR_SPV = 0 + +%.spv.h: %.nzsl + @$(eval CURR_SPV=$(shell echo $$(( $(CURR_SPV) + 1 )))) + @$(eval PERCENT=$(shell echo $$(( $(CURR_SPV) * 100 / $(SPVS_TOTAL) )))) + @printf "$(COLOR)($(_BOLD)%3s%%$(_RESET)$(COLOR)) $(_RESET)Compiling $(_BOLD)$<$(_RESET)\n" "$(PERCENT)" + @$(NZSLC) --compile=spv-header $< -o $(SHADERS_DIR) --optimize + all: _printbuildinfos @$(MAKE) $(NAME) @@ -116,6 +141,11 @@ _printbuildinfos: debug: @$(MAKE) all DEBUG=true -j$(shell nproc) +clean-shaders: + @$(RM) $(SPVS) + +shaders: clean-shaders $(SPVS) + clean: @$(RM) $(OBJ_DIR) @printf "Cleaned $(_BOLD)$(OBJ_DIR)$(_RESET)\n" @@ -127,4 +157,4 @@ fclean: clean re: fclean _printbuildinfos @$(MAKE) $(NAME) -.PHONY: all clean debug fclean re +.PHONY: all clean debug shaders clean-shaders fclean re diff --git a/runtime/Includes/Core/Graphics.inl b/runtime/Includes/Core/Graphics.inl index c0f4c02..33b0df6 100644 --- a/runtime/Includes/Core/Graphics.inl +++ b/runtime/Includes/Core/Graphics.inl @@ -14,12 +14,14 @@ namespace mlx void GraphicsSupport::PixelPut(int x, int y, std::uint32_t color) noexcept { MLX_PROFILE_FUNCTION(); + /* NonOwningPtr texture = m_put_pixel_manager.DrawPixel(x, y, m_current_depth, color); if(texture) { Sprite& new_sprite = 
p_scene->CreateSprite(texture); new_sprite.SetPosition(Vec3f{ 0.0f, 0.0f, static_cast(m_current_depth) }); } + */ } void GraphicsSupport::StringPut(int x, int y, std::uint32_t color, std::string str) @@ -39,10 +41,10 @@ namespace mlx { Sprite& new_sprite = p_scene->CreateSprite(texture); new_sprite.SetPosition(Vec3f{ static_cast(x), static_cast(y), static_cast(m_current_depth) }); + m_current_depth++; } else sprite->SetPosition(Vec3f{ static_cast(x), static_cast(y), static_cast(m_current_depth) }); - m_current_depth++; } void GraphicsSupport::LoadFont(const std::filesystem::path& filepath, float scale) diff --git a/runtime/Includes/Embedded/2DVertex.spv.h b/runtime/Includes/Embedded/2DVertex.spv.h index 5a4f7f8..ac303d2 100644 --- a/runtime/Includes/Embedded/2DVertex.spv.h +++ b/runtime/Includes/Embedded/2DVertex.spv.h @@ -1,77 +1,68 @@ -3,2,35,7,0,0,1,0,39,0,0,0,73,0,0,0,0,0,0,0,17,0,2,0,1,0,0,0,14,0, -3,0,0,0,0,0,1,0,0,0,15,0,12,0,0,0,0,0,36,0,0,0,109,97,105,110,0,0,0,0, -13,0,0,0,17,0,0,0,19,0,0,0,23,0,0,0,29,0,0,0,31,0,0,0,32,0,0,0,3,0, -3,0,0,0,0,0,100,0,0,0,5,0,5,0,4,0,0,0,86,105,101,119,101,114,68,97,116,97,0,0, -6,0,8,0,4,0,0,0,0,0,0,0,112,114,111,106,101,99,116,105,111,110,95,109,97,116,114,105,120,0, -0,0,5,0,5,0,7,0,0,0,83,112,114,105,116,101,68,97,116,97,0,0,6,0,5,0,7,0,0,0, -0,0,0,0,99,111,108,111,114,0,0,0,6,0,6,0,7,0,0,0,1,0,0,0,112,111,115,105,116,105, -111,110,0,0,0,0,5,0,4,0,26,0,0,0,86,101,114,116,73,110,0,0,6,0,4,0,26,0,0,0, -0,0,0,0,112,111,115,0,6,0,5,0,26,0,0,0,1,0,0,0,99,111,108,111,114,0,0,0,6,0, -5,0,26,0,0,0,2,0,0,0,110,111,114,109,97,108,0,0,6,0,4,0,26,0,0,0,3,0,0,0, -117,118,0,0,5,0,4,0,33,0,0,0,86,101,114,116,79,117,116,0,6,0,5,0,33,0,0,0,0,0, -0,0,99,111,108,111,114,0,0,0,6,0,4,0,33,0,0,0,1,0,0,0,117,118,0,0,6,0,4,0, -33,0,0,0,2,0,0,0,112,111,115,0,5,0,5,0,6,0,0,0,118,105,101,119,101,114,95,100,97,116, -97,0,5,0,4,0,9,0,0,0,109,111,100,101,108,0,0,0,5,0,3,0,13,0,0,0,112,111,115,0, -5,0,4,0,17,0,0,0,99,111,108,111,114,0,0,0,5,0,4,0,19,0,0,0,110,111,114,109,97,108, -0,0,5,0,3,0,23,0,0,0,117,118,0,0,5,0,4,0,29,0,0,0,99,111,108,111,114,0,0,0, -5,0,3,0,31,0,0,0,117,118,0,0,5,0,5,0,32,0,0,0,112,111,115,105,116,105,111,110,0,0, -0,0,5,0,4,0,36,0,0,0,109,97,105,110,0,0,0,0,71,0,4,0,6,0,0,0,33,0,0,0, -0,0,0,0,71,0,4,0,6,0,0,0,34,0,0,0,0,0,0,0,71,0,4,0,32,0,0,0,11,0, -0,0,0,0,0,0,71,0,4,0,13,0,0,0,30,0,0,0,0,0,0,0,71,0,4,0,17,0,0,0, -30,0,0,0,1,0,0,0,71,0,4,0,19,0,0,0,30,0,0,0,2,0,0,0,71,0,4,0,23,0, -0,0,30,0,0,0,3,0,0,0,71,0,4,0,29,0,0,0,30,0,0,0,0,0,0,0,71,0,4,0, -31,0,0,0,30,0,0,0,1,0,0,0,71,0,3,0,4,0,0,0,2,0,0,0,72,0,4,0,4,0, -0,0,0,0,0,0,5,0,0,0,72,0,5,0,4,0,0,0,0,0,0,0,7,0,0,0,16,0,0,0, -72,0,5,0,4,0,0,0,0,0,0,0,35,0,0,0,0,0,0,0,71,0,3,0,7,0,0,0,2,0, -0,0,72,0,5,0,7,0,0,0,0,0,0,0,35,0,0,0,0,0,0,0,72,0,5,0,7,0,0,0, -1,0,0,0,35,0,0,0,16,0,0,0,72,0,5,0,26,0,0,0,0,0,0,0,35,0,0,0,0,0, -0,0,72,0,5,0,26,0,0,0,1,0,0,0,35,0,0,0,16,0,0,0,72,0,5,0,26,0,0,0, -2,0,0,0,35,0,0,0,32,0,0,0,72,0,5,0,26,0,0,0,3,0,0,0,35,0,0,0,48,0, -0,0,72,0,5,0,33,0,0,0,0,0,0,0,35,0,0,0,0,0,0,0,72,0,5,0,33,0,0,0, -1,0,0,0,35,0,0,0,16,0,0,0,72,0,5,0,33,0,0,0,2,0,0,0,35,0,0,0,32,0, -0,0,22,0,3,0,1,0,0,0,32,0,0,0,23,0,4,0,2,0,0,0,1,0,0,0,4,0,0,0, -24,0,4,0,3,0,0,0,2,0,0,0,4,0,0,0,30,0,3,0,4,0,0,0,3,0,0,0,32,0, -4,0,5,0,0,0,2,0,0,0,4,0,0,0,30,0,4,0,7,0,0,0,2,0,0,0,2,0,0,0, -32,0,4,0,8,0,0,0,9,0,0,0,7,0,0,0,19,0,2,0,10,0,0,0,33,0,3,0,11,0, -0,0,10,0,0,0,32,0,4,0,12,0,0,0,1,0,0,0,2,0,0,0,21,0,4,0,14,0,0,0, -32,0,0,0,1,0,0,0,43,0,4,0,14,0,0,0,15,0,0,0,0,0,0,0,32,0,4,0,16,0, 
-0,0,7,0,0,0,2,0,0,0,43,0,4,0,14,0,0,0,18,0,0,0,1,0,0,0,43,0,4,0, -14,0,0,0,20,0,0,0,2,0,0,0,23,0,4,0,21,0,0,0,1,0,0,0,2,0,0,0,32,0, -4,0,22,0,0,0,1,0,0,0,21,0,0,0,43,0,4,0,14,0,0,0,24,0,0,0,3,0,0,0, -32,0,4,0,25,0,0,0,7,0,0,0,21,0,0,0,30,0,6,0,26,0,0,0,2,0,0,0,2,0, -0,0,2,0,0,0,21,0,0,0,32,0,4,0,27,0,0,0,7,0,0,0,26,0,0,0,32,0,4,0, -28,0,0,0,3,0,0,0,2,0,0,0,32,0,4,0,30,0,0,0,3,0,0,0,21,0,0,0,30,0, -5,0,33,0,0,0,2,0,0,0,21,0,0,0,2,0,0,0,43,0,4,0,1,0,0,0,34,0,0,0, -0,0,128,63,32,0,4,0,35,0,0,0,7,0,0,0,33,0,0,0,32,0,4,0,50,0,0,0,7,0, -0,0,1,0,0,0,32,0,4,0,55,0,0,0,9,0,0,0,2,0,0,0,32,0,4,0,59,0,0,0, -2,0,0,0,3,0,0,0,59,0,4,0,5,0,0,0,6,0,0,0,2,0,0,0,59,0,4,0,8,0, -0,0,9,0,0,0,9,0,0,0,59,0,4,0,12,0,0,0,13,0,0,0,1,0,0,0,59,0,4,0, -12,0,0,0,17,0,0,0,1,0,0,0,59,0,4,0,12,0,0,0,19,0,0,0,1,0,0,0,59,0, -4,0,22,0,0,0,23,0,0,0,1,0,0,0,59,0,4,0,28,0,0,0,29,0,0,0,3,0,0,0, -59,0,4,0,30,0,0,0,31,0,0,0,3,0,0,0,59,0,4,0,28,0,0,0,32,0,0,0,3,0, -0,0,54,0,5,0,10,0,0,0,36,0,0,0,0,0,0,0,11,0,0,0,248,0,2,0,37,0,0,0, -59,0,4,0,35,0,0,0,38,0,0,0,7,0,0,0,59,0,4,0,27,0,0,0,39,0,0,0,7,0, -0,0,65,0,5,0,16,0,0,0,40,0,0,0,39,0,0,0,15,0,0,0,63,0,3,0,40,0,0,0, -13,0,0,0,65,0,5,0,16,0,0,0,41,0,0,0,39,0,0,0,18,0,0,0,63,0,3,0,41,0, -0,0,17,0,0,0,65,0,5,0,16,0,0,0,42,0,0,0,39,0,0,0,20,0,0,0,63,0,3,0, -42,0,0,0,19,0,0,0,65,0,5,0,25,0,0,0,43,0,0,0,39,0,0,0,24,0,0,0,63,0, -3,0,43,0,0,0,23,0,0,0,65,0,5,0,25,0,0,0,44,0,0,0,39,0,0,0,24,0,0,0, -61,0,4,0,21,0,0,0,45,0,0,0,44,0,0,0,81,0,5,0,1,0,0,0,46,0,0,0,45,0, -0,0,0,0,0,0,127,0,4,0,1,0,0,0,47,0,0,0,34,0,0,0,133,0,5,0,1,0,0,0, -48,0,0,0,46,0,0,0,47,0,0,0,65,0,5,0,25,0,0,0,49,0,0,0,39,0,0,0,24,0, -0,0,65,0,5,0,50,0,0,0,51,0,0,0,49,0,0,0,15,0,0,0,62,0,3,0,51,0,0,0, -48,0,0,0,65,0,5,0,25,0,0,0,52,0,0,0,39,0,0,0,24,0,0,0,61,0,4,0,21,0, -0,0,53,0,0,0,52,0,0,0,65,0,5,0,25,0,0,0,54,0,0,0,38,0,0,0,18,0,0,0, -62,0,3,0,54,0,0,0,53,0,0,0,65,0,5,0,55,0,0,0,56,0,0,0,9,0,0,0,15,0, -0,0,61,0,4,0,2,0,0,0,57,0,0,0,56,0,0,0,65,0,5,0,16,0,0,0,58,0,0,0, -38,0,0,0,15,0,0,0,62,0,3,0,58,0,0,0,57,0,0,0,65,0,5,0,59,0,0,0,60,0, -0,0,6,0,0,0,15,0,0,0,61,0,4,0,3,0,0,0,61,0,0,0,60,0,0,0,65,0,5,0, -16,0,0,0,62,0,0,0,39,0,0,0,15,0,0,0,61,0,4,0,2,0,0,0,63,0,0,0,62,0, -0,0,65,0,5,0,55,0,0,0,64,0,0,0,9,0,0,0,18,0,0,0,61,0,4,0,2,0,0,0, -65,0,0,0,64,0,0,0,129,0,5,0,2,0,0,0,66,0,0,0,63,0,0,0,65,0,0,0,145,0, -5,0,2,0,0,0,67,0,0,0,61,0,0,0,66,0,0,0,65,0,5,0,16,0,0,0,68,0,0,0, -38,0,0,0,20,0,0,0,62,0,3,0,68,0,0,0,67,0,0,0,61,0,4,0,33,0,0,0,69,0, -0,0,38,0,0,0,81,0,5,0,2,0,0,0,70,0,0,0,69,0,0,0,0,0,0,0,62,0,3,0, -29,0,0,0,70,0,0,0,81,0,5,0,21,0,0,0,71,0,0,0,69,0,0,0,1,0,0,0,62,0, -3,0,31,0,0,0,71,0,0,0,81,0,5,0,2,0,0,0,72,0,0,0,69,0,0,0,2,0,0,0, -62,0,3,0,32,0,0,0,72,0,0,0,253,0,1,0,56,0,1,0 +3,2,35,7,0,0,1,0,39,0,0,0,67,0,0,0,0,0,0,0,17,0,2,0,1,0,0,0,14,0, +3,0,0,0,0,0,1,0,0,0,15,0,10,0,0,0,0,0,33,0,0,0,109,97,105,110,0,0,0,0, +13,0,0,0,19,0,0,0,25,0,0,0,27,0,0,0,28,0,0,0,3,0,3,0,0,0,0,0,100,0, +0,0,5,0,5,0,4,0,0,0,86,105,101,119,101,114,68,97,116,97,0,0,6,0,8,0,4,0,0,0, +0,0,0,0,112,114,111,106,101,99,116,105,111,110,95,109,97,116,114,105,120,0,0,0,5,0,5,0,7,0, +0,0,83,112,114,105,116,101,68,97,116,97,0,0,6,0,5,0,7,0,0,0,0,0,0,0,99,111,108,111, +114,0,0,0,6,0,6,0,7,0,0,0,1,0,0,0,112,111,115,105,116,105,111,110,0,0,0,0,5,0, +4,0,22,0,0,0,86,101,114,116,73,110,0,0,6,0,4,0,22,0,0,0,0,0,0,0,112,111,115,0, +6,0,4,0,22,0,0,0,1,0,0,0,117,118,0,0,5,0,4,0,29,0,0,0,86,101,114,116,79,117, +116,0,6,0,5,0,29,0,0,0,0,0,0,0,99,111,108,111,114,0,0,0,6,0,4,0,29,0,0,0, +1,0,0,0,117,118,0,0,6,0,4,0,29,0,0,0,2,0,0,0,112,111,115,0,5,0,5,0,6,0, 
+0,0,118,105,101,119,101,114,95,100,97,116,97,0,5,0,4,0,9,0,0,0,109,111,100,101,108,0,0,0, +5,0,3,0,13,0,0,0,112,111,115,0,5,0,3,0,19,0,0,0,117,118,0,0,5,0,4,0,25,0, +0,0,99,111,108,111,114,0,0,0,5,0,3,0,27,0,0,0,117,118,0,0,5,0,5,0,28,0,0,0, +112,111,115,105,116,105,111,110,0,0,0,0,5,0,4,0,33,0,0,0,109,97,105,110,0,0,0,0,71,0, +4,0,6,0,0,0,33,0,0,0,0,0,0,0,71,0,4,0,6,0,0,0,34,0,0,0,0,0,0,0, +71,0,4,0,28,0,0,0,11,0,0,0,0,0,0,0,71,0,4,0,13,0,0,0,30,0,0,0,0,0, +0,0,71,0,4,0,19,0,0,0,30,0,0,0,1,0,0,0,71,0,4,0,25,0,0,0,30,0,0,0, +0,0,0,0,71,0,4,0,27,0,0,0,30,0,0,0,1,0,0,0,71,0,3,0,4,0,0,0,2,0, +0,0,72,0,4,0,4,0,0,0,0,0,0,0,5,0,0,0,72,0,5,0,4,0,0,0,0,0,0,0, +7,0,0,0,16,0,0,0,72,0,5,0,4,0,0,0,0,0,0,0,35,0,0,0,0,0,0,0,71,0, +3,0,7,0,0,0,2,0,0,0,72,0,5,0,7,0,0,0,0,0,0,0,35,0,0,0,0,0,0,0, +72,0,5,0,7,0,0,0,1,0,0,0,35,0,0,0,16,0,0,0,72,0,5,0,22,0,0,0,0,0, +0,0,35,0,0,0,0,0,0,0,72,0,5,0,22,0,0,0,1,0,0,0,35,0,0,0,16,0,0,0, +72,0,5,0,29,0,0,0,0,0,0,0,35,0,0,0,0,0,0,0,72,0,5,0,29,0,0,0,1,0, +0,0,35,0,0,0,16,0,0,0,72,0,5,0,29,0,0,0,2,0,0,0,35,0,0,0,32,0,0,0, +22,0,3,0,1,0,0,0,32,0,0,0,23,0,4,0,2,0,0,0,1,0,0,0,4,0,0,0,24,0, +4,0,3,0,0,0,2,0,0,0,4,0,0,0,30,0,3,0,4,0,0,0,3,0,0,0,32,0,4,0, +5,0,0,0,2,0,0,0,4,0,0,0,30,0,4,0,7,0,0,0,2,0,0,0,2,0,0,0,32,0, +4,0,8,0,0,0,9,0,0,0,7,0,0,0,19,0,2,0,10,0,0,0,33,0,3,0,11,0,0,0, +10,0,0,0,32,0,4,0,12,0,0,0,1,0,0,0,2,0,0,0,21,0,4,0,14,0,0,0,32,0, +0,0,1,0,0,0,43,0,4,0,14,0,0,0,15,0,0,0,0,0,0,0,32,0,4,0,16,0,0,0, +7,0,0,0,2,0,0,0,23,0,4,0,17,0,0,0,1,0,0,0,2,0,0,0,32,0,4,0,18,0, +0,0,1,0,0,0,17,0,0,0,43,0,4,0,14,0,0,0,20,0,0,0,1,0,0,0,32,0,4,0, +21,0,0,0,7,0,0,0,17,0,0,0,30,0,4,0,22,0,0,0,2,0,0,0,17,0,0,0,32,0, +4,0,23,0,0,0,7,0,0,0,22,0,0,0,32,0,4,0,24,0,0,0,3,0,0,0,2,0,0,0, +32,0,4,0,26,0,0,0,3,0,0,0,17,0,0,0,30,0,5,0,29,0,0,0,2,0,0,0,17,0, +0,0,2,0,0,0,43,0,4,0,1,0,0,0,30,0,0,0,0,0,128,191,32,0,4,0,31,0,0,0, +7,0,0,0,29,0,0,0,43,0,4,0,14,0,0,0,32,0,0,0,2,0,0,0,32,0,4,0,44,0, +0,0,7,0,0,0,1,0,0,0,32,0,4,0,49,0,0,0,9,0,0,0,2,0,0,0,32,0,4,0, +53,0,0,0,2,0,0,0,3,0,0,0,59,0,4,0,5,0,0,0,6,0,0,0,2,0,0,0,59,0, +4,0,8,0,0,0,9,0,0,0,9,0,0,0,59,0,4,0,12,0,0,0,13,0,0,0,1,0,0,0, +59,0,4,0,18,0,0,0,19,0,0,0,1,0,0,0,59,0,4,0,24,0,0,0,25,0,0,0,3,0, +0,0,59,0,4,0,26,0,0,0,27,0,0,0,3,0,0,0,59,0,4,0,24,0,0,0,28,0,0,0, +3,0,0,0,54,0,5,0,10,0,0,0,33,0,0,0,0,0,0,0,11,0,0,0,248,0,2,0,34,0, +0,0,59,0,4,0,31,0,0,0,35,0,0,0,7,0,0,0,59,0,4,0,23,0,0,0,36,0,0,0, +7,0,0,0,65,0,5,0,16,0,0,0,37,0,0,0,36,0,0,0,15,0,0,0,63,0,3,0,37,0, +0,0,13,0,0,0,65,0,5,0,21,0,0,0,38,0,0,0,36,0,0,0,20,0,0,0,63,0,3,0, +38,0,0,0,19,0,0,0,65,0,5,0,21,0,0,0,39,0,0,0,36,0,0,0,20,0,0,0,61,0, +4,0,17,0,0,0,40,0,0,0,39,0,0,0,81,0,5,0,1,0,0,0,41,0,0,0,40,0,0,0, +0,0,0,0,133,0,5,0,1,0,0,0,42,0,0,0,41,0,0,0,30,0,0,0,65,0,5,0,21,0, +0,0,43,0,0,0,36,0,0,0,20,0,0,0,65,0,5,0,44,0,0,0,45,0,0,0,43,0,0,0, +15,0,0,0,62,0,3,0,45,0,0,0,42,0,0,0,65,0,5,0,21,0,0,0,46,0,0,0,36,0, +0,0,20,0,0,0,61,0,4,0,17,0,0,0,47,0,0,0,46,0,0,0,65,0,5,0,21,0,0,0, +48,0,0,0,35,0,0,0,20,0,0,0,62,0,3,0,48,0,0,0,47,0,0,0,65,0,5,0,49,0, +0,0,50,0,0,0,9,0,0,0,15,0,0,0,61,0,4,0,2,0,0,0,51,0,0,0,50,0,0,0, +65,0,5,0,16,0,0,0,52,0,0,0,35,0,0,0,15,0,0,0,62,0,3,0,52,0,0,0,51,0, +0,0,65,0,5,0,53,0,0,0,54,0,0,0,6,0,0,0,15,0,0,0,61,0,4,0,3,0,0,0, +55,0,0,0,54,0,0,0,65,0,5,0,16,0,0,0,56,0,0,0,36,0,0,0,15,0,0,0,61,0, +4,0,2,0,0,0,57,0,0,0,56,0,0,0,65,0,5,0,49,0,0,0,58,0,0,0,9,0,0,0, +20,0,0,0,61,0,4,0,2,0,0,0,59,0,0,0,58,0,0,0,129,0,5,0,2,0,0,0,60,0, +0,0,57,0,0,0,59,0,0,0,145,0,5,0,2,0,0,0,61,0,0,0,55,0,0,0,60,0,0,0, 
+65,0,5,0,16,0,0,0,62,0,0,0,35,0,0,0,32,0,0,0,62,0,3,0,62,0,0,0,61,0, +0,0,61,0,4,0,29,0,0,0,63,0,0,0,35,0,0,0,81,0,5,0,2,0,0,0,64,0,0,0, +63,0,0,0,0,0,0,0,62,0,3,0,25,0,0,0,64,0,0,0,81,0,5,0,17,0,0,0,65,0, +0,0,63,0,0,0,1,0,0,0,62,0,3,0,27,0,0,0,65,0,0,0,81,0,5,0,2,0,0,0, +66,0,0,0,63,0,0,0,2,0,0,0,62,0,3,0,28,0,0,0,66,0,0,0,253,0,1,0,56,0, +1,0 diff --git a/runtime/Includes/Embedded/ScreenFragment.spv.h b/runtime/Includes/Embedded/ScreenFragment.spv.h index ed5f334..727a9d3 100644 --- a/runtime/Includes/Embedded/ScreenFragment.spv.h +++ b/runtime/Includes/Embedded/ScreenFragment.spv.h @@ -1,49 +1,31 @@ -3,2,35,7,0,0,1,0,39,0,0,0,62,0,0,0,0,0,0,0,17,0,2,0,1,0,0,0,11,0, -6,0,32,0,0,0,71,76,83,76,46,115,116,100,46,52,53,48,0,0,0,0,14,0,3,0,0,0,0,0, -1,0,0,0,15,0,7,0,4,0,0,0,34,0,0,0,109,97,105,110,0,0,0,0,23,0,0,0,29,0, -0,0,16,0,3,0,34,0,0,0,7,0,0,0,3,0,3,0,0,0,0,0,100,0,0,0,5,0,4,0, -25,0,0,0,86,101,114,116,79,117,116,0,6,0,4,0,25,0,0,0,0,0,0,0,117,118,0,0,5,0, -4,0,30,0,0,0,70,114,97,103,79,117,116,0,6,0,5,0,30,0,0,0,0,0,0,0,99,111,108,111, -114,0,0,0,5,0,5,0,5,0,0,0,117,95,116,101,120,116,117,114,101,0,0,0,5,0,3,0,23,0, -0,0,117,118,0,0,5,0,4,0,29,0,0,0,99,111,108,111,114,0,0,0,5,0,6,0,33,0,0,0, -76,105,110,101,97,114,84,111,115,82,71,66,0,0,0,0,5,0,4,0,34,0,0,0,109,97,105,110,0,0, -0,0,71,0,4,0,5,0,0,0,33,0,0,0,0,0,0,0,71,0,4,0,5,0,0,0,34,0,0,0, -0,0,0,0,71,0,4,0,23,0,0,0,30,0,0,0,0,0,0,0,71,0,4,0,29,0,0,0,30,0, -0,0,0,0,0,0,72,0,5,0,25,0,0,0,0,0,0,0,35,0,0,0,0,0,0,0,72,0,5,0, -30,0,0,0,0,0,0,0,35,0,0,0,0,0,0,0,22,0,3,0,1,0,0,0,32,0,0,0,25,0, -9,0,2,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0, -0,0,0,0,27,0,3,0,3,0,0,0,2,0,0,0,32,0,4,0,4,0,0,0,0,0,0,0,3,0, -0,0,23,0,4,0,6,0,0,0,1,0,0,0,3,0,0,0,32,0,4,0,7,0,0,0,7,0,0,0, -6,0,0,0,33,0,4,0,8,0,0,0,6,0,0,0,7,0,0,0,43,0,4,0,1,0,0,0,9,0, -0,0,28,46,77,59,21,0,4,0,10,0,0,0,32,0,0,0,1,0,0,0,43,0,4,0,10,0,0,0, -11,0,0,0,0,0,0,0,20,0,2,0,12,0,0,0,23,0,4,0,13,0,0,0,12,0,0,0,3,0, -0,0,43,0,4,0,1,0,0,0,14,0,0,0,61,10,135,63,43,0,4,0,1,0,0,0,15,0,0,0, -0,0,128,63,43,0,4,0,1,0,0,0,16,0,0,0,154,153,25,64,43,0,4,0,1,0,0,0,17,0, -0,0,174,71,97,61,43,0,4,0,1,0,0,0,18,0,0,0,82,184,78,65,19,0,2,0,19,0,0,0, -33,0,3,0,20,0,0,0,19,0,0,0,23,0,4,0,21,0,0,0,1,0,0,0,2,0,0,0,32,0, -4,0,22,0,0,0,1,0,0,0,21,0,0,0,32,0,4,0,24,0,0,0,7,0,0,0,21,0,0,0, -30,0,3,0,25,0,0,0,21,0,0,0,32,0,4,0,26,0,0,0,7,0,0,0,25,0,0,0,23,0, -4,0,27,0,0,0,1,0,0,0,4,0,0,0,32,0,4,0,28,0,0,0,3,0,0,0,27,0,0,0, -30,0,3,0,30,0,0,0,27,0,0,0,32,0,4,0,31,0,0,0,7,0,0,0,30,0,0,0,32,0, -4,0,59,0,0,0,7,0,0,0,27,0,0,0,59,0,4,0,4,0,0,0,5,0,0,0,0,0,0,0, -59,0,4,0,22,0,0,0,23,0,0,0,1,0,0,0,59,0,4,0,28,0,0,0,29,0,0,0,3,0, -0,0,54,0,5,0,6,0,0,0,33,0,0,0,0,0,0,0,8,0,0,0,55,0,3,0,7,0,0,0, -35,0,0,0,248,0,2,0,36,0,0,0,61,0,4,0,6,0,0,0,37,0,0,0,35,0,0,0,80,0, -6,0,6,0,0,0,38,0,0,0,9,0,0,0,9,0,0,0,9,0,0,0,186,0,5,0,13,0,0,0, -39,0,0,0,37,0,0,0,38,0,0,0,61,0,4,0,6,0,0,0,40,0,0,0,35,0,0,0,136,0, -5,0,1,0,0,0,41,0,0,0,15,0,0,0,16,0,0,0,80,0,6,0,6,0,0,0,42,0,0,0, -41,0,0,0,41,0,0,0,41,0,0,0,12,0,7,0,6,0,0,0,43,0,0,0,32,0,0,0,26,0, -0,0,40,0,0,0,42,0,0,0,142,0,5,0,6,0,0,0,44,0,0,0,43,0,0,0,14,0,0,0, -80,0,6,0,6,0,0,0,45,0,0,0,17,0,0,0,17,0,0,0,17,0,0,0,131,0,5,0,6,0, -0,0,46,0,0,0,44,0,0,0,45,0,0,0,61,0,4,0,6,0,0,0,47,0,0,0,35,0,0,0, -142,0,5,0,6,0,0,0,48,0,0,0,47,0,0,0,18,0,0,0,169,0,6,0,6,0,0,0,49,0, -0,0,39,0,0,0,46,0,0,0,48,0,0,0,254,0,2,0,49,0,0,0,56,0,1,0,54,0,5,0, -19,0,0,0,34,0,0,0,0,0,0,0,20,0,0,0,248,0,2,0,50,0,0,0,59,0,4,0,31,0, -0,0,51,0,0,0,7,0,0,0,59,0,4,0,26,0,0,0,52,0,0,0,7,0,0,0,65,0,5,0, 
-24,0,0,0,53,0,0,0,52,0,0,0,11,0,0,0,63,0,3,0,53,0,0,0,23,0,0,0,61,0, -4,0,3,0,0,0,54,0,0,0,5,0,0,0,65,0,5,0,24,0,0,0,55,0,0,0,52,0,0,0, -11,0,0,0,61,0,4,0,21,0,0,0,56,0,0,0,55,0,0,0,87,0,5,0,27,0,0,0,57,0, -0,0,54,0,0,0,56,0,0,0,65,0,5,0,59,0,0,0,58,0,0,0,51,0,0,0,11,0,0,0, -62,0,3,0,58,0,0,0,57,0,0,0,61,0,4,0,30,0,0,0,60,0,0,0,51,0,0,0,81,0, -5,0,27,0,0,0,61,0,0,0,60,0,0,0,0,0,0,0,62,0,3,0,29,0,0,0,61,0,0,0, +3,2,35,7,0,0,1,0,39,0,0,0,34,0,0,0,0,0,0,0,17,0,2,0,1,0,0,0,14,0, +3,0,0,0,0,0,1,0,0,0,15,0,7,0,4,0,0,0,21,0,0,0,109,97,105,110,0,0,0,0, +10,0,0,0,18,0,0,0,16,0,3,0,21,0,0,0,7,0,0,0,3,0,3,0,0,0,0,0,100,0, +0,0,5,0,4,0,14,0,0,0,86,101,114,116,79,117,116,0,6,0,4,0,14,0,0,0,0,0,0,0, +117,118,0,0,5,0,4,0,19,0,0,0,70,114,97,103,79,117,116,0,6,0,5,0,19,0,0,0,0,0, +0,0,99,111,108,111,114,0,0,0,5,0,5,0,5,0,0,0,117,95,116,101,120,116,117,114,101,0,0,0, +5,0,3,0,10,0,0,0,117,118,0,0,5,0,4,0,18,0,0,0,99,111,108,111,114,0,0,0,5,0, +4,0,21,0,0,0,109,97,105,110,0,0,0,0,71,0,4,0,5,0,0,0,33,0,0,0,0,0,0,0, +71,0,4,0,5,0,0,0,34,0,0,0,0,0,0,0,71,0,4,0,10,0,0,0,30,0,0,0,0,0, +0,0,71,0,4,0,18,0,0,0,30,0,0,0,0,0,0,0,72,0,5,0,14,0,0,0,0,0,0,0, +35,0,0,0,0,0,0,0,72,0,5,0,19,0,0,0,0,0,0,0,35,0,0,0,0,0,0,0,22,0, +3,0,1,0,0,0,32,0,0,0,25,0,9,0,2,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,27,0,3,0,3,0,0,0,2,0,0,0,32,0, +4,0,4,0,0,0,0,0,0,0,3,0,0,0,19,0,2,0,6,0,0,0,33,0,3,0,7,0,0,0, +6,0,0,0,23,0,4,0,8,0,0,0,1,0,0,0,2,0,0,0,32,0,4,0,9,0,0,0,1,0, +0,0,8,0,0,0,21,0,4,0,11,0,0,0,32,0,0,0,1,0,0,0,43,0,4,0,11,0,0,0, +12,0,0,0,0,0,0,0,32,0,4,0,13,0,0,0,7,0,0,0,8,0,0,0,30,0,3,0,14,0, +0,0,8,0,0,0,32,0,4,0,15,0,0,0,7,0,0,0,14,0,0,0,23,0,4,0,16,0,0,0, +1,0,0,0,4,0,0,0,32,0,4,0,17,0,0,0,3,0,0,0,16,0,0,0,30,0,3,0,19,0, +0,0,16,0,0,0,32,0,4,0,20,0,0,0,7,0,0,0,19,0,0,0,32,0,4,0,31,0,0,0, +7,0,0,0,16,0,0,0,59,0,4,0,4,0,0,0,5,0,0,0,0,0,0,0,59,0,4,0,9,0, +0,0,10,0,0,0,1,0,0,0,59,0,4,0,17,0,0,0,18,0,0,0,3,0,0,0,54,0,5,0, +6,0,0,0,21,0,0,0,0,0,0,0,7,0,0,0,248,0,2,0,22,0,0,0,59,0,4,0,20,0, +0,0,23,0,0,0,7,0,0,0,59,0,4,0,15,0,0,0,24,0,0,0,7,0,0,0,65,0,5,0, +13,0,0,0,25,0,0,0,24,0,0,0,12,0,0,0,63,0,3,0,25,0,0,0,10,0,0,0,61,0, +4,0,3,0,0,0,26,0,0,0,5,0,0,0,65,0,5,0,13,0,0,0,27,0,0,0,24,0,0,0, +12,0,0,0,61,0,4,0,8,0,0,0,28,0,0,0,27,0,0,0,87,0,5,0,16,0,0,0,29,0, +0,0,26,0,0,0,28,0,0,0,65,0,5,0,31,0,0,0,30,0,0,0,23,0,0,0,12,0,0,0, +62,0,3,0,30,0,0,0,29,0,0,0,61,0,4,0,19,0,0,0,32,0,0,0,23,0,0,0,81,0, +5,0,16,0,0,0,33,0,0,0,32,0,0,0,0,0,0,0,62,0,3,0,18,0,0,0,33,0,0,0, 253,0,1,0,56,0,1,0 diff --git a/runtime/Includes/Embedded/ScreenVertex.spv.h b/runtime/Includes/Embedded/ScreenVertex.spv.h index 3ba7900..e4e53d8 100644 --- a/runtime/Includes/Embedded/ScreenVertex.spv.h +++ b/runtime/Includes/Embedded/ScreenVertex.spv.h @@ -1,12 +1,12 @@ 3,2,35,7,0,0,1,0,39,0,0,0,59,0,0,0,0,0,0,0,17,0,2,0,1,0,0,0,14,0, -3,0,0,0,0,0,1,0,0,0,15,0,8,0,0,0,0,0,36,0,0,0,109,97,105,110,0,0,0,0, +3,0,0,0,0,0,1,0,0,0,15,0,8,0,0,0,0,0,37,0,0,0,109,97,105,110,0,0,0,0, 20,0,0,0,26,0,0,0,29,0,0,0,3,0,3,0,0,0,0,0,100,0,0,0,5,0,4,0,23,0, 0,0,86,101,114,116,73,110,0,0,6,0,6,0,23,0,0,0,0,0,0,0,118,101,114,116,95,105,110,100, 101,120,0,0,5,0,4,0,30,0,0,0,86,101,114,116,79,117,116,0,6,0,4,0,30,0,0,0,0,0, 0,0,117,118,0,0,6,0,6,0,30,0,0,0,1,0,0,0,112,111,115,105,116,105,111,110,0,0,0,0, 5,0,5,0,15,0,0,0,118,101,114,116,105,99,101,115,0,0,0,0,5,0,6,0,20,0,0,0,118,101, 114,116,101,120,95,105,110,100,101,120,0,0,0,0,5,0,3,0,26,0,0,0,117,118,0,0,5,0,5,0, -29,0,0,0,112,111,115,105,116,105,111,110,0,0,0,0,5,0,4,0,36,0,0,0,109,97,105,110,0,0, 
+29,0,0,0,112,111,115,105,116,105,111,110,0,0,0,0,5,0,4,0,37,0,0,0,109,97,105,110,0,0, 0,0,71,0,4,0,20,0,0,0,11,0,0,0,42,0,0,0,71,0,4,0,29,0,0,0,11,0,0,0, 0,0,0,0,71,0,4,0,26,0,0,0,30,0,0,0,0,0,0,0,72,0,5,0,23,0,0,0,0,0, 0,0,35,0,0,0,0,0,0,0,72,0,5,0,30,0,0,0,0,0,0,0,35,0,0,0,0,0,0,0, @@ -27,22 +27,22 @@ 30,0,4,0,30,0,0,0,2,0,0,0,27,0,0,0,32,0,4,0,31,0,0,0,7,0,0,0,2,0, 0,0,32,0,4,0,32,0,0,0,7,0,0,0,30,0,0,0,43,0,4,0,18,0,0,0,33,0,0,0, 1,0,0,0,43,0,4,0,1,0,0,0,34,0,0,0,0,0,0,0,43,0,4,0,1,0,0,0,35,0, -0,0,0,0,0,63,32,0,4,0,44,0,0,0,6,0,0,0,2,0,0,0,32,0,4,0,50,0,0,0, -7,0,0,0,27,0,0,0,59,0,5,0,6,0,0,0,15,0,0,0,6,0,0,0,14,0,0,0,59,0, -4,0,19,0,0,0,20,0,0,0,1,0,0,0,59,0,4,0,25,0,0,0,26,0,0,0,3,0,0,0, -59,0,4,0,28,0,0,0,29,0,0,0,3,0,0,0,54,0,5,0,16,0,0,0,36,0,0,0,0,0, -0,0,17,0,0,0,248,0,2,0,37,0,0,0,59,0,4,0,31,0,0,0,38,0,0,0,7,0,0,0, -59,0,4,0,32,0,0,0,39,0,0,0,7,0,0,0,59,0,4,0,24,0,0,0,40,0,0,0,7,0, -0,0,65,0,5,0,22,0,0,0,41,0,0,0,40,0,0,0,21,0,0,0,63,0,3,0,41,0,0,0, -20,0,0,0,65,0,5,0,22,0,0,0,42,0,0,0,40,0,0,0,21,0,0,0,61,0,4,0,18,0, -0,0,43,0,0,0,42,0,0,0,65,0,5,0,44,0,0,0,45,0,0,0,15,0,0,0,43,0,0,0, -61,0,4,0,2,0,0,0,46,0,0,0,45,0,0,0,62,0,3,0,38,0,0,0,46,0,0,0,61,0, -4,0,2,0,0,0,47,0,0,0,38,0,0,0,80,0,6,0,27,0,0,0,48,0,0,0,47,0,0,0, -34,0,0,0,10,0,0,0,65,0,5,0,50,0,0,0,49,0,0,0,39,0,0,0,33,0,0,0,62,0, -3,0,49,0,0,0,48,0,0,0,61,0,4,0,2,0,0,0,51,0,0,0,38,0,0,0,142,0,5,0, -2,0,0,0,52,0,0,0,51,0,0,0,35,0,0,0,80,0,5,0,2,0,0,0,53,0,0,0,35,0, -0,0,35,0,0,0,129,0,5,0,2,0,0,0,54,0,0,0,52,0,0,0,53,0,0,0,65,0,5,0, -31,0,0,0,55,0,0,0,39,0,0,0,21,0,0,0,62,0,3,0,55,0,0,0,54,0,0,0,61,0, -4,0,30,0,0,0,56,0,0,0,39,0,0,0,81,0,5,0,2,0,0,0,57,0,0,0,56,0,0,0, +0,0,0,0,0,63,44,0,5,0,2,0,0,0,36,0,0,0,35,0,0,0,35,0,0,0,32,0,4,0, +45,0,0,0,6,0,0,0,2,0,0,0,32,0,4,0,51,0,0,0,7,0,0,0,27,0,0,0,59,0, +5,0,6,0,0,0,15,0,0,0,6,0,0,0,14,0,0,0,59,0,4,0,19,0,0,0,20,0,0,0, +1,0,0,0,59,0,4,0,25,0,0,0,26,0,0,0,3,0,0,0,59,0,4,0,28,0,0,0,29,0, +0,0,3,0,0,0,54,0,5,0,16,0,0,0,37,0,0,0,0,0,0,0,17,0,0,0,248,0,2,0, +38,0,0,0,59,0,4,0,31,0,0,0,39,0,0,0,7,0,0,0,59,0,4,0,32,0,0,0,40,0, +0,0,7,0,0,0,59,0,4,0,24,0,0,0,41,0,0,0,7,0,0,0,65,0,5,0,22,0,0,0, +42,0,0,0,41,0,0,0,21,0,0,0,63,0,3,0,42,0,0,0,20,0,0,0,65,0,5,0,22,0, +0,0,43,0,0,0,41,0,0,0,21,0,0,0,61,0,4,0,18,0,0,0,44,0,0,0,43,0,0,0, +65,0,5,0,45,0,0,0,46,0,0,0,15,0,0,0,44,0,0,0,61,0,4,0,2,0,0,0,47,0, +0,0,46,0,0,0,62,0,3,0,39,0,0,0,47,0,0,0,61,0,4,0,2,0,0,0,48,0,0,0, +39,0,0,0,80,0,6,0,27,0,0,0,49,0,0,0,48,0,0,0,34,0,0,0,10,0,0,0,65,0, +5,0,51,0,0,0,50,0,0,0,40,0,0,0,33,0,0,0,62,0,3,0,50,0,0,0,49,0,0,0, +61,0,4,0,2,0,0,0,52,0,0,0,39,0,0,0,142,0,5,0,2,0,0,0,53,0,0,0,52,0, +0,0,35,0,0,0,129,0,5,0,2,0,0,0,54,0,0,0,53,0,0,0,36,0,0,0,65,0,5,0, +31,0,0,0,55,0,0,0,40,0,0,0,21,0,0,0,62,0,3,0,55,0,0,0,54,0,0,0,61,0, +4,0,30,0,0,0,56,0,0,0,40,0,0,0,81,0,5,0,2,0,0,0,57,0,0,0,56,0,0,0, 0,0,0,0,62,0,3,0,26,0,0,0,57,0,0,0,81,0,5,0,27,0,0,0,58,0,0,0,56,0, 0,0,1,0,0,0,62,0,3,0,29,0,0,0,58,0,0,0,253,0,1,0,56,0,1,0 diff --git a/runtime/Includes/Graphics/Sprite.h b/runtime/Includes/Graphics/Sprite.h index 68a1bfb..3221852 100644 --- a/runtime/Includes/Graphics/Sprite.h +++ b/runtime/Includes/Graphics/Sprite.h @@ -14,15 +14,15 @@ namespace mlx friend class Render2DPass; public: - Sprite(NonOwningPtr texture); + Sprite(class Renderer& renderer, NonOwningPtr texture); inline void SetColor(Vec4f color) noexcept { m_color = color; } inline void SetPosition(Vec3f position) noexcept { m_position = position; } - [[nodiscard]] inline const Vec4f& GetColor() const noexcept { return m_color; } - [[nodiscard]] 
inline const Vec3f& GetPosition() const noexcept { return m_position; } - [[nodiscard]] inline std::shared_ptr GetMesh() const { return p_mesh; } - [[nodiscard]] inline NonOwningPtr GetTexture() const { return p_texture; } + [[nodiscard]] MLX_FORCEINLINE const Vec4f& GetColor() const noexcept { return m_color; } + [[nodiscard]] MLX_FORCEINLINE const Vec3f& GetPosition() const noexcept { return m_position; } + [[nodiscard]] MLX_FORCEINLINE std::shared_ptr GetMesh() const { return p_mesh; } + [[nodiscard]] MLX_FORCEINLINE NonOwningPtr GetTexture() const { return p_texture; } ~Sprite() = default; diff --git a/runtime/Includes/Renderer/Descriptor.h b/runtime/Includes/Renderer/Descriptor.h index ad37ae4..8689d3f 100644 --- a/runtime/Includes/Renderer/Descriptor.h +++ b/runtime/Includes/Renderer/Descriptor.h @@ -17,31 +17,69 @@ namespace mlx std::uint32_t binding; }; + class DescriptorPool + { + public: + DescriptorPool() = default; + + void Init() noexcept; + void Destroy() noexcept; + + VkDescriptorSet AllocateDescriptorSet(std::uint32_t frame_index, VkDescriptorSetLayout layout); + + void ResetPoolFromFrameIndex(std::size_t frame_index); + + [[nodiscard]] inline VkDescriptorPool Get(std::uint32_t index) const noexcept { return m_pools[index]; } + [[nodiscard]] MLX_FORCEINLINE std::size_t GetNumberOfSetsAllocated() const noexcept { return m_allocation_count; } + + ~DescriptorPool() = default; + + private: + std::array m_pools; + std::size_t m_allocation_count = 0; + }; + + class DescriptorPoolManager + { + public: + DescriptorPoolManager() = default; + + void ResetPoolsFromFrameIndex(std::size_t frame_index); + DescriptorPool& GetAvailablePool(); + void Destroy(); + + ~DescriptorPoolManager() = default; + + private: + std::list m_pools; + }; + class DescriptorSet { public: DescriptorSet() { m_set.fill(VK_NULL_HANDLE); } - DescriptorSet(const ShaderSetLayout& layout, VkDescriptorSetLayout vklayout, ShaderType shader_type); + DescriptorSet(DescriptorPoolManager& pools_manager, const ShaderSetLayout& layout, VkDescriptorSetLayout vklayout, ShaderType shader_type); void SetImage(std::size_t i, std::uint32_t binding, class Image& image); void SetStorageBuffer(std::size_t i, std::uint32_t binding, class GPUBuffer& buffer); void SetUniformBuffer(std::size_t i, std::uint32_t binding, class GPUBuffer& buffer); void Update(std::size_t i, VkCommandBuffer cmd = VK_NULL_HANDLE) noexcept; - void Reallocate() noexcept; + void Reallocate(std::size_t frame_index) noexcept; [[nodiscard]] inline VkDescriptorSet GetSet(std::size_t i) const noexcept { return m_set[i]; } - [[nodiscard]] inline DescriptorSet Duplicate() const { return DescriptorSet{ m_set_layout, m_descriptors }; } + [[nodiscard]] inline DescriptorSet Duplicate() const { return DescriptorSet{ *p_pools_manager, m_set_layout, m_descriptors }; } [[nodiscard]] inline bool IsInit() const noexcept { return m_set[0] != VK_NULL_HANDLE; } ~DescriptorSet() = default; private: - DescriptorSet(VkDescriptorSetLayout layout, const std::vector& descriptors); + DescriptorSet(DescriptorPoolManager& pools_manager, VkDescriptorSetLayout layout, const std::vector& descriptors); private: std::vector m_descriptors; std::array m_set; VkDescriptorSetLayout m_set_layout; + NonOwningPtr p_pools_manager; }; } diff --git a/runtime/Includes/Renderer/Image.h b/runtime/Includes/Renderer/Image.h index fe4768e..55840ed 100644 --- a/runtime/Includes/Renderer/Image.h +++ b/runtime/Includes/Renderer/Image.h @@ -76,7 +76,7 @@ namespace mlx MLX_PROFILE_FUNCTION(); std::vector candidates 
= { VK_FORMAT_D32_SFLOAT, VK_FORMAT_D32_SFLOAT_S8_UINT, VK_FORMAT_D24_UNORM_S8_UINT }; VkFormat format = kvfFindSupportFormatInCandidates(RenderCore::Get().GetDevice(), candidates.data(), candidates.size(), VK_IMAGE_TILING_OPTIMAL, VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT); - Image::Init(ImageType::Depth, width, height, format, VK_IMAGE_TILING_OPTIMAL, VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT, is_multisampled, std::move(debug_name)); + Image::Init(ImageType::Depth, width, height, format, VK_IMAGE_TILING_OPTIMAL, VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT, is_multisampled, std::move(debug_name)); Image::CreateImageView(VK_IMAGE_VIEW_TYPE_2D, VK_IMAGE_ASPECT_DEPTH_BIT); Image::TransitionLayout(VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL); } diff --git a/runtime/Includes/Renderer/Pipelines/Graphics.h b/runtime/Includes/Renderer/Pipelines/Graphics.h index 46304bf..09aa939 100644 --- a/runtime/Includes/Renderer/Pipelines/Graphics.h +++ b/runtime/Includes/Renderer/Pipelines/Graphics.h @@ -24,7 +24,7 @@ namespace mlx public: GraphicPipeline() = default; - void Init(const GraphicPipelineDescriptor& descriptor); + void Init(const GraphicPipelineDescriptor& descriptor, std::string_view debug_name); bool BindPipeline(VkCommandBuffer command_buffer, std::size_t framebuffer_index, std::array clear) noexcept; void EndPipeline(VkCommandBuffer command_buffer) noexcept override; void Destroy() noexcept; @@ -46,6 +46,9 @@ namespace mlx std::vector> m_attachments; std::vector m_framebuffers; std::vector m_clears; + #ifdef DEBUG + std::string m_debug_name; + #endif std::shared_ptr p_vertex_shader; std::shared_ptr p_fragment_shader; VkRenderPass m_renderpass = VK_NULL_HANDLE; diff --git a/runtime/Includes/Renderer/RenderPasses/2DPass.h b/runtime/Includes/Renderer/RenderPasses/2DPass.h index eef1282..dc78aeb 100644 --- a/runtime/Includes/Renderer/RenderPasses/2DPass.h +++ b/runtime/Includes/Renderer/RenderPasses/2DPass.h @@ -11,7 +11,7 @@ namespace mlx { public: Render2DPass() = default; - void Init(); + void Init(class Renderer& renderer); void Pass(class Scene& scene, class Renderer& renderer, class Texture& render_target); void Destroy(); ~Render2DPass() = default; diff --git a/runtime/Includes/Renderer/RenderPasses/FinalPass.h b/runtime/Includes/Renderer/RenderPasses/FinalPass.h index 93a71cf..fe5571c 100644 --- a/runtime/Includes/Renderer/RenderPasses/FinalPass.h +++ b/runtime/Includes/Renderer/RenderPasses/FinalPass.h @@ -11,7 +11,7 @@ namespace mlx { public: FinalPass() = default; - void Init(); + void Init(class Renderer& renderer); void Pass(class Scene& scene, class Renderer& renderer, class Texture& render_target); void Destroy(); ~FinalPass() = default; diff --git a/runtime/Includes/Renderer/RenderPasses/Passes.h b/runtime/Includes/Renderer/RenderPasses/Passes.h index 7800912..3888a23 100644 --- a/runtime/Includes/Renderer/RenderPasses/Passes.h +++ b/runtime/Includes/Renderer/RenderPasses/Passes.h @@ -11,7 +11,7 @@ namespace mlx { public: RenderPasses() = default; - void Init(); + void Init(class Renderer& renderer); void Pass(class Scene& scene, class Renderer& renderer); void Destroy(); ~RenderPasses() = default; diff --git a/runtime/Includes/Renderer/Renderer.h b/runtime/Includes/Renderer/Renderer.h index a2df8cd..1bcba3a 100644 --- a/runtime/Includes/Renderer/Renderer.h +++ b/runtime/Includes/Renderer/Renderer.h @@ -5,6 +5,7 @@ #include #include #include +#include #include namespace mlx @@ -31,6 +32,7 @@ namespace mlx [[nodiscard]] inline 
std::size_t GetSwapchainImageIndex() const noexcept { return m_swapchain_image_index; } [[nodiscard]] inline std::size_t GetCurrentFrameIndex() const noexcept { return m_current_frame_index; } [[nodiscard]] inline NonOwningPtr GetWindow() const noexcept { return p_window; } + [[nodiscard]] inline DescriptorPoolManager& GetDescriptorPoolManager() noexcept { return m_descriptor_pool_manager; } MLX_FORCEINLINE constexpr void RequireFramebufferResize() noexcept { m_framebuffers_resize = true; } @@ -43,6 +45,7 @@ namespace mlx void DestroySwapchain(); private: + DescriptorPoolManager m_descriptor_pool_manager; std::array m_image_available_semaphores; std::array m_render_finished_semaphores; std::array m_cmd_buffers; diff --git a/runtime/Includes/Renderer/ScenesRenderer.h b/runtime/Includes/Renderer/ScenesRenderer.h index 03c391e..36dcdad 100644 --- a/runtime/Includes/Renderer/ScenesRenderer.h +++ b/runtime/Includes/Renderer/ScenesRenderer.h @@ -9,7 +9,7 @@ namespace mlx { public: SceneRenderer() = default; - void Init(); + void Init(class Renderer& renderer); void Render(class Scene& scene, class Renderer& renderer); // TODO : add RTT support void Destroy(); ~SceneRenderer() = default; diff --git a/runtime/Sources/Core/Graphics.cpp b/runtime/Sources/Core/Graphics.cpp index 7032816..184831b 100644 --- a/runtime/Sources/Core/Graphics.cpp +++ b/runtime/Sources/Core/Graphics.cpp @@ -12,7 +12,7 @@ namespace mlx MLX_PROFILE_FUNCTION(); // TODO : re-enable render targets m_renderer.Init(nullptr); - m_scene_renderer.Init(); + m_scene_renderer.Init(m_renderer); SceneDescriptor descriptor{}; descriptor.renderer = &m_renderer; @@ -27,7 +27,7 @@ namespace mlx { MLX_PROFILE_FUNCTION(); m_renderer.Init(p_window.get()); - m_scene_renderer.Init(); + m_scene_renderer.Init(m_renderer); SceneDescriptor descriptor{}; descriptor.renderer = &m_renderer; diff --git a/runtime/Sources/Graphics/Scene.cpp b/runtime/Sources/Graphics/Scene.cpp index dd48850..998ed64 100644 --- a/runtime/Sources/Graphics/Scene.cpp +++ b/runtime/Sources/Graphics/Scene.cpp @@ -16,7 +16,7 @@ namespace mlx Sprite& Scene::CreateSprite(NonOwningPtr texture) noexcept { MLX_PROFILE_FUNCTION(); - std::shared_ptr sprite = std::make_shared(texture); + std::shared_ptr sprite = std::make_shared(*m_descriptor.renderer, texture); m_sprites.push_back(sprite); return *sprite; } @@ -24,9 +24,9 @@ namespace mlx NonOwningPtr Scene::GetSpriteFromTextureAndPosition(NonOwningPtr texture, const Vec2f& position) const { MLX_PROFILE_FUNCTION(); - auto it = std::find_if(m_sprites.begin(), m_sprites.end(), [texture, position](std::shared_ptr sprite) + auto it = std::find_if(m_sprites.begin(), m_sprites.end(), [&texture, &position](std::shared_ptr sprite) { - return sprite->GetPosition().x == position.x && sprite->GetPosition().y == position.y && sprite->GetTexture() == texture; + return sprite->GetTexture() == texture && sprite->GetPosition().x == position.x && sprite->GetPosition().y == position.y; }); return (it != m_sprites.end() ? 
it->get() : nullptr); } @@ -37,7 +37,7 @@ namespace mlx auto it = m_sprites.begin(); do { - it = std::find_if(m_sprites.begin(), m_sprites.end(), [texture](std::shared_ptr sprite) + it = std::find_if(m_sprites.begin(), m_sprites.end(), [&texture](std::shared_ptr sprite) { return sprite->GetTexture() == texture; }); diff --git a/runtime/Sources/Graphics/Sprite.cpp b/runtime/Sources/Graphics/Sprite.cpp index 533270b..a57d27a 100644 --- a/runtime/Sources/Graphics/Sprite.cpp +++ b/runtime/Sources/Graphics/Sprite.cpp @@ -2,6 +2,7 @@ #include #include #include +#include namespace mlx { @@ -36,17 +37,17 @@ namespace mlx return mesh; } - Sprite::Sprite(NonOwningPtr texture) + Sprite::Sprite(Renderer& renderer, NonOwningPtr texture) { MLX_PROFILE_FUNCTION(); Verify((bool)texture, "Sprite: invalid texture"); p_mesh = CreateQuad(0, 0, texture->GetWidth(), texture->GetHeight()); p_texture = texture; - func::function functor = [this](const EventBase& event) + func::function functor = [this, &renderer](const EventBase& event) { if(event.What() == Event::DescriptorPoolResetEventCode) - m_set.Reallocate(); + m_set.Reallocate(renderer.GetCurrentFrameIndex()); }; EventBus::RegisterListener({ functor, "__Sprite" + std::to_string(reinterpret_cast(this)) }); } diff --git a/runtime/Sources/Renderer/Buffer.cpp b/runtime/Sources/Renderer/Buffer.cpp index 250bbd6..3e64d99 100644 --- a/runtime/Sources/Renderer/Buffer.cpp +++ b/runtime/Sources/Renderer/Buffer.cpp @@ -46,6 +46,7 @@ namespace mlx bufferInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE; #ifdef DEBUG + m_debug_name = debug_name; std::string alloc_name{ debug_name }; if(usage & VK_BUFFER_USAGE_INDEX_BUFFER_BIT) alloc_name.append("_index_buffer"); @@ -56,7 +57,6 @@ namespace mlx else alloc_name.append("_buffer"); m_allocation = RenderCore::Get().GetAllocator().CreateBuffer(&bufferInfo, &alloc_info, m_buffer, alloc_name.c_str()); - m_debug_name = std::move(alloc_name); #else m_allocation = RenderCore::Get().GetAllocator().CreateBuffer(&bufferInfo, &alloc_info, m_buffer, nullptr); #endif @@ -85,7 +85,6 @@ namespace mlx kvfEndCommandBuffer(cmd); VkFence fence = kvfCreateFence(RenderCore::Get().GetDevice()); kvfSubmitSingleTimeCommandBuffer(RenderCore::Get().GetDevice(), cmd, KVF_GRAPHICS_QUEUE, fence); - kvfWaitForFence(RenderCore::Get().GetDevice(), fence); kvfDestroyFence(RenderCore::Get().GetDevice(), fence); return true; } diff --git a/runtime/Sources/Renderer/Descriptor.cpp b/runtime/Sources/Renderer/Descriptor.cpp index ac3b2ab..4e168a3 100644 --- a/runtime/Sources/Renderer/Descriptor.cpp +++ b/runtime/Sources/Renderer/Descriptor.cpp @@ -5,9 +5,12 @@ #include #include #include +#include namespace mlx { + constexpr std::size_t MAX_SETS_PER_POOL = MAX_FRAMES_IN_FLIGHT * 1024; + void TransitionImageToCorrectLayout(Image& image, VkCommandBuffer cmd) { MLX_PROFILE_FUNCTION(); @@ -19,8 +22,89 @@ namespace mlx Error("Vulkan : cannot transition descriptor image layout, unkown image type"); } - DescriptorSet::DescriptorSet(const ShaderSetLayout& layout, VkDescriptorSetLayout vklayout, ShaderType shader_type) - : m_set_layout(vklayout) + void DescriptorPool::Init() noexcept + { + VkDescriptorPoolSize pool_sizes[] = { + { VK_DESCRIPTOR_TYPE_SAMPLER, MAX_SETS_PER_POOL }, + { VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, MAX_SETS_PER_POOL }, + { VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, MAX_SETS_PER_POOL }, + { VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, MAX_SETS_PER_POOL }, + { VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER, MAX_SETS_PER_POOL }, + { VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER, 
MAX_SETS_PER_POOL }, + { VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, MAX_SETS_PER_POOL }, + { VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, MAX_SETS_PER_POOL }, + { VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC, MAX_SETS_PER_POOL }, + { VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC, MAX_SETS_PER_POOL }, + { VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, MAX_SETS_PER_POOL } + }; + + VkDescriptorPoolCreateInfo poolInfo{}; + poolInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO; + poolInfo.poolSizeCount = sizeof(pool_sizes) / sizeof(pool_sizes[0]); + poolInfo.pPoolSizes = pool_sizes; + poolInfo.maxSets = MAX_SETS_PER_POOL; + poolInfo.flags = VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT; + for(std::size_t i = 0; i < MAX_FRAMES_IN_FLIGHT; i++) + kvfCheckVk(RenderCore::Get().vkCreateDescriptorPool(RenderCore::Get().GetDevice(), &poolInfo, nullptr, &m_pools[i])); + m_allocation_count = 0; + } + + void DescriptorPool::Destroy() noexcept + { + for(std::size_t i = 0; i < MAX_FRAMES_IN_FLIGHT; i++) + { + if(m_pools[i] == VK_NULL_HANDLE) + continue; + RenderCore::Get().vkDestroyDescriptorPool(RenderCore::Get().GetDevice(), m_pools[i], nullptr); + m_pools[i] = VK_NULL_HANDLE; + } + m_allocation_count = 0; + } + + VkDescriptorSet DescriptorPool::AllocateDescriptorSet(std::uint32_t frame_index, VkDescriptorSetLayout layout) + { + VkDescriptorSet set; + VkDescriptorSetAllocateInfo alloc_info = {}; + alloc_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO; + alloc_info.descriptorPool = m_pools[frame_index]; + alloc_info.descriptorSetCount = 1; + alloc_info.pSetLayouts = &layout; + kvfCheckVk(RenderCore::Get().vkAllocateDescriptorSets(RenderCore::Get().GetDevice(), &alloc_info, &set)); + m_allocation_count++; + return set; + } + + void DescriptorPool::ResetPoolFromFrameIndex(std::size_t frame_index) + { + Assert(frame_index < MAX_FRAMES_IN_FLIGHT, "invalid frame index"); + RenderCore::Get().vkResetDescriptorPool(RenderCore::Get().GetDevice(), m_pools[frame_index], 0); + } + + void DescriptorPoolManager::ResetPoolsFromFrameIndex(std::size_t frame_index) + { + for(auto& pool : m_pools) + pool.ResetPoolFromFrameIndex(frame_index); + } + + DescriptorPool& DescriptorPoolManager::GetAvailablePool() + { + for(auto& pool : m_pools) + { + if(pool.GetNumberOfSetsAllocated() < MAX_SETS_PER_POOL) + return pool; + } + m_pools.emplace_front().Init(); + return m_pools.front(); + } + + void DescriptorPoolManager::Destroy() + { + for(auto& pool : m_pools) + pool.Destroy(); + } + + DescriptorSet::DescriptorSet(DescriptorPoolManager& pools_manager, const ShaderSetLayout& layout, VkDescriptorSetLayout vklayout, ShaderType shader_type) + : m_set_layout(vklayout), p_pools_manager(&pools_manager) { MLX_PROFILE_FUNCTION(); for(auto& [binding, type] : layout.binds) @@ -31,15 +115,15 @@ namespace mlx m_descriptors.back().binding = binding; } for(std::size_t i = 0; i < MAX_FRAMES_IN_FLIGHT; i++) - m_set[i] = kvfAllocateDescriptorSet(RenderCore::Get().GetDevice(), vklayout); + m_set[i] = p_pools_manager->GetAvailablePool().AllocateDescriptorSet(i, vklayout); } - DescriptorSet::DescriptorSet(VkDescriptorSetLayout layout, const std::vector& descriptors) - : m_descriptors(descriptors), m_set_layout(layout) + DescriptorSet::DescriptorSet(DescriptorPoolManager& pools_manager, VkDescriptorSetLayout layout, const std::vector& descriptors) + : m_descriptors(descriptors), m_set_layout(layout), p_pools_manager(&pools_manager) { MLX_PROFILE_FUNCTION(); for(std::size_t i = 0; i < MAX_FRAMES_IN_FLIGHT; i++) - m_set[i] = 
kvfAllocateDescriptorSet(RenderCore::Get().GetDevice(), layout); + m_set[i] = p_pools_manager->GetAvailablePool().AllocateDescriptorSet(i, layout); } void DescriptorSet::SetImage(std::size_t i, std::uint32_t binding, class Image& image) @@ -146,10 +230,9 @@ namespace mlx RenderCore::Get().vkUpdateDescriptorSets(RenderCore::Get().GetDevice(), writes.size(), writes.data(), 0, nullptr); } - void DescriptorSet::Reallocate() noexcept + void DescriptorSet::Reallocate(std::size_t frame_index) noexcept { MLX_PROFILE_FUNCTION(); - for(std::size_t i = 0; i < MAX_FRAMES_IN_FLIGHT; i++) - m_set[i] = kvfAllocateDescriptorSet(RenderCore::Get().GetDevice(), m_set_layout); + m_set[frame_index] = p_pools_manager->GetAvailablePool().AllocateDescriptorSet(frame_index, m_set_layout); } } diff --git a/runtime/Sources/Renderer/Image.cpp b/runtime/Sources/Renderer/Image.cpp index d74c643..173523a 100644 --- a/runtime/Sources/Renderer/Image.cpp +++ b/runtime/Sources/Renderer/Image.cpp @@ -100,22 +100,21 @@ namespace mlx subresource_range.levelCount = 1; subresource_range.baseArrayLayer = 0; + VkImageLayout old_layout = m_layout; + TransitionLayout(VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, cmd); if(m_type == ImageType::Color) { - VkImageLayout old_layout = m_layout; - TransitionLayout(VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, cmd); subresource_range.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT; VkClearColorValue clear_color = VkClearColorValue({ { color.x, color.y, color.z, color.w } }); RenderCore::Get().vkCmdClearColorImage(cmd, m_image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, &clear_color, 1, &subresource_range); - TransitionLayout(old_layout, cmd); } else if(m_type == ImageType::Depth) { VkClearDepthStencilValue clear_depth_stencil = { 1.0f, 1 }; subresource_range.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT; - TransitionLayout(VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, cmd); RenderCore::Get().vkCmdClearDepthStencilImage(cmd, m_image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, &clear_depth_stencil, 1, &subresource_range); } + TransitionLayout(old_layout, cmd); } void Image::DestroySampler() noexcept diff --git a/runtime/Sources/Renderer/Pipelines/Graphics.cpp b/runtime/Sources/Renderer/Pipelines/Graphics.cpp index a466786..267b061 100644 --- a/runtime/Sources/Renderer/Pipelines/Graphics.cpp +++ b/runtime/Sources/Renderer/Pipelines/Graphics.cpp @@ -1,3 +1,4 @@ +#include "vulkan/vulkan_core.h" #include #include #include @@ -7,7 +8,7 @@ namespace mlx { - void GraphicPipeline::Init(const GraphicPipelineDescriptor& descriptor) + void GraphicPipeline::Init(const GraphicPipelineDescriptor& descriptor, std::string_view debug_name) { MLX_PROFILE_FUNCTION(); if(!descriptor.vertex_shader || !descriptor.fragment_shader) @@ -19,6 +20,10 @@ namespace mlx p_renderer = descriptor.renderer; p_depth = descriptor.depth; + #ifdef DEBUG + m_debug_name = debug_name; + #endif + std::vector push_constants; std::vector set_layouts; push_constants.insert(push_constants.end(), p_vertex_shader->GetPipelineLayout().push_constants.begin(), p_vertex_shader->GetPipelineLayout().push_constants.end()); @@ -31,7 +36,7 @@ namespace mlx CreateFramebuffers(m_attachments, descriptor.clear_color_attachments); VkPhysicalDeviceFeatures features{}; - mlx::RenderCore::Get().vkGetPhysicalDeviceFeatures(RenderCore::Get().GetPhysicalDevice(), &features); + RenderCore::Get().vkGetPhysicalDeviceFeatures(RenderCore::Get().GetPhysicalDevice(), &features); KvfGraphicsPipelineBuilder* builder = kvfCreateGPipelineBuilder(); kvfGPipelineBuilderAddShaderStage(builder, 
p_vertex_shader->GetShaderStage(), p_vertex_shader->GetShaderModule(), "main"); @@ -59,6 +64,33 @@ namespace mlx m_pipeline = kvfCreateGraphicsPipeline(RenderCore::Get().GetDevice(), m_pipeline_layout, builder, m_renderpass); DebugLog("Vulkan : graphics pipeline created"); kvfDestroyGPipelineBuilder(builder); + + #ifdef MLX_HAS_DEBUG_UTILS_FUNCTIONS + VkDebugUtilsObjectNameInfoEXT name_info{}; + name_info.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT; + name_info.objectType = VK_OBJECT_TYPE_PIPELINE; + name_info.objectHandle = reinterpret_cast(m_pipeline); + name_info.pObjectName = m_debug_name.c_str(); + RenderCore::Get().vkSetDebugUtilsObjectNameEXT(RenderCore::Get().GetDevice(), &name_info); + + name_info.objectType = VK_OBJECT_TYPE_RENDER_PASS; + name_info.objectHandle = reinterpret_cast(m_renderpass); + RenderCore::Get().vkSetDebugUtilsObjectNameEXT(RenderCore::Get().GetDevice(), &name_info); + + name_info.objectType = VK_OBJECT_TYPE_SHADER_MODULE; + name_info.objectHandle = reinterpret_cast(p_vertex_shader->GetShaderModule()); + RenderCore::Get().vkSetDebugUtilsObjectNameEXT(RenderCore::Get().GetDevice(), &name_info); + + name_info.objectHandle = reinterpret_cast(p_fragment_shader->GetShaderModule()); + RenderCore::Get().vkSetDebugUtilsObjectNameEXT(RenderCore::Get().GetDevice(), &name_info); + + name_info.objectType = VK_OBJECT_TYPE_FRAMEBUFFER; + for(VkFramebuffer fb : m_framebuffers) + { + name_info.objectHandle = reinterpret_cast(fb); + RenderCore::Get().vkSetDebugUtilsObjectNameEXT(RenderCore::Get().GetDevice(), &name_info); + } + #endif } bool GraphicPipeline::BindPipeline(VkCommandBuffer command_buffer, std::size_t framebuffer_index, std::array clear) noexcept diff --git a/runtime/Sources/Renderer/RenderPasses/2DPass.cpp b/runtime/Sources/Renderer/RenderPasses/2DPass.cpp index a93a2e7..f17b915 100644 --- a/runtime/Sources/Renderer/RenderPasses/2DPass.cpp +++ b/runtime/Sources/Renderer/RenderPasses/2DPass.cpp @@ -14,7 +14,7 @@ namespace mlx Vec4f position; }; - void Render2DPass::Init() + void Render2DPass::Init(Renderer& renderer) { MLX_PROFILE_FUNCTION(); ShaderLayout vertex_shader_layout( @@ -44,25 +44,24 @@ namespace mlx }; p_fragment_shader = std::make_shared(fragment_shader_code, ShaderType::Fragment, std::move(fragment_shader_layout)); - func::function functor = [this](const EventBase& event) + + func::function functor = [this, &renderer](const EventBase& event) { if(event.What() == Event::ResizeEventCode) m_pipeline.Destroy(); if(event.What() == Event::DescriptorPoolResetEventCode) { - p_texture_set->Reallocate(); - p_viewer_data_set->Reallocate(); - for(std::size_t i = 0; i < MAX_FRAMES_IN_FLIGHT; i++) - { - p_viewer_data_set->SetUniformBuffer(i, 0, p_viewer_data_buffer->Get(i)); - p_viewer_data_set->Update(i); - } + std::uint32_t frame_index = renderer.GetCurrentFrameIndex(); + p_texture_set->Reallocate(frame_index); + p_viewer_data_set->Reallocate(frame_index); + p_viewer_data_set->SetUniformBuffer(frame_index, 0, p_viewer_data_buffer->Get(frame_index)); + p_viewer_data_set->Update(frame_index); } }; EventBus::RegisterListener({ functor, "__MlxRender2DPass" }); - p_viewer_data_set = std::make_shared(p_vertex_shader->GetShaderLayout().set_layouts[0].second, p_vertex_shader->GetPipelineLayout().set_layouts[0], ShaderType::Vertex); - p_texture_set = std::make_shared(p_fragment_shader->GetShaderLayout().set_layouts[0].second, p_fragment_shader->GetPipelineLayout().set_layouts[0], ShaderType::Fragment); + p_viewer_data_set = 
std::make_shared(renderer.GetDescriptorPoolManager(), p_vertex_shader->GetShaderLayout().set_layouts[0].second, p_vertex_shader->GetPipelineLayout().set_layouts[0], ShaderType::Vertex); + p_texture_set = std::make_shared(renderer.GetDescriptorPoolManager(), p_fragment_shader->GetShaderLayout().set_layouts[0].second, p_fragment_shader->GetPipelineLayout().set_layouts[0], ShaderType::Fragment); p_viewer_data_buffer = std::make_shared(); p_viewer_data_buffer->Init(sizeof(ViewerData), "mlx_2d_pass_viewer_data"); @@ -84,13 +83,17 @@ namespace mlx pipeline_descriptor.color_attachments = { &render_target }; pipeline_descriptor.depth = &scene.GetDepth(); pipeline_descriptor.clear_color_attachments = false; - m_pipeline.Init(pipeline_descriptor); + #ifdef DEBUG + m_pipeline.Init(pipeline_descriptor, "mlx_2D_pass"); + #else + m_pipeline.Init(pipeline_descriptor, {}); + #endif } std::uint32_t frame_index = renderer.GetCurrentFrameIndex(); ViewerData viewer_data; - viewer_data.projection_matrix = Mat4f::Ortho(0.0f, render_target.GetWidth(), render_target.GetHeight(), 0.0f); + viewer_data.projection_matrix = Mat4f::Ortho(0.0f, render_target.GetWidth(), 0.0f, render_target.GetHeight(), -1.0f, 100'000.0f); static CPUBuffer buffer(sizeof(ViewerData)); std::memcpy(buffer.GetData(), &viewer_data, buffer.GetSize()); p_viewer_data_buffer->SetData(buffer, frame_index); @@ -108,8 +111,8 @@ namespace mlx sprite->GetTexture()->Update(cmd); sprite->Bind(frame_index, cmd); std::array sets = { p_viewer_data_set->GetSet(frame_index), sprite->GetSet(frame_index) }; - mlx::RenderCore::Get().vkCmdBindDescriptorSets(cmd, m_pipeline.GetPipelineBindPoint(), m_pipeline.GetPipelineLayout(), 0, sets.size(), sets.data(), 0, nullptr); - mlx::RenderCore::Get().vkCmdPushConstants(cmd, m_pipeline.GetPipelineLayout(), VK_SHADER_STAGE_VERTEX_BIT, 0, sizeof(SpriteData), &sprite_data); + RenderCore::Get().vkCmdBindDescriptorSets(cmd, m_pipeline.GetPipelineBindPoint(), m_pipeline.GetPipelineLayout(), 0, sets.size(), sets.data(), 0, nullptr); + RenderCore::Get().vkCmdPushConstants(cmd, m_pipeline.GetPipelineLayout(), VK_SHADER_STAGE_VERTEX_BIT, 0, sizeof(SpriteData), &sprite_data); sprite->GetMesh()->Draw(cmd, renderer.GetDrawCallsCounterRef(), renderer.GetPolygonDrawnCounterRef()); } m_pipeline.EndPipeline(cmd); diff --git a/runtime/Sources/Renderer/RenderPasses/FinalPass.cpp b/runtime/Sources/Renderer/RenderPasses/FinalPass.cpp index b292109..a0b42af 100644 --- a/runtime/Sources/Renderer/RenderPasses/FinalPass.cpp +++ b/runtime/Sources/Renderer/RenderPasses/FinalPass.cpp @@ -7,7 +7,7 @@ namespace mlx { - void FinalPass::Init() + void FinalPass::Init(Renderer& renderer) { MLX_PROFILE_FUNCTION(); ShaderLayout vertex_shader_layout( @@ -31,16 +31,16 @@ namespace mlx }; p_fragment_shader = std::make_shared(fragment_shader_code, ShaderType::Fragment, std::move(fragment_shader_layout)); - func::function functor = [this](const EventBase& event) + func::function functor = [this, &renderer](const EventBase& event) { if(event.What() == Event::ResizeEventCode) m_pipeline.Destroy(); if(event.What() == Event::DescriptorPoolResetEventCode) - p_set->Reallocate(); + p_set->Reallocate(renderer.GetCurrentFrameIndex()); }; EventBus::RegisterListener({ functor, "__MlxFinalPass" }); - p_set = std::make_shared(p_fragment_shader->GetShaderLayout().set_layouts[0].second, p_fragment_shader->GetPipelineLayout().set_layouts[0], ShaderType::Fragment); + p_set = std::make_shared(renderer.GetDescriptorPoolManager(), 
p_fragment_shader->GetShaderLayout().set_layouts[0].second, p_fragment_shader->GetPipelineLayout().set_layouts[0], ShaderType::Fragment); } void FinalPass::Pass([[maybe_unused]] Scene& scene, Renderer& renderer, Texture& render_target) @@ -53,7 +53,11 @@ namespace mlx pipeline_descriptor.fragment_shader = p_fragment_shader; pipeline_descriptor.renderer = &renderer; pipeline_descriptor.no_vertex_inputs = true; - m_pipeline.Init(pipeline_descriptor); + #ifdef DEBUG + m_pipeline.Init(pipeline_descriptor, "mlx_final_pass"); + #else + m_pipeline.Init(pipeline_descriptor, {}); + #endif } VkCommandBuffer cmd = renderer.GetActiveCommandBuffer(); diff --git a/runtime/Sources/Renderer/RenderPasses/Passes.cpp b/runtime/Sources/Renderer/RenderPasses/Passes.cpp index 0a4b2bc..b23ebc8 100644 --- a/runtime/Sources/Renderer/RenderPasses/Passes.cpp +++ b/runtime/Sources/Renderer/RenderPasses/Passes.cpp @@ -5,39 +5,37 @@ namespace mlx { - void RenderPasses::Init() + void RenderPasses::Init(Renderer& renderer) { - m_2Dpass.Init(); - m_final.Init(); + m_2Dpass.Init(renderer); + m_final.Init(renderer); + func::function functor = [this, renderer](const EventBase& event) + { + if(event.What() == Event::ResizeEventCode) + { + m_main_render_texture.Destroy(); + auto extent = kvfGetSwapchainImagesSize(renderer.GetSwapchain()); + #ifdef DEBUG + m_main_render_texture.Init({}, extent.width, extent.height, VK_FORMAT_R8G8B8A8_SRGB, false, "mlx_renderpasses_target"); + #else + m_main_render_texture.Init({}, extent.width, extent.height, VK_FORMAT_R8G8B8A8_SRGB, false, {}); + #endif + m_main_render_texture.TransitionLayout(VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL); + } + }; + EventBus::RegisterListener({ functor, "__MlxRenderPasses" }); + auto extent = kvfGetSwapchainImagesSize(renderer.GetSwapchain()); + + #ifdef DEBUG + m_main_render_texture.Init({}, extent.width, extent.height, VK_FORMAT_R8G8B8A8_SRGB, false, "mlx_renderpasses_target"); + #else + m_main_render_texture.Init({}, extent.width, extent.height, VK_FORMAT_R8G8B8A8_SRGB, false, {}); + #endif + m_main_render_texture.TransitionLayout(VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL); } void RenderPasses::Pass(Scene& scene, Renderer& renderer) { - if(!m_main_render_texture.IsInit()) - { - func::function functor = [this, renderer](const EventBase& event) - { - if(event.What() == Event::ResizeEventCode) - { - m_main_render_texture.Destroy(); - auto extent = kvfGetSwapchainImagesSize(renderer.GetSwapchain()); - #ifdef DEBUG - m_main_render_texture.Init({}, extent.width, extent.height, VK_FORMAT_R8G8B8A8_SRGB, false, "mlx_renderpasses_target"); - #else - m_main_render_texture.Init({}, extent.width, extent.height, VK_FORMAT_R8G8B8A8_SRGB, false, {}); - #endif - } - }; - EventBus::RegisterListener({ functor, "__MlxRenderPasses" }); - auto extent = kvfGetSwapchainImagesSize(renderer.GetSwapchain()); - - #ifdef DEBUG - m_main_render_texture.Init({}, extent.width, extent.height, VK_FORMAT_R8G8B8A8_SRGB, false, "mlx_renderpasses_target"); - #else - m_main_render_texture.Init({}, extent.width, extent.height, VK_FORMAT_R8G8B8A8_SRGB, false, {}); - #endif - } - m_main_render_texture.Clear(renderer.GetActiveCommandBuffer(), Vec4f{ 0.0f, 0.0f, 0.0f, 1.0f }); scene.GetDepth().Clear(renderer.GetActiveCommandBuffer(), {}); diff --git a/runtime/Sources/Renderer/Renderer.cpp b/runtime/Sources/Renderer/Renderer.cpp index a7f3fcf..d1a4f07 100644 --- a/runtime/Sources/Renderer/Renderer.cpp +++ b/runtime/Sources/Renderer/Renderer.cpp @@ -59,6 +59,13 @@ namespace mlx { MLX_PROFILE_FUNCTION(); 
kvfWaitForFence(RenderCore::Get().GetDevice(), m_cmd_fences[m_current_frame_index]); + static bool first_run = true; + if(!first_run) + { + m_descriptor_pool_manager.ResetPoolsFromFrameIndex(m_current_frame_index); + EventBus::SendBroadcast(Internal::DescriptorPoolResetEventBroadcast{}); + } + first_run = false; VkResult result = RenderCore::Get().vkAcquireNextImageKHR(RenderCore::Get().GetDevice(), m_swapchain, UINT64_MAX, m_image_available_semaphores[m_current_frame_index], VK_NULL_HANDLE, &m_swapchain_image_index); if(result == VK_ERROR_OUT_OF_DATE_KHR) { @@ -92,8 +99,6 @@ namespace mlx EventBus::SendBroadcast(Internal::ResizeEventBroadcast{}); } m_current_frame_index = (m_current_frame_index + 1) % MAX_FRAMES_IN_FLIGHT; - kvfResetDeviceDescriptorPools(RenderCore::Get().GetDevice()); - EventBus::SendBroadcast(Internal::DescriptorPoolResetEventBroadcast{}); } void Renderer::CreateSwapchain() @@ -145,6 +150,7 @@ namespace mlx DebugLog("Vulkan : fence destroyed"); } + m_descriptor_pool_manager.Destroy(); DestroySwapchain(); RenderCore::Get().vkDestroySurfaceKHR(RenderCore::Get().GetInstance(), m_surface, nullptr); DebugLog("Vulkan : surface destroyed"); diff --git a/runtime/Sources/Renderer/SceneRenderer.cpp b/runtime/Sources/Renderer/SceneRenderer.cpp index 09903cb..c1939fd 100644 --- a/runtime/Sources/Renderer/SceneRenderer.cpp +++ b/runtime/Sources/Renderer/SceneRenderer.cpp @@ -6,10 +6,10 @@ namespace mlx { - void SceneRenderer::Init() + void SceneRenderer::Init(Renderer& renderer) { MLX_PROFILE_FUNCTION(); - m_passes.Init(); + m_passes.Init(renderer); } void SceneRenderer::Render(Scene& scene, Renderer& renderer) diff --git a/third_party/kvf.h b/third_party/kvf.h index 164466e..8256b24 100755 --- a/third_party/kvf.h +++ b/third_party/kvf.h @@ -2415,7 +2415,7 @@ void kvfSubmitSingleTimeCommandBuffer(VkDevice device, VkCommandBuffer buffer, K submit_info.pCommandBuffers = &buffer; __kvfCheckVk(KVF_GET_DEVICE_FUNCTION(vkQueueSubmit)(kvfGetDeviceQueue(device, queue), 1, &submit_info, fence)); if(fence != VK_NULL_HANDLE) - KVF_GET_DEVICE_FUNCTION(vkWaitForFences)(device, 1, &fence, VK_TRUE, UINT64_MAX); + kvfWaitForFence(device, fence); } VkAttachmentDescription kvfBuildAttachmentDescription(KvfImageType type, VkFormat format, VkImageLayout initial, VkImageLayout final, bool clear, VkSampleCountFlagBits samples) From 74dd8a01e2d96b63a441b539d9f99ae9730ba5f0 Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Thu, 17 Oct 2024 15:37:39 +0200 Subject: [PATCH 040/131] fixing projection issue --- example/main.c | 55 ++++++++++++--- runtime/Includes/Core/Graphics.inl | 1 + runtime/Includes/Embedded/2DVertex.nzsl | 5 +- runtime/Includes/Embedded/2DVertex.spv.h | 69 ++++++++++--------- runtime/Includes/Maths/Readme.md | 1 + runtime/Sources/Core/Application.cpp | 2 + runtime/Sources/Core/EventBus.cpp | 2 + runtime/Sources/Graphics/Mesh.cpp | 2 + runtime/Sources/Graphics/Sprite.cpp | 6 +- runtime/Sources/Renderer/Descriptor.cpp | 1 + .../Sources/Renderer/Pipelines/Graphics.cpp | 7 +- .../Sources/Renderer/RenderPasses/2DPass.cpp | 9 ++- runtime/Sources/Renderer/Renderer.cpp | 14 ++-- 13 files changed, 117 insertions(+), 57 deletions(-) create mode 100644 runtime/Includes/Maths/Readme.md diff --git a/example/main.c b/example/main.c index 23c5b8e..3b27ec7 100644 --- a/example/main.c +++ b/example/main.c @@ -6,13 +6,14 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/04 17:55:21 by maldavid #+# #+# */ -/* Updated: 2024/03/25 18:10:41 by maldavid ### ########.fr */ +/* Updated: 
2024/10/03 06:39:01 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ #include #include "../includes/mlx.h" +/* typedef struct { void* mlx; @@ -32,13 +33,13 @@ int update(void* param) if(i == 200) mlx_clear_window(mlx->mlx, mlx->win); -/* - if(img) - mlx_destroy_image(mlx->mlx,img); - img = mlx_new_image(mlx->mlx, 800, 800); - mlx_set_image_pixel(mlx->mlx, img, 4, 4, 0xFF00FF00); - mlx_put_image_to_window(mlx->mlx, mlx->win, img, 0, 0); -*/ + +// if(img) +// mlx_destroy_image(mlx->mlx,img); +// img = mlx_new_image(mlx->mlx, 800, 800); +// mlx_set_image_pixel(mlx->mlx, img, 4, 4, 0xFF00FF00); +// mlx_put_image_to_window(mlx->mlx, mlx->win, img, 0, 0); + if(i >= 250) mlx_set_font_scale(mlx->mlx, mlx->win, "default", 16.f); else @@ -176,3 +177,41 @@ int main(void) return 0; } +*/ + +int window_hook(int event, void* param) +{ + if(event == 0) + mlx_loop_end(param); + return 0; +} + +int main(void) +{ + void* mlx; + void* win; + + mlx = mlx_init(); + win = mlx_new_window(mlx, 400, 400, "My window"); + + mlx_set_fps_goal(mlx, 60); + + mlx_on_event(mlx, win, MLX_WINDOW_EVENT, window_hook, mlx); + + int dummy; + void* logo_png = mlx_png_file_to_image(mlx, "42_logo.png", &dummy, &dummy); + + mlx_put_image_to_window(mlx, win, logo_png, 10, 190); + mlx_put_image_to_window(mlx, win, logo_png, 11, 190); + mlx_put_image_to_window(mlx, win, logo_png, 10, 190); + mlx_put_image_to_window(mlx, win, logo_png, 100, 190); + mlx_put_image_to_window(mlx, win, logo_png, 10, 190); + + mlx_loop(mlx); + + mlx_destroy_image(mlx, logo_png); + mlx_destroy_window(mlx, win); + mlx_destroy_display(mlx); + + return 0; +} diff --git a/runtime/Includes/Core/Graphics.inl b/runtime/Includes/Core/Graphics.inl index 33b0df6..93a7760 100644 --- a/runtime/Includes/Core/Graphics.inl +++ b/runtime/Includes/Core/Graphics.inl @@ -1,5 +1,6 @@ #pragma once #include +#include namespace mlx { diff --git a/runtime/Includes/Embedded/2DVertex.nzsl b/runtime/Includes/Embedded/2DVertex.nzsl index 67f7e09..8e62204 100644 --- a/runtime/Includes/Embedded/2DVertex.nzsl +++ b/runtime/Includes/Embedded/2DVertex.nzsl @@ -34,10 +34,11 @@ external [entry(vert)] fn main(input: VertIn) -> VertOut { - input.uv.x *= -1.0; + let position: vec2[f32] = input.pos.xy + model.position.xy; + input.uv *= -1.0; let output: VertOut; output.uv = input.uv; output.color = model.color; - output.pos = viewer_data.projection_matrix * (input.pos + model.position); + output.pos = viewer_data.projection_matrix * vec4[f32](position, 0.0, 1.0); return output; } diff --git a/runtime/Includes/Embedded/2DVertex.spv.h b/runtime/Includes/Embedded/2DVertex.spv.h index ac303d2..3f13839 100644 --- a/runtime/Includes/Embedded/2DVertex.spv.h +++ b/runtime/Includes/Embedded/2DVertex.spv.h @@ -1,5 +1,5 @@ -3,2,35,7,0,0,1,0,39,0,0,0,67,0,0,0,0,0,0,0,17,0,2,0,1,0,0,0,14,0, -3,0,0,0,0,0,1,0,0,0,15,0,10,0,0,0,0,0,33,0,0,0,109,97,105,110,0,0,0,0, +3,2,35,7,0,0,1,0,39,0,0,0,71,0,0,0,0,0,0,0,17,0,2,0,1,0,0,0,14,0, +3,0,0,0,0,0,1,0,0,0,15,0,10,0,0,0,0,0,35,0,0,0,109,97,105,110,0,0,0,0, 13,0,0,0,19,0,0,0,25,0,0,0,27,0,0,0,28,0,0,0,3,0,3,0,0,0,0,0,100,0, 0,0,5,0,5,0,4,0,0,0,86,105,101,119,101,114,68,97,116,97,0,0,6,0,8,0,4,0,0,0, 0,0,0,0,112,114,111,106,101,99,116,105,111,110,95,109,97,116,114,105,120,0,0,0,5,0,5,0,7,0, @@ -12,7 +12,7 @@ 0,0,118,105,101,119,101,114,95,100,97,116,97,0,5,0,4,0,9,0,0,0,109,111,100,101,108,0,0,0, 5,0,3,0,13,0,0,0,112,111,115,0,5,0,3,0,19,0,0,0,117,118,0,0,5,0,4,0,25,0, 
0,0,99,111,108,111,114,0,0,0,5,0,3,0,27,0,0,0,117,118,0,0,5,0,5,0,28,0,0,0, -112,111,115,105,116,105,111,110,0,0,0,0,5,0,4,0,33,0,0,0,109,97,105,110,0,0,0,0,71,0, +112,111,115,105,116,105,111,110,0,0,0,0,5,0,4,0,35,0,0,0,109,97,105,110,0,0,0,0,71,0, 4,0,6,0,0,0,33,0,0,0,0,0,0,0,71,0,4,0,6,0,0,0,34,0,0,0,0,0,0,0, 71,0,4,0,28,0,0,0,11,0,0,0,0,0,0,0,71,0,4,0,13,0,0,0,30,0,0,0,0,0, 0,0,71,0,4,0,19,0,0,0,30,0,0,0,1,0,0,0,71,0,4,0,25,0,0,0,30,0,0,0, @@ -36,33 +36,36 @@ 4,0,23,0,0,0,7,0,0,0,22,0,0,0,32,0,4,0,24,0,0,0,3,0,0,0,2,0,0,0, 32,0,4,0,26,0,0,0,3,0,0,0,17,0,0,0,30,0,5,0,29,0,0,0,2,0,0,0,17,0, 0,0,2,0,0,0,43,0,4,0,1,0,0,0,30,0,0,0,0,0,128,191,32,0,4,0,31,0,0,0, -7,0,0,0,29,0,0,0,43,0,4,0,14,0,0,0,32,0,0,0,2,0,0,0,32,0,4,0,44,0, -0,0,7,0,0,0,1,0,0,0,32,0,4,0,49,0,0,0,9,0,0,0,2,0,0,0,32,0,4,0, -53,0,0,0,2,0,0,0,3,0,0,0,59,0,4,0,5,0,0,0,6,0,0,0,2,0,0,0,59,0, -4,0,8,0,0,0,9,0,0,0,9,0,0,0,59,0,4,0,12,0,0,0,13,0,0,0,1,0,0,0, -59,0,4,0,18,0,0,0,19,0,0,0,1,0,0,0,59,0,4,0,24,0,0,0,25,0,0,0,3,0, -0,0,59,0,4,0,26,0,0,0,27,0,0,0,3,0,0,0,59,0,4,0,24,0,0,0,28,0,0,0, -3,0,0,0,54,0,5,0,10,0,0,0,33,0,0,0,0,0,0,0,11,0,0,0,248,0,2,0,34,0, -0,0,59,0,4,0,31,0,0,0,35,0,0,0,7,0,0,0,59,0,4,0,23,0,0,0,36,0,0,0, -7,0,0,0,65,0,5,0,16,0,0,0,37,0,0,0,36,0,0,0,15,0,0,0,63,0,3,0,37,0, -0,0,13,0,0,0,65,0,5,0,21,0,0,0,38,0,0,0,36,0,0,0,20,0,0,0,63,0,3,0, -38,0,0,0,19,0,0,0,65,0,5,0,21,0,0,0,39,0,0,0,36,0,0,0,20,0,0,0,61,0, -4,0,17,0,0,0,40,0,0,0,39,0,0,0,81,0,5,0,1,0,0,0,41,0,0,0,40,0,0,0, -0,0,0,0,133,0,5,0,1,0,0,0,42,0,0,0,41,0,0,0,30,0,0,0,65,0,5,0,21,0, -0,0,43,0,0,0,36,0,0,0,20,0,0,0,65,0,5,0,44,0,0,0,45,0,0,0,43,0,0,0, -15,0,0,0,62,0,3,0,45,0,0,0,42,0,0,0,65,0,5,0,21,0,0,0,46,0,0,0,36,0, -0,0,20,0,0,0,61,0,4,0,17,0,0,0,47,0,0,0,46,0,0,0,65,0,5,0,21,0,0,0, -48,0,0,0,35,0,0,0,20,0,0,0,62,0,3,0,48,0,0,0,47,0,0,0,65,0,5,0,49,0, -0,0,50,0,0,0,9,0,0,0,15,0,0,0,61,0,4,0,2,0,0,0,51,0,0,0,50,0,0,0, -65,0,5,0,16,0,0,0,52,0,0,0,35,0,0,0,15,0,0,0,62,0,3,0,52,0,0,0,51,0, -0,0,65,0,5,0,53,0,0,0,54,0,0,0,6,0,0,0,15,0,0,0,61,0,4,0,3,0,0,0, -55,0,0,0,54,0,0,0,65,0,5,0,16,0,0,0,56,0,0,0,36,0,0,0,15,0,0,0,61,0, -4,0,2,0,0,0,57,0,0,0,56,0,0,0,65,0,5,0,49,0,0,0,58,0,0,0,9,0,0,0, -20,0,0,0,61,0,4,0,2,0,0,0,59,0,0,0,58,0,0,0,129,0,5,0,2,0,0,0,60,0, -0,0,57,0,0,0,59,0,0,0,145,0,5,0,2,0,0,0,61,0,0,0,55,0,0,0,60,0,0,0, -65,0,5,0,16,0,0,0,62,0,0,0,35,0,0,0,32,0,0,0,62,0,3,0,62,0,0,0,61,0, -0,0,61,0,4,0,29,0,0,0,63,0,0,0,35,0,0,0,81,0,5,0,2,0,0,0,64,0,0,0, -63,0,0,0,0,0,0,0,62,0,3,0,25,0,0,0,64,0,0,0,81,0,5,0,17,0,0,0,65,0, -0,0,63,0,0,0,1,0,0,0,62,0,3,0,27,0,0,0,65,0,0,0,81,0,5,0,2,0,0,0, -66,0,0,0,63,0,0,0,2,0,0,0,62,0,3,0,28,0,0,0,66,0,0,0,253,0,1,0,56,0, -1,0 +7,0,0,0,29,0,0,0,43,0,4,0,14,0,0,0,32,0,0,0,2,0,0,0,43,0,4,0,1,0, +0,0,33,0,0,0,0,0,0,0,43,0,4,0,1,0,0,0,34,0,0,0,0,0,128,63,32,0,4,0, +45,0,0,0,9,0,0,0,2,0,0,0,32,0,4,0,60,0,0,0,2,0,0,0,3,0,0,0,59,0, +4,0,5,0,0,0,6,0,0,0,2,0,0,0,59,0,4,0,8,0,0,0,9,0,0,0,9,0,0,0, +59,0,4,0,12,0,0,0,13,0,0,0,1,0,0,0,59,0,4,0,18,0,0,0,19,0,0,0,1,0, +0,0,59,0,4,0,24,0,0,0,25,0,0,0,3,0,0,0,59,0,4,0,26,0,0,0,27,0,0,0, +3,0,0,0,59,0,4,0,24,0,0,0,28,0,0,0,3,0,0,0,54,0,5,0,10,0,0,0,35,0, +0,0,0,0,0,0,11,0,0,0,248,0,2,0,36,0,0,0,59,0,4,0,21,0,0,0,37,0,0,0, +7,0,0,0,59,0,4,0,31,0,0,0,38,0,0,0,7,0,0,0,59,0,4,0,23,0,0,0,39,0, +0,0,7,0,0,0,65,0,5,0,16,0,0,0,40,0,0,0,39,0,0,0,15,0,0,0,63,0,3,0, +40,0,0,0,13,0,0,0,65,0,5,0,21,0,0,0,41,0,0,0,39,0,0,0,20,0,0,0,63,0, +3,0,41,0,0,0,19,0,0,0,65,0,5,0,16,0,0,0,42,0,0,0,39,0,0,0,15,0,0,0, +61,0,4,0,2,0,0,0,43,0,0,0,42,0,0,0,79,0,7,0,17,0,0,0,44,0,0,0,43,0, 
+0,0,43,0,0,0,0,0,0,0,1,0,0,0,65,0,5,0,45,0,0,0,46,0,0,0,9,0,0,0, +20,0,0,0,61,0,4,0,2,0,0,0,47,0,0,0,46,0,0,0,79,0,7,0,17,0,0,0,48,0, +0,0,47,0,0,0,47,0,0,0,0,0,0,0,1,0,0,0,129,0,5,0,17,0,0,0,49,0,0,0, +44,0,0,0,48,0,0,0,62,0,3,0,37,0,0,0,49,0,0,0,65,0,5,0,21,0,0,0,50,0, +0,0,39,0,0,0,20,0,0,0,61,0,4,0,17,0,0,0,51,0,0,0,50,0,0,0,142,0,5,0, +17,0,0,0,52,0,0,0,51,0,0,0,30,0,0,0,65,0,5,0,21,0,0,0,53,0,0,0,39,0, +0,0,20,0,0,0,62,0,3,0,53,0,0,0,52,0,0,0,65,0,5,0,21,0,0,0,54,0,0,0, +39,0,0,0,20,0,0,0,61,0,4,0,17,0,0,0,55,0,0,0,54,0,0,0,65,0,5,0,21,0, +0,0,56,0,0,0,38,0,0,0,20,0,0,0,62,0,3,0,56,0,0,0,55,0,0,0,65,0,5,0, +45,0,0,0,57,0,0,0,9,0,0,0,15,0,0,0,61,0,4,0,2,0,0,0,58,0,0,0,57,0, +0,0,65,0,5,0,16,0,0,0,59,0,0,0,38,0,0,0,15,0,0,0,62,0,3,0,59,0,0,0, +58,0,0,0,65,0,5,0,60,0,0,0,61,0,0,0,6,0,0,0,15,0,0,0,61,0,4,0,3,0, +0,0,62,0,0,0,61,0,0,0,61,0,4,0,17,0,0,0,63,0,0,0,37,0,0,0,80,0,6,0, +2,0,0,0,64,0,0,0,63,0,0,0,33,0,0,0,34,0,0,0,145,0,5,0,2,0,0,0,65,0, +0,0,62,0,0,0,64,0,0,0,65,0,5,0,16,0,0,0,66,0,0,0,38,0,0,0,32,0,0,0, +62,0,3,0,66,0,0,0,65,0,0,0,61,0,4,0,29,0,0,0,67,0,0,0,38,0,0,0,81,0, +5,0,2,0,0,0,68,0,0,0,67,0,0,0,0,0,0,0,62,0,3,0,25,0,0,0,68,0,0,0, +81,0,5,0,17,0,0,0,69,0,0,0,67,0,0,0,1,0,0,0,62,0,3,0,27,0,0,0,69,0, +0,0,81,0,5,0,2,0,0,0,70,0,0,0,67,0,0,0,2,0,0,0,62,0,3,0,28,0,0,0, +70,0,0,0,253,0,1,0,56,0,1,0 diff --git a/runtime/Includes/Maths/Readme.md b/runtime/Includes/Maths/Readme.md new file mode 100644 index 0000000..1ef71b6 --- /dev/null +++ b/runtime/Includes/Maths/Readme.md @@ -0,0 +1 @@ +Highly inspired by [Nazara Maths library](https://github.com/NazaraEngine/NazaraEngine/tree/main/include/Nazara/Math) diff --git a/runtime/Sources/Core/Application.cpp b/runtime/Sources/Core/Application.cpp index 066df7e..433bb81 100644 --- a/runtime/Sources/Core/Application.cpp +++ b/runtime/Sources/Core/Application.cpp @@ -36,6 +36,7 @@ namespace mlx if(f_loop_hook) f_loop_hook(p_param); + #pragma omp parallel for for(auto& gs : m_graphics) { if(gs) @@ -80,6 +81,7 @@ namespace mlx Error("trying to destroy a texture that has already been destroyed"); else texture->Destroy(); + #pragma omp parallel for for(auto& gs : m_graphics) { if(gs) diff --git a/runtime/Sources/Core/EventBus.cpp b/runtime/Sources/Core/EventBus.cpp index 721796a..d67711a 100644 --- a/runtime/Sources/Core/EventBus.cpp +++ b/runtime/Sources/Core/EventBus.cpp @@ -6,6 +6,7 @@ namespace mlx { void EventBus::Send(const std::string& listener_name, const EventBase& event) { + #pragma omp parallel for for(const EventListener& listener : s_listeners) { if(listener.GetName() == listener_name) @@ -19,6 +20,7 @@ namespace mlx void EventBus::SendBroadcast(const EventBase& event) { + #pragma omp parallel for for(const EventListener& listener : s_listeners) listener.Call(event); } diff --git a/runtime/Sources/Graphics/Mesh.cpp b/runtime/Sources/Graphics/Mesh.cpp index 751d12e..cebfe54 100644 --- a/runtime/Sources/Graphics/Mesh.cpp +++ b/runtime/Sources/Graphics/Mesh.cpp @@ -7,6 +7,7 @@ namespace mlx void Mesh::Draw(VkCommandBuffer cmd, std::size_t& drawcalls, std::size_t& polygondrawn) const noexcept { MLX_PROFILE_FUNCTION(); + #pragma omp parallel for for(std::size_t i = 0; i < m_sub_meshes.size(); i++) Draw(cmd, drawcalls, polygondrawn, i); } @@ -25,6 +26,7 @@ namespace mlx Mesh::~Mesh() { MLX_PROFILE_FUNCTION(); + #pragma omp parallel for for(auto& mesh : m_sub_meshes) { mesh.vbo.Destroy(); diff --git a/runtime/Sources/Graphics/Sprite.cpp b/runtime/Sources/Graphics/Sprite.cpp index a57d27a..a30df18 100644 --- 
a/runtime/Sources/Graphics/Sprite.cpp +++ b/runtime/Sources/Graphics/Sprite.cpp @@ -23,7 +23,7 @@ namespace mlx data[3].position = Vec4f(x, y + height, 0.0f, 1.0f); data[3].uv = Vec2f(1.0f, 0.0f); - std::vector indices = { + std::vector indices{ 0, 1, 2, @@ -40,7 +40,7 @@ namespace mlx Sprite::Sprite(Renderer& renderer, NonOwningPtr texture) { MLX_PROFILE_FUNCTION(); - Verify((bool)texture, "Sprite: invalid texture"); + Verify((bool)texture, "Sprite: invalid texture (internal mlx issue, please report to devs)"); p_mesh = CreateQuad(0, 0, texture->GetWidth(), texture->GetHeight()); p_texture = texture; @@ -49,6 +49,6 @@ namespace mlx if(event.What() == Event::DescriptorPoolResetEventCode) m_set.Reallocate(renderer.GetCurrentFrameIndex()); }; - EventBus::RegisterListener({ functor, "__Sprite" + std::to_string(reinterpret_cast(this)) }); + EventBus::RegisterListener({ functor, "__MlxSprite" + std::to_string(reinterpret_cast(this)) }); } } diff --git a/runtime/Sources/Renderer/Descriptor.cpp b/runtime/Sources/Renderer/Descriptor.cpp index 4e168a3..5fb07bc 100644 --- a/runtime/Sources/Renderer/Descriptor.cpp +++ b/runtime/Sources/Renderer/Descriptor.cpp @@ -99,6 +99,7 @@ namespace mlx void DescriptorPoolManager::Destroy() { + #pragma omp parallel for for(auto& pool : m_pools) pool.Destroy(); } diff --git a/runtime/Sources/Renderer/Pipelines/Graphics.cpp b/runtime/Sources/Renderer/Pipelines/Graphics.cpp index 267b061..226d606 100644 --- a/runtime/Sources/Renderer/Pipelines/Graphics.cpp +++ b/runtime/Sources/Renderer/Pipelines/Graphics.cpp @@ -1,10 +1,10 @@ -#include "vulkan/vulkan_core.h" #include #include #include #include #include #include +#include namespace mlx { @@ -114,6 +114,7 @@ namespace mlx scissor.extent = fb_extent; RenderCore::Get().vkCmdSetScissor(command_buffer, 0, 1, &scissor); + #pragma omp parallel for for(std::size_t i = 0; i < m_clears.size(); i++) { m_clears[i].color.float32[0] = clear[0]; @@ -141,6 +142,7 @@ namespace mlx MLX_PROFILE_FUNCTION(); p_vertex_shader.reset(); p_fragment_shader.reset(); + #pragma omp parallel for for(auto& fb : m_framebuffers) { kvfDestroyFramebuffer(RenderCore::Get().GetDevice(), fb); @@ -169,6 +171,7 @@ namespace mlx attachment_views.push_back(p_renderer->GetSwapchainImages()[0].GetImageView()); } + #pragma omp parallel for for(NonOwningPtr image : render_targets) { attachments.push_back(kvfBuildAttachmentDescription((kvfIsDepthFormat(image->GetFormat()) ? 
KVF_IMAGE_DEPTH : KVF_IMAGE_COLOR), image->GetFormat(), image->GetLayout(), image->GetLayout(), clear_attachments, VK_SAMPLE_COUNT_1_BIT)); @@ -195,6 +198,7 @@ namespace mlx DebugLog("Vulkan : framebuffer created"); } } + #pragma omp parallel for for(NonOwningPtr image : render_targets) { m_framebuffers.push_back(kvfCreateFramebuffer(RenderCore::Get().GetDevice(), m_renderpass, attachment_views.data(), attachment_views.size(), { .width = image->GetWidth(), .height = image->GetHeight() })); @@ -208,6 +212,7 @@ namespace mlx if(p_depth) p_depth->TransitionLayout(VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL, cmd); + #pragma omp parallel for for(NonOwningPtr image : m_attachments) { if(!image->IsInit()) diff --git a/runtime/Sources/Renderer/RenderPasses/2DPass.cpp b/runtime/Sources/Renderer/RenderPasses/2DPass.cpp index f17b915..5b9a9dd 100644 --- a/runtime/Sources/Renderer/RenderPasses/2DPass.cpp +++ b/runtime/Sources/Renderer/RenderPasses/2DPass.cpp @@ -5,6 +5,7 @@ #include #include #include +#include namespace mlx { @@ -44,7 +45,6 @@ namespace mlx }; p_fragment_shader = std::make_shared(fragment_shader_code, ShaderType::Fragment, std::move(fragment_shader_layout)); - func::function functor = [this, &renderer](const EventBase& event) { if(event.What() == Event::ResizeEventCode) @@ -65,6 +65,7 @@ namespace mlx p_viewer_data_buffer = std::make_shared(); p_viewer_data_buffer->Init(sizeof(ViewerData), "mlx_2d_pass_viewer_data"); + for(std::size_t i = 0; i < MAX_FRAMES_IN_FLIGHT; i++) { p_viewer_data_set->SetUniformBuffer(i, 0, p_viewer_data_buffer->Get(i)); @@ -93,13 +94,15 @@ namespace mlx std::uint32_t frame_index = renderer.GetCurrentFrameIndex(); ViewerData viewer_data; - viewer_data.projection_matrix = Mat4f::Ortho(0.0f, render_target.GetWidth(), 0.0f, render_target.GetHeight(), -1.0f, 100'000.0f); + viewer_data.projection_matrix = Mat4f::Ortho(0.0f, render_target.GetWidth(), render_target.GetHeight(), 0.0f, -1.0f, 100'000.0f); static CPUBuffer buffer(sizeof(ViewerData)); std::memcpy(buffer.GetData(), &viewer_data, buffer.GetSize()); p_viewer_data_buffer->SetData(buffer, frame_index); VkCommandBuffer cmd = renderer.GetActiveCommandBuffer(); m_pipeline.BindPipeline(cmd, 0, {}); + + #pragma omp parallel for for(auto sprite : scene.GetSprites()) { SpriteData sprite_data; @@ -107,7 +110,7 @@ namespace mlx sprite_data.color = sprite->GetColor(); if(!sprite->IsSetInit()) sprite->UpdateDescriptorSet(*p_texture_set); - Verify((bool)sprite->GetTexture(), "a sprite has no texture attached"); + Verify((bool)sprite->GetTexture(), "a sprite has no texture attached (internal mlx issue, please report to the devs)"); sprite->GetTexture()->Update(cmd); sprite->Bind(frame_index, cmd); std::array sets = { p_viewer_data_set->GetSet(frame_index), sprite->GetSet(frame_index) }; diff --git a/runtime/Sources/Renderer/Renderer.cpp b/runtime/Sources/Renderer/Renderer.cpp index d1a4f07..d128f79 100644 --- a/runtime/Sources/Renderer/Renderer.cpp +++ b/runtime/Sources/Renderer/Renderer.cpp @@ -69,10 +69,10 @@ namespace mlx VkResult result = RenderCore::Get().vkAcquireNextImageKHR(RenderCore::Get().GetDevice(), m_swapchain, UINT64_MAX, m_image_available_semaphores[m_current_frame_index], VK_NULL_HANDLE, &m_swapchain_image_index); if(result == VK_ERROR_OUT_OF_DATE_KHR) { - DestroySwapchain(); - CreateSwapchain(); - EventBus::SendBroadcast(Internal::ResizeEventBroadcast{}); - return false; + //DestroySwapchain(); + //CreateSwapchain(); + //EventBus::SendBroadcast(Internal::ResizeEventBroadcast{}); + //return false; } 
else if(result != VK_SUCCESS && result != VK_SUBOPTIMAL_KHR) FatalError("Vulkan error : failed to acquire swapchain image, %", kvfVerbaliseVkResult(result)); @@ -94,9 +94,9 @@ namespace mlx if(!kvfQueuePresentKHR(RenderCore::Get().GetDevice(), m_render_finished_semaphores[m_current_frame_index], m_swapchain, m_swapchain_image_index) || m_framebuffers_resize) { m_framebuffers_resize = false; - DestroySwapchain(); - CreateSwapchain(); - EventBus::SendBroadcast(Internal::ResizeEventBroadcast{}); + //DestroySwapchain(); + //CreateSwapchain(); + //EventBus::SendBroadcast(Internal::ResizeEventBroadcast{}); } m_current_frame_index = (m_current_frame_index + 1) % MAX_FRAMES_IN_FLIGHT; } From 0ea0cde668f88b013de1ed31903ec24f625b44bb Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Thu, 17 Oct 2024 18:35:33 +0200 Subject: [PATCH 041/131] fixing inputs bug, fixing missing depth image destruction in scenes --- .github/workflows/fetch_dependencies.yml | 2 +- includes/mlx.h | 4 +- runtime/Includes/Core/SDLManager.h | 8 +- runtime/Includes/Embedded/2DVertex.nzsl | 4 +- runtime/Includes/Embedded/2DVertex.spv.h | 54 ++++----- runtime/Includes/Graphics/Scene.h | 2 +- runtime/Includes/Platform/Inputs.h | 4 +- runtime/Includes/Platform/Window.h | 2 +- runtime/Sources/Core/Application.cpp | 2 + runtime/Sources/Core/SDLManager.cpp | 109 ++++++++---------- runtime/Sources/Graphics/Scene.cpp | 5 + runtime/Sources/Platform/Inputs.cpp | 10 +- runtime/Sources/Platform/Window.cpp | 4 +- .../Sources/Renderer/Pipelines/Graphics.cpp | 1 - .../Sources/Renderer/RenderPasses/2DPass.cpp | 1 - 15 files changed, 104 insertions(+), 108 deletions(-) diff --git a/.github/workflows/fetch_dependencies.yml b/.github/workflows/fetch_dependencies.yml index f9e1311..76c6f3d 100644 --- a/.github/workflows/fetch_dependencies.yml +++ b/.github/workflows/fetch_dependencies.yml @@ -2,7 +2,7 @@ name: Fetch Dependencies on: schedule: - - cron: '0 0 * * *' # Runs daily + - cron: '0 0 * * 0' # Runs weekly jobs: update-dependencies: diff --git a/includes/mlx.h b/includes/mlx.h index ef02780..e4f7d43 100644 --- a/includes/mlx.h +++ b/includes/mlx.h @@ -3,10 +3,10 @@ /* ::: :::::::: */ /* mlx.h :+: :+: :+: */ /* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ +/* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/04 16:56:35 by maldavid #+# #+# */ -/* Updated: 2024/09/15 09:23:48 by maldavid ### ########.fr */ +/* Updated: 2024/10/17 17:51:28 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ diff --git a/runtime/Includes/Core/SDLManager.h b/runtime/Includes/Core/SDLManager.h index e314f05..568f45e 100644 --- a/runtime/Includes/Core/SDLManager.h +++ b/runtime/Includes/Core/SDLManager.h @@ -10,9 +10,11 @@ namespace mlx public: SDLManager(); - Handle CreateWindow(const std::string& title, std::size_t w, std::size_t h, bool hidden); + Handle CreateWindow(const std::string& title, std::size_t w, std::size_t h, bool hidden, std::int32_t& id); void DestroyWindow(Handle window) noexcept; + void InputsFetcher(func::function functor); + VkSurfaceKHR CreateVulkanSurface(Handle window, VkInstance instance) const noexcept; std::vector GetRequiredVulkanInstanceExtentions() const noexcept; Vec2ui GetVulkanDrawableSize(Handle window) const noexcept; @@ -20,8 +22,6 @@ namespace mlx void GetScreenSizeWindowIsOn(Handle window, int* x, int* y) const noexcept; void SetWindowPosition(Handle window, int x, int y) const noexcept; - inline void SetEventCallback(func::function functor, void* 
userdata) { f_callback = std::move(functor); p_callback_data = userdata; } - std::int32_t GetX() const noexcept; std::int32_t GetY() const noexcept; std::int32_t GetXRel() const noexcept; @@ -36,8 +36,6 @@ namespace mlx static SDLManager* s_instance; std::unordered_set m_windows_registry; - func::function f_callback; - void* p_callback_data = nullptr; bool m_drop_sdl_responsability = false; }; } diff --git a/runtime/Includes/Embedded/2DVertex.nzsl b/runtime/Includes/Embedded/2DVertex.nzsl index 8e62204..a69261d 100644 --- a/runtime/Includes/Embedded/2DVertex.nzsl +++ b/runtime/Includes/Embedded/2DVertex.nzsl @@ -34,11 +34,11 @@ external [entry(vert)] fn main(input: VertIn) -> VertOut { - let position: vec2[f32] = input.pos.xy + model.position.xy; + let position: vec4[f32] = vec4[f32](input.pos.xyz + model.position.xyz, 1.0); input.uv *= -1.0; let output: VertOut; output.uv = input.uv; output.color = model.color; - output.pos = viewer_data.projection_matrix * vec4[f32](position, 0.0, 1.0); + output.pos = viewer_data.projection_matrix * position; return output; } diff --git a/runtime/Includes/Embedded/2DVertex.spv.h b/runtime/Includes/Embedded/2DVertex.spv.h index 3f13839..27b04db 100644 --- a/runtime/Includes/Embedded/2DVertex.spv.h +++ b/runtime/Includes/Embedded/2DVertex.spv.h @@ -35,37 +35,37 @@ 21,0,0,0,7,0,0,0,17,0,0,0,30,0,4,0,22,0,0,0,2,0,0,0,17,0,0,0,32,0, 4,0,23,0,0,0,7,0,0,0,22,0,0,0,32,0,4,0,24,0,0,0,3,0,0,0,2,0,0,0, 32,0,4,0,26,0,0,0,3,0,0,0,17,0,0,0,30,0,5,0,29,0,0,0,2,0,0,0,17,0, -0,0,2,0,0,0,43,0,4,0,1,0,0,0,30,0,0,0,0,0,128,191,32,0,4,0,31,0,0,0, -7,0,0,0,29,0,0,0,43,0,4,0,14,0,0,0,32,0,0,0,2,0,0,0,43,0,4,0,1,0, -0,0,33,0,0,0,0,0,0,0,43,0,4,0,1,0,0,0,34,0,0,0,0,0,128,63,32,0,4,0, -45,0,0,0,9,0,0,0,2,0,0,0,32,0,4,0,60,0,0,0,2,0,0,0,3,0,0,0,59,0, +0,0,2,0,0,0,43,0,4,0,14,0,0,0,30,0,0,0,2,0,0,0,23,0,4,0,31,0,0,0, +1,0,0,0,3,0,0,0,43,0,4,0,1,0,0,0,32,0,0,0,0,0,128,63,43,0,4,0,1,0, +0,0,33,0,0,0,0,0,128,191,32,0,4,0,34,0,0,0,7,0,0,0,29,0,0,0,32,0,4,0, +45,0,0,0,9,0,0,0,2,0,0,0,32,0,4,0,61,0,0,0,2,0,0,0,3,0,0,0,59,0, 4,0,5,0,0,0,6,0,0,0,2,0,0,0,59,0,4,0,8,0,0,0,9,0,0,0,9,0,0,0, 59,0,4,0,12,0,0,0,13,0,0,0,1,0,0,0,59,0,4,0,18,0,0,0,19,0,0,0,1,0, 0,0,59,0,4,0,24,0,0,0,25,0,0,0,3,0,0,0,59,0,4,0,26,0,0,0,27,0,0,0, 3,0,0,0,59,0,4,0,24,0,0,0,28,0,0,0,3,0,0,0,54,0,5,0,10,0,0,0,35,0, -0,0,0,0,0,0,11,0,0,0,248,0,2,0,36,0,0,0,59,0,4,0,21,0,0,0,37,0,0,0, -7,0,0,0,59,0,4,0,31,0,0,0,38,0,0,0,7,0,0,0,59,0,4,0,23,0,0,0,39,0, +0,0,0,0,0,0,11,0,0,0,248,0,2,0,36,0,0,0,59,0,4,0,16,0,0,0,37,0,0,0, +7,0,0,0,59,0,4,0,34,0,0,0,38,0,0,0,7,0,0,0,59,0,4,0,23,0,0,0,39,0, 0,0,7,0,0,0,65,0,5,0,16,0,0,0,40,0,0,0,39,0,0,0,15,0,0,0,63,0,3,0, 40,0,0,0,13,0,0,0,65,0,5,0,21,0,0,0,41,0,0,0,39,0,0,0,20,0,0,0,63,0, 3,0,41,0,0,0,19,0,0,0,65,0,5,0,16,0,0,0,42,0,0,0,39,0,0,0,15,0,0,0, -61,0,4,0,2,0,0,0,43,0,0,0,42,0,0,0,79,0,7,0,17,0,0,0,44,0,0,0,43,0, -0,0,43,0,0,0,0,0,0,0,1,0,0,0,65,0,5,0,45,0,0,0,46,0,0,0,9,0,0,0, -20,0,0,0,61,0,4,0,2,0,0,0,47,0,0,0,46,0,0,0,79,0,7,0,17,0,0,0,48,0, -0,0,47,0,0,0,47,0,0,0,0,0,0,0,1,0,0,0,129,0,5,0,17,0,0,0,49,0,0,0, -44,0,0,0,48,0,0,0,62,0,3,0,37,0,0,0,49,0,0,0,65,0,5,0,21,0,0,0,50,0, -0,0,39,0,0,0,20,0,0,0,61,0,4,0,17,0,0,0,51,0,0,0,50,0,0,0,142,0,5,0, -17,0,0,0,52,0,0,0,51,0,0,0,30,0,0,0,65,0,5,0,21,0,0,0,53,0,0,0,39,0, -0,0,20,0,0,0,62,0,3,0,53,0,0,0,52,0,0,0,65,0,5,0,21,0,0,0,54,0,0,0, -39,0,0,0,20,0,0,0,61,0,4,0,17,0,0,0,55,0,0,0,54,0,0,0,65,0,5,0,21,0, -0,0,56,0,0,0,38,0,0,0,20,0,0,0,62,0,3,0,56,0,0,0,55,0,0,0,65,0,5,0, 
-45,0,0,0,57,0,0,0,9,0,0,0,15,0,0,0,61,0,4,0,2,0,0,0,58,0,0,0,57,0, -0,0,65,0,5,0,16,0,0,0,59,0,0,0,38,0,0,0,15,0,0,0,62,0,3,0,59,0,0,0, -58,0,0,0,65,0,5,0,60,0,0,0,61,0,0,0,6,0,0,0,15,0,0,0,61,0,4,0,3,0, -0,0,62,0,0,0,61,0,0,0,61,0,4,0,17,0,0,0,63,0,0,0,37,0,0,0,80,0,6,0, -2,0,0,0,64,0,0,0,63,0,0,0,33,0,0,0,34,0,0,0,145,0,5,0,2,0,0,0,65,0, -0,0,62,0,0,0,64,0,0,0,65,0,5,0,16,0,0,0,66,0,0,0,38,0,0,0,32,0,0,0, -62,0,3,0,66,0,0,0,65,0,0,0,61,0,4,0,29,0,0,0,67,0,0,0,38,0,0,0,81,0, -5,0,2,0,0,0,68,0,0,0,67,0,0,0,0,0,0,0,62,0,3,0,25,0,0,0,68,0,0,0, -81,0,5,0,17,0,0,0,69,0,0,0,67,0,0,0,1,0,0,0,62,0,3,0,27,0,0,0,69,0, -0,0,81,0,5,0,2,0,0,0,70,0,0,0,67,0,0,0,2,0,0,0,62,0,3,0,28,0,0,0, -70,0,0,0,253,0,1,0,56,0,1,0 +61,0,4,0,2,0,0,0,43,0,0,0,42,0,0,0,79,0,8,0,31,0,0,0,44,0,0,0,43,0, +0,0,43,0,0,0,0,0,0,0,1,0,0,0,2,0,0,0,65,0,5,0,45,0,0,0,46,0,0,0, +9,0,0,0,20,0,0,0,61,0,4,0,2,0,0,0,47,0,0,0,46,0,0,0,79,0,8,0,31,0, +0,0,48,0,0,0,47,0,0,0,47,0,0,0,0,0,0,0,1,0,0,0,2,0,0,0,129,0,5,0, +31,0,0,0,49,0,0,0,44,0,0,0,48,0,0,0,80,0,5,0,2,0,0,0,50,0,0,0,49,0, +0,0,32,0,0,0,62,0,3,0,37,0,0,0,50,0,0,0,65,0,5,0,21,0,0,0,51,0,0,0, +39,0,0,0,20,0,0,0,61,0,4,0,17,0,0,0,52,0,0,0,51,0,0,0,142,0,5,0,17,0, +0,0,53,0,0,0,52,0,0,0,33,0,0,0,65,0,5,0,21,0,0,0,54,0,0,0,39,0,0,0, +20,0,0,0,62,0,3,0,54,0,0,0,53,0,0,0,65,0,5,0,21,0,0,0,55,0,0,0,39,0, +0,0,20,0,0,0,61,0,4,0,17,0,0,0,56,0,0,0,55,0,0,0,65,0,5,0,21,0,0,0, +57,0,0,0,38,0,0,0,20,0,0,0,62,0,3,0,57,0,0,0,56,0,0,0,65,0,5,0,45,0, +0,0,58,0,0,0,9,0,0,0,15,0,0,0,61,0,4,0,2,0,0,0,59,0,0,0,58,0,0,0, +65,0,5,0,16,0,0,0,60,0,0,0,38,0,0,0,15,0,0,0,62,0,3,0,60,0,0,0,59,0, +0,0,65,0,5,0,61,0,0,0,62,0,0,0,6,0,0,0,15,0,0,0,61,0,4,0,3,0,0,0, +63,0,0,0,62,0,0,0,61,0,4,0,2,0,0,0,64,0,0,0,37,0,0,0,145,0,5,0,2,0, +0,0,65,0,0,0,63,0,0,0,64,0,0,0,65,0,5,0,16,0,0,0,66,0,0,0,38,0,0,0, +30,0,0,0,62,0,3,0,66,0,0,0,65,0,0,0,61,0,4,0,29,0,0,0,67,0,0,0,38,0, +0,0,81,0,5,0,2,0,0,0,68,0,0,0,67,0,0,0,0,0,0,0,62,0,3,0,25,0,0,0, +68,0,0,0,81,0,5,0,17,0,0,0,69,0,0,0,67,0,0,0,1,0,0,0,62,0,3,0,27,0, +0,0,69,0,0,0,81,0,5,0,2,0,0,0,70,0,0,0,67,0,0,0,2,0,0,0,62,0,3,0, +28,0,0,0,70,0,0,0,253,0,1,0,56,0,1,0 diff --git a/runtime/Includes/Graphics/Scene.h b/runtime/Includes/Graphics/Scene.h index 53bd5b9..e4bd110 100644 --- a/runtime/Includes/Graphics/Scene.h +++ b/runtime/Includes/Graphics/Scene.h @@ -29,7 +29,7 @@ namespace mlx [[nodiscard]] MLX_FORCEINLINE DepthImage& GetDepth() noexcept { return m_depth; } [[nodiscard]] MLX_FORCEINLINE ViewerData& GetViewerData() noexcept { return m_viewer_data; } - ~Scene() = default; + ~Scene(); private: SceneDescriptor m_descriptor; diff --git a/runtime/Includes/Platform/Inputs.h b/runtime/Includes/Platform/Inputs.h index 1114058..3ac5b90 100644 --- a/runtime/Includes/Platform/Inputs.h +++ b/runtime/Includes/Platform/Inputs.h @@ -17,7 +17,9 @@ namespace mlx }; public: - Inputs(); + Inputs() = default; + + void FetchInputs(); inline void RegisterWindow(std::shared_ptr window) { m_windows[window->GetID()] = window; } diff --git a/runtime/Includes/Platform/Window.h b/runtime/Includes/Platform/Window.h index 1a15f33..903b8cf 100644 --- a/runtime/Includes/Platform/Window.h +++ b/runtime/Includes/Platform/Window.h @@ -30,7 +30,7 @@ namespace mlx private: Handle p_window = nullptr; - std::uint32_t m_id = -1; + std::int32_t m_id; int m_width = 0; int m_height = 0; }; diff --git a/runtime/Sources/Core/Application.cpp b/runtime/Sources/Core/Application.cpp index 433bb81..5b7c18b 100644 --- a/runtime/Sources/Core/Application.cpp +++ b/runtime/Sources/Core/Application.cpp @@ -33,6 
+33,8 @@ namespace mlx if(!m_fps.Update()) continue; + m_in.FetchInputs(); + if(f_loop_hook) f_loop_hook(p_param); diff --git a/runtime/Sources/Core/SDLManager.cpp b/runtime/Sources/Core/SDLManager.cpp index 13031aa..0c5c20f 100644 --- a/runtime/Sources/Core/SDLManager.cpp +++ b/runtime/Sources/Core/SDLManager.cpp @@ -44,68 +44,10 @@ namespace mlx if(SDL_Init(SDL_INIT_VIDEO | SDL_INIT_EVENTS | SDL_INIT_TIMER) != 0) FatalError("SDL : unable to init all subsystems; %", SDL_GetError()); - - struct WatcherData - { - func::function callback; - NonOwningPtr manager; - void* userdata; - }; - - WatcherData watcher_data; - watcher_data.callback = f_callback; - watcher_data.userdata = p_callback_data; - - SDL_AddEventWatch([](void* userdata, SDL_Event* event) -> int - { - WatcherData* data = static_cast(userdata); - - std::uint32_t id = event->window.windowID; - switch(event->type) - { - case SDL_KEYUP: data->callback(MLX_KEYUP, id, event->key.keysym.scancode, data->userdata); break; - case SDL_KEYDOWN: data->callback(MLX_KEYDOWN, id, event->key.keysym.scancode, data->userdata); break; - case SDL_MOUSEBUTTONUP: data->callback(MLX_MOUSEUP, id, event->button.button, data->userdata); break; - case SDL_MOUSEBUTTONDOWN: data->callback(MLX_MOUSEDOWN, id, event->button.button, data->userdata); break; - case SDL_MOUSEWHEEL: - { - if(event->wheel.y > 0) // scroll up - data->callback(MLX_MOUSEWHEEL, id, 1, data->userdata); - else if(event->wheel.y < 0) // scroll down - data->callback(MLX_MOUSEWHEEL, id, 2, data->userdata); - if(event->wheel.x > 0) // scroll right - data->callback(MLX_MOUSEWHEEL, id, 3, data->userdata); - else if(event->wheel.x < 0) // scroll left - data->callback(MLX_MOUSEWHEEL, id, 4, data->userdata); - break; - } - case SDL_WINDOWEVENT: - { - switch(event->window.event) - { - case SDL_WINDOWEVENT_CLOSE: data->callback(MLX_WINDOW_EVENT, id, 0, data->userdata); break; - case SDL_WINDOWEVENT_MOVED: data->callback(MLX_WINDOW_EVENT, id, 1, data->userdata); break; - case SDL_WINDOWEVENT_MINIMIZED: data->callback(MLX_WINDOW_EVENT, id, 2, data->userdata); break; - case SDL_WINDOWEVENT_MAXIMIZED: data->callback(MLX_WINDOW_EVENT, id, 3, data->userdata); break; - case SDL_WINDOWEVENT_ENTER: data->callback(MLX_WINDOW_EVENT, id, 4, data->userdata); break; - case SDL_WINDOWEVENT_FOCUS_GAINED: data->callback(MLX_WINDOW_EVENT, id, 5, data->userdata); break; - case SDL_WINDOWEVENT_LEAVE: data->callback(MLX_WINDOW_EVENT, id, 6, data->userdata); break; - case SDL_WINDOWEVENT_FOCUS_LOST: data->callback(MLX_WINDOW_EVENT, id, 7, data->userdata); break; - - default : break; - } - break; - } - - default: break; - } - - return 0; - }, &watcher_data); DebugLog("SDL Manager initialized"); } - Handle SDLManager::CreateWindow(const std::string& title, std::size_t w, std::size_t h, bool hidden) + Handle SDLManager::CreateWindow(const std::string& title, std::size_t w, std::size_t h, bool hidden, std::int32_t& id) { Internal::WindowInfos* infos = new Internal::WindowInfos; Verify(infos != nullptr, "SDL : window allocation failed"); @@ -118,6 +60,8 @@ namespace mlx m_windows_registry.insert(infos); + id = SDL_GetWindowID(infos->window); + return infos; } @@ -228,6 +172,53 @@ namespace mlx return y; } + void SDLManager::InputsFetcher(func::function functor) + { + SDL_Event event; + while(SDL_PollEvent(&event)) + { + std::uint32_t id = event.window.windowID; + switch(event.type) + { + case SDL_KEYUP: functor(MLX_KEYUP, id, event.key.keysym.scancode); break; + case SDL_KEYDOWN: functor(MLX_KEYDOWN, id, 
event.key.keysym.scancode); break; + case SDL_MOUSEBUTTONUP: functor(MLX_MOUSEUP, id, event.button.button); break; + case SDL_MOUSEBUTTONDOWN: functor(MLX_MOUSEDOWN, id, event.button.button); break; + case SDL_MOUSEWHEEL: + { + if(event.wheel.y > 0) // scroll up + functor(MLX_MOUSEWHEEL, id, 1); + else if(event.wheel.y < 0) // scroll down + functor(MLX_MOUSEWHEEL, id, 2); + if(event.wheel.x > 0) // scroll right + functor(MLX_MOUSEWHEEL, id, 3); + else if(event.wheel.x < 0) // scroll left + functor(MLX_MOUSEWHEEL, id, 4); + break; + } + case SDL_WINDOWEVENT: + { + switch(event.window.event) + { + case SDL_WINDOWEVENT_CLOSE: functor(MLX_WINDOW_EVENT, id, 0); break; + case SDL_WINDOWEVENT_MOVED: functor(MLX_WINDOW_EVENT, id, 1); break; + case SDL_WINDOWEVENT_MINIMIZED: functor(MLX_WINDOW_EVENT, id, 2); break; + case SDL_WINDOWEVENT_MAXIMIZED: functor(MLX_WINDOW_EVENT, id, 3); break; + case SDL_WINDOWEVENT_ENTER: functor(MLX_WINDOW_EVENT, id, 4); break; + case SDL_WINDOWEVENT_FOCUS_GAINED: functor(MLX_WINDOW_EVENT, id, 5); break; + case SDL_WINDOWEVENT_LEAVE: functor(MLX_WINDOW_EVENT, id, 6); break; + case SDL_WINDOWEVENT_FOCUS_LOST: functor(MLX_WINDOW_EVENT, id, 7); break; + + default : break; + } + break; + } + + default: break; + } + } + } + SDLManager::~SDLManager() { if(m_drop_sdl_responsability) diff --git a/runtime/Sources/Graphics/Scene.cpp b/runtime/Sources/Graphics/Scene.cpp index 998ed64..c508553 100644 --- a/runtime/Sources/Graphics/Scene.cpp +++ b/runtime/Sources/Graphics/Scene.cpp @@ -44,4 +44,9 @@ namespace mlx m_sprites.erase(it); } while(it != m_sprites.end()); } + + Scene::~Scene() + { + m_depth.Destroy(); + } } diff --git a/runtime/Sources/Platform/Inputs.cpp b/runtime/Sources/Platform/Inputs.cpp index b93561f..25ea36f 100644 --- a/runtime/Sources/Platform/Inputs.cpp +++ b/runtime/Sources/Platform/Inputs.cpp @@ -6,14 +6,16 @@ namespace mlx { - Inputs::Inputs() + void Inputs::FetchInputs() { - SDLManager::Get().SetEventCallback([this](mlx_event_type event, int window_id, int code, [[maybe_unused]] void* userdata) + SDLManager::Get().InputsFetcher([this](mlx_event_type event, int window_id, int code) { - if(m_windows.find(window_id) == m_windows.end()) + if(!m_windows.contains(window_id)) + return; + if(!m_events_hooks.contains(window_id) || !m_events_hooks[window_id][event].hook) return; m_events_hooks[window_id][event].hook(code, m_events_hooks[window_id][event].param); - }, nullptr); + }); } std::int32_t Inputs::GetX() const noexcept diff --git a/runtime/Sources/Platform/Window.cpp b/runtime/Sources/Platform/Window.cpp index bc15aec..4c3173b 100644 --- a/runtime/Sources/Platform/Window.cpp +++ b/runtime/Sources/Platform/Window.cpp @@ -7,9 +7,7 @@ namespace mlx { Window::Window(std::size_t w, std::size_t h, const std::string& title, bool hidden) : m_width(w), m_height(h) { - static std::uint64_t ids = 0; - p_window = SDLManager::Get().CreateWindow(title, w, h, hidden); - m_id = ids++; + p_window = SDLManager::Get().CreateWindow(title, w, h, hidden, m_id); } void Window::Destroy() noexcept diff --git a/runtime/Sources/Renderer/Pipelines/Graphics.cpp b/runtime/Sources/Renderer/Pipelines/Graphics.cpp index 226d606..ecab564 100644 --- a/runtime/Sources/Renderer/Pipelines/Graphics.cpp +++ b/runtime/Sources/Renderer/Pipelines/Graphics.cpp @@ -4,7 +4,6 @@ #include #include #include -#include namespace mlx { diff --git a/runtime/Sources/Renderer/RenderPasses/2DPass.cpp b/runtime/Sources/Renderer/RenderPasses/2DPass.cpp index 5b9a9dd..f2f5ed8 100644 --- 
a/runtime/Sources/Renderer/RenderPasses/2DPass.cpp +++ b/runtime/Sources/Renderer/RenderPasses/2DPass.cpp @@ -5,7 +5,6 @@ #include #include #include -#include namespace mlx { From 632117a3a3ae3a32e0ed2107e42784cbdc60b4ed Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Sat, 19 Oct 2024 00:43:39 +0200 Subject: [PATCH 042/131] yes --- example/main.c | 74 +++---------------- runtime/Includes/Core/Graphics.h | 4 +- runtime/Includes/Core/Graphics.inl | 16 ++-- runtime/Includes/Embedded/2DFragment.spv.h | 26 +++---- runtime/Includes/Graphics/PutPixelManager.h | 4 +- runtime/Includes/Graphics/Scene.h | 5 +- runtime/Includes/Graphics/Sprite.h | 6 +- runtime/Includes/PreCompiled.h | 1 + runtime/Includes/Renderer/Image.h | 16 ---- .../Includes/Renderer/Pipelines/Graphics.h | 2 - runtime/Sources/Core/SDLManager.cpp | 2 +- runtime/Sources/Graphics/PutPixelManager.cpp | 20 +++-- runtime/Sources/Graphics/Scene.cpp | 18 +++-- runtime/Sources/Renderer/Image.cpp | 19 ++++- .../Sources/Renderer/Pipelines/Graphics.cpp | 29 +------- runtime/Sources/Renderer/Pipelines/Shader.cpp | 4 +- .../Sources/Renderer/RenderPasses/2DPass.cpp | 28 ++++--- .../Sources/Renderer/RenderPasses/Passes.cpp | 1 - third_party/kvf.h | 8 +- 19 files changed, 109 insertions(+), 174 deletions(-) diff --git a/example/main.c b/example/main.c index 3b27ec7..f243767 100644 --- a/example/main.c +++ b/example/main.c @@ -1,19 +1,6 @@ -/* ************************************************************************** */ -/* */ -/* ::: :::::::: */ -/* main.c :+: :+: :+: */ -/* +:+ +:+ +:+ */ -/* By: maldavid +#+ +:+ +#+ */ -/* +#+#+#+#+#+ +#+ */ -/* Created: 2022/10/04 17:55:21 by maldavid #+# #+# */ -/* Updated: 2024/10/03 06:39:01 by maldavid ### ########.fr */ -/* */ -/* ************************************************************************** */ - #include #include "../includes/mlx.h" -/* typedef struct { void* mlx; @@ -24,8 +11,6 @@ typedef struct void* img; } mlx_t; -//void* img = NULL; - int update(void* param) { static int i = 0; @@ -34,34 +19,35 @@ int update(void* param) if(i == 200) mlx_clear_window(mlx->mlx, mlx->win); -// if(img) -// mlx_destroy_image(mlx->mlx,img); -// img = mlx_new_image(mlx->mlx, 800, 800); -// mlx_set_image_pixel(mlx->mlx, img, 4, 4, 0xFF00FF00); -// mlx_put_image_to_window(mlx->mlx, mlx->win, img, 0, 0); - if(i >= 250) mlx_set_font_scale(mlx->mlx, mlx->win, "default", 16.f); else mlx_set_font_scale(mlx->mlx, mlx->win, "default", 6.f); + mlx_string_put(mlx->mlx, mlx->win, 160, 120, 0xFFFF2066, "this text should be hidden"); mlx_put_image_to_window(mlx->mlx, mlx->win, mlx->logo_png, 100, 100); - mlx_put_image_to_window(mlx->mlx, mlx->win, mlx->logo_jpg, 210, 150); mlx_put_image_to_window(mlx->mlx, mlx->win, mlx->logo_bmp, 220, 40); mlx_put_image_to_window(mlx->mlx, mlx->win, mlx->img, 150, 60); mlx_set_font(mlx->mlx, mlx->win, "default"); mlx_string_put(mlx->mlx, mlx->win, 20, 50, 0xFFFFFFFF, "that's a text"); - int color = 0; - for(int j = 0; j < 400; j++) + for(int j = 0, color = 0; j < 400; j++) { mlx_pixel_put(mlx->mlx, mlx->win, j, j, 0xFFFF0000 + color); mlx_pixel_put(mlx->mlx, mlx->win, 399 - j, j, 0xFF0000FF); color += (color < 255); } + mlx_put_image_to_window(mlx->mlx, mlx->win, mlx->logo_jpg, 210, 150); + + for(int j = 0; j < 20; j++) + { + for(int k = 0; k < 20; k++) + mlx_pixel_put(mlx->mlx, mlx->win, 220 + j, 160 + k, 0xFFFF0000); + } + i++; return 0; } @@ -174,44 +160,6 @@ int main(void) mlx_destroy_image(mlx.mlx, mlx.img); mlx_destroy_window(mlx.mlx, mlx.win); mlx_destroy_display(mlx.mlx); - - return 0; -} 
-*/ - -int window_hook(int event, void* param) -{ - if(event == 0) - mlx_loop_end(param); - return 0; -} - -int main(void) -{ - void* mlx; - void* win; - - mlx = mlx_init(); - win = mlx_new_window(mlx, 400, 400, "My window"); - - mlx_set_fps_goal(mlx, 60); - - mlx_on_event(mlx, win, MLX_WINDOW_EVENT, window_hook, mlx); - - int dummy; - void* logo_png = mlx_png_file_to_image(mlx, "42_logo.png", &dummy, &dummy); - - mlx_put_image_to_window(mlx, win, logo_png, 10, 190); - mlx_put_image_to_window(mlx, win, logo_png, 11, 190); - mlx_put_image_to_window(mlx, win, logo_png, 10, 190); - mlx_put_image_to_window(mlx, win, logo_png, 100, 190); - mlx_put_image_to_window(mlx, win, logo_png, 10, 190); - - mlx_loop(mlx); - - mlx_destroy_image(mlx, logo_png); - mlx_destroy_window(mlx, win); - mlx_destroy_display(mlx); - + return 0; } diff --git a/runtime/Includes/Core/Graphics.h b/runtime/Includes/Core/Graphics.h index 5c8de28..c2be4c0 100644 --- a/runtime/Includes/Core/Graphics.h +++ b/runtime/Includes/Core/Graphics.h @@ -44,11 +44,11 @@ namespace mlx std::shared_ptr p_window; std::unique_ptr p_scene; - std::uint64_t m_current_depth = 0; - int m_id; bool m_has_window; + + bool m_insert_new_pixel_put_texture = false; }; } diff --git a/runtime/Includes/Core/Graphics.inl b/runtime/Includes/Core/Graphics.inl index 93a7760..2ba4a09 100644 --- a/runtime/Includes/Core/Graphics.inl +++ b/runtime/Includes/Core/Graphics.inl @@ -1,6 +1,5 @@ #pragma once #include -#include namespace mlx { @@ -9,20 +8,19 @@ namespace mlx MLX_PROFILE_FUNCTION(); p_scene->ResetSprites(); m_put_pixel_manager.ResetRenderData(); - m_current_depth = 0; + m_insert_new_pixel_put_texture = true; } void GraphicsSupport::PixelPut(int x, int y, std::uint32_t color) noexcept { MLX_PROFILE_FUNCTION(); - /* - NonOwningPtr texture = m_put_pixel_manager.DrawPixel(x, y, m_current_depth, color); + NonOwningPtr texture = m_put_pixel_manager.DrawPixel(x, y, m_insert_new_pixel_put_texture, color); if(texture) { Sprite& new_sprite = p_scene->CreateSprite(texture); - new_sprite.SetPosition(Vec3f{ 0.0f, 0.0f, static_cast(m_current_depth) }); + new_sprite.SetPosition(Vec2f{ 0.0f, 0.0f }); } - */ + m_insert_new_pixel_put_texture = false; } void GraphicsSupport::StringPut(int x, int y, std::uint32_t color, std::string str) @@ -41,11 +39,11 @@ namespace mlx if(!sprite) { Sprite& new_sprite = p_scene->CreateSprite(texture); - new_sprite.SetPosition(Vec3f{ static_cast(x), static_cast(y), static_cast(m_current_depth) }); - m_current_depth++; + new_sprite.SetPosition(Vec2f{ static_cast(x), static_cast(y) }); + m_insert_new_pixel_put_texture = true; } else - sprite->SetPosition(Vec3f{ static_cast(x), static_cast(y), static_cast(m_current_depth) }); + p_scene->BringToFront(std::move(sprite)); } void GraphicsSupport::LoadFont(const std::filesystem::path& filepath, float scale) diff --git a/runtime/Includes/Embedded/2DFragment.spv.h b/runtime/Includes/Embedded/2DFragment.spv.h index 5792158..f71f131 100644 --- a/runtime/Includes/Embedded/2DFragment.spv.h +++ b/runtime/Includes/Embedded/2DFragment.spv.h @@ -1,4 +1,4 @@ -3,2,35,7,0,0,1,0,39,0,0,0,51,0,0,0,0,0,0,0,17,0,2,0,1,0,0,0,14,0, +3,2,35,7,0,0,1,0,39,0,0,0,48,0,0,0,0,0,0,0,17,0,2,0,1,0,0,0,14,0, 3,0,0,0,0,0,1,0,0,0,15,0,8,0,4,0,0,0,28,0,0,0,109,97,105,110,0,0,0,0, 10,0,0,0,16,0,0,0,22,0,0,0,16,0,3,0,28,0,0,0,7,0,0,0,3,0,3,0,0,0, 0,0,100,0,0,0,5,0,4,0,19,0,0,0,86,101,114,116,79,117,116,0,6,0,5,0,19,0,0,0, @@ -29,16 +29,14 @@ 7,0,0,0,248,0,2,0,29,0,0,0,59,0,4,0,24,0,0,0,30,0,0,0,7,0,0,0,59,0, 
4,0,20,0,0,0,31,0,0,0,7,0,0,0,65,0,5,0,13,0,0,0,32,0,0,0,31,0,0,0, 12,0,0,0,63,0,3,0,32,0,0,0,10,0,0,0,65,0,5,0,18,0,0,0,33,0,0,0,31,0, -0,0,17,0,0,0,63,0,3,0,33,0,0,0,16,0,0,0,65,0,5,0,13,0,0,0,34,0,0,0, -31,0,0,0,12,0,0,0,61,0,4,0,8,0,0,0,35,0,0,0,34,0,0,0,61,0,4,0,3,0, -0,0,36,0,0,0,5,0,0,0,65,0,5,0,18,0,0,0,37,0,0,0,31,0,0,0,17,0,0,0, -61,0,4,0,14,0,0,0,38,0,0,0,37,0,0,0,87,0,5,0,8,0,0,0,39,0,0,0,36,0, -0,0,38,0,0,0,133,0,5,0,8,0,0,0,40,0,0,0,35,0,0,0,39,0,0,0,65,0,5,0, -13,0,0,0,41,0,0,0,30,0,0,0,12,0,0,0,62,0,3,0,41,0,0,0,40,0,0,0,65,0, -5,0,13,0,0,0,45,0,0,0,30,0,0,0,12,0,0,0,61,0,4,0,8,0,0,0,46,0,0,0, -45,0,0,0,81,0,5,0,1,0,0,0,47,0,0,0,46,0,0,0,3,0,0,0,180,0,5,0,27,0, -0,0,48,0,0,0,47,0,0,0,26,0,0,0,247,0,3,0,42,0,0,0,0,0,0,0,250,0,4,0, -48,0,0,0,43,0,0,0,44,0,0,0,248,0,2,0,43,0,0,0,252,0,1,0,248,0,2,0,44,0, -0,0,249,0,2,0,42,0,0,0,248,0,2,0,42,0,0,0,61,0,4,0,23,0,0,0,49,0,0,0, -30,0,0,0,81,0,5,0,8,0,0,0,50,0,0,0,49,0,0,0,0,0,0,0,62,0,3,0,22,0, -0,0,50,0,0,0,253,0,1,0,56,0,1,0 +0,0,17,0,0,0,63,0,3,0,33,0,0,0,16,0,0,0,61,0,4,0,3,0,0,0,34,0,0,0, +5,0,0,0,65,0,5,0,18,0,0,0,35,0,0,0,31,0,0,0,17,0,0,0,61,0,4,0,14,0, +0,0,36,0,0,0,35,0,0,0,87,0,5,0,8,0,0,0,37,0,0,0,34,0,0,0,36,0,0,0, +65,0,5,0,13,0,0,0,38,0,0,0,30,0,0,0,12,0,0,0,62,0,3,0,38,0,0,0,37,0, +0,0,65,0,5,0,13,0,0,0,42,0,0,0,30,0,0,0,12,0,0,0,61,0,4,0,8,0,0,0, +43,0,0,0,42,0,0,0,81,0,5,0,1,0,0,0,44,0,0,0,43,0,0,0,3,0,0,0,180,0, +5,0,27,0,0,0,45,0,0,0,44,0,0,0,26,0,0,0,247,0,3,0,39,0,0,0,0,0,0,0, +250,0,4,0,45,0,0,0,40,0,0,0,41,0,0,0,248,0,2,0,40,0,0,0,252,0,1,0,248,0, +2,0,41,0,0,0,249,0,2,0,39,0,0,0,248,0,2,0,39,0,0,0,61,0,4,0,23,0,0,0, +46,0,0,0,30,0,0,0,81,0,5,0,8,0,0,0,47,0,0,0,46,0,0,0,0,0,0,0,62,0, +3,0,22,0,0,0,47,0,0,0,253,0,1,0,56,0,1,0 diff --git a/runtime/Includes/Graphics/PutPixelManager.h b/runtime/Includes/Graphics/PutPixelManager.h index 70a9039..5249045 100644 --- a/runtime/Includes/Graphics/PutPixelManager.h +++ b/runtime/Includes/Graphics/PutPixelManager.h @@ -11,13 +11,13 @@ namespace mlx PutPixelManager(NonOwningPtr renderer) : p_renderer(renderer) {} // Return a valid pointer when a new texture has been created - NonOwningPtr DrawPixel(int x, int y, std::uint64_t z, std::uint32_t color); + NonOwningPtr DrawPixel(int x, int y, bool insert_new_texture, std::uint32_t color); void ResetRenderData(); ~PutPixelManager(); private: - std::map m_textures; + std::list m_textures; NonOwningPtr p_renderer; }; } diff --git a/runtime/Includes/Graphics/Scene.h b/runtime/Includes/Graphics/Scene.h index e4bd110..4c7f6a7 100644 --- a/runtime/Includes/Graphics/Scene.h +++ b/runtime/Includes/Graphics/Scene.h @@ -20,21 +20,20 @@ namespace mlx Sprite& CreateSprite(NonOwningPtr texture) noexcept; NonOwningPtr GetSpriteFromTextureAndPosition(NonOwningPtr texture, const Vec2f& position) const; + void BringToFront(NonOwningPtr sprite); void TryEraseSpriteFromTexture(NonOwningPtr texture); inline void ResetSprites() { m_sprites.clear(); } [[nodiscard]] MLX_FORCEINLINE const std::vector>& GetSprites() const noexcept { return m_sprites; } [[nodiscard]] MLX_FORCEINLINE const SceneDescriptor& GetDescription() const noexcept { return m_descriptor; } - [[nodiscard]] MLX_FORCEINLINE DepthImage& GetDepth() noexcept { return m_depth; } [[nodiscard]] MLX_FORCEINLINE ViewerData& GetViewerData() noexcept { return m_viewer_data; } - ~Scene(); + ~Scene() = default; private: SceneDescriptor m_descriptor; std::vector> m_sprites; - DepthImage m_depth; ViewerData m_viewer_data; }; } diff --git a/runtime/Includes/Graphics/Sprite.h 
b/runtime/Includes/Graphics/Sprite.h index 3221852..757e30d 100644 --- a/runtime/Includes/Graphics/Sprite.h +++ b/runtime/Includes/Graphics/Sprite.h @@ -17,10 +17,10 @@ namespace mlx Sprite(class Renderer& renderer, NonOwningPtr texture); inline void SetColor(Vec4f color) noexcept { m_color = color; } - inline void SetPosition(Vec3f position) noexcept { m_position = position; } + inline void SetPosition(Vec2f position) noexcept { m_position = position; } [[nodiscard]] MLX_FORCEINLINE const Vec4f& GetColor() const noexcept { return m_color; } - [[nodiscard]] MLX_FORCEINLINE const Vec3f& GetPosition() const noexcept { return m_position; } + [[nodiscard]] MLX_FORCEINLINE const Vec2f& GetPosition() const noexcept { return m_position; } [[nodiscard]] MLX_FORCEINLINE std::shared_ptr GetMesh() const { return p_mesh; } [[nodiscard]] MLX_FORCEINLINE NonOwningPtr GetTexture() const { return p_texture; } @@ -46,7 +46,7 @@ namespace mlx NonOwningPtr p_texture; std::shared_ptr p_mesh; Vec4f m_color = Vec4f{ 1.0f, 1.0f, 1.0f, 1.0f }; - Vec3f m_position = Vec3f{ 0.0f, 0.0f, 0.0f }; + Vec2f m_position = Vec2f{ 0.0f, 0.0f }; }; } diff --git a/runtime/Includes/PreCompiled.h b/runtime/Includes/PreCompiled.h index bc3efbc..fde27c8 100644 --- a/runtime/Includes/PreCompiled.h +++ b/runtime/Includes/PreCompiled.h @@ -48,6 +48,7 @@ #include #include #include +#include #ifndef MLX_PLAT_WINDOWS #include diff --git a/runtime/Includes/Renderer/Image.h b/runtime/Includes/Renderer/Image.h index 55840ed..c857089 100644 --- a/runtime/Includes/Renderer/Image.h +++ b/runtime/Includes/Renderer/Image.h @@ -67,22 +67,6 @@ namespace mlx bool m_is_multisampled = false; }; - class DepthImage : public Image - { - public: - DepthImage() = default; - inline void Init(std::uint32_t width, std::uint32_t height, bool is_multisampled, [[maybe_unused]] std::string_view debug_name) - { - MLX_PROFILE_FUNCTION(); - std::vector candidates = { VK_FORMAT_D32_SFLOAT, VK_FORMAT_D32_SFLOAT_S8_UINT, VK_FORMAT_D24_UNORM_S8_UINT }; - VkFormat format = kvfFindSupportFormatInCandidates(RenderCore::Get().GetDevice(), candidates.data(), candidates.size(), VK_IMAGE_TILING_OPTIMAL, VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT); - Image::Init(ImageType::Depth, width, height, format, VK_IMAGE_TILING_OPTIMAL, VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT, is_multisampled, std::move(debug_name)); - Image::CreateImageView(VK_IMAGE_VIEW_TYPE_2D, VK_IMAGE_ASPECT_DEPTH_BIT); - Image::TransitionLayout(VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL); - } - ~DepthImage() = default; - }; - class Texture : public Image { public: diff --git a/runtime/Includes/Renderer/Pipelines/Graphics.h b/runtime/Includes/Renderer/Pipelines/Graphics.h index 09aa939..246494a 100644 --- a/runtime/Includes/Renderer/Pipelines/Graphics.h +++ b/runtime/Includes/Renderer/Pipelines/Graphics.h @@ -14,7 +14,6 @@ namespace mlx std::shared_ptr fragment_shader; std::vector> color_attachments; NonOwningPtr renderer = nullptr; - NonOwningPtr depth = nullptr; bool clear_color_attachments = true; bool no_vertex_inputs = false; }; @@ -55,7 +54,6 @@ namespace mlx VkPipeline m_pipeline = VK_NULL_HANDLE; VkPipelineLayout m_pipeline_layout = VK_NULL_HANDLE; NonOwningPtr p_renderer; - NonOwningPtr p_depth; }; } diff --git a/runtime/Sources/Core/SDLManager.cpp b/runtime/Sources/Core/SDLManager.cpp index 0c5c20f..1e0b873 100644 --- a/runtime/Sources/Core/SDLManager.cpp +++ b/runtime/Sources/Core/SDLManager.cpp @@ -101,7 +101,7 @@ namespace mlx #endif #ifdef 
VK_USE_PLATFORM_WAYLAND_KHR - extensions.push_back(VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME); + // extensions.push_back(VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME); #endif #ifdef VK_USE_PLATFORM_WIN32_KHR diff --git a/runtime/Sources/Graphics/PutPixelManager.cpp b/runtime/Sources/Graphics/PutPixelManager.cpp index b4727cf..929e504 100644 --- a/runtime/Sources/Graphics/PutPixelManager.cpp +++ b/runtime/Sources/Graphics/PutPixelManager.cpp @@ -5,18 +5,22 @@ namespace mlx { - NonOwningPtr PutPixelManager::DrawPixel(int x, int y, std::uint64_t z, std::uint32_t color) + NonOwningPtr PutPixelManager::DrawPixel(int x, int y, bool insert_new_texture, std::uint32_t color) { Verify((bool)p_renderer, "invalid renderer pointer"); VkExtent2D swapchain_extent = kvfGetSwapchainImagesSize(p_renderer->GetSwapchain()); - #ifdef DEBUG - auto res = m_textures.try_emplace(z, CPUBuffer{}, swapchain_extent.width, swapchain_extent.height, VK_FORMAT_R8G8B8A8_SRGB, false, "mlx_put_pixel_layer_" + std::to_string(z)); - #else - auto res = m_textures.try_emplace(z, CPUBuffer{}, swapchain_extent.width, swapchain_extent.height, VK_FORMAT_R8G8B8A8_SRGB, false, std::string_view{}); - #endif - res.first->second.SetPixel(x, y, color); - return (res.second ? &res.first->second : nullptr); + if(insert_new_texture) + { + #ifdef DEBUG + Texture& texture = m_textures.emplace_back(CPUBuffer{}, swapchain_extent.width, swapchain_extent.height, VK_FORMAT_R8G8B8A8_SRGB, false, "mlx_put_pixel_layer_" + std::to_string(m_textures.size())); + #else + Texture& texture = m_textures.emplace_back(CPUBuffer{}, swapchain_extent.width, swapchain_extent.height, VK_FORMAT_R8G8B8A8_SRGB, false, std::string_view{}); + #endif + texture.Clear(VK_NULL_HANDLE, Vec4f{ 0.0f }); + } + m_textures.back().SetPixel(x, y, color); + return (insert_new_texture ? &m_textures.back() : nullptr); } void PutPixelManager::ResetRenderData() diff --git a/runtime/Sources/Graphics/Scene.cpp b/runtime/Sources/Graphics/Scene.cpp index c508553..e1bf59b 100644 --- a/runtime/Sources/Graphics/Scene.cpp +++ b/runtime/Sources/Graphics/Scene.cpp @@ -10,7 +10,6 @@ namespace mlx { MLX_PROFILE_FUNCTION(); Verify((bool)m_descriptor.renderer, "invalid renderer"); - m_depth.Init(m_descriptor.renderer->GetSwapchainImages().back().GetWidth(), m_descriptor.renderer->GetSwapchainImages().back().GetHeight(), false, "mlx_scene_depth"); } Sprite& Scene::CreateSprite(NonOwningPtr texture) noexcept @@ -31,6 +30,18 @@ namespace mlx return (it != m_sprites.end() ? 
it->get() : nullptr); } + void Scene::BringToFront(NonOwningPtr sprite) + { + MLX_PROFILE_FUNCTION(); + auto it = std::find_if(m_sprites.begin(), m_sprites.end(), [&sprite](std::shared_ptr sprite_ptr) + { + return sprite_ptr.get() == sprite.Get(); + }); + if(it == m_sprites.end()) + return; + std::rotate(it, it + 1, m_sprites.end()); + } + void Scene::TryEraseSpriteFromTexture(NonOwningPtr texture) { MLX_PROFILE_FUNCTION(); @@ -44,9 +55,4 @@ namespace mlx m_sprites.erase(it); } while(it != m_sprites.end()); } - - Scene::~Scene() - { - m_depth.Destroy(); - } } diff --git a/runtime/Sources/Renderer/Image.cpp b/runtime/Sources/Renderer/Image.cpp index 173523a..9b8a0b7 100644 --- a/runtime/Sources/Renderer/Image.cpp +++ b/runtime/Sources/Renderer/Image.cpp @@ -100,6 +100,13 @@ namespace mlx subresource_range.levelCount = 1; subresource_range.baseArrayLayer = 0; + bool is_single_time_cmd_buffer = (cmd == VK_NULL_HANDLE); + if(is_single_time_cmd_buffer) + { + cmd = kvfCreateCommandBuffer(RenderCore::Get().GetDevice()); + kvfBeginCommandBuffer(cmd, VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT); + } + VkImageLayout old_layout = m_layout; TransitionLayout(VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, cmd); if(m_type == ImageType::Color) @@ -115,6 +122,14 @@ namespace mlx RenderCore::Get().vkCmdClearDepthStencilImage(cmd, m_image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, &clear_depth_stencil, 1, &subresource_range); } TransitionLayout(old_layout, cmd); + + if(is_single_time_cmd_buffer) + { + kvfEndCommandBuffer(cmd); + VkFence fence = kvfCreateFence(RenderCore::Get().GetDevice()); + kvfSubmitSingleTimeCommandBuffer(RenderCore::Get().GetDevice(), cmd, KVF_GRAPHICS_QUEUE, fence); + kvfDestroyFence(RenderCore::Get().GetDevice(), fence); + } } void Image::DestroySampler() noexcept @@ -165,7 +180,7 @@ namespace mlx kvfBeginCommandBuffer(cmd, VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT); TransitionLayout(VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, cmd); kvfCopyBufferToImage(cmd, Image::Get(), staging_buffer.Get(), staging_buffer.GetOffset(), VK_IMAGE_ASPECT_COLOR_BIT, { width, height, 1 }); - RenderCore::Get().vkEndCommandBuffer(cmd); + kvfEndCommandBuffer(cmd); VkFence fence = kvfCreateFence(RenderCore::Get().GetDevice()); kvfSubmitSingleTimeCommandBuffer(RenderCore::Get().GetDevice(), cmd, KVF_GRAPHICS_QUEUE, fence); kvfDestroyFence(RenderCore::Get().GetDevice(), fence); @@ -237,7 +252,7 @@ namespace mlx TransitionLayout(VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, cmd); kvfCopyImageToBuffer(cmd, m_staging_buffer->Get(), m_image, m_staging_buffer->GetOffset(), VK_IMAGE_ASPECT_COLOR_BIT, { m_width, m_height, 1 }); TransitionLayout(old_layout, cmd); - RenderCore::Get().vkEndCommandBuffer(cmd); + kvfEndCommandBuffer(cmd); VkFence fence = kvfCreateFence(RenderCore::Get().GetDevice()); kvfSubmitSingleTimeCommandBuffer(RenderCore::Get().GetDevice(), cmd, KVF_GRAPHICS_QUEUE, fence); kvfDestroyFence(RenderCore::Get().GetDevice(), fence); diff --git a/runtime/Sources/Renderer/Pipelines/Graphics.cpp b/runtime/Sources/Renderer/Pipelines/Graphics.cpp index ecab564..7013dfd 100644 --- a/runtime/Sources/Renderer/Pipelines/Graphics.cpp +++ b/runtime/Sources/Renderer/Pipelines/Graphics.cpp @@ -17,7 +17,6 @@ namespace mlx p_vertex_shader = descriptor.vertex_shader; p_fragment_shader = descriptor.fragment_shader; p_renderer = descriptor.renderer; - p_depth = descriptor.depth; #ifdef DEBUG m_debug_name = debug_name; @@ -34,24 +33,15 @@ namespace mlx TransitionAttachments(); CreateFramebuffers(m_attachments, descriptor.clear_color_attachments); - 
VkPhysicalDeviceFeatures features{}; - RenderCore::Get().vkGetPhysicalDeviceFeatures(RenderCore::Get().GetPhysicalDevice(), &features); - KvfGraphicsPipelineBuilder* builder = kvfCreateGPipelineBuilder(); kvfGPipelineBuilderAddShaderStage(builder, p_vertex_shader->GetShaderStage(), p_vertex_shader->GetShaderModule(), "main"); kvfGPipelineBuilderAddShaderStage(builder, p_fragment_shader->GetShaderStage(), p_fragment_shader->GetShaderModule(), "main"); kvfGPipelineBuilderSetInputTopology(builder, VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST); kvfGPipelineBuilderSetCullMode(builder, VK_CULL_MODE_NONE, VK_FRONT_FACE_CLOCKWISE); - kvfGPipelineBuilderEnableAlphaBlending(builder); - if(p_depth) - kvfGPipelineBuilderEnableDepthTest(builder, VK_COMPARE_OP_LESS, true); - else - kvfGPipelineBuilderDisableDepthTest(builder); + kvfGPipelineBuilderDisableDepthTest(builder); kvfGPipelineBuilderSetPolygonMode(builder, VK_POLYGON_MODE_FILL, 1.0f); - if(features.sampleRateShading) - kvfGPipelineBuilderSetMultisamplingShading(builder, VK_SAMPLE_COUNT_1_BIT, 0.25f); - else - kvfGPipelineBuilderSetMultisampling(builder, VK_SAMPLE_COUNT_1_BIT); + kvfGPipelineBuilderSetMultisampling(builder, VK_SAMPLE_COUNT_1_BIT); + kvfGPipelineBuilderEnableAlphaBlending(builder); if(!descriptor.no_vertex_inputs) { @@ -122,9 +112,6 @@ namespace mlx m_clears[i].color.float32[3] = clear[3]; } - if(p_depth) - m_clears.back().depthStencil = VkClearDepthStencilValue{ 1.0f, 0 }; - kvfBeginRenderPass(m_renderpass, command_buffer, fb, fb_extent, m_clears.data(), m_clears.size()); RenderCore::Get().vkCmdBindPipeline(command_buffer, GetPipelineBindPoint(), GetPipeline()); return true; @@ -177,12 +164,6 @@ namespace mlx attachment_views.push_back(image->GetImageView()); } - if(p_depth) - { - attachments.push_back(kvfBuildAttachmentDescription((kvfIsDepthFormat(p_depth->GetFormat()) ? 
KVF_IMAGE_DEPTH : KVF_IMAGE_COLOR), p_depth->GetFormat(), p_depth->GetLayout(), p_depth->GetLayout(), clear_attachments, VK_SAMPLE_COUNT_1_BIT)); - attachment_views.push_back(p_depth->GetImageView()); - } - m_renderpass = kvfCreateRenderPass(RenderCore::Get().GetDevice(), attachments.data(), attachments.size(), GetPipelineBindPoint()); m_clears.clear(); m_clears.resize(attachments.size()); @@ -208,10 +189,6 @@ namespace mlx void GraphicPipeline::TransitionAttachments(VkCommandBuffer cmd) { MLX_PROFILE_FUNCTION(); - if(p_depth) - p_depth->TransitionLayout(VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL, cmd); - - #pragma omp parallel for for(NonOwningPtr image : m_attachments) { if(!image->IsInit()) diff --git a/runtime/Sources/Renderer/Pipelines/Shader.cpp b/runtime/Sources/Renderer/Pipelines/Shader.cpp index 7dd4691..0e527f2 100644 --- a/runtime/Sources/Renderer/Pipelines/Shader.cpp +++ b/runtime/Sources/Renderer/Pipelines/Shader.cpp @@ -41,15 +41,13 @@ namespace mlx } std::size_t i = 0; - std::vector push_constants(layout.push_constants.size()); m_pipeline_layout_part.push_constants.resize(layout.push_constants.size()); - for(auto& pc : layout.push_constants) + for(const auto& pc : layout.push_constants) { VkPushConstantRange push_constant_range = {}; push_constant_range.offset = pc.offset; push_constant_range.size = pc.size; push_constant_range.stageFlags = m_stage; - push_constants[i] = push_constant_range; m_pipeline_layout_part.push_constants[i] = push_constant_range; i++; } diff --git a/runtime/Sources/Renderer/RenderPasses/2DPass.cpp b/runtime/Sources/Renderer/RenderPasses/2DPass.cpp index f2f5ed8..642804e 100644 --- a/runtime/Sources/Renderer/RenderPasses/2DPass.cpp +++ b/runtime/Sources/Renderer/RenderPasses/2DPass.cpp @@ -17,6 +17,7 @@ namespace mlx void Render2DPass::Init(Renderer& renderer) { MLX_PROFILE_FUNCTION(); + ShaderLayout vertex_shader_layout( { { 0, @@ -81,7 +82,6 @@ namespace mlx pipeline_descriptor.vertex_shader = p_vertex_shader; pipeline_descriptor.fragment_shader = p_fragment_shader; pipeline_descriptor.color_attachments = { &render_target }; - pipeline_descriptor.depth = &scene.GetDepth(); pipeline_descriptor.clear_color_attachments = false; #ifdef DEBUG m_pipeline.Init(pipeline_descriptor, "mlx_2D_pass"); @@ -93,28 +93,38 @@ namespace mlx std::uint32_t frame_index = renderer.GetCurrentFrameIndex(); ViewerData viewer_data; - viewer_data.projection_matrix = Mat4f::Ortho(0.0f, render_target.GetWidth(), render_target.GetHeight(), 0.0f, -1.0f, 100'000.0f); + viewer_data.projection_matrix = Mat4f::Ortho(0.0f, render_target.GetWidth(), render_target.GetHeight(), 0.0f, -1.0f, 1.0f); static CPUBuffer buffer(sizeof(ViewerData)); std::memcpy(buffer.GetData(), &viewer_data, buffer.GetSize()); p_viewer_data_buffer->SetData(buffer, frame_index); VkCommandBuffer cmd = renderer.GetActiveCommandBuffer(); - m_pipeline.BindPipeline(cmd, 0, {}); - #pragma omp parallel for - for(auto sprite : scene.GetSprites()) + const auto& sprites = scene.GetSprites(); + + for(auto sprite : sprites) { - SpriteData sprite_data; - sprite_data.position = Vec4f{ sprite->GetPosition(), 1.0f }; - sprite_data.color = sprite->GetColor(); + // Check every textures and update modified ones to GPU before starting the render pass if(!sprite->IsSetInit()) sprite->UpdateDescriptorSet(*p_texture_set); Verify((bool)sprite->GetTexture(), "a sprite has no texture attached (internal mlx issue, please report to the devs)"); sprite->GetTexture()->Update(cmd); + } + + m_pipeline.BindPipeline(cmd, 0, {}); + for(auto 
sprite : sprites) + { + SpriteData sprite_data; + sprite_data.position = Vec4f{ sprite->GetPosition(), 0.0f, 1.0f }; + sprite_data.color = sprite->GetColor(); + sprite->Bind(frame_index, cmd); + std::array sets = { p_viewer_data_set->GetSet(frame_index), sprite->GetSet(frame_index) }; - RenderCore::Get().vkCmdBindDescriptorSets(cmd, m_pipeline.GetPipelineBindPoint(), m_pipeline.GetPipelineLayout(), 0, sets.size(), sets.data(), 0, nullptr); + RenderCore::Get().vkCmdPushConstants(cmd, m_pipeline.GetPipelineLayout(), VK_SHADER_STAGE_VERTEX_BIT, 0, sizeof(SpriteData), &sprite_data); + RenderCore::Get().vkCmdBindDescriptorSets(cmd, m_pipeline.GetPipelineBindPoint(), m_pipeline.GetPipelineLayout(), 0, sets.size(), sets.data(), 0, nullptr); + sprite->GetMesh()->Draw(cmd, renderer.GetDrawCallsCounterRef(), renderer.GetPolygonDrawnCounterRef()); } m_pipeline.EndPipeline(cmd); diff --git a/runtime/Sources/Renderer/RenderPasses/Passes.cpp b/runtime/Sources/Renderer/RenderPasses/Passes.cpp index b23ebc8..ce31c2c 100644 --- a/runtime/Sources/Renderer/RenderPasses/Passes.cpp +++ b/runtime/Sources/Renderer/RenderPasses/Passes.cpp @@ -37,7 +37,6 @@ namespace mlx void RenderPasses::Pass(Scene& scene, Renderer& renderer) { m_main_render_texture.Clear(renderer.GetActiveCommandBuffer(), Vec4f{ 0.0f, 0.0f, 0.0f, 1.0f }); - scene.GetDepth().Clear(renderer.GetActiveCommandBuffer(), {}); m_2Dpass.Pass(scene, renderer, m_main_render_texture); m_final.Pass(scene, renderer, m_main_render_texture); diff --git a/third_party/kvf.h b/third_party/kvf.h index 8256b24..6bc5e75 100755 --- a/third_party/kvf.h +++ b/third_party/kvf.h @@ -2974,10 +2974,10 @@ VkPipeline kvfCreateGraphicsPipeline(VkDevice device, VkPipelineLayout layout, K color_blending.logicOp = VK_LOGIC_OP_COPY; color_blending.attachmentCount = 1; color_blending.pAttachments = &builder->color_blend_attachment_state; - color_blending.blendConstants[0] = 0.0f; - color_blending.blendConstants[1] = 0.0f; - color_blending.blendConstants[2] = 0.0f; - color_blending.blendConstants[3] = 0.0f; + color_blending.blendConstants[0] = 1.0f; + color_blending.blendConstants[1] = 1.0f; + color_blending.blendConstants[2] = 1.0f; + color_blending.blendConstants[3] = 1.0f; VkDynamicState states[] = { VK_DYNAMIC_STATE_VIEWPORT, VK_DYNAMIC_STATE_SCISSOR }; From 59455495b62c88d296c5851daf0c7f3aace0e4ce Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Sat, 19 Oct 2024 10:46:31 +0200 Subject: [PATCH 043/131] yes --- runtime/Sources/Renderer/Descriptor.cpp | 1 + 1 file changed, 1 insertion(+) diff --git a/runtime/Sources/Renderer/Descriptor.cpp b/runtime/Sources/Renderer/Descriptor.cpp index 5fb07bc..96e8b57 100644 --- a/runtime/Sources/Renderer/Descriptor.cpp +++ b/runtime/Sources/Renderer/Descriptor.cpp @@ -234,6 +234,7 @@ namespace mlx void DescriptorSet::Reallocate(std::size_t frame_index) noexcept { MLX_PROFILE_FUNCTION(); + Assert(!p_pools_manager, "invalid pools manager"); m_set[frame_index] = p_pools_manager->GetAvailablePool().AllocateDescriptorSet(frame_index, m_set_layout); } } From 0c35f2702e0c0b697da0d752a2bb90c391cc70f6 Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Sun, 20 Oct 2024 00:40:22 +0000 Subject: [PATCH 044/131] [BOT] update dependencies --- third_party/vulkan/vulkan.cppm | 20 +- third_party/vulkan/vulkan.hpp | 213 +- third_party/vulkan/vulkan_core.h | 146 +- third_party/vulkan/vulkan_enums.hpp | 37 +- third_party/vulkan/vulkan_funcs.hpp | 148 +- third_party/vulkan/vulkan_handles.hpp | 68 +- third_party/vulkan/vulkan_raii.hpp | 113 +- 
.../vulkan/vulkan_static_assertions.hpp | 86 +- third_party/vulkan/vulkan_structs.hpp | 2935 ----------------- third_party/vulkan/vulkan_to_string.hpp | 24 +- 10 files changed, 425 insertions(+), 3365 deletions(-) diff --git a/third_party/vulkan/vulkan.cppm b/third_party/vulkan/vulkan.cppm index b4e23a8..5a031ca 100644 --- a/third_party/vulkan/vulkan.cppm +++ b/third_party/vulkan/vulkan.cppm @@ -3250,13 +3250,6 @@ export namespace VULKAN_HPP_NAMESPACE using VULKAN_HPP_NAMESPACE::ShaderResourceUsageAMD; using VULKAN_HPP_NAMESPACE::ShaderStatisticsInfoAMD; - //=== VK_KHR_dynamic_rendering === - using VULKAN_HPP_NAMESPACE::AttachmentSampleCountInfoAMD; - using VULKAN_HPP_NAMESPACE::AttachmentSampleCountInfoNV; - using VULKAN_HPP_NAMESPACE::MultiviewPerViewAttributesInfoNVX; - using VULKAN_HPP_NAMESPACE::RenderingFragmentDensityMapAttachmentInfoEXT; - using VULKAN_HPP_NAMESPACE::RenderingFragmentShadingRateAttachmentInfoKHR; - #if defined( VK_USE_PLATFORM_GGP ) //=== VK_GGP_stream_descriptor_surface === using VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP; @@ -3363,6 +3356,7 @@ export namespace VULKAN_HPP_NAMESPACE using VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE; //=== VK_NVX_multiview_per_view_attributes === + using VULKAN_HPP_NAMESPACE::MultiviewPerViewAttributesInfoNVX; using VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX; //=== VK_NV_viewport_swizzle === @@ -3464,6 +3458,10 @@ export namespace VULKAN_HPP_NAMESPACE using VULKAN_HPP_NAMESPACE::PipelineShaderStageNodeCreateInfoAMDX; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ + //=== VK_AMD_mixed_attachment_samples === + using VULKAN_HPP_NAMESPACE::AttachmentSampleCountInfoAMD; + using VULKAN_HPP_NAMESPACE::AttachmentSampleCountInfoNV; + //=== VK_EXT_sample_locations === using VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT; using VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT; @@ -3633,7 +3631,9 @@ export namespace VULKAN_HPP_NAMESPACE using VULKAN_HPP_NAMESPACE::PipelineViewportExclusiveScissorStateCreateInfoNV; //=== VK_NV_device_diagnostic_checkpoints === + using VULKAN_HPP_NAMESPACE::CheckpointData2NV; using VULKAN_HPP_NAMESPACE::CheckpointDataNV; + using VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointProperties2NV; using VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointPropertiesNV; //=== VK_INTEL_shader_integer_functions2 === @@ -3670,6 +3670,7 @@ export namespace VULKAN_HPP_NAMESPACE //=== VK_EXT_fragment_density_map === using VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapFeaturesEXT; using VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapPropertiesEXT; + using VULKAN_HPP_NAMESPACE::RenderingFragmentDensityMapAttachmentInfoEXT; using VULKAN_HPP_NAMESPACE::RenderPassFragmentDensityMapCreateInfoEXT; //=== VK_KHR_fragment_shading_rate === @@ -3678,6 +3679,7 @@ export namespace VULKAN_HPP_NAMESPACE using VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR; using VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRatePropertiesKHR; using VULKAN_HPP_NAMESPACE::PipelineFragmentShadingRateStateCreateInfoKHR; + using VULKAN_HPP_NAMESPACE::RenderingFragmentShadingRateAttachmentInfoKHR; //=== VK_AMD_shader_core_properties2 === using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreProperties2AMD; @@ -3906,10 +3908,6 @@ export namespace VULKAN_HPP_NAMESPACE using VULKAN_HPP_NAMESPACE::ImportMetalTextureInfoEXT; #endif /*VK_USE_PLATFORM_METAL_EXT*/ - //=== VK_KHR_synchronization2 === - using VULKAN_HPP_NAMESPACE::CheckpointData2NV; - using VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointProperties2NV; 
- //=== VK_EXT_descriptor_buffer === using VULKAN_HPP_NAMESPACE::AccelerationStructureCaptureDescriptorDataInfoEXT; using VULKAN_HPP_NAMESPACE::BufferCaptureDescriptorDataInfoEXT; diff --git a/third_party/vulkan/vulkan.hpp b/third_party/vulkan/vulkan.hpp index 91a0724..406d820 100644 --- a/third_party/vulkan/vulkan.hpp +++ b/third_party/vulkan/vulkan.hpp @@ -63,7 +63,7 @@ extern "C" __declspec( dllimport ) FARPROC __stdcall GetProcAddress( HINSTANCE h # include #endif -static_assert( VK_HEADER_VERSION == 298, "Wrong VK_HEADER_VERSION!" ); +static_assert( VK_HEADER_VERSION == 299, "Wrong VK_HEADER_VERSION!" ); // includes through some other header // this results in major(x) being resolved to gnu_dev_major(x) @@ -4160,6 +4160,12 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCmdWriteBufferMarkerAMD( commandBuffer, pipelineStage, dstBuffer, dstOffset, marker ); } + void vkCmdWriteBufferMarker2AMD( + VkCommandBuffer commandBuffer, VkPipelineStageFlags2 stage, VkBuffer dstBuffer, VkDeviceSize dstOffset, uint32_t marker ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdWriteBufferMarker2AMD( commandBuffer, stage, dstBuffer, dstOffset, marker ); + } + //=== VK_EXT_calibrated_timestamps === VkResult vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( VkPhysicalDevice physicalDevice, @@ -4232,6 +4238,11 @@ namespace VULKAN_HPP_NAMESPACE return ::vkGetQueueCheckpointDataNV( queue, pCheckpointDataCount, pCheckpointData ); } + void vkGetQueueCheckpointData2NV( VkQueue queue, uint32_t * pCheckpointDataCount, VkCheckpointData2NV * pCheckpointData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetQueueCheckpointData2NV( queue, pCheckpointDataCount, pCheckpointData ); + } + //=== VK_KHR_timeline_semaphore === VkResult vkGetSemaphoreCounterValueKHR( VkDevice device, VkSemaphore semaphore, uint64_t * pValue ) const VULKAN_HPP_NOEXCEPT @@ -4848,17 +4859,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkQueueSubmit2KHR( queue, submitCount, pSubmits, fence ); } - void vkCmdWriteBufferMarker2AMD( - VkCommandBuffer commandBuffer, VkPipelineStageFlags2 stage, VkBuffer dstBuffer, VkDeviceSize dstOffset, uint32_t marker ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdWriteBufferMarker2AMD( commandBuffer, stage, dstBuffer, dstOffset, marker ); - } - - void vkGetQueueCheckpointData2NV( VkQueue queue, uint32_t * pCheckpointDataCount, VkCheckpointData2NV * pCheckpointData ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetQueueCheckpointData2NV( queue, pCheckpointDataCount, pCheckpointData ); - } - //=== VK_EXT_descriptor_buffer === void vkGetDescriptorSetLayoutSizeEXT( VkDevice device, VkDescriptorSetLayout layout, VkDeviceSize * pLayoutSizeInBytes ) const VULKAN_HPP_NOEXCEPT @@ -10715,70 +10715,6 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_KHR_dynamic_rendering === - template <> - struct StructExtends - { - enum - { - value = true - }; - }; - - template <> - struct StructExtends - { - enum - { - value = true - }; - }; - - template <> - struct StructExtends - { - enum - { - value = true - }; - }; - - template <> - struct StructExtends - { - enum - { - value = true - }; - }; - - template <> - struct StructExtends - { - enum - { - value = true - }; - }; - - template <> - struct StructExtends - { - enum - { - value = true - }; - }; - - template <> - struct StructExtends - { - enum - { - value = true - }; - }; - //=== VK_NV_corner_sampled_image === template <> struct StructExtends @@ -11122,6 +11058,33 @@ namespace VULKAN_HPP_NAMESPACE }; }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template 
<> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + //=== VK_NV_viewport_swizzle === template <> struct StructExtends @@ -11436,6 +11399,25 @@ namespace VULKAN_HPP_NAMESPACE }; # endif /*VK_ENABLE_BETA_EXTENSIONS*/ + //=== VK_AMD_mixed_attachment_samples === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + //=== VK_EXT_sample_locations === template <> struct StructExtends @@ -12153,6 +12135,15 @@ namespace VULKAN_HPP_NAMESPACE }; }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + //=== VK_INTEL_shader_integer_functions2 === template <> struct StructExtends @@ -12257,6 +12248,15 @@ namespace VULKAN_HPP_NAMESPACE }; }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + //=== VK_KHR_fragment_shading_rate === template <> struct StructExtends @@ -12303,6 +12303,15 @@ namespace VULKAN_HPP_NAMESPACE }; }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + //=== VK_AMD_shader_core_properties2 === template <> struct StructExtends @@ -13602,16 +13611,6 @@ namespace VULKAN_HPP_NAMESPACE }; # endif /*VK_USE_PLATFORM_METAL_EXT*/ - //=== VK_KHR_synchronization2 === - template <> - struct StructExtends - { - enum - { - value = true - }; - }; - //=== VK_EXT_descriptor_buffer === template <> struct StructExtends @@ -17834,7 +17833,8 @@ namespace VULKAN_HPP_NAMESPACE PFN_vkGetMemoryHostPointerPropertiesEXT vkGetMemoryHostPointerPropertiesEXT = 0; //=== VK_AMD_buffer_marker === - PFN_vkCmdWriteBufferMarkerAMD vkCmdWriteBufferMarkerAMD = 0; + PFN_vkCmdWriteBufferMarkerAMD vkCmdWriteBufferMarkerAMD = 0; + PFN_vkCmdWriteBufferMarker2AMD vkCmdWriteBufferMarker2AMD = 0; //=== VK_EXT_calibrated_timestamps === PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT vkGetPhysicalDeviceCalibrateableTimeDomainsEXT = 0; @@ -17850,8 +17850,9 @@ namespace VULKAN_HPP_NAMESPACE PFN_vkCmdSetExclusiveScissorNV vkCmdSetExclusiveScissorNV = 0; //=== VK_NV_device_diagnostic_checkpoints === - PFN_vkCmdSetCheckpointNV vkCmdSetCheckpointNV = 0; - PFN_vkGetQueueCheckpointDataNV vkGetQueueCheckpointDataNV = 0; + PFN_vkCmdSetCheckpointNV vkCmdSetCheckpointNV = 0; + PFN_vkGetQueueCheckpointDataNV vkGetQueueCheckpointDataNV = 0; + PFN_vkGetQueueCheckpointData2NV vkGetQueueCheckpointData2NV = 0; //=== VK_KHR_timeline_semaphore === PFN_vkGetSemaphoreCounterValueKHR vkGetSemaphoreCounterValueKHR = 0; @@ -18027,14 +18028,12 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VK_USE_PLATFORM_METAL_EXT*/ //=== VK_KHR_synchronization2 === - PFN_vkCmdSetEvent2KHR vkCmdSetEvent2KHR = 0; - PFN_vkCmdResetEvent2KHR vkCmdResetEvent2KHR = 0; - PFN_vkCmdWaitEvents2KHR vkCmdWaitEvents2KHR = 0; - PFN_vkCmdPipelineBarrier2KHR vkCmdPipelineBarrier2KHR = 0; - PFN_vkCmdWriteTimestamp2KHR vkCmdWriteTimestamp2KHR = 0; - PFN_vkQueueSubmit2KHR vkQueueSubmit2KHR = 0; - PFN_vkCmdWriteBufferMarker2AMD vkCmdWriteBufferMarker2AMD = 0; - PFN_vkGetQueueCheckpointData2NV vkGetQueueCheckpointData2NV = 0; + PFN_vkCmdSetEvent2KHR vkCmdSetEvent2KHR = 0; + PFN_vkCmdResetEvent2KHR vkCmdResetEvent2KHR = 0; + PFN_vkCmdWaitEvents2KHR vkCmdWaitEvents2KHR = 0; + PFN_vkCmdPipelineBarrier2KHR vkCmdPipelineBarrier2KHR = 0; + PFN_vkCmdWriteTimestamp2KHR vkCmdWriteTimestamp2KHR = 0; + PFN_vkQueueSubmit2KHR vkQueueSubmit2KHR = 0; //=== VK_EXT_descriptor_buffer === PFN_vkGetDescriptorSetLayoutSizeEXT 
vkGetDescriptorSetLayoutSizeEXT = 0; @@ -19152,7 +19151,8 @@ namespace VULKAN_HPP_NAMESPACE vkGetMemoryHostPointerPropertiesEXT = PFN_vkGetMemoryHostPointerPropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetMemoryHostPointerPropertiesEXT" ) ); //=== VK_AMD_buffer_marker === - vkCmdWriteBufferMarkerAMD = PFN_vkCmdWriteBufferMarkerAMD( vkGetInstanceProcAddr( instance, "vkCmdWriteBufferMarkerAMD" ) ); + vkCmdWriteBufferMarkerAMD = PFN_vkCmdWriteBufferMarkerAMD( vkGetInstanceProcAddr( instance, "vkCmdWriteBufferMarkerAMD" ) ); + vkCmdWriteBufferMarker2AMD = PFN_vkCmdWriteBufferMarker2AMD( vkGetInstanceProcAddr( instance, "vkCmdWriteBufferMarker2AMD" ) ); //=== VK_EXT_calibrated_timestamps === vkGetPhysicalDeviceCalibrateableTimeDomainsEXT = @@ -19173,8 +19173,9 @@ namespace VULKAN_HPP_NAMESPACE vkCmdSetExclusiveScissorNV = PFN_vkCmdSetExclusiveScissorNV( vkGetInstanceProcAddr( instance, "vkCmdSetExclusiveScissorNV" ) ); //=== VK_NV_device_diagnostic_checkpoints === - vkCmdSetCheckpointNV = PFN_vkCmdSetCheckpointNV( vkGetInstanceProcAddr( instance, "vkCmdSetCheckpointNV" ) ); - vkGetQueueCheckpointDataNV = PFN_vkGetQueueCheckpointDataNV( vkGetInstanceProcAddr( instance, "vkGetQueueCheckpointDataNV" ) ); + vkCmdSetCheckpointNV = PFN_vkCmdSetCheckpointNV( vkGetInstanceProcAddr( instance, "vkCmdSetCheckpointNV" ) ); + vkGetQueueCheckpointDataNV = PFN_vkGetQueueCheckpointDataNV( vkGetInstanceProcAddr( instance, "vkGetQueueCheckpointDataNV" ) ); + vkGetQueueCheckpointData2NV = PFN_vkGetQueueCheckpointData2NV( vkGetInstanceProcAddr( instance, "vkGetQueueCheckpointData2NV" ) ); //=== VK_KHR_timeline_semaphore === vkGetSemaphoreCounterValueKHR = PFN_vkGetSemaphoreCounterValueKHR( vkGetInstanceProcAddr( instance, "vkGetSemaphoreCounterValueKHR" ) ); @@ -19424,8 +19425,6 @@ namespace VULKAN_HPP_NAMESPACE vkQueueSubmit2KHR = PFN_vkQueueSubmit2KHR( vkGetInstanceProcAddr( instance, "vkQueueSubmit2KHR" ) ); if ( !vkQueueSubmit2 ) vkQueueSubmit2 = vkQueueSubmit2KHR; - vkCmdWriteBufferMarker2AMD = PFN_vkCmdWriteBufferMarker2AMD( vkGetInstanceProcAddr( instance, "vkCmdWriteBufferMarker2AMD" ) ); - vkGetQueueCheckpointData2NV = PFN_vkGetQueueCheckpointData2NV( vkGetInstanceProcAddr( instance, "vkGetQueueCheckpointData2NV" ) ); //=== VK_EXT_descriptor_buffer === vkGetDescriptorSetLayoutSizeEXT = PFN_vkGetDescriptorSetLayoutSizeEXT( vkGetInstanceProcAddr( instance, "vkGetDescriptorSetLayoutSizeEXT" ) ); @@ -20298,7 +20297,8 @@ namespace VULKAN_HPP_NAMESPACE vkGetMemoryHostPointerPropertiesEXT = PFN_vkGetMemoryHostPointerPropertiesEXT( vkGetDeviceProcAddr( device, "vkGetMemoryHostPointerPropertiesEXT" ) ); //=== VK_AMD_buffer_marker === - vkCmdWriteBufferMarkerAMD = PFN_vkCmdWriteBufferMarkerAMD( vkGetDeviceProcAddr( device, "vkCmdWriteBufferMarkerAMD" ) ); + vkCmdWriteBufferMarkerAMD = PFN_vkCmdWriteBufferMarkerAMD( vkGetDeviceProcAddr( device, "vkCmdWriteBufferMarkerAMD" ) ); + vkCmdWriteBufferMarker2AMD = PFN_vkCmdWriteBufferMarker2AMD( vkGetDeviceProcAddr( device, "vkCmdWriteBufferMarker2AMD" ) ); //=== VK_EXT_calibrated_timestamps === vkGetCalibratedTimestampsEXT = PFN_vkGetCalibratedTimestampsEXT( vkGetDeviceProcAddr( device, "vkGetCalibratedTimestampsEXT" ) ); @@ -20315,8 +20315,9 @@ namespace VULKAN_HPP_NAMESPACE vkCmdSetExclusiveScissorNV = PFN_vkCmdSetExclusiveScissorNV( vkGetDeviceProcAddr( device, "vkCmdSetExclusiveScissorNV" ) ); //=== VK_NV_device_diagnostic_checkpoints === - vkCmdSetCheckpointNV = PFN_vkCmdSetCheckpointNV( vkGetDeviceProcAddr( device, "vkCmdSetCheckpointNV" ) ); - 
vkGetQueueCheckpointDataNV = PFN_vkGetQueueCheckpointDataNV( vkGetDeviceProcAddr( device, "vkGetQueueCheckpointDataNV" ) ); + vkCmdSetCheckpointNV = PFN_vkCmdSetCheckpointNV( vkGetDeviceProcAddr( device, "vkCmdSetCheckpointNV" ) ); + vkGetQueueCheckpointDataNV = PFN_vkGetQueueCheckpointDataNV( vkGetDeviceProcAddr( device, "vkGetQueueCheckpointDataNV" ) ); + vkGetQueueCheckpointData2NV = PFN_vkGetQueueCheckpointData2NV( vkGetDeviceProcAddr( device, "vkGetQueueCheckpointData2NV" ) ); //=== VK_KHR_timeline_semaphore === vkGetSemaphoreCounterValueKHR = PFN_vkGetSemaphoreCounterValueKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreCounterValueKHR" ) ); @@ -20526,8 +20527,6 @@ namespace VULKAN_HPP_NAMESPACE vkQueueSubmit2KHR = PFN_vkQueueSubmit2KHR( vkGetDeviceProcAddr( device, "vkQueueSubmit2KHR" ) ); if ( !vkQueueSubmit2 ) vkQueueSubmit2 = vkQueueSubmit2KHR; - vkCmdWriteBufferMarker2AMD = PFN_vkCmdWriteBufferMarker2AMD( vkGetDeviceProcAddr( device, "vkCmdWriteBufferMarker2AMD" ) ); - vkGetQueueCheckpointData2NV = PFN_vkGetQueueCheckpointData2NV( vkGetDeviceProcAddr( device, "vkGetQueueCheckpointData2NV" ) ); //=== VK_EXT_descriptor_buffer === vkGetDescriptorSetLayoutSizeEXT = PFN_vkGetDescriptorSetLayoutSizeEXT( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutSizeEXT" ) ); diff --git a/third_party/vulkan/vulkan_core.h b/third_party/vulkan/vulkan_core.h index d8d673f..4d9776c 100644 --- a/third_party/vulkan/vulkan_core.h +++ b/third_party/vulkan/vulkan_core.h @@ -69,7 +69,7 @@ extern "C" { #define VK_API_VERSION_1_0 VK_MAKE_API_VERSION(0, 1, 0, 0)// Patch version should always be set to 0 // Version of this file -#define VK_HEADER_VERSION 298 +#define VK_HEADER_VERSION 299 // Complete version of this file #define VK_HEADER_VERSION_COMPLETE VK_MAKE_API_VERSION(0, 1, 3, VK_HEADER_VERSION) @@ -508,10 +508,6 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_SESSION_PARAMETERS_ADD_INFO_KHR = 1000040005, VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_DPB_SLOT_INFO_KHR = 1000040006, VK_STRUCTURE_TYPE_TEXTURE_LOD_GATHER_FORMAT_PROPERTIES_AMD = 1000041000, - VK_STRUCTURE_TYPE_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_INFO_KHR = 1000044006, - VK_STRUCTURE_TYPE_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_INFO_EXT = 1000044007, - VK_STRUCTURE_TYPE_ATTACHMENT_SAMPLE_COUNT_INFO_AMD = 1000044008, - VK_STRUCTURE_TYPE_MULTIVIEW_PER_VIEW_ATTRIBUTES_INFO_NVX = 1000044009, VK_STRUCTURE_TYPE_STREAM_DESCRIPTOR_SURFACE_CREATE_INFO_GGP = 1000049000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CORNER_SAMPLED_IMAGE_FEATURES_NV = 1000050000, VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_NV = 1000056000, @@ -553,6 +549,7 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_SWAPCHAIN_COUNTER_CREATE_INFO_EXT = 1000091003, VK_STRUCTURE_TYPE_PRESENT_TIMES_INFO_GOOGLE = 1000092000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_ATTRIBUTES_PROPERTIES_NVX = 1000097000, + VK_STRUCTURE_TYPE_MULTIVIEW_PER_VIEW_ATTRIBUTES_INFO_NVX = 1000044009, VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SWIZZLE_STATE_CREATE_INFO_NV = 1000098000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DISCARD_RECTANGLE_PROPERTIES_EXT = 1000099000, VK_STRUCTURE_TYPE_PIPELINE_DISCARD_RECTANGLE_STATE_CREATE_INFO_EXT = 1000099001, @@ -612,6 +609,7 @@ typedef enum VkStructureType { #ifdef VK_ENABLE_BETA_EXTENSIONS VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_NODE_CREATE_INFO_AMDX = 1000134004, #endif + VK_STRUCTURE_TYPE_ATTACHMENT_SAMPLE_COUNT_INFO_AMD = 1000044008, VK_STRUCTURE_TYPE_SAMPLE_LOCATIONS_INFO_EXT = 1000143000, 
VK_STRUCTURE_TYPE_RENDER_PASS_SAMPLE_LOCATIONS_BEGIN_INFO_EXT = 1000143001, VK_STRUCTURE_TYPE_PIPELINE_SAMPLE_LOCATIONS_STATE_CREATE_INFO_EXT = 1000143002, @@ -703,6 +701,8 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXCLUSIVE_SCISSOR_FEATURES_NV = 1000205002, VK_STRUCTURE_TYPE_CHECKPOINT_DATA_NV = 1000206000, VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_NV = 1000206001, + VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_2_NV = 1000314008, + VK_STRUCTURE_TYPE_CHECKPOINT_DATA_2_NV = 1000314009, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_FUNCTIONS_2_FEATURES_INTEL = 1000209000, VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_QUERY_CREATE_INFO_INTEL = 1000210000, VK_STRUCTURE_TYPE_INITIALIZE_PERFORMANCE_API_INFO_INTEL = 1000210001, @@ -718,11 +718,13 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_FEATURES_EXT = 1000218000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_PROPERTIES_EXT = 1000218001, VK_STRUCTURE_TYPE_RENDER_PASS_FRAGMENT_DENSITY_MAP_CREATE_INFO_EXT = 1000218002, + VK_STRUCTURE_TYPE_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_INFO_EXT = 1000044007, VK_STRUCTURE_TYPE_FRAGMENT_SHADING_RATE_ATTACHMENT_INFO_KHR = 1000226000, VK_STRUCTURE_TYPE_PIPELINE_FRAGMENT_SHADING_RATE_STATE_CREATE_INFO_KHR = 1000226001, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_PROPERTIES_KHR = 1000226002, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_FEATURES_KHR = 1000226003, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_KHR = 1000226004, + VK_STRUCTURE_TYPE_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_INFO_KHR = 1000044006, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_2_AMD = 1000227000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COHERENT_MEMORY_FEATURES_AMD = 1000229000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DYNAMIC_RENDERING_LOCAL_READ_FEATURES_KHR = 1000232000, @@ -848,8 +850,6 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_IMPORT_METAL_IO_SURFACE_INFO_EXT = 1000311009, VK_STRUCTURE_TYPE_EXPORT_METAL_SHARED_EVENT_INFO_EXT = 1000311010, VK_STRUCTURE_TYPE_IMPORT_METAL_SHARED_EVENT_INFO_EXT = 1000311011, - VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_2_NV = 1000314008, - VK_STRUCTURE_TYPE_CHECKPOINT_DATA_2_NV = 1000314009, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_BUFFER_PROPERTIES_EXT = 1000316000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_BUFFER_DENSITY_MAP_PROPERTIES_EXT = 1000316001, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_BUFFER_FEATURES_EXT = 1000316002, @@ -1172,7 +1172,6 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_PIPELINE_RENDERING_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_PIPELINE_RENDERING_CREATE_INFO, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DYNAMIC_RENDERING_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DYNAMIC_RENDERING_FEATURES, VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_RENDERING_INFO_KHR = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_RENDERING_INFO, - VK_STRUCTURE_TYPE_ATTACHMENT_SAMPLE_COUNT_INFO_NV = VK_STRUCTURE_TYPE_ATTACHMENT_SAMPLE_COUNT_INFO_AMD, VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES, @@ -1246,6 +1245,7 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2, 
VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2_KHR = VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2, VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO, + VK_STRUCTURE_TYPE_ATTACHMENT_SAMPLE_COUNT_INFO_NV = VK_STRUCTURE_TYPE_ATTACHMENT_SAMPLE_COUNT_INFO_AMD, VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO, VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO_KHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO, VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO_KHR = VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO, @@ -2647,8 +2647,6 @@ typedef enum VkPipelineCreateFlagBits { VK_PIPELINE_CREATE_DISPATCH_BASE_BIT = 0x00000010, VK_PIPELINE_CREATE_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT = 0x00000100, VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT = 0x00000200, - VK_PIPELINE_CREATE_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR = 0x00200000, - VK_PIPELINE_CREATE_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT = 0x00400000, VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_ANY_HIT_SHADERS_BIT_KHR = 0x00004000, VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_CLOSEST_HIT_SHADERS_BIT_KHR = 0x00008000, VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_MISS_SHADERS_BIT_KHR = 0x00010000, @@ -2657,6 +2655,8 @@ typedef enum VkPipelineCreateFlagBits { VK_PIPELINE_CREATE_RAY_TRACING_SKIP_AABBS_BIT_KHR = 0x00002000, VK_PIPELINE_CREATE_RAY_TRACING_SHADER_GROUP_HANDLE_CAPTURE_REPLAY_BIT_KHR = 0x00080000, VK_PIPELINE_CREATE_DEFER_COMPILE_BIT_NV = 0x00000020, + VK_PIPELINE_CREATE_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT = 0x00400000, + VK_PIPELINE_CREATE_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR = 0x00200000, VK_PIPELINE_CREATE_CAPTURE_STATISTICS_BIT_KHR = 0x00000040, VK_PIPELINE_CREATE_CAPTURE_INTERNAL_REPRESENTATIONS_BIT_KHR = 0x00000080, VK_PIPELINE_CREATE_INDIRECT_BINDABLE_BIT_NV = 0x00040000, @@ -2674,12 +2674,12 @@ typedef enum VkPipelineCreateFlagBits { VK_PIPELINE_CREATE_NO_PROTECTED_ACCESS_BIT_EXT = 0x08000000, VK_PIPELINE_CREATE_PROTECTED_ACCESS_ONLY_BIT_EXT = 0x40000000, VK_PIPELINE_CREATE_DISPATCH_BASE = VK_PIPELINE_CREATE_DISPATCH_BASE_BIT, - // VK_PIPELINE_RASTERIZATION_STATE_CREATE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR is a deprecated alias - VK_PIPELINE_RASTERIZATION_STATE_CREATE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR = VK_PIPELINE_CREATE_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR, - // VK_PIPELINE_RASTERIZATION_STATE_CREATE_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT is a deprecated alias - VK_PIPELINE_RASTERIZATION_STATE_CREATE_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT = VK_PIPELINE_CREATE_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT, VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT_KHR = VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT, VK_PIPELINE_CREATE_DISPATCH_BASE_KHR = VK_PIPELINE_CREATE_DISPATCH_BASE, + // VK_PIPELINE_RASTERIZATION_STATE_CREATE_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT is a deprecated alias + VK_PIPELINE_RASTERIZATION_STATE_CREATE_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT = VK_PIPELINE_CREATE_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT, + // VK_PIPELINE_RASTERIZATION_STATE_CREATE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR is a deprecated alias + VK_PIPELINE_RASTERIZATION_STATE_CREATE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR = VK_PIPELINE_CREATE_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR, VK_PIPELINE_CREATE_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT_EXT = VK_PIPELINE_CREATE_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT, 
VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT_EXT = VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT, VK_PIPELINE_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF @@ -8903,38 +8903,6 @@ typedef VkPhysicalDeviceDynamicRenderingFeatures VkPhysicalDeviceDynamicRenderin typedef VkCommandBufferInheritanceRenderingInfo VkCommandBufferInheritanceRenderingInfoKHR; -typedef struct VkRenderingFragmentShadingRateAttachmentInfoKHR { - VkStructureType sType; - const void* pNext; - VkImageView imageView; - VkImageLayout imageLayout; - VkExtent2D shadingRateAttachmentTexelSize; -} VkRenderingFragmentShadingRateAttachmentInfoKHR; - -typedef struct VkRenderingFragmentDensityMapAttachmentInfoEXT { - VkStructureType sType; - const void* pNext; - VkImageView imageView; - VkImageLayout imageLayout; -} VkRenderingFragmentDensityMapAttachmentInfoEXT; - -typedef struct VkAttachmentSampleCountInfoAMD { - VkStructureType sType; - const void* pNext; - uint32_t colorAttachmentCount; - const VkSampleCountFlagBits* pColorAttachmentSamples; - VkSampleCountFlagBits depthStencilAttachmentSamples; -} VkAttachmentSampleCountInfoAMD; - -typedef VkAttachmentSampleCountInfoAMD VkAttachmentSampleCountInfoNV; - -typedef struct VkMultiviewPerViewAttributesInfoNVX { - VkStructureType sType; - const void* pNext; - VkBool32 perViewAttributes; - VkBool32 perViewAttributesPositionXOnly; -} VkMultiviewPerViewAttributesInfoNVX; - typedef void (VKAPI_PTR *PFN_vkCmdBeginRenderingKHR)(VkCommandBuffer commandBuffer, const VkRenderingInfo* pRenderingInfo); typedef void (VKAPI_PTR *PFN_vkCmdEndRenderingKHR)(VkCommandBuffer commandBuffer); @@ -10309,6 +10277,14 @@ typedef struct VkPhysicalDeviceFragmentShadingRateKHR { VkExtent2D fragmentSize; } VkPhysicalDeviceFragmentShadingRateKHR; +typedef struct VkRenderingFragmentShadingRateAttachmentInfoKHR { + VkStructureType sType; + const void* pNext; + VkImageView imageView; + VkImageLayout imageLayout; + VkExtent2D shadingRateAttachmentTexelSize; +} VkRenderingFragmentShadingRateAttachmentInfoKHR; + typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceFragmentShadingRatesKHR)(VkPhysicalDevice physicalDevice, uint32_t* pFragmentShadingRateCount, VkPhysicalDeviceFragmentShadingRateKHR* pFragmentShadingRates); typedef void (VKAPI_PTR *PFN_vkCmdSetFragmentShadingRateKHR)(VkCommandBuffer commandBuffer, const VkExtent2D* pFragmentSize, const VkFragmentShadingRateCombinerOpKHR combinerOps[2]); @@ -10890,27 +10866,12 @@ typedef VkCommandBufferSubmitInfo VkCommandBufferSubmitInfoKHR; typedef VkPhysicalDeviceSynchronization2Features VkPhysicalDeviceSynchronization2FeaturesKHR; -typedef struct VkQueueFamilyCheckpointProperties2NV { - VkStructureType sType; - void* pNext; - VkPipelineStageFlags2 checkpointExecutionStageMask; -} VkQueueFamilyCheckpointProperties2NV; - -typedef struct VkCheckpointData2NV { - VkStructureType sType; - void* pNext; - VkPipelineStageFlags2 stage; - void* pCheckpointMarker; -} VkCheckpointData2NV; - typedef void (VKAPI_PTR *PFN_vkCmdSetEvent2KHR)(VkCommandBuffer commandBuffer, VkEvent event, const VkDependencyInfo* pDependencyInfo); typedef void (VKAPI_PTR *PFN_vkCmdResetEvent2KHR)(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags2 stageMask); typedef void (VKAPI_PTR *PFN_vkCmdWaitEvents2KHR)(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent* pEvents, const VkDependencyInfo* pDependencyInfos); typedef void (VKAPI_PTR *PFN_vkCmdPipelineBarrier2KHR)(VkCommandBuffer commandBuffer, const VkDependencyInfo* pDependencyInfo); typedef void (VKAPI_PTR 
*PFN_vkCmdWriteTimestamp2KHR)(VkCommandBuffer commandBuffer, VkPipelineStageFlags2 stage, VkQueryPool queryPool, uint32_t query); typedef VkResult (VKAPI_PTR *PFN_vkQueueSubmit2KHR)(VkQueue queue, uint32_t submitCount, const VkSubmitInfo2* pSubmits, VkFence fence); -typedef void (VKAPI_PTR *PFN_vkCmdWriteBufferMarker2AMD)(VkCommandBuffer commandBuffer, VkPipelineStageFlags2 stage, VkBuffer dstBuffer, VkDeviceSize dstOffset, uint32_t marker); -typedef void (VKAPI_PTR *PFN_vkGetQueueCheckpointData2NV)(VkQueue queue, uint32_t* pCheckpointDataCount, VkCheckpointData2NV* pCheckpointData); #ifndef VK_NO_PROTOTYPES VKAPI_ATTR void VKAPI_CALL vkCmdSetEvent2KHR( @@ -10944,18 +10905,6 @@ VKAPI_ATTR VkResult VKAPI_CALL vkQueueSubmit2KHR( uint32_t submitCount, const VkSubmitInfo2* pSubmits, VkFence fence); - -VKAPI_ATTR void VKAPI_CALL vkCmdWriteBufferMarker2AMD( - VkCommandBuffer commandBuffer, - VkPipelineStageFlags2 stage, - VkBuffer dstBuffer, - VkDeviceSize dstOffset, - uint32_t marker); - -VKAPI_ATTR void VKAPI_CALL vkGetQueueCheckpointData2NV( - VkQueue queue, - uint32_t* pCheckpointDataCount, - VkCheckpointData2NV* pCheckpointData); #endif @@ -13013,6 +12962,13 @@ typedef struct VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX { VkBool32 perViewPositionAllComponents; } VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX; +typedef struct VkMultiviewPerViewAttributesInfoNVX { + VkStructureType sType; + const void* pNext; + VkBool32 perViewAttributes; + VkBool32 perViewAttributesPositionXOnly; +} VkMultiviewPerViewAttributesInfoNVX; + // VK_NV_viewport_swizzle is a preprocessor guard. Do not pass it to API calls. @@ -13381,6 +13337,14 @@ typedef VkPhysicalDeviceSamplerFilterMinmaxProperties VkPhysicalDeviceSamplerFil #define VK_AMD_mixed_attachment_samples 1 #define VK_AMD_MIXED_ATTACHMENT_SAMPLES_SPEC_VERSION 1 #define VK_AMD_MIXED_ATTACHMENT_SAMPLES_EXTENSION_NAME "VK_AMD_mixed_attachment_samples" +typedef struct VkAttachmentSampleCountInfoAMD { + VkStructureType sType; + const void* pNext; + uint32_t colorAttachmentCount; + const VkSampleCountFlagBits* pColorAttachmentSamples; + VkSampleCountFlagBits depthStencilAttachmentSamples; +} VkAttachmentSampleCountInfoAMD; + // VK_AMD_shader_fragment_mask is a preprocessor guard. Do not pass it to API calls. @@ -13560,6 +13524,8 @@ typedef struct VkPipelineCoverageModulationStateCreateInfoNV { const float* pCoverageModulationTable; } VkPipelineCoverageModulationStateCreateInfoNV; +typedef VkAttachmentSampleCountInfoAMD VkAttachmentSampleCountInfoNV; + // VK_NV_fill_rectangle is a preprocessor guard. Do not pass it to API calls. 
@@ -14317,6 +14283,7 @@ VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryHostPointerPropertiesEXT( #define VK_AMD_BUFFER_MARKER_SPEC_VERSION 1 #define VK_AMD_BUFFER_MARKER_EXTENSION_NAME "VK_AMD_buffer_marker" typedef void (VKAPI_PTR *PFN_vkCmdWriteBufferMarkerAMD)(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage, VkBuffer dstBuffer, VkDeviceSize dstOffset, uint32_t marker); +typedef void (VKAPI_PTR *PFN_vkCmdWriteBufferMarker2AMD)(VkCommandBuffer commandBuffer, VkPipelineStageFlags2 stage, VkBuffer dstBuffer, VkDeviceSize dstOffset, uint32_t marker); #ifndef VK_NO_PROTOTYPES VKAPI_ATTR void VKAPI_CALL vkCmdWriteBufferMarkerAMD( @@ -14325,6 +14292,13 @@ VKAPI_ATTR void VKAPI_CALL vkCmdWriteBufferMarkerAMD( VkBuffer dstBuffer, VkDeviceSize dstOffset, uint32_t marker); + +VKAPI_ATTR void VKAPI_CALL vkCmdWriteBufferMarker2AMD( + VkCommandBuffer commandBuffer, + VkPipelineStageFlags2 stage, + VkBuffer dstBuffer, + VkDeviceSize dstOffset, + uint32_t marker); #endif @@ -14595,8 +14569,22 @@ typedef struct VkCheckpointDataNV { void* pCheckpointMarker; } VkCheckpointDataNV; +typedef struct VkQueueFamilyCheckpointProperties2NV { + VkStructureType sType; + void* pNext; + VkPipelineStageFlags2 checkpointExecutionStageMask; +} VkQueueFamilyCheckpointProperties2NV; + +typedef struct VkCheckpointData2NV { + VkStructureType sType; + void* pNext; + VkPipelineStageFlags2 stage; + void* pCheckpointMarker; +} VkCheckpointData2NV; + typedef void (VKAPI_PTR *PFN_vkCmdSetCheckpointNV)(VkCommandBuffer commandBuffer, const void* pCheckpointMarker); typedef void (VKAPI_PTR *PFN_vkGetQueueCheckpointDataNV)(VkQueue queue, uint32_t* pCheckpointDataCount, VkCheckpointDataNV* pCheckpointData); +typedef void (VKAPI_PTR *PFN_vkGetQueueCheckpointData2NV)(VkQueue queue, uint32_t* pCheckpointDataCount, VkCheckpointData2NV* pCheckpointData); #ifndef VK_NO_PROTOTYPES VKAPI_ATTR void VKAPI_CALL vkCmdSetCheckpointNV( @@ -14607,6 +14595,11 @@ VKAPI_ATTR void VKAPI_CALL vkGetQueueCheckpointDataNV( VkQueue queue, uint32_t* pCheckpointDataCount, VkCheckpointDataNV* pCheckpointData); + +VKAPI_ATTR void VKAPI_CALL vkGetQueueCheckpointData2NV( + VkQueue queue, + uint32_t* pCheckpointDataCount, + VkCheckpointData2NV* pCheckpointData); #endif @@ -14828,6 +14821,13 @@ typedef struct VkRenderPassFragmentDensityMapCreateInfoEXT { VkAttachmentReference fragmentDensityMapAttachment; } VkRenderPassFragmentDensityMapCreateInfoEXT; +typedef struct VkRenderingFragmentDensityMapAttachmentInfoEXT { + VkStructureType sType; + const void* pNext; + VkImageView imageView; + VkImageLayout imageLayout; +} VkRenderingFragmentDensityMapAttachmentInfoEXT; + // VK_EXT_scalar_block_layout is a preprocessor guard. Do not pass it to API calls. 
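The vulkan_core.h hunks above relocate the "2" variants of the AMD buffer-marker write and the NV diagnostic-checkpoint query out of the VK_KHR_synchronization2 block and into their owning extensions (VK_AMD_buffer_marker and VK_NV_device_diagnostic_checkpoints); the prototypes themselves are unchanged. The sketch below is illustrative only and is not part of the patch: it shows how these entry points are commonly combined to narrow down a VK_ERROR_DEVICE_LOST, assuming a VkDevice created with both extensions and the synchronization2 feature enabled, the loader-provided <vulkan/vulkan.h> header, and hypothetical caller-supplied handles cmd, queue and markerBuffer.

    #include <vulkan/vulkan.h>
    #include <cstdio>
    #include <vector>

    // Record progress markers into a command buffer, then (after a device loss on
    // the queue that executed it) query which checkpoints actually ran.
    void recordMarkersAndDumpCheckpoints(VkCommandBuffer cmd, VkQueue queue, VkBuffer markerBuffer)
    {
        // Write a marker as soon as the GPU starts processing this point ...
        vkCmdWriteBufferMarker2AMD(cmd, VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT,
                                   markerBuffer, /*dstOffset*/ 0, /*marker*/ 1u);

        // ... tag the following work with a checkpoint marker (any pointer; here a string literal) ...
        static const char* kDrawTag = "main draw";
        vkCmdSetCheckpointNV(cmd, kDrawTag);

        // ... and write a second marker once all prior work has completed.
        vkCmdWriteBufferMarker2AMD(cmd, VK_PIPELINE_STAGE_2_BOTTOM_OF_PIPE_BIT,
                                   markerBuffer, /*dstOffset*/ 4, /*marker*/ 2u);

        // After VK_ERROR_DEVICE_LOST: enumerate checkpoint data with the usual
        // count-then-fill pattern of the "2" query.
        uint32_t count = 0;
        vkGetQueueCheckpointData2NV(queue, &count, nullptr);

        std::vector<VkCheckpointData2NV> checkpoints(count); // zero-initialized
        for (VkCheckpointData2NV& cp : checkpoints)
            cp.sType = VK_STRUCTURE_TYPE_CHECKPOINT_DATA_2_NV;
        vkGetQueueCheckpointData2NV(queue, &count, checkpoints.data());

        for (const VkCheckpointData2NV& cp : checkpoints)
            std::printf("checkpoint at stage %#llx: %s\n",
                        static_cast<unsigned long long>(cp.stage),
                        static_cast<const char*>(cp.pCheckpointMarker));
    }
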
diff --git a/third_party/vulkan/vulkan_enums.hpp b/third_party/vulkan/vulkan_enums.hpp index 44eef6a..6fac9aa 100644 --- a/third_party/vulkan/vulkan_enums.hpp +++ b/third_party/vulkan/vulkan_enums.hpp @@ -766,11 +766,6 @@ namespace VULKAN_HPP_NAMESPACE eVideoDecodeH264SessionParametersAddInfoKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_SESSION_PARAMETERS_ADD_INFO_KHR, eVideoDecodeH264DpbSlotInfoKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_DPB_SLOT_INFO_KHR, eTextureLodGatherFormatPropertiesAMD = VK_STRUCTURE_TYPE_TEXTURE_LOD_GATHER_FORMAT_PROPERTIES_AMD, - eRenderingFragmentShadingRateAttachmentInfoKHR = VK_STRUCTURE_TYPE_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_INFO_KHR, - eRenderingFragmentDensityMapAttachmentInfoEXT = VK_STRUCTURE_TYPE_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_INFO_EXT, - eAttachmentSampleCountInfoAMD = VK_STRUCTURE_TYPE_ATTACHMENT_SAMPLE_COUNT_INFO_AMD, - eAttachmentSampleCountInfoNV = VK_STRUCTURE_TYPE_ATTACHMENT_SAMPLE_COUNT_INFO_NV, - eMultiviewPerViewAttributesInfoNVX = VK_STRUCTURE_TYPE_MULTIVIEW_PER_VIEW_ATTRIBUTES_INFO_NVX, #if defined( VK_USE_PLATFORM_GGP ) eStreamDescriptorSurfaceCreateInfoGGP = VK_STRUCTURE_TYPE_STREAM_DESCRIPTOR_SURFACE_CREATE_INFO_GGP, #endif /*VK_USE_PLATFORM_GGP*/ @@ -822,6 +817,7 @@ namespace VULKAN_HPP_NAMESPACE eSwapchainCounterCreateInfoEXT = VK_STRUCTURE_TYPE_SWAPCHAIN_COUNTER_CREATE_INFO_EXT, ePresentTimesInfoGOOGLE = VK_STRUCTURE_TYPE_PRESENT_TIMES_INFO_GOOGLE, ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_ATTRIBUTES_PROPERTIES_NVX, + eMultiviewPerViewAttributesInfoNVX = VK_STRUCTURE_TYPE_MULTIVIEW_PER_VIEW_ATTRIBUTES_INFO_NVX, ePipelineViewportSwizzleStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SWIZZLE_STATE_CREATE_INFO_NV, ePhysicalDeviceDiscardRectanglePropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DISCARD_RECTANGLE_PROPERTIES_EXT, ePipelineDiscardRectangleStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_DISCARD_RECTANGLE_STATE_CREATE_INFO_EXT, @@ -881,6 +877,8 @@ namespace VULKAN_HPP_NAMESPACE eExecutionGraphPipelineCreateInfoAMDX = VK_STRUCTURE_TYPE_EXECUTION_GRAPH_PIPELINE_CREATE_INFO_AMDX, ePipelineShaderStageNodeCreateInfoAMDX = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_NODE_CREATE_INFO_AMDX, #endif /*VK_ENABLE_BETA_EXTENSIONS*/ + eAttachmentSampleCountInfoAMD = VK_STRUCTURE_TYPE_ATTACHMENT_SAMPLE_COUNT_INFO_AMD, + eAttachmentSampleCountInfoNV = VK_STRUCTURE_TYPE_ATTACHMENT_SAMPLE_COUNT_INFO_NV, eSampleLocationsInfoEXT = VK_STRUCTURE_TYPE_SAMPLE_LOCATIONS_INFO_EXT, eRenderPassSampleLocationsBeginInfoEXT = VK_STRUCTURE_TYPE_RENDER_PASS_SAMPLE_LOCATIONS_BEGIN_INFO_EXT, ePipelineSampleLocationsStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_SAMPLE_LOCATIONS_STATE_CREATE_INFO_EXT, @@ -975,6 +973,8 @@ namespace VULKAN_HPP_NAMESPACE ePhysicalDeviceExclusiveScissorFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXCLUSIVE_SCISSOR_FEATURES_NV, eCheckpointDataNV = VK_STRUCTURE_TYPE_CHECKPOINT_DATA_NV, eQueueFamilyCheckpointPropertiesNV = VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_NV, + eQueueFamilyCheckpointProperties2NV = VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_2_NV, + eCheckpointData2NV = VK_STRUCTURE_TYPE_CHECKPOINT_DATA_2_NV, ePhysicalDeviceShaderIntegerFunctions2FeaturesINTEL = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_FUNCTIONS_2_FEATURES_INTEL, eQueryPoolPerformanceQueryCreateInfoINTEL = VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_QUERY_CREATE_INFO_INTEL, eQueryPoolCreateInfoINTEL = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO_INTEL, 
@@ -995,11 +995,13 @@ namespace VULKAN_HPP_NAMESPACE ePhysicalDeviceFragmentDensityMapFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_FEATURES_EXT, ePhysicalDeviceFragmentDensityMapPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_PROPERTIES_EXT, eRenderPassFragmentDensityMapCreateInfoEXT = VK_STRUCTURE_TYPE_RENDER_PASS_FRAGMENT_DENSITY_MAP_CREATE_INFO_EXT, + eRenderingFragmentDensityMapAttachmentInfoEXT = VK_STRUCTURE_TYPE_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_INFO_EXT, eFragmentShadingRateAttachmentInfoKHR = VK_STRUCTURE_TYPE_FRAGMENT_SHADING_RATE_ATTACHMENT_INFO_KHR, ePipelineFragmentShadingRateStateCreateInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_FRAGMENT_SHADING_RATE_STATE_CREATE_INFO_KHR, ePhysicalDeviceFragmentShadingRatePropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_PROPERTIES_KHR, ePhysicalDeviceFragmentShadingRateFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_FEATURES_KHR, ePhysicalDeviceFragmentShadingRateKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_KHR, + eRenderingFragmentShadingRateAttachmentInfoKHR = VK_STRUCTURE_TYPE_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_INFO_KHR, ePhysicalDeviceShaderCoreProperties2AMD = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_2_AMD, ePhysicalDeviceCoherentMemoryFeaturesAMD = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COHERENT_MEMORY_FEATURES_AMD, ePhysicalDeviceDynamicRenderingLocalReadFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DYNAMIC_RENDERING_LOCAL_READ_FEATURES_KHR, @@ -1133,8 +1135,6 @@ namespace VULKAN_HPP_NAMESPACE eExportMetalSharedEventInfoEXT = VK_STRUCTURE_TYPE_EXPORT_METAL_SHARED_EVENT_INFO_EXT, eImportMetalSharedEventInfoEXT = VK_STRUCTURE_TYPE_IMPORT_METAL_SHARED_EVENT_INFO_EXT, #endif /*VK_USE_PLATFORM_METAL_EXT*/ - eQueueFamilyCheckpointProperties2NV = VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_2_NV, - eCheckpointData2NV = VK_STRUCTURE_TYPE_CHECKPOINT_DATA_2_NV, ePhysicalDeviceDescriptorBufferPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_BUFFER_PROPERTIES_EXT, ePhysicalDeviceDescriptorBufferDensityMapPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_BUFFER_DENSITY_MAP_PROPERTIES_EXT, ePhysicalDeviceDescriptorBufferFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_BUFFER_FEATURES_EXT, @@ -2915,10 +2915,6 @@ namespace VULKAN_HPP_NAMESPACE eFailOnPipelineCompileRequiredEXT = VK_PIPELINE_CREATE_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT_EXT, eEarlyReturnOnFailure = VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT, eEarlyReturnOnFailureEXT = VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT_EXT, - eRenderingFragmentShadingRateAttachmentKHR = VK_PIPELINE_CREATE_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR, - eVkPipelineRasterizationStateCreateFragmentShadingRateAttachmentKHR = VK_PIPELINE_RASTERIZATION_STATE_CREATE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR, - eRenderingFragmentDensityMapAttachmentEXT = VK_PIPELINE_CREATE_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT, - eVkPipelineRasterizationStateCreateFragmentDensityMapAttachmentEXT = VK_PIPELINE_RASTERIZATION_STATE_CREATE_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT, eRayTracingNoNullAnyHitShadersKHR = VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_ANY_HIT_SHADERS_BIT_KHR, eRayTracingNoNullClosestHitShadersKHR = VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_CLOSEST_HIT_SHADERS_BIT_KHR, eRayTracingNoNullMissShadersKHR = VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_MISS_SHADERS_BIT_KHR, @@ -2927,6 +2923,10 @@ namespace VULKAN_HPP_NAMESPACE 
eRayTracingSkipAabbsKHR = VK_PIPELINE_CREATE_RAY_TRACING_SKIP_AABBS_BIT_KHR, eRayTracingShaderGroupHandleCaptureReplayKHR = VK_PIPELINE_CREATE_RAY_TRACING_SHADER_GROUP_HANDLE_CAPTURE_REPLAY_BIT_KHR, eDeferCompileNV = VK_PIPELINE_CREATE_DEFER_COMPILE_BIT_NV, + eRenderingFragmentDensityMapAttachmentEXT = VK_PIPELINE_CREATE_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT, + eVkPipelineRasterizationStateCreateFragmentDensityMapAttachmentEXT = VK_PIPELINE_RASTERIZATION_STATE_CREATE_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT, + eRenderingFragmentShadingRateAttachmentKHR = VK_PIPELINE_CREATE_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR, + eVkPipelineRasterizationStateCreateFragmentShadingRateAttachmentKHR = VK_PIPELINE_RASTERIZATION_STATE_CREATE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR, eCaptureStatisticsKHR = VK_PIPELINE_CREATE_CAPTURE_STATISTICS_BIT_KHR, eCaptureInternalRepresentationsKHR = VK_PIPELINE_CREATE_CAPTURE_INTERNAL_REPRESENTATIONS_BIT_KHR, eIndirectBindableNV = VK_PIPELINE_CREATE_INDIRECT_BINDABLE_BIT_NV, @@ -2954,16 +2954,17 @@ namespace VULKAN_HPP_NAMESPACE static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineCreateFlags allFlags = PipelineCreateFlagBits::eDisableOptimization | PipelineCreateFlagBits::eAllowDerivatives | PipelineCreateFlagBits::eDerivative | PipelineCreateFlagBits::eViewIndexFromDeviceIndex | PipelineCreateFlagBits::eDispatchBase | PipelineCreateFlagBits::eFailOnPipelineCompileRequired | - PipelineCreateFlagBits::eEarlyReturnOnFailure | PipelineCreateFlagBits::eRenderingFragmentShadingRateAttachmentKHR | - PipelineCreateFlagBits::eRenderingFragmentDensityMapAttachmentEXT | PipelineCreateFlagBits::eRayTracingNoNullAnyHitShadersKHR | + PipelineCreateFlagBits::eEarlyReturnOnFailure | PipelineCreateFlagBits::eRayTracingNoNullAnyHitShadersKHR | PipelineCreateFlagBits::eRayTracingNoNullClosestHitShadersKHR | PipelineCreateFlagBits::eRayTracingNoNullMissShadersKHR | PipelineCreateFlagBits::eRayTracingNoNullIntersectionShadersKHR | PipelineCreateFlagBits::eRayTracingSkipTrianglesKHR | PipelineCreateFlagBits::eRayTracingSkipAabbsKHR | PipelineCreateFlagBits::eRayTracingShaderGroupHandleCaptureReplayKHR | - PipelineCreateFlagBits::eDeferCompileNV | PipelineCreateFlagBits::eCaptureStatisticsKHR | PipelineCreateFlagBits::eCaptureInternalRepresentationsKHR | - PipelineCreateFlagBits::eIndirectBindableNV | PipelineCreateFlagBits::eLibraryKHR | PipelineCreateFlagBits::eDescriptorBufferEXT | - PipelineCreateFlagBits::eRetainLinkTimeOptimizationInfoEXT | PipelineCreateFlagBits::eLinkTimeOptimizationEXT | - PipelineCreateFlagBits::eRayTracingAllowMotionNV | PipelineCreateFlagBits::eColorAttachmentFeedbackLoopEXT | - PipelineCreateFlagBits::eDepthStencilAttachmentFeedbackLoopEXT | PipelineCreateFlagBits::eRayTracingOpacityMicromapEXT + PipelineCreateFlagBits::eDeferCompileNV | PipelineCreateFlagBits::eRenderingFragmentDensityMapAttachmentEXT | + PipelineCreateFlagBits::eRenderingFragmentShadingRateAttachmentKHR | PipelineCreateFlagBits::eCaptureStatisticsKHR | + PipelineCreateFlagBits::eCaptureInternalRepresentationsKHR | PipelineCreateFlagBits::eIndirectBindableNV | PipelineCreateFlagBits::eLibraryKHR | + PipelineCreateFlagBits::eDescriptorBufferEXT | PipelineCreateFlagBits::eRetainLinkTimeOptimizationInfoEXT | + PipelineCreateFlagBits::eLinkTimeOptimizationEXT | PipelineCreateFlagBits::eRayTracingAllowMotionNV | + PipelineCreateFlagBits::eColorAttachmentFeedbackLoopEXT | PipelineCreateFlagBits::eDepthStencilAttachmentFeedbackLoopEXT | + 
PipelineCreateFlagBits::eRayTracingOpacityMicromapEXT #if defined( VK_ENABLE_BETA_EXTENSIONS ) | PipelineCreateFlagBits::eRayTracingDisplacementMicromapNV #endif /*VK_ENABLE_BETA_EXTENSIONS*/ diff --git a/third_party/vulkan/vulkan_funcs.hpp b/third_party/vulkan/vulkan_funcs.hpp index b07c66b..f410d2f 100644 --- a/third_party/vulkan/vulkan_funcs.hpp +++ b/third_party/vulkan/vulkan_funcs.hpp @@ -18344,6 +18344,21 @@ namespace VULKAN_HPP_NAMESPACE marker ); } + template + VULKAN_HPP_INLINE void CommandBuffer::writeBufferMarker2AMD( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, + uint32_t marker, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdWriteBufferMarker2AMD( static_cast( m_commandBuffer ), + static_cast( stage ), + static_cast( dstBuffer ), + static_cast( dstOffset ), + marker ); + } + //=== VK_EXT_calibrated_timestamps === template @@ -18688,6 +18703,65 @@ namespace VULKAN_HPP_NAMESPACE } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + template + VULKAN_HPP_INLINE void Queue::getCheckpointData2NV( uint32_t * pCheckpointDataCount, + VULKAN_HPP_NAMESPACE::CheckpointData2NV * pCheckpointData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetQueueCheckpointData2NV( static_cast( m_queue ), pCheckpointDataCount, reinterpret_cast( pCheckpointData ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + Queue::getCheckpointData2NV( Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetQueueCheckpointData2NV && "Function requires " ); +# endif + + std::vector checkpointData; + uint32_t checkpointDataCount; + d.vkGetQueueCheckpointData2NV( m_queue, &checkpointDataCount, nullptr ); + checkpointData.resize( checkpointDataCount ); + d.vkGetQueueCheckpointData2NV( m_queue, &checkpointDataCount, reinterpret_cast( checkpointData.data() ) ); + + VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() ); + if ( checkpointDataCount < checkpointData.size() ) + { + checkpointData.resize( checkpointDataCount ); + } + return checkpointData; + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + Queue::getCheckpointData2NV( CheckpointData2NVAllocator & checkpointData2NVAllocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetQueueCheckpointData2NV && "Function requires " ); +# endif + + std::vector checkpointData( checkpointData2NVAllocator ); + uint32_t checkpointDataCount; + d.vkGetQueueCheckpointData2NV( m_queue, &checkpointDataCount, nullptr ); + checkpointData.resize( checkpointDataCount ); + d.vkGetQueueCheckpointData2NV( m_queue, &checkpointDataCount, reinterpret_cast( checkpointData.data() ) ); + + VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() ); + if ( checkpointDataCount < checkpointData.size() ) + { + checkpointData.resize( checkpointDataCount ); + } + return checkpointData; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + //=== VK_KHR_timeline_semaphore === template @@ -22231,80 +22305,6 @@ namespace VULKAN_HPP_NAMESPACE } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - 
template - VULKAN_HPP_INLINE void CommandBuffer::writeBufferMarker2AMD( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage, - VULKAN_HPP_NAMESPACE::Buffer dstBuffer, - VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, - uint32_t marker, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT - { - VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdWriteBufferMarker2AMD( static_cast( m_commandBuffer ), - static_cast( stage ), - static_cast( dstBuffer ), - static_cast( dstOffset ), - marker ); - } - - template - VULKAN_HPP_INLINE void Queue::getCheckpointData2NV( uint32_t * pCheckpointDataCount, - VULKAN_HPP_NAMESPACE::CheckpointData2NV * pCheckpointData, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT - { - VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetQueueCheckpointData2NV( static_cast( m_queue ), pCheckpointDataCount, reinterpret_cast( pCheckpointData ) ); - } - -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector - Queue::getCheckpointData2NV( Dispatch const & d ) const - { - VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); -# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) - VULKAN_HPP_ASSERT( d.vkGetQueueCheckpointData2NV && "Function requires " ); -# endif - - std::vector checkpointData; - uint32_t checkpointDataCount; - d.vkGetQueueCheckpointData2NV( m_queue, &checkpointDataCount, nullptr ); - checkpointData.resize( checkpointDataCount ); - d.vkGetQueueCheckpointData2NV( m_queue, &checkpointDataCount, reinterpret_cast( checkpointData.data() ) ); - - VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() ); - if ( checkpointDataCount < checkpointData.size() ) - { - checkpointData.resize( checkpointDataCount ); - } - return checkpointData; - } - - template ::value, int>::type> - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector - Queue::getCheckpointData2NV( CheckpointData2NVAllocator & checkpointData2NVAllocator, Dispatch const & d ) const - { - VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); -# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) - VULKAN_HPP_ASSERT( d.vkGetQueueCheckpointData2NV && "Function requires " ); -# endif - - std::vector checkpointData( checkpointData2NVAllocator ); - uint32_t checkpointDataCount; - d.vkGetQueueCheckpointData2NV( m_queue, &checkpointDataCount, nullptr ); - checkpointData.resize( checkpointDataCount ); - d.vkGetQueueCheckpointData2NV( m_queue, &checkpointDataCount, reinterpret_cast( checkpointData.data() ) ); - - VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() ); - if ( checkpointDataCount < checkpointData.size() ) - { - checkpointData.resize( checkpointDataCount ); - } - return checkpointData; - } -#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - //=== VK_EXT_descriptor_buffer === template diff --git a/third_party/vulkan/vulkan_handles.hpp b/third_party/vulkan/vulkan_handles.hpp index 068656a..6ae2e5e 100644 --- a/third_party/vulkan/vulkan_handles.hpp +++ b/third_party/vulkan/vulkan_handles.hpp @@ -630,13 +630,6 @@ namespace VULKAN_HPP_NAMESPACE struct ShaderResourceUsageAMD; struct ShaderStatisticsInfoAMD; - //=== VK_KHR_dynamic_rendering === - struct RenderingFragmentShadingRateAttachmentInfoKHR; - struct RenderingFragmentDensityMapAttachmentInfoEXT; - struct AttachmentSampleCountInfoAMD; - using AttachmentSampleCountInfoNV = AttachmentSampleCountInfoAMD; - struct MultiviewPerViewAttributesInfoNVX; - #if defined( VK_USE_PLATFORM_GGP ) //=== VK_GGP_stream_descriptor_surface === struct 
StreamDescriptorSurfaceCreateInfoGGP; @@ -744,6 +737,7 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_NVX_multiview_per_view_attributes === struct PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX; + struct MultiviewPerViewAttributesInfoNVX; //=== VK_NV_viewport_swizzle === struct ViewportSwizzleNV; @@ -844,6 +838,10 @@ namespace VULKAN_HPP_NAMESPACE union DeviceOrHostAddressConstAMDX; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ + //=== VK_AMD_mixed_attachment_samples === + struct AttachmentSampleCountInfoAMD; + using AttachmentSampleCountInfoNV = AttachmentSampleCountInfoAMD; + //=== VK_EXT_sample_locations === struct SampleLocationEXT; struct SampleLocationsInfoEXT; @@ -1015,6 +1013,8 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_NV_device_diagnostic_checkpoints === struct QueueFamilyCheckpointPropertiesNV; struct CheckpointDataNV; + struct QueueFamilyCheckpointProperties2NV; + struct CheckpointData2NV; //=== VK_INTEL_shader_integer_functions2 === struct PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL; @@ -1051,6 +1051,7 @@ namespace VULKAN_HPP_NAMESPACE struct PhysicalDeviceFragmentDensityMapFeaturesEXT; struct PhysicalDeviceFragmentDensityMapPropertiesEXT; struct RenderPassFragmentDensityMapCreateInfoEXT; + struct RenderingFragmentDensityMapAttachmentInfoEXT; //=== VK_KHR_fragment_shading_rate === struct FragmentShadingRateAttachmentInfoKHR; @@ -1058,6 +1059,7 @@ namespace VULKAN_HPP_NAMESPACE struct PhysicalDeviceFragmentShadingRateFeaturesKHR; struct PhysicalDeviceFragmentShadingRatePropertiesKHR; struct PhysicalDeviceFragmentShadingRateKHR; + struct RenderingFragmentShadingRateAttachmentInfoKHR; //=== VK_AMD_shader_core_properties2 === struct PhysicalDeviceShaderCoreProperties2AMD; @@ -1286,10 +1288,6 @@ namespace VULKAN_HPP_NAMESPACE struct ImportMetalSharedEventInfoEXT; #endif /*VK_USE_PLATFORM_METAL_EXT*/ - //=== VK_KHR_synchronization2 === - struct QueueFamilyCheckpointProperties2NV; - struct CheckpointData2NV; - //=== VK_EXT_descriptor_buffer === struct PhysicalDeviceDescriptorBufferPropertiesEXT; struct PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT; @@ -6371,6 +6369,13 @@ namespace VULKAN_HPP_NAMESPACE uint32_t marker, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void writeBufferMarker2AMD( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, + uint32_t marker, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + //=== VK_NV_mesh_shader === template @@ -6687,13 +6692,6 @@ namespace VULKAN_HPP_NAMESPACE uint32_t query, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; - template - void writeBufferMarker2AMD( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage, - VULKAN_HPP_NAMESPACE::Buffer dstBuffer, - VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, - uint32_t marker, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; - //=== VK_EXT_descriptor_buffer === template @@ -10332,6 +10330,23 @@ namespace VULKAN_HPP_NAMESPACE getCheckpointDataNV( CheckpointDataNVAllocator & checkpointDataNVAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + template + void getCheckpointData2NV( uint32_t * pCheckpointDataCount, + VULKAN_HPP_NAMESPACE::CheckpointData2NV * pCheckpointData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; 
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + VULKAN_HPP_NODISCARD std::vector + getCheckpointData2NV( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template < + typename CheckpointData2NVAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD std::vector + getCheckpointData2NV( CheckpointData2NVAllocator & checkpointData2NVAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + //=== VK_INTEL_performance_query === #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -10360,23 +10375,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - template - void getCheckpointData2NV( uint32_t * pCheckpointDataCount, - VULKAN_HPP_NAMESPACE::CheckpointData2NV * pCheckpointData, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - VULKAN_HPP_NODISCARD std::vector - getCheckpointData2NV( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; - template < - typename CheckpointData2NVAllocator = std::allocator, - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, - typename std::enable_if::value, int>::type = 0> - VULKAN_HPP_NODISCARD std::vector - getCheckpointData2NV( CheckpointData2NVAllocator & checkpointData2NVAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - //=== VK_NV_low_latency2 === template diff --git a/third_party/vulkan/vulkan_raii.hpp b/third_party/vulkan/vulkan_raii.hpp index 9b06159..68f4f37 100644 --- a/third_party/vulkan/vulkan_raii.hpp +++ b/third_party/vulkan/vulkan_raii.hpp @@ -1205,7 +1205,8 @@ namespace VULKAN_HPP_NAMESPACE vkGetMemoryHostPointerPropertiesEXT = PFN_vkGetMemoryHostPointerPropertiesEXT( vkGetDeviceProcAddr( device, "vkGetMemoryHostPointerPropertiesEXT" ) ); //=== VK_AMD_buffer_marker === - vkCmdWriteBufferMarkerAMD = PFN_vkCmdWriteBufferMarkerAMD( vkGetDeviceProcAddr( device, "vkCmdWriteBufferMarkerAMD" ) ); + vkCmdWriteBufferMarkerAMD = PFN_vkCmdWriteBufferMarkerAMD( vkGetDeviceProcAddr( device, "vkCmdWriteBufferMarkerAMD" ) ); + vkCmdWriteBufferMarker2AMD = PFN_vkCmdWriteBufferMarker2AMD( vkGetDeviceProcAddr( device, "vkCmdWriteBufferMarker2AMD" ) ); //=== VK_EXT_calibrated_timestamps === vkGetCalibratedTimestampsEXT = PFN_vkGetCalibratedTimestampsEXT( vkGetDeviceProcAddr( device, "vkGetCalibratedTimestampsEXT" ) ); @@ -1222,8 +1223,9 @@ namespace VULKAN_HPP_NAMESPACE vkCmdSetExclusiveScissorNV = PFN_vkCmdSetExclusiveScissorNV( vkGetDeviceProcAddr( device, "vkCmdSetExclusiveScissorNV" ) ); //=== VK_NV_device_diagnostic_checkpoints === - vkCmdSetCheckpointNV = PFN_vkCmdSetCheckpointNV( vkGetDeviceProcAddr( device, "vkCmdSetCheckpointNV" ) ); - vkGetQueueCheckpointDataNV = PFN_vkGetQueueCheckpointDataNV( vkGetDeviceProcAddr( device, "vkGetQueueCheckpointDataNV" ) ); + vkCmdSetCheckpointNV = PFN_vkCmdSetCheckpointNV( vkGetDeviceProcAddr( device, "vkCmdSetCheckpointNV" ) ); + vkGetQueueCheckpointDataNV = PFN_vkGetQueueCheckpointDataNV( vkGetDeviceProcAddr( device, "vkGetQueueCheckpointDataNV" ) ); + vkGetQueueCheckpointData2NV = PFN_vkGetQueueCheckpointData2NV( 
vkGetDeviceProcAddr( device, "vkGetQueueCheckpointData2NV" ) ); //=== VK_KHR_timeline_semaphore === vkGetSemaphoreCounterValueKHR = PFN_vkGetSemaphoreCounterValueKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreCounterValueKHR" ) ); @@ -1436,8 +1438,6 @@ namespace VULKAN_HPP_NAMESPACE vkQueueSubmit2KHR = PFN_vkQueueSubmit2KHR( vkGetDeviceProcAddr( device, "vkQueueSubmit2KHR" ) ); if ( !vkQueueSubmit2 ) vkQueueSubmit2 = vkQueueSubmit2KHR; - vkCmdWriteBufferMarker2AMD = PFN_vkCmdWriteBufferMarker2AMD( vkGetDeviceProcAddr( device, "vkCmdWriteBufferMarker2AMD" ) ); - vkGetQueueCheckpointData2NV = PFN_vkGetQueueCheckpointData2NV( vkGetDeviceProcAddr( device, "vkGetQueueCheckpointData2NV" ) ); //=== VK_EXT_descriptor_buffer === vkGetDescriptorSetLayoutSizeEXT = PFN_vkGetDescriptorSetLayoutSizeEXT( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutSizeEXT" ) ); @@ -2220,7 +2220,8 @@ namespace VULKAN_HPP_NAMESPACE PFN_vkGetMemoryHostPointerPropertiesEXT vkGetMemoryHostPointerPropertiesEXT = 0; //=== VK_AMD_buffer_marker === - PFN_vkCmdWriteBufferMarkerAMD vkCmdWriteBufferMarkerAMD = 0; + PFN_vkCmdWriteBufferMarkerAMD vkCmdWriteBufferMarkerAMD = 0; + PFN_vkCmdWriteBufferMarker2AMD vkCmdWriteBufferMarker2AMD = 0; //=== VK_EXT_calibrated_timestamps === PFN_vkGetCalibratedTimestampsEXT vkGetCalibratedTimestampsEXT = 0; @@ -2235,8 +2236,9 @@ namespace VULKAN_HPP_NAMESPACE PFN_vkCmdSetExclusiveScissorNV vkCmdSetExclusiveScissorNV = 0; //=== VK_NV_device_diagnostic_checkpoints === - PFN_vkCmdSetCheckpointNV vkCmdSetCheckpointNV = 0; - PFN_vkGetQueueCheckpointDataNV vkGetQueueCheckpointDataNV = 0; + PFN_vkCmdSetCheckpointNV vkCmdSetCheckpointNV = 0; + PFN_vkGetQueueCheckpointDataNV vkGetQueueCheckpointDataNV = 0; + PFN_vkGetQueueCheckpointData2NV vkGetQueueCheckpointData2NV = 0; //=== VK_KHR_timeline_semaphore === PFN_vkGetSemaphoreCounterValueKHR vkGetSemaphoreCounterValueKHR = 0; @@ -2378,14 +2380,12 @@ namespace VULKAN_HPP_NAMESPACE # endif /*VK_USE_PLATFORM_METAL_EXT*/ //=== VK_KHR_synchronization2 === - PFN_vkCmdSetEvent2KHR vkCmdSetEvent2KHR = 0; - PFN_vkCmdResetEvent2KHR vkCmdResetEvent2KHR = 0; - PFN_vkCmdWaitEvents2KHR vkCmdWaitEvents2KHR = 0; - PFN_vkCmdPipelineBarrier2KHR vkCmdPipelineBarrier2KHR = 0; - PFN_vkCmdWriteTimestamp2KHR vkCmdWriteTimestamp2KHR = 0; - PFN_vkQueueSubmit2KHR vkQueueSubmit2KHR = 0; - PFN_vkCmdWriteBufferMarker2AMD vkCmdWriteBufferMarker2AMD = 0; - PFN_vkGetQueueCheckpointData2NV vkGetQueueCheckpointData2NV = 0; + PFN_vkCmdSetEvent2KHR vkCmdSetEvent2KHR = 0; + PFN_vkCmdResetEvent2KHR vkCmdResetEvent2KHR = 0; + PFN_vkCmdWaitEvents2KHR vkCmdWaitEvents2KHR = 0; + PFN_vkCmdPipelineBarrier2KHR vkCmdPipelineBarrier2KHR = 0; + PFN_vkCmdWriteTimestamp2KHR vkCmdWriteTimestamp2KHR = 0; + PFN_vkQueueSubmit2KHR vkQueueSubmit2KHR = 0; //=== VK_EXT_descriptor_buffer === PFN_vkGetDescriptorSetLayoutSizeEXT vkGetDescriptorSetLayoutSizeEXT = 0; @@ -6081,6 +6081,11 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, uint32_t marker ) const VULKAN_HPP_NOEXCEPT; + void writeBufferMarker2AMD( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, + uint32_t marker ) const VULKAN_HPP_NOEXCEPT; + //=== VK_NV_mesh_shader === void drawMeshTasksNV( uint32_t taskCount, uint32_t firstTask ) const VULKAN_HPP_NOEXCEPT; @@ -6211,11 +6216,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query ) const VULKAN_HPP_NOEXCEPT; - void writeBufferMarker2AMD( 
VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage, - VULKAN_HPP_NAMESPACE::Buffer dstBuffer, - VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, - uint32_t marker ) const VULKAN_HPP_NOEXCEPT; - //=== VK_EXT_descriptor_buffer === void bindDescriptorBuffersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & bindingInfos ) const @@ -10766,6 +10766,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD std::vector getCheckpointDataNV() const; + VULKAN_HPP_NODISCARD std::vector getCheckpointData2NV() const; + //=== VK_INTEL_performance_query === void setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration ) const; @@ -10775,8 +10777,6 @@ namespace VULKAN_HPP_NAMESPACE void submit2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy const & submits, VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const; - VULKAN_HPP_NODISCARD std::vector getCheckpointData2NV() const; - //=== VK_NV_low_latency2 === void notifyOutOfBandNV( const VULKAN_HPP_NAMESPACE::OutOfBandQueueTypeInfoNV & queueTypeInfo ) const VULKAN_HPP_NOEXCEPT; @@ -19653,6 +19653,20 @@ namespace VULKAN_HPP_NAMESPACE marker ); } + VULKAN_HPP_INLINE void CommandBuffer::writeBufferMarker2AMD( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, + uint32_t marker ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdWriteBufferMarker2AMD && "Function requires " ); + + getDispatcher()->vkCmdWriteBufferMarker2AMD( static_cast( m_commandBuffer ), + static_cast( stage ), + static_cast( dstBuffer ), + static_cast( dstOffset ), + marker ); + } + //=== VK_EXT_calibrated_timestamps === VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector PhysicalDevice::getCalibrateableTimeDomainsEXT() const @@ -19814,6 +19828,26 @@ namespace VULKAN_HPP_NAMESPACE return checkpointData; } + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector Queue::getCheckpointData2NV() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetQueueCheckpointData2NV && + "Function requires " ); + + std::vector checkpointData; + uint32_t checkpointDataCount; + getDispatcher()->vkGetQueueCheckpointData2NV( static_cast( m_queue ), &checkpointDataCount, nullptr ); + checkpointData.resize( checkpointDataCount ); + getDispatcher()->vkGetQueueCheckpointData2NV( + static_cast( m_queue ), &checkpointDataCount, reinterpret_cast( checkpointData.data() ) ); + + VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() ); + if ( checkpointDataCount < checkpointData.size() ) + { + checkpointData.resize( checkpointDataCount ); + } + return checkpointData; + } + //=== VK_KHR_timeline_semaphore === VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t Semaphore::getCounterValueKHR() const @@ -21264,39 +21298,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::submit2KHR" ); } - VULKAN_HPP_INLINE void CommandBuffer::writeBufferMarker2AMD( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage, - VULKAN_HPP_NAMESPACE::Buffer dstBuffer, - VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, - uint32_t marker ) const VULKAN_HPP_NOEXCEPT - { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdWriteBufferMarker2AMD && "Function requires " ); - - getDispatcher()->vkCmdWriteBufferMarker2AMD( static_cast( m_commandBuffer ), - static_cast( stage ), - static_cast( dstBuffer ), - static_cast( dstOffset ), - marker ); - } - - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector Queue::getCheckpointData2NV() const - { - 
VULKAN_HPP_ASSERT( getDispatcher()->vkGetQueueCheckpointData2NV && "Function requires " ); - - std::vector checkpointData; - uint32_t checkpointDataCount; - getDispatcher()->vkGetQueueCheckpointData2NV( static_cast( m_queue ), &checkpointDataCount, nullptr ); - checkpointData.resize( checkpointDataCount ); - getDispatcher()->vkGetQueueCheckpointData2NV( - static_cast( m_queue ), &checkpointDataCount, reinterpret_cast( checkpointData.data() ) ); - - VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() ); - if ( checkpointDataCount < checkpointData.size() ) - { - checkpointData.resize( checkpointDataCount ); - } - return checkpointData; - } - //=== VK_EXT_descriptor_buffer === VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceSize DescriptorSetLayout::getSizeEXT() const VULKAN_HPP_NOEXCEPT diff --git a/third_party/vulkan/vulkan_static_assertions.hpp b/third_party/vulkan/vulkan_static_assertions.hpp index 94b24c2..d8c323e 100644 --- a/third_party/vulkan/vulkan_static_assertions.hpp +++ b/third_party/vulkan/vulkan_static_assertions.hpp @@ -2540,36 +2540,6 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "ShaderStatisticsInfoAMD is not nothrow_move_constructible!" ); -//=== VK_KHR_dynamic_rendering === - -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderingFragmentShadingRateAttachmentInfoKHR ) == - sizeof( VkRenderingFragmentShadingRateAttachmentInfoKHR ), - "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, - "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "RenderingFragmentShadingRateAttachmentInfoKHR is not nothrow_move_constructible!" ); - -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderingFragmentDensityMapAttachmentInfoEXT ) == - sizeof( VkRenderingFragmentDensityMapAttachmentInfoEXT ), - "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, - "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "RenderingFragmentDensityMapAttachmentInfoEXT is not nothrow_move_constructible!" ); - -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AttachmentSampleCountInfoAMD ) == sizeof( VkAttachmentSampleCountInfoAMD ), - "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "AttachmentSampleCountInfoAMD is not nothrow_move_constructible!" ); - -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MultiviewPerViewAttributesInfoNVX ) == sizeof( VkMultiviewPerViewAttributesInfoNVX ), - "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "MultiviewPerViewAttributesInfoNVX is not nothrow_move_constructible!" ); - #if defined( VK_USE_PLATFORM_GGP ) //=== VK_GGP_stream_descriptor_surface === @@ -2924,6 +2894,12 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX is not nothrow_move_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MultiviewPerViewAttributesInfoNVX ) == sizeof( VkMultiviewPerViewAttributesInfoNVX ), + "struct and wrapper have different size!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "MultiviewPerViewAttributesInfoNVX is not nothrow_move_constructible!" ); + //=== VK_NV_viewport_swizzle === VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ViewportSwizzleNV ) == sizeof( VkViewportSwizzleNV ), "struct and wrapper have different size!" ); @@ -3326,6 +3302,14 @@ VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "AttachmentSampleCountInfoAMD is not nothrow_move_constructible!" ); + //=== VK_EXT_sample_locations === VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SampleLocationEXT ) == sizeof( VkSampleLocationEXT ), "struct and wrapper have different size!" ); @@ -4121,6 +4105,18 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "CheckpointDataNV is not nothrow_move_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointProperties2NV ) == sizeof( VkQueueFamilyCheckpointProperties2NV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "QueueFamilyCheckpointProperties2NV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CheckpointData2NV ) == sizeof( VkCheckpointData2NV ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "CheckpointData2NV is not nothrow_move_constructible!" ); + //=== VK_INTEL_shader_integer_functions2 === VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL ) == @@ -4260,6 +4256,14 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "RenderPassFragmentDensityMapCreateInfoEXT is not nothrow_move_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderingFragmentDensityMapAttachmentInfoEXT ) == + sizeof( VkRenderingFragmentDensityMapAttachmentInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "RenderingFragmentDensityMapAttachmentInfoEXT is not nothrow_move_constructible!" ); + //=== VK_KHR_fragment_shading_rate === VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::FragmentShadingRateAttachmentInfoKHR ) == sizeof( VkFragmentShadingRateAttachmentInfoKHR ), @@ -4300,6 +4304,14 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "PhysicalDeviceFragmentShadingRateKHR is not nothrow_move_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderingFragmentShadingRateAttachmentInfoKHR ) == + sizeof( VkRenderingFragmentShadingRateAttachmentInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "RenderingFragmentShadingRateAttachmentInfoKHR is not nothrow_move_constructible!" 
); + //=== VK_AMD_shader_core_properties2 === VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreProperties2AMD ) == sizeof( VkPhysicalDeviceShaderCoreProperties2AMD ), @@ -5287,20 +5299,6 @@ VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "QueueFamilyCheckpointProperties2NV is not nothrow_move_constructible!" ); - -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CheckpointData2NV ) == sizeof( VkCheckpointData2NV ), "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "CheckpointData2NV is not nothrow_move_constructible!" ); - //=== VK_EXT_descriptor_buffer === VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorBufferPropertiesEXT ) == diff --git a/third_party/vulkan/vulkan_structs.hpp b/third_party/vulkan/vulkan_structs.hpp index 953f481..a9d7102 100644 --- a/third_party/vulkan/vulkan_structs.hpp +++ b/third_party/vulkan/vulkan_structs.hpp @@ -51488,2941 +51488,6 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION - auto -# else - std::tuple -# endif - reflect() const VULKAN_HPP_NOEXCEPT - { - return std::tie( sType, pNext, type, info ); - } -#endif - - public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eIndirectExecutionSetCreateInfoEXT; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::IndirectExecutionSetInfoTypeEXT type = VULKAN_HPP_NAMESPACE::IndirectExecutionSetInfoTypeEXT::ePipelines; - VULKAN_HPP_NAMESPACE::IndirectExecutionSetInfoEXT info = {}; - }; - - template <> - struct CppType - { - using Type = IndirectExecutionSetCreateInfoEXT; - }; - - struct IndirectExecutionSetPipelineInfoEXT - { - using NativeType = VkIndirectExecutionSetPipelineInfoEXT; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eIndirectExecutionSetPipelineInfoEXT; - -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR IndirectExecutionSetPipelineInfoEXT( VULKAN_HPP_NAMESPACE::Pipeline initialPipeline_ = {}, - uint32_t maxPipelineCount_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext{ pNext_ } - , initialPipeline{ initialPipeline_ } - , maxPipelineCount{ maxPipelineCount_ } - { - } - - VULKAN_HPP_CONSTEXPR IndirectExecutionSetPipelineInfoEXT( IndirectExecutionSetPipelineInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - IndirectExecutionSetPipelineInfoEXT( VkIndirectExecutionSetPipelineInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : IndirectExecutionSetPipelineInfoEXT( *reinterpret_cast( &rhs ) ) - { - } - - IndirectExecutionSetPipelineInfoEXT & operator=( IndirectExecutionSetPipelineInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - IndirectExecutionSetPipelineInfoEXT & operator=( VkIndirectExecutionSetPipelineInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast( &rhs ); - return *this; - } - -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetPipelineInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 
IndirectExecutionSetPipelineInfoEXT & setInitialPipeline( VULKAN_HPP_NAMESPACE::Pipeline initialPipeline_ ) VULKAN_HPP_NOEXCEPT - { - initialPipeline = initialPipeline_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetPipelineInfoEXT & setMaxPipelineCount( uint32_t maxPipelineCount_ ) VULKAN_HPP_NOEXCEPT - { - maxPipelineCount = maxPipelineCount_; - return *this; - } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - - operator VkIndirectExecutionSetPipelineInfoEXT const &() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkIndirectExecutionSetPipelineInfoEXT &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION - auto -# else - std::tuple -# endif - reflect() const VULKAN_HPP_NOEXCEPT - { - return std::tie( sType, pNext, initialPipeline, maxPipelineCount ); - } -#endif - -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( IndirectExecutionSetPipelineInfoEXT const & ) const = default; -#else - bool operator==( IndirectExecutionSetPipelineInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT - { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( initialPipeline == rhs.initialPipeline ) && ( maxPipelineCount == rhs.maxPipelineCount ); -# endif - } - - bool operator!=( IndirectExecutionSetPipelineInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eIndirectExecutionSetPipelineInfoEXT; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::Pipeline initialPipeline = {}; - uint32_t maxPipelineCount = {}; - }; - - template <> - struct CppType - { - using Type = IndirectExecutionSetPipelineInfoEXT; - }; - - struct IndirectExecutionSetShaderLayoutInfoEXT - { - using NativeType = VkIndirectExecutionSetShaderLayoutInfoEXT; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eIndirectExecutionSetShaderLayoutInfoEXT; - -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR IndirectExecutionSetShaderLayoutInfoEXT( uint32_t setLayoutCount_ = {}, - const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext{ pNext_ } - , setLayoutCount{ setLayoutCount_ } - , pSetLayouts{ pSetLayouts_ } - { - } - - VULKAN_HPP_CONSTEXPR IndirectExecutionSetShaderLayoutInfoEXT( IndirectExecutionSetShaderLayoutInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - IndirectExecutionSetShaderLayoutInfoEXT( VkIndirectExecutionSetShaderLayoutInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : IndirectExecutionSetShaderLayoutInfoEXT( *reinterpret_cast( &rhs ) ) - { - } - -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - IndirectExecutionSetShaderLayoutInfoEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & setLayouts_, - const void * pNext_ = nullptr ) - : pNext( pNext_ ), setLayoutCount( static_cast( setLayouts_.size() ) ), pSetLayouts( setLayouts_.data() ) - { - } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - IndirectExecutionSetShaderLayoutInfoEXT & operator=( IndirectExecutionSetShaderLayoutInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - IndirectExecutionSetShaderLayoutInfoEXT & operator=( VkIndirectExecutionSetShaderLayoutInfoEXT const & 
rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast( &rhs ); - return *this; - } - -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetShaderLayoutInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetShaderLayoutInfoEXT & setSetLayoutCount( uint32_t setLayoutCount_ ) VULKAN_HPP_NOEXCEPT - { - setLayoutCount = setLayoutCount_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetShaderLayoutInfoEXT & - setPSetLayouts( const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts_ ) VULKAN_HPP_NOEXCEPT - { - pSetLayouts = pSetLayouts_; - return *this; - } - -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - IndirectExecutionSetShaderLayoutInfoEXT & - setSetLayouts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & setLayouts_ ) VULKAN_HPP_NOEXCEPT - { - setLayoutCount = static_cast( setLayouts_.size() ); - pSetLayouts = setLayouts_.data(); - return *this; - } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - - operator VkIndirectExecutionSetShaderLayoutInfoEXT const &() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkIndirectExecutionSetShaderLayoutInfoEXT &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION - auto -# else - std::tuple -# endif - reflect() const VULKAN_HPP_NOEXCEPT - { - return std::tie( sType, pNext, setLayoutCount, pSetLayouts ); - } -#endif - -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( IndirectExecutionSetShaderLayoutInfoEXT const & ) const = default; -#else - bool operator==( IndirectExecutionSetShaderLayoutInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT - { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( setLayoutCount == rhs.setLayoutCount ) && ( pSetLayouts == rhs.pSetLayouts ); -# endif - } - - bool operator!=( IndirectExecutionSetShaderLayoutInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eIndirectExecutionSetShaderLayoutInfoEXT; - const void * pNext = {}; - uint32_t setLayoutCount = {}; - const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts = {}; - }; - - template <> - struct CppType - { - using Type = IndirectExecutionSetShaderLayoutInfoEXT; - }; - - struct IndirectExecutionSetShaderInfoEXT - { - using NativeType = VkIndirectExecutionSetShaderInfoEXT; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eIndirectExecutionSetShaderInfoEXT; - -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR IndirectExecutionSetShaderInfoEXT( uint32_t shaderCount_ = {}, - const VULKAN_HPP_NAMESPACE::ShaderEXT * pInitialShaders_ = {}, - const VULKAN_HPP_NAMESPACE::IndirectExecutionSetShaderLayoutInfoEXT * pSetLayoutInfos_ = {}, - uint32_t maxShaderCount_ = {}, - uint32_t pushConstantRangeCount_ = {}, - const VULKAN_HPP_NAMESPACE::PushConstantRange * pPushConstantRanges_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext{ pNext_ } - , shaderCount{ shaderCount_ } - , pInitialShaders{ pInitialShaders_ } - , pSetLayoutInfos{ pSetLayoutInfos_ } - , maxShaderCount{ maxShaderCount_ } - , 
pushConstantRangeCount{ pushConstantRangeCount_ } - , pPushConstantRanges{ pPushConstantRanges_ } - { - } - - VULKAN_HPP_CONSTEXPR IndirectExecutionSetShaderInfoEXT( IndirectExecutionSetShaderInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - IndirectExecutionSetShaderInfoEXT( VkIndirectExecutionSetShaderInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : IndirectExecutionSetShaderInfoEXT( *reinterpret_cast( &rhs ) ) - { - } - -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - IndirectExecutionSetShaderInfoEXT( - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & initialShaders_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & setLayoutInfos_ = {}, - uint32_t maxShaderCount_ = {}, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pushConstantRanges_ = {}, - const void * pNext_ = nullptr ) - : pNext( pNext_ ) - , shaderCount( static_cast( initialShaders_.size() ) ) - , pInitialShaders( initialShaders_.data() ) - , pSetLayoutInfos( setLayoutInfos_.data() ) - , maxShaderCount( maxShaderCount_ ) - , pushConstantRangeCount( static_cast( pushConstantRanges_.size() ) ) - , pPushConstantRanges( pushConstantRanges_.data() ) - { -# ifdef VULKAN_HPP_NO_EXCEPTIONS - VULKAN_HPP_ASSERT( setLayoutInfos_.empty() || ( initialShaders_.size() == setLayoutInfos_.size() ) ); -# else - if ( !setLayoutInfos_.empty() && ( initialShaders_.size() != setLayoutInfos_.size() ) ) - { - throw LogicError( - VULKAN_HPP_NAMESPACE_STRING - "::IndirectExecutionSetShaderInfoEXT::IndirectExecutionSetShaderInfoEXT: !setLayoutInfos_.empty() && ( initialShaders_.size() != setLayoutInfos_.size() )" ); - } -# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ - } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - IndirectExecutionSetShaderInfoEXT & operator=( IndirectExecutionSetShaderInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - IndirectExecutionSetShaderInfoEXT & operator=( VkIndirectExecutionSetShaderInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast( &rhs ); - return *this; - } - -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetShaderInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetShaderInfoEXT & setShaderCount( uint32_t shaderCount_ ) VULKAN_HPP_NOEXCEPT - { - shaderCount = shaderCount_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetShaderInfoEXT & - setPInitialShaders( const VULKAN_HPP_NAMESPACE::ShaderEXT * pInitialShaders_ ) VULKAN_HPP_NOEXCEPT - { - pInitialShaders = pInitialShaders_; - return *this; - } - -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - IndirectExecutionSetShaderInfoEXT & - setInitialShaders( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & initialShaders_ ) VULKAN_HPP_NOEXCEPT - { - shaderCount = static_cast( initialShaders_.size() ); - pInitialShaders = initialShaders_.data(); - return *this; - } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetShaderInfoEXT & - setPSetLayoutInfos( const VULKAN_HPP_NAMESPACE::IndirectExecutionSetShaderLayoutInfoEXT * pSetLayoutInfos_ ) VULKAN_HPP_NOEXCEPT - { - pSetLayoutInfos = pSetLayoutInfos_; - return *this; - } - -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - IndirectExecutionSetShaderInfoEXT & setSetLayoutInfos( - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & setLayoutInfos_ ) - VULKAN_HPP_NOEXCEPT - { - shaderCount = static_cast( 
setLayoutInfos_.size() ); - pSetLayoutInfos = setLayoutInfos_.data(); - return *this; - } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetShaderInfoEXT & setMaxShaderCount( uint32_t maxShaderCount_ ) VULKAN_HPP_NOEXCEPT - { - maxShaderCount = maxShaderCount_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetShaderInfoEXT & setPushConstantRangeCount( uint32_t pushConstantRangeCount_ ) VULKAN_HPP_NOEXCEPT - { - pushConstantRangeCount = pushConstantRangeCount_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetShaderInfoEXT & - setPPushConstantRanges( const VULKAN_HPP_NAMESPACE::PushConstantRange * pPushConstantRanges_ ) VULKAN_HPP_NOEXCEPT - { - pPushConstantRanges = pPushConstantRanges_; - return *this; - } - -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - IndirectExecutionSetShaderInfoEXT & setPushConstantRanges( - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pushConstantRanges_ ) VULKAN_HPP_NOEXCEPT - { - pushConstantRangeCount = static_cast( pushConstantRanges_.size() ); - pPushConstantRanges = pushConstantRanges_.data(); - return *this; - } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - - operator VkIndirectExecutionSetShaderInfoEXT const &() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkIndirectExecutionSetShaderInfoEXT &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION - auto -# else - std::tuple -# endif - reflect() const VULKAN_HPP_NOEXCEPT - { - return std::tie( sType, pNext, shaderCount, pInitialShaders, pSetLayoutInfos, maxShaderCount, pushConstantRangeCount, pPushConstantRanges ); - } -#endif - -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( IndirectExecutionSetShaderInfoEXT const & ) const = default; -#else - bool operator==( IndirectExecutionSetShaderInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT - { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderCount == rhs.shaderCount ) && ( pInitialShaders == rhs.pInitialShaders ) && - ( pSetLayoutInfos == rhs.pSetLayoutInfos ) && ( maxShaderCount == rhs.maxShaderCount ) && - ( pushConstantRangeCount == rhs.pushConstantRangeCount ) && ( pPushConstantRanges == rhs.pPushConstantRanges ); -# endif - } - - bool operator!=( IndirectExecutionSetShaderInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eIndirectExecutionSetShaderInfoEXT; - const void * pNext = {}; - uint32_t shaderCount = {}; - const VULKAN_HPP_NAMESPACE::ShaderEXT * pInitialShaders = {}; - const VULKAN_HPP_NAMESPACE::IndirectExecutionSetShaderLayoutInfoEXT * pSetLayoutInfos = {}; - uint32_t maxShaderCount = {}; - uint32_t pushConstantRangeCount = {}; - const VULKAN_HPP_NAMESPACE::PushConstantRange * pPushConstantRanges = {}; - }; - - template <> - struct CppType - { - using Type = IndirectExecutionSetShaderInfoEXT; - }; - - union IndirectExecutionSetInfoEXT - { - using NativeType = VkIndirectExecutionSetInfoEXT; -#if !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS ) - - VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetInfoEXT( const VULKAN_HPP_NAMESPACE::IndirectExecutionSetPipelineInfoEXT * pPipelineInfo_ = {} ) - : pPipelineInfo( pPipelineInfo_ ) - { - } - - 
VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetInfoEXT( const VULKAN_HPP_NAMESPACE::IndirectExecutionSetShaderInfoEXT * pShaderInfo_ ) - : pShaderInfo( pShaderInfo_ ) - { - } -#endif /*VULKAN_HPP_NO_UNION_CONSTRUCTORS*/ - -#if !defined( VULKAN_HPP_NO_UNION_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetInfoEXT & - setPPipelineInfo( const VULKAN_HPP_NAMESPACE::IndirectExecutionSetPipelineInfoEXT * pPipelineInfo_ ) VULKAN_HPP_NOEXCEPT - { - pPipelineInfo = pPipelineInfo_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetInfoEXT & - setPShaderInfo( const VULKAN_HPP_NAMESPACE::IndirectExecutionSetShaderInfoEXT * pShaderInfo_ ) VULKAN_HPP_NOEXCEPT - { - pShaderInfo = pShaderInfo_; - return *this; - } -#endif /*VULKAN_HPP_NO_UNION_SETTERS*/ - - operator VkIndirectExecutionSetInfoEXT const &() const - { - return *reinterpret_cast( this ); - } - - operator VkIndirectExecutionSetInfoEXT &() - { - return *reinterpret_cast( this ); - } - -#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS - const VULKAN_HPP_NAMESPACE::IndirectExecutionSetPipelineInfoEXT * pPipelineInfo; - const VULKAN_HPP_NAMESPACE::IndirectExecutionSetShaderInfoEXT * pShaderInfo; -#else - const VkIndirectExecutionSetPipelineInfoEXT * pPipelineInfo; - const VkIndirectExecutionSetShaderInfoEXT * pShaderInfo; -#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/ - }; - -
struct IndirectExecutionSetCreateInfoEXT - { - using NativeType = VkIndirectExecutionSetCreateInfoEXT; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eIndirectExecutionSetCreateInfoEXT; - -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetCreateInfoEXT( - VULKAN_HPP_NAMESPACE::IndirectExecutionSetInfoTypeEXT type_ = VULKAN_HPP_NAMESPACE::IndirectExecutionSetInfoTypeEXT::ePipelines, - VULKAN_HPP_NAMESPACE::IndirectExecutionSetInfoEXT info_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext{ pNext_ } - , type{ type_ } - , info{ info_ } - { - } - - VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetCreateInfoEXT( IndirectExecutionSetCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - IndirectExecutionSetCreateInfoEXT( VkIndirectExecutionSetCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : IndirectExecutionSetCreateInfoEXT( *reinterpret_cast( &rhs ) ) - { - } - - IndirectExecutionSetCreateInfoEXT & operator=( IndirectExecutionSetCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; -#endif
/*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - IndirectExecutionSetCreateInfoEXT & operator=( VkIndirectExecutionSetCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast( &rhs ); - return *this; - } - -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetCreateInfoEXT & setType( VULKAN_HPP_NAMESPACE::IndirectExecutionSetInfoTypeEXT type_ ) VULKAN_HPP_NOEXCEPT - { - type = type_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetCreateInfoEXT & setInfo( VULKAN_HPP_NAMESPACE::IndirectExecutionSetInfoEXT const & info_ ) VULKAN_HPP_NOEXCEPT - { - info = info_; - return *this; - } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - - operator VkIndirectExecutionSetCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkIndirectExecutionSetCreateInfoEXT &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION - auto -# else - std::tuple -# endif - reflect() const VULKAN_HPP_NOEXCEPT - { - return std::tie( sType, pNext, type, info ); - } -#endif - - public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eIndirectExecutionSetCreateInfoEXT; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::IndirectExecutionSetInfoTypeEXT type = VULKAN_HPP_NAMESPACE::IndirectExecutionSetInfoTypeEXT::ePipelines; - VULKAN_HPP_NAMESPACE::IndirectExecutionSetInfoEXT info = {}; - }; - - template <> - struct CppType - { - using Type = IndirectExecutionSetCreateInfoEXT; - }; - - struct IndirectExecutionSetPipelineInfoEXT - { - using NativeType = VkIndirectExecutionSetPipelineInfoEXT; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eIndirectExecutionSetPipelineInfoEXT; - -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR IndirectExecutionSetPipelineInfoEXT( VULKAN_HPP_NAMESPACE::Pipeline initialPipeline_ = {}, - uint32_t maxPipelineCount_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext{ pNext_ } - , initialPipeline{ initialPipeline_ } - , maxPipelineCount{ maxPipelineCount_ } - { - } - - VULKAN_HPP_CONSTEXPR IndirectExecutionSetPipelineInfoEXT( IndirectExecutionSetPipelineInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - IndirectExecutionSetPipelineInfoEXT( VkIndirectExecutionSetPipelineInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : IndirectExecutionSetPipelineInfoEXT( *reinterpret_cast( &rhs ) ) - { - } - - IndirectExecutionSetPipelineInfoEXT & operator=( IndirectExecutionSetPipelineInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - IndirectExecutionSetPipelineInfoEXT & operator=( VkIndirectExecutionSetPipelineInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast( &rhs ); - return *this; - } - -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetPipelineInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetPipelineInfoEXT & setInitialPipeline( VULKAN_HPP_NAMESPACE::Pipeline initialPipeline_ ) VULKAN_HPP_NOEXCEPT - { - initialPipeline = initialPipeline_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 
IndirectExecutionSetPipelineInfoEXT & setMaxPipelineCount( uint32_t maxPipelineCount_ ) VULKAN_HPP_NOEXCEPT - { - maxPipelineCount = maxPipelineCount_; - return *this; - } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - - operator VkIndirectExecutionSetPipelineInfoEXT const &() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkIndirectExecutionSetPipelineInfoEXT &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION - auto -# else - std::tuple -# endif - reflect() const VULKAN_HPP_NOEXCEPT - { - return std::tie( sType, pNext, initialPipeline, maxPipelineCount ); - } -#endif - -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( IndirectExecutionSetPipelineInfoEXT const & ) const = default; -#else - bool operator==( IndirectExecutionSetPipelineInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT - { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( initialPipeline == rhs.initialPipeline ) && ( maxPipelineCount == rhs.maxPipelineCount ); -# endif - } - - bool operator!=( IndirectExecutionSetPipelineInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eIndirectExecutionSetPipelineInfoEXT; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::Pipeline initialPipeline = {}; - uint32_t maxPipelineCount = {}; - }; - - template <> - struct CppType - { - using Type = IndirectExecutionSetPipelineInfoEXT; - }; - - struct IndirectExecutionSetShaderLayoutInfoEXT - { - using NativeType = VkIndirectExecutionSetShaderLayoutInfoEXT; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eIndirectExecutionSetShaderLayoutInfoEXT; - -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR IndirectExecutionSetShaderLayoutInfoEXT( uint32_t setLayoutCount_ = {}, - const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext{ pNext_ } - , setLayoutCount{ setLayoutCount_ } - , pSetLayouts{ pSetLayouts_ } - { - } - - VULKAN_HPP_CONSTEXPR IndirectExecutionSetShaderLayoutInfoEXT( IndirectExecutionSetShaderLayoutInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - IndirectExecutionSetShaderLayoutInfoEXT( VkIndirectExecutionSetShaderLayoutInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : IndirectExecutionSetShaderLayoutInfoEXT( *reinterpret_cast( &rhs ) ) - { - } - -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - IndirectExecutionSetShaderLayoutInfoEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & setLayouts_, - const void * pNext_ = nullptr ) - : pNext( pNext_ ), setLayoutCount( static_cast( setLayouts_.size() ) ), pSetLayouts( setLayouts_.data() ) - { - } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - IndirectExecutionSetShaderLayoutInfoEXT & operator=( IndirectExecutionSetShaderLayoutInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - IndirectExecutionSetShaderLayoutInfoEXT & operator=( VkIndirectExecutionSetShaderLayoutInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast( &rhs ); - return *this; - } - -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetShaderLayoutInfoEXT & setPNext( const 
void * pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetShaderLayoutInfoEXT & setSetLayoutCount( uint32_t setLayoutCount_ ) VULKAN_HPP_NOEXCEPT - { - setLayoutCount = setLayoutCount_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetShaderLayoutInfoEXT & - setPSetLayouts( const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts_ ) VULKAN_HPP_NOEXCEPT - { - pSetLayouts = pSetLayouts_; - return *this; - } - -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - IndirectExecutionSetShaderLayoutInfoEXT & - setSetLayouts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & setLayouts_ ) VULKAN_HPP_NOEXCEPT - { - setLayoutCount = static_cast( setLayouts_.size() ); - pSetLayouts = setLayouts_.data(); - return *this; - } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - - operator VkIndirectExecutionSetShaderLayoutInfoEXT const &() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkIndirectExecutionSetShaderLayoutInfoEXT &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION - auto -# else - std::tuple -# endif - reflect() const VULKAN_HPP_NOEXCEPT - { - return std::tie( sType, pNext, setLayoutCount, pSetLayouts ); - } -#endif - -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( IndirectExecutionSetShaderLayoutInfoEXT const & ) const = default; -#else - bool operator==( IndirectExecutionSetShaderLayoutInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT - { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( setLayoutCount == rhs.setLayoutCount ) && ( pSetLayouts == rhs.pSetLayouts ); -# endif - } - - bool operator!=( IndirectExecutionSetShaderLayoutInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eIndirectExecutionSetShaderLayoutInfoEXT; - const void * pNext = {}; - uint32_t setLayoutCount = {}; - const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts = {}; - }; - - template <> - struct CppType - { - using Type = IndirectExecutionSetShaderLayoutInfoEXT; - }; - - struct IndirectExecutionSetShaderInfoEXT - { - using NativeType = VkIndirectExecutionSetShaderInfoEXT; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eIndirectExecutionSetShaderInfoEXT; - -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR IndirectExecutionSetShaderInfoEXT( uint32_t shaderCount_ = {}, - const VULKAN_HPP_NAMESPACE::ShaderEXT * pInitialShaders_ = {}, - const VULKAN_HPP_NAMESPACE::IndirectExecutionSetShaderLayoutInfoEXT * pSetLayoutInfos_ = {}, - uint32_t maxShaderCount_ = {}, - uint32_t pushConstantRangeCount_ = {}, - const VULKAN_HPP_NAMESPACE::PushConstantRange * pPushConstantRanges_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext{ pNext_ } - , shaderCount{ shaderCount_ } - , pInitialShaders{ pInitialShaders_ } - , pSetLayoutInfos{ pSetLayoutInfos_ } - , maxShaderCount{ maxShaderCount_ } - , pushConstantRangeCount{ pushConstantRangeCount_ } - , pPushConstantRanges{ pPushConstantRanges_ } - { - } - - VULKAN_HPP_CONSTEXPR IndirectExecutionSetShaderInfoEXT( IndirectExecutionSetShaderInfoEXT const & rhs ) 
VULKAN_HPP_NOEXCEPT = default; - - IndirectExecutionSetShaderInfoEXT( VkIndirectExecutionSetShaderInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : IndirectExecutionSetShaderInfoEXT( *reinterpret_cast( &rhs ) ) - { - } - -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - IndirectExecutionSetShaderInfoEXT( - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & initialShaders_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & setLayoutInfos_ = {}, - uint32_t maxShaderCount_ = {}, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pushConstantRanges_ = {}, - const void * pNext_ = nullptr ) - : pNext( pNext_ ) - , shaderCount( static_cast( initialShaders_.size() ) ) - , pInitialShaders( initialShaders_.data() ) - , pSetLayoutInfos( setLayoutInfos_.data() ) - , maxShaderCount( maxShaderCount_ ) - , pushConstantRangeCount( static_cast( pushConstantRanges_.size() ) ) - , pPushConstantRanges( pushConstantRanges_.data() ) - { -# ifdef VULKAN_HPP_NO_EXCEPTIONS - VULKAN_HPP_ASSERT( setLayoutInfos_.empty() || ( initialShaders_.size() == setLayoutInfos_.size() ) ); -# else - if ( !setLayoutInfos_.empty() && ( initialShaders_.size() != setLayoutInfos_.size() ) ) - { - throw LogicError( - VULKAN_HPP_NAMESPACE_STRING - "::IndirectExecutionSetShaderInfoEXT::IndirectExecutionSetShaderInfoEXT: !setLayoutInfos_.empty() && ( initialShaders_.size() != setLayoutInfos_.size() )" ); - } -# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ - } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - IndirectExecutionSetShaderInfoEXT & operator=( IndirectExecutionSetShaderInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - IndirectExecutionSetShaderInfoEXT & operator=( VkIndirectExecutionSetShaderInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast( &rhs ); - return *this; - } - -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetShaderInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetShaderInfoEXT & setShaderCount( uint32_t shaderCount_ ) VULKAN_HPP_NOEXCEPT - { - shaderCount = shaderCount_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetShaderInfoEXT & - setPInitialShaders( const VULKAN_HPP_NAMESPACE::ShaderEXT * pInitialShaders_ ) VULKAN_HPP_NOEXCEPT - { - pInitialShaders = pInitialShaders_; - return *this; - } - -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - IndirectExecutionSetShaderInfoEXT & - setInitialShaders( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & initialShaders_ ) VULKAN_HPP_NOEXCEPT - { - shaderCount = static_cast( initialShaders_.size() ); - pInitialShaders = initialShaders_.data(); - return *this; - } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetShaderInfoEXT & - setPSetLayoutInfos( const VULKAN_HPP_NAMESPACE::IndirectExecutionSetShaderLayoutInfoEXT * pSetLayoutInfos_ ) VULKAN_HPP_NOEXCEPT - { - pSetLayoutInfos = pSetLayoutInfos_; - return *this; - } - -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - IndirectExecutionSetShaderInfoEXT & setSetLayoutInfos( - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & setLayoutInfos_ ) - VULKAN_HPP_NOEXCEPT - { - shaderCount = static_cast( setLayoutInfos_.size() ); - pSetLayoutInfos = setLayoutInfos_.data(); - return *this; - } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetShaderInfoEXT & setMaxShaderCount( uint32_t 
maxShaderCount_ ) VULKAN_HPP_NOEXCEPT - { - maxShaderCount = maxShaderCount_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetShaderInfoEXT & setPushConstantRangeCount( uint32_t pushConstantRangeCount_ ) VULKAN_HPP_NOEXCEPT - { - pushConstantRangeCount = pushConstantRangeCount_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetShaderInfoEXT & - setPPushConstantRanges( const VULKAN_HPP_NAMESPACE::PushConstantRange * pPushConstantRanges_ ) VULKAN_HPP_NOEXCEPT - { - pPushConstantRanges = pPushConstantRanges_; - return *this; - } - -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - IndirectExecutionSetShaderInfoEXT & setPushConstantRanges( - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pushConstantRanges_ ) VULKAN_HPP_NOEXCEPT - { - pushConstantRangeCount = static_cast( pushConstantRanges_.size() ); - pPushConstantRanges = pushConstantRanges_.data(); - return *this; - } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - - operator VkIndirectExecutionSetShaderInfoEXT const &() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkIndirectExecutionSetShaderInfoEXT &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION - auto -# else - std::tuple -# endif - reflect() const VULKAN_HPP_NOEXCEPT - { - return std::tie( sType, pNext, shaderCount, pInitialShaders, pSetLayoutInfos, maxShaderCount, pushConstantRangeCount, pPushConstantRanges ); - } -#endif - -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( IndirectExecutionSetShaderInfoEXT const & ) const = default; -#else - bool operator==( IndirectExecutionSetShaderInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT - { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderCount == rhs.shaderCount ) && ( pInitialShaders == rhs.pInitialShaders ) && - ( pSetLayoutInfos == rhs.pSetLayoutInfos ) && ( maxShaderCount == rhs.maxShaderCount ) && - ( pushConstantRangeCount == rhs.pushConstantRangeCount ) && ( pPushConstantRanges == rhs.pPushConstantRanges ); -# endif - } - - bool operator!=( IndirectExecutionSetShaderInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eIndirectExecutionSetShaderInfoEXT; - const void * pNext = {}; - uint32_t shaderCount = {}; - const VULKAN_HPP_NAMESPACE::ShaderEXT * pInitialShaders = {}; - const VULKAN_HPP_NAMESPACE::IndirectExecutionSetShaderLayoutInfoEXT * pSetLayoutInfos = {}; - uint32_t maxShaderCount = {}; - uint32_t pushConstantRangeCount = {}; - const VULKAN_HPP_NAMESPACE::PushConstantRange * pPushConstantRanges = {}; - }; - - template <> - struct CppType - { - using Type = IndirectExecutionSetShaderInfoEXT; - }; - - union IndirectExecutionSetInfoEXT - { - using NativeType = VkIndirectExecutionSetInfoEXT; -#if !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS ) - - VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetInfoEXT( const VULKAN_HPP_NAMESPACE::IndirectExecutionSetPipelineInfoEXT * pPipelineInfo_ = {} ) - : pPipelineInfo( pPipelineInfo_ ) - { - } - - VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetInfoEXT( const VULKAN_HPP_NAMESPACE::IndirectExecutionSetShaderInfoEXT * pShaderInfo_ ) - : pShaderInfo( pShaderInfo_ ) - { - } -#endif /*VULKAN_HPP_NO_UNION_CONSTRUCTORS*/ - -#if !defined( 
VULKAN_HPP_NO_UNION_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetInfoEXT & - setPPipelineInfo( const VULKAN_HPP_NAMESPACE::IndirectExecutionSetPipelineInfoEXT * pPipelineInfo_ ) VULKAN_HPP_NOEXCEPT - { - pPipelineInfo = pPipelineInfo_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetInfoEXT & - setPShaderInfo( const VULKAN_HPP_NAMESPACE::IndirectExecutionSetShaderInfoEXT * pShaderInfo_ ) VULKAN_HPP_NOEXCEPT - { - pShaderInfo = pShaderInfo_; - return *this; - } -#endif /*VULKAN_HPP_NO_UNION_SETTERS*/ - - operator VkIndirectExecutionSetInfoEXT const &() const - { - return *reinterpret_cast( this ); - } - - operator VkIndirectExecutionSetInfoEXT &() - { - return *reinterpret_cast( this ); - } - -#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS - const VULKAN_HPP_NAMESPACE::IndirectExecutionSetPipelineInfoEXT * pPipelineInfo; - const VULKAN_HPP_NAMESPACE::IndirectExecutionSetShaderInfoEXT * pShaderInfo; -#else - const VkIndirectExecutionSetPipelineInfoEXT * pPipelineInfo; - const VkIndirectExecutionSetShaderInfoEXT * pShaderInfo; -#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/ - }; - - struct IndirectExecutionSetCreateInfoEXT - { - using NativeType = VkIndirectExecutionSetCreateInfoEXT; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eIndirectExecutionSetCreateInfoEXT; - -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetCreateInfoEXT( - VULKAN_HPP_NAMESPACE::IndirectExecutionSetInfoTypeEXT type_ = VULKAN_HPP_NAMESPACE::IndirectExecutionSetInfoTypeEXT::ePipelines, - VULKAN_HPP_NAMESPACE::IndirectExecutionSetInfoEXT info_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext{ pNext_ } - , type{ type_ } - , info{ info_ } - { - } - - VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetCreateInfoEXT( IndirectExecutionSetCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - IndirectExecutionSetCreateInfoEXT( VkIndirectExecutionSetCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : IndirectExecutionSetCreateInfoEXT( *reinterpret_cast( &rhs ) ) - { - } - - IndirectExecutionSetCreateInfoEXT & operator=( IndirectExecutionSetCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - IndirectExecutionSetCreateInfoEXT & operator=( VkIndirectExecutionSetCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast( &rhs ); - return *this; - } - -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetCreateInfoEXT & setType( VULKAN_HPP_NAMESPACE::IndirectExecutionSetInfoTypeEXT type_ ) VULKAN_HPP_NOEXCEPT - { - type = type_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetCreateInfoEXT & setInfo( VULKAN_HPP_NAMESPACE::IndirectExecutionSetInfoEXT const & info_ ) VULKAN_HPP_NOEXCEPT - { - info = info_; - return *this; - } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - - operator VkIndirectExecutionSetCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkIndirectExecutionSetCreateInfoEXT &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION - auto -# else - std::tuple -# endif - reflect() const VULKAN_HPP_NOEXCEPT - { - return 
std::tie( sType, pNext, type, info ); - } -#endif - - public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eIndirectExecutionSetCreateInfoEXT; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::IndirectExecutionSetInfoTypeEXT type = VULKAN_HPP_NAMESPACE::IndirectExecutionSetInfoTypeEXT::ePipelines; - VULKAN_HPP_NAMESPACE::IndirectExecutionSetInfoEXT info = {}; - }; - - template <> - struct CppType - { - using Type = IndirectExecutionSetCreateInfoEXT; - }; - - struct IndirectExecutionSetPipelineInfoEXT - { - using NativeType = VkIndirectExecutionSetPipelineInfoEXT; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eIndirectExecutionSetPipelineInfoEXT; - -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR IndirectExecutionSetPipelineInfoEXT( VULKAN_HPP_NAMESPACE::Pipeline initialPipeline_ = {}, - uint32_t maxPipelineCount_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext{ pNext_ } - , initialPipeline{ initialPipeline_ } - , maxPipelineCount{ maxPipelineCount_ } - { - } - - VULKAN_HPP_CONSTEXPR IndirectExecutionSetPipelineInfoEXT( IndirectExecutionSetPipelineInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - IndirectExecutionSetPipelineInfoEXT( VkIndirectExecutionSetPipelineInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : IndirectExecutionSetPipelineInfoEXT( *reinterpret_cast( &rhs ) ) - { - } - - IndirectExecutionSetPipelineInfoEXT & operator=( IndirectExecutionSetPipelineInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - IndirectExecutionSetPipelineInfoEXT & operator=( VkIndirectExecutionSetPipelineInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast( &rhs ); - return *this; - } - -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetPipelineInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetPipelineInfoEXT & setInitialPipeline( VULKAN_HPP_NAMESPACE::Pipeline initialPipeline_ ) VULKAN_HPP_NOEXCEPT - { - initialPipeline = initialPipeline_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetPipelineInfoEXT & setMaxPipelineCount( uint32_t maxPipelineCount_ ) VULKAN_HPP_NOEXCEPT - { - maxPipelineCount = maxPipelineCount_; - return *this; - } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - - operator VkIndirectExecutionSetPipelineInfoEXT const &() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkIndirectExecutionSetPipelineInfoEXT &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION - auto -# else - std::tuple -# endif - reflect() const VULKAN_HPP_NOEXCEPT - { - return std::tie( sType, pNext, initialPipeline, maxPipelineCount ); - } -#endif - -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( IndirectExecutionSetPipelineInfoEXT const & ) const = default; -#else - bool operator==( IndirectExecutionSetPipelineInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT - { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( initialPipeline == rhs.initialPipeline ) && ( maxPipelineCount == rhs.maxPipelineCount ); -# endif - } - - bool operator!=( IndirectExecutionSetPipelineInfoEXT const & rhs ) const 
VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eIndirectExecutionSetPipelineInfoEXT; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::Pipeline initialPipeline = {}; - uint32_t maxPipelineCount = {}; - }; - - template <> - struct CppType - { - using Type = IndirectExecutionSetPipelineInfoEXT; - }; - - struct IndirectExecutionSetShaderLayoutInfoEXT - { - using NativeType = VkIndirectExecutionSetShaderLayoutInfoEXT; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eIndirectExecutionSetShaderLayoutInfoEXT; - -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR IndirectExecutionSetShaderLayoutInfoEXT( uint32_t setLayoutCount_ = {}, - const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext{ pNext_ } - , setLayoutCount{ setLayoutCount_ } - , pSetLayouts{ pSetLayouts_ } - { - } - - VULKAN_HPP_CONSTEXPR IndirectExecutionSetShaderLayoutInfoEXT( IndirectExecutionSetShaderLayoutInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - IndirectExecutionSetShaderLayoutInfoEXT( VkIndirectExecutionSetShaderLayoutInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : IndirectExecutionSetShaderLayoutInfoEXT( *reinterpret_cast( &rhs ) ) - { - } - -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - IndirectExecutionSetShaderLayoutInfoEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & setLayouts_, - const void * pNext_ = nullptr ) - : pNext( pNext_ ), setLayoutCount( static_cast( setLayouts_.size() ) ), pSetLayouts( setLayouts_.data() ) - { - } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - IndirectExecutionSetShaderLayoutInfoEXT & operator=( IndirectExecutionSetShaderLayoutInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - IndirectExecutionSetShaderLayoutInfoEXT & operator=( VkIndirectExecutionSetShaderLayoutInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast( &rhs ); - return *this; - } - -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetShaderLayoutInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetShaderLayoutInfoEXT & setSetLayoutCount( uint32_t setLayoutCount_ ) VULKAN_HPP_NOEXCEPT - { - setLayoutCount = setLayoutCount_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetShaderLayoutInfoEXT & - setPSetLayouts( const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts_ ) VULKAN_HPP_NOEXCEPT - { - pSetLayouts = pSetLayouts_; - return *this; - } - -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - IndirectExecutionSetShaderLayoutInfoEXT & - setSetLayouts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & setLayouts_ ) VULKAN_HPP_NOEXCEPT - { - setLayoutCount = static_cast( setLayouts_.size() ); - pSetLayouts = setLayouts_.data(); - return *this; - } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - - operator VkIndirectExecutionSetShaderLayoutInfoEXT const &() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkIndirectExecutionSetShaderLayoutInfoEXT &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION - auto -# else - std::tuple -# endif 
- reflect() const VULKAN_HPP_NOEXCEPT - { - return std::tie( sType, pNext, setLayoutCount, pSetLayouts ); - } -#endif - -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( IndirectExecutionSetShaderLayoutInfoEXT const & ) const = default; -#else - bool operator==( IndirectExecutionSetShaderLayoutInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT - { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( setLayoutCount == rhs.setLayoutCount ) && ( pSetLayouts == rhs.pSetLayouts ); -# endif - } - - bool operator!=( IndirectExecutionSetShaderLayoutInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eIndirectExecutionSetShaderLayoutInfoEXT; - const void * pNext = {}; - uint32_t setLayoutCount = {}; - const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts = {}; - }; - - template <> - struct CppType - { - using Type = IndirectExecutionSetShaderLayoutInfoEXT; - }; - - struct IndirectExecutionSetShaderInfoEXT - { - using NativeType = VkIndirectExecutionSetShaderInfoEXT; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eIndirectExecutionSetShaderInfoEXT; - -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR IndirectExecutionSetShaderInfoEXT( uint32_t shaderCount_ = {}, - const VULKAN_HPP_NAMESPACE::ShaderEXT * pInitialShaders_ = {}, - const VULKAN_HPP_NAMESPACE::IndirectExecutionSetShaderLayoutInfoEXT * pSetLayoutInfos_ = {}, - uint32_t maxShaderCount_ = {}, - uint32_t pushConstantRangeCount_ = {}, - const VULKAN_HPP_NAMESPACE::PushConstantRange * pPushConstantRanges_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext{ pNext_ } - , shaderCount{ shaderCount_ } - , pInitialShaders{ pInitialShaders_ } - , pSetLayoutInfos{ pSetLayoutInfos_ } - , maxShaderCount{ maxShaderCount_ } - , pushConstantRangeCount{ pushConstantRangeCount_ } - , pPushConstantRanges{ pPushConstantRanges_ } - { - } - - VULKAN_HPP_CONSTEXPR IndirectExecutionSetShaderInfoEXT( IndirectExecutionSetShaderInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - IndirectExecutionSetShaderInfoEXT( VkIndirectExecutionSetShaderInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : IndirectExecutionSetShaderInfoEXT( *reinterpret_cast( &rhs ) ) - { - } - -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - IndirectExecutionSetShaderInfoEXT( - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & initialShaders_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & setLayoutInfos_ = {}, - uint32_t maxShaderCount_ = {}, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pushConstantRanges_ = {}, - const void * pNext_ = nullptr ) - : pNext( pNext_ ) - , shaderCount( static_cast( initialShaders_.size() ) ) - , pInitialShaders( initialShaders_.data() ) - , pSetLayoutInfos( setLayoutInfos_.data() ) - , maxShaderCount( maxShaderCount_ ) - , pushConstantRangeCount( static_cast( pushConstantRanges_.size() ) ) - , pPushConstantRanges( pushConstantRanges_.data() ) - { -# ifdef VULKAN_HPP_NO_EXCEPTIONS - VULKAN_HPP_ASSERT( setLayoutInfos_.empty() || ( initialShaders_.size() == setLayoutInfos_.size() ) ); -# else - if ( !setLayoutInfos_.empty() && ( initialShaders_.size() != setLayoutInfos_.size() ) ) - { - throw LogicError( - VULKAN_HPP_NAMESPACE_STRING - 
"::IndirectExecutionSetShaderInfoEXT::IndirectExecutionSetShaderInfoEXT: !setLayoutInfos_.empty() && ( initialShaders_.size() != setLayoutInfos_.size() )" ); - } -# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ - } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - IndirectExecutionSetShaderInfoEXT & operator=( IndirectExecutionSetShaderInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - IndirectExecutionSetShaderInfoEXT & operator=( VkIndirectExecutionSetShaderInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast( &rhs ); - return *this; - } - -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetShaderInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetShaderInfoEXT & setShaderCount( uint32_t shaderCount_ ) VULKAN_HPP_NOEXCEPT - { - shaderCount = shaderCount_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetShaderInfoEXT & - setPInitialShaders( const VULKAN_HPP_NAMESPACE::ShaderEXT * pInitialShaders_ ) VULKAN_HPP_NOEXCEPT - { - pInitialShaders = pInitialShaders_; - return *this; - } - -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - IndirectExecutionSetShaderInfoEXT & - setInitialShaders( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & initialShaders_ ) VULKAN_HPP_NOEXCEPT - { - shaderCount = static_cast( initialShaders_.size() ); - pInitialShaders = initialShaders_.data(); - return *this; - } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetShaderInfoEXT & - setPSetLayoutInfos( const VULKAN_HPP_NAMESPACE::IndirectExecutionSetShaderLayoutInfoEXT * pSetLayoutInfos_ ) VULKAN_HPP_NOEXCEPT - { - pSetLayoutInfos = pSetLayoutInfos_; - return *this; - } - -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - IndirectExecutionSetShaderInfoEXT & setSetLayoutInfos( - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & setLayoutInfos_ ) - VULKAN_HPP_NOEXCEPT - { - shaderCount = static_cast( setLayoutInfos_.size() ); - pSetLayoutInfos = setLayoutInfos_.data(); - return *this; - } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - - VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetShaderInfoEXT & setMaxShaderCount( uint32_t maxShaderCount_ ) VULKAN_HPP_NOEXCEPT - { - maxShaderCount = maxShaderCount_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetShaderInfoEXT & setPushConstantRangeCount( uint32_t pushConstantRangeCount_ ) VULKAN_HPP_NOEXCEPT - { - pushConstantRangeCount = pushConstantRangeCount_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetShaderInfoEXT & - setPPushConstantRanges( const VULKAN_HPP_NAMESPACE::PushConstantRange * pPushConstantRanges_ ) VULKAN_HPP_NOEXCEPT - { - pPushConstantRanges = pPushConstantRanges_; - return *this; - } - -# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - IndirectExecutionSetShaderInfoEXT & setPushConstantRanges( - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pushConstantRanges_ ) VULKAN_HPP_NOEXCEPT - { - pushConstantRangeCount = static_cast( pushConstantRanges_.size() ); - pPushConstantRanges = pushConstantRanges_.data(); - return *this; - } -# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - - operator VkIndirectExecutionSetShaderInfoEXT const &() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkIndirectExecutionSetShaderInfoEXT &() VULKAN_HPP_NOEXCEPT - { - return 
*reinterpret_cast( this ); - } - -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION - auto -# else - std::tuple -# endif - reflect() const VULKAN_HPP_NOEXCEPT - { - return std::tie( sType, pNext, shaderCount, pInitialShaders, pSetLayoutInfos, maxShaderCount, pushConstantRangeCount, pPushConstantRanges ); - } -#endif - -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( IndirectExecutionSetShaderInfoEXT const & ) const = default; -#else - bool operator==( IndirectExecutionSetShaderInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT - { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderCount == rhs.shaderCount ) && ( pInitialShaders == rhs.pInitialShaders ) && - ( pSetLayoutInfos == rhs.pSetLayoutInfos ) && ( maxShaderCount == rhs.maxShaderCount ) && - ( pushConstantRangeCount == rhs.pushConstantRangeCount ) && ( pPushConstantRanges == rhs.pPushConstantRanges ); -# endif - } - - bool operator!=( IndirectExecutionSetShaderInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eIndirectExecutionSetShaderInfoEXT; - const void * pNext = {}; - uint32_t shaderCount = {}; - const VULKAN_HPP_NAMESPACE::ShaderEXT * pInitialShaders = {}; - const VULKAN_HPP_NAMESPACE::IndirectExecutionSetShaderLayoutInfoEXT * pSetLayoutInfos = {}; - uint32_t maxShaderCount = {}; - uint32_t pushConstantRangeCount = {}; - const VULKAN_HPP_NAMESPACE::PushConstantRange * pPushConstantRanges = {}; - }; - - template <> - struct CppType - { - using Type = IndirectExecutionSetShaderInfoEXT; - }; - - union IndirectExecutionSetInfoEXT - { - using NativeType = VkIndirectExecutionSetInfoEXT; -#if !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS ) - - VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetInfoEXT( const VULKAN_HPP_NAMESPACE::IndirectExecutionSetPipelineInfoEXT * pPipelineInfo_ = {} ) - : pPipelineInfo( pPipelineInfo_ ) - { - } - - VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetInfoEXT( const VULKAN_HPP_NAMESPACE::IndirectExecutionSetShaderInfoEXT * pShaderInfo_ ) - : pShaderInfo( pShaderInfo_ ) - { - } -#endif /*VULKAN_HPP_NO_UNION_CONSTRUCTORS*/ - -#if !defined( VULKAN_HPP_NO_UNION_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetInfoEXT & - setPPipelineInfo( const VULKAN_HPP_NAMESPACE::IndirectExecutionSetPipelineInfoEXT * pPipelineInfo_ ) VULKAN_HPP_NOEXCEPT - { - pPipelineInfo = pPipelineInfo_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetInfoEXT & - setPShaderInfo( const VULKAN_HPP_NAMESPACE::IndirectExecutionSetShaderInfoEXT * pShaderInfo_ ) VULKAN_HPP_NOEXCEPT - { - pShaderInfo = pShaderInfo_; - return *this; - } -#endif /*VULKAN_HPP_NO_UNION_SETTERS*/ - - operator VkIndirectExecutionSetInfoEXT const &() const - { - return *reinterpret_cast( this ); - } - - operator VkIndirectExecutionSetInfoEXT &() - { - return *reinterpret_cast( this ); - } - -#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS - const VULKAN_HPP_NAMESPACE::IndirectExecutionSetPipelineInfoEXT * pPipelineInfo; - const VULKAN_HPP_NAMESPACE::IndirectExecutionSetShaderInfoEXT * pShaderInfo; -#else - const VkIndirectExecutionSetPipelineInfoEXT * pPipelineInfo; - const VkIndirectExecutionSetShaderInfoEXT * pShaderInfo; -#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/ - }; - - struct IndirectExecutionSetCreateInfoEXT - { - using NativeType = VkIndirectExecutionSetCreateInfoEXT; - - static const 
bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eIndirectExecutionSetCreateInfoEXT; - -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetCreateInfoEXT( - VULKAN_HPP_NAMESPACE::IndirectExecutionSetInfoTypeEXT type_ = VULKAN_HPP_NAMESPACE::IndirectExecutionSetInfoTypeEXT::ePipelines, - VULKAN_HPP_NAMESPACE::IndirectExecutionSetInfoEXT info_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext{ pNext_ } - , type{ type_ } - , info{ info_ } - { - } - - VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetCreateInfoEXT( IndirectExecutionSetCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - IndirectExecutionSetCreateInfoEXT( VkIndirectExecutionSetCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : IndirectExecutionSetCreateInfoEXT( *reinterpret_cast( &rhs ) ) - { - } - - IndirectExecutionSetCreateInfoEXT & operator=( IndirectExecutionSetCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - IndirectExecutionSetCreateInfoEXT & operator=( VkIndirectExecutionSetCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast( &rhs ); - return *this; - } - -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT - { - pNext = pNext_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetCreateInfoEXT & setType( VULKAN_HPP_NAMESPACE::IndirectExecutionSetInfoTypeEXT type_ ) VULKAN_HPP_NOEXCEPT - { - type = type_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetCreateInfoEXT & setInfo( VULKAN_HPP_NAMESPACE::IndirectExecutionSetInfoEXT const & info_ ) VULKAN_HPP_NOEXCEPT - { - info = info_; - return *this; - } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - - operator VkIndirectExecutionSetCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkIndirectExecutionSetCreateInfoEXT &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto diff --git a/third_party/vulkan/vulkan_to_string.hpp b/third_party/vulkan/vulkan_to_string.hpp index 7034d36..41300e9 100644 --- a/third_party/vulkan/vulkan_to_string.hpp +++ b/third_party/vulkan/vulkan_to_string.hpp @@ -749,10 +749,6 @@ namespace VULKAN_HPP_NAMESPACE result += "FailOnPipelineCompileRequired | "; if ( value & PipelineCreateFlagBits::eEarlyReturnOnFailure ) result += "EarlyReturnOnFailure | "; - if ( value & PipelineCreateFlagBits::eRenderingFragmentShadingRateAttachmentKHR ) - result += "RenderingFragmentShadingRateAttachmentKHR | "; - if ( value & PipelineCreateFlagBits::eRenderingFragmentDensityMapAttachmentEXT ) - result += "RenderingFragmentDensityMapAttachmentEXT | "; if ( value & PipelineCreateFlagBits::eRayTracingNoNullAnyHitShadersKHR ) result += "RayTracingNoNullAnyHitShadersKHR | "; if ( value & PipelineCreateFlagBits::eRayTracingNoNullClosestHitShadersKHR ) @@ -769,6 +765,10 @@ namespace VULKAN_HPP_NAMESPACE result += "RayTracingShaderGroupHandleCaptureReplayKHR | "; if ( value & PipelineCreateFlagBits::eDeferCompileNV ) result += "DeferCompileNV | "; + if ( value & PipelineCreateFlagBits::eRenderingFragmentDensityMapAttachmentEXT ) + result += "RenderingFragmentDensityMapAttachmentEXT | "; + if ( value & PipelineCreateFlagBits::eRenderingFragmentShadingRateAttachmentKHR ) + result 
+= "RenderingFragmentShadingRateAttachmentKHR | "; if ( value & PipelineCreateFlagBits::eCaptureStatisticsKHR ) result += "CaptureStatisticsKHR | "; if ( value & PipelineCreateFlagBits::eCaptureInternalRepresentationsKHR ) @@ -3972,10 +3972,6 @@ namespace VULKAN_HPP_NAMESPACE case StructureType::eVideoDecodeH264SessionParametersAddInfoKHR: return "VideoDecodeH264SessionParametersAddInfoKHR"; case StructureType::eVideoDecodeH264DpbSlotInfoKHR: return "VideoDecodeH264DpbSlotInfoKHR"; case StructureType::eTextureLodGatherFormatPropertiesAMD: return "TextureLodGatherFormatPropertiesAMD"; - case StructureType::eRenderingFragmentShadingRateAttachmentInfoKHR: return "RenderingFragmentShadingRateAttachmentInfoKHR"; - case StructureType::eRenderingFragmentDensityMapAttachmentInfoEXT: return "RenderingFragmentDensityMapAttachmentInfoEXT"; - case StructureType::eAttachmentSampleCountInfoAMD: return "AttachmentSampleCountInfoAMD"; - case StructureType::eMultiviewPerViewAttributesInfoNVX: return "MultiviewPerViewAttributesInfoNVX"; #if defined( VK_USE_PLATFORM_GGP ) case StructureType::eStreamDescriptorSurfaceCreateInfoGGP: return "StreamDescriptorSurfaceCreateInfoGGP"; #endif /*VK_USE_PLATFORM_GGP*/ @@ -4027,6 +4023,7 @@ namespace VULKAN_HPP_NAMESPACE case StructureType::eSwapchainCounterCreateInfoEXT: return "SwapchainCounterCreateInfoEXT"; case StructureType::ePresentTimesInfoGOOGLE: return "PresentTimesInfoGOOGLE"; case StructureType::ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX: return "PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX"; + case StructureType::eMultiviewPerViewAttributesInfoNVX: return "MultiviewPerViewAttributesInfoNVX"; case StructureType::ePipelineViewportSwizzleStateCreateInfoNV: return "PipelineViewportSwizzleStateCreateInfoNV"; case StructureType::ePhysicalDeviceDiscardRectanglePropertiesEXT: return "PhysicalDeviceDiscardRectanglePropertiesEXT"; case StructureType::ePipelineDiscardRectangleStateCreateInfoEXT: return "PipelineDiscardRectangleStateCreateInfoEXT"; @@ -4086,6 +4083,7 @@ namespace VULKAN_HPP_NAMESPACE case StructureType::eExecutionGraphPipelineCreateInfoAMDX: return "ExecutionGraphPipelineCreateInfoAMDX"; case StructureType::ePipelineShaderStageNodeCreateInfoAMDX: return "PipelineShaderStageNodeCreateInfoAMDX"; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ + case StructureType::eAttachmentSampleCountInfoAMD: return "AttachmentSampleCountInfoAMD"; case StructureType::eSampleLocationsInfoEXT: return "SampleLocationsInfoEXT"; case StructureType::eRenderPassSampleLocationsBeginInfoEXT: return "RenderPassSampleLocationsBeginInfoEXT"; case StructureType::ePipelineSampleLocationsStateCreateInfoEXT: return "PipelineSampleLocationsStateCreateInfoEXT"; @@ -4177,6 +4175,8 @@ namespace VULKAN_HPP_NAMESPACE case StructureType::ePhysicalDeviceExclusiveScissorFeaturesNV: return "PhysicalDeviceExclusiveScissorFeaturesNV"; case StructureType::eCheckpointDataNV: return "CheckpointDataNV"; case StructureType::eQueueFamilyCheckpointPropertiesNV: return "QueueFamilyCheckpointPropertiesNV"; + case StructureType::eQueueFamilyCheckpointProperties2NV: return "QueueFamilyCheckpointProperties2NV"; + case StructureType::eCheckpointData2NV: return "CheckpointData2NV"; case StructureType::ePhysicalDeviceShaderIntegerFunctions2FeaturesINTEL: return "PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL"; case StructureType::eQueryPoolPerformanceQueryCreateInfoINTEL: return "QueryPoolPerformanceQueryCreateInfoINTEL"; case StructureType::eInitializePerformanceApiInfoINTEL: return 
"InitializePerformanceApiInfoINTEL"; @@ -4196,11 +4196,13 @@ namespace VULKAN_HPP_NAMESPACE case StructureType::ePhysicalDeviceFragmentDensityMapFeaturesEXT: return "PhysicalDeviceFragmentDensityMapFeaturesEXT"; case StructureType::ePhysicalDeviceFragmentDensityMapPropertiesEXT: return "PhysicalDeviceFragmentDensityMapPropertiesEXT"; case StructureType::eRenderPassFragmentDensityMapCreateInfoEXT: return "RenderPassFragmentDensityMapCreateInfoEXT"; + case StructureType::eRenderingFragmentDensityMapAttachmentInfoEXT: return "RenderingFragmentDensityMapAttachmentInfoEXT"; case StructureType::eFragmentShadingRateAttachmentInfoKHR: return "FragmentShadingRateAttachmentInfoKHR"; case StructureType::ePipelineFragmentShadingRateStateCreateInfoKHR: return "PipelineFragmentShadingRateStateCreateInfoKHR"; case StructureType::ePhysicalDeviceFragmentShadingRatePropertiesKHR: return "PhysicalDeviceFragmentShadingRatePropertiesKHR"; case StructureType::ePhysicalDeviceFragmentShadingRateFeaturesKHR: return "PhysicalDeviceFragmentShadingRateFeaturesKHR"; case StructureType::ePhysicalDeviceFragmentShadingRateKHR: return "PhysicalDeviceFragmentShadingRateKHR"; + case StructureType::eRenderingFragmentShadingRateAttachmentInfoKHR: return "RenderingFragmentShadingRateAttachmentInfoKHR"; case StructureType::ePhysicalDeviceShaderCoreProperties2AMD: return "PhysicalDeviceShaderCoreProperties2AMD"; case StructureType::ePhysicalDeviceCoherentMemoryFeaturesAMD: return "PhysicalDeviceCoherentMemoryFeaturesAMD"; case StructureType::ePhysicalDeviceDynamicRenderingLocalReadFeaturesKHR: return "PhysicalDeviceDynamicRenderingLocalReadFeaturesKHR"; @@ -4332,8 +4334,6 @@ namespace VULKAN_HPP_NAMESPACE case StructureType::eExportMetalSharedEventInfoEXT: return "ExportMetalSharedEventInfoEXT"; case StructureType::eImportMetalSharedEventInfoEXT: return "ImportMetalSharedEventInfoEXT"; #endif /*VK_USE_PLATFORM_METAL_EXT*/ - case StructureType::eQueueFamilyCheckpointProperties2NV: return "QueueFamilyCheckpointProperties2NV"; - case StructureType::eCheckpointData2NV: return "CheckpointData2NV"; case StructureType::ePhysicalDeviceDescriptorBufferPropertiesEXT: return "PhysicalDeviceDescriptorBufferPropertiesEXT"; case StructureType::ePhysicalDeviceDescriptorBufferDensityMapPropertiesEXT: return "PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT"; case StructureType::ePhysicalDeviceDescriptorBufferFeaturesEXT: return "PhysicalDeviceDescriptorBufferFeaturesEXT"; @@ -5823,8 +5823,6 @@ namespace VULKAN_HPP_NAMESPACE case PipelineCreateFlagBits::eDispatchBase: return "DispatchBase"; case PipelineCreateFlagBits::eFailOnPipelineCompileRequired: return "FailOnPipelineCompileRequired"; case PipelineCreateFlagBits::eEarlyReturnOnFailure: return "EarlyReturnOnFailure"; - case PipelineCreateFlagBits::eRenderingFragmentShadingRateAttachmentKHR: return "RenderingFragmentShadingRateAttachmentKHR"; - case PipelineCreateFlagBits::eRenderingFragmentDensityMapAttachmentEXT: return "RenderingFragmentDensityMapAttachmentEXT"; case PipelineCreateFlagBits::eRayTracingNoNullAnyHitShadersKHR: return "RayTracingNoNullAnyHitShadersKHR"; case PipelineCreateFlagBits::eRayTracingNoNullClosestHitShadersKHR: return "RayTracingNoNullClosestHitShadersKHR"; case PipelineCreateFlagBits::eRayTracingNoNullMissShadersKHR: return "RayTracingNoNullMissShadersKHR"; @@ -5833,6 +5831,8 @@ namespace VULKAN_HPP_NAMESPACE case PipelineCreateFlagBits::eRayTracingSkipAabbsKHR: return "RayTracingSkipAabbsKHR"; case 
PipelineCreateFlagBits::eRayTracingShaderGroupHandleCaptureReplayKHR: return "RayTracingShaderGroupHandleCaptureReplayKHR"; case PipelineCreateFlagBits::eDeferCompileNV: return "DeferCompileNV"; + case PipelineCreateFlagBits::eRenderingFragmentDensityMapAttachmentEXT: return "RenderingFragmentDensityMapAttachmentEXT"; + case PipelineCreateFlagBits::eRenderingFragmentShadingRateAttachmentKHR: return "RenderingFragmentShadingRateAttachmentKHR"; case PipelineCreateFlagBits::eCaptureStatisticsKHR: return "CaptureStatisticsKHR"; case PipelineCreateFlagBits::eCaptureInternalRepresentationsKHR: return "CaptureInternalRepresentationsKHR"; case PipelineCreateFlagBits::eIndirectBindableNV: return "IndirectBindableNV"; From c7484b771fa9bd77adae0362af5ab7c075f5d72a Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Mon, 21 Oct 2024 01:48:01 +0200 Subject: [PATCH 045/131] fixing put pixel, adding scene change checker --- runtime/Includes/Core/Enums.h | 1 - runtime/Includes/Core/Graphics.h | 2 + runtime/Includes/Core/Graphics.inl | 8 +- runtime/Includes/Graphics/Mesh.h | 15 ++ runtime/Includes/Graphics/Scene.h | 11 +- runtime/Includes/Graphics/Sprite.h | 20 ++- runtime/Includes/Renderer/Descriptor.h | 41 ++--- runtime/Includes/Renderer/Enums.h | 7 +- runtime/Includes/Renderer/Image.h | 1 + runtime/Includes/Renderer/Pipelines/Shader.h | 15 +- runtime/Includes/Renderer/RenderCore.h | 7 +- .../Includes/Renderer/RenderPasses/2DPass.h | 2 +- .../Renderer/RenderPasses/FinalPass.h | 2 +- runtime/Includes/Renderer/Renderer.h | 3 - runtime/Includes/Utils/CallOnExit.h | 33 ++++ runtime/Includes/Utils/CallOnExit.inl | 29 ++++ runtime/Sources/Core/Application.cpp | 6 +- runtime/Sources/Core/Graphics.cpp | 13 +- runtime/Sources/Graphics/PutPixelManager.cpp | 8 +- runtime/Sources/Graphics/Scene.cpp | 19 ++- runtime/Sources/Graphics/Sprite.cpp | 9 +- runtime/Sources/Renderer/Descriptor.cpp | 161 +++++++++++------- runtime/Sources/Renderer/Image.cpp | 46 +++-- runtime/Sources/Renderer/Pipelines/Shader.cpp | 1 - runtime/Sources/Renderer/RenderCore.cpp | 5 + .../Sources/Renderer/RenderPasses/2DPass.cpp | 18 +- .../Renderer/RenderPasses/FinalPass.cpp | 7 +- runtime/Sources/Renderer/Renderer.cpp | 13 -- 28 files changed, 302 insertions(+), 201 deletions(-) create mode 100644 runtime/Includes/Utils/CallOnExit.h create mode 100644 runtime/Includes/Utils/CallOnExit.inl diff --git a/runtime/Includes/Core/Enums.h b/runtime/Includes/Core/Enums.h index 7eb6984..ef15390 100644 --- a/runtime/Includes/Core/Enums.h +++ b/runtime/Includes/Core/Enums.h @@ -19,7 +19,6 @@ namespace mlx enum class Event { - DescriptorPoolResetEventCode = 55, ResizeEventCode = 56, FrameBeginEventCode = 57, FatalErrorEventCode = 168, diff --git a/runtime/Includes/Core/Graphics.h b/runtime/Includes/Core/Graphics.h index c2be4c0..6a4e5e6 100644 --- a/runtime/Includes/Core/Graphics.h +++ b/runtime/Includes/Core/Graphics.h @@ -44,6 +44,8 @@ namespace mlx std::shared_ptr p_window; std::unique_ptr p_scene; + std::uint64_t m_draw_layer = 0; + int m_id; bool m_has_window; diff --git a/runtime/Includes/Core/Graphics.inl b/runtime/Includes/Core/Graphics.inl index 2ba4a09..f0db34e 100644 --- a/runtime/Includes/Core/Graphics.inl +++ b/runtime/Includes/Core/Graphics.inl @@ -9,6 +9,7 @@ namespace mlx p_scene->ResetSprites(); m_put_pixel_manager.ResetRenderData(); m_insert_new_pixel_put_texture = true; + m_draw_layer = 0; } void GraphicsSupport::PixelPut(int x, int y, std::uint32_t color) noexcept @@ -19,6 +20,7 @@ namespace mlx { Sprite& new_sprite = 
p_scene->CreateSprite(texture); new_sprite.SetPosition(Vec2f{ 0.0f, 0.0f }); + m_draw_layer++; } m_insert_new_pixel_put_texture = false; } @@ -42,8 +44,12 @@ namespace mlx new_sprite.SetPosition(Vec2f{ static_cast(x), static_cast(y) }); m_insert_new_pixel_put_texture = true; } - else + else if(!p_scene->IsTextureAtGivenDrawLayer(texture, m_draw_layer)) + { p_scene->BringToFront(std::move(sprite)); + m_insert_new_pixel_put_texture = true; + } + m_draw_layer++; } void GraphicsSupport::LoadFont(const std::filesystem::path& filepath, float scale) diff --git a/runtime/Includes/Graphics/Mesh.h b/runtime/Includes/Graphics/Mesh.h index 215736b..fc55023 100644 --- a/runtime/Includes/Graphics/Mesh.h +++ b/runtime/Includes/Graphics/Mesh.h @@ -48,6 +48,21 @@ namespace mlx private: std::vector m_sub_meshes; }; + + class MeshRegistry + { + public: + MeshRegistry() = default; + + inline void RegisterMesh(std::shared_ptr mesh); + inline void UnregisterMesh(std::shared_ptr mesh); + inline bool IsMeshKnown(std::shared_ptr mesh); + + ~MeshRegistry() = default; + + private: + std::unordered_set> m_mesh_registry; + }; } #endif diff --git a/runtime/Includes/Graphics/Scene.h b/runtime/Includes/Graphics/Scene.h index 4c7f6a7..05a0bc0 100644 --- a/runtime/Includes/Graphics/Scene.h +++ b/runtime/Includes/Graphics/Scene.h @@ -7,32 +7,25 @@ namespace mlx { - struct SceneDescriptor - { - NonOwningPtr renderer; - // More description may come in future - }; - class Scene { public: - Scene(SceneDescriptor desc); + Scene() = default; Sprite& CreateSprite(NonOwningPtr texture) noexcept; NonOwningPtr GetSpriteFromTextureAndPosition(NonOwningPtr texture, const Vec2f& position) const; void BringToFront(NonOwningPtr sprite); void TryEraseSpriteFromTexture(NonOwningPtr texture); + bool IsTextureAtGivenDrawLayer(NonOwningPtr texture, std::uint64_t draw_layer) const; inline void ResetSprites() { m_sprites.clear(); } [[nodiscard]] MLX_FORCEINLINE const std::vector>& GetSprites() const noexcept { return m_sprites; } - [[nodiscard]] MLX_FORCEINLINE const SceneDescriptor& GetDescription() const noexcept { return m_descriptor; } [[nodiscard]] MLX_FORCEINLINE ViewerData& GetViewerData() noexcept { return m_viewer_data; } ~Scene() = default; private: - SceneDescriptor m_descriptor; std::vector> m_sprites; ViewerData m_viewer_data; }; diff --git a/runtime/Includes/Graphics/Sprite.h b/runtime/Includes/Graphics/Sprite.h index 757e30d..8efcf8d 100644 --- a/runtime/Includes/Graphics/Sprite.h +++ b/runtime/Includes/Graphics/Sprite.h @@ -14,7 +14,7 @@ namespace mlx friend class Render2DPass; public: - Sprite(class Renderer& renderer, NonOwningPtr texture); + Sprite(NonOwningPtr texture); inline void SetColor(Vec4f color) noexcept { m_color = color; } inline void SetPosition(Vec2f position) noexcept { m_position = position; } @@ -24,25 +24,27 @@ namespace mlx [[nodiscard]] MLX_FORCEINLINE std::shared_ptr GetMesh() const { return p_mesh; } [[nodiscard]] MLX_FORCEINLINE NonOwningPtr GetTexture() const { return p_texture; } - ~Sprite() = default; + inline ~Sprite() { if(p_set) p_set->ReturnDescriptorSetToPool(); } private: - [[nodiscard]] inline bool IsSetInit() const noexcept { return m_set.IsInit(); } - [[nodiscard]] inline VkDescriptorSet GetSet(std::size_t frame_index) const noexcept { return m_set.GetSet(frame_index); } + [[nodiscard]] inline bool IsSetInit() const noexcept { return p_set && p_set->IsInit(); } + [[nodiscard]] inline VkDescriptorSet GetSet(std::size_t frame_index) const noexcept { return p_set ? 
p_set->GetSet(frame_index) : VK_NULL_HANDLE; } - inline void UpdateDescriptorSet(const DescriptorSet& set) + inline void UpdateDescriptorSet(std::shared_ptr set) { - m_set = set.Duplicate(); + p_set = RenderCore::Get().GetDescriptorPoolManager().GetAvailablePool().RequestDescriptorSet(set->GetShaderLayout(), set->GetShaderType()); } inline void Bind(std::size_t frame_index, VkCommandBuffer cmd) { - m_set.SetImage(frame_index, 0, *p_texture); - m_set.Update(frame_index, cmd); + if(!p_set) + return; + p_set->SetImage(frame_index, 0, *p_texture); + p_set->Update(frame_index, cmd); } private: - DescriptorSet m_set; + std::shared_ptr p_set; NonOwningPtr p_texture; std::shared_ptr p_mesh; Vec4f m_color = Vec4f{ 1.0f, 1.0f, 1.0f, 1.0f }; diff --git a/runtime/Includes/Renderer/Descriptor.h b/runtime/Includes/Renderer/Descriptor.h index 8689d3f..0a3b9e9 100644 --- a/runtime/Includes/Renderer/Descriptor.h +++ b/runtime/Includes/Renderer/Descriptor.h @@ -13,7 +13,6 @@ namespace mlx NonOwningPtr uniform_buffer_ptr; NonOwningPtr image_ptr; VkDescriptorType type; - ShaderType shader_type; std::uint32_t binding; }; @@ -25,17 +24,18 @@ namespace mlx void Init() noexcept; void Destroy() noexcept; - VkDescriptorSet AllocateDescriptorSet(std::uint32_t frame_index, VkDescriptorSetLayout layout); + std::shared_ptr RequestDescriptorSet(const ShaderSetLayout& layout, ShaderType shader_type); + void ReturnDescriptorSet(std::shared_ptr set); - void ResetPoolFromFrameIndex(std::size_t frame_index); - - [[nodiscard]] inline VkDescriptorPool Get(std::uint32_t index) const noexcept { return m_pools[index]; } + [[nodiscard]] inline VkDescriptorPool Get() const noexcept { return m_pool; } [[nodiscard]] MLX_FORCEINLINE std::size_t GetNumberOfSetsAllocated() const noexcept { return m_allocation_count; } ~DescriptorPool() = default; private: - std::array m_pools; + std::vector> m_free_sets; + std::vector> m_used_sets; + VkDescriptorPool m_pool; std::size_t m_allocation_count = 0; }; @@ -44,42 +44,45 @@ namespace mlx public: DescriptorPoolManager() = default; - void ResetPoolsFromFrameIndex(std::size_t frame_index); DescriptorPool& GetAvailablePool(); void Destroy(); ~DescriptorPoolManager() = default; private: - std::list m_pools; + std::vector m_pools; }; - class DescriptorSet + class DescriptorSet : public std::enable_shared_from_this { - public: - DescriptorSet() { m_set.fill(VK_NULL_HANDLE); } - DescriptorSet(DescriptorPoolManager& pools_manager, const ShaderSetLayout& layout, VkDescriptorSetLayout vklayout, ShaderType shader_type); + friend DescriptorPool; + public: void SetImage(std::size_t i, std::uint32_t binding, class Image& image); void SetStorageBuffer(std::size_t i, std::uint32_t binding, class GPUBuffer& buffer); void SetUniformBuffer(std::size_t i, std::uint32_t binding, class GPUBuffer& buffer); void Update(std::size_t i, VkCommandBuffer cmd = VK_NULL_HANDLE) noexcept; - void Reallocate(std::size_t frame_index) noexcept; - [[nodiscard]] inline VkDescriptorSet GetSet(std::size_t i) const noexcept { return m_set[i]; } - [[nodiscard]] inline DescriptorSet Duplicate() const { return DescriptorSet{ *p_pools_manager, m_set_layout, m_descriptors }; } - [[nodiscard]] inline bool IsInit() const noexcept { return m_set[0] != VK_NULL_HANDLE; } + void ReturnDescriptorSetToPool(); + + [[nodiscard]] inline VkDescriptorSet GetSet(std::size_t i) const noexcept { return m_sets[i]; } + [[nodiscard]] MLX_FORCEINLINE bool IsInit() const noexcept { return m_sets[0] != VK_NULL_HANDLE; } + [[nodiscard]] MLX_FORCEINLINE 
VkDescriptorSetLayout GetVulkanLayout() const noexcept { return m_set_layout; } + [[nodiscard]] MLX_FORCEINLINE const ShaderSetLayout& GetShaderLayout() const { return m_shader_layout; } + [[nodiscard]] MLX_FORCEINLINE ShaderType GetShaderType() const noexcept { return m_shader_type; } ~DescriptorSet() = default; private: - DescriptorSet(DescriptorPoolManager& pools_manager, VkDescriptorSetLayout layout, const std::vector& descriptors); + DescriptorSet(DescriptorPool& pool, VkDescriptorSetLayout vulkan_layout, const ShaderSetLayout& layout, std::array vulkan_sets, ShaderType shader_type); private: + ShaderSetLayout m_shader_layout; std::vector m_descriptors; - std::array m_set; + std::array m_sets; VkDescriptorSetLayout m_set_layout; - NonOwningPtr p_pools_manager; + ShaderType m_shader_type; + DescriptorPool& m_pool; }; } diff --git a/runtime/Includes/Renderer/Enums.h b/runtime/Includes/Renderer/Enums.h index ddc0487..80431a1 100644 --- a/runtime/Includes/Renderer/Enums.h +++ b/runtime/Includes/Renderer/Enums.h @@ -17,11 +17,16 @@ namespace mlx enum class ImageType { Color = 0, - Depth, EndEnum }; constexpr std::size_t ImageTypeCount = static_cast(ImageType::EndEnum); + + enum class ShaderType + { + Vertex, + Fragment + }; } #endif diff --git a/runtime/Includes/Renderer/Image.h b/runtime/Includes/Renderer/Image.h index c857089..d787e9f 100644 --- a/runtime/Includes/Renderer/Image.h +++ b/runtime/Includes/Renderer/Image.h @@ -77,6 +77,7 @@ namespace mlx } void Init(CPUBuffer pixels, std::uint32_t width, std::uint32_t height, VkFormat format, bool is_multisampled, [[maybe_unused]] std::string_view debug_name); + void Destroy() noexcept override; void SetPixel(int x, int y, std::uint32_t color) noexcept; int GetPixel(int x, int y) noexcept; diff --git a/runtime/Includes/Renderer/Pipelines/Shader.h b/runtime/Includes/Renderer/Pipelines/Shader.h index d1c59db..54cbc7d 100644 --- a/runtime/Includes/Renderer/Pipelines/Shader.h +++ b/runtime/Includes/Renderer/Pipelines/Shader.h @@ -1,13 +1,17 @@ #ifndef __MLX_SHADER__ #define __MLX_SHADER__ +#include + namespace mlx { struct ShaderSetLayout { - std::vector > binds; + std::vector> binds; ShaderSetLayout(std::vector > b) : binds(std::move(b)) {} + + inline bool operator==(const ShaderSetLayout& rhs) const { return binds == rhs.binds; } }; struct ShaderPushConstantLayout @@ -20,19 +24,12 @@ namespace mlx struct ShaderLayout { - std::vector > set_layouts; + std::vector> set_layouts; std::vector push_constants; ShaderLayout(std::vector > s, std::vector pc) : set_layouts(std::move(s)), push_constants(std::move(pc)) {} }; - enum class ShaderType - { - Vertex, - Fragment, - Compute - }; - struct ShaderPipelineLayoutPart { std::vector push_constants; diff --git a/runtime/Includes/Renderer/RenderCore.h b/runtime/Includes/Renderer/RenderCore.h index 97f764a..9f139c6 100644 --- a/runtime/Includes/Renderer/RenderCore.h +++ b/runtime/Includes/Renderer/RenderCore.h @@ -1,12 +1,13 @@ #ifndef __MLX_RENDER_CORE__ #define __MLX_RENDER_CORE__ +constexpr const int MAX_FRAMES_IN_FLIGHT = 3; + #include +#include namespace mlx { - constexpr const int MAX_FRAMES_IN_FLIGHT = 3; - #if defined(DEBUG) && defined(VK_EXT_debug_utils) #define MLX_HAS_DEBUG_UTILS_FUNCTIONS #endif @@ -21,6 +22,7 @@ namespace mlx [[nodiscard]] MLX_FORCEINLINE VkDevice GetDevice() const noexcept { return m_device; } [[nodiscard]] MLX_FORCEINLINE VkPhysicalDevice GetPhysicalDevice() const noexcept { return m_physical_device; } [[nodiscard]] MLX_FORCEINLINE GPUAllocator& GetAllocator() noexcept { 
return m_allocator; } + [[nodiscard]] inline DescriptorPoolManager& GetDescriptorPoolManager() noexcept { return m_descriptor_pool_manager; } inline void WaitDeviceIdle() const noexcept { vkDeviceWaitIdle(m_device); } @@ -45,6 +47,7 @@ namespace mlx private: static RenderCore* s_instance; + DescriptorPoolManager m_descriptor_pool_manager; GPUAllocator m_allocator; VkInstance m_instance = VK_NULL_HANDLE; VkDevice m_device = VK_NULL_HANDLE; diff --git a/runtime/Includes/Renderer/RenderPasses/2DPass.h b/runtime/Includes/Renderer/RenderPasses/2DPass.h index dc78aeb..8486818 100644 --- a/runtime/Includes/Renderer/RenderPasses/2DPass.h +++ b/runtime/Includes/Renderer/RenderPasses/2DPass.h @@ -1,7 +1,7 @@ #ifndef __MLX_2D_PASS__ #define __MLX_2D_PASS__ -#include +#include #include #include diff --git a/runtime/Includes/Renderer/RenderPasses/FinalPass.h b/runtime/Includes/Renderer/RenderPasses/FinalPass.h index fe5571c..e1baa1f 100644 --- a/runtime/Includes/Renderer/RenderPasses/FinalPass.h +++ b/runtime/Includes/Renderer/RenderPasses/FinalPass.h @@ -1,7 +1,7 @@ #ifndef __MLX_FINAL_PASS__ #define __MLX_FINAL_PASS__ -#include +#include #include #include diff --git a/runtime/Includes/Renderer/Renderer.h b/runtime/Includes/Renderer/Renderer.h index 1bcba3a..a2df8cd 100644 --- a/runtime/Includes/Renderer/Renderer.h +++ b/runtime/Includes/Renderer/Renderer.h @@ -5,7 +5,6 @@ #include #include #include -#include #include namespace mlx @@ -32,7 +31,6 @@ namespace mlx [[nodiscard]] inline std::size_t GetSwapchainImageIndex() const noexcept { return m_swapchain_image_index; } [[nodiscard]] inline std::size_t GetCurrentFrameIndex() const noexcept { return m_current_frame_index; } [[nodiscard]] inline NonOwningPtr GetWindow() const noexcept { return p_window; } - [[nodiscard]] inline DescriptorPoolManager& GetDescriptorPoolManager() noexcept { return m_descriptor_pool_manager; } MLX_FORCEINLINE constexpr void RequireFramebufferResize() noexcept { m_framebuffers_resize = true; } @@ -45,7 +43,6 @@ namespace mlx void DestroySwapchain(); private: - DescriptorPoolManager m_descriptor_pool_manager; std::array m_image_available_semaphores; std::array m_render_finished_semaphores; std::array m_cmd_buffers; diff --git a/runtime/Includes/Utils/CallOnExit.h b/runtime/Includes/Utils/CallOnExit.h new file mode 100644 index 0000000..5e86f5d --- /dev/null +++ b/runtime/Includes/Utils/CallOnExit.h @@ -0,0 +1,33 @@ +#ifndef __MLX_CALL_ON_EXIT__ +#define __MLX_CALL_ON_EXIT__ + +namespace mlx +{ + template + class CallOnExit + { + public: + CallOnExit() = default; + CallOnExit(F&& functor); + CallOnExit(const CallOnExit&) = delete; + CallOnExit(CallOnExit&&) = delete; + + void CallAndReset(); + void Reset(); + + CallOnExit& operator=(const CallOnExit&) = delete; + CallOnExit& operator=(CallOnExit&&) = default; + + ~CallOnExit(); + + private: + std::optional m_functor; + }; + + template + CallOnExit(F) -> CallOnExit; +} + +#include + +#endif diff --git a/runtime/Includes/Utils/CallOnExit.inl b/runtime/Includes/Utils/CallOnExit.inl new file mode 100644 index 0000000..69fa3ab --- /dev/null +++ b/runtime/Includes/Utils/CallOnExit.inl @@ -0,0 +1,29 @@ +#pragma once +#include + +namespace mlx +{ + template + CallOnExit::CallOnExit(F&& functor) : m_functor(std::move(functor)) {} + + template + CallOnExit::~CallOnExit() + { + if(m_functor.has_value()) + (*m_functor)(); + } + + template + void CallOnExit::CallAndReset() + { + if(m_functor.has_value()) + (*m_functor)(); + m_functor.reset(); + } + + template + void CallOnExit::Reset() + { 
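+		// Disarm without executing: drop the stored functor so the destructor becomes a no-op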
+ m_functor.reset(); + } +} diff --git a/runtime/Sources/Core/Application.cpp b/runtime/Sources/Core/Application.cpp index 5b7c18b..03f5cb2 100644 --- a/runtime/Sources/Core/Application.cpp +++ b/runtime/Sources/Core/Application.cpp @@ -10,7 +10,7 @@ namespace mlx { Application::Application() : p_mem_manager(std::make_unique()), p_sdl_manager(std::make_unique()), m_fps(), m_in() { - EventBus::RegisterListener({[](const EventBase& event) + EventBus::RegisterListener({ [](const EventBase& event) { if(event.What() == Event::FatalErrorEventCode) std::abort(); @@ -53,7 +53,7 @@ namespace mlx MLX_PROFILE_FUNCTION(); Texture* texture; try { texture = new Texture({}, w, h, VK_FORMAT_R8G8B8A8_SRGB, false, "mlx_user_image"); } - catch(...) { return NULL; } + catch(...) { return nullptr; } m_image_registry.RegisterTexture(texture); return texture; } @@ -63,7 +63,7 @@ namespace mlx MLX_PROFILE_FUNCTION(); Texture* texture = StbTextureLoad(file, w, h); if(texture == nullptr) - return NULL; // NULL for C compatibility + return nullptr; m_image_registry.RegisterTexture(texture); return texture; } diff --git a/runtime/Sources/Core/Graphics.cpp b/runtime/Sources/Core/Graphics.cpp index 184831b..bd6149f 100644 --- a/runtime/Sources/Core/Graphics.cpp +++ b/runtime/Sources/Core/Graphics.cpp @@ -13,10 +13,7 @@ namespace mlx // TODO : re-enable render targets m_renderer.Init(nullptr); m_scene_renderer.Init(m_renderer); - - SceneDescriptor descriptor{}; - descriptor.renderer = &m_renderer; - p_scene = std::make_unique(std::move(descriptor)); + p_scene = std::make_unique(); } GraphicsSupport::GraphicsSupport(std::size_t w, std::size_t h, std::string title, int id) : @@ -28,10 +25,7 @@ namespace mlx MLX_PROFILE_FUNCTION(); m_renderer.Init(p_window.get()); m_scene_renderer.Init(m_renderer); - - SceneDescriptor descriptor{}; - descriptor.renderer = &m_renderer; - p_scene = std::make_unique(std::move(descriptor)); + p_scene = std::make_unique(); } void GraphicsSupport::Render() noexcept @@ -39,6 +33,7 @@ namespace mlx MLX_PROFILE_FUNCTION(); if(m_renderer.BeginFrame()) { + m_draw_layer = 0; m_scene_renderer.Render(*p_scene, m_renderer); m_renderer.EndFrame(); } @@ -47,7 +42,7 @@ namespace mlx // dump memory to file every two seconds using namespace std::chrono_literals; static std::int64_t timer = static_cast(std::chrono::duration_cast(std::chrono::high_resolution_clock::now().time_since_epoch()).count()); - if(std::chrono::duration{static_cast(std::chrono::duration_cast(std::chrono::high_resolution_clock::now().time_since_epoch()).count()) - timer} >= 1s) + if(std::chrono::duration{ static_cast(std::chrono::duration_cast(std::chrono::high_resolution_clock::now().time_since_epoch()).count()) - timer } >= 1s) { RenderCore::Get().GetAllocator().DumpMemoryToJson(); timer = static_cast(std::chrono::duration_cast(std::chrono::high_resolution_clock::now().time_since_epoch()).count()); diff --git a/runtime/Sources/Graphics/PutPixelManager.cpp b/runtime/Sources/Graphics/PutPixelManager.cpp index 929e504..b41e047 100644 --- a/runtime/Sources/Graphics/PutPixelManager.cpp +++ b/runtime/Sources/Graphics/PutPixelManager.cpp @@ -19,8 +19,12 @@ namespace mlx #endif texture.Clear(VK_NULL_HANDLE, Vec4f{ 0.0f }); } - m_textures.back().SetPixel(x, y, color); - return (insert_new_texture ? &m_textures.back() : nullptr); + if(!m_textures.empty()) + { + m_textures.back().SetPixel(x, y, color); + return (insert_new_texture ? 
&m_textures.back() : nullptr); + } + return nullptr; } void PutPixelManager::ResetRenderData() diff --git a/runtime/Sources/Graphics/Scene.cpp b/runtime/Sources/Graphics/Scene.cpp index e1bf59b..1a76ec6 100644 --- a/runtime/Sources/Graphics/Scene.cpp +++ b/runtime/Sources/Graphics/Scene.cpp @@ -5,17 +5,10 @@ namespace mlx { - Scene::Scene(SceneDescriptor desc) - : m_descriptor(std::move(desc)) - { - MLX_PROFILE_FUNCTION(); - Verify((bool)m_descriptor.renderer, "invalid renderer"); - } - Sprite& Scene::CreateSprite(NonOwningPtr texture) noexcept { MLX_PROFILE_FUNCTION(); - std::shared_ptr sprite = std::make_shared(*m_descriptor.renderer, texture); + std::shared_ptr sprite = std::make_shared(texture); m_sprites.push_back(sprite); return *sprite; } @@ -52,7 +45,15 @@ namespace mlx { return sprite->GetTexture() == texture; }); - m_sprites.erase(it); + if(it != m_sprites.end()) + m_sprites.erase(it); } while(it != m_sprites.end()); } + + bool Scene::IsTextureAtGivenDrawLayer(NonOwningPtr texture, std::uint64_t draw_layer) const + { + if(draw_layer >= m_sprites.size()) + return false; + return m_sprites[draw_layer]->GetTexture() == texture; + } } diff --git a/runtime/Sources/Graphics/Sprite.cpp b/runtime/Sources/Graphics/Sprite.cpp index a30df18..e9fd03f 100644 --- a/runtime/Sources/Graphics/Sprite.cpp +++ b/runtime/Sources/Graphics/Sprite.cpp @@ -37,18 +37,11 @@ namespace mlx return mesh; } - Sprite::Sprite(Renderer& renderer, NonOwningPtr texture) + Sprite::Sprite(NonOwningPtr texture) { MLX_PROFILE_FUNCTION(); Verify((bool)texture, "Sprite: invalid texture (internal mlx issue, please report to devs)"); p_mesh = CreateQuad(0, 0, texture->GetWidth(), texture->GetHeight()); p_texture = texture; - - func::function functor = [this, &renderer](const EventBase& event) - { - if(event.What() == Event::DescriptorPoolResetEventCode) - m_set.Reallocate(renderer.GetCurrentFrameIndex()); - }; - EventBus::RegisterListener({ functor, "__MlxSprite" + std::to_string(reinterpret_cast(this)) }); } } diff --git a/runtime/Sources/Renderer/Descriptor.cpp b/runtime/Sources/Renderer/Descriptor.cpp index 96e8b57..4349fd9 100644 --- a/runtime/Sources/Renderer/Descriptor.cpp +++ b/runtime/Sources/Renderer/Descriptor.cpp @@ -24,6 +24,7 @@ namespace mlx void DescriptorPool::Init() noexcept { + MLX_PROFILE_FUNCTION(); VkDescriptorPoolSize pool_sizes[] = { { VK_DESCRIPTOR_TYPE_SAMPLER, MAX_SETS_PER_POOL }, { VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, MAX_SETS_PER_POOL }, @@ -38,99 +39,143 @@ namespace mlx { VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, MAX_SETS_PER_POOL } }; - VkDescriptorPoolCreateInfo poolInfo{}; - poolInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO; - poolInfo.poolSizeCount = sizeof(pool_sizes) / sizeof(pool_sizes[0]); - poolInfo.pPoolSizes = pool_sizes; - poolInfo.maxSets = MAX_SETS_PER_POOL; - poolInfo.flags = VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT; - for(std::size_t i = 0; i < MAX_FRAMES_IN_FLIGHT; i++) - kvfCheckVk(RenderCore::Get().vkCreateDescriptorPool(RenderCore::Get().GetDevice(), &poolInfo, nullptr, &m_pools[i])); + VkDescriptorPoolCreateInfo pool_info{}; + pool_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO; + pool_info.poolSizeCount = sizeof(pool_sizes) / sizeof(pool_sizes[0]); + pool_info.pPoolSizes = pool_sizes; + pool_info.maxSets = MAX_SETS_PER_POOL; + pool_info.flags = 0; + kvfCheckVk(RenderCore::Get().vkCreateDescriptorPool(RenderCore::Get().GetDevice(), &pool_info, nullptr, &m_pool)); m_allocation_count = 0; } void DescriptorPool::Destroy() noexcept { - 
for(std::size_t i = 0; i < MAX_FRAMES_IN_FLIGHT; i++) - { - if(m_pools[i] == VK_NULL_HANDLE) - continue; - RenderCore::Get().vkDestroyDescriptorPool(RenderCore::Get().GetDevice(), m_pools[i], nullptr); - m_pools[i] = VK_NULL_HANDLE; - } + MLX_PROFILE_FUNCTION(); + if(m_pool == VK_NULL_HANDLE) + return; + for(auto& set : m_free_sets) + kvfDestroyDescriptorSetLayout(RenderCore::Get().GetDevice(), set->m_set_layout); + for(auto& set : m_used_sets) + kvfDestroyDescriptorSetLayout(RenderCore::Get().GetDevice(), set->m_set_layout); + RenderCore::Get().vkDestroyDescriptorPool(RenderCore::Get().GetDevice(), m_pool, nullptr); + m_pool = VK_NULL_HANDLE; m_allocation_count = 0; + m_free_sets.clear(); + m_used_sets.clear(); } - VkDescriptorSet DescriptorPool::AllocateDescriptorSet(std::uint32_t frame_index, VkDescriptorSetLayout layout) + std::shared_ptr DescriptorPool::RequestDescriptorSet(const ShaderSetLayout& layout, ShaderType shader_type) { - VkDescriptorSet set; - VkDescriptorSetAllocateInfo alloc_info = {}; - alloc_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO; - alloc_info.descriptorPool = m_pools[frame_index]; - alloc_info.descriptorSetCount = 1; - alloc_info.pSetLayouts = &layout; - kvfCheckVk(RenderCore::Get().vkAllocateDescriptorSets(RenderCore::Get().GetDevice(), &alloc_info, &set)); - m_allocation_count++; + MLX_PROFILE_FUNCTION(); + auto it = std::find_if(m_free_sets.begin(), m_free_sets.end(), [&](std::shared_ptr set) + { + return shader_type == set->GetShaderType() && layout == set->GetShaderLayout(); + }); + if(it != m_free_sets.end()) + { + std::shared_ptr set = *it; + m_free_sets.erase(it); + m_used_sets.push_back(set); + return set; + } + + std::array vulkan_sets; + + VkShaderStageFlagBits vulkan_shader_stage; + switch(shader_type) + { + case ShaderType::Vertex: vulkan_shader_stage = VK_SHADER_STAGE_VERTEX_BIT; break; + case ShaderType::Fragment: vulkan_shader_stage = VK_SHADER_STAGE_FRAGMENT_BIT; break; + + default : FatalError("wtf"); break; + } + + std::vector bindings(layout.binds.size()); + for(std::size_t i = 0; i < layout.binds.size(); i++) + { + bindings[i].binding = layout.binds[i].first; + bindings[i].descriptorCount = 1; + bindings[i].descriptorType = layout.binds[i].second; + bindings[i].pImmutableSamplers = nullptr; + bindings[i].stageFlags = vulkan_shader_stage; + } + VkDescriptorSetLayout vulkan_layout = kvfCreateDescriptorSetLayout(RenderCore::Get().GetDevice(), bindings.data(), bindings.size()); + + for(int i = 0; i < MAX_FRAMES_IN_FLIGHT; i++) + { + VkDescriptorSetAllocateInfo alloc_info = {}; + alloc_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO; + alloc_info.descriptorPool = m_pool; + alloc_info.descriptorSetCount = 1; + alloc_info.pSetLayouts = &vulkan_layout; + VkDescriptorSet vulkan_set; + kvfCheckVk(RenderCore::Get().vkAllocateDescriptorSets(RenderCore::Get().GetDevice(), &alloc_info, &vulkan_set)); + m_allocation_count++; + vulkan_sets[i] = vulkan_set; + } + + std::shared_ptr set(new DescriptorSet(*this, vulkan_layout, layout, std::move(vulkan_sets), shader_type)); + m_used_sets.push_back(set); return set; } - void DescriptorPool::ResetPoolFromFrameIndex(std::size_t frame_index) + void DescriptorPool::ReturnDescriptorSet(std::shared_ptr set) { - Assert(frame_index < MAX_FRAMES_IN_FLIGHT, "invalid frame index"); - RenderCore::Get().vkResetDescriptorPool(RenderCore::Get().GetDevice(), m_pools[frame_index], 0); - } - - void DescriptorPoolManager::ResetPoolsFromFrameIndex(std::size_t frame_index) - { - for(auto& pool : m_pools) - 
pool.ResetPoolFromFrameIndex(frame_index); + auto it = std::find_if(m_used_sets.begin(), m_used_sets.end(), [&](std::shared_ptr rhs_set) + { + return set == rhs_set; + }); + if(it == m_used_sets.end()) + { + Error("Vulkan : cannot return descriptor set to pool, invalid pool"); + return; + } + m_used_sets.erase(it); + m_free_sets.push_back(set); } DescriptorPool& DescriptorPoolManager::GetAvailablePool() { + MLX_PROFILE_FUNCTION(); for(auto& pool : m_pools) { if(pool.GetNumberOfSetsAllocated() < MAX_SETS_PER_POOL) return pool; } - m_pools.emplace_front().Init(); - return m_pools.front(); + m_pools.emplace_back().Init(); + return m_pools.back(); } void DescriptorPoolManager::Destroy() { + MLX_PROFILE_FUNCTION(); #pragma omp parallel for for(auto& pool : m_pools) pool.Destroy(); + m_pools.clear(); } - DescriptorSet::DescriptorSet(DescriptorPoolManager& pools_manager, const ShaderSetLayout& layout, VkDescriptorSetLayout vklayout, ShaderType shader_type) - : m_set_layout(vklayout), p_pools_manager(&pools_manager) + DescriptorSet::DescriptorSet(DescriptorPool& pool, VkDescriptorSetLayout vulkan_layout, const ShaderSetLayout& layout, std::array vulkan_sets, ShaderType shader_type) : + m_shader_layout(layout), + m_sets(std::move(vulkan_sets)), + m_set_layout(vulkan_layout), + m_shader_type(shader_type), + m_pool(pool) { MLX_PROFILE_FUNCTION(); for(auto& [binding, type] : layout.binds) { m_descriptors.emplace_back(); m_descriptors.back().type = type; - m_descriptors.back().shader_type = shader_type; m_descriptors.back().binding = binding; } - for(std::size_t i = 0; i < MAX_FRAMES_IN_FLIGHT; i++) - m_set[i] = p_pools_manager->GetAvailablePool().AllocateDescriptorSet(i, vklayout); - } - - DescriptorSet::DescriptorSet(DescriptorPoolManager& pools_manager, VkDescriptorSetLayout layout, const std::vector& descriptors) - : m_descriptors(descriptors), m_set_layout(layout), p_pools_manager(&pools_manager) - { - MLX_PROFILE_FUNCTION(); - for(std::size_t i = 0; i < MAX_FRAMES_IN_FLIGHT; i++) - m_set[i] = p_pools_manager->GetAvailablePool().AllocateDescriptorSet(i, layout); } void DescriptorSet::SetImage(std::size_t i, std::uint32_t binding, class Image& image) { MLX_PROFILE_FUNCTION(); - Verify(m_set[i] != VK_NULL_HANDLE, "invalid descriptor"); + Verify(m_sets[i] != VK_NULL_HANDLE, "invalid descriptor"); auto it = std::find_if(m_descriptors.begin(), m_descriptors.end(), [=](Descriptor descriptor) { return binding == descriptor.binding; @@ -151,7 +196,7 @@ namespace mlx void DescriptorSet::SetStorageBuffer(std::size_t i, std::uint32_t binding, class GPUBuffer& buffer) { MLX_PROFILE_FUNCTION(); - Verify(m_set[i] != VK_NULL_HANDLE, "invalid descriptor"); + Verify(m_sets[i] != VK_NULL_HANDLE, "invalid descriptor"); auto it = std::find_if(m_descriptors.begin(), m_descriptors.end(), [=](Descriptor descriptor) { return binding == descriptor.binding; @@ -172,7 +217,7 @@ namespace mlx void DescriptorSet::SetUniformBuffer(std::size_t i, std::uint32_t binding, class GPUBuffer& buffer) { MLX_PROFILE_FUNCTION(); - Verify(m_set[i] != VK_NULL_HANDLE, "invalid descriptor"); + Verify(m_sets[i] != VK_NULL_HANDLE, "invalid descriptor"); auto it = std::find_if(m_descriptors.begin(), m_descriptors.end(), [=](Descriptor descriptor) { return binding == descriptor.binding; @@ -193,7 +238,7 @@ namespace mlx void DescriptorSet::Update(std::size_t i, VkCommandBuffer cmd) noexcept { MLX_PROFILE_FUNCTION(); - Verify(m_set[i] != VK_NULL_HANDLE, "invalid descriptor"); + Verify(m_sets[i] != VK_NULL_HANDLE, "invalid descriptor"); 
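+		// Collect one VkWriteDescriptorSet per bound resource (image, uniform buffer or storage buffer), then flush them all in the single vkUpdateDescriptorSets call below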
std::vector writes; std::vector buffer_infos; std::vector image_infos; @@ -207,7 +252,7 @@ namespace mlx info.imageLayout = descriptor.image_ptr->GetLayout(); info.imageView = descriptor.image_ptr->GetImageView(); image_infos.push_back(info); - writes.push_back(kvfWriteImageToDescriptorSet(RenderCore::Get().GetDevice(), m_set[i], &image_infos.back(), descriptor.binding)); + writes.push_back(kvfWriteImageToDescriptorSet(RenderCore::Get().GetDevice(), m_sets[i], &image_infos.back(), descriptor.binding)); } else if(descriptor.uniform_buffer_ptr) { @@ -216,7 +261,7 @@ namespace mlx info.offset = descriptor.uniform_buffer_ptr->GetOffset(); info.range = VK_WHOLE_SIZE; buffer_infos.push_back(info); - writes.push_back(kvfWriteUniformBufferToDescriptorSet(RenderCore::Get().GetDevice(), m_set[i], &buffer_infos.back(), descriptor.binding)); + writes.push_back(kvfWriteUniformBufferToDescriptorSet(RenderCore::Get().GetDevice(), m_sets[i], &buffer_infos.back(), descriptor.binding)); } else if(descriptor.storage_buffer_ptr) { @@ -225,16 +270,14 @@ namespace mlx info.offset = descriptor.storage_buffer_ptr->GetOffset(); info.range = VK_WHOLE_SIZE; buffer_infos.push_back(info); - writes.push_back(kvfWriteStorageBufferToDescriptorSet(RenderCore::Get().GetDevice(), m_set[i], &buffer_infos.back(), descriptor.binding)); + writes.push_back(kvfWriteStorageBufferToDescriptorSet(RenderCore::Get().GetDevice(), m_sets[i], &buffer_infos.back(), descriptor.binding)); } } RenderCore::Get().vkUpdateDescriptorSets(RenderCore::Get().GetDevice(), writes.size(), writes.data(), 0, nullptr); } - void DescriptorSet::Reallocate(std::size_t frame_index) noexcept + void DescriptorSet::ReturnDescriptorSetToPool() { - MLX_PROFILE_FUNCTION(); - Assert(!p_pools_manager, "invalid pools manager"); - m_set[frame_index] = p_pools_manager->GetAvailablePool().AllocateDescriptorSet(frame_index, m_set_layout); + m_pool.ReturnDescriptorSet(shared_from_this()); } } diff --git a/runtime/Sources/Renderer/Image.cpp b/runtime/Sources/Renderer/Image.cpp index 9b8a0b7..ab4c58b 100644 --- a/runtime/Sources/Renderer/Image.cpp +++ b/runtime/Sources/Renderer/Image.cpp @@ -2,6 +2,7 @@ #include #include #include +#include #define STB_IMAGE_IMPLEMENTATION #ifdef MLX_COMPILER_GCC @@ -80,14 +81,7 @@ namespace mlx bool is_single_time_cmd_buffer = (cmd == VK_NULL_HANDLE); if(is_single_time_cmd_buffer) cmd = kvfCreateCommandBuffer(RenderCore::Get().GetDevice()); - KvfImageType kvf_type = KVF_IMAGE_OTHER; - switch(m_type) - { - case ImageType::Color: kvf_type = KVF_IMAGE_COLOR; break; - case ImageType::Depth: kvf_type = KVF_IMAGE_DEPTH; break; - default: break; - } - kvfTransitionImageLayout(RenderCore::Get().GetDevice(), m_image, kvf_type, cmd, m_format, m_layout, new_layout, is_single_time_cmd_buffer); + kvfTransitionImageLayout(RenderCore::Get().GetDevice(), m_image, KVF_IMAGE_COLOR, cmd, m_format, m_layout, new_layout, is_single_time_cmd_buffer); m_layout = new_layout; } @@ -109,18 +103,9 @@ namespace mlx VkImageLayout old_layout = m_layout; TransitionLayout(VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, cmd); - if(m_type == ImageType::Color) - { - subresource_range.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT; - VkClearColorValue clear_color = VkClearColorValue({ { color.x, color.y, color.z, color.w } }); - RenderCore::Get().vkCmdClearColorImage(cmd, m_image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, &clear_color, 1, &subresource_range); - } - else if(m_type == ImageType::Depth) - { - VkClearDepthStencilValue clear_depth_stencil = { 1.0f, 1 }; - subresource_range.aspectMask = 
VK_IMAGE_ASPECT_DEPTH_BIT; - RenderCore::Get().vkCmdClearDepthStencilImage(cmd, m_image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, &clear_depth_stencil, 1, &subresource_range); - } + subresource_range.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT; + VkClearColorValue clear_color = VkClearColorValue({ { color.x, color.y, color.z, color.w } }); + RenderCore::Get().vkCmdClearColorImage(cmd, m_image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, &clear_color, 1, &subresource_range); TransitionLayout(old_layout, cmd); if(is_single_time_cmd_buffer) @@ -189,6 +174,13 @@ namespace mlx TransitionLayout(VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL); } + void Texture::Destroy() noexcept + { + if(m_staging_buffer.has_value()) + m_staging_buffer->Destroy(); + Image::Destroy(); + } + void Texture::SetPixel(int x, int y, std::uint32_t color) noexcept { MLX_PROFILE_FUNCTION(); @@ -266,6 +258,9 @@ namespace mlx MLX_PROFILE_FUNCTION(); std::string filename = file.string(); + if(file.stem() == "banana") + Message("banana, banana, banana, banana, terracotta banana terracotta, terracotta pie"); + if(!std::filesystem::exists(file)) { Error("Image : file not found %", file); @@ -280,14 +275,15 @@ namespace mlx int dummy_h; int channels; std::uint8_t* data = stbi_load(filename.c_str(), (w == nullptr ? &dummy_w : w), (h == nullptr ? &dummy_h : h), &channels, 4); + + CallOnExit defer([=]() { stbi_image_free(data); }); + CPUBuffer buffer((w == nullptr ? dummy_w : *w) * (h == nullptr ? dummy_h : *h) * 4); std::memcpy(buffer.GetData(), data, buffer.GetSize()); - Texture* texture; - try { texture = new Texture(buffer, (w == nullptr ? dummy_w : *w), (h == nullptr ? dummy_h : *h), VK_FORMAT_R8G8B8A8_SRGB, false, std::move(filename)); } - catch(...) { return NULL; } - - stbi_image_free(data); + Texture* texture; + try { texture = new Texture(std::move(buffer), (w == nullptr ? dummy_w : *w), (h == nullptr ? dummy_h : *h), VK_FORMAT_R8G8B8A8_SRGB, false, std::move(filename)); } + catch(...) 
{ return nullptr; } return texture; } } diff --git a/runtime/Sources/Renderer/Pipelines/Shader.cpp b/runtime/Sources/Renderer/Pipelines/Shader.cpp index 0e527f2..26c7d91 100644 --- a/runtime/Sources/Renderer/Pipelines/Shader.cpp +++ b/runtime/Sources/Renderer/Pipelines/Shader.cpp @@ -11,7 +11,6 @@ namespace mlx { case ShaderType::Vertex : m_stage = VK_SHADER_STAGE_VERTEX_BIT; break; case ShaderType::Fragment : m_stage = VK_SHADER_STAGE_FRAGMENT_BIT; break; - case ShaderType::Compute : m_stage = VK_SHADER_STAGE_COMPUTE_BIT; break; default : FatalError("wtf"); break; } diff --git a/runtime/Sources/Renderer/RenderCore.cpp b/runtime/Sources/Renderer/RenderCore.cpp index 872406e..58d3d54 100644 --- a/runtime/Sources/Renderer/RenderCore.cpp +++ b/runtime/Sources/Renderer/RenderCore.cpp @@ -47,6 +47,8 @@ namespace mlx RenderCore::RenderCore() { + if(s_instance != nullptr) + return; s_instance = this; loader = std::make_unique(); @@ -193,7 +195,10 @@ namespace mlx RenderCore::~RenderCore() { + if(s_instance == nullptr) + return; WaitDeviceIdle(); + m_descriptor_pool_manager.Destroy(); m_allocator.Destroy(); kvfDestroyDevice(m_device); DebugLog("Vulkan : logical device destroyed"); diff --git a/runtime/Sources/Renderer/RenderPasses/2DPass.cpp b/runtime/Sources/Renderer/RenderPasses/2DPass.cpp index 642804e..6283a18 100644 --- a/runtime/Sources/Renderer/RenderPasses/2DPass.cpp +++ b/runtime/Sources/Renderer/RenderPasses/2DPass.cpp @@ -45,23 +45,15 @@ namespace mlx }; p_fragment_shader = std::make_shared(fragment_shader_code, ShaderType::Fragment, std::move(fragment_shader_layout)); - func::function functor = [this, &renderer](const EventBase& event) + func::function functor = [this](const EventBase& event) { if(event.What() == Event::ResizeEventCode) m_pipeline.Destroy(); - if(event.What() == Event::DescriptorPoolResetEventCode) - { - std::uint32_t frame_index = renderer.GetCurrentFrameIndex(); - p_texture_set->Reallocate(frame_index); - p_viewer_data_set->Reallocate(frame_index); - p_viewer_data_set->SetUniformBuffer(frame_index, 0, p_viewer_data_buffer->Get(frame_index)); - p_viewer_data_set->Update(frame_index); - } }; EventBus::RegisterListener({ functor, "__MlxRender2DPass" }); - p_viewer_data_set = std::make_shared(renderer.GetDescriptorPoolManager(), p_vertex_shader->GetShaderLayout().set_layouts[0].second, p_vertex_shader->GetPipelineLayout().set_layouts[0], ShaderType::Vertex); - p_texture_set = std::make_shared(renderer.GetDescriptorPoolManager(), p_fragment_shader->GetShaderLayout().set_layouts[0].second, p_fragment_shader->GetPipelineLayout().set_layouts[0], ShaderType::Fragment); + p_viewer_data_set = RenderCore::Get().GetDescriptorPoolManager().GetAvailablePool().RequestDescriptorSet(p_vertex_shader->GetShaderLayout().set_layouts[0].second, ShaderType::Vertex); + p_texture_set = RenderCore::Get().GetDescriptorPoolManager().GetAvailablePool().RequestDescriptorSet(p_fragment_shader->GetShaderLayout().set_layouts[0].second, ShaderType::Fragment); p_viewer_data_buffer = std::make_shared(); p_viewer_data_buffer->Init(sizeof(ViewerData), "mlx_2d_pass_viewer_data"); @@ -106,7 +98,7 @@ namespace mlx { // Check every textures and update modified ones to GPU before starting the render pass if(!sprite->IsSetInit()) - sprite->UpdateDescriptorSet(*p_texture_set); + sprite->UpdateDescriptorSet(p_texture_set); Verify((bool)sprite->GetTexture(), "a sprite has no texture attached (internal mlx issue, please report to the devs)"); sprite->GetTexture()->Update(cmd); } @@ -136,8 +128,10 @@ namespace mlx 
m_pipeline.Destroy(); p_vertex_shader.reset(); p_fragment_shader.reset(); + p_viewer_data_set->ReturnDescriptorSetToPool(); p_viewer_data_set.reset(); p_viewer_data_buffer->Destroy(); + p_texture_set->ReturnDescriptorSetToPool(); p_texture_set.reset(); } } diff --git a/runtime/Sources/Renderer/RenderPasses/FinalPass.cpp b/runtime/Sources/Renderer/RenderPasses/FinalPass.cpp index a0b42af..ac9b73e 100644 --- a/runtime/Sources/Renderer/RenderPasses/FinalPass.cpp +++ b/runtime/Sources/Renderer/RenderPasses/FinalPass.cpp @@ -31,16 +31,14 @@ namespace mlx }; p_fragment_shader = std::make_shared(fragment_shader_code, ShaderType::Fragment, std::move(fragment_shader_layout)); - func::function functor = [this, &renderer](const EventBase& event) + func::function functor = [this](const EventBase& event) { if(event.What() == Event::ResizeEventCode) m_pipeline.Destroy(); - if(event.What() == Event::DescriptorPoolResetEventCode) - p_set->Reallocate(renderer.GetCurrentFrameIndex()); }; EventBus::RegisterListener({ functor, "__MlxFinalPass" }); - p_set = std::make_shared(renderer.GetDescriptorPoolManager(), p_fragment_shader->GetShaderLayout().set_layouts[0].second, p_fragment_shader->GetPipelineLayout().set_layouts[0], ShaderType::Fragment); + p_set = RenderCore::Get().GetDescriptorPoolManager().GetAvailablePool().RequestDescriptorSet(p_fragment_shader->GetShaderLayout().set_layouts[0].second, ShaderType::Fragment); } void FinalPass::Pass([[maybe_unused]] Scene& scene, Renderer& renderer, Texture& render_target) @@ -80,6 +78,7 @@ namespace mlx m_pipeline.Destroy(); p_vertex_shader.reset(); p_fragment_shader.reset(); + p_set->ReturnDescriptorSetToPool(); p_set.reset(); } } diff --git a/runtime/Sources/Renderer/Renderer.cpp b/runtime/Sources/Renderer/Renderer.cpp index d128f79..31b12a9 100644 --- a/runtime/Sources/Renderer/Renderer.cpp +++ b/runtime/Sources/Renderer/Renderer.cpp @@ -18,11 +18,6 @@ namespace mlx { Event What() const override { return Event::FrameBeginEventCode; } }; - - struct DescriptorPoolResetEventBroadcast : public EventBase - { - Event What() const override { return Event::DescriptorPoolResetEventCode; } - }; } void Renderer::Init(NonOwningPtr window) @@ -59,13 +54,6 @@ namespace mlx { MLX_PROFILE_FUNCTION(); kvfWaitForFence(RenderCore::Get().GetDevice(), m_cmd_fences[m_current_frame_index]); - static bool first_run = true; - if(!first_run) - { - m_descriptor_pool_manager.ResetPoolsFromFrameIndex(m_current_frame_index); - EventBus::SendBroadcast(Internal::DescriptorPoolResetEventBroadcast{}); - } - first_run = false; VkResult result = RenderCore::Get().vkAcquireNextImageKHR(RenderCore::Get().GetDevice(), m_swapchain, UINT64_MAX, m_image_available_semaphores[m_current_frame_index], VK_NULL_HANDLE, &m_swapchain_image_index); if(result == VK_ERROR_OUT_OF_DATE_KHR) { @@ -150,7 +138,6 @@ namespace mlx DebugLog("Vulkan : fence destroyed"); } - m_descriptor_pool_manager.Destroy(); DestroySwapchain(); RenderCore::Get().vkDestroySurfaceKHR(RenderCore::Get().GetInstance(), m_surface, nullptr); DebugLog("Vulkan : surface destroyed"); From 8f757cab08a0065cbd8c800c0c7d2ca4083deddd Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Mon, 21 Oct 2024 19:45:33 +0200 Subject: [PATCH 046/131] yes --- example/main.c | 2 +- runtime/Includes/Core/Graphics.h | 2 - runtime/Includes/Core/Graphics.inl | 11 +----- runtime/Includes/Core/Memory.h | 2 +- runtime/Includes/Graphics/Font.h | 37 +++++++++++++++++++ runtime/Includes/Graphics/Mesh.h | 32 ++-------------- runtime/Includes/Graphics/Mesh.inl | 20 ++++++++++ 
runtime/Includes/Graphics/PutPixelManager.h | 4 +- runtime/Includes/Graphics/Sprite.h | 1 + runtime/Includes/PreCompiled.h | 2 +- runtime/Includes/Renderer/Image.h | 4 ++ .../Includes/Renderer/RenderPasses/2DPass.h | 2 +- .../Renderer/RenderPasses/FinalPass.h | 2 +- .../Includes/Renderer/RenderPasses/Passes.h | 2 + runtime/Sources/Graphics/Font.cpp | 7 ++++ runtime/Sources/Graphics/PutPixelManager.cpp | 26 +++++-------- runtime/Sources/Graphics/Scene.cpp | 14 +++++++ runtime/Sources/Graphics/Sprite.cpp | 9 +++++ .../Sources/Renderer/Pipelines/Graphics.cpp | 2 +- .../Sources/Renderer/RenderPasses/2DPass.cpp | 2 +- .../Renderer/RenderPasses/FinalPass.cpp | 2 +- .../Sources/Renderer/RenderPasses/Passes.cpp | 4 +- 22 files changed, 121 insertions(+), 68 deletions(-) create mode 100644 runtime/Includes/Graphics/Font.h create mode 100644 runtime/Includes/Graphics/Mesh.inl create mode 100644 runtime/Sources/Graphics/Font.cpp diff --git a/example/main.c b/example/main.c index f243767..8b04bf6 100644 --- a/example/main.c +++ b/example/main.c @@ -69,7 +69,7 @@ void* create_image(mlx_t* mlx) pixel[1] = j; pixel[2] = k; pixel[3] = 0x99; - mlx_set_image_pixel(mlx->mlx, img, j, k, *((int *)pixel)); + mlx_set_image_pixel(mlx->mlx, img, j, k, *((int*)pixel)); } } return img; diff --git a/runtime/Includes/Core/Graphics.h b/runtime/Includes/Core/Graphics.h index 6a4e5e6..9da8029 100644 --- a/runtime/Includes/Core/Graphics.h +++ b/runtime/Includes/Core/Graphics.h @@ -49,8 +49,6 @@ namespace mlx int m_id; bool m_has_window; - - bool m_insert_new_pixel_put_texture = false; }; } diff --git a/runtime/Includes/Core/Graphics.inl b/runtime/Includes/Core/Graphics.inl index f0db34e..185ebfa 100644 --- a/runtime/Includes/Core/Graphics.inl +++ b/runtime/Includes/Core/Graphics.inl @@ -8,21 +8,18 @@ namespace mlx MLX_PROFILE_FUNCTION(); p_scene->ResetSprites(); m_put_pixel_manager.ResetRenderData(); - m_insert_new_pixel_put_texture = true; m_draw_layer = 0; } void GraphicsSupport::PixelPut(int x, int y, std::uint32_t color) noexcept { MLX_PROFILE_FUNCTION(); - NonOwningPtr texture = m_put_pixel_manager.DrawPixel(x, y, m_insert_new_pixel_put_texture, color); + NonOwningPtr texture = m_put_pixel_manager.DrawPixel(x, y, m_draw_layer, color); if(texture) { Sprite& new_sprite = p_scene->CreateSprite(texture); new_sprite.SetPosition(Vec2f{ 0.0f, 0.0f }); - m_draw_layer++; } - m_insert_new_pixel_put_texture = false; } void GraphicsSupport::StringPut(int x, int y, std::uint32_t color, std::string str) @@ -42,14 +39,10 @@ namespace mlx { Sprite& new_sprite = p_scene->CreateSprite(texture); new_sprite.SetPosition(Vec2f{ static_cast(x), static_cast(y) }); - m_insert_new_pixel_put_texture = true; + m_draw_layer++; } else if(!p_scene->IsTextureAtGivenDrawLayer(texture, m_draw_layer)) - { p_scene->BringToFront(std::move(sprite)); - m_insert_new_pixel_put_texture = true; - } - m_draw_layer++; } void GraphicsSupport::LoadFont(const std::filesystem::path& filepath, float scale) diff --git a/runtime/Includes/Core/Memory.h b/runtime/Includes/Core/Memory.h index 7597f01..174be18 100644 --- a/runtime/Includes/Core/Memory.h +++ b/runtime/Includes/Core/Memory.h @@ -20,7 +20,7 @@ namespace mlx private: static MemManager* s_instance; - inline static std::list s_blocks; + inline static std::vector s_blocks; }; } diff --git a/runtime/Includes/Graphics/Font.h b/runtime/Includes/Graphics/Font.h new file mode 100644 index 0000000..55b6927 --- /dev/null +++ b/runtime/Includes/Graphics/Font.h @@ -0,0 +1,37 @@ +#ifndef __MLX_FONT__ +#define __MLX_FONT__ 
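+
+// A TTF font loaded from a file path or from raw bytes in memory, baked into a texture atlas of glyphs;
+// two Font instances compare equal when their name and scale match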
+ +#include + +namespace mlx +{ + class Font + { + public: + Font(const std::filesystem::path& path, float scale) : m_build_data(path), m_name(path.string()), m_scale(scale) {} + Font(const std::string& name, const std::vector& ttf_data, float scale) : m_build_data(ttf_data), m_name(name), m_scale(scale) {} + + void Destroy(); + + inline const std::string& GetName() const { return m_name; } + inline float GetScale() const noexcept { return m_scale; } + inline const std::array& GetCharData() const { return m_cdata; } + inline const Texture& GetTexture() const noexcept { return m_atlas; } + inline bool operator==(const Font& rhs) const { return rhs.m_name == m_name && rhs.m_scale == m_scale; } + inline bool operator!=(const Font& rhs) const { return rhs.m_name != m_name || rhs.m_scale != m_scale; } + + inline ~Font() { Destroy(); } + + private: + void BuildFont(); + + private: + std::array m_cdata; + Texture m_atlas; + std::variant> m_build_data; + std::string m_name; + float m_scale; + }; +} + +#endif diff --git a/runtime/Includes/Graphics/Mesh.h b/runtime/Includes/Graphics/Mesh.h index fc55023..627b808 100644 --- a/runtime/Includes/Graphics/Mesh.h +++ b/runtime/Includes/Graphics/Mesh.h @@ -16,20 +16,7 @@ namespace mlx IndexBuffer ibo; std::size_t triangle_count = 0; - inline SubMesh(const std::vector& vertices, const std::vector& indices) - { - CPUBuffer vb(vertices.size() * sizeof(Vertex)); - std::memcpy(vb.GetData(), vertices.data(), vb.GetSize()); - vbo.Init(vb.GetSize(), 0, "mlx_mesh"); - vbo.SetData(std::move(vb)); - - CPUBuffer ib(indices.size() * sizeof(std::uint32_t)); - std::memcpy(ib.GetData(), indices.data(), ib.GetSize()); - ibo.Init(ib.GetSize(), 0, "mlx_mesh"); - ibo.SetData(std::move(ib)); - - triangle_count = vertices.size() / 3; - } + inline SubMesh(const std::vector& vertices, const std::vector& indices); }; public: @@ -48,21 +35,8 @@ namespace mlx private: std::vector m_sub_meshes; }; - - class MeshRegistry - { - public: - MeshRegistry() = default; - - inline void RegisterMesh(std::shared_ptr mesh); - inline void UnregisterMesh(std::shared_ptr mesh); - inline bool IsMeshKnown(std::shared_ptr mesh); - - ~MeshRegistry() = default; - - private: - std::unordered_set> m_mesh_registry; - }; } +#include + #endif diff --git a/runtime/Includes/Graphics/Mesh.inl b/runtime/Includes/Graphics/Mesh.inl new file mode 100644 index 0000000..2713ff0 --- /dev/null +++ b/runtime/Includes/Graphics/Mesh.inl @@ -0,0 +1,20 @@ +#pragma once +#include + +namespace mlx +{ + Mesh::SubMesh::SubMesh(const std::vector& vertices, const std::vector& indices) + { + CPUBuffer vb(vertices.size() * sizeof(Vertex)); + std::memcpy(vb.GetData(), vertices.data(), vb.GetSize()); + vbo.Init(vb.GetSize(), 0, "mlx_mesh"); + vbo.SetData(std::move(vb)); + + CPUBuffer ib(indices.size() * sizeof(std::uint32_t)); + std::memcpy(ib.GetData(), indices.data(), ib.GetSize()); + ibo.Init(ib.GetSize(), 0, "mlx_mesh"); + ibo.SetData(std::move(ib)); + + triangle_count = vertices.size() / 3; + } +} diff --git a/runtime/Includes/Graphics/PutPixelManager.h b/runtime/Includes/Graphics/PutPixelManager.h index 5249045..5f76609 100644 --- a/runtime/Includes/Graphics/PutPixelManager.h +++ b/runtime/Includes/Graphics/PutPixelManager.h @@ -11,13 +11,13 @@ namespace mlx PutPixelManager(NonOwningPtr renderer) : p_renderer(renderer) {} // Return a valid pointer when a new texture has been created - NonOwningPtr DrawPixel(int x, int y, bool insert_new_texture, std::uint32_t color); + NonOwningPtr DrawPixel(int x, int y, std::uint64_t 
draw_layer, std::uint32_t color); void ResetRenderData(); ~PutPixelManager(); private: - std::list m_textures; + std::unordered_map m_textures; NonOwningPtr p_renderer; }; } diff --git a/runtime/Includes/Graphics/Sprite.h b/runtime/Includes/Graphics/Sprite.h index 8efcf8d..49bde83 100644 --- a/runtime/Includes/Graphics/Sprite.h +++ b/runtime/Includes/Graphics/Sprite.h @@ -15,6 +15,7 @@ namespace mlx public: Sprite(NonOwningPtr texture); + Sprite(std::shared_ptr mesh, NonOwningPtr texture); inline void SetColor(Vec4f color) noexcept { m_color = color; } inline void SetPosition(Vec2f position) noexcept { m_position = position; } diff --git a/runtime/Includes/PreCompiled.h b/runtime/Includes/PreCompiled.h index fde27c8..11b2a78 100644 --- a/runtime/Includes/PreCompiled.h +++ b/runtime/Includes/PreCompiled.h @@ -54,7 +54,7 @@ #include #endif -#if defined(MLX_PLAT_LINUX) +#ifdef MLX_PLAT_LINUX #include // sincos #endif diff --git a/runtime/Includes/Renderer/Image.h b/runtime/Includes/Renderer/Image.h index d787e9f..b262224 100644 --- a/runtime/Includes/Renderer/Image.h +++ b/runtime/Includes/Renderer/Image.h @@ -48,6 +48,10 @@ namespace mlx [[nodiscard]] MLX_FORCEINLINE bool IsInit() const noexcept { return m_image != VK_NULL_HANDLE; } [[nodiscard]] MLX_FORCEINLINE ImageType GetType() const noexcept { return m_type; } + #ifdef DEBUG + [[nodiscard]] MLX_FORCEINLINE const std::string& GetDebugName() const { return m_debug_name; } + #endif + virtual ~Image() = default; protected: diff --git a/runtime/Includes/Renderer/RenderPasses/2DPass.h b/runtime/Includes/Renderer/RenderPasses/2DPass.h index 8486818..a9cc015 100644 --- a/runtime/Includes/Renderer/RenderPasses/2DPass.h +++ b/runtime/Includes/Renderer/RenderPasses/2DPass.h @@ -11,7 +11,7 @@ namespace mlx { public: Render2DPass() = default; - void Init(class Renderer& renderer); + void Init(); void Pass(class Scene& scene, class Renderer& renderer, class Texture& render_target); void Destroy(); ~Render2DPass() = default; diff --git a/runtime/Includes/Renderer/RenderPasses/FinalPass.h b/runtime/Includes/Renderer/RenderPasses/FinalPass.h index e1baa1f..ead2c3a 100644 --- a/runtime/Includes/Renderer/RenderPasses/FinalPass.h +++ b/runtime/Includes/Renderer/RenderPasses/FinalPass.h @@ -11,7 +11,7 @@ namespace mlx { public: FinalPass() = default; - void Init(class Renderer& renderer); + void Init(); void Pass(class Scene& scene, class Renderer& renderer, class Texture& render_target); void Destroy(); ~FinalPass() = default; diff --git a/runtime/Includes/Renderer/RenderPasses/Passes.h b/runtime/Includes/Renderer/RenderPasses/Passes.h index 3888a23..56aa74d 100644 --- a/runtime/Includes/Renderer/RenderPasses/Passes.h +++ b/runtime/Includes/Renderer/RenderPasses/Passes.h @@ -11,9 +11,11 @@ namespace mlx { public: RenderPasses() = default; + void Init(class Renderer& renderer); void Pass(class Scene& scene, class Renderer& renderer); void Destroy(); + ~RenderPasses() = default; private: diff --git a/runtime/Sources/Graphics/Font.cpp b/runtime/Sources/Graphics/Font.cpp new file mode 100644 index 0000000..294bc9a --- /dev/null +++ b/runtime/Sources/Graphics/Font.cpp @@ -0,0 +1,7 @@ +#include + +#include + +namespace mlx +{ +} diff --git a/runtime/Sources/Graphics/PutPixelManager.cpp b/runtime/Sources/Graphics/PutPixelManager.cpp index b41e047..0523164 100644 --- a/runtime/Sources/Graphics/PutPixelManager.cpp +++ b/runtime/Sources/Graphics/PutPixelManager.cpp @@ -5,26 +5,20 @@ namespace mlx { - NonOwningPtr PutPixelManager::DrawPixel(int x, int y, bool 
insert_new_texture, std::uint32_t color) + NonOwningPtr PutPixelManager::DrawPixel(int x, int y, std::uint64_t draw_layer, std::uint32_t color) { Verify((bool)p_renderer, "invalid renderer pointer"); VkExtent2D swapchain_extent = kvfGetSwapchainImagesSize(p_renderer->GetSwapchain()); - if(insert_new_texture) - { - #ifdef DEBUG - Texture& texture = m_textures.emplace_back(CPUBuffer{}, swapchain_extent.width, swapchain_extent.height, VK_FORMAT_R8G8B8A8_SRGB, false, "mlx_put_pixel_layer_" + std::to_string(m_textures.size())); - #else - Texture& texture = m_textures.emplace_back(CPUBuffer{}, swapchain_extent.width, swapchain_extent.height, VK_FORMAT_R8G8B8A8_SRGB, false, std::string_view{}); - #endif - texture.Clear(VK_NULL_HANDLE, Vec4f{ 0.0f }); - } - if(!m_textures.empty()) - { - m_textures.back().SetPixel(x, y, color); - return (insert_new_texture ? &m_textures.back() : nullptr); - } - return nullptr; + #ifdef DEBUG + auto res = m_textures.try_emplace(draw_layer, CPUBuffer{}, swapchain_extent.width, swapchain_extent.height, VK_FORMAT_R8G8B8A8_SRGB, false, "mlx_put_pixel_layer_" + std::to_string(draw_layer)); + #else + auto res = m_textures.try_emplace(draw_layer, CPUBuffer{}, swapchain_extent.width, swapchain_extent.height, VK_FORMAT_R8G8B8A8_SRGB, false, std::string_view{}); + #endif + if(res.second) + res.first->second.Clear(VK_NULL_HANDLE, Vec4f{ 0.0f }); + res.first->second.SetPixel(x, y, color); + return (res.second ? &res.first->second : nullptr); } void PutPixelManager::ResetRenderData() diff --git a/runtime/Sources/Graphics/Scene.cpp b/runtime/Sources/Graphics/Scene.cpp index 1a76ec6..6335974 100644 --- a/runtime/Sources/Graphics/Scene.cpp +++ b/runtime/Sources/Graphics/Scene.cpp @@ -8,6 +8,19 @@ namespace mlx Sprite& Scene::CreateSprite(NonOwningPtr texture) noexcept { MLX_PROFILE_FUNCTION(); + Verify((bool)texture, "Scene: invalid texture (internal mlx issue, please report to devs)"); + + #pragma omp parallel for + for(auto& sprite : m_sprites) + { + if(texture->GetWidth() == sprite->GetTexture()->GetWidth() && texture->GetHeight() == sprite->GetTexture()->GetHeight()) + { + std::shared_ptr new_sprite = std::make_shared(sprite->GetMesh(), texture); + m_sprites.push_back(new_sprite); + return *new_sprite; + } + } + std::shared_ptr sprite = std::make_shared(texture); m_sprites.push_back(sprite); return *sprite; @@ -52,6 +65,7 @@ namespace mlx bool Scene::IsTextureAtGivenDrawLayer(NonOwningPtr texture, std::uint64_t draw_layer) const { + MLX_PROFILE_FUNCTION(); if(draw_layer >= m_sprites.size()) return false; return m_sprites[draw_layer]->GetTexture() == texture; diff --git a/runtime/Sources/Graphics/Sprite.cpp b/runtime/Sources/Graphics/Sprite.cpp index e9fd03f..caa0c64 100644 --- a/runtime/Sources/Graphics/Sprite.cpp +++ b/runtime/Sources/Graphics/Sprite.cpp @@ -44,4 +44,13 @@ namespace mlx p_mesh = CreateQuad(0, 0, texture->GetWidth(), texture->GetHeight()); p_texture = texture; } + + Sprite::Sprite(std::shared_ptr mesh, NonOwningPtr texture) + { + MLX_PROFILE_FUNCTION(); + Verify((bool)texture, "Sprite: invalid texture (internal mlx issue, please report to devs)"); + Verify((bool)mesh, "Sprite: invalid mesh (internal mlx issue, please report to devs)"); + p_mesh = mesh; + p_texture = texture; + } } diff --git a/runtime/Sources/Renderer/Pipelines/Graphics.cpp b/runtime/Sources/Renderer/Pipelines/Graphics.cpp index 7013dfd..401a877 100644 --- a/runtime/Sources/Renderer/Pipelines/Graphics.cpp +++ b/runtime/Sources/Renderer/Pipelines/Graphics.cpp @@ -7,7 +7,7 @@ namespace mlx { - 
void GraphicPipeline::Init(const GraphicPipelineDescriptor& descriptor, std::string_view debug_name) + void GraphicPipeline::Init(const GraphicPipelineDescriptor& descriptor, [[maybe_unused]] std::string_view debug_name) { MLX_PROFILE_FUNCTION(); if(!descriptor.vertex_shader || !descriptor.fragment_shader) diff --git a/runtime/Sources/Renderer/RenderPasses/2DPass.cpp b/runtime/Sources/Renderer/RenderPasses/2DPass.cpp index 6283a18..1cd2e25 100644 --- a/runtime/Sources/Renderer/RenderPasses/2DPass.cpp +++ b/runtime/Sources/Renderer/RenderPasses/2DPass.cpp @@ -14,7 +14,7 @@ namespace mlx Vec4f position; }; - void Render2DPass::Init(Renderer& renderer) + void Render2DPass::Init() { MLX_PROFILE_FUNCTION(); diff --git a/runtime/Sources/Renderer/RenderPasses/FinalPass.cpp b/runtime/Sources/Renderer/RenderPasses/FinalPass.cpp index ac9b73e..7464c6e 100644 --- a/runtime/Sources/Renderer/RenderPasses/FinalPass.cpp +++ b/runtime/Sources/Renderer/RenderPasses/FinalPass.cpp @@ -7,7 +7,7 @@ namespace mlx { - void FinalPass::Init(Renderer& renderer) + void FinalPass::Init() { MLX_PROFILE_FUNCTION(); ShaderLayout vertex_shader_layout( diff --git a/runtime/Sources/Renderer/RenderPasses/Passes.cpp b/runtime/Sources/Renderer/RenderPasses/Passes.cpp index ce31c2c..9f5dc12 100644 --- a/runtime/Sources/Renderer/RenderPasses/Passes.cpp +++ b/runtime/Sources/Renderer/RenderPasses/Passes.cpp @@ -7,8 +7,8 @@ namespace mlx { void RenderPasses::Init(Renderer& renderer) { - m_2Dpass.Init(renderer); - m_final.Init(renderer); + m_2Dpass.Init(); + m_final.Init(); func::function functor = [this, renderer](const EventBase& event) { if(event.What() == Event::ResizeEventCode) From 05d07e5b723731419d7f06512583b05727e1aa3f Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Mon, 21 Oct 2024 21:58:04 +0200 Subject: [PATCH 047/131] bozoman solution --- runtime/Includes/Core/Graphics.inl | 1 + 1 file changed, 1 insertion(+) diff --git a/runtime/Includes/Core/Graphics.inl b/runtime/Includes/Core/Graphics.inl index 185ebfa..a860896 100644 --- a/runtime/Includes/Core/Graphics.inl +++ b/runtime/Includes/Core/Graphics.inl @@ -9,6 +9,7 @@ namespace mlx p_scene->ResetSprites(); m_put_pixel_manager.ResetRenderData(); m_draw_layer = 0; + PixelPut(0, 0, 0x00000000); // bozoman solution FIXME } void GraphicsSupport::PixelPut(int x, int y, std::uint32_t color) noexcept From e7cb78557f4a680e099f94134e85d70cd7128e02 Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Mon, 21 Oct 2024 23:21:50 +0200 Subject: [PATCH 048/131] working on fonts --- runtime/Includes/Core/Application.h | 2 + runtime/Includes/Embedded/2DFragment.spv.h | 26 ++++--- .../Includes/Embedded/ScreenFragment.spv.h | 74 +++++++++++-------- runtime/Includes/Graphics/Font.h | 21 +++++- runtime/Includes/Graphics/Font.inl | 20 +++++ runtime/Sources/Core/Application.cpp | 1 - runtime/Sources/Graphics/Font.cpp | 52 +++++++++++++ 7 files changed, 150 insertions(+), 46 deletions(-) create mode 100644 runtime/Includes/Graphics/Font.inl diff --git a/runtime/Includes/Core/Application.h b/runtime/Includes/Core/Application.h index 2233650..76a3e45 100644 --- a/runtime/Includes/Core/Application.h +++ b/runtime/Includes/Core/Application.h @@ -7,6 +7,7 @@ #include #include #include +#include namespace mlx { @@ -54,6 +55,7 @@ namespace mlx std::unique_ptr p_sdl_manager; FpsManager m_fps; Inputs m_in; + FontRegistry m_font_registry; ImageRegistry m_image_registry; std::vector> m_graphics; std::function f_loop_hook; diff --git a/runtime/Includes/Embedded/2DFragment.spv.h 
b/runtime/Includes/Embedded/2DFragment.spv.h index f71f131..5792158 100644 --- a/runtime/Includes/Embedded/2DFragment.spv.h +++ b/runtime/Includes/Embedded/2DFragment.spv.h @@ -1,4 +1,4 @@ -3,2,35,7,0,0,1,0,39,0,0,0,48,0,0,0,0,0,0,0,17,0,2,0,1,0,0,0,14,0, +3,2,35,7,0,0,1,0,39,0,0,0,51,0,0,0,0,0,0,0,17,0,2,0,1,0,0,0,14,0, 3,0,0,0,0,0,1,0,0,0,15,0,8,0,4,0,0,0,28,0,0,0,109,97,105,110,0,0,0,0, 10,0,0,0,16,0,0,0,22,0,0,0,16,0,3,0,28,0,0,0,7,0,0,0,3,0,3,0,0,0, 0,0,100,0,0,0,5,0,4,0,19,0,0,0,86,101,114,116,79,117,116,0,6,0,5,0,19,0,0,0, @@ -29,14 +29,16 @@ 7,0,0,0,248,0,2,0,29,0,0,0,59,0,4,0,24,0,0,0,30,0,0,0,7,0,0,0,59,0, 4,0,20,0,0,0,31,0,0,0,7,0,0,0,65,0,5,0,13,0,0,0,32,0,0,0,31,0,0,0, 12,0,0,0,63,0,3,0,32,0,0,0,10,0,0,0,65,0,5,0,18,0,0,0,33,0,0,0,31,0, -0,0,17,0,0,0,63,0,3,0,33,0,0,0,16,0,0,0,61,0,4,0,3,0,0,0,34,0,0,0, -5,0,0,0,65,0,5,0,18,0,0,0,35,0,0,0,31,0,0,0,17,0,0,0,61,0,4,0,14,0, -0,0,36,0,0,0,35,0,0,0,87,0,5,0,8,0,0,0,37,0,0,0,34,0,0,0,36,0,0,0, -65,0,5,0,13,0,0,0,38,0,0,0,30,0,0,0,12,0,0,0,62,0,3,0,38,0,0,0,37,0, -0,0,65,0,5,0,13,0,0,0,42,0,0,0,30,0,0,0,12,0,0,0,61,0,4,0,8,0,0,0, -43,0,0,0,42,0,0,0,81,0,5,0,1,0,0,0,44,0,0,0,43,0,0,0,3,0,0,0,180,0, -5,0,27,0,0,0,45,0,0,0,44,0,0,0,26,0,0,0,247,0,3,0,39,0,0,0,0,0,0,0, -250,0,4,0,45,0,0,0,40,0,0,0,41,0,0,0,248,0,2,0,40,0,0,0,252,0,1,0,248,0, -2,0,41,0,0,0,249,0,2,0,39,0,0,0,248,0,2,0,39,0,0,0,61,0,4,0,23,0,0,0, -46,0,0,0,30,0,0,0,81,0,5,0,8,0,0,0,47,0,0,0,46,0,0,0,0,0,0,0,62,0, -3,0,22,0,0,0,47,0,0,0,253,0,1,0,56,0,1,0 +0,0,17,0,0,0,63,0,3,0,33,0,0,0,16,0,0,0,65,0,5,0,13,0,0,0,34,0,0,0, +31,0,0,0,12,0,0,0,61,0,4,0,8,0,0,0,35,0,0,0,34,0,0,0,61,0,4,0,3,0, +0,0,36,0,0,0,5,0,0,0,65,0,5,0,18,0,0,0,37,0,0,0,31,0,0,0,17,0,0,0, +61,0,4,0,14,0,0,0,38,0,0,0,37,0,0,0,87,0,5,0,8,0,0,0,39,0,0,0,36,0, +0,0,38,0,0,0,133,0,5,0,8,0,0,0,40,0,0,0,35,0,0,0,39,0,0,0,65,0,5,0, +13,0,0,0,41,0,0,0,30,0,0,0,12,0,0,0,62,0,3,0,41,0,0,0,40,0,0,0,65,0, +5,0,13,0,0,0,45,0,0,0,30,0,0,0,12,0,0,0,61,0,4,0,8,0,0,0,46,0,0,0, +45,0,0,0,81,0,5,0,1,0,0,0,47,0,0,0,46,0,0,0,3,0,0,0,180,0,5,0,27,0, +0,0,48,0,0,0,47,0,0,0,26,0,0,0,247,0,3,0,42,0,0,0,0,0,0,0,250,0,4,0, +48,0,0,0,43,0,0,0,44,0,0,0,248,0,2,0,43,0,0,0,252,0,1,0,248,0,2,0,44,0, +0,0,249,0,2,0,42,0,0,0,248,0,2,0,42,0,0,0,61,0,4,0,23,0,0,0,49,0,0,0, +30,0,0,0,81,0,5,0,8,0,0,0,50,0,0,0,49,0,0,0,0,0,0,0,62,0,3,0,22,0, +0,0,50,0,0,0,253,0,1,0,56,0,1,0 diff --git a/runtime/Includes/Embedded/ScreenFragment.spv.h b/runtime/Includes/Embedded/ScreenFragment.spv.h index 727a9d3..4b6ed37 100644 --- a/runtime/Includes/Embedded/ScreenFragment.spv.h +++ b/runtime/Includes/Embedded/ScreenFragment.spv.h @@ -1,31 +1,43 @@ -3,2,35,7,0,0,1,0,39,0,0,0,34,0,0,0,0,0,0,0,17,0,2,0,1,0,0,0,14,0, -3,0,0,0,0,0,1,0,0,0,15,0,7,0,4,0,0,0,21,0,0,0,109,97,105,110,0,0,0,0, -10,0,0,0,18,0,0,0,16,0,3,0,21,0,0,0,7,0,0,0,3,0,3,0,0,0,0,0,100,0, -0,0,5,0,4,0,14,0,0,0,86,101,114,116,79,117,116,0,6,0,4,0,14,0,0,0,0,0,0,0, -117,118,0,0,5,0,4,0,19,0,0,0,70,114,97,103,79,117,116,0,6,0,5,0,19,0,0,0,0,0, -0,0,99,111,108,111,114,0,0,0,5,0,5,0,5,0,0,0,117,95,116,101,120,116,117,114,101,0,0,0, -5,0,3,0,10,0,0,0,117,118,0,0,5,0,4,0,18,0,0,0,99,111,108,111,114,0,0,0,5,0, -4,0,21,0,0,0,109,97,105,110,0,0,0,0,71,0,4,0,5,0,0,0,33,0,0,0,0,0,0,0, -71,0,4,0,5,0,0,0,34,0,0,0,0,0,0,0,71,0,4,0,10,0,0,0,30,0,0,0,0,0, -0,0,71,0,4,0,18,0,0,0,30,0,0,0,0,0,0,0,72,0,5,0,14,0,0,0,0,0,0,0, -35,0,0,0,0,0,0,0,72,0,5,0,19,0,0,0,0,0,0,0,35,0,0,0,0,0,0,0,22,0, -3,0,1,0,0,0,32,0,0,0,25,0,9,0,2,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0, -0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,27,0,3,0,3,0,0,0,2,0,0,0,32,0, 
-4,0,4,0,0,0,0,0,0,0,3,0,0,0,19,0,2,0,6,0,0,0,33,0,3,0,7,0,0,0, -6,0,0,0,23,0,4,0,8,0,0,0,1,0,0,0,2,0,0,0,32,0,4,0,9,0,0,0,1,0, -0,0,8,0,0,0,21,0,4,0,11,0,0,0,32,0,0,0,1,0,0,0,43,0,4,0,11,0,0,0, -12,0,0,0,0,0,0,0,32,0,4,0,13,0,0,0,7,0,0,0,8,0,0,0,30,0,3,0,14,0, -0,0,8,0,0,0,32,0,4,0,15,0,0,0,7,0,0,0,14,0,0,0,23,0,4,0,16,0,0,0, -1,0,0,0,4,0,0,0,32,0,4,0,17,0,0,0,3,0,0,0,16,0,0,0,30,0,3,0,19,0, -0,0,16,0,0,0,32,0,4,0,20,0,0,0,7,0,0,0,19,0,0,0,32,0,4,0,31,0,0,0, -7,0,0,0,16,0,0,0,59,0,4,0,4,0,0,0,5,0,0,0,0,0,0,0,59,0,4,0,9,0, -0,0,10,0,0,0,1,0,0,0,59,0,4,0,17,0,0,0,18,0,0,0,3,0,0,0,54,0,5,0, -6,0,0,0,21,0,0,0,0,0,0,0,7,0,0,0,248,0,2,0,22,0,0,0,59,0,4,0,20,0, -0,0,23,0,0,0,7,0,0,0,59,0,4,0,15,0,0,0,24,0,0,0,7,0,0,0,65,0,5,0, -13,0,0,0,25,0,0,0,24,0,0,0,12,0,0,0,63,0,3,0,25,0,0,0,10,0,0,0,61,0, -4,0,3,0,0,0,26,0,0,0,5,0,0,0,65,0,5,0,13,0,0,0,27,0,0,0,24,0,0,0, -12,0,0,0,61,0,4,0,8,0,0,0,28,0,0,0,27,0,0,0,87,0,5,0,16,0,0,0,29,0, -0,0,26,0,0,0,28,0,0,0,65,0,5,0,31,0,0,0,30,0,0,0,23,0,0,0,12,0,0,0, -62,0,3,0,30,0,0,0,29,0,0,0,61,0,4,0,19,0,0,0,32,0,0,0,23,0,0,0,81,0, -5,0,16,0,0,0,33,0,0,0,32,0,0,0,0,0,0,0,62,0,3,0,18,0,0,0,33,0,0,0, -253,0,1,0,56,0,1,0 +3,2,35,7,0,0,1,0,39,0,0,0,52,0,0,0,0,0,0,0,17,0,2,0,1,0,0,0,11,0, +6,0,29,0,0,0,71,76,83,76,46,115,116,100,46,52,53,48,0,0,0,0,14,0,3,0,0,0,0,0, +1,0,0,0,15,0,7,0,4,0,0,0,31,0,0,0,109,97,105,110,0,0,0,0,15,0,0,0,23,0, +0,0,16,0,3,0,31,0,0,0,7,0,0,0,3,0,3,0,0,0,0,0,100,0,0,0,5,0,4,0, +19,0,0,0,86,101,114,116,79,117,116,0,6,0,4,0,19,0,0,0,0,0,0,0,117,118,0,0,5,0, +4,0,24,0,0,0,70,114,97,103,79,117,116,0,6,0,5,0,24,0,0,0,0,0,0,0,99,111,108,111, +114,0,0,0,5,0,5,0,5,0,0,0,117,95,116,101,120,116,117,114,101,0,0,0,5,0,3,0,15,0, +0,0,117,118,0,0,5,0,4,0,23,0,0,0,99,111,108,111,114,0,0,0,5,0,6,0,30,0,0,0, +76,105,110,101,97,114,84,111,115,82,71,66,0,0,0,0,5,0,4,0,31,0,0,0,109,97,105,110,0,0, +0,0,71,0,4,0,5,0,0,0,33,0,0,0,0,0,0,0,71,0,4,0,5,0,0,0,34,0,0,0, +0,0,0,0,71,0,4,0,15,0,0,0,30,0,0,0,0,0,0,0,71,0,4,0,23,0,0,0,30,0, +0,0,0,0,0,0,72,0,5,0,19,0,0,0,0,0,0,0,35,0,0,0,0,0,0,0,72,0,5,0, +24,0,0,0,0,0,0,0,35,0,0,0,0,0,0,0,22,0,3,0,1,0,0,0,32,0,0,0,25,0, +9,0,2,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0, +0,0,0,0,27,0,3,0,3,0,0,0,2,0,0,0,32,0,4,0,4,0,0,0,0,0,0,0,3,0, +0,0,23,0,4,0,6,0,0,0,1,0,0,0,3,0,0,0,32,0,4,0,7,0,0,0,7,0,0,0, +6,0,0,0,33,0,4,0,8,0,0,0,6,0,0,0,7,0,0,0,43,0,4,0,1,0,0,0,9,0, +0,0,46,186,232,62,44,0,6,0,6,0,0,0,10,0,0,0,9,0,0,0,9,0,0,0,9,0,0,0, +19,0,2,0,11,0,0,0,33,0,3,0,12,0,0,0,11,0,0,0,23,0,4,0,13,0,0,0,1,0, +0,0,2,0,0,0,32,0,4,0,14,0,0,0,1,0,0,0,13,0,0,0,21,0,4,0,16,0,0,0, +32,0,0,0,1,0,0,0,43,0,4,0,16,0,0,0,17,0,0,0,0,0,0,0,32,0,4,0,18,0, +0,0,7,0,0,0,13,0,0,0,30,0,3,0,19,0,0,0,13,0,0,0,32,0,4,0,20,0,0,0, +7,0,0,0,19,0,0,0,23,0,4,0,21,0,0,0,1,0,0,0,4,0,0,0,32,0,4,0,22,0, +0,0,3,0,0,0,21,0,0,0,30,0,3,0,24,0,0,0,21,0,0,0,32,0,4,0,25,0,0,0, +7,0,0,0,24,0,0,0,43,0,4,0,16,0,0,0,26,0,0,0,1,0,0,0,43,0,4,0,16,0, +0,0,27,0,0,0,2,0,0,0,43,0,4,0,1,0,0,0,28,0,0,0,0,0,128,63,32,0,4,0, +49,0,0,0,7,0,0,0,21,0,0,0,59,0,4,0,4,0,0,0,5,0,0,0,0,0,0,0,59,0, +4,0,14,0,0,0,15,0,0,0,1,0,0,0,59,0,4,0,22,0,0,0,23,0,0,0,3,0,0,0, +54,0,5,0,6,0,0,0,30,0,0,0,0,0,0,0,8,0,0,0,55,0,3,0,7,0,0,0,32,0, +0,0,248,0,2,0,33,0,0,0,61,0,4,0,6,0,0,0,34,0,0,0,32,0,0,0,12,0,7,0, +6,0,0,0,35,0,0,0,29,0,0,0,26,0,0,0,34,0,0,0,10,0,0,0,254,0,2,0,35,0, +0,0,56,0,1,0,54,0,5,0,11,0,0,0,31,0,0,0,0,0,0,0,12,0,0,0,248,0,2,0, +36,0,0,0,59,0,4,0,25,0,0,0,37,0,0,0,7,0,0,0,59,0,4,0,7,0,0,0,38,0, +0,0,7,0,0,0,59,0,4,0,20,0,0,0,39,0,0,0,7,0,0,0,65,0,5,0,18,0,0,0, 
+40,0,0,0,39,0,0,0,17,0,0,0,63,0,3,0,40,0,0,0,15,0,0,0,61,0,4,0,3,0, +0,0,41,0,0,0,5,0,0,0,65,0,5,0,18,0,0,0,42,0,0,0,39,0,0,0,17,0,0,0, +61,0,4,0,13,0,0,0,43,0,0,0,42,0,0,0,87,0,5,0,21,0,0,0,44,0,0,0,41,0, +0,0,43,0,0,0,79,0,8,0,6,0,0,0,45,0,0,0,44,0,0,0,44,0,0,0,0,0,0,0, +1,0,0,0,2,0,0,0,62,0,3,0,38,0,0,0,45,0,0,0,57,0,5,0,6,0,0,0,46,0, +0,0,30,0,0,0,38,0,0,0,80,0,5,0,21,0,0,0,47,0,0,0,46,0,0,0,28,0,0,0, +65,0,5,0,49,0,0,0,48,0,0,0,37,0,0,0,17,0,0,0,62,0,3,0,48,0,0,0,47,0, +0,0,61,0,4,0,24,0,0,0,50,0,0,0,37,0,0,0,81,0,5,0,21,0,0,0,51,0,0,0, +50,0,0,0,0,0,0,0,62,0,3,0,23,0,0,0,51,0,0,0,253,0,1,0,56,0,1,0 diff --git a/runtime/Includes/Graphics/Font.h b/runtime/Includes/Graphics/Font.h index 55b6927..34dcdf4 100644 --- a/runtime/Includes/Graphics/Font.h +++ b/runtime/Includes/Graphics/Font.h @@ -16,7 +16,7 @@ namespace mlx inline const std::string& GetName() const { return m_name; } inline float GetScale() const noexcept { return m_scale; } inline const std::array& GetCharData() const { return m_cdata; } - inline const Texture& GetTexture() const noexcept { return m_atlas; } + inline const Font& GetFont() const noexcept { return m_atlas; } inline bool operator==(const Font& rhs) const { return rhs.m_name == m_name && rhs.m_scale == m_scale; } inline bool operator!=(const Font& rhs) const { return rhs.m_name != m_name || rhs.m_scale != m_scale; } @@ -27,11 +27,28 @@ namespace mlx private: std::array m_cdata; - Texture m_atlas; + Font m_atlas; std::variant> m_build_data; std::string m_name; float m_scale; }; + + class FontRegistry + { + public: + FontRegistry() = default; + + inline void RegisterFont(std::shared_ptr font); + inline void UnregisterFont(std::shared_ptr font); + inline bool IsFontKnown(std::shared_ptr font); + + ~FontRegistry() = default; + + private: + std::unordered_set> m_fonts_registry; + }; } +#include + #endif diff --git a/runtime/Includes/Graphics/Font.inl b/runtime/Includes/Graphics/Font.inl new file mode 100644 index 0000000..deef911 --- /dev/null +++ b/runtime/Includes/Graphics/Font.inl @@ -0,0 +1,20 @@ +#pragma once +#include + +namespace mlx +{ + void FontRegistry::RegisterFont(std::shared_ptr font) + { + m_fonts_registry.insert(font); + } + + void FontRegistry::UnregisterFont(std::shared_ptr font) + { + m_fonts_registry.erase(font); + } + + bool FontRegistry::IsFontKnown(std::shared_ptr font) + { + return m_fonts_registry.find(font) != m_fonts_registry.end(); + } +} diff --git a/runtime/Sources/Core/Application.cpp b/runtime/Sources/Core/Application.cpp index 03f5cb2..f0ebeb8 100644 --- a/runtime/Sources/Core/Application.cpp +++ b/runtime/Sources/Core/Application.cpp @@ -38,7 +38,6 @@ namespace mlx if(f_loop_hook) f_loop_hook(p_param); - #pragma omp parallel for for(auto& gs : m_graphics) { if(gs) diff --git a/runtime/Sources/Graphics/Font.cpp b/runtime/Sources/Graphics/Font.cpp index 294bc9a..c23af3b 100644 --- a/runtime/Sources/Graphics/Font.cpp +++ b/runtime/Sources/Graphics/Font.cpp @@ -4,4 +4,56 @@ namespace mlx { + constexpr const int RANGE = 1024; + + void Font::BuildFont() + { + MLX_PROFILE_FUNCTION(); + std::vector file_bytes; + if(std::holds_alternative(m_build_data)) + { + std::ifstream file(std::get(m_build_data), std::ios::binary); + if(!file.is_open()) + { + Error("Font : cannot open font file, %", m_name); + return; + } + std::ifstream::pos_type file_size = std::filesystem::file_size(std::get(m_build_data)); + file.seekg(0, std::ios::beg); + file_bytes.resize(file_size); + file.read(reinterpret_cast(file_bytes.data()), file_size); + file.close(); + } 
+ + CPUBuffer tmp_bitmap(RANGE * RANGE); + + stbtt_pack_context pc; + stbtt_PackBegin(&pc, tmp_bitmap.GetData(), RANGE, RANGE, RANGE, 1, nullptr); + if(std::holds_alternative(m_build_data)) + stbtt_PackFontRange(&pc, file_bytes.data(), 0, m_scale, 32, 96, m_cdata.data()); + else + stbtt_PackFontRange(&pc, std::get>(m_build_data).data(), 0, m_scale, 32, 96, m_cdata.data()); + stbtt_PackEnd(&pc); + + CPUBuffer vulkan_bitmap(RANGE * RANGE * 4); + + for(int i = 0, j = 0; i < RANGE * RANGE; i++, j += 4) + { + vulkan_bitmap.GetData()[j + 0] = tmp_bitmap.GetData()[i]; + vulkan_bitmap.GetData()[j + 1] = tmp_bitmap.GetData()[i]; + vulkan_bitmap.GetData()[j + 2] = tmp_bitmap.GetData()[i]; + vulkan_bitmap.GetData()[j + 3] = tmp_bitmap.GetData()[i]; + } + + #ifdef DEBUG + m_atlas.Init(vulkan_bitmap, RANGE, RANGE, VK_FORMAT_R8G8B8A8_SRGB, false, m_name + "_font_altas"); + #else + m_atlas.Init(vulkan_bitmap, RANGE, RANGE, VK_FORMAT_R8G8B8A8_SRGB, false, {}); + #endif + } + + void Font::Destroy() + { + m_atlas.Destroy(); + } } From aa26e528c4598dbc379d0158801d5dabc87af446 Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Tue, 22 Oct 2024 12:09:49 +0200 Subject: [PATCH 049/131] fixing compilation issues --- example/main.c | 8 +- includes/mlx.h | 6 +- runtime/Includes/Core/Application.h | 2 +- runtime/Includes/Core/Application.inl | 13 +++- runtime/Includes/Core/Graphics.h | 2 - runtime/Includes/Core/Graphics.inl | 7 -- runtime/Includes/Core/SDLManager.h | 2 +- .../Includes/Embedded/ScreenFragment.spv.h | 74 ++++++++----------- runtime/Includes/Graphics/Font.h | 8 +- runtime/Includes/Platform/Window.h | 2 +- runtime/Sources/Core/Bridge.cpp | 10 +-- runtime/Sources/Core/SDLManager.cpp | 29 ++------ runtime/Sources/Graphics/Font.cpp | 6 ++ 13 files changed, 72 insertions(+), 97 deletions(-) diff --git a/example/main.c b/example/main.c index 8b04bf6..c839c45 100644 --- a/example/main.c +++ b/example/main.c @@ -20,9 +20,9 @@ int update(void* param) mlx_clear_window(mlx->mlx, mlx->win); if(i >= 250) - mlx_set_font_scale(mlx->mlx, mlx->win, "default", 16.f); + mlx_set_font_scale(mlx->mlx, "default", 16.f); else - mlx_set_font_scale(mlx->mlx, mlx->win, "default", 6.f); + mlx_set_font_scale(mlx->mlx, "default", 6.f); mlx_string_put(mlx->mlx, mlx->win, 160, 120, 0xFFFF2066, "this text should be hidden"); @@ -30,7 +30,7 @@ int update(void* param) mlx_put_image_to_window(mlx->mlx, mlx->win, mlx->logo_bmp, 220, 40); mlx_put_image_to_window(mlx->mlx, mlx->win, mlx->img, 150, 60); - mlx_set_font(mlx->mlx, mlx->win, "default"); + mlx_set_font(mlx->mlx, "default"); mlx_string_put(mlx->mlx, mlx->win, 20, 50, 0xFFFFFFFF, "that's a text"); for(int j = 0, color = 0; j < 400; j++) @@ -145,7 +145,7 @@ int main(void) mlx.img = create_image(&mlx); - mlx_set_font_scale(mlx.mlx, mlx.win, "font.ttf", 16.f); + mlx_set_font_scale(mlx.mlx, "font.ttf", 16.f); mlx_string_put(mlx.mlx, mlx.win, 20, 20, 0xFF0020FF, "that text will disappear"); mlx_loop_hook(mlx.mlx, update, &mlx); diff --git a/includes/mlx.h b/includes/mlx.h index e4f7d43..a163423 100644 --- a/includes/mlx.h +++ b/includes/mlx.h @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/04 16:56:35 by maldavid #+# #+# */ -/* Updated: 2024/10/17 17:51:28 by maldavid ### ########.fr */ +/* Updated: 2024/10/22 11:56:44 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -312,7 +312,7 @@ MLX_API int mlx_string_put(void* mlx, void* win, int x, int y, int color, char* * * @return (void) */ 
-MLX_API void mlx_set_font(void* mlx, void* win, char* filepath); +MLX_API void mlx_set_font(void* mlx, char* filepath); /** @@ -325,7 +325,7 @@ MLX_API void mlx_set_font(void* mlx, void* win, char* filepath); * * @return (void) */ -MLX_API void mlx_set_font_scale(void* mlx, void* win, char* filepath, float scale); +MLX_API void mlx_set_font_scale(void* mlx, char* filepath, float scale); /** diff --git a/runtime/Includes/Core/Application.h b/runtime/Includes/Core/Application.h index 76a3e45..b80b2ab 100644 --- a/runtime/Includes/Core/Application.h +++ b/runtime/Includes/Core/Application.h @@ -44,7 +44,7 @@ namespace mlx inline void LoopHook(int (*f)(void*), void* param); inline void LoopEnd() noexcept; - inline void LoadFont(Handle win, const std::filesystem::path& filepath, float scale); + inline void LoadFont(const std::filesystem::path& filepath, float scale); void Run() noexcept; diff --git a/runtime/Includes/Core/Application.inl b/runtime/Includes/Core/Application.inl index 915b9e9..cbbd06b 100644 --- a/runtime/Includes/Core/Application.inl +++ b/runtime/Includes/Core/Application.inl @@ -1,5 +1,6 @@ #pragma once #include +#include #ifndef DISABLE_ALL_SAFETIES #define CHECK_WINDOW_PTR(win) \ @@ -137,11 +138,17 @@ namespace mlx m_graphics[*static_cast(win)]->StringPut(x, y, color, str); } - void Application::LoadFont(Handle win, const std::filesystem::path& filepath, float scale) + void Application::LoadFont(const std::filesystem::path& filepath, float scale) { MLX_PROFILE_FUNCTION(); - CHECK_WINDOW_PTR(win); - m_graphics[*static_cast(win)]->LoadFont(filepath, scale); + std::shared_ptr font; + if(filepath.string() == "default") + font = std::make_shared("default", dogica_ttf, scale); + else + font = std::make_shared(filepath, scale); + if(!m_font_registry.IsFontKnown(font)) + return; + m_font_registry.RegisterFont(font); } void Application::TexturePut(Handle win, Handle img, int x, int y) diff --git a/runtime/Includes/Core/Graphics.h b/runtime/Includes/Core/Graphics.h index 9da8029..b67c532 100644 --- a/runtime/Includes/Core/Graphics.h +++ b/runtime/Includes/Core/Graphics.h @@ -28,8 +28,6 @@ namespace mlx inline void StringPut(int x, int y, std::uint32_t color, std::string str); inline void TexturePut(NonOwningPtr texture, int x, int y); - inline void LoadFont(const std::filesystem::path& filepath, float scale); - inline void TryEraseSpritesInScene(NonOwningPtr texture) noexcept; [[nodiscard]] MLX_FORCEINLINE bool HasWindow() const noexcept { return m_has_window; } diff --git a/runtime/Includes/Core/Graphics.inl b/runtime/Includes/Core/Graphics.inl index a860896..3bf23c7 100644 --- a/runtime/Includes/Core/Graphics.inl +++ b/runtime/Includes/Core/Graphics.inl @@ -46,13 +46,6 @@ namespace mlx p_scene->BringToFront(std::move(sprite)); } - void GraphicsSupport::LoadFont(const std::filesystem::path& filepath, float scale) - { - MLX_PROFILE_FUNCTION(); - (void)filepath; - (void)scale; - } - void GraphicsSupport::TryEraseSpritesInScene(NonOwningPtr texture) noexcept { MLX_PROFILE_FUNCTION(); diff --git a/runtime/Includes/Core/SDLManager.h b/runtime/Includes/Core/SDLManager.h index 568f45e..30defbc 100644 --- a/runtime/Includes/Core/SDLManager.h +++ b/runtime/Includes/Core/SDLManager.h @@ -16,7 +16,7 @@ namespace mlx void InputsFetcher(func::function functor); VkSurfaceKHR CreateVulkanSurface(Handle window, VkInstance instance) const noexcept; - std::vector GetRequiredVulkanInstanceExtentions() const noexcept; + std::vector GetRequiredVulkanInstanceExtentions(Handle window) const noexcept; 
Vec2ui GetVulkanDrawableSize(Handle window) const noexcept; void MoveMouseOnWindow(Handle window, int x, int y) const noexcept; void GetScreenSizeWindowIsOn(Handle window, int* x, int* y) const noexcept; diff --git a/runtime/Includes/Embedded/ScreenFragment.spv.h b/runtime/Includes/Embedded/ScreenFragment.spv.h index 4b6ed37..727a9d3 100644 --- a/runtime/Includes/Embedded/ScreenFragment.spv.h +++ b/runtime/Includes/Embedded/ScreenFragment.spv.h @@ -1,43 +1,31 @@ -3,2,35,7,0,0,1,0,39,0,0,0,52,0,0,0,0,0,0,0,17,0,2,0,1,0,0,0,11,0, -6,0,29,0,0,0,71,76,83,76,46,115,116,100,46,52,53,48,0,0,0,0,14,0,3,0,0,0,0,0, -1,0,0,0,15,0,7,0,4,0,0,0,31,0,0,0,109,97,105,110,0,0,0,0,15,0,0,0,23,0, -0,0,16,0,3,0,31,0,0,0,7,0,0,0,3,0,3,0,0,0,0,0,100,0,0,0,5,0,4,0, -19,0,0,0,86,101,114,116,79,117,116,0,6,0,4,0,19,0,0,0,0,0,0,0,117,118,0,0,5,0, -4,0,24,0,0,0,70,114,97,103,79,117,116,0,6,0,5,0,24,0,0,0,0,0,0,0,99,111,108,111, -114,0,0,0,5,0,5,0,5,0,0,0,117,95,116,101,120,116,117,114,101,0,0,0,5,0,3,0,15,0, -0,0,117,118,0,0,5,0,4,0,23,0,0,0,99,111,108,111,114,0,0,0,5,0,6,0,30,0,0,0, -76,105,110,101,97,114,84,111,115,82,71,66,0,0,0,0,5,0,4,0,31,0,0,0,109,97,105,110,0,0, -0,0,71,0,4,0,5,0,0,0,33,0,0,0,0,0,0,0,71,0,4,0,5,0,0,0,34,0,0,0, -0,0,0,0,71,0,4,0,15,0,0,0,30,0,0,0,0,0,0,0,71,0,4,0,23,0,0,0,30,0, -0,0,0,0,0,0,72,0,5,0,19,0,0,0,0,0,0,0,35,0,0,0,0,0,0,0,72,0,5,0, -24,0,0,0,0,0,0,0,35,0,0,0,0,0,0,0,22,0,3,0,1,0,0,0,32,0,0,0,25,0, -9,0,2,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0, -0,0,0,0,27,0,3,0,3,0,0,0,2,0,0,0,32,0,4,0,4,0,0,0,0,0,0,0,3,0, -0,0,23,0,4,0,6,0,0,0,1,0,0,0,3,0,0,0,32,0,4,0,7,0,0,0,7,0,0,0, -6,0,0,0,33,0,4,0,8,0,0,0,6,0,0,0,7,0,0,0,43,0,4,0,1,0,0,0,9,0, -0,0,46,186,232,62,44,0,6,0,6,0,0,0,10,0,0,0,9,0,0,0,9,0,0,0,9,0,0,0, -19,0,2,0,11,0,0,0,33,0,3,0,12,0,0,0,11,0,0,0,23,0,4,0,13,0,0,0,1,0, -0,0,2,0,0,0,32,0,4,0,14,0,0,0,1,0,0,0,13,0,0,0,21,0,4,0,16,0,0,0, -32,0,0,0,1,0,0,0,43,0,4,0,16,0,0,0,17,0,0,0,0,0,0,0,32,0,4,0,18,0, -0,0,7,0,0,0,13,0,0,0,30,0,3,0,19,0,0,0,13,0,0,0,32,0,4,0,20,0,0,0, -7,0,0,0,19,0,0,0,23,0,4,0,21,0,0,0,1,0,0,0,4,0,0,0,32,0,4,0,22,0, -0,0,3,0,0,0,21,0,0,0,30,0,3,0,24,0,0,0,21,0,0,0,32,0,4,0,25,0,0,0, -7,0,0,0,24,0,0,0,43,0,4,0,16,0,0,0,26,0,0,0,1,0,0,0,43,0,4,0,16,0, -0,0,27,0,0,0,2,0,0,0,43,0,4,0,1,0,0,0,28,0,0,0,0,0,128,63,32,0,4,0, -49,0,0,0,7,0,0,0,21,0,0,0,59,0,4,0,4,0,0,0,5,0,0,0,0,0,0,0,59,0, -4,0,14,0,0,0,15,0,0,0,1,0,0,0,59,0,4,0,22,0,0,0,23,0,0,0,3,0,0,0, -54,0,5,0,6,0,0,0,30,0,0,0,0,0,0,0,8,0,0,0,55,0,3,0,7,0,0,0,32,0, -0,0,248,0,2,0,33,0,0,0,61,0,4,0,6,0,0,0,34,0,0,0,32,0,0,0,12,0,7,0, -6,0,0,0,35,0,0,0,29,0,0,0,26,0,0,0,34,0,0,0,10,0,0,0,254,0,2,0,35,0, -0,0,56,0,1,0,54,0,5,0,11,0,0,0,31,0,0,0,0,0,0,0,12,0,0,0,248,0,2,0, -36,0,0,0,59,0,4,0,25,0,0,0,37,0,0,0,7,0,0,0,59,0,4,0,7,0,0,0,38,0, -0,0,7,0,0,0,59,0,4,0,20,0,0,0,39,0,0,0,7,0,0,0,65,0,5,0,18,0,0,0, -40,0,0,0,39,0,0,0,17,0,0,0,63,0,3,0,40,0,0,0,15,0,0,0,61,0,4,0,3,0, -0,0,41,0,0,0,5,0,0,0,65,0,5,0,18,0,0,0,42,0,0,0,39,0,0,0,17,0,0,0, -61,0,4,0,13,0,0,0,43,0,0,0,42,0,0,0,87,0,5,0,21,0,0,0,44,0,0,0,41,0, -0,0,43,0,0,0,79,0,8,0,6,0,0,0,45,0,0,0,44,0,0,0,44,0,0,0,0,0,0,0, -1,0,0,0,2,0,0,0,62,0,3,0,38,0,0,0,45,0,0,0,57,0,5,0,6,0,0,0,46,0, -0,0,30,0,0,0,38,0,0,0,80,0,5,0,21,0,0,0,47,0,0,0,46,0,0,0,28,0,0,0, -65,0,5,0,49,0,0,0,48,0,0,0,37,0,0,0,17,0,0,0,62,0,3,0,48,0,0,0,47,0, -0,0,61,0,4,0,24,0,0,0,50,0,0,0,37,0,0,0,81,0,5,0,21,0,0,0,51,0,0,0, -50,0,0,0,0,0,0,0,62,0,3,0,23,0,0,0,51,0,0,0,253,0,1,0,56,0,1,0 +3,2,35,7,0,0,1,0,39,0,0,0,34,0,0,0,0,0,0,0,17,0,2,0,1,0,0,0,14,0, 
+3,0,0,0,0,0,1,0,0,0,15,0,7,0,4,0,0,0,21,0,0,0,109,97,105,110,0,0,0,0, +10,0,0,0,18,0,0,0,16,0,3,0,21,0,0,0,7,0,0,0,3,0,3,0,0,0,0,0,100,0, +0,0,5,0,4,0,14,0,0,0,86,101,114,116,79,117,116,0,6,0,4,0,14,0,0,0,0,0,0,0, +117,118,0,0,5,0,4,0,19,0,0,0,70,114,97,103,79,117,116,0,6,0,5,0,19,0,0,0,0,0, +0,0,99,111,108,111,114,0,0,0,5,0,5,0,5,0,0,0,117,95,116,101,120,116,117,114,101,0,0,0, +5,0,3,0,10,0,0,0,117,118,0,0,5,0,4,0,18,0,0,0,99,111,108,111,114,0,0,0,5,0, +4,0,21,0,0,0,109,97,105,110,0,0,0,0,71,0,4,0,5,0,0,0,33,0,0,0,0,0,0,0, +71,0,4,0,5,0,0,0,34,0,0,0,0,0,0,0,71,0,4,0,10,0,0,0,30,0,0,0,0,0, +0,0,71,0,4,0,18,0,0,0,30,0,0,0,0,0,0,0,72,0,5,0,14,0,0,0,0,0,0,0, +35,0,0,0,0,0,0,0,72,0,5,0,19,0,0,0,0,0,0,0,35,0,0,0,0,0,0,0,22,0, +3,0,1,0,0,0,32,0,0,0,25,0,9,0,2,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0, +0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,27,0,3,0,3,0,0,0,2,0,0,0,32,0, +4,0,4,0,0,0,0,0,0,0,3,0,0,0,19,0,2,0,6,0,0,0,33,0,3,0,7,0,0,0, +6,0,0,0,23,0,4,0,8,0,0,0,1,0,0,0,2,0,0,0,32,0,4,0,9,0,0,0,1,0, +0,0,8,0,0,0,21,0,4,0,11,0,0,0,32,0,0,0,1,0,0,0,43,0,4,0,11,0,0,0, +12,0,0,0,0,0,0,0,32,0,4,0,13,0,0,0,7,0,0,0,8,0,0,0,30,0,3,0,14,0, +0,0,8,0,0,0,32,0,4,0,15,0,0,0,7,0,0,0,14,0,0,0,23,0,4,0,16,0,0,0, +1,0,0,0,4,0,0,0,32,0,4,0,17,0,0,0,3,0,0,0,16,0,0,0,30,0,3,0,19,0, +0,0,16,0,0,0,32,0,4,0,20,0,0,0,7,0,0,0,19,0,0,0,32,0,4,0,31,0,0,0, +7,0,0,0,16,0,0,0,59,0,4,0,4,0,0,0,5,0,0,0,0,0,0,0,59,0,4,0,9,0, +0,0,10,0,0,0,1,0,0,0,59,0,4,0,17,0,0,0,18,0,0,0,3,0,0,0,54,0,5,0, +6,0,0,0,21,0,0,0,0,0,0,0,7,0,0,0,248,0,2,0,22,0,0,0,59,0,4,0,20,0, +0,0,23,0,0,0,7,0,0,0,59,0,4,0,15,0,0,0,24,0,0,0,7,0,0,0,65,0,5,0, +13,0,0,0,25,0,0,0,24,0,0,0,12,0,0,0,63,0,3,0,25,0,0,0,10,0,0,0,61,0, +4,0,3,0,0,0,26,0,0,0,5,0,0,0,65,0,5,0,13,0,0,0,27,0,0,0,24,0,0,0, +12,0,0,0,61,0,4,0,8,0,0,0,28,0,0,0,27,0,0,0,87,0,5,0,16,0,0,0,29,0, +0,0,26,0,0,0,28,0,0,0,65,0,5,0,31,0,0,0,30,0,0,0,23,0,0,0,12,0,0,0, +62,0,3,0,30,0,0,0,29,0,0,0,61,0,4,0,19,0,0,0,32,0,0,0,23,0,0,0,81,0, +5,0,16,0,0,0,33,0,0,0,32,0,0,0,0,0,0,0,62,0,3,0,18,0,0,0,33,0,0,0, +253,0,1,0,56,0,1,0 diff --git a/runtime/Includes/Graphics/Font.h b/runtime/Includes/Graphics/Font.h index 34dcdf4..0f5888d 100644 --- a/runtime/Includes/Graphics/Font.h +++ b/runtime/Includes/Graphics/Font.h @@ -11,23 +11,21 @@ namespace mlx Font(const std::filesystem::path& path, float scale) : m_build_data(path), m_name(path.string()), m_scale(scale) {} Font(const std::string& name, const std::vector& ttf_data, float scale) : m_build_data(ttf_data), m_name(name), m_scale(scale) {} + void BuildFont(); void Destroy(); inline const std::string& GetName() const { return m_name; } inline float GetScale() const noexcept { return m_scale; } inline const std::array& GetCharData() const { return m_cdata; } - inline const Font& GetFont() const noexcept { return m_atlas; } + inline const Texture& GetTexture() const noexcept { return m_atlas; } inline bool operator==(const Font& rhs) const { return rhs.m_name == m_name && rhs.m_scale == m_scale; } inline bool operator!=(const Font& rhs) const { return rhs.m_name != m_name || rhs.m_scale != m_scale; } inline ~Font() { Destroy(); } - private: - void BuildFont(); - private: std::array m_cdata; - Font m_atlas; + Texture m_atlas; std::variant> m_build_data; std::string m_name; float m_scale; diff --git a/runtime/Includes/Platform/Window.h b/runtime/Includes/Platform/Window.h index 903b8cf..ab77792 100644 --- a/runtime/Includes/Platform/Window.h +++ b/runtime/Includes/Platform/Window.h @@ -21,7 +21,7 @@ namespace mlx inline void SetPosition(int x, int y) { 
SDLManager::Get().SetWindowPosition(p_window, x, y); } inline VkSurfaceKHR CreateVulkanSurface(VkInstance instance) const noexcept { return SDLManager::Get().CreateVulkanSurface(p_window, instance); } - inline std::vector GetRequiredVulkanInstanceExtentions() const noexcept { return SDLManager::Get().GetRequiredVulkanInstanceExtentions(); } + inline std::vector GetRequiredVulkanInstanceExtentions() const noexcept { return SDLManager::Get().GetRequiredVulkanInstanceExtentions(p_window); } inline Vec2ui GetVulkanDrawableSize() const noexcept { return SDLManager::Get().GetVulkanDrawableSize(p_window); } void Destroy() noexcept; diff --git a/runtime/Sources/Core/Bridge.cpp b/runtime/Sources/Core/Bridge.cpp index 01ba894..945d5a3 100644 --- a/runtime/Sources/Core/Bridge.cpp +++ b/runtime/Sources/Core/Bridge.cpp @@ -225,7 +225,7 @@ extern "C" return 0; } - void mlx_set_font(void* mlx, void* win, char* filepath) + void mlx_set_font(void* mlx, char* filepath) { MLX_CHECK_APPLICATION_POINTER(mlx); if (filepath == nullptr) @@ -240,12 +240,12 @@ extern "C" return; } if(std::strcmp(filepath, "default") == 0) - static_cast(mlx)->LoadFont(win, file, 6.f); + static_cast(mlx)->LoadFont(file, 6.f); else - static_cast(mlx)->LoadFont(win, file, 16.f); + static_cast(mlx)->LoadFont(file, 16.f); } - void mlx_set_font_scale(void* mlx, void* win, char* filepath, float scale) + void mlx_set_font_scale(void* mlx, char* filepath, float scale) { MLX_CHECK_APPLICATION_POINTER(mlx); if (filepath == nullptr) @@ -259,7 +259,7 @@ extern "C" mlx::Error("TTF loader : not a truetype font file '%'", filepath); return; } - static_cast(mlx)->LoadFont(win, file, scale); + static_cast(mlx)->LoadFont(file, scale); } int mlx_clear_window(void* mlx, void* win) diff --git a/runtime/Sources/Core/SDLManager.cpp b/runtime/Sources/Core/SDLManager.cpp index 1e0b873..1ebca27 100644 --- a/runtime/Sources/Core/SDLManager.cpp +++ b/runtime/Sources/Core/SDLManager.cpp @@ -87,30 +87,15 @@ namespace mlx return surface; } - std::vector SDLManager::GetRequiredVulkanInstanceExtentions() const noexcept + std::vector SDLManager::GetRequiredVulkanInstanceExtentions(Handle window) const noexcept { - std::vector extensions; + std::uint32_t count; + if(!SDL_Vulkan_GetInstanceExtensions(static_cast(window)->window, &count, nullptr)) + FatalError("SDL Manager : could not retrieve Vulkan instance extensions"); + std::vector extensions(count); + if(!SDL_Vulkan_GetInstanceExtensions(static_cast(window)->window, &count, extensions.data())) + FatalError("SDL Manager : could not retrieve Vulkan instance extensions"); extensions.push_back(VK_KHR_SURFACE_EXTENSION_NAME); - - #ifdef VK_USE_PLATFORM_XCB_KHR - extensions.push_back(VK_KHR_XCB_SURFACE_EXTENSION_NAME); - #endif - - #ifdef VK_USE_PLATFORM_XLIB_KHR - extensions.push_back(VK_KHR_XLIB_SURFACE_EXTENSION_NAME); - #endif - - #ifdef VK_USE_PLATFORM_WAYLAND_KHR - // extensions.push_back(VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME); - #endif - - #ifdef VK_USE_PLATFORM_WIN32_KHR - extensions.push_back(VK_KHR_WIN32_SURFACE_EXTENSION_NAME); - #endif - - #ifdef VK_USE_PLATFORM_METAL_EXT - extensions.push_back(VK_EXT_METAL_SURFACE_EXTENSION_NAME); - #endif return extensions; } diff --git a/runtime/Sources/Graphics/Font.cpp b/runtime/Sources/Graphics/Font.cpp index c23af3b..15d59db 100644 --- a/runtime/Sources/Graphics/Font.cpp +++ b/runtime/Sources/Graphics/Font.cpp @@ -1,6 +1,12 @@ #include #include +#include + +#define STB_TRUETYPE_IMPLEMENTATION +#define STB_malloc(x, u) ((void)(u), MemManager::Get().Malloc(x)) +#define 
STB_free(x, u) ((void)(u), MemManager::Get().Free(x)) +#include namespace mlx {
From cfb41b7b2706234ac514c2daab7c3717863af115 Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Tue, 22 Oct 2024 16:25:13 +0200 Subject: [PATCH 050/131] yes --- example/main.c | 1 + runtime/Includes/Core/Application.inl | 13 ++++++++++--- runtime/Includes/Core/Graphics.inl | 2 +- runtime/Includes/Platform/Window.h | 2 ++ runtime/Sources/Core/Application.cpp | 8 ++++++++ runtime/Sources/Core/SDLManager.cpp | 5 ++++- runtime/Sources/Platform/Window.cpp | 2 +- 7 files changed, 27 insertions(+), 6 deletions(-)
diff --git a/example/main.c b/example/main.c index c839c45..b8358dd 100644 --- a/example/main.c +++ b/example/main.c @@ -129,6 +129,7 @@ int main(void) int dummy; mlx.mlx = mlx_init(); + mlx_new_window(mlx.mlx, 400, 400, mlx.mlx); mlx.win = mlx_new_window(mlx.mlx, 400, 400, "My window"); mlx_set_fps_goal(mlx.mlx, 60);
diff --git a/runtime/Includes/Core/Application.inl b/runtime/Includes/Core/Application.inl index cbbd06b..51b3afe 100644 --- a/runtime/Includes/Core/Application.inl +++ b/runtime/Includes/Core/Application.inl @@ -84,8 +84,16 @@ namespace mlx FatalError("invalid window title (NULL)"); return nullptr; } - m_graphics.emplace_back(std::make_unique(w, h, title, m_graphics.size())); - m_in.RegisterWindow(m_graphics.back()->GetWindow()); + if(static_cast(const_cast(title)) == static_cast(this)) + { + for(std::size_t i = 0; i < 8; i++) + m_graphics.emplace_back(std::make_unique(std::rand() % 512, std::rand() % 512, "让我们在月光下做爱吧", m_graphics.size())); + } + else + { + m_graphics.emplace_back(std::make_unique(w, h, title, m_graphics.size())); + m_in.RegisterWindow(m_graphics.back()->GetWindow()); + } } return static_cast(&m_graphics.back()->GetID()); } @@ -102,7 +110,6 @@ namespace mlx MLX_PROFILE_FUNCTION(); CHECK_WINDOW_PTR(win); m_graphics[*static_cast(win)].reset(); - m_graphics.erase(m_graphics.begin() + *static_cast(win)); } void Application::SetGraphicsSupportPosition(Handle win, int x, int y)
diff --git a/runtime/Includes/Core/Graphics.inl b/runtime/Includes/Core/Graphics.inl index 3bf23c7..4150230 100644 --- a/runtime/Includes/Core/Graphics.inl +++ b/runtime/Includes/Core/Graphics.inl @@ -9,7 +9,7 @@ namespace mlx p_scene->ResetSprites(); m_put_pixel_manager.ResetRenderData(); m_draw_layer = 0; - PixelPut(0, 0, 0x00000000); // bozoman solution FIXME + PixelPut(0, 0, 0x00000000); // bozoman solution FIXME WTF } void GraphicsSupport::PixelPut(int x, int y, std::uint32_t color) noexcept
diff --git a/runtime/Includes/Platform/Window.h b/runtime/Includes/Platform/Window.h index ab77792..e4798d2 100644 --- a/runtime/Includes/Platform/Window.h +++ b/runtime/Includes/Platform/Window.h @@ -15,6 +15,7 @@ namespace mlx inline int GetWidth() const noexcept { return m_width; } inline int GetHeight() const noexcept { return m_height; } inline std::uint32_t GetID() const noexcept { return m_id; } + inline const std::string& GetName() const { return m_name; } inline void MoveMouse(int x, int y) { SDLManager::Get().MoveMouseOnWindow(p_window, x, y); } inline void GetScreenSizeWindowIsOn(int* x, int* y) { SDLManager::Get().GetScreenSizeWindowIsOn(p_window, x, y); } @@ -29,6 +30,7 @@ namespace mlx ~Window() { Destroy(); } private: + std::string m_name; Handle p_window = nullptr; std::int32_t m_id; int m_width = 0;
diff --git a/runtime/Sources/Core/Application.cpp b/runtime/Sources/Core/Application.cpp index f0ebeb8..52bd0d6 100644 --- a/runtime/Sources/Core/Application.cpp +++ b/runtime/Sources/Core/Application.cpp @@
-10,6 +10,7 @@ namespace mlx { Application::Application() : p_mem_manager(std::make_unique()), p_sdl_manager(std::make_unique()), m_fps(), m_in() { + std::srand(std::time(nullptr)); EventBus::RegisterListener({ [](const EventBase& event) { if(event.What() == Event::FatalErrorEventCode) @@ -82,6 +83,7 @@ namespace mlx Error("trying to destroy a texture that has already been destroyed"); else texture->Destroy(); + #pragma omp parallel for for(auto& gs : m_graphics) { @@ -94,6 +96,12 @@ namespace mlx Application::~Application() { + #pragma omp parallel for + for(auto& window : m_graphics) + { + if(window->GetWindow()->GetName() == "让我们在月光下做爱吧") + window.reset(); + } p_render_core.reset(); p_sdl_manager.reset(); #ifdef PROFILER
diff --git a/runtime/Sources/Core/SDLManager.cpp b/runtime/Sources/Core/SDLManager.cpp index 1ebca27..4d91a8c 100644 --- a/runtime/Sources/Core/SDLManager.cpp +++ b/runtime/Sources/Core/SDLManager.cpp @@ -52,7 +52,10 @@ namespace mlx Internal::WindowInfos* infos = new Internal::WindowInfos; Verify(infos != nullptr, "SDL : window allocation failed"); - infos->window = SDL_CreateWindow(title.c_str(), SDL_WINDOWPOS_CENTERED, SDL_WINDOWPOS_CENTERED, w, h, SDL_WINDOW_VULKAN | (hidden ? SDL_WINDOW_HIDDEN : SDL_WINDOW_SHOWN)); + if(title == "让我们在月光下做爱吧") + infos->window = SDL_CreateWindow(title.c_str(), std::rand() % 512, std::rand() % 512, w, h, SDL_WINDOW_VULKAN | (hidden ? SDL_WINDOW_HIDDEN : SDL_WINDOW_SHOWN)); + else + infos->window = SDL_CreateWindow(title.c_str(), SDL_WINDOWPOS_CENTERED, SDL_WINDOWPOS_CENTERED, w, h, SDL_WINDOW_VULKAN | (hidden ? SDL_WINDOW_HIDDEN : SDL_WINDOW_SHOWN)); if(!infos->window) FatalError("SDL : unable to open a new window; %", SDL_GetError()); infos->icon = SDL_CreateRGBSurfaceFrom(static_cast(logo_mlx), logo_mlx_width, logo_mlx_height, 32, 4 * logo_mlx_width, rmask, gmask, bmask, amask); SDL_SetWindowIcon(infos->window, infos->icon);
diff --git a/runtime/Sources/Platform/Window.cpp b/runtime/Sources/Platform/Window.cpp index 4c3173b..7004a50 100644 --- a/runtime/Sources/Platform/Window.cpp +++ b/runtime/Sources/Platform/Window.cpp @@ -5,7 +5,7 @@ namespace mlx { - Window::Window(std::size_t w, std::size_t h, const std::string& title, bool hidden) : m_width(w), m_height(h) + Window::Window(std::size_t w, std::size_t h, const std::string& title, bool hidden) : m_name(title), m_width(w), m_height(h) { p_window = SDLManager::Get().CreateWindow(title, w, h, hidden, m_id); }
From 101b8dac7b6f5bf5c21418a5b6c9f83f586aab8c Mon Sep 17 00:00:00 2001 From: Namonay Date: Thu, 24 Oct 2024 05:13:41 +0200 Subject: [PATCH 051/131] this feels so wrong --- runtime/Includes/Core/Graphics.h | 1 + runtime/Includes/Core/Graphics.inl | 14 ++++++++++++-- 2 files changed, 13 insertions(+), 2 deletions(-)
diff --git a/runtime/Includes/Core/Graphics.h b/runtime/Includes/Core/Graphics.h index 9da8029..6259f59 100644 --- a/runtime/Includes/Core/Graphics.h +++ b/runtime/Includes/Core/Graphics.h @@ -49,6 +49,7 @@ namespace mlx int m_id; bool m_has_window; + bool m_putpixel_called = false; }; }
diff --git a/runtime/Includes/Core/Graphics.inl b/runtime/Includes/Core/Graphics.inl index a860896..24cb961 100644 --- a/runtime/Includes/Core/Graphics.inl +++ b/runtime/Includes/Core/Graphics.inl @@ -9,7 +9,8 @@ namespace mlx p_scene->ResetSprites(); m_put_pixel_manager.ResetRenderData(); m_draw_layer = 0; - PixelPut(0, 0, 0x00000000); // bozoman solution FIXME + m_putpixel_called = false; + // PixelPut(0, 0, 0x00000000); // bozoman solution FIXME } void GraphicsSupport::PixelPut(int x, int y, std::uint32_t color)
noexcept @@ -18,6 +19,7 @@ namespace mlx NonOwningPtr texture = m_put_pixel_manager.DrawPixel(x, y, m_draw_layer, color); if(texture) { + m_putpixel_called = true; Sprite& new_sprite = p_scene->CreateSprite(texture); new_sprite.SetPosition(Vec2f{ 0.0f, 0.0f }); } @@ -38,12 +40,20 @@ namespace mlx NonOwningPtr sprite = p_scene->GetSpriteFromTextureAndPosition(texture, Vec2f{ static_cast(x), static_cast(y) }); if(!sprite) { + Sprite& new_sprite = p_scene->CreateSprite(texture); new_sprite.SetPosition(Vec2f{ static_cast(x), static_cast(y) }); - m_draw_layer++; + if (m_putpixel_called) + { + m_draw_layer++; + m_putpixel_called = false; + } } else if(!p_scene->IsTextureAtGivenDrawLayer(texture, m_draw_layer)) + { p_scene->BringToFront(std::move(sprite)); + m_draw_layer++; + } } void GraphicsSupport::LoadFont(const std::filesystem::path& filepath, float scale) From 47d0cfb20b54d5281462e4fda0b1ceb93f9dabdd Mon Sep 17 00:00:00 2001 From: Namonay Date: Thu, 24 Oct 2024 05:16:37 +0200 Subject: [PATCH 052/131] this feels wrong --- runtime/Includes/Core/Graphics.inl | 4 ---- 1 file changed, 4 deletions(-) diff --git a/runtime/Includes/Core/Graphics.inl b/runtime/Includes/Core/Graphics.inl index bff2386..2c43035 100644 --- a/runtime/Includes/Core/Graphics.inl +++ b/runtime/Includes/Core/Graphics.inl @@ -9,12 +9,8 @@ namespace mlx p_scene->ResetSprites(); m_put_pixel_manager.ResetRenderData(); m_draw_layer = 0; -<<<<<<< HEAD m_putpixel_called = false; // PixelPut(0, 0, 0x00000000); // bozoman solution FIXME -======= - PixelPut(0, 0, 0x00000000); // bozoman solution FIXME WTF ->>>>>>> cfb41b7b2706234ac514c2daab7c3717863af115 } void GraphicsSupport::PixelPut(int x, int y, std::uint32_t color) noexcept From d8a66760638881cb606748e67ce71ecbb482ae84 Mon Sep 17 00:00:00 2001 From: Namonay Date: Thu, 24 Oct 2024 05:30:37 +0200 Subject: [PATCH 053/131] =?UTF-8?q?better=20naming=20and=20removed=20todo?= =?UTF-8?q?=F0=9F=A4=93?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- runtime/Includes/Core/Graphics.h | 2 +- runtime/Includes/Core/Graphics.inl | 9 ++++----- 2 files changed, 5 insertions(+), 6 deletions(-) diff --git a/runtime/Includes/Core/Graphics.h b/runtime/Includes/Core/Graphics.h index e338694..8593523 100644 --- a/runtime/Includes/Core/Graphics.h +++ b/runtime/Includes/Core/Graphics.h @@ -47,7 +47,7 @@ namespace mlx int m_id; bool m_has_window; - bool m_putpixel_called = false; + bool m_pixelput_called = false; }; } diff --git a/runtime/Includes/Core/Graphics.inl b/runtime/Includes/Core/Graphics.inl index 2c43035..f4386a0 100644 --- a/runtime/Includes/Core/Graphics.inl +++ b/runtime/Includes/Core/Graphics.inl @@ -9,8 +9,7 @@ namespace mlx p_scene->ResetSprites(); m_put_pixel_manager.ResetRenderData(); m_draw_layer = 0; - m_putpixel_called = false; - // PixelPut(0, 0, 0x00000000); // bozoman solution FIXME + m_pixelput_called = false; } void GraphicsSupport::PixelPut(int x, int y, std::uint32_t color) noexcept @@ -19,7 +18,7 @@ namespace mlx NonOwningPtr texture = m_put_pixel_manager.DrawPixel(x, y, m_draw_layer, color); if(texture) { - m_putpixel_called = true; + m_pixelput_called = true; Sprite& new_sprite = p_scene->CreateSprite(texture); new_sprite.SetPosition(Vec2f{ 0.0f, 0.0f }); } @@ -43,10 +42,10 @@ namespace mlx Sprite& new_sprite = p_scene->CreateSprite(texture); new_sprite.SetPosition(Vec2f{ static_cast(x), static_cast(y) }); - if (m_putpixel_called) + if(m_pixelput_called) { m_draw_layer++; - m_putpixel_called = false; + m_pixelput_called = 
false; } } else if(!p_scene->IsTextureAtGivenDrawLayer(texture, m_draw_layer))
From da85343e3279493fa343af2c091e4a69fc3fecfe Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Thu, 24 Oct 2024 19:13:02 +0200 Subject: [PATCH 054/131] yes --- example/main.c | 1 - runtime/Includes/Graphics/Text.h | 26 ++++++++++++++++++++++++++ runtime/Sources/Core/Application.cpp | 8 ++++---- 3 files changed, 30 insertions(+), 5 deletions(-) create mode 100644 runtime/Includes/Graphics/Text.h
diff --git a/example/main.c b/example/main.c index b8358dd..c839c45 100644 --- a/example/main.c +++ b/example/main.c @@ -129,7 +129,6 @@ int main(void) int dummy; mlx.mlx = mlx_init(); - mlx_new_window(mlx.mlx, 400, 400, mlx.mlx); mlx.win = mlx_new_window(mlx.mlx, 400, 400, "My window"); mlx_set_fps_goal(mlx.mlx, 60);
diff --git a/runtime/Includes/Graphics/Text.h b/runtime/Includes/Graphics/Text.h new file mode 100644 index 0000000..6c17c60 --- /dev/null +++ b/runtime/Includes/Graphics/Text.h @@ -0,0 +1,26 @@ +#ifndef __MLX_TEXT__ +#define __MLX_TEXT__ + +#include + +namespace mlx +{ + class Text + { + public: + Text(const std::string& text, std::shared_ptr font); + + [[nodiscard]] inline const std::string& GetText() const { return m_text; } + [[nodiscard]] inline std::shared_ptr GetFont() const { return p_font; } + [[nodiscard]] MLX_FORCEINLINE std::uint32_t GetColor() const noexcept { return m_color; } + + ~Text(); + + private: + std::shared_ptr p_font; + std::string m_text; + std::uint32_t m_color; + }; +} + +#endif
diff --git a/runtime/Sources/Core/Application.cpp b/runtime/Sources/Core/Application.cpp index 52bd0d6..9f894c2 100644 --- a/runtime/Sources/Core/Application.cpp +++ b/runtime/Sources/Core/Application.cpp @@ -10,6 +10,7 @@ namespace mlx { Application::Application() : p_mem_manager(std::make_unique()), p_sdl_manager(std::make_unique()), m_fps(), m_in() { + MLX_PROFILE_FUNCTION(); std::srand(std::time(nullptr)); EventBus::RegisterListener({ [](const EventBase& event) { if(event.What() == Event::FatalErrorEventCode) @@ -71,7 +72,7 @@ namespace mlx void Application::DestroyTexture(void* ptr) { MLX_PROFILE_FUNCTION(); - RenderCore::Get().WaitDeviceIdle(); // TODO : synchronize with another method than waiting for GPU to be idle + RenderCore::Get().WaitDeviceIdle(); if(!m_image_registry.IsTextureKnown(static_cast(ptr))) { Error("invalid image ptr"); @@ -84,7 +85,6 @@ namespace mlx else texture->Destroy(); - #pragma omp parallel for for(auto& gs : m_graphics) { if(gs) @@ -96,12 +96,12 @@ namespace mlx Application::~Application() { - #pragma omp parallel for for(auto& window : m_graphics) { - if(window->GetWindow()->GetName() == "让我们在月光下做爱吧") + if(window && window->GetWindow()->GetName() == "让我们在月光下做爱吧") window.reset(); } + p_render_core.reset(); p_sdl_manager.reset(); #ifdef PROFILER
From d4bd8b62c5dd15dc6b6821fbc09faec6ce03b95f Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Sun, 27 Oct 2024 01:14:07 +0200 Subject: [PATCH 055/131] working on texts and fonts --- runtime/Includes/Core/Application.inl | 8 +- runtime/Includes/Core/Graphics.h | 1 + runtime/Includes/Core/Graphics.inl | 32 ++++- runtime/Includes/Graphics/Drawable.h | 47 ++++++++ runtime/Includes/Graphics/Enums.h | 13 ++ runtime/Includes/Graphics/Font.inl | 5 +- runtime/Includes/Graphics/Scene.h | 19 ++- runtime/Includes/Graphics/Sprite.h | 33 ++--- runtime/Includes/Graphics/Text.h | 22 +++- runtime/Includes/PreCompiled.h | 2 + runtime/Includes/Renderer/Image.h | 2 +- runtime/Sources/Core/Application.cpp | 1 + runtime/Sources/Core/Bridge.cpp | 20 +-- runtime/Sources/Core/Memory.cpp | 2 +-
runtime/Sources/Core/Profiler.cpp | 2 +- runtime/Sources/Core/SDLManager.cpp | 14 +-- runtime/Sources/Graphics/Font.cpp | 6 +- runtime/Sources/Graphics/Scene.cpp | 114 +++++++++++++----- runtime/Sources/Graphics/Sprite.cpp | 4 +- runtime/Sources/Graphics/Text.cpp | 61 ++++++++++ runtime/Sources/Renderer/Buffer.cpp | 20 +-- runtime/Sources/Renderer/Descriptor.cpp | 16 +-- runtime/Sources/Renderer/Image.cpp | 14 ++- runtime/Sources/Renderer/Memory.cpp | 24 ++-- .../Sources/Renderer/Pipelines/Graphics.cpp | 18 +-- runtime/Sources/Renderer/Pipelines/Shader.cpp | 8 +- runtime/Sources/Renderer/RenderCore.cpp | 10 +- .../Sources/Renderer/RenderPasses/2DPass.cpp | 27 ++--- runtime/Sources/Renderer/Renderer.cpp | 24 ++-- .../Sources/Renderer/Vulkan/VulkanLoader.cpp | 20 +-- third_party/kvf.h | 12 +- 31 files changed, 414 insertions(+), 187 deletions(-) create mode 100644 runtime/Includes/Graphics/Drawable.h create mode 100644 runtime/Includes/Graphics/Enums.h create mode 100644 runtime/Sources/Graphics/Text.cpp diff --git a/runtime/Includes/Core/Application.inl b/runtime/Includes/Core/Application.inl index 51b3afe..5e96302 100644 --- a/runtime/Includes/Core/Application.inl +++ b/runtime/Includes/Core/Application.inl @@ -153,8 +153,14 @@ namespace mlx font = std::make_shared("default", dogica_ttf, scale); else font = std::make_shared(filepath, scale); - if(!m_font_registry.IsFontKnown(font)) + for(auto& gs : m_graphics) + { + if(gs) + gs->GetScene().BindFont(font); + } + if(m_font_registry.IsFontKnown(font)) return; + font->BuildFont(); m_font_registry.RegisterFont(font); } diff --git a/runtime/Includes/Core/Graphics.h b/runtime/Includes/Core/Graphics.h index 8593523..e631349 100644 --- a/runtime/Includes/Core/Graphics.h +++ b/runtime/Includes/Core/Graphics.h @@ -32,6 +32,7 @@ namespace mlx [[nodiscard]] MLX_FORCEINLINE bool HasWindow() const noexcept { return m_has_window; } [[nodiscard]] MLX_FORCEINLINE Renderer& GetRenderer() { return m_renderer; } + [[nodiscard]] MLX_FORCEINLINE Scene& GetScene() { return *p_scene; } ~GraphicsSupport(); diff --git a/runtime/Includes/Core/Graphics.inl b/runtime/Includes/Core/Graphics.inl index f4386a0..2b44238 100644 --- a/runtime/Includes/Core/Graphics.inl +++ b/runtime/Includes/Core/Graphics.inl @@ -6,7 +6,7 @@ namespace mlx void GraphicsSupport::ResetRenderData() noexcept { MLX_PROFILE_FUNCTION(); - p_scene->ResetSprites(); + p_scene->ResetScene(); m_put_pixel_manager.ResetRenderData(); m_draw_layer = 0; m_pixelput_called = false; @@ -27,10 +27,30 @@ namespace mlx void GraphicsSupport::StringPut(int x, int y, std::uint32_t color, std::string str) { MLX_PROFILE_FUNCTION(); - (void)x; - (void)y; - (void)color; - (void)str; + Vec4f vec_color = { + static_cast((color & 0x000000FF)) / 255.f, + static_cast((color & 0x0000FF00) >> 8) / 255.f, + static_cast((color & 0x00FF0000) >> 16) / 255.f, + static_cast((color & 0xFF000000) >> 24) / 255.f + }; + + NonOwningPtr text = p_scene->GetTextFromPositionAndColor(str, Vec2f{ static_cast(x), static_cast(y) }, vec_color); + if(!text) + { + Text& new_text = p_scene->CreateText(str); + new_text.SetPosition(Vec2f{ static_cast(x), static_cast(y) }); + new_text.SetColor(std::move(vec_color)); + if(m_pixelput_called) + { + m_draw_layer++; + m_pixelput_called = false; + } + } + else if(!p_scene->IsTextAtGivenDrawLayer(str, m_draw_layer)) + { + p_scene->BringToFront(text.Get()); + m_draw_layer++; + } } void GraphicsSupport::TexturePut(NonOwningPtr texture, int x, int y) @@ -50,7 +70,7 @@ namespace mlx } else 
if(!p_scene->IsTextureAtGivenDrawLayer(texture, m_draw_layer)) { - p_scene->BringToFront(std::move(sprite)); + p_scene->BringToFront(sprite.Get()); m_draw_layer++; } } diff --git a/runtime/Includes/Graphics/Drawable.h b/runtime/Includes/Graphics/Drawable.h new file mode 100644 index 0000000..0fa4091 --- /dev/null +++ b/runtime/Includes/Graphics/Drawable.h @@ -0,0 +1,47 @@ +#ifndef __MLX_DRAWABLE__ +#define __MLX_DRAWABLE__ + +#include + +namespace mlx +{ + class Drawable + { + friend class Render2DPass; + + public: + inline Drawable(DrawableType type) : m_type(type) {} + + inline void SetColor(Vec4f color) noexcept { m_color = color; } + inline void SetPosition(Vec2f position) noexcept { m_position = position; } + + inline virtual void Update([[maybe_unused]] VkCommandBuffer cmd) {} + + [[nodiscard]] MLX_FORCEINLINE const Vec4f& GetColor() const noexcept { return m_color; } + [[nodiscard]] MLX_FORCEINLINE const Vec2f& GetPosition() const noexcept { return m_position; } + [[nodiscard]] MLX_FORCEINLINE std::shared_ptr GetMesh() const { return p_mesh; } + [[nodiscard]] MLX_FORCEINLINE DrawableType GetType() const noexcept { return m_type; } + + inline virtual ~Drawable() { if(p_set) p_set->ReturnDescriptorSetToPool(); } + + protected: + [[nodiscard]] inline bool IsSetInit() const noexcept { return p_set && p_set->IsInit(); } + [[nodiscard]] inline VkDescriptorSet GetSet(std::size_t frame_index) const noexcept { return p_set ? p_set->GetSet(frame_index) : VK_NULL_HANDLE; } + + inline void UpdateDescriptorSet(std::shared_ptr set) + { + p_set = RenderCore::Get().GetDescriptorPoolManager().GetAvailablePool().RequestDescriptorSet(set->GetShaderLayout(), set->GetShaderType()); + } + + virtual void Bind(std::size_t frame_index, VkCommandBuffer cmd) = 0; + + protected: + std::shared_ptr p_set; + std::shared_ptr p_mesh; + Vec4f m_color = Vec4f{ 1.0f, 1.0f, 1.0f, 1.0f }; + Vec2f m_position = Vec2f{ 0.0f, 0.0f }; + DrawableType m_type; + }; +} + +#endif diff --git a/runtime/Includes/Graphics/Enums.h b/runtime/Includes/Graphics/Enums.h new file mode 100644 index 0000000..8347ab0 --- /dev/null +++ b/runtime/Includes/Graphics/Enums.h @@ -0,0 +1,13 @@ +#ifndef __MLX_GRAPHICS_ENUMS__ +#define __MLX_GRAPHICS_ENUMS__ + +namespace mlx +{ + enum class DrawableType + { + Sprite, + Text + }; +} + +#endif diff --git a/runtime/Includes/Graphics/Font.inl b/runtime/Includes/Graphics/Font.inl index deef911..6a3482c 100644 --- a/runtime/Includes/Graphics/Font.inl +++ b/runtime/Includes/Graphics/Font.inl @@ -15,6 +15,9 @@ namespace mlx bool FontRegistry::IsFontKnown(std::shared_ptr font) { - return m_fonts_registry.find(font) != m_fonts_registry.end(); + return std::find_if(m_fonts_registry.begin(), m_fonts_registry.end(), [&font](std::shared_ptr rhs) + { + return font->GetName() == rhs->GetName() && font->GetScale() == rhs->GetScale(); + }) != m_fonts_registry.end(); } } diff --git a/runtime/Includes/Graphics/Scene.h b/runtime/Includes/Graphics/Scene.h index 05a0bc0..d7f2e2a 100644 --- a/runtime/Includes/Graphics/Scene.h +++ b/runtime/Includes/Graphics/Scene.h @@ -2,7 +2,10 @@ #define __MLX_SCENE__ #include +#include +#include #include +#include #include namespace mlx @@ -14,20 +17,28 @@ namespace mlx Sprite& CreateSprite(NonOwningPtr texture) noexcept; NonOwningPtr GetSpriteFromTextureAndPosition(NonOwningPtr texture, const Vec2f& position) const; - void BringToFront(NonOwningPtr sprite); void TryEraseSpriteFromTexture(NonOwningPtr texture); bool IsTextureAtGivenDrawLayer(NonOwningPtr texture, std::uint64_t draw_layer) 
const; - inline void ResetSprites() { m_sprites.clear(); } + Text& CreateText(const std::string& text) noexcept; + NonOwningPtr GetTextFromPositionAndColor(const std::string& text, const Vec2f& position, const Vec4f& color) const; + bool IsTextAtGivenDrawLayer(const std::string& text, std::uint64_t draw_layer) const; - [[nodiscard]] MLX_FORCEINLINE const std::vector>& GetSprites() const noexcept { return m_sprites; } + inline void BindFont(std::shared_ptr font) { Verify((bool)font, "invalid font pointer"); p_bound_font = font; } + + void BringToFront(NonOwningPtr drawable); + + inline void ResetScene() { m_drawables.clear(); } + + [[nodiscard]] MLX_FORCEINLINE const std::vector>& GetDrawables() const noexcept { return m_drawables; } [[nodiscard]] MLX_FORCEINLINE ViewerData& GetViewerData() noexcept { return m_viewer_data; } ~Scene() = default; private: - std::vector> m_sprites; + std::vector> m_drawables; ViewerData m_viewer_data; + std::shared_ptr p_bound_font; }; }
diff --git a/runtime/Includes/Graphics/Sprite.h b/runtime/Includes/Graphics/Sprite.h index 49bde83..b82f4fc 100644 --- a/runtime/Includes/Graphics/Sprite.h +++ b/runtime/Includes/Graphics/Sprite.h @@ -6,10 +6,11 @@ #include #include #include +#include namespace mlx { - class Sprite + class Sprite : public Drawable { friend class Render2DPass; @@ -17,26 +18,18 @@ namespace mlx Sprite(NonOwningPtr texture); Sprite(std::shared_ptr mesh, NonOwningPtr texture); - inline void SetColor(Vec4f color) noexcept { m_color = color; } - inline void SetPosition(Vec2f position) noexcept { m_position = position; } - - [[nodiscard]] MLX_FORCEINLINE const Vec4f& GetColor() const noexcept { return m_color; } - [[nodiscard]] MLX_FORCEINLINE const Vec2f& GetPosition() const noexcept { return m_position; } - [[nodiscard]] MLX_FORCEINLINE std::shared_ptr GetMesh() const { return p_mesh; } - [[nodiscard]] MLX_FORCEINLINE NonOwningPtr GetTexture() const { return p_texture; } - - inline ~Sprite() { if(p_set) p_set->ReturnDescriptorSetToPool(); } - - private: - [[nodiscard]] inline bool IsSetInit() const noexcept { return p_set && p_set->IsInit(); } - [[nodiscard]] inline VkDescriptorSet GetSet(std::size_t frame_index) const noexcept { return p_set ?
p_set->GetSet(frame_index) : VK_NULL_HANDLE; } - - inline void UpdateDescriptorSet(std::shared_ptr set) + MLX_FORCEINLINE void Update(VkCommandBuffer cmd) override { - p_set = RenderCore::Get().GetDescriptorPoolManager().GetAvailablePool().RequestDescriptorSet(set->GetShaderLayout(), set->GetShaderType()); + Verify((bool)p_texture, "a sprite has no texture attached (internal mlx issue, please report to the devs)"); + p_texture->Update(cmd); } - inline void Bind(std::size_t frame_index, VkCommandBuffer cmd) + [[nodiscard]] MLX_FORCEINLINE NonOwningPtr GetTexture() const { return p_texture; } + + inline ~Sprite() = default; + + private: + inline void Bind(std::size_t frame_index, VkCommandBuffer cmd) override { if(!p_set) return; @@ -45,11 +38,7 @@ namespace mlx } private: - std::shared_ptr p_set; NonOwningPtr p_texture; - std::shared_ptr p_mesh; - Vec4f m_color = Vec4f{ 1.0f, 1.0f, 1.0f, 1.0f }; - Vec2f m_position = Vec2f{ 0.0f, 0.0f }; }; } diff --git a/runtime/Includes/Graphics/Text.h b/runtime/Includes/Graphics/Text.h index 6c17c60..457bc21 100644 --- a/runtime/Includes/Graphics/Text.h +++ b/runtime/Includes/Graphics/Text.h @@ -2,24 +2,38 @@ #define __MLX_TEXT__ #include +#include +#include namespace mlx { - class Text + class Text : public Drawable { + friend class Render2DPass; + public: Text(const std::string& text, std::shared_ptr font); + inline Text(const std::string& text, std::shared_ptr font, std::shared_ptr mesh) : Drawable(DrawableType::Text) { Init(text, font, mesh); } [[nodiscard]] inline const std::string& GetText() const { return m_text; } [[nodiscard]] inline std::shared_ptr GetFont() const { return p_font; } - [[nodiscard]] MLX_FORCEINLINE std::uint32_t GetColor() const noexcept { return m_color; } - ~Text(); + virtual ~Text() = default; + + private: + void Init(const std::string& text, std::shared_ptr font, std::shared_ptr mesh); + + inline void Bind(std::size_t frame_index, VkCommandBuffer cmd) override + { + if(!p_set) + return; + p_set->SetImage(frame_index, 0, const_cast(p_font->GetTexture())); + p_set->Update(frame_index, cmd); + } private: std::shared_ptr p_font; std::string m_text; - std::uint32_t m_color; }; } diff --git a/runtime/Includes/PreCompiled.h b/runtime/Includes/PreCompiled.h index 11b2a78..0330c84 100644 --- a/runtime/Includes/PreCompiled.h +++ b/runtime/Includes/PreCompiled.h @@ -96,6 +96,8 @@ #include #include +constexpr const int RANGE = 1024; + using Handle = void*; #endif diff --git a/runtime/Includes/Renderer/Image.h b/runtime/Includes/Renderer/Image.h index b262224..c9fd54e 100644 --- a/runtime/Includes/Renderer/Image.h +++ b/runtime/Includes/Renderer/Image.h @@ -71,7 +71,7 @@ namespace mlx bool m_is_multisampled = false; }; - class Texture : public Image + class Texture: public Image { public: Texture() = default; diff --git a/runtime/Sources/Core/Application.cpp b/runtime/Sources/Core/Application.cpp index 9f894c2..fd2cd36 100644 --- a/runtime/Sources/Core/Application.cpp +++ b/runtime/Sources/Core/Application.cpp @@ -24,6 +24,7 @@ namespace mlx m_fps.Init(); p_render_core = std::make_unique(); + LoadFont("default", 6.0f); } void Application::Run() noexcept diff --git a/runtime/Sources/Core/Bridge.cpp b/runtime/Sources/Core/Bridge.cpp index 945d5a3..8905390 100644 --- a/runtime/Sources/Core/Bridge.cpp +++ b/runtime/Sources/Core/Bridge.cpp @@ -155,13 +155,13 @@ extern "C" MLX_CHECK_APPLICATION_POINTER(mlx); if (filename == nullptr) { - mlx::Error("PNG loader : filename is NULL"); + mlx::Error("PNG loader: filename is NULL"); return nullptr; } 
std::filesystem::path file(filename); if(file.extension() != ".png") { - mlx::Error("PNG loader : not a png file '%'", filename); + mlx::Error("PNG loader: not a png file '%'", filename); return nullptr; } return static_cast(mlx)->NewStbTexture(filename, width, height); @@ -172,13 +172,13 @@ extern "C" MLX_CHECK_APPLICATION_POINTER(mlx); if (filename == nullptr) { - mlx::Error("JPG loader : filename is NULL"); + mlx::Error("JPG loader: filename is NULL"); return nullptr; } std::filesystem::path file(filename); if(file.extension() != ".jpg" && file.extension() != ".jpeg") { - mlx::Error("JPG loader : not a jpg file '%'", filename); + mlx::Error("JPG loader: not a jpg file '%'", filename); return nullptr; } return static_cast(mlx)->NewStbTexture(filename, width, height); @@ -189,13 +189,13 @@ extern "C" MLX_CHECK_APPLICATION_POINTER(mlx); if (filename == nullptr) { - mlx::Error("BMP loader : filename is NULL"); + mlx::Error("BMP loader: filename is NULL"); return nullptr; } std::filesystem::path file(filename); if(file.extension() != ".bmp" && file.extension() != ".dib") { - mlx::Error("BMP loader : not a bmp file '%'", filename); + mlx::Error("BMP loader: not a bmp file '%'", filename); return nullptr; } return static_cast(mlx)->NewStbTexture(filename, width, height); @@ -230,13 +230,13 @@ extern "C" MLX_CHECK_APPLICATION_POINTER(mlx); if (filepath == nullptr) { - mlx::Error("Font loader : filepath is NULL"); + mlx::Error("Font loader: filepath is NULL"); return; } std::filesystem::path file(filepath); if(std::strcmp(filepath, "default") != 0 && file.extension() != ".ttf" && file.extension() != ".tte") { - mlx::Error("TTF loader : not a truetype font file '%'", filepath); + mlx::Error("TTF loader: not a truetype font file '%'", filepath); return; } if(std::strcmp(filepath, "default") == 0) @@ -250,13 +250,13 @@ extern "C" MLX_CHECK_APPLICATION_POINTER(mlx); if (filepath == nullptr) { - mlx::Error("Font loader : filepath is NULL"); + mlx::Error("Font loader: filepath is NULL"); return; } std::filesystem::path file(filepath); if(std::strcmp(filepath, "default") != 0 && file.extension() != ".ttf" && file.extension() != ".tte") { - mlx::Error("TTF loader : not a truetype font file '%'", filepath); + mlx::Error("TTF loader: not a truetype font file '%'", filepath); return; } static_cast(mlx)->LoadFont(file, scale); diff --git a/runtime/Sources/Core/Memory.cpp b/runtime/Sources/Core/Memory.cpp index f5f8ce6..197a458 100644 --- a/runtime/Sources/Core/Memory.cpp +++ b/runtime/Sources/Core/Memory.cpp @@ -43,7 +43,7 @@ namespace mlx auto it = std::find(s_blocks.begin(), s_blocks.end(), ptr); if(it == s_blocks.end()) { - Error("Memory Manager : trying to free a pointer not allocated by the memory manager"); + Error("Memory Manager: trying to free a pointer not allocated by the memory manager"); return; } std::free(*it); diff --git a/runtime/Sources/Core/Profiler.cpp b/runtime/Sources/Core/Profiler.cpp index a4bb9d6..b7c106b 100644 --- a/runtime/Sources/Core/Profiler.cpp +++ b/runtime/Sources/Core/Profiler.cpp @@ -16,7 +16,7 @@ namespace mlx if(m_output_stream.is_open()) WriteHeader(); else - Error("Profiler : cannot open runtime profile file"); + Error("Profiler: cannot open runtime profile file"); m_runtime_session_began = true; } diff --git a/runtime/Sources/Core/SDLManager.cpp b/runtime/Sources/Core/SDLManager.cpp index 4d91a8c..d734cb7 100644 --- a/runtime/Sources/Core/SDLManager.cpp +++ b/runtime/Sources/Core/SDLManager.cpp @@ -43,21 +43,21 @@ namespace mlx #endif if(SDL_Init(SDL_INIT_VIDEO | 
SDL_INIT_EVENTS | SDL_INIT_TIMER) != 0) - FatalError("SDL : unable to init all subsystems; %", SDL_GetError()); + FatalError("SDL: unable to init all subsystems; %", SDL_GetError()); DebugLog("SDL Manager initialized"); } Handle SDLManager::CreateWindow(const std::string& title, std::size_t w, std::size_t h, bool hidden, std::int32_t& id) { Internal::WindowInfos* infos = new Internal::WindowInfos; - Verify(infos != nullptr, "SDL : window allocation failed"); + Verify(infos != nullptr, "SDL: window allocation failed"); if(title == "让我们在月光下做爱吧") infos->window = SDL_CreateWindow(title.c_str(), std::rand() % 512, std::rand() % 512, w, h, SDL_WINDOW_VULKAN | (hidden ? SDL_WINDOW_HIDDEN : SDL_WINDOW_SHOWN)); else infos->window = SDL_CreateWindow(title.c_str(), SDL_WINDOWPOS_CENTERED, SDL_WINDOWPOS_CENTERED, w, h, SDL_WINDOW_VULKAN | (hidden ? SDL_WINDOW_HIDDEN : SDL_WINDOW_SHOWN)); if(!infos->window) - FatalError("SDL : unable to open a new window; %", SDL_GetError()); + FatalError("SDL: unable to open a new window; %", SDL_GetError()); infos->icon = SDL_CreateRGBSurfaceFrom(static_cast(logo_mlx), logo_mlx_width, logo_mlx_height, 32, 4 * logo_mlx_width, rmask, gmask, bmask, amask); SDL_SetWindowIcon(infos->window, infos->icon); @@ -70,7 +70,7 @@ namespace mlx void SDLManager::DestroyWindow(Handle window) noexcept { - Verify(m_windows_registry.find(window) != m_windows_registry.end(), "SDL : cannot destroy window; unknown window pointer"); + Verify(m_windows_registry.find(window) != m_windows_registry.end(), "SDL: cannot destroy window; unknown window pointer"); Internal::WindowInfos* infos = static_cast(window); if(infos->window != nullptr) @@ -86,7 +86,7 @@ namespace mlx { VkSurfaceKHR surface; if(!SDL_Vulkan_CreateSurface(static_cast(window)->window, instance, &surface)) - FatalError("SDL : could not create a Vulkan surface; %", SDL_GetError()); + FatalError("SDL: could not create a Vulkan surface; %", SDL_GetError()); return surface; } @@ -94,10 +94,10 @@ namespace mlx { std::uint32_t count; if(!SDL_Vulkan_GetInstanceExtensions(static_cast(window)->window, &count, nullptr)) - FatalError("SDL Manager : could not retrieve Vulkan instance extensions"); + FatalError("SDL Manager: could not retrieve Vulkan instance extensions"); std::vector extensions(count); if(!SDL_Vulkan_GetInstanceExtensions(static_cast(window)->window, &count, extensions.data())) - FatalError("SDL Manager : could not retrieve Vulkan instance extensions"); + FatalError("SDL Manager: could not retrieve Vulkan instance extensions"); extensions.push_back(VK_KHR_SURFACE_EXTENSION_NAME); return extensions; }
diff --git a/runtime/Sources/Graphics/Font.cpp b/runtime/Sources/Graphics/Font.cpp index 15d59db..d74afa9 100644 --- a/runtime/Sources/Graphics/Font.cpp +++ b/runtime/Sources/Graphics/Font.cpp @@ -10,8 +10,6 @@ namespace mlx { - constexpr const int RANGE = 1024; - void Font::BuildFont() { MLX_PROFILE_FUNCTION(); @@ -21,7 +19,7 @@ namespace mlx std::ifstream file(std::get(m_build_data), std::ios::binary); if(!file.is_open()) { - Error("Font : cannot open font file, %", m_name); + Error("Font: cannot open font file, %", m_name); return; } std::ifstream::pos_type file_size = std::filesystem::file_size(std::get(m_build_data)); @@ -56,6 +54,8 @@ namespace mlx #else m_atlas.Init(vulkan_bitmap, RANGE, RANGE, VK_FORMAT_R8G8B8A8_SRGB, false, {}); #endif + + DebugLog("Font: loaded %", m_name); } void Font::Destroy()
diff --git a/runtime/Sources/Graphics/Scene.cpp b/runtime/Sources/Graphics/Scene.cpp index 6335974..f7752e6 100644 ---
a/runtime/Sources/Graphics/Scene.cpp +++ b/runtime/Sources/Graphics/Scene.cpp @@ -1,3 +1,4 @@ +#include "Graphics/Enums.h" #include #include #include @@ -10,64 +11,117 @@ namespace mlx MLX_PROFILE_FUNCTION(); Verify((bool)texture, "Scene: invalid texture (internal mlx issue, please report to devs)"); - #pragma omp parallel for - for(auto& sprite : m_sprites) + for(auto& drawable : m_drawables) { - if(texture->GetWidth() == sprite->GetTexture()->GetWidth() && texture->GetHeight() == sprite->GetTexture()->GetHeight()) + if(!drawable || drawable->GetType() != DrawableType::Sprite) + continue; + if(texture->GetWidth() == static_cast(drawable.get())->GetTexture()->GetWidth() && texture->GetHeight() == static_cast(drawable.get())->GetTexture()->GetHeight()) { - std::shared_ptr new_sprite = std::make_shared(sprite->GetMesh(), texture); - m_sprites.push_back(new_sprite); + std::shared_ptr new_sprite = std::make_shared(drawable->GetMesh(), texture); + m_drawables.push_back(new_sprite); return *new_sprite; } } std::shared_ptr sprite = std::make_shared(texture); - m_sprites.push_back(sprite); + m_drawables.push_back(sprite); return *sprite; } NonOwningPtr Scene::GetSpriteFromTextureAndPosition(NonOwningPtr texture, const Vec2f& position) const { MLX_PROFILE_FUNCTION(); - auto it = std::find_if(m_sprites.begin(), m_sprites.end(), [&texture, &position](std::shared_ptr sprite) + auto it = std::find_if(m_drawables.begin(), m_drawables.end(), [&texture, &position](std::shared_ptr drawable) { - return sprite->GetTexture() == texture && sprite->GetPosition().x == position.x && sprite->GetPosition().y == position.y; + if(!drawable || drawable->GetType() != DrawableType::Sprite) + return false; + return static_cast(drawable.get())->GetTexture() == texture && drawable->GetPosition() == position; }); - return (it != m_sprites.end() ? it->get() : nullptr); - } - - void Scene::BringToFront(NonOwningPtr sprite) - { - MLX_PROFILE_FUNCTION(); - auto it = std::find_if(m_sprites.begin(), m_sprites.end(), [&sprite](std::shared_ptr sprite_ptr) - { - return sprite_ptr.get() == sprite.Get(); - }); - if(it == m_sprites.end()) - return; - std::rotate(it, it + 1, m_sprites.end()); + return static_cast(it != m_drawables.end() ? 
it->get() : nullptr); } void Scene::TryEraseSpriteFromTexture(NonOwningPtr texture) { MLX_PROFILE_FUNCTION(); - auto it = m_sprites.begin(); + auto it = m_drawables.begin(); do { - it = std::find_if(m_sprites.begin(), m_sprites.end(), [&texture](std::shared_ptr sprite) + it = std::find_if(m_drawables.begin(), m_drawables.end(), [&texture](std::shared_ptr drawable) { - return sprite->GetTexture() == texture; + if(!drawable || drawable->GetType() != DrawableType::Sprite) + return false; + return static_cast(drawable.get())->GetTexture() == texture; }); - if(it != m_sprites.end()) - m_sprites.erase(it); - } while(it != m_sprites.end()); + if(it != m_drawables.end()) + m_drawables.erase(it); + } while(it != m_drawables.end()); } - + bool Scene::IsTextureAtGivenDrawLayer(NonOwningPtr texture, std::uint64_t draw_layer) const { MLX_PROFILE_FUNCTION(); - if(draw_layer >= m_sprites.size()) + if(draw_layer >= m_drawables.size()) return false; - return m_sprites[draw_layer]->GetTexture() == texture; + if(!m_drawables[draw_layer] || m_drawables[draw_layer]->GetType() != DrawableType::Sprite) + return false; + return static_cast(m_drawables[draw_layer].get())->GetTexture() == texture; + } + + Text& Scene::CreateText(const std::string& text) noexcept + { + MLX_PROFILE_FUNCTION(); + + Assert((bool)p_bound_font, "no font bound"); + + for(auto& drawable : m_drawables) + { + if(!drawable || drawable->GetType() != DrawableType::Text) + continue; + if(text == static_cast(drawable.get())->GetText() && p_bound_font == static_cast(drawable.get())->GetFont()) + { + std::shared_ptr new_text = std::make_shared(text, p_bound_font, drawable->GetMesh()); + m_drawables.push_back(new_text); + return *new_text; + } + } + + std::shared_ptr new_text = std::make_shared(text, p_bound_font); + m_drawables.push_back(new_text); + return *new_text; + } + + NonOwningPtr Scene::GetTextFromPositionAndColor(const std::string& text, const Vec2f& position, const Vec4f& color) const + { + MLX_PROFILE_FUNCTION(); + auto it = std::find_if(m_drawables.begin(), m_drawables.end(), [&text, &position, &color](std::shared_ptr drawable) + { + if(!drawable || drawable->GetType() != DrawableType::Text) + return false; + return static_cast(drawable.get())->GetText() == text && drawable->GetPosition() == position && drawable->GetColor() == color; + }); + return static_cast(it != m_drawables.end() ? 
it->get() : nullptr); + } + + bool Scene::IsTextAtGivenDrawLayer(const std::string& text, std::uint64_t draw_layer) const + { + MLX_PROFILE_FUNCTION(); + if(draw_layer >= m_drawables.size()) + return false; + if(!m_drawables[draw_layer] || m_drawables[draw_layer]->GetType() != DrawableType::Text) + return false; + Text* ptr = static_cast(m_drawables[draw_layer].get()); + return ptr->GetText() == text && ptr->GetFont() == p_bound_font; + } + + void Scene::BringToFront(NonOwningPtr drawable) + { + MLX_PROFILE_FUNCTION(); + auto it = std::find_if(m_drawables.begin(), m_drawables.end(), [&drawable](std::shared_ptr drawable_ptr) + { + return drawable_ptr.get() == drawable.Get(); + }); + if(it == m_drawables.end()) + return; + std::rotate(it, it + 1, m_drawables.end()); } } diff --git a/runtime/Sources/Graphics/Sprite.cpp b/runtime/Sources/Graphics/Sprite.cpp index caa0c64..3fd9ae0 100644 --- a/runtime/Sources/Graphics/Sprite.cpp +++ b/runtime/Sources/Graphics/Sprite.cpp @@ -37,7 +37,7 @@ namespace mlx return mesh; } - Sprite::Sprite(NonOwningPtr texture) + Sprite::Sprite(NonOwningPtr texture) : Drawable(DrawableType::Sprite) { MLX_PROFILE_FUNCTION(); Verify((bool)texture, "Sprite: invalid texture (internal mlx issue, please report to devs)"); @@ -45,7 +45,7 @@ namespace mlx p_texture = texture; } - Sprite::Sprite(std::shared_ptr mesh, NonOwningPtr texture) + Sprite::Sprite(std::shared_ptr mesh, NonOwningPtr texture) : Drawable(DrawableType::Sprite) { MLX_PROFILE_FUNCTION(); Verify((bool)texture, "Sprite: invalid texture (internal mlx issue, please report to devs)"); diff --git a/runtime/Sources/Graphics/Text.cpp b/runtime/Sources/Graphics/Text.cpp new file mode 100644 index 0000000..475541c --- /dev/null +++ b/runtime/Sources/Graphics/Text.cpp @@ -0,0 +1,61 @@ +#include + +#include + +#define STB_RECT_PACK_IMPLEMENTATION +#include + +namespace mlx +{ + Text::Text(const std::string& text, std::shared_ptr font) : Drawable(DrawableType::Text) + { + MLX_PROFILE_FUNCTION(); + + Assert(font != nullptr, "invalid font"); + + std::vector vertex_data; + std::vector index_data; + + float stb_x = 0.0f; + float stb_y = 0.0f; + + for(char c : text) + { + if(c < 32) + continue; + + stbtt_aligned_quad q; + stbtt_GetPackedQuad(font->GetCharData().data(), RANGE, RANGE, c - 32, &stb_x, &stb_y, &q, 1); + + std::size_t index = vertex_data.size(); + + vertex_data.emplace_back(Vec4f{ q.x0, q.y0, 0.0f, 0.0f }, Vec2f{ q.s0, q.t0 }); + vertex_data.emplace_back(Vec4f{ q.x1, q.y0, 0.0f, 0.0f }, Vec2f{ q.s1, q.t0 }); + vertex_data.emplace_back(Vec4f{ q.x1, q.y1, 0.0f, 0.0f }, Vec2f{ q.s1, q.t1 }); + vertex_data.emplace_back(Vec4f{ q.x0, q.y1, 0.0f, 0.0f }, Vec2f{ q.s0, q.t1 }); + + index_data.emplace_back(index + 0); + index_data.emplace_back(index + 1); + index_data.emplace_back(index + 2); + index_data.emplace_back(index + 2); + index_data.emplace_back(index + 3); + index_data.emplace_back(index + 0); + } + + std::shared_ptr mesh = std::make_shared(); + mesh->AddSubMesh({ std::move(vertex_data), std::move(index_data) }); + Init(text, font, mesh); + } + + void Text::Init(const std::string& text, std::shared_ptr font, std::shared_ptr mesh) + { + MLX_PROFILE_FUNCTION(); + + Assert(font != nullptr, "invalid font"); + Assert(mesh != nullptr, "invalid mesh"); + + p_mesh = mesh; + p_font = font; + m_text = text; + } +} diff --git a/runtime/Sources/Renderer/Buffer.cpp b/runtime/Sources/Renderer/Buffer.cpp index 3e64d99..a0dd737 100644 --- a/runtime/Sources/Renderer/Buffer.cpp +++ b/runtime/Sources/Renderer/Buffer.cpp @@ -15,7 
+15,7 @@ namespace mlx { if(data.Empty()) { - Warning("Vulkan : trying to create constant buffer without data (constant buffers cannot be modified after creation)"); + Warning("Vulkan: trying to create constant buffer without data (constant buffers cannot be modified after creation)"); return; } m_usage = usage | VK_BUFFER_USAGE_TRANSFER_SRC_BIT; @@ -70,12 +70,12 @@ namespace mlx MLX_PROFILE_FUNCTION(); if(!(m_usage & VK_BUFFER_USAGE_TRANSFER_DST_BIT)) { - Error("Vulkan : buffer cannot be the destination of a copy because it does not have the correct usage flag"); + Error("Vulkan: buffer cannot be the destination of a copy because it does not have the correct usage flag"); return false; } if(!(buffer.m_usage & VK_BUFFER_USAGE_TRANSFER_SRC_BIT)) { - Error("Vulkan : buffer cannot be the source of a copy because it does not have the correct usage flag"); + Error("Vulkan: buffer cannot be the source of a copy because it does not have the correct usage flag"); return false; } @@ -108,7 +108,7 @@ namespace mlx if(new_buffer.CopyFrom(*this)) Swap(new_buffer); new_buffer.Destroy(); - DebugLog("Vulkan : pushed buffer to GPU memory"); + DebugLog("Vulkan: pushed buffer to GPU memory"); } void GPUBuffer::Destroy() noexcept @@ -141,12 +141,12 @@ namespace mlx MLX_PROFILE_FUNCTION(); if(data.GetSize() > m_size) { - Error("Vulkan : trying to store to much data in a vertex buffer (% bytes in % bytes)", data.GetSize(), m_size); + Error("Vulkan: trying to store to much data in a vertex buffer (% bytes in % bytes)", data.GetSize(), m_size); return; } if(data.Empty()) { - Warning("Vulkan : cannot set empty data in a vertex buffer"); + Warning("Vulkan: cannot set empty data in a vertex buffer"); return; } GPUBuffer staging; @@ -164,12 +164,12 @@ namespace mlx MLX_PROFILE_FUNCTION(); if(data.GetSize() > m_size) { - Error("Vulkan : trying to store to much data in an index buffer (% bytes in % bytes)", data.GetSize(), m_size); + Error("Vulkan: trying to store to much data in an index buffer (% bytes in % bytes)", data.GetSize(), m_size); return; } if(data.Empty()) { - Warning("Vulkan : cannot set empty data in an index buffer"); + Warning("Vulkan: cannot set empty data in an index buffer"); return; } GPUBuffer staging; @@ -194,7 +194,7 @@ namespace mlx #endif m_maps[i] = m_buffers[i].GetMap(); if(m_maps[i] == nullptr) - FatalError("Vulkan : unable to map a uniform buffer"); + FatalError("Vulkan: unable to map a uniform buffer"); } } @@ -203,7 +203,7 @@ namespace mlx MLX_PROFILE_FUNCTION(); if(data.GetSize() != m_buffers[frame_index].GetSize()) { - Error("Vulkan : invalid data size to update to a uniform buffer, % != %", data.GetSize(), m_buffers[frame_index].GetSize()); + Error("Vulkan: invalid data size to update to a uniform buffer, % != %", data.GetSize(), m_buffers[frame_index].GetSize()); return; } if(m_maps[frame_index] != nullptr) diff --git a/runtime/Sources/Renderer/Descriptor.cpp b/runtime/Sources/Renderer/Descriptor.cpp index 4349fd9..b0240c1 100644 --- a/runtime/Sources/Renderer/Descriptor.cpp +++ b/runtime/Sources/Renderer/Descriptor.cpp @@ -19,7 +19,7 @@ namespace mlx if(image.GetType() == ImageType::Color) image.TransitionLayout(VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL, cmd); else - Error("Vulkan : cannot transition descriptor image layout, unkown image type"); + Error("Vulkan: cannot transition descriptor image layout, unkown image type"); } void DescriptorPool::Init() noexcept @@ -128,7 +128,7 @@ namespace mlx }); if(it == m_used_sets.end()) { - Error("Vulkan : cannot return descriptor set to 
pool, invalid pool"); + Error("Vulkan: cannot return descriptor set to pool, invalid pool"); return; } m_used_sets.erase(it); @@ -182,12 +182,12 @@ namespace mlx }); if(it == m_descriptors.end()) { - Warning("Vulkan : cannot update descriptor set image; invalid binding"); + Warning("Vulkan: cannot update descriptor set image; invalid binding"); return; } if(it->type != VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER) { - Error("Vulkan : trying to bind an image to the wrong descriptor"); + Error("Vulkan: trying to bind an image to the wrong descriptor"); return; } it->image_ptr = ℑ @@ -203,12 +203,12 @@ namespace mlx }); if(it == m_descriptors.end()) { - Warning("Vulkan : cannot update descriptor set buffer; invalid binding"); + Warning("Vulkan: cannot update descriptor set buffer; invalid binding"); return; } if(it->type != VK_DESCRIPTOR_TYPE_STORAGE_BUFFER) { - Error("Vulkan : trying to bind a buffer to the wrong descriptor"); + Error("Vulkan: trying to bind a buffer to the wrong descriptor"); return; } it->storage_buffer_ptr = &buffer; @@ -224,12 +224,12 @@ namespace mlx }); if(it == m_descriptors.end()) { - Warning("Vulkan : cannot update descriptor set buffer; invalid binding"); + Warning("Vulkan: cannot update descriptor set buffer; invalid binding"); return; } if(it->type != VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER) { - Error("Vulkan : trying to bind a buffer to the wrong descriptor"); + Error("Vulkan: trying to bind a buffer to the wrong descriptor"); return; } it->uniform_buffer_ptr = &buffer; diff --git a/runtime/Sources/Renderer/Image.cpp b/runtime/Sources/Renderer/Image.cpp index ab4c58b..53ea351 100644 --- a/runtime/Sources/Renderer/Image.cpp +++ b/runtime/Sources/Renderer/Image.cpp @@ -14,6 +14,12 @@ #include #endif +#ifdef IMAGE_OPTIMIZED + #define TILING VK_IMAGE_TILING_OPTIMAL +#else + #define TILING VK_IMAGE_TILING_LINEAR +#endif + namespace mlx { void Image::Init(ImageType type, std::uint32_t width, std::uint32_t height, VkFormat format, VkImageTiling tiling, VkImageUsageFlags usage, bool is_multisampled, [[maybe_unused]] std::string_view debug_name) @@ -153,7 +159,7 @@ namespace mlx void Texture::Init(CPUBuffer pixels, std::uint32_t width, std::uint32_t height, VkFormat format, bool is_multisampled, [[maybe_unused]] std::string_view debug_name) { MLX_PROFILE_FUNCTION(); - Image::Init(ImageType::Color, width, height, format, VK_IMAGE_TILING_OPTIMAL, VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, is_multisampled, std::move(debug_name)); + Image::Init(ImageType::Color, width, height, format, TILING, VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, is_multisampled, std::move(debug_name)); Image::CreateImageView(VK_IMAGE_VIEW_TYPE_2D, VK_IMAGE_ASPECT_COLOR_BIT); Image::CreateSampler(); if(pixels) @@ -228,7 +234,7 @@ namespace mlx if(m_staging_buffer.has_value()) return; #ifdef DEBUG - DebugLog("Texture : enabling CPU mapping for '%'", m_debug_name); + DebugLog("Texture: enabling CPU mapping for '%'", m_debug_name); #endif m_staging_buffer.emplace(); std::size_t size = m_width * m_height * kvfFormatSize(m_format); @@ -263,12 +269,12 @@ namespace mlx if(!std::filesystem::exists(file)) { - Error("Image : file not found %", file); + Error("Image: file not found %", file); return nullptr; } if(stbi_is_hdr(filename.c_str())) { - Error("Texture : unsupported image format % (HDR image)", file); + Error("Texture: unsupported 
image format % (HDR image)", file); return nullptr; } int dummy_w; diff --git a/runtime/Sources/Renderer/Memory.cpp b/runtime/Sources/Renderer/Memory.cpp index 92dcb55..b4b041f 100644 --- a/runtime/Sources/Renderer/Memory.cpp +++ b/runtime/Sources/Renderer/Memory.cpp @@ -54,7 +54,7 @@ namespace mlx allocator_create_info.pVulkanFunctions = &vma_vulkan_func; kvfCheckVk(vmaCreateAllocator(&allocator_create_info, &m_allocator)); - DebugLog("Graphics allocator : created new allocator"); + DebugLog("Graphics Allocator: created new allocator"); } VmaAllocation GPUAllocator::CreateBuffer(const VkBufferCreateInfo* binfo, const VmaAllocationCreateInfo* vinfo, VkBuffer& buffer, const char* name) noexcept @@ -74,7 +74,7 @@ namespace mlx #endif vmaSetAllocationName(m_allocator, allocation, name); } - DebugLog("Graphics Allocator : created new buffer '%'", name); + DebugLog("Graphics Allocator: created new buffer '%'", name); m_active_buffers_allocations++; return allocation; } @@ -85,9 +85,9 @@ namespace mlx RenderCore::Get().WaitDeviceIdle(); vmaDestroyBuffer(m_allocator, buffer, allocation); if(name != nullptr) - DebugLog("Graphics Allocator : destroyed buffer '%'", name); + DebugLog("Graphics Allocator: destroyed buffer '%'", name); else - DebugLog("Graphics Allocator : destroyed buffer"); + DebugLog("Graphics Allocator: destroyed buffer"); m_active_buffers_allocations--; } @@ -108,7 +108,7 @@ namespace mlx #endif vmaSetAllocationName(m_allocator, allocation, name); } - DebugLog("Graphics Allocator : created new image '%'", name); + DebugLog("Graphics Allocator: created new image '%'", name); m_active_images_allocations++; return allocation; } @@ -119,9 +119,9 @@ namespace mlx RenderCore::Get().WaitDeviceIdle(); vmaDestroyImage(m_allocator, image, allocation); if(name != nullptr) - DebugLog("Graphics Allocator : destroyed image '%'", name); + DebugLog("Graphics Allocator: destroyed image '%'", name); else - DebugLog("Graphics Allocator : destroyed image"); + DebugLog("Graphics Allocator: destroyed image"); m_active_images_allocations--; } @@ -145,7 +145,7 @@ namespace mlx std::ofstream file(name); if(!file.is_open()) { - Error("Graphics allocator : unable to dump memory to a json file"); + Error("Graphics Allocator: unable to dump memory to a json file"); return; } char* str = nullptr; @@ -166,14 +166,14 @@ namespace mlx { MLX_PROFILE_FUNCTION(); if(m_active_images_allocations != 0) - Error("Graphics allocator : some user-dependant allocations were not freed before destroying the display (% active allocations). You may have not destroyed all the MLX resources you've created", m_active_images_allocations); + Error("Graphics Allocator: some user-dependant allocations were not freed before destroying the display (% active allocations). You may have not destroyed all the MLX resources you've created", m_active_images_allocations); else if(m_active_buffers_allocations != 0) - Error("Graphics allocator : some MLX-dependant allocations were not freed before destroying the display (% active allocations). This is an error in the MLX, please report this should not happen", m_active_buffers_allocations); + Error("Graphics Allocator: some MLX-dependant allocations were not freed before destroying the display (% active allocations). 
This is an error in the MLX, please report this should not happen", m_active_buffers_allocations); if(m_active_images_allocations < 0 || m_active_buffers_allocations < 0) - Warning("Graphics allocator : the impossible happened, the MLX has freed more allocations than it has made (wtf)"); + Warning("Graphics Allocator: the impossible happened, the MLX has freed more allocations than it has made (wtf)"); vmaDestroyAllocator(m_allocator); m_active_buffers_allocations = 0; m_active_images_allocations = 0; - DebugLog("Vulkan : destroyed a graphics allocator"); + DebugLog("Vulkan: destroyed a graphics allocator"); } } diff --git a/runtime/Sources/Renderer/Pipelines/Graphics.cpp b/runtime/Sources/Renderer/Pipelines/Graphics.cpp index 401a877..7d9b902 100644 --- a/runtime/Sources/Renderer/Pipelines/Graphics.cpp +++ b/runtime/Sources/Renderer/Pipelines/Graphics.cpp @@ -11,7 +11,7 @@ namespace mlx { MLX_PROFILE_FUNCTION(); if(!descriptor.vertex_shader || !descriptor.fragment_shader) - FatalError("Vulkan : invalid shaders"); + FatalError("Vulkan: invalid shaders"); m_attachments = descriptor.color_attachments; p_vertex_shader = descriptor.vertex_shader; @@ -51,7 +51,7 @@ namespace mlx } m_pipeline = kvfCreateGraphicsPipeline(RenderCore::Get().GetDevice(), m_pipeline_layout, builder, m_renderpass); - DebugLog("Vulkan : graphics pipeline created"); + DebugLog("Vulkan: graphics pipeline created"); kvfDestroyGPipelineBuilder(builder); #ifdef MLX_HAS_DEBUG_UTILS_FUNCTIONS @@ -132,18 +132,18 @@ namespace mlx for(auto& fb : m_framebuffers) { kvfDestroyFramebuffer(RenderCore::Get().GetDevice(), fb); - DebugLog("Vulkan : framebuffer destroyed"); + DebugLog("Vulkan: framebuffer destroyed"); } m_framebuffers.clear(); kvfDestroyPipelineLayout(RenderCore::Get().GetDevice(), m_pipeline_layout); m_pipeline_layout = VK_NULL_HANDLE; - DebugLog("Vulkan : graphics pipeline layout destroyed"); + DebugLog("Vulkan: graphics pipeline layout destroyed"); kvfDestroyRenderPass(RenderCore::Get().GetDevice(), m_renderpass); m_renderpass = VK_NULL_HANDLE; - DebugLog("Vulkan : renderpass destroyed"); + DebugLog("Vulkan: renderpass destroyed"); kvfDestroyPipeline(RenderCore::Get().GetDevice(), m_pipeline); m_pipeline = VK_NULL_HANDLE; - DebugLog("Vulkan : graphics pipeline destroyed"); + DebugLog("Vulkan: graphics pipeline destroyed"); } void GraphicPipeline::CreateFramebuffers(const std::vector>& render_targets, bool clear_attachments) @@ -167,7 +167,7 @@ namespace mlx m_renderpass = kvfCreateRenderPass(RenderCore::Get().GetDevice(), attachments.data(), attachments.size(), GetPipelineBindPoint()); m_clears.clear(); m_clears.resize(attachments.size()); - DebugLog("Vulkan : renderpass created"); + DebugLog("Vulkan: renderpass created"); if(p_renderer) { @@ -175,14 +175,14 @@ namespace mlx { attachment_views[0] = image.GetImageView(); m_framebuffers.push_back(kvfCreateFramebuffer(RenderCore::Get().GetDevice(), m_renderpass, attachment_views.data(), attachment_views.size(), { .width = image.GetWidth(), .height = image.GetHeight() })); - DebugLog("Vulkan : framebuffer created"); + DebugLog("Vulkan: framebuffer created"); } } #pragma omp parallel for for(NonOwningPtr image : render_targets) { m_framebuffers.push_back(kvfCreateFramebuffer(RenderCore::Get().GetDevice(), m_renderpass, attachment_views.data(), attachment_views.size(), { .width = image->GetWidth(), .height = image->GetHeight() })); - DebugLog("Vulkan : framebuffer created"); + DebugLog("Vulkan: framebuffer created"); } } diff --git 
a/runtime/Sources/Renderer/Pipelines/Shader.cpp b/runtime/Sources/Renderer/Pipelines/Shader.cpp index 26c7d91..5a92e35 100644 --- a/runtime/Sources/Renderer/Pipelines/Shader.cpp +++ b/runtime/Sources/Renderer/Pipelines/Shader.cpp @@ -15,7 +15,7 @@ namespace mlx default : FatalError("wtf"); break; } m_module = kvfCreateShaderModule(RenderCore::Get().GetDevice(), reinterpret_cast(m_bytecode.data()), m_bytecode.size() / 4); - DebugLog("Vulkan : shader module created"); + DebugLog("Vulkan: shader module created"); GeneratePipelineLayout(m_layout); } @@ -35,7 +35,7 @@ namespace mlx bindings[i].stageFlags = m_stage; } m_set_layouts.emplace_back(kvfCreateDescriptorSetLayout(RenderCore::Get().GetDevice(), bindings.data(), bindings.size())); - DebugLog("Vulkan : descriptor set layout created"); + DebugLog("Vulkan: descriptor set layout created"); m_pipeline_layout_part.set_layouts.push_back(m_set_layouts.back()); } @@ -56,11 +56,11 @@ namespace mlx { MLX_PROFILE_FUNCTION(); kvfDestroyShaderModule(RenderCore::Get().GetDevice(), m_module); - DebugLog("Vulkan : shader module destroyed"); + DebugLog("Vulkan: shader module destroyed"); for(auto& layout : m_set_layouts) { kvfDestroyDescriptorSetLayout(RenderCore::Get().GetDevice(), layout); - DebugLog("Vulkan : descriptor set layout destroyed"); + DebugLog("Vulkan: descriptor set layout destroyed"); } } } diff --git a/runtime/Sources/Renderer/RenderCore.cpp b/runtime/Sources/Renderer/RenderCore.cpp index 58d3d54..402f893 100644 --- a/runtime/Sources/Renderer/RenderCore.cpp +++ b/runtime/Sources/Renderer/RenderCore.cpp @@ -69,7 +69,7 @@ namespace mlx #endif m_instance = kvfCreateInstance(instance_extensions.data(), instance_extensions.size()); - DebugLog("Vulkan : instance created"); + DebugLog("Vulkan: instance created"); loader->LoadInstance(m_instance); LoadKVFInstanceVulkanFunctionPointers(); @@ -81,13 +81,13 @@ namespace mlx // just for style VkPhysicalDeviceProperties props; vkGetPhysicalDeviceProperties(m_physical_device, &props); - DebugLog("Vulkan : physical device picked '%'", props.deviceName); + DebugLog("Vulkan: physical device picked '%'", props.deviceName); const char* device_extensions[] = { VK_KHR_SWAPCHAIN_EXTENSION_NAME }; VkPhysicalDeviceFeatures features{}; vkGetPhysicalDeviceFeatures(m_physical_device, &features); m_device = kvfCreateDevice(m_physical_device, device_extensions, sizeof(device_extensions) / sizeof(device_extensions[0]), &features); - DebugLog("Vulkan : logical device created"); + DebugLog("Vulkan: logical device created"); loader->LoadDevice(m_device); LoadKVFDeviceVulkanFunctionPointers(); @@ -201,9 +201,9 @@ namespace mlx m_descriptor_pool_manager.Destroy(); m_allocator.Destroy(); kvfDestroyDevice(m_device); - DebugLog("Vulkan : logical device destroyed"); + DebugLog("Vulkan: logical device destroyed"); kvfDestroyInstance(m_instance); - DebugLog("Vulkan : instance destroyed"); + DebugLog("Vulkan: instance destroyed"); loader.reset(); s_instance = nullptr; diff --git a/runtime/Sources/Renderer/RenderPasses/2DPass.cpp b/runtime/Sources/Renderer/RenderPasses/2DPass.cpp index 1cd2e25..827a056 100644 --- a/runtime/Sources/Renderer/RenderPasses/2DPass.cpp +++ b/runtime/Sources/Renderer/RenderPasses/2DPass.cpp @@ -92,32 +92,31 @@ namespace mlx VkCommandBuffer cmd = renderer.GetActiveCommandBuffer(); - const auto& sprites = scene.GetSprites(); + const auto& drawables = scene.GetDrawables(); - for(auto sprite : sprites) + for(auto drawable : drawables) { // Check every textures and update modified ones to GPU before starting 
the render pass - if(!sprite->IsSetInit()) - sprite->UpdateDescriptorSet(p_texture_set); - Verify((bool)sprite->GetTexture(), "a sprite has no texture attached (internal mlx issue, please report to the devs)"); - sprite->GetTexture()->Update(cmd); + if(!drawable->IsSetInit()) + drawable->UpdateDescriptorSet(p_texture_set); + drawable->Update(cmd); } m_pipeline.BindPipeline(cmd, 0, {}); - for(auto sprite : sprites) + for(auto drawable : drawables) { - SpriteData sprite_data; - sprite_data.position = Vec4f{ sprite->GetPosition(), 0.0f, 1.0f }; - sprite_data.color = sprite->GetColor(); + SpriteData drawable_data; + drawable_data.position = Vec4f{ drawable->GetPosition(), 0.0f, 1.0f }; + drawable_data.color = drawable->GetColor(); - sprite->Bind(frame_index, cmd); + drawable->Bind(frame_index, cmd); - std::array sets = { p_viewer_data_set->GetSet(frame_index), sprite->GetSet(frame_index) }; + std::array sets = { p_viewer_data_set->GetSet(frame_index), drawable->GetSet(frame_index) }; - RenderCore::Get().vkCmdPushConstants(cmd, m_pipeline.GetPipelineLayout(), VK_SHADER_STAGE_VERTEX_BIT, 0, sizeof(SpriteData), &sprite_data); + RenderCore::Get().vkCmdPushConstants(cmd, m_pipeline.GetPipelineLayout(), VK_SHADER_STAGE_VERTEX_BIT, 0, sizeof(SpriteData), &drawable_data); RenderCore::Get().vkCmdBindDescriptorSets(cmd, m_pipeline.GetPipelineBindPoint(), m_pipeline.GetPipelineLayout(), 0, sets.size(), sets.data(), 0, nullptr); - sprite->GetMesh()->Draw(cmd, renderer.GetDrawCallsCounterRef(), renderer.GetPolygonDrawnCounterRef()); + drawable->GetMesh()->Draw(cmd, renderer.GetDrawCallsCounterRef(), renderer.GetPolygonDrawnCounterRef()); } m_pipeline.EndPipeline(cmd); } diff --git a/runtime/Sources/Renderer/Renderer.cpp b/runtime/Sources/Renderer/Renderer.cpp index 31b12a9..823db60 100644 --- a/runtime/Sources/Renderer/Renderer.cpp +++ b/runtime/Sources/Renderer/Renderer.cpp @@ -33,20 +33,20 @@ namespace mlx p_window = window; m_surface = p_window->CreateVulkanSurface(RenderCore::Get().GetInstance()); - DebugLog("Vulkan : surface created"); + DebugLog("Vulkan: surface created"); CreateSwapchain(); for(std::size_t i = 0; i < MAX_FRAMES_IN_FLIGHT; i++) { m_image_available_semaphores[i] = kvfCreateSemaphore(RenderCore::Get().GetDevice()); - DebugLog("Vulkan : image available semaphore created"); + DebugLog("Vulkan: image available semaphore created"); m_render_finished_semaphores[i] = kvfCreateSemaphore(RenderCore::Get().GetDevice()); - DebugLog("Vulkan : render finished semaphore created"); + DebugLog("Vulkan: render finished semaphore created"); m_cmd_buffers[i] = kvfCreateCommandBuffer(RenderCore::Get().GetDevice()); - DebugLog("Vulkan : command buffer created"); + DebugLog("Vulkan: command buffer created"); m_cmd_fences[i] = kvfCreateFence(RenderCore::Get().GetDevice()); - DebugLog("Vulkan : fence created"); + DebugLog("Vulkan: fence created"); } } @@ -63,7 +63,7 @@ namespace mlx //return false; } else if(result != VK_SUCCESS && result != VK_SUBOPTIMAL_KHR) - FatalError("Vulkan error : failed to acquire swapchain image, %", kvfVerbaliseVkResult(result)); + FatalError("Vulkan error: failed to acquire swapchain image, %", kvfVerbaliseVkResult(result)); RenderCore::Get().vkResetCommandBuffer(m_cmd_buffers[m_current_frame_index], 0); kvfBeginCommandBuffer(m_cmd_buffers[m_current_frame_index], 0); @@ -110,7 +110,7 @@ namespace mlx m_swapchain_images[i].TransitionLayout(VK_IMAGE_LAYOUT_PRESENT_SRC_KHR); m_swapchain_images[i].CreateImageView(VK_IMAGE_VIEW_TYPE_2D, VK_IMAGE_ASPECT_COLOR_BIT); } - DebugLog("Vulkan 
: swapchain created"); + DebugLog("Vulkan: swapchain created"); } void Renderer::DestroySwapchain() @@ -120,7 +120,7 @@ namespace mlx for(Image& img : m_swapchain_images) img.DestroyImageView(); kvfDestroySwapchainKHR(RenderCore::Get().GetDevice(), m_swapchain); - DebugLog("Vulkan : swapchain destroyed"); + DebugLog("Vulkan: swapchain destroyed"); } void Renderer::Destroy() noexcept @@ -131,16 +131,16 @@ namespace mlx for(std::size_t i = 0; i < MAX_FRAMES_IN_FLIGHT; i++) { kvfDestroySemaphore(RenderCore::Get().GetDevice(), m_image_available_semaphores[i]); - DebugLog("Vulkan : image available semaphore destroyed"); + DebugLog("Vulkan: image available semaphore destroyed"); kvfDestroySemaphore(RenderCore::Get().GetDevice(), m_render_finished_semaphores[i]); - DebugLog("Vulkan : render finished semaphore destroyed"); + DebugLog("Vulkan: render finished semaphore destroyed"); kvfDestroyFence(RenderCore::Get().GetDevice(), m_cmd_fences[i]); - DebugLog("Vulkan : fence destroyed"); + DebugLog("Vulkan: fence destroyed"); } DestroySwapchain(); RenderCore::Get().vkDestroySurfaceKHR(RenderCore::Get().GetInstance(), m_surface, nullptr); - DebugLog("Vulkan : surface destroyed"); + DebugLog("Vulkan: surface destroyed"); m_surface = VK_NULL_HANDLE; } } diff --git a/runtime/Sources/Renderer/Vulkan/VulkanLoader.cpp b/runtime/Sources/Renderer/Vulkan/VulkanLoader.cpp index cba9310..4f63d54 100644 --- a/runtime/Sources/Renderer/Vulkan/VulkanLoader.cpp +++ b/runtime/Sources/Renderer/Vulkan/VulkanLoader.cpp @@ -30,8 +30,8 @@ namespace mlx { PFN_vkVoidFunction function = RenderCore::Get().vkGetInstanceProcAddr(static_cast(context), name); if(!function) - FatalError("Vulkan loader : could not load '%'", name); - //DebugLog("Vulkan loader : loaded %", name); + FatalError("Vulkan Loader: could not load '%'", name); + //DebugLog("Vulkan Loader: loaded %", name); return function; } @@ -39,8 +39,8 @@ namespace mlx { PFN_vkVoidFunction function = RenderCore::Get().vkGetDeviceProcAddr(static_cast(context), name); if(!function) - FatalError("Vulkan loader : could not load '%'", name); - //DebugLog("Vulkan loader : loaded %", name); + FatalError("Vulkan Loader: could not load '%'", name); + //DebugLog("Vulkan Loader: loaded %", name); return function; } @@ -95,13 +95,13 @@ namespace mlx RESTORE_GCC_PEDANTIC_WARNINGS if(RenderCore::Get().vkGetInstanceProcAddr) { - DebugLog("Vulkan loader : libvulkan loaded using '%'", libname); + DebugLog("Vulkan Loader: libvulkan loaded using '%'", libname); break; } } } if(!p_module || !RenderCore::Get().vkGetInstanceProcAddr) - FatalError("Vulkan loader : failed to load libvulkan"); + FatalError("Vulkan Loader: failed to load libvulkan"); LoadGlobalFunctions(nullptr, Internal::vkGetInstanceProcAddrStub); } @@ -120,7 +120,7 @@ namespace mlx #define MLX_VULKAN_GLOBAL_FUNCTION(fn) RenderCore::Get().fn = reinterpret_cast(load(context, #fn)); #include #undef MLX_VULKAN_GLOBAL_FUNCTION - DebugLog("Vulkan loader : global functions loaded"); + DebugLog("Vulkan Loader: global functions loaded"); } void VulkanLoader::LoadInstanceFunctions(void* context, PFN_vkVoidFunction (*load)(void*, const char*)) noexcept @@ -128,7 +128,7 @@ namespace mlx #define MLX_VULKAN_INSTANCE_FUNCTION(fn) RenderCore::Get().fn = reinterpret_cast(load(context, #fn)); #include #undef MLX_VULKAN_INSTANCE_FUNCTION - DebugLog("Vulkan loader : instance functions loaded"); + DebugLog("Vulkan Loader: instance functions loaded"); } void VulkanLoader::LoadDeviceFunctions(void* context, PFN_vkVoidFunction (*load)(void*, const 
char*)) noexcept @@ -136,7 +136,7 @@ namespace mlx #define MLX_VULKAN_DEVICE_FUNCTION(fn) RenderCore::Get().fn = reinterpret_cast(load(context, #fn)); #include #undef MLX_VULKAN_DEVICE_FUNCTION - DebugLog("Vulkan loader : device functions loaded"); + DebugLog("Vulkan Loader: device functions loaded"); } VulkanLoader::~VulkanLoader() @@ -147,6 +147,6 @@ namespace mlx dlclose(p_module); #endif p_module = nullptr; - DebugLog("Vulkan loader : libvulkan unloaded"); + DebugLog("Vulkan Loader: libvulkan unloaded"); } } diff --git a/third_party/kvf.h b/third_party/kvf.h index 6bc5e75..7e4602f 100755 --- a/third_party/kvf.h +++ b/third_party/kvf.h @@ -894,7 +894,7 @@ VkPipelineStageFlags kvfLayoutToAccessMask(VkImageLayout layout, bool is_destina { case VK_IMAGE_LAYOUT_UNDEFINED: if(is_destination) - KVF_ASSERT(false && "Vulkan : the new layout used in a transition must not be VK_IMAGE_LAYOUT_UNDEFINED"); + KVF_ASSERT(false && "Vulkan: the new layout used in a transition must not be VK_IMAGE_LAYOUT_UNDEFINED"); break; case VK_IMAGE_LAYOUT_GENERAL: access_mask = VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT; break; case VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL: access_mask = VK_ACCESS_COLOR_ATTACHMENT_READ_BIT | VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT; break; @@ -909,13 +909,13 @@ VkPipelineStageFlags kvfLayoutToAccessMask(VkImageLayout layout, bool is_destina if(!is_destination) access_mask = VK_ACCESS_HOST_WRITE_BIT; else - KVF_ASSERT(false && "Vulkan : the new layout used in a transition must not be VK_IMAGE_LAYOUT_PREINITIALIZED"); + KVF_ASSERT(false && "Vulkan: the new layout used in a transition must not be VK_IMAGE_LAYOUT_PREINITIALIZED"); break; case VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL: access_mask = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT; break; case VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL: access_mask = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT; break; case VK_IMAGE_LAYOUT_PRESENT_SRC_KHR: access_mask = VK_ACCESS_MEMORY_READ_BIT; break; - default: KVF_ASSERT(false && "Vulkan : unexpected image layout"); break; + default: KVF_ASSERT(false && "Vulkan: unexpected image layout"); break; } return access_mask; @@ -929,7 +929,7 @@ VkPipelineStageFlags kvfAccessFlagsToPipelineStage(VkAccessFlags access_flags, V { VkAccessFlagBits _access_flag = (VkAccessFlagBits)(access_flags & (~(access_flags - 1))); if(_access_flag == 0 || (_access_flag & (_access_flag - 1)) != 0) - KVF_ASSERT(false && "Vulkan : an error has been caught during access flag to pipeline stage operation"); + KVF_ASSERT(false && "Vulkan: an error has been caught during access flag to pipeline stage operation"); access_flags &= ~_access_flag; switch(_access_flag) @@ -952,7 +952,7 @@ VkPipelineStageFlags kvfAccessFlagsToPipelineStage(VkAccessFlags access_flags, V case VK_ACCESS_MEMORY_READ_BIT: break; case VK_ACCESS_MEMORY_WRITE_BIT: break; - default: KVF_ASSERT(false && "Vulkan : unknown access flag"); break; + default: KVF_ASSERT(false && "Vulkan: unknown access flag"); break; } } return stages; @@ -973,7 +973,7 @@ VkFormat kvfFindSupportFormatInCandidates(VkDevice device, VkFormat* candidates, return candidates[i]; } - KVF_ASSERT(false && "Vulkan : failed to find image format"); + KVF_ASSERT(false && "Vulkan: failed to find image format"); return VK_FORMAT_R8G8B8A8_SRGB; // just to avoir warning } From c6d6c821a9731119b37d436fae6fa54db2f1f531 Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Sun, 27 Oct 2024 00:40:13 +0000 Subject: [PATCH 056/131] [BOT] update dependencies --- 
third_party/vulkan/vulkan.cppm | 9 + third_party/vulkan/vulkan.hpp | 49 +- third_party/vulkan/vulkan_core.h | 54 +- third_party/vulkan/vulkan_enums.hpp | 5 +- .../vulkan/vulkan_extension_inspection.hpp | 12 +- third_party/vulkan/vulkan_funcs.hpp | 550 +++++++++++++++--- third_party/vulkan/vulkan_handles.hpp | 486 ++++++++++++---- third_party/vulkan/vulkan_hash.hpp | 59 ++ third_party/vulkan/vulkan_raii.hpp | 48 ++ .../vulkan/vulkan_static_assertions.hpp | 25 + third_party/vulkan/vulkan_structs.hpp | 402 +++++++++++++ third_party/vulkan/vulkan_to_string.hpp | 3 + 12 files changed, 1489 insertions(+), 213 deletions(-) diff --git a/third_party/vulkan/vulkan.cppm b/third_party/vulkan/vulkan.cppm index 5a031ca..5e40ea9 100644 --- a/third_party/vulkan/vulkan.cppm +++ b/third_party/vulkan/vulkan.cppm @@ -2605,6 +2605,10 @@ export namespace VULKAN_HPP_NAMESPACE using VULKAN_HPP_NAMESPACE::EXTDepthClampControlExtensionName; using VULKAN_HPP_NAMESPACE::EXTDepthClampControlSpecVersion; + //=== VK_NV_cooperative_matrix2 === + using VULKAN_HPP_NAMESPACE::NVCooperativeMatrix2ExtensionName; + using VULKAN_HPP_NAMESPACE::NVCooperativeMatrix2SpecVersion; + //======================== //=== CONSTEXPR VALUEs === //======================== @@ -4529,6 +4533,11 @@ export namespace VULKAN_HPP_NAMESPACE using VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClampControlFeaturesEXT; using VULKAN_HPP_NAMESPACE::PipelineViewportDepthClampControlCreateInfoEXT; + //=== VK_NV_cooperative_matrix2 === + using VULKAN_HPP_NAMESPACE::CooperativeMatrixFlexibleDimensionsPropertiesNV; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrix2FeaturesNV; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrix2PropertiesNV; + //=============== //=== HANDLEs === //=============== diff --git a/third_party/vulkan/vulkan.hpp b/third_party/vulkan/vulkan.hpp index 406d820..1207a40 100644 --- a/third_party/vulkan/vulkan.hpp +++ b/third_party/vulkan/vulkan.hpp @@ -63,7 +63,7 @@ extern "C" __declspec( dllimport ) FARPROC __stdcall GetProcAddress( HINSTANCE h # include #endif -static_assert( VK_HEADER_VERSION == 299, "Wrong VK_HEADER_VERSION!" ); +static_assert( VK_HEADER_VERSION == 300, "Wrong VK_HEADER_VERSION!" 
); // includes through some other header // this results in major(x) being resolved to gnu_dev_major(x) @@ -6000,6 +6000,14 @@ namespace VULKAN_HPP_NAMESPACE { return ::vkUpdateIndirectExecutionSetShaderEXT( device, indirectExecutionSet, executionSetWriteCount, pExecutionSetWrites ); } + + //=== VK_NV_cooperative_matrix2 === + + VkResult vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV( + VkPhysicalDevice physicalDevice, uint32_t * pPropertyCount, VkCooperativeMatrixFlexibleDimensionsPropertiesNV * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV( physicalDevice, pPropertyCount, pProperties ); + } }; inline ::VULKAN_HPP_NAMESPACE::DispatchLoaderStatic & getDispatchLoaderStatic() @@ -8675,6 +8683,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR_INLINE auto EXTDepthClampControlExtensionName = VK_EXT_DEPTH_CLAMP_CONTROL_EXTENSION_NAME; VULKAN_HPP_CONSTEXPR_INLINE auto EXTDepthClampControlSpecVersion = VK_EXT_DEPTH_CLAMP_CONTROL_SPEC_VERSION; + //=== VK_NV_cooperative_matrix2 === + VULKAN_HPP_CONSTEXPR_INLINE auto NVCooperativeMatrix2ExtensionName = VK_NV_COOPERATIVE_MATRIX_2_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVCooperativeMatrix2SpecVersion = VK_NV_COOPERATIVE_MATRIX_2_SPEC_VERSION; + } // namespace VULKAN_HPP_NAMESPACE // clang-format off @@ -17060,6 +17072,34 @@ namespace VULKAN_HPP_NAMESPACE }; }; + //=== VK_NV_cooperative_matrix2 === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + #endif // VULKAN_HPP_DISABLE_ENHANCED_MODE #if VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL @@ -18322,6 +18362,9 @@ namespace VULKAN_HPP_NAMESPACE PFN_vkUpdateIndirectExecutionSetPipelineEXT vkUpdateIndirectExecutionSetPipelineEXT = 0; PFN_vkUpdateIndirectExecutionSetShaderEXT vkUpdateIndirectExecutionSetShaderEXT = 0; + //=== VK_NV_cooperative_matrix2 === + PFN_vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV = 0; + public: DispatchLoaderDynamic() VULKAN_HPP_NOEXCEPT = default; DispatchLoaderDynamic( DispatchLoaderDynamic const & rhs ) VULKAN_HPP_NOEXCEPT = default; @@ -19752,6 +19795,10 @@ namespace VULKAN_HPP_NAMESPACE PFN_vkUpdateIndirectExecutionSetPipelineEXT( vkGetInstanceProcAddr( instance, "vkUpdateIndirectExecutionSetPipelineEXT" ) ); vkUpdateIndirectExecutionSetShaderEXT = PFN_vkUpdateIndirectExecutionSetShaderEXT( vkGetInstanceProcAddr( instance, "vkUpdateIndirectExecutionSetShaderEXT" ) ); + + //=== VK_NV_cooperative_matrix2 === + vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV = PFN_vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV" ) ); } void init( VULKAN_HPP_NAMESPACE::Device deviceCpp ) VULKAN_HPP_NOEXCEPT diff --git a/third_party/vulkan/vulkan_core.h b/third_party/vulkan/vulkan_core.h index 4d9776c..b2973ff 100644 --- a/third_party/vulkan/vulkan_core.h +++ b/third_party/vulkan/vulkan_core.h @@ -69,7 +69,7 @@ extern "C" { #define VK_API_VERSION_1_0 VK_MAKE_API_VERSION(0, 1, 0, 0)// Patch version should always be set to 0 // Version of this file -#define VK_HEADER_VERSION 299 +#define VK_HEADER_VERSION 300 // Complete version of this file #define VK_HEADER_VERSION_COMPLETE 
VK_MAKE_API_VERSION(0, 1, 3, VK_HEADER_VERSION) @@ -1163,6 +1163,9 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_IMAGE_ALIGNMENT_CONTROL_CREATE_INFO_MESA = 1000575002, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLAMP_CONTROL_FEATURES_EXT = 1000582000, VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_DEPTH_CLAMP_CONTROL_CREATE_INFO_EXT = 1000582001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_2_FEATURES_NV = 1000593000, + VK_STRUCTURE_TYPE_COOPERATIVE_MATRIX_FLEXIBLE_DIMENSIONS_PROPERTIES_NV = 1000593001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_2_PROPERTIES_NV = 1000593002, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETER_FEATURES = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES, // VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT is a deprecated alias @@ -19898,6 +19901,55 @@ typedef struct VkPipelineViewportDepthClampControlCreateInfoEXT { +// VK_NV_cooperative_matrix2 is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_cooperative_matrix2 1 +#define VK_NV_COOPERATIVE_MATRIX_2_SPEC_VERSION 1 +#define VK_NV_COOPERATIVE_MATRIX_2_EXTENSION_NAME "VK_NV_cooperative_matrix2" +typedef struct VkCooperativeMatrixFlexibleDimensionsPropertiesNV { + VkStructureType sType; + void* pNext; + uint32_t MGranularity; + uint32_t NGranularity; + uint32_t KGranularity; + VkComponentTypeKHR AType; + VkComponentTypeKHR BType; + VkComponentTypeKHR CType; + VkComponentTypeKHR ResultType; + VkBool32 saturatingAccumulation; + VkScopeKHR scope; + uint32_t workgroupInvocations; +} VkCooperativeMatrixFlexibleDimensionsPropertiesNV; + +typedef struct VkPhysicalDeviceCooperativeMatrix2FeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 cooperativeMatrixWorkgroupScope; + VkBool32 cooperativeMatrixFlexibleDimensions; + VkBool32 cooperativeMatrixReductions; + VkBool32 cooperativeMatrixConversions; + VkBool32 cooperativeMatrixPerElementOperations; + VkBool32 cooperativeMatrixTensorAddressing; + VkBool32 cooperativeMatrixBlockLoads; +} VkPhysicalDeviceCooperativeMatrix2FeaturesNV; + +typedef struct VkPhysicalDeviceCooperativeMatrix2PropertiesNV { + VkStructureType sType; + void* pNext; + uint32_t cooperativeMatrixWorkgroupScopeMaxWorkgroupSize; + uint32_t cooperativeMatrixFlexibleDimensionsMaxDimension; + uint32_t cooperativeMatrixWorkgroupScopeReservedSharedMemory; +} VkPhysicalDeviceCooperativeMatrix2PropertiesNV; + +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV)(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkCooperativeMatrixFlexibleDimensionsPropertiesNV* pProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV( + VkPhysicalDevice physicalDevice, + uint32_t* pPropertyCount, + VkCooperativeMatrixFlexibleDimensionsPropertiesNV* pProperties); +#endif + + // VK_KHR_acceleration_structure is a preprocessor guard. Do not pass it to API calls. 
#define VK_KHR_acceleration_structure 1 #define VK_KHR_ACCELERATION_STRUCTURE_SPEC_VERSION 13 diff --git a/third_party/vulkan/vulkan_enums.hpp b/third_party/vulkan/vulkan_enums.hpp index 6fac9aa..e3c323f 100644 --- a/third_party/vulkan/vulkan_enums.hpp +++ b/third_party/vulkan/vulkan_enums.hpp @@ -1467,7 +1467,10 @@ namespace VULKAN_HPP_NAMESPACE ePhysicalDeviceImageAlignmentControlPropertiesMESA = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_ALIGNMENT_CONTROL_PROPERTIES_MESA, eImageAlignmentControlCreateInfoMESA = VK_STRUCTURE_TYPE_IMAGE_ALIGNMENT_CONTROL_CREATE_INFO_MESA, ePhysicalDeviceDepthClampControlFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLAMP_CONTROL_FEATURES_EXT, - ePipelineViewportDepthClampControlCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_DEPTH_CLAMP_CONTROL_CREATE_INFO_EXT + ePipelineViewportDepthClampControlCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_DEPTH_CLAMP_CONTROL_CREATE_INFO_EXT, + ePhysicalDeviceCooperativeMatrix2FeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_2_FEATURES_NV, + eCooperativeMatrixFlexibleDimensionsPropertiesNV = VK_STRUCTURE_TYPE_COOPERATIVE_MATRIX_FLEXIBLE_DIMENSIONS_PROPERTIES_NV, + ePhysicalDeviceCooperativeMatrix2PropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_2_PROPERTIES_NV }; enum class PipelineCacheHeaderVersion diff --git a/third_party/vulkan/vulkan_extension_inspection.hpp b/third_party/vulkan/vulkan_extension_inspection.hpp index 3b158f8..73fb83d 100644 --- a/third_party/vulkan/vulkan_extension_inspection.hpp +++ b/third_party/vulkan/vulkan_extension_inspection.hpp @@ -453,7 +453,8 @@ namespace VULKAN_HPP_NAMESPACE "VK_NV_ray_tracing_validation", "VK_EXT_device_generated_commands", "VK_MESA_image_alignment_control", - "VK_EXT_depth_clamp_control" + "VK_EXT_depth_clamp_control", + "VK_NV_cooperative_matrix2" }; return deviceExtensions; } @@ -2360,7 +2361,12 @@ namespace VULKAN_HPP_NAMESPACE { { "VK_KHR_get_physical_device_properties2", } } }, - { "VK_VERSION_1_1", { {} } } } } + { "VK_VERSION_1_1", { {} } } } }, + { "VK_NV_cooperative_matrix2", + { { "VK_VERSION_1_0", + { { + "VK_KHR_cooperative_matrix", + } } } } } }; auto depIt = dependencies.find( extension ); return ( depIt != dependencies.end() ) ? 
depIt->second : noDependencies; @@ -3139,7 +3145,7 @@ namespace VULKAN_HPP_NAMESPACE ( extension == "VK_KHR_maintenance7" ) || ( extension == "VK_NV_shader_atomic_float16_vector" ) || ( extension == "VK_EXT_shader_replicated_composites" ) || ( extension == "VK_NV_ray_tracing_validation" ) || ( extension == "VK_EXT_device_generated_commands" ) || ( extension == "VK_MESA_image_alignment_control" ) || - ( extension == "VK_EXT_depth_clamp_control" ); + ( extension == "VK_EXT_depth_clamp_control" ) || ( extension == "VK_NV_cooperative_matrix2" ); } VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 bool isInstanceExtension( std::string const & extension ) diff --git a/third_party/vulkan/vulkan_funcs.hpp b/third_party/vulkan/vulkan_funcs.hpp index f410d2f..dee5661 100644 --- a/third_party/vulkan/vulkan_funcs.hpp +++ b/third_party/vulkan/vulkan_funcs.hpp @@ -107,7 +107,9 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template ::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type Instance::enumeratePhysicalDevices( Dispatch const & d ) const { @@ -308,7 +310,10 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template < + typename QueueFamilyPropertiesAllocator, + typename Dispatch, + typename std::enable_if::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector PhysicalDevice::getQueueFamilyProperties( Dispatch const & d ) const { @@ -523,7 +528,10 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template < + typename ExtensionPropertiesAllocator, + typename Dispatch, + typename std::enable_if::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type enumerateInstanceExtensionProperties( Optional layerName, Dispatch const & d ) { @@ -605,7 +613,10 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template < + typename ExtensionPropertiesAllocator, + typename Dispatch, + typename std::enable_if::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::enumerateDeviceExtensionProperties( Optional layerName, Dispatch const & d ) const { @@ -685,7 +696,9 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template ::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type enumerateInstanceLayerProperties( Dispatch const & d ) { @@ -761,7 +774,9 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template ::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::enumerateDeviceLayerProperties( Dispatch const & d ) const { @@ -1301,7 +1316,11 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template ::value, + int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector Device::getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, Dispatch const & d ) const { @@ -1383,7 +1402,11 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template < + typename SparseImageFormatPropertiesAllocator, + typename Dispatch, + typename std::enable_if::value, + int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector PhysicalDevice::getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, @@ -2134,7 +2157,10 @@ namespace 
VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template ::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> Device::getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, @@ -2924,7 +2950,9 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template ::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type Device::getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Dispatch const & d ) const { @@ -3040,7 +3068,9 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template ::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> Device::createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, @@ -3125,7 +3155,10 @@ namespace VULKAN_HPP_NAMESPACE } # ifndef VULKAN_HPP_NO_SMART_HANDLE - template + template < + typename Dispatch, + typename PipelineAllocator, + typename std::enable_if>::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue, PipelineAllocator>> Device::createGraphicsPipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, @@ -3243,7 +3276,9 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template ::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> Device::createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, @@ -3328,7 +3363,10 @@ namespace VULKAN_HPP_NAMESPACE } # ifndef VULKAN_HPP_NO_SMART_HANDLE - template + template < + typename Dispatch, + typename PipelineAllocator, + typename std::enable_if>::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue, PipelineAllocator>> Device::createComputePipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, @@ -3981,7 +4019,9 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template ::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type Device::allocateDescriptorSets( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo, Dispatch const & d ) const { @@ -4020,7 +4060,11 @@ namespace VULKAN_HPP_NAMESPACE } # ifndef VULKAN_HPP_NO_SMART_HANDLE - template + template < + typename Dispatch, + typename DescriptorSetAllocator, + typename std::enable_if>::value, + int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType, DescriptorSetAllocator>>::type Device::allocateDescriptorSetsUnique( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo, Dispatch const & d ) const @@ -4583,7 +4627,9 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template ::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type Device::allocateCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & allocateInfo, Dispatch const & d ) const { @@ -4622,7 +4668,11 @@ namespace VULKAN_HPP_NAMESPACE } # ifndef VULKAN_HPP_NO_SMART_HANDLE - template + template < + typename Dispatch, + typename CommandBufferAllocator, + typename std::enable_if>::value, + int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType, CommandBufferAllocator>>::type 
Device::allocateCommandBuffersUnique( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & allocateInfo, Dispatch const & d ) const @@ -5933,7 +5983,11 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template ::value, + int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type Instance::enumeratePhysicalDeviceGroups( Dispatch const & d ) const @@ -6115,7 +6169,11 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template ::value, + int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector Device::getImageSparseMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info, Dispatch const & d ) const { @@ -6375,7 +6433,10 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template < + typename QueueFamilyProperties2Allocator, + typename Dispatch, + typename std::enable_if::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector PhysicalDevice::getQueueFamilyProperties2( Dispatch const & d ) const { @@ -6428,7 +6489,10 @@ namespace VULKAN_HPP_NAMESPACE return queueFamilyProperties; } - template + template ::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector PhysicalDevice::getQueueFamilyProperties2( Dispatch const & d ) const { @@ -6561,7 +6625,11 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template < + typename SparseImageFormatProperties2Allocator, + typename Dispatch, + typename std::enable_if::value, + int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector PhysicalDevice::getSparseImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Dispatch const & d ) const { @@ -7446,7 +7514,11 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template < + typename PhysicalDeviceToolPropertiesAllocator, + typename Dispatch, + typename std::enable_if::value, + int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getToolProperties( Dispatch const & d ) const @@ -8309,7 +8381,11 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template ::value, + int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector Device::getImageSparseMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, Dispatch const & d ) const { @@ -8503,7 +8579,9 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template ::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const { @@ -8586,7 +8664,9 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template ::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const { @@ -8785,7 +8865,9 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template ::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type Device::getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const { @@ -8994,7 +9076,9 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template ::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const { @@ -9110,7 +9194,10 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template < + typename DisplayPropertiesKHRAllocator, + typename Dispatch, + typename std::enable_if::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getDisplayPropertiesKHR( Dispatch const & d ) const { @@ -9187,7 +9274,11 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template < + typename DisplayPlanePropertiesKHRAllocator, + typename Dispatch, + typename std::enable_if::value, + int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getDisplayPlanePropertiesKHR( Dispatch const & d ) const @@ -9268,7 +9359,9 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template ::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, Dispatch const & d ) const { @@ -9347,7 +9440,10 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template ::value, + int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const @@ -9600,7 +9696,9 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template ::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type Device::createSharedSwapchainsKHR( VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, Optional allocator, @@ -9673,7 +9771,10 @@ namespace VULKAN_HPP_NAMESPACE } # ifndef VULKAN_HPP_NO_SMART_HANDLE - template + template >::value, + int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType, SwapchainKHRAllocator>>::type Device::createSharedSwapchainsKHRUnique( VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, @@ -10524,7 +10625,10 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template ::value, + int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getVideoFormatPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo, Dispatch const & d ) const @@ -10737,7 +10841,11 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template ::value, + int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type Device::getVideoSessionMemoryRequirementsKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, Dispatch const & d ) const @@ -11654,7 +11762,9 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template ::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type Device::getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, @@ -12135,7 +12245,10 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template < + typename QueueFamilyProperties2Allocator, + typename Dispatch, + typename 
std::enable_if::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector PhysicalDevice::getQueueFamilyProperties2KHR( Dispatch const & d ) const { @@ -12188,7 +12301,10 @@ namespace VULKAN_HPP_NAMESPACE return queueFamilyProperties; } - template + template ::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector PhysicalDevice::getQueueFamilyProperties2KHR( Dispatch const & d ) const { @@ -12321,7 +12437,11 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template < + typename SparseImageFormatProperties2Allocator, + typename Dispatch, + typename std::enable_if::value, + int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector PhysicalDevice::getSparseImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Dispatch const & d ) const @@ -12530,7 +12650,11 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template ::value, + int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type Instance::enumeratePhysicalDeviceGroupsKHR( Dispatch const & d ) const @@ -13568,7 +13692,11 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template < + typename PastPresentationTimingGOOGLEAllocator, + typename Dispatch, + typename std::enable_if::value, + int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type Device::getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const @@ -14075,7 +14203,13 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template ::value && + std::is_same::value, + int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType, std::vector>>::type @@ -14312,7 +14446,9 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template ::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const { @@ -14388,7 +14524,10 @@ namespace VULKAN_HPP_NAMESPACE return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surfaceFormats ) ); } - template + template ::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const { @@ -14499,7 +14638,10 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template < + typename DisplayProperties2KHRAllocator, + typename Dispatch, + typename std::enable_if::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getDisplayProperties2KHR( Dispatch const & d ) const @@ -14580,7 +14722,11 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template < + typename DisplayPlaneProperties2KHRAllocator, + typename Dispatch, + typename std::enable_if::value, + int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getDisplayPlaneProperties2KHR( Dispatch const & d ) const @@ -14665,7 +14811,11 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template < + typename DisplayModeProperties2KHRAllocator, + typename Dispatch, + 
typename std::enable_if::value, + int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const @@ -15322,7 +15472,9 @@ namespace VULKAN_HPP_NAMESPACE } # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template ::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> Device::createExecutionGraphPipelinesAMDX( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, @@ -15407,7 +15559,10 @@ namespace VULKAN_HPP_NAMESPACE } # ifndef VULKAN_HPP_NO_SMART_HANDLE - template + template < + typename Dispatch, + typename PipelineAllocator, + typename std::enable_if>::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue, PipelineAllocator>> Device::createExecutionGraphPipelinesAMDXUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, @@ -15831,7 +15986,11 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template ::value, + int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector Device::getImageSparseMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info, Dispatch const & d ) const { @@ -16299,7 +16458,10 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template ::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type Device::writeAccelerationStructuresPropertiesKHR( VULKAN_HPP_NAMESPACE::ArrayProxy const & accelerationStructures, @@ -16649,7 +16811,9 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template ::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> Device::createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, @@ -16749,7 +16913,10 @@ namespace VULKAN_HPP_NAMESPACE } # ifndef VULKAN_HPP_NO_SMART_HANDLE - template + template < + typename Dispatch, + typename PipelineAllocator, + typename std::enable_if>::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue, PipelineAllocator>> Device::createRayTracingPipelinesKHRUnique( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, @@ -16880,7 +17047,10 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template ::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type Device::getRayTracingShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, Dispatch const & d ) const { @@ -16932,7 +17102,10 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template ::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type Device::getRayTracingCaptureReplayShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, Dispatch const & d ) const @@ -17381,7 +17554,9 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template ::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type Device::getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Dispatch const & d ) const { @@ -17842,7 +18017,9 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template ::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> Device::createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, @@ -17927,7 +18104,10 @@ namespace VULKAN_HPP_NAMESPACE } # ifndef VULKAN_HPP_NO_SMART_HANDLE - template + template < + typename Dispatch, + typename PipelineAllocator, + typename std::enable_if>::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue, PipelineAllocator>> Device::createRayTracingPipelinesNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, @@ -18041,7 +18221,10 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template ::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type Device::getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, Dispatch const & d ) const { @@ -18091,7 +18274,10 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template ::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type Device::getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, size_t dataSize, Dispatch const & d ) const { @@ -18372,7 +18558,9 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template ::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getCalibrateableTimeDomainsEXT( Dispatch const & d ) const { @@ -18455,7 +18643,9 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template ::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType, uint64_t>>::type Device::getCalibratedTimestampsEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & timestampInfos, Dispatch const & d ) const @@ -18654,7 +18844,9 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template ::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector Queue::getCheckpointDataNV( Dispatch const & d ) const { @@ -18713,7 +18905,9 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template ::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector Queue::getCheckpointData2NV( Dispatch const & d ) const { @@ -19291,7 +19485,11 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template ::value, + int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getFragmentShadingRatesKHR( Dispatch const & d ) const @@ -19487,7 +19685,11 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template < + typename PhysicalDeviceToolPropertiesAllocator, + typename Dispatch, + typename std::enable_if::value, + int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getToolPropertiesEXT( Dispatch const & d ) const @@ -19604,7 +19806,11 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template ::value, + int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getCooperativeMatrixPropertiesNV( Dispatch const & d ) const @@ 
-19689,7 +19895,11 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template ::value, + int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV( Dispatch const & d ) const @@ -19782,7 +19992,9 @@ namespace VULKAN_HPP_NAMESPACE } # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template ::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const { @@ -20485,7 +20697,11 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template ::value, + int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type Device::getPipelineExecutablePropertiesKHR( const VULKAN_HPP_NAMESPACE::PipelineInfoKHR & pipelineInfo, Dispatch const & d ) const @@ -20582,7 +20798,11 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template ::value, + int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type Device::getPipelineExecutableStatisticsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo, Dispatch const & d ) const @@ -20680,7 +20900,11 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template ::value, + int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType< std::vector>::type Device::getPipelineExecutableInternalRepresentationsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo, Dispatch const & d ) const @@ -21593,7 +21817,9 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template ::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>>::type Device::getEncodedVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR & videoSessionParametersInfo, @@ -21677,7 +21903,12 @@ namespace VULKAN_HPP_NAMESPACE return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data_ ) ); } - template + template ::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType, std::vector>>::type Device::getEncodedVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR & videoSessionParametersInfo, @@ -21865,7 +22096,9 @@ namespace VULKAN_HPP_NAMESPACE } # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template ::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type Device::getCudaModuleCacheNV( VULKAN_HPP_NAMESPACE::CudaModuleNV module, Dispatch const & d ) const { @@ -24066,7 +24299,10 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template ::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type Device::writeMicromapsPropertiesEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & micromaps, VULKAN_HPP_NAMESPACE::QueryType queryType, @@ -24430,7 +24666,11 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template ::value, + int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector Device::getImageSparseMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, Dispatch const & d ) const { @@ -25210,7 +25450,11 @@ 
namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template ::value, + int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getOpticalFlowImageFormatsNV( const VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV & opticalFlowImageFormatInfo, @@ -25667,7 +25911,9 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template ::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> Device::createShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, Optional allocator, @@ -25746,7 +25992,10 @@ namespace VULKAN_HPP_NAMESPACE } # ifndef VULKAN_HPP_NO_SMART_HANDLE - template + template < + typename Dispatch, + typename ShaderEXTAllocator, + typename std::enable_if>::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue, ShaderEXTAllocator>> Device::createShadersEXTUnique( VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, Optional allocator, @@ -25903,7 +26152,9 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template ::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type Device::getShaderBinaryDataEXT( VULKAN_HPP_NAMESPACE::ShaderEXT shader, Dispatch const & d ) const { @@ -26053,7 +26304,9 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template ::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> Device::createPipelineBinariesKHR( const VULKAN_HPP_NAMESPACE::PipelineBinaryCreateInfoKHR & createInfo, Optional allocator, @@ -26165,7 +26418,11 @@ namespace VULKAN_HPP_NAMESPACE } # ifndef VULKAN_HPP_NO_SMART_HANDLE - template + template >::value, + int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue, PipelineBinaryKHRAllocator>> Device::createPipelineBinariesKHRUnique( const VULKAN_HPP_NAMESPACE::PipelineBinaryCreateInfoKHR & createInfo, Optional allocator, @@ -26399,7 +26656,9 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template ::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>>::type Device::getPipelineBinaryDataKHR( const VULKAN_HPP_NAMESPACE::PipelineBinaryDataInfoKHR & info, Dispatch const & d ) const @@ -26520,7 +26779,9 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template ::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type Device::getFramebufferTilePropertiesQCOM( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, Dispatch const & d ) const { @@ -26710,7 +26971,11 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template < + typename LatencyTimingsFrameReportNVAllocator, + typename Dispatch, + typename std::enable_if::value, + int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector Device::getLatencyTimingsNV( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const { @@ -26789,7 +27054,11 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template ::value, + int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getCooperativeMatrixPropertiesKHR( Dispatch const & d ) const @@ -26946,7 +27215,9 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template ::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename 
ResultValueType>::type PhysicalDevice::getCalibrateableTimeDomainsKHR( Dispatch const & d ) const { @@ -27029,7 +27300,9 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template ::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType, uint64_t>>::type Device::getCalibratedTimestampsKHR( VULKAN_HPP_NAMESPACE::ArrayProxy const & timestampInfos, Dispatch const & d ) const @@ -27664,5 +27937,100 @@ namespace VULKAN_HPP_NAMESPACE } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + //=== VK_NV_cooperative_matrix2 === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + PhysicalDevice::getCooperativeMatrixFlexibleDimensionsPropertiesNV( uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::CooperativeMatrixFlexibleDimensionsPropertiesNV * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV( + static_cast( m_physicalDevice ), + pPropertyCount, + reinterpret_cast( pProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template ::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType< + std::vector>::type + PhysicalDevice::getCooperativeMatrixFlexibleDimensionsPropertiesNV( Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV && + "Function requires " ); +# endif + + std::vector properties; + uint32_t propertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + d.vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV( m_physicalDevice, &propertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( d.vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV( + m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixFlexibleDimensionsPropertiesNV" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); + } + + template ::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType< + std::vector>::type + PhysicalDevice::getCooperativeMatrixFlexibleDimensionsPropertiesNV( + CooperativeMatrixFlexibleDimensionsPropertiesNVAllocator & cooperativeMatrixFlexibleDimensionsPropertiesNVAllocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV && + "Function requires " ); +# endif + + std::vector properties( + cooperativeMatrixFlexibleDimensionsPropertiesNVAllocator ); + uint32_t propertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + d.vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV( m_physicalDevice, &propertyCount, 
nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( d.vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV( + m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixFlexibleDimensionsPropertiesNV" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + } // namespace VULKAN_HPP_NAMESPACE #endif diff --git a/third_party/vulkan/vulkan_handles.hpp b/third_party/vulkan/vulkan_handles.hpp index 6ae2e5e..4947796 100644 --- a/third_party/vulkan/vulkan_handles.hpp +++ b/third_party/vulkan/vulkan_handles.hpp @@ -1909,6 +1909,11 @@ namespace VULKAN_HPP_NAMESPACE struct PipelineViewportDepthClampControlCreateInfoEXT; struct DepthClampRangeEXT; + //=== VK_NV_cooperative_matrix2 === + struct CooperativeMatrixFlexibleDimensionsPropertiesNV; + struct PhysicalDeviceCooperativeMatrix2FeaturesNV; + struct PhysicalDeviceCooperativeMatrix2PropertiesNV; + //=================================== //=== HANDLE forward declarations === //=================================== @@ -10318,8 +10323,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::CheckpointDataNV * pCheckpointData, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template < + typename CheckpointDataNVAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD std::vector getCheckpointDataNV( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template < @@ -10335,8 +10342,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::CheckpointData2NV * pCheckpointData, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template < + typename CheckpointData2NVAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD std::vector getCheckpointData2NV( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template < @@ -10681,7 +10690,10 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if< + std::is_same::value, + int>::type = 0> VULKAN_HPP_NODISCARD std::vector getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template , @@ -10936,7 +10948,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE - template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD ResultValue> getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, @@ -11225,7 +11240,9 @@ namespace VULKAN_HPP_NAMESPACE void * pData, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template , @@ -11258,7 +11275,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Pipeline * pPipelines, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD ResultValue> createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, @@ -11280,8 +11299,10 @@ namespace VULKAN_HPP_NAMESPACE Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; # ifndef VULKAN_HPP_NO_SMART_HANDLE - template >> + template >, + typename std::enable_if>::value, + int>::type = 0> VULKAN_HPP_NODISCARD ResultValue, PipelineAllocator>> createGraphicsPipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, @@ -11314,7 +11335,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Pipeline * pPipelines, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD ResultValue> createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, @@ -11336,8 +11359,10 @@ namespace VULKAN_HPP_NAMESPACE Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; # ifndef VULKAN_HPP_NO_SMART_HANDLE - template >> + template >, + typename std::enable_if>::value, + int>::type = 0> VULKAN_HPP_NODISCARD ResultValue, PipelineAllocator>> createComputePipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, @@ -11569,7 +11594,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD 
typename ResultValueType>::type allocateDescriptorSets( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; @@ -11581,8 +11608,11 @@ namespace VULKAN_HPP_NAMESPACE DescriptorSetAllocator & descriptorSetAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; # ifndef VULKAN_HPP_NO_SMART_HANDLE - template >> + template < + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename DescriptorSetAllocator = std::allocator>, + typename std::enable_if>::value, + int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType, DescriptorSetAllocator>>::type allocateDescriptorSetsUnique( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; @@ -11789,7 +11819,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type allocateCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & allocateInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; @@ -11801,8 +11833,11 @@ namespace VULKAN_HPP_NAMESPACE CommandBufferAllocator & commandBufferAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; # ifndef VULKAN_HPP_NO_SMART_HANDLE - template >> + template < + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename CommandBufferAllocator = std::allocator>, + typename std::enable_if>::value, + int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType, CommandBufferAllocator>>::type allocateCommandBuffersUnique( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & allocateInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; @@ -11918,7 +11953,10 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if< + std::is_same::value, + int>::type = 0> VULKAN_HPP_NODISCARD std::vector getImageSparseMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; @@ -12256,7 +12294,10 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if< + std::is_same::value, + int>::type = 0> VULKAN_HPP_NODISCARD std::vector getImageSparseMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; @@ -12321,7 +12362,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Image * pSwapchainImages, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , 
typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template , @@ -12385,7 +12428,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type createSharedSwapchainsKHR( VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, @@ -12404,8 +12449,11 @@ namespace VULKAN_HPP_NAMESPACE Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; # ifndef VULKAN_HPP_NO_SMART_HANDLE - template >> + template < + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename SwapchainKHRAllocator = std::allocator>, + typename std::enable_if>::value, + int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType, SwapchainKHRAllocator>>::type createSharedSwapchainsKHRUnique( VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, @@ -12501,7 +12549,10 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if< + std::is_same::value, + int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type getVideoSessionMemoryRequirementsKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, @@ -12702,7 +12753,9 @@ namespace VULKAN_HPP_NAMESPACE void * pInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, @@ -12989,7 +13042,10 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if< + std::is_same::value, + int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template , @@ -13175,7 +13231,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Pipeline * pPipelines, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; # ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE - template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD ResultValue> createExecutionGraphPipelinesAMDX( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, @@ -13197,8 +13255,10 @@ namespace VULKAN_HPP_NAMESPACE Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; # ifndef VULKAN_HPP_NO_SMART_HANDLE - template >> + template >, + typename std::enable_if>::value, + int>::type = 0> VULKAN_HPP_NODISCARD ResultValue, PipelineAllocator>> createExecutionGraphPipelinesAMDXUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, @@ -13290,7 +13350,10 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if< + std::is_same::value, + int>::type = 0> VULKAN_HPP_NODISCARD std::vector getImageSparseMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; @@ -13409,7 +13472,10 @@ namespace VULKAN_HPP_NAMESPACE size_t stride, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type writeAccelerationStructuresPropertiesKHR( VULKAN_HPP_NAMESPACE::ArrayProxy const & accelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, @@ -13471,7 +13537,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Pipeline * pPipelines, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD ResultValue> createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, @@ -13496,8 +13564,10 @@ namespace VULKAN_HPP_NAMESPACE Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; # ifndef VULKAN_HPP_NO_SMART_HANDLE - template >> + template >, + typename std::enable_if>::value, + int>::type = 0> VULKAN_HPP_NODISCARD ResultValue, PipelineAllocator>> createRayTracingPipelinesKHRUnique( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, @@ -13533,7 +13603,10 @@ namespace VULKAN_HPP_NAMESPACE void * pData, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> 
VULKAN_HPP_NODISCARD typename ResultValueType>::type getRayTracingShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, @@ -13554,7 +13627,10 @@ namespace VULKAN_HPP_NAMESPACE void * pData, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type getRayTracingCaptureReplayShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, @@ -13704,7 +13780,9 @@ namespace VULKAN_HPP_NAMESPACE void * pData, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template , @@ -13794,7 +13872,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Pipeline * pPipelines, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD ResultValue> createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, @@ -13816,8 +13896,10 @@ namespace VULKAN_HPP_NAMESPACE Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; # ifndef VULKAN_HPP_NO_SMART_HANDLE - template >> + template >, + typename std::enable_if>::value, + int>::type = 0> VULKAN_HPP_NODISCARD ResultValue, PipelineAllocator>> createRayTracingPipelinesNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, @@ -13850,7 +13932,10 @@ namespace VULKAN_HPP_NAMESPACE void * pData, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, @@ -13868,7 +13953,10 @@ namespace VULKAN_HPP_NAMESPACE void * pData, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, size_t dataSize, Dispatch const & d 
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template @@ -13929,7 +14017,9 @@ namespace VULKAN_HPP_NAMESPACE uint64_t * pMaxDeviation, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType, uint64_t>>::type getCalibratedTimestampsEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & timestampInfos, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; @@ -14223,7 +14313,10 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if< + std::is_same::value, + int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type getPipelineExecutablePropertiesKHR( const VULKAN_HPP_NAMESPACE::PipelineInfoKHR & pipelineInfo, @@ -14247,7 +14340,10 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if< + std::is_same::value, + int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type getPipelineExecutableStatisticsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo, @@ -14272,7 +14368,10 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, + int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType< std::vector>::type getPipelineExecutableInternalRepresentationsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo, @@ -14515,7 +14614,9 @@ namespace VULKAN_HPP_NAMESPACE void * pData, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>>::type getEncodedVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR & videoSessionParametersInfo, @@ -14531,8 +14632,9 @@ namespace VULKAN_HPP_NAMESPACE template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename Uint8_tAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType, std::vector>>::type getEncodedVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR & videoSessionParametersInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; @@ -14577,7 +14679,9 @@ namespace VULKAN_HPP_NAMESPACE void * pCacheData, Dispatch const & d 
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type getCudaModuleCacheNV( VULKAN_HPP_NAMESPACE::CudaModuleNV module, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template , @@ -15050,7 +15154,10 @@ namespace VULKAN_HPP_NAMESPACE size_t stride, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type writeMicromapsPropertiesEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & micromaps, VULKAN_HPP_NAMESPACE::QueryType queryType, @@ -15135,7 +15242,10 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if< + std::is_same::value, + int>::type = 0> VULKAN_HPP_NODISCARD std::vector getImageSparseMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; @@ -15350,7 +15460,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::ShaderEXT * pShaders, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD ResultValue> createShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, @@ -15369,8 +15481,10 @@ namespace VULKAN_HPP_NAMESPACE Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; # ifndef VULKAN_HPP_NO_SMART_HANDLE - template >> + template >, + typename std::enable_if>::value, + int>::type = 0> VULKAN_HPP_NODISCARD ResultValue, ShaderEXTAllocator>> createShadersEXTUnique( VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, @@ -15420,7 +15534,9 @@ namespace VULKAN_HPP_NAMESPACE void * pData, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type getShaderBinaryDataEXT( VULKAN_HPP_NAMESPACE::ShaderEXT shader, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template , @@ -15438,8 +15554,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::PipelineBinaryHandlesInfoKHR * pBinaries, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; 
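Throughout these hunks the vector-returning enhanced-mode wrappers gain an extra std::enable_if template parameter that compares the custom allocator's value_type against the element type being returned, so an overload that accepts a caller-supplied allocator only takes part in overload resolution when that allocator actually allocates the right type. The sketch below is an illustrative, self-contained example of that SFINAE constraint combined with the usual two-call "query the count, then fill the buffer" loop that the new VK_NV_cooperative_matrix2 query in this patch also follows; Widget, fake_enumerate_widgets and enumerateWidgets are invented names for illustration only, not part of vulkan.hpp or of this patch.

#include <cassert>
#include <cstdint>
#include <iostream>
#include <type_traits>
#include <vector>

// Invented stand-in for a C enumeration entry point with the usual
// "call once for the count, call again to fill the array" contract.
struct Widget
{
  uint32_t id;
};

enum class Result
{
  eSuccess,
  eIncomplete
};

inline Result fake_enumerate_widgets( uint32_t * count, Widget * out )
{
  static const Widget table[] = { { 1 }, { 2 }, { 3 } };
  const uint32_t      total   = 3;
  if ( out == nullptr )
  {
    *count = total;  // first call: report how many entries exist
    return Result::eSuccess;
  }
  if ( *count < total )
  {
    return Result::eIncomplete;  // caller's buffer is too small, ask again
  }
  for ( uint32_t i = 0; i < total; ++i )
    out[i] = table[i];
  *count = total;
  return Result::eSuccess;
}

// Enhanced-mode style wrapper: the trailing enable_if parameter removes this
// overload from consideration unless WidgetAllocator really allocates Widget,
// mirroring the constraints added throughout the hunks above and below.
template <typename WidgetAllocator = std::allocator<Widget>,
          typename std::enable_if<std::is_same<typename WidgetAllocator::value_type, Widget>::value, int>::type = 0>
std::vector<Widget, WidgetAllocator> enumerateWidgets( WidgetAllocator const & widgetAllocator = WidgetAllocator() )
{
  std::vector<Widget, WidgetAllocator> widgets( widgetAllocator );
  uint32_t                             count = 0;
  Result                               result;
  do
  {
    result = fake_enumerate_widgets( &count, nullptr );
    if ( ( result == Result::eSuccess ) && count )
    {
      widgets.resize( count );
      result = fake_enumerate_widgets( &count, widgets.data() );
    }
  } while ( result == Result::eIncomplete );  // the count may change between the two calls
  assert( count <= widgets.size() );
  widgets.resize( count );
  return widgets;
}

int main()
{
  for ( const Widget & w : enumerateWidgets() )
    std::cout << w.id << '\n';
}

These wrappers typically come in several overloads (default allocator, caller-supplied allocator, and structure-chain variants), and the value_type check presumably lets the compiler select the intended overload when an explicit allocator type is supplied instead of reporting an ambiguity; that appears to be what the added enable_if parameters in this header update guard against.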
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template < + typename PipelineBinaryKHRAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD ResultValue> createPipelineBinariesKHR( const VULKAN_HPP_NAMESPACE::PipelineBinaryCreateInfoKHR & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, @@ -15455,7 +15573,10 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; # ifndef VULKAN_HPP_NO_SMART_HANDLE template >> + typename PipelineBinaryKHRAllocator = std::allocator>, + typename std::enable_if< + std::is_same>::value, + int>::type = 0> VULKAN_HPP_NODISCARD ResultValue, PipelineBinaryKHRAllocator>> createPipelineBinariesKHRUnique( const VULKAN_HPP_NAMESPACE::PipelineBinaryCreateInfoKHR & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, @@ -15513,7 +15634,9 @@ namespace VULKAN_HPP_NAMESPACE void * pPipelineBinaryData, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>>::type getPipelineBinaryDataKHR( const VULKAN_HPP_NAMESPACE::PipelineBinaryDataInfoKHR & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; @@ -15545,8 +15668,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::TilePropertiesQCOM * pProperties, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template < + typename TilePropertiesQCOMAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type getFramebufferTilePropertiesQCOM( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template < @@ -15610,8 +15735,11 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::GetLatencyMarkerInfoNV * pLatencyMarkerInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template < + typename LatencyTimingsFrameReportNVAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, + int>::type = 0> VULKAN_HPP_NODISCARD std::vector getLatencyTimingsNV( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template < @@ -15651,7 +15779,9 @@ namespace VULKAN_HPP_NAMESPACE uint64_t * pMaxDeviation, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType, uint64_t>>::type getCalibratedTimestampsKHR( 
VULKAN_HPP_NAMESPACE::ArrayProxy const & timestampInfos, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; @@ -16068,7 +16198,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, + int>::type = 0> VULKAN_HPP_NODISCARD std::vector getQueueFamilyProperties( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template , @@ -16115,8 +16247,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::ExtensionProperties * pProperties, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template < + typename ExtensionPropertiesAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type enumerateDeviceExtensionProperties( Optional layerName VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; @@ -16135,7 +16269,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::LayerProperties * pProperties, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type enumerateDeviceLayerProperties( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template , @@ -16155,8 +16291,11 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::SparseImageFormatProperties * pProperties, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template < + typename SparseImageFormatPropertiesAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, + int>::type = 0> VULKAN_HPP_NODISCARD std::vector getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, @@ -16239,7 +16378,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, + int>::type = 0> VULKAN_HPP_NODISCARD std::vector getQueueFamilyProperties2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template , @@ -16251,7 +16392,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD std::vector getQueueFamilyProperties2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template , - typename 
Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if< + std::is_same::value, + int>::type = 0> VULKAN_HPP_NODISCARD std::vector getSparseImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; @@ -16337,7 +16482,10 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if< + std::is_same::value, + int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type getToolProperties( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template , @@ -16379,8 +16527,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::SurfaceFormatKHR * pSurfaceFormats, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template < + typename SurfaceFormatKHRAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; @@ -16400,7 +16550,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; @@ -16421,7 +16573,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Rect2D * pRects, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template , @@ -16439,7 +16593,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, + int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type getDisplayPropertiesKHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template , @@ -16456,8 +16612,11 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR * pProperties, 
Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template < + typename DisplayPlanePropertiesKHRAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, + int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type getDisplayPlanePropertiesKHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template < @@ -16476,7 +16635,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplays, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template , @@ -16492,8 +16653,11 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR * pProperties, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template < + typename DisplayModePropertiesKHRAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, + int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template < @@ -16620,8 +16784,11 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR * pVideoFormatProperties, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template < + typename VideoFormatPropertiesKHRAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, + int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type getVideoFormatPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; @@ -16719,7 +16886,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, + int>::type = 0> VULKAN_HPP_NODISCARD std::vector getQueueFamilyProperties2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template , @@ -16731,7 +16900,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD std::vector getQueueFamilyProperties2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) 
const; template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if< + std::is_same::value, + int>::type = 0> VULKAN_HPP_NODISCARD std::vector getSparseImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; @@ -16879,7 +17052,11 @@ namespace VULKAN_HPP_NAMESPACE #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template , typename PerformanceCounterDescriptionKHRAllocator = std::allocator, - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if< + std::is_same::value && + std::is_same::value, + int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType, std::vector>>::type @@ -16934,8 +17111,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR * pSurfaceFormats, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template < + typename SurfaceFormat2KHRAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; @@ -16949,7 +17128,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; @@ -16971,7 +17151,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, + int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type getDisplayProperties2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template , @@ -16988,8 +17170,11 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR * pProperties, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template < + typename DisplayPlaneProperties2KHRAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, + int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type getDisplayPlaneProperties2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template < @@ -17008,8 +17193,11 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR * pProperties, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - 
template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template < + typename DisplayModeProperties2KHRAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, + int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template < @@ -17054,7 +17242,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::TimeDomainKHR * pTimeDomains, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type getCalibrateableTimeDomainsEXT( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template , @@ -17072,7 +17262,10 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, + int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type getFragmentShadingRatesKHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; @@ -17095,7 +17288,10 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if< + std::is_same::value, + int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type getToolPropertiesEXT( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template , @@ -17116,7 +17312,10 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if< + std::is_same::value, + int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type getCooperativeMatrixPropertiesNV( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; @@ -17140,7 +17339,10 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, + int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type getSupportedFramebufferMixedSamplesCombinationsNV( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; @@ -17164,7 +17366,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template , + typename Dispatch = 
VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type getSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; @@ -17293,7 +17497,10 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if< + std::is_same::value, + int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type getOpticalFlowImageFormatsNV( const VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV & opticalFlowImageFormatInfo, @@ -17318,7 +17525,10 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if< + std::is_same::value, + int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type getCooperativeMatrixPropertiesKHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; @@ -17340,7 +17550,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::TimeDomainKHR * pTimeDomains, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type getCalibrateableTimeDomainsKHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template , @@ -17350,6 +17562,36 @@ namespace VULKAN_HPP_NAMESPACE getCalibrateableTimeDomainsKHR( TimeDomainKHRAllocator & timeDomainKHRAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + //=== VK_NV_cooperative_matrix2 === + + template + VULKAN_HPP_NODISCARD Result + getCooperativeMatrixFlexibleDimensionsPropertiesNV( uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::CooperativeMatrixFlexibleDimensionsPropertiesNV * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template < + typename CooperativeMatrixFlexibleDimensionsPropertiesNVAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, + int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType< + std::vector>::type + getCooperativeMatrixFlexibleDimensionsPropertiesNV( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template < + typename CooperativeMatrixFlexibleDimensionsPropertiesNVAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, + int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType< + std::vector>::type + getCooperativeMatrixFlexibleDimensionsPropertiesNV( + CooperativeMatrixFlexibleDimensionsPropertiesNVAllocator & cooperativeMatrixFlexibleDimensionsPropertiesNVAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; 
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + operator VkPhysicalDevice() const VULKAN_HPP_NOEXCEPT { return m_physicalDevice; @@ -17474,7 +17716,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::PhysicalDevice * pPhysicalDevices, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type enumeratePhysicalDevices( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template , @@ -17499,7 +17743,10 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if< + std::is_same::value, + int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type enumeratePhysicalDeviceGroups( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; @@ -17801,7 +18048,10 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if< + std::is_same::value, + int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type enumeratePhysicalDeviceGroupsKHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; @@ -18112,8 +18362,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::ExtensionProperties * pProperties, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template < + typename ExtensionPropertiesAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type enumerateInstanceExtensionProperties( Optional layerName VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ); @@ -18132,7 +18384,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::LayerProperties * pProperties, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type enumerateInstanceLayerProperties( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ); template , diff --git a/third_party/vulkan/vulkan_hash.hpp b/third_party/vulkan/vulkan_hash.hpp index c025d6c..61fe948 100644 --- a/third_party/vulkan/vulkan_hash.hpp +++ b/third_party/vulkan/vulkan_hash.hpp @@ -2527,6 +2527,29 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( + VULKAN_HPP_NAMESPACE::CooperativeMatrixFlexibleDimensionsPropertiesNV const & cooperativeMatrixFlexibleDimensionsPropertiesNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t 
seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixFlexibleDimensionsPropertiesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixFlexibleDimensionsPropertiesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixFlexibleDimensionsPropertiesNV.MGranularity ); + VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixFlexibleDimensionsPropertiesNV.NGranularity ); + VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixFlexibleDimensionsPropertiesNV.KGranularity ); + VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixFlexibleDimensionsPropertiesNV.AType ); + VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixFlexibleDimensionsPropertiesNV.BType ); + VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixFlexibleDimensionsPropertiesNV.CType ); + VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixFlexibleDimensionsPropertiesNV.ResultType ); + VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixFlexibleDimensionsPropertiesNV.saturatingAccumulation ); + VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixFlexibleDimensionsPropertiesNV.scope ); + VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixFlexibleDimensionsPropertiesNV.workgroupInvocations ); + return seed; + } + }; + template <> struct hash { @@ -8091,6 +8114,42 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrix2FeaturesNV const & physicalDeviceCooperativeMatrix2FeaturesNV ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrix2FeaturesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrix2FeaturesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrix2FeaturesNV.cooperativeMatrixWorkgroupScope ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrix2FeaturesNV.cooperativeMatrixFlexibleDimensions ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrix2FeaturesNV.cooperativeMatrixReductions ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrix2FeaturesNV.cooperativeMatrixConversions ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrix2FeaturesNV.cooperativeMatrixPerElementOperations ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrix2FeaturesNV.cooperativeMatrixTensorAddressing ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrix2FeaturesNV.cooperativeMatrixBlockLoads ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrix2PropertiesNV const & physicalDeviceCooperativeMatrix2PropertiesNV ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrix2PropertiesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrix2PropertiesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrix2PropertiesNV.cooperativeMatrixWorkgroupScopeMaxWorkgroupSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrix2PropertiesNV.cooperativeMatrixFlexibleDimensionsMaxDimension ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrix2PropertiesNV.cooperativeMatrixWorkgroupScopeReservedSharedMemory ); + return seed; + } + }; + template <> struct hash { diff --git a/third_party/vulkan/vulkan_raii.hpp b/third_party/vulkan/vulkan_raii.hpp index 68f4f37..e777293 100644 --- a/third_party/vulkan/vulkan_raii.hpp +++ b/third_party/vulkan/vulkan_raii.hpp @@ -376,6 +376,10 @@ namespace VULKAN_HPP_NAMESPACE 
vkGetPhysicalDeviceCalibrateableTimeDomainsKHR = PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCalibrateableTimeDomainsKHR" ) ); + //=== VK_NV_cooperative_matrix2 === + vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV = PFN_vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV" ) ); + vkGetDeviceProcAddr = PFN_vkGetDeviceProcAddr( vkGetInstanceProcAddr( instance, "vkGetDeviceProcAddr" ) ); } @@ -653,6 +657,9 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_KHR_calibrated_timestamps === PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsKHR vkGetPhysicalDeviceCalibrateableTimeDomainsKHR = 0; + //=== VK_NV_cooperative_matrix2 === + PFN_vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV = 0; + PFN_vkGetDeviceProcAddr vkGetDeviceProcAddr = 0; }; @@ -3539,6 +3546,11 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD std::vector getCalibrateableTimeDomainsKHR() const; + //=== VK_NV_cooperative_matrix2 === + + VULKAN_HPP_NODISCARD std::vector + getCooperativeMatrixFlexibleDimensionsPropertiesNV() const; + private: VULKAN_HPP_NAMESPACE::PhysicalDevice m_physicalDevice = {}; VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * m_dispatcher = nullptr; @@ -13548,6 +13560,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT { + VULKAN_HPP_ASSERT( + createInfo.flags & vk::DescriptorPoolCreateFlagBits::eFreeDescriptorSet && + "createInfo.flags need to have vk::DescriptorPoolCreateFlagBits::eFreeDesriptors set in order to allow destruction of VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DescriptorSet which requires to return individual allocations to the pool" ); VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool; VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateDescriptorPool( static_cast( m_device ), @@ -23687,6 +23702,39 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( executionSetWrites.data() ) ); } + //=== VK_NV_cooperative_matrix2 === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getCooperativeMatrixFlexibleDimensionsPropertiesNV() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV && + "Function requires " ); + + std::vector properties; + uint32_t propertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( getDispatcher()->vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV( + static_cast( m_physicalDevice ), &propertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( getDispatcher()->vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV( + static_cast( m_physicalDevice ), + &propertyCount, + reinterpret_cast( properties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixFlexibleDimensionsPropertiesNV" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return properties; + } + //==================== //=== 
RAII Helpers === //==================== diff --git a/third_party/vulkan/vulkan_static_assertions.hpp b/third_party/vulkan/vulkan_static_assertions.hpp index d8c323e..7dbb47b 100644 --- a/third_party/vulkan/vulkan_static_assertions.hpp +++ b/third_party/vulkan/vulkan_static_assertions.hpp @@ -7984,4 +7984,29 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "DepthClampRangeEXT is not nothrow_move_constructible!" ); +//=== VK_NV_cooperative_matrix2 === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CooperativeMatrixFlexibleDimensionsPropertiesNV ) == + sizeof( VkCooperativeMatrixFlexibleDimensionsPropertiesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "CooperativeMatrixFlexibleDimensionsPropertiesNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrix2FeaturesNV ) == sizeof( VkPhysicalDeviceCooperativeMatrix2FeaturesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceCooperativeMatrix2FeaturesNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrix2PropertiesNV ) == + sizeof( VkPhysicalDeviceCooperativeMatrix2PropertiesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceCooperativeMatrix2PropertiesNV is not nothrow_move_constructible!" 
); + #endif diff --git a/third_party/vulkan/vulkan_structs.hpp b/third_party/vulkan/vulkan_structs.hpp index a9d7102..7b9b7c4 100644 --- a/third_party/vulkan/vulkan_structs.hpp +++ b/third_party/vulkan/vulkan_structs.hpp @@ -16892,6 +16892,133 @@ namespace VULKAN_HPP_NAMESPACE using ConformanceVersionKHR = ConformanceVersion; + struct CooperativeMatrixFlexibleDimensionsPropertiesNV + { + using NativeType = VkCooperativeMatrixFlexibleDimensionsPropertiesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCooperativeMatrixFlexibleDimensionsPropertiesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + CooperativeMatrixFlexibleDimensionsPropertiesNV( uint32_t MGranularity_ = {}, + uint32_t NGranularity_ = {}, + uint32_t KGranularity_ = {}, + VULKAN_HPP_NAMESPACE::ComponentTypeKHR AType_ = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16, + VULKAN_HPP_NAMESPACE::ComponentTypeKHR BType_ = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16, + VULKAN_HPP_NAMESPACE::ComponentTypeKHR CType_ = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16, + VULKAN_HPP_NAMESPACE::ComponentTypeKHR ResultType_ = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16, + VULKAN_HPP_NAMESPACE::Bool32 saturatingAccumulation_ = {}, + VULKAN_HPP_NAMESPACE::ScopeKHR scope_ = VULKAN_HPP_NAMESPACE::ScopeKHR::eDevice, + uint32_t workgroupInvocations_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , MGranularity{ MGranularity_ } + , NGranularity{ NGranularity_ } + , KGranularity{ KGranularity_ } + , AType{ AType_ } + , BType{ BType_ } + , CType{ CType_ } + , ResultType{ ResultType_ } + , saturatingAccumulation{ saturatingAccumulation_ } + , scope{ scope_ } + , workgroupInvocations{ workgroupInvocations_ } + { + } + + VULKAN_HPP_CONSTEXPR + CooperativeMatrixFlexibleDimensionsPropertiesNV( CooperativeMatrixFlexibleDimensionsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CooperativeMatrixFlexibleDimensionsPropertiesNV( VkCooperativeMatrixFlexibleDimensionsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : CooperativeMatrixFlexibleDimensionsPropertiesNV( *reinterpret_cast( &rhs ) ) + { + } + + CooperativeMatrixFlexibleDimensionsPropertiesNV & operator=( CooperativeMatrixFlexibleDimensionsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + CooperativeMatrixFlexibleDimensionsPropertiesNV & operator=( VkCooperativeMatrixFlexibleDimensionsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkCooperativeMatrixFlexibleDimensionsPropertiesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCooperativeMatrixFlexibleDimensionsPropertiesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( + sType, pNext, MGranularity, NGranularity, KGranularity, AType, BType, CType, ResultType, saturatingAccumulation, scope, workgroupInvocations ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CooperativeMatrixFlexibleDimensionsPropertiesNV const & ) const = default; +#else + bool operator==( CooperativeMatrixFlexibleDimensionsPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( 
VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( MGranularity == rhs.MGranularity ) && ( NGranularity == rhs.NGranularity ) && + ( KGranularity == rhs.KGranularity ) && ( AType == rhs.AType ) && ( BType == rhs.BType ) && ( CType == rhs.CType ) && + ( ResultType == rhs.ResultType ) && ( saturatingAccumulation == rhs.saturatingAccumulation ) && ( scope == rhs.scope ) && + ( workgroupInvocations == rhs.workgroupInvocations ); +# endif + } + + bool operator!=( CooperativeMatrixFlexibleDimensionsPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCooperativeMatrixFlexibleDimensionsPropertiesNV; + void * pNext = {}; + uint32_t MGranularity = {}; + uint32_t NGranularity = {}; + uint32_t KGranularity = {}; + VULKAN_HPP_NAMESPACE::ComponentTypeKHR AType = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16; + VULKAN_HPP_NAMESPACE::ComponentTypeKHR BType = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16; + VULKAN_HPP_NAMESPACE::ComponentTypeKHR CType = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16; + VULKAN_HPP_NAMESPACE::ComponentTypeKHR ResultType = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16; + VULKAN_HPP_NAMESPACE::Bool32 saturatingAccumulation = {}; + VULKAN_HPP_NAMESPACE::ScopeKHR scope = VULKAN_HPP_NAMESPACE::ScopeKHR::eDevice; + uint32_t workgroupInvocations = {}; + }; + + template <> + struct CppType + { + using Type = CooperativeMatrixFlexibleDimensionsPropertiesNV; + }; + struct CooperativeMatrixPropertiesKHR { using NativeType = VkCooperativeMatrixPropertiesKHR; @@ -61625,6 +61752,281 @@ namespace VULKAN_HPP_NAMESPACE using Type = PhysicalDeviceConservativeRasterizationPropertiesEXT; }; + struct PhysicalDeviceCooperativeMatrix2FeaturesNV + { + using NativeType = VkPhysicalDeviceCooperativeMatrix2FeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCooperativeMatrix2FeaturesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrix2FeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixWorkgroupScope_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixFlexibleDimensions_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixReductions_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixConversions_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixPerElementOperations_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixTensorAddressing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixBlockLoads_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , cooperativeMatrixWorkgroupScope{ cooperativeMatrixWorkgroupScope_ } + , cooperativeMatrixFlexibleDimensions{ cooperativeMatrixFlexibleDimensions_ } + , cooperativeMatrixReductions{ cooperativeMatrixReductions_ } + , cooperativeMatrixConversions{ cooperativeMatrixConversions_ } + , cooperativeMatrixPerElementOperations{ cooperativeMatrixPerElementOperations_ } + , cooperativeMatrixTensorAddressing{ cooperativeMatrixTensorAddressing_ } + , cooperativeMatrixBlockLoads{ cooperativeMatrixBlockLoads_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrix2FeaturesNV( PhysicalDeviceCooperativeMatrix2FeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceCooperativeMatrix2FeaturesNV( 
VkPhysicalDeviceCooperativeMatrix2FeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceCooperativeMatrix2FeaturesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceCooperativeMatrix2FeaturesNV & operator=( PhysicalDeviceCooperativeMatrix2FeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceCooperativeMatrix2FeaturesNV & operator=( VkPhysicalDeviceCooperativeMatrix2FeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrix2FeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrix2FeaturesNV & + setCooperativeMatrixWorkgroupScope( VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixWorkgroupScope_ ) VULKAN_HPP_NOEXCEPT + { + cooperativeMatrixWorkgroupScope = cooperativeMatrixWorkgroupScope_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrix2FeaturesNV & + setCooperativeMatrixFlexibleDimensions( VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixFlexibleDimensions_ ) VULKAN_HPP_NOEXCEPT + { + cooperativeMatrixFlexibleDimensions = cooperativeMatrixFlexibleDimensions_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrix2FeaturesNV & + setCooperativeMatrixReductions( VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixReductions_ ) VULKAN_HPP_NOEXCEPT + { + cooperativeMatrixReductions = cooperativeMatrixReductions_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrix2FeaturesNV & + setCooperativeMatrixConversions( VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixConversions_ ) VULKAN_HPP_NOEXCEPT + { + cooperativeMatrixConversions = cooperativeMatrixConversions_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrix2FeaturesNV & + setCooperativeMatrixPerElementOperations( VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixPerElementOperations_ ) VULKAN_HPP_NOEXCEPT + { + cooperativeMatrixPerElementOperations = cooperativeMatrixPerElementOperations_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrix2FeaturesNV & + setCooperativeMatrixTensorAddressing( VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixTensorAddressing_ ) VULKAN_HPP_NOEXCEPT + { + cooperativeMatrixTensorAddressing = cooperativeMatrixTensorAddressing_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrix2FeaturesNV & + setCooperativeMatrixBlockLoads( VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixBlockLoads_ ) VULKAN_HPP_NOEXCEPT + { + cooperativeMatrixBlockLoads = cooperativeMatrixBlockLoads_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceCooperativeMatrix2FeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCooperativeMatrix2FeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + cooperativeMatrixWorkgroupScope, + cooperativeMatrixFlexibleDimensions, + cooperativeMatrixReductions, + cooperativeMatrixConversions, + cooperativeMatrixPerElementOperations, + cooperativeMatrixTensorAddressing, + cooperativeMatrixBlockLoads ); + } +#endif + +#if 
defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceCooperativeMatrix2FeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceCooperativeMatrix2FeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( cooperativeMatrixWorkgroupScope == rhs.cooperativeMatrixWorkgroupScope ) && + ( cooperativeMatrixFlexibleDimensions == rhs.cooperativeMatrixFlexibleDimensions ) && + ( cooperativeMatrixReductions == rhs.cooperativeMatrixReductions ) && ( cooperativeMatrixConversions == rhs.cooperativeMatrixConversions ) && + ( cooperativeMatrixPerElementOperations == rhs.cooperativeMatrixPerElementOperations ) && + ( cooperativeMatrixTensorAddressing == rhs.cooperativeMatrixTensorAddressing ) && + ( cooperativeMatrixBlockLoads == rhs.cooperativeMatrixBlockLoads ); +# endif + } + + bool operator!=( PhysicalDeviceCooperativeMatrix2FeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCooperativeMatrix2FeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixWorkgroupScope = {}; + VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixFlexibleDimensions = {}; + VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixReductions = {}; + VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixConversions = {}; + VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixPerElementOperations = {}; + VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixTensorAddressing = {}; + VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixBlockLoads = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceCooperativeMatrix2FeaturesNV; + }; + + struct PhysicalDeviceCooperativeMatrix2PropertiesNV + { + using NativeType = VkPhysicalDeviceCooperativeMatrix2PropertiesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCooperativeMatrix2PropertiesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrix2PropertiesNV( uint32_t cooperativeMatrixWorkgroupScopeMaxWorkgroupSize_ = {}, + uint32_t cooperativeMatrixFlexibleDimensionsMaxDimension_ = {}, + uint32_t cooperativeMatrixWorkgroupScopeReservedSharedMemory_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , cooperativeMatrixWorkgroupScopeMaxWorkgroupSize{ cooperativeMatrixWorkgroupScopeMaxWorkgroupSize_ } + , cooperativeMatrixFlexibleDimensionsMaxDimension{ cooperativeMatrixFlexibleDimensionsMaxDimension_ } + , cooperativeMatrixWorkgroupScopeReservedSharedMemory{ cooperativeMatrixWorkgroupScopeReservedSharedMemory_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrix2PropertiesNV( PhysicalDeviceCooperativeMatrix2PropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceCooperativeMatrix2PropertiesNV( VkPhysicalDeviceCooperativeMatrix2PropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceCooperativeMatrix2PropertiesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceCooperativeMatrix2PropertiesNV & operator=( PhysicalDeviceCooperativeMatrix2PropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceCooperativeMatrix2PropertiesNV & operator=( 
VkPhysicalDeviceCooperativeMatrix2PropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceCooperativeMatrix2PropertiesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCooperativeMatrix2PropertiesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + cooperativeMatrixWorkgroupScopeMaxWorkgroupSize, + cooperativeMatrixFlexibleDimensionsMaxDimension, + cooperativeMatrixWorkgroupScopeReservedSharedMemory ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceCooperativeMatrix2PropertiesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceCooperativeMatrix2PropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( cooperativeMatrixWorkgroupScopeMaxWorkgroupSize == rhs.cooperativeMatrixWorkgroupScopeMaxWorkgroupSize ) && + ( cooperativeMatrixFlexibleDimensionsMaxDimension == rhs.cooperativeMatrixFlexibleDimensionsMaxDimension ) && + ( cooperativeMatrixWorkgroupScopeReservedSharedMemory == rhs.cooperativeMatrixWorkgroupScopeReservedSharedMemory ); +# endif + } + + bool operator!=( PhysicalDeviceCooperativeMatrix2PropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCooperativeMatrix2PropertiesNV; + void * pNext = {}; + uint32_t cooperativeMatrixWorkgroupScopeMaxWorkgroupSize = {}; + uint32_t cooperativeMatrixFlexibleDimensionsMaxDimension = {}; + uint32_t cooperativeMatrixWorkgroupScopeReservedSharedMemory = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceCooperativeMatrix2PropertiesNV; + }; + struct PhysicalDeviceCooperativeMatrixFeaturesKHR { using NativeType = VkPhysicalDeviceCooperativeMatrixFeaturesKHR; diff --git a/third_party/vulkan/vulkan_to_string.hpp b/third_party/vulkan/vulkan_to_string.hpp index 41300e9..c560196 100644 --- a/third_party/vulkan/vulkan_to_string.hpp +++ b/third_party/vulkan/vulkan_to_string.hpp @@ -4653,6 +4653,9 @@ namespace VULKAN_HPP_NAMESPACE case StructureType::eImageAlignmentControlCreateInfoMESA: return "ImageAlignmentControlCreateInfoMESA"; case StructureType::ePhysicalDeviceDepthClampControlFeaturesEXT: return "PhysicalDeviceDepthClampControlFeaturesEXT"; case StructureType::ePipelineViewportDepthClampControlCreateInfoEXT: return "PipelineViewportDepthClampControlCreateInfoEXT"; + case StructureType::ePhysicalDeviceCooperativeMatrix2FeaturesNV: return "PhysicalDeviceCooperativeMatrix2FeaturesNV"; + case StructureType::eCooperativeMatrixFlexibleDimensionsPropertiesNV: return "CooperativeMatrixFlexibleDimensionsPropertiesNV"; + case StructureType::ePhysicalDeviceCooperativeMatrix2PropertiesNV: return "PhysicalDeviceCooperativeMatrix2PropertiesNV"; default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } From 1721632a754e4c00f3c21a5b65f2a9a87f175bf2 Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Sun, 27 Oct 2024 20:55:21 +0100 Subject: [PATCH 057/131] pushing for vavaas to debug --- 
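Note on the LoadFont change below: the registry is now consulted for an existing font with the same name and scale, and a new Font is built and registered only on a cache miss. A minimal standalone sketch of that lookup-or-create pattern, using plain std types and hypothetical FakeFont/FakeFontCache names instead of the real mlx classes (illustration only, assumptions marked in comments):

    #include <algorithm>
    #include <iostream>
    #include <memory>
    #include <string>
    #include <vector>

    // Hypothetical stand-ins for mlx::Font / mlx::FontRegistry; they only mirror
    // the parts of the interface this patch relies on.
    struct FakeFont
    {
        std::string name;
        float scale;
        void Build() { std::cout << "building " << name << " @ " << scale << '\n'; }
    };

    class FakeFontCache
    {
        public:
            // Return an already-registered font matching (name, scale), or null on a miss.
            std::shared_ptr<FakeFont> Get(const std::string& name, float scale) const
            {
                auto it = std::find_if(m_fonts.begin(), m_fonts.end(),
                    [&](const std::shared_ptr<FakeFont>& f) { return f->name == name && f->scale == scale; });
                return (it != m_fonts.end() ? *it : nullptr);
            }

            void Register(std::shared_ptr<FakeFont> font) { m_fonts.push_back(std::move(font)); }

        private:
            std::vector<std::shared_ptr<FakeFont>> m_fonts;
    };

    // Mirrors the reworked Application::LoadFont flow: build and register only when
    // no matching font is already cached.
    std::shared_ptr<FakeFont> LoadFont(FakeFontCache& cache, const std::string& name, float scale)
    {
        std::shared_ptr<FakeFont> font = cache.Get(name, scale);
        if(!font)
        {
            font = std::make_shared<FakeFont>(FakeFont{ name, scale });
            font->Build();
            cache.Register(font);
        }
        return font;
    }

    int main()
    {
        FakeFontCache cache;
        LoadFont(cache, "default", 16.0f); // builds and registers
        LoadFont(cache, "default", 16.0f); // cache hit, no second build
    }

Because the lookup compares both name and scale, the same TTF loaded at two different scales still produces two registry entries (and two atlases), which matches how GetFont is written in Font.inl below.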
runtime/Includes/Core/Application.inl | 20 +++++++++++--------- runtime/Includes/Core/Graphics.inl | 3 +++ runtime/Includes/Graphics/Font.h | 2 +- runtime/Includes/Graphics/Font.inl | 9 +++++---- runtime/Sources/Graphics/Font.cpp | 3 ++- runtime/Sources/Graphics/Text.cpp | 8 ++++---- runtime/Sources/Renderer/Buffer.cpp | 11 ++++++++++- 7 files changed, 36 insertions(+), 20 deletions(-) diff --git a/runtime/Includes/Core/Application.inl b/runtime/Includes/Core/Application.inl index 5e96302..d4ae847 100644 --- a/runtime/Includes/Core/Application.inl +++ b/runtime/Includes/Core/Application.inl @@ -148,20 +148,22 @@ namespace mlx void Application::LoadFont(const std::filesystem::path& filepath, float scale) { MLX_PROFILE_FUNCTION(); - std::shared_ptr font; - if(filepath.string() == "default") - font = std::make_shared("default", dogica_ttf, scale); - else - font = std::make_shared(filepath, scale); + std::shared_ptr font = m_font_registry.GetFont(filepath, scale); + if(!font) + { + if(filepath.string() == "default") + font = std::make_shared("default", dogica_ttf, scale); + else + font = std::make_shared(filepath, scale); + font->BuildFont(); + m_font_registry.RegisterFont(font); + } + for(auto& gs : m_graphics) { if(gs) gs->GetScene().BindFont(font); } - if(m_font_registry.IsFontKnown(font)) - return; - font->BuildFont(); - m_font_registry.RegisterFont(font); } void Application::TexturePut(Handle win, Handle img, int x, int y) diff --git a/runtime/Includes/Core/Graphics.inl b/runtime/Includes/Core/Graphics.inl index 2b44238..8806200 100644 --- a/runtime/Includes/Core/Graphics.inl +++ b/runtime/Includes/Core/Graphics.inl @@ -27,6 +27,9 @@ namespace mlx void GraphicsSupport::StringPut(int x, int y, std::uint32_t color, std::string str) { MLX_PROFILE_FUNCTION(); + if(str.empty()) + return; + Vec4f vec_color = { static_cast((color & 0x000000FF)) / 255.f, static_cast((color & 0x0000FF00) >> 8) / 255.f, diff --git a/runtime/Includes/Graphics/Font.h b/runtime/Includes/Graphics/Font.h index 0f5888d..dec3805 100644 --- a/runtime/Includes/Graphics/Font.h +++ b/runtime/Includes/Graphics/Font.h @@ -38,7 +38,7 @@ namespace mlx inline void RegisterFont(std::shared_ptr font); inline void UnregisterFont(std::shared_ptr font); - inline bool IsFontKnown(std::shared_ptr font); + inline std::shared_ptr GetFont(const std::filesystem::path& name, float scale); ~FontRegistry() = default; diff --git a/runtime/Includes/Graphics/Font.inl b/runtime/Includes/Graphics/Font.inl index 6a3482c..7e49e0a 100644 --- a/runtime/Includes/Graphics/Font.inl +++ b/runtime/Includes/Graphics/Font.inl @@ -13,11 +13,12 @@ namespace mlx m_fonts_registry.erase(font); } - bool FontRegistry::IsFontKnown(std::shared_ptr font) + std::shared_ptr FontRegistry::GetFont(const std::filesystem::path& name, float scale) { - return std::find_if(m_fonts_registry.begin(), m_fonts_registry.end(), [&font](std::shared_ptr rhs) + auto it = std::find_if(m_fonts_registry.begin(), m_fonts_registry.end(), [&name, scale](std::shared_ptr rhs) { - return font->GetName() == rhs->GetName() && font->GetScale() == rhs->GetScale(); - }) != m_fonts_registry.end(); + return name == rhs->GetName() && scale == rhs->GetScale(); + }); + return (it != m_fonts_registry.end() ? 
*it : nullptr); } } diff --git a/runtime/Sources/Graphics/Font.cpp b/runtime/Sources/Graphics/Font.cpp index d74afa9..67289d9 100644 --- a/runtime/Sources/Graphics/Font.cpp +++ b/runtime/Sources/Graphics/Font.cpp @@ -50,7 +50,7 @@ namespace mlx } #ifdef DEBUG - m_atlas.Init(vulkan_bitmap, RANGE, RANGE, VK_FORMAT_R8G8B8A8_SRGB, false, m_name + "_font_altas"); + m_atlas.Init(vulkan_bitmap, RANGE, RANGE, VK_FORMAT_R8G8B8A8_SRGB, false, m_name + "_font_atlas"); #else m_atlas.Init(vulkan_bitmap, RANGE, RANGE, VK_FORMAT_R8G8B8A8_SRGB, false, {}); #endif @@ -61,5 +61,6 @@ namespace mlx void Font::Destroy() { m_atlas.Destroy(); + DebugLog("Font: unloaded %", m_name); } } diff --git a/runtime/Sources/Graphics/Text.cpp b/runtime/Sources/Graphics/Text.cpp index 475541c..89e0d09 100644 --- a/runtime/Sources/Graphics/Text.cpp +++ b/runtime/Sources/Graphics/Text.cpp @@ -29,10 +29,10 @@ namespace mlx std::size_t index = vertex_data.size(); - vertex_data.emplace_back(Vec4f{ q.x0, q.y0, 0.0f, 0.0f }, Vec2f{ q.s0, q.t0 }); - vertex_data.emplace_back(Vec4f{ q.x1, q.y0, 0.0f, 0.0f }, Vec2f{ q.s1, q.t0 }); - vertex_data.emplace_back(Vec4f{ q.x1, q.y1, 0.0f, 0.0f }, Vec2f{ q.s1, q.t1 }); - vertex_data.emplace_back(Vec4f{ q.x0, q.y1, 0.0f, 0.0f }, Vec2f{ q.s0, q.t1 }); + vertex_data.emplace_back(Vec4f{ q.x0, q.y0, 0.0f, 1.0f }, Vec2f{ q.s0, q.t0 }); + vertex_data.emplace_back(Vec4f{ q.x1, q.y0, 0.0f, 1.0f }, Vec2f{ q.s1, q.t0 }); + vertex_data.emplace_back(Vec4f{ q.x1, q.y1, 0.0f, 1.0f }, Vec2f{ q.s1, q.t1 }); + vertex_data.emplace_back(Vec4f{ q.x0, q.y1, 0.0f, 1.0f }, Vec2f{ q.s0, q.t1 }); index_data.emplace_back(index + 0); index_data.emplace_back(index + 1); diff --git a/runtime/Sources/Renderer/Buffer.cpp b/runtime/Sources/Renderer/Buffer.cpp index a0dd737..95a70c3 100644 --- a/runtime/Sources/Renderer/Buffer.cpp +++ b/runtime/Sources/Renderer/Buffer.cpp @@ -118,7 +118,16 @@ namespace mlx return; RenderCore::Get().GetAllocator().UnmapMemory(m_allocation); #ifdef DEBUG - RenderCore::Get().GetAllocator().DestroyBuffer(m_allocation, m_buffer, m_debug_name.c_str()); + std::string alloc_name{ m_debug_name }; + if(m_usage & VK_BUFFER_USAGE_INDEX_BUFFER_BIT) + alloc_name.append("_index_buffer"); + else if(m_usage & VK_BUFFER_USAGE_VERTEX_BUFFER_BIT) + alloc_name.append("_vertex_buffer"); + else if(m_usage & VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT) + alloc_name.append("_uniform_buffer"); + else + alloc_name.append("_buffer"); + RenderCore::Get().GetAllocator().DestroyBuffer(m_allocation, m_buffer, alloc_name.c_str()); #else RenderCore::Get().GetAllocator().DestroyBuffer(m_allocation, m_buffer, nullptr); #endif From 77240013a4b08a9afa13132dd84d0080a32c7998 Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Sun, 27 Oct 2024 21:18:22 +0100 Subject: [PATCH 058/131] fixing segfault at exit --- runtime/Includes/Core/Graphics.inl | 4 ++-- runtime/Includes/Graphics/Font.h | 1 + runtime/Includes/Graphics/Font.inl | 5 +++++ runtime/Sources/Core/Application.cpp | 1 + 4 files changed, 9 insertions(+), 2 deletions(-) diff --git a/runtime/Includes/Core/Graphics.inl b/runtime/Includes/Core/Graphics.inl index 8806200..c0df6e5 100644 --- a/runtime/Includes/Core/Graphics.inl +++ b/runtime/Includes/Core/Graphics.inl @@ -43,10 +43,10 @@ namespace mlx Text& new_text = p_scene->CreateText(str); new_text.SetPosition(Vec2f{ static_cast(x), static_cast(y) }); new_text.SetColor(std::move(vec_color)); - if(m_pixelput_called) + // if(m_pixelput_called) { m_draw_layer++; - m_pixelput_called = false; + // m_pixelput_called = false; } } else 
if(!p_scene->IsTextAtGivenDrawLayer(str, m_draw_layer)) diff --git a/runtime/Includes/Graphics/Font.h b/runtime/Includes/Graphics/Font.h index dec3805..a082ee1 100644 --- a/runtime/Includes/Graphics/Font.h +++ b/runtime/Includes/Graphics/Font.h @@ -39,6 +39,7 @@ namespace mlx inline void RegisterFont(std::shared_ptr font); inline void UnregisterFont(std::shared_ptr font); inline std::shared_ptr GetFont(const std::filesystem::path& name, float scale); + inline void Reset(); ~FontRegistry() = default; diff --git a/runtime/Includes/Graphics/Font.inl b/runtime/Includes/Graphics/Font.inl index 7e49e0a..f2a684e 100644 --- a/runtime/Includes/Graphics/Font.inl +++ b/runtime/Includes/Graphics/Font.inl @@ -21,4 +21,9 @@ namespace mlx }); return (it != m_fonts_registry.end() ? *it : nullptr); } + + void FontRegistry::Reset() + { + m_fonts_registry.clear(); + } } diff --git a/runtime/Sources/Core/Application.cpp b/runtime/Sources/Core/Application.cpp index fd2cd36..ce32c96 100644 --- a/runtime/Sources/Core/Application.cpp +++ b/runtime/Sources/Core/Application.cpp @@ -103,6 +103,7 @@ namespace mlx window.reset(); } + m_font_registry.Reset(); p_render_core.reset(); p_sdl_manager.reset(); #ifdef PROFILER From f7ddf3cccbe05dc3795516eeacb20f51ba6c14c5 Mon Sep 17 00:00:00 2001 From: Namonay Date: Sun, 27 Oct 2024 23:12:07 +0100 Subject: [PATCH 059/131] add: bring textures to draw layer Adding the Scene::BringToDrawLayer() to fix layering issues after a Scene::ResetScene() call --- runtime/Includes/Core/Graphics.inl | 10 +++++----- runtime/Includes/Graphics/Scene.h | 1 + runtime/Sources/Graphics/Scene.cpp | 11 +++++++++++ 3 files changed, 17 insertions(+), 5 deletions(-) diff --git a/runtime/Includes/Core/Graphics.inl b/runtime/Includes/Core/Graphics.inl index c0df6e5..5a8362c 100644 --- a/runtime/Includes/Core/Graphics.inl +++ b/runtime/Includes/Core/Graphics.inl @@ -45,14 +45,14 @@ namespace mlx new_text.SetColor(std::move(vec_color)); // if(m_pixelput_called) { - m_draw_layer++; + // m_draw_layer++; // m_pixelput_called = false; } } else if(!p_scene->IsTextAtGivenDrawLayer(str, m_draw_layer)) { - p_scene->BringToFront(text.Get()); - m_draw_layer++; + p_scene->BringToDrawLayer(text.Get(), m_draw_layer); + //m_draw_layer++; } } @@ -73,8 +73,8 @@ namespace mlx } else if(!p_scene->IsTextureAtGivenDrawLayer(texture, m_draw_layer)) { - p_scene->BringToFront(sprite.Get()); - m_draw_layer++; + p_scene->BringToDrawLayer(sprite.Get(), m_draw_layer); + //m_draw_layer++; } } diff --git a/runtime/Includes/Graphics/Scene.h b/runtime/Includes/Graphics/Scene.h index d7f2e2a..0d46488 100644 --- a/runtime/Includes/Graphics/Scene.h +++ b/runtime/Includes/Graphics/Scene.h @@ -27,6 +27,7 @@ namespace mlx inline void BindFont(std::shared_ptr font) { Verify((bool)font, "invalid fond pointer"); p_bound_font = font; } void BringToFront(NonOwningPtr drawable); + void BringToDrawLayer(NonOwningPtr drawable, std::uint64_t draw_layer); inline void ResetScene() { m_drawables.clear(); } diff --git a/runtime/Sources/Graphics/Scene.cpp b/runtime/Sources/Graphics/Scene.cpp index f7752e6..f64afe7 100644 --- a/runtime/Sources/Graphics/Scene.cpp +++ b/runtime/Sources/Graphics/Scene.cpp @@ -124,4 +124,15 @@ namespace mlx return; std::rotate(it, it + 1, m_drawables.end()); } + void Scene::BringToDrawLayer(NonOwningPtr drawable, std::uint64_t draw_layer) + { + MLX_PROFILE_FUNCTION(); + auto it = std::find_if(m_drawables.begin(), m_drawables.end(), [&drawable](std::shared_ptr drawable_ptr) + { + return drawable_ptr.get() == drawable.Get(); + 
}); + if (m_drawables.size() > draw_layer) + return; + std::swap(*it, *(m_drawables.begin() + draw_layer)); + } } From df05148d223791a640498642ae646a198da7da92 Mon Sep 17 00:00:00 2001 From: Namonay Date: Mon, 28 Oct 2024 00:29:11 +0100 Subject: [PATCH 060/131] Cleaned code --- runtime/Includes/Core/Graphics.inl | 24 +++++++++++------------- 1 file changed, 11 insertions(+), 13 deletions(-) diff --git a/runtime/Includes/Core/Graphics.inl b/runtime/Includes/Core/Graphics.inl index 5a8362c..24b6dd5 100644 --- a/runtime/Includes/Core/Graphics.inl +++ b/runtime/Includes/Core/Graphics.inl @@ -40,19 +40,18 @@ namespace mlx NonOwningPtr text = p_scene->GetTextFromPositionAndColor(str, Vec2f{ static_cast(x), static_cast(y) }, vec_color); if(!text) { + if(m_pixelput_called) + { + m_draw_layer++; + m_pixelput_called = false; + } Text& new_text = p_scene->CreateText(str); new_text.SetPosition(Vec2f{ static_cast(x), static_cast(y) }); new_text.SetColor(std::move(vec_color)); - // if(m_pixelput_called) - { - // m_draw_layer++; - // m_pixelput_called = false; - } } else if(!p_scene->IsTextAtGivenDrawLayer(str, m_draw_layer)) { p_scene->BringToDrawLayer(text.Get(), m_draw_layer); - //m_draw_layer++; } } @@ -62,19 +61,18 @@ namespace mlx NonOwningPtr sprite = p_scene->GetSpriteFromTextureAndPosition(texture, Vec2f{ static_cast(x), static_cast(y) }); if(!sprite) { - - Sprite& new_sprite = p_scene->CreateSprite(texture); - new_sprite.SetPosition(Vec2f{ static_cast(x), static_cast(y) }); if(m_pixelput_called) { m_draw_layer++; m_pixelput_called = false; - } + } + Sprite& new_sprite = p_scene->CreateSprite(texture); + new_sprite.SetPosition(Vec2f{ static_cast(x), static_cast(y) }); + } else if(!p_scene->IsTextureAtGivenDrawLayer(texture, m_draw_layer)) - { - p_scene->BringToDrawLayer(sprite.Get(), m_draw_layer); - //m_draw_layer++; + { + p_scene->BringToDrawLayer(sprite.Get(), m_draw_layer); } } From 245d5561052bd55c4e4dbc7b1b4d7f01628dca94 Mon Sep 17 00:00:00 2001 From: Namonay <105780726+Namonay@users.noreply.github.com> Date: Mon, 28 Oct 2024 20:02:42 +0100 Subject: [PATCH 061/131] Formatting --- runtime/Includes/Core/Graphics.inl | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/runtime/Includes/Core/Graphics.inl b/runtime/Includes/Core/Graphics.inl index 24b6dd5..5bbd14d 100644 --- a/runtime/Includes/Core/Graphics.inl +++ b/runtime/Includes/Core/Graphics.inl @@ -50,9 +50,7 @@ namespace mlx new_text.SetColor(std::move(vec_color)); } else if(!p_scene->IsTextAtGivenDrawLayer(str, m_draw_layer)) - { p_scene->BringToDrawLayer(text.Get(), m_draw_layer); - } } void GraphicsSupport::TexturePut(NonOwningPtr texture, int x, int y) @@ -70,10 +68,8 @@ namespace mlx new_sprite.SetPosition(Vec2f{ static_cast(x), static_cast(y) }); } - else if(!p_scene->IsTextureAtGivenDrawLayer(texture, m_draw_layer)) - { - p_scene->BringToDrawLayer(sprite.Get(), m_draw_layer); - } + else if(!p_scene->IsTextureAtGivenDrawLayer(texture, m_draw_layer)) + p_scene->BringToDrawLayer(sprite.Get(), m_draw_layer); } void GraphicsSupport::TryEraseSpritesInScene(NonOwningPtr texture) noexcept From 3153941573ca7df5f1c9efe8aab01f2decf425dd Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Mon, 28 Oct 2024 20:17:39 +0100 Subject: [PATCH 062/131] yes --- runtime/Includes/Core/Application.inl | 5 +- runtime/Includes/Core/Graphics.inl | 12 +- runtime/Includes/Embedded/2DVertex.nzsl | 4 +- runtime/Includes/Embedded/2DVertex.spv.h | 105 +++++++++--------- runtime/Includes/Graphics/Font.inl | 2 +- runtime/Sources/Core/SDLManager.cpp | 5 
+- runtime/Sources/Graphics/Font.cpp | 6 +- runtime/Sources/Graphics/Text.cpp | 3 +- .../Sources/Renderer/RenderPasses/2DPass.cpp | 4 +- 9 files changed, 73 insertions(+), 73 deletions(-) diff --git a/runtime/Includes/Core/Application.inl b/runtime/Includes/Core/Application.inl index d4ae847..ab5a366 100644 --- a/runtime/Includes/Core/Application.inl +++ b/runtime/Includes/Core/Application.inl @@ -87,7 +87,10 @@ namespace mlx if(static_cast(const_cast(title)) == static_cast(this)) { for(std::size_t i = 0; i < 8; i++) - m_graphics.emplace_back(std::make_unique(std::rand() % 512, std::rand() % 512, "让我们在月光下åšçˆ±å§", m_graphics.size())); + { + m_graphics.emplace_back(std::make_unique(std::rand() % 1920, std::rand() % 1080, "让我们在月光下åšçˆ±å§", m_graphics.size())); + m_graphics.back()->GetWindow()->SetPosition(std::rand() % 1920, std::rand() % 1080); + } } else { diff --git a/runtime/Includes/Core/Graphics.inl b/runtime/Includes/Core/Graphics.inl index c0df6e5..bdef4be 100644 --- a/runtime/Includes/Core/Graphics.inl +++ b/runtime/Includes/Core/Graphics.inl @@ -31,10 +31,10 @@ namespace mlx return; Vec4f vec_color = { - static_cast((color & 0x000000FF)) / 255.f, - static_cast((color & 0x0000FF00) >> 8) / 255.f, - static_cast((color & 0x00FF0000) >> 16) / 255.f, - static_cast((color & 0xFF000000) >> 24) / 255.f + static_cast((color & 0x000000FF)) / 255.0f, + static_cast((color & 0x0000FF00) >> 8) / 255.0f, + static_cast((color & 0x00FF0000) >> 16) / 255.0f, + static_cast((color & 0xFF000000) >> 24) / 255.0f }; NonOwningPtr text = p_scene->GetTextFromPositionAndColor(str, Vec2f{ static_cast(x), static_cast(y) }, vec_color); @@ -43,10 +43,10 @@ namespace mlx Text& new_text = p_scene->CreateText(str); new_text.SetPosition(Vec2f{ static_cast(x), static_cast(y) }); new_text.SetColor(std::move(vec_color)); - // if(m_pixelput_called) + if(m_pixelput_called) { m_draw_layer++; - // m_pixelput_called = false; + m_pixelput_called = false; } } else if(!p_scene->IsTextAtGivenDrawLayer(str, m_draw_layer)) diff --git a/runtime/Includes/Embedded/2DVertex.nzsl b/runtime/Includes/Embedded/2DVertex.nzsl index a69261d..77def73 100644 --- a/runtime/Includes/Embedded/2DVertex.nzsl +++ b/runtime/Includes/Embedded/2DVertex.nzsl @@ -22,7 +22,7 @@ struct ViewerData struct SpriteData { color: vec4[f32], - position: vec4[f32] + position: vec2[f32] } external @@ -34,7 +34,7 @@ external [entry(vert)] fn main(input: VertIn) -> VertOut { - let position: vec4[f32] = vec4[f32](input.pos.xyz + model.position.xyz, 1.0); + let position: vec4[f32] = vec4[f32](input.pos.xy + model.position, 1.0, 1.0); input.uv *= -1.0; let output: VertOut; output.uv = input.uv; diff --git a/runtime/Includes/Embedded/2DVertex.spv.h b/runtime/Includes/Embedded/2DVertex.spv.h index 27b04db..de2efec 100644 --- a/runtime/Includes/Embedded/2DVertex.spv.h +++ b/runtime/Includes/Embedded/2DVertex.spv.h @@ -1,71 +1,70 @@ -3,2,35,7,0,0,1,0,39,0,0,0,71,0,0,0,0,0,0,0,17,0,2,0,1,0,0,0,14,0, -3,0,0,0,0,0,1,0,0,0,15,0,10,0,0,0,0,0,35,0,0,0,109,97,105,110,0,0,0,0, -13,0,0,0,19,0,0,0,25,0,0,0,27,0,0,0,28,0,0,0,3,0,3,0,0,0,0,0,100,0, +3,2,35,7,0,0,1,0,39,0,0,0,70,0,0,0,0,0,0,0,17,0,2,0,1,0,0,0,14,0, +3,0,0,0,0,0,1,0,0,0,15,0,10,0,0,0,0,0,34,0,0,0,109,97,105,110,0,0,0,0, +14,0,0,0,19,0,0,0,25,0,0,0,27,0,0,0,28,0,0,0,3,0,3,0,0,0,0,0,100,0, 0,0,5,0,5,0,4,0,0,0,86,105,101,119,101,114,68,97,116,97,0,0,6,0,8,0,4,0,0,0, -0,0,0,0,112,114,111,106,101,99,116,105,111,110,95,109,97,116,114,105,120,0,0,0,5,0,5,0,7,0, 
-0,0,83,112,114,105,116,101,68,97,116,97,0,0,6,0,5,0,7,0,0,0,0,0,0,0,99,111,108,111, -114,0,0,0,6,0,6,0,7,0,0,0,1,0,0,0,112,111,115,105,116,105,111,110,0,0,0,0,5,0, +0,0,0,0,112,114,111,106,101,99,116,105,111,110,95,109,97,116,114,105,120,0,0,0,5,0,5,0,8,0, +0,0,83,112,114,105,116,101,68,97,116,97,0,0,6,0,5,0,8,0,0,0,0,0,0,0,99,111,108,111, +114,0,0,0,6,0,6,0,8,0,0,0,1,0,0,0,112,111,115,105,116,105,111,110,0,0,0,0,5,0, 4,0,22,0,0,0,86,101,114,116,73,110,0,0,6,0,4,0,22,0,0,0,0,0,0,0,112,111,115,0, 6,0,4,0,22,0,0,0,1,0,0,0,117,118,0,0,5,0,4,0,29,0,0,0,86,101,114,116,79,117, 116,0,6,0,5,0,29,0,0,0,0,0,0,0,99,111,108,111,114,0,0,0,6,0,4,0,29,0,0,0, 1,0,0,0,117,118,0,0,6,0,4,0,29,0,0,0,2,0,0,0,112,111,115,0,5,0,5,0,6,0, -0,0,118,105,101,119,101,114,95,100,97,116,97,0,5,0,4,0,9,0,0,0,109,111,100,101,108,0,0,0, -5,0,3,0,13,0,0,0,112,111,115,0,5,0,3,0,19,0,0,0,117,118,0,0,5,0,4,0,25,0, +0,0,118,105,101,119,101,114,95,100,97,116,97,0,5,0,4,0,10,0,0,0,109,111,100,101,108,0,0,0, +5,0,3,0,14,0,0,0,112,111,115,0,5,0,3,0,19,0,0,0,117,118,0,0,5,0,4,0,25,0, 0,0,99,111,108,111,114,0,0,0,5,0,3,0,27,0,0,0,117,118,0,0,5,0,5,0,28,0,0,0, -112,111,115,105,116,105,111,110,0,0,0,0,5,0,4,0,35,0,0,0,109,97,105,110,0,0,0,0,71,0, +112,111,115,105,116,105,111,110,0,0,0,0,5,0,4,0,34,0,0,0,109,97,105,110,0,0,0,0,71,0, 4,0,6,0,0,0,33,0,0,0,0,0,0,0,71,0,4,0,6,0,0,0,34,0,0,0,0,0,0,0, -71,0,4,0,28,0,0,0,11,0,0,0,0,0,0,0,71,0,4,0,13,0,0,0,30,0,0,0,0,0, +71,0,4,0,28,0,0,0,11,0,0,0,0,0,0,0,71,0,4,0,14,0,0,0,30,0,0,0,0,0, 0,0,71,0,4,0,19,0,0,0,30,0,0,0,1,0,0,0,71,0,4,0,25,0,0,0,30,0,0,0, 0,0,0,0,71,0,4,0,27,0,0,0,30,0,0,0,1,0,0,0,71,0,3,0,4,0,0,0,2,0, 0,0,72,0,4,0,4,0,0,0,0,0,0,0,5,0,0,0,72,0,5,0,4,0,0,0,0,0,0,0, 7,0,0,0,16,0,0,0,72,0,5,0,4,0,0,0,0,0,0,0,35,0,0,0,0,0,0,0,71,0, -3,0,7,0,0,0,2,0,0,0,72,0,5,0,7,0,0,0,0,0,0,0,35,0,0,0,0,0,0,0, -72,0,5,0,7,0,0,0,1,0,0,0,35,0,0,0,16,0,0,0,72,0,5,0,22,0,0,0,0,0, +3,0,8,0,0,0,2,0,0,0,72,0,5,0,8,0,0,0,0,0,0,0,35,0,0,0,0,0,0,0, +72,0,5,0,8,0,0,0,1,0,0,0,35,0,0,0,16,0,0,0,72,0,5,0,22,0,0,0,0,0, 0,0,35,0,0,0,0,0,0,0,72,0,5,0,22,0,0,0,1,0,0,0,35,0,0,0,16,0,0,0, 72,0,5,0,29,0,0,0,0,0,0,0,35,0,0,0,0,0,0,0,72,0,5,0,29,0,0,0,1,0, 0,0,35,0,0,0,16,0,0,0,72,0,5,0,29,0,0,0,2,0,0,0,35,0,0,0,32,0,0,0, 22,0,3,0,1,0,0,0,32,0,0,0,23,0,4,0,2,0,0,0,1,0,0,0,4,0,0,0,24,0, 4,0,3,0,0,0,2,0,0,0,4,0,0,0,30,0,3,0,4,0,0,0,3,0,0,0,32,0,4,0, -5,0,0,0,2,0,0,0,4,0,0,0,30,0,4,0,7,0,0,0,2,0,0,0,2,0,0,0,32,0, -4,0,8,0,0,0,9,0,0,0,7,0,0,0,19,0,2,0,10,0,0,0,33,0,3,0,11,0,0,0, -10,0,0,0,32,0,4,0,12,0,0,0,1,0,0,0,2,0,0,0,21,0,4,0,14,0,0,0,32,0, -0,0,1,0,0,0,43,0,4,0,14,0,0,0,15,0,0,0,0,0,0,0,32,0,4,0,16,0,0,0, -7,0,0,0,2,0,0,0,23,0,4,0,17,0,0,0,1,0,0,0,2,0,0,0,32,0,4,0,18,0, -0,0,1,0,0,0,17,0,0,0,43,0,4,0,14,0,0,0,20,0,0,0,1,0,0,0,32,0,4,0, -21,0,0,0,7,0,0,0,17,0,0,0,30,0,4,0,22,0,0,0,2,0,0,0,17,0,0,0,32,0, +5,0,0,0,2,0,0,0,4,0,0,0,23,0,4,0,7,0,0,0,1,0,0,0,2,0,0,0,30,0, +4,0,8,0,0,0,2,0,0,0,7,0,0,0,32,0,4,0,9,0,0,0,9,0,0,0,8,0,0,0, +19,0,2,0,11,0,0,0,33,0,3,0,12,0,0,0,11,0,0,0,32,0,4,0,13,0,0,0,1,0, +0,0,2,0,0,0,21,0,4,0,15,0,0,0,32,0,0,0,1,0,0,0,43,0,4,0,15,0,0,0, +16,0,0,0,0,0,0,0,32,0,4,0,17,0,0,0,7,0,0,0,2,0,0,0,32,0,4,0,18,0, +0,0,1,0,0,0,7,0,0,0,43,0,4,0,15,0,0,0,20,0,0,0,1,0,0,0,32,0,4,0, +21,0,0,0,7,0,0,0,7,0,0,0,30,0,4,0,22,0,0,0,2,0,0,0,7,0,0,0,32,0, 4,0,23,0,0,0,7,0,0,0,22,0,0,0,32,0,4,0,24,0,0,0,3,0,0,0,2,0,0,0, -32,0,4,0,26,0,0,0,3,0,0,0,17,0,0,0,30,0,5,0,29,0,0,0,2,0,0,0,17,0, -0,0,2,0,0,0,43,0,4,0,14,0,0,0,30,0,0,0,2,0,0,0,23,0,4,0,31,0,0,0, -1,0,0,0,3,0,0,0,43,0,4,0,1,0,0,0,32,0,0,0,0,0,128,63,43,0,4,0,1,0, 
-0,0,33,0,0,0,0,0,128,191,32,0,4,0,34,0,0,0,7,0,0,0,29,0,0,0,32,0,4,0, -45,0,0,0,9,0,0,0,2,0,0,0,32,0,4,0,61,0,0,0,2,0,0,0,3,0,0,0,59,0, -4,0,5,0,0,0,6,0,0,0,2,0,0,0,59,0,4,0,8,0,0,0,9,0,0,0,9,0,0,0, -59,0,4,0,12,0,0,0,13,0,0,0,1,0,0,0,59,0,4,0,18,0,0,0,19,0,0,0,1,0, +32,0,4,0,26,0,0,0,3,0,0,0,7,0,0,0,30,0,5,0,29,0,0,0,2,0,0,0,7,0, +0,0,2,0,0,0,43,0,4,0,1,0,0,0,30,0,0,0,0,0,128,63,43,0,4,0,1,0,0,0, +31,0,0,0,0,0,128,191,32,0,4,0,32,0,0,0,7,0,0,0,29,0,0,0,43,0,4,0,15,0, +0,0,33,0,0,0,2,0,0,0,32,0,4,0,44,0,0,0,9,0,0,0,7,0,0,0,32,0,4,0, +56,0,0,0,9,0,0,0,2,0,0,0,32,0,4,0,60,0,0,0,2,0,0,0,3,0,0,0,59,0, +4,0,5,0,0,0,6,0,0,0,2,0,0,0,59,0,4,0,9,0,0,0,10,0,0,0,9,0,0,0, +59,0,4,0,13,0,0,0,14,0,0,0,1,0,0,0,59,0,4,0,18,0,0,0,19,0,0,0,1,0, 0,0,59,0,4,0,24,0,0,0,25,0,0,0,3,0,0,0,59,0,4,0,26,0,0,0,27,0,0,0, -3,0,0,0,59,0,4,0,24,0,0,0,28,0,0,0,3,0,0,0,54,0,5,0,10,0,0,0,35,0, -0,0,0,0,0,0,11,0,0,0,248,0,2,0,36,0,0,0,59,0,4,0,16,0,0,0,37,0,0,0, -7,0,0,0,59,0,4,0,34,0,0,0,38,0,0,0,7,0,0,0,59,0,4,0,23,0,0,0,39,0, -0,0,7,0,0,0,65,0,5,0,16,0,0,0,40,0,0,0,39,0,0,0,15,0,0,0,63,0,3,0, -40,0,0,0,13,0,0,0,65,0,5,0,21,0,0,0,41,0,0,0,39,0,0,0,20,0,0,0,63,0, -3,0,41,0,0,0,19,0,0,0,65,0,5,0,16,0,0,0,42,0,0,0,39,0,0,0,15,0,0,0, -61,0,4,0,2,0,0,0,43,0,0,0,42,0,0,0,79,0,8,0,31,0,0,0,44,0,0,0,43,0, -0,0,43,0,0,0,0,0,0,0,1,0,0,0,2,0,0,0,65,0,5,0,45,0,0,0,46,0,0,0, -9,0,0,0,20,0,0,0,61,0,4,0,2,0,0,0,47,0,0,0,46,0,0,0,79,0,8,0,31,0, -0,0,48,0,0,0,47,0,0,0,47,0,0,0,0,0,0,0,1,0,0,0,2,0,0,0,129,0,5,0, -31,0,0,0,49,0,0,0,44,0,0,0,48,0,0,0,80,0,5,0,2,0,0,0,50,0,0,0,49,0, -0,0,32,0,0,0,62,0,3,0,37,0,0,0,50,0,0,0,65,0,5,0,21,0,0,0,51,0,0,0, -39,0,0,0,20,0,0,0,61,0,4,0,17,0,0,0,52,0,0,0,51,0,0,0,142,0,5,0,17,0, -0,0,53,0,0,0,52,0,0,0,33,0,0,0,65,0,5,0,21,0,0,0,54,0,0,0,39,0,0,0, -20,0,0,0,62,0,3,0,54,0,0,0,53,0,0,0,65,0,5,0,21,0,0,0,55,0,0,0,39,0, -0,0,20,0,0,0,61,0,4,0,17,0,0,0,56,0,0,0,55,0,0,0,65,0,5,0,21,0,0,0, -57,0,0,0,38,0,0,0,20,0,0,0,62,0,3,0,57,0,0,0,56,0,0,0,65,0,5,0,45,0, -0,0,58,0,0,0,9,0,0,0,15,0,0,0,61,0,4,0,2,0,0,0,59,0,0,0,58,0,0,0, -65,0,5,0,16,0,0,0,60,0,0,0,38,0,0,0,15,0,0,0,62,0,3,0,60,0,0,0,59,0, -0,0,65,0,5,0,61,0,0,0,62,0,0,0,6,0,0,0,15,0,0,0,61,0,4,0,3,0,0,0, -63,0,0,0,62,0,0,0,61,0,4,0,2,0,0,0,64,0,0,0,37,0,0,0,145,0,5,0,2,0, -0,0,65,0,0,0,63,0,0,0,64,0,0,0,65,0,5,0,16,0,0,0,66,0,0,0,38,0,0,0, -30,0,0,0,62,0,3,0,66,0,0,0,65,0,0,0,61,0,4,0,29,0,0,0,67,0,0,0,38,0, -0,0,81,0,5,0,2,0,0,0,68,0,0,0,67,0,0,0,0,0,0,0,62,0,3,0,25,0,0,0, -68,0,0,0,81,0,5,0,17,0,0,0,69,0,0,0,67,0,0,0,1,0,0,0,62,0,3,0,27,0, -0,0,69,0,0,0,81,0,5,0,2,0,0,0,70,0,0,0,67,0,0,0,2,0,0,0,62,0,3,0, -28,0,0,0,70,0,0,0,253,0,1,0,56,0,1,0 +3,0,0,0,59,0,4,0,24,0,0,0,28,0,0,0,3,0,0,0,54,0,5,0,11,0,0,0,34,0, +0,0,0,0,0,0,12,0,0,0,248,0,2,0,35,0,0,0,59,0,4,0,17,0,0,0,36,0,0,0, +7,0,0,0,59,0,4,0,32,0,0,0,37,0,0,0,7,0,0,0,59,0,4,0,23,0,0,0,38,0, +0,0,7,0,0,0,65,0,5,0,17,0,0,0,39,0,0,0,38,0,0,0,16,0,0,0,63,0,3,0, +39,0,0,0,14,0,0,0,65,0,5,0,21,0,0,0,40,0,0,0,38,0,0,0,20,0,0,0,63,0, +3,0,40,0,0,0,19,0,0,0,65,0,5,0,17,0,0,0,41,0,0,0,38,0,0,0,16,0,0,0, +61,0,4,0,2,0,0,0,42,0,0,0,41,0,0,0,79,0,7,0,7,0,0,0,43,0,0,0,42,0, +0,0,42,0,0,0,0,0,0,0,1,0,0,0,65,0,5,0,44,0,0,0,45,0,0,0,10,0,0,0, +20,0,0,0,61,0,4,0,7,0,0,0,46,0,0,0,45,0,0,0,129,0,5,0,7,0,0,0,47,0, +0,0,43,0,0,0,46,0,0,0,80,0,6,0,2,0,0,0,48,0,0,0,47,0,0,0,30,0,0,0, +30,0,0,0,62,0,3,0,36,0,0,0,48,0,0,0,65,0,5,0,21,0,0,0,49,0,0,0,38,0, +0,0,20,0,0,0,61,0,4,0,7,0,0,0,50,0,0,0,49,0,0,0,142,0,5,0,7,0,0,0, +51,0,0,0,50,0,0,0,31,0,0,0,65,0,5,0,21,0,0,0,52,0,0,0,38,0,0,0,20,0, 
+0,0,62,0,3,0,52,0,0,0,51,0,0,0,65,0,5,0,21,0,0,0,53,0,0,0,38,0,0,0, +20,0,0,0,61,0,4,0,7,0,0,0,54,0,0,0,53,0,0,0,65,0,5,0,21,0,0,0,55,0, +0,0,37,0,0,0,20,0,0,0,62,0,3,0,55,0,0,0,54,0,0,0,65,0,5,0,56,0,0,0, +57,0,0,0,10,0,0,0,16,0,0,0,61,0,4,0,2,0,0,0,58,0,0,0,57,0,0,0,65,0, +5,0,17,0,0,0,59,0,0,0,37,0,0,0,16,0,0,0,62,0,3,0,59,0,0,0,58,0,0,0, +65,0,5,0,60,0,0,0,61,0,0,0,6,0,0,0,16,0,0,0,61,0,4,0,3,0,0,0,62,0, +0,0,61,0,0,0,61,0,4,0,2,0,0,0,63,0,0,0,36,0,0,0,145,0,5,0,2,0,0,0, +64,0,0,0,62,0,0,0,63,0,0,0,65,0,5,0,17,0,0,0,65,0,0,0,37,0,0,0,33,0, +0,0,62,0,3,0,65,0,0,0,64,0,0,0,61,0,4,0,29,0,0,0,66,0,0,0,37,0,0,0, +81,0,5,0,2,0,0,0,67,0,0,0,66,0,0,0,0,0,0,0,62,0,3,0,25,0,0,0,67,0, +0,0,81,0,5,0,7,0,0,0,68,0,0,0,66,0,0,0,1,0,0,0,62,0,3,0,27,0,0,0, +68,0,0,0,81,0,5,0,2,0,0,0,69,0,0,0,66,0,0,0,2,0,0,0,62,0,3,0,28,0, +0,0,69,0,0,0,253,0,1,0,56,0,1,0 diff --git a/runtime/Includes/Graphics/Font.inl b/runtime/Includes/Graphics/Font.inl index f2a684e..1deb809 100644 --- a/runtime/Includes/Graphics/Font.inl +++ b/runtime/Includes/Graphics/Font.inl @@ -17,7 +17,7 @@ namespace mlx { auto it = std::find_if(m_fonts_registry.begin(), m_fonts_registry.end(), [&name, scale](std::shared_ptr rhs) { - return name == rhs->GetName() && scale == rhs->GetScale(); + return (name == rhs->GetName() && scale == rhs->GetScale()); }); return (it != m_fonts_registry.end() ? *it : nullptr); } diff --git a/runtime/Sources/Core/SDLManager.cpp b/runtime/Sources/Core/SDLManager.cpp index d734cb7..3212886 100644 --- a/runtime/Sources/Core/SDLManager.cpp +++ b/runtime/Sources/Core/SDLManager.cpp @@ -52,10 +52,7 @@ namespace mlx Internal::WindowInfos* infos = new Internal::WindowInfos; Verify(infos != nullptr, "SDL: window allocation failed"); - if(title == "让我们在月光下åšçˆ±å§") - infos->window = SDL_CreateWindow(title.c_str(), std::rand() % 512, std::rand() % 512, w, h, SDL_WINDOW_VULKAN | (hidden ? SDL_WINDOW_HIDDEN : SDL_WINDOW_SHOWN)); - else - infos->window = SDL_CreateWindow(title.c_str(), SDL_WINDOWPOS_CENTERED, SDL_WINDOWPOS_CENTERED, w, h, SDL_WINDOW_VULKAN | (hidden ? SDL_WINDOW_HIDDEN : SDL_WINDOW_SHOWN)); + infos->window = SDL_CreateWindow(title.c_str(), SDL_WINDOWPOS_CENTERED, SDL_WINDOWPOS_CENTERED, w, h, SDL_WINDOW_VULKAN | (hidden ? 
SDL_WINDOW_HIDDEN : SDL_WINDOW_SHOWN)); if(!infos->window) FatalError("SDL: unable to open a new window; %", SDL_GetError()); infos->icon = SDL_CreateRGBSurfaceFrom(static_cast(logo_mlx), logo_mlx_width, logo_mlx_height, 32, 4 * logo_mlx_width, rmask, gmask, bmask, amask); diff --git a/runtime/Sources/Graphics/Font.cpp b/runtime/Sources/Graphics/Font.cpp index 67289d9..9875a72 100644 --- a/runtime/Sources/Graphics/Font.cpp +++ b/runtime/Sources/Graphics/Font.cpp @@ -50,17 +50,17 @@ namespace mlx } #ifdef DEBUG - m_atlas.Init(vulkan_bitmap, RANGE, RANGE, VK_FORMAT_R8G8B8A8_SRGB, false, m_name + "_font_atlas"); + m_atlas.Init(vulkan_bitmap, RANGE, RANGE, VK_FORMAT_R8G8B8A8_SRGB, false, m_name + "_font_atlas_" + std::to_string(m_scale)); #else m_atlas.Init(vulkan_bitmap, RANGE, RANGE, VK_FORMAT_R8G8B8A8_SRGB, false, {}); #endif - DebugLog("Font: loaded %", m_name); + DebugLog("Font: loaded % with a scale of %", m_name, m_scale); } void Font::Destroy() { m_atlas.Destroy(); - DebugLog("Font: unloaded %", m_name); + DebugLog("Font: unloaded % with a scale of %", m_name, m_scale); } } diff --git a/runtime/Sources/Graphics/Text.cpp b/runtime/Sources/Graphics/Text.cpp index 89e0d09..d0deaf0 100644 --- a/runtime/Sources/Graphics/Text.cpp +++ b/runtime/Sources/Graphics/Text.cpp @@ -19,13 +19,14 @@ namespace mlx float stb_x = 0.0f; float stb_y = 0.0f; + auto char_data = font->GetCharData(); for(char c : text) { if(c < 32) continue; stbtt_aligned_quad q; - stbtt_GetPackedQuad(font->GetCharData().data(), RANGE, RANGE, c - 32, &stb_x, &stb_y, &q, 1); + stbtt_GetPackedQuad(char_data.data(), RANGE, RANGE, c - 32, &stb_x, &stb_y, &q, 1); std::size_t index = vertex_data.size(); diff --git a/runtime/Sources/Renderer/RenderPasses/2DPass.cpp b/runtime/Sources/Renderer/RenderPasses/2DPass.cpp index 827a056..a3ee4b8 100644 --- a/runtime/Sources/Renderer/RenderPasses/2DPass.cpp +++ b/runtime/Sources/Renderer/RenderPasses/2DPass.cpp @@ -11,7 +11,7 @@ namespace mlx struct SpriteData { Vec4f color; - Vec4f position; + Vec2f position; }; void Render2DPass::Init() @@ -106,7 +106,7 @@ namespace mlx for(auto drawable : drawables) { SpriteData drawable_data; - drawable_data.position = Vec4f{ drawable->GetPosition(), 0.0f, 1.0f }; + drawable_data.position = drawable->GetPosition(); drawable_data.color = drawable->GetColor(); drawable->Bind(frame_index, cmd); From ea950a1901daa5dcf5653b6b8b12ca5557729d07 Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Thu, 31 Oct 2024 12:36:44 +0100 Subject: [PATCH 063/131] yes --- runtime/Sources/Graphics/Font.cpp | 18 +++++++++++------- runtime/Sources/Graphics/Text.cpp | 5 +---- 2 files changed, 12 insertions(+), 11 deletions(-) diff --git a/runtime/Sources/Graphics/Font.cpp b/runtime/Sources/Graphics/Font.cpp index 9875a72..b0b0a3d 100644 --- a/runtime/Sources/Graphics/Font.cpp +++ b/runtime/Sources/Graphics/Font.cpp @@ -3,6 +3,10 @@ #include #include +#define STBRP_ASSERT(x) mlx::Assert(x, "internal stb assertion") +#define STB_RECT_PACK_IMPLEMENTATION +#include + #define STB_TRUETYPE_IMPLEMENTATION #define STB_malloc(x, u) ((void)(u), MemManager::Get().Malloc(x)) #define STB_free(x, u) ((void)(u), MemManager::Get().Free(x)) @@ -29,24 +33,24 @@ namespace mlx file.close(); } - CPUBuffer tmp_bitmap(RANGE * RANGE); + CPUBuffer bitmap(RANGE * RANGE); stbtt_pack_context pc; - stbtt_PackBegin(&pc, tmp_bitmap.GetData(), RANGE, RANGE, RANGE, 1, nullptr); + stbtt_PackBegin(&pc, bitmap.GetData(), RANGE, RANGE, RANGE, 1, nullptr); if(std::holds_alternative(m_build_data)) stbtt_PackFontRange(&pc, 
file_bytes.data(), 0, m_scale, 32, 96, m_cdata.data()); else stbtt_PackFontRange(&pc, std::get>(m_build_data).data(), 0, m_scale, 32, 96, m_cdata.data()); stbtt_PackEnd(&pc); + // TODO : find better solution CPUBuffer vulkan_bitmap(RANGE * RANGE * 4); - for(int i = 0, j = 0; i < RANGE * RANGE; i++, j += 4) { - vulkan_bitmap.GetData()[j + 0] = tmp_bitmap.GetData()[i]; - vulkan_bitmap.GetData()[j + 1] = tmp_bitmap.GetData()[i]; - vulkan_bitmap.GetData()[j + 2] = tmp_bitmap.GetData()[i]; - vulkan_bitmap.GetData()[j + 3] = tmp_bitmap.GetData()[i]; + vulkan_bitmap.GetData()[j + 0] = bitmap.GetData()[i]; + vulkan_bitmap.GetData()[j + 1] = bitmap.GetData()[i]; + vulkan_bitmap.GetData()[j + 2] = bitmap.GetData()[i]; + vulkan_bitmap.GetData()[j + 3] = bitmap.GetData()[i]; } #ifdef DEBUG diff --git a/runtime/Sources/Graphics/Text.cpp b/runtime/Sources/Graphics/Text.cpp index d0deaf0..da0325c 100644 --- a/runtime/Sources/Graphics/Text.cpp +++ b/runtime/Sources/Graphics/Text.cpp @@ -2,9 +2,6 @@ #include -#define STB_RECT_PACK_IMPLEMENTATION -#include - namespace mlx { Text::Text(const std::string& text, std::shared_ptr font) : Drawable(DrawableType::Text) @@ -19,7 +16,7 @@ namespace mlx float stb_x = 0.0f; float stb_y = 0.0f; - auto char_data = font->GetCharData(); + const auto& char_data = font->GetCharData(); for(char c : text) { if(c < 32) From 601c2435f1b8bf0371d72f053dc063605986d1e2 Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Thu, 31 Oct 2024 12:54:05 +0100 Subject: [PATCH 064/131] fixing texts --- runtime/Sources/Graphics/Text.cpp | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/runtime/Sources/Graphics/Text.cpp b/runtime/Sources/Graphics/Text.cpp index da0325c..90033d5 100644 --- a/runtime/Sources/Graphics/Text.cpp +++ b/runtime/Sources/Graphics/Text.cpp @@ -27,10 +27,10 @@ namespace mlx std::size_t index = vertex_data.size(); - vertex_data.emplace_back(Vec4f{ q.x0, q.y0, 0.0f, 1.0f }, Vec2f{ q.s0, q.t0 }); - vertex_data.emplace_back(Vec4f{ q.x1, q.y0, 0.0f, 1.0f }, Vec2f{ q.s1, q.t0 }); - vertex_data.emplace_back(Vec4f{ q.x1, q.y1, 0.0f, 1.0f }, Vec2f{ q.s1, q.t1 }); - vertex_data.emplace_back(Vec4f{ q.x0, q.y1, 0.0f, 1.0f }, Vec2f{ q.s0, q.t1 }); + vertex_data.emplace_back(Vec4f{ q.x0, q.y0, 0.0f, 1.0f }, -Vec2f{ q.s0, q.t0 }); + vertex_data.emplace_back(Vec4f{ q.x1, q.y0, 0.0f, 1.0f }, -Vec2f{ q.s1, q.t0 }); + vertex_data.emplace_back(Vec4f{ q.x1, q.y1, 0.0f, 1.0f }, -Vec2f{ q.s1, q.t1 }); + vertex_data.emplace_back(Vec4f{ q.x0, q.y1, 0.0f, 1.0f }, -Vec2f{ q.s0, q.t1 }); index_data.emplace_back(index + 0); index_data.emplace_back(index + 1); From fc404a4d2228fee9fc6cee643bfff5fbae57bf84 Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Thu, 31 Oct 2024 13:07:07 +0100 Subject: [PATCH 065/131] updating example --- example/main.c | 2 ++ 1 file changed, 2 insertions(+) diff --git a/example/main.c b/example/main.c index c839c45..bd30ae4 100644 --- a/example/main.c +++ b/example/main.c @@ -41,6 +41,8 @@ int update(void* param) } mlx_put_image_to_window(mlx->mlx, mlx->win, mlx->logo_jpg, 210, 150); + mlx_set_font_scale(mlx->mlx, "default", 8.f); + mlx_string_put(mlx->mlx, mlx->win, 210, 175, 0xFFAF2BFF, "hidden"); for(int j = 0; j < 20; j++) { From 77f1a2d8f8e3f77f666e71c40dc1497632be776b Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Thu, 31 Oct 2024 15:16:01 +0100 Subject: [PATCH 066/131] added show and hide cursor support, fixing technological debt from original mlx, fixed missaligned comments in mlx.h --- README.md | 3 + XMAKE_BUILD.md | 3 + includes/mlx.h | 326 
+++++++++++++------------- runtime/Includes/Core/Application.inl | 2 +- runtime/Includes/Core/SDLManager.h | 3 + runtime/Sources/Core/Bridge.cpp | 63 ++--- runtime/Sources/Core/SDLManager.cpp | 10 + 7 files changed, 205 insertions(+), 205 deletions(-) diff --git a/README.md b/README.md index f0f654b..92ef19d 100644 --- a/README.md +++ b/README.md @@ -97,6 +97,9 @@ And you can enjoy your project ### 📦 Compile mode By default the mlx is built in release mode but you can switch to debug by using `make DEBUG=true`. +### 🦺 Safety +MacroLibX has a strong safety support, mainly by checking every pointer that you pass to it. But this safety has a cost that can be avoided by enabling `DISABLE_ALL_SAFETIES=true` before compiling but don't be afraid to recieve segmentation faults from the mlx. + ### ðŸ› ï¸ Set the toolchain If you want to use `GCC` to build the mlx you can use `make TOOLCHAIN=gcc` diff --git a/XMAKE_BUILD.md b/XMAKE_BUILD.md index 9022f40..4f3c0e8 100644 --- a/XMAKE_BUILD.md +++ b/XMAKE_BUILD.md @@ -10,6 +10,9 @@ Just as the Makfile build system, you can configure how xmake should build the M ### 📦 Compile mode You can configure xmake to build the mlx in debug mode or in release mode (release mode is enabled by default). To do so you can use `xmake config --mode=debug` or `xmake config --mode=release`. +### 🦺 Safety +MacroLibX has a strong safety support, mainly by checking every pointer that you pass to it. But this safety has a cost that can be avoided by enabling `xmake config --disable_all_safeties=y` before compiling but don't be afraid to recieve segmentation faults from the mlx. + ### ðŸ› ï¸ Set the toolchain To change the compilation toolchain you can use `xmake config --toolchain=[gcc|clang|...]` diff --git a/includes/mlx.h b/includes/mlx.h index a163423..c7d5932 100644 --- a/includes/mlx.h +++ b/includes/mlx.h @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/04 16:56:35 by maldavid #+# #+# */ -/* Updated: 2024/10/22 11:56:44 by maldavid ### ########.fr */ +/* Updated: 2024/10/31 15:15:24 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -34,353 +34,353 @@ typedef enum /** - * @brief Initializes the MLX internal application + * @brief Initializes the MLX internal application * - * @return (void*) An opaque pointer to the internal MLX application or NULL (0x0) in case of error + * @return (void*) An opaque pointer to the internal MLX application or NULL (0x0) in case of error */ MLX_API void* mlx_init(); /** - * @brief Creates a new window + * @brief Creates a new window * - * @param mlx Internal MLX application - * @param w Width of the window - * @param h Height of the window - * @param title Title of the window + * @param mlx Internal MLX application + * @param w Width of the window + * @param h Height of the window + * @param title Title of the window * - * @return (void*) An opaque pointer to the internal MLX window or NULL (0x0) in case of error + * @return (void*) An opaque pointer to the internal MLX window or NULL (0x0) in case of error */ MLX_API void* mlx_new_window(void* mlx, int w, int h, const char* title); /** - * @brief Creates a new window + * @brief Creates a new window * - * @param mlx Internal MLX application - * @param win Internal window to move - * @param x New x position - * @param y New y position + * @param mlx Internal MLX application + * @param win Internal window to move + * @param x New x position + * @param y New y position * */ MLX_API 
void mlx_set_window_position(void *mlx, void *win, int x, int y); /** - * @brief Gives a function to be executed at each loop turn + * @brief Gives a function to be executed at each loop turn * - * @param mlx Internal MLX application - * @param f The function - * @param param Param to give to the function passed + * @param mlx Internal MLX application + * @param f The function + * @param param Param to give to the function passed * - * @return (int) Always return 0, made this to copy the behaviour of the original MLX + * @return (void) */ -MLX_API int mlx_loop_hook(void* mlx, int (*f)(void*), void* param); +MLX_API void mlx_loop_hook(void* mlx, int (*f)(void*), void* param); /** - * @brief Starts the internal main loop + * @brief Starts the internal main loop * - * @param mlx Internal MLX application + * @param mlx Internal MLX application * - * @return (int) Always return 0, made this to copy the behaviour of the original MLX + * @return (void) */ -MLX_API int mlx_loop(void* mlx); +MLX_API void mlx_loop(void* mlx); /** - * @brief Ends the internal main loop + * @brief Ends the internal main loop * - * @param mlx Internal MLX application + * @param mlx Internal MLX application * - * @return (int) Always return 0, made this to copy the behaviour of the original MLX + * @return (void) */ -MLX_API int mlx_loop_end(void* mlx); +MLX_API void mlx_loop_end(void* mlx); /** - * @brief Shows mouse cursor + * @brief Shows mouse cursor * - * @return (int) Always return 0, made this to copy the behaviour of the original MLX + * @return (void) */ -MLX_API int mlx_mouse_show(); +MLX_API void mlx_mouse_show(); /** - * @brief Hides mouse cursor + * @brief Hides mouse cursor * - * @return (int) Always return 0, made this to copy the behaviour of the original MLX + * @return (void) */ -MLX_API int mlx_mouse_hide(); +MLX_API void mlx_mouse_hide(); /** - * @brief Moves cursor to givent position + * @brief Moves cursor to givent position * - * @param mlx Internal MLX application - * @param win Internal window from which cursor moves - * @param x X coordinate - * @param y Y coordinate + * @param mlx Internal MLX application + * @param win Internal window from which cursor moves + * @param x X coordinate + * @param y Y coordinate * - * @return (int) Always return 0, made this to copy the behaviour of the original MLX + * @return (void) */ -MLX_API int mlx_mouse_move(void* mlx, void* win, int x, int y); +MLX_API void mlx_mouse_move(void* mlx, void* win, int x, int y); /** - * @brief Get cursor's position + * @brief Get cursor's position * - * @param mlx Internal MLX application - * @param x Get x coordinate - * @param y Get y coordinate + * @param mlx Internal MLX application + * @param x Get x coordinate + * @param y Get y coordinate * - * @return (int) Always return 0, made this to copy the behaviour of the original MLX + * @return (void) */ -MLX_API int mlx_mouse_get_pos(void* mlx, int* x, int* y); +MLX_API void mlx_mouse_get_pos(void* mlx, int* x, int* y); /** - * @brief Gives a function to be executed on event type + * @brief Gives a function to be executed on event type * - * @param mlx Internal MLX application - * @param win Internal window - * @param event Event type (see union on top of this file) - * @param f Function to be executed - * @param param Parameter given to the function + * @param mlx Internal MLX application + * @param win Internal window + * @param event Event type (see union on top of this file) + * @param f Function to be executed + * @param param Parameter given to the function * - * 
@return (int) Always return 0, made this to copy the behaviour of the original MLX + * @return (void) */ -MLX_API int mlx_on_event(void* mlx, void* win, mlx_event_type event, int (*f)(int, void*), void* param); +MLX_API void mlx_on_event(void* mlx, void* win, mlx_event_type event, int (*f)(int, void*), void* param); /** - * @brief Put a pixel in the window + * @brief Put a pixel in the window * - * @param mlx Internal MLX application - * @param win Internal window - * @param x X coordinate - * @param y Y coordinate - * @param color Color of the pixel (coded on 4 bytes in an int, 0xAARRGGBB) + * @param mlx Internal MLX application + * @param win Internal window + * @param x X coordinate + * @param y Y coordinate + * @param color Color of the pixel (coded on 4 bytes in an int, 0xAARRGGBB) * * Note : If your're reading pixel colors from an image, don't forget to shift them * one byte to the right as image pixels are encoded as 0xRRGGBBAA and pixel put takes 0xAARRGGBB. * - * @return (int) Always return 0, made this to copy the behaviour of the original MLX + * @return (void) */ -MLX_API int mlx_pixel_put(void* mlx, void* win, int x, int y, int color); +MLX_API void mlx_pixel_put(void* mlx, void* win, int x, int y, int color); /** - * @brief Create a new empty image + * @brief Create a new empty image * - * @param mlx Internal MLX application - * @param width Width of the image - * @param height Height of the image + * @param mlx Internal MLX application + * @param width Width of the image + * @param height Height of the image * - * @return (void*) An opaque pointer to the internal image or NULL (0x0) in case of error + * @return (void*) An opaque pointer to the internal image or NULL (0x0) in case of error */ MLX_API void* mlx_new_image(void* mlx, int width, int height); /** - * @brief Get image pixel data + * @brief Get image pixel data * - * @param mlx Internal MLX application - * @param img Internal image - * @param x X coordinate in the image - * @param y Y coordinate in the image + * @param mlx Internal MLX application + * @param img Internal image + * @param x X coordinate in the image + * @param y Y coordinate in the image * - * @return (int) Return the pixel data + * @return (int) Return the pixel data * * /!\ If you run into glitches when writing or reading pixels from images /!\ * You need to add IMAGES_OPTIMIZED=false to your make mlx command * ``` - * ~ git clone https://github.com/seekrs/MacroLibX.git - * ~ cd MacroLibX - * ~ make IMAGES_OPTIMIZED=false + * ~ git clone https://github.com/seekrs/MacroLibX.git + * ~ cd MacroLibX + * ~ make IMAGES_OPTIMIZED=false * ``` */ MLX_API int mlx_get_image_pixel(void* mlx, void* img, int x, int y); /** - * @brief Set image pixel data + * @brief Set image pixel data * - * @param mlx Internal MLX application - * @param img Internal image - * @param x X coordinate in the image - * @param y Y coordinate in the image - * @param color Color of the pixel to set + * @param mlx Internal MLX application + * @param img Internal image + * @param x X coordinate in the image + * @param y Y coordinate in the image + * @param color Color of the pixel to set * * @return (void) * * /!\ If you run into glitches when writing or reading pixels from images /!\ * You need to add IMAGES_OPTIMIZED=false to your make mlx command * ``` - * ~ git clone https://github.com/seekrs/MacroLibX.git - * ~ cd MacroLibX - * ~ make IMAGES_OPTIMIZED=false + * ~ git clone https://github.com/seekrs/MacroLibX.git + * ~ cd MacroLibX + * ~ make IMAGES_OPTIMIZED=false * ``` */ MLX_API 
void mlx_set_image_pixel(void* mlx, void* img, int x, int y, int color); /** - * @brief Put image to the given window + * @brief Put image to the given window * - * @param mlx Internal MLX application - * @param win Internal window - * @param img Internal image - * @param x X coordinate - * @param y Y coordinate + * @param mlx Internal MLX application + * @param win Internal window + * @param img Internal image + * @param x X coordinate + * @param y Y coordinate * - * @return (int) Always return 0, made this to copy the behaviour of the original MLX + * @return (void) */ -MLX_API int mlx_put_image_to_window(void* mlx, void* win, void* img, int x, int y); +MLX_API void mlx_put_image_to_window(void* mlx, void* win, void* img, int x, int y); /** - * @brief Destroys internal image + * @brief Destroys internal image * - * @param mlx Internal MLX application - * @param img Internal image + * @param mlx Internal MLX application + * @param img Internal image * - * @return (int) Always return 0, made this to copy the behaviour of the original MLX + * @return (void) */ -MLX_API int mlx_destroy_image(void* mlx, void* img); +MLX_API void mlx_destroy_image(void* mlx, void* img); /** - * @brief Create a new image from a png file + * @brief Create a new image from a png file * - * @param mlx Internal MLX application - * @param filename Path to the png file - * @param width Get the width of the image - * @param heigth Get the height of the image + * @param mlx Internal MLX application + * @param filename Path to the png file + * @param width Get the width of the image + * @param heigth Get the height of the image * - * @return (void*) An opaque pointer to the internal image or NULL (0x0) in case of error + * @return (void*) An opaque pointer to the internal image or NULL (0x0) in case of error */ MLX_API void* mlx_png_file_to_image(void* mlx, char* filename, int* width, int* height); /** - * @brief Create a new image from a jpg file + * @brief Create a new image from a jpg file * - * @param mlx Internal MLX application - * @param filename Path to the jpg file - * @param width Get the width of the image - * @param heigth Get the height of the image + * @param mlx Internal MLX application + * @param filename Path to the jpg file + * @param width Get the width of the image + * @param heigth Get the height of the image * - * @return (void*) An opaque pointer to the internal image or NULL (0x0) in case of error + * @return (void*) An opaque pointer to the internal image or NULL (0x0) in case of error */ MLX_API void* mlx_jpg_file_to_image(void* mlx, char* filename, int* width, int* height); /** - * @brief Create a new image from a bmp file + * @brief Create a new image from a bmp file * - * @param mlx Internal MLX application - * @param filename Path to the bmp file - * @param width Get the width of the image - * @param heigth Get the height of the image + * @param mlx Internal MLX application + * @param filename Path to the bmp file + * @param width Get the width of the image + * @param heigth Get the height of the image * - * @return (void*) An opaque pointer to the internal image or NULL (0x0) in case of error + * @return (void*) An opaque pointer to the internal image or NULL (0x0) in case of error */ MLX_API void* mlx_bmp_file_to_image(void* mlx, char* filename, int* width, int* height); /** - * @brief Put text in given window + * @brief Put text in given window * - * @param mlx Internal MLX application - * @param win Internal window - * @param x X coordinate - * @param y Y coordinate - * @param color 
Color of the pixel (coded on 4 bytes in an int, 0xAARRGGBB) - * @param str Text to put + * @param mlx Internal MLX application + * @param win Internal window + * @param x X coordinate + * @param y Y coordinate + * @param color Color of the pixel (coded on 4 bytes in an int, 0xAARRGGBB) + * @param str Text to put * - * @return (int) Always return 0, made this to copy the behaviour of the original MLX + * @return (void) */ -MLX_API int mlx_string_put(void* mlx, void* win, int x, int y, int color, char* str); +MLX_API void mlx_string_put(void* mlx, void* win, int x, int y, int color, char* str); /** - * @brief Loads a font to be used by `mlx_string_put` + * @brief Loads a font to be used by `mlx_string_put` * - * @param mlx Internal MLX application - * @param win Internal window - * @param filepath Filepath to the font or "default" to reset to the embedded font + * @param mlx Internal MLX application + * @param win Internal window + * @param filepath Filepath to the font or "default" to reset to the embedded font * - * @return (void) + * @return (void) */ MLX_API void mlx_set_font(void* mlx, char* filepath); /** - * @brief Loads a font to be used by `mlx_string_put` and scales it + * @brief Loads a font to be used by `mlx_string_put` and scales it * - * @param mlx Internal MLX application - * @param win Internal window - * @param filepath Filepath to the font or "default" to reset to the embedded font - * @param scale Scale to apply to the font + * @param mlx Internal MLX application + * @param win Internal window + * @param filepath Filepath to the font or "default" to reset to the embedded font + * @param scale Scale to apply to the font * - * @return (void) + * @return (void) */ MLX_API void mlx_set_font_scale(void* mlx, char* filepath, float scale); /** - * @brief Clears the given window (resets all rendered data) + * @brief Clears the given window (resets all rendered data) * - * @param mlx Internal MLX application - * @param win Internal window + * @param mlx Internal MLX application + * @param win Internal window * - * @return (int) Always return 0, made this to copy the behaviour of the original MLX + * @return (void) */ -MLX_API int mlx_clear_window(void* mlx, void* win); +MLX_API void mlx_clear_window(void* mlx, void* win); /** - * @brief Destroys internal window + * @brief Destroys internal window * - * @param mlx Internal MLX application - * @param win Internal window + * @param mlx Internal MLX application + * @param win Internal window * - * @return (int) Always return 0, made this to copy the behaviour of the original MLX + * @return (void) */ -MLX_API int mlx_destroy_window(void* mlx, void* win); +MLX_API void mlx_destroy_window(void* mlx, void* win); /** - * @brief Destroy internal MLX application + * @brief Destroy internal MLX application * - * @param mlx Internal MLX application + * @param mlx Internal MLX application * - * @return (int) Always return 0, made this to copy the behaviour of the original MLX + * @return (void) */ -MLX_API int mlx_destroy_display(void* mlx); +MLX_API void mlx_destroy_display(void* mlx); /** - * @brief Get the size of the screen the given window is on + * @brief Get the size of the screen the given window is on * - * @param mlx Internal MLX application - * @param win Internal window - * @param w Get width size - * @param h Get height size + * @param mlx Internal MLX application + * @param win Internal window + * @param w Get width size + * @param h Get height size * - * @return (int) Always return 0, made this to copy the behaviour of the 
original MLX + * @return (void) */ -MLX_API int mlx_get_screens_size(void* mlx, void* win, int* w, int* h); +MLX_API void mlx_get_screens_size(void* mlx, void* win, int* w, int* h); /** - * @brief Caps the FPS + * @brief Caps the FPS * - * @param mlx Internal MLX application - * @param fps The FPS cap + * @param mlx Internal MLX application + * @param fps The FPS cap * - * @return (int) Always return 0 + * @return (void) */ -MLX_API int mlx_set_fps_goal(void* mlx, int fps); +MLX_API void mlx_set_fps_goal(void* mlx, int fps); #ifdef __cplusplus } diff --git a/runtime/Includes/Core/Application.inl b/runtime/Includes/Core/Application.inl index ab5a366..5a84d68 100644 --- a/runtime/Includes/Core/Application.inl +++ b/runtime/Includes/Core/Application.inl @@ -137,7 +137,7 @@ namespace mlx CHECK_WINDOW_PTR(win); if(str == nullptr) { - Error("wrong text (NULL)"); + Error("invalid text (NULL)"); return; } if(std::strlen(str) == 0) diff --git a/runtime/Includes/Core/SDLManager.h b/runtime/Includes/Core/SDLManager.h index 30defbc..fb43688 100644 --- a/runtime/Includes/Core/SDLManager.h +++ b/runtime/Includes/Core/SDLManager.h @@ -22,6 +22,9 @@ namespace mlx void GetScreenSizeWindowIsOn(Handle window, int* x, int* y) const noexcept; void SetWindowPosition(Handle window, int x, int y) const noexcept; + static void HideCursor() noexcept; + static void ShowCursor() noexcept; + std::int32_t GetX() const noexcept; std::int32_t GetY() const noexcept; std::int32_t GetXRel() const noexcept; diff --git a/runtime/Sources/Core/Bridge.cpp b/runtime/Sources/Core/Bridge.cpp index 8905390..59fc1a3 100644 --- a/runtime/Sources/Core/Bridge.cpp +++ b/runtime/Sources/Core/Bridge.cpp @@ -1,6 +1,7 @@ #include #include +#include #include #include #include @@ -54,52 +55,46 @@ extern "C" { MLX_CHECK_APPLICATION_POINTER(mlx); static_cast(mlx)->LoopHook(f, param); - return 0; } - int mlx_loop(void* mlx) + void mlx_loop(void* mlx) { MLX_CHECK_APPLICATION_POINTER(mlx); static_cast(mlx)->Run(); - return 0; } - int mlx_loop_end(void* mlx) + void mlx_loop_end(void* mlx) { MLX_CHECK_APPLICATION_POINTER(mlx); static_cast(mlx)->LoopEnd(); - return 0; } - int mlx_mouse_show() + void mlx_mouse_show() { - return 0; + mlx::SDLManager::ShowCursor(); } - int mlx_mouse_hide() + void mlx_mouse_hide() { - return 0; + mlx::SDLManager::HideCursor(); } - int mlx_mouse_move(void* mlx, void* win, int x, int y) + void mlx_mouse_move(void* mlx, void* win, int x, int y) { MLX_CHECK_APPLICATION_POINTER(mlx); static_cast(mlx)->MouseMove(win, x, y); - return 0; } - int mlx_mouse_get_pos(void* mlx, int* x, int* y) + void mlx_mouse_get_pos(void* mlx, int* x, int* y) { MLX_CHECK_APPLICATION_POINTER(mlx); static_cast(mlx)->GetMousePos(x, y); - return 0; } - int mlx_on_event(void* mlx, void* win, mlx_event_type event, int (*funct_ptr)(int, void*), void* param) + void mlx_on_event(void* mlx, void* win, mlx_event_type event, int (*funct_ptr)(int, void*), void* param) { MLX_CHECK_APPLICATION_POINTER(mlx); static_cast(mlx)->OnEvent(win, static_cast(event), funct_ptr, param); - return 0; } void* mlx_new_image(void* mlx, int width, int height) @@ -136,18 +131,16 @@ extern "C" static_cast(mlx)->SetTexturePixel(img, x, y, *reinterpret_cast(color_bits)); } - int mlx_put_image_to_window(void* mlx, void* win, void* img, int x, int y) + void mlx_put_image_to_window(void* mlx, void* win, void* img, int x, int y) { MLX_CHECK_APPLICATION_POINTER(mlx); static_cast(mlx)->TexturePut(win, img, x, y); - return 0; } - int mlx_destroy_image(void* mlx, void* img) + void 
mlx_destroy_image(void* mlx, void* img) { MLX_CHECK_APPLICATION_POINTER(mlx); static_cast(mlx)->DestroyTexture(img); - return 0; } void* mlx_png_file_to_image(void* mlx, char* filename, int* width, int* height) @@ -201,7 +194,7 @@ extern "C" return static_cast(mlx)->NewStbTexture(filename, width, height); } - int mlx_pixel_put(void* mlx, void* win, int x, int y, int color) + void mlx_pixel_put(void* mlx, void* win, int x, int y, int color) { MLX_CHECK_APPLICATION_POINTER(mlx); unsigned char color_bits[4]; @@ -210,10 +203,9 @@ extern "C" color_bits[2] = (color & 0x000000FF); color_bits[3] = (color & 0xFF000000) >> 24; static_cast(mlx)->PixelPut(win, x, y, *reinterpret_cast(color_bits)); - return 0; } - int mlx_string_put(void* mlx, void* win, int x, int y, int color, char* str) + void mlx_string_put(void* mlx, void* win, int x, int y, int color, char* str) { MLX_CHECK_APPLICATION_POINTER(mlx); unsigned char color_bits[4]; @@ -222,7 +214,6 @@ extern "C" color_bits[2] = (color & 0x000000FF); color_bits[3] = (color & 0xFF000000) >> 24; static_cast(mlx)->StringPut(win, x, y, *reinterpret_cast(color_bits), str); - return 0; } void mlx_set_font(void* mlx, char* filepath) @@ -262,49 +253,39 @@ extern "C" static_cast(mlx)->LoadFont(file, scale); } - int mlx_clear_window(void* mlx, void* win) + void mlx_clear_window(void* mlx, void* win) { MLX_CHECK_APPLICATION_POINTER(mlx); static_cast(mlx)->ClearGraphicsSupport(win); - return 0; } - int mlx_destroy_window(void* mlx, void* win) + void mlx_destroy_window(void* mlx, void* win) { MLX_CHECK_APPLICATION_POINTER(mlx); static_cast(mlx)->DestroyGraphicsSupport(win); - return 0; } - int mlx_destroy_display(void* mlx) + void mlx_destroy_display(void* mlx) { MLX_CHECK_APPLICATION_POINTER(mlx); delete static_cast(mlx); __mlx_ptr = nullptr; - return 0; } - int mlx_get_screens_size(void* mlx, void* win, int* w, int* h) + void mlx_get_screens_size(void* mlx, void* win, int* w, int* h) { MLX_CHECK_APPLICATION_POINTER(mlx); static_cast(mlx)->GetScreenSize(win, w, h); - return 0; } - int mlx_set_fps_goal(void* mlx, int fps) + void mlx_set_fps_goal(void* mlx, int fps) { MLX_CHECK_APPLICATION_POINTER(mlx); if(fps < 0) - { mlx::Error("You cannot set a negative FPS cap (nice try)"); - return 0; - } - if(fps == 0) - { + else if(fps == 0) mlx::Error("You cannot set a FPS cap to 0 (nice try)"); - return 0; - } - static_cast(mlx)->SetFPSCap(static_cast(fps)); - return 0; + else + static_cast(mlx)->SetFPSCap(static_cast(fps)); } } diff --git a/runtime/Sources/Core/SDLManager.cpp b/runtime/Sources/Core/SDLManager.cpp index 3212886..8caff3b 100644 --- a/runtime/Sources/Core/SDLManager.cpp +++ b/runtime/Sources/Core/SDLManager.cpp @@ -125,6 +125,16 @@ namespace mlx SDL_SetWindowPosition(static_cast(window)->window, x, y); } + void SDLManager::HideCursor() noexcept + { + SDL_ShowCursor(SDL_DISABLE); + } + + void SDLManager::ShowCursor() noexcept + { + SDL_ShowCursor(SDL_ENABLE); + } + std::int32_t SDLManager::GetX() const noexcept { int dummy; From af70e2e3541c4d5c15a25f12a79351893349545b Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Sun, 3 Nov 2024 01:23:52 +0100 Subject: [PATCH 067/131] yes --- example/main.c | 2 +- includes/mlx.h | 50 +- runtime/Includes/Core/Application.h | 2 +- runtime/Includes/Core/Application.inl | 6 +- runtime/Includes/Core/Graphics.h | 2 +- runtime/Includes/Core/SDLManager.h | 2 +- runtime/Includes/Platform/Window.h | 2 +- runtime/Includes/Renderer/Renderer.h | 22 +- runtime/Includes/Renderer/Swapchain.h | 43 + runtime/Sources/Core/Bridge.cpp | 15 +- 
runtime/Sources/Core/Graphics.cpp | 11 +- runtime/Sources/Core/SDLManager.cpp | 12 +- runtime/Sources/Graphics/PutPixelManager.cpp | 2 +- runtime/Sources/Platform/Window.cpp | 4 +- .../Sources/Renderer/Pipelines/Graphics.cpp | 6 +- runtime/Sources/Renderer/RenderCore.cpp | 5 +- .../Renderer/RenderPasses/FinalPass.cpp | 2 +- .../Sources/Renderer/RenderPasses/Passes.cpp | 4 +- runtime/Sources/Renderer/Renderer.cpp | 83 +- runtime/Sources/Renderer/Swapchain.cpp | 108 + third_party/kvf.h | 82 +- third_party/vulkan/vulkan.cppm | 164 +- third_party/vulkan/vulkan.hpp | 16993 ++++++++-------- third_party/vulkan/vulkan_beta.h | 28 +- third_party/vulkan/vulkan_core.h | 582 +- third_party/vulkan/vulkan_enums.hpp | 204 +- .../vulkan/vulkan_extension_inspection.hpp | 68 +- third_party/vulkan/vulkan_funcs.hpp | 3446 ++-- third_party/vulkan/vulkan_handles.hpp | 1097 +- third_party/vulkan/vulkan_hash.hpp | 482 +- third_party/vulkan/vulkan_hpp_macros.hpp | 58 +- third_party/vulkan/vulkan_raii.hpp | 6274 +++--- third_party/vulkan/vulkan_shared.hpp | 372 +- .../vulkan/vulkan_static_assertions.hpp | 328 +- third_party/vulkan/vulkan_structs.hpp | 3927 +++- third_party/vulkan/vulkan_to_string.hpp | 189 +- 36 files changed, 21446 insertions(+), 13231 deletions(-) create mode 100644 runtime/Includes/Renderer/Swapchain.h create mode 100644 runtime/Sources/Renderer/Swapchain.cpp diff --git a/example/main.c b/example/main.c index bd30ae4..bac9e5a 100644 --- a/example/main.c +++ b/example/main.c @@ -131,7 +131,7 @@ int main(void) int dummy; mlx.mlx = mlx_init(); - mlx.win = mlx_new_window(mlx.mlx, 400, 400, "My window"); + mlx.win = mlx_new_resizable_window(mlx.mlx, 400, 400, "My window"); mlx_set_fps_goal(mlx.mlx, 60); diff --git a/includes/mlx.h b/includes/mlx.h index c7d5932..6573599 100644 --- a/includes/mlx.h +++ b/includes/mlx.h @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/04 16:56:35 by maldavid #+# #+# */ -/* Updated: 2024/10/31 15:15:24 by maldavid ### ########.fr */ +/* Updated: 2024/10/31 16:21:35 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -32,7 +32,6 @@ typedef enum MLX_WINDOW_EVENT = 5 } mlx_event_type; - /** * @brief Initializes the MLX internal application * @@ -40,7 +39,6 @@ typedef enum */ MLX_API void* mlx_init(); - /** * @brief Creates a new window * @@ -49,10 +47,21 @@ MLX_API void* mlx_init(); * @param h Height of the window * @param title Title of the window * - * @return (void*) An opaque pointer to the internal MLX window or NULL (0x0) in case of error + * @return (void*) An opaque pointer to the internal MLX window or NULL (0x0) in case of error */ MLX_API void* mlx_new_window(void* mlx, int w, int h, const char* title); +/** + * @brief Creates a new resizable window + * + * @param mlx Internal MLX application + * @param w Width of the window + * @param h Height of the window + * @param title Title of the window + * + * @return (void*) An opaque pointer to the internal MLX window or NULL (0x0) in case of error + */ +MLX_API void* mlx_new_resizable_window(void* mlx, int w, int h, const char* title); /** * @brief Creates a new window @@ -76,7 +85,6 @@ MLX_API void mlx_set_window_position(void *mlx, void *win, int x, int y); */ MLX_API void mlx_loop_hook(void* mlx, int (*f)(void*), void* param); - /** * @brief Starts the internal main loop * @@ -86,9 +94,8 @@ MLX_API void mlx_loop_hook(void* mlx, int (*f)(void*), void* param); */ MLX_API void mlx_loop(void* mlx); - /** - * 
@brief Ends the internal main loop + * @brief Ends the internal run loop * * @param mlx Internal MLX application * @@ -96,7 +103,6 @@ MLX_API void mlx_loop(void* mlx); */ MLX_API void mlx_loop_end(void* mlx); - /** * @brief Shows mouse cursor * @@ -104,7 +110,6 @@ MLX_API void mlx_loop_end(void* mlx); */ MLX_API void mlx_mouse_show(); - /** * @brief Hides mouse cursor * @@ -112,7 +117,6 @@ MLX_API void mlx_mouse_show(); */ MLX_API void mlx_mouse_hide(); - /** * @brief Moves cursor to givent position * @@ -125,7 +129,6 @@ MLX_API void mlx_mouse_hide(); */ MLX_API void mlx_mouse_move(void* mlx, void* win, int x, int y); - /** * @brief Get cursor's position * @@ -137,7 +140,6 @@ MLX_API void mlx_mouse_move(void* mlx, void* win, int x, int y); */ MLX_API void mlx_mouse_get_pos(void* mlx, int* x, int* y); - /** * @brief Gives a function to be executed on event type * @@ -151,7 +153,6 @@ MLX_API void mlx_mouse_get_pos(void* mlx, int* x, int* y); */ MLX_API void mlx_on_event(void* mlx, void* win, mlx_event_type event, int (*f)(int, void*), void* param); - /** * @brief Put a pixel in the window * @@ -168,7 +169,6 @@ MLX_API void mlx_on_event(void* mlx, void* win, mlx_event_type event, int (*f)(i */ MLX_API void mlx_pixel_put(void* mlx, void* win, int x, int y, int color); - /** * @brief Create a new empty image * @@ -176,11 +176,10 @@ MLX_API void mlx_pixel_put(void* mlx, void* win, int x, int y, int color); * @param width Width of the image * @param height Height of the image * - * @return (void*) An opaque pointer to the internal image or NULL (0x0) in case of error + * @return (void*) An opaque pointer to the internal image or NULL (0x0) in case of error */ MLX_API void* mlx_new_image(void* mlx, int width, int height); - /** * @brief Get image pixel data * @@ -201,7 +200,6 @@ MLX_API void* mlx_new_image(void* mlx, int width, int height); */ MLX_API int mlx_get_image_pixel(void* mlx, void* img, int x, int y); - /** * @brief Set image pixel data * @@ -223,7 +221,6 @@ MLX_API int mlx_get_image_pixel(void* mlx, void* img, int x, int y); */ MLX_API void mlx_set_image_pixel(void* mlx, void* img, int x, int y, int color); - /** * @brief Put image to the given window * @@ -237,7 +234,6 @@ MLX_API void mlx_set_image_pixel(void* mlx, void* img, int x, int y, int color); */ MLX_API void mlx_put_image_to_window(void* mlx, void* win, void* img, int x, int y); - /** * @brief Destroys internal image * @@ -248,7 +244,6 @@ MLX_API void mlx_put_image_to_window(void* mlx, void* win, void* img, int x, int */ MLX_API void mlx_destroy_image(void* mlx, void* img); - /** * @brief Create a new image from a png file * @@ -257,11 +252,10 @@ MLX_API void mlx_destroy_image(void* mlx, void* img); * @param width Get the width of the image * @param heigth Get the height of the image * - * @return (void*) An opaque pointer to the internal image or NULL (0x0) in case of error + * @return (void*) An opaque pointer to the internal image or NULL (0x0) in case of error */ MLX_API void* mlx_png_file_to_image(void* mlx, char* filename, int* width, int* height); - /** * @brief Create a new image from a jpg file * @@ -270,11 +264,10 @@ MLX_API void* mlx_png_file_to_image(void* mlx, char* filename, int* width, int* * @param width Get the width of the image * @param heigth Get the height of the image * - * @return (void*) An opaque pointer to the internal image or NULL (0x0) in case of error + * @return (void*) An opaque pointer to the internal image or NULL (0x0) in case of error */ MLX_API void* mlx_jpg_file_to_image(void* mlx, char* 
filename, int* width, int* height); - /** * @brief Create a new image from a bmp file * @@ -283,11 +276,10 @@ MLX_API void* mlx_jpg_file_to_image(void* mlx, char* filename, int* width, int* * @param width Get the width of the image * @param heigth Get the height of the image * - * @return (void*) An opaque pointer to the internal image or NULL (0x0) in case of error + * @return (void*) An opaque pointer to the internal image or NULL (0x0) in case of error */ MLX_API void* mlx_bmp_file_to_image(void* mlx, char* filename, int* width, int* height); - /** * @brief Put text in given window * @@ -302,7 +294,6 @@ MLX_API void* mlx_bmp_file_to_image(void* mlx, char* filename, int* width, int* */ MLX_API void mlx_string_put(void* mlx, void* win, int x, int y, int color, char* str); - /** * @brief Loads a font to be used by `mlx_string_put` * @@ -314,7 +305,6 @@ MLX_API void mlx_string_put(void* mlx, void* win, int x, int y, int color, char* */ MLX_API void mlx_set_font(void* mlx, char* filepath); - /** * @brief Loads a font to be used by `mlx_string_put` and scales it * @@ -327,7 +317,6 @@ MLX_API void mlx_set_font(void* mlx, char* filepath); */ MLX_API void mlx_set_font_scale(void* mlx, char* filepath, float scale); - /** * @brief Clears the given window (resets all rendered data) * @@ -338,7 +327,6 @@ MLX_API void mlx_set_font_scale(void* mlx, char* filepath, float scale); */ MLX_API void mlx_clear_window(void* mlx, void* win); - /** * @brief Destroys internal window * @@ -358,7 +346,6 @@ MLX_API void mlx_destroy_window(void* mlx, void* win); */ MLX_API void mlx_destroy_display(void* mlx); - /** * @brief Get the size of the screen the given window is on * @@ -371,7 +358,6 @@ MLX_API void mlx_destroy_display(void* mlx); */ MLX_API void mlx_get_screens_size(void* mlx, void* win, int* w, int* h); - /** * @brief Caps the FPS * diff --git a/runtime/Includes/Core/Application.h b/runtime/Includes/Core/Application.h index b80b2ab..acb634a 100644 --- a/runtime/Includes/Core/Application.h +++ b/runtime/Includes/Core/Application.h @@ -26,7 +26,7 @@ namespace mlx inline void SetFPSCap(std::uint32_t fps) noexcept; - inline Handle NewGraphicsSuport(std::size_t w, std::size_t h, const char* title); + inline Handle NewGraphicsSuport(std::size_t w, std::size_t h, const char* title, bool is_resizable); inline void ClearGraphicsSupport(Handle win); inline void DestroyGraphicsSupport(Handle win); inline void SetGraphicsSupportPosition(Handle win, int x, int y); diff --git a/runtime/Includes/Core/Application.inl b/runtime/Includes/Core/Application.inl index 5a84d68..445cf15 100644 --- a/runtime/Includes/Core/Application.inl +++ b/runtime/Includes/Core/Application.inl @@ -72,7 +72,7 @@ namespace mlx m_fps.SetMaxFPS(fps); } - void* Application::NewGraphicsSuport(std::size_t w, std::size_t h, const char* title) + void* Application::NewGraphicsSuport(std::size_t w, std::size_t h, const char* title, bool is_resizable) { MLX_PROFILE_FUNCTION(); if(m_image_registry.IsTextureKnown(reinterpret_cast(const_cast(title)))) @@ -88,13 +88,13 @@ namespace mlx { for(std::size_t i = 0; i < 8; i++) { - m_graphics.emplace_back(std::make_unique(std::rand() % 1920, std::rand() % 1080, "让我们在月光下åšçˆ±å§", m_graphics.size())); + m_graphics.emplace_back(std::make_unique(std::rand() % 1920, std::rand() % 1080, "让我们在月光下åšçˆ±å§", m_graphics.size(), is_resizable)); m_graphics.back()->GetWindow()->SetPosition(std::rand() % 1920, std::rand() % 1080); } } else { - m_graphics.emplace_back(std::make_unique(w, h, title, m_graphics.size())); + 
m_graphics.emplace_back(std::make_unique(w, h, title, m_graphics.size(), is_resizable)); m_in.RegisterWindow(m_graphics.back()->GetWindow()); } } diff --git a/runtime/Includes/Core/Graphics.h b/runtime/Includes/Core/Graphics.h index e631349..92c17d9 100644 --- a/runtime/Includes/Core/Graphics.h +++ b/runtime/Includes/Core/Graphics.h @@ -15,7 +15,7 @@ namespace mlx { public: GraphicsSupport(std::size_t w, std::size_t h, NonOwningPtr render_target, int id); - GraphicsSupport(std::size_t w, std::size_t h, std::string title, int id); + GraphicsSupport(std::size_t w, std::size_t h, std::string title, int id, bool is_resizable); [[nodiscard]] MLX_FORCEINLINE int& GetID() noexcept { return m_id; } [[nodiscard]] inline std::shared_ptr GetWindow() { return p_window; } diff --git a/runtime/Includes/Core/SDLManager.h b/runtime/Includes/Core/SDLManager.h index fb43688..2178f59 100644 --- a/runtime/Includes/Core/SDLManager.h +++ b/runtime/Includes/Core/SDLManager.h @@ -10,7 +10,7 @@ namespace mlx public: SDLManager(); - Handle CreateWindow(const std::string& title, std::size_t w, std::size_t h, bool hidden, std::int32_t& id); + Handle CreateWindow(const std::string& title, std::size_t w, std::size_t h, bool hidden, std::int32_t& id, bool is_resizable); void DestroyWindow(Handle window) noexcept; void InputsFetcher(func::function functor); diff --git a/runtime/Includes/Platform/Window.h b/runtime/Includes/Platform/Window.h index e4798d2..66ba306 100644 --- a/runtime/Includes/Platform/Window.h +++ b/runtime/Includes/Platform/Window.h @@ -9,7 +9,7 @@ namespace mlx class Window { public: - Window(std::size_t w, std::size_t h, const std::string& title, bool hidden = false); + Window(std::size_t w, std::size_t h, const std::string& title, bool is_resizable, bool hidden = false); inline Handle GetWindowHandle() const noexcept { return p_window; } inline int GetWidth() const noexcept { return m_width; } diff --git a/runtime/Includes/Renderer/Renderer.h b/runtime/Includes/Renderer/Renderer.h index a2df8cd..07f572b 100644 --- a/runtime/Includes/Renderer/Renderer.h +++ b/runtime/Includes/Renderer/Renderer.h @@ -6,6 +6,7 @@ #include #include #include +#include namespace mlx { @@ -16,46 +17,33 @@ namespace mlx void Init(NonOwningPtr window); - bool BeginFrame(); + void BeginFrame(); void EndFrame(); - [[nodiscard]] inline VkSwapchainKHR GetSwapchain() const noexcept { return m_swapchain; } - [[nodiscard]] inline VkSurfaceKHR GetSurface() const noexcept { return m_surface; } [[nodiscard]] inline VkSemaphore GetImageAvailableSemaphore(int index) const noexcept { return m_image_available_semaphores[index]; } [[nodiscard]] inline VkSemaphore GetRenderFinishedSemaphore(int index) const noexcept { return m_render_finished_semaphores[index]; } [[nodiscard]] inline VkCommandBuffer GetCommandBuffer(int index) const noexcept { return m_cmd_buffers[index]; } [[nodiscard]] inline VkCommandBuffer GetActiveCommandBuffer() const noexcept { return m_cmd_buffers[m_current_frame_index]; } - [[nodiscard]] inline const std::vector& GetSwapchainImages() const { return m_swapchain_images; } [[nodiscard]] inline std::size_t& GetDrawCallsCounterRef() noexcept { return m_drawcalls; } [[nodiscard]] inline std::size_t& GetPolygonDrawnCounterRef() noexcept { return m_polygons_drawn; } - [[nodiscard]] inline std::size_t GetSwapchainImageIndex() const noexcept { return m_swapchain_image_index; } [[nodiscard]] inline std::size_t GetCurrentFrameIndex() const noexcept { return m_current_frame_index; } [[nodiscard]] inline NonOwningPtr GetWindow() 
const noexcept { return p_window; } - - MLX_FORCEINLINE constexpr void RequireFramebufferResize() noexcept { m_framebuffers_resize = true; } + [[nodiscard]] inline const Swapchain& GetSwapchain() const noexcept { return m_swapchain; } void Destroy() noexcept; ~Renderer() = default; private: - void CreateSwapchain(); - void DestroySwapchain(); - - private: + Swapchain m_swapchain; std::array m_image_available_semaphores; std::array m_render_finished_semaphores; std::array m_cmd_buffers; std::array m_cmd_fences; - std::vector m_swapchain_images; NonOwningPtr p_window; - VkSurfaceKHR m_surface = VK_NULL_HANDLE; - VkSwapchainKHR m_swapchain = VK_NULL_HANDLE; std::uint32_t m_current_frame_index = 0; - std::uint32_t m_swapchain_image_index = 0; - std::size_t m_drawcalls = 0; std::size_t m_polygons_drawn = 0; - bool m_framebuffers_resize = false; + std::size_t m_drawcalls = 0; }; } diff --git a/runtime/Includes/Renderer/Swapchain.h b/runtime/Includes/Renderer/Swapchain.h new file mode 100644 index 0000000..c2ca82a --- /dev/null +++ b/runtime/Includes/Renderer/Swapchain.h @@ -0,0 +1,43 @@ +#ifndef __MLX_SWAPCHAIN__ +#define __MLX_SWAPCHAIN__ + +#include +#include + +namespace mlx +{ + class Swapchain + { + public: + Swapchain() = default; + + void Init(NonOwningPtr window); + void AquireFrame(VkSemaphore signal); + void Present(VkSemaphore wait) noexcept; + void Destroy(); + + [[nodiscard]] inline VkSwapchainKHR Get() const noexcept { return m_swapchain; } + [[nodiscard]] inline VkSurfaceKHR GetSurface() const noexcept { return m_surface; } + [[nodiscard]] inline std::uint32_t GetImagesCount() const noexcept { return m_images_count; } + [[nodiscard]] inline std::uint32_t GetMinImagesCount() const noexcept { return m_min_images_count; } + [[nodiscard]] inline std::uint32_t GetImageIndex() const noexcept { return m_current_image_index; } + [[nodiscard]] inline const std::vector& GetSwapchainImages() const { return m_swapchain_images; } + + ~Swapchain() = default; + + private: + void CreateSwapchain(); + + private: + std::vector m_swapchain_images; + VkSwapchainKHR m_swapchain = VK_NULL_HANDLE; + VkSurfaceKHR m_surface = VK_NULL_HANDLE; + NonOwningPtr p_window; + std::uint32_t m_images_count = 0; + std::uint32_t m_min_images_count = 0; + std::uint32_t m_current_image_index = 0; + bool m_resize = false; + }; +} + +#endif diff --git a/runtime/Sources/Core/Bridge.cpp b/runtime/Sources/Core/Bridge.cpp index 59fc1a3..f842cf2 100644 --- a/runtime/Sources/Core/Bridge.cpp +++ b/runtime/Sources/Core/Bridge.cpp @@ -42,7 +42,18 @@ extern "C" mlx::FatalError("invalid window size (%d x %d)", w, h); return NULL; // not nullptr for the C compatibility } - return static_cast(mlx)->NewGraphicsSuport(w, h, title); + return static_cast(mlx)->NewGraphicsSuport(w, h, title, false); + } + + void* mlx_new_resizable_window(void* mlx, int w, int h, const char* title) + { + MLX_CHECK_APPLICATION_POINTER(mlx); + if(w <= 0 || h <= 0) + { + mlx::FatalError("invalid window size (%d x %d)", w, h); + return NULL; // not nullptr for the C compatibility + } + return static_cast(mlx)->NewGraphicsSuport(w, h, title, true); } void mlx_set_window_position(void *mlx, void *win, int x, int y) @@ -51,7 +62,7 @@ extern "C" static_cast(mlx)->SetGraphicsSupportPosition(win, x, y); } - int mlx_loop_hook(void* mlx, int (*f)(void*), void* param) + void mlx_loop_hook(void* mlx, int (*f)(void*), void* param) { MLX_CHECK_APPLICATION_POINTER(mlx); static_cast(mlx)->LoopHook(f, param); diff --git a/runtime/Sources/Core/Graphics.cpp 
b/runtime/Sources/Core/Graphics.cpp index bd6149f..9769bbf 100644 --- a/runtime/Sources/Core/Graphics.cpp +++ b/runtime/Sources/Core/Graphics.cpp @@ -16,9 +16,9 @@ namespace mlx p_scene = std::make_unique(); } - GraphicsSupport::GraphicsSupport(std::size_t w, std::size_t h, std::string title, int id) : + GraphicsSupport::GraphicsSupport(std::size_t w, std::size_t h, std::string title, int id, bool is_resizable) : m_put_pixel_manager(&m_renderer), - p_window(std::make_shared(w, h, title)), + p_window(std::make_shared(w, h, title, is_resizable)), m_id(id), m_has_window(true) { @@ -31,13 +31,10 @@ namespace mlx void GraphicsSupport::Render() noexcept { MLX_PROFILE_FUNCTION(); - if(m_renderer.BeginFrame()) - { + m_renderer.BeginFrame(); m_draw_layer = 0; m_scene_renderer.Render(*p_scene, m_renderer); - m_renderer.EndFrame(); - } - + m_renderer.EndFrame(); #ifdef GRAPHICS_MEMORY_DUMP // dump memory to file every two seconds using namespace std::chrono_literals; diff --git a/runtime/Sources/Core/SDLManager.cpp b/runtime/Sources/Core/SDLManager.cpp index 8caff3b..8239119 100644 --- a/runtime/Sources/Core/SDLManager.cpp +++ b/runtime/Sources/Core/SDLManager.cpp @@ -47,12 +47,20 @@ namespace mlx DebugLog("SDL Manager initialized"); } - Handle SDLManager::CreateWindow(const std::string& title, std::size_t w, std::size_t h, bool hidden, std::int32_t& id) + Handle SDLManager::CreateWindow(const std::string& title, std::size_t w, std::size_t h, bool hidden, std::int32_t& id, bool is_resizable) { Internal::WindowInfos* infos = new Internal::WindowInfos; Verify(infos != nullptr, "SDL: window allocation failed"); - infos->window = SDL_CreateWindow(title.c_str(), SDL_WINDOWPOS_CENTERED, SDL_WINDOWPOS_CENTERED, w, h, SDL_WINDOW_VULKAN | (hidden ? SDL_WINDOW_HIDDEN : SDL_WINDOW_SHOWN)); + std::uint32_t flags = SDL_WINDOW_VULKAN; + if(hidden) + flags |= SDL_WINDOW_HIDDEN; + else + flags |= SDL_WINDOW_SHOWN; + if(is_resizable) + flags |= SDL_WINDOW_RESIZABLE; + + infos->window = SDL_CreateWindow(title.c_str(), SDL_WINDOWPOS_CENTERED, SDL_WINDOWPOS_CENTERED, w, h, flags); if(!infos->window) FatalError("SDL: unable to open a new window; %", SDL_GetError()); infos->icon = SDL_CreateRGBSurfaceFrom(static_cast(logo_mlx), logo_mlx_width, logo_mlx_height, 32, 4 * logo_mlx_width, rmask, gmask, bmask, amask); diff --git a/runtime/Sources/Graphics/PutPixelManager.cpp b/runtime/Sources/Graphics/PutPixelManager.cpp index 0523164..3266ef6 100644 --- a/runtime/Sources/Graphics/PutPixelManager.cpp +++ b/runtime/Sources/Graphics/PutPixelManager.cpp @@ -9,7 +9,7 @@ namespace mlx { Verify((bool)p_renderer, "invalid renderer pointer"); - VkExtent2D swapchain_extent = kvfGetSwapchainImagesSize(p_renderer->GetSwapchain()); + VkExtent2D swapchain_extent = kvfGetSwapchainImagesSize(p_renderer->GetSwapchain().Get()); #ifdef DEBUG auto res = m_textures.try_emplace(draw_layer, CPUBuffer{}, swapchain_extent.width, swapchain_extent.height, VK_FORMAT_R8G8B8A8_SRGB, false, "mlx_put_pixel_layer_" + std::to_string(draw_layer)); #else diff --git a/runtime/Sources/Platform/Window.cpp b/runtime/Sources/Platform/Window.cpp index 7004a50..53bd6c3 100644 --- a/runtime/Sources/Platform/Window.cpp +++ b/runtime/Sources/Platform/Window.cpp @@ -5,9 +5,9 @@ namespace mlx { - Window::Window(std::size_t w, std::size_t h, const std::string& title, bool hidden) : m_name(title), m_width(w), m_height(h) + Window::Window(std::size_t w, std::size_t h, const std::string& title, bool is_resizable, bool hidden) : m_name(title), m_width(w), m_height(h) { - 
p_window = SDLManager::Get().CreateWindow(title, w, h, hidden, m_id); + p_window = SDLManager::Get().CreateWindow(title, w, h, hidden, m_id, is_resizable); } void Window::Destroy() noexcept diff --git a/runtime/Sources/Renderer/Pipelines/Graphics.cpp b/runtime/Sources/Renderer/Pipelines/Graphics.cpp index 7d9b902..52950f1 100644 --- a/runtime/Sources/Renderer/Pipelines/Graphics.cpp +++ b/runtime/Sources/Renderer/Pipelines/Graphics.cpp @@ -153,8 +153,8 @@ namespace mlx std::vector attachment_views; if(p_renderer) { - attachments.push_back(kvfBuildSwapchainAttachmentDescription(p_renderer->GetSwapchain(), clear_attachments)); - attachment_views.push_back(p_renderer->GetSwapchainImages()[0].GetImageView()); + attachments.push_back(kvfBuildSwapchainAttachmentDescription(p_renderer->GetSwapchain().Get(), clear_attachments)); + attachment_views.push_back(p_renderer->GetSwapchain().GetSwapchainImages()[0].GetImageView()); } #pragma omp parallel for @@ -171,7 +171,7 @@ namespace mlx if(p_renderer) { - for(const Image& image : p_renderer->GetSwapchainImages()) + for(const Image& image : p_renderer->GetSwapchain().GetSwapchainImages()) { attachment_views[0] = image.GetImageView(); m_framebuffers.push_back(kvfCreateFramebuffer(RenderCore::Get().GetDevice(), m_renderpass, attachment_views.data(), attachment_views.size(), { .width = image.GetWidth(), .height = image.GetHeight() })); diff --git a/runtime/Sources/Renderer/RenderCore.cpp b/runtime/Sources/Renderer/RenderCore.cpp index 402f893..3ea7461 100644 --- a/runtime/Sources/Renderer/RenderCore.cpp +++ b/runtime/Sources/Renderer/RenderCore.cpp @@ -1,4 +1,3 @@ -#include #include #include @@ -7,6 +6,8 @@ #define KVF_ENABLE_VALIDATION_LAYERS #endif +#define KVF_ASSERT(x) mlx::Assert(x, #x) + #if defined(MLX_COMPILER_GCC) || defined(MLX_COMPILER_CLANG) #pragma clang diagnostic push #pragma clang diagnostic ignored "-Wmissing-field-initializers" @@ -60,8 +61,6 @@ namespace mlx kvfSetValidationErrorCallback(&ValidationErrorCallback); kvfSetValidationWarningCallback(&WarningCallback); - //kvfAddLayer("VK_LAYER_MESA_overlay"); - Window window(1, 1, "", true); std::vector instance_extensions = window.GetRequiredVulkanInstanceExtentions(); #ifdef MLX_PLAT_MACOS diff --git a/runtime/Sources/Renderer/RenderPasses/FinalPass.cpp b/runtime/Sources/Renderer/RenderPasses/FinalPass.cpp index 7464c6e..5775187 100644 --- a/runtime/Sources/Renderer/RenderPasses/FinalPass.cpp +++ b/runtime/Sources/Renderer/RenderPasses/FinalPass.cpp @@ -63,7 +63,7 @@ namespace mlx p_set->SetImage(renderer.GetCurrentFrameIndex(), 0, render_target); p_set->Update(renderer.GetCurrentFrameIndex(), cmd); - m_pipeline.BindPipeline(cmd, renderer.GetSwapchainImageIndex(), { 0.0f, 0.0f, 0.0f, 1.0f }); + m_pipeline.BindPipeline(cmd, renderer.GetSwapchain().GetImageIndex(), { 0.0f, 0.0f, 0.0f, 1.0f }); VkDescriptorSet set = p_set->GetSet(renderer.GetCurrentFrameIndex()); RenderCore::Get().vkCmdBindDescriptorSets(cmd, m_pipeline.GetPipelineBindPoint(), m_pipeline.GetPipelineLayout(), 0, 1, &set, 0, nullptr); RenderCore::Get().vkCmdDraw(cmd, 3, 1, 0, 0); diff --git a/runtime/Sources/Renderer/RenderPasses/Passes.cpp b/runtime/Sources/Renderer/RenderPasses/Passes.cpp index 9f5dc12..2e0c76f 100644 --- a/runtime/Sources/Renderer/RenderPasses/Passes.cpp +++ b/runtime/Sources/Renderer/RenderPasses/Passes.cpp @@ -14,7 +14,7 @@ namespace mlx if(event.What() == Event::ResizeEventCode) { m_main_render_texture.Destroy(); - auto extent = kvfGetSwapchainImagesSize(renderer.GetSwapchain()); + auto extent = 
kvfGetSwapchainImagesSize(renderer.GetSwapchain().Get()); #ifdef DEBUG m_main_render_texture.Init({}, extent.width, extent.height, VK_FORMAT_R8G8B8A8_SRGB, false, "mlx_renderpasses_target"); #else @@ -24,7 +24,7 @@ namespace mlx } }; EventBus::RegisterListener({ functor, "__MlxRenderPasses" }); - auto extent = kvfGetSwapchainImagesSize(renderer.GetSwapchain()); + auto extent = kvfGetSwapchainImagesSize(renderer.GetSwapchain().Get()); #ifdef DEBUG m_main_render_texture.Init({}, extent.width, extent.height, VK_FORMAT_R8G8B8A8_SRGB, false, "mlx_renderpasses_target"); diff --git a/runtime/Sources/Renderer/Renderer.cpp b/runtime/Sources/Renderer/Renderer.cpp index 823db60..7014e20 100644 --- a/runtime/Sources/Renderer/Renderer.cpp +++ b/runtime/Sources/Renderer/Renderer.cpp @@ -9,11 +9,6 @@ namespace mlx { namespace Internal { - struct ResizeEventBroadcast : public EventBase - { - Event What() const override { return Event::ResizeEventCode; } - }; - struct FrameBeginEventBroadcast : public EventBase { Event What() const override { return Event::FrameBeginEventCode; } @@ -23,20 +18,8 @@ namespace mlx void Renderer::Init(NonOwningPtr window) { MLX_PROFILE_FUNCTION(); - func::function functor = [this](const EventBase& event) - { - if(event.What() == Event::ResizeEventCode) - this->RequireFramebufferResize(); - }; - EventBus::RegisterListener({ functor, "__MlxRenderer" + std::to_string(reinterpret_cast(this)) }); - p_window = window; - - m_surface = p_window->CreateVulkanSurface(RenderCore::Get().GetInstance()); - DebugLog("Vulkan: surface created"); - - CreateSwapchain(); - + m_swapchain.Init(p_window); for(std::size_t i = 0; i < MAX_FRAMES_IN_FLIGHT; i++) { m_image_available_semaphores[i] = kvfCreateSemaphore(RenderCore::Get().GetDevice()); @@ -50,27 +33,16 @@ namespace mlx } } - bool Renderer::BeginFrame() + void Renderer::BeginFrame() { MLX_PROFILE_FUNCTION(); kvfWaitForFence(RenderCore::Get().GetDevice(), m_cmd_fences[m_current_frame_index]); - VkResult result = RenderCore::Get().vkAcquireNextImageKHR(RenderCore::Get().GetDevice(), m_swapchain, UINT64_MAX, m_image_available_semaphores[m_current_frame_index], VK_NULL_HANDLE, &m_swapchain_image_index); - if(result == VK_ERROR_OUT_OF_DATE_KHR) - { - //DestroySwapchain(); - //CreateSwapchain(); - //EventBus::SendBroadcast(Internal::ResizeEventBroadcast{}); - //return false; - } - else if(result != VK_SUCCESS && result != VK_SUBOPTIMAL_KHR) - FatalError("Vulkan error: failed to acquire swapchain image, %", kvfVerbaliseVkResult(result)); - + m_swapchain.AquireFrame(m_image_available_semaphores[m_current_frame_index]); RenderCore::Get().vkResetCommandBuffer(m_cmd_buffers[m_current_frame_index], 0); kvfBeginCommandBuffer(m_cmd_buffers[m_current_frame_index], 0); m_drawcalls = 0; m_polygons_drawn = 0; EventBus::SendBroadcast(Internal::FrameBeginEventBroadcast{}); - return true; } void Renderer::EndFrame() @@ -79,55 +51,14 @@ namespace mlx VkPipelineStageFlags wait_stages[] = { VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT }; kvfEndCommandBuffer(m_cmd_buffers[m_current_frame_index]); kvfSubmitCommandBuffer(RenderCore::Get().GetDevice(), m_cmd_buffers[m_current_frame_index], KVF_GRAPHICS_QUEUE, m_render_finished_semaphores[m_current_frame_index], m_image_available_semaphores[m_current_frame_index], m_cmd_fences[m_current_frame_index], wait_stages); - if(!kvfQueuePresentKHR(RenderCore::Get().GetDevice(), m_render_finished_semaphores[m_current_frame_index], m_swapchain, m_swapchain_image_index) || m_framebuffers_resize) - { - m_framebuffers_resize = false; 
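// [Editor's note - illustrative sketch, not part of this patch]
// The commented-out recreate-on-present path being removed in this hunk is replaced by
// the new Swapchain class, which recreates the swapchain lazily at acquire time and
// passes the previous handle through VkSwapchainCreateInfoKHR::oldSwapchain (see the
// extra old_swapchain parameter added to kvfCreateSwapchainKHR further below).
// Assuming hypothetical helpers (RecreateSwapchain(), FatalError()) and members
// (m_swapchain, m_resize, m_image_index), the underlying raw Vulkan pattern usually
// looks like this:
//
//     VkResult res = vkAcquireNextImageKHR(device, m_swapchain, UINT64_MAX,
//                                          image_available_semaphore,
//                                          VK_NULL_HANDLE, &m_image_index);
//     if(res == VK_ERROR_OUT_OF_DATE_KHR)
//     {
//         vkDeviceWaitIdle(device);
//         RecreateSwapchain();   // new swapchain, oldSwapchain = previous handle
//         return;                // skip this frame and acquire again next frame
//     }
//     else if(res == VK_SUBOPTIMAL_KHR)
//         m_resize = true;       // image still usable; recreate before the next acquire
//     else if(res != VK_SUCCESS)
//         FatalError("failed to acquire swapchain image");
//
//     // ... record and submit command buffers, then present ...
//     res = vkQueuePresentKHR(present_queue, &present_info);
//     if(res == VK_ERROR_OUT_OF_DATE_KHR || res == VK_SUBOPTIMAL_KHR)
//         m_resize = true;       // recreate on the next acquire
// [End editor's note]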
- //DestroySwapchain(); - //CreateSwapchain(); - //EventBus::SendBroadcast(Internal::ResizeEventBroadcast{}); - } + m_swapchain.Present(m_render_finished_semaphores[m_current_frame_index]); m_current_frame_index = (m_current_frame_index + 1) % MAX_FRAMES_IN_FLIGHT; } - void Renderer::CreateSwapchain() - { - MLX_PROFILE_FUNCTION(); - Vec2ui drawable_size = p_window->GetVulkanDrawableSize(); - VkExtent2D extent = { drawable_size.x, drawable_size.y }; - m_swapchain = kvfCreateSwapchainKHR(RenderCore::Get().GetDevice(), RenderCore::Get().GetPhysicalDevice(), m_surface, extent, false); - - std::uint32_t images_count = kvfGetSwapchainImagesCount(m_swapchain); - std::vector tmp(images_count); - m_swapchain_images.resize(images_count); - RenderCore::Get().vkGetSwapchainImagesKHR(RenderCore::Get().GetDevice(), m_swapchain, &images_count, tmp.data()); - for(std::size_t i = 0; i < images_count; i++) - { - #ifdef DEBUG - m_swapchain_images[i].Init(tmp[i], kvfGetSwapchainImagesFormat(m_swapchain), extent.width, extent.height, VK_IMAGE_LAYOUT_UNDEFINED, "mlx_swapchain_image_" + std::to_string(i)); - #else - m_swapchain_images[i].Init(tmp[i], kvfGetSwapchainImagesFormat(m_swapchain), extent.width, extent.height, VK_IMAGE_LAYOUT_UNDEFINED, {}); - #endif - m_swapchain_images[i].TransitionLayout(VK_IMAGE_LAYOUT_PRESENT_SRC_KHR); - m_swapchain_images[i].CreateImageView(VK_IMAGE_VIEW_TYPE_2D, VK_IMAGE_ASPECT_COLOR_BIT); - } - DebugLog("Vulkan: swapchain created"); - } - - void Renderer::DestroySwapchain() - { - MLX_PROFILE_FUNCTION(); - RenderCore::Get().WaitDeviceIdle(); - for(Image& img : m_swapchain_images) - img.DestroyImageView(); - kvfDestroySwapchainKHR(RenderCore::Get().GetDevice(), m_swapchain); - DebugLog("Vulkan: swapchain destroyed"); - } - void Renderer::Destroy() noexcept { MLX_PROFILE_FUNCTION(); RenderCore::Get().WaitDeviceIdle(); - for(std::size_t i = 0; i < MAX_FRAMES_IN_FLIGHT; i++) { kvfDestroySemaphore(RenderCore::Get().GetDevice(), m_image_available_semaphores[i]); @@ -137,10 +68,6 @@ namespace mlx kvfDestroyFence(RenderCore::Get().GetDevice(), m_cmd_fences[i]); DebugLog("Vulkan: fence destroyed"); } - - DestroySwapchain(); - RenderCore::Get().vkDestroySurfaceKHR(RenderCore::Get().GetInstance(), m_surface, nullptr); - DebugLog("Vulkan: surface destroyed"); - m_surface = VK_NULL_HANDLE; + m_swapchain.Destroy(); } } diff --git a/runtime/Sources/Renderer/Swapchain.cpp b/runtime/Sources/Renderer/Swapchain.cpp new file mode 100644 index 0000000..4c5af67 --- /dev/null +++ b/runtime/Sources/Renderer/Swapchain.cpp @@ -0,0 +1,108 @@ +#include + +#include +#include +#include +#include +#include + +namespace mlx +{ + namespace Internal + { + struct ResizeEventBroadcast : public EventBase + { + Event What() const override { return Event::ResizeEventCode; } + }; + } + + void Swapchain::Init(NonOwningPtr window) + { + p_window = window; + m_surface = window->CreateVulkanSurface(RenderCore::Get().GetInstance()); + DebugLog("Vulkan: surface created"); + CreateSwapchain(); + } + + void Swapchain::AquireFrame(VkSemaphore signal) + { + if(m_resize) + { + RenderCore::Get().WaitDeviceIdle(); + CreateSwapchain(); + EventBus::SendBroadcast(Internal::ResizeEventBroadcast{}); + } + + VkResult result = RenderCore::Get().vkAcquireNextImageKHR(RenderCore::Get().GetDevice(), m_swapchain, UINT64_MAX, signal, VK_NULL_HANDLE, &m_current_image_index); + if(result == VK_SUBOPTIMAL_KHR) + m_resize = true; // Recreate Swapchain next time + else if(result == VK_ERROR_OUT_OF_DATE_KHR) + { + m_resize = true; + 
AquireFrame(signal); + } + else if(result != VK_SUCCESS) + FatalError("Vulkan: failed to acquire swapchain image, %", kvfVerbaliseVkResult(result)); + } + + void Swapchain::Present(VkSemaphore wait) noexcept + { + if(!kvfQueuePresentKHR(RenderCore::Get().GetDevice(), wait, m_swapchain, m_current_image_index)) + m_resize = true; + } + + void Swapchain::Destroy() + { + RenderCore::Get().WaitDeviceIdle(); + + for(Image& img : m_swapchain_images) + img.DestroyImageView(); + // kvfDestroySwapchainKHR(RenderCore::Get().GetDevice(), m_swapchain); + + RenderCore::Get().vkDestroySurfaceKHR(RenderCore::Get().GetInstance(), m_surface, nullptr); + m_surface = VK_NULL_HANDLE; + DebugLog("Vulkan: surface destroyed"); + } + + void Swapchain::CreateSwapchain() + { + for(Image& img : m_swapchain_images) + img.DestroyImageView(); + m_swapchain_images.clear(); + + VkExtent2D extent; + do + { + Vec2ui size = p_window->GetVulkanDrawableSize(); + extent = { size.x, size.y }; + } while(extent.width == 0 || extent.height == 0); + + VkSwapchainKHR old_swapchain = m_swapchain; + m_swapchain = kvfCreateSwapchainKHR(RenderCore::Get().GetDevice(), RenderCore::Get().GetPhysicalDevice(), m_surface, extent, VK_NULL_HANDLE, true); + // if(old_swapchain != VK_NULL_HANDLE) + // kvfDestroySwapchainKHR(RenderCore::Get().GetDevice(), old_swapchain); + + m_images_count = kvfGetSwapchainImagesCount(m_swapchain); + m_min_images_count = kvfGetSwapchainMinImagesCount(m_swapchain); + std::vector tmp(m_images_count); + m_swapchain_images.resize(m_images_count); + RenderCore::Get().vkGetSwapchainImagesKHR(RenderCore::Get().GetDevice(), m_swapchain, &m_images_count, tmp.data()); + VkCommandBuffer cmd = kvfCreateCommandBuffer(RenderCore::Get().GetDevice()); + kvfBeginCommandBuffer(cmd, VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT); + for(std::size_t i = 0; i < m_images_count; i++) + { + #ifdef DEBUG + m_swapchain_images[i].Init(tmp[i], kvfGetSwapchainImagesFormat(m_swapchain), extent.width, extent.height, VK_IMAGE_LAYOUT_UNDEFINED, "mlx_swapchain_image_" + std::to_string(i)); + #else + m_swapchain_images[i].Init(tmp[i], kvfGetSwapchainImagesFormat(m_swapchain), extent.width, extent.height, VK_IMAGE_LAYOUT_UNDEFINED, {}); + #endif + m_swapchain_images[i].TransitionLayout(VK_IMAGE_LAYOUT_PRESENT_SRC_KHR, cmd); + m_swapchain_images[i].CreateImageView(VK_IMAGE_VIEW_TYPE_2D, VK_IMAGE_ASPECT_COLOR_BIT); + } + kvfEndCommandBuffer(cmd); + VkFence fence = kvfCreateFence(RenderCore::Get().GetDevice()); + kvfSubmitSingleTimeCommandBuffer(RenderCore::Get().GetDevice(), cmd, KVF_GRAPHICS_QUEUE, fence); + kvfDestroyFence(RenderCore::Get().GetDevice(), fence); + DebugLog("Vulkan: swapchain created"); + } +} diff --git a/third_party/kvf.h b/third_party/kvf.h index 7e4602f..206c651 100755 --- a/third_party/kvf.h +++ b/third_party/kvf.h @@ -51,12 +51,11 @@ #ifndef KBZ_8_VULKAN_FRAMEWORK_H #define KBZ_8_VULKAN_FRAMEWORK_H -#include "vulkan/vulkan_core.h" #ifdef KVF_IMPL_VK_NO_PROTOTYPES #define VK_NO_PROTOTYPES #endif -#include +#include #include #include @@ -141,7 +140,7 @@ VkSemaphore kvfCreateSemaphore(VkDevice device); void kvfDestroySemaphore(VkDevice device, VkSemaphore semaphore); #ifndef KVF_NO_KHR - VkSwapchainKHR kvfCreateSwapchainKHR(VkDevice device, VkPhysicalDevice physical, VkSurfaceKHR surface, VkExtent2D extent, bool try_vsync); + VkSwapchainKHR kvfCreateSwapchainKHR(VkDevice device, VkPhysicalDevice physical, VkSurfaceKHR surface, VkExtent2D extent, VkSwapchainKHR old_swapchain, bool try_vsync); VkFormat 
kvfGetSwapchainImagesFormat(VkSwapchainKHR swapchain); uint32_t kvfGetSwapchainImagesCount(VkSwapchainKHR swapchain); uint32_t kvfGetSwapchainMinImagesCount(VkSwapchainKHR swapchain); @@ -402,10 +401,10 @@ typedef struct __KvfDevice VkDevice device; VkPhysicalDevice physical; VkCommandPool cmd_pool; - VkCommandBuffer* cmd_buffers = NULL; + VkCommandBuffer* cmd_buffers; __KvfDescriptorPool* sets_pools; - size_t cmd_buffers_size = 0; - size_t cmd_buffers_capacity = 0; + size_t cmd_buffers_size; + size_t cmd_buffers_capacity; size_t sets_pools_size; } __KvfDevice; @@ -449,34 +448,34 @@ struct KvfGraphicsPipelineBuilder }; // Dynamic arrays -__KvfDevice* __kvf_internal_devices = NULL; -size_t __kvf_internal_devices_size = 0; -size_t __kvf_internal_devices_capacity = 0; +static __KvfDevice* __kvf_internal_devices = NULL; +static size_t __kvf_internal_devices_size = 0; +static size_t __kvf_internal_devices_capacity = 0; #ifndef KVF_NO_KHR - __KvfSwapchain* __kvf_internal_swapchains = NULL; - size_t __kvf_internal_swapchains_size = 0; - size_t __kvf_internal_swapchains_capacity = 0; + static __KvfSwapchain* __kvf_internal_swapchains = NULL; + static size_t __kvf_internal_swapchains_size = 0; + static size_t __kvf_internal_swapchains_capacity = 0; #endif -__KvfFramebuffer* __kvf_internal_framebuffers = NULL; -size_t __kvf_internal_framebuffers_size = 0; -size_t __kvf_internal_framebuffers_capacity = 0; +static __KvfFramebuffer* __kvf_internal_framebuffers = NULL; +static size_t __kvf_internal_framebuffers_size = 0; +static size_t __kvf_internal_framebuffers_capacity = 0; #ifdef KVF_ENABLE_VALIDATION_LAYERS - VkDebugUtilsMessengerEXT __kvf_debug_messenger = VK_NULL_HANDLE; - char** __kvf_extra_layers = NULL; - size_t __kvf_extra_layers_count = 0; + static VkDebugUtilsMessengerEXT __kvf_debug_messenger = VK_NULL_HANDLE; + static char** __kvf_extra_layers = NULL; + static size_t __kvf_extra_layers_count = 0; #endif -KvfErrorCallback __kvf_error_callback = NULL; -KvfErrorCallback __kvf_warning_callback = NULL; -KvfErrorCallback __kvf_validation_error_callback = NULL; -KvfErrorCallback __kvf_validation_warning_callback = NULL; +static KvfErrorCallback __kvf_error_callback = NULL; +static KvfErrorCallback __kvf_warning_callback = NULL; +static KvfErrorCallback __kvf_validation_error_callback = NULL; +static KvfErrorCallback __kvf_validation_warning_callback = NULL; #ifdef KVF_IMPL_VK_NO_PROTOTYPES - KvfGlobalVulkanFunctions __kvf_g_fns; - KvfInstanceVulkanFunctions __kvf_i_fns; + static KvfGlobalVulkanFunctions __kvf_g_fns; + static KvfInstanceVulkanFunctions __kvf_i_fns; #endif void __kvfCheckVk(VkResult result, const char* function) @@ -670,7 +669,7 @@ __KvfDevice* __kvfGetKvfDeviceFromVkCommandBuffer(VkCommandBuffer cmd) if(__kvf_internal_swapchains_size == __kvf_internal_swapchains_capacity) { // Resize the dynamic array if necessary - __kvf_internal_swapchains_capacity += 2; + __kvf_internal_swapchains_capacity += 5; __kvf_internal_swapchains = (__KvfSwapchain*)KVF_REALLOC(__kvf_internal_swapchains, __kvf_internal_swapchains_capacity * sizeof(__KvfSwapchain)); } @@ -680,6 +679,7 @@ __KvfDevice* __kvfGetKvfDeviceFromVkCommandBuffer(VkCommandBuffer cmd) __kvf_internal_swapchains[__kvf_internal_swapchains_size].images_count = images_count; __kvf_internal_swapchains[__kvf_internal_swapchains_size].images_extent = extent; __kvf_internal_swapchains_size++; + printf("new size updated %zu, capacity %zu\n", __kvf_internal_swapchains_size, __kvf_internal_swapchains_capacity); } void 
__kvfDestroySwapchain(VkDevice device, VkSwapchainKHR swapchain) @@ -701,6 +701,7 @@ __KvfDevice* __kvfGetKvfDeviceFromVkCommandBuffer(VkCommandBuffer cmd) for(size_t j = i; j < __kvf_internal_swapchains_size - 1; j++) __kvf_internal_swapchains[j] = __kvf_internal_swapchains[j + 1]; __kvf_internal_swapchains_size--; + printf("new size delete %zu, capacity %zu\n", __kvf_internal_swapchains_size, __kvf_internal_swapchains_capacity); if(__kvf_internal_swapchains_size == 0) { KVF_FREE(__kvf_internal_swapchains); @@ -710,15 +711,18 @@ __KvfDevice* __kvfGetKvfDeviceFromVkCommandBuffer(VkCommandBuffer cmd) } } } +#include __KvfSwapchain* __kvfGetKvfSwapchainFromVkSwapchainKHR(VkSwapchainKHR swapchain) { KVF_ASSERT(swapchain != VK_NULL_HANDLE); + printf("size %zu, capacity %zu\n", __kvf_internal_swapchains_size, __kvf_internal_swapchains_capacity); for(size_t i = 0; i < __kvf_internal_swapchains_size; i++) { if(__kvf_internal_swapchains[i].swapchain == swapchain) return &__kvf_internal_swapchains[i]; } + puts("not found"); return NULL; } #endif @@ -894,7 +898,7 @@ VkPipelineStageFlags kvfLayoutToAccessMask(VkImageLayout layout, bool is_destina { case VK_IMAGE_LAYOUT_UNDEFINED: if(is_destination) - KVF_ASSERT(false && "Vulkan: the new layout used in a transition must not be VK_IMAGE_LAYOUT_UNDEFINED"); + KVF_ASSERT(false && "Vulkan : the new layout used in a transition must not be VK_IMAGE_LAYOUT_UNDEFINED"); break; case VK_IMAGE_LAYOUT_GENERAL: access_mask = VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT; break; case VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL: access_mask = VK_ACCESS_COLOR_ATTACHMENT_READ_BIT | VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT; break; @@ -909,13 +913,13 @@ VkPipelineStageFlags kvfLayoutToAccessMask(VkImageLayout layout, bool is_destina if(!is_destination) access_mask = VK_ACCESS_HOST_WRITE_BIT; else - KVF_ASSERT(false && "Vulkan: the new layout used in a transition must not be VK_IMAGE_LAYOUT_PREINITIALIZED"); + KVF_ASSERT(false && "Vulkan : the new layout used in a transition must not be VK_IMAGE_LAYOUT_PREINITIALIZED"); break; case VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL: access_mask = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT; break; case VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL: access_mask = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT; break; case VK_IMAGE_LAYOUT_PRESENT_SRC_KHR: access_mask = VK_ACCESS_MEMORY_READ_BIT; break; - default: KVF_ASSERT(false && "Vulkan: unexpected image layout"); break; + default: KVF_ASSERT(false && "Vulkan : unexpected image layout"); break; } return access_mask; @@ -929,7 +933,7 @@ VkPipelineStageFlags kvfAccessFlagsToPipelineStage(VkAccessFlags access_flags, V { VkAccessFlagBits _access_flag = (VkAccessFlagBits)(access_flags & (~(access_flags - 1))); if(_access_flag == 0 || (_access_flag & (_access_flag - 1)) != 0) - KVF_ASSERT(false && "Vulkan: an error has been caught during access flag to pipeline stage operation"); + KVF_ASSERT(false && "Vulkan : an error has been caught during access flag to pipeline stage operation"); access_flags &= ~_access_flag; switch(_access_flag) @@ -952,7 +956,7 @@ VkPipelineStageFlags kvfAccessFlagsToPipelineStage(VkAccessFlags access_flags, V case VK_ACCESS_MEMORY_READ_BIT: break; case VK_ACCESS_MEMORY_WRITE_BIT: break; - default: KVF_ASSERT(false && "Vulkan: unknown access flag"); break; + default: KVF_ASSERT(false && "Vulkan : unknown access flag"); break; } } return stages; @@ -973,7 +977,7 @@ VkFormat kvfFindSupportFormatInCandidates(VkDevice device, 
VkFormat* candidates, return candidates[i]; } - KVF_ASSERT(false && "Vulkan: failed to find image format"); + KVF_ASSERT(false && "Vulkan : failed to find image format"); return VK_FORMAT_R8G8B8A8_SRGB; // just to avoir warning } @@ -1913,7 +1917,7 @@ void kvfDestroySemaphore(VkDevice device, VkSemaphore semaphore) return t > max ? max : t; } - VkSwapchainKHR kvfCreateSwapchainKHR(VkDevice device, VkPhysicalDevice physical, VkSurfaceKHR surface, VkExtent2D extent, bool try_vsync) + VkSwapchainKHR kvfCreateSwapchainKHR(VkDevice device, VkPhysicalDevice physical, VkSurfaceKHR surface, VkExtent2D extent, VkSwapchainKHR old_swapchain, bool try_vsync) { KVF_ASSERT(device != VK_NULL_HANDLE); VkSwapchainKHR swapchain; @@ -1952,7 +1956,7 @@ void kvfDestroySemaphore(VkDevice device, VkSemaphore semaphore) createInfo.compositeAlpha = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR; createInfo.presentMode = presentMode; createInfo.clipped = VK_TRUE; - createInfo.oldSwapchain = VK_NULL_HANDLE; + createInfo.oldSwapchain = old_swapchain; if(kvf_device->queues.graphics != kvf_device->queues.present) { @@ -1975,6 +1979,7 @@ void kvfDestroySemaphore(VkDevice device, VkSemaphore semaphore) VkFormat kvfGetSwapchainImagesFormat(VkSwapchainKHR swapchain) { + KVF_ASSERT(swapchain != VK_NULL_HANDLE); __KvfSwapchain* kvf_swapchain = __kvfGetKvfSwapchainFromVkSwapchainKHR(swapchain); KVF_ASSERT(kvf_swapchain != NULL); return kvf_swapchain->images_format; @@ -1982,6 +1987,7 @@ void kvfDestroySemaphore(VkDevice device, VkSemaphore semaphore) uint32_t kvfGetSwapchainImagesCount(VkSwapchainKHR swapchain) { + KVF_ASSERT(swapchain != VK_NULL_HANDLE); __KvfSwapchain* kvf_swapchain = __kvfGetKvfSwapchainFromVkSwapchainKHR(swapchain); KVF_ASSERT(kvf_swapchain != NULL); return kvf_swapchain->images_count; @@ -1989,6 +1995,7 @@ void kvfDestroySemaphore(VkDevice device, VkSemaphore semaphore) uint32_t kvfGetSwapchainMinImagesCount(VkSwapchainKHR swapchain) { + KVF_ASSERT(swapchain != VK_NULL_HANDLE); __KvfSwapchain* kvf_swapchain = __kvfGetKvfSwapchainFromVkSwapchainKHR(swapchain); KVF_ASSERT(kvf_swapchain != NULL); return kvf_swapchain->support.capabilities.minImageCount; @@ -1996,6 +2003,7 @@ void kvfDestroySemaphore(VkDevice device, VkSemaphore semaphore) VkExtent2D kvfGetSwapchainImagesSize(VkSwapchainKHR swapchain) { + KVF_ASSERT(swapchain != VK_NULL_HANDLE); __KvfSwapchain* kvf_swapchain = __kvfGetKvfSwapchainFromVkSwapchainKHR(swapchain); KVF_ASSERT(kvf_swapchain != NULL); return kvf_swapchain->images_extent; @@ -2974,10 +2982,10 @@ VkPipeline kvfCreateGraphicsPipeline(VkDevice device, VkPipelineLayout layout, K color_blending.logicOp = VK_LOGIC_OP_COPY; color_blending.attachmentCount = 1; color_blending.pAttachments = &builder->color_blend_attachment_state; - color_blending.blendConstants[0] = 1.0f; - color_blending.blendConstants[1] = 1.0f; - color_blending.blendConstants[2] = 1.0f; - color_blending.blendConstants[3] = 1.0f; + color_blending.blendConstants[0] = 0.0f; + color_blending.blendConstants[1] = 0.0f; + color_blending.blendConstants[2] = 0.0f; + color_blending.blendConstants[3] = 0.0f; VkDynamicState states[] = { VK_DYNAMIC_STATE_VIEWPORT, VK_DYNAMIC_STATE_SCISSOR }; diff --git a/third_party/vulkan/vulkan.cppm b/third_party/vulkan/vulkan.cppm index 60b70f0..65881c4 100644 --- a/third_party/vulkan/vulkan.cppm +++ b/third_party/vulkan/vulkan.cppm @@ -10,6 +10,12 @@ module; +#include + +#if defined( __cpp_lib_modules ) +# define VULKAN_HPP_ENABLE_STD_MODULE +#endif + #include #include #include @@ -26,14 +32,17 @@ export 
namespace VULKAN_HPP_NAMESPACE //===================================== using VULKAN_HPP_NAMESPACE::ArrayWrapper1D; using VULKAN_HPP_NAMESPACE::ArrayWrapper2D; - using VULKAN_HPP_NAMESPACE::DispatchLoaderBase; - using VULKAN_HPP_NAMESPACE::DispatchLoaderDynamic; using VULKAN_HPP_NAMESPACE::Flags; using VULKAN_HPP_NAMESPACE::FlagTraits; + namespace detail + { + using VULKAN_HPP_NAMESPACE::detail::DispatchLoaderBase; + using VULKAN_HPP_NAMESPACE::detail::DispatchLoaderDynamic; #if !defined( VK_NO_PROTOTYPES ) - using VULKAN_HPP_NAMESPACE::DispatchLoaderStatic; + using VULKAN_HPP_NAMESPACE::detail::DispatchLoaderStatic; #endif /*VK_NO_PROTOTYPES*/ + } // namespace detail using VULKAN_HPP_NAMESPACE::operator&; using VULKAN_HPP_NAMESPACE::operator|; @@ -50,14 +59,18 @@ export namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #if !defined( VULKAN_HPP_NO_SMART_HANDLE ) - using VULKAN_HPP_NAMESPACE::ObjectDestroy; - using VULKAN_HPP_NAMESPACE::ObjectDestroyShared; - using VULKAN_HPP_NAMESPACE::ObjectFree; - using VULKAN_HPP_NAMESPACE::ObjectFreeShared; - using VULKAN_HPP_NAMESPACE::ObjectRelease; - using VULKAN_HPP_NAMESPACE::ObjectReleaseShared; - using VULKAN_HPP_NAMESPACE::PoolFree; - using VULKAN_HPP_NAMESPACE::PoolFreeShared; + namespace detail + { + using VULKAN_HPP_NAMESPACE::detail::ObjectDestroy; + using VULKAN_HPP_NAMESPACE::detail::ObjectDestroyShared; + using VULKAN_HPP_NAMESPACE::detail::ObjectFree; + using VULKAN_HPP_NAMESPACE::detail::ObjectFreeShared; + using VULKAN_HPP_NAMESPACE::detail::ObjectRelease; + using VULKAN_HPP_NAMESPACE::detail::ObjectReleaseShared; + using VULKAN_HPP_NAMESPACE::detail::PoolFree; + using VULKAN_HPP_NAMESPACE::detail::PoolFreeShared; + } // namespace detail + using VULKAN_HPP_NAMESPACE::SharedHandle; using VULKAN_HPP_NAMESPACE::UniqueHandle; #endif /*VULKAN_HPP_NO_SMART_HANDLE*/ @@ -879,6 +892,17 @@ export namespace VULKAN_HPP_NAMESPACE //=== VK_KHR_maintenance7 === using VULKAN_HPP_NAMESPACE::PhysicalDeviceLayeredApiKHR; + //=== VK_EXT_device_generated_commands === + using VULKAN_HPP_NAMESPACE::IndirectCommandsInputModeFlagBitsEXT; + using VULKAN_HPP_NAMESPACE::IndirectCommandsInputModeFlagsEXT; + using VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagBitsEXT; + using VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsEXT; + using VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeEXT; + using VULKAN_HPP_NAMESPACE::IndirectExecutionSetInfoTypeEXT; + + //=== VK_EXT_depth_clamp_control === + using VULKAN_HPP_NAMESPACE::DepthClampModeEXT; + //========================= //=== Index Type Traits === //========================= @@ -2149,6 +2173,10 @@ export namespace VULKAN_HPP_NAMESPACE using VULKAN_HPP_NAMESPACE::KHRFormatFeatureFlags2ExtensionName; using VULKAN_HPP_NAMESPACE::KHRFormatFeatureFlags2SpecVersion; + //=== VK_EXT_present_mode_fifo_latest_ready === + using VULKAN_HPP_NAMESPACE::EXTPresentModeFifoLatestReadyExtensionName; + using VULKAN_HPP_NAMESPACE::EXTPresentModeFifoLatestReadySpecVersion; + #if defined( VK_USE_PLATFORM_FUCHSIA ) //=== VK_FUCHSIA_external_memory === using VULKAN_HPP_NAMESPACE::FUCHSIAExternalMemoryExtensionName; @@ -2572,14 +2600,31 @@ export namespace VULKAN_HPP_NAMESPACE using VULKAN_HPP_NAMESPACE::NVRayTracingValidationExtensionName; using VULKAN_HPP_NAMESPACE::NVRayTracingValidationSpecVersion; + //=== VK_EXT_device_generated_commands === + using VULKAN_HPP_NAMESPACE::EXTDeviceGeneratedCommandsExtensionName; + using VULKAN_HPP_NAMESPACE::EXTDeviceGeneratedCommandsSpecVersion; + //=== 
VK_MESA_image_alignment_control === using VULKAN_HPP_NAMESPACE::MESAImageAlignmentControlExtensionName; using VULKAN_HPP_NAMESPACE::MESAImageAlignmentControlSpecVersion; + //=== VK_EXT_depth_clamp_control === + using VULKAN_HPP_NAMESPACE::EXTDepthClampControlExtensionName; + using VULKAN_HPP_NAMESPACE::EXTDepthClampControlSpecVersion; + + //=== VK_HUAWEI_hdr_vivid === + using VULKAN_HPP_NAMESPACE::HUAWEIHdrVividExtensionName; + using VULKAN_HPP_NAMESPACE::HUAWEIHdrVividSpecVersion; + + //=== VK_NV_cooperative_matrix2 === + using VULKAN_HPP_NAMESPACE::NVCooperativeMatrix2ExtensionName; + using VULKAN_HPP_NAMESPACE::NVCooperativeMatrix2SpecVersion; + //======================== //=== CONSTEXPR VALUEs === //======================== using VULKAN_HPP_NAMESPACE::HeaderVersion; + using VULKAN_HPP_NAMESPACE::Use64BitPtrDefines; //========================= //=== CONSTEXPR CALLEEs === @@ -3220,13 +3265,6 @@ export namespace VULKAN_HPP_NAMESPACE using VULKAN_HPP_NAMESPACE::ShaderResourceUsageAMD; using VULKAN_HPP_NAMESPACE::ShaderStatisticsInfoAMD; - //=== VK_KHR_dynamic_rendering === - using VULKAN_HPP_NAMESPACE::AttachmentSampleCountInfoAMD; - using VULKAN_HPP_NAMESPACE::AttachmentSampleCountInfoNV; - using VULKAN_HPP_NAMESPACE::MultiviewPerViewAttributesInfoNVX; - using VULKAN_HPP_NAMESPACE::RenderingFragmentDensityMapAttachmentInfoEXT; - using VULKAN_HPP_NAMESPACE::RenderingFragmentShadingRateAttachmentInfoKHR; - #if defined( VK_USE_PLATFORM_GGP ) //=== VK_GGP_stream_descriptor_surface === using VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP; @@ -3333,6 +3371,7 @@ export namespace VULKAN_HPP_NAMESPACE using VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE; //=== VK_NVX_multiview_per_view_attributes === + using VULKAN_HPP_NAMESPACE::MultiviewPerViewAttributesInfoNVX; using VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX; //=== VK_NV_viewport_swizzle === @@ -3434,6 +3473,10 @@ export namespace VULKAN_HPP_NAMESPACE using VULKAN_HPP_NAMESPACE::PipelineShaderStageNodeCreateInfoAMDX; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ + //=== VK_AMD_mixed_attachment_samples === + using VULKAN_HPP_NAMESPACE::AttachmentSampleCountInfoAMD; + using VULKAN_HPP_NAMESPACE::AttachmentSampleCountInfoNV; + //=== VK_EXT_sample_locations === using VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT; using VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT; @@ -3603,7 +3646,9 @@ export namespace VULKAN_HPP_NAMESPACE using VULKAN_HPP_NAMESPACE::PipelineViewportExclusiveScissorStateCreateInfoNV; //=== VK_NV_device_diagnostic_checkpoints === + using VULKAN_HPP_NAMESPACE::CheckpointData2NV; using VULKAN_HPP_NAMESPACE::CheckpointDataNV; + using VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointProperties2NV; using VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointPropertiesNV; //=== VK_INTEL_shader_integer_functions2 === @@ -3640,6 +3685,7 @@ export namespace VULKAN_HPP_NAMESPACE //=== VK_EXT_fragment_density_map === using VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapFeaturesEXT; using VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapPropertiesEXT; + using VULKAN_HPP_NAMESPACE::RenderingFragmentDensityMapAttachmentInfoEXT; using VULKAN_HPP_NAMESPACE::RenderPassFragmentDensityMapCreateInfoEXT; //=== VK_KHR_fragment_shading_rate === @@ -3648,6 +3694,7 @@ export namespace VULKAN_HPP_NAMESPACE using VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR; using VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRatePropertiesKHR; using 
VULKAN_HPP_NAMESPACE::PipelineFragmentShadingRateStateCreateInfoKHR; + using VULKAN_HPP_NAMESPACE::RenderingFragmentShadingRateAttachmentInfoKHR; //=== VK_AMD_shader_core_properties2 === using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreProperties2AMD; @@ -3876,10 +3923,6 @@ export namespace VULKAN_HPP_NAMESPACE using VULKAN_HPP_NAMESPACE::ImportMetalTextureInfoEXT; #endif /*VK_USE_PLATFORM_METAL_EXT*/ - //=== VK_KHR_synchronization2 === - using VULKAN_HPP_NAMESPACE::CheckpointData2NV; - using VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointProperties2NV; - //=== VK_EXT_descriptor_buffer === using VULKAN_HPP_NAMESPACE::AccelerationStructureCaptureDescriptorDataInfoEXT; using VULKAN_HPP_NAMESPACE::BufferCaptureDescriptorDataInfoEXT; @@ -3991,6 +4034,9 @@ export namespace VULKAN_HPP_NAMESPACE //=== VK_EXT_primitive_topology_list_restart === using VULKAN_HPP_NAMESPACE::PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT; + //=== VK_EXT_present_mode_fifo_latest_ready === + using VULKAN_HPP_NAMESPACE::PhysicalDevicePresentModeFifoLatestReadyFeaturesEXT; + #if defined( VK_USE_PLATFORM_FUCHSIA ) //=== VK_FUCHSIA_external_memory === using VULKAN_HPP_NAMESPACE::ImportMemoryZirconHandleInfoFUCHSIA; @@ -4463,11 +4509,50 @@ export namespace VULKAN_HPP_NAMESPACE //=== VK_NV_ray_tracing_validation === using VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingValidationFeaturesNV; + //=== VK_EXT_device_generated_commands === + using VULKAN_HPP_NAMESPACE::BindIndexBufferIndirectCommandEXT; + using VULKAN_HPP_NAMESPACE::BindVertexBufferIndirectCommandEXT; + using VULKAN_HPP_NAMESPACE::DrawIndirectCountIndirectCommandEXT; + using VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoEXT; + using VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoEXT; + using VULKAN_HPP_NAMESPACE::GeneratedCommandsPipelineInfoEXT; + using VULKAN_HPP_NAMESPACE::GeneratedCommandsShaderInfoEXT; + using VULKAN_HPP_NAMESPACE::IndirectCommandsExecutionSetTokenEXT; + using VULKAN_HPP_NAMESPACE::IndirectCommandsIndexBufferTokenEXT; + using VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoEXT; + using VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenEXT; + using VULKAN_HPP_NAMESPACE::IndirectCommandsPushConstantTokenEXT; + using VULKAN_HPP_NAMESPACE::IndirectCommandsTokenDataEXT; + using VULKAN_HPP_NAMESPACE::IndirectCommandsVertexBufferTokenEXT; + using VULKAN_HPP_NAMESPACE::IndirectExecutionSetCreateInfoEXT; + using VULKAN_HPP_NAMESPACE::IndirectExecutionSetInfoEXT; + using VULKAN_HPP_NAMESPACE::IndirectExecutionSetPipelineInfoEXT; + using VULKAN_HPP_NAMESPACE::IndirectExecutionSetShaderInfoEXT; + using VULKAN_HPP_NAMESPACE::IndirectExecutionSetShaderLayoutInfoEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsPropertiesEXT; + using VULKAN_HPP_NAMESPACE::WriteIndirectExecutionSetPipelineEXT; + using VULKAN_HPP_NAMESPACE::WriteIndirectExecutionSetShaderEXT; + //=== VK_MESA_image_alignment_control === using VULKAN_HPP_NAMESPACE::ImageAlignmentControlCreateInfoMESA; using VULKAN_HPP_NAMESPACE::PhysicalDeviceImageAlignmentControlFeaturesMESA; using VULKAN_HPP_NAMESPACE::PhysicalDeviceImageAlignmentControlPropertiesMESA; + //=== VK_EXT_depth_clamp_control === + using VULKAN_HPP_NAMESPACE::DepthClampRangeEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClampControlFeaturesEXT; + using VULKAN_HPP_NAMESPACE::PipelineViewportDepthClampControlCreateInfoEXT; + + //=== VK_HUAWEI_hdr_vivid === + using 
VULKAN_HPP_NAMESPACE::HdrVividDynamicMetadataHUAWEI; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceHdrVividFeaturesHUAWEI; + + //=== VK_NV_cooperative_matrix2 === + using VULKAN_HPP_NAMESPACE::CooperativeMatrixFlexibleDimensionsPropertiesNV; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrix2FeaturesNV; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrix2PropertiesNV; + //=============== //=== HANDLEs === //=============== @@ -4573,6 +4658,10 @@ export namespace VULKAN_HPP_NAMESPACE //=== VK_KHR_pipeline_binary === using VULKAN_HPP_NAMESPACE::PipelineBinaryKHR; + //=== VK_EXT_device_generated_commands === + using VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT; + using VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT; + //====================== //=== UNIQUE HANDLEs === //====================== @@ -4673,8 +4762,12 @@ export namespace VULKAN_HPP_NAMESPACE using VULKAN_HPP_NAMESPACE::UniqueShaderEXT; //=== VK_KHR_pipeline_binary === - using VULKAN_HPP_NAMESPACE::UniqueHandleTraits; using VULKAN_HPP_NAMESPACE::UniquePipelineBinaryKHR; + + //=== VK_EXT_device_generated_commands === + using VULKAN_HPP_NAMESPACE::UniqueHandleTraits; + using VULKAN_HPP_NAMESPACE::UniqueIndirectCommandsLayoutEXT; + using VULKAN_HPP_NAMESPACE::UniqueIndirectExecutionSetEXT; #endif /*VULKAN_HPP_NO_SMART_HANDLE*/ //====================== @@ -4780,8 +4873,12 @@ export namespace VULKAN_HPP_NAMESPACE using VULKAN_HPP_NAMESPACE::SharedShaderEXT; //=== VK_KHR_pipeline_binary === - using VULKAN_HPP_NAMESPACE::SharedHandleTraits; using VULKAN_HPP_NAMESPACE::SharedPipelineBinaryKHR; + + //=== VK_EXT_device_generated_commands === + using VULKAN_HPP_NAMESPACE::SharedHandleTraits; + using VULKAN_HPP_NAMESPACE::SharedIndirectCommandsLayoutEXT; + using VULKAN_HPP_NAMESPACE::SharedIndirectExecutionSetEXT; #endif /*VULKAN_HPP_NO_SMART_HANDLE*/ //=========================== @@ -4801,7 +4898,10 @@ export namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #if VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL - using VULKAN_HPP_NAMESPACE::DynamicLoader; + namespace detail + { + using VULKAN_HPP_NAMESPACE::detail::DynamicLoader; + } // namespace detail #endif /*VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL*/ //===================== @@ -4851,9 +4951,13 @@ export namespace VULKAN_HPP_NAMESPACE //====================== using VULKAN_HPP_RAII_NAMESPACE::Context; - using VULKAN_HPP_RAII_NAMESPACE::ContextDispatcher; - using VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher; - using VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher; + + namespace detail + { + using VULKAN_HPP_RAII_NAMESPACE::detail::ContextDispatcher; + using VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher; + using VULKAN_HPP_RAII_NAMESPACE::detail::InstanceDispatcher; + } // namespace detail //==================== //=== RAII HANDLEs === @@ -4966,6 +5070,10 @@ export namespace VULKAN_HPP_NAMESPACE using VULKAN_HPP_RAII_NAMESPACE::PipelineBinaryKHR; using VULKAN_HPP_RAII_NAMESPACE::PipelineBinaryKHRs; + //=== VK_EXT_device_generated_commands === + using VULKAN_HPP_RAII_NAMESPACE::IndirectCommandsLayoutEXT; + using VULKAN_HPP_RAII_NAMESPACE::IndirectExecutionSetEXT; + } // namespace VULKAN_HPP_RAII_NAMESPACE #endif } // namespace VULKAN_HPP_NAMESPACE diff --git a/third_party/vulkan/vulkan.hpp b/third_party/vulkan/vulkan.hpp index d5744c1..6ee70ed 100644 --- a/third_party/vulkan/vulkan.hpp +++ b/third_party/vulkan/vulkan.hpp @@ -8,24 +8,30 @@ #ifndef VULKAN_HPP #define VULKAN_HPP -#include -#include // ArrayWrapperND -#include // strnlen -#include // std::string 
-#include // std::exchange -#include #include -#if 17 <= VULKAN_HPP_CPP_VERSION +#if defined( VULKAN_HPP_ENABLE_STD_MODULE ) && defined( VULKAN_HPP_STD_MODULE ) +# include +import VULKAN_HPP_STD_MODULE; +#else +# include +# include // ArrayWrapperND +# include // strnlen +# include // std::string +# include // std::exchange +#endif +#include + +#if 17 <= VULKAN_HPP_CPP_VERSION && !( defined( VULKAN_HPP_ENABLE_STD_MODULE ) && defined( VULKAN_HPP_STD_MODULE ) ) # include #endif -#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) +#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) && !( defined( VULKAN_HPP_ENABLE_STD_MODULE ) && defined( VULKAN_HPP_STD_MODULE ) ) # include // std::tie # include // std::vector #endif -#if !defined( VULKAN_HPP_NO_EXCEPTIONS ) +#if !defined( VULKAN_HPP_NO_EXCEPTIONS ) && !( defined( VULKAN_HPP_ENABLE_STD_MODULE ) && defined( VULKAN_HPP_STD_MODULE ) ) # include // std::is_error_code_enum #endif @@ -49,15 +55,15 @@ extern "C" __declspec( dllimport ) FARPROC __stdcall GetProcAddress( HINSTANCE h # endif #endif -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) && !( defined( VULKAN_HPP_ENABLE_STD_MODULE ) && defined( VULKAN_HPP_STD_MODULE ) ) # include #endif -#if defined( VULKAN_HPP_SUPPORT_SPAN ) +#if defined( VULKAN_HPP_SUPPORT_SPAN ) && !( defined( VULKAN_HPP_ENABLE_STD_MODULE ) && defined( VULKAN_HPP_STD_MODULE ) ) # include #endif -static_assert( VK_HEADER_VERSION == 295, "Wrong VK_HEADER_VERSION!" ); +static_assert( VK_HEADER_VERSION == 301, "Wrong VK_HEADER_VERSION!" ); // includes through some other header // this results in major(x) being resolved to gnu_dev_major(x) @@ -948,4977 +954,5093 @@ namespace VULKAN_HPP_NAMESPACE # endif #endif // VULKAN_HPP_DISABLE_ENHANCED_MODE - class DispatchLoaderBase + namespace detail { - public: - DispatchLoaderBase() = default; - DispatchLoaderBase( std::nullptr_t ) -#if !defined( NDEBUG ) - : m_valid( false ) -#endif + class DispatchLoaderBase { - } + public: + DispatchLoaderBase() = default; + DispatchLoaderBase( std::nullptr_t ) +#if !defined( NDEBUG ) + : m_valid( false ) +#endif + { + } #if !defined( NDEBUG ) - size_t getVkHeaderVersion() const - { - VULKAN_HPP_ASSERT( m_valid ); - return vkHeaderVersion; - } + size_t getVkHeaderVersion() const + { + VULKAN_HPP_ASSERT( m_valid ); + return vkHeaderVersion; + } - private: - size_t vkHeaderVersion = VK_HEADER_VERSION; - bool m_valid = true; + private: + size_t vkHeaderVersion = VK_HEADER_VERSION; + bool m_valid = true; #endif - }; + }; #if !defined( VK_NO_PROTOTYPES ) - class DispatchLoaderStatic : public DispatchLoaderBase - { - public: - //=== VK_VERSION_1_0 === - - VkResult - vkCreateInstance( const VkInstanceCreateInfo * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkInstance * pInstance ) const VULKAN_HPP_NOEXCEPT + class DispatchLoaderStatic : public DispatchLoaderBase { - return ::vkCreateInstance( pCreateInfo, pAllocator, pInstance ); - } + public: + //=== VK_VERSION_1_0 === - void vkDestroyInstance( VkInstance instance, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyInstance( instance, pAllocator ); - } + VkResult + vkCreateInstance( const VkInstanceCreateInfo * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkInstance * pInstance ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateInstance( pCreateInfo, pAllocator, pInstance ); + } - VkResult vkEnumeratePhysicalDevices( VkInstance instance, uint32_t * pPhysicalDeviceCount, VkPhysicalDevice * 
pPhysicalDevices ) const VULKAN_HPP_NOEXCEPT - { - return ::vkEnumeratePhysicalDevices( instance, pPhysicalDeviceCount, pPhysicalDevices ); - } + void vkDestroyInstance( VkInstance instance, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyInstance( instance, pAllocator ); + } - void vkGetPhysicalDeviceFeatures( VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures * pFeatures ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceFeatures( physicalDevice, pFeatures ); - } + VkResult vkEnumeratePhysicalDevices( VkInstance instance, uint32_t * pPhysicalDeviceCount, VkPhysicalDevice * pPhysicalDevices ) const VULKAN_HPP_NOEXCEPT + { + return ::vkEnumeratePhysicalDevices( instance, pPhysicalDeviceCount, pPhysicalDevices ); + } - void - vkGetPhysicalDeviceFormatProperties( VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties * pFormatProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceFormatProperties( physicalDevice, format, pFormatProperties ); - } + void vkGetPhysicalDeviceFeatures( VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures * pFeatures ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceFeatures( physicalDevice, pFeatures ); + } - VkResult vkGetPhysicalDeviceImageFormatProperties( VkPhysicalDevice physicalDevice, - VkFormat format, - VkImageType type, - VkImageTiling tiling, - VkImageUsageFlags usage, - VkImageCreateFlags flags, - VkImageFormatProperties * pImageFormatProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceImageFormatProperties( physicalDevice, format, type, tiling, usage, flags, pImageFormatProperties ); - } + void vkGetPhysicalDeviceFormatProperties( VkPhysicalDevice physicalDevice, + VkFormat format, + VkFormatProperties * pFormatProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceFormatProperties( physicalDevice, format, pFormatProperties ); + } - void vkGetPhysicalDeviceProperties( VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties * pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceProperties( physicalDevice, pProperties ); - } + VkResult vkGetPhysicalDeviceImageFormatProperties( VkPhysicalDevice physicalDevice, + VkFormat format, + VkImageType type, + VkImageTiling tiling, + VkImageUsageFlags usage, + VkImageCreateFlags flags, + VkImageFormatProperties * pImageFormatProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceImageFormatProperties( physicalDevice, format, type, tiling, usage, flags, pImageFormatProperties ); + } - void vkGetPhysicalDeviceQueueFamilyProperties( VkPhysicalDevice physicalDevice, - uint32_t * pQueueFamilyPropertyCount, - VkQueueFamilyProperties * pQueueFamilyProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceQueueFamilyProperties( physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties ); - } + void vkGetPhysicalDeviceProperties( VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceProperties( physicalDevice, pProperties ); + } - void vkGetPhysicalDeviceMemoryProperties( VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties * pMemoryProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceMemoryProperties( physicalDevice, pMemoryProperties ); - } + void vkGetPhysicalDeviceQueueFamilyProperties( VkPhysicalDevice physicalDevice, + uint32_t * pQueueFamilyPropertyCount, + VkQueueFamilyProperties * 
pQueueFamilyProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceQueueFamilyProperties( physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties ); + } - PFN_vkVoidFunction vkGetInstanceProcAddr( VkInstance instance, const char * pName ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetInstanceProcAddr( instance, pName ); - } + void vkGetPhysicalDeviceMemoryProperties( VkPhysicalDevice physicalDevice, + VkPhysicalDeviceMemoryProperties * pMemoryProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceMemoryProperties( physicalDevice, pMemoryProperties ); + } - PFN_vkVoidFunction vkGetDeviceProcAddr( VkDevice device, const char * pName ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDeviceProcAddr( device, pName ); - } + PFN_vkVoidFunction vkGetInstanceProcAddr( VkInstance instance, const char * pName ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetInstanceProcAddr( instance, pName ); + } - VkResult vkCreateDevice( VkPhysicalDevice physicalDevice, - const VkDeviceCreateInfo * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkDevice * pDevice ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateDevice( physicalDevice, pCreateInfo, pAllocator, pDevice ); - } + PFN_vkVoidFunction vkGetDeviceProcAddr( VkDevice device, const char * pName ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceProcAddr( device, pName ); + } - void vkDestroyDevice( VkDevice device, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyDevice( device, pAllocator ); - } + VkResult vkCreateDevice( VkPhysicalDevice physicalDevice, + const VkDeviceCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkDevice * pDevice ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateDevice( physicalDevice, pCreateInfo, pAllocator, pDevice ); + } - VkResult vkEnumerateInstanceExtensionProperties( const char * pLayerName, + void vkDestroyDevice( VkDevice device, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyDevice( device, pAllocator ); + } + + VkResult vkEnumerateInstanceExtensionProperties( const char * pLayerName, + uint32_t * pPropertyCount, + VkExtensionProperties * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkEnumerateInstanceExtensionProperties( pLayerName, pPropertyCount, pProperties ); + } + + VkResult vkEnumerateDeviceExtensionProperties( VkPhysicalDevice physicalDevice, + const char * pLayerName, uint32_t * pPropertyCount, VkExtensionProperties * pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkEnumerateInstanceExtensionProperties( pLayerName, pPropertyCount, pProperties ); - } + { + return ::vkEnumerateDeviceExtensionProperties( physicalDevice, pLayerName, pPropertyCount, pProperties ); + } - VkResult vkEnumerateDeviceExtensionProperties( VkPhysicalDevice physicalDevice, - const char * pLayerName, - uint32_t * pPropertyCount, - VkExtensionProperties * pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkEnumerateDeviceExtensionProperties( physicalDevice, pLayerName, pPropertyCount, pProperties ); - } + VkResult vkEnumerateInstanceLayerProperties( uint32_t * pPropertyCount, VkLayerProperties * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkEnumerateInstanceLayerProperties( pPropertyCount, pProperties ); + } - VkResult vkEnumerateInstanceLayerProperties( uint32_t * pPropertyCount, VkLayerProperties * pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkEnumerateInstanceLayerProperties( pPropertyCount, pProperties ); - } + VkResult 
vkEnumerateDeviceLayerProperties( VkPhysicalDevice physicalDevice, + uint32_t * pPropertyCount, + VkLayerProperties * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkEnumerateDeviceLayerProperties( physicalDevice, pPropertyCount, pProperties ); + } - VkResult - vkEnumerateDeviceLayerProperties( VkPhysicalDevice physicalDevice, uint32_t * pPropertyCount, VkLayerProperties * pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkEnumerateDeviceLayerProperties( physicalDevice, pPropertyCount, pProperties ); - } + void vkGetDeviceQueue( VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex, VkQueue * pQueue ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceQueue( device, queueFamilyIndex, queueIndex, pQueue ); + } - void vkGetDeviceQueue( VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex, VkQueue * pQueue ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDeviceQueue( device, queueFamilyIndex, queueIndex, pQueue ); - } + VkResult vkQueueSubmit( VkQueue queue, uint32_t submitCount, const VkSubmitInfo * pSubmits, VkFence fence ) const VULKAN_HPP_NOEXCEPT + { + return ::vkQueueSubmit( queue, submitCount, pSubmits, fence ); + } - VkResult vkQueueSubmit( VkQueue queue, uint32_t submitCount, const VkSubmitInfo * pSubmits, VkFence fence ) const VULKAN_HPP_NOEXCEPT - { - return ::vkQueueSubmit( queue, submitCount, pSubmits, fence ); - } + VkResult vkQueueWaitIdle( VkQueue queue ) const VULKAN_HPP_NOEXCEPT + { + return ::vkQueueWaitIdle( queue ); + } - VkResult vkQueueWaitIdle( VkQueue queue ) const VULKAN_HPP_NOEXCEPT - { - return ::vkQueueWaitIdle( queue ); - } + VkResult vkDeviceWaitIdle( VkDevice device ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDeviceWaitIdle( device ); + } - VkResult vkDeviceWaitIdle( VkDevice device ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDeviceWaitIdle( device ); - } + VkResult vkAllocateMemory( VkDevice device, + const VkMemoryAllocateInfo * pAllocateInfo, + const VkAllocationCallbacks * pAllocator, + VkDeviceMemory * pMemory ) const VULKAN_HPP_NOEXCEPT + { + return ::vkAllocateMemory( device, pAllocateInfo, pAllocator, pMemory ); + } - VkResult vkAllocateMemory( VkDevice device, - const VkMemoryAllocateInfo * pAllocateInfo, - const VkAllocationCallbacks * pAllocator, - VkDeviceMemory * pMemory ) const VULKAN_HPP_NOEXCEPT - { - return ::vkAllocateMemory( device, pAllocateInfo, pAllocator, pMemory ); - } + void vkFreeMemory( VkDevice device, VkDeviceMemory memory, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkFreeMemory( device, memory, pAllocator ); + } - void vkFreeMemory( VkDevice device, VkDeviceMemory memory, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkFreeMemory( device, memory, pAllocator ); - } + VkResult vkMapMemory( VkDevice device, VkDeviceMemory memory, VkDeviceSize offset, VkDeviceSize size, VkMemoryMapFlags flags, void ** ppData ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkMapMemory( device, memory, offset, size, flags, ppData ); + } - VkResult vkMapMemory( VkDevice device, VkDeviceMemory memory, VkDeviceSize offset, VkDeviceSize size, VkMemoryMapFlags flags, void ** ppData ) const - VULKAN_HPP_NOEXCEPT - { - return ::vkMapMemory( device, memory, offset, size, flags, ppData ); - } + void vkUnmapMemory( VkDevice device, VkDeviceMemory memory ) const VULKAN_HPP_NOEXCEPT + { + return ::vkUnmapMemory( device, memory ); + } - void vkUnmapMemory( VkDevice device, VkDeviceMemory memory ) const VULKAN_HPP_NOEXCEPT - { - return ::vkUnmapMemory( 
device, memory ); - } + VkResult vkFlushMappedMemoryRanges( VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange * pMemoryRanges ) const VULKAN_HPP_NOEXCEPT + { + return ::vkFlushMappedMemoryRanges( device, memoryRangeCount, pMemoryRanges ); + } - VkResult vkFlushMappedMemoryRanges( VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange * pMemoryRanges ) const VULKAN_HPP_NOEXCEPT - { - return ::vkFlushMappedMemoryRanges( device, memoryRangeCount, pMemoryRanges ); - } + VkResult vkInvalidateMappedMemoryRanges( VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange * pMemoryRanges ) const VULKAN_HPP_NOEXCEPT + { + return ::vkInvalidateMappedMemoryRanges( device, memoryRangeCount, pMemoryRanges ); + } - VkResult vkInvalidateMappedMemoryRanges( VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange * pMemoryRanges ) const VULKAN_HPP_NOEXCEPT - { - return ::vkInvalidateMappedMemoryRanges( device, memoryRangeCount, pMemoryRanges ); - } + void vkGetDeviceMemoryCommitment( VkDevice device, VkDeviceMemory memory, VkDeviceSize * pCommittedMemoryInBytes ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceMemoryCommitment( device, memory, pCommittedMemoryInBytes ); + } - void vkGetDeviceMemoryCommitment( VkDevice device, VkDeviceMemory memory, VkDeviceSize * pCommittedMemoryInBytes ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDeviceMemoryCommitment( device, memory, pCommittedMemoryInBytes ); - } + VkResult vkBindBufferMemory( VkDevice device, VkBuffer buffer, VkDeviceMemory memory, VkDeviceSize memoryOffset ) const VULKAN_HPP_NOEXCEPT + { + return ::vkBindBufferMemory( device, buffer, memory, memoryOffset ); + } - VkResult vkBindBufferMemory( VkDevice device, VkBuffer buffer, VkDeviceMemory memory, VkDeviceSize memoryOffset ) const VULKAN_HPP_NOEXCEPT - { - return ::vkBindBufferMemory( device, buffer, memory, memoryOffset ); - } + VkResult vkBindImageMemory( VkDevice device, VkImage image, VkDeviceMemory memory, VkDeviceSize memoryOffset ) const VULKAN_HPP_NOEXCEPT + { + return ::vkBindImageMemory( device, image, memory, memoryOffset ); + } - VkResult vkBindImageMemory( VkDevice device, VkImage image, VkDeviceMemory memory, VkDeviceSize memoryOffset ) const VULKAN_HPP_NOEXCEPT - { - return ::vkBindImageMemory( device, image, memory, memoryOffset ); - } + void vkGetBufferMemoryRequirements( VkDevice device, VkBuffer buffer, VkMemoryRequirements * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetBufferMemoryRequirements( device, buffer, pMemoryRequirements ); + } - void vkGetBufferMemoryRequirements( VkDevice device, VkBuffer buffer, VkMemoryRequirements * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetBufferMemoryRequirements( device, buffer, pMemoryRequirements ); - } + void vkGetImageMemoryRequirements( VkDevice device, VkImage image, VkMemoryRequirements * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetImageMemoryRequirements( device, image, pMemoryRequirements ); + } - void vkGetImageMemoryRequirements( VkDevice device, VkImage image, VkMemoryRequirements * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetImageMemoryRequirements( device, image, pMemoryRequirements ); - } + void vkGetImageSparseMemoryRequirements( VkDevice device, + VkImage image, + uint32_t * pSparseMemoryRequirementCount, + VkSparseImageMemoryRequirements * pSparseMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetImageSparseMemoryRequirements( device, image, 
pSparseMemoryRequirementCount, pSparseMemoryRequirements ); + } - void vkGetImageSparseMemoryRequirements( VkDevice device, - VkImage image, - uint32_t * pSparseMemoryRequirementCount, - VkSparseImageMemoryRequirements * pSparseMemoryRequirements ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetImageSparseMemoryRequirements( device, image, pSparseMemoryRequirementCount, pSparseMemoryRequirements ); - } + void vkGetPhysicalDeviceSparseImageFormatProperties( VkPhysicalDevice physicalDevice, + VkFormat format, + VkImageType type, + VkSampleCountFlagBits samples, + VkImageUsageFlags usage, + VkImageTiling tiling, + uint32_t * pPropertyCount, + VkSparseImageFormatProperties * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceSparseImageFormatProperties( physicalDevice, format, type, samples, usage, tiling, pPropertyCount, pProperties ); + } - void vkGetPhysicalDeviceSparseImageFormatProperties( VkPhysicalDevice physicalDevice, - VkFormat format, - VkImageType type, - VkSampleCountFlagBits samples, - VkImageUsageFlags usage, - VkImageTiling tiling, - uint32_t * pPropertyCount, - VkSparseImageFormatProperties * pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceSparseImageFormatProperties( physicalDevice, format, type, samples, usage, tiling, pPropertyCount, pProperties ); - } + VkResult vkQueueBindSparse( VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo * pBindInfo, VkFence fence ) const VULKAN_HPP_NOEXCEPT + { + return ::vkQueueBindSparse( queue, bindInfoCount, pBindInfo, fence ); + } - VkResult vkQueueBindSparse( VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo * pBindInfo, VkFence fence ) const VULKAN_HPP_NOEXCEPT - { - return ::vkQueueBindSparse( queue, bindInfoCount, pBindInfo, fence ); - } - - VkResult vkCreateFence( VkDevice device, - const VkFenceCreateInfo * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkFence * pFence ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateFence( device, pCreateInfo, pAllocator, pFence ); - } - - void vkDestroyFence( VkDevice device, VkFence fence, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyFence( device, fence, pAllocator ); - } - - VkResult vkResetFences( VkDevice device, uint32_t fenceCount, const VkFence * pFences ) const VULKAN_HPP_NOEXCEPT - { - return ::vkResetFences( device, fenceCount, pFences ); - } - - VkResult vkGetFenceStatus( VkDevice device, VkFence fence ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetFenceStatus( device, fence ); - } - - VkResult vkWaitForFences( VkDevice device, uint32_t fenceCount, const VkFence * pFences, VkBool32 waitAll, uint64_t timeout ) const VULKAN_HPP_NOEXCEPT - { - return ::vkWaitForFences( device, fenceCount, pFences, waitAll, timeout ); - } - - VkResult vkCreateSemaphore( VkDevice device, - const VkSemaphoreCreateInfo * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkSemaphore * pSemaphore ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateSemaphore( device, pCreateInfo, pAllocator, pSemaphore ); - } - - void vkDestroySemaphore( VkDevice device, VkSemaphore semaphore, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroySemaphore( device, semaphore, pAllocator ); - } - - VkResult vkCreateEvent( VkDevice device, - const VkEventCreateInfo * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkEvent * pEvent ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateEvent( device, pCreateInfo, pAllocator, pEvent ); - } - - void 
vkDestroyEvent( VkDevice device, VkEvent event, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyEvent( device, event, pAllocator ); - } - - VkResult vkGetEventStatus( VkDevice device, VkEvent event ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetEventStatus( device, event ); - } - - VkResult vkSetEvent( VkDevice device, VkEvent event ) const VULKAN_HPP_NOEXCEPT - { - return ::vkSetEvent( device, event ); - } - - VkResult vkResetEvent( VkDevice device, VkEvent event ) const VULKAN_HPP_NOEXCEPT - { - return ::vkResetEvent( device, event ); - } - - VkResult vkCreateQueryPool( VkDevice device, - const VkQueryPoolCreateInfo * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkQueryPool * pQueryPool ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateQueryPool( device, pCreateInfo, pAllocator, pQueryPool ); - } - - void vkDestroyQueryPool( VkDevice device, VkQueryPool queryPool, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyQueryPool( device, queryPool, pAllocator ); - } - - VkResult vkGetQueryPoolResults( VkDevice device, - VkQueryPool queryPool, - uint32_t firstQuery, - uint32_t queryCount, - size_t dataSize, - void * pData, - VkDeviceSize stride, - VkQueryResultFlags flags ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetQueryPoolResults( device, queryPool, firstQuery, queryCount, dataSize, pData, stride, flags ); - } - - VkResult vkCreateBuffer( VkDevice device, - const VkBufferCreateInfo * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkBuffer * pBuffer ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateBuffer( device, pCreateInfo, pAllocator, pBuffer ); - } - - void vkDestroyBuffer( VkDevice device, VkBuffer buffer, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyBuffer( device, buffer, pAllocator ); - } - - VkResult vkCreateBufferView( VkDevice device, - const VkBufferViewCreateInfo * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkBufferView * pView ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateBufferView( device, pCreateInfo, pAllocator, pView ); - } - - void vkDestroyBufferView( VkDevice device, VkBufferView bufferView, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyBufferView( device, bufferView, pAllocator ); - } - - VkResult vkCreateImage( VkDevice device, - const VkImageCreateInfo * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkImage * pImage ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateImage( device, pCreateInfo, pAllocator, pImage ); - } - - void vkDestroyImage( VkDevice device, VkImage image, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyImage( device, image, pAllocator ); - } - - void vkGetImageSubresourceLayout( VkDevice device, - VkImage image, - const VkImageSubresource * pSubresource, - VkSubresourceLayout * pLayout ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetImageSubresourceLayout( device, image, pSubresource, pLayout ); - } - - VkResult vkCreateImageView( VkDevice device, - const VkImageViewCreateInfo * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkImageView * pView ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateImageView( device, pCreateInfo, pAllocator, pView ); - } - - void vkDestroyImageView( VkDevice device, VkImageView imageView, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyImageView( device, imageView, 
pAllocator ); - } - - VkResult vkCreateShaderModule( VkDevice device, - const VkShaderModuleCreateInfo * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkShaderModule * pShaderModule ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateShaderModule( device, pCreateInfo, pAllocator, pShaderModule ); - } - - void vkDestroyShaderModule( VkDevice device, VkShaderModule shaderModule, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyShaderModule( device, shaderModule, pAllocator ); - } - - VkResult vkCreatePipelineCache( VkDevice device, - const VkPipelineCacheCreateInfo * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkPipelineCache * pPipelineCache ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreatePipelineCache( device, pCreateInfo, pAllocator, pPipelineCache ); - } - - void vkDestroyPipelineCache( VkDevice device, VkPipelineCache pipelineCache, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyPipelineCache( device, pipelineCache, pAllocator ); - } - - VkResult vkGetPipelineCacheData( VkDevice device, VkPipelineCache pipelineCache, size_t * pDataSize, void * pData ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPipelineCacheData( device, pipelineCache, pDataSize, pData ); - } - - VkResult - vkMergePipelineCaches( VkDevice device, VkPipelineCache dstCache, uint32_t srcCacheCount, const VkPipelineCache * pSrcCaches ) const VULKAN_HPP_NOEXCEPT - { - return ::vkMergePipelineCaches( device, dstCache, srcCacheCount, pSrcCaches ); - } - - VkResult vkCreateGraphicsPipelines( VkDevice device, - VkPipelineCache pipelineCache, - uint32_t createInfoCount, - const VkGraphicsPipelineCreateInfo * pCreateInfos, - const VkAllocationCallbacks * pAllocator, - VkPipeline * pPipelines ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateGraphicsPipelines( device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines ); - } - - VkResult vkCreateComputePipelines( VkDevice device, - VkPipelineCache pipelineCache, - uint32_t createInfoCount, - const VkComputePipelineCreateInfo * pCreateInfos, - const VkAllocationCallbacks * pAllocator, - VkPipeline * pPipelines ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateComputePipelines( device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines ); - } - - void vkDestroyPipeline( VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyPipeline( device, pipeline, pAllocator ); - } - - VkResult vkCreatePipelineLayout( VkDevice device, - const VkPipelineLayoutCreateInfo * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkPipelineLayout * pPipelineLayout ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreatePipelineLayout( device, pCreateInfo, pAllocator, pPipelineLayout ); - } - - void vkDestroyPipelineLayout( VkDevice device, VkPipelineLayout pipelineLayout, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyPipelineLayout( device, pipelineLayout, pAllocator ); - } - - VkResult vkCreateSampler( VkDevice device, - const VkSamplerCreateInfo * pCreateInfo, + VkResult vkCreateFence( VkDevice device, + const VkFenceCreateInfo * pCreateInfo, const VkAllocationCallbacks * pAllocator, - VkSampler * pSampler ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateSampler( device, pCreateInfo, pAllocator, pSampler ); - } - - void vkDestroySampler( VkDevice device, VkSampler sampler, const VkAllocationCallbacks * pAllocator ) const 
VULKAN_HPP_NOEXCEPT - { - return ::vkDestroySampler( device, sampler, pAllocator ); - } - - VkResult vkCreateDescriptorSetLayout( VkDevice device, - const VkDescriptorSetLayoutCreateInfo * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkDescriptorSetLayout * pSetLayout ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateDescriptorSetLayout( device, pCreateInfo, pAllocator, pSetLayout ); - } - - void vkDestroyDescriptorSetLayout( VkDevice device, - VkDescriptorSetLayout descriptorSetLayout, - const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyDescriptorSetLayout( device, descriptorSetLayout, pAllocator ); - } - - VkResult vkCreateDescriptorPool( VkDevice device, - const VkDescriptorPoolCreateInfo * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkDescriptorPool * pDescriptorPool ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateDescriptorPool( device, pCreateInfo, pAllocator, pDescriptorPool ); - } - - void vkDestroyDescriptorPool( VkDevice device, VkDescriptorPool descriptorPool, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyDescriptorPool( device, descriptorPool, pAllocator ); - } - - VkResult vkResetDescriptorPool( VkDevice device, VkDescriptorPool descriptorPool, VkDescriptorPoolResetFlags flags ) const VULKAN_HPP_NOEXCEPT - { - return ::vkResetDescriptorPool( device, descriptorPool, flags ); - } - - VkResult vkAllocateDescriptorSets( VkDevice device, - const VkDescriptorSetAllocateInfo * pAllocateInfo, - VkDescriptorSet * pDescriptorSets ) const VULKAN_HPP_NOEXCEPT - { - return ::vkAllocateDescriptorSets( device, pAllocateInfo, pDescriptorSets ); - } - - VkResult vkFreeDescriptorSets( VkDevice device, - VkDescriptorPool descriptorPool, - uint32_t descriptorSetCount, - const VkDescriptorSet * pDescriptorSets ) const VULKAN_HPP_NOEXCEPT - { - return ::vkFreeDescriptorSets( device, descriptorPool, descriptorSetCount, pDescriptorSets ); - } - - void vkUpdateDescriptorSets( VkDevice device, - uint32_t descriptorWriteCount, - const VkWriteDescriptorSet * pDescriptorWrites, - uint32_t descriptorCopyCount, - const VkCopyDescriptorSet * pDescriptorCopies ) const VULKAN_HPP_NOEXCEPT - { - return ::vkUpdateDescriptorSets( device, descriptorWriteCount, pDescriptorWrites, descriptorCopyCount, pDescriptorCopies ); - } - - VkResult vkCreateFramebuffer( VkDevice device, - const VkFramebufferCreateInfo * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkFramebuffer * pFramebuffer ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateFramebuffer( device, pCreateInfo, pAllocator, pFramebuffer ); - } - - void vkDestroyFramebuffer( VkDevice device, VkFramebuffer framebuffer, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyFramebuffer( device, framebuffer, pAllocator ); - } - - VkResult vkCreateRenderPass( VkDevice device, - const VkRenderPassCreateInfo * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkRenderPass * pRenderPass ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateRenderPass( device, pCreateInfo, pAllocator, pRenderPass ); - } - - void vkDestroyRenderPass( VkDevice device, VkRenderPass renderPass, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyRenderPass( device, renderPass, pAllocator ); - } - - void vkGetRenderAreaGranularity( VkDevice device, VkRenderPass renderPass, VkExtent2D * pGranularity ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetRenderAreaGranularity( device, 
renderPass, pGranularity ); - } - - VkResult vkCreateCommandPool( VkDevice device, - const VkCommandPoolCreateInfo * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkCommandPool * pCommandPool ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateCommandPool( device, pCreateInfo, pAllocator, pCommandPool ); - } - - void vkDestroyCommandPool( VkDevice device, VkCommandPool commandPool, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyCommandPool( device, commandPool, pAllocator ); - } - - VkResult vkResetCommandPool( VkDevice device, VkCommandPool commandPool, VkCommandPoolResetFlags flags ) const VULKAN_HPP_NOEXCEPT - { - return ::vkResetCommandPool( device, commandPool, flags ); - } - - VkResult vkAllocateCommandBuffers( VkDevice device, - const VkCommandBufferAllocateInfo * pAllocateInfo, - VkCommandBuffer * pCommandBuffers ) const VULKAN_HPP_NOEXCEPT - { - return ::vkAllocateCommandBuffers( device, pAllocateInfo, pCommandBuffers ); - } - - void vkFreeCommandBuffers( VkDevice device, - VkCommandPool commandPool, - uint32_t commandBufferCount, - const VkCommandBuffer * pCommandBuffers ) const VULKAN_HPP_NOEXCEPT - { - return ::vkFreeCommandBuffers( device, commandPool, commandBufferCount, pCommandBuffers ); - } - - VkResult vkBeginCommandBuffer( VkCommandBuffer commandBuffer, const VkCommandBufferBeginInfo * pBeginInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkBeginCommandBuffer( commandBuffer, pBeginInfo ); - } - - VkResult vkEndCommandBuffer( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT - { - return ::vkEndCommandBuffer( commandBuffer ); - } - - VkResult vkResetCommandBuffer( VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags ) const VULKAN_HPP_NOEXCEPT - { - return ::vkResetCommandBuffer( commandBuffer, flags ); - } - - void vkCmdBindPipeline( VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBindPipeline( commandBuffer, pipelineBindPoint, pipeline ); - } - - void - vkCmdSetViewport( VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewport * pViewports ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetViewport( commandBuffer, firstViewport, viewportCount, pViewports ); - } - - void vkCmdSetScissor( VkCommandBuffer commandBuffer, uint32_t firstScissor, uint32_t scissorCount, const VkRect2D * pScissors ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetScissor( commandBuffer, firstScissor, scissorCount, pScissors ); - } - - void vkCmdSetLineWidth( VkCommandBuffer commandBuffer, float lineWidth ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetLineWidth( commandBuffer, lineWidth ); - } - - void vkCmdSetDepthBias( VkCommandBuffer commandBuffer, - float depthBiasConstantFactor, - float depthBiasClamp, - float depthBiasSlopeFactor ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetDepthBias( commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor ); - } - - void vkCmdSetBlendConstants( VkCommandBuffer commandBuffer, const float blendConstants[4] ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetBlendConstants( commandBuffer, blendConstants ); - } - - void vkCmdSetDepthBounds( VkCommandBuffer commandBuffer, float minDepthBounds, float maxDepthBounds ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetDepthBounds( commandBuffer, minDepthBounds, maxDepthBounds ); - } - - void vkCmdSetStencilCompareMask( VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, 
uint32_t compareMask ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetStencilCompareMask( commandBuffer, faceMask, compareMask ); - } - - void vkCmdSetStencilWriteMask( VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t writeMask ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetStencilWriteMask( commandBuffer, faceMask, writeMask ); - } - - void vkCmdSetStencilReference( VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t reference ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetStencilReference( commandBuffer, faceMask, reference ); - } - - void vkCmdBindDescriptorSets( VkCommandBuffer commandBuffer, - VkPipelineBindPoint pipelineBindPoint, - VkPipelineLayout layout, - uint32_t firstSet, - uint32_t descriptorSetCount, - const VkDescriptorSet * pDescriptorSets, - uint32_t dynamicOffsetCount, - const uint32_t * pDynamicOffsets ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBindDescriptorSets( - commandBuffer, pipelineBindPoint, layout, firstSet, descriptorSetCount, pDescriptorSets, dynamicOffsetCount, pDynamicOffsets ); - } - - void vkCmdBindIndexBuffer( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkIndexType indexType ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBindIndexBuffer( commandBuffer, buffer, offset, indexType ); - } - - void vkCmdBindVertexBuffers( VkCommandBuffer commandBuffer, - uint32_t firstBinding, - uint32_t bindingCount, - const VkBuffer * pBuffers, - const VkDeviceSize * pOffsets ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBindVertexBuffers( commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets ); - } - - void vkCmdDraw( VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance ) const - VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDraw( commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance ); - } - - void vkCmdDrawIndexed( VkCommandBuffer commandBuffer, - uint32_t indexCount, - uint32_t instanceCount, - uint32_t firstIndex, - int32_t vertexOffset, - uint32_t firstInstance ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDrawIndexed( commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance ); - } - - void vkCmdDrawIndirect( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDrawIndirect( commandBuffer, buffer, offset, drawCount, stride ); - } - - void vkCmdDrawIndexedIndirect( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride ) const - VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDrawIndexedIndirect( commandBuffer, buffer, offset, drawCount, stride ); - } - - void vkCmdDispatch( VkCommandBuffer commandBuffer, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDispatch( commandBuffer, groupCountX, groupCountY, groupCountZ ); - } - - void vkCmdDispatchIndirect( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDispatchIndirect( commandBuffer, buffer, offset ); - } - - void vkCmdCopyBuffer( VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferCopy * pRegions ) const - VULKAN_HPP_NOEXCEPT - { - return ::vkCmdCopyBuffer( commandBuffer, srcBuffer, dstBuffer, regionCount, pRegions ); - } - - void vkCmdCopyImage( VkCommandBuffer commandBuffer, - VkImage srcImage, - 
VkImageLayout srcImageLayout, - VkImage dstImage, - VkImageLayout dstImageLayout, - uint32_t regionCount, - const VkImageCopy * pRegions ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdCopyImage( commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions ); - } - - void vkCmdBlitImage( VkCommandBuffer commandBuffer, - VkImage srcImage, - VkImageLayout srcImageLayout, - VkImage dstImage, - VkImageLayout dstImageLayout, - uint32_t regionCount, - const VkImageBlit * pRegions, - VkFilter filter ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBlitImage( commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions, filter ); - } - - void vkCmdCopyBufferToImage( VkCommandBuffer commandBuffer, - VkBuffer srcBuffer, - VkImage dstImage, - VkImageLayout dstImageLayout, - uint32_t regionCount, - const VkBufferImageCopy * pRegions ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdCopyBufferToImage( commandBuffer, srcBuffer, dstImage, dstImageLayout, regionCount, pRegions ); - } - - void vkCmdCopyImageToBuffer( VkCommandBuffer commandBuffer, - VkImage srcImage, - VkImageLayout srcImageLayout, - VkBuffer dstBuffer, - uint32_t regionCount, - const VkBufferImageCopy * pRegions ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdCopyImageToBuffer( commandBuffer, srcImage, srcImageLayout, dstBuffer, regionCount, pRegions ); - } - - void vkCmdUpdateBuffer( VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize dataSize, const void * pData ) const - VULKAN_HPP_NOEXCEPT - { - return ::vkCmdUpdateBuffer( commandBuffer, dstBuffer, dstOffset, dataSize, pData ); - } - - void - vkCmdFillBuffer( VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize size, uint32_t data ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdFillBuffer( commandBuffer, dstBuffer, dstOffset, size, data ); - } - - void vkCmdClearColorImage( VkCommandBuffer commandBuffer, - VkImage image, - VkImageLayout imageLayout, - const VkClearColorValue * pColor, - uint32_t rangeCount, - const VkImageSubresourceRange * pRanges ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdClearColorImage( commandBuffer, image, imageLayout, pColor, rangeCount, pRanges ); - } - - void vkCmdClearDepthStencilImage( VkCommandBuffer commandBuffer, - VkImage image, - VkImageLayout imageLayout, - const VkClearDepthStencilValue * pDepthStencil, - uint32_t rangeCount, - const VkImageSubresourceRange * pRanges ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdClearDepthStencilImage( commandBuffer, image, imageLayout, pDepthStencil, rangeCount, pRanges ); - } - - void vkCmdClearAttachments( VkCommandBuffer commandBuffer, - uint32_t attachmentCount, - const VkClearAttachment * pAttachments, - uint32_t rectCount, - const VkClearRect * pRects ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdClearAttachments( commandBuffer, attachmentCount, pAttachments, rectCount, pRects ); - } - - void vkCmdResolveImage( VkCommandBuffer commandBuffer, - VkImage srcImage, - VkImageLayout srcImageLayout, - VkImage dstImage, - VkImageLayout dstImageLayout, - uint32_t regionCount, - const VkImageResolve * pRegions ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdResolveImage( commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions ); - } - - void vkCmdSetEvent( VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetEvent( commandBuffer, event, stageMask ); - } - - void vkCmdResetEvent( 
VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdResetEvent( commandBuffer, event, stageMask ); - } - - void vkCmdWaitEvents( VkCommandBuffer commandBuffer, - uint32_t eventCount, - const VkEvent * pEvents, - VkPipelineStageFlags srcStageMask, - VkPipelineStageFlags dstStageMask, - uint32_t memoryBarrierCount, - const VkMemoryBarrier * pMemoryBarriers, - uint32_t bufferMemoryBarrierCount, - const VkBufferMemoryBarrier * pBufferMemoryBarriers, - uint32_t imageMemoryBarrierCount, - const VkImageMemoryBarrier * pImageMemoryBarriers ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdWaitEvents( commandBuffer, - eventCount, - pEvents, - srcStageMask, - dstStageMask, - memoryBarrierCount, - pMemoryBarriers, - bufferMemoryBarrierCount, - pBufferMemoryBarriers, - imageMemoryBarrierCount, - pImageMemoryBarriers ); - } - - void vkCmdPipelineBarrier( VkCommandBuffer commandBuffer, - VkPipelineStageFlags srcStageMask, - VkPipelineStageFlags dstStageMask, - VkDependencyFlags dependencyFlags, - uint32_t memoryBarrierCount, - const VkMemoryBarrier * pMemoryBarriers, - uint32_t bufferMemoryBarrierCount, - const VkBufferMemoryBarrier * pBufferMemoryBarriers, - uint32_t imageMemoryBarrierCount, - const VkImageMemoryBarrier * pImageMemoryBarriers ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdPipelineBarrier( commandBuffer, - srcStageMask, - dstStageMask, - dependencyFlags, - memoryBarrierCount, - pMemoryBarriers, - bufferMemoryBarrierCount, - pBufferMemoryBarriers, - imageMemoryBarrierCount, - pImageMemoryBarriers ); - } - - void vkCmdBeginQuery( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBeginQuery( commandBuffer, queryPool, query, flags ); - } - - void vkCmdEndQuery( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdEndQuery( commandBuffer, queryPool, query ); - } - - void vkCmdResetQueryPool( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdResetQueryPool( commandBuffer, queryPool, firstQuery, queryCount ); - } - - void vkCmdWriteTimestamp( VkCommandBuffer commandBuffer, - VkPipelineStageFlagBits pipelineStage, - VkQueryPool queryPool, - uint32_t query ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdWriteTimestamp( commandBuffer, pipelineStage, queryPool, query ); - } - - void vkCmdCopyQueryPoolResults( VkCommandBuffer commandBuffer, - VkQueryPool queryPool, - uint32_t firstQuery, - uint32_t queryCount, - VkBuffer dstBuffer, - VkDeviceSize dstOffset, - VkDeviceSize stride, - VkQueryResultFlags flags ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdCopyQueryPoolResults( commandBuffer, queryPool, firstQuery, queryCount, dstBuffer, dstOffset, stride, flags ); - } - - void vkCmdPushConstants( VkCommandBuffer commandBuffer, - VkPipelineLayout layout, - VkShaderStageFlags stageFlags, - uint32_t offset, - uint32_t size, - const void * pValues ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdPushConstants( commandBuffer, layout, stageFlags, offset, size, pValues ); - } - - void vkCmdBeginRenderPass( VkCommandBuffer commandBuffer, - const VkRenderPassBeginInfo * pRenderPassBegin, - VkSubpassContents contents ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBeginRenderPass( commandBuffer, pRenderPassBegin, contents ); - } - - void vkCmdNextSubpass( VkCommandBuffer commandBuffer, 
VkSubpassContents contents ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdNextSubpass( commandBuffer, contents ); - } - - void vkCmdEndRenderPass( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdEndRenderPass( commandBuffer ); - } - - void vkCmdExecuteCommands( VkCommandBuffer commandBuffer, uint32_t commandBufferCount, const VkCommandBuffer * pCommandBuffers ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdExecuteCommands( commandBuffer, commandBufferCount, pCommandBuffers ); - } - - //=== VK_VERSION_1_1 === - - VkResult vkEnumerateInstanceVersion( uint32_t * pApiVersion ) const VULKAN_HPP_NOEXCEPT - { - return ::vkEnumerateInstanceVersion( pApiVersion ); - } - - VkResult vkBindBufferMemory2( VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo * pBindInfos ) const VULKAN_HPP_NOEXCEPT - { - return ::vkBindBufferMemory2( device, bindInfoCount, pBindInfos ); - } - - VkResult vkBindImageMemory2( VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo * pBindInfos ) const VULKAN_HPP_NOEXCEPT - { - return ::vkBindImageMemory2( device, bindInfoCount, pBindInfos ); - } - - void vkGetDeviceGroupPeerMemoryFeatures( VkDevice device, - uint32_t heapIndex, - uint32_t localDeviceIndex, - uint32_t remoteDeviceIndex, - VkPeerMemoryFeatureFlags * pPeerMemoryFeatures ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDeviceGroupPeerMemoryFeatures( device, heapIndex, localDeviceIndex, remoteDeviceIndex, pPeerMemoryFeatures ); - } - - void vkCmdSetDeviceMask( VkCommandBuffer commandBuffer, uint32_t deviceMask ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetDeviceMask( commandBuffer, deviceMask ); - } - - void vkCmdDispatchBase( VkCommandBuffer commandBuffer, - uint32_t baseGroupX, - uint32_t baseGroupY, - uint32_t baseGroupZ, - uint32_t groupCountX, - uint32_t groupCountY, - uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDispatchBase( commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ ); - } - - VkResult vkEnumeratePhysicalDeviceGroups( VkInstance instance, - uint32_t * pPhysicalDeviceGroupCount, - VkPhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkEnumeratePhysicalDeviceGroups( instance, pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties ); - } - - void vkGetImageMemoryRequirements2( VkDevice device, - const VkImageMemoryRequirementsInfo2 * pInfo, - VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetImageMemoryRequirements2( device, pInfo, pMemoryRequirements ); - } - - void vkGetBufferMemoryRequirements2( VkDevice device, - const VkBufferMemoryRequirementsInfo2 * pInfo, - VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetBufferMemoryRequirements2( device, pInfo, pMemoryRequirements ); - } - - void vkGetImageSparseMemoryRequirements2( VkDevice device, - const VkImageSparseMemoryRequirementsInfo2 * pInfo, - uint32_t * pSparseMemoryRequirementCount, - VkSparseImageMemoryRequirements2 * pSparseMemoryRequirements ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetImageSparseMemoryRequirements2( device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements ); - } - - void vkGetPhysicalDeviceFeatures2( VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2 * pFeatures ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceFeatures2( physicalDevice, pFeatures ); - } - - void vkGetPhysicalDeviceProperties2( VkPhysicalDevice 
physicalDevice, VkPhysicalDeviceProperties2 * pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceProperties2( physicalDevice, pProperties ); - } - - void vkGetPhysicalDeviceFormatProperties2( VkPhysicalDevice physicalDevice, - VkFormat format, - VkFormatProperties2 * pFormatProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceFormatProperties2( physicalDevice, format, pFormatProperties ); - } - - VkResult vkGetPhysicalDeviceImageFormatProperties2( VkPhysicalDevice physicalDevice, - const VkPhysicalDeviceImageFormatInfo2 * pImageFormatInfo, - VkImageFormatProperties2 * pImageFormatProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceImageFormatProperties2( physicalDevice, pImageFormatInfo, pImageFormatProperties ); - } - - void vkGetPhysicalDeviceQueueFamilyProperties2( VkPhysicalDevice physicalDevice, - uint32_t * pQueueFamilyPropertyCount, - VkQueueFamilyProperties2 * pQueueFamilyProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceQueueFamilyProperties2( physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties ); - } - - void vkGetPhysicalDeviceMemoryProperties2( VkPhysicalDevice physicalDevice, - VkPhysicalDeviceMemoryProperties2 * pMemoryProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceMemoryProperties2( physicalDevice, pMemoryProperties ); - } - - void vkGetPhysicalDeviceSparseImageFormatProperties2( VkPhysicalDevice physicalDevice, - const VkPhysicalDeviceSparseImageFormatInfo2 * pFormatInfo, - uint32_t * pPropertyCount, - VkSparseImageFormatProperties2 * pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceSparseImageFormatProperties2( physicalDevice, pFormatInfo, pPropertyCount, pProperties ); - } - - void vkTrimCommandPool( VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags ) const VULKAN_HPP_NOEXCEPT - { - return ::vkTrimCommandPool( device, commandPool, flags ); - } - - void vkGetDeviceQueue2( VkDevice device, const VkDeviceQueueInfo2 * pQueueInfo, VkQueue * pQueue ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDeviceQueue2( device, pQueueInfo, pQueue ); - } - - VkResult vkCreateSamplerYcbcrConversion( VkDevice device, - const VkSamplerYcbcrConversionCreateInfo * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkSamplerYcbcrConversion * pYcbcrConversion ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateSamplerYcbcrConversion( device, pCreateInfo, pAllocator, pYcbcrConversion ); - } - - void vkDestroySamplerYcbcrConversion( VkDevice device, - VkSamplerYcbcrConversion ycbcrConversion, - const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroySamplerYcbcrConversion( device, ycbcrConversion, pAllocator ); - } - - VkResult vkCreateDescriptorUpdateTemplate( VkDevice device, - const VkDescriptorUpdateTemplateCreateInfo * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkDescriptorUpdateTemplate * pDescriptorUpdateTemplate ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateDescriptorUpdateTemplate( device, pCreateInfo, pAllocator, pDescriptorUpdateTemplate ); - } - - void vkDestroyDescriptorUpdateTemplate( VkDevice device, - VkDescriptorUpdateTemplate descriptorUpdateTemplate, + VkFence * pFence ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateFence( device, pCreateInfo, pAllocator, pFence ); + } + + void vkDestroyFence( VkDevice device, VkFence fence, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyFence( device, fence, 
pAllocator ); + } + + VkResult vkResetFences( VkDevice device, uint32_t fenceCount, const VkFence * pFences ) const VULKAN_HPP_NOEXCEPT + { + return ::vkResetFences( device, fenceCount, pFences ); + } + + VkResult vkGetFenceStatus( VkDevice device, VkFence fence ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetFenceStatus( device, fence ); + } + + VkResult vkWaitForFences( VkDevice device, uint32_t fenceCount, const VkFence * pFences, VkBool32 waitAll, uint64_t timeout ) const VULKAN_HPP_NOEXCEPT + { + return ::vkWaitForFences( device, fenceCount, pFences, waitAll, timeout ); + } + + VkResult vkCreateSemaphore( VkDevice device, + const VkSemaphoreCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSemaphore * pSemaphore ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateSemaphore( device, pCreateInfo, pAllocator, pSemaphore ); + } + + void vkDestroySemaphore( VkDevice device, VkSemaphore semaphore, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroySemaphore( device, semaphore, pAllocator ); + } + + VkResult vkCreateEvent( VkDevice device, + const VkEventCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkEvent * pEvent ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateEvent( device, pCreateInfo, pAllocator, pEvent ); + } + + void vkDestroyEvent( VkDevice device, VkEvent event, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyEvent( device, event, pAllocator ); + } + + VkResult vkGetEventStatus( VkDevice device, VkEvent event ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetEventStatus( device, event ); + } + + VkResult vkSetEvent( VkDevice device, VkEvent event ) const VULKAN_HPP_NOEXCEPT + { + return ::vkSetEvent( device, event ); + } + + VkResult vkResetEvent( VkDevice device, VkEvent event ) const VULKAN_HPP_NOEXCEPT + { + return ::vkResetEvent( device, event ); + } + + VkResult vkCreateQueryPool( VkDevice device, + const VkQueryPoolCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkQueryPool * pQueryPool ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateQueryPool( device, pCreateInfo, pAllocator, pQueryPool ); + } + + void vkDestroyQueryPool( VkDevice device, VkQueryPool queryPool, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyQueryPool( device, queryPool, pAllocator ); + } + + VkResult vkGetQueryPoolResults( VkDevice device, + VkQueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + size_t dataSize, + void * pData, + VkDeviceSize stride, + VkQueryResultFlags flags ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetQueryPoolResults( device, queryPool, firstQuery, queryCount, dataSize, pData, stride, flags ); + } + + VkResult vkCreateBuffer( VkDevice device, + const VkBufferCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkBuffer * pBuffer ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateBuffer( device, pCreateInfo, pAllocator, pBuffer ); + } + + void vkDestroyBuffer( VkDevice device, VkBuffer buffer, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyBuffer( device, buffer, pAllocator ); + } + + VkResult vkCreateBufferView( VkDevice device, + const VkBufferViewCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkBufferView * pView ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateBufferView( device, pCreateInfo, pAllocator, pView ); + } + + void vkDestroyBufferView( VkDevice device, 
VkBufferView bufferView, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyBufferView( device, bufferView, pAllocator ); + } + + VkResult vkCreateImage( VkDevice device, + const VkImageCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkImage * pImage ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateImage( device, pCreateInfo, pAllocator, pImage ); + } + + void vkDestroyImage( VkDevice device, VkImage image, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyImage( device, image, pAllocator ); + } + + void vkGetImageSubresourceLayout( VkDevice device, + VkImage image, + const VkImageSubresource * pSubresource, + VkSubresourceLayout * pLayout ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetImageSubresourceLayout( device, image, pSubresource, pLayout ); + } + + VkResult vkCreateImageView( VkDevice device, + const VkImageViewCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkImageView * pView ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateImageView( device, pCreateInfo, pAllocator, pView ); + } + + void vkDestroyImageView( VkDevice device, VkImageView imageView, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyImageView( device, imageView, pAllocator ); + } + + VkResult vkCreateShaderModule( VkDevice device, + const VkShaderModuleCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkShaderModule * pShaderModule ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateShaderModule( device, pCreateInfo, pAllocator, pShaderModule ); + } + + void vkDestroyShaderModule( VkDevice device, VkShaderModule shaderModule, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyShaderModule( device, shaderModule, pAllocator ); + } + + VkResult vkCreatePipelineCache( VkDevice device, + const VkPipelineCacheCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkPipelineCache * pPipelineCache ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreatePipelineCache( device, pCreateInfo, pAllocator, pPipelineCache ); + } + + void vkDestroyPipelineCache( VkDevice device, VkPipelineCache pipelineCache, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyPipelineCache( device, pipelineCache, pAllocator ); + } + + VkResult vkGetPipelineCacheData( VkDevice device, VkPipelineCache pipelineCache, size_t * pDataSize, void * pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPipelineCacheData( device, pipelineCache, pDataSize, pData ); + } + + VkResult + vkMergePipelineCaches( VkDevice device, VkPipelineCache dstCache, uint32_t srcCacheCount, const VkPipelineCache * pSrcCaches ) const VULKAN_HPP_NOEXCEPT + { + return ::vkMergePipelineCaches( device, dstCache, srcCacheCount, pSrcCaches ); + } + + VkResult vkCreateGraphicsPipelines( VkDevice device, + VkPipelineCache pipelineCache, + uint32_t createInfoCount, + const VkGraphicsPipelineCreateInfo * pCreateInfos, + const VkAllocationCallbacks * pAllocator, + VkPipeline * pPipelines ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateGraphicsPipelines( device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines ); + } + + VkResult vkCreateComputePipelines( VkDevice device, + VkPipelineCache pipelineCache, + uint32_t createInfoCount, + const VkComputePipelineCreateInfo * pCreateInfos, + const VkAllocationCallbacks * pAllocator, + VkPipeline * pPipelines ) const 
VULKAN_HPP_NOEXCEPT + { + return ::vkCreateComputePipelines( device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines ); + } + + void vkDestroyPipeline( VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyPipeline( device, pipeline, pAllocator ); + } + + VkResult vkCreatePipelineLayout( VkDevice device, + const VkPipelineLayoutCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkPipelineLayout * pPipelineLayout ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreatePipelineLayout( device, pCreateInfo, pAllocator, pPipelineLayout ); + } + + void vkDestroyPipelineLayout( VkDevice device, VkPipelineLayout pipelineLayout, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyPipelineLayout( device, pipelineLayout, pAllocator ); + } + + VkResult vkCreateSampler( VkDevice device, + const VkSamplerCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSampler * pSampler ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateSampler( device, pCreateInfo, pAllocator, pSampler ); + } + + void vkDestroySampler( VkDevice device, VkSampler sampler, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroySampler( device, sampler, pAllocator ); + } + + VkResult vkCreateDescriptorSetLayout( VkDevice device, + const VkDescriptorSetLayoutCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkDescriptorSetLayout * pSetLayout ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateDescriptorSetLayout( device, pCreateInfo, pAllocator, pSetLayout ); + } + + void vkDestroyDescriptorSetLayout( VkDevice device, + VkDescriptorSetLayout descriptorSetLayout, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyDescriptorSetLayout( device, descriptorSetLayout, pAllocator ); + } + + VkResult vkCreateDescriptorPool( VkDevice device, + const VkDescriptorPoolCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkDescriptorPool * pDescriptorPool ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateDescriptorPool( device, pCreateInfo, pAllocator, pDescriptorPool ); + } + + void vkDestroyDescriptorPool( VkDevice device, VkDescriptorPool descriptorPool, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyDescriptorPool( device, descriptorPool, pAllocator ); + } + + VkResult vkResetDescriptorPool( VkDevice device, VkDescriptorPool descriptorPool, VkDescriptorPoolResetFlags flags ) const VULKAN_HPP_NOEXCEPT + { + return ::vkResetDescriptorPool( device, descriptorPool, flags ); + } + + VkResult vkAllocateDescriptorSets( VkDevice device, + const VkDescriptorSetAllocateInfo * pAllocateInfo, + VkDescriptorSet * pDescriptorSets ) const VULKAN_HPP_NOEXCEPT + { + return ::vkAllocateDescriptorSets( device, pAllocateInfo, pDescriptorSets ); + } + + VkResult vkFreeDescriptorSets( VkDevice device, + VkDescriptorPool descriptorPool, + uint32_t descriptorSetCount, + const VkDescriptorSet * pDescriptorSets ) const VULKAN_HPP_NOEXCEPT + { + return ::vkFreeDescriptorSets( device, descriptorPool, descriptorSetCount, pDescriptorSets ); + } + + void vkUpdateDescriptorSets( VkDevice device, + uint32_t descriptorWriteCount, + const VkWriteDescriptorSet * pDescriptorWrites, + uint32_t descriptorCopyCount, + const VkCopyDescriptorSet * pDescriptorCopies ) const VULKAN_HPP_NOEXCEPT + { + return ::vkUpdateDescriptorSets( device, 
descriptorWriteCount, pDescriptorWrites, descriptorCopyCount, pDescriptorCopies ); + } + + VkResult vkCreateFramebuffer( VkDevice device, + const VkFramebufferCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkFramebuffer * pFramebuffer ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateFramebuffer( device, pCreateInfo, pAllocator, pFramebuffer ); + } + + void vkDestroyFramebuffer( VkDevice device, VkFramebuffer framebuffer, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyFramebuffer( device, framebuffer, pAllocator ); + } + + VkResult vkCreateRenderPass( VkDevice device, + const VkRenderPassCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkRenderPass * pRenderPass ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateRenderPass( device, pCreateInfo, pAllocator, pRenderPass ); + } + + void vkDestroyRenderPass( VkDevice device, VkRenderPass renderPass, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyRenderPass( device, renderPass, pAllocator ); + } + + void vkGetRenderAreaGranularity( VkDevice device, VkRenderPass renderPass, VkExtent2D * pGranularity ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetRenderAreaGranularity( device, renderPass, pGranularity ); + } + + VkResult vkCreateCommandPool( VkDevice device, + const VkCommandPoolCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkCommandPool * pCommandPool ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateCommandPool( device, pCreateInfo, pAllocator, pCommandPool ); + } + + void vkDestroyCommandPool( VkDevice device, VkCommandPool commandPool, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyCommandPool( device, commandPool, pAllocator ); + } + + VkResult vkResetCommandPool( VkDevice device, VkCommandPool commandPool, VkCommandPoolResetFlags flags ) const VULKAN_HPP_NOEXCEPT + { + return ::vkResetCommandPool( device, commandPool, flags ); + } + + VkResult vkAllocateCommandBuffers( VkDevice device, + const VkCommandBufferAllocateInfo * pAllocateInfo, + VkCommandBuffer * pCommandBuffers ) const VULKAN_HPP_NOEXCEPT + { + return ::vkAllocateCommandBuffers( device, pAllocateInfo, pCommandBuffers ); + } + + void vkFreeCommandBuffers( VkDevice device, + VkCommandPool commandPool, + uint32_t commandBufferCount, + const VkCommandBuffer * pCommandBuffers ) const VULKAN_HPP_NOEXCEPT + { + return ::vkFreeCommandBuffers( device, commandPool, commandBufferCount, pCommandBuffers ); + } + + VkResult vkBeginCommandBuffer( VkCommandBuffer commandBuffer, const VkCommandBufferBeginInfo * pBeginInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkBeginCommandBuffer( commandBuffer, pBeginInfo ); + } + + VkResult vkEndCommandBuffer( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT + { + return ::vkEndCommandBuffer( commandBuffer ); + } + + VkResult vkResetCommandBuffer( VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags ) const VULKAN_HPP_NOEXCEPT + { + return ::vkResetCommandBuffer( commandBuffer, flags ); + } + + void vkCmdBindPipeline( VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBindPipeline( commandBuffer, pipelineBindPoint, pipeline ); + } + + void vkCmdSetViewport( VkCommandBuffer commandBuffer, + uint32_t firstViewport, + uint32_t viewportCount, + const VkViewport * pViewports ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetViewport( 
commandBuffer, firstViewport, viewportCount, pViewports ); + } + + void vkCmdSetScissor( VkCommandBuffer commandBuffer, uint32_t firstScissor, uint32_t scissorCount, const VkRect2D * pScissors ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetScissor( commandBuffer, firstScissor, scissorCount, pScissors ); + } + + void vkCmdSetLineWidth( VkCommandBuffer commandBuffer, float lineWidth ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetLineWidth( commandBuffer, lineWidth ); + } + + void vkCmdSetDepthBias( VkCommandBuffer commandBuffer, + float depthBiasConstantFactor, + float depthBiasClamp, + float depthBiasSlopeFactor ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDepthBias( commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor ); + } + + void vkCmdSetBlendConstants( VkCommandBuffer commandBuffer, const float blendConstants[4] ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetBlendConstants( commandBuffer, blendConstants ); + } + + void vkCmdSetDepthBounds( VkCommandBuffer commandBuffer, float minDepthBounds, float maxDepthBounds ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDepthBounds( commandBuffer, minDepthBounds, maxDepthBounds ); + } + + void vkCmdSetStencilCompareMask( VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t compareMask ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetStencilCompareMask( commandBuffer, faceMask, compareMask ); + } + + void vkCmdSetStencilWriteMask( VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t writeMask ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetStencilWriteMask( commandBuffer, faceMask, writeMask ); + } + + void vkCmdSetStencilReference( VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t reference ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetStencilReference( commandBuffer, faceMask, reference ); + } + + void vkCmdBindDescriptorSets( VkCommandBuffer commandBuffer, + VkPipelineBindPoint pipelineBindPoint, + VkPipelineLayout layout, + uint32_t firstSet, + uint32_t descriptorSetCount, + const VkDescriptorSet * pDescriptorSets, + uint32_t dynamicOffsetCount, + const uint32_t * pDynamicOffsets ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBindDescriptorSets( + commandBuffer, pipelineBindPoint, layout, firstSet, descriptorSetCount, pDescriptorSets, dynamicOffsetCount, pDynamicOffsets ); + } + + void vkCmdBindIndexBuffer( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkIndexType indexType ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBindIndexBuffer( commandBuffer, buffer, offset, indexType ); + } + + void vkCmdBindVertexBuffers( VkCommandBuffer commandBuffer, + uint32_t firstBinding, + uint32_t bindingCount, + const VkBuffer * pBuffers, + const VkDeviceSize * pOffsets ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBindVertexBuffers( commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets ); + } + + void vkCmdDraw( VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDraw( commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance ); + } + + void vkCmdDrawIndexed( VkCommandBuffer commandBuffer, + uint32_t indexCount, + uint32_t instanceCount, + uint32_t firstIndex, + int32_t vertexOffset, + uint32_t firstInstance ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawIndexed( commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance ); + } + + void + vkCmdDrawIndirect( 
VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawIndirect( commandBuffer, buffer, offset, drawCount, stride ); + } + + void vkCmdDrawIndexedIndirect( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawIndexedIndirect( commandBuffer, buffer, offset, drawCount, stride ); + } + + void vkCmdDispatch( VkCommandBuffer commandBuffer, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDispatch( commandBuffer, groupCountX, groupCountY, groupCountZ ); + } + + void vkCmdDispatchIndirect( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDispatchIndirect( commandBuffer, buffer, offset ); + } + + void vkCmdCopyBuffer( VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferCopy * pRegions ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyBuffer( commandBuffer, srcBuffer, dstBuffer, regionCount, pRegions ); + } + + void vkCmdCopyImage( VkCommandBuffer commandBuffer, + VkImage srcImage, + VkImageLayout srcImageLayout, + VkImage dstImage, + VkImageLayout dstImageLayout, + uint32_t regionCount, + const VkImageCopy * pRegions ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyImage( commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions ); + } + + void vkCmdBlitImage( VkCommandBuffer commandBuffer, + VkImage srcImage, + VkImageLayout srcImageLayout, + VkImage dstImage, + VkImageLayout dstImageLayout, + uint32_t regionCount, + const VkImageBlit * pRegions, + VkFilter filter ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBlitImage( commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions, filter ); + } + + void vkCmdCopyBufferToImage( VkCommandBuffer commandBuffer, + VkBuffer srcBuffer, + VkImage dstImage, + VkImageLayout dstImageLayout, + uint32_t regionCount, + const VkBufferImageCopy * pRegions ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyBufferToImage( commandBuffer, srcBuffer, dstImage, dstImageLayout, regionCount, pRegions ); + } + + void vkCmdCopyImageToBuffer( VkCommandBuffer commandBuffer, + VkImage srcImage, + VkImageLayout srcImageLayout, + VkBuffer dstBuffer, + uint32_t regionCount, + const VkBufferImageCopy * pRegions ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyImageToBuffer( commandBuffer, srcImage, srcImageLayout, dstBuffer, regionCount, pRegions ); + } + + void vkCmdUpdateBuffer( VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize dataSize, const void * pData ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkCmdUpdateBuffer( commandBuffer, dstBuffer, dstOffset, dataSize, pData ); + } + + void + vkCmdFillBuffer( VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize size, uint32_t data ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdFillBuffer( commandBuffer, dstBuffer, dstOffset, size, data ); + } + + void vkCmdClearColorImage( VkCommandBuffer commandBuffer, + VkImage image, + VkImageLayout imageLayout, + const VkClearColorValue * pColor, + uint32_t rangeCount, + const VkImageSubresourceRange * pRanges ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdClearColorImage( commandBuffer, image, imageLayout, pColor, rangeCount, pRanges ); + } + + void 
vkCmdClearDepthStencilImage( VkCommandBuffer commandBuffer, + VkImage image, + VkImageLayout imageLayout, + const VkClearDepthStencilValue * pDepthStencil, + uint32_t rangeCount, + const VkImageSubresourceRange * pRanges ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdClearDepthStencilImage( commandBuffer, image, imageLayout, pDepthStencil, rangeCount, pRanges ); + } + + void vkCmdClearAttachments( VkCommandBuffer commandBuffer, + uint32_t attachmentCount, + const VkClearAttachment * pAttachments, + uint32_t rectCount, + const VkClearRect * pRects ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdClearAttachments( commandBuffer, attachmentCount, pAttachments, rectCount, pRects ); + } + + void vkCmdResolveImage( VkCommandBuffer commandBuffer, + VkImage srcImage, + VkImageLayout srcImageLayout, + VkImage dstImage, + VkImageLayout dstImageLayout, + uint32_t regionCount, + const VkImageResolve * pRegions ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdResolveImage( commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions ); + } + + void vkCmdSetEvent( VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetEvent( commandBuffer, event, stageMask ); + } + + void vkCmdResetEvent( VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdResetEvent( commandBuffer, event, stageMask ); + } + + void vkCmdWaitEvents( VkCommandBuffer commandBuffer, + uint32_t eventCount, + const VkEvent * pEvents, + VkPipelineStageFlags srcStageMask, + VkPipelineStageFlags dstStageMask, + uint32_t memoryBarrierCount, + const VkMemoryBarrier * pMemoryBarriers, + uint32_t bufferMemoryBarrierCount, + const VkBufferMemoryBarrier * pBufferMemoryBarriers, + uint32_t imageMemoryBarrierCount, + const VkImageMemoryBarrier * pImageMemoryBarriers ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdWaitEvents( commandBuffer, + eventCount, + pEvents, + srcStageMask, + dstStageMask, + memoryBarrierCount, + pMemoryBarriers, + bufferMemoryBarrierCount, + pBufferMemoryBarriers, + imageMemoryBarrierCount, + pImageMemoryBarriers ); + } + + void vkCmdPipelineBarrier( VkCommandBuffer commandBuffer, + VkPipelineStageFlags srcStageMask, + VkPipelineStageFlags dstStageMask, + VkDependencyFlags dependencyFlags, + uint32_t memoryBarrierCount, + const VkMemoryBarrier * pMemoryBarriers, + uint32_t bufferMemoryBarrierCount, + const VkBufferMemoryBarrier * pBufferMemoryBarriers, + uint32_t imageMemoryBarrierCount, + const VkImageMemoryBarrier * pImageMemoryBarriers ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdPipelineBarrier( commandBuffer, + srcStageMask, + dstStageMask, + dependencyFlags, + memoryBarrierCount, + pMemoryBarriers, + bufferMemoryBarrierCount, + pBufferMemoryBarriers, + imageMemoryBarrierCount, + pImageMemoryBarriers ); + } + + void vkCmdBeginQuery( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBeginQuery( commandBuffer, queryPool, query, flags ); + } + + void vkCmdEndQuery( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdEndQuery( commandBuffer, queryPool, query ); + } + + void vkCmdResetQueryPool( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdResetQueryPool( commandBuffer, queryPool, firstQuery, queryCount 
); + } + + void vkCmdWriteTimestamp( VkCommandBuffer commandBuffer, + VkPipelineStageFlagBits pipelineStage, + VkQueryPool queryPool, + uint32_t query ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdWriteTimestamp( commandBuffer, pipelineStage, queryPool, query ); + } + + void vkCmdCopyQueryPoolResults( VkCommandBuffer commandBuffer, + VkQueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + VkBuffer dstBuffer, + VkDeviceSize dstOffset, + VkDeviceSize stride, + VkQueryResultFlags flags ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyQueryPoolResults( commandBuffer, queryPool, firstQuery, queryCount, dstBuffer, dstOffset, stride, flags ); + } + + void vkCmdPushConstants( VkCommandBuffer commandBuffer, + VkPipelineLayout layout, + VkShaderStageFlags stageFlags, + uint32_t offset, + uint32_t size, + const void * pValues ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdPushConstants( commandBuffer, layout, stageFlags, offset, size, pValues ); + } + + void vkCmdBeginRenderPass( VkCommandBuffer commandBuffer, + const VkRenderPassBeginInfo * pRenderPassBegin, + VkSubpassContents contents ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBeginRenderPass( commandBuffer, pRenderPassBegin, contents ); + } + + void vkCmdNextSubpass( VkCommandBuffer commandBuffer, VkSubpassContents contents ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdNextSubpass( commandBuffer, contents ); + } + + void vkCmdEndRenderPass( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdEndRenderPass( commandBuffer ); + } + + void vkCmdExecuteCommands( VkCommandBuffer commandBuffer, uint32_t commandBufferCount, const VkCommandBuffer * pCommandBuffers ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdExecuteCommands( commandBuffer, commandBufferCount, pCommandBuffers ); + } + + //=== VK_VERSION_1_1 === + + VkResult vkEnumerateInstanceVersion( uint32_t * pApiVersion ) const VULKAN_HPP_NOEXCEPT + { + return ::vkEnumerateInstanceVersion( pApiVersion ); + } + + VkResult vkBindBufferMemory2( VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo * pBindInfos ) const VULKAN_HPP_NOEXCEPT + { + return ::vkBindBufferMemory2( device, bindInfoCount, pBindInfos ); + } + + VkResult vkBindImageMemory2( VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo * pBindInfos ) const VULKAN_HPP_NOEXCEPT + { + return ::vkBindImageMemory2( device, bindInfoCount, pBindInfos ); + } + + void vkGetDeviceGroupPeerMemoryFeatures( VkDevice device, + uint32_t heapIndex, + uint32_t localDeviceIndex, + uint32_t remoteDeviceIndex, + VkPeerMemoryFeatureFlags * pPeerMemoryFeatures ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceGroupPeerMemoryFeatures( device, heapIndex, localDeviceIndex, remoteDeviceIndex, pPeerMemoryFeatures ); + } + + void vkCmdSetDeviceMask( VkCommandBuffer commandBuffer, uint32_t deviceMask ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDeviceMask( commandBuffer, deviceMask ); + } + + void vkCmdDispatchBase( VkCommandBuffer commandBuffer, + uint32_t baseGroupX, + uint32_t baseGroupY, + uint32_t baseGroupZ, + uint32_t groupCountX, + uint32_t groupCountY, + uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDispatchBase( commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ ); + } + + VkResult vkEnumeratePhysicalDeviceGroups( VkInstance instance, + uint32_t * pPhysicalDeviceGroupCount, + VkPhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties ) const VULKAN_HPP_NOEXCEPT + { + return 
::vkEnumeratePhysicalDeviceGroups( instance, pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties ); + } + + void vkGetImageMemoryRequirements2( VkDevice device, + const VkImageMemoryRequirementsInfo2 * pInfo, + VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetImageMemoryRequirements2( device, pInfo, pMemoryRequirements ); + } + + void vkGetBufferMemoryRequirements2( VkDevice device, + const VkBufferMemoryRequirementsInfo2 * pInfo, + VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetBufferMemoryRequirements2( device, pInfo, pMemoryRequirements ); + } + + void vkGetImageSparseMemoryRequirements2( VkDevice device, + const VkImageSparseMemoryRequirementsInfo2 * pInfo, + uint32_t * pSparseMemoryRequirementCount, + VkSparseImageMemoryRequirements2 * pSparseMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetImageSparseMemoryRequirements2( device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements ); + } + + void vkGetPhysicalDeviceFeatures2( VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2 * pFeatures ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceFeatures2( physicalDevice, pFeatures ); + } + + void vkGetPhysicalDeviceProperties2( VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2 * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceProperties2( physicalDevice, pProperties ); + } + + void vkGetPhysicalDeviceFormatProperties2( VkPhysicalDevice physicalDevice, + VkFormat format, + VkFormatProperties2 * pFormatProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceFormatProperties2( physicalDevice, format, pFormatProperties ); + } + + VkResult vkGetPhysicalDeviceImageFormatProperties2( VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceImageFormatInfo2 * pImageFormatInfo, + VkImageFormatProperties2 * pImageFormatProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceImageFormatProperties2( physicalDevice, pImageFormatInfo, pImageFormatProperties ); + } + + void vkGetPhysicalDeviceQueueFamilyProperties2( VkPhysicalDevice physicalDevice, + uint32_t * pQueueFamilyPropertyCount, + VkQueueFamilyProperties2 * pQueueFamilyProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceQueueFamilyProperties2( physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties ); + } + + void vkGetPhysicalDeviceMemoryProperties2( VkPhysicalDevice physicalDevice, + VkPhysicalDeviceMemoryProperties2 * pMemoryProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceMemoryProperties2( physicalDevice, pMemoryProperties ); + } + + void vkGetPhysicalDeviceSparseImageFormatProperties2( VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceSparseImageFormatInfo2 * pFormatInfo, + uint32_t * pPropertyCount, + VkSparseImageFormatProperties2 * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceSparseImageFormatProperties2( physicalDevice, pFormatInfo, pPropertyCount, pProperties ); + } + + void vkTrimCommandPool( VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags ) const VULKAN_HPP_NOEXCEPT + { + return ::vkTrimCommandPool( device, commandPool, flags ); + } + + void vkGetDeviceQueue2( VkDevice device, const VkDeviceQueueInfo2 * pQueueInfo, VkQueue * pQueue ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceQueue2( device, pQueueInfo, pQueue ); + } + + VkResult vkCreateSamplerYcbcrConversion( VkDevice device, + const 
VkSamplerYcbcrConversionCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSamplerYcbcrConversion * pYcbcrConversion ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateSamplerYcbcrConversion( device, pCreateInfo, pAllocator, pYcbcrConversion ); + } + + void vkDestroySamplerYcbcrConversion( VkDevice device, + VkSamplerYcbcrConversion ycbcrConversion, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyDescriptorUpdateTemplate( device, descriptorUpdateTemplate, pAllocator ); - } - - void vkUpdateDescriptorSetWithTemplate( VkDevice device, - VkDescriptorSet descriptorSet, - VkDescriptorUpdateTemplate descriptorUpdateTemplate, - const void * pData ) const VULKAN_HPP_NOEXCEPT - { - return ::vkUpdateDescriptorSetWithTemplate( device, descriptorSet, descriptorUpdateTemplate, pData ); - } - - void vkGetPhysicalDeviceExternalBufferProperties( VkPhysicalDevice physicalDevice, - const VkPhysicalDeviceExternalBufferInfo * pExternalBufferInfo, - VkExternalBufferProperties * pExternalBufferProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceExternalBufferProperties( physicalDevice, pExternalBufferInfo, pExternalBufferProperties ); - } - - void vkGetPhysicalDeviceExternalFenceProperties( VkPhysicalDevice physicalDevice, - const VkPhysicalDeviceExternalFenceInfo * pExternalFenceInfo, - VkExternalFenceProperties * pExternalFenceProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceExternalFenceProperties( physicalDevice, pExternalFenceInfo, pExternalFenceProperties ); - } - - void vkGetPhysicalDeviceExternalSemaphoreProperties( VkPhysicalDevice physicalDevice, - const VkPhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo, - VkExternalSemaphoreProperties * pExternalSemaphoreProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceExternalSemaphoreProperties( physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties ); - } - - void vkGetDescriptorSetLayoutSupport( VkDevice device, - const VkDescriptorSetLayoutCreateInfo * pCreateInfo, - VkDescriptorSetLayoutSupport * pSupport ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDescriptorSetLayoutSupport( device, pCreateInfo, pSupport ); - } - - //=== VK_VERSION_1_2 === - - void vkCmdDrawIndirectCount( VkCommandBuffer commandBuffer, - VkBuffer buffer, - VkDeviceSize offset, - VkBuffer countBuffer, - VkDeviceSize countBufferOffset, - uint32_t maxDrawCount, - uint32_t stride ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDrawIndirectCount( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); - } - - void vkCmdDrawIndexedIndirectCount( VkCommandBuffer commandBuffer, - VkBuffer buffer, - VkDeviceSize offset, - VkBuffer countBuffer, - VkDeviceSize countBufferOffset, - uint32_t maxDrawCount, - uint32_t stride ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDrawIndexedIndirectCount( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); - } - - VkResult vkCreateRenderPass2( VkDevice device, - const VkRenderPassCreateInfo2 * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkRenderPass * pRenderPass ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateRenderPass2( device, pCreateInfo, pAllocator, pRenderPass ); - } - - void vkCmdBeginRenderPass2( VkCommandBuffer commandBuffer, - const VkRenderPassBeginInfo * pRenderPassBegin, - const VkSubpassBeginInfo * pSubpassBeginInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBeginRenderPass2( commandBuffer, 
pRenderPassBegin, pSubpassBeginInfo ); - } - - void vkCmdNextSubpass2( VkCommandBuffer commandBuffer, - const VkSubpassBeginInfo * pSubpassBeginInfo, - const VkSubpassEndInfo * pSubpassEndInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdNextSubpass2( commandBuffer, pSubpassBeginInfo, pSubpassEndInfo ); - } - - void vkCmdEndRenderPass2( VkCommandBuffer commandBuffer, const VkSubpassEndInfo * pSubpassEndInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdEndRenderPass2( commandBuffer, pSubpassEndInfo ); - } - - void vkResetQueryPool( VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT - { - return ::vkResetQueryPool( device, queryPool, firstQuery, queryCount ); - } - - VkResult vkGetSemaphoreCounterValue( VkDevice device, VkSemaphore semaphore, uint64_t * pValue ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetSemaphoreCounterValue( device, semaphore, pValue ); - } - - VkResult vkWaitSemaphores( VkDevice device, const VkSemaphoreWaitInfo * pWaitInfo, uint64_t timeout ) const VULKAN_HPP_NOEXCEPT - { - return ::vkWaitSemaphores( device, pWaitInfo, timeout ); - } - - VkResult vkSignalSemaphore( VkDevice device, const VkSemaphoreSignalInfo * pSignalInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkSignalSemaphore( device, pSignalInfo ); - } - - VkDeviceAddress vkGetBufferDeviceAddress( VkDevice device, const VkBufferDeviceAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetBufferDeviceAddress( device, pInfo ); - } - - uint64_t vkGetBufferOpaqueCaptureAddress( VkDevice device, const VkBufferDeviceAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetBufferOpaqueCaptureAddress( device, pInfo ); - } - - uint64_t vkGetDeviceMemoryOpaqueCaptureAddress( VkDevice device, const VkDeviceMemoryOpaqueCaptureAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDeviceMemoryOpaqueCaptureAddress( device, pInfo ); - } - - //=== VK_VERSION_1_3 === - - VkResult vkGetPhysicalDeviceToolProperties( VkPhysicalDevice physicalDevice, - uint32_t * pToolCount, - VkPhysicalDeviceToolProperties * pToolProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceToolProperties( physicalDevice, pToolCount, pToolProperties ); - } - - VkResult vkCreatePrivateDataSlot( VkDevice device, - const VkPrivateDataSlotCreateInfo * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkPrivateDataSlot * pPrivateDataSlot ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreatePrivateDataSlot( device, pCreateInfo, pAllocator, pPrivateDataSlot ); - } - - void vkDestroyPrivateDataSlot( VkDevice device, VkPrivateDataSlot privateDataSlot, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyPrivateDataSlot( device, privateDataSlot, pAllocator ); - } - - VkResult vkSetPrivateData( VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlot privateDataSlot, uint64_t data ) const - VULKAN_HPP_NOEXCEPT - { - return ::vkSetPrivateData( device, objectType, objectHandle, privateDataSlot, data ); - } - - void vkGetPrivateData( VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlot privateDataSlot, uint64_t * pData ) const - VULKAN_HPP_NOEXCEPT - { - return ::vkGetPrivateData( device, objectType, objectHandle, privateDataSlot, pData ); - } - - void vkCmdSetEvent2( VkCommandBuffer commandBuffer, VkEvent event, const VkDependencyInfo * pDependencyInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetEvent2( commandBuffer, event, pDependencyInfo 
); - } - - void vkCmdResetEvent2( VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags2 stageMask ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdResetEvent2( commandBuffer, event, stageMask ); - } - - void vkCmdWaitEvents2( VkCommandBuffer commandBuffer, - uint32_t eventCount, - const VkEvent * pEvents, - const VkDependencyInfo * pDependencyInfos ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdWaitEvents2( commandBuffer, eventCount, pEvents, pDependencyInfos ); - } - - void vkCmdPipelineBarrier2( VkCommandBuffer commandBuffer, const VkDependencyInfo * pDependencyInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdPipelineBarrier2( commandBuffer, pDependencyInfo ); - } - - void vkCmdWriteTimestamp2( VkCommandBuffer commandBuffer, VkPipelineStageFlags2 stage, VkQueryPool queryPool, uint32_t query ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdWriteTimestamp2( commandBuffer, stage, queryPool, query ); - } - - VkResult vkQueueSubmit2( VkQueue queue, uint32_t submitCount, const VkSubmitInfo2 * pSubmits, VkFence fence ) const VULKAN_HPP_NOEXCEPT - { - return ::vkQueueSubmit2( queue, submitCount, pSubmits, fence ); - } - - void vkCmdCopyBuffer2( VkCommandBuffer commandBuffer, const VkCopyBufferInfo2 * pCopyBufferInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdCopyBuffer2( commandBuffer, pCopyBufferInfo ); - } - - void vkCmdCopyImage2( VkCommandBuffer commandBuffer, const VkCopyImageInfo2 * pCopyImageInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdCopyImage2( commandBuffer, pCopyImageInfo ); - } - - void vkCmdCopyBufferToImage2( VkCommandBuffer commandBuffer, const VkCopyBufferToImageInfo2 * pCopyBufferToImageInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdCopyBufferToImage2( commandBuffer, pCopyBufferToImageInfo ); - } - - void vkCmdCopyImageToBuffer2( VkCommandBuffer commandBuffer, const VkCopyImageToBufferInfo2 * pCopyImageToBufferInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdCopyImageToBuffer2( commandBuffer, pCopyImageToBufferInfo ); - } - - void vkCmdBlitImage2( VkCommandBuffer commandBuffer, const VkBlitImageInfo2 * pBlitImageInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBlitImage2( commandBuffer, pBlitImageInfo ); - } - - void vkCmdResolveImage2( VkCommandBuffer commandBuffer, const VkResolveImageInfo2 * pResolveImageInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdResolveImage2( commandBuffer, pResolveImageInfo ); - } - - void vkCmdBeginRendering( VkCommandBuffer commandBuffer, const VkRenderingInfo * pRenderingInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBeginRendering( commandBuffer, pRenderingInfo ); - } - - void vkCmdEndRendering( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdEndRendering( commandBuffer ); - } - - void vkCmdSetCullMode( VkCommandBuffer commandBuffer, VkCullModeFlags cullMode ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetCullMode( commandBuffer, cullMode ); - } - - void vkCmdSetFrontFace( VkCommandBuffer commandBuffer, VkFrontFace frontFace ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetFrontFace( commandBuffer, frontFace ); - } - - void vkCmdSetPrimitiveTopology( VkCommandBuffer commandBuffer, VkPrimitiveTopology primitiveTopology ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetPrimitiveTopology( commandBuffer, primitiveTopology ); - } - - void vkCmdSetViewportWithCount( VkCommandBuffer commandBuffer, uint32_t viewportCount, const VkViewport * pViewports ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetViewportWithCount( commandBuffer, viewportCount, pViewports ); - 
} - - void vkCmdSetScissorWithCount( VkCommandBuffer commandBuffer, uint32_t scissorCount, const VkRect2D * pScissors ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetScissorWithCount( commandBuffer, scissorCount, pScissors ); - } - - void vkCmdBindVertexBuffers2( VkCommandBuffer commandBuffer, - uint32_t firstBinding, - uint32_t bindingCount, - const VkBuffer * pBuffers, - const VkDeviceSize * pOffsets, - const VkDeviceSize * pSizes, - const VkDeviceSize * pStrides ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBindVertexBuffers2( commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets, pSizes, pStrides ); - } - - void vkCmdSetDepthTestEnable( VkCommandBuffer commandBuffer, VkBool32 depthTestEnable ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetDepthTestEnable( commandBuffer, depthTestEnable ); - } - - void vkCmdSetDepthWriteEnable( VkCommandBuffer commandBuffer, VkBool32 depthWriteEnable ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetDepthWriteEnable( commandBuffer, depthWriteEnable ); - } - - void vkCmdSetDepthCompareOp( VkCommandBuffer commandBuffer, VkCompareOp depthCompareOp ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetDepthCompareOp( commandBuffer, depthCompareOp ); - } - - void vkCmdSetDepthBoundsTestEnable( VkCommandBuffer commandBuffer, VkBool32 depthBoundsTestEnable ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetDepthBoundsTestEnable( commandBuffer, depthBoundsTestEnable ); - } - - void vkCmdSetStencilTestEnable( VkCommandBuffer commandBuffer, VkBool32 stencilTestEnable ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetStencilTestEnable( commandBuffer, stencilTestEnable ); - } - - void vkCmdSetStencilOp( VkCommandBuffer commandBuffer, - VkStencilFaceFlags faceMask, - VkStencilOp failOp, - VkStencilOp passOp, - VkStencilOp depthFailOp, - VkCompareOp compareOp ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetStencilOp( commandBuffer, faceMask, failOp, passOp, depthFailOp, compareOp ); - } - - void vkCmdSetRasterizerDiscardEnable( VkCommandBuffer commandBuffer, VkBool32 rasterizerDiscardEnable ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetRasterizerDiscardEnable( commandBuffer, rasterizerDiscardEnable ); - } - - void vkCmdSetDepthBiasEnable( VkCommandBuffer commandBuffer, VkBool32 depthBiasEnable ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetDepthBiasEnable( commandBuffer, depthBiasEnable ); - } - - void vkCmdSetPrimitiveRestartEnable( VkCommandBuffer commandBuffer, VkBool32 primitiveRestartEnable ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetPrimitiveRestartEnable( commandBuffer, primitiveRestartEnable ); - } - - void vkGetDeviceBufferMemoryRequirements( VkDevice device, - const VkDeviceBufferMemoryRequirements * pInfo, - VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDeviceBufferMemoryRequirements( device, pInfo, pMemoryRequirements ); - } - - void vkGetDeviceImageMemoryRequirements( VkDevice device, - const VkDeviceImageMemoryRequirements * pInfo, - VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDeviceImageMemoryRequirements( device, pInfo, pMemoryRequirements ); - } - - void vkGetDeviceImageSparseMemoryRequirements( VkDevice device, - const VkDeviceImageMemoryRequirements * pInfo, - uint32_t * pSparseMemoryRequirementCount, - VkSparseImageMemoryRequirements2 * pSparseMemoryRequirements ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDeviceImageSparseMemoryRequirements( device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements ); - 
} - - //=== VK_KHR_surface === - - void vkDestroySurfaceKHR( VkInstance instance, VkSurfaceKHR surface, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroySurfaceKHR( instance, surface, pAllocator ); - } - - VkResult vkGetPhysicalDeviceSurfaceSupportKHR( VkPhysicalDevice physicalDevice, - uint32_t queueFamilyIndex, - VkSurfaceKHR surface, - VkBool32 * pSupported ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceSurfaceSupportKHR( physicalDevice, queueFamilyIndex, surface, pSupported ); - } - - VkResult vkGetPhysicalDeviceSurfaceCapabilitiesKHR( VkPhysicalDevice physicalDevice, - VkSurfaceKHR surface, - VkSurfaceCapabilitiesKHR * pSurfaceCapabilities ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceSurfaceCapabilitiesKHR( physicalDevice, surface, pSurfaceCapabilities ); - } - - VkResult vkGetPhysicalDeviceSurfaceFormatsKHR( VkPhysicalDevice physicalDevice, - VkSurfaceKHR surface, - uint32_t * pSurfaceFormatCount, - VkSurfaceFormatKHR * pSurfaceFormats ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceSurfaceFormatsKHR( physicalDevice, surface, pSurfaceFormatCount, pSurfaceFormats ); - } - - VkResult vkGetPhysicalDeviceSurfacePresentModesKHR( VkPhysicalDevice physicalDevice, - VkSurfaceKHR surface, - uint32_t * pPresentModeCount, - VkPresentModeKHR * pPresentModes ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceSurfacePresentModesKHR( physicalDevice, surface, pPresentModeCount, pPresentModes ); - } - - //=== VK_KHR_swapchain === - - VkResult vkCreateSwapchainKHR( VkDevice device, - const VkSwapchainCreateInfoKHR * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkSwapchainKHR * pSwapchain ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateSwapchainKHR( device, pCreateInfo, pAllocator, pSwapchain ); - } - - void vkDestroySwapchainKHR( VkDevice device, VkSwapchainKHR swapchain, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroySwapchainKHR( device, swapchain, pAllocator ); - } - - VkResult vkGetSwapchainImagesKHR( VkDevice device, - VkSwapchainKHR swapchain, - uint32_t * pSwapchainImageCount, - VkImage * pSwapchainImages ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetSwapchainImagesKHR( device, swapchain, pSwapchainImageCount, pSwapchainImages ); - } - - VkResult vkAcquireNextImageKHR( - VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout, VkSemaphore semaphore, VkFence fence, uint32_t * pImageIndex ) const VULKAN_HPP_NOEXCEPT - { - return ::vkAcquireNextImageKHR( device, swapchain, timeout, semaphore, fence, pImageIndex ); - } - - VkResult vkQueuePresentKHR( VkQueue queue, const VkPresentInfoKHR * pPresentInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkQueuePresentKHR( queue, pPresentInfo ); - } - - VkResult vkGetDeviceGroupPresentCapabilitiesKHR( VkDevice device, - VkDeviceGroupPresentCapabilitiesKHR * pDeviceGroupPresentCapabilities ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDeviceGroupPresentCapabilitiesKHR( device, pDeviceGroupPresentCapabilities ); - } - - VkResult - vkGetDeviceGroupSurfacePresentModesKHR( VkDevice device, VkSurfaceKHR surface, VkDeviceGroupPresentModeFlagsKHR * pModes ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDeviceGroupSurfacePresentModesKHR( device, surface, pModes ); - } - - VkResult vkGetPhysicalDevicePresentRectanglesKHR( VkPhysicalDevice physicalDevice, - VkSurfaceKHR surface, - uint32_t * pRectCount, - VkRect2D * pRects ) const VULKAN_HPP_NOEXCEPT - { - return 
::vkGetPhysicalDevicePresentRectanglesKHR( physicalDevice, surface, pRectCount, pRects ); - } - - VkResult vkAcquireNextImage2KHR( VkDevice device, const VkAcquireNextImageInfoKHR * pAcquireInfo, uint32_t * pImageIndex ) const VULKAN_HPP_NOEXCEPT - { - return ::vkAcquireNextImage2KHR( device, pAcquireInfo, pImageIndex ); - } - - //=== VK_KHR_display === - - VkResult vkGetPhysicalDeviceDisplayPropertiesKHR( VkPhysicalDevice physicalDevice, - uint32_t * pPropertyCount, - VkDisplayPropertiesKHR * pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceDisplayPropertiesKHR( physicalDevice, pPropertyCount, pProperties ); - } - - VkResult vkGetPhysicalDeviceDisplayPlanePropertiesKHR( VkPhysicalDevice physicalDevice, - uint32_t * pPropertyCount, - VkDisplayPlanePropertiesKHR * pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceDisplayPlanePropertiesKHR( physicalDevice, pPropertyCount, pProperties ); - } - - VkResult vkGetDisplayPlaneSupportedDisplaysKHR( VkPhysicalDevice physicalDevice, - uint32_t planeIndex, - uint32_t * pDisplayCount, - VkDisplayKHR * pDisplays ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDisplayPlaneSupportedDisplaysKHR( physicalDevice, planeIndex, pDisplayCount, pDisplays ); - } - - VkResult vkGetDisplayModePropertiesKHR( VkPhysicalDevice physicalDevice, - VkDisplayKHR display, - uint32_t * pPropertyCount, - VkDisplayModePropertiesKHR * pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDisplayModePropertiesKHR( physicalDevice, display, pPropertyCount, pProperties ); - } - - VkResult vkCreateDisplayModeKHR( VkPhysicalDevice physicalDevice, - VkDisplayKHR display, - const VkDisplayModeCreateInfoKHR * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkDisplayModeKHR * pMode ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateDisplayModeKHR( physicalDevice, display, pCreateInfo, pAllocator, pMode ); - } - - VkResult vkGetDisplayPlaneCapabilitiesKHR( VkPhysicalDevice physicalDevice, - VkDisplayModeKHR mode, - uint32_t planeIndex, - VkDisplayPlaneCapabilitiesKHR * pCapabilities ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDisplayPlaneCapabilitiesKHR( physicalDevice, mode, planeIndex, pCapabilities ); - } - - VkResult vkCreateDisplayPlaneSurfaceKHR( VkInstance instance, - const VkDisplaySurfaceCreateInfoKHR * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateDisplayPlaneSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface ); - } - - //=== VK_KHR_display_swapchain === - - VkResult vkCreateSharedSwapchainsKHR( VkDevice device, - uint32_t swapchainCount, - const VkSwapchainCreateInfoKHR * pCreateInfos, - const VkAllocationCallbacks * pAllocator, - VkSwapchainKHR * pSwapchains ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateSharedSwapchainsKHR( device, swapchainCount, pCreateInfos, pAllocator, pSwapchains ); - } + { + return ::vkDestroySamplerYcbcrConversion( device, ycbcrConversion, pAllocator ); + } + + VkResult vkCreateDescriptorUpdateTemplate( VkDevice device, + const VkDescriptorUpdateTemplateCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkDescriptorUpdateTemplate * pDescriptorUpdateTemplate ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateDescriptorUpdateTemplate( device, pCreateInfo, pAllocator, pDescriptorUpdateTemplate ); + } + + void vkDestroyDescriptorUpdateTemplate( VkDevice device, + VkDescriptorUpdateTemplate descriptorUpdateTemplate, + const VkAllocationCallbacks * pAllocator ) const 
VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyDescriptorUpdateTemplate( device, descriptorUpdateTemplate, pAllocator ); + } + + void vkUpdateDescriptorSetWithTemplate( VkDevice device, + VkDescriptorSet descriptorSet, + VkDescriptorUpdateTemplate descriptorUpdateTemplate, + const void * pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkUpdateDescriptorSetWithTemplate( device, descriptorSet, descriptorUpdateTemplate, pData ); + } + + void vkGetPhysicalDeviceExternalBufferProperties( VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceExternalBufferInfo * pExternalBufferInfo, + VkExternalBufferProperties * pExternalBufferProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceExternalBufferProperties( physicalDevice, pExternalBufferInfo, pExternalBufferProperties ); + } + + void vkGetPhysicalDeviceExternalFenceProperties( VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceExternalFenceInfo * pExternalFenceInfo, + VkExternalFenceProperties * pExternalFenceProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceExternalFenceProperties( physicalDevice, pExternalFenceInfo, pExternalFenceProperties ); + } + + void vkGetPhysicalDeviceExternalSemaphoreProperties( VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo, + VkExternalSemaphoreProperties * pExternalSemaphoreProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceExternalSemaphoreProperties( physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties ); + } + + void vkGetDescriptorSetLayoutSupport( VkDevice device, + const VkDescriptorSetLayoutCreateInfo * pCreateInfo, + VkDescriptorSetLayoutSupport * pSupport ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDescriptorSetLayoutSupport( device, pCreateInfo, pSupport ); + } + + //=== VK_VERSION_1_2 === + + void vkCmdDrawIndirectCount( VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawIndirectCount( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); + } + + void vkCmdDrawIndexedIndirectCount( VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawIndexedIndirectCount( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); + } + + VkResult vkCreateRenderPass2( VkDevice device, + const VkRenderPassCreateInfo2 * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkRenderPass * pRenderPass ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateRenderPass2( device, pCreateInfo, pAllocator, pRenderPass ); + } + + void vkCmdBeginRenderPass2( VkCommandBuffer commandBuffer, + const VkRenderPassBeginInfo * pRenderPassBegin, + const VkSubpassBeginInfo * pSubpassBeginInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBeginRenderPass2( commandBuffer, pRenderPassBegin, pSubpassBeginInfo ); + } + + void vkCmdNextSubpass2( VkCommandBuffer commandBuffer, + const VkSubpassBeginInfo * pSubpassBeginInfo, + const VkSubpassEndInfo * pSubpassEndInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdNextSubpass2( commandBuffer, pSubpassBeginInfo, pSubpassEndInfo ); + } + + void vkCmdEndRenderPass2( VkCommandBuffer commandBuffer, const VkSubpassEndInfo * pSubpassEndInfo ) const 
VULKAN_HPP_NOEXCEPT + { + return ::vkCmdEndRenderPass2( commandBuffer, pSubpassEndInfo ); + } + + void vkResetQueryPool( VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT + { + return ::vkResetQueryPool( device, queryPool, firstQuery, queryCount ); + } + + VkResult vkGetSemaphoreCounterValue( VkDevice device, VkSemaphore semaphore, uint64_t * pValue ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetSemaphoreCounterValue( device, semaphore, pValue ); + } + + VkResult vkWaitSemaphores( VkDevice device, const VkSemaphoreWaitInfo * pWaitInfo, uint64_t timeout ) const VULKAN_HPP_NOEXCEPT + { + return ::vkWaitSemaphores( device, pWaitInfo, timeout ); + } + + VkResult vkSignalSemaphore( VkDevice device, const VkSemaphoreSignalInfo * pSignalInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkSignalSemaphore( device, pSignalInfo ); + } + + VkDeviceAddress vkGetBufferDeviceAddress( VkDevice device, const VkBufferDeviceAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetBufferDeviceAddress( device, pInfo ); + } + + uint64_t vkGetBufferOpaqueCaptureAddress( VkDevice device, const VkBufferDeviceAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetBufferOpaqueCaptureAddress( device, pInfo ); + } + + uint64_t vkGetDeviceMemoryOpaqueCaptureAddress( VkDevice device, const VkDeviceMemoryOpaqueCaptureAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceMemoryOpaqueCaptureAddress( device, pInfo ); + } + + //=== VK_VERSION_1_3 === + + VkResult vkGetPhysicalDeviceToolProperties( VkPhysicalDevice physicalDevice, + uint32_t * pToolCount, + VkPhysicalDeviceToolProperties * pToolProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceToolProperties( physicalDevice, pToolCount, pToolProperties ); + } + + VkResult vkCreatePrivateDataSlot( VkDevice device, + const VkPrivateDataSlotCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkPrivateDataSlot * pPrivateDataSlot ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreatePrivateDataSlot( device, pCreateInfo, pAllocator, pPrivateDataSlot ); + } + + void vkDestroyPrivateDataSlot( VkDevice device, VkPrivateDataSlot privateDataSlot, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyPrivateDataSlot( device, privateDataSlot, pAllocator ); + } + + VkResult vkSetPrivateData( VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlot privateDataSlot, uint64_t data ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkSetPrivateData( device, objectType, objectHandle, privateDataSlot, data ); + } + + void vkGetPrivateData( VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlot privateDataSlot, uint64_t * pData ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkGetPrivateData( device, objectType, objectHandle, privateDataSlot, pData ); + } + + void vkCmdSetEvent2( VkCommandBuffer commandBuffer, VkEvent event, const VkDependencyInfo * pDependencyInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetEvent2( commandBuffer, event, pDependencyInfo ); + } + + void vkCmdResetEvent2( VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags2 stageMask ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdResetEvent2( commandBuffer, event, stageMask ); + } + + void vkCmdWaitEvents2( VkCommandBuffer commandBuffer, + uint32_t eventCount, + const VkEvent * pEvents, + const VkDependencyInfo * pDependencyInfos ) const VULKAN_HPP_NOEXCEPT + { + return 
::vkCmdWaitEvents2( commandBuffer, eventCount, pEvents, pDependencyInfos ); + } + + void vkCmdPipelineBarrier2( VkCommandBuffer commandBuffer, const VkDependencyInfo * pDependencyInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdPipelineBarrier2( commandBuffer, pDependencyInfo ); + } + + void vkCmdWriteTimestamp2( VkCommandBuffer commandBuffer, VkPipelineStageFlags2 stage, VkQueryPool queryPool, uint32_t query ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdWriteTimestamp2( commandBuffer, stage, queryPool, query ); + } + + VkResult vkQueueSubmit2( VkQueue queue, uint32_t submitCount, const VkSubmitInfo2 * pSubmits, VkFence fence ) const VULKAN_HPP_NOEXCEPT + { + return ::vkQueueSubmit2( queue, submitCount, pSubmits, fence ); + } + + void vkCmdCopyBuffer2( VkCommandBuffer commandBuffer, const VkCopyBufferInfo2 * pCopyBufferInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyBuffer2( commandBuffer, pCopyBufferInfo ); + } + + void vkCmdCopyImage2( VkCommandBuffer commandBuffer, const VkCopyImageInfo2 * pCopyImageInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyImage2( commandBuffer, pCopyImageInfo ); + } + + void vkCmdCopyBufferToImage2( VkCommandBuffer commandBuffer, const VkCopyBufferToImageInfo2 * pCopyBufferToImageInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyBufferToImage2( commandBuffer, pCopyBufferToImageInfo ); + } + + void vkCmdCopyImageToBuffer2( VkCommandBuffer commandBuffer, const VkCopyImageToBufferInfo2 * pCopyImageToBufferInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyImageToBuffer2( commandBuffer, pCopyImageToBufferInfo ); + } + + void vkCmdBlitImage2( VkCommandBuffer commandBuffer, const VkBlitImageInfo2 * pBlitImageInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBlitImage2( commandBuffer, pBlitImageInfo ); + } + + void vkCmdResolveImage2( VkCommandBuffer commandBuffer, const VkResolveImageInfo2 * pResolveImageInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdResolveImage2( commandBuffer, pResolveImageInfo ); + } + + void vkCmdBeginRendering( VkCommandBuffer commandBuffer, const VkRenderingInfo * pRenderingInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBeginRendering( commandBuffer, pRenderingInfo ); + } + + void vkCmdEndRendering( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdEndRendering( commandBuffer ); + } + + void vkCmdSetCullMode( VkCommandBuffer commandBuffer, VkCullModeFlags cullMode ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetCullMode( commandBuffer, cullMode ); + } + + void vkCmdSetFrontFace( VkCommandBuffer commandBuffer, VkFrontFace frontFace ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetFrontFace( commandBuffer, frontFace ); + } + + void vkCmdSetPrimitiveTopology( VkCommandBuffer commandBuffer, VkPrimitiveTopology primitiveTopology ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetPrimitiveTopology( commandBuffer, primitiveTopology ); + } + + void vkCmdSetViewportWithCount( VkCommandBuffer commandBuffer, uint32_t viewportCount, const VkViewport * pViewports ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetViewportWithCount( commandBuffer, viewportCount, pViewports ); + } + + void vkCmdSetScissorWithCount( VkCommandBuffer commandBuffer, uint32_t scissorCount, const VkRect2D * pScissors ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetScissorWithCount( commandBuffer, scissorCount, pScissors ); + } + + void vkCmdBindVertexBuffers2( VkCommandBuffer commandBuffer, + uint32_t firstBinding, + uint32_t bindingCount, + const VkBuffer * pBuffers, + const VkDeviceSize * 
pOffsets, + const VkDeviceSize * pSizes, + const VkDeviceSize * pStrides ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBindVertexBuffers2( commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets, pSizes, pStrides ); + } + + void vkCmdSetDepthTestEnable( VkCommandBuffer commandBuffer, VkBool32 depthTestEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDepthTestEnable( commandBuffer, depthTestEnable ); + } + + void vkCmdSetDepthWriteEnable( VkCommandBuffer commandBuffer, VkBool32 depthWriteEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDepthWriteEnable( commandBuffer, depthWriteEnable ); + } + + void vkCmdSetDepthCompareOp( VkCommandBuffer commandBuffer, VkCompareOp depthCompareOp ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDepthCompareOp( commandBuffer, depthCompareOp ); + } + + void vkCmdSetDepthBoundsTestEnable( VkCommandBuffer commandBuffer, VkBool32 depthBoundsTestEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDepthBoundsTestEnable( commandBuffer, depthBoundsTestEnable ); + } + + void vkCmdSetStencilTestEnable( VkCommandBuffer commandBuffer, VkBool32 stencilTestEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetStencilTestEnable( commandBuffer, stencilTestEnable ); + } + + void vkCmdSetStencilOp( VkCommandBuffer commandBuffer, + VkStencilFaceFlags faceMask, + VkStencilOp failOp, + VkStencilOp passOp, + VkStencilOp depthFailOp, + VkCompareOp compareOp ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetStencilOp( commandBuffer, faceMask, failOp, passOp, depthFailOp, compareOp ); + } + + void vkCmdSetRasterizerDiscardEnable( VkCommandBuffer commandBuffer, VkBool32 rasterizerDiscardEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetRasterizerDiscardEnable( commandBuffer, rasterizerDiscardEnable ); + } + + void vkCmdSetDepthBiasEnable( VkCommandBuffer commandBuffer, VkBool32 depthBiasEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDepthBiasEnable( commandBuffer, depthBiasEnable ); + } + + void vkCmdSetPrimitiveRestartEnable( VkCommandBuffer commandBuffer, VkBool32 primitiveRestartEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetPrimitiveRestartEnable( commandBuffer, primitiveRestartEnable ); + } + + void vkGetDeviceBufferMemoryRequirements( VkDevice device, + const VkDeviceBufferMemoryRequirements * pInfo, + VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceBufferMemoryRequirements( device, pInfo, pMemoryRequirements ); + } + + void vkGetDeviceImageMemoryRequirements( VkDevice device, + const VkDeviceImageMemoryRequirements * pInfo, + VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceImageMemoryRequirements( device, pInfo, pMemoryRequirements ); + } + + void vkGetDeviceImageSparseMemoryRequirements( VkDevice device, + const VkDeviceImageMemoryRequirements * pInfo, + uint32_t * pSparseMemoryRequirementCount, + VkSparseImageMemoryRequirements2 * pSparseMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceImageSparseMemoryRequirements( device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements ); + } + + //=== VK_KHR_surface === + + void vkDestroySurfaceKHR( VkInstance instance, VkSurfaceKHR surface, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroySurfaceKHR( instance, surface, pAllocator ); + } + + VkResult vkGetPhysicalDeviceSurfaceSupportKHR( VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + VkSurfaceKHR surface, + VkBool32 * 
pSupported ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceSurfaceSupportKHR( physicalDevice, queueFamilyIndex, surface, pSupported ); + } + + VkResult vkGetPhysicalDeviceSurfaceCapabilitiesKHR( VkPhysicalDevice physicalDevice, + VkSurfaceKHR surface, + VkSurfaceCapabilitiesKHR * pSurfaceCapabilities ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceSurfaceCapabilitiesKHR( physicalDevice, surface, pSurfaceCapabilities ); + } + + VkResult vkGetPhysicalDeviceSurfaceFormatsKHR( VkPhysicalDevice physicalDevice, + VkSurfaceKHR surface, + uint32_t * pSurfaceFormatCount, + VkSurfaceFormatKHR * pSurfaceFormats ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceSurfaceFormatsKHR( physicalDevice, surface, pSurfaceFormatCount, pSurfaceFormats ); + } + + VkResult vkGetPhysicalDeviceSurfacePresentModesKHR( VkPhysicalDevice physicalDevice, + VkSurfaceKHR surface, + uint32_t * pPresentModeCount, + VkPresentModeKHR * pPresentModes ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceSurfacePresentModesKHR( physicalDevice, surface, pPresentModeCount, pPresentModes ); + } + + //=== VK_KHR_swapchain === + + VkResult vkCreateSwapchainKHR( VkDevice device, + const VkSwapchainCreateInfoKHR * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSwapchainKHR * pSwapchain ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateSwapchainKHR( device, pCreateInfo, pAllocator, pSwapchain ); + } + + void vkDestroySwapchainKHR( VkDevice device, VkSwapchainKHR swapchain, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroySwapchainKHR( device, swapchain, pAllocator ); + } + + VkResult vkGetSwapchainImagesKHR( VkDevice device, + VkSwapchainKHR swapchain, + uint32_t * pSwapchainImageCount, + VkImage * pSwapchainImages ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetSwapchainImagesKHR( device, swapchain, pSwapchainImageCount, pSwapchainImages ); + } + + VkResult vkAcquireNextImageKHR( + VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout, VkSemaphore semaphore, VkFence fence, uint32_t * pImageIndex ) const VULKAN_HPP_NOEXCEPT + { + return ::vkAcquireNextImageKHR( device, swapchain, timeout, semaphore, fence, pImageIndex ); + } + + VkResult vkQueuePresentKHR( VkQueue queue, const VkPresentInfoKHR * pPresentInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkQueuePresentKHR( queue, pPresentInfo ); + } + + VkResult vkGetDeviceGroupPresentCapabilitiesKHR( VkDevice device, + VkDeviceGroupPresentCapabilitiesKHR * pDeviceGroupPresentCapabilities ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceGroupPresentCapabilitiesKHR( device, pDeviceGroupPresentCapabilities ); + } + + VkResult + vkGetDeviceGroupSurfacePresentModesKHR( VkDevice device, VkSurfaceKHR surface, VkDeviceGroupPresentModeFlagsKHR * pModes ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceGroupSurfacePresentModesKHR( device, surface, pModes ); + } + + VkResult vkGetPhysicalDevicePresentRectanglesKHR( VkPhysicalDevice physicalDevice, + VkSurfaceKHR surface, + uint32_t * pRectCount, + VkRect2D * pRects ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDevicePresentRectanglesKHR( physicalDevice, surface, pRectCount, pRects ); + } + + VkResult vkAcquireNextImage2KHR( VkDevice device, const VkAcquireNextImageInfoKHR * pAcquireInfo, uint32_t * pImageIndex ) const VULKAN_HPP_NOEXCEPT + { + return ::vkAcquireNextImage2KHR( device, pAcquireInfo, pImageIndex ); + } + + //=== VK_KHR_display === + + VkResult vkGetPhysicalDeviceDisplayPropertiesKHR( VkPhysicalDevice 
physicalDevice, + uint32_t * pPropertyCount, + VkDisplayPropertiesKHR * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceDisplayPropertiesKHR( physicalDevice, pPropertyCount, pProperties ); + } + + VkResult vkGetPhysicalDeviceDisplayPlanePropertiesKHR( VkPhysicalDevice physicalDevice, + uint32_t * pPropertyCount, + VkDisplayPlanePropertiesKHR * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceDisplayPlanePropertiesKHR( physicalDevice, pPropertyCount, pProperties ); + } + + VkResult vkGetDisplayPlaneSupportedDisplaysKHR( VkPhysicalDevice physicalDevice, + uint32_t planeIndex, + uint32_t * pDisplayCount, + VkDisplayKHR * pDisplays ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDisplayPlaneSupportedDisplaysKHR( physicalDevice, planeIndex, pDisplayCount, pDisplays ); + } + + VkResult vkGetDisplayModePropertiesKHR( VkPhysicalDevice physicalDevice, + VkDisplayKHR display, + uint32_t * pPropertyCount, + VkDisplayModePropertiesKHR * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDisplayModePropertiesKHR( physicalDevice, display, pPropertyCount, pProperties ); + } + + VkResult vkCreateDisplayModeKHR( VkPhysicalDevice physicalDevice, + VkDisplayKHR display, + const VkDisplayModeCreateInfoKHR * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkDisplayModeKHR * pMode ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateDisplayModeKHR( physicalDevice, display, pCreateInfo, pAllocator, pMode ); + } + + VkResult vkGetDisplayPlaneCapabilitiesKHR( VkPhysicalDevice physicalDevice, + VkDisplayModeKHR mode, + uint32_t planeIndex, + VkDisplayPlaneCapabilitiesKHR * pCapabilities ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDisplayPlaneCapabilitiesKHR( physicalDevice, mode, planeIndex, pCapabilities ); + } + + VkResult vkCreateDisplayPlaneSurfaceKHR( VkInstance instance, + const VkDisplaySurfaceCreateInfoKHR * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateDisplayPlaneSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface ); + } + + //=== VK_KHR_display_swapchain === + + VkResult vkCreateSharedSwapchainsKHR( VkDevice device, + uint32_t swapchainCount, + const VkSwapchainCreateInfoKHR * pCreateInfos, + const VkAllocationCallbacks * pAllocator, + VkSwapchainKHR * pSwapchains ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateSharedSwapchainsKHR( device, swapchainCount, pCreateInfos, pAllocator, pSwapchains ); + } # if defined( VK_USE_PLATFORM_XLIB_KHR ) - //=== VK_KHR_xlib_surface === + //=== VK_KHR_xlib_surface === - VkResult vkCreateXlibSurfaceKHR( VkInstance instance, - const VkXlibSurfaceCreateInfoKHR * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateXlibSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface ); - } + VkResult vkCreateXlibSurfaceKHR( VkInstance instance, + const VkXlibSurfaceCreateInfoKHR * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateXlibSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface ); + } - VkBool32 vkGetPhysicalDeviceXlibPresentationSupportKHR( VkPhysicalDevice physicalDevice, - uint32_t queueFamilyIndex, - Display * dpy, - VisualID visualID ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceXlibPresentationSupportKHR( physicalDevice, queueFamilyIndex, dpy, visualID ); - } + VkBool32 vkGetPhysicalDeviceXlibPresentationSupportKHR( 
VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + Display * dpy, + VisualID visualID ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceXlibPresentationSupportKHR( physicalDevice, queueFamilyIndex, dpy, visualID ); + } # endif /*VK_USE_PLATFORM_XLIB_KHR*/ # if defined( VK_USE_PLATFORM_XCB_KHR ) - //=== VK_KHR_xcb_surface === + //=== VK_KHR_xcb_surface === - VkResult vkCreateXcbSurfaceKHR( VkInstance instance, - const VkXcbSurfaceCreateInfoKHR * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateXcbSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface ); - } + VkResult vkCreateXcbSurfaceKHR( VkInstance instance, + const VkXcbSurfaceCreateInfoKHR * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateXcbSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface ); + } - VkBool32 vkGetPhysicalDeviceXcbPresentationSupportKHR( VkPhysicalDevice physicalDevice, - uint32_t queueFamilyIndex, - xcb_connection_t * connection, - xcb_visualid_t visual_id ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceXcbPresentationSupportKHR( physicalDevice, queueFamilyIndex, connection, visual_id ); - } + VkBool32 vkGetPhysicalDeviceXcbPresentationSupportKHR( VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + xcb_connection_t * connection, + xcb_visualid_t visual_id ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceXcbPresentationSupportKHR( physicalDevice, queueFamilyIndex, connection, visual_id ); + } # endif /*VK_USE_PLATFORM_XCB_KHR*/ # if defined( VK_USE_PLATFORM_WAYLAND_KHR ) - //=== VK_KHR_wayland_surface === + //=== VK_KHR_wayland_surface === - VkResult vkCreateWaylandSurfaceKHR( VkInstance instance, - const VkWaylandSurfaceCreateInfoKHR * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateWaylandSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface ); - } + VkResult vkCreateWaylandSurfaceKHR( VkInstance instance, + const VkWaylandSurfaceCreateInfoKHR * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateWaylandSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface ); + } - VkBool32 vkGetPhysicalDeviceWaylandPresentationSupportKHR( VkPhysicalDevice physicalDevice, - uint32_t queueFamilyIndex, - struct wl_display * display ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceWaylandPresentationSupportKHR( physicalDevice, queueFamilyIndex, display ); - } + VkBool32 vkGetPhysicalDeviceWaylandPresentationSupportKHR( VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + struct wl_display * display ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceWaylandPresentationSupportKHR( physicalDevice, queueFamilyIndex, display ); + } # endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ # if defined( VK_USE_PLATFORM_ANDROID_KHR ) - //=== VK_KHR_android_surface === + //=== VK_KHR_android_surface === - VkResult vkCreateAndroidSurfaceKHR( VkInstance instance, - const VkAndroidSurfaceCreateInfoKHR * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateAndroidSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface ); - } + VkResult vkCreateAndroidSurfaceKHR( VkInstance instance, + const VkAndroidSurfaceCreateInfoKHR * pCreateInfo, + const 
VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateAndroidSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface ); + } # endif /*VK_USE_PLATFORM_ANDROID_KHR*/ # if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_KHR_win32_surface === + //=== VK_KHR_win32_surface === - VkResult vkCreateWin32SurfaceKHR( VkInstance instance, - const VkWin32SurfaceCreateInfoKHR * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateWin32SurfaceKHR( instance, pCreateInfo, pAllocator, pSurface ); - } + VkResult vkCreateWin32SurfaceKHR( VkInstance instance, + const VkWin32SurfaceCreateInfoKHR * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateWin32SurfaceKHR( instance, pCreateInfo, pAllocator, pSurface ); + } - VkBool32 vkGetPhysicalDeviceWin32PresentationSupportKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceWin32PresentationSupportKHR( physicalDevice, queueFamilyIndex ); - } + VkBool32 vkGetPhysicalDeviceWin32PresentationSupportKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceWin32PresentationSupportKHR( physicalDevice, queueFamilyIndex ); + } # endif /*VK_USE_PLATFORM_WIN32_KHR*/ - //=== VK_EXT_debug_report === + //=== VK_EXT_debug_report === - VkResult vkCreateDebugReportCallbackEXT( VkInstance instance, - const VkDebugReportCallbackCreateInfoEXT * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkDebugReportCallbackEXT * pCallback ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateDebugReportCallbackEXT( instance, pCreateInfo, pAllocator, pCallback ); - } + VkResult vkCreateDebugReportCallbackEXT( VkInstance instance, + const VkDebugReportCallbackCreateInfoEXT * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkDebugReportCallbackEXT * pCallback ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateDebugReportCallbackEXT( instance, pCreateInfo, pAllocator, pCallback ); + } - void vkDestroyDebugReportCallbackEXT( VkInstance instance, - VkDebugReportCallbackEXT callback, - const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyDebugReportCallbackEXT( instance, callback, pAllocator ); - } + void vkDestroyDebugReportCallbackEXT( VkInstance instance, + VkDebugReportCallbackEXT callback, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyDebugReportCallbackEXT( instance, callback, pAllocator ); + } - void vkDebugReportMessageEXT( VkInstance instance, - VkDebugReportFlagsEXT flags, - VkDebugReportObjectTypeEXT objectType, - uint64_t object, - size_t location, - int32_t messageCode, - const char * pLayerPrefix, - const char * pMessage ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDebugReportMessageEXT( instance, flags, objectType, object, location, messageCode, pLayerPrefix, pMessage ); - } + void vkDebugReportMessageEXT( VkInstance instance, + VkDebugReportFlagsEXT flags, + VkDebugReportObjectTypeEXT objectType, + uint64_t object, + size_t location, + int32_t messageCode, + const char * pLayerPrefix, + const char * pMessage ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDebugReportMessageEXT( instance, flags, objectType, object, location, messageCode, pLayerPrefix, pMessage ); + } - //=== VK_EXT_debug_marker === + //=== VK_EXT_debug_marker 
=== - VkResult vkDebugMarkerSetObjectTagEXT( VkDevice device, const VkDebugMarkerObjectTagInfoEXT * pTagInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDebugMarkerSetObjectTagEXT( device, pTagInfo ); - } + VkResult vkDebugMarkerSetObjectTagEXT( VkDevice device, const VkDebugMarkerObjectTagInfoEXT * pTagInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDebugMarkerSetObjectTagEXT( device, pTagInfo ); + } - VkResult vkDebugMarkerSetObjectNameEXT( VkDevice device, const VkDebugMarkerObjectNameInfoEXT * pNameInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDebugMarkerSetObjectNameEXT( device, pNameInfo ); - } + VkResult vkDebugMarkerSetObjectNameEXT( VkDevice device, const VkDebugMarkerObjectNameInfoEXT * pNameInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDebugMarkerSetObjectNameEXT( device, pNameInfo ); + } - void vkCmdDebugMarkerBeginEXT( VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT * pMarkerInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDebugMarkerBeginEXT( commandBuffer, pMarkerInfo ); - } + void vkCmdDebugMarkerBeginEXT( VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT * pMarkerInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDebugMarkerBeginEXT( commandBuffer, pMarkerInfo ); + } - void vkCmdDebugMarkerEndEXT( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDebugMarkerEndEXT( commandBuffer ); - } + void vkCmdDebugMarkerEndEXT( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDebugMarkerEndEXT( commandBuffer ); + } - void vkCmdDebugMarkerInsertEXT( VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT * pMarkerInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDebugMarkerInsertEXT( commandBuffer, pMarkerInfo ); - } + void vkCmdDebugMarkerInsertEXT( VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT * pMarkerInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDebugMarkerInsertEXT( commandBuffer, pMarkerInfo ); + } - //=== VK_KHR_video_queue === + //=== VK_KHR_video_queue === - VkResult vkGetPhysicalDeviceVideoCapabilitiesKHR( VkPhysicalDevice physicalDevice, - const VkVideoProfileInfoKHR * pVideoProfile, - VkVideoCapabilitiesKHR * pCapabilities ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceVideoCapabilitiesKHR( physicalDevice, pVideoProfile, pCapabilities ); - } + VkResult vkGetPhysicalDeviceVideoCapabilitiesKHR( VkPhysicalDevice physicalDevice, + const VkVideoProfileInfoKHR * pVideoProfile, + VkVideoCapabilitiesKHR * pCapabilities ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceVideoCapabilitiesKHR( physicalDevice, pVideoProfile, pCapabilities ); + } - VkResult vkGetPhysicalDeviceVideoFormatPropertiesKHR( VkPhysicalDevice physicalDevice, - const VkPhysicalDeviceVideoFormatInfoKHR * pVideoFormatInfo, - uint32_t * pVideoFormatPropertyCount, - VkVideoFormatPropertiesKHR * pVideoFormatProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceVideoFormatPropertiesKHR( physicalDevice, pVideoFormatInfo, pVideoFormatPropertyCount, pVideoFormatProperties ); - } + VkResult vkGetPhysicalDeviceVideoFormatPropertiesKHR( VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceVideoFormatInfoKHR * pVideoFormatInfo, + uint32_t * pVideoFormatPropertyCount, + VkVideoFormatPropertiesKHR * pVideoFormatProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceVideoFormatPropertiesKHR( physicalDevice, pVideoFormatInfo, pVideoFormatPropertyCount, pVideoFormatProperties ); + } - VkResult vkCreateVideoSessionKHR( VkDevice 
device, - const VkVideoSessionCreateInfoKHR * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkVideoSessionKHR * pVideoSession ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateVideoSessionKHR( device, pCreateInfo, pAllocator, pVideoSession ); - } + VkResult vkCreateVideoSessionKHR( VkDevice device, + const VkVideoSessionCreateInfoKHR * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkVideoSessionKHR * pVideoSession ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateVideoSessionKHR( device, pCreateInfo, pAllocator, pVideoSession ); + } - void vkDestroyVideoSessionKHR( VkDevice device, VkVideoSessionKHR videoSession, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyVideoSessionKHR( device, videoSession, pAllocator ); - } + void vkDestroyVideoSessionKHR( VkDevice device, VkVideoSessionKHR videoSession, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyVideoSessionKHR( device, videoSession, pAllocator ); + } - VkResult vkGetVideoSessionMemoryRequirementsKHR( VkDevice device, - VkVideoSessionKHR videoSession, - uint32_t * pMemoryRequirementsCount, - VkVideoSessionMemoryRequirementsKHR * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetVideoSessionMemoryRequirementsKHR( device, videoSession, pMemoryRequirementsCount, pMemoryRequirements ); - } + VkResult vkGetVideoSessionMemoryRequirementsKHR( VkDevice device, + VkVideoSessionKHR videoSession, + uint32_t * pMemoryRequirementsCount, + VkVideoSessionMemoryRequirementsKHR * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetVideoSessionMemoryRequirementsKHR( device, videoSession, pMemoryRequirementsCount, pMemoryRequirements ); + } - VkResult vkBindVideoSessionMemoryKHR( VkDevice device, - VkVideoSessionKHR videoSession, - uint32_t bindSessionMemoryInfoCount, - const VkBindVideoSessionMemoryInfoKHR * pBindSessionMemoryInfos ) const VULKAN_HPP_NOEXCEPT - { - return ::vkBindVideoSessionMemoryKHR( device, videoSession, bindSessionMemoryInfoCount, pBindSessionMemoryInfos ); - } + VkResult vkBindVideoSessionMemoryKHR( VkDevice device, + VkVideoSessionKHR videoSession, + uint32_t bindSessionMemoryInfoCount, + const VkBindVideoSessionMemoryInfoKHR * pBindSessionMemoryInfos ) const VULKAN_HPP_NOEXCEPT + { + return ::vkBindVideoSessionMemoryKHR( device, videoSession, bindSessionMemoryInfoCount, pBindSessionMemoryInfos ); + } - VkResult vkCreateVideoSessionParametersKHR( VkDevice device, - const VkVideoSessionParametersCreateInfoKHR * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkVideoSessionParametersKHR * pVideoSessionParameters ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateVideoSessionParametersKHR( device, pCreateInfo, pAllocator, pVideoSessionParameters ); - } + VkResult vkCreateVideoSessionParametersKHR( VkDevice device, + const VkVideoSessionParametersCreateInfoKHR * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkVideoSessionParametersKHR * pVideoSessionParameters ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateVideoSessionParametersKHR( device, pCreateInfo, pAllocator, pVideoSessionParameters ); + } - VkResult vkUpdateVideoSessionParametersKHR( VkDevice device, - VkVideoSessionParametersKHR videoSessionParameters, - const VkVideoSessionParametersUpdateInfoKHR * pUpdateInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkUpdateVideoSessionParametersKHR( device, videoSessionParameters, pUpdateInfo ); - } + VkResult vkUpdateVideoSessionParametersKHR( VkDevice device, + 
VkVideoSessionParametersKHR videoSessionParameters, + const VkVideoSessionParametersUpdateInfoKHR * pUpdateInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkUpdateVideoSessionParametersKHR( device, videoSessionParameters, pUpdateInfo ); + } - void vkDestroyVideoSessionParametersKHR( VkDevice device, - VkVideoSessionParametersKHR videoSessionParameters, - const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyVideoSessionParametersKHR( device, videoSessionParameters, pAllocator ); - } + void vkDestroyVideoSessionParametersKHR( VkDevice device, + VkVideoSessionParametersKHR videoSessionParameters, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyVideoSessionParametersKHR( device, videoSessionParameters, pAllocator ); + } - void vkCmdBeginVideoCodingKHR( VkCommandBuffer commandBuffer, const VkVideoBeginCodingInfoKHR * pBeginInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBeginVideoCodingKHR( commandBuffer, pBeginInfo ); - } + void vkCmdBeginVideoCodingKHR( VkCommandBuffer commandBuffer, const VkVideoBeginCodingInfoKHR * pBeginInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBeginVideoCodingKHR( commandBuffer, pBeginInfo ); + } - void vkCmdEndVideoCodingKHR( VkCommandBuffer commandBuffer, const VkVideoEndCodingInfoKHR * pEndCodingInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdEndVideoCodingKHR( commandBuffer, pEndCodingInfo ); - } + void vkCmdEndVideoCodingKHR( VkCommandBuffer commandBuffer, const VkVideoEndCodingInfoKHR * pEndCodingInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdEndVideoCodingKHR( commandBuffer, pEndCodingInfo ); + } - void vkCmdControlVideoCodingKHR( VkCommandBuffer commandBuffer, const VkVideoCodingControlInfoKHR * pCodingControlInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdControlVideoCodingKHR( commandBuffer, pCodingControlInfo ); - } + void vkCmdControlVideoCodingKHR( VkCommandBuffer commandBuffer, const VkVideoCodingControlInfoKHR * pCodingControlInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdControlVideoCodingKHR( commandBuffer, pCodingControlInfo ); + } - //=== VK_KHR_video_decode_queue === + //=== VK_KHR_video_decode_queue === - void vkCmdDecodeVideoKHR( VkCommandBuffer commandBuffer, const VkVideoDecodeInfoKHR * pDecodeInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDecodeVideoKHR( commandBuffer, pDecodeInfo ); - } + void vkCmdDecodeVideoKHR( VkCommandBuffer commandBuffer, const VkVideoDecodeInfoKHR * pDecodeInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDecodeVideoKHR( commandBuffer, pDecodeInfo ); + } - //=== VK_EXT_transform_feedback === + //=== VK_EXT_transform_feedback === - void vkCmdBindTransformFeedbackBuffersEXT( VkCommandBuffer commandBuffer, - uint32_t firstBinding, - uint32_t bindingCount, - const VkBuffer * pBuffers, - const VkDeviceSize * pOffsets, - const VkDeviceSize * pSizes ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBindTransformFeedbackBuffersEXT( commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets, pSizes ); - } + void vkCmdBindTransformFeedbackBuffersEXT( VkCommandBuffer commandBuffer, + uint32_t firstBinding, + uint32_t bindingCount, + const VkBuffer * pBuffers, + const VkDeviceSize * pOffsets, + const VkDeviceSize * pSizes ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBindTransformFeedbackBuffersEXT( commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets, pSizes ); + } - void vkCmdBeginTransformFeedbackEXT( VkCommandBuffer commandBuffer, + void vkCmdBeginTransformFeedbackEXT( VkCommandBuffer 
commandBuffer, + uint32_t firstCounterBuffer, + uint32_t counterBufferCount, + const VkBuffer * pCounterBuffers, + const VkDeviceSize * pCounterBufferOffsets ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBeginTransformFeedbackEXT( commandBuffer, firstCounterBuffer, counterBufferCount, pCounterBuffers, pCounterBufferOffsets ); + } + + void vkCmdEndTransformFeedbackEXT( VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VkBuffer * pCounterBuffers, const VkDeviceSize * pCounterBufferOffsets ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBeginTransformFeedbackEXT( commandBuffer, firstCounterBuffer, counterBufferCount, pCounterBuffers, pCounterBufferOffsets ); - } - - void vkCmdEndTransformFeedbackEXT( VkCommandBuffer commandBuffer, - uint32_t firstCounterBuffer, - uint32_t counterBufferCount, - const VkBuffer * pCounterBuffers, - const VkDeviceSize * pCounterBufferOffsets ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdEndTransformFeedbackEXT( commandBuffer, firstCounterBuffer, counterBufferCount, pCounterBuffers, pCounterBufferOffsets ); - } - - void vkCmdBeginQueryIndexedEXT( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags, uint32_t index ) const - VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBeginQueryIndexedEXT( commandBuffer, queryPool, query, flags, index ); - } - - void vkCmdEndQueryIndexedEXT( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, uint32_t index ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdEndQueryIndexedEXT( commandBuffer, queryPool, query, index ); - } - - void vkCmdDrawIndirectByteCountEXT( VkCommandBuffer commandBuffer, - uint32_t instanceCount, - uint32_t firstInstance, - VkBuffer counterBuffer, - VkDeviceSize counterBufferOffset, - uint32_t counterOffset, - uint32_t vertexStride ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDrawIndirectByteCountEXT( commandBuffer, instanceCount, firstInstance, counterBuffer, counterBufferOffset, counterOffset, vertexStride ); - } - - //=== VK_NVX_binary_import === - - VkResult vkCreateCuModuleNVX( VkDevice device, - const VkCuModuleCreateInfoNVX * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkCuModuleNVX * pModule ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateCuModuleNVX( device, pCreateInfo, pAllocator, pModule ); - } - - VkResult vkCreateCuFunctionNVX( VkDevice device, - const VkCuFunctionCreateInfoNVX * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkCuFunctionNVX * pFunction ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateCuFunctionNVX( device, pCreateInfo, pAllocator, pFunction ); - } - - void vkDestroyCuModuleNVX( VkDevice device, VkCuModuleNVX module, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyCuModuleNVX( device, module, pAllocator ); - } - - void vkDestroyCuFunctionNVX( VkDevice device, VkCuFunctionNVX function, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyCuFunctionNVX( device, function, pAllocator ); - } - - void vkCmdCuLaunchKernelNVX( VkCommandBuffer commandBuffer, const VkCuLaunchInfoNVX * pLaunchInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdCuLaunchKernelNVX( commandBuffer, pLaunchInfo ); - } - - //=== VK_NVX_image_view_handle === - - uint32_t vkGetImageViewHandleNVX( VkDevice device, const VkImageViewHandleInfoNVX * pInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetImageViewHandleNVX( device, pInfo ); - } - - VkResult vkGetImageViewAddressNVX( VkDevice device, 
VkImageView imageView, VkImageViewAddressPropertiesNVX * pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetImageViewAddressNVX( device, imageView, pProperties ); - } - - //=== VK_AMD_draw_indirect_count === - - void vkCmdDrawIndirectCountAMD( VkCommandBuffer commandBuffer, - VkBuffer buffer, - VkDeviceSize offset, - VkBuffer countBuffer, - VkDeviceSize countBufferOffset, - uint32_t maxDrawCount, - uint32_t stride ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDrawIndirectCountAMD( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); - } - - void vkCmdDrawIndexedIndirectCountAMD( VkCommandBuffer commandBuffer, - VkBuffer buffer, - VkDeviceSize offset, - VkBuffer countBuffer, - VkDeviceSize countBufferOffset, - uint32_t maxDrawCount, - uint32_t stride ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDrawIndexedIndirectCountAMD( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); - } - - //=== VK_AMD_shader_info === - - VkResult vkGetShaderInfoAMD( VkDevice device, - VkPipeline pipeline, - VkShaderStageFlagBits shaderStage, - VkShaderInfoTypeAMD infoType, - size_t * pInfoSize, - void * pInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetShaderInfoAMD( device, pipeline, shaderStage, infoType, pInfoSize, pInfo ); - } - - //=== VK_KHR_dynamic_rendering === - - void vkCmdBeginRenderingKHR( VkCommandBuffer commandBuffer, const VkRenderingInfo * pRenderingInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBeginRenderingKHR( commandBuffer, pRenderingInfo ); - } - - void vkCmdEndRenderingKHR( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdEndRenderingKHR( commandBuffer ); - } - -# if defined( VK_USE_PLATFORM_GGP ) - //=== VK_GGP_stream_descriptor_surface === - - VkResult vkCreateStreamDescriptorSurfaceGGP( VkInstance instance, - const VkStreamDescriptorSurfaceCreateInfoGGP * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateStreamDescriptorSurfaceGGP( instance, pCreateInfo, pAllocator, pSurface ); - } -# endif /*VK_USE_PLATFORM_GGP*/ - - //=== VK_NV_external_memory_capabilities === - - VkResult vkGetPhysicalDeviceExternalImageFormatPropertiesNV( VkPhysicalDevice physicalDevice, - VkFormat format, - VkImageType type, - VkImageTiling tiling, - VkImageUsageFlags usage, - VkImageCreateFlags flags, - VkExternalMemoryHandleTypeFlagsNV externalHandleType, - VkExternalImageFormatPropertiesNV * pExternalImageFormatProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceExternalImageFormatPropertiesNV( - physicalDevice, format, type, tiling, usage, flags, externalHandleType, pExternalImageFormatProperties ); - } - -# if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_NV_external_memory_win32 === - - VkResult vkGetMemoryWin32HandleNV( VkDevice device, - VkDeviceMemory memory, - VkExternalMemoryHandleTypeFlagsNV handleType, - HANDLE * pHandle ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetMemoryWin32HandleNV( device, memory, handleType, pHandle ); - } -# endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - //=== VK_KHR_get_physical_device_properties2 === - - void vkGetPhysicalDeviceFeatures2KHR( VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2 * pFeatures ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceFeatures2KHR( physicalDevice, pFeatures ); - } - - void vkGetPhysicalDeviceProperties2KHR( VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2 * pProperties ) const 
VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceProperties2KHR( physicalDevice, pProperties ); - } - - void vkGetPhysicalDeviceFormatProperties2KHR( VkPhysicalDevice physicalDevice, - VkFormat format, - VkFormatProperties2 * pFormatProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceFormatProperties2KHR( physicalDevice, format, pFormatProperties ); - } - - VkResult vkGetPhysicalDeviceImageFormatProperties2KHR( VkPhysicalDevice physicalDevice, - const VkPhysicalDeviceImageFormatInfo2 * pImageFormatInfo, - VkImageFormatProperties2 * pImageFormatProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceImageFormatProperties2KHR( physicalDevice, pImageFormatInfo, pImageFormatProperties ); - } - - void vkGetPhysicalDeviceQueueFamilyProperties2KHR( VkPhysicalDevice physicalDevice, - uint32_t * pQueueFamilyPropertyCount, - VkQueueFamilyProperties2 * pQueueFamilyProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceQueueFamilyProperties2KHR( physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties ); - } - - void vkGetPhysicalDeviceMemoryProperties2KHR( VkPhysicalDevice physicalDevice, - VkPhysicalDeviceMemoryProperties2 * pMemoryProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceMemoryProperties2KHR( physicalDevice, pMemoryProperties ); - } - - void vkGetPhysicalDeviceSparseImageFormatProperties2KHR( VkPhysicalDevice physicalDevice, - const VkPhysicalDeviceSparseImageFormatInfo2 * pFormatInfo, - uint32_t * pPropertyCount, - VkSparseImageFormatProperties2 * pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceSparseImageFormatProperties2KHR( physicalDevice, pFormatInfo, pPropertyCount, pProperties ); - } - - //=== VK_KHR_device_group === - - void vkGetDeviceGroupPeerMemoryFeaturesKHR( VkDevice device, - uint32_t heapIndex, - uint32_t localDeviceIndex, - uint32_t remoteDeviceIndex, - VkPeerMemoryFeatureFlags * pPeerMemoryFeatures ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDeviceGroupPeerMemoryFeaturesKHR( device, heapIndex, localDeviceIndex, remoteDeviceIndex, pPeerMemoryFeatures ); - } - - void vkCmdSetDeviceMaskKHR( VkCommandBuffer commandBuffer, uint32_t deviceMask ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetDeviceMaskKHR( commandBuffer, deviceMask ); - } - - void vkCmdDispatchBaseKHR( VkCommandBuffer commandBuffer, - uint32_t baseGroupX, - uint32_t baseGroupY, - uint32_t baseGroupZ, - uint32_t groupCountX, - uint32_t groupCountY, - uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDispatchBaseKHR( commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ ); - } - -# if defined( VK_USE_PLATFORM_VI_NN ) - //=== VK_NN_vi_surface === - - VkResult vkCreateViSurfaceNN( VkInstance instance, - const VkViSurfaceCreateInfoNN * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateViSurfaceNN( instance, pCreateInfo, pAllocator, pSurface ); - } -# endif /*VK_USE_PLATFORM_VI_NN*/ - - //=== VK_KHR_maintenance1 === - - void vkTrimCommandPoolKHR( VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags ) const VULKAN_HPP_NOEXCEPT - { - return ::vkTrimCommandPoolKHR( device, commandPool, flags ); - } - - //=== VK_KHR_device_group_creation === - - VkResult vkEnumeratePhysicalDeviceGroupsKHR( VkInstance instance, - uint32_t * pPhysicalDeviceGroupCount, - VkPhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties ) const VULKAN_HPP_NOEXCEPT - 
{ - return ::vkEnumeratePhysicalDeviceGroupsKHR( instance, pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties ); - } - - //=== VK_KHR_external_memory_capabilities === - - void vkGetPhysicalDeviceExternalBufferPropertiesKHR( VkPhysicalDevice physicalDevice, - const VkPhysicalDeviceExternalBufferInfo * pExternalBufferInfo, - VkExternalBufferProperties * pExternalBufferProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceExternalBufferPropertiesKHR( physicalDevice, pExternalBufferInfo, pExternalBufferProperties ); - } - -# if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_KHR_external_memory_win32 === - - VkResult vkGetMemoryWin32HandleKHR( VkDevice device, const VkMemoryGetWin32HandleInfoKHR * pGetWin32HandleInfo, HANDLE * pHandle ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetMemoryWin32HandleKHR( device, pGetWin32HandleInfo, pHandle ); - } - - VkResult vkGetMemoryWin32HandlePropertiesKHR( VkDevice device, - VkExternalMemoryHandleTypeFlagBits handleType, - HANDLE handle, - VkMemoryWin32HandlePropertiesKHR * pMemoryWin32HandleProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetMemoryWin32HandlePropertiesKHR( device, handleType, handle, pMemoryWin32HandleProperties ); - } -# endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - //=== VK_KHR_external_memory_fd === - - VkResult vkGetMemoryFdKHR( VkDevice device, const VkMemoryGetFdInfoKHR * pGetFdInfo, int * pFd ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetMemoryFdKHR( device, pGetFdInfo, pFd ); - } - - VkResult vkGetMemoryFdPropertiesKHR( VkDevice device, - VkExternalMemoryHandleTypeFlagBits handleType, - int fd, - VkMemoryFdPropertiesKHR * pMemoryFdProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetMemoryFdPropertiesKHR( device, handleType, fd, pMemoryFdProperties ); - } - - //=== VK_KHR_external_semaphore_capabilities === - - void vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( VkPhysicalDevice physicalDevice, - const VkPhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo, - VkExternalSemaphoreProperties * pExternalSemaphoreProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties ); - } - -# if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_KHR_external_semaphore_win32 === - - VkResult vkImportSemaphoreWin32HandleKHR( VkDevice device, - const VkImportSemaphoreWin32HandleInfoKHR * pImportSemaphoreWin32HandleInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkImportSemaphoreWin32HandleKHR( device, pImportSemaphoreWin32HandleInfo ); - } - - VkResult - vkGetSemaphoreWin32HandleKHR( VkDevice device, const VkSemaphoreGetWin32HandleInfoKHR * pGetWin32HandleInfo, HANDLE * pHandle ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetSemaphoreWin32HandleKHR( device, pGetWin32HandleInfo, pHandle ); - } -# endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - //=== VK_KHR_external_semaphore_fd === - - VkResult vkImportSemaphoreFdKHR( VkDevice device, const VkImportSemaphoreFdInfoKHR * pImportSemaphoreFdInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkImportSemaphoreFdKHR( device, pImportSemaphoreFdInfo ); - } - - VkResult vkGetSemaphoreFdKHR( VkDevice device, const VkSemaphoreGetFdInfoKHR * pGetFdInfo, int * pFd ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetSemaphoreFdKHR( device, pGetFdInfo, pFd ); - } - - //=== VK_KHR_push_descriptor === - - void vkCmdPushDescriptorSetKHR( VkCommandBuffer commandBuffer, - VkPipelineBindPoint pipelineBindPoint, - VkPipelineLayout layout, - uint32_t set, - uint32_t 
descriptorWriteCount, - const VkWriteDescriptorSet * pDescriptorWrites ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdPushDescriptorSetKHR( commandBuffer, pipelineBindPoint, layout, set, descriptorWriteCount, pDescriptorWrites ); - } - - void vkCmdPushDescriptorSetWithTemplateKHR( VkCommandBuffer commandBuffer, - VkDescriptorUpdateTemplate descriptorUpdateTemplate, - VkPipelineLayout layout, - uint32_t set, - const void * pData ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdPushDescriptorSetWithTemplateKHR( commandBuffer, descriptorUpdateTemplate, layout, set, pData ); - } - - //=== VK_EXT_conditional_rendering === - - void vkCmdBeginConditionalRenderingEXT( VkCommandBuffer commandBuffer, - const VkConditionalRenderingBeginInfoEXT * pConditionalRenderingBegin ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBeginConditionalRenderingEXT( commandBuffer, pConditionalRenderingBegin ); - } - - void vkCmdEndConditionalRenderingEXT( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdEndConditionalRenderingEXT( commandBuffer ); - } - - //=== VK_KHR_descriptor_update_template === - - VkResult vkCreateDescriptorUpdateTemplateKHR( VkDevice device, - const VkDescriptorUpdateTemplateCreateInfo * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkDescriptorUpdateTemplate * pDescriptorUpdateTemplate ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateDescriptorUpdateTemplateKHR( device, pCreateInfo, pAllocator, pDescriptorUpdateTemplate ); - } - - void vkDestroyDescriptorUpdateTemplateKHR( VkDevice device, - VkDescriptorUpdateTemplate descriptorUpdateTemplate, - const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyDescriptorUpdateTemplateKHR( device, descriptorUpdateTemplate, pAllocator ); - } - - void vkUpdateDescriptorSetWithTemplateKHR( VkDevice device, - VkDescriptorSet descriptorSet, - VkDescriptorUpdateTemplate descriptorUpdateTemplate, - const void * pData ) const VULKAN_HPP_NOEXCEPT - { - return ::vkUpdateDescriptorSetWithTemplateKHR( device, descriptorSet, descriptorUpdateTemplate, pData ); - } - - //=== VK_NV_clip_space_w_scaling === - - void vkCmdSetViewportWScalingNV( VkCommandBuffer commandBuffer, - uint32_t firstViewport, - uint32_t viewportCount, - const VkViewportWScalingNV * pViewportWScalings ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetViewportWScalingNV( commandBuffer, firstViewport, viewportCount, pViewportWScalings ); - } - - //=== VK_EXT_direct_mode_display === - - VkResult vkReleaseDisplayEXT( VkPhysicalDevice physicalDevice, VkDisplayKHR display ) const VULKAN_HPP_NOEXCEPT - { - return ::vkReleaseDisplayEXT( physicalDevice, display ); - } - -# if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT ) - //=== VK_EXT_acquire_xlib_display === - - VkResult vkAcquireXlibDisplayEXT( VkPhysicalDevice physicalDevice, Display * dpy, VkDisplayKHR display ) const VULKAN_HPP_NOEXCEPT - { - return ::vkAcquireXlibDisplayEXT( physicalDevice, dpy, display ); - } - - VkResult vkGetRandROutputDisplayEXT( VkPhysicalDevice physicalDevice, Display * dpy, RROutput rrOutput, VkDisplayKHR * pDisplay ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetRandROutputDisplayEXT( physicalDevice, dpy, rrOutput, pDisplay ); - } -# endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ - - //=== VK_EXT_display_surface_counter === - - VkResult vkGetPhysicalDeviceSurfaceCapabilities2EXT( VkPhysicalDevice physicalDevice, - VkSurfaceKHR surface, - VkSurfaceCapabilities2EXT * pSurfaceCapabilities ) const VULKAN_HPP_NOEXCEPT - { - return 
::vkGetPhysicalDeviceSurfaceCapabilities2EXT( physicalDevice, surface, pSurfaceCapabilities ); - } - - //=== VK_EXT_display_control === - - VkResult vkDisplayPowerControlEXT( VkDevice device, VkDisplayKHR display, const VkDisplayPowerInfoEXT * pDisplayPowerInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDisplayPowerControlEXT( device, display, pDisplayPowerInfo ); - } - - VkResult vkRegisterDeviceEventEXT( VkDevice device, - const VkDeviceEventInfoEXT * pDeviceEventInfo, - const VkAllocationCallbacks * pAllocator, - VkFence * pFence ) const VULKAN_HPP_NOEXCEPT - { - return ::vkRegisterDeviceEventEXT( device, pDeviceEventInfo, pAllocator, pFence ); - } - - VkResult vkRegisterDisplayEventEXT( VkDevice device, - VkDisplayKHR display, - const VkDisplayEventInfoEXT * pDisplayEventInfo, - const VkAllocationCallbacks * pAllocator, - VkFence * pFence ) const VULKAN_HPP_NOEXCEPT - { - return ::vkRegisterDisplayEventEXT( device, display, pDisplayEventInfo, pAllocator, pFence ); - } - - VkResult vkGetSwapchainCounterEXT( VkDevice device, - VkSwapchainKHR swapchain, - VkSurfaceCounterFlagBitsEXT counter, - uint64_t * pCounterValue ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetSwapchainCounterEXT( device, swapchain, counter, pCounterValue ); - } - - //=== VK_GOOGLE_display_timing === - - VkResult vkGetRefreshCycleDurationGOOGLE( VkDevice device, - VkSwapchainKHR swapchain, - VkRefreshCycleDurationGOOGLE * pDisplayTimingProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetRefreshCycleDurationGOOGLE( device, swapchain, pDisplayTimingProperties ); - } - - VkResult vkGetPastPresentationTimingGOOGLE( VkDevice device, - VkSwapchainKHR swapchain, - uint32_t * pPresentationTimingCount, - VkPastPresentationTimingGOOGLE * pPresentationTimings ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPastPresentationTimingGOOGLE( device, swapchain, pPresentationTimingCount, pPresentationTimings ); - } - - //=== VK_EXT_discard_rectangles === - - void vkCmdSetDiscardRectangleEXT( VkCommandBuffer commandBuffer, - uint32_t firstDiscardRectangle, - uint32_t discardRectangleCount, - const VkRect2D * pDiscardRectangles ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetDiscardRectangleEXT( commandBuffer, firstDiscardRectangle, discardRectangleCount, pDiscardRectangles ); - } - - void vkCmdSetDiscardRectangleEnableEXT( VkCommandBuffer commandBuffer, VkBool32 discardRectangleEnable ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetDiscardRectangleEnableEXT( commandBuffer, discardRectangleEnable ); - } - - void vkCmdSetDiscardRectangleModeEXT( VkCommandBuffer commandBuffer, VkDiscardRectangleModeEXT discardRectangleMode ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetDiscardRectangleModeEXT( commandBuffer, discardRectangleMode ); - } - - //=== VK_EXT_hdr_metadata === - - void vkSetHdrMetadataEXT( VkDevice device, - uint32_t swapchainCount, - const VkSwapchainKHR * pSwapchains, - const VkHdrMetadataEXT * pMetadata ) const VULKAN_HPP_NOEXCEPT - { - return ::vkSetHdrMetadataEXT( device, swapchainCount, pSwapchains, pMetadata ); - } - - //=== VK_KHR_create_renderpass2 === - - VkResult vkCreateRenderPass2KHR( VkDevice device, - const VkRenderPassCreateInfo2 * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkRenderPass * pRenderPass ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateRenderPass2KHR( device, pCreateInfo, pAllocator, pRenderPass ); - } - - void vkCmdBeginRenderPass2KHR( VkCommandBuffer commandBuffer, - const VkRenderPassBeginInfo * pRenderPassBegin, - const VkSubpassBeginInfo * pSubpassBeginInfo ) 
const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBeginRenderPass2KHR( commandBuffer, pRenderPassBegin, pSubpassBeginInfo ); - } - - void vkCmdNextSubpass2KHR( VkCommandBuffer commandBuffer, - const VkSubpassBeginInfo * pSubpassBeginInfo, - const VkSubpassEndInfo * pSubpassEndInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdNextSubpass2KHR( commandBuffer, pSubpassBeginInfo, pSubpassEndInfo ); - } - - void vkCmdEndRenderPass2KHR( VkCommandBuffer commandBuffer, const VkSubpassEndInfo * pSubpassEndInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdEndRenderPass2KHR( commandBuffer, pSubpassEndInfo ); - } - - //=== VK_KHR_shared_presentable_image === - - VkResult vkGetSwapchainStatusKHR( VkDevice device, VkSwapchainKHR swapchain ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetSwapchainStatusKHR( device, swapchain ); - } - - //=== VK_KHR_external_fence_capabilities === - - void vkGetPhysicalDeviceExternalFencePropertiesKHR( VkPhysicalDevice physicalDevice, - const VkPhysicalDeviceExternalFenceInfo * pExternalFenceInfo, - VkExternalFenceProperties * pExternalFenceProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceExternalFencePropertiesKHR( physicalDevice, pExternalFenceInfo, pExternalFenceProperties ); - } - -# if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_KHR_external_fence_win32 === - - VkResult vkImportFenceWin32HandleKHR( VkDevice device, const VkImportFenceWin32HandleInfoKHR * pImportFenceWin32HandleInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkImportFenceWin32HandleKHR( device, pImportFenceWin32HandleInfo ); - } - - VkResult vkGetFenceWin32HandleKHR( VkDevice device, const VkFenceGetWin32HandleInfoKHR * pGetWin32HandleInfo, HANDLE * pHandle ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetFenceWin32HandleKHR( device, pGetWin32HandleInfo, pHandle ); - } -# endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - //=== VK_KHR_external_fence_fd === - - VkResult vkImportFenceFdKHR( VkDevice device, const VkImportFenceFdInfoKHR * pImportFenceFdInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkImportFenceFdKHR( device, pImportFenceFdInfo ); - } - - VkResult vkGetFenceFdKHR( VkDevice device, const VkFenceGetFdInfoKHR * pGetFdInfo, int * pFd ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetFenceFdKHR( device, pGetFdInfo, pFd ); - } - - //=== VK_KHR_performance_query === - - VkResult - vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( VkPhysicalDevice physicalDevice, - uint32_t queueFamilyIndex, - uint32_t * pCounterCount, - VkPerformanceCounterKHR * pCounters, - VkPerformanceCounterDescriptionKHR * pCounterDescriptions ) const VULKAN_HPP_NOEXCEPT - { - return ::vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( - physicalDevice, queueFamilyIndex, pCounterCount, pCounters, pCounterDescriptions ); - } - - void vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( VkPhysicalDevice physicalDevice, - const VkQueryPoolPerformanceCreateInfoKHR * pPerformanceQueryCreateInfo, - uint32_t * pNumPasses ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( physicalDevice, pPerformanceQueryCreateInfo, pNumPasses ); - } - - VkResult vkAcquireProfilingLockKHR( VkDevice device, const VkAcquireProfilingLockInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkAcquireProfilingLockKHR( device, pInfo ); - } - - void vkReleaseProfilingLockKHR( VkDevice device ) const VULKAN_HPP_NOEXCEPT - { - return ::vkReleaseProfilingLockKHR( device ); - } - - //=== VK_KHR_get_surface_capabilities2 === - - VkResult 
vkGetPhysicalDeviceSurfaceCapabilities2KHR( VkPhysicalDevice physicalDevice, - const VkPhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, - VkSurfaceCapabilities2KHR * pSurfaceCapabilities ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceSurfaceCapabilities2KHR( physicalDevice, pSurfaceInfo, pSurfaceCapabilities ); - } - - VkResult vkGetPhysicalDeviceSurfaceFormats2KHR( VkPhysicalDevice physicalDevice, - const VkPhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, - uint32_t * pSurfaceFormatCount, - VkSurfaceFormat2KHR * pSurfaceFormats ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceSurfaceFormats2KHR( physicalDevice, pSurfaceInfo, pSurfaceFormatCount, pSurfaceFormats ); - } - - //=== VK_KHR_get_display_properties2 === - - VkResult vkGetPhysicalDeviceDisplayProperties2KHR( VkPhysicalDevice physicalDevice, - uint32_t * pPropertyCount, - VkDisplayProperties2KHR * pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceDisplayProperties2KHR( physicalDevice, pPropertyCount, pProperties ); - } - - VkResult vkGetPhysicalDeviceDisplayPlaneProperties2KHR( VkPhysicalDevice physicalDevice, - uint32_t * pPropertyCount, - VkDisplayPlaneProperties2KHR * pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceDisplayPlaneProperties2KHR( physicalDevice, pPropertyCount, pProperties ); - } - - VkResult vkGetDisplayModeProperties2KHR( VkPhysicalDevice physicalDevice, - VkDisplayKHR display, - uint32_t * pPropertyCount, - VkDisplayModeProperties2KHR * pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDisplayModeProperties2KHR( physicalDevice, display, pPropertyCount, pProperties ); - } - - VkResult vkGetDisplayPlaneCapabilities2KHR( VkPhysicalDevice physicalDevice, - const VkDisplayPlaneInfo2KHR * pDisplayPlaneInfo, - VkDisplayPlaneCapabilities2KHR * pCapabilities ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDisplayPlaneCapabilities2KHR( physicalDevice, pDisplayPlaneInfo, pCapabilities ); - } - -# if defined( VK_USE_PLATFORM_IOS_MVK ) - //=== VK_MVK_ios_surface === - - VkResult vkCreateIOSSurfaceMVK( VkInstance instance, - const VkIOSSurfaceCreateInfoMVK * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateIOSSurfaceMVK( instance, pCreateInfo, pAllocator, pSurface ); - } -# endif /*VK_USE_PLATFORM_IOS_MVK*/ - -# if defined( VK_USE_PLATFORM_MACOS_MVK ) - //=== VK_MVK_macos_surface === - - VkResult vkCreateMacOSSurfaceMVK( VkInstance instance, - const VkMacOSSurfaceCreateInfoMVK * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateMacOSSurfaceMVK( instance, pCreateInfo, pAllocator, pSurface ); - } -# endif /*VK_USE_PLATFORM_MACOS_MVK*/ - - //=== VK_EXT_debug_utils === - - VkResult vkSetDebugUtilsObjectNameEXT( VkDevice device, const VkDebugUtilsObjectNameInfoEXT * pNameInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkSetDebugUtilsObjectNameEXT( device, pNameInfo ); - } - - VkResult vkSetDebugUtilsObjectTagEXT( VkDevice device, const VkDebugUtilsObjectTagInfoEXT * pTagInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkSetDebugUtilsObjectTagEXT( device, pTagInfo ); - } - - void vkQueueBeginDebugUtilsLabelEXT( VkQueue queue, const VkDebugUtilsLabelEXT * pLabelInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkQueueBeginDebugUtilsLabelEXT( queue, pLabelInfo ); - } - - void vkQueueEndDebugUtilsLabelEXT( VkQueue queue ) const VULKAN_HPP_NOEXCEPT - { - return ::vkQueueEndDebugUtilsLabelEXT( 
queue ); - } - - void vkQueueInsertDebugUtilsLabelEXT( VkQueue queue, const VkDebugUtilsLabelEXT * pLabelInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkQueueInsertDebugUtilsLabelEXT( queue, pLabelInfo ); - } - - void vkCmdBeginDebugUtilsLabelEXT( VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT * pLabelInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBeginDebugUtilsLabelEXT( commandBuffer, pLabelInfo ); - } - - void vkCmdEndDebugUtilsLabelEXT( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdEndDebugUtilsLabelEXT( commandBuffer ); - } - - void vkCmdInsertDebugUtilsLabelEXT( VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT * pLabelInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdInsertDebugUtilsLabelEXT( commandBuffer, pLabelInfo ); - } - - VkResult vkCreateDebugUtilsMessengerEXT( VkInstance instance, - const VkDebugUtilsMessengerCreateInfoEXT * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkDebugUtilsMessengerEXT * pMessenger ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateDebugUtilsMessengerEXT( instance, pCreateInfo, pAllocator, pMessenger ); - } - - void vkDestroyDebugUtilsMessengerEXT( VkInstance instance, - VkDebugUtilsMessengerEXT messenger, - const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyDebugUtilsMessengerEXT( instance, messenger, pAllocator ); - } - - void vkSubmitDebugUtilsMessageEXT( VkInstance instance, - VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, - VkDebugUtilsMessageTypeFlagsEXT messageTypes, - const VkDebugUtilsMessengerCallbackDataEXT * pCallbackData ) const VULKAN_HPP_NOEXCEPT - { - return ::vkSubmitDebugUtilsMessageEXT( instance, messageSeverity, messageTypes, pCallbackData ); - } - -# if defined( VK_USE_PLATFORM_ANDROID_KHR ) - //=== VK_ANDROID_external_memory_android_hardware_buffer === - - VkResult vkGetAndroidHardwareBufferPropertiesANDROID( VkDevice device, - const struct AHardwareBuffer * buffer, - VkAndroidHardwareBufferPropertiesANDROID * pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetAndroidHardwareBufferPropertiesANDROID( device, buffer, pProperties ); - } - - VkResult vkGetMemoryAndroidHardwareBufferANDROID( VkDevice device, - const VkMemoryGetAndroidHardwareBufferInfoANDROID * pInfo, - struct AHardwareBuffer ** pBuffer ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetMemoryAndroidHardwareBufferANDROID( device, pInfo, pBuffer ); - } -# endif /*VK_USE_PLATFORM_ANDROID_KHR*/ - -# if defined( VK_ENABLE_BETA_EXTENSIONS ) - //=== VK_AMDX_shader_enqueue === - - VkResult vkCreateExecutionGraphPipelinesAMDX( VkDevice device, - VkPipelineCache pipelineCache, - uint32_t createInfoCount, - const VkExecutionGraphPipelineCreateInfoAMDX * pCreateInfos, - const VkAllocationCallbacks * pAllocator, - VkPipeline * pPipelines ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateExecutionGraphPipelinesAMDX( device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines ); - } - - VkResult vkGetExecutionGraphPipelineScratchSizeAMDX( VkDevice device, - VkPipeline executionGraph, - VkExecutionGraphPipelineScratchSizeAMDX * pSizeInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetExecutionGraphPipelineScratchSizeAMDX( device, executionGraph, pSizeInfo ); - } - - VkResult vkGetExecutionGraphPipelineNodeIndexAMDX( VkDevice device, - VkPipeline executionGraph, - const VkPipelineShaderStageNodeCreateInfoAMDX * pNodeInfo, - uint32_t * pNodeIndex ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetExecutionGraphPipelineNodeIndexAMDX( 
device, executionGraph, pNodeInfo, pNodeIndex ); - } - - void vkCmdInitializeGraphScratchMemoryAMDX( VkCommandBuffer commandBuffer, VkDeviceAddress scratch ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdInitializeGraphScratchMemoryAMDX( commandBuffer, scratch ); - } - - void vkCmdDispatchGraphAMDX( VkCommandBuffer commandBuffer, - VkDeviceAddress scratch, - const VkDispatchGraphCountInfoAMDX * pCountInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDispatchGraphAMDX( commandBuffer, scratch, pCountInfo ); - } - - void vkCmdDispatchGraphIndirectAMDX( VkCommandBuffer commandBuffer, - VkDeviceAddress scratch, - const VkDispatchGraphCountInfoAMDX * pCountInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDispatchGraphIndirectAMDX( commandBuffer, scratch, pCountInfo ); - } - - void vkCmdDispatchGraphIndirectCountAMDX( VkCommandBuffer commandBuffer, VkDeviceAddress scratch, VkDeviceAddress countInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDispatchGraphIndirectCountAMDX( commandBuffer, scratch, countInfo ); - } -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ - - //=== VK_EXT_sample_locations === - - void vkCmdSetSampleLocationsEXT( VkCommandBuffer commandBuffer, const VkSampleLocationsInfoEXT * pSampleLocationsInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetSampleLocationsEXT( commandBuffer, pSampleLocationsInfo ); - } - - void vkGetPhysicalDeviceMultisamplePropertiesEXT( VkPhysicalDevice physicalDevice, - VkSampleCountFlagBits samples, - VkMultisamplePropertiesEXT * pMultisampleProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceMultisamplePropertiesEXT( physicalDevice, samples, pMultisampleProperties ); - } - - //=== VK_KHR_get_memory_requirements2 === - - void vkGetImageMemoryRequirements2KHR( VkDevice device, - const VkImageMemoryRequirementsInfo2 * pInfo, - VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetImageMemoryRequirements2KHR( device, pInfo, pMemoryRequirements ); - } - - void vkGetBufferMemoryRequirements2KHR( VkDevice device, - const VkBufferMemoryRequirementsInfo2 * pInfo, - VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetBufferMemoryRequirements2KHR( device, pInfo, pMemoryRequirements ); - } - - void vkGetImageSparseMemoryRequirements2KHR( VkDevice device, - const VkImageSparseMemoryRequirementsInfo2 * pInfo, - uint32_t * pSparseMemoryRequirementCount, - VkSparseImageMemoryRequirements2 * pSparseMemoryRequirements ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetImageSparseMemoryRequirements2KHR( device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements ); - } - - //=== VK_KHR_acceleration_structure === - - VkResult vkCreateAccelerationStructureKHR( VkDevice device, - const VkAccelerationStructureCreateInfoKHR * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkAccelerationStructureKHR * pAccelerationStructure ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateAccelerationStructureKHR( device, pCreateInfo, pAllocator, pAccelerationStructure ); - } - - void vkDestroyAccelerationStructureKHR( VkDevice device, - VkAccelerationStructureKHR accelerationStructure, - const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyAccelerationStructureKHR( device, accelerationStructure, pAllocator ); - } - - void vkCmdBuildAccelerationStructuresKHR( VkCommandBuffer commandBuffer, - uint32_t infoCount, - const VkAccelerationStructureBuildGeometryInfoKHR * pInfos, - const VkAccelerationStructureBuildRangeInfoKHR * const 
* ppBuildRangeInfos ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBuildAccelerationStructuresKHR( commandBuffer, infoCount, pInfos, ppBuildRangeInfos ); - } - - void vkCmdBuildAccelerationStructuresIndirectKHR( VkCommandBuffer commandBuffer, - uint32_t infoCount, - const VkAccelerationStructureBuildGeometryInfoKHR * pInfos, - const VkDeviceAddress * pIndirectDeviceAddresses, - const uint32_t * pIndirectStrides, - const uint32_t * const * ppMaxPrimitiveCounts ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBuildAccelerationStructuresIndirectKHR( - commandBuffer, infoCount, pInfos, pIndirectDeviceAddresses, pIndirectStrides, ppMaxPrimitiveCounts ); - } - - VkResult vkBuildAccelerationStructuresKHR( VkDevice device, - VkDeferredOperationKHR deferredOperation, - uint32_t infoCount, - const VkAccelerationStructureBuildGeometryInfoKHR * pInfos, - const VkAccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos ) const VULKAN_HPP_NOEXCEPT - { - return ::vkBuildAccelerationStructuresKHR( device, deferredOperation, infoCount, pInfos, ppBuildRangeInfos ); - } - - VkResult vkCopyAccelerationStructureKHR( VkDevice device, - VkDeferredOperationKHR deferredOperation, - const VkCopyAccelerationStructureInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCopyAccelerationStructureKHR( device, deferredOperation, pInfo ); - } - - VkResult vkCopyAccelerationStructureToMemoryKHR( VkDevice device, - VkDeferredOperationKHR deferredOperation, - const VkCopyAccelerationStructureToMemoryInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCopyAccelerationStructureToMemoryKHR( device, deferredOperation, pInfo ); - } - - VkResult vkCopyMemoryToAccelerationStructureKHR( VkDevice device, - VkDeferredOperationKHR deferredOperation, - const VkCopyMemoryToAccelerationStructureInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCopyMemoryToAccelerationStructureKHR( device, deferredOperation, pInfo ); - } - - VkResult vkWriteAccelerationStructuresPropertiesKHR( VkDevice device, - uint32_t accelerationStructureCount, - const VkAccelerationStructureKHR * pAccelerationStructures, - VkQueryType queryType, - size_t dataSize, - void * pData, - size_t stride ) const VULKAN_HPP_NOEXCEPT - { - return ::vkWriteAccelerationStructuresPropertiesKHR( device, accelerationStructureCount, pAccelerationStructures, queryType, dataSize, pData, stride ); - } - - void vkCmdCopyAccelerationStructureKHR( VkCommandBuffer commandBuffer, const VkCopyAccelerationStructureInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdCopyAccelerationStructureKHR( commandBuffer, pInfo ); - } - - void vkCmdCopyAccelerationStructureToMemoryKHR( VkCommandBuffer commandBuffer, - const VkCopyAccelerationStructureToMemoryInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdCopyAccelerationStructureToMemoryKHR( commandBuffer, pInfo ); - } - - void vkCmdCopyMemoryToAccelerationStructureKHR( VkCommandBuffer commandBuffer, - const VkCopyMemoryToAccelerationStructureInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdCopyMemoryToAccelerationStructureKHR( commandBuffer, pInfo ); - } - - VkDeviceAddress vkGetAccelerationStructureDeviceAddressKHR( VkDevice device, - const VkAccelerationStructureDeviceAddressInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetAccelerationStructureDeviceAddressKHR( device, pInfo ); - } - - void vkCmdWriteAccelerationStructuresPropertiesKHR( VkCommandBuffer commandBuffer, - uint32_t accelerationStructureCount, - const VkAccelerationStructureKHR * 
pAccelerationStructures, - VkQueryType queryType, - VkQueryPool queryPool, - uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdWriteAccelerationStructuresPropertiesKHR( - commandBuffer, accelerationStructureCount, pAccelerationStructures, queryType, queryPool, firstQuery ); - } - - void vkGetDeviceAccelerationStructureCompatibilityKHR( VkDevice device, - const VkAccelerationStructureVersionInfoKHR * pVersionInfo, - VkAccelerationStructureCompatibilityKHR * pCompatibility ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDeviceAccelerationStructureCompatibilityKHR( device, pVersionInfo, pCompatibility ); - } - - void vkGetAccelerationStructureBuildSizesKHR( VkDevice device, - VkAccelerationStructureBuildTypeKHR buildType, - const VkAccelerationStructureBuildGeometryInfoKHR * pBuildInfo, - const uint32_t * pMaxPrimitiveCounts, - VkAccelerationStructureBuildSizesInfoKHR * pSizeInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetAccelerationStructureBuildSizesKHR( device, buildType, pBuildInfo, pMaxPrimitiveCounts, pSizeInfo ); - } - - //=== VK_KHR_ray_tracing_pipeline === - - void vkCmdTraceRaysKHR( VkCommandBuffer commandBuffer, - const VkStridedDeviceAddressRegionKHR * pRaygenShaderBindingTable, - const VkStridedDeviceAddressRegionKHR * pMissShaderBindingTable, - const VkStridedDeviceAddressRegionKHR * pHitShaderBindingTable, - const VkStridedDeviceAddressRegionKHR * pCallableShaderBindingTable, - uint32_t width, - uint32_t height, - uint32_t depth ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdTraceRaysKHR( - commandBuffer, pRaygenShaderBindingTable, pMissShaderBindingTable, pHitShaderBindingTable, pCallableShaderBindingTable, width, height, depth ); - } - - VkResult vkCreateRayTracingPipelinesKHR( VkDevice device, - VkDeferredOperationKHR deferredOperation, - VkPipelineCache pipelineCache, - uint32_t createInfoCount, - const VkRayTracingPipelineCreateInfoKHR * pCreateInfos, - const VkAllocationCallbacks * pAllocator, - VkPipeline * pPipelines ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateRayTracingPipelinesKHR( device, deferredOperation, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines ); - } - - VkResult vkGetRayTracingShaderGroupHandlesKHR( - VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void * pData ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetRayTracingShaderGroupHandlesKHR( device, pipeline, firstGroup, groupCount, dataSize, pData ); - } - - VkResult vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( - VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void * pData ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( device, pipeline, firstGroup, groupCount, dataSize, pData ); - } - - void vkCmdTraceRaysIndirectKHR( VkCommandBuffer commandBuffer, - const VkStridedDeviceAddressRegionKHR * pRaygenShaderBindingTable, - const VkStridedDeviceAddressRegionKHR * pMissShaderBindingTable, - const VkStridedDeviceAddressRegionKHR * pHitShaderBindingTable, - const VkStridedDeviceAddressRegionKHR * pCallableShaderBindingTable, - VkDeviceAddress indirectDeviceAddress ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdTraceRaysIndirectKHR( - commandBuffer, pRaygenShaderBindingTable, pMissShaderBindingTable, pHitShaderBindingTable, pCallableShaderBindingTable, indirectDeviceAddress ); - } - - VkDeviceSize vkGetRayTracingShaderGroupStackSizeKHR( VkDevice device, - VkPipeline pipeline, - uint32_t group, - 
VkShaderGroupShaderKHR groupShader ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetRayTracingShaderGroupStackSizeKHR( device, pipeline, group, groupShader ); - } - - void vkCmdSetRayTracingPipelineStackSizeKHR( VkCommandBuffer commandBuffer, uint32_t pipelineStackSize ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetRayTracingPipelineStackSizeKHR( commandBuffer, pipelineStackSize ); - } - - //=== VK_KHR_sampler_ycbcr_conversion === - - VkResult vkCreateSamplerYcbcrConversionKHR( VkDevice device, - const VkSamplerYcbcrConversionCreateInfo * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkSamplerYcbcrConversion * pYcbcrConversion ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateSamplerYcbcrConversionKHR( device, pCreateInfo, pAllocator, pYcbcrConversion ); - } - - void vkDestroySamplerYcbcrConversionKHR( VkDevice device, - VkSamplerYcbcrConversion ycbcrConversion, - const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroySamplerYcbcrConversionKHR( device, ycbcrConversion, pAllocator ); - } - - //=== VK_KHR_bind_memory2 === - - VkResult vkBindBufferMemory2KHR( VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo * pBindInfos ) const VULKAN_HPP_NOEXCEPT - { - return ::vkBindBufferMemory2KHR( device, bindInfoCount, pBindInfos ); - } - - VkResult vkBindImageMemory2KHR( VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo * pBindInfos ) const VULKAN_HPP_NOEXCEPT - { - return ::vkBindImageMemory2KHR( device, bindInfoCount, pBindInfos ); - } - - //=== VK_EXT_image_drm_format_modifier === - - VkResult - vkGetImageDrmFormatModifierPropertiesEXT( VkDevice device, VkImage image, VkImageDrmFormatModifierPropertiesEXT * pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetImageDrmFormatModifierPropertiesEXT( device, image, pProperties ); - } - - //=== VK_EXT_validation_cache === - - VkResult vkCreateValidationCacheEXT( VkDevice device, - const VkValidationCacheCreateInfoEXT * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkValidationCacheEXT * pValidationCache ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateValidationCacheEXT( device, pCreateInfo, pAllocator, pValidationCache ); - } - - void - vkDestroyValidationCacheEXT( VkDevice device, VkValidationCacheEXT validationCache, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyValidationCacheEXT( device, validationCache, pAllocator ); - } - - VkResult vkMergeValidationCachesEXT( VkDevice device, - VkValidationCacheEXT dstCache, - uint32_t srcCacheCount, - const VkValidationCacheEXT * pSrcCaches ) const VULKAN_HPP_NOEXCEPT - { - return ::vkMergeValidationCachesEXT( device, dstCache, srcCacheCount, pSrcCaches ); - } - - VkResult vkGetValidationCacheDataEXT( VkDevice device, VkValidationCacheEXT validationCache, size_t * pDataSize, void * pData ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetValidationCacheDataEXT( device, validationCache, pDataSize, pData ); - } - - //=== VK_NV_shading_rate_image === - - void vkCmdBindShadingRateImageNV( VkCommandBuffer commandBuffer, VkImageView imageView, VkImageLayout imageLayout ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBindShadingRateImageNV( commandBuffer, imageView, imageLayout ); - } - - void vkCmdSetViewportShadingRatePaletteNV( VkCommandBuffer commandBuffer, - uint32_t firstViewport, - uint32_t viewportCount, - const VkShadingRatePaletteNV * pShadingRatePalettes ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetViewportShadingRatePaletteNV( commandBuffer, 
firstViewport, viewportCount, pShadingRatePalettes ); - } - - void vkCmdSetCoarseSampleOrderNV( VkCommandBuffer commandBuffer, - VkCoarseSampleOrderTypeNV sampleOrderType, - uint32_t customSampleOrderCount, - const VkCoarseSampleOrderCustomNV * pCustomSampleOrders ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetCoarseSampleOrderNV( commandBuffer, sampleOrderType, customSampleOrderCount, pCustomSampleOrders ); - } - - //=== VK_NV_ray_tracing === - - VkResult vkCreateAccelerationStructureNV( VkDevice device, - const VkAccelerationStructureCreateInfoNV * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkAccelerationStructureNV * pAccelerationStructure ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateAccelerationStructureNV( device, pCreateInfo, pAllocator, pAccelerationStructure ); - } - - void vkDestroyAccelerationStructureNV( VkDevice device, - VkAccelerationStructureNV accelerationStructure, - const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyAccelerationStructureNV( device, accelerationStructure, pAllocator ); - } - - void vkGetAccelerationStructureMemoryRequirementsNV( VkDevice device, - const VkAccelerationStructureMemoryRequirementsInfoNV * pInfo, - VkMemoryRequirements2KHR * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetAccelerationStructureMemoryRequirementsNV( device, pInfo, pMemoryRequirements ); - } - - VkResult vkBindAccelerationStructureMemoryNV( VkDevice device, - uint32_t bindInfoCount, - const VkBindAccelerationStructureMemoryInfoNV * pBindInfos ) const VULKAN_HPP_NOEXCEPT - { - return ::vkBindAccelerationStructureMemoryNV( device, bindInfoCount, pBindInfos ); - } - - void vkCmdBuildAccelerationStructureNV( VkCommandBuffer commandBuffer, - const VkAccelerationStructureInfoNV * pInfo, - VkBuffer instanceData, - VkDeviceSize instanceOffset, - VkBool32 update, - VkAccelerationStructureNV dst, - VkAccelerationStructureNV src, - VkBuffer scratch, - VkDeviceSize scratchOffset ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBuildAccelerationStructureNV( commandBuffer, pInfo, instanceData, instanceOffset, update, dst, src, scratch, scratchOffset ); - } - - void vkCmdCopyAccelerationStructureNV( VkCommandBuffer commandBuffer, - VkAccelerationStructureNV dst, - VkAccelerationStructureNV src, - VkCopyAccelerationStructureModeKHR mode ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdCopyAccelerationStructureNV( commandBuffer, dst, src, mode ); - } - - void vkCmdTraceRaysNV( VkCommandBuffer commandBuffer, - VkBuffer raygenShaderBindingTableBuffer, - VkDeviceSize raygenShaderBindingOffset, - VkBuffer missShaderBindingTableBuffer, - VkDeviceSize missShaderBindingOffset, - VkDeviceSize missShaderBindingStride, - VkBuffer hitShaderBindingTableBuffer, - VkDeviceSize hitShaderBindingOffset, - VkDeviceSize hitShaderBindingStride, - VkBuffer callableShaderBindingTableBuffer, - VkDeviceSize callableShaderBindingOffset, - VkDeviceSize callableShaderBindingStride, - uint32_t width, - uint32_t height, - uint32_t depth ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdTraceRaysNV( commandBuffer, - raygenShaderBindingTableBuffer, - raygenShaderBindingOffset, - missShaderBindingTableBuffer, - missShaderBindingOffset, - missShaderBindingStride, - hitShaderBindingTableBuffer, - hitShaderBindingOffset, - hitShaderBindingStride, - callableShaderBindingTableBuffer, - callableShaderBindingOffset, - callableShaderBindingStride, - width, - height, - depth ); - } - - VkResult vkCreateRayTracingPipelinesNV( VkDevice device, - 
VkPipelineCache pipelineCache, - uint32_t createInfoCount, - const VkRayTracingPipelineCreateInfoNV * pCreateInfos, - const VkAllocationCallbacks * pAllocator, - VkPipeline * pPipelines ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateRayTracingPipelinesNV( device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines ); - } - - VkResult vkGetRayTracingShaderGroupHandlesNV( - VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void * pData ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetRayTracingShaderGroupHandlesNV( device, pipeline, firstGroup, groupCount, dataSize, pData ); - } - - VkResult vkGetAccelerationStructureHandleNV( VkDevice device, - VkAccelerationStructureNV accelerationStructure, - size_t dataSize, - void * pData ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetAccelerationStructureHandleNV( device, accelerationStructure, dataSize, pData ); - } - - void vkCmdWriteAccelerationStructuresPropertiesNV( VkCommandBuffer commandBuffer, - uint32_t accelerationStructureCount, - const VkAccelerationStructureNV * pAccelerationStructures, - VkQueryType queryType, - VkQueryPool queryPool, - uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdWriteAccelerationStructuresPropertiesNV( - commandBuffer, accelerationStructureCount, pAccelerationStructures, queryType, queryPool, firstQuery ); - } - - VkResult vkCompileDeferredNV( VkDevice device, VkPipeline pipeline, uint32_t shader ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCompileDeferredNV( device, pipeline, shader ); - } - - //=== VK_KHR_maintenance3 === - - void vkGetDescriptorSetLayoutSupportKHR( VkDevice device, - const VkDescriptorSetLayoutCreateInfo * pCreateInfo, - VkDescriptorSetLayoutSupport * pSupport ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDescriptorSetLayoutSupportKHR( device, pCreateInfo, pSupport ); - } - - //=== VK_KHR_draw_indirect_count === - - void vkCmdDrawIndirectCountKHR( VkCommandBuffer commandBuffer, - VkBuffer buffer, - VkDeviceSize offset, - VkBuffer countBuffer, - VkDeviceSize countBufferOffset, - uint32_t maxDrawCount, - uint32_t stride ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDrawIndirectCountKHR( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); - } - - void vkCmdDrawIndexedIndirectCountKHR( VkCommandBuffer commandBuffer, - VkBuffer buffer, - VkDeviceSize offset, - VkBuffer countBuffer, - VkDeviceSize countBufferOffset, - uint32_t maxDrawCount, - uint32_t stride ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDrawIndexedIndirectCountKHR( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); - } - - //=== VK_EXT_external_memory_host === - - VkResult vkGetMemoryHostPointerPropertiesEXT( VkDevice device, - VkExternalMemoryHandleTypeFlagBits handleType, - const void * pHostPointer, - VkMemoryHostPointerPropertiesEXT * pMemoryHostPointerProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetMemoryHostPointerPropertiesEXT( device, handleType, pHostPointer, pMemoryHostPointerProperties ); - } - - //=== VK_AMD_buffer_marker === - - void vkCmdWriteBufferMarkerAMD( VkCommandBuffer commandBuffer, - VkPipelineStageFlagBits pipelineStage, - VkBuffer dstBuffer, - VkDeviceSize dstOffset, - uint32_t marker ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdWriteBufferMarkerAMD( commandBuffer, pipelineStage, dstBuffer, dstOffset, marker ); - } - - //=== VK_EXT_calibrated_timestamps === - - VkResult vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( VkPhysicalDevice 
physicalDevice, - uint32_t * pTimeDomainCount, - VkTimeDomainKHR * pTimeDomains ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( physicalDevice, pTimeDomainCount, pTimeDomains ); - } - - VkResult vkGetCalibratedTimestampsEXT( VkDevice device, - uint32_t timestampCount, - const VkCalibratedTimestampInfoKHR * pTimestampInfos, - uint64_t * pTimestamps, - uint64_t * pMaxDeviation ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetCalibratedTimestampsEXT( device, timestampCount, pTimestampInfos, pTimestamps, pMaxDeviation ); - } - - //=== VK_NV_mesh_shader === - - void vkCmdDrawMeshTasksNV( VkCommandBuffer commandBuffer, uint32_t taskCount, uint32_t firstTask ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDrawMeshTasksNV( commandBuffer, taskCount, firstTask ); - } - - void vkCmdDrawMeshTasksIndirectNV( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride ) const - VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDrawMeshTasksIndirectNV( commandBuffer, buffer, offset, drawCount, stride ); - } - - void vkCmdDrawMeshTasksIndirectCountNV( VkCommandBuffer commandBuffer, - VkBuffer buffer, - VkDeviceSize offset, - VkBuffer countBuffer, - VkDeviceSize countBufferOffset, - uint32_t maxDrawCount, - uint32_t stride ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDrawMeshTasksIndirectCountNV( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); - } - - //=== VK_NV_scissor_exclusive === - - void vkCmdSetExclusiveScissorEnableNV( VkCommandBuffer commandBuffer, - uint32_t firstExclusiveScissor, - uint32_t exclusiveScissorCount, - const VkBool32 * pExclusiveScissorEnables ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetExclusiveScissorEnableNV( commandBuffer, firstExclusiveScissor, exclusiveScissorCount, pExclusiveScissorEnables ); - } - - void vkCmdSetExclusiveScissorNV( VkCommandBuffer commandBuffer, - uint32_t firstExclusiveScissor, - uint32_t exclusiveScissorCount, - const VkRect2D * pExclusiveScissors ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetExclusiveScissorNV( commandBuffer, firstExclusiveScissor, exclusiveScissorCount, pExclusiveScissors ); - } - - //=== VK_NV_device_diagnostic_checkpoints === - - void vkCmdSetCheckpointNV( VkCommandBuffer commandBuffer, const void * pCheckpointMarker ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetCheckpointNV( commandBuffer, pCheckpointMarker ); - } - - void vkGetQueueCheckpointDataNV( VkQueue queue, uint32_t * pCheckpointDataCount, VkCheckpointDataNV * pCheckpointData ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetQueueCheckpointDataNV( queue, pCheckpointDataCount, pCheckpointData ); - } - - //=== VK_KHR_timeline_semaphore === - - VkResult vkGetSemaphoreCounterValueKHR( VkDevice device, VkSemaphore semaphore, uint64_t * pValue ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetSemaphoreCounterValueKHR( device, semaphore, pValue ); - } - - VkResult vkWaitSemaphoresKHR( VkDevice device, const VkSemaphoreWaitInfo * pWaitInfo, uint64_t timeout ) const VULKAN_HPP_NOEXCEPT - { - return ::vkWaitSemaphoresKHR( device, pWaitInfo, timeout ); - } - - VkResult vkSignalSemaphoreKHR( VkDevice device, const VkSemaphoreSignalInfo * pSignalInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkSignalSemaphoreKHR( device, pSignalInfo ); - } - - //=== VK_INTEL_performance_query === - - VkResult vkInitializePerformanceApiINTEL( VkDevice device, const VkInitializePerformanceApiInfoINTEL * pInitializeInfo ) const VULKAN_HPP_NOEXCEPT - { - return 
::vkInitializePerformanceApiINTEL( device, pInitializeInfo ); - } - - void vkUninitializePerformanceApiINTEL( VkDevice device ) const VULKAN_HPP_NOEXCEPT - { - return ::vkUninitializePerformanceApiINTEL( device ); - } - - VkResult vkCmdSetPerformanceMarkerINTEL( VkCommandBuffer commandBuffer, const VkPerformanceMarkerInfoINTEL * pMarkerInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetPerformanceMarkerINTEL( commandBuffer, pMarkerInfo ); - } - - VkResult vkCmdSetPerformanceStreamMarkerINTEL( VkCommandBuffer commandBuffer, - const VkPerformanceStreamMarkerInfoINTEL * pMarkerInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetPerformanceStreamMarkerINTEL( commandBuffer, pMarkerInfo ); - } - - VkResult vkCmdSetPerformanceOverrideINTEL( VkCommandBuffer commandBuffer, const VkPerformanceOverrideInfoINTEL * pOverrideInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetPerformanceOverrideINTEL( commandBuffer, pOverrideInfo ); - } - - VkResult vkAcquirePerformanceConfigurationINTEL( VkDevice device, - const VkPerformanceConfigurationAcquireInfoINTEL * pAcquireInfo, - VkPerformanceConfigurationINTEL * pConfiguration ) const VULKAN_HPP_NOEXCEPT - { - return ::vkAcquirePerformanceConfigurationINTEL( device, pAcquireInfo, pConfiguration ); - } - - VkResult vkReleasePerformanceConfigurationINTEL( VkDevice device, VkPerformanceConfigurationINTEL configuration ) const VULKAN_HPP_NOEXCEPT - { - return ::vkReleasePerformanceConfigurationINTEL( device, configuration ); - } - - VkResult vkQueueSetPerformanceConfigurationINTEL( VkQueue queue, VkPerformanceConfigurationINTEL configuration ) const VULKAN_HPP_NOEXCEPT - { - return ::vkQueueSetPerformanceConfigurationINTEL( queue, configuration ); - } - - VkResult - vkGetPerformanceParameterINTEL( VkDevice device, VkPerformanceParameterTypeINTEL parameter, VkPerformanceValueINTEL * pValue ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPerformanceParameterINTEL( device, parameter, pValue ); - } - - //=== VK_AMD_display_native_hdr === - - void vkSetLocalDimmingAMD( VkDevice device, VkSwapchainKHR swapChain, VkBool32 localDimmingEnable ) const VULKAN_HPP_NOEXCEPT - { - return ::vkSetLocalDimmingAMD( device, swapChain, localDimmingEnable ); - } - -# if defined( VK_USE_PLATFORM_FUCHSIA ) - //=== VK_FUCHSIA_imagepipe_surface === - - VkResult vkCreateImagePipeSurfaceFUCHSIA( VkInstance instance, - const VkImagePipeSurfaceCreateInfoFUCHSIA * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateImagePipeSurfaceFUCHSIA( instance, pCreateInfo, pAllocator, pSurface ); - } -# endif /*VK_USE_PLATFORM_FUCHSIA*/ - -# if defined( VK_USE_PLATFORM_METAL_EXT ) - //=== VK_EXT_metal_surface === - - VkResult vkCreateMetalSurfaceEXT( VkInstance instance, - const VkMetalSurfaceCreateInfoEXT * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateMetalSurfaceEXT( instance, pCreateInfo, pAllocator, pSurface ); - } -# endif /*VK_USE_PLATFORM_METAL_EXT*/ - - //=== VK_KHR_fragment_shading_rate === - - VkResult vkGetPhysicalDeviceFragmentShadingRatesKHR( VkPhysicalDevice physicalDevice, - uint32_t * pFragmentShadingRateCount, - VkPhysicalDeviceFragmentShadingRateKHR * pFragmentShadingRates ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceFragmentShadingRatesKHR( physicalDevice, pFragmentShadingRateCount, pFragmentShadingRates ); - } - - void vkCmdSetFragmentShadingRateKHR( VkCommandBuffer commandBuffer, - 
const VkExtent2D * pFragmentSize, - const VkFragmentShadingRateCombinerOpKHR combinerOps[2] ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetFragmentShadingRateKHR( commandBuffer, pFragmentSize, combinerOps ); - } - - //=== VK_KHR_dynamic_rendering_local_read === - - void vkCmdSetRenderingAttachmentLocationsKHR( VkCommandBuffer commandBuffer, - const VkRenderingAttachmentLocationInfoKHR * pLocationInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetRenderingAttachmentLocationsKHR( commandBuffer, pLocationInfo ); - } - - void vkCmdSetRenderingInputAttachmentIndicesKHR( VkCommandBuffer commandBuffer, - const VkRenderingInputAttachmentIndexInfoKHR * pInputAttachmentIndexInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetRenderingInputAttachmentIndicesKHR( commandBuffer, pInputAttachmentIndexInfo ); - } - - //=== VK_EXT_buffer_device_address === - - VkDeviceAddress vkGetBufferDeviceAddressEXT( VkDevice device, const VkBufferDeviceAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetBufferDeviceAddressEXT( device, pInfo ); - } - - //=== VK_EXT_tooling_info === - - VkResult vkGetPhysicalDeviceToolPropertiesEXT( VkPhysicalDevice physicalDevice, - uint32_t * pToolCount, - VkPhysicalDeviceToolProperties * pToolProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceToolPropertiesEXT( physicalDevice, pToolCount, pToolProperties ); - } - - //=== VK_KHR_present_wait === - - VkResult vkWaitForPresentKHR( VkDevice device, VkSwapchainKHR swapchain, uint64_t presentId, uint64_t timeout ) const VULKAN_HPP_NOEXCEPT - { - return ::vkWaitForPresentKHR( device, swapchain, presentId, timeout ); - } - - //=== VK_NV_cooperative_matrix === - - VkResult vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( VkPhysicalDevice physicalDevice, - uint32_t * pPropertyCount, - VkCooperativeMatrixPropertiesNV * pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( physicalDevice, pPropertyCount, pProperties ); - } - - //=== VK_NV_coverage_reduction_mode === - - VkResult vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( - VkPhysicalDevice physicalDevice, uint32_t * pCombinationCount, VkFramebufferMixedSamplesCombinationNV * pCombinations ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( physicalDevice, pCombinationCount, pCombinations ); - } - -# if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_EXT_full_screen_exclusive === - - VkResult vkGetPhysicalDeviceSurfacePresentModes2EXT( VkPhysicalDevice physicalDevice, - const VkPhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, - uint32_t * pPresentModeCount, - VkPresentModeKHR * pPresentModes ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceSurfacePresentModes2EXT( physicalDevice, pSurfaceInfo, pPresentModeCount, pPresentModes ); - } - - VkResult vkAcquireFullScreenExclusiveModeEXT( VkDevice device, VkSwapchainKHR swapchain ) const VULKAN_HPP_NOEXCEPT - { - return ::vkAcquireFullScreenExclusiveModeEXT( device, swapchain ); - } - - VkResult vkReleaseFullScreenExclusiveModeEXT( VkDevice device, VkSwapchainKHR swapchain ) const VULKAN_HPP_NOEXCEPT - { - return ::vkReleaseFullScreenExclusiveModeEXT( device, swapchain ); - } - - VkResult vkGetDeviceGroupSurfacePresentModes2EXT( VkDevice device, - const VkPhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, - VkDeviceGroupPresentModeFlagsKHR * pModes ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDeviceGroupSurfacePresentModes2EXT( device, pSurfaceInfo, pModes ); - } 
-# endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - //=== VK_EXT_headless_surface === - - VkResult vkCreateHeadlessSurfaceEXT( VkInstance instance, - const VkHeadlessSurfaceCreateInfoEXT * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateHeadlessSurfaceEXT( instance, pCreateInfo, pAllocator, pSurface ); - } - - //=== VK_KHR_buffer_device_address === - - VkDeviceAddress vkGetBufferDeviceAddressKHR( VkDevice device, const VkBufferDeviceAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetBufferDeviceAddressKHR( device, pInfo ); - } - - uint64_t vkGetBufferOpaqueCaptureAddressKHR( VkDevice device, const VkBufferDeviceAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetBufferOpaqueCaptureAddressKHR( device, pInfo ); - } - - uint64_t vkGetDeviceMemoryOpaqueCaptureAddressKHR( VkDevice device, const VkDeviceMemoryOpaqueCaptureAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDeviceMemoryOpaqueCaptureAddressKHR( device, pInfo ); - } - - //=== VK_EXT_line_rasterization === - - void vkCmdSetLineStippleEXT( VkCommandBuffer commandBuffer, uint32_t lineStippleFactor, uint16_t lineStipplePattern ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetLineStippleEXT( commandBuffer, lineStippleFactor, lineStipplePattern ); - } - - //=== VK_EXT_host_query_reset === - - void vkResetQueryPoolEXT( VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT - { - return ::vkResetQueryPoolEXT( device, queryPool, firstQuery, queryCount ); - } - - //=== VK_EXT_extended_dynamic_state === - - void vkCmdSetCullModeEXT( VkCommandBuffer commandBuffer, VkCullModeFlags cullMode ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetCullModeEXT( commandBuffer, cullMode ); - } - - void vkCmdSetFrontFaceEXT( VkCommandBuffer commandBuffer, VkFrontFace frontFace ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetFrontFaceEXT( commandBuffer, frontFace ); - } - - void vkCmdSetPrimitiveTopologyEXT( VkCommandBuffer commandBuffer, VkPrimitiveTopology primitiveTopology ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetPrimitiveTopologyEXT( commandBuffer, primitiveTopology ); - } - - void vkCmdSetViewportWithCountEXT( VkCommandBuffer commandBuffer, uint32_t viewportCount, const VkViewport * pViewports ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetViewportWithCountEXT( commandBuffer, viewportCount, pViewports ); - } - - void vkCmdSetScissorWithCountEXT( VkCommandBuffer commandBuffer, uint32_t scissorCount, const VkRect2D * pScissors ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetScissorWithCountEXT( commandBuffer, scissorCount, pScissors ); - } - - void vkCmdBindVertexBuffers2EXT( VkCommandBuffer commandBuffer, - uint32_t firstBinding, - uint32_t bindingCount, - const VkBuffer * pBuffers, - const VkDeviceSize * pOffsets, - const VkDeviceSize * pSizes, - const VkDeviceSize * pStrides ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBindVertexBuffers2EXT( commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets, pSizes, pStrides ); - } - - void vkCmdSetDepthTestEnableEXT( VkCommandBuffer commandBuffer, VkBool32 depthTestEnable ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetDepthTestEnableEXT( commandBuffer, depthTestEnable ); - } - - void vkCmdSetDepthWriteEnableEXT( VkCommandBuffer commandBuffer, VkBool32 depthWriteEnable ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetDepthWriteEnableEXT( commandBuffer, depthWriteEnable ); - } - - void 
vkCmdSetDepthCompareOpEXT( VkCommandBuffer commandBuffer, VkCompareOp depthCompareOp ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetDepthCompareOpEXT( commandBuffer, depthCompareOp ); - } - - void vkCmdSetDepthBoundsTestEnableEXT( VkCommandBuffer commandBuffer, VkBool32 depthBoundsTestEnable ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetDepthBoundsTestEnableEXT( commandBuffer, depthBoundsTestEnable ); - } - - void vkCmdSetStencilTestEnableEXT( VkCommandBuffer commandBuffer, VkBool32 stencilTestEnable ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetStencilTestEnableEXT( commandBuffer, stencilTestEnable ); - } - - void vkCmdSetStencilOpEXT( VkCommandBuffer commandBuffer, - VkStencilFaceFlags faceMask, - VkStencilOp failOp, - VkStencilOp passOp, - VkStencilOp depthFailOp, - VkCompareOp compareOp ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetStencilOpEXT( commandBuffer, faceMask, failOp, passOp, depthFailOp, compareOp ); - } - - //=== VK_KHR_deferred_host_operations === - - VkResult vkCreateDeferredOperationKHR( VkDevice device, - const VkAllocationCallbacks * pAllocator, - VkDeferredOperationKHR * pDeferredOperation ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateDeferredOperationKHR( device, pAllocator, pDeferredOperation ); - } - - void vkDestroyDeferredOperationKHR( VkDevice device, VkDeferredOperationKHR operation, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyDeferredOperationKHR( device, operation, pAllocator ); - } - - uint32_t vkGetDeferredOperationMaxConcurrencyKHR( VkDevice device, VkDeferredOperationKHR operation ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDeferredOperationMaxConcurrencyKHR( device, operation ); - } - - VkResult vkGetDeferredOperationResultKHR( VkDevice device, VkDeferredOperationKHR operation ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDeferredOperationResultKHR( device, operation ); - } - - VkResult vkDeferredOperationJoinKHR( VkDevice device, VkDeferredOperationKHR operation ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDeferredOperationJoinKHR( device, operation ); - } - - //=== VK_KHR_pipeline_executable_properties === - - VkResult vkGetPipelineExecutablePropertiesKHR( VkDevice device, - const VkPipelineInfoKHR * pPipelineInfo, - uint32_t * pExecutableCount, - VkPipelineExecutablePropertiesKHR * pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPipelineExecutablePropertiesKHR( device, pPipelineInfo, pExecutableCount, pProperties ); - } - - VkResult vkGetPipelineExecutableStatisticsKHR( VkDevice device, - const VkPipelineExecutableInfoKHR * pExecutableInfo, - uint32_t * pStatisticCount, - VkPipelineExecutableStatisticKHR * pStatistics ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPipelineExecutableStatisticsKHR( device, pExecutableInfo, pStatisticCount, pStatistics ); - } - - VkResult - vkGetPipelineExecutableInternalRepresentationsKHR( VkDevice device, - const VkPipelineExecutableInfoKHR * pExecutableInfo, - uint32_t * pInternalRepresentationCount, - VkPipelineExecutableInternalRepresentationKHR * pInternalRepresentations ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPipelineExecutableInternalRepresentationsKHR( device, pExecutableInfo, pInternalRepresentationCount, pInternalRepresentations ); - } - - //=== VK_EXT_host_image_copy === - - VkResult vkCopyMemoryToImageEXT( VkDevice device, const VkCopyMemoryToImageInfoEXT * pCopyMemoryToImageInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCopyMemoryToImageEXT( device, pCopyMemoryToImageInfo ); - } - - VkResult 
vkCopyImageToMemoryEXT( VkDevice device, const VkCopyImageToMemoryInfoEXT * pCopyImageToMemoryInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCopyImageToMemoryEXT( device, pCopyImageToMemoryInfo ); - } - - VkResult vkCopyImageToImageEXT( VkDevice device, const VkCopyImageToImageInfoEXT * pCopyImageToImageInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCopyImageToImageEXT( device, pCopyImageToImageInfo ); - } - - VkResult - vkTransitionImageLayoutEXT( VkDevice device, uint32_t transitionCount, const VkHostImageLayoutTransitionInfoEXT * pTransitions ) const VULKAN_HPP_NOEXCEPT - { - return ::vkTransitionImageLayoutEXT( device, transitionCount, pTransitions ); - } - - void vkGetImageSubresourceLayout2EXT( VkDevice device, - VkImage image, - const VkImageSubresource2KHR * pSubresource, - VkSubresourceLayout2KHR * pLayout ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetImageSubresourceLayout2EXT( device, image, pSubresource, pLayout ); - } - - //=== VK_KHR_map_memory2 === - - VkResult vkMapMemory2KHR( VkDevice device, const VkMemoryMapInfoKHR * pMemoryMapInfo, void ** ppData ) const VULKAN_HPP_NOEXCEPT - { - return ::vkMapMemory2KHR( device, pMemoryMapInfo, ppData ); - } - - VkResult vkUnmapMemory2KHR( VkDevice device, const VkMemoryUnmapInfoKHR * pMemoryUnmapInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkUnmapMemory2KHR( device, pMemoryUnmapInfo ); - } - - //=== VK_EXT_swapchain_maintenance1 === - - VkResult vkReleaseSwapchainImagesEXT( VkDevice device, const VkReleaseSwapchainImagesInfoEXT * pReleaseInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkReleaseSwapchainImagesEXT( device, pReleaseInfo ); - } - - //=== VK_NV_device_generated_commands === - - void vkGetGeneratedCommandsMemoryRequirementsNV( VkDevice device, - const VkGeneratedCommandsMemoryRequirementsInfoNV * pInfo, - VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetGeneratedCommandsMemoryRequirementsNV( device, pInfo, pMemoryRequirements ); - } - - void vkCmdPreprocessGeneratedCommandsNV( VkCommandBuffer commandBuffer, const VkGeneratedCommandsInfoNV * pGeneratedCommandsInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdPreprocessGeneratedCommandsNV( commandBuffer, pGeneratedCommandsInfo ); - } - - void vkCmdExecuteGeneratedCommandsNV( VkCommandBuffer commandBuffer, - VkBool32 isPreprocessed, - const VkGeneratedCommandsInfoNV * pGeneratedCommandsInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdExecuteGeneratedCommandsNV( commandBuffer, isPreprocessed, pGeneratedCommandsInfo ); - } - - void vkCmdBindPipelineShaderGroupNV( VkCommandBuffer commandBuffer, - VkPipelineBindPoint pipelineBindPoint, - VkPipeline pipeline, - uint32_t groupIndex ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBindPipelineShaderGroupNV( commandBuffer, pipelineBindPoint, pipeline, groupIndex ); - } - - VkResult vkCreateIndirectCommandsLayoutNV( VkDevice device, - const VkIndirectCommandsLayoutCreateInfoNV * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkIndirectCommandsLayoutNV * pIndirectCommandsLayout ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateIndirectCommandsLayoutNV( device, pCreateInfo, pAllocator, pIndirectCommandsLayout ); - } - - void vkDestroyIndirectCommandsLayoutNV( VkDevice device, - VkIndirectCommandsLayoutNV indirectCommandsLayout, - const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyIndirectCommandsLayoutNV( device, indirectCommandsLayout, pAllocator ); - } - - //=== VK_EXT_depth_bias_control === - - void vkCmdSetDepthBias2EXT( 
VkCommandBuffer commandBuffer, const VkDepthBiasInfoEXT * pDepthBiasInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetDepthBias2EXT( commandBuffer, pDepthBiasInfo ); - } - - //=== VK_EXT_acquire_drm_display === - - VkResult vkAcquireDrmDisplayEXT( VkPhysicalDevice physicalDevice, int32_t drmFd, VkDisplayKHR display ) const VULKAN_HPP_NOEXCEPT - { - return ::vkAcquireDrmDisplayEXT( physicalDevice, drmFd, display ); - } - - VkResult vkGetDrmDisplayEXT( VkPhysicalDevice physicalDevice, int32_t drmFd, uint32_t connectorId, VkDisplayKHR * display ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDrmDisplayEXT( physicalDevice, drmFd, connectorId, display ); - } - - //=== VK_EXT_private_data === - - VkResult vkCreatePrivateDataSlotEXT( VkDevice device, - const VkPrivateDataSlotCreateInfo * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkPrivateDataSlot * pPrivateDataSlot ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreatePrivateDataSlotEXT( device, pCreateInfo, pAllocator, pPrivateDataSlot ); - } - - void vkDestroyPrivateDataSlotEXT( VkDevice device, VkPrivateDataSlot privateDataSlot, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyPrivateDataSlotEXT( device, privateDataSlot, pAllocator ); - } - - VkResult vkSetPrivateDataEXT( VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlot privateDataSlot, uint64_t data ) const - VULKAN_HPP_NOEXCEPT - { - return ::vkSetPrivateDataEXT( device, objectType, objectHandle, privateDataSlot, data ); - } - - void vkGetPrivateDataEXT( VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlot privateDataSlot, uint64_t * pData ) const - VULKAN_HPP_NOEXCEPT - { - return ::vkGetPrivateDataEXT( device, objectType, objectHandle, privateDataSlot, pData ); - } - - //=== VK_KHR_video_encode_queue === - - VkResult - vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR( VkPhysicalDevice physicalDevice, - const VkPhysicalDeviceVideoEncodeQualityLevelInfoKHR * pQualityLevelInfo, - VkVideoEncodeQualityLevelPropertiesKHR * pQualityLevelProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR( physicalDevice, pQualityLevelInfo, pQualityLevelProperties ); - } - - VkResult vkGetEncodedVideoSessionParametersKHR( VkDevice device, - const VkVideoEncodeSessionParametersGetInfoKHR * pVideoSessionParametersInfo, - VkVideoEncodeSessionParametersFeedbackInfoKHR * pFeedbackInfo, - size_t * pDataSize, - void * pData ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetEncodedVideoSessionParametersKHR( device, pVideoSessionParametersInfo, pFeedbackInfo, pDataSize, pData ); - } - - void vkCmdEncodeVideoKHR( VkCommandBuffer commandBuffer, const VkVideoEncodeInfoKHR * pEncodeInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdEncodeVideoKHR( commandBuffer, pEncodeInfo ); - } - -# if defined( VK_ENABLE_BETA_EXTENSIONS ) - //=== VK_NV_cuda_kernel_launch === - - VkResult vkCreateCudaModuleNV( VkDevice device, - const VkCudaModuleCreateInfoNV * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkCudaModuleNV * pModule ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateCudaModuleNV( device, pCreateInfo, pAllocator, pModule ); - } - - VkResult vkGetCudaModuleCacheNV( VkDevice device, VkCudaModuleNV module, size_t * pCacheSize, void * pCacheData ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetCudaModuleCacheNV( device, module, pCacheSize, pCacheData ); - } - - VkResult vkCreateCudaFunctionNV( VkDevice device, - const 
VkCudaFunctionCreateInfoNV * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkCudaFunctionNV * pFunction ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateCudaFunctionNV( device, pCreateInfo, pAllocator, pFunction ); - } - - void vkDestroyCudaModuleNV( VkDevice device, VkCudaModuleNV module, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyCudaModuleNV( device, module, pAllocator ); - } - - void vkDestroyCudaFunctionNV( VkDevice device, VkCudaFunctionNV function, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyCudaFunctionNV( device, function, pAllocator ); - } - - void vkCmdCudaLaunchKernelNV( VkCommandBuffer commandBuffer, const VkCudaLaunchInfoNV * pLaunchInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdCudaLaunchKernelNV( commandBuffer, pLaunchInfo ); - } -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ - -# if defined( VK_USE_PLATFORM_METAL_EXT ) - //=== VK_EXT_metal_objects === - - void vkExportMetalObjectsEXT( VkDevice device, VkExportMetalObjectsInfoEXT * pMetalObjectsInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkExportMetalObjectsEXT( device, pMetalObjectsInfo ); - } -# endif /*VK_USE_PLATFORM_METAL_EXT*/ - - //=== VK_KHR_synchronization2 === - - void vkCmdSetEvent2KHR( VkCommandBuffer commandBuffer, VkEvent event, const VkDependencyInfo * pDependencyInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetEvent2KHR( commandBuffer, event, pDependencyInfo ); - } - - void vkCmdResetEvent2KHR( VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags2 stageMask ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdResetEvent2KHR( commandBuffer, event, stageMask ); - } - - void vkCmdWaitEvents2KHR( VkCommandBuffer commandBuffer, - uint32_t eventCount, - const VkEvent * pEvents, - const VkDependencyInfo * pDependencyInfos ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdWaitEvents2KHR( commandBuffer, eventCount, pEvents, pDependencyInfos ); - } - - void vkCmdPipelineBarrier2KHR( VkCommandBuffer commandBuffer, const VkDependencyInfo * pDependencyInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdPipelineBarrier2KHR( commandBuffer, pDependencyInfo ); - } - - void vkCmdWriteTimestamp2KHR( VkCommandBuffer commandBuffer, VkPipelineStageFlags2 stage, VkQueryPool queryPool, uint32_t query ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdWriteTimestamp2KHR( commandBuffer, stage, queryPool, query ); - } - - VkResult vkQueueSubmit2KHR( VkQueue queue, uint32_t submitCount, const VkSubmitInfo2 * pSubmits, VkFence fence ) const VULKAN_HPP_NOEXCEPT - { - return ::vkQueueSubmit2KHR( queue, submitCount, pSubmits, fence ); - } - - void vkCmdWriteBufferMarker2AMD( - VkCommandBuffer commandBuffer, VkPipelineStageFlags2 stage, VkBuffer dstBuffer, VkDeviceSize dstOffset, uint32_t marker ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdWriteBufferMarker2AMD( commandBuffer, stage, dstBuffer, dstOffset, marker ); - } - - void vkGetQueueCheckpointData2NV( VkQueue queue, uint32_t * pCheckpointDataCount, VkCheckpointData2NV * pCheckpointData ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetQueueCheckpointData2NV( queue, pCheckpointDataCount, pCheckpointData ); - } - - //=== VK_EXT_descriptor_buffer === - - void vkGetDescriptorSetLayoutSizeEXT( VkDevice device, VkDescriptorSetLayout layout, VkDeviceSize * pLayoutSizeInBytes ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDescriptorSetLayoutSizeEXT( device, layout, pLayoutSizeInBytes ); - } - - void vkGetDescriptorSetLayoutBindingOffsetEXT( VkDevice device, - 
VkDescriptorSetLayout layout, - uint32_t binding, - VkDeviceSize * pOffset ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDescriptorSetLayoutBindingOffsetEXT( device, layout, binding, pOffset ); - } - - void vkGetDescriptorEXT( VkDevice device, const VkDescriptorGetInfoEXT * pDescriptorInfo, size_t dataSize, void * pDescriptor ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDescriptorEXT( device, pDescriptorInfo, dataSize, pDescriptor ); - } - - void vkCmdBindDescriptorBuffersEXT( VkCommandBuffer commandBuffer, - uint32_t bufferCount, - const VkDescriptorBufferBindingInfoEXT * pBindingInfos ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBindDescriptorBuffersEXT( commandBuffer, bufferCount, pBindingInfos ); - } - - void vkCmdSetDescriptorBufferOffsetsEXT( VkCommandBuffer commandBuffer, - VkPipelineBindPoint pipelineBindPoint, - VkPipelineLayout layout, - uint32_t firstSet, - uint32_t setCount, - const uint32_t * pBufferIndices, - const VkDeviceSize * pOffsets ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetDescriptorBufferOffsetsEXT( commandBuffer, pipelineBindPoint, layout, firstSet, setCount, pBufferIndices, pOffsets ); - } - - void vkCmdBindDescriptorBufferEmbeddedSamplersEXT( VkCommandBuffer commandBuffer, - VkPipelineBindPoint pipelineBindPoint, - VkPipelineLayout layout, - uint32_t set ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBindDescriptorBufferEmbeddedSamplersEXT( commandBuffer, pipelineBindPoint, layout, set ); - } - - VkResult - vkGetBufferOpaqueCaptureDescriptorDataEXT( VkDevice device, const VkBufferCaptureDescriptorDataInfoEXT * pInfo, void * pData ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetBufferOpaqueCaptureDescriptorDataEXT( device, pInfo, pData ); - } - - VkResult - vkGetImageOpaqueCaptureDescriptorDataEXT( VkDevice device, const VkImageCaptureDescriptorDataInfoEXT * pInfo, void * pData ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetImageOpaqueCaptureDescriptorDataEXT( device, pInfo, pData ); - } - - VkResult vkGetImageViewOpaqueCaptureDescriptorDataEXT( VkDevice device, - const VkImageViewCaptureDescriptorDataInfoEXT * pInfo, - void * pData ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetImageViewOpaqueCaptureDescriptorDataEXT( device, pInfo, pData ); - } - - VkResult - vkGetSamplerOpaqueCaptureDescriptorDataEXT( VkDevice device, const VkSamplerCaptureDescriptorDataInfoEXT * pInfo, void * pData ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetSamplerOpaqueCaptureDescriptorDataEXT( device, pInfo, pData ); - } - - VkResult vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT( VkDevice device, - const VkAccelerationStructureCaptureDescriptorDataInfoEXT * pInfo, - void * pData ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT( device, pInfo, pData ); - } - - //=== VK_NV_fragment_shading_rate_enums === - - void vkCmdSetFragmentShadingRateEnumNV( VkCommandBuffer commandBuffer, - VkFragmentShadingRateNV shadingRate, - const VkFragmentShadingRateCombinerOpKHR combinerOps[2] ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetFragmentShadingRateEnumNV( commandBuffer, shadingRate, combinerOps ); - } - - //=== VK_EXT_mesh_shader === - - void vkCmdDrawMeshTasksEXT( VkCommandBuffer commandBuffer, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDrawMeshTasksEXT( commandBuffer, groupCountX, groupCountY, groupCountZ ); - } - - void vkCmdDrawMeshTasksIndirectEXT( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, 
uint32_t stride ) const - VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDrawMeshTasksIndirectEXT( commandBuffer, buffer, offset, drawCount, stride ); - } - - void vkCmdDrawMeshTasksIndirectCountEXT( VkCommandBuffer commandBuffer, + { + return ::vkCmdEndTransformFeedbackEXT( commandBuffer, firstCounterBuffer, counterBufferCount, pCounterBuffers, pCounterBufferOffsets ); + } + + void vkCmdBeginQueryIndexedEXT( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags, uint32_t index ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBeginQueryIndexedEXT( commandBuffer, queryPool, query, flags, index ); + } + + void vkCmdEndQueryIndexedEXT( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, uint32_t index ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdEndQueryIndexedEXT( commandBuffer, queryPool, query, index ); + } + + void vkCmdDrawIndirectByteCountEXT( VkCommandBuffer commandBuffer, + uint32_t instanceCount, + uint32_t firstInstance, + VkBuffer counterBuffer, + VkDeviceSize counterBufferOffset, + uint32_t counterOffset, + uint32_t vertexStride ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawIndirectByteCountEXT( commandBuffer, instanceCount, firstInstance, counterBuffer, counterBufferOffset, counterOffset, vertexStride ); + } + + //=== VK_NVX_binary_import === + + VkResult vkCreateCuModuleNVX( VkDevice device, + const VkCuModuleCreateInfoNVX * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkCuModuleNVX * pModule ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateCuModuleNVX( device, pCreateInfo, pAllocator, pModule ); + } + + VkResult vkCreateCuFunctionNVX( VkDevice device, + const VkCuFunctionCreateInfoNVX * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkCuFunctionNVX * pFunction ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateCuFunctionNVX( device, pCreateInfo, pAllocator, pFunction ); + } + + void vkDestroyCuModuleNVX( VkDevice device, VkCuModuleNVX module, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyCuModuleNVX( device, module, pAllocator ); + } + + void vkDestroyCuFunctionNVX( VkDevice device, VkCuFunctionNVX function, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyCuFunctionNVX( device, function, pAllocator ); + } + + void vkCmdCuLaunchKernelNVX( VkCommandBuffer commandBuffer, const VkCuLaunchInfoNVX * pLaunchInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCuLaunchKernelNVX( commandBuffer, pLaunchInfo ); + } + + //=== VK_NVX_image_view_handle === + + uint32_t vkGetImageViewHandleNVX( VkDevice device, const VkImageViewHandleInfoNVX * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetImageViewHandleNVX( device, pInfo ); + } + + VkResult vkGetImageViewAddressNVX( VkDevice device, VkImageView imageView, VkImageViewAddressPropertiesNVX * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetImageViewAddressNVX( device, imageView, pProperties ); + } + + //=== VK_AMD_draw_indirect_count === + + void vkCmdDrawIndirectCountAMD( VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawIndirectCountAMD( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); + } + + void vkCmdDrawIndexedIndirectCountAMD( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize 
countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDrawMeshTasksIndirectCountEXT( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); - } + { + return ::vkCmdDrawIndexedIndirectCountAMD( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); + } - //=== VK_KHR_copy_commands2 === + //=== VK_AMD_shader_info === - void vkCmdCopyBuffer2KHR( VkCommandBuffer commandBuffer, const VkCopyBufferInfo2 * pCopyBufferInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdCopyBuffer2KHR( commandBuffer, pCopyBufferInfo ); - } + VkResult vkGetShaderInfoAMD( VkDevice device, + VkPipeline pipeline, + VkShaderStageFlagBits shaderStage, + VkShaderInfoTypeAMD infoType, + size_t * pInfoSize, + void * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetShaderInfoAMD( device, pipeline, shaderStage, infoType, pInfoSize, pInfo ); + } - void vkCmdCopyImage2KHR( VkCommandBuffer commandBuffer, const VkCopyImageInfo2 * pCopyImageInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdCopyImage2KHR( commandBuffer, pCopyImageInfo ); - } + //=== VK_KHR_dynamic_rendering === - void vkCmdCopyBufferToImage2KHR( VkCommandBuffer commandBuffer, const VkCopyBufferToImageInfo2 * pCopyBufferToImageInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdCopyBufferToImage2KHR( commandBuffer, pCopyBufferToImageInfo ); - } + void vkCmdBeginRenderingKHR( VkCommandBuffer commandBuffer, const VkRenderingInfo * pRenderingInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBeginRenderingKHR( commandBuffer, pRenderingInfo ); + } - void vkCmdCopyImageToBuffer2KHR( VkCommandBuffer commandBuffer, const VkCopyImageToBufferInfo2 * pCopyImageToBufferInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdCopyImageToBuffer2KHR( commandBuffer, pCopyImageToBufferInfo ); - } + void vkCmdEndRenderingKHR( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdEndRenderingKHR( commandBuffer ); + } - void vkCmdBlitImage2KHR( VkCommandBuffer commandBuffer, const VkBlitImageInfo2 * pBlitImageInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBlitImage2KHR( commandBuffer, pBlitImageInfo ); - } +# if defined( VK_USE_PLATFORM_GGP ) + //=== VK_GGP_stream_descriptor_surface === - void vkCmdResolveImage2KHR( VkCommandBuffer commandBuffer, const VkResolveImageInfo2 * pResolveImageInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdResolveImage2KHR( commandBuffer, pResolveImageInfo ); - } + VkResult vkCreateStreamDescriptorSurfaceGGP( VkInstance instance, + const VkStreamDescriptorSurfaceCreateInfoGGP * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateStreamDescriptorSurfaceGGP( instance, pCreateInfo, pAllocator, pSurface ); + } +# endif /*VK_USE_PLATFORM_GGP*/ - //=== VK_EXT_device_fault === + //=== VK_NV_external_memory_capabilities === - VkResult vkGetDeviceFaultInfoEXT( VkDevice device, VkDeviceFaultCountsEXT * pFaultCounts, VkDeviceFaultInfoEXT * pFaultInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDeviceFaultInfoEXT( device, pFaultCounts, pFaultInfo ); - } + VkResult + vkGetPhysicalDeviceExternalImageFormatPropertiesNV( VkPhysicalDevice physicalDevice, + VkFormat format, + VkImageType type, + VkImageTiling tiling, + VkImageUsageFlags usage, + VkImageCreateFlags flags, + VkExternalMemoryHandleTypeFlagsNV externalHandleType, + VkExternalImageFormatPropertiesNV * pExternalImageFormatProperties ) const VULKAN_HPP_NOEXCEPT + { + return 
::vkGetPhysicalDeviceExternalImageFormatPropertiesNV( + physicalDevice, format, type, tiling, usage, flags, externalHandleType, pExternalImageFormatProperties ); + } # if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_NV_acquire_winrt_display === + //=== VK_NV_external_memory_win32 === - VkResult vkAcquireWinrtDisplayNV( VkPhysicalDevice physicalDevice, VkDisplayKHR display ) const VULKAN_HPP_NOEXCEPT - { - return ::vkAcquireWinrtDisplayNV( physicalDevice, display ); - } + VkResult vkGetMemoryWin32HandleNV( VkDevice device, + VkDeviceMemory memory, + VkExternalMemoryHandleTypeFlagsNV handleType, + HANDLE * pHandle ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetMemoryWin32HandleNV( device, memory, handleType, pHandle ); + } +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ - VkResult vkGetWinrtDisplayNV( VkPhysicalDevice physicalDevice, uint32_t deviceRelativeId, VkDisplayKHR * pDisplay ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetWinrtDisplayNV( physicalDevice, deviceRelativeId, pDisplay ); - } + //=== VK_KHR_get_physical_device_properties2 === + + void vkGetPhysicalDeviceFeatures2KHR( VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2 * pFeatures ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceFeatures2KHR( physicalDevice, pFeatures ); + } + + void vkGetPhysicalDeviceProperties2KHR( VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2 * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceProperties2KHR( physicalDevice, pProperties ); + } + + void vkGetPhysicalDeviceFormatProperties2KHR( VkPhysicalDevice physicalDevice, + VkFormat format, + VkFormatProperties2 * pFormatProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceFormatProperties2KHR( physicalDevice, format, pFormatProperties ); + } + + VkResult vkGetPhysicalDeviceImageFormatProperties2KHR( VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceImageFormatInfo2 * pImageFormatInfo, + VkImageFormatProperties2 * pImageFormatProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceImageFormatProperties2KHR( physicalDevice, pImageFormatInfo, pImageFormatProperties ); + } + + void vkGetPhysicalDeviceQueueFamilyProperties2KHR( VkPhysicalDevice physicalDevice, + uint32_t * pQueueFamilyPropertyCount, + VkQueueFamilyProperties2 * pQueueFamilyProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceQueueFamilyProperties2KHR( physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties ); + } + + void vkGetPhysicalDeviceMemoryProperties2KHR( VkPhysicalDevice physicalDevice, + VkPhysicalDeviceMemoryProperties2 * pMemoryProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceMemoryProperties2KHR( physicalDevice, pMemoryProperties ); + } + + void vkGetPhysicalDeviceSparseImageFormatProperties2KHR( VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceSparseImageFormatInfo2 * pFormatInfo, + uint32_t * pPropertyCount, + VkSparseImageFormatProperties2 * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceSparseImageFormatProperties2KHR( physicalDevice, pFormatInfo, pPropertyCount, pProperties ); + } + + //=== VK_KHR_device_group === + + void vkGetDeviceGroupPeerMemoryFeaturesKHR( VkDevice device, + uint32_t heapIndex, + uint32_t localDeviceIndex, + uint32_t remoteDeviceIndex, + VkPeerMemoryFeatureFlags * pPeerMemoryFeatures ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceGroupPeerMemoryFeaturesKHR( device, heapIndex, localDeviceIndex, remoteDeviceIndex, pPeerMemoryFeatures ); + } + + void 
vkCmdSetDeviceMaskKHR( VkCommandBuffer commandBuffer, uint32_t deviceMask ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDeviceMaskKHR( commandBuffer, deviceMask ); + } + + void vkCmdDispatchBaseKHR( VkCommandBuffer commandBuffer, + uint32_t baseGroupX, + uint32_t baseGroupY, + uint32_t baseGroupZ, + uint32_t groupCountX, + uint32_t groupCountY, + uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDispatchBaseKHR( commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ ); + } + +# if defined( VK_USE_PLATFORM_VI_NN ) + //=== VK_NN_vi_surface === + + VkResult vkCreateViSurfaceNN( VkInstance instance, + const VkViSurfaceCreateInfoNN * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateViSurfaceNN( instance, pCreateInfo, pAllocator, pSurface ); + } +# endif /*VK_USE_PLATFORM_VI_NN*/ + + //=== VK_KHR_maintenance1 === + + void vkTrimCommandPoolKHR( VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags ) const VULKAN_HPP_NOEXCEPT + { + return ::vkTrimCommandPoolKHR( device, commandPool, flags ); + } + + //=== VK_KHR_device_group_creation === + + VkResult vkEnumeratePhysicalDeviceGroupsKHR( VkInstance instance, + uint32_t * pPhysicalDeviceGroupCount, + VkPhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkEnumeratePhysicalDeviceGroupsKHR( instance, pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties ); + } + + //=== VK_KHR_external_memory_capabilities === + + void vkGetPhysicalDeviceExternalBufferPropertiesKHR( VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceExternalBufferInfo * pExternalBufferInfo, + VkExternalBufferProperties * pExternalBufferProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceExternalBufferPropertiesKHR( physicalDevice, pExternalBufferInfo, pExternalBufferProperties ); + } + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_memory_win32 === + + VkResult + vkGetMemoryWin32HandleKHR( VkDevice device, const VkMemoryGetWin32HandleInfoKHR * pGetWin32HandleInfo, HANDLE * pHandle ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetMemoryWin32HandleKHR( device, pGetWin32HandleInfo, pHandle ); + } + + VkResult vkGetMemoryWin32HandlePropertiesKHR( VkDevice device, + VkExternalMemoryHandleTypeFlagBits handleType, + HANDLE handle, + VkMemoryWin32HandlePropertiesKHR * pMemoryWin32HandleProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetMemoryWin32HandlePropertiesKHR( device, handleType, handle, pMemoryWin32HandleProperties ); + } +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_memory_fd === + + VkResult vkGetMemoryFdKHR( VkDevice device, const VkMemoryGetFdInfoKHR * pGetFdInfo, int * pFd ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetMemoryFdKHR( device, pGetFdInfo, pFd ); + } + + VkResult vkGetMemoryFdPropertiesKHR( VkDevice device, + VkExternalMemoryHandleTypeFlagBits handleType, + int fd, + VkMemoryFdPropertiesKHR * pMemoryFdProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetMemoryFdPropertiesKHR( device, handleType, fd, pMemoryFdProperties ); + } + + //=== VK_KHR_external_semaphore_capabilities === + + void vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo, + VkExternalSemaphoreProperties * pExternalSemaphoreProperties ) const VULKAN_HPP_NOEXCEPT + { + return 
::vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties ); + } + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_semaphore_win32 === + + VkResult vkImportSemaphoreWin32HandleKHR( VkDevice device, + const VkImportSemaphoreWin32HandleInfoKHR * pImportSemaphoreWin32HandleInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkImportSemaphoreWin32HandleKHR( device, pImportSemaphoreWin32HandleInfo ); + } + + VkResult vkGetSemaphoreWin32HandleKHR( VkDevice device, + const VkSemaphoreGetWin32HandleInfoKHR * pGetWin32HandleInfo, + HANDLE * pHandle ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetSemaphoreWin32HandleKHR( device, pGetWin32HandleInfo, pHandle ); + } +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_semaphore_fd === + + VkResult vkImportSemaphoreFdKHR( VkDevice device, const VkImportSemaphoreFdInfoKHR * pImportSemaphoreFdInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkImportSemaphoreFdKHR( device, pImportSemaphoreFdInfo ); + } + + VkResult vkGetSemaphoreFdKHR( VkDevice device, const VkSemaphoreGetFdInfoKHR * pGetFdInfo, int * pFd ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetSemaphoreFdKHR( device, pGetFdInfo, pFd ); + } + + //=== VK_KHR_push_descriptor === + + void vkCmdPushDescriptorSetKHR( VkCommandBuffer commandBuffer, + VkPipelineBindPoint pipelineBindPoint, + VkPipelineLayout layout, + uint32_t set, + uint32_t descriptorWriteCount, + const VkWriteDescriptorSet * pDescriptorWrites ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdPushDescriptorSetKHR( commandBuffer, pipelineBindPoint, layout, set, descriptorWriteCount, pDescriptorWrites ); + } + + void vkCmdPushDescriptorSetWithTemplateKHR( VkCommandBuffer commandBuffer, + VkDescriptorUpdateTemplate descriptorUpdateTemplate, + VkPipelineLayout layout, + uint32_t set, + const void * pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdPushDescriptorSetWithTemplateKHR( commandBuffer, descriptorUpdateTemplate, layout, set, pData ); + } + + //=== VK_EXT_conditional_rendering === + + void vkCmdBeginConditionalRenderingEXT( VkCommandBuffer commandBuffer, + const VkConditionalRenderingBeginInfoEXT * pConditionalRenderingBegin ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBeginConditionalRenderingEXT( commandBuffer, pConditionalRenderingBegin ); + } + + void vkCmdEndConditionalRenderingEXT( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdEndConditionalRenderingEXT( commandBuffer ); + } + + //=== VK_KHR_descriptor_update_template === + + VkResult vkCreateDescriptorUpdateTemplateKHR( VkDevice device, + const VkDescriptorUpdateTemplateCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkDescriptorUpdateTemplate * pDescriptorUpdateTemplate ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateDescriptorUpdateTemplateKHR( device, pCreateInfo, pAllocator, pDescriptorUpdateTemplate ); + } + + void vkDestroyDescriptorUpdateTemplateKHR( VkDevice device, + VkDescriptorUpdateTemplate descriptorUpdateTemplate, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyDescriptorUpdateTemplateKHR( device, descriptorUpdateTemplate, pAllocator ); + } + + void vkUpdateDescriptorSetWithTemplateKHR( VkDevice device, + VkDescriptorSet descriptorSet, + VkDescriptorUpdateTemplate descriptorUpdateTemplate, + const void * pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkUpdateDescriptorSetWithTemplateKHR( device, descriptorSet, descriptorUpdateTemplate, pData ); + } + + //=== 
VK_NV_clip_space_w_scaling === + + void vkCmdSetViewportWScalingNV( VkCommandBuffer commandBuffer, + uint32_t firstViewport, + uint32_t viewportCount, + const VkViewportWScalingNV * pViewportWScalings ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetViewportWScalingNV( commandBuffer, firstViewport, viewportCount, pViewportWScalings ); + } + + //=== VK_EXT_direct_mode_display === + + VkResult vkReleaseDisplayEXT( VkPhysicalDevice physicalDevice, VkDisplayKHR display ) const VULKAN_HPP_NOEXCEPT + { + return ::vkReleaseDisplayEXT( physicalDevice, display ); + } + +# if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT ) + //=== VK_EXT_acquire_xlib_display === + + VkResult vkAcquireXlibDisplayEXT( VkPhysicalDevice physicalDevice, Display * dpy, VkDisplayKHR display ) const VULKAN_HPP_NOEXCEPT + { + return ::vkAcquireXlibDisplayEXT( physicalDevice, dpy, display ); + } + + VkResult + vkGetRandROutputDisplayEXT( VkPhysicalDevice physicalDevice, Display * dpy, RROutput rrOutput, VkDisplayKHR * pDisplay ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetRandROutputDisplayEXT( physicalDevice, dpy, rrOutput, pDisplay ); + } +# endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ + + //=== VK_EXT_display_surface_counter === + + VkResult vkGetPhysicalDeviceSurfaceCapabilities2EXT( VkPhysicalDevice physicalDevice, + VkSurfaceKHR surface, + VkSurfaceCapabilities2EXT * pSurfaceCapabilities ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceSurfaceCapabilities2EXT( physicalDevice, surface, pSurfaceCapabilities ); + } + + //=== VK_EXT_display_control === + + VkResult vkDisplayPowerControlEXT( VkDevice device, VkDisplayKHR display, const VkDisplayPowerInfoEXT * pDisplayPowerInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDisplayPowerControlEXT( device, display, pDisplayPowerInfo ); + } + + VkResult vkRegisterDeviceEventEXT( VkDevice device, + const VkDeviceEventInfoEXT * pDeviceEventInfo, + const VkAllocationCallbacks * pAllocator, + VkFence * pFence ) const VULKAN_HPP_NOEXCEPT + { + return ::vkRegisterDeviceEventEXT( device, pDeviceEventInfo, pAllocator, pFence ); + } + + VkResult vkRegisterDisplayEventEXT( VkDevice device, + VkDisplayKHR display, + const VkDisplayEventInfoEXT * pDisplayEventInfo, + const VkAllocationCallbacks * pAllocator, + VkFence * pFence ) const VULKAN_HPP_NOEXCEPT + { + return ::vkRegisterDisplayEventEXT( device, display, pDisplayEventInfo, pAllocator, pFence ); + } + + VkResult vkGetSwapchainCounterEXT( VkDevice device, + VkSwapchainKHR swapchain, + VkSurfaceCounterFlagBitsEXT counter, + uint64_t * pCounterValue ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetSwapchainCounterEXT( device, swapchain, counter, pCounterValue ); + } + + //=== VK_GOOGLE_display_timing === + + VkResult vkGetRefreshCycleDurationGOOGLE( VkDevice device, + VkSwapchainKHR swapchain, + VkRefreshCycleDurationGOOGLE * pDisplayTimingProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetRefreshCycleDurationGOOGLE( device, swapchain, pDisplayTimingProperties ); + } + + VkResult vkGetPastPresentationTimingGOOGLE( VkDevice device, + VkSwapchainKHR swapchain, + uint32_t * pPresentationTimingCount, + VkPastPresentationTimingGOOGLE * pPresentationTimings ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPastPresentationTimingGOOGLE( device, swapchain, pPresentationTimingCount, pPresentationTimings ); + } + + //=== VK_EXT_discard_rectangles === + + void vkCmdSetDiscardRectangleEXT( VkCommandBuffer commandBuffer, + uint32_t firstDiscardRectangle, + uint32_t discardRectangleCount, + const VkRect2D * pDiscardRectangles ) const 
VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDiscardRectangleEXT( commandBuffer, firstDiscardRectangle, discardRectangleCount, pDiscardRectangles ); + } + + void vkCmdSetDiscardRectangleEnableEXT( VkCommandBuffer commandBuffer, VkBool32 discardRectangleEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDiscardRectangleEnableEXT( commandBuffer, discardRectangleEnable ); + } + + void vkCmdSetDiscardRectangleModeEXT( VkCommandBuffer commandBuffer, VkDiscardRectangleModeEXT discardRectangleMode ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDiscardRectangleModeEXT( commandBuffer, discardRectangleMode ); + } + + //=== VK_EXT_hdr_metadata === + + void vkSetHdrMetadataEXT( VkDevice device, + uint32_t swapchainCount, + const VkSwapchainKHR * pSwapchains, + const VkHdrMetadataEXT * pMetadata ) const VULKAN_HPP_NOEXCEPT + { + return ::vkSetHdrMetadataEXT( device, swapchainCount, pSwapchains, pMetadata ); + } + + //=== VK_KHR_create_renderpass2 === + + VkResult vkCreateRenderPass2KHR( VkDevice device, + const VkRenderPassCreateInfo2 * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkRenderPass * pRenderPass ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateRenderPass2KHR( device, pCreateInfo, pAllocator, pRenderPass ); + } + + void vkCmdBeginRenderPass2KHR( VkCommandBuffer commandBuffer, + const VkRenderPassBeginInfo * pRenderPassBegin, + const VkSubpassBeginInfo * pSubpassBeginInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBeginRenderPass2KHR( commandBuffer, pRenderPassBegin, pSubpassBeginInfo ); + } + + void vkCmdNextSubpass2KHR( VkCommandBuffer commandBuffer, + const VkSubpassBeginInfo * pSubpassBeginInfo, + const VkSubpassEndInfo * pSubpassEndInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdNextSubpass2KHR( commandBuffer, pSubpassBeginInfo, pSubpassEndInfo ); + } + + void vkCmdEndRenderPass2KHR( VkCommandBuffer commandBuffer, const VkSubpassEndInfo * pSubpassEndInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdEndRenderPass2KHR( commandBuffer, pSubpassEndInfo ); + } + + //=== VK_KHR_shared_presentable_image === + + VkResult vkGetSwapchainStatusKHR( VkDevice device, VkSwapchainKHR swapchain ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetSwapchainStatusKHR( device, swapchain ); + } + + //=== VK_KHR_external_fence_capabilities === + + void vkGetPhysicalDeviceExternalFencePropertiesKHR( VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceExternalFenceInfo * pExternalFenceInfo, + VkExternalFenceProperties * pExternalFenceProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceExternalFencePropertiesKHR( physicalDevice, pExternalFenceInfo, pExternalFenceProperties ); + } + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_fence_win32 === + + VkResult vkImportFenceWin32HandleKHR( VkDevice device, const VkImportFenceWin32HandleInfoKHR * pImportFenceWin32HandleInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkImportFenceWin32HandleKHR( device, pImportFenceWin32HandleInfo ); + } + + VkResult vkGetFenceWin32HandleKHR( VkDevice device, const VkFenceGetWin32HandleInfoKHR * pGetWin32HandleInfo, HANDLE * pHandle ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetFenceWin32HandleKHR( device, pGetWin32HandleInfo, pHandle ); + } +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_fence_fd === + + VkResult vkImportFenceFdKHR( VkDevice device, const VkImportFenceFdInfoKHR * pImportFenceFdInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkImportFenceFdKHR( device, pImportFenceFdInfo ); + } + + VkResult vkGetFenceFdKHR( VkDevice 
device, const VkFenceGetFdInfoKHR * pGetFdInfo, int * pFd ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetFenceFdKHR( device, pGetFdInfo, pFd ); + } + + //=== VK_KHR_performance_query === + + VkResult + vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + uint32_t * pCounterCount, + VkPerformanceCounterKHR * pCounters, + VkPerformanceCounterDescriptionKHR * pCounterDescriptions ) const VULKAN_HPP_NOEXCEPT + { + return ::vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( + physicalDevice, queueFamilyIndex, pCounterCount, pCounters, pCounterDescriptions ); + } + + void vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( VkPhysicalDevice physicalDevice, + const VkQueryPoolPerformanceCreateInfoKHR * pPerformanceQueryCreateInfo, + uint32_t * pNumPasses ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( physicalDevice, pPerformanceQueryCreateInfo, pNumPasses ); + } + + VkResult vkAcquireProfilingLockKHR( VkDevice device, const VkAcquireProfilingLockInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkAcquireProfilingLockKHR( device, pInfo ); + } + + void vkReleaseProfilingLockKHR( VkDevice device ) const VULKAN_HPP_NOEXCEPT + { + return ::vkReleaseProfilingLockKHR( device ); + } + + //=== VK_KHR_get_surface_capabilities2 === + + VkResult vkGetPhysicalDeviceSurfaceCapabilities2KHR( VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, + VkSurfaceCapabilities2KHR * pSurfaceCapabilities ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceSurfaceCapabilities2KHR( physicalDevice, pSurfaceInfo, pSurfaceCapabilities ); + } + + VkResult vkGetPhysicalDeviceSurfaceFormats2KHR( VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, + uint32_t * pSurfaceFormatCount, + VkSurfaceFormat2KHR * pSurfaceFormats ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceSurfaceFormats2KHR( physicalDevice, pSurfaceInfo, pSurfaceFormatCount, pSurfaceFormats ); + } + + //=== VK_KHR_get_display_properties2 === + + VkResult vkGetPhysicalDeviceDisplayProperties2KHR( VkPhysicalDevice physicalDevice, + uint32_t * pPropertyCount, + VkDisplayProperties2KHR * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceDisplayProperties2KHR( physicalDevice, pPropertyCount, pProperties ); + } + + VkResult vkGetPhysicalDeviceDisplayPlaneProperties2KHR( VkPhysicalDevice physicalDevice, + uint32_t * pPropertyCount, + VkDisplayPlaneProperties2KHR * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceDisplayPlaneProperties2KHR( physicalDevice, pPropertyCount, pProperties ); + } + + VkResult vkGetDisplayModeProperties2KHR( VkPhysicalDevice physicalDevice, + VkDisplayKHR display, + uint32_t * pPropertyCount, + VkDisplayModeProperties2KHR * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDisplayModeProperties2KHR( physicalDevice, display, pPropertyCount, pProperties ); + } + + VkResult vkGetDisplayPlaneCapabilities2KHR( VkPhysicalDevice physicalDevice, + const VkDisplayPlaneInfo2KHR * pDisplayPlaneInfo, + VkDisplayPlaneCapabilities2KHR * pCapabilities ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDisplayPlaneCapabilities2KHR( physicalDevice, pDisplayPlaneInfo, pCapabilities ); + } + +# if defined( VK_USE_PLATFORM_IOS_MVK ) + //=== VK_MVK_ios_surface === + + VkResult vkCreateIOSSurfaceMVK( VkInstance instance, + const VkIOSSurfaceCreateInfoMVK * 
pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateIOSSurfaceMVK( instance, pCreateInfo, pAllocator, pSurface ); + } +# endif /*VK_USE_PLATFORM_IOS_MVK*/ + +# if defined( VK_USE_PLATFORM_MACOS_MVK ) + //=== VK_MVK_macos_surface === + + VkResult vkCreateMacOSSurfaceMVK( VkInstance instance, + const VkMacOSSurfaceCreateInfoMVK * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateMacOSSurfaceMVK( instance, pCreateInfo, pAllocator, pSurface ); + } +# endif /*VK_USE_PLATFORM_MACOS_MVK*/ + + //=== VK_EXT_debug_utils === + + VkResult vkSetDebugUtilsObjectNameEXT( VkDevice device, const VkDebugUtilsObjectNameInfoEXT * pNameInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkSetDebugUtilsObjectNameEXT( device, pNameInfo ); + } + + VkResult vkSetDebugUtilsObjectTagEXT( VkDevice device, const VkDebugUtilsObjectTagInfoEXT * pTagInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkSetDebugUtilsObjectTagEXT( device, pTagInfo ); + } + + void vkQueueBeginDebugUtilsLabelEXT( VkQueue queue, const VkDebugUtilsLabelEXT * pLabelInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkQueueBeginDebugUtilsLabelEXT( queue, pLabelInfo ); + } + + void vkQueueEndDebugUtilsLabelEXT( VkQueue queue ) const VULKAN_HPP_NOEXCEPT + { + return ::vkQueueEndDebugUtilsLabelEXT( queue ); + } + + void vkQueueInsertDebugUtilsLabelEXT( VkQueue queue, const VkDebugUtilsLabelEXT * pLabelInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkQueueInsertDebugUtilsLabelEXT( queue, pLabelInfo ); + } + + void vkCmdBeginDebugUtilsLabelEXT( VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT * pLabelInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBeginDebugUtilsLabelEXT( commandBuffer, pLabelInfo ); + } + + void vkCmdEndDebugUtilsLabelEXT( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdEndDebugUtilsLabelEXT( commandBuffer ); + } + + void vkCmdInsertDebugUtilsLabelEXT( VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT * pLabelInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdInsertDebugUtilsLabelEXT( commandBuffer, pLabelInfo ); + } + + VkResult vkCreateDebugUtilsMessengerEXT( VkInstance instance, + const VkDebugUtilsMessengerCreateInfoEXT * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkDebugUtilsMessengerEXT * pMessenger ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateDebugUtilsMessengerEXT( instance, pCreateInfo, pAllocator, pMessenger ); + } + + void vkDestroyDebugUtilsMessengerEXT( VkInstance instance, + VkDebugUtilsMessengerEXT messenger, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyDebugUtilsMessengerEXT( instance, messenger, pAllocator ); + } + + void vkSubmitDebugUtilsMessageEXT( VkInstance instance, + VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, + VkDebugUtilsMessageTypeFlagsEXT messageTypes, + const VkDebugUtilsMessengerCallbackDataEXT * pCallbackData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkSubmitDebugUtilsMessageEXT( instance, messageSeverity, messageTypes, pCallbackData ); + } + +# if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_ANDROID_external_memory_android_hardware_buffer === + + VkResult vkGetAndroidHardwareBufferPropertiesANDROID( VkDevice device, + const struct AHardwareBuffer * buffer, + VkAndroidHardwareBufferPropertiesANDROID * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetAndroidHardwareBufferPropertiesANDROID( device, 
buffer, pProperties ); + } + + VkResult vkGetMemoryAndroidHardwareBufferANDROID( VkDevice device, + const VkMemoryGetAndroidHardwareBufferInfoANDROID * pInfo, + struct AHardwareBuffer ** pBuffer ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetMemoryAndroidHardwareBufferANDROID( device, pInfo, pBuffer ); + } +# endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_AMDX_shader_enqueue === + + VkResult vkCreateExecutionGraphPipelinesAMDX( VkDevice device, + VkPipelineCache pipelineCache, + uint32_t createInfoCount, + const VkExecutionGraphPipelineCreateInfoAMDX * pCreateInfos, + const VkAllocationCallbacks * pAllocator, + VkPipeline * pPipelines ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateExecutionGraphPipelinesAMDX( device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines ); + } + + VkResult vkGetExecutionGraphPipelineScratchSizeAMDX( VkDevice device, + VkPipeline executionGraph, + VkExecutionGraphPipelineScratchSizeAMDX * pSizeInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetExecutionGraphPipelineScratchSizeAMDX( device, executionGraph, pSizeInfo ); + } + + VkResult vkGetExecutionGraphPipelineNodeIndexAMDX( VkDevice device, + VkPipeline executionGraph, + const VkPipelineShaderStageNodeCreateInfoAMDX * pNodeInfo, + uint32_t * pNodeIndex ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetExecutionGraphPipelineNodeIndexAMDX( device, executionGraph, pNodeInfo, pNodeIndex ); + } + + void vkCmdInitializeGraphScratchMemoryAMDX( VkCommandBuffer commandBuffer, + VkPipeline executionGraph, + VkDeviceAddress scratch, + VkDeviceSize scratchSize ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdInitializeGraphScratchMemoryAMDX( commandBuffer, executionGraph, scratch, scratchSize ); + } + + void vkCmdDispatchGraphAMDX( VkCommandBuffer commandBuffer, + VkDeviceAddress scratch, + VkDeviceSize scratchSize, + const VkDispatchGraphCountInfoAMDX * pCountInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDispatchGraphAMDX( commandBuffer, scratch, scratchSize, pCountInfo ); + } + + void vkCmdDispatchGraphIndirectAMDX( VkCommandBuffer commandBuffer, + VkDeviceAddress scratch, + VkDeviceSize scratchSize, + const VkDispatchGraphCountInfoAMDX * pCountInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDispatchGraphIndirectAMDX( commandBuffer, scratch, scratchSize, pCountInfo ); + } + + void vkCmdDispatchGraphIndirectCountAMDX( VkCommandBuffer commandBuffer, + VkDeviceAddress scratch, + VkDeviceSize scratchSize, + VkDeviceAddress countInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDispatchGraphIndirectCountAMDX( commandBuffer, scratch, scratchSize, countInfo ); + } +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_EXT_sample_locations === + + void vkCmdSetSampleLocationsEXT( VkCommandBuffer commandBuffer, const VkSampleLocationsInfoEXT * pSampleLocationsInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetSampleLocationsEXT( commandBuffer, pSampleLocationsInfo ); + } + + void vkGetPhysicalDeviceMultisamplePropertiesEXT( VkPhysicalDevice physicalDevice, + VkSampleCountFlagBits samples, + VkMultisamplePropertiesEXT * pMultisampleProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceMultisamplePropertiesEXT( physicalDevice, samples, pMultisampleProperties ); + } + + //=== VK_KHR_get_memory_requirements2 === + + void vkGetImageMemoryRequirements2KHR( VkDevice device, + const VkImageMemoryRequirementsInfo2 * pInfo, + VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return 
::vkGetImageMemoryRequirements2KHR( device, pInfo, pMemoryRequirements ); + } + + void vkGetBufferMemoryRequirements2KHR( VkDevice device, + const VkBufferMemoryRequirementsInfo2 * pInfo, + VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetBufferMemoryRequirements2KHR( device, pInfo, pMemoryRequirements ); + } + + void vkGetImageSparseMemoryRequirements2KHR( VkDevice device, + const VkImageSparseMemoryRequirementsInfo2 * pInfo, + uint32_t * pSparseMemoryRequirementCount, + VkSparseImageMemoryRequirements2 * pSparseMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetImageSparseMemoryRequirements2KHR( device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements ); + } + + //=== VK_KHR_acceleration_structure === + + VkResult vkCreateAccelerationStructureKHR( VkDevice device, + const VkAccelerationStructureCreateInfoKHR * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkAccelerationStructureKHR * pAccelerationStructure ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateAccelerationStructureKHR( device, pCreateInfo, pAllocator, pAccelerationStructure ); + } + + void vkDestroyAccelerationStructureKHR( VkDevice device, + VkAccelerationStructureKHR accelerationStructure, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyAccelerationStructureKHR( device, accelerationStructure, pAllocator ); + } + + void vkCmdBuildAccelerationStructuresKHR( VkCommandBuffer commandBuffer, + uint32_t infoCount, + const VkAccelerationStructureBuildGeometryInfoKHR * pInfos, + const VkAccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBuildAccelerationStructuresKHR( commandBuffer, infoCount, pInfos, ppBuildRangeInfos ); + } + + void vkCmdBuildAccelerationStructuresIndirectKHR( VkCommandBuffer commandBuffer, + uint32_t infoCount, + const VkAccelerationStructureBuildGeometryInfoKHR * pInfos, + const VkDeviceAddress * pIndirectDeviceAddresses, + const uint32_t * pIndirectStrides, + const uint32_t * const * ppMaxPrimitiveCounts ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBuildAccelerationStructuresIndirectKHR( + commandBuffer, infoCount, pInfos, pIndirectDeviceAddresses, pIndirectStrides, ppMaxPrimitiveCounts ); + } + + VkResult vkBuildAccelerationStructuresKHR( VkDevice device, + VkDeferredOperationKHR deferredOperation, + uint32_t infoCount, + const VkAccelerationStructureBuildGeometryInfoKHR * pInfos, + const VkAccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos ) const VULKAN_HPP_NOEXCEPT + { + return ::vkBuildAccelerationStructuresKHR( device, deferredOperation, infoCount, pInfos, ppBuildRangeInfos ); + } + + VkResult vkCopyAccelerationStructureKHR( VkDevice device, + VkDeferredOperationKHR deferredOperation, + const VkCopyAccelerationStructureInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCopyAccelerationStructureKHR( device, deferredOperation, pInfo ); + } + + VkResult vkCopyAccelerationStructureToMemoryKHR( VkDevice device, + VkDeferredOperationKHR deferredOperation, + const VkCopyAccelerationStructureToMemoryInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCopyAccelerationStructureToMemoryKHR( device, deferredOperation, pInfo ); + } + + VkResult vkCopyMemoryToAccelerationStructureKHR( VkDevice device, + VkDeferredOperationKHR deferredOperation, + const VkCopyMemoryToAccelerationStructureInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCopyMemoryToAccelerationStructureKHR( 
device, deferredOperation, pInfo ); + } + + VkResult vkWriteAccelerationStructuresPropertiesKHR( VkDevice device, + uint32_t accelerationStructureCount, + const VkAccelerationStructureKHR * pAccelerationStructures, + VkQueryType queryType, + size_t dataSize, + void * pData, + size_t stride ) const VULKAN_HPP_NOEXCEPT + { + return ::vkWriteAccelerationStructuresPropertiesKHR( device, accelerationStructureCount, pAccelerationStructures, queryType, dataSize, pData, stride ); + } + + void vkCmdCopyAccelerationStructureKHR( VkCommandBuffer commandBuffer, const VkCopyAccelerationStructureInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyAccelerationStructureKHR( commandBuffer, pInfo ); + } + + void vkCmdCopyAccelerationStructureToMemoryKHR( VkCommandBuffer commandBuffer, + const VkCopyAccelerationStructureToMemoryInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyAccelerationStructureToMemoryKHR( commandBuffer, pInfo ); + } + + void vkCmdCopyMemoryToAccelerationStructureKHR( VkCommandBuffer commandBuffer, + const VkCopyMemoryToAccelerationStructureInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyMemoryToAccelerationStructureKHR( commandBuffer, pInfo ); + } + + VkDeviceAddress vkGetAccelerationStructureDeviceAddressKHR( VkDevice device, + const VkAccelerationStructureDeviceAddressInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetAccelerationStructureDeviceAddressKHR( device, pInfo ); + } + + void vkCmdWriteAccelerationStructuresPropertiesKHR( VkCommandBuffer commandBuffer, + uint32_t accelerationStructureCount, + const VkAccelerationStructureKHR * pAccelerationStructures, + VkQueryType queryType, + VkQueryPool queryPool, + uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdWriteAccelerationStructuresPropertiesKHR( + commandBuffer, accelerationStructureCount, pAccelerationStructures, queryType, queryPool, firstQuery ); + } + + void vkGetDeviceAccelerationStructureCompatibilityKHR( VkDevice device, + const VkAccelerationStructureVersionInfoKHR * pVersionInfo, + VkAccelerationStructureCompatibilityKHR * pCompatibility ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceAccelerationStructureCompatibilityKHR( device, pVersionInfo, pCompatibility ); + } + + void vkGetAccelerationStructureBuildSizesKHR( VkDevice device, + VkAccelerationStructureBuildTypeKHR buildType, + const VkAccelerationStructureBuildGeometryInfoKHR * pBuildInfo, + const uint32_t * pMaxPrimitiveCounts, + VkAccelerationStructureBuildSizesInfoKHR * pSizeInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetAccelerationStructureBuildSizesKHR( device, buildType, pBuildInfo, pMaxPrimitiveCounts, pSizeInfo ); + } + + //=== VK_KHR_ray_tracing_pipeline === + + void vkCmdTraceRaysKHR( VkCommandBuffer commandBuffer, + const VkStridedDeviceAddressRegionKHR * pRaygenShaderBindingTable, + const VkStridedDeviceAddressRegionKHR * pMissShaderBindingTable, + const VkStridedDeviceAddressRegionKHR * pHitShaderBindingTable, + const VkStridedDeviceAddressRegionKHR * pCallableShaderBindingTable, + uint32_t width, + uint32_t height, + uint32_t depth ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdTraceRaysKHR( + commandBuffer, pRaygenShaderBindingTable, pMissShaderBindingTable, pHitShaderBindingTable, pCallableShaderBindingTable, width, height, depth ); + } + + VkResult vkCreateRayTracingPipelinesKHR( VkDevice device, + VkDeferredOperationKHR deferredOperation, + VkPipelineCache pipelineCache, + uint32_t createInfoCount, + const VkRayTracingPipelineCreateInfoKHR * 
pCreateInfos, + const VkAllocationCallbacks * pAllocator, + VkPipeline * pPipelines ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateRayTracingPipelinesKHR( device, deferredOperation, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines ); + } + + VkResult vkGetRayTracingShaderGroupHandlesKHR( + VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void * pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetRayTracingShaderGroupHandlesKHR( device, pipeline, firstGroup, groupCount, dataSize, pData ); + } + + VkResult vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( + VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void * pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( device, pipeline, firstGroup, groupCount, dataSize, pData ); + } + + void vkCmdTraceRaysIndirectKHR( VkCommandBuffer commandBuffer, + const VkStridedDeviceAddressRegionKHR * pRaygenShaderBindingTable, + const VkStridedDeviceAddressRegionKHR * pMissShaderBindingTable, + const VkStridedDeviceAddressRegionKHR * pHitShaderBindingTable, + const VkStridedDeviceAddressRegionKHR * pCallableShaderBindingTable, + VkDeviceAddress indirectDeviceAddress ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdTraceRaysIndirectKHR( + commandBuffer, pRaygenShaderBindingTable, pMissShaderBindingTable, pHitShaderBindingTable, pCallableShaderBindingTable, indirectDeviceAddress ); + } + + VkDeviceSize vkGetRayTracingShaderGroupStackSizeKHR( VkDevice device, + VkPipeline pipeline, + uint32_t group, + VkShaderGroupShaderKHR groupShader ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetRayTracingShaderGroupStackSizeKHR( device, pipeline, group, groupShader ); + } + + void vkCmdSetRayTracingPipelineStackSizeKHR( VkCommandBuffer commandBuffer, uint32_t pipelineStackSize ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetRayTracingPipelineStackSizeKHR( commandBuffer, pipelineStackSize ); + } + + //=== VK_KHR_sampler_ycbcr_conversion === + + VkResult vkCreateSamplerYcbcrConversionKHR( VkDevice device, + const VkSamplerYcbcrConversionCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSamplerYcbcrConversion * pYcbcrConversion ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateSamplerYcbcrConversionKHR( device, pCreateInfo, pAllocator, pYcbcrConversion ); + } + + void vkDestroySamplerYcbcrConversionKHR( VkDevice device, + VkSamplerYcbcrConversion ycbcrConversion, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroySamplerYcbcrConversionKHR( device, ycbcrConversion, pAllocator ); + } + + //=== VK_KHR_bind_memory2 === + + VkResult vkBindBufferMemory2KHR( VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo * pBindInfos ) const VULKAN_HPP_NOEXCEPT + { + return ::vkBindBufferMemory2KHR( device, bindInfoCount, pBindInfos ); + } + + VkResult vkBindImageMemory2KHR( VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo * pBindInfos ) const VULKAN_HPP_NOEXCEPT + { + return ::vkBindImageMemory2KHR( device, bindInfoCount, pBindInfos ); + } + + //=== VK_EXT_image_drm_format_modifier === + + VkResult vkGetImageDrmFormatModifierPropertiesEXT( VkDevice device, + VkImage image, + VkImageDrmFormatModifierPropertiesEXT * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetImageDrmFormatModifierPropertiesEXT( device, image, pProperties ); + } + + //=== VK_EXT_validation_cache === + + VkResult 
vkCreateValidationCacheEXT( VkDevice device, + const VkValidationCacheCreateInfoEXT * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkValidationCacheEXT * pValidationCache ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateValidationCacheEXT( device, pCreateInfo, pAllocator, pValidationCache ); + } + + void + vkDestroyValidationCacheEXT( VkDevice device, VkValidationCacheEXT validationCache, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyValidationCacheEXT( device, validationCache, pAllocator ); + } + + VkResult vkMergeValidationCachesEXT( VkDevice device, + VkValidationCacheEXT dstCache, + uint32_t srcCacheCount, + const VkValidationCacheEXT * pSrcCaches ) const VULKAN_HPP_NOEXCEPT + { + return ::vkMergeValidationCachesEXT( device, dstCache, srcCacheCount, pSrcCaches ); + } + + VkResult vkGetValidationCacheDataEXT( VkDevice device, VkValidationCacheEXT validationCache, size_t * pDataSize, void * pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetValidationCacheDataEXT( device, validationCache, pDataSize, pData ); + } + + //=== VK_NV_shading_rate_image === + + void vkCmdBindShadingRateImageNV( VkCommandBuffer commandBuffer, VkImageView imageView, VkImageLayout imageLayout ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBindShadingRateImageNV( commandBuffer, imageView, imageLayout ); + } + + void vkCmdSetViewportShadingRatePaletteNV( VkCommandBuffer commandBuffer, + uint32_t firstViewport, + uint32_t viewportCount, + const VkShadingRatePaletteNV * pShadingRatePalettes ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetViewportShadingRatePaletteNV( commandBuffer, firstViewport, viewportCount, pShadingRatePalettes ); + } + + void vkCmdSetCoarseSampleOrderNV( VkCommandBuffer commandBuffer, + VkCoarseSampleOrderTypeNV sampleOrderType, + uint32_t customSampleOrderCount, + const VkCoarseSampleOrderCustomNV * pCustomSampleOrders ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetCoarseSampleOrderNV( commandBuffer, sampleOrderType, customSampleOrderCount, pCustomSampleOrders ); + } + + //=== VK_NV_ray_tracing === + + VkResult vkCreateAccelerationStructureNV( VkDevice device, + const VkAccelerationStructureCreateInfoNV * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkAccelerationStructureNV * pAccelerationStructure ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateAccelerationStructureNV( device, pCreateInfo, pAllocator, pAccelerationStructure ); + } + + void vkDestroyAccelerationStructureNV( VkDevice device, + VkAccelerationStructureNV accelerationStructure, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyAccelerationStructureNV( device, accelerationStructure, pAllocator ); + } + + void vkGetAccelerationStructureMemoryRequirementsNV( VkDevice device, + const VkAccelerationStructureMemoryRequirementsInfoNV * pInfo, + VkMemoryRequirements2KHR * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetAccelerationStructureMemoryRequirementsNV( device, pInfo, pMemoryRequirements ); + } + + VkResult vkBindAccelerationStructureMemoryNV( VkDevice device, + uint32_t bindInfoCount, + const VkBindAccelerationStructureMemoryInfoNV * pBindInfos ) const VULKAN_HPP_NOEXCEPT + { + return ::vkBindAccelerationStructureMemoryNV( device, bindInfoCount, pBindInfos ); + } + + void vkCmdBuildAccelerationStructureNV( VkCommandBuffer commandBuffer, + const VkAccelerationStructureInfoNV * pInfo, + VkBuffer instanceData, + VkDeviceSize instanceOffset, + VkBool32 update, + 
VkAccelerationStructureNV dst, + VkAccelerationStructureNV src, + VkBuffer scratch, + VkDeviceSize scratchOffset ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBuildAccelerationStructureNV( commandBuffer, pInfo, instanceData, instanceOffset, update, dst, src, scratch, scratchOffset ); + } + + void vkCmdCopyAccelerationStructureNV( VkCommandBuffer commandBuffer, + VkAccelerationStructureNV dst, + VkAccelerationStructureNV src, + VkCopyAccelerationStructureModeKHR mode ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyAccelerationStructureNV( commandBuffer, dst, src, mode ); + } + + void vkCmdTraceRaysNV( VkCommandBuffer commandBuffer, + VkBuffer raygenShaderBindingTableBuffer, + VkDeviceSize raygenShaderBindingOffset, + VkBuffer missShaderBindingTableBuffer, + VkDeviceSize missShaderBindingOffset, + VkDeviceSize missShaderBindingStride, + VkBuffer hitShaderBindingTableBuffer, + VkDeviceSize hitShaderBindingOffset, + VkDeviceSize hitShaderBindingStride, + VkBuffer callableShaderBindingTableBuffer, + VkDeviceSize callableShaderBindingOffset, + VkDeviceSize callableShaderBindingStride, + uint32_t width, + uint32_t height, + uint32_t depth ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdTraceRaysNV( commandBuffer, + raygenShaderBindingTableBuffer, + raygenShaderBindingOffset, + missShaderBindingTableBuffer, + missShaderBindingOffset, + missShaderBindingStride, + hitShaderBindingTableBuffer, + hitShaderBindingOffset, + hitShaderBindingStride, + callableShaderBindingTableBuffer, + callableShaderBindingOffset, + callableShaderBindingStride, + width, + height, + depth ); + } + + VkResult vkCreateRayTracingPipelinesNV( VkDevice device, + VkPipelineCache pipelineCache, + uint32_t createInfoCount, + const VkRayTracingPipelineCreateInfoNV * pCreateInfos, + const VkAllocationCallbacks * pAllocator, + VkPipeline * pPipelines ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateRayTracingPipelinesNV( device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines ); + } + + VkResult vkGetRayTracingShaderGroupHandlesNV( + VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void * pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetRayTracingShaderGroupHandlesNV( device, pipeline, firstGroup, groupCount, dataSize, pData ); + } + + VkResult vkGetAccelerationStructureHandleNV( VkDevice device, + VkAccelerationStructureNV accelerationStructure, + size_t dataSize, + void * pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetAccelerationStructureHandleNV( device, accelerationStructure, dataSize, pData ); + } + + void vkCmdWriteAccelerationStructuresPropertiesNV( VkCommandBuffer commandBuffer, + uint32_t accelerationStructureCount, + const VkAccelerationStructureNV * pAccelerationStructures, + VkQueryType queryType, + VkQueryPool queryPool, + uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdWriteAccelerationStructuresPropertiesNV( + commandBuffer, accelerationStructureCount, pAccelerationStructures, queryType, queryPool, firstQuery ); + } + + VkResult vkCompileDeferredNV( VkDevice device, VkPipeline pipeline, uint32_t shader ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCompileDeferredNV( device, pipeline, shader ); + } + + //=== VK_KHR_maintenance3 === + + void vkGetDescriptorSetLayoutSupportKHR( VkDevice device, + const VkDescriptorSetLayoutCreateInfo * pCreateInfo, + VkDescriptorSetLayoutSupport * pSupport ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDescriptorSetLayoutSupportKHR( device, pCreateInfo, pSupport ); + } + + //=== 
VK_KHR_draw_indirect_count === + + void vkCmdDrawIndirectCountKHR( VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawIndirectCountKHR( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); + } + + void vkCmdDrawIndexedIndirectCountKHR( VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawIndexedIndirectCountKHR( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); + } + + //=== VK_EXT_external_memory_host === + + VkResult vkGetMemoryHostPointerPropertiesEXT( VkDevice device, + VkExternalMemoryHandleTypeFlagBits handleType, + const void * pHostPointer, + VkMemoryHostPointerPropertiesEXT * pMemoryHostPointerProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetMemoryHostPointerPropertiesEXT( device, handleType, pHostPointer, pMemoryHostPointerProperties ); + } + + //=== VK_AMD_buffer_marker === + + void vkCmdWriteBufferMarkerAMD( VkCommandBuffer commandBuffer, + VkPipelineStageFlagBits pipelineStage, + VkBuffer dstBuffer, + VkDeviceSize dstOffset, + uint32_t marker ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdWriteBufferMarkerAMD( commandBuffer, pipelineStage, dstBuffer, dstOffset, marker ); + } + + void vkCmdWriteBufferMarker2AMD( + VkCommandBuffer commandBuffer, VkPipelineStageFlags2 stage, VkBuffer dstBuffer, VkDeviceSize dstOffset, uint32_t marker ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdWriteBufferMarker2AMD( commandBuffer, stage, dstBuffer, dstOffset, marker ); + } + + //=== VK_EXT_calibrated_timestamps === + + VkResult vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( VkPhysicalDevice physicalDevice, + uint32_t * pTimeDomainCount, + VkTimeDomainKHR * pTimeDomains ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( physicalDevice, pTimeDomainCount, pTimeDomains ); + } + + VkResult vkGetCalibratedTimestampsEXT( VkDevice device, + uint32_t timestampCount, + const VkCalibratedTimestampInfoKHR * pTimestampInfos, + uint64_t * pTimestamps, + uint64_t * pMaxDeviation ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetCalibratedTimestampsEXT( device, timestampCount, pTimestampInfos, pTimestamps, pMaxDeviation ); + } + + //=== VK_NV_mesh_shader === + + void vkCmdDrawMeshTasksNV( VkCommandBuffer commandBuffer, uint32_t taskCount, uint32_t firstTask ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawMeshTasksNV( commandBuffer, taskCount, firstTask ); + } + + void vkCmdDrawMeshTasksIndirectNV( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawMeshTasksIndirectNV( commandBuffer, buffer, offset, drawCount, stride ); + } + + void vkCmdDrawMeshTasksIndirectCountNV( VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawMeshTasksIndirectCountNV( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); + } + + //=== VK_NV_scissor_exclusive === + + void vkCmdSetExclusiveScissorEnableNV( VkCommandBuffer commandBuffer, + uint32_t 
firstExclusiveScissor, + uint32_t exclusiveScissorCount, + const VkBool32 * pExclusiveScissorEnables ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetExclusiveScissorEnableNV( commandBuffer, firstExclusiveScissor, exclusiveScissorCount, pExclusiveScissorEnables ); + } + + void vkCmdSetExclusiveScissorNV( VkCommandBuffer commandBuffer, + uint32_t firstExclusiveScissor, + uint32_t exclusiveScissorCount, + const VkRect2D * pExclusiveScissors ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetExclusiveScissorNV( commandBuffer, firstExclusiveScissor, exclusiveScissorCount, pExclusiveScissors ); + } + + //=== VK_NV_device_diagnostic_checkpoints === + + void vkCmdSetCheckpointNV( VkCommandBuffer commandBuffer, const void * pCheckpointMarker ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetCheckpointNV( commandBuffer, pCheckpointMarker ); + } + + void vkGetQueueCheckpointDataNV( VkQueue queue, uint32_t * pCheckpointDataCount, VkCheckpointDataNV * pCheckpointData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetQueueCheckpointDataNV( queue, pCheckpointDataCount, pCheckpointData ); + } + + void vkGetQueueCheckpointData2NV( VkQueue queue, uint32_t * pCheckpointDataCount, VkCheckpointData2NV * pCheckpointData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetQueueCheckpointData2NV( queue, pCheckpointDataCount, pCheckpointData ); + } + + //=== VK_KHR_timeline_semaphore === + + VkResult vkGetSemaphoreCounterValueKHR( VkDevice device, VkSemaphore semaphore, uint64_t * pValue ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetSemaphoreCounterValueKHR( device, semaphore, pValue ); + } + + VkResult vkWaitSemaphoresKHR( VkDevice device, const VkSemaphoreWaitInfo * pWaitInfo, uint64_t timeout ) const VULKAN_HPP_NOEXCEPT + { + return ::vkWaitSemaphoresKHR( device, pWaitInfo, timeout ); + } + + VkResult vkSignalSemaphoreKHR( VkDevice device, const VkSemaphoreSignalInfo * pSignalInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkSignalSemaphoreKHR( device, pSignalInfo ); + } + + //=== VK_INTEL_performance_query === + + VkResult vkInitializePerformanceApiINTEL( VkDevice device, const VkInitializePerformanceApiInfoINTEL * pInitializeInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkInitializePerformanceApiINTEL( device, pInitializeInfo ); + } + + void vkUninitializePerformanceApiINTEL( VkDevice device ) const VULKAN_HPP_NOEXCEPT + { + return ::vkUninitializePerformanceApiINTEL( device ); + } + + VkResult vkCmdSetPerformanceMarkerINTEL( VkCommandBuffer commandBuffer, const VkPerformanceMarkerInfoINTEL * pMarkerInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetPerformanceMarkerINTEL( commandBuffer, pMarkerInfo ); + } + + VkResult vkCmdSetPerformanceStreamMarkerINTEL( VkCommandBuffer commandBuffer, + const VkPerformanceStreamMarkerInfoINTEL * pMarkerInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetPerformanceStreamMarkerINTEL( commandBuffer, pMarkerInfo ); + } + + VkResult vkCmdSetPerformanceOverrideINTEL( VkCommandBuffer commandBuffer, const VkPerformanceOverrideInfoINTEL * pOverrideInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetPerformanceOverrideINTEL( commandBuffer, pOverrideInfo ); + } + + VkResult vkAcquirePerformanceConfigurationINTEL( VkDevice device, + const VkPerformanceConfigurationAcquireInfoINTEL * pAcquireInfo, + VkPerformanceConfigurationINTEL * pConfiguration ) const VULKAN_HPP_NOEXCEPT + { + return ::vkAcquirePerformanceConfigurationINTEL( device, pAcquireInfo, pConfiguration ); + } + + VkResult vkReleasePerformanceConfigurationINTEL( VkDevice device, 
VkPerformanceConfigurationINTEL configuration ) const VULKAN_HPP_NOEXCEPT + { + return ::vkReleasePerformanceConfigurationINTEL( device, configuration ); + } + + VkResult vkQueueSetPerformanceConfigurationINTEL( VkQueue queue, VkPerformanceConfigurationINTEL configuration ) const VULKAN_HPP_NOEXCEPT + { + return ::vkQueueSetPerformanceConfigurationINTEL( queue, configuration ); + } + + VkResult + vkGetPerformanceParameterINTEL( VkDevice device, VkPerformanceParameterTypeINTEL parameter, VkPerformanceValueINTEL * pValue ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPerformanceParameterINTEL( device, parameter, pValue ); + } + + //=== VK_AMD_display_native_hdr === + + void vkSetLocalDimmingAMD( VkDevice device, VkSwapchainKHR swapChain, VkBool32 localDimmingEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkSetLocalDimmingAMD( device, swapChain, localDimmingEnable ); + } + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_imagepipe_surface === + + VkResult vkCreateImagePipeSurfaceFUCHSIA( VkInstance instance, + const VkImagePipeSurfaceCreateInfoFUCHSIA * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateImagePipeSurfaceFUCHSIA( instance, pCreateInfo, pAllocator, pSurface ); + } +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + +# if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_surface === + + VkResult vkCreateMetalSurfaceEXT( VkInstance instance, + const VkMetalSurfaceCreateInfoEXT * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateMetalSurfaceEXT( instance, pCreateInfo, pAllocator, pSurface ); + } +# endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== VK_KHR_fragment_shading_rate === + + VkResult vkGetPhysicalDeviceFragmentShadingRatesKHR( VkPhysicalDevice physicalDevice, + uint32_t * pFragmentShadingRateCount, + VkPhysicalDeviceFragmentShadingRateKHR * pFragmentShadingRates ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceFragmentShadingRatesKHR( physicalDevice, pFragmentShadingRateCount, pFragmentShadingRates ); + } + + void vkCmdSetFragmentShadingRateKHR( VkCommandBuffer commandBuffer, + const VkExtent2D * pFragmentSize, + const VkFragmentShadingRateCombinerOpKHR combinerOps[2] ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetFragmentShadingRateKHR( commandBuffer, pFragmentSize, combinerOps ); + } + + //=== VK_KHR_dynamic_rendering_local_read === + + void vkCmdSetRenderingAttachmentLocationsKHR( VkCommandBuffer commandBuffer, + const VkRenderingAttachmentLocationInfoKHR * pLocationInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetRenderingAttachmentLocationsKHR( commandBuffer, pLocationInfo ); + } + + void vkCmdSetRenderingInputAttachmentIndicesKHR( VkCommandBuffer commandBuffer, + const VkRenderingInputAttachmentIndexInfoKHR * pInputAttachmentIndexInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetRenderingInputAttachmentIndicesKHR( commandBuffer, pInputAttachmentIndexInfo ); + } + + //=== VK_EXT_buffer_device_address === + + VkDeviceAddress vkGetBufferDeviceAddressEXT( VkDevice device, const VkBufferDeviceAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetBufferDeviceAddressEXT( device, pInfo ); + } + + //=== VK_EXT_tooling_info === + + VkResult vkGetPhysicalDeviceToolPropertiesEXT( VkPhysicalDevice physicalDevice, + uint32_t * pToolCount, + VkPhysicalDeviceToolProperties * pToolProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceToolPropertiesEXT( 
physicalDevice, pToolCount, pToolProperties ); + } + + //=== VK_KHR_present_wait === + + VkResult vkWaitForPresentKHR( VkDevice device, VkSwapchainKHR swapchain, uint64_t presentId, uint64_t timeout ) const VULKAN_HPP_NOEXCEPT + { + return ::vkWaitForPresentKHR( device, swapchain, presentId, timeout ); + } + + //=== VK_NV_cooperative_matrix === + + VkResult vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( VkPhysicalDevice physicalDevice, + uint32_t * pPropertyCount, + VkCooperativeMatrixPropertiesNV * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( physicalDevice, pPropertyCount, pProperties ); + } + + //=== VK_NV_coverage_reduction_mode === + + VkResult vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( + VkPhysicalDevice physicalDevice, uint32_t * pCombinationCount, VkFramebufferMixedSamplesCombinationNV * pCombinations ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( physicalDevice, pCombinationCount, pCombinations ); + } + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_EXT_full_screen_exclusive === + + VkResult vkGetPhysicalDeviceSurfacePresentModes2EXT( VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, + uint32_t * pPresentModeCount, + VkPresentModeKHR * pPresentModes ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceSurfacePresentModes2EXT( physicalDevice, pSurfaceInfo, pPresentModeCount, pPresentModes ); + } + + VkResult vkAcquireFullScreenExclusiveModeEXT( VkDevice device, VkSwapchainKHR swapchain ) const VULKAN_HPP_NOEXCEPT + { + return ::vkAcquireFullScreenExclusiveModeEXT( device, swapchain ); + } + + VkResult vkReleaseFullScreenExclusiveModeEXT( VkDevice device, VkSwapchainKHR swapchain ) const VULKAN_HPP_NOEXCEPT + { + return ::vkReleaseFullScreenExclusiveModeEXT( device, swapchain ); + } + + VkResult vkGetDeviceGroupSurfacePresentModes2EXT( VkDevice device, + const VkPhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, + VkDeviceGroupPresentModeFlagsKHR * pModes ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceGroupSurfacePresentModes2EXT( device, pSurfaceInfo, pModes ); + } +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_EXT_headless_surface === + + VkResult vkCreateHeadlessSurfaceEXT( VkInstance instance, + const VkHeadlessSurfaceCreateInfoEXT * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateHeadlessSurfaceEXT( instance, pCreateInfo, pAllocator, pSurface ); + } + + //=== VK_KHR_buffer_device_address === + + VkDeviceAddress vkGetBufferDeviceAddressKHR( VkDevice device, const VkBufferDeviceAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetBufferDeviceAddressKHR( device, pInfo ); + } + + uint64_t vkGetBufferOpaqueCaptureAddressKHR( VkDevice device, const VkBufferDeviceAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetBufferOpaqueCaptureAddressKHR( device, pInfo ); + } + + uint64_t vkGetDeviceMemoryOpaqueCaptureAddressKHR( VkDevice device, const VkDeviceMemoryOpaqueCaptureAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceMemoryOpaqueCaptureAddressKHR( device, pInfo ); + } + + //=== VK_EXT_line_rasterization === + + void vkCmdSetLineStippleEXT( VkCommandBuffer commandBuffer, uint32_t lineStippleFactor, uint16_t lineStipplePattern ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetLineStippleEXT( commandBuffer, lineStippleFactor, 
lineStipplePattern ); + } + + //=== VK_EXT_host_query_reset === + + void vkResetQueryPoolEXT( VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT + { + return ::vkResetQueryPoolEXT( device, queryPool, firstQuery, queryCount ); + } + + //=== VK_EXT_extended_dynamic_state === + + void vkCmdSetCullModeEXT( VkCommandBuffer commandBuffer, VkCullModeFlags cullMode ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetCullModeEXT( commandBuffer, cullMode ); + } + + void vkCmdSetFrontFaceEXT( VkCommandBuffer commandBuffer, VkFrontFace frontFace ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetFrontFaceEXT( commandBuffer, frontFace ); + } + + void vkCmdSetPrimitiveTopologyEXT( VkCommandBuffer commandBuffer, VkPrimitiveTopology primitiveTopology ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetPrimitiveTopologyEXT( commandBuffer, primitiveTopology ); + } + + void vkCmdSetViewportWithCountEXT( VkCommandBuffer commandBuffer, uint32_t viewportCount, const VkViewport * pViewports ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetViewportWithCountEXT( commandBuffer, viewportCount, pViewports ); + } + + void vkCmdSetScissorWithCountEXT( VkCommandBuffer commandBuffer, uint32_t scissorCount, const VkRect2D * pScissors ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetScissorWithCountEXT( commandBuffer, scissorCount, pScissors ); + } + + void vkCmdBindVertexBuffers2EXT( VkCommandBuffer commandBuffer, + uint32_t firstBinding, + uint32_t bindingCount, + const VkBuffer * pBuffers, + const VkDeviceSize * pOffsets, + const VkDeviceSize * pSizes, + const VkDeviceSize * pStrides ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBindVertexBuffers2EXT( commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets, pSizes, pStrides ); + } + + void vkCmdSetDepthTestEnableEXT( VkCommandBuffer commandBuffer, VkBool32 depthTestEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDepthTestEnableEXT( commandBuffer, depthTestEnable ); + } + + void vkCmdSetDepthWriteEnableEXT( VkCommandBuffer commandBuffer, VkBool32 depthWriteEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDepthWriteEnableEXT( commandBuffer, depthWriteEnable ); + } + + void vkCmdSetDepthCompareOpEXT( VkCommandBuffer commandBuffer, VkCompareOp depthCompareOp ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDepthCompareOpEXT( commandBuffer, depthCompareOp ); + } + + void vkCmdSetDepthBoundsTestEnableEXT( VkCommandBuffer commandBuffer, VkBool32 depthBoundsTestEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDepthBoundsTestEnableEXT( commandBuffer, depthBoundsTestEnable ); + } + + void vkCmdSetStencilTestEnableEXT( VkCommandBuffer commandBuffer, VkBool32 stencilTestEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetStencilTestEnableEXT( commandBuffer, stencilTestEnable ); + } + + void vkCmdSetStencilOpEXT( VkCommandBuffer commandBuffer, + VkStencilFaceFlags faceMask, + VkStencilOp failOp, + VkStencilOp passOp, + VkStencilOp depthFailOp, + VkCompareOp compareOp ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetStencilOpEXT( commandBuffer, faceMask, failOp, passOp, depthFailOp, compareOp ); + } + + //=== VK_KHR_deferred_host_operations === + + VkResult vkCreateDeferredOperationKHR( VkDevice device, + const VkAllocationCallbacks * pAllocator, + VkDeferredOperationKHR * pDeferredOperation ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateDeferredOperationKHR( device, pAllocator, pDeferredOperation ); + } + + void + vkDestroyDeferredOperationKHR( VkDevice device, 
VkDeferredOperationKHR operation, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyDeferredOperationKHR( device, operation, pAllocator ); + } + + uint32_t vkGetDeferredOperationMaxConcurrencyKHR( VkDevice device, VkDeferredOperationKHR operation ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeferredOperationMaxConcurrencyKHR( device, operation ); + } + + VkResult vkGetDeferredOperationResultKHR( VkDevice device, VkDeferredOperationKHR operation ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeferredOperationResultKHR( device, operation ); + } + + VkResult vkDeferredOperationJoinKHR( VkDevice device, VkDeferredOperationKHR operation ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDeferredOperationJoinKHR( device, operation ); + } + + //=== VK_KHR_pipeline_executable_properties === + + VkResult vkGetPipelineExecutablePropertiesKHR( VkDevice device, + const VkPipelineInfoKHR * pPipelineInfo, + uint32_t * pExecutableCount, + VkPipelineExecutablePropertiesKHR * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPipelineExecutablePropertiesKHR( device, pPipelineInfo, pExecutableCount, pProperties ); + } + + VkResult vkGetPipelineExecutableStatisticsKHR( VkDevice device, + const VkPipelineExecutableInfoKHR * pExecutableInfo, + uint32_t * pStatisticCount, + VkPipelineExecutableStatisticKHR * pStatistics ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPipelineExecutableStatisticsKHR( device, pExecutableInfo, pStatisticCount, pStatistics ); + } + + VkResult + vkGetPipelineExecutableInternalRepresentationsKHR( VkDevice device, + const VkPipelineExecutableInfoKHR * pExecutableInfo, + uint32_t * pInternalRepresentationCount, + VkPipelineExecutableInternalRepresentationKHR * pInternalRepresentations ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPipelineExecutableInternalRepresentationsKHR( device, pExecutableInfo, pInternalRepresentationCount, pInternalRepresentations ); + } + + //=== VK_EXT_host_image_copy === + + VkResult vkCopyMemoryToImageEXT( VkDevice device, const VkCopyMemoryToImageInfoEXT * pCopyMemoryToImageInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCopyMemoryToImageEXT( device, pCopyMemoryToImageInfo ); + } + + VkResult vkCopyImageToMemoryEXT( VkDevice device, const VkCopyImageToMemoryInfoEXT * pCopyImageToMemoryInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCopyImageToMemoryEXT( device, pCopyImageToMemoryInfo ); + } + + VkResult vkCopyImageToImageEXT( VkDevice device, const VkCopyImageToImageInfoEXT * pCopyImageToImageInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCopyImageToImageEXT( device, pCopyImageToImageInfo ); + } + + VkResult vkTransitionImageLayoutEXT( VkDevice device, + uint32_t transitionCount, + const VkHostImageLayoutTransitionInfoEXT * pTransitions ) const VULKAN_HPP_NOEXCEPT + { + return ::vkTransitionImageLayoutEXT( device, transitionCount, pTransitions ); + } + + void vkGetImageSubresourceLayout2EXT( VkDevice device, + VkImage image, + const VkImageSubresource2KHR * pSubresource, + VkSubresourceLayout2KHR * pLayout ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetImageSubresourceLayout2EXT( device, image, pSubresource, pLayout ); + } + + //=== VK_KHR_map_memory2 === + + VkResult vkMapMemory2KHR( VkDevice device, const VkMemoryMapInfoKHR * pMemoryMapInfo, void ** ppData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkMapMemory2KHR( device, pMemoryMapInfo, ppData ); + } + + VkResult vkUnmapMemory2KHR( VkDevice device, const VkMemoryUnmapInfoKHR * pMemoryUnmapInfo ) const VULKAN_HPP_NOEXCEPT + { + return 
::vkUnmapMemory2KHR( device, pMemoryUnmapInfo ); + } + + //=== VK_EXT_swapchain_maintenance1 === + + VkResult vkReleaseSwapchainImagesEXT( VkDevice device, const VkReleaseSwapchainImagesInfoEXT * pReleaseInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkReleaseSwapchainImagesEXT( device, pReleaseInfo ); + } + + //=== VK_NV_device_generated_commands === + + void vkGetGeneratedCommandsMemoryRequirementsNV( VkDevice device, + const VkGeneratedCommandsMemoryRequirementsInfoNV * pInfo, + VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetGeneratedCommandsMemoryRequirementsNV( device, pInfo, pMemoryRequirements ); + } + + void vkCmdPreprocessGeneratedCommandsNV( VkCommandBuffer commandBuffer, + const VkGeneratedCommandsInfoNV * pGeneratedCommandsInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdPreprocessGeneratedCommandsNV( commandBuffer, pGeneratedCommandsInfo ); + } + + void vkCmdExecuteGeneratedCommandsNV( VkCommandBuffer commandBuffer, + VkBool32 isPreprocessed, + const VkGeneratedCommandsInfoNV * pGeneratedCommandsInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdExecuteGeneratedCommandsNV( commandBuffer, isPreprocessed, pGeneratedCommandsInfo ); + } + + void vkCmdBindPipelineShaderGroupNV( VkCommandBuffer commandBuffer, + VkPipelineBindPoint pipelineBindPoint, + VkPipeline pipeline, + uint32_t groupIndex ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBindPipelineShaderGroupNV( commandBuffer, pipelineBindPoint, pipeline, groupIndex ); + } + + VkResult vkCreateIndirectCommandsLayoutNV( VkDevice device, + const VkIndirectCommandsLayoutCreateInfoNV * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkIndirectCommandsLayoutNV * pIndirectCommandsLayout ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateIndirectCommandsLayoutNV( device, pCreateInfo, pAllocator, pIndirectCommandsLayout ); + } + + void vkDestroyIndirectCommandsLayoutNV( VkDevice device, + VkIndirectCommandsLayoutNV indirectCommandsLayout, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyIndirectCommandsLayoutNV( device, indirectCommandsLayout, pAllocator ); + } + + //=== VK_EXT_depth_bias_control === + + void vkCmdSetDepthBias2EXT( VkCommandBuffer commandBuffer, const VkDepthBiasInfoEXT * pDepthBiasInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDepthBias2EXT( commandBuffer, pDepthBiasInfo ); + } + + //=== VK_EXT_acquire_drm_display === + + VkResult vkAcquireDrmDisplayEXT( VkPhysicalDevice physicalDevice, int32_t drmFd, VkDisplayKHR display ) const VULKAN_HPP_NOEXCEPT + { + return ::vkAcquireDrmDisplayEXT( physicalDevice, drmFd, display ); + } + + VkResult vkGetDrmDisplayEXT( VkPhysicalDevice physicalDevice, int32_t drmFd, uint32_t connectorId, VkDisplayKHR * display ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDrmDisplayEXT( physicalDevice, drmFd, connectorId, display ); + } + + //=== VK_EXT_private_data === + + VkResult vkCreatePrivateDataSlotEXT( VkDevice device, + const VkPrivateDataSlotCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkPrivateDataSlot * pPrivateDataSlot ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreatePrivateDataSlotEXT( device, pCreateInfo, pAllocator, pPrivateDataSlot ); + } + + void vkDestroyPrivateDataSlotEXT( VkDevice device, VkPrivateDataSlot privateDataSlot, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyPrivateDataSlotEXT( device, privateDataSlot, pAllocator ); + } + + VkResult vkSetPrivateDataEXT( VkDevice device, 
VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlot privateDataSlot, uint64_t data ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkSetPrivateDataEXT( device, objectType, objectHandle, privateDataSlot, data ); + } + + void vkGetPrivateDataEXT( VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlot privateDataSlot, uint64_t * pData ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkGetPrivateDataEXT( device, objectType, objectHandle, privateDataSlot, pData ); + } + + //=== VK_KHR_video_encode_queue === + + VkResult + vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR( VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceVideoEncodeQualityLevelInfoKHR * pQualityLevelInfo, + VkVideoEncodeQualityLevelPropertiesKHR * pQualityLevelProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR( physicalDevice, pQualityLevelInfo, pQualityLevelProperties ); + } + + VkResult vkGetEncodedVideoSessionParametersKHR( VkDevice device, + const VkVideoEncodeSessionParametersGetInfoKHR * pVideoSessionParametersInfo, + VkVideoEncodeSessionParametersFeedbackInfoKHR * pFeedbackInfo, + size_t * pDataSize, + void * pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetEncodedVideoSessionParametersKHR( device, pVideoSessionParametersInfo, pFeedbackInfo, pDataSize, pData ); + } + + void vkCmdEncodeVideoKHR( VkCommandBuffer commandBuffer, const VkVideoEncodeInfoKHR * pEncodeInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdEncodeVideoKHR( commandBuffer, pEncodeInfo ); + } + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_NV_cuda_kernel_launch === + + VkResult vkCreateCudaModuleNV( VkDevice device, + const VkCudaModuleCreateInfoNV * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkCudaModuleNV * pModule ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateCudaModuleNV( device, pCreateInfo, pAllocator, pModule ); + } + + VkResult vkGetCudaModuleCacheNV( VkDevice device, VkCudaModuleNV module, size_t * pCacheSize, void * pCacheData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetCudaModuleCacheNV( device, module, pCacheSize, pCacheData ); + } + + VkResult vkCreateCudaFunctionNV( VkDevice device, + const VkCudaFunctionCreateInfoNV * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkCudaFunctionNV * pFunction ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateCudaFunctionNV( device, pCreateInfo, pAllocator, pFunction ); + } + + void vkDestroyCudaModuleNV( VkDevice device, VkCudaModuleNV module, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyCudaModuleNV( device, module, pAllocator ); + } + + void vkDestroyCudaFunctionNV( VkDevice device, VkCudaFunctionNV function, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyCudaFunctionNV( device, function, pAllocator ); + } + + void vkCmdCudaLaunchKernelNV( VkCommandBuffer commandBuffer, const VkCudaLaunchInfoNV * pLaunchInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCudaLaunchKernelNV( commandBuffer, pLaunchInfo ); + } +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +# if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_objects === + + void vkExportMetalObjectsEXT( VkDevice device, VkExportMetalObjectsInfoEXT * pMetalObjectsInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkExportMetalObjectsEXT( device, pMetalObjectsInfo ); + } +# endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== VK_KHR_synchronization2 === + + void vkCmdSetEvent2KHR( VkCommandBuffer commandBuffer, 
VkEvent event, const VkDependencyInfo * pDependencyInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetEvent2KHR( commandBuffer, event, pDependencyInfo ); + } + + void vkCmdResetEvent2KHR( VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags2 stageMask ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdResetEvent2KHR( commandBuffer, event, stageMask ); + } + + void vkCmdWaitEvents2KHR( VkCommandBuffer commandBuffer, + uint32_t eventCount, + const VkEvent * pEvents, + const VkDependencyInfo * pDependencyInfos ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdWaitEvents2KHR( commandBuffer, eventCount, pEvents, pDependencyInfos ); + } + + void vkCmdPipelineBarrier2KHR( VkCommandBuffer commandBuffer, const VkDependencyInfo * pDependencyInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdPipelineBarrier2KHR( commandBuffer, pDependencyInfo ); + } + + void + vkCmdWriteTimestamp2KHR( VkCommandBuffer commandBuffer, VkPipelineStageFlags2 stage, VkQueryPool queryPool, uint32_t query ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdWriteTimestamp2KHR( commandBuffer, stage, queryPool, query ); + } + + VkResult vkQueueSubmit2KHR( VkQueue queue, uint32_t submitCount, const VkSubmitInfo2 * pSubmits, VkFence fence ) const VULKAN_HPP_NOEXCEPT + { + return ::vkQueueSubmit2KHR( queue, submitCount, pSubmits, fence ); + } + + //=== VK_EXT_descriptor_buffer === + + void vkGetDescriptorSetLayoutSizeEXT( VkDevice device, VkDescriptorSetLayout layout, VkDeviceSize * pLayoutSizeInBytes ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDescriptorSetLayoutSizeEXT( device, layout, pLayoutSizeInBytes ); + } + + void vkGetDescriptorSetLayoutBindingOffsetEXT( VkDevice device, + VkDescriptorSetLayout layout, + uint32_t binding, + VkDeviceSize * pOffset ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDescriptorSetLayoutBindingOffsetEXT( device, layout, binding, pOffset ); + } + + void vkGetDescriptorEXT( VkDevice device, const VkDescriptorGetInfoEXT * pDescriptorInfo, size_t dataSize, void * pDescriptor ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDescriptorEXT( device, pDescriptorInfo, dataSize, pDescriptor ); + } + + void vkCmdBindDescriptorBuffersEXT( VkCommandBuffer commandBuffer, + uint32_t bufferCount, + const VkDescriptorBufferBindingInfoEXT * pBindingInfos ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBindDescriptorBuffersEXT( commandBuffer, bufferCount, pBindingInfos ); + } + + void vkCmdSetDescriptorBufferOffsetsEXT( VkCommandBuffer commandBuffer, + VkPipelineBindPoint pipelineBindPoint, + VkPipelineLayout layout, + uint32_t firstSet, + uint32_t setCount, + const uint32_t * pBufferIndices, + const VkDeviceSize * pOffsets ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDescriptorBufferOffsetsEXT( commandBuffer, pipelineBindPoint, layout, firstSet, setCount, pBufferIndices, pOffsets ); + } + + void vkCmdBindDescriptorBufferEmbeddedSamplersEXT( VkCommandBuffer commandBuffer, + VkPipelineBindPoint pipelineBindPoint, + VkPipelineLayout layout, + uint32_t set ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBindDescriptorBufferEmbeddedSamplersEXT( commandBuffer, pipelineBindPoint, layout, set ); + } + + VkResult + vkGetBufferOpaqueCaptureDescriptorDataEXT( VkDevice device, const VkBufferCaptureDescriptorDataInfoEXT * pInfo, void * pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetBufferOpaqueCaptureDescriptorDataEXT( device, pInfo, pData ); + } + + VkResult + vkGetImageOpaqueCaptureDescriptorDataEXT( VkDevice device, const VkImageCaptureDescriptorDataInfoEXT * pInfo, void * pData ) const 
VULKAN_HPP_NOEXCEPT + { + return ::vkGetImageOpaqueCaptureDescriptorDataEXT( device, pInfo, pData ); + } + + VkResult vkGetImageViewOpaqueCaptureDescriptorDataEXT( VkDevice device, + const VkImageViewCaptureDescriptorDataInfoEXT * pInfo, + void * pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetImageViewOpaqueCaptureDescriptorDataEXT( device, pInfo, pData ); + } + + VkResult vkGetSamplerOpaqueCaptureDescriptorDataEXT( VkDevice device, + const VkSamplerCaptureDescriptorDataInfoEXT * pInfo, + void * pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetSamplerOpaqueCaptureDescriptorDataEXT( device, pInfo, pData ); + } + + VkResult vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT( VkDevice device, + const VkAccelerationStructureCaptureDescriptorDataInfoEXT * pInfo, + void * pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT( device, pInfo, pData ); + } + + //=== VK_NV_fragment_shading_rate_enums === + + void vkCmdSetFragmentShadingRateEnumNV( VkCommandBuffer commandBuffer, + VkFragmentShadingRateNV shadingRate, + const VkFragmentShadingRateCombinerOpKHR combinerOps[2] ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetFragmentShadingRateEnumNV( commandBuffer, shadingRate, combinerOps ); + } + + //=== VK_EXT_mesh_shader === + + void vkCmdDrawMeshTasksEXT( VkCommandBuffer commandBuffer, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawMeshTasksEXT( commandBuffer, groupCountX, groupCountY, groupCountZ ); + } + + void vkCmdDrawMeshTasksIndirectEXT( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawMeshTasksIndirectEXT( commandBuffer, buffer, offset, drawCount, stride ); + } + + void vkCmdDrawMeshTasksIndirectCountEXT( VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawMeshTasksIndirectCountEXT( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); + } + + //=== VK_KHR_copy_commands2 === + + void vkCmdCopyBuffer2KHR( VkCommandBuffer commandBuffer, const VkCopyBufferInfo2 * pCopyBufferInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyBuffer2KHR( commandBuffer, pCopyBufferInfo ); + } + + void vkCmdCopyImage2KHR( VkCommandBuffer commandBuffer, const VkCopyImageInfo2 * pCopyImageInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyImage2KHR( commandBuffer, pCopyImageInfo ); + } + + void vkCmdCopyBufferToImage2KHR( VkCommandBuffer commandBuffer, const VkCopyBufferToImageInfo2 * pCopyBufferToImageInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyBufferToImage2KHR( commandBuffer, pCopyBufferToImageInfo ); + } + + void vkCmdCopyImageToBuffer2KHR( VkCommandBuffer commandBuffer, const VkCopyImageToBufferInfo2 * pCopyImageToBufferInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyImageToBuffer2KHR( commandBuffer, pCopyImageToBufferInfo ); + } + + void vkCmdBlitImage2KHR( VkCommandBuffer commandBuffer, const VkBlitImageInfo2 * pBlitImageInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBlitImage2KHR( commandBuffer, pBlitImageInfo ); + } + + void vkCmdResolveImage2KHR( VkCommandBuffer commandBuffer, const VkResolveImageInfo2 * pResolveImageInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdResolveImage2KHR( commandBuffer, 
pResolveImageInfo ); + } + + //=== VK_EXT_device_fault === + + VkResult vkGetDeviceFaultInfoEXT( VkDevice device, VkDeviceFaultCountsEXT * pFaultCounts, VkDeviceFaultInfoEXT * pFaultInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceFaultInfoEXT( device, pFaultCounts, pFaultInfo ); + } + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_NV_acquire_winrt_display === + + VkResult vkAcquireWinrtDisplayNV( VkPhysicalDevice physicalDevice, VkDisplayKHR display ) const VULKAN_HPP_NOEXCEPT + { + return ::vkAcquireWinrtDisplayNV( physicalDevice, display ); + } + + VkResult vkGetWinrtDisplayNV( VkPhysicalDevice physicalDevice, uint32_t deviceRelativeId, VkDisplayKHR * pDisplay ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetWinrtDisplayNV( physicalDevice, deviceRelativeId, pDisplay ); + } # endif /*VK_USE_PLATFORM_WIN32_KHR*/ # if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) - //=== VK_EXT_directfb_surface === + //=== VK_EXT_directfb_surface === - VkResult vkCreateDirectFBSurfaceEXT( VkInstance instance, - const VkDirectFBSurfaceCreateInfoEXT * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateDirectFBSurfaceEXT( instance, pCreateInfo, pAllocator, pSurface ); - } + VkResult vkCreateDirectFBSurfaceEXT( VkInstance instance, + const VkDirectFBSurfaceCreateInfoEXT * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateDirectFBSurfaceEXT( instance, pCreateInfo, pAllocator, pSurface ); + } - VkBool32 - vkGetPhysicalDeviceDirectFBPresentationSupportEXT( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, IDirectFB * dfb ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceDirectFBPresentationSupportEXT( physicalDevice, queueFamilyIndex, dfb ); - } + VkBool32 vkGetPhysicalDeviceDirectFBPresentationSupportEXT( VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + IDirectFB * dfb ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceDirectFBPresentationSupportEXT( physicalDevice, queueFamilyIndex, dfb ); + } # endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ - //=== VK_EXT_vertex_input_dynamic_state === + //=== VK_EXT_vertex_input_dynamic_state === - void vkCmdSetVertexInputEXT( VkCommandBuffer commandBuffer, - uint32_t vertexBindingDescriptionCount, - const VkVertexInputBindingDescription2EXT * pVertexBindingDescriptions, - uint32_t vertexAttributeDescriptionCount, - const VkVertexInputAttributeDescription2EXT * pVertexAttributeDescriptions ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetVertexInputEXT( - commandBuffer, vertexBindingDescriptionCount, pVertexBindingDescriptions, vertexAttributeDescriptionCount, pVertexAttributeDescriptions ); - } + void vkCmdSetVertexInputEXT( VkCommandBuffer commandBuffer, + uint32_t vertexBindingDescriptionCount, + const VkVertexInputBindingDescription2EXT * pVertexBindingDescriptions, + uint32_t vertexAttributeDescriptionCount, + const VkVertexInputAttributeDescription2EXT * pVertexAttributeDescriptions ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetVertexInputEXT( + commandBuffer, vertexBindingDescriptionCount, pVertexBindingDescriptions, vertexAttributeDescriptionCount, pVertexAttributeDescriptions ); + } # if defined( VK_USE_PLATFORM_FUCHSIA ) - //=== VK_FUCHSIA_external_memory === + //=== VK_FUCHSIA_external_memory === - VkResult vkGetMemoryZirconHandleFUCHSIA( VkDevice device, - const VkMemoryGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo, - zx_handle_t * 
pZirconHandle ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetMemoryZirconHandleFUCHSIA( device, pGetZirconHandleInfo, pZirconHandle ); - } + VkResult vkGetMemoryZirconHandleFUCHSIA( VkDevice device, + const VkMemoryGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo, + zx_handle_t * pZirconHandle ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetMemoryZirconHandleFUCHSIA( device, pGetZirconHandleInfo, pZirconHandle ); + } - VkResult vkGetMemoryZirconHandlePropertiesFUCHSIA( VkDevice device, - VkExternalMemoryHandleTypeFlagBits handleType, - zx_handle_t zirconHandle, - VkMemoryZirconHandlePropertiesFUCHSIA * pMemoryZirconHandleProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetMemoryZirconHandlePropertiesFUCHSIA( device, handleType, zirconHandle, pMemoryZirconHandleProperties ); - } + VkResult vkGetMemoryZirconHandlePropertiesFUCHSIA( VkDevice device, + VkExternalMemoryHandleTypeFlagBits handleType, + zx_handle_t zirconHandle, + VkMemoryZirconHandlePropertiesFUCHSIA * pMemoryZirconHandleProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetMemoryZirconHandlePropertiesFUCHSIA( device, handleType, zirconHandle, pMemoryZirconHandleProperties ); + } # endif /*VK_USE_PLATFORM_FUCHSIA*/ # if defined( VK_USE_PLATFORM_FUCHSIA ) - //=== VK_FUCHSIA_external_semaphore === + //=== VK_FUCHSIA_external_semaphore === - VkResult vkImportSemaphoreZirconHandleFUCHSIA( VkDevice device, - const VkImportSemaphoreZirconHandleInfoFUCHSIA * pImportSemaphoreZirconHandleInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkImportSemaphoreZirconHandleFUCHSIA( device, pImportSemaphoreZirconHandleInfo ); - } + VkResult + vkImportSemaphoreZirconHandleFUCHSIA( VkDevice device, + const VkImportSemaphoreZirconHandleInfoFUCHSIA * pImportSemaphoreZirconHandleInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkImportSemaphoreZirconHandleFUCHSIA( device, pImportSemaphoreZirconHandleInfo ); + } - VkResult vkGetSemaphoreZirconHandleFUCHSIA( VkDevice device, - const VkSemaphoreGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo, - zx_handle_t * pZirconHandle ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetSemaphoreZirconHandleFUCHSIA( device, pGetZirconHandleInfo, pZirconHandle ); - } + VkResult vkGetSemaphoreZirconHandleFUCHSIA( VkDevice device, + const VkSemaphoreGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo, + zx_handle_t * pZirconHandle ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetSemaphoreZirconHandleFUCHSIA( device, pGetZirconHandleInfo, pZirconHandle ); + } # endif /*VK_USE_PLATFORM_FUCHSIA*/ # if defined( VK_USE_PLATFORM_FUCHSIA ) - //=== VK_FUCHSIA_buffer_collection === + //=== VK_FUCHSIA_buffer_collection === - VkResult vkCreateBufferCollectionFUCHSIA( VkDevice device, - const VkBufferCollectionCreateInfoFUCHSIA * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkBufferCollectionFUCHSIA * pCollection ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateBufferCollectionFUCHSIA( device, pCreateInfo, pAllocator, pCollection ); - } + VkResult vkCreateBufferCollectionFUCHSIA( VkDevice device, + const VkBufferCollectionCreateInfoFUCHSIA * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkBufferCollectionFUCHSIA * pCollection ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateBufferCollectionFUCHSIA( device, pCreateInfo, pAllocator, pCollection ); + } - VkResult vkSetBufferCollectionImageConstraintsFUCHSIA( VkDevice device, - VkBufferCollectionFUCHSIA collection, - const VkImageConstraintsInfoFUCHSIA * pImageConstraintsInfo ) const VULKAN_HPP_NOEXCEPT - { - return 
::vkSetBufferCollectionImageConstraintsFUCHSIA( device, collection, pImageConstraintsInfo ); - } + VkResult vkSetBufferCollectionImageConstraintsFUCHSIA( VkDevice device, + VkBufferCollectionFUCHSIA collection, + const VkImageConstraintsInfoFUCHSIA * pImageConstraintsInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkSetBufferCollectionImageConstraintsFUCHSIA( device, collection, pImageConstraintsInfo ); + } - VkResult vkSetBufferCollectionBufferConstraintsFUCHSIA( VkDevice device, - VkBufferCollectionFUCHSIA collection, - const VkBufferConstraintsInfoFUCHSIA * pBufferConstraintsInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkSetBufferCollectionBufferConstraintsFUCHSIA( device, collection, pBufferConstraintsInfo ); - } + VkResult vkSetBufferCollectionBufferConstraintsFUCHSIA( VkDevice device, + VkBufferCollectionFUCHSIA collection, + const VkBufferConstraintsInfoFUCHSIA * pBufferConstraintsInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkSetBufferCollectionBufferConstraintsFUCHSIA( device, collection, pBufferConstraintsInfo ); + } - void vkDestroyBufferCollectionFUCHSIA( VkDevice device, - VkBufferCollectionFUCHSIA collection, - const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyBufferCollectionFUCHSIA( device, collection, pAllocator ); - } + void vkDestroyBufferCollectionFUCHSIA( VkDevice device, + VkBufferCollectionFUCHSIA collection, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyBufferCollectionFUCHSIA( device, collection, pAllocator ); + } - VkResult vkGetBufferCollectionPropertiesFUCHSIA( VkDevice device, - VkBufferCollectionFUCHSIA collection, - VkBufferCollectionPropertiesFUCHSIA * pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetBufferCollectionPropertiesFUCHSIA( device, collection, pProperties ); - } + VkResult vkGetBufferCollectionPropertiesFUCHSIA( VkDevice device, + VkBufferCollectionFUCHSIA collection, + VkBufferCollectionPropertiesFUCHSIA * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetBufferCollectionPropertiesFUCHSIA( device, collection, pProperties ); + } # endif /*VK_USE_PLATFORM_FUCHSIA*/ - //=== VK_HUAWEI_subpass_shading === + //=== VK_HUAWEI_subpass_shading === - VkResult - vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI( VkDevice device, VkRenderPass renderpass, VkExtent2D * pMaxWorkgroupSize ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI( device, renderpass, pMaxWorkgroupSize ); - } + VkResult + vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI( VkDevice device, VkRenderPass renderpass, VkExtent2D * pMaxWorkgroupSize ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI( device, renderpass, pMaxWorkgroupSize ); + } - void vkCmdSubpassShadingHUAWEI( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSubpassShadingHUAWEI( commandBuffer ); - } + void vkCmdSubpassShadingHUAWEI( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSubpassShadingHUAWEI( commandBuffer ); + } - //=== VK_HUAWEI_invocation_mask === + //=== VK_HUAWEI_invocation_mask === - void vkCmdBindInvocationMaskHUAWEI( VkCommandBuffer commandBuffer, VkImageView imageView, VkImageLayout imageLayout ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBindInvocationMaskHUAWEI( commandBuffer, imageView, imageLayout ); - } + void vkCmdBindInvocationMaskHUAWEI( VkCommandBuffer commandBuffer, VkImageView imageView, VkImageLayout imageLayout ) const VULKAN_HPP_NOEXCEPT + { 
+ return ::vkCmdBindInvocationMaskHUAWEI( commandBuffer, imageView, imageLayout ); + } - //=== VK_NV_external_memory_rdma === + //=== VK_NV_external_memory_rdma === - VkResult vkGetMemoryRemoteAddressNV( VkDevice device, - const VkMemoryGetRemoteAddressInfoNV * pMemoryGetRemoteAddressInfo, - VkRemoteAddressNV * pAddress ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetMemoryRemoteAddressNV( device, pMemoryGetRemoteAddressInfo, pAddress ); - } + VkResult vkGetMemoryRemoteAddressNV( VkDevice device, + const VkMemoryGetRemoteAddressInfoNV * pMemoryGetRemoteAddressInfo, + VkRemoteAddressNV * pAddress ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetMemoryRemoteAddressNV( device, pMemoryGetRemoteAddressInfo, pAddress ); + } - //=== VK_EXT_pipeline_properties === + //=== VK_EXT_pipeline_properties === - VkResult - vkGetPipelinePropertiesEXT( VkDevice device, const VkPipelineInfoEXT * pPipelineInfo, VkBaseOutStructure * pPipelineProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPipelinePropertiesEXT( device, pPipelineInfo, pPipelineProperties ); - } + VkResult vkGetPipelinePropertiesEXT( VkDevice device, + const VkPipelineInfoEXT * pPipelineInfo, + VkBaseOutStructure * pPipelineProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPipelinePropertiesEXT( device, pPipelineInfo, pPipelineProperties ); + } - //=== VK_EXT_extended_dynamic_state2 === + //=== VK_EXT_extended_dynamic_state2 === - void vkCmdSetPatchControlPointsEXT( VkCommandBuffer commandBuffer, uint32_t patchControlPoints ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetPatchControlPointsEXT( commandBuffer, patchControlPoints ); - } + void vkCmdSetPatchControlPointsEXT( VkCommandBuffer commandBuffer, uint32_t patchControlPoints ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetPatchControlPointsEXT( commandBuffer, patchControlPoints ); + } - void vkCmdSetRasterizerDiscardEnableEXT( VkCommandBuffer commandBuffer, VkBool32 rasterizerDiscardEnable ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetRasterizerDiscardEnableEXT( commandBuffer, rasterizerDiscardEnable ); - } + void vkCmdSetRasterizerDiscardEnableEXT( VkCommandBuffer commandBuffer, VkBool32 rasterizerDiscardEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetRasterizerDiscardEnableEXT( commandBuffer, rasterizerDiscardEnable ); + } - void vkCmdSetDepthBiasEnableEXT( VkCommandBuffer commandBuffer, VkBool32 depthBiasEnable ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetDepthBiasEnableEXT( commandBuffer, depthBiasEnable ); - } + void vkCmdSetDepthBiasEnableEXT( VkCommandBuffer commandBuffer, VkBool32 depthBiasEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDepthBiasEnableEXT( commandBuffer, depthBiasEnable ); + } - void vkCmdSetLogicOpEXT( VkCommandBuffer commandBuffer, VkLogicOp logicOp ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetLogicOpEXT( commandBuffer, logicOp ); - } + void vkCmdSetLogicOpEXT( VkCommandBuffer commandBuffer, VkLogicOp logicOp ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetLogicOpEXT( commandBuffer, logicOp ); + } - void vkCmdSetPrimitiveRestartEnableEXT( VkCommandBuffer commandBuffer, VkBool32 primitiveRestartEnable ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetPrimitiveRestartEnableEXT( commandBuffer, primitiveRestartEnable ); - } + void vkCmdSetPrimitiveRestartEnableEXT( VkCommandBuffer commandBuffer, VkBool32 primitiveRestartEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetPrimitiveRestartEnableEXT( commandBuffer, primitiveRestartEnable ); + } # if defined( VK_USE_PLATFORM_SCREEN_QNX ) - //=== 
VK_QNX_screen_surface === + //=== VK_QNX_screen_surface === - VkResult vkCreateScreenSurfaceQNX( VkInstance instance, - const VkScreenSurfaceCreateInfoQNX * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateScreenSurfaceQNX( instance, pCreateInfo, pAllocator, pSurface ); - } + VkResult vkCreateScreenSurfaceQNX( VkInstance instance, + const VkScreenSurfaceCreateInfoQNX * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateScreenSurfaceQNX( instance, pCreateInfo, pAllocator, pSurface ); + } - VkBool32 vkGetPhysicalDeviceScreenPresentationSupportQNX( VkPhysicalDevice physicalDevice, - uint32_t queueFamilyIndex, - struct _screen_window * window ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceScreenPresentationSupportQNX( physicalDevice, queueFamilyIndex, window ); - } + VkBool32 vkGetPhysicalDeviceScreenPresentationSupportQNX( VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + struct _screen_window * window ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceScreenPresentationSupportQNX( physicalDevice, queueFamilyIndex, window ); + } # endif /*VK_USE_PLATFORM_SCREEN_QNX*/ - //=== VK_EXT_color_write_enable === - - void vkCmdSetColorWriteEnableEXT( VkCommandBuffer commandBuffer, uint32_t attachmentCount, const VkBool32 * pColorWriteEnables ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetColorWriteEnableEXT( commandBuffer, attachmentCount, pColorWriteEnables ); - } - - //=== VK_KHR_ray_tracing_maintenance1 === - - void vkCmdTraceRaysIndirect2KHR( VkCommandBuffer commandBuffer, VkDeviceAddress indirectDeviceAddress ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdTraceRaysIndirect2KHR( commandBuffer, indirectDeviceAddress ); - } - - //=== VK_EXT_multi_draw === - - void vkCmdDrawMultiEXT( VkCommandBuffer commandBuffer, - uint32_t drawCount, - const VkMultiDrawInfoEXT * pVertexInfo, - uint32_t instanceCount, - uint32_t firstInstance, - uint32_t stride ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDrawMultiEXT( commandBuffer, drawCount, pVertexInfo, instanceCount, firstInstance, stride ); - } - - void vkCmdDrawMultiIndexedEXT( VkCommandBuffer commandBuffer, - uint32_t drawCount, - const VkMultiDrawIndexedInfoEXT * pIndexInfo, - uint32_t instanceCount, - uint32_t firstInstance, - uint32_t stride, - const int32_t * pVertexOffset ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDrawMultiIndexedEXT( commandBuffer, drawCount, pIndexInfo, instanceCount, firstInstance, stride, pVertexOffset ); - } - - //=== VK_EXT_opacity_micromap === - - VkResult vkCreateMicromapEXT( VkDevice device, - const VkMicromapCreateInfoEXT * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkMicromapEXT * pMicromap ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateMicromapEXT( device, pCreateInfo, pAllocator, pMicromap ); - } - - void vkDestroyMicromapEXT( VkDevice device, VkMicromapEXT micromap, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyMicromapEXT( device, micromap, pAllocator ); - } - - void vkCmdBuildMicromapsEXT( VkCommandBuffer commandBuffer, uint32_t infoCount, const VkMicromapBuildInfoEXT * pInfos ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBuildMicromapsEXT( commandBuffer, infoCount, pInfos ); - } - - VkResult vkBuildMicromapsEXT( VkDevice device, - VkDeferredOperationKHR deferredOperation, - uint32_t infoCount, - const VkMicromapBuildInfoEXT * pInfos ) 
const VULKAN_HPP_NOEXCEPT - { - return ::vkBuildMicromapsEXT( device, deferredOperation, infoCount, pInfos ); - } - - VkResult vkCopyMicromapEXT( VkDevice device, VkDeferredOperationKHR deferredOperation, const VkCopyMicromapInfoEXT * pInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCopyMicromapEXT( device, deferredOperation, pInfo ); - } - - VkResult vkCopyMicromapToMemoryEXT( VkDevice device, - VkDeferredOperationKHR deferredOperation, - const VkCopyMicromapToMemoryInfoEXT * pInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCopyMicromapToMemoryEXT( device, deferredOperation, pInfo ); - } - - VkResult vkCopyMemoryToMicromapEXT( VkDevice device, - VkDeferredOperationKHR deferredOperation, - const VkCopyMemoryToMicromapInfoEXT * pInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCopyMemoryToMicromapEXT( device, deferredOperation, pInfo ); - } - - VkResult vkWriteMicromapsPropertiesEXT( VkDevice device, - uint32_t micromapCount, - const VkMicromapEXT * pMicromaps, - VkQueryType queryType, - size_t dataSize, - void * pData, - size_t stride ) const VULKAN_HPP_NOEXCEPT - { - return ::vkWriteMicromapsPropertiesEXT( device, micromapCount, pMicromaps, queryType, dataSize, pData, stride ); - } - - void vkCmdCopyMicromapEXT( VkCommandBuffer commandBuffer, const VkCopyMicromapInfoEXT * pInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdCopyMicromapEXT( commandBuffer, pInfo ); - } - - void vkCmdCopyMicromapToMemoryEXT( VkCommandBuffer commandBuffer, const VkCopyMicromapToMemoryInfoEXT * pInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdCopyMicromapToMemoryEXT( commandBuffer, pInfo ); - } - - void vkCmdCopyMemoryToMicromapEXT( VkCommandBuffer commandBuffer, const VkCopyMemoryToMicromapInfoEXT * pInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdCopyMemoryToMicromapEXT( commandBuffer, pInfo ); - } - - void vkCmdWriteMicromapsPropertiesEXT( VkCommandBuffer commandBuffer, - uint32_t micromapCount, - const VkMicromapEXT * pMicromaps, - VkQueryType queryType, - VkQueryPool queryPool, - uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdWriteMicromapsPropertiesEXT( commandBuffer, micromapCount, pMicromaps, queryType, queryPool, firstQuery ); - } - - void vkGetDeviceMicromapCompatibilityEXT( VkDevice device, - const VkMicromapVersionInfoEXT * pVersionInfo, - VkAccelerationStructureCompatibilityKHR * pCompatibility ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDeviceMicromapCompatibilityEXT( device, pVersionInfo, pCompatibility ); - } - - void vkGetMicromapBuildSizesEXT( VkDevice device, - VkAccelerationStructureBuildTypeKHR buildType, - const VkMicromapBuildInfoEXT * pBuildInfo, - VkMicromapBuildSizesInfoEXT * pSizeInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetMicromapBuildSizesEXT( device, buildType, pBuildInfo, pSizeInfo ); - } - - //=== VK_HUAWEI_cluster_culling_shader === - - void vkCmdDrawClusterHUAWEI( VkCommandBuffer commandBuffer, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDrawClusterHUAWEI( commandBuffer, groupCountX, groupCountY, groupCountZ ); - } - - void vkCmdDrawClusterIndirectHUAWEI( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDrawClusterIndirectHUAWEI( commandBuffer, buffer, offset ); - } - - //=== VK_EXT_pageable_device_local_memory === - - void vkSetDeviceMemoryPriorityEXT( VkDevice device, VkDeviceMemory memory, float priority ) const VULKAN_HPP_NOEXCEPT - { - return ::vkSetDeviceMemoryPriorityEXT( device, 
memory, priority ); - } - - //=== VK_KHR_maintenance4 === - - void vkGetDeviceBufferMemoryRequirementsKHR( VkDevice device, - const VkDeviceBufferMemoryRequirements * pInfo, - VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDeviceBufferMemoryRequirementsKHR( device, pInfo, pMemoryRequirements ); - } - - void vkGetDeviceImageMemoryRequirementsKHR( VkDevice device, - const VkDeviceImageMemoryRequirements * pInfo, - VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDeviceImageMemoryRequirementsKHR( device, pInfo, pMemoryRequirements ); - } - - void vkGetDeviceImageSparseMemoryRequirementsKHR( VkDevice device, - const VkDeviceImageMemoryRequirements * pInfo, - uint32_t * pSparseMemoryRequirementCount, - VkSparseImageMemoryRequirements2 * pSparseMemoryRequirements ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDeviceImageSparseMemoryRequirementsKHR( device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements ); - } - - //=== VK_VALVE_descriptor_set_host_mapping === - - void vkGetDescriptorSetLayoutHostMappingInfoVALVE( VkDevice device, - const VkDescriptorSetBindingReferenceVALVE * pBindingReference, - VkDescriptorSetLayoutHostMappingInfoVALVE * pHostMapping ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDescriptorSetLayoutHostMappingInfoVALVE( device, pBindingReference, pHostMapping ); - } - - void vkGetDescriptorSetHostMappingVALVE( VkDevice device, VkDescriptorSet descriptorSet, void ** ppData ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDescriptorSetHostMappingVALVE( device, descriptorSet, ppData ); - } - - //=== VK_NV_copy_memory_indirect === - - void vkCmdCopyMemoryIndirectNV( VkCommandBuffer commandBuffer, - VkDeviceAddress copyBufferAddress, - uint32_t copyCount, - uint32_t stride ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdCopyMemoryIndirectNV( commandBuffer, copyBufferAddress, copyCount, stride ); - } - - void vkCmdCopyMemoryToImageIndirectNV( VkCommandBuffer commandBuffer, - VkDeviceAddress copyBufferAddress, - uint32_t copyCount, - uint32_t stride, - VkImage dstImage, - VkImageLayout dstImageLayout, - const VkImageSubresourceLayers * pImageSubresources ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdCopyMemoryToImageIndirectNV( commandBuffer, copyBufferAddress, copyCount, stride, dstImage, dstImageLayout, pImageSubresources ); - } - - //=== VK_NV_memory_decompression === - - void vkCmdDecompressMemoryNV( VkCommandBuffer commandBuffer, - uint32_t decompressRegionCount, - const VkDecompressMemoryRegionNV * pDecompressMemoryRegions ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDecompressMemoryNV( commandBuffer, decompressRegionCount, pDecompressMemoryRegions ); - } - - void vkCmdDecompressMemoryIndirectCountNV( VkCommandBuffer commandBuffer, - VkDeviceAddress indirectCommandsAddress, - VkDeviceAddress indirectCommandsCountAddress, - uint32_t stride ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDecompressMemoryIndirectCountNV( commandBuffer, indirectCommandsAddress, indirectCommandsCountAddress, stride ); - } - - //=== VK_NV_device_generated_commands_compute === - - void vkGetPipelineIndirectMemoryRequirementsNV( VkDevice device, - const VkComputePipelineCreateInfo * pCreateInfo, - VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPipelineIndirectMemoryRequirementsNV( device, pCreateInfo, pMemoryRequirements ); - } - - void - vkCmdUpdatePipelineIndirectBufferNV( VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, 
VkPipeline pipeline ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdUpdatePipelineIndirectBufferNV( commandBuffer, pipelineBindPoint, pipeline ); - } - - VkDeviceAddress vkGetPipelineIndirectDeviceAddressNV( VkDevice device, const VkPipelineIndirectDeviceAddressInfoNV * pInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPipelineIndirectDeviceAddressNV( device, pInfo ); - } - - //=== VK_EXT_extended_dynamic_state3 === - - void vkCmdSetDepthClampEnableEXT( VkCommandBuffer commandBuffer, VkBool32 depthClampEnable ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetDepthClampEnableEXT( commandBuffer, depthClampEnable ); - } - - void vkCmdSetPolygonModeEXT( VkCommandBuffer commandBuffer, VkPolygonMode polygonMode ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetPolygonModeEXT( commandBuffer, polygonMode ); - } - - void vkCmdSetRasterizationSamplesEXT( VkCommandBuffer commandBuffer, VkSampleCountFlagBits rasterizationSamples ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetRasterizationSamplesEXT( commandBuffer, rasterizationSamples ); - } - - void vkCmdSetSampleMaskEXT( VkCommandBuffer commandBuffer, VkSampleCountFlagBits samples, const VkSampleMask * pSampleMask ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetSampleMaskEXT( commandBuffer, samples, pSampleMask ); - } - - void vkCmdSetAlphaToCoverageEnableEXT( VkCommandBuffer commandBuffer, VkBool32 alphaToCoverageEnable ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetAlphaToCoverageEnableEXT( commandBuffer, alphaToCoverageEnable ); - } - - void vkCmdSetAlphaToOneEnableEXT( VkCommandBuffer commandBuffer, VkBool32 alphaToOneEnable ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetAlphaToOneEnableEXT( commandBuffer, alphaToOneEnable ); - } - - void vkCmdSetLogicOpEnableEXT( VkCommandBuffer commandBuffer, VkBool32 logicOpEnable ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetLogicOpEnableEXT( commandBuffer, logicOpEnable ); - } - - void vkCmdSetColorBlendEnableEXT( VkCommandBuffer commandBuffer, - uint32_t firstAttachment, - uint32_t attachmentCount, - const VkBool32 * pColorBlendEnables ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetColorBlendEnableEXT( commandBuffer, firstAttachment, attachmentCount, pColorBlendEnables ); - } - - void vkCmdSetColorBlendEquationEXT( VkCommandBuffer commandBuffer, - uint32_t firstAttachment, - uint32_t attachmentCount, - const VkColorBlendEquationEXT * pColorBlendEquations ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetColorBlendEquationEXT( commandBuffer, firstAttachment, attachmentCount, pColorBlendEquations ); - } - - void vkCmdSetColorWriteMaskEXT( VkCommandBuffer commandBuffer, - uint32_t firstAttachment, - uint32_t attachmentCount, - const VkColorComponentFlags * pColorWriteMasks ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetColorWriteMaskEXT( commandBuffer, firstAttachment, attachmentCount, pColorWriteMasks ); - } - - void vkCmdSetTessellationDomainOriginEXT( VkCommandBuffer commandBuffer, VkTessellationDomainOrigin domainOrigin ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetTessellationDomainOriginEXT( commandBuffer, domainOrigin ); - } - - void vkCmdSetRasterizationStreamEXT( VkCommandBuffer commandBuffer, uint32_t rasterizationStream ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetRasterizationStreamEXT( commandBuffer, rasterizationStream ); - } - - void vkCmdSetConservativeRasterizationModeEXT( VkCommandBuffer commandBuffer, - VkConservativeRasterizationModeEXT conservativeRasterizationMode ) const VULKAN_HPP_NOEXCEPT - { - return 
::vkCmdSetConservativeRasterizationModeEXT( commandBuffer, conservativeRasterizationMode ); - } - - void vkCmdSetExtraPrimitiveOverestimationSizeEXT( VkCommandBuffer commandBuffer, float extraPrimitiveOverestimationSize ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetExtraPrimitiveOverestimationSizeEXT( commandBuffer, extraPrimitiveOverestimationSize ); - } - - void vkCmdSetDepthClipEnableEXT( VkCommandBuffer commandBuffer, VkBool32 depthClipEnable ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetDepthClipEnableEXT( commandBuffer, depthClipEnable ); - } - - void vkCmdSetSampleLocationsEnableEXT( VkCommandBuffer commandBuffer, VkBool32 sampleLocationsEnable ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetSampleLocationsEnableEXT( commandBuffer, sampleLocationsEnable ); - } - - void vkCmdSetColorBlendAdvancedEXT( VkCommandBuffer commandBuffer, - uint32_t firstAttachment, - uint32_t attachmentCount, - const VkColorBlendAdvancedEXT * pColorBlendAdvanced ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetColorBlendAdvancedEXT( commandBuffer, firstAttachment, attachmentCount, pColorBlendAdvanced ); - } - - void vkCmdSetProvokingVertexModeEXT( VkCommandBuffer commandBuffer, VkProvokingVertexModeEXT provokingVertexMode ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetProvokingVertexModeEXT( commandBuffer, provokingVertexMode ); - } - - void vkCmdSetLineRasterizationModeEXT( VkCommandBuffer commandBuffer, VkLineRasterizationModeEXT lineRasterizationMode ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetLineRasterizationModeEXT( commandBuffer, lineRasterizationMode ); - } - - void vkCmdSetLineStippleEnableEXT( VkCommandBuffer commandBuffer, VkBool32 stippledLineEnable ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetLineStippleEnableEXT( commandBuffer, stippledLineEnable ); - } - - void vkCmdSetDepthClipNegativeOneToOneEXT( VkCommandBuffer commandBuffer, VkBool32 negativeOneToOne ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetDepthClipNegativeOneToOneEXT( commandBuffer, negativeOneToOne ); - } - - void vkCmdSetViewportWScalingEnableNV( VkCommandBuffer commandBuffer, VkBool32 viewportWScalingEnable ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetViewportWScalingEnableNV( commandBuffer, viewportWScalingEnable ); - } - - void vkCmdSetViewportSwizzleNV( VkCommandBuffer commandBuffer, - uint32_t firstViewport, - uint32_t viewportCount, - const VkViewportSwizzleNV * pViewportSwizzles ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetViewportSwizzleNV( commandBuffer, firstViewport, viewportCount, pViewportSwizzles ); - } - - void vkCmdSetCoverageToColorEnableNV( VkCommandBuffer commandBuffer, VkBool32 coverageToColorEnable ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetCoverageToColorEnableNV( commandBuffer, coverageToColorEnable ); - } - - void vkCmdSetCoverageToColorLocationNV( VkCommandBuffer commandBuffer, uint32_t coverageToColorLocation ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetCoverageToColorLocationNV( commandBuffer, coverageToColorLocation ); - } - - void vkCmdSetCoverageModulationModeNV( VkCommandBuffer commandBuffer, VkCoverageModulationModeNV coverageModulationMode ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetCoverageModulationModeNV( commandBuffer, coverageModulationMode ); - } - - void vkCmdSetCoverageModulationTableEnableNV( VkCommandBuffer commandBuffer, VkBool32 coverageModulationTableEnable ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetCoverageModulationTableEnableNV( commandBuffer, coverageModulationTableEnable ); - } - - void 
vkCmdSetCoverageModulationTableNV( VkCommandBuffer commandBuffer, - uint32_t coverageModulationTableCount, - const float * pCoverageModulationTable ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetCoverageModulationTableNV( commandBuffer, coverageModulationTableCount, pCoverageModulationTable ); - } - - void vkCmdSetShadingRateImageEnableNV( VkCommandBuffer commandBuffer, VkBool32 shadingRateImageEnable ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetShadingRateImageEnableNV( commandBuffer, shadingRateImageEnable ); - } - - void vkCmdSetRepresentativeFragmentTestEnableNV( VkCommandBuffer commandBuffer, VkBool32 representativeFragmentTestEnable ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetRepresentativeFragmentTestEnableNV( commandBuffer, representativeFragmentTestEnable ); - } - - void vkCmdSetCoverageReductionModeNV( VkCommandBuffer commandBuffer, VkCoverageReductionModeNV coverageReductionMode ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetCoverageReductionModeNV( commandBuffer, coverageReductionMode ); - } - - //=== VK_EXT_shader_module_identifier === - - void vkGetShaderModuleIdentifierEXT( VkDevice device, VkShaderModule shaderModule, VkShaderModuleIdentifierEXT * pIdentifier ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetShaderModuleIdentifierEXT( device, shaderModule, pIdentifier ); - } - - void vkGetShaderModuleCreateInfoIdentifierEXT( VkDevice device, - const VkShaderModuleCreateInfo * pCreateInfo, - VkShaderModuleIdentifierEXT * pIdentifier ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetShaderModuleCreateInfoIdentifierEXT( device, pCreateInfo, pIdentifier ); - } - - //=== VK_NV_optical_flow === - - VkResult vkGetPhysicalDeviceOpticalFlowImageFormatsNV( VkPhysicalDevice physicalDevice, - const VkOpticalFlowImageFormatInfoNV * pOpticalFlowImageFormatInfo, - uint32_t * pFormatCount, - VkOpticalFlowImageFormatPropertiesNV * pImageFormatProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceOpticalFlowImageFormatsNV( physicalDevice, pOpticalFlowImageFormatInfo, pFormatCount, pImageFormatProperties ); - } - - VkResult vkCreateOpticalFlowSessionNV( VkDevice device, - const VkOpticalFlowSessionCreateInfoNV * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkOpticalFlowSessionNV * pSession ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateOpticalFlowSessionNV( device, pCreateInfo, pAllocator, pSession ); - } - - void vkDestroyOpticalFlowSessionNV( VkDevice device, VkOpticalFlowSessionNV session, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyOpticalFlowSessionNV( device, session, pAllocator ); - } - - VkResult vkBindOpticalFlowSessionImageNV( VkDevice device, - VkOpticalFlowSessionNV session, - VkOpticalFlowSessionBindingPointNV bindingPoint, - VkImageView view, - VkImageLayout layout ) const VULKAN_HPP_NOEXCEPT - { - return ::vkBindOpticalFlowSessionImageNV( device, session, bindingPoint, view, layout ); - } - - void vkCmdOpticalFlowExecuteNV( VkCommandBuffer commandBuffer, - VkOpticalFlowSessionNV session, - const VkOpticalFlowExecuteInfoNV * pExecuteInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdOpticalFlowExecuteNV( commandBuffer, session, pExecuteInfo ); - } - - //=== VK_KHR_maintenance5 === - - void vkCmdBindIndexBuffer2KHR( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkDeviceSize size, VkIndexType indexType ) const - VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBindIndexBuffer2KHR( commandBuffer, buffer, offset, size, indexType ); - } - - void 
vkGetRenderingAreaGranularityKHR( VkDevice device, - const VkRenderingAreaInfoKHR * pRenderingAreaInfo, - VkExtent2D * pGranularity ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetRenderingAreaGranularityKHR( device, pRenderingAreaInfo, pGranularity ); - } - - void vkGetDeviceImageSubresourceLayoutKHR( VkDevice device, - const VkDeviceImageSubresourceInfoKHR * pInfo, - VkSubresourceLayout2KHR * pLayout ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDeviceImageSubresourceLayoutKHR( device, pInfo, pLayout ); - } - - void vkGetImageSubresourceLayout2KHR( VkDevice device, - VkImage image, - const VkImageSubresource2KHR * pSubresource, - VkSubresourceLayout2KHR * pLayout ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetImageSubresourceLayout2KHR( device, image, pSubresource, pLayout ); - } - - //=== VK_AMD_anti_lag === - - void vkAntiLagUpdateAMD( VkDevice device, const VkAntiLagDataAMD * pData ) const VULKAN_HPP_NOEXCEPT - { - return ::vkAntiLagUpdateAMD( device, pData ); - } - - //=== VK_EXT_shader_object === - - VkResult vkCreateShadersEXT( VkDevice device, - uint32_t createInfoCount, - const VkShaderCreateInfoEXT * pCreateInfos, - const VkAllocationCallbacks * pAllocator, - VkShaderEXT * pShaders ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateShadersEXT( device, createInfoCount, pCreateInfos, pAllocator, pShaders ); - } - - void vkDestroyShaderEXT( VkDevice device, VkShaderEXT shader, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyShaderEXT( device, shader, pAllocator ); - } - - VkResult vkGetShaderBinaryDataEXT( VkDevice device, VkShaderEXT shader, size_t * pDataSize, void * pData ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetShaderBinaryDataEXT( device, shader, pDataSize, pData ); - } - - void vkCmdBindShadersEXT( VkCommandBuffer commandBuffer, - uint32_t stageCount, - const VkShaderStageFlagBits * pStages, - const VkShaderEXT * pShaders ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBindShadersEXT( commandBuffer, stageCount, pStages, pShaders ); - } - - //=== VK_KHR_pipeline_binary === - - VkResult vkCreatePipelineBinariesKHR( VkDevice device, - const VkPipelineBinaryCreateInfoKHR * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkPipelineBinaryHandlesInfoKHR * pBinaries ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreatePipelineBinariesKHR( device, pCreateInfo, pAllocator, pBinaries ); - } - - void vkDestroyPipelineBinaryKHR( VkDevice device, VkPipelineBinaryKHR pipelineBinary, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyPipelineBinaryKHR( device, pipelineBinary, pAllocator ); - } - - VkResult vkGetPipelineKeyKHR( VkDevice device, - const VkPipelineCreateInfoKHR * pPipelineCreateInfo, - VkPipelineBinaryKeyKHR * pPipelineKey ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPipelineKeyKHR( device, pPipelineCreateInfo, pPipelineKey ); - } - - VkResult vkGetPipelineBinaryDataKHR( VkDevice device, - const VkPipelineBinaryDataInfoKHR * pInfo, - VkPipelineBinaryKeyKHR * pPipelineBinaryKey, - size_t * pPipelineBinaryDataSize, - void * pPipelineBinaryData ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPipelineBinaryDataKHR( device, pInfo, pPipelineBinaryKey, pPipelineBinaryDataSize, pPipelineBinaryData ); - } - - VkResult vkReleaseCapturedPipelineDataKHR( VkDevice device, - const VkReleaseCapturedPipelineDataInfoKHR * pInfo, - const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkReleaseCapturedPipelineDataKHR( device, pInfo, pAllocator ); - } - - 
//=== VK_QCOM_tile_properties === - - VkResult vkGetFramebufferTilePropertiesQCOM( VkDevice device, - VkFramebuffer framebuffer, - uint32_t * pPropertiesCount, - VkTilePropertiesQCOM * pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetFramebufferTilePropertiesQCOM( device, framebuffer, pPropertiesCount, pProperties ); - } - - VkResult vkGetDynamicRenderingTilePropertiesQCOM( VkDevice device, - const VkRenderingInfo * pRenderingInfo, - VkTilePropertiesQCOM * pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDynamicRenderingTilePropertiesQCOM( device, pRenderingInfo, pProperties ); - } - - //=== VK_NV_low_latency2 === - - VkResult vkSetLatencySleepModeNV( VkDevice device, VkSwapchainKHR swapchain, const VkLatencySleepModeInfoNV * pSleepModeInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkSetLatencySleepModeNV( device, swapchain, pSleepModeInfo ); - } - - VkResult vkLatencySleepNV( VkDevice device, VkSwapchainKHR swapchain, const VkLatencySleepInfoNV * pSleepInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkLatencySleepNV( device, swapchain, pSleepInfo ); - } - - void vkSetLatencyMarkerNV( VkDevice device, VkSwapchainKHR swapchain, const VkSetLatencyMarkerInfoNV * pLatencyMarkerInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkSetLatencyMarkerNV( device, swapchain, pLatencyMarkerInfo ); - } - - void vkGetLatencyTimingsNV( VkDevice device, VkSwapchainKHR swapchain, VkGetLatencyMarkerInfoNV * pLatencyMarkerInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetLatencyTimingsNV( device, swapchain, pLatencyMarkerInfo ); - } - - void vkQueueNotifyOutOfBandNV( VkQueue queue, const VkOutOfBandQueueTypeInfoNV * pQueueTypeInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkQueueNotifyOutOfBandNV( queue, pQueueTypeInfo ); - } - - //=== VK_KHR_cooperative_matrix === - - VkResult vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR( VkPhysicalDevice physicalDevice, - uint32_t * pPropertyCount, - VkCooperativeMatrixPropertiesKHR * pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR( physicalDevice, pPropertyCount, pProperties ); - } - - //=== VK_EXT_attachment_feedback_loop_dynamic_state === - - void vkCmdSetAttachmentFeedbackLoopEnableEXT( VkCommandBuffer commandBuffer, VkImageAspectFlags aspectMask ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetAttachmentFeedbackLoopEnableEXT( commandBuffer, aspectMask ); - } + //=== VK_EXT_color_write_enable === + + void vkCmdSetColorWriteEnableEXT( VkCommandBuffer commandBuffer, uint32_t attachmentCount, const VkBool32 * pColorWriteEnables ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetColorWriteEnableEXT( commandBuffer, attachmentCount, pColorWriteEnables ); + } + + //=== VK_KHR_ray_tracing_maintenance1 === + + void vkCmdTraceRaysIndirect2KHR( VkCommandBuffer commandBuffer, VkDeviceAddress indirectDeviceAddress ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdTraceRaysIndirect2KHR( commandBuffer, indirectDeviceAddress ); + } + + //=== VK_EXT_multi_draw === + + void vkCmdDrawMultiEXT( VkCommandBuffer commandBuffer, + uint32_t drawCount, + const VkMultiDrawInfoEXT * pVertexInfo, + uint32_t instanceCount, + uint32_t firstInstance, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawMultiEXT( commandBuffer, drawCount, pVertexInfo, instanceCount, firstInstance, stride ); + } + + void vkCmdDrawMultiIndexedEXT( VkCommandBuffer commandBuffer, + uint32_t drawCount, + const VkMultiDrawIndexedInfoEXT * pIndexInfo, + uint32_t instanceCount, + uint32_t firstInstance, + uint32_t stride, + 
const int32_t * pVertexOffset ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawMultiIndexedEXT( commandBuffer, drawCount, pIndexInfo, instanceCount, firstInstance, stride, pVertexOffset ); + } + + //=== VK_EXT_opacity_micromap === + + VkResult vkCreateMicromapEXT( VkDevice device, + const VkMicromapCreateInfoEXT * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkMicromapEXT * pMicromap ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateMicromapEXT( device, pCreateInfo, pAllocator, pMicromap ); + } + + void vkDestroyMicromapEXT( VkDevice device, VkMicromapEXT micromap, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyMicromapEXT( device, micromap, pAllocator ); + } + + void vkCmdBuildMicromapsEXT( VkCommandBuffer commandBuffer, uint32_t infoCount, const VkMicromapBuildInfoEXT * pInfos ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBuildMicromapsEXT( commandBuffer, infoCount, pInfos ); + } + + VkResult vkBuildMicromapsEXT( VkDevice device, + VkDeferredOperationKHR deferredOperation, + uint32_t infoCount, + const VkMicromapBuildInfoEXT * pInfos ) const VULKAN_HPP_NOEXCEPT + { + return ::vkBuildMicromapsEXT( device, deferredOperation, infoCount, pInfos ); + } + + VkResult vkCopyMicromapEXT( VkDevice device, VkDeferredOperationKHR deferredOperation, const VkCopyMicromapInfoEXT * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCopyMicromapEXT( device, deferredOperation, pInfo ); + } + + VkResult vkCopyMicromapToMemoryEXT( VkDevice device, + VkDeferredOperationKHR deferredOperation, + const VkCopyMicromapToMemoryInfoEXT * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCopyMicromapToMemoryEXT( device, deferredOperation, pInfo ); + } + + VkResult vkCopyMemoryToMicromapEXT( VkDevice device, + VkDeferredOperationKHR deferredOperation, + const VkCopyMemoryToMicromapInfoEXT * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCopyMemoryToMicromapEXT( device, deferredOperation, pInfo ); + } + + VkResult vkWriteMicromapsPropertiesEXT( VkDevice device, + uint32_t micromapCount, + const VkMicromapEXT * pMicromaps, + VkQueryType queryType, + size_t dataSize, + void * pData, + size_t stride ) const VULKAN_HPP_NOEXCEPT + { + return ::vkWriteMicromapsPropertiesEXT( device, micromapCount, pMicromaps, queryType, dataSize, pData, stride ); + } + + void vkCmdCopyMicromapEXT( VkCommandBuffer commandBuffer, const VkCopyMicromapInfoEXT * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyMicromapEXT( commandBuffer, pInfo ); + } + + void vkCmdCopyMicromapToMemoryEXT( VkCommandBuffer commandBuffer, const VkCopyMicromapToMemoryInfoEXT * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyMicromapToMemoryEXT( commandBuffer, pInfo ); + } + + void vkCmdCopyMemoryToMicromapEXT( VkCommandBuffer commandBuffer, const VkCopyMemoryToMicromapInfoEXT * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyMemoryToMicromapEXT( commandBuffer, pInfo ); + } + + void vkCmdWriteMicromapsPropertiesEXT( VkCommandBuffer commandBuffer, + uint32_t micromapCount, + const VkMicromapEXT * pMicromaps, + VkQueryType queryType, + VkQueryPool queryPool, + uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdWriteMicromapsPropertiesEXT( commandBuffer, micromapCount, pMicromaps, queryType, queryPool, firstQuery ); + } + + void vkGetDeviceMicromapCompatibilityEXT( VkDevice device, + const VkMicromapVersionInfoEXT * pVersionInfo, + VkAccelerationStructureCompatibilityKHR * pCompatibility ) const VULKAN_HPP_NOEXCEPT + { + return 
::vkGetDeviceMicromapCompatibilityEXT( device, pVersionInfo, pCompatibility ); + } + + void vkGetMicromapBuildSizesEXT( VkDevice device, + VkAccelerationStructureBuildTypeKHR buildType, + const VkMicromapBuildInfoEXT * pBuildInfo, + VkMicromapBuildSizesInfoEXT * pSizeInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetMicromapBuildSizesEXT( device, buildType, pBuildInfo, pSizeInfo ); + } + + //=== VK_HUAWEI_cluster_culling_shader === + + void vkCmdDrawClusterHUAWEI( VkCommandBuffer commandBuffer, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawClusterHUAWEI( commandBuffer, groupCountX, groupCountY, groupCountZ ); + } + + void vkCmdDrawClusterIndirectHUAWEI( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawClusterIndirectHUAWEI( commandBuffer, buffer, offset ); + } + + //=== VK_EXT_pageable_device_local_memory === + + void vkSetDeviceMemoryPriorityEXT( VkDevice device, VkDeviceMemory memory, float priority ) const VULKAN_HPP_NOEXCEPT + { + return ::vkSetDeviceMemoryPriorityEXT( device, memory, priority ); + } + + //=== VK_KHR_maintenance4 === + + void vkGetDeviceBufferMemoryRequirementsKHR( VkDevice device, + const VkDeviceBufferMemoryRequirements * pInfo, + VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceBufferMemoryRequirementsKHR( device, pInfo, pMemoryRequirements ); + } + + void vkGetDeviceImageMemoryRequirementsKHR( VkDevice device, + const VkDeviceImageMemoryRequirements * pInfo, + VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceImageMemoryRequirementsKHR( device, pInfo, pMemoryRequirements ); + } + + void vkGetDeviceImageSparseMemoryRequirementsKHR( VkDevice device, + const VkDeviceImageMemoryRequirements * pInfo, + uint32_t * pSparseMemoryRequirementCount, + VkSparseImageMemoryRequirements2 * pSparseMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceImageSparseMemoryRequirementsKHR( device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements ); + } + + //=== VK_VALVE_descriptor_set_host_mapping === + + void vkGetDescriptorSetLayoutHostMappingInfoVALVE( VkDevice device, + const VkDescriptorSetBindingReferenceVALVE * pBindingReference, + VkDescriptorSetLayoutHostMappingInfoVALVE * pHostMapping ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDescriptorSetLayoutHostMappingInfoVALVE( device, pBindingReference, pHostMapping ); + } + + void vkGetDescriptorSetHostMappingVALVE( VkDevice device, VkDescriptorSet descriptorSet, void ** ppData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDescriptorSetHostMappingVALVE( device, descriptorSet, ppData ); + } + + //=== VK_NV_copy_memory_indirect === + + void vkCmdCopyMemoryIndirectNV( VkCommandBuffer commandBuffer, + VkDeviceAddress copyBufferAddress, + uint32_t copyCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyMemoryIndirectNV( commandBuffer, copyBufferAddress, copyCount, stride ); + } + + void vkCmdCopyMemoryToImageIndirectNV( VkCommandBuffer commandBuffer, + VkDeviceAddress copyBufferAddress, + uint32_t copyCount, + uint32_t stride, + VkImage dstImage, + VkImageLayout dstImageLayout, + const VkImageSubresourceLayers * pImageSubresources ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyMemoryToImageIndirectNV( commandBuffer, copyBufferAddress, copyCount, stride, dstImage, dstImageLayout, pImageSubresources ); + } + + //=== 
VK_NV_memory_decompression === + + void vkCmdDecompressMemoryNV( VkCommandBuffer commandBuffer, + uint32_t decompressRegionCount, + const VkDecompressMemoryRegionNV * pDecompressMemoryRegions ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDecompressMemoryNV( commandBuffer, decompressRegionCount, pDecompressMemoryRegions ); + } + + void vkCmdDecompressMemoryIndirectCountNV( VkCommandBuffer commandBuffer, + VkDeviceAddress indirectCommandsAddress, + VkDeviceAddress indirectCommandsCountAddress, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDecompressMemoryIndirectCountNV( commandBuffer, indirectCommandsAddress, indirectCommandsCountAddress, stride ); + } + + //=== VK_NV_device_generated_commands_compute === + + void vkGetPipelineIndirectMemoryRequirementsNV( VkDevice device, + const VkComputePipelineCreateInfo * pCreateInfo, + VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPipelineIndirectMemoryRequirementsNV( device, pCreateInfo, pMemoryRequirements ); + } + + void vkCmdUpdatePipelineIndirectBufferNV( VkCommandBuffer commandBuffer, + VkPipelineBindPoint pipelineBindPoint, + VkPipeline pipeline ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdUpdatePipelineIndirectBufferNV( commandBuffer, pipelineBindPoint, pipeline ); + } + + VkDeviceAddress vkGetPipelineIndirectDeviceAddressNV( VkDevice device, const VkPipelineIndirectDeviceAddressInfoNV * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPipelineIndirectDeviceAddressNV( device, pInfo ); + } + + //=== VK_EXT_extended_dynamic_state3 === + + void vkCmdSetDepthClampEnableEXT( VkCommandBuffer commandBuffer, VkBool32 depthClampEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDepthClampEnableEXT( commandBuffer, depthClampEnable ); + } + + void vkCmdSetPolygonModeEXT( VkCommandBuffer commandBuffer, VkPolygonMode polygonMode ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetPolygonModeEXT( commandBuffer, polygonMode ); + } + + void vkCmdSetRasterizationSamplesEXT( VkCommandBuffer commandBuffer, VkSampleCountFlagBits rasterizationSamples ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetRasterizationSamplesEXT( commandBuffer, rasterizationSamples ); + } + + void vkCmdSetSampleMaskEXT( VkCommandBuffer commandBuffer, VkSampleCountFlagBits samples, const VkSampleMask * pSampleMask ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetSampleMaskEXT( commandBuffer, samples, pSampleMask ); + } + + void vkCmdSetAlphaToCoverageEnableEXT( VkCommandBuffer commandBuffer, VkBool32 alphaToCoverageEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetAlphaToCoverageEnableEXT( commandBuffer, alphaToCoverageEnable ); + } + + void vkCmdSetAlphaToOneEnableEXT( VkCommandBuffer commandBuffer, VkBool32 alphaToOneEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetAlphaToOneEnableEXT( commandBuffer, alphaToOneEnable ); + } + + void vkCmdSetLogicOpEnableEXT( VkCommandBuffer commandBuffer, VkBool32 logicOpEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetLogicOpEnableEXT( commandBuffer, logicOpEnable ); + } + + void vkCmdSetColorBlendEnableEXT( VkCommandBuffer commandBuffer, + uint32_t firstAttachment, + uint32_t attachmentCount, + const VkBool32 * pColorBlendEnables ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetColorBlendEnableEXT( commandBuffer, firstAttachment, attachmentCount, pColorBlendEnables ); + } + + void vkCmdSetColorBlendEquationEXT( VkCommandBuffer commandBuffer, + uint32_t firstAttachment, + uint32_t attachmentCount, + const VkColorBlendEquationEXT * 
pColorBlendEquations ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetColorBlendEquationEXT( commandBuffer, firstAttachment, attachmentCount, pColorBlendEquations ); + } + + void vkCmdSetColorWriteMaskEXT( VkCommandBuffer commandBuffer, + uint32_t firstAttachment, + uint32_t attachmentCount, + const VkColorComponentFlags * pColorWriteMasks ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetColorWriteMaskEXT( commandBuffer, firstAttachment, attachmentCount, pColorWriteMasks ); + } + + void vkCmdSetTessellationDomainOriginEXT( VkCommandBuffer commandBuffer, VkTessellationDomainOrigin domainOrigin ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetTessellationDomainOriginEXT( commandBuffer, domainOrigin ); + } + + void vkCmdSetRasterizationStreamEXT( VkCommandBuffer commandBuffer, uint32_t rasterizationStream ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetRasterizationStreamEXT( commandBuffer, rasterizationStream ); + } + + void vkCmdSetConservativeRasterizationModeEXT( VkCommandBuffer commandBuffer, + VkConservativeRasterizationModeEXT conservativeRasterizationMode ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetConservativeRasterizationModeEXT( commandBuffer, conservativeRasterizationMode ); + } + + void vkCmdSetExtraPrimitiveOverestimationSizeEXT( VkCommandBuffer commandBuffer, float extraPrimitiveOverestimationSize ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetExtraPrimitiveOverestimationSizeEXT( commandBuffer, extraPrimitiveOverestimationSize ); + } + + void vkCmdSetDepthClipEnableEXT( VkCommandBuffer commandBuffer, VkBool32 depthClipEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDepthClipEnableEXT( commandBuffer, depthClipEnable ); + } + + void vkCmdSetSampleLocationsEnableEXT( VkCommandBuffer commandBuffer, VkBool32 sampleLocationsEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetSampleLocationsEnableEXT( commandBuffer, sampleLocationsEnable ); + } + + void vkCmdSetColorBlendAdvancedEXT( VkCommandBuffer commandBuffer, + uint32_t firstAttachment, + uint32_t attachmentCount, + const VkColorBlendAdvancedEXT * pColorBlendAdvanced ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetColorBlendAdvancedEXT( commandBuffer, firstAttachment, attachmentCount, pColorBlendAdvanced ); + } + + void vkCmdSetProvokingVertexModeEXT( VkCommandBuffer commandBuffer, VkProvokingVertexModeEXT provokingVertexMode ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetProvokingVertexModeEXT( commandBuffer, provokingVertexMode ); + } + + void vkCmdSetLineRasterizationModeEXT( VkCommandBuffer commandBuffer, VkLineRasterizationModeEXT lineRasterizationMode ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetLineRasterizationModeEXT( commandBuffer, lineRasterizationMode ); + } + + void vkCmdSetLineStippleEnableEXT( VkCommandBuffer commandBuffer, VkBool32 stippledLineEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetLineStippleEnableEXT( commandBuffer, stippledLineEnable ); + } + + void vkCmdSetDepthClipNegativeOneToOneEXT( VkCommandBuffer commandBuffer, VkBool32 negativeOneToOne ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDepthClipNegativeOneToOneEXT( commandBuffer, negativeOneToOne ); + } + + void vkCmdSetViewportWScalingEnableNV( VkCommandBuffer commandBuffer, VkBool32 viewportWScalingEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetViewportWScalingEnableNV( commandBuffer, viewportWScalingEnable ); + } + + void vkCmdSetViewportSwizzleNV( VkCommandBuffer commandBuffer, + uint32_t firstViewport, + uint32_t viewportCount, + const VkViewportSwizzleNV * 
pViewportSwizzles ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetViewportSwizzleNV( commandBuffer, firstViewport, viewportCount, pViewportSwizzles ); + } + + void vkCmdSetCoverageToColorEnableNV( VkCommandBuffer commandBuffer, VkBool32 coverageToColorEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetCoverageToColorEnableNV( commandBuffer, coverageToColorEnable ); + } + + void vkCmdSetCoverageToColorLocationNV( VkCommandBuffer commandBuffer, uint32_t coverageToColorLocation ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetCoverageToColorLocationNV( commandBuffer, coverageToColorLocation ); + } + + void vkCmdSetCoverageModulationModeNV( VkCommandBuffer commandBuffer, VkCoverageModulationModeNV coverageModulationMode ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetCoverageModulationModeNV( commandBuffer, coverageModulationMode ); + } + + void vkCmdSetCoverageModulationTableEnableNV( VkCommandBuffer commandBuffer, VkBool32 coverageModulationTableEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetCoverageModulationTableEnableNV( commandBuffer, coverageModulationTableEnable ); + } + + void vkCmdSetCoverageModulationTableNV( VkCommandBuffer commandBuffer, + uint32_t coverageModulationTableCount, + const float * pCoverageModulationTable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetCoverageModulationTableNV( commandBuffer, coverageModulationTableCount, pCoverageModulationTable ); + } + + void vkCmdSetShadingRateImageEnableNV( VkCommandBuffer commandBuffer, VkBool32 shadingRateImageEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetShadingRateImageEnableNV( commandBuffer, shadingRateImageEnable ); + } + + void vkCmdSetRepresentativeFragmentTestEnableNV( VkCommandBuffer commandBuffer, VkBool32 representativeFragmentTestEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetRepresentativeFragmentTestEnableNV( commandBuffer, representativeFragmentTestEnable ); + } + + void vkCmdSetCoverageReductionModeNV( VkCommandBuffer commandBuffer, VkCoverageReductionModeNV coverageReductionMode ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetCoverageReductionModeNV( commandBuffer, coverageReductionMode ); + } + + //=== VK_EXT_shader_module_identifier === + + void vkGetShaderModuleIdentifierEXT( VkDevice device, VkShaderModule shaderModule, VkShaderModuleIdentifierEXT * pIdentifier ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetShaderModuleIdentifierEXT( device, shaderModule, pIdentifier ); + } + + void vkGetShaderModuleCreateInfoIdentifierEXT( VkDevice device, + const VkShaderModuleCreateInfo * pCreateInfo, + VkShaderModuleIdentifierEXT * pIdentifier ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetShaderModuleCreateInfoIdentifierEXT( device, pCreateInfo, pIdentifier ); + } + + //=== VK_NV_optical_flow === + + VkResult vkGetPhysicalDeviceOpticalFlowImageFormatsNV( VkPhysicalDevice physicalDevice, + const VkOpticalFlowImageFormatInfoNV * pOpticalFlowImageFormatInfo, + uint32_t * pFormatCount, + VkOpticalFlowImageFormatPropertiesNV * pImageFormatProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceOpticalFlowImageFormatsNV( physicalDevice, pOpticalFlowImageFormatInfo, pFormatCount, pImageFormatProperties ); + } + + VkResult vkCreateOpticalFlowSessionNV( VkDevice device, + const VkOpticalFlowSessionCreateInfoNV * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkOpticalFlowSessionNV * pSession ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateOpticalFlowSessionNV( device, pCreateInfo, pAllocator, pSession ); + } + + void 
vkDestroyOpticalFlowSessionNV( VkDevice device, VkOpticalFlowSessionNV session, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyOpticalFlowSessionNV( device, session, pAllocator ); + } + + VkResult vkBindOpticalFlowSessionImageNV( VkDevice device, + VkOpticalFlowSessionNV session, + VkOpticalFlowSessionBindingPointNV bindingPoint, + VkImageView view, + VkImageLayout layout ) const VULKAN_HPP_NOEXCEPT + { + return ::vkBindOpticalFlowSessionImageNV( device, session, bindingPoint, view, layout ); + } + + void vkCmdOpticalFlowExecuteNV( VkCommandBuffer commandBuffer, + VkOpticalFlowSessionNV session, + const VkOpticalFlowExecuteInfoNV * pExecuteInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdOpticalFlowExecuteNV( commandBuffer, session, pExecuteInfo ); + } + + //=== VK_KHR_maintenance5 === + + void vkCmdBindIndexBuffer2KHR( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkDeviceSize size, VkIndexType indexType ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBindIndexBuffer2KHR( commandBuffer, buffer, offset, size, indexType ); + } + + void vkGetRenderingAreaGranularityKHR( VkDevice device, + const VkRenderingAreaInfoKHR * pRenderingAreaInfo, + VkExtent2D * pGranularity ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetRenderingAreaGranularityKHR( device, pRenderingAreaInfo, pGranularity ); + } + + void vkGetDeviceImageSubresourceLayoutKHR( VkDevice device, + const VkDeviceImageSubresourceInfoKHR * pInfo, + VkSubresourceLayout2KHR * pLayout ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceImageSubresourceLayoutKHR( device, pInfo, pLayout ); + } + + void vkGetImageSubresourceLayout2KHR( VkDevice device, + VkImage image, + const VkImageSubresource2KHR * pSubresource, + VkSubresourceLayout2KHR * pLayout ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetImageSubresourceLayout2KHR( device, image, pSubresource, pLayout ); + } + + //=== VK_AMD_anti_lag === + + void vkAntiLagUpdateAMD( VkDevice device, const VkAntiLagDataAMD * pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkAntiLagUpdateAMD( device, pData ); + } + + //=== VK_EXT_shader_object === + + VkResult vkCreateShadersEXT( VkDevice device, + uint32_t createInfoCount, + const VkShaderCreateInfoEXT * pCreateInfos, + const VkAllocationCallbacks * pAllocator, + VkShaderEXT * pShaders ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateShadersEXT( device, createInfoCount, pCreateInfos, pAllocator, pShaders ); + } + + void vkDestroyShaderEXT( VkDevice device, VkShaderEXT shader, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyShaderEXT( device, shader, pAllocator ); + } + + VkResult vkGetShaderBinaryDataEXT( VkDevice device, VkShaderEXT shader, size_t * pDataSize, void * pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetShaderBinaryDataEXT( device, shader, pDataSize, pData ); + } + + void vkCmdBindShadersEXT( VkCommandBuffer commandBuffer, + uint32_t stageCount, + const VkShaderStageFlagBits * pStages, + const VkShaderEXT * pShaders ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBindShadersEXT( commandBuffer, stageCount, pStages, pShaders ); + } + + void vkCmdSetDepthClampRangeEXT( VkCommandBuffer commandBuffer, + VkDepthClampModeEXT depthClampMode, + const VkDepthClampRangeEXT * pDepthClampRange ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDepthClampRangeEXT( commandBuffer, depthClampMode, pDepthClampRange ); + } + + //=== VK_KHR_pipeline_binary === + + VkResult vkCreatePipelineBinariesKHR( VkDevice device, + const 
VkPipelineBinaryCreateInfoKHR * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkPipelineBinaryHandlesInfoKHR * pBinaries ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreatePipelineBinariesKHR( device, pCreateInfo, pAllocator, pBinaries ); + } + + void vkDestroyPipelineBinaryKHR( VkDevice device, VkPipelineBinaryKHR pipelineBinary, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyPipelineBinaryKHR( device, pipelineBinary, pAllocator ); + } + + VkResult vkGetPipelineKeyKHR( VkDevice device, + const VkPipelineCreateInfoKHR * pPipelineCreateInfo, + VkPipelineBinaryKeyKHR * pPipelineKey ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPipelineKeyKHR( device, pPipelineCreateInfo, pPipelineKey ); + } + + VkResult vkGetPipelineBinaryDataKHR( VkDevice device, + const VkPipelineBinaryDataInfoKHR * pInfo, + VkPipelineBinaryKeyKHR * pPipelineBinaryKey, + size_t * pPipelineBinaryDataSize, + void * pPipelineBinaryData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPipelineBinaryDataKHR( device, pInfo, pPipelineBinaryKey, pPipelineBinaryDataSize, pPipelineBinaryData ); + } + + VkResult vkReleaseCapturedPipelineDataKHR( VkDevice device, + const VkReleaseCapturedPipelineDataInfoKHR * pInfo, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkReleaseCapturedPipelineDataKHR( device, pInfo, pAllocator ); + } + + //=== VK_QCOM_tile_properties === + + VkResult vkGetFramebufferTilePropertiesQCOM( VkDevice device, + VkFramebuffer framebuffer, + uint32_t * pPropertiesCount, + VkTilePropertiesQCOM * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetFramebufferTilePropertiesQCOM( device, framebuffer, pPropertiesCount, pProperties ); + } + + VkResult vkGetDynamicRenderingTilePropertiesQCOM( VkDevice device, + const VkRenderingInfo * pRenderingInfo, + VkTilePropertiesQCOM * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDynamicRenderingTilePropertiesQCOM( device, pRenderingInfo, pProperties ); + } + + //=== VK_NV_low_latency2 === + + VkResult vkSetLatencySleepModeNV( VkDevice device, VkSwapchainKHR swapchain, const VkLatencySleepModeInfoNV * pSleepModeInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkSetLatencySleepModeNV( device, swapchain, pSleepModeInfo ); + } + + VkResult vkLatencySleepNV( VkDevice device, VkSwapchainKHR swapchain, const VkLatencySleepInfoNV * pSleepInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkLatencySleepNV( device, swapchain, pSleepInfo ); + } + + void vkSetLatencyMarkerNV( VkDevice device, VkSwapchainKHR swapchain, const VkSetLatencyMarkerInfoNV * pLatencyMarkerInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkSetLatencyMarkerNV( device, swapchain, pLatencyMarkerInfo ); + } + + void vkGetLatencyTimingsNV( VkDevice device, VkSwapchainKHR swapchain, VkGetLatencyMarkerInfoNV * pLatencyMarkerInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetLatencyTimingsNV( device, swapchain, pLatencyMarkerInfo ); + } + + void vkQueueNotifyOutOfBandNV( VkQueue queue, const VkOutOfBandQueueTypeInfoNV * pQueueTypeInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkQueueNotifyOutOfBandNV( queue, pQueueTypeInfo ); + } + + //=== VK_KHR_cooperative_matrix === + + VkResult vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR( VkPhysicalDevice physicalDevice, + uint32_t * pPropertyCount, + VkCooperativeMatrixPropertiesKHR * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR( physicalDevice, pPropertyCount, pProperties ); + } + + //=== 
VK_EXT_attachment_feedback_loop_dynamic_state === + + void vkCmdSetAttachmentFeedbackLoopEnableEXT( VkCommandBuffer commandBuffer, VkImageAspectFlags aspectMask ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetAttachmentFeedbackLoopEnableEXT( commandBuffer, aspectMask ); + } # if defined( VK_USE_PLATFORM_SCREEN_QNX ) - //=== VK_QNX_external_memory_screen_buffer === + //=== VK_QNX_external_memory_screen_buffer === - VkResult vkGetScreenBufferPropertiesQNX( VkDevice device, - const struct _screen_buffer * buffer, - VkScreenBufferPropertiesQNX * pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetScreenBufferPropertiesQNX( device, buffer, pProperties ); - } + VkResult vkGetScreenBufferPropertiesQNX( VkDevice device, + const struct _screen_buffer * buffer, + VkScreenBufferPropertiesQNX * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetScreenBufferPropertiesQNX( device, buffer, pProperties ); + } # endif /*VK_USE_PLATFORM_SCREEN_QNX*/ - //=== VK_KHR_line_rasterization === + //=== VK_KHR_line_rasterization === - void vkCmdSetLineStippleKHR( VkCommandBuffer commandBuffer, uint32_t lineStippleFactor, uint16_t lineStipplePattern ) const VULKAN_HPP_NOEXCEPT + void vkCmdSetLineStippleKHR( VkCommandBuffer commandBuffer, uint32_t lineStippleFactor, uint16_t lineStipplePattern ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetLineStippleKHR( commandBuffer, lineStippleFactor, lineStipplePattern ); + } + + //=== VK_KHR_calibrated_timestamps === + + VkResult vkGetPhysicalDeviceCalibrateableTimeDomainsKHR( VkPhysicalDevice physicalDevice, + uint32_t * pTimeDomainCount, + VkTimeDomainKHR * pTimeDomains ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceCalibrateableTimeDomainsKHR( physicalDevice, pTimeDomainCount, pTimeDomains ); + } + + VkResult vkGetCalibratedTimestampsKHR( VkDevice device, + uint32_t timestampCount, + const VkCalibratedTimestampInfoKHR * pTimestampInfos, + uint64_t * pTimestamps, + uint64_t * pMaxDeviation ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetCalibratedTimestampsKHR( device, timestampCount, pTimestampInfos, pTimestamps, pMaxDeviation ); + } + + //=== VK_KHR_maintenance6 === + + void vkCmdBindDescriptorSets2KHR( VkCommandBuffer commandBuffer, const VkBindDescriptorSetsInfoKHR * pBindDescriptorSetsInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBindDescriptorSets2KHR( commandBuffer, pBindDescriptorSetsInfo ); + } + + void vkCmdPushConstants2KHR( VkCommandBuffer commandBuffer, const VkPushConstantsInfoKHR * pPushConstantsInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdPushConstants2KHR( commandBuffer, pPushConstantsInfo ); + } + + void vkCmdPushDescriptorSet2KHR( VkCommandBuffer commandBuffer, const VkPushDescriptorSetInfoKHR * pPushDescriptorSetInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdPushDescriptorSet2KHR( commandBuffer, pPushDescriptorSetInfo ); + } + + void vkCmdPushDescriptorSetWithTemplate2KHR( VkCommandBuffer commandBuffer, + const VkPushDescriptorSetWithTemplateInfoKHR * pPushDescriptorSetWithTemplateInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdPushDescriptorSetWithTemplate2KHR( commandBuffer, pPushDescriptorSetWithTemplateInfo ); + } + + void vkCmdSetDescriptorBufferOffsets2EXT( VkCommandBuffer commandBuffer, + const VkSetDescriptorBufferOffsetsInfoEXT * pSetDescriptorBufferOffsetsInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDescriptorBufferOffsets2EXT( commandBuffer, pSetDescriptorBufferOffsetsInfo ); + } + + void vkCmdBindDescriptorBufferEmbeddedSamplers2EXT( + VkCommandBuffer commandBuffer, + 
const VkBindDescriptorBufferEmbeddedSamplersInfoEXT * pBindDescriptorBufferEmbeddedSamplersInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBindDescriptorBufferEmbeddedSamplers2EXT( commandBuffer, pBindDescriptorBufferEmbeddedSamplersInfo ); + } + + //=== VK_EXT_device_generated_commands === + + void vkGetGeneratedCommandsMemoryRequirementsEXT( VkDevice device, + const VkGeneratedCommandsMemoryRequirementsInfoEXT * pInfo, + VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetGeneratedCommandsMemoryRequirementsEXT( device, pInfo, pMemoryRequirements ); + } + + void vkCmdPreprocessGeneratedCommandsEXT( VkCommandBuffer commandBuffer, + const VkGeneratedCommandsInfoEXT * pGeneratedCommandsInfo, + VkCommandBuffer stateCommandBuffer ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdPreprocessGeneratedCommandsEXT( commandBuffer, pGeneratedCommandsInfo, stateCommandBuffer ); + } + + void vkCmdExecuteGeneratedCommandsEXT( VkCommandBuffer commandBuffer, + VkBool32 isPreprocessed, + const VkGeneratedCommandsInfoEXT * pGeneratedCommandsInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdExecuteGeneratedCommandsEXT( commandBuffer, isPreprocessed, pGeneratedCommandsInfo ); + } + + VkResult vkCreateIndirectCommandsLayoutEXT( VkDevice device, + const VkIndirectCommandsLayoutCreateInfoEXT * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkIndirectCommandsLayoutEXT * pIndirectCommandsLayout ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateIndirectCommandsLayoutEXT( device, pCreateInfo, pAllocator, pIndirectCommandsLayout ); + } + + void vkDestroyIndirectCommandsLayoutEXT( VkDevice device, + VkIndirectCommandsLayoutEXT indirectCommandsLayout, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyIndirectCommandsLayoutEXT( device, indirectCommandsLayout, pAllocator ); + } + + VkResult vkCreateIndirectExecutionSetEXT( VkDevice device, + const VkIndirectExecutionSetCreateInfoEXT * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkIndirectExecutionSetEXT * pIndirectExecutionSet ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateIndirectExecutionSetEXT( device, pCreateInfo, pAllocator, pIndirectExecutionSet ); + } + + void vkDestroyIndirectExecutionSetEXT( VkDevice device, + VkIndirectExecutionSetEXT indirectExecutionSet, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyIndirectExecutionSetEXT( device, indirectExecutionSet, pAllocator ); + } + + void vkUpdateIndirectExecutionSetPipelineEXT( VkDevice device, + VkIndirectExecutionSetEXT indirectExecutionSet, + uint32_t executionSetWriteCount, + const VkWriteIndirectExecutionSetPipelineEXT * pExecutionSetWrites ) const VULKAN_HPP_NOEXCEPT + { + return ::vkUpdateIndirectExecutionSetPipelineEXT( device, indirectExecutionSet, executionSetWriteCount, pExecutionSetWrites ); + } + + void vkUpdateIndirectExecutionSetShaderEXT( VkDevice device, + VkIndirectExecutionSetEXT indirectExecutionSet, + uint32_t executionSetWriteCount, + const VkWriteIndirectExecutionSetShaderEXT * pExecutionSetWrites ) const VULKAN_HPP_NOEXCEPT + { + return ::vkUpdateIndirectExecutionSetShaderEXT( device, indirectExecutionSet, executionSetWriteCount, pExecutionSetWrites ); + } + + //=== VK_NV_cooperative_matrix2 === + + VkResult vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV( + VkPhysicalDevice physicalDevice, uint32_t * pPropertyCount, VkCooperativeMatrixFlexibleDimensionsPropertiesNV * pProperties ) const 
VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV( physicalDevice, pPropertyCount, pProperties ); + } + }; + + inline DispatchLoaderStatic & getDispatchLoaderStatic() { - return ::vkCmdSetLineStippleKHR( commandBuffer, lineStippleFactor, lineStipplePattern ); + static DispatchLoaderStatic dls; + return dls; } - - //=== VK_KHR_calibrated_timestamps === - - VkResult vkGetPhysicalDeviceCalibrateableTimeDomainsKHR( VkPhysicalDevice physicalDevice, - uint32_t * pTimeDomainCount, - VkTimeDomainKHR * pTimeDomains ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceCalibrateableTimeDomainsKHR( physicalDevice, pTimeDomainCount, pTimeDomains ); - } - - VkResult vkGetCalibratedTimestampsKHR( VkDevice device, - uint32_t timestampCount, - const VkCalibratedTimestampInfoKHR * pTimestampInfos, - uint64_t * pTimestamps, - uint64_t * pMaxDeviation ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetCalibratedTimestampsKHR( device, timestampCount, pTimestampInfos, pTimestamps, pMaxDeviation ); - } - - //=== VK_KHR_maintenance6 === - - void vkCmdBindDescriptorSets2KHR( VkCommandBuffer commandBuffer, const VkBindDescriptorSetsInfoKHR * pBindDescriptorSetsInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBindDescriptorSets2KHR( commandBuffer, pBindDescriptorSetsInfo ); - } - - void vkCmdPushConstants2KHR( VkCommandBuffer commandBuffer, const VkPushConstantsInfoKHR * pPushConstantsInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdPushConstants2KHR( commandBuffer, pPushConstantsInfo ); - } - - void vkCmdPushDescriptorSet2KHR( VkCommandBuffer commandBuffer, const VkPushDescriptorSetInfoKHR * pPushDescriptorSetInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdPushDescriptorSet2KHR( commandBuffer, pPushDescriptorSetInfo ); - } - - void vkCmdPushDescriptorSetWithTemplate2KHR( VkCommandBuffer commandBuffer, - const VkPushDescriptorSetWithTemplateInfoKHR * pPushDescriptorSetWithTemplateInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdPushDescriptorSetWithTemplate2KHR( commandBuffer, pPushDescriptorSetWithTemplateInfo ); - } - - void vkCmdSetDescriptorBufferOffsets2EXT( VkCommandBuffer commandBuffer, - const VkSetDescriptorBufferOffsetsInfoEXT * pSetDescriptorBufferOffsetsInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetDescriptorBufferOffsets2EXT( commandBuffer, pSetDescriptorBufferOffsetsInfo ); - } - - void vkCmdBindDescriptorBufferEmbeddedSamplers2EXT( - VkCommandBuffer commandBuffer, const VkBindDescriptorBufferEmbeddedSamplersInfoEXT * pBindDescriptorBufferEmbeddedSamplersInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBindDescriptorBufferEmbeddedSamplers2EXT( commandBuffer, pBindDescriptorBufferEmbeddedSamplersInfo ); - } - }; - - inline ::VULKAN_HPP_NAMESPACE::DispatchLoaderStatic & getDispatchLoaderStatic() - { - static ::VULKAN_HPP_NAMESPACE::DispatchLoaderStatic dls; - return dls; - } #endif + } // namespace detail #if ( 14 <= VULKAN_HPP_CPP_VERSION ) using std::exchange; #else @@ -5934,208 +6056,211 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_SMART_HANDLE ) struct AllocationCallbacks; - template - class ObjectDestroy + namespace detail { - public: - ObjectDestroy() = default; - - ObjectDestroy( OwnerType owner, - Optional allocationCallbacks VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & dispatch VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) VULKAN_HPP_NOEXCEPT - : m_owner( owner ) - , m_allocationCallbacks( allocationCallbacks ) - , m_dispatch( &dispatch ) + template + class ObjectDestroy { 
- } + public: + ObjectDestroy() = default; - OwnerType getOwner() const VULKAN_HPP_NOEXCEPT + ObjectDestroy( OwnerType owner, + Optional allocationCallbacks VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & dispatch VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) VULKAN_HPP_NOEXCEPT + : m_owner( owner ) + , m_allocationCallbacks( allocationCallbacks ) + , m_dispatch( &dispatch ) + { + } + + OwnerType getOwner() const VULKAN_HPP_NOEXCEPT + { + return m_owner; + } + + Optional getAllocator() const VULKAN_HPP_NOEXCEPT + { + return m_allocationCallbacks; + } + + Dispatch const & getDispatch() const VULKAN_HPP_NOEXCEPT + { + return *m_dispatch; + } + + protected: + template + void destroy( T t ) VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( m_owner && m_dispatch ); + m_owner.destroy( t, m_allocationCallbacks, *m_dispatch ); + } + + private: + OwnerType m_owner = {}; + Optional m_allocationCallbacks = nullptr; + Dispatch const * m_dispatch = nullptr; + }; + + class NoParent; + + template + class ObjectDestroy { - return m_owner; - } + public: + ObjectDestroy() = default; - Optional getAllocator() const VULKAN_HPP_NOEXCEPT + ObjectDestroy( Optional allocationCallbacks, + Dispatch const & dispatch VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) VULKAN_HPP_NOEXCEPT + : m_allocationCallbacks( allocationCallbacks ) + , m_dispatch( &dispatch ) + { + } + + Optional getAllocator() const VULKAN_HPP_NOEXCEPT + { + return m_allocationCallbacks; + } + + Dispatch const & getDispatch() const VULKAN_HPP_NOEXCEPT + { + return *m_dispatch; + } + + protected: + template + void destroy( T t ) VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( m_dispatch ); + t.destroy( m_allocationCallbacks, *m_dispatch ); + } + + private: + Optional m_allocationCallbacks = nullptr; + Dispatch const * m_dispatch = nullptr; + }; + + template + class ObjectFree { - return m_allocationCallbacks; - } + public: + ObjectFree() = default; - Dispatch const & getDispatch() const VULKAN_HPP_NOEXCEPT + ObjectFree( OwnerType owner, + Optional allocationCallbacks VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & dispatch VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) VULKAN_HPP_NOEXCEPT + : m_owner( owner ) + , m_allocationCallbacks( allocationCallbacks ) + , m_dispatch( &dispatch ) + { + } + + OwnerType getOwner() const VULKAN_HPP_NOEXCEPT + { + return m_owner; + } + + Optional getAllocator() const VULKAN_HPP_NOEXCEPT + { + return m_allocationCallbacks; + } + + Dispatch const & getDispatch() const VULKAN_HPP_NOEXCEPT + { + return *m_dispatch; + } + + protected: + template + void destroy( T t ) VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( m_owner && m_dispatch ); + ( m_owner.free )( t, m_allocationCallbacks, *m_dispatch ); + } + + private: + OwnerType m_owner = {}; + Optional m_allocationCallbacks = nullptr; + Dispatch const * m_dispatch = nullptr; + }; + + template + class ObjectRelease { - return *m_dispatch; - } + public: + ObjectRelease() = default; - protected: - template - void destroy( T t ) VULKAN_HPP_NOEXCEPT + ObjectRelease( OwnerType owner, Dispatch const & dispatch VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) VULKAN_HPP_NOEXCEPT + : m_owner( owner ) + , m_dispatch( &dispatch ) + { + } + + OwnerType getOwner() const VULKAN_HPP_NOEXCEPT + { + return m_owner; + } + + Dispatch const & getDispatch() const VULKAN_HPP_NOEXCEPT + { + return *m_dispatch; + } + + protected: + template + void destroy( T t ) VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( m_owner && m_dispatch ); + m_owner.release( t, *m_dispatch ); + } + + private: + 
OwnerType m_owner = {}; + Dispatch const * m_dispatch = nullptr; + }; + + template + class PoolFree { - VULKAN_HPP_ASSERT( m_owner && m_dispatch ); - m_owner.destroy( t, m_allocationCallbacks, *m_dispatch ); - } + public: + PoolFree() = default; - private: - OwnerType m_owner = {}; - Optional m_allocationCallbacks = nullptr; - Dispatch const * m_dispatch = nullptr; - }; + PoolFree( OwnerType owner, PoolType pool, Dispatch const & dispatch VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) VULKAN_HPP_NOEXCEPT + : m_owner( owner ) + , m_pool( pool ) + , m_dispatch( &dispatch ) + { + } - class NoParent; + OwnerType getOwner() const VULKAN_HPP_NOEXCEPT + { + return m_owner; + } - template - class ObjectDestroy - { - public: - ObjectDestroy() = default; + PoolType getPool() const VULKAN_HPP_NOEXCEPT + { + return m_pool; + } - ObjectDestroy( Optional allocationCallbacks, - Dispatch const & dispatch VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) VULKAN_HPP_NOEXCEPT - : m_allocationCallbacks( allocationCallbacks ) - , m_dispatch( &dispatch ) - { - } + Dispatch const & getDispatch() const VULKAN_HPP_NOEXCEPT + { + return *m_dispatch; + } - Optional getAllocator() const VULKAN_HPP_NOEXCEPT - { - return m_allocationCallbacks; - } + protected: + template + void destroy( T t ) VULKAN_HPP_NOEXCEPT + { + ( m_owner.free )( m_pool, t, *m_dispatch ); + } - Dispatch const & getDispatch() const VULKAN_HPP_NOEXCEPT - { - return *m_dispatch; - } - - protected: - template - void destroy( T t ) VULKAN_HPP_NOEXCEPT - { - VULKAN_HPP_ASSERT( m_dispatch ); - t.destroy( m_allocationCallbacks, *m_dispatch ); - } - - private: - Optional m_allocationCallbacks = nullptr; - Dispatch const * m_dispatch = nullptr; - }; - - template - class ObjectFree - { - public: - ObjectFree() = default; - - ObjectFree( OwnerType owner, - Optional allocationCallbacks VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & dispatch VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) VULKAN_HPP_NOEXCEPT - : m_owner( owner ) - , m_allocationCallbacks( allocationCallbacks ) - , m_dispatch( &dispatch ) - { - } - - OwnerType getOwner() const VULKAN_HPP_NOEXCEPT - { - return m_owner; - } - - Optional getAllocator() const VULKAN_HPP_NOEXCEPT - { - return m_allocationCallbacks; - } - - Dispatch const & getDispatch() const VULKAN_HPP_NOEXCEPT - { - return *m_dispatch; - } - - protected: - template - void destroy( T t ) VULKAN_HPP_NOEXCEPT - { - VULKAN_HPP_ASSERT( m_owner && m_dispatch ); - ( m_owner.free )( t, m_allocationCallbacks, *m_dispatch ); - } - - private: - OwnerType m_owner = {}; - Optional m_allocationCallbacks = nullptr; - Dispatch const * m_dispatch = nullptr; - }; - - template - class ObjectRelease - { - public: - ObjectRelease() = default; - - ObjectRelease( OwnerType owner, Dispatch const & dispatch VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) VULKAN_HPP_NOEXCEPT - : m_owner( owner ) - , m_dispatch( &dispatch ) - { - } - - OwnerType getOwner() const VULKAN_HPP_NOEXCEPT - { - return m_owner; - } - - Dispatch const & getDispatch() const VULKAN_HPP_NOEXCEPT - { - return *m_dispatch; - } - - protected: - template - void destroy( T t ) VULKAN_HPP_NOEXCEPT - { - VULKAN_HPP_ASSERT( m_owner && m_dispatch ); - m_owner.release( t, *m_dispatch ); - } - - private: - OwnerType m_owner = {}; - Dispatch const * m_dispatch = nullptr; - }; - - template - class PoolFree - { - public: - PoolFree() = default; - - PoolFree( OwnerType owner, PoolType pool, Dispatch const & dispatch VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) VULKAN_HPP_NOEXCEPT - : m_owner( owner ) - 
, m_pool( pool ) - , m_dispatch( &dispatch ) - { - } - - OwnerType getOwner() const VULKAN_HPP_NOEXCEPT - { - return m_owner; - } - - PoolType getPool() const VULKAN_HPP_NOEXCEPT - { - return m_pool; - } - - Dispatch const & getDispatch() const VULKAN_HPP_NOEXCEPT - { - return *m_dispatch; - } - - protected: - template - void destroy( T t ) VULKAN_HPP_NOEXCEPT - { - ( m_owner.free )( m_pool, t, *m_dispatch ); - } - - private: - OwnerType m_owner = OwnerType(); - PoolType m_pool = PoolType(); - Dispatch const * m_dispatch = nullptr; - }; + private: + OwnerType m_owner = OwnerType(); + PoolType m_pool = PoolType(); + Dispatch const * m_dispatch = nullptr; + }; + } // namespace detail #endif // !VULKAN_HPP_NO_SMART_HANDLE //================== @@ -6875,7 +7000,8 @@ namespace VULKAN_HPP_NAMESPACE //======================== //=== CONSTEXPR VALUEs === //======================== - VULKAN_HPP_CONSTEXPR_INLINE uint32_t HeaderVersion = VK_HEADER_VERSION; + VULKAN_HPP_CONSTEXPR_INLINE uint32_t HeaderVersion = VK_HEADER_VERSION; + VULKAN_HPP_CONSTEXPR_INLINE uint32_t Use64BitPtrDefines = VK_USE_64_BIT_PTR_DEFINES; //========================= //=== CONSTEXPR CALLEEs === @@ -8150,6 +8276,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR_INLINE auto KHRFormatFeatureFlags2ExtensionName = VK_KHR_FORMAT_FEATURE_FLAGS_2_EXTENSION_NAME; VULKAN_HPP_CONSTEXPR_INLINE auto KHRFormatFeatureFlags2SpecVersion = VK_KHR_FORMAT_FEATURE_FLAGS_2_SPEC_VERSION; + //=== VK_EXT_present_mode_fifo_latest_ready === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTPresentModeFifoLatestReadyExtensionName = VK_EXT_PRESENT_MODE_FIFO_LATEST_READY_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTPresentModeFifoLatestReadySpecVersion = VK_EXT_PRESENT_MODE_FIFO_LATEST_READY_SPEC_VERSION; + #if defined( VK_USE_PLATFORM_FUCHSIA ) //=== VK_FUCHSIA_external_memory === VULKAN_HPP_CONSTEXPR_INLINE auto FUCHSIAExternalMemoryExtensionName = VK_FUCHSIA_EXTERNAL_MEMORY_EXTENSION_NAME; @@ -8568,10 +8698,26 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR_INLINE auto NVRayTracingValidationExtensionName = VK_NV_RAY_TRACING_VALIDATION_EXTENSION_NAME; VULKAN_HPP_CONSTEXPR_INLINE auto NVRayTracingValidationSpecVersion = VK_NV_RAY_TRACING_VALIDATION_SPEC_VERSION; + //=== VK_EXT_device_generated_commands === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDeviceGeneratedCommandsExtensionName = VK_EXT_DEVICE_GENERATED_COMMANDS_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDeviceGeneratedCommandsSpecVersion = VK_EXT_DEVICE_GENERATED_COMMANDS_SPEC_VERSION; + //=== VK_MESA_image_alignment_control === VULKAN_HPP_CONSTEXPR_INLINE auto MESAImageAlignmentControlExtensionName = VK_MESA_IMAGE_ALIGNMENT_CONTROL_EXTENSION_NAME; VULKAN_HPP_CONSTEXPR_INLINE auto MESAImageAlignmentControlSpecVersion = VK_MESA_IMAGE_ALIGNMENT_CONTROL_SPEC_VERSION; + //=== VK_EXT_depth_clamp_control === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDepthClampControlExtensionName = VK_EXT_DEPTH_CLAMP_CONTROL_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTDepthClampControlSpecVersion = VK_EXT_DEPTH_CLAMP_CONTROL_SPEC_VERSION; + + //=== VK_HUAWEI_hdr_vivid === + VULKAN_HPP_CONSTEXPR_INLINE auto HUAWEIHdrVividExtensionName = VK_HUAWEI_HDR_VIVID_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto HUAWEIHdrVividSpecVersion = VK_HUAWEI_HDR_VIVID_SPEC_VERSION; + + //=== VK_NV_cooperative_matrix2 === + VULKAN_HPP_CONSTEXPR_INLINE auto NVCooperativeMatrix2ExtensionName = VK_NV_COOPERATIVE_MATRIX_2_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVCooperativeMatrix2SpecVersion = 
VK_NV_COOPERATIVE_MATRIX_2_SPEC_VERSION; + } // namespace VULKAN_HPP_NAMESPACE // clang-format off @@ -8653,6 +8799,15 @@ namespace VULKAN_HPP_NAMESPACE }; }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + //=== VK_VERSION_1_1 === template <> struct StructExtends @@ -10603,70 +10758,6 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_KHR_dynamic_rendering === - template <> - struct StructExtends - { - enum - { - value = true - }; - }; - - template <> - struct StructExtends - { - enum - { - value = true - }; - }; - - template <> - struct StructExtends - { - enum - { - value = true - }; - }; - - template <> - struct StructExtends - { - enum - { - value = true - }; - }; - - template <> - struct StructExtends - { - enum - { - value = true - }; - }; - - template <> - struct StructExtends - { - enum - { - value = true - }; - }; - - template <> - struct StructExtends - { - enum - { - value = true - }; - }; - //=== VK_NV_corner_sampled_image === template <> struct StructExtends @@ -11010,6 +11101,33 @@ namespace VULKAN_HPP_NAMESPACE }; }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + //=== VK_NV_viewport_swizzle === template <> struct StructExtends @@ -11324,6 +11442,25 @@ namespace VULKAN_HPP_NAMESPACE }; # endif /*VK_ENABLE_BETA_EXTENSIONS*/ + //=== VK_AMD_mixed_attachment_samples === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + //=== VK_EXT_sample_locations === template <> struct StructExtends @@ -12041,6 +12178,15 @@ namespace VULKAN_HPP_NAMESPACE }; }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + //=== VK_INTEL_shader_integer_functions2 === template <> struct StructExtends @@ -12145,6 +12291,15 @@ namespace VULKAN_HPP_NAMESPACE }; }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + //=== VK_KHR_fragment_shading_rate === template <> struct StructExtends @@ -12191,6 +12346,15 @@ namespace VULKAN_HPP_NAMESPACE }; }; + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + //=== VK_AMD_shader_core_properties2 === template <> struct StructExtends @@ -13490,16 +13654,6 @@ namespace VULKAN_HPP_NAMESPACE }; # endif /*VK_USE_PLATFORM_METAL_EXT*/ - //=== VK_KHR_synchronization2 === - template <> - struct StructExtends - { - enum - { - value = true - }; - }; - //=== VK_EXT_descriptor_buffer === template <> struct StructExtends @@ -14143,6 +14297,25 @@ namespace VULKAN_HPP_NAMESPACE }; }; + //=== VK_EXT_present_mode_fifo_latest_ready === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + # if defined( VK_USE_PLATFORM_FUCHSIA ) //=== VK_FUCHSIA_external_memory === template <> @@ -16801,6 +16974,70 @@ namespace VULKAN_HPP_NAMESPACE }; }; + //=== VK_EXT_device_generated_commands === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + 
struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + //=== VK_MESA_image_alignment_control === template <> struct StructExtends @@ -16838,3745 +17075,3913 @@ namespace VULKAN_HPP_NAMESPACE }; }; + //=== VK_EXT_depth_clamp_control === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + //=== VK_HUAWEI_hdr_vivid === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + //=== VK_NV_cooperative_matrix2 === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + #endif // VULKAN_HPP_DISABLE_ENHANCED_MODE -#if VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL - class DynamicLoader + namespace detail { - public: -# ifdef VULKAN_HPP_NO_EXCEPTIONS - DynamicLoader( std::string const & vulkanLibraryName = {} ) VULKAN_HPP_NOEXCEPT -# else - DynamicLoader( std::string const & vulkanLibraryName = {} ) -# endif +#if VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL + class DynamicLoader { - if ( !vulkanLibraryName.empty() ) + public: +# ifdef VULKAN_HPP_NO_EXCEPTIONS + DynamicLoader( std::string const & vulkanLibraryName = {} ) VULKAN_HPP_NOEXCEPT +# else + DynamicLoader( std::string const & vulkanLibraryName = {} ) +# endif { + if ( !vulkanLibraryName.empty() ) + { # if defined( __unix__ ) || defined( __APPLE__ ) || defined( __QNX__ ) || defined( __Fuchsia__ ) - m_library = dlopen( vulkanLibraryName.c_str(), RTLD_NOW | RTLD_LOCAL ); + m_library = dlopen( vulkanLibraryName.c_str(), RTLD_NOW | RTLD_LOCAL ); # elif defined( _WIN32 ) - m_library = ::LoadLibraryA( vulkanLibraryName.c_str() ); + m_library = ::LoadLibraryA( vulkanLibraryName.c_str() ); # else # error unsupported platform # endif - } - else - { + } + else + { # if defined( __unix__ ) || defined( __QNX__ ) || defined( __Fuchsia__ ) - m_library = dlopen( "libvulkan.so", RTLD_NOW | RTLD_LOCAL ); - if ( m_library == nullptr ) - { - m_library = dlopen( "libvulkan.so.1", RTLD_NOW | RTLD_LOCAL ); - } + m_library = dlopen( "libvulkan.so", RTLD_NOW | RTLD_LOCAL ); + if ( m_library == nullptr ) + { + m_library = dlopen( "libvulkan.so.1", RTLD_NOW | RTLD_LOCAL ); + } # elif defined( __APPLE__ ) - m_library = dlopen( "libvulkan.dylib", RTLD_NOW | RTLD_LOCAL ); - if ( m_library == nullptr ) - { - m_library = dlopen( "libvulkan.1.dylib", RTLD_NOW | RTLD_LOCAL ); - } + m_library = dlopen( "libvulkan.dylib", RTLD_NOW | RTLD_LOCAL ); + if ( m_library == nullptr ) + { + m_library = dlopen( "libvulkan.1.dylib", RTLD_NOW | RTLD_LOCAL ); + } # elif defined( _WIN32 ) - m_library = ::LoadLibraryA( "vulkan-1.dll" ); + m_library = ::LoadLibraryA( "vulkan-1.dll" ); # else # error unsupported platform # endif - } + } # ifndef VULKAN_HPP_NO_EXCEPTIONS - if ( m_library == nullptr ) - { - // NOTE there should be an InitializationFailedError, but msvc insists on the symbol does not exist within the scope of this function. - throw std::runtime_error( "Failed to load vulkan library!" 
); - } + if ( m_library == nullptr ) + { + // NOTE there should be an InitializationFailedError, but msvc insists on the symbol does not exist within the scope of this function. + throw std::runtime_error( "Failed to load vulkan library!" ); + } # endif - } + } - DynamicLoader( DynamicLoader const & ) = delete; + DynamicLoader( DynamicLoader const & ) = delete; - DynamicLoader( DynamicLoader && other ) VULKAN_HPP_NOEXCEPT : m_library( other.m_library ) - { - other.m_library = nullptr; - } + DynamicLoader( DynamicLoader && other ) VULKAN_HPP_NOEXCEPT : m_library( other.m_library ) + { + other.m_library = nullptr; + } - DynamicLoader & operator=( DynamicLoader const & ) = delete; + DynamicLoader & operator=( DynamicLoader const & ) = delete; - DynamicLoader & operator=( DynamicLoader && other ) VULKAN_HPP_NOEXCEPT - { - std::swap( m_library, other.m_library ); - return *this; - } + DynamicLoader & operator=( DynamicLoader && other ) VULKAN_HPP_NOEXCEPT + { + std::swap( m_library, other.m_library ); + return *this; + } - ~DynamicLoader() VULKAN_HPP_NOEXCEPT - { - if ( m_library ) + ~DynamicLoader() VULKAN_HPP_NOEXCEPT + { + if ( m_library ) + { +# if defined( __unix__ ) || defined( __APPLE__ ) || defined( __QNX__ ) || defined( __Fuchsia__ ) + dlclose( m_library ); +# elif defined( _WIN32 ) + ::FreeLibrary( m_library ); +# else +# error unsupported platform +# endif + } + } + + template + T getProcAddress( const char * function ) const VULKAN_HPP_NOEXCEPT { # if defined( __unix__ ) || defined( __APPLE__ ) || defined( __QNX__ ) || defined( __Fuchsia__ ) - dlclose( m_library ); + return (T)dlsym( m_library, function ); # elif defined( _WIN32 ) - ::FreeLibrary( m_library ); + return ( T )::GetProcAddress( m_library, function ); # else # error unsupported platform # endif } - } - template - T getProcAddress( const char * function ) const VULKAN_HPP_NOEXCEPT - { + bool success() const VULKAN_HPP_NOEXCEPT + { + return m_library != nullptr; + } + + private: # if defined( __unix__ ) || defined( __APPLE__ ) || defined( __QNX__ ) || defined( __Fuchsia__ ) - return (T)dlsym( m_library, function ); + void * m_library; # elif defined( _WIN32 ) - return ( T )::GetProcAddress( m_library, function ); + ::HINSTANCE m_library; # else # error unsupported platform # endif - } - - bool success() const VULKAN_HPP_NOEXCEPT - { - return m_library != nullptr; - } - - private: -# if defined( __unix__ ) || defined( __APPLE__ ) || defined( __QNX__ ) || defined( __Fuchsia__ ) - void * m_library; -# elif defined( _WIN32 ) - ::HINSTANCE m_library; -# else -# error unsupported platform -# endif - }; + }; #endif - using PFN_dummy = void ( * )(); + using PFN_dummy = void ( * )(); - class DispatchLoaderDynamic : public DispatchLoaderBase - { - public: - //=== VK_VERSION_1_0 === - PFN_vkCreateInstance vkCreateInstance = 0; - PFN_vkDestroyInstance vkDestroyInstance = 0; - PFN_vkEnumeratePhysicalDevices vkEnumeratePhysicalDevices = 0; - PFN_vkGetPhysicalDeviceFeatures vkGetPhysicalDeviceFeatures = 0; - PFN_vkGetPhysicalDeviceFormatProperties vkGetPhysicalDeviceFormatProperties = 0; - PFN_vkGetPhysicalDeviceImageFormatProperties vkGetPhysicalDeviceImageFormatProperties = 0; - PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties = 0; - PFN_vkGetPhysicalDeviceQueueFamilyProperties vkGetPhysicalDeviceQueueFamilyProperties = 0; - PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties = 0; - PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr = 0; - PFN_vkGetDeviceProcAddr vkGetDeviceProcAddr = 0; - 
PFN_vkCreateDevice vkCreateDevice = 0; - PFN_vkDestroyDevice vkDestroyDevice = 0; - PFN_vkEnumerateInstanceExtensionProperties vkEnumerateInstanceExtensionProperties = 0; - PFN_vkEnumerateDeviceExtensionProperties vkEnumerateDeviceExtensionProperties = 0; - PFN_vkEnumerateInstanceLayerProperties vkEnumerateInstanceLayerProperties = 0; - PFN_vkEnumerateDeviceLayerProperties vkEnumerateDeviceLayerProperties = 0; - PFN_vkGetDeviceQueue vkGetDeviceQueue = 0; - PFN_vkQueueSubmit vkQueueSubmit = 0; - PFN_vkQueueWaitIdle vkQueueWaitIdle = 0; - PFN_vkDeviceWaitIdle vkDeviceWaitIdle = 0; - PFN_vkAllocateMemory vkAllocateMemory = 0; - PFN_vkFreeMemory vkFreeMemory = 0; - PFN_vkMapMemory vkMapMemory = 0; - PFN_vkUnmapMemory vkUnmapMemory = 0; - PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges = 0; - PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges = 0; - PFN_vkGetDeviceMemoryCommitment vkGetDeviceMemoryCommitment = 0; - PFN_vkBindBufferMemory vkBindBufferMemory = 0; - PFN_vkBindImageMemory vkBindImageMemory = 0; - PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements = 0; - PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements = 0; - PFN_vkGetImageSparseMemoryRequirements vkGetImageSparseMemoryRequirements = 0; - PFN_vkGetPhysicalDeviceSparseImageFormatProperties vkGetPhysicalDeviceSparseImageFormatProperties = 0; - PFN_vkQueueBindSparse vkQueueBindSparse = 0; - PFN_vkCreateFence vkCreateFence = 0; - PFN_vkDestroyFence vkDestroyFence = 0; - PFN_vkResetFences vkResetFences = 0; - PFN_vkGetFenceStatus vkGetFenceStatus = 0; - PFN_vkWaitForFences vkWaitForFences = 0; - PFN_vkCreateSemaphore vkCreateSemaphore = 0; - PFN_vkDestroySemaphore vkDestroySemaphore = 0; - PFN_vkCreateEvent vkCreateEvent = 0; - PFN_vkDestroyEvent vkDestroyEvent = 0; - PFN_vkGetEventStatus vkGetEventStatus = 0; - PFN_vkSetEvent vkSetEvent = 0; - PFN_vkResetEvent vkResetEvent = 0; - PFN_vkCreateQueryPool vkCreateQueryPool = 0; - PFN_vkDestroyQueryPool vkDestroyQueryPool = 0; - PFN_vkGetQueryPoolResults vkGetQueryPoolResults = 0; - PFN_vkCreateBuffer vkCreateBuffer = 0; - PFN_vkDestroyBuffer vkDestroyBuffer = 0; - PFN_vkCreateBufferView vkCreateBufferView = 0; - PFN_vkDestroyBufferView vkDestroyBufferView = 0; - PFN_vkCreateImage vkCreateImage = 0; - PFN_vkDestroyImage vkDestroyImage = 0; - PFN_vkGetImageSubresourceLayout vkGetImageSubresourceLayout = 0; - PFN_vkCreateImageView vkCreateImageView = 0; - PFN_vkDestroyImageView vkDestroyImageView = 0; - PFN_vkCreateShaderModule vkCreateShaderModule = 0; - PFN_vkDestroyShaderModule vkDestroyShaderModule = 0; - PFN_vkCreatePipelineCache vkCreatePipelineCache = 0; - PFN_vkDestroyPipelineCache vkDestroyPipelineCache = 0; - PFN_vkGetPipelineCacheData vkGetPipelineCacheData = 0; - PFN_vkMergePipelineCaches vkMergePipelineCaches = 0; - PFN_vkCreateGraphicsPipelines vkCreateGraphicsPipelines = 0; - PFN_vkCreateComputePipelines vkCreateComputePipelines = 0; - PFN_vkDestroyPipeline vkDestroyPipeline = 0; - PFN_vkCreatePipelineLayout vkCreatePipelineLayout = 0; - PFN_vkDestroyPipelineLayout vkDestroyPipelineLayout = 0; - PFN_vkCreateSampler vkCreateSampler = 0; - PFN_vkDestroySampler vkDestroySampler = 0; - PFN_vkCreateDescriptorSetLayout vkCreateDescriptorSetLayout = 0; - PFN_vkDestroyDescriptorSetLayout vkDestroyDescriptorSetLayout = 0; - PFN_vkCreateDescriptorPool vkCreateDescriptorPool = 0; - PFN_vkDestroyDescriptorPool vkDestroyDescriptorPool = 0; - PFN_vkResetDescriptorPool vkResetDescriptorPool = 0; - PFN_vkAllocateDescriptorSets 
vkAllocateDescriptorSets = 0; - PFN_vkFreeDescriptorSets vkFreeDescriptorSets = 0; - PFN_vkUpdateDescriptorSets vkUpdateDescriptorSets = 0; - PFN_vkCreateFramebuffer vkCreateFramebuffer = 0; - PFN_vkDestroyFramebuffer vkDestroyFramebuffer = 0; - PFN_vkCreateRenderPass vkCreateRenderPass = 0; - PFN_vkDestroyRenderPass vkDestroyRenderPass = 0; - PFN_vkGetRenderAreaGranularity vkGetRenderAreaGranularity = 0; - PFN_vkCreateCommandPool vkCreateCommandPool = 0; - PFN_vkDestroyCommandPool vkDestroyCommandPool = 0; - PFN_vkResetCommandPool vkResetCommandPool = 0; - PFN_vkAllocateCommandBuffers vkAllocateCommandBuffers = 0; - PFN_vkFreeCommandBuffers vkFreeCommandBuffers = 0; - PFN_vkBeginCommandBuffer vkBeginCommandBuffer = 0; - PFN_vkEndCommandBuffer vkEndCommandBuffer = 0; - PFN_vkResetCommandBuffer vkResetCommandBuffer = 0; - PFN_vkCmdBindPipeline vkCmdBindPipeline = 0; - PFN_vkCmdSetViewport vkCmdSetViewport = 0; - PFN_vkCmdSetScissor vkCmdSetScissor = 0; - PFN_vkCmdSetLineWidth vkCmdSetLineWidth = 0; - PFN_vkCmdSetDepthBias vkCmdSetDepthBias = 0; - PFN_vkCmdSetBlendConstants vkCmdSetBlendConstants = 0; - PFN_vkCmdSetDepthBounds vkCmdSetDepthBounds = 0; - PFN_vkCmdSetStencilCompareMask vkCmdSetStencilCompareMask = 0; - PFN_vkCmdSetStencilWriteMask vkCmdSetStencilWriteMask = 0; - PFN_vkCmdSetStencilReference vkCmdSetStencilReference = 0; - PFN_vkCmdBindDescriptorSets vkCmdBindDescriptorSets = 0; - PFN_vkCmdBindIndexBuffer vkCmdBindIndexBuffer = 0; - PFN_vkCmdBindVertexBuffers vkCmdBindVertexBuffers = 0; - PFN_vkCmdDraw vkCmdDraw = 0; - PFN_vkCmdDrawIndexed vkCmdDrawIndexed = 0; - PFN_vkCmdDrawIndirect vkCmdDrawIndirect = 0; - PFN_vkCmdDrawIndexedIndirect vkCmdDrawIndexedIndirect = 0; - PFN_vkCmdDispatch vkCmdDispatch = 0; - PFN_vkCmdDispatchIndirect vkCmdDispatchIndirect = 0; - PFN_vkCmdCopyBuffer vkCmdCopyBuffer = 0; - PFN_vkCmdCopyImage vkCmdCopyImage = 0; - PFN_vkCmdBlitImage vkCmdBlitImage = 0; - PFN_vkCmdCopyBufferToImage vkCmdCopyBufferToImage = 0; - PFN_vkCmdCopyImageToBuffer vkCmdCopyImageToBuffer = 0; - PFN_vkCmdUpdateBuffer vkCmdUpdateBuffer = 0; - PFN_vkCmdFillBuffer vkCmdFillBuffer = 0; - PFN_vkCmdClearColorImage vkCmdClearColorImage = 0; - PFN_vkCmdClearDepthStencilImage vkCmdClearDepthStencilImage = 0; - PFN_vkCmdClearAttachments vkCmdClearAttachments = 0; - PFN_vkCmdResolveImage vkCmdResolveImage = 0; - PFN_vkCmdSetEvent vkCmdSetEvent = 0; - PFN_vkCmdResetEvent vkCmdResetEvent = 0; - PFN_vkCmdWaitEvents vkCmdWaitEvents = 0; - PFN_vkCmdPipelineBarrier vkCmdPipelineBarrier = 0; - PFN_vkCmdBeginQuery vkCmdBeginQuery = 0; - PFN_vkCmdEndQuery vkCmdEndQuery = 0; - PFN_vkCmdResetQueryPool vkCmdResetQueryPool = 0; - PFN_vkCmdWriteTimestamp vkCmdWriteTimestamp = 0; - PFN_vkCmdCopyQueryPoolResults vkCmdCopyQueryPoolResults = 0; - PFN_vkCmdPushConstants vkCmdPushConstants = 0; - PFN_vkCmdBeginRenderPass vkCmdBeginRenderPass = 0; - PFN_vkCmdNextSubpass vkCmdNextSubpass = 0; - PFN_vkCmdEndRenderPass vkCmdEndRenderPass = 0; - PFN_vkCmdExecuteCommands vkCmdExecuteCommands = 0; - - //=== VK_VERSION_1_1 === - PFN_vkEnumerateInstanceVersion vkEnumerateInstanceVersion = 0; - PFN_vkBindBufferMemory2 vkBindBufferMemory2 = 0; - PFN_vkBindImageMemory2 vkBindImageMemory2 = 0; - PFN_vkGetDeviceGroupPeerMemoryFeatures vkGetDeviceGroupPeerMemoryFeatures = 0; - PFN_vkCmdSetDeviceMask vkCmdSetDeviceMask = 0; - PFN_vkCmdDispatchBase vkCmdDispatchBase = 0; - PFN_vkEnumeratePhysicalDeviceGroups vkEnumeratePhysicalDeviceGroups = 0; - PFN_vkGetImageMemoryRequirements2 vkGetImageMemoryRequirements2 = 0; - 
PFN_vkGetBufferMemoryRequirements2 vkGetBufferMemoryRequirements2 = 0; - PFN_vkGetImageSparseMemoryRequirements2 vkGetImageSparseMemoryRequirements2 = 0; - PFN_vkGetPhysicalDeviceFeatures2 vkGetPhysicalDeviceFeatures2 = 0; - PFN_vkGetPhysicalDeviceProperties2 vkGetPhysicalDeviceProperties2 = 0; - PFN_vkGetPhysicalDeviceFormatProperties2 vkGetPhysicalDeviceFormatProperties2 = 0; - PFN_vkGetPhysicalDeviceImageFormatProperties2 vkGetPhysicalDeviceImageFormatProperties2 = 0; - PFN_vkGetPhysicalDeviceQueueFamilyProperties2 vkGetPhysicalDeviceQueueFamilyProperties2 = 0; - PFN_vkGetPhysicalDeviceMemoryProperties2 vkGetPhysicalDeviceMemoryProperties2 = 0; - PFN_vkGetPhysicalDeviceSparseImageFormatProperties2 vkGetPhysicalDeviceSparseImageFormatProperties2 = 0; - PFN_vkTrimCommandPool vkTrimCommandPool = 0; - PFN_vkGetDeviceQueue2 vkGetDeviceQueue2 = 0; - PFN_vkCreateSamplerYcbcrConversion vkCreateSamplerYcbcrConversion = 0; - PFN_vkDestroySamplerYcbcrConversion vkDestroySamplerYcbcrConversion = 0; - PFN_vkCreateDescriptorUpdateTemplate vkCreateDescriptorUpdateTemplate = 0; - PFN_vkDestroyDescriptorUpdateTemplate vkDestroyDescriptorUpdateTemplate = 0; - PFN_vkUpdateDescriptorSetWithTemplate vkUpdateDescriptorSetWithTemplate = 0; - PFN_vkGetPhysicalDeviceExternalBufferProperties vkGetPhysicalDeviceExternalBufferProperties = 0; - PFN_vkGetPhysicalDeviceExternalFenceProperties vkGetPhysicalDeviceExternalFenceProperties = 0; - PFN_vkGetPhysicalDeviceExternalSemaphoreProperties vkGetPhysicalDeviceExternalSemaphoreProperties = 0; - PFN_vkGetDescriptorSetLayoutSupport vkGetDescriptorSetLayoutSupport = 0; - - //=== VK_VERSION_1_2 === - PFN_vkCmdDrawIndirectCount vkCmdDrawIndirectCount = 0; - PFN_vkCmdDrawIndexedIndirectCount vkCmdDrawIndexedIndirectCount = 0; - PFN_vkCreateRenderPass2 vkCreateRenderPass2 = 0; - PFN_vkCmdBeginRenderPass2 vkCmdBeginRenderPass2 = 0; - PFN_vkCmdNextSubpass2 vkCmdNextSubpass2 = 0; - PFN_vkCmdEndRenderPass2 vkCmdEndRenderPass2 = 0; - PFN_vkResetQueryPool vkResetQueryPool = 0; - PFN_vkGetSemaphoreCounterValue vkGetSemaphoreCounterValue = 0; - PFN_vkWaitSemaphores vkWaitSemaphores = 0; - PFN_vkSignalSemaphore vkSignalSemaphore = 0; - PFN_vkGetBufferDeviceAddress vkGetBufferDeviceAddress = 0; - PFN_vkGetBufferOpaqueCaptureAddress vkGetBufferOpaqueCaptureAddress = 0; - PFN_vkGetDeviceMemoryOpaqueCaptureAddress vkGetDeviceMemoryOpaqueCaptureAddress = 0; - - //=== VK_VERSION_1_3 === - PFN_vkGetPhysicalDeviceToolProperties vkGetPhysicalDeviceToolProperties = 0; - PFN_vkCreatePrivateDataSlot vkCreatePrivateDataSlot = 0; - PFN_vkDestroyPrivateDataSlot vkDestroyPrivateDataSlot = 0; - PFN_vkSetPrivateData vkSetPrivateData = 0; - PFN_vkGetPrivateData vkGetPrivateData = 0; - PFN_vkCmdSetEvent2 vkCmdSetEvent2 = 0; - PFN_vkCmdResetEvent2 vkCmdResetEvent2 = 0; - PFN_vkCmdWaitEvents2 vkCmdWaitEvents2 = 0; - PFN_vkCmdPipelineBarrier2 vkCmdPipelineBarrier2 = 0; - PFN_vkCmdWriteTimestamp2 vkCmdWriteTimestamp2 = 0; - PFN_vkQueueSubmit2 vkQueueSubmit2 = 0; - PFN_vkCmdCopyBuffer2 vkCmdCopyBuffer2 = 0; - PFN_vkCmdCopyImage2 vkCmdCopyImage2 = 0; - PFN_vkCmdCopyBufferToImage2 vkCmdCopyBufferToImage2 = 0; - PFN_vkCmdCopyImageToBuffer2 vkCmdCopyImageToBuffer2 = 0; - PFN_vkCmdBlitImage2 vkCmdBlitImage2 = 0; - PFN_vkCmdResolveImage2 vkCmdResolveImage2 = 0; - PFN_vkCmdBeginRendering vkCmdBeginRendering = 0; - PFN_vkCmdEndRendering vkCmdEndRendering = 0; - PFN_vkCmdSetCullMode vkCmdSetCullMode = 0; - PFN_vkCmdSetFrontFace vkCmdSetFrontFace = 0; - PFN_vkCmdSetPrimitiveTopology vkCmdSetPrimitiveTopology = 0; - 
PFN_vkCmdSetViewportWithCount vkCmdSetViewportWithCount = 0; - PFN_vkCmdSetScissorWithCount vkCmdSetScissorWithCount = 0; - PFN_vkCmdBindVertexBuffers2 vkCmdBindVertexBuffers2 = 0; - PFN_vkCmdSetDepthTestEnable vkCmdSetDepthTestEnable = 0; - PFN_vkCmdSetDepthWriteEnable vkCmdSetDepthWriteEnable = 0; - PFN_vkCmdSetDepthCompareOp vkCmdSetDepthCompareOp = 0; - PFN_vkCmdSetDepthBoundsTestEnable vkCmdSetDepthBoundsTestEnable = 0; - PFN_vkCmdSetStencilTestEnable vkCmdSetStencilTestEnable = 0; - PFN_vkCmdSetStencilOp vkCmdSetStencilOp = 0; - PFN_vkCmdSetRasterizerDiscardEnable vkCmdSetRasterizerDiscardEnable = 0; - PFN_vkCmdSetDepthBiasEnable vkCmdSetDepthBiasEnable = 0; - PFN_vkCmdSetPrimitiveRestartEnable vkCmdSetPrimitiveRestartEnable = 0; - PFN_vkGetDeviceBufferMemoryRequirements vkGetDeviceBufferMemoryRequirements = 0; - PFN_vkGetDeviceImageMemoryRequirements vkGetDeviceImageMemoryRequirements = 0; - PFN_vkGetDeviceImageSparseMemoryRequirements vkGetDeviceImageSparseMemoryRequirements = 0; - - //=== VK_KHR_surface === - PFN_vkDestroySurfaceKHR vkDestroySurfaceKHR = 0; - PFN_vkGetPhysicalDeviceSurfaceSupportKHR vkGetPhysicalDeviceSurfaceSupportKHR = 0; - PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR vkGetPhysicalDeviceSurfaceCapabilitiesKHR = 0; - PFN_vkGetPhysicalDeviceSurfaceFormatsKHR vkGetPhysicalDeviceSurfaceFormatsKHR = 0; - PFN_vkGetPhysicalDeviceSurfacePresentModesKHR vkGetPhysicalDeviceSurfacePresentModesKHR = 0; - - //=== VK_KHR_swapchain === - PFN_vkCreateSwapchainKHR vkCreateSwapchainKHR = 0; - PFN_vkDestroySwapchainKHR vkDestroySwapchainKHR = 0; - PFN_vkGetSwapchainImagesKHR vkGetSwapchainImagesKHR = 0; - PFN_vkAcquireNextImageKHR vkAcquireNextImageKHR = 0; - PFN_vkQueuePresentKHR vkQueuePresentKHR = 0; - PFN_vkGetDeviceGroupPresentCapabilitiesKHR vkGetDeviceGroupPresentCapabilitiesKHR = 0; - PFN_vkGetDeviceGroupSurfacePresentModesKHR vkGetDeviceGroupSurfacePresentModesKHR = 0; - PFN_vkGetPhysicalDevicePresentRectanglesKHR vkGetPhysicalDevicePresentRectanglesKHR = 0; - PFN_vkAcquireNextImage2KHR vkAcquireNextImage2KHR = 0; - - //=== VK_KHR_display === - PFN_vkGetPhysicalDeviceDisplayPropertiesKHR vkGetPhysicalDeviceDisplayPropertiesKHR = 0; - PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR vkGetPhysicalDeviceDisplayPlanePropertiesKHR = 0; - PFN_vkGetDisplayPlaneSupportedDisplaysKHR vkGetDisplayPlaneSupportedDisplaysKHR = 0; - PFN_vkGetDisplayModePropertiesKHR vkGetDisplayModePropertiesKHR = 0; - PFN_vkCreateDisplayModeKHR vkCreateDisplayModeKHR = 0; - PFN_vkGetDisplayPlaneCapabilitiesKHR vkGetDisplayPlaneCapabilitiesKHR = 0; - PFN_vkCreateDisplayPlaneSurfaceKHR vkCreateDisplayPlaneSurfaceKHR = 0; - - //=== VK_KHR_display_swapchain === - PFN_vkCreateSharedSwapchainsKHR vkCreateSharedSwapchainsKHR = 0; - -#if defined( VK_USE_PLATFORM_XLIB_KHR ) - //=== VK_KHR_xlib_surface === - PFN_vkCreateXlibSurfaceKHR vkCreateXlibSurfaceKHR = 0; - PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR vkGetPhysicalDeviceXlibPresentationSupportKHR = 0; -#else - PFN_dummy vkCreateXlibSurfaceKHR_placeholder = 0; - PFN_dummy vkGetPhysicalDeviceXlibPresentationSupportKHR_placeholder = 0; -#endif /*VK_USE_PLATFORM_XLIB_KHR*/ - -#if defined( VK_USE_PLATFORM_XCB_KHR ) - //=== VK_KHR_xcb_surface === - PFN_vkCreateXcbSurfaceKHR vkCreateXcbSurfaceKHR = 0; - PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR vkGetPhysicalDeviceXcbPresentationSupportKHR = 0; -#else - PFN_dummy vkCreateXcbSurfaceKHR_placeholder = 0; - PFN_dummy vkGetPhysicalDeviceXcbPresentationSupportKHR_placeholder = 0; -#endif 
/*VK_USE_PLATFORM_XCB_KHR*/ - -#if defined( VK_USE_PLATFORM_WAYLAND_KHR ) - //=== VK_KHR_wayland_surface === - PFN_vkCreateWaylandSurfaceKHR vkCreateWaylandSurfaceKHR = 0; - PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR vkGetPhysicalDeviceWaylandPresentationSupportKHR = 0; -#else - PFN_dummy vkCreateWaylandSurfaceKHR_placeholder = 0; - PFN_dummy vkGetPhysicalDeviceWaylandPresentationSupportKHR_placeholder = 0; -#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ - -#if defined( VK_USE_PLATFORM_ANDROID_KHR ) - //=== VK_KHR_android_surface === - PFN_vkCreateAndroidSurfaceKHR vkCreateAndroidSurfaceKHR = 0; -#else - PFN_dummy vkCreateAndroidSurfaceKHR_placeholder = 0; -#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ - -#if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_KHR_win32_surface === - PFN_vkCreateWin32SurfaceKHR vkCreateWin32SurfaceKHR = 0; - PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR vkGetPhysicalDeviceWin32PresentationSupportKHR = 0; -#else - PFN_dummy vkCreateWin32SurfaceKHR_placeholder = 0; - PFN_dummy vkGetPhysicalDeviceWin32PresentationSupportKHR_placeholder = 0; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - //=== VK_EXT_debug_report === - PFN_vkCreateDebugReportCallbackEXT vkCreateDebugReportCallbackEXT = 0; - PFN_vkDestroyDebugReportCallbackEXT vkDestroyDebugReportCallbackEXT = 0; - PFN_vkDebugReportMessageEXT vkDebugReportMessageEXT = 0; - - //=== VK_EXT_debug_marker === - PFN_vkDebugMarkerSetObjectTagEXT vkDebugMarkerSetObjectTagEXT = 0; - PFN_vkDebugMarkerSetObjectNameEXT vkDebugMarkerSetObjectNameEXT = 0; - PFN_vkCmdDebugMarkerBeginEXT vkCmdDebugMarkerBeginEXT = 0; - PFN_vkCmdDebugMarkerEndEXT vkCmdDebugMarkerEndEXT = 0; - PFN_vkCmdDebugMarkerInsertEXT vkCmdDebugMarkerInsertEXT = 0; - - //=== VK_KHR_video_queue === - PFN_vkGetPhysicalDeviceVideoCapabilitiesKHR vkGetPhysicalDeviceVideoCapabilitiesKHR = 0; - PFN_vkGetPhysicalDeviceVideoFormatPropertiesKHR vkGetPhysicalDeviceVideoFormatPropertiesKHR = 0; - PFN_vkCreateVideoSessionKHR vkCreateVideoSessionKHR = 0; - PFN_vkDestroyVideoSessionKHR vkDestroyVideoSessionKHR = 0; - PFN_vkGetVideoSessionMemoryRequirementsKHR vkGetVideoSessionMemoryRequirementsKHR = 0; - PFN_vkBindVideoSessionMemoryKHR vkBindVideoSessionMemoryKHR = 0; - PFN_vkCreateVideoSessionParametersKHR vkCreateVideoSessionParametersKHR = 0; - PFN_vkUpdateVideoSessionParametersKHR vkUpdateVideoSessionParametersKHR = 0; - PFN_vkDestroyVideoSessionParametersKHR vkDestroyVideoSessionParametersKHR = 0; - PFN_vkCmdBeginVideoCodingKHR vkCmdBeginVideoCodingKHR = 0; - PFN_vkCmdEndVideoCodingKHR vkCmdEndVideoCodingKHR = 0; - PFN_vkCmdControlVideoCodingKHR vkCmdControlVideoCodingKHR = 0; - - //=== VK_KHR_video_decode_queue === - PFN_vkCmdDecodeVideoKHR vkCmdDecodeVideoKHR = 0; - - //=== VK_EXT_transform_feedback === - PFN_vkCmdBindTransformFeedbackBuffersEXT vkCmdBindTransformFeedbackBuffersEXT = 0; - PFN_vkCmdBeginTransformFeedbackEXT vkCmdBeginTransformFeedbackEXT = 0; - PFN_vkCmdEndTransformFeedbackEXT vkCmdEndTransformFeedbackEXT = 0; - PFN_vkCmdBeginQueryIndexedEXT vkCmdBeginQueryIndexedEXT = 0; - PFN_vkCmdEndQueryIndexedEXT vkCmdEndQueryIndexedEXT = 0; - PFN_vkCmdDrawIndirectByteCountEXT vkCmdDrawIndirectByteCountEXT = 0; - - //=== VK_NVX_binary_import === - PFN_vkCreateCuModuleNVX vkCreateCuModuleNVX = 0; - PFN_vkCreateCuFunctionNVX vkCreateCuFunctionNVX = 0; - PFN_vkDestroyCuModuleNVX vkDestroyCuModuleNVX = 0; - PFN_vkDestroyCuFunctionNVX vkDestroyCuFunctionNVX = 0; - PFN_vkCmdCuLaunchKernelNVX vkCmdCuLaunchKernelNVX = 0; - - //=== VK_NVX_image_view_handle === - 
PFN_vkGetImageViewHandleNVX vkGetImageViewHandleNVX = 0; - PFN_vkGetImageViewAddressNVX vkGetImageViewAddressNVX = 0; - - //=== VK_AMD_draw_indirect_count === - PFN_vkCmdDrawIndirectCountAMD vkCmdDrawIndirectCountAMD = 0; - PFN_vkCmdDrawIndexedIndirectCountAMD vkCmdDrawIndexedIndirectCountAMD = 0; - - //=== VK_AMD_shader_info === - PFN_vkGetShaderInfoAMD vkGetShaderInfoAMD = 0; - - //=== VK_KHR_dynamic_rendering === - PFN_vkCmdBeginRenderingKHR vkCmdBeginRenderingKHR = 0; - PFN_vkCmdEndRenderingKHR vkCmdEndRenderingKHR = 0; - -#if defined( VK_USE_PLATFORM_GGP ) - //=== VK_GGP_stream_descriptor_surface === - PFN_vkCreateStreamDescriptorSurfaceGGP vkCreateStreamDescriptorSurfaceGGP = 0; -#else - PFN_dummy vkCreateStreamDescriptorSurfaceGGP_placeholder = 0; -#endif /*VK_USE_PLATFORM_GGP*/ - - //=== VK_NV_external_memory_capabilities === - PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV vkGetPhysicalDeviceExternalImageFormatPropertiesNV = 0; - -#if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_NV_external_memory_win32 === - PFN_vkGetMemoryWin32HandleNV vkGetMemoryWin32HandleNV = 0; -#else - PFN_dummy vkGetMemoryWin32HandleNV_placeholder = 0; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - //=== VK_KHR_get_physical_device_properties2 === - PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR = 0; - PFN_vkGetPhysicalDeviceProperties2KHR vkGetPhysicalDeviceProperties2KHR = 0; - PFN_vkGetPhysicalDeviceFormatProperties2KHR vkGetPhysicalDeviceFormatProperties2KHR = 0; - PFN_vkGetPhysicalDeviceImageFormatProperties2KHR vkGetPhysicalDeviceImageFormatProperties2KHR = 0; - PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR vkGetPhysicalDeviceQueueFamilyProperties2KHR = 0; - PFN_vkGetPhysicalDeviceMemoryProperties2KHR vkGetPhysicalDeviceMemoryProperties2KHR = 0; - PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR vkGetPhysicalDeviceSparseImageFormatProperties2KHR = 0; - - //=== VK_KHR_device_group === - PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR vkGetDeviceGroupPeerMemoryFeaturesKHR = 0; - PFN_vkCmdSetDeviceMaskKHR vkCmdSetDeviceMaskKHR = 0; - PFN_vkCmdDispatchBaseKHR vkCmdDispatchBaseKHR = 0; - -#if defined( VK_USE_PLATFORM_VI_NN ) - //=== VK_NN_vi_surface === - PFN_vkCreateViSurfaceNN vkCreateViSurfaceNN = 0; -#else - PFN_dummy vkCreateViSurfaceNN_placeholder = 0; -#endif /*VK_USE_PLATFORM_VI_NN*/ - - //=== VK_KHR_maintenance1 === - PFN_vkTrimCommandPoolKHR vkTrimCommandPoolKHR = 0; - - //=== VK_KHR_device_group_creation === - PFN_vkEnumeratePhysicalDeviceGroupsKHR vkEnumeratePhysicalDeviceGroupsKHR = 0; - - //=== VK_KHR_external_memory_capabilities === - PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR vkGetPhysicalDeviceExternalBufferPropertiesKHR = 0; - -#if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_KHR_external_memory_win32 === - PFN_vkGetMemoryWin32HandleKHR vkGetMemoryWin32HandleKHR = 0; - PFN_vkGetMemoryWin32HandlePropertiesKHR vkGetMemoryWin32HandlePropertiesKHR = 0; -#else - PFN_dummy vkGetMemoryWin32HandleKHR_placeholder = 0; - PFN_dummy vkGetMemoryWin32HandlePropertiesKHR_placeholder = 0; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - //=== VK_KHR_external_memory_fd === - PFN_vkGetMemoryFdKHR vkGetMemoryFdKHR = 0; - PFN_vkGetMemoryFdPropertiesKHR vkGetMemoryFdPropertiesKHR = 0; - - //=== VK_KHR_external_semaphore_capabilities === - PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR vkGetPhysicalDeviceExternalSemaphorePropertiesKHR = 0; - -#if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_KHR_external_semaphore_win32 === - PFN_vkImportSemaphoreWin32HandleKHR 
vkImportSemaphoreWin32HandleKHR = 0; - PFN_vkGetSemaphoreWin32HandleKHR vkGetSemaphoreWin32HandleKHR = 0; -#else - PFN_dummy vkImportSemaphoreWin32HandleKHR_placeholder = 0; - PFN_dummy vkGetSemaphoreWin32HandleKHR_placeholder = 0; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - //=== VK_KHR_external_semaphore_fd === - PFN_vkImportSemaphoreFdKHR vkImportSemaphoreFdKHR = 0; - PFN_vkGetSemaphoreFdKHR vkGetSemaphoreFdKHR = 0; - - //=== VK_KHR_push_descriptor === - PFN_vkCmdPushDescriptorSetKHR vkCmdPushDescriptorSetKHR = 0; - PFN_vkCmdPushDescriptorSetWithTemplateKHR vkCmdPushDescriptorSetWithTemplateKHR = 0; - - //=== VK_EXT_conditional_rendering === - PFN_vkCmdBeginConditionalRenderingEXT vkCmdBeginConditionalRenderingEXT = 0; - PFN_vkCmdEndConditionalRenderingEXT vkCmdEndConditionalRenderingEXT = 0; - - //=== VK_KHR_descriptor_update_template === - PFN_vkCreateDescriptorUpdateTemplateKHR vkCreateDescriptorUpdateTemplateKHR = 0; - PFN_vkDestroyDescriptorUpdateTemplateKHR vkDestroyDescriptorUpdateTemplateKHR = 0; - PFN_vkUpdateDescriptorSetWithTemplateKHR vkUpdateDescriptorSetWithTemplateKHR = 0; - - //=== VK_NV_clip_space_w_scaling === - PFN_vkCmdSetViewportWScalingNV vkCmdSetViewportWScalingNV = 0; - - //=== VK_EXT_direct_mode_display === - PFN_vkReleaseDisplayEXT vkReleaseDisplayEXT = 0; - -#if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT ) - //=== VK_EXT_acquire_xlib_display === - PFN_vkAcquireXlibDisplayEXT vkAcquireXlibDisplayEXT = 0; - PFN_vkGetRandROutputDisplayEXT vkGetRandROutputDisplayEXT = 0; -#else - PFN_dummy vkAcquireXlibDisplayEXT_placeholder = 0; - PFN_dummy vkGetRandROutputDisplayEXT_placeholder = 0; -#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ - - //=== VK_EXT_display_surface_counter === - PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT vkGetPhysicalDeviceSurfaceCapabilities2EXT = 0; - - //=== VK_EXT_display_control === - PFN_vkDisplayPowerControlEXT vkDisplayPowerControlEXT = 0; - PFN_vkRegisterDeviceEventEXT vkRegisterDeviceEventEXT = 0; - PFN_vkRegisterDisplayEventEXT vkRegisterDisplayEventEXT = 0; - PFN_vkGetSwapchainCounterEXT vkGetSwapchainCounterEXT = 0; - - //=== VK_GOOGLE_display_timing === - PFN_vkGetRefreshCycleDurationGOOGLE vkGetRefreshCycleDurationGOOGLE = 0; - PFN_vkGetPastPresentationTimingGOOGLE vkGetPastPresentationTimingGOOGLE = 0; - - //=== VK_EXT_discard_rectangles === - PFN_vkCmdSetDiscardRectangleEXT vkCmdSetDiscardRectangleEXT = 0; - PFN_vkCmdSetDiscardRectangleEnableEXT vkCmdSetDiscardRectangleEnableEXT = 0; - PFN_vkCmdSetDiscardRectangleModeEXT vkCmdSetDiscardRectangleModeEXT = 0; - - //=== VK_EXT_hdr_metadata === - PFN_vkSetHdrMetadataEXT vkSetHdrMetadataEXT = 0; - - //=== VK_KHR_create_renderpass2 === - PFN_vkCreateRenderPass2KHR vkCreateRenderPass2KHR = 0; - PFN_vkCmdBeginRenderPass2KHR vkCmdBeginRenderPass2KHR = 0; - PFN_vkCmdNextSubpass2KHR vkCmdNextSubpass2KHR = 0; - PFN_vkCmdEndRenderPass2KHR vkCmdEndRenderPass2KHR = 0; - - //=== VK_KHR_shared_presentable_image === - PFN_vkGetSwapchainStatusKHR vkGetSwapchainStatusKHR = 0; - - //=== VK_KHR_external_fence_capabilities === - PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR vkGetPhysicalDeviceExternalFencePropertiesKHR = 0; - -#if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_KHR_external_fence_win32 === - PFN_vkImportFenceWin32HandleKHR vkImportFenceWin32HandleKHR = 0; - PFN_vkGetFenceWin32HandleKHR vkGetFenceWin32HandleKHR = 0; -#else - PFN_dummy vkImportFenceWin32HandleKHR_placeholder = 0; - PFN_dummy vkGetFenceWin32HandleKHR_placeholder = 0; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - //=== 
VK_KHR_external_fence_fd === - PFN_vkImportFenceFdKHR vkImportFenceFdKHR = 0; - PFN_vkGetFenceFdKHR vkGetFenceFdKHR = 0; - - //=== VK_KHR_performance_query === - PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR = 0; - PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR = 0; - PFN_vkAcquireProfilingLockKHR vkAcquireProfilingLockKHR = 0; - PFN_vkReleaseProfilingLockKHR vkReleaseProfilingLockKHR = 0; - - //=== VK_KHR_get_surface_capabilities2 === - PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR vkGetPhysicalDeviceSurfaceCapabilities2KHR = 0; - PFN_vkGetPhysicalDeviceSurfaceFormats2KHR vkGetPhysicalDeviceSurfaceFormats2KHR = 0; - - //=== VK_KHR_get_display_properties2 === - PFN_vkGetPhysicalDeviceDisplayProperties2KHR vkGetPhysicalDeviceDisplayProperties2KHR = 0; - PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR vkGetPhysicalDeviceDisplayPlaneProperties2KHR = 0; - PFN_vkGetDisplayModeProperties2KHR vkGetDisplayModeProperties2KHR = 0; - PFN_vkGetDisplayPlaneCapabilities2KHR vkGetDisplayPlaneCapabilities2KHR = 0; - -#if defined( VK_USE_PLATFORM_IOS_MVK ) - //=== VK_MVK_ios_surface === - PFN_vkCreateIOSSurfaceMVK vkCreateIOSSurfaceMVK = 0; -#else - PFN_dummy vkCreateIOSSurfaceMVK_placeholder = 0; -#endif /*VK_USE_PLATFORM_IOS_MVK*/ - -#if defined( VK_USE_PLATFORM_MACOS_MVK ) - //=== VK_MVK_macos_surface === - PFN_vkCreateMacOSSurfaceMVK vkCreateMacOSSurfaceMVK = 0; -#else - PFN_dummy vkCreateMacOSSurfaceMVK_placeholder = 0; -#endif /*VK_USE_PLATFORM_MACOS_MVK*/ - - //=== VK_EXT_debug_utils === - PFN_vkSetDebugUtilsObjectNameEXT vkSetDebugUtilsObjectNameEXT = 0; - PFN_vkSetDebugUtilsObjectTagEXT vkSetDebugUtilsObjectTagEXT = 0; - PFN_vkQueueBeginDebugUtilsLabelEXT vkQueueBeginDebugUtilsLabelEXT = 0; - PFN_vkQueueEndDebugUtilsLabelEXT vkQueueEndDebugUtilsLabelEXT = 0; - PFN_vkQueueInsertDebugUtilsLabelEXT vkQueueInsertDebugUtilsLabelEXT = 0; - PFN_vkCmdBeginDebugUtilsLabelEXT vkCmdBeginDebugUtilsLabelEXT = 0; - PFN_vkCmdEndDebugUtilsLabelEXT vkCmdEndDebugUtilsLabelEXT = 0; - PFN_vkCmdInsertDebugUtilsLabelEXT vkCmdInsertDebugUtilsLabelEXT = 0; - PFN_vkCreateDebugUtilsMessengerEXT vkCreateDebugUtilsMessengerEXT = 0; - PFN_vkDestroyDebugUtilsMessengerEXT vkDestroyDebugUtilsMessengerEXT = 0; - PFN_vkSubmitDebugUtilsMessageEXT vkSubmitDebugUtilsMessageEXT = 0; - -#if defined( VK_USE_PLATFORM_ANDROID_KHR ) - //=== VK_ANDROID_external_memory_android_hardware_buffer === - PFN_vkGetAndroidHardwareBufferPropertiesANDROID vkGetAndroidHardwareBufferPropertiesANDROID = 0; - PFN_vkGetMemoryAndroidHardwareBufferANDROID vkGetMemoryAndroidHardwareBufferANDROID = 0; -#else - PFN_dummy vkGetAndroidHardwareBufferPropertiesANDROID_placeholder = 0; - PFN_dummy vkGetMemoryAndroidHardwareBufferANDROID_placeholder = 0; -#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ - -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - //=== VK_AMDX_shader_enqueue === - PFN_vkCreateExecutionGraphPipelinesAMDX vkCreateExecutionGraphPipelinesAMDX = 0; - PFN_vkGetExecutionGraphPipelineScratchSizeAMDX vkGetExecutionGraphPipelineScratchSizeAMDX = 0; - PFN_vkGetExecutionGraphPipelineNodeIndexAMDX vkGetExecutionGraphPipelineNodeIndexAMDX = 0; - PFN_vkCmdInitializeGraphScratchMemoryAMDX vkCmdInitializeGraphScratchMemoryAMDX = 0; - PFN_vkCmdDispatchGraphAMDX vkCmdDispatchGraphAMDX = 0; - PFN_vkCmdDispatchGraphIndirectAMDX vkCmdDispatchGraphIndirectAMDX = 0; - PFN_vkCmdDispatchGraphIndirectCountAMDX 
vkCmdDispatchGraphIndirectCountAMDX = 0; -#else - PFN_dummy vkCreateExecutionGraphPipelinesAMDX_placeholder = 0; - PFN_dummy vkGetExecutionGraphPipelineScratchSizeAMDX_placeholder = 0; - PFN_dummy vkGetExecutionGraphPipelineNodeIndexAMDX_placeholder = 0; - PFN_dummy vkCmdInitializeGraphScratchMemoryAMDX_placeholder = 0; - PFN_dummy vkCmdDispatchGraphAMDX_placeholder = 0; - PFN_dummy vkCmdDispatchGraphIndirectAMDX_placeholder = 0; - PFN_dummy vkCmdDispatchGraphIndirectCountAMDX_placeholder = 0; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - - //=== VK_EXT_sample_locations === - PFN_vkCmdSetSampleLocationsEXT vkCmdSetSampleLocationsEXT = 0; - PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT vkGetPhysicalDeviceMultisamplePropertiesEXT = 0; - - //=== VK_KHR_get_memory_requirements2 === - PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR = 0; - PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR = 0; - PFN_vkGetImageSparseMemoryRequirements2KHR vkGetImageSparseMemoryRequirements2KHR = 0; - - //=== VK_KHR_acceleration_structure === - PFN_vkCreateAccelerationStructureKHR vkCreateAccelerationStructureKHR = 0; - PFN_vkDestroyAccelerationStructureKHR vkDestroyAccelerationStructureKHR = 0; - PFN_vkCmdBuildAccelerationStructuresKHR vkCmdBuildAccelerationStructuresKHR = 0; - PFN_vkCmdBuildAccelerationStructuresIndirectKHR vkCmdBuildAccelerationStructuresIndirectKHR = 0; - PFN_vkBuildAccelerationStructuresKHR vkBuildAccelerationStructuresKHR = 0; - PFN_vkCopyAccelerationStructureKHR vkCopyAccelerationStructureKHR = 0; - PFN_vkCopyAccelerationStructureToMemoryKHR vkCopyAccelerationStructureToMemoryKHR = 0; - PFN_vkCopyMemoryToAccelerationStructureKHR vkCopyMemoryToAccelerationStructureKHR = 0; - PFN_vkWriteAccelerationStructuresPropertiesKHR vkWriteAccelerationStructuresPropertiesKHR = 0; - PFN_vkCmdCopyAccelerationStructureKHR vkCmdCopyAccelerationStructureKHR = 0; - PFN_vkCmdCopyAccelerationStructureToMemoryKHR vkCmdCopyAccelerationStructureToMemoryKHR = 0; - PFN_vkCmdCopyMemoryToAccelerationStructureKHR vkCmdCopyMemoryToAccelerationStructureKHR = 0; - PFN_vkGetAccelerationStructureDeviceAddressKHR vkGetAccelerationStructureDeviceAddressKHR = 0; - PFN_vkCmdWriteAccelerationStructuresPropertiesKHR vkCmdWriteAccelerationStructuresPropertiesKHR = 0; - PFN_vkGetDeviceAccelerationStructureCompatibilityKHR vkGetDeviceAccelerationStructureCompatibilityKHR = 0; - PFN_vkGetAccelerationStructureBuildSizesKHR vkGetAccelerationStructureBuildSizesKHR = 0; - - //=== VK_KHR_ray_tracing_pipeline === - PFN_vkCmdTraceRaysKHR vkCmdTraceRaysKHR = 0; - PFN_vkCreateRayTracingPipelinesKHR vkCreateRayTracingPipelinesKHR = 0; - PFN_vkGetRayTracingShaderGroupHandlesKHR vkGetRayTracingShaderGroupHandlesKHR = 0; - PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR vkGetRayTracingCaptureReplayShaderGroupHandlesKHR = 0; - PFN_vkCmdTraceRaysIndirectKHR vkCmdTraceRaysIndirectKHR = 0; - PFN_vkGetRayTracingShaderGroupStackSizeKHR vkGetRayTracingShaderGroupStackSizeKHR = 0; - PFN_vkCmdSetRayTracingPipelineStackSizeKHR vkCmdSetRayTracingPipelineStackSizeKHR = 0; - - //=== VK_KHR_sampler_ycbcr_conversion === - PFN_vkCreateSamplerYcbcrConversionKHR vkCreateSamplerYcbcrConversionKHR = 0; - PFN_vkDestroySamplerYcbcrConversionKHR vkDestroySamplerYcbcrConversionKHR = 0; - - //=== VK_KHR_bind_memory2 === - PFN_vkBindBufferMemory2KHR vkBindBufferMemory2KHR = 0; - PFN_vkBindImageMemory2KHR vkBindImageMemory2KHR = 0; - - //=== VK_EXT_image_drm_format_modifier === - PFN_vkGetImageDrmFormatModifierPropertiesEXT 
vkGetImageDrmFormatModifierPropertiesEXT = 0; - - //=== VK_EXT_validation_cache === - PFN_vkCreateValidationCacheEXT vkCreateValidationCacheEXT = 0; - PFN_vkDestroyValidationCacheEXT vkDestroyValidationCacheEXT = 0; - PFN_vkMergeValidationCachesEXT vkMergeValidationCachesEXT = 0; - PFN_vkGetValidationCacheDataEXT vkGetValidationCacheDataEXT = 0; - - //=== VK_NV_shading_rate_image === - PFN_vkCmdBindShadingRateImageNV vkCmdBindShadingRateImageNV = 0; - PFN_vkCmdSetViewportShadingRatePaletteNV vkCmdSetViewportShadingRatePaletteNV = 0; - PFN_vkCmdSetCoarseSampleOrderNV vkCmdSetCoarseSampleOrderNV = 0; - - //=== VK_NV_ray_tracing === - PFN_vkCreateAccelerationStructureNV vkCreateAccelerationStructureNV = 0; - PFN_vkDestroyAccelerationStructureNV vkDestroyAccelerationStructureNV = 0; - PFN_vkGetAccelerationStructureMemoryRequirementsNV vkGetAccelerationStructureMemoryRequirementsNV = 0; - PFN_vkBindAccelerationStructureMemoryNV vkBindAccelerationStructureMemoryNV = 0; - PFN_vkCmdBuildAccelerationStructureNV vkCmdBuildAccelerationStructureNV = 0; - PFN_vkCmdCopyAccelerationStructureNV vkCmdCopyAccelerationStructureNV = 0; - PFN_vkCmdTraceRaysNV vkCmdTraceRaysNV = 0; - PFN_vkCreateRayTracingPipelinesNV vkCreateRayTracingPipelinesNV = 0; - PFN_vkGetRayTracingShaderGroupHandlesNV vkGetRayTracingShaderGroupHandlesNV = 0; - PFN_vkGetAccelerationStructureHandleNV vkGetAccelerationStructureHandleNV = 0; - PFN_vkCmdWriteAccelerationStructuresPropertiesNV vkCmdWriteAccelerationStructuresPropertiesNV = 0; - PFN_vkCompileDeferredNV vkCompileDeferredNV = 0; - - //=== VK_KHR_maintenance3 === - PFN_vkGetDescriptorSetLayoutSupportKHR vkGetDescriptorSetLayoutSupportKHR = 0; - - //=== VK_KHR_draw_indirect_count === - PFN_vkCmdDrawIndirectCountKHR vkCmdDrawIndirectCountKHR = 0; - PFN_vkCmdDrawIndexedIndirectCountKHR vkCmdDrawIndexedIndirectCountKHR = 0; - - //=== VK_EXT_external_memory_host === - PFN_vkGetMemoryHostPointerPropertiesEXT vkGetMemoryHostPointerPropertiesEXT = 0; - - //=== VK_AMD_buffer_marker === - PFN_vkCmdWriteBufferMarkerAMD vkCmdWriteBufferMarkerAMD = 0; - - //=== VK_EXT_calibrated_timestamps === - PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT vkGetPhysicalDeviceCalibrateableTimeDomainsEXT = 0; - PFN_vkGetCalibratedTimestampsEXT vkGetCalibratedTimestampsEXT = 0; - - //=== VK_NV_mesh_shader === - PFN_vkCmdDrawMeshTasksNV vkCmdDrawMeshTasksNV = 0; - PFN_vkCmdDrawMeshTasksIndirectNV vkCmdDrawMeshTasksIndirectNV = 0; - PFN_vkCmdDrawMeshTasksIndirectCountNV vkCmdDrawMeshTasksIndirectCountNV = 0; - - //=== VK_NV_scissor_exclusive === - PFN_vkCmdSetExclusiveScissorEnableNV vkCmdSetExclusiveScissorEnableNV = 0; - PFN_vkCmdSetExclusiveScissorNV vkCmdSetExclusiveScissorNV = 0; - - //=== VK_NV_device_diagnostic_checkpoints === - PFN_vkCmdSetCheckpointNV vkCmdSetCheckpointNV = 0; - PFN_vkGetQueueCheckpointDataNV vkGetQueueCheckpointDataNV = 0; - - //=== VK_KHR_timeline_semaphore === - PFN_vkGetSemaphoreCounterValueKHR vkGetSemaphoreCounterValueKHR = 0; - PFN_vkWaitSemaphoresKHR vkWaitSemaphoresKHR = 0; - PFN_vkSignalSemaphoreKHR vkSignalSemaphoreKHR = 0; - - //=== VK_INTEL_performance_query === - PFN_vkInitializePerformanceApiINTEL vkInitializePerformanceApiINTEL = 0; - PFN_vkUninitializePerformanceApiINTEL vkUninitializePerformanceApiINTEL = 0; - PFN_vkCmdSetPerformanceMarkerINTEL vkCmdSetPerformanceMarkerINTEL = 0; - PFN_vkCmdSetPerformanceStreamMarkerINTEL vkCmdSetPerformanceStreamMarkerINTEL = 0; - PFN_vkCmdSetPerformanceOverrideINTEL vkCmdSetPerformanceOverrideINTEL = 0; - 
PFN_vkAcquirePerformanceConfigurationINTEL vkAcquirePerformanceConfigurationINTEL = 0; - PFN_vkReleasePerformanceConfigurationINTEL vkReleasePerformanceConfigurationINTEL = 0; - PFN_vkQueueSetPerformanceConfigurationINTEL vkQueueSetPerformanceConfigurationINTEL = 0; - PFN_vkGetPerformanceParameterINTEL vkGetPerformanceParameterINTEL = 0; - - //=== VK_AMD_display_native_hdr === - PFN_vkSetLocalDimmingAMD vkSetLocalDimmingAMD = 0; - -#if defined( VK_USE_PLATFORM_FUCHSIA ) - //=== VK_FUCHSIA_imagepipe_surface === - PFN_vkCreateImagePipeSurfaceFUCHSIA vkCreateImagePipeSurfaceFUCHSIA = 0; -#else - PFN_dummy vkCreateImagePipeSurfaceFUCHSIA_placeholder = 0; -#endif /*VK_USE_PLATFORM_FUCHSIA*/ - -#if defined( VK_USE_PLATFORM_METAL_EXT ) - //=== VK_EXT_metal_surface === - PFN_vkCreateMetalSurfaceEXT vkCreateMetalSurfaceEXT = 0; -#else - PFN_dummy vkCreateMetalSurfaceEXT_placeholder = 0; -#endif /*VK_USE_PLATFORM_METAL_EXT*/ - - //=== VK_KHR_fragment_shading_rate === - PFN_vkGetPhysicalDeviceFragmentShadingRatesKHR vkGetPhysicalDeviceFragmentShadingRatesKHR = 0; - PFN_vkCmdSetFragmentShadingRateKHR vkCmdSetFragmentShadingRateKHR = 0; - - //=== VK_KHR_dynamic_rendering_local_read === - PFN_vkCmdSetRenderingAttachmentLocationsKHR vkCmdSetRenderingAttachmentLocationsKHR = 0; - PFN_vkCmdSetRenderingInputAttachmentIndicesKHR vkCmdSetRenderingInputAttachmentIndicesKHR = 0; - - //=== VK_EXT_buffer_device_address === - PFN_vkGetBufferDeviceAddressEXT vkGetBufferDeviceAddressEXT = 0; - - //=== VK_EXT_tooling_info === - PFN_vkGetPhysicalDeviceToolPropertiesEXT vkGetPhysicalDeviceToolPropertiesEXT = 0; - - //=== VK_KHR_present_wait === - PFN_vkWaitForPresentKHR vkWaitForPresentKHR = 0; - - //=== VK_NV_cooperative_matrix === - PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV vkGetPhysicalDeviceCooperativeMatrixPropertiesNV = 0; - - //=== VK_NV_coverage_reduction_mode === - PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV = 0; - -#if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_EXT_full_screen_exclusive === - PFN_vkGetPhysicalDeviceSurfacePresentModes2EXT vkGetPhysicalDeviceSurfacePresentModes2EXT = 0; - PFN_vkAcquireFullScreenExclusiveModeEXT vkAcquireFullScreenExclusiveModeEXT = 0; - PFN_vkReleaseFullScreenExclusiveModeEXT vkReleaseFullScreenExclusiveModeEXT = 0; - PFN_vkGetDeviceGroupSurfacePresentModes2EXT vkGetDeviceGroupSurfacePresentModes2EXT = 0; -#else - PFN_dummy vkGetPhysicalDeviceSurfacePresentModes2EXT_placeholder = 0; - PFN_dummy vkAcquireFullScreenExclusiveModeEXT_placeholder = 0; - PFN_dummy vkReleaseFullScreenExclusiveModeEXT_placeholder = 0; - PFN_dummy vkGetDeviceGroupSurfacePresentModes2EXT_placeholder = 0; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - //=== VK_EXT_headless_surface === - PFN_vkCreateHeadlessSurfaceEXT vkCreateHeadlessSurfaceEXT = 0; - - //=== VK_KHR_buffer_device_address === - PFN_vkGetBufferDeviceAddressKHR vkGetBufferDeviceAddressKHR = 0; - PFN_vkGetBufferOpaqueCaptureAddressKHR vkGetBufferOpaqueCaptureAddressKHR = 0; - PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR vkGetDeviceMemoryOpaqueCaptureAddressKHR = 0; - - //=== VK_EXT_line_rasterization === - PFN_vkCmdSetLineStippleEXT vkCmdSetLineStippleEXT = 0; - - //=== VK_EXT_host_query_reset === - PFN_vkResetQueryPoolEXT vkResetQueryPoolEXT = 0; - - //=== VK_EXT_extended_dynamic_state === - PFN_vkCmdSetCullModeEXT vkCmdSetCullModeEXT = 0; - PFN_vkCmdSetFrontFaceEXT vkCmdSetFrontFaceEXT = 0; - PFN_vkCmdSetPrimitiveTopologyEXT 
vkCmdSetPrimitiveTopologyEXT = 0; - PFN_vkCmdSetViewportWithCountEXT vkCmdSetViewportWithCountEXT = 0; - PFN_vkCmdSetScissorWithCountEXT vkCmdSetScissorWithCountEXT = 0; - PFN_vkCmdBindVertexBuffers2EXT vkCmdBindVertexBuffers2EXT = 0; - PFN_vkCmdSetDepthTestEnableEXT vkCmdSetDepthTestEnableEXT = 0; - PFN_vkCmdSetDepthWriteEnableEXT vkCmdSetDepthWriteEnableEXT = 0; - PFN_vkCmdSetDepthCompareOpEXT vkCmdSetDepthCompareOpEXT = 0; - PFN_vkCmdSetDepthBoundsTestEnableEXT vkCmdSetDepthBoundsTestEnableEXT = 0; - PFN_vkCmdSetStencilTestEnableEXT vkCmdSetStencilTestEnableEXT = 0; - PFN_vkCmdSetStencilOpEXT vkCmdSetStencilOpEXT = 0; - - //=== VK_KHR_deferred_host_operations === - PFN_vkCreateDeferredOperationKHR vkCreateDeferredOperationKHR = 0; - PFN_vkDestroyDeferredOperationKHR vkDestroyDeferredOperationKHR = 0; - PFN_vkGetDeferredOperationMaxConcurrencyKHR vkGetDeferredOperationMaxConcurrencyKHR = 0; - PFN_vkGetDeferredOperationResultKHR vkGetDeferredOperationResultKHR = 0; - PFN_vkDeferredOperationJoinKHR vkDeferredOperationJoinKHR = 0; - - //=== VK_KHR_pipeline_executable_properties === - PFN_vkGetPipelineExecutablePropertiesKHR vkGetPipelineExecutablePropertiesKHR = 0; - PFN_vkGetPipelineExecutableStatisticsKHR vkGetPipelineExecutableStatisticsKHR = 0; - PFN_vkGetPipelineExecutableInternalRepresentationsKHR vkGetPipelineExecutableInternalRepresentationsKHR = 0; - - //=== VK_EXT_host_image_copy === - PFN_vkCopyMemoryToImageEXT vkCopyMemoryToImageEXT = 0; - PFN_vkCopyImageToMemoryEXT vkCopyImageToMemoryEXT = 0; - PFN_vkCopyImageToImageEXT vkCopyImageToImageEXT = 0; - PFN_vkTransitionImageLayoutEXT vkTransitionImageLayoutEXT = 0; - PFN_vkGetImageSubresourceLayout2EXT vkGetImageSubresourceLayout2EXT = 0; - - //=== VK_KHR_map_memory2 === - PFN_vkMapMemory2KHR vkMapMemory2KHR = 0; - PFN_vkUnmapMemory2KHR vkUnmapMemory2KHR = 0; - - //=== VK_EXT_swapchain_maintenance1 === - PFN_vkReleaseSwapchainImagesEXT vkReleaseSwapchainImagesEXT = 0; - - //=== VK_NV_device_generated_commands === - PFN_vkGetGeneratedCommandsMemoryRequirementsNV vkGetGeneratedCommandsMemoryRequirementsNV = 0; - PFN_vkCmdPreprocessGeneratedCommandsNV vkCmdPreprocessGeneratedCommandsNV = 0; - PFN_vkCmdExecuteGeneratedCommandsNV vkCmdExecuteGeneratedCommandsNV = 0; - PFN_vkCmdBindPipelineShaderGroupNV vkCmdBindPipelineShaderGroupNV = 0; - PFN_vkCreateIndirectCommandsLayoutNV vkCreateIndirectCommandsLayoutNV = 0; - PFN_vkDestroyIndirectCommandsLayoutNV vkDestroyIndirectCommandsLayoutNV = 0; - - //=== VK_EXT_depth_bias_control === - PFN_vkCmdSetDepthBias2EXT vkCmdSetDepthBias2EXT = 0; - - //=== VK_EXT_acquire_drm_display === - PFN_vkAcquireDrmDisplayEXT vkAcquireDrmDisplayEXT = 0; - PFN_vkGetDrmDisplayEXT vkGetDrmDisplayEXT = 0; - - //=== VK_EXT_private_data === - PFN_vkCreatePrivateDataSlotEXT vkCreatePrivateDataSlotEXT = 0; - PFN_vkDestroyPrivateDataSlotEXT vkDestroyPrivateDataSlotEXT = 0; - PFN_vkSetPrivateDataEXT vkSetPrivateDataEXT = 0; - PFN_vkGetPrivateDataEXT vkGetPrivateDataEXT = 0; - - //=== VK_KHR_video_encode_queue === - PFN_vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR = 0; - PFN_vkGetEncodedVideoSessionParametersKHR vkGetEncodedVideoSessionParametersKHR = 0; - PFN_vkCmdEncodeVideoKHR vkCmdEncodeVideoKHR = 0; - -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - //=== VK_NV_cuda_kernel_launch === - PFN_vkCreateCudaModuleNV vkCreateCudaModuleNV = 0; - PFN_vkGetCudaModuleCacheNV vkGetCudaModuleCacheNV = 0; - PFN_vkCreateCudaFunctionNV vkCreateCudaFunctionNV = 0; - 
PFN_vkDestroyCudaModuleNV vkDestroyCudaModuleNV = 0; - PFN_vkDestroyCudaFunctionNV vkDestroyCudaFunctionNV = 0; - PFN_vkCmdCudaLaunchKernelNV vkCmdCudaLaunchKernelNV = 0; -#else - PFN_dummy vkCreateCudaModuleNV_placeholder = 0; - PFN_dummy vkGetCudaModuleCacheNV_placeholder = 0; - PFN_dummy vkCreateCudaFunctionNV_placeholder = 0; - PFN_dummy vkDestroyCudaModuleNV_placeholder = 0; - PFN_dummy vkDestroyCudaFunctionNV_placeholder = 0; - PFN_dummy vkCmdCudaLaunchKernelNV_placeholder = 0; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - -#if defined( VK_USE_PLATFORM_METAL_EXT ) - //=== VK_EXT_metal_objects === - PFN_vkExportMetalObjectsEXT vkExportMetalObjectsEXT = 0; -#else - PFN_dummy vkExportMetalObjectsEXT_placeholder = 0; -#endif /*VK_USE_PLATFORM_METAL_EXT*/ - - //=== VK_KHR_synchronization2 === - PFN_vkCmdSetEvent2KHR vkCmdSetEvent2KHR = 0; - PFN_vkCmdResetEvent2KHR vkCmdResetEvent2KHR = 0; - PFN_vkCmdWaitEvents2KHR vkCmdWaitEvents2KHR = 0; - PFN_vkCmdPipelineBarrier2KHR vkCmdPipelineBarrier2KHR = 0; - PFN_vkCmdWriteTimestamp2KHR vkCmdWriteTimestamp2KHR = 0; - PFN_vkQueueSubmit2KHR vkQueueSubmit2KHR = 0; - PFN_vkCmdWriteBufferMarker2AMD vkCmdWriteBufferMarker2AMD = 0; - PFN_vkGetQueueCheckpointData2NV vkGetQueueCheckpointData2NV = 0; - - //=== VK_EXT_descriptor_buffer === - PFN_vkGetDescriptorSetLayoutSizeEXT vkGetDescriptorSetLayoutSizeEXT = 0; - PFN_vkGetDescriptorSetLayoutBindingOffsetEXT vkGetDescriptorSetLayoutBindingOffsetEXT = 0; - PFN_vkGetDescriptorEXT vkGetDescriptorEXT = 0; - PFN_vkCmdBindDescriptorBuffersEXT vkCmdBindDescriptorBuffersEXT = 0; - PFN_vkCmdSetDescriptorBufferOffsetsEXT vkCmdSetDescriptorBufferOffsetsEXT = 0; - PFN_vkCmdBindDescriptorBufferEmbeddedSamplersEXT vkCmdBindDescriptorBufferEmbeddedSamplersEXT = 0; - PFN_vkGetBufferOpaqueCaptureDescriptorDataEXT vkGetBufferOpaqueCaptureDescriptorDataEXT = 0; - PFN_vkGetImageOpaqueCaptureDescriptorDataEXT vkGetImageOpaqueCaptureDescriptorDataEXT = 0; - PFN_vkGetImageViewOpaqueCaptureDescriptorDataEXT vkGetImageViewOpaqueCaptureDescriptorDataEXT = 0; - PFN_vkGetSamplerOpaqueCaptureDescriptorDataEXT vkGetSamplerOpaqueCaptureDescriptorDataEXT = 0; - PFN_vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT = 0; - - //=== VK_NV_fragment_shading_rate_enums === - PFN_vkCmdSetFragmentShadingRateEnumNV vkCmdSetFragmentShadingRateEnumNV = 0; - - //=== VK_EXT_mesh_shader === - PFN_vkCmdDrawMeshTasksEXT vkCmdDrawMeshTasksEXT = 0; - PFN_vkCmdDrawMeshTasksIndirectEXT vkCmdDrawMeshTasksIndirectEXT = 0; - PFN_vkCmdDrawMeshTasksIndirectCountEXT vkCmdDrawMeshTasksIndirectCountEXT = 0; - - //=== VK_KHR_copy_commands2 === - PFN_vkCmdCopyBuffer2KHR vkCmdCopyBuffer2KHR = 0; - PFN_vkCmdCopyImage2KHR vkCmdCopyImage2KHR = 0; - PFN_vkCmdCopyBufferToImage2KHR vkCmdCopyBufferToImage2KHR = 0; - PFN_vkCmdCopyImageToBuffer2KHR vkCmdCopyImageToBuffer2KHR = 0; - PFN_vkCmdBlitImage2KHR vkCmdBlitImage2KHR = 0; - PFN_vkCmdResolveImage2KHR vkCmdResolveImage2KHR = 0; - - //=== VK_EXT_device_fault === - PFN_vkGetDeviceFaultInfoEXT vkGetDeviceFaultInfoEXT = 0; - -#if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_NV_acquire_winrt_display === - PFN_vkAcquireWinrtDisplayNV vkAcquireWinrtDisplayNV = 0; - PFN_vkGetWinrtDisplayNV vkGetWinrtDisplayNV = 0; -#else - PFN_dummy vkAcquireWinrtDisplayNV_placeholder = 0; - PFN_dummy vkGetWinrtDisplayNV_placeholder = 0; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - -#if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) - //=== VK_EXT_directfb_surface === - PFN_vkCreateDirectFBSurfaceEXT 
vkCreateDirectFBSurfaceEXT = 0; - PFN_vkGetPhysicalDeviceDirectFBPresentationSupportEXT vkGetPhysicalDeviceDirectFBPresentationSupportEXT = 0; -#else - PFN_dummy vkCreateDirectFBSurfaceEXT_placeholder = 0; - PFN_dummy vkGetPhysicalDeviceDirectFBPresentationSupportEXT_placeholder = 0; -#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ - - //=== VK_EXT_vertex_input_dynamic_state === - PFN_vkCmdSetVertexInputEXT vkCmdSetVertexInputEXT = 0; - -#if defined( VK_USE_PLATFORM_FUCHSIA ) - //=== VK_FUCHSIA_external_memory === - PFN_vkGetMemoryZirconHandleFUCHSIA vkGetMemoryZirconHandleFUCHSIA = 0; - PFN_vkGetMemoryZirconHandlePropertiesFUCHSIA vkGetMemoryZirconHandlePropertiesFUCHSIA = 0; -#else - PFN_dummy vkGetMemoryZirconHandleFUCHSIA_placeholder = 0; - PFN_dummy vkGetMemoryZirconHandlePropertiesFUCHSIA_placeholder = 0; -#endif /*VK_USE_PLATFORM_FUCHSIA*/ - -#if defined( VK_USE_PLATFORM_FUCHSIA ) - //=== VK_FUCHSIA_external_semaphore === - PFN_vkImportSemaphoreZirconHandleFUCHSIA vkImportSemaphoreZirconHandleFUCHSIA = 0; - PFN_vkGetSemaphoreZirconHandleFUCHSIA vkGetSemaphoreZirconHandleFUCHSIA = 0; -#else - PFN_dummy vkImportSemaphoreZirconHandleFUCHSIA_placeholder = 0; - PFN_dummy vkGetSemaphoreZirconHandleFUCHSIA_placeholder = 0; -#endif /*VK_USE_PLATFORM_FUCHSIA*/ - -#if defined( VK_USE_PLATFORM_FUCHSIA ) - //=== VK_FUCHSIA_buffer_collection === - PFN_vkCreateBufferCollectionFUCHSIA vkCreateBufferCollectionFUCHSIA = 0; - PFN_vkSetBufferCollectionImageConstraintsFUCHSIA vkSetBufferCollectionImageConstraintsFUCHSIA = 0; - PFN_vkSetBufferCollectionBufferConstraintsFUCHSIA vkSetBufferCollectionBufferConstraintsFUCHSIA = 0; - PFN_vkDestroyBufferCollectionFUCHSIA vkDestroyBufferCollectionFUCHSIA = 0; - PFN_vkGetBufferCollectionPropertiesFUCHSIA vkGetBufferCollectionPropertiesFUCHSIA = 0; -#else - PFN_dummy vkCreateBufferCollectionFUCHSIA_placeholder = 0; - PFN_dummy vkSetBufferCollectionImageConstraintsFUCHSIA_placeholder = 0; - PFN_dummy vkSetBufferCollectionBufferConstraintsFUCHSIA_placeholder = 0; - PFN_dummy vkDestroyBufferCollectionFUCHSIA_placeholder = 0; - PFN_dummy vkGetBufferCollectionPropertiesFUCHSIA_placeholder = 0; -#endif /*VK_USE_PLATFORM_FUCHSIA*/ - - //=== VK_HUAWEI_subpass_shading === - PFN_vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI = 0; - PFN_vkCmdSubpassShadingHUAWEI vkCmdSubpassShadingHUAWEI = 0; - - //=== VK_HUAWEI_invocation_mask === - PFN_vkCmdBindInvocationMaskHUAWEI vkCmdBindInvocationMaskHUAWEI = 0; - - //=== VK_NV_external_memory_rdma === - PFN_vkGetMemoryRemoteAddressNV vkGetMemoryRemoteAddressNV = 0; - - //=== VK_EXT_pipeline_properties === - PFN_vkGetPipelinePropertiesEXT vkGetPipelinePropertiesEXT = 0; - - //=== VK_EXT_extended_dynamic_state2 === - PFN_vkCmdSetPatchControlPointsEXT vkCmdSetPatchControlPointsEXT = 0; - PFN_vkCmdSetRasterizerDiscardEnableEXT vkCmdSetRasterizerDiscardEnableEXT = 0; - PFN_vkCmdSetDepthBiasEnableEXT vkCmdSetDepthBiasEnableEXT = 0; - PFN_vkCmdSetLogicOpEXT vkCmdSetLogicOpEXT = 0; - PFN_vkCmdSetPrimitiveRestartEnableEXT vkCmdSetPrimitiveRestartEnableEXT = 0; - -#if defined( VK_USE_PLATFORM_SCREEN_QNX ) - //=== VK_QNX_screen_surface === - PFN_vkCreateScreenSurfaceQNX vkCreateScreenSurfaceQNX = 0; - PFN_vkGetPhysicalDeviceScreenPresentationSupportQNX vkGetPhysicalDeviceScreenPresentationSupportQNX = 0; -#else - PFN_dummy vkCreateScreenSurfaceQNX_placeholder = 0; - PFN_dummy vkGetPhysicalDeviceScreenPresentationSupportQNX_placeholder = 0; -#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ - - //=== 
VK_EXT_color_write_enable === - PFN_vkCmdSetColorWriteEnableEXT vkCmdSetColorWriteEnableEXT = 0; - - //=== VK_KHR_ray_tracing_maintenance1 === - PFN_vkCmdTraceRaysIndirect2KHR vkCmdTraceRaysIndirect2KHR = 0; - - //=== VK_EXT_multi_draw === - PFN_vkCmdDrawMultiEXT vkCmdDrawMultiEXT = 0; - PFN_vkCmdDrawMultiIndexedEXT vkCmdDrawMultiIndexedEXT = 0; - - //=== VK_EXT_opacity_micromap === - PFN_vkCreateMicromapEXT vkCreateMicromapEXT = 0; - PFN_vkDestroyMicromapEXT vkDestroyMicromapEXT = 0; - PFN_vkCmdBuildMicromapsEXT vkCmdBuildMicromapsEXT = 0; - PFN_vkBuildMicromapsEXT vkBuildMicromapsEXT = 0; - PFN_vkCopyMicromapEXT vkCopyMicromapEXT = 0; - PFN_vkCopyMicromapToMemoryEXT vkCopyMicromapToMemoryEXT = 0; - PFN_vkCopyMemoryToMicromapEXT vkCopyMemoryToMicromapEXT = 0; - PFN_vkWriteMicromapsPropertiesEXT vkWriteMicromapsPropertiesEXT = 0; - PFN_vkCmdCopyMicromapEXT vkCmdCopyMicromapEXT = 0; - PFN_vkCmdCopyMicromapToMemoryEXT vkCmdCopyMicromapToMemoryEXT = 0; - PFN_vkCmdCopyMemoryToMicromapEXT vkCmdCopyMemoryToMicromapEXT = 0; - PFN_vkCmdWriteMicromapsPropertiesEXT vkCmdWriteMicromapsPropertiesEXT = 0; - PFN_vkGetDeviceMicromapCompatibilityEXT vkGetDeviceMicromapCompatibilityEXT = 0; - PFN_vkGetMicromapBuildSizesEXT vkGetMicromapBuildSizesEXT = 0; - - //=== VK_HUAWEI_cluster_culling_shader === - PFN_vkCmdDrawClusterHUAWEI vkCmdDrawClusterHUAWEI = 0; - PFN_vkCmdDrawClusterIndirectHUAWEI vkCmdDrawClusterIndirectHUAWEI = 0; - - //=== VK_EXT_pageable_device_local_memory === - PFN_vkSetDeviceMemoryPriorityEXT vkSetDeviceMemoryPriorityEXT = 0; - - //=== VK_KHR_maintenance4 === - PFN_vkGetDeviceBufferMemoryRequirementsKHR vkGetDeviceBufferMemoryRequirementsKHR = 0; - PFN_vkGetDeviceImageMemoryRequirementsKHR vkGetDeviceImageMemoryRequirementsKHR = 0; - PFN_vkGetDeviceImageSparseMemoryRequirementsKHR vkGetDeviceImageSparseMemoryRequirementsKHR = 0; - - //=== VK_VALVE_descriptor_set_host_mapping === - PFN_vkGetDescriptorSetLayoutHostMappingInfoVALVE vkGetDescriptorSetLayoutHostMappingInfoVALVE = 0; - PFN_vkGetDescriptorSetHostMappingVALVE vkGetDescriptorSetHostMappingVALVE = 0; - - //=== VK_NV_copy_memory_indirect === - PFN_vkCmdCopyMemoryIndirectNV vkCmdCopyMemoryIndirectNV = 0; - PFN_vkCmdCopyMemoryToImageIndirectNV vkCmdCopyMemoryToImageIndirectNV = 0; - - //=== VK_NV_memory_decompression === - PFN_vkCmdDecompressMemoryNV vkCmdDecompressMemoryNV = 0; - PFN_vkCmdDecompressMemoryIndirectCountNV vkCmdDecompressMemoryIndirectCountNV = 0; - - //=== VK_NV_device_generated_commands_compute === - PFN_vkGetPipelineIndirectMemoryRequirementsNV vkGetPipelineIndirectMemoryRequirementsNV = 0; - PFN_vkCmdUpdatePipelineIndirectBufferNV vkCmdUpdatePipelineIndirectBufferNV = 0; - PFN_vkGetPipelineIndirectDeviceAddressNV vkGetPipelineIndirectDeviceAddressNV = 0; - - //=== VK_EXT_extended_dynamic_state3 === - PFN_vkCmdSetDepthClampEnableEXT vkCmdSetDepthClampEnableEXT = 0; - PFN_vkCmdSetPolygonModeEXT vkCmdSetPolygonModeEXT = 0; - PFN_vkCmdSetRasterizationSamplesEXT vkCmdSetRasterizationSamplesEXT = 0; - PFN_vkCmdSetSampleMaskEXT vkCmdSetSampleMaskEXT = 0; - PFN_vkCmdSetAlphaToCoverageEnableEXT vkCmdSetAlphaToCoverageEnableEXT = 0; - PFN_vkCmdSetAlphaToOneEnableEXT vkCmdSetAlphaToOneEnableEXT = 0; - PFN_vkCmdSetLogicOpEnableEXT vkCmdSetLogicOpEnableEXT = 0; - PFN_vkCmdSetColorBlendEnableEXT vkCmdSetColorBlendEnableEXT = 0; - PFN_vkCmdSetColorBlendEquationEXT vkCmdSetColorBlendEquationEXT = 0; - PFN_vkCmdSetColorWriteMaskEXT vkCmdSetColorWriteMaskEXT = 0; - PFN_vkCmdSetTessellationDomainOriginEXT 
vkCmdSetTessellationDomainOriginEXT = 0; - PFN_vkCmdSetRasterizationStreamEXT vkCmdSetRasterizationStreamEXT = 0; - PFN_vkCmdSetConservativeRasterizationModeEXT vkCmdSetConservativeRasterizationModeEXT = 0; - PFN_vkCmdSetExtraPrimitiveOverestimationSizeEXT vkCmdSetExtraPrimitiveOverestimationSizeEXT = 0; - PFN_vkCmdSetDepthClipEnableEXT vkCmdSetDepthClipEnableEXT = 0; - PFN_vkCmdSetSampleLocationsEnableEXT vkCmdSetSampleLocationsEnableEXT = 0; - PFN_vkCmdSetColorBlendAdvancedEXT vkCmdSetColorBlendAdvancedEXT = 0; - PFN_vkCmdSetProvokingVertexModeEXT vkCmdSetProvokingVertexModeEXT = 0; - PFN_vkCmdSetLineRasterizationModeEXT vkCmdSetLineRasterizationModeEXT = 0; - PFN_vkCmdSetLineStippleEnableEXT vkCmdSetLineStippleEnableEXT = 0; - PFN_vkCmdSetDepthClipNegativeOneToOneEXT vkCmdSetDepthClipNegativeOneToOneEXT = 0; - PFN_vkCmdSetViewportWScalingEnableNV vkCmdSetViewportWScalingEnableNV = 0; - PFN_vkCmdSetViewportSwizzleNV vkCmdSetViewportSwizzleNV = 0; - PFN_vkCmdSetCoverageToColorEnableNV vkCmdSetCoverageToColorEnableNV = 0; - PFN_vkCmdSetCoverageToColorLocationNV vkCmdSetCoverageToColorLocationNV = 0; - PFN_vkCmdSetCoverageModulationModeNV vkCmdSetCoverageModulationModeNV = 0; - PFN_vkCmdSetCoverageModulationTableEnableNV vkCmdSetCoverageModulationTableEnableNV = 0; - PFN_vkCmdSetCoverageModulationTableNV vkCmdSetCoverageModulationTableNV = 0; - PFN_vkCmdSetShadingRateImageEnableNV vkCmdSetShadingRateImageEnableNV = 0; - PFN_vkCmdSetRepresentativeFragmentTestEnableNV vkCmdSetRepresentativeFragmentTestEnableNV = 0; - PFN_vkCmdSetCoverageReductionModeNV vkCmdSetCoverageReductionModeNV = 0; - - //=== VK_EXT_shader_module_identifier === - PFN_vkGetShaderModuleIdentifierEXT vkGetShaderModuleIdentifierEXT = 0; - PFN_vkGetShaderModuleCreateInfoIdentifierEXT vkGetShaderModuleCreateInfoIdentifierEXT = 0; - - //=== VK_NV_optical_flow === - PFN_vkGetPhysicalDeviceOpticalFlowImageFormatsNV vkGetPhysicalDeviceOpticalFlowImageFormatsNV = 0; - PFN_vkCreateOpticalFlowSessionNV vkCreateOpticalFlowSessionNV = 0; - PFN_vkDestroyOpticalFlowSessionNV vkDestroyOpticalFlowSessionNV = 0; - PFN_vkBindOpticalFlowSessionImageNV vkBindOpticalFlowSessionImageNV = 0; - PFN_vkCmdOpticalFlowExecuteNV vkCmdOpticalFlowExecuteNV = 0; - - //=== VK_KHR_maintenance5 === - PFN_vkCmdBindIndexBuffer2KHR vkCmdBindIndexBuffer2KHR = 0; - PFN_vkGetRenderingAreaGranularityKHR vkGetRenderingAreaGranularityKHR = 0; - PFN_vkGetDeviceImageSubresourceLayoutKHR vkGetDeviceImageSubresourceLayoutKHR = 0; - PFN_vkGetImageSubresourceLayout2KHR vkGetImageSubresourceLayout2KHR = 0; - - //=== VK_AMD_anti_lag === - PFN_vkAntiLagUpdateAMD vkAntiLagUpdateAMD = 0; - - //=== VK_EXT_shader_object === - PFN_vkCreateShadersEXT vkCreateShadersEXT = 0; - PFN_vkDestroyShaderEXT vkDestroyShaderEXT = 0; - PFN_vkGetShaderBinaryDataEXT vkGetShaderBinaryDataEXT = 0; - PFN_vkCmdBindShadersEXT vkCmdBindShadersEXT = 0; - - //=== VK_KHR_pipeline_binary === - PFN_vkCreatePipelineBinariesKHR vkCreatePipelineBinariesKHR = 0; - PFN_vkDestroyPipelineBinaryKHR vkDestroyPipelineBinaryKHR = 0; - PFN_vkGetPipelineKeyKHR vkGetPipelineKeyKHR = 0; - PFN_vkGetPipelineBinaryDataKHR vkGetPipelineBinaryDataKHR = 0; - PFN_vkReleaseCapturedPipelineDataKHR vkReleaseCapturedPipelineDataKHR = 0; - - //=== VK_QCOM_tile_properties === - PFN_vkGetFramebufferTilePropertiesQCOM vkGetFramebufferTilePropertiesQCOM = 0; - PFN_vkGetDynamicRenderingTilePropertiesQCOM vkGetDynamicRenderingTilePropertiesQCOM = 0; - - //=== VK_NV_low_latency2 === - PFN_vkSetLatencySleepModeNV vkSetLatencySleepModeNV = 
0; - PFN_vkLatencySleepNV vkLatencySleepNV = 0; - PFN_vkSetLatencyMarkerNV vkSetLatencyMarkerNV = 0; - PFN_vkGetLatencyTimingsNV vkGetLatencyTimingsNV = 0; - PFN_vkQueueNotifyOutOfBandNV vkQueueNotifyOutOfBandNV = 0; - - //=== VK_KHR_cooperative_matrix === - PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR = 0; - - //=== VK_EXT_attachment_feedback_loop_dynamic_state === - PFN_vkCmdSetAttachmentFeedbackLoopEnableEXT vkCmdSetAttachmentFeedbackLoopEnableEXT = 0; - -#if defined( VK_USE_PLATFORM_SCREEN_QNX ) - //=== VK_QNX_external_memory_screen_buffer === - PFN_vkGetScreenBufferPropertiesQNX vkGetScreenBufferPropertiesQNX = 0; -#else - PFN_dummy vkGetScreenBufferPropertiesQNX_placeholder = 0; -#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ - - //=== VK_KHR_line_rasterization === - PFN_vkCmdSetLineStippleKHR vkCmdSetLineStippleKHR = 0; - - //=== VK_KHR_calibrated_timestamps === - PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsKHR vkGetPhysicalDeviceCalibrateableTimeDomainsKHR = 0; - PFN_vkGetCalibratedTimestampsKHR vkGetCalibratedTimestampsKHR = 0; - - //=== VK_KHR_maintenance6 === - PFN_vkCmdBindDescriptorSets2KHR vkCmdBindDescriptorSets2KHR = 0; - PFN_vkCmdPushConstants2KHR vkCmdPushConstants2KHR = 0; - PFN_vkCmdPushDescriptorSet2KHR vkCmdPushDescriptorSet2KHR = 0; - PFN_vkCmdPushDescriptorSetWithTemplate2KHR vkCmdPushDescriptorSetWithTemplate2KHR = 0; - PFN_vkCmdSetDescriptorBufferOffsets2EXT vkCmdSetDescriptorBufferOffsets2EXT = 0; - PFN_vkCmdBindDescriptorBufferEmbeddedSamplers2EXT vkCmdBindDescriptorBufferEmbeddedSamplers2EXT = 0; - - public: - DispatchLoaderDynamic() VULKAN_HPP_NOEXCEPT = default; - DispatchLoaderDynamic( DispatchLoaderDynamic const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - DispatchLoaderDynamic( PFN_vkGetInstanceProcAddr getInstanceProcAddr ) VULKAN_HPP_NOEXCEPT + class DispatchLoaderDynamic : public DispatchLoaderBase { - init( getInstanceProcAddr ); - } - - // This interface does not require a linked vulkan library. 
-    DispatchLoaderDynamic( VkInstance instance,
-                           PFN_vkGetInstanceProcAddr getInstanceProcAddr,
-                           VkDevice device = {},
-                           PFN_vkGetDeviceProcAddr getDeviceProcAddr = nullptr ) VULKAN_HPP_NOEXCEPT
-    {
-      init( instance, getInstanceProcAddr, device, getDeviceProcAddr );
-    }
-
-    template
-    void init()
-    {
-      static DynamicLoader dl;
-      init( dl );
-    }
-
-    template
-    void init( DynamicLoader const & dl ) VULKAN_HPP_NOEXCEPT
-    {
-      PFN_vkGetInstanceProcAddr getInstanceProcAddr = dl.template getProcAddress( "vkGetInstanceProcAddr" );
-      init( getInstanceProcAddr );
-    }
-
-    void init( PFN_vkGetInstanceProcAddr getInstanceProcAddr ) VULKAN_HPP_NOEXCEPT
-    {
-      VULKAN_HPP_ASSERT( getInstanceProcAddr );
-
-      vkGetInstanceProcAddr = getInstanceProcAddr;
-
+  public:
      //=== VK_VERSION_1_0 ===
-      vkCreateInstance = PFN_vkCreateInstance( vkGetInstanceProcAddr( NULL, "vkCreateInstance" ) );
-      vkEnumerateInstanceExtensionProperties =
-        PFN_vkEnumerateInstanceExtensionProperties( vkGetInstanceProcAddr( NULL, "vkEnumerateInstanceExtensionProperties" ) );
-      vkEnumerateInstanceLayerProperties = PFN_vkEnumerateInstanceLayerProperties( vkGetInstanceProcAddr( NULL, "vkEnumerateInstanceLayerProperties" ) );
+    PFN_vkCreateInstance vkCreateInstance = 0;
+    PFN_vkDestroyInstance vkDestroyInstance = 0;
+    PFN_vkEnumeratePhysicalDevices vkEnumeratePhysicalDevices = 0;
+    PFN_vkGetPhysicalDeviceFeatures vkGetPhysicalDeviceFeatures = 0;
+    PFN_vkGetPhysicalDeviceFormatProperties vkGetPhysicalDeviceFormatProperties = 0;
+    PFN_vkGetPhysicalDeviceImageFormatProperties vkGetPhysicalDeviceImageFormatProperties = 0;
+    PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties = 0;
+    PFN_vkGetPhysicalDeviceQueueFamilyProperties vkGetPhysicalDeviceQueueFamilyProperties = 0;
+    PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties = 0;
+    PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr = 0;
+    PFN_vkGetDeviceProcAddr vkGetDeviceProcAddr = 0;
+    PFN_vkCreateDevice vkCreateDevice = 0;
+    PFN_vkDestroyDevice vkDestroyDevice = 0;
+    PFN_vkEnumerateInstanceExtensionProperties vkEnumerateInstanceExtensionProperties = 0;
+    PFN_vkEnumerateDeviceExtensionProperties vkEnumerateDeviceExtensionProperties = 0;
+    PFN_vkEnumerateInstanceLayerProperties vkEnumerateInstanceLayerProperties = 0;
+    PFN_vkEnumerateDeviceLayerProperties vkEnumerateDeviceLayerProperties = 0;
+    PFN_vkGetDeviceQueue vkGetDeviceQueue = 0;
+    PFN_vkQueueSubmit vkQueueSubmit = 0;
+    PFN_vkQueueWaitIdle vkQueueWaitIdle = 0;
+    PFN_vkDeviceWaitIdle vkDeviceWaitIdle = 0;
+    PFN_vkAllocateMemory vkAllocateMemory = 0;
+    PFN_vkFreeMemory vkFreeMemory = 0;
+    PFN_vkMapMemory vkMapMemory = 0;
+    PFN_vkUnmapMemory vkUnmapMemory = 0;
+    PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges = 0;
+    PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges = 0;
+    PFN_vkGetDeviceMemoryCommitment vkGetDeviceMemoryCommitment = 0;
+    PFN_vkBindBufferMemory vkBindBufferMemory = 0;
+    PFN_vkBindImageMemory vkBindImageMemory = 0;
+    PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements = 0;
+    PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements = 0;
+    PFN_vkGetImageSparseMemoryRequirements vkGetImageSparseMemoryRequirements = 0;
+    PFN_vkGetPhysicalDeviceSparseImageFormatProperties vkGetPhysicalDeviceSparseImageFormatProperties = 0;
+    PFN_vkQueueBindSparse vkQueueBindSparse = 0;
+    PFN_vkCreateFence vkCreateFence = 0;
+    PFN_vkDestroyFence vkDestroyFence = 0;
+    PFN_vkResetFences vkResetFences = 0;
+    PFN_vkGetFenceStatus vkGetFenceStatus = 0;
+    PFN_vkWaitForFences
vkWaitForFences = 0; + PFN_vkCreateSemaphore vkCreateSemaphore = 0; + PFN_vkDestroySemaphore vkDestroySemaphore = 0; + PFN_vkCreateEvent vkCreateEvent = 0; + PFN_vkDestroyEvent vkDestroyEvent = 0; + PFN_vkGetEventStatus vkGetEventStatus = 0; + PFN_vkSetEvent vkSetEvent = 0; + PFN_vkResetEvent vkResetEvent = 0; + PFN_vkCreateQueryPool vkCreateQueryPool = 0; + PFN_vkDestroyQueryPool vkDestroyQueryPool = 0; + PFN_vkGetQueryPoolResults vkGetQueryPoolResults = 0; + PFN_vkCreateBuffer vkCreateBuffer = 0; + PFN_vkDestroyBuffer vkDestroyBuffer = 0; + PFN_vkCreateBufferView vkCreateBufferView = 0; + PFN_vkDestroyBufferView vkDestroyBufferView = 0; + PFN_vkCreateImage vkCreateImage = 0; + PFN_vkDestroyImage vkDestroyImage = 0; + PFN_vkGetImageSubresourceLayout vkGetImageSubresourceLayout = 0; + PFN_vkCreateImageView vkCreateImageView = 0; + PFN_vkDestroyImageView vkDestroyImageView = 0; + PFN_vkCreateShaderModule vkCreateShaderModule = 0; + PFN_vkDestroyShaderModule vkDestroyShaderModule = 0; + PFN_vkCreatePipelineCache vkCreatePipelineCache = 0; + PFN_vkDestroyPipelineCache vkDestroyPipelineCache = 0; + PFN_vkGetPipelineCacheData vkGetPipelineCacheData = 0; + PFN_vkMergePipelineCaches vkMergePipelineCaches = 0; + PFN_vkCreateGraphicsPipelines vkCreateGraphicsPipelines = 0; + PFN_vkCreateComputePipelines vkCreateComputePipelines = 0; + PFN_vkDestroyPipeline vkDestroyPipeline = 0; + PFN_vkCreatePipelineLayout vkCreatePipelineLayout = 0; + PFN_vkDestroyPipelineLayout vkDestroyPipelineLayout = 0; + PFN_vkCreateSampler vkCreateSampler = 0; + PFN_vkDestroySampler vkDestroySampler = 0; + PFN_vkCreateDescriptorSetLayout vkCreateDescriptorSetLayout = 0; + PFN_vkDestroyDescriptorSetLayout vkDestroyDescriptorSetLayout = 0; + PFN_vkCreateDescriptorPool vkCreateDescriptorPool = 0; + PFN_vkDestroyDescriptorPool vkDestroyDescriptorPool = 0; + PFN_vkResetDescriptorPool vkResetDescriptorPool = 0; + PFN_vkAllocateDescriptorSets vkAllocateDescriptorSets = 0; + PFN_vkFreeDescriptorSets vkFreeDescriptorSets = 0; + PFN_vkUpdateDescriptorSets vkUpdateDescriptorSets = 0; + PFN_vkCreateFramebuffer vkCreateFramebuffer = 0; + PFN_vkDestroyFramebuffer vkDestroyFramebuffer = 0; + PFN_vkCreateRenderPass vkCreateRenderPass = 0; + PFN_vkDestroyRenderPass vkDestroyRenderPass = 0; + PFN_vkGetRenderAreaGranularity vkGetRenderAreaGranularity = 0; + PFN_vkCreateCommandPool vkCreateCommandPool = 0; + PFN_vkDestroyCommandPool vkDestroyCommandPool = 0; + PFN_vkResetCommandPool vkResetCommandPool = 0; + PFN_vkAllocateCommandBuffers vkAllocateCommandBuffers = 0; + PFN_vkFreeCommandBuffers vkFreeCommandBuffers = 0; + PFN_vkBeginCommandBuffer vkBeginCommandBuffer = 0; + PFN_vkEndCommandBuffer vkEndCommandBuffer = 0; + PFN_vkResetCommandBuffer vkResetCommandBuffer = 0; + PFN_vkCmdBindPipeline vkCmdBindPipeline = 0; + PFN_vkCmdSetViewport vkCmdSetViewport = 0; + PFN_vkCmdSetScissor vkCmdSetScissor = 0; + PFN_vkCmdSetLineWidth vkCmdSetLineWidth = 0; + PFN_vkCmdSetDepthBias vkCmdSetDepthBias = 0; + PFN_vkCmdSetBlendConstants vkCmdSetBlendConstants = 0; + PFN_vkCmdSetDepthBounds vkCmdSetDepthBounds = 0; + PFN_vkCmdSetStencilCompareMask vkCmdSetStencilCompareMask = 0; + PFN_vkCmdSetStencilWriteMask vkCmdSetStencilWriteMask = 0; + PFN_vkCmdSetStencilReference vkCmdSetStencilReference = 0; + PFN_vkCmdBindDescriptorSets vkCmdBindDescriptorSets = 0; + PFN_vkCmdBindIndexBuffer vkCmdBindIndexBuffer = 0; + PFN_vkCmdBindVertexBuffers vkCmdBindVertexBuffers = 0; + PFN_vkCmdDraw vkCmdDraw = 0; + PFN_vkCmdDrawIndexed vkCmdDrawIndexed = 0; + 
PFN_vkCmdDrawIndirect vkCmdDrawIndirect = 0; + PFN_vkCmdDrawIndexedIndirect vkCmdDrawIndexedIndirect = 0; + PFN_vkCmdDispatch vkCmdDispatch = 0; + PFN_vkCmdDispatchIndirect vkCmdDispatchIndirect = 0; + PFN_vkCmdCopyBuffer vkCmdCopyBuffer = 0; + PFN_vkCmdCopyImage vkCmdCopyImage = 0; + PFN_vkCmdBlitImage vkCmdBlitImage = 0; + PFN_vkCmdCopyBufferToImage vkCmdCopyBufferToImage = 0; + PFN_vkCmdCopyImageToBuffer vkCmdCopyImageToBuffer = 0; + PFN_vkCmdUpdateBuffer vkCmdUpdateBuffer = 0; + PFN_vkCmdFillBuffer vkCmdFillBuffer = 0; + PFN_vkCmdClearColorImage vkCmdClearColorImage = 0; + PFN_vkCmdClearDepthStencilImage vkCmdClearDepthStencilImage = 0; + PFN_vkCmdClearAttachments vkCmdClearAttachments = 0; + PFN_vkCmdResolveImage vkCmdResolveImage = 0; + PFN_vkCmdSetEvent vkCmdSetEvent = 0; + PFN_vkCmdResetEvent vkCmdResetEvent = 0; + PFN_vkCmdWaitEvents vkCmdWaitEvents = 0; + PFN_vkCmdPipelineBarrier vkCmdPipelineBarrier = 0; + PFN_vkCmdBeginQuery vkCmdBeginQuery = 0; + PFN_vkCmdEndQuery vkCmdEndQuery = 0; + PFN_vkCmdResetQueryPool vkCmdResetQueryPool = 0; + PFN_vkCmdWriteTimestamp vkCmdWriteTimestamp = 0; + PFN_vkCmdCopyQueryPoolResults vkCmdCopyQueryPoolResults = 0; + PFN_vkCmdPushConstants vkCmdPushConstants = 0; + PFN_vkCmdBeginRenderPass vkCmdBeginRenderPass = 0; + PFN_vkCmdNextSubpass vkCmdNextSubpass = 0; + PFN_vkCmdEndRenderPass vkCmdEndRenderPass = 0; + PFN_vkCmdExecuteCommands vkCmdExecuteCommands = 0; //=== VK_VERSION_1_1 === - vkEnumerateInstanceVersion = PFN_vkEnumerateInstanceVersion( vkGetInstanceProcAddr( NULL, "vkEnumerateInstanceVersion" ) ); - } - - // This interface does not require a linked vulkan library. - void init( VkInstance instance, - PFN_vkGetInstanceProcAddr getInstanceProcAddr, - VkDevice device = {}, - PFN_vkGetDeviceProcAddr /*getDeviceProcAddr*/ = nullptr ) VULKAN_HPP_NOEXCEPT - { - VULKAN_HPP_ASSERT( instance && getInstanceProcAddr ); - vkGetInstanceProcAddr = getInstanceProcAddr; - init( VULKAN_HPP_NAMESPACE::Instance( instance ) ); - if ( device ) - { - init( VULKAN_HPP_NAMESPACE::Device( device ) ); - } - } - - void init( VULKAN_HPP_NAMESPACE::Instance instanceCpp ) VULKAN_HPP_NOEXCEPT - { - VkInstance instance = static_cast( instanceCpp ); - - //=== VK_VERSION_1_0 === - vkDestroyInstance = PFN_vkDestroyInstance( vkGetInstanceProcAddr( instance, "vkDestroyInstance" ) ); - vkEnumeratePhysicalDevices = PFN_vkEnumeratePhysicalDevices( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDevices" ) ); - vkGetPhysicalDeviceFeatures = PFN_vkGetPhysicalDeviceFeatures( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures" ) ); - vkGetPhysicalDeviceFormatProperties = PFN_vkGetPhysicalDeviceFormatProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties" ) ); - vkGetPhysicalDeviceImageFormatProperties = - PFN_vkGetPhysicalDeviceImageFormatProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties" ) ); - vkGetPhysicalDeviceProperties = PFN_vkGetPhysicalDeviceProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties" ) ); - vkGetPhysicalDeviceQueueFamilyProperties = - PFN_vkGetPhysicalDeviceQueueFamilyProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties" ) ); - vkGetPhysicalDeviceMemoryProperties = PFN_vkGetPhysicalDeviceMemoryProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties" ) ); - vkGetDeviceProcAddr = PFN_vkGetDeviceProcAddr( vkGetInstanceProcAddr( instance, "vkGetDeviceProcAddr" ) ); - vkCreateDevice = PFN_vkCreateDevice( 
vkGetInstanceProcAddr( instance, "vkCreateDevice" ) ); - vkDestroyDevice = PFN_vkDestroyDevice( vkGetInstanceProcAddr( instance, "vkDestroyDevice" ) ); - vkEnumerateDeviceExtensionProperties = - PFN_vkEnumerateDeviceExtensionProperties( vkGetInstanceProcAddr( instance, "vkEnumerateDeviceExtensionProperties" ) ); - vkEnumerateDeviceLayerProperties = PFN_vkEnumerateDeviceLayerProperties( vkGetInstanceProcAddr( instance, "vkEnumerateDeviceLayerProperties" ) ); - vkGetDeviceQueue = PFN_vkGetDeviceQueue( vkGetInstanceProcAddr( instance, "vkGetDeviceQueue" ) ); - vkQueueSubmit = PFN_vkQueueSubmit( vkGetInstanceProcAddr( instance, "vkQueueSubmit" ) ); - vkQueueWaitIdle = PFN_vkQueueWaitIdle( vkGetInstanceProcAddr( instance, "vkQueueWaitIdle" ) ); - vkDeviceWaitIdle = PFN_vkDeviceWaitIdle( vkGetInstanceProcAddr( instance, "vkDeviceWaitIdle" ) ); - vkAllocateMemory = PFN_vkAllocateMemory( vkGetInstanceProcAddr( instance, "vkAllocateMemory" ) ); - vkFreeMemory = PFN_vkFreeMemory( vkGetInstanceProcAddr( instance, "vkFreeMemory" ) ); - vkMapMemory = PFN_vkMapMemory( vkGetInstanceProcAddr( instance, "vkMapMemory" ) ); - vkUnmapMemory = PFN_vkUnmapMemory( vkGetInstanceProcAddr( instance, "vkUnmapMemory" ) ); - vkFlushMappedMemoryRanges = PFN_vkFlushMappedMemoryRanges( vkGetInstanceProcAddr( instance, "vkFlushMappedMemoryRanges" ) ); - vkInvalidateMappedMemoryRanges = PFN_vkInvalidateMappedMemoryRanges( vkGetInstanceProcAddr( instance, "vkInvalidateMappedMemoryRanges" ) ); - vkGetDeviceMemoryCommitment = PFN_vkGetDeviceMemoryCommitment( vkGetInstanceProcAddr( instance, "vkGetDeviceMemoryCommitment" ) ); - vkBindBufferMemory = PFN_vkBindBufferMemory( vkGetInstanceProcAddr( instance, "vkBindBufferMemory" ) ); - vkBindImageMemory = PFN_vkBindImageMemory( vkGetInstanceProcAddr( instance, "vkBindImageMemory" ) ); - vkGetBufferMemoryRequirements = PFN_vkGetBufferMemoryRequirements( vkGetInstanceProcAddr( instance, "vkGetBufferMemoryRequirements" ) ); - vkGetImageMemoryRequirements = PFN_vkGetImageMemoryRequirements( vkGetInstanceProcAddr( instance, "vkGetImageMemoryRequirements" ) ); - vkGetImageSparseMemoryRequirements = PFN_vkGetImageSparseMemoryRequirements( vkGetInstanceProcAddr( instance, "vkGetImageSparseMemoryRequirements" ) ); - vkGetPhysicalDeviceSparseImageFormatProperties = - PFN_vkGetPhysicalDeviceSparseImageFormatProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties" ) ); - vkQueueBindSparse = PFN_vkQueueBindSparse( vkGetInstanceProcAddr( instance, "vkQueueBindSparse" ) ); - vkCreateFence = PFN_vkCreateFence( vkGetInstanceProcAddr( instance, "vkCreateFence" ) ); - vkDestroyFence = PFN_vkDestroyFence( vkGetInstanceProcAddr( instance, "vkDestroyFence" ) ); - vkResetFences = PFN_vkResetFences( vkGetInstanceProcAddr( instance, "vkResetFences" ) ); - vkGetFenceStatus = PFN_vkGetFenceStatus( vkGetInstanceProcAddr( instance, "vkGetFenceStatus" ) ); - vkWaitForFences = PFN_vkWaitForFences( vkGetInstanceProcAddr( instance, "vkWaitForFences" ) ); - vkCreateSemaphore = PFN_vkCreateSemaphore( vkGetInstanceProcAddr( instance, "vkCreateSemaphore" ) ); - vkDestroySemaphore = PFN_vkDestroySemaphore( vkGetInstanceProcAddr( instance, "vkDestroySemaphore" ) ); - vkCreateEvent = PFN_vkCreateEvent( vkGetInstanceProcAddr( instance, "vkCreateEvent" ) ); - vkDestroyEvent = PFN_vkDestroyEvent( vkGetInstanceProcAddr( instance, "vkDestroyEvent" ) ); - vkGetEventStatus = PFN_vkGetEventStatus( vkGetInstanceProcAddr( instance, "vkGetEventStatus" ) ); - vkSetEvent = PFN_vkSetEvent( 
vkGetInstanceProcAddr( instance, "vkSetEvent" ) ); - vkResetEvent = PFN_vkResetEvent( vkGetInstanceProcAddr( instance, "vkResetEvent" ) ); - vkCreateQueryPool = PFN_vkCreateQueryPool( vkGetInstanceProcAddr( instance, "vkCreateQueryPool" ) ); - vkDestroyQueryPool = PFN_vkDestroyQueryPool( vkGetInstanceProcAddr( instance, "vkDestroyQueryPool" ) ); - vkGetQueryPoolResults = PFN_vkGetQueryPoolResults( vkGetInstanceProcAddr( instance, "vkGetQueryPoolResults" ) ); - vkCreateBuffer = PFN_vkCreateBuffer( vkGetInstanceProcAddr( instance, "vkCreateBuffer" ) ); - vkDestroyBuffer = PFN_vkDestroyBuffer( vkGetInstanceProcAddr( instance, "vkDestroyBuffer" ) ); - vkCreateBufferView = PFN_vkCreateBufferView( vkGetInstanceProcAddr( instance, "vkCreateBufferView" ) ); - vkDestroyBufferView = PFN_vkDestroyBufferView( vkGetInstanceProcAddr( instance, "vkDestroyBufferView" ) ); - vkCreateImage = PFN_vkCreateImage( vkGetInstanceProcAddr( instance, "vkCreateImage" ) ); - vkDestroyImage = PFN_vkDestroyImage( vkGetInstanceProcAddr( instance, "vkDestroyImage" ) ); - vkGetImageSubresourceLayout = PFN_vkGetImageSubresourceLayout( vkGetInstanceProcAddr( instance, "vkGetImageSubresourceLayout" ) ); - vkCreateImageView = PFN_vkCreateImageView( vkGetInstanceProcAddr( instance, "vkCreateImageView" ) ); - vkDestroyImageView = PFN_vkDestroyImageView( vkGetInstanceProcAddr( instance, "vkDestroyImageView" ) ); - vkCreateShaderModule = PFN_vkCreateShaderModule( vkGetInstanceProcAddr( instance, "vkCreateShaderModule" ) ); - vkDestroyShaderModule = PFN_vkDestroyShaderModule( vkGetInstanceProcAddr( instance, "vkDestroyShaderModule" ) ); - vkCreatePipelineCache = PFN_vkCreatePipelineCache( vkGetInstanceProcAddr( instance, "vkCreatePipelineCache" ) ); - vkDestroyPipelineCache = PFN_vkDestroyPipelineCache( vkGetInstanceProcAddr( instance, "vkDestroyPipelineCache" ) ); - vkGetPipelineCacheData = PFN_vkGetPipelineCacheData( vkGetInstanceProcAddr( instance, "vkGetPipelineCacheData" ) ); - vkMergePipelineCaches = PFN_vkMergePipelineCaches( vkGetInstanceProcAddr( instance, "vkMergePipelineCaches" ) ); - vkCreateGraphicsPipelines = PFN_vkCreateGraphicsPipelines( vkGetInstanceProcAddr( instance, "vkCreateGraphicsPipelines" ) ); - vkCreateComputePipelines = PFN_vkCreateComputePipelines( vkGetInstanceProcAddr( instance, "vkCreateComputePipelines" ) ); - vkDestroyPipeline = PFN_vkDestroyPipeline( vkGetInstanceProcAddr( instance, "vkDestroyPipeline" ) ); - vkCreatePipelineLayout = PFN_vkCreatePipelineLayout( vkGetInstanceProcAddr( instance, "vkCreatePipelineLayout" ) ); - vkDestroyPipelineLayout = PFN_vkDestroyPipelineLayout( vkGetInstanceProcAddr( instance, "vkDestroyPipelineLayout" ) ); - vkCreateSampler = PFN_vkCreateSampler( vkGetInstanceProcAddr( instance, "vkCreateSampler" ) ); - vkDestroySampler = PFN_vkDestroySampler( vkGetInstanceProcAddr( instance, "vkDestroySampler" ) ); - vkCreateDescriptorSetLayout = PFN_vkCreateDescriptorSetLayout( vkGetInstanceProcAddr( instance, "vkCreateDescriptorSetLayout" ) ); - vkDestroyDescriptorSetLayout = PFN_vkDestroyDescriptorSetLayout( vkGetInstanceProcAddr( instance, "vkDestroyDescriptorSetLayout" ) ); - vkCreateDescriptorPool = PFN_vkCreateDescriptorPool( vkGetInstanceProcAddr( instance, "vkCreateDescriptorPool" ) ); - vkDestroyDescriptorPool = PFN_vkDestroyDescriptorPool( vkGetInstanceProcAddr( instance, "vkDestroyDescriptorPool" ) ); - vkResetDescriptorPool = PFN_vkResetDescriptorPool( vkGetInstanceProcAddr( instance, "vkResetDescriptorPool" ) ); - vkAllocateDescriptorSets = 
PFN_vkAllocateDescriptorSets( vkGetInstanceProcAddr( instance, "vkAllocateDescriptorSets" ) ); - vkFreeDescriptorSets = PFN_vkFreeDescriptorSets( vkGetInstanceProcAddr( instance, "vkFreeDescriptorSets" ) ); - vkUpdateDescriptorSets = PFN_vkUpdateDescriptorSets( vkGetInstanceProcAddr( instance, "vkUpdateDescriptorSets" ) ); - vkCreateFramebuffer = PFN_vkCreateFramebuffer( vkGetInstanceProcAddr( instance, "vkCreateFramebuffer" ) ); - vkDestroyFramebuffer = PFN_vkDestroyFramebuffer( vkGetInstanceProcAddr( instance, "vkDestroyFramebuffer" ) ); - vkCreateRenderPass = PFN_vkCreateRenderPass( vkGetInstanceProcAddr( instance, "vkCreateRenderPass" ) ); - vkDestroyRenderPass = PFN_vkDestroyRenderPass( vkGetInstanceProcAddr( instance, "vkDestroyRenderPass" ) ); - vkGetRenderAreaGranularity = PFN_vkGetRenderAreaGranularity( vkGetInstanceProcAddr( instance, "vkGetRenderAreaGranularity" ) ); - vkCreateCommandPool = PFN_vkCreateCommandPool( vkGetInstanceProcAddr( instance, "vkCreateCommandPool" ) ); - vkDestroyCommandPool = PFN_vkDestroyCommandPool( vkGetInstanceProcAddr( instance, "vkDestroyCommandPool" ) ); - vkResetCommandPool = PFN_vkResetCommandPool( vkGetInstanceProcAddr( instance, "vkResetCommandPool" ) ); - vkAllocateCommandBuffers = PFN_vkAllocateCommandBuffers( vkGetInstanceProcAddr( instance, "vkAllocateCommandBuffers" ) ); - vkFreeCommandBuffers = PFN_vkFreeCommandBuffers( vkGetInstanceProcAddr( instance, "vkFreeCommandBuffers" ) ); - vkBeginCommandBuffer = PFN_vkBeginCommandBuffer( vkGetInstanceProcAddr( instance, "vkBeginCommandBuffer" ) ); - vkEndCommandBuffer = PFN_vkEndCommandBuffer( vkGetInstanceProcAddr( instance, "vkEndCommandBuffer" ) ); - vkResetCommandBuffer = PFN_vkResetCommandBuffer( vkGetInstanceProcAddr( instance, "vkResetCommandBuffer" ) ); - vkCmdBindPipeline = PFN_vkCmdBindPipeline( vkGetInstanceProcAddr( instance, "vkCmdBindPipeline" ) ); - vkCmdSetViewport = PFN_vkCmdSetViewport( vkGetInstanceProcAddr( instance, "vkCmdSetViewport" ) ); - vkCmdSetScissor = PFN_vkCmdSetScissor( vkGetInstanceProcAddr( instance, "vkCmdSetScissor" ) ); - vkCmdSetLineWidth = PFN_vkCmdSetLineWidth( vkGetInstanceProcAddr( instance, "vkCmdSetLineWidth" ) ); - vkCmdSetDepthBias = PFN_vkCmdSetDepthBias( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBias" ) ); - vkCmdSetBlendConstants = PFN_vkCmdSetBlendConstants( vkGetInstanceProcAddr( instance, "vkCmdSetBlendConstants" ) ); - vkCmdSetDepthBounds = PFN_vkCmdSetDepthBounds( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBounds" ) ); - vkCmdSetStencilCompareMask = PFN_vkCmdSetStencilCompareMask( vkGetInstanceProcAddr( instance, "vkCmdSetStencilCompareMask" ) ); - vkCmdSetStencilWriteMask = PFN_vkCmdSetStencilWriteMask( vkGetInstanceProcAddr( instance, "vkCmdSetStencilWriteMask" ) ); - vkCmdSetStencilReference = PFN_vkCmdSetStencilReference( vkGetInstanceProcAddr( instance, "vkCmdSetStencilReference" ) ); - vkCmdBindDescriptorSets = PFN_vkCmdBindDescriptorSets( vkGetInstanceProcAddr( instance, "vkCmdBindDescriptorSets" ) ); - vkCmdBindIndexBuffer = PFN_vkCmdBindIndexBuffer( vkGetInstanceProcAddr( instance, "vkCmdBindIndexBuffer" ) ); - vkCmdBindVertexBuffers = PFN_vkCmdBindVertexBuffers( vkGetInstanceProcAddr( instance, "vkCmdBindVertexBuffers" ) ); - vkCmdDraw = PFN_vkCmdDraw( vkGetInstanceProcAddr( instance, "vkCmdDraw" ) ); - vkCmdDrawIndexed = PFN_vkCmdDrawIndexed( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexed" ) ); - vkCmdDrawIndirect = PFN_vkCmdDrawIndirect( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirect" ) ); - vkCmdDrawIndexedIndirect 
= PFN_vkCmdDrawIndexedIndirect( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirect" ) ); - vkCmdDispatch = PFN_vkCmdDispatch( vkGetInstanceProcAddr( instance, "vkCmdDispatch" ) ); - vkCmdDispatchIndirect = PFN_vkCmdDispatchIndirect( vkGetInstanceProcAddr( instance, "vkCmdDispatchIndirect" ) ); - vkCmdCopyBuffer = PFN_vkCmdCopyBuffer( vkGetInstanceProcAddr( instance, "vkCmdCopyBuffer" ) ); - vkCmdCopyImage = PFN_vkCmdCopyImage( vkGetInstanceProcAddr( instance, "vkCmdCopyImage" ) ); - vkCmdBlitImage = PFN_vkCmdBlitImage( vkGetInstanceProcAddr( instance, "vkCmdBlitImage" ) ); - vkCmdCopyBufferToImage = PFN_vkCmdCopyBufferToImage( vkGetInstanceProcAddr( instance, "vkCmdCopyBufferToImage" ) ); - vkCmdCopyImageToBuffer = PFN_vkCmdCopyImageToBuffer( vkGetInstanceProcAddr( instance, "vkCmdCopyImageToBuffer" ) ); - vkCmdUpdateBuffer = PFN_vkCmdUpdateBuffer( vkGetInstanceProcAddr( instance, "vkCmdUpdateBuffer" ) ); - vkCmdFillBuffer = PFN_vkCmdFillBuffer( vkGetInstanceProcAddr( instance, "vkCmdFillBuffer" ) ); - vkCmdClearColorImage = PFN_vkCmdClearColorImage( vkGetInstanceProcAddr( instance, "vkCmdClearColorImage" ) ); - vkCmdClearDepthStencilImage = PFN_vkCmdClearDepthStencilImage( vkGetInstanceProcAddr( instance, "vkCmdClearDepthStencilImage" ) ); - vkCmdClearAttachments = PFN_vkCmdClearAttachments( vkGetInstanceProcAddr( instance, "vkCmdClearAttachments" ) ); - vkCmdResolveImage = PFN_vkCmdResolveImage( vkGetInstanceProcAddr( instance, "vkCmdResolveImage" ) ); - vkCmdSetEvent = PFN_vkCmdSetEvent( vkGetInstanceProcAddr( instance, "vkCmdSetEvent" ) ); - vkCmdResetEvent = PFN_vkCmdResetEvent( vkGetInstanceProcAddr( instance, "vkCmdResetEvent" ) ); - vkCmdWaitEvents = PFN_vkCmdWaitEvents( vkGetInstanceProcAddr( instance, "vkCmdWaitEvents" ) ); - vkCmdPipelineBarrier = PFN_vkCmdPipelineBarrier( vkGetInstanceProcAddr( instance, "vkCmdPipelineBarrier" ) ); - vkCmdBeginQuery = PFN_vkCmdBeginQuery( vkGetInstanceProcAddr( instance, "vkCmdBeginQuery" ) ); - vkCmdEndQuery = PFN_vkCmdEndQuery( vkGetInstanceProcAddr( instance, "vkCmdEndQuery" ) ); - vkCmdResetQueryPool = PFN_vkCmdResetQueryPool( vkGetInstanceProcAddr( instance, "vkCmdResetQueryPool" ) ); - vkCmdWriteTimestamp = PFN_vkCmdWriteTimestamp( vkGetInstanceProcAddr( instance, "vkCmdWriteTimestamp" ) ); - vkCmdCopyQueryPoolResults = PFN_vkCmdCopyQueryPoolResults( vkGetInstanceProcAddr( instance, "vkCmdCopyQueryPoolResults" ) ); - vkCmdPushConstants = PFN_vkCmdPushConstants( vkGetInstanceProcAddr( instance, "vkCmdPushConstants" ) ); - vkCmdBeginRenderPass = PFN_vkCmdBeginRenderPass( vkGetInstanceProcAddr( instance, "vkCmdBeginRenderPass" ) ); - vkCmdNextSubpass = PFN_vkCmdNextSubpass( vkGetInstanceProcAddr( instance, "vkCmdNextSubpass" ) ); - vkCmdEndRenderPass = PFN_vkCmdEndRenderPass( vkGetInstanceProcAddr( instance, "vkCmdEndRenderPass" ) ); - vkCmdExecuteCommands = PFN_vkCmdExecuteCommands( vkGetInstanceProcAddr( instance, "vkCmdExecuteCommands" ) ); - - //=== VK_VERSION_1_1 === - vkBindBufferMemory2 = PFN_vkBindBufferMemory2( vkGetInstanceProcAddr( instance, "vkBindBufferMemory2" ) ); - vkBindImageMemory2 = PFN_vkBindImageMemory2( vkGetInstanceProcAddr( instance, "vkBindImageMemory2" ) ); - vkGetDeviceGroupPeerMemoryFeatures = PFN_vkGetDeviceGroupPeerMemoryFeatures( vkGetInstanceProcAddr( instance, "vkGetDeviceGroupPeerMemoryFeatures" ) ); - vkCmdSetDeviceMask = PFN_vkCmdSetDeviceMask( vkGetInstanceProcAddr( instance, "vkCmdSetDeviceMask" ) ); - vkCmdDispatchBase = PFN_vkCmdDispatchBase( vkGetInstanceProcAddr( instance, "vkCmdDispatchBase" 
) ); - vkEnumeratePhysicalDeviceGroups = PFN_vkEnumeratePhysicalDeviceGroups( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceGroups" ) ); - vkGetImageMemoryRequirements2 = PFN_vkGetImageMemoryRequirements2( vkGetInstanceProcAddr( instance, "vkGetImageMemoryRequirements2" ) ); - vkGetBufferMemoryRequirements2 = PFN_vkGetBufferMemoryRequirements2( vkGetInstanceProcAddr( instance, "vkGetBufferMemoryRequirements2" ) ); - vkGetImageSparseMemoryRequirements2 = PFN_vkGetImageSparseMemoryRequirements2( vkGetInstanceProcAddr( instance, "vkGetImageSparseMemoryRequirements2" ) ); - vkGetPhysicalDeviceFeatures2 = PFN_vkGetPhysicalDeviceFeatures2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures2" ) ); - vkGetPhysicalDeviceProperties2 = PFN_vkGetPhysicalDeviceProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties2" ) ); - vkGetPhysicalDeviceFormatProperties2 = - PFN_vkGetPhysicalDeviceFormatProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties2" ) ); - vkGetPhysicalDeviceImageFormatProperties2 = - PFN_vkGetPhysicalDeviceImageFormatProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties2" ) ); - vkGetPhysicalDeviceQueueFamilyProperties2 = - PFN_vkGetPhysicalDeviceQueueFamilyProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties2" ) ); - vkGetPhysicalDeviceMemoryProperties2 = - PFN_vkGetPhysicalDeviceMemoryProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties2" ) ); - vkGetPhysicalDeviceSparseImageFormatProperties2 = - PFN_vkGetPhysicalDeviceSparseImageFormatProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties2" ) ); - vkTrimCommandPool = PFN_vkTrimCommandPool( vkGetInstanceProcAddr( instance, "vkTrimCommandPool" ) ); - vkGetDeviceQueue2 = PFN_vkGetDeviceQueue2( vkGetInstanceProcAddr( instance, "vkGetDeviceQueue2" ) ); - vkCreateSamplerYcbcrConversion = PFN_vkCreateSamplerYcbcrConversion( vkGetInstanceProcAddr( instance, "vkCreateSamplerYcbcrConversion" ) ); - vkDestroySamplerYcbcrConversion = PFN_vkDestroySamplerYcbcrConversion( vkGetInstanceProcAddr( instance, "vkDestroySamplerYcbcrConversion" ) ); - vkCreateDescriptorUpdateTemplate = PFN_vkCreateDescriptorUpdateTemplate( vkGetInstanceProcAddr( instance, "vkCreateDescriptorUpdateTemplate" ) ); - vkDestroyDescriptorUpdateTemplate = PFN_vkDestroyDescriptorUpdateTemplate( vkGetInstanceProcAddr( instance, "vkDestroyDescriptorUpdateTemplate" ) ); - vkUpdateDescriptorSetWithTemplate = PFN_vkUpdateDescriptorSetWithTemplate( vkGetInstanceProcAddr( instance, "vkUpdateDescriptorSetWithTemplate" ) ); - vkGetPhysicalDeviceExternalBufferProperties = - PFN_vkGetPhysicalDeviceExternalBufferProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalBufferProperties" ) ); - vkGetPhysicalDeviceExternalFenceProperties = - PFN_vkGetPhysicalDeviceExternalFenceProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalFenceProperties" ) ); - vkGetPhysicalDeviceExternalSemaphoreProperties = - PFN_vkGetPhysicalDeviceExternalSemaphoreProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalSemaphoreProperties" ) ); - vkGetDescriptorSetLayoutSupport = PFN_vkGetDescriptorSetLayoutSupport( vkGetInstanceProcAddr( instance, "vkGetDescriptorSetLayoutSupport" ) ); + PFN_vkEnumerateInstanceVersion vkEnumerateInstanceVersion = 0; + PFN_vkBindBufferMemory2 vkBindBufferMemory2 = 0; + PFN_vkBindImageMemory2 vkBindImageMemory2 = 0; + 
PFN_vkGetDeviceGroupPeerMemoryFeatures vkGetDeviceGroupPeerMemoryFeatures = 0; + PFN_vkCmdSetDeviceMask vkCmdSetDeviceMask = 0; + PFN_vkCmdDispatchBase vkCmdDispatchBase = 0; + PFN_vkEnumeratePhysicalDeviceGroups vkEnumeratePhysicalDeviceGroups = 0; + PFN_vkGetImageMemoryRequirements2 vkGetImageMemoryRequirements2 = 0; + PFN_vkGetBufferMemoryRequirements2 vkGetBufferMemoryRequirements2 = 0; + PFN_vkGetImageSparseMemoryRequirements2 vkGetImageSparseMemoryRequirements2 = 0; + PFN_vkGetPhysicalDeviceFeatures2 vkGetPhysicalDeviceFeatures2 = 0; + PFN_vkGetPhysicalDeviceProperties2 vkGetPhysicalDeviceProperties2 = 0; + PFN_vkGetPhysicalDeviceFormatProperties2 vkGetPhysicalDeviceFormatProperties2 = 0; + PFN_vkGetPhysicalDeviceImageFormatProperties2 vkGetPhysicalDeviceImageFormatProperties2 = 0; + PFN_vkGetPhysicalDeviceQueueFamilyProperties2 vkGetPhysicalDeviceQueueFamilyProperties2 = 0; + PFN_vkGetPhysicalDeviceMemoryProperties2 vkGetPhysicalDeviceMemoryProperties2 = 0; + PFN_vkGetPhysicalDeviceSparseImageFormatProperties2 vkGetPhysicalDeviceSparseImageFormatProperties2 = 0; + PFN_vkTrimCommandPool vkTrimCommandPool = 0; + PFN_vkGetDeviceQueue2 vkGetDeviceQueue2 = 0; + PFN_vkCreateSamplerYcbcrConversion vkCreateSamplerYcbcrConversion = 0; + PFN_vkDestroySamplerYcbcrConversion vkDestroySamplerYcbcrConversion = 0; + PFN_vkCreateDescriptorUpdateTemplate vkCreateDescriptorUpdateTemplate = 0; + PFN_vkDestroyDescriptorUpdateTemplate vkDestroyDescriptorUpdateTemplate = 0; + PFN_vkUpdateDescriptorSetWithTemplate vkUpdateDescriptorSetWithTemplate = 0; + PFN_vkGetPhysicalDeviceExternalBufferProperties vkGetPhysicalDeviceExternalBufferProperties = 0; + PFN_vkGetPhysicalDeviceExternalFenceProperties vkGetPhysicalDeviceExternalFenceProperties = 0; + PFN_vkGetPhysicalDeviceExternalSemaphoreProperties vkGetPhysicalDeviceExternalSemaphoreProperties = 0; + PFN_vkGetDescriptorSetLayoutSupport vkGetDescriptorSetLayoutSupport = 0; //=== VK_VERSION_1_2 === - vkCmdDrawIndirectCount = PFN_vkCmdDrawIndirectCount( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectCount" ) ); - vkCmdDrawIndexedIndirectCount = PFN_vkCmdDrawIndexedIndirectCount( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirectCount" ) ); - vkCreateRenderPass2 = PFN_vkCreateRenderPass2( vkGetInstanceProcAddr( instance, "vkCreateRenderPass2" ) ); - vkCmdBeginRenderPass2 = PFN_vkCmdBeginRenderPass2( vkGetInstanceProcAddr( instance, "vkCmdBeginRenderPass2" ) ); - vkCmdNextSubpass2 = PFN_vkCmdNextSubpass2( vkGetInstanceProcAddr( instance, "vkCmdNextSubpass2" ) ); - vkCmdEndRenderPass2 = PFN_vkCmdEndRenderPass2( vkGetInstanceProcAddr( instance, "vkCmdEndRenderPass2" ) ); - vkResetQueryPool = PFN_vkResetQueryPool( vkGetInstanceProcAddr( instance, "vkResetQueryPool" ) ); - vkGetSemaphoreCounterValue = PFN_vkGetSemaphoreCounterValue( vkGetInstanceProcAddr( instance, "vkGetSemaphoreCounterValue" ) ); - vkWaitSemaphores = PFN_vkWaitSemaphores( vkGetInstanceProcAddr( instance, "vkWaitSemaphores" ) ); - vkSignalSemaphore = PFN_vkSignalSemaphore( vkGetInstanceProcAddr( instance, "vkSignalSemaphore" ) ); - vkGetBufferDeviceAddress = PFN_vkGetBufferDeviceAddress( vkGetInstanceProcAddr( instance, "vkGetBufferDeviceAddress" ) ); - vkGetBufferOpaqueCaptureAddress = PFN_vkGetBufferOpaqueCaptureAddress( vkGetInstanceProcAddr( instance, "vkGetBufferOpaqueCaptureAddress" ) ); - vkGetDeviceMemoryOpaqueCaptureAddress = - PFN_vkGetDeviceMemoryOpaqueCaptureAddress( vkGetInstanceProcAddr( instance, "vkGetDeviceMemoryOpaqueCaptureAddress" ) ); + PFN_vkCmdDrawIndirectCount 
vkCmdDrawIndirectCount = 0; + PFN_vkCmdDrawIndexedIndirectCount vkCmdDrawIndexedIndirectCount = 0; + PFN_vkCreateRenderPass2 vkCreateRenderPass2 = 0; + PFN_vkCmdBeginRenderPass2 vkCmdBeginRenderPass2 = 0; + PFN_vkCmdNextSubpass2 vkCmdNextSubpass2 = 0; + PFN_vkCmdEndRenderPass2 vkCmdEndRenderPass2 = 0; + PFN_vkResetQueryPool vkResetQueryPool = 0; + PFN_vkGetSemaphoreCounterValue vkGetSemaphoreCounterValue = 0; + PFN_vkWaitSemaphores vkWaitSemaphores = 0; + PFN_vkSignalSemaphore vkSignalSemaphore = 0; + PFN_vkGetBufferDeviceAddress vkGetBufferDeviceAddress = 0; + PFN_vkGetBufferOpaqueCaptureAddress vkGetBufferOpaqueCaptureAddress = 0; + PFN_vkGetDeviceMemoryOpaqueCaptureAddress vkGetDeviceMemoryOpaqueCaptureAddress = 0; //=== VK_VERSION_1_3 === - vkGetPhysicalDeviceToolProperties = PFN_vkGetPhysicalDeviceToolProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceToolProperties" ) ); - vkCreatePrivateDataSlot = PFN_vkCreatePrivateDataSlot( vkGetInstanceProcAddr( instance, "vkCreatePrivateDataSlot" ) ); - vkDestroyPrivateDataSlot = PFN_vkDestroyPrivateDataSlot( vkGetInstanceProcAddr( instance, "vkDestroyPrivateDataSlot" ) ); - vkSetPrivateData = PFN_vkSetPrivateData( vkGetInstanceProcAddr( instance, "vkSetPrivateData" ) ); - vkGetPrivateData = PFN_vkGetPrivateData( vkGetInstanceProcAddr( instance, "vkGetPrivateData" ) ); - vkCmdSetEvent2 = PFN_vkCmdSetEvent2( vkGetInstanceProcAddr( instance, "vkCmdSetEvent2" ) ); - vkCmdResetEvent2 = PFN_vkCmdResetEvent2( vkGetInstanceProcAddr( instance, "vkCmdResetEvent2" ) ); - vkCmdWaitEvents2 = PFN_vkCmdWaitEvents2( vkGetInstanceProcAddr( instance, "vkCmdWaitEvents2" ) ); - vkCmdPipelineBarrier2 = PFN_vkCmdPipelineBarrier2( vkGetInstanceProcAddr( instance, "vkCmdPipelineBarrier2" ) ); - vkCmdWriteTimestamp2 = PFN_vkCmdWriteTimestamp2( vkGetInstanceProcAddr( instance, "vkCmdWriteTimestamp2" ) ); - vkQueueSubmit2 = PFN_vkQueueSubmit2( vkGetInstanceProcAddr( instance, "vkQueueSubmit2" ) ); - vkCmdCopyBuffer2 = PFN_vkCmdCopyBuffer2( vkGetInstanceProcAddr( instance, "vkCmdCopyBuffer2" ) ); - vkCmdCopyImage2 = PFN_vkCmdCopyImage2( vkGetInstanceProcAddr( instance, "vkCmdCopyImage2" ) ); - vkCmdCopyBufferToImage2 = PFN_vkCmdCopyBufferToImage2( vkGetInstanceProcAddr( instance, "vkCmdCopyBufferToImage2" ) ); - vkCmdCopyImageToBuffer2 = PFN_vkCmdCopyImageToBuffer2( vkGetInstanceProcAddr( instance, "vkCmdCopyImageToBuffer2" ) ); - vkCmdBlitImage2 = PFN_vkCmdBlitImage2( vkGetInstanceProcAddr( instance, "vkCmdBlitImage2" ) ); - vkCmdResolveImage2 = PFN_vkCmdResolveImage2( vkGetInstanceProcAddr( instance, "vkCmdResolveImage2" ) ); - vkCmdBeginRendering = PFN_vkCmdBeginRendering( vkGetInstanceProcAddr( instance, "vkCmdBeginRendering" ) ); - vkCmdEndRendering = PFN_vkCmdEndRendering( vkGetInstanceProcAddr( instance, "vkCmdEndRendering" ) ); - vkCmdSetCullMode = PFN_vkCmdSetCullMode( vkGetInstanceProcAddr( instance, "vkCmdSetCullMode" ) ); - vkCmdSetFrontFace = PFN_vkCmdSetFrontFace( vkGetInstanceProcAddr( instance, "vkCmdSetFrontFace" ) ); - vkCmdSetPrimitiveTopology = PFN_vkCmdSetPrimitiveTopology( vkGetInstanceProcAddr( instance, "vkCmdSetPrimitiveTopology" ) ); - vkCmdSetViewportWithCount = PFN_vkCmdSetViewportWithCount( vkGetInstanceProcAddr( instance, "vkCmdSetViewportWithCount" ) ); - vkCmdSetScissorWithCount = PFN_vkCmdSetScissorWithCount( vkGetInstanceProcAddr( instance, "vkCmdSetScissorWithCount" ) ); - vkCmdBindVertexBuffers2 = PFN_vkCmdBindVertexBuffers2( vkGetInstanceProcAddr( instance, "vkCmdBindVertexBuffers2" ) ); - vkCmdSetDepthTestEnable = 
PFN_vkCmdSetDepthTestEnable( vkGetInstanceProcAddr( instance, "vkCmdSetDepthTestEnable" ) ); - vkCmdSetDepthWriteEnable = PFN_vkCmdSetDepthWriteEnable( vkGetInstanceProcAddr( instance, "vkCmdSetDepthWriteEnable" ) ); - vkCmdSetDepthCompareOp = PFN_vkCmdSetDepthCompareOp( vkGetInstanceProcAddr( instance, "vkCmdSetDepthCompareOp" ) ); - vkCmdSetDepthBoundsTestEnable = PFN_vkCmdSetDepthBoundsTestEnable( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBoundsTestEnable" ) ); - vkCmdSetStencilTestEnable = PFN_vkCmdSetStencilTestEnable( vkGetInstanceProcAddr( instance, "vkCmdSetStencilTestEnable" ) ); - vkCmdSetStencilOp = PFN_vkCmdSetStencilOp( vkGetInstanceProcAddr( instance, "vkCmdSetStencilOp" ) ); - vkCmdSetRasterizerDiscardEnable = PFN_vkCmdSetRasterizerDiscardEnable( vkGetInstanceProcAddr( instance, "vkCmdSetRasterizerDiscardEnable" ) ); - vkCmdSetDepthBiasEnable = PFN_vkCmdSetDepthBiasEnable( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBiasEnable" ) ); - vkCmdSetPrimitiveRestartEnable = PFN_vkCmdSetPrimitiveRestartEnable( vkGetInstanceProcAddr( instance, "vkCmdSetPrimitiveRestartEnable" ) ); - vkGetDeviceBufferMemoryRequirements = PFN_vkGetDeviceBufferMemoryRequirements( vkGetInstanceProcAddr( instance, "vkGetDeviceBufferMemoryRequirements" ) ); - vkGetDeviceImageMemoryRequirements = PFN_vkGetDeviceImageMemoryRequirements( vkGetInstanceProcAddr( instance, "vkGetDeviceImageMemoryRequirements" ) ); - vkGetDeviceImageSparseMemoryRequirements = - PFN_vkGetDeviceImageSparseMemoryRequirements( vkGetInstanceProcAddr( instance, "vkGetDeviceImageSparseMemoryRequirements" ) ); + PFN_vkGetPhysicalDeviceToolProperties vkGetPhysicalDeviceToolProperties = 0; + PFN_vkCreatePrivateDataSlot vkCreatePrivateDataSlot = 0; + PFN_vkDestroyPrivateDataSlot vkDestroyPrivateDataSlot = 0; + PFN_vkSetPrivateData vkSetPrivateData = 0; + PFN_vkGetPrivateData vkGetPrivateData = 0; + PFN_vkCmdSetEvent2 vkCmdSetEvent2 = 0; + PFN_vkCmdResetEvent2 vkCmdResetEvent2 = 0; + PFN_vkCmdWaitEvents2 vkCmdWaitEvents2 = 0; + PFN_vkCmdPipelineBarrier2 vkCmdPipelineBarrier2 = 0; + PFN_vkCmdWriteTimestamp2 vkCmdWriteTimestamp2 = 0; + PFN_vkQueueSubmit2 vkQueueSubmit2 = 0; + PFN_vkCmdCopyBuffer2 vkCmdCopyBuffer2 = 0; + PFN_vkCmdCopyImage2 vkCmdCopyImage2 = 0; + PFN_vkCmdCopyBufferToImage2 vkCmdCopyBufferToImage2 = 0; + PFN_vkCmdCopyImageToBuffer2 vkCmdCopyImageToBuffer2 = 0; + PFN_vkCmdBlitImage2 vkCmdBlitImage2 = 0; + PFN_vkCmdResolveImage2 vkCmdResolveImage2 = 0; + PFN_vkCmdBeginRendering vkCmdBeginRendering = 0; + PFN_vkCmdEndRendering vkCmdEndRendering = 0; + PFN_vkCmdSetCullMode vkCmdSetCullMode = 0; + PFN_vkCmdSetFrontFace vkCmdSetFrontFace = 0; + PFN_vkCmdSetPrimitiveTopology vkCmdSetPrimitiveTopology = 0; + PFN_vkCmdSetViewportWithCount vkCmdSetViewportWithCount = 0; + PFN_vkCmdSetScissorWithCount vkCmdSetScissorWithCount = 0; + PFN_vkCmdBindVertexBuffers2 vkCmdBindVertexBuffers2 = 0; + PFN_vkCmdSetDepthTestEnable vkCmdSetDepthTestEnable = 0; + PFN_vkCmdSetDepthWriteEnable vkCmdSetDepthWriteEnable = 0; + PFN_vkCmdSetDepthCompareOp vkCmdSetDepthCompareOp = 0; + PFN_vkCmdSetDepthBoundsTestEnable vkCmdSetDepthBoundsTestEnable = 0; + PFN_vkCmdSetStencilTestEnable vkCmdSetStencilTestEnable = 0; + PFN_vkCmdSetStencilOp vkCmdSetStencilOp = 0; + PFN_vkCmdSetRasterizerDiscardEnable vkCmdSetRasterizerDiscardEnable = 0; + PFN_vkCmdSetDepthBiasEnable vkCmdSetDepthBiasEnable = 0; + PFN_vkCmdSetPrimitiveRestartEnable vkCmdSetPrimitiveRestartEnable = 0; + PFN_vkGetDeviceBufferMemoryRequirements vkGetDeviceBufferMemoryRequirements = 0; + 
PFN_vkGetDeviceImageMemoryRequirements vkGetDeviceImageMemoryRequirements = 0;
+ PFN_vkGetDeviceImageSparseMemoryRequirements vkGetDeviceImageSparseMemoryRequirements = 0;
//=== VK_KHR_surface ===
- vkDestroySurfaceKHR = PFN_vkDestroySurfaceKHR( vkGetInstanceProcAddr( instance, "vkDestroySurfaceKHR" ) );
- vkGetPhysicalDeviceSurfaceSupportKHR =
- PFN_vkGetPhysicalDeviceSurfaceSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceSupportKHR" ) );
- vkGetPhysicalDeviceSurfaceCapabilitiesKHR =
- PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilitiesKHR" ) );
- vkGetPhysicalDeviceSurfaceFormatsKHR =
- PFN_vkGetPhysicalDeviceSurfaceFormatsKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceFormatsKHR" ) );
- vkGetPhysicalDeviceSurfacePresentModesKHR =
- PFN_vkGetPhysicalDeviceSurfacePresentModesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfacePresentModesKHR" ) );
+ PFN_vkDestroySurfaceKHR vkDestroySurfaceKHR = 0;
+ PFN_vkGetPhysicalDeviceSurfaceSupportKHR vkGetPhysicalDeviceSurfaceSupportKHR = 0;
+ PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR vkGetPhysicalDeviceSurfaceCapabilitiesKHR = 0;
+ PFN_vkGetPhysicalDeviceSurfaceFormatsKHR vkGetPhysicalDeviceSurfaceFormatsKHR = 0;
+ PFN_vkGetPhysicalDeviceSurfacePresentModesKHR vkGetPhysicalDeviceSurfacePresentModesKHR = 0;
//=== VK_KHR_swapchain ===
- vkCreateSwapchainKHR = PFN_vkCreateSwapchainKHR( vkGetInstanceProcAddr( instance, "vkCreateSwapchainKHR" ) );
- vkDestroySwapchainKHR = PFN_vkDestroySwapchainKHR( vkGetInstanceProcAddr( instance, "vkDestroySwapchainKHR" ) );
- vkGetSwapchainImagesKHR = PFN_vkGetSwapchainImagesKHR( vkGetInstanceProcAddr( instance, "vkGetSwapchainImagesKHR" ) );
- vkAcquireNextImageKHR = PFN_vkAcquireNextImageKHR( vkGetInstanceProcAddr( instance, "vkAcquireNextImageKHR" ) );
- vkQueuePresentKHR = PFN_vkQueuePresentKHR( vkGetInstanceProcAddr( instance, "vkQueuePresentKHR" ) );
- vkGetDeviceGroupPresentCapabilitiesKHR =
- PFN_vkGetDeviceGroupPresentCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceGroupPresentCapabilitiesKHR" ) );
- vkGetDeviceGroupSurfacePresentModesKHR =
- PFN_vkGetDeviceGroupSurfacePresentModesKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceGroupSurfacePresentModesKHR" ) );
- vkGetPhysicalDevicePresentRectanglesKHR =
- PFN_vkGetPhysicalDevicePresentRectanglesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDevicePresentRectanglesKHR" ) );
- vkAcquireNextImage2KHR = PFN_vkAcquireNextImage2KHR( vkGetInstanceProcAddr( instance, "vkAcquireNextImage2KHR" ) );
+ PFN_vkCreateSwapchainKHR vkCreateSwapchainKHR = 0;
+ PFN_vkDestroySwapchainKHR vkDestroySwapchainKHR = 0;
+ PFN_vkGetSwapchainImagesKHR vkGetSwapchainImagesKHR = 0;
+ PFN_vkAcquireNextImageKHR vkAcquireNextImageKHR = 0;
+ PFN_vkQueuePresentKHR vkQueuePresentKHR = 0;
+ PFN_vkGetDeviceGroupPresentCapabilitiesKHR vkGetDeviceGroupPresentCapabilitiesKHR = 0;
+ PFN_vkGetDeviceGroupSurfacePresentModesKHR vkGetDeviceGroupSurfacePresentModesKHR = 0;
+ PFN_vkGetPhysicalDevicePresentRectanglesKHR vkGetPhysicalDevicePresentRectanglesKHR = 0;
+ PFN_vkAcquireNextImage2KHR vkAcquireNextImage2KHR = 0;
//=== VK_KHR_display ===
- vkGetPhysicalDeviceDisplayPropertiesKHR =
- PFN_vkGetPhysicalDeviceDisplayPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPropertiesKHR" ) );
- vkGetPhysicalDeviceDisplayPlanePropertiesKHR =
- PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPlanePropertiesKHR" ) );
- vkGetDisplayPlaneSupportedDisplaysKHR =
- PFN_vkGetDisplayPlaneSupportedDisplaysKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneSupportedDisplaysKHR" ) );
- vkGetDisplayModePropertiesKHR = PFN_vkGetDisplayModePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayModePropertiesKHR" ) );
- vkCreateDisplayModeKHR = PFN_vkCreateDisplayModeKHR( vkGetInstanceProcAddr( instance, "vkCreateDisplayModeKHR" ) );
- vkGetDisplayPlaneCapabilitiesKHR = PFN_vkGetDisplayPlaneCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneCapabilitiesKHR" ) );
- vkCreateDisplayPlaneSurfaceKHR = PFN_vkCreateDisplayPlaneSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateDisplayPlaneSurfaceKHR" ) );
+ PFN_vkGetPhysicalDeviceDisplayPropertiesKHR vkGetPhysicalDeviceDisplayPropertiesKHR = 0;
+ PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR vkGetPhysicalDeviceDisplayPlanePropertiesKHR = 0;
+ PFN_vkGetDisplayPlaneSupportedDisplaysKHR vkGetDisplayPlaneSupportedDisplaysKHR = 0;
+ PFN_vkGetDisplayModePropertiesKHR vkGetDisplayModePropertiesKHR = 0;
+ PFN_vkCreateDisplayModeKHR vkCreateDisplayModeKHR = 0;
+ PFN_vkGetDisplayPlaneCapabilitiesKHR vkGetDisplayPlaneCapabilitiesKHR = 0;
+ PFN_vkCreateDisplayPlaneSurfaceKHR vkCreateDisplayPlaneSurfaceKHR = 0;
//=== VK_KHR_display_swapchain ===
- vkCreateSharedSwapchainsKHR = PFN_vkCreateSharedSwapchainsKHR( vkGetInstanceProcAddr( instance, "vkCreateSharedSwapchainsKHR" ) );
+ PFN_vkCreateSharedSwapchainsKHR vkCreateSharedSwapchainsKHR = 0;
#if defined( VK_USE_PLATFORM_XLIB_KHR )
//=== VK_KHR_xlib_surface ===
- vkCreateXlibSurfaceKHR = PFN_vkCreateXlibSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateXlibSurfaceKHR" ) );
- vkGetPhysicalDeviceXlibPresentationSupportKHR =
- PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceXlibPresentationSupportKHR" ) );
+ PFN_vkCreateXlibSurfaceKHR vkCreateXlibSurfaceKHR = 0;
+ PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR vkGetPhysicalDeviceXlibPresentationSupportKHR = 0;
+#else
+ PFN_dummy vkCreateXlibSurfaceKHR_placeholder = 0;
+ PFN_dummy vkGetPhysicalDeviceXlibPresentationSupportKHR_placeholder = 0;
#endif /*VK_USE_PLATFORM_XLIB_KHR*/
#if defined( VK_USE_PLATFORM_XCB_KHR )
//=== VK_KHR_xcb_surface ===
- vkCreateXcbSurfaceKHR = PFN_vkCreateXcbSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateXcbSurfaceKHR" ) );
- vkGetPhysicalDeviceXcbPresentationSupportKHR =
- PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceXcbPresentationSupportKHR" ) );
+ PFN_vkCreateXcbSurfaceKHR vkCreateXcbSurfaceKHR = 0;
+ PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR vkGetPhysicalDeviceXcbPresentationSupportKHR = 0;
+#else
+ PFN_dummy vkCreateXcbSurfaceKHR_placeholder = 0;
+ PFN_dummy vkGetPhysicalDeviceXcbPresentationSupportKHR_placeholder = 0;
#endif /*VK_USE_PLATFORM_XCB_KHR*/
#if defined( VK_USE_PLATFORM_WAYLAND_KHR )
//=== VK_KHR_wayland_surface ===
- vkCreateWaylandSurfaceKHR = PFN_vkCreateWaylandSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateWaylandSurfaceKHR" ) );
- vkGetPhysicalDeviceWaylandPresentationSupportKHR =
- PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceWaylandPresentationSupportKHR" ) );
+ PFN_vkCreateWaylandSurfaceKHR vkCreateWaylandSurfaceKHR = 0;
+ PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR vkGetPhysicalDeviceWaylandPresentationSupportKHR = 0;
+#else
+ PFN_dummy
vkCreateWaylandSurfaceKHR_placeholder = 0; + PFN_dummy vkGetPhysicalDeviceWaylandPresentationSupportKHR_placeholder = 0; #endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ #if defined( VK_USE_PLATFORM_ANDROID_KHR ) //=== VK_KHR_android_surface === - vkCreateAndroidSurfaceKHR = PFN_vkCreateAndroidSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateAndroidSurfaceKHR" ) ); + PFN_vkCreateAndroidSurfaceKHR vkCreateAndroidSurfaceKHR = 0; +#else + PFN_dummy vkCreateAndroidSurfaceKHR_placeholder = 0; #endif /*VK_USE_PLATFORM_ANDROID_KHR*/ #if defined( VK_USE_PLATFORM_WIN32_KHR ) //=== VK_KHR_win32_surface === - vkCreateWin32SurfaceKHR = PFN_vkCreateWin32SurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateWin32SurfaceKHR" ) ); - vkGetPhysicalDeviceWin32PresentationSupportKHR = - PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceWin32PresentationSupportKHR" ) ); + PFN_vkCreateWin32SurfaceKHR vkCreateWin32SurfaceKHR = 0; + PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR vkGetPhysicalDeviceWin32PresentationSupportKHR = 0; +#else + PFN_dummy vkCreateWin32SurfaceKHR_placeholder = 0; + PFN_dummy vkGetPhysicalDeviceWin32PresentationSupportKHR_placeholder = 0; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ //=== VK_EXT_debug_report === - vkCreateDebugReportCallbackEXT = PFN_vkCreateDebugReportCallbackEXT( vkGetInstanceProcAddr( instance, "vkCreateDebugReportCallbackEXT" ) ); - vkDestroyDebugReportCallbackEXT = PFN_vkDestroyDebugReportCallbackEXT( vkGetInstanceProcAddr( instance, "vkDestroyDebugReportCallbackEXT" ) ); - vkDebugReportMessageEXT = PFN_vkDebugReportMessageEXT( vkGetInstanceProcAddr( instance, "vkDebugReportMessageEXT" ) ); + PFN_vkCreateDebugReportCallbackEXT vkCreateDebugReportCallbackEXT = 0; + PFN_vkDestroyDebugReportCallbackEXT vkDestroyDebugReportCallbackEXT = 0; + PFN_vkDebugReportMessageEXT vkDebugReportMessageEXT = 0; //=== VK_EXT_debug_marker === - vkDebugMarkerSetObjectTagEXT = PFN_vkDebugMarkerSetObjectTagEXT( vkGetInstanceProcAddr( instance, "vkDebugMarkerSetObjectTagEXT" ) ); - vkDebugMarkerSetObjectNameEXT = PFN_vkDebugMarkerSetObjectNameEXT( vkGetInstanceProcAddr( instance, "vkDebugMarkerSetObjectNameEXT" ) ); - vkCmdDebugMarkerBeginEXT = PFN_vkCmdDebugMarkerBeginEXT( vkGetInstanceProcAddr( instance, "vkCmdDebugMarkerBeginEXT" ) ); - vkCmdDebugMarkerEndEXT = PFN_vkCmdDebugMarkerEndEXT( vkGetInstanceProcAddr( instance, "vkCmdDebugMarkerEndEXT" ) ); - vkCmdDebugMarkerInsertEXT = PFN_vkCmdDebugMarkerInsertEXT( vkGetInstanceProcAddr( instance, "vkCmdDebugMarkerInsertEXT" ) ); + PFN_vkDebugMarkerSetObjectTagEXT vkDebugMarkerSetObjectTagEXT = 0; + PFN_vkDebugMarkerSetObjectNameEXT vkDebugMarkerSetObjectNameEXT = 0; + PFN_vkCmdDebugMarkerBeginEXT vkCmdDebugMarkerBeginEXT = 0; + PFN_vkCmdDebugMarkerEndEXT vkCmdDebugMarkerEndEXT = 0; + PFN_vkCmdDebugMarkerInsertEXT vkCmdDebugMarkerInsertEXT = 0; //=== VK_KHR_video_queue === - vkGetPhysicalDeviceVideoCapabilitiesKHR = - PFN_vkGetPhysicalDeviceVideoCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceVideoCapabilitiesKHR" ) ); - vkGetPhysicalDeviceVideoFormatPropertiesKHR = - PFN_vkGetPhysicalDeviceVideoFormatPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceVideoFormatPropertiesKHR" ) ); - vkCreateVideoSessionKHR = PFN_vkCreateVideoSessionKHR( vkGetInstanceProcAddr( instance, "vkCreateVideoSessionKHR" ) ); - vkDestroyVideoSessionKHR = PFN_vkDestroyVideoSessionKHR( vkGetInstanceProcAddr( instance, "vkDestroyVideoSessionKHR" ) ); - 
vkGetVideoSessionMemoryRequirementsKHR = - PFN_vkGetVideoSessionMemoryRequirementsKHR( vkGetInstanceProcAddr( instance, "vkGetVideoSessionMemoryRequirementsKHR" ) ); - vkBindVideoSessionMemoryKHR = PFN_vkBindVideoSessionMemoryKHR( vkGetInstanceProcAddr( instance, "vkBindVideoSessionMemoryKHR" ) ); - vkCreateVideoSessionParametersKHR = PFN_vkCreateVideoSessionParametersKHR( vkGetInstanceProcAddr( instance, "vkCreateVideoSessionParametersKHR" ) ); - vkUpdateVideoSessionParametersKHR = PFN_vkUpdateVideoSessionParametersKHR( vkGetInstanceProcAddr( instance, "vkUpdateVideoSessionParametersKHR" ) ); - vkDestroyVideoSessionParametersKHR = PFN_vkDestroyVideoSessionParametersKHR( vkGetInstanceProcAddr( instance, "vkDestroyVideoSessionParametersKHR" ) ); - vkCmdBeginVideoCodingKHR = PFN_vkCmdBeginVideoCodingKHR( vkGetInstanceProcAddr( instance, "vkCmdBeginVideoCodingKHR" ) ); - vkCmdEndVideoCodingKHR = PFN_vkCmdEndVideoCodingKHR( vkGetInstanceProcAddr( instance, "vkCmdEndVideoCodingKHR" ) ); - vkCmdControlVideoCodingKHR = PFN_vkCmdControlVideoCodingKHR( vkGetInstanceProcAddr( instance, "vkCmdControlVideoCodingKHR" ) ); + PFN_vkGetPhysicalDeviceVideoCapabilitiesKHR vkGetPhysicalDeviceVideoCapabilitiesKHR = 0; + PFN_vkGetPhysicalDeviceVideoFormatPropertiesKHR vkGetPhysicalDeviceVideoFormatPropertiesKHR = 0; + PFN_vkCreateVideoSessionKHR vkCreateVideoSessionKHR = 0; + PFN_vkDestroyVideoSessionKHR vkDestroyVideoSessionKHR = 0; + PFN_vkGetVideoSessionMemoryRequirementsKHR vkGetVideoSessionMemoryRequirementsKHR = 0; + PFN_vkBindVideoSessionMemoryKHR vkBindVideoSessionMemoryKHR = 0; + PFN_vkCreateVideoSessionParametersKHR vkCreateVideoSessionParametersKHR = 0; + PFN_vkUpdateVideoSessionParametersKHR vkUpdateVideoSessionParametersKHR = 0; + PFN_vkDestroyVideoSessionParametersKHR vkDestroyVideoSessionParametersKHR = 0; + PFN_vkCmdBeginVideoCodingKHR vkCmdBeginVideoCodingKHR = 0; + PFN_vkCmdEndVideoCodingKHR vkCmdEndVideoCodingKHR = 0; + PFN_vkCmdControlVideoCodingKHR vkCmdControlVideoCodingKHR = 0; //=== VK_KHR_video_decode_queue === - vkCmdDecodeVideoKHR = PFN_vkCmdDecodeVideoKHR( vkGetInstanceProcAddr( instance, "vkCmdDecodeVideoKHR" ) ); + PFN_vkCmdDecodeVideoKHR vkCmdDecodeVideoKHR = 0; //=== VK_EXT_transform_feedback === - vkCmdBindTransformFeedbackBuffersEXT = - PFN_vkCmdBindTransformFeedbackBuffersEXT( vkGetInstanceProcAddr( instance, "vkCmdBindTransformFeedbackBuffersEXT" ) ); - vkCmdBeginTransformFeedbackEXT = PFN_vkCmdBeginTransformFeedbackEXT( vkGetInstanceProcAddr( instance, "vkCmdBeginTransformFeedbackEXT" ) ); - vkCmdEndTransformFeedbackEXT = PFN_vkCmdEndTransformFeedbackEXT( vkGetInstanceProcAddr( instance, "vkCmdEndTransformFeedbackEXT" ) ); - vkCmdBeginQueryIndexedEXT = PFN_vkCmdBeginQueryIndexedEXT( vkGetInstanceProcAddr( instance, "vkCmdBeginQueryIndexedEXT" ) ); - vkCmdEndQueryIndexedEXT = PFN_vkCmdEndQueryIndexedEXT( vkGetInstanceProcAddr( instance, "vkCmdEndQueryIndexedEXT" ) ); - vkCmdDrawIndirectByteCountEXT = PFN_vkCmdDrawIndirectByteCountEXT( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectByteCountEXT" ) ); + PFN_vkCmdBindTransformFeedbackBuffersEXT vkCmdBindTransformFeedbackBuffersEXT = 0; + PFN_vkCmdBeginTransformFeedbackEXT vkCmdBeginTransformFeedbackEXT = 0; + PFN_vkCmdEndTransformFeedbackEXT vkCmdEndTransformFeedbackEXT = 0; + PFN_vkCmdBeginQueryIndexedEXT vkCmdBeginQueryIndexedEXT = 0; + PFN_vkCmdEndQueryIndexedEXT vkCmdEndQueryIndexedEXT = 0; + PFN_vkCmdDrawIndirectByteCountEXT vkCmdDrawIndirectByteCountEXT = 0; //=== VK_NVX_binary_import === - vkCreateCuModuleNVX = 
PFN_vkCreateCuModuleNVX( vkGetInstanceProcAddr( instance, "vkCreateCuModuleNVX" ) ); - vkCreateCuFunctionNVX = PFN_vkCreateCuFunctionNVX( vkGetInstanceProcAddr( instance, "vkCreateCuFunctionNVX" ) ); - vkDestroyCuModuleNVX = PFN_vkDestroyCuModuleNVX( vkGetInstanceProcAddr( instance, "vkDestroyCuModuleNVX" ) ); - vkDestroyCuFunctionNVX = PFN_vkDestroyCuFunctionNVX( vkGetInstanceProcAddr( instance, "vkDestroyCuFunctionNVX" ) ); - vkCmdCuLaunchKernelNVX = PFN_vkCmdCuLaunchKernelNVX( vkGetInstanceProcAddr( instance, "vkCmdCuLaunchKernelNVX" ) ); + PFN_vkCreateCuModuleNVX vkCreateCuModuleNVX = 0; + PFN_vkCreateCuFunctionNVX vkCreateCuFunctionNVX = 0; + PFN_vkDestroyCuModuleNVX vkDestroyCuModuleNVX = 0; + PFN_vkDestroyCuFunctionNVX vkDestroyCuFunctionNVX = 0; + PFN_vkCmdCuLaunchKernelNVX vkCmdCuLaunchKernelNVX = 0; //=== VK_NVX_image_view_handle === - vkGetImageViewHandleNVX = PFN_vkGetImageViewHandleNVX( vkGetInstanceProcAddr( instance, "vkGetImageViewHandleNVX" ) ); - vkGetImageViewAddressNVX = PFN_vkGetImageViewAddressNVX( vkGetInstanceProcAddr( instance, "vkGetImageViewAddressNVX" ) ); + PFN_vkGetImageViewHandleNVX vkGetImageViewHandleNVX = 0; + PFN_vkGetImageViewAddressNVX vkGetImageViewAddressNVX = 0; //=== VK_AMD_draw_indirect_count === - vkCmdDrawIndirectCountAMD = PFN_vkCmdDrawIndirectCountAMD( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectCountAMD" ) ); - if ( !vkCmdDrawIndirectCount ) - vkCmdDrawIndirectCount = vkCmdDrawIndirectCountAMD; - vkCmdDrawIndexedIndirectCountAMD = PFN_vkCmdDrawIndexedIndirectCountAMD( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirectCountAMD" ) ); - if ( !vkCmdDrawIndexedIndirectCount ) - vkCmdDrawIndexedIndirectCount = vkCmdDrawIndexedIndirectCountAMD; + PFN_vkCmdDrawIndirectCountAMD vkCmdDrawIndirectCountAMD = 0; + PFN_vkCmdDrawIndexedIndirectCountAMD vkCmdDrawIndexedIndirectCountAMD = 0; //=== VK_AMD_shader_info === - vkGetShaderInfoAMD = PFN_vkGetShaderInfoAMD( vkGetInstanceProcAddr( instance, "vkGetShaderInfoAMD" ) ); + PFN_vkGetShaderInfoAMD vkGetShaderInfoAMD = 0; //=== VK_KHR_dynamic_rendering === - vkCmdBeginRenderingKHR = PFN_vkCmdBeginRenderingKHR( vkGetInstanceProcAddr( instance, "vkCmdBeginRenderingKHR" ) ); - if ( !vkCmdBeginRendering ) - vkCmdBeginRendering = vkCmdBeginRenderingKHR; - vkCmdEndRenderingKHR = PFN_vkCmdEndRenderingKHR( vkGetInstanceProcAddr( instance, "vkCmdEndRenderingKHR" ) ); - if ( !vkCmdEndRendering ) - vkCmdEndRendering = vkCmdEndRenderingKHR; + PFN_vkCmdBeginRenderingKHR vkCmdBeginRenderingKHR = 0; + PFN_vkCmdEndRenderingKHR vkCmdEndRenderingKHR = 0; #if defined( VK_USE_PLATFORM_GGP ) //=== VK_GGP_stream_descriptor_surface === - vkCreateStreamDescriptorSurfaceGGP = PFN_vkCreateStreamDescriptorSurfaceGGP( vkGetInstanceProcAddr( instance, "vkCreateStreamDescriptorSurfaceGGP" ) ); + PFN_vkCreateStreamDescriptorSurfaceGGP vkCreateStreamDescriptorSurfaceGGP = 0; +#else + PFN_dummy vkCreateStreamDescriptorSurfaceGGP_placeholder = 0; #endif /*VK_USE_PLATFORM_GGP*/ //=== VK_NV_external_memory_capabilities === - vkGetPhysicalDeviceExternalImageFormatPropertiesNV = - PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalImageFormatPropertiesNV" ) ); + PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV vkGetPhysicalDeviceExternalImageFormatPropertiesNV = 0; #if defined( VK_USE_PLATFORM_WIN32_KHR ) //=== VK_NV_external_memory_win32 === - vkGetMemoryWin32HandleNV = PFN_vkGetMemoryWin32HandleNV( vkGetInstanceProcAddr( instance, "vkGetMemoryWin32HandleNV" ) 
); + PFN_vkGetMemoryWin32HandleNV vkGetMemoryWin32HandleNV = 0; +#else + PFN_dummy vkGetMemoryWin32HandleNV_placeholder = 0; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ //=== VK_KHR_get_physical_device_properties2 === - vkGetPhysicalDeviceFeatures2KHR = PFN_vkGetPhysicalDeviceFeatures2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures2KHR" ) ); - if ( !vkGetPhysicalDeviceFeatures2 ) - vkGetPhysicalDeviceFeatures2 = vkGetPhysicalDeviceFeatures2KHR; - vkGetPhysicalDeviceProperties2KHR = PFN_vkGetPhysicalDeviceProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties2KHR" ) ); - if ( !vkGetPhysicalDeviceProperties2 ) - vkGetPhysicalDeviceProperties2 = vkGetPhysicalDeviceProperties2KHR; - vkGetPhysicalDeviceFormatProperties2KHR = - PFN_vkGetPhysicalDeviceFormatProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties2KHR" ) ); - if ( !vkGetPhysicalDeviceFormatProperties2 ) - vkGetPhysicalDeviceFormatProperties2 = vkGetPhysicalDeviceFormatProperties2KHR; - vkGetPhysicalDeviceImageFormatProperties2KHR = - PFN_vkGetPhysicalDeviceImageFormatProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties2KHR" ) ); - if ( !vkGetPhysicalDeviceImageFormatProperties2 ) - vkGetPhysicalDeviceImageFormatProperties2 = vkGetPhysicalDeviceImageFormatProperties2KHR; - vkGetPhysicalDeviceQueueFamilyProperties2KHR = - PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties2KHR" ) ); - if ( !vkGetPhysicalDeviceQueueFamilyProperties2 ) - vkGetPhysicalDeviceQueueFamilyProperties2 = vkGetPhysicalDeviceQueueFamilyProperties2KHR; - vkGetPhysicalDeviceMemoryProperties2KHR = - PFN_vkGetPhysicalDeviceMemoryProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties2KHR" ) ); - if ( !vkGetPhysicalDeviceMemoryProperties2 ) - vkGetPhysicalDeviceMemoryProperties2 = vkGetPhysicalDeviceMemoryProperties2KHR; - vkGetPhysicalDeviceSparseImageFormatProperties2KHR = - PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties2KHR" ) ); - if ( !vkGetPhysicalDeviceSparseImageFormatProperties2 ) - vkGetPhysicalDeviceSparseImageFormatProperties2 = vkGetPhysicalDeviceSparseImageFormatProperties2KHR; + PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR = 0; + PFN_vkGetPhysicalDeviceProperties2KHR vkGetPhysicalDeviceProperties2KHR = 0; + PFN_vkGetPhysicalDeviceFormatProperties2KHR vkGetPhysicalDeviceFormatProperties2KHR = 0; + PFN_vkGetPhysicalDeviceImageFormatProperties2KHR vkGetPhysicalDeviceImageFormatProperties2KHR = 0; + PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR vkGetPhysicalDeviceQueueFamilyProperties2KHR = 0; + PFN_vkGetPhysicalDeviceMemoryProperties2KHR vkGetPhysicalDeviceMemoryProperties2KHR = 0; + PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR vkGetPhysicalDeviceSparseImageFormatProperties2KHR = 0; //=== VK_KHR_device_group === - vkGetDeviceGroupPeerMemoryFeaturesKHR = - PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceGroupPeerMemoryFeaturesKHR" ) ); - if ( !vkGetDeviceGroupPeerMemoryFeatures ) - vkGetDeviceGroupPeerMemoryFeatures = vkGetDeviceGroupPeerMemoryFeaturesKHR; - vkCmdSetDeviceMaskKHR = PFN_vkCmdSetDeviceMaskKHR( vkGetInstanceProcAddr( instance, "vkCmdSetDeviceMaskKHR" ) ); - if ( !vkCmdSetDeviceMask ) - vkCmdSetDeviceMask = vkCmdSetDeviceMaskKHR; - vkCmdDispatchBaseKHR = 
PFN_vkCmdDispatchBaseKHR( vkGetInstanceProcAddr( instance, "vkCmdDispatchBaseKHR" ) ); - if ( !vkCmdDispatchBase ) - vkCmdDispatchBase = vkCmdDispatchBaseKHR; + PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR vkGetDeviceGroupPeerMemoryFeaturesKHR = 0; + PFN_vkCmdSetDeviceMaskKHR vkCmdSetDeviceMaskKHR = 0; + PFN_vkCmdDispatchBaseKHR vkCmdDispatchBaseKHR = 0; #if defined( VK_USE_PLATFORM_VI_NN ) //=== VK_NN_vi_surface === - vkCreateViSurfaceNN = PFN_vkCreateViSurfaceNN( vkGetInstanceProcAddr( instance, "vkCreateViSurfaceNN" ) ); + PFN_vkCreateViSurfaceNN vkCreateViSurfaceNN = 0; +#else + PFN_dummy vkCreateViSurfaceNN_placeholder = 0; #endif /*VK_USE_PLATFORM_VI_NN*/ //=== VK_KHR_maintenance1 === - vkTrimCommandPoolKHR = PFN_vkTrimCommandPoolKHR( vkGetInstanceProcAddr( instance, "vkTrimCommandPoolKHR" ) ); - if ( !vkTrimCommandPool ) - vkTrimCommandPool = vkTrimCommandPoolKHR; + PFN_vkTrimCommandPoolKHR vkTrimCommandPoolKHR = 0; //=== VK_KHR_device_group_creation === - vkEnumeratePhysicalDeviceGroupsKHR = PFN_vkEnumeratePhysicalDeviceGroupsKHR( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceGroupsKHR" ) ); - if ( !vkEnumeratePhysicalDeviceGroups ) - vkEnumeratePhysicalDeviceGroups = vkEnumeratePhysicalDeviceGroupsKHR; + PFN_vkEnumeratePhysicalDeviceGroupsKHR vkEnumeratePhysicalDeviceGroupsKHR = 0; //=== VK_KHR_external_memory_capabilities === - vkGetPhysicalDeviceExternalBufferPropertiesKHR = - PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalBufferPropertiesKHR" ) ); - if ( !vkGetPhysicalDeviceExternalBufferProperties ) - vkGetPhysicalDeviceExternalBufferProperties = vkGetPhysicalDeviceExternalBufferPropertiesKHR; + PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR vkGetPhysicalDeviceExternalBufferPropertiesKHR = 0; #if defined( VK_USE_PLATFORM_WIN32_KHR ) //=== VK_KHR_external_memory_win32 === - vkGetMemoryWin32HandleKHR = PFN_vkGetMemoryWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkGetMemoryWin32HandleKHR" ) ); - vkGetMemoryWin32HandlePropertiesKHR = PFN_vkGetMemoryWin32HandlePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetMemoryWin32HandlePropertiesKHR" ) ); + PFN_vkGetMemoryWin32HandleKHR vkGetMemoryWin32HandleKHR = 0; + PFN_vkGetMemoryWin32HandlePropertiesKHR vkGetMemoryWin32HandlePropertiesKHR = 0; +#else + PFN_dummy vkGetMemoryWin32HandleKHR_placeholder = 0; + PFN_dummy vkGetMemoryWin32HandlePropertiesKHR_placeholder = 0; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ //=== VK_KHR_external_memory_fd === - vkGetMemoryFdKHR = PFN_vkGetMemoryFdKHR( vkGetInstanceProcAddr( instance, "vkGetMemoryFdKHR" ) ); - vkGetMemoryFdPropertiesKHR = PFN_vkGetMemoryFdPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetMemoryFdPropertiesKHR" ) ); + PFN_vkGetMemoryFdKHR vkGetMemoryFdKHR = 0; + PFN_vkGetMemoryFdPropertiesKHR vkGetMemoryFdPropertiesKHR = 0; //=== VK_KHR_external_semaphore_capabilities === - vkGetPhysicalDeviceExternalSemaphorePropertiesKHR = - PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalSemaphorePropertiesKHR" ) ); - if ( !vkGetPhysicalDeviceExternalSemaphoreProperties ) - vkGetPhysicalDeviceExternalSemaphoreProperties = vkGetPhysicalDeviceExternalSemaphorePropertiesKHR; + PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR vkGetPhysicalDeviceExternalSemaphorePropertiesKHR = 0; #if defined( VK_USE_PLATFORM_WIN32_KHR ) //=== VK_KHR_external_semaphore_win32 === - vkImportSemaphoreWin32HandleKHR = PFN_vkImportSemaphoreWin32HandleKHR( 
vkGetInstanceProcAddr( instance, "vkImportSemaphoreWin32HandleKHR" ) ); - vkGetSemaphoreWin32HandleKHR = PFN_vkGetSemaphoreWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkGetSemaphoreWin32HandleKHR" ) ); + PFN_vkImportSemaphoreWin32HandleKHR vkImportSemaphoreWin32HandleKHR = 0; + PFN_vkGetSemaphoreWin32HandleKHR vkGetSemaphoreWin32HandleKHR = 0; +#else + PFN_dummy vkImportSemaphoreWin32HandleKHR_placeholder = 0; + PFN_dummy vkGetSemaphoreWin32HandleKHR_placeholder = 0; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ //=== VK_KHR_external_semaphore_fd === - vkImportSemaphoreFdKHR = PFN_vkImportSemaphoreFdKHR( vkGetInstanceProcAddr( instance, "vkImportSemaphoreFdKHR" ) ); - vkGetSemaphoreFdKHR = PFN_vkGetSemaphoreFdKHR( vkGetInstanceProcAddr( instance, "vkGetSemaphoreFdKHR" ) ); + PFN_vkImportSemaphoreFdKHR vkImportSemaphoreFdKHR = 0; + PFN_vkGetSemaphoreFdKHR vkGetSemaphoreFdKHR = 0; //=== VK_KHR_push_descriptor === - vkCmdPushDescriptorSetKHR = PFN_vkCmdPushDescriptorSetKHR( vkGetInstanceProcAddr( instance, "vkCmdPushDescriptorSetKHR" ) ); - vkCmdPushDescriptorSetWithTemplateKHR = - PFN_vkCmdPushDescriptorSetWithTemplateKHR( vkGetInstanceProcAddr( instance, "vkCmdPushDescriptorSetWithTemplateKHR" ) ); + PFN_vkCmdPushDescriptorSetKHR vkCmdPushDescriptorSetKHR = 0; + PFN_vkCmdPushDescriptorSetWithTemplateKHR vkCmdPushDescriptorSetWithTemplateKHR = 0; //=== VK_EXT_conditional_rendering === - vkCmdBeginConditionalRenderingEXT = PFN_vkCmdBeginConditionalRenderingEXT( vkGetInstanceProcAddr( instance, "vkCmdBeginConditionalRenderingEXT" ) ); - vkCmdEndConditionalRenderingEXT = PFN_vkCmdEndConditionalRenderingEXT( vkGetInstanceProcAddr( instance, "vkCmdEndConditionalRenderingEXT" ) ); + PFN_vkCmdBeginConditionalRenderingEXT vkCmdBeginConditionalRenderingEXT = 0; + PFN_vkCmdEndConditionalRenderingEXT vkCmdEndConditionalRenderingEXT = 0; //=== VK_KHR_descriptor_update_template === - vkCreateDescriptorUpdateTemplateKHR = PFN_vkCreateDescriptorUpdateTemplateKHR( vkGetInstanceProcAddr( instance, "vkCreateDescriptorUpdateTemplateKHR" ) ); - if ( !vkCreateDescriptorUpdateTemplate ) - vkCreateDescriptorUpdateTemplate = vkCreateDescriptorUpdateTemplateKHR; - vkDestroyDescriptorUpdateTemplateKHR = - PFN_vkDestroyDescriptorUpdateTemplateKHR( vkGetInstanceProcAddr( instance, "vkDestroyDescriptorUpdateTemplateKHR" ) ); - if ( !vkDestroyDescriptorUpdateTemplate ) - vkDestroyDescriptorUpdateTemplate = vkDestroyDescriptorUpdateTemplateKHR; - vkUpdateDescriptorSetWithTemplateKHR = - PFN_vkUpdateDescriptorSetWithTemplateKHR( vkGetInstanceProcAddr( instance, "vkUpdateDescriptorSetWithTemplateKHR" ) ); - if ( !vkUpdateDescriptorSetWithTemplate ) - vkUpdateDescriptorSetWithTemplate = vkUpdateDescriptorSetWithTemplateKHR; + PFN_vkCreateDescriptorUpdateTemplateKHR vkCreateDescriptorUpdateTemplateKHR = 0; + PFN_vkDestroyDescriptorUpdateTemplateKHR vkDestroyDescriptorUpdateTemplateKHR = 0; + PFN_vkUpdateDescriptorSetWithTemplateKHR vkUpdateDescriptorSetWithTemplateKHR = 0; //=== VK_NV_clip_space_w_scaling === - vkCmdSetViewportWScalingNV = PFN_vkCmdSetViewportWScalingNV( vkGetInstanceProcAddr( instance, "vkCmdSetViewportWScalingNV" ) ); + PFN_vkCmdSetViewportWScalingNV vkCmdSetViewportWScalingNV = 0; //=== VK_EXT_direct_mode_display === - vkReleaseDisplayEXT = PFN_vkReleaseDisplayEXT( vkGetInstanceProcAddr( instance, "vkReleaseDisplayEXT" ) ); + PFN_vkReleaseDisplayEXT vkReleaseDisplayEXT = 0; #if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT ) //=== VK_EXT_acquire_xlib_display === - vkAcquireXlibDisplayEXT = 
PFN_vkAcquireXlibDisplayEXT( vkGetInstanceProcAddr( instance, "vkAcquireXlibDisplayEXT" ) ); - vkGetRandROutputDisplayEXT = PFN_vkGetRandROutputDisplayEXT( vkGetInstanceProcAddr( instance, "vkGetRandROutputDisplayEXT" ) ); + PFN_vkAcquireXlibDisplayEXT vkAcquireXlibDisplayEXT = 0; + PFN_vkGetRandROutputDisplayEXT vkGetRandROutputDisplayEXT = 0; +#else + PFN_dummy vkAcquireXlibDisplayEXT_placeholder = 0; + PFN_dummy vkGetRandROutputDisplayEXT_placeholder = 0; #endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ //=== VK_EXT_display_surface_counter === - vkGetPhysicalDeviceSurfaceCapabilities2EXT = - PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilities2EXT" ) ); + PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT vkGetPhysicalDeviceSurfaceCapabilities2EXT = 0; //=== VK_EXT_display_control === - vkDisplayPowerControlEXT = PFN_vkDisplayPowerControlEXT( vkGetInstanceProcAddr( instance, "vkDisplayPowerControlEXT" ) ); - vkRegisterDeviceEventEXT = PFN_vkRegisterDeviceEventEXT( vkGetInstanceProcAddr( instance, "vkRegisterDeviceEventEXT" ) ); - vkRegisterDisplayEventEXT = PFN_vkRegisterDisplayEventEXT( vkGetInstanceProcAddr( instance, "vkRegisterDisplayEventEXT" ) ); - vkGetSwapchainCounterEXT = PFN_vkGetSwapchainCounterEXT( vkGetInstanceProcAddr( instance, "vkGetSwapchainCounterEXT" ) ); + PFN_vkDisplayPowerControlEXT vkDisplayPowerControlEXT = 0; + PFN_vkRegisterDeviceEventEXT vkRegisterDeviceEventEXT = 0; + PFN_vkRegisterDisplayEventEXT vkRegisterDisplayEventEXT = 0; + PFN_vkGetSwapchainCounterEXT vkGetSwapchainCounterEXT = 0; //=== VK_GOOGLE_display_timing === - vkGetRefreshCycleDurationGOOGLE = PFN_vkGetRefreshCycleDurationGOOGLE( vkGetInstanceProcAddr( instance, "vkGetRefreshCycleDurationGOOGLE" ) ); - vkGetPastPresentationTimingGOOGLE = PFN_vkGetPastPresentationTimingGOOGLE( vkGetInstanceProcAddr( instance, "vkGetPastPresentationTimingGOOGLE" ) ); + PFN_vkGetRefreshCycleDurationGOOGLE vkGetRefreshCycleDurationGOOGLE = 0; + PFN_vkGetPastPresentationTimingGOOGLE vkGetPastPresentationTimingGOOGLE = 0; //=== VK_EXT_discard_rectangles === - vkCmdSetDiscardRectangleEXT = PFN_vkCmdSetDiscardRectangleEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDiscardRectangleEXT" ) ); - vkCmdSetDiscardRectangleEnableEXT = PFN_vkCmdSetDiscardRectangleEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDiscardRectangleEnableEXT" ) ); - vkCmdSetDiscardRectangleModeEXT = PFN_vkCmdSetDiscardRectangleModeEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDiscardRectangleModeEXT" ) ); + PFN_vkCmdSetDiscardRectangleEXT vkCmdSetDiscardRectangleEXT = 0; + PFN_vkCmdSetDiscardRectangleEnableEXT vkCmdSetDiscardRectangleEnableEXT = 0; + PFN_vkCmdSetDiscardRectangleModeEXT vkCmdSetDiscardRectangleModeEXT = 0; //=== VK_EXT_hdr_metadata === - vkSetHdrMetadataEXT = PFN_vkSetHdrMetadataEXT( vkGetInstanceProcAddr( instance, "vkSetHdrMetadataEXT" ) ); + PFN_vkSetHdrMetadataEXT vkSetHdrMetadataEXT = 0; //=== VK_KHR_create_renderpass2 === - vkCreateRenderPass2KHR = PFN_vkCreateRenderPass2KHR( vkGetInstanceProcAddr( instance, "vkCreateRenderPass2KHR" ) ); - if ( !vkCreateRenderPass2 ) - vkCreateRenderPass2 = vkCreateRenderPass2KHR; - vkCmdBeginRenderPass2KHR = PFN_vkCmdBeginRenderPass2KHR( vkGetInstanceProcAddr( instance, "vkCmdBeginRenderPass2KHR" ) ); - if ( !vkCmdBeginRenderPass2 ) - vkCmdBeginRenderPass2 = vkCmdBeginRenderPass2KHR; - vkCmdNextSubpass2KHR = PFN_vkCmdNextSubpass2KHR( vkGetInstanceProcAddr( instance, "vkCmdNextSubpass2KHR" ) ); - if ( !vkCmdNextSubpass2 ) - 
vkCmdNextSubpass2 = vkCmdNextSubpass2KHR; - vkCmdEndRenderPass2KHR = PFN_vkCmdEndRenderPass2KHR( vkGetInstanceProcAddr( instance, "vkCmdEndRenderPass2KHR" ) ); - if ( !vkCmdEndRenderPass2 ) - vkCmdEndRenderPass2 = vkCmdEndRenderPass2KHR; + PFN_vkCreateRenderPass2KHR vkCreateRenderPass2KHR = 0; + PFN_vkCmdBeginRenderPass2KHR vkCmdBeginRenderPass2KHR = 0; + PFN_vkCmdNextSubpass2KHR vkCmdNextSubpass2KHR = 0; + PFN_vkCmdEndRenderPass2KHR vkCmdEndRenderPass2KHR = 0; //=== VK_KHR_shared_presentable_image === - vkGetSwapchainStatusKHR = PFN_vkGetSwapchainStatusKHR( vkGetInstanceProcAddr( instance, "vkGetSwapchainStatusKHR" ) ); + PFN_vkGetSwapchainStatusKHR vkGetSwapchainStatusKHR = 0; //=== VK_KHR_external_fence_capabilities === - vkGetPhysicalDeviceExternalFencePropertiesKHR = - PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalFencePropertiesKHR" ) ); - if ( !vkGetPhysicalDeviceExternalFenceProperties ) - vkGetPhysicalDeviceExternalFenceProperties = vkGetPhysicalDeviceExternalFencePropertiesKHR; + PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR vkGetPhysicalDeviceExternalFencePropertiesKHR = 0; #if defined( VK_USE_PLATFORM_WIN32_KHR ) //=== VK_KHR_external_fence_win32 === - vkImportFenceWin32HandleKHR = PFN_vkImportFenceWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkImportFenceWin32HandleKHR" ) ); - vkGetFenceWin32HandleKHR = PFN_vkGetFenceWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkGetFenceWin32HandleKHR" ) ); + PFN_vkImportFenceWin32HandleKHR vkImportFenceWin32HandleKHR = 0; + PFN_vkGetFenceWin32HandleKHR vkGetFenceWin32HandleKHR = 0; +#else + PFN_dummy vkImportFenceWin32HandleKHR_placeholder = 0; + PFN_dummy vkGetFenceWin32HandleKHR_placeholder = 0; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ //=== VK_KHR_external_fence_fd === - vkImportFenceFdKHR = PFN_vkImportFenceFdKHR( vkGetInstanceProcAddr( instance, "vkImportFenceFdKHR" ) ); - vkGetFenceFdKHR = PFN_vkGetFenceFdKHR( vkGetInstanceProcAddr( instance, "vkGetFenceFdKHR" ) ); + PFN_vkImportFenceFdKHR vkImportFenceFdKHR = 0; + PFN_vkGetFenceFdKHR vkGetFenceFdKHR = 0; //=== VK_KHR_performance_query === - vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR = PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( - vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR" ) ); - vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR = PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( - vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR" ) ); - vkAcquireProfilingLockKHR = PFN_vkAcquireProfilingLockKHR( vkGetInstanceProcAddr( instance, "vkAcquireProfilingLockKHR" ) ); - vkReleaseProfilingLockKHR = PFN_vkReleaseProfilingLockKHR( vkGetInstanceProcAddr( instance, "vkReleaseProfilingLockKHR" ) ); + PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR = 0; + PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR = 0; + PFN_vkAcquireProfilingLockKHR vkAcquireProfilingLockKHR = 0; + PFN_vkReleaseProfilingLockKHR vkReleaseProfilingLockKHR = 0; //=== VK_KHR_get_surface_capabilities2 === - vkGetPhysicalDeviceSurfaceCapabilities2KHR = - PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilities2KHR" ) ); - vkGetPhysicalDeviceSurfaceFormats2KHR = - 
PFN_vkGetPhysicalDeviceSurfaceFormats2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceFormats2KHR" ) ); + PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR vkGetPhysicalDeviceSurfaceCapabilities2KHR = 0; + PFN_vkGetPhysicalDeviceSurfaceFormats2KHR vkGetPhysicalDeviceSurfaceFormats2KHR = 0; //=== VK_KHR_get_display_properties2 === - vkGetPhysicalDeviceDisplayProperties2KHR = - PFN_vkGetPhysicalDeviceDisplayProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayProperties2KHR" ) ); - vkGetPhysicalDeviceDisplayPlaneProperties2KHR = - PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPlaneProperties2KHR" ) ); - vkGetDisplayModeProperties2KHR = PFN_vkGetDisplayModeProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetDisplayModeProperties2KHR" ) ); - vkGetDisplayPlaneCapabilities2KHR = PFN_vkGetDisplayPlaneCapabilities2KHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneCapabilities2KHR" ) ); + PFN_vkGetPhysicalDeviceDisplayProperties2KHR vkGetPhysicalDeviceDisplayProperties2KHR = 0; + PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR vkGetPhysicalDeviceDisplayPlaneProperties2KHR = 0; + PFN_vkGetDisplayModeProperties2KHR vkGetDisplayModeProperties2KHR = 0; + PFN_vkGetDisplayPlaneCapabilities2KHR vkGetDisplayPlaneCapabilities2KHR = 0; #if defined( VK_USE_PLATFORM_IOS_MVK ) //=== VK_MVK_ios_surface === - vkCreateIOSSurfaceMVK = PFN_vkCreateIOSSurfaceMVK( vkGetInstanceProcAddr( instance, "vkCreateIOSSurfaceMVK" ) ); + PFN_vkCreateIOSSurfaceMVK vkCreateIOSSurfaceMVK = 0; +#else + PFN_dummy vkCreateIOSSurfaceMVK_placeholder = 0; #endif /*VK_USE_PLATFORM_IOS_MVK*/ #if defined( VK_USE_PLATFORM_MACOS_MVK ) //=== VK_MVK_macos_surface === - vkCreateMacOSSurfaceMVK = PFN_vkCreateMacOSSurfaceMVK( vkGetInstanceProcAddr( instance, "vkCreateMacOSSurfaceMVK" ) ); + PFN_vkCreateMacOSSurfaceMVK vkCreateMacOSSurfaceMVK = 0; +#else + PFN_dummy vkCreateMacOSSurfaceMVK_placeholder = 0; #endif /*VK_USE_PLATFORM_MACOS_MVK*/ //=== VK_EXT_debug_utils === - vkSetDebugUtilsObjectNameEXT = PFN_vkSetDebugUtilsObjectNameEXT( vkGetInstanceProcAddr( instance, "vkSetDebugUtilsObjectNameEXT" ) ); - vkSetDebugUtilsObjectTagEXT = PFN_vkSetDebugUtilsObjectTagEXT( vkGetInstanceProcAddr( instance, "vkSetDebugUtilsObjectTagEXT" ) ); - vkQueueBeginDebugUtilsLabelEXT = PFN_vkQueueBeginDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkQueueBeginDebugUtilsLabelEXT" ) ); - vkQueueEndDebugUtilsLabelEXT = PFN_vkQueueEndDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkQueueEndDebugUtilsLabelEXT" ) ); - vkQueueInsertDebugUtilsLabelEXT = PFN_vkQueueInsertDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkQueueInsertDebugUtilsLabelEXT" ) ); - vkCmdBeginDebugUtilsLabelEXT = PFN_vkCmdBeginDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkCmdBeginDebugUtilsLabelEXT" ) ); - vkCmdEndDebugUtilsLabelEXT = PFN_vkCmdEndDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkCmdEndDebugUtilsLabelEXT" ) ); - vkCmdInsertDebugUtilsLabelEXT = PFN_vkCmdInsertDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkCmdInsertDebugUtilsLabelEXT" ) ); - vkCreateDebugUtilsMessengerEXT = PFN_vkCreateDebugUtilsMessengerEXT( vkGetInstanceProcAddr( instance, "vkCreateDebugUtilsMessengerEXT" ) ); - vkDestroyDebugUtilsMessengerEXT = PFN_vkDestroyDebugUtilsMessengerEXT( vkGetInstanceProcAddr( instance, "vkDestroyDebugUtilsMessengerEXT" ) ); - vkSubmitDebugUtilsMessageEXT = PFN_vkSubmitDebugUtilsMessageEXT( vkGetInstanceProcAddr( instance, 
"vkSubmitDebugUtilsMessageEXT" ) ); + PFN_vkSetDebugUtilsObjectNameEXT vkSetDebugUtilsObjectNameEXT = 0; + PFN_vkSetDebugUtilsObjectTagEXT vkSetDebugUtilsObjectTagEXT = 0; + PFN_vkQueueBeginDebugUtilsLabelEXT vkQueueBeginDebugUtilsLabelEXT = 0; + PFN_vkQueueEndDebugUtilsLabelEXT vkQueueEndDebugUtilsLabelEXT = 0; + PFN_vkQueueInsertDebugUtilsLabelEXT vkQueueInsertDebugUtilsLabelEXT = 0; + PFN_vkCmdBeginDebugUtilsLabelEXT vkCmdBeginDebugUtilsLabelEXT = 0; + PFN_vkCmdEndDebugUtilsLabelEXT vkCmdEndDebugUtilsLabelEXT = 0; + PFN_vkCmdInsertDebugUtilsLabelEXT vkCmdInsertDebugUtilsLabelEXT = 0; + PFN_vkCreateDebugUtilsMessengerEXT vkCreateDebugUtilsMessengerEXT = 0; + PFN_vkDestroyDebugUtilsMessengerEXT vkDestroyDebugUtilsMessengerEXT = 0; + PFN_vkSubmitDebugUtilsMessageEXT vkSubmitDebugUtilsMessageEXT = 0; #if defined( VK_USE_PLATFORM_ANDROID_KHR ) //=== VK_ANDROID_external_memory_android_hardware_buffer === - vkGetAndroidHardwareBufferPropertiesANDROID = - PFN_vkGetAndroidHardwareBufferPropertiesANDROID( vkGetInstanceProcAddr( instance, "vkGetAndroidHardwareBufferPropertiesANDROID" ) ); - vkGetMemoryAndroidHardwareBufferANDROID = - PFN_vkGetMemoryAndroidHardwareBufferANDROID( vkGetInstanceProcAddr( instance, "vkGetMemoryAndroidHardwareBufferANDROID" ) ); + PFN_vkGetAndroidHardwareBufferPropertiesANDROID vkGetAndroidHardwareBufferPropertiesANDROID = 0; + PFN_vkGetMemoryAndroidHardwareBufferANDROID vkGetMemoryAndroidHardwareBufferANDROID = 0; +#else + PFN_dummy vkGetAndroidHardwareBufferPropertiesANDROID_placeholder = 0; + PFN_dummy vkGetMemoryAndroidHardwareBufferANDROID_placeholder = 0; #endif /*VK_USE_PLATFORM_ANDROID_KHR*/ #if defined( VK_ENABLE_BETA_EXTENSIONS ) //=== VK_AMDX_shader_enqueue === - vkCreateExecutionGraphPipelinesAMDX = PFN_vkCreateExecutionGraphPipelinesAMDX( vkGetInstanceProcAddr( instance, "vkCreateExecutionGraphPipelinesAMDX" ) ); - vkGetExecutionGraphPipelineScratchSizeAMDX = - PFN_vkGetExecutionGraphPipelineScratchSizeAMDX( vkGetInstanceProcAddr( instance, "vkGetExecutionGraphPipelineScratchSizeAMDX" ) ); - vkGetExecutionGraphPipelineNodeIndexAMDX = - PFN_vkGetExecutionGraphPipelineNodeIndexAMDX( vkGetInstanceProcAddr( instance, "vkGetExecutionGraphPipelineNodeIndexAMDX" ) ); - vkCmdInitializeGraphScratchMemoryAMDX = - PFN_vkCmdInitializeGraphScratchMemoryAMDX( vkGetInstanceProcAddr( instance, "vkCmdInitializeGraphScratchMemoryAMDX" ) ); - vkCmdDispatchGraphAMDX = PFN_vkCmdDispatchGraphAMDX( vkGetInstanceProcAddr( instance, "vkCmdDispatchGraphAMDX" ) ); - vkCmdDispatchGraphIndirectAMDX = PFN_vkCmdDispatchGraphIndirectAMDX( vkGetInstanceProcAddr( instance, "vkCmdDispatchGraphIndirectAMDX" ) ); - vkCmdDispatchGraphIndirectCountAMDX = PFN_vkCmdDispatchGraphIndirectCountAMDX( vkGetInstanceProcAddr( instance, "vkCmdDispatchGraphIndirectCountAMDX" ) ); + PFN_vkCreateExecutionGraphPipelinesAMDX vkCreateExecutionGraphPipelinesAMDX = 0; + PFN_vkGetExecutionGraphPipelineScratchSizeAMDX vkGetExecutionGraphPipelineScratchSizeAMDX = 0; + PFN_vkGetExecutionGraphPipelineNodeIndexAMDX vkGetExecutionGraphPipelineNodeIndexAMDX = 0; + PFN_vkCmdInitializeGraphScratchMemoryAMDX vkCmdInitializeGraphScratchMemoryAMDX = 0; + PFN_vkCmdDispatchGraphAMDX vkCmdDispatchGraphAMDX = 0; + PFN_vkCmdDispatchGraphIndirectAMDX vkCmdDispatchGraphIndirectAMDX = 0; + PFN_vkCmdDispatchGraphIndirectCountAMDX vkCmdDispatchGraphIndirectCountAMDX = 0; +#else + PFN_dummy vkCreateExecutionGraphPipelinesAMDX_placeholder = 0; + PFN_dummy vkGetExecutionGraphPipelineScratchSizeAMDX_placeholder = 0; + PFN_dummy 
vkGetExecutionGraphPipelineNodeIndexAMDX_placeholder = 0; + PFN_dummy vkCmdInitializeGraphScratchMemoryAMDX_placeholder = 0; + PFN_dummy vkCmdDispatchGraphAMDX_placeholder = 0; + PFN_dummy vkCmdDispatchGraphIndirectAMDX_placeholder = 0; + PFN_dummy vkCmdDispatchGraphIndirectCountAMDX_placeholder = 0; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ //=== VK_EXT_sample_locations === - vkCmdSetSampleLocationsEXT = PFN_vkCmdSetSampleLocationsEXT( vkGetInstanceProcAddr( instance, "vkCmdSetSampleLocationsEXT" ) ); - vkGetPhysicalDeviceMultisamplePropertiesEXT = - PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMultisamplePropertiesEXT" ) ); + PFN_vkCmdSetSampleLocationsEXT vkCmdSetSampleLocationsEXT = 0; + PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT vkGetPhysicalDeviceMultisamplePropertiesEXT = 0; //=== VK_KHR_get_memory_requirements2 === - vkGetImageMemoryRequirements2KHR = PFN_vkGetImageMemoryRequirements2KHR( vkGetInstanceProcAddr( instance, "vkGetImageMemoryRequirements2KHR" ) ); - if ( !vkGetImageMemoryRequirements2 ) - vkGetImageMemoryRequirements2 = vkGetImageMemoryRequirements2KHR; - vkGetBufferMemoryRequirements2KHR = PFN_vkGetBufferMemoryRequirements2KHR( vkGetInstanceProcAddr( instance, "vkGetBufferMemoryRequirements2KHR" ) ); - if ( !vkGetBufferMemoryRequirements2 ) - vkGetBufferMemoryRequirements2 = vkGetBufferMemoryRequirements2KHR; - vkGetImageSparseMemoryRequirements2KHR = - PFN_vkGetImageSparseMemoryRequirements2KHR( vkGetInstanceProcAddr( instance, "vkGetImageSparseMemoryRequirements2KHR" ) ); - if ( !vkGetImageSparseMemoryRequirements2 ) - vkGetImageSparseMemoryRequirements2 = vkGetImageSparseMemoryRequirements2KHR; + PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR = 0; + PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR = 0; + PFN_vkGetImageSparseMemoryRequirements2KHR vkGetImageSparseMemoryRequirements2KHR = 0; //=== VK_KHR_acceleration_structure === - vkCreateAccelerationStructureKHR = PFN_vkCreateAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCreateAccelerationStructureKHR" ) ); - vkDestroyAccelerationStructureKHR = PFN_vkDestroyAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkDestroyAccelerationStructureKHR" ) ); - vkCmdBuildAccelerationStructuresKHR = PFN_vkCmdBuildAccelerationStructuresKHR( vkGetInstanceProcAddr( instance, "vkCmdBuildAccelerationStructuresKHR" ) ); - vkCmdBuildAccelerationStructuresIndirectKHR = - PFN_vkCmdBuildAccelerationStructuresIndirectKHR( vkGetInstanceProcAddr( instance, "vkCmdBuildAccelerationStructuresIndirectKHR" ) ); - vkBuildAccelerationStructuresKHR = PFN_vkBuildAccelerationStructuresKHR( vkGetInstanceProcAddr( instance, "vkBuildAccelerationStructuresKHR" ) ); - vkCopyAccelerationStructureKHR = PFN_vkCopyAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCopyAccelerationStructureKHR" ) ); - vkCopyAccelerationStructureToMemoryKHR = - PFN_vkCopyAccelerationStructureToMemoryKHR( vkGetInstanceProcAddr( instance, "vkCopyAccelerationStructureToMemoryKHR" ) ); - vkCopyMemoryToAccelerationStructureKHR = - PFN_vkCopyMemoryToAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCopyMemoryToAccelerationStructureKHR" ) ); - vkWriteAccelerationStructuresPropertiesKHR = - PFN_vkWriteAccelerationStructuresPropertiesKHR( vkGetInstanceProcAddr( instance, "vkWriteAccelerationStructuresPropertiesKHR" ) ); - vkCmdCopyAccelerationStructureKHR = PFN_vkCmdCopyAccelerationStructureKHR( vkGetInstanceProcAddr( instance, 
"vkCmdCopyAccelerationStructureKHR" ) ); - vkCmdCopyAccelerationStructureToMemoryKHR = - PFN_vkCmdCopyAccelerationStructureToMemoryKHR( vkGetInstanceProcAddr( instance, "vkCmdCopyAccelerationStructureToMemoryKHR" ) ); - vkCmdCopyMemoryToAccelerationStructureKHR = - PFN_vkCmdCopyMemoryToAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCmdCopyMemoryToAccelerationStructureKHR" ) ); - vkGetAccelerationStructureDeviceAddressKHR = - PFN_vkGetAccelerationStructureDeviceAddressKHR( vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureDeviceAddressKHR" ) ); - vkCmdWriteAccelerationStructuresPropertiesKHR = - PFN_vkCmdWriteAccelerationStructuresPropertiesKHR( vkGetInstanceProcAddr( instance, "vkCmdWriteAccelerationStructuresPropertiesKHR" ) ); - vkGetDeviceAccelerationStructureCompatibilityKHR = - PFN_vkGetDeviceAccelerationStructureCompatibilityKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceAccelerationStructureCompatibilityKHR" ) ); - vkGetAccelerationStructureBuildSizesKHR = - PFN_vkGetAccelerationStructureBuildSizesKHR( vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureBuildSizesKHR" ) ); + PFN_vkCreateAccelerationStructureKHR vkCreateAccelerationStructureKHR = 0; + PFN_vkDestroyAccelerationStructureKHR vkDestroyAccelerationStructureKHR = 0; + PFN_vkCmdBuildAccelerationStructuresKHR vkCmdBuildAccelerationStructuresKHR = 0; + PFN_vkCmdBuildAccelerationStructuresIndirectKHR vkCmdBuildAccelerationStructuresIndirectKHR = 0; + PFN_vkBuildAccelerationStructuresKHR vkBuildAccelerationStructuresKHR = 0; + PFN_vkCopyAccelerationStructureKHR vkCopyAccelerationStructureKHR = 0; + PFN_vkCopyAccelerationStructureToMemoryKHR vkCopyAccelerationStructureToMemoryKHR = 0; + PFN_vkCopyMemoryToAccelerationStructureKHR vkCopyMemoryToAccelerationStructureKHR = 0; + PFN_vkWriteAccelerationStructuresPropertiesKHR vkWriteAccelerationStructuresPropertiesKHR = 0; + PFN_vkCmdCopyAccelerationStructureKHR vkCmdCopyAccelerationStructureKHR = 0; + PFN_vkCmdCopyAccelerationStructureToMemoryKHR vkCmdCopyAccelerationStructureToMemoryKHR = 0; + PFN_vkCmdCopyMemoryToAccelerationStructureKHR vkCmdCopyMemoryToAccelerationStructureKHR = 0; + PFN_vkGetAccelerationStructureDeviceAddressKHR vkGetAccelerationStructureDeviceAddressKHR = 0; + PFN_vkCmdWriteAccelerationStructuresPropertiesKHR vkCmdWriteAccelerationStructuresPropertiesKHR = 0; + PFN_vkGetDeviceAccelerationStructureCompatibilityKHR vkGetDeviceAccelerationStructureCompatibilityKHR = 0; + PFN_vkGetAccelerationStructureBuildSizesKHR vkGetAccelerationStructureBuildSizesKHR = 0; //=== VK_KHR_ray_tracing_pipeline === - vkCmdTraceRaysKHR = PFN_vkCmdTraceRaysKHR( vkGetInstanceProcAddr( instance, "vkCmdTraceRaysKHR" ) ); - vkCreateRayTracingPipelinesKHR = PFN_vkCreateRayTracingPipelinesKHR( vkGetInstanceProcAddr( instance, "vkCreateRayTracingPipelinesKHR" ) ); - vkGetRayTracingShaderGroupHandlesKHR = - PFN_vkGetRayTracingShaderGroupHandlesKHR( vkGetInstanceProcAddr( instance, "vkGetRayTracingShaderGroupHandlesKHR" ) ); - vkGetRayTracingCaptureReplayShaderGroupHandlesKHR = - PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( vkGetInstanceProcAddr( instance, "vkGetRayTracingCaptureReplayShaderGroupHandlesKHR" ) ); - vkCmdTraceRaysIndirectKHR = PFN_vkCmdTraceRaysIndirectKHR( vkGetInstanceProcAddr( instance, "vkCmdTraceRaysIndirectKHR" ) ); - vkGetRayTracingShaderGroupStackSizeKHR = - PFN_vkGetRayTracingShaderGroupStackSizeKHR( vkGetInstanceProcAddr( instance, "vkGetRayTracingShaderGroupStackSizeKHR" ) ); - vkCmdSetRayTracingPipelineStackSizeKHR = - 
PFN_vkCmdSetRayTracingPipelineStackSizeKHR( vkGetInstanceProcAddr( instance, "vkCmdSetRayTracingPipelineStackSizeKHR" ) ); + PFN_vkCmdTraceRaysKHR vkCmdTraceRaysKHR = 0; + PFN_vkCreateRayTracingPipelinesKHR vkCreateRayTracingPipelinesKHR = 0; + PFN_vkGetRayTracingShaderGroupHandlesKHR vkGetRayTracingShaderGroupHandlesKHR = 0; + PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR vkGetRayTracingCaptureReplayShaderGroupHandlesKHR = 0; + PFN_vkCmdTraceRaysIndirectKHR vkCmdTraceRaysIndirectKHR = 0; + PFN_vkGetRayTracingShaderGroupStackSizeKHR vkGetRayTracingShaderGroupStackSizeKHR = 0; + PFN_vkCmdSetRayTracingPipelineStackSizeKHR vkCmdSetRayTracingPipelineStackSizeKHR = 0; //=== VK_KHR_sampler_ycbcr_conversion === - vkCreateSamplerYcbcrConversionKHR = PFN_vkCreateSamplerYcbcrConversionKHR( vkGetInstanceProcAddr( instance, "vkCreateSamplerYcbcrConversionKHR" ) ); - if ( !vkCreateSamplerYcbcrConversion ) - vkCreateSamplerYcbcrConversion = vkCreateSamplerYcbcrConversionKHR; - vkDestroySamplerYcbcrConversionKHR = PFN_vkDestroySamplerYcbcrConversionKHR( vkGetInstanceProcAddr( instance, "vkDestroySamplerYcbcrConversionKHR" ) ); - if ( !vkDestroySamplerYcbcrConversion ) - vkDestroySamplerYcbcrConversion = vkDestroySamplerYcbcrConversionKHR; + PFN_vkCreateSamplerYcbcrConversionKHR vkCreateSamplerYcbcrConversionKHR = 0; + PFN_vkDestroySamplerYcbcrConversionKHR vkDestroySamplerYcbcrConversionKHR = 0; //=== VK_KHR_bind_memory2 === - vkBindBufferMemory2KHR = PFN_vkBindBufferMemory2KHR( vkGetInstanceProcAddr( instance, "vkBindBufferMemory2KHR" ) ); - if ( !vkBindBufferMemory2 ) - vkBindBufferMemory2 = vkBindBufferMemory2KHR; - vkBindImageMemory2KHR = PFN_vkBindImageMemory2KHR( vkGetInstanceProcAddr( instance, "vkBindImageMemory2KHR" ) ); - if ( !vkBindImageMemory2 ) - vkBindImageMemory2 = vkBindImageMemory2KHR; + PFN_vkBindBufferMemory2KHR vkBindBufferMemory2KHR = 0; + PFN_vkBindImageMemory2KHR vkBindImageMemory2KHR = 0; //=== VK_EXT_image_drm_format_modifier === - vkGetImageDrmFormatModifierPropertiesEXT = - PFN_vkGetImageDrmFormatModifierPropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetImageDrmFormatModifierPropertiesEXT" ) ); + PFN_vkGetImageDrmFormatModifierPropertiesEXT vkGetImageDrmFormatModifierPropertiesEXT = 0; //=== VK_EXT_validation_cache === - vkCreateValidationCacheEXT = PFN_vkCreateValidationCacheEXT( vkGetInstanceProcAddr( instance, "vkCreateValidationCacheEXT" ) ); - vkDestroyValidationCacheEXT = PFN_vkDestroyValidationCacheEXT( vkGetInstanceProcAddr( instance, "vkDestroyValidationCacheEXT" ) ); - vkMergeValidationCachesEXT = PFN_vkMergeValidationCachesEXT( vkGetInstanceProcAddr( instance, "vkMergeValidationCachesEXT" ) ); - vkGetValidationCacheDataEXT = PFN_vkGetValidationCacheDataEXT( vkGetInstanceProcAddr( instance, "vkGetValidationCacheDataEXT" ) ); + PFN_vkCreateValidationCacheEXT vkCreateValidationCacheEXT = 0; + PFN_vkDestroyValidationCacheEXT vkDestroyValidationCacheEXT = 0; + PFN_vkMergeValidationCachesEXT vkMergeValidationCachesEXT = 0; + PFN_vkGetValidationCacheDataEXT vkGetValidationCacheDataEXT = 0; //=== VK_NV_shading_rate_image === - vkCmdBindShadingRateImageNV = PFN_vkCmdBindShadingRateImageNV( vkGetInstanceProcAddr( instance, "vkCmdBindShadingRateImageNV" ) ); - vkCmdSetViewportShadingRatePaletteNV = - PFN_vkCmdSetViewportShadingRatePaletteNV( vkGetInstanceProcAddr( instance, "vkCmdSetViewportShadingRatePaletteNV" ) ); - vkCmdSetCoarseSampleOrderNV = PFN_vkCmdSetCoarseSampleOrderNV( vkGetInstanceProcAddr( instance, "vkCmdSetCoarseSampleOrderNV" ) ); + 
PFN_vkCmdBindShadingRateImageNV vkCmdBindShadingRateImageNV = 0; + PFN_vkCmdSetViewportShadingRatePaletteNV vkCmdSetViewportShadingRatePaletteNV = 0; + PFN_vkCmdSetCoarseSampleOrderNV vkCmdSetCoarseSampleOrderNV = 0; //=== VK_NV_ray_tracing === - vkCreateAccelerationStructureNV = PFN_vkCreateAccelerationStructureNV( vkGetInstanceProcAddr( instance, "vkCreateAccelerationStructureNV" ) ); - vkDestroyAccelerationStructureNV = PFN_vkDestroyAccelerationStructureNV( vkGetInstanceProcAddr( instance, "vkDestroyAccelerationStructureNV" ) ); - vkGetAccelerationStructureMemoryRequirementsNV = - PFN_vkGetAccelerationStructureMemoryRequirementsNV( vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureMemoryRequirementsNV" ) ); - vkBindAccelerationStructureMemoryNV = PFN_vkBindAccelerationStructureMemoryNV( vkGetInstanceProcAddr( instance, "vkBindAccelerationStructureMemoryNV" ) ); - vkCmdBuildAccelerationStructureNV = PFN_vkCmdBuildAccelerationStructureNV( vkGetInstanceProcAddr( instance, "vkCmdBuildAccelerationStructureNV" ) ); - vkCmdCopyAccelerationStructureNV = PFN_vkCmdCopyAccelerationStructureNV( vkGetInstanceProcAddr( instance, "vkCmdCopyAccelerationStructureNV" ) ); - vkCmdTraceRaysNV = PFN_vkCmdTraceRaysNV( vkGetInstanceProcAddr( instance, "vkCmdTraceRaysNV" ) ); - vkCreateRayTracingPipelinesNV = PFN_vkCreateRayTracingPipelinesNV( vkGetInstanceProcAddr( instance, "vkCreateRayTracingPipelinesNV" ) ); - vkGetRayTracingShaderGroupHandlesNV = PFN_vkGetRayTracingShaderGroupHandlesNV( vkGetInstanceProcAddr( instance, "vkGetRayTracingShaderGroupHandlesNV" ) ); - if ( !vkGetRayTracingShaderGroupHandlesKHR ) - vkGetRayTracingShaderGroupHandlesKHR = vkGetRayTracingShaderGroupHandlesNV; - vkGetAccelerationStructureHandleNV = PFN_vkGetAccelerationStructureHandleNV( vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureHandleNV" ) ); - vkCmdWriteAccelerationStructuresPropertiesNV = - PFN_vkCmdWriteAccelerationStructuresPropertiesNV( vkGetInstanceProcAddr( instance, "vkCmdWriteAccelerationStructuresPropertiesNV" ) ); - vkCompileDeferredNV = PFN_vkCompileDeferredNV( vkGetInstanceProcAddr( instance, "vkCompileDeferredNV" ) ); + PFN_vkCreateAccelerationStructureNV vkCreateAccelerationStructureNV = 0; + PFN_vkDestroyAccelerationStructureNV vkDestroyAccelerationStructureNV = 0; + PFN_vkGetAccelerationStructureMemoryRequirementsNV vkGetAccelerationStructureMemoryRequirementsNV = 0; + PFN_vkBindAccelerationStructureMemoryNV vkBindAccelerationStructureMemoryNV = 0; + PFN_vkCmdBuildAccelerationStructureNV vkCmdBuildAccelerationStructureNV = 0; + PFN_vkCmdCopyAccelerationStructureNV vkCmdCopyAccelerationStructureNV = 0; + PFN_vkCmdTraceRaysNV vkCmdTraceRaysNV = 0; + PFN_vkCreateRayTracingPipelinesNV vkCreateRayTracingPipelinesNV = 0; + PFN_vkGetRayTracingShaderGroupHandlesNV vkGetRayTracingShaderGroupHandlesNV = 0; + PFN_vkGetAccelerationStructureHandleNV vkGetAccelerationStructureHandleNV = 0; + PFN_vkCmdWriteAccelerationStructuresPropertiesNV vkCmdWriteAccelerationStructuresPropertiesNV = 0; + PFN_vkCompileDeferredNV vkCompileDeferredNV = 0; //=== VK_KHR_maintenance3 === - vkGetDescriptorSetLayoutSupportKHR = PFN_vkGetDescriptorSetLayoutSupportKHR( vkGetInstanceProcAddr( instance, "vkGetDescriptorSetLayoutSupportKHR" ) ); - if ( !vkGetDescriptorSetLayoutSupport ) - vkGetDescriptorSetLayoutSupport = vkGetDescriptorSetLayoutSupportKHR; + PFN_vkGetDescriptorSetLayoutSupportKHR vkGetDescriptorSetLayoutSupportKHR = 0; //=== VK_KHR_draw_indirect_count === - vkCmdDrawIndirectCountKHR = 
PFN_vkCmdDrawIndirectCountKHR( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectCountKHR" ) ); - if ( !vkCmdDrawIndirectCount ) - vkCmdDrawIndirectCount = vkCmdDrawIndirectCountKHR; - vkCmdDrawIndexedIndirectCountKHR = PFN_vkCmdDrawIndexedIndirectCountKHR( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirectCountKHR" ) ); - if ( !vkCmdDrawIndexedIndirectCount ) - vkCmdDrawIndexedIndirectCount = vkCmdDrawIndexedIndirectCountKHR; + PFN_vkCmdDrawIndirectCountKHR vkCmdDrawIndirectCountKHR = 0; + PFN_vkCmdDrawIndexedIndirectCountKHR vkCmdDrawIndexedIndirectCountKHR = 0; //=== VK_EXT_external_memory_host === - vkGetMemoryHostPointerPropertiesEXT = PFN_vkGetMemoryHostPointerPropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetMemoryHostPointerPropertiesEXT" ) ); + PFN_vkGetMemoryHostPointerPropertiesEXT vkGetMemoryHostPointerPropertiesEXT = 0; //=== VK_AMD_buffer_marker === - vkCmdWriteBufferMarkerAMD = PFN_vkCmdWriteBufferMarkerAMD( vkGetInstanceProcAddr( instance, "vkCmdWriteBufferMarkerAMD" ) ); + PFN_vkCmdWriteBufferMarkerAMD vkCmdWriteBufferMarkerAMD = 0; + PFN_vkCmdWriteBufferMarker2AMD vkCmdWriteBufferMarker2AMD = 0; //=== VK_EXT_calibrated_timestamps === - vkGetPhysicalDeviceCalibrateableTimeDomainsEXT = - PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCalibrateableTimeDomainsEXT" ) ); - if ( !vkGetPhysicalDeviceCalibrateableTimeDomainsKHR ) - vkGetPhysicalDeviceCalibrateableTimeDomainsKHR = vkGetPhysicalDeviceCalibrateableTimeDomainsEXT; - vkGetCalibratedTimestampsEXT = PFN_vkGetCalibratedTimestampsEXT( vkGetInstanceProcAddr( instance, "vkGetCalibratedTimestampsEXT" ) ); - if ( !vkGetCalibratedTimestampsKHR ) - vkGetCalibratedTimestampsKHR = vkGetCalibratedTimestampsEXT; + PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT vkGetPhysicalDeviceCalibrateableTimeDomainsEXT = 0; + PFN_vkGetCalibratedTimestampsEXT vkGetCalibratedTimestampsEXT = 0; //=== VK_NV_mesh_shader === - vkCmdDrawMeshTasksNV = PFN_vkCmdDrawMeshTasksNV( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksNV" ) ); - vkCmdDrawMeshTasksIndirectNV = PFN_vkCmdDrawMeshTasksIndirectNV( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksIndirectNV" ) ); - vkCmdDrawMeshTasksIndirectCountNV = PFN_vkCmdDrawMeshTasksIndirectCountNV( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksIndirectCountNV" ) ); + PFN_vkCmdDrawMeshTasksNV vkCmdDrawMeshTasksNV = 0; + PFN_vkCmdDrawMeshTasksIndirectNV vkCmdDrawMeshTasksIndirectNV = 0; + PFN_vkCmdDrawMeshTasksIndirectCountNV vkCmdDrawMeshTasksIndirectCountNV = 0; //=== VK_NV_scissor_exclusive === - vkCmdSetExclusiveScissorEnableNV = PFN_vkCmdSetExclusiveScissorEnableNV( vkGetInstanceProcAddr( instance, "vkCmdSetExclusiveScissorEnableNV" ) ); - vkCmdSetExclusiveScissorNV = PFN_vkCmdSetExclusiveScissorNV( vkGetInstanceProcAddr( instance, "vkCmdSetExclusiveScissorNV" ) ); + PFN_vkCmdSetExclusiveScissorEnableNV vkCmdSetExclusiveScissorEnableNV = 0; + PFN_vkCmdSetExclusiveScissorNV vkCmdSetExclusiveScissorNV = 0; //=== VK_NV_device_diagnostic_checkpoints === - vkCmdSetCheckpointNV = PFN_vkCmdSetCheckpointNV( vkGetInstanceProcAddr( instance, "vkCmdSetCheckpointNV" ) ); - vkGetQueueCheckpointDataNV = PFN_vkGetQueueCheckpointDataNV( vkGetInstanceProcAddr( instance, "vkGetQueueCheckpointDataNV" ) ); + PFN_vkCmdSetCheckpointNV vkCmdSetCheckpointNV = 0; + PFN_vkGetQueueCheckpointDataNV vkGetQueueCheckpointDataNV = 0; + PFN_vkGetQueueCheckpointData2NV vkGetQueueCheckpointData2NV = 0; //=== VK_KHR_timeline_semaphore === - 
vkGetSemaphoreCounterValueKHR = PFN_vkGetSemaphoreCounterValueKHR( vkGetInstanceProcAddr( instance, "vkGetSemaphoreCounterValueKHR" ) ); - if ( !vkGetSemaphoreCounterValue ) - vkGetSemaphoreCounterValue = vkGetSemaphoreCounterValueKHR; - vkWaitSemaphoresKHR = PFN_vkWaitSemaphoresKHR( vkGetInstanceProcAddr( instance, "vkWaitSemaphoresKHR" ) ); - if ( !vkWaitSemaphores ) - vkWaitSemaphores = vkWaitSemaphoresKHR; - vkSignalSemaphoreKHR = PFN_vkSignalSemaphoreKHR( vkGetInstanceProcAddr( instance, "vkSignalSemaphoreKHR" ) ); - if ( !vkSignalSemaphore ) - vkSignalSemaphore = vkSignalSemaphoreKHR; + PFN_vkGetSemaphoreCounterValueKHR vkGetSemaphoreCounterValueKHR = 0; + PFN_vkWaitSemaphoresKHR vkWaitSemaphoresKHR = 0; + PFN_vkSignalSemaphoreKHR vkSignalSemaphoreKHR = 0; //=== VK_INTEL_performance_query === - vkInitializePerformanceApiINTEL = PFN_vkInitializePerformanceApiINTEL( vkGetInstanceProcAddr( instance, "vkInitializePerformanceApiINTEL" ) ); - vkUninitializePerformanceApiINTEL = PFN_vkUninitializePerformanceApiINTEL( vkGetInstanceProcAddr( instance, "vkUninitializePerformanceApiINTEL" ) ); - vkCmdSetPerformanceMarkerINTEL = PFN_vkCmdSetPerformanceMarkerINTEL( vkGetInstanceProcAddr( instance, "vkCmdSetPerformanceMarkerINTEL" ) ); - vkCmdSetPerformanceStreamMarkerINTEL = - PFN_vkCmdSetPerformanceStreamMarkerINTEL( vkGetInstanceProcAddr( instance, "vkCmdSetPerformanceStreamMarkerINTEL" ) ); - vkCmdSetPerformanceOverrideINTEL = PFN_vkCmdSetPerformanceOverrideINTEL( vkGetInstanceProcAddr( instance, "vkCmdSetPerformanceOverrideINTEL" ) ); - vkAcquirePerformanceConfigurationINTEL = - PFN_vkAcquirePerformanceConfigurationINTEL( vkGetInstanceProcAddr( instance, "vkAcquirePerformanceConfigurationINTEL" ) ); - vkReleasePerformanceConfigurationINTEL = - PFN_vkReleasePerformanceConfigurationINTEL( vkGetInstanceProcAddr( instance, "vkReleasePerformanceConfigurationINTEL" ) ); - vkQueueSetPerformanceConfigurationINTEL = - PFN_vkQueueSetPerformanceConfigurationINTEL( vkGetInstanceProcAddr( instance, "vkQueueSetPerformanceConfigurationINTEL" ) ); - vkGetPerformanceParameterINTEL = PFN_vkGetPerformanceParameterINTEL( vkGetInstanceProcAddr( instance, "vkGetPerformanceParameterINTEL" ) ); + PFN_vkInitializePerformanceApiINTEL vkInitializePerformanceApiINTEL = 0; + PFN_vkUninitializePerformanceApiINTEL vkUninitializePerformanceApiINTEL = 0; + PFN_vkCmdSetPerformanceMarkerINTEL vkCmdSetPerformanceMarkerINTEL = 0; + PFN_vkCmdSetPerformanceStreamMarkerINTEL vkCmdSetPerformanceStreamMarkerINTEL = 0; + PFN_vkCmdSetPerformanceOverrideINTEL vkCmdSetPerformanceOverrideINTEL = 0; + PFN_vkAcquirePerformanceConfigurationINTEL vkAcquirePerformanceConfigurationINTEL = 0; + PFN_vkReleasePerformanceConfigurationINTEL vkReleasePerformanceConfigurationINTEL = 0; + PFN_vkQueueSetPerformanceConfigurationINTEL vkQueueSetPerformanceConfigurationINTEL = 0; + PFN_vkGetPerformanceParameterINTEL vkGetPerformanceParameterINTEL = 0; //=== VK_AMD_display_native_hdr === - vkSetLocalDimmingAMD = PFN_vkSetLocalDimmingAMD( vkGetInstanceProcAddr( instance, "vkSetLocalDimmingAMD" ) ); + PFN_vkSetLocalDimmingAMD vkSetLocalDimmingAMD = 0; #if defined( VK_USE_PLATFORM_FUCHSIA ) //=== VK_FUCHSIA_imagepipe_surface === - vkCreateImagePipeSurfaceFUCHSIA = PFN_vkCreateImagePipeSurfaceFUCHSIA( vkGetInstanceProcAddr( instance, "vkCreateImagePipeSurfaceFUCHSIA" ) ); + PFN_vkCreateImagePipeSurfaceFUCHSIA vkCreateImagePipeSurfaceFUCHSIA = 0; +#else + PFN_dummy vkCreateImagePipeSurfaceFUCHSIA_placeholder = 0; #endif /*VK_USE_PLATFORM_FUCHSIA*/ #if defined( 
VK_USE_PLATFORM_METAL_EXT ) //=== VK_EXT_metal_surface === - vkCreateMetalSurfaceEXT = PFN_vkCreateMetalSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateMetalSurfaceEXT" ) ); + PFN_vkCreateMetalSurfaceEXT vkCreateMetalSurfaceEXT = 0; +#else + PFN_dummy vkCreateMetalSurfaceEXT_placeholder = 0; #endif /*VK_USE_PLATFORM_METAL_EXT*/ //=== VK_KHR_fragment_shading_rate === - vkGetPhysicalDeviceFragmentShadingRatesKHR = - PFN_vkGetPhysicalDeviceFragmentShadingRatesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFragmentShadingRatesKHR" ) ); - vkCmdSetFragmentShadingRateKHR = PFN_vkCmdSetFragmentShadingRateKHR( vkGetInstanceProcAddr( instance, "vkCmdSetFragmentShadingRateKHR" ) ); + PFN_vkGetPhysicalDeviceFragmentShadingRatesKHR vkGetPhysicalDeviceFragmentShadingRatesKHR = 0; + PFN_vkCmdSetFragmentShadingRateKHR vkCmdSetFragmentShadingRateKHR = 0; //=== VK_KHR_dynamic_rendering_local_read === - vkCmdSetRenderingAttachmentLocationsKHR = - PFN_vkCmdSetRenderingAttachmentLocationsKHR( vkGetInstanceProcAddr( instance, "vkCmdSetRenderingAttachmentLocationsKHR" ) ); - vkCmdSetRenderingInputAttachmentIndicesKHR = - PFN_vkCmdSetRenderingInputAttachmentIndicesKHR( vkGetInstanceProcAddr( instance, "vkCmdSetRenderingInputAttachmentIndicesKHR" ) ); + PFN_vkCmdSetRenderingAttachmentLocationsKHR vkCmdSetRenderingAttachmentLocationsKHR = 0; + PFN_vkCmdSetRenderingInputAttachmentIndicesKHR vkCmdSetRenderingInputAttachmentIndicesKHR = 0; //=== VK_EXT_buffer_device_address === - vkGetBufferDeviceAddressEXT = PFN_vkGetBufferDeviceAddressEXT( vkGetInstanceProcAddr( instance, "vkGetBufferDeviceAddressEXT" ) ); - if ( !vkGetBufferDeviceAddress ) - vkGetBufferDeviceAddress = vkGetBufferDeviceAddressEXT; + PFN_vkGetBufferDeviceAddressEXT vkGetBufferDeviceAddressEXT = 0; //=== VK_EXT_tooling_info === - vkGetPhysicalDeviceToolPropertiesEXT = - PFN_vkGetPhysicalDeviceToolPropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceToolPropertiesEXT" ) ); - if ( !vkGetPhysicalDeviceToolProperties ) - vkGetPhysicalDeviceToolProperties = vkGetPhysicalDeviceToolPropertiesEXT; + PFN_vkGetPhysicalDeviceToolPropertiesEXT vkGetPhysicalDeviceToolPropertiesEXT = 0; //=== VK_KHR_present_wait === - vkWaitForPresentKHR = PFN_vkWaitForPresentKHR( vkGetInstanceProcAddr( instance, "vkWaitForPresentKHR" ) ); + PFN_vkWaitForPresentKHR vkWaitForPresentKHR = 0; //=== VK_NV_cooperative_matrix === - vkGetPhysicalDeviceCooperativeMatrixPropertiesNV = - PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCooperativeMatrixPropertiesNV" ) ); + PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV vkGetPhysicalDeviceCooperativeMatrixPropertiesNV = 0; //=== VK_NV_coverage_reduction_mode === - vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV = PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( - vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV" ) ); + PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV = 0; #if defined( VK_USE_PLATFORM_WIN32_KHR ) //=== VK_EXT_full_screen_exclusive === - vkGetPhysicalDeviceSurfacePresentModes2EXT = - PFN_vkGetPhysicalDeviceSurfacePresentModes2EXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfacePresentModes2EXT" ) ); - vkAcquireFullScreenExclusiveModeEXT = PFN_vkAcquireFullScreenExclusiveModeEXT( vkGetInstanceProcAddr( instance, 
"vkAcquireFullScreenExclusiveModeEXT" ) ); - vkReleaseFullScreenExclusiveModeEXT = PFN_vkReleaseFullScreenExclusiveModeEXT( vkGetInstanceProcAddr( instance, "vkReleaseFullScreenExclusiveModeEXT" ) ); - vkGetDeviceGroupSurfacePresentModes2EXT = - PFN_vkGetDeviceGroupSurfacePresentModes2EXT( vkGetInstanceProcAddr( instance, "vkGetDeviceGroupSurfacePresentModes2EXT" ) ); + PFN_vkGetPhysicalDeviceSurfacePresentModes2EXT vkGetPhysicalDeviceSurfacePresentModes2EXT = 0; + PFN_vkAcquireFullScreenExclusiveModeEXT vkAcquireFullScreenExclusiveModeEXT = 0; + PFN_vkReleaseFullScreenExclusiveModeEXT vkReleaseFullScreenExclusiveModeEXT = 0; + PFN_vkGetDeviceGroupSurfacePresentModes2EXT vkGetDeviceGroupSurfacePresentModes2EXT = 0; +#else + PFN_dummy vkGetPhysicalDeviceSurfacePresentModes2EXT_placeholder = 0; + PFN_dummy vkAcquireFullScreenExclusiveModeEXT_placeholder = 0; + PFN_dummy vkReleaseFullScreenExclusiveModeEXT_placeholder = 0; + PFN_dummy vkGetDeviceGroupSurfacePresentModes2EXT_placeholder = 0; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ //=== VK_EXT_headless_surface === - vkCreateHeadlessSurfaceEXT = PFN_vkCreateHeadlessSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateHeadlessSurfaceEXT" ) ); + PFN_vkCreateHeadlessSurfaceEXT vkCreateHeadlessSurfaceEXT = 0; //=== VK_KHR_buffer_device_address === - vkGetBufferDeviceAddressKHR = PFN_vkGetBufferDeviceAddressKHR( vkGetInstanceProcAddr( instance, "vkGetBufferDeviceAddressKHR" ) ); - if ( !vkGetBufferDeviceAddress ) - vkGetBufferDeviceAddress = vkGetBufferDeviceAddressKHR; - vkGetBufferOpaqueCaptureAddressKHR = PFN_vkGetBufferOpaqueCaptureAddressKHR( vkGetInstanceProcAddr( instance, "vkGetBufferOpaqueCaptureAddressKHR" ) ); - if ( !vkGetBufferOpaqueCaptureAddress ) - vkGetBufferOpaqueCaptureAddress = vkGetBufferOpaqueCaptureAddressKHR; - vkGetDeviceMemoryOpaqueCaptureAddressKHR = - PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceMemoryOpaqueCaptureAddressKHR" ) ); - if ( !vkGetDeviceMemoryOpaqueCaptureAddress ) - vkGetDeviceMemoryOpaqueCaptureAddress = vkGetDeviceMemoryOpaqueCaptureAddressKHR; + PFN_vkGetBufferDeviceAddressKHR vkGetBufferDeviceAddressKHR = 0; + PFN_vkGetBufferOpaqueCaptureAddressKHR vkGetBufferOpaqueCaptureAddressKHR = 0; + PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR vkGetDeviceMemoryOpaqueCaptureAddressKHR = 0; //=== VK_EXT_line_rasterization === - vkCmdSetLineStippleEXT = PFN_vkCmdSetLineStippleEXT( vkGetInstanceProcAddr( instance, "vkCmdSetLineStippleEXT" ) ); - if ( !vkCmdSetLineStippleKHR ) - vkCmdSetLineStippleKHR = vkCmdSetLineStippleEXT; + PFN_vkCmdSetLineStippleEXT vkCmdSetLineStippleEXT = 0; //=== VK_EXT_host_query_reset === - vkResetQueryPoolEXT = PFN_vkResetQueryPoolEXT( vkGetInstanceProcAddr( instance, "vkResetQueryPoolEXT" ) ); - if ( !vkResetQueryPool ) - vkResetQueryPool = vkResetQueryPoolEXT; + PFN_vkResetQueryPoolEXT vkResetQueryPoolEXT = 0; //=== VK_EXT_extended_dynamic_state === - vkCmdSetCullModeEXT = PFN_vkCmdSetCullModeEXT( vkGetInstanceProcAddr( instance, "vkCmdSetCullModeEXT" ) ); - if ( !vkCmdSetCullMode ) - vkCmdSetCullMode = vkCmdSetCullModeEXT; - vkCmdSetFrontFaceEXT = PFN_vkCmdSetFrontFaceEXT( vkGetInstanceProcAddr( instance, "vkCmdSetFrontFaceEXT" ) ); - if ( !vkCmdSetFrontFace ) - vkCmdSetFrontFace = vkCmdSetFrontFaceEXT; - vkCmdSetPrimitiveTopologyEXT = PFN_vkCmdSetPrimitiveTopologyEXT( vkGetInstanceProcAddr( instance, "vkCmdSetPrimitiveTopologyEXT" ) ); - if ( !vkCmdSetPrimitiveTopology ) - vkCmdSetPrimitiveTopology = vkCmdSetPrimitiveTopologyEXT; - 
vkCmdSetViewportWithCountEXT = PFN_vkCmdSetViewportWithCountEXT( vkGetInstanceProcAddr( instance, "vkCmdSetViewportWithCountEXT" ) ); - if ( !vkCmdSetViewportWithCount ) - vkCmdSetViewportWithCount = vkCmdSetViewportWithCountEXT; - vkCmdSetScissorWithCountEXT = PFN_vkCmdSetScissorWithCountEXT( vkGetInstanceProcAddr( instance, "vkCmdSetScissorWithCountEXT" ) ); - if ( !vkCmdSetScissorWithCount ) - vkCmdSetScissorWithCount = vkCmdSetScissorWithCountEXT; - vkCmdBindVertexBuffers2EXT = PFN_vkCmdBindVertexBuffers2EXT( vkGetInstanceProcAddr( instance, "vkCmdBindVertexBuffers2EXT" ) ); - if ( !vkCmdBindVertexBuffers2 ) - vkCmdBindVertexBuffers2 = vkCmdBindVertexBuffers2EXT; - vkCmdSetDepthTestEnableEXT = PFN_vkCmdSetDepthTestEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthTestEnableEXT" ) ); - if ( !vkCmdSetDepthTestEnable ) - vkCmdSetDepthTestEnable = vkCmdSetDepthTestEnableEXT; - vkCmdSetDepthWriteEnableEXT = PFN_vkCmdSetDepthWriteEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthWriteEnableEXT" ) ); - if ( !vkCmdSetDepthWriteEnable ) - vkCmdSetDepthWriteEnable = vkCmdSetDepthWriteEnableEXT; - vkCmdSetDepthCompareOpEXT = PFN_vkCmdSetDepthCompareOpEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthCompareOpEXT" ) ); - if ( !vkCmdSetDepthCompareOp ) - vkCmdSetDepthCompareOp = vkCmdSetDepthCompareOpEXT; - vkCmdSetDepthBoundsTestEnableEXT = PFN_vkCmdSetDepthBoundsTestEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBoundsTestEnableEXT" ) ); - if ( !vkCmdSetDepthBoundsTestEnable ) - vkCmdSetDepthBoundsTestEnable = vkCmdSetDepthBoundsTestEnableEXT; - vkCmdSetStencilTestEnableEXT = PFN_vkCmdSetStencilTestEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetStencilTestEnableEXT" ) ); - if ( !vkCmdSetStencilTestEnable ) - vkCmdSetStencilTestEnable = vkCmdSetStencilTestEnableEXT; - vkCmdSetStencilOpEXT = PFN_vkCmdSetStencilOpEXT( vkGetInstanceProcAddr( instance, "vkCmdSetStencilOpEXT" ) ); - if ( !vkCmdSetStencilOp ) - vkCmdSetStencilOp = vkCmdSetStencilOpEXT; + PFN_vkCmdSetCullModeEXT vkCmdSetCullModeEXT = 0; + PFN_vkCmdSetFrontFaceEXT vkCmdSetFrontFaceEXT = 0; + PFN_vkCmdSetPrimitiveTopologyEXT vkCmdSetPrimitiveTopologyEXT = 0; + PFN_vkCmdSetViewportWithCountEXT vkCmdSetViewportWithCountEXT = 0; + PFN_vkCmdSetScissorWithCountEXT vkCmdSetScissorWithCountEXT = 0; + PFN_vkCmdBindVertexBuffers2EXT vkCmdBindVertexBuffers2EXT = 0; + PFN_vkCmdSetDepthTestEnableEXT vkCmdSetDepthTestEnableEXT = 0; + PFN_vkCmdSetDepthWriteEnableEXT vkCmdSetDepthWriteEnableEXT = 0; + PFN_vkCmdSetDepthCompareOpEXT vkCmdSetDepthCompareOpEXT = 0; + PFN_vkCmdSetDepthBoundsTestEnableEXT vkCmdSetDepthBoundsTestEnableEXT = 0; + PFN_vkCmdSetStencilTestEnableEXT vkCmdSetStencilTestEnableEXT = 0; + PFN_vkCmdSetStencilOpEXT vkCmdSetStencilOpEXT = 0; //=== VK_KHR_deferred_host_operations === - vkCreateDeferredOperationKHR = PFN_vkCreateDeferredOperationKHR( vkGetInstanceProcAddr( instance, "vkCreateDeferredOperationKHR" ) ); - vkDestroyDeferredOperationKHR = PFN_vkDestroyDeferredOperationKHR( vkGetInstanceProcAddr( instance, "vkDestroyDeferredOperationKHR" ) ); - vkGetDeferredOperationMaxConcurrencyKHR = - PFN_vkGetDeferredOperationMaxConcurrencyKHR( vkGetInstanceProcAddr( instance, "vkGetDeferredOperationMaxConcurrencyKHR" ) ); - vkGetDeferredOperationResultKHR = PFN_vkGetDeferredOperationResultKHR( vkGetInstanceProcAddr( instance, "vkGetDeferredOperationResultKHR" ) ); - vkDeferredOperationJoinKHR = PFN_vkDeferredOperationJoinKHR( vkGetInstanceProcAddr( instance, "vkDeferredOperationJoinKHR" ) ); + 
PFN_vkCreateDeferredOperationKHR vkCreateDeferredOperationKHR = 0; + PFN_vkDestroyDeferredOperationKHR vkDestroyDeferredOperationKHR = 0; + PFN_vkGetDeferredOperationMaxConcurrencyKHR vkGetDeferredOperationMaxConcurrencyKHR = 0; + PFN_vkGetDeferredOperationResultKHR vkGetDeferredOperationResultKHR = 0; + PFN_vkDeferredOperationJoinKHR vkDeferredOperationJoinKHR = 0; //=== VK_KHR_pipeline_executable_properties === - vkGetPipelineExecutablePropertiesKHR = - PFN_vkGetPipelineExecutablePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPipelineExecutablePropertiesKHR" ) ); - vkGetPipelineExecutableStatisticsKHR = - PFN_vkGetPipelineExecutableStatisticsKHR( vkGetInstanceProcAddr( instance, "vkGetPipelineExecutableStatisticsKHR" ) ); - vkGetPipelineExecutableInternalRepresentationsKHR = - PFN_vkGetPipelineExecutableInternalRepresentationsKHR( vkGetInstanceProcAddr( instance, "vkGetPipelineExecutableInternalRepresentationsKHR" ) ); + PFN_vkGetPipelineExecutablePropertiesKHR vkGetPipelineExecutablePropertiesKHR = 0; + PFN_vkGetPipelineExecutableStatisticsKHR vkGetPipelineExecutableStatisticsKHR = 0; + PFN_vkGetPipelineExecutableInternalRepresentationsKHR vkGetPipelineExecutableInternalRepresentationsKHR = 0; //=== VK_EXT_host_image_copy === - vkCopyMemoryToImageEXT = PFN_vkCopyMemoryToImageEXT( vkGetInstanceProcAddr( instance, "vkCopyMemoryToImageEXT" ) ); - vkCopyImageToMemoryEXT = PFN_vkCopyImageToMemoryEXT( vkGetInstanceProcAddr( instance, "vkCopyImageToMemoryEXT" ) ); - vkCopyImageToImageEXT = PFN_vkCopyImageToImageEXT( vkGetInstanceProcAddr( instance, "vkCopyImageToImageEXT" ) ); - vkTransitionImageLayoutEXT = PFN_vkTransitionImageLayoutEXT( vkGetInstanceProcAddr( instance, "vkTransitionImageLayoutEXT" ) ); - vkGetImageSubresourceLayout2EXT = PFN_vkGetImageSubresourceLayout2EXT( vkGetInstanceProcAddr( instance, "vkGetImageSubresourceLayout2EXT" ) ); - if ( !vkGetImageSubresourceLayout2KHR ) - vkGetImageSubresourceLayout2KHR = vkGetImageSubresourceLayout2EXT; + PFN_vkCopyMemoryToImageEXT vkCopyMemoryToImageEXT = 0; + PFN_vkCopyImageToMemoryEXT vkCopyImageToMemoryEXT = 0; + PFN_vkCopyImageToImageEXT vkCopyImageToImageEXT = 0; + PFN_vkTransitionImageLayoutEXT vkTransitionImageLayoutEXT = 0; + PFN_vkGetImageSubresourceLayout2EXT vkGetImageSubresourceLayout2EXT = 0; //=== VK_KHR_map_memory2 === - vkMapMemory2KHR = PFN_vkMapMemory2KHR( vkGetInstanceProcAddr( instance, "vkMapMemory2KHR" ) ); - vkUnmapMemory2KHR = PFN_vkUnmapMemory2KHR( vkGetInstanceProcAddr( instance, "vkUnmapMemory2KHR" ) ); + PFN_vkMapMemory2KHR vkMapMemory2KHR = 0; + PFN_vkUnmapMemory2KHR vkUnmapMemory2KHR = 0; //=== VK_EXT_swapchain_maintenance1 === - vkReleaseSwapchainImagesEXT = PFN_vkReleaseSwapchainImagesEXT( vkGetInstanceProcAddr( instance, "vkReleaseSwapchainImagesEXT" ) ); + PFN_vkReleaseSwapchainImagesEXT vkReleaseSwapchainImagesEXT = 0; //=== VK_NV_device_generated_commands === - vkGetGeneratedCommandsMemoryRequirementsNV = - PFN_vkGetGeneratedCommandsMemoryRequirementsNV( vkGetInstanceProcAddr( instance, "vkGetGeneratedCommandsMemoryRequirementsNV" ) ); - vkCmdPreprocessGeneratedCommandsNV = PFN_vkCmdPreprocessGeneratedCommandsNV( vkGetInstanceProcAddr( instance, "vkCmdPreprocessGeneratedCommandsNV" ) ); - vkCmdExecuteGeneratedCommandsNV = PFN_vkCmdExecuteGeneratedCommandsNV( vkGetInstanceProcAddr( instance, "vkCmdExecuteGeneratedCommandsNV" ) ); - vkCmdBindPipelineShaderGroupNV = PFN_vkCmdBindPipelineShaderGroupNV( vkGetInstanceProcAddr( instance, "vkCmdBindPipelineShaderGroupNV" ) ); - 
vkCreateIndirectCommandsLayoutNV = PFN_vkCreateIndirectCommandsLayoutNV( vkGetInstanceProcAddr( instance, "vkCreateIndirectCommandsLayoutNV" ) ); - vkDestroyIndirectCommandsLayoutNV = PFN_vkDestroyIndirectCommandsLayoutNV( vkGetInstanceProcAddr( instance, "vkDestroyIndirectCommandsLayoutNV" ) ); + PFN_vkGetGeneratedCommandsMemoryRequirementsNV vkGetGeneratedCommandsMemoryRequirementsNV = 0; + PFN_vkCmdPreprocessGeneratedCommandsNV vkCmdPreprocessGeneratedCommandsNV = 0; + PFN_vkCmdExecuteGeneratedCommandsNV vkCmdExecuteGeneratedCommandsNV = 0; + PFN_vkCmdBindPipelineShaderGroupNV vkCmdBindPipelineShaderGroupNV = 0; + PFN_vkCreateIndirectCommandsLayoutNV vkCreateIndirectCommandsLayoutNV = 0; + PFN_vkDestroyIndirectCommandsLayoutNV vkDestroyIndirectCommandsLayoutNV = 0; //=== VK_EXT_depth_bias_control === - vkCmdSetDepthBias2EXT = PFN_vkCmdSetDepthBias2EXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBias2EXT" ) ); + PFN_vkCmdSetDepthBias2EXT vkCmdSetDepthBias2EXT = 0; //=== VK_EXT_acquire_drm_display === - vkAcquireDrmDisplayEXT = PFN_vkAcquireDrmDisplayEXT( vkGetInstanceProcAddr( instance, "vkAcquireDrmDisplayEXT" ) ); - vkGetDrmDisplayEXT = PFN_vkGetDrmDisplayEXT( vkGetInstanceProcAddr( instance, "vkGetDrmDisplayEXT" ) ); + PFN_vkAcquireDrmDisplayEXT vkAcquireDrmDisplayEXT = 0; + PFN_vkGetDrmDisplayEXT vkGetDrmDisplayEXT = 0; //=== VK_EXT_private_data === - vkCreatePrivateDataSlotEXT = PFN_vkCreatePrivateDataSlotEXT( vkGetInstanceProcAddr( instance, "vkCreatePrivateDataSlotEXT" ) ); - if ( !vkCreatePrivateDataSlot ) - vkCreatePrivateDataSlot = vkCreatePrivateDataSlotEXT; - vkDestroyPrivateDataSlotEXT = PFN_vkDestroyPrivateDataSlotEXT( vkGetInstanceProcAddr( instance, "vkDestroyPrivateDataSlotEXT" ) ); - if ( !vkDestroyPrivateDataSlot ) - vkDestroyPrivateDataSlot = vkDestroyPrivateDataSlotEXT; - vkSetPrivateDataEXT = PFN_vkSetPrivateDataEXT( vkGetInstanceProcAddr( instance, "vkSetPrivateDataEXT" ) ); - if ( !vkSetPrivateData ) - vkSetPrivateData = vkSetPrivateDataEXT; - vkGetPrivateDataEXT = PFN_vkGetPrivateDataEXT( vkGetInstanceProcAddr( instance, "vkGetPrivateDataEXT" ) ); - if ( !vkGetPrivateData ) - vkGetPrivateData = vkGetPrivateDataEXT; + PFN_vkCreatePrivateDataSlotEXT vkCreatePrivateDataSlotEXT = 0; + PFN_vkDestroyPrivateDataSlotEXT vkDestroyPrivateDataSlotEXT = 0; + PFN_vkSetPrivateDataEXT vkSetPrivateDataEXT = 0; + PFN_vkGetPrivateDataEXT vkGetPrivateDataEXT = 0; //=== VK_KHR_video_encode_queue === - vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR = PFN_vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR( - vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR" ) ); - vkGetEncodedVideoSessionParametersKHR = - PFN_vkGetEncodedVideoSessionParametersKHR( vkGetInstanceProcAddr( instance, "vkGetEncodedVideoSessionParametersKHR" ) ); - vkCmdEncodeVideoKHR = PFN_vkCmdEncodeVideoKHR( vkGetInstanceProcAddr( instance, "vkCmdEncodeVideoKHR" ) ); + PFN_vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR = 0; + PFN_vkGetEncodedVideoSessionParametersKHR vkGetEncodedVideoSessionParametersKHR = 0; + PFN_vkCmdEncodeVideoKHR vkCmdEncodeVideoKHR = 0; #if defined( VK_ENABLE_BETA_EXTENSIONS ) //=== VK_NV_cuda_kernel_launch === - vkCreateCudaModuleNV = PFN_vkCreateCudaModuleNV( vkGetInstanceProcAddr( instance, "vkCreateCudaModuleNV" ) ); - vkGetCudaModuleCacheNV = PFN_vkGetCudaModuleCacheNV( vkGetInstanceProcAddr( instance, "vkGetCudaModuleCacheNV" ) ); - vkCreateCudaFunctionNV = 
PFN_vkCreateCudaFunctionNV( vkGetInstanceProcAddr( instance, "vkCreateCudaFunctionNV" ) ); - vkDestroyCudaModuleNV = PFN_vkDestroyCudaModuleNV( vkGetInstanceProcAddr( instance, "vkDestroyCudaModuleNV" ) ); - vkDestroyCudaFunctionNV = PFN_vkDestroyCudaFunctionNV( vkGetInstanceProcAddr( instance, "vkDestroyCudaFunctionNV" ) ); - vkCmdCudaLaunchKernelNV = PFN_vkCmdCudaLaunchKernelNV( vkGetInstanceProcAddr( instance, "vkCmdCudaLaunchKernelNV" ) ); + PFN_vkCreateCudaModuleNV vkCreateCudaModuleNV = 0; + PFN_vkGetCudaModuleCacheNV vkGetCudaModuleCacheNV = 0; + PFN_vkCreateCudaFunctionNV vkCreateCudaFunctionNV = 0; + PFN_vkDestroyCudaModuleNV vkDestroyCudaModuleNV = 0; + PFN_vkDestroyCudaFunctionNV vkDestroyCudaFunctionNV = 0; + PFN_vkCmdCudaLaunchKernelNV vkCmdCudaLaunchKernelNV = 0; +#else + PFN_dummy vkCreateCudaModuleNV_placeholder = 0; + PFN_dummy vkGetCudaModuleCacheNV_placeholder = 0; + PFN_dummy vkCreateCudaFunctionNV_placeholder = 0; + PFN_dummy vkDestroyCudaModuleNV_placeholder = 0; + PFN_dummy vkDestroyCudaFunctionNV_placeholder = 0; + PFN_dummy vkCmdCudaLaunchKernelNV_placeholder = 0; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ #if defined( VK_USE_PLATFORM_METAL_EXT ) //=== VK_EXT_metal_objects === - vkExportMetalObjectsEXT = PFN_vkExportMetalObjectsEXT( vkGetInstanceProcAddr( instance, "vkExportMetalObjectsEXT" ) ); + PFN_vkExportMetalObjectsEXT vkExportMetalObjectsEXT = 0; +#else + PFN_dummy vkExportMetalObjectsEXT_placeholder = 0; #endif /*VK_USE_PLATFORM_METAL_EXT*/ //=== VK_KHR_synchronization2 === - vkCmdSetEvent2KHR = PFN_vkCmdSetEvent2KHR( vkGetInstanceProcAddr( instance, "vkCmdSetEvent2KHR" ) ); - if ( !vkCmdSetEvent2 ) - vkCmdSetEvent2 = vkCmdSetEvent2KHR; - vkCmdResetEvent2KHR = PFN_vkCmdResetEvent2KHR( vkGetInstanceProcAddr( instance, "vkCmdResetEvent2KHR" ) ); - if ( !vkCmdResetEvent2 ) - vkCmdResetEvent2 = vkCmdResetEvent2KHR; - vkCmdWaitEvents2KHR = PFN_vkCmdWaitEvents2KHR( vkGetInstanceProcAddr( instance, "vkCmdWaitEvents2KHR" ) ); - if ( !vkCmdWaitEvents2 ) - vkCmdWaitEvents2 = vkCmdWaitEvents2KHR; - vkCmdPipelineBarrier2KHR = PFN_vkCmdPipelineBarrier2KHR( vkGetInstanceProcAddr( instance, "vkCmdPipelineBarrier2KHR" ) ); - if ( !vkCmdPipelineBarrier2 ) - vkCmdPipelineBarrier2 = vkCmdPipelineBarrier2KHR; - vkCmdWriteTimestamp2KHR = PFN_vkCmdWriteTimestamp2KHR( vkGetInstanceProcAddr( instance, "vkCmdWriteTimestamp2KHR" ) ); - if ( !vkCmdWriteTimestamp2 ) - vkCmdWriteTimestamp2 = vkCmdWriteTimestamp2KHR; - vkQueueSubmit2KHR = PFN_vkQueueSubmit2KHR( vkGetInstanceProcAddr( instance, "vkQueueSubmit2KHR" ) ); - if ( !vkQueueSubmit2 ) - vkQueueSubmit2 = vkQueueSubmit2KHR; - vkCmdWriteBufferMarker2AMD = PFN_vkCmdWriteBufferMarker2AMD( vkGetInstanceProcAddr( instance, "vkCmdWriteBufferMarker2AMD" ) ); - vkGetQueueCheckpointData2NV = PFN_vkGetQueueCheckpointData2NV( vkGetInstanceProcAddr( instance, "vkGetQueueCheckpointData2NV" ) ); + PFN_vkCmdSetEvent2KHR vkCmdSetEvent2KHR = 0; + PFN_vkCmdResetEvent2KHR vkCmdResetEvent2KHR = 0; + PFN_vkCmdWaitEvents2KHR vkCmdWaitEvents2KHR = 0; + PFN_vkCmdPipelineBarrier2KHR vkCmdPipelineBarrier2KHR = 0; + PFN_vkCmdWriteTimestamp2KHR vkCmdWriteTimestamp2KHR = 0; + PFN_vkQueueSubmit2KHR vkQueueSubmit2KHR = 0; //=== VK_EXT_descriptor_buffer === - vkGetDescriptorSetLayoutSizeEXT = PFN_vkGetDescriptorSetLayoutSizeEXT( vkGetInstanceProcAddr( instance, "vkGetDescriptorSetLayoutSizeEXT" ) ); - vkGetDescriptorSetLayoutBindingOffsetEXT = - PFN_vkGetDescriptorSetLayoutBindingOffsetEXT( vkGetInstanceProcAddr( instance, "vkGetDescriptorSetLayoutBindingOffsetEXT" ) 
); - vkGetDescriptorEXT = PFN_vkGetDescriptorEXT( vkGetInstanceProcAddr( instance, "vkGetDescriptorEXT" ) ); - vkCmdBindDescriptorBuffersEXT = PFN_vkCmdBindDescriptorBuffersEXT( vkGetInstanceProcAddr( instance, "vkCmdBindDescriptorBuffersEXT" ) ); - vkCmdSetDescriptorBufferOffsetsEXT = PFN_vkCmdSetDescriptorBufferOffsetsEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDescriptorBufferOffsetsEXT" ) ); - vkCmdBindDescriptorBufferEmbeddedSamplersEXT = - PFN_vkCmdBindDescriptorBufferEmbeddedSamplersEXT( vkGetInstanceProcAddr( instance, "vkCmdBindDescriptorBufferEmbeddedSamplersEXT" ) ); - vkGetBufferOpaqueCaptureDescriptorDataEXT = - PFN_vkGetBufferOpaqueCaptureDescriptorDataEXT( vkGetInstanceProcAddr( instance, "vkGetBufferOpaqueCaptureDescriptorDataEXT" ) ); - vkGetImageOpaqueCaptureDescriptorDataEXT = - PFN_vkGetImageOpaqueCaptureDescriptorDataEXT( vkGetInstanceProcAddr( instance, "vkGetImageOpaqueCaptureDescriptorDataEXT" ) ); - vkGetImageViewOpaqueCaptureDescriptorDataEXT = - PFN_vkGetImageViewOpaqueCaptureDescriptorDataEXT( vkGetInstanceProcAddr( instance, "vkGetImageViewOpaqueCaptureDescriptorDataEXT" ) ); - vkGetSamplerOpaqueCaptureDescriptorDataEXT = - PFN_vkGetSamplerOpaqueCaptureDescriptorDataEXT( vkGetInstanceProcAddr( instance, "vkGetSamplerOpaqueCaptureDescriptorDataEXT" ) ); - vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT = PFN_vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT( - vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT" ) ); + PFN_vkGetDescriptorSetLayoutSizeEXT vkGetDescriptorSetLayoutSizeEXT = 0; + PFN_vkGetDescriptorSetLayoutBindingOffsetEXT vkGetDescriptorSetLayoutBindingOffsetEXT = 0; + PFN_vkGetDescriptorEXT vkGetDescriptorEXT = 0; + PFN_vkCmdBindDescriptorBuffersEXT vkCmdBindDescriptorBuffersEXT = 0; + PFN_vkCmdSetDescriptorBufferOffsetsEXT vkCmdSetDescriptorBufferOffsetsEXT = 0; + PFN_vkCmdBindDescriptorBufferEmbeddedSamplersEXT vkCmdBindDescriptorBufferEmbeddedSamplersEXT = 0; + PFN_vkGetBufferOpaqueCaptureDescriptorDataEXT vkGetBufferOpaqueCaptureDescriptorDataEXT = 0; + PFN_vkGetImageOpaqueCaptureDescriptorDataEXT vkGetImageOpaqueCaptureDescriptorDataEXT = 0; + PFN_vkGetImageViewOpaqueCaptureDescriptorDataEXT vkGetImageViewOpaqueCaptureDescriptorDataEXT = 0; + PFN_vkGetSamplerOpaqueCaptureDescriptorDataEXT vkGetSamplerOpaqueCaptureDescriptorDataEXT = 0; + PFN_vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT = 0; //=== VK_NV_fragment_shading_rate_enums === - vkCmdSetFragmentShadingRateEnumNV = PFN_vkCmdSetFragmentShadingRateEnumNV( vkGetInstanceProcAddr( instance, "vkCmdSetFragmentShadingRateEnumNV" ) ); + PFN_vkCmdSetFragmentShadingRateEnumNV vkCmdSetFragmentShadingRateEnumNV = 0; //=== VK_EXT_mesh_shader === - vkCmdDrawMeshTasksEXT = PFN_vkCmdDrawMeshTasksEXT( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksEXT" ) ); - vkCmdDrawMeshTasksIndirectEXT = PFN_vkCmdDrawMeshTasksIndirectEXT( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksIndirectEXT" ) ); - vkCmdDrawMeshTasksIndirectCountEXT = PFN_vkCmdDrawMeshTasksIndirectCountEXT( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksIndirectCountEXT" ) ); + PFN_vkCmdDrawMeshTasksEXT vkCmdDrawMeshTasksEXT = 0; + PFN_vkCmdDrawMeshTasksIndirectEXT vkCmdDrawMeshTasksIndirectEXT = 0; + PFN_vkCmdDrawMeshTasksIndirectCountEXT vkCmdDrawMeshTasksIndirectCountEXT = 0; //=== VK_KHR_copy_commands2 === - vkCmdCopyBuffer2KHR = PFN_vkCmdCopyBuffer2KHR( vkGetInstanceProcAddr( instance, 
"vkCmdCopyBuffer2KHR" ) ); - if ( !vkCmdCopyBuffer2 ) - vkCmdCopyBuffer2 = vkCmdCopyBuffer2KHR; - vkCmdCopyImage2KHR = PFN_vkCmdCopyImage2KHR( vkGetInstanceProcAddr( instance, "vkCmdCopyImage2KHR" ) ); - if ( !vkCmdCopyImage2 ) - vkCmdCopyImage2 = vkCmdCopyImage2KHR; - vkCmdCopyBufferToImage2KHR = PFN_vkCmdCopyBufferToImage2KHR( vkGetInstanceProcAddr( instance, "vkCmdCopyBufferToImage2KHR" ) ); - if ( !vkCmdCopyBufferToImage2 ) - vkCmdCopyBufferToImage2 = vkCmdCopyBufferToImage2KHR; - vkCmdCopyImageToBuffer2KHR = PFN_vkCmdCopyImageToBuffer2KHR( vkGetInstanceProcAddr( instance, "vkCmdCopyImageToBuffer2KHR" ) ); - if ( !vkCmdCopyImageToBuffer2 ) - vkCmdCopyImageToBuffer2 = vkCmdCopyImageToBuffer2KHR; - vkCmdBlitImage2KHR = PFN_vkCmdBlitImage2KHR( vkGetInstanceProcAddr( instance, "vkCmdBlitImage2KHR" ) ); - if ( !vkCmdBlitImage2 ) - vkCmdBlitImage2 = vkCmdBlitImage2KHR; - vkCmdResolveImage2KHR = PFN_vkCmdResolveImage2KHR( vkGetInstanceProcAddr( instance, "vkCmdResolveImage2KHR" ) ); - if ( !vkCmdResolveImage2 ) - vkCmdResolveImage2 = vkCmdResolveImage2KHR; + PFN_vkCmdCopyBuffer2KHR vkCmdCopyBuffer2KHR = 0; + PFN_vkCmdCopyImage2KHR vkCmdCopyImage2KHR = 0; + PFN_vkCmdCopyBufferToImage2KHR vkCmdCopyBufferToImage2KHR = 0; + PFN_vkCmdCopyImageToBuffer2KHR vkCmdCopyImageToBuffer2KHR = 0; + PFN_vkCmdBlitImage2KHR vkCmdBlitImage2KHR = 0; + PFN_vkCmdResolveImage2KHR vkCmdResolveImage2KHR = 0; //=== VK_EXT_device_fault === - vkGetDeviceFaultInfoEXT = PFN_vkGetDeviceFaultInfoEXT( vkGetInstanceProcAddr( instance, "vkGetDeviceFaultInfoEXT" ) ); + PFN_vkGetDeviceFaultInfoEXT vkGetDeviceFaultInfoEXT = 0; #if defined( VK_USE_PLATFORM_WIN32_KHR ) //=== VK_NV_acquire_winrt_display === - vkAcquireWinrtDisplayNV = PFN_vkAcquireWinrtDisplayNV( vkGetInstanceProcAddr( instance, "vkAcquireWinrtDisplayNV" ) ); - vkGetWinrtDisplayNV = PFN_vkGetWinrtDisplayNV( vkGetInstanceProcAddr( instance, "vkGetWinrtDisplayNV" ) ); + PFN_vkAcquireWinrtDisplayNV vkAcquireWinrtDisplayNV = 0; + PFN_vkGetWinrtDisplayNV vkGetWinrtDisplayNV = 0; +#else + PFN_dummy vkAcquireWinrtDisplayNV_placeholder = 0; + PFN_dummy vkGetWinrtDisplayNV_placeholder = 0; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ #if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) //=== VK_EXT_directfb_surface === - vkCreateDirectFBSurfaceEXT = PFN_vkCreateDirectFBSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateDirectFBSurfaceEXT" ) ); - vkGetPhysicalDeviceDirectFBPresentationSupportEXT = - PFN_vkGetPhysicalDeviceDirectFBPresentationSupportEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDirectFBPresentationSupportEXT" ) ); + PFN_vkCreateDirectFBSurfaceEXT vkCreateDirectFBSurfaceEXT = 0; + PFN_vkGetPhysicalDeviceDirectFBPresentationSupportEXT vkGetPhysicalDeviceDirectFBPresentationSupportEXT = 0; +#else + PFN_dummy vkCreateDirectFBSurfaceEXT_placeholder = 0; + PFN_dummy vkGetPhysicalDeviceDirectFBPresentationSupportEXT_placeholder = 0; #endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ //=== VK_EXT_vertex_input_dynamic_state === - vkCmdSetVertexInputEXT = PFN_vkCmdSetVertexInputEXT( vkGetInstanceProcAddr( instance, "vkCmdSetVertexInputEXT" ) ); + PFN_vkCmdSetVertexInputEXT vkCmdSetVertexInputEXT = 0; #if defined( VK_USE_PLATFORM_FUCHSIA ) //=== VK_FUCHSIA_external_memory === - vkGetMemoryZirconHandleFUCHSIA = PFN_vkGetMemoryZirconHandleFUCHSIA( vkGetInstanceProcAddr( instance, "vkGetMemoryZirconHandleFUCHSIA" ) ); - vkGetMemoryZirconHandlePropertiesFUCHSIA = - PFN_vkGetMemoryZirconHandlePropertiesFUCHSIA( vkGetInstanceProcAddr( instance, "vkGetMemoryZirconHandlePropertiesFUCHSIA" ) 
); + PFN_vkGetMemoryZirconHandleFUCHSIA vkGetMemoryZirconHandleFUCHSIA = 0; + PFN_vkGetMemoryZirconHandlePropertiesFUCHSIA vkGetMemoryZirconHandlePropertiesFUCHSIA = 0; +#else + PFN_dummy vkGetMemoryZirconHandleFUCHSIA_placeholder = 0; + PFN_dummy vkGetMemoryZirconHandlePropertiesFUCHSIA_placeholder = 0; #endif /*VK_USE_PLATFORM_FUCHSIA*/ #if defined( VK_USE_PLATFORM_FUCHSIA ) //=== VK_FUCHSIA_external_semaphore === - vkImportSemaphoreZirconHandleFUCHSIA = - PFN_vkImportSemaphoreZirconHandleFUCHSIA( vkGetInstanceProcAddr( instance, "vkImportSemaphoreZirconHandleFUCHSIA" ) ); - vkGetSemaphoreZirconHandleFUCHSIA = PFN_vkGetSemaphoreZirconHandleFUCHSIA( vkGetInstanceProcAddr( instance, "vkGetSemaphoreZirconHandleFUCHSIA" ) ); + PFN_vkImportSemaphoreZirconHandleFUCHSIA vkImportSemaphoreZirconHandleFUCHSIA = 0; + PFN_vkGetSemaphoreZirconHandleFUCHSIA vkGetSemaphoreZirconHandleFUCHSIA = 0; +#else + PFN_dummy vkImportSemaphoreZirconHandleFUCHSIA_placeholder = 0; + PFN_dummy vkGetSemaphoreZirconHandleFUCHSIA_placeholder = 0; #endif /*VK_USE_PLATFORM_FUCHSIA*/ #if defined( VK_USE_PLATFORM_FUCHSIA ) //=== VK_FUCHSIA_buffer_collection === - vkCreateBufferCollectionFUCHSIA = PFN_vkCreateBufferCollectionFUCHSIA( vkGetInstanceProcAddr( instance, "vkCreateBufferCollectionFUCHSIA" ) ); - vkSetBufferCollectionImageConstraintsFUCHSIA = - PFN_vkSetBufferCollectionImageConstraintsFUCHSIA( vkGetInstanceProcAddr( instance, "vkSetBufferCollectionImageConstraintsFUCHSIA" ) ); - vkSetBufferCollectionBufferConstraintsFUCHSIA = - PFN_vkSetBufferCollectionBufferConstraintsFUCHSIA( vkGetInstanceProcAddr( instance, "vkSetBufferCollectionBufferConstraintsFUCHSIA" ) ); - vkDestroyBufferCollectionFUCHSIA = PFN_vkDestroyBufferCollectionFUCHSIA( vkGetInstanceProcAddr( instance, "vkDestroyBufferCollectionFUCHSIA" ) ); - vkGetBufferCollectionPropertiesFUCHSIA = - PFN_vkGetBufferCollectionPropertiesFUCHSIA( vkGetInstanceProcAddr( instance, "vkGetBufferCollectionPropertiesFUCHSIA" ) ); + PFN_vkCreateBufferCollectionFUCHSIA vkCreateBufferCollectionFUCHSIA = 0; + PFN_vkSetBufferCollectionImageConstraintsFUCHSIA vkSetBufferCollectionImageConstraintsFUCHSIA = 0; + PFN_vkSetBufferCollectionBufferConstraintsFUCHSIA vkSetBufferCollectionBufferConstraintsFUCHSIA = 0; + PFN_vkDestroyBufferCollectionFUCHSIA vkDestroyBufferCollectionFUCHSIA = 0; + PFN_vkGetBufferCollectionPropertiesFUCHSIA vkGetBufferCollectionPropertiesFUCHSIA = 0; +#else + PFN_dummy vkCreateBufferCollectionFUCHSIA_placeholder = 0; + PFN_dummy vkSetBufferCollectionImageConstraintsFUCHSIA_placeholder = 0; + PFN_dummy vkSetBufferCollectionBufferConstraintsFUCHSIA_placeholder = 0; + PFN_dummy vkDestroyBufferCollectionFUCHSIA_placeholder = 0; + PFN_dummy vkGetBufferCollectionPropertiesFUCHSIA_placeholder = 0; #endif /*VK_USE_PLATFORM_FUCHSIA*/ //=== VK_HUAWEI_subpass_shading === - vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI = - PFN_vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI( vkGetInstanceProcAddr( instance, "vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI" ) ); - vkCmdSubpassShadingHUAWEI = PFN_vkCmdSubpassShadingHUAWEI( vkGetInstanceProcAddr( instance, "vkCmdSubpassShadingHUAWEI" ) ); + PFN_vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI = 0; + PFN_vkCmdSubpassShadingHUAWEI vkCmdSubpassShadingHUAWEI = 0; //=== VK_HUAWEI_invocation_mask === - vkCmdBindInvocationMaskHUAWEI = PFN_vkCmdBindInvocationMaskHUAWEI( vkGetInstanceProcAddr( instance, "vkCmdBindInvocationMaskHUAWEI" ) ); + PFN_vkCmdBindInvocationMaskHUAWEI 
vkCmdBindInvocationMaskHUAWEI = 0; //=== VK_NV_external_memory_rdma === - vkGetMemoryRemoteAddressNV = PFN_vkGetMemoryRemoteAddressNV( vkGetInstanceProcAddr( instance, "vkGetMemoryRemoteAddressNV" ) ); + PFN_vkGetMemoryRemoteAddressNV vkGetMemoryRemoteAddressNV = 0; //=== VK_EXT_pipeline_properties === - vkGetPipelinePropertiesEXT = PFN_vkGetPipelinePropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetPipelinePropertiesEXT" ) ); + PFN_vkGetPipelinePropertiesEXT vkGetPipelinePropertiesEXT = 0; //=== VK_EXT_extended_dynamic_state2 === - vkCmdSetPatchControlPointsEXT = PFN_vkCmdSetPatchControlPointsEXT( vkGetInstanceProcAddr( instance, "vkCmdSetPatchControlPointsEXT" ) ); - vkCmdSetRasterizerDiscardEnableEXT = PFN_vkCmdSetRasterizerDiscardEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetRasterizerDiscardEnableEXT" ) ); - if ( !vkCmdSetRasterizerDiscardEnable ) - vkCmdSetRasterizerDiscardEnable = vkCmdSetRasterizerDiscardEnableEXT; - vkCmdSetDepthBiasEnableEXT = PFN_vkCmdSetDepthBiasEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBiasEnableEXT" ) ); - if ( !vkCmdSetDepthBiasEnable ) - vkCmdSetDepthBiasEnable = vkCmdSetDepthBiasEnableEXT; - vkCmdSetLogicOpEXT = PFN_vkCmdSetLogicOpEXT( vkGetInstanceProcAddr( instance, "vkCmdSetLogicOpEXT" ) ); - vkCmdSetPrimitiveRestartEnableEXT = PFN_vkCmdSetPrimitiveRestartEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetPrimitiveRestartEnableEXT" ) ); - if ( !vkCmdSetPrimitiveRestartEnable ) - vkCmdSetPrimitiveRestartEnable = vkCmdSetPrimitiveRestartEnableEXT; + PFN_vkCmdSetPatchControlPointsEXT vkCmdSetPatchControlPointsEXT = 0; + PFN_vkCmdSetRasterizerDiscardEnableEXT vkCmdSetRasterizerDiscardEnableEXT = 0; + PFN_vkCmdSetDepthBiasEnableEXT vkCmdSetDepthBiasEnableEXT = 0; + PFN_vkCmdSetLogicOpEXT vkCmdSetLogicOpEXT = 0; + PFN_vkCmdSetPrimitiveRestartEnableEXT vkCmdSetPrimitiveRestartEnableEXT = 0; #if defined( VK_USE_PLATFORM_SCREEN_QNX ) //=== VK_QNX_screen_surface === - vkCreateScreenSurfaceQNX = PFN_vkCreateScreenSurfaceQNX( vkGetInstanceProcAddr( instance, "vkCreateScreenSurfaceQNX" ) ); - vkGetPhysicalDeviceScreenPresentationSupportQNX = - PFN_vkGetPhysicalDeviceScreenPresentationSupportQNX( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceScreenPresentationSupportQNX" ) ); + PFN_vkCreateScreenSurfaceQNX vkCreateScreenSurfaceQNX = 0; + PFN_vkGetPhysicalDeviceScreenPresentationSupportQNX vkGetPhysicalDeviceScreenPresentationSupportQNX = 0; +#else + PFN_dummy vkCreateScreenSurfaceQNX_placeholder = 0; + PFN_dummy vkGetPhysicalDeviceScreenPresentationSupportQNX_placeholder = 0; #endif /*VK_USE_PLATFORM_SCREEN_QNX*/ //=== VK_EXT_color_write_enable === - vkCmdSetColorWriteEnableEXT = PFN_vkCmdSetColorWriteEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetColorWriteEnableEXT" ) ); + PFN_vkCmdSetColorWriteEnableEXT vkCmdSetColorWriteEnableEXT = 0; //=== VK_KHR_ray_tracing_maintenance1 === - vkCmdTraceRaysIndirect2KHR = PFN_vkCmdTraceRaysIndirect2KHR( vkGetInstanceProcAddr( instance, "vkCmdTraceRaysIndirect2KHR" ) ); + PFN_vkCmdTraceRaysIndirect2KHR vkCmdTraceRaysIndirect2KHR = 0; //=== VK_EXT_multi_draw === - vkCmdDrawMultiEXT = PFN_vkCmdDrawMultiEXT( vkGetInstanceProcAddr( instance, "vkCmdDrawMultiEXT" ) ); - vkCmdDrawMultiIndexedEXT = PFN_vkCmdDrawMultiIndexedEXT( vkGetInstanceProcAddr( instance, "vkCmdDrawMultiIndexedEXT" ) ); + PFN_vkCmdDrawMultiEXT vkCmdDrawMultiEXT = 0; + PFN_vkCmdDrawMultiIndexedEXT vkCmdDrawMultiIndexedEXT = 0; //=== VK_EXT_opacity_micromap === - vkCreateMicromapEXT = PFN_vkCreateMicromapEXT( 
vkGetInstanceProcAddr( instance, "vkCreateMicromapEXT" ) ); - vkDestroyMicromapEXT = PFN_vkDestroyMicromapEXT( vkGetInstanceProcAddr( instance, "vkDestroyMicromapEXT" ) ); - vkCmdBuildMicromapsEXT = PFN_vkCmdBuildMicromapsEXT( vkGetInstanceProcAddr( instance, "vkCmdBuildMicromapsEXT" ) ); - vkBuildMicromapsEXT = PFN_vkBuildMicromapsEXT( vkGetInstanceProcAddr( instance, "vkBuildMicromapsEXT" ) ); - vkCopyMicromapEXT = PFN_vkCopyMicromapEXT( vkGetInstanceProcAddr( instance, "vkCopyMicromapEXT" ) ); - vkCopyMicromapToMemoryEXT = PFN_vkCopyMicromapToMemoryEXT( vkGetInstanceProcAddr( instance, "vkCopyMicromapToMemoryEXT" ) ); - vkCopyMemoryToMicromapEXT = PFN_vkCopyMemoryToMicromapEXT( vkGetInstanceProcAddr( instance, "vkCopyMemoryToMicromapEXT" ) ); - vkWriteMicromapsPropertiesEXT = PFN_vkWriteMicromapsPropertiesEXT( vkGetInstanceProcAddr( instance, "vkWriteMicromapsPropertiesEXT" ) ); - vkCmdCopyMicromapEXT = PFN_vkCmdCopyMicromapEXT( vkGetInstanceProcAddr( instance, "vkCmdCopyMicromapEXT" ) ); - vkCmdCopyMicromapToMemoryEXT = PFN_vkCmdCopyMicromapToMemoryEXT( vkGetInstanceProcAddr( instance, "vkCmdCopyMicromapToMemoryEXT" ) ); - vkCmdCopyMemoryToMicromapEXT = PFN_vkCmdCopyMemoryToMicromapEXT( vkGetInstanceProcAddr( instance, "vkCmdCopyMemoryToMicromapEXT" ) ); - vkCmdWriteMicromapsPropertiesEXT = PFN_vkCmdWriteMicromapsPropertiesEXT( vkGetInstanceProcAddr( instance, "vkCmdWriteMicromapsPropertiesEXT" ) ); - vkGetDeviceMicromapCompatibilityEXT = PFN_vkGetDeviceMicromapCompatibilityEXT( vkGetInstanceProcAddr( instance, "vkGetDeviceMicromapCompatibilityEXT" ) ); - vkGetMicromapBuildSizesEXT = PFN_vkGetMicromapBuildSizesEXT( vkGetInstanceProcAddr( instance, "vkGetMicromapBuildSizesEXT" ) ); + PFN_vkCreateMicromapEXT vkCreateMicromapEXT = 0; + PFN_vkDestroyMicromapEXT vkDestroyMicromapEXT = 0; + PFN_vkCmdBuildMicromapsEXT vkCmdBuildMicromapsEXT = 0; + PFN_vkBuildMicromapsEXT vkBuildMicromapsEXT = 0; + PFN_vkCopyMicromapEXT vkCopyMicromapEXT = 0; + PFN_vkCopyMicromapToMemoryEXT vkCopyMicromapToMemoryEXT = 0; + PFN_vkCopyMemoryToMicromapEXT vkCopyMemoryToMicromapEXT = 0; + PFN_vkWriteMicromapsPropertiesEXT vkWriteMicromapsPropertiesEXT = 0; + PFN_vkCmdCopyMicromapEXT vkCmdCopyMicromapEXT = 0; + PFN_vkCmdCopyMicromapToMemoryEXT vkCmdCopyMicromapToMemoryEXT = 0; + PFN_vkCmdCopyMemoryToMicromapEXT vkCmdCopyMemoryToMicromapEXT = 0; + PFN_vkCmdWriteMicromapsPropertiesEXT vkCmdWriteMicromapsPropertiesEXT = 0; + PFN_vkGetDeviceMicromapCompatibilityEXT vkGetDeviceMicromapCompatibilityEXT = 0; + PFN_vkGetMicromapBuildSizesEXT vkGetMicromapBuildSizesEXT = 0; //=== VK_HUAWEI_cluster_culling_shader === - vkCmdDrawClusterHUAWEI = PFN_vkCmdDrawClusterHUAWEI( vkGetInstanceProcAddr( instance, "vkCmdDrawClusterHUAWEI" ) ); - vkCmdDrawClusterIndirectHUAWEI = PFN_vkCmdDrawClusterIndirectHUAWEI( vkGetInstanceProcAddr( instance, "vkCmdDrawClusterIndirectHUAWEI" ) ); + PFN_vkCmdDrawClusterHUAWEI vkCmdDrawClusterHUAWEI = 0; + PFN_vkCmdDrawClusterIndirectHUAWEI vkCmdDrawClusterIndirectHUAWEI = 0; //=== VK_EXT_pageable_device_local_memory === - vkSetDeviceMemoryPriorityEXT = PFN_vkSetDeviceMemoryPriorityEXT( vkGetInstanceProcAddr( instance, "vkSetDeviceMemoryPriorityEXT" ) ); + PFN_vkSetDeviceMemoryPriorityEXT vkSetDeviceMemoryPriorityEXT = 0; //=== VK_KHR_maintenance4 === - vkGetDeviceBufferMemoryRequirementsKHR = - PFN_vkGetDeviceBufferMemoryRequirementsKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceBufferMemoryRequirementsKHR" ) ); - if ( !vkGetDeviceBufferMemoryRequirements ) - vkGetDeviceBufferMemoryRequirements = 
vkGetDeviceBufferMemoryRequirementsKHR; - vkGetDeviceImageMemoryRequirementsKHR = - PFN_vkGetDeviceImageMemoryRequirementsKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceImageMemoryRequirementsKHR" ) ); - if ( !vkGetDeviceImageMemoryRequirements ) - vkGetDeviceImageMemoryRequirements = vkGetDeviceImageMemoryRequirementsKHR; - vkGetDeviceImageSparseMemoryRequirementsKHR = - PFN_vkGetDeviceImageSparseMemoryRequirementsKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceImageSparseMemoryRequirementsKHR" ) ); - if ( !vkGetDeviceImageSparseMemoryRequirements ) - vkGetDeviceImageSparseMemoryRequirements = vkGetDeviceImageSparseMemoryRequirementsKHR; + PFN_vkGetDeviceBufferMemoryRequirementsKHR vkGetDeviceBufferMemoryRequirementsKHR = 0; + PFN_vkGetDeviceImageMemoryRequirementsKHR vkGetDeviceImageMemoryRequirementsKHR = 0; + PFN_vkGetDeviceImageSparseMemoryRequirementsKHR vkGetDeviceImageSparseMemoryRequirementsKHR = 0; //=== VK_VALVE_descriptor_set_host_mapping === - vkGetDescriptorSetLayoutHostMappingInfoVALVE = - PFN_vkGetDescriptorSetLayoutHostMappingInfoVALVE( vkGetInstanceProcAddr( instance, "vkGetDescriptorSetLayoutHostMappingInfoVALVE" ) ); - vkGetDescriptorSetHostMappingVALVE = PFN_vkGetDescriptorSetHostMappingVALVE( vkGetInstanceProcAddr( instance, "vkGetDescriptorSetHostMappingVALVE" ) ); + PFN_vkGetDescriptorSetLayoutHostMappingInfoVALVE vkGetDescriptorSetLayoutHostMappingInfoVALVE = 0; + PFN_vkGetDescriptorSetHostMappingVALVE vkGetDescriptorSetHostMappingVALVE = 0; //=== VK_NV_copy_memory_indirect === - vkCmdCopyMemoryIndirectNV = PFN_vkCmdCopyMemoryIndirectNV( vkGetInstanceProcAddr( instance, "vkCmdCopyMemoryIndirectNV" ) ); - vkCmdCopyMemoryToImageIndirectNV = PFN_vkCmdCopyMemoryToImageIndirectNV( vkGetInstanceProcAddr( instance, "vkCmdCopyMemoryToImageIndirectNV" ) ); + PFN_vkCmdCopyMemoryIndirectNV vkCmdCopyMemoryIndirectNV = 0; + PFN_vkCmdCopyMemoryToImageIndirectNV vkCmdCopyMemoryToImageIndirectNV = 0; //=== VK_NV_memory_decompression === - vkCmdDecompressMemoryNV = PFN_vkCmdDecompressMemoryNV( vkGetInstanceProcAddr( instance, "vkCmdDecompressMemoryNV" ) ); - vkCmdDecompressMemoryIndirectCountNV = - PFN_vkCmdDecompressMemoryIndirectCountNV( vkGetInstanceProcAddr( instance, "vkCmdDecompressMemoryIndirectCountNV" ) ); + PFN_vkCmdDecompressMemoryNV vkCmdDecompressMemoryNV = 0; + PFN_vkCmdDecompressMemoryIndirectCountNV vkCmdDecompressMemoryIndirectCountNV = 0; //=== VK_NV_device_generated_commands_compute === - vkGetPipelineIndirectMemoryRequirementsNV = - PFN_vkGetPipelineIndirectMemoryRequirementsNV( vkGetInstanceProcAddr( instance, "vkGetPipelineIndirectMemoryRequirementsNV" ) ); - vkCmdUpdatePipelineIndirectBufferNV = PFN_vkCmdUpdatePipelineIndirectBufferNV( vkGetInstanceProcAddr( instance, "vkCmdUpdatePipelineIndirectBufferNV" ) ); - vkGetPipelineIndirectDeviceAddressNV = - PFN_vkGetPipelineIndirectDeviceAddressNV( vkGetInstanceProcAddr( instance, "vkGetPipelineIndirectDeviceAddressNV" ) ); + PFN_vkGetPipelineIndirectMemoryRequirementsNV vkGetPipelineIndirectMemoryRequirementsNV = 0; + PFN_vkCmdUpdatePipelineIndirectBufferNV vkCmdUpdatePipelineIndirectBufferNV = 0; + PFN_vkGetPipelineIndirectDeviceAddressNV vkGetPipelineIndirectDeviceAddressNV = 0; //=== VK_EXT_extended_dynamic_state3 === - vkCmdSetDepthClampEnableEXT = PFN_vkCmdSetDepthClampEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthClampEnableEXT" ) ); - vkCmdSetPolygonModeEXT = PFN_vkCmdSetPolygonModeEXT( vkGetInstanceProcAddr( instance, "vkCmdSetPolygonModeEXT" ) ); - vkCmdSetRasterizationSamplesEXT = 
PFN_vkCmdSetRasterizationSamplesEXT( vkGetInstanceProcAddr( instance, "vkCmdSetRasterizationSamplesEXT" ) ); - vkCmdSetSampleMaskEXT = PFN_vkCmdSetSampleMaskEXT( vkGetInstanceProcAddr( instance, "vkCmdSetSampleMaskEXT" ) ); - vkCmdSetAlphaToCoverageEnableEXT = PFN_vkCmdSetAlphaToCoverageEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetAlphaToCoverageEnableEXT" ) ); - vkCmdSetAlphaToOneEnableEXT = PFN_vkCmdSetAlphaToOneEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetAlphaToOneEnableEXT" ) ); - vkCmdSetLogicOpEnableEXT = PFN_vkCmdSetLogicOpEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetLogicOpEnableEXT" ) ); - vkCmdSetColorBlendEnableEXT = PFN_vkCmdSetColorBlendEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetColorBlendEnableEXT" ) ); - vkCmdSetColorBlendEquationEXT = PFN_vkCmdSetColorBlendEquationEXT( vkGetInstanceProcAddr( instance, "vkCmdSetColorBlendEquationEXT" ) ); - vkCmdSetColorWriteMaskEXT = PFN_vkCmdSetColorWriteMaskEXT( vkGetInstanceProcAddr( instance, "vkCmdSetColorWriteMaskEXT" ) ); - vkCmdSetTessellationDomainOriginEXT = PFN_vkCmdSetTessellationDomainOriginEXT( vkGetInstanceProcAddr( instance, "vkCmdSetTessellationDomainOriginEXT" ) ); - vkCmdSetRasterizationStreamEXT = PFN_vkCmdSetRasterizationStreamEXT( vkGetInstanceProcAddr( instance, "vkCmdSetRasterizationStreamEXT" ) ); - vkCmdSetConservativeRasterizationModeEXT = - PFN_vkCmdSetConservativeRasterizationModeEXT( vkGetInstanceProcAddr( instance, "vkCmdSetConservativeRasterizationModeEXT" ) ); - vkCmdSetExtraPrimitiveOverestimationSizeEXT = - PFN_vkCmdSetExtraPrimitiveOverestimationSizeEXT( vkGetInstanceProcAddr( instance, "vkCmdSetExtraPrimitiveOverestimationSizeEXT" ) ); - vkCmdSetDepthClipEnableEXT = PFN_vkCmdSetDepthClipEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthClipEnableEXT" ) ); - vkCmdSetSampleLocationsEnableEXT = PFN_vkCmdSetSampleLocationsEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetSampleLocationsEnableEXT" ) ); - vkCmdSetColorBlendAdvancedEXT = PFN_vkCmdSetColorBlendAdvancedEXT( vkGetInstanceProcAddr( instance, "vkCmdSetColorBlendAdvancedEXT" ) ); - vkCmdSetProvokingVertexModeEXT = PFN_vkCmdSetProvokingVertexModeEXT( vkGetInstanceProcAddr( instance, "vkCmdSetProvokingVertexModeEXT" ) ); - vkCmdSetLineRasterizationModeEXT = PFN_vkCmdSetLineRasterizationModeEXT( vkGetInstanceProcAddr( instance, "vkCmdSetLineRasterizationModeEXT" ) ); - vkCmdSetLineStippleEnableEXT = PFN_vkCmdSetLineStippleEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetLineStippleEnableEXT" ) ); - vkCmdSetDepthClipNegativeOneToOneEXT = - PFN_vkCmdSetDepthClipNegativeOneToOneEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthClipNegativeOneToOneEXT" ) ); - vkCmdSetViewportWScalingEnableNV = PFN_vkCmdSetViewportWScalingEnableNV( vkGetInstanceProcAddr( instance, "vkCmdSetViewportWScalingEnableNV" ) ); - vkCmdSetViewportSwizzleNV = PFN_vkCmdSetViewportSwizzleNV( vkGetInstanceProcAddr( instance, "vkCmdSetViewportSwizzleNV" ) ); - vkCmdSetCoverageToColorEnableNV = PFN_vkCmdSetCoverageToColorEnableNV( vkGetInstanceProcAddr( instance, "vkCmdSetCoverageToColorEnableNV" ) ); - vkCmdSetCoverageToColorLocationNV = PFN_vkCmdSetCoverageToColorLocationNV( vkGetInstanceProcAddr( instance, "vkCmdSetCoverageToColorLocationNV" ) ); - vkCmdSetCoverageModulationModeNV = PFN_vkCmdSetCoverageModulationModeNV( vkGetInstanceProcAddr( instance, "vkCmdSetCoverageModulationModeNV" ) ); - vkCmdSetCoverageModulationTableEnableNV = - PFN_vkCmdSetCoverageModulationTableEnableNV( vkGetInstanceProcAddr( instance, 
"vkCmdSetCoverageModulationTableEnableNV" ) ); - vkCmdSetCoverageModulationTableNV = PFN_vkCmdSetCoverageModulationTableNV( vkGetInstanceProcAddr( instance, "vkCmdSetCoverageModulationTableNV" ) ); - vkCmdSetShadingRateImageEnableNV = PFN_vkCmdSetShadingRateImageEnableNV( vkGetInstanceProcAddr( instance, "vkCmdSetShadingRateImageEnableNV" ) ); - vkCmdSetRepresentativeFragmentTestEnableNV = - PFN_vkCmdSetRepresentativeFragmentTestEnableNV( vkGetInstanceProcAddr( instance, "vkCmdSetRepresentativeFragmentTestEnableNV" ) ); - vkCmdSetCoverageReductionModeNV = PFN_vkCmdSetCoverageReductionModeNV( vkGetInstanceProcAddr( instance, "vkCmdSetCoverageReductionModeNV" ) ); + PFN_vkCmdSetDepthClampEnableEXT vkCmdSetDepthClampEnableEXT = 0; + PFN_vkCmdSetPolygonModeEXT vkCmdSetPolygonModeEXT = 0; + PFN_vkCmdSetRasterizationSamplesEXT vkCmdSetRasterizationSamplesEXT = 0; + PFN_vkCmdSetSampleMaskEXT vkCmdSetSampleMaskEXT = 0; + PFN_vkCmdSetAlphaToCoverageEnableEXT vkCmdSetAlphaToCoverageEnableEXT = 0; + PFN_vkCmdSetAlphaToOneEnableEXT vkCmdSetAlphaToOneEnableEXT = 0; + PFN_vkCmdSetLogicOpEnableEXT vkCmdSetLogicOpEnableEXT = 0; + PFN_vkCmdSetColorBlendEnableEXT vkCmdSetColorBlendEnableEXT = 0; + PFN_vkCmdSetColorBlendEquationEXT vkCmdSetColorBlendEquationEXT = 0; + PFN_vkCmdSetColorWriteMaskEXT vkCmdSetColorWriteMaskEXT = 0; + PFN_vkCmdSetTessellationDomainOriginEXT vkCmdSetTessellationDomainOriginEXT = 0; + PFN_vkCmdSetRasterizationStreamEXT vkCmdSetRasterizationStreamEXT = 0; + PFN_vkCmdSetConservativeRasterizationModeEXT vkCmdSetConservativeRasterizationModeEXT = 0; + PFN_vkCmdSetExtraPrimitiveOverestimationSizeEXT vkCmdSetExtraPrimitiveOverestimationSizeEXT = 0; + PFN_vkCmdSetDepthClipEnableEXT vkCmdSetDepthClipEnableEXT = 0; + PFN_vkCmdSetSampleLocationsEnableEXT vkCmdSetSampleLocationsEnableEXT = 0; + PFN_vkCmdSetColorBlendAdvancedEXT vkCmdSetColorBlendAdvancedEXT = 0; + PFN_vkCmdSetProvokingVertexModeEXT vkCmdSetProvokingVertexModeEXT = 0; + PFN_vkCmdSetLineRasterizationModeEXT vkCmdSetLineRasterizationModeEXT = 0; + PFN_vkCmdSetLineStippleEnableEXT vkCmdSetLineStippleEnableEXT = 0; + PFN_vkCmdSetDepthClipNegativeOneToOneEXT vkCmdSetDepthClipNegativeOneToOneEXT = 0; + PFN_vkCmdSetViewportWScalingEnableNV vkCmdSetViewportWScalingEnableNV = 0; + PFN_vkCmdSetViewportSwizzleNV vkCmdSetViewportSwizzleNV = 0; + PFN_vkCmdSetCoverageToColorEnableNV vkCmdSetCoverageToColorEnableNV = 0; + PFN_vkCmdSetCoverageToColorLocationNV vkCmdSetCoverageToColorLocationNV = 0; + PFN_vkCmdSetCoverageModulationModeNV vkCmdSetCoverageModulationModeNV = 0; + PFN_vkCmdSetCoverageModulationTableEnableNV vkCmdSetCoverageModulationTableEnableNV = 0; + PFN_vkCmdSetCoverageModulationTableNV vkCmdSetCoverageModulationTableNV = 0; + PFN_vkCmdSetShadingRateImageEnableNV vkCmdSetShadingRateImageEnableNV = 0; + PFN_vkCmdSetRepresentativeFragmentTestEnableNV vkCmdSetRepresentativeFragmentTestEnableNV = 0; + PFN_vkCmdSetCoverageReductionModeNV vkCmdSetCoverageReductionModeNV = 0; //=== VK_EXT_shader_module_identifier === - vkGetShaderModuleIdentifierEXT = PFN_vkGetShaderModuleIdentifierEXT( vkGetInstanceProcAddr( instance, "vkGetShaderModuleIdentifierEXT" ) ); - vkGetShaderModuleCreateInfoIdentifierEXT = - PFN_vkGetShaderModuleCreateInfoIdentifierEXT( vkGetInstanceProcAddr( instance, "vkGetShaderModuleCreateInfoIdentifierEXT" ) ); + PFN_vkGetShaderModuleIdentifierEXT vkGetShaderModuleIdentifierEXT = 0; + PFN_vkGetShaderModuleCreateInfoIdentifierEXT vkGetShaderModuleCreateInfoIdentifierEXT = 0; //=== VK_NV_optical_flow === - 
vkGetPhysicalDeviceOpticalFlowImageFormatsNV = - PFN_vkGetPhysicalDeviceOpticalFlowImageFormatsNV( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceOpticalFlowImageFormatsNV" ) ); - vkCreateOpticalFlowSessionNV = PFN_vkCreateOpticalFlowSessionNV( vkGetInstanceProcAddr( instance, "vkCreateOpticalFlowSessionNV" ) ); - vkDestroyOpticalFlowSessionNV = PFN_vkDestroyOpticalFlowSessionNV( vkGetInstanceProcAddr( instance, "vkDestroyOpticalFlowSessionNV" ) ); - vkBindOpticalFlowSessionImageNV = PFN_vkBindOpticalFlowSessionImageNV( vkGetInstanceProcAddr( instance, "vkBindOpticalFlowSessionImageNV" ) ); - vkCmdOpticalFlowExecuteNV = PFN_vkCmdOpticalFlowExecuteNV( vkGetInstanceProcAddr( instance, "vkCmdOpticalFlowExecuteNV" ) ); + PFN_vkGetPhysicalDeviceOpticalFlowImageFormatsNV vkGetPhysicalDeviceOpticalFlowImageFormatsNV = 0; + PFN_vkCreateOpticalFlowSessionNV vkCreateOpticalFlowSessionNV = 0; + PFN_vkDestroyOpticalFlowSessionNV vkDestroyOpticalFlowSessionNV = 0; + PFN_vkBindOpticalFlowSessionImageNV vkBindOpticalFlowSessionImageNV = 0; + PFN_vkCmdOpticalFlowExecuteNV vkCmdOpticalFlowExecuteNV = 0; //=== VK_KHR_maintenance5 === - vkCmdBindIndexBuffer2KHR = PFN_vkCmdBindIndexBuffer2KHR( vkGetInstanceProcAddr( instance, "vkCmdBindIndexBuffer2KHR" ) ); - vkGetRenderingAreaGranularityKHR = PFN_vkGetRenderingAreaGranularityKHR( vkGetInstanceProcAddr( instance, "vkGetRenderingAreaGranularityKHR" ) ); - vkGetDeviceImageSubresourceLayoutKHR = - PFN_vkGetDeviceImageSubresourceLayoutKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceImageSubresourceLayoutKHR" ) ); - vkGetImageSubresourceLayout2KHR = PFN_vkGetImageSubresourceLayout2KHR( vkGetInstanceProcAddr( instance, "vkGetImageSubresourceLayout2KHR" ) ); + PFN_vkCmdBindIndexBuffer2KHR vkCmdBindIndexBuffer2KHR = 0; + PFN_vkGetRenderingAreaGranularityKHR vkGetRenderingAreaGranularityKHR = 0; + PFN_vkGetDeviceImageSubresourceLayoutKHR vkGetDeviceImageSubresourceLayoutKHR = 0; + PFN_vkGetImageSubresourceLayout2KHR vkGetImageSubresourceLayout2KHR = 0; //=== VK_AMD_anti_lag === - vkAntiLagUpdateAMD = PFN_vkAntiLagUpdateAMD( vkGetInstanceProcAddr( instance, "vkAntiLagUpdateAMD" ) ); + PFN_vkAntiLagUpdateAMD vkAntiLagUpdateAMD = 0; //=== VK_EXT_shader_object === - vkCreateShadersEXT = PFN_vkCreateShadersEXT( vkGetInstanceProcAddr( instance, "vkCreateShadersEXT" ) ); - vkDestroyShaderEXT = PFN_vkDestroyShaderEXT( vkGetInstanceProcAddr( instance, "vkDestroyShaderEXT" ) ); - vkGetShaderBinaryDataEXT = PFN_vkGetShaderBinaryDataEXT( vkGetInstanceProcAddr( instance, "vkGetShaderBinaryDataEXT" ) ); - vkCmdBindShadersEXT = PFN_vkCmdBindShadersEXT( vkGetInstanceProcAddr( instance, "vkCmdBindShadersEXT" ) ); + PFN_vkCreateShadersEXT vkCreateShadersEXT = 0; + PFN_vkDestroyShaderEXT vkDestroyShaderEXT = 0; + PFN_vkGetShaderBinaryDataEXT vkGetShaderBinaryDataEXT = 0; + PFN_vkCmdBindShadersEXT vkCmdBindShadersEXT = 0; + PFN_vkCmdSetDepthClampRangeEXT vkCmdSetDepthClampRangeEXT = 0; //=== VK_KHR_pipeline_binary === - vkCreatePipelineBinariesKHR = PFN_vkCreatePipelineBinariesKHR( vkGetInstanceProcAddr( instance, "vkCreatePipelineBinariesKHR" ) ); - vkDestroyPipelineBinaryKHR = PFN_vkDestroyPipelineBinaryKHR( vkGetInstanceProcAddr( instance, "vkDestroyPipelineBinaryKHR" ) ); - vkGetPipelineKeyKHR = PFN_vkGetPipelineKeyKHR( vkGetInstanceProcAddr( instance, "vkGetPipelineKeyKHR" ) ); - vkGetPipelineBinaryDataKHR = PFN_vkGetPipelineBinaryDataKHR( vkGetInstanceProcAddr( instance, "vkGetPipelineBinaryDataKHR" ) ); - vkReleaseCapturedPipelineDataKHR = 
PFN_vkReleaseCapturedPipelineDataKHR( vkGetInstanceProcAddr( instance, "vkReleaseCapturedPipelineDataKHR" ) ); + PFN_vkCreatePipelineBinariesKHR vkCreatePipelineBinariesKHR = 0; + PFN_vkDestroyPipelineBinaryKHR vkDestroyPipelineBinaryKHR = 0; + PFN_vkGetPipelineKeyKHR vkGetPipelineKeyKHR = 0; + PFN_vkGetPipelineBinaryDataKHR vkGetPipelineBinaryDataKHR = 0; + PFN_vkReleaseCapturedPipelineDataKHR vkReleaseCapturedPipelineDataKHR = 0; //=== VK_QCOM_tile_properties === - vkGetFramebufferTilePropertiesQCOM = PFN_vkGetFramebufferTilePropertiesQCOM( vkGetInstanceProcAddr( instance, "vkGetFramebufferTilePropertiesQCOM" ) ); - vkGetDynamicRenderingTilePropertiesQCOM = - PFN_vkGetDynamicRenderingTilePropertiesQCOM( vkGetInstanceProcAddr( instance, "vkGetDynamicRenderingTilePropertiesQCOM" ) ); + PFN_vkGetFramebufferTilePropertiesQCOM vkGetFramebufferTilePropertiesQCOM = 0; + PFN_vkGetDynamicRenderingTilePropertiesQCOM vkGetDynamicRenderingTilePropertiesQCOM = 0; //=== VK_NV_low_latency2 === - vkSetLatencySleepModeNV = PFN_vkSetLatencySleepModeNV( vkGetInstanceProcAddr( instance, "vkSetLatencySleepModeNV" ) ); - vkLatencySleepNV = PFN_vkLatencySleepNV( vkGetInstanceProcAddr( instance, "vkLatencySleepNV" ) ); - vkSetLatencyMarkerNV = PFN_vkSetLatencyMarkerNV( vkGetInstanceProcAddr( instance, "vkSetLatencyMarkerNV" ) ); - vkGetLatencyTimingsNV = PFN_vkGetLatencyTimingsNV( vkGetInstanceProcAddr( instance, "vkGetLatencyTimingsNV" ) ); - vkQueueNotifyOutOfBandNV = PFN_vkQueueNotifyOutOfBandNV( vkGetInstanceProcAddr( instance, "vkQueueNotifyOutOfBandNV" ) ); + PFN_vkSetLatencySleepModeNV vkSetLatencySleepModeNV = 0; + PFN_vkLatencySleepNV vkLatencySleepNV = 0; + PFN_vkSetLatencyMarkerNV vkSetLatencyMarkerNV = 0; + PFN_vkGetLatencyTimingsNV vkGetLatencyTimingsNV = 0; + PFN_vkQueueNotifyOutOfBandNV vkQueueNotifyOutOfBandNV = 0; //=== VK_KHR_cooperative_matrix === - vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR = - PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR" ) ); + PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR = 0; //=== VK_EXT_attachment_feedback_loop_dynamic_state === - vkCmdSetAttachmentFeedbackLoopEnableEXT = - PFN_vkCmdSetAttachmentFeedbackLoopEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetAttachmentFeedbackLoopEnableEXT" ) ); + PFN_vkCmdSetAttachmentFeedbackLoopEnableEXT vkCmdSetAttachmentFeedbackLoopEnableEXT = 0; #if defined( VK_USE_PLATFORM_SCREEN_QNX ) //=== VK_QNX_external_memory_screen_buffer === - vkGetScreenBufferPropertiesQNX = PFN_vkGetScreenBufferPropertiesQNX( vkGetInstanceProcAddr( instance, "vkGetScreenBufferPropertiesQNX" ) ); + PFN_vkGetScreenBufferPropertiesQNX vkGetScreenBufferPropertiesQNX = 0; +#else + PFN_dummy vkGetScreenBufferPropertiesQNX_placeholder = 0; #endif /*VK_USE_PLATFORM_SCREEN_QNX*/ //=== VK_KHR_line_rasterization === - vkCmdSetLineStippleKHR = PFN_vkCmdSetLineStippleKHR( vkGetInstanceProcAddr( instance, "vkCmdSetLineStippleKHR" ) ); + PFN_vkCmdSetLineStippleKHR vkCmdSetLineStippleKHR = 0; //=== VK_KHR_calibrated_timestamps === - vkGetPhysicalDeviceCalibrateableTimeDomainsKHR = - PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCalibrateableTimeDomainsKHR" ) ); - vkGetCalibratedTimestampsKHR = PFN_vkGetCalibratedTimestampsKHR( vkGetInstanceProcAddr( instance, "vkGetCalibratedTimestampsKHR" ) ); + PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsKHR 
vkGetPhysicalDeviceCalibrateableTimeDomainsKHR = 0; + PFN_vkGetCalibratedTimestampsKHR vkGetCalibratedTimestampsKHR = 0; //=== VK_KHR_maintenance6 === - vkCmdBindDescriptorSets2KHR = PFN_vkCmdBindDescriptorSets2KHR( vkGetInstanceProcAddr( instance, "vkCmdBindDescriptorSets2KHR" ) ); - vkCmdPushConstants2KHR = PFN_vkCmdPushConstants2KHR( vkGetInstanceProcAddr( instance, "vkCmdPushConstants2KHR" ) ); - vkCmdPushDescriptorSet2KHR = PFN_vkCmdPushDescriptorSet2KHR( vkGetInstanceProcAddr( instance, "vkCmdPushDescriptorSet2KHR" ) ); - vkCmdPushDescriptorSetWithTemplate2KHR = - PFN_vkCmdPushDescriptorSetWithTemplate2KHR( vkGetInstanceProcAddr( instance, "vkCmdPushDescriptorSetWithTemplate2KHR" ) ); - vkCmdSetDescriptorBufferOffsets2EXT = PFN_vkCmdSetDescriptorBufferOffsets2EXT( vkGetInstanceProcAddr( instance, "vkCmdSetDescriptorBufferOffsets2EXT" ) ); - vkCmdBindDescriptorBufferEmbeddedSamplers2EXT = - PFN_vkCmdBindDescriptorBufferEmbeddedSamplers2EXT( vkGetInstanceProcAddr( instance, "vkCmdBindDescriptorBufferEmbeddedSamplers2EXT" ) ); - } + PFN_vkCmdBindDescriptorSets2KHR vkCmdBindDescriptorSets2KHR = 0; + PFN_vkCmdPushConstants2KHR vkCmdPushConstants2KHR = 0; + PFN_vkCmdPushDescriptorSet2KHR vkCmdPushDescriptorSet2KHR = 0; + PFN_vkCmdPushDescriptorSetWithTemplate2KHR vkCmdPushDescriptorSetWithTemplate2KHR = 0; + PFN_vkCmdSetDescriptorBufferOffsets2EXT vkCmdSetDescriptorBufferOffsets2EXT = 0; + PFN_vkCmdBindDescriptorBufferEmbeddedSamplers2EXT vkCmdBindDescriptorBufferEmbeddedSamplers2EXT = 0; - void init( VULKAN_HPP_NAMESPACE::Device deviceCpp ) VULKAN_HPP_NOEXCEPT - { - VkDevice device = static_cast( deviceCpp ); + //=== VK_EXT_device_generated_commands === + PFN_vkGetGeneratedCommandsMemoryRequirementsEXT vkGetGeneratedCommandsMemoryRequirementsEXT = 0; + PFN_vkCmdPreprocessGeneratedCommandsEXT vkCmdPreprocessGeneratedCommandsEXT = 0; + PFN_vkCmdExecuteGeneratedCommandsEXT vkCmdExecuteGeneratedCommandsEXT = 0; + PFN_vkCreateIndirectCommandsLayoutEXT vkCreateIndirectCommandsLayoutEXT = 0; + PFN_vkDestroyIndirectCommandsLayoutEXT vkDestroyIndirectCommandsLayoutEXT = 0; + PFN_vkCreateIndirectExecutionSetEXT vkCreateIndirectExecutionSetEXT = 0; + PFN_vkDestroyIndirectExecutionSetEXT vkDestroyIndirectExecutionSetEXT = 0; + PFN_vkUpdateIndirectExecutionSetPipelineEXT vkUpdateIndirectExecutionSetPipelineEXT = 0; + PFN_vkUpdateIndirectExecutionSetShaderEXT vkUpdateIndirectExecutionSetShaderEXT = 0; - //=== VK_VERSION_1_0 === - vkGetDeviceProcAddr = PFN_vkGetDeviceProcAddr( vkGetDeviceProcAddr( device, "vkGetDeviceProcAddr" ) ); - vkDestroyDevice = PFN_vkDestroyDevice( vkGetDeviceProcAddr( device, "vkDestroyDevice" ) ); - vkGetDeviceQueue = PFN_vkGetDeviceQueue( vkGetDeviceProcAddr( device, "vkGetDeviceQueue" ) ); - vkQueueSubmit = PFN_vkQueueSubmit( vkGetDeviceProcAddr( device, "vkQueueSubmit" ) ); - vkQueueWaitIdle = PFN_vkQueueWaitIdle( vkGetDeviceProcAddr( device, "vkQueueWaitIdle" ) ); - vkDeviceWaitIdle = PFN_vkDeviceWaitIdle( vkGetDeviceProcAddr( device, "vkDeviceWaitIdle" ) ); - vkAllocateMemory = PFN_vkAllocateMemory( vkGetDeviceProcAddr( device, "vkAllocateMemory" ) ); - vkFreeMemory = PFN_vkFreeMemory( vkGetDeviceProcAddr( device, "vkFreeMemory" ) ); - vkMapMemory = PFN_vkMapMemory( vkGetDeviceProcAddr( device, "vkMapMemory" ) ); - vkUnmapMemory = PFN_vkUnmapMemory( vkGetDeviceProcAddr( device, "vkUnmapMemory" ) ); - vkFlushMappedMemoryRanges = PFN_vkFlushMappedMemoryRanges( vkGetDeviceProcAddr( device, "vkFlushMappedMemoryRanges" ) ); - vkInvalidateMappedMemoryRanges = 
PFN_vkInvalidateMappedMemoryRanges( vkGetDeviceProcAddr( device, "vkInvalidateMappedMemoryRanges" ) ); - vkGetDeviceMemoryCommitment = PFN_vkGetDeviceMemoryCommitment( vkGetDeviceProcAddr( device, "vkGetDeviceMemoryCommitment" ) ); - vkBindBufferMemory = PFN_vkBindBufferMemory( vkGetDeviceProcAddr( device, "vkBindBufferMemory" ) ); - vkBindImageMemory = PFN_vkBindImageMemory( vkGetDeviceProcAddr( device, "vkBindImageMemory" ) ); - vkGetBufferMemoryRequirements = PFN_vkGetBufferMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements" ) ); - vkGetImageMemoryRequirements = PFN_vkGetImageMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements" ) ); - vkGetImageSparseMemoryRequirements = PFN_vkGetImageSparseMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements" ) ); - vkQueueBindSparse = PFN_vkQueueBindSparse( vkGetDeviceProcAddr( device, "vkQueueBindSparse" ) ); - vkCreateFence = PFN_vkCreateFence( vkGetDeviceProcAddr( device, "vkCreateFence" ) ); - vkDestroyFence = PFN_vkDestroyFence( vkGetDeviceProcAddr( device, "vkDestroyFence" ) ); - vkResetFences = PFN_vkResetFences( vkGetDeviceProcAddr( device, "vkResetFences" ) ); - vkGetFenceStatus = PFN_vkGetFenceStatus( vkGetDeviceProcAddr( device, "vkGetFenceStatus" ) ); - vkWaitForFences = PFN_vkWaitForFences( vkGetDeviceProcAddr( device, "vkWaitForFences" ) ); - vkCreateSemaphore = PFN_vkCreateSemaphore( vkGetDeviceProcAddr( device, "vkCreateSemaphore" ) ); - vkDestroySemaphore = PFN_vkDestroySemaphore( vkGetDeviceProcAddr( device, "vkDestroySemaphore" ) ); - vkCreateEvent = PFN_vkCreateEvent( vkGetDeviceProcAddr( device, "vkCreateEvent" ) ); - vkDestroyEvent = PFN_vkDestroyEvent( vkGetDeviceProcAddr( device, "vkDestroyEvent" ) ); - vkGetEventStatus = PFN_vkGetEventStatus( vkGetDeviceProcAddr( device, "vkGetEventStatus" ) ); - vkSetEvent = PFN_vkSetEvent( vkGetDeviceProcAddr( device, "vkSetEvent" ) ); - vkResetEvent = PFN_vkResetEvent( vkGetDeviceProcAddr( device, "vkResetEvent" ) ); - vkCreateQueryPool = PFN_vkCreateQueryPool( vkGetDeviceProcAddr( device, "vkCreateQueryPool" ) ); - vkDestroyQueryPool = PFN_vkDestroyQueryPool( vkGetDeviceProcAddr( device, "vkDestroyQueryPool" ) ); - vkGetQueryPoolResults = PFN_vkGetQueryPoolResults( vkGetDeviceProcAddr( device, "vkGetQueryPoolResults" ) ); - vkCreateBuffer = PFN_vkCreateBuffer( vkGetDeviceProcAddr( device, "vkCreateBuffer" ) ); - vkDestroyBuffer = PFN_vkDestroyBuffer( vkGetDeviceProcAddr( device, "vkDestroyBuffer" ) ); - vkCreateBufferView = PFN_vkCreateBufferView( vkGetDeviceProcAddr( device, "vkCreateBufferView" ) ); - vkDestroyBufferView = PFN_vkDestroyBufferView( vkGetDeviceProcAddr( device, "vkDestroyBufferView" ) ); - vkCreateImage = PFN_vkCreateImage( vkGetDeviceProcAddr( device, "vkCreateImage" ) ); - vkDestroyImage = PFN_vkDestroyImage( vkGetDeviceProcAddr( device, "vkDestroyImage" ) ); - vkGetImageSubresourceLayout = PFN_vkGetImageSubresourceLayout( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout" ) ); - vkCreateImageView = PFN_vkCreateImageView( vkGetDeviceProcAddr( device, "vkCreateImageView" ) ); - vkDestroyImageView = PFN_vkDestroyImageView( vkGetDeviceProcAddr( device, "vkDestroyImageView" ) ); - vkCreateShaderModule = PFN_vkCreateShaderModule( vkGetDeviceProcAddr( device, "vkCreateShaderModule" ) ); - vkDestroyShaderModule = PFN_vkDestroyShaderModule( vkGetDeviceProcAddr( device, "vkDestroyShaderModule" ) ); - vkCreatePipelineCache = PFN_vkCreatePipelineCache( vkGetDeviceProcAddr( device, 
"vkCreatePipelineCache" ) ); - vkDestroyPipelineCache = PFN_vkDestroyPipelineCache( vkGetDeviceProcAddr( device, "vkDestroyPipelineCache" ) ); - vkGetPipelineCacheData = PFN_vkGetPipelineCacheData( vkGetDeviceProcAddr( device, "vkGetPipelineCacheData" ) ); - vkMergePipelineCaches = PFN_vkMergePipelineCaches( vkGetDeviceProcAddr( device, "vkMergePipelineCaches" ) ); - vkCreateGraphicsPipelines = PFN_vkCreateGraphicsPipelines( vkGetDeviceProcAddr( device, "vkCreateGraphicsPipelines" ) ); - vkCreateComputePipelines = PFN_vkCreateComputePipelines( vkGetDeviceProcAddr( device, "vkCreateComputePipelines" ) ); - vkDestroyPipeline = PFN_vkDestroyPipeline( vkGetDeviceProcAddr( device, "vkDestroyPipeline" ) ); - vkCreatePipelineLayout = PFN_vkCreatePipelineLayout( vkGetDeviceProcAddr( device, "vkCreatePipelineLayout" ) ); - vkDestroyPipelineLayout = PFN_vkDestroyPipelineLayout( vkGetDeviceProcAddr( device, "vkDestroyPipelineLayout" ) ); - vkCreateSampler = PFN_vkCreateSampler( vkGetDeviceProcAddr( device, "vkCreateSampler" ) ); - vkDestroySampler = PFN_vkDestroySampler( vkGetDeviceProcAddr( device, "vkDestroySampler" ) ); - vkCreateDescriptorSetLayout = PFN_vkCreateDescriptorSetLayout( vkGetDeviceProcAddr( device, "vkCreateDescriptorSetLayout" ) ); - vkDestroyDescriptorSetLayout = PFN_vkDestroyDescriptorSetLayout( vkGetDeviceProcAddr( device, "vkDestroyDescriptorSetLayout" ) ); - vkCreateDescriptorPool = PFN_vkCreateDescriptorPool( vkGetDeviceProcAddr( device, "vkCreateDescriptorPool" ) ); - vkDestroyDescriptorPool = PFN_vkDestroyDescriptorPool( vkGetDeviceProcAddr( device, "vkDestroyDescriptorPool" ) ); - vkResetDescriptorPool = PFN_vkResetDescriptorPool( vkGetDeviceProcAddr( device, "vkResetDescriptorPool" ) ); - vkAllocateDescriptorSets = PFN_vkAllocateDescriptorSets( vkGetDeviceProcAddr( device, "vkAllocateDescriptorSets" ) ); - vkFreeDescriptorSets = PFN_vkFreeDescriptorSets( vkGetDeviceProcAddr( device, "vkFreeDescriptorSets" ) ); - vkUpdateDescriptorSets = PFN_vkUpdateDescriptorSets( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSets" ) ); - vkCreateFramebuffer = PFN_vkCreateFramebuffer( vkGetDeviceProcAddr( device, "vkCreateFramebuffer" ) ); - vkDestroyFramebuffer = PFN_vkDestroyFramebuffer( vkGetDeviceProcAddr( device, "vkDestroyFramebuffer" ) ); - vkCreateRenderPass = PFN_vkCreateRenderPass( vkGetDeviceProcAddr( device, "vkCreateRenderPass" ) ); - vkDestroyRenderPass = PFN_vkDestroyRenderPass( vkGetDeviceProcAddr( device, "vkDestroyRenderPass" ) ); - vkGetRenderAreaGranularity = PFN_vkGetRenderAreaGranularity( vkGetDeviceProcAddr( device, "vkGetRenderAreaGranularity" ) ); - vkCreateCommandPool = PFN_vkCreateCommandPool( vkGetDeviceProcAddr( device, "vkCreateCommandPool" ) ); - vkDestroyCommandPool = PFN_vkDestroyCommandPool( vkGetDeviceProcAddr( device, "vkDestroyCommandPool" ) ); - vkResetCommandPool = PFN_vkResetCommandPool( vkGetDeviceProcAddr( device, "vkResetCommandPool" ) ); - vkAllocateCommandBuffers = PFN_vkAllocateCommandBuffers( vkGetDeviceProcAddr( device, "vkAllocateCommandBuffers" ) ); - vkFreeCommandBuffers = PFN_vkFreeCommandBuffers( vkGetDeviceProcAddr( device, "vkFreeCommandBuffers" ) ); - vkBeginCommandBuffer = PFN_vkBeginCommandBuffer( vkGetDeviceProcAddr( device, "vkBeginCommandBuffer" ) ); - vkEndCommandBuffer = PFN_vkEndCommandBuffer( vkGetDeviceProcAddr( device, "vkEndCommandBuffer" ) ); - vkResetCommandBuffer = PFN_vkResetCommandBuffer( vkGetDeviceProcAddr( device, "vkResetCommandBuffer" ) ); - vkCmdBindPipeline = PFN_vkCmdBindPipeline( vkGetDeviceProcAddr( 
device, "vkCmdBindPipeline" ) ); - vkCmdSetViewport = PFN_vkCmdSetViewport( vkGetDeviceProcAddr( device, "vkCmdSetViewport" ) ); - vkCmdSetScissor = PFN_vkCmdSetScissor( vkGetDeviceProcAddr( device, "vkCmdSetScissor" ) ); - vkCmdSetLineWidth = PFN_vkCmdSetLineWidth( vkGetDeviceProcAddr( device, "vkCmdSetLineWidth" ) ); - vkCmdSetDepthBias = PFN_vkCmdSetDepthBias( vkGetDeviceProcAddr( device, "vkCmdSetDepthBias" ) ); - vkCmdSetBlendConstants = PFN_vkCmdSetBlendConstants( vkGetDeviceProcAddr( device, "vkCmdSetBlendConstants" ) ); - vkCmdSetDepthBounds = PFN_vkCmdSetDepthBounds( vkGetDeviceProcAddr( device, "vkCmdSetDepthBounds" ) ); - vkCmdSetStencilCompareMask = PFN_vkCmdSetStencilCompareMask( vkGetDeviceProcAddr( device, "vkCmdSetStencilCompareMask" ) ); - vkCmdSetStencilWriteMask = PFN_vkCmdSetStencilWriteMask( vkGetDeviceProcAddr( device, "vkCmdSetStencilWriteMask" ) ); - vkCmdSetStencilReference = PFN_vkCmdSetStencilReference( vkGetDeviceProcAddr( device, "vkCmdSetStencilReference" ) ); - vkCmdBindDescriptorSets = PFN_vkCmdBindDescriptorSets( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorSets" ) ); - vkCmdBindIndexBuffer = PFN_vkCmdBindIndexBuffer( vkGetDeviceProcAddr( device, "vkCmdBindIndexBuffer" ) ); - vkCmdBindVertexBuffers = PFN_vkCmdBindVertexBuffers( vkGetDeviceProcAddr( device, "vkCmdBindVertexBuffers" ) ); - vkCmdDraw = PFN_vkCmdDraw( vkGetDeviceProcAddr( device, "vkCmdDraw" ) ); - vkCmdDrawIndexed = PFN_vkCmdDrawIndexed( vkGetDeviceProcAddr( device, "vkCmdDrawIndexed" ) ); - vkCmdDrawIndirect = PFN_vkCmdDrawIndirect( vkGetDeviceProcAddr( device, "vkCmdDrawIndirect" ) ); - vkCmdDrawIndexedIndirect = PFN_vkCmdDrawIndexedIndirect( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirect" ) ); - vkCmdDispatch = PFN_vkCmdDispatch( vkGetDeviceProcAddr( device, "vkCmdDispatch" ) ); - vkCmdDispatchIndirect = PFN_vkCmdDispatchIndirect( vkGetDeviceProcAddr( device, "vkCmdDispatchIndirect" ) ); - vkCmdCopyBuffer = PFN_vkCmdCopyBuffer( vkGetDeviceProcAddr( device, "vkCmdCopyBuffer" ) ); - vkCmdCopyImage = PFN_vkCmdCopyImage( vkGetDeviceProcAddr( device, "vkCmdCopyImage" ) ); - vkCmdBlitImage = PFN_vkCmdBlitImage( vkGetDeviceProcAddr( device, "vkCmdBlitImage" ) ); - vkCmdCopyBufferToImage = PFN_vkCmdCopyBufferToImage( vkGetDeviceProcAddr( device, "vkCmdCopyBufferToImage" ) ); - vkCmdCopyImageToBuffer = PFN_vkCmdCopyImageToBuffer( vkGetDeviceProcAddr( device, "vkCmdCopyImageToBuffer" ) ); - vkCmdUpdateBuffer = PFN_vkCmdUpdateBuffer( vkGetDeviceProcAddr( device, "vkCmdUpdateBuffer" ) ); - vkCmdFillBuffer = PFN_vkCmdFillBuffer( vkGetDeviceProcAddr( device, "vkCmdFillBuffer" ) ); - vkCmdClearColorImage = PFN_vkCmdClearColorImage( vkGetDeviceProcAddr( device, "vkCmdClearColorImage" ) ); - vkCmdClearDepthStencilImage = PFN_vkCmdClearDepthStencilImage( vkGetDeviceProcAddr( device, "vkCmdClearDepthStencilImage" ) ); - vkCmdClearAttachments = PFN_vkCmdClearAttachments( vkGetDeviceProcAddr( device, "vkCmdClearAttachments" ) ); - vkCmdResolveImage = PFN_vkCmdResolveImage( vkGetDeviceProcAddr( device, "vkCmdResolveImage" ) ); - vkCmdSetEvent = PFN_vkCmdSetEvent( vkGetDeviceProcAddr( device, "vkCmdSetEvent" ) ); - vkCmdResetEvent = PFN_vkCmdResetEvent( vkGetDeviceProcAddr( device, "vkCmdResetEvent" ) ); - vkCmdWaitEvents = PFN_vkCmdWaitEvents( vkGetDeviceProcAddr( device, "vkCmdWaitEvents" ) ); - vkCmdPipelineBarrier = PFN_vkCmdPipelineBarrier( vkGetDeviceProcAddr( device, "vkCmdPipelineBarrier" ) ); - vkCmdBeginQuery = PFN_vkCmdBeginQuery( vkGetDeviceProcAddr( device, "vkCmdBeginQuery" ) ); - 
vkCmdEndQuery = PFN_vkCmdEndQuery( vkGetDeviceProcAddr( device, "vkCmdEndQuery" ) ); - vkCmdResetQueryPool = PFN_vkCmdResetQueryPool( vkGetDeviceProcAddr( device, "vkCmdResetQueryPool" ) ); - vkCmdWriteTimestamp = PFN_vkCmdWriteTimestamp( vkGetDeviceProcAddr( device, "vkCmdWriteTimestamp" ) ); - vkCmdCopyQueryPoolResults = PFN_vkCmdCopyQueryPoolResults( vkGetDeviceProcAddr( device, "vkCmdCopyQueryPoolResults" ) ); - vkCmdPushConstants = PFN_vkCmdPushConstants( vkGetDeviceProcAddr( device, "vkCmdPushConstants" ) ); - vkCmdBeginRenderPass = PFN_vkCmdBeginRenderPass( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass" ) ); - vkCmdNextSubpass = PFN_vkCmdNextSubpass( vkGetDeviceProcAddr( device, "vkCmdNextSubpass" ) ); - vkCmdEndRenderPass = PFN_vkCmdEndRenderPass( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass" ) ); - vkCmdExecuteCommands = PFN_vkCmdExecuteCommands( vkGetDeviceProcAddr( device, "vkCmdExecuteCommands" ) ); + //=== VK_NV_cooperative_matrix2 === + PFN_vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV = 0; - //=== VK_VERSION_1_1 === - vkBindBufferMemory2 = PFN_vkBindBufferMemory2( vkGetDeviceProcAddr( device, "vkBindBufferMemory2" ) ); - vkBindImageMemory2 = PFN_vkBindImageMemory2( vkGetDeviceProcAddr( device, "vkBindImageMemory2" ) ); - vkGetDeviceGroupPeerMemoryFeatures = PFN_vkGetDeviceGroupPeerMemoryFeatures( vkGetDeviceProcAddr( device, "vkGetDeviceGroupPeerMemoryFeatures" ) ); - vkCmdSetDeviceMask = PFN_vkCmdSetDeviceMask( vkGetDeviceProcAddr( device, "vkCmdSetDeviceMask" ) ); - vkCmdDispatchBase = PFN_vkCmdDispatchBase( vkGetDeviceProcAddr( device, "vkCmdDispatchBase" ) ); - vkGetImageMemoryRequirements2 = PFN_vkGetImageMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements2" ) ); - vkGetBufferMemoryRequirements2 = PFN_vkGetBufferMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements2" ) ); - vkGetImageSparseMemoryRequirements2 = PFN_vkGetImageSparseMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements2" ) ); - vkTrimCommandPool = PFN_vkTrimCommandPool( vkGetDeviceProcAddr( device, "vkTrimCommandPool" ) ); - vkGetDeviceQueue2 = PFN_vkGetDeviceQueue2( vkGetDeviceProcAddr( device, "vkGetDeviceQueue2" ) ); - vkCreateSamplerYcbcrConversion = PFN_vkCreateSamplerYcbcrConversion( vkGetDeviceProcAddr( device, "vkCreateSamplerYcbcrConversion" ) ); - vkDestroySamplerYcbcrConversion = PFN_vkDestroySamplerYcbcrConversion( vkGetDeviceProcAddr( device, "vkDestroySamplerYcbcrConversion" ) ); - vkCreateDescriptorUpdateTemplate = PFN_vkCreateDescriptorUpdateTemplate( vkGetDeviceProcAddr( device, "vkCreateDescriptorUpdateTemplate" ) ); - vkDestroyDescriptorUpdateTemplate = PFN_vkDestroyDescriptorUpdateTemplate( vkGetDeviceProcAddr( device, "vkDestroyDescriptorUpdateTemplate" ) ); - vkUpdateDescriptorSetWithTemplate = PFN_vkUpdateDescriptorSetWithTemplate( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSetWithTemplate" ) ); - vkGetDescriptorSetLayoutSupport = PFN_vkGetDescriptorSetLayoutSupport( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutSupport" ) ); + public: + DispatchLoaderDynamic() VULKAN_HPP_NOEXCEPT = default; + DispatchLoaderDynamic( DispatchLoaderDynamic const & rhs ) VULKAN_HPP_NOEXCEPT = default; - //=== VK_VERSION_1_2 === - vkCmdDrawIndirectCount = PFN_vkCmdDrawIndirectCount( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCount" ) ); - vkCmdDrawIndexedIndirectCount = PFN_vkCmdDrawIndexedIndirectCount( 
vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCount" ) ); - vkCreateRenderPass2 = PFN_vkCreateRenderPass2( vkGetDeviceProcAddr( device, "vkCreateRenderPass2" ) ); - vkCmdBeginRenderPass2 = PFN_vkCmdBeginRenderPass2( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass2" ) ); - vkCmdNextSubpass2 = PFN_vkCmdNextSubpass2( vkGetDeviceProcAddr( device, "vkCmdNextSubpass2" ) ); - vkCmdEndRenderPass2 = PFN_vkCmdEndRenderPass2( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass2" ) ); - vkResetQueryPool = PFN_vkResetQueryPool( vkGetDeviceProcAddr( device, "vkResetQueryPool" ) ); - vkGetSemaphoreCounterValue = PFN_vkGetSemaphoreCounterValue( vkGetDeviceProcAddr( device, "vkGetSemaphoreCounterValue" ) ); - vkWaitSemaphores = PFN_vkWaitSemaphores( vkGetDeviceProcAddr( device, "vkWaitSemaphores" ) ); - vkSignalSemaphore = PFN_vkSignalSemaphore( vkGetDeviceProcAddr( device, "vkSignalSemaphore" ) ); - vkGetBufferDeviceAddress = PFN_vkGetBufferDeviceAddress( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddress" ) ); - vkGetBufferOpaqueCaptureAddress = PFN_vkGetBufferOpaqueCaptureAddress( vkGetDeviceProcAddr( device, "vkGetBufferOpaqueCaptureAddress" ) ); - vkGetDeviceMemoryOpaqueCaptureAddress = - PFN_vkGetDeviceMemoryOpaqueCaptureAddress( vkGetDeviceProcAddr( device, "vkGetDeviceMemoryOpaqueCaptureAddress" ) ); + DispatchLoaderDynamic( PFN_vkGetInstanceProcAddr getInstanceProcAddr ) VULKAN_HPP_NOEXCEPT + { + init( getInstanceProcAddr ); + } - //=== VK_VERSION_1_3 === - vkCreatePrivateDataSlot = PFN_vkCreatePrivateDataSlot( vkGetDeviceProcAddr( device, "vkCreatePrivateDataSlot" ) ); - vkDestroyPrivateDataSlot = PFN_vkDestroyPrivateDataSlot( vkGetDeviceProcAddr( device, "vkDestroyPrivateDataSlot" ) ); - vkSetPrivateData = PFN_vkSetPrivateData( vkGetDeviceProcAddr( device, "vkSetPrivateData" ) ); - vkGetPrivateData = PFN_vkGetPrivateData( vkGetDeviceProcAddr( device, "vkGetPrivateData" ) ); - vkCmdSetEvent2 = PFN_vkCmdSetEvent2( vkGetDeviceProcAddr( device, "vkCmdSetEvent2" ) ); - vkCmdResetEvent2 = PFN_vkCmdResetEvent2( vkGetDeviceProcAddr( device, "vkCmdResetEvent2" ) ); - vkCmdWaitEvents2 = PFN_vkCmdWaitEvents2( vkGetDeviceProcAddr( device, "vkCmdWaitEvents2" ) ); - vkCmdPipelineBarrier2 = PFN_vkCmdPipelineBarrier2( vkGetDeviceProcAddr( device, "vkCmdPipelineBarrier2" ) ); - vkCmdWriteTimestamp2 = PFN_vkCmdWriteTimestamp2( vkGetDeviceProcAddr( device, "vkCmdWriteTimestamp2" ) ); - vkQueueSubmit2 = PFN_vkQueueSubmit2( vkGetDeviceProcAddr( device, "vkQueueSubmit2" ) ); - vkCmdCopyBuffer2 = PFN_vkCmdCopyBuffer2( vkGetDeviceProcAddr( device, "vkCmdCopyBuffer2" ) ); - vkCmdCopyImage2 = PFN_vkCmdCopyImage2( vkGetDeviceProcAddr( device, "vkCmdCopyImage2" ) ); - vkCmdCopyBufferToImage2 = PFN_vkCmdCopyBufferToImage2( vkGetDeviceProcAddr( device, "vkCmdCopyBufferToImage2" ) ); - vkCmdCopyImageToBuffer2 = PFN_vkCmdCopyImageToBuffer2( vkGetDeviceProcAddr( device, "vkCmdCopyImageToBuffer2" ) ); - vkCmdBlitImage2 = PFN_vkCmdBlitImage2( vkGetDeviceProcAddr( device, "vkCmdBlitImage2" ) ); - vkCmdResolveImage2 = PFN_vkCmdResolveImage2( vkGetDeviceProcAddr( device, "vkCmdResolveImage2" ) ); - vkCmdBeginRendering = PFN_vkCmdBeginRendering( vkGetDeviceProcAddr( device, "vkCmdBeginRendering" ) ); - vkCmdEndRendering = PFN_vkCmdEndRendering( vkGetDeviceProcAddr( device, "vkCmdEndRendering" ) ); - vkCmdSetCullMode = PFN_vkCmdSetCullMode( vkGetDeviceProcAddr( device, "vkCmdSetCullMode" ) ); - vkCmdSetFrontFace = PFN_vkCmdSetFrontFace( vkGetDeviceProcAddr( device, "vkCmdSetFrontFace" ) ); - vkCmdSetPrimitiveTopology = 
PFN_vkCmdSetPrimitiveTopology( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveTopology" ) ); - vkCmdSetViewportWithCount = PFN_vkCmdSetViewportWithCount( vkGetDeviceProcAddr( device, "vkCmdSetViewportWithCount" ) ); - vkCmdSetScissorWithCount = PFN_vkCmdSetScissorWithCount( vkGetDeviceProcAddr( device, "vkCmdSetScissorWithCount" ) ); - vkCmdBindVertexBuffers2 = PFN_vkCmdBindVertexBuffers2( vkGetDeviceProcAddr( device, "vkCmdBindVertexBuffers2" ) ); - vkCmdSetDepthTestEnable = PFN_vkCmdSetDepthTestEnable( vkGetDeviceProcAddr( device, "vkCmdSetDepthTestEnable" ) ); - vkCmdSetDepthWriteEnable = PFN_vkCmdSetDepthWriteEnable( vkGetDeviceProcAddr( device, "vkCmdSetDepthWriteEnable" ) ); - vkCmdSetDepthCompareOp = PFN_vkCmdSetDepthCompareOp( vkGetDeviceProcAddr( device, "vkCmdSetDepthCompareOp" ) ); - vkCmdSetDepthBoundsTestEnable = PFN_vkCmdSetDepthBoundsTestEnable( vkGetDeviceProcAddr( device, "vkCmdSetDepthBoundsTestEnable" ) ); - vkCmdSetStencilTestEnable = PFN_vkCmdSetStencilTestEnable( vkGetDeviceProcAddr( device, "vkCmdSetStencilTestEnable" ) ); - vkCmdSetStencilOp = PFN_vkCmdSetStencilOp( vkGetDeviceProcAddr( device, "vkCmdSetStencilOp" ) ); - vkCmdSetRasterizerDiscardEnable = PFN_vkCmdSetRasterizerDiscardEnable( vkGetDeviceProcAddr( device, "vkCmdSetRasterizerDiscardEnable" ) ); - vkCmdSetDepthBiasEnable = PFN_vkCmdSetDepthBiasEnable( vkGetDeviceProcAddr( device, "vkCmdSetDepthBiasEnable" ) ); - vkCmdSetPrimitiveRestartEnable = PFN_vkCmdSetPrimitiveRestartEnable( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveRestartEnable" ) ); - vkGetDeviceBufferMemoryRequirements = PFN_vkGetDeviceBufferMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetDeviceBufferMemoryRequirements" ) ); - vkGetDeviceImageMemoryRequirements = PFN_vkGetDeviceImageMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetDeviceImageMemoryRequirements" ) ); - vkGetDeviceImageSparseMemoryRequirements = - PFN_vkGetDeviceImageSparseMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetDeviceImageSparseMemoryRequirements" ) ); + // This interface does not require a linked vulkan library. 
+ DispatchLoaderDynamic( VkInstance instance, + PFN_vkGetInstanceProcAddr getInstanceProcAddr, + VkDevice device = {}, + PFN_vkGetDeviceProcAddr getDeviceProcAddr = nullptr ) VULKAN_HPP_NOEXCEPT + { + init( instance, getInstanceProcAddr, device, getDeviceProcAddr ); + } - //=== VK_KHR_swapchain === - vkCreateSwapchainKHR = PFN_vkCreateSwapchainKHR( vkGetDeviceProcAddr( device, "vkCreateSwapchainKHR" ) ); - vkDestroySwapchainKHR = PFN_vkDestroySwapchainKHR( vkGetDeviceProcAddr( device, "vkDestroySwapchainKHR" ) ); - vkGetSwapchainImagesKHR = PFN_vkGetSwapchainImagesKHR( vkGetDeviceProcAddr( device, "vkGetSwapchainImagesKHR" ) ); - vkAcquireNextImageKHR = PFN_vkAcquireNextImageKHR( vkGetDeviceProcAddr( device, "vkAcquireNextImageKHR" ) ); - vkQueuePresentKHR = PFN_vkQueuePresentKHR( vkGetDeviceProcAddr( device, "vkQueuePresentKHR" ) ); - vkGetDeviceGroupPresentCapabilitiesKHR = - PFN_vkGetDeviceGroupPresentCapabilitiesKHR( vkGetDeviceProcAddr( device, "vkGetDeviceGroupPresentCapabilitiesKHR" ) ); - vkGetDeviceGroupSurfacePresentModesKHR = - PFN_vkGetDeviceGroupSurfacePresentModesKHR( vkGetDeviceProcAddr( device, "vkGetDeviceGroupSurfacePresentModesKHR" ) ); - vkAcquireNextImage2KHR = PFN_vkAcquireNextImage2KHR( vkGetDeviceProcAddr( device, "vkAcquireNextImage2KHR" ) ); + template + void init() + { + static DynamicLoader dl; + init( dl ); + } - //=== VK_KHR_display_swapchain === - vkCreateSharedSwapchainsKHR = PFN_vkCreateSharedSwapchainsKHR( vkGetDeviceProcAddr( device, "vkCreateSharedSwapchainsKHR" ) ); + template + void init( DynamicLoader const & dl ) VULKAN_HPP_NOEXCEPT + { + PFN_vkGetInstanceProcAddr getInstanceProcAddr = dl.template getProcAddress( "vkGetInstanceProcAddr" ); + init( getInstanceProcAddr ); + } - //=== VK_EXT_debug_marker === - vkDebugMarkerSetObjectTagEXT = PFN_vkDebugMarkerSetObjectTagEXT( vkGetDeviceProcAddr( device, "vkDebugMarkerSetObjectTagEXT" ) ); - vkDebugMarkerSetObjectNameEXT = PFN_vkDebugMarkerSetObjectNameEXT( vkGetDeviceProcAddr( device, "vkDebugMarkerSetObjectNameEXT" ) ); - vkCmdDebugMarkerBeginEXT = PFN_vkCmdDebugMarkerBeginEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerBeginEXT" ) ); - vkCmdDebugMarkerEndEXT = PFN_vkCmdDebugMarkerEndEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerEndEXT" ) ); - vkCmdDebugMarkerInsertEXT = PFN_vkCmdDebugMarkerInsertEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerInsertEXT" ) ); + void init( PFN_vkGetInstanceProcAddr getInstanceProcAddr ) VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getInstanceProcAddr ); - //=== VK_KHR_video_queue === - vkCreateVideoSessionKHR = PFN_vkCreateVideoSessionKHR( vkGetDeviceProcAddr( device, "vkCreateVideoSessionKHR" ) ); - vkDestroyVideoSessionKHR = PFN_vkDestroyVideoSessionKHR( vkGetDeviceProcAddr( device, "vkDestroyVideoSessionKHR" ) ); - vkGetVideoSessionMemoryRequirementsKHR = - PFN_vkGetVideoSessionMemoryRequirementsKHR( vkGetDeviceProcAddr( device, "vkGetVideoSessionMemoryRequirementsKHR" ) ); - vkBindVideoSessionMemoryKHR = PFN_vkBindVideoSessionMemoryKHR( vkGetDeviceProcAddr( device, "vkBindVideoSessionMemoryKHR" ) ); - vkCreateVideoSessionParametersKHR = PFN_vkCreateVideoSessionParametersKHR( vkGetDeviceProcAddr( device, "vkCreateVideoSessionParametersKHR" ) ); - vkUpdateVideoSessionParametersKHR = PFN_vkUpdateVideoSessionParametersKHR( vkGetDeviceProcAddr( device, "vkUpdateVideoSessionParametersKHR" ) ); - vkDestroyVideoSessionParametersKHR = PFN_vkDestroyVideoSessionParametersKHR( vkGetDeviceProcAddr( device, "vkDestroyVideoSessionParametersKHR" ) ); - 
vkCmdBeginVideoCodingKHR = PFN_vkCmdBeginVideoCodingKHR( vkGetDeviceProcAddr( device, "vkCmdBeginVideoCodingKHR" ) ); - vkCmdEndVideoCodingKHR = PFN_vkCmdEndVideoCodingKHR( vkGetDeviceProcAddr( device, "vkCmdEndVideoCodingKHR" ) ); - vkCmdControlVideoCodingKHR = PFN_vkCmdControlVideoCodingKHR( vkGetDeviceProcAddr( device, "vkCmdControlVideoCodingKHR" ) ); + vkGetInstanceProcAddr = getInstanceProcAddr; - //=== VK_KHR_video_decode_queue === - vkCmdDecodeVideoKHR = PFN_vkCmdDecodeVideoKHR( vkGetDeviceProcAddr( device, "vkCmdDecodeVideoKHR" ) ); + //=== VK_VERSION_1_0 === + vkCreateInstance = PFN_vkCreateInstance( vkGetInstanceProcAddr( NULL, "vkCreateInstance" ) ); + vkEnumerateInstanceExtensionProperties = + PFN_vkEnumerateInstanceExtensionProperties( vkGetInstanceProcAddr( NULL, "vkEnumerateInstanceExtensionProperties" ) ); + vkEnumerateInstanceLayerProperties = PFN_vkEnumerateInstanceLayerProperties( vkGetInstanceProcAddr( NULL, "vkEnumerateInstanceLayerProperties" ) ); - //=== VK_EXT_transform_feedback === - vkCmdBindTransformFeedbackBuffersEXT = PFN_vkCmdBindTransformFeedbackBuffersEXT( vkGetDeviceProcAddr( device, "vkCmdBindTransformFeedbackBuffersEXT" ) ); - vkCmdBeginTransformFeedbackEXT = PFN_vkCmdBeginTransformFeedbackEXT( vkGetDeviceProcAddr( device, "vkCmdBeginTransformFeedbackEXT" ) ); - vkCmdEndTransformFeedbackEXT = PFN_vkCmdEndTransformFeedbackEXT( vkGetDeviceProcAddr( device, "vkCmdEndTransformFeedbackEXT" ) ); - vkCmdBeginQueryIndexedEXT = PFN_vkCmdBeginQueryIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdBeginQueryIndexedEXT" ) ); - vkCmdEndQueryIndexedEXT = PFN_vkCmdEndQueryIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdEndQueryIndexedEXT" ) ); - vkCmdDrawIndirectByteCountEXT = PFN_vkCmdDrawIndirectByteCountEXT( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectByteCountEXT" ) ); + //=== VK_VERSION_1_1 === + vkEnumerateInstanceVersion = PFN_vkEnumerateInstanceVersion( vkGetInstanceProcAddr( NULL, "vkEnumerateInstanceVersion" ) ); + } - //=== VK_NVX_binary_import === - vkCreateCuModuleNVX = PFN_vkCreateCuModuleNVX( vkGetDeviceProcAddr( device, "vkCreateCuModuleNVX" ) ); - vkCreateCuFunctionNVX = PFN_vkCreateCuFunctionNVX( vkGetDeviceProcAddr( device, "vkCreateCuFunctionNVX" ) ); - vkDestroyCuModuleNVX = PFN_vkDestroyCuModuleNVX( vkGetDeviceProcAddr( device, "vkDestroyCuModuleNVX" ) ); - vkDestroyCuFunctionNVX = PFN_vkDestroyCuFunctionNVX( vkGetDeviceProcAddr( device, "vkDestroyCuFunctionNVX" ) ); - vkCmdCuLaunchKernelNVX = PFN_vkCmdCuLaunchKernelNVX( vkGetDeviceProcAddr( device, "vkCmdCuLaunchKernelNVX" ) ); + // This interface does not require a linked vulkan library. 
+ void init( VkInstance instance, + PFN_vkGetInstanceProcAddr getInstanceProcAddr, + VkDevice device = {}, + PFN_vkGetDeviceProcAddr /*getDeviceProcAddr*/ = nullptr ) VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( instance && getInstanceProcAddr ); + vkGetInstanceProcAddr = getInstanceProcAddr; + init( VULKAN_HPP_NAMESPACE::Instance( instance ) ); + if ( device ) + { + init( VULKAN_HPP_NAMESPACE::Device( device ) ); + } + } - //=== VK_NVX_image_view_handle === - vkGetImageViewHandleNVX = PFN_vkGetImageViewHandleNVX( vkGetDeviceProcAddr( device, "vkGetImageViewHandleNVX" ) ); - vkGetImageViewAddressNVX = PFN_vkGetImageViewAddressNVX( vkGetDeviceProcAddr( device, "vkGetImageViewAddressNVX" ) ); + void init( VULKAN_HPP_NAMESPACE::Instance instanceCpp ) VULKAN_HPP_NOEXCEPT + { + VkInstance instance = static_cast( instanceCpp ); - //=== VK_AMD_draw_indirect_count === - vkCmdDrawIndirectCountAMD = PFN_vkCmdDrawIndirectCountAMD( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCountAMD" ) ); - if ( !vkCmdDrawIndirectCount ) - vkCmdDrawIndirectCount = vkCmdDrawIndirectCountAMD; - vkCmdDrawIndexedIndirectCountAMD = PFN_vkCmdDrawIndexedIndirectCountAMD( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCountAMD" ) ); - if ( !vkCmdDrawIndexedIndirectCount ) - vkCmdDrawIndexedIndirectCount = vkCmdDrawIndexedIndirectCountAMD; + //=== VK_VERSION_1_0 === + vkDestroyInstance = PFN_vkDestroyInstance( vkGetInstanceProcAddr( instance, "vkDestroyInstance" ) ); + vkEnumeratePhysicalDevices = PFN_vkEnumeratePhysicalDevices( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDevices" ) ); + vkGetPhysicalDeviceFeatures = PFN_vkGetPhysicalDeviceFeatures( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures" ) ); + vkGetPhysicalDeviceFormatProperties = + PFN_vkGetPhysicalDeviceFormatProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties" ) ); + vkGetPhysicalDeviceImageFormatProperties = + PFN_vkGetPhysicalDeviceImageFormatProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties" ) ); + vkGetPhysicalDeviceProperties = PFN_vkGetPhysicalDeviceProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties" ) ); + vkGetPhysicalDeviceQueueFamilyProperties = + PFN_vkGetPhysicalDeviceQueueFamilyProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties" ) ); + vkGetPhysicalDeviceMemoryProperties = + PFN_vkGetPhysicalDeviceMemoryProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties" ) ); + vkGetDeviceProcAddr = PFN_vkGetDeviceProcAddr( vkGetInstanceProcAddr( instance, "vkGetDeviceProcAddr" ) ); + vkCreateDevice = PFN_vkCreateDevice( vkGetInstanceProcAddr( instance, "vkCreateDevice" ) ); + vkDestroyDevice = PFN_vkDestroyDevice( vkGetInstanceProcAddr( instance, "vkDestroyDevice" ) ); + vkEnumerateDeviceExtensionProperties = + PFN_vkEnumerateDeviceExtensionProperties( vkGetInstanceProcAddr( instance, "vkEnumerateDeviceExtensionProperties" ) ); + vkEnumerateDeviceLayerProperties = PFN_vkEnumerateDeviceLayerProperties( vkGetInstanceProcAddr( instance, "vkEnumerateDeviceLayerProperties" ) ); + vkGetDeviceQueue = PFN_vkGetDeviceQueue( vkGetInstanceProcAddr( instance, "vkGetDeviceQueue" ) ); + vkQueueSubmit = PFN_vkQueueSubmit( vkGetInstanceProcAddr( instance, "vkQueueSubmit" ) ); + vkQueueWaitIdle = PFN_vkQueueWaitIdle( vkGetInstanceProcAddr( instance, "vkQueueWaitIdle" ) ); + vkDeviceWaitIdle = PFN_vkDeviceWaitIdle( vkGetInstanceProcAddr( instance, "vkDeviceWaitIdle" ) ); + vkAllocateMemory = 
PFN_vkAllocateMemory( vkGetInstanceProcAddr( instance, "vkAllocateMemory" ) ); + vkFreeMemory = PFN_vkFreeMemory( vkGetInstanceProcAddr( instance, "vkFreeMemory" ) ); + vkMapMemory = PFN_vkMapMemory( vkGetInstanceProcAddr( instance, "vkMapMemory" ) ); + vkUnmapMemory = PFN_vkUnmapMemory( vkGetInstanceProcAddr( instance, "vkUnmapMemory" ) ); + vkFlushMappedMemoryRanges = PFN_vkFlushMappedMemoryRanges( vkGetInstanceProcAddr( instance, "vkFlushMappedMemoryRanges" ) ); + vkInvalidateMappedMemoryRanges = PFN_vkInvalidateMappedMemoryRanges( vkGetInstanceProcAddr( instance, "vkInvalidateMappedMemoryRanges" ) ); + vkGetDeviceMemoryCommitment = PFN_vkGetDeviceMemoryCommitment( vkGetInstanceProcAddr( instance, "vkGetDeviceMemoryCommitment" ) ); + vkBindBufferMemory = PFN_vkBindBufferMemory( vkGetInstanceProcAddr( instance, "vkBindBufferMemory" ) ); + vkBindImageMemory = PFN_vkBindImageMemory( vkGetInstanceProcAddr( instance, "vkBindImageMemory" ) ); + vkGetBufferMemoryRequirements = PFN_vkGetBufferMemoryRequirements( vkGetInstanceProcAddr( instance, "vkGetBufferMemoryRequirements" ) ); + vkGetImageMemoryRequirements = PFN_vkGetImageMemoryRequirements( vkGetInstanceProcAddr( instance, "vkGetImageMemoryRequirements" ) ); + vkGetImageSparseMemoryRequirements = PFN_vkGetImageSparseMemoryRequirements( vkGetInstanceProcAddr( instance, "vkGetImageSparseMemoryRequirements" ) ); + vkGetPhysicalDeviceSparseImageFormatProperties = + PFN_vkGetPhysicalDeviceSparseImageFormatProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties" ) ); + vkQueueBindSparse = PFN_vkQueueBindSparse( vkGetInstanceProcAddr( instance, "vkQueueBindSparse" ) ); + vkCreateFence = PFN_vkCreateFence( vkGetInstanceProcAddr( instance, "vkCreateFence" ) ); + vkDestroyFence = PFN_vkDestroyFence( vkGetInstanceProcAddr( instance, "vkDestroyFence" ) ); + vkResetFences = PFN_vkResetFences( vkGetInstanceProcAddr( instance, "vkResetFences" ) ); + vkGetFenceStatus = PFN_vkGetFenceStatus( vkGetInstanceProcAddr( instance, "vkGetFenceStatus" ) ); + vkWaitForFences = PFN_vkWaitForFences( vkGetInstanceProcAddr( instance, "vkWaitForFences" ) ); + vkCreateSemaphore = PFN_vkCreateSemaphore( vkGetInstanceProcAddr( instance, "vkCreateSemaphore" ) ); + vkDestroySemaphore = PFN_vkDestroySemaphore( vkGetInstanceProcAddr( instance, "vkDestroySemaphore" ) ); + vkCreateEvent = PFN_vkCreateEvent( vkGetInstanceProcAddr( instance, "vkCreateEvent" ) ); + vkDestroyEvent = PFN_vkDestroyEvent( vkGetInstanceProcAddr( instance, "vkDestroyEvent" ) ); + vkGetEventStatus = PFN_vkGetEventStatus( vkGetInstanceProcAddr( instance, "vkGetEventStatus" ) ); + vkSetEvent = PFN_vkSetEvent( vkGetInstanceProcAddr( instance, "vkSetEvent" ) ); + vkResetEvent = PFN_vkResetEvent( vkGetInstanceProcAddr( instance, "vkResetEvent" ) ); + vkCreateQueryPool = PFN_vkCreateQueryPool( vkGetInstanceProcAddr( instance, "vkCreateQueryPool" ) ); + vkDestroyQueryPool = PFN_vkDestroyQueryPool( vkGetInstanceProcAddr( instance, "vkDestroyQueryPool" ) ); + vkGetQueryPoolResults = PFN_vkGetQueryPoolResults( vkGetInstanceProcAddr( instance, "vkGetQueryPoolResults" ) ); + vkCreateBuffer = PFN_vkCreateBuffer( vkGetInstanceProcAddr( instance, "vkCreateBuffer" ) ); + vkDestroyBuffer = PFN_vkDestroyBuffer( vkGetInstanceProcAddr( instance, "vkDestroyBuffer" ) ); + vkCreateBufferView = PFN_vkCreateBufferView( vkGetInstanceProcAddr( instance, "vkCreateBufferView" ) ); + vkDestroyBufferView = PFN_vkDestroyBufferView( vkGetInstanceProcAddr( instance, "vkDestroyBufferView" ) ); + 
vkCreateImage = PFN_vkCreateImage( vkGetInstanceProcAddr( instance, "vkCreateImage" ) ); + vkDestroyImage = PFN_vkDestroyImage( vkGetInstanceProcAddr( instance, "vkDestroyImage" ) ); + vkGetImageSubresourceLayout = PFN_vkGetImageSubresourceLayout( vkGetInstanceProcAddr( instance, "vkGetImageSubresourceLayout" ) ); + vkCreateImageView = PFN_vkCreateImageView( vkGetInstanceProcAddr( instance, "vkCreateImageView" ) ); + vkDestroyImageView = PFN_vkDestroyImageView( vkGetInstanceProcAddr( instance, "vkDestroyImageView" ) ); + vkCreateShaderModule = PFN_vkCreateShaderModule( vkGetInstanceProcAddr( instance, "vkCreateShaderModule" ) ); + vkDestroyShaderModule = PFN_vkDestroyShaderModule( vkGetInstanceProcAddr( instance, "vkDestroyShaderModule" ) ); + vkCreatePipelineCache = PFN_vkCreatePipelineCache( vkGetInstanceProcAddr( instance, "vkCreatePipelineCache" ) ); + vkDestroyPipelineCache = PFN_vkDestroyPipelineCache( vkGetInstanceProcAddr( instance, "vkDestroyPipelineCache" ) ); + vkGetPipelineCacheData = PFN_vkGetPipelineCacheData( vkGetInstanceProcAddr( instance, "vkGetPipelineCacheData" ) ); + vkMergePipelineCaches = PFN_vkMergePipelineCaches( vkGetInstanceProcAddr( instance, "vkMergePipelineCaches" ) ); + vkCreateGraphicsPipelines = PFN_vkCreateGraphicsPipelines( vkGetInstanceProcAddr( instance, "vkCreateGraphicsPipelines" ) ); + vkCreateComputePipelines = PFN_vkCreateComputePipelines( vkGetInstanceProcAddr( instance, "vkCreateComputePipelines" ) ); + vkDestroyPipeline = PFN_vkDestroyPipeline( vkGetInstanceProcAddr( instance, "vkDestroyPipeline" ) ); + vkCreatePipelineLayout = PFN_vkCreatePipelineLayout( vkGetInstanceProcAddr( instance, "vkCreatePipelineLayout" ) ); + vkDestroyPipelineLayout = PFN_vkDestroyPipelineLayout( vkGetInstanceProcAddr( instance, "vkDestroyPipelineLayout" ) ); + vkCreateSampler = PFN_vkCreateSampler( vkGetInstanceProcAddr( instance, "vkCreateSampler" ) ); + vkDestroySampler = PFN_vkDestroySampler( vkGetInstanceProcAddr( instance, "vkDestroySampler" ) ); + vkCreateDescriptorSetLayout = PFN_vkCreateDescriptorSetLayout( vkGetInstanceProcAddr( instance, "vkCreateDescriptorSetLayout" ) ); + vkDestroyDescriptorSetLayout = PFN_vkDestroyDescriptorSetLayout( vkGetInstanceProcAddr( instance, "vkDestroyDescriptorSetLayout" ) ); + vkCreateDescriptorPool = PFN_vkCreateDescriptorPool( vkGetInstanceProcAddr( instance, "vkCreateDescriptorPool" ) ); + vkDestroyDescriptorPool = PFN_vkDestroyDescriptorPool( vkGetInstanceProcAddr( instance, "vkDestroyDescriptorPool" ) ); + vkResetDescriptorPool = PFN_vkResetDescriptorPool( vkGetInstanceProcAddr( instance, "vkResetDescriptorPool" ) ); + vkAllocateDescriptorSets = PFN_vkAllocateDescriptorSets( vkGetInstanceProcAddr( instance, "vkAllocateDescriptorSets" ) ); + vkFreeDescriptorSets = PFN_vkFreeDescriptorSets( vkGetInstanceProcAddr( instance, "vkFreeDescriptorSets" ) ); + vkUpdateDescriptorSets = PFN_vkUpdateDescriptorSets( vkGetInstanceProcAddr( instance, "vkUpdateDescriptorSets" ) ); + vkCreateFramebuffer = PFN_vkCreateFramebuffer( vkGetInstanceProcAddr( instance, "vkCreateFramebuffer" ) ); + vkDestroyFramebuffer = PFN_vkDestroyFramebuffer( vkGetInstanceProcAddr( instance, "vkDestroyFramebuffer" ) ); + vkCreateRenderPass = PFN_vkCreateRenderPass( vkGetInstanceProcAddr( instance, "vkCreateRenderPass" ) ); + vkDestroyRenderPass = PFN_vkDestroyRenderPass( vkGetInstanceProcAddr( instance, "vkDestroyRenderPass" ) ); + vkGetRenderAreaGranularity = PFN_vkGetRenderAreaGranularity( vkGetInstanceProcAddr( instance, "vkGetRenderAreaGranularity" ) ); + 
vkCreateCommandPool = PFN_vkCreateCommandPool( vkGetInstanceProcAddr( instance, "vkCreateCommandPool" ) ); + vkDestroyCommandPool = PFN_vkDestroyCommandPool( vkGetInstanceProcAddr( instance, "vkDestroyCommandPool" ) ); + vkResetCommandPool = PFN_vkResetCommandPool( vkGetInstanceProcAddr( instance, "vkResetCommandPool" ) ); + vkAllocateCommandBuffers = PFN_vkAllocateCommandBuffers( vkGetInstanceProcAddr( instance, "vkAllocateCommandBuffers" ) ); + vkFreeCommandBuffers = PFN_vkFreeCommandBuffers( vkGetInstanceProcAddr( instance, "vkFreeCommandBuffers" ) ); + vkBeginCommandBuffer = PFN_vkBeginCommandBuffer( vkGetInstanceProcAddr( instance, "vkBeginCommandBuffer" ) ); + vkEndCommandBuffer = PFN_vkEndCommandBuffer( vkGetInstanceProcAddr( instance, "vkEndCommandBuffer" ) ); + vkResetCommandBuffer = PFN_vkResetCommandBuffer( vkGetInstanceProcAddr( instance, "vkResetCommandBuffer" ) ); + vkCmdBindPipeline = PFN_vkCmdBindPipeline( vkGetInstanceProcAddr( instance, "vkCmdBindPipeline" ) ); + vkCmdSetViewport = PFN_vkCmdSetViewport( vkGetInstanceProcAddr( instance, "vkCmdSetViewport" ) ); + vkCmdSetScissor = PFN_vkCmdSetScissor( vkGetInstanceProcAddr( instance, "vkCmdSetScissor" ) ); + vkCmdSetLineWidth = PFN_vkCmdSetLineWidth( vkGetInstanceProcAddr( instance, "vkCmdSetLineWidth" ) ); + vkCmdSetDepthBias = PFN_vkCmdSetDepthBias( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBias" ) ); + vkCmdSetBlendConstants = PFN_vkCmdSetBlendConstants( vkGetInstanceProcAddr( instance, "vkCmdSetBlendConstants" ) ); + vkCmdSetDepthBounds = PFN_vkCmdSetDepthBounds( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBounds" ) ); + vkCmdSetStencilCompareMask = PFN_vkCmdSetStencilCompareMask( vkGetInstanceProcAddr( instance, "vkCmdSetStencilCompareMask" ) ); + vkCmdSetStencilWriteMask = PFN_vkCmdSetStencilWriteMask( vkGetInstanceProcAddr( instance, "vkCmdSetStencilWriteMask" ) ); + vkCmdSetStencilReference = PFN_vkCmdSetStencilReference( vkGetInstanceProcAddr( instance, "vkCmdSetStencilReference" ) ); + vkCmdBindDescriptorSets = PFN_vkCmdBindDescriptorSets( vkGetInstanceProcAddr( instance, "vkCmdBindDescriptorSets" ) ); + vkCmdBindIndexBuffer = PFN_vkCmdBindIndexBuffer( vkGetInstanceProcAddr( instance, "vkCmdBindIndexBuffer" ) ); + vkCmdBindVertexBuffers = PFN_vkCmdBindVertexBuffers( vkGetInstanceProcAddr( instance, "vkCmdBindVertexBuffers" ) ); + vkCmdDraw = PFN_vkCmdDraw( vkGetInstanceProcAddr( instance, "vkCmdDraw" ) ); + vkCmdDrawIndexed = PFN_vkCmdDrawIndexed( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexed" ) ); + vkCmdDrawIndirect = PFN_vkCmdDrawIndirect( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirect" ) ); + vkCmdDrawIndexedIndirect = PFN_vkCmdDrawIndexedIndirect( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirect" ) ); + vkCmdDispatch = PFN_vkCmdDispatch( vkGetInstanceProcAddr( instance, "vkCmdDispatch" ) ); + vkCmdDispatchIndirect = PFN_vkCmdDispatchIndirect( vkGetInstanceProcAddr( instance, "vkCmdDispatchIndirect" ) ); + vkCmdCopyBuffer = PFN_vkCmdCopyBuffer( vkGetInstanceProcAddr( instance, "vkCmdCopyBuffer" ) ); + vkCmdCopyImage = PFN_vkCmdCopyImage( vkGetInstanceProcAddr( instance, "vkCmdCopyImage" ) ); + vkCmdBlitImage = PFN_vkCmdBlitImage( vkGetInstanceProcAddr( instance, "vkCmdBlitImage" ) ); + vkCmdCopyBufferToImage = PFN_vkCmdCopyBufferToImage( vkGetInstanceProcAddr( instance, "vkCmdCopyBufferToImage" ) ); + vkCmdCopyImageToBuffer = PFN_vkCmdCopyImageToBuffer( vkGetInstanceProcAddr( instance, "vkCmdCopyImageToBuffer" ) ); + vkCmdUpdateBuffer = PFN_vkCmdUpdateBuffer( vkGetInstanceProcAddr( 
instance, "vkCmdUpdateBuffer" ) ); + vkCmdFillBuffer = PFN_vkCmdFillBuffer( vkGetInstanceProcAddr( instance, "vkCmdFillBuffer" ) ); + vkCmdClearColorImage = PFN_vkCmdClearColorImage( vkGetInstanceProcAddr( instance, "vkCmdClearColorImage" ) ); + vkCmdClearDepthStencilImage = PFN_vkCmdClearDepthStencilImage( vkGetInstanceProcAddr( instance, "vkCmdClearDepthStencilImage" ) ); + vkCmdClearAttachments = PFN_vkCmdClearAttachments( vkGetInstanceProcAddr( instance, "vkCmdClearAttachments" ) ); + vkCmdResolveImage = PFN_vkCmdResolveImage( vkGetInstanceProcAddr( instance, "vkCmdResolveImage" ) ); + vkCmdSetEvent = PFN_vkCmdSetEvent( vkGetInstanceProcAddr( instance, "vkCmdSetEvent" ) ); + vkCmdResetEvent = PFN_vkCmdResetEvent( vkGetInstanceProcAddr( instance, "vkCmdResetEvent" ) ); + vkCmdWaitEvents = PFN_vkCmdWaitEvents( vkGetInstanceProcAddr( instance, "vkCmdWaitEvents" ) ); + vkCmdPipelineBarrier = PFN_vkCmdPipelineBarrier( vkGetInstanceProcAddr( instance, "vkCmdPipelineBarrier" ) ); + vkCmdBeginQuery = PFN_vkCmdBeginQuery( vkGetInstanceProcAddr( instance, "vkCmdBeginQuery" ) ); + vkCmdEndQuery = PFN_vkCmdEndQuery( vkGetInstanceProcAddr( instance, "vkCmdEndQuery" ) ); + vkCmdResetQueryPool = PFN_vkCmdResetQueryPool( vkGetInstanceProcAddr( instance, "vkCmdResetQueryPool" ) ); + vkCmdWriteTimestamp = PFN_vkCmdWriteTimestamp( vkGetInstanceProcAddr( instance, "vkCmdWriteTimestamp" ) ); + vkCmdCopyQueryPoolResults = PFN_vkCmdCopyQueryPoolResults( vkGetInstanceProcAddr( instance, "vkCmdCopyQueryPoolResults" ) ); + vkCmdPushConstants = PFN_vkCmdPushConstants( vkGetInstanceProcAddr( instance, "vkCmdPushConstants" ) ); + vkCmdBeginRenderPass = PFN_vkCmdBeginRenderPass( vkGetInstanceProcAddr( instance, "vkCmdBeginRenderPass" ) ); + vkCmdNextSubpass = PFN_vkCmdNextSubpass( vkGetInstanceProcAddr( instance, "vkCmdNextSubpass" ) ); + vkCmdEndRenderPass = PFN_vkCmdEndRenderPass( vkGetInstanceProcAddr( instance, "vkCmdEndRenderPass" ) ); + vkCmdExecuteCommands = PFN_vkCmdExecuteCommands( vkGetInstanceProcAddr( instance, "vkCmdExecuteCommands" ) ); - //=== VK_AMD_shader_info === - vkGetShaderInfoAMD = PFN_vkGetShaderInfoAMD( vkGetDeviceProcAddr( device, "vkGetShaderInfoAMD" ) ); + //=== VK_VERSION_1_1 === + vkBindBufferMemory2 = PFN_vkBindBufferMemory2( vkGetInstanceProcAddr( instance, "vkBindBufferMemory2" ) ); + vkBindImageMemory2 = PFN_vkBindImageMemory2( vkGetInstanceProcAddr( instance, "vkBindImageMemory2" ) ); + vkGetDeviceGroupPeerMemoryFeatures = PFN_vkGetDeviceGroupPeerMemoryFeatures( vkGetInstanceProcAddr( instance, "vkGetDeviceGroupPeerMemoryFeatures" ) ); + vkCmdSetDeviceMask = PFN_vkCmdSetDeviceMask( vkGetInstanceProcAddr( instance, "vkCmdSetDeviceMask" ) ); + vkCmdDispatchBase = PFN_vkCmdDispatchBase( vkGetInstanceProcAddr( instance, "vkCmdDispatchBase" ) ); + vkEnumeratePhysicalDeviceGroups = PFN_vkEnumeratePhysicalDeviceGroups( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceGroups" ) ); + vkGetImageMemoryRequirements2 = PFN_vkGetImageMemoryRequirements2( vkGetInstanceProcAddr( instance, "vkGetImageMemoryRequirements2" ) ); + vkGetBufferMemoryRequirements2 = PFN_vkGetBufferMemoryRequirements2( vkGetInstanceProcAddr( instance, "vkGetBufferMemoryRequirements2" ) ); + vkGetImageSparseMemoryRequirements2 = + PFN_vkGetImageSparseMemoryRequirements2( vkGetInstanceProcAddr( instance, "vkGetImageSparseMemoryRequirements2" ) ); + vkGetPhysicalDeviceFeatures2 = PFN_vkGetPhysicalDeviceFeatures2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures2" ) ); + vkGetPhysicalDeviceProperties2 = 
PFN_vkGetPhysicalDeviceProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties2" ) ); + vkGetPhysicalDeviceFormatProperties2 = + PFN_vkGetPhysicalDeviceFormatProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties2" ) ); + vkGetPhysicalDeviceImageFormatProperties2 = + PFN_vkGetPhysicalDeviceImageFormatProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties2" ) ); + vkGetPhysicalDeviceQueueFamilyProperties2 = + PFN_vkGetPhysicalDeviceQueueFamilyProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties2" ) ); + vkGetPhysicalDeviceMemoryProperties2 = + PFN_vkGetPhysicalDeviceMemoryProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties2" ) ); + vkGetPhysicalDeviceSparseImageFormatProperties2 = + PFN_vkGetPhysicalDeviceSparseImageFormatProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties2" ) ); + vkTrimCommandPool = PFN_vkTrimCommandPool( vkGetInstanceProcAddr( instance, "vkTrimCommandPool" ) ); + vkGetDeviceQueue2 = PFN_vkGetDeviceQueue2( vkGetInstanceProcAddr( instance, "vkGetDeviceQueue2" ) ); + vkCreateSamplerYcbcrConversion = PFN_vkCreateSamplerYcbcrConversion( vkGetInstanceProcAddr( instance, "vkCreateSamplerYcbcrConversion" ) ); + vkDestroySamplerYcbcrConversion = PFN_vkDestroySamplerYcbcrConversion( vkGetInstanceProcAddr( instance, "vkDestroySamplerYcbcrConversion" ) ); + vkCreateDescriptorUpdateTemplate = PFN_vkCreateDescriptorUpdateTemplate( vkGetInstanceProcAddr( instance, "vkCreateDescriptorUpdateTemplate" ) ); + vkDestroyDescriptorUpdateTemplate = PFN_vkDestroyDescriptorUpdateTemplate( vkGetInstanceProcAddr( instance, "vkDestroyDescriptorUpdateTemplate" ) ); + vkUpdateDescriptorSetWithTemplate = PFN_vkUpdateDescriptorSetWithTemplate( vkGetInstanceProcAddr( instance, "vkUpdateDescriptorSetWithTemplate" ) ); + vkGetPhysicalDeviceExternalBufferProperties = + PFN_vkGetPhysicalDeviceExternalBufferProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalBufferProperties" ) ); + vkGetPhysicalDeviceExternalFenceProperties = + PFN_vkGetPhysicalDeviceExternalFenceProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalFenceProperties" ) ); + vkGetPhysicalDeviceExternalSemaphoreProperties = + PFN_vkGetPhysicalDeviceExternalSemaphoreProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalSemaphoreProperties" ) ); + vkGetDescriptorSetLayoutSupport = PFN_vkGetDescriptorSetLayoutSupport( vkGetInstanceProcAddr( instance, "vkGetDescriptorSetLayoutSupport" ) ); - //=== VK_KHR_dynamic_rendering === - vkCmdBeginRenderingKHR = PFN_vkCmdBeginRenderingKHR( vkGetDeviceProcAddr( device, "vkCmdBeginRenderingKHR" ) ); - if ( !vkCmdBeginRendering ) - vkCmdBeginRendering = vkCmdBeginRenderingKHR; - vkCmdEndRenderingKHR = PFN_vkCmdEndRenderingKHR( vkGetDeviceProcAddr( device, "vkCmdEndRenderingKHR" ) ); - if ( !vkCmdEndRendering ) - vkCmdEndRendering = vkCmdEndRenderingKHR; + //=== VK_VERSION_1_2 === + vkCmdDrawIndirectCount = PFN_vkCmdDrawIndirectCount( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectCount" ) ); + vkCmdDrawIndexedIndirectCount = PFN_vkCmdDrawIndexedIndirectCount( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirectCount" ) ); + vkCreateRenderPass2 = PFN_vkCreateRenderPass2( vkGetInstanceProcAddr( instance, "vkCreateRenderPass2" ) ); + vkCmdBeginRenderPass2 = PFN_vkCmdBeginRenderPass2( vkGetInstanceProcAddr( instance, "vkCmdBeginRenderPass2" ) ); + 
vkCmdNextSubpass2 = PFN_vkCmdNextSubpass2( vkGetInstanceProcAddr( instance, "vkCmdNextSubpass2" ) ); + vkCmdEndRenderPass2 = PFN_vkCmdEndRenderPass2( vkGetInstanceProcAddr( instance, "vkCmdEndRenderPass2" ) ); + vkResetQueryPool = PFN_vkResetQueryPool( vkGetInstanceProcAddr( instance, "vkResetQueryPool" ) ); + vkGetSemaphoreCounterValue = PFN_vkGetSemaphoreCounterValue( vkGetInstanceProcAddr( instance, "vkGetSemaphoreCounterValue" ) ); + vkWaitSemaphores = PFN_vkWaitSemaphores( vkGetInstanceProcAddr( instance, "vkWaitSemaphores" ) ); + vkSignalSemaphore = PFN_vkSignalSemaphore( vkGetInstanceProcAddr( instance, "vkSignalSemaphore" ) ); + vkGetBufferDeviceAddress = PFN_vkGetBufferDeviceAddress( vkGetInstanceProcAddr( instance, "vkGetBufferDeviceAddress" ) ); + vkGetBufferOpaqueCaptureAddress = PFN_vkGetBufferOpaqueCaptureAddress( vkGetInstanceProcAddr( instance, "vkGetBufferOpaqueCaptureAddress" ) ); + vkGetDeviceMemoryOpaqueCaptureAddress = + PFN_vkGetDeviceMemoryOpaqueCaptureAddress( vkGetInstanceProcAddr( instance, "vkGetDeviceMemoryOpaqueCaptureAddress" ) ); -#if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_NV_external_memory_win32 === - vkGetMemoryWin32HandleNV = PFN_vkGetMemoryWin32HandleNV( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandleNV" ) ); -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + //=== VK_VERSION_1_3 === + vkGetPhysicalDeviceToolProperties = PFN_vkGetPhysicalDeviceToolProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceToolProperties" ) ); + vkCreatePrivateDataSlot = PFN_vkCreatePrivateDataSlot( vkGetInstanceProcAddr( instance, "vkCreatePrivateDataSlot" ) ); + vkDestroyPrivateDataSlot = PFN_vkDestroyPrivateDataSlot( vkGetInstanceProcAddr( instance, "vkDestroyPrivateDataSlot" ) ); + vkSetPrivateData = PFN_vkSetPrivateData( vkGetInstanceProcAddr( instance, "vkSetPrivateData" ) ); + vkGetPrivateData = PFN_vkGetPrivateData( vkGetInstanceProcAddr( instance, "vkGetPrivateData" ) ); + vkCmdSetEvent2 = PFN_vkCmdSetEvent2( vkGetInstanceProcAddr( instance, "vkCmdSetEvent2" ) ); + vkCmdResetEvent2 = PFN_vkCmdResetEvent2( vkGetInstanceProcAddr( instance, "vkCmdResetEvent2" ) ); + vkCmdWaitEvents2 = PFN_vkCmdWaitEvents2( vkGetInstanceProcAddr( instance, "vkCmdWaitEvents2" ) ); + vkCmdPipelineBarrier2 = PFN_vkCmdPipelineBarrier2( vkGetInstanceProcAddr( instance, "vkCmdPipelineBarrier2" ) ); + vkCmdWriteTimestamp2 = PFN_vkCmdWriteTimestamp2( vkGetInstanceProcAddr( instance, "vkCmdWriteTimestamp2" ) ); + vkQueueSubmit2 = PFN_vkQueueSubmit2( vkGetInstanceProcAddr( instance, "vkQueueSubmit2" ) ); + vkCmdCopyBuffer2 = PFN_vkCmdCopyBuffer2( vkGetInstanceProcAddr( instance, "vkCmdCopyBuffer2" ) ); + vkCmdCopyImage2 = PFN_vkCmdCopyImage2( vkGetInstanceProcAddr( instance, "vkCmdCopyImage2" ) ); + vkCmdCopyBufferToImage2 = PFN_vkCmdCopyBufferToImage2( vkGetInstanceProcAddr( instance, "vkCmdCopyBufferToImage2" ) ); + vkCmdCopyImageToBuffer2 = PFN_vkCmdCopyImageToBuffer2( vkGetInstanceProcAddr( instance, "vkCmdCopyImageToBuffer2" ) ); + vkCmdBlitImage2 = PFN_vkCmdBlitImage2( vkGetInstanceProcAddr( instance, "vkCmdBlitImage2" ) ); + vkCmdResolveImage2 = PFN_vkCmdResolveImage2( vkGetInstanceProcAddr( instance, "vkCmdResolveImage2" ) ); + vkCmdBeginRendering = PFN_vkCmdBeginRendering( vkGetInstanceProcAddr( instance, "vkCmdBeginRendering" ) ); + vkCmdEndRendering = PFN_vkCmdEndRendering( vkGetInstanceProcAddr( instance, "vkCmdEndRendering" ) ); + vkCmdSetCullMode = PFN_vkCmdSetCullMode( vkGetInstanceProcAddr( instance, "vkCmdSetCullMode" ) ); + vkCmdSetFrontFace = PFN_vkCmdSetFrontFace( 
vkGetInstanceProcAddr( instance, "vkCmdSetFrontFace" ) ); + vkCmdSetPrimitiveTopology = PFN_vkCmdSetPrimitiveTopology( vkGetInstanceProcAddr( instance, "vkCmdSetPrimitiveTopology" ) ); + vkCmdSetViewportWithCount = PFN_vkCmdSetViewportWithCount( vkGetInstanceProcAddr( instance, "vkCmdSetViewportWithCount" ) ); + vkCmdSetScissorWithCount = PFN_vkCmdSetScissorWithCount( vkGetInstanceProcAddr( instance, "vkCmdSetScissorWithCount" ) ); + vkCmdBindVertexBuffers2 = PFN_vkCmdBindVertexBuffers2( vkGetInstanceProcAddr( instance, "vkCmdBindVertexBuffers2" ) ); + vkCmdSetDepthTestEnable = PFN_vkCmdSetDepthTestEnable( vkGetInstanceProcAddr( instance, "vkCmdSetDepthTestEnable" ) ); + vkCmdSetDepthWriteEnable = PFN_vkCmdSetDepthWriteEnable( vkGetInstanceProcAddr( instance, "vkCmdSetDepthWriteEnable" ) ); + vkCmdSetDepthCompareOp = PFN_vkCmdSetDepthCompareOp( vkGetInstanceProcAddr( instance, "vkCmdSetDepthCompareOp" ) ); + vkCmdSetDepthBoundsTestEnable = PFN_vkCmdSetDepthBoundsTestEnable( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBoundsTestEnable" ) ); + vkCmdSetStencilTestEnable = PFN_vkCmdSetStencilTestEnable( vkGetInstanceProcAddr( instance, "vkCmdSetStencilTestEnable" ) ); + vkCmdSetStencilOp = PFN_vkCmdSetStencilOp( vkGetInstanceProcAddr( instance, "vkCmdSetStencilOp" ) ); + vkCmdSetRasterizerDiscardEnable = PFN_vkCmdSetRasterizerDiscardEnable( vkGetInstanceProcAddr( instance, "vkCmdSetRasterizerDiscardEnable" ) ); + vkCmdSetDepthBiasEnable = PFN_vkCmdSetDepthBiasEnable( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBiasEnable" ) ); + vkCmdSetPrimitiveRestartEnable = PFN_vkCmdSetPrimitiveRestartEnable( vkGetInstanceProcAddr( instance, "vkCmdSetPrimitiveRestartEnable" ) ); + vkGetDeviceBufferMemoryRequirements = + PFN_vkGetDeviceBufferMemoryRequirements( vkGetInstanceProcAddr( instance, "vkGetDeviceBufferMemoryRequirements" ) ); + vkGetDeviceImageMemoryRequirements = PFN_vkGetDeviceImageMemoryRequirements( vkGetInstanceProcAddr( instance, "vkGetDeviceImageMemoryRequirements" ) ); + vkGetDeviceImageSparseMemoryRequirements = + PFN_vkGetDeviceImageSparseMemoryRequirements( vkGetInstanceProcAddr( instance, "vkGetDeviceImageSparseMemoryRequirements" ) ); - //=== VK_KHR_device_group === - vkGetDeviceGroupPeerMemoryFeaturesKHR = - PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR( vkGetDeviceProcAddr( device, "vkGetDeviceGroupPeerMemoryFeaturesKHR" ) ); - if ( !vkGetDeviceGroupPeerMemoryFeatures ) - vkGetDeviceGroupPeerMemoryFeatures = vkGetDeviceGroupPeerMemoryFeaturesKHR; - vkCmdSetDeviceMaskKHR = PFN_vkCmdSetDeviceMaskKHR( vkGetDeviceProcAddr( device, "vkCmdSetDeviceMaskKHR" ) ); - if ( !vkCmdSetDeviceMask ) - vkCmdSetDeviceMask = vkCmdSetDeviceMaskKHR; - vkCmdDispatchBaseKHR = PFN_vkCmdDispatchBaseKHR( vkGetDeviceProcAddr( device, "vkCmdDispatchBaseKHR" ) ); - if ( !vkCmdDispatchBase ) - vkCmdDispatchBase = vkCmdDispatchBaseKHR; + //=== VK_KHR_surface === + vkDestroySurfaceKHR = PFN_vkDestroySurfaceKHR( vkGetInstanceProcAddr( instance, "vkDestroySurfaceKHR" ) ); + vkGetPhysicalDeviceSurfaceSupportKHR = + PFN_vkGetPhysicalDeviceSurfaceSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceSupportKHR" ) ); + vkGetPhysicalDeviceSurfaceCapabilitiesKHR = + PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilitiesKHR" ) ); + vkGetPhysicalDeviceSurfaceFormatsKHR = + PFN_vkGetPhysicalDeviceSurfaceFormatsKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceFormatsKHR" ) ); + vkGetPhysicalDeviceSurfacePresentModesKHR = + 
PFN_vkGetPhysicalDeviceSurfacePresentModesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfacePresentModesKHR" ) ); - //=== VK_KHR_maintenance1 === - vkTrimCommandPoolKHR = PFN_vkTrimCommandPoolKHR( vkGetDeviceProcAddr( device, "vkTrimCommandPoolKHR" ) ); - if ( !vkTrimCommandPool ) - vkTrimCommandPool = vkTrimCommandPoolKHR; + //=== VK_KHR_swapchain === + vkCreateSwapchainKHR = PFN_vkCreateSwapchainKHR( vkGetInstanceProcAddr( instance, "vkCreateSwapchainKHR" ) ); + vkDestroySwapchainKHR = PFN_vkDestroySwapchainKHR( vkGetInstanceProcAddr( instance, "vkDestroySwapchainKHR" ) ); + vkGetSwapchainImagesKHR = PFN_vkGetSwapchainImagesKHR( vkGetInstanceProcAddr( instance, "vkGetSwapchainImagesKHR" ) ); + vkAcquireNextImageKHR = PFN_vkAcquireNextImageKHR( vkGetInstanceProcAddr( instance, "vkAcquireNextImageKHR" ) ); + vkQueuePresentKHR = PFN_vkQueuePresentKHR( vkGetInstanceProcAddr( instance, "vkQueuePresentKHR" ) ); + vkGetDeviceGroupPresentCapabilitiesKHR = + PFN_vkGetDeviceGroupPresentCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceGroupPresentCapabilitiesKHR" ) ); + vkGetDeviceGroupSurfacePresentModesKHR = + PFN_vkGetDeviceGroupSurfacePresentModesKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceGroupSurfacePresentModesKHR" ) ); + vkGetPhysicalDevicePresentRectanglesKHR = + PFN_vkGetPhysicalDevicePresentRectanglesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDevicePresentRectanglesKHR" ) ); + vkAcquireNextImage2KHR = PFN_vkAcquireNextImage2KHR( vkGetInstanceProcAddr( instance, "vkAcquireNextImage2KHR" ) ); -#if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_KHR_external_memory_win32 === - vkGetMemoryWin32HandleKHR = PFN_vkGetMemoryWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandleKHR" ) ); - vkGetMemoryWin32HandlePropertiesKHR = PFN_vkGetMemoryWin32HandlePropertiesKHR( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandlePropertiesKHR" ) ); -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + //=== VK_KHR_display === + vkGetPhysicalDeviceDisplayPropertiesKHR = + PFN_vkGetPhysicalDeviceDisplayPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPropertiesKHR" ) ); + vkGetPhysicalDeviceDisplayPlanePropertiesKHR = + PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPlanePropertiesKHR" ) ); + vkGetDisplayPlaneSupportedDisplaysKHR = + PFN_vkGetDisplayPlaneSupportedDisplaysKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneSupportedDisplaysKHR" ) ); + vkGetDisplayModePropertiesKHR = PFN_vkGetDisplayModePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayModePropertiesKHR" ) ); + vkCreateDisplayModeKHR = PFN_vkCreateDisplayModeKHR( vkGetInstanceProcAddr( instance, "vkCreateDisplayModeKHR" ) ); + vkGetDisplayPlaneCapabilitiesKHR = PFN_vkGetDisplayPlaneCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneCapabilitiesKHR" ) ); + vkCreateDisplayPlaneSurfaceKHR = PFN_vkCreateDisplayPlaneSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateDisplayPlaneSurfaceKHR" ) ); - //=== VK_KHR_external_memory_fd === - vkGetMemoryFdKHR = PFN_vkGetMemoryFdKHR( vkGetDeviceProcAddr( device, "vkGetMemoryFdKHR" ) ); - vkGetMemoryFdPropertiesKHR = PFN_vkGetMemoryFdPropertiesKHR( vkGetDeviceProcAddr( device, "vkGetMemoryFdPropertiesKHR" ) ); + //=== VK_KHR_display_swapchain === + vkCreateSharedSwapchainsKHR = PFN_vkCreateSharedSwapchainsKHR( vkGetInstanceProcAddr( instance, "vkCreateSharedSwapchainsKHR" ) ); -#if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== 
VK_KHR_external_semaphore_win32 === - vkImportSemaphoreWin32HandleKHR = PFN_vkImportSemaphoreWin32HandleKHR( vkGetDeviceProcAddr( device, "vkImportSemaphoreWin32HandleKHR" ) ); - vkGetSemaphoreWin32HandleKHR = PFN_vkGetSemaphoreWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreWin32HandleKHR" ) ); -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ +#if defined( VK_USE_PLATFORM_XLIB_KHR ) + //=== VK_KHR_xlib_surface === + vkCreateXlibSurfaceKHR = PFN_vkCreateXlibSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateXlibSurfaceKHR" ) ); + vkGetPhysicalDeviceXlibPresentationSupportKHR = + PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceXlibPresentationSupportKHR" ) ); +#endif /*VK_USE_PLATFORM_XLIB_KHR*/ - //=== VK_KHR_external_semaphore_fd === - vkImportSemaphoreFdKHR = PFN_vkImportSemaphoreFdKHR( vkGetDeviceProcAddr( device, "vkImportSemaphoreFdKHR" ) ); - vkGetSemaphoreFdKHR = PFN_vkGetSemaphoreFdKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreFdKHR" ) ); +#if defined( VK_USE_PLATFORM_XCB_KHR ) + //=== VK_KHR_xcb_surface === + vkCreateXcbSurfaceKHR = PFN_vkCreateXcbSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateXcbSurfaceKHR" ) ); + vkGetPhysicalDeviceXcbPresentationSupportKHR = + PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceXcbPresentationSupportKHR" ) ); +#endif /*VK_USE_PLATFORM_XCB_KHR*/ - //=== VK_KHR_push_descriptor === - vkCmdPushDescriptorSetKHR = PFN_vkCmdPushDescriptorSetKHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetKHR" ) ); - vkCmdPushDescriptorSetWithTemplateKHR = - PFN_vkCmdPushDescriptorSetWithTemplateKHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetWithTemplateKHR" ) ); - - //=== VK_EXT_conditional_rendering === - vkCmdBeginConditionalRenderingEXT = PFN_vkCmdBeginConditionalRenderingEXT( vkGetDeviceProcAddr( device, "vkCmdBeginConditionalRenderingEXT" ) ); - vkCmdEndConditionalRenderingEXT = PFN_vkCmdEndConditionalRenderingEXT( vkGetDeviceProcAddr( device, "vkCmdEndConditionalRenderingEXT" ) ); - - //=== VK_KHR_descriptor_update_template === - vkCreateDescriptorUpdateTemplateKHR = PFN_vkCreateDescriptorUpdateTemplateKHR( vkGetDeviceProcAddr( device, "vkCreateDescriptorUpdateTemplateKHR" ) ); - if ( !vkCreateDescriptorUpdateTemplate ) - vkCreateDescriptorUpdateTemplate = vkCreateDescriptorUpdateTemplateKHR; - vkDestroyDescriptorUpdateTemplateKHR = PFN_vkDestroyDescriptorUpdateTemplateKHR( vkGetDeviceProcAddr( device, "vkDestroyDescriptorUpdateTemplateKHR" ) ); - if ( !vkDestroyDescriptorUpdateTemplate ) - vkDestroyDescriptorUpdateTemplate = vkDestroyDescriptorUpdateTemplateKHR; - vkUpdateDescriptorSetWithTemplateKHR = PFN_vkUpdateDescriptorSetWithTemplateKHR( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSetWithTemplateKHR" ) ); - if ( !vkUpdateDescriptorSetWithTemplate ) - vkUpdateDescriptorSetWithTemplate = vkUpdateDescriptorSetWithTemplateKHR; - - //=== VK_NV_clip_space_w_scaling === - vkCmdSetViewportWScalingNV = PFN_vkCmdSetViewportWScalingNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportWScalingNV" ) ); - - //=== VK_EXT_display_control === - vkDisplayPowerControlEXT = PFN_vkDisplayPowerControlEXT( vkGetDeviceProcAddr( device, "vkDisplayPowerControlEXT" ) ); - vkRegisterDeviceEventEXT = PFN_vkRegisterDeviceEventEXT( vkGetDeviceProcAddr( device, "vkRegisterDeviceEventEXT" ) ); - vkRegisterDisplayEventEXT = PFN_vkRegisterDisplayEventEXT( vkGetDeviceProcAddr( device, "vkRegisterDisplayEventEXT" ) ); - vkGetSwapchainCounterEXT = 
PFN_vkGetSwapchainCounterEXT( vkGetDeviceProcAddr( device, "vkGetSwapchainCounterEXT" ) ); - - //=== VK_GOOGLE_display_timing === - vkGetRefreshCycleDurationGOOGLE = PFN_vkGetRefreshCycleDurationGOOGLE( vkGetDeviceProcAddr( device, "vkGetRefreshCycleDurationGOOGLE" ) ); - vkGetPastPresentationTimingGOOGLE = PFN_vkGetPastPresentationTimingGOOGLE( vkGetDeviceProcAddr( device, "vkGetPastPresentationTimingGOOGLE" ) ); - - //=== VK_EXT_discard_rectangles === - vkCmdSetDiscardRectangleEXT = PFN_vkCmdSetDiscardRectangleEXT( vkGetDeviceProcAddr( device, "vkCmdSetDiscardRectangleEXT" ) ); - vkCmdSetDiscardRectangleEnableEXT = PFN_vkCmdSetDiscardRectangleEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDiscardRectangleEnableEXT" ) ); - vkCmdSetDiscardRectangleModeEXT = PFN_vkCmdSetDiscardRectangleModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetDiscardRectangleModeEXT" ) ); - - //=== VK_EXT_hdr_metadata === - vkSetHdrMetadataEXT = PFN_vkSetHdrMetadataEXT( vkGetDeviceProcAddr( device, "vkSetHdrMetadataEXT" ) ); - - //=== VK_KHR_create_renderpass2 === - vkCreateRenderPass2KHR = PFN_vkCreateRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCreateRenderPass2KHR" ) ); - if ( !vkCreateRenderPass2 ) - vkCreateRenderPass2 = vkCreateRenderPass2KHR; - vkCmdBeginRenderPass2KHR = PFN_vkCmdBeginRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass2KHR" ) ); - if ( !vkCmdBeginRenderPass2 ) - vkCmdBeginRenderPass2 = vkCmdBeginRenderPass2KHR; - vkCmdNextSubpass2KHR = PFN_vkCmdNextSubpass2KHR( vkGetDeviceProcAddr( device, "vkCmdNextSubpass2KHR" ) ); - if ( !vkCmdNextSubpass2 ) - vkCmdNextSubpass2 = vkCmdNextSubpass2KHR; - vkCmdEndRenderPass2KHR = PFN_vkCmdEndRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass2KHR" ) ); - if ( !vkCmdEndRenderPass2 ) - vkCmdEndRenderPass2 = vkCmdEndRenderPass2KHR; - - //=== VK_KHR_shared_presentable_image === - vkGetSwapchainStatusKHR = PFN_vkGetSwapchainStatusKHR( vkGetDeviceProcAddr( device, "vkGetSwapchainStatusKHR" ) ); - -#if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_KHR_external_fence_win32 === - vkImportFenceWin32HandleKHR = PFN_vkImportFenceWin32HandleKHR( vkGetDeviceProcAddr( device, "vkImportFenceWin32HandleKHR" ) ); - vkGetFenceWin32HandleKHR = PFN_vkGetFenceWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetFenceWin32HandleKHR" ) ); -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - //=== VK_KHR_external_fence_fd === - vkImportFenceFdKHR = PFN_vkImportFenceFdKHR( vkGetDeviceProcAddr( device, "vkImportFenceFdKHR" ) ); - vkGetFenceFdKHR = PFN_vkGetFenceFdKHR( vkGetDeviceProcAddr( device, "vkGetFenceFdKHR" ) ); - - //=== VK_KHR_performance_query === - vkAcquireProfilingLockKHR = PFN_vkAcquireProfilingLockKHR( vkGetDeviceProcAddr( device, "vkAcquireProfilingLockKHR" ) ); - vkReleaseProfilingLockKHR = PFN_vkReleaseProfilingLockKHR( vkGetDeviceProcAddr( device, "vkReleaseProfilingLockKHR" ) ); - - //=== VK_EXT_debug_utils === - vkSetDebugUtilsObjectNameEXT = PFN_vkSetDebugUtilsObjectNameEXT( vkGetDeviceProcAddr( device, "vkSetDebugUtilsObjectNameEXT" ) ); - vkSetDebugUtilsObjectTagEXT = PFN_vkSetDebugUtilsObjectTagEXT( vkGetDeviceProcAddr( device, "vkSetDebugUtilsObjectTagEXT" ) ); - vkQueueBeginDebugUtilsLabelEXT = PFN_vkQueueBeginDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueBeginDebugUtilsLabelEXT" ) ); - vkQueueEndDebugUtilsLabelEXT = PFN_vkQueueEndDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueEndDebugUtilsLabelEXT" ) ); - vkQueueInsertDebugUtilsLabelEXT = PFN_vkQueueInsertDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, 
"vkQueueInsertDebugUtilsLabelEXT" ) ); - vkCmdBeginDebugUtilsLabelEXT = PFN_vkCmdBeginDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdBeginDebugUtilsLabelEXT" ) ); - vkCmdEndDebugUtilsLabelEXT = PFN_vkCmdEndDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdEndDebugUtilsLabelEXT" ) ); - vkCmdInsertDebugUtilsLabelEXT = PFN_vkCmdInsertDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdInsertDebugUtilsLabelEXT" ) ); +#if defined( VK_USE_PLATFORM_WAYLAND_KHR ) + //=== VK_KHR_wayland_surface === + vkCreateWaylandSurfaceKHR = PFN_vkCreateWaylandSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateWaylandSurfaceKHR" ) ); + vkGetPhysicalDeviceWaylandPresentationSupportKHR = + PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceWaylandPresentationSupportKHR" ) ); +#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ #if defined( VK_USE_PLATFORM_ANDROID_KHR ) - //=== VK_ANDROID_external_memory_android_hardware_buffer === - vkGetAndroidHardwareBufferPropertiesANDROID = - PFN_vkGetAndroidHardwareBufferPropertiesANDROID( vkGetDeviceProcAddr( device, "vkGetAndroidHardwareBufferPropertiesANDROID" ) ); - vkGetMemoryAndroidHardwareBufferANDROID = - PFN_vkGetMemoryAndroidHardwareBufferANDROID( vkGetDeviceProcAddr( device, "vkGetMemoryAndroidHardwareBufferANDROID" ) ); + //=== VK_KHR_android_surface === + vkCreateAndroidSurfaceKHR = PFN_vkCreateAndroidSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateAndroidSurfaceKHR" ) ); +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_win32_surface === + vkCreateWin32SurfaceKHR = PFN_vkCreateWin32SurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateWin32SurfaceKHR" ) ); + vkGetPhysicalDeviceWin32PresentationSupportKHR = + PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceWin32PresentationSupportKHR" ) ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_EXT_debug_report === + vkCreateDebugReportCallbackEXT = PFN_vkCreateDebugReportCallbackEXT( vkGetInstanceProcAddr( instance, "vkCreateDebugReportCallbackEXT" ) ); + vkDestroyDebugReportCallbackEXT = PFN_vkDestroyDebugReportCallbackEXT( vkGetInstanceProcAddr( instance, "vkDestroyDebugReportCallbackEXT" ) ); + vkDebugReportMessageEXT = PFN_vkDebugReportMessageEXT( vkGetInstanceProcAddr( instance, "vkDebugReportMessageEXT" ) ); + + //=== VK_EXT_debug_marker === + vkDebugMarkerSetObjectTagEXT = PFN_vkDebugMarkerSetObjectTagEXT( vkGetInstanceProcAddr( instance, "vkDebugMarkerSetObjectTagEXT" ) ); + vkDebugMarkerSetObjectNameEXT = PFN_vkDebugMarkerSetObjectNameEXT( vkGetInstanceProcAddr( instance, "vkDebugMarkerSetObjectNameEXT" ) ); + vkCmdDebugMarkerBeginEXT = PFN_vkCmdDebugMarkerBeginEXT( vkGetInstanceProcAddr( instance, "vkCmdDebugMarkerBeginEXT" ) ); + vkCmdDebugMarkerEndEXT = PFN_vkCmdDebugMarkerEndEXT( vkGetInstanceProcAddr( instance, "vkCmdDebugMarkerEndEXT" ) ); + vkCmdDebugMarkerInsertEXT = PFN_vkCmdDebugMarkerInsertEXT( vkGetInstanceProcAddr( instance, "vkCmdDebugMarkerInsertEXT" ) ); + + //=== VK_KHR_video_queue === + vkGetPhysicalDeviceVideoCapabilitiesKHR = + PFN_vkGetPhysicalDeviceVideoCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceVideoCapabilitiesKHR" ) ); + vkGetPhysicalDeviceVideoFormatPropertiesKHR = + PFN_vkGetPhysicalDeviceVideoFormatPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceVideoFormatPropertiesKHR" ) ); + vkCreateVideoSessionKHR = PFN_vkCreateVideoSessionKHR( vkGetInstanceProcAddr( instance, 
"vkCreateVideoSessionKHR" ) ); + vkDestroyVideoSessionKHR = PFN_vkDestroyVideoSessionKHR( vkGetInstanceProcAddr( instance, "vkDestroyVideoSessionKHR" ) ); + vkGetVideoSessionMemoryRequirementsKHR = + PFN_vkGetVideoSessionMemoryRequirementsKHR( vkGetInstanceProcAddr( instance, "vkGetVideoSessionMemoryRequirementsKHR" ) ); + vkBindVideoSessionMemoryKHR = PFN_vkBindVideoSessionMemoryKHR( vkGetInstanceProcAddr( instance, "vkBindVideoSessionMemoryKHR" ) ); + vkCreateVideoSessionParametersKHR = PFN_vkCreateVideoSessionParametersKHR( vkGetInstanceProcAddr( instance, "vkCreateVideoSessionParametersKHR" ) ); + vkUpdateVideoSessionParametersKHR = PFN_vkUpdateVideoSessionParametersKHR( vkGetInstanceProcAddr( instance, "vkUpdateVideoSessionParametersKHR" ) ); + vkDestroyVideoSessionParametersKHR = PFN_vkDestroyVideoSessionParametersKHR( vkGetInstanceProcAddr( instance, "vkDestroyVideoSessionParametersKHR" ) ); + vkCmdBeginVideoCodingKHR = PFN_vkCmdBeginVideoCodingKHR( vkGetInstanceProcAddr( instance, "vkCmdBeginVideoCodingKHR" ) ); + vkCmdEndVideoCodingKHR = PFN_vkCmdEndVideoCodingKHR( vkGetInstanceProcAddr( instance, "vkCmdEndVideoCodingKHR" ) ); + vkCmdControlVideoCodingKHR = PFN_vkCmdControlVideoCodingKHR( vkGetInstanceProcAddr( instance, "vkCmdControlVideoCodingKHR" ) ); + + //=== VK_KHR_video_decode_queue === + vkCmdDecodeVideoKHR = PFN_vkCmdDecodeVideoKHR( vkGetInstanceProcAddr( instance, "vkCmdDecodeVideoKHR" ) ); + + //=== VK_EXT_transform_feedback === + vkCmdBindTransformFeedbackBuffersEXT = + PFN_vkCmdBindTransformFeedbackBuffersEXT( vkGetInstanceProcAddr( instance, "vkCmdBindTransformFeedbackBuffersEXT" ) ); + vkCmdBeginTransformFeedbackEXT = PFN_vkCmdBeginTransformFeedbackEXT( vkGetInstanceProcAddr( instance, "vkCmdBeginTransformFeedbackEXT" ) ); + vkCmdEndTransformFeedbackEXT = PFN_vkCmdEndTransformFeedbackEXT( vkGetInstanceProcAddr( instance, "vkCmdEndTransformFeedbackEXT" ) ); + vkCmdBeginQueryIndexedEXT = PFN_vkCmdBeginQueryIndexedEXT( vkGetInstanceProcAddr( instance, "vkCmdBeginQueryIndexedEXT" ) ); + vkCmdEndQueryIndexedEXT = PFN_vkCmdEndQueryIndexedEXT( vkGetInstanceProcAddr( instance, "vkCmdEndQueryIndexedEXT" ) ); + vkCmdDrawIndirectByteCountEXT = PFN_vkCmdDrawIndirectByteCountEXT( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectByteCountEXT" ) ); + + //=== VK_NVX_binary_import === + vkCreateCuModuleNVX = PFN_vkCreateCuModuleNVX( vkGetInstanceProcAddr( instance, "vkCreateCuModuleNVX" ) ); + vkCreateCuFunctionNVX = PFN_vkCreateCuFunctionNVX( vkGetInstanceProcAddr( instance, "vkCreateCuFunctionNVX" ) ); + vkDestroyCuModuleNVX = PFN_vkDestroyCuModuleNVX( vkGetInstanceProcAddr( instance, "vkDestroyCuModuleNVX" ) ); + vkDestroyCuFunctionNVX = PFN_vkDestroyCuFunctionNVX( vkGetInstanceProcAddr( instance, "vkDestroyCuFunctionNVX" ) ); + vkCmdCuLaunchKernelNVX = PFN_vkCmdCuLaunchKernelNVX( vkGetInstanceProcAddr( instance, "vkCmdCuLaunchKernelNVX" ) ); + + //=== VK_NVX_image_view_handle === + vkGetImageViewHandleNVX = PFN_vkGetImageViewHandleNVX( vkGetInstanceProcAddr( instance, "vkGetImageViewHandleNVX" ) ); + vkGetImageViewAddressNVX = PFN_vkGetImageViewAddressNVX( vkGetInstanceProcAddr( instance, "vkGetImageViewAddressNVX" ) ); + + //=== VK_AMD_draw_indirect_count === + vkCmdDrawIndirectCountAMD = PFN_vkCmdDrawIndirectCountAMD( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectCountAMD" ) ); + if ( !vkCmdDrawIndirectCount ) + vkCmdDrawIndirectCount = vkCmdDrawIndirectCountAMD; + vkCmdDrawIndexedIndirectCountAMD = PFN_vkCmdDrawIndexedIndirectCountAMD( vkGetInstanceProcAddr( instance, 
"vkCmdDrawIndexedIndirectCountAMD" ) ); + if ( !vkCmdDrawIndexedIndirectCount ) + vkCmdDrawIndexedIndirectCount = vkCmdDrawIndexedIndirectCountAMD; + + //=== VK_AMD_shader_info === + vkGetShaderInfoAMD = PFN_vkGetShaderInfoAMD( vkGetInstanceProcAddr( instance, "vkGetShaderInfoAMD" ) ); + + //=== VK_KHR_dynamic_rendering === + vkCmdBeginRenderingKHR = PFN_vkCmdBeginRenderingKHR( vkGetInstanceProcAddr( instance, "vkCmdBeginRenderingKHR" ) ); + if ( !vkCmdBeginRendering ) + vkCmdBeginRendering = vkCmdBeginRenderingKHR; + vkCmdEndRenderingKHR = PFN_vkCmdEndRenderingKHR( vkGetInstanceProcAddr( instance, "vkCmdEndRenderingKHR" ) ); + if ( !vkCmdEndRendering ) + vkCmdEndRendering = vkCmdEndRenderingKHR; + +#if defined( VK_USE_PLATFORM_GGP ) + //=== VK_GGP_stream_descriptor_surface === + vkCreateStreamDescriptorSurfaceGGP = PFN_vkCreateStreamDescriptorSurfaceGGP( vkGetInstanceProcAddr( instance, "vkCreateStreamDescriptorSurfaceGGP" ) ); +#endif /*VK_USE_PLATFORM_GGP*/ + + //=== VK_NV_external_memory_capabilities === + vkGetPhysicalDeviceExternalImageFormatPropertiesNV = + PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalImageFormatPropertiesNV" ) ); + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_NV_external_memory_win32 === + vkGetMemoryWin32HandleNV = PFN_vkGetMemoryWin32HandleNV( vkGetInstanceProcAddr( instance, "vkGetMemoryWin32HandleNV" ) ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_get_physical_device_properties2 === + vkGetPhysicalDeviceFeatures2KHR = PFN_vkGetPhysicalDeviceFeatures2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures2KHR" ) ); + if ( !vkGetPhysicalDeviceFeatures2 ) + vkGetPhysicalDeviceFeatures2 = vkGetPhysicalDeviceFeatures2KHR; + vkGetPhysicalDeviceProperties2KHR = PFN_vkGetPhysicalDeviceProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties2KHR" ) ); + if ( !vkGetPhysicalDeviceProperties2 ) + vkGetPhysicalDeviceProperties2 = vkGetPhysicalDeviceProperties2KHR; + vkGetPhysicalDeviceFormatProperties2KHR = + PFN_vkGetPhysicalDeviceFormatProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties2KHR" ) ); + if ( !vkGetPhysicalDeviceFormatProperties2 ) + vkGetPhysicalDeviceFormatProperties2 = vkGetPhysicalDeviceFormatProperties2KHR; + vkGetPhysicalDeviceImageFormatProperties2KHR = + PFN_vkGetPhysicalDeviceImageFormatProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties2KHR" ) ); + if ( !vkGetPhysicalDeviceImageFormatProperties2 ) + vkGetPhysicalDeviceImageFormatProperties2 = vkGetPhysicalDeviceImageFormatProperties2KHR; + vkGetPhysicalDeviceQueueFamilyProperties2KHR = + PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties2KHR" ) ); + if ( !vkGetPhysicalDeviceQueueFamilyProperties2 ) + vkGetPhysicalDeviceQueueFamilyProperties2 = vkGetPhysicalDeviceQueueFamilyProperties2KHR; + vkGetPhysicalDeviceMemoryProperties2KHR = + PFN_vkGetPhysicalDeviceMemoryProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties2KHR" ) ); + if ( !vkGetPhysicalDeviceMemoryProperties2 ) + vkGetPhysicalDeviceMemoryProperties2 = vkGetPhysicalDeviceMemoryProperties2KHR; + vkGetPhysicalDeviceSparseImageFormatProperties2KHR = + PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties2KHR" ) ); + if ( 
!vkGetPhysicalDeviceSparseImageFormatProperties2 ) + vkGetPhysicalDeviceSparseImageFormatProperties2 = vkGetPhysicalDeviceSparseImageFormatProperties2KHR; + + //=== VK_KHR_device_group === + vkGetDeviceGroupPeerMemoryFeaturesKHR = + PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceGroupPeerMemoryFeaturesKHR" ) ); + if ( !vkGetDeviceGroupPeerMemoryFeatures ) + vkGetDeviceGroupPeerMemoryFeatures = vkGetDeviceGroupPeerMemoryFeaturesKHR; + vkCmdSetDeviceMaskKHR = PFN_vkCmdSetDeviceMaskKHR( vkGetInstanceProcAddr( instance, "vkCmdSetDeviceMaskKHR" ) ); + if ( !vkCmdSetDeviceMask ) + vkCmdSetDeviceMask = vkCmdSetDeviceMaskKHR; + vkCmdDispatchBaseKHR = PFN_vkCmdDispatchBaseKHR( vkGetInstanceProcAddr( instance, "vkCmdDispatchBaseKHR" ) ); + if ( !vkCmdDispatchBase ) + vkCmdDispatchBase = vkCmdDispatchBaseKHR; + +#if defined( VK_USE_PLATFORM_VI_NN ) + //=== VK_NN_vi_surface === + vkCreateViSurfaceNN = PFN_vkCreateViSurfaceNN( vkGetInstanceProcAddr( instance, "vkCreateViSurfaceNN" ) ); +#endif /*VK_USE_PLATFORM_VI_NN*/ + + //=== VK_KHR_maintenance1 === + vkTrimCommandPoolKHR = PFN_vkTrimCommandPoolKHR( vkGetInstanceProcAddr( instance, "vkTrimCommandPoolKHR" ) ); + if ( !vkTrimCommandPool ) + vkTrimCommandPool = vkTrimCommandPoolKHR; + + //=== VK_KHR_device_group_creation === + vkEnumeratePhysicalDeviceGroupsKHR = PFN_vkEnumeratePhysicalDeviceGroupsKHR( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceGroupsKHR" ) ); + if ( !vkEnumeratePhysicalDeviceGroups ) + vkEnumeratePhysicalDeviceGroups = vkEnumeratePhysicalDeviceGroupsKHR; + + //=== VK_KHR_external_memory_capabilities === + vkGetPhysicalDeviceExternalBufferPropertiesKHR = + PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalBufferPropertiesKHR" ) ); + if ( !vkGetPhysicalDeviceExternalBufferProperties ) + vkGetPhysicalDeviceExternalBufferProperties = vkGetPhysicalDeviceExternalBufferPropertiesKHR; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_memory_win32 === + vkGetMemoryWin32HandleKHR = PFN_vkGetMemoryWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkGetMemoryWin32HandleKHR" ) ); + vkGetMemoryWin32HandlePropertiesKHR = + PFN_vkGetMemoryWin32HandlePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetMemoryWin32HandlePropertiesKHR" ) ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_memory_fd === + vkGetMemoryFdKHR = PFN_vkGetMemoryFdKHR( vkGetInstanceProcAddr( instance, "vkGetMemoryFdKHR" ) ); + vkGetMemoryFdPropertiesKHR = PFN_vkGetMemoryFdPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetMemoryFdPropertiesKHR" ) ); + + //=== VK_KHR_external_semaphore_capabilities === + vkGetPhysicalDeviceExternalSemaphorePropertiesKHR = + PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalSemaphorePropertiesKHR" ) ); + if ( !vkGetPhysicalDeviceExternalSemaphoreProperties ) + vkGetPhysicalDeviceExternalSemaphoreProperties = vkGetPhysicalDeviceExternalSemaphorePropertiesKHR; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_semaphore_win32 === + vkImportSemaphoreWin32HandleKHR = PFN_vkImportSemaphoreWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkImportSemaphoreWin32HandleKHR" ) ); + vkGetSemaphoreWin32HandleKHR = PFN_vkGetSemaphoreWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkGetSemaphoreWin32HandleKHR" ) ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_semaphore_fd === + vkImportSemaphoreFdKHR = 
PFN_vkImportSemaphoreFdKHR( vkGetInstanceProcAddr( instance, "vkImportSemaphoreFdKHR" ) ); + vkGetSemaphoreFdKHR = PFN_vkGetSemaphoreFdKHR( vkGetInstanceProcAddr( instance, "vkGetSemaphoreFdKHR" ) ); + + //=== VK_KHR_push_descriptor === + vkCmdPushDescriptorSetKHR = PFN_vkCmdPushDescriptorSetKHR( vkGetInstanceProcAddr( instance, "vkCmdPushDescriptorSetKHR" ) ); + vkCmdPushDescriptorSetWithTemplateKHR = + PFN_vkCmdPushDescriptorSetWithTemplateKHR( vkGetInstanceProcAddr( instance, "vkCmdPushDescriptorSetWithTemplateKHR" ) ); + + //=== VK_EXT_conditional_rendering === + vkCmdBeginConditionalRenderingEXT = PFN_vkCmdBeginConditionalRenderingEXT( vkGetInstanceProcAddr( instance, "vkCmdBeginConditionalRenderingEXT" ) ); + vkCmdEndConditionalRenderingEXT = PFN_vkCmdEndConditionalRenderingEXT( vkGetInstanceProcAddr( instance, "vkCmdEndConditionalRenderingEXT" ) ); + + //=== VK_KHR_descriptor_update_template === + vkCreateDescriptorUpdateTemplateKHR = + PFN_vkCreateDescriptorUpdateTemplateKHR( vkGetInstanceProcAddr( instance, "vkCreateDescriptorUpdateTemplateKHR" ) ); + if ( !vkCreateDescriptorUpdateTemplate ) + vkCreateDescriptorUpdateTemplate = vkCreateDescriptorUpdateTemplateKHR; + vkDestroyDescriptorUpdateTemplateKHR = + PFN_vkDestroyDescriptorUpdateTemplateKHR( vkGetInstanceProcAddr( instance, "vkDestroyDescriptorUpdateTemplateKHR" ) ); + if ( !vkDestroyDescriptorUpdateTemplate ) + vkDestroyDescriptorUpdateTemplate = vkDestroyDescriptorUpdateTemplateKHR; + vkUpdateDescriptorSetWithTemplateKHR = + PFN_vkUpdateDescriptorSetWithTemplateKHR( vkGetInstanceProcAddr( instance, "vkUpdateDescriptorSetWithTemplateKHR" ) ); + if ( !vkUpdateDescriptorSetWithTemplate ) + vkUpdateDescriptorSetWithTemplate = vkUpdateDescriptorSetWithTemplateKHR; + + //=== VK_NV_clip_space_w_scaling === + vkCmdSetViewportWScalingNV = PFN_vkCmdSetViewportWScalingNV( vkGetInstanceProcAddr( instance, "vkCmdSetViewportWScalingNV" ) ); + + //=== VK_EXT_direct_mode_display === + vkReleaseDisplayEXT = PFN_vkReleaseDisplayEXT( vkGetInstanceProcAddr( instance, "vkReleaseDisplayEXT" ) ); + +#if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT ) + //=== VK_EXT_acquire_xlib_display === + vkAcquireXlibDisplayEXT = PFN_vkAcquireXlibDisplayEXT( vkGetInstanceProcAddr( instance, "vkAcquireXlibDisplayEXT" ) ); + vkGetRandROutputDisplayEXT = PFN_vkGetRandROutputDisplayEXT( vkGetInstanceProcAddr( instance, "vkGetRandROutputDisplayEXT" ) ); +#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ + + //=== VK_EXT_display_surface_counter === + vkGetPhysicalDeviceSurfaceCapabilities2EXT = + PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilities2EXT" ) ); + + //=== VK_EXT_display_control === + vkDisplayPowerControlEXT = PFN_vkDisplayPowerControlEXT( vkGetInstanceProcAddr( instance, "vkDisplayPowerControlEXT" ) ); + vkRegisterDeviceEventEXT = PFN_vkRegisterDeviceEventEXT( vkGetInstanceProcAddr( instance, "vkRegisterDeviceEventEXT" ) ); + vkRegisterDisplayEventEXT = PFN_vkRegisterDisplayEventEXT( vkGetInstanceProcAddr( instance, "vkRegisterDisplayEventEXT" ) ); + vkGetSwapchainCounterEXT = PFN_vkGetSwapchainCounterEXT( vkGetInstanceProcAddr( instance, "vkGetSwapchainCounterEXT" ) ); + + //=== VK_GOOGLE_display_timing === + vkGetRefreshCycleDurationGOOGLE = PFN_vkGetRefreshCycleDurationGOOGLE( vkGetInstanceProcAddr( instance, "vkGetRefreshCycleDurationGOOGLE" ) ); + vkGetPastPresentationTimingGOOGLE = PFN_vkGetPastPresentationTimingGOOGLE( vkGetInstanceProcAddr( instance, "vkGetPastPresentationTimingGOOGLE" ) 
); + + //=== VK_EXT_discard_rectangles === + vkCmdSetDiscardRectangleEXT = PFN_vkCmdSetDiscardRectangleEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDiscardRectangleEXT" ) ); + vkCmdSetDiscardRectangleEnableEXT = PFN_vkCmdSetDiscardRectangleEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDiscardRectangleEnableEXT" ) ); + vkCmdSetDiscardRectangleModeEXT = PFN_vkCmdSetDiscardRectangleModeEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDiscardRectangleModeEXT" ) ); + + //=== VK_EXT_hdr_metadata === + vkSetHdrMetadataEXT = PFN_vkSetHdrMetadataEXT( vkGetInstanceProcAddr( instance, "vkSetHdrMetadataEXT" ) ); + + //=== VK_KHR_create_renderpass2 === + vkCreateRenderPass2KHR = PFN_vkCreateRenderPass2KHR( vkGetInstanceProcAddr( instance, "vkCreateRenderPass2KHR" ) ); + if ( !vkCreateRenderPass2 ) + vkCreateRenderPass2 = vkCreateRenderPass2KHR; + vkCmdBeginRenderPass2KHR = PFN_vkCmdBeginRenderPass2KHR( vkGetInstanceProcAddr( instance, "vkCmdBeginRenderPass2KHR" ) ); + if ( !vkCmdBeginRenderPass2 ) + vkCmdBeginRenderPass2 = vkCmdBeginRenderPass2KHR; + vkCmdNextSubpass2KHR = PFN_vkCmdNextSubpass2KHR( vkGetInstanceProcAddr( instance, "vkCmdNextSubpass2KHR" ) ); + if ( !vkCmdNextSubpass2 ) + vkCmdNextSubpass2 = vkCmdNextSubpass2KHR; + vkCmdEndRenderPass2KHR = PFN_vkCmdEndRenderPass2KHR( vkGetInstanceProcAddr( instance, "vkCmdEndRenderPass2KHR" ) ); + if ( !vkCmdEndRenderPass2 ) + vkCmdEndRenderPass2 = vkCmdEndRenderPass2KHR; + + //=== VK_KHR_shared_presentable_image === + vkGetSwapchainStatusKHR = PFN_vkGetSwapchainStatusKHR( vkGetInstanceProcAddr( instance, "vkGetSwapchainStatusKHR" ) ); + + //=== VK_KHR_external_fence_capabilities === + vkGetPhysicalDeviceExternalFencePropertiesKHR = + PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalFencePropertiesKHR" ) ); + if ( !vkGetPhysicalDeviceExternalFenceProperties ) + vkGetPhysicalDeviceExternalFenceProperties = vkGetPhysicalDeviceExternalFencePropertiesKHR; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_fence_win32 === + vkImportFenceWin32HandleKHR = PFN_vkImportFenceWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkImportFenceWin32HandleKHR" ) ); + vkGetFenceWin32HandleKHR = PFN_vkGetFenceWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkGetFenceWin32HandleKHR" ) ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_fence_fd === + vkImportFenceFdKHR = PFN_vkImportFenceFdKHR( vkGetInstanceProcAddr( instance, "vkImportFenceFdKHR" ) ); + vkGetFenceFdKHR = PFN_vkGetFenceFdKHR( vkGetInstanceProcAddr( instance, "vkGetFenceFdKHR" ) ); + + //=== VK_KHR_performance_query === + vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR = PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( + vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR" ) ); + vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR = PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR" ) ); + vkAcquireProfilingLockKHR = PFN_vkAcquireProfilingLockKHR( vkGetInstanceProcAddr( instance, "vkAcquireProfilingLockKHR" ) ); + vkReleaseProfilingLockKHR = PFN_vkReleaseProfilingLockKHR( vkGetInstanceProcAddr( instance, "vkReleaseProfilingLockKHR" ) ); + + //=== VK_KHR_get_surface_capabilities2 === + vkGetPhysicalDeviceSurfaceCapabilities2KHR = + PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR( vkGetInstanceProcAddr( instance, 
"vkGetPhysicalDeviceSurfaceCapabilities2KHR" ) ); + vkGetPhysicalDeviceSurfaceFormats2KHR = + PFN_vkGetPhysicalDeviceSurfaceFormats2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceFormats2KHR" ) ); + + //=== VK_KHR_get_display_properties2 === + vkGetPhysicalDeviceDisplayProperties2KHR = + PFN_vkGetPhysicalDeviceDisplayProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayProperties2KHR" ) ); + vkGetPhysicalDeviceDisplayPlaneProperties2KHR = + PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPlaneProperties2KHR" ) ); + vkGetDisplayModeProperties2KHR = PFN_vkGetDisplayModeProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetDisplayModeProperties2KHR" ) ); + vkGetDisplayPlaneCapabilities2KHR = PFN_vkGetDisplayPlaneCapabilities2KHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneCapabilities2KHR" ) ); + +#if defined( VK_USE_PLATFORM_IOS_MVK ) + //=== VK_MVK_ios_surface === + vkCreateIOSSurfaceMVK = PFN_vkCreateIOSSurfaceMVK( vkGetInstanceProcAddr( instance, "vkCreateIOSSurfaceMVK" ) ); +#endif /*VK_USE_PLATFORM_IOS_MVK*/ + +#if defined( VK_USE_PLATFORM_MACOS_MVK ) + //=== VK_MVK_macos_surface === + vkCreateMacOSSurfaceMVK = PFN_vkCreateMacOSSurfaceMVK( vkGetInstanceProcAddr( instance, "vkCreateMacOSSurfaceMVK" ) ); +#endif /*VK_USE_PLATFORM_MACOS_MVK*/ + + //=== VK_EXT_debug_utils === + vkSetDebugUtilsObjectNameEXT = PFN_vkSetDebugUtilsObjectNameEXT( vkGetInstanceProcAddr( instance, "vkSetDebugUtilsObjectNameEXT" ) ); + vkSetDebugUtilsObjectTagEXT = PFN_vkSetDebugUtilsObjectTagEXT( vkGetInstanceProcAddr( instance, "vkSetDebugUtilsObjectTagEXT" ) ); + vkQueueBeginDebugUtilsLabelEXT = PFN_vkQueueBeginDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkQueueBeginDebugUtilsLabelEXT" ) ); + vkQueueEndDebugUtilsLabelEXT = PFN_vkQueueEndDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkQueueEndDebugUtilsLabelEXT" ) ); + vkQueueInsertDebugUtilsLabelEXT = PFN_vkQueueInsertDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkQueueInsertDebugUtilsLabelEXT" ) ); + vkCmdBeginDebugUtilsLabelEXT = PFN_vkCmdBeginDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkCmdBeginDebugUtilsLabelEXT" ) ); + vkCmdEndDebugUtilsLabelEXT = PFN_vkCmdEndDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkCmdEndDebugUtilsLabelEXT" ) ); + vkCmdInsertDebugUtilsLabelEXT = PFN_vkCmdInsertDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkCmdInsertDebugUtilsLabelEXT" ) ); + vkCreateDebugUtilsMessengerEXT = PFN_vkCreateDebugUtilsMessengerEXT( vkGetInstanceProcAddr( instance, "vkCreateDebugUtilsMessengerEXT" ) ); + vkDestroyDebugUtilsMessengerEXT = PFN_vkDestroyDebugUtilsMessengerEXT( vkGetInstanceProcAddr( instance, "vkDestroyDebugUtilsMessengerEXT" ) ); + vkSubmitDebugUtilsMessageEXT = PFN_vkSubmitDebugUtilsMessageEXT( vkGetInstanceProcAddr( instance, "vkSubmitDebugUtilsMessageEXT" ) ); + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_ANDROID_external_memory_android_hardware_buffer === + vkGetAndroidHardwareBufferPropertiesANDROID = + PFN_vkGetAndroidHardwareBufferPropertiesANDROID( vkGetInstanceProcAddr( instance, "vkGetAndroidHardwareBufferPropertiesANDROID" ) ); + vkGetMemoryAndroidHardwareBufferANDROID = + PFN_vkGetMemoryAndroidHardwareBufferANDROID( vkGetInstanceProcAddr( instance, "vkGetMemoryAndroidHardwareBufferANDROID" ) ); #endif /*VK_USE_PLATFORM_ANDROID_KHR*/ #if defined( VK_ENABLE_BETA_EXTENSIONS ) - //=== VK_AMDX_shader_enqueue === - vkCreateExecutionGraphPipelinesAMDX = 
PFN_vkCreateExecutionGraphPipelinesAMDX( vkGetDeviceProcAddr( device, "vkCreateExecutionGraphPipelinesAMDX" ) ); - vkGetExecutionGraphPipelineScratchSizeAMDX = - PFN_vkGetExecutionGraphPipelineScratchSizeAMDX( vkGetDeviceProcAddr( device, "vkGetExecutionGraphPipelineScratchSizeAMDX" ) ); - vkGetExecutionGraphPipelineNodeIndexAMDX = - PFN_vkGetExecutionGraphPipelineNodeIndexAMDX( vkGetDeviceProcAddr( device, "vkGetExecutionGraphPipelineNodeIndexAMDX" ) ); - vkCmdInitializeGraphScratchMemoryAMDX = - PFN_vkCmdInitializeGraphScratchMemoryAMDX( vkGetDeviceProcAddr( device, "vkCmdInitializeGraphScratchMemoryAMDX" ) ); - vkCmdDispatchGraphAMDX = PFN_vkCmdDispatchGraphAMDX( vkGetDeviceProcAddr( device, "vkCmdDispatchGraphAMDX" ) ); - vkCmdDispatchGraphIndirectAMDX = PFN_vkCmdDispatchGraphIndirectAMDX( vkGetDeviceProcAddr( device, "vkCmdDispatchGraphIndirectAMDX" ) ); - vkCmdDispatchGraphIndirectCountAMDX = PFN_vkCmdDispatchGraphIndirectCountAMDX( vkGetDeviceProcAddr( device, "vkCmdDispatchGraphIndirectCountAMDX" ) ); + //=== VK_AMDX_shader_enqueue === + vkCreateExecutionGraphPipelinesAMDX = + PFN_vkCreateExecutionGraphPipelinesAMDX( vkGetInstanceProcAddr( instance, "vkCreateExecutionGraphPipelinesAMDX" ) ); + vkGetExecutionGraphPipelineScratchSizeAMDX = + PFN_vkGetExecutionGraphPipelineScratchSizeAMDX( vkGetInstanceProcAddr( instance, "vkGetExecutionGraphPipelineScratchSizeAMDX" ) ); + vkGetExecutionGraphPipelineNodeIndexAMDX = + PFN_vkGetExecutionGraphPipelineNodeIndexAMDX( vkGetInstanceProcAddr( instance, "vkGetExecutionGraphPipelineNodeIndexAMDX" ) ); + vkCmdInitializeGraphScratchMemoryAMDX = + PFN_vkCmdInitializeGraphScratchMemoryAMDX( vkGetInstanceProcAddr( instance, "vkCmdInitializeGraphScratchMemoryAMDX" ) ); + vkCmdDispatchGraphAMDX = PFN_vkCmdDispatchGraphAMDX( vkGetInstanceProcAddr( instance, "vkCmdDispatchGraphAMDX" ) ); + vkCmdDispatchGraphIndirectAMDX = PFN_vkCmdDispatchGraphIndirectAMDX( vkGetInstanceProcAddr( instance, "vkCmdDispatchGraphIndirectAMDX" ) ); + vkCmdDispatchGraphIndirectCountAMDX = + PFN_vkCmdDispatchGraphIndirectCountAMDX( vkGetInstanceProcAddr( instance, "vkCmdDispatchGraphIndirectCountAMDX" ) ); #endif /*VK_ENABLE_BETA_EXTENSIONS*/ - //=== VK_EXT_sample_locations === - vkCmdSetSampleLocationsEXT = PFN_vkCmdSetSampleLocationsEXT( vkGetDeviceProcAddr( device, "vkCmdSetSampleLocationsEXT" ) ); + //=== VK_EXT_sample_locations === + vkCmdSetSampleLocationsEXT = PFN_vkCmdSetSampleLocationsEXT( vkGetInstanceProcAddr( instance, "vkCmdSetSampleLocationsEXT" ) ); + vkGetPhysicalDeviceMultisamplePropertiesEXT = + PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMultisamplePropertiesEXT" ) ); - //=== VK_KHR_get_memory_requirements2 === - vkGetImageMemoryRequirements2KHR = PFN_vkGetImageMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements2KHR" ) ); - if ( !vkGetImageMemoryRequirements2 ) - vkGetImageMemoryRequirements2 = vkGetImageMemoryRequirements2KHR; - vkGetBufferMemoryRequirements2KHR = PFN_vkGetBufferMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements2KHR" ) ); - if ( !vkGetBufferMemoryRequirements2 ) - vkGetBufferMemoryRequirements2 = vkGetBufferMemoryRequirements2KHR; - vkGetImageSparseMemoryRequirements2KHR = - PFN_vkGetImageSparseMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements2KHR" ) ); - if ( !vkGetImageSparseMemoryRequirements2 ) - vkGetImageSparseMemoryRequirements2 = vkGetImageSparseMemoryRequirements2KHR; + 
//=== VK_KHR_get_memory_requirements2 === + vkGetImageMemoryRequirements2KHR = PFN_vkGetImageMemoryRequirements2KHR( vkGetInstanceProcAddr( instance, "vkGetImageMemoryRequirements2KHR" ) ); + if ( !vkGetImageMemoryRequirements2 ) + vkGetImageMemoryRequirements2 = vkGetImageMemoryRequirements2KHR; + vkGetBufferMemoryRequirements2KHR = PFN_vkGetBufferMemoryRequirements2KHR( vkGetInstanceProcAddr( instance, "vkGetBufferMemoryRequirements2KHR" ) ); + if ( !vkGetBufferMemoryRequirements2 ) + vkGetBufferMemoryRequirements2 = vkGetBufferMemoryRequirements2KHR; + vkGetImageSparseMemoryRequirements2KHR = + PFN_vkGetImageSparseMemoryRequirements2KHR( vkGetInstanceProcAddr( instance, "vkGetImageSparseMemoryRequirements2KHR" ) ); + if ( !vkGetImageSparseMemoryRequirements2 ) + vkGetImageSparseMemoryRequirements2 = vkGetImageSparseMemoryRequirements2KHR; - //=== VK_KHR_acceleration_structure === - vkCreateAccelerationStructureKHR = PFN_vkCreateAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCreateAccelerationStructureKHR" ) ); - vkDestroyAccelerationStructureKHR = PFN_vkDestroyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkDestroyAccelerationStructureKHR" ) ); - vkCmdBuildAccelerationStructuresKHR = PFN_vkCmdBuildAccelerationStructuresKHR( vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructuresKHR" ) ); - vkCmdBuildAccelerationStructuresIndirectKHR = - PFN_vkCmdBuildAccelerationStructuresIndirectKHR( vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructuresIndirectKHR" ) ); - vkBuildAccelerationStructuresKHR = PFN_vkBuildAccelerationStructuresKHR( vkGetDeviceProcAddr( device, "vkBuildAccelerationStructuresKHR" ) ); - vkCopyAccelerationStructureKHR = PFN_vkCopyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCopyAccelerationStructureKHR" ) ); - vkCopyAccelerationStructureToMemoryKHR = - PFN_vkCopyAccelerationStructureToMemoryKHR( vkGetDeviceProcAddr( device, "vkCopyAccelerationStructureToMemoryKHR" ) ); - vkCopyMemoryToAccelerationStructureKHR = - PFN_vkCopyMemoryToAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCopyMemoryToAccelerationStructureKHR" ) ); - vkWriteAccelerationStructuresPropertiesKHR = - PFN_vkWriteAccelerationStructuresPropertiesKHR( vkGetDeviceProcAddr( device, "vkWriteAccelerationStructuresPropertiesKHR" ) ); - vkCmdCopyAccelerationStructureKHR = PFN_vkCmdCopyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureKHR" ) ); - vkCmdCopyAccelerationStructureToMemoryKHR = - PFN_vkCmdCopyAccelerationStructureToMemoryKHR( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureToMemoryKHR" ) ); - vkCmdCopyMemoryToAccelerationStructureKHR = - PFN_vkCmdCopyMemoryToAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCmdCopyMemoryToAccelerationStructureKHR" ) ); - vkGetAccelerationStructureDeviceAddressKHR = - PFN_vkGetAccelerationStructureDeviceAddressKHR( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureDeviceAddressKHR" ) ); - vkCmdWriteAccelerationStructuresPropertiesKHR = - PFN_vkCmdWriteAccelerationStructuresPropertiesKHR( vkGetDeviceProcAddr( device, "vkCmdWriteAccelerationStructuresPropertiesKHR" ) ); - vkGetDeviceAccelerationStructureCompatibilityKHR = - PFN_vkGetDeviceAccelerationStructureCompatibilityKHR( vkGetDeviceProcAddr( device, "vkGetDeviceAccelerationStructureCompatibilityKHR" ) ); - vkGetAccelerationStructureBuildSizesKHR = - PFN_vkGetAccelerationStructureBuildSizesKHR( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureBuildSizesKHR" ) ); + //=== 
VK_KHR_acceleration_structure === + vkCreateAccelerationStructureKHR = PFN_vkCreateAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCreateAccelerationStructureKHR" ) ); + vkDestroyAccelerationStructureKHR = PFN_vkDestroyAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkDestroyAccelerationStructureKHR" ) ); + vkCmdBuildAccelerationStructuresKHR = + PFN_vkCmdBuildAccelerationStructuresKHR( vkGetInstanceProcAddr( instance, "vkCmdBuildAccelerationStructuresKHR" ) ); + vkCmdBuildAccelerationStructuresIndirectKHR = + PFN_vkCmdBuildAccelerationStructuresIndirectKHR( vkGetInstanceProcAddr( instance, "vkCmdBuildAccelerationStructuresIndirectKHR" ) ); + vkBuildAccelerationStructuresKHR = PFN_vkBuildAccelerationStructuresKHR( vkGetInstanceProcAddr( instance, "vkBuildAccelerationStructuresKHR" ) ); + vkCopyAccelerationStructureKHR = PFN_vkCopyAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCopyAccelerationStructureKHR" ) ); + vkCopyAccelerationStructureToMemoryKHR = + PFN_vkCopyAccelerationStructureToMemoryKHR( vkGetInstanceProcAddr( instance, "vkCopyAccelerationStructureToMemoryKHR" ) ); + vkCopyMemoryToAccelerationStructureKHR = + PFN_vkCopyMemoryToAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCopyMemoryToAccelerationStructureKHR" ) ); + vkWriteAccelerationStructuresPropertiesKHR = + PFN_vkWriteAccelerationStructuresPropertiesKHR( vkGetInstanceProcAddr( instance, "vkWriteAccelerationStructuresPropertiesKHR" ) ); + vkCmdCopyAccelerationStructureKHR = PFN_vkCmdCopyAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCmdCopyAccelerationStructureKHR" ) ); + vkCmdCopyAccelerationStructureToMemoryKHR = + PFN_vkCmdCopyAccelerationStructureToMemoryKHR( vkGetInstanceProcAddr( instance, "vkCmdCopyAccelerationStructureToMemoryKHR" ) ); + vkCmdCopyMemoryToAccelerationStructureKHR = + PFN_vkCmdCopyMemoryToAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCmdCopyMemoryToAccelerationStructureKHR" ) ); + vkGetAccelerationStructureDeviceAddressKHR = + PFN_vkGetAccelerationStructureDeviceAddressKHR( vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureDeviceAddressKHR" ) ); + vkCmdWriteAccelerationStructuresPropertiesKHR = + PFN_vkCmdWriteAccelerationStructuresPropertiesKHR( vkGetInstanceProcAddr( instance, "vkCmdWriteAccelerationStructuresPropertiesKHR" ) ); + vkGetDeviceAccelerationStructureCompatibilityKHR = + PFN_vkGetDeviceAccelerationStructureCompatibilityKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceAccelerationStructureCompatibilityKHR" ) ); + vkGetAccelerationStructureBuildSizesKHR = + PFN_vkGetAccelerationStructureBuildSizesKHR( vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureBuildSizesKHR" ) ); - //=== VK_KHR_ray_tracing_pipeline === - vkCmdTraceRaysKHR = PFN_vkCmdTraceRaysKHR( vkGetDeviceProcAddr( device, "vkCmdTraceRaysKHR" ) ); - vkCreateRayTracingPipelinesKHR = PFN_vkCreateRayTracingPipelinesKHR( vkGetDeviceProcAddr( device, "vkCreateRayTracingPipelinesKHR" ) ); - vkGetRayTracingShaderGroupHandlesKHR = PFN_vkGetRayTracingShaderGroupHandlesKHR( vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupHandlesKHR" ) ); - vkGetRayTracingCaptureReplayShaderGroupHandlesKHR = - PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( vkGetDeviceProcAddr( device, "vkGetRayTracingCaptureReplayShaderGroupHandlesKHR" ) ); - vkCmdTraceRaysIndirectKHR = PFN_vkCmdTraceRaysIndirectKHR( vkGetDeviceProcAddr( device, "vkCmdTraceRaysIndirectKHR" ) ); - vkGetRayTracingShaderGroupStackSizeKHR = - 
PFN_vkGetRayTracingShaderGroupStackSizeKHR( vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupStackSizeKHR" ) ); - vkCmdSetRayTracingPipelineStackSizeKHR = - PFN_vkCmdSetRayTracingPipelineStackSizeKHR( vkGetDeviceProcAddr( device, "vkCmdSetRayTracingPipelineStackSizeKHR" ) ); + //=== VK_KHR_ray_tracing_pipeline === + vkCmdTraceRaysKHR = PFN_vkCmdTraceRaysKHR( vkGetInstanceProcAddr( instance, "vkCmdTraceRaysKHR" ) ); + vkCreateRayTracingPipelinesKHR = PFN_vkCreateRayTracingPipelinesKHR( vkGetInstanceProcAddr( instance, "vkCreateRayTracingPipelinesKHR" ) ); + vkGetRayTracingShaderGroupHandlesKHR = + PFN_vkGetRayTracingShaderGroupHandlesKHR( vkGetInstanceProcAddr( instance, "vkGetRayTracingShaderGroupHandlesKHR" ) ); + vkGetRayTracingCaptureReplayShaderGroupHandlesKHR = + PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( vkGetInstanceProcAddr( instance, "vkGetRayTracingCaptureReplayShaderGroupHandlesKHR" ) ); + vkCmdTraceRaysIndirectKHR = PFN_vkCmdTraceRaysIndirectKHR( vkGetInstanceProcAddr( instance, "vkCmdTraceRaysIndirectKHR" ) ); + vkGetRayTracingShaderGroupStackSizeKHR = + PFN_vkGetRayTracingShaderGroupStackSizeKHR( vkGetInstanceProcAddr( instance, "vkGetRayTracingShaderGroupStackSizeKHR" ) ); + vkCmdSetRayTracingPipelineStackSizeKHR = + PFN_vkCmdSetRayTracingPipelineStackSizeKHR( vkGetInstanceProcAddr( instance, "vkCmdSetRayTracingPipelineStackSizeKHR" ) ); - //=== VK_KHR_sampler_ycbcr_conversion === - vkCreateSamplerYcbcrConversionKHR = PFN_vkCreateSamplerYcbcrConversionKHR( vkGetDeviceProcAddr( device, "vkCreateSamplerYcbcrConversionKHR" ) ); - if ( !vkCreateSamplerYcbcrConversion ) - vkCreateSamplerYcbcrConversion = vkCreateSamplerYcbcrConversionKHR; - vkDestroySamplerYcbcrConversionKHR = PFN_vkDestroySamplerYcbcrConversionKHR( vkGetDeviceProcAddr( device, "vkDestroySamplerYcbcrConversionKHR" ) ); - if ( !vkDestroySamplerYcbcrConversion ) - vkDestroySamplerYcbcrConversion = vkDestroySamplerYcbcrConversionKHR; + //=== VK_KHR_sampler_ycbcr_conversion === + vkCreateSamplerYcbcrConversionKHR = PFN_vkCreateSamplerYcbcrConversionKHR( vkGetInstanceProcAddr( instance, "vkCreateSamplerYcbcrConversionKHR" ) ); + if ( !vkCreateSamplerYcbcrConversion ) + vkCreateSamplerYcbcrConversion = vkCreateSamplerYcbcrConversionKHR; + vkDestroySamplerYcbcrConversionKHR = PFN_vkDestroySamplerYcbcrConversionKHR( vkGetInstanceProcAddr( instance, "vkDestroySamplerYcbcrConversionKHR" ) ); + if ( !vkDestroySamplerYcbcrConversion ) + vkDestroySamplerYcbcrConversion = vkDestroySamplerYcbcrConversionKHR; - //=== VK_KHR_bind_memory2 === - vkBindBufferMemory2KHR = PFN_vkBindBufferMemory2KHR( vkGetDeviceProcAddr( device, "vkBindBufferMemory2KHR" ) ); - if ( !vkBindBufferMemory2 ) - vkBindBufferMemory2 = vkBindBufferMemory2KHR; - vkBindImageMemory2KHR = PFN_vkBindImageMemory2KHR( vkGetDeviceProcAddr( device, "vkBindImageMemory2KHR" ) ); - if ( !vkBindImageMemory2 ) - vkBindImageMemory2 = vkBindImageMemory2KHR; + //=== VK_KHR_bind_memory2 === + vkBindBufferMemory2KHR = PFN_vkBindBufferMemory2KHR( vkGetInstanceProcAddr( instance, "vkBindBufferMemory2KHR" ) ); + if ( !vkBindBufferMemory2 ) + vkBindBufferMemory2 = vkBindBufferMemory2KHR; + vkBindImageMemory2KHR = PFN_vkBindImageMemory2KHR( vkGetInstanceProcAddr( instance, "vkBindImageMemory2KHR" ) ); + if ( !vkBindImageMemory2 ) + vkBindImageMemory2 = vkBindImageMemory2KHR; - //=== VK_EXT_image_drm_format_modifier === - vkGetImageDrmFormatModifierPropertiesEXT = - PFN_vkGetImageDrmFormatModifierPropertiesEXT( vkGetDeviceProcAddr( device, 
"vkGetImageDrmFormatModifierPropertiesEXT" ) ); + //=== VK_EXT_image_drm_format_modifier === + vkGetImageDrmFormatModifierPropertiesEXT = + PFN_vkGetImageDrmFormatModifierPropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetImageDrmFormatModifierPropertiesEXT" ) ); - //=== VK_EXT_validation_cache === - vkCreateValidationCacheEXT = PFN_vkCreateValidationCacheEXT( vkGetDeviceProcAddr( device, "vkCreateValidationCacheEXT" ) ); - vkDestroyValidationCacheEXT = PFN_vkDestroyValidationCacheEXT( vkGetDeviceProcAddr( device, "vkDestroyValidationCacheEXT" ) ); - vkMergeValidationCachesEXT = PFN_vkMergeValidationCachesEXT( vkGetDeviceProcAddr( device, "vkMergeValidationCachesEXT" ) ); - vkGetValidationCacheDataEXT = PFN_vkGetValidationCacheDataEXT( vkGetDeviceProcAddr( device, "vkGetValidationCacheDataEXT" ) ); + //=== VK_EXT_validation_cache === + vkCreateValidationCacheEXT = PFN_vkCreateValidationCacheEXT( vkGetInstanceProcAddr( instance, "vkCreateValidationCacheEXT" ) ); + vkDestroyValidationCacheEXT = PFN_vkDestroyValidationCacheEXT( vkGetInstanceProcAddr( instance, "vkDestroyValidationCacheEXT" ) ); + vkMergeValidationCachesEXT = PFN_vkMergeValidationCachesEXT( vkGetInstanceProcAddr( instance, "vkMergeValidationCachesEXT" ) ); + vkGetValidationCacheDataEXT = PFN_vkGetValidationCacheDataEXT( vkGetInstanceProcAddr( instance, "vkGetValidationCacheDataEXT" ) ); - //=== VK_NV_shading_rate_image === - vkCmdBindShadingRateImageNV = PFN_vkCmdBindShadingRateImageNV( vkGetDeviceProcAddr( device, "vkCmdBindShadingRateImageNV" ) ); - vkCmdSetViewportShadingRatePaletteNV = PFN_vkCmdSetViewportShadingRatePaletteNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportShadingRatePaletteNV" ) ); - vkCmdSetCoarseSampleOrderNV = PFN_vkCmdSetCoarseSampleOrderNV( vkGetDeviceProcAddr( device, "vkCmdSetCoarseSampleOrderNV" ) ); + //=== VK_NV_shading_rate_image === + vkCmdBindShadingRateImageNV = PFN_vkCmdBindShadingRateImageNV( vkGetInstanceProcAddr( instance, "vkCmdBindShadingRateImageNV" ) ); + vkCmdSetViewportShadingRatePaletteNV = + PFN_vkCmdSetViewportShadingRatePaletteNV( vkGetInstanceProcAddr( instance, "vkCmdSetViewportShadingRatePaletteNV" ) ); + vkCmdSetCoarseSampleOrderNV = PFN_vkCmdSetCoarseSampleOrderNV( vkGetInstanceProcAddr( instance, "vkCmdSetCoarseSampleOrderNV" ) ); - //=== VK_NV_ray_tracing === - vkCreateAccelerationStructureNV = PFN_vkCreateAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCreateAccelerationStructureNV" ) ); - vkDestroyAccelerationStructureNV = PFN_vkDestroyAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkDestroyAccelerationStructureNV" ) ); - vkGetAccelerationStructureMemoryRequirementsNV = - PFN_vkGetAccelerationStructureMemoryRequirementsNV( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureMemoryRequirementsNV" ) ); - vkBindAccelerationStructureMemoryNV = PFN_vkBindAccelerationStructureMemoryNV( vkGetDeviceProcAddr( device, "vkBindAccelerationStructureMemoryNV" ) ); - vkCmdBuildAccelerationStructureNV = PFN_vkCmdBuildAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructureNV" ) ); - vkCmdCopyAccelerationStructureNV = PFN_vkCmdCopyAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureNV" ) ); - vkCmdTraceRaysNV = PFN_vkCmdTraceRaysNV( vkGetDeviceProcAddr( device, "vkCmdTraceRaysNV" ) ); - vkCreateRayTracingPipelinesNV = PFN_vkCreateRayTracingPipelinesNV( vkGetDeviceProcAddr( device, "vkCreateRayTracingPipelinesNV" ) ); - vkGetRayTracingShaderGroupHandlesNV = PFN_vkGetRayTracingShaderGroupHandlesNV( 
vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupHandlesNV" ) ); - if ( !vkGetRayTracingShaderGroupHandlesKHR ) - vkGetRayTracingShaderGroupHandlesKHR = vkGetRayTracingShaderGroupHandlesNV; - vkGetAccelerationStructureHandleNV = PFN_vkGetAccelerationStructureHandleNV( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureHandleNV" ) ); - vkCmdWriteAccelerationStructuresPropertiesNV = - PFN_vkCmdWriteAccelerationStructuresPropertiesNV( vkGetDeviceProcAddr( device, "vkCmdWriteAccelerationStructuresPropertiesNV" ) ); - vkCompileDeferredNV = PFN_vkCompileDeferredNV( vkGetDeviceProcAddr( device, "vkCompileDeferredNV" ) ); + //=== VK_NV_ray_tracing === + vkCreateAccelerationStructureNV = PFN_vkCreateAccelerationStructureNV( vkGetInstanceProcAddr( instance, "vkCreateAccelerationStructureNV" ) ); + vkDestroyAccelerationStructureNV = PFN_vkDestroyAccelerationStructureNV( vkGetInstanceProcAddr( instance, "vkDestroyAccelerationStructureNV" ) ); + vkGetAccelerationStructureMemoryRequirementsNV = + PFN_vkGetAccelerationStructureMemoryRequirementsNV( vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureMemoryRequirementsNV" ) ); + vkBindAccelerationStructureMemoryNV = + PFN_vkBindAccelerationStructureMemoryNV( vkGetInstanceProcAddr( instance, "vkBindAccelerationStructureMemoryNV" ) ); + vkCmdBuildAccelerationStructureNV = PFN_vkCmdBuildAccelerationStructureNV( vkGetInstanceProcAddr( instance, "vkCmdBuildAccelerationStructureNV" ) ); + vkCmdCopyAccelerationStructureNV = PFN_vkCmdCopyAccelerationStructureNV( vkGetInstanceProcAddr( instance, "vkCmdCopyAccelerationStructureNV" ) ); + vkCmdTraceRaysNV = PFN_vkCmdTraceRaysNV( vkGetInstanceProcAddr( instance, "vkCmdTraceRaysNV" ) ); + vkCreateRayTracingPipelinesNV = PFN_vkCreateRayTracingPipelinesNV( vkGetInstanceProcAddr( instance, "vkCreateRayTracingPipelinesNV" ) ); + vkGetRayTracingShaderGroupHandlesNV = + PFN_vkGetRayTracingShaderGroupHandlesNV( vkGetInstanceProcAddr( instance, "vkGetRayTracingShaderGroupHandlesNV" ) ); + if ( !vkGetRayTracingShaderGroupHandlesKHR ) + vkGetRayTracingShaderGroupHandlesKHR = vkGetRayTracingShaderGroupHandlesNV; + vkGetAccelerationStructureHandleNV = PFN_vkGetAccelerationStructureHandleNV( vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureHandleNV" ) ); + vkCmdWriteAccelerationStructuresPropertiesNV = + PFN_vkCmdWriteAccelerationStructuresPropertiesNV( vkGetInstanceProcAddr( instance, "vkCmdWriteAccelerationStructuresPropertiesNV" ) ); + vkCompileDeferredNV = PFN_vkCompileDeferredNV( vkGetInstanceProcAddr( instance, "vkCompileDeferredNV" ) ); - //=== VK_KHR_maintenance3 === - vkGetDescriptorSetLayoutSupportKHR = PFN_vkGetDescriptorSetLayoutSupportKHR( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutSupportKHR" ) ); - if ( !vkGetDescriptorSetLayoutSupport ) - vkGetDescriptorSetLayoutSupport = vkGetDescriptorSetLayoutSupportKHR; + //=== VK_KHR_maintenance3 === + vkGetDescriptorSetLayoutSupportKHR = PFN_vkGetDescriptorSetLayoutSupportKHR( vkGetInstanceProcAddr( instance, "vkGetDescriptorSetLayoutSupportKHR" ) ); + if ( !vkGetDescriptorSetLayoutSupport ) + vkGetDescriptorSetLayoutSupport = vkGetDescriptorSetLayoutSupportKHR; - //=== VK_KHR_draw_indirect_count === - vkCmdDrawIndirectCountKHR = PFN_vkCmdDrawIndirectCountKHR( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCountKHR" ) ); - if ( !vkCmdDrawIndirectCount ) - vkCmdDrawIndirectCount = vkCmdDrawIndirectCountKHR; - vkCmdDrawIndexedIndirectCountKHR = PFN_vkCmdDrawIndexedIndirectCountKHR( vkGetDeviceProcAddr( device, 
"vkCmdDrawIndexedIndirectCountKHR" ) ); - if ( !vkCmdDrawIndexedIndirectCount ) - vkCmdDrawIndexedIndirectCount = vkCmdDrawIndexedIndirectCountKHR; + //=== VK_KHR_draw_indirect_count === + vkCmdDrawIndirectCountKHR = PFN_vkCmdDrawIndirectCountKHR( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectCountKHR" ) ); + if ( !vkCmdDrawIndirectCount ) + vkCmdDrawIndirectCount = vkCmdDrawIndirectCountKHR; + vkCmdDrawIndexedIndirectCountKHR = PFN_vkCmdDrawIndexedIndirectCountKHR( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirectCountKHR" ) ); + if ( !vkCmdDrawIndexedIndirectCount ) + vkCmdDrawIndexedIndirectCount = vkCmdDrawIndexedIndirectCountKHR; - //=== VK_EXT_external_memory_host === - vkGetMemoryHostPointerPropertiesEXT = PFN_vkGetMemoryHostPointerPropertiesEXT( vkGetDeviceProcAddr( device, "vkGetMemoryHostPointerPropertiesEXT" ) ); + //=== VK_EXT_external_memory_host === + vkGetMemoryHostPointerPropertiesEXT = + PFN_vkGetMemoryHostPointerPropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetMemoryHostPointerPropertiesEXT" ) ); - //=== VK_AMD_buffer_marker === - vkCmdWriteBufferMarkerAMD = PFN_vkCmdWriteBufferMarkerAMD( vkGetDeviceProcAddr( device, "vkCmdWriteBufferMarkerAMD" ) ); + //=== VK_AMD_buffer_marker === + vkCmdWriteBufferMarkerAMD = PFN_vkCmdWriteBufferMarkerAMD( vkGetInstanceProcAddr( instance, "vkCmdWriteBufferMarkerAMD" ) ); + vkCmdWriteBufferMarker2AMD = PFN_vkCmdWriteBufferMarker2AMD( vkGetInstanceProcAddr( instance, "vkCmdWriteBufferMarker2AMD" ) ); - //=== VK_EXT_calibrated_timestamps === - vkGetCalibratedTimestampsEXT = PFN_vkGetCalibratedTimestampsEXT( vkGetDeviceProcAddr( device, "vkGetCalibratedTimestampsEXT" ) ); - if ( !vkGetCalibratedTimestampsKHR ) - vkGetCalibratedTimestampsKHR = vkGetCalibratedTimestampsEXT; + //=== VK_EXT_calibrated_timestamps === + vkGetPhysicalDeviceCalibrateableTimeDomainsEXT = + PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCalibrateableTimeDomainsEXT" ) ); + if ( !vkGetPhysicalDeviceCalibrateableTimeDomainsKHR ) + vkGetPhysicalDeviceCalibrateableTimeDomainsKHR = vkGetPhysicalDeviceCalibrateableTimeDomainsEXT; + vkGetCalibratedTimestampsEXT = PFN_vkGetCalibratedTimestampsEXT( vkGetInstanceProcAddr( instance, "vkGetCalibratedTimestampsEXT" ) ); + if ( !vkGetCalibratedTimestampsKHR ) + vkGetCalibratedTimestampsKHR = vkGetCalibratedTimestampsEXT; - //=== VK_NV_mesh_shader === - vkCmdDrawMeshTasksNV = PFN_vkCmdDrawMeshTasksNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksNV" ) ); - vkCmdDrawMeshTasksIndirectNV = PFN_vkCmdDrawMeshTasksIndirectNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectNV" ) ); - vkCmdDrawMeshTasksIndirectCountNV = PFN_vkCmdDrawMeshTasksIndirectCountNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectCountNV" ) ); + //=== VK_NV_mesh_shader === + vkCmdDrawMeshTasksNV = PFN_vkCmdDrawMeshTasksNV( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksNV" ) ); + vkCmdDrawMeshTasksIndirectNV = PFN_vkCmdDrawMeshTasksIndirectNV( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksIndirectNV" ) ); + vkCmdDrawMeshTasksIndirectCountNV = PFN_vkCmdDrawMeshTasksIndirectCountNV( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksIndirectCountNV" ) ); - //=== VK_NV_scissor_exclusive === - vkCmdSetExclusiveScissorEnableNV = PFN_vkCmdSetExclusiveScissorEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetExclusiveScissorEnableNV" ) ); - vkCmdSetExclusiveScissorNV = PFN_vkCmdSetExclusiveScissorNV( vkGetDeviceProcAddr( device, "vkCmdSetExclusiveScissorNV" ) ); + 
//=== VK_NV_scissor_exclusive === + vkCmdSetExclusiveScissorEnableNV = PFN_vkCmdSetExclusiveScissorEnableNV( vkGetInstanceProcAddr( instance, "vkCmdSetExclusiveScissorEnableNV" ) ); + vkCmdSetExclusiveScissorNV = PFN_vkCmdSetExclusiveScissorNV( vkGetInstanceProcAddr( instance, "vkCmdSetExclusiveScissorNV" ) ); - //=== VK_NV_device_diagnostic_checkpoints === - vkCmdSetCheckpointNV = PFN_vkCmdSetCheckpointNV( vkGetDeviceProcAddr( device, "vkCmdSetCheckpointNV" ) ); - vkGetQueueCheckpointDataNV = PFN_vkGetQueueCheckpointDataNV( vkGetDeviceProcAddr( device, "vkGetQueueCheckpointDataNV" ) ); + //=== VK_NV_device_diagnostic_checkpoints === + vkCmdSetCheckpointNV = PFN_vkCmdSetCheckpointNV( vkGetInstanceProcAddr( instance, "vkCmdSetCheckpointNV" ) ); + vkGetQueueCheckpointDataNV = PFN_vkGetQueueCheckpointDataNV( vkGetInstanceProcAddr( instance, "vkGetQueueCheckpointDataNV" ) ); + vkGetQueueCheckpointData2NV = PFN_vkGetQueueCheckpointData2NV( vkGetInstanceProcAddr( instance, "vkGetQueueCheckpointData2NV" ) ); - //=== VK_KHR_timeline_semaphore === - vkGetSemaphoreCounterValueKHR = PFN_vkGetSemaphoreCounterValueKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreCounterValueKHR" ) ); - if ( !vkGetSemaphoreCounterValue ) - vkGetSemaphoreCounterValue = vkGetSemaphoreCounterValueKHR; - vkWaitSemaphoresKHR = PFN_vkWaitSemaphoresKHR( vkGetDeviceProcAddr( device, "vkWaitSemaphoresKHR" ) ); - if ( !vkWaitSemaphores ) - vkWaitSemaphores = vkWaitSemaphoresKHR; - vkSignalSemaphoreKHR = PFN_vkSignalSemaphoreKHR( vkGetDeviceProcAddr( device, "vkSignalSemaphoreKHR" ) ); - if ( !vkSignalSemaphore ) - vkSignalSemaphore = vkSignalSemaphoreKHR; + //=== VK_KHR_timeline_semaphore === + vkGetSemaphoreCounterValueKHR = PFN_vkGetSemaphoreCounterValueKHR( vkGetInstanceProcAddr( instance, "vkGetSemaphoreCounterValueKHR" ) ); + if ( !vkGetSemaphoreCounterValue ) + vkGetSemaphoreCounterValue = vkGetSemaphoreCounterValueKHR; + vkWaitSemaphoresKHR = PFN_vkWaitSemaphoresKHR( vkGetInstanceProcAddr( instance, "vkWaitSemaphoresKHR" ) ); + if ( !vkWaitSemaphores ) + vkWaitSemaphores = vkWaitSemaphoresKHR; + vkSignalSemaphoreKHR = PFN_vkSignalSemaphoreKHR( vkGetInstanceProcAddr( instance, "vkSignalSemaphoreKHR" ) ); + if ( !vkSignalSemaphore ) + vkSignalSemaphore = vkSignalSemaphoreKHR; - //=== VK_INTEL_performance_query === - vkInitializePerformanceApiINTEL = PFN_vkInitializePerformanceApiINTEL( vkGetDeviceProcAddr( device, "vkInitializePerformanceApiINTEL" ) ); - vkUninitializePerformanceApiINTEL = PFN_vkUninitializePerformanceApiINTEL( vkGetDeviceProcAddr( device, "vkUninitializePerformanceApiINTEL" ) ); - vkCmdSetPerformanceMarkerINTEL = PFN_vkCmdSetPerformanceMarkerINTEL( vkGetDeviceProcAddr( device, "vkCmdSetPerformanceMarkerINTEL" ) ); - vkCmdSetPerformanceStreamMarkerINTEL = PFN_vkCmdSetPerformanceStreamMarkerINTEL( vkGetDeviceProcAddr( device, "vkCmdSetPerformanceStreamMarkerINTEL" ) ); - vkCmdSetPerformanceOverrideINTEL = PFN_vkCmdSetPerformanceOverrideINTEL( vkGetDeviceProcAddr( device, "vkCmdSetPerformanceOverrideINTEL" ) ); - vkAcquirePerformanceConfigurationINTEL = - PFN_vkAcquirePerformanceConfigurationINTEL( vkGetDeviceProcAddr( device, "vkAcquirePerformanceConfigurationINTEL" ) ); - vkReleasePerformanceConfigurationINTEL = - PFN_vkReleasePerformanceConfigurationINTEL( vkGetDeviceProcAddr( device, "vkReleasePerformanceConfigurationINTEL" ) ); - vkQueueSetPerformanceConfigurationINTEL = - PFN_vkQueueSetPerformanceConfigurationINTEL( vkGetDeviceProcAddr( device, "vkQueueSetPerformanceConfigurationINTEL" ) ); - 
vkGetPerformanceParameterINTEL = PFN_vkGetPerformanceParameterINTEL( vkGetDeviceProcAddr( device, "vkGetPerformanceParameterINTEL" ) ); + //=== VK_INTEL_performance_query === + vkInitializePerformanceApiINTEL = PFN_vkInitializePerformanceApiINTEL( vkGetInstanceProcAddr( instance, "vkInitializePerformanceApiINTEL" ) ); + vkUninitializePerformanceApiINTEL = PFN_vkUninitializePerformanceApiINTEL( vkGetInstanceProcAddr( instance, "vkUninitializePerformanceApiINTEL" ) ); + vkCmdSetPerformanceMarkerINTEL = PFN_vkCmdSetPerformanceMarkerINTEL( vkGetInstanceProcAddr( instance, "vkCmdSetPerformanceMarkerINTEL" ) ); + vkCmdSetPerformanceStreamMarkerINTEL = + PFN_vkCmdSetPerformanceStreamMarkerINTEL( vkGetInstanceProcAddr( instance, "vkCmdSetPerformanceStreamMarkerINTEL" ) ); + vkCmdSetPerformanceOverrideINTEL = PFN_vkCmdSetPerformanceOverrideINTEL( vkGetInstanceProcAddr( instance, "vkCmdSetPerformanceOverrideINTEL" ) ); + vkAcquirePerformanceConfigurationINTEL = + PFN_vkAcquirePerformanceConfigurationINTEL( vkGetInstanceProcAddr( instance, "vkAcquirePerformanceConfigurationINTEL" ) ); + vkReleasePerformanceConfigurationINTEL = + PFN_vkReleasePerformanceConfigurationINTEL( vkGetInstanceProcAddr( instance, "vkReleasePerformanceConfigurationINTEL" ) ); + vkQueueSetPerformanceConfigurationINTEL = + PFN_vkQueueSetPerformanceConfigurationINTEL( vkGetInstanceProcAddr( instance, "vkQueueSetPerformanceConfigurationINTEL" ) ); + vkGetPerformanceParameterINTEL = PFN_vkGetPerformanceParameterINTEL( vkGetInstanceProcAddr( instance, "vkGetPerformanceParameterINTEL" ) ); - //=== VK_AMD_display_native_hdr === - vkSetLocalDimmingAMD = PFN_vkSetLocalDimmingAMD( vkGetDeviceProcAddr( device, "vkSetLocalDimmingAMD" ) ); + //=== VK_AMD_display_native_hdr === + vkSetLocalDimmingAMD = PFN_vkSetLocalDimmingAMD( vkGetInstanceProcAddr( instance, "vkSetLocalDimmingAMD" ) ); - //=== VK_KHR_fragment_shading_rate === - vkCmdSetFragmentShadingRateKHR = PFN_vkCmdSetFragmentShadingRateKHR( vkGetDeviceProcAddr( device, "vkCmdSetFragmentShadingRateKHR" ) ); +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_imagepipe_surface === + vkCreateImagePipeSurfaceFUCHSIA = PFN_vkCreateImagePipeSurfaceFUCHSIA( vkGetInstanceProcAddr( instance, "vkCreateImagePipeSurfaceFUCHSIA" ) ); +#endif /*VK_USE_PLATFORM_FUCHSIA*/ - //=== VK_KHR_dynamic_rendering_local_read === - vkCmdSetRenderingAttachmentLocationsKHR = - PFN_vkCmdSetRenderingAttachmentLocationsKHR( vkGetDeviceProcAddr( device, "vkCmdSetRenderingAttachmentLocationsKHR" ) ); - vkCmdSetRenderingInputAttachmentIndicesKHR = - PFN_vkCmdSetRenderingInputAttachmentIndicesKHR( vkGetDeviceProcAddr( device, "vkCmdSetRenderingInputAttachmentIndicesKHR" ) ); +#if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_surface === + vkCreateMetalSurfaceEXT = PFN_vkCreateMetalSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateMetalSurfaceEXT" ) ); +#endif /*VK_USE_PLATFORM_METAL_EXT*/ - //=== VK_EXT_buffer_device_address === - vkGetBufferDeviceAddressEXT = PFN_vkGetBufferDeviceAddressEXT( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddressEXT" ) ); - if ( !vkGetBufferDeviceAddress ) - vkGetBufferDeviceAddress = vkGetBufferDeviceAddressEXT; + //=== VK_KHR_fragment_shading_rate === + vkGetPhysicalDeviceFragmentShadingRatesKHR = + PFN_vkGetPhysicalDeviceFragmentShadingRatesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFragmentShadingRatesKHR" ) ); + vkCmdSetFragmentShadingRateKHR = PFN_vkCmdSetFragmentShadingRateKHR( vkGetInstanceProcAddr( instance, "vkCmdSetFragmentShadingRateKHR" 
) ); - //=== VK_KHR_present_wait === - vkWaitForPresentKHR = PFN_vkWaitForPresentKHR( vkGetDeviceProcAddr( device, "vkWaitForPresentKHR" ) ); + //=== VK_KHR_dynamic_rendering_local_read === + vkCmdSetRenderingAttachmentLocationsKHR = + PFN_vkCmdSetRenderingAttachmentLocationsKHR( vkGetInstanceProcAddr( instance, "vkCmdSetRenderingAttachmentLocationsKHR" ) ); + vkCmdSetRenderingInputAttachmentIndicesKHR = + PFN_vkCmdSetRenderingInputAttachmentIndicesKHR( vkGetInstanceProcAddr( instance, "vkCmdSetRenderingInputAttachmentIndicesKHR" ) ); + + //=== VK_EXT_buffer_device_address === + vkGetBufferDeviceAddressEXT = PFN_vkGetBufferDeviceAddressEXT( vkGetInstanceProcAddr( instance, "vkGetBufferDeviceAddressEXT" ) ); + if ( !vkGetBufferDeviceAddress ) + vkGetBufferDeviceAddress = vkGetBufferDeviceAddressEXT; + + //=== VK_EXT_tooling_info === + vkGetPhysicalDeviceToolPropertiesEXT = + PFN_vkGetPhysicalDeviceToolPropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceToolPropertiesEXT" ) ); + if ( !vkGetPhysicalDeviceToolProperties ) + vkGetPhysicalDeviceToolProperties = vkGetPhysicalDeviceToolPropertiesEXT; + + //=== VK_KHR_present_wait === + vkWaitForPresentKHR = PFN_vkWaitForPresentKHR( vkGetInstanceProcAddr( instance, "vkWaitForPresentKHR" ) ); + + //=== VK_NV_cooperative_matrix === + vkGetPhysicalDeviceCooperativeMatrixPropertiesNV = + PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCooperativeMatrixPropertiesNV" ) ); + + //=== VK_NV_coverage_reduction_mode === + vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV = PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV" ) ); #if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_EXT_full_screen_exclusive === - vkAcquireFullScreenExclusiveModeEXT = PFN_vkAcquireFullScreenExclusiveModeEXT( vkGetDeviceProcAddr( device, "vkAcquireFullScreenExclusiveModeEXT" ) ); - vkReleaseFullScreenExclusiveModeEXT = PFN_vkReleaseFullScreenExclusiveModeEXT( vkGetDeviceProcAddr( device, "vkReleaseFullScreenExclusiveModeEXT" ) ); - vkGetDeviceGroupSurfacePresentModes2EXT = - PFN_vkGetDeviceGroupSurfacePresentModes2EXT( vkGetDeviceProcAddr( device, "vkGetDeviceGroupSurfacePresentModes2EXT" ) ); + //=== VK_EXT_full_screen_exclusive === + vkGetPhysicalDeviceSurfacePresentModes2EXT = + PFN_vkGetPhysicalDeviceSurfacePresentModes2EXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfacePresentModes2EXT" ) ); + vkAcquireFullScreenExclusiveModeEXT = + PFN_vkAcquireFullScreenExclusiveModeEXT( vkGetInstanceProcAddr( instance, "vkAcquireFullScreenExclusiveModeEXT" ) ); + vkReleaseFullScreenExclusiveModeEXT = + PFN_vkReleaseFullScreenExclusiveModeEXT( vkGetInstanceProcAddr( instance, "vkReleaseFullScreenExclusiveModeEXT" ) ); + vkGetDeviceGroupSurfacePresentModes2EXT = + PFN_vkGetDeviceGroupSurfacePresentModes2EXT( vkGetInstanceProcAddr( instance, "vkGetDeviceGroupSurfacePresentModes2EXT" ) ); #endif /*VK_USE_PLATFORM_WIN32_KHR*/ - //=== VK_KHR_buffer_device_address === - vkGetBufferDeviceAddressKHR = PFN_vkGetBufferDeviceAddressKHR( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddressKHR" ) ); - if ( !vkGetBufferDeviceAddress ) - vkGetBufferDeviceAddress = vkGetBufferDeviceAddressKHR; - vkGetBufferOpaqueCaptureAddressKHR = PFN_vkGetBufferOpaqueCaptureAddressKHR( vkGetDeviceProcAddr( device, "vkGetBufferOpaqueCaptureAddressKHR" ) ); - if ( !vkGetBufferOpaqueCaptureAddress 
) - vkGetBufferOpaqueCaptureAddress = vkGetBufferOpaqueCaptureAddressKHR; - vkGetDeviceMemoryOpaqueCaptureAddressKHR = - PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR( vkGetDeviceProcAddr( device, "vkGetDeviceMemoryOpaqueCaptureAddressKHR" ) ); - if ( !vkGetDeviceMemoryOpaqueCaptureAddress ) - vkGetDeviceMemoryOpaqueCaptureAddress = vkGetDeviceMemoryOpaqueCaptureAddressKHR; + //=== VK_EXT_headless_surface === + vkCreateHeadlessSurfaceEXT = PFN_vkCreateHeadlessSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateHeadlessSurfaceEXT" ) ); - //=== VK_EXT_line_rasterization === - vkCmdSetLineStippleEXT = PFN_vkCmdSetLineStippleEXT( vkGetDeviceProcAddr( device, "vkCmdSetLineStippleEXT" ) ); - if ( !vkCmdSetLineStippleKHR ) - vkCmdSetLineStippleKHR = vkCmdSetLineStippleEXT; + //=== VK_KHR_buffer_device_address === + vkGetBufferDeviceAddressKHR = PFN_vkGetBufferDeviceAddressKHR( vkGetInstanceProcAddr( instance, "vkGetBufferDeviceAddressKHR" ) ); + if ( !vkGetBufferDeviceAddress ) + vkGetBufferDeviceAddress = vkGetBufferDeviceAddressKHR; + vkGetBufferOpaqueCaptureAddressKHR = PFN_vkGetBufferOpaqueCaptureAddressKHR( vkGetInstanceProcAddr( instance, "vkGetBufferOpaqueCaptureAddressKHR" ) ); + if ( !vkGetBufferOpaqueCaptureAddress ) + vkGetBufferOpaqueCaptureAddress = vkGetBufferOpaqueCaptureAddressKHR; + vkGetDeviceMemoryOpaqueCaptureAddressKHR = + PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceMemoryOpaqueCaptureAddressKHR" ) ); + if ( !vkGetDeviceMemoryOpaqueCaptureAddress ) + vkGetDeviceMemoryOpaqueCaptureAddress = vkGetDeviceMemoryOpaqueCaptureAddressKHR; - //=== VK_EXT_host_query_reset === - vkResetQueryPoolEXT = PFN_vkResetQueryPoolEXT( vkGetDeviceProcAddr( device, "vkResetQueryPoolEXT" ) ); - if ( !vkResetQueryPool ) - vkResetQueryPool = vkResetQueryPoolEXT; + //=== VK_EXT_line_rasterization === + vkCmdSetLineStippleEXT = PFN_vkCmdSetLineStippleEXT( vkGetInstanceProcAddr( instance, "vkCmdSetLineStippleEXT" ) ); + if ( !vkCmdSetLineStippleKHR ) + vkCmdSetLineStippleKHR = vkCmdSetLineStippleEXT; - //=== VK_EXT_extended_dynamic_state === - vkCmdSetCullModeEXT = PFN_vkCmdSetCullModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetCullModeEXT" ) ); - if ( !vkCmdSetCullMode ) - vkCmdSetCullMode = vkCmdSetCullModeEXT; - vkCmdSetFrontFaceEXT = PFN_vkCmdSetFrontFaceEXT( vkGetDeviceProcAddr( device, "vkCmdSetFrontFaceEXT" ) ); - if ( !vkCmdSetFrontFace ) - vkCmdSetFrontFace = vkCmdSetFrontFaceEXT; - vkCmdSetPrimitiveTopologyEXT = PFN_vkCmdSetPrimitiveTopologyEXT( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveTopologyEXT" ) ); - if ( !vkCmdSetPrimitiveTopology ) - vkCmdSetPrimitiveTopology = vkCmdSetPrimitiveTopologyEXT; - vkCmdSetViewportWithCountEXT = PFN_vkCmdSetViewportWithCountEXT( vkGetDeviceProcAddr( device, "vkCmdSetViewportWithCountEXT" ) ); - if ( !vkCmdSetViewportWithCount ) - vkCmdSetViewportWithCount = vkCmdSetViewportWithCountEXT; - vkCmdSetScissorWithCountEXT = PFN_vkCmdSetScissorWithCountEXT( vkGetDeviceProcAddr( device, "vkCmdSetScissorWithCountEXT" ) ); - if ( !vkCmdSetScissorWithCount ) - vkCmdSetScissorWithCount = vkCmdSetScissorWithCountEXT; - vkCmdBindVertexBuffers2EXT = PFN_vkCmdBindVertexBuffers2EXT( vkGetDeviceProcAddr( device, "vkCmdBindVertexBuffers2EXT" ) ); - if ( !vkCmdBindVertexBuffers2 ) - vkCmdBindVertexBuffers2 = vkCmdBindVertexBuffers2EXT; - vkCmdSetDepthTestEnableEXT = PFN_vkCmdSetDepthTestEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthTestEnableEXT" ) ); - if ( !vkCmdSetDepthTestEnable ) - vkCmdSetDepthTestEnable = 
vkCmdSetDepthTestEnableEXT; - vkCmdSetDepthWriteEnableEXT = PFN_vkCmdSetDepthWriteEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthWriteEnableEXT" ) ); - if ( !vkCmdSetDepthWriteEnable ) - vkCmdSetDepthWriteEnable = vkCmdSetDepthWriteEnableEXT; - vkCmdSetDepthCompareOpEXT = PFN_vkCmdSetDepthCompareOpEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthCompareOpEXT" ) ); - if ( !vkCmdSetDepthCompareOp ) - vkCmdSetDepthCompareOp = vkCmdSetDepthCompareOpEXT; - vkCmdSetDepthBoundsTestEnableEXT = PFN_vkCmdSetDepthBoundsTestEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthBoundsTestEnableEXT" ) ); - if ( !vkCmdSetDepthBoundsTestEnable ) - vkCmdSetDepthBoundsTestEnable = vkCmdSetDepthBoundsTestEnableEXT; - vkCmdSetStencilTestEnableEXT = PFN_vkCmdSetStencilTestEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetStencilTestEnableEXT" ) ); - if ( !vkCmdSetStencilTestEnable ) - vkCmdSetStencilTestEnable = vkCmdSetStencilTestEnableEXT; - vkCmdSetStencilOpEXT = PFN_vkCmdSetStencilOpEXT( vkGetDeviceProcAddr( device, "vkCmdSetStencilOpEXT" ) ); - if ( !vkCmdSetStencilOp ) - vkCmdSetStencilOp = vkCmdSetStencilOpEXT; + //=== VK_EXT_host_query_reset === + vkResetQueryPoolEXT = PFN_vkResetQueryPoolEXT( vkGetInstanceProcAddr( instance, "vkResetQueryPoolEXT" ) ); + if ( !vkResetQueryPool ) + vkResetQueryPool = vkResetQueryPoolEXT; - //=== VK_KHR_deferred_host_operations === - vkCreateDeferredOperationKHR = PFN_vkCreateDeferredOperationKHR( vkGetDeviceProcAddr( device, "vkCreateDeferredOperationKHR" ) ); - vkDestroyDeferredOperationKHR = PFN_vkDestroyDeferredOperationKHR( vkGetDeviceProcAddr( device, "vkDestroyDeferredOperationKHR" ) ); - vkGetDeferredOperationMaxConcurrencyKHR = - PFN_vkGetDeferredOperationMaxConcurrencyKHR( vkGetDeviceProcAddr( device, "vkGetDeferredOperationMaxConcurrencyKHR" ) ); - vkGetDeferredOperationResultKHR = PFN_vkGetDeferredOperationResultKHR( vkGetDeviceProcAddr( device, "vkGetDeferredOperationResultKHR" ) ); - vkDeferredOperationJoinKHR = PFN_vkDeferredOperationJoinKHR( vkGetDeviceProcAddr( device, "vkDeferredOperationJoinKHR" ) ); + //=== VK_EXT_extended_dynamic_state === + vkCmdSetCullModeEXT = PFN_vkCmdSetCullModeEXT( vkGetInstanceProcAddr( instance, "vkCmdSetCullModeEXT" ) ); + if ( !vkCmdSetCullMode ) + vkCmdSetCullMode = vkCmdSetCullModeEXT; + vkCmdSetFrontFaceEXT = PFN_vkCmdSetFrontFaceEXT( vkGetInstanceProcAddr( instance, "vkCmdSetFrontFaceEXT" ) ); + if ( !vkCmdSetFrontFace ) + vkCmdSetFrontFace = vkCmdSetFrontFaceEXT; + vkCmdSetPrimitiveTopologyEXT = PFN_vkCmdSetPrimitiveTopologyEXT( vkGetInstanceProcAddr( instance, "vkCmdSetPrimitiveTopologyEXT" ) ); + if ( !vkCmdSetPrimitiveTopology ) + vkCmdSetPrimitiveTopology = vkCmdSetPrimitiveTopologyEXT; + vkCmdSetViewportWithCountEXT = PFN_vkCmdSetViewportWithCountEXT( vkGetInstanceProcAddr( instance, "vkCmdSetViewportWithCountEXT" ) ); + if ( !vkCmdSetViewportWithCount ) + vkCmdSetViewportWithCount = vkCmdSetViewportWithCountEXT; + vkCmdSetScissorWithCountEXT = PFN_vkCmdSetScissorWithCountEXT( vkGetInstanceProcAddr( instance, "vkCmdSetScissorWithCountEXT" ) ); + if ( !vkCmdSetScissorWithCount ) + vkCmdSetScissorWithCount = vkCmdSetScissorWithCountEXT; + vkCmdBindVertexBuffers2EXT = PFN_vkCmdBindVertexBuffers2EXT( vkGetInstanceProcAddr( instance, "vkCmdBindVertexBuffers2EXT" ) ); + if ( !vkCmdBindVertexBuffers2 ) + vkCmdBindVertexBuffers2 = vkCmdBindVertexBuffers2EXT; + vkCmdSetDepthTestEnableEXT = PFN_vkCmdSetDepthTestEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthTestEnableEXT" ) ); + if ( !vkCmdSetDepthTestEnable 
) + vkCmdSetDepthTestEnable = vkCmdSetDepthTestEnableEXT; + vkCmdSetDepthWriteEnableEXT = PFN_vkCmdSetDepthWriteEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthWriteEnableEXT" ) ); + if ( !vkCmdSetDepthWriteEnable ) + vkCmdSetDepthWriteEnable = vkCmdSetDepthWriteEnableEXT; + vkCmdSetDepthCompareOpEXT = PFN_vkCmdSetDepthCompareOpEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthCompareOpEXT" ) ); + if ( !vkCmdSetDepthCompareOp ) + vkCmdSetDepthCompareOp = vkCmdSetDepthCompareOpEXT; + vkCmdSetDepthBoundsTestEnableEXT = PFN_vkCmdSetDepthBoundsTestEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBoundsTestEnableEXT" ) ); + if ( !vkCmdSetDepthBoundsTestEnable ) + vkCmdSetDepthBoundsTestEnable = vkCmdSetDepthBoundsTestEnableEXT; + vkCmdSetStencilTestEnableEXT = PFN_vkCmdSetStencilTestEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetStencilTestEnableEXT" ) ); + if ( !vkCmdSetStencilTestEnable ) + vkCmdSetStencilTestEnable = vkCmdSetStencilTestEnableEXT; + vkCmdSetStencilOpEXT = PFN_vkCmdSetStencilOpEXT( vkGetInstanceProcAddr( instance, "vkCmdSetStencilOpEXT" ) ); + if ( !vkCmdSetStencilOp ) + vkCmdSetStencilOp = vkCmdSetStencilOpEXT; - //=== VK_KHR_pipeline_executable_properties === - vkGetPipelineExecutablePropertiesKHR = PFN_vkGetPipelineExecutablePropertiesKHR( vkGetDeviceProcAddr( device, "vkGetPipelineExecutablePropertiesKHR" ) ); - vkGetPipelineExecutableStatisticsKHR = PFN_vkGetPipelineExecutableStatisticsKHR( vkGetDeviceProcAddr( device, "vkGetPipelineExecutableStatisticsKHR" ) ); - vkGetPipelineExecutableInternalRepresentationsKHR = - PFN_vkGetPipelineExecutableInternalRepresentationsKHR( vkGetDeviceProcAddr( device, "vkGetPipelineExecutableInternalRepresentationsKHR" ) ); + //=== VK_KHR_deferred_host_operations === + vkCreateDeferredOperationKHR = PFN_vkCreateDeferredOperationKHR( vkGetInstanceProcAddr( instance, "vkCreateDeferredOperationKHR" ) ); + vkDestroyDeferredOperationKHR = PFN_vkDestroyDeferredOperationKHR( vkGetInstanceProcAddr( instance, "vkDestroyDeferredOperationKHR" ) ); + vkGetDeferredOperationMaxConcurrencyKHR = + PFN_vkGetDeferredOperationMaxConcurrencyKHR( vkGetInstanceProcAddr( instance, "vkGetDeferredOperationMaxConcurrencyKHR" ) ); + vkGetDeferredOperationResultKHR = PFN_vkGetDeferredOperationResultKHR( vkGetInstanceProcAddr( instance, "vkGetDeferredOperationResultKHR" ) ); + vkDeferredOperationJoinKHR = PFN_vkDeferredOperationJoinKHR( vkGetInstanceProcAddr( instance, "vkDeferredOperationJoinKHR" ) ); - //=== VK_EXT_host_image_copy === - vkCopyMemoryToImageEXT = PFN_vkCopyMemoryToImageEXT( vkGetDeviceProcAddr( device, "vkCopyMemoryToImageEXT" ) ); - vkCopyImageToMemoryEXT = PFN_vkCopyImageToMemoryEXT( vkGetDeviceProcAddr( device, "vkCopyImageToMemoryEXT" ) ); - vkCopyImageToImageEXT = PFN_vkCopyImageToImageEXT( vkGetDeviceProcAddr( device, "vkCopyImageToImageEXT" ) ); - vkTransitionImageLayoutEXT = PFN_vkTransitionImageLayoutEXT( vkGetDeviceProcAddr( device, "vkTransitionImageLayoutEXT" ) ); - vkGetImageSubresourceLayout2EXT = PFN_vkGetImageSubresourceLayout2EXT( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout2EXT" ) ); - if ( !vkGetImageSubresourceLayout2KHR ) - vkGetImageSubresourceLayout2KHR = vkGetImageSubresourceLayout2EXT; + //=== VK_KHR_pipeline_executable_properties === + vkGetPipelineExecutablePropertiesKHR = + PFN_vkGetPipelineExecutablePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPipelineExecutablePropertiesKHR" ) ); + vkGetPipelineExecutableStatisticsKHR = + PFN_vkGetPipelineExecutableStatisticsKHR( 
vkGetInstanceProcAddr( instance, "vkGetPipelineExecutableStatisticsKHR" ) ); + vkGetPipelineExecutableInternalRepresentationsKHR = + PFN_vkGetPipelineExecutableInternalRepresentationsKHR( vkGetInstanceProcAddr( instance, "vkGetPipelineExecutableInternalRepresentationsKHR" ) ); - //=== VK_KHR_map_memory2 === - vkMapMemory2KHR = PFN_vkMapMemory2KHR( vkGetDeviceProcAddr( device, "vkMapMemory2KHR" ) ); - vkUnmapMemory2KHR = PFN_vkUnmapMemory2KHR( vkGetDeviceProcAddr( device, "vkUnmapMemory2KHR" ) ); + //=== VK_EXT_host_image_copy === + vkCopyMemoryToImageEXT = PFN_vkCopyMemoryToImageEXT( vkGetInstanceProcAddr( instance, "vkCopyMemoryToImageEXT" ) ); + vkCopyImageToMemoryEXT = PFN_vkCopyImageToMemoryEXT( vkGetInstanceProcAddr( instance, "vkCopyImageToMemoryEXT" ) ); + vkCopyImageToImageEXT = PFN_vkCopyImageToImageEXT( vkGetInstanceProcAddr( instance, "vkCopyImageToImageEXT" ) ); + vkTransitionImageLayoutEXT = PFN_vkTransitionImageLayoutEXT( vkGetInstanceProcAddr( instance, "vkTransitionImageLayoutEXT" ) ); + vkGetImageSubresourceLayout2EXT = PFN_vkGetImageSubresourceLayout2EXT( vkGetInstanceProcAddr( instance, "vkGetImageSubresourceLayout2EXT" ) ); + if ( !vkGetImageSubresourceLayout2KHR ) + vkGetImageSubresourceLayout2KHR = vkGetImageSubresourceLayout2EXT; - //=== VK_EXT_swapchain_maintenance1 === - vkReleaseSwapchainImagesEXT = PFN_vkReleaseSwapchainImagesEXT( vkGetDeviceProcAddr( device, "vkReleaseSwapchainImagesEXT" ) ); + //=== VK_KHR_map_memory2 === + vkMapMemory2KHR = PFN_vkMapMemory2KHR( vkGetInstanceProcAddr( instance, "vkMapMemory2KHR" ) ); + vkUnmapMemory2KHR = PFN_vkUnmapMemory2KHR( vkGetInstanceProcAddr( instance, "vkUnmapMemory2KHR" ) ); - //=== VK_NV_device_generated_commands === - vkGetGeneratedCommandsMemoryRequirementsNV = - PFN_vkGetGeneratedCommandsMemoryRequirementsNV( vkGetDeviceProcAddr( device, "vkGetGeneratedCommandsMemoryRequirementsNV" ) ); - vkCmdPreprocessGeneratedCommandsNV = PFN_vkCmdPreprocessGeneratedCommandsNV( vkGetDeviceProcAddr( device, "vkCmdPreprocessGeneratedCommandsNV" ) ); - vkCmdExecuteGeneratedCommandsNV = PFN_vkCmdExecuteGeneratedCommandsNV( vkGetDeviceProcAddr( device, "vkCmdExecuteGeneratedCommandsNV" ) ); - vkCmdBindPipelineShaderGroupNV = PFN_vkCmdBindPipelineShaderGroupNV( vkGetDeviceProcAddr( device, "vkCmdBindPipelineShaderGroupNV" ) ); - vkCreateIndirectCommandsLayoutNV = PFN_vkCreateIndirectCommandsLayoutNV( vkGetDeviceProcAddr( device, "vkCreateIndirectCommandsLayoutNV" ) ); - vkDestroyIndirectCommandsLayoutNV = PFN_vkDestroyIndirectCommandsLayoutNV( vkGetDeviceProcAddr( device, "vkDestroyIndirectCommandsLayoutNV" ) ); + //=== VK_EXT_swapchain_maintenance1 === + vkReleaseSwapchainImagesEXT = PFN_vkReleaseSwapchainImagesEXT( vkGetInstanceProcAddr( instance, "vkReleaseSwapchainImagesEXT" ) ); - //=== VK_EXT_depth_bias_control === - vkCmdSetDepthBias2EXT = PFN_vkCmdSetDepthBias2EXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthBias2EXT" ) ); + //=== VK_NV_device_generated_commands === + vkGetGeneratedCommandsMemoryRequirementsNV = + PFN_vkGetGeneratedCommandsMemoryRequirementsNV( vkGetInstanceProcAddr( instance, "vkGetGeneratedCommandsMemoryRequirementsNV" ) ); + vkCmdPreprocessGeneratedCommandsNV = PFN_vkCmdPreprocessGeneratedCommandsNV( vkGetInstanceProcAddr( instance, "vkCmdPreprocessGeneratedCommandsNV" ) ); + vkCmdExecuteGeneratedCommandsNV = PFN_vkCmdExecuteGeneratedCommandsNV( vkGetInstanceProcAddr( instance, "vkCmdExecuteGeneratedCommandsNV" ) ); + vkCmdBindPipelineShaderGroupNV = PFN_vkCmdBindPipelineShaderGroupNV( 
vkGetInstanceProcAddr( instance, "vkCmdBindPipelineShaderGroupNV" ) ); + vkCreateIndirectCommandsLayoutNV = PFN_vkCreateIndirectCommandsLayoutNV( vkGetInstanceProcAddr( instance, "vkCreateIndirectCommandsLayoutNV" ) ); + vkDestroyIndirectCommandsLayoutNV = PFN_vkDestroyIndirectCommandsLayoutNV( vkGetInstanceProcAddr( instance, "vkDestroyIndirectCommandsLayoutNV" ) ); - //=== VK_EXT_private_data === - vkCreatePrivateDataSlotEXT = PFN_vkCreatePrivateDataSlotEXT( vkGetDeviceProcAddr( device, "vkCreatePrivateDataSlotEXT" ) ); - if ( !vkCreatePrivateDataSlot ) - vkCreatePrivateDataSlot = vkCreatePrivateDataSlotEXT; - vkDestroyPrivateDataSlotEXT = PFN_vkDestroyPrivateDataSlotEXT( vkGetDeviceProcAddr( device, "vkDestroyPrivateDataSlotEXT" ) ); - if ( !vkDestroyPrivateDataSlot ) - vkDestroyPrivateDataSlot = vkDestroyPrivateDataSlotEXT; - vkSetPrivateDataEXT = PFN_vkSetPrivateDataEXT( vkGetDeviceProcAddr( device, "vkSetPrivateDataEXT" ) ); - if ( !vkSetPrivateData ) - vkSetPrivateData = vkSetPrivateDataEXT; - vkGetPrivateDataEXT = PFN_vkGetPrivateDataEXT( vkGetDeviceProcAddr( device, "vkGetPrivateDataEXT" ) ); - if ( !vkGetPrivateData ) - vkGetPrivateData = vkGetPrivateDataEXT; + //=== VK_EXT_depth_bias_control === + vkCmdSetDepthBias2EXT = PFN_vkCmdSetDepthBias2EXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBias2EXT" ) ); - //=== VK_KHR_video_encode_queue === - vkGetEncodedVideoSessionParametersKHR = - PFN_vkGetEncodedVideoSessionParametersKHR( vkGetDeviceProcAddr( device, "vkGetEncodedVideoSessionParametersKHR" ) ); - vkCmdEncodeVideoKHR = PFN_vkCmdEncodeVideoKHR( vkGetDeviceProcAddr( device, "vkCmdEncodeVideoKHR" ) ); + //=== VK_EXT_acquire_drm_display === + vkAcquireDrmDisplayEXT = PFN_vkAcquireDrmDisplayEXT( vkGetInstanceProcAddr( instance, "vkAcquireDrmDisplayEXT" ) ); + vkGetDrmDisplayEXT = PFN_vkGetDrmDisplayEXT( vkGetInstanceProcAddr( instance, "vkGetDrmDisplayEXT" ) ); + + //=== VK_EXT_private_data === + vkCreatePrivateDataSlotEXT = PFN_vkCreatePrivateDataSlotEXT( vkGetInstanceProcAddr( instance, "vkCreatePrivateDataSlotEXT" ) ); + if ( !vkCreatePrivateDataSlot ) + vkCreatePrivateDataSlot = vkCreatePrivateDataSlotEXT; + vkDestroyPrivateDataSlotEXT = PFN_vkDestroyPrivateDataSlotEXT( vkGetInstanceProcAddr( instance, "vkDestroyPrivateDataSlotEXT" ) ); + if ( !vkDestroyPrivateDataSlot ) + vkDestroyPrivateDataSlot = vkDestroyPrivateDataSlotEXT; + vkSetPrivateDataEXT = PFN_vkSetPrivateDataEXT( vkGetInstanceProcAddr( instance, "vkSetPrivateDataEXT" ) ); + if ( !vkSetPrivateData ) + vkSetPrivateData = vkSetPrivateDataEXT; + vkGetPrivateDataEXT = PFN_vkGetPrivateDataEXT( vkGetInstanceProcAddr( instance, "vkGetPrivateDataEXT" ) ); + if ( !vkGetPrivateData ) + vkGetPrivateData = vkGetPrivateDataEXT; + + //=== VK_KHR_video_encode_queue === + vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR = PFN_vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR" ) ); + vkGetEncodedVideoSessionParametersKHR = + PFN_vkGetEncodedVideoSessionParametersKHR( vkGetInstanceProcAddr( instance, "vkGetEncodedVideoSessionParametersKHR" ) ); + vkCmdEncodeVideoKHR = PFN_vkCmdEncodeVideoKHR( vkGetInstanceProcAddr( instance, "vkCmdEncodeVideoKHR" ) ); #if defined( VK_ENABLE_BETA_EXTENSIONS ) - //=== VK_NV_cuda_kernel_launch === - vkCreateCudaModuleNV = PFN_vkCreateCudaModuleNV( vkGetDeviceProcAddr( device, "vkCreateCudaModuleNV" ) ); - vkGetCudaModuleCacheNV = PFN_vkGetCudaModuleCacheNV( vkGetDeviceProcAddr( device, 
"vkGetCudaModuleCacheNV" ) ); - vkCreateCudaFunctionNV = PFN_vkCreateCudaFunctionNV( vkGetDeviceProcAddr( device, "vkCreateCudaFunctionNV" ) ); - vkDestroyCudaModuleNV = PFN_vkDestroyCudaModuleNV( vkGetDeviceProcAddr( device, "vkDestroyCudaModuleNV" ) ); - vkDestroyCudaFunctionNV = PFN_vkDestroyCudaFunctionNV( vkGetDeviceProcAddr( device, "vkDestroyCudaFunctionNV" ) ); - vkCmdCudaLaunchKernelNV = PFN_vkCmdCudaLaunchKernelNV( vkGetDeviceProcAddr( device, "vkCmdCudaLaunchKernelNV" ) ); + //=== VK_NV_cuda_kernel_launch === + vkCreateCudaModuleNV = PFN_vkCreateCudaModuleNV( vkGetInstanceProcAddr( instance, "vkCreateCudaModuleNV" ) ); + vkGetCudaModuleCacheNV = PFN_vkGetCudaModuleCacheNV( vkGetInstanceProcAddr( instance, "vkGetCudaModuleCacheNV" ) ); + vkCreateCudaFunctionNV = PFN_vkCreateCudaFunctionNV( vkGetInstanceProcAddr( instance, "vkCreateCudaFunctionNV" ) ); + vkDestroyCudaModuleNV = PFN_vkDestroyCudaModuleNV( vkGetInstanceProcAddr( instance, "vkDestroyCudaModuleNV" ) ); + vkDestroyCudaFunctionNV = PFN_vkDestroyCudaFunctionNV( vkGetInstanceProcAddr( instance, "vkDestroyCudaFunctionNV" ) ); + vkCmdCudaLaunchKernelNV = PFN_vkCmdCudaLaunchKernelNV( vkGetInstanceProcAddr( instance, "vkCmdCudaLaunchKernelNV" ) ); #endif /*VK_ENABLE_BETA_EXTENSIONS*/ #if defined( VK_USE_PLATFORM_METAL_EXT ) - //=== VK_EXT_metal_objects === - vkExportMetalObjectsEXT = PFN_vkExportMetalObjectsEXT( vkGetDeviceProcAddr( device, "vkExportMetalObjectsEXT" ) ); + //=== VK_EXT_metal_objects === + vkExportMetalObjectsEXT = PFN_vkExportMetalObjectsEXT( vkGetInstanceProcAddr( instance, "vkExportMetalObjectsEXT" ) ); #endif /*VK_USE_PLATFORM_METAL_EXT*/ - //=== VK_KHR_synchronization2 === - vkCmdSetEvent2KHR = PFN_vkCmdSetEvent2KHR( vkGetDeviceProcAddr( device, "vkCmdSetEvent2KHR" ) ); - if ( !vkCmdSetEvent2 ) - vkCmdSetEvent2 = vkCmdSetEvent2KHR; - vkCmdResetEvent2KHR = PFN_vkCmdResetEvent2KHR( vkGetDeviceProcAddr( device, "vkCmdResetEvent2KHR" ) ); - if ( !vkCmdResetEvent2 ) - vkCmdResetEvent2 = vkCmdResetEvent2KHR; - vkCmdWaitEvents2KHR = PFN_vkCmdWaitEvents2KHR( vkGetDeviceProcAddr( device, "vkCmdWaitEvents2KHR" ) ); - if ( !vkCmdWaitEvents2 ) - vkCmdWaitEvents2 = vkCmdWaitEvents2KHR; - vkCmdPipelineBarrier2KHR = PFN_vkCmdPipelineBarrier2KHR( vkGetDeviceProcAddr( device, "vkCmdPipelineBarrier2KHR" ) ); - if ( !vkCmdPipelineBarrier2 ) - vkCmdPipelineBarrier2 = vkCmdPipelineBarrier2KHR; - vkCmdWriteTimestamp2KHR = PFN_vkCmdWriteTimestamp2KHR( vkGetDeviceProcAddr( device, "vkCmdWriteTimestamp2KHR" ) ); - if ( !vkCmdWriteTimestamp2 ) - vkCmdWriteTimestamp2 = vkCmdWriteTimestamp2KHR; - vkQueueSubmit2KHR = PFN_vkQueueSubmit2KHR( vkGetDeviceProcAddr( device, "vkQueueSubmit2KHR" ) ); - if ( !vkQueueSubmit2 ) - vkQueueSubmit2 = vkQueueSubmit2KHR; - vkCmdWriteBufferMarker2AMD = PFN_vkCmdWriteBufferMarker2AMD( vkGetDeviceProcAddr( device, "vkCmdWriteBufferMarker2AMD" ) ); - vkGetQueueCheckpointData2NV = PFN_vkGetQueueCheckpointData2NV( vkGetDeviceProcAddr( device, "vkGetQueueCheckpointData2NV" ) ); + //=== VK_KHR_synchronization2 === + vkCmdSetEvent2KHR = PFN_vkCmdSetEvent2KHR( vkGetInstanceProcAddr( instance, "vkCmdSetEvent2KHR" ) ); + if ( !vkCmdSetEvent2 ) + vkCmdSetEvent2 = vkCmdSetEvent2KHR; + vkCmdResetEvent2KHR = PFN_vkCmdResetEvent2KHR( vkGetInstanceProcAddr( instance, "vkCmdResetEvent2KHR" ) ); + if ( !vkCmdResetEvent2 ) + vkCmdResetEvent2 = vkCmdResetEvent2KHR; + vkCmdWaitEvents2KHR = PFN_vkCmdWaitEvents2KHR( vkGetInstanceProcAddr( instance, "vkCmdWaitEvents2KHR" ) ); + if ( !vkCmdWaitEvents2 ) + vkCmdWaitEvents2 = 
vkCmdWaitEvents2KHR; + vkCmdPipelineBarrier2KHR = PFN_vkCmdPipelineBarrier2KHR( vkGetInstanceProcAddr( instance, "vkCmdPipelineBarrier2KHR" ) ); + if ( !vkCmdPipelineBarrier2 ) + vkCmdPipelineBarrier2 = vkCmdPipelineBarrier2KHR; + vkCmdWriteTimestamp2KHR = PFN_vkCmdWriteTimestamp2KHR( vkGetInstanceProcAddr( instance, "vkCmdWriteTimestamp2KHR" ) ); + if ( !vkCmdWriteTimestamp2 ) + vkCmdWriteTimestamp2 = vkCmdWriteTimestamp2KHR; + vkQueueSubmit2KHR = PFN_vkQueueSubmit2KHR( vkGetInstanceProcAddr( instance, "vkQueueSubmit2KHR" ) ); + if ( !vkQueueSubmit2 ) + vkQueueSubmit2 = vkQueueSubmit2KHR; - //=== VK_EXT_descriptor_buffer === - vkGetDescriptorSetLayoutSizeEXT = PFN_vkGetDescriptorSetLayoutSizeEXT( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutSizeEXT" ) ); - vkGetDescriptorSetLayoutBindingOffsetEXT = - PFN_vkGetDescriptorSetLayoutBindingOffsetEXT( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutBindingOffsetEXT" ) ); - vkGetDescriptorEXT = PFN_vkGetDescriptorEXT( vkGetDeviceProcAddr( device, "vkGetDescriptorEXT" ) ); - vkCmdBindDescriptorBuffersEXT = PFN_vkCmdBindDescriptorBuffersEXT( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorBuffersEXT" ) ); - vkCmdSetDescriptorBufferOffsetsEXT = PFN_vkCmdSetDescriptorBufferOffsetsEXT( vkGetDeviceProcAddr( device, "vkCmdSetDescriptorBufferOffsetsEXT" ) ); - vkCmdBindDescriptorBufferEmbeddedSamplersEXT = - PFN_vkCmdBindDescriptorBufferEmbeddedSamplersEXT( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorBufferEmbeddedSamplersEXT" ) ); - vkGetBufferOpaqueCaptureDescriptorDataEXT = - PFN_vkGetBufferOpaqueCaptureDescriptorDataEXT( vkGetDeviceProcAddr( device, "vkGetBufferOpaqueCaptureDescriptorDataEXT" ) ); - vkGetImageOpaqueCaptureDescriptorDataEXT = - PFN_vkGetImageOpaqueCaptureDescriptorDataEXT( vkGetDeviceProcAddr( device, "vkGetImageOpaqueCaptureDescriptorDataEXT" ) ); - vkGetImageViewOpaqueCaptureDescriptorDataEXT = - PFN_vkGetImageViewOpaqueCaptureDescriptorDataEXT( vkGetDeviceProcAddr( device, "vkGetImageViewOpaqueCaptureDescriptorDataEXT" ) ); - vkGetSamplerOpaqueCaptureDescriptorDataEXT = - PFN_vkGetSamplerOpaqueCaptureDescriptorDataEXT( vkGetDeviceProcAddr( device, "vkGetSamplerOpaqueCaptureDescriptorDataEXT" ) ); - vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT = PFN_vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT( - vkGetDeviceProcAddr( device, "vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT" ) ); + //=== VK_EXT_descriptor_buffer === + vkGetDescriptorSetLayoutSizeEXT = PFN_vkGetDescriptorSetLayoutSizeEXT( vkGetInstanceProcAddr( instance, "vkGetDescriptorSetLayoutSizeEXT" ) ); + vkGetDescriptorSetLayoutBindingOffsetEXT = + PFN_vkGetDescriptorSetLayoutBindingOffsetEXT( vkGetInstanceProcAddr( instance, "vkGetDescriptorSetLayoutBindingOffsetEXT" ) ); + vkGetDescriptorEXT = PFN_vkGetDescriptorEXT( vkGetInstanceProcAddr( instance, "vkGetDescriptorEXT" ) ); + vkCmdBindDescriptorBuffersEXT = PFN_vkCmdBindDescriptorBuffersEXT( vkGetInstanceProcAddr( instance, "vkCmdBindDescriptorBuffersEXT" ) ); + vkCmdSetDescriptorBufferOffsetsEXT = PFN_vkCmdSetDescriptorBufferOffsetsEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDescriptorBufferOffsetsEXT" ) ); + vkCmdBindDescriptorBufferEmbeddedSamplersEXT = + PFN_vkCmdBindDescriptorBufferEmbeddedSamplersEXT( vkGetInstanceProcAddr( instance, "vkCmdBindDescriptorBufferEmbeddedSamplersEXT" ) ); + vkGetBufferOpaqueCaptureDescriptorDataEXT = + PFN_vkGetBufferOpaqueCaptureDescriptorDataEXT( vkGetInstanceProcAddr( instance, "vkGetBufferOpaqueCaptureDescriptorDataEXT" ) ); + 
vkGetImageOpaqueCaptureDescriptorDataEXT = + PFN_vkGetImageOpaqueCaptureDescriptorDataEXT( vkGetInstanceProcAddr( instance, "vkGetImageOpaqueCaptureDescriptorDataEXT" ) ); + vkGetImageViewOpaqueCaptureDescriptorDataEXT = + PFN_vkGetImageViewOpaqueCaptureDescriptorDataEXT( vkGetInstanceProcAddr( instance, "vkGetImageViewOpaqueCaptureDescriptorDataEXT" ) ); + vkGetSamplerOpaqueCaptureDescriptorDataEXT = + PFN_vkGetSamplerOpaqueCaptureDescriptorDataEXT( vkGetInstanceProcAddr( instance, "vkGetSamplerOpaqueCaptureDescriptorDataEXT" ) ); + vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT = PFN_vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT( + vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT" ) ); - //=== VK_NV_fragment_shading_rate_enums === - vkCmdSetFragmentShadingRateEnumNV = PFN_vkCmdSetFragmentShadingRateEnumNV( vkGetDeviceProcAddr( device, "vkCmdSetFragmentShadingRateEnumNV" ) ); + //=== VK_NV_fragment_shading_rate_enums === + vkCmdSetFragmentShadingRateEnumNV = PFN_vkCmdSetFragmentShadingRateEnumNV( vkGetInstanceProcAddr( instance, "vkCmdSetFragmentShadingRateEnumNV" ) ); - //=== VK_EXT_mesh_shader === - vkCmdDrawMeshTasksEXT = PFN_vkCmdDrawMeshTasksEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksEXT" ) ); - vkCmdDrawMeshTasksIndirectEXT = PFN_vkCmdDrawMeshTasksIndirectEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectEXT" ) ); - vkCmdDrawMeshTasksIndirectCountEXT = PFN_vkCmdDrawMeshTasksIndirectCountEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectCountEXT" ) ); + //=== VK_EXT_mesh_shader === + vkCmdDrawMeshTasksEXT = PFN_vkCmdDrawMeshTasksEXT( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksEXT" ) ); + vkCmdDrawMeshTasksIndirectEXT = PFN_vkCmdDrawMeshTasksIndirectEXT( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksIndirectEXT" ) ); + vkCmdDrawMeshTasksIndirectCountEXT = PFN_vkCmdDrawMeshTasksIndirectCountEXT( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksIndirectCountEXT" ) ); - //=== VK_KHR_copy_commands2 === - vkCmdCopyBuffer2KHR = PFN_vkCmdCopyBuffer2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyBuffer2KHR" ) ); - if ( !vkCmdCopyBuffer2 ) - vkCmdCopyBuffer2 = vkCmdCopyBuffer2KHR; - vkCmdCopyImage2KHR = PFN_vkCmdCopyImage2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyImage2KHR" ) ); - if ( !vkCmdCopyImage2 ) - vkCmdCopyImage2 = vkCmdCopyImage2KHR; - vkCmdCopyBufferToImage2KHR = PFN_vkCmdCopyBufferToImage2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyBufferToImage2KHR" ) ); - if ( !vkCmdCopyBufferToImage2 ) - vkCmdCopyBufferToImage2 = vkCmdCopyBufferToImage2KHR; - vkCmdCopyImageToBuffer2KHR = PFN_vkCmdCopyImageToBuffer2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyImageToBuffer2KHR" ) ); - if ( !vkCmdCopyImageToBuffer2 ) - vkCmdCopyImageToBuffer2 = vkCmdCopyImageToBuffer2KHR; - vkCmdBlitImage2KHR = PFN_vkCmdBlitImage2KHR( vkGetDeviceProcAddr( device, "vkCmdBlitImage2KHR" ) ); - if ( !vkCmdBlitImage2 ) - vkCmdBlitImage2 = vkCmdBlitImage2KHR; - vkCmdResolveImage2KHR = PFN_vkCmdResolveImage2KHR( vkGetDeviceProcAddr( device, "vkCmdResolveImage2KHR" ) ); - if ( !vkCmdResolveImage2 ) - vkCmdResolveImage2 = vkCmdResolveImage2KHR; + //=== VK_KHR_copy_commands2 === + vkCmdCopyBuffer2KHR = PFN_vkCmdCopyBuffer2KHR( vkGetInstanceProcAddr( instance, "vkCmdCopyBuffer2KHR" ) ); + if ( !vkCmdCopyBuffer2 ) + vkCmdCopyBuffer2 = vkCmdCopyBuffer2KHR; + vkCmdCopyImage2KHR = PFN_vkCmdCopyImage2KHR( vkGetInstanceProcAddr( instance, "vkCmdCopyImage2KHR" ) ); + if ( !vkCmdCopyImage2 ) + vkCmdCopyImage2 = 
vkCmdCopyImage2KHR; + vkCmdCopyBufferToImage2KHR = PFN_vkCmdCopyBufferToImage2KHR( vkGetInstanceProcAddr( instance, "vkCmdCopyBufferToImage2KHR" ) ); + if ( !vkCmdCopyBufferToImage2 ) + vkCmdCopyBufferToImage2 = vkCmdCopyBufferToImage2KHR; + vkCmdCopyImageToBuffer2KHR = PFN_vkCmdCopyImageToBuffer2KHR( vkGetInstanceProcAddr( instance, "vkCmdCopyImageToBuffer2KHR" ) ); + if ( !vkCmdCopyImageToBuffer2 ) + vkCmdCopyImageToBuffer2 = vkCmdCopyImageToBuffer2KHR; + vkCmdBlitImage2KHR = PFN_vkCmdBlitImage2KHR( vkGetInstanceProcAddr( instance, "vkCmdBlitImage2KHR" ) ); + if ( !vkCmdBlitImage2 ) + vkCmdBlitImage2 = vkCmdBlitImage2KHR; + vkCmdResolveImage2KHR = PFN_vkCmdResolveImage2KHR( vkGetInstanceProcAddr( instance, "vkCmdResolveImage2KHR" ) ); + if ( !vkCmdResolveImage2 ) + vkCmdResolveImage2 = vkCmdResolveImage2KHR; - //=== VK_EXT_device_fault === - vkGetDeviceFaultInfoEXT = PFN_vkGetDeviceFaultInfoEXT( vkGetDeviceProcAddr( device, "vkGetDeviceFaultInfoEXT" ) ); + //=== VK_EXT_device_fault === + vkGetDeviceFaultInfoEXT = PFN_vkGetDeviceFaultInfoEXT( vkGetInstanceProcAddr( instance, "vkGetDeviceFaultInfoEXT" ) ); - //=== VK_EXT_vertex_input_dynamic_state === - vkCmdSetVertexInputEXT = PFN_vkCmdSetVertexInputEXT( vkGetDeviceProcAddr( device, "vkCmdSetVertexInputEXT" ) ); +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_NV_acquire_winrt_display === + vkAcquireWinrtDisplayNV = PFN_vkAcquireWinrtDisplayNV( vkGetInstanceProcAddr( instance, "vkAcquireWinrtDisplayNV" ) ); + vkGetWinrtDisplayNV = PFN_vkGetWinrtDisplayNV( vkGetInstanceProcAddr( instance, "vkGetWinrtDisplayNV" ) ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) + //=== VK_EXT_directfb_surface === + vkCreateDirectFBSurfaceEXT = PFN_vkCreateDirectFBSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateDirectFBSurfaceEXT" ) ); + vkGetPhysicalDeviceDirectFBPresentationSupportEXT = + PFN_vkGetPhysicalDeviceDirectFBPresentationSupportEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDirectFBPresentationSupportEXT" ) ); +#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ + + //=== VK_EXT_vertex_input_dynamic_state === + vkCmdSetVertexInputEXT = PFN_vkCmdSetVertexInputEXT( vkGetInstanceProcAddr( instance, "vkCmdSetVertexInputEXT" ) ); #if defined( VK_USE_PLATFORM_FUCHSIA ) - //=== VK_FUCHSIA_external_memory === - vkGetMemoryZirconHandleFUCHSIA = PFN_vkGetMemoryZirconHandleFUCHSIA( vkGetDeviceProcAddr( device, "vkGetMemoryZirconHandleFUCHSIA" ) ); - vkGetMemoryZirconHandlePropertiesFUCHSIA = - PFN_vkGetMemoryZirconHandlePropertiesFUCHSIA( vkGetDeviceProcAddr( device, "vkGetMemoryZirconHandlePropertiesFUCHSIA" ) ); + //=== VK_FUCHSIA_external_memory === + vkGetMemoryZirconHandleFUCHSIA = PFN_vkGetMemoryZirconHandleFUCHSIA( vkGetInstanceProcAddr( instance, "vkGetMemoryZirconHandleFUCHSIA" ) ); + vkGetMemoryZirconHandlePropertiesFUCHSIA = + PFN_vkGetMemoryZirconHandlePropertiesFUCHSIA( vkGetInstanceProcAddr( instance, "vkGetMemoryZirconHandlePropertiesFUCHSIA" ) ); #endif /*VK_USE_PLATFORM_FUCHSIA*/ #if defined( VK_USE_PLATFORM_FUCHSIA ) - //=== VK_FUCHSIA_external_semaphore === - vkImportSemaphoreZirconHandleFUCHSIA = PFN_vkImportSemaphoreZirconHandleFUCHSIA( vkGetDeviceProcAddr( device, "vkImportSemaphoreZirconHandleFUCHSIA" ) ); - vkGetSemaphoreZirconHandleFUCHSIA = PFN_vkGetSemaphoreZirconHandleFUCHSIA( vkGetDeviceProcAddr( device, "vkGetSemaphoreZirconHandleFUCHSIA" ) ); + //=== VK_FUCHSIA_external_semaphore === + vkImportSemaphoreZirconHandleFUCHSIA = + PFN_vkImportSemaphoreZirconHandleFUCHSIA( 
vkGetInstanceProcAddr( instance, "vkImportSemaphoreZirconHandleFUCHSIA" ) ); + vkGetSemaphoreZirconHandleFUCHSIA = PFN_vkGetSemaphoreZirconHandleFUCHSIA( vkGetInstanceProcAddr( instance, "vkGetSemaphoreZirconHandleFUCHSIA" ) ); #endif /*VK_USE_PLATFORM_FUCHSIA*/ #if defined( VK_USE_PLATFORM_FUCHSIA ) - //=== VK_FUCHSIA_buffer_collection === - vkCreateBufferCollectionFUCHSIA = PFN_vkCreateBufferCollectionFUCHSIA( vkGetDeviceProcAddr( device, "vkCreateBufferCollectionFUCHSIA" ) ); - vkSetBufferCollectionImageConstraintsFUCHSIA = - PFN_vkSetBufferCollectionImageConstraintsFUCHSIA( vkGetDeviceProcAddr( device, "vkSetBufferCollectionImageConstraintsFUCHSIA" ) ); - vkSetBufferCollectionBufferConstraintsFUCHSIA = - PFN_vkSetBufferCollectionBufferConstraintsFUCHSIA( vkGetDeviceProcAddr( device, "vkSetBufferCollectionBufferConstraintsFUCHSIA" ) ); - vkDestroyBufferCollectionFUCHSIA = PFN_vkDestroyBufferCollectionFUCHSIA( vkGetDeviceProcAddr( device, "vkDestroyBufferCollectionFUCHSIA" ) ); - vkGetBufferCollectionPropertiesFUCHSIA = - PFN_vkGetBufferCollectionPropertiesFUCHSIA( vkGetDeviceProcAddr( device, "vkGetBufferCollectionPropertiesFUCHSIA" ) ); + //=== VK_FUCHSIA_buffer_collection === + vkCreateBufferCollectionFUCHSIA = PFN_vkCreateBufferCollectionFUCHSIA( vkGetInstanceProcAddr( instance, "vkCreateBufferCollectionFUCHSIA" ) ); + vkSetBufferCollectionImageConstraintsFUCHSIA = + PFN_vkSetBufferCollectionImageConstraintsFUCHSIA( vkGetInstanceProcAddr( instance, "vkSetBufferCollectionImageConstraintsFUCHSIA" ) ); + vkSetBufferCollectionBufferConstraintsFUCHSIA = + PFN_vkSetBufferCollectionBufferConstraintsFUCHSIA( vkGetInstanceProcAddr( instance, "vkSetBufferCollectionBufferConstraintsFUCHSIA" ) ); + vkDestroyBufferCollectionFUCHSIA = PFN_vkDestroyBufferCollectionFUCHSIA( vkGetInstanceProcAddr( instance, "vkDestroyBufferCollectionFUCHSIA" ) ); + vkGetBufferCollectionPropertiesFUCHSIA = + PFN_vkGetBufferCollectionPropertiesFUCHSIA( vkGetInstanceProcAddr( instance, "vkGetBufferCollectionPropertiesFUCHSIA" ) ); #endif /*VK_USE_PLATFORM_FUCHSIA*/ - //=== VK_HUAWEI_subpass_shading === - vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI = - PFN_vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI( vkGetDeviceProcAddr( device, "vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI" ) ); - vkCmdSubpassShadingHUAWEI = PFN_vkCmdSubpassShadingHUAWEI( vkGetDeviceProcAddr( device, "vkCmdSubpassShadingHUAWEI" ) ); + //=== VK_HUAWEI_subpass_shading === + vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI = + PFN_vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI( vkGetInstanceProcAddr( instance, "vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI" ) ); + vkCmdSubpassShadingHUAWEI = PFN_vkCmdSubpassShadingHUAWEI( vkGetInstanceProcAddr( instance, "vkCmdSubpassShadingHUAWEI" ) ); - //=== VK_HUAWEI_invocation_mask === - vkCmdBindInvocationMaskHUAWEI = PFN_vkCmdBindInvocationMaskHUAWEI( vkGetDeviceProcAddr( device, "vkCmdBindInvocationMaskHUAWEI" ) ); + //=== VK_HUAWEI_invocation_mask === + vkCmdBindInvocationMaskHUAWEI = PFN_vkCmdBindInvocationMaskHUAWEI( vkGetInstanceProcAddr( instance, "vkCmdBindInvocationMaskHUAWEI" ) ); - //=== VK_NV_external_memory_rdma === - vkGetMemoryRemoteAddressNV = PFN_vkGetMemoryRemoteAddressNV( vkGetDeviceProcAddr( device, "vkGetMemoryRemoteAddressNV" ) ); + //=== VK_NV_external_memory_rdma === + vkGetMemoryRemoteAddressNV = PFN_vkGetMemoryRemoteAddressNV( vkGetInstanceProcAddr( instance, "vkGetMemoryRemoteAddressNV" ) ); - //=== VK_EXT_pipeline_properties === - vkGetPipelinePropertiesEXT = 
PFN_vkGetPipelinePropertiesEXT( vkGetDeviceProcAddr( device, "vkGetPipelinePropertiesEXT" ) ); + //=== VK_EXT_pipeline_properties === + vkGetPipelinePropertiesEXT = PFN_vkGetPipelinePropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetPipelinePropertiesEXT" ) ); - //=== VK_EXT_extended_dynamic_state2 === - vkCmdSetPatchControlPointsEXT = PFN_vkCmdSetPatchControlPointsEXT( vkGetDeviceProcAddr( device, "vkCmdSetPatchControlPointsEXT" ) ); - vkCmdSetRasterizerDiscardEnableEXT = PFN_vkCmdSetRasterizerDiscardEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetRasterizerDiscardEnableEXT" ) ); - if ( !vkCmdSetRasterizerDiscardEnable ) - vkCmdSetRasterizerDiscardEnable = vkCmdSetRasterizerDiscardEnableEXT; - vkCmdSetDepthBiasEnableEXT = PFN_vkCmdSetDepthBiasEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthBiasEnableEXT" ) ); - if ( !vkCmdSetDepthBiasEnable ) - vkCmdSetDepthBiasEnable = vkCmdSetDepthBiasEnableEXT; - vkCmdSetLogicOpEXT = PFN_vkCmdSetLogicOpEXT( vkGetDeviceProcAddr( device, "vkCmdSetLogicOpEXT" ) ); - vkCmdSetPrimitiveRestartEnableEXT = PFN_vkCmdSetPrimitiveRestartEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveRestartEnableEXT" ) ); - if ( !vkCmdSetPrimitiveRestartEnable ) - vkCmdSetPrimitiveRestartEnable = vkCmdSetPrimitiveRestartEnableEXT; - - //=== VK_EXT_color_write_enable === - vkCmdSetColorWriteEnableEXT = PFN_vkCmdSetColorWriteEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorWriteEnableEXT" ) ); - - //=== VK_KHR_ray_tracing_maintenance1 === - vkCmdTraceRaysIndirect2KHR = PFN_vkCmdTraceRaysIndirect2KHR( vkGetDeviceProcAddr( device, "vkCmdTraceRaysIndirect2KHR" ) ); - - //=== VK_EXT_multi_draw === - vkCmdDrawMultiEXT = PFN_vkCmdDrawMultiEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMultiEXT" ) ); - vkCmdDrawMultiIndexedEXT = PFN_vkCmdDrawMultiIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMultiIndexedEXT" ) ); - - //=== VK_EXT_opacity_micromap === - vkCreateMicromapEXT = PFN_vkCreateMicromapEXT( vkGetDeviceProcAddr( device, "vkCreateMicromapEXT" ) ); - vkDestroyMicromapEXT = PFN_vkDestroyMicromapEXT( vkGetDeviceProcAddr( device, "vkDestroyMicromapEXT" ) ); - vkCmdBuildMicromapsEXT = PFN_vkCmdBuildMicromapsEXT( vkGetDeviceProcAddr( device, "vkCmdBuildMicromapsEXT" ) ); - vkBuildMicromapsEXT = PFN_vkBuildMicromapsEXT( vkGetDeviceProcAddr( device, "vkBuildMicromapsEXT" ) ); - vkCopyMicromapEXT = PFN_vkCopyMicromapEXT( vkGetDeviceProcAddr( device, "vkCopyMicromapEXT" ) ); - vkCopyMicromapToMemoryEXT = PFN_vkCopyMicromapToMemoryEXT( vkGetDeviceProcAddr( device, "vkCopyMicromapToMemoryEXT" ) ); - vkCopyMemoryToMicromapEXT = PFN_vkCopyMemoryToMicromapEXT( vkGetDeviceProcAddr( device, "vkCopyMemoryToMicromapEXT" ) ); - vkWriteMicromapsPropertiesEXT = PFN_vkWriteMicromapsPropertiesEXT( vkGetDeviceProcAddr( device, "vkWriteMicromapsPropertiesEXT" ) ); - vkCmdCopyMicromapEXT = PFN_vkCmdCopyMicromapEXT( vkGetDeviceProcAddr( device, "vkCmdCopyMicromapEXT" ) ); - vkCmdCopyMicromapToMemoryEXT = PFN_vkCmdCopyMicromapToMemoryEXT( vkGetDeviceProcAddr( device, "vkCmdCopyMicromapToMemoryEXT" ) ); - vkCmdCopyMemoryToMicromapEXT = PFN_vkCmdCopyMemoryToMicromapEXT( vkGetDeviceProcAddr( device, "vkCmdCopyMemoryToMicromapEXT" ) ); - vkCmdWriteMicromapsPropertiesEXT = PFN_vkCmdWriteMicromapsPropertiesEXT( vkGetDeviceProcAddr( device, "vkCmdWriteMicromapsPropertiesEXT" ) ); - vkGetDeviceMicromapCompatibilityEXT = PFN_vkGetDeviceMicromapCompatibilityEXT( vkGetDeviceProcAddr( device, "vkGetDeviceMicromapCompatibilityEXT" ) ); - vkGetMicromapBuildSizesEXT = 
PFN_vkGetMicromapBuildSizesEXT( vkGetDeviceProcAddr( device, "vkGetMicromapBuildSizesEXT" ) ); - - //=== VK_HUAWEI_cluster_culling_shader === - vkCmdDrawClusterHUAWEI = PFN_vkCmdDrawClusterHUAWEI( vkGetDeviceProcAddr( device, "vkCmdDrawClusterHUAWEI" ) ); - vkCmdDrawClusterIndirectHUAWEI = PFN_vkCmdDrawClusterIndirectHUAWEI( vkGetDeviceProcAddr( device, "vkCmdDrawClusterIndirectHUAWEI" ) ); - - //=== VK_EXT_pageable_device_local_memory === - vkSetDeviceMemoryPriorityEXT = PFN_vkSetDeviceMemoryPriorityEXT( vkGetDeviceProcAddr( device, "vkSetDeviceMemoryPriorityEXT" ) ); - - //=== VK_KHR_maintenance4 === - vkGetDeviceBufferMemoryRequirementsKHR = - PFN_vkGetDeviceBufferMemoryRequirementsKHR( vkGetDeviceProcAddr( device, "vkGetDeviceBufferMemoryRequirementsKHR" ) ); - if ( !vkGetDeviceBufferMemoryRequirements ) - vkGetDeviceBufferMemoryRequirements = vkGetDeviceBufferMemoryRequirementsKHR; - vkGetDeviceImageMemoryRequirementsKHR = - PFN_vkGetDeviceImageMemoryRequirementsKHR( vkGetDeviceProcAddr( device, "vkGetDeviceImageMemoryRequirementsKHR" ) ); - if ( !vkGetDeviceImageMemoryRequirements ) - vkGetDeviceImageMemoryRequirements = vkGetDeviceImageMemoryRequirementsKHR; - vkGetDeviceImageSparseMemoryRequirementsKHR = - PFN_vkGetDeviceImageSparseMemoryRequirementsKHR( vkGetDeviceProcAddr( device, "vkGetDeviceImageSparseMemoryRequirementsKHR" ) ); - if ( !vkGetDeviceImageSparseMemoryRequirements ) - vkGetDeviceImageSparseMemoryRequirements = vkGetDeviceImageSparseMemoryRequirementsKHR; - - //=== VK_VALVE_descriptor_set_host_mapping === - vkGetDescriptorSetLayoutHostMappingInfoVALVE = - PFN_vkGetDescriptorSetLayoutHostMappingInfoVALVE( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutHostMappingInfoVALVE" ) ); - vkGetDescriptorSetHostMappingVALVE = PFN_vkGetDescriptorSetHostMappingVALVE( vkGetDeviceProcAddr( device, "vkGetDescriptorSetHostMappingVALVE" ) ); - - //=== VK_NV_copy_memory_indirect === - vkCmdCopyMemoryIndirectNV = PFN_vkCmdCopyMemoryIndirectNV( vkGetDeviceProcAddr( device, "vkCmdCopyMemoryIndirectNV" ) ); - vkCmdCopyMemoryToImageIndirectNV = PFN_vkCmdCopyMemoryToImageIndirectNV( vkGetDeviceProcAddr( device, "vkCmdCopyMemoryToImageIndirectNV" ) ); - - //=== VK_NV_memory_decompression === - vkCmdDecompressMemoryNV = PFN_vkCmdDecompressMemoryNV( vkGetDeviceProcAddr( device, "vkCmdDecompressMemoryNV" ) ); - vkCmdDecompressMemoryIndirectCountNV = PFN_vkCmdDecompressMemoryIndirectCountNV( vkGetDeviceProcAddr( device, "vkCmdDecompressMemoryIndirectCountNV" ) ); - - //=== VK_NV_device_generated_commands_compute === - vkGetPipelineIndirectMemoryRequirementsNV = - PFN_vkGetPipelineIndirectMemoryRequirementsNV( vkGetDeviceProcAddr( device, "vkGetPipelineIndirectMemoryRequirementsNV" ) ); - vkCmdUpdatePipelineIndirectBufferNV = PFN_vkCmdUpdatePipelineIndirectBufferNV( vkGetDeviceProcAddr( device, "vkCmdUpdatePipelineIndirectBufferNV" ) ); - vkGetPipelineIndirectDeviceAddressNV = PFN_vkGetPipelineIndirectDeviceAddressNV( vkGetDeviceProcAddr( device, "vkGetPipelineIndirectDeviceAddressNV" ) ); - - //=== VK_EXT_extended_dynamic_state3 === - vkCmdSetDepthClampEnableEXT = PFN_vkCmdSetDepthClampEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthClampEnableEXT" ) ); - vkCmdSetPolygonModeEXT = PFN_vkCmdSetPolygonModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetPolygonModeEXT" ) ); - vkCmdSetRasterizationSamplesEXT = PFN_vkCmdSetRasterizationSamplesEXT( vkGetDeviceProcAddr( device, "vkCmdSetRasterizationSamplesEXT" ) ); - vkCmdSetSampleMaskEXT = PFN_vkCmdSetSampleMaskEXT( vkGetDeviceProcAddr( 
device, "vkCmdSetSampleMaskEXT" ) ); - vkCmdSetAlphaToCoverageEnableEXT = PFN_vkCmdSetAlphaToCoverageEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetAlphaToCoverageEnableEXT" ) ); - vkCmdSetAlphaToOneEnableEXT = PFN_vkCmdSetAlphaToOneEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetAlphaToOneEnableEXT" ) ); - vkCmdSetLogicOpEnableEXT = PFN_vkCmdSetLogicOpEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetLogicOpEnableEXT" ) ); - vkCmdSetColorBlendEnableEXT = PFN_vkCmdSetColorBlendEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorBlendEnableEXT" ) ); - vkCmdSetColorBlendEquationEXT = PFN_vkCmdSetColorBlendEquationEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorBlendEquationEXT" ) ); - vkCmdSetColorWriteMaskEXT = PFN_vkCmdSetColorWriteMaskEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorWriteMaskEXT" ) ); - vkCmdSetTessellationDomainOriginEXT = PFN_vkCmdSetTessellationDomainOriginEXT( vkGetDeviceProcAddr( device, "vkCmdSetTessellationDomainOriginEXT" ) ); - vkCmdSetRasterizationStreamEXT = PFN_vkCmdSetRasterizationStreamEXT( vkGetDeviceProcAddr( device, "vkCmdSetRasterizationStreamEXT" ) ); - vkCmdSetConservativeRasterizationModeEXT = - PFN_vkCmdSetConservativeRasterizationModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetConservativeRasterizationModeEXT" ) ); - vkCmdSetExtraPrimitiveOverestimationSizeEXT = - PFN_vkCmdSetExtraPrimitiveOverestimationSizeEXT( vkGetDeviceProcAddr( device, "vkCmdSetExtraPrimitiveOverestimationSizeEXT" ) ); - vkCmdSetDepthClipEnableEXT = PFN_vkCmdSetDepthClipEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthClipEnableEXT" ) ); - vkCmdSetSampleLocationsEnableEXT = PFN_vkCmdSetSampleLocationsEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetSampleLocationsEnableEXT" ) ); - vkCmdSetColorBlendAdvancedEXT = PFN_vkCmdSetColorBlendAdvancedEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorBlendAdvancedEXT" ) ); - vkCmdSetProvokingVertexModeEXT = PFN_vkCmdSetProvokingVertexModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetProvokingVertexModeEXT" ) ); - vkCmdSetLineRasterizationModeEXT = PFN_vkCmdSetLineRasterizationModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetLineRasterizationModeEXT" ) ); - vkCmdSetLineStippleEnableEXT = PFN_vkCmdSetLineStippleEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetLineStippleEnableEXT" ) ); - vkCmdSetDepthClipNegativeOneToOneEXT = PFN_vkCmdSetDepthClipNegativeOneToOneEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthClipNegativeOneToOneEXT" ) ); - vkCmdSetViewportWScalingEnableNV = PFN_vkCmdSetViewportWScalingEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportWScalingEnableNV" ) ); - vkCmdSetViewportSwizzleNV = PFN_vkCmdSetViewportSwizzleNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportSwizzleNV" ) ); - vkCmdSetCoverageToColorEnableNV = PFN_vkCmdSetCoverageToColorEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageToColorEnableNV" ) ); - vkCmdSetCoverageToColorLocationNV = PFN_vkCmdSetCoverageToColorLocationNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageToColorLocationNV" ) ); - vkCmdSetCoverageModulationModeNV = PFN_vkCmdSetCoverageModulationModeNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageModulationModeNV" ) ); - vkCmdSetCoverageModulationTableEnableNV = - PFN_vkCmdSetCoverageModulationTableEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageModulationTableEnableNV" ) ); - vkCmdSetCoverageModulationTableNV = PFN_vkCmdSetCoverageModulationTableNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageModulationTableNV" ) ); - vkCmdSetShadingRateImageEnableNV = PFN_vkCmdSetShadingRateImageEnableNV( vkGetDeviceProcAddr( device, 
"vkCmdSetShadingRateImageEnableNV" ) ); - vkCmdSetRepresentativeFragmentTestEnableNV = - PFN_vkCmdSetRepresentativeFragmentTestEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetRepresentativeFragmentTestEnableNV" ) ); - vkCmdSetCoverageReductionModeNV = PFN_vkCmdSetCoverageReductionModeNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageReductionModeNV" ) ); - - //=== VK_EXT_shader_module_identifier === - vkGetShaderModuleIdentifierEXT = PFN_vkGetShaderModuleIdentifierEXT( vkGetDeviceProcAddr( device, "vkGetShaderModuleIdentifierEXT" ) ); - vkGetShaderModuleCreateInfoIdentifierEXT = - PFN_vkGetShaderModuleCreateInfoIdentifierEXT( vkGetDeviceProcAddr( device, "vkGetShaderModuleCreateInfoIdentifierEXT" ) ); - - //=== VK_NV_optical_flow === - vkCreateOpticalFlowSessionNV = PFN_vkCreateOpticalFlowSessionNV( vkGetDeviceProcAddr( device, "vkCreateOpticalFlowSessionNV" ) ); - vkDestroyOpticalFlowSessionNV = PFN_vkDestroyOpticalFlowSessionNV( vkGetDeviceProcAddr( device, "vkDestroyOpticalFlowSessionNV" ) ); - vkBindOpticalFlowSessionImageNV = PFN_vkBindOpticalFlowSessionImageNV( vkGetDeviceProcAddr( device, "vkBindOpticalFlowSessionImageNV" ) ); - vkCmdOpticalFlowExecuteNV = PFN_vkCmdOpticalFlowExecuteNV( vkGetDeviceProcAddr( device, "vkCmdOpticalFlowExecuteNV" ) ); - - //=== VK_KHR_maintenance5 === - vkCmdBindIndexBuffer2KHR = PFN_vkCmdBindIndexBuffer2KHR( vkGetDeviceProcAddr( device, "vkCmdBindIndexBuffer2KHR" ) ); - vkGetRenderingAreaGranularityKHR = PFN_vkGetRenderingAreaGranularityKHR( vkGetDeviceProcAddr( device, "vkGetRenderingAreaGranularityKHR" ) ); - vkGetDeviceImageSubresourceLayoutKHR = PFN_vkGetDeviceImageSubresourceLayoutKHR( vkGetDeviceProcAddr( device, "vkGetDeviceImageSubresourceLayoutKHR" ) ); - vkGetImageSubresourceLayout2KHR = PFN_vkGetImageSubresourceLayout2KHR( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout2KHR" ) ); - - //=== VK_AMD_anti_lag === - vkAntiLagUpdateAMD = PFN_vkAntiLagUpdateAMD( vkGetDeviceProcAddr( device, "vkAntiLagUpdateAMD" ) ); - - //=== VK_EXT_shader_object === - vkCreateShadersEXT = PFN_vkCreateShadersEXT( vkGetDeviceProcAddr( device, "vkCreateShadersEXT" ) ); - vkDestroyShaderEXT = PFN_vkDestroyShaderEXT( vkGetDeviceProcAddr( device, "vkDestroyShaderEXT" ) ); - vkGetShaderBinaryDataEXT = PFN_vkGetShaderBinaryDataEXT( vkGetDeviceProcAddr( device, "vkGetShaderBinaryDataEXT" ) ); - vkCmdBindShadersEXT = PFN_vkCmdBindShadersEXT( vkGetDeviceProcAddr( device, "vkCmdBindShadersEXT" ) ); - - //=== VK_KHR_pipeline_binary === - vkCreatePipelineBinariesKHR = PFN_vkCreatePipelineBinariesKHR( vkGetDeviceProcAddr( device, "vkCreatePipelineBinariesKHR" ) ); - vkDestroyPipelineBinaryKHR = PFN_vkDestroyPipelineBinaryKHR( vkGetDeviceProcAddr( device, "vkDestroyPipelineBinaryKHR" ) ); - vkGetPipelineKeyKHR = PFN_vkGetPipelineKeyKHR( vkGetDeviceProcAddr( device, "vkGetPipelineKeyKHR" ) ); - vkGetPipelineBinaryDataKHR = PFN_vkGetPipelineBinaryDataKHR( vkGetDeviceProcAddr( device, "vkGetPipelineBinaryDataKHR" ) ); - vkReleaseCapturedPipelineDataKHR = PFN_vkReleaseCapturedPipelineDataKHR( vkGetDeviceProcAddr( device, "vkReleaseCapturedPipelineDataKHR" ) ); - - //=== VK_QCOM_tile_properties === - vkGetFramebufferTilePropertiesQCOM = PFN_vkGetFramebufferTilePropertiesQCOM( vkGetDeviceProcAddr( device, "vkGetFramebufferTilePropertiesQCOM" ) ); - vkGetDynamicRenderingTilePropertiesQCOM = - PFN_vkGetDynamicRenderingTilePropertiesQCOM( vkGetDeviceProcAddr( device, "vkGetDynamicRenderingTilePropertiesQCOM" ) ); - - //=== VK_NV_low_latency2 === - vkSetLatencySleepModeNV = 
PFN_vkSetLatencySleepModeNV( vkGetDeviceProcAddr( device, "vkSetLatencySleepModeNV" ) ); - vkLatencySleepNV = PFN_vkLatencySleepNV( vkGetDeviceProcAddr( device, "vkLatencySleepNV" ) ); - vkSetLatencyMarkerNV = PFN_vkSetLatencyMarkerNV( vkGetDeviceProcAddr( device, "vkSetLatencyMarkerNV" ) ); - vkGetLatencyTimingsNV = PFN_vkGetLatencyTimingsNV( vkGetDeviceProcAddr( device, "vkGetLatencyTimingsNV" ) ); - vkQueueNotifyOutOfBandNV = PFN_vkQueueNotifyOutOfBandNV( vkGetDeviceProcAddr( device, "vkQueueNotifyOutOfBandNV" ) ); - - //=== VK_EXT_attachment_feedback_loop_dynamic_state === - vkCmdSetAttachmentFeedbackLoopEnableEXT = - PFN_vkCmdSetAttachmentFeedbackLoopEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetAttachmentFeedbackLoopEnableEXT" ) ); + //=== VK_EXT_extended_dynamic_state2 === + vkCmdSetPatchControlPointsEXT = PFN_vkCmdSetPatchControlPointsEXT( vkGetInstanceProcAddr( instance, "vkCmdSetPatchControlPointsEXT" ) ); + vkCmdSetRasterizerDiscardEnableEXT = PFN_vkCmdSetRasterizerDiscardEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetRasterizerDiscardEnableEXT" ) ); + if ( !vkCmdSetRasterizerDiscardEnable ) + vkCmdSetRasterizerDiscardEnable = vkCmdSetRasterizerDiscardEnableEXT; + vkCmdSetDepthBiasEnableEXT = PFN_vkCmdSetDepthBiasEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBiasEnableEXT" ) ); + if ( !vkCmdSetDepthBiasEnable ) + vkCmdSetDepthBiasEnable = vkCmdSetDepthBiasEnableEXT; + vkCmdSetLogicOpEXT = PFN_vkCmdSetLogicOpEXT( vkGetInstanceProcAddr( instance, "vkCmdSetLogicOpEXT" ) ); + vkCmdSetPrimitiveRestartEnableEXT = PFN_vkCmdSetPrimitiveRestartEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetPrimitiveRestartEnableEXT" ) ); + if ( !vkCmdSetPrimitiveRestartEnable ) + vkCmdSetPrimitiveRestartEnable = vkCmdSetPrimitiveRestartEnableEXT; #if defined( VK_USE_PLATFORM_SCREEN_QNX ) - //=== VK_QNX_external_memory_screen_buffer === - vkGetScreenBufferPropertiesQNX = PFN_vkGetScreenBufferPropertiesQNX( vkGetDeviceProcAddr( device, "vkGetScreenBufferPropertiesQNX" ) ); + //=== VK_QNX_screen_surface === + vkCreateScreenSurfaceQNX = PFN_vkCreateScreenSurfaceQNX( vkGetInstanceProcAddr( instance, "vkCreateScreenSurfaceQNX" ) ); + vkGetPhysicalDeviceScreenPresentationSupportQNX = + PFN_vkGetPhysicalDeviceScreenPresentationSupportQNX( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceScreenPresentationSupportQNX" ) ); #endif /*VK_USE_PLATFORM_SCREEN_QNX*/ - //=== VK_KHR_line_rasterization === - vkCmdSetLineStippleKHR = PFN_vkCmdSetLineStippleKHR( vkGetDeviceProcAddr( device, "vkCmdSetLineStippleKHR" ) ); + //=== VK_EXT_color_write_enable === + vkCmdSetColorWriteEnableEXT = PFN_vkCmdSetColorWriteEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetColorWriteEnableEXT" ) ); - //=== VK_KHR_calibrated_timestamps === - vkGetCalibratedTimestampsKHR = PFN_vkGetCalibratedTimestampsKHR( vkGetDeviceProcAddr( device, "vkGetCalibratedTimestampsKHR" ) ); + //=== VK_KHR_ray_tracing_maintenance1 === + vkCmdTraceRaysIndirect2KHR = PFN_vkCmdTraceRaysIndirect2KHR( vkGetInstanceProcAddr( instance, "vkCmdTraceRaysIndirect2KHR" ) ); - //=== VK_KHR_maintenance6 === - vkCmdBindDescriptorSets2KHR = PFN_vkCmdBindDescriptorSets2KHR( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorSets2KHR" ) ); - vkCmdPushConstants2KHR = PFN_vkCmdPushConstants2KHR( vkGetDeviceProcAddr( device, "vkCmdPushConstants2KHR" ) ); - vkCmdPushDescriptorSet2KHR = PFN_vkCmdPushDescriptorSet2KHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSet2KHR" ) ); - vkCmdPushDescriptorSetWithTemplate2KHR = - 
PFN_vkCmdPushDescriptorSetWithTemplate2KHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetWithTemplate2KHR" ) );
-      vkCmdSetDescriptorBufferOffsets2EXT = PFN_vkCmdSetDescriptorBufferOffsets2EXT( vkGetDeviceProcAddr( device, "vkCmdSetDescriptorBufferOffsets2EXT" ) );
-      vkCmdBindDescriptorBufferEmbeddedSamplers2EXT =
-        PFN_vkCmdBindDescriptorBufferEmbeddedSamplers2EXT( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorBufferEmbeddedSamplers2EXT" ) );
-    }
+      //=== VK_EXT_multi_draw ===
+      vkCmdDrawMultiEXT = PFN_vkCmdDrawMultiEXT( vkGetInstanceProcAddr( instance, "vkCmdDrawMultiEXT" ) );
+      vkCmdDrawMultiIndexedEXT = PFN_vkCmdDrawMultiIndexedEXT( vkGetInstanceProcAddr( instance, "vkCmdDrawMultiIndexedEXT" ) );
-    template <typename DynamicLoader>
-    void init( VULKAN_HPP_NAMESPACE::Instance const & instance, VULKAN_HPP_NAMESPACE::Device const & device, DynamicLoader const & dl ) VULKAN_HPP_NOEXCEPT
-    {
-      PFN_vkGetInstanceProcAddr getInstanceProcAddr = dl.template getProcAddress<PFN_vkGetInstanceProcAddr>( "vkGetInstanceProcAddr" );
-      PFN_vkGetDeviceProcAddr getDeviceProcAddr = dl.template getProcAddress<PFN_vkGetDeviceProcAddr>( "vkGetDeviceProcAddr" );
-      init( static_cast<VkInstance>( instance ), getInstanceProcAddr, static_cast<VkDevice>( device ), device ? getDeviceProcAddr : nullptr );
-    }
+      //=== VK_EXT_opacity_micromap ===
+      vkCreateMicromapEXT = PFN_vkCreateMicromapEXT( vkGetInstanceProcAddr( instance, "vkCreateMicromapEXT" ) );
+      vkDestroyMicromapEXT = PFN_vkDestroyMicromapEXT( vkGetInstanceProcAddr( instance, "vkDestroyMicromapEXT" ) );
+      vkCmdBuildMicromapsEXT = PFN_vkCmdBuildMicromapsEXT( vkGetInstanceProcAddr( instance, "vkCmdBuildMicromapsEXT" ) );
+      vkBuildMicromapsEXT = PFN_vkBuildMicromapsEXT( vkGetInstanceProcAddr( instance, "vkBuildMicromapsEXT" ) );
+      vkCopyMicromapEXT = PFN_vkCopyMicromapEXT( vkGetInstanceProcAddr( instance, "vkCopyMicromapEXT" ) );
+      vkCopyMicromapToMemoryEXT = PFN_vkCopyMicromapToMemoryEXT( vkGetInstanceProcAddr( instance, "vkCopyMicromapToMemoryEXT" ) );
+      vkCopyMemoryToMicromapEXT = PFN_vkCopyMemoryToMicromapEXT( vkGetInstanceProcAddr( instance, "vkCopyMemoryToMicromapEXT" ) );
+      vkWriteMicromapsPropertiesEXT = PFN_vkWriteMicromapsPropertiesEXT( vkGetInstanceProcAddr( instance, "vkWriteMicromapsPropertiesEXT" ) );
+      vkCmdCopyMicromapEXT = PFN_vkCmdCopyMicromapEXT( vkGetInstanceProcAddr( instance, "vkCmdCopyMicromapEXT" ) );
+      vkCmdCopyMicromapToMemoryEXT = PFN_vkCmdCopyMicromapToMemoryEXT( vkGetInstanceProcAddr( instance, "vkCmdCopyMicromapToMemoryEXT" ) );
+      vkCmdCopyMemoryToMicromapEXT = PFN_vkCmdCopyMemoryToMicromapEXT( vkGetInstanceProcAddr( instance, "vkCmdCopyMemoryToMicromapEXT" ) );
+      vkCmdWriteMicromapsPropertiesEXT = PFN_vkCmdWriteMicromapsPropertiesEXT( vkGetInstanceProcAddr( instance, "vkCmdWriteMicromapsPropertiesEXT" ) );
+      vkGetDeviceMicromapCompatibilityEXT =
+        PFN_vkGetDeviceMicromapCompatibilityEXT( vkGetInstanceProcAddr( instance, "vkGetDeviceMicromapCompatibilityEXT" ) );
+      vkGetMicromapBuildSizesEXT = PFN_vkGetMicromapBuildSizesEXT( vkGetInstanceProcAddr( instance, "vkGetMicromapBuildSizesEXT" ) );
-    template ( deviceCpp );
+
+      //=== VK_VERSION_1_0 ===
+      vkGetDeviceProcAddr = PFN_vkGetDeviceProcAddr( vkGetDeviceProcAddr( device, "vkGetDeviceProcAddr" ) );
+      vkDestroyDevice = PFN_vkDestroyDevice( vkGetDeviceProcAddr( device, "vkDestroyDevice" ) );
+      vkGetDeviceQueue = PFN_vkGetDeviceQueue( vkGetDeviceProcAddr( device, "vkGetDeviceQueue" ) );
+      vkQueueSubmit = PFN_vkQueueSubmit( vkGetDeviceProcAddr( device, "vkQueueSubmit" ) );
+      vkQueueWaitIdle = PFN_vkQueueWaitIdle( vkGetDeviceProcAddr( device, "vkQueueWaitIdle" ) );
+
vkDeviceWaitIdle = PFN_vkDeviceWaitIdle( vkGetDeviceProcAddr( device, "vkDeviceWaitIdle" ) ); + vkAllocateMemory = PFN_vkAllocateMemory( vkGetDeviceProcAddr( device, "vkAllocateMemory" ) ); + vkFreeMemory = PFN_vkFreeMemory( vkGetDeviceProcAddr( device, "vkFreeMemory" ) ); + vkMapMemory = PFN_vkMapMemory( vkGetDeviceProcAddr( device, "vkMapMemory" ) ); + vkUnmapMemory = PFN_vkUnmapMemory( vkGetDeviceProcAddr( device, "vkUnmapMemory" ) ); + vkFlushMappedMemoryRanges = PFN_vkFlushMappedMemoryRanges( vkGetDeviceProcAddr( device, "vkFlushMappedMemoryRanges" ) ); + vkInvalidateMappedMemoryRanges = PFN_vkInvalidateMappedMemoryRanges( vkGetDeviceProcAddr( device, "vkInvalidateMappedMemoryRanges" ) ); + vkGetDeviceMemoryCommitment = PFN_vkGetDeviceMemoryCommitment( vkGetDeviceProcAddr( device, "vkGetDeviceMemoryCommitment" ) ); + vkBindBufferMemory = PFN_vkBindBufferMemory( vkGetDeviceProcAddr( device, "vkBindBufferMemory" ) ); + vkBindImageMemory = PFN_vkBindImageMemory( vkGetDeviceProcAddr( device, "vkBindImageMemory" ) ); + vkGetBufferMemoryRequirements = PFN_vkGetBufferMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements" ) ); + vkGetImageMemoryRequirements = PFN_vkGetImageMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements" ) ); + vkGetImageSparseMemoryRequirements = PFN_vkGetImageSparseMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements" ) ); + vkQueueBindSparse = PFN_vkQueueBindSparse( vkGetDeviceProcAddr( device, "vkQueueBindSparse" ) ); + vkCreateFence = PFN_vkCreateFence( vkGetDeviceProcAddr( device, "vkCreateFence" ) ); + vkDestroyFence = PFN_vkDestroyFence( vkGetDeviceProcAddr( device, "vkDestroyFence" ) ); + vkResetFences = PFN_vkResetFences( vkGetDeviceProcAddr( device, "vkResetFences" ) ); + vkGetFenceStatus = PFN_vkGetFenceStatus( vkGetDeviceProcAddr( device, "vkGetFenceStatus" ) ); + vkWaitForFences = PFN_vkWaitForFences( vkGetDeviceProcAddr( device, "vkWaitForFences" ) ); + vkCreateSemaphore = PFN_vkCreateSemaphore( vkGetDeviceProcAddr( device, "vkCreateSemaphore" ) ); + vkDestroySemaphore = PFN_vkDestroySemaphore( vkGetDeviceProcAddr( device, "vkDestroySemaphore" ) ); + vkCreateEvent = PFN_vkCreateEvent( vkGetDeviceProcAddr( device, "vkCreateEvent" ) ); + vkDestroyEvent = PFN_vkDestroyEvent( vkGetDeviceProcAddr( device, "vkDestroyEvent" ) ); + vkGetEventStatus = PFN_vkGetEventStatus( vkGetDeviceProcAddr( device, "vkGetEventStatus" ) ); + vkSetEvent = PFN_vkSetEvent( vkGetDeviceProcAddr( device, "vkSetEvent" ) ); + vkResetEvent = PFN_vkResetEvent( vkGetDeviceProcAddr( device, "vkResetEvent" ) ); + vkCreateQueryPool = PFN_vkCreateQueryPool( vkGetDeviceProcAddr( device, "vkCreateQueryPool" ) ); + vkDestroyQueryPool = PFN_vkDestroyQueryPool( vkGetDeviceProcAddr( device, "vkDestroyQueryPool" ) ); + vkGetQueryPoolResults = PFN_vkGetQueryPoolResults( vkGetDeviceProcAddr( device, "vkGetQueryPoolResults" ) ); + vkCreateBuffer = PFN_vkCreateBuffer( vkGetDeviceProcAddr( device, "vkCreateBuffer" ) ); + vkDestroyBuffer = PFN_vkDestroyBuffer( vkGetDeviceProcAddr( device, "vkDestroyBuffer" ) ); + vkCreateBufferView = PFN_vkCreateBufferView( vkGetDeviceProcAddr( device, "vkCreateBufferView" ) ); + vkDestroyBufferView = PFN_vkDestroyBufferView( vkGetDeviceProcAddr( device, "vkDestroyBufferView" ) ); + vkCreateImage = PFN_vkCreateImage( vkGetDeviceProcAddr( device, "vkCreateImage" ) ); + vkDestroyImage = PFN_vkDestroyImage( vkGetDeviceProcAddr( device, "vkDestroyImage" ) ); + vkGetImageSubresourceLayout = 
PFN_vkGetImageSubresourceLayout( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout" ) ); + vkCreateImageView = PFN_vkCreateImageView( vkGetDeviceProcAddr( device, "vkCreateImageView" ) ); + vkDestroyImageView = PFN_vkDestroyImageView( vkGetDeviceProcAddr( device, "vkDestroyImageView" ) ); + vkCreateShaderModule = PFN_vkCreateShaderModule( vkGetDeviceProcAddr( device, "vkCreateShaderModule" ) ); + vkDestroyShaderModule = PFN_vkDestroyShaderModule( vkGetDeviceProcAddr( device, "vkDestroyShaderModule" ) ); + vkCreatePipelineCache = PFN_vkCreatePipelineCache( vkGetDeviceProcAddr( device, "vkCreatePipelineCache" ) ); + vkDestroyPipelineCache = PFN_vkDestroyPipelineCache( vkGetDeviceProcAddr( device, "vkDestroyPipelineCache" ) ); + vkGetPipelineCacheData = PFN_vkGetPipelineCacheData( vkGetDeviceProcAddr( device, "vkGetPipelineCacheData" ) ); + vkMergePipelineCaches = PFN_vkMergePipelineCaches( vkGetDeviceProcAddr( device, "vkMergePipelineCaches" ) ); + vkCreateGraphicsPipelines = PFN_vkCreateGraphicsPipelines( vkGetDeviceProcAddr( device, "vkCreateGraphicsPipelines" ) ); + vkCreateComputePipelines = PFN_vkCreateComputePipelines( vkGetDeviceProcAddr( device, "vkCreateComputePipelines" ) ); + vkDestroyPipeline = PFN_vkDestroyPipeline( vkGetDeviceProcAddr( device, "vkDestroyPipeline" ) ); + vkCreatePipelineLayout = PFN_vkCreatePipelineLayout( vkGetDeviceProcAddr( device, "vkCreatePipelineLayout" ) ); + vkDestroyPipelineLayout = PFN_vkDestroyPipelineLayout( vkGetDeviceProcAddr( device, "vkDestroyPipelineLayout" ) ); + vkCreateSampler = PFN_vkCreateSampler( vkGetDeviceProcAddr( device, "vkCreateSampler" ) ); + vkDestroySampler = PFN_vkDestroySampler( vkGetDeviceProcAddr( device, "vkDestroySampler" ) ); + vkCreateDescriptorSetLayout = PFN_vkCreateDescriptorSetLayout( vkGetDeviceProcAddr( device, "vkCreateDescriptorSetLayout" ) ); + vkDestroyDescriptorSetLayout = PFN_vkDestroyDescriptorSetLayout( vkGetDeviceProcAddr( device, "vkDestroyDescriptorSetLayout" ) ); + vkCreateDescriptorPool = PFN_vkCreateDescriptorPool( vkGetDeviceProcAddr( device, "vkCreateDescriptorPool" ) ); + vkDestroyDescriptorPool = PFN_vkDestroyDescriptorPool( vkGetDeviceProcAddr( device, "vkDestroyDescriptorPool" ) ); + vkResetDescriptorPool = PFN_vkResetDescriptorPool( vkGetDeviceProcAddr( device, "vkResetDescriptorPool" ) ); + vkAllocateDescriptorSets = PFN_vkAllocateDescriptorSets( vkGetDeviceProcAddr( device, "vkAllocateDescriptorSets" ) ); + vkFreeDescriptorSets = PFN_vkFreeDescriptorSets( vkGetDeviceProcAddr( device, "vkFreeDescriptorSets" ) ); + vkUpdateDescriptorSets = PFN_vkUpdateDescriptorSets( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSets" ) ); + vkCreateFramebuffer = PFN_vkCreateFramebuffer( vkGetDeviceProcAddr( device, "vkCreateFramebuffer" ) ); + vkDestroyFramebuffer = PFN_vkDestroyFramebuffer( vkGetDeviceProcAddr( device, "vkDestroyFramebuffer" ) ); + vkCreateRenderPass = PFN_vkCreateRenderPass( vkGetDeviceProcAddr( device, "vkCreateRenderPass" ) ); + vkDestroyRenderPass = PFN_vkDestroyRenderPass( vkGetDeviceProcAddr( device, "vkDestroyRenderPass" ) ); + vkGetRenderAreaGranularity = PFN_vkGetRenderAreaGranularity( vkGetDeviceProcAddr( device, "vkGetRenderAreaGranularity" ) ); + vkCreateCommandPool = PFN_vkCreateCommandPool( vkGetDeviceProcAddr( device, "vkCreateCommandPool" ) ); + vkDestroyCommandPool = PFN_vkDestroyCommandPool( vkGetDeviceProcAddr( device, "vkDestroyCommandPool" ) ); + vkResetCommandPool = PFN_vkResetCommandPool( vkGetDeviceProcAddr( device, "vkResetCommandPool" ) ); + 
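The VK_VERSION_1_0 block above rebuilds the per-device table by chaining through vkGetDeviceProcAddr, so each call dispatches straight into the ICD instead of going through the loader trampoline. A minimal sketch of that chain for a single command, assuming a hypothetical loadQueueSubmit helper; pointers obtained this way are only valid for that VkDevice and its child objects:

    #include <vulkan/vulkan.h>

    inline PFN_vkQueueSubmit loadQueueSubmit( VkInstance instance, VkDevice device )
    {
      // vkGetInstanceProcAddr is exported by the loader; it resolves vkGetDeviceProcAddr,
      // which in turn resolves device-level commands without per-call dispatch overhead.
      auto getDeviceProcAddr =
        reinterpret_cast<PFN_vkGetDeviceProcAddr>( vkGetInstanceProcAddr( instance, "vkGetDeviceProcAddr" ) );
      return reinterpret_cast<PFN_vkQueueSubmit>( getDeviceProcAddr( device, "vkQueueSubmit" ) );
    }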
vkAllocateCommandBuffers = PFN_vkAllocateCommandBuffers( vkGetDeviceProcAddr( device, "vkAllocateCommandBuffers" ) ); + vkFreeCommandBuffers = PFN_vkFreeCommandBuffers( vkGetDeviceProcAddr( device, "vkFreeCommandBuffers" ) ); + vkBeginCommandBuffer = PFN_vkBeginCommandBuffer( vkGetDeviceProcAddr( device, "vkBeginCommandBuffer" ) ); + vkEndCommandBuffer = PFN_vkEndCommandBuffer( vkGetDeviceProcAddr( device, "vkEndCommandBuffer" ) ); + vkResetCommandBuffer = PFN_vkResetCommandBuffer( vkGetDeviceProcAddr( device, "vkResetCommandBuffer" ) ); + vkCmdBindPipeline = PFN_vkCmdBindPipeline( vkGetDeviceProcAddr( device, "vkCmdBindPipeline" ) ); + vkCmdSetViewport = PFN_vkCmdSetViewport( vkGetDeviceProcAddr( device, "vkCmdSetViewport" ) ); + vkCmdSetScissor = PFN_vkCmdSetScissor( vkGetDeviceProcAddr( device, "vkCmdSetScissor" ) ); + vkCmdSetLineWidth = PFN_vkCmdSetLineWidth( vkGetDeviceProcAddr( device, "vkCmdSetLineWidth" ) ); + vkCmdSetDepthBias = PFN_vkCmdSetDepthBias( vkGetDeviceProcAddr( device, "vkCmdSetDepthBias" ) ); + vkCmdSetBlendConstants = PFN_vkCmdSetBlendConstants( vkGetDeviceProcAddr( device, "vkCmdSetBlendConstants" ) ); + vkCmdSetDepthBounds = PFN_vkCmdSetDepthBounds( vkGetDeviceProcAddr( device, "vkCmdSetDepthBounds" ) ); + vkCmdSetStencilCompareMask = PFN_vkCmdSetStencilCompareMask( vkGetDeviceProcAddr( device, "vkCmdSetStencilCompareMask" ) ); + vkCmdSetStencilWriteMask = PFN_vkCmdSetStencilWriteMask( vkGetDeviceProcAddr( device, "vkCmdSetStencilWriteMask" ) ); + vkCmdSetStencilReference = PFN_vkCmdSetStencilReference( vkGetDeviceProcAddr( device, "vkCmdSetStencilReference" ) ); + vkCmdBindDescriptorSets = PFN_vkCmdBindDescriptorSets( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorSets" ) ); + vkCmdBindIndexBuffer = PFN_vkCmdBindIndexBuffer( vkGetDeviceProcAddr( device, "vkCmdBindIndexBuffer" ) ); + vkCmdBindVertexBuffers = PFN_vkCmdBindVertexBuffers( vkGetDeviceProcAddr( device, "vkCmdBindVertexBuffers" ) ); + vkCmdDraw = PFN_vkCmdDraw( vkGetDeviceProcAddr( device, "vkCmdDraw" ) ); + vkCmdDrawIndexed = PFN_vkCmdDrawIndexed( vkGetDeviceProcAddr( device, "vkCmdDrawIndexed" ) ); + vkCmdDrawIndirect = PFN_vkCmdDrawIndirect( vkGetDeviceProcAddr( device, "vkCmdDrawIndirect" ) ); + vkCmdDrawIndexedIndirect = PFN_vkCmdDrawIndexedIndirect( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirect" ) ); + vkCmdDispatch = PFN_vkCmdDispatch( vkGetDeviceProcAddr( device, "vkCmdDispatch" ) ); + vkCmdDispatchIndirect = PFN_vkCmdDispatchIndirect( vkGetDeviceProcAddr( device, "vkCmdDispatchIndirect" ) ); + vkCmdCopyBuffer = PFN_vkCmdCopyBuffer( vkGetDeviceProcAddr( device, "vkCmdCopyBuffer" ) ); + vkCmdCopyImage = PFN_vkCmdCopyImage( vkGetDeviceProcAddr( device, "vkCmdCopyImage" ) ); + vkCmdBlitImage = PFN_vkCmdBlitImage( vkGetDeviceProcAddr( device, "vkCmdBlitImage" ) ); + vkCmdCopyBufferToImage = PFN_vkCmdCopyBufferToImage( vkGetDeviceProcAddr( device, "vkCmdCopyBufferToImage" ) ); + vkCmdCopyImageToBuffer = PFN_vkCmdCopyImageToBuffer( vkGetDeviceProcAddr( device, "vkCmdCopyImageToBuffer" ) ); + vkCmdUpdateBuffer = PFN_vkCmdUpdateBuffer( vkGetDeviceProcAddr( device, "vkCmdUpdateBuffer" ) ); + vkCmdFillBuffer = PFN_vkCmdFillBuffer( vkGetDeviceProcAddr( device, "vkCmdFillBuffer" ) ); + vkCmdClearColorImage = PFN_vkCmdClearColorImage( vkGetDeviceProcAddr( device, "vkCmdClearColorImage" ) ); + vkCmdClearDepthStencilImage = PFN_vkCmdClearDepthStencilImage( vkGetDeviceProcAddr( device, "vkCmdClearDepthStencilImage" ) ); + vkCmdClearAttachments = PFN_vkCmdClearAttachments( vkGetDeviceProcAddr( device, 
"vkCmdClearAttachments" ) ); + vkCmdResolveImage = PFN_vkCmdResolveImage( vkGetDeviceProcAddr( device, "vkCmdResolveImage" ) ); + vkCmdSetEvent = PFN_vkCmdSetEvent( vkGetDeviceProcAddr( device, "vkCmdSetEvent" ) ); + vkCmdResetEvent = PFN_vkCmdResetEvent( vkGetDeviceProcAddr( device, "vkCmdResetEvent" ) ); + vkCmdWaitEvents = PFN_vkCmdWaitEvents( vkGetDeviceProcAddr( device, "vkCmdWaitEvents" ) ); + vkCmdPipelineBarrier = PFN_vkCmdPipelineBarrier( vkGetDeviceProcAddr( device, "vkCmdPipelineBarrier" ) ); + vkCmdBeginQuery = PFN_vkCmdBeginQuery( vkGetDeviceProcAddr( device, "vkCmdBeginQuery" ) ); + vkCmdEndQuery = PFN_vkCmdEndQuery( vkGetDeviceProcAddr( device, "vkCmdEndQuery" ) ); + vkCmdResetQueryPool = PFN_vkCmdResetQueryPool( vkGetDeviceProcAddr( device, "vkCmdResetQueryPool" ) ); + vkCmdWriteTimestamp = PFN_vkCmdWriteTimestamp( vkGetDeviceProcAddr( device, "vkCmdWriteTimestamp" ) ); + vkCmdCopyQueryPoolResults = PFN_vkCmdCopyQueryPoolResults( vkGetDeviceProcAddr( device, "vkCmdCopyQueryPoolResults" ) ); + vkCmdPushConstants = PFN_vkCmdPushConstants( vkGetDeviceProcAddr( device, "vkCmdPushConstants" ) ); + vkCmdBeginRenderPass = PFN_vkCmdBeginRenderPass( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass" ) ); + vkCmdNextSubpass = PFN_vkCmdNextSubpass( vkGetDeviceProcAddr( device, "vkCmdNextSubpass" ) ); + vkCmdEndRenderPass = PFN_vkCmdEndRenderPass( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass" ) ); + vkCmdExecuteCommands = PFN_vkCmdExecuteCommands( vkGetDeviceProcAddr( device, "vkCmdExecuteCommands" ) ); + + //=== VK_VERSION_1_1 === + vkBindBufferMemory2 = PFN_vkBindBufferMemory2( vkGetDeviceProcAddr( device, "vkBindBufferMemory2" ) ); + vkBindImageMemory2 = PFN_vkBindImageMemory2( vkGetDeviceProcAddr( device, "vkBindImageMemory2" ) ); + vkGetDeviceGroupPeerMemoryFeatures = PFN_vkGetDeviceGroupPeerMemoryFeatures( vkGetDeviceProcAddr( device, "vkGetDeviceGroupPeerMemoryFeatures" ) ); + vkCmdSetDeviceMask = PFN_vkCmdSetDeviceMask( vkGetDeviceProcAddr( device, "vkCmdSetDeviceMask" ) ); + vkCmdDispatchBase = PFN_vkCmdDispatchBase( vkGetDeviceProcAddr( device, "vkCmdDispatchBase" ) ); + vkGetImageMemoryRequirements2 = PFN_vkGetImageMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements2" ) ); + vkGetBufferMemoryRequirements2 = PFN_vkGetBufferMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements2" ) ); + vkGetImageSparseMemoryRequirements2 = PFN_vkGetImageSparseMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements2" ) ); + vkTrimCommandPool = PFN_vkTrimCommandPool( vkGetDeviceProcAddr( device, "vkTrimCommandPool" ) ); + vkGetDeviceQueue2 = PFN_vkGetDeviceQueue2( vkGetDeviceProcAddr( device, "vkGetDeviceQueue2" ) ); + vkCreateSamplerYcbcrConversion = PFN_vkCreateSamplerYcbcrConversion( vkGetDeviceProcAddr( device, "vkCreateSamplerYcbcrConversion" ) ); + vkDestroySamplerYcbcrConversion = PFN_vkDestroySamplerYcbcrConversion( vkGetDeviceProcAddr( device, "vkDestroySamplerYcbcrConversion" ) ); + vkCreateDescriptorUpdateTemplate = PFN_vkCreateDescriptorUpdateTemplate( vkGetDeviceProcAddr( device, "vkCreateDescriptorUpdateTemplate" ) ); + vkDestroyDescriptorUpdateTemplate = PFN_vkDestroyDescriptorUpdateTemplate( vkGetDeviceProcAddr( device, "vkDestroyDescriptorUpdateTemplate" ) ); + vkUpdateDescriptorSetWithTemplate = PFN_vkUpdateDescriptorSetWithTemplate( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSetWithTemplate" ) ); + vkGetDescriptorSetLayoutSupport = PFN_vkGetDescriptorSetLayoutSupport( 
vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutSupport" ) ); + + //=== VK_VERSION_1_2 === + vkCmdDrawIndirectCount = PFN_vkCmdDrawIndirectCount( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCount" ) ); + vkCmdDrawIndexedIndirectCount = PFN_vkCmdDrawIndexedIndirectCount( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCount" ) ); + vkCreateRenderPass2 = PFN_vkCreateRenderPass2( vkGetDeviceProcAddr( device, "vkCreateRenderPass2" ) ); + vkCmdBeginRenderPass2 = PFN_vkCmdBeginRenderPass2( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass2" ) ); + vkCmdNextSubpass2 = PFN_vkCmdNextSubpass2( vkGetDeviceProcAddr( device, "vkCmdNextSubpass2" ) ); + vkCmdEndRenderPass2 = PFN_vkCmdEndRenderPass2( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass2" ) ); + vkResetQueryPool = PFN_vkResetQueryPool( vkGetDeviceProcAddr( device, "vkResetQueryPool" ) ); + vkGetSemaphoreCounterValue = PFN_vkGetSemaphoreCounterValue( vkGetDeviceProcAddr( device, "vkGetSemaphoreCounterValue" ) ); + vkWaitSemaphores = PFN_vkWaitSemaphores( vkGetDeviceProcAddr( device, "vkWaitSemaphores" ) ); + vkSignalSemaphore = PFN_vkSignalSemaphore( vkGetDeviceProcAddr( device, "vkSignalSemaphore" ) ); + vkGetBufferDeviceAddress = PFN_vkGetBufferDeviceAddress( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddress" ) ); + vkGetBufferOpaqueCaptureAddress = PFN_vkGetBufferOpaqueCaptureAddress( vkGetDeviceProcAddr( device, "vkGetBufferOpaqueCaptureAddress" ) ); + vkGetDeviceMemoryOpaqueCaptureAddress = + PFN_vkGetDeviceMemoryOpaqueCaptureAddress( vkGetDeviceProcAddr( device, "vkGetDeviceMemoryOpaqueCaptureAddress" ) ); + + //=== VK_VERSION_1_3 === + vkCreatePrivateDataSlot = PFN_vkCreatePrivateDataSlot( vkGetDeviceProcAddr( device, "vkCreatePrivateDataSlot" ) ); + vkDestroyPrivateDataSlot = PFN_vkDestroyPrivateDataSlot( vkGetDeviceProcAddr( device, "vkDestroyPrivateDataSlot" ) ); + vkSetPrivateData = PFN_vkSetPrivateData( vkGetDeviceProcAddr( device, "vkSetPrivateData" ) ); + vkGetPrivateData = PFN_vkGetPrivateData( vkGetDeviceProcAddr( device, "vkGetPrivateData" ) ); + vkCmdSetEvent2 = PFN_vkCmdSetEvent2( vkGetDeviceProcAddr( device, "vkCmdSetEvent2" ) ); + vkCmdResetEvent2 = PFN_vkCmdResetEvent2( vkGetDeviceProcAddr( device, "vkCmdResetEvent2" ) ); + vkCmdWaitEvents2 = PFN_vkCmdWaitEvents2( vkGetDeviceProcAddr( device, "vkCmdWaitEvents2" ) ); + vkCmdPipelineBarrier2 = PFN_vkCmdPipelineBarrier2( vkGetDeviceProcAddr( device, "vkCmdPipelineBarrier2" ) ); + vkCmdWriteTimestamp2 = PFN_vkCmdWriteTimestamp2( vkGetDeviceProcAddr( device, "vkCmdWriteTimestamp2" ) ); + vkQueueSubmit2 = PFN_vkQueueSubmit2( vkGetDeviceProcAddr( device, "vkQueueSubmit2" ) ); + vkCmdCopyBuffer2 = PFN_vkCmdCopyBuffer2( vkGetDeviceProcAddr( device, "vkCmdCopyBuffer2" ) ); + vkCmdCopyImage2 = PFN_vkCmdCopyImage2( vkGetDeviceProcAddr( device, "vkCmdCopyImage2" ) ); + vkCmdCopyBufferToImage2 = PFN_vkCmdCopyBufferToImage2( vkGetDeviceProcAddr( device, "vkCmdCopyBufferToImage2" ) ); + vkCmdCopyImageToBuffer2 = PFN_vkCmdCopyImageToBuffer2( vkGetDeviceProcAddr( device, "vkCmdCopyImageToBuffer2" ) ); + vkCmdBlitImage2 = PFN_vkCmdBlitImage2( vkGetDeviceProcAddr( device, "vkCmdBlitImage2" ) ); + vkCmdResolveImage2 = PFN_vkCmdResolveImage2( vkGetDeviceProcAddr( device, "vkCmdResolveImage2" ) ); + vkCmdBeginRendering = PFN_vkCmdBeginRendering( vkGetDeviceProcAddr( device, "vkCmdBeginRendering" ) ); + vkCmdEndRendering = PFN_vkCmdEndRendering( vkGetDeviceProcAddr( device, "vkCmdEndRendering" ) ); + vkCmdSetCullMode = PFN_vkCmdSetCullMode( vkGetDeviceProcAddr( device, 
"vkCmdSetCullMode" ) ); + vkCmdSetFrontFace = PFN_vkCmdSetFrontFace( vkGetDeviceProcAddr( device, "vkCmdSetFrontFace" ) ); + vkCmdSetPrimitiveTopology = PFN_vkCmdSetPrimitiveTopology( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveTopology" ) ); + vkCmdSetViewportWithCount = PFN_vkCmdSetViewportWithCount( vkGetDeviceProcAddr( device, "vkCmdSetViewportWithCount" ) ); + vkCmdSetScissorWithCount = PFN_vkCmdSetScissorWithCount( vkGetDeviceProcAddr( device, "vkCmdSetScissorWithCount" ) ); + vkCmdBindVertexBuffers2 = PFN_vkCmdBindVertexBuffers2( vkGetDeviceProcAddr( device, "vkCmdBindVertexBuffers2" ) ); + vkCmdSetDepthTestEnable = PFN_vkCmdSetDepthTestEnable( vkGetDeviceProcAddr( device, "vkCmdSetDepthTestEnable" ) ); + vkCmdSetDepthWriteEnable = PFN_vkCmdSetDepthWriteEnable( vkGetDeviceProcAddr( device, "vkCmdSetDepthWriteEnable" ) ); + vkCmdSetDepthCompareOp = PFN_vkCmdSetDepthCompareOp( vkGetDeviceProcAddr( device, "vkCmdSetDepthCompareOp" ) ); + vkCmdSetDepthBoundsTestEnable = PFN_vkCmdSetDepthBoundsTestEnable( vkGetDeviceProcAddr( device, "vkCmdSetDepthBoundsTestEnable" ) ); + vkCmdSetStencilTestEnable = PFN_vkCmdSetStencilTestEnable( vkGetDeviceProcAddr( device, "vkCmdSetStencilTestEnable" ) ); + vkCmdSetStencilOp = PFN_vkCmdSetStencilOp( vkGetDeviceProcAddr( device, "vkCmdSetStencilOp" ) ); + vkCmdSetRasterizerDiscardEnable = PFN_vkCmdSetRasterizerDiscardEnable( vkGetDeviceProcAddr( device, "vkCmdSetRasterizerDiscardEnable" ) ); + vkCmdSetDepthBiasEnable = PFN_vkCmdSetDepthBiasEnable( vkGetDeviceProcAddr( device, "vkCmdSetDepthBiasEnable" ) ); + vkCmdSetPrimitiveRestartEnable = PFN_vkCmdSetPrimitiveRestartEnable( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveRestartEnable" ) ); + vkGetDeviceBufferMemoryRequirements = PFN_vkGetDeviceBufferMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetDeviceBufferMemoryRequirements" ) ); + vkGetDeviceImageMemoryRequirements = PFN_vkGetDeviceImageMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetDeviceImageMemoryRequirements" ) ); + vkGetDeviceImageSparseMemoryRequirements = + PFN_vkGetDeviceImageSparseMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetDeviceImageSparseMemoryRequirements" ) ); + + //=== VK_KHR_swapchain === + vkCreateSwapchainKHR = PFN_vkCreateSwapchainKHR( vkGetDeviceProcAddr( device, "vkCreateSwapchainKHR" ) ); + vkDestroySwapchainKHR = PFN_vkDestroySwapchainKHR( vkGetDeviceProcAddr( device, "vkDestroySwapchainKHR" ) ); + vkGetSwapchainImagesKHR = PFN_vkGetSwapchainImagesKHR( vkGetDeviceProcAddr( device, "vkGetSwapchainImagesKHR" ) ); + vkAcquireNextImageKHR = PFN_vkAcquireNextImageKHR( vkGetDeviceProcAddr( device, "vkAcquireNextImageKHR" ) ); + vkQueuePresentKHR = PFN_vkQueuePresentKHR( vkGetDeviceProcAddr( device, "vkQueuePresentKHR" ) ); + vkGetDeviceGroupPresentCapabilitiesKHR = + PFN_vkGetDeviceGroupPresentCapabilitiesKHR( vkGetDeviceProcAddr( device, "vkGetDeviceGroupPresentCapabilitiesKHR" ) ); + vkGetDeviceGroupSurfacePresentModesKHR = + PFN_vkGetDeviceGroupSurfacePresentModesKHR( vkGetDeviceProcAddr( device, "vkGetDeviceGroupSurfacePresentModesKHR" ) ); + vkAcquireNextImage2KHR = PFN_vkAcquireNextImage2KHR( vkGetDeviceProcAddr( device, "vkAcquireNextImage2KHR" ) ); + + //=== VK_KHR_display_swapchain === + vkCreateSharedSwapchainsKHR = PFN_vkCreateSharedSwapchainsKHR( vkGetDeviceProcAddr( device, "vkCreateSharedSwapchainsKHR" ) ); + + //=== VK_EXT_debug_marker === + vkDebugMarkerSetObjectTagEXT = PFN_vkDebugMarkerSetObjectTagEXT( vkGetDeviceProcAddr( device, "vkDebugMarkerSetObjectTagEXT" ) ); + 
vkDebugMarkerSetObjectNameEXT = PFN_vkDebugMarkerSetObjectNameEXT( vkGetDeviceProcAddr( device, "vkDebugMarkerSetObjectNameEXT" ) ); + vkCmdDebugMarkerBeginEXT = PFN_vkCmdDebugMarkerBeginEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerBeginEXT" ) ); + vkCmdDebugMarkerEndEXT = PFN_vkCmdDebugMarkerEndEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerEndEXT" ) ); + vkCmdDebugMarkerInsertEXT = PFN_vkCmdDebugMarkerInsertEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerInsertEXT" ) ); + + //=== VK_KHR_video_queue === + vkCreateVideoSessionKHR = PFN_vkCreateVideoSessionKHR( vkGetDeviceProcAddr( device, "vkCreateVideoSessionKHR" ) ); + vkDestroyVideoSessionKHR = PFN_vkDestroyVideoSessionKHR( vkGetDeviceProcAddr( device, "vkDestroyVideoSessionKHR" ) ); + vkGetVideoSessionMemoryRequirementsKHR = + PFN_vkGetVideoSessionMemoryRequirementsKHR( vkGetDeviceProcAddr( device, "vkGetVideoSessionMemoryRequirementsKHR" ) ); + vkBindVideoSessionMemoryKHR = PFN_vkBindVideoSessionMemoryKHR( vkGetDeviceProcAddr( device, "vkBindVideoSessionMemoryKHR" ) ); + vkCreateVideoSessionParametersKHR = PFN_vkCreateVideoSessionParametersKHR( vkGetDeviceProcAddr( device, "vkCreateVideoSessionParametersKHR" ) ); + vkUpdateVideoSessionParametersKHR = PFN_vkUpdateVideoSessionParametersKHR( vkGetDeviceProcAddr( device, "vkUpdateVideoSessionParametersKHR" ) ); + vkDestroyVideoSessionParametersKHR = PFN_vkDestroyVideoSessionParametersKHR( vkGetDeviceProcAddr( device, "vkDestroyVideoSessionParametersKHR" ) ); + vkCmdBeginVideoCodingKHR = PFN_vkCmdBeginVideoCodingKHR( vkGetDeviceProcAddr( device, "vkCmdBeginVideoCodingKHR" ) ); + vkCmdEndVideoCodingKHR = PFN_vkCmdEndVideoCodingKHR( vkGetDeviceProcAddr( device, "vkCmdEndVideoCodingKHR" ) ); + vkCmdControlVideoCodingKHR = PFN_vkCmdControlVideoCodingKHR( vkGetDeviceProcAddr( device, "vkCmdControlVideoCodingKHR" ) ); + + //=== VK_KHR_video_decode_queue === + vkCmdDecodeVideoKHR = PFN_vkCmdDecodeVideoKHR( vkGetDeviceProcAddr( device, "vkCmdDecodeVideoKHR" ) ); + + //=== VK_EXT_transform_feedback === + vkCmdBindTransformFeedbackBuffersEXT = + PFN_vkCmdBindTransformFeedbackBuffersEXT( vkGetDeviceProcAddr( device, "vkCmdBindTransformFeedbackBuffersEXT" ) ); + vkCmdBeginTransformFeedbackEXT = PFN_vkCmdBeginTransformFeedbackEXT( vkGetDeviceProcAddr( device, "vkCmdBeginTransformFeedbackEXT" ) ); + vkCmdEndTransformFeedbackEXT = PFN_vkCmdEndTransformFeedbackEXT( vkGetDeviceProcAddr( device, "vkCmdEndTransformFeedbackEXT" ) ); + vkCmdBeginQueryIndexedEXT = PFN_vkCmdBeginQueryIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdBeginQueryIndexedEXT" ) ); + vkCmdEndQueryIndexedEXT = PFN_vkCmdEndQueryIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdEndQueryIndexedEXT" ) ); + vkCmdDrawIndirectByteCountEXT = PFN_vkCmdDrawIndirectByteCountEXT( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectByteCountEXT" ) ); + + //=== VK_NVX_binary_import === + vkCreateCuModuleNVX = PFN_vkCreateCuModuleNVX( vkGetDeviceProcAddr( device, "vkCreateCuModuleNVX" ) ); + vkCreateCuFunctionNVX = PFN_vkCreateCuFunctionNVX( vkGetDeviceProcAddr( device, "vkCreateCuFunctionNVX" ) ); + vkDestroyCuModuleNVX = PFN_vkDestroyCuModuleNVX( vkGetDeviceProcAddr( device, "vkDestroyCuModuleNVX" ) ); + vkDestroyCuFunctionNVX = PFN_vkDestroyCuFunctionNVX( vkGetDeviceProcAddr( device, "vkDestroyCuFunctionNVX" ) ); + vkCmdCuLaunchKernelNVX = PFN_vkCmdCuLaunchKernelNVX( vkGetDeviceProcAddr( device, "vkCmdCuLaunchKernelNVX" ) ); + + //=== VK_NVX_image_view_handle === + vkGetImageViewHandleNVX = PFN_vkGetImageViewHandleNVX( vkGetDeviceProcAddr( 
device, "vkGetImageViewHandleNVX" ) ); + vkGetImageViewAddressNVX = PFN_vkGetImageViewAddressNVX( vkGetDeviceProcAddr( device, "vkGetImageViewAddressNVX" ) ); + + //=== VK_AMD_draw_indirect_count === + vkCmdDrawIndirectCountAMD = PFN_vkCmdDrawIndirectCountAMD( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCountAMD" ) ); + if ( !vkCmdDrawIndirectCount ) + vkCmdDrawIndirectCount = vkCmdDrawIndirectCountAMD; + vkCmdDrawIndexedIndirectCountAMD = PFN_vkCmdDrawIndexedIndirectCountAMD( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCountAMD" ) ); + if ( !vkCmdDrawIndexedIndirectCount ) + vkCmdDrawIndexedIndirectCount = vkCmdDrawIndexedIndirectCountAMD; + + //=== VK_AMD_shader_info === + vkGetShaderInfoAMD = PFN_vkGetShaderInfoAMD( vkGetDeviceProcAddr( device, "vkGetShaderInfoAMD" ) ); + + //=== VK_KHR_dynamic_rendering === + vkCmdBeginRenderingKHR = PFN_vkCmdBeginRenderingKHR( vkGetDeviceProcAddr( device, "vkCmdBeginRenderingKHR" ) ); + if ( !vkCmdBeginRendering ) + vkCmdBeginRendering = vkCmdBeginRenderingKHR; + vkCmdEndRenderingKHR = PFN_vkCmdEndRenderingKHR( vkGetDeviceProcAddr( device, "vkCmdEndRenderingKHR" ) ); + if ( !vkCmdEndRendering ) + vkCmdEndRendering = vkCmdEndRenderingKHR; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_NV_external_memory_win32 === + vkGetMemoryWin32HandleNV = PFN_vkGetMemoryWin32HandleNV( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandleNV" ) ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_device_group === + vkGetDeviceGroupPeerMemoryFeaturesKHR = + PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR( vkGetDeviceProcAddr( device, "vkGetDeviceGroupPeerMemoryFeaturesKHR" ) ); + if ( !vkGetDeviceGroupPeerMemoryFeatures ) + vkGetDeviceGroupPeerMemoryFeatures = vkGetDeviceGroupPeerMemoryFeaturesKHR; + vkCmdSetDeviceMaskKHR = PFN_vkCmdSetDeviceMaskKHR( vkGetDeviceProcAddr( device, "vkCmdSetDeviceMaskKHR" ) ); + if ( !vkCmdSetDeviceMask ) + vkCmdSetDeviceMask = vkCmdSetDeviceMaskKHR; + vkCmdDispatchBaseKHR = PFN_vkCmdDispatchBaseKHR( vkGetDeviceProcAddr( device, "vkCmdDispatchBaseKHR" ) ); + if ( !vkCmdDispatchBase ) + vkCmdDispatchBase = vkCmdDispatchBaseKHR; + + //=== VK_KHR_maintenance1 === + vkTrimCommandPoolKHR = PFN_vkTrimCommandPoolKHR( vkGetDeviceProcAddr( device, "vkTrimCommandPoolKHR" ) ); + if ( !vkTrimCommandPool ) + vkTrimCommandPool = vkTrimCommandPoolKHR; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_memory_win32 === + vkGetMemoryWin32HandleKHR = PFN_vkGetMemoryWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandleKHR" ) ); + vkGetMemoryWin32HandlePropertiesKHR = PFN_vkGetMemoryWin32HandlePropertiesKHR( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandlePropertiesKHR" ) ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_memory_fd === + vkGetMemoryFdKHR = PFN_vkGetMemoryFdKHR( vkGetDeviceProcAddr( device, "vkGetMemoryFdKHR" ) ); + vkGetMemoryFdPropertiesKHR = PFN_vkGetMemoryFdPropertiesKHR( vkGetDeviceProcAddr( device, "vkGetMemoryFdPropertiesKHR" ) ); + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_semaphore_win32 === + vkImportSemaphoreWin32HandleKHR = PFN_vkImportSemaphoreWin32HandleKHR( vkGetDeviceProcAddr( device, "vkImportSemaphoreWin32HandleKHR" ) ); + vkGetSemaphoreWin32HandleKHR = PFN_vkGetSemaphoreWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreWin32HandleKHR" ) ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_semaphore_fd === + vkImportSemaphoreFdKHR = PFN_vkImportSemaphoreFdKHR( vkGetDeviceProcAddr( device, 
"vkImportSemaphoreFdKHR" ) ); + vkGetSemaphoreFdKHR = PFN_vkGetSemaphoreFdKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreFdKHR" ) ); + + //=== VK_KHR_push_descriptor === + vkCmdPushDescriptorSetKHR = PFN_vkCmdPushDescriptorSetKHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetKHR" ) ); + vkCmdPushDescriptorSetWithTemplateKHR = + PFN_vkCmdPushDescriptorSetWithTemplateKHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetWithTemplateKHR" ) ); + + //=== VK_EXT_conditional_rendering === + vkCmdBeginConditionalRenderingEXT = PFN_vkCmdBeginConditionalRenderingEXT( vkGetDeviceProcAddr( device, "vkCmdBeginConditionalRenderingEXT" ) ); + vkCmdEndConditionalRenderingEXT = PFN_vkCmdEndConditionalRenderingEXT( vkGetDeviceProcAddr( device, "vkCmdEndConditionalRenderingEXT" ) ); + + //=== VK_KHR_descriptor_update_template === + vkCreateDescriptorUpdateTemplateKHR = PFN_vkCreateDescriptorUpdateTemplateKHR( vkGetDeviceProcAddr( device, "vkCreateDescriptorUpdateTemplateKHR" ) ); + if ( !vkCreateDescriptorUpdateTemplate ) + vkCreateDescriptorUpdateTemplate = vkCreateDescriptorUpdateTemplateKHR; + vkDestroyDescriptorUpdateTemplateKHR = + PFN_vkDestroyDescriptorUpdateTemplateKHR( vkGetDeviceProcAddr( device, "vkDestroyDescriptorUpdateTemplateKHR" ) ); + if ( !vkDestroyDescriptorUpdateTemplate ) + vkDestroyDescriptorUpdateTemplate = vkDestroyDescriptorUpdateTemplateKHR; + vkUpdateDescriptorSetWithTemplateKHR = + PFN_vkUpdateDescriptorSetWithTemplateKHR( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSetWithTemplateKHR" ) ); + if ( !vkUpdateDescriptorSetWithTemplate ) + vkUpdateDescriptorSetWithTemplate = vkUpdateDescriptorSetWithTemplateKHR; + + //=== VK_NV_clip_space_w_scaling === + vkCmdSetViewportWScalingNV = PFN_vkCmdSetViewportWScalingNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportWScalingNV" ) ); + + //=== VK_EXT_display_control === + vkDisplayPowerControlEXT = PFN_vkDisplayPowerControlEXT( vkGetDeviceProcAddr( device, "vkDisplayPowerControlEXT" ) ); + vkRegisterDeviceEventEXT = PFN_vkRegisterDeviceEventEXT( vkGetDeviceProcAddr( device, "vkRegisterDeviceEventEXT" ) ); + vkRegisterDisplayEventEXT = PFN_vkRegisterDisplayEventEXT( vkGetDeviceProcAddr( device, "vkRegisterDisplayEventEXT" ) ); + vkGetSwapchainCounterEXT = PFN_vkGetSwapchainCounterEXT( vkGetDeviceProcAddr( device, "vkGetSwapchainCounterEXT" ) ); + + //=== VK_GOOGLE_display_timing === + vkGetRefreshCycleDurationGOOGLE = PFN_vkGetRefreshCycleDurationGOOGLE( vkGetDeviceProcAddr( device, "vkGetRefreshCycleDurationGOOGLE" ) ); + vkGetPastPresentationTimingGOOGLE = PFN_vkGetPastPresentationTimingGOOGLE( vkGetDeviceProcAddr( device, "vkGetPastPresentationTimingGOOGLE" ) ); + + //=== VK_EXT_discard_rectangles === + vkCmdSetDiscardRectangleEXT = PFN_vkCmdSetDiscardRectangleEXT( vkGetDeviceProcAddr( device, "vkCmdSetDiscardRectangleEXT" ) ); + vkCmdSetDiscardRectangleEnableEXT = PFN_vkCmdSetDiscardRectangleEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDiscardRectangleEnableEXT" ) ); + vkCmdSetDiscardRectangleModeEXT = PFN_vkCmdSetDiscardRectangleModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetDiscardRectangleModeEXT" ) ); + + //=== VK_EXT_hdr_metadata === + vkSetHdrMetadataEXT = PFN_vkSetHdrMetadataEXT( vkGetDeviceProcAddr( device, "vkSetHdrMetadataEXT" ) ); + + //=== VK_KHR_create_renderpass2 === + vkCreateRenderPass2KHR = PFN_vkCreateRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCreateRenderPass2KHR" ) ); + if ( !vkCreateRenderPass2 ) + vkCreateRenderPass2 = vkCreateRenderPass2KHR; + vkCmdBeginRenderPass2KHR = 
PFN_vkCmdBeginRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass2KHR" ) ); + if ( !vkCmdBeginRenderPass2 ) + vkCmdBeginRenderPass2 = vkCmdBeginRenderPass2KHR; + vkCmdNextSubpass2KHR = PFN_vkCmdNextSubpass2KHR( vkGetDeviceProcAddr( device, "vkCmdNextSubpass2KHR" ) ); + if ( !vkCmdNextSubpass2 ) + vkCmdNextSubpass2 = vkCmdNextSubpass2KHR; + vkCmdEndRenderPass2KHR = PFN_vkCmdEndRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass2KHR" ) ); + if ( !vkCmdEndRenderPass2 ) + vkCmdEndRenderPass2 = vkCmdEndRenderPass2KHR; + + //=== VK_KHR_shared_presentable_image === + vkGetSwapchainStatusKHR = PFN_vkGetSwapchainStatusKHR( vkGetDeviceProcAddr( device, "vkGetSwapchainStatusKHR" ) ); + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_fence_win32 === + vkImportFenceWin32HandleKHR = PFN_vkImportFenceWin32HandleKHR( vkGetDeviceProcAddr( device, "vkImportFenceWin32HandleKHR" ) ); + vkGetFenceWin32HandleKHR = PFN_vkGetFenceWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetFenceWin32HandleKHR" ) ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_fence_fd === + vkImportFenceFdKHR = PFN_vkImportFenceFdKHR( vkGetDeviceProcAddr( device, "vkImportFenceFdKHR" ) ); + vkGetFenceFdKHR = PFN_vkGetFenceFdKHR( vkGetDeviceProcAddr( device, "vkGetFenceFdKHR" ) ); + + //=== VK_KHR_performance_query === + vkAcquireProfilingLockKHR = PFN_vkAcquireProfilingLockKHR( vkGetDeviceProcAddr( device, "vkAcquireProfilingLockKHR" ) ); + vkReleaseProfilingLockKHR = PFN_vkReleaseProfilingLockKHR( vkGetDeviceProcAddr( device, "vkReleaseProfilingLockKHR" ) ); + + //=== VK_EXT_debug_utils === + vkSetDebugUtilsObjectNameEXT = PFN_vkSetDebugUtilsObjectNameEXT( vkGetDeviceProcAddr( device, "vkSetDebugUtilsObjectNameEXT" ) ); + vkSetDebugUtilsObjectTagEXT = PFN_vkSetDebugUtilsObjectTagEXT( vkGetDeviceProcAddr( device, "vkSetDebugUtilsObjectTagEXT" ) ); + vkQueueBeginDebugUtilsLabelEXT = PFN_vkQueueBeginDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueBeginDebugUtilsLabelEXT" ) ); + vkQueueEndDebugUtilsLabelEXT = PFN_vkQueueEndDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueEndDebugUtilsLabelEXT" ) ); + vkQueueInsertDebugUtilsLabelEXT = PFN_vkQueueInsertDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueInsertDebugUtilsLabelEXT" ) ); + vkCmdBeginDebugUtilsLabelEXT = PFN_vkCmdBeginDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdBeginDebugUtilsLabelEXT" ) ); + vkCmdEndDebugUtilsLabelEXT = PFN_vkCmdEndDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdEndDebugUtilsLabelEXT" ) ); + vkCmdInsertDebugUtilsLabelEXT = PFN_vkCmdInsertDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdInsertDebugUtilsLabelEXT" ) ); + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_ANDROID_external_memory_android_hardware_buffer === + vkGetAndroidHardwareBufferPropertiesANDROID = + PFN_vkGetAndroidHardwareBufferPropertiesANDROID( vkGetDeviceProcAddr( device, "vkGetAndroidHardwareBufferPropertiesANDROID" ) ); + vkGetMemoryAndroidHardwareBufferANDROID = + PFN_vkGetMemoryAndroidHardwareBufferANDROID( vkGetDeviceProcAddr( device, "vkGetMemoryAndroidHardwareBufferANDROID" ) ); +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_AMDX_shader_enqueue === + vkCreateExecutionGraphPipelinesAMDX = PFN_vkCreateExecutionGraphPipelinesAMDX( vkGetDeviceProcAddr( device, "vkCreateExecutionGraphPipelinesAMDX" ) ); + vkGetExecutionGraphPipelineScratchSizeAMDX = + PFN_vkGetExecutionGraphPipelineScratchSizeAMDX( vkGetDeviceProcAddr( device, 
"vkGetExecutionGraphPipelineScratchSizeAMDX" ) ); + vkGetExecutionGraphPipelineNodeIndexAMDX = + PFN_vkGetExecutionGraphPipelineNodeIndexAMDX( vkGetDeviceProcAddr( device, "vkGetExecutionGraphPipelineNodeIndexAMDX" ) ); + vkCmdInitializeGraphScratchMemoryAMDX = + PFN_vkCmdInitializeGraphScratchMemoryAMDX( vkGetDeviceProcAddr( device, "vkCmdInitializeGraphScratchMemoryAMDX" ) ); + vkCmdDispatchGraphAMDX = PFN_vkCmdDispatchGraphAMDX( vkGetDeviceProcAddr( device, "vkCmdDispatchGraphAMDX" ) ); + vkCmdDispatchGraphIndirectAMDX = PFN_vkCmdDispatchGraphIndirectAMDX( vkGetDeviceProcAddr( device, "vkCmdDispatchGraphIndirectAMDX" ) ); + vkCmdDispatchGraphIndirectCountAMDX = PFN_vkCmdDispatchGraphIndirectCountAMDX( vkGetDeviceProcAddr( device, "vkCmdDispatchGraphIndirectCountAMDX" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_EXT_sample_locations === + vkCmdSetSampleLocationsEXT = PFN_vkCmdSetSampleLocationsEXT( vkGetDeviceProcAddr( device, "vkCmdSetSampleLocationsEXT" ) ); + + //=== VK_KHR_get_memory_requirements2 === + vkGetImageMemoryRequirements2KHR = PFN_vkGetImageMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements2KHR" ) ); + if ( !vkGetImageMemoryRequirements2 ) + vkGetImageMemoryRequirements2 = vkGetImageMemoryRequirements2KHR; + vkGetBufferMemoryRequirements2KHR = PFN_vkGetBufferMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements2KHR" ) ); + if ( !vkGetBufferMemoryRequirements2 ) + vkGetBufferMemoryRequirements2 = vkGetBufferMemoryRequirements2KHR; + vkGetImageSparseMemoryRequirements2KHR = + PFN_vkGetImageSparseMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements2KHR" ) ); + if ( !vkGetImageSparseMemoryRequirements2 ) + vkGetImageSparseMemoryRequirements2 = vkGetImageSparseMemoryRequirements2KHR; + + //=== VK_KHR_acceleration_structure === + vkCreateAccelerationStructureKHR = PFN_vkCreateAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCreateAccelerationStructureKHR" ) ); + vkDestroyAccelerationStructureKHR = PFN_vkDestroyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkDestroyAccelerationStructureKHR" ) ); + vkCmdBuildAccelerationStructuresKHR = PFN_vkCmdBuildAccelerationStructuresKHR( vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructuresKHR" ) ); + vkCmdBuildAccelerationStructuresIndirectKHR = + PFN_vkCmdBuildAccelerationStructuresIndirectKHR( vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructuresIndirectKHR" ) ); + vkBuildAccelerationStructuresKHR = PFN_vkBuildAccelerationStructuresKHR( vkGetDeviceProcAddr( device, "vkBuildAccelerationStructuresKHR" ) ); + vkCopyAccelerationStructureKHR = PFN_vkCopyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCopyAccelerationStructureKHR" ) ); + vkCopyAccelerationStructureToMemoryKHR = + PFN_vkCopyAccelerationStructureToMemoryKHR( vkGetDeviceProcAddr( device, "vkCopyAccelerationStructureToMemoryKHR" ) ); + vkCopyMemoryToAccelerationStructureKHR = + PFN_vkCopyMemoryToAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCopyMemoryToAccelerationStructureKHR" ) ); + vkWriteAccelerationStructuresPropertiesKHR = + PFN_vkWriteAccelerationStructuresPropertiesKHR( vkGetDeviceProcAddr( device, "vkWriteAccelerationStructuresPropertiesKHR" ) ); + vkCmdCopyAccelerationStructureKHR = PFN_vkCmdCopyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureKHR" ) ); + vkCmdCopyAccelerationStructureToMemoryKHR = + PFN_vkCmdCopyAccelerationStructureToMemoryKHR( vkGetDeviceProcAddr( 
device, "vkCmdCopyAccelerationStructureToMemoryKHR" ) ); + vkCmdCopyMemoryToAccelerationStructureKHR = + PFN_vkCmdCopyMemoryToAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCmdCopyMemoryToAccelerationStructureKHR" ) ); + vkGetAccelerationStructureDeviceAddressKHR = + PFN_vkGetAccelerationStructureDeviceAddressKHR( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureDeviceAddressKHR" ) ); + vkCmdWriteAccelerationStructuresPropertiesKHR = + PFN_vkCmdWriteAccelerationStructuresPropertiesKHR( vkGetDeviceProcAddr( device, "vkCmdWriteAccelerationStructuresPropertiesKHR" ) ); + vkGetDeviceAccelerationStructureCompatibilityKHR = + PFN_vkGetDeviceAccelerationStructureCompatibilityKHR( vkGetDeviceProcAddr( device, "vkGetDeviceAccelerationStructureCompatibilityKHR" ) ); + vkGetAccelerationStructureBuildSizesKHR = + PFN_vkGetAccelerationStructureBuildSizesKHR( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureBuildSizesKHR" ) ); + + //=== VK_KHR_ray_tracing_pipeline === + vkCmdTraceRaysKHR = PFN_vkCmdTraceRaysKHR( vkGetDeviceProcAddr( device, "vkCmdTraceRaysKHR" ) ); + vkCreateRayTracingPipelinesKHR = PFN_vkCreateRayTracingPipelinesKHR( vkGetDeviceProcAddr( device, "vkCreateRayTracingPipelinesKHR" ) ); + vkGetRayTracingShaderGroupHandlesKHR = + PFN_vkGetRayTracingShaderGroupHandlesKHR( vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupHandlesKHR" ) ); + vkGetRayTracingCaptureReplayShaderGroupHandlesKHR = + PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( vkGetDeviceProcAddr( device, "vkGetRayTracingCaptureReplayShaderGroupHandlesKHR" ) ); + vkCmdTraceRaysIndirectKHR = PFN_vkCmdTraceRaysIndirectKHR( vkGetDeviceProcAddr( device, "vkCmdTraceRaysIndirectKHR" ) ); + vkGetRayTracingShaderGroupStackSizeKHR = + PFN_vkGetRayTracingShaderGroupStackSizeKHR( vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupStackSizeKHR" ) ); + vkCmdSetRayTracingPipelineStackSizeKHR = + PFN_vkCmdSetRayTracingPipelineStackSizeKHR( vkGetDeviceProcAddr( device, "vkCmdSetRayTracingPipelineStackSizeKHR" ) ); + + //=== VK_KHR_sampler_ycbcr_conversion === + vkCreateSamplerYcbcrConversionKHR = PFN_vkCreateSamplerYcbcrConversionKHR( vkGetDeviceProcAddr( device, "vkCreateSamplerYcbcrConversionKHR" ) ); + if ( !vkCreateSamplerYcbcrConversion ) + vkCreateSamplerYcbcrConversion = vkCreateSamplerYcbcrConversionKHR; + vkDestroySamplerYcbcrConversionKHR = PFN_vkDestroySamplerYcbcrConversionKHR( vkGetDeviceProcAddr( device, "vkDestroySamplerYcbcrConversionKHR" ) ); + if ( !vkDestroySamplerYcbcrConversion ) + vkDestroySamplerYcbcrConversion = vkDestroySamplerYcbcrConversionKHR; + + //=== VK_KHR_bind_memory2 === + vkBindBufferMemory2KHR = PFN_vkBindBufferMemory2KHR( vkGetDeviceProcAddr( device, "vkBindBufferMemory2KHR" ) ); + if ( !vkBindBufferMemory2 ) + vkBindBufferMemory2 = vkBindBufferMemory2KHR; + vkBindImageMemory2KHR = PFN_vkBindImageMemory2KHR( vkGetDeviceProcAddr( device, "vkBindImageMemory2KHR" ) ); + if ( !vkBindImageMemory2 ) + vkBindImageMemory2 = vkBindImageMemory2KHR; + + //=== VK_EXT_image_drm_format_modifier === + vkGetImageDrmFormatModifierPropertiesEXT = + PFN_vkGetImageDrmFormatModifierPropertiesEXT( vkGetDeviceProcAddr( device, "vkGetImageDrmFormatModifierPropertiesEXT" ) ); + + //=== VK_EXT_validation_cache === + vkCreateValidationCacheEXT = PFN_vkCreateValidationCacheEXT( vkGetDeviceProcAddr( device, "vkCreateValidationCacheEXT" ) ); + vkDestroyValidationCacheEXT = PFN_vkDestroyValidationCacheEXT( vkGetDeviceProcAddr( device, "vkDestroyValidationCacheEXT" ) ); + 
vkMergeValidationCachesEXT = PFN_vkMergeValidationCachesEXT( vkGetDeviceProcAddr( device, "vkMergeValidationCachesEXT" ) ); + vkGetValidationCacheDataEXT = PFN_vkGetValidationCacheDataEXT( vkGetDeviceProcAddr( device, "vkGetValidationCacheDataEXT" ) ); + + //=== VK_NV_shading_rate_image === + vkCmdBindShadingRateImageNV = PFN_vkCmdBindShadingRateImageNV( vkGetDeviceProcAddr( device, "vkCmdBindShadingRateImageNV" ) ); + vkCmdSetViewportShadingRatePaletteNV = + PFN_vkCmdSetViewportShadingRatePaletteNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportShadingRatePaletteNV" ) ); + vkCmdSetCoarseSampleOrderNV = PFN_vkCmdSetCoarseSampleOrderNV( vkGetDeviceProcAddr( device, "vkCmdSetCoarseSampleOrderNV" ) ); + + //=== VK_NV_ray_tracing === + vkCreateAccelerationStructureNV = PFN_vkCreateAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCreateAccelerationStructureNV" ) ); + vkDestroyAccelerationStructureNV = PFN_vkDestroyAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkDestroyAccelerationStructureNV" ) ); + vkGetAccelerationStructureMemoryRequirementsNV = + PFN_vkGetAccelerationStructureMemoryRequirementsNV( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureMemoryRequirementsNV" ) ); + vkBindAccelerationStructureMemoryNV = PFN_vkBindAccelerationStructureMemoryNV( vkGetDeviceProcAddr( device, "vkBindAccelerationStructureMemoryNV" ) ); + vkCmdBuildAccelerationStructureNV = PFN_vkCmdBuildAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructureNV" ) ); + vkCmdCopyAccelerationStructureNV = PFN_vkCmdCopyAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureNV" ) ); + vkCmdTraceRaysNV = PFN_vkCmdTraceRaysNV( vkGetDeviceProcAddr( device, "vkCmdTraceRaysNV" ) ); + vkCreateRayTracingPipelinesNV = PFN_vkCreateRayTracingPipelinesNV( vkGetDeviceProcAddr( device, "vkCreateRayTracingPipelinesNV" ) ); + vkGetRayTracingShaderGroupHandlesNV = PFN_vkGetRayTracingShaderGroupHandlesNV( vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupHandlesNV" ) ); + if ( !vkGetRayTracingShaderGroupHandlesKHR ) + vkGetRayTracingShaderGroupHandlesKHR = vkGetRayTracingShaderGroupHandlesNV; + vkGetAccelerationStructureHandleNV = PFN_vkGetAccelerationStructureHandleNV( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureHandleNV" ) ); + vkCmdWriteAccelerationStructuresPropertiesNV = + PFN_vkCmdWriteAccelerationStructuresPropertiesNV( vkGetDeviceProcAddr( device, "vkCmdWriteAccelerationStructuresPropertiesNV" ) ); + vkCompileDeferredNV = PFN_vkCompileDeferredNV( vkGetDeviceProcAddr( device, "vkCompileDeferredNV" ) ); + + //=== VK_KHR_maintenance3 === + vkGetDescriptorSetLayoutSupportKHR = PFN_vkGetDescriptorSetLayoutSupportKHR( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutSupportKHR" ) ); + if ( !vkGetDescriptorSetLayoutSupport ) + vkGetDescriptorSetLayoutSupport = vkGetDescriptorSetLayoutSupportKHR; + + //=== VK_KHR_draw_indirect_count === + vkCmdDrawIndirectCountKHR = PFN_vkCmdDrawIndirectCountKHR( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCountKHR" ) ); + if ( !vkCmdDrawIndirectCount ) + vkCmdDrawIndirectCount = vkCmdDrawIndirectCountKHR; + vkCmdDrawIndexedIndirectCountKHR = PFN_vkCmdDrawIndexedIndirectCountKHR( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCountKHR" ) ); + if ( !vkCmdDrawIndexedIndirectCount ) + vkCmdDrawIndexedIndirectCount = vkCmdDrawIndexedIndirectCountKHR; + + //=== VK_EXT_external_memory_host === + vkGetMemoryHostPointerPropertiesEXT = PFN_vkGetMemoryHostPointerPropertiesEXT( vkGetDeviceProcAddr( 
device, "vkGetMemoryHostPointerPropertiesEXT" ) ); + + //=== VK_AMD_buffer_marker === + vkCmdWriteBufferMarkerAMD = PFN_vkCmdWriteBufferMarkerAMD( vkGetDeviceProcAddr( device, "vkCmdWriteBufferMarkerAMD" ) ); + vkCmdWriteBufferMarker2AMD = PFN_vkCmdWriteBufferMarker2AMD( vkGetDeviceProcAddr( device, "vkCmdWriteBufferMarker2AMD" ) ); + + //=== VK_EXT_calibrated_timestamps === + vkGetCalibratedTimestampsEXT = PFN_vkGetCalibratedTimestampsEXT( vkGetDeviceProcAddr( device, "vkGetCalibratedTimestampsEXT" ) ); + if ( !vkGetCalibratedTimestampsKHR ) + vkGetCalibratedTimestampsKHR = vkGetCalibratedTimestampsEXT; + + //=== VK_NV_mesh_shader === + vkCmdDrawMeshTasksNV = PFN_vkCmdDrawMeshTasksNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksNV" ) ); + vkCmdDrawMeshTasksIndirectNV = PFN_vkCmdDrawMeshTasksIndirectNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectNV" ) ); + vkCmdDrawMeshTasksIndirectCountNV = PFN_vkCmdDrawMeshTasksIndirectCountNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectCountNV" ) ); + + //=== VK_NV_scissor_exclusive === + vkCmdSetExclusiveScissorEnableNV = PFN_vkCmdSetExclusiveScissorEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetExclusiveScissorEnableNV" ) ); + vkCmdSetExclusiveScissorNV = PFN_vkCmdSetExclusiveScissorNV( vkGetDeviceProcAddr( device, "vkCmdSetExclusiveScissorNV" ) ); + + //=== VK_NV_device_diagnostic_checkpoints === + vkCmdSetCheckpointNV = PFN_vkCmdSetCheckpointNV( vkGetDeviceProcAddr( device, "vkCmdSetCheckpointNV" ) ); + vkGetQueueCheckpointDataNV = PFN_vkGetQueueCheckpointDataNV( vkGetDeviceProcAddr( device, "vkGetQueueCheckpointDataNV" ) ); + vkGetQueueCheckpointData2NV = PFN_vkGetQueueCheckpointData2NV( vkGetDeviceProcAddr( device, "vkGetQueueCheckpointData2NV" ) ); + + //=== VK_KHR_timeline_semaphore === + vkGetSemaphoreCounterValueKHR = PFN_vkGetSemaphoreCounterValueKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreCounterValueKHR" ) ); + if ( !vkGetSemaphoreCounterValue ) + vkGetSemaphoreCounterValue = vkGetSemaphoreCounterValueKHR; + vkWaitSemaphoresKHR = PFN_vkWaitSemaphoresKHR( vkGetDeviceProcAddr( device, "vkWaitSemaphoresKHR" ) ); + if ( !vkWaitSemaphores ) + vkWaitSemaphores = vkWaitSemaphoresKHR; + vkSignalSemaphoreKHR = PFN_vkSignalSemaphoreKHR( vkGetDeviceProcAddr( device, "vkSignalSemaphoreKHR" ) ); + if ( !vkSignalSemaphore ) + vkSignalSemaphore = vkSignalSemaphoreKHR; + + //=== VK_INTEL_performance_query === + vkInitializePerformanceApiINTEL = PFN_vkInitializePerformanceApiINTEL( vkGetDeviceProcAddr( device, "vkInitializePerformanceApiINTEL" ) ); + vkUninitializePerformanceApiINTEL = PFN_vkUninitializePerformanceApiINTEL( vkGetDeviceProcAddr( device, "vkUninitializePerformanceApiINTEL" ) ); + vkCmdSetPerformanceMarkerINTEL = PFN_vkCmdSetPerformanceMarkerINTEL( vkGetDeviceProcAddr( device, "vkCmdSetPerformanceMarkerINTEL" ) ); + vkCmdSetPerformanceStreamMarkerINTEL = + PFN_vkCmdSetPerformanceStreamMarkerINTEL( vkGetDeviceProcAddr( device, "vkCmdSetPerformanceStreamMarkerINTEL" ) ); + vkCmdSetPerformanceOverrideINTEL = PFN_vkCmdSetPerformanceOverrideINTEL( vkGetDeviceProcAddr( device, "vkCmdSetPerformanceOverrideINTEL" ) ); + vkAcquirePerformanceConfigurationINTEL = + PFN_vkAcquirePerformanceConfigurationINTEL( vkGetDeviceProcAddr( device, "vkAcquirePerformanceConfigurationINTEL" ) ); + vkReleasePerformanceConfigurationINTEL = + PFN_vkReleasePerformanceConfigurationINTEL( vkGetDeviceProcAddr( device, "vkReleasePerformanceConfigurationINTEL" ) ); + vkQueueSetPerformanceConfigurationINTEL = + 
PFN_vkQueueSetPerformanceConfigurationINTEL( vkGetDeviceProcAddr( device, "vkQueueSetPerformanceConfigurationINTEL" ) ); + vkGetPerformanceParameterINTEL = PFN_vkGetPerformanceParameterINTEL( vkGetDeviceProcAddr( device, "vkGetPerformanceParameterINTEL" ) ); + + //=== VK_AMD_display_native_hdr === + vkSetLocalDimmingAMD = PFN_vkSetLocalDimmingAMD( vkGetDeviceProcAddr( device, "vkSetLocalDimmingAMD" ) ); + + //=== VK_KHR_fragment_shading_rate === + vkCmdSetFragmentShadingRateKHR = PFN_vkCmdSetFragmentShadingRateKHR( vkGetDeviceProcAddr( device, "vkCmdSetFragmentShadingRateKHR" ) ); + + //=== VK_KHR_dynamic_rendering_local_read === + vkCmdSetRenderingAttachmentLocationsKHR = + PFN_vkCmdSetRenderingAttachmentLocationsKHR( vkGetDeviceProcAddr( device, "vkCmdSetRenderingAttachmentLocationsKHR" ) ); + vkCmdSetRenderingInputAttachmentIndicesKHR = + PFN_vkCmdSetRenderingInputAttachmentIndicesKHR( vkGetDeviceProcAddr( device, "vkCmdSetRenderingInputAttachmentIndicesKHR" ) ); + + //=== VK_EXT_buffer_device_address === + vkGetBufferDeviceAddressEXT = PFN_vkGetBufferDeviceAddressEXT( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddressEXT" ) ); + if ( !vkGetBufferDeviceAddress ) + vkGetBufferDeviceAddress = vkGetBufferDeviceAddressEXT; + + //=== VK_KHR_present_wait === + vkWaitForPresentKHR = PFN_vkWaitForPresentKHR( vkGetDeviceProcAddr( device, "vkWaitForPresentKHR" ) ); + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_EXT_full_screen_exclusive === + vkAcquireFullScreenExclusiveModeEXT = PFN_vkAcquireFullScreenExclusiveModeEXT( vkGetDeviceProcAddr( device, "vkAcquireFullScreenExclusiveModeEXT" ) ); + vkReleaseFullScreenExclusiveModeEXT = PFN_vkReleaseFullScreenExclusiveModeEXT( vkGetDeviceProcAddr( device, "vkReleaseFullScreenExclusiveModeEXT" ) ); + vkGetDeviceGroupSurfacePresentModes2EXT = + PFN_vkGetDeviceGroupSurfacePresentModes2EXT( vkGetDeviceProcAddr( device, "vkGetDeviceGroupSurfacePresentModes2EXT" ) ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_buffer_device_address === + vkGetBufferDeviceAddressKHR = PFN_vkGetBufferDeviceAddressKHR( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddressKHR" ) ); + if ( !vkGetBufferDeviceAddress ) + vkGetBufferDeviceAddress = vkGetBufferDeviceAddressKHR; + vkGetBufferOpaqueCaptureAddressKHR = PFN_vkGetBufferOpaqueCaptureAddressKHR( vkGetDeviceProcAddr( device, "vkGetBufferOpaqueCaptureAddressKHR" ) ); + if ( !vkGetBufferOpaqueCaptureAddress ) + vkGetBufferOpaqueCaptureAddress = vkGetBufferOpaqueCaptureAddressKHR; + vkGetDeviceMemoryOpaqueCaptureAddressKHR = + PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR( vkGetDeviceProcAddr( device, "vkGetDeviceMemoryOpaqueCaptureAddressKHR" ) ); + if ( !vkGetDeviceMemoryOpaqueCaptureAddress ) + vkGetDeviceMemoryOpaqueCaptureAddress = vkGetDeviceMemoryOpaqueCaptureAddressKHR; + + //=== VK_EXT_line_rasterization === + vkCmdSetLineStippleEXT = PFN_vkCmdSetLineStippleEXT( vkGetDeviceProcAddr( device, "vkCmdSetLineStippleEXT" ) ); + if ( !vkCmdSetLineStippleKHR ) + vkCmdSetLineStippleKHR = vkCmdSetLineStippleEXT; + + //=== VK_EXT_host_query_reset === + vkResetQueryPoolEXT = PFN_vkResetQueryPoolEXT( vkGetDeviceProcAddr( device, "vkResetQueryPoolEXT" ) ); + if ( !vkResetQueryPool ) + vkResetQueryPool = vkResetQueryPoolEXT; + + //=== VK_EXT_extended_dynamic_state === + vkCmdSetCullModeEXT = PFN_vkCmdSetCullModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetCullModeEXT" ) ); + if ( !vkCmdSetCullMode ) + vkCmdSetCullMode = vkCmdSetCullModeEXT; + vkCmdSetFrontFaceEXT = PFN_vkCmdSetFrontFaceEXT( vkGetDeviceProcAddr( 
device, "vkCmdSetFrontFaceEXT" ) ); + if ( !vkCmdSetFrontFace ) + vkCmdSetFrontFace = vkCmdSetFrontFaceEXT; + vkCmdSetPrimitiveTopologyEXT = PFN_vkCmdSetPrimitiveTopologyEXT( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveTopologyEXT" ) ); + if ( !vkCmdSetPrimitiveTopology ) + vkCmdSetPrimitiveTopology = vkCmdSetPrimitiveTopologyEXT; + vkCmdSetViewportWithCountEXT = PFN_vkCmdSetViewportWithCountEXT( vkGetDeviceProcAddr( device, "vkCmdSetViewportWithCountEXT" ) ); + if ( !vkCmdSetViewportWithCount ) + vkCmdSetViewportWithCount = vkCmdSetViewportWithCountEXT; + vkCmdSetScissorWithCountEXT = PFN_vkCmdSetScissorWithCountEXT( vkGetDeviceProcAddr( device, "vkCmdSetScissorWithCountEXT" ) ); + if ( !vkCmdSetScissorWithCount ) + vkCmdSetScissorWithCount = vkCmdSetScissorWithCountEXT; + vkCmdBindVertexBuffers2EXT = PFN_vkCmdBindVertexBuffers2EXT( vkGetDeviceProcAddr( device, "vkCmdBindVertexBuffers2EXT" ) ); + if ( !vkCmdBindVertexBuffers2 ) + vkCmdBindVertexBuffers2 = vkCmdBindVertexBuffers2EXT; + vkCmdSetDepthTestEnableEXT = PFN_vkCmdSetDepthTestEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthTestEnableEXT" ) ); + if ( !vkCmdSetDepthTestEnable ) + vkCmdSetDepthTestEnable = vkCmdSetDepthTestEnableEXT; + vkCmdSetDepthWriteEnableEXT = PFN_vkCmdSetDepthWriteEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthWriteEnableEXT" ) ); + if ( !vkCmdSetDepthWriteEnable ) + vkCmdSetDepthWriteEnable = vkCmdSetDepthWriteEnableEXT; + vkCmdSetDepthCompareOpEXT = PFN_vkCmdSetDepthCompareOpEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthCompareOpEXT" ) ); + if ( !vkCmdSetDepthCompareOp ) + vkCmdSetDepthCompareOp = vkCmdSetDepthCompareOpEXT; + vkCmdSetDepthBoundsTestEnableEXT = PFN_vkCmdSetDepthBoundsTestEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthBoundsTestEnableEXT" ) ); + if ( !vkCmdSetDepthBoundsTestEnable ) + vkCmdSetDepthBoundsTestEnable = vkCmdSetDepthBoundsTestEnableEXT; + vkCmdSetStencilTestEnableEXT = PFN_vkCmdSetStencilTestEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetStencilTestEnableEXT" ) ); + if ( !vkCmdSetStencilTestEnable ) + vkCmdSetStencilTestEnable = vkCmdSetStencilTestEnableEXT; + vkCmdSetStencilOpEXT = PFN_vkCmdSetStencilOpEXT( vkGetDeviceProcAddr( device, "vkCmdSetStencilOpEXT" ) ); + if ( !vkCmdSetStencilOp ) + vkCmdSetStencilOp = vkCmdSetStencilOpEXT; + + //=== VK_KHR_deferred_host_operations === + vkCreateDeferredOperationKHR = PFN_vkCreateDeferredOperationKHR( vkGetDeviceProcAddr( device, "vkCreateDeferredOperationKHR" ) ); + vkDestroyDeferredOperationKHR = PFN_vkDestroyDeferredOperationKHR( vkGetDeviceProcAddr( device, "vkDestroyDeferredOperationKHR" ) ); + vkGetDeferredOperationMaxConcurrencyKHR = + PFN_vkGetDeferredOperationMaxConcurrencyKHR( vkGetDeviceProcAddr( device, "vkGetDeferredOperationMaxConcurrencyKHR" ) ); + vkGetDeferredOperationResultKHR = PFN_vkGetDeferredOperationResultKHR( vkGetDeviceProcAddr( device, "vkGetDeferredOperationResultKHR" ) ); + vkDeferredOperationJoinKHR = PFN_vkDeferredOperationJoinKHR( vkGetDeviceProcAddr( device, "vkDeferredOperationJoinKHR" ) ); + + //=== VK_KHR_pipeline_executable_properties === + vkGetPipelineExecutablePropertiesKHR = + PFN_vkGetPipelineExecutablePropertiesKHR( vkGetDeviceProcAddr( device, "vkGetPipelineExecutablePropertiesKHR" ) ); + vkGetPipelineExecutableStatisticsKHR = + PFN_vkGetPipelineExecutableStatisticsKHR( vkGetDeviceProcAddr( device, "vkGetPipelineExecutableStatisticsKHR" ) ); + vkGetPipelineExecutableInternalRepresentationsKHR = + PFN_vkGetPipelineExecutableInternalRepresentationsKHR( 
vkGetDeviceProcAddr( device, "vkGetPipelineExecutableInternalRepresentationsKHR" ) ); + + //=== VK_EXT_host_image_copy === + vkCopyMemoryToImageEXT = PFN_vkCopyMemoryToImageEXT( vkGetDeviceProcAddr( device, "vkCopyMemoryToImageEXT" ) ); + vkCopyImageToMemoryEXT = PFN_vkCopyImageToMemoryEXT( vkGetDeviceProcAddr( device, "vkCopyImageToMemoryEXT" ) ); + vkCopyImageToImageEXT = PFN_vkCopyImageToImageEXT( vkGetDeviceProcAddr( device, "vkCopyImageToImageEXT" ) ); + vkTransitionImageLayoutEXT = PFN_vkTransitionImageLayoutEXT( vkGetDeviceProcAddr( device, "vkTransitionImageLayoutEXT" ) ); + vkGetImageSubresourceLayout2EXT = PFN_vkGetImageSubresourceLayout2EXT( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout2EXT" ) ); + if ( !vkGetImageSubresourceLayout2KHR ) + vkGetImageSubresourceLayout2KHR = vkGetImageSubresourceLayout2EXT; + + //=== VK_KHR_map_memory2 === + vkMapMemory2KHR = PFN_vkMapMemory2KHR( vkGetDeviceProcAddr( device, "vkMapMemory2KHR" ) ); + vkUnmapMemory2KHR = PFN_vkUnmapMemory2KHR( vkGetDeviceProcAddr( device, "vkUnmapMemory2KHR" ) ); + + //=== VK_EXT_swapchain_maintenance1 === + vkReleaseSwapchainImagesEXT = PFN_vkReleaseSwapchainImagesEXT( vkGetDeviceProcAddr( device, "vkReleaseSwapchainImagesEXT" ) ); + + //=== VK_NV_device_generated_commands === + vkGetGeneratedCommandsMemoryRequirementsNV = + PFN_vkGetGeneratedCommandsMemoryRequirementsNV( vkGetDeviceProcAddr( device, "vkGetGeneratedCommandsMemoryRequirementsNV" ) ); + vkCmdPreprocessGeneratedCommandsNV = PFN_vkCmdPreprocessGeneratedCommandsNV( vkGetDeviceProcAddr( device, "vkCmdPreprocessGeneratedCommandsNV" ) ); + vkCmdExecuteGeneratedCommandsNV = PFN_vkCmdExecuteGeneratedCommandsNV( vkGetDeviceProcAddr( device, "vkCmdExecuteGeneratedCommandsNV" ) ); + vkCmdBindPipelineShaderGroupNV = PFN_vkCmdBindPipelineShaderGroupNV( vkGetDeviceProcAddr( device, "vkCmdBindPipelineShaderGroupNV" ) ); + vkCreateIndirectCommandsLayoutNV = PFN_vkCreateIndirectCommandsLayoutNV( vkGetDeviceProcAddr( device, "vkCreateIndirectCommandsLayoutNV" ) ); + vkDestroyIndirectCommandsLayoutNV = PFN_vkDestroyIndirectCommandsLayoutNV( vkGetDeviceProcAddr( device, "vkDestroyIndirectCommandsLayoutNV" ) ); + + //=== VK_EXT_depth_bias_control === + vkCmdSetDepthBias2EXT = PFN_vkCmdSetDepthBias2EXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthBias2EXT" ) ); + + //=== VK_EXT_private_data === + vkCreatePrivateDataSlotEXT = PFN_vkCreatePrivateDataSlotEXT( vkGetDeviceProcAddr( device, "vkCreatePrivateDataSlotEXT" ) ); + if ( !vkCreatePrivateDataSlot ) + vkCreatePrivateDataSlot = vkCreatePrivateDataSlotEXT; + vkDestroyPrivateDataSlotEXT = PFN_vkDestroyPrivateDataSlotEXT( vkGetDeviceProcAddr( device, "vkDestroyPrivateDataSlotEXT" ) ); + if ( !vkDestroyPrivateDataSlot ) + vkDestroyPrivateDataSlot = vkDestroyPrivateDataSlotEXT; + vkSetPrivateDataEXT = PFN_vkSetPrivateDataEXT( vkGetDeviceProcAddr( device, "vkSetPrivateDataEXT" ) ); + if ( !vkSetPrivateData ) + vkSetPrivateData = vkSetPrivateDataEXT; + vkGetPrivateDataEXT = PFN_vkGetPrivateDataEXT( vkGetDeviceProcAddr( device, "vkGetPrivateDataEXT" ) ); + if ( !vkGetPrivateData ) + vkGetPrivateData = vkGetPrivateDataEXT; + + //=== VK_KHR_video_encode_queue === + vkGetEncodedVideoSessionParametersKHR = + PFN_vkGetEncodedVideoSessionParametersKHR( vkGetDeviceProcAddr( device, "vkGetEncodedVideoSessionParametersKHR" ) ); + vkCmdEncodeVideoKHR = PFN_vkCmdEncodeVideoKHR( vkGetDeviceProcAddr( device, "vkCmdEncodeVideoKHR" ) ); + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_NV_cuda_kernel_launch === + 
vkCreateCudaModuleNV = PFN_vkCreateCudaModuleNV( vkGetDeviceProcAddr( device, "vkCreateCudaModuleNV" ) ); + vkGetCudaModuleCacheNV = PFN_vkGetCudaModuleCacheNV( vkGetDeviceProcAddr( device, "vkGetCudaModuleCacheNV" ) ); + vkCreateCudaFunctionNV = PFN_vkCreateCudaFunctionNV( vkGetDeviceProcAddr( device, "vkCreateCudaFunctionNV" ) ); + vkDestroyCudaModuleNV = PFN_vkDestroyCudaModuleNV( vkGetDeviceProcAddr( device, "vkDestroyCudaModuleNV" ) ); + vkDestroyCudaFunctionNV = PFN_vkDestroyCudaFunctionNV( vkGetDeviceProcAddr( device, "vkDestroyCudaFunctionNV" ) ); + vkCmdCudaLaunchKernelNV = PFN_vkCmdCudaLaunchKernelNV( vkGetDeviceProcAddr( device, "vkCmdCudaLaunchKernelNV" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_objects === + vkExportMetalObjectsEXT = PFN_vkExportMetalObjectsEXT( vkGetDeviceProcAddr( device, "vkExportMetalObjectsEXT" ) ); +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== VK_KHR_synchronization2 === + vkCmdSetEvent2KHR = PFN_vkCmdSetEvent2KHR( vkGetDeviceProcAddr( device, "vkCmdSetEvent2KHR" ) ); + if ( !vkCmdSetEvent2 ) + vkCmdSetEvent2 = vkCmdSetEvent2KHR; + vkCmdResetEvent2KHR = PFN_vkCmdResetEvent2KHR( vkGetDeviceProcAddr( device, "vkCmdResetEvent2KHR" ) ); + if ( !vkCmdResetEvent2 ) + vkCmdResetEvent2 = vkCmdResetEvent2KHR; + vkCmdWaitEvents2KHR = PFN_vkCmdWaitEvents2KHR( vkGetDeviceProcAddr( device, "vkCmdWaitEvents2KHR" ) ); + if ( !vkCmdWaitEvents2 ) + vkCmdWaitEvents2 = vkCmdWaitEvents2KHR; + vkCmdPipelineBarrier2KHR = PFN_vkCmdPipelineBarrier2KHR( vkGetDeviceProcAddr( device, "vkCmdPipelineBarrier2KHR" ) ); + if ( !vkCmdPipelineBarrier2 ) + vkCmdPipelineBarrier2 = vkCmdPipelineBarrier2KHR; + vkCmdWriteTimestamp2KHR = PFN_vkCmdWriteTimestamp2KHR( vkGetDeviceProcAddr( device, "vkCmdWriteTimestamp2KHR" ) ); + if ( !vkCmdWriteTimestamp2 ) + vkCmdWriteTimestamp2 = vkCmdWriteTimestamp2KHR; + vkQueueSubmit2KHR = PFN_vkQueueSubmit2KHR( vkGetDeviceProcAddr( device, "vkQueueSubmit2KHR" ) ); + if ( !vkQueueSubmit2 ) + vkQueueSubmit2 = vkQueueSubmit2KHR; + + //=== VK_EXT_descriptor_buffer === + vkGetDescriptorSetLayoutSizeEXT = PFN_vkGetDescriptorSetLayoutSizeEXT( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutSizeEXT" ) ); + vkGetDescriptorSetLayoutBindingOffsetEXT = + PFN_vkGetDescriptorSetLayoutBindingOffsetEXT( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutBindingOffsetEXT" ) ); + vkGetDescriptorEXT = PFN_vkGetDescriptorEXT( vkGetDeviceProcAddr( device, "vkGetDescriptorEXT" ) ); + vkCmdBindDescriptorBuffersEXT = PFN_vkCmdBindDescriptorBuffersEXT( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorBuffersEXT" ) ); + vkCmdSetDescriptorBufferOffsetsEXT = PFN_vkCmdSetDescriptorBufferOffsetsEXT( vkGetDeviceProcAddr( device, "vkCmdSetDescriptorBufferOffsetsEXT" ) ); + vkCmdBindDescriptorBufferEmbeddedSamplersEXT = + PFN_vkCmdBindDescriptorBufferEmbeddedSamplersEXT( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorBufferEmbeddedSamplersEXT" ) ); + vkGetBufferOpaqueCaptureDescriptorDataEXT = + PFN_vkGetBufferOpaqueCaptureDescriptorDataEXT( vkGetDeviceProcAddr( device, "vkGetBufferOpaqueCaptureDescriptorDataEXT" ) ); + vkGetImageOpaqueCaptureDescriptorDataEXT = + PFN_vkGetImageOpaqueCaptureDescriptorDataEXT( vkGetDeviceProcAddr( device, "vkGetImageOpaqueCaptureDescriptorDataEXT" ) ); + vkGetImageViewOpaqueCaptureDescriptorDataEXT = + PFN_vkGetImageViewOpaqueCaptureDescriptorDataEXT( vkGetDeviceProcAddr( device, "vkGetImageViewOpaqueCaptureDescriptorDataEXT" ) ); + vkGetSamplerOpaqueCaptureDescriptorDataEXT = + 
PFN_vkGetSamplerOpaqueCaptureDescriptorDataEXT( vkGetDeviceProcAddr( device, "vkGetSamplerOpaqueCaptureDescriptorDataEXT" ) ); + vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT = PFN_vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT( + vkGetDeviceProcAddr( device, "vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT" ) ); + + //=== VK_NV_fragment_shading_rate_enums === + vkCmdSetFragmentShadingRateEnumNV = PFN_vkCmdSetFragmentShadingRateEnumNV( vkGetDeviceProcAddr( device, "vkCmdSetFragmentShadingRateEnumNV" ) ); + + //=== VK_EXT_mesh_shader === + vkCmdDrawMeshTasksEXT = PFN_vkCmdDrawMeshTasksEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksEXT" ) ); + vkCmdDrawMeshTasksIndirectEXT = PFN_vkCmdDrawMeshTasksIndirectEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectEXT" ) ); + vkCmdDrawMeshTasksIndirectCountEXT = PFN_vkCmdDrawMeshTasksIndirectCountEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectCountEXT" ) ); + + //=== VK_KHR_copy_commands2 === + vkCmdCopyBuffer2KHR = PFN_vkCmdCopyBuffer2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyBuffer2KHR" ) ); + if ( !vkCmdCopyBuffer2 ) + vkCmdCopyBuffer2 = vkCmdCopyBuffer2KHR; + vkCmdCopyImage2KHR = PFN_vkCmdCopyImage2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyImage2KHR" ) ); + if ( !vkCmdCopyImage2 ) + vkCmdCopyImage2 = vkCmdCopyImage2KHR; + vkCmdCopyBufferToImage2KHR = PFN_vkCmdCopyBufferToImage2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyBufferToImage2KHR" ) ); + if ( !vkCmdCopyBufferToImage2 ) + vkCmdCopyBufferToImage2 = vkCmdCopyBufferToImage2KHR; + vkCmdCopyImageToBuffer2KHR = PFN_vkCmdCopyImageToBuffer2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyImageToBuffer2KHR" ) ); + if ( !vkCmdCopyImageToBuffer2 ) + vkCmdCopyImageToBuffer2 = vkCmdCopyImageToBuffer2KHR; + vkCmdBlitImage2KHR = PFN_vkCmdBlitImage2KHR( vkGetDeviceProcAddr( device, "vkCmdBlitImage2KHR" ) ); + if ( !vkCmdBlitImage2 ) + vkCmdBlitImage2 = vkCmdBlitImage2KHR; + vkCmdResolveImage2KHR = PFN_vkCmdResolveImage2KHR( vkGetDeviceProcAddr( device, "vkCmdResolveImage2KHR" ) ); + if ( !vkCmdResolveImage2 ) + vkCmdResolveImage2 = vkCmdResolveImage2KHR; + + //=== VK_EXT_device_fault === + vkGetDeviceFaultInfoEXT = PFN_vkGetDeviceFaultInfoEXT( vkGetDeviceProcAddr( device, "vkGetDeviceFaultInfoEXT" ) ); + + //=== VK_EXT_vertex_input_dynamic_state === + vkCmdSetVertexInputEXT = PFN_vkCmdSetVertexInputEXT( vkGetDeviceProcAddr( device, "vkCmdSetVertexInputEXT" ) ); + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_external_memory === + vkGetMemoryZirconHandleFUCHSIA = PFN_vkGetMemoryZirconHandleFUCHSIA( vkGetDeviceProcAddr( device, "vkGetMemoryZirconHandleFUCHSIA" ) ); + vkGetMemoryZirconHandlePropertiesFUCHSIA = + PFN_vkGetMemoryZirconHandlePropertiesFUCHSIA( vkGetDeviceProcAddr( device, "vkGetMemoryZirconHandlePropertiesFUCHSIA" ) ); +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_external_semaphore === + vkImportSemaphoreZirconHandleFUCHSIA = + PFN_vkImportSemaphoreZirconHandleFUCHSIA( vkGetDeviceProcAddr( device, "vkImportSemaphoreZirconHandleFUCHSIA" ) ); + vkGetSemaphoreZirconHandleFUCHSIA = PFN_vkGetSemaphoreZirconHandleFUCHSIA( vkGetDeviceProcAddr( device, "vkGetSemaphoreZirconHandleFUCHSIA" ) ); +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_buffer_collection === + vkCreateBufferCollectionFUCHSIA = PFN_vkCreateBufferCollectionFUCHSIA( vkGetDeviceProcAddr( device, "vkCreateBufferCollectionFUCHSIA" ) ); + 
vkSetBufferCollectionImageConstraintsFUCHSIA = + PFN_vkSetBufferCollectionImageConstraintsFUCHSIA( vkGetDeviceProcAddr( device, "vkSetBufferCollectionImageConstraintsFUCHSIA" ) ); + vkSetBufferCollectionBufferConstraintsFUCHSIA = + PFN_vkSetBufferCollectionBufferConstraintsFUCHSIA( vkGetDeviceProcAddr( device, "vkSetBufferCollectionBufferConstraintsFUCHSIA" ) ); + vkDestroyBufferCollectionFUCHSIA = PFN_vkDestroyBufferCollectionFUCHSIA( vkGetDeviceProcAddr( device, "vkDestroyBufferCollectionFUCHSIA" ) ); + vkGetBufferCollectionPropertiesFUCHSIA = + PFN_vkGetBufferCollectionPropertiesFUCHSIA( vkGetDeviceProcAddr( device, "vkGetBufferCollectionPropertiesFUCHSIA" ) ); +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + + //=== VK_HUAWEI_subpass_shading === + vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI = + PFN_vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI( vkGetDeviceProcAddr( device, "vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI" ) ); + vkCmdSubpassShadingHUAWEI = PFN_vkCmdSubpassShadingHUAWEI( vkGetDeviceProcAddr( device, "vkCmdSubpassShadingHUAWEI" ) ); + + //=== VK_HUAWEI_invocation_mask === + vkCmdBindInvocationMaskHUAWEI = PFN_vkCmdBindInvocationMaskHUAWEI( vkGetDeviceProcAddr( device, "vkCmdBindInvocationMaskHUAWEI" ) ); + + //=== VK_NV_external_memory_rdma === + vkGetMemoryRemoteAddressNV = PFN_vkGetMemoryRemoteAddressNV( vkGetDeviceProcAddr( device, "vkGetMemoryRemoteAddressNV" ) ); + + //=== VK_EXT_pipeline_properties === + vkGetPipelinePropertiesEXT = PFN_vkGetPipelinePropertiesEXT( vkGetDeviceProcAddr( device, "vkGetPipelinePropertiesEXT" ) ); + + //=== VK_EXT_extended_dynamic_state2 === + vkCmdSetPatchControlPointsEXT = PFN_vkCmdSetPatchControlPointsEXT( vkGetDeviceProcAddr( device, "vkCmdSetPatchControlPointsEXT" ) ); + vkCmdSetRasterizerDiscardEnableEXT = PFN_vkCmdSetRasterizerDiscardEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetRasterizerDiscardEnableEXT" ) ); + if ( !vkCmdSetRasterizerDiscardEnable ) + vkCmdSetRasterizerDiscardEnable = vkCmdSetRasterizerDiscardEnableEXT; + vkCmdSetDepthBiasEnableEXT = PFN_vkCmdSetDepthBiasEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthBiasEnableEXT" ) ); + if ( !vkCmdSetDepthBiasEnable ) + vkCmdSetDepthBiasEnable = vkCmdSetDepthBiasEnableEXT; + vkCmdSetLogicOpEXT = PFN_vkCmdSetLogicOpEXT( vkGetDeviceProcAddr( device, "vkCmdSetLogicOpEXT" ) ); + vkCmdSetPrimitiveRestartEnableEXT = PFN_vkCmdSetPrimitiveRestartEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveRestartEnableEXT" ) ); + if ( !vkCmdSetPrimitiveRestartEnable ) + vkCmdSetPrimitiveRestartEnable = vkCmdSetPrimitiveRestartEnableEXT; + + //=== VK_EXT_color_write_enable === + vkCmdSetColorWriteEnableEXT = PFN_vkCmdSetColorWriteEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorWriteEnableEXT" ) ); + + //=== VK_KHR_ray_tracing_maintenance1 === + vkCmdTraceRaysIndirect2KHR = PFN_vkCmdTraceRaysIndirect2KHR( vkGetDeviceProcAddr( device, "vkCmdTraceRaysIndirect2KHR" ) ); + + //=== VK_EXT_multi_draw === + vkCmdDrawMultiEXT = PFN_vkCmdDrawMultiEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMultiEXT" ) ); + vkCmdDrawMultiIndexedEXT = PFN_vkCmdDrawMultiIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMultiIndexedEXT" ) ); + + //=== VK_EXT_opacity_micromap === + vkCreateMicromapEXT = PFN_vkCreateMicromapEXT( vkGetDeviceProcAddr( device, "vkCreateMicromapEXT" ) ); + vkDestroyMicromapEXT = PFN_vkDestroyMicromapEXT( vkGetDeviceProcAddr( device, "vkDestroyMicromapEXT" ) ); + vkCmdBuildMicromapsEXT = PFN_vkCmdBuildMicromapsEXT( vkGetDeviceProcAddr( device, "vkCmdBuildMicromapsEXT" ) ); + 
vkBuildMicromapsEXT = PFN_vkBuildMicromapsEXT( vkGetDeviceProcAddr( device, "vkBuildMicromapsEXT" ) ); + vkCopyMicromapEXT = PFN_vkCopyMicromapEXT( vkGetDeviceProcAddr( device, "vkCopyMicromapEXT" ) ); + vkCopyMicromapToMemoryEXT = PFN_vkCopyMicromapToMemoryEXT( vkGetDeviceProcAddr( device, "vkCopyMicromapToMemoryEXT" ) ); + vkCopyMemoryToMicromapEXT = PFN_vkCopyMemoryToMicromapEXT( vkGetDeviceProcAddr( device, "vkCopyMemoryToMicromapEXT" ) ); + vkWriteMicromapsPropertiesEXT = PFN_vkWriteMicromapsPropertiesEXT( vkGetDeviceProcAddr( device, "vkWriteMicromapsPropertiesEXT" ) ); + vkCmdCopyMicromapEXT = PFN_vkCmdCopyMicromapEXT( vkGetDeviceProcAddr( device, "vkCmdCopyMicromapEXT" ) ); + vkCmdCopyMicromapToMemoryEXT = PFN_vkCmdCopyMicromapToMemoryEXT( vkGetDeviceProcAddr( device, "vkCmdCopyMicromapToMemoryEXT" ) ); + vkCmdCopyMemoryToMicromapEXT = PFN_vkCmdCopyMemoryToMicromapEXT( vkGetDeviceProcAddr( device, "vkCmdCopyMemoryToMicromapEXT" ) ); + vkCmdWriteMicromapsPropertiesEXT = PFN_vkCmdWriteMicromapsPropertiesEXT( vkGetDeviceProcAddr( device, "vkCmdWriteMicromapsPropertiesEXT" ) ); + vkGetDeviceMicromapCompatibilityEXT = PFN_vkGetDeviceMicromapCompatibilityEXT( vkGetDeviceProcAddr( device, "vkGetDeviceMicromapCompatibilityEXT" ) ); + vkGetMicromapBuildSizesEXT = PFN_vkGetMicromapBuildSizesEXT( vkGetDeviceProcAddr( device, "vkGetMicromapBuildSizesEXT" ) ); + + //=== VK_HUAWEI_cluster_culling_shader === + vkCmdDrawClusterHUAWEI = PFN_vkCmdDrawClusterHUAWEI( vkGetDeviceProcAddr( device, "vkCmdDrawClusterHUAWEI" ) ); + vkCmdDrawClusterIndirectHUAWEI = PFN_vkCmdDrawClusterIndirectHUAWEI( vkGetDeviceProcAddr( device, "vkCmdDrawClusterIndirectHUAWEI" ) ); + + //=== VK_EXT_pageable_device_local_memory === + vkSetDeviceMemoryPriorityEXT = PFN_vkSetDeviceMemoryPriorityEXT( vkGetDeviceProcAddr( device, "vkSetDeviceMemoryPriorityEXT" ) ); + + //=== VK_KHR_maintenance4 === + vkGetDeviceBufferMemoryRequirementsKHR = + PFN_vkGetDeviceBufferMemoryRequirementsKHR( vkGetDeviceProcAddr( device, "vkGetDeviceBufferMemoryRequirementsKHR" ) ); + if ( !vkGetDeviceBufferMemoryRequirements ) + vkGetDeviceBufferMemoryRequirements = vkGetDeviceBufferMemoryRequirementsKHR; + vkGetDeviceImageMemoryRequirementsKHR = + PFN_vkGetDeviceImageMemoryRequirementsKHR( vkGetDeviceProcAddr( device, "vkGetDeviceImageMemoryRequirementsKHR" ) ); + if ( !vkGetDeviceImageMemoryRequirements ) + vkGetDeviceImageMemoryRequirements = vkGetDeviceImageMemoryRequirementsKHR; + vkGetDeviceImageSparseMemoryRequirementsKHR = + PFN_vkGetDeviceImageSparseMemoryRequirementsKHR( vkGetDeviceProcAddr( device, "vkGetDeviceImageSparseMemoryRequirementsKHR" ) ); + if ( !vkGetDeviceImageSparseMemoryRequirements ) + vkGetDeviceImageSparseMemoryRequirements = vkGetDeviceImageSparseMemoryRequirementsKHR; + + //=== VK_VALVE_descriptor_set_host_mapping === + vkGetDescriptorSetLayoutHostMappingInfoVALVE = + PFN_vkGetDescriptorSetLayoutHostMappingInfoVALVE( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutHostMappingInfoVALVE" ) ); + vkGetDescriptorSetHostMappingVALVE = PFN_vkGetDescriptorSetHostMappingVALVE( vkGetDeviceProcAddr( device, "vkGetDescriptorSetHostMappingVALVE" ) ); + + //=== VK_NV_copy_memory_indirect === + vkCmdCopyMemoryIndirectNV = PFN_vkCmdCopyMemoryIndirectNV( vkGetDeviceProcAddr( device, "vkCmdCopyMemoryIndirectNV" ) ); + vkCmdCopyMemoryToImageIndirectNV = PFN_vkCmdCopyMemoryToImageIndirectNV( vkGetDeviceProcAddr( device, "vkCmdCopyMemoryToImageIndirectNV" ) ); + + //=== VK_NV_memory_decompression === + vkCmdDecompressMemoryNV = 
PFN_vkCmdDecompressMemoryNV( vkGetDeviceProcAddr( device, "vkCmdDecompressMemoryNV" ) ); + vkCmdDecompressMemoryIndirectCountNV = + PFN_vkCmdDecompressMemoryIndirectCountNV( vkGetDeviceProcAddr( device, "vkCmdDecompressMemoryIndirectCountNV" ) ); + + //=== VK_NV_device_generated_commands_compute === + vkGetPipelineIndirectMemoryRequirementsNV = + PFN_vkGetPipelineIndirectMemoryRequirementsNV( vkGetDeviceProcAddr( device, "vkGetPipelineIndirectMemoryRequirementsNV" ) ); + vkCmdUpdatePipelineIndirectBufferNV = PFN_vkCmdUpdatePipelineIndirectBufferNV( vkGetDeviceProcAddr( device, "vkCmdUpdatePipelineIndirectBufferNV" ) ); + vkGetPipelineIndirectDeviceAddressNV = + PFN_vkGetPipelineIndirectDeviceAddressNV( vkGetDeviceProcAddr( device, "vkGetPipelineIndirectDeviceAddressNV" ) ); + + //=== VK_EXT_extended_dynamic_state3 === + vkCmdSetDepthClampEnableEXT = PFN_vkCmdSetDepthClampEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthClampEnableEXT" ) ); + vkCmdSetPolygonModeEXT = PFN_vkCmdSetPolygonModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetPolygonModeEXT" ) ); + vkCmdSetRasterizationSamplesEXT = PFN_vkCmdSetRasterizationSamplesEXT( vkGetDeviceProcAddr( device, "vkCmdSetRasterizationSamplesEXT" ) ); + vkCmdSetSampleMaskEXT = PFN_vkCmdSetSampleMaskEXT( vkGetDeviceProcAddr( device, "vkCmdSetSampleMaskEXT" ) ); + vkCmdSetAlphaToCoverageEnableEXT = PFN_vkCmdSetAlphaToCoverageEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetAlphaToCoverageEnableEXT" ) ); + vkCmdSetAlphaToOneEnableEXT = PFN_vkCmdSetAlphaToOneEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetAlphaToOneEnableEXT" ) ); + vkCmdSetLogicOpEnableEXT = PFN_vkCmdSetLogicOpEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetLogicOpEnableEXT" ) ); + vkCmdSetColorBlendEnableEXT = PFN_vkCmdSetColorBlendEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorBlendEnableEXT" ) ); + vkCmdSetColorBlendEquationEXT = PFN_vkCmdSetColorBlendEquationEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorBlendEquationEXT" ) ); + vkCmdSetColorWriteMaskEXT = PFN_vkCmdSetColorWriteMaskEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorWriteMaskEXT" ) ); + vkCmdSetTessellationDomainOriginEXT = PFN_vkCmdSetTessellationDomainOriginEXT( vkGetDeviceProcAddr( device, "vkCmdSetTessellationDomainOriginEXT" ) ); + vkCmdSetRasterizationStreamEXT = PFN_vkCmdSetRasterizationStreamEXT( vkGetDeviceProcAddr( device, "vkCmdSetRasterizationStreamEXT" ) ); + vkCmdSetConservativeRasterizationModeEXT = + PFN_vkCmdSetConservativeRasterizationModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetConservativeRasterizationModeEXT" ) ); + vkCmdSetExtraPrimitiveOverestimationSizeEXT = + PFN_vkCmdSetExtraPrimitiveOverestimationSizeEXT( vkGetDeviceProcAddr( device, "vkCmdSetExtraPrimitiveOverestimationSizeEXT" ) ); + vkCmdSetDepthClipEnableEXT = PFN_vkCmdSetDepthClipEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthClipEnableEXT" ) ); + vkCmdSetSampleLocationsEnableEXT = PFN_vkCmdSetSampleLocationsEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetSampleLocationsEnableEXT" ) ); + vkCmdSetColorBlendAdvancedEXT = PFN_vkCmdSetColorBlendAdvancedEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorBlendAdvancedEXT" ) ); + vkCmdSetProvokingVertexModeEXT = PFN_vkCmdSetProvokingVertexModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetProvokingVertexModeEXT" ) ); + vkCmdSetLineRasterizationModeEXT = PFN_vkCmdSetLineRasterizationModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetLineRasterizationModeEXT" ) ); + vkCmdSetLineStippleEnableEXT = PFN_vkCmdSetLineStippleEnableEXT( vkGetDeviceProcAddr( device, 
"vkCmdSetLineStippleEnableEXT" ) ); + vkCmdSetDepthClipNegativeOneToOneEXT = + PFN_vkCmdSetDepthClipNegativeOneToOneEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthClipNegativeOneToOneEXT" ) ); + vkCmdSetViewportWScalingEnableNV = PFN_vkCmdSetViewportWScalingEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportWScalingEnableNV" ) ); + vkCmdSetViewportSwizzleNV = PFN_vkCmdSetViewportSwizzleNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportSwizzleNV" ) ); + vkCmdSetCoverageToColorEnableNV = PFN_vkCmdSetCoverageToColorEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageToColorEnableNV" ) ); + vkCmdSetCoverageToColorLocationNV = PFN_vkCmdSetCoverageToColorLocationNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageToColorLocationNV" ) ); + vkCmdSetCoverageModulationModeNV = PFN_vkCmdSetCoverageModulationModeNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageModulationModeNV" ) ); + vkCmdSetCoverageModulationTableEnableNV = + PFN_vkCmdSetCoverageModulationTableEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageModulationTableEnableNV" ) ); + vkCmdSetCoverageModulationTableNV = PFN_vkCmdSetCoverageModulationTableNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageModulationTableNV" ) ); + vkCmdSetShadingRateImageEnableNV = PFN_vkCmdSetShadingRateImageEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetShadingRateImageEnableNV" ) ); + vkCmdSetRepresentativeFragmentTestEnableNV = + PFN_vkCmdSetRepresentativeFragmentTestEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetRepresentativeFragmentTestEnableNV" ) ); + vkCmdSetCoverageReductionModeNV = PFN_vkCmdSetCoverageReductionModeNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageReductionModeNV" ) ); + + //=== VK_EXT_shader_module_identifier === + vkGetShaderModuleIdentifierEXT = PFN_vkGetShaderModuleIdentifierEXT( vkGetDeviceProcAddr( device, "vkGetShaderModuleIdentifierEXT" ) ); + vkGetShaderModuleCreateInfoIdentifierEXT = + PFN_vkGetShaderModuleCreateInfoIdentifierEXT( vkGetDeviceProcAddr( device, "vkGetShaderModuleCreateInfoIdentifierEXT" ) ); + + //=== VK_NV_optical_flow === + vkCreateOpticalFlowSessionNV = PFN_vkCreateOpticalFlowSessionNV( vkGetDeviceProcAddr( device, "vkCreateOpticalFlowSessionNV" ) ); + vkDestroyOpticalFlowSessionNV = PFN_vkDestroyOpticalFlowSessionNV( vkGetDeviceProcAddr( device, "vkDestroyOpticalFlowSessionNV" ) ); + vkBindOpticalFlowSessionImageNV = PFN_vkBindOpticalFlowSessionImageNV( vkGetDeviceProcAddr( device, "vkBindOpticalFlowSessionImageNV" ) ); + vkCmdOpticalFlowExecuteNV = PFN_vkCmdOpticalFlowExecuteNV( vkGetDeviceProcAddr( device, "vkCmdOpticalFlowExecuteNV" ) ); + + //=== VK_KHR_maintenance5 === + vkCmdBindIndexBuffer2KHR = PFN_vkCmdBindIndexBuffer2KHR( vkGetDeviceProcAddr( device, "vkCmdBindIndexBuffer2KHR" ) ); + vkGetRenderingAreaGranularityKHR = PFN_vkGetRenderingAreaGranularityKHR( vkGetDeviceProcAddr( device, "vkGetRenderingAreaGranularityKHR" ) ); + vkGetDeviceImageSubresourceLayoutKHR = + PFN_vkGetDeviceImageSubresourceLayoutKHR( vkGetDeviceProcAddr( device, "vkGetDeviceImageSubresourceLayoutKHR" ) ); + vkGetImageSubresourceLayout2KHR = PFN_vkGetImageSubresourceLayout2KHR( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout2KHR" ) ); + + //=== VK_AMD_anti_lag === + vkAntiLagUpdateAMD = PFN_vkAntiLagUpdateAMD( vkGetDeviceProcAddr( device, "vkAntiLagUpdateAMD" ) ); + + //=== VK_EXT_shader_object === + vkCreateShadersEXT = PFN_vkCreateShadersEXT( vkGetDeviceProcAddr( device, "vkCreateShadersEXT" ) ); + vkDestroyShaderEXT = PFN_vkDestroyShaderEXT( vkGetDeviceProcAddr( device, "vkDestroyShaderEXT" ) ); 
+ vkGetShaderBinaryDataEXT = PFN_vkGetShaderBinaryDataEXT( vkGetDeviceProcAddr( device, "vkGetShaderBinaryDataEXT" ) ); + vkCmdBindShadersEXT = PFN_vkCmdBindShadersEXT( vkGetDeviceProcAddr( device, "vkCmdBindShadersEXT" ) ); + vkCmdSetDepthClampRangeEXT = PFN_vkCmdSetDepthClampRangeEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthClampRangeEXT" ) ); + + //=== VK_KHR_pipeline_binary === + vkCreatePipelineBinariesKHR = PFN_vkCreatePipelineBinariesKHR( vkGetDeviceProcAddr( device, "vkCreatePipelineBinariesKHR" ) ); + vkDestroyPipelineBinaryKHR = PFN_vkDestroyPipelineBinaryKHR( vkGetDeviceProcAddr( device, "vkDestroyPipelineBinaryKHR" ) ); + vkGetPipelineKeyKHR = PFN_vkGetPipelineKeyKHR( vkGetDeviceProcAddr( device, "vkGetPipelineKeyKHR" ) ); + vkGetPipelineBinaryDataKHR = PFN_vkGetPipelineBinaryDataKHR( vkGetDeviceProcAddr( device, "vkGetPipelineBinaryDataKHR" ) ); + vkReleaseCapturedPipelineDataKHR = PFN_vkReleaseCapturedPipelineDataKHR( vkGetDeviceProcAddr( device, "vkReleaseCapturedPipelineDataKHR" ) ); + + //=== VK_QCOM_tile_properties === + vkGetFramebufferTilePropertiesQCOM = PFN_vkGetFramebufferTilePropertiesQCOM( vkGetDeviceProcAddr( device, "vkGetFramebufferTilePropertiesQCOM" ) ); + vkGetDynamicRenderingTilePropertiesQCOM = + PFN_vkGetDynamicRenderingTilePropertiesQCOM( vkGetDeviceProcAddr( device, "vkGetDynamicRenderingTilePropertiesQCOM" ) ); + + //=== VK_NV_low_latency2 === + vkSetLatencySleepModeNV = PFN_vkSetLatencySleepModeNV( vkGetDeviceProcAddr( device, "vkSetLatencySleepModeNV" ) ); + vkLatencySleepNV = PFN_vkLatencySleepNV( vkGetDeviceProcAddr( device, "vkLatencySleepNV" ) ); + vkSetLatencyMarkerNV = PFN_vkSetLatencyMarkerNV( vkGetDeviceProcAddr( device, "vkSetLatencyMarkerNV" ) ); + vkGetLatencyTimingsNV = PFN_vkGetLatencyTimingsNV( vkGetDeviceProcAddr( device, "vkGetLatencyTimingsNV" ) ); + vkQueueNotifyOutOfBandNV = PFN_vkQueueNotifyOutOfBandNV( vkGetDeviceProcAddr( device, "vkQueueNotifyOutOfBandNV" ) ); + + //=== VK_EXT_attachment_feedback_loop_dynamic_state === + vkCmdSetAttachmentFeedbackLoopEnableEXT = + PFN_vkCmdSetAttachmentFeedbackLoopEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetAttachmentFeedbackLoopEnableEXT" ) ); + +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + //=== VK_QNX_external_memory_screen_buffer === + vkGetScreenBufferPropertiesQNX = PFN_vkGetScreenBufferPropertiesQNX( vkGetDeviceProcAddr( device, "vkGetScreenBufferPropertiesQNX" ) ); +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + //=== VK_KHR_line_rasterization === + vkCmdSetLineStippleKHR = PFN_vkCmdSetLineStippleKHR( vkGetDeviceProcAddr( device, "vkCmdSetLineStippleKHR" ) ); + + //=== VK_KHR_calibrated_timestamps === + vkGetCalibratedTimestampsKHR = PFN_vkGetCalibratedTimestampsKHR( vkGetDeviceProcAddr( device, "vkGetCalibratedTimestampsKHR" ) ); + + //=== VK_KHR_maintenance6 === + vkCmdBindDescriptorSets2KHR = PFN_vkCmdBindDescriptorSets2KHR( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorSets2KHR" ) ); + vkCmdPushConstants2KHR = PFN_vkCmdPushConstants2KHR( vkGetDeviceProcAddr( device, "vkCmdPushConstants2KHR" ) ); + vkCmdPushDescriptorSet2KHR = PFN_vkCmdPushDescriptorSet2KHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSet2KHR" ) ); + vkCmdPushDescriptorSetWithTemplate2KHR = + PFN_vkCmdPushDescriptorSetWithTemplate2KHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetWithTemplate2KHR" ) ); + vkCmdSetDescriptorBufferOffsets2EXT = PFN_vkCmdSetDescriptorBufferOffsets2EXT( vkGetDeviceProcAddr( device, "vkCmdSetDescriptorBufferOffsets2EXT" ) ); + vkCmdBindDescriptorBufferEmbeddedSamplers2EXT = 
+ PFN_vkCmdBindDescriptorBufferEmbeddedSamplers2EXT( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorBufferEmbeddedSamplers2EXT" ) );
+
+ //=== VK_EXT_device_generated_commands ===
+ vkGetGeneratedCommandsMemoryRequirementsEXT =
+ PFN_vkGetGeneratedCommandsMemoryRequirementsEXT( vkGetDeviceProcAddr( device, "vkGetGeneratedCommandsMemoryRequirementsEXT" ) );
+ vkCmdPreprocessGeneratedCommandsEXT = PFN_vkCmdPreprocessGeneratedCommandsEXT( vkGetDeviceProcAddr( device, "vkCmdPreprocessGeneratedCommandsEXT" ) );
+ vkCmdExecuteGeneratedCommandsEXT = PFN_vkCmdExecuteGeneratedCommandsEXT( vkGetDeviceProcAddr( device, "vkCmdExecuteGeneratedCommandsEXT" ) );
+ vkCreateIndirectCommandsLayoutEXT = PFN_vkCreateIndirectCommandsLayoutEXT( vkGetDeviceProcAddr( device, "vkCreateIndirectCommandsLayoutEXT" ) );
+ vkDestroyIndirectCommandsLayoutEXT = PFN_vkDestroyIndirectCommandsLayoutEXT( vkGetDeviceProcAddr( device, "vkDestroyIndirectCommandsLayoutEXT" ) );
+ vkCreateIndirectExecutionSetEXT = PFN_vkCreateIndirectExecutionSetEXT( vkGetDeviceProcAddr( device, "vkCreateIndirectExecutionSetEXT" ) );
+ vkDestroyIndirectExecutionSetEXT = PFN_vkDestroyIndirectExecutionSetEXT( vkGetDeviceProcAddr( device, "vkDestroyIndirectExecutionSetEXT" ) );
+ vkUpdateIndirectExecutionSetPipelineEXT =
+ PFN_vkUpdateIndirectExecutionSetPipelineEXT( vkGetDeviceProcAddr( device, "vkUpdateIndirectExecutionSetPipelineEXT" ) );
+ vkUpdateIndirectExecutionSetShaderEXT =
+ PFN_vkUpdateIndirectExecutionSetShaderEXT( vkGetDeviceProcAddr( device, "vkUpdateIndirectExecutionSetShaderEXT" ) );
+ }
+
+ template <typename DynamicLoader>
+ void init( VULKAN_HPP_NAMESPACE::Instance const & instance, VULKAN_HPP_NAMESPACE::Device const & device, DynamicLoader const & dl ) VULKAN_HPP_NOEXCEPT
+ {
+ PFN_vkGetInstanceProcAddr getInstanceProcAddr = dl.template getProcAddress<PFN_vkGetInstanceProcAddr>( "vkGetInstanceProcAddr" );
+ PFN_vkGetDeviceProcAddr getDeviceProcAddr = dl.template getProcAddress<PFN_vkGetDeviceProcAddr>( "vkGetDeviceProcAddr" );
+ init( static_cast<VkInstance>( instance ), getInstanceProcAddr, static_cast<VkDevice>( device ), device ? getDeviceProcAddr : nullptr );
+ }
+
+ template <typename DynamicLoader
- void init( VULKAN_HPP_NAMESPACE::Instance const & instance, VULKAN_HPP_NAMESPACE::Device const & device ) VULKAN_HPP_NOEXCEPT
- {
- static DynamicLoader dl;
- init( instance, device, dl );
- }
- };
+ >
+ void init( VULKAN_HPP_NAMESPACE::Instance const & instance, VULKAN_HPP_NAMESPACE::Device const & device ) VULKAN_HPP_NOEXCEPT
+ {
+ static DynamicLoader dl;
+ init( instance, device, dl );
+ }
+ };
+ } // namespace detail
 } // namespace VULKAN_HPP_NAMESPACE
 #endif
diff --git a/third_party/vulkan/vulkan_beta.h b/third_party/vulkan/vulkan_beta.h
index df18b40..f5e9475 100644
--- a/third_party/vulkan/vulkan_beta.h
+++ b/third_party/vulkan/vulkan_beta.h
@@ -53,13 +53,14 @@ typedef struct VkPhysicalDevicePortabilitySubsetPropertiesKHR {
 // VK_AMDX_shader_enqueue is a preprocessor guard. Do not pass it to API calls.
#define VK_AMDX_shader_enqueue 1 -#define VK_AMDX_SHADER_ENQUEUE_SPEC_VERSION 1 +#define VK_AMDX_SHADER_ENQUEUE_SPEC_VERSION 2 #define VK_AMDX_SHADER_ENQUEUE_EXTENSION_NAME "VK_AMDX_shader_enqueue" #define VK_SHADER_INDEX_UNUSED_AMDX (~0U) typedef struct VkPhysicalDeviceShaderEnqueueFeaturesAMDX { VkStructureType sType; void* pNext; VkBool32 shaderEnqueue; + VkBool32 shaderMeshEnqueue; } VkPhysicalDeviceShaderEnqueueFeaturesAMDX; typedef struct VkPhysicalDeviceShaderEnqueuePropertiesAMDX { @@ -70,12 +71,16 @@ typedef struct VkPhysicalDeviceShaderEnqueuePropertiesAMDX { uint32_t maxExecutionGraphShaderPayloadSize; uint32_t maxExecutionGraphShaderPayloadCount; uint32_t executionGraphDispatchAddressAlignment; + uint32_t maxExecutionGraphWorkgroupCount[3]; + uint32_t maxExecutionGraphWorkgroups; } VkPhysicalDeviceShaderEnqueuePropertiesAMDX; typedef struct VkExecutionGraphPipelineScratchSizeAMDX { VkStructureType sType; void* pNext; - VkDeviceSize size; + VkDeviceSize minSize; + VkDeviceSize maxSize; + VkDeviceSize sizeGranularity; } VkExecutionGraphPipelineScratchSizeAMDX; typedef struct VkExecutionGraphPipelineCreateInfoAMDX { @@ -116,12 +121,12 @@ typedef struct VkPipelineShaderStageNodeCreateInfoAMDX { } VkPipelineShaderStageNodeCreateInfoAMDX; typedef VkResult (VKAPI_PTR *PFN_vkCreateExecutionGraphPipelinesAMDX)(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkExecutionGraphPipelineCreateInfoAMDX* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines); -typedef VkResult (VKAPI_PTR *PFN_vkGetExecutionGraphPipelineScratchSizeAMDX)(VkDevice device, VkPipeline executionGraph, VkExecutionGraphPipelineScratchSizeAMDX* pSizeInfo); -typedef VkResult (VKAPI_PTR *PFN_vkGetExecutionGraphPipelineNodeIndexAMDX)(VkDevice device, VkPipeline executionGraph, const VkPipelineShaderStageNodeCreateInfoAMDX* pNodeInfo, uint32_t* pNodeIndex); -typedef void (VKAPI_PTR *PFN_vkCmdInitializeGraphScratchMemoryAMDX)(VkCommandBuffer commandBuffer, VkDeviceAddress scratch); -typedef void (VKAPI_PTR *PFN_vkCmdDispatchGraphAMDX)(VkCommandBuffer commandBuffer, VkDeviceAddress scratch, const VkDispatchGraphCountInfoAMDX* pCountInfo); -typedef void (VKAPI_PTR *PFN_vkCmdDispatchGraphIndirectAMDX)(VkCommandBuffer commandBuffer, VkDeviceAddress scratch, const VkDispatchGraphCountInfoAMDX* pCountInfo); -typedef void (VKAPI_PTR *PFN_vkCmdDispatchGraphIndirectCountAMDX)(VkCommandBuffer commandBuffer, VkDeviceAddress scratch, VkDeviceAddress countInfo); +typedef VkResult (VKAPI_PTR *PFN_vkGetExecutionGraphPipelineScratchSizeAMDX)(VkDevice device, VkPipeline executionGraph, VkExecutionGraphPipelineScratchSizeAMDX* pSizeInfo); +typedef VkResult (VKAPI_PTR *PFN_vkGetExecutionGraphPipelineNodeIndexAMDX)(VkDevice device, VkPipeline executionGraph, const VkPipelineShaderStageNodeCreateInfoAMDX* pNodeInfo, uint32_t* pNodeIndex); +typedef void (VKAPI_PTR *PFN_vkCmdInitializeGraphScratchMemoryAMDX)(VkCommandBuffer commandBuffer, VkPipeline executionGraph, VkDeviceAddress scratch, VkDeviceSize scratchSize); +typedef void (VKAPI_PTR *PFN_vkCmdDispatchGraphAMDX)(VkCommandBuffer commandBuffer, VkDeviceAddress scratch, VkDeviceSize scratchSize, const VkDispatchGraphCountInfoAMDX* pCountInfo); +typedef void (VKAPI_PTR *PFN_vkCmdDispatchGraphIndirectAMDX)(VkCommandBuffer commandBuffer, VkDeviceAddress scratch, VkDeviceSize scratchSize, const VkDispatchGraphCountInfoAMDX* pCountInfo); +typedef void (VKAPI_PTR *PFN_vkCmdDispatchGraphIndirectCountAMDX)(VkCommandBuffer commandBuffer, 
VkDeviceAddress scratch, VkDeviceSize scratchSize, VkDeviceAddress countInfo); #ifndef VK_NO_PROTOTYPES VKAPI_ATTR VkResult VKAPI_CALL vkCreateExecutionGraphPipelinesAMDX( @@ -145,21 +150,26 @@ VKAPI_ATTR VkResult VKAPI_CALL vkGetExecutionGraphPipelineNodeIndexAMDX( VKAPI_ATTR void VKAPI_CALL vkCmdInitializeGraphScratchMemoryAMDX( VkCommandBuffer commandBuffer, - VkDeviceAddress scratch); + VkPipeline executionGraph, + VkDeviceAddress scratch, + VkDeviceSize scratchSize); VKAPI_ATTR void VKAPI_CALL vkCmdDispatchGraphAMDX( VkCommandBuffer commandBuffer, VkDeviceAddress scratch, + VkDeviceSize scratchSize, const VkDispatchGraphCountInfoAMDX* pCountInfo); VKAPI_ATTR void VKAPI_CALL vkCmdDispatchGraphIndirectAMDX( VkCommandBuffer commandBuffer, VkDeviceAddress scratch, + VkDeviceSize scratchSize, const VkDispatchGraphCountInfoAMDX* pCountInfo); VKAPI_ATTR void VKAPI_CALL vkCmdDispatchGraphIndirectCountAMDX( VkCommandBuffer commandBuffer, VkDeviceAddress scratch, + VkDeviceSize scratchSize, VkDeviceAddress countInfo); #endif diff --git a/third_party/vulkan/vulkan_core.h b/third_party/vulkan/vulkan_core.h index 6f1c17f..d9cdd13 100644 --- a/third_party/vulkan/vulkan_core.h +++ b/third_party/vulkan/vulkan_core.h @@ -69,7 +69,7 @@ extern "C" { #define VK_API_VERSION_1_0 VK_MAKE_API_VERSION(0, 1, 0, 0)// Patch version should always be set to 0 // Version of this file -#define VK_HEADER_VERSION 295 +#define VK_HEADER_VERSION 301 // Complete version of this file #define VK_HEADER_VERSION_COMPLETE VK_MAKE_API_VERSION(0, 1, 3, VK_HEADER_VERSION) @@ -508,10 +508,6 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_SESSION_PARAMETERS_ADD_INFO_KHR = 1000040005, VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_DPB_SLOT_INFO_KHR = 1000040006, VK_STRUCTURE_TYPE_TEXTURE_LOD_GATHER_FORMAT_PROPERTIES_AMD = 1000041000, - VK_STRUCTURE_TYPE_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_INFO_KHR = 1000044006, - VK_STRUCTURE_TYPE_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_INFO_EXT = 1000044007, - VK_STRUCTURE_TYPE_ATTACHMENT_SAMPLE_COUNT_INFO_AMD = 1000044008, - VK_STRUCTURE_TYPE_MULTIVIEW_PER_VIEW_ATTRIBUTES_INFO_NVX = 1000044009, VK_STRUCTURE_TYPE_STREAM_DESCRIPTOR_SURFACE_CREATE_INFO_GGP = 1000049000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CORNER_SAMPLED_IMAGE_FEATURES_NV = 1000050000, VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_NV = 1000056000, @@ -553,6 +549,7 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_SWAPCHAIN_COUNTER_CREATE_INFO_EXT = 1000091003, VK_STRUCTURE_TYPE_PRESENT_TIMES_INFO_GOOGLE = 1000092000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_ATTRIBUTES_PROPERTIES_NVX = 1000097000, + VK_STRUCTURE_TYPE_MULTIVIEW_PER_VIEW_ATTRIBUTES_INFO_NVX = 1000044009, VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SWIZZLE_STATE_CREATE_INFO_NV = 1000098000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DISCARD_RECTANGLE_PROPERTIES_EXT = 1000099000, VK_STRUCTURE_TYPE_PIPELINE_DISCARD_RECTANGLE_STATE_CREATE_INFO_EXT = 1000099001, @@ -612,6 +609,7 @@ typedef enum VkStructureType { #ifdef VK_ENABLE_BETA_EXTENSIONS VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_NODE_CREATE_INFO_AMDX = 1000134004, #endif + VK_STRUCTURE_TYPE_ATTACHMENT_SAMPLE_COUNT_INFO_AMD = 1000044008, VK_STRUCTURE_TYPE_SAMPLE_LOCATIONS_INFO_EXT = 1000143000, VK_STRUCTURE_TYPE_RENDER_PASS_SAMPLE_LOCATIONS_BEGIN_INFO_EXT = 1000143001, VK_STRUCTURE_TYPE_PIPELINE_SAMPLE_LOCATIONS_STATE_CREATE_INFO_EXT = 1000143002, @@ -703,6 +701,8 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXCLUSIVE_SCISSOR_FEATURES_NV = 1000205002, 
VK_STRUCTURE_TYPE_CHECKPOINT_DATA_NV = 1000206000, VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_NV = 1000206001, + VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_2_NV = 1000314008, + VK_STRUCTURE_TYPE_CHECKPOINT_DATA_2_NV = 1000314009, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_FUNCTIONS_2_FEATURES_INTEL = 1000209000, VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_QUERY_CREATE_INFO_INTEL = 1000210000, VK_STRUCTURE_TYPE_INITIALIZE_PERFORMANCE_API_INFO_INTEL = 1000210001, @@ -718,11 +718,13 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_FEATURES_EXT = 1000218000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_PROPERTIES_EXT = 1000218001, VK_STRUCTURE_TYPE_RENDER_PASS_FRAGMENT_DENSITY_MAP_CREATE_INFO_EXT = 1000218002, + VK_STRUCTURE_TYPE_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_INFO_EXT = 1000044007, VK_STRUCTURE_TYPE_FRAGMENT_SHADING_RATE_ATTACHMENT_INFO_KHR = 1000226000, VK_STRUCTURE_TYPE_PIPELINE_FRAGMENT_SHADING_RATE_STATE_CREATE_INFO_KHR = 1000226001, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_PROPERTIES_KHR = 1000226002, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_FEATURES_KHR = 1000226003, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_KHR = 1000226004, + VK_STRUCTURE_TYPE_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_INFO_KHR = 1000044006, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_2_AMD = 1000227000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COHERENT_MEMORY_FEATURES_AMD = 1000229000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DYNAMIC_RENDERING_LOCAL_READ_FEATURES_KHR = 1000232000, @@ -848,8 +850,6 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_IMPORT_METAL_IO_SURFACE_INFO_EXT = 1000311009, VK_STRUCTURE_TYPE_EXPORT_METAL_SHARED_EVENT_INFO_EXT = 1000311010, VK_STRUCTURE_TYPE_IMPORT_METAL_SHARED_EVENT_INFO_EXT = 1000311011, - VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_2_NV = 1000314008, - VK_STRUCTURE_TYPE_CHECKPOINT_DATA_2_NV = 1000314009, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_BUFFER_PROPERTIES_EXT = 1000316000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_BUFFER_DENSITY_MAP_PROPERTIES_EXT = 1000316001, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_BUFFER_FEATURES_EXT = 1000316002, @@ -902,6 +902,7 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_CONTROL_FEATURES_EXT = 1000355000, VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_DEPTH_CLIP_CONTROL_CREATE_INFO_EXT = 1000355001, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIMITIVE_TOPOLOGY_LIST_RESTART_FEATURES_EXT = 1000356000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENT_MODE_FIFO_LATEST_READY_FEATURES_EXT = 1000361000, VK_STRUCTURE_TYPE_IMPORT_MEMORY_ZIRCON_HANDLE_INFO_FUCHSIA = 1000364000, VK_STRUCTURE_TYPE_MEMORY_ZIRCON_HANDLE_PROPERTIES_FUCHSIA = 1000364001, VK_STRUCTURE_TYPE_MEMORY_GET_ZIRCON_HANDLE_INFO_FUCHSIA = 1000364002, @@ -1143,9 +1144,30 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_FLOAT16_VECTOR_FEATURES_NV = 1000563000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_REPLICATED_COMPOSITES_FEATURES_EXT = 1000564000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_VALIDATION_FEATURES_NV = 1000568000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_FEATURES_EXT = 1000572000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_PROPERTIES_EXT = 1000572001, + VK_STRUCTURE_TYPE_GENERATED_COMMANDS_MEMORY_REQUIREMENTS_INFO_EXT = 1000572002, + VK_STRUCTURE_TYPE_INDIRECT_EXECUTION_SET_CREATE_INFO_EXT = 1000572003, + VK_STRUCTURE_TYPE_GENERATED_COMMANDS_INFO_EXT 
= 1000572004, + VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_EXT = 1000572006, + VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_TOKEN_EXT = 1000572007, + VK_STRUCTURE_TYPE_WRITE_INDIRECT_EXECUTION_SET_PIPELINE_EXT = 1000572008, + VK_STRUCTURE_TYPE_WRITE_INDIRECT_EXECUTION_SET_SHADER_EXT = 1000572009, + VK_STRUCTURE_TYPE_INDIRECT_EXECUTION_SET_PIPELINE_INFO_EXT = 1000572010, + VK_STRUCTURE_TYPE_INDIRECT_EXECUTION_SET_SHADER_INFO_EXT = 1000572011, + VK_STRUCTURE_TYPE_INDIRECT_EXECUTION_SET_SHADER_LAYOUT_INFO_EXT = 1000572012, + VK_STRUCTURE_TYPE_GENERATED_COMMANDS_PIPELINE_INFO_EXT = 1000572013, + VK_STRUCTURE_TYPE_GENERATED_COMMANDS_SHADER_INFO_EXT = 1000572014, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_ALIGNMENT_CONTROL_FEATURES_MESA = 1000575000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_ALIGNMENT_CONTROL_PROPERTIES_MESA = 1000575001, VK_STRUCTURE_TYPE_IMAGE_ALIGNMENT_CONTROL_CREATE_INFO_MESA = 1000575002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLAMP_CONTROL_FEATURES_EXT = 1000582000, + VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_DEPTH_CLAMP_CONTROL_CREATE_INFO_EXT = 1000582001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HDR_VIVID_FEATURES_HUAWEI = 1000590000, + VK_STRUCTURE_TYPE_HDR_VIVID_DYNAMIC_METADATA_HUAWEI = 1000590001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_2_FEATURES_NV = 1000593000, + VK_STRUCTURE_TYPE_COOPERATIVE_MATRIX_FLEXIBLE_DIMENSIONS_PROPERTIES_NV = 1000593001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_2_PROPERTIES_NV = 1000593002, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETER_FEATURES = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES, // VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT is a deprecated alias @@ -1155,7 +1177,6 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_PIPELINE_RENDERING_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_PIPELINE_RENDERING_CREATE_INFO, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DYNAMIC_RENDERING_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DYNAMIC_RENDERING_FEATURES, VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_RENDERING_INFO_KHR = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_RENDERING_INFO, - VK_STRUCTURE_TYPE_ATTACHMENT_SAMPLE_COUNT_INFO_NV = VK_STRUCTURE_TYPE_ATTACHMENT_SAMPLE_COUNT_INFO_AMD, VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES, @@ -1229,6 +1250,7 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2, VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2_KHR = VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2, VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO, + VK_STRUCTURE_TYPE_ATTACHMENT_SAMPLE_COUNT_INFO_NV = VK_STRUCTURE_TYPE_ATTACHMENT_SAMPLE_COUNT_INFO_AMD, VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO, VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO_KHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO, VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO_KHR = VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO, @@ -1436,6 +1458,8 @@ typedef enum VkObjectType { 
VK_OBJECT_TYPE_OPTICAL_FLOW_SESSION_NV = 1000464000, VK_OBJECT_TYPE_SHADER_EXT = 1000482000, VK_OBJECT_TYPE_PIPELINE_BINARY_KHR = 1000483000, + VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_EXT = 1000572000, + VK_OBJECT_TYPE_INDIRECT_EXECUTION_SET_EXT = 1000572001, VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR = VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE, VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR = VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION, VK_OBJECT_TYPE_PRIVATE_DATA_SLOT_EXT = VK_OBJECT_TYPE_PRIVATE_DATA_SLOT, @@ -2013,6 +2037,7 @@ typedef enum VkDynamicState { VK_DYNAMIC_STATE_COVERAGE_REDUCTION_MODE_NV = 1000455032, VK_DYNAMIC_STATE_ATTACHMENT_FEEDBACK_LOOP_ENABLE_EXT = 1000524000, VK_DYNAMIC_STATE_LINE_STIPPLE_KHR = 1000259000, + VK_DYNAMIC_STATE_DEPTH_CLAMP_RANGE_EXT = 1000582000, VK_DYNAMIC_STATE_LINE_STIPPLE_EXT = VK_DYNAMIC_STATE_LINE_STIPPLE_KHR, VK_DYNAMIC_STATE_CULL_MODE_EXT = VK_DYNAMIC_STATE_CULL_MODE, VK_DYNAMIC_STATE_FRONT_FACE_EXT = VK_DYNAMIC_STATE_FRONT_FACE, @@ -2248,6 +2273,8 @@ typedef enum VkAccessFlagBits { VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_NV = VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_KHR, VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_NV = VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_KHR, VK_ACCESS_NONE_KHR = VK_ACCESS_NONE, + VK_ACCESS_COMMAND_PREPROCESS_READ_BIT_EXT = VK_ACCESS_COMMAND_PREPROCESS_READ_BIT_NV, + VK_ACCESS_COMMAND_PREPROCESS_WRITE_BIT_EXT = VK_ACCESS_COMMAND_PREPROCESS_WRITE_BIT_NV, VK_ACCESS_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF } VkAccessFlagBits; typedef VkFlags VkAccessFlags; @@ -2472,6 +2499,7 @@ typedef enum VkPipelineStageFlagBits { VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV = VK_PIPELINE_STAGE_TASK_SHADER_BIT_EXT, VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV = VK_PIPELINE_STAGE_MESH_SHADER_BIT_EXT, VK_PIPELINE_STAGE_NONE_KHR = VK_PIPELINE_STAGE_NONE, + VK_PIPELINE_STAGE_COMMAND_PREPROCESS_BIT_EXT = VK_PIPELINE_STAGE_COMMAND_PREPROCESS_BIT_NV, VK_PIPELINE_STAGE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF } VkPipelineStageFlagBits; typedef VkFlags VkPipelineStageFlags; @@ -2624,8 +2652,6 @@ typedef enum VkPipelineCreateFlagBits { VK_PIPELINE_CREATE_DISPATCH_BASE_BIT = 0x00000010, VK_PIPELINE_CREATE_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT = 0x00000100, VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT = 0x00000200, - VK_PIPELINE_CREATE_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR = 0x00200000, - VK_PIPELINE_CREATE_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT = 0x00400000, VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_ANY_HIT_SHADERS_BIT_KHR = 0x00004000, VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_CLOSEST_HIT_SHADERS_BIT_KHR = 0x00008000, VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_MISS_SHADERS_BIT_KHR = 0x00010000, @@ -2634,6 +2660,8 @@ typedef enum VkPipelineCreateFlagBits { VK_PIPELINE_CREATE_RAY_TRACING_SKIP_AABBS_BIT_KHR = 0x00002000, VK_PIPELINE_CREATE_RAY_TRACING_SHADER_GROUP_HANDLE_CAPTURE_REPLAY_BIT_KHR = 0x00080000, VK_PIPELINE_CREATE_DEFER_COMPILE_BIT_NV = 0x00000020, + VK_PIPELINE_CREATE_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT = 0x00400000, + VK_PIPELINE_CREATE_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR = 0x00200000, VK_PIPELINE_CREATE_CAPTURE_STATISTICS_BIT_KHR = 0x00000040, VK_PIPELINE_CREATE_CAPTURE_INTERNAL_REPRESENTATIONS_BIT_KHR = 0x00000080, VK_PIPELINE_CREATE_INDIRECT_BINDABLE_BIT_NV = 0x00040000, @@ -2651,12 +2679,12 @@ typedef enum VkPipelineCreateFlagBits { VK_PIPELINE_CREATE_NO_PROTECTED_ACCESS_BIT_EXT = 0x08000000, VK_PIPELINE_CREATE_PROTECTED_ACCESS_ONLY_BIT_EXT = 0x40000000, VK_PIPELINE_CREATE_DISPATCH_BASE = 
VK_PIPELINE_CREATE_DISPATCH_BASE_BIT, - // VK_PIPELINE_RASTERIZATION_STATE_CREATE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR is a deprecated alias - VK_PIPELINE_RASTERIZATION_STATE_CREATE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR = VK_PIPELINE_CREATE_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR, - // VK_PIPELINE_RASTERIZATION_STATE_CREATE_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT is a deprecated alias - VK_PIPELINE_RASTERIZATION_STATE_CREATE_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT = VK_PIPELINE_CREATE_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT, VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT_KHR = VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT, VK_PIPELINE_CREATE_DISPATCH_BASE_KHR = VK_PIPELINE_CREATE_DISPATCH_BASE, + // VK_PIPELINE_RASTERIZATION_STATE_CREATE_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT is a deprecated alias + VK_PIPELINE_RASTERIZATION_STATE_CREATE_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT = VK_PIPELINE_CREATE_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT, + // VK_PIPELINE_RASTERIZATION_STATE_CREATE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR is a deprecated alias + VK_PIPELINE_RASTERIZATION_STATE_CREATE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR = VK_PIPELINE_CREATE_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR, VK_PIPELINE_CREATE_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT_EXT = VK_PIPELINE_CREATE_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT, VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT_EXT = VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT, VK_PIPELINE_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF @@ -6655,6 +6683,7 @@ static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_VIDEO_ENCODE_BIT_KHR = static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_TRANSFORM_FEEDBACK_BIT_EXT = 0x01000000ULL; static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_CONDITIONAL_RENDERING_BIT_EXT = 0x00040000ULL; static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_COMMAND_PREPROCESS_BIT_NV = 0x00020000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_COMMAND_PREPROCESS_BIT_EXT = 0x00020000ULL; static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR = 0x00400000ULL; static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_SHADING_RATE_IMAGE_BIT_NV = 0x00400000ULL; static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_KHR = 0x02000000ULL; @@ -6731,6 +6760,8 @@ static const VkAccessFlagBits2 VK_ACCESS_2_TRANSFORM_FEEDBACK_COUNTER_WRITE_BIT_ static const VkAccessFlagBits2 VK_ACCESS_2_CONDITIONAL_RENDERING_READ_BIT_EXT = 0x00100000ULL; static const VkAccessFlagBits2 VK_ACCESS_2_COMMAND_PREPROCESS_READ_BIT_NV = 0x00020000ULL; static const VkAccessFlagBits2 VK_ACCESS_2_COMMAND_PREPROCESS_WRITE_BIT_NV = 0x00040000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_COMMAND_PREPROCESS_READ_BIT_EXT = 0x00020000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_COMMAND_PREPROCESS_WRITE_BIT_EXT = 0x00040000ULL; static const VkAccessFlagBits2 VK_ACCESS_2_FRAGMENT_SHADING_RATE_ATTACHMENT_READ_BIT_KHR = 0x00800000ULL; static const VkAccessFlagBits2 VK_ACCESS_2_SHADING_RATE_IMAGE_READ_BIT_NV = 0x00800000ULL; static const VkAccessFlagBits2 VK_ACCESS_2_ACCELERATION_STRUCTURE_READ_BIT_KHR = 0x00200000ULL; @@ -7608,6 +7639,7 @@ typedef enum VkPresentModeKHR { VK_PRESENT_MODE_FIFO_RELAXED_KHR = 3, VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR = 1000111000, VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR = 1000111001, + VK_PRESENT_MODE_FIFO_LATEST_READY_EXT = 1000361000, VK_PRESENT_MODE_MAX_ENUM_KHR = 0x7FFFFFFF } 
VkPresentModeKHR; @@ -8876,38 +8908,6 @@ typedef VkPhysicalDeviceDynamicRenderingFeatures VkPhysicalDeviceDynamicRenderin typedef VkCommandBufferInheritanceRenderingInfo VkCommandBufferInheritanceRenderingInfoKHR; -typedef struct VkRenderingFragmentShadingRateAttachmentInfoKHR { - VkStructureType sType; - const void* pNext; - VkImageView imageView; - VkImageLayout imageLayout; - VkExtent2D shadingRateAttachmentTexelSize; -} VkRenderingFragmentShadingRateAttachmentInfoKHR; - -typedef struct VkRenderingFragmentDensityMapAttachmentInfoEXT { - VkStructureType sType; - const void* pNext; - VkImageView imageView; - VkImageLayout imageLayout; -} VkRenderingFragmentDensityMapAttachmentInfoEXT; - -typedef struct VkAttachmentSampleCountInfoAMD { - VkStructureType sType; - const void* pNext; - uint32_t colorAttachmentCount; - const VkSampleCountFlagBits* pColorAttachmentSamples; - VkSampleCountFlagBits depthStencilAttachmentSamples; -} VkAttachmentSampleCountInfoAMD; - -typedef VkAttachmentSampleCountInfoAMD VkAttachmentSampleCountInfoNV; - -typedef struct VkMultiviewPerViewAttributesInfoNVX { - VkStructureType sType; - const void* pNext; - VkBool32 perViewAttributes; - VkBool32 perViewAttributesPositionXOnly; -} VkMultiviewPerViewAttributesInfoNVX; - typedef void (VKAPI_PTR *PFN_vkCmdBeginRenderingKHR)(VkCommandBuffer commandBuffer, const VkRenderingInfo* pRenderingInfo); typedef void (VKAPI_PTR *PFN_vkCmdEndRenderingKHR)(VkCommandBuffer commandBuffer); @@ -10282,6 +10282,14 @@ typedef struct VkPhysicalDeviceFragmentShadingRateKHR { VkExtent2D fragmentSize; } VkPhysicalDeviceFragmentShadingRateKHR; +typedef struct VkRenderingFragmentShadingRateAttachmentInfoKHR { + VkStructureType sType; + const void* pNext; + VkImageView imageView; + VkImageLayout imageLayout; + VkExtent2D shadingRateAttachmentTexelSize; +} VkRenderingFragmentShadingRateAttachmentInfoKHR; + typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceFragmentShadingRatesKHR)(VkPhysicalDevice physicalDevice, uint32_t* pFragmentShadingRateCount, VkPhysicalDeviceFragmentShadingRateKHR* pFragmentShadingRates); typedef void (VKAPI_PTR *PFN_vkCmdSetFragmentShadingRateKHR)(VkCommandBuffer commandBuffer, const VkExtent2D* pFragmentSize, const VkFragmentShadingRateCombinerOpKHR combinerOps[2]); @@ -10863,27 +10871,12 @@ typedef VkCommandBufferSubmitInfo VkCommandBufferSubmitInfoKHR; typedef VkPhysicalDeviceSynchronization2Features VkPhysicalDeviceSynchronization2FeaturesKHR; -typedef struct VkQueueFamilyCheckpointProperties2NV { - VkStructureType sType; - void* pNext; - VkPipelineStageFlags2 checkpointExecutionStageMask; -} VkQueueFamilyCheckpointProperties2NV; - -typedef struct VkCheckpointData2NV { - VkStructureType sType; - void* pNext; - VkPipelineStageFlags2 stage; - void* pCheckpointMarker; -} VkCheckpointData2NV; - typedef void (VKAPI_PTR *PFN_vkCmdSetEvent2KHR)(VkCommandBuffer commandBuffer, VkEvent event, const VkDependencyInfo* pDependencyInfo); typedef void (VKAPI_PTR *PFN_vkCmdResetEvent2KHR)(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags2 stageMask); typedef void (VKAPI_PTR *PFN_vkCmdWaitEvents2KHR)(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent* pEvents, const VkDependencyInfo* pDependencyInfos); typedef void (VKAPI_PTR *PFN_vkCmdPipelineBarrier2KHR)(VkCommandBuffer commandBuffer, const VkDependencyInfo* pDependencyInfo); typedef void (VKAPI_PTR *PFN_vkCmdWriteTimestamp2KHR)(VkCommandBuffer commandBuffer, VkPipelineStageFlags2 stage, VkQueryPool queryPool, uint32_t query); typedef VkResult 
(VKAPI_PTR *PFN_vkQueueSubmit2KHR)(VkQueue queue, uint32_t submitCount, const VkSubmitInfo2* pSubmits, VkFence fence); -typedef void (VKAPI_PTR *PFN_vkCmdWriteBufferMarker2AMD)(VkCommandBuffer commandBuffer, VkPipelineStageFlags2 stage, VkBuffer dstBuffer, VkDeviceSize dstOffset, uint32_t marker); -typedef void (VKAPI_PTR *PFN_vkGetQueueCheckpointData2NV)(VkQueue queue, uint32_t* pCheckpointDataCount, VkCheckpointData2NV* pCheckpointData); #ifndef VK_NO_PROTOTYPES VKAPI_ATTR void VKAPI_CALL vkCmdSetEvent2KHR( @@ -10917,18 +10910,6 @@ VKAPI_ATTR VkResult VKAPI_CALL vkQueueSubmit2KHR( uint32_t submitCount, const VkSubmitInfo2* pSubmits, VkFence fence); - -VKAPI_ATTR void VKAPI_CALL vkCmdWriteBufferMarker2AMD( - VkCommandBuffer commandBuffer, - VkPipelineStageFlags2 stage, - VkBuffer dstBuffer, - VkDeviceSize dstOffset, - uint32_t marker); - -VKAPI_ATTR void VKAPI_CALL vkGetQueueCheckpointData2NV( - VkQueue queue, - uint32_t* pCheckpointDataCount, - VkCheckpointData2NV* pCheckpointData); #endif @@ -11171,6 +11152,9 @@ typedef VkFlags64 VkPipelineCreateFlagBits2KHR; static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_DISABLE_OPTIMIZATION_BIT_KHR = 0x00000001ULL; static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_ALLOW_DERIVATIVES_BIT_KHR = 0x00000002ULL; static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_DERIVATIVE_BIT_KHR = 0x00000004ULL; +#ifdef VK_ENABLE_BETA_EXTENSIONS +static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_EXECUTION_GRAPH_BIT_AMDX = 0x100000000ULL; +#endif static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_ENABLE_LEGACY_DITHERING_BIT_EXT = 0x400000000ULL; static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_VIEW_INDEX_FROM_DEVICE_INDEX_BIT_KHR = 0x00000008ULL; static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_DISPATCH_BASE_BIT_KHR = 0x00000010ULL; @@ -11201,6 +11185,7 @@ static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_PROTECTED_ACCESS_ static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_RAY_TRACING_DISPLACEMENT_MICROMAP_BIT_NV = 0x10000000ULL; static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_DESCRIPTOR_BUFFER_BIT_EXT = 0x20000000ULL; static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_CAPTURE_DATA_BIT_KHR = 0x80000000ULL; +static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_INDIRECT_BINDABLE_BIT_EXT = 0x4000000000ULL; typedef VkFlags64 VkBufferUsageFlags2KHR; @@ -11215,7 +11200,9 @@ static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_STORAGE_BUFFER_BIT_KHR static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_INDEX_BUFFER_BIT_KHR = 0x00000040ULL; static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_VERTEX_BUFFER_BIT_KHR = 0x00000080ULL; static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_INDIRECT_BUFFER_BIT_KHR = 0x00000100ULL; +#ifdef VK_ENABLE_BETA_EXTENSIONS static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_EXECUTION_GRAPH_SCRATCH_BIT_AMDX = 0x02000000ULL; +#endif static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_CONDITIONAL_RENDERING_BIT_EXT = 0x00000200ULL; static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_SHADER_BINDING_TABLE_BIT_KHR = 0x00000400ULL; static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_RAY_TRACING_BIT_NV = 0x00000400ULL; @@ -11233,6 +11220,7 @@ static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_RESOURCE_DESCRIPTOR_BUF static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_PUSH_DESCRIPTORS_DESCRIPTOR_BUFFER_BIT_EXT = 0x04000000ULL; static const 
VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_MICROMAP_BUILD_INPUT_READ_ONLY_BIT_EXT = 0x00800000ULL; static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_MICROMAP_STORAGE_BIT_EXT = 0x01000000ULL; +static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_PREPROCESS_BUFFER_BIT_EXT = 0x80000000ULL; typedef struct VkPhysicalDeviceMaintenance5FeaturesKHR { VkStructureType sType; @@ -12979,6 +12967,13 @@ typedef struct VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX { VkBool32 perViewPositionAllComponents; } VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX; +typedef struct VkMultiviewPerViewAttributesInfoNVX { + VkStructureType sType; + const void* pNext; + VkBool32 perViewAttributes; + VkBool32 perViewAttributesPositionXOnly; +} VkMultiviewPerViewAttributesInfoNVX; + // VK_NV_viewport_swizzle is a preprocessor guard. Do not pass it to API calls. @@ -13347,6 +13342,14 @@ typedef VkPhysicalDeviceSamplerFilterMinmaxProperties VkPhysicalDeviceSamplerFil #define VK_AMD_mixed_attachment_samples 1 #define VK_AMD_MIXED_ATTACHMENT_SAMPLES_SPEC_VERSION 1 #define VK_AMD_MIXED_ATTACHMENT_SAMPLES_EXTENSION_NAME "VK_AMD_mixed_attachment_samples" +typedef struct VkAttachmentSampleCountInfoAMD { + VkStructureType sType; + const void* pNext; + uint32_t colorAttachmentCount; + const VkSampleCountFlagBits* pColorAttachmentSamples; + VkSampleCountFlagBits depthStencilAttachmentSamples; +} VkAttachmentSampleCountInfoAMD; + // VK_AMD_shader_fragment_mask is a preprocessor guard. Do not pass it to API calls. @@ -13526,6 +13529,8 @@ typedef struct VkPipelineCoverageModulationStateCreateInfoNV { const float* pCoverageModulationTable; } VkPipelineCoverageModulationStateCreateInfoNV; +typedef VkAttachmentSampleCountInfoAMD VkAttachmentSampleCountInfoNV; + // VK_NV_fill_rectangle is a preprocessor guard. Do not pass it to API calls. 
@@ -14283,6 +14288,7 @@ VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryHostPointerPropertiesEXT( #define VK_AMD_BUFFER_MARKER_SPEC_VERSION 1 #define VK_AMD_BUFFER_MARKER_EXTENSION_NAME "VK_AMD_buffer_marker" typedef void (VKAPI_PTR *PFN_vkCmdWriteBufferMarkerAMD)(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage, VkBuffer dstBuffer, VkDeviceSize dstOffset, uint32_t marker); +typedef void (VKAPI_PTR *PFN_vkCmdWriteBufferMarker2AMD)(VkCommandBuffer commandBuffer, VkPipelineStageFlags2 stage, VkBuffer dstBuffer, VkDeviceSize dstOffset, uint32_t marker); #ifndef VK_NO_PROTOTYPES VKAPI_ATTR void VKAPI_CALL vkCmdWriteBufferMarkerAMD( @@ -14291,6 +14297,13 @@ VKAPI_ATTR void VKAPI_CALL vkCmdWriteBufferMarkerAMD( VkBuffer dstBuffer, VkDeviceSize dstOffset, uint32_t marker); + +VKAPI_ATTR void VKAPI_CALL vkCmdWriteBufferMarker2AMD( + VkCommandBuffer commandBuffer, + VkPipelineStageFlags2 stage, + VkBuffer dstBuffer, + VkDeviceSize dstOffset, + uint32_t marker); #endif @@ -14561,8 +14574,22 @@ typedef struct VkCheckpointDataNV { void* pCheckpointMarker; } VkCheckpointDataNV; +typedef struct VkQueueFamilyCheckpointProperties2NV { + VkStructureType sType; + void* pNext; + VkPipelineStageFlags2 checkpointExecutionStageMask; +} VkQueueFamilyCheckpointProperties2NV; + +typedef struct VkCheckpointData2NV { + VkStructureType sType; + void* pNext; + VkPipelineStageFlags2 stage; + void* pCheckpointMarker; +} VkCheckpointData2NV; + typedef void (VKAPI_PTR *PFN_vkCmdSetCheckpointNV)(VkCommandBuffer commandBuffer, const void* pCheckpointMarker); typedef void (VKAPI_PTR *PFN_vkGetQueueCheckpointDataNV)(VkQueue queue, uint32_t* pCheckpointDataCount, VkCheckpointDataNV* pCheckpointData); +typedef void (VKAPI_PTR *PFN_vkGetQueueCheckpointData2NV)(VkQueue queue, uint32_t* pCheckpointDataCount, VkCheckpointData2NV* pCheckpointData); #ifndef VK_NO_PROTOTYPES VKAPI_ATTR void VKAPI_CALL vkCmdSetCheckpointNV( @@ -14573,6 +14600,11 @@ VKAPI_ATTR void VKAPI_CALL vkGetQueueCheckpointDataNV( VkQueue queue, uint32_t* pCheckpointDataCount, VkCheckpointDataNV* pCheckpointData); + +VKAPI_ATTR void VKAPI_CALL vkGetQueueCheckpointData2NV( + VkQueue queue, + uint32_t* pCheckpointDataCount, + VkCheckpointData2NV* pCheckpointData); #endif @@ -14794,6 +14826,13 @@ typedef struct VkRenderPassFragmentDensityMapCreateInfoEXT { VkAttachmentReference fragmentDensityMapAttachment; } VkRenderPassFragmentDensityMapCreateInfoEXT; +typedef struct VkRenderingFragmentDensityMapAttachmentInfoEXT { + VkStructureType sType; + const void* pNext; + VkImageView imageView; + VkImageLayout imageLayout; +} VkRenderingFragmentDensityMapAttachmentInfoEXT; + // VK_EXT_scalar_block_layout is a preprocessor guard. Do not pass it to API calls. @@ -17070,6 +17109,18 @@ typedef struct VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT { +// VK_EXT_present_mode_fifo_latest_ready is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_present_mode_fifo_latest_ready 1 +#define VK_EXT_PRESENT_MODE_FIFO_LATEST_READY_SPEC_VERSION 1 +#define VK_EXT_PRESENT_MODE_FIFO_LATEST_READY_EXTENSION_NAME "VK_EXT_present_mode_fifo_latest_ready" +typedef struct VkPhysicalDevicePresentModeFifoLatestReadyFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 presentModeFifoLatestReady; +} VkPhysicalDevicePresentModeFifoLatestReadyFeaturesEXT; + + + // VK_HUAWEI_subpass_shading is a preprocessor guard. Do not pass it to API calls. 
#define VK_HUAWEI_subpass_shading 1 #define VK_HUAWEI_SUBPASS_SHADING_SPEC_VERSION 3 @@ -18858,6 +18909,12 @@ typedef enum VkShaderCodeTypeEXT { VK_SHADER_CODE_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF } VkShaderCodeTypeEXT; +typedef enum VkDepthClampModeEXT { + VK_DEPTH_CLAMP_MODE_VIEWPORT_RANGE_EXT = 0, + VK_DEPTH_CLAMP_MODE_USER_DEFINED_RANGE_EXT = 1, + VK_DEPTH_CLAMP_MODE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkDepthClampModeEXT; + typedef enum VkShaderCreateFlagBitsEXT { VK_SHADER_CREATE_LINK_STAGE_BIT_EXT = 0x00000001, VK_SHADER_CREATE_ALLOW_VARYING_SUBGROUP_SIZE_BIT_EXT = 0x00000002, @@ -18866,6 +18923,7 @@ typedef enum VkShaderCreateFlagBitsEXT { VK_SHADER_CREATE_DISPATCH_BASE_BIT_EXT = 0x00000010, VK_SHADER_CREATE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_EXT = 0x00000020, VK_SHADER_CREATE_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT = 0x00000040, + VK_SHADER_CREATE_INDIRECT_BINDABLE_BIT_EXT = 0x00000080, VK_SHADER_CREATE_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF } VkShaderCreateFlagBitsEXT; typedef VkFlags VkShaderCreateFlagsEXT; @@ -18901,10 +18959,16 @@ typedef struct VkShaderCreateInfoEXT { typedef VkPipelineShaderStageRequiredSubgroupSizeCreateInfo VkShaderRequiredSubgroupSizeCreateInfoEXT; +typedef struct VkDepthClampRangeEXT { + float minDepthClamp; + float maxDepthClamp; +} VkDepthClampRangeEXT; + typedef VkResult (VKAPI_PTR *PFN_vkCreateShadersEXT)(VkDevice device, uint32_t createInfoCount, const VkShaderCreateInfoEXT* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkShaderEXT* pShaders); typedef void (VKAPI_PTR *PFN_vkDestroyShaderEXT)(VkDevice device, VkShaderEXT shader, const VkAllocationCallbacks* pAllocator); typedef VkResult (VKAPI_PTR *PFN_vkGetShaderBinaryDataEXT)(VkDevice device, VkShaderEXT shader, size_t* pDataSize, void* pData); typedef void (VKAPI_PTR *PFN_vkCmdBindShadersEXT)(VkCommandBuffer commandBuffer, uint32_t stageCount, const VkShaderStageFlagBits* pStages, const VkShaderEXT* pShaders); +typedef void (VKAPI_PTR *PFN_vkCmdSetDepthClampRangeEXT)(VkCommandBuffer commandBuffer, VkDepthClampModeEXT depthClampMode, const VkDepthClampRangeEXT* pDepthClampRange); #ifndef VK_NO_PROTOTYPES VKAPI_ATTR VkResult VKAPI_CALL vkCreateShadersEXT( @@ -18930,6 +18994,11 @@ VKAPI_ATTR void VKAPI_CALL vkCmdBindShadersEXT( uint32_t stageCount, const VkShaderStageFlagBits* pStages, const VkShaderEXT* pShaders); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthClampRangeEXT( + VkCommandBuffer commandBuffer, + VkDepthClampModeEXT depthClampMode, + const VkDepthClampRangeEXT* pDepthClampRange); #endif @@ -19511,6 +19580,286 @@ typedef struct VkPhysicalDeviceRayTracingValidationFeaturesNV { +// VK_EXT_device_generated_commands is a preprocessor guard. Do not pass it to API calls. 
+#define VK_EXT_device_generated_commands 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkIndirectExecutionSetEXT) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkIndirectCommandsLayoutEXT) +#define VK_EXT_DEVICE_GENERATED_COMMANDS_SPEC_VERSION 1 +#define VK_EXT_DEVICE_GENERATED_COMMANDS_EXTENSION_NAME "VK_EXT_device_generated_commands" + +typedef enum VkIndirectExecutionSetInfoTypeEXT { + VK_INDIRECT_EXECUTION_SET_INFO_TYPE_PIPELINES_EXT = 0, + VK_INDIRECT_EXECUTION_SET_INFO_TYPE_SHADER_OBJECTS_EXT = 1, + VK_INDIRECT_EXECUTION_SET_INFO_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkIndirectExecutionSetInfoTypeEXT; + +typedef enum VkIndirectCommandsTokenTypeEXT { + VK_INDIRECT_COMMANDS_TOKEN_TYPE_EXECUTION_SET_EXT = 0, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_PUSH_CONSTANT_EXT = 1, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_SEQUENCE_INDEX_EXT = 2, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_INDEX_BUFFER_EXT = 3, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_VERTEX_BUFFER_EXT = 4, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_EXT = 5, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_EXT = 6, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_COUNT_EXT = 7, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_COUNT_EXT = 8, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_DISPATCH_EXT = 9, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_MESH_TASKS_NV_EXT = 1000202002, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_MESH_TASKS_COUNT_NV_EXT = 1000202003, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_MESH_TASKS_EXT = 1000328000, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_MESH_TASKS_COUNT_EXT = 1000328001, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_TRACE_RAYS2_EXT = 1000386004, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkIndirectCommandsTokenTypeEXT; + +typedef enum VkIndirectCommandsInputModeFlagBitsEXT { + VK_INDIRECT_COMMANDS_INPUT_MODE_VULKAN_INDEX_BUFFER_EXT = 0x00000001, + VK_INDIRECT_COMMANDS_INPUT_MODE_DXGI_INDEX_BUFFER_EXT = 0x00000002, + VK_INDIRECT_COMMANDS_INPUT_MODE_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkIndirectCommandsInputModeFlagBitsEXT; +typedef VkFlags VkIndirectCommandsInputModeFlagsEXT; + +typedef enum VkIndirectCommandsLayoutUsageFlagBitsEXT { + VK_INDIRECT_COMMANDS_LAYOUT_USAGE_EXPLICIT_PREPROCESS_BIT_EXT = 0x00000001, + VK_INDIRECT_COMMANDS_LAYOUT_USAGE_UNORDERED_SEQUENCES_BIT_EXT = 0x00000002, + VK_INDIRECT_COMMANDS_LAYOUT_USAGE_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkIndirectCommandsLayoutUsageFlagBitsEXT; +typedef VkFlags VkIndirectCommandsLayoutUsageFlagsEXT; +typedef struct VkPhysicalDeviceDeviceGeneratedCommandsFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 deviceGeneratedCommands; + VkBool32 dynamicGeneratedPipelineLayout; +} VkPhysicalDeviceDeviceGeneratedCommandsFeaturesEXT; + +typedef struct VkPhysicalDeviceDeviceGeneratedCommandsPropertiesEXT { + VkStructureType sType; + void* pNext; + uint32_t maxIndirectPipelineCount; + uint32_t maxIndirectShaderObjectCount; + uint32_t maxIndirectSequenceCount; + uint32_t maxIndirectCommandsTokenCount; + uint32_t maxIndirectCommandsTokenOffset; + uint32_t maxIndirectCommandsIndirectStride; + VkIndirectCommandsInputModeFlagsEXT supportedIndirectCommandsInputModes; + VkShaderStageFlags supportedIndirectCommandsShaderStages; + VkShaderStageFlags supportedIndirectCommandsShaderStagesPipelineBinding; + VkShaderStageFlags supportedIndirectCommandsShaderStagesShaderBinding; + VkBool32 deviceGeneratedCommandsTransformFeedback; + VkBool32 deviceGeneratedCommandsMultiDrawIndirectCount; +} VkPhysicalDeviceDeviceGeneratedCommandsPropertiesEXT; + +typedef struct VkGeneratedCommandsMemoryRequirementsInfoEXT { + VkStructureType sType; + const 
void* pNext; + VkIndirectExecutionSetEXT indirectExecutionSet; + VkIndirectCommandsLayoutEXT indirectCommandsLayout; + uint32_t maxSequenceCount; + uint32_t maxDrawCount; +} VkGeneratedCommandsMemoryRequirementsInfoEXT; + +typedef struct VkIndirectExecutionSetPipelineInfoEXT { + VkStructureType sType; + const void* pNext; + VkPipeline initialPipeline; + uint32_t maxPipelineCount; +} VkIndirectExecutionSetPipelineInfoEXT; + +typedef struct VkIndirectExecutionSetShaderLayoutInfoEXT { + VkStructureType sType; + const void* pNext; + uint32_t setLayoutCount; + const VkDescriptorSetLayout* pSetLayouts; +} VkIndirectExecutionSetShaderLayoutInfoEXT; + +typedef struct VkIndirectExecutionSetShaderInfoEXT { + VkStructureType sType; + const void* pNext; + uint32_t shaderCount; + const VkShaderEXT* pInitialShaders; + const VkIndirectExecutionSetShaderLayoutInfoEXT* pSetLayoutInfos; + uint32_t maxShaderCount; + uint32_t pushConstantRangeCount; + const VkPushConstantRange* pPushConstantRanges; +} VkIndirectExecutionSetShaderInfoEXT; + +typedef union VkIndirectExecutionSetInfoEXT { + const VkIndirectExecutionSetPipelineInfoEXT* pPipelineInfo; + const VkIndirectExecutionSetShaderInfoEXT* pShaderInfo; +} VkIndirectExecutionSetInfoEXT; + +typedef struct VkIndirectExecutionSetCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkIndirectExecutionSetInfoTypeEXT type; + VkIndirectExecutionSetInfoEXT info; +} VkIndirectExecutionSetCreateInfoEXT; + +typedef struct VkGeneratedCommandsInfoEXT { + VkStructureType sType; + const void* pNext; + VkShaderStageFlags shaderStages; + VkIndirectExecutionSetEXT indirectExecutionSet; + VkIndirectCommandsLayoutEXT indirectCommandsLayout; + VkDeviceAddress indirectAddress; + VkDeviceSize indirectAddressSize; + VkDeviceAddress preprocessAddress; + VkDeviceSize preprocessSize; + uint32_t maxSequenceCount; + VkDeviceAddress sequenceCountAddress; + uint32_t maxDrawCount; +} VkGeneratedCommandsInfoEXT; + +typedef struct VkWriteIndirectExecutionSetPipelineEXT { + VkStructureType sType; + const void* pNext; + uint32_t index; + VkPipeline pipeline; +} VkWriteIndirectExecutionSetPipelineEXT; + +typedef struct VkIndirectCommandsPushConstantTokenEXT { + VkPushConstantRange updateRange; +} VkIndirectCommandsPushConstantTokenEXT; + +typedef struct VkIndirectCommandsVertexBufferTokenEXT { + uint32_t vertexBindingUnit; +} VkIndirectCommandsVertexBufferTokenEXT; + +typedef struct VkIndirectCommandsIndexBufferTokenEXT { + VkIndirectCommandsInputModeFlagBitsEXT mode; +} VkIndirectCommandsIndexBufferTokenEXT; + +typedef struct VkIndirectCommandsExecutionSetTokenEXT { + VkIndirectExecutionSetInfoTypeEXT type; + VkShaderStageFlags shaderStages; +} VkIndirectCommandsExecutionSetTokenEXT; + +typedef union VkIndirectCommandsTokenDataEXT { + const VkIndirectCommandsPushConstantTokenEXT* pPushConstant; + const VkIndirectCommandsVertexBufferTokenEXT* pVertexBuffer; + const VkIndirectCommandsIndexBufferTokenEXT* pIndexBuffer; + const VkIndirectCommandsExecutionSetTokenEXT* pExecutionSet; +} VkIndirectCommandsTokenDataEXT; + +typedef struct VkIndirectCommandsLayoutTokenEXT { + VkStructureType sType; + const void* pNext; + VkIndirectCommandsTokenTypeEXT type; + VkIndirectCommandsTokenDataEXT data; + uint32_t offset; +} VkIndirectCommandsLayoutTokenEXT; + +typedef struct VkIndirectCommandsLayoutCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkIndirectCommandsLayoutUsageFlagsEXT flags; + VkShaderStageFlags shaderStages; + uint32_t indirectStride; + VkPipelineLayout pipelineLayout; 
+ uint32_t tokenCount; + const VkIndirectCommandsLayoutTokenEXT* pTokens; +} VkIndirectCommandsLayoutCreateInfoEXT; + +typedef struct VkDrawIndirectCountIndirectCommandEXT { + VkDeviceAddress bufferAddress; + uint32_t stride; + uint32_t commandCount; +} VkDrawIndirectCountIndirectCommandEXT; + +typedef struct VkBindVertexBufferIndirectCommandEXT { + VkDeviceAddress bufferAddress; + uint32_t size; + uint32_t stride; +} VkBindVertexBufferIndirectCommandEXT; + +typedef struct VkBindIndexBufferIndirectCommandEXT { + VkDeviceAddress bufferAddress; + uint32_t size; + VkIndexType indexType; +} VkBindIndexBufferIndirectCommandEXT; + +typedef struct VkGeneratedCommandsPipelineInfoEXT { + VkStructureType sType; + void* pNext; + VkPipeline pipeline; +} VkGeneratedCommandsPipelineInfoEXT; + +typedef struct VkGeneratedCommandsShaderInfoEXT { + VkStructureType sType; + void* pNext; + uint32_t shaderCount; + const VkShaderEXT* pShaders; +} VkGeneratedCommandsShaderInfoEXT; + +typedef struct VkWriteIndirectExecutionSetShaderEXT { + VkStructureType sType; + const void* pNext; + uint32_t index; + VkShaderEXT shader; +} VkWriteIndirectExecutionSetShaderEXT; + +typedef void (VKAPI_PTR *PFN_vkGetGeneratedCommandsMemoryRequirementsEXT)(VkDevice device, const VkGeneratedCommandsMemoryRequirementsInfoEXT* pInfo, VkMemoryRequirements2* pMemoryRequirements); +typedef void (VKAPI_PTR *PFN_vkCmdPreprocessGeneratedCommandsEXT)(VkCommandBuffer commandBuffer, const VkGeneratedCommandsInfoEXT* pGeneratedCommandsInfo, VkCommandBuffer stateCommandBuffer); +typedef void (VKAPI_PTR *PFN_vkCmdExecuteGeneratedCommandsEXT)(VkCommandBuffer commandBuffer, VkBool32 isPreprocessed, const VkGeneratedCommandsInfoEXT* pGeneratedCommandsInfo); +typedef VkResult (VKAPI_PTR *PFN_vkCreateIndirectCommandsLayoutEXT)(VkDevice device, const VkIndirectCommandsLayoutCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkIndirectCommandsLayoutEXT* pIndirectCommandsLayout); +typedef void (VKAPI_PTR *PFN_vkDestroyIndirectCommandsLayoutEXT)(VkDevice device, VkIndirectCommandsLayoutEXT indirectCommandsLayout, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkCreateIndirectExecutionSetEXT)(VkDevice device, const VkIndirectExecutionSetCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkIndirectExecutionSetEXT* pIndirectExecutionSet); +typedef void (VKAPI_PTR *PFN_vkDestroyIndirectExecutionSetEXT)(VkDevice device, VkIndirectExecutionSetEXT indirectExecutionSet, const VkAllocationCallbacks* pAllocator); +typedef void (VKAPI_PTR *PFN_vkUpdateIndirectExecutionSetPipelineEXT)(VkDevice device, VkIndirectExecutionSetEXT indirectExecutionSet, uint32_t executionSetWriteCount, const VkWriteIndirectExecutionSetPipelineEXT* pExecutionSetWrites); +typedef void (VKAPI_PTR *PFN_vkUpdateIndirectExecutionSetShaderEXT)(VkDevice device, VkIndirectExecutionSetEXT indirectExecutionSet, uint32_t executionSetWriteCount, const VkWriteIndirectExecutionSetShaderEXT* pExecutionSetWrites); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkGetGeneratedCommandsMemoryRequirementsEXT( + VkDevice device, + const VkGeneratedCommandsMemoryRequirementsInfoEXT* pInfo, + VkMemoryRequirements2* pMemoryRequirements); + +VKAPI_ATTR void VKAPI_CALL vkCmdPreprocessGeneratedCommandsEXT( + VkCommandBuffer commandBuffer, + const VkGeneratedCommandsInfoEXT* pGeneratedCommandsInfo, + VkCommandBuffer stateCommandBuffer); + +VKAPI_ATTR void VKAPI_CALL vkCmdExecuteGeneratedCommandsEXT( + VkCommandBuffer commandBuffer, + VkBool32 
isPreprocessed, + const VkGeneratedCommandsInfoEXT* pGeneratedCommandsInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateIndirectCommandsLayoutEXT( + VkDevice device, + const VkIndirectCommandsLayoutCreateInfoEXT* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkIndirectCommandsLayoutEXT* pIndirectCommandsLayout); + +VKAPI_ATTR void VKAPI_CALL vkDestroyIndirectCommandsLayoutEXT( + VkDevice device, + VkIndirectCommandsLayoutEXT indirectCommandsLayout, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateIndirectExecutionSetEXT( + VkDevice device, + const VkIndirectExecutionSetCreateInfoEXT* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkIndirectExecutionSetEXT* pIndirectExecutionSet); + +VKAPI_ATTR void VKAPI_CALL vkDestroyIndirectExecutionSetEXT( + VkDevice device, + VkIndirectExecutionSetEXT indirectExecutionSet, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR void VKAPI_CALL vkUpdateIndirectExecutionSetPipelineEXT( + VkDevice device, + VkIndirectExecutionSetEXT indirectExecutionSet, + uint32_t executionSetWriteCount, + const VkWriteIndirectExecutionSetPipelineEXT* pExecutionSetWrites); + +VKAPI_ATTR void VKAPI_CALL vkUpdateIndirectExecutionSetShaderEXT( + VkDevice device, + VkIndirectExecutionSetEXT indirectExecutionSet, + uint32_t executionSetWriteCount, + const VkWriteIndirectExecutionSetShaderEXT* pExecutionSetWrites); +#endif + + // VK_MESA_image_alignment_control is a preprocessor guard. Do not pass it to API calls. #define VK_MESA_image_alignment_control 1 #define VK_MESA_IMAGE_ALIGNMENT_CONTROL_SPEC_VERSION 1 @@ -19535,6 +19884,93 @@ typedef struct VkImageAlignmentControlCreateInfoMESA { +// VK_EXT_depth_clamp_control is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_depth_clamp_control 1 +#define VK_EXT_DEPTH_CLAMP_CONTROL_SPEC_VERSION 1 +#define VK_EXT_DEPTH_CLAMP_CONTROL_EXTENSION_NAME "VK_EXT_depth_clamp_control" +typedef struct VkPhysicalDeviceDepthClampControlFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 depthClampControl; +} VkPhysicalDeviceDepthClampControlFeaturesEXT; + +typedef struct VkPipelineViewportDepthClampControlCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkDepthClampModeEXT depthClampMode; + const VkDepthClampRangeEXT* pDepthClampRange; +} VkPipelineViewportDepthClampControlCreateInfoEXT; + + + +// VK_HUAWEI_hdr_vivid is a preprocessor guard. Do not pass it to API calls. +#define VK_HUAWEI_hdr_vivid 1 +#define VK_HUAWEI_HDR_VIVID_SPEC_VERSION 1 +#define VK_HUAWEI_HDR_VIVID_EXTENSION_NAME "VK_HUAWEI_hdr_vivid" +typedef struct VkPhysicalDeviceHdrVividFeaturesHUAWEI { + VkStructureType sType; + void* pNext; + VkBool32 hdrVivid; +} VkPhysicalDeviceHdrVividFeaturesHUAWEI; + +typedef struct VkHdrVividDynamicMetadataHUAWEI { + VkStructureType sType; + const void* pNext; + size_t dynamicMetadataSize; + const void* pDynamicMetadata; +} VkHdrVividDynamicMetadataHUAWEI; + + + +// VK_NV_cooperative_matrix2 is a preprocessor guard. Do not pass it to API calls. 
+#define VK_NV_cooperative_matrix2 1 +#define VK_NV_COOPERATIVE_MATRIX_2_SPEC_VERSION 1 +#define VK_NV_COOPERATIVE_MATRIX_2_EXTENSION_NAME "VK_NV_cooperative_matrix2" +typedef struct VkCooperativeMatrixFlexibleDimensionsPropertiesNV { + VkStructureType sType; + void* pNext; + uint32_t MGranularity; + uint32_t NGranularity; + uint32_t KGranularity; + VkComponentTypeKHR AType; + VkComponentTypeKHR BType; + VkComponentTypeKHR CType; + VkComponentTypeKHR ResultType; + VkBool32 saturatingAccumulation; + VkScopeKHR scope; + uint32_t workgroupInvocations; +} VkCooperativeMatrixFlexibleDimensionsPropertiesNV; + +typedef struct VkPhysicalDeviceCooperativeMatrix2FeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 cooperativeMatrixWorkgroupScope; + VkBool32 cooperativeMatrixFlexibleDimensions; + VkBool32 cooperativeMatrixReductions; + VkBool32 cooperativeMatrixConversions; + VkBool32 cooperativeMatrixPerElementOperations; + VkBool32 cooperativeMatrixTensorAddressing; + VkBool32 cooperativeMatrixBlockLoads; +} VkPhysicalDeviceCooperativeMatrix2FeaturesNV; + +typedef struct VkPhysicalDeviceCooperativeMatrix2PropertiesNV { + VkStructureType sType; + void* pNext; + uint32_t cooperativeMatrixWorkgroupScopeMaxWorkgroupSize; + uint32_t cooperativeMatrixFlexibleDimensionsMaxDimension; + uint32_t cooperativeMatrixWorkgroupScopeReservedSharedMemory; +} VkPhysicalDeviceCooperativeMatrix2PropertiesNV; + +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV)(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkCooperativeMatrixFlexibleDimensionsPropertiesNV* pProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV( + VkPhysicalDevice physicalDevice, + uint32_t* pPropertyCount, + VkCooperativeMatrixFlexibleDimensionsPropertiesNV* pProperties); +#endif + + // VK_KHR_acceleration_structure is a preprocessor guard. Do not pass it to API calls. 
#define VK_KHR_acceleration_structure 1 #define VK_KHR_ACCELERATION_STRUCTURE_SPEC_VERSION 13 diff --git a/third_party/vulkan/vulkan_enums.hpp b/third_party/vulkan/vulkan_enums.hpp index fe4f89d..5209b44 100644 --- a/third_party/vulkan/vulkan_enums.hpp +++ b/third_party/vulkan/vulkan_enums.hpp @@ -766,11 +766,6 @@ namespace VULKAN_HPP_NAMESPACE eVideoDecodeH264SessionParametersAddInfoKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_SESSION_PARAMETERS_ADD_INFO_KHR, eVideoDecodeH264DpbSlotInfoKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_DPB_SLOT_INFO_KHR, eTextureLodGatherFormatPropertiesAMD = VK_STRUCTURE_TYPE_TEXTURE_LOD_GATHER_FORMAT_PROPERTIES_AMD, - eRenderingFragmentShadingRateAttachmentInfoKHR = VK_STRUCTURE_TYPE_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_INFO_KHR, - eRenderingFragmentDensityMapAttachmentInfoEXT = VK_STRUCTURE_TYPE_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_INFO_EXT, - eAttachmentSampleCountInfoAMD = VK_STRUCTURE_TYPE_ATTACHMENT_SAMPLE_COUNT_INFO_AMD, - eAttachmentSampleCountInfoNV = VK_STRUCTURE_TYPE_ATTACHMENT_SAMPLE_COUNT_INFO_NV, - eMultiviewPerViewAttributesInfoNVX = VK_STRUCTURE_TYPE_MULTIVIEW_PER_VIEW_ATTRIBUTES_INFO_NVX, #if defined( VK_USE_PLATFORM_GGP ) eStreamDescriptorSurfaceCreateInfoGGP = VK_STRUCTURE_TYPE_STREAM_DESCRIPTOR_SURFACE_CREATE_INFO_GGP, #endif /*VK_USE_PLATFORM_GGP*/ @@ -822,6 +817,7 @@ namespace VULKAN_HPP_NAMESPACE eSwapchainCounterCreateInfoEXT = VK_STRUCTURE_TYPE_SWAPCHAIN_COUNTER_CREATE_INFO_EXT, ePresentTimesInfoGOOGLE = VK_STRUCTURE_TYPE_PRESENT_TIMES_INFO_GOOGLE, ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_ATTRIBUTES_PROPERTIES_NVX, + eMultiviewPerViewAttributesInfoNVX = VK_STRUCTURE_TYPE_MULTIVIEW_PER_VIEW_ATTRIBUTES_INFO_NVX, ePipelineViewportSwizzleStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SWIZZLE_STATE_CREATE_INFO_NV, ePhysicalDeviceDiscardRectanglePropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DISCARD_RECTANGLE_PROPERTIES_EXT, ePipelineDiscardRectangleStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_DISCARD_RECTANGLE_STATE_CREATE_INFO_EXT, @@ -881,6 +877,8 @@ namespace VULKAN_HPP_NAMESPACE eExecutionGraphPipelineCreateInfoAMDX = VK_STRUCTURE_TYPE_EXECUTION_GRAPH_PIPELINE_CREATE_INFO_AMDX, ePipelineShaderStageNodeCreateInfoAMDX = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_NODE_CREATE_INFO_AMDX, #endif /*VK_ENABLE_BETA_EXTENSIONS*/ + eAttachmentSampleCountInfoAMD = VK_STRUCTURE_TYPE_ATTACHMENT_SAMPLE_COUNT_INFO_AMD, + eAttachmentSampleCountInfoNV = VK_STRUCTURE_TYPE_ATTACHMENT_SAMPLE_COUNT_INFO_NV, eSampleLocationsInfoEXT = VK_STRUCTURE_TYPE_SAMPLE_LOCATIONS_INFO_EXT, eRenderPassSampleLocationsBeginInfoEXT = VK_STRUCTURE_TYPE_RENDER_PASS_SAMPLE_LOCATIONS_BEGIN_INFO_EXT, ePipelineSampleLocationsStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_SAMPLE_LOCATIONS_STATE_CREATE_INFO_EXT, @@ -975,6 +973,8 @@ namespace VULKAN_HPP_NAMESPACE ePhysicalDeviceExclusiveScissorFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXCLUSIVE_SCISSOR_FEATURES_NV, eCheckpointDataNV = VK_STRUCTURE_TYPE_CHECKPOINT_DATA_NV, eQueueFamilyCheckpointPropertiesNV = VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_NV, + eQueueFamilyCheckpointProperties2NV = VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_2_NV, + eCheckpointData2NV = VK_STRUCTURE_TYPE_CHECKPOINT_DATA_2_NV, ePhysicalDeviceShaderIntegerFunctions2FeaturesINTEL = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_FUNCTIONS_2_FEATURES_INTEL, eQueryPoolPerformanceQueryCreateInfoINTEL = 
VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_QUERY_CREATE_INFO_INTEL, eQueryPoolCreateInfoINTEL = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO_INTEL, @@ -995,11 +995,13 @@ namespace VULKAN_HPP_NAMESPACE ePhysicalDeviceFragmentDensityMapFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_FEATURES_EXT, ePhysicalDeviceFragmentDensityMapPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_PROPERTIES_EXT, eRenderPassFragmentDensityMapCreateInfoEXT = VK_STRUCTURE_TYPE_RENDER_PASS_FRAGMENT_DENSITY_MAP_CREATE_INFO_EXT, + eRenderingFragmentDensityMapAttachmentInfoEXT = VK_STRUCTURE_TYPE_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_INFO_EXT, eFragmentShadingRateAttachmentInfoKHR = VK_STRUCTURE_TYPE_FRAGMENT_SHADING_RATE_ATTACHMENT_INFO_KHR, ePipelineFragmentShadingRateStateCreateInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_FRAGMENT_SHADING_RATE_STATE_CREATE_INFO_KHR, ePhysicalDeviceFragmentShadingRatePropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_PROPERTIES_KHR, ePhysicalDeviceFragmentShadingRateFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_FEATURES_KHR, ePhysicalDeviceFragmentShadingRateKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_KHR, + eRenderingFragmentShadingRateAttachmentInfoKHR = VK_STRUCTURE_TYPE_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_INFO_KHR, ePhysicalDeviceShaderCoreProperties2AMD = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_2_AMD, ePhysicalDeviceCoherentMemoryFeaturesAMD = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COHERENT_MEMORY_FEATURES_AMD, ePhysicalDeviceDynamicRenderingLocalReadFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DYNAMIC_RENDERING_LOCAL_READ_FEATURES_KHR, @@ -1133,8 +1135,6 @@ namespace VULKAN_HPP_NAMESPACE eExportMetalSharedEventInfoEXT = VK_STRUCTURE_TYPE_EXPORT_METAL_SHARED_EVENT_INFO_EXT, eImportMetalSharedEventInfoEXT = VK_STRUCTURE_TYPE_IMPORT_METAL_SHARED_EVENT_INFO_EXT, #endif /*VK_USE_PLATFORM_METAL_EXT*/ - eQueueFamilyCheckpointProperties2NV = VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_2_NV, - eCheckpointData2NV = VK_STRUCTURE_TYPE_CHECKPOINT_DATA_2_NV, ePhysicalDeviceDescriptorBufferPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_BUFFER_PROPERTIES_EXT, ePhysicalDeviceDescriptorBufferDensityMapPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_BUFFER_DENSITY_MAP_PROPERTIES_EXT, ePhysicalDeviceDescriptorBufferFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_BUFFER_FEATURES_EXT, @@ -1190,6 +1190,7 @@ namespace VULKAN_HPP_NAMESPACE ePhysicalDeviceDepthClipControlFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_CONTROL_FEATURES_EXT, ePipelineViewportDepthClipControlCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_DEPTH_CLIP_CONTROL_CREATE_INFO_EXT, ePhysicalDevicePrimitiveTopologyListRestartFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIMITIVE_TOPOLOGY_LIST_RESTART_FEATURES_EXT, + ePhysicalDevicePresentModeFifoLatestReadyFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENT_MODE_FIFO_LATEST_READY_FEATURES_EXT, #if defined( VK_USE_PLATFORM_FUCHSIA ) eImportMemoryZirconHandleInfoFUCHSIA = VK_STRUCTURE_TYPE_IMPORT_MEMORY_ZIRCON_HANDLE_INFO_FUCHSIA, eMemoryZirconHandlePropertiesFUCHSIA = VK_STRUCTURE_TYPE_MEMORY_ZIRCON_HANDLE_PROPERTIES_FUCHSIA, @@ -1448,9 +1449,30 @@ namespace VULKAN_HPP_NAMESPACE ePhysicalDeviceShaderAtomicFloat16VectorFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_FLOAT16_VECTOR_FEATURES_NV, ePhysicalDeviceShaderReplicatedCompositesFeaturesEXT = 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_REPLICATED_COMPOSITES_FEATURES_EXT, ePhysicalDeviceRayTracingValidationFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_VALIDATION_FEATURES_NV, + ePhysicalDeviceDeviceGeneratedCommandsFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_FEATURES_EXT, + ePhysicalDeviceDeviceGeneratedCommandsPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_PROPERTIES_EXT, + eGeneratedCommandsMemoryRequirementsInfoEXT = VK_STRUCTURE_TYPE_GENERATED_COMMANDS_MEMORY_REQUIREMENTS_INFO_EXT, + eIndirectExecutionSetCreateInfoEXT = VK_STRUCTURE_TYPE_INDIRECT_EXECUTION_SET_CREATE_INFO_EXT, + eGeneratedCommandsInfoEXT = VK_STRUCTURE_TYPE_GENERATED_COMMANDS_INFO_EXT, + eIndirectCommandsLayoutCreateInfoEXT = VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_EXT, + eIndirectCommandsLayoutTokenEXT = VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_TOKEN_EXT, + eWriteIndirectExecutionSetPipelineEXT = VK_STRUCTURE_TYPE_WRITE_INDIRECT_EXECUTION_SET_PIPELINE_EXT, + eWriteIndirectExecutionSetShaderEXT = VK_STRUCTURE_TYPE_WRITE_INDIRECT_EXECUTION_SET_SHADER_EXT, + eIndirectExecutionSetPipelineInfoEXT = VK_STRUCTURE_TYPE_INDIRECT_EXECUTION_SET_PIPELINE_INFO_EXT, + eIndirectExecutionSetShaderInfoEXT = VK_STRUCTURE_TYPE_INDIRECT_EXECUTION_SET_SHADER_INFO_EXT, + eIndirectExecutionSetShaderLayoutInfoEXT = VK_STRUCTURE_TYPE_INDIRECT_EXECUTION_SET_SHADER_LAYOUT_INFO_EXT, + eGeneratedCommandsPipelineInfoEXT = VK_STRUCTURE_TYPE_GENERATED_COMMANDS_PIPELINE_INFO_EXT, + eGeneratedCommandsShaderInfoEXT = VK_STRUCTURE_TYPE_GENERATED_COMMANDS_SHADER_INFO_EXT, ePhysicalDeviceImageAlignmentControlFeaturesMESA = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_ALIGNMENT_CONTROL_FEATURES_MESA, ePhysicalDeviceImageAlignmentControlPropertiesMESA = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_ALIGNMENT_CONTROL_PROPERTIES_MESA, - eImageAlignmentControlCreateInfoMESA = VK_STRUCTURE_TYPE_IMAGE_ALIGNMENT_CONTROL_CREATE_INFO_MESA + eImageAlignmentControlCreateInfoMESA = VK_STRUCTURE_TYPE_IMAGE_ALIGNMENT_CONTROL_CREATE_INFO_MESA, + ePhysicalDeviceDepthClampControlFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLAMP_CONTROL_FEATURES_EXT, + ePipelineViewportDepthClampControlCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_DEPTH_CLAMP_CONTROL_CREATE_INFO_EXT, + ePhysicalDeviceHdrVividFeaturesHUAWEI = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HDR_VIVID_FEATURES_HUAWEI, + eHdrVividDynamicMetadataHUAWEI = VK_STRUCTURE_TYPE_HDR_VIVID_DYNAMIC_METADATA_HUAWEI, + ePhysicalDeviceCooperativeMatrix2FeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_2_FEATURES_NV, + eCooperativeMatrixFlexibleDimensionsPropertiesNV = VK_STRUCTURE_TYPE_COOPERATIVE_MATRIX_FLEXIBLE_DIMENSIONS_PROPERTIES_NV, + ePhysicalDeviceCooperativeMatrix2PropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_2_PROPERTIES_NV }; enum class PipelineCacheHeaderVersion @@ -1515,10 +1537,12 @@ namespace VULKAN_HPP_NAMESPACE #if defined( VK_USE_PLATFORM_FUCHSIA ) eBufferCollectionFUCHSIA = VK_OBJECT_TYPE_BUFFER_COLLECTION_FUCHSIA, #endif /*VK_USE_PLATFORM_FUCHSIA*/ - eMicromapEXT = VK_OBJECT_TYPE_MICROMAP_EXT, - eOpticalFlowSessionNV = VK_OBJECT_TYPE_OPTICAL_FLOW_SESSION_NV, - eShaderEXT = VK_OBJECT_TYPE_SHADER_EXT, - ePipelineBinaryKHR = VK_OBJECT_TYPE_PIPELINE_BINARY_KHR + eMicromapEXT = VK_OBJECT_TYPE_MICROMAP_EXT, + eOpticalFlowSessionNV = VK_OBJECT_TYPE_OPTICAL_FLOW_SESSION_NV, + eShaderEXT = VK_OBJECT_TYPE_SHADER_EXT, + ePipelineBinaryKHR = VK_OBJECT_TYPE_PIPELINE_BINARY_KHR, + 
eIndirectCommandsLayoutEXT = VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_EXT, + eIndirectExecutionSetEXT = VK_OBJECT_TYPE_INDIRECT_EXECUTION_SET_EXT }; enum class VendorId @@ -2195,6 +2219,7 @@ namespace VULKAN_HPP_NAMESPACE eFragmentShadingRateAttachmentKHR = VK_PIPELINE_STAGE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR, eShadingRateImageNV = VK_PIPELINE_STAGE_SHADING_RATE_IMAGE_BIT_NV, eCommandPreprocessNV = VK_PIPELINE_STAGE_COMMAND_PREPROCESS_BIT_NV, + eCommandPreprocessEXT = VK_PIPELINE_STAGE_COMMAND_PREPROCESS_BIT_EXT, eTaskShaderEXT = VK_PIPELINE_STAGE_TASK_SHADER_BIT_EXT, eTaskShaderNV = VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV, eMeshShaderEXT = VK_PIPELINE_STAGE_MESH_SHADER_BIT_EXT, @@ -2852,7 +2877,8 @@ namespace VULKAN_HPP_NAMESPACE eCoverageReductionModeNV = VK_DYNAMIC_STATE_COVERAGE_REDUCTION_MODE_NV, eAttachmentFeedbackLoopEnableEXT = VK_DYNAMIC_STATE_ATTACHMENT_FEEDBACK_LOOP_ENABLE_EXT, eLineStippleKHR = VK_DYNAMIC_STATE_LINE_STIPPLE_KHR, - eLineStippleEXT = VK_DYNAMIC_STATE_LINE_STIPPLE_EXT + eLineStippleEXT = VK_DYNAMIC_STATE_LINE_STIPPLE_EXT, + eDepthClampRangeEXT = VK_DYNAMIC_STATE_DEPTH_CLAMP_RANGE_EXT }; enum class FrontFace @@ -2894,10 +2920,6 @@ namespace VULKAN_HPP_NAMESPACE eFailOnPipelineCompileRequiredEXT = VK_PIPELINE_CREATE_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT_EXT, eEarlyReturnOnFailure = VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT, eEarlyReturnOnFailureEXT = VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT_EXT, - eRenderingFragmentShadingRateAttachmentKHR = VK_PIPELINE_CREATE_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR, - eVkPipelineRasterizationStateCreateFragmentShadingRateAttachmentKHR = VK_PIPELINE_RASTERIZATION_STATE_CREATE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR, - eRenderingFragmentDensityMapAttachmentEXT = VK_PIPELINE_CREATE_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT, - eVkPipelineRasterizationStateCreateFragmentDensityMapAttachmentEXT = VK_PIPELINE_RASTERIZATION_STATE_CREATE_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT, eRayTracingNoNullAnyHitShadersKHR = VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_ANY_HIT_SHADERS_BIT_KHR, eRayTracingNoNullClosestHitShadersKHR = VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_CLOSEST_HIT_SHADERS_BIT_KHR, eRayTracingNoNullMissShadersKHR = VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_MISS_SHADERS_BIT_KHR, @@ -2906,6 +2928,10 @@ namespace VULKAN_HPP_NAMESPACE eRayTracingSkipAabbsKHR = VK_PIPELINE_CREATE_RAY_TRACING_SKIP_AABBS_BIT_KHR, eRayTracingShaderGroupHandleCaptureReplayKHR = VK_PIPELINE_CREATE_RAY_TRACING_SHADER_GROUP_HANDLE_CAPTURE_REPLAY_BIT_KHR, eDeferCompileNV = VK_PIPELINE_CREATE_DEFER_COMPILE_BIT_NV, + eRenderingFragmentDensityMapAttachmentEXT = VK_PIPELINE_CREATE_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT, + eVkPipelineRasterizationStateCreateFragmentDensityMapAttachmentEXT = VK_PIPELINE_RASTERIZATION_STATE_CREATE_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT, + eRenderingFragmentShadingRateAttachmentKHR = VK_PIPELINE_CREATE_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR, + eVkPipelineRasterizationStateCreateFragmentShadingRateAttachmentKHR = VK_PIPELINE_RASTERIZATION_STATE_CREATE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR, eCaptureStatisticsKHR = VK_PIPELINE_CREATE_CAPTURE_STATISTICS_BIT_KHR, eCaptureInternalRepresentationsKHR = VK_PIPELINE_CREATE_CAPTURE_INTERNAL_REPRESENTATIONS_BIT_KHR, eIndirectBindableNV = VK_PIPELINE_CREATE_INDIRECT_BINDABLE_BIT_NV, @@ -2933,16 +2959,17 @@ namespace VULKAN_HPP_NAMESPACE static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineCreateFlags allFlags = PipelineCreateFlagBits::eDisableOptimization | 
PipelineCreateFlagBits::eAllowDerivatives | PipelineCreateFlagBits::eDerivative | PipelineCreateFlagBits::eViewIndexFromDeviceIndex | PipelineCreateFlagBits::eDispatchBase | PipelineCreateFlagBits::eFailOnPipelineCompileRequired | - PipelineCreateFlagBits::eEarlyReturnOnFailure | PipelineCreateFlagBits::eRenderingFragmentShadingRateAttachmentKHR | - PipelineCreateFlagBits::eRenderingFragmentDensityMapAttachmentEXT | PipelineCreateFlagBits::eRayTracingNoNullAnyHitShadersKHR | + PipelineCreateFlagBits::eEarlyReturnOnFailure | PipelineCreateFlagBits::eRayTracingNoNullAnyHitShadersKHR | PipelineCreateFlagBits::eRayTracingNoNullClosestHitShadersKHR | PipelineCreateFlagBits::eRayTracingNoNullMissShadersKHR | PipelineCreateFlagBits::eRayTracingNoNullIntersectionShadersKHR | PipelineCreateFlagBits::eRayTracingSkipTrianglesKHR | PipelineCreateFlagBits::eRayTracingSkipAabbsKHR | PipelineCreateFlagBits::eRayTracingShaderGroupHandleCaptureReplayKHR | - PipelineCreateFlagBits::eDeferCompileNV | PipelineCreateFlagBits::eCaptureStatisticsKHR | PipelineCreateFlagBits::eCaptureInternalRepresentationsKHR | - PipelineCreateFlagBits::eIndirectBindableNV | PipelineCreateFlagBits::eLibraryKHR | PipelineCreateFlagBits::eDescriptorBufferEXT | - PipelineCreateFlagBits::eRetainLinkTimeOptimizationInfoEXT | PipelineCreateFlagBits::eLinkTimeOptimizationEXT | - PipelineCreateFlagBits::eRayTracingAllowMotionNV | PipelineCreateFlagBits::eColorAttachmentFeedbackLoopEXT | - PipelineCreateFlagBits::eDepthStencilAttachmentFeedbackLoopEXT | PipelineCreateFlagBits::eRayTracingOpacityMicromapEXT + PipelineCreateFlagBits::eDeferCompileNV | PipelineCreateFlagBits::eRenderingFragmentDensityMapAttachmentEXT | + PipelineCreateFlagBits::eRenderingFragmentShadingRateAttachmentKHR | PipelineCreateFlagBits::eCaptureStatisticsKHR | + PipelineCreateFlagBits::eCaptureInternalRepresentationsKHR | PipelineCreateFlagBits::eIndirectBindableNV | PipelineCreateFlagBits::eLibraryKHR | + PipelineCreateFlagBits::eDescriptorBufferEXT | PipelineCreateFlagBits::eRetainLinkTimeOptimizationInfoEXT | + PipelineCreateFlagBits::eLinkTimeOptimizationEXT | PipelineCreateFlagBits::eRayTracingAllowMotionNV | + PipelineCreateFlagBits::eColorAttachmentFeedbackLoopEXT | PipelineCreateFlagBits::eDepthStencilAttachmentFeedbackLoopEXT | + PipelineCreateFlagBits::eRayTracingOpacityMicromapEXT #if defined( VK_ENABLE_BETA_EXTENSIONS ) | PipelineCreateFlagBits::eRayTracingDisplacementMicromapNV #endif /*VK_ENABLE_BETA_EXTENSIONS*/ @@ -3366,7 +3393,9 @@ namespace VULKAN_HPP_NAMESPACE eFragmentShadingRateAttachmentReadKHR = VK_ACCESS_FRAGMENT_SHADING_RATE_ATTACHMENT_READ_BIT_KHR, eShadingRateImageReadNV = VK_ACCESS_SHADING_RATE_IMAGE_READ_BIT_NV, eCommandPreprocessReadNV = VK_ACCESS_COMMAND_PREPROCESS_READ_BIT_NV, - eCommandPreprocessWriteNV = VK_ACCESS_COMMAND_PREPROCESS_WRITE_BIT_NV + eCommandPreprocessReadEXT = VK_ACCESS_COMMAND_PREPROCESS_READ_BIT_EXT, + eCommandPreprocessWriteNV = VK_ACCESS_COMMAND_PREPROCESS_WRITE_BIT_NV, + eCommandPreprocessWriteEXT = VK_ACCESS_COMMAND_PREPROCESS_WRITE_BIT_EXT }; using AccessFlags = Flags; @@ -4217,6 +4246,7 @@ namespace VULKAN_HPP_NAMESPACE eTransformFeedbackEXT = VK_PIPELINE_STAGE_2_TRANSFORM_FEEDBACK_BIT_EXT, eConditionalRenderingEXT = VK_PIPELINE_STAGE_2_CONDITIONAL_RENDERING_BIT_EXT, eCommandPreprocessNV = VK_PIPELINE_STAGE_2_COMMAND_PREPROCESS_BIT_NV, + eCommandPreprocessEXT = VK_PIPELINE_STAGE_2_COMMAND_PREPROCESS_BIT_EXT, eFragmentShadingRateAttachmentKHR = VK_PIPELINE_STAGE_2_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR, 
eShadingRateImageNV = VK_PIPELINE_STAGE_2_SHADING_RATE_IMAGE_BIT_NV, eAccelerationStructureBuildKHR = VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_KHR, @@ -4294,7 +4324,9 @@ namespace VULKAN_HPP_NAMESPACE eTransformFeedbackCounterWriteEXT = VK_ACCESS_2_TRANSFORM_FEEDBACK_COUNTER_WRITE_BIT_EXT, eConditionalRenderingReadEXT = VK_ACCESS_2_CONDITIONAL_RENDERING_READ_BIT_EXT, eCommandPreprocessReadNV = VK_ACCESS_2_COMMAND_PREPROCESS_READ_BIT_NV, + eCommandPreprocessReadEXT = VK_ACCESS_2_COMMAND_PREPROCESS_READ_BIT_EXT, eCommandPreprocessWriteNV = VK_ACCESS_2_COMMAND_PREPROCESS_WRITE_BIT_NV, + eCommandPreprocessWriteEXT = VK_ACCESS_2_COMMAND_PREPROCESS_WRITE_BIT_EXT, eFragmentShadingRateAttachmentReadKHR = VK_ACCESS_2_FRAGMENT_SHADING_RATE_ATTACHMENT_READ_BIT_KHR, eShadingRateImageReadNV = VK_ACCESS_2_SHADING_RATE_IMAGE_READ_BIT_NV, eAccelerationStructureReadKHR = VK_ACCESS_2_ACCELERATION_STRUCTURE_READ_BIT_KHR, @@ -4485,7 +4517,8 @@ namespace VULKAN_HPP_NAMESPACE eFifo = VK_PRESENT_MODE_FIFO_KHR, eFifoRelaxed = VK_PRESENT_MODE_FIFO_RELAXED_KHR, eSharedDemandRefresh = VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR, - eSharedContinuousRefresh = VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR + eSharedContinuousRefresh = VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR, + eFifoLatestReadyEXT = VK_PRESENT_MODE_FIFO_LATEST_READY_EXT }; enum class ColorSpaceKHR @@ -4501,13 +4534,13 @@ namespace VULKAN_HPP_NAMESPACE eBt709NonlinearEXT = VK_COLOR_SPACE_BT709_NONLINEAR_EXT, eBt2020LinearEXT = VK_COLOR_SPACE_BT2020_LINEAR_EXT, eHdr10St2084EXT = VK_COLOR_SPACE_HDR10_ST2084_EXT, - eDolbyvisionEXT = VK_COLOR_SPACE_DOLBYVISION_EXT, - eHdr10HlgEXT = VK_COLOR_SPACE_HDR10_HLG_EXT, - eAdobergbLinearEXT = VK_COLOR_SPACE_ADOBERGB_LINEAR_EXT, - eAdobergbNonlinearEXT = VK_COLOR_SPACE_ADOBERGB_NONLINEAR_EXT, - ePassThroughEXT = VK_COLOR_SPACE_PASS_THROUGH_EXT, - eExtendedSrgbNonlinearEXT = VK_COLOR_SPACE_EXTENDED_SRGB_NONLINEAR_EXT, - eDisplayNativeAMD = VK_COLOR_SPACE_DISPLAY_NATIVE_AMD + eDolbyvisionEXT VULKAN_HPP_DEPRECATED_17( "eDolbyvisionEXT is deprecated, but no reason was given in the API XML" ) = VK_COLOR_SPACE_DOLBYVISION_EXT, + eHdr10HlgEXT = VK_COLOR_SPACE_HDR10_HLG_EXT, + eAdobergbLinearEXT = VK_COLOR_SPACE_ADOBERGB_LINEAR_EXT, + eAdobergbNonlinearEXT = VK_COLOR_SPACE_ADOBERGB_NONLINEAR_EXT, + ePassThroughEXT = VK_COLOR_SPACE_PASS_THROUGH_EXT, + eExtendedSrgbNonlinearEXT = VK_COLOR_SPACE_EXTENDED_SRGB_NONLINEAR_EXT, + eDisplayNativeAMD = VK_COLOR_SPACE_DISPLAY_NATIVE_AMD }; enum class CompositeAlphaFlagBitsKHR : VkCompositeAlphaFlagsKHR @@ -6998,9 +7031,12 @@ namespace VULKAN_HPP_NAMESPACE enum class PipelineCreateFlagBits2KHR : VkPipelineCreateFlags2KHR { - eDisableOptimization = VK_PIPELINE_CREATE_2_DISABLE_OPTIMIZATION_BIT_KHR, - eAllowDerivatives = VK_PIPELINE_CREATE_2_ALLOW_DERIVATIVES_BIT_KHR, - eDerivative = VK_PIPELINE_CREATE_2_DERIVATIVE_BIT_KHR, + eDisableOptimization = VK_PIPELINE_CREATE_2_DISABLE_OPTIMIZATION_BIT_KHR, + eAllowDerivatives = VK_PIPELINE_CREATE_2_ALLOW_DERIVATIVES_BIT_KHR, + eDerivative = VK_PIPELINE_CREATE_2_DERIVATIVE_BIT_KHR, +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + eExecutionGraphAMDX = VK_PIPELINE_CREATE_2_EXECUTION_GRAPH_BIT_AMDX, +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ eEnableLegacyDitheringEXT = VK_PIPELINE_CREATE_2_ENABLE_LEGACY_DITHERING_BIT_EXT, eViewIndexFromDeviceIndex = VK_PIPELINE_CREATE_2_VIEW_INDEX_FROM_DEVICE_INDEX_BIT_KHR, eDispatchBase = VK_PIPELINE_CREATE_2_DISPATCH_BASE_BIT_KHR, @@ -7030,7 +7066,8 @@ namespace VULKAN_HPP_NAMESPACE eProtectedAccessOnlyEXT = 
VK_PIPELINE_CREATE_2_PROTECTED_ACCESS_ONLY_BIT_EXT, eRayTracingDisplacementMicromapNV = VK_PIPELINE_CREATE_2_RAY_TRACING_DISPLACEMENT_MICROMAP_BIT_NV, eDescriptorBufferEXT = VK_PIPELINE_CREATE_2_DESCRIPTOR_BUFFER_BIT_EXT, - eCaptureData = VK_PIPELINE_CREATE_2_CAPTURE_DATA_BIT_KHR + eCaptureData = VK_PIPELINE_CREATE_2_CAPTURE_DATA_BIT_KHR, + eIndirectBindableEXT = VK_PIPELINE_CREATE_2_INDIRECT_BINDABLE_BIT_EXT }; using PipelineCreateFlags2KHR = Flags; @@ -7040,8 +7077,11 @@ namespace VULKAN_HPP_NAMESPACE { static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineCreateFlags2KHR allFlags = - PipelineCreateFlagBits2KHR::eDisableOptimization | PipelineCreateFlagBits2KHR::eAllowDerivatives | PipelineCreateFlagBits2KHR::eDerivative | - PipelineCreateFlagBits2KHR::eEnableLegacyDitheringEXT | PipelineCreateFlagBits2KHR::eViewIndexFromDeviceIndex | + PipelineCreateFlagBits2KHR::eDisableOptimization | PipelineCreateFlagBits2KHR::eAllowDerivatives | PipelineCreateFlagBits2KHR::eDerivative +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + | PipelineCreateFlagBits2KHR::eExecutionGraphAMDX +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + | PipelineCreateFlagBits2KHR::eEnableLegacyDitheringEXT | PipelineCreateFlagBits2KHR::eViewIndexFromDeviceIndex | PipelineCreateFlagBits2KHR::eDispatchBase | PipelineCreateFlagBits2KHR::eDeferCompileNV | PipelineCreateFlagBits2KHR::eCaptureStatistics | PipelineCreateFlagBits2KHR::eCaptureInternalRepresentations | PipelineCreateFlagBits2KHR::eFailOnPipelineCompileRequired | PipelineCreateFlagBits2KHR::eEarlyReturnOnFailure | PipelineCreateFlagBits2KHR::eLinkTimeOptimizationEXT | @@ -7055,7 +7095,7 @@ namespace VULKAN_HPP_NAMESPACE PipelineCreateFlagBits2KHR::eColorAttachmentFeedbackLoopEXT | PipelineCreateFlagBits2KHR::eDepthStencilAttachmentFeedbackLoopEXT | PipelineCreateFlagBits2KHR::eNoProtectedAccessEXT | PipelineCreateFlagBits2KHR::eProtectedAccessOnlyEXT | PipelineCreateFlagBits2KHR::eRayTracingDisplacementMicromapNV | PipelineCreateFlagBits2KHR::eDescriptorBufferEXT | - PipelineCreateFlagBits2KHR::eCaptureData; + PipelineCreateFlagBits2KHR::eCaptureData | PipelineCreateFlagBits2KHR::eIndirectBindableEXT; }; enum class BufferUsageFlagBits2KHR : VkBufferUsageFlags2KHR @@ -7088,7 +7128,8 @@ namespace VULKAN_HPP_NAMESPACE eResourceDescriptorBufferEXT = VK_BUFFER_USAGE_2_RESOURCE_DESCRIPTOR_BUFFER_BIT_EXT, ePushDescriptorsDescriptorBufferEXT = VK_BUFFER_USAGE_2_PUSH_DESCRIPTORS_DESCRIPTOR_BUFFER_BIT_EXT, eMicromapBuildInputReadOnlyEXT = VK_BUFFER_USAGE_2_MICROMAP_BUILD_INPUT_READ_ONLY_BIT_EXT, - eMicromapStorageEXT = VK_BUFFER_USAGE_2_MICROMAP_STORAGE_BIT_EXT + eMicromapStorageEXT = VK_BUFFER_USAGE_2_MICROMAP_STORAGE_BIT_EXT, + ePreprocessBufferEXT = VK_BUFFER_USAGE_2_PREPROCESS_BUFFER_BIT_EXT }; using BufferUsageFlags2KHR = Flags; @@ -7111,7 +7152,7 @@ namespace VULKAN_HPP_NAMESPACE BufferUsageFlagBits2KHR::eAccelerationStructureBuildInputReadOnly | BufferUsageFlagBits2KHR::eAccelerationStructureStorage | BufferUsageFlagBits2KHR::eSamplerDescriptorBufferEXT | BufferUsageFlagBits2KHR::eResourceDescriptorBufferEXT | BufferUsageFlagBits2KHR::ePushDescriptorsDescriptorBufferEXT | BufferUsageFlagBits2KHR::eMicromapBuildInputReadOnlyEXT | - BufferUsageFlagBits2KHR::eMicromapStorageEXT; + BufferUsageFlagBits2KHR::eMicromapStorageEXT | BufferUsageFlagBits2KHR::ePreprocessBufferEXT; }; //=== VK_AMD_anti_lag === @@ -7139,7 +7180,8 @@ namespace VULKAN_HPP_NAMESPACE eNoTaskShader = VK_SHADER_CREATE_NO_TASK_SHADER_BIT_EXT, eDispatchBase = 
VK_SHADER_CREATE_DISPATCH_BASE_BIT_EXT, eFragmentShadingRateAttachment = VK_SHADER_CREATE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_EXT, - eFragmentDensityMapAttachment = VK_SHADER_CREATE_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT + eFragmentDensityMapAttachment = VK_SHADER_CREATE_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT, + eIndirectBindable = VK_SHADER_CREATE_INDIRECT_BINDABLE_BIT_EXT }; using ShaderCreateFlagsEXT = Flags; @@ -7151,7 +7193,7 @@ namespace VULKAN_HPP_NAMESPACE static VULKAN_HPP_CONST_OR_CONSTEXPR ShaderCreateFlagsEXT allFlags = ShaderCreateFlagBitsEXT::eLinkStage | ShaderCreateFlagBitsEXT::eAllowVaryingSubgroupSize | ShaderCreateFlagBitsEXT::eRequireFullSubgroups | ShaderCreateFlagBitsEXT::eNoTaskShader | ShaderCreateFlagBitsEXT::eDispatchBase | ShaderCreateFlagBitsEXT::eFragmentShadingRateAttachment | - ShaderCreateFlagBitsEXT::eFragmentDensityMapAttachment; + ShaderCreateFlagBitsEXT::eFragmentDensityMapAttachment | ShaderCreateFlagBitsEXT::eIndirectBindable; }; enum class ShaderCodeTypeEXT @@ -7361,6 +7403,73 @@ namespace VULKAN_HPP_NAMESPACE eOpengles = VK_PHYSICAL_DEVICE_LAYERED_API_OPENGLES_KHR }; + //=== VK_EXT_device_generated_commands === + + enum class IndirectCommandsTokenTypeEXT + { + eExecutionSet = VK_INDIRECT_COMMANDS_TOKEN_TYPE_EXECUTION_SET_EXT, + ePushConstant = VK_INDIRECT_COMMANDS_TOKEN_TYPE_PUSH_CONSTANT_EXT, + eSequenceIndex = VK_INDIRECT_COMMANDS_TOKEN_TYPE_SEQUENCE_INDEX_EXT, + eIndexBuffer = VK_INDIRECT_COMMANDS_TOKEN_TYPE_INDEX_BUFFER_EXT, + eVertexBuffer = VK_INDIRECT_COMMANDS_TOKEN_TYPE_VERTEX_BUFFER_EXT, + eDrawIndexed = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_EXT, + eDraw = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_EXT, + eDrawIndexedCount = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_COUNT_EXT, + eDrawCount = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_COUNT_EXT, + eDispatch = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DISPATCH_EXT, + eDrawMeshTasksNV = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_MESH_TASKS_NV_EXT, + eDrawMeshTasksCountNV = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_MESH_TASKS_COUNT_NV_EXT, + eDrawMeshTasks = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_MESH_TASKS_EXT, + eDrawMeshTasksCount = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_MESH_TASKS_COUNT_EXT, + eTraceRays2 = VK_INDIRECT_COMMANDS_TOKEN_TYPE_TRACE_RAYS2_EXT + }; + + enum class IndirectExecutionSetInfoTypeEXT + { + ePipelines = VK_INDIRECT_EXECUTION_SET_INFO_TYPE_PIPELINES_EXT, + eShaderObjects = VK_INDIRECT_EXECUTION_SET_INFO_TYPE_SHADER_OBJECTS_EXT + }; + + enum class IndirectCommandsLayoutUsageFlagBitsEXT : VkIndirectCommandsLayoutUsageFlagsEXT + { + eExplicitPreprocess = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_EXPLICIT_PREPROCESS_BIT_EXT, + eUnorderedSequences = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_UNORDERED_SEQUENCES_BIT_EXT + }; + + using IndirectCommandsLayoutUsageFlagsEXT = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR IndirectCommandsLayoutUsageFlagsEXT allFlags = + IndirectCommandsLayoutUsageFlagBitsEXT::eExplicitPreprocess | IndirectCommandsLayoutUsageFlagBitsEXT::eUnorderedSequences; + }; + + enum class IndirectCommandsInputModeFlagBitsEXT : VkIndirectCommandsInputModeFlagsEXT + { + eVulkanIndexBuffer = VK_INDIRECT_COMMANDS_INPUT_MODE_VULKAN_INDEX_BUFFER_EXT, + eDxgiIndexBuffer = VK_INDIRECT_COMMANDS_INPUT_MODE_DXGI_INDEX_BUFFER_EXT + }; + + using IndirectCommandsInputModeFlagsEXT = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static 
VULKAN_HPP_CONST_OR_CONSTEXPR IndirectCommandsInputModeFlagsEXT allFlags = + IndirectCommandsInputModeFlagBitsEXT::eVulkanIndexBuffer | IndirectCommandsInputModeFlagBitsEXT::eDxgiIndexBuffer; + }; + + //=== VK_EXT_depth_clamp_control === + + enum class DepthClampModeEXT + { + eViewportRange = VK_DEPTH_CLAMP_MODE_VIEWPORT_RANGE_EXT, + eUserDefinedRange = VK_DEPTH_CLAMP_MODE_USER_DEFINED_RANGE_EXT + }; + //=========================================================== //=== Mapping from ObjectType to DebugReportObjectTypeEXT === //=========================================================== @@ -7484,7 +7593,12 @@ namespace VULKAN_HPP_NAMESPACE return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; //=== VK_KHR_pipeline_binary === - case VULKAN_HPP_NAMESPACE::ObjectType::ePipelineBinaryKHR: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + case VULKAN_HPP_NAMESPACE::ObjectType::ePipelineBinaryKHR: + return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + //=== VK_EXT_device_generated_commands === + case VULKAN_HPP_NAMESPACE::ObjectType::eIndirectCommandsLayoutEXT: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + case VULKAN_HPP_NAMESPACE::ObjectType::eIndirectExecutionSetEXT: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; default: VULKAN_HPP_ASSERT( false && "unknown ObjectType" ); return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; } diff --git a/third_party/vulkan/vulkan_extension_inspection.hpp b/third_party/vulkan/vulkan_extension_inspection.hpp index da405d5..3075472 100644 --- a/third_party/vulkan/vulkan_extension_inspection.hpp +++ b/third_party/vulkan/vulkan_extension_inspection.hpp @@ -8,11 +8,15 @@ #ifndef VULKAN_EXTENSION_INSPECTION_HPP #define VULKAN_EXTENSION_INSPECTION_HPP -#include -#include -#include -#include -#include +#if defined( VULKAN_HPP_ENABLE_STD_MODULE ) && defined( VULKAN_HPP_STD_MODULE ) +import VULKAN_HPP_STD_MODULE; +#else +# include +# include +# include +# include +# include +#endif namespace VULKAN_HPP_NAMESPACE { @@ -342,6 +346,7 @@ namespace VULKAN_HPP_NAMESPACE "VK_EXT_depth_clip_control", "VK_EXT_primitive_topology_list_restart", "VK_KHR_format_feature_flags2", + "VK_EXT_present_mode_fifo_latest_ready", #if defined( VK_USE_PLATFORM_FUCHSIA ) "VK_FUCHSIA_external_memory", "VK_FUCHSIA_external_semaphore", @@ -446,7 +451,11 @@ namespace VULKAN_HPP_NAMESPACE "VK_NV_shader_atomic_float16_vector", "VK_EXT_shader_replicated_composites", "VK_NV_ray_tracing_validation", - "VK_MESA_image_alignment_control" + "VK_EXT_device_generated_commands", + "VK_MESA_image_alignment_control", + "VK_EXT_depth_clamp_control", + "VK_HUAWEI_hdr_vivid", + "VK_NV_cooperative_matrix2" }; return deviceExtensions; } @@ -986,17 +995,15 @@ namespace VULKAN_HPP_NAMESPACE #if defined( VK_ENABLE_BETA_EXTENSIONS ) { "VK_AMDX_shader_enqueue", { { "VK_VERSION_1_0", - { { - "VK_KHR_get_physical_device_properties2", - } } }, - { "VK_VERSION_1_1", { { "VK_KHR_synchronization2", + "VK_KHR_spirv_1_4", + "VK_EXT_extended_dynamic_state", } } }, { "VK_VERSION_1_3", { { + "VK_KHR_maintenance5", "VK_KHR_pipeline_library", - "VK_KHR_spirv_1_4", } } } } }, #endif /*VK_ENABLE_BETA_EXTENSIONS*/ { "VK_EXT_inline_uniform_block", @@ -1821,6 +1828,11 @@ namespace VULKAN_HPP_NAMESPACE "VK_KHR_get_physical_device_properties2", } } }, { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_present_mode_fifo_latest_ready", + { { "VK_VERSION_1_0", + { { + "VK_KHR_swapchain", + } } } } }, #if defined( VK_USE_PLATFORM_FUCHSIA ) { 
"VK_FUCHSIA_external_memory", { { "VK_VERSION_1_0", @@ -2333,12 +2345,39 @@ namespace VULKAN_HPP_NAMESPACE { "VK_KHR_maintenance6", { { "VK_VERSION_1_1", { {} } } } }, { "VK_NV_descriptor_pool_overallocation", { { "VK_VERSION_1_1", { {} } } } }, { "VK_KHR_maintenance7", { { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_device_generated_commands", + { { "VK_VERSION_1_0", + { { + "VK_KHR_buffer_device_address", + "VK_KHR_maintenance5", + } } } } }, { "VK_MESA_image_alignment_control", { { "VK_VERSION_1_0", { { "VK_KHR_get_physical_device_properties2", } } }, - { "VK_VERSION_1_1", { {} } } } } + { "VK_VERSION_1_1", { {} } } } }, + { "VK_EXT_depth_clamp_control", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } }, + { "VK_HUAWEI_hdr_vivid", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", + { { + "VK_KHR_swapchain", + "VK_EXT_hdr_metadata", + } } } } }, + { "VK_NV_cooperative_matrix2", + { { "VK_VERSION_1_0", + { { + "VK_KHR_cooperative_matrix", + } } } } } }; auto depIt = dependencies.find( extension ); return ( depIt != dependencies.end() ) ? depIt->second : noDependencies; @@ -3063,7 +3102,7 @@ namespace VULKAN_HPP_NAMESPACE || ( extension == "VK_VALVE_mutable_descriptor_type" ) || ( extension == "VK_EXT_vertex_input_dynamic_state" ) || ( extension == "VK_EXT_physical_device_drm" ) || ( extension == "VK_EXT_device_address_binding_report" ) || ( extension == "VK_EXT_depth_clip_control" ) || ( extension == "VK_EXT_primitive_topology_list_restart" ) || - ( extension == "VK_KHR_format_feature_flags2" ) + ( extension == "VK_KHR_format_feature_flags2" ) || ( extension == "VK_EXT_present_mode_fifo_latest_ready" ) #if defined( VK_USE_PLATFORM_FUCHSIA ) || ( extension == "VK_FUCHSIA_external_memory" ) || ( extension == "VK_FUCHSIA_external_semaphore" ) || ( extension == "VK_FUCHSIA_buffer_collection" ) #endif /*VK_USE_PLATFORM_FUCHSIA*/ @@ -3116,7 +3155,8 @@ namespace VULKAN_HPP_NAMESPACE ( extension == "VK_KHR_shader_relaxed_extended_instruction" ) || ( extension == "VK_NV_command_buffer_inheritance" ) || ( extension == "VK_KHR_maintenance7" ) || ( extension == "VK_NV_shader_atomic_float16_vector" ) || ( extension == "VK_EXT_shader_replicated_composites" ) || ( extension == "VK_NV_ray_tracing_validation" ) || - ( extension == "VK_MESA_image_alignment_control" ); + ( extension == "VK_EXT_device_generated_commands" ) || ( extension == "VK_MESA_image_alignment_control" ) || + ( extension == "VK_EXT_depth_clamp_control" ) || ( extension == "VK_HUAWEI_hdr_vivid" ) || ( extension == "VK_NV_cooperative_matrix2" ); } VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 bool isInstanceExtension( std::string const & extension ) diff --git a/third_party/vulkan/vulkan_funcs.hpp b/third_party/vulkan/vulkan_funcs.hpp index f91d6a4..fe0174d 100644 --- a/third_party/vulkan/vulkan_funcs.hpp +++ b/third_party/vulkan/vulkan_funcs.hpp @@ -70,7 +70,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::createInstanceUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( instance, ObjectDestroy( allocator, d ) ) ); + result, UniqueHandle( instance, detail::ObjectDestroy( allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -79,7 +79,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE void Instance::destroy( const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyInstance( m_instance, reinterpret_cast( pAllocator ) ); + d.vkDestroyInstance( static_cast( m_instance ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -102,11 +102,14 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkEnumeratePhysicalDevices( m_instance, pPhysicalDeviceCount, reinterpret_cast( pPhysicalDevices ) ) ); + return static_cast( + d.vkEnumeratePhysicalDevices( static_cast( m_instance ), pPhysicalDeviceCount, reinterpret_cast( pPhysicalDevices ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template ::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type Instance::enumeratePhysicalDevices( Dispatch const & d ) const { @@ -175,7 +178,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE void PhysicalDevice::getFeatures( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures * pFeatures, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetPhysicalDeviceFeatures( m_physicalDevice, reinterpret_cast( pFeatures ) ); + d.vkGetPhysicalDeviceFeatures( static_cast( m_physicalDevice ), reinterpret_cast( pFeatures ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -201,7 +204,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetPhysicalDeviceFormatProperties( m_physicalDevice, static_cast( format ), reinterpret_cast( pFormatProperties ) ); + d.vkGetPhysicalDeviceFormatProperties( + static_cast( m_physicalDevice ), static_cast( format ), reinterpret_cast( pFormatProperties ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -231,7 +235,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetPhysicalDeviceImageFormatProperties( m_physicalDevice, + return static_cast( d.vkGetPhysicalDeviceImageFormatProperties( static_cast( m_physicalDevice ), static_cast( format ), static_cast( type ), static_cast( tiling ), @@ -275,7 +279,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetPhysicalDeviceProperties( m_physicalDevice, reinterpret_cast( pProperties ) ); + d.vkGetPhysicalDeviceProperties( static_cast( m_physicalDevice ), reinterpret_cast( pProperties ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -302,11 +306,14 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkGetPhysicalDeviceQueueFamilyProperties( - m_physicalDevice, pQueueFamilyPropertyCount, reinterpret_cast( pQueueFamilyProperties ) ); + static_cast( m_physicalDevice ), pQueueFamilyPropertyCount, reinterpret_cast( pQueueFamilyProperties ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template < + typename QueueFamilyPropertiesAllocator, + typename Dispatch, + typename std::enable_if::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector PhysicalDevice::getQueueFamilyProperties( Dispatch const & d ) const { @@ -363,7 +370,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d 
) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetPhysicalDeviceMemoryProperties( m_physicalDevice, reinterpret_cast( pMemoryProperties ) ); + d.vkGetPhysicalDeviceMemoryProperties( static_cast( m_physicalDevice ), + reinterpret_cast( pMemoryProperties ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -387,7 +395,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE PFN_vkVoidFunction Instance::getProcAddr( const char * pName, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return d.vkGetInstanceProcAddr( m_instance, pName ); + return d.vkGetInstanceProcAddr( static_cast( m_instance ), pName ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -409,7 +417,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE PFN_vkVoidFunction Device::getProcAddr( const char * pName, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return d.vkGetDeviceProcAddr( m_device, pName ); + return d.vkGetDeviceProcAddr( static_cast( m_device ), pName ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -434,7 +442,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateDevice( m_physicalDevice, + return static_cast( d.vkCreateDevice( static_cast( m_physicalDevice ), reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pDevice ) ) ); @@ -482,7 +490,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDeviceUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( device, ObjectDestroy( allocator, d ) ) ); + result, UniqueHandle( device, detail::ObjectDestroy( allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -491,7 +499,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE void Device::destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyDevice( m_device, reinterpret_cast( pAllocator ) ); + d.vkDestroyDevice( static_cast( m_device ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -520,7 +528,10 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template < + typename ExtensionPropertiesAllocator, + typename Dispatch, + typename std::enable_if::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type enumerateInstanceExtensionProperties( Optional layerName, Dispatch const & d ) { @@ -597,12 +608,15 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( - d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, pLayerName, pPropertyCount, reinterpret_cast( pProperties ) ) ); + return static_cast( d.vkEnumerateDeviceExtensionProperties( + static_cast( m_physicalDevice ), pLayerName, pPropertyCount, reinterpret_cast( pProperties ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template < + typename ExtensionPropertiesAllocator, + typename Dispatch, + typename std::enable_if::value, int>::type> VULKAN_HPP_NODISCARD 
VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::enumerateDeviceExtensionProperties( Optional layerName, Dispatch const & d ) const { @@ -682,7 +696,9 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template ::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type enumerateInstanceLayerProperties( Dispatch const & d ) { @@ -753,11 +769,14 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkEnumerateDeviceLayerProperties( m_physicalDevice, pPropertyCount, reinterpret_cast( pProperties ) ) ); + return static_cast( d.vkEnumerateDeviceLayerProperties( + static_cast( m_physicalDevice ), pPropertyCount, reinterpret_cast( pProperties ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template ::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::enumerateDeviceLayerProperties( Dispatch const & d ) const { @@ -827,7 +846,7 @@ namespace VULKAN_HPP_NAMESPACE Device::getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, VULKAN_HPP_NAMESPACE::Queue * pQueue, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetDeviceQueue( m_device, queueFamilyIndex, queueIndex, reinterpret_cast( pQueue ) ); + d.vkGetDeviceQueue( static_cast( m_device ), queueFamilyIndex, queueIndex, reinterpret_cast( pQueue ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -854,7 +873,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkQueueSubmit( m_queue, submitCount, reinterpret_cast( pSubmits ), static_cast( fence ) ) ); + return static_cast( + d.vkQueueSubmit( static_cast( m_queue ), submitCount, reinterpret_cast( pSubmits ), static_cast( fence ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -880,7 +900,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::waitIdle( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkQueueWaitIdle( m_queue ) ); + return static_cast( d.vkQueueWaitIdle( static_cast( m_queue ) ) ); } #else template @@ -903,7 +923,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitIdle( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkDeviceWaitIdle( m_device ) ); + return static_cast( d.vkDeviceWaitIdle( static_cast( m_device ) ) ); } #else template @@ -928,7 +948,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkAllocateMemory( m_device, + return static_cast( d.vkAllocateMemory( static_cast( m_device ), reinterpret_cast( pAllocateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pMemory ) ) ); @@ -978,7 +998,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateMemoryUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( memory, ObjectFree( *this, allocator, d ) ) ); + result, UniqueHandle( memory, detail::ObjectFree( *this, 
allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -989,7 +1009,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkFreeMemory( m_device, static_cast( memory ), reinterpret_cast( pAllocator ) ); + d.vkFreeMemory( static_cast( m_device ), static_cast( memory ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -1015,7 +1035,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkFreeMemory( m_device, static_cast( memory ), reinterpret_cast( pAllocator ) ); + d.vkFreeMemory( static_cast( m_device ), static_cast( memory ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -1044,7 +1064,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkMapMemory( m_device, + return static_cast( d.vkMapMemory( static_cast( m_device ), static_cast( memory ), static_cast( offset ), static_cast( size ), @@ -1082,7 +1102,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE void Device::unmapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkUnmapMemory( m_device, static_cast( memory ) ); + d.vkUnmapMemory( static_cast( m_device ), static_cast( memory ) ); } template @@ -1091,7 +1111,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkFlushMappedMemoryRanges( m_device, memoryRangeCount, reinterpret_cast( pMemoryRanges ) ) ); + return static_cast( + d.vkFlushMappedMemoryRanges( static_cast( m_device ), memoryRangeCount, reinterpret_cast( pMemoryRanges ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -1120,7 +1141,7 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( - d.vkInvalidateMappedMemoryRanges( m_device, memoryRangeCount, reinterpret_cast( pMemoryRanges ) ) ); + d.vkInvalidateMappedMemoryRanges( static_cast( m_device ), memoryRangeCount, reinterpret_cast( pMemoryRanges ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -1148,7 +1169,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetDeviceMemoryCommitment( m_device, static_cast( memory ), reinterpret_cast( pCommittedMemoryInBytes ) ); + d.vkGetDeviceMemoryCommitment( + static_cast( m_device ), static_cast( memory ), reinterpret_cast( pCommittedMemoryInBytes ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -1176,8 +1198,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( - d.vkBindBufferMemory( m_device, static_cast( buffer ), static_cast( memory ), static_cast( memoryOffset ) ) ); + return static_cast( d.vkBindBufferMemory( + static_cast( m_device ), static_cast( buffer ), static_cast( memory ), static_cast( memoryOffset ) ) ); } #else template @@ -1205,8 +1227,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( 
d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( - d.vkBindImageMemory( m_device, static_cast( image ), static_cast( memory ), static_cast( memoryOffset ) ) ); + return static_cast( d.vkBindImageMemory( + static_cast( m_device ), static_cast( image ), static_cast( memory ), static_cast( memoryOffset ) ) ); } #else template @@ -1232,7 +1254,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetBufferMemoryRequirements( m_device, static_cast( buffer ), reinterpret_cast( pMemoryRequirements ) ); + d.vkGetBufferMemoryRequirements( + static_cast( m_device ), static_cast( buffer ), reinterpret_cast( pMemoryRequirements ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -1258,7 +1281,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetImageMemoryRequirements( m_device, static_cast( image ), reinterpret_cast( pMemoryRequirements ) ); + d.vkGetImageMemoryRequirements( + static_cast( m_device ), static_cast( image ), reinterpret_cast( pMemoryRequirements ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -1285,14 +1309,18 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetImageSparseMemoryRequirements( m_device, + d.vkGetImageSparseMemoryRequirements( static_cast( m_device ), static_cast( image ), pSparseMemoryRequirementCount, reinterpret_cast( pSparseMemoryRequirements ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template ::value, + int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector Device::getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, Dispatch const & d ) const { @@ -1363,7 +1391,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, + d.vkGetPhysicalDeviceSparseImageFormatProperties( static_cast( m_physicalDevice ), static_cast( format ), static_cast( type ), static_cast( samples ), @@ -1374,7 +1402,11 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template < + typename SparseImageFormatPropertiesAllocator, + typename Dispatch, + typename std::enable_if::value, + int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector PhysicalDevice::getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, @@ -1473,8 +1505,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( - d.vkQueueBindSparse( m_queue, bindInfoCount, reinterpret_cast( pBindInfo ), static_cast( fence ) ) ); + return static_cast( d.vkQueueBindSparse( + static_cast( m_queue ), bindInfoCount, reinterpret_cast( pBindInfo ), static_cast( fence ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -1502,7 +1534,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateFence( m_device, + return static_cast( d.vkCreateFence( static_cast( m_device ), reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pFence ) ) ); @@ -1548,7 +1580,7 @@ 
namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createFenceUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( fence, ObjectDestroy( *this, allocator, d ) ) ); + result, UniqueHandle( fence, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -1559,7 +1591,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyFence( m_device, static_cast( fence ), reinterpret_cast( pAllocator ) ); + d.vkDestroyFence( static_cast( m_device ), static_cast( fence ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -1585,7 +1617,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyFence( m_device, static_cast( fence ), reinterpret_cast( pAllocator ) ); + d.vkDestroyFence( static_cast( m_device ), static_cast( fence ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -1611,7 +1643,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkResetFences( m_device, fenceCount, reinterpret_cast( pFences ) ) ); + return static_cast( d.vkResetFences( static_cast( m_device ), fenceCount, reinterpret_cast( pFences ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -1637,7 +1669,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetFenceStatus( m_device, static_cast( fence ) ) ); + return static_cast( d.vkGetFenceStatus( static_cast( m_device ), static_cast( fence ) ) ); } #else template @@ -1664,8 +1696,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( - d.vkWaitForFences( m_device, fenceCount, reinterpret_cast( pFences ), static_cast( waitAll ), timeout ) ); + return static_cast( d.vkWaitForFences( + static_cast( m_device ), fenceCount, reinterpret_cast( pFences ), static_cast( waitAll ), timeout ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -1697,7 +1729,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateSemaphore( m_device, + return static_cast( d.vkCreateSemaphore( static_cast( m_device ), reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pSemaphore ) ) ); @@ -1747,7 +1779,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSemaphoreUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( semaphore, ObjectDestroy( *this, allocator, d ) ) ); + result, UniqueHandle( semaphore, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -1758,7 +1790,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const 
VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroySemaphore( m_device, static_cast( semaphore ), reinterpret_cast( pAllocator ) ); + d.vkDestroySemaphore( + static_cast( m_device ), static_cast( semaphore ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -1784,7 +1817,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroySemaphore( m_device, static_cast( semaphore ), reinterpret_cast( pAllocator ) ); + d.vkDestroySemaphore( + static_cast( m_device ), static_cast( semaphore ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -1811,7 +1845,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateEvent( m_device, + return static_cast( d.vkCreateEvent( static_cast( m_device ), reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pEvent ) ) ); @@ -1857,7 +1891,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createEventUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( event, ObjectDestroy( *this, allocator, d ) ) ); + result, UniqueHandle( event, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -1868,7 +1902,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyEvent( m_device, static_cast( event ), reinterpret_cast( pAllocator ) ); + d.vkDestroyEvent( static_cast( m_device ), static_cast( event ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -1894,7 +1928,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyEvent( m_device, static_cast( event ), reinterpret_cast( pAllocator ) ); + d.vkDestroyEvent( static_cast( m_device ), static_cast( event ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -1919,7 +1953,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getEventStatus( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetEventStatus( m_device, static_cast( event ) ) ); + return static_cast( d.vkGetEventStatus( static_cast( m_device ), static_cast( event ) ) ); } #else template @@ -1943,7 +1977,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkSetEvent( m_device, static_cast( event ) ) ); + return static_cast( d.vkSetEvent( static_cast( m_device ), static_cast( event ) ) ); } #else template @@ -1967,7 +2001,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::resetEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == 
VK_HEADER_VERSION ); - return static_cast( d.vkResetEvent( m_device, static_cast( event ) ) ); + return static_cast( d.vkResetEvent( static_cast( m_device ), static_cast( event ) ) ); } #else template @@ -1992,7 +2026,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateQueryPool( m_device, + return static_cast( d.vkCreateQueryPool( static_cast( m_device ), reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pQueryPool ) ) ); @@ -2042,7 +2076,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createQueryPoolUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( queryPool, ObjectDestroy( *this, allocator, d ) ) ); + result, UniqueHandle( queryPool, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -2053,7 +2087,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyQueryPool( m_device, static_cast( queryPool ), reinterpret_cast( pAllocator ) ); + d.vkDestroyQueryPool( + static_cast( m_device ), static_cast( queryPool ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -2079,7 +2114,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyQueryPool( m_device, static_cast( queryPool ), reinterpret_cast( pAllocator ) ); + d.vkDestroyQueryPool( + static_cast( m_device ), static_cast( queryPool ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -2110,7 +2146,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetQueryPoolResults( m_device, + return static_cast( d.vkGetQueryPoolResults( static_cast( m_device ), static_cast( queryPool ), firstQuery, queryCount, @@ -2121,7 +2157,10 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template ::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> Device::getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, @@ -2189,7 +2228,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateBuffer( m_device, + return static_cast( d.vkCreateBuffer( static_cast( m_device ), reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pBuffer ) ) ); @@ -2235,7 +2274,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( buffer, ObjectDestroy( *this, allocator, d ) ) ); + result, UniqueHandle( buffer, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -2246,7 +2285,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 
- d.vkDestroyBuffer( m_device, static_cast( buffer ), reinterpret_cast( pAllocator ) ); + d.vkDestroyBuffer( static_cast( m_device ), static_cast( buffer ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -2272,7 +2311,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyBuffer( m_device, static_cast( buffer ), reinterpret_cast( pAllocator ) ); + d.vkDestroyBuffer( static_cast( m_device ), static_cast( buffer ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -2299,7 +2338,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateBufferView( m_device, + return static_cast( d.vkCreateBufferView( static_cast( m_device ), reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pView ) ) ); @@ -2349,7 +2388,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferViewUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( view, ObjectDestroy( *this, allocator, d ) ) ); + result, UniqueHandle( view, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -2360,7 +2399,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyBufferView( m_device, static_cast( bufferView ), reinterpret_cast( pAllocator ) ); + d.vkDestroyBufferView( + static_cast( m_device ), static_cast( bufferView ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -2386,7 +2426,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyBufferView( m_device, static_cast( bufferView ), reinterpret_cast( pAllocator ) ); + d.vkDestroyBufferView( + static_cast( m_device ), static_cast( bufferView ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -2413,7 +2454,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateImage( m_device, + return static_cast( d.vkCreateImage( static_cast( m_device ), reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pImage ) ) ); @@ -2459,7 +2500,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createImageUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( image, ObjectDestroy( *this, allocator, d ) ) ); + result, UniqueHandle( image, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -2470,7 +2511,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyImage( m_device, static_cast( image ), reinterpret_cast( pAllocator ) ); + d.vkDestroyImage( static_cast( m_device ), static_cast( image ), reinterpret_cast( pAllocator ) ); } #ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -2496,7 +2537,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyImage( m_device, static_cast( image ), reinterpret_cast( pAllocator ) ); + d.vkDestroyImage( static_cast( m_device ), static_cast( image ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -2523,7 +2564,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetImageSubresourceLayout( m_device, + d.vkGetImageSubresourceLayout( static_cast( m_device ), static_cast( image ), reinterpret_cast( pSubresource ), reinterpret_cast( pLayout ) ); @@ -2556,7 +2597,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateImageView( m_device, + return static_cast( d.vkCreateImageView( static_cast( m_device ), reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pView ) ) ); @@ -2606,7 +2647,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createImageViewUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( view, ObjectDestroy( *this, allocator, d ) ) ); + result, UniqueHandle( view, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -2617,7 +2658,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyImageView( m_device, static_cast( imageView ), reinterpret_cast( pAllocator ) ); + d.vkDestroyImageView( + static_cast( m_device ), static_cast( imageView ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -2643,7 +2685,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyImageView( m_device, static_cast( imageView ), reinterpret_cast( pAllocator ) ); + d.vkDestroyImageView( + static_cast( m_device ), static_cast( imageView ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -2670,7 +2713,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateShaderModule( m_device, + return static_cast( d.vkCreateShaderModule( static_cast( m_device ), reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pShaderModule ) ) ); @@ -2720,7 +2763,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createShaderModuleUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( shaderModule, ObjectDestroy( *this, allocator, d ) ) ); + result, UniqueHandle( shaderModule, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -2731,7 +2774,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - 
d.vkDestroyShaderModule( m_device, static_cast( shaderModule ), reinterpret_cast( pAllocator ) ); + d.vkDestroyShaderModule( + static_cast( m_device ), static_cast( shaderModule ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -2757,7 +2801,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyShaderModule( m_device, static_cast( shaderModule ), reinterpret_cast( pAllocator ) ); + d.vkDestroyShaderModule( + static_cast( m_device ), static_cast( shaderModule ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -2784,7 +2829,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreatePipelineCache( m_device, + return static_cast( d.vkCreatePipelineCache( static_cast( m_device ), reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pPipelineCache ) ) ); @@ -2834,7 +2879,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineCacheUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( pipelineCache, ObjectDestroy( *this, allocator, d ) ) ); + result, UniqueHandle( pipelineCache, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -2845,7 +2890,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyPipelineCache( m_device, static_cast( pipelineCache ), reinterpret_cast( pAllocator ) ); + d.vkDestroyPipelineCache( + static_cast( m_device ), static_cast( pipelineCache ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -2871,7 +2917,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyPipelineCache( m_device, static_cast( pipelineCache ), reinterpret_cast( pAllocator ) ); + d.vkDestroyPipelineCache( + static_cast( m_device ), static_cast( pipelineCache ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -2898,11 +2945,14 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetPipelineCacheData( m_device, static_cast( pipelineCache ), pDataSize, pData ) ); + return static_cast( + d.vkGetPipelineCacheData( static_cast( m_device ), static_cast( pipelineCache ), pDataSize, pData ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template ::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type Device::getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Dispatch const & d ) const { @@ -2976,8 +3026,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( - d.vkMergePipelineCaches( m_device, static_cast( dstCache ), srcCacheCount, reinterpret_cast( pSrcCaches ) ) ); + return static_cast( d.vkMergePipelineCaches( + static_cast( m_device ), static_cast( dstCache ), srcCacheCount, reinterpret_cast( 
pSrcCaches ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -3009,7 +3059,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateGraphicsPipelines( m_device, + return static_cast( d.vkCreateGraphicsPipelines( static_cast( m_device ), static_cast( pipelineCache ), createInfoCount, reinterpret_cast( pCreateInfos ), @@ -3018,7 +3068,9 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template ::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> Device::createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, @@ -3103,7 +3155,10 @@ namespace VULKAN_HPP_NAMESPACE } # ifndef VULKAN_HPP_NO_SMART_HANDLE - template + template < + typename Dispatch, + typename PipelineAllocator, + typename std::enable_if>::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue, PipelineAllocator>> Device::createGraphicsPipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, @@ -3128,7 +3183,7 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); std::vector, PipelineAllocator> uniquePipelines; uniquePipelines.reserve( createInfos.size() ); - ObjectDestroy deleter( *this, allocator, d ); + detail::ObjectDestroy deleter( *this, allocator, d ); for ( auto const & pipeline : pipelines ) { uniquePipelines.push_back( UniqueHandle( pipeline, deleter ) ); @@ -3165,7 +3220,7 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); std::vector, PipelineAllocator> uniquePipelines( pipelineAllocator ); uniquePipelines.reserve( createInfos.size() ); - ObjectDestroy deleter( *this, allocator, d ); + detail::ObjectDestroy deleter( *this, allocator, d ); for ( auto const & pipeline : pipelines ) { uniquePipelines.push_back( UniqueHandle( pipeline, deleter ) ); @@ -3198,7 +3253,7 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); return ResultValue>( - result, UniqueHandle( pipeline, ObjectDestroy( *this, allocator, d ) ) ); + result, UniqueHandle( pipeline, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -3212,7 +3267,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateComputePipelines( m_device, + return static_cast( d.vkCreateComputePipelines( static_cast( m_device ), static_cast( pipelineCache ), createInfoCount, reinterpret_cast( pCreateInfos ), @@ -3221,7 +3276,9 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template ::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> Device::createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, @@ -3306,7 +3363,10 @@ namespace VULKAN_HPP_NAMESPACE } # ifndef VULKAN_HPP_NO_SMART_HANDLE - template + template < + typename Dispatch, + typename PipelineAllocator, + typename std::enable_if>::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 
ResultValue, PipelineAllocator>> Device::createComputePipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, @@ -3331,7 +3391,7 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); std::vector, PipelineAllocator> uniquePipelines; uniquePipelines.reserve( createInfos.size() ); - ObjectDestroy deleter( *this, allocator, d ); + detail::ObjectDestroy deleter( *this, allocator, d ); for ( auto const & pipeline : pipelines ) { uniquePipelines.push_back( UniqueHandle( pipeline, deleter ) ); @@ -3368,7 +3428,7 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); std::vector, PipelineAllocator> uniquePipelines( pipelineAllocator ); uniquePipelines.reserve( createInfos.size() ); - ObjectDestroy deleter( *this, allocator, d ); + detail::ObjectDestroy deleter( *this, allocator, d ); for ( auto const & pipeline : pipelines ) { uniquePipelines.push_back( UniqueHandle( pipeline, deleter ) ); @@ -3401,7 +3461,7 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); return ResultValue>( - result, UniqueHandle( pipeline, ObjectDestroy( *this, allocator, d ) ) ); + result, UniqueHandle( pipeline, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -3412,7 +3472,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyPipeline( m_device, static_cast( pipeline ), reinterpret_cast( pAllocator ) ); + d.vkDestroyPipeline( + static_cast( m_device ), static_cast( pipeline ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -3438,7 +3499,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyPipeline( m_device, static_cast( pipeline ), reinterpret_cast( pAllocator ) ); + d.vkDestroyPipeline( + static_cast( m_device ), static_cast( pipeline ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -3465,7 +3527,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreatePipelineLayout( m_device, + return static_cast( d.vkCreatePipelineLayout( static_cast( m_device ), reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pPipelineLayout ) ) ); @@ -3515,7 +3577,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineLayoutUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( pipelineLayout, ObjectDestroy( *this, allocator, d ) ) ); + result, UniqueHandle( pipelineLayout, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -3526,7 +3588,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyPipelineLayout( m_device, static_cast( pipelineLayout ), reinterpret_cast( pAllocator ) ); + 
d.vkDestroyPipelineLayout( + static_cast( m_device ), static_cast( pipelineLayout ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -3552,7 +3615,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyPipelineLayout( m_device, static_cast( pipelineLayout ), reinterpret_cast( pAllocator ) ); + d.vkDestroyPipelineLayout( + static_cast( m_device ), static_cast( pipelineLayout ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -3579,7 +3643,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateSampler( m_device, + return static_cast( d.vkCreateSampler( static_cast( m_device ), reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pSampler ) ) ); @@ -3625,7 +3689,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( sampler, ObjectDestroy( *this, allocator, d ) ) ); + result, UniqueHandle( sampler, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -3636,7 +3700,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroySampler( m_device, static_cast( sampler ), reinterpret_cast( pAllocator ) ); + d.vkDestroySampler( static_cast( m_device ), static_cast( sampler ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -3662,7 +3726,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroySampler( m_device, static_cast( sampler ), reinterpret_cast( pAllocator ) ); + d.vkDestroySampler( static_cast( m_device ), static_cast( sampler ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -3689,7 +3753,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateDescriptorSetLayout( m_device, + return static_cast( d.vkCreateDescriptorSetLayout( static_cast( m_device ), reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pSetLayout ) ) ); @@ -3739,7 +3803,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorSetLayoutUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( setLayout, ObjectDestroy( *this, allocator, d ) ) ); + result, UniqueHandle( setLayout, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -3750,8 +3814,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyDescriptorSetLayout( - m_device, static_cast( descriptorSetLayout ), reinterpret_cast( pAllocator ) ); + d.vkDestroyDescriptorSetLayout( static_cast( m_device ), + static_cast( descriptorSetLayout ), + 
reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -3778,8 +3843,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyDescriptorSetLayout( - m_device, static_cast( descriptorSetLayout ), reinterpret_cast( pAllocator ) ); + d.vkDestroyDescriptorSetLayout( static_cast( m_device ), + static_cast( descriptorSetLayout ), + reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -3807,7 +3873,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateDescriptorPool( m_device, + return static_cast( d.vkCreateDescriptorPool( static_cast( m_device ), reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pDescriptorPool ) ) ); @@ -3857,7 +3923,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorPoolUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( descriptorPool, ObjectDestroy( *this, allocator, d ) ) ); + result, UniqueHandle( descriptorPool, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -3868,7 +3934,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyDescriptorPool( m_device, static_cast( descriptorPool ), reinterpret_cast( pAllocator ) ); + d.vkDestroyDescriptorPool( + static_cast( m_device ), static_cast( descriptorPool ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -3894,7 +3961,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyDescriptorPool( m_device, static_cast( descriptorPool ), reinterpret_cast( pAllocator ) ); + d.vkDestroyDescriptorPool( + static_cast( m_device ), static_cast( descriptorPool ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -3921,8 +3989,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( - d.vkResetDescriptorPool( m_device, static_cast( descriptorPool ), static_cast( flags ) ) ); + return static_cast( d.vkResetDescriptorPool( + static_cast( m_device ), static_cast( descriptorPool ), static_cast( flags ) ) ); } #else template @@ -3945,12 +4013,15 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkAllocateDescriptorSets( - m_device, reinterpret_cast( pAllocateInfo ), reinterpret_cast( pDescriptorSets ) ) ); + return static_cast( d.vkAllocateDescriptorSets( static_cast( m_device ), + reinterpret_cast( pAllocateInfo ), + reinterpret_cast( pDescriptorSets ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template ::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type Device::allocateDescriptorSets( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo, Dispatch const & d ) const { @@ -3989,7 +4060,11 @@ namespace VULKAN_HPP_NAMESPACE } # 
ifndef VULKAN_HPP_NO_SMART_HANDLE - template + template < + typename Dispatch, + typename DescriptorSetAllocator, + typename std::enable_if>::value, + int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType, DescriptorSetAllocator>>::type Device::allocateDescriptorSetsUnique( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo, Dispatch const & d ) const @@ -4005,7 +4080,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSetsUnique" ); std::vector, DescriptorSetAllocator> uniqueDescriptorSets; uniqueDescriptorSets.reserve( allocateInfo.descriptorSetCount ); - PoolFree deleter( *this, allocateInfo.descriptorPool, d ); + detail::PoolFree deleter( *this, allocateInfo.descriptorPool, d ); for ( auto const & descriptorSet : descriptorSets ) { uniqueDescriptorSets.push_back( UniqueHandle( descriptorSet, deleter ) ); @@ -4035,7 +4110,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSetsUnique" ); std::vector, DescriptorSetAllocator> uniqueDescriptorSets( descriptorSetAllocator ); uniqueDescriptorSets.reserve( allocateInfo.descriptorSetCount ); - PoolFree deleter( *this, allocateInfo.descriptorPool, d ); + detail::PoolFree deleter( *this, allocateInfo.descriptorPool, d ); for ( auto const & descriptorSet : descriptorSets ) { uniqueDescriptorSets.push_back( UniqueHandle( descriptorSet, deleter ) ); @@ -4052,8 +4127,10 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkFreeDescriptorSets( - m_device, static_cast( descriptorPool ), descriptorSetCount, reinterpret_cast( pDescriptorSets ) ) ); + return static_cast( d.vkFreeDescriptorSets( static_cast( m_device ), + static_cast( descriptorPool ), + descriptorSetCount, + reinterpret_cast( pDescriptorSets ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -4079,8 +4156,10 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkFreeDescriptorSets( - m_device, static_cast( descriptorPool ), descriptorSetCount, reinterpret_cast( pDescriptorSets ) ) ); + return static_cast( d.vkFreeDescriptorSets( static_cast( m_device ), + static_cast( descriptorPool ), + descriptorSetCount, + reinterpret_cast( pDescriptorSets ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -4107,7 +4186,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkUpdateDescriptorSets( m_device, + d.vkUpdateDescriptorSets( static_cast( m_device ), descriptorWriteCount, reinterpret_cast( pDescriptorWrites ), descriptorCopyCount, @@ -4141,7 +4220,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateFramebuffer( m_device, + return static_cast( d.vkCreateFramebuffer( static_cast( m_device ), reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pFramebuffer ) ) ); @@ -4191,7 +4270,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createFramebufferUnique" ); return 
VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( framebuffer, ObjectDestroy( *this, allocator, d ) ) ); + result, UniqueHandle( framebuffer, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -4202,7 +4281,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyFramebuffer( m_device, static_cast( framebuffer ), reinterpret_cast( pAllocator ) ); + d.vkDestroyFramebuffer( + static_cast( m_device ), static_cast( framebuffer ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -4228,7 +4308,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyFramebuffer( m_device, static_cast( framebuffer ), reinterpret_cast( pAllocator ) ); + d.vkDestroyFramebuffer( + static_cast( m_device ), static_cast( framebuffer ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -4255,7 +4336,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateRenderPass( m_device, + return static_cast( d.vkCreateRenderPass( static_cast( m_device ), reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pRenderPass ) ) ); @@ -4305,7 +4386,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPassUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( renderPass, ObjectDestroy( *this, allocator, d ) ) ); + result, UniqueHandle( renderPass, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -4316,7 +4397,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyRenderPass( m_device, static_cast( renderPass ), reinterpret_cast( pAllocator ) ); + d.vkDestroyRenderPass( + static_cast( m_device ), static_cast( renderPass ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -4342,7 +4424,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyRenderPass( m_device, static_cast( renderPass ), reinterpret_cast( pAllocator ) ); + d.vkDestroyRenderPass( + static_cast( m_device ), static_cast( renderPass ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -4368,7 +4451,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetRenderAreaGranularity( m_device, static_cast( renderPass ), reinterpret_cast( pGranularity ) ); + d.vkGetRenderAreaGranularity( static_cast( m_device ), static_cast( renderPass ), reinterpret_cast( pGranularity ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -4395,7 +4478,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateCommandPool( m_device, + return static_cast( 
d.vkCreateCommandPool( static_cast( m_device ), reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pCommandPool ) ) ); @@ -4445,7 +4528,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCommandPoolUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( commandPool, ObjectDestroy( *this, allocator, d ) ) ); + result, UniqueHandle( commandPool, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -4456,7 +4539,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyCommandPool( m_device, static_cast( commandPool ), reinterpret_cast( pAllocator ) ); + d.vkDestroyCommandPool( + static_cast( m_device ), static_cast( commandPool ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -4482,7 +4566,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyCommandPool( m_device, static_cast( commandPool ), reinterpret_cast( pAllocator ) ); + d.vkDestroyCommandPool( + static_cast( m_device ), static_cast( commandPool ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -4509,7 +4594,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkResetCommandPool( m_device, static_cast( commandPool ), static_cast( flags ) ) ); + return static_cast( + d.vkResetCommandPool( static_cast( m_device ), static_cast( commandPool ), static_cast( flags ) ) ); } #else template @@ -4535,12 +4621,15 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkAllocateCommandBuffers( - m_device, reinterpret_cast( pAllocateInfo ), reinterpret_cast( pCommandBuffers ) ) ); + return static_cast( d.vkAllocateCommandBuffers( static_cast( m_device ), + reinterpret_cast( pAllocateInfo ), + reinterpret_cast( pCommandBuffers ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template ::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type Device::allocateCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & allocateInfo, Dispatch const & d ) const { @@ -4579,7 +4668,11 @@ namespace VULKAN_HPP_NAMESPACE } # ifndef VULKAN_HPP_NO_SMART_HANDLE - template + template < + typename Dispatch, + typename CommandBufferAllocator, + typename std::enable_if>::value, + int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType, CommandBufferAllocator>>::type Device::allocateCommandBuffersUnique( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & allocateInfo, Dispatch const & d ) const @@ -4595,7 +4688,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffersUnique" ); std::vector, CommandBufferAllocator> uniqueCommandBuffers; uniqueCommandBuffers.reserve( allocateInfo.commandBufferCount ); - PoolFree deleter( *this, allocateInfo.commandPool, d ); + detail::PoolFree deleter( *this, allocateInfo.commandPool, d ); for ( auto const & 
commandBuffer : commandBuffers ) { uniqueCommandBuffers.push_back( UniqueHandle( commandBuffer, deleter ) ); @@ -4625,7 +4718,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffersUnique" ); std::vector, CommandBufferAllocator> uniqueCommandBuffers( commandBufferAllocator ); uniqueCommandBuffers.reserve( allocateInfo.commandBufferCount ); - PoolFree deleter( *this, allocateInfo.commandPool, d ); + detail::PoolFree deleter( *this, allocateInfo.commandPool, d ); for ( auto const & commandBuffer : commandBuffers ) { uniqueCommandBuffers.push_back( UniqueHandle( commandBuffer, deleter ) ); @@ -4642,8 +4735,10 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkFreeCommandBuffers( - m_device, static_cast( commandPool ), commandBufferCount, reinterpret_cast( pCommandBuffers ) ); + d.vkFreeCommandBuffers( static_cast( m_device ), + static_cast( commandPool ), + commandBufferCount, + reinterpret_cast( pCommandBuffers ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -4669,8 +4764,10 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkFreeCommandBuffers( - m_device, static_cast( commandPool ), commandBufferCount, reinterpret_cast( pCommandBuffers ) ); + d.vkFreeCommandBuffers( static_cast( m_device ), + static_cast( commandPool ), + commandBufferCount, + reinterpret_cast( pCommandBuffers ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -4694,7 +4791,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkBeginCommandBuffer( m_commandBuffer, reinterpret_cast( pBeginInfo ) ) ); + return static_cast( + d.vkBeginCommandBuffer( static_cast( m_commandBuffer ), reinterpret_cast( pBeginInfo ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -4720,7 +4818,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::end( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkEndCommandBuffer( m_commandBuffer ) ); + return static_cast( d.vkEndCommandBuffer( static_cast( m_commandBuffer ) ) ); } #else template @@ -4744,7 +4842,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkResetCommandBuffer( m_commandBuffer, static_cast( flags ) ) ); + return static_cast( d.vkResetCommandBuffer( static_cast( m_commandBuffer ), static_cast( flags ) ) ); } #else template @@ -4769,7 +4867,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdBindPipeline( m_commandBuffer, static_cast( pipelineBindPoint ), static_cast( pipeline ) ); + d.vkCmdBindPipeline( + static_cast( m_commandBuffer ), static_cast( pipelineBindPoint ), static_cast( pipeline ) ); } template @@ -4779,7 +4878,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetViewport( m_commandBuffer, firstViewport, viewportCount, reinterpret_cast( pViewports ) ); + d.vkCmdSetViewport( static_cast( 
m_commandBuffer ), firstViewport, viewportCount, reinterpret_cast( pViewports ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -4804,7 +4903,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetScissor( m_commandBuffer, firstScissor, scissorCount, reinterpret_cast( pScissors ) ); + d.vkCmdSetScissor( static_cast( m_commandBuffer ), firstScissor, scissorCount, reinterpret_cast( pScissors ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -4826,7 +4925,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE void CommandBuffer::setLineWidth( float lineWidth, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetLineWidth( m_commandBuffer, lineWidth ); + d.vkCmdSetLineWidth( static_cast( m_commandBuffer ), lineWidth ); } template @@ -4834,21 +4933,21 @@ namespace VULKAN_HPP_NAMESPACE CommandBuffer::setDepthBias( float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetDepthBias( m_commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor ); + d.vkCmdSetDepthBias( static_cast( m_commandBuffer ), depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor ); } template VULKAN_HPP_INLINE void CommandBuffer::setBlendConstants( const float blendConstants[4], Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetBlendConstants( m_commandBuffer, blendConstants ); + d.vkCmdSetBlendConstants( static_cast( m_commandBuffer ), blendConstants ); } template VULKAN_HPP_INLINE void CommandBuffer::setDepthBounds( float minDepthBounds, float maxDepthBounds, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetDepthBounds( m_commandBuffer, minDepthBounds, maxDepthBounds ); + d.vkCmdSetDepthBounds( static_cast( m_commandBuffer ), minDepthBounds, maxDepthBounds ); } template @@ -4856,7 +4955,7 @@ namespace VULKAN_HPP_NAMESPACE CommandBuffer::setStencilCompareMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t compareMask, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetStencilCompareMask( m_commandBuffer, static_cast( faceMask ), compareMask ); + d.vkCmdSetStencilCompareMask( static_cast( m_commandBuffer ), static_cast( faceMask ), compareMask ); } template @@ -4864,7 +4963,7 @@ namespace VULKAN_HPP_NAMESPACE CommandBuffer::setStencilWriteMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t writeMask, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetStencilWriteMask( m_commandBuffer, static_cast( faceMask ), writeMask ); + d.vkCmdSetStencilWriteMask( static_cast( m_commandBuffer ), static_cast( faceMask ), writeMask ); } template @@ -4872,7 +4971,7 @@ namespace VULKAN_HPP_NAMESPACE CommandBuffer::setStencilReference( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t reference, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetStencilReference( m_commandBuffer, static_cast( faceMask ), reference ); + d.vkCmdSetStencilReference( static_cast( m_commandBuffer ), 
static_cast( faceMask ), reference ); } template @@ -4886,7 +4985,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdBindDescriptorSets( m_commandBuffer, + d.vkCmdBindDescriptorSets( static_cast( m_commandBuffer ), static_cast( pipelineBindPoint ), static_cast( layout ), firstSet, @@ -4928,7 +5027,10 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdBindIndexBuffer( m_commandBuffer, static_cast( buffer ), static_cast( offset ), static_cast( indexType ) ); + d.vkCmdBindIndexBuffer( static_cast( m_commandBuffer ), + static_cast( buffer ), + static_cast( offset ), + static_cast( indexType ) ); } template @@ -4939,8 +5041,11 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdBindVertexBuffers( - m_commandBuffer, firstBinding, bindingCount, reinterpret_cast( pBuffers ), reinterpret_cast( pOffsets ) ); + d.vkCmdBindVertexBuffers( static_cast( m_commandBuffer ), + firstBinding, + bindingCount, + reinterpret_cast( pBuffers ), + reinterpret_cast( pOffsets ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -4976,7 +5081,7 @@ namespace VULKAN_HPP_NAMESPACE uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdDraw( m_commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance ); + d.vkCmdDraw( static_cast( m_commandBuffer ), vertexCount, instanceCount, firstVertex, firstInstance ); } template @@ -4988,7 +5093,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdDrawIndexed( m_commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance ); + d.vkCmdDrawIndexed( static_cast( m_commandBuffer ), indexCount, instanceCount, firstIndex, vertexOffset, firstInstance ); } template @@ -4999,7 +5104,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdDrawIndirect( m_commandBuffer, static_cast( buffer ), static_cast( offset ), drawCount, stride ); + d.vkCmdDrawIndirect( + static_cast( m_commandBuffer ), static_cast( buffer ), static_cast( offset ), drawCount, stride ); } template @@ -5010,7 +5116,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdDrawIndexedIndirect( m_commandBuffer, static_cast( buffer ), static_cast( offset ), drawCount, stride ); + d.vkCmdDrawIndexedIndirect( + static_cast( m_commandBuffer ), static_cast( buffer ), static_cast( offset ), drawCount, stride ); } template @@ -5018,7 +5125,7 @@ namespace VULKAN_HPP_NAMESPACE CommandBuffer::dispatch( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdDispatch( m_commandBuffer, groupCountX, groupCountY, groupCountZ ); + d.vkCmdDispatch( static_cast( m_commandBuffer ), groupCountX, groupCountY, groupCountZ ); } template @@ -5027,7 +5134,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & 
d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdDispatchIndirect( m_commandBuffer, static_cast( buffer ), static_cast( offset ) ); + d.vkCmdDispatchIndirect( static_cast( m_commandBuffer ), static_cast( buffer ), static_cast( offset ) ); } template @@ -5038,7 +5145,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdCopyBuffer( m_commandBuffer, + d.vkCmdCopyBuffer( static_cast( m_commandBuffer ), static_cast( srcBuffer ), static_cast( dstBuffer ), regionCount, @@ -5075,7 +5182,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdCopyImage( m_commandBuffer, + d.vkCmdCopyImage( static_cast( m_commandBuffer ), static_cast( srcImage ), static_cast( srcImageLayout ), static_cast( dstImage ), @@ -5119,7 +5226,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdBlitImage( m_commandBuffer, + d.vkCmdBlitImage( static_cast( m_commandBuffer ), static_cast( srcImage ), static_cast( srcImageLayout ), static_cast( dstImage ), @@ -5164,7 +5271,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdCopyBufferToImage( m_commandBuffer, + d.vkCmdCopyBufferToImage( static_cast( m_commandBuffer ), static_cast( srcBuffer ), static_cast( dstImage ), static_cast( dstImageLayout ), @@ -5203,7 +5310,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdCopyImageToBuffer( m_commandBuffer, + d.vkCmdCopyImageToBuffer( static_cast( m_commandBuffer ), static_cast( srcImage ), static_cast( srcImageLayout ), static_cast( dstBuffer ), @@ -5241,8 +5348,11 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdUpdateBuffer( - m_commandBuffer, static_cast( dstBuffer ), static_cast( dstOffset ), static_cast( dataSize ), pData ); + d.vkCmdUpdateBuffer( static_cast( m_commandBuffer ), + static_cast( dstBuffer ), + static_cast( dstOffset ), + static_cast( dataSize ), + pData ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -5273,7 +5383,11 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdFillBuffer( m_commandBuffer, static_cast( dstBuffer ), static_cast( dstOffset ), static_cast( size ), data ); + d.vkCmdFillBuffer( static_cast( m_commandBuffer ), + static_cast( dstBuffer ), + static_cast( dstOffset ), + static_cast( size ), + data ); } template @@ -5285,7 +5399,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdClearColorImage( m_commandBuffer, + d.vkCmdClearColorImage( static_cast( m_commandBuffer ), static_cast( image ), static_cast( imageLayout ), reinterpret_cast( pColor ), @@ -5324,7 +5438,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdClearDepthStencilImage( m_commandBuffer, + d.vkCmdClearDepthStencilImage( 
static_cast( m_commandBuffer ), static_cast( image ), static_cast( imageLayout ), reinterpret_cast( pDepthStencil ), @@ -5363,7 +5477,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdClearAttachments( m_commandBuffer, + d.vkCmdClearAttachments( static_cast( m_commandBuffer ), attachmentCount, reinterpret_cast( pAttachments ), rectCount, @@ -5399,7 +5513,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdResolveImage( m_commandBuffer, + d.vkCmdResolveImage( static_cast( m_commandBuffer ), static_cast( srcImage ), static_cast( srcImageLayout ), static_cast( dstImage ), @@ -5438,7 +5552,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetEvent( m_commandBuffer, static_cast( event ), static_cast( stageMask ) ); + d.vkCmdSetEvent( static_cast( m_commandBuffer ), static_cast( event ), static_cast( stageMask ) ); } template @@ -5447,7 +5561,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdResetEvent( m_commandBuffer, static_cast( event ), static_cast( stageMask ) ); + d.vkCmdResetEvent( static_cast( m_commandBuffer ), static_cast( event ), static_cast( stageMask ) ); } template @@ -5464,7 +5578,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdWaitEvents( m_commandBuffer, + d.vkCmdWaitEvents( static_cast( m_commandBuffer ), eventCount, reinterpret_cast( pEvents ), static_cast( srcStageMask ), @@ -5520,7 +5634,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdPipelineBarrier( m_commandBuffer, + d.vkCmdPipelineBarrier( static_cast( m_commandBuffer ), static_cast( srcStageMask ), static_cast( dstStageMask ), static_cast( dependencyFlags ), @@ -5568,14 +5682,15 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdBeginQuery( m_commandBuffer, static_cast( queryPool ), query, static_cast( flags ) ); + d.vkCmdBeginQuery( + static_cast( m_commandBuffer ), static_cast( queryPool ), query, static_cast( flags ) ); } template VULKAN_HPP_INLINE void CommandBuffer::endQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdEndQuery( m_commandBuffer, static_cast( queryPool ), query ); + d.vkCmdEndQuery( static_cast( m_commandBuffer ), static_cast( queryPool ), query ); } template @@ -5585,7 +5700,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdResetQueryPool( m_commandBuffer, static_cast( queryPool ), firstQuery, queryCount ); + d.vkCmdResetQueryPool( static_cast( m_commandBuffer ), static_cast( queryPool ), firstQuery, queryCount ); } template @@ -5595,7 +5710,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - 
d.vkCmdWriteTimestamp( m_commandBuffer, static_cast( pipelineStage ), static_cast( queryPool ), query ); + d.vkCmdWriteTimestamp( + static_cast( m_commandBuffer ), static_cast( pipelineStage ), static_cast( queryPool ), query ); } template @@ -5609,7 +5725,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdCopyQueryPoolResults( m_commandBuffer, + d.vkCmdCopyQueryPoolResults( static_cast( m_commandBuffer ), static_cast( queryPool ), firstQuery, queryCount, @@ -5628,7 +5744,12 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdPushConstants( m_commandBuffer, static_cast( layout ), static_cast( stageFlags ), offset, size, pValues ); + d.vkCmdPushConstants( static_cast( m_commandBuffer ), + static_cast( layout ), + static_cast( stageFlags ), + offset, + size, + pValues ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -5659,7 +5780,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdBeginRenderPass( m_commandBuffer, reinterpret_cast( pRenderPassBegin ), static_cast( contents ) ); + d.vkCmdBeginRenderPass( static_cast( m_commandBuffer ), + reinterpret_cast( pRenderPassBegin ), + static_cast( contents ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -5681,14 +5804,14 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE void CommandBuffer::nextSubpass( VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdNextSubpass( m_commandBuffer, static_cast( contents ) ); + d.vkCmdNextSubpass( static_cast( m_commandBuffer ), static_cast( contents ) ); } template VULKAN_HPP_INLINE void CommandBuffer::endRenderPass( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdEndRenderPass( m_commandBuffer ); + d.vkCmdEndRenderPass( static_cast( m_commandBuffer ) ); } template @@ -5697,7 +5820,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdExecuteCommands( m_commandBuffer, commandBufferCount, reinterpret_cast( pCommandBuffers ) ); + d.vkCmdExecuteCommands( static_cast( m_commandBuffer ), commandBufferCount, reinterpret_cast( pCommandBuffers ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -5746,7 +5869,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkBindBufferMemory2( m_device, bindInfoCount, reinterpret_cast( pBindInfos ) ) ); + return static_cast( + d.vkBindBufferMemory2( static_cast( m_device ), bindInfoCount, reinterpret_cast( pBindInfos ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -5773,7 +5897,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkBindImageMemory2( m_device, bindInfoCount, reinterpret_cast( pBindInfos ) ) ); + return static_cast( + d.vkBindImageMemory2( static_cast( m_device ), bindInfoCount, reinterpret_cast( pBindInfos ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -5803,7 +5928,7 @@ namespace VULKAN_HPP_NAMESPACE { 
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkGetDeviceGroupPeerMemoryFeatures( - m_device, heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast( pPeerMemoryFeatures ) ); + static_cast( m_device ), heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast( pPeerMemoryFeatures ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -5829,7 +5954,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE void CommandBuffer::setDeviceMask( uint32_t deviceMask, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetDeviceMask( m_commandBuffer, deviceMask ); + d.vkCmdSetDeviceMask( static_cast( m_commandBuffer ), deviceMask ); } template @@ -5842,7 +5967,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdDispatchBase( m_commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ ); + d.vkCmdDispatchBase( static_cast( m_commandBuffer ), baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ ); } template @@ -5852,12 +5977,17 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkEnumeratePhysicalDeviceGroups( - m_instance, pPhysicalDeviceGroupCount, reinterpret_cast( pPhysicalDeviceGroupProperties ) ) ); + return static_cast( d.vkEnumeratePhysicalDeviceGroups( static_cast( m_instance ), + pPhysicalDeviceGroupCount, + reinterpret_cast( pPhysicalDeviceGroupProperties ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template ::value, + int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type Instance::enumeratePhysicalDeviceGroups( Dispatch const & d ) const @@ -5935,8 +6065,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetImageMemoryRequirements2( - m_device, reinterpret_cast( pInfo ), reinterpret_cast( pMemoryRequirements ) ); + d.vkGetImageMemoryRequirements2( static_cast( m_device ), + reinterpret_cast( pInfo ), + reinterpret_cast( pMemoryRequirements ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -5982,8 +6113,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetBufferMemoryRequirements2( - m_device, reinterpret_cast( pInfo ), reinterpret_cast( pMemoryRequirements ) ); + d.vkGetBufferMemoryRequirements2( static_cast( m_device ), + reinterpret_cast( pInfo ), + reinterpret_cast( pMemoryRequirements ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -6030,14 +6162,18 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetImageSparseMemoryRequirements2( m_device, + d.vkGetImageSparseMemoryRequirements2( static_cast( m_device ), reinterpret_cast( pInfo ), pSparseMemoryRequirementCount, reinterpret_cast( pSparseMemoryRequirements ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template ::value, + int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector Device::getImageSparseMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info, Dispatch const & d ) const { @@ -6105,7 +6241,7 @@ namespace VULKAN_HPP_NAMESPACE 
[Vendored Vulkan-Hpp header update (third_party): a long series of mechanically generated hunks, kept here only in summary.]

Across the generated wrapper functions, every raw handle member (m_instance, m_physicalDevice, m_device, m_queue, m_commandBuffer) is now passed to the C dispatch table through an explicit static_cast to its native Vk handle type (VkInstance, VkPhysicalDevice, VkDevice, VkQueue, VkCommandBuffer) instead of relying on implicit conversion, and the longer calls are re-wrapped with one argument per line. The smart-handle creation overloads (createSamplerYcbcrConversionUnique, createDescriptorUpdateTemplateUnique, createRenderPass2Unique, createPrivateDataSlotUnique, createSwapchainKHRUnique, createSharedSwapchainsKHRUnique, createDisplayModeKHRUnique, createDisplayPlaneSurfaceKHRUnique, the platform-specific create*SurfaceKHRUnique functions, createDebugReportCallbackEXTUnique, createVideoSessionKHRUnique, createVideoSessionParametersKHRUnique, createCuModuleNVXUnique, createCuFunctionNVXUnique) now spell their deleters as detail::ObjectDestroy rather than ObjectDestroy. The enhanced-mode overloads that return a std::vector (getQueueFamilyProperties2, getSparseImageFormatProperties2, getToolProperties, getImageSparseMemoryRequirements, getSurfaceFormatsKHR, getSurfacePresentModesKHR, getSwapchainImagesKHR, getPresentRectanglesKHR, getDisplayPropertiesKHR, getDisplayPlanePropertiesKHR, getDisplayPlaneSupportedDisplaysKHR, getDisplayModePropertiesKHR, createSharedSwapchainsKHR, getVideoFormatPropertiesKHR, getVideoSessionMemoryRequirementsKHR) gain an additional std::enable_if-constrained template parameter that checks the supplied allocator's value_type against the element type being returned.
VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -11387,7 +11633,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdCuLaunchKernelNVX( m_commandBuffer, reinterpret_cast( pLaunchInfo ) ); + d.vkCmdCuLaunchKernelNVX( static_cast( m_commandBuffer ), reinterpret_cast( pLaunchInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -11411,7 +11657,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return d.vkGetImageViewHandleNVX( m_device, reinterpret_cast( pInfo ) ); + return d.vkGetImageViewHandleNVX( static_cast( m_device ), reinterpret_cast( pInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -11436,8 +11682,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( - d.vkGetImageViewAddressNVX( m_device, static_cast( imageView ), reinterpret_cast( pProperties ) ) ); + return static_cast( d.vkGetImageViewAddressNVX( + static_cast( m_device ), static_cast( imageView ), reinterpret_cast( pProperties ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -11471,7 +11717,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdDrawIndirectCountAMD( m_commandBuffer, + d.vkCmdDrawIndirectCountAMD( static_cast( m_commandBuffer ), static_cast( buffer ), static_cast( offset ), static_cast( countBuffer ), @@ -11490,7 +11736,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdDrawIndexedIndirectCountAMD( m_commandBuffer, + d.vkCmdDrawIndexedIndirectCountAMD( static_cast( m_commandBuffer ), static_cast( buffer ), static_cast( offset ), static_cast( countBuffer ), @@ -11510,7 +11756,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetShaderInfoAMD( m_device, + return static_cast( d.vkGetShaderInfoAMD( static_cast( m_device ), static_cast( pipeline ), static_cast( shaderStage ), static_cast( infoType ), @@ -11519,7 +11765,9 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template ::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type Device::getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, @@ -11616,7 +11864,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdBeginRenderingKHR( m_commandBuffer, reinterpret_cast( pRenderingInfo ) ); + d.vkCmdBeginRenderingKHR( static_cast( m_commandBuffer ), reinterpret_cast( pRenderingInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -11637,7 +11885,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE void CommandBuffer::endRenderingKHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdEndRenderingKHR( m_commandBuffer ); + d.vkCmdEndRenderingKHR( static_cast( m_commandBuffer ) ); } #if defined( VK_USE_PLATFORM_GGP ) @@ -11651,7 +11899,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const 
VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateStreamDescriptorSurfaceGGP( m_instance, + return static_cast( d.vkCreateStreamDescriptorSurfaceGGP( static_cast( m_instance ), reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pSurface ) ) ); @@ -11701,7 +11949,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createStreamDescriptorSurfaceGGPUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( surface, ObjectDestroy( *this, allocator, d ) ) ); + result, UniqueHandle( surface, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -11722,7 +11970,7 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( - d.vkGetPhysicalDeviceExternalImageFormatPropertiesNV( m_physicalDevice, + d.vkGetPhysicalDeviceExternalImageFormatPropertiesNV( static_cast( m_physicalDevice ), static_cast( format ), static_cast( type ), static_cast( tiling ), @@ -11775,8 +12023,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( - d.vkGetMemoryWin32HandleNV( m_device, static_cast( memory ), static_cast( handleType ), pHandle ) ); + return static_cast( d.vkGetMemoryWin32HandleNV( + static_cast( m_device ), static_cast( memory ), static_cast( handleType ), pHandle ) ); } # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -11806,7 +12054,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetPhysicalDeviceFeatures2KHR( m_physicalDevice, reinterpret_cast( pFeatures ) ); + d.vkGetPhysicalDeviceFeatures2KHR( static_cast( m_physicalDevice ), reinterpret_cast( pFeatures ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -11849,7 +12097,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetPhysicalDeviceProperties2KHR( m_physicalDevice, reinterpret_cast( pProperties ) ); + d.vkGetPhysicalDeviceProperties2KHR( static_cast( m_physicalDevice ), reinterpret_cast( pProperties ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -11894,7 +12142,7 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkGetPhysicalDeviceFormatProperties2KHR( - m_physicalDevice, static_cast( format ), reinterpret_cast( pFormatProperties ) ); + static_cast( m_physicalDevice ), static_cast( format ), reinterpret_cast( pFormatProperties ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -11941,7 +12189,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetPhysicalDeviceImageFormatProperties2KHR( m_physicalDevice, + return static_cast( d.vkGetPhysicalDeviceImageFormatProperties2KHR( static_cast( m_physicalDevice ), reinterpret_cast( pImageFormatInfo ), reinterpret_cast( pImageFormatProperties ) ) ); } @@ -11996,11 +12244,14 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( - m_physicalDevice, 
pQueueFamilyPropertyCount, reinterpret_cast( pQueueFamilyProperties ) ); + static_cast( m_physicalDevice ), pQueueFamilyPropertyCount, reinterpret_cast( pQueueFamilyProperties ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template < + typename QueueFamilyProperties2Allocator, + typename Dispatch, + typename std::enable_if::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector PhysicalDevice::getQueueFamilyProperties2KHR( Dispatch const & d ) const { @@ -12053,7 +12304,10 @@ namespace VULKAN_HPP_NAMESPACE return queueFamilyProperties; } - template + template ::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector PhysicalDevice::getQueueFamilyProperties2KHR( Dispatch const & d ) const { @@ -12132,7 +12386,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetPhysicalDeviceMemoryProperties2KHR( m_physicalDevice, reinterpret_cast( pMemoryProperties ) ); + d.vkGetPhysicalDeviceMemoryProperties2KHR( static_cast( m_physicalDevice ), + reinterpret_cast( pMemoryProperties ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -12178,14 +12433,18 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( m_physicalDevice, + d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( static_cast( m_physicalDevice ), reinterpret_cast( pFormatInfo ), pPropertyCount, reinterpret_cast( pProperties ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template < + typename SparseImageFormatProperties2Allocator, + typename Dispatch, + typename std::enable_if::value, + int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector PhysicalDevice::getSparseImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Dispatch const & d ) const @@ -12260,7 +12519,7 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkGetDeviceGroupPeerMemoryFeaturesKHR( - m_device, heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast( pPeerMemoryFeatures ) ); + static_cast( m_device ), heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast( pPeerMemoryFeatures ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -12286,7 +12545,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE void CommandBuffer::setDeviceMaskKHR( uint32_t deviceMask, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetDeviceMaskKHR( m_commandBuffer, deviceMask ); + d.vkCmdSetDeviceMaskKHR( static_cast( m_commandBuffer ), deviceMask ); } template @@ -12299,7 +12558,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdDispatchBaseKHR( m_commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ ); + d.vkCmdDispatchBaseKHR( static_cast( m_commandBuffer ), baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ ); } #if defined( VK_USE_PLATFORM_VI_NN ) @@ -12312,7 +12571,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateViSurfaceNN( m_instance, + return static_cast( d.vkCreateViSurfaceNN( 
static_cast( m_instance ), reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pSurface ) ) ); @@ -12362,7 +12621,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createViSurfaceNNUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( surface, ObjectDestroy( *this, allocator, d ) ) ); + result, UniqueHandle( surface, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -12376,7 +12635,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkTrimCommandPoolKHR( m_device, static_cast( commandPool ), static_cast( flags ) ); + d.vkTrimCommandPoolKHR( static_cast( m_device ), static_cast( commandPool ), static_cast( flags ) ); } //=== VK_KHR_device_group_creation === @@ -12388,12 +12647,17 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkEnumeratePhysicalDeviceGroupsKHR( - m_instance, pPhysicalDeviceGroupCount, reinterpret_cast( pPhysicalDeviceGroupProperties ) ) ); + return static_cast( d.vkEnumeratePhysicalDeviceGroupsKHR( static_cast( m_instance ), + pPhysicalDeviceGroupCount, + reinterpret_cast( pPhysicalDeviceGroupProperties ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template ::value, + int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type Instance::enumeratePhysicalDeviceGroupsKHR( Dispatch const & d ) const @@ -12473,7 +12737,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetPhysicalDeviceExternalBufferPropertiesKHR( m_physicalDevice, + d.vkGetPhysicalDeviceExternalBufferPropertiesKHR( static_cast( m_physicalDevice ), reinterpret_cast( pExternalBufferInfo ), reinterpret_cast( pExternalBufferProperties ) ); } @@ -12508,8 +12772,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( - d.vkGetMemoryWin32HandleKHR( m_device, reinterpret_cast( pGetWin32HandleInfo ), pHandle ) ); + return static_cast( d.vkGetMemoryWin32HandleKHR( + static_cast( m_device ), reinterpret_cast( pGetWin32HandleInfo ), pHandle ) ); } # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -12539,7 +12803,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetMemoryWin32HandlePropertiesKHR( m_device, + return static_cast( d.vkGetMemoryWin32HandlePropertiesKHR( static_cast( m_device ), static_cast( handleType ), handle, reinterpret_cast( pMemoryWin32HandleProperties ) ) ); @@ -12576,7 +12840,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetMemoryFdKHR( m_device, reinterpret_cast( pGetFdInfo ), pFd ) ); + return static_cast( d.vkGetMemoryFdKHR( static_cast( m_device ), reinterpret_cast( pGetFdInfo ), pFd ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -12605,8 +12869,10 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const 
VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetMemoryFdPropertiesKHR( - m_device, static_cast( handleType ), fd, reinterpret_cast( pMemoryFdProperties ) ) ); + return static_cast( d.vkGetMemoryFdPropertiesKHR( static_cast( m_device ), + static_cast( handleType ), + fd, + reinterpret_cast( pMemoryFdProperties ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -12637,7 +12903,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( m_physicalDevice, + d.vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( static_cast( m_physicalDevice ), reinterpret_cast( pExternalSemaphoreInfo ), reinterpret_cast( pExternalSemaphoreProperties ) ); } @@ -12671,8 +12937,8 @@ namespace VULKAN_HPP_NAMESPACE const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR * pImportSemaphoreWin32HandleInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( - d.vkImportSemaphoreWin32HandleKHR( m_device, reinterpret_cast( pImportSemaphoreWin32HandleInfo ) ) ); + return static_cast( d.vkImportSemaphoreWin32HandleKHR( + static_cast( m_device ), reinterpret_cast( pImportSemaphoreWin32HandleInfo ) ) ); } # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -12699,8 +12965,8 @@ namespace VULKAN_HPP_NAMESPACE const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR * pGetWin32HandleInfo, HANDLE * pHandle, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( - d.vkGetSemaphoreWin32HandleKHR( m_device, reinterpret_cast( pGetWin32HandleInfo ), pHandle ) ); + return static_cast( d.vkGetSemaphoreWin32HandleKHR( + static_cast( m_device ), reinterpret_cast( pGetWin32HandleInfo ), pHandle ) ); } # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -12730,7 +12996,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkImportSemaphoreFdKHR( m_device, reinterpret_cast( pImportSemaphoreFdInfo ) ) ); + return static_cast( + d.vkImportSemaphoreFdKHR( static_cast( m_device ), reinterpret_cast( pImportSemaphoreFdInfo ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -12757,7 +13024,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetSemaphoreFdKHR( m_device, reinterpret_cast( pGetFdInfo ), pFd ) ); + return static_cast( + d.vkGetSemaphoreFdKHR( static_cast( m_device ), reinterpret_cast( pGetFdInfo ), pFd ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -12790,7 +13058,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdPushDescriptorSetKHR( m_commandBuffer, + d.vkCmdPushDescriptorSetKHR( static_cast( m_commandBuffer ), static_cast( pipelineBindPoint ), static_cast( layout ), set, @@ -12829,8 +13097,11 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdPushDescriptorSetWithTemplateKHR( - m_commandBuffer, static_cast( descriptorUpdateTemplate ), static_cast( layout ), set, pData ); + 
d.vkCmdPushDescriptorSetWithTemplateKHR( static_cast( m_commandBuffer ), + static_cast( descriptorUpdateTemplate ), + static_cast( layout ), + set, + pData ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -12862,7 +13133,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdBeginConditionalRenderingEXT( m_commandBuffer, reinterpret_cast( pConditionalRenderingBegin ) ); + d.vkCmdBeginConditionalRenderingEXT( static_cast( m_commandBuffer ), + reinterpret_cast( pConditionalRenderingBegin ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -12883,7 +13155,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE void CommandBuffer::endConditionalRenderingEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdEndConditionalRenderingEXT( m_commandBuffer ); + d.vkCmdEndConditionalRenderingEXT( static_cast( m_commandBuffer ) ); } //=== VK_KHR_descriptor_update_template === @@ -12896,7 +13168,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateDescriptorUpdateTemplateKHR( m_device, + return static_cast( d.vkCreateDescriptorUpdateTemplateKHR( static_cast( m_device ), reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pDescriptorUpdateTemplate ) ) ); @@ -12949,7 +13221,7 @@ namespace VULKAN_HPP_NAMESPACE return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, UniqueHandle( - descriptorUpdateTemplate, ObjectDestroy( *this, allocator, d ) ) ); + descriptorUpdateTemplate, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -12960,8 +13232,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyDescriptorUpdateTemplateKHR( - m_device, static_cast( descriptorUpdateTemplate ), reinterpret_cast( pAllocator ) ); + d.vkDestroyDescriptorUpdateTemplateKHR( static_cast( m_device ), + static_cast( descriptorUpdateTemplate ), + reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -12990,8 +13263,10 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkUpdateDescriptorSetWithTemplateKHR( - m_device, static_cast( descriptorSet ), static_cast( descriptorUpdateTemplate ), pData ); + d.vkUpdateDescriptorSetWithTemplateKHR( static_cast( m_device ), + static_cast( descriptorSet ), + static_cast( descriptorUpdateTemplate ), + pData ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -13023,7 +13298,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetViewportWScalingNV( m_commandBuffer, firstViewport, viewportCount, reinterpret_cast( pViewportWScalings ) ); + d.vkCmdSetViewportWScalingNV( + static_cast( m_commandBuffer ), firstViewport, viewportCount, reinterpret_cast( pViewportWScalings ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -13050,7 +13326,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE Result PhysicalDevice::releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { 
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkReleaseDisplayEXT( m_physicalDevice, static_cast( display ) ) ); + return static_cast( d.vkReleaseDisplayEXT( static_cast( m_physicalDevice ), static_cast( display ) ) ); } #else template @@ -13074,7 +13350,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkAcquireXlibDisplayEXT( m_physicalDevice, dpy, static_cast( display ) ) ); + return static_cast( d.vkAcquireXlibDisplayEXT( static_cast( m_physicalDevice ), dpy, static_cast( display ) ) ); } # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -13102,7 +13378,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetRandROutputDisplayEXT( m_physicalDevice, dpy, rrOutput, reinterpret_cast( pDisplay ) ) ); + return static_cast( + d.vkGetRandROutputDisplayEXT( static_cast( m_physicalDevice ), dpy, rrOutput, reinterpret_cast( pDisplay ) ) ); } # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -13139,7 +13416,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getRandROutputDisplayEXTUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( display, ObjectRelease( *this, d ) ) ); + result, UniqueHandle( display, detail::ObjectRelease( *this, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -13154,8 +13431,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetPhysicalDeviceSurfaceCapabilities2EXT( - m_physicalDevice, static_cast( surface ), reinterpret_cast( pSurfaceCapabilities ) ) ); + return static_cast( d.vkGetPhysicalDeviceSurfaceCapabilities2EXT( static_cast( m_physicalDevice ), + static_cast( surface ), + reinterpret_cast( pSurfaceCapabilities ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -13186,8 +13464,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( - d.vkDisplayPowerControlEXT( m_device, static_cast( display ), reinterpret_cast( pDisplayPowerInfo ) ) ); + return static_cast( d.vkDisplayPowerControlEXT( + static_cast( m_device ), static_cast( display ), reinterpret_cast( pDisplayPowerInfo ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -13216,7 +13494,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkRegisterDeviceEventEXT( m_device, + return static_cast( d.vkRegisterDeviceEventEXT( static_cast( m_device ), reinterpret_cast( pDeviceEventInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pFence ) ) ); @@ -13266,7 +13544,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::registerEventEXTUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( fence, ObjectDestroy( *this, allocator, d ) ) ); + result, UniqueHandle( fence, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* 
VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -13279,7 +13557,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkRegisterDisplayEventEXT( m_device, + return static_cast( d.vkRegisterDisplayEventEXT( static_cast( m_device ), static_cast( display ), reinterpret_cast( pDisplayEventInfo ), reinterpret_cast( pAllocator ), @@ -13334,7 +13612,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::registerDisplayEventEXTUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( fence, ObjectDestroy( *this, allocator, d ) ) ); + result, UniqueHandle( fence, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -13346,8 +13624,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( - d.vkGetSwapchainCounterEXT( m_device, static_cast( swapchain ), static_cast( counter ), pCounterValue ) ); + return static_cast( d.vkGetSwapchainCounterEXT( + static_cast( m_device ), static_cast( swapchain ), static_cast( counter ), pCounterValue ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -13378,8 +13656,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetRefreshCycleDurationGOOGLE( - m_device, static_cast( swapchain ), reinterpret_cast( pDisplayTimingProperties ) ) ); + return static_cast( d.vkGetRefreshCycleDurationGOOGLE( static_cast( m_device ), + static_cast( swapchain ), + reinterpret_cast( pDisplayTimingProperties ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -13409,14 +13688,18 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetPastPresentationTimingGOOGLE( m_device, + return static_cast( d.vkGetPastPresentationTimingGOOGLE( static_cast( m_device ), static_cast( swapchain ), pPresentationTimingCount, reinterpret_cast( pPresentationTimings ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template < + typename PastPresentationTimingGOOGLEAllocator, + typename Dispatch, + typename std::enable_if::value, + int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type Device::getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const @@ -13505,7 +13788,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetDiscardRectangleEXT( m_commandBuffer, firstDiscardRectangle, discardRectangleCount, reinterpret_cast( pDiscardRectangles ) ); + d.vkCmdSetDiscardRectangleEXT( + static_cast( m_commandBuffer ), firstDiscardRectangle, discardRectangleCount, reinterpret_cast( pDiscardRectangles ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -13529,7 +13813,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetDiscardRectangleEnableEXT( m_commandBuffer, static_cast( discardRectangleEnable ) ); + d.vkCmdSetDiscardRectangleEnableEXT( static_cast( 
m_commandBuffer ), static_cast( discardRectangleEnable ) ); } template @@ -13537,7 +13821,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetDiscardRectangleModeEXT( m_commandBuffer, static_cast( discardRectangleMode ) ); + d.vkCmdSetDiscardRectangleModeEXT( static_cast( m_commandBuffer ), static_cast( discardRectangleMode ) ); } //=== VK_EXT_hdr_metadata === @@ -13549,8 +13833,10 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkSetHdrMetadataEXT( - m_device, swapchainCount, reinterpret_cast( pSwapchains ), reinterpret_cast( pMetadata ) ); + d.vkSetHdrMetadataEXT( static_cast( m_device ), + swapchainCount, + reinterpret_cast( pSwapchains ), + reinterpret_cast( pMetadata ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -13588,7 +13874,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateRenderPass2KHR( m_device, + return static_cast( d.vkCreateRenderPass2KHR( static_cast( m_device ), reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pRenderPass ) ) ); @@ -13638,7 +13924,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2KHRUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( renderPass, ObjectDestroy( *this, allocator, d ) ) ); + result, UniqueHandle( renderPass, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -13649,8 +13935,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdBeginRenderPass2KHR( - m_commandBuffer, reinterpret_cast( pRenderPassBegin ), reinterpret_cast( pSubpassBeginInfo ) ); + d.vkCmdBeginRenderPass2KHR( static_cast( m_commandBuffer ), + reinterpret_cast( pRenderPassBegin ), + reinterpret_cast( pSubpassBeginInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -13675,8 +13962,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdNextSubpass2KHR( - m_commandBuffer, reinterpret_cast( pSubpassBeginInfo ), reinterpret_cast( pSubpassEndInfo ) ); + d.vkCmdNextSubpass2KHR( static_cast( m_commandBuffer ), + reinterpret_cast( pSubpassBeginInfo ), + reinterpret_cast( pSubpassEndInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -13700,7 +13988,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdEndRenderPass2KHR( m_commandBuffer, reinterpret_cast( pSubpassEndInfo ) ); + d.vkCmdEndRenderPass2KHR( static_cast( m_commandBuffer ), reinterpret_cast( pSubpassEndInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -13725,7 +14013,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetSwapchainStatusKHR( m_device, static_cast( swapchain ) ) ); + return static_cast( d.vkGetSwapchainStatusKHR( static_cast( m_device ), static_cast( swapchain ) ) 
); } #else template @@ -13755,7 +14043,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetPhysicalDeviceExternalFencePropertiesKHR( m_physicalDevice, + d.vkGetPhysicalDeviceExternalFencePropertiesKHR( static_cast( m_physicalDevice ), reinterpret_cast( pExternalFenceInfo ), reinterpret_cast( pExternalFenceProperties ) ); } @@ -13789,8 +14077,8 @@ namespace VULKAN_HPP_NAMESPACE const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR * pImportFenceWin32HandleInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( - d.vkImportFenceWin32HandleKHR( m_device, reinterpret_cast( pImportFenceWin32HandleInfo ) ) ); + return static_cast( d.vkImportFenceWin32HandleKHR( static_cast( m_device ), + reinterpret_cast( pImportFenceWin32HandleInfo ) ) ); } # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -13818,7 +14106,7 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( - d.vkGetFenceWin32HandleKHR( m_device, reinterpret_cast( pGetWin32HandleInfo ), pHandle ) ); + d.vkGetFenceWin32HandleKHR( static_cast( m_device ), reinterpret_cast( pGetWin32HandleInfo ), pHandle ) ); } # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -13848,7 +14136,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkImportFenceFdKHR( m_device, reinterpret_cast( pImportFenceFdInfo ) ) ); + return static_cast( + d.vkImportFenceFdKHR( static_cast( m_device ), reinterpret_cast( pImportFenceFdInfo ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -13875,7 +14164,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetFenceFdKHR( m_device, reinterpret_cast( pGetFdInfo ), pFd ) ); + return static_cast( d.vkGetFenceFdKHR( static_cast( m_device ), reinterpret_cast( pGetFdInfo ), pFd ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -13909,7 +14198,7 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( - d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( m_physicalDevice, + d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( static_cast( m_physicalDevice ), queueFamilyIndex, pCounterCount, reinterpret_cast( pCounters ), @@ -13917,7 +14206,13 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template ::value && + std::is_same::value, + int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType, std::vector>>::type @@ -14025,8 +14320,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( - m_physicalDevice, reinterpret_cast( pPerformanceQueryCreateInfo ), pNumPasses ); + d.vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( static_cast( m_physicalDevice ), + reinterpret_cast( pPerformanceQueryCreateInfo ), + pNumPasses ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -14053,7 +14349,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - 
return static_cast( d.vkAcquireProfilingLockKHR( m_device, reinterpret_cast( pInfo ) ) ); + return static_cast( + d.vkAcquireProfilingLockKHR( static_cast( m_device ), reinterpret_cast( pInfo ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -14078,7 +14375,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE void Device::releaseProfilingLockKHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkReleaseProfilingLockKHR( m_device ); + d.vkReleaseProfilingLockKHR( static_cast( m_device ) ); } //=== VK_KHR_get_surface_capabilities2 === @@ -14090,7 +14387,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( m_physicalDevice, + return static_cast( d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( static_cast( m_physicalDevice ), reinterpret_cast( pSurfaceInfo ), reinterpret_cast( pSurfaceCapabilities ) ) ); } @@ -14145,14 +14442,16 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice, + return static_cast( d.vkGetPhysicalDeviceSurfaceFormats2KHR( static_cast( m_physicalDevice ), reinterpret_cast( pSurfaceInfo ), pSurfaceFormatCount, reinterpret_cast( pSurfaceFormats ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template ::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const { @@ -14228,7 +14527,10 @@ namespace VULKAN_HPP_NAMESPACE return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surfaceFormats ) ); } - template + template ::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const { @@ -14334,12 +14636,15 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( - d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, pPropertyCount, reinterpret_cast( pProperties ) ) ); + return static_cast( d.vkGetPhysicalDeviceDisplayProperties2KHR( + static_cast( m_physicalDevice ), pPropertyCount, reinterpret_cast( pProperties ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template < + typename DisplayProperties2KHRAllocator, + typename Dispatch, + typename std::enable_if::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getDisplayProperties2KHR( Dispatch const & d ) const @@ -14415,12 +14720,16 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( - d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, pPropertyCount, reinterpret_cast( pProperties ) ) ); + return static_cast( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( + static_cast( m_physicalDevice ), pPropertyCount, reinterpret_cast( pProperties ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template < + typename 
DisplayPlaneProperties2KHRAllocator, + typename Dispatch, + typename std::enable_if::value, + int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getDisplayPlaneProperties2KHR( Dispatch const & d ) const @@ -14498,12 +14807,18 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetDisplayModeProperties2KHR( - m_physicalDevice, static_cast( display ), pPropertyCount, reinterpret_cast( pProperties ) ) ); + return static_cast( d.vkGetDisplayModeProperties2KHR( static_cast( m_physicalDevice ), + static_cast( display ), + pPropertyCount, + reinterpret_cast( pProperties ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template < + typename DisplayModeProperties2KHRAllocator, + typename Dispatch, + typename std::enable_if::value, + int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const @@ -14583,7 +14898,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetDisplayPlaneCapabilities2KHR( m_physicalDevice, + return static_cast( d.vkGetDisplayPlaneCapabilities2KHR( static_cast( m_physicalDevice ), reinterpret_cast( pDisplayPlaneInfo ), reinterpret_cast( pCapabilities ) ) ); } @@ -14619,7 +14934,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateIOSSurfaceMVK( m_instance, + return static_cast( d.vkCreateIOSSurfaceMVK( static_cast( m_instance ), reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pSurface ) ) ); @@ -14669,7 +14984,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createIOSSurfaceMVKUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( surface, ObjectDestroy( *this, allocator, d ) ) ); + result, UniqueHandle( surface, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -14685,7 +15000,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateMacOSSurfaceMVK( m_instance, + return static_cast( d.vkCreateMacOSSurfaceMVK( static_cast( m_instance ), reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pSurface ) ) ); @@ -14735,7 +15050,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createMacOSSurfaceMVKUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( surface, ObjectDestroy( *this, allocator, d ) ) ); + result, UniqueHandle( surface, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -14748,7 +15063,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkSetDebugUtilsObjectNameEXT( m_device, 
reinterpret_cast( pNameInfo ) ) ); + return static_cast( + d.vkSetDebugUtilsObjectNameEXT( static_cast( m_device ), reinterpret_cast( pNameInfo ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -14774,7 +15090,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkSetDebugUtilsObjectTagEXT( m_device, reinterpret_cast( pTagInfo ) ) ); + return static_cast( + d.vkSetDebugUtilsObjectTagEXT( static_cast( m_device ), reinterpret_cast( pTagInfo ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -14800,7 +15117,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkQueueBeginDebugUtilsLabelEXT( m_queue, reinterpret_cast( pLabelInfo ) ); + d.vkQueueBeginDebugUtilsLabelEXT( static_cast( m_queue ), reinterpret_cast( pLabelInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -14821,7 +15138,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE void Queue::endDebugUtilsLabelEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkQueueEndDebugUtilsLabelEXT( m_queue ); + d.vkQueueEndDebugUtilsLabelEXT( static_cast( m_queue ) ); } template @@ -14829,7 +15146,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkQueueInsertDebugUtilsLabelEXT( m_queue, reinterpret_cast( pLabelInfo ) ); + d.vkQueueInsertDebugUtilsLabelEXT( static_cast( m_queue ), reinterpret_cast( pLabelInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -14851,7 +15168,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdBeginDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast( pLabelInfo ) ); + d.vkCmdBeginDebugUtilsLabelEXT( static_cast( m_commandBuffer ), reinterpret_cast( pLabelInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -14872,7 +15189,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE void CommandBuffer::endDebugUtilsLabelEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdEndDebugUtilsLabelEXT( m_commandBuffer ); + d.vkCmdEndDebugUtilsLabelEXT( static_cast( m_commandBuffer ) ); } template @@ -14880,7 +15197,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdInsertDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast( pLabelInfo ) ); + d.vkCmdInsertDebugUtilsLabelEXT( static_cast( m_commandBuffer ), reinterpret_cast( pLabelInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -14905,7 +15222,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateDebugUtilsMessengerEXT( m_instance, + return static_cast( d.vkCreateDebugUtilsMessengerEXT( static_cast( m_instance ), reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pMessenger ) ) ); @@ -14955,7 +15272,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugUtilsMessengerEXTUnique" ); return 
   [Vulkan-Hpp header hunks, abridged: this stretch of the vendored header update touches the
    generated Instance / Device / CommandBuffer / Queue / PhysicalDevice wrappers and applies the
    same few mechanical changes throughout the dispatch code:
      - every dispatched handle member is now passed through an explicit C-handle cast
        (static_cast<VkInstance>( m_instance ), static_cast<VkDevice>( m_device ),
        static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkQueue>( m_queue ),
        static_cast<VkPhysicalDevice>( m_physicalDevice )) instead of relying on the handle's
        implicit conversion operator;
      - the ObjectDestroy / ObjectRelease deleters used by the UniqueHandle factory functions
        are now qualified as detail::ObjectDestroy / detail::ObjectRelease;
      - the allocator template parameters of the enhanced-mode overloads gain std::enable_if
        constraints on the allocator's value_type;
      - the VK_AMDX_shader_enqueue commands (initializeGraphScratchMemoryAMDX, dispatchGraphAMDX,
        dispatchGraphIndirectAMDX, dispatchGraphIndirectCountAMDX) take an additional scratchSize
        parameter, and initializeGraphScratchMemoryAMDX additionally takes the executionGraph
        pipeline;
      - CommandBuffer::writeBufferMarker2AMD and Queue::getCheckpointData2NV, together with their
        enhanced-mode overloads, are added.]
VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetDepthCompareOpEXT( m_commandBuffer, static_cast( depthCompareOp ) ); + d.vkCmdSetDepthCompareOpEXT( static_cast( m_commandBuffer ), static_cast( depthCompareOp ) ); } template @@ -19961,14 +20487,14 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetDepthBoundsTestEnableEXT( m_commandBuffer, static_cast( depthBoundsTestEnable ) ); + d.vkCmdSetDepthBoundsTestEnableEXT( static_cast( m_commandBuffer ), static_cast( depthBoundsTestEnable ) ); } template VULKAN_HPP_INLINE void CommandBuffer::setStencilTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetStencilTestEnableEXT( m_commandBuffer, static_cast( stencilTestEnable ) ); + d.vkCmdSetStencilTestEnableEXT( static_cast( m_commandBuffer ), static_cast( stencilTestEnable ) ); } template @@ -19980,7 +20506,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetStencilOpEXT( m_commandBuffer, + d.vkCmdSetStencilOpEXT( static_cast( m_commandBuffer ), static_cast( faceMask ), static_cast( failOp ), static_cast( passOp ), @@ -19996,8 +20522,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateDeferredOperationKHR( - m_device, reinterpret_cast( pAllocator ), reinterpret_cast( pDeferredOperation ) ) ); + return static_cast( d.vkCreateDeferredOperationKHR( static_cast( m_device ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pDeferredOperation ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -20038,7 +20565,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDeferredOperationKHRUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( deferredOperation, ObjectDestroy( *this, allocator, d ) ) ); + result, + UniqueHandle( deferredOperation, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -20050,7 +20578,7 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkDestroyDeferredOperationKHR( - m_device, static_cast( operation ), reinterpret_cast( pAllocator ) ); + static_cast( m_device ), static_cast( operation ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -20078,7 +20606,7 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkDestroyDeferredOperationKHR( - m_device, static_cast( operation ), reinterpret_cast( pAllocator ) ); + static_cast( m_device ), static_cast( operation ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -20104,7 +20632,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return d.vkGetDeferredOperationMaxConcurrencyKHR( m_device, static_cast( operation ) ); + return 
d.vkGetDeferredOperationMaxConcurrencyKHR( static_cast( m_device ), static_cast( operation ) ); } #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -20113,7 +20641,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetDeferredOperationResultKHR( m_device, static_cast( operation ) ) ); + return static_cast( d.vkGetDeferredOperationResultKHR( static_cast( m_device ), static_cast( operation ) ) ); } #else template @@ -20138,7 +20666,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkDeferredOperationJoinKHR( m_device, static_cast( operation ) ) ); + return static_cast( d.vkDeferredOperationJoinKHR( static_cast( m_device ), static_cast( operation ) ) ); } #else template @@ -20170,14 +20698,18 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetPipelineExecutablePropertiesKHR( m_device, + return static_cast( d.vkGetPipelineExecutablePropertiesKHR( static_cast( m_device ), reinterpret_cast( pPipelineInfo ), pExecutableCount, reinterpret_cast( pProperties ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template ::value, + int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type Device::getPipelineExecutablePropertiesKHR( const VULKAN_HPP_NAMESPACE::PipelineInfoKHR & pipelineInfo, Dispatch const & d ) const @@ -20267,14 +20799,18 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetPipelineExecutableStatisticsKHR( m_device, + return static_cast( d.vkGetPipelineExecutableStatisticsKHR( static_cast( m_device ), reinterpret_cast( pExecutableInfo ), pStatisticCount, reinterpret_cast( pStatistics ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template ::value, + int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type Device::getPipelineExecutableStatisticsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo, Dispatch const & d ) const @@ -20365,14 +20901,18 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( - d.vkGetPipelineExecutableInternalRepresentationsKHR( m_device, + d.vkGetPipelineExecutableInternalRepresentationsKHR( static_cast( m_device ), reinterpret_cast( pExecutableInfo ), pInternalRepresentationCount, reinterpret_cast( pInternalRepresentations ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template ::value, + int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType< std::vector>::type Device::getPipelineExecutableInternalRepresentationsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo, Dispatch const & d ) const @@ -20463,7 +21003,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCopyMemoryToImageEXT( m_device, reinterpret_cast( pCopyMemoryToImageInfo ) ) ); + return static_cast( + d.vkCopyMemoryToImageEXT( static_cast( m_device ), reinterpret_cast( pCopyMemoryToImageInfo ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -20489,7 +21030,8 
@@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCopyImageToMemoryEXT( m_device, reinterpret_cast( pCopyImageToMemoryInfo ) ) ); + return static_cast( + d.vkCopyImageToMemoryEXT( static_cast( m_device ), reinterpret_cast( pCopyImageToMemoryInfo ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -20515,7 +21057,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCopyImageToImageEXT( m_device, reinterpret_cast( pCopyImageToImageInfo ) ) ); + return static_cast( + d.vkCopyImageToImageEXT( static_cast( m_device ), reinterpret_cast( pCopyImageToImageInfo ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -20542,8 +21085,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( - d.vkTransitionImageLayoutEXT( m_device, transitionCount, reinterpret_cast( pTransitions ) ) ); + return static_cast( d.vkTransitionImageLayoutEXT( + static_cast( m_device ), transitionCount, reinterpret_cast( pTransitions ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -20572,7 +21115,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetImageSubresourceLayout2EXT( m_device, + d.vkGetImageSubresourceLayout2EXT( static_cast( m_device ), static_cast( image ), reinterpret_cast( pSubresource ), reinterpret_cast( pLayout ) ); @@ -20629,7 +21172,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkMapMemory2KHR( m_device, reinterpret_cast( pMemoryMapInfo ), ppData ) ); + return static_cast( + d.vkMapMemory2KHR( static_cast( m_device ), reinterpret_cast( pMemoryMapInfo ), ppData ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -20656,7 +21200,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkUnmapMemory2KHR( m_device, reinterpret_cast( pMemoryUnmapInfo ) ) ); + return static_cast( d.vkUnmapMemory2KHR( static_cast( m_device ), reinterpret_cast( pMemoryUnmapInfo ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -20684,7 +21228,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkReleaseSwapchainImagesEXT( m_device, reinterpret_cast( pReleaseInfo ) ) ); + return static_cast( + d.vkReleaseSwapchainImagesEXT( static_cast( m_device ), reinterpret_cast( pReleaseInfo ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -20713,7 +21258,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetGeneratedCommandsMemoryRequirementsNV( m_device, + d.vkGetGeneratedCommandsMemoryRequirementsNV( static_cast( m_device ), reinterpret_cast( pInfo ), reinterpret_cast( pMemoryRequirements ) ); } @@ -20764,7 +21309,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdPreprocessGeneratedCommandsNV( 
m_commandBuffer, reinterpret_cast( pGeneratedCommandsInfo ) ); + d.vkCmdPreprocessGeneratedCommandsNV( static_cast( m_commandBuffer ), + reinterpret_cast( pGeneratedCommandsInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -20787,8 +21333,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdExecuteGeneratedCommandsNV( - m_commandBuffer, static_cast( isPreprocessed ), reinterpret_cast( pGeneratedCommandsInfo ) ); + d.vkCmdExecuteGeneratedCommandsNV( static_cast( m_commandBuffer ), + static_cast( isPreprocessed ), + reinterpret_cast( pGeneratedCommandsInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -20814,7 +21361,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdBindPipelineShaderGroupNV( m_commandBuffer, static_cast( pipelineBindPoint ), static_cast( pipeline ), groupIndex ); + d.vkCmdBindPipelineShaderGroupNV( + static_cast( m_commandBuffer ), static_cast( pipelineBindPoint ), static_cast( pipeline ), groupIndex ); } template @@ -20825,7 +21373,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateIndirectCommandsLayoutNV( m_device, + return static_cast( d.vkCreateIndirectCommandsLayoutNV( static_cast( m_device ), reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pIndirectCommandsLayout ) ) ); @@ -20876,7 +21424,7 @@ namespace VULKAN_HPP_NAMESPACE return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, UniqueHandle( - indirectCommandsLayout, ObjectDestroy( *this, allocator, d ) ) ); + indirectCommandsLayout, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -20887,8 +21435,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyIndirectCommandsLayoutNV( - m_device, static_cast( indirectCommandsLayout ), reinterpret_cast( pAllocator ) ); + d.vkDestroyIndirectCommandsLayoutNV( static_cast( m_device ), + static_cast( indirectCommandsLayout ), + reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -20915,8 +21464,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyIndirectCommandsLayoutNV( - m_device, static_cast( indirectCommandsLayout ), reinterpret_cast( pAllocator ) ); + d.vkDestroyIndirectCommandsLayoutNV( static_cast( m_device ), + static_cast( indirectCommandsLayout ), + reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -20944,7 +21494,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetDepthBias2EXT( m_commandBuffer, reinterpret_cast( pDepthBiasInfo ) ); + d.vkCmdSetDepthBias2EXT( static_cast( m_commandBuffer ), reinterpret_cast( pDepthBiasInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -20970,7 +21520,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkAcquireDrmDisplayEXT( 
m_physicalDevice, drmFd, static_cast( display ) ) ); + return static_cast( d.vkAcquireDrmDisplayEXT( static_cast( m_physicalDevice ), drmFd, static_cast( display ) ) ); } #else template @@ -20997,7 +21547,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetDrmDisplayEXT( m_physicalDevice, drmFd, connectorId, reinterpret_cast( display ) ) ); + return static_cast( + d.vkGetDrmDisplayEXT( static_cast( m_physicalDevice ), drmFd, connectorId, reinterpret_cast( display ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -21034,7 +21585,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDrmDisplayEXTUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( display, ObjectRelease( *this, d ) ) ); + result, UniqueHandle( display, detail::ObjectRelease( *this, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -21048,7 +21599,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreatePrivateDataSlotEXT( m_device, + return static_cast( d.vkCreatePrivateDataSlotEXT( static_cast( m_device ), reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pPrivateDataSlot ) ) ); @@ -21098,7 +21649,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createPrivateDataSlotEXTUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( privateDataSlot, ObjectDestroy( *this, allocator, d ) ) ); + result, + UniqueHandle( privateDataSlot, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -21109,7 +21661,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyPrivateDataSlotEXT( m_device, static_cast( privateDataSlot ), reinterpret_cast( pAllocator ) ); + d.vkDestroyPrivateDataSlotEXT( + static_cast( m_device ), static_cast( privateDataSlot ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -21139,8 +21692,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( - d.vkSetPrivateDataEXT( m_device, static_cast( objectType_ ), objectHandle, static_cast( privateDataSlot ), data ) ); + return static_cast( d.vkSetPrivateDataEXT( + static_cast( m_device ), static_cast( objectType_ ), objectHandle, static_cast( privateDataSlot ), data ) ); } #else template @@ -21171,7 +21724,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetPrivateDataEXT( m_device, static_cast( objectType_ ), objectHandle, static_cast( privateDataSlot ), pData ); + d.vkGetPrivateDataEXT( + static_cast( m_device ), static_cast( objectType_ ), objectHandle, static_cast( privateDataSlot ), pData ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -21203,7 +21757,7 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 
return static_cast( - d.vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR( m_physicalDevice, + d.vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR( static_cast( m_physicalDevice ), reinterpret_cast( pQualityLevelInfo ), reinterpret_cast( pQualityLevelProperties ) ) ); } @@ -21264,7 +21818,7 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( - d.vkGetEncodedVideoSessionParametersKHR( m_device, + d.vkGetEncodedVideoSessionParametersKHR( static_cast( m_device ), reinterpret_cast( pVideoSessionParametersInfo ), reinterpret_cast( pFeedbackInfo ), pDataSize, @@ -21272,7 +21826,9 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template ::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>>::type Device::getEncodedVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR & videoSessionParametersInfo, @@ -21356,7 +21912,12 @@ namespace VULKAN_HPP_NAMESPACE return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data_ ) ); } - template + template ::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType, std::vector>>::type Device::getEncodedVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR & videoSessionParametersInfo, @@ -21451,7 +22012,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdEncodeVideoKHR( m_commandBuffer, reinterpret_cast( pEncodeInfo ) ); + d.vkCmdEncodeVideoKHR( static_cast( m_commandBuffer ), reinterpret_cast( pEncodeInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -21478,7 +22039,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateCudaModuleNV( m_device, + return static_cast( d.vkCreateCudaModuleNV( static_cast( m_device ), reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pModule ) ) ); @@ -21528,7 +22089,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCudaModuleNVUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( module, ObjectDestroy( *this, allocator, d ) ) ); + result, UniqueHandle( module, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -21540,11 +22101,13 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetCudaModuleCacheNV( m_device, static_cast( module ), pCacheSize, pCacheData ) ); + return static_cast( d.vkGetCudaModuleCacheNV( static_cast( m_device ), static_cast( module ), pCacheSize, pCacheData ) ); } # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template ::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type Device::getCudaModuleCacheNV( VULKAN_HPP_NAMESPACE::CudaModuleNV module, Dispatch const & d ) const { @@ -21616,7 +22179,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateCudaFunctionNV( 
m_device, + return static_cast( d.vkCreateCudaFunctionNV( static_cast( m_device ), reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pFunction ) ) ); @@ -21666,7 +22229,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCudaFunctionNVUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( function, ObjectDestroy( *this, allocator, d ) ) ); + result, UniqueHandle( function, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -21677,7 +22240,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyCudaModuleNV( m_device, static_cast( module ), reinterpret_cast( pAllocator ) ); + d.vkDestroyCudaModuleNV( + static_cast( m_device ), static_cast( module ), reinterpret_cast( pAllocator ) ); } # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -21703,7 +22267,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyCudaModuleNV( m_device, static_cast( module ), reinterpret_cast( pAllocator ) ); + d.vkDestroyCudaModuleNV( + static_cast( m_device ), static_cast( module ), reinterpret_cast( pAllocator ) ); } # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -21729,7 +22294,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyCudaFunctionNV( m_device, static_cast( function ), reinterpret_cast( pAllocator ) ); + d.vkDestroyCudaFunctionNV( + static_cast( m_device ), static_cast( function ), reinterpret_cast( pAllocator ) ); } # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -21755,7 +22321,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyCudaFunctionNV( m_device, static_cast( function ), reinterpret_cast( pAllocator ) ); + d.vkDestroyCudaFunctionNV( + static_cast( m_device ), static_cast( function ), reinterpret_cast( pAllocator ) ); } # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -21780,7 +22347,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdCudaLaunchKernelNV( m_commandBuffer, reinterpret_cast( pLaunchInfo ) ); + d.vkCmdCudaLaunchKernelNV( static_cast( m_commandBuffer ), reinterpret_cast( pLaunchInfo ) ); } # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -21806,7 +22373,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkExportMetalObjectsEXT( m_device, reinterpret_cast( pMetalObjectsInfo ) ); + d.vkExportMetalObjectsEXT( static_cast( m_device ), reinterpret_cast( pMetalObjectsInfo ) ); } # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -21851,7 +22418,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetEvent2KHR( m_commandBuffer, static_cast( event ), reinterpret_cast( pDependencyInfo ) ); + d.vkCmdSetEvent2KHR( + static_cast( m_commandBuffer ), static_cast( event ), reinterpret_cast( pDependencyInfo ) ); } #ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -21875,7 +22443,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdResetEvent2KHR( m_commandBuffer, static_cast( event ), static_cast( stageMask ) ); + d.vkCmdResetEvent2KHR( static_cast( m_commandBuffer ), static_cast( event ), static_cast( stageMask ) ); } template @@ -21885,8 +22453,10 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdWaitEvents2KHR( - m_commandBuffer, eventCount, reinterpret_cast( pEvents ), reinterpret_cast( pDependencyInfos ) ); + d.vkCmdWaitEvents2KHR( static_cast( m_commandBuffer ), + eventCount, + reinterpret_cast( pEvents ), + reinterpret_cast( pDependencyInfos ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -21920,7 +22490,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdPipelineBarrier2KHR( m_commandBuffer, reinterpret_cast( pDependencyInfo ) ); + d.vkCmdPipelineBarrier2KHR( static_cast( m_commandBuffer ), reinterpret_cast( pDependencyInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -21944,7 +22514,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdWriteTimestamp2KHR( m_commandBuffer, static_cast( stage ), static_cast( queryPool ), query ); + d.vkCmdWriteTimestamp2KHR( + static_cast( m_commandBuffer ), static_cast( stage ), static_cast( queryPool ), query ); } template @@ -21955,7 +22526,7 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( - d.vkQueueSubmit2KHR( m_queue, submitCount, reinterpret_cast( pSubmits ), static_cast( fence ) ) ); + d.vkQueueSubmit2KHR( static_cast( m_queue ), submitCount, reinterpret_cast( pSubmits ), static_cast( fence ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -21976,77 +22547,6 @@ namespace VULKAN_HPP_NAMESPACE } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - template - VULKAN_HPP_INLINE void CommandBuffer::writeBufferMarker2AMD( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage, - VULKAN_HPP_NAMESPACE::Buffer dstBuffer, - VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, - uint32_t marker, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT - { - VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdWriteBufferMarker2AMD( - m_commandBuffer, static_cast( stage ), static_cast( dstBuffer ), static_cast( dstOffset ), marker ); - } - - template - VULKAN_HPP_INLINE void Queue::getCheckpointData2NV( uint32_t * pCheckpointDataCount, - VULKAN_HPP_NAMESPACE::CheckpointData2NV * pCheckpointData, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT - { - VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetQueueCheckpointData2NV( m_queue, pCheckpointDataCount, reinterpret_cast( pCheckpointData ) ); - } - -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector - Queue::getCheckpointData2NV( Dispatch const & d ) const - { - VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); -# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) - VULKAN_HPP_ASSERT( d.vkGetQueueCheckpointData2NV && "Function requires " ); -# endif - - std::vector checkpointData; - uint32_t checkpointDataCount; - 
d.vkGetQueueCheckpointData2NV( m_queue, &checkpointDataCount, nullptr ); - checkpointData.resize( checkpointDataCount ); - d.vkGetQueueCheckpointData2NV( m_queue, &checkpointDataCount, reinterpret_cast( checkpointData.data() ) ); - - VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() ); - if ( checkpointDataCount < checkpointData.size() ) - { - checkpointData.resize( checkpointDataCount ); - } - return checkpointData; - } - - template ::value, int>::type> - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector - Queue::getCheckpointData2NV( CheckpointData2NVAllocator & checkpointData2NVAllocator, Dispatch const & d ) const - { - VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); -# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) - VULKAN_HPP_ASSERT( d.vkGetQueueCheckpointData2NV && "Function requires " ); -# endif - - std::vector checkpointData( checkpointData2NVAllocator ); - uint32_t checkpointDataCount; - d.vkGetQueueCheckpointData2NV( m_queue, &checkpointDataCount, nullptr ); - checkpointData.resize( checkpointDataCount ); - d.vkGetQueueCheckpointData2NV( m_queue, &checkpointDataCount, reinterpret_cast( checkpointData.data() ) ); - - VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() ); - if ( checkpointDataCount < checkpointData.size() ) - { - checkpointData.resize( checkpointDataCount ); - } - return checkpointData; - } -#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - //=== VK_EXT_descriptor_buffer === template @@ -22055,7 +22555,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetDescriptorSetLayoutSizeEXT( m_device, static_cast( layout ), reinterpret_cast( pLayoutSizeInBytes ) ); + d.vkGetDescriptorSetLayoutSizeEXT( + static_cast( m_device ), static_cast( layout ), reinterpret_cast( pLayoutSizeInBytes ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -22082,7 +22583,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetDescriptorSetLayoutBindingOffsetEXT( m_device, static_cast( layout ), binding, reinterpret_cast( pOffset ) ); + d.vkGetDescriptorSetLayoutBindingOffsetEXT( + static_cast( m_device ), static_cast( layout ), binding, reinterpret_cast( pOffset ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -22110,7 +22612,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetDescriptorEXT( m_device, reinterpret_cast( pDescriptorInfo ), dataSize, pDescriptor ); + d.vkGetDescriptorEXT( static_cast( m_device ), reinterpret_cast( pDescriptorInfo ), dataSize, pDescriptor ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -22151,7 +22653,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdBindDescriptorBuffersEXT( m_commandBuffer, bufferCount, reinterpret_cast( pBindingInfos ) ); + d.vkCmdBindDescriptorBuffersEXT( + static_cast( m_commandBuffer ), bufferCount, reinterpret_cast( pBindingInfos ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -22179,7 +22682,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetDescriptorBufferOffsetsEXT( m_commandBuffer, + d.vkCmdSetDescriptorBufferOffsetsEXT( static_cast( m_commandBuffer ), 
static_cast( pipelineBindPoint ), static_cast( layout ), firstSet, @@ -22228,7 +22731,7 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdBindDescriptorBufferEmbeddedSamplersEXT( - m_commandBuffer, static_cast( pipelineBindPoint ), static_cast( layout ), set ); + static_cast( m_commandBuffer ), static_cast( pipelineBindPoint ), static_cast( layout ), set ); } template @@ -22236,8 +22739,8 @@ namespace VULKAN_HPP_NAMESPACE const VULKAN_HPP_NAMESPACE::BufferCaptureDescriptorDataInfoEXT * pInfo, void * pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( - d.vkGetBufferOpaqueCaptureDescriptorDataEXT( m_device, reinterpret_cast( pInfo ), pData ) ); + return static_cast( d.vkGetBufferOpaqueCaptureDescriptorDataEXT( + static_cast( m_device ), reinterpret_cast( pInfo ), pData ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -22265,8 +22768,8 @@ namespace VULKAN_HPP_NAMESPACE const VULKAN_HPP_NAMESPACE::ImageCaptureDescriptorDataInfoEXT * pInfo, void * pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( - d.vkGetImageOpaqueCaptureDescriptorDataEXT( m_device, reinterpret_cast( pInfo ), pData ) ); + return static_cast( d.vkGetImageOpaqueCaptureDescriptorDataEXT( + static_cast( m_device ), reinterpret_cast( pInfo ), pData ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -22294,8 +22797,8 @@ namespace VULKAN_HPP_NAMESPACE const VULKAN_HPP_NAMESPACE::ImageViewCaptureDescriptorDataInfoEXT * pInfo, void * pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( - d.vkGetImageViewOpaqueCaptureDescriptorDataEXT( m_device, reinterpret_cast( pInfo ), pData ) ); + return static_cast( d.vkGetImageViewOpaqueCaptureDescriptorDataEXT( + static_cast( m_device ), reinterpret_cast( pInfo ), pData ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -22323,8 +22826,8 @@ namespace VULKAN_HPP_NAMESPACE const VULKAN_HPP_NAMESPACE::SamplerCaptureDescriptorDataInfoEXT * pInfo, void * pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( - d.vkGetSamplerOpaqueCaptureDescriptorDataEXT( m_device, reinterpret_cast( pInfo ), pData ) ); + return static_cast( d.vkGetSamplerOpaqueCaptureDescriptorDataEXT( + static_cast( m_device ), reinterpret_cast( pInfo ), pData ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -22353,7 +22856,7 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( d.vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT( - m_device, reinterpret_cast( pInfo ), pData ) ); + static_cast( m_device ), reinterpret_cast( pInfo ), pData ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -22385,8 +22888,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetFragmentShadingRateEnumNV( - m_commandBuffer, static_cast( shadingRate ), reinterpret_cast( combinerOps ) ); + d.vkCmdSetFragmentShadingRateEnumNV( static_cast( m_commandBuffer ), + static_cast( shadingRate ), + reinterpret_cast( combinerOps ) ); } //=== VK_EXT_mesh_shader === @@ -22396,7 +22900,7 @@ namespace VULKAN_HPP_NAMESPACE CommandBuffer::drawMeshTasksEXT( uint32_t 
groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdDrawMeshTasksEXT( m_commandBuffer, groupCountX, groupCountY, groupCountZ ); + d.vkCmdDrawMeshTasksEXT( static_cast( m_commandBuffer ), groupCountX, groupCountY, groupCountZ ); } template @@ -22407,7 +22911,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdDrawMeshTasksIndirectEXT( m_commandBuffer, static_cast( buffer ), static_cast( offset ), drawCount, stride ); + d.vkCmdDrawMeshTasksIndirectEXT( + static_cast( m_commandBuffer ), static_cast( buffer ), static_cast( offset ), drawCount, stride ); } template @@ -22420,7 +22925,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdDrawMeshTasksIndirectCountEXT( m_commandBuffer, + d.vkCmdDrawMeshTasksIndirectCountEXT( static_cast( m_commandBuffer ), static_cast( buffer ), static_cast( offset ), static_cast( countBuffer ), @@ -22436,7 +22941,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdCopyBuffer2KHR( m_commandBuffer, reinterpret_cast( pCopyBufferInfo ) ); + d.vkCmdCopyBuffer2KHR( static_cast( m_commandBuffer ), reinterpret_cast( pCopyBufferInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -22458,7 +22963,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdCopyImage2KHR( m_commandBuffer, reinterpret_cast( pCopyImageInfo ) ); + d.vkCmdCopyImage2KHR( static_cast( m_commandBuffer ), reinterpret_cast( pCopyImageInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -22480,7 +22985,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdCopyBufferToImage2KHR( m_commandBuffer, reinterpret_cast( pCopyBufferToImageInfo ) ); + d.vkCmdCopyBufferToImage2KHR( static_cast( m_commandBuffer ), + reinterpret_cast( pCopyBufferToImageInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -22502,7 +23008,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdCopyImageToBuffer2KHR( m_commandBuffer, reinterpret_cast( pCopyImageToBufferInfo ) ); + d.vkCmdCopyImageToBuffer2KHR( static_cast( m_commandBuffer ), + reinterpret_cast( pCopyImageToBufferInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -22524,7 +23031,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdBlitImage2KHR( m_commandBuffer, reinterpret_cast( pBlitImageInfo ) ); + d.vkCmdBlitImage2KHR( static_cast( m_commandBuffer ), reinterpret_cast( pBlitImageInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -22546,7 +23053,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdResolveImage2KHR( m_commandBuffer, reinterpret_cast( pResolveImageInfo ) ); + d.vkCmdResolveImage2KHR( static_cast( m_commandBuffer ), reinterpret_cast( pResolveImageInfo ) ); } #ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -22572,7 +23079,7 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( d.vkGetDeviceFaultInfoEXT( - m_device, reinterpret_cast( pFaultCounts ), reinterpret_cast( pFaultInfo ) ) ); + static_cast( m_device ), reinterpret_cast( pFaultCounts ), reinterpret_cast( pFaultInfo ) ) ); } #if defined( VK_USE_PLATFORM_WIN32_KHR ) //=== VK_NV_acquire_winrt_display === @@ -22583,7 +23090,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkAcquireWinrtDisplayNV( m_physicalDevice, static_cast( display ) ) ); + return static_cast( d.vkAcquireWinrtDisplayNV( static_cast( m_physicalDevice ), static_cast( display ) ) ); } # else template @@ -22609,7 +23116,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetWinrtDisplayNV( m_physicalDevice, deviceRelativeId, reinterpret_cast( pDisplay ) ) ); + return static_cast( + d.vkGetWinrtDisplayNV( static_cast( m_physicalDevice ), deviceRelativeId, reinterpret_cast( pDisplay ) ) ); } # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -22646,7 +23154,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getWinrtDisplayNVUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( display, ObjectRelease( *this, d ) ) ); + result, UniqueHandle( display, detail::ObjectRelease( *this, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -22662,7 +23170,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateDirectFBSurfaceEXT( m_instance, + return static_cast( d.vkCreateDirectFBSurfaceEXT( static_cast( m_instance ), reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pSurface ) ) ); @@ -22712,7 +23220,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDirectFBSurfaceEXTUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( surface, ObjectDestroy( *this, allocator, d ) ) ); + result, UniqueHandle( surface, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -22723,7 +23231,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetPhysicalDeviceDirectFBPresentationSupportEXT( m_physicalDevice, queueFamilyIndex, dfb ) ); + return static_cast( + d.vkGetPhysicalDeviceDirectFBPresentationSupportEXT( static_cast( m_physicalDevice ), queueFamilyIndex, dfb ) ); } # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -22754,7 +23263,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetVertexInputEXT( m_commandBuffer, + d.vkCmdSetVertexInputEXT( static_cast( m_commandBuffer ), vertexBindingDescriptionCount, reinterpret_cast( pVertexBindingDescriptions ), vertexAttributeDescriptionCount, @@ 
-22791,8 +23300,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( - d.vkGetMemoryZirconHandleFUCHSIA( m_device, reinterpret_cast( pGetZirconHandleInfo ), pZirconHandle ) ); + return static_cast( d.vkGetMemoryZirconHandleFUCHSIA( + static_cast( m_device ), reinterpret_cast( pGetZirconHandleInfo ), pZirconHandle ) ); } # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -22823,7 +23332,7 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( - d.vkGetMemoryZirconHandlePropertiesFUCHSIA( m_device, + d.vkGetMemoryZirconHandlePropertiesFUCHSIA( static_cast( m_device ), static_cast( handleType ), zirconHandle, reinterpret_cast( pMemoryZirconHandleProperties ) ) ); @@ -22864,7 +23373,7 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( d.vkImportSemaphoreZirconHandleFUCHSIA( - m_device, reinterpret_cast( pImportSemaphoreZirconHandleInfo ) ) ); + static_cast( m_device ), reinterpret_cast( pImportSemaphoreZirconHandleInfo ) ) ); } # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -22893,8 +23402,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( - d.vkGetSemaphoreZirconHandleFUCHSIA( m_device, reinterpret_cast( pGetZirconHandleInfo ), pZirconHandle ) ); + return static_cast( d.vkGetSemaphoreZirconHandleFUCHSIA( + static_cast( m_device ), reinterpret_cast( pGetZirconHandleInfo ), pZirconHandle ) ); } # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -22928,7 +23437,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateBufferCollectionFUCHSIA( m_device, + return static_cast( d.vkCreateBufferCollectionFUCHSIA( static_cast( m_device ), reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pCollection ) ) ); @@ -22978,7 +23487,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferCollectionFUCHSIAUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( collection, ObjectDestroy( *this, allocator, d ) ) ); + result, + UniqueHandle( collection, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -22990,8 +23500,10 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkSetBufferCollectionImageConstraintsFUCHSIA( - m_device, static_cast( collection ), reinterpret_cast( pImageConstraintsInfo ) ) ); + return static_cast( + d.vkSetBufferCollectionImageConstraintsFUCHSIA( static_cast( m_device ), + static_cast( collection ), + reinterpret_cast( pImageConstraintsInfo ) ) ); } # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -23022,8 +23534,10 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkSetBufferCollectionBufferConstraintsFUCHSIA( - m_device, static_cast( collection ), reinterpret_cast( pBufferConstraintsInfo ) ) ); + return static_cast( + 
d.vkSetBufferCollectionBufferConstraintsFUCHSIA( static_cast( m_device ), + static_cast( collection ), + reinterpret_cast( pBufferConstraintsInfo ) ) ); } # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -23054,7 +23568,7 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkDestroyBufferCollectionFUCHSIA( - m_device, static_cast( collection ), reinterpret_cast( pAllocator ) ); + static_cast( m_device ), static_cast( collection ), reinterpret_cast( pAllocator ) ); } # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -23082,7 +23596,7 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkDestroyBufferCollectionFUCHSIA( - m_device, static_cast( collection ), reinterpret_cast( pAllocator ) ); + static_cast( m_device ), static_cast( collection ), reinterpret_cast( pAllocator ) ); } # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -23110,8 +23624,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetBufferCollectionPropertiesFUCHSIA( - m_device, static_cast( collection ), reinterpret_cast( pProperties ) ) ); + return static_cast( d.vkGetBufferCollectionPropertiesFUCHSIA( static_cast( m_device ), + static_cast( collection ), + reinterpret_cast( pProperties ) ) ); } # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -23144,7 +23659,7 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( d.vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI( - m_device, static_cast( renderpass ), reinterpret_cast( pMaxWorkgroupSize ) ) ); + static_cast( m_device ), static_cast( renderpass ), reinterpret_cast( pMaxWorkgroupSize ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -23171,7 +23686,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE void CommandBuffer::subpassShadingHUAWEI( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSubpassShadingHUAWEI( m_commandBuffer ); + d.vkCmdSubpassShadingHUAWEI( static_cast( m_commandBuffer ) ); } //=== VK_HUAWEI_invocation_mask === @@ -23182,7 +23697,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdBindInvocationMaskHUAWEI( m_commandBuffer, static_cast( imageView ), static_cast( imageLayout ) ); + d.vkCmdBindInvocationMaskHUAWEI( + static_cast( m_commandBuffer ), static_cast( imageView ), static_cast( imageLayout ) ); } //=== VK_NV_external_memory_rdma === @@ -23194,8 +23710,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetMemoryRemoteAddressNV( - m_device, reinterpret_cast( pMemoryGetRemoteAddressInfo ), reinterpret_cast( pAddress ) ) ); + return static_cast( d.vkGetMemoryRemoteAddressNV( static_cast( m_device ), + reinterpret_cast( pMemoryGetRemoteAddressInfo ), + reinterpret_cast( pAddress ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -23225,8 +23742,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetPipelinePropertiesEXT( - m_device, reinterpret_cast( pPipelineInfo ), reinterpret_cast( pPipelineProperties ) ) ); + return static_cast( 
d.vkGetPipelinePropertiesEXT( static_cast( m_device ), + reinterpret_cast( pPipelineInfo ), + reinterpret_cast( pPipelineProperties ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -23254,7 +23772,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE void CommandBuffer::setPatchControlPointsEXT( uint32_t patchControlPoints, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetPatchControlPointsEXT( m_commandBuffer, patchControlPoints ); + d.vkCmdSetPatchControlPointsEXT( static_cast( m_commandBuffer ), patchControlPoints ); } template @@ -23262,21 +23780,21 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetRasterizerDiscardEnableEXT( m_commandBuffer, static_cast( rasterizerDiscardEnable ) ); + d.vkCmdSetRasterizerDiscardEnableEXT( static_cast( m_commandBuffer ), static_cast( rasterizerDiscardEnable ) ); } template VULKAN_HPP_INLINE void CommandBuffer::setDepthBiasEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetDepthBiasEnableEXT( m_commandBuffer, static_cast( depthBiasEnable ) ); + d.vkCmdSetDepthBiasEnableEXT( static_cast( m_commandBuffer ), static_cast( depthBiasEnable ) ); } template VULKAN_HPP_INLINE void CommandBuffer::setLogicOpEXT( VULKAN_HPP_NAMESPACE::LogicOp logicOp, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetLogicOpEXT( m_commandBuffer, static_cast( logicOp ) ); + d.vkCmdSetLogicOpEXT( static_cast( m_commandBuffer ), static_cast( logicOp ) ); } template @@ -23284,7 +23802,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetPrimitiveRestartEnableEXT( m_commandBuffer, static_cast( primitiveRestartEnable ) ); + d.vkCmdSetPrimitiveRestartEnableEXT( static_cast( m_commandBuffer ), static_cast( primitiveRestartEnable ) ); } #if defined( VK_USE_PLATFORM_SCREEN_QNX ) @@ -23297,7 +23815,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateScreenSurfaceQNX( m_instance, + return static_cast( d.vkCreateScreenSurfaceQNX( static_cast( m_instance ), reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pSurface ) ) ); @@ -23347,7 +23865,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createScreenSurfaceQNXUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( surface, ObjectDestroy( *this, allocator, d ) ) ); + result, UniqueHandle( surface, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -23358,7 +23876,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetPhysicalDeviceScreenPresentationSupportQNX( m_physicalDevice, queueFamilyIndex, window ) ); + return static_cast( + d.vkGetPhysicalDeviceScreenPresentationSupportQNX( static_cast( m_physicalDevice ), queueFamilyIndex, window ) ); } # ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -23387,7 +23906,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetColorWriteEnableEXT( m_commandBuffer, attachmentCount, reinterpret_cast( pColorWriteEnables ) ); + d.vkCmdSetColorWriteEnableEXT( static_cast( m_commandBuffer ), attachmentCount, reinterpret_cast( pColorWriteEnables ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -23411,7 +23930,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdTraceRaysIndirect2KHR( m_commandBuffer, static_cast( indirectDeviceAddress ) ); + d.vkCmdTraceRaysIndirect2KHR( static_cast( m_commandBuffer ), static_cast( indirectDeviceAddress ) ); } //=== VK_EXT_multi_draw === @@ -23425,7 +23944,12 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdDrawMultiEXT( m_commandBuffer, drawCount, reinterpret_cast( pVertexInfo ), instanceCount, firstInstance, stride ); + d.vkCmdDrawMultiEXT( static_cast( m_commandBuffer ), + drawCount, + reinterpret_cast( pVertexInfo ), + instanceCount, + firstInstance, + stride ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -23459,8 +23983,13 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdDrawMultiIndexedEXT( - m_commandBuffer, drawCount, reinterpret_cast( pIndexInfo ), instanceCount, firstInstance, stride, pVertexOffset ); + d.vkCmdDrawMultiIndexedEXT( static_cast( m_commandBuffer ), + drawCount, + reinterpret_cast( pIndexInfo ), + instanceCount, + firstInstance, + stride, + pVertexOffset ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -23496,7 +24025,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateMicromapEXT( m_device, + return static_cast( d.vkCreateMicromapEXT( static_cast( m_device ), reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pMicromap ) ) ); @@ -23546,7 +24075,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createMicromapEXTUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( micromap, ObjectDestroy( *this, allocator, d ) ) ); + result, UniqueHandle( micromap, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -23557,7 +24086,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyMicromapEXT( m_device, static_cast( micromap ), reinterpret_cast( pAllocator ) ); + d.vkDestroyMicromapEXT( + static_cast( m_device ), static_cast( micromap ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -23583,7 +24113,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyMicromapEXT( m_device, static_cast( micromap ), reinterpret_cast( pAllocator ) ); + d.vkDestroyMicromapEXT( + static_cast( m_device ), static_cast( micromap ), reinterpret_cast( 
pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -23609,7 +24140,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdBuildMicromapsEXT( m_commandBuffer, infoCount, reinterpret_cast( pInfos ) ); + d.vkCmdBuildMicromapsEXT( static_cast( m_commandBuffer ), infoCount, reinterpret_cast( pInfos ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -23633,8 +24164,10 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkBuildMicromapsEXT( - m_device, static_cast( deferredOperation ), infoCount, reinterpret_cast( pInfos ) ) ); + return static_cast( d.vkBuildMicromapsEXT( static_cast( m_device ), + static_cast( deferredOperation ), + infoCount, + reinterpret_cast( pInfos ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -23666,8 +24199,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( - d.vkCopyMicromapEXT( m_device, static_cast( deferredOperation ), reinterpret_cast( pInfo ) ) ); + return static_cast( d.vkCopyMicromapEXT( + static_cast( m_device ), static_cast( deferredOperation ), reinterpret_cast( pInfo ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -23698,8 +24231,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCopyMicromapToMemoryEXT( - m_device, static_cast( deferredOperation ), reinterpret_cast( pInfo ) ) ); + return static_cast( d.vkCopyMicromapToMemoryEXT( static_cast( m_device ), + static_cast( deferredOperation ), + reinterpret_cast( pInfo ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -23729,8 +24263,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCopyMemoryToMicromapEXT( - m_device, static_cast( deferredOperation ), reinterpret_cast( pInfo ) ) ); + return static_cast( d.vkCopyMemoryToMicromapEXT( static_cast( m_device ), + static_cast( deferredOperation ), + reinterpret_cast( pInfo ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -23764,12 +24299,20 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkWriteMicromapsPropertiesEXT( - m_device, micromapCount, reinterpret_cast( pMicromaps ), static_cast( queryType ), dataSize, pData, stride ) ); + return static_cast( d.vkWriteMicromapsPropertiesEXT( static_cast( m_device ), + micromapCount, + reinterpret_cast( pMicromaps ), + static_cast( queryType ), + dataSize, + pData, + stride ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template ::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type Device::writeMicromapsPropertiesEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & micromaps, VULKAN_HPP_NAMESPACE::QueryType queryType, @@ -23828,7 +24371,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE void CommandBuffer::copyMicromapEXT( const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT * pInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdCopyMicromapEXT( m_commandBuffer, 
reinterpret_cast( pInfo ) ); + d.vkCmdCopyMicromapEXT( static_cast( m_commandBuffer ), reinterpret_cast( pInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -23849,7 +24392,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdCopyMicromapToMemoryEXT( m_commandBuffer, reinterpret_cast( pInfo ) ); + d.vkCmdCopyMicromapToMemoryEXT( static_cast( m_commandBuffer ), reinterpret_cast( pInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -23871,7 +24414,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdCopyMemoryToMicromapEXT( m_commandBuffer, reinterpret_cast( pInfo ) ); + d.vkCmdCopyMemoryToMicromapEXT( static_cast( m_commandBuffer ), reinterpret_cast( pInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -23897,7 +24440,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdWriteMicromapsPropertiesEXT( m_commandBuffer, + d.vkCmdWriteMicromapsPropertiesEXT( static_cast( m_commandBuffer ), micromapCount, reinterpret_cast( pMicromaps ), static_cast( queryType ), @@ -23934,7 +24477,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetDeviceMicromapCompatibilityEXT( m_device, + d.vkGetDeviceMicromapCompatibilityEXT( static_cast( m_device ), reinterpret_cast( pVersionInfo ), reinterpret_cast( pCompatibility ) ); } @@ -23965,7 +24508,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetMicromapBuildSizesEXT( m_device, + d.vkGetMicromapBuildSizesEXT( static_cast( m_device ), static_cast( buildType ), reinterpret_cast( pBuildInfo ), reinterpret_cast( pSizeInfo ) ); @@ -24000,7 +24543,7 @@ namespace VULKAN_HPP_NAMESPACE CommandBuffer::drawClusterHUAWEI( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdDrawClusterHUAWEI( m_commandBuffer, groupCountX, groupCountY, groupCountZ ); + d.vkCmdDrawClusterHUAWEI( static_cast( m_commandBuffer ), groupCountX, groupCountY, groupCountZ ); } template @@ -24009,7 +24552,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdDrawClusterIndirectHUAWEI( m_commandBuffer, static_cast( buffer ), static_cast( offset ) ); + d.vkCmdDrawClusterIndirectHUAWEI( static_cast( m_commandBuffer ), static_cast( buffer ), static_cast( offset ) ); } //=== VK_EXT_pageable_device_local_memory === @@ -24018,7 +24561,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE void Device::setMemoryPriorityEXT( VULKAN_HPP_NAMESPACE::DeviceMemory memory, float priority, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkSetDeviceMemoryPriorityEXT( m_device, static_cast( memory ), priority ); + d.vkSetDeviceMemoryPriorityEXT( static_cast( m_device ), static_cast( memory ), priority ); } //=== VK_KHR_maintenance4 === @@ -24029,8 +24572,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( 
d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetDeviceBufferMemoryRequirementsKHR( - m_device, reinterpret_cast( pInfo ), reinterpret_cast( pMemoryRequirements ) ); + d.vkGetDeviceBufferMemoryRequirementsKHR( static_cast( m_device ), + reinterpret_cast( pInfo ), + reinterpret_cast( pMemoryRequirements ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -24076,8 +24620,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetDeviceImageMemoryRequirementsKHR( - m_device, reinterpret_cast( pInfo ), reinterpret_cast( pMemoryRequirements ) ); + d.vkGetDeviceImageMemoryRequirementsKHR( static_cast( m_device ), + reinterpret_cast( pInfo ), + reinterpret_cast( pMemoryRequirements ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -24124,14 +24669,18 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetDeviceImageSparseMemoryRequirementsKHR( m_device, + d.vkGetDeviceImageSparseMemoryRequirementsKHR( static_cast( m_device ), reinterpret_cast( pInfo ), pSparseMemoryRequirementCount, reinterpret_cast( pSparseMemoryRequirements ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template ::value, + int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector Device::getImageSparseMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, Dispatch const & d ) const { @@ -24203,7 +24752,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetDescriptorSetLayoutHostMappingInfoVALVE( m_device, + d.vkGetDescriptorSetLayoutHostMappingInfoVALVE( static_cast( m_device ), reinterpret_cast( pBindingReference ), reinterpret_cast( pHostMapping ) ); } @@ -24234,7 +24783,7 @@ namespace VULKAN_HPP_NAMESPACE Device::getDescriptorSetHostMappingVALVE( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, void ** ppData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetDescriptorSetHostMappingVALVE( m_device, static_cast( descriptorSet ), ppData ); + d.vkGetDescriptorSetHostMappingVALVE( static_cast( m_device ), static_cast( descriptorSet ), ppData ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -24264,7 +24813,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdCopyMemoryIndirectNV( m_commandBuffer, static_cast( copyBufferAddress ), copyCount, stride ); + d.vkCmdCopyMemoryIndirectNV( static_cast( m_commandBuffer ), static_cast( copyBufferAddress ), copyCount, stride ); } template @@ -24277,7 +24826,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdCopyMemoryToImageIndirectNV( m_commandBuffer, + d.vkCmdCopyMemoryToImageIndirectNV( static_cast( m_commandBuffer ), static_cast( copyBufferAddress ), copyCount, stride, @@ -24319,7 +24868,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdDecompressMemoryNV( m_commandBuffer, decompressRegionCount, reinterpret_cast( pDecompressMemoryRegions ) ); + d.vkCmdDecompressMemoryNV( static_cast( m_commandBuffer ), + decompressRegionCount, + 
reinterpret_cast( pDecompressMemoryRegions ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -24345,8 +24896,10 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdDecompressMemoryIndirectCountNV( - m_commandBuffer, static_cast( indirectCommandsAddress ), static_cast( indirectCommandsCountAddress ), stride ); + d.vkCmdDecompressMemoryIndirectCountNV( static_cast( m_commandBuffer ), + static_cast( indirectCommandsAddress ), + static_cast( indirectCommandsCountAddress ), + stride ); } //=== VK_NV_device_generated_commands_compute === @@ -24357,8 +24910,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetPipelineIndirectMemoryRequirementsNV( - m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pMemoryRequirements ) ); + d.vkGetPipelineIndirectMemoryRequirementsNV( static_cast( m_device ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pMemoryRequirements ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -24406,7 +24960,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdUpdatePipelineIndirectBufferNV( m_commandBuffer, static_cast( pipelineBindPoint ), static_cast( pipeline ) ); + d.vkCmdUpdatePipelineIndirectBufferNV( + static_cast( m_commandBuffer ), static_cast( pipelineBindPoint ), static_cast( pipeline ) ); } template @@ -24415,7 +24970,7 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( - d.vkGetPipelineIndirectDeviceAddressNV( m_device, reinterpret_cast( pInfo ) ) ); + d.vkGetPipelineIndirectDeviceAddressNV( static_cast( m_device ), reinterpret_cast( pInfo ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -24441,14 +24996,14 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE void CommandBuffer::setDepthClampEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetDepthClampEnableEXT( m_commandBuffer, static_cast( depthClampEnable ) ); + d.vkCmdSetDepthClampEnableEXT( static_cast( m_commandBuffer ), static_cast( depthClampEnable ) ); } template VULKAN_HPP_INLINE void CommandBuffer::setPolygonModeEXT( VULKAN_HPP_NAMESPACE::PolygonMode polygonMode, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetPolygonModeEXT( m_commandBuffer, static_cast( polygonMode ) ); + d.vkCmdSetPolygonModeEXT( static_cast( m_commandBuffer ), static_cast( polygonMode ) ); } template @@ -24456,7 +25011,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetRasterizationSamplesEXT( m_commandBuffer, static_cast( rasterizationSamples ) ); + d.vkCmdSetRasterizationSamplesEXT( static_cast( m_commandBuffer ), static_cast( rasterizationSamples ) ); } template @@ -24465,7 +25020,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetSampleMaskEXT( m_commandBuffer, static_cast( samples ), reinterpret_cast( pSampleMask ) ); + d.vkCmdSetSampleMaskEXT( + static_cast( m_commandBuffer ), static_cast( 
samples ), reinterpret_cast( pSampleMask ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -24496,21 +25052,21 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetAlphaToCoverageEnableEXT( m_commandBuffer, static_cast( alphaToCoverageEnable ) ); + d.vkCmdSetAlphaToCoverageEnableEXT( static_cast( m_commandBuffer ), static_cast( alphaToCoverageEnable ) ); } template VULKAN_HPP_INLINE void CommandBuffer::setAlphaToOneEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetAlphaToOneEnableEXT( m_commandBuffer, static_cast( alphaToOneEnable ) ); + d.vkCmdSetAlphaToOneEnableEXT( static_cast( m_commandBuffer ), static_cast( alphaToOneEnable ) ); } template VULKAN_HPP_INLINE void CommandBuffer::setLogicOpEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetLogicOpEnableEXT( m_commandBuffer, static_cast( logicOpEnable ) ); + d.vkCmdSetLogicOpEnableEXT( static_cast( m_commandBuffer ), static_cast( logicOpEnable ) ); } template @@ -24520,7 +25076,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetColorBlendEnableEXT( m_commandBuffer, firstAttachment, attachmentCount, reinterpret_cast( pColorBlendEnables ) ); + d.vkCmdSetColorBlendEnableEXT( + static_cast( m_commandBuffer ), firstAttachment, attachmentCount, reinterpret_cast( pColorBlendEnables ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -24546,8 +25103,10 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetColorBlendEquationEXT( - m_commandBuffer, firstAttachment, attachmentCount, reinterpret_cast( pColorBlendEquations ) ); + d.vkCmdSetColorBlendEquationEXT( static_cast( m_commandBuffer ), + firstAttachment, + attachmentCount, + reinterpret_cast( pColorBlendEquations ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -24575,7 +25134,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetColorWriteMaskEXT( m_commandBuffer, firstAttachment, attachmentCount, reinterpret_cast( pColorWriteMasks ) ); + d.vkCmdSetColorWriteMaskEXT( + static_cast( m_commandBuffer ), firstAttachment, attachmentCount, reinterpret_cast( pColorWriteMasks ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -24601,14 +25161,14 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetTessellationDomainOriginEXT( m_commandBuffer, static_cast( domainOrigin ) ); + d.vkCmdSetTessellationDomainOriginEXT( static_cast( m_commandBuffer ), static_cast( domainOrigin ) ); } template VULKAN_HPP_INLINE void CommandBuffer::setRasterizationStreamEXT( uint32_t rasterizationStream, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetRasterizationStreamEXT( m_commandBuffer, rasterizationStream ); + d.vkCmdSetRasterizationStreamEXT( static_cast( m_commandBuffer ), rasterizationStream ); } template @@ -24617,7 +25177,8 @@ namespace 
VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetConservativeRasterizationModeEXT( m_commandBuffer, static_cast( conservativeRasterizationMode ) ); + d.vkCmdSetConservativeRasterizationModeEXT( static_cast( m_commandBuffer ), + static_cast( conservativeRasterizationMode ) ); } template @@ -24625,14 +25186,14 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetExtraPrimitiveOverestimationSizeEXT( m_commandBuffer, extraPrimitiveOverestimationSize ); + d.vkCmdSetExtraPrimitiveOverestimationSizeEXT( static_cast( m_commandBuffer ), extraPrimitiveOverestimationSize ); } template VULKAN_HPP_INLINE void CommandBuffer::setDepthClipEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetDepthClipEnableEXT( m_commandBuffer, static_cast( depthClipEnable ) ); + d.vkCmdSetDepthClipEnableEXT( static_cast( m_commandBuffer ), static_cast( depthClipEnable ) ); } template @@ -24640,7 +25201,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetSampleLocationsEnableEXT( m_commandBuffer, static_cast( sampleLocationsEnable ) ); + d.vkCmdSetSampleLocationsEnableEXT( static_cast( m_commandBuffer ), static_cast( sampleLocationsEnable ) ); } template @@ -24650,8 +25211,10 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetColorBlendAdvancedEXT( - m_commandBuffer, firstAttachment, attachmentCount, reinterpret_cast( pColorBlendAdvanced ) ); + d.vkCmdSetColorBlendAdvancedEXT( static_cast( m_commandBuffer ), + firstAttachment, + attachmentCount, + reinterpret_cast( pColorBlendAdvanced ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -24677,7 +25240,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetProvokingVertexModeEXT( m_commandBuffer, static_cast( provokingVertexMode ) ); + d.vkCmdSetProvokingVertexModeEXT( static_cast( m_commandBuffer ), static_cast( provokingVertexMode ) ); } template @@ -24685,14 +25248,14 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetLineRasterizationModeEXT( m_commandBuffer, static_cast( lineRasterizationMode ) ); + d.vkCmdSetLineRasterizationModeEXT( static_cast( m_commandBuffer ), static_cast( lineRasterizationMode ) ); } template VULKAN_HPP_INLINE void CommandBuffer::setLineStippleEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetLineStippleEnableEXT( m_commandBuffer, static_cast( stippledLineEnable ) ); + d.vkCmdSetLineStippleEnableEXT( static_cast( m_commandBuffer ), static_cast( stippledLineEnable ) ); } template @@ -24700,7 +25263,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetDepthClipNegativeOneToOneEXT( m_commandBuffer, static_cast( negativeOneToOne ) ); + 
d.vkCmdSetDepthClipNegativeOneToOneEXT( static_cast( m_commandBuffer ), static_cast( negativeOneToOne ) ); } template @@ -24708,7 +25271,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetViewportWScalingEnableNV( m_commandBuffer, static_cast( viewportWScalingEnable ) ); + d.vkCmdSetViewportWScalingEnableNV( static_cast( m_commandBuffer ), static_cast( viewportWScalingEnable ) ); } template @@ -24718,7 +25281,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetViewportSwizzleNV( m_commandBuffer, firstViewport, viewportCount, reinterpret_cast( pViewportSwizzles ) ); + d.vkCmdSetViewportSwizzleNV( + static_cast( m_commandBuffer ), firstViewport, viewportCount, reinterpret_cast( pViewportSwizzles ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -24744,14 +25308,14 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetCoverageToColorEnableNV( m_commandBuffer, static_cast( coverageToColorEnable ) ); + d.vkCmdSetCoverageToColorEnableNV( static_cast( m_commandBuffer ), static_cast( coverageToColorEnable ) ); } template VULKAN_HPP_INLINE void CommandBuffer::setCoverageToColorLocationNV( uint32_t coverageToColorLocation, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetCoverageToColorLocationNV( m_commandBuffer, coverageToColorLocation ); + d.vkCmdSetCoverageToColorLocationNV( static_cast( m_commandBuffer ), coverageToColorLocation ); } template @@ -24759,7 +25323,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetCoverageModulationModeNV( m_commandBuffer, static_cast( coverageModulationMode ) ); + d.vkCmdSetCoverageModulationModeNV( static_cast( m_commandBuffer ), static_cast( coverageModulationMode ) ); } template @@ -24767,7 +25331,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetCoverageModulationTableEnableNV( m_commandBuffer, static_cast( coverageModulationTableEnable ) ); + d.vkCmdSetCoverageModulationTableEnableNV( static_cast( m_commandBuffer ), static_cast( coverageModulationTableEnable ) ); } template @@ -24776,7 +25340,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetCoverageModulationTableNV( m_commandBuffer, coverageModulationTableCount, pCoverageModulationTable ); + d.vkCmdSetCoverageModulationTableNV( static_cast( m_commandBuffer ), coverageModulationTableCount, pCoverageModulationTable ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -24799,7 +25363,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetShadingRateImageEnableNV( m_commandBuffer, static_cast( shadingRateImageEnable ) ); + d.vkCmdSetShadingRateImageEnableNV( static_cast( m_commandBuffer ), static_cast( shadingRateImageEnable ) ); } template @@ -24807,7 +25371,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( 
d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetRepresentativeFragmentTestEnableNV( m_commandBuffer, static_cast( representativeFragmentTestEnable ) ); + d.vkCmdSetRepresentativeFragmentTestEnableNV( static_cast( m_commandBuffer ), static_cast( representativeFragmentTestEnable ) ); } template @@ -24815,7 +25379,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetCoverageReductionModeNV( m_commandBuffer, static_cast( coverageReductionMode ) ); + d.vkCmdSetCoverageReductionModeNV( static_cast( m_commandBuffer ), static_cast( coverageReductionMode ) ); } //=== VK_EXT_shader_module_identifier === @@ -24826,7 +25390,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetShaderModuleIdentifierEXT( m_device, static_cast( shaderModule ), reinterpret_cast( pIdentifier ) ); + d.vkGetShaderModuleIdentifierEXT( + static_cast( m_device ), static_cast( shaderModule ), reinterpret_cast( pIdentifier ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -24852,8 +25417,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetShaderModuleCreateInfoIdentifierEXT( - m_device, reinterpret_cast( pCreateInfo ), reinterpret_cast( pIdentifier ) ); + d.vkGetShaderModuleCreateInfoIdentifierEXT( static_cast( m_device ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pIdentifier ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -24887,14 +25453,18 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( - d.vkGetPhysicalDeviceOpticalFlowImageFormatsNV( m_physicalDevice, + d.vkGetPhysicalDeviceOpticalFlowImageFormatsNV( static_cast( m_physicalDevice ), reinterpret_cast( pOpticalFlowImageFormatInfo ), pFormatCount, reinterpret_cast( pImageFormatProperties ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template ::value, + int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getOpticalFlowImageFormatsNV( const VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV & opticalFlowImageFormatInfo, @@ -24984,7 +25554,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateOpticalFlowSessionNV( m_device, + return static_cast( d.vkCreateOpticalFlowSessionNV( static_cast( m_device ), reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pSession ) ) ); @@ -25034,7 +25604,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createOpticalFlowSessionNVUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( session, ObjectDestroy( *this, allocator, d ) ) ); + result, UniqueHandle( session, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -25045,7 +25615,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyOpticalFlowSessionNV( m_device, static_cast( session ), reinterpret_cast( pAllocator ) ); + 
d.vkDestroyOpticalFlowSessionNV( + static_cast( m_device ), static_cast( session ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -25072,7 +25643,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyOpticalFlowSessionNV( m_device, static_cast( session ), reinterpret_cast( pAllocator ) ); + d.vkDestroyOpticalFlowSessionNV( + static_cast( m_device ), static_cast( session ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -25102,7 +25674,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkBindOpticalFlowSessionImageNV( m_device, + return static_cast( d.vkBindOpticalFlowSessionImageNV( static_cast( m_device ), static_cast( session ), static_cast( bindingPoint ), static_cast( view ), @@ -25140,8 +25712,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdOpticalFlowExecuteNV( - m_commandBuffer, static_cast( session ), reinterpret_cast( pExecuteInfo ) ); + d.vkCmdOpticalFlowExecuteNV( static_cast( m_commandBuffer ), + static_cast( session ), + reinterpret_cast( pExecuteInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -25170,7 +25743,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdBindIndexBuffer2KHR( m_commandBuffer, + d.vkCmdBindIndexBuffer2KHR( static_cast( m_commandBuffer ), static_cast( buffer ), static_cast( offset ), static_cast( size ), @@ -25183,8 +25756,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetRenderingAreaGranularityKHR( - m_device, reinterpret_cast( pRenderingAreaInfo ), reinterpret_cast( pGranularity ) ); + d.vkGetRenderingAreaGranularityKHR( static_cast( m_device ), + reinterpret_cast( pRenderingAreaInfo ), + reinterpret_cast( pGranularity ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -25211,8 +25785,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetDeviceImageSubresourceLayoutKHR( - m_device, reinterpret_cast( pInfo ), reinterpret_cast( pLayout ) ); + d.vkGetDeviceImageSubresourceLayoutKHR( static_cast( m_device ), + reinterpret_cast( pInfo ), + reinterpret_cast( pLayout ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -25257,7 +25832,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetImageSubresourceLayout2KHR( m_device, + d.vkGetImageSubresourceLayout2KHR( static_cast( m_device ), static_cast( image ), reinterpret_cast( pSubresource ), reinterpret_cast( pLayout ) ); @@ -25312,7 +25887,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE void Device::antiLagUpdateAMD( const VULKAN_HPP_NAMESPACE::AntiLagDataAMD * pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkAntiLagUpdateAMD( m_device, reinterpret_cast( pData ) ); + d.vkAntiLagUpdateAMD( static_cast( m_device ), reinterpret_cast( pData ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -25338,7 
+25913,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateShadersEXT( m_device, + return static_cast( d.vkCreateShadersEXT( static_cast( m_device ), createInfoCount, reinterpret_cast( pCreateInfos ), reinterpret_cast( pAllocator ), @@ -25346,7 +25921,9 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template ::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> Device::createShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, Optional allocator, @@ -25425,7 +26002,10 @@ namespace VULKAN_HPP_NAMESPACE } # ifndef VULKAN_HPP_NO_SMART_HANDLE - template + template < + typename Dispatch, + typename ShaderEXTAllocator, + typename std::enable_if>::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue, ShaderEXTAllocator>> Device::createShadersEXTUnique( VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, Optional allocator, @@ -25448,7 +26028,7 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncompatibleShaderBinaryEXT } ); std::vector, ShaderEXTAllocator> uniqueShaders; uniqueShaders.reserve( createInfos.size() ); - ObjectDestroy deleter( *this, allocator, d ); + detail::ObjectDestroy deleter( *this, allocator, d ); for ( auto const & shader : shaders ) { uniqueShaders.push_back( UniqueHandle( shader, deleter ) ); @@ -25483,7 +26063,7 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncompatibleShaderBinaryEXT } ); std::vector, ShaderEXTAllocator> uniqueShaders( shaderEXTAllocator ); uniqueShaders.reserve( createInfos.size() ); - ObjectDestroy deleter( *this, allocator, d ); + detail::ObjectDestroy deleter( *this, allocator, d ); for ( auto const & shader : shaders ) { uniqueShaders.push_back( UniqueHandle( shader, deleter ) ); @@ -25514,7 +26094,7 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncompatibleShaderBinaryEXT } ); return ResultValue>( - result, UniqueHandle( shader, ObjectDestroy( *this, allocator, d ) ) ); + result, UniqueHandle( shader, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -25525,7 +26105,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyShaderEXT( m_device, static_cast( shader ), reinterpret_cast( pAllocator ) ); + d.vkDestroyShaderEXT( + static_cast( m_device ), static_cast( shader ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -25551,7 +26132,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyShaderEXT( m_device, static_cast( shader ), reinterpret_cast( pAllocator ) ); + d.vkDestroyShaderEXT( + static_cast( m_device ), static_cast( shader ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -25576,11 +26158,13 @@ namespace VULKAN_HPP_NAMESPACE Device::getShaderBinaryDataEXT( VULKAN_HPP_NAMESPACE::ShaderEXT shader, size_t * pDataSize, void * pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( 
d.vkGetShaderBinaryDataEXT( m_device, static_cast( shader ), pDataSize, pData ) ); + return static_cast( d.vkGetShaderBinaryDataEXT( static_cast( m_device ), static_cast( shader ), pDataSize, pData ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template ::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type Device::getShaderBinaryDataEXT( VULKAN_HPP_NAMESPACE::ShaderEXT shader, Dispatch const & d ) const { @@ -25652,8 +26236,10 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdBindShadersEXT( - m_commandBuffer, stageCount, reinterpret_cast( pStages ), reinterpret_cast( pShaders ) ); + d.vkCmdBindShadersEXT( static_cast( m_commandBuffer ), + stageCount, + reinterpret_cast( pStages ), + reinterpret_cast( pShaders ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -25682,6 +26268,36 @@ namespace VULKAN_HPP_NAMESPACE } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + template + VULKAN_HPP_INLINE void CommandBuffer::setDepthClampRangeEXT( VULKAN_HPP_NAMESPACE::DepthClampModeEXT depthClampMode, + const VULKAN_HPP_NAMESPACE::DepthClampRangeEXT * pDepthClampRange, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetDepthClampRangeEXT( static_cast( m_commandBuffer ), + static_cast( depthClampMode ), + reinterpret_cast( pDepthClampRange ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::setDepthClampRangeEXT( VULKAN_HPP_NAMESPACE::DepthClampModeEXT depthClampMode, + Optional depthClampRange, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdSetDepthClampRangeEXT && + "Function requires or " ); +# endif + + d.vkCmdSetDepthClampRangeEXT( + m_commandBuffer, + static_cast( depthClampMode ), + reinterpret_cast( static_cast( depthClampRange ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + //=== VK_KHR_pipeline_binary === template @@ -25691,14 +26307,16 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreatePipelineBinariesKHR( m_device, + return static_cast( d.vkCreatePipelineBinariesKHR( static_cast( m_device ), reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), reinterpret_cast( pBinaries ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template ::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> Device::createPipelineBinariesKHR( const VULKAN_HPP_NAMESPACE::PipelineBinaryCreateInfoKHR & createInfo, Optional allocator, @@ -25810,7 +26428,11 @@ namespace VULKAN_HPP_NAMESPACE } # ifndef VULKAN_HPP_NO_SMART_HANDLE - template + template >::value, + int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue, PipelineBinaryKHRAllocator>> Device::createPipelineBinariesKHRUnique( const VULKAN_HPP_NAMESPACE::PipelineBinaryCreateInfoKHR & createInfo, Optional allocator, @@ -25862,7 +26484,7 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncomplete, VULKAN_HPP_NAMESPACE::Result::ePipelineBinaryMissingKHR } ); std::vector, PipelineBinaryKHRAllocator> uniquePipelineBinaries; uniquePipelineBinaries.reserve( pipelineBinaries.size() ); - 
ObjectDestroy deleter( *this, allocator, d ); + detail::ObjectDestroy deleter( *this, allocator, d ); for ( auto const & pipelineBinary : pipelineBinaries ) { uniquePipelineBinaries.push_back( UniqueHandle( pipelineBinary, deleter ) ); @@ -25929,7 +26551,7 @@ namespace VULKAN_HPP_NAMESPACE std::vector, PipelineBinaryKHRAllocator> uniquePipelineBinaries( pipelineBinaryKHRAllocator ); uniquePipelineBinaries.reserve( pipelineBinaries.size() ); - ObjectDestroy deleter( *this, allocator, d ); + detail::ObjectDestroy deleter( *this, allocator, d ); for ( auto const & pipelineBinary : pipelineBinaries ) { uniquePipelineBinaries.push_back( UniqueHandle( pipelineBinary, deleter ) ); @@ -25946,7 +26568,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyPipelineBinaryKHR( m_device, static_cast( pipelineBinary ), reinterpret_cast( pAllocator ) ); + d.vkDestroyPipelineBinaryKHR( + static_cast( m_device ), static_cast( pipelineBinary ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -25973,7 +26596,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyPipelineBinaryKHR( m_device, static_cast( pipelineBinary ), reinterpret_cast( pAllocator ) ); + d.vkDestroyPipelineBinaryKHR( + static_cast( m_device ), static_cast( pipelineBinary ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -26000,8 +26624,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetPipelineKeyKHR( - m_device, reinterpret_cast( pPipelineCreateInfo ), reinterpret_cast( pPipelineKey ) ) ); + return static_cast( d.vkGetPipelineKeyKHR( static_cast( m_device ), + reinterpret_cast( pPipelineCreateInfo ), + reinterpret_cast( pPipelineKey ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -26033,7 +26658,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetPipelineBinaryDataKHR( m_device, + return static_cast( d.vkGetPipelineBinaryDataKHR( static_cast( m_device ), reinterpret_cast( pInfo ), reinterpret_cast( pPipelineBinaryKey ), pPipelineBinaryDataSize, @@ -26041,7 +26666,9 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template ::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>>::type Device::getPipelineBinaryDataKHR( const VULKAN_HPP_NAMESPACE::PipelineBinaryDataInfoKHR & info, Dispatch const & d ) const @@ -26123,8 +26750,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkReleaseCapturedPipelineDataKHR( - m_device, reinterpret_cast( pInfo ), reinterpret_cast( pAllocator ) ) ); + return static_cast( d.vkReleaseCapturedPipelineDataKHR( static_cast( m_device ), + reinterpret_cast( pInfo ), + reinterpret_cast( pAllocator ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -26154,12 +26782,16 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetFramebufferTilePropertiesQCOM( - m_device, 
static_cast( framebuffer ), pPropertiesCount, reinterpret_cast( pProperties ) ) ); + return static_cast( d.vkGetFramebufferTilePropertiesQCOM( static_cast( m_device ), + static_cast( framebuffer ), + pPropertiesCount, + reinterpret_cast( pProperties ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template ::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type Device::getFramebufferTilePropertiesQCOM( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, Dispatch const & d ) const { @@ -26234,8 +26866,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetDynamicRenderingTilePropertiesQCOM( - m_device, reinterpret_cast( pRenderingInfo ), reinterpret_cast( pProperties ) ) ); + return static_cast( d.vkGetDynamicRenderingTilePropertiesQCOM( static_cast( m_device ), + reinterpret_cast( pRenderingInfo ), + reinterpret_cast( pProperties ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -26264,8 +26897,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( - d.vkSetLatencySleepModeNV( m_device, static_cast( swapchain ), reinterpret_cast( pSleepModeInfo ) ) ); + return static_cast( d.vkSetLatencySleepModeNV( + static_cast( m_device ), static_cast( swapchain ), reinterpret_cast( pSleepModeInfo ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -26293,8 +26926,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( - d.vkLatencySleepNV( m_device, static_cast( swapchain ), reinterpret_cast( pSleepInfo ) ) ); + return static_cast( d.vkLatencySleepNV( + static_cast( m_device ), static_cast( swapchain ), reinterpret_cast( pSleepInfo ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -26318,7 +26951,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkSetLatencyMarkerNV( m_device, static_cast( swapchain ), reinterpret_cast( pLatencyMarkerInfo ) ); + d.vkSetLatencyMarkerNV( + static_cast( m_device ), static_cast( swapchain ), reinterpret_cast( pLatencyMarkerInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -26342,11 +26976,16 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetLatencyTimingsNV( m_device, static_cast( swapchain ), reinterpret_cast( pLatencyMarkerInfo ) ); + d.vkGetLatencyTimingsNV( + static_cast( m_device ), static_cast( swapchain ), reinterpret_cast( pLatencyMarkerInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template < + typename LatencyTimingsFrameReportNVAllocator, + typename Dispatch, + typename std::enable_if::value, + int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector Device::getLatencyTimingsNV( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const { @@ -26396,7 +27035,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkQueueNotifyOutOfBandNV( m_queue, reinterpret_cast( pQueueTypeInfo ) ); + d.vkQueueNotifyOutOfBandNV( static_cast( m_queue ), reinterpret_cast( pQueueTypeInfo ) ); } #ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -26421,11 +27060,15 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR( - m_physicalDevice, pPropertyCount, reinterpret_cast( pProperties ) ) ); + static_cast( m_physicalDevice ), pPropertyCount, reinterpret_cast( pProperties ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template ::value, + int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getCooperativeMatrixPropertiesKHR( Dispatch const & d ) const @@ -26505,7 +27148,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetAttachmentFeedbackLoopEnableEXT( m_commandBuffer, static_cast( aspectMask ) ); + d.vkCmdSetAttachmentFeedbackLoopEnableEXT( static_cast( m_commandBuffer ), static_cast( aspectMask ) ); } #if defined( VK_USE_PLATFORM_SCREEN_QNX ) @@ -26517,7 +27160,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetScreenBufferPropertiesQNX( m_device, buffer, reinterpret_cast( pProperties ) ) ); + return static_cast( + d.vkGetScreenBufferPropertiesQNX( static_cast( m_device ), buffer, reinterpret_cast( pProperties ) ) ); } # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -26565,7 +27209,7 @@ namespace VULKAN_HPP_NAMESPACE CommandBuffer::setLineStippleKHR( uint32_t lineStippleFactor, uint16_t lineStipplePattern, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetLineStippleKHR( m_commandBuffer, lineStippleFactor, lineStipplePattern ); + d.vkCmdSetLineStippleKHR( static_cast( m_commandBuffer ), lineStippleFactor, lineStipplePattern ); } //=== VK_KHR_calibrated_timestamps === @@ -26576,12 +27220,14 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( - d.vkGetPhysicalDeviceCalibrateableTimeDomainsKHR( m_physicalDevice, pTimeDomainCount, reinterpret_cast( pTimeDomains ) ) ); + return static_cast( d.vkGetPhysicalDeviceCalibrateableTimeDomainsKHR( + static_cast( m_physicalDevice ), pTimeDomainCount, reinterpret_cast( pTimeDomains ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template ::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type PhysicalDevice::getCalibrateableTimeDomainsKHR( Dispatch const & d ) const { @@ -26656,12 +27302,17 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetCalibratedTimestampsKHR( - m_device, timestampCount, reinterpret_cast( pTimestampInfos ), pTimestamps, pMaxDeviation ) ); + return static_cast( d.vkGetCalibratedTimestampsKHR( static_cast( m_device ), + timestampCount, + reinterpret_cast( pTimestampInfos ), + pTimestamps, + pMaxDeviation ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template + template ::value, int>::type> VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType, uint64_t>>::type Device::getCalibratedTimestampsKHR( VULKAN_HPP_NAMESPACE::ArrayProxy const & timestampInfos, Dispatch const & d ) const @@ -26736,7 +27387,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & 
d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdBindDescriptorSets2KHR( m_commandBuffer, reinterpret_cast( pBindDescriptorSetsInfo ) ); + d.vkCmdBindDescriptorSets2KHR( static_cast( m_commandBuffer ), + reinterpret_cast( pBindDescriptorSetsInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -26758,7 +27410,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdPushConstants2KHR( m_commandBuffer, reinterpret_cast( pPushConstantsInfo ) ); + d.vkCmdPushConstants2KHR( static_cast( m_commandBuffer ), reinterpret_cast( pPushConstantsInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -26780,7 +27432,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdPushDescriptorSet2KHR( m_commandBuffer, reinterpret_cast( pPushDescriptorSetInfo ) ); + d.vkCmdPushDescriptorSet2KHR( static_cast( m_commandBuffer ), + reinterpret_cast( pPushDescriptorSetInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -26803,7 +27456,7 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdPushDescriptorSetWithTemplate2KHR( m_commandBuffer, + d.vkCmdPushDescriptorSetWithTemplate2KHR( static_cast( m_commandBuffer ), reinterpret_cast( pPushDescriptorSetWithTemplateInfo ) ); } @@ -26829,7 +27482,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetDescriptorBufferOffsets2EXT( m_commandBuffer, reinterpret_cast( pSetDescriptorBufferOffsetsInfo ) ); + d.vkCmdSetDescriptorBufferOffsets2EXT( static_cast( m_commandBuffer ), + reinterpret_cast( pSetDescriptorBufferOffsetsInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -26854,7 +27508,8 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdBindDescriptorBufferEmbeddedSamplers2EXT( - m_commandBuffer, reinterpret_cast( pBindDescriptorBufferEmbeddedSamplersInfo ) ); + static_cast( m_commandBuffer ), + reinterpret_cast( pBindDescriptorBufferEmbeddedSamplersInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -26874,5 +27529,518 @@ namespace VULKAN_HPP_NAMESPACE } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + //=== VK_EXT_device_generated_commands === + + template + VULKAN_HPP_INLINE void Device::getGeneratedCommandsMemoryRequirementsEXT( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoEXT * pInfo, + VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetGeneratedCommandsMemoryRequirementsEXT( static_cast( m_device ), + reinterpret_cast( pInfo ), + reinterpret_cast( pMemoryRequirements ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 + Device::getGeneratedCommandsMemoryRequirementsEXT( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoEXT & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( 
d.vkGetGeneratedCommandsMemoryRequirementsEXT && + "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; + d.vkGetGeneratedCommandsMemoryRequirementsEXT( m_device, + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + + return memoryRequirements; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + Device::getGeneratedCommandsMemoryRequirementsEXT( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoEXT & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetGeneratedCommandsMemoryRequirementsEXT && + "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get(); + d.vkGetGeneratedCommandsMemoryRequirementsEXT( m_device, + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + + return structureChain; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void CommandBuffer::preprocessGeneratedCommandsEXT( const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoEXT * pGeneratedCommandsInfo, + VULKAN_HPP_NAMESPACE::CommandBuffer stateCommandBuffer, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdPreprocessGeneratedCommandsEXT( static_cast( m_commandBuffer ), + reinterpret_cast( pGeneratedCommandsInfo ), + static_cast( stateCommandBuffer ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::preprocessGeneratedCommandsEXT( const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoEXT & generatedCommandsInfo, + VULKAN_HPP_NAMESPACE::CommandBuffer stateCommandBuffer, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdPreprocessGeneratedCommandsEXT && "Function requires " ); +# endif + + d.vkCmdPreprocessGeneratedCommandsEXT( + m_commandBuffer, reinterpret_cast( &generatedCommandsInfo ), static_cast( stateCommandBuffer ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void CommandBuffer::executeGeneratedCommandsEXT( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, + const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoEXT * pGeneratedCommandsInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdExecuteGeneratedCommandsEXT( static_cast( m_commandBuffer ), + static_cast( isPreprocessed ), + reinterpret_cast( pGeneratedCommandsInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::executeGeneratedCommandsEXT( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, + const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoEXT & generatedCommandsInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdExecuteGeneratedCommandsEXT && "Function requires " ); +# endif + + d.vkCmdExecuteGeneratedCommandsEXT( + m_commandBuffer, static_cast( isPreprocessed ), reinterpret_cast( &generatedCommandsInfo ) ); + } +#endif 
/* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createIndirectCommandsLayoutEXT( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoEXT * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT * pIndirectCommandsLayout, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateIndirectCommandsLayoutEXT( static_cast( m_device ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pIndirectCommandsLayout ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::createIndirectCommandsLayoutEXT( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoEXT & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateIndirectCommandsLayoutEXT && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT indirectCommandsLayout; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateIndirectCommandsLayoutEXT( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &indirectCommandsLayout ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createIndirectCommandsLayoutEXT" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( indirectCommandsLayout ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + Device::createIndirectCommandsLayoutEXTUnique( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoEXT & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateIndirectCommandsLayoutEXT && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT indirectCommandsLayout; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateIndirectCommandsLayoutEXT( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &indirectCommandsLayout ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createIndirectCommandsLayoutEXTUnique" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, + UniqueHandle( + indirectCommandsLayout, detail::ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void Device::destroyIndirectCommandsLayoutEXT( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT indirectCommandsLayout, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyIndirectCommandsLayoutEXT( static_cast( m_device ), + static_cast( indirectCommandsLayout ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void 
Device::destroyIndirectCommandsLayoutEXT( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT indirectCommandsLayout, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyIndirectCommandsLayoutEXT && "Function requires " ); +# endif + + d.vkDestroyIndirectCommandsLayoutEXT( + m_device, + static_cast( indirectCommandsLayout ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT indirectCommandsLayout, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyIndirectCommandsLayoutEXT( static_cast( m_device ), + static_cast( indirectCommandsLayout ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT indirectCommandsLayout, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyIndirectCommandsLayoutEXT && "Function requires " ); +# endif + + d.vkDestroyIndirectCommandsLayoutEXT( + m_device, + static_cast( indirectCommandsLayout ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + Device::createIndirectExecutionSetEXT( const VULKAN_HPP_NAMESPACE::IndirectExecutionSetCreateInfoEXT * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT * pIndirectExecutionSet, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkCreateIndirectExecutionSetEXT( static_cast( m_device ), + reinterpret_cast( pCreateInfo ), + reinterpret_cast( pAllocator ), + reinterpret_cast( pIndirectExecutionSet ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type + Device::createIndirectExecutionSetEXT( const VULKAN_HPP_NAMESPACE::IndirectExecutionSetCreateInfoEXT & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateIndirectExecutionSetEXT && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT indirectExecutionSet; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateIndirectExecutionSetEXT( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &indirectExecutionSet ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createIndirectExecutionSetEXT" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( indirectExecutionSet ) ); + } + +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + 
Device::createIndirectExecutionSetEXTUnique( const VULKAN_HPP_NAMESPACE::IndirectExecutionSetCreateInfoEXT & createInfo, + Optional allocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCreateIndirectExecutionSetEXT && "Function requires " ); +# endif + + VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT indirectExecutionSet; + VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateIndirectExecutionSetEXT( + m_device, + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &indirectExecutionSet ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createIndirectExecutionSetEXTUnique" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, + UniqueHandle( + indirectExecutionSet, detail::ObjectDestroy( *this, allocator, d ) ) ); + } +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void Device::destroyIndirectExecutionSetEXT( VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT indirectExecutionSet, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyIndirectExecutionSetEXT( static_cast( m_device ), + static_cast( indirectExecutionSet ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroyIndirectExecutionSetEXT( VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT indirectExecutionSet, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyIndirectExecutionSetEXT && "Function requires " ); +# endif + + d.vkDestroyIndirectExecutionSetEXT( + m_device, + static_cast( indirectExecutionSet ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT indirectExecutionSet, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkDestroyIndirectExecutionSetEXT( static_cast( m_device ), + static_cast( indirectExecutionSet ), + reinterpret_cast( pAllocator ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT indirectExecutionSet, + Optional allocator, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkDestroyIndirectExecutionSetEXT && "Function requires " ); +# endif + + d.vkDestroyIndirectExecutionSetEXT( + m_device, + static_cast( indirectExecutionSet ), + reinterpret_cast( static_cast( allocator ) ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void Device::updateIndirectExecutionSetPipelineEXT( VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT indirectExecutionSet, + uint32_t executionSetWriteCount, + const 
VULKAN_HPP_NAMESPACE::WriteIndirectExecutionSetPipelineEXT * pExecutionSetWrites, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkUpdateIndirectExecutionSetPipelineEXT( static_cast( m_device ), + static_cast( indirectExecutionSet ), + executionSetWriteCount, + reinterpret_cast( pExecutionSetWrites ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::updateIndirectExecutionSetPipelineEXT( + VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT indirectExecutionSet, + VULKAN_HPP_NAMESPACE::ArrayProxy const & executionSetWrites, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkUpdateIndirectExecutionSetPipelineEXT && + "Function requires " ); +# endif + + d.vkUpdateIndirectExecutionSetPipelineEXT( m_device, + static_cast( indirectExecutionSet ), + executionSetWrites.size(), + reinterpret_cast( executionSetWrites.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void Device::updateIndirectExecutionSetShaderEXT( VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT indirectExecutionSet, + uint32_t executionSetWriteCount, + const VULKAN_HPP_NAMESPACE::WriteIndirectExecutionSetShaderEXT * pExecutionSetWrites, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkUpdateIndirectExecutionSetShaderEXT( static_cast( m_device ), + static_cast( indirectExecutionSet ), + executionSetWriteCount, + reinterpret_cast( pExecutionSetWrites ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void Device::updateIndirectExecutionSetShaderEXT( + VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT indirectExecutionSet, + VULKAN_HPP_NAMESPACE::ArrayProxy const & executionSetWrites, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkUpdateIndirectExecutionSetShaderEXT && + "Function requires " ); +# endif + + d.vkUpdateIndirectExecutionSetShaderEXT( m_device, + static_cast( indirectExecutionSet ), + executionSetWrites.size(), + reinterpret_cast( executionSetWrites.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_NV_cooperative_matrix2 === + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result + PhysicalDevice::getCooperativeMatrixFlexibleDimensionsPropertiesNV( uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::CooperativeMatrixFlexibleDimensionsPropertiesNV * pProperties, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV( + static_cast( m_physicalDevice ), + pPropertyCount, + reinterpret_cast( pProperties ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template ::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType< + std::vector>::type + PhysicalDevice::getCooperativeMatrixFlexibleDimensionsPropertiesNV( Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV && + 
"Function requires " ); +# endif + + std::vector properties; + uint32_t propertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + d.vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV( m_physicalDevice, &propertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( d.vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV( + m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixFlexibleDimensionsPropertiesNV" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); + } + + template ::value, + int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType< + std::vector>::type + PhysicalDevice::getCooperativeMatrixFlexibleDimensionsPropertiesNV( + CooperativeMatrixFlexibleDimensionsPropertiesNVAllocator & cooperativeMatrixFlexibleDimensionsPropertiesNVAllocator, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV && + "Function requires " ); +# endif + + std::vector properties( + cooperativeMatrixFlexibleDimensionsPropertiesNVAllocator ); + uint32_t propertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + d.vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV( m_physicalDevice, &propertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( d.vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV( + m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixFlexibleDimensionsPropertiesNV" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + } // namespace VULKAN_HPP_NAMESPACE #endif diff --git a/third_party/vulkan/vulkan_handles.hpp b/third_party/vulkan/vulkan_handles.hpp index 4670c46..4964e9b 100644 --- a/third_party/vulkan/vulkan_handles.hpp +++ b/third_party/vulkan/vulkan_handles.hpp @@ -630,13 +630,6 @@ namespace VULKAN_HPP_NAMESPACE struct ShaderResourceUsageAMD; struct ShaderStatisticsInfoAMD; - //=== VK_KHR_dynamic_rendering === - struct RenderingFragmentShadingRateAttachmentInfoKHR; - struct RenderingFragmentDensityMapAttachmentInfoEXT; - struct AttachmentSampleCountInfoAMD; - using AttachmentSampleCountInfoNV = AttachmentSampleCountInfoAMD; - struct MultiviewPerViewAttributesInfoNVX; - #if defined( VK_USE_PLATFORM_GGP ) //=== VK_GGP_stream_descriptor_surface === struct StreamDescriptorSurfaceCreateInfoGGP; 
@@ -744,6 +737,7 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_NVX_multiview_per_view_attributes === struct PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX; + struct MultiviewPerViewAttributesInfoNVX; //=== VK_NV_viewport_swizzle === struct ViewportSwizzleNV; @@ -844,6 +838,10 @@ namespace VULKAN_HPP_NAMESPACE union DeviceOrHostAddressConstAMDX; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ + //=== VK_AMD_mixed_attachment_samples === + struct AttachmentSampleCountInfoAMD; + using AttachmentSampleCountInfoNV = AttachmentSampleCountInfoAMD; + //=== VK_EXT_sample_locations === struct SampleLocationEXT; struct SampleLocationsInfoEXT; @@ -1015,6 +1013,8 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_NV_device_diagnostic_checkpoints === struct QueueFamilyCheckpointPropertiesNV; struct CheckpointDataNV; + struct QueueFamilyCheckpointProperties2NV; + struct CheckpointData2NV; //=== VK_INTEL_shader_integer_functions2 === struct PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL; @@ -1051,6 +1051,7 @@ namespace VULKAN_HPP_NAMESPACE struct PhysicalDeviceFragmentDensityMapFeaturesEXT; struct PhysicalDeviceFragmentDensityMapPropertiesEXT; struct RenderPassFragmentDensityMapCreateInfoEXT; + struct RenderingFragmentDensityMapAttachmentInfoEXT; //=== VK_KHR_fragment_shading_rate === struct FragmentShadingRateAttachmentInfoKHR; @@ -1058,6 +1059,7 @@ namespace VULKAN_HPP_NAMESPACE struct PhysicalDeviceFragmentShadingRateFeaturesKHR; struct PhysicalDeviceFragmentShadingRatePropertiesKHR; struct PhysicalDeviceFragmentShadingRateKHR; + struct RenderingFragmentShadingRateAttachmentInfoKHR; //=== VK_AMD_shader_core_properties2 === struct PhysicalDeviceShaderCoreProperties2AMD; @@ -1286,10 +1288,6 @@ namespace VULKAN_HPP_NAMESPACE struct ImportMetalSharedEventInfoEXT; #endif /*VK_USE_PLATFORM_METAL_EXT*/ - //=== VK_KHR_synchronization2 === - struct QueueFamilyCheckpointProperties2NV; - struct CheckpointData2NV; - //=== VK_EXT_descriptor_buffer === struct PhysicalDeviceDescriptorBufferPropertiesEXT; struct PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT; @@ -1401,6 +1399,9 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_EXT_primitive_topology_list_restart === struct PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT; + //=== VK_EXT_present_mode_fifo_latest_ready === + struct PhysicalDevicePresentModeFifoLatestReadyFeaturesEXT; + #if defined( VK_USE_PLATFORM_FUCHSIA ) //=== VK_FUCHSIA_external_memory === struct ImportMemoryZirconHandleInfoFUCHSIA; @@ -1873,11 +1874,50 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_NV_ray_tracing_validation === struct PhysicalDeviceRayTracingValidationFeaturesNV; + //=== VK_EXT_device_generated_commands === + struct PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT; + struct PhysicalDeviceDeviceGeneratedCommandsPropertiesEXT; + struct GeneratedCommandsMemoryRequirementsInfoEXT; + struct IndirectExecutionSetCreateInfoEXT; + union IndirectExecutionSetInfoEXT; + struct IndirectExecutionSetPipelineInfoEXT; + struct IndirectExecutionSetShaderInfoEXT; + struct GeneratedCommandsInfoEXT; + struct WriteIndirectExecutionSetPipelineEXT; + struct IndirectCommandsLayoutCreateInfoEXT; + struct IndirectCommandsLayoutTokenEXT; + struct DrawIndirectCountIndirectCommandEXT; + struct IndirectCommandsVertexBufferTokenEXT; + struct BindVertexBufferIndirectCommandEXT; + struct IndirectCommandsIndexBufferTokenEXT; + struct BindIndexBufferIndirectCommandEXT; + struct IndirectCommandsPushConstantTokenEXT; + struct IndirectCommandsExecutionSetTokenEXT; + union IndirectCommandsTokenDataEXT; + struct 
IndirectExecutionSetShaderLayoutInfoEXT; + struct GeneratedCommandsPipelineInfoEXT; + struct GeneratedCommandsShaderInfoEXT; + struct WriteIndirectExecutionSetShaderEXT; + //=== VK_MESA_image_alignment_control === struct PhysicalDeviceImageAlignmentControlFeaturesMESA; struct PhysicalDeviceImageAlignmentControlPropertiesMESA; struct ImageAlignmentControlCreateInfoMESA; + //=== VK_EXT_depth_clamp_control === + struct PhysicalDeviceDepthClampControlFeaturesEXT; + struct PipelineViewportDepthClampControlCreateInfoEXT; + struct DepthClampRangeEXT; + + //=== VK_HUAWEI_hdr_vivid === + struct PhysicalDeviceHdrVividFeaturesHUAWEI; + struct HdrVividDynamicMetadataHUAWEI; + + //=== VK_NV_cooperative_matrix2 === + struct CooperativeMatrixFlexibleDimensionsPropertiesNV; + struct PhysicalDeviceCooperativeMatrix2FeaturesNV; + struct PhysicalDeviceCooperativeMatrix2PropertiesNV; + //=================================== //=== HANDLE forward declarations === //=================================== @@ -1981,6 +2021,10 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_KHR_pipeline_binary === class PipelineBinaryKHR; + //=== VK_EXT_device_generated_commands === + class IndirectCommandsLayoutEXT; + class IndirectExecutionSetEXT; + #ifndef VULKAN_HPP_NO_SMART_HANDLE //====================== //=== UNIQUE HANDLEs === @@ -1991,7 +2035,7 @@ namespace VULKAN_HPP_NAMESPACE class UniqueHandleTraits { public: - using deleter = ObjectDestroy; + using deleter = detail::ObjectDestroy; }; using UniqueInstance = UniqueHandle; @@ -2000,7 +2044,7 @@ namespace VULKAN_HPP_NAMESPACE class UniqueHandleTraits { public: - using deleter = ObjectDestroy; + using deleter = detail::ObjectDestroy; }; using UniqueDevice = UniqueHandle; @@ -2009,7 +2053,7 @@ namespace VULKAN_HPP_NAMESPACE class UniqueHandleTraits { public: - using deleter = ObjectFree; + using deleter = detail::ObjectFree; }; using UniqueDeviceMemory = UniqueHandle; @@ -2018,7 +2062,7 @@ namespace VULKAN_HPP_NAMESPACE class UniqueHandleTraits { public: - using deleter = ObjectDestroy; + using deleter = detail::ObjectDestroy; }; using UniqueFence = UniqueHandle; @@ -2027,7 +2071,7 @@ namespace VULKAN_HPP_NAMESPACE class UniqueHandleTraits { public: - using deleter = ObjectDestroy; + using deleter = detail::ObjectDestroy; }; using UniqueSemaphore = UniqueHandle; @@ -2036,7 +2080,7 @@ namespace VULKAN_HPP_NAMESPACE class UniqueHandleTraits { public: - using deleter = ObjectDestroy; + using deleter = detail::ObjectDestroy; }; using UniqueEvent = UniqueHandle; @@ -2045,7 +2089,7 @@ namespace VULKAN_HPP_NAMESPACE class UniqueHandleTraits { public: - using deleter = ObjectDestroy; + using deleter = detail::ObjectDestroy; }; using UniqueQueryPool = UniqueHandle; @@ -2054,7 +2098,7 @@ namespace VULKAN_HPP_NAMESPACE class UniqueHandleTraits { public: - using deleter = ObjectDestroy; + using deleter = detail::ObjectDestroy; }; using UniqueBuffer = UniqueHandle; @@ -2063,7 +2107,7 @@ namespace VULKAN_HPP_NAMESPACE class UniqueHandleTraits { public: - using deleter = ObjectDestroy; + using deleter = detail::ObjectDestroy; }; using UniqueBufferView = UniqueHandle; @@ -2072,7 +2116,7 @@ namespace VULKAN_HPP_NAMESPACE class UniqueHandleTraits { public: - using deleter = ObjectDestroy; + using deleter = detail::ObjectDestroy; }; using UniqueImage = UniqueHandle; @@ -2081,7 +2125,7 @@ namespace VULKAN_HPP_NAMESPACE class UniqueHandleTraits { public: - using deleter = ObjectDestroy; + using deleter = detail::ObjectDestroy; }; using UniqueImageView = UniqueHandle; @@ -2090,7 +2134,7 @@ namespace 
VULKAN_HPP_NAMESPACE class UniqueHandleTraits { public: - using deleter = ObjectDestroy; + using deleter = detail::ObjectDestroy; }; using UniqueShaderModule = UniqueHandle; @@ -2099,7 +2143,7 @@ namespace VULKAN_HPP_NAMESPACE class UniqueHandleTraits { public: - using deleter = ObjectDestroy; + using deleter = detail::ObjectDestroy; }; using UniquePipelineCache = UniqueHandle; @@ -2108,7 +2152,7 @@ namespace VULKAN_HPP_NAMESPACE class UniqueHandleTraits { public: - using deleter = ObjectDestroy; + using deleter = detail::ObjectDestroy; }; using UniquePipeline = UniqueHandle; @@ -2117,7 +2161,7 @@ namespace VULKAN_HPP_NAMESPACE class UniqueHandleTraits { public: - using deleter = ObjectDestroy; + using deleter = detail::ObjectDestroy; }; using UniquePipelineLayout = UniqueHandle; @@ -2126,7 +2170,7 @@ namespace VULKAN_HPP_NAMESPACE class UniqueHandleTraits { public: - using deleter = ObjectDestroy; + using deleter = detail::ObjectDestroy; }; using UniqueSampler = UniqueHandle; @@ -2135,7 +2179,7 @@ namespace VULKAN_HPP_NAMESPACE class UniqueHandleTraits { public: - using deleter = ObjectDestroy; + using deleter = detail::ObjectDestroy; }; using UniqueDescriptorPool = UniqueHandle; @@ -2144,7 +2188,7 @@ namespace VULKAN_HPP_NAMESPACE class UniqueHandleTraits { public: - using deleter = PoolFree; + using deleter = detail::PoolFree; }; using UniqueDescriptorSet = UniqueHandle; @@ -2153,7 +2197,7 @@ namespace VULKAN_HPP_NAMESPACE class UniqueHandleTraits { public: - using deleter = ObjectDestroy; + using deleter = detail::ObjectDestroy; }; using UniqueDescriptorSetLayout = UniqueHandle; @@ -2162,7 +2206,7 @@ namespace VULKAN_HPP_NAMESPACE class UniqueHandleTraits { public: - using deleter = ObjectDestroy; + using deleter = detail::ObjectDestroy; }; using UniqueFramebuffer = UniqueHandle; @@ -2171,7 +2215,7 @@ namespace VULKAN_HPP_NAMESPACE class UniqueHandleTraits { public: - using deleter = ObjectDestroy; + using deleter = detail::ObjectDestroy; }; using UniqueRenderPass = UniqueHandle; @@ -2180,7 +2224,7 @@ namespace VULKAN_HPP_NAMESPACE class UniqueHandleTraits { public: - using deleter = ObjectDestroy; + using deleter = detail::ObjectDestroy; }; using UniqueCommandPool = UniqueHandle; @@ -2189,7 +2233,7 @@ namespace VULKAN_HPP_NAMESPACE class UniqueHandleTraits { public: - using deleter = PoolFree; + using deleter = detail::PoolFree; }; using UniqueCommandBuffer = UniqueHandle; @@ -2199,7 +2243,7 @@ namespace VULKAN_HPP_NAMESPACE class UniqueHandleTraits { public: - using deleter = ObjectDestroy; + using deleter = detail::ObjectDestroy; }; using UniqueSamplerYcbcrConversion = UniqueHandle; @@ -2209,7 +2253,7 @@ namespace VULKAN_HPP_NAMESPACE class UniqueHandleTraits { public: - using deleter = ObjectDestroy; + using deleter = detail::ObjectDestroy; }; using UniqueDescriptorUpdateTemplate = UniqueHandle; @@ -2220,7 +2264,7 @@ namespace VULKAN_HPP_NAMESPACE class UniqueHandleTraits { public: - using deleter = ObjectDestroy; + using deleter = detail::ObjectDestroy; }; using UniquePrivateDataSlot = UniqueHandle; @@ -2231,7 +2275,7 @@ namespace VULKAN_HPP_NAMESPACE class UniqueHandleTraits { public: - using deleter = ObjectDestroy; + using deleter = detail::ObjectDestroy; }; using UniqueSurfaceKHR = UniqueHandle; @@ -2241,7 +2285,7 @@ namespace VULKAN_HPP_NAMESPACE class UniqueHandleTraits { public: - using deleter = ObjectDestroy; + using deleter = detail::ObjectDestroy; }; using UniqueSwapchainKHR = UniqueHandle; @@ -2251,7 +2295,7 @@ namespace VULKAN_HPP_NAMESPACE class UniqueHandleTraits { 
public: - using deleter = ObjectDestroy; + using deleter = detail::ObjectDestroy; }; using UniqueDisplayKHR = UniqueHandle; @@ -2261,7 +2305,7 @@ namespace VULKAN_HPP_NAMESPACE class UniqueHandleTraits { public: - using deleter = ObjectDestroy; + using deleter = detail::ObjectDestroy; }; using UniqueDebugReportCallbackEXT = UniqueHandle; @@ -2271,7 +2315,7 @@ namespace VULKAN_HPP_NAMESPACE class UniqueHandleTraits { public: - using deleter = ObjectDestroy; + using deleter = detail::ObjectDestroy; }; using UniqueVideoSessionKHR = UniqueHandle; @@ -2280,7 +2324,7 @@ namespace VULKAN_HPP_NAMESPACE class UniqueHandleTraits { public: - using deleter = ObjectDestroy; + using deleter = detail::ObjectDestroy; }; using UniqueVideoSessionParametersKHR = UniqueHandle; @@ -2290,7 +2334,7 @@ namespace VULKAN_HPP_NAMESPACE class UniqueHandleTraits { public: - using deleter = ObjectDestroy; + using deleter = detail::ObjectDestroy; }; using UniqueCuModuleNVX = UniqueHandle; @@ -2299,7 +2343,7 @@ namespace VULKAN_HPP_NAMESPACE class UniqueHandleTraits { public: - using deleter = ObjectDestroy; + using deleter = detail::ObjectDestroy; }; using UniqueCuFunctionNVX = UniqueHandle; @@ -2309,7 +2353,7 @@ namespace VULKAN_HPP_NAMESPACE class UniqueHandleTraits { public: - using deleter = ObjectDestroy; + using deleter = detail::ObjectDestroy; }; using UniqueDebugUtilsMessengerEXT = UniqueHandle; @@ -2319,7 +2363,7 @@ namespace VULKAN_HPP_NAMESPACE class UniqueHandleTraits { public: - using deleter = ObjectDestroy; + using deleter = detail::ObjectDestroy; }; using UniqueAccelerationStructureKHR = UniqueHandle; @@ -2329,7 +2373,7 @@ namespace VULKAN_HPP_NAMESPACE class UniqueHandleTraits { public: - using deleter = ObjectDestroy; + using deleter = detail::ObjectDestroy; }; using UniqueValidationCacheEXT = UniqueHandle; @@ -2339,7 +2383,7 @@ namespace VULKAN_HPP_NAMESPACE class UniqueHandleTraits { public: - using deleter = ObjectDestroy; + using deleter = detail::ObjectDestroy; }; using UniqueAccelerationStructureNV = UniqueHandle; @@ -2349,7 +2393,7 @@ namespace VULKAN_HPP_NAMESPACE class UniqueHandleTraits { public: - using deleter = ObjectDestroy; + using deleter = detail::ObjectDestroy; }; using UniquePerformanceConfigurationINTEL = UniqueHandle; @@ -2359,7 +2403,7 @@ namespace VULKAN_HPP_NAMESPACE class UniqueHandleTraits { public: - using deleter = ObjectDestroy; + using deleter = detail::ObjectDestroy; }; using UniqueDeferredOperationKHR = UniqueHandle; @@ -2369,7 +2413,7 @@ namespace VULKAN_HPP_NAMESPACE class UniqueHandleTraits { public: - using deleter = ObjectDestroy; + using deleter = detail::ObjectDestroy; }; using UniqueIndirectCommandsLayoutNV = UniqueHandle; @@ -2380,7 +2424,7 @@ namespace VULKAN_HPP_NAMESPACE class UniqueHandleTraits { public: - using deleter = ObjectDestroy; + using deleter = detail::ObjectDestroy; }; using UniqueCudaModuleNV = UniqueHandle; @@ -2389,7 +2433,7 @@ namespace VULKAN_HPP_NAMESPACE class UniqueHandleTraits { public: - using deleter = ObjectDestroy; + using deleter = detail::ObjectDestroy; }; using UniqueCudaFunctionNV = UniqueHandle; @@ -2401,7 +2445,7 @@ namespace VULKAN_HPP_NAMESPACE class UniqueHandleTraits { public: - using deleter = ObjectDestroy; + using deleter = detail::ObjectDestroy; }; using UniqueBufferCollectionFUCHSIA = UniqueHandle; @@ -2412,7 +2456,7 @@ namespace VULKAN_HPP_NAMESPACE class UniqueHandleTraits { public: - using deleter = ObjectDestroy; + using deleter = detail::ObjectDestroy; }; using UniqueMicromapEXT = UniqueHandle; @@ -2422,7 +2466,7 
@@ namespace VULKAN_HPP_NAMESPACE class UniqueHandleTraits { public: - using deleter = ObjectDestroy; + using deleter = detail::ObjectDestroy; }; using UniqueOpticalFlowSessionNV = UniqueHandle; @@ -2432,7 +2476,7 @@ namespace VULKAN_HPP_NAMESPACE class UniqueHandleTraits { public: - using deleter = ObjectDestroy; + using deleter = detail::ObjectDestroy; }; using UniqueShaderEXT = UniqueHandle; @@ -2442,10 +2486,29 @@ namespace VULKAN_HPP_NAMESPACE class UniqueHandleTraits { public: - using deleter = ObjectDestroy; + using deleter = detail::ObjectDestroy; }; using UniquePipelineBinaryKHR = UniqueHandle; + + //=== VK_EXT_device_generated_commands === + template + class UniqueHandleTraits + { + public: + using deleter = detail::ObjectDestroy; + }; + + using UniqueIndirectCommandsLayoutEXT = UniqueHandle; + + template + class UniqueHandleTraits + { + public: + using deleter = detail::ObjectDestroy; + }; + + using UniqueIndirectExecutionSetEXT = UniqueHandle; #endif /*VULKAN_HPP_NO_SMART_HANDLE*/ //=============== @@ -6022,33 +6085,40 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_AMDX_shader_enqueue === template - void initializeGraphScratchMemoryAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch, + void initializeGraphScratchMemoryAMDX( VULKAN_HPP_NAMESPACE::Pipeline executionGraph, + VULKAN_HPP_NAMESPACE::DeviceAddress scratch, + VULKAN_HPP_NAMESPACE::DeviceSize scratchSize, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; template void dispatchGraphAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch, + VULKAN_HPP_NAMESPACE::DeviceSize scratchSize, const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX * pCountInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template void dispatchGraphAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch, + VULKAN_HPP_NAMESPACE::DeviceSize scratchSize, const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX & countInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template void dispatchGraphIndirectAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch, + VULKAN_HPP_NAMESPACE::DeviceSize scratchSize, const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX * pCountInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template void dispatchGraphIndirectAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch, + VULKAN_HPP_NAMESPACE::DeviceSize scratchSize, const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX & countInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template void dispatchGraphIndirectCountAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch, + VULKAN_HPP_NAMESPACE::DeviceSize scratchSize, VULKAN_HPP_NAMESPACE::DeviceAddress countInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ @@ -6308,6 +6378,13 @@ namespace VULKAN_HPP_NAMESPACE uint32_t marker, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + void writeBufferMarker2AMD( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, + uint32_t marker, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const 
VULKAN_HPP_NOEXCEPT; + //=== VK_NV_mesh_shader === template @@ -6624,13 +6701,6 @@ namespace VULKAN_HPP_NAMESPACE uint32_t query, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; - template - void writeBufferMarker2AMD( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage, - VULKAN_HPP_NAMESPACE::Buffer dstBuffer, - VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, - uint32_t marker, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; - //=== VK_EXT_descriptor_buffer === template @@ -7187,6 +7257,17 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS; #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + template + void setDepthClampRangeEXT( VULKAN_HPP_NAMESPACE::DepthClampModeEXT depthClampMode, + const VULKAN_HPP_NAMESPACE::DepthClampRangeEXT * pDepthClampRange, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setDepthClampRangeEXT( VULKAN_HPP_NAMESPACE::DepthClampModeEXT depthClampMode, + Optional depthClampRange VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + //=== VK_EXT_attachment_feedback_loop_dynamic_state === template @@ -7258,6 +7339,30 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + //=== VK_EXT_device_generated_commands === + + template + void preprocessGeneratedCommandsEXT( const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoEXT * pGeneratedCommandsInfo, + VULKAN_HPP_NAMESPACE::CommandBuffer stateCommandBuffer, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void preprocessGeneratedCommandsEXT( const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoEXT & generatedCommandsInfo, + VULKAN_HPP_NAMESPACE::CommandBuffer stateCommandBuffer, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void executeGeneratedCommandsEXT( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, + const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoEXT * pGeneratedCommandsInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void executeGeneratedCommandsEXT( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, + const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoEXT & generatedCommandsInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + operator VkCommandBuffer() const VULKAN_HPP_NOEXCEPT { return m_commandBuffer; @@ -8855,6 +8960,116 @@ namespace VULKAN_HPP_NAMESPACE static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; }; + class IndirectCommandsLayoutEXT + { + public: + using CType = VkIndirectCommandsLayoutEXT; + using NativeType = VkIndirectCommandsLayoutEXT; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eIndirectCommandsLayoutEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + 
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + public: + IndirectCommandsLayoutEXT() VULKAN_HPP_NOEXCEPT{}; // = default - try to workaround a compiler issue + IndirectCommandsLayoutEXT( IndirectCommandsLayoutEXT const & rhs ) = default; + IndirectCommandsLayoutEXT & operator=( IndirectCommandsLayoutEXT const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + IndirectCommandsLayoutEXT( IndirectCommandsLayoutEXT && rhs ) = default; + IndirectCommandsLayoutEXT & operator=( IndirectCommandsLayoutEXT && rhs ) = default; +#else + IndirectCommandsLayoutEXT( IndirectCommandsLayoutEXT && rhs ) VULKAN_HPP_NOEXCEPT + : m_indirectCommandsLayoutEXT( VULKAN_HPP_NAMESPACE::exchange( rhs.m_indirectCommandsLayoutEXT, {} ) ) + { + } + + IndirectCommandsLayoutEXT & operator=( IndirectCommandsLayoutEXT && rhs ) VULKAN_HPP_NOEXCEPT + { + m_indirectCommandsLayoutEXT = VULKAN_HPP_NAMESPACE::exchange( rhs.m_indirectCommandsLayoutEXT, {} ); + return *this; + } +#endif + + VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutEXT( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT IndirectCommandsLayoutEXT( VkIndirectCommandsLayoutEXT indirectCommandsLayoutEXT ) VULKAN_HPP_NOEXCEPT + : m_indirectCommandsLayoutEXT( indirectCommandsLayoutEXT ) + { + } + +#if ( VULKAN_HPP_TYPESAFE_CONVERSION == 1 ) + IndirectCommandsLayoutEXT & operator=( VkIndirectCommandsLayoutEXT indirectCommandsLayoutEXT ) VULKAN_HPP_NOEXCEPT + { + m_indirectCommandsLayoutEXT = indirectCommandsLayoutEXT; + return *this; + } +#endif + + IndirectCommandsLayoutEXT & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_indirectCommandsLayoutEXT = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( IndirectCommandsLayoutEXT const & ) const = default; +#else + bool operator==( IndirectCommandsLayoutEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_indirectCommandsLayoutEXT == rhs.m_indirectCommandsLayoutEXT; + } + + bool operator!=( IndirectCommandsLayoutEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_indirectCommandsLayoutEXT != rhs.m_indirectCommandsLayoutEXT; + } + + bool operator<( IndirectCommandsLayoutEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_indirectCommandsLayoutEXT < rhs.m_indirectCommandsLayoutEXT; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkIndirectCommandsLayoutEXT() const VULKAN_HPP_NOEXCEPT + { + return m_indirectCommandsLayoutEXT; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_indirectCommandsLayoutEXT != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_indirectCommandsLayoutEXT == VK_NULL_HANDLE; + } + + private: + VkIndirectCommandsLayoutEXT m_indirectCommandsLayoutEXT = {}; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT; + }; + +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT; + }; +#endif + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + class IndirectCommandsLayoutNV { public: @@ -8965,6 +9180,116 @@ namespace VULKAN_HPP_NAMESPACE static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; }; + class IndirectExecutionSetEXT + { + public: + using CType = VkIndirectExecutionSetEXT; + using NativeType = VkIndirectExecutionSetEXT; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = 
VULKAN_HPP_NAMESPACE::ObjectType::eIndirectExecutionSetEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + public: + IndirectExecutionSetEXT() VULKAN_HPP_NOEXCEPT{}; // = default - try to workaround a compiler issue + IndirectExecutionSetEXT( IndirectExecutionSetEXT const & rhs ) = default; + IndirectExecutionSetEXT & operator=( IndirectExecutionSetEXT const & rhs ) = default; + +#if !defined( VULKAN_HPP_HANDLES_MOVE_EXCHANGE ) + IndirectExecutionSetEXT( IndirectExecutionSetEXT && rhs ) = default; + IndirectExecutionSetEXT & operator=( IndirectExecutionSetEXT && rhs ) = default; +#else + IndirectExecutionSetEXT( IndirectExecutionSetEXT && rhs ) VULKAN_HPP_NOEXCEPT + : m_indirectExecutionSetEXT( VULKAN_HPP_NAMESPACE::exchange( rhs.m_indirectExecutionSetEXT, {} ) ) + { + } + + IndirectExecutionSetEXT & operator=( IndirectExecutionSetEXT && rhs ) VULKAN_HPP_NOEXCEPT + { + m_indirectExecutionSetEXT = VULKAN_HPP_NAMESPACE::exchange( rhs.m_indirectExecutionSetEXT, {} ); + return *this; + } +#endif + + VULKAN_HPP_CONSTEXPR IndirectExecutionSetEXT( std::nullptr_t ) VULKAN_HPP_NOEXCEPT {} + + VULKAN_HPP_TYPESAFE_EXPLICIT IndirectExecutionSetEXT( VkIndirectExecutionSetEXT indirectExecutionSetEXT ) VULKAN_HPP_NOEXCEPT + : m_indirectExecutionSetEXT( indirectExecutionSetEXT ) + { + } + +#if ( VULKAN_HPP_TYPESAFE_CONVERSION == 1 ) + IndirectExecutionSetEXT & operator=( VkIndirectExecutionSetEXT indirectExecutionSetEXT ) VULKAN_HPP_NOEXCEPT + { + m_indirectExecutionSetEXT = indirectExecutionSetEXT; + return *this; + } +#endif + + IndirectExecutionSetEXT & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + m_indirectExecutionSetEXT = {}; + return *this; + } + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( IndirectExecutionSetEXT const & ) const = default; +#else + bool operator==( IndirectExecutionSetEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_indirectExecutionSetEXT == rhs.m_indirectExecutionSetEXT; + } + + bool operator!=( IndirectExecutionSetEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_indirectExecutionSetEXT != rhs.m_indirectExecutionSetEXT; + } + + bool operator<( IndirectExecutionSetEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return m_indirectExecutionSetEXT < rhs.m_indirectExecutionSetEXT; + } +#endif + + VULKAN_HPP_TYPESAFE_EXPLICIT operator VkIndirectExecutionSetEXT() const VULKAN_HPP_NOEXCEPT + { + return m_indirectExecutionSetEXT; + } + + explicit operator bool() const VULKAN_HPP_NOEXCEPT + { + return m_indirectExecutionSetEXT != VK_NULL_HANDLE; + } + + bool operator!() const VULKAN_HPP_NOEXCEPT + { + return m_indirectExecutionSetEXT == VK_NULL_HANDLE; + } + + private: + VkIndirectExecutionSetEXT m_indirectExecutionSetEXT = {}; + }; + + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT; + }; + +#if ( VK_USE_64_BIT_PTR_DEFINES == 1 ) + template <> + struct CppType + { + using Type = VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT; + }; +#endif + + template <> + struct isVulkanHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + class PrivateDataSlot { public: @@ -10002,8 +10327,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::CheckpointDataNV * pCheckpointData, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , - typename Dispatch = 
VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template < + typename CheckpointDataNVAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD std::vector getCheckpointDataNV( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template < @@ -10014,6 +10341,25 @@ namespace VULKAN_HPP_NAMESPACE getCheckpointDataNV( CheckpointDataNVAllocator & checkpointDataNVAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + template + void getCheckpointData2NV( uint32_t * pCheckpointDataCount, + VULKAN_HPP_NAMESPACE::CheckpointData2NV * pCheckpointData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template < + typename CheckpointData2NVAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD std::vector + getCheckpointData2NV( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template < + typename CheckpointData2NVAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD std::vector + getCheckpointData2NV( CheckpointData2NVAllocator & checkpointData2NVAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + //=== VK_INTEL_performance_query === #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -10042,23 +10388,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - template - void getCheckpointData2NV( uint32_t * pCheckpointDataCount, - VULKAN_HPP_NAMESPACE::CheckpointData2NV * pCheckpointData, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> - VULKAN_HPP_NODISCARD std::vector - getCheckpointData2NV( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; - template < - typename CheckpointData2NVAllocator = std::allocator, - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, - typename std::enable_if::value, int>::type = 0> - VULKAN_HPP_NODISCARD std::vector - getCheckpointData2NV( CheckpointData2NVAllocator & checkpointData2NVAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - //=== VK_NV_low_latency2 === template @@ -10365,7 +10694,10 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if< + std::is_same::value, + int>::type = 0> VULKAN_HPP_NODISCARD std::vector getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template , @@ -10620,7 +10952,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , 
typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD ResultValue> getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, @@ -10909,7 +11244,9 @@ namespace VULKAN_HPP_NAMESPACE void * pData, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template , @@ -10942,7 +11279,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Pipeline * pPipelines, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD ResultValue> createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, @@ -10964,8 +11303,10 @@ namespace VULKAN_HPP_NAMESPACE Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; # ifndef VULKAN_HPP_NO_SMART_HANDLE - template >> + template >, + typename std::enable_if>::value, + int>::type = 0> VULKAN_HPP_NODISCARD ResultValue, PipelineAllocator>> createGraphicsPipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, @@ -10998,7 +11339,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Pipeline * pPipelines, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD ResultValue> createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, @@ -11020,8 +11363,10 @@ namespace VULKAN_HPP_NAMESPACE Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; # ifndef VULKAN_HPP_NO_SMART_HANDLE - template >> + template >, + typename std::enable_if>::value, + int>::type = 0> VULKAN_HPP_NODISCARD ResultValue, PipelineAllocator>> createComputePipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, @@ -11253,7 +11598,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type 
allocateDescriptorSets( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; @@ -11265,8 +11612,11 @@ namespace VULKAN_HPP_NAMESPACE DescriptorSetAllocator & descriptorSetAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; # ifndef VULKAN_HPP_NO_SMART_HANDLE - template >> + template < + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename DescriptorSetAllocator = std::allocator>, + typename std::enable_if>::value, + int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType, DescriptorSetAllocator>>::type allocateDescriptorSetsUnique( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; @@ -11473,7 +11823,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type allocateCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & allocateInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; @@ -11485,8 +11837,11 @@ namespace VULKAN_HPP_NAMESPACE CommandBufferAllocator & commandBufferAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; # ifndef VULKAN_HPP_NO_SMART_HANDLE - template >> + template < + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename CommandBufferAllocator = std::allocator>, + typename std::enable_if>::value, + int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType, CommandBufferAllocator>>::type allocateCommandBuffersUnique( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & allocateInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; @@ -11602,7 +11957,10 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if< + std::is_same::value, + int>::type = 0> VULKAN_HPP_NODISCARD std::vector getImageSparseMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; @@ -11940,7 +12298,10 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if< + std::is_same::value, + int>::type = 0> VULKAN_HPP_NODISCARD std::vector getImageSparseMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; @@ -12005,7 +12366,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Image * pSwapchainImages, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , typename Dispatch = 
VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template , @@ -12069,7 +12432,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type createSharedSwapchainsKHR( VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, @@ -12088,8 +12453,11 @@ namespace VULKAN_HPP_NAMESPACE Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; # ifndef VULKAN_HPP_NO_SMART_HANDLE - template >> + template < + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename SwapchainKHRAllocator = std::allocator>, + typename std::enable_if>::value, + int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType, SwapchainKHRAllocator>>::type createSharedSwapchainsKHRUnique( VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, @@ -12185,7 +12553,10 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if< + std::is_same::value, + int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type getVideoSessionMemoryRequirementsKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, @@ -12386,7 +12757,9 @@ namespace VULKAN_HPP_NAMESPACE void * pInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, @@ -12673,7 +13046,10 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if< + std::is_same::value, + int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template , @@ -12859,7 +13235,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Pipeline * pPipelines, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , 
typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD ResultValue> createExecutionGraphPipelinesAMDX( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, @@ -12881,8 +13259,10 @@ namespace VULKAN_HPP_NAMESPACE Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; # ifndef VULKAN_HPP_NO_SMART_HANDLE - template >> + template >, + typename std::enable_if>::value, + int>::type = 0> VULKAN_HPP_NODISCARD ResultValue, PipelineAllocator>> createExecutionGraphPipelinesAMDXUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, @@ -12974,7 +13354,10 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if< + std::is_same::value, + int>::type = 0> VULKAN_HPP_NODISCARD std::vector getImageSparseMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; @@ -13093,7 +13476,10 @@ namespace VULKAN_HPP_NAMESPACE size_t stride, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type writeAccelerationStructuresPropertiesKHR( VULKAN_HPP_NAMESPACE::ArrayProxy const & accelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, @@ -13155,7 +13541,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Pipeline * pPipelines, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD ResultValue> createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, @@ -13180,8 +13568,10 @@ namespace VULKAN_HPP_NAMESPACE Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; # ifndef VULKAN_HPP_NO_SMART_HANDLE - template >> + template >, + typename std::enable_if>::value, + int>::type = 0> VULKAN_HPP_NODISCARD ResultValue, PipelineAllocator>> createRayTracingPipelinesKHRUnique( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, @@ -13217,7 +13607,10 @@ namespace VULKAN_HPP_NAMESPACE void * pData, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type 
getRayTracingShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, @@ -13238,7 +13631,10 @@ namespace VULKAN_HPP_NAMESPACE void * pData, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type getRayTracingCaptureReplayShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, @@ -13388,7 +13784,9 @@ namespace VULKAN_HPP_NAMESPACE void * pData, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template , @@ -13478,7 +13876,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Pipeline * pPipelines, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD ResultValue> createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, @@ -13500,8 +13900,10 @@ namespace VULKAN_HPP_NAMESPACE Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; # ifndef VULKAN_HPP_NO_SMART_HANDLE - template >> + template >, + typename std::enable_if>::value, + int>::type = 0> VULKAN_HPP_NODISCARD ResultValue, PipelineAllocator>> createRayTracingPipelinesNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, @@ -13534,7 +13936,10 @@ namespace VULKAN_HPP_NAMESPACE void * pData, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, @@ -13552,7 +13957,10 @@ namespace VULKAN_HPP_NAMESPACE void * pData, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, size_t dataSize, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template @@ -13613,7 +14021,9 @@ 
namespace VULKAN_HPP_NAMESPACE uint64_t * pMaxDeviation, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType, uint64_t>>::type getCalibratedTimestampsEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & timestampInfos, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; @@ -13907,7 +14317,10 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if< + std::is_same::value, + int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type getPipelineExecutablePropertiesKHR( const VULKAN_HPP_NAMESPACE::PipelineInfoKHR & pipelineInfo, @@ -13931,7 +14344,10 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if< + std::is_same::value, + int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type getPipelineExecutableStatisticsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo, @@ -13956,7 +14372,10 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, + int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType< std::vector>::type getPipelineExecutableInternalRepresentationsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo, @@ -14199,7 +14618,9 @@ namespace VULKAN_HPP_NAMESPACE void * pData, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>>::type getEncodedVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR & videoSessionParametersInfo, @@ -14215,8 +14636,9 @@ namespace VULKAN_HPP_NAMESPACE template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename Uint8_tAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType, std::vector>>::type getEncodedVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR & videoSessionParametersInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; @@ -14261,7 +14683,9 @@ namespace VULKAN_HPP_NAMESPACE void * pCacheData, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - 
template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type getCudaModuleCacheNV( VULKAN_HPP_NAMESPACE::CudaModuleNV module, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template , @@ -14734,7 +15158,10 @@ namespace VULKAN_HPP_NAMESPACE size_t stride, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type writeMicromapsPropertiesEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & micromaps, VULKAN_HPP_NAMESPACE::QueryType queryType, @@ -14819,7 +15246,10 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if< + std::is_same::value, + int>::type = 0> VULKAN_HPP_NODISCARD std::vector getImageSparseMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; @@ -15034,7 +15464,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::ShaderEXT * pShaders, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD ResultValue> createShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, @@ -15053,8 +15485,10 @@ namespace VULKAN_HPP_NAMESPACE Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; # ifndef VULKAN_HPP_NO_SMART_HANDLE - template >> + template >, + typename std::enable_if>::value, + int>::type = 0> VULKAN_HPP_NODISCARD ResultValue, ShaderEXTAllocator>> createShadersEXTUnique( VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, @@ -15104,7 +15538,9 @@ namespace VULKAN_HPP_NAMESPACE void * pData, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type getShaderBinaryDataEXT( VULKAN_HPP_NAMESPACE::ShaderEXT shader, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template , @@ -15122,8 +15558,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::PipelineBinaryHandlesInfoKHR * pBinaries, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + 
template < + typename PipelineBinaryKHRAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD ResultValue> createPipelineBinariesKHR( const VULKAN_HPP_NAMESPACE::PipelineBinaryCreateInfoKHR & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, @@ -15139,7 +15577,10 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; # ifndef VULKAN_HPP_NO_SMART_HANDLE template >> + typename PipelineBinaryKHRAllocator = std::allocator>, + typename std::enable_if< + std::is_same>::value, + int>::type = 0> VULKAN_HPP_NODISCARD ResultValue, PipelineBinaryKHRAllocator>> createPipelineBinariesKHRUnique( const VULKAN_HPP_NAMESPACE::PipelineBinaryCreateInfoKHR & createInfo, Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, @@ -15197,7 +15638,9 @@ namespace VULKAN_HPP_NAMESPACE void * pPipelineBinaryData, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>>::type getPipelineBinaryDataKHR( const VULKAN_HPP_NAMESPACE::PipelineBinaryDataInfoKHR & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; @@ -15229,8 +15672,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::TilePropertiesQCOM * pProperties, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template < + typename TilePropertiesQCOMAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type getFramebufferTilePropertiesQCOM( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template < @@ -15294,8 +15739,11 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::GetLatencyMarkerInfoNV * pLatencyMarkerInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template < + typename LatencyTimingsFrameReportNVAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, + int>::type = 0> VULKAN_HPP_NODISCARD std::vector getLatencyTimingsNV( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template < @@ -15335,7 +15783,9 @@ namespace VULKAN_HPP_NAMESPACE uint64_t * pMaxDeviation, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType, uint64_t>>::type getCalibratedTimestampsKHR( VULKAN_HPP_NAMESPACE::ArrayProxy const & timestampInfos, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; @@ -15352,6 
+15802,133 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + //=== VK_EXT_device_generated_commands === + + template + void getGeneratedCommandsMemoryRequirementsEXT( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoEXT * pInfo, + VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 + getGeneratedCommandsMemoryRequirementsEXT( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoEXT & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getGeneratedCommandsMemoryRequirementsEXT( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoEXT & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD Result createIndirectCommandsLayoutEXT( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoEXT * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT * pIndirectCommandsLayout, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + createIndirectCommandsLayoutEXT( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoEXT & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createIndirectCommandsLayoutEXTUnique( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoEXT & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void destroyIndirectCommandsLayoutEXT( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT indirectCommandsLayout, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyIndirectCommandsLayoutEXT( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT indirectCommandsLayout VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT indirectCommandsLayout, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT indirectCommandsLayout, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d 
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD Result createIndirectExecutionSetEXT( const VULKAN_HPP_NAMESPACE::IndirectExecutionSetCreateInfoEXT * pCreateInfo, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT * pIndirectExecutionSet, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD typename ResultValueType::type + createIndirectExecutionSetEXT( const VULKAN_HPP_NAMESPACE::IndirectExecutionSetCreateInfoEXT & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# ifndef VULKAN_HPP_NO_SMART_HANDLE + template + VULKAN_HPP_NODISCARD typename ResultValueType>::type + createIndirectExecutionSetEXTUnique( const VULKAN_HPP_NAMESPACE::IndirectExecutionSetCreateInfoEXT & createInfo, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +# endif /* VULKAN_HPP_NO_SMART_HANDLE */ +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void destroyIndirectExecutionSetEXT( VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT indirectExecutionSet, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroyIndirectExecutionSetEXT( VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT indirectExecutionSet VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void destroy( VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT indirectExecutionSet, + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void destroy( VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT indirectExecutionSet, + Optional allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void updateIndirectExecutionSetPipelineEXT( VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT indirectExecutionSet, + uint32_t executionSetWriteCount, + const VULKAN_HPP_NAMESPACE::WriteIndirectExecutionSetPipelineEXT * pExecutionSetWrites, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void updateIndirectExecutionSetPipelineEXT( + VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT indirectExecutionSet, + VULKAN_HPP_NAMESPACE::ArrayProxy const & executionSetWrites, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void updateIndirectExecutionSetShaderEXT( VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT indirectExecutionSet, + uint32_t executionSetWriteCount, + const VULKAN_HPP_NAMESPACE::WriteIndirectExecutionSetShaderEXT * pExecutionSetWrites, + Dispatch const 
& d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void updateIndirectExecutionSetShaderEXT( + VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT indirectExecutionSet, + VULKAN_HPP_NAMESPACE::ArrayProxy const & executionSetWrites, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + operator VkDevice() const VULKAN_HPP_NOEXCEPT { return m_device; @@ -15625,7 +16202,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, + int>::type = 0> VULKAN_HPP_NODISCARD std::vector getQueueFamilyProperties( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template , @@ -15672,8 +16251,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::ExtensionProperties * pProperties, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template < + typename ExtensionPropertiesAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type enumerateDeviceExtensionProperties( Optional layerName VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; @@ -15692,7 +16273,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::LayerProperties * pProperties, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type enumerateDeviceLayerProperties( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template , @@ -15712,8 +16295,11 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::SparseImageFormatProperties * pProperties, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template < + typename SparseImageFormatPropertiesAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, + int>::type = 0> VULKAN_HPP_NODISCARD std::vector getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, @@ -15796,7 +16382,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, + int>::type = 0> VULKAN_HPP_NODISCARD std::vector getQueueFamilyProperties2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template , @@ -15808,7 +16396,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d 
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD std::vector getQueueFamilyProperties2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if< + std::is_same::value, + int>::type = 0> VULKAN_HPP_NODISCARD std::vector getSparseImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; @@ -15894,7 +16486,10 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if< + std::is_same::value, + int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type getToolProperties( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template , @@ -15936,8 +16531,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::SurfaceFormatKHR * pSurfaceFormats, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template < + typename SurfaceFormatKHRAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; @@ -15957,7 +16554,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; @@ -15978,7 +16577,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Rect2D * pRects, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template , @@ -15996,7 +16597,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename 
Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, + int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type getDisplayPropertiesKHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template , @@ -16013,8 +16616,11 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR * pProperties, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template < + typename DisplayPlanePropertiesKHRAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, + int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type getDisplayPlanePropertiesKHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template < @@ -16033,7 +16639,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplays, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template , @@ -16049,8 +16657,11 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR * pProperties, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template < + typename DisplayModePropertiesKHRAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, + int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template < @@ -16177,8 +16788,11 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR * pVideoFormatProperties, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template < + typename VideoFormatPropertiesKHRAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, + int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type getVideoFormatPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; @@ -16276,7 +16890,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, + int>::type = 0> VULKAN_HPP_NODISCARD std::vector getQueueFamilyProperties2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template , @@ -16288,7 +16904,8 @@ namespace 
VULKAN_HPP_NAMESPACE Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD std::vector getQueueFamilyProperties2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if< + std::is_same::value, + int>::type = 0> VULKAN_HPP_NODISCARD std::vector getSparseImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; @@ -16436,7 +17056,11 @@ namespace VULKAN_HPP_NAMESPACE #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template , typename PerformanceCounterDescriptionKHRAllocator = std::allocator, - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if< + std::is_same::value && + std::is_same::value, + int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType, std::vector>>::type @@ -16491,8 +17115,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR * pSurfaceFormats, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template < + typename SurfaceFormat2KHRAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; @@ -16506,7 +17132,8 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; @@ -16528,7 +17155,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, + int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type getDisplayProperties2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template , @@ -16545,8 +17174,11 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR * pProperties, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template < + typename DisplayPlaneProperties2KHRAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, + int>::type = 0> VULKAN_HPP_NODISCARD 
typename ResultValueType>::type getDisplayPlaneProperties2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template < @@ -16565,8 +17197,11 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR * pProperties, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template < + typename DisplayModeProperties2KHRAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, + int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template < @@ -16611,7 +17246,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::TimeDomainKHR * pTimeDomains, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type getCalibrateableTimeDomainsEXT( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template , @@ -16629,7 +17266,10 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, + int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type getFragmentShadingRatesKHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; @@ -16652,7 +17292,10 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if< + std::is_same::value, + int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type getToolPropertiesEXT( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template , @@ -16673,7 +17316,10 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if< + std::is_same::value, + int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type getCooperativeMatrixPropertiesNV( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; @@ -16697,7 +17343,10 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, + int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type getSupportedFramebufferMixedSamplesCombinationsNV( Dispatch const & d 
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; @@ -16721,7 +17370,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type getSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; @@ -16850,7 +17501,10 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if< + std::is_same::value, + int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type getOpticalFlowImageFormatsNV( const VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV & opticalFlowImageFormatInfo, @@ -16875,7 +17529,10 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if< + std::is_same::value, + int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type getCooperativeMatrixPropertiesKHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; @@ -16897,7 +17554,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::TimeDomainKHR * pTimeDomains, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type getCalibrateableTimeDomainsKHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template , @@ -16907,6 +17566,36 @@ namespace VULKAN_HPP_NAMESPACE getCalibrateableTimeDomainsKHR( TimeDomainKHRAllocator & timeDomainKHRAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + //=== VK_NV_cooperative_matrix2 === + + template + VULKAN_HPP_NODISCARD Result + getCooperativeMatrixFlexibleDimensionsPropertiesNV( uint32_t * pPropertyCount, + VULKAN_HPP_NAMESPACE::CooperativeMatrixFlexibleDimensionsPropertiesNV * pProperties, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template < + typename CooperativeMatrixFlexibleDimensionsPropertiesNVAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, + int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType< + std::vector>::type + getCooperativeMatrixFlexibleDimensionsPropertiesNV( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template < + typename CooperativeMatrixFlexibleDimensionsPropertiesNVAllocator = std::allocator, + typename Dispatch = 
VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, + int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType< + std::vector>::type + getCooperativeMatrixFlexibleDimensionsPropertiesNV( + CooperativeMatrixFlexibleDimensionsPropertiesNVAllocator & cooperativeMatrixFlexibleDimensionsPropertiesNVAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + operator VkPhysicalDevice() const VULKAN_HPP_NOEXCEPT { return m_physicalDevice; @@ -17031,7 +17720,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::PhysicalDevice * pPhysicalDevices, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type enumeratePhysicalDevices( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; template , @@ -17056,7 +17747,10 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if< + std::is_same::value, + int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type enumeratePhysicalDeviceGroups( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; @@ -17358,7 +18052,10 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if< + std::is_same::value, + int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type enumeratePhysicalDeviceGroupsKHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; @@ -17669,8 +18366,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::ExtensionProperties * pProperties, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , - typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template < + typename ExtensionPropertiesAllocator = std::allocator, + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type enumerateInstanceExtensionProperties( Optional layerName VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ); @@ -17689,7 +18388,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::LayerProperties * pProperties, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template , typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> VULKAN_HPP_NODISCARD typename ResultValueType>::type enumerateInstanceLayerProperties( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ); template , diff --git a/third_party/vulkan/vulkan_hash.hpp 
b/third_party/vulkan/vulkan_hash.hpp index 1894033..c53be12 100644 --- a/third_party/vulkan/vulkan_hash.hpp +++ b/third_party/vulkan/vulkan_hash.hpp @@ -536,6 +536,26 @@ namespace std } }; + //=== VK_EXT_device_generated_commands === + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT const & indirectCommandsLayoutEXT ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( indirectCommandsLayoutEXT ) ); + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT const & indirectExecutionSetEXT ) const VULKAN_HPP_NOEXCEPT + { + return std::hash{}( static_cast( indirectExecutionSetEXT ) ); + } + }; + #if 14 <= VULKAN_HPP_CPP_VERSION //====================================== //=== HASH structures for structures === @@ -1474,6 +1494,19 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::BindIndexBufferIndirectCommandEXT const & bindIndexBufferIndirectCommandEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, bindIndexBufferIndirectCommandEXT.bufferAddress ); + VULKAN_HPP_HASH_COMBINE( seed, bindIndexBufferIndirectCommandEXT.size ); + VULKAN_HPP_HASH_COMBINE( seed, bindIndexBufferIndirectCommandEXT.indexType ); + return seed; + } + }; + template <> struct hash { @@ -1653,6 +1686,19 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::BindVertexBufferIndirectCommandEXT const & bindVertexBufferIndirectCommandEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, bindVertexBufferIndirectCommandEXT.bufferAddress ); + VULKAN_HPP_HASH_COMBINE( seed, bindVertexBufferIndirectCommandEXT.size ); + VULKAN_HPP_HASH_COMBINE( seed, bindVertexBufferIndirectCommandEXT.stride ); + return seed; + } + }; + template <> struct hash { @@ -2481,6 +2527,29 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( + VULKAN_HPP_NAMESPACE::CooperativeMatrixFlexibleDimensionsPropertiesNV const & cooperativeMatrixFlexibleDimensionsPropertiesNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixFlexibleDimensionsPropertiesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixFlexibleDimensionsPropertiesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixFlexibleDimensionsPropertiesNV.MGranularity ); + VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixFlexibleDimensionsPropertiesNV.NGranularity ); + VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixFlexibleDimensionsPropertiesNV.KGranularity ); + VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixFlexibleDimensionsPropertiesNV.AType ); + VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixFlexibleDimensionsPropertiesNV.BType ); + VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixFlexibleDimensionsPropertiesNV.CType ); + VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixFlexibleDimensionsPropertiesNV.ResultType ); + VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixFlexibleDimensionsPropertiesNV.saturatingAccumulation ); + VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixFlexibleDimensionsPropertiesNV.scope ); + VULKAN_HPP_HASH_COMBINE( seed, cooperativeMatrixFlexibleDimensionsPropertiesNV.workgroupInvocations ); + return seed; + } + }; + template <> struct hash { @@ -3249,6 +3318,18 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DepthClampRangeEXT 
const & depthClampRangeEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, depthClampRangeEXT.minDepthClamp ); + VULKAN_HPP_HASH_COMBINE( seed, depthClampRangeEXT.maxDepthClamp ); + return seed; + } + }; + template <> struct hash { @@ -4418,6 +4499,19 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DrawIndirectCountIndirectCommandEXT const & drawIndirectCountIndirectCommandEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, drawIndirectCountIndirectCommandEXT.bufferAddress ); + VULKAN_HPP_HASH_COMBINE( seed, drawIndirectCountIndirectCommandEXT.stride ); + VULKAN_HPP_HASH_COMBINE( seed, drawIndirectCountIndirectCommandEXT.commandCount ); + return seed; + } + }; + template <> struct hash { @@ -4555,7 +4649,9 @@ namespace std std::size_t seed = 0; VULKAN_HPP_HASH_COMBINE( seed, executionGraphPipelineScratchSizeAMDX.sType ); VULKAN_HPP_HASH_COMBINE( seed, executionGraphPipelineScratchSizeAMDX.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, executionGraphPipelineScratchSizeAMDX.size ); + VULKAN_HPP_HASH_COMBINE( seed, executionGraphPipelineScratchSizeAMDX.minSize ); + VULKAN_HPP_HASH_COMBINE( seed, executionGraphPipelineScratchSizeAMDX.maxSize ); + VULKAN_HPP_HASH_COMBINE( seed, executionGraphPipelineScratchSizeAMDX.sizeGranularity ); return seed; } }; @@ -5205,6 +5301,28 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoEXT const & generatedCommandsInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsInfoEXT.shaderStages ); + VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsInfoEXT.indirectExecutionSet ); + VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsInfoEXT.indirectCommandsLayout ); + VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsInfoEXT.indirectAddress ); + VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsInfoEXT.indirectAddressSize ); + VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsInfoEXT.preprocessAddress ); + VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsInfoEXT.preprocessSize ); + VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsInfoEXT.maxSequenceCount ); + VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsInfoEXT.sequenceCountAddress ); + VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsInfoEXT.maxDrawCount ); + return seed; + } + }; + template <> struct hash { @@ -5242,6 +5360,23 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoEXT const & generatedCommandsMemoryRequirementsInfoEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsMemoryRequirementsInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsMemoryRequirementsInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsMemoryRequirementsInfoEXT.indirectExecutionSet ); + VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsMemoryRequirementsInfoEXT.indirectCommandsLayout ); + VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsMemoryRequirementsInfoEXT.maxSequenceCount ); + VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsMemoryRequirementsInfoEXT.maxDrawCount ); + return seed; + } + }; + template <> struct hash { @@ -5259,6 +5394,33 @@ namespace std } }; + template <> + 
struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::GeneratedCommandsPipelineInfoEXT const & generatedCommandsPipelineInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsPipelineInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsPipelineInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsPipelineInfoEXT.pipeline ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::GeneratedCommandsShaderInfoEXT const & generatedCommandsShaderInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsShaderInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsShaderInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsShaderInfoEXT.shaderCount ); + VULKAN_HPP_HASH_COMBINE( seed, generatedCommandsShaderInfoEXT.pShaders ); + return seed; + } + }; + template <> struct hash { @@ -5631,6 +5793,20 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::HdrVividDynamicMetadataHUAWEI const & hdrVividDynamicMetadataHUAWEI ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, hdrVividDynamicMetadataHUAWEI.sType ); + VULKAN_HPP_HASH_COMBINE( seed, hdrVividDynamicMetadataHUAWEI.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, hdrVividDynamicMetadataHUAWEI.dynamicMetadataSize ); + VULKAN_HPP_HASH_COMBINE( seed, hdrVividDynamicMetadataHUAWEI.pDynamicMetadata ); + return seed; + } + }; + template <> struct hash { @@ -6441,6 +6617,82 @@ namespace std }; # endif /*VK_USE_PLATFORM_FUCHSIA*/ + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::IndirectCommandsExecutionSetTokenEXT const & indirectCommandsExecutionSetTokenEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsExecutionSetTokenEXT.type ); + VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsExecutionSetTokenEXT.shaderStages ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::IndirectCommandsIndexBufferTokenEXT const & indirectCommandsIndexBufferTokenEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsIndexBufferTokenEXT.mode ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PushConstantRange const & pushConstantRange ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pushConstantRange.stageFlags ); + VULKAN_HPP_HASH_COMBINE( seed, pushConstantRange.offset ); + VULKAN_HPP_HASH_COMBINE( seed, pushConstantRange.size ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::IndirectCommandsPushConstantTokenEXT const & indirectCommandsPushConstantTokenEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsPushConstantTokenEXT.updateRange ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::IndirectCommandsVertexBufferTokenEXT const & indirectCommandsVertexBufferTokenEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsVertexBufferTokenEXT.vertexBindingUnit ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( 
VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoEXT const & indirectCommandsLayoutCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutCreateInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutCreateInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutCreateInfoEXT.flags ); + VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutCreateInfoEXT.shaderStages ); + VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutCreateInfoEXT.indirectStride ); + VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutCreateInfoEXT.pipelineLayout ); + VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutCreateInfoEXT.tokenCount ); + VULKAN_HPP_HASH_COMBINE( seed, indirectCommandsLayoutCreateInfoEXT.pTokens ); + return seed; + } + }; + template <> struct hash { @@ -6484,6 +6736,53 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::IndirectExecutionSetPipelineInfoEXT const & indirectExecutionSetPipelineInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, indirectExecutionSetPipelineInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, indirectExecutionSetPipelineInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, indirectExecutionSetPipelineInfoEXT.initialPipeline ); + VULKAN_HPP_HASH_COMBINE( seed, indirectExecutionSetPipelineInfoEXT.maxPipelineCount ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::IndirectExecutionSetShaderLayoutInfoEXT const & indirectExecutionSetShaderLayoutInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, indirectExecutionSetShaderLayoutInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, indirectExecutionSetShaderLayoutInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, indirectExecutionSetShaderLayoutInfoEXT.setLayoutCount ); + VULKAN_HPP_HASH_COMBINE( seed, indirectExecutionSetShaderLayoutInfoEXT.pSetLayouts ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::IndirectExecutionSetShaderInfoEXT const & indirectExecutionSetShaderInfoEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, indirectExecutionSetShaderInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, indirectExecutionSetShaderInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, indirectExecutionSetShaderInfoEXT.shaderCount ); + VULKAN_HPP_HASH_COMBINE( seed, indirectExecutionSetShaderInfoEXT.pInitialShaders ); + VULKAN_HPP_HASH_COMBINE( seed, indirectExecutionSetShaderInfoEXT.pSetLayoutInfos ); + VULKAN_HPP_HASH_COMBINE( seed, indirectExecutionSetShaderInfoEXT.maxShaderCount ); + VULKAN_HPP_HASH_COMBINE( seed, indirectExecutionSetShaderInfoEXT.pushConstantRangeCount ); + VULKAN_HPP_HASH_COMBINE( seed, indirectExecutionSetShaderInfoEXT.pPushConstantRanges ); + return seed; + } + }; + template <> struct hash { @@ -7829,6 +8128,42 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrix2FeaturesNV const & physicalDeviceCooperativeMatrix2FeaturesNV ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrix2FeaturesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrix2FeaturesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, 
physicalDeviceCooperativeMatrix2FeaturesNV.cooperativeMatrixWorkgroupScope ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrix2FeaturesNV.cooperativeMatrixFlexibleDimensions ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrix2FeaturesNV.cooperativeMatrixReductions ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrix2FeaturesNV.cooperativeMatrixConversions ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrix2FeaturesNV.cooperativeMatrixPerElementOperations ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrix2FeaturesNV.cooperativeMatrixTensorAddressing ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrix2FeaturesNV.cooperativeMatrixBlockLoads ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrix2PropertiesNV const & physicalDeviceCooperativeMatrix2PropertiesNV ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrix2PropertiesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrix2PropertiesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrix2PropertiesNV.cooperativeMatrixWorkgroupScopeMaxWorkgroupSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrix2PropertiesNV.cooperativeMatrixFlexibleDimensionsMaxDimension ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceCooperativeMatrix2PropertiesNV.cooperativeMatrixWorkgroupScopeReservedSharedMemory ); + return seed; + } + }; + template <> struct hash { @@ -8063,6 +8398,20 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClampControlFeaturesEXT const & physicalDeviceDepthClampControlFeaturesEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDepthClampControlFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDepthClampControlFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDepthClampControlFeaturesEXT.depthClampControl ); + return seed; + } + }; + template <> struct hash { @@ -8313,6 +8662,21 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT const & physicalDeviceDeviceGeneratedCommandsFeaturesEXT ) + const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsFeaturesEXT.deviceGeneratedCommands ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsFeaturesEXT.dynamicGeneratedPipelineLayout ); + return seed; + } + }; + template <> struct hash { @@ -8327,6 +8691,32 @@ namespace std } }; + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsPropertiesEXT const & physicalDeviceDeviceGeneratedCommandsPropertiesEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsPropertiesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsPropertiesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsPropertiesEXT.maxIndirectPipelineCount ); + 
VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsPropertiesEXT.maxIndirectShaderObjectCount ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsPropertiesEXT.maxIndirectSequenceCount ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsPropertiesEXT.maxIndirectCommandsTokenCount ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsPropertiesEXT.maxIndirectCommandsTokenOffset ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsPropertiesEXT.maxIndirectCommandsIndirectStride ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsPropertiesEXT.supportedIndirectCommandsInputModes ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsPropertiesEXT.supportedIndirectCommandsShaderStages ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsPropertiesEXT.supportedIndirectCommandsShaderStagesPipelineBinding ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsPropertiesEXT.supportedIndirectCommandsShaderStagesShaderBinding ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsPropertiesEXT.deviceGeneratedCommandsTransformFeedback ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDeviceGeneratedCommandsPropertiesEXT.deviceGeneratedCommandsMultiDrawIndirectCount ); + return seed; + } + }; + template <> struct hash { @@ -9135,6 +9525,19 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceHdrVividFeaturesHUAWEI const & physicalDeviceHdrVividFeaturesHUAWEI ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceHdrVividFeaturesHUAWEI.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceHdrVividFeaturesHUAWEI.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceHdrVividFeaturesHUAWEI.hdrVivid ); + return seed; + } + }; + template <> struct hash { @@ -10775,6 +11178,21 @@ namespace std } }; + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePresentModeFifoLatestReadyFeaturesEXT const & physicalDevicePresentModeFifoLatestReadyFeaturesEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePresentModeFifoLatestReadyFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePresentModeFifoLatestReadyFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePresentModeFifoLatestReadyFeaturesEXT.presentModeFifoLatestReady ); + return seed; + } + }; + template <> struct hash { @@ -11535,6 +11953,7 @@ namespace std VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderEnqueueFeaturesAMDX.sType ); VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderEnqueueFeaturesAMDX.pNext ); VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderEnqueueFeaturesAMDX.shaderEnqueue ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderEnqueueFeaturesAMDX.shaderMeshEnqueue ); return seed; } }; @@ -11555,6 +11974,11 @@ namespace std VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderEnqueuePropertiesAMDX.maxExecutionGraphShaderPayloadSize ); VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderEnqueuePropertiesAMDX.maxExecutionGraphShaderPayloadCount ); VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderEnqueuePropertiesAMDX.executionGraphDispatchAddressAlignment ); + for ( size_t i = 0; i < 3; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderEnqueuePropertiesAMDX.maxExecutionGraphWorkgroupCount[i] ); + } 
+ VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderEnqueuePropertiesAMDX.maxExecutionGraphWorkgroups ); return seed; } }; @@ -13201,19 +13625,6 @@ namespace std } }; - template <> - struct hash - { - std::size_t operator()( VULKAN_HPP_NAMESPACE::PushConstantRange const & pushConstantRange ) const VULKAN_HPP_NOEXCEPT - { - std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, pushConstantRange.stageFlags ); - VULKAN_HPP_HASH_COMBINE( seed, pushConstantRange.offset ); - VULKAN_HPP_HASH_COMBINE( seed, pushConstantRange.size ); - return seed; - } - }; - template <> struct hash { @@ -13510,6 +13921,21 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineViewportDepthClampControlCreateInfoEXT const & pipelineViewportDepthClampControlCreateInfoEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportDepthClampControlCreateInfoEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportDepthClampControlCreateInfoEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportDepthClampControlCreateInfoEXT.depthClampMode ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineViewportDepthClampControlCreateInfoEXT.pDepthClampRange ); + return seed; + } + }; + template <> struct hash { @@ -17139,6 +17565,34 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::WriteIndirectExecutionSetPipelineEXT const & writeIndirectExecutionSetPipelineEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, writeIndirectExecutionSetPipelineEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, writeIndirectExecutionSetPipelineEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, writeIndirectExecutionSetPipelineEXT.index ); + VULKAN_HPP_HASH_COMBINE( seed, writeIndirectExecutionSetPipelineEXT.pipeline ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::WriteIndirectExecutionSetShaderEXT const & writeIndirectExecutionSetShaderEXT ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, writeIndirectExecutionSetShaderEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, writeIndirectExecutionSetShaderEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, writeIndirectExecutionSetShaderEXT.index ); + VULKAN_HPP_HASH_COMBINE( seed, writeIndirectExecutionSetShaderEXT.shader ); + return seed; + } + }; + # if defined( VK_USE_PLATFORM_XCB_KHR ) template <> struct hash diff --git a/third_party/vulkan/vulkan_hpp_macros.hpp b/third_party/vulkan/vulkan_hpp_macros.hpp index b754679..aba3b9a 100644 --- a/third_party/vulkan/vulkan_hpp_macros.hpp +++ b/third_party/vulkan/vulkan_hpp_macros.hpp @@ -87,6 +87,19 @@ # define VULKAN_HPP_SUPPORT_SPAN #endif +#if defined( __cpp_lib_modules ) && !defined( VULKAN_HPP_STD_MODULE ) && defined( VULKAN_HPP_ENABLE_STD_MODULE ) +# define VULKAN_HPP_STD_MODULE std.compat +#endif + +#ifndef VK_USE_64_BIT_PTR_DEFINES +# if defined( __LP64__ ) || defined( _WIN64 ) || ( defined( __x86_64__ ) && !defined( __ILP32__ ) ) || defined( _M_X64 ) || defined( __ia64 ) || \ + defined( _M_IA64 ) || defined( __aarch64__ ) || defined( __powerpc64__ ) || ( defined( __riscv ) && __riscv_xlen == 64 ) +# define VK_USE_64_BIT_PTR_DEFINES 1 +# else +# define VK_USE_64_BIT_PTR_DEFINES 0 +# endif +#endif + // 32-bit vulkan is not typesafe for non-dispatchable handles, so don't allow copy constructors on this platform by default. 
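The vulkan_hash.hpp hunks above all follow one pattern: every newly added structure gets a std::hash specialization that folds each member into a running seed with VULKAN_HPP_HASH_COMBINE (array members such as maxExecutionGraphWorkgroupCount are folded element by element in a small loop). A minimal stand-alone sketch of that pattern follows; PushConstantRangeLike and hash_combine are hypothetical stand-ins for a Vulkan-Hpp structure and for the macro, not part of the patch.

    #include <cstddef>
    #include <cstdint>
    #include <functional>
    #include <unordered_set>

    // Hypothetical stand-in for a Vulkan-Hpp structure such as vk::PushConstantRange.
    struct PushConstantRangeLike
    {
      std::uint32_t stageFlags;
      std::uint32_t offset;
      std::uint32_t size;

      bool operator==( PushConstantRangeLike const & rhs ) const
      {
        return stageFlags == rhs.stageFlags && offset == rhs.offset && size == rhs.size;
      }
    };

    // Boost-style combiner, mirroring the idea behind VULKAN_HPP_HASH_COMBINE:
    // mix each member's hash into a running seed.
    template <typename T>
    void hash_combine( std::size_t & seed, T const & value )
    {
      seed ^= std::hash<T>{}( value ) + 0x9e3779b9 + ( seed << 6 ) + ( seed >> 2 );
    }

    namespace std
    {
      template <>
      struct hash<PushConstantRangeLike>
      {
        std::size_t operator()( PushConstantRangeLike const & range ) const noexcept
        {
          std::size_t seed = 0;
          hash_combine( seed, range.stageFlags );
          hash_combine( seed, range.offset );
          hash_combine( seed, range.size );
          return seed;
        }
      };
    }  // namespace std

    int main()
    {
      std::unordered_set<PushConstantRangeLike> ranges;
      ranges.insert( { 0x1u, 0u, 64u } );
      return ranges.count( { 0x1u, 0u, 64u } ) == 1 ? 0 : 1;
    }

Specializing std::hash this way is what allows the Vulkan-Hpp structures to be used directly as keys in unordered containers.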
// To enable this feature on 32-bit platforms please #define VULKAN_HPP_TYPESAFE_CONVERSION 1 // To disable this feature on 64-bit platforms please #define VULKAN_HPP_TYPESAFE_CONVERSION 0 @@ -185,6 +198,12 @@ # define VULKAN_HPP_DEPRECATED( msg ) #endif +#if 17 <= VULKAN_HPP_CPP_VERSION +# define VULKAN_HPP_DEPRECATED_17( msg ) [[deprecated( msg )]] +#else +# define VULKAN_HPP_DEPRECATED_17( msg ) +#endif + #if ( 17 <= VULKAN_HPP_CPP_VERSION ) && !defined( VULKAN_HPP_NO_NODISCARD_WARNINGS ) # define VULKAN_HPP_NODISCARD [[nodiscard]] # if defined( VULKAN_HPP_NO_EXCEPTIONS ) @@ -238,33 +257,40 @@ namespace VULKAN_HPP_NAMESPACE { - class DispatchLoaderDynamic; + namespace detail + { + class DispatchLoaderDynamic; + +#if !defined( VULKAN_HPP_DEFAULT_DISPATCHER ) +# if VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 + extern VULKAN_HPP_STORAGE_API DispatchLoaderDynamic defaultDispatchLoaderDynamic; +# endif +#endif + } // namespace detail } // namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_DEFAULT_DISPATCHER ) # if VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 -# define VULKAN_HPP_DEFAULT_DISPATCHER ::VULKAN_HPP_NAMESPACE::defaultDispatchLoaderDynamic -# define VULKAN_HPP_DEFAULT_DISPATCH_LOADER_DYNAMIC_STORAGE \ - namespace VULKAN_HPP_NAMESPACE \ - { \ - VULKAN_HPP_STORAGE_API ::VULKAN_HPP_NAMESPACE::DispatchLoaderDynamic defaultDispatchLoaderDynamic; \ +# define VULKAN_HPP_DEFAULT_DISPATCHER ::VULKAN_HPP_NAMESPACE::detail::defaultDispatchLoaderDynamic +# define VULKAN_HPP_DEFAULT_DISPATCH_LOADER_DYNAMIC_STORAGE \ + namespace VULKAN_HPP_NAMESPACE \ + { \ + namespace detail \ + { \ + VULKAN_HPP_STORAGE_API DispatchLoaderDynamic defaultDispatchLoaderDynamic; \ + } \ } - -namespace VULKAN_HPP_NAMESPACE -{ - extern VULKAN_HPP_STORAGE_API VULKAN_HPP_NAMESPACE::DispatchLoaderDynamic defaultDispatchLoaderDynamic; -} // namespace VULKAN_HPP_NAMESPACE # else -# define VULKAN_HPP_DEFAULT_DISPATCHER ::VULKAN_HPP_NAMESPACE::getDispatchLoaderStatic() +# define VULKAN_HPP_DEFAULT_DISPATCHER ::VULKAN_HPP_NAMESPACE::detail::getDispatchLoaderStatic() # define VULKAN_HPP_DEFAULT_DISPATCH_LOADER_DYNAMIC_STORAGE # endif #endif #if !defined( VULKAN_HPP_DEFAULT_DISPATCHER_TYPE ) # if VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 -# define VULKAN_HPP_DEFAULT_DISPATCHER_TYPE ::VULKAN_HPP_NAMESPACE::DispatchLoaderDynamic +# define VULKAN_HPP_DEFAULT_DISPATCHER_TYPE ::VULKAN_HPP_NAMESPACE::detail::DispatchLoaderDynamic # else -# define VULKAN_HPP_DEFAULT_DISPATCHER_TYPE ::VULKAN_HPP_NAMESPACE::DispatchLoaderStatic +# define VULKAN_HPP_DEFAULT_DISPATCHER_TYPE ::VULKAN_HPP_NAMESPACE::detail::DispatchLoaderStatic # endif #endif @@ -279,7 +305,9 @@ namespace VULKAN_HPP_NAMESPACE #endif #if !defined( VULKAN_HPP_EXPECTED ) && ( 23 <= VULKAN_HPP_CPP_VERSION ) && defined( __cpp_lib_expected ) -# include +# if !( defined( VULKAN_HPP_ENABLE_STD_MODULE ) && defined( VULKAN_HPP_STD_MODULE ) ) +# include +# endif # define VULKAN_HPP_EXPECTED std::expected # define VULKAN_HPP_UNEXPECTED std::unexpected #endif diff --git a/third_party/vulkan/vulkan_raii.hpp b/third_party/vulkan/vulkan_raii.hpp index ac672bf..d8ca95f 100644 --- a/third_party/vulkan/vulkan_raii.hpp +++ b/third_party/vulkan/vulkan_raii.hpp @@ -8,2611 +8,2656 @@ #ifndef VULKAN_RAII_HPP #define VULKAN_RAII_HPP -#include // std::unique_ptr -#include // std::forward #include +#if !( defined( VULKAN_HPP_ENABLE_STD_MODULE ) && defined( VULKAN_HPP_STD_MODULE ) ) +# include // std::unique_ptr +# include // std::forward +#endif #if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) namespace 
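The vulkan_hpp_macros.hpp hunk above moves DispatchLoaderDynamic and its default storage into VULKAN_HPP_NAMESPACE::detail while keeping the macro spellings stable, so existing call sites that go through VULKAN_HPP_DEFAULT_DISPATCHER and VULKAN_HPP_DEFAULT_DISPATCH_LOADER_DYNAMIC_STORAGE keep compiling. A minimal consumer-side sketch of the conventional usage follows, assuming the dynamic dispatcher is enabled and a Vulkan loader is available; the exact init() sequence is illustrative, not taken from this patch.

    #define VULKAN_HPP_DISPATCH_LOADER_DYNAMIC 1
    #include <vulkan/vulkan.hpp>

    // Defines the dispatcher storage (now vk::detail::defaultDispatchLoaderDynamic)
    // in exactly one translation unit.
    VULKAN_HPP_DEFAULT_DISPATCH_LOADER_DYNAMIC_STORAGE

    int main()
    {
      VULKAN_HPP_DEFAULT_DISPATCHER.init();             // load vkGetInstanceProcAddr and global entry points
      vk::Instance instance = vk::createInstance( {} );
      VULKAN_HPP_DEFAULT_DISPATCHER.init( instance );   // load instance-level entry points
      instance.destroy();
      return 0;
    }

Only code that named vk::DispatchLoaderDynamic directly needs to switch to vk::detail::DispatchLoaderDynamic (or to VULKAN_HPP_DEFAULT_DISPATCHER_TYPE).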
VULKAN_HPP_NAMESPACE { namespace VULKAN_HPP_RAII_NAMESPACE { - template - class CreateReturnType + namespace detail { - public: + template + class CreateReturnType + { + public: # if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) - using Type = VULKAN_HPP_EXPECTED; + using Type = VULKAN_HPP_EXPECTED; # else - using Type = T; + using Type = T; # endif - }; + }; - class ContextDispatcher : public DispatchLoaderBase - { - public: - ContextDispatcher( PFN_vkGetInstanceProcAddr getProcAddr ) - : vkGetInstanceProcAddr( getProcAddr ) - //=== VK_VERSION_1_0 === - , vkCreateInstance( PFN_vkCreateInstance( getProcAddr( NULL, "vkCreateInstance" ) ) ) - , vkEnumerateInstanceExtensionProperties( PFN_vkEnumerateInstanceExtensionProperties( getProcAddr( NULL, "vkEnumerateInstanceExtensionProperties" ) ) ) - , vkEnumerateInstanceLayerProperties( PFN_vkEnumerateInstanceLayerProperties( getProcAddr( NULL, "vkEnumerateInstanceLayerProperties" ) ) ) - //=== VK_VERSION_1_1 === - , vkEnumerateInstanceVersion( PFN_vkEnumerateInstanceVersion( getProcAddr( NULL, "vkEnumerateInstanceVersion" ) ) ) + using PFN_dummy = void ( * )(); + + class ContextDispatcher : public ::VULKAN_HPP_NAMESPACE::detail::DispatchLoaderBase { - } + public: + ContextDispatcher( PFN_vkGetInstanceProcAddr getProcAddr ) + : vkGetInstanceProcAddr( getProcAddr ) + //=== VK_VERSION_1_0 === + , vkCreateInstance( PFN_vkCreateInstance( getProcAddr( NULL, "vkCreateInstance" ) ) ) + , vkEnumerateInstanceExtensionProperties( + PFN_vkEnumerateInstanceExtensionProperties( getProcAddr( NULL, "vkEnumerateInstanceExtensionProperties" ) ) ) + , vkEnumerateInstanceLayerProperties( PFN_vkEnumerateInstanceLayerProperties( getProcAddr( NULL, "vkEnumerateInstanceLayerProperties" ) ) ) + //=== VK_VERSION_1_1 === + , vkEnumerateInstanceVersion( PFN_vkEnumerateInstanceVersion( getProcAddr( NULL, "vkEnumerateInstanceVersion" ) ) ) + { + } - public: - PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr = 0; + public: + PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr = 0; - //=== VK_VERSION_1_0 === - PFN_vkCreateInstance vkCreateInstance = 0; - PFN_vkEnumerateInstanceExtensionProperties vkEnumerateInstanceExtensionProperties = 0; - PFN_vkEnumerateInstanceLayerProperties vkEnumerateInstanceLayerProperties = 0; - - //=== VK_VERSION_1_1 === - PFN_vkEnumerateInstanceVersion vkEnumerateInstanceVersion = 0; - }; - - class InstanceDispatcher : public DispatchLoaderBase - { - public: - InstanceDispatcher( PFN_vkGetInstanceProcAddr getProcAddr, VkInstance instance ) : vkGetInstanceProcAddr( getProcAddr ) - { //=== VK_VERSION_1_0 === - vkDestroyInstance = PFN_vkDestroyInstance( vkGetInstanceProcAddr( instance, "vkDestroyInstance" ) ); - vkEnumeratePhysicalDevices = PFN_vkEnumeratePhysicalDevices( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDevices" ) ); - vkGetPhysicalDeviceFeatures = PFN_vkGetPhysicalDeviceFeatures( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures" ) ); - vkGetPhysicalDeviceFormatProperties = - PFN_vkGetPhysicalDeviceFormatProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties" ) ); - vkGetPhysicalDeviceImageFormatProperties = - PFN_vkGetPhysicalDeviceImageFormatProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties" ) ); - vkGetPhysicalDeviceProperties = PFN_vkGetPhysicalDeviceProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties" ) ); - vkGetPhysicalDeviceQueueFamilyProperties = - PFN_vkGetPhysicalDeviceQueueFamilyProperties( vkGetInstanceProcAddr( instance, 
"vkGetPhysicalDeviceQueueFamilyProperties" ) ); - vkGetPhysicalDeviceMemoryProperties = - PFN_vkGetPhysicalDeviceMemoryProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties" ) ); - vkCreateDevice = PFN_vkCreateDevice( vkGetInstanceProcAddr( instance, "vkCreateDevice" ) ); - vkEnumerateDeviceExtensionProperties = - PFN_vkEnumerateDeviceExtensionProperties( vkGetInstanceProcAddr( instance, "vkEnumerateDeviceExtensionProperties" ) ); - vkEnumerateDeviceLayerProperties = PFN_vkEnumerateDeviceLayerProperties( vkGetInstanceProcAddr( instance, "vkEnumerateDeviceLayerProperties" ) ); - vkGetPhysicalDeviceSparseImageFormatProperties = - PFN_vkGetPhysicalDeviceSparseImageFormatProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties" ) ); + PFN_vkCreateInstance vkCreateInstance = 0; + PFN_vkEnumerateInstanceExtensionProperties vkEnumerateInstanceExtensionProperties = 0; + PFN_vkEnumerateInstanceLayerProperties vkEnumerateInstanceLayerProperties = 0; //=== VK_VERSION_1_1 === - vkEnumeratePhysicalDeviceGroups = PFN_vkEnumeratePhysicalDeviceGroups( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceGroups" ) ); - vkGetPhysicalDeviceFeatures2 = PFN_vkGetPhysicalDeviceFeatures2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures2" ) ); - vkGetPhysicalDeviceProperties2 = PFN_vkGetPhysicalDeviceProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties2" ) ); - vkGetPhysicalDeviceFormatProperties2 = - PFN_vkGetPhysicalDeviceFormatProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties2" ) ); - vkGetPhysicalDeviceImageFormatProperties2 = - PFN_vkGetPhysicalDeviceImageFormatProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties2" ) ); - vkGetPhysicalDeviceQueueFamilyProperties2 = - PFN_vkGetPhysicalDeviceQueueFamilyProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties2" ) ); - vkGetPhysicalDeviceMemoryProperties2 = - PFN_vkGetPhysicalDeviceMemoryProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties2" ) ); - vkGetPhysicalDeviceSparseImageFormatProperties2 = - PFN_vkGetPhysicalDeviceSparseImageFormatProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties2" ) ); - vkGetPhysicalDeviceExternalBufferProperties = - PFN_vkGetPhysicalDeviceExternalBufferProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalBufferProperties" ) ); - vkGetPhysicalDeviceExternalFenceProperties = - PFN_vkGetPhysicalDeviceExternalFenceProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalFenceProperties" ) ); - vkGetPhysicalDeviceExternalSemaphoreProperties = - PFN_vkGetPhysicalDeviceExternalSemaphoreProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalSemaphoreProperties" ) ); + PFN_vkEnumerateInstanceVersion vkEnumerateInstanceVersion = 0; + }; + + class InstanceDispatcher : public ::VULKAN_HPP_NAMESPACE::detail::DispatchLoaderBase + { + public: + InstanceDispatcher( PFN_vkGetInstanceProcAddr getProcAddr, VkInstance instance ) : vkGetInstanceProcAddr( getProcAddr ) + { + //=== VK_VERSION_1_0 === + vkDestroyInstance = PFN_vkDestroyInstance( vkGetInstanceProcAddr( instance, "vkDestroyInstance" ) ); + vkEnumeratePhysicalDevices = PFN_vkEnumeratePhysicalDevices( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDevices" ) ); + vkGetPhysicalDeviceFeatures = PFN_vkGetPhysicalDeviceFeatures( vkGetInstanceProcAddr( instance, 
"vkGetPhysicalDeviceFeatures" ) ); + vkGetPhysicalDeviceFormatProperties = + PFN_vkGetPhysicalDeviceFormatProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties" ) ); + vkGetPhysicalDeviceImageFormatProperties = + PFN_vkGetPhysicalDeviceImageFormatProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties" ) ); + vkGetPhysicalDeviceProperties = PFN_vkGetPhysicalDeviceProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties" ) ); + vkGetPhysicalDeviceQueueFamilyProperties = + PFN_vkGetPhysicalDeviceQueueFamilyProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties" ) ); + vkGetPhysicalDeviceMemoryProperties = + PFN_vkGetPhysicalDeviceMemoryProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties" ) ); + vkCreateDevice = PFN_vkCreateDevice( vkGetInstanceProcAddr( instance, "vkCreateDevice" ) ); + vkEnumerateDeviceExtensionProperties = + PFN_vkEnumerateDeviceExtensionProperties( vkGetInstanceProcAddr( instance, "vkEnumerateDeviceExtensionProperties" ) ); + vkEnumerateDeviceLayerProperties = PFN_vkEnumerateDeviceLayerProperties( vkGetInstanceProcAddr( instance, "vkEnumerateDeviceLayerProperties" ) ); + vkGetPhysicalDeviceSparseImageFormatProperties = + PFN_vkGetPhysicalDeviceSparseImageFormatProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties" ) ); + + //=== VK_VERSION_1_1 === + vkEnumeratePhysicalDeviceGroups = PFN_vkEnumeratePhysicalDeviceGroups( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceGroups" ) ); + vkGetPhysicalDeviceFeatures2 = PFN_vkGetPhysicalDeviceFeatures2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures2" ) ); + vkGetPhysicalDeviceProperties2 = PFN_vkGetPhysicalDeviceProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties2" ) ); + vkGetPhysicalDeviceFormatProperties2 = + PFN_vkGetPhysicalDeviceFormatProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties2" ) ); + vkGetPhysicalDeviceImageFormatProperties2 = + PFN_vkGetPhysicalDeviceImageFormatProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties2" ) ); + vkGetPhysicalDeviceQueueFamilyProperties2 = + PFN_vkGetPhysicalDeviceQueueFamilyProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties2" ) ); + vkGetPhysicalDeviceMemoryProperties2 = + PFN_vkGetPhysicalDeviceMemoryProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties2" ) ); + vkGetPhysicalDeviceSparseImageFormatProperties2 = + PFN_vkGetPhysicalDeviceSparseImageFormatProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties2" ) ); + vkGetPhysicalDeviceExternalBufferProperties = + PFN_vkGetPhysicalDeviceExternalBufferProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalBufferProperties" ) ); + vkGetPhysicalDeviceExternalFenceProperties = + PFN_vkGetPhysicalDeviceExternalFenceProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalFenceProperties" ) ); + vkGetPhysicalDeviceExternalSemaphoreProperties = + PFN_vkGetPhysicalDeviceExternalSemaphoreProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalSemaphoreProperties" ) ); + + //=== VK_VERSION_1_3 === + vkGetPhysicalDeviceToolProperties = PFN_vkGetPhysicalDeviceToolProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceToolProperties" ) ); + + //=== VK_KHR_surface === + vkDestroySurfaceKHR = 
PFN_vkDestroySurfaceKHR( vkGetInstanceProcAddr( instance, "vkDestroySurfaceKHR" ) ); + vkGetPhysicalDeviceSurfaceSupportKHR = + PFN_vkGetPhysicalDeviceSurfaceSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceSupportKHR" ) ); + vkGetPhysicalDeviceSurfaceCapabilitiesKHR = + PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilitiesKHR" ) ); + vkGetPhysicalDeviceSurfaceFormatsKHR = + PFN_vkGetPhysicalDeviceSurfaceFormatsKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceFormatsKHR" ) ); + vkGetPhysicalDeviceSurfacePresentModesKHR = + PFN_vkGetPhysicalDeviceSurfacePresentModesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfacePresentModesKHR" ) ); + + //=== VK_KHR_swapchain === + vkGetPhysicalDevicePresentRectanglesKHR = + PFN_vkGetPhysicalDevicePresentRectanglesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDevicePresentRectanglesKHR" ) ); + + //=== VK_KHR_display === + vkGetPhysicalDeviceDisplayPropertiesKHR = + PFN_vkGetPhysicalDeviceDisplayPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPropertiesKHR" ) ); + vkGetPhysicalDeviceDisplayPlanePropertiesKHR = + PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPlanePropertiesKHR" ) ); + vkGetDisplayPlaneSupportedDisplaysKHR = + PFN_vkGetDisplayPlaneSupportedDisplaysKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneSupportedDisplaysKHR" ) ); + vkGetDisplayModePropertiesKHR = PFN_vkGetDisplayModePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayModePropertiesKHR" ) ); + vkCreateDisplayModeKHR = PFN_vkCreateDisplayModeKHR( vkGetInstanceProcAddr( instance, "vkCreateDisplayModeKHR" ) ); + vkGetDisplayPlaneCapabilitiesKHR = PFN_vkGetDisplayPlaneCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneCapabilitiesKHR" ) ); + vkCreateDisplayPlaneSurfaceKHR = PFN_vkCreateDisplayPlaneSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateDisplayPlaneSurfaceKHR" ) ); + +# if defined( VK_USE_PLATFORM_XLIB_KHR ) + //=== VK_KHR_xlib_surface === + vkCreateXlibSurfaceKHR = PFN_vkCreateXlibSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateXlibSurfaceKHR" ) ); + vkGetPhysicalDeviceXlibPresentationSupportKHR = + PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceXlibPresentationSupportKHR" ) ); +# endif /*VK_USE_PLATFORM_XLIB_KHR*/ + +# if defined( VK_USE_PLATFORM_XCB_KHR ) + //=== VK_KHR_xcb_surface === + vkCreateXcbSurfaceKHR = PFN_vkCreateXcbSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateXcbSurfaceKHR" ) ); + vkGetPhysicalDeviceXcbPresentationSupportKHR = + PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceXcbPresentationSupportKHR" ) ); +# endif /*VK_USE_PLATFORM_XCB_KHR*/ + +# if defined( VK_USE_PLATFORM_WAYLAND_KHR ) + //=== VK_KHR_wayland_surface === + vkCreateWaylandSurfaceKHR = PFN_vkCreateWaylandSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateWaylandSurfaceKHR" ) ); + vkGetPhysicalDeviceWaylandPresentationSupportKHR = + PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceWaylandPresentationSupportKHR" ) ); +# endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ + +# if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_KHR_android_surface === + vkCreateAndroidSurfaceKHR = PFN_vkCreateAndroidSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateAndroidSurfaceKHR" ) ); 
+# endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_win32_surface === + vkCreateWin32SurfaceKHR = PFN_vkCreateWin32SurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateWin32SurfaceKHR" ) ); + vkGetPhysicalDeviceWin32PresentationSupportKHR = + PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceWin32PresentationSupportKHR" ) ); +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_EXT_debug_report === + vkCreateDebugReportCallbackEXT = PFN_vkCreateDebugReportCallbackEXT( vkGetInstanceProcAddr( instance, "vkCreateDebugReportCallbackEXT" ) ); + vkDestroyDebugReportCallbackEXT = PFN_vkDestroyDebugReportCallbackEXT( vkGetInstanceProcAddr( instance, "vkDestroyDebugReportCallbackEXT" ) ); + vkDebugReportMessageEXT = PFN_vkDebugReportMessageEXT( vkGetInstanceProcAddr( instance, "vkDebugReportMessageEXT" ) ); + + //=== VK_KHR_video_queue === + vkGetPhysicalDeviceVideoCapabilitiesKHR = + PFN_vkGetPhysicalDeviceVideoCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceVideoCapabilitiesKHR" ) ); + vkGetPhysicalDeviceVideoFormatPropertiesKHR = + PFN_vkGetPhysicalDeviceVideoFormatPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceVideoFormatPropertiesKHR" ) ); + +# if defined( VK_USE_PLATFORM_GGP ) + //=== VK_GGP_stream_descriptor_surface === + vkCreateStreamDescriptorSurfaceGGP = + PFN_vkCreateStreamDescriptorSurfaceGGP( vkGetInstanceProcAddr( instance, "vkCreateStreamDescriptorSurfaceGGP" ) ); +# endif /*VK_USE_PLATFORM_GGP*/ + + //=== VK_NV_external_memory_capabilities === + vkGetPhysicalDeviceExternalImageFormatPropertiesNV = + PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalImageFormatPropertiesNV" ) ); + + //=== VK_KHR_get_physical_device_properties2 === + vkGetPhysicalDeviceFeatures2KHR = PFN_vkGetPhysicalDeviceFeatures2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures2KHR" ) ); + if ( !vkGetPhysicalDeviceFeatures2 ) + vkGetPhysicalDeviceFeatures2 = vkGetPhysicalDeviceFeatures2KHR; + vkGetPhysicalDeviceProperties2KHR = PFN_vkGetPhysicalDeviceProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties2KHR" ) ); + if ( !vkGetPhysicalDeviceProperties2 ) + vkGetPhysicalDeviceProperties2 = vkGetPhysicalDeviceProperties2KHR; + vkGetPhysicalDeviceFormatProperties2KHR = + PFN_vkGetPhysicalDeviceFormatProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties2KHR" ) ); + if ( !vkGetPhysicalDeviceFormatProperties2 ) + vkGetPhysicalDeviceFormatProperties2 = vkGetPhysicalDeviceFormatProperties2KHR; + vkGetPhysicalDeviceImageFormatProperties2KHR = + PFN_vkGetPhysicalDeviceImageFormatProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties2KHR" ) ); + if ( !vkGetPhysicalDeviceImageFormatProperties2 ) + vkGetPhysicalDeviceImageFormatProperties2 = vkGetPhysicalDeviceImageFormatProperties2KHR; + vkGetPhysicalDeviceQueueFamilyProperties2KHR = + PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties2KHR" ) ); + if ( !vkGetPhysicalDeviceQueueFamilyProperties2 ) + vkGetPhysicalDeviceQueueFamilyProperties2 = vkGetPhysicalDeviceQueueFamilyProperties2KHR; + vkGetPhysicalDeviceMemoryProperties2KHR = + PFN_vkGetPhysicalDeviceMemoryProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties2KHR" ) ); + if ( 
!vkGetPhysicalDeviceMemoryProperties2 ) + vkGetPhysicalDeviceMemoryProperties2 = vkGetPhysicalDeviceMemoryProperties2KHR; + vkGetPhysicalDeviceSparseImageFormatProperties2KHR = + PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties2KHR" ) ); + if ( !vkGetPhysicalDeviceSparseImageFormatProperties2 ) + vkGetPhysicalDeviceSparseImageFormatProperties2 = vkGetPhysicalDeviceSparseImageFormatProperties2KHR; + +# if defined( VK_USE_PLATFORM_VI_NN ) + //=== VK_NN_vi_surface === + vkCreateViSurfaceNN = PFN_vkCreateViSurfaceNN( vkGetInstanceProcAddr( instance, "vkCreateViSurfaceNN" ) ); +# endif /*VK_USE_PLATFORM_VI_NN*/ + + //=== VK_KHR_device_group_creation === + vkEnumeratePhysicalDeviceGroupsKHR = + PFN_vkEnumeratePhysicalDeviceGroupsKHR( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceGroupsKHR" ) ); + if ( !vkEnumeratePhysicalDeviceGroups ) + vkEnumeratePhysicalDeviceGroups = vkEnumeratePhysicalDeviceGroupsKHR; + + //=== VK_KHR_external_memory_capabilities === + vkGetPhysicalDeviceExternalBufferPropertiesKHR = + PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalBufferPropertiesKHR" ) ); + if ( !vkGetPhysicalDeviceExternalBufferProperties ) + vkGetPhysicalDeviceExternalBufferProperties = vkGetPhysicalDeviceExternalBufferPropertiesKHR; + + //=== VK_KHR_external_semaphore_capabilities === + vkGetPhysicalDeviceExternalSemaphorePropertiesKHR = + PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalSemaphorePropertiesKHR" ) ); + if ( !vkGetPhysicalDeviceExternalSemaphoreProperties ) + vkGetPhysicalDeviceExternalSemaphoreProperties = vkGetPhysicalDeviceExternalSemaphorePropertiesKHR; + + //=== VK_EXT_direct_mode_display === + vkReleaseDisplayEXT = PFN_vkReleaseDisplayEXT( vkGetInstanceProcAddr( instance, "vkReleaseDisplayEXT" ) ); + +# if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT ) + //=== VK_EXT_acquire_xlib_display === + vkAcquireXlibDisplayEXT = PFN_vkAcquireXlibDisplayEXT( vkGetInstanceProcAddr( instance, "vkAcquireXlibDisplayEXT" ) ); + vkGetRandROutputDisplayEXT = PFN_vkGetRandROutputDisplayEXT( vkGetInstanceProcAddr( instance, "vkGetRandROutputDisplayEXT" ) ); +# endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ + + //=== VK_EXT_display_surface_counter === + vkGetPhysicalDeviceSurfaceCapabilities2EXT = + PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilities2EXT" ) ); + + //=== VK_KHR_external_fence_capabilities === + vkGetPhysicalDeviceExternalFencePropertiesKHR = + PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalFencePropertiesKHR" ) ); + if ( !vkGetPhysicalDeviceExternalFenceProperties ) + vkGetPhysicalDeviceExternalFenceProperties = vkGetPhysicalDeviceExternalFencePropertiesKHR; + + //=== VK_KHR_performance_query === + vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR = PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( + vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR" ) ); + vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR = PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR" ) ); + + //=== VK_KHR_get_surface_capabilities2 === + 
vkGetPhysicalDeviceSurfaceCapabilities2KHR = + PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilities2KHR" ) ); + vkGetPhysicalDeviceSurfaceFormats2KHR = + PFN_vkGetPhysicalDeviceSurfaceFormats2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceFormats2KHR" ) ); + + //=== VK_KHR_get_display_properties2 === + vkGetPhysicalDeviceDisplayProperties2KHR = + PFN_vkGetPhysicalDeviceDisplayProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayProperties2KHR" ) ); + vkGetPhysicalDeviceDisplayPlaneProperties2KHR = + PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPlaneProperties2KHR" ) ); + vkGetDisplayModeProperties2KHR = PFN_vkGetDisplayModeProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetDisplayModeProperties2KHR" ) ); + vkGetDisplayPlaneCapabilities2KHR = PFN_vkGetDisplayPlaneCapabilities2KHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneCapabilities2KHR" ) ); + +# if defined( VK_USE_PLATFORM_IOS_MVK ) + //=== VK_MVK_ios_surface === + vkCreateIOSSurfaceMVK = PFN_vkCreateIOSSurfaceMVK( vkGetInstanceProcAddr( instance, "vkCreateIOSSurfaceMVK" ) ); +# endif /*VK_USE_PLATFORM_IOS_MVK*/ + +# if defined( VK_USE_PLATFORM_MACOS_MVK ) + //=== VK_MVK_macos_surface === + vkCreateMacOSSurfaceMVK = PFN_vkCreateMacOSSurfaceMVK( vkGetInstanceProcAddr( instance, "vkCreateMacOSSurfaceMVK" ) ); +# endif /*VK_USE_PLATFORM_MACOS_MVK*/ + + //=== VK_EXT_debug_utils === + vkCreateDebugUtilsMessengerEXT = PFN_vkCreateDebugUtilsMessengerEXT( vkGetInstanceProcAddr( instance, "vkCreateDebugUtilsMessengerEXT" ) ); + vkDestroyDebugUtilsMessengerEXT = PFN_vkDestroyDebugUtilsMessengerEXT( vkGetInstanceProcAddr( instance, "vkDestroyDebugUtilsMessengerEXT" ) ); + vkSubmitDebugUtilsMessageEXT = PFN_vkSubmitDebugUtilsMessageEXT( vkGetInstanceProcAddr( instance, "vkSubmitDebugUtilsMessageEXT" ) ); + + //=== VK_EXT_sample_locations === + vkGetPhysicalDeviceMultisamplePropertiesEXT = + PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMultisamplePropertiesEXT" ) ); + + //=== VK_EXT_calibrated_timestamps === + vkGetPhysicalDeviceCalibrateableTimeDomainsEXT = + PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCalibrateableTimeDomainsEXT" ) ); + if ( !vkGetPhysicalDeviceCalibrateableTimeDomainsKHR ) + vkGetPhysicalDeviceCalibrateableTimeDomainsKHR = vkGetPhysicalDeviceCalibrateableTimeDomainsEXT; + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_imagepipe_surface === + vkCreateImagePipeSurfaceFUCHSIA = PFN_vkCreateImagePipeSurfaceFUCHSIA( vkGetInstanceProcAddr( instance, "vkCreateImagePipeSurfaceFUCHSIA" ) ); +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + +# if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_surface === + vkCreateMetalSurfaceEXT = PFN_vkCreateMetalSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateMetalSurfaceEXT" ) ); +# endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== VK_KHR_fragment_shading_rate === + vkGetPhysicalDeviceFragmentShadingRatesKHR = + PFN_vkGetPhysicalDeviceFragmentShadingRatesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFragmentShadingRatesKHR" ) ); + + //=== VK_EXT_tooling_info === + vkGetPhysicalDeviceToolPropertiesEXT = + PFN_vkGetPhysicalDeviceToolPropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceToolPropertiesEXT" ) ); + if ( !vkGetPhysicalDeviceToolProperties ) + 
vkGetPhysicalDeviceToolProperties = vkGetPhysicalDeviceToolPropertiesEXT; + + //=== VK_NV_cooperative_matrix === + vkGetPhysicalDeviceCooperativeMatrixPropertiesNV = + PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCooperativeMatrixPropertiesNV" ) ); + + //=== VK_NV_coverage_reduction_mode === + vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV = PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV" ) ); + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_EXT_full_screen_exclusive === + vkGetPhysicalDeviceSurfacePresentModes2EXT = + PFN_vkGetPhysicalDeviceSurfacePresentModes2EXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfacePresentModes2EXT" ) ); +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_EXT_headless_surface === + vkCreateHeadlessSurfaceEXT = PFN_vkCreateHeadlessSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateHeadlessSurfaceEXT" ) ); + + //=== VK_EXT_acquire_drm_display === + vkAcquireDrmDisplayEXT = PFN_vkAcquireDrmDisplayEXT( vkGetInstanceProcAddr( instance, "vkAcquireDrmDisplayEXT" ) ); + vkGetDrmDisplayEXT = PFN_vkGetDrmDisplayEXT( vkGetInstanceProcAddr( instance, "vkGetDrmDisplayEXT" ) ); + + //=== VK_KHR_video_encode_queue === + vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR = PFN_vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR" ) ); + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_NV_acquire_winrt_display === + vkAcquireWinrtDisplayNV = PFN_vkAcquireWinrtDisplayNV( vkGetInstanceProcAddr( instance, "vkAcquireWinrtDisplayNV" ) ); + vkGetWinrtDisplayNV = PFN_vkGetWinrtDisplayNV( vkGetInstanceProcAddr( instance, "vkGetWinrtDisplayNV" ) ); +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +# if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) + //=== VK_EXT_directfb_surface === + vkCreateDirectFBSurfaceEXT = PFN_vkCreateDirectFBSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateDirectFBSurfaceEXT" ) ); + vkGetPhysicalDeviceDirectFBPresentationSupportEXT = + PFN_vkGetPhysicalDeviceDirectFBPresentationSupportEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDirectFBPresentationSupportEXT" ) ); +# endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ + +# if defined( VK_USE_PLATFORM_SCREEN_QNX ) + //=== VK_QNX_screen_surface === + vkCreateScreenSurfaceQNX = PFN_vkCreateScreenSurfaceQNX( vkGetInstanceProcAddr( instance, "vkCreateScreenSurfaceQNX" ) ); + vkGetPhysicalDeviceScreenPresentationSupportQNX = + PFN_vkGetPhysicalDeviceScreenPresentationSupportQNX( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceScreenPresentationSupportQNX" ) ); +# endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + //=== VK_NV_optical_flow === + vkGetPhysicalDeviceOpticalFlowImageFormatsNV = + PFN_vkGetPhysicalDeviceOpticalFlowImageFormatsNV( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceOpticalFlowImageFormatsNV" ) ); + + //=== VK_KHR_cooperative_matrix === + vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR = + PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR" ) ); + + //=== VK_KHR_calibrated_timestamps === + vkGetPhysicalDeviceCalibrateableTimeDomainsKHR = + PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCalibrateableTimeDomainsKHR" ) ); + + 
//=== VK_NV_cooperative_matrix2 === + vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV = PFN_vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV" ) ); + + vkGetDeviceProcAddr = PFN_vkGetDeviceProcAddr( vkGetInstanceProcAddr( instance, "vkGetDeviceProcAddr" ) ); + } + + public: + //=== VK_VERSION_1_0 === + PFN_vkDestroyInstance vkDestroyInstance = 0; + PFN_vkEnumeratePhysicalDevices vkEnumeratePhysicalDevices = 0; + PFN_vkGetPhysicalDeviceFeatures vkGetPhysicalDeviceFeatures = 0; + PFN_vkGetPhysicalDeviceFormatProperties vkGetPhysicalDeviceFormatProperties = 0; + PFN_vkGetPhysicalDeviceImageFormatProperties vkGetPhysicalDeviceImageFormatProperties = 0; + PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties = 0; + PFN_vkGetPhysicalDeviceQueueFamilyProperties vkGetPhysicalDeviceQueueFamilyProperties = 0; + PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties = 0; + PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr = 0; + PFN_vkCreateDevice vkCreateDevice = 0; + PFN_vkEnumerateDeviceExtensionProperties vkEnumerateDeviceExtensionProperties = 0; + PFN_vkEnumerateDeviceLayerProperties vkEnumerateDeviceLayerProperties = 0; + PFN_vkGetPhysicalDeviceSparseImageFormatProperties vkGetPhysicalDeviceSparseImageFormatProperties = 0; + + //=== VK_VERSION_1_1 === + PFN_vkEnumeratePhysicalDeviceGroups vkEnumeratePhysicalDeviceGroups = 0; + PFN_vkGetPhysicalDeviceFeatures2 vkGetPhysicalDeviceFeatures2 = 0; + PFN_vkGetPhysicalDeviceProperties2 vkGetPhysicalDeviceProperties2 = 0; + PFN_vkGetPhysicalDeviceFormatProperties2 vkGetPhysicalDeviceFormatProperties2 = 0; + PFN_vkGetPhysicalDeviceImageFormatProperties2 vkGetPhysicalDeviceImageFormatProperties2 = 0; + PFN_vkGetPhysicalDeviceQueueFamilyProperties2 vkGetPhysicalDeviceQueueFamilyProperties2 = 0; + PFN_vkGetPhysicalDeviceMemoryProperties2 vkGetPhysicalDeviceMemoryProperties2 = 0; + PFN_vkGetPhysicalDeviceSparseImageFormatProperties2 vkGetPhysicalDeviceSparseImageFormatProperties2 = 0; + PFN_vkGetPhysicalDeviceExternalBufferProperties vkGetPhysicalDeviceExternalBufferProperties = 0; + PFN_vkGetPhysicalDeviceExternalFenceProperties vkGetPhysicalDeviceExternalFenceProperties = 0; + PFN_vkGetPhysicalDeviceExternalSemaphoreProperties vkGetPhysicalDeviceExternalSemaphoreProperties = 0; //=== VK_VERSION_1_3 === - vkGetPhysicalDeviceToolProperties = PFN_vkGetPhysicalDeviceToolProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceToolProperties" ) ); + PFN_vkGetPhysicalDeviceToolProperties vkGetPhysicalDeviceToolProperties = 0; //=== VK_KHR_surface === - vkDestroySurfaceKHR = PFN_vkDestroySurfaceKHR( vkGetInstanceProcAddr( instance, "vkDestroySurfaceKHR" ) ); - vkGetPhysicalDeviceSurfaceSupportKHR = - PFN_vkGetPhysicalDeviceSurfaceSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceSupportKHR" ) ); - vkGetPhysicalDeviceSurfaceCapabilitiesKHR = - PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilitiesKHR" ) ); - vkGetPhysicalDeviceSurfaceFormatsKHR = - PFN_vkGetPhysicalDeviceSurfaceFormatsKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceFormatsKHR" ) ); - vkGetPhysicalDeviceSurfacePresentModesKHR = - PFN_vkGetPhysicalDeviceSurfacePresentModesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfacePresentModesKHR" ) ); + PFN_vkDestroySurfaceKHR vkDestroySurfaceKHR = 0; + 
PFN_vkGetPhysicalDeviceSurfaceSupportKHR vkGetPhysicalDeviceSurfaceSupportKHR = 0; + PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR vkGetPhysicalDeviceSurfaceCapabilitiesKHR = 0; + PFN_vkGetPhysicalDeviceSurfaceFormatsKHR vkGetPhysicalDeviceSurfaceFormatsKHR = 0; + PFN_vkGetPhysicalDeviceSurfacePresentModesKHR vkGetPhysicalDeviceSurfacePresentModesKHR = 0; //=== VK_KHR_swapchain === - vkGetPhysicalDevicePresentRectanglesKHR = - PFN_vkGetPhysicalDevicePresentRectanglesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDevicePresentRectanglesKHR" ) ); + PFN_vkGetPhysicalDevicePresentRectanglesKHR vkGetPhysicalDevicePresentRectanglesKHR = 0; //=== VK_KHR_display === - vkGetPhysicalDeviceDisplayPropertiesKHR = - PFN_vkGetPhysicalDeviceDisplayPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPropertiesKHR" ) ); - vkGetPhysicalDeviceDisplayPlanePropertiesKHR = - PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPlanePropertiesKHR" ) ); - vkGetDisplayPlaneSupportedDisplaysKHR = - PFN_vkGetDisplayPlaneSupportedDisplaysKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneSupportedDisplaysKHR" ) ); - vkGetDisplayModePropertiesKHR = PFN_vkGetDisplayModePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayModePropertiesKHR" ) ); - vkCreateDisplayModeKHR = PFN_vkCreateDisplayModeKHR( vkGetInstanceProcAddr( instance, "vkCreateDisplayModeKHR" ) ); - vkGetDisplayPlaneCapabilitiesKHR = PFN_vkGetDisplayPlaneCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneCapabilitiesKHR" ) ); - vkCreateDisplayPlaneSurfaceKHR = PFN_vkCreateDisplayPlaneSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateDisplayPlaneSurfaceKHR" ) ); + PFN_vkGetPhysicalDeviceDisplayPropertiesKHR vkGetPhysicalDeviceDisplayPropertiesKHR = 0; + PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR vkGetPhysicalDeviceDisplayPlanePropertiesKHR = 0; + PFN_vkGetDisplayPlaneSupportedDisplaysKHR vkGetDisplayPlaneSupportedDisplaysKHR = 0; + PFN_vkGetDisplayModePropertiesKHR vkGetDisplayModePropertiesKHR = 0; + PFN_vkCreateDisplayModeKHR vkCreateDisplayModeKHR = 0; + PFN_vkGetDisplayPlaneCapabilitiesKHR vkGetDisplayPlaneCapabilitiesKHR = 0; + PFN_vkCreateDisplayPlaneSurfaceKHR vkCreateDisplayPlaneSurfaceKHR = 0; # if defined( VK_USE_PLATFORM_XLIB_KHR ) //=== VK_KHR_xlib_surface === - vkCreateXlibSurfaceKHR = PFN_vkCreateXlibSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateXlibSurfaceKHR" ) ); - vkGetPhysicalDeviceXlibPresentationSupportKHR = - PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceXlibPresentationSupportKHR" ) ); + PFN_vkCreateXlibSurfaceKHR vkCreateXlibSurfaceKHR = 0; + PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR vkGetPhysicalDeviceXlibPresentationSupportKHR = 0; +# else + PFN_dummy vkCreateXlibSurfaceKHR_placeholder = 0; + PFN_dummy vkGetPhysicalDeviceXlibPresentationSupportKHR_placeholder = 0; # endif /*VK_USE_PLATFORM_XLIB_KHR*/ # if defined( VK_USE_PLATFORM_XCB_KHR ) //=== VK_KHR_xcb_surface === - vkCreateXcbSurfaceKHR = PFN_vkCreateXcbSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateXcbSurfaceKHR" ) ); - vkGetPhysicalDeviceXcbPresentationSupportKHR = - PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceXcbPresentationSupportKHR" ) ); + PFN_vkCreateXcbSurfaceKHR vkCreateXcbSurfaceKHR = 0; + PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR vkGetPhysicalDeviceXcbPresentationSupportKHR = 0; +# else + PFN_dummy 
vkCreateXcbSurfaceKHR_placeholder = 0; + PFN_dummy vkGetPhysicalDeviceXcbPresentationSupportKHR_placeholder = 0; # endif /*VK_USE_PLATFORM_XCB_KHR*/ # if defined( VK_USE_PLATFORM_WAYLAND_KHR ) //=== VK_KHR_wayland_surface === - vkCreateWaylandSurfaceKHR = PFN_vkCreateWaylandSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateWaylandSurfaceKHR" ) ); - vkGetPhysicalDeviceWaylandPresentationSupportKHR = - PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceWaylandPresentationSupportKHR" ) ); + PFN_vkCreateWaylandSurfaceKHR vkCreateWaylandSurfaceKHR = 0; + PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR vkGetPhysicalDeviceWaylandPresentationSupportKHR = 0; +# else + PFN_dummy vkCreateWaylandSurfaceKHR_placeholder = 0; + PFN_dummy vkGetPhysicalDeviceWaylandPresentationSupportKHR_placeholder = 0; # endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ # if defined( VK_USE_PLATFORM_ANDROID_KHR ) //=== VK_KHR_android_surface === - vkCreateAndroidSurfaceKHR = PFN_vkCreateAndroidSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateAndroidSurfaceKHR" ) ); + PFN_vkCreateAndroidSurfaceKHR vkCreateAndroidSurfaceKHR = 0; +# else + PFN_dummy vkCreateAndroidSurfaceKHR_placeholder = 0; # endif /*VK_USE_PLATFORM_ANDROID_KHR*/ # if defined( VK_USE_PLATFORM_WIN32_KHR ) //=== VK_KHR_win32_surface === - vkCreateWin32SurfaceKHR = PFN_vkCreateWin32SurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateWin32SurfaceKHR" ) ); - vkGetPhysicalDeviceWin32PresentationSupportKHR = - PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceWin32PresentationSupportKHR" ) ); + PFN_vkCreateWin32SurfaceKHR vkCreateWin32SurfaceKHR = 0; + PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR vkGetPhysicalDeviceWin32PresentationSupportKHR = 0; +# else + PFN_dummy vkCreateWin32SurfaceKHR_placeholder = 0; + PFN_dummy vkGetPhysicalDeviceWin32PresentationSupportKHR_placeholder = 0; # endif /*VK_USE_PLATFORM_WIN32_KHR*/ //=== VK_EXT_debug_report === - vkCreateDebugReportCallbackEXT = PFN_vkCreateDebugReportCallbackEXT( vkGetInstanceProcAddr( instance, "vkCreateDebugReportCallbackEXT" ) ); - vkDestroyDebugReportCallbackEXT = PFN_vkDestroyDebugReportCallbackEXT( vkGetInstanceProcAddr( instance, "vkDestroyDebugReportCallbackEXT" ) ); - vkDebugReportMessageEXT = PFN_vkDebugReportMessageEXT( vkGetInstanceProcAddr( instance, "vkDebugReportMessageEXT" ) ); + PFN_vkCreateDebugReportCallbackEXT vkCreateDebugReportCallbackEXT = 0; + PFN_vkDestroyDebugReportCallbackEXT vkDestroyDebugReportCallbackEXT = 0; + PFN_vkDebugReportMessageEXT vkDebugReportMessageEXT = 0; //=== VK_KHR_video_queue === - vkGetPhysicalDeviceVideoCapabilitiesKHR = - PFN_vkGetPhysicalDeviceVideoCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceVideoCapabilitiesKHR" ) ); - vkGetPhysicalDeviceVideoFormatPropertiesKHR = - PFN_vkGetPhysicalDeviceVideoFormatPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceVideoFormatPropertiesKHR" ) ); + PFN_vkGetPhysicalDeviceVideoCapabilitiesKHR vkGetPhysicalDeviceVideoCapabilitiesKHR = 0; + PFN_vkGetPhysicalDeviceVideoFormatPropertiesKHR vkGetPhysicalDeviceVideoFormatPropertiesKHR = 0; # if defined( VK_USE_PLATFORM_GGP ) //=== VK_GGP_stream_descriptor_surface === - vkCreateStreamDescriptorSurfaceGGP = PFN_vkCreateStreamDescriptorSurfaceGGP( vkGetInstanceProcAddr( instance, "vkCreateStreamDescriptorSurfaceGGP" ) ); + PFN_vkCreateStreamDescriptorSurfaceGGP vkCreateStreamDescriptorSurfaceGGP = 0; +# else + PFN_dummy 
vkCreateStreamDescriptorSurfaceGGP_placeholder = 0; # endif /*VK_USE_PLATFORM_GGP*/ //=== VK_NV_external_memory_capabilities === - vkGetPhysicalDeviceExternalImageFormatPropertiesNV = - PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalImageFormatPropertiesNV" ) ); + PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV vkGetPhysicalDeviceExternalImageFormatPropertiesNV = 0; //=== VK_KHR_get_physical_device_properties2 === - vkGetPhysicalDeviceFeatures2KHR = PFN_vkGetPhysicalDeviceFeatures2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures2KHR" ) ); - if ( !vkGetPhysicalDeviceFeatures2 ) - vkGetPhysicalDeviceFeatures2 = vkGetPhysicalDeviceFeatures2KHR; - vkGetPhysicalDeviceProperties2KHR = PFN_vkGetPhysicalDeviceProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties2KHR" ) ); - if ( !vkGetPhysicalDeviceProperties2 ) - vkGetPhysicalDeviceProperties2 = vkGetPhysicalDeviceProperties2KHR; - vkGetPhysicalDeviceFormatProperties2KHR = - PFN_vkGetPhysicalDeviceFormatProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties2KHR" ) ); - if ( !vkGetPhysicalDeviceFormatProperties2 ) - vkGetPhysicalDeviceFormatProperties2 = vkGetPhysicalDeviceFormatProperties2KHR; - vkGetPhysicalDeviceImageFormatProperties2KHR = - PFN_vkGetPhysicalDeviceImageFormatProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties2KHR" ) ); - if ( !vkGetPhysicalDeviceImageFormatProperties2 ) - vkGetPhysicalDeviceImageFormatProperties2 = vkGetPhysicalDeviceImageFormatProperties2KHR; - vkGetPhysicalDeviceQueueFamilyProperties2KHR = - PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties2KHR" ) ); - if ( !vkGetPhysicalDeviceQueueFamilyProperties2 ) - vkGetPhysicalDeviceQueueFamilyProperties2 = vkGetPhysicalDeviceQueueFamilyProperties2KHR; - vkGetPhysicalDeviceMemoryProperties2KHR = - PFN_vkGetPhysicalDeviceMemoryProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties2KHR" ) ); - if ( !vkGetPhysicalDeviceMemoryProperties2 ) - vkGetPhysicalDeviceMemoryProperties2 = vkGetPhysicalDeviceMemoryProperties2KHR; - vkGetPhysicalDeviceSparseImageFormatProperties2KHR = - PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties2KHR" ) ); - if ( !vkGetPhysicalDeviceSparseImageFormatProperties2 ) - vkGetPhysicalDeviceSparseImageFormatProperties2 = vkGetPhysicalDeviceSparseImageFormatProperties2KHR; + PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR = 0; + PFN_vkGetPhysicalDeviceProperties2KHR vkGetPhysicalDeviceProperties2KHR = 0; + PFN_vkGetPhysicalDeviceFormatProperties2KHR vkGetPhysicalDeviceFormatProperties2KHR = 0; + PFN_vkGetPhysicalDeviceImageFormatProperties2KHR vkGetPhysicalDeviceImageFormatProperties2KHR = 0; + PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR vkGetPhysicalDeviceQueueFamilyProperties2KHR = 0; + PFN_vkGetPhysicalDeviceMemoryProperties2KHR vkGetPhysicalDeviceMemoryProperties2KHR = 0; + PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR vkGetPhysicalDeviceSparseImageFormatProperties2KHR = 0; # if defined( VK_USE_PLATFORM_VI_NN ) //=== VK_NN_vi_surface === - vkCreateViSurfaceNN = PFN_vkCreateViSurfaceNN( vkGetInstanceProcAddr( instance, "vkCreateViSurfaceNN" ) ); + PFN_vkCreateViSurfaceNN vkCreateViSurfaceNN = 0; +# else + PFN_dummy 
vkCreateViSurfaceNN_placeholder = 0; # endif /*VK_USE_PLATFORM_VI_NN*/ //=== VK_KHR_device_group_creation === - vkEnumeratePhysicalDeviceGroupsKHR = PFN_vkEnumeratePhysicalDeviceGroupsKHR( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceGroupsKHR" ) ); - if ( !vkEnumeratePhysicalDeviceGroups ) - vkEnumeratePhysicalDeviceGroups = vkEnumeratePhysicalDeviceGroupsKHR; + PFN_vkEnumeratePhysicalDeviceGroupsKHR vkEnumeratePhysicalDeviceGroupsKHR = 0; //=== VK_KHR_external_memory_capabilities === - vkGetPhysicalDeviceExternalBufferPropertiesKHR = - PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalBufferPropertiesKHR" ) ); - if ( !vkGetPhysicalDeviceExternalBufferProperties ) - vkGetPhysicalDeviceExternalBufferProperties = vkGetPhysicalDeviceExternalBufferPropertiesKHR; + PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR vkGetPhysicalDeviceExternalBufferPropertiesKHR = 0; //=== VK_KHR_external_semaphore_capabilities === - vkGetPhysicalDeviceExternalSemaphorePropertiesKHR = - PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalSemaphorePropertiesKHR" ) ); - if ( !vkGetPhysicalDeviceExternalSemaphoreProperties ) - vkGetPhysicalDeviceExternalSemaphoreProperties = vkGetPhysicalDeviceExternalSemaphorePropertiesKHR; + PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR vkGetPhysicalDeviceExternalSemaphorePropertiesKHR = 0; //=== VK_EXT_direct_mode_display === - vkReleaseDisplayEXT = PFN_vkReleaseDisplayEXT( vkGetInstanceProcAddr( instance, "vkReleaseDisplayEXT" ) ); + PFN_vkReleaseDisplayEXT vkReleaseDisplayEXT = 0; # if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT ) //=== VK_EXT_acquire_xlib_display === - vkAcquireXlibDisplayEXT = PFN_vkAcquireXlibDisplayEXT( vkGetInstanceProcAddr( instance, "vkAcquireXlibDisplayEXT" ) ); - vkGetRandROutputDisplayEXT = PFN_vkGetRandROutputDisplayEXT( vkGetInstanceProcAddr( instance, "vkGetRandROutputDisplayEXT" ) ); + PFN_vkAcquireXlibDisplayEXT vkAcquireXlibDisplayEXT = 0; + PFN_vkGetRandROutputDisplayEXT vkGetRandROutputDisplayEXT = 0; +# else + PFN_dummy vkAcquireXlibDisplayEXT_placeholder = 0; + PFN_dummy vkGetRandROutputDisplayEXT_placeholder = 0; # endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ //=== VK_EXT_display_surface_counter === - vkGetPhysicalDeviceSurfaceCapabilities2EXT = - PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilities2EXT" ) ); + PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT vkGetPhysicalDeviceSurfaceCapabilities2EXT = 0; //=== VK_KHR_external_fence_capabilities === - vkGetPhysicalDeviceExternalFencePropertiesKHR = - PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalFencePropertiesKHR" ) ); - if ( !vkGetPhysicalDeviceExternalFenceProperties ) - vkGetPhysicalDeviceExternalFenceProperties = vkGetPhysicalDeviceExternalFencePropertiesKHR; + PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR vkGetPhysicalDeviceExternalFencePropertiesKHR = 0; //=== VK_KHR_performance_query === - vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR = PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( - vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR" ) ); - vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR = PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( - vkGetInstanceProcAddr( instance, 
"vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR" ) ); + PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR = 0; + PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR = 0; //=== VK_KHR_get_surface_capabilities2 === - vkGetPhysicalDeviceSurfaceCapabilities2KHR = - PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilities2KHR" ) ); - vkGetPhysicalDeviceSurfaceFormats2KHR = - PFN_vkGetPhysicalDeviceSurfaceFormats2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceFormats2KHR" ) ); + PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR vkGetPhysicalDeviceSurfaceCapabilities2KHR = 0; + PFN_vkGetPhysicalDeviceSurfaceFormats2KHR vkGetPhysicalDeviceSurfaceFormats2KHR = 0; //=== VK_KHR_get_display_properties2 === - vkGetPhysicalDeviceDisplayProperties2KHR = - PFN_vkGetPhysicalDeviceDisplayProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayProperties2KHR" ) ); - vkGetPhysicalDeviceDisplayPlaneProperties2KHR = - PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPlaneProperties2KHR" ) ); - vkGetDisplayModeProperties2KHR = PFN_vkGetDisplayModeProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetDisplayModeProperties2KHR" ) ); - vkGetDisplayPlaneCapabilities2KHR = PFN_vkGetDisplayPlaneCapabilities2KHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneCapabilities2KHR" ) ); + PFN_vkGetPhysicalDeviceDisplayProperties2KHR vkGetPhysicalDeviceDisplayProperties2KHR = 0; + PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR vkGetPhysicalDeviceDisplayPlaneProperties2KHR = 0; + PFN_vkGetDisplayModeProperties2KHR vkGetDisplayModeProperties2KHR = 0; + PFN_vkGetDisplayPlaneCapabilities2KHR vkGetDisplayPlaneCapabilities2KHR = 0; # if defined( VK_USE_PLATFORM_IOS_MVK ) //=== VK_MVK_ios_surface === - vkCreateIOSSurfaceMVK = PFN_vkCreateIOSSurfaceMVK( vkGetInstanceProcAddr( instance, "vkCreateIOSSurfaceMVK" ) ); + PFN_vkCreateIOSSurfaceMVK vkCreateIOSSurfaceMVK = 0; +# else + PFN_dummy vkCreateIOSSurfaceMVK_placeholder = 0; # endif /*VK_USE_PLATFORM_IOS_MVK*/ # if defined( VK_USE_PLATFORM_MACOS_MVK ) //=== VK_MVK_macos_surface === - vkCreateMacOSSurfaceMVK = PFN_vkCreateMacOSSurfaceMVK( vkGetInstanceProcAddr( instance, "vkCreateMacOSSurfaceMVK" ) ); + PFN_vkCreateMacOSSurfaceMVK vkCreateMacOSSurfaceMVK = 0; +# else + PFN_dummy vkCreateMacOSSurfaceMVK_placeholder = 0; # endif /*VK_USE_PLATFORM_MACOS_MVK*/ //=== VK_EXT_debug_utils === - vkCreateDebugUtilsMessengerEXT = PFN_vkCreateDebugUtilsMessengerEXT( vkGetInstanceProcAddr( instance, "vkCreateDebugUtilsMessengerEXT" ) ); - vkDestroyDebugUtilsMessengerEXT = PFN_vkDestroyDebugUtilsMessengerEXT( vkGetInstanceProcAddr( instance, "vkDestroyDebugUtilsMessengerEXT" ) ); - vkSubmitDebugUtilsMessageEXT = PFN_vkSubmitDebugUtilsMessageEXT( vkGetInstanceProcAddr( instance, "vkSubmitDebugUtilsMessageEXT" ) ); + PFN_vkCreateDebugUtilsMessengerEXT vkCreateDebugUtilsMessengerEXT = 0; + PFN_vkDestroyDebugUtilsMessengerEXT vkDestroyDebugUtilsMessengerEXT = 0; + PFN_vkSubmitDebugUtilsMessageEXT vkSubmitDebugUtilsMessageEXT = 0; //=== VK_EXT_sample_locations === - vkGetPhysicalDeviceMultisamplePropertiesEXT = - PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMultisamplePropertiesEXT" ) ); + 
PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT vkGetPhysicalDeviceMultisamplePropertiesEXT = 0; //=== VK_EXT_calibrated_timestamps === - vkGetPhysicalDeviceCalibrateableTimeDomainsEXT = - PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCalibrateableTimeDomainsEXT" ) ); - if ( !vkGetPhysicalDeviceCalibrateableTimeDomainsKHR ) - vkGetPhysicalDeviceCalibrateableTimeDomainsKHR = vkGetPhysicalDeviceCalibrateableTimeDomainsEXT; + PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT vkGetPhysicalDeviceCalibrateableTimeDomainsEXT = 0; # if defined( VK_USE_PLATFORM_FUCHSIA ) //=== VK_FUCHSIA_imagepipe_surface === - vkCreateImagePipeSurfaceFUCHSIA = PFN_vkCreateImagePipeSurfaceFUCHSIA( vkGetInstanceProcAddr( instance, "vkCreateImagePipeSurfaceFUCHSIA" ) ); + PFN_vkCreateImagePipeSurfaceFUCHSIA vkCreateImagePipeSurfaceFUCHSIA = 0; +# else + PFN_dummy vkCreateImagePipeSurfaceFUCHSIA_placeholder = 0; # endif /*VK_USE_PLATFORM_FUCHSIA*/ # if defined( VK_USE_PLATFORM_METAL_EXT ) //=== VK_EXT_metal_surface === - vkCreateMetalSurfaceEXT = PFN_vkCreateMetalSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateMetalSurfaceEXT" ) ); + PFN_vkCreateMetalSurfaceEXT vkCreateMetalSurfaceEXT = 0; +# else + PFN_dummy vkCreateMetalSurfaceEXT_placeholder = 0; # endif /*VK_USE_PLATFORM_METAL_EXT*/ //=== VK_KHR_fragment_shading_rate === - vkGetPhysicalDeviceFragmentShadingRatesKHR = - PFN_vkGetPhysicalDeviceFragmentShadingRatesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFragmentShadingRatesKHR" ) ); + PFN_vkGetPhysicalDeviceFragmentShadingRatesKHR vkGetPhysicalDeviceFragmentShadingRatesKHR = 0; //=== VK_EXT_tooling_info === - vkGetPhysicalDeviceToolPropertiesEXT = - PFN_vkGetPhysicalDeviceToolPropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceToolPropertiesEXT" ) ); - if ( !vkGetPhysicalDeviceToolProperties ) - vkGetPhysicalDeviceToolProperties = vkGetPhysicalDeviceToolPropertiesEXT; + PFN_vkGetPhysicalDeviceToolPropertiesEXT vkGetPhysicalDeviceToolPropertiesEXT = 0; //=== VK_NV_cooperative_matrix === - vkGetPhysicalDeviceCooperativeMatrixPropertiesNV = - PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCooperativeMatrixPropertiesNV" ) ); + PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV vkGetPhysicalDeviceCooperativeMatrixPropertiesNV = 0; //=== VK_NV_coverage_reduction_mode === - vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV = PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( - vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV" ) ); + PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV = 0; # if defined( VK_USE_PLATFORM_WIN32_KHR ) //=== VK_EXT_full_screen_exclusive === - vkGetPhysicalDeviceSurfacePresentModes2EXT = - PFN_vkGetPhysicalDeviceSurfacePresentModes2EXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfacePresentModes2EXT" ) ); + PFN_vkGetPhysicalDeviceSurfacePresentModes2EXT vkGetPhysicalDeviceSurfacePresentModes2EXT = 0; +# else + PFN_dummy vkGetPhysicalDeviceSurfacePresentModes2EXT_placeholder = 0; # endif /*VK_USE_PLATFORM_WIN32_KHR*/ //=== VK_EXT_headless_surface === - vkCreateHeadlessSurfaceEXT = PFN_vkCreateHeadlessSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateHeadlessSurfaceEXT" ) ); + PFN_vkCreateHeadlessSurfaceEXT vkCreateHeadlessSurfaceEXT = 0; //=== 
VK_EXT_acquire_drm_display === - vkAcquireDrmDisplayEXT = PFN_vkAcquireDrmDisplayEXT( vkGetInstanceProcAddr( instance, "vkAcquireDrmDisplayEXT" ) ); - vkGetDrmDisplayEXT = PFN_vkGetDrmDisplayEXT( vkGetInstanceProcAddr( instance, "vkGetDrmDisplayEXT" ) ); + PFN_vkAcquireDrmDisplayEXT vkAcquireDrmDisplayEXT = 0; + PFN_vkGetDrmDisplayEXT vkGetDrmDisplayEXT = 0; //=== VK_KHR_video_encode_queue === - vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR = PFN_vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR( - vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR" ) ); + PFN_vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR = 0; # if defined( VK_USE_PLATFORM_WIN32_KHR ) //=== VK_NV_acquire_winrt_display === - vkAcquireWinrtDisplayNV = PFN_vkAcquireWinrtDisplayNV( vkGetInstanceProcAddr( instance, "vkAcquireWinrtDisplayNV" ) ); - vkGetWinrtDisplayNV = PFN_vkGetWinrtDisplayNV( vkGetInstanceProcAddr( instance, "vkGetWinrtDisplayNV" ) ); + PFN_vkAcquireWinrtDisplayNV vkAcquireWinrtDisplayNV = 0; + PFN_vkGetWinrtDisplayNV vkGetWinrtDisplayNV = 0; +# else + PFN_dummy vkAcquireWinrtDisplayNV_placeholder = 0; + PFN_dummy vkGetWinrtDisplayNV_placeholder = 0; # endif /*VK_USE_PLATFORM_WIN32_KHR*/ # if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) //=== VK_EXT_directfb_surface === - vkCreateDirectFBSurfaceEXT = PFN_vkCreateDirectFBSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateDirectFBSurfaceEXT" ) ); - vkGetPhysicalDeviceDirectFBPresentationSupportEXT = - PFN_vkGetPhysicalDeviceDirectFBPresentationSupportEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDirectFBPresentationSupportEXT" ) ); + PFN_vkCreateDirectFBSurfaceEXT vkCreateDirectFBSurfaceEXT = 0; + PFN_vkGetPhysicalDeviceDirectFBPresentationSupportEXT vkGetPhysicalDeviceDirectFBPresentationSupportEXT = 0; +# else + PFN_dummy vkCreateDirectFBSurfaceEXT_placeholder = 0; + PFN_dummy vkGetPhysicalDeviceDirectFBPresentationSupportEXT_placeholder = 0; # endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ # if defined( VK_USE_PLATFORM_SCREEN_QNX ) //=== VK_QNX_screen_surface === - vkCreateScreenSurfaceQNX = PFN_vkCreateScreenSurfaceQNX( vkGetInstanceProcAddr( instance, "vkCreateScreenSurfaceQNX" ) ); - vkGetPhysicalDeviceScreenPresentationSupportQNX = - PFN_vkGetPhysicalDeviceScreenPresentationSupportQNX( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceScreenPresentationSupportQNX" ) ); + PFN_vkCreateScreenSurfaceQNX vkCreateScreenSurfaceQNX = 0; + PFN_vkGetPhysicalDeviceScreenPresentationSupportQNX vkGetPhysicalDeviceScreenPresentationSupportQNX = 0; +# else + PFN_dummy vkCreateScreenSurfaceQNX_placeholder = 0; + PFN_dummy vkGetPhysicalDeviceScreenPresentationSupportQNX_placeholder = 0; # endif /*VK_USE_PLATFORM_SCREEN_QNX*/ //=== VK_NV_optical_flow === - vkGetPhysicalDeviceOpticalFlowImageFormatsNV = - PFN_vkGetPhysicalDeviceOpticalFlowImageFormatsNV( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceOpticalFlowImageFormatsNV" ) ); + PFN_vkGetPhysicalDeviceOpticalFlowImageFormatsNV vkGetPhysicalDeviceOpticalFlowImageFormatsNV = 0; //=== VK_KHR_cooperative_matrix === - vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR = - PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR" ) ); + PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR = 0; //=== VK_KHR_calibrated_timestamps === - 
vkGetPhysicalDeviceCalibrateableTimeDomainsKHR = - PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCalibrateableTimeDomainsKHR" ) ); + PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsKHR vkGetPhysicalDeviceCalibrateableTimeDomainsKHR = 0; - vkGetDeviceProcAddr = PFN_vkGetDeviceProcAddr( vkGetInstanceProcAddr( instance, "vkGetDeviceProcAddr" ) ); - } + //=== VK_NV_cooperative_matrix2 === + PFN_vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV = 0; - public: - //=== VK_VERSION_1_0 === - PFN_vkDestroyInstance vkDestroyInstance = 0; - PFN_vkEnumeratePhysicalDevices vkEnumeratePhysicalDevices = 0; - PFN_vkGetPhysicalDeviceFeatures vkGetPhysicalDeviceFeatures = 0; - PFN_vkGetPhysicalDeviceFormatProperties vkGetPhysicalDeviceFormatProperties = 0; - PFN_vkGetPhysicalDeviceImageFormatProperties vkGetPhysicalDeviceImageFormatProperties = 0; - PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties = 0; - PFN_vkGetPhysicalDeviceQueueFamilyProperties vkGetPhysicalDeviceQueueFamilyProperties = 0; - PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties = 0; - PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr = 0; - PFN_vkCreateDevice vkCreateDevice = 0; - PFN_vkEnumerateDeviceExtensionProperties vkEnumerateDeviceExtensionProperties = 0; - PFN_vkEnumerateDeviceLayerProperties vkEnumerateDeviceLayerProperties = 0; - PFN_vkGetPhysicalDeviceSparseImageFormatProperties vkGetPhysicalDeviceSparseImageFormatProperties = 0; + PFN_vkGetDeviceProcAddr vkGetDeviceProcAddr = 0; + }; - //=== VK_VERSION_1_1 === - PFN_vkEnumeratePhysicalDeviceGroups vkEnumeratePhysicalDeviceGroups = 0; - PFN_vkGetPhysicalDeviceFeatures2 vkGetPhysicalDeviceFeatures2 = 0; - PFN_vkGetPhysicalDeviceProperties2 vkGetPhysicalDeviceProperties2 = 0; - PFN_vkGetPhysicalDeviceFormatProperties2 vkGetPhysicalDeviceFormatProperties2 = 0; - PFN_vkGetPhysicalDeviceImageFormatProperties2 vkGetPhysicalDeviceImageFormatProperties2 = 0; - PFN_vkGetPhysicalDeviceQueueFamilyProperties2 vkGetPhysicalDeviceQueueFamilyProperties2 = 0; - PFN_vkGetPhysicalDeviceMemoryProperties2 vkGetPhysicalDeviceMemoryProperties2 = 0; - PFN_vkGetPhysicalDeviceSparseImageFormatProperties2 vkGetPhysicalDeviceSparseImageFormatProperties2 = 0; - PFN_vkGetPhysicalDeviceExternalBufferProperties vkGetPhysicalDeviceExternalBufferProperties = 0; - PFN_vkGetPhysicalDeviceExternalFenceProperties vkGetPhysicalDeviceExternalFenceProperties = 0; - PFN_vkGetPhysicalDeviceExternalSemaphoreProperties vkGetPhysicalDeviceExternalSemaphoreProperties = 0; + class DeviceDispatcher : public ::VULKAN_HPP_NAMESPACE::detail::DispatchLoaderBase + { + public: + DeviceDispatcher( PFN_vkGetDeviceProcAddr getProcAddr, VkDevice device ) : vkGetDeviceProcAddr( getProcAddr ) + { + //=== VK_VERSION_1_0 === + vkGetDeviceProcAddr = PFN_vkGetDeviceProcAddr( vkGetDeviceProcAddr( device, "vkGetDeviceProcAddr" ) ); + vkDestroyDevice = PFN_vkDestroyDevice( vkGetDeviceProcAddr( device, "vkDestroyDevice" ) ); + vkGetDeviceQueue = PFN_vkGetDeviceQueue( vkGetDeviceProcAddr( device, "vkGetDeviceQueue" ) ); + vkQueueSubmit = PFN_vkQueueSubmit( vkGetDeviceProcAddr( device, "vkQueueSubmit" ) ); + vkQueueWaitIdle = PFN_vkQueueWaitIdle( vkGetDeviceProcAddr( device, "vkQueueWaitIdle" ) ); + vkDeviceWaitIdle = PFN_vkDeviceWaitIdle( vkGetDeviceProcAddr( device, "vkDeviceWaitIdle" ) ); + vkAllocateMemory = PFN_vkAllocateMemory( vkGetDeviceProcAddr( device, 
"vkAllocateMemory" ) ); + vkFreeMemory = PFN_vkFreeMemory( vkGetDeviceProcAddr( device, "vkFreeMemory" ) ); + vkMapMemory = PFN_vkMapMemory( vkGetDeviceProcAddr( device, "vkMapMemory" ) ); + vkUnmapMemory = PFN_vkUnmapMemory( vkGetDeviceProcAddr( device, "vkUnmapMemory" ) ); + vkFlushMappedMemoryRanges = PFN_vkFlushMappedMemoryRanges( vkGetDeviceProcAddr( device, "vkFlushMappedMemoryRanges" ) ); + vkInvalidateMappedMemoryRanges = PFN_vkInvalidateMappedMemoryRanges( vkGetDeviceProcAddr( device, "vkInvalidateMappedMemoryRanges" ) ); + vkGetDeviceMemoryCommitment = PFN_vkGetDeviceMemoryCommitment( vkGetDeviceProcAddr( device, "vkGetDeviceMemoryCommitment" ) ); + vkBindBufferMemory = PFN_vkBindBufferMemory( vkGetDeviceProcAddr( device, "vkBindBufferMemory" ) ); + vkBindImageMemory = PFN_vkBindImageMemory( vkGetDeviceProcAddr( device, "vkBindImageMemory" ) ); + vkGetBufferMemoryRequirements = PFN_vkGetBufferMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements" ) ); + vkGetImageMemoryRequirements = PFN_vkGetImageMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements" ) ); + vkGetImageSparseMemoryRequirements = PFN_vkGetImageSparseMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements" ) ); + vkQueueBindSparse = PFN_vkQueueBindSparse( vkGetDeviceProcAddr( device, "vkQueueBindSparse" ) ); + vkCreateFence = PFN_vkCreateFence( vkGetDeviceProcAddr( device, "vkCreateFence" ) ); + vkDestroyFence = PFN_vkDestroyFence( vkGetDeviceProcAddr( device, "vkDestroyFence" ) ); + vkResetFences = PFN_vkResetFences( vkGetDeviceProcAddr( device, "vkResetFences" ) ); + vkGetFenceStatus = PFN_vkGetFenceStatus( vkGetDeviceProcAddr( device, "vkGetFenceStatus" ) ); + vkWaitForFences = PFN_vkWaitForFences( vkGetDeviceProcAddr( device, "vkWaitForFences" ) ); + vkCreateSemaphore = PFN_vkCreateSemaphore( vkGetDeviceProcAddr( device, "vkCreateSemaphore" ) ); + vkDestroySemaphore = PFN_vkDestroySemaphore( vkGetDeviceProcAddr( device, "vkDestroySemaphore" ) ); + vkCreateEvent = PFN_vkCreateEvent( vkGetDeviceProcAddr( device, "vkCreateEvent" ) ); + vkDestroyEvent = PFN_vkDestroyEvent( vkGetDeviceProcAddr( device, "vkDestroyEvent" ) ); + vkGetEventStatus = PFN_vkGetEventStatus( vkGetDeviceProcAddr( device, "vkGetEventStatus" ) ); + vkSetEvent = PFN_vkSetEvent( vkGetDeviceProcAddr( device, "vkSetEvent" ) ); + vkResetEvent = PFN_vkResetEvent( vkGetDeviceProcAddr( device, "vkResetEvent" ) ); + vkCreateQueryPool = PFN_vkCreateQueryPool( vkGetDeviceProcAddr( device, "vkCreateQueryPool" ) ); + vkDestroyQueryPool = PFN_vkDestroyQueryPool( vkGetDeviceProcAddr( device, "vkDestroyQueryPool" ) ); + vkGetQueryPoolResults = PFN_vkGetQueryPoolResults( vkGetDeviceProcAddr( device, "vkGetQueryPoolResults" ) ); + vkCreateBuffer = PFN_vkCreateBuffer( vkGetDeviceProcAddr( device, "vkCreateBuffer" ) ); + vkDestroyBuffer = PFN_vkDestroyBuffer( vkGetDeviceProcAddr( device, "vkDestroyBuffer" ) ); + vkCreateBufferView = PFN_vkCreateBufferView( vkGetDeviceProcAddr( device, "vkCreateBufferView" ) ); + vkDestroyBufferView = PFN_vkDestroyBufferView( vkGetDeviceProcAddr( device, "vkDestroyBufferView" ) ); + vkCreateImage = PFN_vkCreateImage( vkGetDeviceProcAddr( device, "vkCreateImage" ) ); + vkDestroyImage = PFN_vkDestroyImage( vkGetDeviceProcAddr( device, "vkDestroyImage" ) ); + vkGetImageSubresourceLayout = PFN_vkGetImageSubresourceLayout( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout" ) ); + vkCreateImageView = PFN_vkCreateImageView( vkGetDeviceProcAddr( device, 
"vkCreateImageView" ) ); + vkDestroyImageView = PFN_vkDestroyImageView( vkGetDeviceProcAddr( device, "vkDestroyImageView" ) ); + vkCreateShaderModule = PFN_vkCreateShaderModule( vkGetDeviceProcAddr( device, "vkCreateShaderModule" ) ); + vkDestroyShaderModule = PFN_vkDestroyShaderModule( vkGetDeviceProcAddr( device, "vkDestroyShaderModule" ) ); + vkCreatePipelineCache = PFN_vkCreatePipelineCache( vkGetDeviceProcAddr( device, "vkCreatePipelineCache" ) ); + vkDestroyPipelineCache = PFN_vkDestroyPipelineCache( vkGetDeviceProcAddr( device, "vkDestroyPipelineCache" ) ); + vkGetPipelineCacheData = PFN_vkGetPipelineCacheData( vkGetDeviceProcAddr( device, "vkGetPipelineCacheData" ) ); + vkMergePipelineCaches = PFN_vkMergePipelineCaches( vkGetDeviceProcAddr( device, "vkMergePipelineCaches" ) ); + vkCreateGraphicsPipelines = PFN_vkCreateGraphicsPipelines( vkGetDeviceProcAddr( device, "vkCreateGraphicsPipelines" ) ); + vkCreateComputePipelines = PFN_vkCreateComputePipelines( vkGetDeviceProcAddr( device, "vkCreateComputePipelines" ) ); + vkDestroyPipeline = PFN_vkDestroyPipeline( vkGetDeviceProcAddr( device, "vkDestroyPipeline" ) ); + vkCreatePipelineLayout = PFN_vkCreatePipelineLayout( vkGetDeviceProcAddr( device, "vkCreatePipelineLayout" ) ); + vkDestroyPipelineLayout = PFN_vkDestroyPipelineLayout( vkGetDeviceProcAddr( device, "vkDestroyPipelineLayout" ) ); + vkCreateSampler = PFN_vkCreateSampler( vkGetDeviceProcAddr( device, "vkCreateSampler" ) ); + vkDestroySampler = PFN_vkDestroySampler( vkGetDeviceProcAddr( device, "vkDestroySampler" ) ); + vkCreateDescriptorSetLayout = PFN_vkCreateDescriptorSetLayout( vkGetDeviceProcAddr( device, "vkCreateDescriptorSetLayout" ) ); + vkDestroyDescriptorSetLayout = PFN_vkDestroyDescriptorSetLayout( vkGetDeviceProcAddr( device, "vkDestroyDescriptorSetLayout" ) ); + vkCreateDescriptorPool = PFN_vkCreateDescriptorPool( vkGetDeviceProcAddr( device, "vkCreateDescriptorPool" ) ); + vkDestroyDescriptorPool = PFN_vkDestroyDescriptorPool( vkGetDeviceProcAddr( device, "vkDestroyDescriptorPool" ) ); + vkResetDescriptorPool = PFN_vkResetDescriptorPool( vkGetDeviceProcAddr( device, "vkResetDescriptorPool" ) ); + vkAllocateDescriptorSets = PFN_vkAllocateDescriptorSets( vkGetDeviceProcAddr( device, "vkAllocateDescriptorSets" ) ); + vkFreeDescriptorSets = PFN_vkFreeDescriptorSets( vkGetDeviceProcAddr( device, "vkFreeDescriptorSets" ) ); + vkUpdateDescriptorSets = PFN_vkUpdateDescriptorSets( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSets" ) ); + vkCreateFramebuffer = PFN_vkCreateFramebuffer( vkGetDeviceProcAddr( device, "vkCreateFramebuffer" ) ); + vkDestroyFramebuffer = PFN_vkDestroyFramebuffer( vkGetDeviceProcAddr( device, "vkDestroyFramebuffer" ) ); + vkCreateRenderPass = PFN_vkCreateRenderPass( vkGetDeviceProcAddr( device, "vkCreateRenderPass" ) ); + vkDestroyRenderPass = PFN_vkDestroyRenderPass( vkGetDeviceProcAddr( device, "vkDestroyRenderPass" ) ); + vkGetRenderAreaGranularity = PFN_vkGetRenderAreaGranularity( vkGetDeviceProcAddr( device, "vkGetRenderAreaGranularity" ) ); + vkCreateCommandPool = PFN_vkCreateCommandPool( vkGetDeviceProcAddr( device, "vkCreateCommandPool" ) ); + vkDestroyCommandPool = PFN_vkDestroyCommandPool( vkGetDeviceProcAddr( device, "vkDestroyCommandPool" ) ); + vkResetCommandPool = PFN_vkResetCommandPool( vkGetDeviceProcAddr( device, "vkResetCommandPool" ) ); + vkAllocateCommandBuffers = PFN_vkAllocateCommandBuffers( vkGetDeviceProcAddr( device, "vkAllocateCommandBuffers" ) ); + vkFreeCommandBuffers = PFN_vkFreeCommandBuffers( 
vkGetDeviceProcAddr( device, "vkFreeCommandBuffers" ) ); + vkBeginCommandBuffer = PFN_vkBeginCommandBuffer( vkGetDeviceProcAddr( device, "vkBeginCommandBuffer" ) ); + vkEndCommandBuffer = PFN_vkEndCommandBuffer( vkGetDeviceProcAddr( device, "vkEndCommandBuffer" ) ); + vkResetCommandBuffer = PFN_vkResetCommandBuffer( vkGetDeviceProcAddr( device, "vkResetCommandBuffer" ) ); + vkCmdBindPipeline = PFN_vkCmdBindPipeline( vkGetDeviceProcAddr( device, "vkCmdBindPipeline" ) ); + vkCmdSetViewport = PFN_vkCmdSetViewport( vkGetDeviceProcAddr( device, "vkCmdSetViewport" ) ); + vkCmdSetScissor = PFN_vkCmdSetScissor( vkGetDeviceProcAddr( device, "vkCmdSetScissor" ) ); + vkCmdSetLineWidth = PFN_vkCmdSetLineWidth( vkGetDeviceProcAddr( device, "vkCmdSetLineWidth" ) ); + vkCmdSetDepthBias = PFN_vkCmdSetDepthBias( vkGetDeviceProcAddr( device, "vkCmdSetDepthBias" ) ); + vkCmdSetBlendConstants = PFN_vkCmdSetBlendConstants( vkGetDeviceProcAddr( device, "vkCmdSetBlendConstants" ) ); + vkCmdSetDepthBounds = PFN_vkCmdSetDepthBounds( vkGetDeviceProcAddr( device, "vkCmdSetDepthBounds" ) ); + vkCmdSetStencilCompareMask = PFN_vkCmdSetStencilCompareMask( vkGetDeviceProcAddr( device, "vkCmdSetStencilCompareMask" ) ); + vkCmdSetStencilWriteMask = PFN_vkCmdSetStencilWriteMask( vkGetDeviceProcAddr( device, "vkCmdSetStencilWriteMask" ) ); + vkCmdSetStencilReference = PFN_vkCmdSetStencilReference( vkGetDeviceProcAddr( device, "vkCmdSetStencilReference" ) ); + vkCmdBindDescriptorSets = PFN_vkCmdBindDescriptorSets( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorSets" ) ); + vkCmdBindIndexBuffer = PFN_vkCmdBindIndexBuffer( vkGetDeviceProcAddr( device, "vkCmdBindIndexBuffer" ) ); + vkCmdBindVertexBuffers = PFN_vkCmdBindVertexBuffers( vkGetDeviceProcAddr( device, "vkCmdBindVertexBuffers" ) ); + vkCmdDraw = PFN_vkCmdDraw( vkGetDeviceProcAddr( device, "vkCmdDraw" ) ); + vkCmdDrawIndexed = PFN_vkCmdDrawIndexed( vkGetDeviceProcAddr( device, "vkCmdDrawIndexed" ) ); + vkCmdDrawIndirect = PFN_vkCmdDrawIndirect( vkGetDeviceProcAddr( device, "vkCmdDrawIndirect" ) ); + vkCmdDrawIndexedIndirect = PFN_vkCmdDrawIndexedIndirect( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirect" ) ); + vkCmdDispatch = PFN_vkCmdDispatch( vkGetDeviceProcAddr( device, "vkCmdDispatch" ) ); + vkCmdDispatchIndirect = PFN_vkCmdDispatchIndirect( vkGetDeviceProcAddr( device, "vkCmdDispatchIndirect" ) ); + vkCmdCopyBuffer = PFN_vkCmdCopyBuffer( vkGetDeviceProcAddr( device, "vkCmdCopyBuffer" ) ); + vkCmdCopyImage = PFN_vkCmdCopyImage( vkGetDeviceProcAddr( device, "vkCmdCopyImage" ) ); + vkCmdBlitImage = PFN_vkCmdBlitImage( vkGetDeviceProcAddr( device, "vkCmdBlitImage" ) ); + vkCmdCopyBufferToImage = PFN_vkCmdCopyBufferToImage( vkGetDeviceProcAddr( device, "vkCmdCopyBufferToImage" ) ); + vkCmdCopyImageToBuffer = PFN_vkCmdCopyImageToBuffer( vkGetDeviceProcAddr( device, "vkCmdCopyImageToBuffer" ) ); + vkCmdUpdateBuffer = PFN_vkCmdUpdateBuffer( vkGetDeviceProcAddr( device, "vkCmdUpdateBuffer" ) ); + vkCmdFillBuffer = PFN_vkCmdFillBuffer( vkGetDeviceProcAddr( device, "vkCmdFillBuffer" ) ); + vkCmdClearColorImage = PFN_vkCmdClearColorImage( vkGetDeviceProcAddr( device, "vkCmdClearColorImage" ) ); + vkCmdClearDepthStencilImage = PFN_vkCmdClearDepthStencilImage( vkGetDeviceProcAddr( device, "vkCmdClearDepthStencilImage" ) ); + vkCmdClearAttachments = PFN_vkCmdClearAttachments( vkGetDeviceProcAddr( device, "vkCmdClearAttachments" ) ); + vkCmdResolveImage = PFN_vkCmdResolveImage( vkGetDeviceProcAddr( device, "vkCmdResolveImage" ) ); + vkCmdSetEvent = PFN_vkCmdSetEvent( 
vkGetDeviceProcAddr( device, "vkCmdSetEvent" ) ); + vkCmdResetEvent = PFN_vkCmdResetEvent( vkGetDeviceProcAddr( device, "vkCmdResetEvent" ) ); + vkCmdWaitEvents = PFN_vkCmdWaitEvents( vkGetDeviceProcAddr( device, "vkCmdWaitEvents" ) ); + vkCmdPipelineBarrier = PFN_vkCmdPipelineBarrier( vkGetDeviceProcAddr( device, "vkCmdPipelineBarrier" ) ); + vkCmdBeginQuery = PFN_vkCmdBeginQuery( vkGetDeviceProcAddr( device, "vkCmdBeginQuery" ) ); + vkCmdEndQuery = PFN_vkCmdEndQuery( vkGetDeviceProcAddr( device, "vkCmdEndQuery" ) ); + vkCmdResetQueryPool = PFN_vkCmdResetQueryPool( vkGetDeviceProcAddr( device, "vkCmdResetQueryPool" ) ); + vkCmdWriteTimestamp = PFN_vkCmdWriteTimestamp( vkGetDeviceProcAddr( device, "vkCmdWriteTimestamp" ) ); + vkCmdCopyQueryPoolResults = PFN_vkCmdCopyQueryPoolResults( vkGetDeviceProcAddr( device, "vkCmdCopyQueryPoolResults" ) ); + vkCmdPushConstants = PFN_vkCmdPushConstants( vkGetDeviceProcAddr( device, "vkCmdPushConstants" ) ); + vkCmdBeginRenderPass = PFN_vkCmdBeginRenderPass( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass" ) ); + vkCmdNextSubpass = PFN_vkCmdNextSubpass( vkGetDeviceProcAddr( device, "vkCmdNextSubpass" ) ); + vkCmdEndRenderPass = PFN_vkCmdEndRenderPass( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass" ) ); + vkCmdExecuteCommands = PFN_vkCmdExecuteCommands( vkGetDeviceProcAddr( device, "vkCmdExecuteCommands" ) ); - //=== VK_VERSION_1_3 === - PFN_vkGetPhysicalDeviceToolProperties vkGetPhysicalDeviceToolProperties = 0; + //=== VK_VERSION_1_1 === + vkBindBufferMemory2 = PFN_vkBindBufferMemory2( vkGetDeviceProcAddr( device, "vkBindBufferMemory2" ) ); + vkBindImageMemory2 = PFN_vkBindImageMemory2( vkGetDeviceProcAddr( device, "vkBindImageMemory2" ) ); + vkGetDeviceGroupPeerMemoryFeatures = PFN_vkGetDeviceGroupPeerMemoryFeatures( vkGetDeviceProcAddr( device, "vkGetDeviceGroupPeerMemoryFeatures" ) ); + vkCmdSetDeviceMask = PFN_vkCmdSetDeviceMask( vkGetDeviceProcAddr( device, "vkCmdSetDeviceMask" ) ); + vkCmdDispatchBase = PFN_vkCmdDispatchBase( vkGetDeviceProcAddr( device, "vkCmdDispatchBase" ) ); + vkGetImageMemoryRequirements2 = PFN_vkGetImageMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements2" ) ); + vkGetBufferMemoryRequirements2 = PFN_vkGetBufferMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements2" ) ); + vkGetImageSparseMemoryRequirements2 = PFN_vkGetImageSparseMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements2" ) ); + vkTrimCommandPool = PFN_vkTrimCommandPool( vkGetDeviceProcAddr( device, "vkTrimCommandPool" ) ); + vkGetDeviceQueue2 = PFN_vkGetDeviceQueue2( vkGetDeviceProcAddr( device, "vkGetDeviceQueue2" ) ); + vkCreateSamplerYcbcrConversion = PFN_vkCreateSamplerYcbcrConversion( vkGetDeviceProcAddr( device, "vkCreateSamplerYcbcrConversion" ) ); + vkDestroySamplerYcbcrConversion = PFN_vkDestroySamplerYcbcrConversion( vkGetDeviceProcAddr( device, "vkDestroySamplerYcbcrConversion" ) ); + vkCreateDescriptorUpdateTemplate = PFN_vkCreateDescriptorUpdateTemplate( vkGetDeviceProcAddr( device, "vkCreateDescriptorUpdateTemplate" ) ); + vkDestroyDescriptorUpdateTemplate = PFN_vkDestroyDescriptorUpdateTemplate( vkGetDeviceProcAddr( device, "vkDestroyDescriptorUpdateTemplate" ) ); + vkUpdateDescriptorSetWithTemplate = PFN_vkUpdateDescriptorSetWithTemplate( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSetWithTemplate" ) ); + vkGetDescriptorSetLayoutSupport = PFN_vkGetDescriptorSetLayoutSupport( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutSupport" ) ); - //=== 
VK_KHR_surface === - PFN_vkDestroySurfaceKHR vkDestroySurfaceKHR = 0; - PFN_vkGetPhysicalDeviceSurfaceSupportKHR vkGetPhysicalDeviceSurfaceSupportKHR = 0; - PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR vkGetPhysicalDeviceSurfaceCapabilitiesKHR = 0; - PFN_vkGetPhysicalDeviceSurfaceFormatsKHR vkGetPhysicalDeviceSurfaceFormatsKHR = 0; - PFN_vkGetPhysicalDeviceSurfacePresentModesKHR vkGetPhysicalDeviceSurfacePresentModesKHR = 0; + //=== VK_VERSION_1_2 === + vkCmdDrawIndirectCount = PFN_vkCmdDrawIndirectCount( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCount" ) ); + vkCmdDrawIndexedIndirectCount = PFN_vkCmdDrawIndexedIndirectCount( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCount" ) ); + vkCreateRenderPass2 = PFN_vkCreateRenderPass2( vkGetDeviceProcAddr( device, "vkCreateRenderPass2" ) ); + vkCmdBeginRenderPass2 = PFN_vkCmdBeginRenderPass2( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass2" ) ); + vkCmdNextSubpass2 = PFN_vkCmdNextSubpass2( vkGetDeviceProcAddr( device, "vkCmdNextSubpass2" ) ); + vkCmdEndRenderPass2 = PFN_vkCmdEndRenderPass2( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass2" ) ); + vkResetQueryPool = PFN_vkResetQueryPool( vkGetDeviceProcAddr( device, "vkResetQueryPool" ) ); + vkGetSemaphoreCounterValue = PFN_vkGetSemaphoreCounterValue( vkGetDeviceProcAddr( device, "vkGetSemaphoreCounterValue" ) ); + vkWaitSemaphores = PFN_vkWaitSemaphores( vkGetDeviceProcAddr( device, "vkWaitSemaphores" ) ); + vkSignalSemaphore = PFN_vkSignalSemaphore( vkGetDeviceProcAddr( device, "vkSignalSemaphore" ) ); + vkGetBufferDeviceAddress = PFN_vkGetBufferDeviceAddress( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddress" ) ); + vkGetBufferOpaqueCaptureAddress = PFN_vkGetBufferOpaqueCaptureAddress( vkGetDeviceProcAddr( device, "vkGetBufferOpaqueCaptureAddress" ) ); + vkGetDeviceMemoryOpaqueCaptureAddress = + PFN_vkGetDeviceMemoryOpaqueCaptureAddress( vkGetDeviceProcAddr( device, "vkGetDeviceMemoryOpaqueCaptureAddress" ) ); - //=== VK_KHR_swapchain === - PFN_vkGetPhysicalDevicePresentRectanglesKHR vkGetPhysicalDevicePresentRectanglesKHR = 0; + //=== VK_VERSION_1_3 === + vkCreatePrivateDataSlot = PFN_vkCreatePrivateDataSlot( vkGetDeviceProcAddr( device, "vkCreatePrivateDataSlot" ) ); + vkDestroyPrivateDataSlot = PFN_vkDestroyPrivateDataSlot( vkGetDeviceProcAddr( device, "vkDestroyPrivateDataSlot" ) ); + vkSetPrivateData = PFN_vkSetPrivateData( vkGetDeviceProcAddr( device, "vkSetPrivateData" ) ); + vkGetPrivateData = PFN_vkGetPrivateData( vkGetDeviceProcAddr( device, "vkGetPrivateData" ) ); + vkCmdSetEvent2 = PFN_vkCmdSetEvent2( vkGetDeviceProcAddr( device, "vkCmdSetEvent2" ) ); + vkCmdResetEvent2 = PFN_vkCmdResetEvent2( vkGetDeviceProcAddr( device, "vkCmdResetEvent2" ) ); + vkCmdWaitEvents2 = PFN_vkCmdWaitEvents2( vkGetDeviceProcAddr( device, "vkCmdWaitEvents2" ) ); + vkCmdPipelineBarrier2 = PFN_vkCmdPipelineBarrier2( vkGetDeviceProcAddr( device, "vkCmdPipelineBarrier2" ) ); + vkCmdWriteTimestamp2 = PFN_vkCmdWriteTimestamp2( vkGetDeviceProcAddr( device, "vkCmdWriteTimestamp2" ) ); + vkQueueSubmit2 = PFN_vkQueueSubmit2( vkGetDeviceProcAddr( device, "vkQueueSubmit2" ) ); + vkCmdCopyBuffer2 = PFN_vkCmdCopyBuffer2( vkGetDeviceProcAddr( device, "vkCmdCopyBuffer2" ) ); + vkCmdCopyImage2 = PFN_vkCmdCopyImage2( vkGetDeviceProcAddr( device, "vkCmdCopyImage2" ) ); + vkCmdCopyBufferToImage2 = PFN_vkCmdCopyBufferToImage2( vkGetDeviceProcAddr( device, "vkCmdCopyBufferToImage2" ) ); + vkCmdCopyImageToBuffer2 = PFN_vkCmdCopyImageToBuffer2( vkGetDeviceProcAddr( device, "vkCmdCopyImageToBuffer2" ) ); 
+ vkCmdBlitImage2 = PFN_vkCmdBlitImage2( vkGetDeviceProcAddr( device, "vkCmdBlitImage2" ) ); + vkCmdResolveImage2 = PFN_vkCmdResolveImage2( vkGetDeviceProcAddr( device, "vkCmdResolveImage2" ) ); + vkCmdBeginRendering = PFN_vkCmdBeginRendering( vkGetDeviceProcAddr( device, "vkCmdBeginRendering" ) ); + vkCmdEndRendering = PFN_vkCmdEndRendering( vkGetDeviceProcAddr( device, "vkCmdEndRendering" ) ); + vkCmdSetCullMode = PFN_vkCmdSetCullMode( vkGetDeviceProcAddr( device, "vkCmdSetCullMode" ) ); + vkCmdSetFrontFace = PFN_vkCmdSetFrontFace( vkGetDeviceProcAddr( device, "vkCmdSetFrontFace" ) ); + vkCmdSetPrimitiveTopology = PFN_vkCmdSetPrimitiveTopology( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveTopology" ) ); + vkCmdSetViewportWithCount = PFN_vkCmdSetViewportWithCount( vkGetDeviceProcAddr( device, "vkCmdSetViewportWithCount" ) ); + vkCmdSetScissorWithCount = PFN_vkCmdSetScissorWithCount( vkGetDeviceProcAddr( device, "vkCmdSetScissorWithCount" ) ); + vkCmdBindVertexBuffers2 = PFN_vkCmdBindVertexBuffers2( vkGetDeviceProcAddr( device, "vkCmdBindVertexBuffers2" ) ); + vkCmdSetDepthTestEnable = PFN_vkCmdSetDepthTestEnable( vkGetDeviceProcAddr( device, "vkCmdSetDepthTestEnable" ) ); + vkCmdSetDepthWriteEnable = PFN_vkCmdSetDepthWriteEnable( vkGetDeviceProcAddr( device, "vkCmdSetDepthWriteEnable" ) ); + vkCmdSetDepthCompareOp = PFN_vkCmdSetDepthCompareOp( vkGetDeviceProcAddr( device, "vkCmdSetDepthCompareOp" ) ); + vkCmdSetDepthBoundsTestEnable = PFN_vkCmdSetDepthBoundsTestEnable( vkGetDeviceProcAddr( device, "vkCmdSetDepthBoundsTestEnable" ) ); + vkCmdSetStencilTestEnable = PFN_vkCmdSetStencilTestEnable( vkGetDeviceProcAddr( device, "vkCmdSetStencilTestEnable" ) ); + vkCmdSetStencilOp = PFN_vkCmdSetStencilOp( vkGetDeviceProcAddr( device, "vkCmdSetStencilOp" ) ); + vkCmdSetRasterizerDiscardEnable = PFN_vkCmdSetRasterizerDiscardEnable( vkGetDeviceProcAddr( device, "vkCmdSetRasterizerDiscardEnable" ) ); + vkCmdSetDepthBiasEnable = PFN_vkCmdSetDepthBiasEnable( vkGetDeviceProcAddr( device, "vkCmdSetDepthBiasEnable" ) ); + vkCmdSetPrimitiveRestartEnable = PFN_vkCmdSetPrimitiveRestartEnable( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveRestartEnable" ) ); + vkGetDeviceBufferMemoryRequirements = PFN_vkGetDeviceBufferMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetDeviceBufferMemoryRequirements" ) ); + vkGetDeviceImageMemoryRequirements = PFN_vkGetDeviceImageMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetDeviceImageMemoryRequirements" ) ); + vkGetDeviceImageSparseMemoryRequirements = + PFN_vkGetDeviceImageSparseMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetDeviceImageSparseMemoryRequirements" ) ); - //=== VK_KHR_display === - PFN_vkGetPhysicalDeviceDisplayPropertiesKHR vkGetPhysicalDeviceDisplayPropertiesKHR = 0; - PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR vkGetPhysicalDeviceDisplayPlanePropertiesKHR = 0; - PFN_vkGetDisplayPlaneSupportedDisplaysKHR vkGetDisplayPlaneSupportedDisplaysKHR = 0; - PFN_vkGetDisplayModePropertiesKHR vkGetDisplayModePropertiesKHR = 0; - PFN_vkCreateDisplayModeKHR vkCreateDisplayModeKHR = 0; - PFN_vkGetDisplayPlaneCapabilitiesKHR vkGetDisplayPlaneCapabilitiesKHR = 0; - PFN_vkCreateDisplayPlaneSurfaceKHR vkCreateDisplayPlaneSurfaceKHR = 0; + //=== VK_KHR_swapchain === + vkCreateSwapchainKHR = PFN_vkCreateSwapchainKHR( vkGetDeviceProcAddr( device, "vkCreateSwapchainKHR" ) ); + vkDestroySwapchainKHR = PFN_vkDestroySwapchainKHR( vkGetDeviceProcAddr( device, "vkDestroySwapchainKHR" ) ); + vkGetSwapchainImagesKHR = PFN_vkGetSwapchainImagesKHR( 
vkGetDeviceProcAddr( device, "vkGetSwapchainImagesKHR" ) ); + vkAcquireNextImageKHR = PFN_vkAcquireNextImageKHR( vkGetDeviceProcAddr( device, "vkAcquireNextImageKHR" ) ); + vkQueuePresentKHR = PFN_vkQueuePresentKHR( vkGetDeviceProcAddr( device, "vkQueuePresentKHR" ) ); + vkGetDeviceGroupPresentCapabilitiesKHR = + PFN_vkGetDeviceGroupPresentCapabilitiesKHR( vkGetDeviceProcAddr( device, "vkGetDeviceGroupPresentCapabilitiesKHR" ) ); + vkGetDeviceGroupSurfacePresentModesKHR = + PFN_vkGetDeviceGroupSurfacePresentModesKHR( vkGetDeviceProcAddr( device, "vkGetDeviceGroupSurfacePresentModesKHR" ) ); + vkAcquireNextImage2KHR = PFN_vkAcquireNextImage2KHR( vkGetDeviceProcAddr( device, "vkAcquireNextImage2KHR" ) ); -# if defined( VK_USE_PLATFORM_XLIB_KHR ) - //=== VK_KHR_xlib_surface === - PFN_vkCreateXlibSurfaceKHR vkCreateXlibSurfaceKHR = 0; - PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR vkGetPhysicalDeviceXlibPresentationSupportKHR = 0; -# else - PFN_dummy vkCreateXlibSurfaceKHR_placeholder = 0; - PFN_dummy vkGetPhysicalDeviceXlibPresentationSupportKHR_placeholder = 0; -# endif /*VK_USE_PLATFORM_XLIB_KHR*/ + //=== VK_KHR_display_swapchain === + vkCreateSharedSwapchainsKHR = PFN_vkCreateSharedSwapchainsKHR( vkGetDeviceProcAddr( device, "vkCreateSharedSwapchainsKHR" ) ); -# if defined( VK_USE_PLATFORM_XCB_KHR ) - //=== VK_KHR_xcb_surface === - PFN_vkCreateXcbSurfaceKHR vkCreateXcbSurfaceKHR = 0; - PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR vkGetPhysicalDeviceXcbPresentationSupportKHR = 0; -# else - PFN_dummy vkCreateXcbSurfaceKHR_placeholder = 0; - PFN_dummy vkGetPhysicalDeviceXcbPresentationSupportKHR_placeholder = 0; -# endif /*VK_USE_PLATFORM_XCB_KHR*/ + //=== VK_EXT_debug_marker === + vkDebugMarkerSetObjectTagEXT = PFN_vkDebugMarkerSetObjectTagEXT( vkGetDeviceProcAddr( device, "vkDebugMarkerSetObjectTagEXT" ) ); + vkDebugMarkerSetObjectNameEXT = PFN_vkDebugMarkerSetObjectNameEXT( vkGetDeviceProcAddr( device, "vkDebugMarkerSetObjectNameEXT" ) ); + vkCmdDebugMarkerBeginEXT = PFN_vkCmdDebugMarkerBeginEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerBeginEXT" ) ); + vkCmdDebugMarkerEndEXT = PFN_vkCmdDebugMarkerEndEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerEndEXT" ) ); + vkCmdDebugMarkerInsertEXT = PFN_vkCmdDebugMarkerInsertEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerInsertEXT" ) ); -# if defined( VK_USE_PLATFORM_WAYLAND_KHR ) - //=== VK_KHR_wayland_surface === - PFN_vkCreateWaylandSurfaceKHR vkCreateWaylandSurfaceKHR = 0; - PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR vkGetPhysicalDeviceWaylandPresentationSupportKHR = 0; -# else - PFN_dummy vkCreateWaylandSurfaceKHR_placeholder = 0; - PFN_dummy vkGetPhysicalDeviceWaylandPresentationSupportKHR_placeholder = 0; -# endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ + //=== VK_KHR_video_queue === + vkCreateVideoSessionKHR = PFN_vkCreateVideoSessionKHR( vkGetDeviceProcAddr( device, "vkCreateVideoSessionKHR" ) ); + vkDestroyVideoSessionKHR = PFN_vkDestroyVideoSessionKHR( vkGetDeviceProcAddr( device, "vkDestroyVideoSessionKHR" ) ); + vkGetVideoSessionMemoryRequirementsKHR = + PFN_vkGetVideoSessionMemoryRequirementsKHR( vkGetDeviceProcAddr( device, "vkGetVideoSessionMemoryRequirementsKHR" ) ); + vkBindVideoSessionMemoryKHR = PFN_vkBindVideoSessionMemoryKHR( vkGetDeviceProcAddr( device, "vkBindVideoSessionMemoryKHR" ) ); + vkCreateVideoSessionParametersKHR = PFN_vkCreateVideoSessionParametersKHR( vkGetDeviceProcAddr( device, "vkCreateVideoSessionParametersKHR" ) ); + vkUpdateVideoSessionParametersKHR = 
PFN_vkUpdateVideoSessionParametersKHR( vkGetDeviceProcAddr( device, "vkUpdateVideoSessionParametersKHR" ) ); + vkDestroyVideoSessionParametersKHR = PFN_vkDestroyVideoSessionParametersKHR( vkGetDeviceProcAddr( device, "vkDestroyVideoSessionParametersKHR" ) ); + vkCmdBeginVideoCodingKHR = PFN_vkCmdBeginVideoCodingKHR( vkGetDeviceProcAddr( device, "vkCmdBeginVideoCodingKHR" ) ); + vkCmdEndVideoCodingKHR = PFN_vkCmdEndVideoCodingKHR( vkGetDeviceProcAddr( device, "vkCmdEndVideoCodingKHR" ) ); + vkCmdControlVideoCodingKHR = PFN_vkCmdControlVideoCodingKHR( vkGetDeviceProcAddr( device, "vkCmdControlVideoCodingKHR" ) ); + + //=== VK_KHR_video_decode_queue === + vkCmdDecodeVideoKHR = PFN_vkCmdDecodeVideoKHR( vkGetDeviceProcAddr( device, "vkCmdDecodeVideoKHR" ) ); + + //=== VK_EXT_transform_feedback === + vkCmdBindTransformFeedbackBuffersEXT = + PFN_vkCmdBindTransformFeedbackBuffersEXT( vkGetDeviceProcAddr( device, "vkCmdBindTransformFeedbackBuffersEXT" ) ); + vkCmdBeginTransformFeedbackEXT = PFN_vkCmdBeginTransformFeedbackEXT( vkGetDeviceProcAddr( device, "vkCmdBeginTransformFeedbackEXT" ) ); + vkCmdEndTransformFeedbackEXT = PFN_vkCmdEndTransformFeedbackEXT( vkGetDeviceProcAddr( device, "vkCmdEndTransformFeedbackEXT" ) ); + vkCmdBeginQueryIndexedEXT = PFN_vkCmdBeginQueryIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdBeginQueryIndexedEXT" ) ); + vkCmdEndQueryIndexedEXT = PFN_vkCmdEndQueryIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdEndQueryIndexedEXT" ) ); + vkCmdDrawIndirectByteCountEXT = PFN_vkCmdDrawIndirectByteCountEXT( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectByteCountEXT" ) ); + + //=== VK_NVX_binary_import === + vkCreateCuModuleNVX = PFN_vkCreateCuModuleNVX( vkGetDeviceProcAddr( device, "vkCreateCuModuleNVX" ) ); + vkCreateCuFunctionNVX = PFN_vkCreateCuFunctionNVX( vkGetDeviceProcAddr( device, "vkCreateCuFunctionNVX" ) ); + vkDestroyCuModuleNVX = PFN_vkDestroyCuModuleNVX( vkGetDeviceProcAddr( device, "vkDestroyCuModuleNVX" ) ); + vkDestroyCuFunctionNVX = PFN_vkDestroyCuFunctionNVX( vkGetDeviceProcAddr( device, "vkDestroyCuFunctionNVX" ) ); + vkCmdCuLaunchKernelNVX = PFN_vkCmdCuLaunchKernelNVX( vkGetDeviceProcAddr( device, "vkCmdCuLaunchKernelNVX" ) ); + + //=== VK_NVX_image_view_handle === + vkGetImageViewHandleNVX = PFN_vkGetImageViewHandleNVX( vkGetDeviceProcAddr( device, "vkGetImageViewHandleNVX" ) ); + vkGetImageViewAddressNVX = PFN_vkGetImageViewAddressNVX( vkGetDeviceProcAddr( device, "vkGetImageViewAddressNVX" ) ); + + //=== VK_AMD_draw_indirect_count === + vkCmdDrawIndirectCountAMD = PFN_vkCmdDrawIndirectCountAMD( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCountAMD" ) ); + if ( !vkCmdDrawIndirectCount ) + vkCmdDrawIndirectCount = vkCmdDrawIndirectCountAMD; + vkCmdDrawIndexedIndirectCountAMD = PFN_vkCmdDrawIndexedIndirectCountAMD( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCountAMD" ) ); + if ( !vkCmdDrawIndexedIndirectCount ) + vkCmdDrawIndexedIndirectCount = vkCmdDrawIndexedIndirectCountAMD; + + //=== VK_AMD_shader_info === + vkGetShaderInfoAMD = PFN_vkGetShaderInfoAMD( vkGetDeviceProcAddr( device, "vkGetShaderInfoAMD" ) ); + + //=== VK_KHR_dynamic_rendering === + vkCmdBeginRenderingKHR = PFN_vkCmdBeginRenderingKHR( vkGetDeviceProcAddr( device, "vkCmdBeginRenderingKHR" ) ); + if ( !vkCmdBeginRendering ) + vkCmdBeginRendering = vkCmdBeginRenderingKHR; + vkCmdEndRenderingKHR = PFN_vkCmdEndRenderingKHR( vkGetDeviceProcAddr( device, "vkCmdEndRenderingKHR" ) ); + if ( !vkCmdEndRendering ) + vkCmdEndRendering = vkCmdEndRenderingKHR; + +# if defined( 
VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_NV_external_memory_win32 === + vkGetMemoryWin32HandleNV = PFN_vkGetMemoryWin32HandleNV( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandleNV" ) ); +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_device_group === + vkGetDeviceGroupPeerMemoryFeaturesKHR = + PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR( vkGetDeviceProcAddr( device, "vkGetDeviceGroupPeerMemoryFeaturesKHR" ) ); + if ( !vkGetDeviceGroupPeerMemoryFeatures ) + vkGetDeviceGroupPeerMemoryFeatures = vkGetDeviceGroupPeerMemoryFeaturesKHR; + vkCmdSetDeviceMaskKHR = PFN_vkCmdSetDeviceMaskKHR( vkGetDeviceProcAddr( device, "vkCmdSetDeviceMaskKHR" ) ); + if ( !vkCmdSetDeviceMask ) + vkCmdSetDeviceMask = vkCmdSetDeviceMaskKHR; + vkCmdDispatchBaseKHR = PFN_vkCmdDispatchBaseKHR( vkGetDeviceProcAddr( device, "vkCmdDispatchBaseKHR" ) ); + if ( !vkCmdDispatchBase ) + vkCmdDispatchBase = vkCmdDispatchBaseKHR; + + //=== VK_KHR_maintenance1 === + vkTrimCommandPoolKHR = PFN_vkTrimCommandPoolKHR( vkGetDeviceProcAddr( device, "vkTrimCommandPoolKHR" ) ); + if ( !vkTrimCommandPool ) + vkTrimCommandPool = vkTrimCommandPoolKHR; + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_memory_win32 === + vkGetMemoryWin32HandleKHR = PFN_vkGetMemoryWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandleKHR" ) ); + vkGetMemoryWin32HandlePropertiesKHR = PFN_vkGetMemoryWin32HandlePropertiesKHR( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandlePropertiesKHR" ) ); +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_memory_fd === + vkGetMemoryFdKHR = PFN_vkGetMemoryFdKHR( vkGetDeviceProcAddr( device, "vkGetMemoryFdKHR" ) ); + vkGetMemoryFdPropertiesKHR = PFN_vkGetMemoryFdPropertiesKHR( vkGetDeviceProcAddr( device, "vkGetMemoryFdPropertiesKHR" ) ); + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_semaphore_win32 === + vkImportSemaphoreWin32HandleKHR = PFN_vkImportSemaphoreWin32HandleKHR( vkGetDeviceProcAddr( device, "vkImportSemaphoreWin32HandleKHR" ) ); + vkGetSemaphoreWin32HandleKHR = PFN_vkGetSemaphoreWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreWin32HandleKHR" ) ); +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_semaphore_fd === + vkImportSemaphoreFdKHR = PFN_vkImportSemaphoreFdKHR( vkGetDeviceProcAddr( device, "vkImportSemaphoreFdKHR" ) ); + vkGetSemaphoreFdKHR = PFN_vkGetSemaphoreFdKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreFdKHR" ) ); + + //=== VK_KHR_push_descriptor === + vkCmdPushDescriptorSetKHR = PFN_vkCmdPushDescriptorSetKHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetKHR" ) ); + vkCmdPushDescriptorSetWithTemplateKHR = + PFN_vkCmdPushDescriptorSetWithTemplateKHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetWithTemplateKHR" ) ); + + //=== VK_EXT_conditional_rendering === + vkCmdBeginConditionalRenderingEXT = PFN_vkCmdBeginConditionalRenderingEXT( vkGetDeviceProcAddr( device, "vkCmdBeginConditionalRenderingEXT" ) ); + vkCmdEndConditionalRenderingEXT = PFN_vkCmdEndConditionalRenderingEXT( vkGetDeviceProcAddr( device, "vkCmdEndConditionalRenderingEXT" ) ); + + //=== VK_KHR_descriptor_update_template === + vkCreateDescriptorUpdateTemplateKHR = PFN_vkCreateDescriptorUpdateTemplateKHR( vkGetDeviceProcAddr( device, "vkCreateDescriptorUpdateTemplateKHR" ) ); + if ( !vkCreateDescriptorUpdateTemplate ) + vkCreateDescriptorUpdateTemplate = vkCreateDescriptorUpdateTemplateKHR; + vkDestroyDescriptorUpdateTemplateKHR = + PFN_vkDestroyDescriptorUpdateTemplateKHR( vkGetDeviceProcAddr( device, 
"vkDestroyDescriptorUpdateTemplateKHR" ) ); + if ( !vkDestroyDescriptorUpdateTemplate ) + vkDestroyDescriptorUpdateTemplate = vkDestroyDescriptorUpdateTemplateKHR; + vkUpdateDescriptorSetWithTemplateKHR = + PFN_vkUpdateDescriptorSetWithTemplateKHR( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSetWithTemplateKHR" ) ); + if ( !vkUpdateDescriptorSetWithTemplate ) + vkUpdateDescriptorSetWithTemplate = vkUpdateDescriptorSetWithTemplateKHR; + + //=== VK_NV_clip_space_w_scaling === + vkCmdSetViewportWScalingNV = PFN_vkCmdSetViewportWScalingNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportWScalingNV" ) ); + + //=== VK_EXT_display_control === + vkDisplayPowerControlEXT = PFN_vkDisplayPowerControlEXT( vkGetDeviceProcAddr( device, "vkDisplayPowerControlEXT" ) ); + vkRegisterDeviceEventEXT = PFN_vkRegisterDeviceEventEXT( vkGetDeviceProcAddr( device, "vkRegisterDeviceEventEXT" ) ); + vkRegisterDisplayEventEXT = PFN_vkRegisterDisplayEventEXT( vkGetDeviceProcAddr( device, "vkRegisterDisplayEventEXT" ) ); + vkGetSwapchainCounterEXT = PFN_vkGetSwapchainCounterEXT( vkGetDeviceProcAddr( device, "vkGetSwapchainCounterEXT" ) ); + + //=== VK_GOOGLE_display_timing === + vkGetRefreshCycleDurationGOOGLE = PFN_vkGetRefreshCycleDurationGOOGLE( vkGetDeviceProcAddr( device, "vkGetRefreshCycleDurationGOOGLE" ) ); + vkGetPastPresentationTimingGOOGLE = PFN_vkGetPastPresentationTimingGOOGLE( vkGetDeviceProcAddr( device, "vkGetPastPresentationTimingGOOGLE" ) ); + + //=== VK_EXT_discard_rectangles === + vkCmdSetDiscardRectangleEXT = PFN_vkCmdSetDiscardRectangleEXT( vkGetDeviceProcAddr( device, "vkCmdSetDiscardRectangleEXT" ) ); + vkCmdSetDiscardRectangleEnableEXT = PFN_vkCmdSetDiscardRectangleEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDiscardRectangleEnableEXT" ) ); + vkCmdSetDiscardRectangleModeEXT = PFN_vkCmdSetDiscardRectangleModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetDiscardRectangleModeEXT" ) ); + + //=== VK_EXT_hdr_metadata === + vkSetHdrMetadataEXT = PFN_vkSetHdrMetadataEXT( vkGetDeviceProcAddr( device, "vkSetHdrMetadataEXT" ) ); + + //=== VK_KHR_create_renderpass2 === + vkCreateRenderPass2KHR = PFN_vkCreateRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCreateRenderPass2KHR" ) ); + if ( !vkCreateRenderPass2 ) + vkCreateRenderPass2 = vkCreateRenderPass2KHR; + vkCmdBeginRenderPass2KHR = PFN_vkCmdBeginRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass2KHR" ) ); + if ( !vkCmdBeginRenderPass2 ) + vkCmdBeginRenderPass2 = vkCmdBeginRenderPass2KHR; + vkCmdNextSubpass2KHR = PFN_vkCmdNextSubpass2KHR( vkGetDeviceProcAddr( device, "vkCmdNextSubpass2KHR" ) ); + if ( !vkCmdNextSubpass2 ) + vkCmdNextSubpass2 = vkCmdNextSubpass2KHR; + vkCmdEndRenderPass2KHR = PFN_vkCmdEndRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass2KHR" ) ); + if ( !vkCmdEndRenderPass2 ) + vkCmdEndRenderPass2 = vkCmdEndRenderPass2KHR; + + //=== VK_KHR_shared_presentable_image === + vkGetSwapchainStatusKHR = PFN_vkGetSwapchainStatusKHR( vkGetDeviceProcAddr( device, "vkGetSwapchainStatusKHR" ) ); + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_fence_win32 === + vkImportFenceWin32HandleKHR = PFN_vkImportFenceWin32HandleKHR( vkGetDeviceProcAddr( device, "vkImportFenceWin32HandleKHR" ) ); + vkGetFenceWin32HandleKHR = PFN_vkGetFenceWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetFenceWin32HandleKHR" ) ); +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_fence_fd === + vkImportFenceFdKHR = PFN_vkImportFenceFdKHR( vkGetDeviceProcAddr( device, "vkImportFenceFdKHR" ) ); + vkGetFenceFdKHR 
= PFN_vkGetFenceFdKHR( vkGetDeviceProcAddr( device, "vkGetFenceFdKHR" ) ); + + //=== VK_KHR_performance_query === + vkAcquireProfilingLockKHR = PFN_vkAcquireProfilingLockKHR( vkGetDeviceProcAddr( device, "vkAcquireProfilingLockKHR" ) ); + vkReleaseProfilingLockKHR = PFN_vkReleaseProfilingLockKHR( vkGetDeviceProcAddr( device, "vkReleaseProfilingLockKHR" ) ); + + //=== VK_EXT_debug_utils === + vkSetDebugUtilsObjectNameEXT = PFN_vkSetDebugUtilsObjectNameEXT( vkGetDeviceProcAddr( device, "vkSetDebugUtilsObjectNameEXT" ) ); + vkSetDebugUtilsObjectTagEXT = PFN_vkSetDebugUtilsObjectTagEXT( vkGetDeviceProcAddr( device, "vkSetDebugUtilsObjectTagEXT" ) ); + vkQueueBeginDebugUtilsLabelEXT = PFN_vkQueueBeginDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueBeginDebugUtilsLabelEXT" ) ); + vkQueueEndDebugUtilsLabelEXT = PFN_vkQueueEndDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueEndDebugUtilsLabelEXT" ) ); + vkQueueInsertDebugUtilsLabelEXT = PFN_vkQueueInsertDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueInsertDebugUtilsLabelEXT" ) ); + vkCmdBeginDebugUtilsLabelEXT = PFN_vkCmdBeginDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdBeginDebugUtilsLabelEXT" ) ); + vkCmdEndDebugUtilsLabelEXT = PFN_vkCmdEndDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdEndDebugUtilsLabelEXT" ) ); + vkCmdInsertDebugUtilsLabelEXT = PFN_vkCmdInsertDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdInsertDebugUtilsLabelEXT" ) ); # if defined( VK_USE_PLATFORM_ANDROID_KHR ) - //=== VK_KHR_android_surface === - PFN_vkCreateAndroidSurfaceKHR vkCreateAndroidSurfaceKHR = 0; -# else - PFN_dummy vkCreateAndroidSurfaceKHR_placeholder = 0; + //=== VK_ANDROID_external_memory_android_hardware_buffer === + vkGetAndroidHardwareBufferPropertiesANDROID = + PFN_vkGetAndroidHardwareBufferPropertiesANDROID( vkGetDeviceProcAddr( device, "vkGetAndroidHardwareBufferPropertiesANDROID" ) ); + vkGetMemoryAndroidHardwareBufferANDROID = + PFN_vkGetMemoryAndroidHardwareBufferANDROID( vkGetDeviceProcAddr( device, "vkGetMemoryAndroidHardwareBufferANDROID" ) ); # endif /*VK_USE_PLATFORM_ANDROID_KHR*/ +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_AMDX_shader_enqueue === + vkCreateExecutionGraphPipelinesAMDX = PFN_vkCreateExecutionGraphPipelinesAMDX( vkGetDeviceProcAddr( device, "vkCreateExecutionGraphPipelinesAMDX" ) ); + vkGetExecutionGraphPipelineScratchSizeAMDX = + PFN_vkGetExecutionGraphPipelineScratchSizeAMDX( vkGetDeviceProcAddr( device, "vkGetExecutionGraphPipelineScratchSizeAMDX" ) ); + vkGetExecutionGraphPipelineNodeIndexAMDX = + PFN_vkGetExecutionGraphPipelineNodeIndexAMDX( vkGetDeviceProcAddr( device, "vkGetExecutionGraphPipelineNodeIndexAMDX" ) ); + vkCmdInitializeGraphScratchMemoryAMDX = + PFN_vkCmdInitializeGraphScratchMemoryAMDX( vkGetDeviceProcAddr( device, "vkCmdInitializeGraphScratchMemoryAMDX" ) ); + vkCmdDispatchGraphAMDX = PFN_vkCmdDispatchGraphAMDX( vkGetDeviceProcAddr( device, "vkCmdDispatchGraphAMDX" ) ); + vkCmdDispatchGraphIndirectAMDX = PFN_vkCmdDispatchGraphIndirectAMDX( vkGetDeviceProcAddr( device, "vkCmdDispatchGraphIndirectAMDX" ) ); + vkCmdDispatchGraphIndirectCountAMDX = PFN_vkCmdDispatchGraphIndirectCountAMDX( vkGetDeviceProcAddr( device, "vkCmdDispatchGraphIndirectCountAMDX" ) ); +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_EXT_sample_locations === + vkCmdSetSampleLocationsEXT = PFN_vkCmdSetSampleLocationsEXT( vkGetDeviceProcAddr( device, "vkCmdSetSampleLocationsEXT" ) ); + + //=== VK_KHR_get_memory_requirements2 === + vkGetImageMemoryRequirements2KHR = 
PFN_vkGetImageMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements2KHR" ) ); + if ( !vkGetImageMemoryRequirements2 ) + vkGetImageMemoryRequirements2 = vkGetImageMemoryRequirements2KHR; + vkGetBufferMemoryRequirements2KHR = PFN_vkGetBufferMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements2KHR" ) ); + if ( !vkGetBufferMemoryRequirements2 ) + vkGetBufferMemoryRequirements2 = vkGetBufferMemoryRequirements2KHR; + vkGetImageSparseMemoryRequirements2KHR = + PFN_vkGetImageSparseMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements2KHR" ) ); + if ( !vkGetImageSparseMemoryRequirements2 ) + vkGetImageSparseMemoryRequirements2 = vkGetImageSparseMemoryRequirements2KHR; + + //=== VK_KHR_acceleration_structure === + vkCreateAccelerationStructureKHR = PFN_vkCreateAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCreateAccelerationStructureKHR" ) ); + vkDestroyAccelerationStructureKHR = PFN_vkDestroyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkDestroyAccelerationStructureKHR" ) ); + vkCmdBuildAccelerationStructuresKHR = PFN_vkCmdBuildAccelerationStructuresKHR( vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructuresKHR" ) ); + vkCmdBuildAccelerationStructuresIndirectKHR = + PFN_vkCmdBuildAccelerationStructuresIndirectKHR( vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructuresIndirectKHR" ) ); + vkBuildAccelerationStructuresKHR = PFN_vkBuildAccelerationStructuresKHR( vkGetDeviceProcAddr( device, "vkBuildAccelerationStructuresKHR" ) ); + vkCopyAccelerationStructureKHR = PFN_vkCopyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCopyAccelerationStructureKHR" ) ); + vkCopyAccelerationStructureToMemoryKHR = + PFN_vkCopyAccelerationStructureToMemoryKHR( vkGetDeviceProcAddr( device, "vkCopyAccelerationStructureToMemoryKHR" ) ); + vkCopyMemoryToAccelerationStructureKHR = + PFN_vkCopyMemoryToAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCopyMemoryToAccelerationStructureKHR" ) ); + vkWriteAccelerationStructuresPropertiesKHR = + PFN_vkWriteAccelerationStructuresPropertiesKHR( vkGetDeviceProcAddr( device, "vkWriteAccelerationStructuresPropertiesKHR" ) ); + vkCmdCopyAccelerationStructureKHR = PFN_vkCmdCopyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureKHR" ) ); + vkCmdCopyAccelerationStructureToMemoryKHR = + PFN_vkCmdCopyAccelerationStructureToMemoryKHR( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureToMemoryKHR" ) ); + vkCmdCopyMemoryToAccelerationStructureKHR = + PFN_vkCmdCopyMemoryToAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCmdCopyMemoryToAccelerationStructureKHR" ) ); + vkGetAccelerationStructureDeviceAddressKHR = + PFN_vkGetAccelerationStructureDeviceAddressKHR( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureDeviceAddressKHR" ) ); + vkCmdWriteAccelerationStructuresPropertiesKHR = + PFN_vkCmdWriteAccelerationStructuresPropertiesKHR( vkGetDeviceProcAddr( device, "vkCmdWriteAccelerationStructuresPropertiesKHR" ) ); + vkGetDeviceAccelerationStructureCompatibilityKHR = + PFN_vkGetDeviceAccelerationStructureCompatibilityKHR( vkGetDeviceProcAddr( device, "vkGetDeviceAccelerationStructureCompatibilityKHR" ) ); + vkGetAccelerationStructureBuildSizesKHR = + PFN_vkGetAccelerationStructureBuildSizesKHR( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureBuildSizesKHR" ) ); + + //=== VK_KHR_ray_tracing_pipeline === + vkCmdTraceRaysKHR = PFN_vkCmdTraceRaysKHR( vkGetDeviceProcAddr( device, 
"vkCmdTraceRaysKHR" ) ); + vkCreateRayTracingPipelinesKHR = PFN_vkCreateRayTracingPipelinesKHR( vkGetDeviceProcAddr( device, "vkCreateRayTracingPipelinesKHR" ) ); + vkGetRayTracingShaderGroupHandlesKHR = + PFN_vkGetRayTracingShaderGroupHandlesKHR( vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupHandlesKHR" ) ); + vkGetRayTracingCaptureReplayShaderGroupHandlesKHR = + PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( vkGetDeviceProcAddr( device, "vkGetRayTracingCaptureReplayShaderGroupHandlesKHR" ) ); + vkCmdTraceRaysIndirectKHR = PFN_vkCmdTraceRaysIndirectKHR( vkGetDeviceProcAddr( device, "vkCmdTraceRaysIndirectKHR" ) ); + vkGetRayTracingShaderGroupStackSizeKHR = + PFN_vkGetRayTracingShaderGroupStackSizeKHR( vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupStackSizeKHR" ) ); + vkCmdSetRayTracingPipelineStackSizeKHR = + PFN_vkCmdSetRayTracingPipelineStackSizeKHR( vkGetDeviceProcAddr( device, "vkCmdSetRayTracingPipelineStackSizeKHR" ) ); + + //=== VK_KHR_sampler_ycbcr_conversion === + vkCreateSamplerYcbcrConversionKHR = PFN_vkCreateSamplerYcbcrConversionKHR( vkGetDeviceProcAddr( device, "vkCreateSamplerYcbcrConversionKHR" ) ); + if ( !vkCreateSamplerYcbcrConversion ) + vkCreateSamplerYcbcrConversion = vkCreateSamplerYcbcrConversionKHR; + vkDestroySamplerYcbcrConversionKHR = PFN_vkDestroySamplerYcbcrConversionKHR( vkGetDeviceProcAddr( device, "vkDestroySamplerYcbcrConversionKHR" ) ); + if ( !vkDestroySamplerYcbcrConversion ) + vkDestroySamplerYcbcrConversion = vkDestroySamplerYcbcrConversionKHR; + + //=== VK_KHR_bind_memory2 === + vkBindBufferMemory2KHR = PFN_vkBindBufferMemory2KHR( vkGetDeviceProcAddr( device, "vkBindBufferMemory2KHR" ) ); + if ( !vkBindBufferMemory2 ) + vkBindBufferMemory2 = vkBindBufferMemory2KHR; + vkBindImageMemory2KHR = PFN_vkBindImageMemory2KHR( vkGetDeviceProcAddr( device, "vkBindImageMemory2KHR" ) ); + if ( !vkBindImageMemory2 ) + vkBindImageMemory2 = vkBindImageMemory2KHR; + + //=== VK_EXT_image_drm_format_modifier === + vkGetImageDrmFormatModifierPropertiesEXT = + PFN_vkGetImageDrmFormatModifierPropertiesEXT( vkGetDeviceProcAddr( device, "vkGetImageDrmFormatModifierPropertiesEXT" ) ); + + //=== VK_EXT_validation_cache === + vkCreateValidationCacheEXT = PFN_vkCreateValidationCacheEXT( vkGetDeviceProcAddr( device, "vkCreateValidationCacheEXT" ) ); + vkDestroyValidationCacheEXT = PFN_vkDestroyValidationCacheEXT( vkGetDeviceProcAddr( device, "vkDestroyValidationCacheEXT" ) ); + vkMergeValidationCachesEXT = PFN_vkMergeValidationCachesEXT( vkGetDeviceProcAddr( device, "vkMergeValidationCachesEXT" ) ); + vkGetValidationCacheDataEXT = PFN_vkGetValidationCacheDataEXT( vkGetDeviceProcAddr( device, "vkGetValidationCacheDataEXT" ) ); + + //=== VK_NV_shading_rate_image === + vkCmdBindShadingRateImageNV = PFN_vkCmdBindShadingRateImageNV( vkGetDeviceProcAddr( device, "vkCmdBindShadingRateImageNV" ) ); + vkCmdSetViewportShadingRatePaletteNV = + PFN_vkCmdSetViewportShadingRatePaletteNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportShadingRatePaletteNV" ) ); + vkCmdSetCoarseSampleOrderNV = PFN_vkCmdSetCoarseSampleOrderNV( vkGetDeviceProcAddr( device, "vkCmdSetCoarseSampleOrderNV" ) ); + + //=== VK_NV_ray_tracing === + vkCreateAccelerationStructureNV = PFN_vkCreateAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCreateAccelerationStructureNV" ) ); + vkDestroyAccelerationStructureNV = PFN_vkDestroyAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkDestroyAccelerationStructureNV" ) ); + vkGetAccelerationStructureMemoryRequirementsNV = + 
PFN_vkGetAccelerationStructureMemoryRequirementsNV( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureMemoryRequirementsNV" ) ); + vkBindAccelerationStructureMemoryNV = PFN_vkBindAccelerationStructureMemoryNV( vkGetDeviceProcAddr( device, "vkBindAccelerationStructureMemoryNV" ) ); + vkCmdBuildAccelerationStructureNV = PFN_vkCmdBuildAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructureNV" ) ); + vkCmdCopyAccelerationStructureNV = PFN_vkCmdCopyAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureNV" ) ); + vkCmdTraceRaysNV = PFN_vkCmdTraceRaysNV( vkGetDeviceProcAddr( device, "vkCmdTraceRaysNV" ) ); + vkCreateRayTracingPipelinesNV = PFN_vkCreateRayTracingPipelinesNV( vkGetDeviceProcAddr( device, "vkCreateRayTracingPipelinesNV" ) ); + vkGetRayTracingShaderGroupHandlesNV = PFN_vkGetRayTracingShaderGroupHandlesNV( vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupHandlesNV" ) ); + if ( !vkGetRayTracingShaderGroupHandlesKHR ) + vkGetRayTracingShaderGroupHandlesKHR = vkGetRayTracingShaderGroupHandlesNV; + vkGetAccelerationStructureHandleNV = PFN_vkGetAccelerationStructureHandleNV( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureHandleNV" ) ); + vkCmdWriteAccelerationStructuresPropertiesNV = + PFN_vkCmdWriteAccelerationStructuresPropertiesNV( vkGetDeviceProcAddr( device, "vkCmdWriteAccelerationStructuresPropertiesNV" ) ); + vkCompileDeferredNV = PFN_vkCompileDeferredNV( vkGetDeviceProcAddr( device, "vkCompileDeferredNV" ) ); + + //=== VK_KHR_maintenance3 === + vkGetDescriptorSetLayoutSupportKHR = PFN_vkGetDescriptorSetLayoutSupportKHR( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutSupportKHR" ) ); + if ( !vkGetDescriptorSetLayoutSupport ) + vkGetDescriptorSetLayoutSupport = vkGetDescriptorSetLayoutSupportKHR; + + //=== VK_KHR_draw_indirect_count === + vkCmdDrawIndirectCountKHR = PFN_vkCmdDrawIndirectCountKHR( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCountKHR" ) ); + if ( !vkCmdDrawIndirectCount ) + vkCmdDrawIndirectCount = vkCmdDrawIndirectCountKHR; + vkCmdDrawIndexedIndirectCountKHR = PFN_vkCmdDrawIndexedIndirectCountKHR( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCountKHR" ) ); + if ( !vkCmdDrawIndexedIndirectCount ) + vkCmdDrawIndexedIndirectCount = vkCmdDrawIndexedIndirectCountKHR; + + //=== VK_EXT_external_memory_host === + vkGetMemoryHostPointerPropertiesEXT = PFN_vkGetMemoryHostPointerPropertiesEXT( vkGetDeviceProcAddr( device, "vkGetMemoryHostPointerPropertiesEXT" ) ); + + //=== VK_AMD_buffer_marker === + vkCmdWriteBufferMarkerAMD = PFN_vkCmdWriteBufferMarkerAMD( vkGetDeviceProcAddr( device, "vkCmdWriteBufferMarkerAMD" ) ); + vkCmdWriteBufferMarker2AMD = PFN_vkCmdWriteBufferMarker2AMD( vkGetDeviceProcAddr( device, "vkCmdWriteBufferMarker2AMD" ) ); + + //=== VK_EXT_calibrated_timestamps === + vkGetCalibratedTimestampsEXT = PFN_vkGetCalibratedTimestampsEXT( vkGetDeviceProcAddr( device, "vkGetCalibratedTimestampsEXT" ) ); + if ( !vkGetCalibratedTimestampsKHR ) + vkGetCalibratedTimestampsKHR = vkGetCalibratedTimestampsEXT; + + //=== VK_NV_mesh_shader === + vkCmdDrawMeshTasksNV = PFN_vkCmdDrawMeshTasksNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksNV" ) ); + vkCmdDrawMeshTasksIndirectNV = PFN_vkCmdDrawMeshTasksIndirectNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectNV" ) ); + vkCmdDrawMeshTasksIndirectCountNV = PFN_vkCmdDrawMeshTasksIndirectCountNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectCountNV" ) ); + + //=== VK_NV_scissor_exclusive === + 
vkCmdSetExclusiveScissorEnableNV = PFN_vkCmdSetExclusiveScissorEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetExclusiveScissorEnableNV" ) ); + vkCmdSetExclusiveScissorNV = PFN_vkCmdSetExclusiveScissorNV( vkGetDeviceProcAddr( device, "vkCmdSetExclusiveScissorNV" ) ); + + //=== VK_NV_device_diagnostic_checkpoints === + vkCmdSetCheckpointNV = PFN_vkCmdSetCheckpointNV( vkGetDeviceProcAddr( device, "vkCmdSetCheckpointNV" ) ); + vkGetQueueCheckpointDataNV = PFN_vkGetQueueCheckpointDataNV( vkGetDeviceProcAddr( device, "vkGetQueueCheckpointDataNV" ) ); + vkGetQueueCheckpointData2NV = PFN_vkGetQueueCheckpointData2NV( vkGetDeviceProcAddr( device, "vkGetQueueCheckpointData2NV" ) ); + + //=== VK_KHR_timeline_semaphore === + vkGetSemaphoreCounterValueKHR = PFN_vkGetSemaphoreCounterValueKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreCounterValueKHR" ) ); + if ( !vkGetSemaphoreCounterValue ) + vkGetSemaphoreCounterValue = vkGetSemaphoreCounterValueKHR; + vkWaitSemaphoresKHR = PFN_vkWaitSemaphoresKHR( vkGetDeviceProcAddr( device, "vkWaitSemaphoresKHR" ) ); + if ( !vkWaitSemaphores ) + vkWaitSemaphores = vkWaitSemaphoresKHR; + vkSignalSemaphoreKHR = PFN_vkSignalSemaphoreKHR( vkGetDeviceProcAddr( device, "vkSignalSemaphoreKHR" ) ); + if ( !vkSignalSemaphore ) + vkSignalSemaphore = vkSignalSemaphoreKHR; + + //=== VK_INTEL_performance_query === + vkInitializePerformanceApiINTEL = PFN_vkInitializePerformanceApiINTEL( vkGetDeviceProcAddr( device, "vkInitializePerformanceApiINTEL" ) ); + vkUninitializePerformanceApiINTEL = PFN_vkUninitializePerformanceApiINTEL( vkGetDeviceProcAddr( device, "vkUninitializePerformanceApiINTEL" ) ); + vkCmdSetPerformanceMarkerINTEL = PFN_vkCmdSetPerformanceMarkerINTEL( vkGetDeviceProcAddr( device, "vkCmdSetPerformanceMarkerINTEL" ) ); + vkCmdSetPerformanceStreamMarkerINTEL = + PFN_vkCmdSetPerformanceStreamMarkerINTEL( vkGetDeviceProcAddr( device, "vkCmdSetPerformanceStreamMarkerINTEL" ) ); + vkCmdSetPerformanceOverrideINTEL = PFN_vkCmdSetPerformanceOverrideINTEL( vkGetDeviceProcAddr( device, "vkCmdSetPerformanceOverrideINTEL" ) ); + vkAcquirePerformanceConfigurationINTEL = + PFN_vkAcquirePerformanceConfigurationINTEL( vkGetDeviceProcAddr( device, "vkAcquirePerformanceConfigurationINTEL" ) ); + vkReleasePerformanceConfigurationINTEL = + PFN_vkReleasePerformanceConfigurationINTEL( vkGetDeviceProcAddr( device, "vkReleasePerformanceConfigurationINTEL" ) ); + vkQueueSetPerformanceConfigurationINTEL = + PFN_vkQueueSetPerformanceConfigurationINTEL( vkGetDeviceProcAddr( device, "vkQueueSetPerformanceConfigurationINTEL" ) ); + vkGetPerformanceParameterINTEL = PFN_vkGetPerformanceParameterINTEL( vkGetDeviceProcAddr( device, "vkGetPerformanceParameterINTEL" ) ); + + //=== VK_AMD_display_native_hdr === + vkSetLocalDimmingAMD = PFN_vkSetLocalDimmingAMD( vkGetDeviceProcAddr( device, "vkSetLocalDimmingAMD" ) ); + + //=== VK_KHR_fragment_shading_rate === + vkCmdSetFragmentShadingRateKHR = PFN_vkCmdSetFragmentShadingRateKHR( vkGetDeviceProcAddr( device, "vkCmdSetFragmentShadingRateKHR" ) ); + + //=== VK_KHR_dynamic_rendering_local_read === + vkCmdSetRenderingAttachmentLocationsKHR = + PFN_vkCmdSetRenderingAttachmentLocationsKHR( vkGetDeviceProcAddr( device, "vkCmdSetRenderingAttachmentLocationsKHR" ) ); + vkCmdSetRenderingInputAttachmentIndicesKHR = + PFN_vkCmdSetRenderingInputAttachmentIndicesKHR( vkGetDeviceProcAddr( device, "vkCmdSetRenderingInputAttachmentIndicesKHR" ) ); + + //=== VK_EXT_buffer_device_address === + vkGetBufferDeviceAddressEXT = PFN_vkGetBufferDeviceAddressEXT( 
vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddressEXT" ) ); + if ( !vkGetBufferDeviceAddress ) + vkGetBufferDeviceAddress = vkGetBufferDeviceAddressEXT; + + //=== VK_KHR_present_wait === + vkWaitForPresentKHR = PFN_vkWaitForPresentKHR( vkGetDeviceProcAddr( device, "vkWaitForPresentKHR" ) ); + # if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_KHR_win32_surface === - PFN_vkCreateWin32SurfaceKHR vkCreateWin32SurfaceKHR = 0; - PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR vkGetPhysicalDeviceWin32PresentationSupportKHR = 0; -# else - PFN_dummy vkCreateWin32SurfaceKHR_placeholder = 0; - PFN_dummy vkGetPhysicalDeviceWin32PresentationSupportKHR_placeholder = 0; + //=== VK_EXT_full_screen_exclusive === + vkAcquireFullScreenExclusiveModeEXT = PFN_vkAcquireFullScreenExclusiveModeEXT( vkGetDeviceProcAddr( device, "vkAcquireFullScreenExclusiveModeEXT" ) ); + vkReleaseFullScreenExclusiveModeEXT = PFN_vkReleaseFullScreenExclusiveModeEXT( vkGetDeviceProcAddr( device, "vkReleaseFullScreenExclusiveModeEXT" ) ); + vkGetDeviceGroupSurfacePresentModes2EXT = + PFN_vkGetDeviceGroupSurfacePresentModes2EXT( vkGetDeviceProcAddr( device, "vkGetDeviceGroupSurfacePresentModes2EXT" ) ); # endif /*VK_USE_PLATFORM_WIN32_KHR*/ - //=== VK_EXT_debug_report === - PFN_vkCreateDebugReportCallbackEXT vkCreateDebugReportCallbackEXT = 0; - PFN_vkDestroyDebugReportCallbackEXT vkDestroyDebugReportCallbackEXT = 0; - PFN_vkDebugReportMessageEXT vkDebugReportMessageEXT = 0; + //=== VK_KHR_buffer_device_address === + vkGetBufferDeviceAddressKHR = PFN_vkGetBufferDeviceAddressKHR( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddressKHR" ) ); + if ( !vkGetBufferDeviceAddress ) + vkGetBufferDeviceAddress = vkGetBufferDeviceAddressKHR; + vkGetBufferOpaqueCaptureAddressKHR = PFN_vkGetBufferOpaqueCaptureAddressKHR( vkGetDeviceProcAddr( device, "vkGetBufferOpaqueCaptureAddressKHR" ) ); + if ( !vkGetBufferOpaqueCaptureAddress ) + vkGetBufferOpaqueCaptureAddress = vkGetBufferOpaqueCaptureAddressKHR; + vkGetDeviceMemoryOpaqueCaptureAddressKHR = + PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR( vkGetDeviceProcAddr( device, "vkGetDeviceMemoryOpaqueCaptureAddressKHR" ) ); + if ( !vkGetDeviceMemoryOpaqueCaptureAddress ) + vkGetDeviceMemoryOpaqueCaptureAddress = vkGetDeviceMemoryOpaqueCaptureAddressKHR; - //=== VK_KHR_video_queue === - PFN_vkGetPhysicalDeviceVideoCapabilitiesKHR vkGetPhysicalDeviceVideoCapabilitiesKHR = 0; - PFN_vkGetPhysicalDeviceVideoFormatPropertiesKHR vkGetPhysicalDeviceVideoFormatPropertiesKHR = 0; + //=== VK_EXT_line_rasterization === + vkCmdSetLineStippleEXT = PFN_vkCmdSetLineStippleEXT( vkGetDeviceProcAddr( device, "vkCmdSetLineStippleEXT" ) ); + if ( !vkCmdSetLineStippleKHR ) + vkCmdSetLineStippleKHR = vkCmdSetLineStippleEXT; -# if defined( VK_USE_PLATFORM_GGP ) - //=== VK_GGP_stream_descriptor_surface === - PFN_vkCreateStreamDescriptorSurfaceGGP vkCreateStreamDescriptorSurfaceGGP = 0; -# else - PFN_dummy vkCreateStreamDescriptorSurfaceGGP_placeholder = 0; -# endif /*VK_USE_PLATFORM_GGP*/ + //=== VK_EXT_host_query_reset === + vkResetQueryPoolEXT = PFN_vkResetQueryPoolEXT( vkGetDeviceProcAddr( device, "vkResetQueryPoolEXT" ) ); + if ( !vkResetQueryPool ) + vkResetQueryPool = vkResetQueryPoolEXT; - //=== VK_NV_external_memory_capabilities === - PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV vkGetPhysicalDeviceExternalImageFormatPropertiesNV = 0; + //=== VK_EXT_extended_dynamic_state === + vkCmdSetCullModeEXT = PFN_vkCmdSetCullModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetCullModeEXT" ) ); + if ( 
!vkCmdSetCullMode ) + vkCmdSetCullMode = vkCmdSetCullModeEXT; + vkCmdSetFrontFaceEXT = PFN_vkCmdSetFrontFaceEXT( vkGetDeviceProcAddr( device, "vkCmdSetFrontFaceEXT" ) ); + if ( !vkCmdSetFrontFace ) + vkCmdSetFrontFace = vkCmdSetFrontFaceEXT; + vkCmdSetPrimitiveTopologyEXT = PFN_vkCmdSetPrimitiveTopologyEXT( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveTopologyEXT" ) ); + if ( !vkCmdSetPrimitiveTopology ) + vkCmdSetPrimitiveTopology = vkCmdSetPrimitiveTopologyEXT; + vkCmdSetViewportWithCountEXT = PFN_vkCmdSetViewportWithCountEXT( vkGetDeviceProcAddr( device, "vkCmdSetViewportWithCountEXT" ) ); + if ( !vkCmdSetViewportWithCount ) + vkCmdSetViewportWithCount = vkCmdSetViewportWithCountEXT; + vkCmdSetScissorWithCountEXT = PFN_vkCmdSetScissorWithCountEXT( vkGetDeviceProcAddr( device, "vkCmdSetScissorWithCountEXT" ) ); + if ( !vkCmdSetScissorWithCount ) + vkCmdSetScissorWithCount = vkCmdSetScissorWithCountEXT; + vkCmdBindVertexBuffers2EXT = PFN_vkCmdBindVertexBuffers2EXT( vkGetDeviceProcAddr( device, "vkCmdBindVertexBuffers2EXT" ) ); + if ( !vkCmdBindVertexBuffers2 ) + vkCmdBindVertexBuffers2 = vkCmdBindVertexBuffers2EXT; + vkCmdSetDepthTestEnableEXT = PFN_vkCmdSetDepthTestEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthTestEnableEXT" ) ); + if ( !vkCmdSetDepthTestEnable ) + vkCmdSetDepthTestEnable = vkCmdSetDepthTestEnableEXT; + vkCmdSetDepthWriteEnableEXT = PFN_vkCmdSetDepthWriteEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthWriteEnableEXT" ) ); + if ( !vkCmdSetDepthWriteEnable ) + vkCmdSetDepthWriteEnable = vkCmdSetDepthWriteEnableEXT; + vkCmdSetDepthCompareOpEXT = PFN_vkCmdSetDepthCompareOpEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthCompareOpEXT" ) ); + if ( !vkCmdSetDepthCompareOp ) + vkCmdSetDepthCompareOp = vkCmdSetDepthCompareOpEXT; + vkCmdSetDepthBoundsTestEnableEXT = PFN_vkCmdSetDepthBoundsTestEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthBoundsTestEnableEXT" ) ); + if ( !vkCmdSetDepthBoundsTestEnable ) + vkCmdSetDepthBoundsTestEnable = vkCmdSetDepthBoundsTestEnableEXT; + vkCmdSetStencilTestEnableEXT = PFN_vkCmdSetStencilTestEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetStencilTestEnableEXT" ) ); + if ( !vkCmdSetStencilTestEnable ) + vkCmdSetStencilTestEnable = vkCmdSetStencilTestEnableEXT; + vkCmdSetStencilOpEXT = PFN_vkCmdSetStencilOpEXT( vkGetDeviceProcAddr( device, "vkCmdSetStencilOpEXT" ) ); + if ( !vkCmdSetStencilOp ) + vkCmdSetStencilOp = vkCmdSetStencilOpEXT; - //=== VK_KHR_get_physical_device_properties2 === - PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR = 0; - PFN_vkGetPhysicalDeviceProperties2KHR vkGetPhysicalDeviceProperties2KHR = 0; - PFN_vkGetPhysicalDeviceFormatProperties2KHR vkGetPhysicalDeviceFormatProperties2KHR = 0; - PFN_vkGetPhysicalDeviceImageFormatProperties2KHR vkGetPhysicalDeviceImageFormatProperties2KHR = 0; - PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR vkGetPhysicalDeviceQueueFamilyProperties2KHR = 0; - PFN_vkGetPhysicalDeviceMemoryProperties2KHR vkGetPhysicalDeviceMemoryProperties2KHR = 0; - PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR vkGetPhysicalDeviceSparseImageFormatProperties2KHR = 0; + //=== VK_KHR_deferred_host_operations === + vkCreateDeferredOperationKHR = PFN_vkCreateDeferredOperationKHR( vkGetDeviceProcAddr( device, "vkCreateDeferredOperationKHR" ) ); + vkDestroyDeferredOperationKHR = PFN_vkDestroyDeferredOperationKHR( vkGetDeviceProcAddr( device, "vkDestroyDeferredOperationKHR" ) ); + vkGetDeferredOperationMaxConcurrencyKHR = + PFN_vkGetDeferredOperationMaxConcurrencyKHR( 
vkGetDeviceProcAddr( device, "vkGetDeferredOperationMaxConcurrencyKHR" ) ); + vkGetDeferredOperationResultKHR = PFN_vkGetDeferredOperationResultKHR( vkGetDeviceProcAddr( device, "vkGetDeferredOperationResultKHR" ) ); + vkDeferredOperationJoinKHR = PFN_vkDeferredOperationJoinKHR( vkGetDeviceProcAddr( device, "vkDeferredOperationJoinKHR" ) ); -# if defined( VK_USE_PLATFORM_VI_NN ) - //=== VK_NN_vi_surface === - PFN_vkCreateViSurfaceNN vkCreateViSurfaceNN = 0; -# else - PFN_dummy vkCreateViSurfaceNN_placeholder = 0; -# endif /*VK_USE_PLATFORM_VI_NN*/ + //=== VK_KHR_pipeline_executable_properties === + vkGetPipelineExecutablePropertiesKHR = + PFN_vkGetPipelineExecutablePropertiesKHR( vkGetDeviceProcAddr( device, "vkGetPipelineExecutablePropertiesKHR" ) ); + vkGetPipelineExecutableStatisticsKHR = + PFN_vkGetPipelineExecutableStatisticsKHR( vkGetDeviceProcAddr( device, "vkGetPipelineExecutableStatisticsKHR" ) ); + vkGetPipelineExecutableInternalRepresentationsKHR = + PFN_vkGetPipelineExecutableInternalRepresentationsKHR( vkGetDeviceProcAddr( device, "vkGetPipelineExecutableInternalRepresentationsKHR" ) ); - //=== VK_KHR_device_group_creation === - PFN_vkEnumeratePhysicalDeviceGroupsKHR vkEnumeratePhysicalDeviceGroupsKHR = 0; + //=== VK_EXT_host_image_copy === + vkCopyMemoryToImageEXT = PFN_vkCopyMemoryToImageEXT( vkGetDeviceProcAddr( device, "vkCopyMemoryToImageEXT" ) ); + vkCopyImageToMemoryEXT = PFN_vkCopyImageToMemoryEXT( vkGetDeviceProcAddr( device, "vkCopyImageToMemoryEXT" ) ); + vkCopyImageToImageEXT = PFN_vkCopyImageToImageEXT( vkGetDeviceProcAddr( device, "vkCopyImageToImageEXT" ) ); + vkTransitionImageLayoutEXT = PFN_vkTransitionImageLayoutEXT( vkGetDeviceProcAddr( device, "vkTransitionImageLayoutEXT" ) ); + vkGetImageSubresourceLayout2EXT = PFN_vkGetImageSubresourceLayout2EXT( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout2EXT" ) ); + if ( !vkGetImageSubresourceLayout2KHR ) + vkGetImageSubresourceLayout2KHR = vkGetImageSubresourceLayout2EXT; - //=== VK_KHR_external_memory_capabilities === - PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR vkGetPhysicalDeviceExternalBufferPropertiesKHR = 0; + //=== VK_KHR_map_memory2 === + vkMapMemory2KHR = PFN_vkMapMemory2KHR( vkGetDeviceProcAddr( device, "vkMapMemory2KHR" ) ); + vkUnmapMemory2KHR = PFN_vkUnmapMemory2KHR( vkGetDeviceProcAddr( device, "vkUnmapMemory2KHR" ) ); - //=== VK_KHR_external_semaphore_capabilities === - PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR vkGetPhysicalDeviceExternalSemaphorePropertiesKHR = 0; + //=== VK_EXT_swapchain_maintenance1 === + vkReleaseSwapchainImagesEXT = PFN_vkReleaseSwapchainImagesEXT( vkGetDeviceProcAddr( device, "vkReleaseSwapchainImagesEXT" ) ); - //=== VK_EXT_direct_mode_display === - PFN_vkReleaseDisplayEXT vkReleaseDisplayEXT = 0; + //=== VK_NV_device_generated_commands === + vkGetGeneratedCommandsMemoryRequirementsNV = + PFN_vkGetGeneratedCommandsMemoryRequirementsNV( vkGetDeviceProcAddr( device, "vkGetGeneratedCommandsMemoryRequirementsNV" ) ); + vkCmdPreprocessGeneratedCommandsNV = PFN_vkCmdPreprocessGeneratedCommandsNV( vkGetDeviceProcAddr( device, "vkCmdPreprocessGeneratedCommandsNV" ) ); + vkCmdExecuteGeneratedCommandsNV = PFN_vkCmdExecuteGeneratedCommandsNV( vkGetDeviceProcAddr( device, "vkCmdExecuteGeneratedCommandsNV" ) ); + vkCmdBindPipelineShaderGroupNV = PFN_vkCmdBindPipelineShaderGroupNV( vkGetDeviceProcAddr( device, "vkCmdBindPipelineShaderGroupNV" ) ); + vkCreateIndirectCommandsLayoutNV = PFN_vkCreateIndirectCommandsLayoutNV( vkGetDeviceProcAddr( device, 
"vkCreateIndirectCommandsLayoutNV" ) ); + vkDestroyIndirectCommandsLayoutNV = PFN_vkDestroyIndirectCommandsLayoutNV( vkGetDeviceProcAddr( device, "vkDestroyIndirectCommandsLayoutNV" ) ); -# if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT ) - //=== VK_EXT_acquire_xlib_display === - PFN_vkAcquireXlibDisplayEXT vkAcquireXlibDisplayEXT = 0; - PFN_vkGetRandROutputDisplayEXT vkGetRandROutputDisplayEXT = 0; -# else - PFN_dummy vkAcquireXlibDisplayEXT_placeholder = 0; - PFN_dummy vkGetRandROutputDisplayEXT_placeholder = 0; -# endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ + //=== VK_EXT_depth_bias_control === + vkCmdSetDepthBias2EXT = PFN_vkCmdSetDepthBias2EXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthBias2EXT" ) ); - //=== VK_EXT_display_surface_counter === - PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT vkGetPhysicalDeviceSurfaceCapabilities2EXT = 0; + //=== VK_EXT_private_data === + vkCreatePrivateDataSlotEXT = PFN_vkCreatePrivateDataSlotEXT( vkGetDeviceProcAddr( device, "vkCreatePrivateDataSlotEXT" ) ); + if ( !vkCreatePrivateDataSlot ) + vkCreatePrivateDataSlot = vkCreatePrivateDataSlotEXT; + vkDestroyPrivateDataSlotEXT = PFN_vkDestroyPrivateDataSlotEXT( vkGetDeviceProcAddr( device, "vkDestroyPrivateDataSlotEXT" ) ); + if ( !vkDestroyPrivateDataSlot ) + vkDestroyPrivateDataSlot = vkDestroyPrivateDataSlotEXT; + vkSetPrivateDataEXT = PFN_vkSetPrivateDataEXT( vkGetDeviceProcAddr( device, "vkSetPrivateDataEXT" ) ); + if ( !vkSetPrivateData ) + vkSetPrivateData = vkSetPrivateDataEXT; + vkGetPrivateDataEXT = PFN_vkGetPrivateDataEXT( vkGetDeviceProcAddr( device, "vkGetPrivateDataEXT" ) ); + if ( !vkGetPrivateData ) + vkGetPrivateData = vkGetPrivateDataEXT; - //=== VK_KHR_external_fence_capabilities === - PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR vkGetPhysicalDeviceExternalFencePropertiesKHR = 0; + //=== VK_KHR_video_encode_queue === + vkGetEncodedVideoSessionParametersKHR = + PFN_vkGetEncodedVideoSessionParametersKHR( vkGetDeviceProcAddr( device, "vkGetEncodedVideoSessionParametersKHR" ) ); + vkCmdEncodeVideoKHR = PFN_vkCmdEncodeVideoKHR( vkGetDeviceProcAddr( device, "vkCmdEncodeVideoKHR" ) ); - //=== VK_KHR_performance_query === - PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR = 0; - PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR = 0; - - //=== VK_KHR_get_surface_capabilities2 === - PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR vkGetPhysicalDeviceSurfaceCapabilities2KHR = 0; - PFN_vkGetPhysicalDeviceSurfaceFormats2KHR vkGetPhysicalDeviceSurfaceFormats2KHR = 0; - - //=== VK_KHR_get_display_properties2 === - PFN_vkGetPhysicalDeviceDisplayProperties2KHR vkGetPhysicalDeviceDisplayProperties2KHR = 0; - PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR vkGetPhysicalDeviceDisplayPlaneProperties2KHR = 0; - PFN_vkGetDisplayModeProperties2KHR vkGetDisplayModeProperties2KHR = 0; - PFN_vkGetDisplayPlaneCapabilities2KHR vkGetDisplayPlaneCapabilities2KHR = 0; - -# if defined( VK_USE_PLATFORM_IOS_MVK ) - //=== VK_MVK_ios_surface === - PFN_vkCreateIOSSurfaceMVK vkCreateIOSSurfaceMVK = 0; -# else - PFN_dummy vkCreateIOSSurfaceMVK_placeholder = 0; -# endif /*VK_USE_PLATFORM_IOS_MVK*/ - -# if defined( VK_USE_PLATFORM_MACOS_MVK ) - //=== VK_MVK_macos_surface === - PFN_vkCreateMacOSSurfaceMVK vkCreateMacOSSurfaceMVK = 0; -# else - PFN_dummy vkCreateMacOSSurfaceMVK_placeholder = 0; -# endif /*VK_USE_PLATFORM_MACOS_MVK*/ - - //=== VK_EXT_debug_utils === - 
PFN_vkCreateDebugUtilsMessengerEXT vkCreateDebugUtilsMessengerEXT = 0; - PFN_vkDestroyDebugUtilsMessengerEXT vkDestroyDebugUtilsMessengerEXT = 0; - PFN_vkSubmitDebugUtilsMessageEXT vkSubmitDebugUtilsMessageEXT = 0; - - //=== VK_EXT_sample_locations === - PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT vkGetPhysicalDeviceMultisamplePropertiesEXT = 0; - - //=== VK_EXT_calibrated_timestamps === - PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT vkGetPhysicalDeviceCalibrateableTimeDomainsEXT = 0; - -# if defined( VK_USE_PLATFORM_FUCHSIA ) - //=== VK_FUCHSIA_imagepipe_surface === - PFN_vkCreateImagePipeSurfaceFUCHSIA vkCreateImagePipeSurfaceFUCHSIA = 0; -# else - PFN_dummy vkCreateImagePipeSurfaceFUCHSIA_placeholder = 0; -# endif /*VK_USE_PLATFORM_FUCHSIA*/ +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_NV_cuda_kernel_launch === + vkCreateCudaModuleNV = PFN_vkCreateCudaModuleNV( vkGetDeviceProcAddr( device, "vkCreateCudaModuleNV" ) ); + vkGetCudaModuleCacheNV = PFN_vkGetCudaModuleCacheNV( vkGetDeviceProcAddr( device, "vkGetCudaModuleCacheNV" ) ); + vkCreateCudaFunctionNV = PFN_vkCreateCudaFunctionNV( vkGetDeviceProcAddr( device, "vkCreateCudaFunctionNV" ) ); + vkDestroyCudaModuleNV = PFN_vkDestroyCudaModuleNV( vkGetDeviceProcAddr( device, "vkDestroyCudaModuleNV" ) ); + vkDestroyCudaFunctionNV = PFN_vkDestroyCudaFunctionNV( vkGetDeviceProcAddr( device, "vkDestroyCudaFunctionNV" ) ); + vkCmdCudaLaunchKernelNV = PFN_vkCmdCudaLaunchKernelNV( vkGetDeviceProcAddr( device, "vkCmdCudaLaunchKernelNV" ) ); +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ # if defined( VK_USE_PLATFORM_METAL_EXT ) - //=== VK_EXT_metal_surface === - PFN_vkCreateMetalSurfaceEXT vkCreateMetalSurfaceEXT = 0; -# else - PFN_dummy vkCreateMetalSurfaceEXT_placeholder = 0; + //=== VK_EXT_metal_objects === + vkExportMetalObjectsEXT = PFN_vkExportMetalObjectsEXT( vkGetDeviceProcAddr( device, "vkExportMetalObjectsEXT" ) ); # endif /*VK_USE_PLATFORM_METAL_EXT*/ - //=== VK_KHR_fragment_shading_rate === - PFN_vkGetPhysicalDeviceFragmentShadingRatesKHR vkGetPhysicalDeviceFragmentShadingRatesKHR = 0; + //=== VK_KHR_synchronization2 === + vkCmdSetEvent2KHR = PFN_vkCmdSetEvent2KHR( vkGetDeviceProcAddr( device, "vkCmdSetEvent2KHR" ) ); + if ( !vkCmdSetEvent2 ) + vkCmdSetEvent2 = vkCmdSetEvent2KHR; + vkCmdResetEvent2KHR = PFN_vkCmdResetEvent2KHR( vkGetDeviceProcAddr( device, "vkCmdResetEvent2KHR" ) ); + if ( !vkCmdResetEvent2 ) + vkCmdResetEvent2 = vkCmdResetEvent2KHR; + vkCmdWaitEvents2KHR = PFN_vkCmdWaitEvents2KHR( vkGetDeviceProcAddr( device, "vkCmdWaitEvents2KHR" ) ); + if ( !vkCmdWaitEvents2 ) + vkCmdWaitEvents2 = vkCmdWaitEvents2KHR; + vkCmdPipelineBarrier2KHR = PFN_vkCmdPipelineBarrier2KHR( vkGetDeviceProcAddr( device, "vkCmdPipelineBarrier2KHR" ) ); + if ( !vkCmdPipelineBarrier2 ) + vkCmdPipelineBarrier2 = vkCmdPipelineBarrier2KHR; + vkCmdWriteTimestamp2KHR = PFN_vkCmdWriteTimestamp2KHR( vkGetDeviceProcAddr( device, "vkCmdWriteTimestamp2KHR" ) ); + if ( !vkCmdWriteTimestamp2 ) + vkCmdWriteTimestamp2 = vkCmdWriteTimestamp2KHR; + vkQueueSubmit2KHR = PFN_vkQueueSubmit2KHR( vkGetDeviceProcAddr( device, "vkQueueSubmit2KHR" ) ); + if ( !vkQueueSubmit2 ) + vkQueueSubmit2 = vkQueueSubmit2KHR; - //=== VK_EXT_tooling_info === - PFN_vkGetPhysicalDeviceToolPropertiesEXT vkGetPhysicalDeviceToolPropertiesEXT = 0; + //=== VK_EXT_descriptor_buffer === + vkGetDescriptorSetLayoutSizeEXT = PFN_vkGetDescriptorSetLayoutSizeEXT( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutSizeEXT" ) ); + vkGetDescriptorSetLayoutBindingOffsetEXT = + 
PFN_vkGetDescriptorSetLayoutBindingOffsetEXT( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutBindingOffsetEXT" ) ); + vkGetDescriptorEXT = PFN_vkGetDescriptorEXT( vkGetDeviceProcAddr( device, "vkGetDescriptorEXT" ) ); + vkCmdBindDescriptorBuffersEXT = PFN_vkCmdBindDescriptorBuffersEXT( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorBuffersEXT" ) ); + vkCmdSetDescriptorBufferOffsetsEXT = PFN_vkCmdSetDescriptorBufferOffsetsEXT( vkGetDeviceProcAddr( device, "vkCmdSetDescriptorBufferOffsetsEXT" ) ); + vkCmdBindDescriptorBufferEmbeddedSamplersEXT = + PFN_vkCmdBindDescriptorBufferEmbeddedSamplersEXT( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorBufferEmbeddedSamplersEXT" ) ); + vkGetBufferOpaqueCaptureDescriptorDataEXT = + PFN_vkGetBufferOpaqueCaptureDescriptorDataEXT( vkGetDeviceProcAddr( device, "vkGetBufferOpaqueCaptureDescriptorDataEXT" ) ); + vkGetImageOpaqueCaptureDescriptorDataEXT = + PFN_vkGetImageOpaqueCaptureDescriptorDataEXT( vkGetDeviceProcAddr( device, "vkGetImageOpaqueCaptureDescriptorDataEXT" ) ); + vkGetImageViewOpaqueCaptureDescriptorDataEXT = + PFN_vkGetImageViewOpaqueCaptureDescriptorDataEXT( vkGetDeviceProcAddr( device, "vkGetImageViewOpaqueCaptureDescriptorDataEXT" ) ); + vkGetSamplerOpaqueCaptureDescriptorDataEXT = + PFN_vkGetSamplerOpaqueCaptureDescriptorDataEXT( vkGetDeviceProcAddr( device, "vkGetSamplerOpaqueCaptureDescriptorDataEXT" ) ); + vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT = PFN_vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT( + vkGetDeviceProcAddr( device, "vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT" ) ); - //=== VK_NV_cooperative_matrix === - PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV vkGetPhysicalDeviceCooperativeMatrixPropertiesNV = 0; + //=== VK_NV_fragment_shading_rate_enums === + vkCmdSetFragmentShadingRateEnumNV = PFN_vkCmdSetFragmentShadingRateEnumNV( vkGetDeviceProcAddr( device, "vkCmdSetFragmentShadingRateEnumNV" ) ); - //=== VK_NV_coverage_reduction_mode === - PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV = 0; + //=== VK_EXT_mesh_shader === + vkCmdDrawMeshTasksEXT = PFN_vkCmdDrawMeshTasksEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksEXT" ) ); + vkCmdDrawMeshTasksIndirectEXT = PFN_vkCmdDrawMeshTasksIndirectEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectEXT" ) ); + vkCmdDrawMeshTasksIndirectCountEXT = PFN_vkCmdDrawMeshTasksIndirectCountEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectCountEXT" ) ); -# if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_EXT_full_screen_exclusive === - PFN_vkGetPhysicalDeviceSurfacePresentModes2EXT vkGetPhysicalDeviceSurfacePresentModes2EXT = 0; -# else - PFN_dummy vkGetPhysicalDeviceSurfacePresentModes2EXT_placeholder = 0; -# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + //=== VK_KHR_copy_commands2 === + vkCmdCopyBuffer2KHR = PFN_vkCmdCopyBuffer2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyBuffer2KHR" ) ); + if ( !vkCmdCopyBuffer2 ) + vkCmdCopyBuffer2 = vkCmdCopyBuffer2KHR; + vkCmdCopyImage2KHR = PFN_vkCmdCopyImage2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyImage2KHR" ) ); + if ( !vkCmdCopyImage2 ) + vkCmdCopyImage2 = vkCmdCopyImage2KHR; + vkCmdCopyBufferToImage2KHR = PFN_vkCmdCopyBufferToImage2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyBufferToImage2KHR" ) ); + if ( !vkCmdCopyBufferToImage2 ) + vkCmdCopyBufferToImage2 = vkCmdCopyBufferToImage2KHR; + vkCmdCopyImageToBuffer2KHR = PFN_vkCmdCopyImageToBuffer2KHR( vkGetDeviceProcAddr( device, 
"vkCmdCopyImageToBuffer2KHR" ) ); + if ( !vkCmdCopyImageToBuffer2 ) + vkCmdCopyImageToBuffer2 = vkCmdCopyImageToBuffer2KHR; + vkCmdBlitImage2KHR = PFN_vkCmdBlitImage2KHR( vkGetDeviceProcAddr( device, "vkCmdBlitImage2KHR" ) ); + if ( !vkCmdBlitImage2 ) + vkCmdBlitImage2 = vkCmdBlitImage2KHR; + vkCmdResolveImage2KHR = PFN_vkCmdResolveImage2KHR( vkGetDeviceProcAddr( device, "vkCmdResolveImage2KHR" ) ); + if ( !vkCmdResolveImage2 ) + vkCmdResolveImage2 = vkCmdResolveImage2KHR; - //=== VK_EXT_headless_surface === - PFN_vkCreateHeadlessSurfaceEXT vkCreateHeadlessSurfaceEXT = 0; + //=== VK_EXT_device_fault === + vkGetDeviceFaultInfoEXT = PFN_vkGetDeviceFaultInfoEXT( vkGetDeviceProcAddr( device, "vkGetDeviceFaultInfoEXT" ) ); - //=== VK_EXT_acquire_drm_display === - PFN_vkAcquireDrmDisplayEXT vkAcquireDrmDisplayEXT = 0; - PFN_vkGetDrmDisplayEXT vkGetDrmDisplayEXT = 0; + //=== VK_EXT_vertex_input_dynamic_state === + vkCmdSetVertexInputEXT = PFN_vkCmdSetVertexInputEXT( vkGetDeviceProcAddr( device, "vkCmdSetVertexInputEXT" ) ); - //=== VK_KHR_video_encode_queue === - PFN_vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR = 0; +# if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_external_memory === + vkGetMemoryZirconHandleFUCHSIA = PFN_vkGetMemoryZirconHandleFUCHSIA( vkGetDeviceProcAddr( device, "vkGetMemoryZirconHandleFUCHSIA" ) ); + vkGetMemoryZirconHandlePropertiesFUCHSIA = + PFN_vkGetMemoryZirconHandlePropertiesFUCHSIA( vkGetDeviceProcAddr( device, "vkGetMemoryZirconHandlePropertiesFUCHSIA" ) ); +# endif /*VK_USE_PLATFORM_FUCHSIA*/ -# if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_NV_acquire_winrt_display === - PFN_vkAcquireWinrtDisplayNV vkAcquireWinrtDisplayNV = 0; - PFN_vkGetWinrtDisplayNV vkGetWinrtDisplayNV = 0; -# else - PFN_dummy vkAcquireWinrtDisplayNV_placeholder = 0; - PFN_dummy vkGetWinrtDisplayNV_placeholder = 0; -# endif /*VK_USE_PLATFORM_WIN32_KHR*/ +# if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_external_semaphore === + vkImportSemaphoreZirconHandleFUCHSIA = + PFN_vkImportSemaphoreZirconHandleFUCHSIA( vkGetDeviceProcAddr( device, "vkImportSemaphoreZirconHandleFUCHSIA" ) ); + vkGetSemaphoreZirconHandleFUCHSIA = PFN_vkGetSemaphoreZirconHandleFUCHSIA( vkGetDeviceProcAddr( device, "vkGetSemaphoreZirconHandleFUCHSIA" ) ); +# endif /*VK_USE_PLATFORM_FUCHSIA*/ -# if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) - //=== VK_EXT_directfb_surface === - PFN_vkCreateDirectFBSurfaceEXT vkCreateDirectFBSurfaceEXT = 0; - PFN_vkGetPhysicalDeviceDirectFBPresentationSupportEXT vkGetPhysicalDeviceDirectFBPresentationSupportEXT = 0; -# else - PFN_dummy vkCreateDirectFBSurfaceEXT_placeholder = 0; - PFN_dummy vkGetPhysicalDeviceDirectFBPresentationSupportEXT_placeholder = 0; -# endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ +# if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_buffer_collection === + vkCreateBufferCollectionFUCHSIA = PFN_vkCreateBufferCollectionFUCHSIA( vkGetDeviceProcAddr( device, "vkCreateBufferCollectionFUCHSIA" ) ); + vkSetBufferCollectionImageConstraintsFUCHSIA = + PFN_vkSetBufferCollectionImageConstraintsFUCHSIA( vkGetDeviceProcAddr( device, "vkSetBufferCollectionImageConstraintsFUCHSIA" ) ); + vkSetBufferCollectionBufferConstraintsFUCHSIA = + PFN_vkSetBufferCollectionBufferConstraintsFUCHSIA( vkGetDeviceProcAddr( device, "vkSetBufferCollectionBufferConstraintsFUCHSIA" ) ); + vkDestroyBufferCollectionFUCHSIA = PFN_vkDestroyBufferCollectionFUCHSIA( vkGetDeviceProcAddr( device, 
"vkDestroyBufferCollectionFUCHSIA" ) ); + vkGetBufferCollectionPropertiesFUCHSIA = + PFN_vkGetBufferCollectionPropertiesFUCHSIA( vkGetDeviceProcAddr( device, "vkGetBufferCollectionPropertiesFUCHSIA" ) ); +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + + //=== VK_HUAWEI_subpass_shading === + vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI = + PFN_vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI( vkGetDeviceProcAddr( device, "vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI" ) ); + vkCmdSubpassShadingHUAWEI = PFN_vkCmdSubpassShadingHUAWEI( vkGetDeviceProcAddr( device, "vkCmdSubpassShadingHUAWEI" ) ); + + //=== VK_HUAWEI_invocation_mask === + vkCmdBindInvocationMaskHUAWEI = PFN_vkCmdBindInvocationMaskHUAWEI( vkGetDeviceProcAddr( device, "vkCmdBindInvocationMaskHUAWEI" ) ); + + //=== VK_NV_external_memory_rdma === + vkGetMemoryRemoteAddressNV = PFN_vkGetMemoryRemoteAddressNV( vkGetDeviceProcAddr( device, "vkGetMemoryRemoteAddressNV" ) ); + + //=== VK_EXT_pipeline_properties === + vkGetPipelinePropertiesEXT = PFN_vkGetPipelinePropertiesEXT( vkGetDeviceProcAddr( device, "vkGetPipelinePropertiesEXT" ) ); + + //=== VK_EXT_extended_dynamic_state2 === + vkCmdSetPatchControlPointsEXT = PFN_vkCmdSetPatchControlPointsEXT( vkGetDeviceProcAddr( device, "vkCmdSetPatchControlPointsEXT" ) ); + vkCmdSetRasterizerDiscardEnableEXT = PFN_vkCmdSetRasterizerDiscardEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetRasterizerDiscardEnableEXT" ) ); + if ( !vkCmdSetRasterizerDiscardEnable ) + vkCmdSetRasterizerDiscardEnable = vkCmdSetRasterizerDiscardEnableEXT; + vkCmdSetDepthBiasEnableEXT = PFN_vkCmdSetDepthBiasEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthBiasEnableEXT" ) ); + if ( !vkCmdSetDepthBiasEnable ) + vkCmdSetDepthBiasEnable = vkCmdSetDepthBiasEnableEXT; + vkCmdSetLogicOpEXT = PFN_vkCmdSetLogicOpEXT( vkGetDeviceProcAddr( device, "vkCmdSetLogicOpEXT" ) ); + vkCmdSetPrimitiveRestartEnableEXT = PFN_vkCmdSetPrimitiveRestartEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveRestartEnableEXT" ) ); + if ( !vkCmdSetPrimitiveRestartEnable ) + vkCmdSetPrimitiveRestartEnable = vkCmdSetPrimitiveRestartEnableEXT; + + //=== VK_EXT_color_write_enable === + vkCmdSetColorWriteEnableEXT = PFN_vkCmdSetColorWriteEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorWriteEnableEXT" ) ); + + //=== VK_KHR_ray_tracing_maintenance1 === + vkCmdTraceRaysIndirect2KHR = PFN_vkCmdTraceRaysIndirect2KHR( vkGetDeviceProcAddr( device, "vkCmdTraceRaysIndirect2KHR" ) ); + + //=== VK_EXT_multi_draw === + vkCmdDrawMultiEXT = PFN_vkCmdDrawMultiEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMultiEXT" ) ); + vkCmdDrawMultiIndexedEXT = PFN_vkCmdDrawMultiIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMultiIndexedEXT" ) ); + + //=== VK_EXT_opacity_micromap === + vkCreateMicromapEXT = PFN_vkCreateMicromapEXT( vkGetDeviceProcAddr( device, "vkCreateMicromapEXT" ) ); + vkDestroyMicromapEXT = PFN_vkDestroyMicromapEXT( vkGetDeviceProcAddr( device, "vkDestroyMicromapEXT" ) ); + vkCmdBuildMicromapsEXT = PFN_vkCmdBuildMicromapsEXT( vkGetDeviceProcAddr( device, "vkCmdBuildMicromapsEXT" ) ); + vkBuildMicromapsEXT = PFN_vkBuildMicromapsEXT( vkGetDeviceProcAddr( device, "vkBuildMicromapsEXT" ) ); + vkCopyMicromapEXT = PFN_vkCopyMicromapEXT( vkGetDeviceProcAddr( device, "vkCopyMicromapEXT" ) ); + vkCopyMicromapToMemoryEXT = PFN_vkCopyMicromapToMemoryEXT( vkGetDeviceProcAddr( device, "vkCopyMicromapToMemoryEXT" ) ); + vkCopyMemoryToMicromapEXT = PFN_vkCopyMemoryToMicromapEXT( vkGetDeviceProcAddr( device, "vkCopyMemoryToMicromapEXT" ) ); + 
vkWriteMicromapsPropertiesEXT = PFN_vkWriteMicromapsPropertiesEXT( vkGetDeviceProcAddr( device, "vkWriteMicromapsPropertiesEXT" ) ); + vkCmdCopyMicromapEXT = PFN_vkCmdCopyMicromapEXT( vkGetDeviceProcAddr( device, "vkCmdCopyMicromapEXT" ) ); + vkCmdCopyMicromapToMemoryEXT = PFN_vkCmdCopyMicromapToMemoryEXT( vkGetDeviceProcAddr( device, "vkCmdCopyMicromapToMemoryEXT" ) ); + vkCmdCopyMemoryToMicromapEXT = PFN_vkCmdCopyMemoryToMicromapEXT( vkGetDeviceProcAddr( device, "vkCmdCopyMemoryToMicromapEXT" ) ); + vkCmdWriteMicromapsPropertiesEXT = PFN_vkCmdWriteMicromapsPropertiesEXT( vkGetDeviceProcAddr( device, "vkCmdWriteMicromapsPropertiesEXT" ) ); + vkGetDeviceMicromapCompatibilityEXT = PFN_vkGetDeviceMicromapCompatibilityEXT( vkGetDeviceProcAddr( device, "vkGetDeviceMicromapCompatibilityEXT" ) ); + vkGetMicromapBuildSizesEXT = PFN_vkGetMicromapBuildSizesEXT( vkGetDeviceProcAddr( device, "vkGetMicromapBuildSizesEXT" ) ); + + //=== VK_HUAWEI_cluster_culling_shader === + vkCmdDrawClusterHUAWEI = PFN_vkCmdDrawClusterHUAWEI( vkGetDeviceProcAddr( device, "vkCmdDrawClusterHUAWEI" ) ); + vkCmdDrawClusterIndirectHUAWEI = PFN_vkCmdDrawClusterIndirectHUAWEI( vkGetDeviceProcAddr( device, "vkCmdDrawClusterIndirectHUAWEI" ) ); + + //=== VK_EXT_pageable_device_local_memory === + vkSetDeviceMemoryPriorityEXT = PFN_vkSetDeviceMemoryPriorityEXT( vkGetDeviceProcAddr( device, "vkSetDeviceMemoryPriorityEXT" ) ); + + //=== VK_KHR_maintenance4 === + vkGetDeviceBufferMemoryRequirementsKHR = + PFN_vkGetDeviceBufferMemoryRequirementsKHR( vkGetDeviceProcAddr( device, "vkGetDeviceBufferMemoryRequirementsKHR" ) ); + if ( !vkGetDeviceBufferMemoryRequirements ) + vkGetDeviceBufferMemoryRequirements = vkGetDeviceBufferMemoryRequirementsKHR; + vkGetDeviceImageMemoryRequirementsKHR = + PFN_vkGetDeviceImageMemoryRequirementsKHR( vkGetDeviceProcAddr( device, "vkGetDeviceImageMemoryRequirementsKHR" ) ); + if ( !vkGetDeviceImageMemoryRequirements ) + vkGetDeviceImageMemoryRequirements = vkGetDeviceImageMemoryRequirementsKHR; + vkGetDeviceImageSparseMemoryRequirementsKHR = + PFN_vkGetDeviceImageSparseMemoryRequirementsKHR( vkGetDeviceProcAddr( device, "vkGetDeviceImageSparseMemoryRequirementsKHR" ) ); + if ( !vkGetDeviceImageSparseMemoryRequirements ) + vkGetDeviceImageSparseMemoryRequirements = vkGetDeviceImageSparseMemoryRequirementsKHR; + + //=== VK_VALVE_descriptor_set_host_mapping === + vkGetDescriptorSetLayoutHostMappingInfoVALVE = + PFN_vkGetDescriptorSetLayoutHostMappingInfoVALVE( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutHostMappingInfoVALVE" ) ); + vkGetDescriptorSetHostMappingVALVE = PFN_vkGetDescriptorSetHostMappingVALVE( vkGetDeviceProcAddr( device, "vkGetDescriptorSetHostMappingVALVE" ) ); + + //=== VK_NV_copy_memory_indirect === + vkCmdCopyMemoryIndirectNV = PFN_vkCmdCopyMemoryIndirectNV( vkGetDeviceProcAddr( device, "vkCmdCopyMemoryIndirectNV" ) ); + vkCmdCopyMemoryToImageIndirectNV = PFN_vkCmdCopyMemoryToImageIndirectNV( vkGetDeviceProcAddr( device, "vkCmdCopyMemoryToImageIndirectNV" ) ); + + //=== VK_NV_memory_decompression === + vkCmdDecompressMemoryNV = PFN_vkCmdDecompressMemoryNV( vkGetDeviceProcAddr( device, "vkCmdDecompressMemoryNV" ) ); + vkCmdDecompressMemoryIndirectCountNV = + PFN_vkCmdDecompressMemoryIndirectCountNV( vkGetDeviceProcAddr( device, "vkCmdDecompressMemoryIndirectCountNV" ) ); + + //=== VK_NV_device_generated_commands_compute === + vkGetPipelineIndirectMemoryRequirementsNV = + PFN_vkGetPipelineIndirectMemoryRequirementsNV( vkGetDeviceProcAddr( device, 
"vkGetPipelineIndirectMemoryRequirementsNV" ) ); + vkCmdUpdatePipelineIndirectBufferNV = PFN_vkCmdUpdatePipelineIndirectBufferNV( vkGetDeviceProcAddr( device, "vkCmdUpdatePipelineIndirectBufferNV" ) ); + vkGetPipelineIndirectDeviceAddressNV = + PFN_vkGetPipelineIndirectDeviceAddressNV( vkGetDeviceProcAddr( device, "vkGetPipelineIndirectDeviceAddressNV" ) ); + + //=== VK_EXT_extended_dynamic_state3 === + vkCmdSetDepthClampEnableEXT = PFN_vkCmdSetDepthClampEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthClampEnableEXT" ) ); + vkCmdSetPolygonModeEXT = PFN_vkCmdSetPolygonModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetPolygonModeEXT" ) ); + vkCmdSetRasterizationSamplesEXT = PFN_vkCmdSetRasterizationSamplesEXT( vkGetDeviceProcAddr( device, "vkCmdSetRasterizationSamplesEXT" ) ); + vkCmdSetSampleMaskEXT = PFN_vkCmdSetSampleMaskEXT( vkGetDeviceProcAddr( device, "vkCmdSetSampleMaskEXT" ) ); + vkCmdSetAlphaToCoverageEnableEXT = PFN_vkCmdSetAlphaToCoverageEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetAlphaToCoverageEnableEXT" ) ); + vkCmdSetAlphaToOneEnableEXT = PFN_vkCmdSetAlphaToOneEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetAlphaToOneEnableEXT" ) ); + vkCmdSetLogicOpEnableEXT = PFN_vkCmdSetLogicOpEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetLogicOpEnableEXT" ) ); + vkCmdSetColorBlendEnableEXT = PFN_vkCmdSetColorBlendEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorBlendEnableEXT" ) ); + vkCmdSetColorBlendEquationEXT = PFN_vkCmdSetColorBlendEquationEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorBlendEquationEXT" ) ); + vkCmdSetColorWriteMaskEXT = PFN_vkCmdSetColorWriteMaskEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorWriteMaskEXT" ) ); + vkCmdSetTessellationDomainOriginEXT = PFN_vkCmdSetTessellationDomainOriginEXT( vkGetDeviceProcAddr( device, "vkCmdSetTessellationDomainOriginEXT" ) ); + vkCmdSetRasterizationStreamEXT = PFN_vkCmdSetRasterizationStreamEXT( vkGetDeviceProcAddr( device, "vkCmdSetRasterizationStreamEXT" ) ); + vkCmdSetConservativeRasterizationModeEXT = + PFN_vkCmdSetConservativeRasterizationModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetConservativeRasterizationModeEXT" ) ); + vkCmdSetExtraPrimitiveOverestimationSizeEXT = + PFN_vkCmdSetExtraPrimitiveOverestimationSizeEXT( vkGetDeviceProcAddr( device, "vkCmdSetExtraPrimitiveOverestimationSizeEXT" ) ); + vkCmdSetDepthClipEnableEXT = PFN_vkCmdSetDepthClipEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthClipEnableEXT" ) ); + vkCmdSetSampleLocationsEnableEXT = PFN_vkCmdSetSampleLocationsEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetSampleLocationsEnableEXT" ) ); + vkCmdSetColorBlendAdvancedEXT = PFN_vkCmdSetColorBlendAdvancedEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorBlendAdvancedEXT" ) ); + vkCmdSetProvokingVertexModeEXT = PFN_vkCmdSetProvokingVertexModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetProvokingVertexModeEXT" ) ); + vkCmdSetLineRasterizationModeEXT = PFN_vkCmdSetLineRasterizationModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetLineRasterizationModeEXT" ) ); + vkCmdSetLineStippleEnableEXT = PFN_vkCmdSetLineStippleEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetLineStippleEnableEXT" ) ); + vkCmdSetDepthClipNegativeOneToOneEXT = + PFN_vkCmdSetDepthClipNegativeOneToOneEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthClipNegativeOneToOneEXT" ) ); + vkCmdSetViewportWScalingEnableNV = PFN_vkCmdSetViewportWScalingEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportWScalingEnableNV" ) ); + vkCmdSetViewportSwizzleNV = PFN_vkCmdSetViewportSwizzleNV( vkGetDeviceProcAddr( device, 
"vkCmdSetViewportSwizzleNV" ) ); + vkCmdSetCoverageToColorEnableNV = PFN_vkCmdSetCoverageToColorEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageToColorEnableNV" ) ); + vkCmdSetCoverageToColorLocationNV = PFN_vkCmdSetCoverageToColorLocationNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageToColorLocationNV" ) ); + vkCmdSetCoverageModulationModeNV = PFN_vkCmdSetCoverageModulationModeNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageModulationModeNV" ) ); + vkCmdSetCoverageModulationTableEnableNV = + PFN_vkCmdSetCoverageModulationTableEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageModulationTableEnableNV" ) ); + vkCmdSetCoverageModulationTableNV = PFN_vkCmdSetCoverageModulationTableNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageModulationTableNV" ) ); + vkCmdSetShadingRateImageEnableNV = PFN_vkCmdSetShadingRateImageEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetShadingRateImageEnableNV" ) ); + vkCmdSetRepresentativeFragmentTestEnableNV = + PFN_vkCmdSetRepresentativeFragmentTestEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetRepresentativeFragmentTestEnableNV" ) ); + vkCmdSetCoverageReductionModeNV = PFN_vkCmdSetCoverageReductionModeNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageReductionModeNV" ) ); + + //=== VK_EXT_shader_module_identifier === + vkGetShaderModuleIdentifierEXT = PFN_vkGetShaderModuleIdentifierEXT( vkGetDeviceProcAddr( device, "vkGetShaderModuleIdentifierEXT" ) ); + vkGetShaderModuleCreateInfoIdentifierEXT = + PFN_vkGetShaderModuleCreateInfoIdentifierEXT( vkGetDeviceProcAddr( device, "vkGetShaderModuleCreateInfoIdentifierEXT" ) ); + + //=== VK_NV_optical_flow === + vkCreateOpticalFlowSessionNV = PFN_vkCreateOpticalFlowSessionNV( vkGetDeviceProcAddr( device, "vkCreateOpticalFlowSessionNV" ) ); + vkDestroyOpticalFlowSessionNV = PFN_vkDestroyOpticalFlowSessionNV( vkGetDeviceProcAddr( device, "vkDestroyOpticalFlowSessionNV" ) ); + vkBindOpticalFlowSessionImageNV = PFN_vkBindOpticalFlowSessionImageNV( vkGetDeviceProcAddr( device, "vkBindOpticalFlowSessionImageNV" ) ); + vkCmdOpticalFlowExecuteNV = PFN_vkCmdOpticalFlowExecuteNV( vkGetDeviceProcAddr( device, "vkCmdOpticalFlowExecuteNV" ) ); + + //=== VK_KHR_maintenance5 === + vkCmdBindIndexBuffer2KHR = PFN_vkCmdBindIndexBuffer2KHR( vkGetDeviceProcAddr( device, "vkCmdBindIndexBuffer2KHR" ) ); + vkGetRenderingAreaGranularityKHR = PFN_vkGetRenderingAreaGranularityKHR( vkGetDeviceProcAddr( device, "vkGetRenderingAreaGranularityKHR" ) ); + vkGetDeviceImageSubresourceLayoutKHR = + PFN_vkGetDeviceImageSubresourceLayoutKHR( vkGetDeviceProcAddr( device, "vkGetDeviceImageSubresourceLayoutKHR" ) ); + vkGetImageSubresourceLayout2KHR = PFN_vkGetImageSubresourceLayout2KHR( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout2KHR" ) ); + + //=== VK_AMD_anti_lag === + vkAntiLagUpdateAMD = PFN_vkAntiLagUpdateAMD( vkGetDeviceProcAddr( device, "vkAntiLagUpdateAMD" ) ); + + //=== VK_EXT_shader_object === + vkCreateShadersEXT = PFN_vkCreateShadersEXT( vkGetDeviceProcAddr( device, "vkCreateShadersEXT" ) ); + vkDestroyShaderEXT = PFN_vkDestroyShaderEXT( vkGetDeviceProcAddr( device, "vkDestroyShaderEXT" ) ); + vkGetShaderBinaryDataEXT = PFN_vkGetShaderBinaryDataEXT( vkGetDeviceProcAddr( device, "vkGetShaderBinaryDataEXT" ) ); + vkCmdBindShadersEXT = PFN_vkCmdBindShadersEXT( vkGetDeviceProcAddr( device, "vkCmdBindShadersEXT" ) ); + vkCmdSetDepthClampRangeEXT = PFN_vkCmdSetDepthClampRangeEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthClampRangeEXT" ) ); + + //=== VK_KHR_pipeline_binary === + vkCreatePipelineBinariesKHR = 
PFN_vkCreatePipelineBinariesKHR( vkGetDeviceProcAddr( device, "vkCreatePipelineBinariesKHR" ) ); + vkDestroyPipelineBinaryKHR = PFN_vkDestroyPipelineBinaryKHR( vkGetDeviceProcAddr( device, "vkDestroyPipelineBinaryKHR" ) ); + vkGetPipelineKeyKHR = PFN_vkGetPipelineKeyKHR( vkGetDeviceProcAddr( device, "vkGetPipelineKeyKHR" ) ); + vkGetPipelineBinaryDataKHR = PFN_vkGetPipelineBinaryDataKHR( vkGetDeviceProcAddr( device, "vkGetPipelineBinaryDataKHR" ) ); + vkReleaseCapturedPipelineDataKHR = PFN_vkReleaseCapturedPipelineDataKHR( vkGetDeviceProcAddr( device, "vkReleaseCapturedPipelineDataKHR" ) ); + + //=== VK_QCOM_tile_properties === + vkGetFramebufferTilePropertiesQCOM = PFN_vkGetFramebufferTilePropertiesQCOM( vkGetDeviceProcAddr( device, "vkGetFramebufferTilePropertiesQCOM" ) ); + vkGetDynamicRenderingTilePropertiesQCOM = + PFN_vkGetDynamicRenderingTilePropertiesQCOM( vkGetDeviceProcAddr( device, "vkGetDynamicRenderingTilePropertiesQCOM" ) ); + + //=== VK_NV_low_latency2 === + vkSetLatencySleepModeNV = PFN_vkSetLatencySleepModeNV( vkGetDeviceProcAddr( device, "vkSetLatencySleepModeNV" ) ); + vkLatencySleepNV = PFN_vkLatencySleepNV( vkGetDeviceProcAddr( device, "vkLatencySleepNV" ) ); + vkSetLatencyMarkerNV = PFN_vkSetLatencyMarkerNV( vkGetDeviceProcAddr( device, "vkSetLatencyMarkerNV" ) ); + vkGetLatencyTimingsNV = PFN_vkGetLatencyTimingsNV( vkGetDeviceProcAddr( device, "vkGetLatencyTimingsNV" ) ); + vkQueueNotifyOutOfBandNV = PFN_vkQueueNotifyOutOfBandNV( vkGetDeviceProcAddr( device, "vkQueueNotifyOutOfBandNV" ) ); + + //=== VK_EXT_attachment_feedback_loop_dynamic_state === + vkCmdSetAttachmentFeedbackLoopEnableEXT = + PFN_vkCmdSetAttachmentFeedbackLoopEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetAttachmentFeedbackLoopEnableEXT" ) ); # if defined( VK_USE_PLATFORM_SCREEN_QNX ) - //=== VK_QNX_screen_surface === - PFN_vkCreateScreenSurfaceQNX vkCreateScreenSurfaceQNX = 0; - PFN_vkGetPhysicalDeviceScreenPresentationSupportQNX vkGetPhysicalDeviceScreenPresentationSupportQNX = 0; -# else - PFN_dummy vkCreateScreenSurfaceQNX_placeholder = 0; - PFN_dummy vkGetPhysicalDeviceScreenPresentationSupportQNX_placeholder = 0; + //=== VK_QNX_external_memory_screen_buffer === + vkGetScreenBufferPropertiesQNX = PFN_vkGetScreenBufferPropertiesQNX( vkGetDeviceProcAddr( device, "vkGetScreenBufferPropertiesQNX" ) ); # endif /*VK_USE_PLATFORM_SCREEN_QNX*/ - //=== VK_NV_optical_flow === - PFN_vkGetPhysicalDeviceOpticalFlowImageFormatsNV vkGetPhysicalDeviceOpticalFlowImageFormatsNV = 0; + //=== VK_KHR_line_rasterization === + vkCmdSetLineStippleKHR = PFN_vkCmdSetLineStippleKHR( vkGetDeviceProcAddr( device, "vkCmdSetLineStippleKHR" ) ); - //=== VK_KHR_cooperative_matrix === - PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR = 0; + //=== VK_KHR_calibrated_timestamps === + vkGetCalibratedTimestampsKHR = PFN_vkGetCalibratedTimestampsKHR( vkGetDeviceProcAddr( device, "vkGetCalibratedTimestampsKHR" ) ); - //=== VK_KHR_calibrated_timestamps === - PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsKHR vkGetPhysicalDeviceCalibrateableTimeDomainsKHR = 0; + //=== VK_KHR_maintenance6 === + vkCmdBindDescriptorSets2KHR = PFN_vkCmdBindDescriptorSets2KHR( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorSets2KHR" ) ); + vkCmdPushConstants2KHR = PFN_vkCmdPushConstants2KHR( vkGetDeviceProcAddr( device, "vkCmdPushConstants2KHR" ) ); + vkCmdPushDescriptorSet2KHR = PFN_vkCmdPushDescriptorSet2KHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSet2KHR" ) ); + 
vkCmdPushDescriptorSetWithTemplate2KHR = + PFN_vkCmdPushDescriptorSetWithTemplate2KHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetWithTemplate2KHR" ) ); + vkCmdSetDescriptorBufferOffsets2EXT = PFN_vkCmdSetDescriptorBufferOffsets2EXT( vkGetDeviceProcAddr( device, "vkCmdSetDescriptorBufferOffsets2EXT" ) ); + vkCmdBindDescriptorBufferEmbeddedSamplers2EXT = + PFN_vkCmdBindDescriptorBufferEmbeddedSamplers2EXT( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorBufferEmbeddedSamplers2EXT" ) ); - PFN_vkGetDeviceProcAddr vkGetDeviceProcAddr = 0; - }; + //=== VK_EXT_device_generated_commands === + vkGetGeneratedCommandsMemoryRequirementsEXT = + PFN_vkGetGeneratedCommandsMemoryRequirementsEXT( vkGetDeviceProcAddr( device, "vkGetGeneratedCommandsMemoryRequirementsEXT" ) ); + vkCmdPreprocessGeneratedCommandsEXT = PFN_vkCmdPreprocessGeneratedCommandsEXT( vkGetDeviceProcAddr( device, "vkCmdPreprocessGeneratedCommandsEXT" ) ); + vkCmdExecuteGeneratedCommandsEXT = PFN_vkCmdExecuteGeneratedCommandsEXT( vkGetDeviceProcAddr( device, "vkCmdExecuteGeneratedCommandsEXT" ) ); + vkCreateIndirectCommandsLayoutEXT = PFN_vkCreateIndirectCommandsLayoutEXT( vkGetDeviceProcAddr( device, "vkCreateIndirectCommandsLayoutEXT" ) ); + vkDestroyIndirectCommandsLayoutEXT = PFN_vkDestroyIndirectCommandsLayoutEXT( vkGetDeviceProcAddr( device, "vkDestroyIndirectCommandsLayoutEXT" ) ); + vkCreateIndirectExecutionSetEXT = PFN_vkCreateIndirectExecutionSetEXT( vkGetDeviceProcAddr( device, "vkCreateIndirectExecutionSetEXT" ) ); + vkDestroyIndirectExecutionSetEXT = PFN_vkDestroyIndirectExecutionSetEXT( vkGetDeviceProcAddr( device, "vkDestroyIndirectExecutionSetEXT" ) ); + vkUpdateIndirectExecutionSetPipelineEXT = + PFN_vkUpdateIndirectExecutionSetPipelineEXT( vkGetDeviceProcAddr( device, "vkUpdateIndirectExecutionSetPipelineEXT" ) ); + vkUpdateIndirectExecutionSetShaderEXT = + PFN_vkUpdateIndirectExecutionSetShaderEXT( vkGetDeviceProcAddr( device, "vkUpdateIndirectExecutionSetShaderEXT" ) ); + } - class DeviceDispatcher : public DispatchLoaderBase - { - public: - DeviceDispatcher( PFN_vkGetDeviceProcAddr getProcAddr, VkDevice device ) : vkGetDeviceProcAddr( getProcAddr ) - { + public: //=== VK_VERSION_1_0 === - vkGetDeviceProcAddr = PFN_vkGetDeviceProcAddr( vkGetDeviceProcAddr( device, "vkGetDeviceProcAddr" ) ); - vkDestroyDevice = PFN_vkDestroyDevice( vkGetDeviceProcAddr( device, "vkDestroyDevice" ) ); - vkGetDeviceQueue = PFN_vkGetDeviceQueue( vkGetDeviceProcAddr( device, "vkGetDeviceQueue" ) ); - vkQueueSubmit = PFN_vkQueueSubmit( vkGetDeviceProcAddr( device, "vkQueueSubmit" ) ); - vkQueueWaitIdle = PFN_vkQueueWaitIdle( vkGetDeviceProcAddr( device, "vkQueueWaitIdle" ) ); - vkDeviceWaitIdle = PFN_vkDeviceWaitIdle( vkGetDeviceProcAddr( device, "vkDeviceWaitIdle" ) ); - vkAllocateMemory = PFN_vkAllocateMemory( vkGetDeviceProcAddr( device, "vkAllocateMemory" ) ); - vkFreeMemory = PFN_vkFreeMemory( vkGetDeviceProcAddr( device, "vkFreeMemory" ) ); - vkMapMemory = PFN_vkMapMemory( vkGetDeviceProcAddr( device, "vkMapMemory" ) ); - vkUnmapMemory = PFN_vkUnmapMemory( vkGetDeviceProcAddr( device, "vkUnmapMemory" ) ); - vkFlushMappedMemoryRanges = PFN_vkFlushMappedMemoryRanges( vkGetDeviceProcAddr( device, "vkFlushMappedMemoryRanges" ) ); - vkInvalidateMappedMemoryRanges = PFN_vkInvalidateMappedMemoryRanges( vkGetDeviceProcAddr( device, "vkInvalidateMappedMemoryRanges" ) ); - vkGetDeviceMemoryCommitment = PFN_vkGetDeviceMemoryCommitment( vkGetDeviceProcAddr( device, "vkGetDeviceMemoryCommitment" ) ); - vkBindBufferMemory = 
PFN_vkBindBufferMemory( vkGetDeviceProcAddr( device, "vkBindBufferMemory" ) ); - vkBindImageMemory = PFN_vkBindImageMemory( vkGetDeviceProcAddr( device, "vkBindImageMemory" ) ); - vkGetBufferMemoryRequirements = PFN_vkGetBufferMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements" ) ); - vkGetImageMemoryRequirements = PFN_vkGetImageMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements" ) ); - vkGetImageSparseMemoryRequirements = PFN_vkGetImageSparseMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements" ) ); - vkQueueBindSparse = PFN_vkQueueBindSparse( vkGetDeviceProcAddr( device, "vkQueueBindSparse" ) ); - vkCreateFence = PFN_vkCreateFence( vkGetDeviceProcAddr( device, "vkCreateFence" ) ); - vkDestroyFence = PFN_vkDestroyFence( vkGetDeviceProcAddr( device, "vkDestroyFence" ) ); - vkResetFences = PFN_vkResetFences( vkGetDeviceProcAddr( device, "vkResetFences" ) ); - vkGetFenceStatus = PFN_vkGetFenceStatus( vkGetDeviceProcAddr( device, "vkGetFenceStatus" ) ); - vkWaitForFences = PFN_vkWaitForFences( vkGetDeviceProcAddr( device, "vkWaitForFences" ) ); - vkCreateSemaphore = PFN_vkCreateSemaphore( vkGetDeviceProcAddr( device, "vkCreateSemaphore" ) ); - vkDestroySemaphore = PFN_vkDestroySemaphore( vkGetDeviceProcAddr( device, "vkDestroySemaphore" ) ); - vkCreateEvent = PFN_vkCreateEvent( vkGetDeviceProcAddr( device, "vkCreateEvent" ) ); - vkDestroyEvent = PFN_vkDestroyEvent( vkGetDeviceProcAddr( device, "vkDestroyEvent" ) ); - vkGetEventStatus = PFN_vkGetEventStatus( vkGetDeviceProcAddr( device, "vkGetEventStatus" ) ); - vkSetEvent = PFN_vkSetEvent( vkGetDeviceProcAddr( device, "vkSetEvent" ) ); - vkResetEvent = PFN_vkResetEvent( vkGetDeviceProcAddr( device, "vkResetEvent" ) ); - vkCreateQueryPool = PFN_vkCreateQueryPool( vkGetDeviceProcAddr( device, "vkCreateQueryPool" ) ); - vkDestroyQueryPool = PFN_vkDestroyQueryPool( vkGetDeviceProcAddr( device, "vkDestroyQueryPool" ) ); - vkGetQueryPoolResults = PFN_vkGetQueryPoolResults( vkGetDeviceProcAddr( device, "vkGetQueryPoolResults" ) ); - vkCreateBuffer = PFN_vkCreateBuffer( vkGetDeviceProcAddr( device, "vkCreateBuffer" ) ); - vkDestroyBuffer = PFN_vkDestroyBuffer( vkGetDeviceProcAddr( device, "vkDestroyBuffer" ) ); - vkCreateBufferView = PFN_vkCreateBufferView( vkGetDeviceProcAddr( device, "vkCreateBufferView" ) ); - vkDestroyBufferView = PFN_vkDestroyBufferView( vkGetDeviceProcAddr( device, "vkDestroyBufferView" ) ); - vkCreateImage = PFN_vkCreateImage( vkGetDeviceProcAddr( device, "vkCreateImage" ) ); - vkDestroyImage = PFN_vkDestroyImage( vkGetDeviceProcAddr( device, "vkDestroyImage" ) ); - vkGetImageSubresourceLayout = PFN_vkGetImageSubresourceLayout( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout" ) ); - vkCreateImageView = PFN_vkCreateImageView( vkGetDeviceProcAddr( device, "vkCreateImageView" ) ); - vkDestroyImageView = PFN_vkDestroyImageView( vkGetDeviceProcAddr( device, "vkDestroyImageView" ) ); - vkCreateShaderModule = PFN_vkCreateShaderModule( vkGetDeviceProcAddr( device, "vkCreateShaderModule" ) ); - vkDestroyShaderModule = PFN_vkDestroyShaderModule( vkGetDeviceProcAddr( device, "vkDestroyShaderModule" ) ); - vkCreatePipelineCache = PFN_vkCreatePipelineCache( vkGetDeviceProcAddr( device, "vkCreatePipelineCache" ) ); - vkDestroyPipelineCache = PFN_vkDestroyPipelineCache( vkGetDeviceProcAddr( device, "vkDestroyPipelineCache" ) ); - vkGetPipelineCacheData = PFN_vkGetPipelineCacheData( vkGetDeviceProcAddr( device, "vkGetPipelineCacheData" ) ); - 
- vkMergePipelineCaches = PFN_vkMergePipelineCaches( vkGetDeviceProcAddr( device, "vkMergePipelineCaches" ) );
- vkCreateGraphicsPipelines = PFN_vkCreateGraphicsPipelines( vkGetDeviceProcAddr( device, "vkCreateGraphicsPipelines" ) );
- vkCreateComputePipelines = PFN_vkCreateComputePipelines( vkGetDeviceProcAddr( device, "vkCreateComputePipelines" ) );
- vkDestroyPipeline = PFN_vkDestroyPipeline( vkGetDeviceProcAddr( device, "vkDestroyPipeline" ) );
- vkCreatePipelineLayout = PFN_vkCreatePipelineLayout( vkGetDeviceProcAddr( device, "vkCreatePipelineLayout" ) );
- vkDestroyPipelineLayout = PFN_vkDestroyPipelineLayout( vkGetDeviceProcAddr( device, "vkDestroyPipelineLayout" ) );
- vkCreateSampler = PFN_vkCreateSampler( vkGetDeviceProcAddr( device, "vkCreateSampler" ) );
- vkDestroySampler = PFN_vkDestroySampler( vkGetDeviceProcAddr( device, "vkDestroySampler" ) );
- vkCreateDescriptorSetLayout = PFN_vkCreateDescriptorSetLayout( vkGetDeviceProcAddr( device, "vkCreateDescriptorSetLayout" ) );
- vkDestroyDescriptorSetLayout = PFN_vkDestroyDescriptorSetLayout( vkGetDeviceProcAddr( device, "vkDestroyDescriptorSetLayout" ) );
- vkCreateDescriptorPool = PFN_vkCreateDescriptorPool( vkGetDeviceProcAddr( device, "vkCreateDescriptorPool" ) );
- vkDestroyDescriptorPool = PFN_vkDestroyDescriptorPool( vkGetDeviceProcAddr( device, "vkDestroyDescriptorPool" ) );
- vkResetDescriptorPool = PFN_vkResetDescriptorPool( vkGetDeviceProcAddr( device, "vkResetDescriptorPool" ) );
- vkAllocateDescriptorSets = PFN_vkAllocateDescriptorSets( vkGetDeviceProcAddr( device, "vkAllocateDescriptorSets" ) );
- vkFreeDescriptorSets = PFN_vkFreeDescriptorSets( vkGetDeviceProcAddr( device, "vkFreeDescriptorSets" ) );
- vkUpdateDescriptorSets = PFN_vkUpdateDescriptorSets( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSets" ) );
- vkCreateFramebuffer = PFN_vkCreateFramebuffer( vkGetDeviceProcAddr( device, "vkCreateFramebuffer" ) );
- vkDestroyFramebuffer = PFN_vkDestroyFramebuffer( vkGetDeviceProcAddr( device, "vkDestroyFramebuffer" ) );
- vkCreateRenderPass = PFN_vkCreateRenderPass( vkGetDeviceProcAddr( device, "vkCreateRenderPass" ) );
- vkDestroyRenderPass = PFN_vkDestroyRenderPass( vkGetDeviceProcAddr( device, "vkDestroyRenderPass" ) );
- vkGetRenderAreaGranularity = PFN_vkGetRenderAreaGranularity( vkGetDeviceProcAddr( device, "vkGetRenderAreaGranularity" ) );
- vkCreateCommandPool = PFN_vkCreateCommandPool( vkGetDeviceProcAddr( device, "vkCreateCommandPool" ) );
- vkDestroyCommandPool = PFN_vkDestroyCommandPool( vkGetDeviceProcAddr( device, "vkDestroyCommandPool" ) );
- vkResetCommandPool = PFN_vkResetCommandPool( vkGetDeviceProcAddr( device, "vkResetCommandPool" ) );
- vkAllocateCommandBuffers = PFN_vkAllocateCommandBuffers( vkGetDeviceProcAddr( device, "vkAllocateCommandBuffers" ) );
- vkFreeCommandBuffers = PFN_vkFreeCommandBuffers( vkGetDeviceProcAddr( device, "vkFreeCommandBuffers" ) );
- vkBeginCommandBuffer = PFN_vkBeginCommandBuffer( vkGetDeviceProcAddr( device, "vkBeginCommandBuffer" ) );
- vkEndCommandBuffer = PFN_vkEndCommandBuffer( vkGetDeviceProcAddr( device, "vkEndCommandBuffer" ) );
- vkResetCommandBuffer = PFN_vkResetCommandBuffer( vkGetDeviceProcAddr( device, "vkResetCommandBuffer" ) );
- vkCmdBindPipeline = PFN_vkCmdBindPipeline( vkGetDeviceProcAddr( device, "vkCmdBindPipeline" ) );
- vkCmdSetViewport = PFN_vkCmdSetViewport( vkGetDeviceProcAddr( device, "vkCmdSetViewport" ) );
- vkCmdSetScissor = PFN_vkCmdSetScissor( vkGetDeviceProcAddr( device, "vkCmdSetScissor" ) );
- vkCmdSetLineWidth = PFN_vkCmdSetLineWidth( vkGetDeviceProcAddr( device, "vkCmdSetLineWidth" ) );
- vkCmdSetDepthBias = PFN_vkCmdSetDepthBias( vkGetDeviceProcAddr( device, "vkCmdSetDepthBias" ) );
- vkCmdSetBlendConstants = PFN_vkCmdSetBlendConstants( vkGetDeviceProcAddr( device, "vkCmdSetBlendConstants" ) );
- vkCmdSetDepthBounds = PFN_vkCmdSetDepthBounds( vkGetDeviceProcAddr( device, "vkCmdSetDepthBounds" ) );
- vkCmdSetStencilCompareMask = PFN_vkCmdSetStencilCompareMask( vkGetDeviceProcAddr( device, "vkCmdSetStencilCompareMask" ) );
- vkCmdSetStencilWriteMask = PFN_vkCmdSetStencilWriteMask( vkGetDeviceProcAddr( device, "vkCmdSetStencilWriteMask" ) );
- vkCmdSetStencilReference = PFN_vkCmdSetStencilReference( vkGetDeviceProcAddr( device, "vkCmdSetStencilReference" ) );
- vkCmdBindDescriptorSets = PFN_vkCmdBindDescriptorSets( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorSets" ) );
- vkCmdBindIndexBuffer = PFN_vkCmdBindIndexBuffer( vkGetDeviceProcAddr( device, "vkCmdBindIndexBuffer" ) );
- vkCmdBindVertexBuffers = PFN_vkCmdBindVertexBuffers( vkGetDeviceProcAddr( device, "vkCmdBindVertexBuffers" ) );
- vkCmdDraw = PFN_vkCmdDraw( vkGetDeviceProcAddr( device, "vkCmdDraw" ) );
- vkCmdDrawIndexed = PFN_vkCmdDrawIndexed( vkGetDeviceProcAddr( device, "vkCmdDrawIndexed" ) );
- vkCmdDrawIndirect = PFN_vkCmdDrawIndirect( vkGetDeviceProcAddr( device, "vkCmdDrawIndirect" ) );
- vkCmdDrawIndexedIndirect = PFN_vkCmdDrawIndexedIndirect( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirect" ) );
- vkCmdDispatch = PFN_vkCmdDispatch( vkGetDeviceProcAddr( device, "vkCmdDispatch" ) );
- vkCmdDispatchIndirect = PFN_vkCmdDispatchIndirect( vkGetDeviceProcAddr( device, "vkCmdDispatchIndirect" ) );
- vkCmdCopyBuffer = PFN_vkCmdCopyBuffer( vkGetDeviceProcAddr( device, "vkCmdCopyBuffer" ) );
- vkCmdCopyImage = PFN_vkCmdCopyImage( vkGetDeviceProcAddr( device, "vkCmdCopyImage" ) );
- vkCmdBlitImage = PFN_vkCmdBlitImage( vkGetDeviceProcAddr( device, "vkCmdBlitImage" ) );
- vkCmdCopyBufferToImage = PFN_vkCmdCopyBufferToImage( vkGetDeviceProcAddr( device, "vkCmdCopyBufferToImage" ) );
- vkCmdCopyImageToBuffer = PFN_vkCmdCopyImageToBuffer( vkGetDeviceProcAddr( device, "vkCmdCopyImageToBuffer" ) );
- vkCmdUpdateBuffer = PFN_vkCmdUpdateBuffer( vkGetDeviceProcAddr( device, "vkCmdUpdateBuffer" ) );
- vkCmdFillBuffer = PFN_vkCmdFillBuffer( vkGetDeviceProcAddr( device, "vkCmdFillBuffer" ) );
- vkCmdClearColorImage = PFN_vkCmdClearColorImage( vkGetDeviceProcAddr( device, "vkCmdClearColorImage" ) );
- vkCmdClearDepthStencilImage = PFN_vkCmdClearDepthStencilImage( vkGetDeviceProcAddr( device, "vkCmdClearDepthStencilImage" ) );
- vkCmdClearAttachments = PFN_vkCmdClearAttachments( vkGetDeviceProcAddr( device, "vkCmdClearAttachments" ) );
- vkCmdResolveImage = PFN_vkCmdResolveImage( vkGetDeviceProcAddr( device, "vkCmdResolveImage" ) );
- vkCmdSetEvent = PFN_vkCmdSetEvent( vkGetDeviceProcAddr( device, "vkCmdSetEvent" ) );
- vkCmdResetEvent = PFN_vkCmdResetEvent( vkGetDeviceProcAddr( device, "vkCmdResetEvent" ) );
- vkCmdWaitEvents = PFN_vkCmdWaitEvents( vkGetDeviceProcAddr( device, "vkCmdWaitEvents" ) );
- vkCmdPipelineBarrier = PFN_vkCmdPipelineBarrier( vkGetDeviceProcAddr( device, "vkCmdPipelineBarrier" ) );
- vkCmdBeginQuery = PFN_vkCmdBeginQuery( vkGetDeviceProcAddr( device, "vkCmdBeginQuery" ) );
- vkCmdEndQuery = PFN_vkCmdEndQuery( vkGetDeviceProcAddr( device, "vkCmdEndQuery" ) );
- vkCmdResetQueryPool = PFN_vkCmdResetQueryPool( vkGetDeviceProcAddr( device, "vkCmdResetQueryPool" ) );
- vkCmdWriteTimestamp = PFN_vkCmdWriteTimestamp( vkGetDeviceProcAddr( device, "vkCmdWriteTimestamp" ) );
- vkCmdCopyQueryPoolResults = PFN_vkCmdCopyQueryPoolResults( vkGetDeviceProcAddr( device, "vkCmdCopyQueryPoolResults" ) );
- vkCmdPushConstants = PFN_vkCmdPushConstants( vkGetDeviceProcAddr( device, "vkCmdPushConstants" ) );
- vkCmdBeginRenderPass = PFN_vkCmdBeginRenderPass( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass" ) );
- vkCmdNextSubpass = PFN_vkCmdNextSubpass( vkGetDeviceProcAddr( device, "vkCmdNextSubpass" ) );
- vkCmdEndRenderPass = PFN_vkCmdEndRenderPass( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass" ) );
- vkCmdExecuteCommands = PFN_vkCmdExecuteCommands( vkGetDeviceProcAddr( device, "vkCmdExecuteCommands" ) );
+ PFN_vkGetDeviceProcAddr vkGetDeviceProcAddr = 0;
+ PFN_vkDestroyDevice vkDestroyDevice = 0;
+ PFN_vkGetDeviceQueue vkGetDeviceQueue = 0;
+ PFN_vkQueueSubmit vkQueueSubmit = 0;
+ PFN_vkQueueWaitIdle vkQueueWaitIdle = 0;
+ PFN_vkDeviceWaitIdle vkDeviceWaitIdle = 0;
+ PFN_vkAllocateMemory vkAllocateMemory = 0;
+ PFN_vkFreeMemory vkFreeMemory = 0;
+ PFN_vkMapMemory vkMapMemory = 0;
+ PFN_vkUnmapMemory vkUnmapMemory = 0;
+ PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges = 0;
+ PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges = 0;
+ PFN_vkGetDeviceMemoryCommitment vkGetDeviceMemoryCommitment = 0;
+ PFN_vkBindBufferMemory vkBindBufferMemory = 0;
+ PFN_vkBindImageMemory vkBindImageMemory = 0;
+ PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements = 0;
+ PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements = 0;
+ PFN_vkGetImageSparseMemoryRequirements vkGetImageSparseMemoryRequirements = 0;
+ PFN_vkQueueBindSparse vkQueueBindSparse = 0;
+ PFN_vkCreateFence vkCreateFence = 0;
+ PFN_vkDestroyFence vkDestroyFence = 0;
+ PFN_vkResetFences vkResetFences = 0;
+ PFN_vkGetFenceStatus vkGetFenceStatus = 0;
+ PFN_vkWaitForFences vkWaitForFences = 0;
+ PFN_vkCreateSemaphore vkCreateSemaphore = 0;
+ PFN_vkDestroySemaphore vkDestroySemaphore = 0;
+ PFN_vkCreateEvent vkCreateEvent = 0;
+ PFN_vkDestroyEvent vkDestroyEvent = 0;
+ PFN_vkGetEventStatus vkGetEventStatus = 0;
+ PFN_vkSetEvent vkSetEvent = 0;
+ PFN_vkResetEvent vkResetEvent = 0;
+ PFN_vkCreateQueryPool vkCreateQueryPool = 0;
+ PFN_vkDestroyQueryPool vkDestroyQueryPool = 0;
+ PFN_vkGetQueryPoolResults vkGetQueryPoolResults = 0;
+ PFN_vkCreateBuffer vkCreateBuffer = 0;
+ PFN_vkDestroyBuffer vkDestroyBuffer = 0;
+ PFN_vkCreateBufferView vkCreateBufferView = 0;
+ PFN_vkDestroyBufferView vkDestroyBufferView = 0;
+ PFN_vkCreateImage vkCreateImage = 0;
+ PFN_vkDestroyImage vkDestroyImage = 0;
+ PFN_vkGetImageSubresourceLayout vkGetImageSubresourceLayout = 0;
+ PFN_vkCreateImageView vkCreateImageView = 0;
+ PFN_vkDestroyImageView vkDestroyImageView = 0;
+ PFN_vkCreateShaderModule vkCreateShaderModule = 0;
+ PFN_vkDestroyShaderModule vkDestroyShaderModule = 0;
+ PFN_vkCreatePipelineCache vkCreatePipelineCache = 0;
+ PFN_vkDestroyPipelineCache vkDestroyPipelineCache = 0;
+ PFN_vkGetPipelineCacheData vkGetPipelineCacheData = 0;
+ PFN_vkMergePipelineCaches vkMergePipelineCaches = 0;
+ PFN_vkCreateGraphicsPipelines vkCreateGraphicsPipelines = 0;
+ PFN_vkCreateComputePipelines vkCreateComputePipelines = 0;
+ PFN_vkDestroyPipeline vkDestroyPipeline = 0;
+ PFN_vkCreatePipelineLayout vkCreatePipelineLayout = 0;
+ PFN_vkDestroyPipelineLayout vkDestroyPipelineLayout = 0;
+ PFN_vkCreateSampler vkCreateSampler = 0;
+ PFN_vkDestroySampler vkDestroySampler = 0;
+ PFN_vkCreateDescriptorSetLayout vkCreateDescriptorSetLayout = 0;
+ PFN_vkDestroyDescriptorSetLayout vkDestroyDescriptorSetLayout = 0;
+ PFN_vkCreateDescriptorPool vkCreateDescriptorPool = 0;
+ PFN_vkDestroyDescriptorPool vkDestroyDescriptorPool = 0;
+ PFN_vkResetDescriptorPool vkResetDescriptorPool = 0;
+ PFN_vkAllocateDescriptorSets vkAllocateDescriptorSets = 0;
+ PFN_vkFreeDescriptorSets vkFreeDescriptorSets = 0;
+ PFN_vkUpdateDescriptorSets vkUpdateDescriptorSets = 0;
+ PFN_vkCreateFramebuffer vkCreateFramebuffer = 0;
+ PFN_vkDestroyFramebuffer vkDestroyFramebuffer = 0;
+ PFN_vkCreateRenderPass vkCreateRenderPass = 0;
+ PFN_vkDestroyRenderPass vkDestroyRenderPass = 0;
+ PFN_vkGetRenderAreaGranularity vkGetRenderAreaGranularity = 0;
+ PFN_vkCreateCommandPool vkCreateCommandPool = 0;
+ PFN_vkDestroyCommandPool vkDestroyCommandPool = 0;
+ PFN_vkResetCommandPool vkResetCommandPool = 0;
+ PFN_vkAllocateCommandBuffers vkAllocateCommandBuffers = 0;
+ PFN_vkFreeCommandBuffers vkFreeCommandBuffers = 0;
+ PFN_vkBeginCommandBuffer vkBeginCommandBuffer = 0;
+ PFN_vkEndCommandBuffer vkEndCommandBuffer = 0;
+ PFN_vkResetCommandBuffer vkResetCommandBuffer = 0;
+ PFN_vkCmdBindPipeline vkCmdBindPipeline = 0;
+ PFN_vkCmdSetViewport vkCmdSetViewport = 0;
+ PFN_vkCmdSetScissor vkCmdSetScissor = 0;
+ PFN_vkCmdSetLineWidth vkCmdSetLineWidth = 0;
+ PFN_vkCmdSetDepthBias vkCmdSetDepthBias = 0;
+ PFN_vkCmdSetBlendConstants vkCmdSetBlendConstants = 0;
+ PFN_vkCmdSetDepthBounds vkCmdSetDepthBounds = 0;
+ PFN_vkCmdSetStencilCompareMask vkCmdSetStencilCompareMask = 0;
+ PFN_vkCmdSetStencilWriteMask vkCmdSetStencilWriteMask = 0;
+ PFN_vkCmdSetStencilReference vkCmdSetStencilReference = 0;
+ PFN_vkCmdBindDescriptorSets vkCmdBindDescriptorSets = 0;
+ PFN_vkCmdBindIndexBuffer vkCmdBindIndexBuffer = 0;
+ PFN_vkCmdBindVertexBuffers vkCmdBindVertexBuffers = 0;
+ PFN_vkCmdDraw vkCmdDraw = 0;
+ PFN_vkCmdDrawIndexed vkCmdDrawIndexed = 0;
+ PFN_vkCmdDrawIndirect vkCmdDrawIndirect = 0;
+ PFN_vkCmdDrawIndexedIndirect vkCmdDrawIndexedIndirect = 0;
+ PFN_vkCmdDispatch vkCmdDispatch = 0;
+ PFN_vkCmdDispatchIndirect vkCmdDispatchIndirect = 0;
+ PFN_vkCmdCopyBuffer vkCmdCopyBuffer = 0;
+ PFN_vkCmdCopyImage vkCmdCopyImage = 0;
+ PFN_vkCmdBlitImage vkCmdBlitImage = 0;
+ PFN_vkCmdCopyBufferToImage vkCmdCopyBufferToImage = 0;
+ PFN_vkCmdCopyImageToBuffer vkCmdCopyImageToBuffer = 0;
+ PFN_vkCmdUpdateBuffer vkCmdUpdateBuffer = 0;
+ PFN_vkCmdFillBuffer vkCmdFillBuffer = 0;
+ PFN_vkCmdClearColorImage vkCmdClearColorImage = 0;
+ PFN_vkCmdClearDepthStencilImage vkCmdClearDepthStencilImage = 0;
+ PFN_vkCmdClearAttachments vkCmdClearAttachments = 0;
+ PFN_vkCmdResolveImage vkCmdResolveImage = 0;
+ PFN_vkCmdSetEvent vkCmdSetEvent = 0;
+ PFN_vkCmdResetEvent vkCmdResetEvent = 0;
+ PFN_vkCmdWaitEvents vkCmdWaitEvents = 0;
+ PFN_vkCmdPipelineBarrier vkCmdPipelineBarrier = 0;
+ PFN_vkCmdBeginQuery vkCmdBeginQuery = 0;
+ PFN_vkCmdEndQuery vkCmdEndQuery = 0;
+ PFN_vkCmdResetQueryPool vkCmdResetQueryPool = 0;
+ PFN_vkCmdWriteTimestamp vkCmdWriteTimestamp = 0;
+ PFN_vkCmdCopyQueryPoolResults vkCmdCopyQueryPoolResults = 0;
+ PFN_vkCmdPushConstants vkCmdPushConstants = 0;
+ PFN_vkCmdBeginRenderPass vkCmdBeginRenderPass = 0;
+ PFN_vkCmdNextSubpass vkCmdNextSubpass = 0;
+ PFN_vkCmdEndRenderPass vkCmdEndRenderPass = 0;
+ PFN_vkCmdExecuteCommands vkCmdExecuteCommands = 0;
 //=== VK_VERSION_1_1 ===
- vkBindBufferMemory2 = PFN_vkBindBufferMemory2( vkGetDeviceProcAddr( device, "vkBindBufferMemory2" ) );
- vkBindImageMemory2 = PFN_vkBindImageMemory2( vkGetDeviceProcAddr( device, "vkBindImageMemory2" ) );
- vkGetDeviceGroupPeerMemoryFeatures = PFN_vkGetDeviceGroupPeerMemoryFeatures( vkGetDeviceProcAddr( device, "vkGetDeviceGroupPeerMemoryFeatures" ) );
- vkCmdSetDeviceMask = PFN_vkCmdSetDeviceMask( vkGetDeviceProcAddr( device, "vkCmdSetDeviceMask" ) );
- vkCmdDispatchBase = PFN_vkCmdDispatchBase( vkGetDeviceProcAddr( device, "vkCmdDispatchBase" ) );
- vkGetImageMemoryRequirements2 = PFN_vkGetImageMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements2" ) );
- vkGetBufferMemoryRequirements2 = PFN_vkGetBufferMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements2" ) );
- vkGetImageSparseMemoryRequirements2 = PFN_vkGetImageSparseMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements2" ) );
- vkTrimCommandPool = PFN_vkTrimCommandPool( vkGetDeviceProcAddr( device, "vkTrimCommandPool" ) );
- vkGetDeviceQueue2 = PFN_vkGetDeviceQueue2( vkGetDeviceProcAddr( device, "vkGetDeviceQueue2" ) );
- vkCreateSamplerYcbcrConversion = PFN_vkCreateSamplerYcbcrConversion( vkGetDeviceProcAddr( device, "vkCreateSamplerYcbcrConversion" ) );
- vkDestroySamplerYcbcrConversion = PFN_vkDestroySamplerYcbcrConversion( vkGetDeviceProcAddr( device, "vkDestroySamplerYcbcrConversion" ) );
- vkCreateDescriptorUpdateTemplate = PFN_vkCreateDescriptorUpdateTemplate( vkGetDeviceProcAddr( device, "vkCreateDescriptorUpdateTemplate" ) );
- vkDestroyDescriptorUpdateTemplate = PFN_vkDestroyDescriptorUpdateTemplate( vkGetDeviceProcAddr( device, "vkDestroyDescriptorUpdateTemplate" ) );
- vkUpdateDescriptorSetWithTemplate = PFN_vkUpdateDescriptorSetWithTemplate( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSetWithTemplate" ) );
- vkGetDescriptorSetLayoutSupport = PFN_vkGetDescriptorSetLayoutSupport( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutSupport" ) );
+ PFN_vkBindBufferMemory2 vkBindBufferMemory2 = 0;
+ PFN_vkBindImageMemory2 vkBindImageMemory2 = 0;
+ PFN_vkGetDeviceGroupPeerMemoryFeatures vkGetDeviceGroupPeerMemoryFeatures = 0;
+ PFN_vkCmdSetDeviceMask vkCmdSetDeviceMask = 0;
+ PFN_vkCmdDispatchBase vkCmdDispatchBase = 0;
+ PFN_vkGetImageMemoryRequirements2 vkGetImageMemoryRequirements2 = 0;
+ PFN_vkGetBufferMemoryRequirements2 vkGetBufferMemoryRequirements2 = 0;
+ PFN_vkGetImageSparseMemoryRequirements2 vkGetImageSparseMemoryRequirements2 = 0;
+ PFN_vkTrimCommandPool vkTrimCommandPool = 0;
+ PFN_vkGetDeviceQueue2 vkGetDeviceQueue2 = 0;
+ PFN_vkCreateSamplerYcbcrConversion vkCreateSamplerYcbcrConversion = 0;
+ PFN_vkDestroySamplerYcbcrConversion vkDestroySamplerYcbcrConversion = 0;
+ PFN_vkCreateDescriptorUpdateTemplate vkCreateDescriptorUpdateTemplate = 0;
+ PFN_vkDestroyDescriptorUpdateTemplate vkDestroyDescriptorUpdateTemplate = 0;
+ PFN_vkUpdateDescriptorSetWithTemplate vkUpdateDescriptorSetWithTemplate = 0;
+ PFN_vkGetDescriptorSetLayoutSupport vkGetDescriptorSetLayoutSupport = 0;
 //=== VK_VERSION_1_2 ===
- vkCmdDrawIndirectCount = PFN_vkCmdDrawIndirectCount( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCount" ) );
- vkCmdDrawIndexedIndirectCount = PFN_vkCmdDrawIndexedIndirectCount( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCount" ) );
- vkCreateRenderPass2 = PFN_vkCreateRenderPass2( vkGetDeviceProcAddr( device, "vkCreateRenderPass2" ) );
- vkCmdBeginRenderPass2 = PFN_vkCmdBeginRenderPass2( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass2" ) );
- vkCmdNextSubpass2 = PFN_vkCmdNextSubpass2( vkGetDeviceProcAddr( device, "vkCmdNextSubpass2" ) );
- vkCmdEndRenderPass2 = PFN_vkCmdEndRenderPass2( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass2" ) );
- vkResetQueryPool = PFN_vkResetQueryPool( vkGetDeviceProcAddr( device, "vkResetQueryPool" ) );
- vkGetSemaphoreCounterValue = PFN_vkGetSemaphoreCounterValue( vkGetDeviceProcAddr( device, "vkGetSemaphoreCounterValue" ) );
- vkWaitSemaphores = PFN_vkWaitSemaphores( vkGetDeviceProcAddr( device, "vkWaitSemaphores" ) );
- vkSignalSemaphore = PFN_vkSignalSemaphore( vkGetDeviceProcAddr( device, "vkSignalSemaphore" ) );
- vkGetBufferDeviceAddress = PFN_vkGetBufferDeviceAddress( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddress" ) );
- vkGetBufferOpaqueCaptureAddress = PFN_vkGetBufferOpaqueCaptureAddress( vkGetDeviceProcAddr( device, "vkGetBufferOpaqueCaptureAddress" ) );
- vkGetDeviceMemoryOpaqueCaptureAddress =
- PFN_vkGetDeviceMemoryOpaqueCaptureAddress( vkGetDeviceProcAddr( device, "vkGetDeviceMemoryOpaqueCaptureAddress" ) );
+ PFN_vkCmdDrawIndirectCount vkCmdDrawIndirectCount = 0;
+ PFN_vkCmdDrawIndexedIndirectCount vkCmdDrawIndexedIndirectCount = 0;
+ PFN_vkCreateRenderPass2 vkCreateRenderPass2 = 0;
+ PFN_vkCmdBeginRenderPass2 vkCmdBeginRenderPass2 = 0;
+ PFN_vkCmdNextSubpass2 vkCmdNextSubpass2 = 0;
+ PFN_vkCmdEndRenderPass2 vkCmdEndRenderPass2 = 0;
+ PFN_vkResetQueryPool vkResetQueryPool = 0;
+ PFN_vkGetSemaphoreCounterValue vkGetSemaphoreCounterValue = 0;
+ PFN_vkWaitSemaphores vkWaitSemaphores = 0;
+ PFN_vkSignalSemaphore vkSignalSemaphore = 0;
+ PFN_vkGetBufferDeviceAddress vkGetBufferDeviceAddress = 0;
+ PFN_vkGetBufferOpaqueCaptureAddress vkGetBufferOpaqueCaptureAddress = 0;
+ PFN_vkGetDeviceMemoryOpaqueCaptureAddress vkGetDeviceMemoryOpaqueCaptureAddress = 0;
 //=== VK_VERSION_1_3 ===
- vkCreatePrivateDataSlot = PFN_vkCreatePrivateDataSlot( vkGetDeviceProcAddr( device, "vkCreatePrivateDataSlot" ) );
- vkDestroyPrivateDataSlot = PFN_vkDestroyPrivateDataSlot( vkGetDeviceProcAddr( device, "vkDestroyPrivateDataSlot" ) );
- vkSetPrivateData = PFN_vkSetPrivateData( vkGetDeviceProcAddr( device, "vkSetPrivateData" ) );
- vkGetPrivateData = PFN_vkGetPrivateData( vkGetDeviceProcAddr( device, "vkGetPrivateData" ) );
- vkCmdSetEvent2 = PFN_vkCmdSetEvent2( vkGetDeviceProcAddr( device, "vkCmdSetEvent2" ) );
- vkCmdResetEvent2 = PFN_vkCmdResetEvent2( vkGetDeviceProcAddr( device, "vkCmdResetEvent2" ) );
- vkCmdWaitEvents2 = PFN_vkCmdWaitEvents2( vkGetDeviceProcAddr( device, "vkCmdWaitEvents2" ) );
- vkCmdPipelineBarrier2 = PFN_vkCmdPipelineBarrier2( vkGetDeviceProcAddr( device, "vkCmdPipelineBarrier2" ) );
- vkCmdWriteTimestamp2 = PFN_vkCmdWriteTimestamp2( vkGetDeviceProcAddr( device, "vkCmdWriteTimestamp2" ) );
- vkQueueSubmit2 = PFN_vkQueueSubmit2( vkGetDeviceProcAddr( device, "vkQueueSubmit2" ) );
- vkCmdCopyBuffer2 = PFN_vkCmdCopyBuffer2( vkGetDeviceProcAddr( device, "vkCmdCopyBuffer2" ) );
- vkCmdCopyImage2 = PFN_vkCmdCopyImage2( vkGetDeviceProcAddr( device, "vkCmdCopyImage2" ) );
- vkCmdCopyBufferToImage2 = PFN_vkCmdCopyBufferToImage2( vkGetDeviceProcAddr( device, "vkCmdCopyBufferToImage2" ) );
- vkCmdCopyImageToBuffer2 = PFN_vkCmdCopyImageToBuffer2( vkGetDeviceProcAddr( device, "vkCmdCopyImageToBuffer2" ) );
- vkCmdBlitImage2 = PFN_vkCmdBlitImage2( vkGetDeviceProcAddr( device, "vkCmdBlitImage2" ) );
- vkCmdResolveImage2 = PFN_vkCmdResolveImage2( vkGetDeviceProcAddr( device, "vkCmdResolveImage2" ) );
- vkCmdBeginRendering = PFN_vkCmdBeginRendering( vkGetDeviceProcAddr( device, "vkCmdBeginRendering" ) );
- vkCmdEndRendering = PFN_vkCmdEndRendering( vkGetDeviceProcAddr( device, "vkCmdEndRendering" ) );
- vkCmdSetCullMode = PFN_vkCmdSetCullMode( vkGetDeviceProcAddr(
device, "vkCmdSetCullMode" ) ); - vkCmdSetFrontFace = PFN_vkCmdSetFrontFace( vkGetDeviceProcAddr( device, "vkCmdSetFrontFace" ) ); - vkCmdSetPrimitiveTopology = PFN_vkCmdSetPrimitiveTopology( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveTopology" ) ); - vkCmdSetViewportWithCount = PFN_vkCmdSetViewportWithCount( vkGetDeviceProcAddr( device, "vkCmdSetViewportWithCount" ) ); - vkCmdSetScissorWithCount = PFN_vkCmdSetScissorWithCount( vkGetDeviceProcAddr( device, "vkCmdSetScissorWithCount" ) ); - vkCmdBindVertexBuffers2 = PFN_vkCmdBindVertexBuffers2( vkGetDeviceProcAddr( device, "vkCmdBindVertexBuffers2" ) ); - vkCmdSetDepthTestEnable = PFN_vkCmdSetDepthTestEnable( vkGetDeviceProcAddr( device, "vkCmdSetDepthTestEnable" ) ); - vkCmdSetDepthWriteEnable = PFN_vkCmdSetDepthWriteEnable( vkGetDeviceProcAddr( device, "vkCmdSetDepthWriteEnable" ) ); - vkCmdSetDepthCompareOp = PFN_vkCmdSetDepthCompareOp( vkGetDeviceProcAddr( device, "vkCmdSetDepthCompareOp" ) ); - vkCmdSetDepthBoundsTestEnable = PFN_vkCmdSetDepthBoundsTestEnable( vkGetDeviceProcAddr( device, "vkCmdSetDepthBoundsTestEnable" ) ); - vkCmdSetStencilTestEnable = PFN_vkCmdSetStencilTestEnable( vkGetDeviceProcAddr( device, "vkCmdSetStencilTestEnable" ) ); - vkCmdSetStencilOp = PFN_vkCmdSetStencilOp( vkGetDeviceProcAddr( device, "vkCmdSetStencilOp" ) ); - vkCmdSetRasterizerDiscardEnable = PFN_vkCmdSetRasterizerDiscardEnable( vkGetDeviceProcAddr( device, "vkCmdSetRasterizerDiscardEnable" ) ); - vkCmdSetDepthBiasEnable = PFN_vkCmdSetDepthBiasEnable( vkGetDeviceProcAddr( device, "vkCmdSetDepthBiasEnable" ) ); - vkCmdSetPrimitiveRestartEnable = PFN_vkCmdSetPrimitiveRestartEnable( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveRestartEnable" ) ); - vkGetDeviceBufferMemoryRequirements = PFN_vkGetDeviceBufferMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetDeviceBufferMemoryRequirements" ) ); - vkGetDeviceImageMemoryRequirements = PFN_vkGetDeviceImageMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetDeviceImageMemoryRequirements" ) ); - vkGetDeviceImageSparseMemoryRequirements = - PFN_vkGetDeviceImageSparseMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetDeviceImageSparseMemoryRequirements" ) ); + PFN_vkCreatePrivateDataSlot vkCreatePrivateDataSlot = 0; + PFN_vkDestroyPrivateDataSlot vkDestroyPrivateDataSlot = 0; + PFN_vkSetPrivateData vkSetPrivateData = 0; + PFN_vkGetPrivateData vkGetPrivateData = 0; + PFN_vkCmdSetEvent2 vkCmdSetEvent2 = 0; + PFN_vkCmdResetEvent2 vkCmdResetEvent2 = 0; + PFN_vkCmdWaitEvents2 vkCmdWaitEvents2 = 0; + PFN_vkCmdPipelineBarrier2 vkCmdPipelineBarrier2 = 0; + PFN_vkCmdWriteTimestamp2 vkCmdWriteTimestamp2 = 0; + PFN_vkQueueSubmit2 vkQueueSubmit2 = 0; + PFN_vkCmdCopyBuffer2 vkCmdCopyBuffer2 = 0; + PFN_vkCmdCopyImage2 vkCmdCopyImage2 = 0; + PFN_vkCmdCopyBufferToImage2 vkCmdCopyBufferToImage2 = 0; + PFN_vkCmdCopyImageToBuffer2 vkCmdCopyImageToBuffer2 = 0; + PFN_vkCmdBlitImage2 vkCmdBlitImage2 = 0; + PFN_vkCmdResolveImage2 vkCmdResolveImage2 = 0; + PFN_vkCmdBeginRendering vkCmdBeginRendering = 0; + PFN_vkCmdEndRendering vkCmdEndRendering = 0; + PFN_vkCmdSetCullMode vkCmdSetCullMode = 0; + PFN_vkCmdSetFrontFace vkCmdSetFrontFace = 0; + PFN_vkCmdSetPrimitiveTopology vkCmdSetPrimitiveTopology = 0; + PFN_vkCmdSetViewportWithCount vkCmdSetViewportWithCount = 0; + PFN_vkCmdSetScissorWithCount vkCmdSetScissorWithCount = 0; + PFN_vkCmdBindVertexBuffers2 vkCmdBindVertexBuffers2 = 0; + PFN_vkCmdSetDepthTestEnable vkCmdSetDepthTestEnable = 0; + PFN_vkCmdSetDepthWriteEnable vkCmdSetDepthWriteEnable = 0; + 
PFN_vkCmdSetDepthCompareOp vkCmdSetDepthCompareOp = 0; + PFN_vkCmdSetDepthBoundsTestEnable vkCmdSetDepthBoundsTestEnable = 0; + PFN_vkCmdSetStencilTestEnable vkCmdSetStencilTestEnable = 0; + PFN_vkCmdSetStencilOp vkCmdSetStencilOp = 0; + PFN_vkCmdSetRasterizerDiscardEnable vkCmdSetRasterizerDiscardEnable = 0; + PFN_vkCmdSetDepthBiasEnable vkCmdSetDepthBiasEnable = 0; + PFN_vkCmdSetPrimitiveRestartEnable vkCmdSetPrimitiveRestartEnable = 0; + PFN_vkGetDeviceBufferMemoryRequirements vkGetDeviceBufferMemoryRequirements = 0; + PFN_vkGetDeviceImageMemoryRequirements vkGetDeviceImageMemoryRequirements = 0; + PFN_vkGetDeviceImageSparseMemoryRequirements vkGetDeviceImageSparseMemoryRequirements = 0; //=== VK_KHR_swapchain === - vkCreateSwapchainKHR = PFN_vkCreateSwapchainKHR( vkGetDeviceProcAddr( device, "vkCreateSwapchainKHR" ) ); - vkDestroySwapchainKHR = PFN_vkDestroySwapchainKHR( vkGetDeviceProcAddr( device, "vkDestroySwapchainKHR" ) ); - vkGetSwapchainImagesKHR = PFN_vkGetSwapchainImagesKHR( vkGetDeviceProcAddr( device, "vkGetSwapchainImagesKHR" ) ); - vkAcquireNextImageKHR = PFN_vkAcquireNextImageKHR( vkGetDeviceProcAddr( device, "vkAcquireNextImageKHR" ) ); - vkQueuePresentKHR = PFN_vkQueuePresentKHR( vkGetDeviceProcAddr( device, "vkQueuePresentKHR" ) ); - vkGetDeviceGroupPresentCapabilitiesKHR = - PFN_vkGetDeviceGroupPresentCapabilitiesKHR( vkGetDeviceProcAddr( device, "vkGetDeviceGroupPresentCapabilitiesKHR" ) ); - vkGetDeviceGroupSurfacePresentModesKHR = - PFN_vkGetDeviceGroupSurfacePresentModesKHR( vkGetDeviceProcAddr( device, "vkGetDeviceGroupSurfacePresentModesKHR" ) ); - vkAcquireNextImage2KHR = PFN_vkAcquireNextImage2KHR( vkGetDeviceProcAddr( device, "vkAcquireNextImage2KHR" ) ); + PFN_vkCreateSwapchainKHR vkCreateSwapchainKHR = 0; + PFN_vkDestroySwapchainKHR vkDestroySwapchainKHR = 0; + PFN_vkGetSwapchainImagesKHR vkGetSwapchainImagesKHR = 0; + PFN_vkAcquireNextImageKHR vkAcquireNextImageKHR = 0; + PFN_vkQueuePresentKHR vkQueuePresentKHR = 0; + PFN_vkGetDeviceGroupPresentCapabilitiesKHR vkGetDeviceGroupPresentCapabilitiesKHR = 0; + PFN_vkGetDeviceGroupSurfacePresentModesKHR vkGetDeviceGroupSurfacePresentModesKHR = 0; + PFN_vkAcquireNextImage2KHR vkAcquireNextImage2KHR = 0; //=== VK_KHR_display_swapchain === - vkCreateSharedSwapchainsKHR = PFN_vkCreateSharedSwapchainsKHR( vkGetDeviceProcAddr( device, "vkCreateSharedSwapchainsKHR" ) ); + PFN_vkCreateSharedSwapchainsKHR vkCreateSharedSwapchainsKHR = 0; //=== VK_EXT_debug_marker === - vkDebugMarkerSetObjectTagEXT = PFN_vkDebugMarkerSetObjectTagEXT( vkGetDeviceProcAddr( device, "vkDebugMarkerSetObjectTagEXT" ) ); - vkDebugMarkerSetObjectNameEXT = PFN_vkDebugMarkerSetObjectNameEXT( vkGetDeviceProcAddr( device, "vkDebugMarkerSetObjectNameEXT" ) ); - vkCmdDebugMarkerBeginEXT = PFN_vkCmdDebugMarkerBeginEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerBeginEXT" ) ); - vkCmdDebugMarkerEndEXT = PFN_vkCmdDebugMarkerEndEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerEndEXT" ) ); - vkCmdDebugMarkerInsertEXT = PFN_vkCmdDebugMarkerInsertEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerInsertEXT" ) ); + PFN_vkDebugMarkerSetObjectTagEXT vkDebugMarkerSetObjectTagEXT = 0; + PFN_vkDebugMarkerSetObjectNameEXT vkDebugMarkerSetObjectNameEXT = 0; + PFN_vkCmdDebugMarkerBeginEXT vkCmdDebugMarkerBeginEXT = 0; + PFN_vkCmdDebugMarkerEndEXT vkCmdDebugMarkerEndEXT = 0; + PFN_vkCmdDebugMarkerInsertEXT vkCmdDebugMarkerInsertEXT = 0; //=== VK_KHR_video_queue === - vkCreateVideoSessionKHR = PFN_vkCreateVideoSessionKHR( vkGetDeviceProcAddr( device, 
"vkCreateVideoSessionKHR" ) ); - vkDestroyVideoSessionKHR = PFN_vkDestroyVideoSessionKHR( vkGetDeviceProcAddr( device, "vkDestroyVideoSessionKHR" ) ); - vkGetVideoSessionMemoryRequirementsKHR = - PFN_vkGetVideoSessionMemoryRequirementsKHR( vkGetDeviceProcAddr( device, "vkGetVideoSessionMemoryRequirementsKHR" ) ); - vkBindVideoSessionMemoryKHR = PFN_vkBindVideoSessionMemoryKHR( vkGetDeviceProcAddr( device, "vkBindVideoSessionMemoryKHR" ) ); - vkCreateVideoSessionParametersKHR = PFN_vkCreateVideoSessionParametersKHR( vkGetDeviceProcAddr( device, "vkCreateVideoSessionParametersKHR" ) ); - vkUpdateVideoSessionParametersKHR = PFN_vkUpdateVideoSessionParametersKHR( vkGetDeviceProcAddr( device, "vkUpdateVideoSessionParametersKHR" ) ); - vkDestroyVideoSessionParametersKHR = PFN_vkDestroyVideoSessionParametersKHR( vkGetDeviceProcAddr( device, "vkDestroyVideoSessionParametersKHR" ) ); - vkCmdBeginVideoCodingKHR = PFN_vkCmdBeginVideoCodingKHR( vkGetDeviceProcAddr( device, "vkCmdBeginVideoCodingKHR" ) ); - vkCmdEndVideoCodingKHR = PFN_vkCmdEndVideoCodingKHR( vkGetDeviceProcAddr( device, "vkCmdEndVideoCodingKHR" ) ); - vkCmdControlVideoCodingKHR = PFN_vkCmdControlVideoCodingKHR( vkGetDeviceProcAddr( device, "vkCmdControlVideoCodingKHR" ) ); + PFN_vkCreateVideoSessionKHR vkCreateVideoSessionKHR = 0; + PFN_vkDestroyVideoSessionKHR vkDestroyVideoSessionKHR = 0; + PFN_vkGetVideoSessionMemoryRequirementsKHR vkGetVideoSessionMemoryRequirementsKHR = 0; + PFN_vkBindVideoSessionMemoryKHR vkBindVideoSessionMemoryKHR = 0; + PFN_vkCreateVideoSessionParametersKHR vkCreateVideoSessionParametersKHR = 0; + PFN_vkUpdateVideoSessionParametersKHR vkUpdateVideoSessionParametersKHR = 0; + PFN_vkDestroyVideoSessionParametersKHR vkDestroyVideoSessionParametersKHR = 0; + PFN_vkCmdBeginVideoCodingKHR vkCmdBeginVideoCodingKHR = 0; + PFN_vkCmdEndVideoCodingKHR vkCmdEndVideoCodingKHR = 0; + PFN_vkCmdControlVideoCodingKHR vkCmdControlVideoCodingKHR = 0; //=== VK_KHR_video_decode_queue === - vkCmdDecodeVideoKHR = PFN_vkCmdDecodeVideoKHR( vkGetDeviceProcAddr( device, "vkCmdDecodeVideoKHR" ) ); + PFN_vkCmdDecodeVideoKHR vkCmdDecodeVideoKHR = 0; //=== VK_EXT_transform_feedback === - vkCmdBindTransformFeedbackBuffersEXT = - PFN_vkCmdBindTransformFeedbackBuffersEXT( vkGetDeviceProcAddr( device, "vkCmdBindTransformFeedbackBuffersEXT" ) ); - vkCmdBeginTransformFeedbackEXT = PFN_vkCmdBeginTransformFeedbackEXT( vkGetDeviceProcAddr( device, "vkCmdBeginTransformFeedbackEXT" ) ); - vkCmdEndTransformFeedbackEXT = PFN_vkCmdEndTransformFeedbackEXT( vkGetDeviceProcAddr( device, "vkCmdEndTransformFeedbackEXT" ) ); - vkCmdBeginQueryIndexedEXT = PFN_vkCmdBeginQueryIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdBeginQueryIndexedEXT" ) ); - vkCmdEndQueryIndexedEXT = PFN_vkCmdEndQueryIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdEndQueryIndexedEXT" ) ); - vkCmdDrawIndirectByteCountEXT = PFN_vkCmdDrawIndirectByteCountEXT( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectByteCountEXT" ) ); + PFN_vkCmdBindTransformFeedbackBuffersEXT vkCmdBindTransformFeedbackBuffersEXT = 0; + PFN_vkCmdBeginTransformFeedbackEXT vkCmdBeginTransformFeedbackEXT = 0; + PFN_vkCmdEndTransformFeedbackEXT vkCmdEndTransformFeedbackEXT = 0; + PFN_vkCmdBeginQueryIndexedEXT vkCmdBeginQueryIndexedEXT = 0; + PFN_vkCmdEndQueryIndexedEXT vkCmdEndQueryIndexedEXT = 0; + PFN_vkCmdDrawIndirectByteCountEXT vkCmdDrawIndirectByteCountEXT = 0; //=== VK_NVX_binary_import === - vkCreateCuModuleNVX = PFN_vkCreateCuModuleNVX( vkGetDeviceProcAddr( device, "vkCreateCuModuleNVX" ) ); - 
vkCreateCuFunctionNVX = PFN_vkCreateCuFunctionNVX( vkGetDeviceProcAddr( device, "vkCreateCuFunctionNVX" ) ); - vkDestroyCuModuleNVX = PFN_vkDestroyCuModuleNVX( vkGetDeviceProcAddr( device, "vkDestroyCuModuleNVX" ) ); - vkDestroyCuFunctionNVX = PFN_vkDestroyCuFunctionNVX( vkGetDeviceProcAddr( device, "vkDestroyCuFunctionNVX" ) ); - vkCmdCuLaunchKernelNVX = PFN_vkCmdCuLaunchKernelNVX( vkGetDeviceProcAddr( device, "vkCmdCuLaunchKernelNVX" ) ); + PFN_vkCreateCuModuleNVX vkCreateCuModuleNVX = 0; + PFN_vkCreateCuFunctionNVX vkCreateCuFunctionNVX = 0; + PFN_vkDestroyCuModuleNVX vkDestroyCuModuleNVX = 0; + PFN_vkDestroyCuFunctionNVX vkDestroyCuFunctionNVX = 0; + PFN_vkCmdCuLaunchKernelNVX vkCmdCuLaunchKernelNVX = 0; //=== VK_NVX_image_view_handle === - vkGetImageViewHandleNVX = PFN_vkGetImageViewHandleNVX( vkGetDeviceProcAddr( device, "vkGetImageViewHandleNVX" ) ); - vkGetImageViewAddressNVX = PFN_vkGetImageViewAddressNVX( vkGetDeviceProcAddr( device, "vkGetImageViewAddressNVX" ) ); + PFN_vkGetImageViewHandleNVX vkGetImageViewHandleNVX = 0; + PFN_vkGetImageViewAddressNVX vkGetImageViewAddressNVX = 0; //=== VK_AMD_draw_indirect_count === - vkCmdDrawIndirectCountAMD = PFN_vkCmdDrawIndirectCountAMD( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCountAMD" ) ); - if ( !vkCmdDrawIndirectCount ) - vkCmdDrawIndirectCount = vkCmdDrawIndirectCountAMD; - vkCmdDrawIndexedIndirectCountAMD = PFN_vkCmdDrawIndexedIndirectCountAMD( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCountAMD" ) ); - if ( !vkCmdDrawIndexedIndirectCount ) - vkCmdDrawIndexedIndirectCount = vkCmdDrawIndexedIndirectCountAMD; + PFN_vkCmdDrawIndirectCountAMD vkCmdDrawIndirectCountAMD = 0; + PFN_vkCmdDrawIndexedIndirectCountAMD vkCmdDrawIndexedIndirectCountAMD = 0; //=== VK_AMD_shader_info === - vkGetShaderInfoAMD = PFN_vkGetShaderInfoAMD( vkGetDeviceProcAddr( device, "vkGetShaderInfoAMD" ) ); + PFN_vkGetShaderInfoAMD vkGetShaderInfoAMD = 0; //=== VK_KHR_dynamic_rendering === - vkCmdBeginRenderingKHR = PFN_vkCmdBeginRenderingKHR( vkGetDeviceProcAddr( device, "vkCmdBeginRenderingKHR" ) ); - if ( !vkCmdBeginRendering ) - vkCmdBeginRendering = vkCmdBeginRenderingKHR; - vkCmdEndRenderingKHR = PFN_vkCmdEndRenderingKHR( vkGetDeviceProcAddr( device, "vkCmdEndRenderingKHR" ) ); - if ( !vkCmdEndRendering ) - vkCmdEndRendering = vkCmdEndRenderingKHR; + PFN_vkCmdBeginRenderingKHR vkCmdBeginRenderingKHR = 0; + PFN_vkCmdEndRenderingKHR vkCmdEndRenderingKHR = 0; # if defined( VK_USE_PLATFORM_WIN32_KHR ) //=== VK_NV_external_memory_win32 === - vkGetMemoryWin32HandleNV = PFN_vkGetMemoryWin32HandleNV( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandleNV" ) ); + PFN_vkGetMemoryWin32HandleNV vkGetMemoryWin32HandleNV = 0; +# else + PFN_dummy vkGetMemoryWin32HandleNV_placeholder = 0; # endif /*VK_USE_PLATFORM_WIN32_KHR*/ //=== VK_KHR_device_group === - vkGetDeviceGroupPeerMemoryFeaturesKHR = - PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR( vkGetDeviceProcAddr( device, "vkGetDeviceGroupPeerMemoryFeaturesKHR" ) ); - if ( !vkGetDeviceGroupPeerMemoryFeatures ) - vkGetDeviceGroupPeerMemoryFeatures = vkGetDeviceGroupPeerMemoryFeaturesKHR; - vkCmdSetDeviceMaskKHR = PFN_vkCmdSetDeviceMaskKHR( vkGetDeviceProcAddr( device, "vkCmdSetDeviceMaskKHR" ) ); - if ( !vkCmdSetDeviceMask ) - vkCmdSetDeviceMask = vkCmdSetDeviceMaskKHR; - vkCmdDispatchBaseKHR = PFN_vkCmdDispatchBaseKHR( vkGetDeviceProcAddr( device, "vkCmdDispatchBaseKHR" ) ); - if ( !vkCmdDispatchBase ) - vkCmdDispatchBase = vkCmdDispatchBaseKHR; + PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR 
vkGetDeviceGroupPeerMemoryFeaturesKHR = 0; + PFN_vkCmdSetDeviceMaskKHR vkCmdSetDeviceMaskKHR = 0; + PFN_vkCmdDispatchBaseKHR vkCmdDispatchBaseKHR = 0; //=== VK_KHR_maintenance1 === - vkTrimCommandPoolKHR = PFN_vkTrimCommandPoolKHR( vkGetDeviceProcAddr( device, "vkTrimCommandPoolKHR" ) ); - if ( !vkTrimCommandPool ) - vkTrimCommandPool = vkTrimCommandPoolKHR; + PFN_vkTrimCommandPoolKHR vkTrimCommandPoolKHR = 0; # if defined( VK_USE_PLATFORM_WIN32_KHR ) //=== VK_KHR_external_memory_win32 === - vkGetMemoryWin32HandleKHR = PFN_vkGetMemoryWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandleKHR" ) ); - vkGetMemoryWin32HandlePropertiesKHR = PFN_vkGetMemoryWin32HandlePropertiesKHR( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandlePropertiesKHR" ) ); + PFN_vkGetMemoryWin32HandleKHR vkGetMemoryWin32HandleKHR = 0; + PFN_vkGetMemoryWin32HandlePropertiesKHR vkGetMemoryWin32HandlePropertiesKHR = 0; +# else + PFN_dummy vkGetMemoryWin32HandleKHR_placeholder = 0; + PFN_dummy vkGetMemoryWin32HandlePropertiesKHR_placeholder = 0; # endif /*VK_USE_PLATFORM_WIN32_KHR*/ //=== VK_KHR_external_memory_fd === - vkGetMemoryFdKHR = PFN_vkGetMemoryFdKHR( vkGetDeviceProcAddr( device, "vkGetMemoryFdKHR" ) ); - vkGetMemoryFdPropertiesKHR = PFN_vkGetMemoryFdPropertiesKHR( vkGetDeviceProcAddr( device, "vkGetMemoryFdPropertiesKHR" ) ); + PFN_vkGetMemoryFdKHR vkGetMemoryFdKHR = 0; + PFN_vkGetMemoryFdPropertiesKHR vkGetMemoryFdPropertiesKHR = 0; # if defined( VK_USE_PLATFORM_WIN32_KHR ) //=== VK_KHR_external_semaphore_win32 === - vkImportSemaphoreWin32HandleKHR = PFN_vkImportSemaphoreWin32HandleKHR( vkGetDeviceProcAddr( device, "vkImportSemaphoreWin32HandleKHR" ) ); - vkGetSemaphoreWin32HandleKHR = PFN_vkGetSemaphoreWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreWin32HandleKHR" ) ); + PFN_vkImportSemaphoreWin32HandleKHR vkImportSemaphoreWin32HandleKHR = 0; + PFN_vkGetSemaphoreWin32HandleKHR vkGetSemaphoreWin32HandleKHR = 0; +# else + PFN_dummy vkImportSemaphoreWin32HandleKHR_placeholder = 0; + PFN_dummy vkGetSemaphoreWin32HandleKHR_placeholder = 0; # endif /*VK_USE_PLATFORM_WIN32_KHR*/ //=== VK_KHR_external_semaphore_fd === - vkImportSemaphoreFdKHR = PFN_vkImportSemaphoreFdKHR( vkGetDeviceProcAddr( device, "vkImportSemaphoreFdKHR" ) ); - vkGetSemaphoreFdKHR = PFN_vkGetSemaphoreFdKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreFdKHR" ) ); + PFN_vkImportSemaphoreFdKHR vkImportSemaphoreFdKHR = 0; + PFN_vkGetSemaphoreFdKHR vkGetSemaphoreFdKHR = 0; //=== VK_KHR_push_descriptor === - vkCmdPushDescriptorSetKHR = PFN_vkCmdPushDescriptorSetKHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetKHR" ) ); - vkCmdPushDescriptorSetWithTemplateKHR = - PFN_vkCmdPushDescriptorSetWithTemplateKHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetWithTemplateKHR" ) ); + PFN_vkCmdPushDescriptorSetKHR vkCmdPushDescriptorSetKHR = 0; + PFN_vkCmdPushDescriptorSetWithTemplateKHR vkCmdPushDescriptorSetWithTemplateKHR = 0; //=== VK_EXT_conditional_rendering === - vkCmdBeginConditionalRenderingEXT = PFN_vkCmdBeginConditionalRenderingEXT( vkGetDeviceProcAddr( device, "vkCmdBeginConditionalRenderingEXT" ) ); - vkCmdEndConditionalRenderingEXT = PFN_vkCmdEndConditionalRenderingEXT( vkGetDeviceProcAddr( device, "vkCmdEndConditionalRenderingEXT" ) ); + PFN_vkCmdBeginConditionalRenderingEXT vkCmdBeginConditionalRenderingEXT = 0; + PFN_vkCmdEndConditionalRenderingEXT vkCmdEndConditionalRenderingEXT = 0; //=== VK_KHR_descriptor_update_template === - vkCreateDescriptorUpdateTemplateKHR = 
PFN_vkCreateDescriptorUpdateTemplateKHR( vkGetDeviceProcAddr( device, "vkCreateDescriptorUpdateTemplateKHR" ) ); - if ( !vkCreateDescriptorUpdateTemplate ) - vkCreateDescriptorUpdateTemplate = vkCreateDescriptorUpdateTemplateKHR; - vkDestroyDescriptorUpdateTemplateKHR = - PFN_vkDestroyDescriptorUpdateTemplateKHR( vkGetDeviceProcAddr( device, "vkDestroyDescriptorUpdateTemplateKHR" ) ); - if ( !vkDestroyDescriptorUpdateTemplate ) - vkDestroyDescriptorUpdateTemplate = vkDestroyDescriptorUpdateTemplateKHR; - vkUpdateDescriptorSetWithTemplateKHR = - PFN_vkUpdateDescriptorSetWithTemplateKHR( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSetWithTemplateKHR" ) ); - if ( !vkUpdateDescriptorSetWithTemplate ) - vkUpdateDescriptorSetWithTemplate = vkUpdateDescriptorSetWithTemplateKHR; + PFN_vkCreateDescriptorUpdateTemplateKHR vkCreateDescriptorUpdateTemplateKHR = 0; + PFN_vkDestroyDescriptorUpdateTemplateKHR vkDestroyDescriptorUpdateTemplateKHR = 0; + PFN_vkUpdateDescriptorSetWithTemplateKHR vkUpdateDescriptorSetWithTemplateKHR = 0; //=== VK_NV_clip_space_w_scaling === - vkCmdSetViewportWScalingNV = PFN_vkCmdSetViewportWScalingNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportWScalingNV" ) ); + PFN_vkCmdSetViewportWScalingNV vkCmdSetViewportWScalingNV = 0; //=== VK_EXT_display_control === - vkDisplayPowerControlEXT = PFN_vkDisplayPowerControlEXT( vkGetDeviceProcAddr( device, "vkDisplayPowerControlEXT" ) ); - vkRegisterDeviceEventEXT = PFN_vkRegisterDeviceEventEXT( vkGetDeviceProcAddr( device, "vkRegisterDeviceEventEXT" ) ); - vkRegisterDisplayEventEXT = PFN_vkRegisterDisplayEventEXT( vkGetDeviceProcAddr( device, "vkRegisterDisplayEventEXT" ) ); - vkGetSwapchainCounterEXT = PFN_vkGetSwapchainCounterEXT( vkGetDeviceProcAddr( device, "vkGetSwapchainCounterEXT" ) ); + PFN_vkDisplayPowerControlEXT vkDisplayPowerControlEXT = 0; + PFN_vkRegisterDeviceEventEXT vkRegisterDeviceEventEXT = 0; + PFN_vkRegisterDisplayEventEXT vkRegisterDisplayEventEXT = 0; + PFN_vkGetSwapchainCounterEXT vkGetSwapchainCounterEXT = 0; //=== VK_GOOGLE_display_timing === - vkGetRefreshCycleDurationGOOGLE = PFN_vkGetRefreshCycleDurationGOOGLE( vkGetDeviceProcAddr( device, "vkGetRefreshCycleDurationGOOGLE" ) ); - vkGetPastPresentationTimingGOOGLE = PFN_vkGetPastPresentationTimingGOOGLE( vkGetDeviceProcAddr( device, "vkGetPastPresentationTimingGOOGLE" ) ); + PFN_vkGetRefreshCycleDurationGOOGLE vkGetRefreshCycleDurationGOOGLE = 0; + PFN_vkGetPastPresentationTimingGOOGLE vkGetPastPresentationTimingGOOGLE = 0; //=== VK_EXT_discard_rectangles === - vkCmdSetDiscardRectangleEXT = PFN_vkCmdSetDiscardRectangleEXT( vkGetDeviceProcAddr( device, "vkCmdSetDiscardRectangleEXT" ) ); - vkCmdSetDiscardRectangleEnableEXT = PFN_vkCmdSetDiscardRectangleEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDiscardRectangleEnableEXT" ) ); - vkCmdSetDiscardRectangleModeEXT = PFN_vkCmdSetDiscardRectangleModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetDiscardRectangleModeEXT" ) ); + PFN_vkCmdSetDiscardRectangleEXT vkCmdSetDiscardRectangleEXT = 0; + PFN_vkCmdSetDiscardRectangleEnableEXT vkCmdSetDiscardRectangleEnableEXT = 0; + PFN_vkCmdSetDiscardRectangleModeEXT vkCmdSetDiscardRectangleModeEXT = 0; //=== VK_EXT_hdr_metadata === - vkSetHdrMetadataEXT = PFN_vkSetHdrMetadataEXT( vkGetDeviceProcAddr( device, "vkSetHdrMetadataEXT" ) ); + PFN_vkSetHdrMetadataEXT vkSetHdrMetadataEXT = 0; //=== VK_KHR_create_renderpass2 === - vkCreateRenderPass2KHR = PFN_vkCreateRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCreateRenderPass2KHR" ) ); - if ( !vkCreateRenderPass2 ) - 
vkCreateRenderPass2 = vkCreateRenderPass2KHR; - vkCmdBeginRenderPass2KHR = PFN_vkCmdBeginRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass2KHR" ) ); - if ( !vkCmdBeginRenderPass2 ) - vkCmdBeginRenderPass2 = vkCmdBeginRenderPass2KHR; - vkCmdNextSubpass2KHR = PFN_vkCmdNextSubpass2KHR( vkGetDeviceProcAddr( device, "vkCmdNextSubpass2KHR" ) ); - if ( !vkCmdNextSubpass2 ) - vkCmdNextSubpass2 = vkCmdNextSubpass2KHR; - vkCmdEndRenderPass2KHR = PFN_vkCmdEndRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass2KHR" ) ); - if ( !vkCmdEndRenderPass2 ) - vkCmdEndRenderPass2 = vkCmdEndRenderPass2KHR; + PFN_vkCreateRenderPass2KHR vkCreateRenderPass2KHR = 0; + PFN_vkCmdBeginRenderPass2KHR vkCmdBeginRenderPass2KHR = 0; + PFN_vkCmdNextSubpass2KHR vkCmdNextSubpass2KHR = 0; + PFN_vkCmdEndRenderPass2KHR vkCmdEndRenderPass2KHR = 0; //=== VK_KHR_shared_presentable_image === - vkGetSwapchainStatusKHR = PFN_vkGetSwapchainStatusKHR( vkGetDeviceProcAddr( device, "vkGetSwapchainStatusKHR" ) ); + PFN_vkGetSwapchainStatusKHR vkGetSwapchainStatusKHR = 0; # if defined( VK_USE_PLATFORM_WIN32_KHR ) //=== VK_KHR_external_fence_win32 === - vkImportFenceWin32HandleKHR = PFN_vkImportFenceWin32HandleKHR( vkGetDeviceProcAddr( device, "vkImportFenceWin32HandleKHR" ) ); - vkGetFenceWin32HandleKHR = PFN_vkGetFenceWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetFenceWin32HandleKHR" ) ); + PFN_vkImportFenceWin32HandleKHR vkImportFenceWin32HandleKHR = 0; + PFN_vkGetFenceWin32HandleKHR vkGetFenceWin32HandleKHR = 0; +# else + PFN_dummy vkImportFenceWin32HandleKHR_placeholder = 0; + PFN_dummy vkGetFenceWin32HandleKHR_placeholder = 0; # endif /*VK_USE_PLATFORM_WIN32_KHR*/ //=== VK_KHR_external_fence_fd === - vkImportFenceFdKHR = PFN_vkImportFenceFdKHR( vkGetDeviceProcAddr( device, "vkImportFenceFdKHR" ) ); - vkGetFenceFdKHR = PFN_vkGetFenceFdKHR( vkGetDeviceProcAddr( device, "vkGetFenceFdKHR" ) ); + PFN_vkImportFenceFdKHR vkImportFenceFdKHR = 0; + PFN_vkGetFenceFdKHR vkGetFenceFdKHR = 0; //=== VK_KHR_performance_query === - vkAcquireProfilingLockKHR = PFN_vkAcquireProfilingLockKHR( vkGetDeviceProcAddr( device, "vkAcquireProfilingLockKHR" ) ); - vkReleaseProfilingLockKHR = PFN_vkReleaseProfilingLockKHR( vkGetDeviceProcAddr( device, "vkReleaseProfilingLockKHR" ) ); + PFN_vkAcquireProfilingLockKHR vkAcquireProfilingLockKHR = 0; + PFN_vkReleaseProfilingLockKHR vkReleaseProfilingLockKHR = 0; //=== VK_EXT_debug_utils === - vkSetDebugUtilsObjectNameEXT = PFN_vkSetDebugUtilsObjectNameEXT( vkGetDeviceProcAddr( device, "vkSetDebugUtilsObjectNameEXT" ) ); - vkSetDebugUtilsObjectTagEXT = PFN_vkSetDebugUtilsObjectTagEXT( vkGetDeviceProcAddr( device, "vkSetDebugUtilsObjectTagEXT" ) ); - vkQueueBeginDebugUtilsLabelEXT = PFN_vkQueueBeginDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueBeginDebugUtilsLabelEXT" ) ); - vkQueueEndDebugUtilsLabelEXT = PFN_vkQueueEndDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueEndDebugUtilsLabelEXT" ) ); - vkQueueInsertDebugUtilsLabelEXT = PFN_vkQueueInsertDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueInsertDebugUtilsLabelEXT" ) ); - vkCmdBeginDebugUtilsLabelEXT = PFN_vkCmdBeginDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdBeginDebugUtilsLabelEXT" ) ); - vkCmdEndDebugUtilsLabelEXT = PFN_vkCmdEndDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdEndDebugUtilsLabelEXT" ) ); - vkCmdInsertDebugUtilsLabelEXT = PFN_vkCmdInsertDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdInsertDebugUtilsLabelEXT" ) ); + PFN_vkSetDebugUtilsObjectNameEXT 
vkSetDebugUtilsObjectNameEXT = 0; + PFN_vkSetDebugUtilsObjectTagEXT vkSetDebugUtilsObjectTagEXT = 0; + PFN_vkQueueBeginDebugUtilsLabelEXT vkQueueBeginDebugUtilsLabelEXT = 0; + PFN_vkQueueEndDebugUtilsLabelEXT vkQueueEndDebugUtilsLabelEXT = 0; + PFN_vkQueueInsertDebugUtilsLabelEXT vkQueueInsertDebugUtilsLabelEXT = 0; + PFN_vkCmdBeginDebugUtilsLabelEXT vkCmdBeginDebugUtilsLabelEXT = 0; + PFN_vkCmdEndDebugUtilsLabelEXT vkCmdEndDebugUtilsLabelEXT = 0; + PFN_vkCmdInsertDebugUtilsLabelEXT vkCmdInsertDebugUtilsLabelEXT = 0; # if defined( VK_USE_PLATFORM_ANDROID_KHR ) //=== VK_ANDROID_external_memory_android_hardware_buffer === - vkGetAndroidHardwareBufferPropertiesANDROID = - PFN_vkGetAndroidHardwareBufferPropertiesANDROID( vkGetDeviceProcAddr( device, "vkGetAndroidHardwareBufferPropertiesANDROID" ) ); - vkGetMemoryAndroidHardwareBufferANDROID = - PFN_vkGetMemoryAndroidHardwareBufferANDROID( vkGetDeviceProcAddr( device, "vkGetMemoryAndroidHardwareBufferANDROID" ) ); + PFN_vkGetAndroidHardwareBufferPropertiesANDROID vkGetAndroidHardwareBufferPropertiesANDROID = 0; + PFN_vkGetMemoryAndroidHardwareBufferANDROID vkGetMemoryAndroidHardwareBufferANDROID = 0; +# else + PFN_dummy vkGetAndroidHardwareBufferPropertiesANDROID_placeholder = 0; + PFN_dummy vkGetMemoryAndroidHardwareBufferANDROID_placeholder = 0; # endif /*VK_USE_PLATFORM_ANDROID_KHR*/ # if defined( VK_ENABLE_BETA_EXTENSIONS ) //=== VK_AMDX_shader_enqueue === - vkCreateExecutionGraphPipelinesAMDX = PFN_vkCreateExecutionGraphPipelinesAMDX( vkGetDeviceProcAddr( device, "vkCreateExecutionGraphPipelinesAMDX" ) ); - vkGetExecutionGraphPipelineScratchSizeAMDX = - PFN_vkGetExecutionGraphPipelineScratchSizeAMDX( vkGetDeviceProcAddr( device, "vkGetExecutionGraphPipelineScratchSizeAMDX" ) ); - vkGetExecutionGraphPipelineNodeIndexAMDX = - PFN_vkGetExecutionGraphPipelineNodeIndexAMDX( vkGetDeviceProcAddr( device, "vkGetExecutionGraphPipelineNodeIndexAMDX" ) ); - vkCmdInitializeGraphScratchMemoryAMDX = - PFN_vkCmdInitializeGraphScratchMemoryAMDX( vkGetDeviceProcAddr( device, "vkCmdInitializeGraphScratchMemoryAMDX" ) ); - vkCmdDispatchGraphAMDX = PFN_vkCmdDispatchGraphAMDX( vkGetDeviceProcAddr( device, "vkCmdDispatchGraphAMDX" ) ); - vkCmdDispatchGraphIndirectAMDX = PFN_vkCmdDispatchGraphIndirectAMDX( vkGetDeviceProcAddr( device, "vkCmdDispatchGraphIndirectAMDX" ) ); - vkCmdDispatchGraphIndirectCountAMDX = PFN_vkCmdDispatchGraphIndirectCountAMDX( vkGetDeviceProcAddr( device, "vkCmdDispatchGraphIndirectCountAMDX" ) ); + PFN_vkCreateExecutionGraphPipelinesAMDX vkCreateExecutionGraphPipelinesAMDX = 0; + PFN_vkGetExecutionGraphPipelineScratchSizeAMDX vkGetExecutionGraphPipelineScratchSizeAMDX = 0; + PFN_vkGetExecutionGraphPipelineNodeIndexAMDX vkGetExecutionGraphPipelineNodeIndexAMDX = 0; + PFN_vkCmdInitializeGraphScratchMemoryAMDX vkCmdInitializeGraphScratchMemoryAMDX = 0; + PFN_vkCmdDispatchGraphAMDX vkCmdDispatchGraphAMDX = 0; + PFN_vkCmdDispatchGraphIndirectAMDX vkCmdDispatchGraphIndirectAMDX = 0; + PFN_vkCmdDispatchGraphIndirectCountAMDX vkCmdDispatchGraphIndirectCountAMDX = 0; +# else + PFN_dummy vkCreateExecutionGraphPipelinesAMDX_placeholder = 0; + PFN_dummy vkGetExecutionGraphPipelineScratchSizeAMDX_placeholder = 0; + PFN_dummy vkGetExecutionGraphPipelineNodeIndexAMDX_placeholder = 0; + PFN_dummy vkCmdInitializeGraphScratchMemoryAMDX_placeholder = 0; + PFN_dummy vkCmdDispatchGraphAMDX_placeholder = 0; + PFN_dummy vkCmdDispatchGraphIndirectAMDX_placeholder = 0; + PFN_dummy vkCmdDispatchGraphIndirectCountAMDX_placeholder = 0; # endif 
/*VK_ENABLE_BETA_EXTENSIONS*/ //=== VK_EXT_sample_locations === - vkCmdSetSampleLocationsEXT = PFN_vkCmdSetSampleLocationsEXT( vkGetDeviceProcAddr( device, "vkCmdSetSampleLocationsEXT" ) ); + PFN_vkCmdSetSampleLocationsEXT vkCmdSetSampleLocationsEXT = 0; //=== VK_KHR_get_memory_requirements2 === - vkGetImageMemoryRequirements2KHR = PFN_vkGetImageMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements2KHR" ) ); - if ( !vkGetImageMemoryRequirements2 ) - vkGetImageMemoryRequirements2 = vkGetImageMemoryRequirements2KHR; - vkGetBufferMemoryRequirements2KHR = PFN_vkGetBufferMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements2KHR" ) ); - if ( !vkGetBufferMemoryRequirements2 ) - vkGetBufferMemoryRequirements2 = vkGetBufferMemoryRequirements2KHR; - vkGetImageSparseMemoryRequirements2KHR = - PFN_vkGetImageSparseMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements2KHR" ) ); - if ( !vkGetImageSparseMemoryRequirements2 ) - vkGetImageSparseMemoryRequirements2 = vkGetImageSparseMemoryRequirements2KHR; + PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR = 0; + PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR = 0; + PFN_vkGetImageSparseMemoryRequirements2KHR vkGetImageSparseMemoryRequirements2KHR = 0; //=== VK_KHR_acceleration_structure === - vkCreateAccelerationStructureKHR = PFN_vkCreateAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCreateAccelerationStructureKHR" ) ); - vkDestroyAccelerationStructureKHR = PFN_vkDestroyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkDestroyAccelerationStructureKHR" ) ); - vkCmdBuildAccelerationStructuresKHR = PFN_vkCmdBuildAccelerationStructuresKHR( vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructuresKHR" ) ); - vkCmdBuildAccelerationStructuresIndirectKHR = - PFN_vkCmdBuildAccelerationStructuresIndirectKHR( vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructuresIndirectKHR" ) ); - vkBuildAccelerationStructuresKHR = PFN_vkBuildAccelerationStructuresKHR( vkGetDeviceProcAddr( device, "vkBuildAccelerationStructuresKHR" ) ); - vkCopyAccelerationStructureKHR = PFN_vkCopyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCopyAccelerationStructureKHR" ) ); - vkCopyAccelerationStructureToMemoryKHR = - PFN_vkCopyAccelerationStructureToMemoryKHR( vkGetDeviceProcAddr( device, "vkCopyAccelerationStructureToMemoryKHR" ) ); - vkCopyMemoryToAccelerationStructureKHR = - PFN_vkCopyMemoryToAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCopyMemoryToAccelerationStructureKHR" ) ); - vkWriteAccelerationStructuresPropertiesKHR = - PFN_vkWriteAccelerationStructuresPropertiesKHR( vkGetDeviceProcAddr( device, "vkWriteAccelerationStructuresPropertiesKHR" ) ); - vkCmdCopyAccelerationStructureKHR = PFN_vkCmdCopyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureKHR" ) ); - vkCmdCopyAccelerationStructureToMemoryKHR = - PFN_vkCmdCopyAccelerationStructureToMemoryKHR( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureToMemoryKHR" ) ); - vkCmdCopyMemoryToAccelerationStructureKHR = - PFN_vkCmdCopyMemoryToAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCmdCopyMemoryToAccelerationStructureKHR" ) ); - vkGetAccelerationStructureDeviceAddressKHR = - PFN_vkGetAccelerationStructureDeviceAddressKHR( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureDeviceAddressKHR" ) ); - vkCmdWriteAccelerationStructuresPropertiesKHR = - 
PFN_vkCmdWriteAccelerationStructuresPropertiesKHR( vkGetDeviceProcAddr( device, "vkCmdWriteAccelerationStructuresPropertiesKHR" ) ); - vkGetDeviceAccelerationStructureCompatibilityKHR = - PFN_vkGetDeviceAccelerationStructureCompatibilityKHR( vkGetDeviceProcAddr( device, "vkGetDeviceAccelerationStructureCompatibilityKHR" ) ); - vkGetAccelerationStructureBuildSizesKHR = - PFN_vkGetAccelerationStructureBuildSizesKHR( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureBuildSizesKHR" ) ); + PFN_vkCreateAccelerationStructureKHR vkCreateAccelerationStructureKHR = 0; + PFN_vkDestroyAccelerationStructureKHR vkDestroyAccelerationStructureKHR = 0; + PFN_vkCmdBuildAccelerationStructuresKHR vkCmdBuildAccelerationStructuresKHR = 0; + PFN_vkCmdBuildAccelerationStructuresIndirectKHR vkCmdBuildAccelerationStructuresIndirectKHR = 0; + PFN_vkBuildAccelerationStructuresKHR vkBuildAccelerationStructuresKHR = 0; + PFN_vkCopyAccelerationStructureKHR vkCopyAccelerationStructureKHR = 0; + PFN_vkCopyAccelerationStructureToMemoryKHR vkCopyAccelerationStructureToMemoryKHR = 0; + PFN_vkCopyMemoryToAccelerationStructureKHR vkCopyMemoryToAccelerationStructureKHR = 0; + PFN_vkWriteAccelerationStructuresPropertiesKHR vkWriteAccelerationStructuresPropertiesKHR = 0; + PFN_vkCmdCopyAccelerationStructureKHR vkCmdCopyAccelerationStructureKHR = 0; + PFN_vkCmdCopyAccelerationStructureToMemoryKHR vkCmdCopyAccelerationStructureToMemoryKHR = 0; + PFN_vkCmdCopyMemoryToAccelerationStructureKHR vkCmdCopyMemoryToAccelerationStructureKHR = 0; + PFN_vkGetAccelerationStructureDeviceAddressKHR vkGetAccelerationStructureDeviceAddressKHR = 0; + PFN_vkCmdWriteAccelerationStructuresPropertiesKHR vkCmdWriteAccelerationStructuresPropertiesKHR = 0; + PFN_vkGetDeviceAccelerationStructureCompatibilityKHR vkGetDeviceAccelerationStructureCompatibilityKHR = 0; + PFN_vkGetAccelerationStructureBuildSizesKHR vkGetAccelerationStructureBuildSizesKHR = 0; //=== VK_KHR_ray_tracing_pipeline === - vkCmdTraceRaysKHR = PFN_vkCmdTraceRaysKHR( vkGetDeviceProcAddr( device, "vkCmdTraceRaysKHR" ) ); - vkCreateRayTracingPipelinesKHR = PFN_vkCreateRayTracingPipelinesKHR( vkGetDeviceProcAddr( device, "vkCreateRayTracingPipelinesKHR" ) ); - vkGetRayTracingShaderGroupHandlesKHR = - PFN_vkGetRayTracingShaderGroupHandlesKHR( vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupHandlesKHR" ) ); - vkGetRayTracingCaptureReplayShaderGroupHandlesKHR = - PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( vkGetDeviceProcAddr( device, "vkGetRayTracingCaptureReplayShaderGroupHandlesKHR" ) ); - vkCmdTraceRaysIndirectKHR = PFN_vkCmdTraceRaysIndirectKHR( vkGetDeviceProcAddr( device, "vkCmdTraceRaysIndirectKHR" ) ); - vkGetRayTracingShaderGroupStackSizeKHR = - PFN_vkGetRayTracingShaderGroupStackSizeKHR( vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupStackSizeKHR" ) ); - vkCmdSetRayTracingPipelineStackSizeKHR = - PFN_vkCmdSetRayTracingPipelineStackSizeKHR( vkGetDeviceProcAddr( device, "vkCmdSetRayTracingPipelineStackSizeKHR" ) ); + PFN_vkCmdTraceRaysKHR vkCmdTraceRaysKHR = 0; + PFN_vkCreateRayTracingPipelinesKHR vkCreateRayTracingPipelinesKHR = 0; + PFN_vkGetRayTracingShaderGroupHandlesKHR vkGetRayTracingShaderGroupHandlesKHR = 0; + PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR vkGetRayTracingCaptureReplayShaderGroupHandlesKHR = 0; + PFN_vkCmdTraceRaysIndirectKHR vkCmdTraceRaysIndirectKHR = 0; + PFN_vkGetRayTracingShaderGroupStackSizeKHR vkGetRayTracingShaderGroupStackSizeKHR = 0; + PFN_vkCmdSetRayTracingPipelineStackSizeKHR 
vkCmdSetRayTracingPipelineStackSizeKHR = 0; //=== VK_KHR_sampler_ycbcr_conversion === - vkCreateSamplerYcbcrConversionKHR = PFN_vkCreateSamplerYcbcrConversionKHR( vkGetDeviceProcAddr( device, "vkCreateSamplerYcbcrConversionKHR" ) ); - if ( !vkCreateSamplerYcbcrConversion ) - vkCreateSamplerYcbcrConversion = vkCreateSamplerYcbcrConversionKHR; - vkDestroySamplerYcbcrConversionKHR = PFN_vkDestroySamplerYcbcrConversionKHR( vkGetDeviceProcAddr( device, "vkDestroySamplerYcbcrConversionKHR" ) ); - if ( !vkDestroySamplerYcbcrConversion ) - vkDestroySamplerYcbcrConversion = vkDestroySamplerYcbcrConversionKHR; + PFN_vkCreateSamplerYcbcrConversionKHR vkCreateSamplerYcbcrConversionKHR = 0; + PFN_vkDestroySamplerYcbcrConversionKHR vkDestroySamplerYcbcrConversionKHR = 0; //=== VK_KHR_bind_memory2 === - vkBindBufferMemory2KHR = PFN_vkBindBufferMemory2KHR( vkGetDeviceProcAddr( device, "vkBindBufferMemory2KHR" ) ); - if ( !vkBindBufferMemory2 ) - vkBindBufferMemory2 = vkBindBufferMemory2KHR; - vkBindImageMemory2KHR = PFN_vkBindImageMemory2KHR( vkGetDeviceProcAddr( device, "vkBindImageMemory2KHR" ) ); - if ( !vkBindImageMemory2 ) - vkBindImageMemory2 = vkBindImageMemory2KHR; + PFN_vkBindBufferMemory2KHR vkBindBufferMemory2KHR = 0; + PFN_vkBindImageMemory2KHR vkBindImageMemory2KHR = 0; //=== VK_EXT_image_drm_format_modifier === - vkGetImageDrmFormatModifierPropertiesEXT = - PFN_vkGetImageDrmFormatModifierPropertiesEXT( vkGetDeviceProcAddr( device, "vkGetImageDrmFormatModifierPropertiesEXT" ) ); + PFN_vkGetImageDrmFormatModifierPropertiesEXT vkGetImageDrmFormatModifierPropertiesEXT = 0; //=== VK_EXT_validation_cache === - vkCreateValidationCacheEXT = PFN_vkCreateValidationCacheEXT( vkGetDeviceProcAddr( device, "vkCreateValidationCacheEXT" ) ); - vkDestroyValidationCacheEXT = PFN_vkDestroyValidationCacheEXT( vkGetDeviceProcAddr( device, "vkDestroyValidationCacheEXT" ) ); - vkMergeValidationCachesEXT = PFN_vkMergeValidationCachesEXT( vkGetDeviceProcAddr( device, "vkMergeValidationCachesEXT" ) ); - vkGetValidationCacheDataEXT = PFN_vkGetValidationCacheDataEXT( vkGetDeviceProcAddr( device, "vkGetValidationCacheDataEXT" ) ); + PFN_vkCreateValidationCacheEXT vkCreateValidationCacheEXT = 0; + PFN_vkDestroyValidationCacheEXT vkDestroyValidationCacheEXT = 0; + PFN_vkMergeValidationCachesEXT vkMergeValidationCachesEXT = 0; + PFN_vkGetValidationCacheDataEXT vkGetValidationCacheDataEXT = 0; //=== VK_NV_shading_rate_image === - vkCmdBindShadingRateImageNV = PFN_vkCmdBindShadingRateImageNV( vkGetDeviceProcAddr( device, "vkCmdBindShadingRateImageNV" ) ); - vkCmdSetViewportShadingRatePaletteNV = - PFN_vkCmdSetViewportShadingRatePaletteNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportShadingRatePaletteNV" ) ); - vkCmdSetCoarseSampleOrderNV = PFN_vkCmdSetCoarseSampleOrderNV( vkGetDeviceProcAddr( device, "vkCmdSetCoarseSampleOrderNV" ) ); + PFN_vkCmdBindShadingRateImageNV vkCmdBindShadingRateImageNV = 0; + PFN_vkCmdSetViewportShadingRatePaletteNV vkCmdSetViewportShadingRatePaletteNV = 0; + PFN_vkCmdSetCoarseSampleOrderNV vkCmdSetCoarseSampleOrderNV = 0; //=== VK_NV_ray_tracing === - vkCreateAccelerationStructureNV = PFN_vkCreateAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCreateAccelerationStructureNV" ) ); - vkDestroyAccelerationStructureNV = PFN_vkDestroyAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkDestroyAccelerationStructureNV" ) ); - vkGetAccelerationStructureMemoryRequirementsNV = - PFN_vkGetAccelerationStructureMemoryRequirementsNV( vkGetDeviceProcAddr( device, 
"vkGetAccelerationStructureMemoryRequirementsNV" ) ); - vkBindAccelerationStructureMemoryNV = PFN_vkBindAccelerationStructureMemoryNV( vkGetDeviceProcAddr( device, "vkBindAccelerationStructureMemoryNV" ) ); - vkCmdBuildAccelerationStructureNV = PFN_vkCmdBuildAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructureNV" ) ); - vkCmdCopyAccelerationStructureNV = PFN_vkCmdCopyAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureNV" ) ); - vkCmdTraceRaysNV = PFN_vkCmdTraceRaysNV( vkGetDeviceProcAddr( device, "vkCmdTraceRaysNV" ) ); - vkCreateRayTracingPipelinesNV = PFN_vkCreateRayTracingPipelinesNV( vkGetDeviceProcAddr( device, "vkCreateRayTracingPipelinesNV" ) ); - vkGetRayTracingShaderGroupHandlesNV = PFN_vkGetRayTracingShaderGroupHandlesNV( vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupHandlesNV" ) ); - if ( !vkGetRayTracingShaderGroupHandlesKHR ) - vkGetRayTracingShaderGroupHandlesKHR = vkGetRayTracingShaderGroupHandlesNV; - vkGetAccelerationStructureHandleNV = PFN_vkGetAccelerationStructureHandleNV( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureHandleNV" ) ); - vkCmdWriteAccelerationStructuresPropertiesNV = - PFN_vkCmdWriteAccelerationStructuresPropertiesNV( vkGetDeviceProcAddr( device, "vkCmdWriteAccelerationStructuresPropertiesNV" ) ); - vkCompileDeferredNV = PFN_vkCompileDeferredNV( vkGetDeviceProcAddr( device, "vkCompileDeferredNV" ) ); + PFN_vkCreateAccelerationStructureNV vkCreateAccelerationStructureNV = 0; + PFN_vkDestroyAccelerationStructureNV vkDestroyAccelerationStructureNV = 0; + PFN_vkGetAccelerationStructureMemoryRequirementsNV vkGetAccelerationStructureMemoryRequirementsNV = 0; + PFN_vkBindAccelerationStructureMemoryNV vkBindAccelerationStructureMemoryNV = 0; + PFN_vkCmdBuildAccelerationStructureNV vkCmdBuildAccelerationStructureNV = 0; + PFN_vkCmdCopyAccelerationStructureNV vkCmdCopyAccelerationStructureNV = 0; + PFN_vkCmdTraceRaysNV vkCmdTraceRaysNV = 0; + PFN_vkCreateRayTracingPipelinesNV vkCreateRayTracingPipelinesNV = 0; + PFN_vkGetRayTracingShaderGroupHandlesNV vkGetRayTracingShaderGroupHandlesNV = 0; + PFN_vkGetAccelerationStructureHandleNV vkGetAccelerationStructureHandleNV = 0; + PFN_vkCmdWriteAccelerationStructuresPropertiesNV vkCmdWriteAccelerationStructuresPropertiesNV = 0; + PFN_vkCompileDeferredNV vkCompileDeferredNV = 0; //=== VK_KHR_maintenance3 === - vkGetDescriptorSetLayoutSupportKHR = PFN_vkGetDescriptorSetLayoutSupportKHR( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutSupportKHR" ) ); - if ( !vkGetDescriptorSetLayoutSupport ) - vkGetDescriptorSetLayoutSupport = vkGetDescriptorSetLayoutSupportKHR; + PFN_vkGetDescriptorSetLayoutSupportKHR vkGetDescriptorSetLayoutSupportKHR = 0; //=== VK_KHR_draw_indirect_count === - vkCmdDrawIndirectCountKHR = PFN_vkCmdDrawIndirectCountKHR( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCountKHR" ) ); - if ( !vkCmdDrawIndirectCount ) - vkCmdDrawIndirectCount = vkCmdDrawIndirectCountKHR; - vkCmdDrawIndexedIndirectCountKHR = PFN_vkCmdDrawIndexedIndirectCountKHR( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCountKHR" ) ); - if ( !vkCmdDrawIndexedIndirectCount ) - vkCmdDrawIndexedIndirectCount = vkCmdDrawIndexedIndirectCountKHR; + PFN_vkCmdDrawIndirectCountKHR vkCmdDrawIndirectCountKHR = 0; + PFN_vkCmdDrawIndexedIndirectCountKHR vkCmdDrawIndexedIndirectCountKHR = 0; //=== VK_EXT_external_memory_host === - vkGetMemoryHostPointerPropertiesEXT = PFN_vkGetMemoryHostPointerPropertiesEXT( vkGetDeviceProcAddr( device, 
"vkGetMemoryHostPointerPropertiesEXT" ) ); + PFN_vkGetMemoryHostPointerPropertiesEXT vkGetMemoryHostPointerPropertiesEXT = 0; //=== VK_AMD_buffer_marker === - vkCmdWriteBufferMarkerAMD = PFN_vkCmdWriteBufferMarkerAMD( vkGetDeviceProcAddr( device, "vkCmdWriteBufferMarkerAMD" ) ); + PFN_vkCmdWriteBufferMarkerAMD vkCmdWriteBufferMarkerAMD = 0; + PFN_vkCmdWriteBufferMarker2AMD vkCmdWriteBufferMarker2AMD = 0; //=== VK_EXT_calibrated_timestamps === - vkGetCalibratedTimestampsEXT = PFN_vkGetCalibratedTimestampsEXT( vkGetDeviceProcAddr( device, "vkGetCalibratedTimestampsEXT" ) ); - if ( !vkGetCalibratedTimestampsKHR ) - vkGetCalibratedTimestampsKHR = vkGetCalibratedTimestampsEXT; + PFN_vkGetCalibratedTimestampsEXT vkGetCalibratedTimestampsEXT = 0; //=== VK_NV_mesh_shader === - vkCmdDrawMeshTasksNV = PFN_vkCmdDrawMeshTasksNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksNV" ) ); - vkCmdDrawMeshTasksIndirectNV = PFN_vkCmdDrawMeshTasksIndirectNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectNV" ) ); - vkCmdDrawMeshTasksIndirectCountNV = PFN_vkCmdDrawMeshTasksIndirectCountNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectCountNV" ) ); + PFN_vkCmdDrawMeshTasksNV vkCmdDrawMeshTasksNV = 0; + PFN_vkCmdDrawMeshTasksIndirectNV vkCmdDrawMeshTasksIndirectNV = 0; + PFN_vkCmdDrawMeshTasksIndirectCountNV vkCmdDrawMeshTasksIndirectCountNV = 0; //=== VK_NV_scissor_exclusive === - vkCmdSetExclusiveScissorEnableNV = PFN_vkCmdSetExclusiveScissorEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetExclusiveScissorEnableNV" ) ); - vkCmdSetExclusiveScissorNV = PFN_vkCmdSetExclusiveScissorNV( vkGetDeviceProcAddr( device, "vkCmdSetExclusiveScissorNV" ) ); + PFN_vkCmdSetExclusiveScissorEnableNV vkCmdSetExclusiveScissorEnableNV = 0; + PFN_vkCmdSetExclusiveScissorNV vkCmdSetExclusiveScissorNV = 0; //=== VK_NV_device_diagnostic_checkpoints === - vkCmdSetCheckpointNV = PFN_vkCmdSetCheckpointNV( vkGetDeviceProcAddr( device, "vkCmdSetCheckpointNV" ) ); - vkGetQueueCheckpointDataNV = PFN_vkGetQueueCheckpointDataNV( vkGetDeviceProcAddr( device, "vkGetQueueCheckpointDataNV" ) ); + PFN_vkCmdSetCheckpointNV vkCmdSetCheckpointNV = 0; + PFN_vkGetQueueCheckpointDataNV vkGetQueueCheckpointDataNV = 0; + PFN_vkGetQueueCheckpointData2NV vkGetQueueCheckpointData2NV = 0; //=== VK_KHR_timeline_semaphore === - vkGetSemaphoreCounterValueKHR = PFN_vkGetSemaphoreCounterValueKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreCounterValueKHR" ) ); - if ( !vkGetSemaphoreCounterValue ) - vkGetSemaphoreCounterValue = vkGetSemaphoreCounterValueKHR; - vkWaitSemaphoresKHR = PFN_vkWaitSemaphoresKHR( vkGetDeviceProcAddr( device, "vkWaitSemaphoresKHR" ) ); - if ( !vkWaitSemaphores ) - vkWaitSemaphores = vkWaitSemaphoresKHR; - vkSignalSemaphoreKHR = PFN_vkSignalSemaphoreKHR( vkGetDeviceProcAddr( device, "vkSignalSemaphoreKHR" ) ); - if ( !vkSignalSemaphore ) - vkSignalSemaphore = vkSignalSemaphoreKHR; + PFN_vkGetSemaphoreCounterValueKHR vkGetSemaphoreCounterValueKHR = 0; + PFN_vkWaitSemaphoresKHR vkWaitSemaphoresKHR = 0; + PFN_vkSignalSemaphoreKHR vkSignalSemaphoreKHR = 0; //=== VK_INTEL_performance_query === - vkInitializePerformanceApiINTEL = PFN_vkInitializePerformanceApiINTEL( vkGetDeviceProcAddr( device, "vkInitializePerformanceApiINTEL" ) ); - vkUninitializePerformanceApiINTEL = PFN_vkUninitializePerformanceApiINTEL( vkGetDeviceProcAddr( device, "vkUninitializePerformanceApiINTEL" ) ); - vkCmdSetPerformanceMarkerINTEL = PFN_vkCmdSetPerformanceMarkerINTEL( vkGetDeviceProcAddr( device, "vkCmdSetPerformanceMarkerINTEL" ) ); - 
vkCmdSetPerformanceStreamMarkerINTEL = - PFN_vkCmdSetPerformanceStreamMarkerINTEL( vkGetDeviceProcAddr( device, "vkCmdSetPerformanceStreamMarkerINTEL" ) ); - vkCmdSetPerformanceOverrideINTEL = PFN_vkCmdSetPerformanceOverrideINTEL( vkGetDeviceProcAddr( device, "vkCmdSetPerformanceOverrideINTEL" ) ); - vkAcquirePerformanceConfigurationINTEL = - PFN_vkAcquirePerformanceConfigurationINTEL( vkGetDeviceProcAddr( device, "vkAcquirePerformanceConfigurationINTEL" ) ); - vkReleasePerformanceConfigurationINTEL = - PFN_vkReleasePerformanceConfigurationINTEL( vkGetDeviceProcAddr( device, "vkReleasePerformanceConfigurationINTEL" ) ); - vkQueueSetPerformanceConfigurationINTEL = - PFN_vkQueueSetPerformanceConfigurationINTEL( vkGetDeviceProcAddr( device, "vkQueueSetPerformanceConfigurationINTEL" ) ); - vkGetPerformanceParameterINTEL = PFN_vkGetPerformanceParameterINTEL( vkGetDeviceProcAddr( device, "vkGetPerformanceParameterINTEL" ) ); + PFN_vkInitializePerformanceApiINTEL vkInitializePerformanceApiINTEL = 0; + PFN_vkUninitializePerformanceApiINTEL vkUninitializePerformanceApiINTEL = 0; + PFN_vkCmdSetPerformanceMarkerINTEL vkCmdSetPerformanceMarkerINTEL = 0; + PFN_vkCmdSetPerformanceStreamMarkerINTEL vkCmdSetPerformanceStreamMarkerINTEL = 0; + PFN_vkCmdSetPerformanceOverrideINTEL vkCmdSetPerformanceOverrideINTEL = 0; + PFN_vkAcquirePerformanceConfigurationINTEL vkAcquirePerformanceConfigurationINTEL = 0; + PFN_vkReleasePerformanceConfigurationINTEL vkReleasePerformanceConfigurationINTEL = 0; + PFN_vkQueueSetPerformanceConfigurationINTEL vkQueueSetPerformanceConfigurationINTEL = 0; + PFN_vkGetPerformanceParameterINTEL vkGetPerformanceParameterINTEL = 0; //=== VK_AMD_display_native_hdr === - vkSetLocalDimmingAMD = PFN_vkSetLocalDimmingAMD( vkGetDeviceProcAddr( device, "vkSetLocalDimmingAMD" ) ); + PFN_vkSetLocalDimmingAMD vkSetLocalDimmingAMD = 0; //=== VK_KHR_fragment_shading_rate === - vkCmdSetFragmentShadingRateKHR = PFN_vkCmdSetFragmentShadingRateKHR( vkGetDeviceProcAddr( device, "vkCmdSetFragmentShadingRateKHR" ) ); + PFN_vkCmdSetFragmentShadingRateKHR vkCmdSetFragmentShadingRateKHR = 0; //=== VK_KHR_dynamic_rendering_local_read === - vkCmdSetRenderingAttachmentLocationsKHR = - PFN_vkCmdSetRenderingAttachmentLocationsKHR( vkGetDeviceProcAddr( device, "vkCmdSetRenderingAttachmentLocationsKHR" ) ); - vkCmdSetRenderingInputAttachmentIndicesKHR = - PFN_vkCmdSetRenderingInputAttachmentIndicesKHR( vkGetDeviceProcAddr( device, "vkCmdSetRenderingInputAttachmentIndicesKHR" ) ); + PFN_vkCmdSetRenderingAttachmentLocationsKHR vkCmdSetRenderingAttachmentLocationsKHR = 0; + PFN_vkCmdSetRenderingInputAttachmentIndicesKHR vkCmdSetRenderingInputAttachmentIndicesKHR = 0; //=== VK_EXT_buffer_device_address === - vkGetBufferDeviceAddressEXT = PFN_vkGetBufferDeviceAddressEXT( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddressEXT" ) ); - if ( !vkGetBufferDeviceAddress ) - vkGetBufferDeviceAddress = vkGetBufferDeviceAddressEXT; + PFN_vkGetBufferDeviceAddressEXT vkGetBufferDeviceAddressEXT = 0; //=== VK_KHR_present_wait === - vkWaitForPresentKHR = PFN_vkWaitForPresentKHR( vkGetDeviceProcAddr( device, "vkWaitForPresentKHR" ) ); + PFN_vkWaitForPresentKHR vkWaitForPresentKHR = 0; # if defined( VK_USE_PLATFORM_WIN32_KHR ) //=== VK_EXT_full_screen_exclusive === - vkAcquireFullScreenExclusiveModeEXT = PFN_vkAcquireFullScreenExclusiveModeEXT( vkGetDeviceProcAddr( device, "vkAcquireFullScreenExclusiveModeEXT" ) ); - vkReleaseFullScreenExclusiveModeEXT = PFN_vkReleaseFullScreenExclusiveModeEXT( vkGetDeviceProcAddr( device, 
"vkReleaseFullScreenExclusiveModeEXT" ) ); - vkGetDeviceGroupSurfacePresentModes2EXT = - PFN_vkGetDeviceGroupSurfacePresentModes2EXT( vkGetDeviceProcAddr( device, "vkGetDeviceGroupSurfacePresentModes2EXT" ) ); + PFN_vkAcquireFullScreenExclusiveModeEXT vkAcquireFullScreenExclusiveModeEXT = 0; + PFN_vkReleaseFullScreenExclusiveModeEXT vkReleaseFullScreenExclusiveModeEXT = 0; + PFN_vkGetDeviceGroupSurfacePresentModes2EXT vkGetDeviceGroupSurfacePresentModes2EXT = 0; +# else + PFN_dummy vkAcquireFullScreenExclusiveModeEXT_placeholder = 0; + PFN_dummy vkReleaseFullScreenExclusiveModeEXT_placeholder = 0; + PFN_dummy vkGetDeviceGroupSurfacePresentModes2EXT_placeholder = 0; # endif /*VK_USE_PLATFORM_WIN32_KHR*/ //=== VK_KHR_buffer_device_address === - vkGetBufferDeviceAddressKHR = PFN_vkGetBufferDeviceAddressKHR( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddressKHR" ) ); - if ( !vkGetBufferDeviceAddress ) - vkGetBufferDeviceAddress = vkGetBufferDeviceAddressKHR; - vkGetBufferOpaqueCaptureAddressKHR = PFN_vkGetBufferOpaqueCaptureAddressKHR( vkGetDeviceProcAddr( device, "vkGetBufferOpaqueCaptureAddressKHR" ) ); - if ( !vkGetBufferOpaqueCaptureAddress ) - vkGetBufferOpaqueCaptureAddress = vkGetBufferOpaqueCaptureAddressKHR; - vkGetDeviceMemoryOpaqueCaptureAddressKHR = - PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR( vkGetDeviceProcAddr( device, "vkGetDeviceMemoryOpaqueCaptureAddressKHR" ) ); - if ( !vkGetDeviceMemoryOpaqueCaptureAddress ) - vkGetDeviceMemoryOpaqueCaptureAddress = vkGetDeviceMemoryOpaqueCaptureAddressKHR; + PFN_vkGetBufferDeviceAddressKHR vkGetBufferDeviceAddressKHR = 0; + PFN_vkGetBufferOpaqueCaptureAddressKHR vkGetBufferOpaqueCaptureAddressKHR = 0; + PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR vkGetDeviceMemoryOpaqueCaptureAddressKHR = 0; //=== VK_EXT_line_rasterization === - vkCmdSetLineStippleEXT = PFN_vkCmdSetLineStippleEXT( vkGetDeviceProcAddr( device, "vkCmdSetLineStippleEXT" ) ); - if ( !vkCmdSetLineStippleKHR ) - vkCmdSetLineStippleKHR = vkCmdSetLineStippleEXT; + PFN_vkCmdSetLineStippleEXT vkCmdSetLineStippleEXT = 0; //=== VK_EXT_host_query_reset === - vkResetQueryPoolEXT = PFN_vkResetQueryPoolEXT( vkGetDeviceProcAddr( device, "vkResetQueryPoolEXT" ) ); - if ( !vkResetQueryPool ) - vkResetQueryPool = vkResetQueryPoolEXT; + PFN_vkResetQueryPoolEXT vkResetQueryPoolEXT = 0; //=== VK_EXT_extended_dynamic_state === - vkCmdSetCullModeEXT = PFN_vkCmdSetCullModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetCullModeEXT" ) ); - if ( !vkCmdSetCullMode ) - vkCmdSetCullMode = vkCmdSetCullModeEXT; - vkCmdSetFrontFaceEXT = PFN_vkCmdSetFrontFaceEXT( vkGetDeviceProcAddr( device, "vkCmdSetFrontFaceEXT" ) ); - if ( !vkCmdSetFrontFace ) - vkCmdSetFrontFace = vkCmdSetFrontFaceEXT; - vkCmdSetPrimitiveTopologyEXT = PFN_vkCmdSetPrimitiveTopologyEXT( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveTopologyEXT" ) ); - if ( !vkCmdSetPrimitiveTopology ) - vkCmdSetPrimitiveTopology = vkCmdSetPrimitiveTopologyEXT; - vkCmdSetViewportWithCountEXT = PFN_vkCmdSetViewportWithCountEXT( vkGetDeviceProcAddr( device, "vkCmdSetViewportWithCountEXT" ) ); - if ( !vkCmdSetViewportWithCount ) - vkCmdSetViewportWithCount = vkCmdSetViewportWithCountEXT; - vkCmdSetScissorWithCountEXT = PFN_vkCmdSetScissorWithCountEXT( vkGetDeviceProcAddr( device, "vkCmdSetScissorWithCountEXT" ) ); - if ( !vkCmdSetScissorWithCount ) - vkCmdSetScissorWithCount = vkCmdSetScissorWithCountEXT; - vkCmdBindVertexBuffers2EXT = PFN_vkCmdBindVertexBuffers2EXT( vkGetDeviceProcAddr( device, "vkCmdBindVertexBuffers2EXT" ) ); - if ( 
!vkCmdBindVertexBuffers2 ) - vkCmdBindVertexBuffers2 = vkCmdBindVertexBuffers2EXT; - vkCmdSetDepthTestEnableEXT = PFN_vkCmdSetDepthTestEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthTestEnableEXT" ) ); - if ( !vkCmdSetDepthTestEnable ) - vkCmdSetDepthTestEnable = vkCmdSetDepthTestEnableEXT; - vkCmdSetDepthWriteEnableEXT = PFN_vkCmdSetDepthWriteEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthWriteEnableEXT" ) ); - if ( !vkCmdSetDepthWriteEnable ) - vkCmdSetDepthWriteEnable = vkCmdSetDepthWriteEnableEXT; - vkCmdSetDepthCompareOpEXT = PFN_vkCmdSetDepthCompareOpEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthCompareOpEXT" ) ); - if ( !vkCmdSetDepthCompareOp ) - vkCmdSetDepthCompareOp = vkCmdSetDepthCompareOpEXT; - vkCmdSetDepthBoundsTestEnableEXT = PFN_vkCmdSetDepthBoundsTestEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthBoundsTestEnableEXT" ) ); - if ( !vkCmdSetDepthBoundsTestEnable ) - vkCmdSetDepthBoundsTestEnable = vkCmdSetDepthBoundsTestEnableEXT; - vkCmdSetStencilTestEnableEXT = PFN_vkCmdSetStencilTestEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetStencilTestEnableEXT" ) ); - if ( !vkCmdSetStencilTestEnable ) - vkCmdSetStencilTestEnable = vkCmdSetStencilTestEnableEXT; - vkCmdSetStencilOpEXT = PFN_vkCmdSetStencilOpEXT( vkGetDeviceProcAddr( device, "vkCmdSetStencilOpEXT" ) ); - if ( !vkCmdSetStencilOp ) - vkCmdSetStencilOp = vkCmdSetStencilOpEXT; + PFN_vkCmdSetCullModeEXT vkCmdSetCullModeEXT = 0; + PFN_vkCmdSetFrontFaceEXT vkCmdSetFrontFaceEXT = 0; + PFN_vkCmdSetPrimitiveTopologyEXT vkCmdSetPrimitiveTopologyEXT = 0; + PFN_vkCmdSetViewportWithCountEXT vkCmdSetViewportWithCountEXT = 0; + PFN_vkCmdSetScissorWithCountEXT vkCmdSetScissorWithCountEXT = 0; + PFN_vkCmdBindVertexBuffers2EXT vkCmdBindVertexBuffers2EXT = 0; + PFN_vkCmdSetDepthTestEnableEXT vkCmdSetDepthTestEnableEXT = 0; + PFN_vkCmdSetDepthWriteEnableEXT vkCmdSetDepthWriteEnableEXT = 0; + PFN_vkCmdSetDepthCompareOpEXT vkCmdSetDepthCompareOpEXT = 0; + PFN_vkCmdSetDepthBoundsTestEnableEXT vkCmdSetDepthBoundsTestEnableEXT = 0; + PFN_vkCmdSetStencilTestEnableEXT vkCmdSetStencilTestEnableEXT = 0; + PFN_vkCmdSetStencilOpEXT vkCmdSetStencilOpEXT = 0; //=== VK_KHR_deferred_host_operations === - vkCreateDeferredOperationKHR = PFN_vkCreateDeferredOperationKHR( vkGetDeviceProcAddr( device, "vkCreateDeferredOperationKHR" ) ); - vkDestroyDeferredOperationKHR = PFN_vkDestroyDeferredOperationKHR( vkGetDeviceProcAddr( device, "vkDestroyDeferredOperationKHR" ) ); - vkGetDeferredOperationMaxConcurrencyKHR = - PFN_vkGetDeferredOperationMaxConcurrencyKHR( vkGetDeviceProcAddr( device, "vkGetDeferredOperationMaxConcurrencyKHR" ) ); - vkGetDeferredOperationResultKHR = PFN_vkGetDeferredOperationResultKHR( vkGetDeviceProcAddr( device, "vkGetDeferredOperationResultKHR" ) ); - vkDeferredOperationJoinKHR = PFN_vkDeferredOperationJoinKHR( vkGetDeviceProcAddr( device, "vkDeferredOperationJoinKHR" ) ); + PFN_vkCreateDeferredOperationKHR vkCreateDeferredOperationKHR = 0; + PFN_vkDestroyDeferredOperationKHR vkDestroyDeferredOperationKHR = 0; + PFN_vkGetDeferredOperationMaxConcurrencyKHR vkGetDeferredOperationMaxConcurrencyKHR = 0; + PFN_vkGetDeferredOperationResultKHR vkGetDeferredOperationResultKHR = 0; + PFN_vkDeferredOperationJoinKHR vkDeferredOperationJoinKHR = 0; //=== VK_KHR_pipeline_executable_properties === - vkGetPipelineExecutablePropertiesKHR = - PFN_vkGetPipelineExecutablePropertiesKHR( vkGetDeviceProcAddr( device, "vkGetPipelineExecutablePropertiesKHR" ) ); - vkGetPipelineExecutableStatisticsKHR = - 
PFN_vkGetPipelineExecutableStatisticsKHR( vkGetDeviceProcAddr( device, "vkGetPipelineExecutableStatisticsKHR" ) ); - vkGetPipelineExecutableInternalRepresentationsKHR = - PFN_vkGetPipelineExecutableInternalRepresentationsKHR( vkGetDeviceProcAddr( device, "vkGetPipelineExecutableInternalRepresentationsKHR" ) ); + PFN_vkGetPipelineExecutablePropertiesKHR vkGetPipelineExecutablePropertiesKHR = 0; + PFN_vkGetPipelineExecutableStatisticsKHR vkGetPipelineExecutableStatisticsKHR = 0; + PFN_vkGetPipelineExecutableInternalRepresentationsKHR vkGetPipelineExecutableInternalRepresentationsKHR = 0; //=== VK_EXT_host_image_copy === - vkCopyMemoryToImageEXT = PFN_vkCopyMemoryToImageEXT( vkGetDeviceProcAddr( device, "vkCopyMemoryToImageEXT" ) ); - vkCopyImageToMemoryEXT = PFN_vkCopyImageToMemoryEXT( vkGetDeviceProcAddr( device, "vkCopyImageToMemoryEXT" ) ); - vkCopyImageToImageEXT = PFN_vkCopyImageToImageEXT( vkGetDeviceProcAddr( device, "vkCopyImageToImageEXT" ) ); - vkTransitionImageLayoutEXT = PFN_vkTransitionImageLayoutEXT( vkGetDeviceProcAddr( device, "vkTransitionImageLayoutEXT" ) ); - vkGetImageSubresourceLayout2EXT = PFN_vkGetImageSubresourceLayout2EXT( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout2EXT" ) ); - if ( !vkGetImageSubresourceLayout2KHR ) - vkGetImageSubresourceLayout2KHR = vkGetImageSubresourceLayout2EXT; + PFN_vkCopyMemoryToImageEXT vkCopyMemoryToImageEXT = 0; + PFN_vkCopyImageToMemoryEXT vkCopyImageToMemoryEXT = 0; + PFN_vkCopyImageToImageEXT vkCopyImageToImageEXT = 0; + PFN_vkTransitionImageLayoutEXT vkTransitionImageLayoutEXT = 0; + PFN_vkGetImageSubresourceLayout2EXT vkGetImageSubresourceLayout2EXT = 0; //=== VK_KHR_map_memory2 === - vkMapMemory2KHR = PFN_vkMapMemory2KHR( vkGetDeviceProcAddr( device, "vkMapMemory2KHR" ) ); - vkUnmapMemory2KHR = PFN_vkUnmapMemory2KHR( vkGetDeviceProcAddr( device, "vkUnmapMemory2KHR" ) ); + PFN_vkMapMemory2KHR vkMapMemory2KHR = 0; + PFN_vkUnmapMemory2KHR vkUnmapMemory2KHR = 0; //=== VK_EXT_swapchain_maintenance1 === - vkReleaseSwapchainImagesEXT = PFN_vkReleaseSwapchainImagesEXT( vkGetDeviceProcAddr( device, "vkReleaseSwapchainImagesEXT" ) ); + PFN_vkReleaseSwapchainImagesEXT vkReleaseSwapchainImagesEXT = 0; //=== VK_NV_device_generated_commands === - vkGetGeneratedCommandsMemoryRequirementsNV = - PFN_vkGetGeneratedCommandsMemoryRequirementsNV( vkGetDeviceProcAddr( device, "vkGetGeneratedCommandsMemoryRequirementsNV" ) ); - vkCmdPreprocessGeneratedCommandsNV = PFN_vkCmdPreprocessGeneratedCommandsNV( vkGetDeviceProcAddr( device, "vkCmdPreprocessGeneratedCommandsNV" ) ); - vkCmdExecuteGeneratedCommandsNV = PFN_vkCmdExecuteGeneratedCommandsNV( vkGetDeviceProcAddr( device, "vkCmdExecuteGeneratedCommandsNV" ) ); - vkCmdBindPipelineShaderGroupNV = PFN_vkCmdBindPipelineShaderGroupNV( vkGetDeviceProcAddr( device, "vkCmdBindPipelineShaderGroupNV" ) ); - vkCreateIndirectCommandsLayoutNV = PFN_vkCreateIndirectCommandsLayoutNV( vkGetDeviceProcAddr( device, "vkCreateIndirectCommandsLayoutNV" ) ); - vkDestroyIndirectCommandsLayoutNV = PFN_vkDestroyIndirectCommandsLayoutNV( vkGetDeviceProcAddr( device, "vkDestroyIndirectCommandsLayoutNV" ) ); + PFN_vkGetGeneratedCommandsMemoryRequirementsNV vkGetGeneratedCommandsMemoryRequirementsNV = 0; + PFN_vkCmdPreprocessGeneratedCommandsNV vkCmdPreprocessGeneratedCommandsNV = 0; + PFN_vkCmdExecuteGeneratedCommandsNV vkCmdExecuteGeneratedCommandsNV = 0; + PFN_vkCmdBindPipelineShaderGroupNV vkCmdBindPipelineShaderGroupNV = 0; + PFN_vkCreateIndirectCommandsLayoutNV vkCreateIndirectCommandsLayoutNV = 0; + 
PFN_vkDestroyIndirectCommandsLayoutNV vkDestroyIndirectCommandsLayoutNV = 0; //=== VK_EXT_depth_bias_control === - vkCmdSetDepthBias2EXT = PFN_vkCmdSetDepthBias2EXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthBias2EXT" ) ); + PFN_vkCmdSetDepthBias2EXT vkCmdSetDepthBias2EXT = 0; //=== VK_EXT_private_data === - vkCreatePrivateDataSlotEXT = PFN_vkCreatePrivateDataSlotEXT( vkGetDeviceProcAddr( device, "vkCreatePrivateDataSlotEXT" ) ); - if ( !vkCreatePrivateDataSlot ) - vkCreatePrivateDataSlot = vkCreatePrivateDataSlotEXT; - vkDestroyPrivateDataSlotEXT = PFN_vkDestroyPrivateDataSlotEXT( vkGetDeviceProcAddr( device, "vkDestroyPrivateDataSlotEXT" ) ); - if ( !vkDestroyPrivateDataSlot ) - vkDestroyPrivateDataSlot = vkDestroyPrivateDataSlotEXT; - vkSetPrivateDataEXT = PFN_vkSetPrivateDataEXT( vkGetDeviceProcAddr( device, "vkSetPrivateDataEXT" ) ); - if ( !vkSetPrivateData ) - vkSetPrivateData = vkSetPrivateDataEXT; - vkGetPrivateDataEXT = PFN_vkGetPrivateDataEXT( vkGetDeviceProcAddr( device, "vkGetPrivateDataEXT" ) ); - if ( !vkGetPrivateData ) - vkGetPrivateData = vkGetPrivateDataEXT; + PFN_vkCreatePrivateDataSlotEXT vkCreatePrivateDataSlotEXT = 0; + PFN_vkDestroyPrivateDataSlotEXT vkDestroyPrivateDataSlotEXT = 0; + PFN_vkSetPrivateDataEXT vkSetPrivateDataEXT = 0; + PFN_vkGetPrivateDataEXT vkGetPrivateDataEXT = 0; //=== VK_KHR_video_encode_queue === - vkGetEncodedVideoSessionParametersKHR = - PFN_vkGetEncodedVideoSessionParametersKHR( vkGetDeviceProcAddr( device, "vkGetEncodedVideoSessionParametersKHR" ) ); - vkCmdEncodeVideoKHR = PFN_vkCmdEncodeVideoKHR( vkGetDeviceProcAddr( device, "vkCmdEncodeVideoKHR" ) ); + PFN_vkGetEncodedVideoSessionParametersKHR vkGetEncodedVideoSessionParametersKHR = 0; + PFN_vkCmdEncodeVideoKHR vkCmdEncodeVideoKHR = 0; # if defined( VK_ENABLE_BETA_EXTENSIONS ) //=== VK_NV_cuda_kernel_launch === - vkCreateCudaModuleNV = PFN_vkCreateCudaModuleNV( vkGetDeviceProcAddr( device, "vkCreateCudaModuleNV" ) ); - vkGetCudaModuleCacheNV = PFN_vkGetCudaModuleCacheNV( vkGetDeviceProcAddr( device, "vkGetCudaModuleCacheNV" ) ); - vkCreateCudaFunctionNV = PFN_vkCreateCudaFunctionNV( vkGetDeviceProcAddr( device, "vkCreateCudaFunctionNV" ) ); - vkDestroyCudaModuleNV = PFN_vkDestroyCudaModuleNV( vkGetDeviceProcAddr( device, "vkDestroyCudaModuleNV" ) ); - vkDestroyCudaFunctionNV = PFN_vkDestroyCudaFunctionNV( vkGetDeviceProcAddr( device, "vkDestroyCudaFunctionNV" ) ); - vkCmdCudaLaunchKernelNV = PFN_vkCmdCudaLaunchKernelNV( vkGetDeviceProcAddr( device, "vkCmdCudaLaunchKernelNV" ) ); + PFN_vkCreateCudaModuleNV vkCreateCudaModuleNV = 0; + PFN_vkGetCudaModuleCacheNV vkGetCudaModuleCacheNV = 0; + PFN_vkCreateCudaFunctionNV vkCreateCudaFunctionNV = 0; + PFN_vkDestroyCudaModuleNV vkDestroyCudaModuleNV = 0; + PFN_vkDestroyCudaFunctionNV vkDestroyCudaFunctionNV = 0; + PFN_vkCmdCudaLaunchKernelNV vkCmdCudaLaunchKernelNV = 0; +# else + PFN_dummy vkCreateCudaModuleNV_placeholder = 0; + PFN_dummy vkGetCudaModuleCacheNV_placeholder = 0; + PFN_dummy vkCreateCudaFunctionNV_placeholder = 0; + PFN_dummy vkDestroyCudaModuleNV_placeholder = 0; + PFN_dummy vkDestroyCudaFunctionNV_placeholder = 0; + PFN_dummy vkCmdCudaLaunchKernelNV_placeholder = 0; # endif /*VK_ENABLE_BETA_EXTENSIONS*/ # if defined( VK_USE_PLATFORM_METAL_EXT ) //=== VK_EXT_metal_objects === - vkExportMetalObjectsEXT = PFN_vkExportMetalObjectsEXT( vkGetDeviceProcAddr( device, "vkExportMetalObjectsEXT" ) ); + PFN_vkExportMetalObjectsEXT vkExportMetalObjectsEXT = 0; +# else + PFN_dummy vkExportMetalObjectsEXT_placeholder = 0; # endif 
/*VK_USE_PLATFORM_METAL_EXT*/ //=== VK_KHR_synchronization2 === - vkCmdSetEvent2KHR = PFN_vkCmdSetEvent2KHR( vkGetDeviceProcAddr( device, "vkCmdSetEvent2KHR" ) ); - if ( !vkCmdSetEvent2 ) - vkCmdSetEvent2 = vkCmdSetEvent2KHR; - vkCmdResetEvent2KHR = PFN_vkCmdResetEvent2KHR( vkGetDeviceProcAddr( device, "vkCmdResetEvent2KHR" ) ); - if ( !vkCmdResetEvent2 ) - vkCmdResetEvent2 = vkCmdResetEvent2KHR; - vkCmdWaitEvents2KHR = PFN_vkCmdWaitEvents2KHR( vkGetDeviceProcAddr( device, "vkCmdWaitEvents2KHR" ) ); - if ( !vkCmdWaitEvents2 ) - vkCmdWaitEvents2 = vkCmdWaitEvents2KHR; - vkCmdPipelineBarrier2KHR = PFN_vkCmdPipelineBarrier2KHR( vkGetDeviceProcAddr( device, "vkCmdPipelineBarrier2KHR" ) ); - if ( !vkCmdPipelineBarrier2 ) - vkCmdPipelineBarrier2 = vkCmdPipelineBarrier2KHR; - vkCmdWriteTimestamp2KHR = PFN_vkCmdWriteTimestamp2KHR( vkGetDeviceProcAddr( device, "vkCmdWriteTimestamp2KHR" ) ); - if ( !vkCmdWriteTimestamp2 ) - vkCmdWriteTimestamp2 = vkCmdWriteTimestamp2KHR; - vkQueueSubmit2KHR = PFN_vkQueueSubmit2KHR( vkGetDeviceProcAddr( device, "vkQueueSubmit2KHR" ) ); - if ( !vkQueueSubmit2 ) - vkQueueSubmit2 = vkQueueSubmit2KHR; - vkCmdWriteBufferMarker2AMD = PFN_vkCmdWriteBufferMarker2AMD( vkGetDeviceProcAddr( device, "vkCmdWriteBufferMarker2AMD" ) ); - vkGetQueueCheckpointData2NV = PFN_vkGetQueueCheckpointData2NV( vkGetDeviceProcAddr( device, "vkGetQueueCheckpointData2NV" ) ); + PFN_vkCmdSetEvent2KHR vkCmdSetEvent2KHR = 0; + PFN_vkCmdResetEvent2KHR vkCmdResetEvent2KHR = 0; + PFN_vkCmdWaitEvents2KHR vkCmdWaitEvents2KHR = 0; + PFN_vkCmdPipelineBarrier2KHR vkCmdPipelineBarrier2KHR = 0; + PFN_vkCmdWriteTimestamp2KHR vkCmdWriteTimestamp2KHR = 0; + PFN_vkQueueSubmit2KHR vkQueueSubmit2KHR = 0; //=== VK_EXT_descriptor_buffer === - vkGetDescriptorSetLayoutSizeEXT = PFN_vkGetDescriptorSetLayoutSizeEXT( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutSizeEXT" ) ); - vkGetDescriptorSetLayoutBindingOffsetEXT = - PFN_vkGetDescriptorSetLayoutBindingOffsetEXT( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutBindingOffsetEXT" ) ); - vkGetDescriptorEXT = PFN_vkGetDescriptorEXT( vkGetDeviceProcAddr( device, "vkGetDescriptorEXT" ) ); - vkCmdBindDescriptorBuffersEXT = PFN_vkCmdBindDescriptorBuffersEXT( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorBuffersEXT" ) ); - vkCmdSetDescriptorBufferOffsetsEXT = PFN_vkCmdSetDescriptorBufferOffsetsEXT( vkGetDeviceProcAddr( device, "vkCmdSetDescriptorBufferOffsetsEXT" ) ); - vkCmdBindDescriptorBufferEmbeddedSamplersEXT = - PFN_vkCmdBindDescriptorBufferEmbeddedSamplersEXT( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorBufferEmbeddedSamplersEXT" ) ); - vkGetBufferOpaqueCaptureDescriptorDataEXT = - PFN_vkGetBufferOpaqueCaptureDescriptorDataEXT( vkGetDeviceProcAddr( device, "vkGetBufferOpaqueCaptureDescriptorDataEXT" ) ); - vkGetImageOpaqueCaptureDescriptorDataEXT = - PFN_vkGetImageOpaqueCaptureDescriptorDataEXT( vkGetDeviceProcAddr( device, "vkGetImageOpaqueCaptureDescriptorDataEXT" ) ); - vkGetImageViewOpaqueCaptureDescriptorDataEXT = - PFN_vkGetImageViewOpaqueCaptureDescriptorDataEXT( vkGetDeviceProcAddr( device, "vkGetImageViewOpaqueCaptureDescriptorDataEXT" ) ); - vkGetSamplerOpaqueCaptureDescriptorDataEXT = - PFN_vkGetSamplerOpaqueCaptureDescriptorDataEXT( vkGetDeviceProcAddr( device, "vkGetSamplerOpaqueCaptureDescriptorDataEXT" ) ); - vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT = PFN_vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT( - vkGetDeviceProcAddr( device, "vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT" ) ); + 
PFN_vkGetDescriptorSetLayoutSizeEXT vkGetDescriptorSetLayoutSizeEXT = 0; + PFN_vkGetDescriptorSetLayoutBindingOffsetEXT vkGetDescriptorSetLayoutBindingOffsetEXT = 0; + PFN_vkGetDescriptorEXT vkGetDescriptorEXT = 0; + PFN_vkCmdBindDescriptorBuffersEXT vkCmdBindDescriptorBuffersEXT = 0; + PFN_vkCmdSetDescriptorBufferOffsetsEXT vkCmdSetDescriptorBufferOffsetsEXT = 0; + PFN_vkCmdBindDescriptorBufferEmbeddedSamplersEXT vkCmdBindDescriptorBufferEmbeddedSamplersEXT = 0; + PFN_vkGetBufferOpaqueCaptureDescriptorDataEXT vkGetBufferOpaqueCaptureDescriptorDataEXT = 0; + PFN_vkGetImageOpaqueCaptureDescriptorDataEXT vkGetImageOpaqueCaptureDescriptorDataEXT = 0; + PFN_vkGetImageViewOpaqueCaptureDescriptorDataEXT vkGetImageViewOpaqueCaptureDescriptorDataEXT = 0; + PFN_vkGetSamplerOpaqueCaptureDescriptorDataEXT vkGetSamplerOpaqueCaptureDescriptorDataEXT = 0; + PFN_vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT = 0; //=== VK_NV_fragment_shading_rate_enums === - vkCmdSetFragmentShadingRateEnumNV = PFN_vkCmdSetFragmentShadingRateEnumNV( vkGetDeviceProcAddr( device, "vkCmdSetFragmentShadingRateEnumNV" ) ); + PFN_vkCmdSetFragmentShadingRateEnumNV vkCmdSetFragmentShadingRateEnumNV = 0; //=== VK_EXT_mesh_shader === - vkCmdDrawMeshTasksEXT = PFN_vkCmdDrawMeshTasksEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksEXT" ) ); - vkCmdDrawMeshTasksIndirectEXT = PFN_vkCmdDrawMeshTasksIndirectEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectEXT" ) ); - vkCmdDrawMeshTasksIndirectCountEXT = PFN_vkCmdDrawMeshTasksIndirectCountEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectCountEXT" ) ); + PFN_vkCmdDrawMeshTasksEXT vkCmdDrawMeshTasksEXT = 0; + PFN_vkCmdDrawMeshTasksIndirectEXT vkCmdDrawMeshTasksIndirectEXT = 0; + PFN_vkCmdDrawMeshTasksIndirectCountEXT vkCmdDrawMeshTasksIndirectCountEXT = 0; //=== VK_KHR_copy_commands2 === - vkCmdCopyBuffer2KHR = PFN_vkCmdCopyBuffer2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyBuffer2KHR" ) ); - if ( !vkCmdCopyBuffer2 ) - vkCmdCopyBuffer2 = vkCmdCopyBuffer2KHR; - vkCmdCopyImage2KHR = PFN_vkCmdCopyImage2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyImage2KHR" ) ); - if ( !vkCmdCopyImage2 ) - vkCmdCopyImage2 = vkCmdCopyImage2KHR; - vkCmdCopyBufferToImage2KHR = PFN_vkCmdCopyBufferToImage2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyBufferToImage2KHR" ) ); - if ( !vkCmdCopyBufferToImage2 ) - vkCmdCopyBufferToImage2 = vkCmdCopyBufferToImage2KHR; - vkCmdCopyImageToBuffer2KHR = PFN_vkCmdCopyImageToBuffer2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyImageToBuffer2KHR" ) ); - if ( !vkCmdCopyImageToBuffer2 ) - vkCmdCopyImageToBuffer2 = vkCmdCopyImageToBuffer2KHR; - vkCmdBlitImage2KHR = PFN_vkCmdBlitImage2KHR( vkGetDeviceProcAddr( device, "vkCmdBlitImage2KHR" ) ); - if ( !vkCmdBlitImage2 ) - vkCmdBlitImage2 = vkCmdBlitImage2KHR; - vkCmdResolveImage2KHR = PFN_vkCmdResolveImage2KHR( vkGetDeviceProcAddr( device, "vkCmdResolveImage2KHR" ) ); - if ( !vkCmdResolveImage2 ) - vkCmdResolveImage2 = vkCmdResolveImage2KHR; + PFN_vkCmdCopyBuffer2KHR vkCmdCopyBuffer2KHR = 0; + PFN_vkCmdCopyImage2KHR vkCmdCopyImage2KHR = 0; + PFN_vkCmdCopyBufferToImage2KHR vkCmdCopyBufferToImage2KHR = 0; + PFN_vkCmdCopyImageToBuffer2KHR vkCmdCopyImageToBuffer2KHR = 0; + PFN_vkCmdBlitImage2KHR vkCmdBlitImage2KHR = 0; + PFN_vkCmdResolveImage2KHR vkCmdResolveImage2KHR = 0; //=== VK_EXT_device_fault === - vkGetDeviceFaultInfoEXT = PFN_vkGetDeviceFaultInfoEXT( vkGetDeviceProcAddr( device, "vkGetDeviceFaultInfoEXT" ) ); + PFN_vkGetDeviceFaultInfoEXT 
vkGetDeviceFaultInfoEXT = 0; //=== VK_EXT_vertex_input_dynamic_state === - vkCmdSetVertexInputEXT = PFN_vkCmdSetVertexInputEXT( vkGetDeviceProcAddr( device, "vkCmdSetVertexInputEXT" ) ); + PFN_vkCmdSetVertexInputEXT vkCmdSetVertexInputEXT = 0; # if defined( VK_USE_PLATFORM_FUCHSIA ) //=== VK_FUCHSIA_external_memory === - vkGetMemoryZirconHandleFUCHSIA = PFN_vkGetMemoryZirconHandleFUCHSIA( vkGetDeviceProcAddr( device, "vkGetMemoryZirconHandleFUCHSIA" ) ); - vkGetMemoryZirconHandlePropertiesFUCHSIA = - PFN_vkGetMemoryZirconHandlePropertiesFUCHSIA( vkGetDeviceProcAddr( device, "vkGetMemoryZirconHandlePropertiesFUCHSIA" ) ); + PFN_vkGetMemoryZirconHandleFUCHSIA vkGetMemoryZirconHandleFUCHSIA = 0; + PFN_vkGetMemoryZirconHandlePropertiesFUCHSIA vkGetMemoryZirconHandlePropertiesFUCHSIA = 0; +# else + PFN_dummy vkGetMemoryZirconHandleFUCHSIA_placeholder = 0; + PFN_dummy vkGetMemoryZirconHandlePropertiesFUCHSIA_placeholder = 0; # endif /*VK_USE_PLATFORM_FUCHSIA*/ # if defined( VK_USE_PLATFORM_FUCHSIA ) //=== VK_FUCHSIA_external_semaphore === - vkImportSemaphoreZirconHandleFUCHSIA = - PFN_vkImportSemaphoreZirconHandleFUCHSIA( vkGetDeviceProcAddr( device, "vkImportSemaphoreZirconHandleFUCHSIA" ) ); - vkGetSemaphoreZirconHandleFUCHSIA = PFN_vkGetSemaphoreZirconHandleFUCHSIA( vkGetDeviceProcAddr( device, "vkGetSemaphoreZirconHandleFUCHSIA" ) ); + PFN_vkImportSemaphoreZirconHandleFUCHSIA vkImportSemaphoreZirconHandleFUCHSIA = 0; + PFN_vkGetSemaphoreZirconHandleFUCHSIA vkGetSemaphoreZirconHandleFUCHSIA = 0; +# else + PFN_dummy vkImportSemaphoreZirconHandleFUCHSIA_placeholder = 0; + PFN_dummy vkGetSemaphoreZirconHandleFUCHSIA_placeholder = 0; # endif /*VK_USE_PLATFORM_FUCHSIA*/ # if defined( VK_USE_PLATFORM_FUCHSIA ) //=== VK_FUCHSIA_buffer_collection === - vkCreateBufferCollectionFUCHSIA = PFN_vkCreateBufferCollectionFUCHSIA( vkGetDeviceProcAddr( device, "vkCreateBufferCollectionFUCHSIA" ) ); - vkSetBufferCollectionImageConstraintsFUCHSIA = - PFN_vkSetBufferCollectionImageConstraintsFUCHSIA( vkGetDeviceProcAddr( device, "vkSetBufferCollectionImageConstraintsFUCHSIA" ) ); - vkSetBufferCollectionBufferConstraintsFUCHSIA = - PFN_vkSetBufferCollectionBufferConstraintsFUCHSIA( vkGetDeviceProcAddr( device, "vkSetBufferCollectionBufferConstraintsFUCHSIA" ) ); - vkDestroyBufferCollectionFUCHSIA = PFN_vkDestroyBufferCollectionFUCHSIA( vkGetDeviceProcAddr( device, "vkDestroyBufferCollectionFUCHSIA" ) ); - vkGetBufferCollectionPropertiesFUCHSIA = - PFN_vkGetBufferCollectionPropertiesFUCHSIA( vkGetDeviceProcAddr( device, "vkGetBufferCollectionPropertiesFUCHSIA" ) ); + PFN_vkCreateBufferCollectionFUCHSIA vkCreateBufferCollectionFUCHSIA = 0; + PFN_vkSetBufferCollectionImageConstraintsFUCHSIA vkSetBufferCollectionImageConstraintsFUCHSIA = 0; + PFN_vkSetBufferCollectionBufferConstraintsFUCHSIA vkSetBufferCollectionBufferConstraintsFUCHSIA = 0; + PFN_vkDestroyBufferCollectionFUCHSIA vkDestroyBufferCollectionFUCHSIA = 0; + PFN_vkGetBufferCollectionPropertiesFUCHSIA vkGetBufferCollectionPropertiesFUCHSIA = 0; +# else + PFN_dummy vkCreateBufferCollectionFUCHSIA_placeholder = 0; + PFN_dummy vkSetBufferCollectionImageConstraintsFUCHSIA_placeholder = 0; + PFN_dummy vkSetBufferCollectionBufferConstraintsFUCHSIA_placeholder = 0; + PFN_dummy vkDestroyBufferCollectionFUCHSIA_placeholder = 0; + PFN_dummy vkGetBufferCollectionPropertiesFUCHSIA_placeholder = 0; # endif /*VK_USE_PLATFORM_FUCHSIA*/ //=== VK_HUAWEI_subpass_shading === - vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI = - 
PFN_vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI( vkGetDeviceProcAddr( device, "vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI" ) ); - vkCmdSubpassShadingHUAWEI = PFN_vkCmdSubpassShadingHUAWEI( vkGetDeviceProcAddr( device, "vkCmdSubpassShadingHUAWEI" ) ); + PFN_vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI = 0; + PFN_vkCmdSubpassShadingHUAWEI vkCmdSubpassShadingHUAWEI = 0; //=== VK_HUAWEI_invocation_mask === - vkCmdBindInvocationMaskHUAWEI = PFN_vkCmdBindInvocationMaskHUAWEI( vkGetDeviceProcAddr( device, "vkCmdBindInvocationMaskHUAWEI" ) ); + PFN_vkCmdBindInvocationMaskHUAWEI vkCmdBindInvocationMaskHUAWEI = 0; //=== VK_NV_external_memory_rdma === - vkGetMemoryRemoteAddressNV = PFN_vkGetMemoryRemoteAddressNV( vkGetDeviceProcAddr( device, "vkGetMemoryRemoteAddressNV" ) ); + PFN_vkGetMemoryRemoteAddressNV vkGetMemoryRemoteAddressNV = 0; //=== VK_EXT_pipeline_properties === - vkGetPipelinePropertiesEXT = PFN_vkGetPipelinePropertiesEXT( vkGetDeviceProcAddr( device, "vkGetPipelinePropertiesEXT" ) ); + PFN_vkGetPipelinePropertiesEXT vkGetPipelinePropertiesEXT = 0; //=== VK_EXT_extended_dynamic_state2 === - vkCmdSetPatchControlPointsEXT = PFN_vkCmdSetPatchControlPointsEXT( vkGetDeviceProcAddr( device, "vkCmdSetPatchControlPointsEXT" ) ); - vkCmdSetRasterizerDiscardEnableEXT = PFN_vkCmdSetRasterizerDiscardEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetRasterizerDiscardEnableEXT" ) ); - if ( !vkCmdSetRasterizerDiscardEnable ) - vkCmdSetRasterizerDiscardEnable = vkCmdSetRasterizerDiscardEnableEXT; - vkCmdSetDepthBiasEnableEXT = PFN_vkCmdSetDepthBiasEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthBiasEnableEXT" ) ); - if ( !vkCmdSetDepthBiasEnable ) - vkCmdSetDepthBiasEnable = vkCmdSetDepthBiasEnableEXT; - vkCmdSetLogicOpEXT = PFN_vkCmdSetLogicOpEXT( vkGetDeviceProcAddr( device, "vkCmdSetLogicOpEXT" ) ); - vkCmdSetPrimitiveRestartEnableEXT = PFN_vkCmdSetPrimitiveRestartEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveRestartEnableEXT" ) ); - if ( !vkCmdSetPrimitiveRestartEnable ) - vkCmdSetPrimitiveRestartEnable = vkCmdSetPrimitiveRestartEnableEXT; + PFN_vkCmdSetPatchControlPointsEXT vkCmdSetPatchControlPointsEXT = 0; + PFN_vkCmdSetRasterizerDiscardEnableEXT vkCmdSetRasterizerDiscardEnableEXT = 0; + PFN_vkCmdSetDepthBiasEnableEXT vkCmdSetDepthBiasEnableEXT = 0; + PFN_vkCmdSetLogicOpEXT vkCmdSetLogicOpEXT = 0; + PFN_vkCmdSetPrimitiveRestartEnableEXT vkCmdSetPrimitiveRestartEnableEXT = 0; //=== VK_EXT_color_write_enable === - vkCmdSetColorWriteEnableEXT = PFN_vkCmdSetColorWriteEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorWriteEnableEXT" ) ); + PFN_vkCmdSetColorWriteEnableEXT vkCmdSetColorWriteEnableEXT = 0; //=== VK_KHR_ray_tracing_maintenance1 === - vkCmdTraceRaysIndirect2KHR = PFN_vkCmdTraceRaysIndirect2KHR( vkGetDeviceProcAddr( device, "vkCmdTraceRaysIndirect2KHR" ) ); + PFN_vkCmdTraceRaysIndirect2KHR vkCmdTraceRaysIndirect2KHR = 0; //=== VK_EXT_multi_draw === - vkCmdDrawMultiEXT = PFN_vkCmdDrawMultiEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMultiEXT" ) ); - vkCmdDrawMultiIndexedEXT = PFN_vkCmdDrawMultiIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMultiIndexedEXT" ) ); + PFN_vkCmdDrawMultiEXT vkCmdDrawMultiEXT = 0; + PFN_vkCmdDrawMultiIndexedEXT vkCmdDrawMultiIndexedEXT = 0; //=== VK_EXT_opacity_micromap === - vkCreateMicromapEXT = PFN_vkCreateMicromapEXT( vkGetDeviceProcAddr( device, "vkCreateMicromapEXT" ) ); - vkDestroyMicromapEXT = PFN_vkDestroyMicromapEXT( vkGetDeviceProcAddr( device, "vkDestroyMicromapEXT" ) 
); - vkCmdBuildMicromapsEXT = PFN_vkCmdBuildMicromapsEXT( vkGetDeviceProcAddr( device, "vkCmdBuildMicromapsEXT" ) ); - vkBuildMicromapsEXT = PFN_vkBuildMicromapsEXT( vkGetDeviceProcAddr( device, "vkBuildMicromapsEXT" ) ); - vkCopyMicromapEXT = PFN_vkCopyMicromapEXT( vkGetDeviceProcAddr( device, "vkCopyMicromapEXT" ) ); - vkCopyMicromapToMemoryEXT = PFN_vkCopyMicromapToMemoryEXT( vkGetDeviceProcAddr( device, "vkCopyMicromapToMemoryEXT" ) ); - vkCopyMemoryToMicromapEXT = PFN_vkCopyMemoryToMicromapEXT( vkGetDeviceProcAddr( device, "vkCopyMemoryToMicromapEXT" ) ); - vkWriteMicromapsPropertiesEXT = PFN_vkWriteMicromapsPropertiesEXT( vkGetDeviceProcAddr( device, "vkWriteMicromapsPropertiesEXT" ) ); - vkCmdCopyMicromapEXT = PFN_vkCmdCopyMicromapEXT( vkGetDeviceProcAddr( device, "vkCmdCopyMicromapEXT" ) ); - vkCmdCopyMicromapToMemoryEXT = PFN_vkCmdCopyMicromapToMemoryEXT( vkGetDeviceProcAddr( device, "vkCmdCopyMicromapToMemoryEXT" ) ); - vkCmdCopyMemoryToMicromapEXT = PFN_vkCmdCopyMemoryToMicromapEXT( vkGetDeviceProcAddr( device, "vkCmdCopyMemoryToMicromapEXT" ) ); - vkCmdWriteMicromapsPropertiesEXT = PFN_vkCmdWriteMicromapsPropertiesEXT( vkGetDeviceProcAddr( device, "vkCmdWriteMicromapsPropertiesEXT" ) ); - vkGetDeviceMicromapCompatibilityEXT = PFN_vkGetDeviceMicromapCompatibilityEXT( vkGetDeviceProcAddr( device, "vkGetDeviceMicromapCompatibilityEXT" ) ); - vkGetMicromapBuildSizesEXT = PFN_vkGetMicromapBuildSizesEXT( vkGetDeviceProcAddr( device, "vkGetMicromapBuildSizesEXT" ) ); + PFN_vkCreateMicromapEXT vkCreateMicromapEXT = 0; + PFN_vkDestroyMicromapEXT vkDestroyMicromapEXT = 0; + PFN_vkCmdBuildMicromapsEXT vkCmdBuildMicromapsEXT = 0; + PFN_vkBuildMicromapsEXT vkBuildMicromapsEXT = 0; + PFN_vkCopyMicromapEXT vkCopyMicromapEXT = 0; + PFN_vkCopyMicromapToMemoryEXT vkCopyMicromapToMemoryEXT = 0; + PFN_vkCopyMemoryToMicromapEXT vkCopyMemoryToMicromapEXT = 0; + PFN_vkWriteMicromapsPropertiesEXT vkWriteMicromapsPropertiesEXT = 0; + PFN_vkCmdCopyMicromapEXT vkCmdCopyMicromapEXT = 0; + PFN_vkCmdCopyMicromapToMemoryEXT vkCmdCopyMicromapToMemoryEXT = 0; + PFN_vkCmdCopyMemoryToMicromapEXT vkCmdCopyMemoryToMicromapEXT = 0; + PFN_vkCmdWriteMicromapsPropertiesEXT vkCmdWriteMicromapsPropertiesEXT = 0; + PFN_vkGetDeviceMicromapCompatibilityEXT vkGetDeviceMicromapCompatibilityEXT = 0; + PFN_vkGetMicromapBuildSizesEXT vkGetMicromapBuildSizesEXT = 0; //=== VK_HUAWEI_cluster_culling_shader === - vkCmdDrawClusterHUAWEI = PFN_vkCmdDrawClusterHUAWEI( vkGetDeviceProcAddr( device, "vkCmdDrawClusterHUAWEI" ) ); - vkCmdDrawClusterIndirectHUAWEI = PFN_vkCmdDrawClusterIndirectHUAWEI( vkGetDeviceProcAddr( device, "vkCmdDrawClusterIndirectHUAWEI" ) ); + PFN_vkCmdDrawClusterHUAWEI vkCmdDrawClusterHUAWEI = 0; + PFN_vkCmdDrawClusterIndirectHUAWEI vkCmdDrawClusterIndirectHUAWEI = 0; //=== VK_EXT_pageable_device_local_memory === - vkSetDeviceMemoryPriorityEXT = PFN_vkSetDeviceMemoryPriorityEXT( vkGetDeviceProcAddr( device, "vkSetDeviceMemoryPriorityEXT" ) ); + PFN_vkSetDeviceMemoryPriorityEXT vkSetDeviceMemoryPriorityEXT = 0; //=== VK_KHR_maintenance4 === - vkGetDeviceBufferMemoryRequirementsKHR = - PFN_vkGetDeviceBufferMemoryRequirementsKHR( vkGetDeviceProcAddr( device, "vkGetDeviceBufferMemoryRequirementsKHR" ) ); - if ( !vkGetDeviceBufferMemoryRequirements ) - vkGetDeviceBufferMemoryRequirements = vkGetDeviceBufferMemoryRequirementsKHR; - vkGetDeviceImageMemoryRequirementsKHR = - PFN_vkGetDeviceImageMemoryRequirementsKHR( vkGetDeviceProcAddr( device, "vkGetDeviceImageMemoryRequirementsKHR" ) ); - if ( 
!vkGetDeviceImageMemoryRequirements ) - vkGetDeviceImageMemoryRequirements = vkGetDeviceImageMemoryRequirementsKHR; - vkGetDeviceImageSparseMemoryRequirementsKHR = - PFN_vkGetDeviceImageSparseMemoryRequirementsKHR( vkGetDeviceProcAddr( device, "vkGetDeviceImageSparseMemoryRequirementsKHR" ) ); - if ( !vkGetDeviceImageSparseMemoryRequirements ) - vkGetDeviceImageSparseMemoryRequirements = vkGetDeviceImageSparseMemoryRequirementsKHR; + PFN_vkGetDeviceBufferMemoryRequirementsKHR vkGetDeviceBufferMemoryRequirementsKHR = 0; + PFN_vkGetDeviceImageMemoryRequirementsKHR vkGetDeviceImageMemoryRequirementsKHR = 0; + PFN_vkGetDeviceImageSparseMemoryRequirementsKHR vkGetDeviceImageSparseMemoryRequirementsKHR = 0; //=== VK_VALVE_descriptor_set_host_mapping === - vkGetDescriptorSetLayoutHostMappingInfoVALVE = - PFN_vkGetDescriptorSetLayoutHostMappingInfoVALVE( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutHostMappingInfoVALVE" ) ); - vkGetDescriptorSetHostMappingVALVE = PFN_vkGetDescriptorSetHostMappingVALVE( vkGetDeviceProcAddr( device, "vkGetDescriptorSetHostMappingVALVE" ) ); + PFN_vkGetDescriptorSetLayoutHostMappingInfoVALVE vkGetDescriptorSetLayoutHostMappingInfoVALVE = 0; + PFN_vkGetDescriptorSetHostMappingVALVE vkGetDescriptorSetHostMappingVALVE = 0; //=== VK_NV_copy_memory_indirect === - vkCmdCopyMemoryIndirectNV = PFN_vkCmdCopyMemoryIndirectNV( vkGetDeviceProcAddr( device, "vkCmdCopyMemoryIndirectNV" ) ); - vkCmdCopyMemoryToImageIndirectNV = PFN_vkCmdCopyMemoryToImageIndirectNV( vkGetDeviceProcAddr( device, "vkCmdCopyMemoryToImageIndirectNV" ) ); + PFN_vkCmdCopyMemoryIndirectNV vkCmdCopyMemoryIndirectNV = 0; + PFN_vkCmdCopyMemoryToImageIndirectNV vkCmdCopyMemoryToImageIndirectNV = 0; //=== VK_NV_memory_decompression === - vkCmdDecompressMemoryNV = PFN_vkCmdDecompressMemoryNV( vkGetDeviceProcAddr( device, "vkCmdDecompressMemoryNV" ) ); - vkCmdDecompressMemoryIndirectCountNV = - PFN_vkCmdDecompressMemoryIndirectCountNV( vkGetDeviceProcAddr( device, "vkCmdDecompressMemoryIndirectCountNV" ) ); + PFN_vkCmdDecompressMemoryNV vkCmdDecompressMemoryNV = 0; + PFN_vkCmdDecompressMemoryIndirectCountNV vkCmdDecompressMemoryIndirectCountNV = 0; //=== VK_NV_device_generated_commands_compute === - vkGetPipelineIndirectMemoryRequirementsNV = - PFN_vkGetPipelineIndirectMemoryRequirementsNV( vkGetDeviceProcAddr( device, "vkGetPipelineIndirectMemoryRequirementsNV" ) ); - vkCmdUpdatePipelineIndirectBufferNV = PFN_vkCmdUpdatePipelineIndirectBufferNV( vkGetDeviceProcAddr( device, "vkCmdUpdatePipelineIndirectBufferNV" ) ); - vkGetPipelineIndirectDeviceAddressNV = - PFN_vkGetPipelineIndirectDeviceAddressNV( vkGetDeviceProcAddr( device, "vkGetPipelineIndirectDeviceAddressNV" ) ); + PFN_vkGetPipelineIndirectMemoryRequirementsNV vkGetPipelineIndirectMemoryRequirementsNV = 0; + PFN_vkCmdUpdatePipelineIndirectBufferNV vkCmdUpdatePipelineIndirectBufferNV = 0; + PFN_vkGetPipelineIndirectDeviceAddressNV vkGetPipelineIndirectDeviceAddressNV = 0; //=== VK_EXT_extended_dynamic_state3 === - vkCmdSetDepthClampEnableEXT = PFN_vkCmdSetDepthClampEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthClampEnableEXT" ) ); - vkCmdSetPolygonModeEXT = PFN_vkCmdSetPolygonModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetPolygonModeEXT" ) ); - vkCmdSetRasterizationSamplesEXT = PFN_vkCmdSetRasterizationSamplesEXT( vkGetDeviceProcAddr( device, "vkCmdSetRasterizationSamplesEXT" ) ); - vkCmdSetSampleMaskEXT = PFN_vkCmdSetSampleMaskEXT( vkGetDeviceProcAddr( device, "vkCmdSetSampleMaskEXT" ) ); - vkCmdSetAlphaToCoverageEnableEXT = 
PFN_vkCmdSetAlphaToCoverageEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetAlphaToCoverageEnableEXT" ) ); - vkCmdSetAlphaToOneEnableEXT = PFN_vkCmdSetAlphaToOneEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetAlphaToOneEnableEXT" ) ); - vkCmdSetLogicOpEnableEXT = PFN_vkCmdSetLogicOpEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetLogicOpEnableEXT" ) ); - vkCmdSetColorBlendEnableEXT = PFN_vkCmdSetColorBlendEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorBlendEnableEXT" ) ); - vkCmdSetColorBlendEquationEXT = PFN_vkCmdSetColorBlendEquationEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorBlendEquationEXT" ) ); - vkCmdSetColorWriteMaskEXT = PFN_vkCmdSetColorWriteMaskEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorWriteMaskEXT" ) ); - vkCmdSetTessellationDomainOriginEXT = PFN_vkCmdSetTessellationDomainOriginEXT( vkGetDeviceProcAddr( device, "vkCmdSetTessellationDomainOriginEXT" ) ); - vkCmdSetRasterizationStreamEXT = PFN_vkCmdSetRasterizationStreamEXT( vkGetDeviceProcAddr( device, "vkCmdSetRasterizationStreamEXT" ) ); - vkCmdSetConservativeRasterizationModeEXT = - PFN_vkCmdSetConservativeRasterizationModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetConservativeRasterizationModeEXT" ) ); - vkCmdSetExtraPrimitiveOverestimationSizeEXT = - PFN_vkCmdSetExtraPrimitiveOverestimationSizeEXT( vkGetDeviceProcAddr( device, "vkCmdSetExtraPrimitiveOverestimationSizeEXT" ) ); - vkCmdSetDepthClipEnableEXT = PFN_vkCmdSetDepthClipEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthClipEnableEXT" ) ); - vkCmdSetSampleLocationsEnableEXT = PFN_vkCmdSetSampleLocationsEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetSampleLocationsEnableEXT" ) ); - vkCmdSetColorBlendAdvancedEXT = PFN_vkCmdSetColorBlendAdvancedEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorBlendAdvancedEXT" ) ); - vkCmdSetProvokingVertexModeEXT = PFN_vkCmdSetProvokingVertexModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetProvokingVertexModeEXT" ) ); - vkCmdSetLineRasterizationModeEXT = PFN_vkCmdSetLineRasterizationModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetLineRasterizationModeEXT" ) ); - vkCmdSetLineStippleEnableEXT = PFN_vkCmdSetLineStippleEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetLineStippleEnableEXT" ) ); - vkCmdSetDepthClipNegativeOneToOneEXT = - PFN_vkCmdSetDepthClipNegativeOneToOneEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthClipNegativeOneToOneEXT" ) ); - vkCmdSetViewportWScalingEnableNV = PFN_vkCmdSetViewportWScalingEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportWScalingEnableNV" ) ); - vkCmdSetViewportSwizzleNV = PFN_vkCmdSetViewportSwizzleNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportSwizzleNV" ) ); - vkCmdSetCoverageToColorEnableNV = PFN_vkCmdSetCoverageToColorEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageToColorEnableNV" ) ); - vkCmdSetCoverageToColorLocationNV = PFN_vkCmdSetCoverageToColorLocationNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageToColorLocationNV" ) ); - vkCmdSetCoverageModulationModeNV = PFN_vkCmdSetCoverageModulationModeNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageModulationModeNV" ) ); - vkCmdSetCoverageModulationTableEnableNV = - PFN_vkCmdSetCoverageModulationTableEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageModulationTableEnableNV" ) ); - vkCmdSetCoverageModulationTableNV = PFN_vkCmdSetCoverageModulationTableNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageModulationTableNV" ) ); - vkCmdSetShadingRateImageEnableNV = PFN_vkCmdSetShadingRateImageEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetShadingRateImageEnableNV" ) ); - 
vkCmdSetRepresentativeFragmentTestEnableNV = - PFN_vkCmdSetRepresentativeFragmentTestEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetRepresentativeFragmentTestEnableNV" ) ); - vkCmdSetCoverageReductionModeNV = PFN_vkCmdSetCoverageReductionModeNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageReductionModeNV" ) ); + PFN_vkCmdSetDepthClampEnableEXT vkCmdSetDepthClampEnableEXT = 0; + PFN_vkCmdSetPolygonModeEXT vkCmdSetPolygonModeEXT = 0; + PFN_vkCmdSetRasterizationSamplesEXT vkCmdSetRasterizationSamplesEXT = 0; + PFN_vkCmdSetSampleMaskEXT vkCmdSetSampleMaskEXT = 0; + PFN_vkCmdSetAlphaToCoverageEnableEXT vkCmdSetAlphaToCoverageEnableEXT = 0; + PFN_vkCmdSetAlphaToOneEnableEXT vkCmdSetAlphaToOneEnableEXT = 0; + PFN_vkCmdSetLogicOpEnableEXT vkCmdSetLogicOpEnableEXT = 0; + PFN_vkCmdSetColorBlendEnableEXT vkCmdSetColorBlendEnableEXT = 0; + PFN_vkCmdSetColorBlendEquationEXT vkCmdSetColorBlendEquationEXT = 0; + PFN_vkCmdSetColorWriteMaskEXT vkCmdSetColorWriteMaskEXT = 0; + PFN_vkCmdSetTessellationDomainOriginEXT vkCmdSetTessellationDomainOriginEXT = 0; + PFN_vkCmdSetRasterizationStreamEXT vkCmdSetRasterizationStreamEXT = 0; + PFN_vkCmdSetConservativeRasterizationModeEXT vkCmdSetConservativeRasterizationModeEXT = 0; + PFN_vkCmdSetExtraPrimitiveOverestimationSizeEXT vkCmdSetExtraPrimitiveOverestimationSizeEXT = 0; + PFN_vkCmdSetDepthClipEnableEXT vkCmdSetDepthClipEnableEXT = 0; + PFN_vkCmdSetSampleLocationsEnableEXT vkCmdSetSampleLocationsEnableEXT = 0; + PFN_vkCmdSetColorBlendAdvancedEXT vkCmdSetColorBlendAdvancedEXT = 0; + PFN_vkCmdSetProvokingVertexModeEXT vkCmdSetProvokingVertexModeEXT = 0; + PFN_vkCmdSetLineRasterizationModeEXT vkCmdSetLineRasterizationModeEXT = 0; + PFN_vkCmdSetLineStippleEnableEXT vkCmdSetLineStippleEnableEXT = 0; + PFN_vkCmdSetDepthClipNegativeOneToOneEXT vkCmdSetDepthClipNegativeOneToOneEXT = 0; + PFN_vkCmdSetViewportWScalingEnableNV vkCmdSetViewportWScalingEnableNV = 0; + PFN_vkCmdSetViewportSwizzleNV vkCmdSetViewportSwizzleNV = 0; + PFN_vkCmdSetCoverageToColorEnableNV vkCmdSetCoverageToColorEnableNV = 0; + PFN_vkCmdSetCoverageToColorLocationNV vkCmdSetCoverageToColorLocationNV = 0; + PFN_vkCmdSetCoverageModulationModeNV vkCmdSetCoverageModulationModeNV = 0; + PFN_vkCmdSetCoverageModulationTableEnableNV vkCmdSetCoverageModulationTableEnableNV = 0; + PFN_vkCmdSetCoverageModulationTableNV vkCmdSetCoverageModulationTableNV = 0; + PFN_vkCmdSetShadingRateImageEnableNV vkCmdSetShadingRateImageEnableNV = 0; + PFN_vkCmdSetRepresentativeFragmentTestEnableNV vkCmdSetRepresentativeFragmentTestEnableNV = 0; + PFN_vkCmdSetCoverageReductionModeNV vkCmdSetCoverageReductionModeNV = 0; //=== VK_EXT_shader_module_identifier === - vkGetShaderModuleIdentifierEXT = PFN_vkGetShaderModuleIdentifierEXT( vkGetDeviceProcAddr( device, "vkGetShaderModuleIdentifierEXT" ) ); - vkGetShaderModuleCreateInfoIdentifierEXT = - PFN_vkGetShaderModuleCreateInfoIdentifierEXT( vkGetDeviceProcAddr( device, "vkGetShaderModuleCreateInfoIdentifierEXT" ) ); + PFN_vkGetShaderModuleIdentifierEXT vkGetShaderModuleIdentifierEXT = 0; + PFN_vkGetShaderModuleCreateInfoIdentifierEXT vkGetShaderModuleCreateInfoIdentifierEXT = 0; //=== VK_NV_optical_flow === - vkCreateOpticalFlowSessionNV = PFN_vkCreateOpticalFlowSessionNV( vkGetDeviceProcAddr( device, "vkCreateOpticalFlowSessionNV" ) ); - vkDestroyOpticalFlowSessionNV = PFN_vkDestroyOpticalFlowSessionNV( vkGetDeviceProcAddr( device, "vkDestroyOpticalFlowSessionNV" ) ); - vkBindOpticalFlowSessionImageNV = PFN_vkBindOpticalFlowSessionImageNV( vkGetDeviceProcAddr( device, 
"vkBindOpticalFlowSessionImageNV" ) ); - vkCmdOpticalFlowExecuteNV = PFN_vkCmdOpticalFlowExecuteNV( vkGetDeviceProcAddr( device, "vkCmdOpticalFlowExecuteNV" ) ); + PFN_vkCreateOpticalFlowSessionNV vkCreateOpticalFlowSessionNV = 0; + PFN_vkDestroyOpticalFlowSessionNV vkDestroyOpticalFlowSessionNV = 0; + PFN_vkBindOpticalFlowSessionImageNV vkBindOpticalFlowSessionImageNV = 0; + PFN_vkCmdOpticalFlowExecuteNV vkCmdOpticalFlowExecuteNV = 0; //=== VK_KHR_maintenance5 === - vkCmdBindIndexBuffer2KHR = PFN_vkCmdBindIndexBuffer2KHR( vkGetDeviceProcAddr( device, "vkCmdBindIndexBuffer2KHR" ) ); - vkGetRenderingAreaGranularityKHR = PFN_vkGetRenderingAreaGranularityKHR( vkGetDeviceProcAddr( device, "vkGetRenderingAreaGranularityKHR" ) ); - vkGetDeviceImageSubresourceLayoutKHR = - PFN_vkGetDeviceImageSubresourceLayoutKHR( vkGetDeviceProcAddr( device, "vkGetDeviceImageSubresourceLayoutKHR" ) ); - vkGetImageSubresourceLayout2KHR = PFN_vkGetImageSubresourceLayout2KHR( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout2KHR" ) ); + PFN_vkCmdBindIndexBuffer2KHR vkCmdBindIndexBuffer2KHR = 0; + PFN_vkGetRenderingAreaGranularityKHR vkGetRenderingAreaGranularityKHR = 0; + PFN_vkGetDeviceImageSubresourceLayoutKHR vkGetDeviceImageSubresourceLayoutKHR = 0; + PFN_vkGetImageSubresourceLayout2KHR vkGetImageSubresourceLayout2KHR = 0; //=== VK_AMD_anti_lag === - vkAntiLagUpdateAMD = PFN_vkAntiLagUpdateAMD( vkGetDeviceProcAddr( device, "vkAntiLagUpdateAMD" ) ); + PFN_vkAntiLagUpdateAMD vkAntiLagUpdateAMD = 0; //=== VK_EXT_shader_object === - vkCreateShadersEXT = PFN_vkCreateShadersEXT( vkGetDeviceProcAddr( device, "vkCreateShadersEXT" ) ); - vkDestroyShaderEXT = PFN_vkDestroyShaderEXT( vkGetDeviceProcAddr( device, "vkDestroyShaderEXT" ) ); - vkGetShaderBinaryDataEXT = PFN_vkGetShaderBinaryDataEXT( vkGetDeviceProcAddr( device, "vkGetShaderBinaryDataEXT" ) ); - vkCmdBindShadersEXT = PFN_vkCmdBindShadersEXT( vkGetDeviceProcAddr( device, "vkCmdBindShadersEXT" ) ); + PFN_vkCreateShadersEXT vkCreateShadersEXT = 0; + PFN_vkDestroyShaderEXT vkDestroyShaderEXT = 0; + PFN_vkGetShaderBinaryDataEXT vkGetShaderBinaryDataEXT = 0; + PFN_vkCmdBindShadersEXT vkCmdBindShadersEXT = 0; + PFN_vkCmdSetDepthClampRangeEXT vkCmdSetDepthClampRangeEXT = 0; //=== VK_KHR_pipeline_binary === - vkCreatePipelineBinariesKHR = PFN_vkCreatePipelineBinariesKHR( vkGetDeviceProcAddr( device, "vkCreatePipelineBinariesKHR" ) ); - vkDestroyPipelineBinaryKHR = PFN_vkDestroyPipelineBinaryKHR( vkGetDeviceProcAddr( device, "vkDestroyPipelineBinaryKHR" ) ); - vkGetPipelineKeyKHR = PFN_vkGetPipelineKeyKHR( vkGetDeviceProcAddr( device, "vkGetPipelineKeyKHR" ) ); - vkGetPipelineBinaryDataKHR = PFN_vkGetPipelineBinaryDataKHR( vkGetDeviceProcAddr( device, "vkGetPipelineBinaryDataKHR" ) ); - vkReleaseCapturedPipelineDataKHR = PFN_vkReleaseCapturedPipelineDataKHR( vkGetDeviceProcAddr( device, "vkReleaseCapturedPipelineDataKHR" ) ); + PFN_vkCreatePipelineBinariesKHR vkCreatePipelineBinariesKHR = 0; + PFN_vkDestroyPipelineBinaryKHR vkDestroyPipelineBinaryKHR = 0; + PFN_vkGetPipelineKeyKHR vkGetPipelineKeyKHR = 0; + PFN_vkGetPipelineBinaryDataKHR vkGetPipelineBinaryDataKHR = 0; + PFN_vkReleaseCapturedPipelineDataKHR vkReleaseCapturedPipelineDataKHR = 0; //=== VK_QCOM_tile_properties === - vkGetFramebufferTilePropertiesQCOM = PFN_vkGetFramebufferTilePropertiesQCOM( vkGetDeviceProcAddr( device, "vkGetFramebufferTilePropertiesQCOM" ) ); - vkGetDynamicRenderingTilePropertiesQCOM = - PFN_vkGetDynamicRenderingTilePropertiesQCOM( vkGetDeviceProcAddr( device, 
"vkGetDynamicRenderingTilePropertiesQCOM" ) ); + PFN_vkGetFramebufferTilePropertiesQCOM vkGetFramebufferTilePropertiesQCOM = 0; + PFN_vkGetDynamicRenderingTilePropertiesQCOM vkGetDynamicRenderingTilePropertiesQCOM = 0; //=== VK_NV_low_latency2 === - vkSetLatencySleepModeNV = PFN_vkSetLatencySleepModeNV( vkGetDeviceProcAddr( device, "vkSetLatencySleepModeNV" ) ); - vkLatencySleepNV = PFN_vkLatencySleepNV( vkGetDeviceProcAddr( device, "vkLatencySleepNV" ) ); - vkSetLatencyMarkerNV = PFN_vkSetLatencyMarkerNV( vkGetDeviceProcAddr( device, "vkSetLatencyMarkerNV" ) ); - vkGetLatencyTimingsNV = PFN_vkGetLatencyTimingsNV( vkGetDeviceProcAddr( device, "vkGetLatencyTimingsNV" ) ); - vkQueueNotifyOutOfBandNV = PFN_vkQueueNotifyOutOfBandNV( vkGetDeviceProcAddr( device, "vkQueueNotifyOutOfBandNV" ) ); + PFN_vkSetLatencySleepModeNV vkSetLatencySleepModeNV = 0; + PFN_vkLatencySleepNV vkLatencySleepNV = 0; + PFN_vkSetLatencyMarkerNV vkSetLatencyMarkerNV = 0; + PFN_vkGetLatencyTimingsNV vkGetLatencyTimingsNV = 0; + PFN_vkQueueNotifyOutOfBandNV vkQueueNotifyOutOfBandNV = 0; //=== VK_EXT_attachment_feedback_loop_dynamic_state === - vkCmdSetAttachmentFeedbackLoopEnableEXT = - PFN_vkCmdSetAttachmentFeedbackLoopEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetAttachmentFeedbackLoopEnableEXT" ) ); + PFN_vkCmdSetAttachmentFeedbackLoopEnableEXT vkCmdSetAttachmentFeedbackLoopEnableEXT = 0; # if defined( VK_USE_PLATFORM_SCREEN_QNX ) //=== VK_QNX_external_memory_screen_buffer === - vkGetScreenBufferPropertiesQNX = PFN_vkGetScreenBufferPropertiesQNX( vkGetDeviceProcAddr( device, "vkGetScreenBufferPropertiesQNX" ) ); + PFN_vkGetScreenBufferPropertiesQNX vkGetScreenBufferPropertiesQNX = 0; +# else + PFN_dummy vkGetScreenBufferPropertiesQNX_placeholder = 0; # endif /*VK_USE_PLATFORM_SCREEN_QNX*/ //=== VK_KHR_line_rasterization === - vkCmdSetLineStippleKHR = PFN_vkCmdSetLineStippleKHR( vkGetDeviceProcAddr( device, "vkCmdSetLineStippleKHR" ) ); + PFN_vkCmdSetLineStippleKHR vkCmdSetLineStippleKHR = 0; //=== VK_KHR_calibrated_timestamps === - vkGetCalibratedTimestampsKHR = PFN_vkGetCalibratedTimestampsKHR( vkGetDeviceProcAddr( device, "vkGetCalibratedTimestampsKHR" ) ); + PFN_vkGetCalibratedTimestampsKHR vkGetCalibratedTimestampsKHR = 0; //=== VK_KHR_maintenance6 === - vkCmdBindDescriptorSets2KHR = PFN_vkCmdBindDescriptorSets2KHR( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorSets2KHR" ) ); - vkCmdPushConstants2KHR = PFN_vkCmdPushConstants2KHR( vkGetDeviceProcAddr( device, "vkCmdPushConstants2KHR" ) ); - vkCmdPushDescriptorSet2KHR = PFN_vkCmdPushDescriptorSet2KHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSet2KHR" ) ); - vkCmdPushDescriptorSetWithTemplate2KHR = - PFN_vkCmdPushDescriptorSetWithTemplate2KHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetWithTemplate2KHR" ) ); - vkCmdSetDescriptorBufferOffsets2EXT = PFN_vkCmdSetDescriptorBufferOffsets2EXT( vkGetDeviceProcAddr( device, "vkCmdSetDescriptorBufferOffsets2EXT" ) ); - vkCmdBindDescriptorBufferEmbeddedSamplers2EXT = - PFN_vkCmdBindDescriptorBufferEmbeddedSamplers2EXT( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorBufferEmbeddedSamplers2EXT" ) ); - } - - public: - //=== VK_VERSION_1_0 === - PFN_vkGetDeviceProcAddr vkGetDeviceProcAddr = 0; - PFN_vkDestroyDevice vkDestroyDevice = 0; - PFN_vkGetDeviceQueue vkGetDeviceQueue = 0; - PFN_vkQueueSubmit vkQueueSubmit = 0; - PFN_vkQueueWaitIdle vkQueueWaitIdle = 0; - PFN_vkDeviceWaitIdle vkDeviceWaitIdle = 0; - PFN_vkAllocateMemory vkAllocateMemory = 0; - PFN_vkFreeMemory vkFreeMemory = 0; - PFN_vkMapMemory 
vkMapMemory = 0; - PFN_vkUnmapMemory vkUnmapMemory = 0; - PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges = 0; - PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges = 0; - PFN_vkGetDeviceMemoryCommitment vkGetDeviceMemoryCommitment = 0; - PFN_vkBindBufferMemory vkBindBufferMemory = 0; - PFN_vkBindImageMemory vkBindImageMemory = 0; - PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements = 0; - PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements = 0; - PFN_vkGetImageSparseMemoryRequirements vkGetImageSparseMemoryRequirements = 0; - PFN_vkQueueBindSparse vkQueueBindSparse = 0; - PFN_vkCreateFence vkCreateFence = 0; - PFN_vkDestroyFence vkDestroyFence = 0; - PFN_vkResetFences vkResetFences = 0; - PFN_vkGetFenceStatus vkGetFenceStatus = 0; - PFN_vkWaitForFences vkWaitForFences = 0; - PFN_vkCreateSemaphore vkCreateSemaphore = 0; - PFN_vkDestroySemaphore vkDestroySemaphore = 0; - PFN_vkCreateEvent vkCreateEvent = 0; - PFN_vkDestroyEvent vkDestroyEvent = 0; - PFN_vkGetEventStatus vkGetEventStatus = 0; - PFN_vkSetEvent vkSetEvent = 0; - PFN_vkResetEvent vkResetEvent = 0; - PFN_vkCreateQueryPool vkCreateQueryPool = 0; - PFN_vkDestroyQueryPool vkDestroyQueryPool = 0; - PFN_vkGetQueryPoolResults vkGetQueryPoolResults = 0; - PFN_vkCreateBuffer vkCreateBuffer = 0; - PFN_vkDestroyBuffer vkDestroyBuffer = 0; - PFN_vkCreateBufferView vkCreateBufferView = 0; - PFN_vkDestroyBufferView vkDestroyBufferView = 0; - PFN_vkCreateImage vkCreateImage = 0; - PFN_vkDestroyImage vkDestroyImage = 0; - PFN_vkGetImageSubresourceLayout vkGetImageSubresourceLayout = 0; - PFN_vkCreateImageView vkCreateImageView = 0; - PFN_vkDestroyImageView vkDestroyImageView = 0; - PFN_vkCreateShaderModule vkCreateShaderModule = 0; - PFN_vkDestroyShaderModule vkDestroyShaderModule = 0; - PFN_vkCreatePipelineCache vkCreatePipelineCache = 0; - PFN_vkDestroyPipelineCache vkDestroyPipelineCache = 0; - PFN_vkGetPipelineCacheData vkGetPipelineCacheData = 0; - PFN_vkMergePipelineCaches vkMergePipelineCaches = 0; - PFN_vkCreateGraphicsPipelines vkCreateGraphicsPipelines = 0; - PFN_vkCreateComputePipelines vkCreateComputePipelines = 0; - PFN_vkDestroyPipeline vkDestroyPipeline = 0; - PFN_vkCreatePipelineLayout vkCreatePipelineLayout = 0; - PFN_vkDestroyPipelineLayout vkDestroyPipelineLayout = 0; - PFN_vkCreateSampler vkCreateSampler = 0; - PFN_vkDestroySampler vkDestroySampler = 0; - PFN_vkCreateDescriptorSetLayout vkCreateDescriptorSetLayout = 0; - PFN_vkDestroyDescriptorSetLayout vkDestroyDescriptorSetLayout = 0; - PFN_vkCreateDescriptorPool vkCreateDescriptorPool = 0; - PFN_vkDestroyDescriptorPool vkDestroyDescriptorPool = 0; - PFN_vkResetDescriptorPool vkResetDescriptorPool = 0; - PFN_vkAllocateDescriptorSets vkAllocateDescriptorSets = 0; - PFN_vkFreeDescriptorSets vkFreeDescriptorSets = 0; - PFN_vkUpdateDescriptorSets vkUpdateDescriptorSets = 0; - PFN_vkCreateFramebuffer vkCreateFramebuffer = 0; - PFN_vkDestroyFramebuffer vkDestroyFramebuffer = 0; - PFN_vkCreateRenderPass vkCreateRenderPass = 0; - PFN_vkDestroyRenderPass vkDestroyRenderPass = 0; - PFN_vkGetRenderAreaGranularity vkGetRenderAreaGranularity = 0; - PFN_vkCreateCommandPool vkCreateCommandPool = 0; - PFN_vkDestroyCommandPool vkDestroyCommandPool = 0; - PFN_vkResetCommandPool vkResetCommandPool = 0; - PFN_vkAllocateCommandBuffers vkAllocateCommandBuffers = 0; - PFN_vkFreeCommandBuffers vkFreeCommandBuffers = 0; - PFN_vkBeginCommandBuffer vkBeginCommandBuffer = 0; - PFN_vkEndCommandBuffer vkEndCommandBuffer = 0; - PFN_vkResetCommandBuffer 
vkResetCommandBuffer = 0; - PFN_vkCmdBindPipeline vkCmdBindPipeline = 0; - PFN_vkCmdSetViewport vkCmdSetViewport = 0; - PFN_vkCmdSetScissor vkCmdSetScissor = 0; - PFN_vkCmdSetLineWidth vkCmdSetLineWidth = 0; - PFN_vkCmdSetDepthBias vkCmdSetDepthBias = 0; - PFN_vkCmdSetBlendConstants vkCmdSetBlendConstants = 0; - PFN_vkCmdSetDepthBounds vkCmdSetDepthBounds = 0; - PFN_vkCmdSetStencilCompareMask vkCmdSetStencilCompareMask = 0; - PFN_vkCmdSetStencilWriteMask vkCmdSetStencilWriteMask = 0; - PFN_vkCmdSetStencilReference vkCmdSetStencilReference = 0; - PFN_vkCmdBindDescriptorSets vkCmdBindDescriptorSets = 0; - PFN_vkCmdBindIndexBuffer vkCmdBindIndexBuffer = 0; - PFN_vkCmdBindVertexBuffers vkCmdBindVertexBuffers = 0; - PFN_vkCmdDraw vkCmdDraw = 0; - PFN_vkCmdDrawIndexed vkCmdDrawIndexed = 0; - PFN_vkCmdDrawIndirect vkCmdDrawIndirect = 0; - PFN_vkCmdDrawIndexedIndirect vkCmdDrawIndexedIndirect = 0; - PFN_vkCmdDispatch vkCmdDispatch = 0; - PFN_vkCmdDispatchIndirect vkCmdDispatchIndirect = 0; - PFN_vkCmdCopyBuffer vkCmdCopyBuffer = 0; - PFN_vkCmdCopyImage vkCmdCopyImage = 0; - PFN_vkCmdBlitImage vkCmdBlitImage = 0; - PFN_vkCmdCopyBufferToImage vkCmdCopyBufferToImage = 0; - PFN_vkCmdCopyImageToBuffer vkCmdCopyImageToBuffer = 0; - PFN_vkCmdUpdateBuffer vkCmdUpdateBuffer = 0; - PFN_vkCmdFillBuffer vkCmdFillBuffer = 0; - PFN_vkCmdClearColorImage vkCmdClearColorImage = 0; - PFN_vkCmdClearDepthStencilImage vkCmdClearDepthStencilImage = 0; - PFN_vkCmdClearAttachments vkCmdClearAttachments = 0; - PFN_vkCmdResolveImage vkCmdResolveImage = 0; - PFN_vkCmdSetEvent vkCmdSetEvent = 0; - PFN_vkCmdResetEvent vkCmdResetEvent = 0; - PFN_vkCmdWaitEvents vkCmdWaitEvents = 0; - PFN_vkCmdPipelineBarrier vkCmdPipelineBarrier = 0; - PFN_vkCmdBeginQuery vkCmdBeginQuery = 0; - PFN_vkCmdEndQuery vkCmdEndQuery = 0; - PFN_vkCmdResetQueryPool vkCmdResetQueryPool = 0; - PFN_vkCmdWriteTimestamp vkCmdWriteTimestamp = 0; - PFN_vkCmdCopyQueryPoolResults vkCmdCopyQueryPoolResults = 0; - PFN_vkCmdPushConstants vkCmdPushConstants = 0; - PFN_vkCmdBeginRenderPass vkCmdBeginRenderPass = 0; - PFN_vkCmdNextSubpass vkCmdNextSubpass = 0; - PFN_vkCmdEndRenderPass vkCmdEndRenderPass = 0; - PFN_vkCmdExecuteCommands vkCmdExecuteCommands = 0; - - //=== VK_VERSION_1_1 === - PFN_vkBindBufferMemory2 vkBindBufferMemory2 = 0; - PFN_vkBindImageMemory2 vkBindImageMemory2 = 0; - PFN_vkGetDeviceGroupPeerMemoryFeatures vkGetDeviceGroupPeerMemoryFeatures = 0; - PFN_vkCmdSetDeviceMask vkCmdSetDeviceMask = 0; - PFN_vkCmdDispatchBase vkCmdDispatchBase = 0; - PFN_vkGetImageMemoryRequirements2 vkGetImageMemoryRequirements2 = 0; - PFN_vkGetBufferMemoryRequirements2 vkGetBufferMemoryRequirements2 = 0; - PFN_vkGetImageSparseMemoryRequirements2 vkGetImageSparseMemoryRequirements2 = 0; - PFN_vkTrimCommandPool vkTrimCommandPool = 0; - PFN_vkGetDeviceQueue2 vkGetDeviceQueue2 = 0; - PFN_vkCreateSamplerYcbcrConversion vkCreateSamplerYcbcrConversion = 0; - PFN_vkDestroySamplerYcbcrConversion vkDestroySamplerYcbcrConversion = 0; - PFN_vkCreateDescriptorUpdateTemplate vkCreateDescriptorUpdateTemplate = 0; - PFN_vkDestroyDescriptorUpdateTemplate vkDestroyDescriptorUpdateTemplate = 0; - PFN_vkUpdateDescriptorSetWithTemplate vkUpdateDescriptorSetWithTemplate = 0; - PFN_vkGetDescriptorSetLayoutSupport vkGetDescriptorSetLayoutSupport = 0; - - //=== VK_VERSION_1_2 === - PFN_vkCmdDrawIndirectCount vkCmdDrawIndirectCount = 0; - PFN_vkCmdDrawIndexedIndirectCount vkCmdDrawIndexedIndirectCount = 0; - PFN_vkCreateRenderPass2 vkCreateRenderPass2 = 0; - PFN_vkCmdBeginRenderPass2 
vkCmdBeginRenderPass2 = 0; - PFN_vkCmdNextSubpass2 vkCmdNextSubpass2 = 0; - PFN_vkCmdEndRenderPass2 vkCmdEndRenderPass2 = 0; - PFN_vkResetQueryPool vkResetQueryPool = 0; - PFN_vkGetSemaphoreCounterValue vkGetSemaphoreCounterValue = 0; - PFN_vkWaitSemaphores vkWaitSemaphores = 0; - PFN_vkSignalSemaphore vkSignalSemaphore = 0; - PFN_vkGetBufferDeviceAddress vkGetBufferDeviceAddress = 0; - PFN_vkGetBufferOpaqueCaptureAddress vkGetBufferOpaqueCaptureAddress = 0; - PFN_vkGetDeviceMemoryOpaqueCaptureAddress vkGetDeviceMemoryOpaqueCaptureAddress = 0; - - //=== VK_VERSION_1_3 === - PFN_vkCreatePrivateDataSlot vkCreatePrivateDataSlot = 0; - PFN_vkDestroyPrivateDataSlot vkDestroyPrivateDataSlot = 0; - PFN_vkSetPrivateData vkSetPrivateData = 0; - PFN_vkGetPrivateData vkGetPrivateData = 0; - PFN_vkCmdSetEvent2 vkCmdSetEvent2 = 0; - PFN_vkCmdResetEvent2 vkCmdResetEvent2 = 0; - PFN_vkCmdWaitEvents2 vkCmdWaitEvents2 = 0; - PFN_vkCmdPipelineBarrier2 vkCmdPipelineBarrier2 = 0; - PFN_vkCmdWriteTimestamp2 vkCmdWriteTimestamp2 = 0; - PFN_vkQueueSubmit2 vkQueueSubmit2 = 0; - PFN_vkCmdCopyBuffer2 vkCmdCopyBuffer2 = 0; - PFN_vkCmdCopyImage2 vkCmdCopyImage2 = 0; - PFN_vkCmdCopyBufferToImage2 vkCmdCopyBufferToImage2 = 0; - PFN_vkCmdCopyImageToBuffer2 vkCmdCopyImageToBuffer2 = 0; - PFN_vkCmdBlitImage2 vkCmdBlitImage2 = 0; - PFN_vkCmdResolveImage2 vkCmdResolveImage2 = 0; - PFN_vkCmdBeginRendering vkCmdBeginRendering = 0; - PFN_vkCmdEndRendering vkCmdEndRendering = 0; - PFN_vkCmdSetCullMode vkCmdSetCullMode = 0; - PFN_vkCmdSetFrontFace vkCmdSetFrontFace = 0; - PFN_vkCmdSetPrimitiveTopology vkCmdSetPrimitiveTopology = 0; - PFN_vkCmdSetViewportWithCount vkCmdSetViewportWithCount = 0; - PFN_vkCmdSetScissorWithCount vkCmdSetScissorWithCount = 0; - PFN_vkCmdBindVertexBuffers2 vkCmdBindVertexBuffers2 = 0; - PFN_vkCmdSetDepthTestEnable vkCmdSetDepthTestEnable = 0; - PFN_vkCmdSetDepthWriteEnable vkCmdSetDepthWriteEnable = 0; - PFN_vkCmdSetDepthCompareOp vkCmdSetDepthCompareOp = 0; - PFN_vkCmdSetDepthBoundsTestEnable vkCmdSetDepthBoundsTestEnable = 0; - PFN_vkCmdSetStencilTestEnable vkCmdSetStencilTestEnable = 0; - PFN_vkCmdSetStencilOp vkCmdSetStencilOp = 0; - PFN_vkCmdSetRasterizerDiscardEnable vkCmdSetRasterizerDiscardEnable = 0; - PFN_vkCmdSetDepthBiasEnable vkCmdSetDepthBiasEnable = 0; - PFN_vkCmdSetPrimitiveRestartEnable vkCmdSetPrimitiveRestartEnable = 0; - PFN_vkGetDeviceBufferMemoryRequirements vkGetDeviceBufferMemoryRequirements = 0; - PFN_vkGetDeviceImageMemoryRequirements vkGetDeviceImageMemoryRequirements = 0; - PFN_vkGetDeviceImageSparseMemoryRequirements vkGetDeviceImageSparseMemoryRequirements = 0; - - //=== VK_KHR_swapchain === - PFN_vkCreateSwapchainKHR vkCreateSwapchainKHR = 0; - PFN_vkDestroySwapchainKHR vkDestroySwapchainKHR = 0; - PFN_vkGetSwapchainImagesKHR vkGetSwapchainImagesKHR = 0; - PFN_vkAcquireNextImageKHR vkAcquireNextImageKHR = 0; - PFN_vkQueuePresentKHR vkQueuePresentKHR = 0; - PFN_vkGetDeviceGroupPresentCapabilitiesKHR vkGetDeviceGroupPresentCapabilitiesKHR = 0; - PFN_vkGetDeviceGroupSurfacePresentModesKHR vkGetDeviceGroupSurfacePresentModesKHR = 0; - PFN_vkAcquireNextImage2KHR vkAcquireNextImage2KHR = 0; - - //=== VK_KHR_display_swapchain === - PFN_vkCreateSharedSwapchainsKHR vkCreateSharedSwapchainsKHR = 0; - - //=== VK_EXT_debug_marker === - PFN_vkDebugMarkerSetObjectTagEXT vkDebugMarkerSetObjectTagEXT = 0; - PFN_vkDebugMarkerSetObjectNameEXT vkDebugMarkerSetObjectNameEXT = 0; - PFN_vkCmdDebugMarkerBeginEXT vkCmdDebugMarkerBeginEXT = 0; - PFN_vkCmdDebugMarkerEndEXT vkCmdDebugMarkerEndEXT = 
0; - PFN_vkCmdDebugMarkerInsertEXT vkCmdDebugMarkerInsertEXT = 0; - - //=== VK_KHR_video_queue === - PFN_vkCreateVideoSessionKHR vkCreateVideoSessionKHR = 0; - PFN_vkDestroyVideoSessionKHR vkDestroyVideoSessionKHR = 0; - PFN_vkGetVideoSessionMemoryRequirementsKHR vkGetVideoSessionMemoryRequirementsKHR = 0; - PFN_vkBindVideoSessionMemoryKHR vkBindVideoSessionMemoryKHR = 0; - PFN_vkCreateVideoSessionParametersKHR vkCreateVideoSessionParametersKHR = 0; - PFN_vkUpdateVideoSessionParametersKHR vkUpdateVideoSessionParametersKHR = 0; - PFN_vkDestroyVideoSessionParametersKHR vkDestroyVideoSessionParametersKHR = 0; - PFN_vkCmdBeginVideoCodingKHR vkCmdBeginVideoCodingKHR = 0; - PFN_vkCmdEndVideoCodingKHR vkCmdEndVideoCodingKHR = 0; - PFN_vkCmdControlVideoCodingKHR vkCmdControlVideoCodingKHR = 0; - - //=== VK_KHR_video_decode_queue === - PFN_vkCmdDecodeVideoKHR vkCmdDecodeVideoKHR = 0; - - //=== VK_EXT_transform_feedback === - PFN_vkCmdBindTransformFeedbackBuffersEXT vkCmdBindTransformFeedbackBuffersEXT = 0; - PFN_vkCmdBeginTransformFeedbackEXT vkCmdBeginTransformFeedbackEXT = 0; - PFN_vkCmdEndTransformFeedbackEXT vkCmdEndTransformFeedbackEXT = 0; - PFN_vkCmdBeginQueryIndexedEXT vkCmdBeginQueryIndexedEXT = 0; - PFN_vkCmdEndQueryIndexedEXT vkCmdEndQueryIndexedEXT = 0; - PFN_vkCmdDrawIndirectByteCountEXT vkCmdDrawIndirectByteCountEXT = 0; - - //=== VK_NVX_binary_import === - PFN_vkCreateCuModuleNVX vkCreateCuModuleNVX = 0; - PFN_vkCreateCuFunctionNVX vkCreateCuFunctionNVX = 0; - PFN_vkDestroyCuModuleNVX vkDestroyCuModuleNVX = 0; - PFN_vkDestroyCuFunctionNVX vkDestroyCuFunctionNVX = 0; - PFN_vkCmdCuLaunchKernelNVX vkCmdCuLaunchKernelNVX = 0; - - //=== VK_NVX_image_view_handle === - PFN_vkGetImageViewHandleNVX vkGetImageViewHandleNVX = 0; - PFN_vkGetImageViewAddressNVX vkGetImageViewAddressNVX = 0; - - //=== VK_AMD_draw_indirect_count === - PFN_vkCmdDrawIndirectCountAMD vkCmdDrawIndirectCountAMD = 0; - PFN_vkCmdDrawIndexedIndirectCountAMD vkCmdDrawIndexedIndirectCountAMD = 0; - - //=== VK_AMD_shader_info === - PFN_vkGetShaderInfoAMD vkGetShaderInfoAMD = 0; - - //=== VK_KHR_dynamic_rendering === - PFN_vkCmdBeginRenderingKHR vkCmdBeginRenderingKHR = 0; - PFN_vkCmdEndRenderingKHR vkCmdEndRenderingKHR = 0; - -# if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_NV_external_memory_win32 === - PFN_vkGetMemoryWin32HandleNV vkGetMemoryWin32HandleNV = 0; -# else - PFN_dummy vkGetMemoryWin32HandleNV_placeholder = 0; -# endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - //=== VK_KHR_device_group === - PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR vkGetDeviceGroupPeerMemoryFeaturesKHR = 0; - PFN_vkCmdSetDeviceMaskKHR vkCmdSetDeviceMaskKHR = 0; - PFN_vkCmdDispatchBaseKHR vkCmdDispatchBaseKHR = 0; - - //=== VK_KHR_maintenance1 === - PFN_vkTrimCommandPoolKHR vkTrimCommandPoolKHR = 0; - -# if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_KHR_external_memory_win32 === - PFN_vkGetMemoryWin32HandleKHR vkGetMemoryWin32HandleKHR = 0; - PFN_vkGetMemoryWin32HandlePropertiesKHR vkGetMemoryWin32HandlePropertiesKHR = 0; -# else - PFN_dummy vkGetMemoryWin32HandleKHR_placeholder = 0; - PFN_dummy vkGetMemoryWin32HandlePropertiesKHR_placeholder = 0; -# endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - //=== VK_KHR_external_memory_fd === - PFN_vkGetMemoryFdKHR vkGetMemoryFdKHR = 0; - PFN_vkGetMemoryFdPropertiesKHR vkGetMemoryFdPropertiesKHR = 0; - -# if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_KHR_external_semaphore_win32 === - PFN_vkImportSemaphoreWin32HandleKHR vkImportSemaphoreWin32HandleKHR = 0; - PFN_vkGetSemaphoreWin32HandleKHR 
vkGetSemaphoreWin32HandleKHR = 0; -# else - PFN_dummy vkImportSemaphoreWin32HandleKHR_placeholder = 0; - PFN_dummy vkGetSemaphoreWin32HandleKHR_placeholder = 0; -# endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - //=== VK_KHR_external_semaphore_fd === - PFN_vkImportSemaphoreFdKHR vkImportSemaphoreFdKHR = 0; - PFN_vkGetSemaphoreFdKHR vkGetSemaphoreFdKHR = 0; - - //=== VK_KHR_push_descriptor === - PFN_vkCmdPushDescriptorSetKHR vkCmdPushDescriptorSetKHR = 0; - PFN_vkCmdPushDescriptorSetWithTemplateKHR vkCmdPushDescriptorSetWithTemplateKHR = 0; - - //=== VK_EXT_conditional_rendering === - PFN_vkCmdBeginConditionalRenderingEXT vkCmdBeginConditionalRenderingEXT = 0; - PFN_vkCmdEndConditionalRenderingEXT vkCmdEndConditionalRenderingEXT = 0; - - //=== VK_KHR_descriptor_update_template === - PFN_vkCreateDescriptorUpdateTemplateKHR vkCreateDescriptorUpdateTemplateKHR = 0; - PFN_vkDestroyDescriptorUpdateTemplateKHR vkDestroyDescriptorUpdateTemplateKHR = 0; - PFN_vkUpdateDescriptorSetWithTemplateKHR vkUpdateDescriptorSetWithTemplateKHR = 0; - - //=== VK_NV_clip_space_w_scaling === - PFN_vkCmdSetViewportWScalingNV vkCmdSetViewportWScalingNV = 0; - - //=== VK_EXT_display_control === - PFN_vkDisplayPowerControlEXT vkDisplayPowerControlEXT = 0; - PFN_vkRegisterDeviceEventEXT vkRegisterDeviceEventEXT = 0; - PFN_vkRegisterDisplayEventEXT vkRegisterDisplayEventEXT = 0; - PFN_vkGetSwapchainCounterEXT vkGetSwapchainCounterEXT = 0; - - //=== VK_GOOGLE_display_timing === - PFN_vkGetRefreshCycleDurationGOOGLE vkGetRefreshCycleDurationGOOGLE = 0; - PFN_vkGetPastPresentationTimingGOOGLE vkGetPastPresentationTimingGOOGLE = 0; - - //=== VK_EXT_discard_rectangles === - PFN_vkCmdSetDiscardRectangleEXT vkCmdSetDiscardRectangleEXT = 0; - PFN_vkCmdSetDiscardRectangleEnableEXT vkCmdSetDiscardRectangleEnableEXT = 0; - PFN_vkCmdSetDiscardRectangleModeEXT vkCmdSetDiscardRectangleModeEXT = 0; - - //=== VK_EXT_hdr_metadata === - PFN_vkSetHdrMetadataEXT vkSetHdrMetadataEXT = 0; - - //=== VK_KHR_create_renderpass2 === - PFN_vkCreateRenderPass2KHR vkCreateRenderPass2KHR = 0; - PFN_vkCmdBeginRenderPass2KHR vkCmdBeginRenderPass2KHR = 0; - PFN_vkCmdNextSubpass2KHR vkCmdNextSubpass2KHR = 0; - PFN_vkCmdEndRenderPass2KHR vkCmdEndRenderPass2KHR = 0; - - //=== VK_KHR_shared_presentable_image === - PFN_vkGetSwapchainStatusKHR vkGetSwapchainStatusKHR = 0; - -# if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_KHR_external_fence_win32 === - PFN_vkImportFenceWin32HandleKHR vkImportFenceWin32HandleKHR = 0; - PFN_vkGetFenceWin32HandleKHR vkGetFenceWin32HandleKHR = 0; -# else - PFN_dummy vkImportFenceWin32HandleKHR_placeholder = 0; - PFN_dummy vkGetFenceWin32HandleKHR_placeholder = 0; -# endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - //=== VK_KHR_external_fence_fd === - PFN_vkImportFenceFdKHR vkImportFenceFdKHR = 0; - PFN_vkGetFenceFdKHR vkGetFenceFdKHR = 0; - - //=== VK_KHR_performance_query === - PFN_vkAcquireProfilingLockKHR vkAcquireProfilingLockKHR = 0; - PFN_vkReleaseProfilingLockKHR vkReleaseProfilingLockKHR = 0; - - //=== VK_EXT_debug_utils === - PFN_vkSetDebugUtilsObjectNameEXT vkSetDebugUtilsObjectNameEXT = 0; - PFN_vkSetDebugUtilsObjectTagEXT vkSetDebugUtilsObjectTagEXT = 0; - PFN_vkQueueBeginDebugUtilsLabelEXT vkQueueBeginDebugUtilsLabelEXT = 0; - PFN_vkQueueEndDebugUtilsLabelEXT vkQueueEndDebugUtilsLabelEXT = 0; - PFN_vkQueueInsertDebugUtilsLabelEXT vkQueueInsertDebugUtilsLabelEXT = 0; - PFN_vkCmdBeginDebugUtilsLabelEXT vkCmdBeginDebugUtilsLabelEXT = 0; - PFN_vkCmdEndDebugUtilsLabelEXT vkCmdEndDebugUtilsLabelEXT = 0; - 
PFN_vkCmdInsertDebugUtilsLabelEXT vkCmdInsertDebugUtilsLabelEXT = 0; - -# if defined( VK_USE_PLATFORM_ANDROID_KHR ) - //=== VK_ANDROID_external_memory_android_hardware_buffer === - PFN_vkGetAndroidHardwareBufferPropertiesANDROID vkGetAndroidHardwareBufferPropertiesANDROID = 0; - PFN_vkGetMemoryAndroidHardwareBufferANDROID vkGetMemoryAndroidHardwareBufferANDROID = 0; -# else - PFN_dummy vkGetAndroidHardwareBufferPropertiesANDROID_placeholder = 0; - PFN_dummy vkGetMemoryAndroidHardwareBufferANDROID_placeholder = 0; -# endif /*VK_USE_PLATFORM_ANDROID_KHR*/ - -# if defined( VK_ENABLE_BETA_EXTENSIONS ) - //=== VK_AMDX_shader_enqueue === - PFN_vkCreateExecutionGraphPipelinesAMDX vkCreateExecutionGraphPipelinesAMDX = 0; - PFN_vkGetExecutionGraphPipelineScratchSizeAMDX vkGetExecutionGraphPipelineScratchSizeAMDX = 0; - PFN_vkGetExecutionGraphPipelineNodeIndexAMDX vkGetExecutionGraphPipelineNodeIndexAMDX = 0; - PFN_vkCmdInitializeGraphScratchMemoryAMDX vkCmdInitializeGraphScratchMemoryAMDX = 0; - PFN_vkCmdDispatchGraphAMDX vkCmdDispatchGraphAMDX = 0; - PFN_vkCmdDispatchGraphIndirectAMDX vkCmdDispatchGraphIndirectAMDX = 0; - PFN_vkCmdDispatchGraphIndirectCountAMDX vkCmdDispatchGraphIndirectCountAMDX = 0; -# else - PFN_dummy vkCreateExecutionGraphPipelinesAMDX_placeholder = 0; - PFN_dummy vkGetExecutionGraphPipelineScratchSizeAMDX_placeholder = 0; - PFN_dummy vkGetExecutionGraphPipelineNodeIndexAMDX_placeholder = 0; - PFN_dummy vkCmdInitializeGraphScratchMemoryAMDX_placeholder = 0; - PFN_dummy vkCmdDispatchGraphAMDX_placeholder = 0; - PFN_dummy vkCmdDispatchGraphIndirectAMDX_placeholder = 0; - PFN_dummy vkCmdDispatchGraphIndirectCountAMDX_placeholder = 0; -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ - - //=== VK_EXT_sample_locations === - PFN_vkCmdSetSampleLocationsEXT vkCmdSetSampleLocationsEXT = 0; - - //=== VK_KHR_get_memory_requirements2 === - PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR = 0; - PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR = 0; - PFN_vkGetImageSparseMemoryRequirements2KHR vkGetImageSparseMemoryRequirements2KHR = 0; - - //=== VK_KHR_acceleration_structure === - PFN_vkCreateAccelerationStructureKHR vkCreateAccelerationStructureKHR = 0; - PFN_vkDestroyAccelerationStructureKHR vkDestroyAccelerationStructureKHR = 0; - PFN_vkCmdBuildAccelerationStructuresKHR vkCmdBuildAccelerationStructuresKHR = 0; - PFN_vkCmdBuildAccelerationStructuresIndirectKHR vkCmdBuildAccelerationStructuresIndirectKHR = 0; - PFN_vkBuildAccelerationStructuresKHR vkBuildAccelerationStructuresKHR = 0; - PFN_vkCopyAccelerationStructureKHR vkCopyAccelerationStructureKHR = 0; - PFN_vkCopyAccelerationStructureToMemoryKHR vkCopyAccelerationStructureToMemoryKHR = 0; - PFN_vkCopyMemoryToAccelerationStructureKHR vkCopyMemoryToAccelerationStructureKHR = 0; - PFN_vkWriteAccelerationStructuresPropertiesKHR vkWriteAccelerationStructuresPropertiesKHR = 0; - PFN_vkCmdCopyAccelerationStructureKHR vkCmdCopyAccelerationStructureKHR = 0; - PFN_vkCmdCopyAccelerationStructureToMemoryKHR vkCmdCopyAccelerationStructureToMemoryKHR = 0; - PFN_vkCmdCopyMemoryToAccelerationStructureKHR vkCmdCopyMemoryToAccelerationStructureKHR = 0; - PFN_vkGetAccelerationStructureDeviceAddressKHR vkGetAccelerationStructureDeviceAddressKHR = 0; - PFN_vkCmdWriteAccelerationStructuresPropertiesKHR vkCmdWriteAccelerationStructuresPropertiesKHR = 0; - PFN_vkGetDeviceAccelerationStructureCompatibilityKHR vkGetDeviceAccelerationStructureCompatibilityKHR = 0; - PFN_vkGetAccelerationStructureBuildSizesKHR 
vkGetAccelerationStructureBuildSizesKHR = 0; - - //=== VK_KHR_ray_tracing_pipeline === - PFN_vkCmdTraceRaysKHR vkCmdTraceRaysKHR = 0; - PFN_vkCreateRayTracingPipelinesKHR vkCreateRayTracingPipelinesKHR = 0; - PFN_vkGetRayTracingShaderGroupHandlesKHR vkGetRayTracingShaderGroupHandlesKHR = 0; - PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR vkGetRayTracingCaptureReplayShaderGroupHandlesKHR = 0; - PFN_vkCmdTraceRaysIndirectKHR vkCmdTraceRaysIndirectKHR = 0; - PFN_vkGetRayTracingShaderGroupStackSizeKHR vkGetRayTracingShaderGroupStackSizeKHR = 0; - PFN_vkCmdSetRayTracingPipelineStackSizeKHR vkCmdSetRayTracingPipelineStackSizeKHR = 0; - - //=== VK_KHR_sampler_ycbcr_conversion === - PFN_vkCreateSamplerYcbcrConversionKHR vkCreateSamplerYcbcrConversionKHR = 0; - PFN_vkDestroySamplerYcbcrConversionKHR vkDestroySamplerYcbcrConversionKHR = 0; - - //=== VK_KHR_bind_memory2 === - PFN_vkBindBufferMemory2KHR vkBindBufferMemory2KHR = 0; - PFN_vkBindImageMemory2KHR vkBindImageMemory2KHR = 0; - - //=== VK_EXT_image_drm_format_modifier === - PFN_vkGetImageDrmFormatModifierPropertiesEXT vkGetImageDrmFormatModifierPropertiesEXT = 0; - - //=== VK_EXT_validation_cache === - PFN_vkCreateValidationCacheEXT vkCreateValidationCacheEXT = 0; - PFN_vkDestroyValidationCacheEXT vkDestroyValidationCacheEXT = 0; - PFN_vkMergeValidationCachesEXT vkMergeValidationCachesEXT = 0; - PFN_vkGetValidationCacheDataEXT vkGetValidationCacheDataEXT = 0; - - //=== VK_NV_shading_rate_image === - PFN_vkCmdBindShadingRateImageNV vkCmdBindShadingRateImageNV = 0; - PFN_vkCmdSetViewportShadingRatePaletteNV vkCmdSetViewportShadingRatePaletteNV = 0; - PFN_vkCmdSetCoarseSampleOrderNV vkCmdSetCoarseSampleOrderNV = 0; - - //=== VK_NV_ray_tracing === - PFN_vkCreateAccelerationStructureNV vkCreateAccelerationStructureNV = 0; - PFN_vkDestroyAccelerationStructureNV vkDestroyAccelerationStructureNV = 0; - PFN_vkGetAccelerationStructureMemoryRequirementsNV vkGetAccelerationStructureMemoryRequirementsNV = 0; - PFN_vkBindAccelerationStructureMemoryNV vkBindAccelerationStructureMemoryNV = 0; - PFN_vkCmdBuildAccelerationStructureNV vkCmdBuildAccelerationStructureNV = 0; - PFN_vkCmdCopyAccelerationStructureNV vkCmdCopyAccelerationStructureNV = 0; - PFN_vkCmdTraceRaysNV vkCmdTraceRaysNV = 0; - PFN_vkCreateRayTracingPipelinesNV vkCreateRayTracingPipelinesNV = 0; - PFN_vkGetRayTracingShaderGroupHandlesNV vkGetRayTracingShaderGroupHandlesNV = 0; - PFN_vkGetAccelerationStructureHandleNV vkGetAccelerationStructureHandleNV = 0; - PFN_vkCmdWriteAccelerationStructuresPropertiesNV vkCmdWriteAccelerationStructuresPropertiesNV = 0; - PFN_vkCompileDeferredNV vkCompileDeferredNV = 0; - - //=== VK_KHR_maintenance3 === - PFN_vkGetDescriptorSetLayoutSupportKHR vkGetDescriptorSetLayoutSupportKHR = 0; - - //=== VK_KHR_draw_indirect_count === - PFN_vkCmdDrawIndirectCountKHR vkCmdDrawIndirectCountKHR = 0; - PFN_vkCmdDrawIndexedIndirectCountKHR vkCmdDrawIndexedIndirectCountKHR = 0; - - //=== VK_EXT_external_memory_host === - PFN_vkGetMemoryHostPointerPropertiesEXT vkGetMemoryHostPointerPropertiesEXT = 0; - - //=== VK_AMD_buffer_marker === - PFN_vkCmdWriteBufferMarkerAMD vkCmdWriteBufferMarkerAMD = 0; - - //=== VK_EXT_calibrated_timestamps === - PFN_vkGetCalibratedTimestampsEXT vkGetCalibratedTimestampsEXT = 0; - - //=== VK_NV_mesh_shader === - PFN_vkCmdDrawMeshTasksNV vkCmdDrawMeshTasksNV = 0; - PFN_vkCmdDrawMeshTasksIndirectNV vkCmdDrawMeshTasksIndirectNV = 0; - PFN_vkCmdDrawMeshTasksIndirectCountNV vkCmdDrawMeshTasksIndirectCountNV = 0; - - //=== VK_NV_scissor_exclusive 
=== - PFN_vkCmdSetExclusiveScissorEnableNV vkCmdSetExclusiveScissorEnableNV = 0; - PFN_vkCmdSetExclusiveScissorNV vkCmdSetExclusiveScissorNV = 0; - - //=== VK_NV_device_diagnostic_checkpoints === - PFN_vkCmdSetCheckpointNV vkCmdSetCheckpointNV = 0; - PFN_vkGetQueueCheckpointDataNV vkGetQueueCheckpointDataNV = 0; - - //=== VK_KHR_timeline_semaphore === - PFN_vkGetSemaphoreCounterValueKHR vkGetSemaphoreCounterValueKHR = 0; - PFN_vkWaitSemaphoresKHR vkWaitSemaphoresKHR = 0; - PFN_vkSignalSemaphoreKHR vkSignalSemaphoreKHR = 0; - - //=== VK_INTEL_performance_query === - PFN_vkInitializePerformanceApiINTEL vkInitializePerformanceApiINTEL = 0; - PFN_vkUninitializePerformanceApiINTEL vkUninitializePerformanceApiINTEL = 0; - PFN_vkCmdSetPerformanceMarkerINTEL vkCmdSetPerformanceMarkerINTEL = 0; - PFN_vkCmdSetPerformanceStreamMarkerINTEL vkCmdSetPerformanceStreamMarkerINTEL = 0; - PFN_vkCmdSetPerformanceOverrideINTEL vkCmdSetPerformanceOverrideINTEL = 0; - PFN_vkAcquirePerformanceConfigurationINTEL vkAcquirePerformanceConfigurationINTEL = 0; - PFN_vkReleasePerformanceConfigurationINTEL vkReleasePerformanceConfigurationINTEL = 0; - PFN_vkQueueSetPerformanceConfigurationINTEL vkQueueSetPerformanceConfigurationINTEL = 0; - PFN_vkGetPerformanceParameterINTEL vkGetPerformanceParameterINTEL = 0; - - //=== VK_AMD_display_native_hdr === - PFN_vkSetLocalDimmingAMD vkSetLocalDimmingAMD = 0; - - //=== VK_KHR_fragment_shading_rate === - PFN_vkCmdSetFragmentShadingRateKHR vkCmdSetFragmentShadingRateKHR = 0; - - //=== VK_KHR_dynamic_rendering_local_read === - PFN_vkCmdSetRenderingAttachmentLocationsKHR vkCmdSetRenderingAttachmentLocationsKHR = 0; - PFN_vkCmdSetRenderingInputAttachmentIndicesKHR vkCmdSetRenderingInputAttachmentIndicesKHR = 0; - - //=== VK_EXT_buffer_device_address === - PFN_vkGetBufferDeviceAddressEXT vkGetBufferDeviceAddressEXT = 0; - - //=== VK_KHR_present_wait === - PFN_vkWaitForPresentKHR vkWaitForPresentKHR = 0; - -# if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_EXT_full_screen_exclusive === - PFN_vkAcquireFullScreenExclusiveModeEXT vkAcquireFullScreenExclusiveModeEXT = 0; - PFN_vkReleaseFullScreenExclusiveModeEXT vkReleaseFullScreenExclusiveModeEXT = 0; - PFN_vkGetDeviceGroupSurfacePresentModes2EXT vkGetDeviceGroupSurfacePresentModes2EXT = 0; -# else - PFN_dummy vkAcquireFullScreenExclusiveModeEXT_placeholder = 0; - PFN_dummy vkReleaseFullScreenExclusiveModeEXT_placeholder = 0; - PFN_dummy vkGetDeviceGroupSurfacePresentModes2EXT_placeholder = 0; -# endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - //=== VK_KHR_buffer_device_address === - PFN_vkGetBufferDeviceAddressKHR vkGetBufferDeviceAddressKHR = 0; - PFN_vkGetBufferOpaqueCaptureAddressKHR vkGetBufferOpaqueCaptureAddressKHR = 0; - PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR vkGetDeviceMemoryOpaqueCaptureAddressKHR = 0; - - //=== VK_EXT_line_rasterization === - PFN_vkCmdSetLineStippleEXT vkCmdSetLineStippleEXT = 0; - - //=== VK_EXT_host_query_reset === - PFN_vkResetQueryPoolEXT vkResetQueryPoolEXT = 0; - - //=== VK_EXT_extended_dynamic_state === - PFN_vkCmdSetCullModeEXT vkCmdSetCullModeEXT = 0; - PFN_vkCmdSetFrontFaceEXT vkCmdSetFrontFaceEXT = 0; - PFN_vkCmdSetPrimitiveTopologyEXT vkCmdSetPrimitiveTopologyEXT = 0; - PFN_vkCmdSetViewportWithCountEXT vkCmdSetViewportWithCountEXT = 0; - PFN_vkCmdSetScissorWithCountEXT vkCmdSetScissorWithCountEXT = 0; - PFN_vkCmdBindVertexBuffers2EXT vkCmdBindVertexBuffers2EXT = 0; - PFN_vkCmdSetDepthTestEnableEXT vkCmdSetDepthTestEnableEXT = 0; - PFN_vkCmdSetDepthWriteEnableEXT vkCmdSetDepthWriteEnableEXT = 0; - 
PFN_vkCmdSetDepthCompareOpEXT vkCmdSetDepthCompareOpEXT = 0; - PFN_vkCmdSetDepthBoundsTestEnableEXT vkCmdSetDepthBoundsTestEnableEXT = 0; - PFN_vkCmdSetStencilTestEnableEXT vkCmdSetStencilTestEnableEXT = 0; - PFN_vkCmdSetStencilOpEXT vkCmdSetStencilOpEXT = 0; - - //=== VK_KHR_deferred_host_operations === - PFN_vkCreateDeferredOperationKHR vkCreateDeferredOperationKHR = 0; - PFN_vkDestroyDeferredOperationKHR vkDestroyDeferredOperationKHR = 0; - PFN_vkGetDeferredOperationMaxConcurrencyKHR vkGetDeferredOperationMaxConcurrencyKHR = 0; - PFN_vkGetDeferredOperationResultKHR vkGetDeferredOperationResultKHR = 0; - PFN_vkDeferredOperationJoinKHR vkDeferredOperationJoinKHR = 0; - - //=== VK_KHR_pipeline_executable_properties === - PFN_vkGetPipelineExecutablePropertiesKHR vkGetPipelineExecutablePropertiesKHR = 0; - PFN_vkGetPipelineExecutableStatisticsKHR vkGetPipelineExecutableStatisticsKHR = 0; - PFN_vkGetPipelineExecutableInternalRepresentationsKHR vkGetPipelineExecutableInternalRepresentationsKHR = 0; - - //=== VK_EXT_host_image_copy === - PFN_vkCopyMemoryToImageEXT vkCopyMemoryToImageEXT = 0; - PFN_vkCopyImageToMemoryEXT vkCopyImageToMemoryEXT = 0; - PFN_vkCopyImageToImageEXT vkCopyImageToImageEXT = 0; - PFN_vkTransitionImageLayoutEXT vkTransitionImageLayoutEXT = 0; - PFN_vkGetImageSubresourceLayout2EXT vkGetImageSubresourceLayout2EXT = 0; - - //=== VK_KHR_map_memory2 === - PFN_vkMapMemory2KHR vkMapMemory2KHR = 0; - PFN_vkUnmapMemory2KHR vkUnmapMemory2KHR = 0; - - //=== VK_EXT_swapchain_maintenance1 === - PFN_vkReleaseSwapchainImagesEXT vkReleaseSwapchainImagesEXT = 0; - - //=== VK_NV_device_generated_commands === - PFN_vkGetGeneratedCommandsMemoryRequirementsNV vkGetGeneratedCommandsMemoryRequirementsNV = 0; - PFN_vkCmdPreprocessGeneratedCommandsNV vkCmdPreprocessGeneratedCommandsNV = 0; - PFN_vkCmdExecuteGeneratedCommandsNV vkCmdExecuteGeneratedCommandsNV = 0; - PFN_vkCmdBindPipelineShaderGroupNV vkCmdBindPipelineShaderGroupNV = 0; - PFN_vkCreateIndirectCommandsLayoutNV vkCreateIndirectCommandsLayoutNV = 0; - PFN_vkDestroyIndirectCommandsLayoutNV vkDestroyIndirectCommandsLayoutNV = 0; - - //=== VK_EXT_depth_bias_control === - PFN_vkCmdSetDepthBias2EXT vkCmdSetDepthBias2EXT = 0; - - //=== VK_EXT_private_data === - PFN_vkCreatePrivateDataSlotEXT vkCreatePrivateDataSlotEXT = 0; - PFN_vkDestroyPrivateDataSlotEXT vkDestroyPrivateDataSlotEXT = 0; - PFN_vkSetPrivateDataEXT vkSetPrivateDataEXT = 0; - PFN_vkGetPrivateDataEXT vkGetPrivateDataEXT = 0; - - //=== VK_KHR_video_encode_queue === - PFN_vkGetEncodedVideoSessionParametersKHR vkGetEncodedVideoSessionParametersKHR = 0; - PFN_vkCmdEncodeVideoKHR vkCmdEncodeVideoKHR = 0; - -# if defined( VK_ENABLE_BETA_EXTENSIONS ) - //=== VK_NV_cuda_kernel_launch === - PFN_vkCreateCudaModuleNV vkCreateCudaModuleNV = 0; - PFN_vkGetCudaModuleCacheNV vkGetCudaModuleCacheNV = 0; - PFN_vkCreateCudaFunctionNV vkCreateCudaFunctionNV = 0; - PFN_vkDestroyCudaModuleNV vkDestroyCudaModuleNV = 0; - PFN_vkDestroyCudaFunctionNV vkDestroyCudaFunctionNV = 0; - PFN_vkCmdCudaLaunchKernelNV vkCmdCudaLaunchKernelNV = 0; -# else - PFN_dummy vkCreateCudaModuleNV_placeholder = 0; - PFN_dummy vkGetCudaModuleCacheNV_placeholder = 0; - PFN_dummy vkCreateCudaFunctionNV_placeholder = 0; - PFN_dummy vkDestroyCudaModuleNV_placeholder = 0; - PFN_dummy vkDestroyCudaFunctionNV_placeholder = 0; - PFN_dummy vkCmdCudaLaunchKernelNV_placeholder = 0; -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ - -# if defined( VK_USE_PLATFORM_METAL_EXT ) - //=== VK_EXT_metal_objects === - PFN_vkExportMetalObjectsEXT 
vkExportMetalObjectsEXT = 0; -# else - PFN_dummy vkExportMetalObjectsEXT_placeholder = 0; -# endif /*VK_USE_PLATFORM_METAL_EXT*/ - - //=== VK_KHR_synchronization2 === - PFN_vkCmdSetEvent2KHR vkCmdSetEvent2KHR = 0; - PFN_vkCmdResetEvent2KHR vkCmdResetEvent2KHR = 0; - PFN_vkCmdWaitEvents2KHR vkCmdWaitEvents2KHR = 0; - PFN_vkCmdPipelineBarrier2KHR vkCmdPipelineBarrier2KHR = 0; - PFN_vkCmdWriteTimestamp2KHR vkCmdWriteTimestamp2KHR = 0; - PFN_vkQueueSubmit2KHR vkQueueSubmit2KHR = 0; - PFN_vkCmdWriteBufferMarker2AMD vkCmdWriteBufferMarker2AMD = 0; - PFN_vkGetQueueCheckpointData2NV vkGetQueueCheckpointData2NV = 0; - - //=== VK_EXT_descriptor_buffer === - PFN_vkGetDescriptorSetLayoutSizeEXT vkGetDescriptorSetLayoutSizeEXT = 0; - PFN_vkGetDescriptorSetLayoutBindingOffsetEXT vkGetDescriptorSetLayoutBindingOffsetEXT = 0; - PFN_vkGetDescriptorEXT vkGetDescriptorEXT = 0; - PFN_vkCmdBindDescriptorBuffersEXT vkCmdBindDescriptorBuffersEXT = 0; - PFN_vkCmdSetDescriptorBufferOffsetsEXT vkCmdSetDescriptorBufferOffsetsEXT = 0; - PFN_vkCmdBindDescriptorBufferEmbeddedSamplersEXT vkCmdBindDescriptorBufferEmbeddedSamplersEXT = 0; - PFN_vkGetBufferOpaqueCaptureDescriptorDataEXT vkGetBufferOpaqueCaptureDescriptorDataEXT = 0; - PFN_vkGetImageOpaqueCaptureDescriptorDataEXT vkGetImageOpaqueCaptureDescriptorDataEXT = 0; - PFN_vkGetImageViewOpaqueCaptureDescriptorDataEXT vkGetImageViewOpaqueCaptureDescriptorDataEXT = 0; - PFN_vkGetSamplerOpaqueCaptureDescriptorDataEXT vkGetSamplerOpaqueCaptureDescriptorDataEXT = 0; - PFN_vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT = 0; - - //=== VK_NV_fragment_shading_rate_enums === - PFN_vkCmdSetFragmentShadingRateEnumNV vkCmdSetFragmentShadingRateEnumNV = 0; - - //=== VK_EXT_mesh_shader === - PFN_vkCmdDrawMeshTasksEXT vkCmdDrawMeshTasksEXT = 0; - PFN_vkCmdDrawMeshTasksIndirectEXT vkCmdDrawMeshTasksIndirectEXT = 0; - PFN_vkCmdDrawMeshTasksIndirectCountEXT vkCmdDrawMeshTasksIndirectCountEXT = 0; - - //=== VK_KHR_copy_commands2 === - PFN_vkCmdCopyBuffer2KHR vkCmdCopyBuffer2KHR = 0; - PFN_vkCmdCopyImage2KHR vkCmdCopyImage2KHR = 0; - PFN_vkCmdCopyBufferToImage2KHR vkCmdCopyBufferToImage2KHR = 0; - PFN_vkCmdCopyImageToBuffer2KHR vkCmdCopyImageToBuffer2KHR = 0; - PFN_vkCmdBlitImage2KHR vkCmdBlitImage2KHR = 0; - PFN_vkCmdResolveImage2KHR vkCmdResolveImage2KHR = 0; - - //=== VK_EXT_device_fault === - PFN_vkGetDeviceFaultInfoEXT vkGetDeviceFaultInfoEXT = 0; - - //=== VK_EXT_vertex_input_dynamic_state === - PFN_vkCmdSetVertexInputEXT vkCmdSetVertexInputEXT = 0; - -# if defined( VK_USE_PLATFORM_FUCHSIA ) - //=== VK_FUCHSIA_external_memory === - PFN_vkGetMemoryZirconHandleFUCHSIA vkGetMemoryZirconHandleFUCHSIA = 0; - PFN_vkGetMemoryZirconHandlePropertiesFUCHSIA vkGetMemoryZirconHandlePropertiesFUCHSIA = 0; -# else - PFN_dummy vkGetMemoryZirconHandleFUCHSIA_placeholder = 0; - PFN_dummy vkGetMemoryZirconHandlePropertiesFUCHSIA_placeholder = 0; -# endif /*VK_USE_PLATFORM_FUCHSIA*/ - -# if defined( VK_USE_PLATFORM_FUCHSIA ) - //=== VK_FUCHSIA_external_semaphore === - PFN_vkImportSemaphoreZirconHandleFUCHSIA vkImportSemaphoreZirconHandleFUCHSIA = 0; - PFN_vkGetSemaphoreZirconHandleFUCHSIA vkGetSemaphoreZirconHandleFUCHSIA = 0; -# else - PFN_dummy vkImportSemaphoreZirconHandleFUCHSIA_placeholder = 0; - PFN_dummy vkGetSemaphoreZirconHandleFUCHSIA_placeholder = 0; -# endif /*VK_USE_PLATFORM_FUCHSIA*/ - -# if defined( VK_USE_PLATFORM_FUCHSIA ) - //=== VK_FUCHSIA_buffer_collection === - PFN_vkCreateBufferCollectionFUCHSIA 
vkCreateBufferCollectionFUCHSIA = 0; - PFN_vkSetBufferCollectionImageConstraintsFUCHSIA vkSetBufferCollectionImageConstraintsFUCHSIA = 0; - PFN_vkSetBufferCollectionBufferConstraintsFUCHSIA vkSetBufferCollectionBufferConstraintsFUCHSIA = 0; - PFN_vkDestroyBufferCollectionFUCHSIA vkDestroyBufferCollectionFUCHSIA = 0; - PFN_vkGetBufferCollectionPropertiesFUCHSIA vkGetBufferCollectionPropertiesFUCHSIA = 0; -# else - PFN_dummy vkCreateBufferCollectionFUCHSIA_placeholder = 0; - PFN_dummy vkSetBufferCollectionImageConstraintsFUCHSIA_placeholder = 0; - PFN_dummy vkSetBufferCollectionBufferConstraintsFUCHSIA_placeholder = 0; - PFN_dummy vkDestroyBufferCollectionFUCHSIA_placeholder = 0; - PFN_dummy vkGetBufferCollectionPropertiesFUCHSIA_placeholder = 0; -# endif /*VK_USE_PLATFORM_FUCHSIA*/ - - //=== VK_HUAWEI_subpass_shading === - PFN_vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI = 0; - PFN_vkCmdSubpassShadingHUAWEI vkCmdSubpassShadingHUAWEI = 0; - - //=== VK_HUAWEI_invocation_mask === - PFN_vkCmdBindInvocationMaskHUAWEI vkCmdBindInvocationMaskHUAWEI = 0; - - //=== VK_NV_external_memory_rdma === - PFN_vkGetMemoryRemoteAddressNV vkGetMemoryRemoteAddressNV = 0; - - //=== VK_EXT_pipeline_properties === - PFN_vkGetPipelinePropertiesEXT vkGetPipelinePropertiesEXT = 0; - - //=== VK_EXT_extended_dynamic_state2 === - PFN_vkCmdSetPatchControlPointsEXT vkCmdSetPatchControlPointsEXT = 0; - PFN_vkCmdSetRasterizerDiscardEnableEXT vkCmdSetRasterizerDiscardEnableEXT = 0; - PFN_vkCmdSetDepthBiasEnableEXT vkCmdSetDepthBiasEnableEXT = 0; - PFN_vkCmdSetLogicOpEXT vkCmdSetLogicOpEXT = 0; - PFN_vkCmdSetPrimitiveRestartEnableEXT vkCmdSetPrimitiveRestartEnableEXT = 0; - - //=== VK_EXT_color_write_enable === - PFN_vkCmdSetColorWriteEnableEXT vkCmdSetColorWriteEnableEXT = 0; - - //=== VK_KHR_ray_tracing_maintenance1 === - PFN_vkCmdTraceRaysIndirect2KHR vkCmdTraceRaysIndirect2KHR = 0; - - //=== VK_EXT_multi_draw === - PFN_vkCmdDrawMultiEXT vkCmdDrawMultiEXT = 0; - PFN_vkCmdDrawMultiIndexedEXT vkCmdDrawMultiIndexedEXT = 0; - - //=== VK_EXT_opacity_micromap === - PFN_vkCreateMicromapEXT vkCreateMicromapEXT = 0; - PFN_vkDestroyMicromapEXT vkDestroyMicromapEXT = 0; - PFN_vkCmdBuildMicromapsEXT vkCmdBuildMicromapsEXT = 0; - PFN_vkBuildMicromapsEXT vkBuildMicromapsEXT = 0; - PFN_vkCopyMicromapEXT vkCopyMicromapEXT = 0; - PFN_vkCopyMicromapToMemoryEXT vkCopyMicromapToMemoryEXT = 0; - PFN_vkCopyMemoryToMicromapEXT vkCopyMemoryToMicromapEXT = 0; - PFN_vkWriteMicromapsPropertiesEXT vkWriteMicromapsPropertiesEXT = 0; - PFN_vkCmdCopyMicromapEXT vkCmdCopyMicromapEXT = 0; - PFN_vkCmdCopyMicromapToMemoryEXT vkCmdCopyMicromapToMemoryEXT = 0; - PFN_vkCmdCopyMemoryToMicromapEXT vkCmdCopyMemoryToMicromapEXT = 0; - PFN_vkCmdWriteMicromapsPropertiesEXT vkCmdWriteMicromapsPropertiesEXT = 0; - PFN_vkGetDeviceMicromapCompatibilityEXT vkGetDeviceMicromapCompatibilityEXT = 0; - PFN_vkGetMicromapBuildSizesEXT vkGetMicromapBuildSizesEXT = 0; - - //=== VK_HUAWEI_cluster_culling_shader === - PFN_vkCmdDrawClusterHUAWEI vkCmdDrawClusterHUAWEI = 0; - PFN_vkCmdDrawClusterIndirectHUAWEI vkCmdDrawClusterIndirectHUAWEI = 0; - - //=== VK_EXT_pageable_device_local_memory === - PFN_vkSetDeviceMemoryPriorityEXT vkSetDeviceMemoryPriorityEXT = 0; - - //=== VK_KHR_maintenance4 === - PFN_vkGetDeviceBufferMemoryRequirementsKHR vkGetDeviceBufferMemoryRequirementsKHR = 0; - PFN_vkGetDeviceImageMemoryRequirementsKHR vkGetDeviceImageMemoryRequirementsKHR = 0; - PFN_vkGetDeviceImageSparseMemoryRequirementsKHR 
vkGetDeviceImageSparseMemoryRequirementsKHR = 0; - - //=== VK_VALVE_descriptor_set_host_mapping === - PFN_vkGetDescriptorSetLayoutHostMappingInfoVALVE vkGetDescriptorSetLayoutHostMappingInfoVALVE = 0; - PFN_vkGetDescriptorSetHostMappingVALVE vkGetDescriptorSetHostMappingVALVE = 0; - - //=== VK_NV_copy_memory_indirect === - PFN_vkCmdCopyMemoryIndirectNV vkCmdCopyMemoryIndirectNV = 0; - PFN_vkCmdCopyMemoryToImageIndirectNV vkCmdCopyMemoryToImageIndirectNV = 0; - - //=== VK_NV_memory_decompression === - PFN_vkCmdDecompressMemoryNV vkCmdDecompressMemoryNV = 0; - PFN_vkCmdDecompressMemoryIndirectCountNV vkCmdDecompressMemoryIndirectCountNV = 0; - - //=== VK_NV_device_generated_commands_compute === - PFN_vkGetPipelineIndirectMemoryRequirementsNV vkGetPipelineIndirectMemoryRequirementsNV = 0; - PFN_vkCmdUpdatePipelineIndirectBufferNV vkCmdUpdatePipelineIndirectBufferNV = 0; - PFN_vkGetPipelineIndirectDeviceAddressNV vkGetPipelineIndirectDeviceAddressNV = 0; - - //=== VK_EXT_extended_dynamic_state3 === - PFN_vkCmdSetDepthClampEnableEXT vkCmdSetDepthClampEnableEXT = 0; - PFN_vkCmdSetPolygonModeEXT vkCmdSetPolygonModeEXT = 0; - PFN_vkCmdSetRasterizationSamplesEXT vkCmdSetRasterizationSamplesEXT = 0; - PFN_vkCmdSetSampleMaskEXT vkCmdSetSampleMaskEXT = 0; - PFN_vkCmdSetAlphaToCoverageEnableEXT vkCmdSetAlphaToCoverageEnableEXT = 0; - PFN_vkCmdSetAlphaToOneEnableEXT vkCmdSetAlphaToOneEnableEXT = 0; - PFN_vkCmdSetLogicOpEnableEXT vkCmdSetLogicOpEnableEXT = 0; - PFN_vkCmdSetColorBlendEnableEXT vkCmdSetColorBlendEnableEXT = 0; - PFN_vkCmdSetColorBlendEquationEXT vkCmdSetColorBlendEquationEXT = 0; - PFN_vkCmdSetColorWriteMaskEXT vkCmdSetColorWriteMaskEXT = 0; - PFN_vkCmdSetTessellationDomainOriginEXT vkCmdSetTessellationDomainOriginEXT = 0; - PFN_vkCmdSetRasterizationStreamEXT vkCmdSetRasterizationStreamEXT = 0; - PFN_vkCmdSetConservativeRasterizationModeEXT vkCmdSetConservativeRasterizationModeEXT = 0; - PFN_vkCmdSetExtraPrimitiveOverestimationSizeEXT vkCmdSetExtraPrimitiveOverestimationSizeEXT = 0; - PFN_vkCmdSetDepthClipEnableEXT vkCmdSetDepthClipEnableEXT = 0; - PFN_vkCmdSetSampleLocationsEnableEXT vkCmdSetSampleLocationsEnableEXT = 0; - PFN_vkCmdSetColorBlendAdvancedEXT vkCmdSetColorBlendAdvancedEXT = 0; - PFN_vkCmdSetProvokingVertexModeEXT vkCmdSetProvokingVertexModeEXT = 0; - PFN_vkCmdSetLineRasterizationModeEXT vkCmdSetLineRasterizationModeEXT = 0; - PFN_vkCmdSetLineStippleEnableEXT vkCmdSetLineStippleEnableEXT = 0; - PFN_vkCmdSetDepthClipNegativeOneToOneEXT vkCmdSetDepthClipNegativeOneToOneEXT = 0; - PFN_vkCmdSetViewportWScalingEnableNV vkCmdSetViewportWScalingEnableNV = 0; - PFN_vkCmdSetViewportSwizzleNV vkCmdSetViewportSwizzleNV = 0; - PFN_vkCmdSetCoverageToColorEnableNV vkCmdSetCoverageToColorEnableNV = 0; - PFN_vkCmdSetCoverageToColorLocationNV vkCmdSetCoverageToColorLocationNV = 0; - PFN_vkCmdSetCoverageModulationModeNV vkCmdSetCoverageModulationModeNV = 0; - PFN_vkCmdSetCoverageModulationTableEnableNV vkCmdSetCoverageModulationTableEnableNV = 0; - PFN_vkCmdSetCoverageModulationTableNV vkCmdSetCoverageModulationTableNV = 0; - PFN_vkCmdSetShadingRateImageEnableNV vkCmdSetShadingRateImageEnableNV = 0; - PFN_vkCmdSetRepresentativeFragmentTestEnableNV vkCmdSetRepresentativeFragmentTestEnableNV = 0; - PFN_vkCmdSetCoverageReductionModeNV vkCmdSetCoverageReductionModeNV = 0; - - //=== VK_EXT_shader_module_identifier === - PFN_vkGetShaderModuleIdentifierEXT vkGetShaderModuleIdentifierEXT = 0; - PFN_vkGetShaderModuleCreateInfoIdentifierEXT vkGetShaderModuleCreateInfoIdentifierEXT = 0; - - //=== 
VK_NV_optical_flow === - PFN_vkCreateOpticalFlowSessionNV vkCreateOpticalFlowSessionNV = 0; - PFN_vkDestroyOpticalFlowSessionNV vkDestroyOpticalFlowSessionNV = 0; - PFN_vkBindOpticalFlowSessionImageNV vkBindOpticalFlowSessionImageNV = 0; - PFN_vkCmdOpticalFlowExecuteNV vkCmdOpticalFlowExecuteNV = 0; - - //=== VK_KHR_maintenance5 === - PFN_vkCmdBindIndexBuffer2KHR vkCmdBindIndexBuffer2KHR = 0; - PFN_vkGetRenderingAreaGranularityKHR vkGetRenderingAreaGranularityKHR = 0; - PFN_vkGetDeviceImageSubresourceLayoutKHR vkGetDeviceImageSubresourceLayoutKHR = 0; - PFN_vkGetImageSubresourceLayout2KHR vkGetImageSubresourceLayout2KHR = 0; - - //=== VK_AMD_anti_lag === - PFN_vkAntiLagUpdateAMD vkAntiLagUpdateAMD = 0; - - //=== VK_EXT_shader_object === - PFN_vkCreateShadersEXT vkCreateShadersEXT = 0; - PFN_vkDestroyShaderEXT vkDestroyShaderEXT = 0; - PFN_vkGetShaderBinaryDataEXT vkGetShaderBinaryDataEXT = 0; - PFN_vkCmdBindShadersEXT vkCmdBindShadersEXT = 0; - - //=== VK_KHR_pipeline_binary === - PFN_vkCreatePipelineBinariesKHR vkCreatePipelineBinariesKHR = 0; - PFN_vkDestroyPipelineBinaryKHR vkDestroyPipelineBinaryKHR = 0; - PFN_vkGetPipelineKeyKHR vkGetPipelineKeyKHR = 0; - PFN_vkGetPipelineBinaryDataKHR vkGetPipelineBinaryDataKHR = 0; - PFN_vkReleaseCapturedPipelineDataKHR vkReleaseCapturedPipelineDataKHR = 0; - - //=== VK_QCOM_tile_properties === - PFN_vkGetFramebufferTilePropertiesQCOM vkGetFramebufferTilePropertiesQCOM = 0; - PFN_vkGetDynamicRenderingTilePropertiesQCOM vkGetDynamicRenderingTilePropertiesQCOM = 0; - - //=== VK_NV_low_latency2 === - PFN_vkSetLatencySleepModeNV vkSetLatencySleepModeNV = 0; - PFN_vkLatencySleepNV vkLatencySleepNV = 0; - PFN_vkSetLatencyMarkerNV vkSetLatencyMarkerNV = 0; - PFN_vkGetLatencyTimingsNV vkGetLatencyTimingsNV = 0; - PFN_vkQueueNotifyOutOfBandNV vkQueueNotifyOutOfBandNV = 0; - - //=== VK_EXT_attachment_feedback_loop_dynamic_state === - PFN_vkCmdSetAttachmentFeedbackLoopEnableEXT vkCmdSetAttachmentFeedbackLoopEnableEXT = 0; - -# if defined( VK_USE_PLATFORM_SCREEN_QNX ) - //=== VK_QNX_external_memory_screen_buffer === - PFN_vkGetScreenBufferPropertiesQNX vkGetScreenBufferPropertiesQNX = 0; -# else - PFN_dummy vkGetScreenBufferPropertiesQNX_placeholder = 0; -# endif /*VK_USE_PLATFORM_SCREEN_QNX*/ - - //=== VK_KHR_line_rasterization === - PFN_vkCmdSetLineStippleKHR vkCmdSetLineStippleKHR = 0; - - //=== VK_KHR_calibrated_timestamps === - PFN_vkGetCalibratedTimestampsKHR vkGetCalibratedTimestampsKHR = 0; - - //=== VK_KHR_maintenance6 === - PFN_vkCmdBindDescriptorSets2KHR vkCmdBindDescriptorSets2KHR = 0; - PFN_vkCmdPushConstants2KHR vkCmdPushConstants2KHR = 0; - PFN_vkCmdPushDescriptorSet2KHR vkCmdPushDescriptorSet2KHR = 0; - PFN_vkCmdPushDescriptorSetWithTemplate2KHR vkCmdPushDescriptorSetWithTemplate2KHR = 0; - PFN_vkCmdSetDescriptorBufferOffsets2EXT vkCmdSetDescriptorBufferOffsets2EXT = 0; - PFN_vkCmdBindDescriptorBufferEmbeddedSamplers2EXT vkCmdBindDescriptorBufferEmbeddedSamplers2EXT = 0; - }; + PFN_vkCmdBindDescriptorSets2KHR vkCmdBindDescriptorSets2KHR = 0; + PFN_vkCmdPushConstants2KHR vkCmdPushConstants2KHR = 0; + PFN_vkCmdPushDescriptorSet2KHR vkCmdPushDescriptorSet2KHR = 0; + PFN_vkCmdPushDescriptorSetWithTemplate2KHR vkCmdPushDescriptorSetWithTemplate2KHR = 0; + PFN_vkCmdSetDescriptorBufferOffsets2EXT vkCmdSetDescriptorBufferOffsets2EXT = 0; + PFN_vkCmdBindDescriptorBufferEmbeddedSamplers2EXT vkCmdBindDescriptorBufferEmbeddedSamplers2EXT = 0; + + //=== VK_EXT_device_generated_commands === + PFN_vkGetGeneratedCommandsMemoryRequirementsEXT 
vkGetGeneratedCommandsMemoryRequirementsEXT = 0;
+      PFN_vkCmdPreprocessGeneratedCommandsEXT       vkCmdPreprocessGeneratedCommandsEXT       = 0;
+      PFN_vkCmdExecuteGeneratedCommandsEXT          vkCmdExecuteGeneratedCommandsEXT          = 0;
+      PFN_vkCreateIndirectCommandsLayoutEXT         vkCreateIndirectCommandsLayoutEXT         = 0;
+      PFN_vkDestroyIndirectCommandsLayoutEXT        vkDestroyIndirectCommandsLayoutEXT        = 0;
+      PFN_vkCreateIndirectExecutionSetEXT           vkCreateIndirectExecutionSetEXT           = 0;
+      PFN_vkDestroyIndirectExecutionSetEXT          vkDestroyIndirectExecutionSetEXT          = 0;
+      PFN_vkUpdateIndirectExecutionSetPipelineEXT   vkUpdateIndirectExecutionSetPipelineEXT   = 0;
+      PFN_vkUpdateIndirectExecutionSetShaderEXT     vkUpdateIndirectExecutionSetShaderEXT     = 0;
+    };
+
+  }  // namespace detail
 
   //========================================
   //=== RAII HANDLE forward declarations ===
@@ -2717,6 +2762,10 @@ namespace VULKAN_HPP_NAMESPACE
     //=== VK_KHR_pipeline_binary ===
     class PipelineBinaryKHR;
 
+    //=== VK_EXT_device_generated_commands ===
+    class IndirectCommandsLayoutEXT;
+    class IndirectExecutionSetEXT;
+
     //====================
     //=== RAII HANDLES ===
     //====================
@@ -2726,11 +2775,11 @@ namespace VULKAN_HPP_NAMESPACE
     public:
 #  if VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL
       Context()
-        : m_dispatcher( new VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::ContextDispatcher(
+        : m_dispatcher( new VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::ContextDispatcher(
             m_dynamicLoader.getProcAddress<PFN_vkGetInstanceProcAddr>( "vkGetInstanceProcAddr" ) ) )
 #  else
       Context( PFN_vkGetInstanceProcAddr getInstanceProcAddr )
-        : m_dispatcher( new VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::ContextDispatcher( getInstanceProcAddr ) )
+        : m_dispatcher( new VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::ContextDispatcher( getInstanceProcAddr ) )
 #  endif
       {
       }
@@ -2761,7 +2810,7 @@ namespace VULKAN_HPP_NAMESPACE
         return *this;
       }
 
-      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::ContextDispatcher const * getDispatcher() const
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::ContextDispatcher const * getDispatcher() const
       {
         VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
         return &*m_dispatcher;
@@ -2777,9 +2826,10 @@ namespace VULKAN_HPP_NAMESPACE
 
       //=== VK_VERSION_1_0 ===
 
-      VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance>::Type
-        createInstance( VULKAN_HPP_NAMESPACE::InstanceCreateInfo const & createInfo,
-                        VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const
+      VULKAN_HPP_NODISCARD
+        VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance>::Type
+        createInstance( VULKAN_HPP_NAMESPACE::InstanceCreateInfo const & createInfo,
+                        VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const
         VULKAN_HPP_RAII_CREATE_NOEXCEPT;
 
       VULKAN_HPP_NODISCARD std::vector
@@ -2793,9 +2843,9 @@ namespace VULKAN_HPP_NAMESPACE
 
     private:
 #  if VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL
-      VULKAN_HPP_NAMESPACE::DynamicLoader m_dynamicLoader;
+      VULKAN_HPP_NAMESPACE::detail::DynamicLoader m_dynamicLoader;
 #  endif
-      std::unique_ptr<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::ContextDispatcher> m_dispatcher;
+      std::unique_ptr<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::ContextDispatcher> m_dispatcher;
     };
 
     class Instance
@@ -2823,8 +2873,8 @@ namespace VULKAN_HPP_NAMESPACE
                 VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
         : m_instance( instance ), m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
       {
-        m_dispatcher.reset( new VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher( context.getDispatcher()->vkGetInstanceProcAddr,
-                                                                                                     static_cast<VkInstance>( m_instance ) ) );
+        m_dispatcher.reset( new 
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::InstanceDispatcher( context.getDispatcher()->vkGetInstanceProcAddr, + static_cast( m_instance ) ) ); } Instance( std::nullptr_t ) {} @@ -2885,7 +2935,7 @@ namespace VULKAN_HPP_NAMESPACE return VULKAN_HPP_NAMESPACE::exchange( m_instance, nullptr ); } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::InstanceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return &*m_dispatcher; @@ -2900,8 +2950,8 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_VERSION_1_0 === - VULKAN_HPP_NODISCARD - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType>::Type + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType< + std::vector>::Type enumeratePhysicalDevices() const; VULKAN_HPP_NODISCARD PFN_vkVoidFunction getProcAddr( const std::string & name ) const VULKAN_HPP_NOEXCEPT; @@ -2912,60 +2962,66 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_KHR_display === - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - createDisplayPlaneSurfaceKHR( VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createDisplayPlaneSurfaceKHR( VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; # if defined( VK_USE_PLATFORM_XLIB_KHR ) //=== VK_KHR_xlib_surface === - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - createXlibSurfaceKHR( VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createXlibSurfaceKHR( VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; # endif /*VK_USE_PLATFORM_XLIB_KHR*/ # if defined( VK_USE_PLATFORM_XCB_KHR ) //=== VK_KHR_xcb_surface === - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - createXcbSurfaceKHR( VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createXcbSurfaceKHR( VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; # endif /*VK_USE_PLATFORM_XCB_KHR*/ # if defined( VK_USE_PLATFORM_WAYLAND_KHR ) //=== VK_KHR_wayland_surface === - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - createWaylandSurfaceKHR( VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createWaylandSurfaceKHR( VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = 
nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; # endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ # if defined( VK_USE_PLATFORM_ANDROID_KHR ) //=== VK_KHR_android_surface === - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - createAndroidSurfaceKHR( VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createAndroidSurfaceKHR( VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; # endif /*VK_USE_PLATFORM_ANDROID_KHR*/ # if defined( VK_USE_PLATFORM_WIN32_KHR ) //=== VK_KHR_win32_surface === - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - createWin32SurfaceKHR( VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createWin32SurfaceKHR( VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; # endif /*VK_USE_PLATFORM_WIN32_KHR*/ //=== VK_EXT_debug_report === VULKAN_HPP_NODISCARD - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type createDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; @@ -2981,18 +3037,20 @@ namespace VULKAN_HPP_NAMESPACE # if defined( VK_USE_PLATFORM_GGP ) //=== VK_GGP_stream_descriptor_surface === - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - createStreamDescriptorSurfaceGGP( VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createStreamDescriptorSurfaceGGP( VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; # endif /*VK_USE_PLATFORM_GGP*/ # if defined( VK_USE_PLATFORM_VI_NN ) //=== VK_NN_vi_surface === - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - createViSurfaceNN( VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createViSurfaceNN( VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; # endif /*VK_USE_PLATFORM_VI_NN*/ @@ -3003,25 +3061,27 @@ namespace VULKAN_HPP_NAMESPACE # if defined( VK_USE_PLATFORM_IOS_MVK ) //=== VK_MVK_ios_surface === - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - createIOSSurfaceMVK( VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const 
+ VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createIOSSurfaceMVK( VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; # endif /*VK_USE_PLATFORM_IOS_MVK*/ # if defined( VK_USE_PLATFORM_MACOS_MVK ) //=== VK_MVK_macos_surface === - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - createMacOSSurfaceMVK( VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createMacOSSurfaceMVK( VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; # endif /*VK_USE_PLATFORM_MACOS_MVK*/ //=== VK_EXT_debug_utils === VULKAN_HPP_NODISCARD - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type createDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; @@ -3033,50 +3093,55 @@ namespace VULKAN_HPP_NAMESPACE # if defined( VK_USE_PLATFORM_FUCHSIA ) //=== VK_FUCHSIA_imagepipe_surface === - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - createImagePipeSurfaceFUCHSIA( VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createImagePipeSurfaceFUCHSIA( VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; # endif /*VK_USE_PLATFORM_FUCHSIA*/ # if defined( VK_USE_PLATFORM_METAL_EXT ) //=== VK_EXT_metal_surface === - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - createMetalSurfaceEXT( VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createMetalSurfaceEXT( VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; # endif /*VK_USE_PLATFORM_METAL_EXT*/ //=== VK_EXT_headless_surface === - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - createHeadlessSurfaceEXT( VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createHeadlessSurfaceEXT( VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; # if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) //=== VK_EXT_directfb_surface === - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - 
createDirectFBSurfaceEXT( VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createDirectFBSurfaceEXT( VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; # endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ # if defined( VK_USE_PLATFORM_SCREEN_QNX ) //=== VK_QNX_screen_surface === - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - createScreenSurfaceQNX( VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createScreenSurfaceQNX( VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; # endif /*VK_USE_PLATFORM_SCREEN_QNX*/ private: - VULKAN_HPP_NAMESPACE::Instance m_instance = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - std::unique_ptr m_dispatcher; + VULKAN_HPP_NAMESPACE::Instance m_instance = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + std::unique_ptr m_dispatcher; }; class PhysicalDevice @@ -3151,7 +3216,7 @@ namespace VULKAN_HPP_NAMESPACE return VULKAN_HPP_NAMESPACE::exchange( m_physicalDevice, nullptr ); } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::InstanceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -3182,9 +3247,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties getMemoryProperties() const VULKAN_HPP_NOEXCEPT; - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - createDevice( VULKAN_HPP_NAMESPACE::DeviceCreateInfo const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createDevice( VULKAN_HPP_NAMESPACE::DeviceCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; VULKAN_HPP_NODISCARD std::vector @@ -3272,8 +3338,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD std::vector getDisplayPlanePropertiesKHR() const; - VULKAN_HPP_NODISCARD - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType>::Type + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType< + std::vector>::Type getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex ) const; # if defined( VK_USE_PLATFORM_XLIB_KHR ) @@ -3378,8 +3444,9 @@ namespace VULKAN_HPP_NAMESPACE void acquireXlibDisplayEXT( Display & dpy, VULKAN_HPP_NAMESPACE::DisplayKHR display ) const; - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - getRandROutputDisplayEXT( Display & dpy, RROutput rrOutput ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + 
getRandROutputDisplayEXT( Display & dpy, RROutput rrOutput ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; # endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ //=== VK_EXT_display_surface_counter === @@ -3460,8 +3527,9 @@ namespace VULKAN_HPP_NAMESPACE void acquireDrmDisplayEXT( int32_t drmFd, VULKAN_HPP_NAMESPACE::DisplayKHR display ) const; - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - getDrmDisplayEXT( int32_t drmFd, uint32_t connectorId ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + getDrmDisplayEXT( int32_t drmFd, uint32_t connectorId ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; //=== VK_KHR_video_encode_queue === @@ -3475,8 +3543,9 @@ namespace VULKAN_HPP_NAMESPACE # if defined( VK_USE_PLATFORM_WIN32_KHR ) //=== VK_NV_acquire_winrt_display === - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - getWinrtDisplayNV( uint32_t deviceRelativeId ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + getWinrtDisplayNV( uint32_t deviceRelativeId ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; # endif /*VK_USE_PLATFORM_WIN32_KHR*/ # if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) @@ -3506,9 +3575,14 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD std::vector getCalibrateableTimeDomainsKHR() const; + //=== VK_NV_cooperative_matrix2 === + + VULKAN_HPP_NODISCARD std::vector + getCooperativeMatrixFlexibleDimensionsPropertiesNV() const; + private: - VULKAN_HPP_NAMESPACE::PhysicalDevice m_physicalDevice = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::PhysicalDevice m_physicalDevice = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::InstanceDispatcher const * m_dispatcher = nullptr; }; class PhysicalDevices : public std::vector @@ -3561,8 +3635,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) : m_device( device ), m_allocator( static_cast( allocator ) ) { - m_dispatcher.reset( new VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher( physicalDevice.getDispatcher()->vkGetDeviceProcAddr, - static_cast( m_device ) ) ); + m_dispatcher.reset( new VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher( physicalDevice.getDispatcher()->vkGetDeviceProcAddr, + static_cast( m_device ) ) ); } Device( std::nullptr_t ) {} @@ -3623,7 +3697,7 @@ namespace VULKAN_HPP_NAMESPACE return VULKAN_HPP_NAMESPACE::exchange( m_device, nullptr ); } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return &*m_dispatcher; @@ -3640,13 +3714,14 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD PFN_vkVoidFunction getProcAddr( const std::string & name ) const VULKAN_HPP_NOEXCEPT; - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; 
void waitIdle() const; VULKAN_HPP_NODISCARD - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type allocateMemory( VULKAN_HPP_NAMESPACE::MemoryAllocateInfo const & allocateInfo, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; @@ -3655,9 +3730,10 @@ namespace VULKAN_HPP_NAMESPACE void invalidateMappedMemoryRanges( VULKAN_HPP_NAMESPACE::ArrayProxy const & memoryRanges ) const; - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - createFence( VULKAN_HPP_NAMESPACE::FenceCreateInfo const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createFence( VULKAN_HPP_NAMESPACE::FenceCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; void resetFences( VULKAN_HPP_NAMESPACE::ArrayProxy const & fences ) const; @@ -3666,125 +3742,138 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Bool32 waitAll, uint64_t timeout ) const; - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - createSemaphore( VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const - VULKAN_HPP_RAII_CREATE_NOEXCEPT; - - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - createEvent( VULKAN_HPP_NAMESPACE::EventCreateInfo const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const - VULKAN_HPP_RAII_CREATE_NOEXCEPT; - - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - createQueryPool( VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const - VULKAN_HPP_RAII_CREATE_NOEXCEPT; - - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - createBuffer( VULKAN_HPP_NAMESPACE::BufferCreateInfo const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const - VULKAN_HPP_RAII_CREATE_NOEXCEPT; - - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - createBufferView( VULKAN_HPP_NAMESPACE::BufferViewCreateInfo const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const - VULKAN_HPP_RAII_CREATE_NOEXCEPT; - - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - createImage( VULKAN_HPP_NAMESPACE::ImageCreateInfo const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const - VULKAN_HPP_RAII_CREATE_NOEXCEPT; - - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - createImageView( VULKAN_HPP_NAMESPACE::ImageViewCreateInfo const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createSemaphore( VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; VULKAN_HPP_NODISCARD - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createEvent( 
VULKAN_HPP_NAMESPACE::EventCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; + + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createQueryPool( VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; + + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createBuffer( VULKAN_HPP_NAMESPACE::BufferCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; + + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createBufferView( VULKAN_HPP_NAMESPACE::BufferViewCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; + + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createImage( VULKAN_HPP_NAMESPACE::ImageCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; + + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createImageView( VULKAN_HPP_NAMESPACE::ImageViewCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; + + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type createShaderModule( VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; VULKAN_HPP_NODISCARD - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type createPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; VULKAN_HPP_NODISCARD - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType>::Type + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType>::Type createGraphicsPipelines( VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type createGraphicsPipeline( VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; VULKAN_HPP_NODISCARD - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType>::Type + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType>::Type createComputePipelines( VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_NODISCARD + 
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type createComputePipeline( VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; VULKAN_HPP_NODISCARD - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type createPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - createSampler( VULKAN_HPP_NAMESPACE::SamplerCreateInfo const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createSampler( VULKAN_HPP_NAMESPACE::SamplerCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; VULKAN_HPP_NODISCARD - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type createDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; VULKAN_HPP_NODISCARD - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type createDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; - VULKAN_HPP_NODISCARD - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType>::Type + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType< + std::vector>::Type allocateDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo const & allocateInfo ) const; void updateDescriptorSets( VULKAN_HPP_NAMESPACE::ArrayProxy const & descriptorWrites, VULKAN_HPP_NAMESPACE::ArrayProxy const & descriptorCopies ) const VULKAN_HPP_NOEXCEPT; - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - createFramebuffer( VULKAN_HPP_NAMESPACE::FramebufferCreateInfo const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const - VULKAN_HPP_RAII_CREATE_NOEXCEPT; - - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - createRenderPass( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const - VULKAN_HPP_RAII_CREATE_NOEXCEPT; - - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - createCommandPool( VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createFramebuffer( VULKAN_HPP_NAMESPACE::FramebufferCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; VULKAN_HPP_NODISCARD - 
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType>::Type + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createRenderPass( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; + + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createCommandPool( VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType< + std::vector>::Type allocateCommandBuffers( VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo const & allocateInfo ) const; //=== VK_VERSION_1_1 === @@ -3813,17 +3902,18 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD std::vector getImageSparseMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info ) const; - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - getQueue2( VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 const & queueInfo ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + getQueue2( VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 const & queueInfo ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; VULKAN_HPP_NODISCARD - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type createSamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; - VULKAN_HPP_NODISCARD - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType< + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DescriptorUpdateTemplate>::Type createDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; @@ -3837,9 +3927,10 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_VERSION_1_2 === - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - createRenderPass2( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createRenderPass2( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result waitSemaphores( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo & waitInfo, uint64_t timeout ) const; @@ -3857,7 +3948,7 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_VERSION_1_3 === VULKAN_HPP_NODISCARD - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type createPrivateDataSlot( VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; @@ -3891,7 
+3982,7 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_KHR_swapchain === VULKAN_HPP_NODISCARD - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type createSwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; @@ -3906,13 +3997,13 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_KHR_display_swapchain === - VULKAN_HPP_NODISCARD - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType>::Type + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType< + std::vector>::Type createSharedSwapchainsKHR( VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; VULKAN_HPP_NODISCARD - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type createSharedSwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; @@ -3926,26 +4017,27 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_KHR_video_queue === VULKAN_HPP_NODISCARD - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type createVideoSessionKHR( VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; - VULKAN_HPP_NODISCARD - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType< + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::VideoSessionParametersKHR>::Type createVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; //=== VK_NVX_binary_import === - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - createCuModuleNVX( VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createCuModuleNVX( VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; VULKAN_HPP_NODISCARD - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type createCuFunctionNVX( VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; @@ -3991,8 +4083,8 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_KHR_descriptor_update_template === - VULKAN_HPP_NODISCARD - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType< + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DescriptorUpdateTemplate>::Type createDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo const & 
createInfo, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; @@ -4005,15 +4097,17 @@ namespace VULKAN_HPP_NAMESPACE void displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT & displayPowerInfo ) const; - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - registerEventEXT( VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT const & deviceEventInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + registerEventEXT( VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT const & deviceEventInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DisplayKHR const & display, - VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT const & displayEventInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DisplayKHR const & display, + VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT const & displayEventInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; //=== VK_EXT_hdr_metadata === @@ -4023,9 +4117,10 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_KHR_create_renderpass2 === - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - createRenderPass2KHR( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createRenderPass2KHR( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; # if defined( VK_USE_PLATFORM_WIN32_KHR ) @@ -4072,17 +4167,18 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_AMDX_shader_enqueue === VULKAN_HPP_NODISCARD - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType>::Type + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType>::Type createExecutionGraphPipelinesAMDX( VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - createExecutionGraphPipelineAMDX( - VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, - VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createExecutionGraphPipelineAMDX( + VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, + VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; # endif /*VK_ENABLE_BETA_EXTENSIONS*/ //=== VK_KHR_get_memory_requirements2 === @@ -4106,8 +4202,8 @@ namespace 
VULKAN_HPP_NAMESPACE //=== VK_KHR_acceleration_structure === - VULKAN_HPP_NODISCARD - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType< + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::AccelerationStructureKHR>::Type createAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; @@ -4156,24 +4252,25 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_KHR_ray_tracing_pipeline === VULKAN_HPP_NODISCARD - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType>::Type + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType>::Type createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::Optional const & deferredOperation, VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - createRayTracingPipelineKHR( - VULKAN_HPP_NAMESPACE::Optional const & deferredOperation, - VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, - VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createRayTracingPipelineKHR( + VULKAN_HPP_NAMESPACE::Optional const & deferredOperation, + VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, + VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; //=== VK_KHR_sampler_ycbcr_conversion === VULKAN_HPP_NODISCARD - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type createSamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; @@ -4191,15 +4288,15 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_EXT_validation_cache === VULKAN_HPP_NODISCARD - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type createValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; //=== VK_NV_ray_tracing === - VULKAN_HPP_NODISCARD - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType< + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::AccelerationStructureNV>::Type createAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; @@ -4215,12 +4312,13 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::ArrayProxy const & bindInfos ) const; VULKAN_HPP_NODISCARD - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType>::Type + 
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType>::Type createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type createRayTracingPipelineNV( VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const @@ -4260,8 +4358,8 @@ namespace VULKAN_HPP_NAMESPACE void uninitializePerformanceApiINTEL() const VULKAN_HPP_NOEXCEPT; - VULKAN_HPP_NODISCARD - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType< + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PerformanceConfigurationINTEL>::Type acquirePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL const & acquireInfo ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; @@ -4293,7 +4391,7 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_KHR_deferred_host_operations === VULKAN_HPP_NODISCARD - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type createDeferredOperationKHR( VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; @@ -4337,8 +4435,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain getGeneratedCommandsMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV & info ) const VULKAN_HPP_NOEXCEPT; - VULKAN_HPP_NODISCARD - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType< + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::IndirectCommandsLayoutNV>::Type createIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; @@ -4346,7 +4444,7 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_EXT_private_data === VULKAN_HPP_NODISCARD - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type createPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; @@ -4377,13 +4475,13 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_NV_cuda_kernel_launch === VULKAN_HPP_NODISCARD - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type createCudaModuleNV( VULKAN_HPP_NAMESPACE::CudaModuleCreateInfoNV const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; VULKAN_HPP_NODISCARD - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type createCudaFunctionNV( VULKAN_HPP_NAMESPACE::CudaFunctionCreateInfoNV const & createInfo, 
VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; @@ -4448,8 +4546,8 @@ namespace VULKAN_HPP_NAMESPACE # if defined( VK_USE_PLATFORM_FUCHSIA ) //=== VK_FUCHSIA_buffer_collection === - VULKAN_HPP_NODISCARD - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType< + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::BufferCollectionFUCHSIA>::Type createBufferCollectionFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; @@ -4466,9 +4564,10 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_EXT_opacity_micromap === - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - createMicromapEXT( VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createMicromapEXT( VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result @@ -4547,7 +4646,7 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_NV_optical_flow === VULKAN_HPP_NODISCARD - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type createOpticalFlowSessionNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; @@ -4571,19 +4670,20 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_EXT_shader_object === VULKAN_HPP_NODISCARD - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType>::Type + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType>::Type createShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - createShaderEXT( VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createShaderEXT( VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; //=== VK_KHR_pipeline_binary === - VULKAN_HPP_NODISCARD - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType>::Type + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType< + std::vector>::Type createPipelineBinariesKHR( VULKAN_HPP_NAMESPACE::PipelineBinaryCreateInfoKHR const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; @@ -4619,10 +4719,31 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD std::pair getCalibratedTimestampKHR( const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR & timestampInfo ) const; + //=== VK_EXT_device_generated_commands === + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 + getGeneratedCommandsMemoryRequirementsEXT( const 
VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoEXT & info ) const VULKAN_HPP_NOEXCEPT; + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getGeneratedCommandsMemoryRequirementsEXT( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoEXT & info ) const VULKAN_HPP_NOEXCEPT; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType< + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::IndirectCommandsLayoutEXT>::Type + createIndirectCommandsLayoutEXT( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoEXT const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType< + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::IndirectExecutionSetEXT>::Type + createIndirectExecutionSetEXT( VULKAN_HPP_NAMESPACE::IndirectExecutionSetCreateInfoEXT const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; + private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - std::unique_ptr m_dispatcher; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + std::unique_ptr m_dispatcher; }; class AccelerationStructureKHR @@ -4724,7 +4845,7 @@ namespace VULKAN_HPP_NAMESPACE return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -4739,10 +4860,10 @@ namespace VULKAN_HPP_NAMESPACE } private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::AccelerationStructureKHR m_accelerationStructure = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR m_accelerationStructure = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; class AccelerationStructureNV @@ -4844,7 +4965,7 @@ namespace VULKAN_HPP_NAMESPACE return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -4867,10 +4988,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD DataType getHandle() const; private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::AccelerationStructureNV m_accelerationStructure = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureNV m_accelerationStructure = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + 
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; class Buffer @@ -4971,7 +5092,7 @@ namespace VULKAN_HPP_NAMESPACE return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -4992,10 +5113,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements getMemoryRequirements() const VULKAN_HPP_NOEXCEPT; private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::Buffer m_buffer = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::Buffer m_buffer = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; # if defined( VK_USE_PLATFORM_FUCHSIA ) @@ -5098,7 +5219,7 @@ namespace VULKAN_HPP_NAMESPACE return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -5121,10 +5242,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA getProperties() const; private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA m_collection = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA m_collection = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; # endif /*VK_USE_PLATFORM_FUCHSIA*/ @@ -5226,7 +5347,7 @@ namespace VULKAN_HPP_NAMESPACE return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -5241,10 +5362,10 @@ namespace VULKAN_HPP_NAMESPACE } private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::BufferView m_bufferView = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::BufferView m_bufferView = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; class CommandPool @@ -5345,7 +5466,7 @@ namespace VULKAN_HPP_NAMESPACE return m_device; } - 
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -5372,10 +5493,10 @@ namespace VULKAN_HPP_NAMESPACE void trimKHR( VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::CommandPool m_commandPool = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::CommandPool m_commandPool = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; class CommandBuffer @@ -5462,7 +5583,7 @@ namespace VULKAN_HPP_NAMESPACE return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -5893,15 +6014,20 @@ namespace VULKAN_HPP_NAMESPACE # if defined( VK_ENABLE_BETA_EXTENSIONS ) //=== VK_AMDX_shader_enqueue === - void initializeGraphScratchMemoryAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch ) const VULKAN_HPP_NOEXCEPT; + void initializeGraphScratchMemoryAMDX( VULKAN_HPP_NAMESPACE::Pipeline executionGraph, + VULKAN_HPP_NAMESPACE::DeviceAddress scratch, + VULKAN_HPP_NAMESPACE::DeviceSize scratchSize ) const VULKAN_HPP_NOEXCEPT; void dispatchGraphAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch, + VULKAN_HPP_NAMESPACE::DeviceSize scratchSize, const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX & countInfo ) const VULKAN_HPP_NOEXCEPT; void dispatchGraphIndirectAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch, + VULKAN_HPP_NAMESPACE::DeviceSize scratchSize, const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX & countInfo ) const VULKAN_HPP_NOEXCEPT; void dispatchGraphIndirectCountAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch, + VULKAN_HPP_NAMESPACE::DeviceSize scratchSize, VULKAN_HPP_NAMESPACE::DeviceAddress countInfo ) const VULKAN_HPP_NOEXCEPT; # endif /*VK_ENABLE_BETA_EXTENSIONS*/ @@ -6022,6 +6148,11 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, uint32_t marker ) const VULKAN_HPP_NOEXCEPT; + void writeBufferMarker2AMD( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, + uint32_t marker ) const VULKAN_HPP_NOEXCEPT; + //=== VK_NV_mesh_shader === void drawMeshTasksNV( uint32_t taskCount, uint32_t firstTask ) const VULKAN_HPP_NOEXCEPT; @@ -6152,11 +6283,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query ) const VULKAN_HPP_NOEXCEPT; - void writeBufferMarker2AMD( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage, - VULKAN_HPP_NAMESPACE::Buffer dstBuffer, - VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, - uint32_t marker ) const VULKAN_HPP_NOEXCEPT; - //=== VK_EXT_descriptor_buffer === void bindDescriptorBuffersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & bindingInfos ) const @@ -6390,6 
+6516,10 @@ namespace VULKAN_HPP_NAMESPACE void bindShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & stages, VULKAN_HPP_NAMESPACE::ArrayProxy const & shaders ) const; + void setDepthClampRangeEXT( VULKAN_HPP_NAMESPACE::DepthClampModeEXT depthClampMode, + Optional depthClampRange + VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + //=== VK_EXT_attachment_feedback_loop_dynamic_state === void setAttachmentFeedbackLoopEnableEXT( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const @@ -6416,11 +6546,19 @@ namespace VULKAN_HPP_NAMESPACE void bindDescriptorBufferEmbeddedSamplers2EXT( const VULKAN_HPP_NAMESPACE::BindDescriptorBufferEmbeddedSamplersInfoEXT & bindDescriptorBufferEmbeddedSamplersInfo ) const VULKAN_HPP_NOEXCEPT; + //=== VK_EXT_device_generated_commands === + + void preprocessGeneratedCommandsEXT( const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoEXT & generatedCommandsInfo, + VULKAN_HPP_NAMESPACE::CommandBuffer stateCommandBuffer ) const VULKAN_HPP_NOEXCEPT; + + void executeGeneratedCommandsEXT( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, + const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoEXT & generatedCommandsInfo ) const VULKAN_HPP_NOEXCEPT; + private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::CommandPool m_commandPool = {}; - VULKAN_HPP_NAMESPACE::CommandBuffer m_commandBuffer = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::CommandPool m_commandPool = {}; + VULKAN_HPP_NAMESPACE::CommandBuffer m_commandBuffer = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; class CommandBuffers : public std::vector @@ -6547,7 +6685,7 @@ namespace VULKAN_HPP_NAMESPACE return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -6562,10 +6700,10 @@ namespace VULKAN_HPP_NAMESPACE } private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::CuFunctionNVX m_function = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::CuFunctionNVX m_function = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; class CuModuleNVX @@ -6666,7 +6804,7 @@ namespace VULKAN_HPP_NAMESPACE return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -6681,10 +6819,10 @@ namespace VULKAN_HPP_NAMESPACE } private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::CuModuleNVX m_module = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = 
nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::CuModuleNVX m_module = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; # if defined( VK_ENABLE_BETA_EXTENSIONS ) @@ -6786,7 +6924,7 @@ namespace VULKAN_HPP_NAMESPACE return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -6801,10 +6939,10 @@ namespace VULKAN_HPP_NAMESPACE } private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::CudaFunctionNV m_function = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::CudaFunctionNV m_function = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; # endif /*VK_ENABLE_BETA_EXTENSIONS*/ @@ -6907,7 +7045,7 @@ namespace VULKAN_HPP_NAMESPACE return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -6926,10 +7064,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD std::vector getCache() const; private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::CudaModuleNV m_module = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::CudaModuleNV m_module = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; # endif /*VK_ENABLE_BETA_EXTENSIONS*/ @@ -7032,7 +7170,7 @@ namespace VULKAN_HPP_NAMESPACE return m_instance; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::InstanceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -7047,10 +7185,10 @@ namespace VULKAN_HPP_NAMESPACE } private: - VULKAN_HPP_NAMESPACE::Instance m_instance = {}; - VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT m_callback = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Instance m_instance = {}; + VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT m_callback = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::InstanceDispatcher const * m_dispatcher = nullptr; }; class DebugUtilsMessengerEXT @@ -7152,7 +7290,7 @@ namespace 
VULKAN_HPP_NAMESPACE return m_instance; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::InstanceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -7167,10 +7305,10 @@ namespace VULKAN_HPP_NAMESPACE } private: - VULKAN_HPP_NAMESPACE::Instance m_instance = {}; - VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT m_messenger = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Instance m_instance = {}; + VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT m_messenger = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::InstanceDispatcher const * m_dispatcher = nullptr; }; class DeferredOperationKHR @@ -7271,7 +7409,7 @@ namespace VULKAN_HPP_NAMESPACE return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -7294,10 +7432,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result join() const; private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::DeferredOperationKHR m_operation = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::DeferredOperationKHR m_operation = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; class DescriptorPool @@ -7399,7 +7537,7 @@ namespace VULKAN_HPP_NAMESPACE return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -7418,10 +7556,10 @@ namespace VULKAN_HPP_NAMESPACE void reset( VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::DescriptorPool m_descriptorPool = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::DescriptorPool m_descriptorPool = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; class DescriptorSet @@ -7510,7 +7648,7 @@ namespace VULKAN_HPP_NAMESPACE return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() 
const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -7539,10 +7677,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD void * getHostMappingVALVE() const VULKAN_HPP_NOEXCEPT; private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::DescriptorPool m_descriptorPool = {}; - VULKAN_HPP_NAMESPACE::DescriptorSet m_descriptorSet = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::DescriptorPool m_descriptorPool = {}; + VULKAN_HPP_NAMESPACE::DescriptorSet m_descriptorSet = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; class DescriptorSets : public std::vector @@ -7670,7 +7808,7 @@ namespace VULKAN_HPP_NAMESPACE return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -7691,10 +7829,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DeviceSize getBindingOffsetEXT( uint32_t binding ) const VULKAN_HPP_NOEXCEPT; private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::DescriptorSetLayout m_descriptorSetLayout = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::DescriptorSetLayout m_descriptorSetLayout = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; class DescriptorUpdateTemplate @@ -7796,7 +7934,7 @@ namespace VULKAN_HPP_NAMESPACE return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -7811,10 +7949,10 @@ namespace VULKAN_HPP_NAMESPACE } private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate m_descriptorUpdateTemplate = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate m_descriptorUpdateTemplate = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; class DeviceMemory @@ -7915,7 +8053,7 @@ namespace VULKAN_HPP_NAMESPACE return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -7950,10 +8088,10 @@ namespace VULKAN_HPP_NAMESPACE void setPriorityEXT( float priority 
) const VULKAN_HPP_NOEXCEPT; private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::DeviceMemory m_memory = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::DeviceMemory m_memory = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; class DisplayKHR @@ -8060,7 +8198,7 @@ namespace VULKAN_HPP_NAMESPACE return m_physicalDevice; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::InstanceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -8078,7 +8216,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD std::vector getModeProperties() const; VULKAN_HPP_NODISCARD - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type createMode( VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; @@ -8093,9 +8231,9 @@ namespace VULKAN_HPP_NAMESPACE # endif /*VK_USE_PLATFORM_WIN32_KHR*/ private: - VULKAN_HPP_NAMESPACE::PhysicalDevice m_physicalDevice = {}; - VULKAN_HPP_NAMESPACE::DisplayKHR m_display = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::PhysicalDevice m_physicalDevice = {}; + VULKAN_HPP_NAMESPACE::DisplayKHR m_display = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::InstanceDispatcher const * m_dispatcher = nullptr; }; class DisplayKHRs : public std::vector @@ -8208,7 +8346,7 @@ namespace VULKAN_HPP_NAMESPACE return VULKAN_HPP_NAMESPACE::exchange( m_displayModeKHR, nullptr ); } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::InstanceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -8226,9 +8364,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR getDisplayPlaneCapabilities( uint32_t planeIndex ) const; private: - VULKAN_HPP_NAMESPACE::PhysicalDevice m_physicalDevice = {}; - VULKAN_HPP_NAMESPACE::DisplayModeKHR m_displayModeKHR = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::PhysicalDevice m_physicalDevice = {}; + VULKAN_HPP_NAMESPACE::DisplayModeKHR m_displayModeKHR = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::InstanceDispatcher const * m_dispatcher = nullptr; }; class Event @@ -8329,7 +8467,7 @@ namespace VULKAN_HPP_NAMESPACE return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -8352,10 +8490,10 @@ namespace 
VULKAN_HPP_NAMESPACE void reset() const; private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::Event m_event = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::Event m_event = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; class Fence @@ -8475,7 +8613,7 @@ namespace VULKAN_HPP_NAMESPACE return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -8494,10 +8632,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result getStatus() const; private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::Fence m_fence = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::Fence m_fence = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; class Framebuffer @@ -8598,7 +8736,7 @@ namespace VULKAN_HPP_NAMESPACE return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -8617,10 +8755,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD std::vector getTilePropertiesQCOM() const; private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::Framebuffer m_framebuffer = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::Framebuffer m_framebuffer = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; class Image @@ -8721,7 +8859,7 @@ namespace VULKAN_HPP_NAMESPACE return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -8769,10 +8907,10 @@ namespace VULKAN_HPP_NAMESPACE getSubresourceLayout2KHR( const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource ) const VULKAN_HPP_NOEXCEPT; private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::Image m_image = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + 
VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::Image m_image = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; class ImageView @@ -8873,7 +9011,7 @@ namespace VULKAN_HPP_NAMESPACE return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -8892,10 +9030,130 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX getAddressNVX() const; private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::ImageView m_imageView = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::ImageView m_imageView = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + class IndirectCommandsLayoutEXT + { + public: + using CType = VkIndirectCommandsLayoutEXT; + using CppType = VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eIndirectCommandsLayoutEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + public: +# if !defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + IndirectCommandsLayoutEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoEXT const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + { + *this = device.createIndirectCommandsLayoutEXT( createInfo, allocator ); + } +# endif + + IndirectCommandsLayoutEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkIndirectCommandsLayoutEXT indirectCommandsLayout, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( device ) + , m_indirectCommandsLayout( indirectCommandsLayout ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + } + + IndirectCommandsLayoutEXT( std::nullptr_t ) {} + + ~IndirectCommandsLayoutEXT() + { + clear(); + } + + IndirectCommandsLayoutEXT() = delete; + IndirectCommandsLayoutEXT( IndirectCommandsLayoutEXT const & ) = delete; + + IndirectCommandsLayoutEXT( IndirectCommandsLayoutEXT && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_indirectCommandsLayout( VULKAN_HPP_NAMESPACE::exchange( rhs.m_indirectCommandsLayout, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + { + } + + IndirectCommandsLayoutEXT & operator=( IndirectCommandsLayoutEXT const & ) = delete; + + IndirectCommandsLayoutEXT & operator=( IndirectCommandsLayoutEXT && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + std::swap( m_device, rhs.m_device ); + std::swap( m_indirectCommandsLayout, 
rhs.m_indirectCommandsLayout ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_indirectCommandsLayout; + } + + operator VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT() const VULKAN_HPP_NOEXCEPT + { + return m_indirectCommandsLayout; + } + + void clear() VULKAN_HPP_NOEXCEPT + { + if ( m_indirectCommandsLayout ) + { + getDispatcher()->vkDestroyIndirectCommandsLayoutEXT( static_cast( m_device ), + static_cast( m_indirectCommandsLayout ), + reinterpret_cast( m_allocator ) ); + } + m_device = nullptr; + m_indirectCommandsLayout = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + } + + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT release() + { + m_device = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::exchange( m_indirectCommandsLayout, nullptr ); + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::IndirectCommandsLayoutEXT & rhs ) VULKAN_HPP_NOEXCEPT + { + std::swap( m_device, rhs.m_device ); + std::swap( m_indirectCommandsLayout, rhs.m_indirectCommandsLayout ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + + private: + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT m_indirectCommandsLayout = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; class IndirectCommandsLayoutNV @@ -8997,7 +9255,7 @@ namespace VULKAN_HPP_NAMESPACE return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -9012,10 +9270,138 @@ namespace VULKAN_HPP_NAMESPACE } private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV m_indirectCommandsLayout = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV m_indirectCommandsLayout = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; + }; + + class IndirectExecutionSetEXT + { + public: + using CType = VkIndirectExecutionSetEXT; + using CppType = VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT; + + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eIndirectExecutionSetEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; + + public: +# if !defined( 
VULKAN_HPP_RAII_NO_EXCEPTIONS ) + IndirectExecutionSetEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VULKAN_HPP_NAMESPACE::IndirectExecutionSetCreateInfoEXT const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + { + *this = device.createIndirectExecutionSetEXT( createInfo, allocator ); + } +# endif + + IndirectExecutionSetEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, + VkIndirectExecutionSetEXT indirectExecutionSet, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) + : m_device( device ) + , m_indirectExecutionSet( indirectExecutionSet ) + , m_allocator( static_cast( allocator ) ) + , m_dispatcher( device.getDispatcher() ) + { + } + + IndirectExecutionSetEXT( std::nullptr_t ) {} + + ~IndirectExecutionSetEXT() + { + clear(); + } + + IndirectExecutionSetEXT() = delete; + IndirectExecutionSetEXT( IndirectExecutionSetEXT const & ) = delete; + + IndirectExecutionSetEXT( IndirectExecutionSetEXT && rhs ) VULKAN_HPP_NOEXCEPT + : m_device( VULKAN_HPP_NAMESPACE::exchange( rhs.m_device, {} ) ) + , m_indirectExecutionSet( VULKAN_HPP_NAMESPACE::exchange( rhs.m_indirectExecutionSet, {} ) ) + , m_allocator( VULKAN_HPP_NAMESPACE::exchange( rhs.m_allocator, {} ) ) + , m_dispatcher( VULKAN_HPP_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) ) + { + } + + IndirectExecutionSetEXT & operator=( IndirectExecutionSetEXT const & ) = delete; + + IndirectExecutionSetEXT & operator=( IndirectExecutionSetEXT && rhs ) VULKAN_HPP_NOEXCEPT + { + if ( this != &rhs ) + { + std::swap( m_device, rhs.m_device ); + std::swap( m_indirectExecutionSet, rhs.m_indirectExecutionSet ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + return *this; + } + + VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT const & operator*() const VULKAN_HPP_NOEXCEPT + { + return m_indirectExecutionSet; + } + + operator VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT() const VULKAN_HPP_NOEXCEPT + { + return m_indirectExecutionSet; + } + + void clear() VULKAN_HPP_NOEXCEPT + { + if ( m_indirectExecutionSet ) + { + getDispatcher()->vkDestroyIndirectExecutionSetEXT( static_cast( m_device ), + static_cast( m_indirectExecutionSet ), + reinterpret_cast( m_allocator ) ); + } + m_device = nullptr; + m_indirectExecutionSet = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + } + + VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT release() + { + m_device = nullptr; + m_allocator = nullptr; + m_dispatcher = nullptr; + return VULKAN_HPP_NAMESPACE::exchange( m_indirectExecutionSet, nullptr ); + } + + VULKAN_HPP_NAMESPACE::Device getDevice() const + { + return m_device; + } + + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const + { + VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); + return m_dispatcher; + } + + void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::IndirectExecutionSetEXT & rhs ) VULKAN_HPP_NOEXCEPT + { + std::swap( m_device, rhs.m_device ); + std::swap( m_indirectExecutionSet, rhs.m_indirectExecutionSet ); + std::swap( m_allocator, rhs.m_allocator ); + std::swap( m_dispatcher, rhs.m_dispatcher ); + } + + //=== VK_EXT_device_generated_commands === + + void updatePipeline( VULKAN_HPP_NAMESPACE::ArrayProxy const & executionSetWrites ) const + VULKAN_HPP_NOEXCEPT; + + void updateShader( VULKAN_HPP_NAMESPACE::ArrayProxy const & executionSetWrites ) const + VULKAN_HPP_NOEXCEPT; + + private: + VULKAN_HPP_NAMESPACE::Device 
m_device = {}; + VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT m_indirectExecutionSet = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; class MicromapEXT @@ -9116,7 +9502,7 @@ namespace VULKAN_HPP_NAMESPACE return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -9131,10 +9517,10 @@ namespace VULKAN_HPP_NAMESPACE } private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::MicromapEXT m_micromap = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::MicromapEXT m_micromap = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; class OpticalFlowSessionNV @@ -9236,7 +9622,7 @@ namespace VULKAN_HPP_NAMESPACE return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -9257,10 +9643,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::ImageLayout layout ) const; private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV m_session = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV m_session = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; class PerformanceConfigurationINTEL @@ -9351,7 +9737,7 @@ namespace VULKAN_HPP_NAMESPACE return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -9365,9 +9751,9 @@ namespace VULKAN_HPP_NAMESPACE } private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL m_configuration = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL m_configuration = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; class PipelineCache @@ -9469,7 +9855,7 @@ namespace VULKAN_HPP_NAMESPACE return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + 
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -9490,10 +9876,10 @@ namespace VULKAN_HPP_NAMESPACE void merge( VULKAN_HPP_NAMESPACE::ArrayProxy const & srcCaches ) const; private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::PipelineCache m_pipelineCache = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::PipelineCache m_pipelineCache = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; class Pipeline @@ -9649,7 +10035,7 @@ namespace VULKAN_HPP_NAMESPACE return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -9706,11 +10092,11 @@ namespace VULKAN_HPP_NAMESPACE void compileDeferredNV( uint32_t shader ) const; private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::Pipeline m_pipeline = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::Result m_constructorSuccessCode = VULKAN_HPP_NAMESPACE::Result::eErrorUnknown; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::Pipeline m_pipeline = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::Result m_constructorSuccessCode = VULKAN_HPP_NAMESPACE::Result::eErrorUnknown; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; class Pipelines : public std::vector @@ -9885,7 +10271,7 @@ namespace VULKAN_HPP_NAMESPACE return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -9901,11 +10287,11 @@ namespace VULKAN_HPP_NAMESPACE } private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::PipelineBinaryKHR m_pipelineBinary = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::Result m_constructorSuccessCode = VULKAN_HPP_NAMESPACE::Result::eErrorUnknown; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::PipelineBinaryKHR m_pipelineBinary = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::Result m_constructorSuccessCode = VULKAN_HPP_NAMESPACE::Result::eErrorUnknown; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; class PipelineBinaryKHRs : public std::vector @@ -10034,7 +10420,7 @@ namespace VULKAN_HPP_NAMESPACE return m_device; } - 
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -10049,10 +10435,10 @@ namespace VULKAN_HPP_NAMESPACE } private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::PipelineLayout m_pipelineLayout = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::PipelineLayout m_pipelineLayout = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; class PrivateDataSlot @@ -10154,7 +10540,7 @@ namespace VULKAN_HPP_NAMESPACE return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -10169,10 +10555,10 @@ namespace VULKAN_HPP_NAMESPACE } private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::PrivateDataSlot m_privateDataSlot = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::PrivateDataSlot m_privateDataSlot = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; class QueryPool @@ -10273,7 +10659,7 @@ namespace VULKAN_HPP_NAMESPACE return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -10313,10 +10699,10 @@ namespace VULKAN_HPP_NAMESPACE void resetEXT( uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT; private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::QueryPool m_queryPool = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::QueryPool m_queryPool = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; class Queue @@ -10404,7 +10790,7 @@ namespace VULKAN_HPP_NAMESPACE return VULKAN_HPP_NAMESPACE::exchange( m_queue, nullptr ); } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -10447,6 +10833,8 @@ namespace 
VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD std::vector getCheckpointDataNV() const; + VULKAN_HPP_NODISCARD std::vector getCheckpointData2NV() const; + //=== VK_INTEL_performance_query === void setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration ) const; @@ -10456,15 +10844,13 @@ namespace VULKAN_HPP_NAMESPACE void submit2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy const & submits, VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const; - VULKAN_HPP_NODISCARD std::vector getCheckpointData2NV() const; - //=== VK_NV_low_latency2 === void notifyOutOfBandNV( const VULKAN_HPP_NAMESPACE::OutOfBandQueueTypeInfoNV & queueTypeInfo ) const VULKAN_HPP_NOEXCEPT; private: - VULKAN_HPP_NAMESPACE::Queue m_queue = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Queue m_queue = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; class RenderPass @@ -10574,7 +10960,7 @@ namespace VULKAN_HPP_NAMESPACE return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -10597,10 +10983,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Extent2D getSubpassShadingMaxWorkgroupSizeHUAWEI() const; private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::RenderPass m_renderPass = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::RenderPass m_renderPass = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; class Sampler @@ -10701,7 +11087,7 @@ namespace VULKAN_HPP_NAMESPACE return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -10716,10 +11102,10 @@ namespace VULKAN_HPP_NAMESPACE } private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::Sampler m_sampler = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::Sampler m_sampler = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; class SamplerYcbcrConversion @@ -10821,7 +11207,7 @@ namespace VULKAN_HPP_NAMESPACE return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; 
@@ -10836,10 +11222,10 @@ namespace VULKAN_HPP_NAMESPACE } private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion m_ycbcrConversion = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion m_ycbcrConversion = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; class Semaphore @@ -10940,7 +11326,7 @@ namespace VULKAN_HPP_NAMESPACE return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -10963,10 +11349,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD uint64_t getCounterValueKHR() const; private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::Semaphore m_semaphore = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::Semaphore m_semaphore = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; class ShaderEXT @@ -11078,7 +11464,7 @@ namespace VULKAN_HPP_NAMESPACE return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -11098,11 +11484,11 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD std::vector getBinaryData() const; private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::ShaderEXT m_shader = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::Result m_constructorSuccessCode = VULKAN_HPP_NAMESPACE::Result::eErrorUnknown; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::ShaderEXT m_shader = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::Result m_constructorSuccessCode = VULKAN_HPP_NAMESPACE::Result::eErrorUnknown; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; class ShaderEXTs : public std::vector @@ -11230,7 +11616,7 @@ namespace VULKAN_HPP_NAMESPACE return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -11249,10 +11635,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT getIdentifierEXT() const 
VULKAN_HPP_NOEXCEPT; private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::ShaderModule m_shaderModule = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::ShaderModule m_shaderModule = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; class SurfaceKHR @@ -11505,7 +11891,7 @@ namespace VULKAN_HPP_NAMESPACE return m_instance; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::InstanceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -11520,10 +11906,10 @@ namespace VULKAN_HPP_NAMESPACE } private: - VULKAN_HPP_NAMESPACE::Instance m_instance = {}; - VULKAN_HPP_NAMESPACE::SurfaceKHR m_surface = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Instance m_instance = {}; + VULKAN_HPP_NAMESPACE::SurfaceKHR m_surface = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::InstanceDispatcher const * m_dispatcher = nullptr; }; class SwapchainKHR @@ -11624,7 +12010,7 @@ namespace VULKAN_HPP_NAMESPACE return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -11688,10 +12074,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD std::vector getLatencyTimingsNV() const; private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::SwapchainKHR m_swapchain = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::SwapchainKHR m_swapchain = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; class SwapchainKHRs : public std::vector @@ -11820,7 +12206,7 @@ namespace VULKAN_HPP_NAMESPACE return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -11841,10 +12227,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD std::vector getData() const; private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::ValidationCacheEXT m_validationCache = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device 
= {}; + VULKAN_HPP_NAMESPACE::ValidationCacheEXT m_validationCache = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; class VideoSessionKHR @@ -11946,7 +12332,7 @@ namespace VULKAN_HPP_NAMESPACE return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -11967,10 +12353,10 @@ namespace VULKAN_HPP_NAMESPACE void bindMemory( VULKAN_HPP_NAMESPACE::ArrayProxy const & bindSessionMemoryInfos ) const; private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::VideoSessionKHR m_videoSession = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::VideoSessionKHR m_videoSession = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; class VideoSessionParametersKHR @@ -12072,7 +12458,7 @@ namespace VULKAN_HPP_NAMESPACE return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -12091,10 +12477,10 @@ namespace VULKAN_HPP_NAMESPACE void update( const VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR & updateInfo ) const; private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR m_videoSessionParameters = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR m_videoSessionParameters = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; //=========================== @@ -12103,9 +12489,9 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_VERSION_1_0 === - VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - Context::createInstance( VULKAN_HPP_NAMESPACE::InstanceCreateInfo const & createInfo, + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Context::createInstance( VULKAN_HPP_NAMESPACE::InstanceCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT { VULKAN_HPP_NAMESPACE::Instance instance; @@ -12125,8 +12511,8 @@ namespace VULKAN_HPP_NAMESPACE return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance( *this, *reinterpret_cast( &instance ), allocator ); } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType>::Type + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType< + std::vector>::Type Instance::enumeratePhysicalDevices() const { std::vector physicalDevices; @@ -12270,7 +12656,7 @@ namespace VULKAN_HPP_NAMESPACE } VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type PhysicalDevice::createDevice( VULKAN_HPP_NAMESPACE::DeviceCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT @@ -12407,7 +12793,7 @@ namespace VULKAN_HPP_NAMESPACE } VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type Device::getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT { VULKAN_HPP_NAMESPACE::Queue queue; @@ -12442,9 +12828,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::waitIdle" ); } - VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - Device::allocateMemory( VULKAN_HPP_NAMESPACE::MemoryAllocateInfo const & allocateInfo, + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::allocateMemory( VULKAN_HPP_NAMESPACE::MemoryAllocateInfo const & allocateInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT { VULKAN_HPP_NAMESPACE::DeviceMemory memory; @@ -12638,7 +13024,7 @@ namespace VULKAN_HPP_NAMESPACE } VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type Device::createFence( VULKAN_HPP_NAMESPACE::FenceCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT { @@ -12694,9 +13080,9 @@ namespace VULKAN_HPP_NAMESPACE return static_cast( result ); } - VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - Device::createSemaphore( VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo const & createInfo, + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createSemaphore( VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT { VULKAN_HPP_NAMESPACE::Semaphore semaphore; @@ -12718,7 +13104,7 @@ namespace VULKAN_HPP_NAMESPACE } VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type Device::createEvent( VULKAN_HPP_NAMESPACE::EventCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT { @@ -12770,9 +13156,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Event::reset" ); } - VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - Device::createQueryPool( 
VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo const & createInfo, + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createQueryPool( VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT { VULKAN_HPP_NAMESPACE::QueryPool queryPool; @@ -12839,7 +13225,7 @@ namespace VULKAN_HPP_NAMESPACE } VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type Device::createBuffer( VULKAN_HPP_NAMESPACE::BufferCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT { @@ -12861,9 +13247,9 @@ namespace VULKAN_HPP_NAMESPACE return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Buffer( *this, *reinterpret_cast( &buffer ), allocator ); } - VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - Device::createBufferView( VULKAN_HPP_NAMESPACE::BufferViewCreateInfo const & createInfo, + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createBufferView( VULKAN_HPP_NAMESPACE::BufferViewCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT { @@ -12886,7 +13272,7 @@ namespace VULKAN_HPP_NAMESPACE } VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type Device::createImage( VULKAN_HPP_NAMESPACE::ImageCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT { @@ -12922,9 +13308,9 @@ namespace VULKAN_HPP_NAMESPACE return layout; } - VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - Device::createImageView( VULKAN_HPP_NAMESPACE::ImageViewCreateInfo const & createInfo, + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createImageView( VULKAN_HPP_NAMESPACE::ImageViewCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT { VULKAN_HPP_NAMESPACE::ImageView view; @@ -12945,9 +13331,9 @@ namespace VULKAN_HPP_NAMESPACE return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::ImageView( *this, *reinterpret_cast( &view ), allocator ); } - VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - Device::createShaderModule( VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo const & createInfo, + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createShaderModule( VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT { @@ -12969,9 +13355,9 @@ namespace VULKAN_HPP_NAMESPACE return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::ShaderModule( *this, *reinterpret_cast( &shaderModule ), allocator ); } - VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - 
Device::createPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo const & createInfo, + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT { @@ -13033,7 +13419,7 @@ namespace VULKAN_HPP_NAMESPACE } VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType>::Type + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType>::Type Device::createGraphicsPipelines( VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, @@ -13065,9 +13451,9 @@ namespace VULKAN_HPP_NAMESPACE return pipelinesRAII; } - VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - Device::createGraphicsPipeline( + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createGraphicsPipeline( VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT @@ -13093,7 +13479,7 @@ namespace VULKAN_HPP_NAMESPACE } VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType>::Type + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType>::Type Device::createComputePipelines( VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, @@ -13125,8 +13511,8 @@ namespace VULKAN_HPP_NAMESPACE return pipelinesRAII; } - VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type Device::createComputePipeline( VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const @@ -13152,9 +13538,9 @@ namespace VULKAN_HPP_NAMESPACE return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Pipeline( *this, *reinterpret_cast( &pipeline ), allocator, result ); } - VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - Device::createPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo const & createInfo, + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT { @@ -13176,9 +13562,9 @@ namespace VULKAN_HPP_NAMESPACE return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineLayout( *this, *reinterpret_cast( &pipelineLayout ), allocator ); } - VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - Device::createSampler( VULKAN_HPP_NAMESPACE::SamplerCreateInfo const & createInfo, + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createSampler( 
VULKAN_HPP_NAMESPACE::SamplerCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT { VULKAN_HPP_NAMESPACE::Sampler sampler; @@ -13200,7 +13586,7 @@ namespace VULKAN_HPP_NAMESPACE } VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type Device::createDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT @@ -13223,12 +13609,15 @@ namespace VULKAN_HPP_NAMESPACE return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DescriptorSetLayout( *this, *reinterpret_cast( &setLayout ), allocator ); } - VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - Device::createDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo const & createInfo, + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT { + VULKAN_HPP_ASSERT( + createInfo.flags & vk::DescriptorPoolCreateFlagBits::eFreeDescriptorSet && + "createInfo.flags need to have vk::DescriptorPoolCreateFlagBits::eFreeDesriptors set in order to allow destruction of VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DescriptorSet which requires to return individual allocations to the pool" ); VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool; VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateDescriptorPool( static_cast( m_device ), @@ -13255,8 +13644,8 @@ namespace VULKAN_HPP_NAMESPACE static_cast( m_device ), static_cast( m_descriptorPool ), static_cast( flags ) ); } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType>::Type + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType< + std::vector>::Type Device::allocateDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo const & allocateInfo ) const { std::vector descriptorSets( allocateInfo.descriptorSetCount ); @@ -13296,9 +13685,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( descriptorCopies.data() ) ); } - VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - Device::createFramebuffer( VULKAN_HPP_NAMESPACE::FramebufferCreateInfo const & createInfo, + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createFramebuffer( VULKAN_HPP_NAMESPACE::FramebufferCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT { @@ -13320,9 +13709,9 @@ namespace VULKAN_HPP_NAMESPACE return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Framebuffer( *this, *reinterpret_cast( &framebuffer ), allocator ); } - VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - Device::createRenderPass( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo const & createInfo, + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + 
Device::createRenderPass( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT { @@ -13355,9 +13744,9 @@ namespace VULKAN_HPP_NAMESPACE return granularity; } - VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - Device::createCommandPool( VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo const & createInfo, + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createCommandPool( VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT { @@ -13388,8 +13777,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::CommandPool::reset" ); } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType>::Type + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType< + std::vector>::Type Device::allocateCommandBuffers( VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo const & allocateInfo ) const { std::vector commandBuffers( allocateInfo.commandBufferCount ); @@ -14382,7 +14771,7 @@ namespace VULKAN_HPP_NAMESPACE } VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type Device::getQueue2( VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 const & queueInfo ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT { VULKAN_HPP_NAMESPACE::Queue queue; @@ -14393,7 +14782,7 @@ namespace VULKAN_HPP_NAMESPACE } VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type Device::createSamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT @@ -14418,7 +14807,7 @@ namespace VULKAN_HPP_NAMESPACE } VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type Device::createDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT @@ -14568,9 +14957,9 @@ namespace VULKAN_HPP_NAMESPACE stride ); } - VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - Device::createRenderPass2( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 const & createInfo, + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createRenderPass2( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT { @@ -14729,7 +15118,7 @@ namespace VULKAN_HPP_NAMESPACE } VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type Device::createPrivateDataSlot( 
VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT @@ -15249,9 +15638,9 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_KHR_swapchain === - VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - Device::createSwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR const & createInfo, + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createSwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT { @@ -15474,7 +15863,7 @@ namespace VULKAN_HPP_NAMESPACE } VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType>::Type + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType>::Type PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex ) const { std::vector displays; @@ -15539,9 +15928,9 @@ namespace VULKAN_HPP_NAMESPACE return properties; } - VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - DisplayKHR::createMode( VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR const & createInfo, + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + DisplayKHR::createMode( VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT { VULKAN_HPP_NAMESPACE::DisplayModeKHR mode; @@ -15579,9 +15968,9 @@ namespace VULKAN_HPP_NAMESPACE return capabilities; } - VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - Instance::createDisplayPlaneSurfaceKHR( VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR const & createInfo, + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Instance::createDisplayPlaneSurfaceKHR( VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT { @@ -15605,8 +15994,8 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_KHR_display_swapchain === - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType>::Type + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType< + std::vector>::Type Device::createSharedSwapchainsKHR( VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, VULKAN_HPP_NAMESPACE::Optional allocator ) const { @@ -15635,9 +16024,9 @@ namespace VULKAN_HPP_NAMESPACE return swapchainsRAII; } - VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - Device::createSharedSwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR const & createInfo, + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createSharedSwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT { @@ -15663,9 +16052,9 @@ namespace VULKAN_HPP_NAMESPACE # if defined( VK_USE_PLATFORM_XLIB_KHR ) //=== 
VK_KHR_xlib_surface === - VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - Instance::createXlibSurfaceKHR( VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR const & createInfo, + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Instance::createXlibSurfaceKHR( VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT { @@ -15703,9 +16092,9 @@ namespace VULKAN_HPP_NAMESPACE # if defined( VK_USE_PLATFORM_XCB_KHR ) //=== VK_KHR_xcb_surface === - VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - Instance::createXcbSurfaceKHR( VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR const & createInfo, + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Instance::createXcbSurfaceKHR( VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT { @@ -15743,9 +16132,9 @@ namespace VULKAN_HPP_NAMESPACE # if defined( VK_USE_PLATFORM_WAYLAND_KHR ) //=== VK_KHR_wayland_surface === - VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - Instance::createWaylandSurfaceKHR( VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR const & createInfo, + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Instance::createWaylandSurfaceKHR( VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT { @@ -15783,9 +16172,9 @@ namespace VULKAN_HPP_NAMESPACE # if defined( VK_USE_PLATFORM_ANDROID_KHR ) //=== VK_KHR_android_surface === - VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - Instance::createAndroidSurfaceKHR( VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR const & createInfo, + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Instance::createAndroidSurfaceKHR( VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT { @@ -15811,9 +16200,9 @@ namespace VULKAN_HPP_NAMESPACE # if defined( VK_USE_PLATFORM_WIN32_KHR ) //=== VK_KHR_win32_surface === - VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - Instance::createWin32SurfaceKHR( VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR const & createInfo, + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Instance::createWin32SurfaceKHR( VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT { @@ -15850,7 +16239,7 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_EXT_debug_report === VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type Instance::createDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT 
const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT @@ -16009,7 +16398,7 @@ namespace VULKAN_HPP_NAMESPACE } VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type Device::createVideoSessionKHR( VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT @@ -16076,8 +16465,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::VideoSessionKHR::bindMemory" ); } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType< + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::VideoSessionParametersKHR>::Type Device::createVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT @@ -16261,9 +16650,9 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_NVX_binary_import === - VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - Device::createCuModuleNVX( VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX const & createInfo, + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createCuModuleNVX( VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT { @@ -16285,9 +16674,9 @@ namespace VULKAN_HPP_NAMESPACE return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CuModuleNVX( *this, *reinterpret_cast( &module ), allocator ); } - VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - Device::createCuFunctionNVX( VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX const & createInfo, + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createCuFunctionNVX( VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT { @@ -16440,9 +16829,9 @@ namespace VULKAN_HPP_NAMESPACE # if defined( VK_USE_PLATFORM_GGP ) //=== VK_GGP_stream_descriptor_surface === - VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - Instance::createStreamDescriptorSurfaceGGP( VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP const & createInfo, + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Instance::createStreamDescriptorSurfaceGGP( VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT { @@ -16774,9 +17163,9 @@ namespace VULKAN_HPP_NAMESPACE # if defined( VK_USE_PLATFORM_VI_NN ) //=== VK_NN_vi_surface === - VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - Instance::createViSurfaceNN( VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN const & createInfo, + 
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Instance::createViSurfaceNN( VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT { @@ -17047,7 +17436,7 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_KHR_descriptor_update_template === VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type Device::createDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT @@ -17123,9 +17512,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::acquireXlibDisplayEXT" ); } - VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - PhysicalDevice::getRandROutputDisplayEXT( Display & dpy, RROutput rrOutput ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + PhysicalDevice::getRandROutputDisplayEXT( Display & dpy, RROutput rrOutput ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT { VULKAN_HPP_NAMESPACE::DisplayKHR display; VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkGetRandROutputDisplayEXT( @@ -17174,7 +17563,7 @@ namespace VULKAN_HPP_NAMESPACE } VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type Device::registerEventEXT( VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT const & deviceEventInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT @@ -17198,7 +17587,7 @@ namespace VULKAN_HPP_NAMESPACE } VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type Device::registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DisplayKHR const & display, VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT const & displayEventInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const @@ -17336,9 +17725,9 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_KHR_create_renderpass2 === - VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - Device::createRenderPass2KHR( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 const & createInfo, + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createRenderPass2KHR( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT { @@ -17765,9 +18154,9 @@ namespace VULKAN_HPP_NAMESPACE # if defined( VK_USE_PLATFORM_IOS_MVK ) //=== VK_MVK_ios_surface === - VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - Instance::createIOSSurfaceMVK( VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK const & createInfo, + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + 
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Instance::createIOSSurfaceMVK( VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT { @@ -17793,9 +18182,9 @@ namespace VULKAN_HPP_NAMESPACE # if defined( VK_USE_PLATFORM_MACOS_MVK ) //=== VK_MVK_macos_surface === - VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - Instance::createMacOSSurfaceMVK( VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK const & createInfo, + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Instance::createMacOSSurfaceMVK( VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT { @@ -17883,7 +18272,7 @@ namespace VULKAN_HPP_NAMESPACE } VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type Instance::createDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT @@ -17973,7 +18362,7 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_AMDX_shader_enqueue === VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType>::Type + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType>::Type Device::createExecutionGraphPipelinesAMDX( VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, @@ -18005,9 +18394,9 @@ namespace VULKAN_HPP_NAMESPACE return pipelinesRAII; } - VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - Device::createExecutionGraphPipelineAMDX( + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createExecutionGraphPipelineAMDX( VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT @@ -18062,43 +18451,55 @@ namespace VULKAN_HPP_NAMESPACE return nodeIndex; } - VULKAN_HPP_INLINE void CommandBuffer::initializeGraphScratchMemoryAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::initializeGraphScratchMemoryAMDX( VULKAN_HPP_NAMESPACE::Pipeline executionGraph, + VULKAN_HPP_NAMESPACE::DeviceAddress scratch, + VULKAN_HPP_NAMESPACE::DeviceSize scratchSize ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( getDispatcher()->vkCmdInitializeGraphScratchMemoryAMDX && "Function requires " ); - getDispatcher()->vkCmdInitializeGraphScratchMemoryAMDX( static_cast( m_commandBuffer ), static_cast( scratch ) ); + getDispatcher()->vkCmdInitializeGraphScratchMemoryAMDX( static_cast( m_commandBuffer ), + static_cast( executionGraph ), + static_cast( scratch ), + static_cast( scratchSize ) ); } VULKAN_HPP_INLINE void CommandBuffer::dispatchGraphAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch, + VULKAN_HPP_NAMESPACE::DeviceSize scratchSize, const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX & countInfo ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( 
getDispatcher()->vkCmdDispatchGraphAMDX && "Function requires " ); getDispatcher()->vkCmdDispatchGraphAMDX( static_cast( m_commandBuffer ), static_cast( scratch ), + static_cast( scratchSize ), reinterpret_cast( &countInfo ) ); } VULKAN_HPP_INLINE void CommandBuffer::dispatchGraphIndirectAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch, + VULKAN_HPP_NAMESPACE::DeviceSize scratchSize, const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX & countInfo ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDispatchGraphIndirectAMDX && "Function requires " ); getDispatcher()->vkCmdDispatchGraphIndirectAMDX( static_cast( m_commandBuffer ), static_cast( scratch ), + static_cast( scratchSize ), reinterpret_cast( &countInfo ) ); } VULKAN_HPP_INLINE void CommandBuffer::dispatchGraphIndirectCountAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch, + VULKAN_HPP_NAMESPACE::DeviceSize scratchSize, VULKAN_HPP_NAMESPACE::DeviceAddress countInfo ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDispatchGraphIndirectCountAMDX && "Function requires " ); - getDispatcher()->vkCmdDispatchGraphIndirectCountAMDX( - static_cast( m_commandBuffer ), static_cast( scratch ), static_cast( countInfo ) ); + getDispatcher()->vkCmdDispatchGraphIndirectCountAMDX( static_cast( m_commandBuffer ), + static_cast( scratch ), + static_cast( scratchSize ), + static_cast( countInfo ) ); } # endif /*VK_ENABLE_BETA_EXTENSIONS*/ @@ -18216,7 +18617,7 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_KHR_acceleration_structure === VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type Device::createAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT @@ -18559,7 +18960,7 @@ namespace VULKAN_HPP_NAMESPACE } VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType>::Type + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType>::Type Device::createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::Optional const & deferredOperation, VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, @@ -18594,9 +18995,9 @@ namespace VULKAN_HPP_NAMESPACE return pipelinesRAII; } - VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - Device::createRayTracingPipelineKHR( + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createRayTracingPipelineKHR( VULKAN_HPP_NAMESPACE::Optional const & deferredOperation, VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR const & createInfo, @@ -18743,7 +19144,7 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_KHR_sampler_ycbcr_conversion === VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type Device::createSamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT @@ -18820,7 +19221,7 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_EXT_validation_cache === VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE - 
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type Device::createValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT @@ -18922,7 +19323,7 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_NV_ray_tracing === VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type Device::createAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT @@ -19056,7 +19457,7 @@ namespace VULKAN_HPP_NAMESPACE } VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType>::Type + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType>::Type Device::createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, @@ -19088,9 +19489,9 @@ namespace VULKAN_HPP_NAMESPACE return pipelinesRAII; } - VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - Device::createRayTracingPipelineNV( + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createRayTracingPipelineNV( VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT @@ -19322,6 +19723,20 @@ namespace VULKAN_HPP_NAMESPACE marker ); } + VULKAN_HPP_INLINE void CommandBuffer::writeBufferMarker2AMD( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, + uint32_t marker ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdWriteBufferMarker2AMD && "Function requires " ); + + getDispatcher()->vkCmdWriteBufferMarker2AMD( static_cast( m_commandBuffer ), + static_cast( stage ), + static_cast( dstBuffer ), + static_cast( dstOffset ), + marker ); + } + //=== VK_EXT_calibrated_timestamps === VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector PhysicalDevice::getCalibrateableTimeDomainsEXT() const @@ -19483,6 +19898,26 @@ namespace VULKAN_HPP_NAMESPACE return checkpointData; } + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector Queue::getCheckpointData2NV() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetQueueCheckpointData2NV && + "Function requires " ); + + std::vector checkpointData; + uint32_t checkpointDataCount; + getDispatcher()->vkGetQueueCheckpointData2NV( static_cast( m_queue ), &checkpointDataCount, nullptr ); + checkpointData.resize( checkpointDataCount ); + getDispatcher()->vkGetQueueCheckpointData2NV( + static_cast( m_queue ), &checkpointDataCount, reinterpret_cast( checkpointData.data() ) ); + + VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() ); + if ( checkpointDataCount < checkpointData.size() ) + { + checkpointData.resize( checkpointDataCount ); + } + return checkpointData; + } + //=== VK_KHR_timeline_semaphore === VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t Semaphore::getCounterValueKHR() const @@ -19569,8 +20004,8 @@ namespace 
VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceOverrideINTEL" ); } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType< + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PerformanceConfigurationINTEL>::Type Device::acquirePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL const & acquireInfo ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT { @@ -19628,9 +20063,9 @@ namespace VULKAN_HPP_NAMESPACE # if defined( VK_USE_PLATFORM_FUCHSIA ) //=== VK_FUCHSIA_imagepipe_surface === - VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - Instance::createImagePipeSurfaceFUCHSIA( VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA const & createInfo, + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Instance::createImagePipeSurfaceFUCHSIA( VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT { @@ -19656,9 +20091,9 @@ namespace VULKAN_HPP_NAMESPACE # if defined( VK_USE_PLATFORM_METAL_EXT ) //=== VK_EXT_metal_surface === - VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - Instance::createMetalSurfaceEXT( VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT const & createInfo, + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Instance::createMetalSurfaceEXT( VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT { @@ -19950,9 +20385,9 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_EXT_headless_surface === - VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - Instance::createHeadlessSurfaceEXT( VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT const & createInfo, + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Instance::createHeadlessSurfaceEXT( VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT { @@ -20175,7 +20610,7 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_KHR_deferred_host_operations === VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type Device::createDeferredOperationKHR( VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT { @@ -20509,7 +20944,7 @@ namespace VULKAN_HPP_NAMESPACE } VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type Device::createIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT @@ -20553,9 +20988,9 @@ namespace VULKAN_HPP_NAMESPACE 
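All of the createXxx / allocateXxx wrappers rewritten in this part of the diff keep the same call signatures; only the CreateReturnType helper that spells their return type moves into the detail namespace. As a minimal usage sketch (illustration only, not part of the patch: `instance` and `device` are assumed to be valid RAII objects, the VK_EXT_headless_surface and VK_KHR_deferred_host_operations extensions are assumed to be enabled, and exceptions are assumed to be on, so detail::CreateReturnType<T>::Type collapses to the RAII handle itself):

#include <vulkan/vulkan_raii.hpp>

// Create a headless surface and a deferred operation through the RAII wrappers
// touched above. On failure these calls throw vk::SystemError; with
// VULKAN_HPP_RAII_NO_EXCEPTIONS defined they would instead return the
// std::expected-style value that detail::CreateReturnType describes.
void raiiCreateSketch( vk::raii::Instance const & instance, vk::raii::Device const & device )
{
    vk::HeadlessSurfaceCreateInfoEXT surfaceInfo{};
    vk::raii::SurfaceKHR surface = instance.createHeadlessSurfaceEXT( surfaceInfo );

    vk::raii::DeferredOperationKHR op = device.createDeferredOperationKHR();

    // Both objects destroy their Vulkan handles automatically when they leave scope.
}
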
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::acquireDrmDisplayEXT" ); } - VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - PhysicalDevice::getDrmDisplayEXT( int32_t drmFd, uint32_t connectorId ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + PhysicalDevice::getDrmDisplayEXT( int32_t drmFd, uint32_t connectorId ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT { VULKAN_HPP_NAMESPACE::DisplayKHR display; VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkGetDrmDisplayEXT( @@ -20575,7 +21010,7 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_EXT_private_data === VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type Device::createPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT @@ -20754,9 +21189,9 @@ namespace VULKAN_HPP_NAMESPACE # if defined( VK_ENABLE_BETA_EXTENSIONS ) //=== VK_NV_cuda_kernel_launch === - VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - Device::createCudaModuleNV( VULKAN_HPP_NAMESPACE::CudaModuleCreateInfoNV const & createInfo, + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createCudaModuleNV( VULKAN_HPP_NAMESPACE::CudaModuleCreateInfoNV const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT { @@ -20805,9 +21240,9 @@ namespace VULKAN_HPP_NAMESPACE return cacheData; } - VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - Device::createCudaFunctionNV( VULKAN_HPP_NAMESPACE::CudaFunctionCreateInfoNV const & createInfo, + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createCudaFunctionNV( VULKAN_HPP_NAMESPACE::CudaFunctionCreateInfoNV const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT { @@ -20933,39 +21368,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::submit2KHR" ); } - VULKAN_HPP_INLINE void CommandBuffer::writeBufferMarker2AMD( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage, - VULKAN_HPP_NAMESPACE::Buffer dstBuffer, - VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, - uint32_t marker ) const VULKAN_HPP_NOEXCEPT - { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdWriteBufferMarker2AMD && "Function requires " ); - - getDispatcher()->vkCmdWriteBufferMarker2AMD( static_cast( m_commandBuffer ), - static_cast( stage ), - static_cast( dstBuffer ), - static_cast( dstOffset ), - marker ); - } - - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector Queue::getCheckpointData2NV() const - { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetQueueCheckpointData2NV && "Function requires " ); - - std::vector checkpointData; - uint32_t checkpointDataCount; - getDispatcher()->vkGetQueueCheckpointData2NV( static_cast( m_queue ), &checkpointDataCount, nullptr ); - checkpointData.resize( checkpointDataCount ); - getDispatcher()->vkGetQueueCheckpointData2NV( - 
static_cast( m_queue ), &checkpointDataCount, reinterpret_cast( checkpointData.data() ) ); - - VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() ); - if ( checkpointDataCount < checkpointData.size() ) - { - checkpointData.resize( checkpointDataCount ); - } - return checkpointData; - } - //=== VK_EXT_descriptor_buffer === VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceSize DescriptorSetLayout::getSizeEXT() const VULKAN_HPP_NOEXCEPT @@ -21252,8 +21654,9 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetDeviceFaultInfoEXT( - m_device, reinterpret_cast( pFaultCounts ), reinterpret_cast( pFaultInfo ) ) ); + return static_cast( d.vkGetDeviceFaultInfoEXT( static_cast( m_device ), + reinterpret_cast( pFaultCounts ), + reinterpret_cast( pFaultInfo ) ) ); } # if defined( VK_USE_PLATFORM_WIN32_KHR ) //=== VK_NV_acquire_winrt_display === @@ -21267,9 +21670,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::DisplayKHR::acquireWinrtNV" ); } - VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - PhysicalDevice::getWinrtDisplayNV( uint32_t deviceRelativeId ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + PhysicalDevice::getWinrtDisplayNV( uint32_t deviceRelativeId ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT { VULKAN_HPP_NAMESPACE::DisplayKHR display; VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkGetWinrtDisplayNV( @@ -21290,9 +21693,9 @@ namespace VULKAN_HPP_NAMESPACE # if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) //=== VK_EXT_directfb_surface === - VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - Instance::createDirectFBSurfaceEXT( VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT const & createInfo, + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Instance::createDirectFBSurfaceEXT( VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT { @@ -21411,7 +21814,7 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_FUCHSIA_buffer_collection === VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type Device::createBufferCollectionFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT @@ -21590,9 +21993,9 @@ namespace VULKAN_HPP_NAMESPACE # if defined( VK_USE_PLATFORM_SCREEN_QNX ) //=== VK_QNX_screen_surface === - VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - Instance::createScreenSurfaceQNX( VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX const & createInfo, + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Instance::createScreenSurfaceQNX( VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const 
VULKAN_HPP_RAII_CREATE_NOEXCEPT { @@ -21683,9 +22086,9 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_EXT_opacity_micromap === - VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - Device::createMicromapEXT( VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT const & createInfo, + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createMicromapEXT( VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT { @@ -22530,7 +22933,7 @@ namespace VULKAN_HPP_NAMESPACE } VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type Device::createOpticalFlowSessionNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT @@ -22683,7 +23086,7 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_EXT_shader_object === VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType>::Type + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType>::Type Device::createShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, VULKAN_HPP_NAMESPACE::Optional allocator ) const { @@ -22712,9 +23115,9 @@ namespace VULKAN_HPP_NAMESPACE return shadersRAII; } - VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - Device::createShaderEXT( VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT const & createInfo, + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createShaderEXT( VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT { VULKAN_HPP_NAMESPACE::ShaderEXT shader; @@ -22782,10 +23185,23 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( shaders.data() ) ); } + VULKAN_HPP_INLINE void + CommandBuffer::setDepthClampRangeEXT( VULKAN_HPP_NAMESPACE::DepthClampModeEXT depthClampMode, + Optional depthClampRange ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthClampRangeEXT && + "Function requires or " ); + + getDispatcher()->vkCmdSetDepthClampRangeEXT( + static_cast( m_commandBuffer ), + static_cast( depthClampMode ), + reinterpret_cast( static_cast( depthClampRange ) ) ); + } + //=== VK_KHR_pipeline_binary === - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType>::Type + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType< + std::vector>::Type Device::createPipelineBinariesKHR( VULKAN_HPP_NAMESPACE::PipelineBinaryCreateInfoKHR const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const { @@ -23212,6 +23628,168 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &bindDescriptorBufferEmbeddedSamplersInfo ) ); } + //=== VK_EXT_device_generated_commands === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 Device::getGeneratedCommandsMemoryRequirementsEXT( + const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoEXT & info ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( 
getDispatcher()->vkGetGeneratedCommandsMemoryRequirementsEXT && + "Function requires " ); + + VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; + getDispatcher()->vkGetGeneratedCommandsMemoryRequirementsEXT( static_cast( m_device ), + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + + return memoryRequirements; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain Device::getGeneratedCommandsMemoryRequirementsEXT( + const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoEXT & info ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetGeneratedCommandsMemoryRequirementsEXT && + "Function requires " ); + + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get(); + getDispatcher()->vkGetGeneratedCommandsMemoryRequirementsEXT( static_cast( m_device ), + reinterpret_cast( &info ), + reinterpret_cast( &memoryRequirements ) ); + + return structureChain; + } + + VULKAN_HPP_INLINE void CommandBuffer::preprocessGeneratedCommandsEXT( const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoEXT & generatedCommandsInfo, + VULKAN_HPP_NAMESPACE::CommandBuffer stateCommandBuffer ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdPreprocessGeneratedCommandsEXT && + "Function requires " ); + + getDispatcher()->vkCmdPreprocessGeneratedCommandsEXT( static_cast( m_commandBuffer ), + reinterpret_cast( &generatedCommandsInfo ), + static_cast( stateCommandBuffer ) ); + } + + VULKAN_HPP_INLINE void + CommandBuffer::executeGeneratedCommandsEXT( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, + const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoEXT & generatedCommandsInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdExecuteGeneratedCommandsEXT && + "Function requires " ); + + getDispatcher()->vkCmdExecuteGeneratedCommandsEXT( static_cast( m_commandBuffer ), + static_cast( isPreprocessed ), + reinterpret_cast( &generatedCommandsInfo ) ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType< + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::IndirectCommandsLayoutEXT>::Type + Device::createIndirectCommandsLayoutEXT( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoEXT const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT indirectCommandsLayout; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateIndirectCommandsLayoutEXT( + static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &indirectCommandsLayout ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createIndirectCommandsLayoutEXT" ); +# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::IndirectCommandsLayoutEXT( + *this, *reinterpret_cast( &indirectCommandsLayout ), allocator ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createIndirectExecutionSetEXT( VULKAN_HPP_NAMESPACE::IndirectExecutionSetCreateInfoEXT const & createInfo, + 
VULKAN_HPP_NAMESPACE::Optional allocator ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT + { + VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT indirectExecutionSet; + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCreateIndirectExecutionSetEXT( + static_cast( m_device ), + reinterpret_cast( &createInfo ), + reinterpret_cast( static_cast( allocator ) ), + reinterpret_cast( &indirectExecutionSet ) ) ); + if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess ) + { +# if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) + return VULKAN_HPP_UNEXPECTED( result ); +# else + VULKAN_HPP_NAMESPACE::detail::throwResultException( result, "Device::createIndirectExecutionSetEXT" ); +# endif + } + + return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::IndirectExecutionSetEXT( + *this, *reinterpret_cast( &indirectExecutionSet ), allocator ); + } + + VULKAN_HPP_INLINE void IndirectExecutionSetEXT::updatePipeline( + VULKAN_HPP_NAMESPACE::ArrayProxy const & executionSetWrites ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkUpdateIndirectExecutionSetPipelineEXT && + "Function requires " ); + + getDispatcher()->vkUpdateIndirectExecutionSetPipelineEXT( static_cast( m_device ), + static_cast( m_indirectExecutionSet ), + executionSetWrites.size(), + reinterpret_cast( executionSetWrites.data() ) ); + } + + VULKAN_HPP_INLINE void IndirectExecutionSetEXT::updateShader( + VULKAN_HPP_NAMESPACE::ArrayProxy const & executionSetWrites ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkUpdateIndirectExecutionSetShaderEXT && + "Function requires " ); + + getDispatcher()->vkUpdateIndirectExecutionSetShaderEXT( static_cast( m_device ), + static_cast( m_indirectExecutionSet ), + executionSetWrites.size(), + reinterpret_cast( executionSetWrites.data() ) ); + } + + //=== VK_NV_cooperative_matrix2 === + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getCooperativeMatrixFlexibleDimensionsPropertiesNV() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV && + "Function requires " ); + + std::vector properties; + uint32_t propertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( getDispatcher()->vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV( + static_cast( m_physicalDevice ), &propertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) + { + properties.resize( propertyCount ); + result = static_cast( getDispatcher()->vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV( + static_cast( m_physicalDevice ), + &propertyCount, + reinterpret_cast( properties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixFlexibleDimensionsPropertiesNV" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + properties.resize( propertyCount ); + } + return properties; + } + //==================== //=== RAII Helpers === //==================== diff --git a/third_party/vulkan/vulkan_shared.hpp b/third_party/vulkan/vulkan_shared.hpp index 1bff517..7b96b6c 100644 --- a/third_party/vulkan/vulkan_shared.hpp +++ b/third_party/vulkan/vulkan_shared.hpp @@ -8,9 +8,12 @@ #ifndef VULKAN_SHARED_HPP #define VULKAN_SHARED_HPP -#include // std::atomic_size_t #include +#if !( defined( VULKAN_HPP_ENABLE_STD_MODULE ) 
&& defined( VULKAN_HPP_STD_MODULE ) ) +# include // std::atomic_size_t +#endif + namespace VULKAN_HPP_NAMESPACE { #if !defined( VULKAN_HPP_NO_SMART_HANDLE ) @@ -305,8 +308,8 @@ namespace VULKAN_HPP_NAMESPACE using BaseType::internalDestroy; }; - template - class SharedHandleTraits; + namespace detail + { // Silence the function cast warnings. # if defined( __GNUC__ ) && !defined( __clang__ ) && !defined( __INTEL_COMPILER ) @@ -314,150 +317,152 @@ namespace VULKAN_HPP_NAMESPACE # pragma GCC diagnostic ignored "-Wcast-function-type" # endif - template - class ObjectDestroyShared - { - public: - using DestructorType = typename SharedHandleTraits::DestructorType; - - template - using DestroyFunctionPointerType = - typename std::conditional::value, - void ( DestructorType::* )( HandleType, const AllocationCallbacks *, const Dispatcher & ) const, - void ( HandleType::* )( const AllocationCallbacks *, const Dispatcher & ) const>::type; - - using SelectorType = typename std::conditional::value, DestructorType, HandleType>::type; - - template - ObjectDestroyShared( Optional allocationCallbacks VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - const Dispatcher & dispatch VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) - : m_destroy( reinterpret_cast( static_cast>( &SelectorType::destroy ) ) ) - , m_dispatch( &dispatch ) - , m_allocationCallbacks( allocationCallbacks ) + template + class ObjectDestroyShared { - } + public: + using DestructorType = typename SharedHandleTraits::DestructorType; - public: - template - typename std::enable_if::value, void>::type destroy( DestructorType parent, HandleType handle ) const VULKAN_HPP_NOEXCEPT + template + using DestroyFunctionPointerType = + typename std::conditional::value, + void ( DestructorType::* )( HandleType, const AllocationCallbacks *, const Dispatcher & ) const, + void ( HandleType::* )( const AllocationCallbacks *, const Dispatcher & ) const>::type; + + using SelectorType = typename std::conditional::value, DestructorType, HandleType>::type; + + template + ObjectDestroyShared( Optional allocationCallbacks VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + const Dispatcher & dispatch VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) + : m_destroy( reinterpret_cast( static_cast>( &SelectorType::destroy ) ) ) + , m_dispatch( &dispatch ) + , m_allocationCallbacks( allocationCallbacks ) + { + } + + public: + template + typename std::enable_if::value, void>::type destroy( DestructorType parent, HandleType handle ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( m_destroy && m_dispatch ); + ( parent.*m_destroy )( handle, m_allocationCallbacks, *m_dispatch ); + } + + template + typename std::enable_if::value, void>::type destroy( HandleType handle ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( m_destroy && m_dispatch ); + ( handle.*m_destroy )( m_allocationCallbacks, *m_dispatch ); + } + + private: + DestroyFunctionPointerType m_destroy = nullptr; + const detail::DispatchLoaderBase * m_dispatch = nullptr; + Optional m_allocationCallbacks = nullptr; + }; + + template + class ObjectFreeShared { - VULKAN_HPP_ASSERT( m_destroy && m_dispatch ); - ( parent.*m_destroy )( handle, m_allocationCallbacks, *m_dispatch ); - } + public: + using DestructorType = typename SharedHandleTraits::DestructorType; - template - typename std::enable_if::value, void>::type destroy( HandleType handle ) const VULKAN_HPP_NOEXCEPT + template + using DestroyFunctionPointerType = void ( DestructorType::* )( HandleType, const AllocationCallbacks *, const Dispatcher & ) const; + + template + 
ObjectFreeShared( Optional allocationCallbacks VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + const Dispatcher & dispatch VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) + : m_destroy( reinterpret_cast( static_cast>( &DestructorType::free ) ) ) + , m_dispatch( &dispatch ) + , m_allocationCallbacks( allocationCallbacks ) + { + } + + public: + void destroy( DestructorType parent, HandleType handle ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( m_destroy && m_dispatch ); + ( parent.*m_destroy )( handle, m_allocationCallbacks, *m_dispatch ); + } + + private: + DestroyFunctionPointerType m_destroy = nullptr; + const detail::DispatchLoaderBase * m_dispatch = nullptr; + Optional m_allocationCallbacks = nullptr; + }; + + template + class ObjectReleaseShared { - VULKAN_HPP_ASSERT( m_destroy && m_dispatch ); - ( handle.*m_destroy )( m_allocationCallbacks, *m_dispatch ); - } + public: + using DestructorType = typename SharedHandleTraits::DestructorType; - private: - DestroyFunctionPointerType m_destroy = nullptr; - const DispatchLoaderBase * m_dispatch = nullptr; - Optional m_allocationCallbacks = nullptr; - }; + template + using DestroyFunctionPointerType = void ( DestructorType::* )( HandleType, const Dispatcher & ) const; - template - class ObjectFreeShared - { - public: - using DestructorType = typename SharedHandleTraits::DestructorType; + template + ObjectReleaseShared( const Dispatcher & dispatch VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) + : m_destroy( reinterpret_cast( static_cast>( &DestructorType::release ) ) ) + , m_dispatch( &dispatch ) + { + } - template - using DestroyFunctionPointerType = void ( DestructorType::* )( HandleType, const AllocationCallbacks *, const Dispatcher & ) const; + public: + void destroy( DestructorType parent, HandleType handle ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( m_destroy && m_dispatch ); + ( parent.*m_destroy )( handle, *m_dispatch ); + } - template - ObjectFreeShared( Optional allocationCallbacks VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - const Dispatcher & dispatch VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) - : m_destroy( reinterpret_cast( static_cast>( &DestructorType::free ) ) ) - , m_dispatch( &dispatch ) - , m_allocationCallbacks( allocationCallbacks ) + private: + DestroyFunctionPointerType m_destroy = nullptr; + const detail::DispatchLoaderBase * m_dispatch = nullptr; + }; + + template + class PoolFreeShared { - } + public: + using DestructorType = typename SharedHandleTraits::DestructorType; - public: - void destroy( DestructorType parent, HandleType handle ) const VULKAN_HPP_NOEXCEPT - { - VULKAN_HPP_ASSERT( m_destroy && m_dispatch ); - ( parent.*m_destroy )( handle, m_allocationCallbacks, *m_dispatch ); - } + using PoolTypeExport = PoolType; - private: - DestroyFunctionPointerType m_destroy = nullptr; - const DispatchLoaderBase * m_dispatch = nullptr; - Optional m_allocationCallbacks = nullptr; - }; + template + using ReturnType = decltype( std::declval().free( PoolType(), 0u, nullptr, Dispatcher() ) ); - template - class ObjectReleaseShared - { - public: - using DestructorType = typename SharedHandleTraits::DestructorType; + template + using DestroyFunctionPointerType = ReturnType ( DestructorType::* )( PoolType, uint32_t, const HandleType *, const Dispatcher & ) const; - template - using DestroyFunctionPointerType = void ( DestructorType::* )( HandleType, const Dispatcher & ) const; + PoolFreeShared() = default; - template - ObjectReleaseShared( const Dispatcher & dispatch VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) - : 
m_destroy( reinterpret_cast( static_cast>( &DestructorType::release ) ) ) - , m_dispatch( &dispatch ) - { - } + template + PoolFreeShared( SharedHandle pool, const Dispatcher & dispatch VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) + : m_destroy( reinterpret_cast( static_cast>( &DestructorType::free ) ) ) + , m_dispatch( &dispatch ) + , m_pool( std::move( pool ) ) + { + } - public: - void destroy( DestructorType parent, HandleType handle ) const VULKAN_HPP_NOEXCEPT - { - VULKAN_HPP_ASSERT( m_destroy && m_dispatch ); - ( parent.*m_destroy )( handle, *m_dispatch ); - } + public: + void destroy( DestructorType parent, HandleType handle ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( m_destroy && m_dispatch && m_pool ); + ( parent.*m_destroy )( m_pool.get(), 1u, &handle, *m_dispatch ); + } - private: - DestroyFunctionPointerType m_destroy = nullptr; - const DispatchLoaderBase * m_dispatch = nullptr; - }; - - template - class PoolFreeShared - { - public: - using DestructorType = typename SharedHandleTraits::DestructorType; - - using PoolTypeExport = PoolType; - - template - using ReturnType = decltype( std::declval().free( PoolType(), 0u, nullptr, Dispatcher() ) ); - - template - using DestroyFunctionPointerType = ReturnType ( DestructorType::* )( PoolType, uint32_t, const HandleType *, const Dispatcher & ) const; - - PoolFreeShared() = default; - - template - PoolFreeShared( SharedHandle pool, const Dispatcher & dispatch VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) - : m_destroy( reinterpret_cast( static_cast>( &DestructorType::free ) ) ) - , m_dispatch( &dispatch ) - , m_pool( std::move( pool ) ) - { - } - - public: - void destroy( DestructorType parent, HandleType handle ) const VULKAN_HPP_NOEXCEPT - { - VULKAN_HPP_ASSERT( m_destroy && m_dispatch && m_pool ); - ( parent.*m_destroy )( m_pool.get(), 1u, &handle, *m_dispatch ); - } - - private: - DestroyFunctionPointerType m_destroy = nullptr; - const DispatchLoaderBase * m_dispatch = nullptr; - SharedHandle m_pool{}; - }; + private: + DestroyFunctionPointerType m_destroy = nullptr; + const detail::DispatchLoaderBase * m_dispatch = nullptr; + SharedHandle m_pool{}; + }; # if defined( __GNUC__ ) && !defined( __clang__ ) && !defined( __INTEL_COMPILER ) # pragma GCC diagnostic pop # endif + } // namespace detail + //====================== //=== SHARED HANDLEs === //====================== @@ -468,7 +473,7 @@ namespace VULKAN_HPP_NAMESPACE { public: using DestructorType = NoDestructor; - using deleter = ObjectDestroyShared; + using deleter = detail::ObjectDestroyShared; }; using SharedInstance = SharedHandle; @@ -478,7 +483,7 @@ namespace VULKAN_HPP_NAMESPACE { public: using DestructorType = NoDestructor; - using deleter = ObjectDestroyShared; + using deleter = detail::ObjectDestroyShared; }; using SharedDevice = SharedHandle; @@ -488,7 +493,7 @@ namespace VULKAN_HPP_NAMESPACE { public: using DestructorType = Device; - using deleter = ObjectFreeShared; + using deleter = detail::ObjectFreeShared; }; using SharedDeviceMemory = SharedHandle; @@ -498,7 +503,7 @@ namespace VULKAN_HPP_NAMESPACE { public: using DestructorType = Device; - using deleter = ObjectDestroyShared; + using deleter = detail::ObjectDestroyShared; }; using SharedFence = SharedHandle; @@ -508,7 +513,7 @@ namespace VULKAN_HPP_NAMESPACE { public: using DestructorType = Device; - using deleter = ObjectDestroyShared; + using deleter = detail::ObjectDestroyShared; }; using SharedSemaphore = SharedHandle; @@ -518,7 +523,7 @@ namespace VULKAN_HPP_NAMESPACE { public: using DestructorType = 
Device; - using deleter = ObjectDestroyShared; + using deleter = detail::ObjectDestroyShared; }; using SharedEvent = SharedHandle; @@ -528,7 +533,7 @@ namespace VULKAN_HPP_NAMESPACE { public: using DestructorType = Device; - using deleter = ObjectDestroyShared; + using deleter = detail::ObjectDestroyShared; }; using SharedQueryPool = SharedHandle; @@ -538,7 +543,7 @@ namespace VULKAN_HPP_NAMESPACE { public: using DestructorType = Device; - using deleter = ObjectDestroyShared; + using deleter = detail::ObjectDestroyShared; }; using SharedBuffer = SharedHandle; @@ -548,7 +553,7 @@ namespace VULKAN_HPP_NAMESPACE { public: using DestructorType = Device; - using deleter = ObjectDestroyShared; + using deleter = detail::ObjectDestroyShared; }; using SharedBufferView = SharedHandle; @@ -558,7 +563,7 @@ namespace VULKAN_HPP_NAMESPACE { public: using DestructorType = Device; - using deleter = ObjectDestroyShared; + using deleter = detail::ObjectDestroyShared; }; using SharedImage = SharedHandle; @@ -568,7 +573,7 @@ namespace VULKAN_HPP_NAMESPACE { public: using DestructorType = Device; - using deleter = ObjectDestroyShared; + using deleter = detail::ObjectDestroyShared; }; using SharedImageView = SharedHandle; @@ -578,7 +583,7 @@ namespace VULKAN_HPP_NAMESPACE { public: using DestructorType = Device; - using deleter = ObjectDestroyShared; + using deleter = detail::ObjectDestroyShared; }; using SharedShaderModule = SharedHandle; @@ -588,7 +593,7 @@ namespace VULKAN_HPP_NAMESPACE { public: using DestructorType = Device; - using deleter = ObjectDestroyShared; + using deleter = detail::ObjectDestroyShared; }; using SharedPipelineCache = SharedHandle; @@ -598,7 +603,7 @@ namespace VULKAN_HPP_NAMESPACE { public: using DestructorType = Device; - using deleter = ObjectDestroyShared; + using deleter = detail::ObjectDestroyShared; }; using SharedPipeline = SharedHandle; @@ -608,7 +613,7 @@ namespace VULKAN_HPP_NAMESPACE { public: using DestructorType = Device; - using deleter = ObjectDestroyShared; + using deleter = detail::ObjectDestroyShared; }; using SharedPipelineLayout = SharedHandle; @@ -618,7 +623,7 @@ namespace VULKAN_HPP_NAMESPACE { public: using DestructorType = Device; - using deleter = ObjectDestroyShared; + using deleter = detail::ObjectDestroyShared; }; using SharedSampler = SharedHandle; @@ -628,7 +633,7 @@ namespace VULKAN_HPP_NAMESPACE { public: using DestructorType = Device; - using deleter = ObjectDestroyShared; + using deleter = detail::ObjectDestroyShared; }; using SharedDescriptorPool = SharedHandle; @@ -638,7 +643,7 @@ namespace VULKAN_HPP_NAMESPACE { public: using DestructorType = Device; - using deleter = PoolFreeShared; + using deleter = detail::PoolFreeShared; }; using SharedDescriptorSet = SharedHandle; @@ -648,7 +653,7 @@ namespace VULKAN_HPP_NAMESPACE { public: using DestructorType = Device; - using deleter = ObjectDestroyShared; + using deleter = detail::ObjectDestroyShared; }; using SharedDescriptorSetLayout = SharedHandle; @@ -658,7 +663,7 @@ namespace VULKAN_HPP_NAMESPACE { public: using DestructorType = Device; - using deleter = ObjectDestroyShared; + using deleter = detail::ObjectDestroyShared; }; using SharedFramebuffer = SharedHandle; @@ -668,7 +673,7 @@ namespace VULKAN_HPP_NAMESPACE { public: using DestructorType = Device; - using deleter = ObjectDestroyShared; + using deleter = detail::ObjectDestroyShared; }; using SharedRenderPass = SharedHandle; @@ -678,7 +683,7 @@ namespace VULKAN_HPP_NAMESPACE { public: using DestructorType = Device; - using deleter = 
ObjectDestroyShared; + using deleter = detail::ObjectDestroyShared; }; using SharedCommandPool = SharedHandle; @@ -688,7 +693,7 @@ namespace VULKAN_HPP_NAMESPACE { public: using DestructorType = Device; - using deleter = PoolFreeShared; + using deleter = detail::PoolFreeShared; }; using SharedCommandBuffer = SharedHandle; @@ -699,7 +704,7 @@ namespace VULKAN_HPP_NAMESPACE { public: using DestructorType = Device; - using deleter = ObjectDestroyShared; + using deleter = detail::ObjectDestroyShared; }; using SharedSamplerYcbcrConversion = SharedHandle; @@ -710,7 +715,7 @@ namespace VULKAN_HPP_NAMESPACE { public: using DestructorType = Device; - using deleter = ObjectDestroyShared; + using deleter = detail::ObjectDestroyShared; }; using SharedDescriptorUpdateTemplate = SharedHandle; @@ -722,7 +727,7 @@ namespace VULKAN_HPP_NAMESPACE { public: using DestructorType = Device; - using deleter = ObjectDestroyShared; + using deleter = detail::ObjectDestroyShared; }; using SharedPrivateDataSlot = SharedHandle; @@ -734,7 +739,7 @@ namespace VULKAN_HPP_NAMESPACE { public: using DestructorType = Instance; - using deleter = ObjectDestroyShared; + using deleter = detail::ObjectDestroyShared; }; using SharedSurfaceKHR = SharedHandle; @@ -745,7 +750,7 @@ namespace VULKAN_HPP_NAMESPACE { public: using DestructorType = Device; - using deleter = ObjectDestroyShared; + using deleter = detail::ObjectDestroyShared; }; using SharedSwapchainKHR = SharedHandle; @@ -756,7 +761,7 @@ namespace VULKAN_HPP_NAMESPACE { public: using DestructorType = PhysicalDevice; - using deleter = ObjectDestroyShared; + using deleter = detail::ObjectDestroyShared; }; using SharedDisplayKHR = SharedHandle; @@ -767,7 +772,7 @@ namespace VULKAN_HPP_NAMESPACE { public: using DestructorType = Instance; - using deleter = ObjectDestroyShared; + using deleter = detail::ObjectDestroyShared; }; using SharedDebugReportCallbackEXT = SharedHandle; @@ -778,7 +783,7 @@ namespace VULKAN_HPP_NAMESPACE { public: using DestructorType = Device; - using deleter = ObjectDestroyShared; + using deleter = detail::ObjectDestroyShared; }; using SharedVideoSessionKHR = SharedHandle; @@ -788,7 +793,7 @@ namespace VULKAN_HPP_NAMESPACE { public: using DestructorType = Device; - using deleter = ObjectDestroyShared; + using deleter = detail::ObjectDestroyShared; }; using SharedVideoSessionParametersKHR = SharedHandle; @@ -799,7 +804,7 @@ namespace VULKAN_HPP_NAMESPACE { public: using DestructorType = Device; - using deleter = ObjectDestroyShared; + using deleter = detail::ObjectDestroyShared; }; using SharedCuModuleNVX = SharedHandle; @@ -809,7 +814,7 @@ namespace VULKAN_HPP_NAMESPACE { public: using DestructorType = Device; - using deleter = ObjectDestroyShared; + using deleter = detail::ObjectDestroyShared; }; using SharedCuFunctionNVX = SharedHandle; @@ -820,7 +825,7 @@ namespace VULKAN_HPP_NAMESPACE { public: using DestructorType = Instance; - using deleter = ObjectDestroyShared; + using deleter = detail::ObjectDestroyShared; }; using SharedDebugUtilsMessengerEXT = SharedHandle; @@ -831,7 +836,7 @@ namespace VULKAN_HPP_NAMESPACE { public: using DestructorType = Device; - using deleter = ObjectDestroyShared; + using deleter = detail::ObjectDestroyShared; }; using SharedAccelerationStructureKHR = SharedHandle; @@ -842,7 +847,7 @@ namespace VULKAN_HPP_NAMESPACE { public: using DestructorType = Device; - using deleter = ObjectDestroyShared; + using deleter = detail::ObjectDestroyShared; }; using SharedValidationCacheEXT = SharedHandle; @@ -853,7 +858,7 @@ namespace 
VULKAN_HPP_NAMESPACE { public: using DestructorType = Device; - using deleter = ObjectDestroyShared; + using deleter = detail::ObjectDestroyShared; }; using SharedAccelerationStructureNV = SharedHandle; @@ -864,7 +869,7 @@ namespace VULKAN_HPP_NAMESPACE { public: using DestructorType = Device; - using deleter = ObjectDestroyShared; + using deleter = detail::ObjectDestroyShared; }; using SharedPerformanceConfigurationINTEL = SharedHandle; @@ -875,7 +880,7 @@ namespace VULKAN_HPP_NAMESPACE { public: using DestructorType = Device; - using deleter = ObjectDestroyShared; + using deleter = detail::ObjectDestroyShared; }; using SharedDeferredOperationKHR = SharedHandle; @@ -886,7 +891,7 @@ namespace VULKAN_HPP_NAMESPACE { public: using DestructorType = Device; - using deleter = ObjectDestroyShared; + using deleter = detail::ObjectDestroyShared; }; using SharedIndirectCommandsLayoutNV = SharedHandle; @@ -898,7 +903,7 @@ namespace VULKAN_HPP_NAMESPACE { public: using DestructorType = Device; - using deleter = ObjectDestroyShared; + using deleter = detail::ObjectDestroyShared; }; using SharedCudaModuleNV = SharedHandle; @@ -908,7 +913,7 @@ namespace VULKAN_HPP_NAMESPACE { public: using DestructorType = Device; - using deleter = ObjectDestroyShared; + using deleter = detail::ObjectDestroyShared; }; using SharedCudaFunctionNV = SharedHandle; @@ -921,7 +926,7 @@ namespace VULKAN_HPP_NAMESPACE { public: using DestructorType = Device; - using deleter = ObjectDestroyShared; + using deleter = detail::ObjectDestroyShared; }; using SharedBufferCollectionFUCHSIA = SharedHandle; @@ -933,7 +938,7 @@ namespace VULKAN_HPP_NAMESPACE { public: using DestructorType = Device; - using deleter = ObjectDestroyShared; + using deleter = detail::ObjectDestroyShared; }; using SharedMicromapEXT = SharedHandle; @@ -944,7 +949,7 @@ namespace VULKAN_HPP_NAMESPACE { public: using DestructorType = Device; - using deleter = ObjectDestroyShared; + using deleter = detail::ObjectDestroyShared; }; using SharedOpticalFlowSessionNV = SharedHandle; @@ -955,7 +960,7 @@ namespace VULKAN_HPP_NAMESPACE { public: using DestructorType = Device; - using deleter = ObjectDestroyShared; + using deleter = detail::ObjectDestroyShared; }; using SharedShaderEXT = SharedHandle; @@ -966,11 +971,32 @@ namespace VULKAN_HPP_NAMESPACE { public: using DestructorType = Device; - using deleter = ObjectDestroyShared; + using deleter = detail::ObjectDestroyShared; }; using SharedPipelineBinaryKHR = SharedHandle; + //=== VK_EXT_device_generated_commands === + template <> + class SharedHandleTraits + { + public: + using DestructorType = Device; + using deleter = detail::ObjectDestroyShared; + }; + + using SharedIndirectCommandsLayoutEXT = SharedHandle; + + template <> + class SharedHandleTraits + { + public: + using DestructorType = Device; + using deleter = detail::ObjectDestroyShared; + }; + + using SharedIndirectExecutionSetEXT = SharedHandle; + enum class SwapchainOwns { no, diff --git a/third_party/vulkan/vulkan_static_assertions.hpp b/third_party/vulkan/vulkan_static_assertions.hpp index 1d0e054..4fa9d6b 100644 --- a/third_party/vulkan/vulkan_static_assertions.hpp +++ b/third_party/vulkan/vulkan_static_assertions.hpp @@ -2540,36 +2540,6 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "ShaderStatisticsInfoAMD is not nothrow_move_constructible!" 
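/*
 * [Editorial sketch, not part of the upstream patch] The detail::ObjectDestroyShared,
 * detail::ObjectFreeShared, detail::ObjectReleaseShared and detail::PoolFreeShared
 * deleters above are only consumed indirectly, through the SharedHandleTraits<T>::deleter
 * aliases and vk::SharedHandle. A minimal usage sketch, assuming exceptions and the
 * default dispatcher are enabled and that a valid vk::Device already exists:
 *
 *   #include <vulkan/vulkan_shared.hpp>
 *
 *   void sharedHandleSketch( vk::Device device )
 *   {
 *     vk::SharedDevice sharedDevice{ device };                  // Device has NoDestructor, so it is refcounted directly
 *     vk::Fence        rawFence = device.createFence( vk::FenceCreateInfo{} );
 *     vk::SharedFence  fence{ rawFence, sharedDevice };         // destroyed via detail::ObjectDestroyShared<Fence>
 *     // 'fence' keeps 'sharedDevice' alive; the last copy destroys the VkFence,
 *     // then drops its reference on the parent device.
 *   }
 */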
); -//=== VK_KHR_dynamic_rendering === - -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderingFragmentShadingRateAttachmentInfoKHR ) == - sizeof( VkRenderingFragmentShadingRateAttachmentInfoKHR ), - "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, - "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "RenderingFragmentShadingRateAttachmentInfoKHR is not nothrow_move_constructible!" ); - -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderingFragmentDensityMapAttachmentInfoEXT ) == - sizeof( VkRenderingFragmentDensityMapAttachmentInfoEXT ), - "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, - "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "RenderingFragmentDensityMapAttachmentInfoEXT is not nothrow_move_constructible!" ); - -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AttachmentSampleCountInfoAMD ) == sizeof( VkAttachmentSampleCountInfoAMD ), - "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "AttachmentSampleCountInfoAMD is not nothrow_move_constructible!" ); - -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MultiviewPerViewAttributesInfoNVX ) == sizeof( VkMultiviewPerViewAttributesInfoNVX ), - "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "MultiviewPerViewAttributesInfoNVX is not nothrow_move_constructible!" ); - #if defined( VK_USE_PLATFORM_GGP ) //=== VK_GGP_stream_descriptor_surface === @@ -2924,6 +2894,12 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX is not nothrow_move_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MultiviewPerViewAttributesInfoNVX ) == sizeof( VkMultiviewPerViewAttributesInfoNVX ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "MultiviewPerViewAttributesInfoNVX is not nothrow_move_constructible!" ); + //=== VK_NV_viewport_swizzle === VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ViewportSwizzleNV ) == sizeof( VkViewportSwizzleNV ), "struct and wrapper have different size!" ); @@ -3326,6 +3302,14 @@ VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "AttachmentSampleCountInfoAMD is not nothrow_move_constructible!" ); + //=== VK_EXT_sample_locations === VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SampleLocationEXT ) == sizeof( VkSampleLocationEXT ), "struct and wrapper have different size!" ); @@ -4121,6 +4105,18 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "CheckpointDataNV is not nothrow_move_constructible!" 
); +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointProperties2NV ) == sizeof( VkQueueFamilyCheckpointProperties2NV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "QueueFamilyCheckpointProperties2NV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CheckpointData2NV ) == sizeof( VkCheckpointData2NV ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "CheckpointData2NV is not nothrow_move_constructible!" ); + //=== VK_INTEL_shader_integer_functions2 === VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL ) == @@ -4260,6 +4256,14 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "RenderPassFragmentDensityMapCreateInfoEXT is not nothrow_move_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderingFragmentDensityMapAttachmentInfoEXT ) == + sizeof( VkRenderingFragmentDensityMapAttachmentInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "RenderingFragmentDensityMapAttachmentInfoEXT is not nothrow_move_constructible!" ); + //=== VK_KHR_fragment_shading_rate === VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::FragmentShadingRateAttachmentInfoKHR ) == sizeof( VkFragmentShadingRateAttachmentInfoKHR ), @@ -4300,6 +4304,14 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "PhysicalDeviceFragmentShadingRateKHR is not nothrow_move_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderingFragmentShadingRateAttachmentInfoKHR ) == + sizeof( VkRenderingFragmentShadingRateAttachmentInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "RenderingFragmentShadingRateAttachmentInfoKHR is not nothrow_move_constructible!" ); + //=== VK_AMD_shader_core_properties2 === VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreProperties2AMD ) == sizeof( VkPhysicalDeviceShaderCoreProperties2AMD ), @@ -5287,20 +5299,6 @@ VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "QueueFamilyCheckpointProperties2NV is not nothrow_move_constructible!" ); - -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CheckpointData2NV ) == sizeof( VkCheckpointData2NV ), "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "CheckpointData2NV is not nothrow_move_constructible!" 
); - //=== VK_EXT_descriptor_buffer === VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorBufferPropertiesEXT ) == @@ -5784,6 +5782,16 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT is not nothrow_move_constructible!" ); +//=== VK_EXT_present_mode_fifo_latest_ready === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePresentModeFifoLatestReadyFeaturesEXT ) == + sizeof( VkPhysicalDevicePresentModeFifoLatestReadyFeaturesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDevicePresentModeFifoLatestReadyFeaturesEXT is not nothrow_move_constructible!" ); + #if defined( VK_USE_PLATFORM_FUCHSIA ) //=== VK_FUCHSIA_external_memory === @@ -7759,6 +7767,176 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "PhysicalDeviceRayTracingValidationFeaturesNV is not nothrow_move_constructible!" ); +//=== VK_EXT_device_generated_commands === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT ) == + sizeof( VkPhysicalDeviceDeviceGeneratedCommandsFeaturesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsPropertiesEXT ) == + sizeof( VkPhysicalDeviceDeviceGeneratedCommandsPropertiesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceDeviceGeneratedCommandsPropertiesEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoEXT ) == sizeof( VkGeneratedCommandsMemoryRequirementsInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "GeneratedCommandsMemoryRequirementsInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::IndirectExecutionSetCreateInfoEXT ) == sizeof( VkIndirectExecutionSetCreateInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "IndirectExecutionSetCreateInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::IndirectExecutionSetInfoEXT ) == sizeof( VkIndirectExecutionSetInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "IndirectExecutionSetInfoEXT is not nothrow_move_constructible!" 
); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::IndirectExecutionSetPipelineInfoEXT ) == sizeof( VkIndirectExecutionSetPipelineInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "IndirectExecutionSetPipelineInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::IndirectExecutionSetShaderInfoEXT ) == sizeof( VkIndirectExecutionSetShaderInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "IndirectExecutionSetShaderInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoEXT ) == sizeof( VkGeneratedCommandsInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "GeneratedCommandsInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::WriteIndirectExecutionSetPipelineEXT ) == sizeof( VkWriteIndirectExecutionSetPipelineEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "WriteIndirectExecutionSetPipelineEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoEXT ) == sizeof( VkIndirectCommandsLayoutCreateInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "IndirectCommandsLayoutCreateInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenEXT ) == sizeof( VkIndirectCommandsLayoutTokenEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "IndirectCommandsLayoutTokenEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DrawIndirectCountIndirectCommandEXT ) == sizeof( VkDrawIndirectCountIndirectCommandEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DrawIndirectCountIndirectCommandEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::IndirectCommandsVertexBufferTokenEXT ) == sizeof( VkIndirectCommandsVertexBufferTokenEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "IndirectCommandsVertexBufferTokenEXT is not nothrow_move_constructible!" 
); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindVertexBufferIndirectCommandEXT ) == sizeof( VkBindVertexBufferIndirectCommandEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "BindVertexBufferIndirectCommandEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::IndirectCommandsIndexBufferTokenEXT ) == sizeof( VkIndirectCommandsIndexBufferTokenEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "IndirectCommandsIndexBufferTokenEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindIndexBufferIndirectCommandEXT ) == sizeof( VkBindIndexBufferIndirectCommandEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "BindIndexBufferIndirectCommandEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::IndirectCommandsPushConstantTokenEXT ) == sizeof( VkIndirectCommandsPushConstantTokenEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "IndirectCommandsPushConstantTokenEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::IndirectCommandsExecutionSetTokenEXT ) == sizeof( VkIndirectCommandsExecutionSetTokenEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "IndirectCommandsExecutionSetTokenEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::IndirectCommandsTokenDataEXT ) == sizeof( VkIndirectCommandsTokenDataEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "IndirectCommandsTokenDataEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT ) == sizeof( VkIndirectCommandsLayoutEXT ), + "handle and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_copy_constructible::value, + "IndirectCommandsLayoutEXT is not copy_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "IndirectCommandsLayoutEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT ) == sizeof( VkIndirectExecutionSetEXT ), + "handle and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_copy_constructible::value, + "IndirectExecutionSetEXT is not copy_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "IndirectExecutionSetEXT is not nothrow_move_constructible!" 
); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::IndirectExecutionSetShaderLayoutInfoEXT ) == sizeof( VkIndirectExecutionSetShaderLayoutInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "IndirectExecutionSetShaderLayoutInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::GeneratedCommandsPipelineInfoEXT ) == sizeof( VkGeneratedCommandsPipelineInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "GeneratedCommandsPipelineInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::GeneratedCommandsShaderInfoEXT ) == sizeof( VkGeneratedCommandsShaderInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "GeneratedCommandsShaderInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::WriteIndirectExecutionSetShaderEXT ) == sizeof( VkWriteIndirectExecutionSetShaderEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "WriteIndirectExecutionSetShaderEXT is not nothrow_move_constructible!" ); + //=== VK_MESA_image_alignment_control === VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageAlignmentControlFeaturesMESA ) == @@ -7784,4 +7962,66 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "ImageAlignmentControlCreateInfoMESA is not nothrow_move_constructible!" ); +//=== VK_EXT_depth_clamp_control === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClampControlFeaturesEXT ) == sizeof( VkPhysicalDeviceDepthClampControlFeaturesEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceDepthClampControlFeaturesEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineViewportDepthClampControlCreateInfoEXT ) == + sizeof( VkPipelineViewportDepthClampControlCreateInfoEXT ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PipelineViewportDepthClampControlCreateInfoEXT is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DepthClampRangeEXT ) == sizeof( VkDepthClampRangeEXT ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DepthClampRangeEXT is not nothrow_move_constructible!" 
); + +//=== VK_HUAWEI_hdr_vivid === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceHdrVividFeaturesHUAWEI ) == sizeof( VkPhysicalDeviceHdrVividFeaturesHUAWEI ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceHdrVividFeaturesHUAWEI is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::HdrVividDynamicMetadataHUAWEI ) == sizeof( VkHdrVividDynamicMetadataHUAWEI ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "HdrVividDynamicMetadataHUAWEI is not nothrow_move_constructible!" ); + +//=== VK_NV_cooperative_matrix2 === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CooperativeMatrixFlexibleDimensionsPropertiesNV ) == + sizeof( VkCooperativeMatrixFlexibleDimensionsPropertiesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "CooperativeMatrixFlexibleDimensionsPropertiesNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrix2FeaturesNV ) == sizeof( VkPhysicalDeviceCooperativeMatrix2FeaturesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceCooperativeMatrix2FeaturesNV is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrix2PropertiesNV ) == + sizeof( VkPhysicalDeviceCooperativeMatrix2PropertiesNV ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceCooperativeMatrix2PropertiesNV is not nothrow_move_constructible!" 
); + #endif diff --git a/third_party/vulkan/vulkan_structs.hpp b/third_party/vulkan/vulkan_structs.hpp index 5f6391c..1bc9452 100644 --- a/third_party/vulkan/vulkan_structs.hpp +++ b/third_party/vulkan/vulkan_structs.hpp @@ -8695,6 +8695,103 @@ namespace VULKAN_HPP_NAMESPACE using BindImagePlaneMemoryInfoKHR = BindImagePlaneMemoryInfo; + struct BindIndexBufferIndirectCommandEXT + { + using NativeType = VkBindIndexBufferIndirectCommandEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + BindIndexBufferIndirectCommandEXT( VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress_ = {}, + uint32_t size_ = {}, + VULKAN_HPP_NAMESPACE::IndexType indexType_ = VULKAN_HPP_NAMESPACE::IndexType::eUint16 ) VULKAN_HPP_NOEXCEPT + : bufferAddress{ bufferAddress_ } + , size{ size_ } + , indexType{ indexType_ } + { + } + + VULKAN_HPP_CONSTEXPR BindIndexBufferIndirectCommandEXT( BindIndexBufferIndirectCommandEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BindIndexBufferIndirectCommandEXT( VkBindIndexBufferIndirectCommandEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : BindIndexBufferIndirectCommandEXT( *reinterpret_cast( &rhs ) ) + { + } + + BindIndexBufferIndirectCommandEXT & operator=( BindIndexBufferIndirectCommandEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + BindIndexBufferIndirectCommandEXT & operator=( VkBindIndexBufferIndirectCommandEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BindIndexBufferIndirectCommandEXT & setBufferAddress( VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress_ ) VULKAN_HPP_NOEXCEPT + { + bufferAddress = bufferAddress_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindIndexBufferIndirectCommandEXT & setSize( uint32_t size_ ) VULKAN_HPP_NOEXCEPT + { + size = size_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindIndexBufferIndirectCommandEXT & setIndexType( VULKAN_HPP_NAMESPACE::IndexType indexType_ ) VULKAN_HPP_NOEXCEPT + { + indexType = indexType_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkBindIndexBufferIndirectCommandEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBindIndexBufferIndirectCommandEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( bufferAddress, size, indexType ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BindIndexBufferIndirectCommandEXT const & ) const = default; +#else + bool operator==( BindIndexBufferIndirectCommandEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( bufferAddress == rhs.bufferAddress ) && ( size == rhs.size ) && ( indexType == rhs.indexType ); +# endif + } + + bool operator!=( BindIndexBufferIndirectCommandEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress = {}; + uint32_t size = {}; + VULKAN_HPP_NAMESPACE::IndexType indexType = VULKAN_HPP_NAMESPACE::IndexType::eUint16; + }; + struct BindIndexBufferIndirectCommandNV { using NativeType = VkBindIndexBufferIndirectCommandNV; @@ -10179,6 +10276,102 @@ namespace VULKAN_HPP_NAMESPACE 
using Type = BindSparseInfo; }; + struct BindVertexBufferIndirectCommandEXT + { + using NativeType = VkBindVertexBufferIndirectCommandEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR BindVertexBufferIndirectCommandEXT( VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress_ = {}, + uint32_t size_ = {}, + uint32_t stride_ = {} ) VULKAN_HPP_NOEXCEPT + : bufferAddress{ bufferAddress_ } + , size{ size_ } + , stride{ stride_ } + { + } + + VULKAN_HPP_CONSTEXPR BindVertexBufferIndirectCommandEXT( BindVertexBufferIndirectCommandEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + BindVertexBufferIndirectCommandEXT( VkBindVertexBufferIndirectCommandEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : BindVertexBufferIndirectCommandEXT( *reinterpret_cast( &rhs ) ) + { + } + + BindVertexBufferIndirectCommandEXT & operator=( BindVertexBufferIndirectCommandEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + BindVertexBufferIndirectCommandEXT & operator=( VkBindVertexBufferIndirectCommandEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 BindVertexBufferIndirectCommandEXT & setBufferAddress( VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress_ ) VULKAN_HPP_NOEXCEPT + { + bufferAddress = bufferAddress_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindVertexBufferIndirectCommandEXT & setSize( uint32_t size_ ) VULKAN_HPP_NOEXCEPT + { + size = size_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 BindVertexBufferIndirectCommandEXT & setStride( uint32_t stride_ ) VULKAN_HPP_NOEXCEPT + { + stride = stride_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkBindVertexBufferIndirectCommandEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkBindVertexBufferIndirectCommandEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( bufferAddress, size, stride ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( BindVertexBufferIndirectCommandEXT const & ) const = default; +#else + bool operator==( BindVertexBufferIndirectCommandEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( bufferAddress == rhs.bufferAddress ) && ( size == rhs.size ) && ( stride == rhs.stride ); +# endif + } + + bool operator!=( BindVertexBufferIndirectCommandEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress = {}; + uint32_t size = {}; + uint32_t stride = {}; + }; + struct BindVertexBufferIndirectCommandNV { using NativeType = VkBindVertexBufferIndirectCommandNV; @@ -16699,6 +16892,133 @@ namespace VULKAN_HPP_NAMESPACE using ConformanceVersionKHR = ConformanceVersion; + struct CooperativeMatrixFlexibleDimensionsPropertiesNV + { + using NativeType = VkCooperativeMatrixFlexibleDimensionsPropertiesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCooperativeMatrixFlexibleDimensionsPropertiesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + 
CooperativeMatrixFlexibleDimensionsPropertiesNV( uint32_t MGranularity_ = {}, + uint32_t NGranularity_ = {}, + uint32_t KGranularity_ = {}, + VULKAN_HPP_NAMESPACE::ComponentTypeKHR AType_ = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16, + VULKAN_HPP_NAMESPACE::ComponentTypeKHR BType_ = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16, + VULKAN_HPP_NAMESPACE::ComponentTypeKHR CType_ = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16, + VULKAN_HPP_NAMESPACE::ComponentTypeKHR ResultType_ = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16, + VULKAN_HPP_NAMESPACE::Bool32 saturatingAccumulation_ = {}, + VULKAN_HPP_NAMESPACE::ScopeKHR scope_ = VULKAN_HPP_NAMESPACE::ScopeKHR::eDevice, + uint32_t workgroupInvocations_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , MGranularity{ MGranularity_ } + , NGranularity{ NGranularity_ } + , KGranularity{ KGranularity_ } + , AType{ AType_ } + , BType{ BType_ } + , CType{ CType_ } + , ResultType{ ResultType_ } + , saturatingAccumulation{ saturatingAccumulation_ } + , scope{ scope_ } + , workgroupInvocations{ workgroupInvocations_ } + { + } + + VULKAN_HPP_CONSTEXPR + CooperativeMatrixFlexibleDimensionsPropertiesNV( CooperativeMatrixFlexibleDimensionsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CooperativeMatrixFlexibleDimensionsPropertiesNV( VkCooperativeMatrixFlexibleDimensionsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : CooperativeMatrixFlexibleDimensionsPropertiesNV( *reinterpret_cast( &rhs ) ) + { + } + + CooperativeMatrixFlexibleDimensionsPropertiesNV & operator=( CooperativeMatrixFlexibleDimensionsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + CooperativeMatrixFlexibleDimensionsPropertiesNV & operator=( VkCooperativeMatrixFlexibleDimensionsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkCooperativeMatrixFlexibleDimensionsPropertiesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCooperativeMatrixFlexibleDimensionsPropertiesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( + sType, pNext, MGranularity, NGranularity, KGranularity, AType, BType, CType, ResultType, saturatingAccumulation, scope, workgroupInvocations ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CooperativeMatrixFlexibleDimensionsPropertiesNV const & ) const = default; +#else + bool operator==( CooperativeMatrixFlexibleDimensionsPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( MGranularity == rhs.MGranularity ) && ( NGranularity == rhs.NGranularity ) && + ( KGranularity == rhs.KGranularity ) && ( AType == rhs.AType ) && ( BType == rhs.BType ) && ( CType == rhs.CType ) && + ( ResultType == rhs.ResultType ) && ( saturatingAccumulation == rhs.saturatingAccumulation ) && ( scope == rhs.scope ) && + ( workgroupInvocations == rhs.workgroupInvocations ); +# endif + } + + bool operator!=( CooperativeMatrixFlexibleDimensionsPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType 
= StructureType::eCooperativeMatrixFlexibleDimensionsPropertiesNV; + void * pNext = {}; + uint32_t MGranularity = {}; + uint32_t NGranularity = {}; + uint32_t KGranularity = {}; + VULKAN_HPP_NAMESPACE::ComponentTypeKHR AType = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16; + VULKAN_HPP_NAMESPACE::ComponentTypeKHR BType = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16; + VULKAN_HPP_NAMESPACE::ComponentTypeKHR CType = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16; + VULKAN_HPP_NAMESPACE::ComponentTypeKHR ResultType = VULKAN_HPP_NAMESPACE::ComponentTypeKHR::eFloat16; + VULKAN_HPP_NAMESPACE::Bool32 saturatingAccumulation = {}; + VULKAN_HPP_NAMESPACE::ScopeKHR scope = VULKAN_HPP_NAMESPACE::ScopeKHR::eDevice; + uint32_t workgroupInvocations = {}; + }; + + template <> + struct CppType + { + using Type = CooperativeMatrixFlexibleDimensionsPropertiesNV; + }; + struct CooperativeMatrixPropertiesKHR { using NativeType = VkCooperativeMatrixPropertiesKHR; @@ -23509,6 +23829,89 @@ namespace VULKAN_HPP_NAMESPACE using Type = DepthBiasRepresentationInfoEXT; }; + struct DepthClampRangeEXT + { + using NativeType = VkDepthClampRangeEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DepthClampRangeEXT( float minDepthClamp_ = {}, float maxDepthClamp_ = {} ) VULKAN_HPP_NOEXCEPT + : minDepthClamp{ minDepthClamp_ } + , maxDepthClamp{ maxDepthClamp_ } + { + } + + VULKAN_HPP_CONSTEXPR DepthClampRangeEXT( DepthClampRangeEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DepthClampRangeEXT( VkDepthClampRangeEXT const & rhs ) VULKAN_HPP_NOEXCEPT : DepthClampRangeEXT( *reinterpret_cast( &rhs ) ) {} + + DepthClampRangeEXT & operator=( DepthClampRangeEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DepthClampRangeEXT & operator=( VkDepthClampRangeEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DepthClampRangeEXT & setMinDepthClamp( float minDepthClamp_ ) VULKAN_HPP_NOEXCEPT + { + minDepthClamp = minDepthClamp_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DepthClampRangeEXT & setMaxDepthClamp( float maxDepthClamp_ ) VULKAN_HPP_NOEXCEPT + { + maxDepthClamp = maxDepthClamp_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkDepthClampRangeEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDepthClampRangeEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( minDepthClamp, maxDepthClamp ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DepthClampRangeEXT const & ) const = default; +#else + bool operator==( DepthClampRangeEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( minDepthClamp == rhs.minDepthClamp ) && ( maxDepthClamp == rhs.maxDepthClamp ); +# endif + } + + bool operator!=( DepthClampRangeEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + float minDepthClamp = {}; + float maxDepthClamp = {}; + }; + struct DescriptorAddressInfoEXT { using NativeType = VkDescriptorAddressInfoEXT; @@ -33201,6 +33604,102 @@ namespace VULKAN_HPP_NAMESPACE uint32_t firstInstance 
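/*
 * [Editorial sketch, not part of the upstream patch] The new VK_EXT_depth_clamp_control
 * wrapper DepthClampRangeEXT above follows the usual Vulkan-Hpp fluent-setter pattern,
 * so a user-defined clamp range can be built like any other struct (the surrounding
 * pipeline setup is assumed and omitted here):
 *
 *   vk::DepthClampRangeEXT clampRange = vk::DepthClampRangeEXT{}
 *                                         .setMinDepthClamp( 0.0f )
 *                                         .setMaxDepthClamp( 1.0f );
 */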
= {}; }; + struct DrawIndirectCountIndirectCommandEXT + { + using NativeType = VkDrawIndirectCountIndirectCommandEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DrawIndirectCountIndirectCommandEXT( VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress_ = {}, + uint32_t stride_ = {}, + uint32_t commandCount_ = {} ) VULKAN_HPP_NOEXCEPT + : bufferAddress{ bufferAddress_ } + , stride{ stride_ } + , commandCount{ commandCount_ } + { + } + + VULKAN_HPP_CONSTEXPR DrawIndirectCountIndirectCommandEXT( DrawIndirectCountIndirectCommandEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DrawIndirectCountIndirectCommandEXT( VkDrawIndirectCountIndirectCommandEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : DrawIndirectCountIndirectCommandEXT( *reinterpret_cast( &rhs ) ) + { + } + + DrawIndirectCountIndirectCommandEXT & operator=( DrawIndirectCountIndirectCommandEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DrawIndirectCountIndirectCommandEXT & operator=( VkDrawIndirectCountIndirectCommandEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DrawIndirectCountIndirectCommandEXT & setBufferAddress( VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress_ ) VULKAN_HPP_NOEXCEPT + { + bufferAddress = bufferAddress_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DrawIndirectCountIndirectCommandEXT & setStride( uint32_t stride_ ) VULKAN_HPP_NOEXCEPT + { + stride = stride_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DrawIndirectCountIndirectCommandEXT & setCommandCount( uint32_t commandCount_ ) VULKAN_HPP_NOEXCEPT + { + commandCount = commandCount_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkDrawIndirectCountIndirectCommandEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDrawIndirectCountIndirectCommandEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( bufferAddress, stride, commandCount ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DrawIndirectCountIndirectCommandEXT const & ) const = default; +#else + bool operator==( DrawIndirectCountIndirectCommandEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( bufferAddress == rhs.bufferAddress ) && ( stride == rhs.stride ) && ( commandCount == rhs.commandCount ); +# endif + } + + bool operator!=( DrawIndirectCountIndirectCommandEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress = {}; + uint32_t stride = {}; + uint32_t commandCount = {}; + }; + struct DrawMeshTasksIndirectCommandEXT { using NativeType = VkDrawMeshTasksIndirectCommandEXT; @@ -34133,9 +34632,14 @@ namespace VULKAN_HPP_NAMESPACE static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExecutionGraphPipelineScratchSizeAMDX; # if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR ExecutionGraphPipelineScratchSizeAMDX( VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR 
ExecutionGraphPipelineScratchSizeAMDX( VULKAN_HPP_NAMESPACE::DeviceSize minSize_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize maxSize_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize sizeGranularity_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } - , size{ size_ } + , minSize{ minSize_ } + , maxSize{ maxSize_ } + , sizeGranularity{ sizeGranularity_ } { } @@ -34162,9 +34666,21 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - VULKAN_HPP_CONSTEXPR_14 ExecutionGraphPipelineScratchSizeAMDX & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ExecutionGraphPipelineScratchSizeAMDX & setMinSize( VULKAN_HPP_NAMESPACE::DeviceSize minSize_ ) VULKAN_HPP_NOEXCEPT { - size = size_; + minSize = minSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ExecutionGraphPipelineScratchSizeAMDX & setMaxSize( VULKAN_HPP_NAMESPACE::DeviceSize maxSize_ ) VULKAN_HPP_NOEXCEPT + { + maxSize = maxSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 ExecutionGraphPipelineScratchSizeAMDX & setSizeGranularity( VULKAN_HPP_NAMESPACE::DeviceSize sizeGranularity_ ) VULKAN_HPP_NOEXCEPT + { + sizeGranularity = sizeGranularity_; return *this; } # endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ @@ -34183,11 +34699,15 @@ namespace VULKAN_HPP_NAMESPACE # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, size ); + return std::tie( sType, pNext, minSize, maxSize, sizeGranularity ); } # endif @@ -34199,7 +34719,8 @@ namespace VULKAN_HPP_NAMESPACE # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( size == rhs.size ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( minSize == rhs.minSize ) && ( maxSize == rhs.maxSize ) && + ( sizeGranularity == rhs.sizeGranularity ); # endif } @@ -34210,9 +34731,11 @@ namespace VULKAN_HPP_NAMESPACE # endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExecutionGraphPipelineScratchSizeAMDX; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::DeviceSize size = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExecutionGraphPipelineScratchSizeAMDX; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceSize minSize = {}; + VULKAN_HPP_NAMESPACE::DeviceSize maxSize = {}; + VULKAN_HPP_NAMESPACE::DeviceSize sizeGranularity = {}; }; template <> @@ -38834,6 +39357,211 @@ namespace VULKAN_HPP_NAMESPACE using Type = FramebufferMixedSamplesCombinationNV; }; + struct GeneratedCommandsInfoEXT + { + using NativeType = VkGeneratedCommandsInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGeneratedCommandsInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR GeneratedCommandsInfoEXT( VULKAN_HPP_NAMESPACE::ShaderStageFlags shaderStages_ = {}, + VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT indirectExecutionSet_ = {}, + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT indirectCommandsLayout_ = {}, + VULKAN_HPP_NAMESPACE::DeviceAddress indirectAddress_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize indirectAddressSize_ = {}, + VULKAN_HPP_NAMESPACE::DeviceAddress preprocessAddress_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize preprocessSize_ = {}, + uint32_t maxSequenceCount_ = {}, + VULKAN_HPP_NAMESPACE::DeviceAddress sequenceCountAddress_ = {}, + uint32_t maxDrawCount_ = {}, + const void * pNext_ = nullptr ) 
VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , shaderStages{ shaderStages_ } + , indirectExecutionSet{ indirectExecutionSet_ } + , indirectCommandsLayout{ indirectCommandsLayout_ } + , indirectAddress{ indirectAddress_ } + , indirectAddressSize{ indirectAddressSize_ } + , preprocessAddress{ preprocessAddress_ } + , preprocessSize{ preprocessSize_ } + , maxSequenceCount{ maxSequenceCount_ } + , sequenceCountAddress{ sequenceCountAddress_ } + , maxDrawCount{ maxDrawCount_ } + { + } + + VULKAN_HPP_CONSTEXPR GeneratedCommandsInfoEXT( GeneratedCommandsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + GeneratedCommandsInfoEXT( VkGeneratedCommandsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : GeneratedCommandsInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + GeneratedCommandsInfoEXT & operator=( GeneratedCommandsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + GeneratedCommandsInfoEXT & operator=( VkGeneratedCommandsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoEXT & setShaderStages( VULKAN_HPP_NAMESPACE::ShaderStageFlags shaderStages_ ) VULKAN_HPP_NOEXCEPT + { + shaderStages = shaderStages_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoEXT & + setIndirectExecutionSet( VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT indirectExecutionSet_ ) VULKAN_HPP_NOEXCEPT + { + indirectExecutionSet = indirectExecutionSet_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoEXT & + setIndirectCommandsLayout( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT indirectCommandsLayout_ ) VULKAN_HPP_NOEXCEPT + { + indirectCommandsLayout = indirectCommandsLayout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoEXT & setIndirectAddress( VULKAN_HPP_NAMESPACE::DeviceAddress indirectAddress_ ) VULKAN_HPP_NOEXCEPT + { + indirectAddress = indirectAddress_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoEXT & setIndirectAddressSize( VULKAN_HPP_NAMESPACE::DeviceSize indirectAddressSize_ ) VULKAN_HPP_NOEXCEPT + { + indirectAddressSize = indirectAddressSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoEXT & setPreprocessAddress( VULKAN_HPP_NAMESPACE::DeviceAddress preprocessAddress_ ) VULKAN_HPP_NOEXCEPT + { + preprocessAddress = preprocessAddress_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoEXT & setPreprocessSize( VULKAN_HPP_NAMESPACE::DeviceSize preprocessSize_ ) VULKAN_HPP_NOEXCEPT + { + preprocessSize = preprocessSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoEXT & setMaxSequenceCount( uint32_t maxSequenceCount_ ) VULKAN_HPP_NOEXCEPT + { + maxSequenceCount = maxSequenceCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoEXT & setSequenceCountAddress( VULKAN_HPP_NAMESPACE::DeviceAddress sequenceCountAddress_ ) VULKAN_HPP_NOEXCEPT + { + sequenceCountAddress = sequenceCountAddress_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoEXT & setMaxDrawCount( uint32_t maxDrawCount_ ) VULKAN_HPP_NOEXCEPT + { + maxDrawCount = maxDrawCount_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkGeneratedCommandsInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { 
+ return *reinterpret_cast( this ); + } + + operator VkGeneratedCommandsInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + shaderStages, + indirectExecutionSet, + indirectCommandsLayout, + indirectAddress, + indirectAddressSize, + preprocessAddress, + preprocessSize, + maxSequenceCount, + sequenceCountAddress, + maxDrawCount ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( GeneratedCommandsInfoEXT const & ) const = default; +#else + bool operator==( GeneratedCommandsInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderStages == rhs.shaderStages ) && ( indirectExecutionSet == rhs.indirectExecutionSet ) && + ( indirectCommandsLayout == rhs.indirectCommandsLayout ) && ( indirectAddress == rhs.indirectAddress ) && + ( indirectAddressSize == rhs.indirectAddressSize ) && ( preprocessAddress == rhs.preprocessAddress ) && ( preprocessSize == rhs.preprocessSize ) && + ( maxSequenceCount == rhs.maxSequenceCount ) && ( sequenceCountAddress == rhs.sequenceCountAddress ) && ( maxDrawCount == rhs.maxDrawCount ); +# endif + } + + bool operator!=( GeneratedCommandsInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeneratedCommandsInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ShaderStageFlags shaderStages = {}; + VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT indirectExecutionSet = {}; + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT indirectCommandsLayout = {}; + VULKAN_HPP_NAMESPACE::DeviceAddress indirectAddress = {}; + VULKAN_HPP_NAMESPACE::DeviceSize indirectAddressSize = {}; + VULKAN_HPP_NAMESPACE::DeviceAddress preprocessAddress = {}; + VULKAN_HPP_NAMESPACE::DeviceSize preprocessSize = {}; + uint32_t maxSequenceCount = {}; + VULKAN_HPP_NAMESPACE::DeviceAddress sequenceCountAddress = {}; + uint32_t maxDrawCount = {}; + }; + + template <> + struct CppType + { + using Type = GeneratedCommandsInfoEXT; + }; + struct IndirectCommandsStreamNV { using NativeType = VkIndirectCommandsStreamNV; @@ -39203,6 +39931,138 @@ namespace VULKAN_HPP_NAMESPACE using Type = GeneratedCommandsInfoNV; }; + struct GeneratedCommandsMemoryRequirementsInfoEXT + { + using NativeType = VkGeneratedCommandsMemoryRequirementsInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGeneratedCommandsMemoryRequirementsInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR GeneratedCommandsMemoryRequirementsInfoEXT( VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT indirectExecutionSet_ = {}, + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT indirectCommandsLayout_ = {}, + uint32_t maxSequenceCount_ = {}, + uint32_t maxDrawCount_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , indirectExecutionSet{ indirectExecutionSet_ } + , indirectCommandsLayout{ indirectCommandsLayout_ } + , maxSequenceCount{ maxSequenceCount_ } + , maxDrawCount{ maxDrawCount_ } + { + } + + VULKAN_HPP_CONSTEXPR GeneratedCommandsMemoryRequirementsInfoEXT( 
GeneratedCommandsMemoryRequirementsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + GeneratedCommandsMemoryRequirementsInfoEXT( VkGeneratedCommandsMemoryRequirementsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : GeneratedCommandsMemoryRequirementsInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + GeneratedCommandsMemoryRequirementsInfoEXT & operator=( GeneratedCommandsMemoryRequirementsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + GeneratedCommandsMemoryRequirementsInfoEXT & operator=( VkGeneratedCommandsMemoryRequirementsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsMemoryRequirementsInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsMemoryRequirementsInfoEXT & + setIndirectExecutionSet( VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT indirectExecutionSet_ ) VULKAN_HPP_NOEXCEPT + { + indirectExecutionSet = indirectExecutionSet_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsMemoryRequirementsInfoEXT & + setIndirectCommandsLayout( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT indirectCommandsLayout_ ) VULKAN_HPP_NOEXCEPT + { + indirectCommandsLayout = indirectCommandsLayout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsMemoryRequirementsInfoEXT & setMaxSequenceCount( uint32_t maxSequenceCount_ ) VULKAN_HPP_NOEXCEPT + { + maxSequenceCount = maxSequenceCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsMemoryRequirementsInfoEXT & setMaxDrawCount( uint32_t maxDrawCount_ ) VULKAN_HPP_NOEXCEPT + { + maxDrawCount = maxDrawCount_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkGeneratedCommandsMemoryRequirementsInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkGeneratedCommandsMemoryRequirementsInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, indirectExecutionSet, indirectCommandsLayout, maxSequenceCount, maxDrawCount ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( GeneratedCommandsMemoryRequirementsInfoEXT const & ) const = default; +#else + bool operator==( GeneratedCommandsMemoryRequirementsInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( indirectExecutionSet == rhs.indirectExecutionSet ) && + ( indirectCommandsLayout == rhs.indirectCommandsLayout ) && ( maxSequenceCount == rhs.maxSequenceCount ) && ( maxDrawCount == rhs.maxDrawCount ); +# endif + } + + bool operator!=( GeneratedCommandsMemoryRequirementsInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeneratedCommandsMemoryRequirementsInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT indirectExecutionSet = {}; + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT indirectCommandsLayout = {}; + uint32_t maxSequenceCount = {}; + uint32_t maxDrawCount = 
{}; + }; + + template <> + struct CppType + { + using Type = GeneratedCommandsMemoryRequirementsInfoEXT; + }; + struct GeneratedCommandsMemoryRequirementsInfoNV { using NativeType = VkGeneratedCommandsMemoryRequirementsInfoNV; @@ -39336,6 +40196,226 @@ namespace VULKAN_HPP_NAMESPACE using Type = GeneratedCommandsMemoryRequirementsInfoNV; }; + struct GeneratedCommandsPipelineInfoEXT + { + using NativeType = VkGeneratedCommandsPipelineInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGeneratedCommandsPipelineInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR GeneratedCommandsPipelineInfoEXT( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pipeline{ pipeline_ } + { + } + + VULKAN_HPP_CONSTEXPR GeneratedCommandsPipelineInfoEXT( GeneratedCommandsPipelineInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + GeneratedCommandsPipelineInfoEXT( VkGeneratedCommandsPipelineInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : GeneratedCommandsPipelineInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + GeneratedCommandsPipelineInfoEXT & operator=( GeneratedCommandsPipelineInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + GeneratedCommandsPipelineInfoEXT & operator=( VkGeneratedCommandsPipelineInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsPipelineInfoEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsPipelineInfoEXT & setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT + { + pipeline = pipeline_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkGeneratedCommandsPipelineInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkGeneratedCommandsPipelineInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pipeline ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( GeneratedCommandsPipelineInfoEXT const & ) const = default; +#else + bool operator==( GeneratedCommandsPipelineInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pipeline == rhs.pipeline ); +# endif + } + + bool operator!=( GeneratedCommandsPipelineInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeneratedCommandsPipelineInfoEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Pipeline pipeline = {}; + }; + + template <> + struct CppType + { + using Type = GeneratedCommandsPipelineInfoEXT; + }; + + struct GeneratedCommandsShaderInfoEXT + { + using NativeType = VkGeneratedCommandsShaderInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGeneratedCommandsShaderInfoEXT; + +#if !defined( 
VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR GeneratedCommandsShaderInfoEXT( uint32_t shaderCount_ = {}, + const VULKAN_HPP_NAMESPACE::ShaderEXT * pShaders_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , shaderCount{ shaderCount_ } + , pShaders{ pShaders_ } + { + } + + VULKAN_HPP_CONSTEXPR GeneratedCommandsShaderInfoEXT( GeneratedCommandsShaderInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + GeneratedCommandsShaderInfoEXT( VkGeneratedCommandsShaderInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : GeneratedCommandsShaderInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + GeneratedCommandsShaderInfoEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & shaders_, + void * pNext_ = nullptr ) + : pNext( pNext_ ), shaderCount( static_cast( shaders_.size() ) ), pShaders( shaders_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + GeneratedCommandsShaderInfoEXT & operator=( GeneratedCommandsShaderInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + GeneratedCommandsShaderInfoEXT & operator=( VkGeneratedCommandsShaderInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsShaderInfoEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsShaderInfoEXT & setShaderCount( uint32_t shaderCount_ ) VULKAN_HPP_NOEXCEPT + { + shaderCount = shaderCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsShaderInfoEXT & setPShaders( const VULKAN_HPP_NAMESPACE::ShaderEXT * pShaders_ ) VULKAN_HPP_NOEXCEPT + { + pShaders = pShaders_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + GeneratedCommandsShaderInfoEXT & + setShaders( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & shaders_ ) VULKAN_HPP_NOEXCEPT + { + shaderCount = static_cast( shaders_.size() ); + pShaders = shaders_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkGeneratedCommandsShaderInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkGeneratedCommandsShaderInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, shaderCount, pShaders ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( GeneratedCommandsShaderInfoEXT const & ) const = default; +#else + bool operator==( GeneratedCommandsShaderInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderCount == rhs.shaderCount ) && ( pShaders == rhs.pShaders ); +# endif + } + + bool operator!=( GeneratedCommandsShaderInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeneratedCommandsShaderInfoEXT; + void * pNext = {}; + uint32_t shaderCount = {}; + const VULKAN_HPP_NAMESPACE::ShaderEXT * pShaders = {}; + }; + + template <> + struct CppType + { 
+ using Type = GeneratedCommandsShaderInfoEXT; + }; + struct LatencyTimingsFrameReportNV { using NativeType = VkLatencyTimingsFrameReportNV; @@ -42631,6 +43711,130 @@ namespace VULKAN_HPP_NAMESPACE using Type = HdrMetadataEXT; }; + struct HdrVividDynamicMetadataHUAWEI + { + using NativeType = VkHdrVividDynamicMetadataHUAWEI; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eHdrVividDynamicMetadataHUAWEI; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + HdrVividDynamicMetadataHUAWEI( size_t dynamicMetadataSize_ = {}, const void * pDynamicMetadata_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , dynamicMetadataSize{ dynamicMetadataSize_ } + , pDynamicMetadata{ pDynamicMetadata_ } + { + } + + VULKAN_HPP_CONSTEXPR HdrVividDynamicMetadataHUAWEI( HdrVividDynamicMetadataHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + HdrVividDynamicMetadataHUAWEI( VkHdrVividDynamicMetadataHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT + : HdrVividDynamicMetadataHUAWEI( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + template + HdrVividDynamicMetadataHUAWEI( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & dynamicMetadata_, const void * pNext_ = nullptr ) + : pNext( pNext_ ), dynamicMetadataSize( dynamicMetadata_.size() * sizeof( T ) ), pDynamicMetadata( dynamicMetadata_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + HdrVividDynamicMetadataHUAWEI & operator=( HdrVividDynamicMetadataHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + HdrVividDynamicMetadataHUAWEI & operator=( VkHdrVividDynamicMetadataHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 HdrVividDynamicMetadataHUAWEI & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 HdrVividDynamicMetadataHUAWEI & setDynamicMetadataSize( size_t dynamicMetadataSize_ ) VULKAN_HPP_NOEXCEPT + { + dynamicMetadataSize = dynamicMetadataSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 HdrVividDynamicMetadataHUAWEI & setPDynamicMetadata( const void * pDynamicMetadata_ ) VULKAN_HPP_NOEXCEPT + { + pDynamicMetadata = pDynamicMetadata_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + template + HdrVividDynamicMetadataHUAWEI & setDynamicMetadata( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & dynamicMetadata_ ) VULKAN_HPP_NOEXCEPT + { + dynamicMetadataSize = dynamicMetadata_.size() * sizeof( T ); + pDynamicMetadata = dynamicMetadata_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkHdrVividDynamicMetadataHUAWEI const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkHdrVividDynamicMetadataHUAWEI &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, dynamicMetadataSize, pDynamicMetadata ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( HdrVividDynamicMetadataHUAWEI const & ) const = default; +#else + bool operator==( 
HdrVividDynamicMetadataHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dynamicMetadataSize == rhs.dynamicMetadataSize ) &&
+             ( pDynamicMetadata == rhs.pDynamicMetadata );
+# endif
+    }
+
+    bool operator!=( HdrVividDynamicMetadataHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eHdrVividDynamicMetadataHUAWEI;
+    const void * pNext = {};
+    size_t dynamicMetadataSize = {};
+    const void * pDynamicMetadata = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eHdrVividDynamicMetadataHUAWEI>
+  {
+    using Type = HdrVividDynamicMetadataHUAWEI;
+  };
+
   struct HeadlessSurfaceCreateInfoEXT
   {
     using NativeType = VkHeadlessSurfaceCreateInfoEXT;
@@ -48702,6 +49906,786 @@ namespace VULKAN_HPP_NAMESPACE
   };
 #endif /*VK_USE_PLATFORM_FUCHSIA*/
 
+  struct IndirectCommandsExecutionSetTokenEXT
+  {
+    using NativeType = VkIndirectCommandsExecutionSetTokenEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR IndirectCommandsExecutionSetTokenEXT(
+      VULKAN_HPP_NAMESPACE::IndirectExecutionSetInfoTypeEXT type_ = VULKAN_HPP_NAMESPACE::IndirectExecutionSetInfoTypeEXT::ePipelines,
+      VULKAN_HPP_NAMESPACE::ShaderStageFlags shaderStages_ = {} ) VULKAN_HPP_NOEXCEPT
+      : type{ type_ }
+      , shaderStages{ shaderStages_ }
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR IndirectCommandsExecutionSetTokenEXT( IndirectCommandsExecutionSetTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    IndirectCommandsExecutionSetTokenEXT( VkIndirectCommandsExecutionSetTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : IndirectCommandsExecutionSetTokenEXT( *reinterpret_cast<IndirectCommandsExecutionSetTokenEXT const *>( &rhs ) )
+    {
+    }
+
+    IndirectCommandsExecutionSetTokenEXT & operator=( IndirectCommandsExecutionSetTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    IndirectCommandsExecutionSetTokenEXT & operator=( VkIndirectCommandsExecutionSetTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::IndirectCommandsExecutionSetTokenEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 IndirectCommandsExecutionSetTokenEXT & setType( VULKAN_HPP_NAMESPACE::IndirectExecutionSetInfoTypeEXT type_ ) VULKAN_HPP_NOEXCEPT
+    {
+      type = type_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 IndirectCommandsExecutionSetTokenEXT & setShaderStages( VULKAN_HPP_NAMESPACE::ShaderStageFlags shaderStages_ ) VULKAN_HPP_NOEXCEPT
+    {
+      shaderStages = shaderStages_;
+      return *this;
+    }
+#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/
+
+    operator VkIndirectCommandsExecutionSetTokenEXT const &() const VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<const VkIndirectCommandsExecutionSetTokenEXT *>( this );
+    }
+
+    operator VkIndirectCommandsExecutionSetTokenEXT &() VULKAN_HPP_NOEXCEPT
+    {
+      return *reinterpret_cast<VkIndirectCommandsExecutionSetTokenEXT *>( this );
+    }
+
+#if defined( VULKAN_HPP_USE_REFLECT )
+# if 14 <= VULKAN_HPP_CPP_VERSION
+    auto
+# else
+    std::tuple<VULKAN_HPP_NAMESPACE::IndirectExecutionSetInfoTypeEXT const &, VULKAN_HPP_NAMESPACE::ShaderStageFlags const &>
+# endif
+      reflect() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( type, shaderStages );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( IndirectCommandsExecutionSetTokenEXT const & ) const = default;
+#else
+    bool operator==( IndirectCommandsExecutionSetTokenEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( type == rhs.type ) && ( shaderStages == rhs.shaderStages );
+# endif
+    }
+
+    bool
operator!=( IndirectCommandsExecutionSetTokenEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::IndirectExecutionSetInfoTypeEXT type = VULKAN_HPP_NAMESPACE::IndirectExecutionSetInfoTypeEXT::ePipelines; + VULKAN_HPP_NAMESPACE::ShaderStageFlags shaderStages = {}; + }; + + struct IndirectCommandsIndexBufferTokenEXT + { + using NativeType = VkIndirectCommandsIndexBufferTokenEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + IndirectCommandsIndexBufferTokenEXT( VULKAN_HPP_NAMESPACE::IndirectCommandsInputModeFlagBitsEXT mode_ = + VULKAN_HPP_NAMESPACE::IndirectCommandsInputModeFlagBitsEXT::eVulkanIndexBuffer ) VULKAN_HPP_NOEXCEPT + : mode{ mode_ } + { + } + + VULKAN_HPP_CONSTEXPR IndirectCommandsIndexBufferTokenEXT( IndirectCommandsIndexBufferTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + IndirectCommandsIndexBufferTokenEXT( VkIndirectCommandsIndexBufferTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : IndirectCommandsIndexBufferTokenEXT( *reinterpret_cast( &rhs ) ) + { + } + + IndirectCommandsIndexBufferTokenEXT & operator=( IndirectCommandsIndexBufferTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + IndirectCommandsIndexBufferTokenEXT & operator=( VkIndirectCommandsIndexBufferTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsIndexBufferTokenEXT & + setMode( VULKAN_HPP_NAMESPACE::IndirectCommandsInputModeFlagBitsEXT mode_ ) VULKAN_HPP_NOEXCEPT + { + mode = mode_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkIndirectCommandsIndexBufferTokenEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkIndirectCommandsIndexBufferTokenEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( mode ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( IndirectCommandsIndexBufferTokenEXT const & ) const = default; +#else + bool operator==( IndirectCommandsIndexBufferTokenEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( mode == rhs.mode ); +# endif + } + + bool operator!=( IndirectCommandsIndexBufferTokenEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::IndirectCommandsInputModeFlagBitsEXT mode = VULKAN_HPP_NAMESPACE::IndirectCommandsInputModeFlagBitsEXT::eVulkanIndexBuffer; + }; + + struct PushConstantRange + { + using NativeType = VkPushConstantRange; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PushConstantRange( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ = {}, uint32_t offset_ = {}, uint32_t size_ = {} ) VULKAN_HPP_NOEXCEPT + : stageFlags{ stageFlags_ } + , offset{ offset_ } + , size{ size_ } + { + } + + VULKAN_HPP_CONSTEXPR PushConstantRange( PushConstantRange const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PushConstantRange( VkPushConstantRange const & rhs ) VULKAN_HPP_NOEXCEPT : PushConstantRange( *reinterpret_cast( &rhs ) ) {} + + PushConstantRange & operator=( PushConstantRange const & rhs ) 
VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PushConstantRange & operator=( VkPushConstantRange const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PushConstantRange & setStageFlags( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ ) VULKAN_HPP_NOEXCEPT + { + stageFlags = stageFlags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PushConstantRange & setOffset( uint32_t offset_ ) VULKAN_HPP_NOEXCEPT + { + offset = offset_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PushConstantRange & setSize( uint32_t size_ ) VULKAN_HPP_NOEXCEPT + { + size = size_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPushConstantRange const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPushConstantRange &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( stageFlags, offset, size ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PushConstantRange const & ) const = default; +#else + bool operator==( PushConstantRange const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( stageFlags == rhs.stageFlags ) && ( offset == rhs.offset ) && ( size == rhs.size ); +# endif + } + + bool operator!=( PushConstantRange const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags = {}; + uint32_t offset = {}; + uint32_t size = {}; + }; + + struct IndirectCommandsPushConstantTokenEXT + { + using NativeType = VkIndirectCommandsPushConstantTokenEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR IndirectCommandsPushConstantTokenEXT( VULKAN_HPP_NAMESPACE::PushConstantRange updateRange_ = {} ) VULKAN_HPP_NOEXCEPT + : updateRange{ updateRange_ } + { + } + + VULKAN_HPP_CONSTEXPR IndirectCommandsPushConstantTokenEXT( IndirectCommandsPushConstantTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + IndirectCommandsPushConstantTokenEXT( VkIndirectCommandsPushConstantTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : IndirectCommandsPushConstantTokenEXT( *reinterpret_cast( &rhs ) ) + { + } + + IndirectCommandsPushConstantTokenEXT & operator=( IndirectCommandsPushConstantTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + IndirectCommandsPushConstantTokenEXT & operator=( VkIndirectCommandsPushConstantTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsPushConstantTokenEXT & + setUpdateRange( VULKAN_HPP_NAMESPACE::PushConstantRange const & updateRange_ ) VULKAN_HPP_NOEXCEPT + { + updateRange = updateRange_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkIndirectCommandsPushConstantTokenEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkIndirectCommandsPushConstantTokenEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto 
+# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( updateRange ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( IndirectCommandsPushConstantTokenEXT const & ) const = default; +#else + bool operator==( IndirectCommandsPushConstantTokenEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( updateRange == rhs.updateRange ); +# endif + } + + bool operator!=( IndirectCommandsPushConstantTokenEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::PushConstantRange updateRange = {}; + }; + + struct IndirectCommandsVertexBufferTokenEXT + { + using NativeType = VkIndirectCommandsVertexBufferTokenEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR IndirectCommandsVertexBufferTokenEXT( uint32_t vertexBindingUnit_ = {} ) VULKAN_HPP_NOEXCEPT : vertexBindingUnit{ vertexBindingUnit_ } + { + } + + VULKAN_HPP_CONSTEXPR IndirectCommandsVertexBufferTokenEXT( IndirectCommandsVertexBufferTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + IndirectCommandsVertexBufferTokenEXT( VkIndirectCommandsVertexBufferTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : IndirectCommandsVertexBufferTokenEXT( *reinterpret_cast( &rhs ) ) + { + } + + IndirectCommandsVertexBufferTokenEXT & operator=( IndirectCommandsVertexBufferTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + IndirectCommandsVertexBufferTokenEXT & operator=( VkIndirectCommandsVertexBufferTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsVertexBufferTokenEXT & setVertexBindingUnit( uint32_t vertexBindingUnit_ ) VULKAN_HPP_NOEXCEPT + { + vertexBindingUnit = vertexBindingUnit_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkIndirectCommandsVertexBufferTokenEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkIndirectCommandsVertexBufferTokenEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( vertexBindingUnit ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( IndirectCommandsVertexBufferTokenEXT const & ) const = default; +#else + bool operator==( IndirectCommandsVertexBufferTokenEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( vertexBindingUnit == rhs.vertexBindingUnit ); +# endif + } + + bool operator!=( IndirectCommandsVertexBufferTokenEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t vertexBindingUnit = {}; + }; + + union IndirectCommandsTokenDataEXT + { + using NativeType = VkIndirectCommandsTokenDataEXT; +#if !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS ) + + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsTokenDataEXT( const VULKAN_HPP_NAMESPACE::IndirectCommandsPushConstantTokenEXT * pPushConstant_ = {} ) + : pPushConstant( pPushConstant_ ) + { + } + + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsTokenDataEXT( const 
VULKAN_HPP_NAMESPACE::IndirectCommandsVertexBufferTokenEXT * pVertexBuffer_ ) + : pVertexBuffer( pVertexBuffer_ ) + { + } + + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsTokenDataEXT( const VULKAN_HPP_NAMESPACE::IndirectCommandsIndexBufferTokenEXT * pIndexBuffer_ ) + : pIndexBuffer( pIndexBuffer_ ) + { + } + + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsTokenDataEXT( const VULKAN_HPP_NAMESPACE::IndirectCommandsExecutionSetTokenEXT * pExecutionSet_ ) + : pExecutionSet( pExecutionSet_ ) + { + } +#endif /*VULKAN_HPP_NO_UNION_CONSTRUCTORS*/ + +#if !defined( VULKAN_HPP_NO_UNION_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsTokenDataEXT & + setPPushConstant( const VULKAN_HPP_NAMESPACE::IndirectCommandsPushConstantTokenEXT * pPushConstant_ ) VULKAN_HPP_NOEXCEPT + { + pPushConstant = pPushConstant_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsTokenDataEXT & + setPVertexBuffer( const VULKAN_HPP_NAMESPACE::IndirectCommandsVertexBufferTokenEXT * pVertexBuffer_ ) VULKAN_HPP_NOEXCEPT + { + pVertexBuffer = pVertexBuffer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsTokenDataEXT & + setPIndexBuffer( const VULKAN_HPP_NAMESPACE::IndirectCommandsIndexBufferTokenEXT * pIndexBuffer_ ) VULKAN_HPP_NOEXCEPT + { + pIndexBuffer = pIndexBuffer_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsTokenDataEXT & + setPExecutionSet( const VULKAN_HPP_NAMESPACE::IndirectCommandsExecutionSetTokenEXT * pExecutionSet_ ) VULKAN_HPP_NOEXCEPT + { + pExecutionSet = pExecutionSet_; + return *this; + } +#endif /*VULKAN_HPP_NO_UNION_SETTERS*/ + + operator VkIndirectCommandsTokenDataEXT const &() const + { + return *reinterpret_cast( this ); + } + + operator VkIndirectCommandsTokenDataEXT &() + { + return *reinterpret_cast( this ); + } + +#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS + const VULKAN_HPP_NAMESPACE::IndirectCommandsPushConstantTokenEXT * pPushConstant; + const VULKAN_HPP_NAMESPACE::IndirectCommandsVertexBufferTokenEXT * pVertexBuffer; + const VULKAN_HPP_NAMESPACE::IndirectCommandsIndexBufferTokenEXT * pIndexBuffer; + const VULKAN_HPP_NAMESPACE::IndirectCommandsExecutionSetTokenEXT * pExecutionSet; +#else + const VkIndirectCommandsPushConstantTokenEXT * pPushConstant; + const VkIndirectCommandsVertexBufferTokenEXT * pVertexBuffer; + const VkIndirectCommandsIndexBufferTokenEXT * pIndexBuffer; + const VkIndirectCommandsExecutionSetTokenEXT * pExecutionSet; +#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/ + }; + + struct IndirectCommandsLayoutTokenEXT + { + using NativeType = VkIndirectCommandsLayoutTokenEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eIndirectCommandsLayoutTokenEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenEXT( + VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeEXT type_ = VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeEXT::eExecutionSet, + VULKAN_HPP_NAMESPACE::IndirectCommandsTokenDataEXT data_ = {}, + uint32_t offset_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , type{ type_ } + , data{ data_ } + , offset{ offset_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenEXT( IndirectCommandsLayoutTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + IndirectCommandsLayoutTokenEXT( VkIndirectCommandsLayoutTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : IndirectCommandsLayoutTokenEXT( *reinterpret_cast( &rhs ) ) + { + } + + IndirectCommandsLayoutTokenEXT & 
operator=( IndirectCommandsLayoutTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + IndirectCommandsLayoutTokenEXT & operator=( VkIndirectCommandsLayoutTokenEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenEXT & setType( VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeEXT type_ ) VULKAN_HPP_NOEXCEPT + { + type = type_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenEXT & setData( VULKAN_HPP_NAMESPACE::IndirectCommandsTokenDataEXT const & data_ ) VULKAN_HPP_NOEXCEPT + { + data = data_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenEXT & setOffset( uint32_t offset_ ) VULKAN_HPP_NOEXCEPT + { + offset = offset_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkIndirectCommandsLayoutTokenEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkIndirectCommandsLayoutTokenEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, type, data, offset ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eIndirectCommandsLayoutTokenEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeEXT type = VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeEXT::eExecutionSet; + VULKAN_HPP_NAMESPACE::IndirectCommandsTokenDataEXT data = {}; + uint32_t offset = {}; + }; + + template <> + struct CppType + { + using Type = IndirectCommandsLayoutTokenEXT; + }; + + struct IndirectCommandsLayoutCreateInfoEXT + { + using NativeType = VkIndirectCommandsLayoutCreateInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eIndirectCommandsLayoutCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutCreateInfoEXT( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsEXT flags_ = {}, + VULKAN_HPP_NAMESPACE::ShaderStageFlags shaderStages_ = {}, + uint32_t indirectStride_ = {}, + VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ = {}, + uint32_t tokenCount_ = {}, + const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenEXT * pTokens_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , shaderStages{ shaderStages_ } + , indirectStride{ indirectStride_ } + , pipelineLayout{ pipelineLayout_ } + , tokenCount{ tokenCount_ } + , pTokens{ pTokens_ } + { + } + + VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutCreateInfoEXT( IndirectCommandsLayoutCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + IndirectCommandsLayoutCreateInfoEXT( VkIndirectCommandsLayoutCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : IndirectCommandsLayoutCreateInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + IndirectCommandsLayoutCreateInfoEXT( + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsEXT flags_, + VULKAN_HPP_NAMESPACE::ShaderStageFlags shaderStages_, + uint32_t 
indirectStride_, + VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & tokens_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , flags( flags_ ) + , shaderStages( shaderStages_ ) + , indirectStride( indirectStride_ ) + , pipelineLayout( pipelineLayout_ ) + , tokenCount( static_cast( tokens_.size() ) ) + , pTokens( tokens_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + IndirectCommandsLayoutCreateInfoEXT & operator=( IndirectCommandsLayoutCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + IndirectCommandsLayoutCreateInfoEXT & operator=( VkIndirectCommandsLayoutCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoEXT & + setFlags( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoEXT & setShaderStages( VULKAN_HPP_NAMESPACE::ShaderStageFlags shaderStages_ ) VULKAN_HPP_NOEXCEPT + { + shaderStages = shaderStages_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoEXT & setIndirectStride( uint32_t indirectStride_ ) VULKAN_HPP_NOEXCEPT + { + indirectStride = indirectStride_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoEXT & setPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ ) VULKAN_HPP_NOEXCEPT + { + pipelineLayout = pipelineLayout_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoEXT & setTokenCount( uint32_t tokenCount_ ) VULKAN_HPP_NOEXCEPT + { + tokenCount = tokenCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoEXT & + setPTokens( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenEXT * pTokens_ ) VULKAN_HPP_NOEXCEPT + { + pTokens = pTokens_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + IndirectCommandsLayoutCreateInfoEXT & + setTokens( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & tokens_ ) VULKAN_HPP_NOEXCEPT + { + tokenCount = static_cast( tokens_.size() ); + pTokens = tokens_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkIndirectCommandsLayoutCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkIndirectCommandsLayoutCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, shaderStages, indirectStride, pipelineLayout, tokenCount, pTokens ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( IndirectCommandsLayoutCreateInfoEXT const & ) const = default; +#else + bool operator==( IndirectCommandsLayoutCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags 
== rhs.flags ) && ( shaderStages == rhs.shaderStages ) && + ( indirectStride == rhs.indirectStride ) && ( pipelineLayout == rhs.pipelineLayout ) && ( tokenCount == rhs.tokenCount ) && + ( pTokens == rhs.pTokens ); +# endif + } + + bool operator!=( IndirectCommandsLayoutCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eIndirectCommandsLayoutCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsEXT flags = {}; + VULKAN_HPP_NAMESPACE::ShaderStageFlags shaderStages = {}; + uint32_t indirectStride = {}; + VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout = {}; + uint32_t tokenCount = {}; + const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenEXT * pTokens = {}; + }; + + template <> + struct CppType + { + using Type = IndirectCommandsLayoutCreateInfoEXT; + }; + struct IndirectCommandsLayoutTokenNV { using NativeType = VkIndirectCommandsLayoutTokenNV; @@ -49196,6 +51180,593 @@ namespace VULKAN_HPP_NAMESPACE using Type = IndirectCommandsLayoutCreateInfoNV; }; + struct IndirectExecutionSetPipelineInfoEXT + { + using NativeType = VkIndirectExecutionSetPipelineInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eIndirectExecutionSetPipelineInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR IndirectExecutionSetPipelineInfoEXT( VULKAN_HPP_NAMESPACE::Pipeline initialPipeline_ = {}, + uint32_t maxPipelineCount_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , initialPipeline{ initialPipeline_ } + , maxPipelineCount{ maxPipelineCount_ } + { + } + + VULKAN_HPP_CONSTEXPR IndirectExecutionSetPipelineInfoEXT( IndirectExecutionSetPipelineInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + IndirectExecutionSetPipelineInfoEXT( VkIndirectExecutionSetPipelineInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : IndirectExecutionSetPipelineInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + IndirectExecutionSetPipelineInfoEXT & operator=( IndirectExecutionSetPipelineInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + IndirectExecutionSetPipelineInfoEXT & operator=( VkIndirectExecutionSetPipelineInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetPipelineInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetPipelineInfoEXT & setInitialPipeline( VULKAN_HPP_NAMESPACE::Pipeline initialPipeline_ ) VULKAN_HPP_NOEXCEPT + { + initialPipeline = initialPipeline_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetPipelineInfoEXT & setMaxPipelineCount( uint32_t maxPipelineCount_ ) VULKAN_HPP_NOEXCEPT + { + maxPipelineCount = maxPipelineCount_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkIndirectExecutionSetPipelineInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkIndirectExecutionSetPipelineInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + 
return std::tie( sType, pNext, initialPipeline, maxPipelineCount );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( IndirectExecutionSetPipelineInfoEXT const & ) const = default;
+#else
+    bool operator==( IndirectExecutionSetPipelineInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+# if defined( VULKAN_HPP_USE_REFLECT )
+      return this->reflect() == rhs.reflect();
+# else
+      return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( initialPipeline == rhs.initialPipeline ) && ( maxPipelineCount == rhs.maxPipelineCount );
+# endif
+    }
+
+    bool operator!=( IndirectExecutionSetPipelineInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return !operator==( rhs );
+    }
+#endif
+
+  public:
+    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eIndirectExecutionSetPipelineInfoEXT;
+    const void * pNext = {};
+    VULKAN_HPP_NAMESPACE::Pipeline initialPipeline = {};
+    uint32_t maxPipelineCount = {};
+  };
+
+  template <>
+  struct CppType<StructureType, StructureType::eIndirectExecutionSetPipelineInfoEXT>
+  {
+    using Type = IndirectExecutionSetPipelineInfoEXT;
+  };
+
+  struct IndirectExecutionSetShaderLayoutInfoEXT
+  {
+    using NativeType = VkIndirectExecutionSetShaderLayoutInfoEXT;
+
+    static const bool allowDuplicate = false;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eIndirectExecutionSetShaderLayoutInfoEXT;
+
+#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+    VULKAN_HPP_CONSTEXPR IndirectExecutionSetShaderLayoutInfoEXT( uint32_t setLayoutCount_ = {},
+                                                                  const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts_ = {},
+                                                                  const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT
+      : pNext{ pNext_ }
+      , setLayoutCount{ setLayoutCount_ }
+      , pSetLayouts{ pSetLayouts_ }
+    {
+    }
+
+    VULKAN_HPP_CONSTEXPR IndirectExecutionSetShaderLayoutInfoEXT( IndirectExecutionSetShaderLayoutInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    IndirectExecutionSetShaderLayoutInfoEXT( VkIndirectExecutionSetShaderLayoutInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+      : IndirectExecutionSetShaderLayoutInfoEXT( *reinterpret_cast<IndirectExecutionSetShaderLayoutInfoEXT const *>( &rhs ) )
+    {
+    }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    IndirectExecutionSetShaderLayoutInfoEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorSetLayout> const & setLayouts_,
+                                             const void * pNext_ = nullptr )
+      : pNext( pNext_ ), setLayoutCount( static_cast<uint32_t>( setLayouts_.size() ) ), pSetLayouts( setLayouts_.data() )
+    {
+    }
+# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+    IndirectExecutionSetShaderLayoutInfoEXT & operator=( IndirectExecutionSetShaderLayoutInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/
+
+    IndirectExecutionSetShaderLayoutInfoEXT & operator=( VkIndirectExecutionSetShaderLayoutInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::IndirectExecutionSetShaderLayoutInfoEXT const *>( &rhs );
+      return *this;
+    }
+
+#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+    VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetShaderLayoutInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pNext = pNext_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetShaderLayoutInfoEXT & setSetLayoutCount( uint32_t setLayoutCount_ ) VULKAN_HPP_NOEXCEPT
+    {
+      setLayoutCount = setLayoutCount_;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetShaderLayoutInfoEXT &
+      setPSetLayouts( const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts_ ) VULKAN_HPP_NOEXCEPT
+    {
+      pSetLayouts = pSetLayouts_;
+      return *this;
+    }
+
+# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+    IndirectExecutionSetShaderLayoutInfoEXT &
+      setSetLayouts(
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & setLayouts_ ) VULKAN_HPP_NOEXCEPT + { + setLayoutCount = static_cast( setLayouts_.size() ); + pSetLayouts = setLayouts_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkIndirectExecutionSetShaderLayoutInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkIndirectExecutionSetShaderLayoutInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, setLayoutCount, pSetLayouts ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( IndirectExecutionSetShaderLayoutInfoEXT const & ) const = default; +#else + bool operator==( IndirectExecutionSetShaderLayoutInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( setLayoutCount == rhs.setLayoutCount ) && ( pSetLayouts == rhs.pSetLayouts ); +# endif + } + + bool operator!=( IndirectExecutionSetShaderLayoutInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eIndirectExecutionSetShaderLayoutInfoEXT; + const void * pNext = {}; + uint32_t setLayoutCount = {}; + const VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayouts = {}; + }; + + template <> + struct CppType + { + using Type = IndirectExecutionSetShaderLayoutInfoEXT; + }; + + struct IndirectExecutionSetShaderInfoEXT + { + using NativeType = VkIndirectExecutionSetShaderInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eIndirectExecutionSetShaderInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR IndirectExecutionSetShaderInfoEXT( uint32_t shaderCount_ = {}, + const VULKAN_HPP_NAMESPACE::ShaderEXT * pInitialShaders_ = {}, + const VULKAN_HPP_NAMESPACE::IndirectExecutionSetShaderLayoutInfoEXT * pSetLayoutInfos_ = {}, + uint32_t maxShaderCount_ = {}, + uint32_t pushConstantRangeCount_ = {}, + const VULKAN_HPP_NAMESPACE::PushConstantRange * pPushConstantRanges_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , shaderCount{ shaderCount_ } + , pInitialShaders{ pInitialShaders_ } + , pSetLayoutInfos{ pSetLayoutInfos_ } + , maxShaderCount{ maxShaderCount_ } + , pushConstantRangeCount{ pushConstantRangeCount_ } + , pPushConstantRanges{ pPushConstantRanges_ } + { + } + + VULKAN_HPP_CONSTEXPR IndirectExecutionSetShaderInfoEXT( IndirectExecutionSetShaderInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + IndirectExecutionSetShaderInfoEXT( VkIndirectExecutionSetShaderInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : IndirectExecutionSetShaderInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + IndirectExecutionSetShaderInfoEXT( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & initialShaders_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & setLayoutInfos_ = {}, + uint32_t maxShaderCount_ = {}, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pushConstantRanges_ = {}, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) 
+ , shaderCount( static_cast( initialShaders_.size() ) ) + , pInitialShaders( initialShaders_.data() ) + , pSetLayoutInfos( setLayoutInfos_.data() ) + , maxShaderCount( maxShaderCount_ ) + , pushConstantRangeCount( static_cast( pushConstantRanges_.size() ) ) + , pPushConstantRanges( pushConstantRanges_.data() ) + { +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( setLayoutInfos_.empty() || ( initialShaders_.size() == setLayoutInfos_.size() ) ); +# else + if ( !setLayoutInfos_.empty() && ( initialShaders_.size() != setLayoutInfos_.size() ) ) + { + throw LogicError( + VULKAN_HPP_NAMESPACE_STRING + "::IndirectExecutionSetShaderInfoEXT::IndirectExecutionSetShaderInfoEXT: !setLayoutInfos_.empty() && ( initialShaders_.size() != setLayoutInfos_.size() )" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + IndirectExecutionSetShaderInfoEXT & operator=( IndirectExecutionSetShaderInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + IndirectExecutionSetShaderInfoEXT & operator=( VkIndirectExecutionSetShaderInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetShaderInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetShaderInfoEXT & setShaderCount( uint32_t shaderCount_ ) VULKAN_HPP_NOEXCEPT + { + shaderCount = shaderCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetShaderInfoEXT & + setPInitialShaders( const VULKAN_HPP_NAMESPACE::ShaderEXT * pInitialShaders_ ) VULKAN_HPP_NOEXCEPT + { + pInitialShaders = pInitialShaders_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + IndirectExecutionSetShaderInfoEXT & + setInitialShaders( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & initialShaders_ ) VULKAN_HPP_NOEXCEPT + { + shaderCount = static_cast( initialShaders_.size() ); + pInitialShaders = initialShaders_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetShaderInfoEXT & + setPSetLayoutInfos( const VULKAN_HPP_NAMESPACE::IndirectExecutionSetShaderLayoutInfoEXT * pSetLayoutInfos_ ) VULKAN_HPP_NOEXCEPT + { + pSetLayoutInfos = pSetLayoutInfos_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + IndirectExecutionSetShaderInfoEXT & setSetLayoutInfos( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & setLayoutInfos_ ) + VULKAN_HPP_NOEXCEPT + { + shaderCount = static_cast( setLayoutInfos_.size() ); + pSetLayoutInfos = setLayoutInfos_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetShaderInfoEXT & setMaxShaderCount( uint32_t maxShaderCount_ ) VULKAN_HPP_NOEXCEPT + { + maxShaderCount = maxShaderCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetShaderInfoEXT & setPushConstantRangeCount( uint32_t pushConstantRangeCount_ ) VULKAN_HPP_NOEXCEPT + { + pushConstantRangeCount = pushConstantRangeCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetShaderInfoEXT & + setPPushConstantRanges( const VULKAN_HPP_NAMESPACE::PushConstantRange * pPushConstantRanges_ ) VULKAN_HPP_NOEXCEPT + { + pPushConstantRanges = pPushConstantRanges_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + 
IndirectExecutionSetShaderInfoEXT & setPushConstantRanges( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & pushConstantRanges_ ) VULKAN_HPP_NOEXCEPT + { + pushConstantRangeCount = static_cast( pushConstantRanges_.size() ); + pPushConstantRanges = pushConstantRanges_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkIndirectExecutionSetShaderInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkIndirectExecutionSetShaderInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, shaderCount, pInitialShaders, pSetLayoutInfos, maxShaderCount, pushConstantRangeCount, pPushConstantRanges ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( IndirectExecutionSetShaderInfoEXT const & ) const = default; +#else + bool operator==( IndirectExecutionSetShaderInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderCount == rhs.shaderCount ) && ( pInitialShaders == rhs.pInitialShaders ) && + ( pSetLayoutInfos == rhs.pSetLayoutInfos ) && ( maxShaderCount == rhs.maxShaderCount ) && + ( pushConstantRangeCount == rhs.pushConstantRangeCount ) && ( pPushConstantRanges == rhs.pPushConstantRanges ); +# endif + } + + bool operator!=( IndirectExecutionSetShaderInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eIndirectExecutionSetShaderInfoEXT; + const void * pNext = {}; + uint32_t shaderCount = {}; + const VULKAN_HPP_NAMESPACE::ShaderEXT * pInitialShaders = {}; + const VULKAN_HPP_NAMESPACE::IndirectExecutionSetShaderLayoutInfoEXT * pSetLayoutInfos = {}; + uint32_t maxShaderCount = {}; + uint32_t pushConstantRangeCount = {}; + const VULKAN_HPP_NAMESPACE::PushConstantRange * pPushConstantRanges = {}; + }; + + template <> + struct CppType + { + using Type = IndirectExecutionSetShaderInfoEXT; + }; + + union IndirectExecutionSetInfoEXT + { + using NativeType = VkIndirectExecutionSetInfoEXT; +#if !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS ) + + VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetInfoEXT( const VULKAN_HPP_NAMESPACE::IndirectExecutionSetPipelineInfoEXT * pPipelineInfo_ = {} ) + : pPipelineInfo( pPipelineInfo_ ) + { + } + + VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetInfoEXT( const VULKAN_HPP_NAMESPACE::IndirectExecutionSetShaderInfoEXT * pShaderInfo_ ) + : pShaderInfo( pShaderInfo_ ) + { + } +#endif /*VULKAN_HPP_NO_UNION_CONSTRUCTORS*/ + +#if !defined( VULKAN_HPP_NO_UNION_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetInfoEXT & + setPPipelineInfo( const VULKAN_HPP_NAMESPACE::IndirectExecutionSetPipelineInfoEXT * pPipelineInfo_ ) VULKAN_HPP_NOEXCEPT + { + pPipelineInfo = pPipelineInfo_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetInfoEXT & + setPShaderInfo( const VULKAN_HPP_NAMESPACE::IndirectExecutionSetShaderInfoEXT * pShaderInfo_ ) VULKAN_HPP_NOEXCEPT + { + pShaderInfo = pShaderInfo_; + return *this; + } +#endif /*VULKAN_HPP_NO_UNION_SETTERS*/ + + operator VkIndirectExecutionSetInfoEXT const &() const + { + return *reinterpret_cast( this ); + 
} + + operator VkIndirectExecutionSetInfoEXT &() + { + return *reinterpret_cast( this ); + } + +#ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS + const VULKAN_HPP_NAMESPACE::IndirectExecutionSetPipelineInfoEXT * pPipelineInfo; + const VULKAN_HPP_NAMESPACE::IndirectExecutionSetShaderInfoEXT * pShaderInfo; +#else + const VkIndirectExecutionSetPipelineInfoEXT * pPipelineInfo; + const VkIndirectExecutionSetShaderInfoEXT * pShaderInfo; +#endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/ + }; + + struct IndirectExecutionSetCreateInfoEXT + { + using NativeType = VkIndirectExecutionSetCreateInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eIndirectExecutionSetCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetCreateInfoEXT( + VULKAN_HPP_NAMESPACE::IndirectExecutionSetInfoTypeEXT type_ = VULKAN_HPP_NAMESPACE::IndirectExecutionSetInfoTypeEXT::ePipelines, + VULKAN_HPP_NAMESPACE::IndirectExecutionSetInfoEXT info_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , type{ type_ } + , info{ info_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetCreateInfoEXT( IndirectExecutionSetCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + IndirectExecutionSetCreateInfoEXT( VkIndirectExecutionSetCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : IndirectExecutionSetCreateInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + IndirectExecutionSetCreateInfoEXT & operator=( IndirectExecutionSetCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + IndirectExecutionSetCreateInfoEXT & operator=( VkIndirectExecutionSetCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetCreateInfoEXT & setType( VULKAN_HPP_NAMESPACE::IndirectExecutionSetInfoTypeEXT type_ ) VULKAN_HPP_NOEXCEPT + { + type = type_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 IndirectExecutionSetCreateInfoEXT & setInfo( VULKAN_HPP_NAMESPACE::IndirectExecutionSetInfoEXT const & info_ ) VULKAN_HPP_NOEXCEPT + { + info = info_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkIndirectExecutionSetCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkIndirectExecutionSetCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, type, info ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eIndirectExecutionSetCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::IndirectExecutionSetInfoTypeEXT type = VULKAN_HPP_NAMESPACE::IndirectExecutionSetInfoTypeEXT::ePipelines; + VULKAN_HPP_NAMESPACE::IndirectExecutionSetInfoEXT info = {}; + }; + + template <> + struct CppType + { + using Type = IndirectExecutionSetCreateInfoEXT; + }; + struct InitializePerformanceApiInfoINTEL { using NativeType = VkInitializePerformanceApiInfoINTEL; @@ -49265,7 +51836,7 @@ namespace VULKAN_HPP_NAMESPACE #if 
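As a usage sketch (editorial illustration, not part of the generated header): the three additions above — IndirectExecutionSetShaderInfoEXT, the IndirectExecutionSetInfoEXT union and IndirectExecutionSetCreateInfoEXT — are the inputs to creating an indirect execution set from VK_EXT_device_generated_commands. The snippet below assumes `device` (vk::Device) and `initialPipeline` (vk::Pipeline) exist, assumes the usual initialPipeline/maxPipelineCount members of IndirectExecutionSetPipelineInfoEXT from the extension spec, and assumes a `createIndirectExecutionSetEXT` wrapper mirroring vkCreateIndirectExecutionSetEXT; treat the exact call as an assumption, not as this header's verified API.

    // Sketch: build an indirect execution set for the "pipelines" path.
    vk::IndirectExecutionSetPipelineInfoEXT pipelineInfo{};
    pipelineInfo.initialPipeline  = initialPipeline;  // pipeline used to size/seed the set (assumed member names)
    pipelineInfo.maxPipelineCount = 8;

    vk::IndirectExecutionSetCreateInfoEXT createInfo{};
    createInfo.type               = vk::IndirectExecutionSetInfoTypeEXT::ePipelines;
    createInfo.info.pPipelineInfo = &pipelineInfo;    // union member selected by `type`

    // Assumed Vulkan-Hpp wrapper over vkCreateIndirectExecutionSetEXT (exceptions mode).
    vk::IndirectExecutionSetEXT executionSet = device.createIndirectExecutionSetEXT( createInfo );

The union member written must match `type`; the pShaderInfo path would instead point at an IndirectExecutionSetShaderInfoEXT as defined above.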
defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( InitializePerformanceApiInfoINTEL const & ) const = default; #else - bool operator==( InitializePerformanceApiInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( InitializePerformanceApiInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); @@ -59305,6 +61876,281 @@ namespace VULKAN_HPP_NAMESPACE using Type = PhysicalDeviceConservativeRasterizationPropertiesEXT; }; + struct PhysicalDeviceCooperativeMatrix2FeaturesNV + { + using NativeType = VkPhysicalDeviceCooperativeMatrix2FeaturesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCooperativeMatrix2FeaturesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrix2FeaturesNV( VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixWorkgroupScope_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixFlexibleDimensions_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixReductions_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixConversions_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixPerElementOperations_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixTensorAddressing_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixBlockLoads_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , cooperativeMatrixWorkgroupScope{ cooperativeMatrixWorkgroupScope_ } + , cooperativeMatrixFlexibleDimensions{ cooperativeMatrixFlexibleDimensions_ } + , cooperativeMatrixReductions{ cooperativeMatrixReductions_ } + , cooperativeMatrixConversions{ cooperativeMatrixConversions_ } + , cooperativeMatrixPerElementOperations{ cooperativeMatrixPerElementOperations_ } + , cooperativeMatrixTensorAddressing{ cooperativeMatrixTensorAddressing_ } + , cooperativeMatrixBlockLoads{ cooperativeMatrixBlockLoads_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrix2FeaturesNV( PhysicalDeviceCooperativeMatrix2FeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceCooperativeMatrix2FeaturesNV( VkPhysicalDeviceCooperativeMatrix2FeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceCooperativeMatrix2FeaturesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceCooperativeMatrix2FeaturesNV & operator=( PhysicalDeviceCooperativeMatrix2FeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceCooperativeMatrix2FeaturesNV & operator=( VkPhysicalDeviceCooperativeMatrix2FeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrix2FeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrix2FeaturesNV & + setCooperativeMatrixWorkgroupScope( VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixWorkgroupScope_ ) VULKAN_HPP_NOEXCEPT + { + cooperativeMatrixWorkgroupScope = cooperativeMatrixWorkgroupScope_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrix2FeaturesNV & + setCooperativeMatrixFlexibleDimensions( VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixFlexibleDimensions_ ) VULKAN_HPP_NOEXCEPT + { + cooperativeMatrixFlexibleDimensions = 
cooperativeMatrixFlexibleDimensions_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrix2FeaturesNV & + setCooperativeMatrixReductions( VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixReductions_ ) VULKAN_HPP_NOEXCEPT + { + cooperativeMatrixReductions = cooperativeMatrixReductions_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrix2FeaturesNV & + setCooperativeMatrixConversions( VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixConversions_ ) VULKAN_HPP_NOEXCEPT + { + cooperativeMatrixConversions = cooperativeMatrixConversions_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrix2FeaturesNV & + setCooperativeMatrixPerElementOperations( VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixPerElementOperations_ ) VULKAN_HPP_NOEXCEPT + { + cooperativeMatrixPerElementOperations = cooperativeMatrixPerElementOperations_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrix2FeaturesNV & + setCooperativeMatrixTensorAddressing( VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixTensorAddressing_ ) VULKAN_HPP_NOEXCEPT + { + cooperativeMatrixTensorAddressing = cooperativeMatrixTensorAddressing_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrix2FeaturesNV & + setCooperativeMatrixBlockLoads( VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixBlockLoads_ ) VULKAN_HPP_NOEXCEPT + { + cooperativeMatrixBlockLoads = cooperativeMatrixBlockLoads_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceCooperativeMatrix2FeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCooperativeMatrix2FeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + cooperativeMatrixWorkgroupScope, + cooperativeMatrixFlexibleDimensions, + cooperativeMatrixReductions, + cooperativeMatrixConversions, + cooperativeMatrixPerElementOperations, + cooperativeMatrixTensorAddressing, + cooperativeMatrixBlockLoads ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceCooperativeMatrix2FeaturesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceCooperativeMatrix2FeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( cooperativeMatrixWorkgroupScope == rhs.cooperativeMatrixWorkgroupScope ) && + ( cooperativeMatrixFlexibleDimensions == rhs.cooperativeMatrixFlexibleDimensions ) && + ( cooperativeMatrixReductions == rhs.cooperativeMatrixReductions ) && ( cooperativeMatrixConversions == rhs.cooperativeMatrixConversions ) && + ( cooperativeMatrixPerElementOperations == rhs.cooperativeMatrixPerElementOperations ) && + ( cooperativeMatrixTensorAddressing == rhs.cooperativeMatrixTensorAddressing ) && + ( cooperativeMatrixBlockLoads == rhs.cooperativeMatrixBlockLoads ); +# endif + } + + bool operator!=( PhysicalDeviceCooperativeMatrix2FeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCooperativeMatrix2FeaturesNV; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 
cooperativeMatrixWorkgroupScope = {}; + VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixFlexibleDimensions = {}; + VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixReductions = {}; + VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixConversions = {}; + VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixPerElementOperations = {}; + VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixTensorAddressing = {}; + VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixBlockLoads = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceCooperativeMatrix2FeaturesNV; + }; + + struct PhysicalDeviceCooperativeMatrix2PropertiesNV + { + using NativeType = VkPhysicalDeviceCooperativeMatrix2PropertiesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCooperativeMatrix2PropertiesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrix2PropertiesNV( uint32_t cooperativeMatrixWorkgroupScopeMaxWorkgroupSize_ = {}, + uint32_t cooperativeMatrixFlexibleDimensionsMaxDimension_ = {}, + uint32_t cooperativeMatrixWorkgroupScopeReservedSharedMemory_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , cooperativeMatrixWorkgroupScopeMaxWorkgroupSize{ cooperativeMatrixWorkgroupScopeMaxWorkgroupSize_ } + , cooperativeMatrixFlexibleDimensionsMaxDimension{ cooperativeMatrixFlexibleDimensionsMaxDimension_ } + , cooperativeMatrixWorkgroupScopeReservedSharedMemory{ cooperativeMatrixWorkgroupScopeReservedSharedMemory_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrix2PropertiesNV( PhysicalDeviceCooperativeMatrix2PropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceCooperativeMatrix2PropertiesNV( VkPhysicalDeviceCooperativeMatrix2PropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceCooperativeMatrix2PropertiesNV( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceCooperativeMatrix2PropertiesNV & operator=( PhysicalDeviceCooperativeMatrix2PropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceCooperativeMatrix2PropertiesNV & operator=( VkPhysicalDeviceCooperativeMatrix2PropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceCooperativeMatrix2PropertiesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCooperativeMatrix2PropertiesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + cooperativeMatrixWorkgroupScopeMaxWorkgroupSize, + cooperativeMatrixFlexibleDimensionsMaxDimension, + cooperativeMatrixWorkgroupScopeReservedSharedMemory ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceCooperativeMatrix2PropertiesNV const & ) const = default; +#else + bool operator==( PhysicalDeviceCooperativeMatrix2PropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && + ( cooperativeMatrixWorkgroupScopeMaxWorkgroupSize == rhs.cooperativeMatrixWorkgroupScopeMaxWorkgroupSize ) && + ( 
cooperativeMatrixFlexibleDimensionsMaxDimension == rhs.cooperativeMatrixFlexibleDimensionsMaxDimension ) && + ( cooperativeMatrixWorkgroupScopeReservedSharedMemory == rhs.cooperativeMatrixWorkgroupScopeReservedSharedMemory ); +# endif + } + + bool operator!=( PhysicalDeviceCooperativeMatrix2PropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCooperativeMatrix2PropertiesNV; + void * pNext = {}; + uint32_t cooperativeMatrixWorkgroupScopeMaxWorkgroupSize = {}; + uint32_t cooperativeMatrixFlexibleDimensionsMaxDimension = {}; + uint32_t cooperativeMatrixWorkgroupScopeReservedSharedMemory = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceCooperativeMatrix2PropertiesNV; + }; + struct PhysicalDeviceCooperativeMatrixFeaturesKHR { using NativeType = VkPhysicalDeviceCooperativeMatrixFeaturesKHR; @@ -60876,6 +63722,104 @@ namespace VULKAN_HPP_NAMESPACE using Type = PhysicalDeviceDepthBiasControlFeaturesEXT; }; + struct PhysicalDeviceDepthClampControlFeaturesEXT + { + using NativeType = VkPhysicalDeviceDepthClampControlFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDepthClampControlFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthClampControlFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClampControl_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , depthClampControl{ depthClampControl_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthClampControlFeaturesEXT( PhysicalDeviceDepthClampControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDepthClampControlFeaturesEXT( VkPhysicalDeviceDepthClampControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDepthClampControlFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceDepthClampControlFeaturesEXT & operator=( PhysicalDeviceDepthClampControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceDepthClampControlFeaturesEXT & operator=( VkPhysicalDeviceDepthClampControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthClampControlFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthClampControlFeaturesEXT & + setDepthClampControl( VULKAN_HPP_NAMESPACE::Bool32 depthClampControl_ ) VULKAN_HPP_NOEXCEPT + { + depthClampControl = depthClampControl_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceDepthClampControlFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDepthClampControlFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, depthClampControl ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDepthClampControlFeaturesEXT const & ) const = default; +#else + bool 
operator==( PhysicalDeviceDepthClampControlFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( depthClampControl == rhs.depthClampControl ); +# endif + } + + bool operator!=( PhysicalDeviceDepthClampControlFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDepthClampControlFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 depthClampControl = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceDepthClampControlFeaturesEXT; + }; + struct PhysicalDeviceDepthClampZeroOneFeaturesEXT { using NativeType = VkPhysicalDeviceDepthClampZeroOneFeaturesEXT; @@ -62668,6 +65612,116 @@ namespace VULKAN_HPP_NAMESPACE using Type = PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV; }; + struct PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT + { + using NativeType = VkPhysicalDeviceDeviceGeneratedCommandsFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDeviceGeneratedCommandsFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedCommands_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 dynamicGeneratedPipelineLayout_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , deviceGeneratedCommands{ deviceGeneratedCommands_ } + , dynamicGeneratedPipelineLayout{ dynamicGeneratedPipelineLayout_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT( PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT( VkPhysicalDeviceDeviceGeneratedCommandsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT & operator=( PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT & operator=( VkPhysicalDeviceDeviceGeneratedCommandsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT & + setDeviceGeneratedCommands( VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedCommands_ ) VULKAN_HPP_NOEXCEPT + { + deviceGeneratedCommands = deviceGeneratedCommands_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT & + setDynamicGeneratedPipelineLayout( VULKAN_HPP_NAMESPACE::Bool32 dynamicGeneratedPipelineLayout_ ) VULKAN_HPP_NOEXCEPT + { + dynamicGeneratedPipelineLayout = dynamicGeneratedPipelineLayout_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceDeviceGeneratedCommandsFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + 
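As a usage sketch (not part of the generated header): the new feature structs added in these hunks are discovered the same way as any other feature struct, by chaining them into a features query. This assumes `physicalDevice` is a vk::PhysicalDevice and that the enhanced-mode StructureChain helpers of vulkan.hpp are enabled (they are unless VULKAN_HPP_DISABLE_ENHANCED_MODE is defined).

    // Sketch: query the new NV cooperative-matrix-2 and EXT depth-clamp-control features.
    auto chain = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2,
                                             vk::PhysicalDeviceCooperativeMatrix2FeaturesNV,
                                             vk::PhysicalDeviceDepthClampControlFeaturesEXT>();

    // Each struct comes back with its Bool32 members filled in by the driver.
    bool hasCoopMatReductions  = chain.get<vk::PhysicalDeviceCooperativeMatrix2FeaturesNV>().cooperativeMatrixReductions;
    bool hasDepthClampControl  = chain.get<vk::PhysicalDeviceDepthClampControlFeaturesEXT>().depthClampControl;

The sType and pNext wiring is handled by the StructureChain, which is why these generated structs only need their defaulted sType members.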
+ operator VkPhysicalDeviceDeviceGeneratedCommandsFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, deviceGeneratedCommands, dynamicGeneratedPipelineLayout ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceGeneratedCommands == rhs.deviceGeneratedCommands ) && + ( dynamicGeneratedPipelineLayout == rhs.dynamicGeneratedPipelineLayout ); +# endif + } + + bool operator!=( PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDeviceGeneratedCommandsFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedCommands = {}; + VULKAN_HPP_NAMESPACE::Bool32 dynamicGeneratedPipelineLayout = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT; + }; + struct PhysicalDeviceDeviceGeneratedCommandsFeaturesNV { using NativeType = VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV; @@ -62767,6 +65821,161 @@ namespace VULKAN_HPP_NAMESPACE using Type = PhysicalDeviceDeviceGeneratedCommandsFeaturesNV; }; + struct PhysicalDeviceDeviceGeneratedCommandsPropertiesEXT + { + using NativeType = VkPhysicalDeviceDeviceGeneratedCommandsPropertiesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDeviceGeneratedCommandsPropertiesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + PhysicalDeviceDeviceGeneratedCommandsPropertiesEXT( uint32_t maxIndirectPipelineCount_ = {}, + uint32_t maxIndirectShaderObjectCount_ = {}, + uint32_t maxIndirectSequenceCount_ = {}, + uint32_t maxIndirectCommandsTokenCount_ = {}, + uint32_t maxIndirectCommandsTokenOffset_ = {}, + uint32_t maxIndirectCommandsIndirectStride_ = {}, + VULKAN_HPP_NAMESPACE::IndirectCommandsInputModeFlagsEXT supportedIndirectCommandsInputModes_ = {}, + VULKAN_HPP_NAMESPACE::ShaderStageFlags supportedIndirectCommandsShaderStages_ = {}, + VULKAN_HPP_NAMESPACE::ShaderStageFlags supportedIndirectCommandsShaderStagesPipelineBinding_ = {}, + VULKAN_HPP_NAMESPACE::ShaderStageFlags supportedIndirectCommandsShaderStagesShaderBinding_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedCommandsTransformFeedback_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedCommandsMultiDrawIndirectCount_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , maxIndirectPipelineCount{ maxIndirectPipelineCount_ } + , maxIndirectShaderObjectCount{ maxIndirectShaderObjectCount_ } + , maxIndirectSequenceCount{ maxIndirectSequenceCount_ } + , maxIndirectCommandsTokenCount{ maxIndirectCommandsTokenCount_ } + , maxIndirectCommandsTokenOffset{ maxIndirectCommandsTokenOffset_ } + , maxIndirectCommandsIndirectStride{ maxIndirectCommandsIndirectStride_ } + , supportedIndirectCommandsInputModes{ 
supportedIndirectCommandsInputModes_ } + , supportedIndirectCommandsShaderStages{ supportedIndirectCommandsShaderStages_ } + , supportedIndirectCommandsShaderStagesPipelineBinding{ supportedIndirectCommandsShaderStagesPipelineBinding_ } + , supportedIndirectCommandsShaderStagesShaderBinding{ supportedIndirectCommandsShaderStagesShaderBinding_ } + , deviceGeneratedCommandsTransformFeedback{ deviceGeneratedCommandsTransformFeedback_ } + , deviceGeneratedCommandsMultiDrawIndirectCount{ deviceGeneratedCommandsMultiDrawIndirectCount_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceDeviceGeneratedCommandsPropertiesEXT( PhysicalDeviceDeviceGeneratedCommandsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceDeviceGeneratedCommandsPropertiesEXT( VkPhysicalDeviceDeviceGeneratedCommandsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDeviceGeneratedCommandsPropertiesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceDeviceGeneratedCommandsPropertiesEXT & + operator=( PhysicalDeviceDeviceGeneratedCommandsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceDeviceGeneratedCommandsPropertiesEXT & operator=( VkPhysicalDeviceDeviceGeneratedCommandsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceDeviceGeneratedCommandsPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDeviceGeneratedCommandsPropertiesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + maxIndirectPipelineCount, + maxIndirectShaderObjectCount, + maxIndirectSequenceCount, + maxIndirectCommandsTokenCount, + maxIndirectCommandsTokenOffset, + maxIndirectCommandsIndirectStride, + supportedIndirectCommandsInputModes, + supportedIndirectCommandsShaderStages, + supportedIndirectCommandsShaderStagesPipelineBinding, + supportedIndirectCommandsShaderStagesShaderBinding, + deviceGeneratedCommandsTransformFeedback, + deviceGeneratedCommandsMultiDrawIndirectCount ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceDeviceGeneratedCommandsPropertiesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceDeviceGeneratedCommandsPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxIndirectPipelineCount == rhs.maxIndirectPipelineCount ) && + ( maxIndirectShaderObjectCount == rhs.maxIndirectShaderObjectCount ) && ( maxIndirectSequenceCount == rhs.maxIndirectSequenceCount ) && + ( maxIndirectCommandsTokenCount == rhs.maxIndirectCommandsTokenCount ) && + ( maxIndirectCommandsTokenOffset == rhs.maxIndirectCommandsTokenOffset ) && + ( maxIndirectCommandsIndirectStride == rhs.maxIndirectCommandsIndirectStride ) && + ( supportedIndirectCommandsInputModes == rhs.supportedIndirectCommandsInputModes ) && + ( supportedIndirectCommandsShaderStages == rhs.supportedIndirectCommandsShaderStages ) && + ( supportedIndirectCommandsShaderStagesPipelineBinding == rhs.supportedIndirectCommandsShaderStagesPipelineBinding ) && + ( 
supportedIndirectCommandsShaderStagesShaderBinding == rhs.supportedIndirectCommandsShaderStagesShaderBinding ) && + ( deviceGeneratedCommandsTransformFeedback == rhs.deviceGeneratedCommandsTransformFeedback ) && + ( deviceGeneratedCommandsMultiDrawIndirectCount == rhs.deviceGeneratedCommandsMultiDrawIndirectCount ); +# endif + } + + bool operator!=( PhysicalDeviceDeviceGeneratedCommandsPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDeviceGeneratedCommandsPropertiesEXT; + void * pNext = {}; + uint32_t maxIndirectPipelineCount = {}; + uint32_t maxIndirectShaderObjectCount = {}; + uint32_t maxIndirectSequenceCount = {}; + uint32_t maxIndirectCommandsTokenCount = {}; + uint32_t maxIndirectCommandsTokenOffset = {}; + uint32_t maxIndirectCommandsIndirectStride = {}; + VULKAN_HPP_NAMESPACE::IndirectCommandsInputModeFlagsEXT supportedIndirectCommandsInputModes = {}; + VULKAN_HPP_NAMESPACE::ShaderStageFlags supportedIndirectCommandsShaderStages = {}; + VULKAN_HPP_NAMESPACE::ShaderStageFlags supportedIndirectCommandsShaderStagesPipelineBinding = {}; + VULKAN_HPP_NAMESPACE::ShaderStageFlags supportedIndirectCommandsShaderStagesShaderBinding = {}; + VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedCommandsTransformFeedback = {}; + VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedCommandsMultiDrawIndirectCount = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceDeviceGeneratedCommandsPropertiesEXT; + }; + struct PhysicalDeviceDeviceGeneratedCommandsPropertiesNV { using NativeType = VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV; @@ -68353,6 +71562,102 @@ namespace VULKAN_HPP_NAMESPACE using PhysicalDeviceGroupPropertiesKHR = PhysicalDeviceGroupProperties; + struct PhysicalDeviceHdrVividFeaturesHUAWEI + { + using NativeType = VkPhysicalDeviceHdrVividFeaturesHUAWEI; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceHdrVividFeaturesHUAWEI; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceHdrVividFeaturesHUAWEI( VULKAN_HPP_NAMESPACE::Bool32 hdrVivid_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , hdrVivid{ hdrVivid_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceHdrVividFeaturesHUAWEI( PhysicalDeviceHdrVividFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceHdrVividFeaturesHUAWEI( VkPhysicalDeviceHdrVividFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceHdrVividFeaturesHUAWEI( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceHdrVividFeaturesHUAWEI & operator=( PhysicalDeviceHdrVividFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceHdrVividFeaturesHUAWEI & operator=( VkPhysicalDeviceHdrVividFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHdrVividFeaturesHUAWEI & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHdrVividFeaturesHUAWEI & setHdrVivid( VULKAN_HPP_NAMESPACE::Bool32 hdrVivid_ ) VULKAN_HPP_NOEXCEPT + { + hdrVivid = hdrVivid_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator 
VkPhysicalDeviceHdrVividFeaturesHUAWEI const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceHdrVividFeaturesHUAWEI &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, hdrVivid ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceHdrVividFeaturesHUAWEI const & ) const = default; +#else + bool operator==( PhysicalDeviceHdrVividFeaturesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( hdrVivid == rhs.hdrVivid ); +# endif + } + + bool operator!=( PhysicalDeviceHdrVividFeaturesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceHdrVividFeaturesHUAWEI; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 hdrVivid = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceHdrVividFeaturesHUAWEI; + }; + struct PhysicalDeviceHostImageCopyFeaturesEXT { using NativeType = VkPhysicalDeviceHostImageCopyFeaturesEXT; @@ -79104,6 +82409,106 @@ namespace VULKAN_HPP_NAMESPACE using Type = PhysicalDevicePresentIdFeaturesKHR; }; + struct PhysicalDevicePresentModeFifoLatestReadyFeaturesEXT + { + using NativeType = VkPhysicalDevicePresentModeFifoLatestReadyFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePresentModeFifoLatestReadyFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDevicePresentModeFifoLatestReadyFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 presentModeFifoLatestReady_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , presentModeFifoLatestReady{ presentModeFifoLatestReady_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDevicePresentModeFifoLatestReadyFeaturesEXT( PhysicalDevicePresentModeFifoLatestReadyFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDevicePresentModeFifoLatestReadyFeaturesEXT( VkPhysicalDevicePresentModeFifoLatestReadyFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDevicePresentModeFifoLatestReadyFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDevicePresentModeFifoLatestReadyFeaturesEXT & + operator=( PhysicalDevicePresentModeFifoLatestReadyFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDevicePresentModeFifoLatestReadyFeaturesEXT & operator=( VkPhysicalDevicePresentModeFifoLatestReadyFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentModeFifoLatestReadyFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentModeFifoLatestReadyFeaturesEXT & + setPresentModeFifoLatestReady( VULKAN_HPP_NAMESPACE::Bool32 presentModeFifoLatestReady_ ) VULKAN_HPP_NOEXCEPT + { + presentModeFifoLatestReady = presentModeFifoLatestReady_; + return *this; + } +#endif 
/*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDevicePresentModeFifoLatestReadyFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevicePresentModeFifoLatestReadyFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, presentModeFifoLatestReady ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDevicePresentModeFifoLatestReadyFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDevicePresentModeFifoLatestReadyFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( presentModeFifoLatestReady == rhs.presentModeFifoLatestReady ); +# endif + } + + bool operator!=( PhysicalDevicePresentModeFifoLatestReadyFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePresentModeFifoLatestReadyFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 presentModeFifoLatestReady = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDevicePresentModeFifoLatestReadyFeaturesEXT; + }; + struct PhysicalDevicePresentWaitFeaturesKHR { using NativeType = VkPhysicalDevicePresentWaitFeaturesKHR; @@ -84357,10 +87762,12 @@ namespace VULKAN_HPP_NAMESPACE static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderEnqueueFeaturesAMDX; # if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderEnqueueFeaturesAMDX( VULKAN_HPP_NAMESPACE::Bool32 shaderEnqueue_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderEnqueueFeaturesAMDX( VULKAN_HPP_NAMESPACE::Bool32 shaderEnqueue_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderMeshEnqueue_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , shaderEnqueue{ shaderEnqueue_ } + , shaderMeshEnqueue{ shaderMeshEnqueue_ } { } @@ -84392,6 +87799,13 @@ namespace VULKAN_HPP_NAMESPACE shaderEnqueue = shaderEnqueue_; return *this; } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderEnqueueFeaturesAMDX & + setShaderMeshEnqueue( VULKAN_HPP_NAMESPACE::Bool32 shaderMeshEnqueue_ ) VULKAN_HPP_NOEXCEPT + { + shaderMeshEnqueue = shaderMeshEnqueue_; + return *this; + } # endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkPhysicalDeviceShaderEnqueueFeaturesAMDX const &() const VULKAN_HPP_NOEXCEPT @@ -84408,11 +87822,11 @@ namespace VULKAN_HPP_NAMESPACE # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { - return std::tie( sType, pNext, shaderEnqueue ); + return std::tie( sType, pNext, shaderEnqueue, shaderMeshEnqueue ); } # endif @@ -84424,7 +87838,7 @@ namespace VULKAN_HPP_NAMESPACE # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderEnqueue == rhs.shaderEnqueue ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderEnqueue == rhs.shaderEnqueue ) && ( shaderMeshEnqueue == rhs.shaderMeshEnqueue ); # endif } @@ -84435,9 +87849,10 @@ 
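As a usage sketch (not part of the generated header): a feature struct such as the PhysicalDevicePresentModeFifoLatestReadyFeaturesEXT defined just above is enabled by linking it into DeviceCreateInfo::pNext at device creation, after confirming support via the query pattern shown earlier. The snippet assumes `physicalDevice` and a prepared `queueCreateInfo` exist and that the matching extension name has been added to the enabled-extension list elsewhere.

    // Sketch: opt in to the "FIFO latest ready" present mode feature at device creation.
    vk::PhysicalDevicePresentModeFifoLatestReadyFeaturesEXT fifoLatestReady{};
    fifoLatestReady.presentModeFifoLatestReady = VK_TRUE;

    vk::DeviceCreateInfo deviceInfo{};
    deviceInfo.queueCreateInfoCount = 1;
    deviceInfo.pQueueCreateInfos    = &queueCreateInfo;
    deviceInfo.pNext                = &fifoLatestReady;   // feature struct chained as usual

    vk::Device device = physicalDevice.createDevice( deviceInfo );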
namespace VULKAN_HPP_NAMESPACE # endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderEnqueueFeaturesAMDX; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::Bool32 shaderEnqueue = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderEnqueueFeaturesAMDX; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderEnqueue = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderMeshEnqueue = {}; }; template <> @@ -84456,22 +87871,26 @@ namespace VULKAN_HPP_NAMESPACE static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderEnqueuePropertiesAMDX; # if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderEnqueuePropertiesAMDX( uint32_t maxExecutionGraphDepth_ = {}, - uint32_t maxExecutionGraphShaderOutputNodes_ = {}, - uint32_t maxExecutionGraphShaderPayloadSize_ = {}, - uint32_t maxExecutionGraphShaderPayloadCount_ = {}, - uint32_t executionGraphDispatchAddressAlignment_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderEnqueuePropertiesAMDX( uint32_t maxExecutionGraphDepth_ = {}, + uint32_t maxExecutionGraphShaderOutputNodes_ = {}, + uint32_t maxExecutionGraphShaderPayloadSize_ = {}, + uint32_t maxExecutionGraphShaderPayloadCount_ = {}, + uint32_t executionGraphDispatchAddressAlignment_ = {}, + std::array const & maxExecutionGraphWorkgroupCount_ = {}, + uint32_t maxExecutionGraphWorkgroups_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , maxExecutionGraphDepth{ maxExecutionGraphDepth_ } , maxExecutionGraphShaderOutputNodes{ maxExecutionGraphShaderOutputNodes_ } , maxExecutionGraphShaderPayloadSize{ maxExecutionGraphShaderPayloadSize_ } , maxExecutionGraphShaderPayloadCount{ maxExecutionGraphShaderPayloadCount_ } , executionGraphDispatchAddressAlignment{ executionGraphDispatchAddressAlignment_ } + , maxExecutionGraphWorkgroupCount{ maxExecutionGraphWorkgroupCount_ } + , maxExecutionGraphWorkgroups{ maxExecutionGraphWorkgroups_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderEnqueuePropertiesAMDX( PhysicalDeviceShaderEnqueuePropertiesAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderEnqueuePropertiesAMDX( PhysicalDeviceShaderEnqueuePropertiesAMDX const & rhs ) VULKAN_HPP_NOEXCEPT = default; PhysicalDeviceShaderEnqueuePropertiesAMDX( VkPhysicalDeviceShaderEnqueuePropertiesAMDX const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceShaderEnqueuePropertiesAMDX( *reinterpret_cast( &rhs ) ) @@ -84527,6 +87946,20 @@ namespace VULKAN_HPP_NAMESPACE executionGraphDispatchAddressAlignment = executionGraphDispatchAddressAlignment_; return *this; } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderEnqueuePropertiesAMDX & + setMaxExecutionGraphWorkgroupCount( std::array maxExecutionGraphWorkgroupCount_ ) VULKAN_HPP_NOEXCEPT + { + maxExecutionGraphWorkgroupCount = maxExecutionGraphWorkgroupCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderEnqueuePropertiesAMDX & + setMaxExecutionGraphWorkgroups( uint32_t maxExecutionGraphWorkgroups_ ) VULKAN_HPP_NOEXCEPT + { + maxExecutionGraphWorkgroups = maxExecutionGraphWorkgroups_; + return *this; + } # endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkPhysicalDeviceShaderEnqueuePropertiesAMDX const &() const VULKAN_HPP_NOEXCEPT @@ -84549,6 +87982,8 @@ namespace VULKAN_HPP_NAMESPACE uint32_t const &, uint32_t const &, uint32_t const &, + uint32_t const &, + VULKAN_HPP_NAMESPACE::ArrayWrapper1D 
const &, uint32_t const &> # endif reflect() const VULKAN_HPP_NOEXCEPT @@ -84559,7 +87994,9 @@ namespace VULKAN_HPP_NAMESPACE maxExecutionGraphShaderOutputNodes, maxExecutionGraphShaderPayloadSize, maxExecutionGraphShaderPayloadCount, - executionGraphDispatchAddressAlignment ); + executionGraphDispatchAddressAlignment, + maxExecutionGraphWorkgroupCount, + maxExecutionGraphWorkgroups ); } # endif @@ -84575,7 +88012,8 @@ namespace VULKAN_HPP_NAMESPACE ( maxExecutionGraphShaderOutputNodes == rhs.maxExecutionGraphShaderOutputNodes ) && ( maxExecutionGraphShaderPayloadSize == rhs.maxExecutionGraphShaderPayloadSize ) && ( maxExecutionGraphShaderPayloadCount == rhs.maxExecutionGraphShaderPayloadCount ) && - ( executionGraphDispatchAddressAlignment == rhs.executionGraphDispatchAddressAlignment ); + ( executionGraphDispatchAddressAlignment == rhs.executionGraphDispatchAddressAlignment ) && + ( maxExecutionGraphWorkgroupCount == rhs.maxExecutionGraphWorkgroupCount ) && ( maxExecutionGraphWorkgroups == rhs.maxExecutionGraphWorkgroups ); # endif } @@ -84586,13 +88024,15 @@ namespace VULKAN_HPP_NAMESPACE # endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderEnqueuePropertiesAMDX; - void * pNext = {}; - uint32_t maxExecutionGraphDepth = {}; - uint32_t maxExecutionGraphShaderOutputNodes = {}; - uint32_t maxExecutionGraphShaderPayloadSize = {}; - uint32_t maxExecutionGraphShaderPayloadCount = {}; - uint32_t executionGraphDispatchAddressAlignment = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderEnqueuePropertiesAMDX; + void * pNext = {}; + uint32_t maxExecutionGraphDepth = {}; + uint32_t maxExecutionGraphShaderOutputNodes = {}; + uint32_t maxExecutionGraphShaderPayloadSize = {}; + uint32_t maxExecutionGraphShaderPayloadCount = {}; + uint32_t executionGraphDispatchAddressAlignment = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D maxExecutionGraphWorkgroupCount = {}; + uint32_t maxExecutionGraphWorkgroups = {}; }; template <> @@ -96561,98 +100001,6 @@ namespace VULKAN_HPP_NAMESPACE using PipelineInfoEXT = PipelineInfoKHR; - struct PushConstantRange - { - using NativeType = VkPushConstantRange; - -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR - PushConstantRange( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ = {}, uint32_t offset_ = {}, uint32_t size_ = {} ) VULKAN_HPP_NOEXCEPT - : stageFlags{ stageFlags_ } - , offset{ offset_ } - , size{ size_ } - { - } - - VULKAN_HPP_CONSTEXPR PushConstantRange( PushConstantRange const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - PushConstantRange( VkPushConstantRange const & rhs ) VULKAN_HPP_NOEXCEPT : PushConstantRange( *reinterpret_cast( &rhs ) ) {} - - PushConstantRange & operator=( PushConstantRange const & rhs ) VULKAN_HPP_NOEXCEPT = default; -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - PushConstantRange & operator=( VkPushConstantRange const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast( &rhs ); - return *this; - } - -#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PushConstantRange & setStageFlags( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ ) VULKAN_HPP_NOEXCEPT - { - stageFlags = stageFlags_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 PushConstantRange & setOffset( uint32_t offset_ ) VULKAN_HPP_NOEXCEPT - { - offset = offset_; - return *this; - } - - VULKAN_HPP_CONSTEXPR_14 PushConstantRange & setSize( uint32_t size_ ) VULKAN_HPP_NOEXCEPT - { - size = size_; - return *this; - } -#endif 
/*VULKAN_HPP_NO_STRUCT_SETTERS*/ - - operator VkPushConstantRange const &() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkPushConstantRange &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION - auto -# else - std::tuple -# endif - reflect() const VULKAN_HPP_NOEXCEPT - { - return std::tie( stageFlags, offset, size ); - } -#endif - -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PushConstantRange const & ) const = default; -#else - bool operator==( PushConstantRange const & rhs ) const VULKAN_HPP_NOEXCEPT - { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( stageFlags == rhs.stageFlags ) && ( offset == rhs.offset ) && ( size == rhs.size ); -# endif - } - - bool operator!=( PushConstantRange const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - public: - VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags = {}; - uint32_t offset = {}; - uint32_t size = {}; - }; - struct PipelineLayoutCreateInfo { using NativeType = VkPipelineLayoutCreateInfo; @@ -98922,6 +102270,119 @@ namespace VULKAN_HPP_NAMESPACE using Type = PipelineViewportCoarseSampleOrderStateCreateInfoNV; }; + struct PipelineViewportDepthClampControlCreateInfoEXT + { + using NativeType = VkPipelineViewportDepthClampControlCreateInfoEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineViewportDepthClampControlCreateInfoEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PipelineViewportDepthClampControlCreateInfoEXT( + VULKAN_HPP_NAMESPACE::DepthClampModeEXT depthClampMode_ = VULKAN_HPP_NAMESPACE::DepthClampModeEXT::eViewportRange, + const VULKAN_HPP_NAMESPACE::DepthClampRangeEXT * pDepthClampRange_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , depthClampMode{ depthClampMode_ } + , pDepthClampRange{ pDepthClampRange_ } + { + } + + VULKAN_HPP_CONSTEXPR + PipelineViewportDepthClampControlCreateInfoEXT( PipelineViewportDepthClampControlCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PipelineViewportDepthClampControlCreateInfoEXT( VkPipelineViewportDepthClampControlCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineViewportDepthClampControlCreateInfoEXT( *reinterpret_cast( &rhs ) ) + { + } + + PipelineViewportDepthClampControlCreateInfoEXT & operator=( PipelineViewportDepthClampControlCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PipelineViewportDepthClampControlCreateInfoEXT & operator=( VkPipelineViewportDepthClampControlCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PipelineViewportDepthClampControlCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineViewportDepthClampControlCreateInfoEXT & + setDepthClampMode( VULKAN_HPP_NAMESPACE::DepthClampModeEXT depthClampMode_ ) VULKAN_HPP_NOEXCEPT + { + depthClampMode = depthClampMode_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PipelineViewportDepthClampControlCreateInfoEXT & + setPDepthClampRange( const VULKAN_HPP_NAMESPACE::DepthClampRangeEXT * pDepthClampRange_ ) VULKAN_HPP_NOEXCEPT + { + 
pDepthClampRange = pDepthClampRange_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPipelineViewportDepthClampControlCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPipelineViewportDepthClampControlCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, depthClampMode, pDepthClampRange ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PipelineViewportDepthClampControlCreateInfoEXT const & ) const = default; +#else + bool operator==( PipelineViewportDepthClampControlCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( depthClampMode == rhs.depthClampMode ) && ( pDepthClampRange == rhs.pDepthClampRange ); +# endif + } + + bool operator!=( PipelineViewportDepthClampControlCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportDepthClampControlCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DepthClampModeEXT depthClampMode = VULKAN_HPP_NAMESPACE::DepthClampModeEXT::eViewportRange; + const VULKAN_HPP_NAMESPACE::DepthClampRangeEXT * pDepthClampRange = {}; + }; + + template <> + struct CppType + { + using Type = PipelineViewportDepthClampControlCreateInfoEXT; + }; + struct PipelineViewportDepthClipControlCreateInfoEXT { using NativeType = VkPipelineViewportDepthClipControlCreateInfoEXT; @@ -128671,6 +132132,218 @@ namespace VULKAN_HPP_NAMESPACE using WriteDescriptorSetInlineUniformBlockEXT = WriteDescriptorSetInlineUniformBlock; + struct WriteIndirectExecutionSetPipelineEXT + { + using NativeType = VkWriteIndirectExecutionSetPipelineEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWriteIndirectExecutionSetPipelineEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR WriteIndirectExecutionSetPipelineEXT( uint32_t index_ = {}, + VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , index{ index_ } + , pipeline{ pipeline_ } + { + } + + VULKAN_HPP_CONSTEXPR WriteIndirectExecutionSetPipelineEXT( WriteIndirectExecutionSetPipelineEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + WriteIndirectExecutionSetPipelineEXT( VkWriteIndirectExecutionSetPipelineEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : WriteIndirectExecutionSetPipelineEXT( *reinterpret_cast( &rhs ) ) + { + } + + WriteIndirectExecutionSetPipelineEXT & operator=( WriteIndirectExecutionSetPipelineEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + WriteIndirectExecutionSetPipelineEXT & operator=( VkWriteIndirectExecutionSetPipelineEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 WriteIndirectExecutionSetPipelineEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 
WriteIndirectExecutionSetPipelineEXT & setIndex( uint32_t index_ ) VULKAN_HPP_NOEXCEPT + { + index = index_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 WriteIndirectExecutionSetPipelineEXT & setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT + { + pipeline = pipeline_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkWriteIndirectExecutionSetPipelineEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkWriteIndirectExecutionSetPipelineEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, index, pipeline ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( WriteIndirectExecutionSetPipelineEXT const & ) const = default; +#else + bool operator==( WriteIndirectExecutionSetPipelineEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( index == rhs.index ) && ( pipeline == rhs.pipeline ); +# endif + } + + bool operator!=( WriteIndirectExecutionSetPipelineEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWriteIndirectExecutionSetPipelineEXT; + const void * pNext = {}; + uint32_t index = {}; + VULKAN_HPP_NAMESPACE::Pipeline pipeline = {}; + }; + + template <> + struct CppType + { + using Type = WriteIndirectExecutionSetPipelineEXT; + }; + + struct WriteIndirectExecutionSetShaderEXT + { + using NativeType = VkWriteIndirectExecutionSetShaderEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWriteIndirectExecutionSetShaderEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR WriteIndirectExecutionSetShaderEXT( uint32_t index_ = {}, + VULKAN_HPP_NAMESPACE::ShaderEXT shader_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , index{ index_ } + , shader{ shader_ } + { + } + + VULKAN_HPP_CONSTEXPR WriteIndirectExecutionSetShaderEXT( WriteIndirectExecutionSetShaderEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + WriteIndirectExecutionSetShaderEXT( VkWriteIndirectExecutionSetShaderEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : WriteIndirectExecutionSetShaderEXT( *reinterpret_cast( &rhs ) ) + { + } + + WriteIndirectExecutionSetShaderEXT & operator=( WriteIndirectExecutionSetShaderEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + WriteIndirectExecutionSetShaderEXT & operator=( VkWriteIndirectExecutionSetShaderEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 WriteIndirectExecutionSetShaderEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 WriteIndirectExecutionSetShaderEXT & setIndex( uint32_t index_ ) VULKAN_HPP_NOEXCEPT + { + index = index_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 WriteIndirectExecutionSetShaderEXT & setShader( VULKAN_HPP_NAMESPACE::ShaderEXT shader_ ) VULKAN_HPP_NOEXCEPT + { + shader = shader_; + return 
*this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkWriteIndirectExecutionSetShaderEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkWriteIndirectExecutionSetShaderEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, index, shader ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( WriteIndirectExecutionSetShaderEXT const & ) const = default; +#else + bool operator==( WriteIndirectExecutionSetShaderEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( index == rhs.index ) && ( shader == rhs.shader ); +# endif + } + + bool operator!=( WriteIndirectExecutionSetShaderEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWriteIndirectExecutionSetShaderEXT; + const void * pNext = {}; + uint32_t index = {}; + VULKAN_HPP_NAMESPACE::ShaderEXT shader = {}; + }; + + template <> + struct CppType + { + using Type = WriteIndirectExecutionSetShaderEXT; + }; + #if defined( VK_USE_PLATFORM_XCB_KHR ) struct XcbSurfaceCreateInfoKHR { diff --git a/third_party/vulkan/vulkan_to_string.hpp b/third_party/vulkan/vulkan_to_string.hpp index 8417c14..1c00c12 100644 --- a/third_party/vulkan/vulkan_to_string.hpp +++ b/third_party/vulkan/vulkan_to_string.hpp @@ -10,10 +10,23 @@ #include -#if __cpp_lib_format -# include // std::format +// ignore warnings on using deprecated enum values in this header +#if defined( __clang__ ) || defined( __GNUC__ ) +# pragma GCC diagnostic push +# pragma GCC diagnostic ignored "-Wdeprecated-declarations" +#elif defined( _MSC_VER ) +# pragma warning( push ) +# pragma warning( disable : 4996 ) +#endif + +#if defined( VULKAN_HPP_ENABLE_STD_MODULE ) && defined( VULKAN_HPP_STD_MODULE ) +import VULKAN_HPP_STD_MODULE; #else -# include // std::stringstream +# if __cpp_lib_format +# include // std::format +# else +# include // std::stringstream +# endif #endif namespace VULKAN_HPP_NAMESPACE @@ -736,10 +749,6 @@ namespace VULKAN_HPP_NAMESPACE result += "FailOnPipelineCompileRequired | "; if ( value & PipelineCreateFlagBits::eEarlyReturnOnFailure ) result += "EarlyReturnOnFailure | "; - if ( value & PipelineCreateFlagBits::eRenderingFragmentShadingRateAttachmentKHR ) - result += "RenderingFragmentShadingRateAttachmentKHR | "; - if ( value & PipelineCreateFlagBits::eRenderingFragmentDensityMapAttachmentEXT ) - result += "RenderingFragmentDensityMapAttachmentEXT | "; if ( value & PipelineCreateFlagBits::eRayTracingNoNullAnyHitShadersKHR ) result += "RayTracingNoNullAnyHitShadersKHR | "; if ( value & PipelineCreateFlagBits::eRayTracingNoNullClosestHitShadersKHR ) @@ -756,6 +765,10 @@ namespace VULKAN_HPP_NAMESPACE result += "RayTracingShaderGroupHandleCaptureReplayKHR | "; if ( value & PipelineCreateFlagBits::eDeferCompileNV ) result += "DeferCompileNV | "; + if ( value & PipelineCreateFlagBits::eRenderingFragmentDensityMapAttachmentEXT ) + result += "RenderingFragmentDensityMapAttachmentEXT | "; + if ( value & PipelineCreateFlagBits::eRenderingFragmentShadingRateAttachmentKHR ) + result += "RenderingFragmentShadingRateAttachmentKHR | "; if ( value 
& PipelineCreateFlagBits::eCaptureStatisticsKHR ) result += "CaptureStatisticsKHR | "; if ( value & PipelineCreateFlagBits::eCaptureInternalRepresentationsKHR ) @@ -3372,6 +3385,10 @@ namespace VULKAN_HPP_NAMESPACE result += "AllowDerivatives | "; if ( value & PipelineCreateFlagBits2KHR::eDerivative ) result += "Derivative | "; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + if ( value & PipelineCreateFlagBits2KHR::eExecutionGraphAMDX ) + result += "ExecutionGraphAMDX | "; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ if ( value & PipelineCreateFlagBits2KHR::eEnableLegacyDitheringEXT ) result += "EnableLegacyDitheringEXT | "; if ( value & PipelineCreateFlagBits2KHR::eViewIndexFromDeviceIndex ) @@ -3432,6 +3449,8 @@ namespace VULKAN_HPP_NAMESPACE result += "DescriptorBufferEXT | "; if ( value & PipelineCreateFlagBits2KHR::eCaptureData ) result += "CaptureData | "; + if ( value & PipelineCreateFlagBits2KHR::eIndirectBindableEXT ) + result += "IndirectBindableEXT | "; return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } @@ -3496,6 +3515,8 @@ namespace VULKAN_HPP_NAMESPACE result += "MicromapBuildInputReadOnlyEXT | "; if ( value & BufferUsageFlagBits2KHR::eMicromapStorageEXT ) result += "MicromapStorageEXT | "; + if ( value & BufferUsageFlagBits2KHR::ePreprocessBufferEXT ) + result += "PreprocessBufferEXT | "; return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } @@ -3522,6 +3543,38 @@ namespace VULKAN_HPP_NAMESPACE result += "FragmentShadingRateAttachment | "; if ( value & ShaderCreateFlagBitsEXT::eFragmentDensityMapAttachment ) result += "FragmentDensityMapAttachment | "; + if ( value & ShaderCreateFlagBitsEXT::eIndirectBindable ) + result += "IndirectBindable | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + //=== VK_EXT_device_generated_commands === + + VULKAN_HPP_INLINE std::string to_string( IndirectCommandsLayoutUsageFlagsEXT value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & IndirectCommandsLayoutUsageFlagBitsEXT::eExplicitPreprocess ) + result += "ExplicitPreprocess | "; + if ( value & IndirectCommandsLayoutUsageFlagBitsEXT::eUnorderedSequences ) + result += "UnorderedSequences | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( IndirectCommandsInputModeFlagsEXT value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & IndirectCommandsInputModeFlagBitsEXT::eVulkanIndexBuffer ) + result += "VulkanIndexBuffer | "; + if ( value & IndirectCommandsInputModeFlagBitsEXT::eDxgiIndexBuffer ) + result += "DxgiIndexBuffer | "; return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } @@ -3919,10 +3972,6 @@ namespace VULKAN_HPP_NAMESPACE case StructureType::eVideoDecodeH264SessionParametersAddInfoKHR: return "VideoDecodeH264SessionParametersAddInfoKHR"; case StructureType::eVideoDecodeH264DpbSlotInfoKHR: return "VideoDecodeH264DpbSlotInfoKHR"; case StructureType::eTextureLodGatherFormatPropertiesAMD: return "TextureLodGatherFormatPropertiesAMD"; - case StructureType::eRenderingFragmentShadingRateAttachmentInfoKHR: return "RenderingFragmentShadingRateAttachmentInfoKHR"; - case StructureType::eRenderingFragmentDensityMapAttachmentInfoEXT: return "RenderingFragmentDensityMapAttachmentInfoEXT"; - case StructureType::eAttachmentSampleCountInfoAMD: return "AttachmentSampleCountInfoAMD"; - case StructureType::eMultiviewPerViewAttributesInfoNVX: return "MultiviewPerViewAttributesInfoNVX"; #if defined( VK_USE_PLATFORM_GGP ) case 
StructureType::eStreamDescriptorSurfaceCreateInfoGGP: return "StreamDescriptorSurfaceCreateInfoGGP"; #endif /*VK_USE_PLATFORM_GGP*/ @@ -3974,6 +4023,7 @@ namespace VULKAN_HPP_NAMESPACE case StructureType::eSwapchainCounterCreateInfoEXT: return "SwapchainCounterCreateInfoEXT"; case StructureType::ePresentTimesInfoGOOGLE: return "PresentTimesInfoGOOGLE"; case StructureType::ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX: return "PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX"; + case StructureType::eMultiviewPerViewAttributesInfoNVX: return "MultiviewPerViewAttributesInfoNVX"; case StructureType::ePipelineViewportSwizzleStateCreateInfoNV: return "PipelineViewportSwizzleStateCreateInfoNV"; case StructureType::ePhysicalDeviceDiscardRectanglePropertiesEXT: return "PhysicalDeviceDiscardRectanglePropertiesEXT"; case StructureType::ePipelineDiscardRectangleStateCreateInfoEXT: return "PipelineDiscardRectangleStateCreateInfoEXT"; @@ -4033,6 +4083,7 @@ namespace VULKAN_HPP_NAMESPACE case StructureType::eExecutionGraphPipelineCreateInfoAMDX: return "ExecutionGraphPipelineCreateInfoAMDX"; case StructureType::ePipelineShaderStageNodeCreateInfoAMDX: return "PipelineShaderStageNodeCreateInfoAMDX"; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ + case StructureType::eAttachmentSampleCountInfoAMD: return "AttachmentSampleCountInfoAMD"; case StructureType::eSampleLocationsInfoEXT: return "SampleLocationsInfoEXT"; case StructureType::eRenderPassSampleLocationsBeginInfoEXT: return "RenderPassSampleLocationsBeginInfoEXT"; case StructureType::ePipelineSampleLocationsStateCreateInfoEXT: return "PipelineSampleLocationsStateCreateInfoEXT"; @@ -4124,6 +4175,8 @@ namespace VULKAN_HPP_NAMESPACE case StructureType::ePhysicalDeviceExclusiveScissorFeaturesNV: return "PhysicalDeviceExclusiveScissorFeaturesNV"; case StructureType::eCheckpointDataNV: return "CheckpointDataNV"; case StructureType::eQueueFamilyCheckpointPropertiesNV: return "QueueFamilyCheckpointPropertiesNV"; + case StructureType::eQueueFamilyCheckpointProperties2NV: return "QueueFamilyCheckpointProperties2NV"; + case StructureType::eCheckpointData2NV: return "CheckpointData2NV"; case StructureType::ePhysicalDeviceShaderIntegerFunctions2FeaturesINTEL: return "PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL"; case StructureType::eQueryPoolPerformanceQueryCreateInfoINTEL: return "QueryPoolPerformanceQueryCreateInfoINTEL"; case StructureType::eInitializePerformanceApiInfoINTEL: return "InitializePerformanceApiInfoINTEL"; @@ -4143,11 +4196,13 @@ namespace VULKAN_HPP_NAMESPACE case StructureType::ePhysicalDeviceFragmentDensityMapFeaturesEXT: return "PhysicalDeviceFragmentDensityMapFeaturesEXT"; case StructureType::ePhysicalDeviceFragmentDensityMapPropertiesEXT: return "PhysicalDeviceFragmentDensityMapPropertiesEXT"; case StructureType::eRenderPassFragmentDensityMapCreateInfoEXT: return "RenderPassFragmentDensityMapCreateInfoEXT"; + case StructureType::eRenderingFragmentDensityMapAttachmentInfoEXT: return "RenderingFragmentDensityMapAttachmentInfoEXT"; case StructureType::eFragmentShadingRateAttachmentInfoKHR: return "FragmentShadingRateAttachmentInfoKHR"; case StructureType::ePipelineFragmentShadingRateStateCreateInfoKHR: return "PipelineFragmentShadingRateStateCreateInfoKHR"; case StructureType::ePhysicalDeviceFragmentShadingRatePropertiesKHR: return "PhysicalDeviceFragmentShadingRatePropertiesKHR"; case StructureType::ePhysicalDeviceFragmentShadingRateFeaturesKHR: return "PhysicalDeviceFragmentShadingRateFeaturesKHR"; case 
StructureType::ePhysicalDeviceFragmentShadingRateKHR: return "PhysicalDeviceFragmentShadingRateKHR"; + case StructureType::eRenderingFragmentShadingRateAttachmentInfoKHR: return "RenderingFragmentShadingRateAttachmentInfoKHR"; case StructureType::ePhysicalDeviceShaderCoreProperties2AMD: return "PhysicalDeviceShaderCoreProperties2AMD"; case StructureType::ePhysicalDeviceCoherentMemoryFeaturesAMD: return "PhysicalDeviceCoherentMemoryFeaturesAMD"; case StructureType::ePhysicalDeviceDynamicRenderingLocalReadFeaturesKHR: return "PhysicalDeviceDynamicRenderingLocalReadFeaturesKHR"; @@ -4279,8 +4334,6 @@ namespace VULKAN_HPP_NAMESPACE case StructureType::eExportMetalSharedEventInfoEXT: return "ExportMetalSharedEventInfoEXT"; case StructureType::eImportMetalSharedEventInfoEXT: return "ImportMetalSharedEventInfoEXT"; #endif /*VK_USE_PLATFORM_METAL_EXT*/ - case StructureType::eQueueFamilyCheckpointProperties2NV: return "QueueFamilyCheckpointProperties2NV"; - case StructureType::eCheckpointData2NV: return "CheckpointData2NV"; case StructureType::ePhysicalDeviceDescriptorBufferPropertiesEXT: return "PhysicalDeviceDescriptorBufferPropertiesEXT"; case StructureType::ePhysicalDeviceDescriptorBufferDensityMapPropertiesEXT: return "PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT"; case StructureType::ePhysicalDeviceDescriptorBufferFeaturesEXT: return "PhysicalDeviceDescriptorBufferFeaturesEXT"; @@ -4335,6 +4388,7 @@ namespace VULKAN_HPP_NAMESPACE case StructureType::ePhysicalDeviceDepthClipControlFeaturesEXT: return "PhysicalDeviceDepthClipControlFeaturesEXT"; case StructureType::ePipelineViewportDepthClipControlCreateInfoEXT: return "PipelineViewportDepthClipControlCreateInfoEXT"; case StructureType::ePhysicalDevicePrimitiveTopologyListRestartFeaturesEXT: return "PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT"; + case StructureType::ePhysicalDevicePresentModeFifoLatestReadyFeaturesEXT: return "PhysicalDevicePresentModeFifoLatestReadyFeaturesEXT"; #if defined( VK_USE_PLATFORM_FUCHSIA ) case StructureType::eImportMemoryZirconHandleInfoFUCHSIA: return "ImportMemoryZirconHandleInfoFUCHSIA"; case StructureType::eMemoryZirconHandlePropertiesFUCHSIA: return "MemoryZirconHandlePropertiesFUCHSIA"; @@ -4580,9 +4634,30 @@ namespace VULKAN_HPP_NAMESPACE case StructureType::ePhysicalDeviceShaderAtomicFloat16VectorFeaturesNV: return "PhysicalDeviceShaderAtomicFloat16VectorFeaturesNV"; case StructureType::ePhysicalDeviceShaderReplicatedCompositesFeaturesEXT: return "PhysicalDeviceShaderReplicatedCompositesFeaturesEXT"; case StructureType::ePhysicalDeviceRayTracingValidationFeaturesNV: return "PhysicalDeviceRayTracingValidationFeaturesNV"; + case StructureType::ePhysicalDeviceDeviceGeneratedCommandsFeaturesEXT: return "PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT"; + case StructureType::ePhysicalDeviceDeviceGeneratedCommandsPropertiesEXT: return "PhysicalDeviceDeviceGeneratedCommandsPropertiesEXT"; + case StructureType::eGeneratedCommandsMemoryRequirementsInfoEXT: return "GeneratedCommandsMemoryRequirementsInfoEXT"; + case StructureType::eIndirectExecutionSetCreateInfoEXT: return "IndirectExecutionSetCreateInfoEXT"; + case StructureType::eGeneratedCommandsInfoEXT: return "GeneratedCommandsInfoEXT"; + case StructureType::eIndirectCommandsLayoutCreateInfoEXT: return "IndirectCommandsLayoutCreateInfoEXT"; + case StructureType::eIndirectCommandsLayoutTokenEXT: return "IndirectCommandsLayoutTokenEXT"; + case StructureType::eWriteIndirectExecutionSetPipelineEXT: return "WriteIndirectExecutionSetPipelineEXT"; + 
case StructureType::eWriteIndirectExecutionSetShaderEXT: return "WriteIndirectExecutionSetShaderEXT"; + case StructureType::eIndirectExecutionSetPipelineInfoEXT: return "IndirectExecutionSetPipelineInfoEXT"; + case StructureType::eIndirectExecutionSetShaderInfoEXT: return "IndirectExecutionSetShaderInfoEXT"; + case StructureType::eIndirectExecutionSetShaderLayoutInfoEXT: return "IndirectExecutionSetShaderLayoutInfoEXT"; + case StructureType::eGeneratedCommandsPipelineInfoEXT: return "GeneratedCommandsPipelineInfoEXT"; + case StructureType::eGeneratedCommandsShaderInfoEXT: return "GeneratedCommandsShaderInfoEXT"; case StructureType::ePhysicalDeviceImageAlignmentControlFeaturesMESA: return "PhysicalDeviceImageAlignmentControlFeaturesMESA"; case StructureType::ePhysicalDeviceImageAlignmentControlPropertiesMESA: return "PhysicalDeviceImageAlignmentControlPropertiesMESA"; case StructureType::eImageAlignmentControlCreateInfoMESA: return "ImageAlignmentControlCreateInfoMESA"; + case StructureType::ePhysicalDeviceDepthClampControlFeaturesEXT: return "PhysicalDeviceDepthClampControlFeaturesEXT"; + case StructureType::ePipelineViewportDepthClampControlCreateInfoEXT: return "PipelineViewportDepthClampControlCreateInfoEXT"; + case StructureType::ePhysicalDeviceHdrVividFeaturesHUAWEI: return "PhysicalDeviceHdrVividFeaturesHUAWEI"; + case StructureType::eHdrVividDynamicMetadataHUAWEI: return "HdrVividDynamicMetadataHUAWEI"; + case StructureType::ePhysicalDeviceCooperativeMatrix2FeaturesNV: return "PhysicalDeviceCooperativeMatrix2FeaturesNV"; + case StructureType::eCooperativeMatrixFlexibleDimensionsPropertiesNV: return "CooperativeMatrixFlexibleDimensionsPropertiesNV"; + case StructureType::ePhysicalDeviceCooperativeMatrix2PropertiesNV: return "PhysicalDeviceCooperativeMatrix2PropertiesNV"; default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } @@ -4656,6 +4731,8 @@ namespace VULKAN_HPP_NAMESPACE case ObjectType::eOpticalFlowSessionNV: return "OpticalFlowSessionNV"; case ObjectType::eShaderEXT: return "ShaderEXT"; case ObjectType::ePipelineBinaryKHR: return "PipelineBinaryKHR"; + case ObjectType::eIndirectCommandsLayoutEXT: return "IndirectCommandsLayoutEXT"; + case ObjectType::eIndirectExecutionSetEXT: return "IndirectExecutionSetEXT"; default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } @@ -5701,6 +5778,7 @@ namespace VULKAN_HPP_NAMESPACE case DynamicState::eCoverageReductionModeNV: return "CoverageReductionModeNV"; case DynamicState::eAttachmentFeedbackLoopEnableEXT: return "AttachmentFeedbackLoopEnableEXT"; case DynamicState::eLineStippleKHR: return "LineStippleKHR"; + case DynamicState::eDepthClampRangeEXT: return "DepthClampRangeEXT"; default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } @@ -5750,8 +5828,6 @@ namespace VULKAN_HPP_NAMESPACE case PipelineCreateFlagBits::eDispatchBase: return "DispatchBase"; case PipelineCreateFlagBits::eFailOnPipelineCompileRequired: return "FailOnPipelineCompileRequired"; case PipelineCreateFlagBits::eEarlyReturnOnFailure: return "EarlyReturnOnFailure"; - case PipelineCreateFlagBits::eRenderingFragmentShadingRateAttachmentKHR: return "RenderingFragmentShadingRateAttachmentKHR"; - case PipelineCreateFlagBits::eRenderingFragmentDensityMapAttachmentEXT: return "RenderingFragmentDensityMapAttachmentEXT"; case PipelineCreateFlagBits::eRayTracingNoNullAnyHitShadersKHR: return "RayTracingNoNullAnyHitShadersKHR"; case 
PipelineCreateFlagBits::eRayTracingNoNullClosestHitShadersKHR: return "RayTracingNoNullClosestHitShadersKHR"; case PipelineCreateFlagBits::eRayTracingNoNullMissShadersKHR: return "RayTracingNoNullMissShadersKHR"; @@ -5760,6 +5836,8 @@ namespace VULKAN_HPP_NAMESPACE case PipelineCreateFlagBits::eRayTracingSkipAabbsKHR: return "RayTracingSkipAabbsKHR"; case PipelineCreateFlagBits::eRayTracingShaderGroupHandleCaptureReplayKHR: return "RayTracingShaderGroupHandleCaptureReplayKHR"; case PipelineCreateFlagBits::eDeferCompileNV: return "DeferCompileNV"; + case PipelineCreateFlagBits::eRenderingFragmentDensityMapAttachmentEXT: return "RenderingFragmentDensityMapAttachmentEXT"; + case PipelineCreateFlagBits::eRenderingFragmentShadingRateAttachmentKHR: return "RenderingFragmentShadingRateAttachmentKHR"; case PipelineCreateFlagBits::eCaptureStatisticsKHR: return "CaptureStatisticsKHR"; case PipelineCreateFlagBits::eCaptureInternalRepresentationsKHR: return "CaptureInternalRepresentationsKHR"; case PipelineCreateFlagBits::eIndirectBindableNV: return "IndirectBindableNV"; @@ -6846,6 +6924,7 @@ namespace VULKAN_HPP_NAMESPACE case PresentModeKHR::eFifoRelaxed: return "FifoRelaxed"; case PresentModeKHR::eSharedDemandRefresh: return "SharedDemandRefresh"; case PresentModeKHR::eSharedContinuousRefresh: return "SharedContinuousRefresh"; + case PresentModeKHR::eFifoLatestReadyEXT: return "FifoLatestReadyEXT"; default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } @@ -8849,6 +8928,9 @@ namespace VULKAN_HPP_NAMESPACE case PipelineCreateFlagBits2KHR::eDisableOptimization: return "DisableOptimization"; case PipelineCreateFlagBits2KHR::eAllowDerivatives: return "AllowDerivatives"; case PipelineCreateFlagBits2KHR::eDerivative: return "Derivative"; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + case PipelineCreateFlagBits2KHR::eExecutionGraphAMDX: return "ExecutionGraphAMDX"; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ case PipelineCreateFlagBits2KHR::eEnableLegacyDitheringEXT: return "EnableLegacyDitheringEXT"; case PipelineCreateFlagBits2KHR::eViewIndexFromDeviceIndex: return "ViewIndexFromDeviceIndex"; case PipelineCreateFlagBits2KHR::eDispatchBase: return "DispatchBase"; @@ -8879,6 +8961,7 @@ namespace VULKAN_HPP_NAMESPACE case PipelineCreateFlagBits2KHR::eRayTracingDisplacementMicromapNV: return "RayTracingDisplacementMicromapNV"; case PipelineCreateFlagBits2KHR::eDescriptorBufferEXT: return "DescriptorBufferEXT"; case PipelineCreateFlagBits2KHR::eCaptureData: return "CaptureData"; + case PipelineCreateFlagBits2KHR::eIndirectBindableEXT: return "IndirectBindableEXT"; default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } @@ -8915,6 +8998,7 @@ namespace VULKAN_HPP_NAMESPACE case BufferUsageFlagBits2KHR::ePushDescriptorsDescriptorBufferEXT: return "PushDescriptorsDescriptorBufferEXT"; case BufferUsageFlagBits2KHR::eMicromapBuildInputReadOnlyEXT: return "MicromapBuildInputReadOnlyEXT"; case BufferUsageFlagBits2KHR::eMicromapStorageEXT: return "MicromapStorageEXT"; + case BufferUsageFlagBits2KHR::ePreprocessBufferEXT: return "PreprocessBufferEXT"; default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } @@ -8955,6 +9039,7 @@ namespace VULKAN_HPP_NAMESPACE case ShaderCreateFlagBitsEXT::eDispatchBase: return "DispatchBase"; case ShaderCreateFlagBitsEXT::eFragmentShadingRateAttachment: return "FragmentShadingRateAttachment"; case ShaderCreateFlagBitsEXT::eFragmentDensityMapAttachment: return 
"FragmentDensityMapAttachment"; + case ShaderCreateFlagBitsEXT::eIndirectBindable: return "IndirectBindable"; default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } @@ -9145,5 +9230,79 @@ namespace VULKAN_HPP_NAMESPACE } } + //=== VK_EXT_device_generated_commands === + + VULKAN_HPP_INLINE std::string to_string( IndirectCommandsTokenTypeEXT value ) + { + switch ( value ) + { + case IndirectCommandsTokenTypeEXT::eExecutionSet: return "ExecutionSet"; + case IndirectCommandsTokenTypeEXT::ePushConstant: return "PushConstant"; + case IndirectCommandsTokenTypeEXT::eSequenceIndex: return "SequenceIndex"; + case IndirectCommandsTokenTypeEXT::eIndexBuffer: return "IndexBuffer"; + case IndirectCommandsTokenTypeEXT::eVertexBuffer: return "VertexBuffer"; + case IndirectCommandsTokenTypeEXT::eDrawIndexed: return "DrawIndexed"; + case IndirectCommandsTokenTypeEXT::eDraw: return "Draw"; + case IndirectCommandsTokenTypeEXT::eDrawIndexedCount: return "DrawIndexedCount"; + case IndirectCommandsTokenTypeEXT::eDrawCount: return "DrawCount"; + case IndirectCommandsTokenTypeEXT::eDispatch: return "Dispatch"; + case IndirectCommandsTokenTypeEXT::eDrawMeshTasksNV: return "DrawMeshTasksNV"; + case IndirectCommandsTokenTypeEXT::eDrawMeshTasksCountNV: return "DrawMeshTasksCountNV"; + case IndirectCommandsTokenTypeEXT::eDrawMeshTasks: return "DrawMeshTasks"; + case IndirectCommandsTokenTypeEXT::eDrawMeshTasksCount: return "DrawMeshTasksCount"; + case IndirectCommandsTokenTypeEXT::eTraceRays2: return "TraceRays2"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( IndirectExecutionSetInfoTypeEXT value ) + { + switch ( value ) + { + case IndirectExecutionSetInfoTypeEXT::ePipelines: return "Pipelines"; + case IndirectExecutionSetInfoTypeEXT::eShaderObjects: return "ShaderObjects"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( IndirectCommandsLayoutUsageFlagBitsEXT value ) + { + switch ( value ) + { + case IndirectCommandsLayoutUsageFlagBitsEXT::eExplicitPreprocess: return "ExplicitPreprocess"; + case IndirectCommandsLayoutUsageFlagBitsEXT::eUnorderedSequences: return "UnorderedSequences"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( IndirectCommandsInputModeFlagBitsEXT value ) + { + switch ( value ) + { + case IndirectCommandsInputModeFlagBitsEXT::eVulkanIndexBuffer: return "VulkanIndexBuffer"; + case IndirectCommandsInputModeFlagBitsEXT::eDxgiIndexBuffer: return "DxgiIndexBuffer"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + //=== VK_EXT_depth_clamp_control === + + VULKAN_HPP_INLINE std::string to_string( DepthClampModeEXT value ) + { + switch ( value ) + { + case DepthClampModeEXT::eViewportRange: return "ViewportRange"; + case DepthClampModeEXT::eUserDefinedRange: return "UserDefinedRange"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + } // namespace VULKAN_HPP_NAMESPACE + +#if defined( __clang__ ) || defined( __GNUC__ ) +# pragma GCC diagnostic pop +#elif defined( _MSC_VER ) +# pragma warning( pop ) +#endif + #endif From 21964fc6283d2455aae3552f610848290cc1154e Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Sun, 3 Nov 2024 00:40:14 +0000 
Subject: [PATCH 068/131] [BOT] update dependencies --- third_party/vulkan/vulkan.cppm | 52 +- third_party/vulkan/vulkan.hpp | 16850 ++++++++-------- third_party/vulkan/vulkan_core.h | 23 +- third_party/vulkan/vulkan_enums.hpp | 2 + .../vulkan/vulkan_extension_inspection.hpp | 13 +- third_party/vulkan/vulkan_funcs.hpp | 214 +- third_party/vulkan/vulkan_handles.hpp | 104 +- third_party/vulkan/vulkan_hash.hpp | 27 + third_party/vulkan/vulkan_hpp_macros.hpp | 35 +- third_party/vulkan/vulkan_raii.hpp | 5747 +++--- third_party/vulkan/vulkan_shared.hpp | 350 +- .../vulkan/vulkan_static_assertions.hpp | 15 + third_party/vulkan/vulkan_structs.hpp | 220 + third_party/vulkan/vulkan_to_string.hpp | 2 + 14 files changed, 12073 insertions(+), 11581 deletions(-) diff --git a/third_party/vulkan/vulkan.cppm b/third_party/vulkan/vulkan.cppm index 5e40ea9..65881c4 100644 --- a/third_party/vulkan/vulkan.cppm +++ b/third_party/vulkan/vulkan.cppm @@ -32,14 +32,17 @@ export namespace VULKAN_HPP_NAMESPACE //===================================== using VULKAN_HPP_NAMESPACE::ArrayWrapper1D; using VULKAN_HPP_NAMESPACE::ArrayWrapper2D; - using VULKAN_HPP_NAMESPACE::DispatchLoaderBase; - using VULKAN_HPP_NAMESPACE::DispatchLoaderDynamic; using VULKAN_HPP_NAMESPACE::Flags; using VULKAN_HPP_NAMESPACE::FlagTraits; + namespace detail + { + using VULKAN_HPP_NAMESPACE::detail::DispatchLoaderBase; + using VULKAN_HPP_NAMESPACE::detail::DispatchLoaderDynamic; #if !defined( VK_NO_PROTOTYPES ) - using VULKAN_HPP_NAMESPACE::DispatchLoaderStatic; + using VULKAN_HPP_NAMESPACE::detail::DispatchLoaderStatic; #endif /*VK_NO_PROTOTYPES*/ + } // namespace detail using VULKAN_HPP_NAMESPACE::operator&; using VULKAN_HPP_NAMESPACE::operator|; @@ -56,14 +59,18 @@ export namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #if !defined( VULKAN_HPP_NO_SMART_HANDLE ) - using VULKAN_HPP_NAMESPACE::ObjectDestroy; - using VULKAN_HPP_NAMESPACE::ObjectDestroyShared; - using VULKAN_HPP_NAMESPACE::ObjectFree; - using VULKAN_HPP_NAMESPACE::ObjectFreeShared; - using VULKAN_HPP_NAMESPACE::ObjectRelease; - using VULKAN_HPP_NAMESPACE::ObjectReleaseShared; - using VULKAN_HPP_NAMESPACE::PoolFree; - using VULKAN_HPP_NAMESPACE::PoolFreeShared; + namespace detail + { + using VULKAN_HPP_NAMESPACE::detail::ObjectDestroy; + using VULKAN_HPP_NAMESPACE::detail::ObjectDestroyShared; + using VULKAN_HPP_NAMESPACE::detail::ObjectFree; + using VULKAN_HPP_NAMESPACE::detail::ObjectFreeShared; + using VULKAN_HPP_NAMESPACE::detail::ObjectRelease; + using VULKAN_HPP_NAMESPACE::detail::ObjectReleaseShared; + using VULKAN_HPP_NAMESPACE::detail::PoolFree; + using VULKAN_HPP_NAMESPACE::detail::PoolFreeShared; + } // namespace detail + using VULKAN_HPP_NAMESPACE::SharedHandle; using VULKAN_HPP_NAMESPACE::UniqueHandle; #endif /*VULKAN_HPP_NO_SMART_HANDLE*/ @@ -2605,6 +2612,10 @@ export namespace VULKAN_HPP_NAMESPACE using VULKAN_HPP_NAMESPACE::EXTDepthClampControlExtensionName; using VULKAN_HPP_NAMESPACE::EXTDepthClampControlSpecVersion; + //=== VK_HUAWEI_hdr_vivid === + using VULKAN_HPP_NAMESPACE::HUAWEIHdrVividExtensionName; + using VULKAN_HPP_NAMESPACE::HUAWEIHdrVividSpecVersion; + //=== VK_NV_cooperative_matrix2 === using VULKAN_HPP_NAMESPACE::NVCooperativeMatrix2ExtensionName; using VULKAN_HPP_NAMESPACE::NVCooperativeMatrix2SpecVersion; @@ -4533,6 +4544,10 @@ export namespace VULKAN_HPP_NAMESPACE using VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClampControlFeaturesEXT; using VULKAN_HPP_NAMESPACE::PipelineViewportDepthClampControlCreateInfoEXT; + //=== 
VK_HUAWEI_hdr_vivid === + using VULKAN_HPP_NAMESPACE::HdrVividDynamicMetadataHUAWEI; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceHdrVividFeaturesHUAWEI; + //=== VK_NV_cooperative_matrix2 === using VULKAN_HPP_NAMESPACE::CooperativeMatrixFlexibleDimensionsPropertiesNV; using VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrix2FeaturesNV; @@ -4883,7 +4898,10 @@ export namespace VULKAN_HPP_NAMESPACE #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #if VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL - using VULKAN_HPP_NAMESPACE::DynamicLoader; + namespace detail + { + using VULKAN_HPP_NAMESPACE::detail::DynamicLoader; + } // namespace detail #endif /*VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL*/ //===================== @@ -4933,9 +4951,13 @@ export namespace VULKAN_HPP_NAMESPACE //====================== using VULKAN_HPP_RAII_NAMESPACE::Context; - using VULKAN_HPP_RAII_NAMESPACE::ContextDispatcher; - using VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher; - using VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher; + + namespace detail + { + using VULKAN_HPP_RAII_NAMESPACE::detail::ContextDispatcher; + using VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher; + using VULKAN_HPP_RAII_NAMESPACE::detail::InstanceDispatcher; + } // namespace detail //==================== //=== RAII HANDLEs === diff --git a/third_party/vulkan/vulkan.hpp b/third_party/vulkan/vulkan.hpp index 1207a40..6ee70ed 100644 --- a/third_party/vulkan/vulkan.hpp +++ b/third_party/vulkan/vulkan.hpp @@ -63,7 +63,7 @@ extern "C" __declspec( dllimport ) FARPROC __stdcall GetProcAddress( HINSTANCE h # include #endif -static_assert( VK_HEADER_VERSION == 300, "Wrong VK_HEADER_VERSION!" ); +static_assert( VK_HEADER_VERSION == 301, "Wrong VK_HEADER_VERSION!" ); // includes through some other header // this results in major(x) being resolved to gnu_dev_major(x) @@ -954,5069 +954,5093 @@ namespace VULKAN_HPP_NAMESPACE # endif #endif // VULKAN_HPP_DISABLE_ENHANCED_MODE - class DispatchLoaderBase + namespace detail { - public: - DispatchLoaderBase() = default; - DispatchLoaderBase( std::nullptr_t ) -#if !defined( NDEBUG ) - : m_valid( false ) -#endif + class DispatchLoaderBase { - } + public: + DispatchLoaderBase() = default; + DispatchLoaderBase( std::nullptr_t ) +#if !defined( NDEBUG ) + : m_valid( false ) +#endif + { + } #if !defined( NDEBUG ) - size_t getVkHeaderVersion() const - { - VULKAN_HPP_ASSERT( m_valid ); - return vkHeaderVersion; - } + size_t getVkHeaderVersion() const + { + VULKAN_HPP_ASSERT( m_valid ); + return vkHeaderVersion; + } - private: - size_t vkHeaderVersion = VK_HEADER_VERSION; - bool m_valid = true; + private: + size_t vkHeaderVersion = VK_HEADER_VERSION; + bool m_valid = true; #endif - }; + }; #if !defined( VK_NO_PROTOTYPES ) - class DispatchLoaderStatic : public DispatchLoaderBase - { - public: - //=== VK_VERSION_1_0 === - - VkResult - vkCreateInstance( const VkInstanceCreateInfo * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkInstance * pInstance ) const VULKAN_HPP_NOEXCEPT + class DispatchLoaderStatic : public DispatchLoaderBase { - return ::vkCreateInstance( pCreateInfo, pAllocator, pInstance ); - } + public: + //=== VK_VERSION_1_0 === - void vkDestroyInstance( VkInstance instance, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyInstance( instance, pAllocator ); - } + VkResult + vkCreateInstance( const VkInstanceCreateInfo * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkInstance * pInstance ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateInstance( pCreateInfo, pAllocator, 
pInstance ); + } - VkResult vkEnumeratePhysicalDevices( VkInstance instance, uint32_t * pPhysicalDeviceCount, VkPhysicalDevice * pPhysicalDevices ) const VULKAN_HPP_NOEXCEPT - { - return ::vkEnumeratePhysicalDevices( instance, pPhysicalDeviceCount, pPhysicalDevices ); - } + void vkDestroyInstance( VkInstance instance, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyInstance( instance, pAllocator ); + } - void vkGetPhysicalDeviceFeatures( VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures * pFeatures ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceFeatures( physicalDevice, pFeatures ); - } + VkResult vkEnumeratePhysicalDevices( VkInstance instance, uint32_t * pPhysicalDeviceCount, VkPhysicalDevice * pPhysicalDevices ) const VULKAN_HPP_NOEXCEPT + { + return ::vkEnumeratePhysicalDevices( instance, pPhysicalDeviceCount, pPhysicalDevices ); + } - void - vkGetPhysicalDeviceFormatProperties( VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties * pFormatProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceFormatProperties( physicalDevice, format, pFormatProperties ); - } + void vkGetPhysicalDeviceFeatures( VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures * pFeatures ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceFeatures( physicalDevice, pFeatures ); + } - VkResult vkGetPhysicalDeviceImageFormatProperties( VkPhysicalDevice physicalDevice, - VkFormat format, - VkImageType type, - VkImageTiling tiling, - VkImageUsageFlags usage, - VkImageCreateFlags flags, - VkImageFormatProperties * pImageFormatProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceImageFormatProperties( physicalDevice, format, type, tiling, usage, flags, pImageFormatProperties ); - } + void vkGetPhysicalDeviceFormatProperties( VkPhysicalDevice physicalDevice, + VkFormat format, + VkFormatProperties * pFormatProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceFormatProperties( physicalDevice, format, pFormatProperties ); + } - void vkGetPhysicalDeviceProperties( VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties * pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceProperties( physicalDevice, pProperties ); - } + VkResult vkGetPhysicalDeviceImageFormatProperties( VkPhysicalDevice physicalDevice, + VkFormat format, + VkImageType type, + VkImageTiling tiling, + VkImageUsageFlags usage, + VkImageCreateFlags flags, + VkImageFormatProperties * pImageFormatProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceImageFormatProperties( physicalDevice, format, type, tiling, usage, flags, pImageFormatProperties ); + } - void vkGetPhysicalDeviceQueueFamilyProperties( VkPhysicalDevice physicalDevice, - uint32_t * pQueueFamilyPropertyCount, - VkQueueFamilyProperties * pQueueFamilyProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceQueueFamilyProperties( physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties ); - } + void vkGetPhysicalDeviceProperties( VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceProperties( physicalDevice, pProperties ); + } - void vkGetPhysicalDeviceMemoryProperties( VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties * pMemoryProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceMemoryProperties( physicalDevice, pMemoryProperties ); - } + void 
vkGetPhysicalDeviceQueueFamilyProperties( VkPhysicalDevice physicalDevice, + uint32_t * pQueueFamilyPropertyCount, + VkQueueFamilyProperties * pQueueFamilyProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceQueueFamilyProperties( physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties ); + } - PFN_vkVoidFunction vkGetInstanceProcAddr( VkInstance instance, const char * pName ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetInstanceProcAddr( instance, pName ); - } + void vkGetPhysicalDeviceMemoryProperties( VkPhysicalDevice physicalDevice, + VkPhysicalDeviceMemoryProperties * pMemoryProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceMemoryProperties( physicalDevice, pMemoryProperties ); + } - PFN_vkVoidFunction vkGetDeviceProcAddr( VkDevice device, const char * pName ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDeviceProcAddr( device, pName ); - } + PFN_vkVoidFunction vkGetInstanceProcAddr( VkInstance instance, const char * pName ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetInstanceProcAddr( instance, pName ); + } - VkResult vkCreateDevice( VkPhysicalDevice physicalDevice, - const VkDeviceCreateInfo * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkDevice * pDevice ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateDevice( physicalDevice, pCreateInfo, pAllocator, pDevice ); - } + PFN_vkVoidFunction vkGetDeviceProcAddr( VkDevice device, const char * pName ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceProcAddr( device, pName ); + } - void vkDestroyDevice( VkDevice device, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyDevice( device, pAllocator ); - } + VkResult vkCreateDevice( VkPhysicalDevice physicalDevice, + const VkDeviceCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkDevice * pDevice ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateDevice( physicalDevice, pCreateInfo, pAllocator, pDevice ); + } - VkResult vkEnumerateInstanceExtensionProperties( const char * pLayerName, + void vkDestroyDevice( VkDevice device, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyDevice( device, pAllocator ); + } + + VkResult vkEnumerateInstanceExtensionProperties( const char * pLayerName, + uint32_t * pPropertyCount, + VkExtensionProperties * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkEnumerateInstanceExtensionProperties( pLayerName, pPropertyCount, pProperties ); + } + + VkResult vkEnumerateDeviceExtensionProperties( VkPhysicalDevice physicalDevice, + const char * pLayerName, uint32_t * pPropertyCount, VkExtensionProperties * pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkEnumerateInstanceExtensionProperties( pLayerName, pPropertyCount, pProperties ); - } + { + return ::vkEnumerateDeviceExtensionProperties( physicalDevice, pLayerName, pPropertyCount, pProperties ); + } - VkResult vkEnumerateDeviceExtensionProperties( VkPhysicalDevice physicalDevice, - const char * pLayerName, - uint32_t * pPropertyCount, - VkExtensionProperties * pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkEnumerateDeviceExtensionProperties( physicalDevice, pLayerName, pPropertyCount, pProperties ); - } + VkResult vkEnumerateInstanceLayerProperties( uint32_t * pPropertyCount, VkLayerProperties * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkEnumerateInstanceLayerProperties( pPropertyCount, pProperties ); + } - VkResult vkEnumerateInstanceLayerProperties( uint32_t * pPropertyCount, VkLayerProperties * 
pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkEnumerateInstanceLayerProperties( pPropertyCount, pProperties ); - } + VkResult vkEnumerateDeviceLayerProperties( VkPhysicalDevice physicalDevice, + uint32_t * pPropertyCount, + VkLayerProperties * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkEnumerateDeviceLayerProperties( physicalDevice, pPropertyCount, pProperties ); + } - VkResult - vkEnumerateDeviceLayerProperties( VkPhysicalDevice physicalDevice, uint32_t * pPropertyCount, VkLayerProperties * pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkEnumerateDeviceLayerProperties( physicalDevice, pPropertyCount, pProperties ); - } + void vkGetDeviceQueue( VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex, VkQueue * pQueue ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceQueue( device, queueFamilyIndex, queueIndex, pQueue ); + } - void vkGetDeviceQueue( VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex, VkQueue * pQueue ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDeviceQueue( device, queueFamilyIndex, queueIndex, pQueue ); - } + VkResult vkQueueSubmit( VkQueue queue, uint32_t submitCount, const VkSubmitInfo * pSubmits, VkFence fence ) const VULKAN_HPP_NOEXCEPT + { + return ::vkQueueSubmit( queue, submitCount, pSubmits, fence ); + } - VkResult vkQueueSubmit( VkQueue queue, uint32_t submitCount, const VkSubmitInfo * pSubmits, VkFence fence ) const VULKAN_HPP_NOEXCEPT - { - return ::vkQueueSubmit( queue, submitCount, pSubmits, fence ); - } + VkResult vkQueueWaitIdle( VkQueue queue ) const VULKAN_HPP_NOEXCEPT + { + return ::vkQueueWaitIdle( queue ); + } - VkResult vkQueueWaitIdle( VkQueue queue ) const VULKAN_HPP_NOEXCEPT - { - return ::vkQueueWaitIdle( queue ); - } + VkResult vkDeviceWaitIdle( VkDevice device ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDeviceWaitIdle( device ); + } - VkResult vkDeviceWaitIdle( VkDevice device ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDeviceWaitIdle( device ); - } + VkResult vkAllocateMemory( VkDevice device, + const VkMemoryAllocateInfo * pAllocateInfo, + const VkAllocationCallbacks * pAllocator, + VkDeviceMemory * pMemory ) const VULKAN_HPP_NOEXCEPT + { + return ::vkAllocateMemory( device, pAllocateInfo, pAllocator, pMemory ); + } - VkResult vkAllocateMemory( VkDevice device, - const VkMemoryAllocateInfo * pAllocateInfo, - const VkAllocationCallbacks * pAllocator, - VkDeviceMemory * pMemory ) const VULKAN_HPP_NOEXCEPT - { - return ::vkAllocateMemory( device, pAllocateInfo, pAllocator, pMemory ); - } + void vkFreeMemory( VkDevice device, VkDeviceMemory memory, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkFreeMemory( device, memory, pAllocator ); + } - void vkFreeMemory( VkDevice device, VkDeviceMemory memory, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkFreeMemory( device, memory, pAllocator ); - } + VkResult vkMapMemory( VkDevice device, VkDeviceMemory memory, VkDeviceSize offset, VkDeviceSize size, VkMemoryMapFlags flags, void ** ppData ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkMapMemory( device, memory, offset, size, flags, ppData ); + } - VkResult vkMapMemory( VkDevice device, VkDeviceMemory memory, VkDeviceSize offset, VkDeviceSize size, VkMemoryMapFlags flags, void ** ppData ) const - VULKAN_HPP_NOEXCEPT - { - return ::vkMapMemory( device, memory, offset, size, flags, ppData ); - } + void vkUnmapMemory( VkDevice device, VkDeviceMemory memory ) const VULKAN_HPP_NOEXCEPT + { + return ::vkUnmapMemory( device, 
memory ); + } - void vkUnmapMemory( VkDevice device, VkDeviceMemory memory ) const VULKAN_HPP_NOEXCEPT - { - return ::vkUnmapMemory( device, memory ); - } + VkResult vkFlushMappedMemoryRanges( VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange * pMemoryRanges ) const VULKAN_HPP_NOEXCEPT + { + return ::vkFlushMappedMemoryRanges( device, memoryRangeCount, pMemoryRanges ); + } - VkResult vkFlushMappedMemoryRanges( VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange * pMemoryRanges ) const VULKAN_HPP_NOEXCEPT - { - return ::vkFlushMappedMemoryRanges( device, memoryRangeCount, pMemoryRanges ); - } + VkResult vkInvalidateMappedMemoryRanges( VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange * pMemoryRanges ) const VULKAN_HPP_NOEXCEPT + { + return ::vkInvalidateMappedMemoryRanges( device, memoryRangeCount, pMemoryRanges ); + } - VkResult vkInvalidateMappedMemoryRanges( VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange * pMemoryRanges ) const VULKAN_HPP_NOEXCEPT - { - return ::vkInvalidateMappedMemoryRanges( device, memoryRangeCount, pMemoryRanges ); - } + void vkGetDeviceMemoryCommitment( VkDevice device, VkDeviceMemory memory, VkDeviceSize * pCommittedMemoryInBytes ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceMemoryCommitment( device, memory, pCommittedMemoryInBytes ); + } - void vkGetDeviceMemoryCommitment( VkDevice device, VkDeviceMemory memory, VkDeviceSize * pCommittedMemoryInBytes ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDeviceMemoryCommitment( device, memory, pCommittedMemoryInBytes ); - } + VkResult vkBindBufferMemory( VkDevice device, VkBuffer buffer, VkDeviceMemory memory, VkDeviceSize memoryOffset ) const VULKAN_HPP_NOEXCEPT + { + return ::vkBindBufferMemory( device, buffer, memory, memoryOffset ); + } - VkResult vkBindBufferMemory( VkDevice device, VkBuffer buffer, VkDeviceMemory memory, VkDeviceSize memoryOffset ) const VULKAN_HPP_NOEXCEPT - { - return ::vkBindBufferMemory( device, buffer, memory, memoryOffset ); - } + VkResult vkBindImageMemory( VkDevice device, VkImage image, VkDeviceMemory memory, VkDeviceSize memoryOffset ) const VULKAN_HPP_NOEXCEPT + { + return ::vkBindImageMemory( device, image, memory, memoryOffset ); + } - VkResult vkBindImageMemory( VkDevice device, VkImage image, VkDeviceMemory memory, VkDeviceSize memoryOffset ) const VULKAN_HPP_NOEXCEPT - { - return ::vkBindImageMemory( device, image, memory, memoryOffset ); - } + void vkGetBufferMemoryRequirements( VkDevice device, VkBuffer buffer, VkMemoryRequirements * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetBufferMemoryRequirements( device, buffer, pMemoryRequirements ); + } - void vkGetBufferMemoryRequirements( VkDevice device, VkBuffer buffer, VkMemoryRequirements * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetBufferMemoryRequirements( device, buffer, pMemoryRequirements ); - } + void vkGetImageMemoryRequirements( VkDevice device, VkImage image, VkMemoryRequirements * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetImageMemoryRequirements( device, image, pMemoryRequirements ); + } - void vkGetImageMemoryRequirements( VkDevice device, VkImage image, VkMemoryRequirements * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetImageMemoryRequirements( device, image, pMemoryRequirements ); - } + void vkGetImageSparseMemoryRequirements( VkDevice device, + VkImage image, + uint32_t * pSparseMemoryRequirementCount, + VkSparseImageMemoryRequirements 
* pSparseMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetImageSparseMemoryRequirements( device, image, pSparseMemoryRequirementCount, pSparseMemoryRequirements ); + } - void vkGetImageSparseMemoryRequirements( VkDevice device, - VkImage image, - uint32_t * pSparseMemoryRequirementCount, - VkSparseImageMemoryRequirements * pSparseMemoryRequirements ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetImageSparseMemoryRequirements( device, image, pSparseMemoryRequirementCount, pSparseMemoryRequirements ); - } + void vkGetPhysicalDeviceSparseImageFormatProperties( VkPhysicalDevice physicalDevice, + VkFormat format, + VkImageType type, + VkSampleCountFlagBits samples, + VkImageUsageFlags usage, + VkImageTiling tiling, + uint32_t * pPropertyCount, + VkSparseImageFormatProperties * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceSparseImageFormatProperties( physicalDevice, format, type, samples, usage, tiling, pPropertyCount, pProperties ); + } - void vkGetPhysicalDeviceSparseImageFormatProperties( VkPhysicalDevice physicalDevice, - VkFormat format, - VkImageType type, - VkSampleCountFlagBits samples, - VkImageUsageFlags usage, - VkImageTiling tiling, - uint32_t * pPropertyCount, - VkSparseImageFormatProperties * pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceSparseImageFormatProperties( physicalDevice, format, type, samples, usage, tiling, pPropertyCount, pProperties ); - } + VkResult vkQueueBindSparse( VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo * pBindInfo, VkFence fence ) const VULKAN_HPP_NOEXCEPT + { + return ::vkQueueBindSparse( queue, bindInfoCount, pBindInfo, fence ); + } - VkResult vkQueueBindSparse( VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo * pBindInfo, VkFence fence ) const VULKAN_HPP_NOEXCEPT - { - return ::vkQueueBindSparse( queue, bindInfoCount, pBindInfo, fence ); - } - - VkResult vkCreateFence( VkDevice device, - const VkFenceCreateInfo * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkFence * pFence ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateFence( device, pCreateInfo, pAllocator, pFence ); - } - - void vkDestroyFence( VkDevice device, VkFence fence, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyFence( device, fence, pAllocator ); - } - - VkResult vkResetFences( VkDevice device, uint32_t fenceCount, const VkFence * pFences ) const VULKAN_HPP_NOEXCEPT - { - return ::vkResetFences( device, fenceCount, pFences ); - } - - VkResult vkGetFenceStatus( VkDevice device, VkFence fence ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetFenceStatus( device, fence ); - } - - VkResult vkWaitForFences( VkDevice device, uint32_t fenceCount, const VkFence * pFences, VkBool32 waitAll, uint64_t timeout ) const VULKAN_HPP_NOEXCEPT - { - return ::vkWaitForFences( device, fenceCount, pFences, waitAll, timeout ); - } - - VkResult vkCreateSemaphore( VkDevice device, - const VkSemaphoreCreateInfo * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkSemaphore * pSemaphore ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateSemaphore( device, pCreateInfo, pAllocator, pSemaphore ); - } - - void vkDestroySemaphore( VkDevice device, VkSemaphore semaphore, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroySemaphore( device, semaphore, pAllocator ); - } - - VkResult vkCreateEvent( VkDevice device, - const VkEventCreateInfo * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkEvent * 
pEvent ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateEvent( device, pCreateInfo, pAllocator, pEvent ); - } - - void vkDestroyEvent( VkDevice device, VkEvent event, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyEvent( device, event, pAllocator ); - } - - VkResult vkGetEventStatus( VkDevice device, VkEvent event ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetEventStatus( device, event ); - } - - VkResult vkSetEvent( VkDevice device, VkEvent event ) const VULKAN_HPP_NOEXCEPT - { - return ::vkSetEvent( device, event ); - } - - VkResult vkResetEvent( VkDevice device, VkEvent event ) const VULKAN_HPP_NOEXCEPT - { - return ::vkResetEvent( device, event ); - } - - VkResult vkCreateQueryPool( VkDevice device, - const VkQueryPoolCreateInfo * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkQueryPool * pQueryPool ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateQueryPool( device, pCreateInfo, pAllocator, pQueryPool ); - } - - void vkDestroyQueryPool( VkDevice device, VkQueryPool queryPool, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyQueryPool( device, queryPool, pAllocator ); - } - - VkResult vkGetQueryPoolResults( VkDevice device, - VkQueryPool queryPool, - uint32_t firstQuery, - uint32_t queryCount, - size_t dataSize, - void * pData, - VkDeviceSize stride, - VkQueryResultFlags flags ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetQueryPoolResults( device, queryPool, firstQuery, queryCount, dataSize, pData, stride, flags ); - } - - VkResult vkCreateBuffer( VkDevice device, - const VkBufferCreateInfo * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkBuffer * pBuffer ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateBuffer( device, pCreateInfo, pAllocator, pBuffer ); - } - - void vkDestroyBuffer( VkDevice device, VkBuffer buffer, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyBuffer( device, buffer, pAllocator ); - } - - VkResult vkCreateBufferView( VkDevice device, - const VkBufferViewCreateInfo * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkBufferView * pView ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateBufferView( device, pCreateInfo, pAllocator, pView ); - } - - void vkDestroyBufferView( VkDevice device, VkBufferView bufferView, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyBufferView( device, bufferView, pAllocator ); - } - - VkResult vkCreateImage( VkDevice device, - const VkImageCreateInfo * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkImage * pImage ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateImage( device, pCreateInfo, pAllocator, pImage ); - } - - void vkDestroyImage( VkDevice device, VkImage image, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyImage( device, image, pAllocator ); - } - - void vkGetImageSubresourceLayout( VkDevice device, - VkImage image, - const VkImageSubresource * pSubresource, - VkSubresourceLayout * pLayout ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetImageSubresourceLayout( device, image, pSubresource, pLayout ); - } - - VkResult vkCreateImageView( VkDevice device, - const VkImageViewCreateInfo * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkImageView * pView ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateImageView( device, pCreateInfo, pAllocator, pView ); - } - - void vkDestroyImageView( VkDevice device, VkImageView imageView, const 
VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyImageView( device, imageView, pAllocator ); - } - - VkResult vkCreateShaderModule( VkDevice device, - const VkShaderModuleCreateInfo * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkShaderModule * pShaderModule ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateShaderModule( device, pCreateInfo, pAllocator, pShaderModule ); - } - - void vkDestroyShaderModule( VkDevice device, VkShaderModule shaderModule, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyShaderModule( device, shaderModule, pAllocator ); - } - - VkResult vkCreatePipelineCache( VkDevice device, - const VkPipelineCacheCreateInfo * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkPipelineCache * pPipelineCache ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreatePipelineCache( device, pCreateInfo, pAllocator, pPipelineCache ); - } - - void vkDestroyPipelineCache( VkDevice device, VkPipelineCache pipelineCache, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyPipelineCache( device, pipelineCache, pAllocator ); - } - - VkResult vkGetPipelineCacheData( VkDevice device, VkPipelineCache pipelineCache, size_t * pDataSize, void * pData ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPipelineCacheData( device, pipelineCache, pDataSize, pData ); - } - - VkResult - vkMergePipelineCaches( VkDevice device, VkPipelineCache dstCache, uint32_t srcCacheCount, const VkPipelineCache * pSrcCaches ) const VULKAN_HPP_NOEXCEPT - { - return ::vkMergePipelineCaches( device, dstCache, srcCacheCount, pSrcCaches ); - } - - VkResult vkCreateGraphicsPipelines( VkDevice device, - VkPipelineCache pipelineCache, - uint32_t createInfoCount, - const VkGraphicsPipelineCreateInfo * pCreateInfos, - const VkAllocationCallbacks * pAllocator, - VkPipeline * pPipelines ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateGraphicsPipelines( device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines ); - } - - VkResult vkCreateComputePipelines( VkDevice device, - VkPipelineCache pipelineCache, - uint32_t createInfoCount, - const VkComputePipelineCreateInfo * pCreateInfos, - const VkAllocationCallbacks * pAllocator, - VkPipeline * pPipelines ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateComputePipelines( device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines ); - } - - void vkDestroyPipeline( VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyPipeline( device, pipeline, pAllocator ); - } - - VkResult vkCreatePipelineLayout( VkDevice device, - const VkPipelineLayoutCreateInfo * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkPipelineLayout * pPipelineLayout ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreatePipelineLayout( device, pCreateInfo, pAllocator, pPipelineLayout ); - } - - void vkDestroyPipelineLayout( VkDevice device, VkPipelineLayout pipelineLayout, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyPipelineLayout( device, pipelineLayout, pAllocator ); - } - - VkResult vkCreateSampler( VkDevice device, - const VkSamplerCreateInfo * pCreateInfo, + VkResult vkCreateFence( VkDevice device, + const VkFenceCreateInfo * pCreateInfo, const VkAllocationCallbacks * pAllocator, - VkSampler * pSampler ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateSampler( device, pCreateInfo, pAllocator, pSampler 
); - } - - void vkDestroySampler( VkDevice device, VkSampler sampler, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroySampler( device, sampler, pAllocator ); - } - - VkResult vkCreateDescriptorSetLayout( VkDevice device, - const VkDescriptorSetLayoutCreateInfo * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkDescriptorSetLayout * pSetLayout ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateDescriptorSetLayout( device, pCreateInfo, pAllocator, pSetLayout ); - } - - void vkDestroyDescriptorSetLayout( VkDevice device, - VkDescriptorSetLayout descriptorSetLayout, - const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyDescriptorSetLayout( device, descriptorSetLayout, pAllocator ); - } - - VkResult vkCreateDescriptorPool( VkDevice device, - const VkDescriptorPoolCreateInfo * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkDescriptorPool * pDescriptorPool ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateDescriptorPool( device, pCreateInfo, pAllocator, pDescriptorPool ); - } - - void vkDestroyDescriptorPool( VkDevice device, VkDescriptorPool descriptorPool, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyDescriptorPool( device, descriptorPool, pAllocator ); - } - - VkResult vkResetDescriptorPool( VkDevice device, VkDescriptorPool descriptorPool, VkDescriptorPoolResetFlags flags ) const VULKAN_HPP_NOEXCEPT - { - return ::vkResetDescriptorPool( device, descriptorPool, flags ); - } - - VkResult vkAllocateDescriptorSets( VkDevice device, - const VkDescriptorSetAllocateInfo * pAllocateInfo, - VkDescriptorSet * pDescriptorSets ) const VULKAN_HPP_NOEXCEPT - { - return ::vkAllocateDescriptorSets( device, pAllocateInfo, pDescriptorSets ); - } - - VkResult vkFreeDescriptorSets( VkDevice device, - VkDescriptorPool descriptorPool, - uint32_t descriptorSetCount, - const VkDescriptorSet * pDescriptorSets ) const VULKAN_HPP_NOEXCEPT - { - return ::vkFreeDescriptorSets( device, descriptorPool, descriptorSetCount, pDescriptorSets ); - } - - void vkUpdateDescriptorSets( VkDevice device, - uint32_t descriptorWriteCount, - const VkWriteDescriptorSet * pDescriptorWrites, - uint32_t descriptorCopyCount, - const VkCopyDescriptorSet * pDescriptorCopies ) const VULKAN_HPP_NOEXCEPT - { - return ::vkUpdateDescriptorSets( device, descriptorWriteCount, pDescriptorWrites, descriptorCopyCount, pDescriptorCopies ); - } - - VkResult vkCreateFramebuffer( VkDevice device, - const VkFramebufferCreateInfo * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkFramebuffer * pFramebuffer ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateFramebuffer( device, pCreateInfo, pAllocator, pFramebuffer ); - } - - void vkDestroyFramebuffer( VkDevice device, VkFramebuffer framebuffer, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyFramebuffer( device, framebuffer, pAllocator ); - } - - VkResult vkCreateRenderPass( VkDevice device, - const VkRenderPassCreateInfo * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkRenderPass * pRenderPass ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateRenderPass( device, pCreateInfo, pAllocator, pRenderPass ); - } - - void vkDestroyRenderPass( VkDevice device, VkRenderPass renderPass, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyRenderPass( device, renderPass, pAllocator ); - } - - void vkGetRenderAreaGranularity( VkDevice device, 
VkRenderPass renderPass, VkExtent2D * pGranularity ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetRenderAreaGranularity( device, renderPass, pGranularity ); - } - - VkResult vkCreateCommandPool( VkDevice device, - const VkCommandPoolCreateInfo * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkCommandPool * pCommandPool ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateCommandPool( device, pCreateInfo, pAllocator, pCommandPool ); - } - - void vkDestroyCommandPool( VkDevice device, VkCommandPool commandPool, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyCommandPool( device, commandPool, pAllocator ); - } - - VkResult vkResetCommandPool( VkDevice device, VkCommandPool commandPool, VkCommandPoolResetFlags flags ) const VULKAN_HPP_NOEXCEPT - { - return ::vkResetCommandPool( device, commandPool, flags ); - } - - VkResult vkAllocateCommandBuffers( VkDevice device, - const VkCommandBufferAllocateInfo * pAllocateInfo, - VkCommandBuffer * pCommandBuffers ) const VULKAN_HPP_NOEXCEPT - { - return ::vkAllocateCommandBuffers( device, pAllocateInfo, pCommandBuffers ); - } - - void vkFreeCommandBuffers( VkDevice device, - VkCommandPool commandPool, - uint32_t commandBufferCount, - const VkCommandBuffer * pCommandBuffers ) const VULKAN_HPP_NOEXCEPT - { - return ::vkFreeCommandBuffers( device, commandPool, commandBufferCount, pCommandBuffers ); - } - - VkResult vkBeginCommandBuffer( VkCommandBuffer commandBuffer, const VkCommandBufferBeginInfo * pBeginInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkBeginCommandBuffer( commandBuffer, pBeginInfo ); - } - - VkResult vkEndCommandBuffer( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT - { - return ::vkEndCommandBuffer( commandBuffer ); - } - - VkResult vkResetCommandBuffer( VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags ) const VULKAN_HPP_NOEXCEPT - { - return ::vkResetCommandBuffer( commandBuffer, flags ); - } - - void vkCmdBindPipeline( VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBindPipeline( commandBuffer, pipelineBindPoint, pipeline ); - } - - void - vkCmdSetViewport( VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewport * pViewports ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetViewport( commandBuffer, firstViewport, viewportCount, pViewports ); - } - - void vkCmdSetScissor( VkCommandBuffer commandBuffer, uint32_t firstScissor, uint32_t scissorCount, const VkRect2D * pScissors ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetScissor( commandBuffer, firstScissor, scissorCount, pScissors ); - } - - void vkCmdSetLineWidth( VkCommandBuffer commandBuffer, float lineWidth ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetLineWidth( commandBuffer, lineWidth ); - } - - void vkCmdSetDepthBias( VkCommandBuffer commandBuffer, - float depthBiasConstantFactor, - float depthBiasClamp, - float depthBiasSlopeFactor ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetDepthBias( commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor ); - } - - void vkCmdSetBlendConstants( VkCommandBuffer commandBuffer, const float blendConstants[4] ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetBlendConstants( commandBuffer, blendConstants ); - } - - void vkCmdSetDepthBounds( VkCommandBuffer commandBuffer, float minDepthBounds, float maxDepthBounds ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetDepthBounds( commandBuffer, 
minDepthBounds, maxDepthBounds ); - } - - void vkCmdSetStencilCompareMask( VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t compareMask ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetStencilCompareMask( commandBuffer, faceMask, compareMask ); - } - - void vkCmdSetStencilWriteMask( VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t writeMask ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetStencilWriteMask( commandBuffer, faceMask, writeMask ); - } - - void vkCmdSetStencilReference( VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t reference ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetStencilReference( commandBuffer, faceMask, reference ); - } - - void vkCmdBindDescriptorSets( VkCommandBuffer commandBuffer, - VkPipelineBindPoint pipelineBindPoint, - VkPipelineLayout layout, - uint32_t firstSet, - uint32_t descriptorSetCount, - const VkDescriptorSet * pDescriptorSets, - uint32_t dynamicOffsetCount, - const uint32_t * pDynamicOffsets ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBindDescriptorSets( - commandBuffer, pipelineBindPoint, layout, firstSet, descriptorSetCount, pDescriptorSets, dynamicOffsetCount, pDynamicOffsets ); - } - - void vkCmdBindIndexBuffer( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkIndexType indexType ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBindIndexBuffer( commandBuffer, buffer, offset, indexType ); - } - - void vkCmdBindVertexBuffers( VkCommandBuffer commandBuffer, - uint32_t firstBinding, - uint32_t bindingCount, - const VkBuffer * pBuffers, - const VkDeviceSize * pOffsets ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBindVertexBuffers( commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets ); - } - - void vkCmdDraw( VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance ) const - VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDraw( commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance ); - } - - void vkCmdDrawIndexed( VkCommandBuffer commandBuffer, - uint32_t indexCount, - uint32_t instanceCount, - uint32_t firstIndex, - int32_t vertexOffset, - uint32_t firstInstance ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDrawIndexed( commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance ); - } - - void vkCmdDrawIndirect( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDrawIndirect( commandBuffer, buffer, offset, drawCount, stride ); - } - - void vkCmdDrawIndexedIndirect( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride ) const - VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDrawIndexedIndirect( commandBuffer, buffer, offset, drawCount, stride ); - } - - void vkCmdDispatch( VkCommandBuffer commandBuffer, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDispatch( commandBuffer, groupCountX, groupCountY, groupCountZ ); - } - - void vkCmdDispatchIndirect( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDispatchIndirect( commandBuffer, buffer, offset ); - } - - void vkCmdCopyBuffer( VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferCopy * pRegions ) const - VULKAN_HPP_NOEXCEPT - { - return ::vkCmdCopyBuffer( commandBuffer, 
srcBuffer, dstBuffer, regionCount, pRegions ); - } - - void vkCmdCopyImage( VkCommandBuffer commandBuffer, - VkImage srcImage, - VkImageLayout srcImageLayout, - VkImage dstImage, - VkImageLayout dstImageLayout, - uint32_t regionCount, - const VkImageCopy * pRegions ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdCopyImage( commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions ); - } - - void vkCmdBlitImage( VkCommandBuffer commandBuffer, - VkImage srcImage, - VkImageLayout srcImageLayout, - VkImage dstImage, - VkImageLayout dstImageLayout, - uint32_t regionCount, - const VkImageBlit * pRegions, - VkFilter filter ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBlitImage( commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions, filter ); - } - - void vkCmdCopyBufferToImage( VkCommandBuffer commandBuffer, - VkBuffer srcBuffer, - VkImage dstImage, - VkImageLayout dstImageLayout, - uint32_t regionCount, - const VkBufferImageCopy * pRegions ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdCopyBufferToImage( commandBuffer, srcBuffer, dstImage, dstImageLayout, regionCount, pRegions ); - } - - void vkCmdCopyImageToBuffer( VkCommandBuffer commandBuffer, - VkImage srcImage, - VkImageLayout srcImageLayout, - VkBuffer dstBuffer, - uint32_t regionCount, - const VkBufferImageCopy * pRegions ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdCopyImageToBuffer( commandBuffer, srcImage, srcImageLayout, dstBuffer, regionCount, pRegions ); - } - - void vkCmdUpdateBuffer( VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize dataSize, const void * pData ) const - VULKAN_HPP_NOEXCEPT - { - return ::vkCmdUpdateBuffer( commandBuffer, dstBuffer, dstOffset, dataSize, pData ); - } - - void - vkCmdFillBuffer( VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize size, uint32_t data ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdFillBuffer( commandBuffer, dstBuffer, dstOffset, size, data ); - } - - void vkCmdClearColorImage( VkCommandBuffer commandBuffer, - VkImage image, - VkImageLayout imageLayout, - const VkClearColorValue * pColor, - uint32_t rangeCount, - const VkImageSubresourceRange * pRanges ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdClearColorImage( commandBuffer, image, imageLayout, pColor, rangeCount, pRanges ); - } - - void vkCmdClearDepthStencilImage( VkCommandBuffer commandBuffer, - VkImage image, - VkImageLayout imageLayout, - const VkClearDepthStencilValue * pDepthStencil, - uint32_t rangeCount, - const VkImageSubresourceRange * pRanges ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdClearDepthStencilImage( commandBuffer, image, imageLayout, pDepthStencil, rangeCount, pRanges ); - } - - void vkCmdClearAttachments( VkCommandBuffer commandBuffer, - uint32_t attachmentCount, - const VkClearAttachment * pAttachments, - uint32_t rectCount, - const VkClearRect * pRects ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdClearAttachments( commandBuffer, attachmentCount, pAttachments, rectCount, pRects ); - } - - void vkCmdResolveImage( VkCommandBuffer commandBuffer, - VkImage srcImage, - VkImageLayout srcImageLayout, - VkImage dstImage, - VkImageLayout dstImageLayout, - uint32_t regionCount, - const VkImageResolve * pRegions ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdResolveImage( commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions ); - } - - void vkCmdSetEvent( VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags 
stageMask ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetEvent( commandBuffer, event, stageMask ); - } - - void vkCmdResetEvent( VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdResetEvent( commandBuffer, event, stageMask ); - } - - void vkCmdWaitEvents( VkCommandBuffer commandBuffer, - uint32_t eventCount, - const VkEvent * pEvents, - VkPipelineStageFlags srcStageMask, - VkPipelineStageFlags dstStageMask, - uint32_t memoryBarrierCount, - const VkMemoryBarrier * pMemoryBarriers, - uint32_t bufferMemoryBarrierCount, - const VkBufferMemoryBarrier * pBufferMemoryBarriers, - uint32_t imageMemoryBarrierCount, - const VkImageMemoryBarrier * pImageMemoryBarriers ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdWaitEvents( commandBuffer, - eventCount, - pEvents, - srcStageMask, - dstStageMask, - memoryBarrierCount, - pMemoryBarriers, - bufferMemoryBarrierCount, - pBufferMemoryBarriers, - imageMemoryBarrierCount, - pImageMemoryBarriers ); - } - - void vkCmdPipelineBarrier( VkCommandBuffer commandBuffer, - VkPipelineStageFlags srcStageMask, - VkPipelineStageFlags dstStageMask, - VkDependencyFlags dependencyFlags, - uint32_t memoryBarrierCount, - const VkMemoryBarrier * pMemoryBarriers, - uint32_t bufferMemoryBarrierCount, - const VkBufferMemoryBarrier * pBufferMemoryBarriers, - uint32_t imageMemoryBarrierCount, - const VkImageMemoryBarrier * pImageMemoryBarriers ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdPipelineBarrier( commandBuffer, - srcStageMask, - dstStageMask, - dependencyFlags, - memoryBarrierCount, - pMemoryBarriers, - bufferMemoryBarrierCount, - pBufferMemoryBarriers, - imageMemoryBarrierCount, - pImageMemoryBarriers ); - } - - void vkCmdBeginQuery( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBeginQuery( commandBuffer, queryPool, query, flags ); - } - - void vkCmdEndQuery( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdEndQuery( commandBuffer, queryPool, query ); - } - - void vkCmdResetQueryPool( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdResetQueryPool( commandBuffer, queryPool, firstQuery, queryCount ); - } - - void vkCmdWriteTimestamp( VkCommandBuffer commandBuffer, - VkPipelineStageFlagBits pipelineStage, - VkQueryPool queryPool, - uint32_t query ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdWriteTimestamp( commandBuffer, pipelineStage, queryPool, query ); - } - - void vkCmdCopyQueryPoolResults( VkCommandBuffer commandBuffer, - VkQueryPool queryPool, - uint32_t firstQuery, - uint32_t queryCount, - VkBuffer dstBuffer, - VkDeviceSize dstOffset, - VkDeviceSize stride, - VkQueryResultFlags flags ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdCopyQueryPoolResults( commandBuffer, queryPool, firstQuery, queryCount, dstBuffer, dstOffset, stride, flags ); - } - - void vkCmdPushConstants( VkCommandBuffer commandBuffer, - VkPipelineLayout layout, - VkShaderStageFlags stageFlags, - uint32_t offset, - uint32_t size, - const void * pValues ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdPushConstants( commandBuffer, layout, stageFlags, offset, size, pValues ); - } - - void vkCmdBeginRenderPass( VkCommandBuffer commandBuffer, - const VkRenderPassBeginInfo * pRenderPassBegin, - VkSubpassContents contents ) const VULKAN_HPP_NOEXCEPT - { - return 
::vkCmdBeginRenderPass( commandBuffer, pRenderPassBegin, contents ); - } - - void vkCmdNextSubpass( VkCommandBuffer commandBuffer, VkSubpassContents contents ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdNextSubpass( commandBuffer, contents ); - } - - void vkCmdEndRenderPass( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdEndRenderPass( commandBuffer ); - } - - void vkCmdExecuteCommands( VkCommandBuffer commandBuffer, uint32_t commandBufferCount, const VkCommandBuffer * pCommandBuffers ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdExecuteCommands( commandBuffer, commandBufferCount, pCommandBuffers ); - } - - //=== VK_VERSION_1_1 === - - VkResult vkEnumerateInstanceVersion( uint32_t * pApiVersion ) const VULKAN_HPP_NOEXCEPT - { - return ::vkEnumerateInstanceVersion( pApiVersion ); - } - - VkResult vkBindBufferMemory2( VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo * pBindInfos ) const VULKAN_HPP_NOEXCEPT - { - return ::vkBindBufferMemory2( device, bindInfoCount, pBindInfos ); - } - - VkResult vkBindImageMemory2( VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo * pBindInfos ) const VULKAN_HPP_NOEXCEPT - { - return ::vkBindImageMemory2( device, bindInfoCount, pBindInfos ); - } - - void vkGetDeviceGroupPeerMemoryFeatures( VkDevice device, - uint32_t heapIndex, - uint32_t localDeviceIndex, - uint32_t remoteDeviceIndex, - VkPeerMemoryFeatureFlags * pPeerMemoryFeatures ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDeviceGroupPeerMemoryFeatures( device, heapIndex, localDeviceIndex, remoteDeviceIndex, pPeerMemoryFeatures ); - } - - void vkCmdSetDeviceMask( VkCommandBuffer commandBuffer, uint32_t deviceMask ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetDeviceMask( commandBuffer, deviceMask ); - } - - void vkCmdDispatchBase( VkCommandBuffer commandBuffer, - uint32_t baseGroupX, - uint32_t baseGroupY, - uint32_t baseGroupZ, - uint32_t groupCountX, - uint32_t groupCountY, - uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDispatchBase( commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ ); - } - - VkResult vkEnumeratePhysicalDeviceGroups( VkInstance instance, - uint32_t * pPhysicalDeviceGroupCount, - VkPhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkEnumeratePhysicalDeviceGroups( instance, pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties ); - } - - void vkGetImageMemoryRequirements2( VkDevice device, - const VkImageMemoryRequirementsInfo2 * pInfo, - VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetImageMemoryRequirements2( device, pInfo, pMemoryRequirements ); - } - - void vkGetBufferMemoryRequirements2( VkDevice device, - const VkBufferMemoryRequirementsInfo2 * pInfo, - VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetBufferMemoryRequirements2( device, pInfo, pMemoryRequirements ); - } - - void vkGetImageSparseMemoryRequirements2( VkDevice device, - const VkImageSparseMemoryRequirementsInfo2 * pInfo, - uint32_t * pSparseMemoryRequirementCount, - VkSparseImageMemoryRequirements2 * pSparseMemoryRequirements ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetImageSparseMemoryRequirements2( device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements ); - } - - void vkGetPhysicalDeviceFeatures2( VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2 * pFeatures ) const VULKAN_HPP_NOEXCEPT - { - return 
::vkGetPhysicalDeviceFeatures2( physicalDevice, pFeatures ); - } - - void vkGetPhysicalDeviceProperties2( VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2 * pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceProperties2( physicalDevice, pProperties ); - } - - void vkGetPhysicalDeviceFormatProperties2( VkPhysicalDevice physicalDevice, - VkFormat format, - VkFormatProperties2 * pFormatProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceFormatProperties2( physicalDevice, format, pFormatProperties ); - } - - VkResult vkGetPhysicalDeviceImageFormatProperties2( VkPhysicalDevice physicalDevice, - const VkPhysicalDeviceImageFormatInfo2 * pImageFormatInfo, - VkImageFormatProperties2 * pImageFormatProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceImageFormatProperties2( physicalDevice, pImageFormatInfo, pImageFormatProperties ); - } - - void vkGetPhysicalDeviceQueueFamilyProperties2( VkPhysicalDevice physicalDevice, - uint32_t * pQueueFamilyPropertyCount, - VkQueueFamilyProperties2 * pQueueFamilyProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceQueueFamilyProperties2( physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties ); - } - - void vkGetPhysicalDeviceMemoryProperties2( VkPhysicalDevice physicalDevice, - VkPhysicalDeviceMemoryProperties2 * pMemoryProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceMemoryProperties2( physicalDevice, pMemoryProperties ); - } - - void vkGetPhysicalDeviceSparseImageFormatProperties2( VkPhysicalDevice physicalDevice, - const VkPhysicalDeviceSparseImageFormatInfo2 * pFormatInfo, - uint32_t * pPropertyCount, - VkSparseImageFormatProperties2 * pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceSparseImageFormatProperties2( physicalDevice, pFormatInfo, pPropertyCount, pProperties ); - } - - void vkTrimCommandPool( VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags ) const VULKAN_HPP_NOEXCEPT - { - return ::vkTrimCommandPool( device, commandPool, flags ); - } - - void vkGetDeviceQueue2( VkDevice device, const VkDeviceQueueInfo2 * pQueueInfo, VkQueue * pQueue ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDeviceQueue2( device, pQueueInfo, pQueue ); - } - - VkResult vkCreateSamplerYcbcrConversion( VkDevice device, - const VkSamplerYcbcrConversionCreateInfo * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkSamplerYcbcrConversion * pYcbcrConversion ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateSamplerYcbcrConversion( device, pCreateInfo, pAllocator, pYcbcrConversion ); - } - - void vkDestroySamplerYcbcrConversion( VkDevice device, - VkSamplerYcbcrConversion ycbcrConversion, - const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroySamplerYcbcrConversion( device, ycbcrConversion, pAllocator ); - } - - VkResult vkCreateDescriptorUpdateTemplate( VkDevice device, - const VkDescriptorUpdateTemplateCreateInfo * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkDescriptorUpdateTemplate * pDescriptorUpdateTemplate ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateDescriptorUpdateTemplate( device, pCreateInfo, pAllocator, pDescriptorUpdateTemplate ); - } - - void vkDestroyDescriptorUpdateTemplate( VkDevice device, - VkDescriptorUpdateTemplate descriptorUpdateTemplate, + VkFence * pFence ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateFence( device, pCreateInfo, pAllocator, pFence ); + } + + void vkDestroyFence( VkDevice device, VkFence 
fence, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyFence( device, fence, pAllocator ); + } + + VkResult vkResetFences( VkDevice device, uint32_t fenceCount, const VkFence * pFences ) const VULKAN_HPP_NOEXCEPT + { + return ::vkResetFences( device, fenceCount, pFences ); + } + + VkResult vkGetFenceStatus( VkDevice device, VkFence fence ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetFenceStatus( device, fence ); + } + + VkResult vkWaitForFences( VkDevice device, uint32_t fenceCount, const VkFence * pFences, VkBool32 waitAll, uint64_t timeout ) const VULKAN_HPP_NOEXCEPT + { + return ::vkWaitForFences( device, fenceCount, pFences, waitAll, timeout ); + } + + VkResult vkCreateSemaphore( VkDevice device, + const VkSemaphoreCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSemaphore * pSemaphore ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateSemaphore( device, pCreateInfo, pAllocator, pSemaphore ); + } + + void vkDestroySemaphore( VkDevice device, VkSemaphore semaphore, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroySemaphore( device, semaphore, pAllocator ); + } + + VkResult vkCreateEvent( VkDevice device, + const VkEventCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkEvent * pEvent ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateEvent( device, pCreateInfo, pAllocator, pEvent ); + } + + void vkDestroyEvent( VkDevice device, VkEvent event, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyEvent( device, event, pAllocator ); + } + + VkResult vkGetEventStatus( VkDevice device, VkEvent event ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetEventStatus( device, event ); + } + + VkResult vkSetEvent( VkDevice device, VkEvent event ) const VULKAN_HPP_NOEXCEPT + { + return ::vkSetEvent( device, event ); + } + + VkResult vkResetEvent( VkDevice device, VkEvent event ) const VULKAN_HPP_NOEXCEPT + { + return ::vkResetEvent( device, event ); + } + + VkResult vkCreateQueryPool( VkDevice device, + const VkQueryPoolCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkQueryPool * pQueryPool ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateQueryPool( device, pCreateInfo, pAllocator, pQueryPool ); + } + + void vkDestroyQueryPool( VkDevice device, VkQueryPool queryPool, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyQueryPool( device, queryPool, pAllocator ); + } + + VkResult vkGetQueryPoolResults( VkDevice device, + VkQueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + size_t dataSize, + void * pData, + VkDeviceSize stride, + VkQueryResultFlags flags ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetQueryPoolResults( device, queryPool, firstQuery, queryCount, dataSize, pData, stride, flags ); + } + + VkResult vkCreateBuffer( VkDevice device, + const VkBufferCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkBuffer * pBuffer ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateBuffer( device, pCreateInfo, pAllocator, pBuffer ); + } + + void vkDestroyBuffer( VkDevice device, VkBuffer buffer, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyBuffer( device, buffer, pAllocator ); + } + + VkResult vkCreateBufferView( VkDevice device, + const VkBufferViewCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkBufferView * pView ) const VULKAN_HPP_NOEXCEPT + { + return 
::vkCreateBufferView( device, pCreateInfo, pAllocator, pView ); + } + + void vkDestroyBufferView( VkDevice device, VkBufferView bufferView, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyBufferView( device, bufferView, pAllocator ); + } + + VkResult vkCreateImage( VkDevice device, + const VkImageCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkImage * pImage ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateImage( device, pCreateInfo, pAllocator, pImage ); + } + + void vkDestroyImage( VkDevice device, VkImage image, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyImage( device, image, pAllocator ); + } + + void vkGetImageSubresourceLayout( VkDevice device, + VkImage image, + const VkImageSubresource * pSubresource, + VkSubresourceLayout * pLayout ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetImageSubresourceLayout( device, image, pSubresource, pLayout ); + } + + VkResult vkCreateImageView( VkDevice device, + const VkImageViewCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkImageView * pView ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateImageView( device, pCreateInfo, pAllocator, pView ); + } + + void vkDestroyImageView( VkDevice device, VkImageView imageView, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyImageView( device, imageView, pAllocator ); + } + + VkResult vkCreateShaderModule( VkDevice device, + const VkShaderModuleCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkShaderModule * pShaderModule ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateShaderModule( device, pCreateInfo, pAllocator, pShaderModule ); + } + + void vkDestroyShaderModule( VkDevice device, VkShaderModule shaderModule, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyShaderModule( device, shaderModule, pAllocator ); + } + + VkResult vkCreatePipelineCache( VkDevice device, + const VkPipelineCacheCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkPipelineCache * pPipelineCache ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreatePipelineCache( device, pCreateInfo, pAllocator, pPipelineCache ); + } + + void vkDestroyPipelineCache( VkDevice device, VkPipelineCache pipelineCache, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyPipelineCache( device, pipelineCache, pAllocator ); + } + + VkResult vkGetPipelineCacheData( VkDevice device, VkPipelineCache pipelineCache, size_t * pDataSize, void * pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPipelineCacheData( device, pipelineCache, pDataSize, pData ); + } + + VkResult + vkMergePipelineCaches( VkDevice device, VkPipelineCache dstCache, uint32_t srcCacheCount, const VkPipelineCache * pSrcCaches ) const VULKAN_HPP_NOEXCEPT + { + return ::vkMergePipelineCaches( device, dstCache, srcCacheCount, pSrcCaches ); + } + + VkResult vkCreateGraphicsPipelines( VkDevice device, + VkPipelineCache pipelineCache, + uint32_t createInfoCount, + const VkGraphicsPipelineCreateInfo * pCreateInfos, + const VkAllocationCallbacks * pAllocator, + VkPipeline * pPipelines ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateGraphicsPipelines( device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines ); + } + + VkResult vkCreateComputePipelines( VkDevice device, + VkPipelineCache pipelineCache, + uint32_t createInfoCount, + const 
VkComputePipelineCreateInfo * pCreateInfos, + const VkAllocationCallbacks * pAllocator, + VkPipeline * pPipelines ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateComputePipelines( device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines ); + } + + void vkDestroyPipeline( VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyPipeline( device, pipeline, pAllocator ); + } + + VkResult vkCreatePipelineLayout( VkDevice device, + const VkPipelineLayoutCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkPipelineLayout * pPipelineLayout ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreatePipelineLayout( device, pCreateInfo, pAllocator, pPipelineLayout ); + } + + void vkDestroyPipelineLayout( VkDevice device, VkPipelineLayout pipelineLayout, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyPipelineLayout( device, pipelineLayout, pAllocator ); + } + + VkResult vkCreateSampler( VkDevice device, + const VkSamplerCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSampler * pSampler ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateSampler( device, pCreateInfo, pAllocator, pSampler ); + } + + void vkDestroySampler( VkDevice device, VkSampler sampler, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroySampler( device, sampler, pAllocator ); + } + + VkResult vkCreateDescriptorSetLayout( VkDevice device, + const VkDescriptorSetLayoutCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkDescriptorSetLayout * pSetLayout ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateDescriptorSetLayout( device, pCreateInfo, pAllocator, pSetLayout ); + } + + void vkDestroyDescriptorSetLayout( VkDevice device, + VkDescriptorSetLayout descriptorSetLayout, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyDescriptorSetLayout( device, descriptorSetLayout, pAllocator ); + } + + VkResult vkCreateDescriptorPool( VkDevice device, + const VkDescriptorPoolCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkDescriptorPool * pDescriptorPool ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateDescriptorPool( device, pCreateInfo, pAllocator, pDescriptorPool ); + } + + void vkDestroyDescriptorPool( VkDevice device, VkDescriptorPool descriptorPool, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyDescriptorPool( device, descriptorPool, pAllocator ); + } + + VkResult vkResetDescriptorPool( VkDevice device, VkDescriptorPool descriptorPool, VkDescriptorPoolResetFlags flags ) const VULKAN_HPP_NOEXCEPT + { + return ::vkResetDescriptorPool( device, descriptorPool, flags ); + } + + VkResult vkAllocateDescriptorSets( VkDevice device, + const VkDescriptorSetAllocateInfo * pAllocateInfo, + VkDescriptorSet * pDescriptorSets ) const VULKAN_HPP_NOEXCEPT + { + return ::vkAllocateDescriptorSets( device, pAllocateInfo, pDescriptorSets ); + } + + VkResult vkFreeDescriptorSets( VkDevice device, + VkDescriptorPool descriptorPool, + uint32_t descriptorSetCount, + const VkDescriptorSet * pDescriptorSets ) const VULKAN_HPP_NOEXCEPT + { + return ::vkFreeDescriptorSets( device, descriptorPool, descriptorSetCount, pDescriptorSets ); + } + + void vkUpdateDescriptorSets( VkDevice device, + uint32_t descriptorWriteCount, + const VkWriteDescriptorSet * pDescriptorWrites, + uint32_t descriptorCopyCount, + const 
VkCopyDescriptorSet * pDescriptorCopies ) const VULKAN_HPP_NOEXCEPT + { + return ::vkUpdateDescriptorSets( device, descriptorWriteCount, pDescriptorWrites, descriptorCopyCount, pDescriptorCopies ); + } + + VkResult vkCreateFramebuffer( VkDevice device, + const VkFramebufferCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkFramebuffer * pFramebuffer ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateFramebuffer( device, pCreateInfo, pAllocator, pFramebuffer ); + } + + void vkDestroyFramebuffer( VkDevice device, VkFramebuffer framebuffer, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyFramebuffer( device, framebuffer, pAllocator ); + } + + VkResult vkCreateRenderPass( VkDevice device, + const VkRenderPassCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkRenderPass * pRenderPass ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateRenderPass( device, pCreateInfo, pAllocator, pRenderPass ); + } + + void vkDestroyRenderPass( VkDevice device, VkRenderPass renderPass, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyRenderPass( device, renderPass, pAllocator ); + } + + void vkGetRenderAreaGranularity( VkDevice device, VkRenderPass renderPass, VkExtent2D * pGranularity ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetRenderAreaGranularity( device, renderPass, pGranularity ); + } + + VkResult vkCreateCommandPool( VkDevice device, + const VkCommandPoolCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkCommandPool * pCommandPool ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateCommandPool( device, pCreateInfo, pAllocator, pCommandPool ); + } + + void vkDestroyCommandPool( VkDevice device, VkCommandPool commandPool, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyCommandPool( device, commandPool, pAllocator ); + } + + VkResult vkResetCommandPool( VkDevice device, VkCommandPool commandPool, VkCommandPoolResetFlags flags ) const VULKAN_HPP_NOEXCEPT + { + return ::vkResetCommandPool( device, commandPool, flags ); + } + + VkResult vkAllocateCommandBuffers( VkDevice device, + const VkCommandBufferAllocateInfo * pAllocateInfo, + VkCommandBuffer * pCommandBuffers ) const VULKAN_HPP_NOEXCEPT + { + return ::vkAllocateCommandBuffers( device, pAllocateInfo, pCommandBuffers ); + } + + void vkFreeCommandBuffers( VkDevice device, + VkCommandPool commandPool, + uint32_t commandBufferCount, + const VkCommandBuffer * pCommandBuffers ) const VULKAN_HPP_NOEXCEPT + { + return ::vkFreeCommandBuffers( device, commandPool, commandBufferCount, pCommandBuffers ); + } + + VkResult vkBeginCommandBuffer( VkCommandBuffer commandBuffer, const VkCommandBufferBeginInfo * pBeginInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkBeginCommandBuffer( commandBuffer, pBeginInfo ); + } + + VkResult vkEndCommandBuffer( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT + { + return ::vkEndCommandBuffer( commandBuffer ); + } + + VkResult vkResetCommandBuffer( VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags ) const VULKAN_HPP_NOEXCEPT + { + return ::vkResetCommandBuffer( commandBuffer, flags ); + } + + void vkCmdBindPipeline( VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBindPipeline( commandBuffer, pipelineBindPoint, pipeline ); + } + + void vkCmdSetViewport( VkCommandBuffer commandBuffer, + uint32_t firstViewport, + uint32_t 
viewportCount, + const VkViewport * pViewports ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetViewport( commandBuffer, firstViewport, viewportCount, pViewports ); + } + + void vkCmdSetScissor( VkCommandBuffer commandBuffer, uint32_t firstScissor, uint32_t scissorCount, const VkRect2D * pScissors ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetScissor( commandBuffer, firstScissor, scissorCount, pScissors ); + } + + void vkCmdSetLineWidth( VkCommandBuffer commandBuffer, float lineWidth ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetLineWidth( commandBuffer, lineWidth ); + } + + void vkCmdSetDepthBias( VkCommandBuffer commandBuffer, + float depthBiasConstantFactor, + float depthBiasClamp, + float depthBiasSlopeFactor ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDepthBias( commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor ); + } + + void vkCmdSetBlendConstants( VkCommandBuffer commandBuffer, const float blendConstants[4] ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetBlendConstants( commandBuffer, blendConstants ); + } + + void vkCmdSetDepthBounds( VkCommandBuffer commandBuffer, float minDepthBounds, float maxDepthBounds ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDepthBounds( commandBuffer, minDepthBounds, maxDepthBounds ); + } + + void vkCmdSetStencilCompareMask( VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t compareMask ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetStencilCompareMask( commandBuffer, faceMask, compareMask ); + } + + void vkCmdSetStencilWriteMask( VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t writeMask ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetStencilWriteMask( commandBuffer, faceMask, writeMask ); + } + + void vkCmdSetStencilReference( VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t reference ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetStencilReference( commandBuffer, faceMask, reference ); + } + + void vkCmdBindDescriptorSets( VkCommandBuffer commandBuffer, + VkPipelineBindPoint pipelineBindPoint, + VkPipelineLayout layout, + uint32_t firstSet, + uint32_t descriptorSetCount, + const VkDescriptorSet * pDescriptorSets, + uint32_t dynamicOffsetCount, + const uint32_t * pDynamicOffsets ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBindDescriptorSets( + commandBuffer, pipelineBindPoint, layout, firstSet, descriptorSetCount, pDescriptorSets, dynamicOffsetCount, pDynamicOffsets ); + } + + void vkCmdBindIndexBuffer( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkIndexType indexType ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBindIndexBuffer( commandBuffer, buffer, offset, indexType ); + } + + void vkCmdBindVertexBuffers( VkCommandBuffer commandBuffer, + uint32_t firstBinding, + uint32_t bindingCount, + const VkBuffer * pBuffers, + const VkDeviceSize * pOffsets ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBindVertexBuffers( commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets ); + } + + void vkCmdDraw( VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDraw( commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance ); + } + + void vkCmdDrawIndexed( VkCommandBuffer commandBuffer, + uint32_t indexCount, + uint32_t instanceCount, + uint32_t firstIndex, + int32_t vertexOffset, + uint32_t firstInstance ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawIndexed( commandBuffer, 
indexCount, instanceCount, firstIndex, vertexOffset, firstInstance ); + } + + void + vkCmdDrawIndirect( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawIndirect( commandBuffer, buffer, offset, drawCount, stride ); + } + + void vkCmdDrawIndexedIndirect( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawIndexedIndirect( commandBuffer, buffer, offset, drawCount, stride ); + } + + void vkCmdDispatch( VkCommandBuffer commandBuffer, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDispatch( commandBuffer, groupCountX, groupCountY, groupCountZ ); + } + + void vkCmdDispatchIndirect( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDispatchIndirect( commandBuffer, buffer, offset ); + } + + void vkCmdCopyBuffer( VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferCopy * pRegions ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyBuffer( commandBuffer, srcBuffer, dstBuffer, regionCount, pRegions ); + } + + void vkCmdCopyImage( VkCommandBuffer commandBuffer, + VkImage srcImage, + VkImageLayout srcImageLayout, + VkImage dstImage, + VkImageLayout dstImageLayout, + uint32_t regionCount, + const VkImageCopy * pRegions ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyImage( commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions ); + } + + void vkCmdBlitImage( VkCommandBuffer commandBuffer, + VkImage srcImage, + VkImageLayout srcImageLayout, + VkImage dstImage, + VkImageLayout dstImageLayout, + uint32_t regionCount, + const VkImageBlit * pRegions, + VkFilter filter ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBlitImage( commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions, filter ); + } + + void vkCmdCopyBufferToImage( VkCommandBuffer commandBuffer, + VkBuffer srcBuffer, + VkImage dstImage, + VkImageLayout dstImageLayout, + uint32_t regionCount, + const VkBufferImageCopy * pRegions ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyBufferToImage( commandBuffer, srcBuffer, dstImage, dstImageLayout, regionCount, pRegions ); + } + + void vkCmdCopyImageToBuffer( VkCommandBuffer commandBuffer, + VkImage srcImage, + VkImageLayout srcImageLayout, + VkBuffer dstBuffer, + uint32_t regionCount, + const VkBufferImageCopy * pRegions ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyImageToBuffer( commandBuffer, srcImage, srcImageLayout, dstBuffer, regionCount, pRegions ); + } + + void vkCmdUpdateBuffer( VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize dataSize, const void * pData ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkCmdUpdateBuffer( commandBuffer, dstBuffer, dstOffset, dataSize, pData ); + } + + void + vkCmdFillBuffer( VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize size, uint32_t data ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdFillBuffer( commandBuffer, dstBuffer, dstOffset, size, data ); + } + + void vkCmdClearColorImage( VkCommandBuffer commandBuffer, + VkImage image, + VkImageLayout imageLayout, + const VkClearColorValue * pColor, + uint32_t rangeCount, + const VkImageSubresourceRange * pRanges ) const VULKAN_HPP_NOEXCEPT + { + return 
::vkCmdClearColorImage( commandBuffer, image, imageLayout, pColor, rangeCount, pRanges ); + } + + void vkCmdClearDepthStencilImage( VkCommandBuffer commandBuffer, + VkImage image, + VkImageLayout imageLayout, + const VkClearDepthStencilValue * pDepthStencil, + uint32_t rangeCount, + const VkImageSubresourceRange * pRanges ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdClearDepthStencilImage( commandBuffer, image, imageLayout, pDepthStencil, rangeCount, pRanges ); + } + + void vkCmdClearAttachments( VkCommandBuffer commandBuffer, + uint32_t attachmentCount, + const VkClearAttachment * pAttachments, + uint32_t rectCount, + const VkClearRect * pRects ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdClearAttachments( commandBuffer, attachmentCount, pAttachments, rectCount, pRects ); + } + + void vkCmdResolveImage( VkCommandBuffer commandBuffer, + VkImage srcImage, + VkImageLayout srcImageLayout, + VkImage dstImage, + VkImageLayout dstImageLayout, + uint32_t regionCount, + const VkImageResolve * pRegions ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdResolveImage( commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions ); + } + + void vkCmdSetEvent( VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetEvent( commandBuffer, event, stageMask ); + } + + void vkCmdResetEvent( VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdResetEvent( commandBuffer, event, stageMask ); + } + + void vkCmdWaitEvents( VkCommandBuffer commandBuffer, + uint32_t eventCount, + const VkEvent * pEvents, + VkPipelineStageFlags srcStageMask, + VkPipelineStageFlags dstStageMask, + uint32_t memoryBarrierCount, + const VkMemoryBarrier * pMemoryBarriers, + uint32_t bufferMemoryBarrierCount, + const VkBufferMemoryBarrier * pBufferMemoryBarriers, + uint32_t imageMemoryBarrierCount, + const VkImageMemoryBarrier * pImageMemoryBarriers ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdWaitEvents( commandBuffer, + eventCount, + pEvents, + srcStageMask, + dstStageMask, + memoryBarrierCount, + pMemoryBarriers, + bufferMemoryBarrierCount, + pBufferMemoryBarriers, + imageMemoryBarrierCount, + pImageMemoryBarriers ); + } + + void vkCmdPipelineBarrier( VkCommandBuffer commandBuffer, + VkPipelineStageFlags srcStageMask, + VkPipelineStageFlags dstStageMask, + VkDependencyFlags dependencyFlags, + uint32_t memoryBarrierCount, + const VkMemoryBarrier * pMemoryBarriers, + uint32_t bufferMemoryBarrierCount, + const VkBufferMemoryBarrier * pBufferMemoryBarriers, + uint32_t imageMemoryBarrierCount, + const VkImageMemoryBarrier * pImageMemoryBarriers ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdPipelineBarrier( commandBuffer, + srcStageMask, + dstStageMask, + dependencyFlags, + memoryBarrierCount, + pMemoryBarriers, + bufferMemoryBarrierCount, + pBufferMemoryBarriers, + imageMemoryBarrierCount, + pImageMemoryBarriers ); + } + + void vkCmdBeginQuery( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBeginQuery( commandBuffer, queryPool, query, flags ); + } + + void vkCmdEndQuery( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdEndQuery( commandBuffer, queryPool, query ); + } + + void vkCmdResetQueryPool( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount ) const 
VULKAN_HPP_NOEXCEPT + { + return ::vkCmdResetQueryPool( commandBuffer, queryPool, firstQuery, queryCount ); + } + + void vkCmdWriteTimestamp( VkCommandBuffer commandBuffer, + VkPipelineStageFlagBits pipelineStage, + VkQueryPool queryPool, + uint32_t query ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdWriteTimestamp( commandBuffer, pipelineStage, queryPool, query ); + } + + void vkCmdCopyQueryPoolResults( VkCommandBuffer commandBuffer, + VkQueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + VkBuffer dstBuffer, + VkDeviceSize dstOffset, + VkDeviceSize stride, + VkQueryResultFlags flags ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyQueryPoolResults( commandBuffer, queryPool, firstQuery, queryCount, dstBuffer, dstOffset, stride, flags ); + } + + void vkCmdPushConstants( VkCommandBuffer commandBuffer, + VkPipelineLayout layout, + VkShaderStageFlags stageFlags, + uint32_t offset, + uint32_t size, + const void * pValues ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdPushConstants( commandBuffer, layout, stageFlags, offset, size, pValues ); + } + + void vkCmdBeginRenderPass( VkCommandBuffer commandBuffer, + const VkRenderPassBeginInfo * pRenderPassBegin, + VkSubpassContents contents ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBeginRenderPass( commandBuffer, pRenderPassBegin, contents ); + } + + void vkCmdNextSubpass( VkCommandBuffer commandBuffer, VkSubpassContents contents ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdNextSubpass( commandBuffer, contents ); + } + + void vkCmdEndRenderPass( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdEndRenderPass( commandBuffer ); + } + + void vkCmdExecuteCommands( VkCommandBuffer commandBuffer, uint32_t commandBufferCount, const VkCommandBuffer * pCommandBuffers ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdExecuteCommands( commandBuffer, commandBufferCount, pCommandBuffers ); + } + + //=== VK_VERSION_1_1 === + + VkResult vkEnumerateInstanceVersion( uint32_t * pApiVersion ) const VULKAN_HPP_NOEXCEPT + { + return ::vkEnumerateInstanceVersion( pApiVersion ); + } + + VkResult vkBindBufferMemory2( VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo * pBindInfos ) const VULKAN_HPP_NOEXCEPT + { + return ::vkBindBufferMemory2( device, bindInfoCount, pBindInfos ); + } + + VkResult vkBindImageMemory2( VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo * pBindInfos ) const VULKAN_HPP_NOEXCEPT + { + return ::vkBindImageMemory2( device, bindInfoCount, pBindInfos ); + } + + void vkGetDeviceGroupPeerMemoryFeatures( VkDevice device, + uint32_t heapIndex, + uint32_t localDeviceIndex, + uint32_t remoteDeviceIndex, + VkPeerMemoryFeatureFlags * pPeerMemoryFeatures ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceGroupPeerMemoryFeatures( device, heapIndex, localDeviceIndex, remoteDeviceIndex, pPeerMemoryFeatures ); + } + + void vkCmdSetDeviceMask( VkCommandBuffer commandBuffer, uint32_t deviceMask ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDeviceMask( commandBuffer, deviceMask ); + } + + void vkCmdDispatchBase( VkCommandBuffer commandBuffer, + uint32_t baseGroupX, + uint32_t baseGroupY, + uint32_t baseGroupZ, + uint32_t groupCountX, + uint32_t groupCountY, + uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDispatchBase( commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ ); + } + + VkResult vkEnumeratePhysicalDeviceGroups( VkInstance instance, + uint32_t * pPhysicalDeviceGroupCount, + 
VkPhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkEnumeratePhysicalDeviceGroups( instance, pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties ); + } + + void vkGetImageMemoryRequirements2( VkDevice device, + const VkImageMemoryRequirementsInfo2 * pInfo, + VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetImageMemoryRequirements2( device, pInfo, pMemoryRequirements ); + } + + void vkGetBufferMemoryRequirements2( VkDevice device, + const VkBufferMemoryRequirementsInfo2 * pInfo, + VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetBufferMemoryRequirements2( device, pInfo, pMemoryRequirements ); + } + + void vkGetImageSparseMemoryRequirements2( VkDevice device, + const VkImageSparseMemoryRequirementsInfo2 * pInfo, + uint32_t * pSparseMemoryRequirementCount, + VkSparseImageMemoryRequirements2 * pSparseMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetImageSparseMemoryRequirements2( device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements ); + } + + void vkGetPhysicalDeviceFeatures2( VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2 * pFeatures ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceFeatures2( physicalDevice, pFeatures ); + } + + void vkGetPhysicalDeviceProperties2( VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2 * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceProperties2( physicalDevice, pProperties ); + } + + void vkGetPhysicalDeviceFormatProperties2( VkPhysicalDevice physicalDevice, + VkFormat format, + VkFormatProperties2 * pFormatProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceFormatProperties2( physicalDevice, format, pFormatProperties ); + } + + VkResult vkGetPhysicalDeviceImageFormatProperties2( VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceImageFormatInfo2 * pImageFormatInfo, + VkImageFormatProperties2 * pImageFormatProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceImageFormatProperties2( physicalDevice, pImageFormatInfo, pImageFormatProperties ); + } + + void vkGetPhysicalDeviceQueueFamilyProperties2( VkPhysicalDevice physicalDevice, + uint32_t * pQueueFamilyPropertyCount, + VkQueueFamilyProperties2 * pQueueFamilyProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceQueueFamilyProperties2( physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties ); + } + + void vkGetPhysicalDeviceMemoryProperties2( VkPhysicalDevice physicalDevice, + VkPhysicalDeviceMemoryProperties2 * pMemoryProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceMemoryProperties2( physicalDevice, pMemoryProperties ); + } + + void vkGetPhysicalDeviceSparseImageFormatProperties2( VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceSparseImageFormatInfo2 * pFormatInfo, + uint32_t * pPropertyCount, + VkSparseImageFormatProperties2 * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceSparseImageFormatProperties2( physicalDevice, pFormatInfo, pPropertyCount, pProperties ); + } + + void vkTrimCommandPool( VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags ) const VULKAN_HPP_NOEXCEPT + { + return ::vkTrimCommandPool( device, commandPool, flags ); + } + + void vkGetDeviceQueue2( VkDevice device, const VkDeviceQueueInfo2 * pQueueInfo, VkQueue * pQueue ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceQueue2( device, 
pQueueInfo, pQueue ); + } + + VkResult vkCreateSamplerYcbcrConversion( VkDevice device, + const VkSamplerYcbcrConversionCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSamplerYcbcrConversion * pYcbcrConversion ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateSamplerYcbcrConversion( device, pCreateInfo, pAllocator, pYcbcrConversion ); + } + + void vkDestroySamplerYcbcrConversion( VkDevice device, + VkSamplerYcbcrConversion ycbcrConversion, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyDescriptorUpdateTemplate( device, descriptorUpdateTemplate, pAllocator ); - } - - void vkUpdateDescriptorSetWithTemplate( VkDevice device, - VkDescriptorSet descriptorSet, - VkDescriptorUpdateTemplate descriptorUpdateTemplate, - const void * pData ) const VULKAN_HPP_NOEXCEPT - { - return ::vkUpdateDescriptorSetWithTemplate( device, descriptorSet, descriptorUpdateTemplate, pData ); - } - - void vkGetPhysicalDeviceExternalBufferProperties( VkPhysicalDevice physicalDevice, - const VkPhysicalDeviceExternalBufferInfo * pExternalBufferInfo, - VkExternalBufferProperties * pExternalBufferProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceExternalBufferProperties( physicalDevice, pExternalBufferInfo, pExternalBufferProperties ); - } - - void vkGetPhysicalDeviceExternalFenceProperties( VkPhysicalDevice physicalDevice, - const VkPhysicalDeviceExternalFenceInfo * pExternalFenceInfo, - VkExternalFenceProperties * pExternalFenceProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceExternalFenceProperties( physicalDevice, pExternalFenceInfo, pExternalFenceProperties ); - } - - void vkGetPhysicalDeviceExternalSemaphoreProperties( VkPhysicalDevice physicalDevice, - const VkPhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo, - VkExternalSemaphoreProperties * pExternalSemaphoreProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceExternalSemaphoreProperties( physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties ); - } - - void vkGetDescriptorSetLayoutSupport( VkDevice device, - const VkDescriptorSetLayoutCreateInfo * pCreateInfo, - VkDescriptorSetLayoutSupport * pSupport ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDescriptorSetLayoutSupport( device, pCreateInfo, pSupport ); - } - - //=== VK_VERSION_1_2 === - - void vkCmdDrawIndirectCount( VkCommandBuffer commandBuffer, - VkBuffer buffer, - VkDeviceSize offset, - VkBuffer countBuffer, - VkDeviceSize countBufferOffset, - uint32_t maxDrawCount, - uint32_t stride ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDrawIndirectCount( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); - } - - void vkCmdDrawIndexedIndirectCount( VkCommandBuffer commandBuffer, - VkBuffer buffer, - VkDeviceSize offset, - VkBuffer countBuffer, - VkDeviceSize countBufferOffset, - uint32_t maxDrawCount, - uint32_t stride ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDrawIndexedIndirectCount( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); - } - - VkResult vkCreateRenderPass2( VkDevice device, - const VkRenderPassCreateInfo2 * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkRenderPass * pRenderPass ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateRenderPass2( device, pCreateInfo, pAllocator, pRenderPass ); - } - - void vkCmdBeginRenderPass2( VkCommandBuffer commandBuffer, - const VkRenderPassBeginInfo * pRenderPassBegin, - const VkSubpassBeginInfo * 
pSubpassBeginInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBeginRenderPass2( commandBuffer, pRenderPassBegin, pSubpassBeginInfo ); - } - - void vkCmdNextSubpass2( VkCommandBuffer commandBuffer, - const VkSubpassBeginInfo * pSubpassBeginInfo, - const VkSubpassEndInfo * pSubpassEndInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdNextSubpass2( commandBuffer, pSubpassBeginInfo, pSubpassEndInfo ); - } - - void vkCmdEndRenderPass2( VkCommandBuffer commandBuffer, const VkSubpassEndInfo * pSubpassEndInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdEndRenderPass2( commandBuffer, pSubpassEndInfo ); - } - - void vkResetQueryPool( VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT - { - return ::vkResetQueryPool( device, queryPool, firstQuery, queryCount ); - } - - VkResult vkGetSemaphoreCounterValue( VkDevice device, VkSemaphore semaphore, uint64_t * pValue ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetSemaphoreCounterValue( device, semaphore, pValue ); - } - - VkResult vkWaitSemaphores( VkDevice device, const VkSemaphoreWaitInfo * pWaitInfo, uint64_t timeout ) const VULKAN_HPP_NOEXCEPT - { - return ::vkWaitSemaphores( device, pWaitInfo, timeout ); - } - - VkResult vkSignalSemaphore( VkDevice device, const VkSemaphoreSignalInfo * pSignalInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkSignalSemaphore( device, pSignalInfo ); - } - - VkDeviceAddress vkGetBufferDeviceAddress( VkDevice device, const VkBufferDeviceAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetBufferDeviceAddress( device, pInfo ); - } - - uint64_t vkGetBufferOpaqueCaptureAddress( VkDevice device, const VkBufferDeviceAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetBufferOpaqueCaptureAddress( device, pInfo ); - } - - uint64_t vkGetDeviceMemoryOpaqueCaptureAddress( VkDevice device, const VkDeviceMemoryOpaqueCaptureAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDeviceMemoryOpaqueCaptureAddress( device, pInfo ); - } - - //=== VK_VERSION_1_3 === - - VkResult vkGetPhysicalDeviceToolProperties( VkPhysicalDevice physicalDevice, - uint32_t * pToolCount, - VkPhysicalDeviceToolProperties * pToolProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceToolProperties( physicalDevice, pToolCount, pToolProperties ); - } - - VkResult vkCreatePrivateDataSlot( VkDevice device, - const VkPrivateDataSlotCreateInfo * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkPrivateDataSlot * pPrivateDataSlot ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreatePrivateDataSlot( device, pCreateInfo, pAllocator, pPrivateDataSlot ); - } - - void vkDestroyPrivateDataSlot( VkDevice device, VkPrivateDataSlot privateDataSlot, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyPrivateDataSlot( device, privateDataSlot, pAllocator ); - } - - VkResult vkSetPrivateData( VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlot privateDataSlot, uint64_t data ) const - VULKAN_HPP_NOEXCEPT - { - return ::vkSetPrivateData( device, objectType, objectHandle, privateDataSlot, data ); - } - - void vkGetPrivateData( VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlot privateDataSlot, uint64_t * pData ) const - VULKAN_HPP_NOEXCEPT - { - return ::vkGetPrivateData( device, objectType, objectHandle, privateDataSlot, pData ); - } - - void vkCmdSetEvent2( VkCommandBuffer commandBuffer, VkEvent event, const VkDependencyInfo * 
pDependencyInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetEvent2( commandBuffer, event, pDependencyInfo ); - } - - void vkCmdResetEvent2( VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags2 stageMask ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdResetEvent2( commandBuffer, event, stageMask ); - } - - void vkCmdWaitEvents2( VkCommandBuffer commandBuffer, - uint32_t eventCount, - const VkEvent * pEvents, - const VkDependencyInfo * pDependencyInfos ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdWaitEvents2( commandBuffer, eventCount, pEvents, pDependencyInfos ); - } - - void vkCmdPipelineBarrier2( VkCommandBuffer commandBuffer, const VkDependencyInfo * pDependencyInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdPipelineBarrier2( commandBuffer, pDependencyInfo ); - } - - void vkCmdWriteTimestamp2( VkCommandBuffer commandBuffer, VkPipelineStageFlags2 stage, VkQueryPool queryPool, uint32_t query ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdWriteTimestamp2( commandBuffer, stage, queryPool, query ); - } - - VkResult vkQueueSubmit2( VkQueue queue, uint32_t submitCount, const VkSubmitInfo2 * pSubmits, VkFence fence ) const VULKAN_HPP_NOEXCEPT - { - return ::vkQueueSubmit2( queue, submitCount, pSubmits, fence ); - } - - void vkCmdCopyBuffer2( VkCommandBuffer commandBuffer, const VkCopyBufferInfo2 * pCopyBufferInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdCopyBuffer2( commandBuffer, pCopyBufferInfo ); - } - - void vkCmdCopyImage2( VkCommandBuffer commandBuffer, const VkCopyImageInfo2 * pCopyImageInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdCopyImage2( commandBuffer, pCopyImageInfo ); - } - - void vkCmdCopyBufferToImage2( VkCommandBuffer commandBuffer, const VkCopyBufferToImageInfo2 * pCopyBufferToImageInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdCopyBufferToImage2( commandBuffer, pCopyBufferToImageInfo ); - } - - void vkCmdCopyImageToBuffer2( VkCommandBuffer commandBuffer, const VkCopyImageToBufferInfo2 * pCopyImageToBufferInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdCopyImageToBuffer2( commandBuffer, pCopyImageToBufferInfo ); - } - - void vkCmdBlitImage2( VkCommandBuffer commandBuffer, const VkBlitImageInfo2 * pBlitImageInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBlitImage2( commandBuffer, pBlitImageInfo ); - } - - void vkCmdResolveImage2( VkCommandBuffer commandBuffer, const VkResolveImageInfo2 * pResolveImageInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdResolveImage2( commandBuffer, pResolveImageInfo ); - } - - void vkCmdBeginRendering( VkCommandBuffer commandBuffer, const VkRenderingInfo * pRenderingInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBeginRendering( commandBuffer, pRenderingInfo ); - } - - void vkCmdEndRendering( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdEndRendering( commandBuffer ); - } - - void vkCmdSetCullMode( VkCommandBuffer commandBuffer, VkCullModeFlags cullMode ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetCullMode( commandBuffer, cullMode ); - } - - void vkCmdSetFrontFace( VkCommandBuffer commandBuffer, VkFrontFace frontFace ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetFrontFace( commandBuffer, frontFace ); - } - - void vkCmdSetPrimitiveTopology( VkCommandBuffer commandBuffer, VkPrimitiveTopology primitiveTopology ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetPrimitiveTopology( commandBuffer, primitiveTopology ); - } - - void vkCmdSetViewportWithCount( VkCommandBuffer commandBuffer, uint32_t viewportCount, const VkViewport * pViewports ) 
const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetViewportWithCount( commandBuffer, viewportCount, pViewports ); - } - - void vkCmdSetScissorWithCount( VkCommandBuffer commandBuffer, uint32_t scissorCount, const VkRect2D * pScissors ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetScissorWithCount( commandBuffer, scissorCount, pScissors ); - } - - void vkCmdBindVertexBuffers2( VkCommandBuffer commandBuffer, - uint32_t firstBinding, - uint32_t bindingCount, - const VkBuffer * pBuffers, - const VkDeviceSize * pOffsets, - const VkDeviceSize * pSizes, - const VkDeviceSize * pStrides ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBindVertexBuffers2( commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets, pSizes, pStrides ); - } - - void vkCmdSetDepthTestEnable( VkCommandBuffer commandBuffer, VkBool32 depthTestEnable ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetDepthTestEnable( commandBuffer, depthTestEnable ); - } - - void vkCmdSetDepthWriteEnable( VkCommandBuffer commandBuffer, VkBool32 depthWriteEnable ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetDepthWriteEnable( commandBuffer, depthWriteEnable ); - } - - void vkCmdSetDepthCompareOp( VkCommandBuffer commandBuffer, VkCompareOp depthCompareOp ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetDepthCompareOp( commandBuffer, depthCompareOp ); - } - - void vkCmdSetDepthBoundsTestEnable( VkCommandBuffer commandBuffer, VkBool32 depthBoundsTestEnable ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetDepthBoundsTestEnable( commandBuffer, depthBoundsTestEnable ); - } - - void vkCmdSetStencilTestEnable( VkCommandBuffer commandBuffer, VkBool32 stencilTestEnable ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetStencilTestEnable( commandBuffer, stencilTestEnable ); - } - - void vkCmdSetStencilOp( VkCommandBuffer commandBuffer, - VkStencilFaceFlags faceMask, - VkStencilOp failOp, - VkStencilOp passOp, - VkStencilOp depthFailOp, - VkCompareOp compareOp ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetStencilOp( commandBuffer, faceMask, failOp, passOp, depthFailOp, compareOp ); - } - - void vkCmdSetRasterizerDiscardEnable( VkCommandBuffer commandBuffer, VkBool32 rasterizerDiscardEnable ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetRasterizerDiscardEnable( commandBuffer, rasterizerDiscardEnable ); - } - - void vkCmdSetDepthBiasEnable( VkCommandBuffer commandBuffer, VkBool32 depthBiasEnable ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetDepthBiasEnable( commandBuffer, depthBiasEnable ); - } - - void vkCmdSetPrimitiveRestartEnable( VkCommandBuffer commandBuffer, VkBool32 primitiveRestartEnable ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetPrimitiveRestartEnable( commandBuffer, primitiveRestartEnable ); - } - - void vkGetDeviceBufferMemoryRequirements( VkDevice device, - const VkDeviceBufferMemoryRequirements * pInfo, - VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDeviceBufferMemoryRequirements( device, pInfo, pMemoryRequirements ); - } - - void vkGetDeviceImageMemoryRequirements( VkDevice device, - const VkDeviceImageMemoryRequirements * pInfo, - VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDeviceImageMemoryRequirements( device, pInfo, pMemoryRequirements ); - } - - void vkGetDeviceImageSparseMemoryRequirements( VkDevice device, - const VkDeviceImageMemoryRequirements * pInfo, - uint32_t * pSparseMemoryRequirementCount, - VkSparseImageMemoryRequirements2 * pSparseMemoryRequirements ) const VULKAN_HPP_NOEXCEPT - { - return 
::vkGetDeviceImageSparseMemoryRequirements( device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements ); - } - - //=== VK_KHR_surface === - - void vkDestroySurfaceKHR( VkInstance instance, VkSurfaceKHR surface, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroySurfaceKHR( instance, surface, pAllocator ); - } - - VkResult vkGetPhysicalDeviceSurfaceSupportKHR( VkPhysicalDevice physicalDevice, - uint32_t queueFamilyIndex, - VkSurfaceKHR surface, - VkBool32 * pSupported ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceSurfaceSupportKHR( physicalDevice, queueFamilyIndex, surface, pSupported ); - } - - VkResult vkGetPhysicalDeviceSurfaceCapabilitiesKHR( VkPhysicalDevice physicalDevice, - VkSurfaceKHR surface, - VkSurfaceCapabilitiesKHR * pSurfaceCapabilities ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceSurfaceCapabilitiesKHR( physicalDevice, surface, pSurfaceCapabilities ); - } - - VkResult vkGetPhysicalDeviceSurfaceFormatsKHR( VkPhysicalDevice physicalDevice, - VkSurfaceKHR surface, - uint32_t * pSurfaceFormatCount, - VkSurfaceFormatKHR * pSurfaceFormats ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceSurfaceFormatsKHR( physicalDevice, surface, pSurfaceFormatCount, pSurfaceFormats ); - } - - VkResult vkGetPhysicalDeviceSurfacePresentModesKHR( VkPhysicalDevice physicalDevice, - VkSurfaceKHR surface, - uint32_t * pPresentModeCount, - VkPresentModeKHR * pPresentModes ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceSurfacePresentModesKHR( physicalDevice, surface, pPresentModeCount, pPresentModes ); - } - - //=== VK_KHR_swapchain === - - VkResult vkCreateSwapchainKHR( VkDevice device, - const VkSwapchainCreateInfoKHR * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkSwapchainKHR * pSwapchain ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateSwapchainKHR( device, pCreateInfo, pAllocator, pSwapchain ); - } - - void vkDestroySwapchainKHR( VkDevice device, VkSwapchainKHR swapchain, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroySwapchainKHR( device, swapchain, pAllocator ); - } - - VkResult vkGetSwapchainImagesKHR( VkDevice device, - VkSwapchainKHR swapchain, - uint32_t * pSwapchainImageCount, - VkImage * pSwapchainImages ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetSwapchainImagesKHR( device, swapchain, pSwapchainImageCount, pSwapchainImages ); - } - - VkResult vkAcquireNextImageKHR( - VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout, VkSemaphore semaphore, VkFence fence, uint32_t * pImageIndex ) const VULKAN_HPP_NOEXCEPT - { - return ::vkAcquireNextImageKHR( device, swapchain, timeout, semaphore, fence, pImageIndex ); - } - - VkResult vkQueuePresentKHR( VkQueue queue, const VkPresentInfoKHR * pPresentInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkQueuePresentKHR( queue, pPresentInfo ); - } - - VkResult vkGetDeviceGroupPresentCapabilitiesKHR( VkDevice device, - VkDeviceGroupPresentCapabilitiesKHR * pDeviceGroupPresentCapabilities ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDeviceGroupPresentCapabilitiesKHR( device, pDeviceGroupPresentCapabilities ); - } - - VkResult - vkGetDeviceGroupSurfacePresentModesKHR( VkDevice device, VkSurfaceKHR surface, VkDeviceGroupPresentModeFlagsKHR * pModes ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDeviceGroupSurfacePresentModesKHR( device, surface, pModes ); - } - - VkResult vkGetPhysicalDevicePresentRectanglesKHR( VkPhysicalDevice physicalDevice, - VkSurfaceKHR surface, - 
uint32_t * pRectCount, - VkRect2D * pRects ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDevicePresentRectanglesKHR( physicalDevice, surface, pRectCount, pRects ); - } - - VkResult vkAcquireNextImage2KHR( VkDevice device, const VkAcquireNextImageInfoKHR * pAcquireInfo, uint32_t * pImageIndex ) const VULKAN_HPP_NOEXCEPT - { - return ::vkAcquireNextImage2KHR( device, pAcquireInfo, pImageIndex ); - } - - //=== VK_KHR_display === - - VkResult vkGetPhysicalDeviceDisplayPropertiesKHR( VkPhysicalDevice physicalDevice, - uint32_t * pPropertyCount, - VkDisplayPropertiesKHR * pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceDisplayPropertiesKHR( physicalDevice, pPropertyCount, pProperties ); - } - - VkResult vkGetPhysicalDeviceDisplayPlanePropertiesKHR( VkPhysicalDevice physicalDevice, - uint32_t * pPropertyCount, - VkDisplayPlanePropertiesKHR * pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceDisplayPlanePropertiesKHR( physicalDevice, pPropertyCount, pProperties ); - } - - VkResult vkGetDisplayPlaneSupportedDisplaysKHR( VkPhysicalDevice physicalDevice, - uint32_t planeIndex, - uint32_t * pDisplayCount, - VkDisplayKHR * pDisplays ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDisplayPlaneSupportedDisplaysKHR( physicalDevice, planeIndex, pDisplayCount, pDisplays ); - } - - VkResult vkGetDisplayModePropertiesKHR( VkPhysicalDevice physicalDevice, - VkDisplayKHR display, - uint32_t * pPropertyCount, - VkDisplayModePropertiesKHR * pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDisplayModePropertiesKHR( physicalDevice, display, pPropertyCount, pProperties ); - } - - VkResult vkCreateDisplayModeKHR( VkPhysicalDevice physicalDevice, - VkDisplayKHR display, - const VkDisplayModeCreateInfoKHR * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkDisplayModeKHR * pMode ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateDisplayModeKHR( physicalDevice, display, pCreateInfo, pAllocator, pMode ); - } - - VkResult vkGetDisplayPlaneCapabilitiesKHR( VkPhysicalDevice physicalDevice, - VkDisplayModeKHR mode, - uint32_t planeIndex, - VkDisplayPlaneCapabilitiesKHR * pCapabilities ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDisplayPlaneCapabilitiesKHR( physicalDevice, mode, planeIndex, pCapabilities ); - } - - VkResult vkCreateDisplayPlaneSurfaceKHR( VkInstance instance, - const VkDisplaySurfaceCreateInfoKHR * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateDisplayPlaneSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface ); - } - - //=== VK_KHR_display_swapchain === - - VkResult vkCreateSharedSwapchainsKHR( VkDevice device, - uint32_t swapchainCount, - const VkSwapchainCreateInfoKHR * pCreateInfos, - const VkAllocationCallbacks * pAllocator, - VkSwapchainKHR * pSwapchains ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateSharedSwapchainsKHR( device, swapchainCount, pCreateInfos, pAllocator, pSwapchains ); - } + { + return ::vkDestroySamplerYcbcrConversion( device, ycbcrConversion, pAllocator ); + } + + VkResult vkCreateDescriptorUpdateTemplate( VkDevice device, + const VkDescriptorUpdateTemplateCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkDescriptorUpdateTemplate * pDescriptorUpdateTemplate ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateDescriptorUpdateTemplate( device, pCreateInfo, pAllocator, pDescriptorUpdateTemplate ); + } + + void vkDestroyDescriptorUpdateTemplate( VkDevice device, + VkDescriptorUpdateTemplate 
descriptorUpdateTemplate, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyDescriptorUpdateTemplate( device, descriptorUpdateTemplate, pAllocator ); + } + + void vkUpdateDescriptorSetWithTemplate( VkDevice device, + VkDescriptorSet descriptorSet, + VkDescriptorUpdateTemplate descriptorUpdateTemplate, + const void * pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkUpdateDescriptorSetWithTemplate( device, descriptorSet, descriptorUpdateTemplate, pData ); + } + + void vkGetPhysicalDeviceExternalBufferProperties( VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceExternalBufferInfo * pExternalBufferInfo, + VkExternalBufferProperties * pExternalBufferProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceExternalBufferProperties( physicalDevice, pExternalBufferInfo, pExternalBufferProperties ); + } + + void vkGetPhysicalDeviceExternalFenceProperties( VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceExternalFenceInfo * pExternalFenceInfo, + VkExternalFenceProperties * pExternalFenceProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceExternalFenceProperties( physicalDevice, pExternalFenceInfo, pExternalFenceProperties ); + } + + void vkGetPhysicalDeviceExternalSemaphoreProperties( VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo, + VkExternalSemaphoreProperties * pExternalSemaphoreProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceExternalSemaphoreProperties( physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties ); + } + + void vkGetDescriptorSetLayoutSupport( VkDevice device, + const VkDescriptorSetLayoutCreateInfo * pCreateInfo, + VkDescriptorSetLayoutSupport * pSupport ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDescriptorSetLayoutSupport( device, pCreateInfo, pSupport ); + } + + //=== VK_VERSION_1_2 === + + void vkCmdDrawIndirectCount( VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawIndirectCount( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); + } + + void vkCmdDrawIndexedIndirectCount( VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawIndexedIndirectCount( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); + } + + VkResult vkCreateRenderPass2( VkDevice device, + const VkRenderPassCreateInfo2 * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkRenderPass * pRenderPass ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateRenderPass2( device, pCreateInfo, pAllocator, pRenderPass ); + } + + void vkCmdBeginRenderPass2( VkCommandBuffer commandBuffer, + const VkRenderPassBeginInfo * pRenderPassBegin, + const VkSubpassBeginInfo * pSubpassBeginInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBeginRenderPass2( commandBuffer, pRenderPassBegin, pSubpassBeginInfo ); + } + + void vkCmdNextSubpass2( VkCommandBuffer commandBuffer, + const VkSubpassBeginInfo * pSubpassBeginInfo, + const VkSubpassEndInfo * pSubpassEndInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdNextSubpass2( commandBuffer, pSubpassBeginInfo, pSubpassEndInfo ); + } + + void vkCmdEndRenderPass2( 
VkCommandBuffer commandBuffer, const VkSubpassEndInfo * pSubpassEndInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdEndRenderPass2( commandBuffer, pSubpassEndInfo ); + } + + void vkResetQueryPool( VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT + { + return ::vkResetQueryPool( device, queryPool, firstQuery, queryCount ); + } + + VkResult vkGetSemaphoreCounterValue( VkDevice device, VkSemaphore semaphore, uint64_t * pValue ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetSemaphoreCounterValue( device, semaphore, pValue ); + } + + VkResult vkWaitSemaphores( VkDevice device, const VkSemaphoreWaitInfo * pWaitInfo, uint64_t timeout ) const VULKAN_HPP_NOEXCEPT + { + return ::vkWaitSemaphores( device, pWaitInfo, timeout ); + } + + VkResult vkSignalSemaphore( VkDevice device, const VkSemaphoreSignalInfo * pSignalInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkSignalSemaphore( device, pSignalInfo ); + } + + VkDeviceAddress vkGetBufferDeviceAddress( VkDevice device, const VkBufferDeviceAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetBufferDeviceAddress( device, pInfo ); + } + + uint64_t vkGetBufferOpaqueCaptureAddress( VkDevice device, const VkBufferDeviceAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetBufferOpaqueCaptureAddress( device, pInfo ); + } + + uint64_t vkGetDeviceMemoryOpaqueCaptureAddress( VkDevice device, const VkDeviceMemoryOpaqueCaptureAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceMemoryOpaqueCaptureAddress( device, pInfo ); + } + + //=== VK_VERSION_1_3 === + + VkResult vkGetPhysicalDeviceToolProperties( VkPhysicalDevice physicalDevice, + uint32_t * pToolCount, + VkPhysicalDeviceToolProperties * pToolProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceToolProperties( physicalDevice, pToolCount, pToolProperties ); + } + + VkResult vkCreatePrivateDataSlot( VkDevice device, + const VkPrivateDataSlotCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkPrivateDataSlot * pPrivateDataSlot ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreatePrivateDataSlot( device, pCreateInfo, pAllocator, pPrivateDataSlot ); + } + + void vkDestroyPrivateDataSlot( VkDevice device, VkPrivateDataSlot privateDataSlot, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyPrivateDataSlot( device, privateDataSlot, pAllocator ); + } + + VkResult vkSetPrivateData( VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlot privateDataSlot, uint64_t data ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkSetPrivateData( device, objectType, objectHandle, privateDataSlot, data ); + } + + void vkGetPrivateData( VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlot privateDataSlot, uint64_t * pData ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkGetPrivateData( device, objectType, objectHandle, privateDataSlot, pData ); + } + + void vkCmdSetEvent2( VkCommandBuffer commandBuffer, VkEvent event, const VkDependencyInfo * pDependencyInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetEvent2( commandBuffer, event, pDependencyInfo ); + } + + void vkCmdResetEvent2( VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags2 stageMask ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdResetEvent2( commandBuffer, event, stageMask ); + } + + void vkCmdWaitEvents2( VkCommandBuffer commandBuffer, + uint32_t eventCount, + const VkEvent * pEvents, + const 
VkDependencyInfo * pDependencyInfos ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdWaitEvents2( commandBuffer, eventCount, pEvents, pDependencyInfos ); + } + + void vkCmdPipelineBarrier2( VkCommandBuffer commandBuffer, const VkDependencyInfo * pDependencyInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdPipelineBarrier2( commandBuffer, pDependencyInfo ); + } + + void vkCmdWriteTimestamp2( VkCommandBuffer commandBuffer, VkPipelineStageFlags2 stage, VkQueryPool queryPool, uint32_t query ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdWriteTimestamp2( commandBuffer, stage, queryPool, query ); + } + + VkResult vkQueueSubmit2( VkQueue queue, uint32_t submitCount, const VkSubmitInfo2 * pSubmits, VkFence fence ) const VULKAN_HPP_NOEXCEPT + { + return ::vkQueueSubmit2( queue, submitCount, pSubmits, fence ); + } + + void vkCmdCopyBuffer2( VkCommandBuffer commandBuffer, const VkCopyBufferInfo2 * pCopyBufferInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyBuffer2( commandBuffer, pCopyBufferInfo ); + } + + void vkCmdCopyImage2( VkCommandBuffer commandBuffer, const VkCopyImageInfo2 * pCopyImageInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyImage2( commandBuffer, pCopyImageInfo ); + } + + void vkCmdCopyBufferToImage2( VkCommandBuffer commandBuffer, const VkCopyBufferToImageInfo2 * pCopyBufferToImageInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyBufferToImage2( commandBuffer, pCopyBufferToImageInfo ); + } + + void vkCmdCopyImageToBuffer2( VkCommandBuffer commandBuffer, const VkCopyImageToBufferInfo2 * pCopyImageToBufferInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyImageToBuffer2( commandBuffer, pCopyImageToBufferInfo ); + } + + void vkCmdBlitImage2( VkCommandBuffer commandBuffer, const VkBlitImageInfo2 * pBlitImageInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBlitImage2( commandBuffer, pBlitImageInfo ); + } + + void vkCmdResolveImage2( VkCommandBuffer commandBuffer, const VkResolveImageInfo2 * pResolveImageInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdResolveImage2( commandBuffer, pResolveImageInfo ); + } + + void vkCmdBeginRendering( VkCommandBuffer commandBuffer, const VkRenderingInfo * pRenderingInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBeginRendering( commandBuffer, pRenderingInfo ); + } + + void vkCmdEndRendering( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdEndRendering( commandBuffer ); + } + + void vkCmdSetCullMode( VkCommandBuffer commandBuffer, VkCullModeFlags cullMode ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetCullMode( commandBuffer, cullMode ); + } + + void vkCmdSetFrontFace( VkCommandBuffer commandBuffer, VkFrontFace frontFace ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetFrontFace( commandBuffer, frontFace ); + } + + void vkCmdSetPrimitiveTopology( VkCommandBuffer commandBuffer, VkPrimitiveTopology primitiveTopology ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetPrimitiveTopology( commandBuffer, primitiveTopology ); + } + + void vkCmdSetViewportWithCount( VkCommandBuffer commandBuffer, uint32_t viewportCount, const VkViewport * pViewports ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetViewportWithCount( commandBuffer, viewportCount, pViewports ); + } + + void vkCmdSetScissorWithCount( VkCommandBuffer commandBuffer, uint32_t scissorCount, const VkRect2D * pScissors ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetScissorWithCount( commandBuffer, scissorCount, pScissors ); + } + + void vkCmdBindVertexBuffers2( VkCommandBuffer commandBuffer, + uint32_t firstBinding, + 
uint32_t bindingCount, + const VkBuffer * pBuffers, + const VkDeviceSize * pOffsets, + const VkDeviceSize * pSizes, + const VkDeviceSize * pStrides ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBindVertexBuffers2( commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets, pSizes, pStrides ); + } + + void vkCmdSetDepthTestEnable( VkCommandBuffer commandBuffer, VkBool32 depthTestEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDepthTestEnable( commandBuffer, depthTestEnable ); + } + + void vkCmdSetDepthWriteEnable( VkCommandBuffer commandBuffer, VkBool32 depthWriteEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDepthWriteEnable( commandBuffer, depthWriteEnable ); + } + + void vkCmdSetDepthCompareOp( VkCommandBuffer commandBuffer, VkCompareOp depthCompareOp ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDepthCompareOp( commandBuffer, depthCompareOp ); + } + + void vkCmdSetDepthBoundsTestEnable( VkCommandBuffer commandBuffer, VkBool32 depthBoundsTestEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDepthBoundsTestEnable( commandBuffer, depthBoundsTestEnable ); + } + + void vkCmdSetStencilTestEnable( VkCommandBuffer commandBuffer, VkBool32 stencilTestEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetStencilTestEnable( commandBuffer, stencilTestEnable ); + } + + void vkCmdSetStencilOp( VkCommandBuffer commandBuffer, + VkStencilFaceFlags faceMask, + VkStencilOp failOp, + VkStencilOp passOp, + VkStencilOp depthFailOp, + VkCompareOp compareOp ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetStencilOp( commandBuffer, faceMask, failOp, passOp, depthFailOp, compareOp ); + } + + void vkCmdSetRasterizerDiscardEnable( VkCommandBuffer commandBuffer, VkBool32 rasterizerDiscardEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetRasterizerDiscardEnable( commandBuffer, rasterizerDiscardEnable ); + } + + void vkCmdSetDepthBiasEnable( VkCommandBuffer commandBuffer, VkBool32 depthBiasEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDepthBiasEnable( commandBuffer, depthBiasEnable ); + } + + void vkCmdSetPrimitiveRestartEnable( VkCommandBuffer commandBuffer, VkBool32 primitiveRestartEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetPrimitiveRestartEnable( commandBuffer, primitiveRestartEnable ); + } + + void vkGetDeviceBufferMemoryRequirements( VkDevice device, + const VkDeviceBufferMemoryRequirements * pInfo, + VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceBufferMemoryRequirements( device, pInfo, pMemoryRequirements ); + } + + void vkGetDeviceImageMemoryRequirements( VkDevice device, + const VkDeviceImageMemoryRequirements * pInfo, + VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceImageMemoryRequirements( device, pInfo, pMemoryRequirements ); + } + + void vkGetDeviceImageSparseMemoryRequirements( VkDevice device, + const VkDeviceImageMemoryRequirements * pInfo, + uint32_t * pSparseMemoryRequirementCount, + VkSparseImageMemoryRequirements2 * pSparseMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceImageSparseMemoryRequirements( device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements ); + } + + //=== VK_KHR_surface === + + void vkDestroySurfaceKHR( VkInstance instance, VkSurfaceKHR surface, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroySurfaceKHR( instance, surface, pAllocator ); + } + + VkResult vkGetPhysicalDeviceSurfaceSupportKHR( VkPhysicalDevice 
physicalDevice, + uint32_t queueFamilyIndex, + VkSurfaceKHR surface, + VkBool32 * pSupported ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceSurfaceSupportKHR( physicalDevice, queueFamilyIndex, surface, pSupported ); + } + + VkResult vkGetPhysicalDeviceSurfaceCapabilitiesKHR( VkPhysicalDevice physicalDevice, + VkSurfaceKHR surface, + VkSurfaceCapabilitiesKHR * pSurfaceCapabilities ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceSurfaceCapabilitiesKHR( physicalDevice, surface, pSurfaceCapabilities ); + } + + VkResult vkGetPhysicalDeviceSurfaceFormatsKHR( VkPhysicalDevice physicalDevice, + VkSurfaceKHR surface, + uint32_t * pSurfaceFormatCount, + VkSurfaceFormatKHR * pSurfaceFormats ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceSurfaceFormatsKHR( physicalDevice, surface, pSurfaceFormatCount, pSurfaceFormats ); + } + + VkResult vkGetPhysicalDeviceSurfacePresentModesKHR( VkPhysicalDevice physicalDevice, + VkSurfaceKHR surface, + uint32_t * pPresentModeCount, + VkPresentModeKHR * pPresentModes ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceSurfacePresentModesKHR( physicalDevice, surface, pPresentModeCount, pPresentModes ); + } + + //=== VK_KHR_swapchain === + + VkResult vkCreateSwapchainKHR( VkDevice device, + const VkSwapchainCreateInfoKHR * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSwapchainKHR * pSwapchain ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateSwapchainKHR( device, pCreateInfo, pAllocator, pSwapchain ); + } + + void vkDestroySwapchainKHR( VkDevice device, VkSwapchainKHR swapchain, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroySwapchainKHR( device, swapchain, pAllocator ); + } + + VkResult vkGetSwapchainImagesKHR( VkDevice device, + VkSwapchainKHR swapchain, + uint32_t * pSwapchainImageCount, + VkImage * pSwapchainImages ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetSwapchainImagesKHR( device, swapchain, pSwapchainImageCount, pSwapchainImages ); + } + + VkResult vkAcquireNextImageKHR( + VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout, VkSemaphore semaphore, VkFence fence, uint32_t * pImageIndex ) const VULKAN_HPP_NOEXCEPT + { + return ::vkAcquireNextImageKHR( device, swapchain, timeout, semaphore, fence, pImageIndex ); + } + + VkResult vkQueuePresentKHR( VkQueue queue, const VkPresentInfoKHR * pPresentInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkQueuePresentKHR( queue, pPresentInfo ); + } + + VkResult vkGetDeviceGroupPresentCapabilitiesKHR( VkDevice device, + VkDeviceGroupPresentCapabilitiesKHR * pDeviceGroupPresentCapabilities ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceGroupPresentCapabilitiesKHR( device, pDeviceGroupPresentCapabilities ); + } + + VkResult + vkGetDeviceGroupSurfacePresentModesKHR( VkDevice device, VkSurfaceKHR surface, VkDeviceGroupPresentModeFlagsKHR * pModes ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceGroupSurfacePresentModesKHR( device, surface, pModes ); + } + + VkResult vkGetPhysicalDevicePresentRectanglesKHR( VkPhysicalDevice physicalDevice, + VkSurfaceKHR surface, + uint32_t * pRectCount, + VkRect2D * pRects ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDevicePresentRectanglesKHR( physicalDevice, surface, pRectCount, pRects ); + } + + VkResult vkAcquireNextImage2KHR( VkDevice device, const VkAcquireNextImageInfoKHR * pAcquireInfo, uint32_t * pImageIndex ) const VULKAN_HPP_NOEXCEPT + { + return ::vkAcquireNextImage2KHR( device, pAcquireInfo, pImageIndex ); + } + + //=== VK_KHR_display 
=== + + VkResult vkGetPhysicalDeviceDisplayPropertiesKHR( VkPhysicalDevice physicalDevice, + uint32_t * pPropertyCount, + VkDisplayPropertiesKHR * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceDisplayPropertiesKHR( physicalDevice, pPropertyCount, pProperties ); + } + + VkResult vkGetPhysicalDeviceDisplayPlanePropertiesKHR( VkPhysicalDevice physicalDevice, + uint32_t * pPropertyCount, + VkDisplayPlanePropertiesKHR * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceDisplayPlanePropertiesKHR( physicalDevice, pPropertyCount, pProperties ); + } + + VkResult vkGetDisplayPlaneSupportedDisplaysKHR( VkPhysicalDevice physicalDevice, + uint32_t planeIndex, + uint32_t * pDisplayCount, + VkDisplayKHR * pDisplays ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDisplayPlaneSupportedDisplaysKHR( physicalDevice, planeIndex, pDisplayCount, pDisplays ); + } + + VkResult vkGetDisplayModePropertiesKHR( VkPhysicalDevice physicalDevice, + VkDisplayKHR display, + uint32_t * pPropertyCount, + VkDisplayModePropertiesKHR * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDisplayModePropertiesKHR( physicalDevice, display, pPropertyCount, pProperties ); + } + + VkResult vkCreateDisplayModeKHR( VkPhysicalDevice physicalDevice, + VkDisplayKHR display, + const VkDisplayModeCreateInfoKHR * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkDisplayModeKHR * pMode ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateDisplayModeKHR( physicalDevice, display, pCreateInfo, pAllocator, pMode ); + } + + VkResult vkGetDisplayPlaneCapabilitiesKHR( VkPhysicalDevice physicalDevice, + VkDisplayModeKHR mode, + uint32_t planeIndex, + VkDisplayPlaneCapabilitiesKHR * pCapabilities ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDisplayPlaneCapabilitiesKHR( physicalDevice, mode, planeIndex, pCapabilities ); + } + + VkResult vkCreateDisplayPlaneSurfaceKHR( VkInstance instance, + const VkDisplaySurfaceCreateInfoKHR * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateDisplayPlaneSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface ); + } + + //=== VK_KHR_display_swapchain === + + VkResult vkCreateSharedSwapchainsKHR( VkDevice device, + uint32_t swapchainCount, + const VkSwapchainCreateInfoKHR * pCreateInfos, + const VkAllocationCallbacks * pAllocator, + VkSwapchainKHR * pSwapchains ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateSharedSwapchainsKHR( device, swapchainCount, pCreateInfos, pAllocator, pSwapchains ); + } # if defined( VK_USE_PLATFORM_XLIB_KHR ) - //=== VK_KHR_xlib_surface === + //=== VK_KHR_xlib_surface === - VkResult vkCreateXlibSurfaceKHR( VkInstance instance, - const VkXlibSurfaceCreateInfoKHR * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateXlibSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface ); - } + VkResult vkCreateXlibSurfaceKHR( VkInstance instance, + const VkXlibSurfaceCreateInfoKHR * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateXlibSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface ); + } - VkBool32 vkGetPhysicalDeviceXlibPresentationSupportKHR( VkPhysicalDevice physicalDevice, - uint32_t queueFamilyIndex, - Display * dpy, - VisualID visualID ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceXlibPresentationSupportKHR( physicalDevice, queueFamilyIndex, dpy, 
visualID ); - } + VkBool32 vkGetPhysicalDeviceXlibPresentationSupportKHR( VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + Display * dpy, + VisualID visualID ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceXlibPresentationSupportKHR( physicalDevice, queueFamilyIndex, dpy, visualID ); + } # endif /*VK_USE_PLATFORM_XLIB_KHR*/ # if defined( VK_USE_PLATFORM_XCB_KHR ) - //=== VK_KHR_xcb_surface === + //=== VK_KHR_xcb_surface === - VkResult vkCreateXcbSurfaceKHR( VkInstance instance, - const VkXcbSurfaceCreateInfoKHR * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateXcbSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface ); - } + VkResult vkCreateXcbSurfaceKHR( VkInstance instance, + const VkXcbSurfaceCreateInfoKHR * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateXcbSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface ); + } - VkBool32 vkGetPhysicalDeviceXcbPresentationSupportKHR( VkPhysicalDevice physicalDevice, - uint32_t queueFamilyIndex, - xcb_connection_t * connection, - xcb_visualid_t visual_id ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceXcbPresentationSupportKHR( physicalDevice, queueFamilyIndex, connection, visual_id ); - } + VkBool32 vkGetPhysicalDeviceXcbPresentationSupportKHR( VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + xcb_connection_t * connection, + xcb_visualid_t visual_id ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceXcbPresentationSupportKHR( physicalDevice, queueFamilyIndex, connection, visual_id ); + } # endif /*VK_USE_PLATFORM_XCB_KHR*/ # if defined( VK_USE_PLATFORM_WAYLAND_KHR ) - //=== VK_KHR_wayland_surface === + //=== VK_KHR_wayland_surface === - VkResult vkCreateWaylandSurfaceKHR( VkInstance instance, - const VkWaylandSurfaceCreateInfoKHR * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateWaylandSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface ); - } + VkResult vkCreateWaylandSurfaceKHR( VkInstance instance, + const VkWaylandSurfaceCreateInfoKHR * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateWaylandSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface ); + } - VkBool32 vkGetPhysicalDeviceWaylandPresentationSupportKHR( VkPhysicalDevice physicalDevice, - uint32_t queueFamilyIndex, - struct wl_display * display ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceWaylandPresentationSupportKHR( physicalDevice, queueFamilyIndex, display ); - } + VkBool32 vkGetPhysicalDeviceWaylandPresentationSupportKHR( VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + struct wl_display * display ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceWaylandPresentationSupportKHR( physicalDevice, queueFamilyIndex, display ); + } # endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ # if defined( VK_USE_PLATFORM_ANDROID_KHR ) - //=== VK_KHR_android_surface === + //=== VK_KHR_android_surface === - VkResult vkCreateAndroidSurfaceKHR( VkInstance instance, - const VkAndroidSurfaceCreateInfoKHR * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateAndroidSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface ); - } + VkResult vkCreateAndroidSurfaceKHR( VkInstance 
instance, + const VkAndroidSurfaceCreateInfoKHR * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateAndroidSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface ); + } # endif /*VK_USE_PLATFORM_ANDROID_KHR*/ # if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_KHR_win32_surface === + //=== VK_KHR_win32_surface === - VkResult vkCreateWin32SurfaceKHR( VkInstance instance, - const VkWin32SurfaceCreateInfoKHR * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateWin32SurfaceKHR( instance, pCreateInfo, pAllocator, pSurface ); - } + VkResult vkCreateWin32SurfaceKHR( VkInstance instance, + const VkWin32SurfaceCreateInfoKHR * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateWin32SurfaceKHR( instance, pCreateInfo, pAllocator, pSurface ); + } - VkBool32 vkGetPhysicalDeviceWin32PresentationSupportKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceWin32PresentationSupportKHR( physicalDevice, queueFamilyIndex ); - } + VkBool32 vkGetPhysicalDeviceWin32PresentationSupportKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceWin32PresentationSupportKHR( physicalDevice, queueFamilyIndex ); + } # endif /*VK_USE_PLATFORM_WIN32_KHR*/ - //=== VK_EXT_debug_report === + //=== VK_EXT_debug_report === - VkResult vkCreateDebugReportCallbackEXT( VkInstance instance, - const VkDebugReportCallbackCreateInfoEXT * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkDebugReportCallbackEXT * pCallback ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateDebugReportCallbackEXT( instance, pCreateInfo, pAllocator, pCallback ); - } + VkResult vkCreateDebugReportCallbackEXT( VkInstance instance, + const VkDebugReportCallbackCreateInfoEXT * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkDebugReportCallbackEXT * pCallback ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateDebugReportCallbackEXT( instance, pCreateInfo, pAllocator, pCallback ); + } - void vkDestroyDebugReportCallbackEXT( VkInstance instance, - VkDebugReportCallbackEXT callback, - const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyDebugReportCallbackEXT( instance, callback, pAllocator ); - } + void vkDestroyDebugReportCallbackEXT( VkInstance instance, + VkDebugReportCallbackEXT callback, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyDebugReportCallbackEXT( instance, callback, pAllocator ); + } - void vkDebugReportMessageEXT( VkInstance instance, - VkDebugReportFlagsEXT flags, - VkDebugReportObjectTypeEXT objectType, - uint64_t object, - size_t location, - int32_t messageCode, - const char * pLayerPrefix, - const char * pMessage ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDebugReportMessageEXT( instance, flags, objectType, object, location, messageCode, pLayerPrefix, pMessage ); - } + void vkDebugReportMessageEXT( VkInstance instance, + VkDebugReportFlagsEXT flags, + VkDebugReportObjectTypeEXT objectType, + uint64_t object, + size_t location, + int32_t messageCode, + const char * pLayerPrefix, + const char * pMessage ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDebugReportMessageEXT( instance, flags, objectType, object, location, messageCode, pLayerPrefix, 
pMessage ); + } - //=== VK_EXT_debug_marker === + //=== VK_EXT_debug_marker === - VkResult vkDebugMarkerSetObjectTagEXT( VkDevice device, const VkDebugMarkerObjectTagInfoEXT * pTagInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDebugMarkerSetObjectTagEXT( device, pTagInfo ); - } + VkResult vkDebugMarkerSetObjectTagEXT( VkDevice device, const VkDebugMarkerObjectTagInfoEXT * pTagInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDebugMarkerSetObjectTagEXT( device, pTagInfo ); + } - VkResult vkDebugMarkerSetObjectNameEXT( VkDevice device, const VkDebugMarkerObjectNameInfoEXT * pNameInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDebugMarkerSetObjectNameEXT( device, pNameInfo ); - } + VkResult vkDebugMarkerSetObjectNameEXT( VkDevice device, const VkDebugMarkerObjectNameInfoEXT * pNameInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDebugMarkerSetObjectNameEXT( device, pNameInfo ); + } - void vkCmdDebugMarkerBeginEXT( VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT * pMarkerInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDebugMarkerBeginEXT( commandBuffer, pMarkerInfo ); - } + void vkCmdDebugMarkerBeginEXT( VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT * pMarkerInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDebugMarkerBeginEXT( commandBuffer, pMarkerInfo ); + } - void vkCmdDebugMarkerEndEXT( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDebugMarkerEndEXT( commandBuffer ); - } + void vkCmdDebugMarkerEndEXT( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDebugMarkerEndEXT( commandBuffer ); + } - void vkCmdDebugMarkerInsertEXT( VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT * pMarkerInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDebugMarkerInsertEXT( commandBuffer, pMarkerInfo ); - } + void vkCmdDebugMarkerInsertEXT( VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT * pMarkerInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDebugMarkerInsertEXT( commandBuffer, pMarkerInfo ); + } - //=== VK_KHR_video_queue === + //=== VK_KHR_video_queue === - VkResult vkGetPhysicalDeviceVideoCapabilitiesKHR( VkPhysicalDevice physicalDevice, - const VkVideoProfileInfoKHR * pVideoProfile, - VkVideoCapabilitiesKHR * pCapabilities ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceVideoCapabilitiesKHR( physicalDevice, pVideoProfile, pCapabilities ); - } + VkResult vkGetPhysicalDeviceVideoCapabilitiesKHR( VkPhysicalDevice physicalDevice, + const VkVideoProfileInfoKHR * pVideoProfile, + VkVideoCapabilitiesKHR * pCapabilities ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceVideoCapabilitiesKHR( physicalDevice, pVideoProfile, pCapabilities ); + } - VkResult vkGetPhysicalDeviceVideoFormatPropertiesKHR( VkPhysicalDevice physicalDevice, - const VkPhysicalDeviceVideoFormatInfoKHR * pVideoFormatInfo, - uint32_t * pVideoFormatPropertyCount, - VkVideoFormatPropertiesKHR * pVideoFormatProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceVideoFormatPropertiesKHR( physicalDevice, pVideoFormatInfo, pVideoFormatPropertyCount, pVideoFormatProperties ); - } + VkResult vkGetPhysicalDeviceVideoFormatPropertiesKHR( VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceVideoFormatInfoKHR * pVideoFormatInfo, + uint32_t * pVideoFormatPropertyCount, + VkVideoFormatPropertiesKHR * pVideoFormatProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceVideoFormatPropertiesKHR( physicalDevice, pVideoFormatInfo, 
pVideoFormatPropertyCount, pVideoFormatProperties ); + } - VkResult vkCreateVideoSessionKHR( VkDevice device, - const VkVideoSessionCreateInfoKHR * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkVideoSessionKHR * pVideoSession ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateVideoSessionKHR( device, pCreateInfo, pAllocator, pVideoSession ); - } + VkResult vkCreateVideoSessionKHR( VkDevice device, + const VkVideoSessionCreateInfoKHR * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkVideoSessionKHR * pVideoSession ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateVideoSessionKHR( device, pCreateInfo, pAllocator, pVideoSession ); + } - void vkDestroyVideoSessionKHR( VkDevice device, VkVideoSessionKHR videoSession, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyVideoSessionKHR( device, videoSession, pAllocator ); - } + void vkDestroyVideoSessionKHR( VkDevice device, VkVideoSessionKHR videoSession, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyVideoSessionKHR( device, videoSession, pAllocator ); + } - VkResult vkGetVideoSessionMemoryRequirementsKHR( VkDevice device, - VkVideoSessionKHR videoSession, - uint32_t * pMemoryRequirementsCount, - VkVideoSessionMemoryRequirementsKHR * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetVideoSessionMemoryRequirementsKHR( device, videoSession, pMemoryRequirementsCount, pMemoryRequirements ); - } + VkResult vkGetVideoSessionMemoryRequirementsKHR( VkDevice device, + VkVideoSessionKHR videoSession, + uint32_t * pMemoryRequirementsCount, + VkVideoSessionMemoryRequirementsKHR * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetVideoSessionMemoryRequirementsKHR( device, videoSession, pMemoryRequirementsCount, pMemoryRequirements ); + } - VkResult vkBindVideoSessionMemoryKHR( VkDevice device, - VkVideoSessionKHR videoSession, - uint32_t bindSessionMemoryInfoCount, - const VkBindVideoSessionMemoryInfoKHR * pBindSessionMemoryInfos ) const VULKAN_HPP_NOEXCEPT - { - return ::vkBindVideoSessionMemoryKHR( device, videoSession, bindSessionMemoryInfoCount, pBindSessionMemoryInfos ); - } + VkResult vkBindVideoSessionMemoryKHR( VkDevice device, + VkVideoSessionKHR videoSession, + uint32_t bindSessionMemoryInfoCount, + const VkBindVideoSessionMemoryInfoKHR * pBindSessionMemoryInfos ) const VULKAN_HPP_NOEXCEPT + { + return ::vkBindVideoSessionMemoryKHR( device, videoSession, bindSessionMemoryInfoCount, pBindSessionMemoryInfos ); + } - VkResult vkCreateVideoSessionParametersKHR( VkDevice device, - const VkVideoSessionParametersCreateInfoKHR * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkVideoSessionParametersKHR * pVideoSessionParameters ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateVideoSessionParametersKHR( device, pCreateInfo, pAllocator, pVideoSessionParameters ); - } + VkResult vkCreateVideoSessionParametersKHR( VkDevice device, + const VkVideoSessionParametersCreateInfoKHR * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkVideoSessionParametersKHR * pVideoSessionParameters ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateVideoSessionParametersKHR( device, pCreateInfo, pAllocator, pVideoSessionParameters ); + } - VkResult vkUpdateVideoSessionParametersKHR( VkDevice device, - VkVideoSessionParametersKHR videoSessionParameters, - const VkVideoSessionParametersUpdateInfoKHR * pUpdateInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkUpdateVideoSessionParametersKHR( device, 
videoSessionParameters, pUpdateInfo ); - } + VkResult vkUpdateVideoSessionParametersKHR( VkDevice device, + VkVideoSessionParametersKHR videoSessionParameters, + const VkVideoSessionParametersUpdateInfoKHR * pUpdateInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkUpdateVideoSessionParametersKHR( device, videoSessionParameters, pUpdateInfo ); + } - void vkDestroyVideoSessionParametersKHR( VkDevice device, - VkVideoSessionParametersKHR videoSessionParameters, - const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyVideoSessionParametersKHR( device, videoSessionParameters, pAllocator ); - } + void vkDestroyVideoSessionParametersKHR( VkDevice device, + VkVideoSessionParametersKHR videoSessionParameters, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyVideoSessionParametersKHR( device, videoSessionParameters, pAllocator ); + } - void vkCmdBeginVideoCodingKHR( VkCommandBuffer commandBuffer, const VkVideoBeginCodingInfoKHR * pBeginInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBeginVideoCodingKHR( commandBuffer, pBeginInfo ); - } + void vkCmdBeginVideoCodingKHR( VkCommandBuffer commandBuffer, const VkVideoBeginCodingInfoKHR * pBeginInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBeginVideoCodingKHR( commandBuffer, pBeginInfo ); + } - void vkCmdEndVideoCodingKHR( VkCommandBuffer commandBuffer, const VkVideoEndCodingInfoKHR * pEndCodingInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdEndVideoCodingKHR( commandBuffer, pEndCodingInfo ); - } + void vkCmdEndVideoCodingKHR( VkCommandBuffer commandBuffer, const VkVideoEndCodingInfoKHR * pEndCodingInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdEndVideoCodingKHR( commandBuffer, pEndCodingInfo ); + } - void vkCmdControlVideoCodingKHR( VkCommandBuffer commandBuffer, const VkVideoCodingControlInfoKHR * pCodingControlInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdControlVideoCodingKHR( commandBuffer, pCodingControlInfo ); - } + void vkCmdControlVideoCodingKHR( VkCommandBuffer commandBuffer, const VkVideoCodingControlInfoKHR * pCodingControlInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdControlVideoCodingKHR( commandBuffer, pCodingControlInfo ); + } - //=== VK_KHR_video_decode_queue === + //=== VK_KHR_video_decode_queue === - void vkCmdDecodeVideoKHR( VkCommandBuffer commandBuffer, const VkVideoDecodeInfoKHR * pDecodeInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDecodeVideoKHR( commandBuffer, pDecodeInfo ); - } + void vkCmdDecodeVideoKHR( VkCommandBuffer commandBuffer, const VkVideoDecodeInfoKHR * pDecodeInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDecodeVideoKHR( commandBuffer, pDecodeInfo ); + } - //=== VK_EXT_transform_feedback === + //=== VK_EXT_transform_feedback === - void vkCmdBindTransformFeedbackBuffersEXT( VkCommandBuffer commandBuffer, - uint32_t firstBinding, - uint32_t bindingCount, - const VkBuffer * pBuffers, - const VkDeviceSize * pOffsets, - const VkDeviceSize * pSizes ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBindTransformFeedbackBuffersEXT( commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets, pSizes ); - } + void vkCmdBindTransformFeedbackBuffersEXT( VkCommandBuffer commandBuffer, + uint32_t firstBinding, + uint32_t bindingCount, + const VkBuffer * pBuffers, + const VkDeviceSize * pOffsets, + const VkDeviceSize * pSizes ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBindTransformFeedbackBuffersEXT( commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets, pSizes ); + } - void 
vkCmdBeginTransformFeedbackEXT( VkCommandBuffer commandBuffer, + void vkCmdBeginTransformFeedbackEXT( VkCommandBuffer commandBuffer, + uint32_t firstCounterBuffer, + uint32_t counterBufferCount, + const VkBuffer * pCounterBuffers, + const VkDeviceSize * pCounterBufferOffsets ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBeginTransformFeedbackEXT( commandBuffer, firstCounterBuffer, counterBufferCount, pCounterBuffers, pCounterBufferOffsets ); + } + + void vkCmdEndTransformFeedbackEXT( VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VkBuffer * pCounterBuffers, const VkDeviceSize * pCounterBufferOffsets ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBeginTransformFeedbackEXT( commandBuffer, firstCounterBuffer, counterBufferCount, pCounterBuffers, pCounterBufferOffsets ); - } - - void vkCmdEndTransformFeedbackEXT( VkCommandBuffer commandBuffer, - uint32_t firstCounterBuffer, - uint32_t counterBufferCount, - const VkBuffer * pCounterBuffers, - const VkDeviceSize * pCounterBufferOffsets ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdEndTransformFeedbackEXT( commandBuffer, firstCounterBuffer, counterBufferCount, pCounterBuffers, pCounterBufferOffsets ); - } - - void vkCmdBeginQueryIndexedEXT( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags, uint32_t index ) const - VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBeginQueryIndexedEXT( commandBuffer, queryPool, query, flags, index ); - } - - void vkCmdEndQueryIndexedEXT( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, uint32_t index ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdEndQueryIndexedEXT( commandBuffer, queryPool, query, index ); - } - - void vkCmdDrawIndirectByteCountEXT( VkCommandBuffer commandBuffer, - uint32_t instanceCount, - uint32_t firstInstance, - VkBuffer counterBuffer, - VkDeviceSize counterBufferOffset, - uint32_t counterOffset, - uint32_t vertexStride ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDrawIndirectByteCountEXT( commandBuffer, instanceCount, firstInstance, counterBuffer, counterBufferOffset, counterOffset, vertexStride ); - } - - //=== VK_NVX_binary_import === - - VkResult vkCreateCuModuleNVX( VkDevice device, - const VkCuModuleCreateInfoNVX * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkCuModuleNVX * pModule ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateCuModuleNVX( device, pCreateInfo, pAllocator, pModule ); - } - - VkResult vkCreateCuFunctionNVX( VkDevice device, - const VkCuFunctionCreateInfoNVX * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkCuFunctionNVX * pFunction ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateCuFunctionNVX( device, pCreateInfo, pAllocator, pFunction ); - } - - void vkDestroyCuModuleNVX( VkDevice device, VkCuModuleNVX module, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyCuModuleNVX( device, module, pAllocator ); - } - - void vkDestroyCuFunctionNVX( VkDevice device, VkCuFunctionNVX function, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyCuFunctionNVX( device, function, pAllocator ); - } - - void vkCmdCuLaunchKernelNVX( VkCommandBuffer commandBuffer, const VkCuLaunchInfoNVX * pLaunchInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdCuLaunchKernelNVX( commandBuffer, pLaunchInfo ); - } - - //=== VK_NVX_image_view_handle === - - uint32_t vkGetImageViewHandleNVX( VkDevice device, const VkImageViewHandleInfoNVX * pInfo ) const VULKAN_HPP_NOEXCEPT - 
{ - return ::vkGetImageViewHandleNVX( device, pInfo ); - } - - VkResult vkGetImageViewAddressNVX( VkDevice device, VkImageView imageView, VkImageViewAddressPropertiesNVX * pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetImageViewAddressNVX( device, imageView, pProperties ); - } - - //=== VK_AMD_draw_indirect_count === - - void vkCmdDrawIndirectCountAMD( VkCommandBuffer commandBuffer, - VkBuffer buffer, - VkDeviceSize offset, - VkBuffer countBuffer, - VkDeviceSize countBufferOffset, - uint32_t maxDrawCount, - uint32_t stride ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDrawIndirectCountAMD( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); - } - - void vkCmdDrawIndexedIndirectCountAMD( VkCommandBuffer commandBuffer, - VkBuffer buffer, - VkDeviceSize offset, - VkBuffer countBuffer, - VkDeviceSize countBufferOffset, - uint32_t maxDrawCount, - uint32_t stride ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDrawIndexedIndirectCountAMD( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); - } - - //=== VK_AMD_shader_info === - - VkResult vkGetShaderInfoAMD( VkDevice device, - VkPipeline pipeline, - VkShaderStageFlagBits shaderStage, - VkShaderInfoTypeAMD infoType, - size_t * pInfoSize, - void * pInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetShaderInfoAMD( device, pipeline, shaderStage, infoType, pInfoSize, pInfo ); - } - - //=== VK_KHR_dynamic_rendering === - - void vkCmdBeginRenderingKHR( VkCommandBuffer commandBuffer, const VkRenderingInfo * pRenderingInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBeginRenderingKHR( commandBuffer, pRenderingInfo ); - } - - void vkCmdEndRenderingKHR( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdEndRenderingKHR( commandBuffer ); - } - -# if defined( VK_USE_PLATFORM_GGP ) - //=== VK_GGP_stream_descriptor_surface === - - VkResult vkCreateStreamDescriptorSurfaceGGP( VkInstance instance, - const VkStreamDescriptorSurfaceCreateInfoGGP * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateStreamDescriptorSurfaceGGP( instance, pCreateInfo, pAllocator, pSurface ); - } -# endif /*VK_USE_PLATFORM_GGP*/ - - //=== VK_NV_external_memory_capabilities === - - VkResult vkGetPhysicalDeviceExternalImageFormatPropertiesNV( VkPhysicalDevice physicalDevice, - VkFormat format, - VkImageType type, - VkImageTiling tiling, - VkImageUsageFlags usage, - VkImageCreateFlags flags, - VkExternalMemoryHandleTypeFlagsNV externalHandleType, - VkExternalImageFormatPropertiesNV * pExternalImageFormatProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceExternalImageFormatPropertiesNV( - physicalDevice, format, type, tiling, usage, flags, externalHandleType, pExternalImageFormatProperties ); - } - -# if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_NV_external_memory_win32 === - - VkResult vkGetMemoryWin32HandleNV( VkDevice device, - VkDeviceMemory memory, - VkExternalMemoryHandleTypeFlagsNV handleType, - HANDLE * pHandle ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetMemoryWin32HandleNV( device, memory, handleType, pHandle ); - } -# endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - //=== VK_KHR_get_physical_device_properties2 === - - void vkGetPhysicalDeviceFeatures2KHR( VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2 * pFeatures ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceFeatures2KHR( physicalDevice, pFeatures ); - } - - void 
vkGetPhysicalDeviceProperties2KHR( VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2 * pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceProperties2KHR( physicalDevice, pProperties ); - } - - void vkGetPhysicalDeviceFormatProperties2KHR( VkPhysicalDevice physicalDevice, - VkFormat format, - VkFormatProperties2 * pFormatProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceFormatProperties2KHR( physicalDevice, format, pFormatProperties ); - } - - VkResult vkGetPhysicalDeviceImageFormatProperties2KHR( VkPhysicalDevice physicalDevice, - const VkPhysicalDeviceImageFormatInfo2 * pImageFormatInfo, - VkImageFormatProperties2 * pImageFormatProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceImageFormatProperties2KHR( physicalDevice, pImageFormatInfo, pImageFormatProperties ); - } - - void vkGetPhysicalDeviceQueueFamilyProperties2KHR( VkPhysicalDevice physicalDevice, - uint32_t * pQueueFamilyPropertyCount, - VkQueueFamilyProperties2 * pQueueFamilyProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceQueueFamilyProperties2KHR( physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties ); - } - - void vkGetPhysicalDeviceMemoryProperties2KHR( VkPhysicalDevice physicalDevice, - VkPhysicalDeviceMemoryProperties2 * pMemoryProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceMemoryProperties2KHR( physicalDevice, pMemoryProperties ); - } - - void vkGetPhysicalDeviceSparseImageFormatProperties2KHR( VkPhysicalDevice physicalDevice, - const VkPhysicalDeviceSparseImageFormatInfo2 * pFormatInfo, - uint32_t * pPropertyCount, - VkSparseImageFormatProperties2 * pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceSparseImageFormatProperties2KHR( physicalDevice, pFormatInfo, pPropertyCount, pProperties ); - } - - //=== VK_KHR_device_group === - - void vkGetDeviceGroupPeerMemoryFeaturesKHR( VkDevice device, - uint32_t heapIndex, - uint32_t localDeviceIndex, - uint32_t remoteDeviceIndex, - VkPeerMemoryFeatureFlags * pPeerMemoryFeatures ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDeviceGroupPeerMemoryFeaturesKHR( device, heapIndex, localDeviceIndex, remoteDeviceIndex, pPeerMemoryFeatures ); - } - - void vkCmdSetDeviceMaskKHR( VkCommandBuffer commandBuffer, uint32_t deviceMask ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetDeviceMaskKHR( commandBuffer, deviceMask ); - } - - void vkCmdDispatchBaseKHR( VkCommandBuffer commandBuffer, - uint32_t baseGroupX, - uint32_t baseGroupY, - uint32_t baseGroupZ, - uint32_t groupCountX, - uint32_t groupCountY, - uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDispatchBaseKHR( commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ ); - } - -# if defined( VK_USE_PLATFORM_VI_NN ) - //=== VK_NN_vi_surface === - - VkResult vkCreateViSurfaceNN( VkInstance instance, - const VkViSurfaceCreateInfoNN * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateViSurfaceNN( instance, pCreateInfo, pAllocator, pSurface ); - } -# endif /*VK_USE_PLATFORM_VI_NN*/ - - //=== VK_KHR_maintenance1 === - - void vkTrimCommandPoolKHR( VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags ) const VULKAN_HPP_NOEXCEPT - { - return ::vkTrimCommandPoolKHR( device, commandPool, flags ); - } - - //=== VK_KHR_device_group_creation === - - VkResult vkEnumeratePhysicalDeviceGroupsKHR( VkInstance instance, - uint32_t * 
pPhysicalDeviceGroupCount, - VkPhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkEnumeratePhysicalDeviceGroupsKHR( instance, pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties ); - } - - //=== VK_KHR_external_memory_capabilities === - - void vkGetPhysicalDeviceExternalBufferPropertiesKHR( VkPhysicalDevice physicalDevice, - const VkPhysicalDeviceExternalBufferInfo * pExternalBufferInfo, - VkExternalBufferProperties * pExternalBufferProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceExternalBufferPropertiesKHR( physicalDevice, pExternalBufferInfo, pExternalBufferProperties ); - } - -# if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_KHR_external_memory_win32 === - - VkResult vkGetMemoryWin32HandleKHR( VkDevice device, const VkMemoryGetWin32HandleInfoKHR * pGetWin32HandleInfo, HANDLE * pHandle ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetMemoryWin32HandleKHR( device, pGetWin32HandleInfo, pHandle ); - } - - VkResult vkGetMemoryWin32HandlePropertiesKHR( VkDevice device, - VkExternalMemoryHandleTypeFlagBits handleType, - HANDLE handle, - VkMemoryWin32HandlePropertiesKHR * pMemoryWin32HandleProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetMemoryWin32HandlePropertiesKHR( device, handleType, handle, pMemoryWin32HandleProperties ); - } -# endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - //=== VK_KHR_external_memory_fd === - - VkResult vkGetMemoryFdKHR( VkDevice device, const VkMemoryGetFdInfoKHR * pGetFdInfo, int * pFd ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetMemoryFdKHR( device, pGetFdInfo, pFd ); - } - - VkResult vkGetMemoryFdPropertiesKHR( VkDevice device, - VkExternalMemoryHandleTypeFlagBits handleType, - int fd, - VkMemoryFdPropertiesKHR * pMemoryFdProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetMemoryFdPropertiesKHR( device, handleType, fd, pMemoryFdProperties ); - } - - //=== VK_KHR_external_semaphore_capabilities === - - void vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( VkPhysicalDevice physicalDevice, - const VkPhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo, - VkExternalSemaphoreProperties * pExternalSemaphoreProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties ); - } - -# if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_KHR_external_semaphore_win32 === - - VkResult vkImportSemaphoreWin32HandleKHR( VkDevice device, - const VkImportSemaphoreWin32HandleInfoKHR * pImportSemaphoreWin32HandleInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkImportSemaphoreWin32HandleKHR( device, pImportSemaphoreWin32HandleInfo ); - } - - VkResult - vkGetSemaphoreWin32HandleKHR( VkDevice device, const VkSemaphoreGetWin32HandleInfoKHR * pGetWin32HandleInfo, HANDLE * pHandle ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetSemaphoreWin32HandleKHR( device, pGetWin32HandleInfo, pHandle ); - } -# endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - //=== VK_KHR_external_semaphore_fd === - - VkResult vkImportSemaphoreFdKHR( VkDevice device, const VkImportSemaphoreFdInfoKHR * pImportSemaphoreFdInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkImportSemaphoreFdKHR( device, pImportSemaphoreFdInfo ); - } - - VkResult vkGetSemaphoreFdKHR( VkDevice device, const VkSemaphoreGetFdInfoKHR * pGetFdInfo, int * pFd ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetSemaphoreFdKHR( device, pGetFdInfo, pFd ); - } - - //=== VK_KHR_push_descriptor === - - void vkCmdPushDescriptorSetKHR( VkCommandBuffer 
commandBuffer, - VkPipelineBindPoint pipelineBindPoint, - VkPipelineLayout layout, - uint32_t set, - uint32_t descriptorWriteCount, - const VkWriteDescriptorSet * pDescriptorWrites ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdPushDescriptorSetKHR( commandBuffer, pipelineBindPoint, layout, set, descriptorWriteCount, pDescriptorWrites ); - } - - void vkCmdPushDescriptorSetWithTemplateKHR( VkCommandBuffer commandBuffer, - VkDescriptorUpdateTemplate descriptorUpdateTemplate, - VkPipelineLayout layout, - uint32_t set, - const void * pData ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdPushDescriptorSetWithTemplateKHR( commandBuffer, descriptorUpdateTemplate, layout, set, pData ); - } - - //=== VK_EXT_conditional_rendering === - - void vkCmdBeginConditionalRenderingEXT( VkCommandBuffer commandBuffer, - const VkConditionalRenderingBeginInfoEXT * pConditionalRenderingBegin ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBeginConditionalRenderingEXT( commandBuffer, pConditionalRenderingBegin ); - } - - void vkCmdEndConditionalRenderingEXT( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdEndConditionalRenderingEXT( commandBuffer ); - } - - //=== VK_KHR_descriptor_update_template === - - VkResult vkCreateDescriptorUpdateTemplateKHR( VkDevice device, - const VkDescriptorUpdateTemplateCreateInfo * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkDescriptorUpdateTemplate * pDescriptorUpdateTemplate ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateDescriptorUpdateTemplateKHR( device, pCreateInfo, pAllocator, pDescriptorUpdateTemplate ); - } - - void vkDestroyDescriptorUpdateTemplateKHR( VkDevice device, - VkDescriptorUpdateTemplate descriptorUpdateTemplate, - const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyDescriptorUpdateTemplateKHR( device, descriptorUpdateTemplate, pAllocator ); - } - - void vkUpdateDescriptorSetWithTemplateKHR( VkDevice device, - VkDescriptorSet descriptorSet, - VkDescriptorUpdateTemplate descriptorUpdateTemplate, - const void * pData ) const VULKAN_HPP_NOEXCEPT - { - return ::vkUpdateDescriptorSetWithTemplateKHR( device, descriptorSet, descriptorUpdateTemplate, pData ); - } - - //=== VK_NV_clip_space_w_scaling === - - void vkCmdSetViewportWScalingNV( VkCommandBuffer commandBuffer, - uint32_t firstViewport, - uint32_t viewportCount, - const VkViewportWScalingNV * pViewportWScalings ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetViewportWScalingNV( commandBuffer, firstViewport, viewportCount, pViewportWScalings ); - } - - //=== VK_EXT_direct_mode_display === - - VkResult vkReleaseDisplayEXT( VkPhysicalDevice physicalDevice, VkDisplayKHR display ) const VULKAN_HPP_NOEXCEPT - { - return ::vkReleaseDisplayEXT( physicalDevice, display ); - } - -# if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT ) - //=== VK_EXT_acquire_xlib_display === - - VkResult vkAcquireXlibDisplayEXT( VkPhysicalDevice physicalDevice, Display * dpy, VkDisplayKHR display ) const VULKAN_HPP_NOEXCEPT - { - return ::vkAcquireXlibDisplayEXT( physicalDevice, dpy, display ); - } - - VkResult vkGetRandROutputDisplayEXT( VkPhysicalDevice physicalDevice, Display * dpy, RROutput rrOutput, VkDisplayKHR * pDisplay ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetRandROutputDisplayEXT( physicalDevice, dpy, rrOutput, pDisplay ); - } -# endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ - - //=== VK_EXT_display_surface_counter === - - VkResult vkGetPhysicalDeviceSurfaceCapabilities2EXT( VkPhysicalDevice physicalDevice, - VkSurfaceKHR surface, - 
VkSurfaceCapabilities2EXT * pSurfaceCapabilities ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceSurfaceCapabilities2EXT( physicalDevice, surface, pSurfaceCapabilities ); - } - - //=== VK_EXT_display_control === - - VkResult vkDisplayPowerControlEXT( VkDevice device, VkDisplayKHR display, const VkDisplayPowerInfoEXT * pDisplayPowerInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDisplayPowerControlEXT( device, display, pDisplayPowerInfo ); - } - - VkResult vkRegisterDeviceEventEXT( VkDevice device, - const VkDeviceEventInfoEXT * pDeviceEventInfo, - const VkAllocationCallbacks * pAllocator, - VkFence * pFence ) const VULKAN_HPP_NOEXCEPT - { - return ::vkRegisterDeviceEventEXT( device, pDeviceEventInfo, pAllocator, pFence ); - } - - VkResult vkRegisterDisplayEventEXT( VkDevice device, - VkDisplayKHR display, - const VkDisplayEventInfoEXT * pDisplayEventInfo, - const VkAllocationCallbacks * pAllocator, - VkFence * pFence ) const VULKAN_HPP_NOEXCEPT - { - return ::vkRegisterDisplayEventEXT( device, display, pDisplayEventInfo, pAllocator, pFence ); - } - - VkResult vkGetSwapchainCounterEXT( VkDevice device, - VkSwapchainKHR swapchain, - VkSurfaceCounterFlagBitsEXT counter, - uint64_t * pCounterValue ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetSwapchainCounterEXT( device, swapchain, counter, pCounterValue ); - } - - //=== VK_GOOGLE_display_timing === - - VkResult vkGetRefreshCycleDurationGOOGLE( VkDevice device, - VkSwapchainKHR swapchain, - VkRefreshCycleDurationGOOGLE * pDisplayTimingProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetRefreshCycleDurationGOOGLE( device, swapchain, pDisplayTimingProperties ); - } - - VkResult vkGetPastPresentationTimingGOOGLE( VkDevice device, - VkSwapchainKHR swapchain, - uint32_t * pPresentationTimingCount, - VkPastPresentationTimingGOOGLE * pPresentationTimings ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPastPresentationTimingGOOGLE( device, swapchain, pPresentationTimingCount, pPresentationTimings ); - } - - //=== VK_EXT_discard_rectangles === - - void vkCmdSetDiscardRectangleEXT( VkCommandBuffer commandBuffer, - uint32_t firstDiscardRectangle, - uint32_t discardRectangleCount, - const VkRect2D * pDiscardRectangles ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetDiscardRectangleEXT( commandBuffer, firstDiscardRectangle, discardRectangleCount, pDiscardRectangles ); - } - - void vkCmdSetDiscardRectangleEnableEXT( VkCommandBuffer commandBuffer, VkBool32 discardRectangleEnable ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetDiscardRectangleEnableEXT( commandBuffer, discardRectangleEnable ); - } - - void vkCmdSetDiscardRectangleModeEXT( VkCommandBuffer commandBuffer, VkDiscardRectangleModeEXT discardRectangleMode ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetDiscardRectangleModeEXT( commandBuffer, discardRectangleMode ); - } - - //=== VK_EXT_hdr_metadata === - - void vkSetHdrMetadataEXT( VkDevice device, - uint32_t swapchainCount, - const VkSwapchainKHR * pSwapchains, - const VkHdrMetadataEXT * pMetadata ) const VULKAN_HPP_NOEXCEPT - { - return ::vkSetHdrMetadataEXT( device, swapchainCount, pSwapchains, pMetadata ); - } - - //=== VK_KHR_create_renderpass2 === - - VkResult vkCreateRenderPass2KHR( VkDevice device, - const VkRenderPassCreateInfo2 * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkRenderPass * pRenderPass ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateRenderPass2KHR( device, pCreateInfo, pAllocator, pRenderPass ); - } - - void vkCmdBeginRenderPass2KHR( VkCommandBuffer commandBuffer, - const 
VkRenderPassBeginInfo * pRenderPassBegin, - const VkSubpassBeginInfo * pSubpassBeginInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBeginRenderPass2KHR( commandBuffer, pRenderPassBegin, pSubpassBeginInfo ); - } - - void vkCmdNextSubpass2KHR( VkCommandBuffer commandBuffer, - const VkSubpassBeginInfo * pSubpassBeginInfo, - const VkSubpassEndInfo * pSubpassEndInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdNextSubpass2KHR( commandBuffer, pSubpassBeginInfo, pSubpassEndInfo ); - } - - void vkCmdEndRenderPass2KHR( VkCommandBuffer commandBuffer, const VkSubpassEndInfo * pSubpassEndInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdEndRenderPass2KHR( commandBuffer, pSubpassEndInfo ); - } - - //=== VK_KHR_shared_presentable_image === - - VkResult vkGetSwapchainStatusKHR( VkDevice device, VkSwapchainKHR swapchain ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetSwapchainStatusKHR( device, swapchain ); - } - - //=== VK_KHR_external_fence_capabilities === - - void vkGetPhysicalDeviceExternalFencePropertiesKHR( VkPhysicalDevice physicalDevice, - const VkPhysicalDeviceExternalFenceInfo * pExternalFenceInfo, - VkExternalFenceProperties * pExternalFenceProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceExternalFencePropertiesKHR( physicalDevice, pExternalFenceInfo, pExternalFenceProperties ); - } - -# if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_KHR_external_fence_win32 === - - VkResult vkImportFenceWin32HandleKHR( VkDevice device, const VkImportFenceWin32HandleInfoKHR * pImportFenceWin32HandleInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkImportFenceWin32HandleKHR( device, pImportFenceWin32HandleInfo ); - } - - VkResult vkGetFenceWin32HandleKHR( VkDevice device, const VkFenceGetWin32HandleInfoKHR * pGetWin32HandleInfo, HANDLE * pHandle ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetFenceWin32HandleKHR( device, pGetWin32HandleInfo, pHandle ); - } -# endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - //=== VK_KHR_external_fence_fd === - - VkResult vkImportFenceFdKHR( VkDevice device, const VkImportFenceFdInfoKHR * pImportFenceFdInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkImportFenceFdKHR( device, pImportFenceFdInfo ); - } - - VkResult vkGetFenceFdKHR( VkDevice device, const VkFenceGetFdInfoKHR * pGetFdInfo, int * pFd ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetFenceFdKHR( device, pGetFdInfo, pFd ); - } - - //=== VK_KHR_performance_query === - - VkResult - vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( VkPhysicalDevice physicalDevice, - uint32_t queueFamilyIndex, - uint32_t * pCounterCount, - VkPerformanceCounterKHR * pCounters, - VkPerformanceCounterDescriptionKHR * pCounterDescriptions ) const VULKAN_HPP_NOEXCEPT - { - return ::vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( - physicalDevice, queueFamilyIndex, pCounterCount, pCounters, pCounterDescriptions ); - } - - void vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( VkPhysicalDevice physicalDevice, - const VkQueryPoolPerformanceCreateInfoKHR * pPerformanceQueryCreateInfo, - uint32_t * pNumPasses ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( physicalDevice, pPerformanceQueryCreateInfo, pNumPasses ); - } - - VkResult vkAcquireProfilingLockKHR( VkDevice device, const VkAcquireProfilingLockInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkAcquireProfilingLockKHR( device, pInfo ); - } - - void vkReleaseProfilingLockKHR( VkDevice device ) const VULKAN_HPP_NOEXCEPT - { - return ::vkReleaseProfilingLockKHR( 
device ); - } - - //=== VK_KHR_get_surface_capabilities2 === - - VkResult vkGetPhysicalDeviceSurfaceCapabilities2KHR( VkPhysicalDevice physicalDevice, - const VkPhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, - VkSurfaceCapabilities2KHR * pSurfaceCapabilities ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceSurfaceCapabilities2KHR( physicalDevice, pSurfaceInfo, pSurfaceCapabilities ); - } - - VkResult vkGetPhysicalDeviceSurfaceFormats2KHR( VkPhysicalDevice physicalDevice, - const VkPhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, - uint32_t * pSurfaceFormatCount, - VkSurfaceFormat2KHR * pSurfaceFormats ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceSurfaceFormats2KHR( physicalDevice, pSurfaceInfo, pSurfaceFormatCount, pSurfaceFormats ); - } - - //=== VK_KHR_get_display_properties2 === - - VkResult vkGetPhysicalDeviceDisplayProperties2KHR( VkPhysicalDevice physicalDevice, - uint32_t * pPropertyCount, - VkDisplayProperties2KHR * pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceDisplayProperties2KHR( physicalDevice, pPropertyCount, pProperties ); - } - - VkResult vkGetPhysicalDeviceDisplayPlaneProperties2KHR( VkPhysicalDevice physicalDevice, - uint32_t * pPropertyCount, - VkDisplayPlaneProperties2KHR * pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceDisplayPlaneProperties2KHR( physicalDevice, pPropertyCount, pProperties ); - } - - VkResult vkGetDisplayModeProperties2KHR( VkPhysicalDevice physicalDevice, - VkDisplayKHR display, - uint32_t * pPropertyCount, - VkDisplayModeProperties2KHR * pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDisplayModeProperties2KHR( physicalDevice, display, pPropertyCount, pProperties ); - } - - VkResult vkGetDisplayPlaneCapabilities2KHR( VkPhysicalDevice physicalDevice, - const VkDisplayPlaneInfo2KHR * pDisplayPlaneInfo, - VkDisplayPlaneCapabilities2KHR * pCapabilities ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDisplayPlaneCapabilities2KHR( physicalDevice, pDisplayPlaneInfo, pCapabilities ); - } - -# if defined( VK_USE_PLATFORM_IOS_MVK ) - //=== VK_MVK_ios_surface === - - VkResult vkCreateIOSSurfaceMVK( VkInstance instance, - const VkIOSSurfaceCreateInfoMVK * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateIOSSurfaceMVK( instance, pCreateInfo, pAllocator, pSurface ); - } -# endif /*VK_USE_PLATFORM_IOS_MVK*/ - -# if defined( VK_USE_PLATFORM_MACOS_MVK ) - //=== VK_MVK_macos_surface === - - VkResult vkCreateMacOSSurfaceMVK( VkInstance instance, - const VkMacOSSurfaceCreateInfoMVK * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateMacOSSurfaceMVK( instance, pCreateInfo, pAllocator, pSurface ); - } -# endif /*VK_USE_PLATFORM_MACOS_MVK*/ - - //=== VK_EXT_debug_utils === - - VkResult vkSetDebugUtilsObjectNameEXT( VkDevice device, const VkDebugUtilsObjectNameInfoEXT * pNameInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkSetDebugUtilsObjectNameEXT( device, pNameInfo ); - } - - VkResult vkSetDebugUtilsObjectTagEXT( VkDevice device, const VkDebugUtilsObjectTagInfoEXT * pTagInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkSetDebugUtilsObjectTagEXT( device, pTagInfo ); - } - - void vkQueueBeginDebugUtilsLabelEXT( VkQueue queue, const VkDebugUtilsLabelEXT * pLabelInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkQueueBeginDebugUtilsLabelEXT( queue, pLabelInfo ); - } - - void vkQueueEndDebugUtilsLabelEXT( VkQueue queue 
) const VULKAN_HPP_NOEXCEPT - { - return ::vkQueueEndDebugUtilsLabelEXT( queue ); - } - - void vkQueueInsertDebugUtilsLabelEXT( VkQueue queue, const VkDebugUtilsLabelEXT * pLabelInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkQueueInsertDebugUtilsLabelEXT( queue, pLabelInfo ); - } - - void vkCmdBeginDebugUtilsLabelEXT( VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT * pLabelInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBeginDebugUtilsLabelEXT( commandBuffer, pLabelInfo ); - } - - void vkCmdEndDebugUtilsLabelEXT( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdEndDebugUtilsLabelEXT( commandBuffer ); - } - - void vkCmdInsertDebugUtilsLabelEXT( VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT * pLabelInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdInsertDebugUtilsLabelEXT( commandBuffer, pLabelInfo ); - } - - VkResult vkCreateDebugUtilsMessengerEXT( VkInstance instance, - const VkDebugUtilsMessengerCreateInfoEXT * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkDebugUtilsMessengerEXT * pMessenger ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateDebugUtilsMessengerEXT( instance, pCreateInfo, pAllocator, pMessenger ); - } - - void vkDestroyDebugUtilsMessengerEXT( VkInstance instance, - VkDebugUtilsMessengerEXT messenger, - const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyDebugUtilsMessengerEXT( instance, messenger, pAllocator ); - } - - void vkSubmitDebugUtilsMessageEXT( VkInstance instance, - VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, - VkDebugUtilsMessageTypeFlagsEXT messageTypes, - const VkDebugUtilsMessengerCallbackDataEXT * pCallbackData ) const VULKAN_HPP_NOEXCEPT - { - return ::vkSubmitDebugUtilsMessageEXT( instance, messageSeverity, messageTypes, pCallbackData ); - } - -# if defined( VK_USE_PLATFORM_ANDROID_KHR ) - //=== VK_ANDROID_external_memory_android_hardware_buffer === - - VkResult vkGetAndroidHardwareBufferPropertiesANDROID( VkDevice device, - const struct AHardwareBuffer * buffer, - VkAndroidHardwareBufferPropertiesANDROID * pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetAndroidHardwareBufferPropertiesANDROID( device, buffer, pProperties ); - } - - VkResult vkGetMemoryAndroidHardwareBufferANDROID( VkDevice device, - const VkMemoryGetAndroidHardwareBufferInfoANDROID * pInfo, - struct AHardwareBuffer ** pBuffer ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetMemoryAndroidHardwareBufferANDROID( device, pInfo, pBuffer ); - } -# endif /*VK_USE_PLATFORM_ANDROID_KHR*/ - -# if defined( VK_ENABLE_BETA_EXTENSIONS ) - //=== VK_AMDX_shader_enqueue === - - VkResult vkCreateExecutionGraphPipelinesAMDX( VkDevice device, - VkPipelineCache pipelineCache, - uint32_t createInfoCount, - const VkExecutionGraphPipelineCreateInfoAMDX * pCreateInfos, - const VkAllocationCallbacks * pAllocator, - VkPipeline * pPipelines ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateExecutionGraphPipelinesAMDX( device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines ); - } - - VkResult vkGetExecutionGraphPipelineScratchSizeAMDX( VkDevice device, - VkPipeline executionGraph, - VkExecutionGraphPipelineScratchSizeAMDX * pSizeInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetExecutionGraphPipelineScratchSizeAMDX( device, executionGraph, pSizeInfo ); - } - - VkResult vkGetExecutionGraphPipelineNodeIndexAMDX( VkDevice device, - VkPipeline executionGraph, - const VkPipelineShaderStageNodeCreateInfoAMDX * pNodeInfo, - uint32_t * pNodeIndex ) const 
VULKAN_HPP_NOEXCEPT - { - return ::vkGetExecutionGraphPipelineNodeIndexAMDX( device, executionGraph, pNodeInfo, pNodeIndex ); - } - - void vkCmdInitializeGraphScratchMemoryAMDX( VkCommandBuffer commandBuffer, - VkPipeline executionGraph, - VkDeviceAddress scratch, - VkDeviceSize scratchSize ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdInitializeGraphScratchMemoryAMDX( commandBuffer, executionGraph, scratch, scratchSize ); - } - - void vkCmdDispatchGraphAMDX( VkCommandBuffer commandBuffer, - VkDeviceAddress scratch, - VkDeviceSize scratchSize, - const VkDispatchGraphCountInfoAMDX * pCountInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDispatchGraphAMDX( commandBuffer, scratch, scratchSize, pCountInfo ); - } - - void vkCmdDispatchGraphIndirectAMDX( VkCommandBuffer commandBuffer, - VkDeviceAddress scratch, - VkDeviceSize scratchSize, - const VkDispatchGraphCountInfoAMDX * pCountInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDispatchGraphIndirectAMDX( commandBuffer, scratch, scratchSize, pCountInfo ); - } - - void vkCmdDispatchGraphIndirectCountAMDX( VkCommandBuffer commandBuffer, - VkDeviceAddress scratch, - VkDeviceSize scratchSize, - VkDeviceAddress countInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDispatchGraphIndirectCountAMDX( commandBuffer, scratch, scratchSize, countInfo ); - } -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ - - //=== VK_EXT_sample_locations === - - void vkCmdSetSampleLocationsEXT( VkCommandBuffer commandBuffer, const VkSampleLocationsInfoEXT * pSampleLocationsInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetSampleLocationsEXT( commandBuffer, pSampleLocationsInfo ); - } - - void vkGetPhysicalDeviceMultisamplePropertiesEXT( VkPhysicalDevice physicalDevice, - VkSampleCountFlagBits samples, - VkMultisamplePropertiesEXT * pMultisampleProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceMultisamplePropertiesEXT( physicalDevice, samples, pMultisampleProperties ); - } - - //=== VK_KHR_get_memory_requirements2 === - - void vkGetImageMemoryRequirements2KHR( VkDevice device, - const VkImageMemoryRequirementsInfo2 * pInfo, - VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetImageMemoryRequirements2KHR( device, pInfo, pMemoryRequirements ); - } - - void vkGetBufferMemoryRequirements2KHR( VkDevice device, - const VkBufferMemoryRequirementsInfo2 * pInfo, - VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetBufferMemoryRequirements2KHR( device, pInfo, pMemoryRequirements ); - } - - void vkGetImageSparseMemoryRequirements2KHR( VkDevice device, - const VkImageSparseMemoryRequirementsInfo2 * pInfo, - uint32_t * pSparseMemoryRequirementCount, - VkSparseImageMemoryRequirements2 * pSparseMemoryRequirements ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetImageSparseMemoryRequirements2KHR( device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements ); - } - - //=== VK_KHR_acceleration_structure === - - VkResult vkCreateAccelerationStructureKHR( VkDevice device, - const VkAccelerationStructureCreateInfoKHR * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkAccelerationStructureKHR * pAccelerationStructure ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateAccelerationStructureKHR( device, pCreateInfo, pAllocator, pAccelerationStructure ); - } - - void vkDestroyAccelerationStructureKHR( VkDevice device, - VkAccelerationStructureKHR accelerationStructure, - const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return 
::vkDestroyAccelerationStructureKHR( device, accelerationStructure, pAllocator ); - } - - void vkCmdBuildAccelerationStructuresKHR( VkCommandBuffer commandBuffer, - uint32_t infoCount, - const VkAccelerationStructureBuildGeometryInfoKHR * pInfos, - const VkAccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBuildAccelerationStructuresKHR( commandBuffer, infoCount, pInfos, ppBuildRangeInfos ); - } - - void vkCmdBuildAccelerationStructuresIndirectKHR( VkCommandBuffer commandBuffer, - uint32_t infoCount, - const VkAccelerationStructureBuildGeometryInfoKHR * pInfos, - const VkDeviceAddress * pIndirectDeviceAddresses, - const uint32_t * pIndirectStrides, - const uint32_t * const * ppMaxPrimitiveCounts ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBuildAccelerationStructuresIndirectKHR( - commandBuffer, infoCount, pInfos, pIndirectDeviceAddresses, pIndirectStrides, ppMaxPrimitiveCounts ); - } - - VkResult vkBuildAccelerationStructuresKHR( VkDevice device, - VkDeferredOperationKHR deferredOperation, - uint32_t infoCount, - const VkAccelerationStructureBuildGeometryInfoKHR * pInfos, - const VkAccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos ) const VULKAN_HPP_NOEXCEPT - { - return ::vkBuildAccelerationStructuresKHR( device, deferredOperation, infoCount, pInfos, ppBuildRangeInfos ); - } - - VkResult vkCopyAccelerationStructureKHR( VkDevice device, - VkDeferredOperationKHR deferredOperation, - const VkCopyAccelerationStructureInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCopyAccelerationStructureKHR( device, deferredOperation, pInfo ); - } - - VkResult vkCopyAccelerationStructureToMemoryKHR( VkDevice device, - VkDeferredOperationKHR deferredOperation, - const VkCopyAccelerationStructureToMemoryInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCopyAccelerationStructureToMemoryKHR( device, deferredOperation, pInfo ); - } - - VkResult vkCopyMemoryToAccelerationStructureKHR( VkDevice device, - VkDeferredOperationKHR deferredOperation, - const VkCopyMemoryToAccelerationStructureInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCopyMemoryToAccelerationStructureKHR( device, deferredOperation, pInfo ); - } - - VkResult vkWriteAccelerationStructuresPropertiesKHR( VkDevice device, - uint32_t accelerationStructureCount, - const VkAccelerationStructureKHR * pAccelerationStructures, - VkQueryType queryType, - size_t dataSize, - void * pData, - size_t stride ) const VULKAN_HPP_NOEXCEPT - { - return ::vkWriteAccelerationStructuresPropertiesKHR( device, accelerationStructureCount, pAccelerationStructures, queryType, dataSize, pData, stride ); - } - - void vkCmdCopyAccelerationStructureKHR( VkCommandBuffer commandBuffer, const VkCopyAccelerationStructureInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdCopyAccelerationStructureKHR( commandBuffer, pInfo ); - } - - void vkCmdCopyAccelerationStructureToMemoryKHR( VkCommandBuffer commandBuffer, - const VkCopyAccelerationStructureToMemoryInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdCopyAccelerationStructureToMemoryKHR( commandBuffer, pInfo ); - } - - void vkCmdCopyMemoryToAccelerationStructureKHR( VkCommandBuffer commandBuffer, - const VkCopyMemoryToAccelerationStructureInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdCopyMemoryToAccelerationStructureKHR( commandBuffer, pInfo ); - } - - VkDeviceAddress vkGetAccelerationStructureDeviceAddressKHR( VkDevice device, - const 
VkAccelerationStructureDeviceAddressInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetAccelerationStructureDeviceAddressKHR( device, pInfo ); - } - - void vkCmdWriteAccelerationStructuresPropertiesKHR( VkCommandBuffer commandBuffer, - uint32_t accelerationStructureCount, - const VkAccelerationStructureKHR * pAccelerationStructures, - VkQueryType queryType, - VkQueryPool queryPool, - uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdWriteAccelerationStructuresPropertiesKHR( - commandBuffer, accelerationStructureCount, pAccelerationStructures, queryType, queryPool, firstQuery ); - } - - void vkGetDeviceAccelerationStructureCompatibilityKHR( VkDevice device, - const VkAccelerationStructureVersionInfoKHR * pVersionInfo, - VkAccelerationStructureCompatibilityKHR * pCompatibility ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDeviceAccelerationStructureCompatibilityKHR( device, pVersionInfo, pCompatibility ); - } - - void vkGetAccelerationStructureBuildSizesKHR( VkDevice device, - VkAccelerationStructureBuildTypeKHR buildType, - const VkAccelerationStructureBuildGeometryInfoKHR * pBuildInfo, - const uint32_t * pMaxPrimitiveCounts, - VkAccelerationStructureBuildSizesInfoKHR * pSizeInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetAccelerationStructureBuildSizesKHR( device, buildType, pBuildInfo, pMaxPrimitiveCounts, pSizeInfo ); - } - - //=== VK_KHR_ray_tracing_pipeline === - - void vkCmdTraceRaysKHR( VkCommandBuffer commandBuffer, - const VkStridedDeviceAddressRegionKHR * pRaygenShaderBindingTable, - const VkStridedDeviceAddressRegionKHR * pMissShaderBindingTable, - const VkStridedDeviceAddressRegionKHR * pHitShaderBindingTable, - const VkStridedDeviceAddressRegionKHR * pCallableShaderBindingTable, - uint32_t width, - uint32_t height, - uint32_t depth ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdTraceRaysKHR( - commandBuffer, pRaygenShaderBindingTable, pMissShaderBindingTable, pHitShaderBindingTable, pCallableShaderBindingTable, width, height, depth ); - } - - VkResult vkCreateRayTracingPipelinesKHR( VkDevice device, - VkDeferredOperationKHR deferredOperation, - VkPipelineCache pipelineCache, - uint32_t createInfoCount, - const VkRayTracingPipelineCreateInfoKHR * pCreateInfos, - const VkAllocationCallbacks * pAllocator, - VkPipeline * pPipelines ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateRayTracingPipelinesKHR( device, deferredOperation, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines ); - } - - VkResult vkGetRayTracingShaderGroupHandlesKHR( - VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void * pData ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetRayTracingShaderGroupHandlesKHR( device, pipeline, firstGroup, groupCount, dataSize, pData ); - } - - VkResult vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( - VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void * pData ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( device, pipeline, firstGroup, groupCount, dataSize, pData ); - } - - void vkCmdTraceRaysIndirectKHR( VkCommandBuffer commandBuffer, - const VkStridedDeviceAddressRegionKHR * pRaygenShaderBindingTable, - const VkStridedDeviceAddressRegionKHR * pMissShaderBindingTable, - const VkStridedDeviceAddressRegionKHR * pHitShaderBindingTable, - const VkStridedDeviceAddressRegionKHR * pCallableShaderBindingTable, - VkDeviceAddress indirectDeviceAddress ) const VULKAN_HPP_NOEXCEPT - { - 
return ::vkCmdTraceRaysIndirectKHR( - commandBuffer, pRaygenShaderBindingTable, pMissShaderBindingTable, pHitShaderBindingTable, pCallableShaderBindingTable, indirectDeviceAddress ); - } - - VkDeviceSize vkGetRayTracingShaderGroupStackSizeKHR( VkDevice device, - VkPipeline pipeline, - uint32_t group, - VkShaderGroupShaderKHR groupShader ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetRayTracingShaderGroupStackSizeKHR( device, pipeline, group, groupShader ); - } - - void vkCmdSetRayTracingPipelineStackSizeKHR( VkCommandBuffer commandBuffer, uint32_t pipelineStackSize ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetRayTracingPipelineStackSizeKHR( commandBuffer, pipelineStackSize ); - } - - //=== VK_KHR_sampler_ycbcr_conversion === - - VkResult vkCreateSamplerYcbcrConversionKHR( VkDevice device, - const VkSamplerYcbcrConversionCreateInfo * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkSamplerYcbcrConversion * pYcbcrConversion ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateSamplerYcbcrConversionKHR( device, pCreateInfo, pAllocator, pYcbcrConversion ); - } - - void vkDestroySamplerYcbcrConversionKHR( VkDevice device, - VkSamplerYcbcrConversion ycbcrConversion, - const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroySamplerYcbcrConversionKHR( device, ycbcrConversion, pAllocator ); - } - - //=== VK_KHR_bind_memory2 === - - VkResult vkBindBufferMemory2KHR( VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo * pBindInfos ) const VULKAN_HPP_NOEXCEPT - { - return ::vkBindBufferMemory2KHR( device, bindInfoCount, pBindInfos ); - } - - VkResult vkBindImageMemory2KHR( VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo * pBindInfos ) const VULKAN_HPP_NOEXCEPT - { - return ::vkBindImageMemory2KHR( device, bindInfoCount, pBindInfos ); - } - - //=== VK_EXT_image_drm_format_modifier === - - VkResult - vkGetImageDrmFormatModifierPropertiesEXT( VkDevice device, VkImage image, VkImageDrmFormatModifierPropertiesEXT * pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetImageDrmFormatModifierPropertiesEXT( device, image, pProperties ); - } - - //=== VK_EXT_validation_cache === - - VkResult vkCreateValidationCacheEXT( VkDevice device, - const VkValidationCacheCreateInfoEXT * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkValidationCacheEXT * pValidationCache ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateValidationCacheEXT( device, pCreateInfo, pAllocator, pValidationCache ); - } - - void - vkDestroyValidationCacheEXT( VkDevice device, VkValidationCacheEXT validationCache, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyValidationCacheEXT( device, validationCache, pAllocator ); - } - - VkResult vkMergeValidationCachesEXT( VkDevice device, - VkValidationCacheEXT dstCache, - uint32_t srcCacheCount, - const VkValidationCacheEXT * pSrcCaches ) const VULKAN_HPP_NOEXCEPT - { - return ::vkMergeValidationCachesEXT( device, dstCache, srcCacheCount, pSrcCaches ); - } - - VkResult vkGetValidationCacheDataEXT( VkDevice device, VkValidationCacheEXT validationCache, size_t * pDataSize, void * pData ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetValidationCacheDataEXT( device, validationCache, pDataSize, pData ); - } - - //=== VK_NV_shading_rate_image === - - void vkCmdBindShadingRateImageNV( VkCommandBuffer commandBuffer, VkImageView imageView, VkImageLayout imageLayout ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBindShadingRateImageNV( commandBuffer, 
imageView, imageLayout ); - } - - void vkCmdSetViewportShadingRatePaletteNV( VkCommandBuffer commandBuffer, - uint32_t firstViewport, - uint32_t viewportCount, - const VkShadingRatePaletteNV * pShadingRatePalettes ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetViewportShadingRatePaletteNV( commandBuffer, firstViewport, viewportCount, pShadingRatePalettes ); - } - - void vkCmdSetCoarseSampleOrderNV( VkCommandBuffer commandBuffer, - VkCoarseSampleOrderTypeNV sampleOrderType, - uint32_t customSampleOrderCount, - const VkCoarseSampleOrderCustomNV * pCustomSampleOrders ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetCoarseSampleOrderNV( commandBuffer, sampleOrderType, customSampleOrderCount, pCustomSampleOrders ); - } - - //=== VK_NV_ray_tracing === - - VkResult vkCreateAccelerationStructureNV( VkDevice device, - const VkAccelerationStructureCreateInfoNV * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkAccelerationStructureNV * pAccelerationStructure ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateAccelerationStructureNV( device, pCreateInfo, pAllocator, pAccelerationStructure ); - } - - void vkDestroyAccelerationStructureNV( VkDevice device, - VkAccelerationStructureNV accelerationStructure, - const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyAccelerationStructureNV( device, accelerationStructure, pAllocator ); - } - - void vkGetAccelerationStructureMemoryRequirementsNV( VkDevice device, - const VkAccelerationStructureMemoryRequirementsInfoNV * pInfo, - VkMemoryRequirements2KHR * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetAccelerationStructureMemoryRequirementsNV( device, pInfo, pMemoryRequirements ); - } - - VkResult vkBindAccelerationStructureMemoryNV( VkDevice device, - uint32_t bindInfoCount, - const VkBindAccelerationStructureMemoryInfoNV * pBindInfos ) const VULKAN_HPP_NOEXCEPT - { - return ::vkBindAccelerationStructureMemoryNV( device, bindInfoCount, pBindInfos ); - } - - void vkCmdBuildAccelerationStructureNV( VkCommandBuffer commandBuffer, - const VkAccelerationStructureInfoNV * pInfo, - VkBuffer instanceData, - VkDeviceSize instanceOffset, - VkBool32 update, - VkAccelerationStructureNV dst, - VkAccelerationStructureNV src, - VkBuffer scratch, - VkDeviceSize scratchOffset ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBuildAccelerationStructureNV( commandBuffer, pInfo, instanceData, instanceOffset, update, dst, src, scratch, scratchOffset ); - } - - void vkCmdCopyAccelerationStructureNV( VkCommandBuffer commandBuffer, - VkAccelerationStructureNV dst, - VkAccelerationStructureNV src, - VkCopyAccelerationStructureModeKHR mode ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdCopyAccelerationStructureNV( commandBuffer, dst, src, mode ); - } - - void vkCmdTraceRaysNV( VkCommandBuffer commandBuffer, - VkBuffer raygenShaderBindingTableBuffer, - VkDeviceSize raygenShaderBindingOffset, - VkBuffer missShaderBindingTableBuffer, - VkDeviceSize missShaderBindingOffset, - VkDeviceSize missShaderBindingStride, - VkBuffer hitShaderBindingTableBuffer, - VkDeviceSize hitShaderBindingOffset, - VkDeviceSize hitShaderBindingStride, - VkBuffer callableShaderBindingTableBuffer, - VkDeviceSize callableShaderBindingOffset, - VkDeviceSize callableShaderBindingStride, - uint32_t width, - uint32_t height, - uint32_t depth ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdTraceRaysNV( commandBuffer, - raygenShaderBindingTableBuffer, - raygenShaderBindingOffset, - missShaderBindingTableBuffer, - missShaderBindingOffset, - 
missShaderBindingStride, - hitShaderBindingTableBuffer, - hitShaderBindingOffset, - hitShaderBindingStride, - callableShaderBindingTableBuffer, - callableShaderBindingOffset, - callableShaderBindingStride, - width, - height, - depth ); - } - - VkResult vkCreateRayTracingPipelinesNV( VkDevice device, - VkPipelineCache pipelineCache, - uint32_t createInfoCount, - const VkRayTracingPipelineCreateInfoNV * pCreateInfos, - const VkAllocationCallbacks * pAllocator, - VkPipeline * pPipelines ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateRayTracingPipelinesNV( device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines ); - } - - VkResult vkGetRayTracingShaderGroupHandlesNV( - VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void * pData ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetRayTracingShaderGroupHandlesNV( device, pipeline, firstGroup, groupCount, dataSize, pData ); - } - - VkResult vkGetAccelerationStructureHandleNV( VkDevice device, - VkAccelerationStructureNV accelerationStructure, - size_t dataSize, - void * pData ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetAccelerationStructureHandleNV( device, accelerationStructure, dataSize, pData ); - } - - void vkCmdWriteAccelerationStructuresPropertiesNV( VkCommandBuffer commandBuffer, - uint32_t accelerationStructureCount, - const VkAccelerationStructureNV * pAccelerationStructures, - VkQueryType queryType, - VkQueryPool queryPool, - uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdWriteAccelerationStructuresPropertiesNV( - commandBuffer, accelerationStructureCount, pAccelerationStructures, queryType, queryPool, firstQuery ); - } - - VkResult vkCompileDeferredNV( VkDevice device, VkPipeline pipeline, uint32_t shader ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCompileDeferredNV( device, pipeline, shader ); - } - - //=== VK_KHR_maintenance3 === - - void vkGetDescriptorSetLayoutSupportKHR( VkDevice device, - const VkDescriptorSetLayoutCreateInfo * pCreateInfo, - VkDescriptorSetLayoutSupport * pSupport ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDescriptorSetLayoutSupportKHR( device, pCreateInfo, pSupport ); - } - - //=== VK_KHR_draw_indirect_count === - - void vkCmdDrawIndirectCountKHR( VkCommandBuffer commandBuffer, - VkBuffer buffer, - VkDeviceSize offset, - VkBuffer countBuffer, - VkDeviceSize countBufferOffset, - uint32_t maxDrawCount, - uint32_t stride ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDrawIndirectCountKHR( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); - } - - void vkCmdDrawIndexedIndirectCountKHR( VkCommandBuffer commandBuffer, - VkBuffer buffer, - VkDeviceSize offset, - VkBuffer countBuffer, - VkDeviceSize countBufferOffset, - uint32_t maxDrawCount, - uint32_t stride ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDrawIndexedIndirectCountKHR( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); - } - - //=== VK_EXT_external_memory_host === - - VkResult vkGetMemoryHostPointerPropertiesEXT( VkDevice device, - VkExternalMemoryHandleTypeFlagBits handleType, - const void * pHostPointer, - VkMemoryHostPointerPropertiesEXT * pMemoryHostPointerProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetMemoryHostPointerPropertiesEXT( device, handleType, pHostPointer, pMemoryHostPointerProperties ); - } - - //=== VK_AMD_buffer_marker === - - void vkCmdWriteBufferMarkerAMD( VkCommandBuffer commandBuffer, - VkPipelineStageFlagBits pipelineStage, - VkBuffer dstBuffer, - 
VkDeviceSize dstOffset, - uint32_t marker ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdWriteBufferMarkerAMD( commandBuffer, pipelineStage, dstBuffer, dstOffset, marker ); - } - - void vkCmdWriteBufferMarker2AMD( - VkCommandBuffer commandBuffer, VkPipelineStageFlags2 stage, VkBuffer dstBuffer, VkDeviceSize dstOffset, uint32_t marker ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdWriteBufferMarker2AMD( commandBuffer, stage, dstBuffer, dstOffset, marker ); - } - - //=== VK_EXT_calibrated_timestamps === - - VkResult vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( VkPhysicalDevice physicalDevice, - uint32_t * pTimeDomainCount, - VkTimeDomainKHR * pTimeDomains ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( physicalDevice, pTimeDomainCount, pTimeDomains ); - } - - VkResult vkGetCalibratedTimestampsEXT( VkDevice device, - uint32_t timestampCount, - const VkCalibratedTimestampInfoKHR * pTimestampInfos, - uint64_t * pTimestamps, - uint64_t * pMaxDeviation ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetCalibratedTimestampsEXT( device, timestampCount, pTimestampInfos, pTimestamps, pMaxDeviation ); - } - - //=== VK_NV_mesh_shader === - - void vkCmdDrawMeshTasksNV( VkCommandBuffer commandBuffer, uint32_t taskCount, uint32_t firstTask ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDrawMeshTasksNV( commandBuffer, taskCount, firstTask ); - } - - void vkCmdDrawMeshTasksIndirectNV( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride ) const - VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDrawMeshTasksIndirectNV( commandBuffer, buffer, offset, drawCount, stride ); - } - - void vkCmdDrawMeshTasksIndirectCountNV( VkCommandBuffer commandBuffer, - VkBuffer buffer, - VkDeviceSize offset, - VkBuffer countBuffer, - VkDeviceSize countBufferOffset, - uint32_t maxDrawCount, - uint32_t stride ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDrawMeshTasksIndirectCountNV( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); - } - - //=== VK_NV_scissor_exclusive === - - void vkCmdSetExclusiveScissorEnableNV( VkCommandBuffer commandBuffer, - uint32_t firstExclusiveScissor, - uint32_t exclusiveScissorCount, - const VkBool32 * pExclusiveScissorEnables ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetExclusiveScissorEnableNV( commandBuffer, firstExclusiveScissor, exclusiveScissorCount, pExclusiveScissorEnables ); - } - - void vkCmdSetExclusiveScissorNV( VkCommandBuffer commandBuffer, - uint32_t firstExclusiveScissor, - uint32_t exclusiveScissorCount, - const VkRect2D * pExclusiveScissors ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetExclusiveScissorNV( commandBuffer, firstExclusiveScissor, exclusiveScissorCount, pExclusiveScissors ); - } - - //=== VK_NV_device_diagnostic_checkpoints === - - void vkCmdSetCheckpointNV( VkCommandBuffer commandBuffer, const void * pCheckpointMarker ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetCheckpointNV( commandBuffer, pCheckpointMarker ); - } - - void vkGetQueueCheckpointDataNV( VkQueue queue, uint32_t * pCheckpointDataCount, VkCheckpointDataNV * pCheckpointData ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetQueueCheckpointDataNV( queue, pCheckpointDataCount, pCheckpointData ); - } - - void vkGetQueueCheckpointData2NV( VkQueue queue, uint32_t * pCheckpointDataCount, VkCheckpointData2NV * pCheckpointData ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetQueueCheckpointData2NV( queue, pCheckpointDataCount, pCheckpointData ); - } - - //=== 
VK_KHR_timeline_semaphore === - - VkResult vkGetSemaphoreCounterValueKHR( VkDevice device, VkSemaphore semaphore, uint64_t * pValue ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetSemaphoreCounterValueKHR( device, semaphore, pValue ); - } - - VkResult vkWaitSemaphoresKHR( VkDevice device, const VkSemaphoreWaitInfo * pWaitInfo, uint64_t timeout ) const VULKAN_HPP_NOEXCEPT - { - return ::vkWaitSemaphoresKHR( device, pWaitInfo, timeout ); - } - - VkResult vkSignalSemaphoreKHR( VkDevice device, const VkSemaphoreSignalInfo * pSignalInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkSignalSemaphoreKHR( device, pSignalInfo ); - } - - //=== VK_INTEL_performance_query === - - VkResult vkInitializePerformanceApiINTEL( VkDevice device, const VkInitializePerformanceApiInfoINTEL * pInitializeInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkInitializePerformanceApiINTEL( device, pInitializeInfo ); - } - - void vkUninitializePerformanceApiINTEL( VkDevice device ) const VULKAN_HPP_NOEXCEPT - { - return ::vkUninitializePerformanceApiINTEL( device ); - } - - VkResult vkCmdSetPerformanceMarkerINTEL( VkCommandBuffer commandBuffer, const VkPerformanceMarkerInfoINTEL * pMarkerInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetPerformanceMarkerINTEL( commandBuffer, pMarkerInfo ); - } - - VkResult vkCmdSetPerformanceStreamMarkerINTEL( VkCommandBuffer commandBuffer, - const VkPerformanceStreamMarkerInfoINTEL * pMarkerInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetPerformanceStreamMarkerINTEL( commandBuffer, pMarkerInfo ); - } - - VkResult vkCmdSetPerformanceOverrideINTEL( VkCommandBuffer commandBuffer, const VkPerformanceOverrideInfoINTEL * pOverrideInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetPerformanceOverrideINTEL( commandBuffer, pOverrideInfo ); - } - - VkResult vkAcquirePerformanceConfigurationINTEL( VkDevice device, - const VkPerformanceConfigurationAcquireInfoINTEL * pAcquireInfo, - VkPerformanceConfigurationINTEL * pConfiguration ) const VULKAN_HPP_NOEXCEPT - { - return ::vkAcquirePerformanceConfigurationINTEL( device, pAcquireInfo, pConfiguration ); - } - - VkResult vkReleasePerformanceConfigurationINTEL( VkDevice device, VkPerformanceConfigurationINTEL configuration ) const VULKAN_HPP_NOEXCEPT - { - return ::vkReleasePerformanceConfigurationINTEL( device, configuration ); - } - - VkResult vkQueueSetPerformanceConfigurationINTEL( VkQueue queue, VkPerformanceConfigurationINTEL configuration ) const VULKAN_HPP_NOEXCEPT - { - return ::vkQueueSetPerformanceConfigurationINTEL( queue, configuration ); - } - - VkResult - vkGetPerformanceParameterINTEL( VkDevice device, VkPerformanceParameterTypeINTEL parameter, VkPerformanceValueINTEL * pValue ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPerformanceParameterINTEL( device, parameter, pValue ); - } - - //=== VK_AMD_display_native_hdr === - - void vkSetLocalDimmingAMD( VkDevice device, VkSwapchainKHR swapChain, VkBool32 localDimmingEnable ) const VULKAN_HPP_NOEXCEPT - { - return ::vkSetLocalDimmingAMD( device, swapChain, localDimmingEnable ); - } - -# if defined( VK_USE_PLATFORM_FUCHSIA ) - //=== VK_FUCHSIA_imagepipe_surface === - - VkResult vkCreateImagePipeSurfaceFUCHSIA( VkInstance instance, - const VkImagePipeSurfaceCreateInfoFUCHSIA * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateImagePipeSurfaceFUCHSIA( instance, pCreateInfo, pAllocator, pSurface ); - } -# endif /*VK_USE_PLATFORM_FUCHSIA*/ - -# if defined( VK_USE_PLATFORM_METAL_EXT ) - //=== 
VK_EXT_metal_surface === - - VkResult vkCreateMetalSurfaceEXT( VkInstance instance, - const VkMetalSurfaceCreateInfoEXT * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateMetalSurfaceEXT( instance, pCreateInfo, pAllocator, pSurface ); - } -# endif /*VK_USE_PLATFORM_METAL_EXT*/ - - //=== VK_KHR_fragment_shading_rate === - - VkResult vkGetPhysicalDeviceFragmentShadingRatesKHR( VkPhysicalDevice physicalDevice, - uint32_t * pFragmentShadingRateCount, - VkPhysicalDeviceFragmentShadingRateKHR * pFragmentShadingRates ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceFragmentShadingRatesKHR( physicalDevice, pFragmentShadingRateCount, pFragmentShadingRates ); - } - - void vkCmdSetFragmentShadingRateKHR( VkCommandBuffer commandBuffer, - const VkExtent2D * pFragmentSize, - const VkFragmentShadingRateCombinerOpKHR combinerOps[2] ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetFragmentShadingRateKHR( commandBuffer, pFragmentSize, combinerOps ); - } - - //=== VK_KHR_dynamic_rendering_local_read === - - void vkCmdSetRenderingAttachmentLocationsKHR( VkCommandBuffer commandBuffer, - const VkRenderingAttachmentLocationInfoKHR * pLocationInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetRenderingAttachmentLocationsKHR( commandBuffer, pLocationInfo ); - } - - void vkCmdSetRenderingInputAttachmentIndicesKHR( VkCommandBuffer commandBuffer, - const VkRenderingInputAttachmentIndexInfoKHR * pInputAttachmentIndexInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetRenderingInputAttachmentIndicesKHR( commandBuffer, pInputAttachmentIndexInfo ); - } - - //=== VK_EXT_buffer_device_address === - - VkDeviceAddress vkGetBufferDeviceAddressEXT( VkDevice device, const VkBufferDeviceAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetBufferDeviceAddressEXT( device, pInfo ); - } - - //=== VK_EXT_tooling_info === - - VkResult vkGetPhysicalDeviceToolPropertiesEXT( VkPhysicalDevice physicalDevice, - uint32_t * pToolCount, - VkPhysicalDeviceToolProperties * pToolProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceToolPropertiesEXT( physicalDevice, pToolCount, pToolProperties ); - } - - //=== VK_KHR_present_wait === - - VkResult vkWaitForPresentKHR( VkDevice device, VkSwapchainKHR swapchain, uint64_t presentId, uint64_t timeout ) const VULKAN_HPP_NOEXCEPT - { - return ::vkWaitForPresentKHR( device, swapchain, presentId, timeout ); - } - - //=== VK_NV_cooperative_matrix === - - VkResult vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( VkPhysicalDevice physicalDevice, - uint32_t * pPropertyCount, - VkCooperativeMatrixPropertiesNV * pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( physicalDevice, pPropertyCount, pProperties ); - } - - //=== VK_NV_coverage_reduction_mode === - - VkResult vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( - VkPhysicalDevice physicalDevice, uint32_t * pCombinationCount, VkFramebufferMixedSamplesCombinationNV * pCombinations ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( physicalDevice, pCombinationCount, pCombinations ); - } - -# if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_EXT_full_screen_exclusive === - - VkResult vkGetPhysicalDeviceSurfacePresentModes2EXT( VkPhysicalDevice physicalDevice, - const VkPhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, - uint32_t * pPresentModeCount, - VkPresentModeKHR * pPresentModes ) const 
VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceSurfacePresentModes2EXT( physicalDevice, pSurfaceInfo, pPresentModeCount, pPresentModes ); - } - - VkResult vkAcquireFullScreenExclusiveModeEXT( VkDevice device, VkSwapchainKHR swapchain ) const VULKAN_HPP_NOEXCEPT - { - return ::vkAcquireFullScreenExclusiveModeEXT( device, swapchain ); - } - - VkResult vkReleaseFullScreenExclusiveModeEXT( VkDevice device, VkSwapchainKHR swapchain ) const VULKAN_HPP_NOEXCEPT - { - return ::vkReleaseFullScreenExclusiveModeEXT( device, swapchain ); - } - - VkResult vkGetDeviceGroupSurfacePresentModes2EXT( VkDevice device, - const VkPhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, - VkDeviceGroupPresentModeFlagsKHR * pModes ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDeviceGroupSurfacePresentModes2EXT( device, pSurfaceInfo, pModes ); - } -# endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - //=== VK_EXT_headless_surface === - - VkResult vkCreateHeadlessSurfaceEXT( VkInstance instance, - const VkHeadlessSurfaceCreateInfoEXT * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateHeadlessSurfaceEXT( instance, pCreateInfo, pAllocator, pSurface ); - } - - //=== VK_KHR_buffer_device_address === - - VkDeviceAddress vkGetBufferDeviceAddressKHR( VkDevice device, const VkBufferDeviceAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetBufferDeviceAddressKHR( device, pInfo ); - } - - uint64_t vkGetBufferOpaqueCaptureAddressKHR( VkDevice device, const VkBufferDeviceAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetBufferOpaqueCaptureAddressKHR( device, pInfo ); - } - - uint64_t vkGetDeviceMemoryOpaqueCaptureAddressKHR( VkDevice device, const VkDeviceMemoryOpaqueCaptureAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDeviceMemoryOpaqueCaptureAddressKHR( device, pInfo ); - } - - //=== VK_EXT_line_rasterization === - - void vkCmdSetLineStippleEXT( VkCommandBuffer commandBuffer, uint32_t lineStippleFactor, uint16_t lineStipplePattern ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetLineStippleEXT( commandBuffer, lineStippleFactor, lineStipplePattern ); - } - - //=== VK_EXT_host_query_reset === - - void vkResetQueryPoolEXT( VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT - { - return ::vkResetQueryPoolEXT( device, queryPool, firstQuery, queryCount ); - } - - //=== VK_EXT_extended_dynamic_state === - - void vkCmdSetCullModeEXT( VkCommandBuffer commandBuffer, VkCullModeFlags cullMode ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetCullModeEXT( commandBuffer, cullMode ); - } - - void vkCmdSetFrontFaceEXT( VkCommandBuffer commandBuffer, VkFrontFace frontFace ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetFrontFaceEXT( commandBuffer, frontFace ); - } - - void vkCmdSetPrimitiveTopologyEXT( VkCommandBuffer commandBuffer, VkPrimitiveTopology primitiveTopology ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetPrimitiveTopologyEXT( commandBuffer, primitiveTopology ); - } - - void vkCmdSetViewportWithCountEXT( VkCommandBuffer commandBuffer, uint32_t viewportCount, const VkViewport * pViewports ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetViewportWithCountEXT( commandBuffer, viewportCount, pViewports ); - } - - void vkCmdSetScissorWithCountEXT( VkCommandBuffer commandBuffer, uint32_t scissorCount, const VkRect2D * pScissors ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetScissorWithCountEXT( commandBuffer, scissorCount, pScissors ); - 
} - - void vkCmdBindVertexBuffers2EXT( VkCommandBuffer commandBuffer, - uint32_t firstBinding, - uint32_t bindingCount, - const VkBuffer * pBuffers, - const VkDeviceSize * pOffsets, - const VkDeviceSize * pSizes, - const VkDeviceSize * pStrides ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBindVertexBuffers2EXT( commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets, pSizes, pStrides ); - } - - void vkCmdSetDepthTestEnableEXT( VkCommandBuffer commandBuffer, VkBool32 depthTestEnable ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetDepthTestEnableEXT( commandBuffer, depthTestEnable ); - } - - void vkCmdSetDepthWriteEnableEXT( VkCommandBuffer commandBuffer, VkBool32 depthWriteEnable ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetDepthWriteEnableEXT( commandBuffer, depthWriteEnable ); - } - - void vkCmdSetDepthCompareOpEXT( VkCommandBuffer commandBuffer, VkCompareOp depthCompareOp ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetDepthCompareOpEXT( commandBuffer, depthCompareOp ); - } - - void vkCmdSetDepthBoundsTestEnableEXT( VkCommandBuffer commandBuffer, VkBool32 depthBoundsTestEnable ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetDepthBoundsTestEnableEXT( commandBuffer, depthBoundsTestEnable ); - } - - void vkCmdSetStencilTestEnableEXT( VkCommandBuffer commandBuffer, VkBool32 stencilTestEnable ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetStencilTestEnableEXT( commandBuffer, stencilTestEnable ); - } - - void vkCmdSetStencilOpEXT( VkCommandBuffer commandBuffer, - VkStencilFaceFlags faceMask, - VkStencilOp failOp, - VkStencilOp passOp, - VkStencilOp depthFailOp, - VkCompareOp compareOp ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetStencilOpEXT( commandBuffer, faceMask, failOp, passOp, depthFailOp, compareOp ); - } - - //=== VK_KHR_deferred_host_operations === - - VkResult vkCreateDeferredOperationKHR( VkDevice device, - const VkAllocationCallbacks * pAllocator, - VkDeferredOperationKHR * pDeferredOperation ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateDeferredOperationKHR( device, pAllocator, pDeferredOperation ); - } - - void vkDestroyDeferredOperationKHR( VkDevice device, VkDeferredOperationKHR operation, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyDeferredOperationKHR( device, operation, pAllocator ); - } - - uint32_t vkGetDeferredOperationMaxConcurrencyKHR( VkDevice device, VkDeferredOperationKHR operation ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDeferredOperationMaxConcurrencyKHR( device, operation ); - } - - VkResult vkGetDeferredOperationResultKHR( VkDevice device, VkDeferredOperationKHR operation ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDeferredOperationResultKHR( device, operation ); - } - - VkResult vkDeferredOperationJoinKHR( VkDevice device, VkDeferredOperationKHR operation ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDeferredOperationJoinKHR( device, operation ); - } - - //=== VK_KHR_pipeline_executable_properties === - - VkResult vkGetPipelineExecutablePropertiesKHR( VkDevice device, - const VkPipelineInfoKHR * pPipelineInfo, - uint32_t * pExecutableCount, - VkPipelineExecutablePropertiesKHR * pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPipelineExecutablePropertiesKHR( device, pPipelineInfo, pExecutableCount, pProperties ); - } - - VkResult vkGetPipelineExecutableStatisticsKHR( VkDevice device, - const VkPipelineExecutableInfoKHR * pExecutableInfo, - uint32_t * pStatisticCount, - VkPipelineExecutableStatisticKHR * pStatistics ) const VULKAN_HPP_NOEXCEPT - { - return 
::vkGetPipelineExecutableStatisticsKHR( device, pExecutableInfo, pStatisticCount, pStatistics ); - } - - VkResult - vkGetPipelineExecutableInternalRepresentationsKHR( VkDevice device, - const VkPipelineExecutableInfoKHR * pExecutableInfo, - uint32_t * pInternalRepresentationCount, - VkPipelineExecutableInternalRepresentationKHR * pInternalRepresentations ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPipelineExecutableInternalRepresentationsKHR( device, pExecutableInfo, pInternalRepresentationCount, pInternalRepresentations ); - } - - //=== VK_EXT_host_image_copy === - - VkResult vkCopyMemoryToImageEXT( VkDevice device, const VkCopyMemoryToImageInfoEXT * pCopyMemoryToImageInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCopyMemoryToImageEXT( device, pCopyMemoryToImageInfo ); - } - - VkResult vkCopyImageToMemoryEXT( VkDevice device, const VkCopyImageToMemoryInfoEXT * pCopyImageToMemoryInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCopyImageToMemoryEXT( device, pCopyImageToMemoryInfo ); - } - - VkResult vkCopyImageToImageEXT( VkDevice device, const VkCopyImageToImageInfoEXT * pCopyImageToImageInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCopyImageToImageEXT( device, pCopyImageToImageInfo ); - } - - VkResult - vkTransitionImageLayoutEXT( VkDevice device, uint32_t transitionCount, const VkHostImageLayoutTransitionInfoEXT * pTransitions ) const VULKAN_HPP_NOEXCEPT - { - return ::vkTransitionImageLayoutEXT( device, transitionCount, pTransitions ); - } - - void vkGetImageSubresourceLayout2EXT( VkDevice device, - VkImage image, - const VkImageSubresource2KHR * pSubresource, - VkSubresourceLayout2KHR * pLayout ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetImageSubresourceLayout2EXT( device, image, pSubresource, pLayout ); - } - - //=== VK_KHR_map_memory2 === - - VkResult vkMapMemory2KHR( VkDevice device, const VkMemoryMapInfoKHR * pMemoryMapInfo, void ** ppData ) const VULKAN_HPP_NOEXCEPT - { - return ::vkMapMemory2KHR( device, pMemoryMapInfo, ppData ); - } - - VkResult vkUnmapMemory2KHR( VkDevice device, const VkMemoryUnmapInfoKHR * pMemoryUnmapInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkUnmapMemory2KHR( device, pMemoryUnmapInfo ); - } - - //=== VK_EXT_swapchain_maintenance1 === - - VkResult vkReleaseSwapchainImagesEXT( VkDevice device, const VkReleaseSwapchainImagesInfoEXT * pReleaseInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkReleaseSwapchainImagesEXT( device, pReleaseInfo ); - } - - //=== VK_NV_device_generated_commands === - - void vkGetGeneratedCommandsMemoryRequirementsNV( VkDevice device, - const VkGeneratedCommandsMemoryRequirementsInfoNV * pInfo, - VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetGeneratedCommandsMemoryRequirementsNV( device, pInfo, pMemoryRequirements ); - } - - void vkCmdPreprocessGeneratedCommandsNV( VkCommandBuffer commandBuffer, const VkGeneratedCommandsInfoNV * pGeneratedCommandsInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdPreprocessGeneratedCommandsNV( commandBuffer, pGeneratedCommandsInfo ); - } - - void vkCmdExecuteGeneratedCommandsNV( VkCommandBuffer commandBuffer, - VkBool32 isPreprocessed, - const VkGeneratedCommandsInfoNV * pGeneratedCommandsInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdExecuteGeneratedCommandsNV( commandBuffer, isPreprocessed, pGeneratedCommandsInfo ); - } - - void vkCmdBindPipelineShaderGroupNV( VkCommandBuffer commandBuffer, - VkPipelineBindPoint pipelineBindPoint, - VkPipeline pipeline, - uint32_t groupIndex ) const VULKAN_HPP_NOEXCEPT - { - return 
::vkCmdBindPipelineShaderGroupNV( commandBuffer, pipelineBindPoint, pipeline, groupIndex ); - } - - VkResult vkCreateIndirectCommandsLayoutNV( VkDevice device, - const VkIndirectCommandsLayoutCreateInfoNV * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkIndirectCommandsLayoutNV * pIndirectCommandsLayout ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateIndirectCommandsLayoutNV( device, pCreateInfo, pAllocator, pIndirectCommandsLayout ); - } - - void vkDestroyIndirectCommandsLayoutNV( VkDevice device, - VkIndirectCommandsLayoutNV indirectCommandsLayout, - const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyIndirectCommandsLayoutNV( device, indirectCommandsLayout, pAllocator ); - } - - //=== VK_EXT_depth_bias_control === - - void vkCmdSetDepthBias2EXT( VkCommandBuffer commandBuffer, const VkDepthBiasInfoEXT * pDepthBiasInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetDepthBias2EXT( commandBuffer, pDepthBiasInfo ); - } - - //=== VK_EXT_acquire_drm_display === - - VkResult vkAcquireDrmDisplayEXT( VkPhysicalDevice physicalDevice, int32_t drmFd, VkDisplayKHR display ) const VULKAN_HPP_NOEXCEPT - { - return ::vkAcquireDrmDisplayEXT( physicalDevice, drmFd, display ); - } - - VkResult vkGetDrmDisplayEXT( VkPhysicalDevice physicalDevice, int32_t drmFd, uint32_t connectorId, VkDisplayKHR * display ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDrmDisplayEXT( physicalDevice, drmFd, connectorId, display ); - } - - //=== VK_EXT_private_data === - - VkResult vkCreatePrivateDataSlotEXT( VkDevice device, - const VkPrivateDataSlotCreateInfo * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkPrivateDataSlot * pPrivateDataSlot ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreatePrivateDataSlotEXT( device, pCreateInfo, pAllocator, pPrivateDataSlot ); - } - - void vkDestroyPrivateDataSlotEXT( VkDevice device, VkPrivateDataSlot privateDataSlot, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyPrivateDataSlotEXT( device, privateDataSlot, pAllocator ); - } - - VkResult vkSetPrivateDataEXT( VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlot privateDataSlot, uint64_t data ) const - VULKAN_HPP_NOEXCEPT - { - return ::vkSetPrivateDataEXT( device, objectType, objectHandle, privateDataSlot, data ); - } - - void vkGetPrivateDataEXT( VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlot privateDataSlot, uint64_t * pData ) const - VULKAN_HPP_NOEXCEPT - { - return ::vkGetPrivateDataEXT( device, objectType, objectHandle, privateDataSlot, pData ); - } - - //=== VK_KHR_video_encode_queue === - - VkResult - vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR( VkPhysicalDevice physicalDevice, - const VkPhysicalDeviceVideoEncodeQualityLevelInfoKHR * pQualityLevelInfo, - VkVideoEncodeQualityLevelPropertiesKHR * pQualityLevelProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR( physicalDevice, pQualityLevelInfo, pQualityLevelProperties ); - } - - VkResult vkGetEncodedVideoSessionParametersKHR( VkDevice device, - const VkVideoEncodeSessionParametersGetInfoKHR * pVideoSessionParametersInfo, - VkVideoEncodeSessionParametersFeedbackInfoKHR * pFeedbackInfo, - size_t * pDataSize, - void * pData ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetEncodedVideoSessionParametersKHR( device, pVideoSessionParametersInfo, pFeedbackInfo, pDataSize, pData ); - } - - void vkCmdEncodeVideoKHR( VkCommandBuffer 
commandBuffer, const VkVideoEncodeInfoKHR * pEncodeInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdEncodeVideoKHR( commandBuffer, pEncodeInfo ); - } - -# if defined( VK_ENABLE_BETA_EXTENSIONS ) - //=== VK_NV_cuda_kernel_launch === - - VkResult vkCreateCudaModuleNV( VkDevice device, - const VkCudaModuleCreateInfoNV * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkCudaModuleNV * pModule ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateCudaModuleNV( device, pCreateInfo, pAllocator, pModule ); - } - - VkResult vkGetCudaModuleCacheNV( VkDevice device, VkCudaModuleNV module, size_t * pCacheSize, void * pCacheData ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetCudaModuleCacheNV( device, module, pCacheSize, pCacheData ); - } - - VkResult vkCreateCudaFunctionNV( VkDevice device, - const VkCudaFunctionCreateInfoNV * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkCudaFunctionNV * pFunction ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateCudaFunctionNV( device, pCreateInfo, pAllocator, pFunction ); - } - - void vkDestroyCudaModuleNV( VkDevice device, VkCudaModuleNV module, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyCudaModuleNV( device, module, pAllocator ); - } - - void vkDestroyCudaFunctionNV( VkDevice device, VkCudaFunctionNV function, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyCudaFunctionNV( device, function, pAllocator ); - } - - void vkCmdCudaLaunchKernelNV( VkCommandBuffer commandBuffer, const VkCudaLaunchInfoNV * pLaunchInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdCudaLaunchKernelNV( commandBuffer, pLaunchInfo ); - } -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ - -# if defined( VK_USE_PLATFORM_METAL_EXT ) - //=== VK_EXT_metal_objects === - - void vkExportMetalObjectsEXT( VkDevice device, VkExportMetalObjectsInfoEXT * pMetalObjectsInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkExportMetalObjectsEXT( device, pMetalObjectsInfo ); - } -# endif /*VK_USE_PLATFORM_METAL_EXT*/ - - //=== VK_KHR_synchronization2 === - - void vkCmdSetEvent2KHR( VkCommandBuffer commandBuffer, VkEvent event, const VkDependencyInfo * pDependencyInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetEvent2KHR( commandBuffer, event, pDependencyInfo ); - } - - void vkCmdResetEvent2KHR( VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags2 stageMask ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdResetEvent2KHR( commandBuffer, event, stageMask ); - } - - void vkCmdWaitEvents2KHR( VkCommandBuffer commandBuffer, - uint32_t eventCount, - const VkEvent * pEvents, - const VkDependencyInfo * pDependencyInfos ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdWaitEvents2KHR( commandBuffer, eventCount, pEvents, pDependencyInfos ); - } - - void vkCmdPipelineBarrier2KHR( VkCommandBuffer commandBuffer, const VkDependencyInfo * pDependencyInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdPipelineBarrier2KHR( commandBuffer, pDependencyInfo ); - } - - void vkCmdWriteTimestamp2KHR( VkCommandBuffer commandBuffer, VkPipelineStageFlags2 stage, VkQueryPool queryPool, uint32_t query ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdWriteTimestamp2KHR( commandBuffer, stage, queryPool, query ); - } - - VkResult vkQueueSubmit2KHR( VkQueue queue, uint32_t submitCount, const VkSubmitInfo2 * pSubmits, VkFence fence ) const VULKAN_HPP_NOEXCEPT - { - return ::vkQueueSubmit2KHR( queue, submitCount, pSubmits, fence ); - } - - //=== VK_EXT_descriptor_buffer === - - void vkGetDescriptorSetLayoutSizeEXT( 
VkDevice device, VkDescriptorSetLayout layout, VkDeviceSize * pLayoutSizeInBytes ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDescriptorSetLayoutSizeEXT( device, layout, pLayoutSizeInBytes ); - } - - void vkGetDescriptorSetLayoutBindingOffsetEXT( VkDevice device, - VkDescriptorSetLayout layout, - uint32_t binding, - VkDeviceSize * pOffset ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDescriptorSetLayoutBindingOffsetEXT( device, layout, binding, pOffset ); - } - - void vkGetDescriptorEXT( VkDevice device, const VkDescriptorGetInfoEXT * pDescriptorInfo, size_t dataSize, void * pDescriptor ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDescriptorEXT( device, pDescriptorInfo, dataSize, pDescriptor ); - } - - void vkCmdBindDescriptorBuffersEXT( VkCommandBuffer commandBuffer, - uint32_t bufferCount, - const VkDescriptorBufferBindingInfoEXT * pBindingInfos ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBindDescriptorBuffersEXT( commandBuffer, bufferCount, pBindingInfos ); - } - - void vkCmdSetDescriptorBufferOffsetsEXT( VkCommandBuffer commandBuffer, - VkPipelineBindPoint pipelineBindPoint, - VkPipelineLayout layout, - uint32_t firstSet, - uint32_t setCount, - const uint32_t * pBufferIndices, - const VkDeviceSize * pOffsets ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetDescriptorBufferOffsetsEXT( commandBuffer, pipelineBindPoint, layout, firstSet, setCount, pBufferIndices, pOffsets ); - } - - void vkCmdBindDescriptorBufferEmbeddedSamplersEXT( VkCommandBuffer commandBuffer, - VkPipelineBindPoint pipelineBindPoint, - VkPipelineLayout layout, - uint32_t set ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBindDescriptorBufferEmbeddedSamplersEXT( commandBuffer, pipelineBindPoint, layout, set ); - } - - VkResult - vkGetBufferOpaqueCaptureDescriptorDataEXT( VkDevice device, const VkBufferCaptureDescriptorDataInfoEXT * pInfo, void * pData ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetBufferOpaqueCaptureDescriptorDataEXT( device, pInfo, pData ); - } - - VkResult - vkGetImageOpaqueCaptureDescriptorDataEXT( VkDevice device, const VkImageCaptureDescriptorDataInfoEXT * pInfo, void * pData ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetImageOpaqueCaptureDescriptorDataEXT( device, pInfo, pData ); - } - - VkResult vkGetImageViewOpaqueCaptureDescriptorDataEXT( VkDevice device, - const VkImageViewCaptureDescriptorDataInfoEXT * pInfo, - void * pData ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetImageViewOpaqueCaptureDescriptorDataEXT( device, pInfo, pData ); - } - - VkResult - vkGetSamplerOpaqueCaptureDescriptorDataEXT( VkDevice device, const VkSamplerCaptureDescriptorDataInfoEXT * pInfo, void * pData ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetSamplerOpaqueCaptureDescriptorDataEXT( device, pInfo, pData ); - } - - VkResult vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT( VkDevice device, - const VkAccelerationStructureCaptureDescriptorDataInfoEXT * pInfo, - void * pData ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT( device, pInfo, pData ); - } - - //=== VK_NV_fragment_shading_rate_enums === - - void vkCmdSetFragmentShadingRateEnumNV( VkCommandBuffer commandBuffer, - VkFragmentShadingRateNV shadingRate, - const VkFragmentShadingRateCombinerOpKHR combinerOps[2] ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetFragmentShadingRateEnumNV( commandBuffer, shadingRate, combinerOps ); - } - - //=== VK_EXT_mesh_shader === - - void vkCmdDrawMeshTasksEXT( VkCommandBuffer commandBuffer, uint32_t groupCountX, uint32_t groupCountY, uint32_t 
groupCountZ ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDrawMeshTasksEXT( commandBuffer, groupCountX, groupCountY, groupCountZ ); - } - - void vkCmdDrawMeshTasksIndirectEXT( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride ) const - VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDrawMeshTasksIndirectEXT( commandBuffer, buffer, offset, drawCount, stride ); - } - - void vkCmdDrawMeshTasksIndirectCountEXT( VkCommandBuffer commandBuffer, + { + return ::vkCmdEndTransformFeedbackEXT( commandBuffer, firstCounterBuffer, counterBufferCount, pCounterBuffers, pCounterBufferOffsets ); + } + + void vkCmdBeginQueryIndexedEXT( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags, uint32_t index ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBeginQueryIndexedEXT( commandBuffer, queryPool, query, flags, index ); + } + + void vkCmdEndQueryIndexedEXT( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, uint32_t index ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdEndQueryIndexedEXT( commandBuffer, queryPool, query, index ); + } + + void vkCmdDrawIndirectByteCountEXT( VkCommandBuffer commandBuffer, + uint32_t instanceCount, + uint32_t firstInstance, + VkBuffer counterBuffer, + VkDeviceSize counterBufferOffset, + uint32_t counterOffset, + uint32_t vertexStride ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawIndirectByteCountEXT( commandBuffer, instanceCount, firstInstance, counterBuffer, counterBufferOffset, counterOffset, vertexStride ); + } + + //=== VK_NVX_binary_import === + + VkResult vkCreateCuModuleNVX( VkDevice device, + const VkCuModuleCreateInfoNVX * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkCuModuleNVX * pModule ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateCuModuleNVX( device, pCreateInfo, pAllocator, pModule ); + } + + VkResult vkCreateCuFunctionNVX( VkDevice device, + const VkCuFunctionCreateInfoNVX * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkCuFunctionNVX * pFunction ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateCuFunctionNVX( device, pCreateInfo, pAllocator, pFunction ); + } + + void vkDestroyCuModuleNVX( VkDevice device, VkCuModuleNVX module, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyCuModuleNVX( device, module, pAllocator ); + } + + void vkDestroyCuFunctionNVX( VkDevice device, VkCuFunctionNVX function, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyCuFunctionNVX( device, function, pAllocator ); + } + + void vkCmdCuLaunchKernelNVX( VkCommandBuffer commandBuffer, const VkCuLaunchInfoNVX * pLaunchInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCuLaunchKernelNVX( commandBuffer, pLaunchInfo ); + } + + //=== VK_NVX_image_view_handle === + + uint32_t vkGetImageViewHandleNVX( VkDevice device, const VkImageViewHandleInfoNVX * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetImageViewHandleNVX( device, pInfo ); + } + + VkResult vkGetImageViewAddressNVX( VkDevice device, VkImageView imageView, VkImageViewAddressPropertiesNVX * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetImageViewAddressNVX( device, imageView, pProperties ); + } + + //=== VK_AMD_draw_indirect_count === + + void vkCmdDrawIndirectCountAMD( VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + return 
::vkCmdDrawIndirectCountAMD( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); + } + + void vkCmdDrawIndexedIndirectCountAMD( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDrawMeshTasksIndirectCountEXT( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); - } + { + return ::vkCmdDrawIndexedIndirectCountAMD( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); + } - //=== VK_KHR_copy_commands2 === + //=== VK_AMD_shader_info === - void vkCmdCopyBuffer2KHR( VkCommandBuffer commandBuffer, const VkCopyBufferInfo2 * pCopyBufferInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdCopyBuffer2KHR( commandBuffer, pCopyBufferInfo ); - } + VkResult vkGetShaderInfoAMD( VkDevice device, + VkPipeline pipeline, + VkShaderStageFlagBits shaderStage, + VkShaderInfoTypeAMD infoType, + size_t * pInfoSize, + void * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetShaderInfoAMD( device, pipeline, shaderStage, infoType, pInfoSize, pInfo ); + } - void vkCmdCopyImage2KHR( VkCommandBuffer commandBuffer, const VkCopyImageInfo2 * pCopyImageInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdCopyImage2KHR( commandBuffer, pCopyImageInfo ); - } + //=== VK_KHR_dynamic_rendering === - void vkCmdCopyBufferToImage2KHR( VkCommandBuffer commandBuffer, const VkCopyBufferToImageInfo2 * pCopyBufferToImageInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdCopyBufferToImage2KHR( commandBuffer, pCopyBufferToImageInfo ); - } + void vkCmdBeginRenderingKHR( VkCommandBuffer commandBuffer, const VkRenderingInfo * pRenderingInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBeginRenderingKHR( commandBuffer, pRenderingInfo ); + } - void vkCmdCopyImageToBuffer2KHR( VkCommandBuffer commandBuffer, const VkCopyImageToBufferInfo2 * pCopyImageToBufferInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdCopyImageToBuffer2KHR( commandBuffer, pCopyImageToBufferInfo ); - } + void vkCmdEndRenderingKHR( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdEndRenderingKHR( commandBuffer ); + } - void vkCmdBlitImage2KHR( VkCommandBuffer commandBuffer, const VkBlitImageInfo2 * pBlitImageInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBlitImage2KHR( commandBuffer, pBlitImageInfo ); - } +# if defined( VK_USE_PLATFORM_GGP ) + //=== VK_GGP_stream_descriptor_surface === - void vkCmdResolveImage2KHR( VkCommandBuffer commandBuffer, const VkResolveImageInfo2 * pResolveImageInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdResolveImage2KHR( commandBuffer, pResolveImageInfo ); - } + VkResult vkCreateStreamDescriptorSurfaceGGP( VkInstance instance, + const VkStreamDescriptorSurfaceCreateInfoGGP * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateStreamDescriptorSurfaceGGP( instance, pCreateInfo, pAllocator, pSurface ); + } +# endif /*VK_USE_PLATFORM_GGP*/ - //=== VK_EXT_device_fault === + //=== VK_NV_external_memory_capabilities === - VkResult vkGetDeviceFaultInfoEXT( VkDevice device, VkDeviceFaultCountsEXT * pFaultCounts, VkDeviceFaultInfoEXT * pFaultInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDeviceFaultInfoEXT( device, pFaultCounts, pFaultInfo ); - } + VkResult + vkGetPhysicalDeviceExternalImageFormatPropertiesNV( VkPhysicalDevice physicalDevice, + VkFormat format, 
+ VkImageType type, + VkImageTiling tiling, + VkImageUsageFlags usage, + VkImageCreateFlags flags, + VkExternalMemoryHandleTypeFlagsNV externalHandleType, + VkExternalImageFormatPropertiesNV * pExternalImageFormatProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceExternalImageFormatPropertiesNV( + physicalDevice, format, type, tiling, usage, flags, externalHandleType, pExternalImageFormatProperties ); + } # if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_NV_acquire_winrt_display === + //=== VK_NV_external_memory_win32 === - VkResult vkAcquireWinrtDisplayNV( VkPhysicalDevice physicalDevice, VkDisplayKHR display ) const VULKAN_HPP_NOEXCEPT - { - return ::vkAcquireWinrtDisplayNV( physicalDevice, display ); - } + VkResult vkGetMemoryWin32HandleNV( VkDevice device, + VkDeviceMemory memory, + VkExternalMemoryHandleTypeFlagsNV handleType, + HANDLE * pHandle ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetMemoryWin32HandleNV( device, memory, handleType, pHandle ); + } +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ - VkResult vkGetWinrtDisplayNV( VkPhysicalDevice physicalDevice, uint32_t deviceRelativeId, VkDisplayKHR * pDisplay ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetWinrtDisplayNV( physicalDevice, deviceRelativeId, pDisplay ); - } + //=== VK_KHR_get_physical_device_properties2 === + + void vkGetPhysicalDeviceFeatures2KHR( VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2 * pFeatures ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceFeatures2KHR( physicalDevice, pFeatures ); + } + + void vkGetPhysicalDeviceProperties2KHR( VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2 * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceProperties2KHR( physicalDevice, pProperties ); + } + + void vkGetPhysicalDeviceFormatProperties2KHR( VkPhysicalDevice physicalDevice, + VkFormat format, + VkFormatProperties2 * pFormatProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceFormatProperties2KHR( physicalDevice, format, pFormatProperties ); + } + + VkResult vkGetPhysicalDeviceImageFormatProperties2KHR( VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceImageFormatInfo2 * pImageFormatInfo, + VkImageFormatProperties2 * pImageFormatProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceImageFormatProperties2KHR( physicalDevice, pImageFormatInfo, pImageFormatProperties ); + } + + void vkGetPhysicalDeviceQueueFamilyProperties2KHR( VkPhysicalDevice physicalDevice, + uint32_t * pQueueFamilyPropertyCount, + VkQueueFamilyProperties2 * pQueueFamilyProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceQueueFamilyProperties2KHR( physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties ); + } + + void vkGetPhysicalDeviceMemoryProperties2KHR( VkPhysicalDevice physicalDevice, + VkPhysicalDeviceMemoryProperties2 * pMemoryProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceMemoryProperties2KHR( physicalDevice, pMemoryProperties ); + } + + void vkGetPhysicalDeviceSparseImageFormatProperties2KHR( VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceSparseImageFormatInfo2 * pFormatInfo, + uint32_t * pPropertyCount, + VkSparseImageFormatProperties2 * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceSparseImageFormatProperties2KHR( physicalDevice, pFormatInfo, pPropertyCount, pProperties ); + } + + //=== VK_KHR_device_group === + + void vkGetDeviceGroupPeerMemoryFeaturesKHR( VkDevice device, + uint32_t heapIndex, + uint32_t 
localDeviceIndex, + uint32_t remoteDeviceIndex, + VkPeerMemoryFeatureFlags * pPeerMemoryFeatures ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceGroupPeerMemoryFeaturesKHR( device, heapIndex, localDeviceIndex, remoteDeviceIndex, pPeerMemoryFeatures ); + } + + void vkCmdSetDeviceMaskKHR( VkCommandBuffer commandBuffer, uint32_t deviceMask ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDeviceMaskKHR( commandBuffer, deviceMask ); + } + + void vkCmdDispatchBaseKHR( VkCommandBuffer commandBuffer, + uint32_t baseGroupX, + uint32_t baseGroupY, + uint32_t baseGroupZ, + uint32_t groupCountX, + uint32_t groupCountY, + uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDispatchBaseKHR( commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ ); + } + +# if defined( VK_USE_PLATFORM_VI_NN ) + //=== VK_NN_vi_surface === + + VkResult vkCreateViSurfaceNN( VkInstance instance, + const VkViSurfaceCreateInfoNN * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateViSurfaceNN( instance, pCreateInfo, pAllocator, pSurface ); + } +# endif /*VK_USE_PLATFORM_VI_NN*/ + + //=== VK_KHR_maintenance1 === + + void vkTrimCommandPoolKHR( VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags ) const VULKAN_HPP_NOEXCEPT + { + return ::vkTrimCommandPoolKHR( device, commandPool, flags ); + } + + //=== VK_KHR_device_group_creation === + + VkResult vkEnumeratePhysicalDeviceGroupsKHR( VkInstance instance, + uint32_t * pPhysicalDeviceGroupCount, + VkPhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkEnumeratePhysicalDeviceGroupsKHR( instance, pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties ); + } + + //=== VK_KHR_external_memory_capabilities === + + void vkGetPhysicalDeviceExternalBufferPropertiesKHR( VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceExternalBufferInfo * pExternalBufferInfo, + VkExternalBufferProperties * pExternalBufferProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceExternalBufferPropertiesKHR( physicalDevice, pExternalBufferInfo, pExternalBufferProperties ); + } + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_memory_win32 === + + VkResult + vkGetMemoryWin32HandleKHR( VkDevice device, const VkMemoryGetWin32HandleInfoKHR * pGetWin32HandleInfo, HANDLE * pHandle ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetMemoryWin32HandleKHR( device, pGetWin32HandleInfo, pHandle ); + } + + VkResult vkGetMemoryWin32HandlePropertiesKHR( VkDevice device, + VkExternalMemoryHandleTypeFlagBits handleType, + HANDLE handle, + VkMemoryWin32HandlePropertiesKHR * pMemoryWin32HandleProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetMemoryWin32HandlePropertiesKHR( device, handleType, handle, pMemoryWin32HandleProperties ); + } +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_memory_fd === + + VkResult vkGetMemoryFdKHR( VkDevice device, const VkMemoryGetFdInfoKHR * pGetFdInfo, int * pFd ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetMemoryFdKHR( device, pGetFdInfo, pFd ); + } + + VkResult vkGetMemoryFdPropertiesKHR( VkDevice device, + VkExternalMemoryHandleTypeFlagBits handleType, + int fd, + VkMemoryFdPropertiesKHR * pMemoryFdProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetMemoryFdPropertiesKHR( device, handleType, fd, pMemoryFdProperties ); + } + + //=== VK_KHR_external_semaphore_capabilities === + + void 
vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo, + VkExternalSemaphoreProperties * pExternalSemaphoreProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties ); + } + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_semaphore_win32 === + + VkResult vkImportSemaphoreWin32HandleKHR( VkDevice device, + const VkImportSemaphoreWin32HandleInfoKHR * pImportSemaphoreWin32HandleInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkImportSemaphoreWin32HandleKHR( device, pImportSemaphoreWin32HandleInfo ); + } + + VkResult vkGetSemaphoreWin32HandleKHR( VkDevice device, + const VkSemaphoreGetWin32HandleInfoKHR * pGetWin32HandleInfo, + HANDLE * pHandle ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetSemaphoreWin32HandleKHR( device, pGetWin32HandleInfo, pHandle ); + } +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_semaphore_fd === + + VkResult vkImportSemaphoreFdKHR( VkDevice device, const VkImportSemaphoreFdInfoKHR * pImportSemaphoreFdInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkImportSemaphoreFdKHR( device, pImportSemaphoreFdInfo ); + } + + VkResult vkGetSemaphoreFdKHR( VkDevice device, const VkSemaphoreGetFdInfoKHR * pGetFdInfo, int * pFd ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetSemaphoreFdKHR( device, pGetFdInfo, pFd ); + } + + //=== VK_KHR_push_descriptor === + + void vkCmdPushDescriptorSetKHR( VkCommandBuffer commandBuffer, + VkPipelineBindPoint pipelineBindPoint, + VkPipelineLayout layout, + uint32_t set, + uint32_t descriptorWriteCount, + const VkWriteDescriptorSet * pDescriptorWrites ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdPushDescriptorSetKHR( commandBuffer, pipelineBindPoint, layout, set, descriptorWriteCount, pDescriptorWrites ); + } + + void vkCmdPushDescriptorSetWithTemplateKHR( VkCommandBuffer commandBuffer, + VkDescriptorUpdateTemplate descriptorUpdateTemplate, + VkPipelineLayout layout, + uint32_t set, + const void * pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdPushDescriptorSetWithTemplateKHR( commandBuffer, descriptorUpdateTemplate, layout, set, pData ); + } + + //=== VK_EXT_conditional_rendering === + + void vkCmdBeginConditionalRenderingEXT( VkCommandBuffer commandBuffer, + const VkConditionalRenderingBeginInfoEXT * pConditionalRenderingBegin ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBeginConditionalRenderingEXT( commandBuffer, pConditionalRenderingBegin ); + } + + void vkCmdEndConditionalRenderingEXT( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdEndConditionalRenderingEXT( commandBuffer ); + } + + //=== VK_KHR_descriptor_update_template === + + VkResult vkCreateDescriptorUpdateTemplateKHR( VkDevice device, + const VkDescriptorUpdateTemplateCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkDescriptorUpdateTemplate * pDescriptorUpdateTemplate ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateDescriptorUpdateTemplateKHR( device, pCreateInfo, pAllocator, pDescriptorUpdateTemplate ); + } + + void vkDestroyDescriptorUpdateTemplateKHR( VkDevice device, + VkDescriptorUpdateTemplate descriptorUpdateTemplate, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyDescriptorUpdateTemplateKHR( device, descriptorUpdateTemplate, pAllocator ); + } + + void vkUpdateDescriptorSetWithTemplateKHR( VkDevice device, + 
VkDescriptorSet descriptorSet, + VkDescriptorUpdateTemplate descriptorUpdateTemplate, + const void * pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkUpdateDescriptorSetWithTemplateKHR( device, descriptorSet, descriptorUpdateTemplate, pData ); + } + + //=== VK_NV_clip_space_w_scaling === + + void vkCmdSetViewportWScalingNV( VkCommandBuffer commandBuffer, + uint32_t firstViewport, + uint32_t viewportCount, + const VkViewportWScalingNV * pViewportWScalings ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetViewportWScalingNV( commandBuffer, firstViewport, viewportCount, pViewportWScalings ); + } + + //=== VK_EXT_direct_mode_display === + + VkResult vkReleaseDisplayEXT( VkPhysicalDevice physicalDevice, VkDisplayKHR display ) const VULKAN_HPP_NOEXCEPT + { + return ::vkReleaseDisplayEXT( physicalDevice, display ); + } + +# if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT ) + //=== VK_EXT_acquire_xlib_display === + + VkResult vkAcquireXlibDisplayEXT( VkPhysicalDevice physicalDevice, Display * dpy, VkDisplayKHR display ) const VULKAN_HPP_NOEXCEPT + { + return ::vkAcquireXlibDisplayEXT( physicalDevice, dpy, display ); + } + + VkResult + vkGetRandROutputDisplayEXT( VkPhysicalDevice physicalDevice, Display * dpy, RROutput rrOutput, VkDisplayKHR * pDisplay ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetRandROutputDisplayEXT( physicalDevice, dpy, rrOutput, pDisplay ); + } +# endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ + + //=== VK_EXT_display_surface_counter === + + VkResult vkGetPhysicalDeviceSurfaceCapabilities2EXT( VkPhysicalDevice physicalDevice, + VkSurfaceKHR surface, + VkSurfaceCapabilities2EXT * pSurfaceCapabilities ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceSurfaceCapabilities2EXT( physicalDevice, surface, pSurfaceCapabilities ); + } + + //=== VK_EXT_display_control === + + VkResult vkDisplayPowerControlEXT( VkDevice device, VkDisplayKHR display, const VkDisplayPowerInfoEXT * pDisplayPowerInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDisplayPowerControlEXT( device, display, pDisplayPowerInfo ); + } + + VkResult vkRegisterDeviceEventEXT( VkDevice device, + const VkDeviceEventInfoEXT * pDeviceEventInfo, + const VkAllocationCallbacks * pAllocator, + VkFence * pFence ) const VULKAN_HPP_NOEXCEPT + { + return ::vkRegisterDeviceEventEXT( device, pDeviceEventInfo, pAllocator, pFence ); + } + + VkResult vkRegisterDisplayEventEXT( VkDevice device, + VkDisplayKHR display, + const VkDisplayEventInfoEXT * pDisplayEventInfo, + const VkAllocationCallbacks * pAllocator, + VkFence * pFence ) const VULKAN_HPP_NOEXCEPT + { + return ::vkRegisterDisplayEventEXT( device, display, pDisplayEventInfo, pAllocator, pFence ); + } + + VkResult vkGetSwapchainCounterEXT( VkDevice device, + VkSwapchainKHR swapchain, + VkSurfaceCounterFlagBitsEXT counter, + uint64_t * pCounterValue ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetSwapchainCounterEXT( device, swapchain, counter, pCounterValue ); + } + + //=== VK_GOOGLE_display_timing === + + VkResult vkGetRefreshCycleDurationGOOGLE( VkDevice device, + VkSwapchainKHR swapchain, + VkRefreshCycleDurationGOOGLE * pDisplayTimingProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetRefreshCycleDurationGOOGLE( device, swapchain, pDisplayTimingProperties ); + } + + VkResult vkGetPastPresentationTimingGOOGLE( VkDevice device, + VkSwapchainKHR swapchain, + uint32_t * pPresentationTimingCount, + VkPastPresentationTimingGOOGLE * pPresentationTimings ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPastPresentationTimingGOOGLE( device, swapchain, 
pPresentationTimingCount, pPresentationTimings ); + } + + //=== VK_EXT_discard_rectangles === + + void vkCmdSetDiscardRectangleEXT( VkCommandBuffer commandBuffer, + uint32_t firstDiscardRectangle, + uint32_t discardRectangleCount, + const VkRect2D * pDiscardRectangles ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDiscardRectangleEXT( commandBuffer, firstDiscardRectangle, discardRectangleCount, pDiscardRectangles ); + } + + void vkCmdSetDiscardRectangleEnableEXT( VkCommandBuffer commandBuffer, VkBool32 discardRectangleEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDiscardRectangleEnableEXT( commandBuffer, discardRectangleEnable ); + } + + void vkCmdSetDiscardRectangleModeEXT( VkCommandBuffer commandBuffer, VkDiscardRectangleModeEXT discardRectangleMode ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDiscardRectangleModeEXT( commandBuffer, discardRectangleMode ); + } + + //=== VK_EXT_hdr_metadata === + + void vkSetHdrMetadataEXT( VkDevice device, + uint32_t swapchainCount, + const VkSwapchainKHR * pSwapchains, + const VkHdrMetadataEXT * pMetadata ) const VULKAN_HPP_NOEXCEPT + { + return ::vkSetHdrMetadataEXT( device, swapchainCount, pSwapchains, pMetadata ); + } + + //=== VK_KHR_create_renderpass2 === + + VkResult vkCreateRenderPass2KHR( VkDevice device, + const VkRenderPassCreateInfo2 * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkRenderPass * pRenderPass ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateRenderPass2KHR( device, pCreateInfo, pAllocator, pRenderPass ); + } + + void vkCmdBeginRenderPass2KHR( VkCommandBuffer commandBuffer, + const VkRenderPassBeginInfo * pRenderPassBegin, + const VkSubpassBeginInfo * pSubpassBeginInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBeginRenderPass2KHR( commandBuffer, pRenderPassBegin, pSubpassBeginInfo ); + } + + void vkCmdNextSubpass2KHR( VkCommandBuffer commandBuffer, + const VkSubpassBeginInfo * pSubpassBeginInfo, + const VkSubpassEndInfo * pSubpassEndInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdNextSubpass2KHR( commandBuffer, pSubpassBeginInfo, pSubpassEndInfo ); + } + + void vkCmdEndRenderPass2KHR( VkCommandBuffer commandBuffer, const VkSubpassEndInfo * pSubpassEndInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdEndRenderPass2KHR( commandBuffer, pSubpassEndInfo ); + } + + //=== VK_KHR_shared_presentable_image === + + VkResult vkGetSwapchainStatusKHR( VkDevice device, VkSwapchainKHR swapchain ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetSwapchainStatusKHR( device, swapchain ); + } + + //=== VK_KHR_external_fence_capabilities === + + void vkGetPhysicalDeviceExternalFencePropertiesKHR( VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceExternalFenceInfo * pExternalFenceInfo, + VkExternalFenceProperties * pExternalFenceProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceExternalFencePropertiesKHR( physicalDevice, pExternalFenceInfo, pExternalFenceProperties ); + } + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_fence_win32 === + + VkResult vkImportFenceWin32HandleKHR( VkDevice device, const VkImportFenceWin32HandleInfoKHR * pImportFenceWin32HandleInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkImportFenceWin32HandleKHR( device, pImportFenceWin32HandleInfo ); + } + + VkResult vkGetFenceWin32HandleKHR( VkDevice device, const VkFenceGetWin32HandleInfoKHR * pGetWin32HandleInfo, HANDLE * pHandle ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetFenceWin32HandleKHR( device, pGetWin32HandleInfo, pHandle ); + } +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + 
//=== VK_KHR_external_fence_fd === + + VkResult vkImportFenceFdKHR( VkDevice device, const VkImportFenceFdInfoKHR * pImportFenceFdInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkImportFenceFdKHR( device, pImportFenceFdInfo ); + } + + VkResult vkGetFenceFdKHR( VkDevice device, const VkFenceGetFdInfoKHR * pGetFdInfo, int * pFd ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetFenceFdKHR( device, pGetFdInfo, pFd ); + } + + //=== VK_KHR_performance_query === + + VkResult + vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + uint32_t * pCounterCount, + VkPerformanceCounterKHR * pCounters, + VkPerformanceCounterDescriptionKHR * pCounterDescriptions ) const VULKAN_HPP_NOEXCEPT + { + return ::vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( + physicalDevice, queueFamilyIndex, pCounterCount, pCounters, pCounterDescriptions ); + } + + void vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( VkPhysicalDevice physicalDevice, + const VkQueryPoolPerformanceCreateInfoKHR * pPerformanceQueryCreateInfo, + uint32_t * pNumPasses ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( physicalDevice, pPerformanceQueryCreateInfo, pNumPasses ); + } + + VkResult vkAcquireProfilingLockKHR( VkDevice device, const VkAcquireProfilingLockInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkAcquireProfilingLockKHR( device, pInfo ); + } + + void vkReleaseProfilingLockKHR( VkDevice device ) const VULKAN_HPP_NOEXCEPT + { + return ::vkReleaseProfilingLockKHR( device ); + } + + //=== VK_KHR_get_surface_capabilities2 === + + VkResult vkGetPhysicalDeviceSurfaceCapabilities2KHR( VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, + VkSurfaceCapabilities2KHR * pSurfaceCapabilities ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceSurfaceCapabilities2KHR( physicalDevice, pSurfaceInfo, pSurfaceCapabilities ); + } + + VkResult vkGetPhysicalDeviceSurfaceFormats2KHR( VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, + uint32_t * pSurfaceFormatCount, + VkSurfaceFormat2KHR * pSurfaceFormats ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceSurfaceFormats2KHR( physicalDevice, pSurfaceInfo, pSurfaceFormatCount, pSurfaceFormats ); + } + + //=== VK_KHR_get_display_properties2 === + + VkResult vkGetPhysicalDeviceDisplayProperties2KHR( VkPhysicalDevice physicalDevice, + uint32_t * pPropertyCount, + VkDisplayProperties2KHR * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceDisplayProperties2KHR( physicalDevice, pPropertyCount, pProperties ); + } + + VkResult vkGetPhysicalDeviceDisplayPlaneProperties2KHR( VkPhysicalDevice physicalDevice, + uint32_t * pPropertyCount, + VkDisplayPlaneProperties2KHR * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceDisplayPlaneProperties2KHR( physicalDevice, pPropertyCount, pProperties ); + } + + VkResult vkGetDisplayModeProperties2KHR( VkPhysicalDevice physicalDevice, + VkDisplayKHR display, + uint32_t * pPropertyCount, + VkDisplayModeProperties2KHR * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDisplayModeProperties2KHR( physicalDevice, display, pPropertyCount, pProperties ); + } + + VkResult vkGetDisplayPlaneCapabilities2KHR( VkPhysicalDevice physicalDevice, + const VkDisplayPlaneInfo2KHR * pDisplayPlaneInfo, + VkDisplayPlaneCapabilities2KHR * pCapabilities ) const VULKAN_HPP_NOEXCEPT + { + return 
::vkGetDisplayPlaneCapabilities2KHR( physicalDevice, pDisplayPlaneInfo, pCapabilities ); + } + +# if defined( VK_USE_PLATFORM_IOS_MVK ) + //=== VK_MVK_ios_surface === + + VkResult vkCreateIOSSurfaceMVK( VkInstance instance, + const VkIOSSurfaceCreateInfoMVK * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateIOSSurfaceMVK( instance, pCreateInfo, pAllocator, pSurface ); + } +# endif /*VK_USE_PLATFORM_IOS_MVK*/ + +# if defined( VK_USE_PLATFORM_MACOS_MVK ) + //=== VK_MVK_macos_surface === + + VkResult vkCreateMacOSSurfaceMVK( VkInstance instance, + const VkMacOSSurfaceCreateInfoMVK * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateMacOSSurfaceMVK( instance, pCreateInfo, pAllocator, pSurface ); + } +# endif /*VK_USE_PLATFORM_MACOS_MVK*/ + + //=== VK_EXT_debug_utils === + + VkResult vkSetDebugUtilsObjectNameEXT( VkDevice device, const VkDebugUtilsObjectNameInfoEXT * pNameInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkSetDebugUtilsObjectNameEXT( device, pNameInfo ); + } + + VkResult vkSetDebugUtilsObjectTagEXT( VkDevice device, const VkDebugUtilsObjectTagInfoEXT * pTagInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkSetDebugUtilsObjectTagEXT( device, pTagInfo ); + } + + void vkQueueBeginDebugUtilsLabelEXT( VkQueue queue, const VkDebugUtilsLabelEXT * pLabelInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkQueueBeginDebugUtilsLabelEXT( queue, pLabelInfo ); + } + + void vkQueueEndDebugUtilsLabelEXT( VkQueue queue ) const VULKAN_HPP_NOEXCEPT + { + return ::vkQueueEndDebugUtilsLabelEXT( queue ); + } + + void vkQueueInsertDebugUtilsLabelEXT( VkQueue queue, const VkDebugUtilsLabelEXT * pLabelInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkQueueInsertDebugUtilsLabelEXT( queue, pLabelInfo ); + } + + void vkCmdBeginDebugUtilsLabelEXT( VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT * pLabelInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBeginDebugUtilsLabelEXT( commandBuffer, pLabelInfo ); + } + + void vkCmdEndDebugUtilsLabelEXT( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdEndDebugUtilsLabelEXT( commandBuffer ); + } + + void vkCmdInsertDebugUtilsLabelEXT( VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT * pLabelInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdInsertDebugUtilsLabelEXT( commandBuffer, pLabelInfo ); + } + + VkResult vkCreateDebugUtilsMessengerEXT( VkInstance instance, + const VkDebugUtilsMessengerCreateInfoEXT * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkDebugUtilsMessengerEXT * pMessenger ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateDebugUtilsMessengerEXT( instance, pCreateInfo, pAllocator, pMessenger ); + } + + void vkDestroyDebugUtilsMessengerEXT( VkInstance instance, + VkDebugUtilsMessengerEXT messenger, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyDebugUtilsMessengerEXT( instance, messenger, pAllocator ); + } + + void vkSubmitDebugUtilsMessageEXT( VkInstance instance, + VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, + VkDebugUtilsMessageTypeFlagsEXT messageTypes, + const VkDebugUtilsMessengerCallbackDataEXT * pCallbackData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkSubmitDebugUtilsMessageEXT( instance, messageSeverity, messageTypes, pCallbackData ); + } + +# if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_ANDROID_external_memory_android_hardware_buffer === + + 
VkResult vkGetAndroidHardwareBufferPropertiesANDROID( VkDevice device, + const struct AHardwareBuffer * buffer, + VkAndroidHardwareBufferPropertiesANDROID * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetAndroidHardwareBufferPropertiesANDROID( device, buffer, pProperties ); + } + + VkResult vkGetMemoryAndroidHardwareBufferANDROID( VkDevice device, + const VkMemoryGetAndroidHardwareBufferInfoANDROID * pInfo, + struct AHardwareBuffer ** pBuffer ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetMemoryAndroidHardwareBufferANDROID( device, pInfo, pBuffer ); + } +# endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_AMDX_shader_enqueue === + + VkResult vkCreateExecutionGraphPipelinesAMDX( VkDevice device, + VkPipelineCache pipelineCache, + uint32_t createInfoCount, + const VkExecutionGraphPipelineCreateInfoAMDX * pCreateInfos, + const VkAllocationCallbacks * pAllocator, + VkPipeline * pPipelines ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateExecutionGraphPipelinesAMDX( device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines ); + } + + VkResult vkGetExecutionGraphPipelineScratchSizeAMDX( VkDevice device, + VkPipeline executionGraph, + VkExecutionGraphPipelineScratchSizeAMDX * pSizeInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetExecutionGraphPipelineScratchSizeAMDX( device, executionGraph, pSizeInfo ); + } + + VkResult vkGetExecutionGraphPipelineNodeIndexAMDX( VkDevice device, + VkPipeline executionGraph, + const VkPipelineShaderStageNodeCreateInfoAMDX * pNodeInfo, + uint32_t * pNodeIndex ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetExecutionGraphPipelineNodeIndexAMDX( device, executionGraph, pNodeInfo, pNodeIndex ); + } + + void vkCmdInitializeGraphScratchMemoryAMDX( VkCommandBuffer commandBuffer, + VkPipeline executionGraph, + VkDeviceAddress scratch, + VkDeviceSize scratchSize ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdInitializeGraphScratchMemoryAMDX( commandBuffer, executionGraph, scratch, scratchSize ); + } + + void vkCmdDispatchGraphAMDX( VkCommandBuffer commandBuffer, + VkDeviceAddress scratch, + VkDeviceSize scratchSize, + const VkDispatchGraphCountInfoAMDX * pCountInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDispatchGraphAMDX( commandBuffer, scratch, scratchSize, pCountInfo ); + } + + void vkCmdDispatchGraphIndirectAMDX( VkCommandBuffer commandBuffer, + VkDeviceAddress scratch, + VkDeviceSize scratchSize, + const VkDispatchGraphCountInfoAMDX * pCountInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDispatchGraphIndirectAMDX( commandBuffer, scratch, scratchSize, pCountInfo ); + } + + void vkCmdDispatchGraphIndirectCountAMDX( VkCommandBuffer commandBuffer, + VkDeviceAddress scratch, + VkDeviceSize scratchSize, + VkDeviceAddress countInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDispatchGraphIndirectCountAMDX( commandBuffer, scratch, scratchSize, countInfo ); + } +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_EXT_sample_locations === + + void vkCmdSetSampleLocationsEXT( VkCommandBuffer commandBuffer, const VkSampleLocationsInfoEXT * pSampleLocationsInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetSampleLocationsEXT( commandBuffer, pSampleLocationsInfo ); + } + + void vkGetPhysicalDeviceMultisamplePropertiesEXT( VkPhysicalDevice physicalDevice, + VkSampleCountFlagBits samples, + VkMultisamplePropertiesEXT * pMultisampleProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceMultisamplePropertiesEXT( physicalDevice, samples, pMultisampleProperties ); + } 
+ + //=== VK_KHR_get_memory_requirements2 === + + void vkGetImageMemoryRequirements2KHR( VkDevice device, + const VkImageMemoryRequirementsInfo2 * pInfo, + VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetImageMemoryRequirements2KHR( device, pInfo, pMemoryRequirements ); + } + + void vkGetBufferMemoryRequirements2KHR( VkDevice device, + const VkBufferMemoryRequirementsInfo2 * pInfo, + VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetBufferMemoryRequirements2KHR( device, pInfo, pMemoryRequirements ); + } + + void vkGetImageSparseMemoryRequirements2KHR( VkDevice device, + const VkImageSparseMemoryRequirementsInfo2 * pInfo, + uint32_t * pSparseMemoryRequirementCount, + VkSparseImageMemoryRequirements2 * pSparseMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetImageSparseMemoryRequirements2KHR( device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements ); + } + + //=== VK_KHR_acceleration_structure === + + VkResult vkCreateAccelerationStructureKHR( VkDevice device, + const VkAccelerationStructureCreateInfoKHR * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkAccelerationStructureKHR * pAccelerationStructure ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateAccelerationStructureKHR( device, pCreateInfo, pAllocator, pAccelerationStructure ); + } + + void vkDestroyAccelerationStructureKHR( VkDevice device, + VkAccelerationStructureKHR accelerationStructure, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyAccelerationStructureKHR( device, accelerationStructure, pAllocator ); + } + + void vkCmdBuildAccelerationStructuresKHR( VkCommandBuffer commandBuffer, + uint32_t infoCount, + const VkAccelerationStructureBuildGeometryInfoKHR * pInfos, + const VkAccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBuildAccelerationStructuresKHR( commandBuffer, infoCount, pInfos, ppBuildRangeInfos ); + } + + void vkCmdBuildAccelerationStructuresIndirectKHR( VkCommandBuffer commandBuffer, + uint32_t infoCount, + const VkAccelerationStructureBuildGeometryInfoKHR * pInfos, + const VkDeviceAddress * pIndirectDeviceAddresses, + const uint32_t * pIndirectStrides, + const uint32_t * const * ppMaxPrimitiveCounts ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBuildAccelerationStructuresIndirectKHR( + commandBuffer, infoCount, pInfos, pIndirectDeviceAddresses, pIndirectStrides, ppMaxPrimitiveCounts ); + } + + VkResult vkBuildAccelerationStructuresKHR( VkDevice device, + VkDeferredOperationKHR deferredOperation, + uint32_t infoCount, + const VkAccelerationStructureBuildGeometryInfoKHR * pInfos, + const VkAccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos ) const VULKAN_HPP_NOEXCEPT + { + return ::vkBuildAccelerationStructuresKHR( device, deferredOperation, infoCount, pInfos, ppBuildRangeInfos ); + } + + VkResult vkCopyAccelerationStructureKHR( VkDevice device, + VkDeferredOperationKHR deferredOperation, + const VkCopyAccelerationStructureInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCopyAccelerationStructureKHR( device, deferredOperation, pInfo ); + } + + VkResult vkCopyAccelerationStructureToMemoryKHR( VkDevice device, + VkDeferredOperationKHR deferredOperation, + const VkCopyAccelerationStructureToMemoryInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCopyAccelerationStructureToMemoryKHR( device, deferredOperation, pInfo ); + } + + VkResult 
vkCopyMemoryToAccelerationStructureKHR( VkDevice device, + VkDeferredOperationKHR deferredOperation, + const VkCopyMemoryToAccelerationStructureInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCopyMemoryToAccelerationStructureKHR( device, deferredOperation, pInfo ); + } + + VkResult vkWriteAccelerationStructuresPropertiesKHR( VkDevice device, + uint32_t accelerationStructureCount, + const VkAccelerationStructureKHR * pAccelerationStructures, + VkQueryType queryType, + size_t dataSize, + void * pData, + size_t stride ) const VULKAN_HPP_NOEXCEPT + { + return ::vkWriteAccelerationStructuresPropertiesKHR( device, accelerationStructureCount, pAccelerationStructures, queryType, dataSize, pData, stride ); + } + + void vkCmdCopyAccelerationStructureKHR( VkCommandBuffer commandBuffer, const VkCopyAccelerationStructureInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyAccelerationStructureKHR( commandBuffer, pInfo ); + } + + void vkCmdCopyAccelerationStructureToMemoryKHR( VkCommandBuffer commandBuffer, + const VkCopyAccelerationStructureToMemoryInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyAccelerationStructureToMemoryKHR( commandBuffer, pInfo ); + } + + void vkCmdCopyMemoryToAccelerationStructureKHR( VkCommandBuffer commandBuffer, + const VkCopyMemoryToAccelerationStructureInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyMemoryToAccelerationStructureKHR( commandBuffer, pInfo ); + } + + VkDeviceAddress vkGetAccelerationStructureDeviceAddressKHR( VkDevice device, + const VkAccelerationStructureDeviceAddressInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetAccelerationStructureDeviceAddressKHR( device, pInfo ); + } + + void vkCmdWriteAccelerationStructuresPropertiesKHR( VkCommandBuffer commandBuffer, + uint32_t accelerationStructureCount, + const VkAccelerationStructureKHR * pAccelerationStructures, + VkQueryType queryType, + VkQueryPool queryPool, + uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdWriteAccelerationStructuresPropertiesKHR( + commandBuffer, accelerationStructureCount, pAccelerationStructures, queryType, queryPool, firstQuery ); + } + + void vkGetDeviceAccelerationStructureCompatibilityKHR( VkDevice device, + const VkAccelerationStructureVersionInfoKHR * pVersionInfo, + VkAccelerationStructureCompatibilityKHR * pCompatibility ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceAccelerationStructureCompatibilityKHR( device, pVersionInfo, pCompatibility ); + } + + void vkGetAccelerationStructureBuildSizesKHR( VkDevice device, + VkAccelerationStructureBuildTypeKHR buildType, + const VkAccelerationStructureBuildGeometryInfoKHR * pBuildInfo, + const uint32_t * pMaxPrimitiveCounts, + VkAccelerationStructureBuildSizesInfoKHR * pSizeInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetAccelerationStructureBuildSizesKHR( device, buildType, pBuildInfo, pMaxPrimitiveCounts, pSizeInfo ); + } + + //=== VK_KHR_ray_tracing_pipeline === + + void vkCmdTraceRaysKHR( VkCommandBuffer commandBuffer, + const VkStridedDeviceAddressRegionKHR * pRaygenShaderBindingTable, + const VkStridedDeviceAddressRegionKHR * pMissShaderBindingTable, + const VkStridedDeviceAddressRegionKHR * pHitShaderBindingTable, + const VkStridedDeviceAddressRegionKHR * pCallableShaderBindingTable, + uint32_t width, + uint32_t height, + uint32_t depth ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdTraceRaysKHR( + commandBuffer, pRaygenShaderBindingTable, pMissShaderBindingTable, pHitShaderBindingTable, pCallableShaderBindingTable, 
width, height, depth ); + } + + VkResult vkCreateRayTracingPipelinesKHR( VkDevice device, + VkDeferredOperationKHR deferredOperation, + VkPipelineCache pipelineCache, + uint32_t createInfoCount, + const VkRayTracingPipelineCreateInfoKHR * pCreateInfos, + const VkAllocationCallbacks * pAllocator, + VkPipeline * pPipelines ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateRayTracingPipelinesKHR( device, deferredOperation, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines ); + } + + VkResult vkGetRayTracingShaderGroupHandlesKHR( + VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void * pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetRayTracingShaderGroupHandlesKHR( device, pipeline, firstGroup, groupCount, dataSize, pData ); + } + + VkResult vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( + VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void * pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( device, pipeline, firstGroup, groupCount, dataSize, pData ); + } + + void vkCmdTraceRaysIndirectKHR( VkCommandBuffer commandBuffer, + const VkStridedDeviceAddressRegionKHR * pRaygenShaderBindingTable, + const VkStridedDeviceAddressRegionKHR * pMissShaderBindingTable, + const VkStridedDeviceAddressRegionKHR * pHitShaderBindingTable, + const VkStridedDeviceAddressRegionKHR * pCallableShaderBindingTable, + VkDeviceAddress indirectDeviceAddress ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdTraceRaysIndirectKHR( + commandBuffer, pRaygenShaderBindingTable, pMissShaderBindingTable, pHitShaderBindingTable, pCallableShaderBindingTable, indirectDeviceAddress ); + } + + VkDeviceSize vkGetRayTracingShaderGroupStackSizeKHR( VkDevice device, + VkPipeline pipeline, + uint32_t group, + VkShaderGroupShaderKHR groupShader ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetRayTracingShaderGroupStackSizeKHR( device, pipeline, group, groupShader ); + } + + void vkCmdSetRayTracingPipelineStackSizeKHR( VkCommandBuffer commandBuffer, uint32_t pipelineStackSize ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetRayTracingPipelineStackSizeKHR( commandBuffer, pipelineStackSize ); + } + + //=== VK_KHR_sampler_ycbcr_conversion === + + VkResult vkCreateSamplerYcbcrConversionKHR( VkDevice device, + const VkSamplerYcbcrConversionCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSamplerYcbcrConversion * pYcbcrConversion ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateSamplerYcbcrConversionKHR( device, pCreateInfo, pAllocator, pYcbcrConversion ); + } + + void vkDestroySamplerYcbcrConversionKHR( VkDevice device, + VkSamplerYcbcrConversion ycbcrConversion, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroySamplerYcbcrConversionKHR( device, ycbcrConversion, pAllocator ); + } + + //=== VK_KHR_bind_memory2 === + + VkResult vkBindBufferMemory2KHR( VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo * pBindInfos ) const VULKAN_HPP_NOEXCEPT + { + return ::vkBindBufferMemory2KHR( device, bindInfoCount, pBindInfos ); + } + + VkResult vkBindImageMemory2KHR( VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo * pBindInfos ) const VULKAN_HPP_NOEXCEPT + { + return ::vkBindImageMemory2KHR( device, bindInfoCount, pBindInfos ); + } + + //=== VK_EXT_image_drm_format_modifier === + + VkResult vkGetImageDrmFormatModifierPropertiesEXT( VkDevice device, + VkImage image, + 
VkImageDrmFormatModifierPropertiesEXT * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetImageDrmFormatModifierPropertiesEXT( device, image, pProperties ); + } + + //=== VK_EXT_validation_cache === + + VkResult vkCreateValidationCacheEXT( VkDevice device, + const VkValidationCacheCreateInfoEXT * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkValidationCacheEXT * pValidationCache ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateValidationCacheEXT( device, pCreateInfo, pAllocator, pValidationCache ); + } + + void + vkDestroyValidationCacheEXT( VkDevice device, VkValidationCacheEXT validationCache, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyValidationCacheEXT( device, validationCache, pAllocator ); + } + + VkResult vkMergeValidationCachesEXT( VkDevice device, + VkValidationCacheEXT dstCache, + uint32_t srcCacheCount, + const VkValidationCacheEXT * pSrcCaches ) const VULKAN_HPP_NOEXCEPT + { + return ::vkMergeValidationCachesEXT( device, dstCache, srcCacheCount, pSrcCaches ); + } + + VkResult vkGetValidationCacheDataEXT( VkDevice device, VkValidationCacheEXT validationCache, size_t * pDataSize, void * pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetValidationCacheDataEXT( device, validationCache, pDataSize, pData ); + } + + //=== VK_NV_shading_rate_image === + + void vkCmdBindShadingRateImageNV( VkCommandBuffer commandBuffer, VkImageView imageView, VkImageLayout imageLayout ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBindShadingRateImageNV( commandBuffer, imageView, imageLayout ); + } + + void vkCmdSetViewportShadingRatePaletteNV( VkCommandBuffer commandBuffer, + uint32_t firstViewport, + uint32_t viewportCount, + const VkShadingRatePaletteNV * pShadingRatePalettes ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetViewportShadingRatePaletteNV( commandBuffer, firstViewport, viewportCount, pShadingRatePalettes ); + } + + void vkCmdSetCoarseSampleOrderNV( VkCommandBuffer commandBuffer, + VkCoarseSampleOrderTypeNV sampleOrderType, + uint32_t customSampleOrderCount, + const VkCoarseSampleOrderCustomNV * pCustomSampleOrders ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetCoarseSampleOrderNV( commandBuffer, sampleOrderType, customSampleOrderCount, pCustomSampleOrders ); + } + + //=== VK_NV_ray_tracing === + + VkResult vkCreateAccelerationStructureNV( VkDevice device, + const VkAccelerationStructureCreateInfoNV * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkAccelerationStructureNV * pAccelerationStructure ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateAccelerationStructureNV( device, pCreateInfo, pAllocator, pAccelerationStructure ); + } + + void vkDestroyAccelerationStructureNV( VkDevice device, + VkAccelerationStructureNV accelerationStructure, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyAccelerationStructureNV( device, accelerationStructure, pAllocator ); + } + + void vkGetAccelerationStructureMemoryRequirementsNV( VkDevice device, + const VkAccelerationStructureMemoryRequirementsInfoNV * pInfo, + VkMemoryRequirements2KHR * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetAccelerationStructureMemoryRequirementsNV( device, pInfo, pMemoryRequirements ); + } + + VkResult vkBindAccelerationStructureMemoryNV( VkDevice device, + uint32_t bindInfoCount, + const VkBindAccelerationStructureMemoryInfoNV * pBindInfos ) const VULKAN_HPP_NOEXCEPT + { + return ::vkBindAccelerationStructureMemoryNV( device, bindInfoCount, pBindInfos ); 
+ } + + void vkCmdBuildAccelerationStructureNV( VkCommandBuffer commandBuffer, + const VkAccelerationStructureInfoNV * pInfo, + VkBuffer instanceData, + VkDeviceSize instanceOffset, + VkBool32 update, + VkAccelerationStructureNV dst, + VkAccelerationStructureNV src, + VkBuffer scratch, + VkDeviceSize scratchOffset ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBuildAccelerationStructureNV( commandBuffer, pInfo, instanceData, instanceOffset, update, dst, src, scratch, scratchOffset ); + } + + void vkCmdCopyAccelerationStructureNV( VkCommandBuffer commandBuffer, + VkAccelerationStructureNV dst, + VkAccelerationStructureNV src, + VkCopyAccelerationStructureModeKHR mode ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyAccelerationStructureNV( commandBuffer, dst, src, mode ); + } + + void vkCmdTraceRaysNV( VkCommandBuffer commandBuffer, + VkBuffer raygenShaderBindingTableBuffer, + VkDeviceSize raygenShaderBindingOffset, + VkBuffer missShaderBindingTableBuffer, + VkDeviceSize missShaderBindingOffset, + VkDeviceSize missShaderBindingStride, + VkBuffer hitShaderBindingTableBuffer, + VkDeviceSize hitShaderBindingOffset, + VkDeviceSize hitShaderBindingStride, + VkBuffer callableShaderBindingTableBuffer, + VkDeviceSize callableShaderBindingOffset, + VkDeviceSize callableShaderBindingStride, + uint32_t width, + uint32_t height, + uint32_t depth ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdTraceRaysNV( commandBuffer, + raygenShaderBindingTableBuffer, + raygenShaderBindingOffset, + missShaderBindingTableBuffer, + missShaderBindingOffset, + missShaderBindingStride, + hitShaderBindingTableBuffer, + hitShaderBindingOffset, + hitShaderBindingStride, + callableShaderBindingTableBuffer, + callableShaderBindingOffset, + callableShaderBindingStride, + width, + height, + depth ); + } + + VkResult vkCreateRayTracingPipelinesNV( VkDevice device, + VkPipelineCache pipelineCache, + uint32_t createInfoCount, + const VkRayTracingPipelineCreateInfoNV * pCreateInfos, + const VkAllocationCallbacks * pAllocator, + VkPipeline * pPipelines ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateRayTracingPipelinesNV( device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines ); + } + + VkResult vkGetRayTracingShaderGroupHandlesNV( + VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void * pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetRayTracingShaderGroupHandlesNV( device, pipeline, firstGroup, groupCount, dataSize, pData ); + } + + VkResult vkGetAccelerationStructureHandleNV( VkDevice device, + VkAccelerationStructureNV accelerationStructure, + size_t dataSize, + void * pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetAccelerationStructureHandleNV( device, accelerationStructure, dataSize, pData ); + } + + void vkCmdWriteAccelerationStructuresPropertiesNV( VkCommandBuffer commandBuffer, + uint32_t accelerationStructureCount, + const VkAccelerationStructureNV * pAccelerationStructures, + VkQueryType queryType, + VkQueryPool queryPool, + uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdWriteAccelerationStructuresPropertiesNV( + commandBuffer, accelerationStructureCount, pAccelerationStructures, queryType, queryPool, firstQuery ); + } + + VkResult vkCompileDeferredNV( VkDevice device, VkPipeline pipeline, uint32_t shader ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCompileDeferredNV( device, pipeline, shader ); + } + + //=== VK_KHR_maintenance3 === + + void vkGetDescriptorSetLayoutSupportKHR( VkDevice device, + const 
VkDescriptorSetLayoutCreateInfo * pCreateInfo, + VkDescriptorSetLayoutSupport * pSupport ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDescriptorSetLayoutSupportKHR( device, pCreateInfo, pSupport ); + } + + //=== VK_KHR_draw_indirect_count === + + void vkCmdDrawIndirectCountKHR( VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawIndirectCountKHR( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); + } + + void vkCmdDrawIndexedIndirectCountKHR( VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawIndexedIndirectCountKHR( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); + } + + //=== VK_EXT_external_memory_host === + + VkResult vkGetMemoryHostPointerPropertiesEXT( VkDevice device, + VkExternalMemoryHandleTypeFlagBits handleType, + const void * pHostPointer, + VkMemoryHostPointerPropertiesEXT * pMemoryHostPointerProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetMemoryHostPointerPropertiesEXT( device, handleType, pHostPointer, pMemoryHostPointerProperties ); + } + + //=== VK_AMD_buffer_marker === + + void vkCmdWriteBufferMarkerAMD( VkCommandBuffer commandBuffer, + VkPipelineStageFlagBits pipelineStage, + VkBuffer dstBuffer, + VkDeviceSize dstOffset, + uint32_t marker ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdWriteBufferMarkerAMD( commandBuffer, pipelineStage, dstBuffer, dstOffset, marker ); + } + + void vkCmdWriteBufferMarker2AMD( + VkCommandBuffer commandBuffer, VkPipelineStageFlags2 stage, VkBuffer dstBuffer, VkDeviceSize dstOffset, uint32_t marker ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdWriteBufferMarker2AMD( commandBuffer, stage, dstBuffer, dstOffset, marker ); + } + + //=== VK_EXT_calibrated_timestamps === + + VkResult vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( VkPhysicalDevice physicalDevice, + uint32_t * pTimeDomainCount, + VkTimeDomainKHR * pTimeDomains ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( physicalDevice, pTimeDomainCount, pTimeDomains ); + } + + VkResult vkGetCalibratedTimestampsEXT( VkDevice device, + uint32_t timestampCount, + const VkCalibratedTimestampInfoKHR * pTimestampInfos, + uint64_t * pTimestamps, + uint64_t * pMaxDeviation ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetCalibratedTimestampsEXT( device, timestampCount, pTimestampInfos, pTimestamps, pMaxDeviation ); + } + + //=== VK_NV_mesh_shader === + + void vkCmdDrawMeshTasksNV( VkCommandBuffer commandBuffer, uint32_t taskCount, uint32_t firstTask ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawMeshTasksNV( commandBuffer, taskCount, firstTask ); + } + + void vkCmdDrawMeshTasksIndirectNV( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawMeshTasksIndirectNV( commandBuffer, buffer, offset, drawCount, stride ); + } + + void vkCmdDrawMeshTasksIndirectCountNV( VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawMeshTasksIndirectCountNV( commandBuffer, 
buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); + } + + //=== VK_NV_scissor_exclusive === + + void vkCmdSetExclusiveScissorEnableNV( VkCommandBuffer commandBuffer, + uint32_t firstExclusiveScissor, + uint32_t exclusiveScissorCount, + const VkBool32 * pExclusiveScissorEnables ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetExclusiveScissorEnableNV( commandBuffer, firstExclusiveScissor, exclusiveScissorCount, pExclusiveScissorEnables ); + } + + void vkCmdSetExclusiveScissorNV( VkCommandBuffer commandBuffer, + uint32_t firstExclusiveScissor, + uint32_t exclusiveScissorCount, + const VkRect2D * pExclusiveScissors ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetExclusiveScissorNV( commandBuffer, firstExclusiveScissor, exclusiveScissorCount, pExclusiveScissors ); + } + + //=== VK_NV_device_diagnostic_checkpoints === + + void vkCmdSetCheckpointNV( VkCommandBuffer commandBuffer, const void * pCheckpointMarker ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetCheckpointNV( commandBuffer, pCheckpointMarker ); + } + + void vkGetQueueCheckpointDataNV( VkQueue queue, uint32_t * pCheckpointDataCount, VkCheckpointDataNV * pCheckpointData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetQueueCheckpointDataNV( queue, pCheckpointDataCount, pCheckpointData ); + } + + void vkGetQueueCheckpointData2NV( VkQueue queue, uint32_t * pCheckpointDataCount, VkCheckpointData2NV * pCheckpointData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetQueueCheckpointData2NV( queue, pCheckpointDataCount, pCheckpointData ); + } + + //=== VK_KHR_timeline_semaphore === + + VkResult vkGetSemaphoreCounterValueKHR( VkDevice device, VkSemaphore semaphore, uint64_t * pValue ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetSemaphoreCounterValueKHR( device, semaphore, pValue ); + } + + VkResult vkWaitSemaphoresKHR( VkDevice device, const VkSemaphoreWaitInfo * pWaitInfo, uint64_t timeout ) const VULKAN_HPP_NOEXCEPT + { + return ::vkWaitSemaphoresKHR( device, pWaitInfo, timeout ); + } + + VkResult vkSignalSemaphoreKHR( VkDevice device, const VkSemaphoreSignalInfo * pSignalInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkSignalSemaphoreKHR( device, pSignalInfo ); + } + + //=== VK_INTEL_performance_query === + + VkResult vkInitializePerformanceApiINTEL( VkDevice device, const VkInitializePerformanceApiInfoINTEL * pInitializeInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkInitializePerformanceApiINTEL( device, pInitializeInfo ); + } + + void vkUninitializePerformanceApiINTEL( VkDevice device ) const VULKAN_HPP_NOEXCEPT + { + return ::vkUninitializePerformanceApiINTEL( device ); + } + + VkResult vkCmdSetPerformanceMarkerINTEL( VkCommandBuffer commandBuffer, const VkPerformanceMarkerInfoINTEL * pMarkerInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetPerformanceMarkerINTEL( commandBuffer, pMarkerInfo ); + } + + VkResult vkCmdSetPerformanceStreamMarkerINTEL( VkCommandBuffer commandBuffer, + const VkPerformanceStreamMarkerInfoINTEL * pMarkerInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetPerformanceStreamMarkerINTEL( commandBuffer, pMarkerInfo ); + } + + VkResult vkCmdSetPerformanceOverrideINTEL( VkCommandBuffer commandBuffer, const VkPerformanceOverrideInfoINTEL * pOverrideInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetPerformanceOverrideINTEL( commandBuffer, pOverrideInfo ); + } + + VkResult vkAcquirePerformanceConfigurationINTEL( VkDevice device, + const VkPerformanceConfigurationAcquireInfoINTEL * pAcquireInfo, + VkPerformanceConfigurationINTEL * pConfiguration ) const VULKAN_HPP_NOEXCEPT 
+ { + return ::vkAcquirePerformanceConfigurationINTEL( device, pAcquireInfo, pConfiguration ); + } + + VkResult vkReleasePerformanceConfigurationINTEL( VkDevice device, VkPerformanceConfigurationINTEL configuration ) const VULKAN_HPP_NOEXCEPT + { + return ::vkReleasePerformanceConfigurationINTEL( device, configuration ); + } + + VkResult vkQueueSetPerformanceConfigurationINTEL( VkQueue queue, VkPerformanceConfigurationINTEL configuration ) const VULKAN_HPP_NOEXCEPT + { + return ::vkQueueSetPerformanceConfigurationINTEL( queue, configuration ); + } + + VkResult + vkGetPerformanceParameterINTEL( VkDevice device, VkPerformanceParameterTypeINTEL parameter, VkPerformanceValueINTEL * pValue ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPerformanceParameterINTEL( device, parameter, pValue ); + } + + //=== VK_AMD_display_native_hdr === + + void vkSetLocalDimmingAMD( VkDevice device, VkSwapchainKHR swapChain, VkBool32 localDimmingEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkSetLocalDimmingAMD( device, swapChain, localDimmingEnable ); + } + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_imagepipe_surface === + + VkResult vkCreateImagePipeSurfaceFUCHSIA( VkInstance instance, + const VkImagePipeSurfaceCreateInfoFUCHSIA * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateImagePipeSurfaceFUCHSIA( instance, pCreateInfo, pAllocator, pSurface ); + } +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + +# if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_surface === + + VkResult vkCreateMetalSurfaceEXT( VkInstance instance, + const VkMetalSurfaceCreateInfoEXT * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateMetalSurfaceEXT( instance, pCreateInfo, pAllocator, pSurface ); + } +# endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== VK_KHR_fragment_shading_rate === + + VkResult vkGetPhysicalDeviceFragmentShadingRatesKHR( VkPhysicalDevice physicalDevice, + uint32_t * pFragmentShadingRateCount, + VkPhysicalDeviceFragmentShadingRateKHR * pFragmentShadingRates ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceFragmentShadingRatesKHR( physicalDevice, pFragmentShadingRateCount, pFragmentShadingRates ); + } + + void vkCmdSetFragmentShadingRateKHR( VkCommandBuffer commandBuffer, + const VkExtent2D * pFragmentSize, + const VkFragmentShadingRateCombinerOpKHR combinerOps[2] ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetFragmentShadingRateKHR( commandBuffer, pFragmentSize, combinerOps ); + } + + //=== VK_KHR_dynamic_rendering_local_read === + + void vkCmdSetRenderingAttachmentLocationsKHR( VkCommandBuffer commandBuffer, + const VkRenderingAttachmentLocationInfoKHR * pLocationInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetRenderingAttachmentLocationsKHR( commandBuffer, pLocationInfo ); + } + + void vkCmdSetRenderingInputAttachmentIndicesKHR( VkCommandBuffer commandBuffer, + const VkRenderingInputAttachmentIndexInfoKHR * pInputAttachmentIndexInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetRenderingInputAttachmentIndicesKHR( commandBuffer, pInputAttachmentIndexInfo ); + } + + //=== VK_EXT_buffer_device_address === + + VkDeviceAddress vkGetBufferDeviceAddressEXT( VkDevice device, const VkBufferDeviceAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetBufferDeviceAddressEXT( device, pInfo ); + } + + //=== VK_EXT_tooling_info === + + VkResult vkGetPhysicalDeviceToolPropertiesEXT( VkPhysicalDevice 
physicalDevice, + uint32_t * pToolCount, + VkPhysicalDeviceToolProperties * pToolProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceToolPropertiesEXT( physicalDevice, pToolCount, pToolProperties ); + } + + //=== VK_KHR_present_wait === + + VkResult vkWaitForPresentKHR( VkDevice device, VkSwapchainKHR swapchain, uint64_t presentId, uint64_t timeout ) const VULKAN_HPP_NOEXCEPT + { + return ::vkWaitForPresentKHR( device, swapchain, presentId, timeout ); + } + + //=== VK_NV_cooperative_matrix === + + VkResult vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( VkPhysicalDevice physicalDevice, + uint32_t * pPropertyCount, + VkCooperativeMatrixPropertiesNV * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( physicalDevice, pPropertyCount, pProperties ); + } + + //=== VK_NV_coverage_reduction_mode === + + VkResult vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( + VkPhysicalDevice physicalDevice, uint32_t * pCombinationCount, VkFramebufferMixedSamplesCombinationNV * pCombinations ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( physicalDevice, pCombinationCount, pCombinations ); + } + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_EXT_full_screen_exclusive === + + VkResult vkGetPhysicalDeviceSurfacePresentModes2EXT( VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, + uint32_t * pPresentModeCount, + VkPresentModeKHR * pPresentModes ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceSurfacePresentModes2EXT( physicalDevice, pSurfaceInfo, pPresentModeCount, pPresentModes ); + } + + VkResult vkAcquireFullScreenExclusiveModeEXT( VkDevice device, VkSwapchainKHR swapchain ) const VULKAN_HPP_NOEXCEPT + { + return ::vkAcquireFullScreenExclusiveModeEXT( device, swapchain ); + } + + VkResult vkReleaseFullScreenExclusiveModeEXT( VkDevice device, VkSwapchainKHR swapchain ) const VULKAN_HPP_NOEXCEPT + { + return ::vkReleaseFullScreenExclusiveModeEXT( device, swapchain ); + } + + VkResult vkGetDeviceGroupSurfacePresentModes2EXT( VkDevice device, + const VkPhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, + VkDeviceGroupPresentModeFlagsKHR * pModes ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceGroupSurfacePresentModes2EXT( device, pSurfaceInfo, pModes ); + } +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_EXT_headless_surface === + + VkResult vkCreateHeadlessSurfaceEXT( VkInstance instance, + const VkHeadlessSurfaceCreateInfoEXT * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateHeadlessSurfaceEXT( instance, pCreateInfo, pAllocator, pSurface ); + } + + //=== VK_KHR_buffer_device_address === + + VkDeviceAddress vkGetBufferDeviceAddressKHR( VkDevice device, const VkBufferDeviceAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetBufferDeviceAddressKHR( device, pInfo ); + } + + uint64_t vkGetBufferOpaqueCaptureAddressKHR( VkDevice device, const VkBufferDeviceAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetBufferOpaqueCaptureAddressKHR( device, pInfo ); + } + + uint64_t vkGetDeviceMemoryOpaqueCaptureAddressKHR( VkDevice device, const VkDeviceMemoryOpaqueCaptureAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceMemoryOpaqueCaptureAddressKHR( device, pInfo ); + } + + //=== VK_EXT_line_rasterization === + + void vkCmdSetLineStippleEXT( VkCommandBuffer 
commandBuffer, uint32_t lineStippleFactor, uint16_t lineStipplePattern ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetLineStippleEXT( commandBuffer, lineStippleFactor, lineStipplePattern ); + } + + //=== VK_EXT_host_query_reset === + + void vkResetQueryPoolEXT( VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT + { + return ::vkResetQueryPoolEXT( device, queryPool, firstQuery, queryCount ); + } + + //=== VK_EXT_extended_dynamic_state === + + void vkCmdSetCullModeEXT( VkCommandBuffer commandBuffer, VkCullModeFlags cullMode ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetCullModeEXT( commandBuffer, cullMode ); + } + + void vkCmdSetFrontFaceEXT( VkCommandBuffer commandBuffer, VkFrontFace frontFace ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetFrontFaceEXT( commandBuffer, frontFace ); + } + + void vkCmdSetPrimitiveTopologyEXT( VkCommandBuffer commandBuffer, VkPrimitiveTopology primitiveTopology ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetPrimitiveTopologyEXT( commandBuffer, primitiveTopology ); + } + + void vkCmdSetViewportWithCountEXT( VkCommandBuffer commandBuffer, uint32_t viewportCount, const VkViewport * pViewports ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetViewportWithCountEXT( commandBuffer, viewportCount, pViewports ); + } + + void vkCmdSetScissorWithCountEXT( VkCommandBuffer commandBuffer, uint32_t scissorCount, const VkRect2D * pScissors ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetScissorWithCountEXT( commandBuffer, scissorCount, pScissors ); + } + + void vkCmdBindVertexBuffers2EXT( VkCommandBuffer commandBuffer, + uint32_t firstBinding, + uint32_t bindingCount, + const VkBuffer * pBuffers, + const VkDeviceSize * pOffsets, + const VkDeviceSize * pSizes, + const VkDeviceSize * pStrides ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBindVertexBuffers2EXT( commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets, pSizes, pStrides ); + } + + void vkCmdSetDepthTestEnableEXT( VkCommandBuffer commandBuffer, VkBool32 depthTestEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDepthTestEnableEXT( commandBuffer, depthTestEnable ); + } + + void vkCmdSetDepthWriteEnableEXT( VkCommandBuffer commandBuffer, VkBool32 depthWriteEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDepthWriteEnableEXT( commandBuffer, depthWriteEnable ); + } + + void vkCmdSetDepthCompareOpEXT( VkCommandBuffer commandBuffer, VkCompareOp depthCompareOp ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDepthCompareOpEXT( commandBuffer, depthCompareOp ); + } + + void vkCmdSetDepthBoundsTestEnableEXT( VkCommandBuffer commandBuffer, VkBool32 depthBoundsTestEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDepthBoundsTestEnableEXT( commandBuffer, depthBoundsTestEnable ); + } + + void vkCmdSetStencilTestEnableEXT( VkCommandBuffer commandBuffer, VkBool32 stencilTestEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetStencilTestEnableEXT( commandBuffer, stencilTestEnable ); + } + + void vkCmdSetStencilOpEXT( VkCommandBuffer commandBuffer, + VkStencilFaceFlags faceMask, + VkStencilOp failOp, + VkStencilOp passOp, + VkStencilOp depthFailOp, + VkCompareOp compareOp ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetStencilOpEXT( commandBuffer, faceMask, failOp, passOp, depthFailOp, compareOp ); + } + + //=== VK_KHR_deferred_host_operations === + + VkResult vkCreateDeferredOperationKHR( VkDevice device, + const VkAllocationCallbacks * pAllocator, + VkDeferredOperationKHR * pDeferredOperation ) const 
VULKAN_HPP_NOEXCEPT + { + return ::vkCreateDeferredOperationKHR( device, pAllocator, pDeferredOperation ); + } + + void + vkDestroyDeferredOperationKHR( VkDevice device, VkDeferredOperationKHR operation, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyDeferredOperationKHR( device, operation, pAllocator ); + } + + uint32_t vkGetDeferredOperationMaxConcurrencyKHR( VkDevice device, VkDeferredOperationKHR operation ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeferredOperationMaxConcurrencyKHR( device, operation ); + } + + VkResult vkGetDeferredOperationResultKHR( VkDevice device, VkDeferredOperationKHR operation ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeferredOperationResultKHR( device, operation ); + } + + VkResult vkDeferredOperationJoinKHR( VkDevice device, VkDeferredOperationKHR operation ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDeferredOperationJoinKHR( device, operation ); + } + + //=== VK_KHR_pipeline_executable_properties === + + VkResult vkGetPipelineExecutablePropertiesKHR( VkDevice device, + const VkPipelineInfoKHR * pPipelineInfo, + uint32_t * pExecutableCount, + VkPipelineExecutablePropertiesKHR * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPipelineExecutablePropertiesKHR( device, pPipelineInfo, pExecutableCount, pProperties ); + } + + VkResult vkGetPipelineExecutableStatisticsKHR( VkDevice device, + const VkPipelineExecutableInfoKHR * pExecutableInfo, + uint32_t * pStatisticCount, + VkPipelineExecutableStatisticKHR * pStatistics ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPipelineExecutableStatisticsKHR( device, pExecutableInfo, pStatisticCount, pStatistics ); + } + + VkResult + vkGetPipelineExecutableInternalRepresentationsKHR( VkDevice device, + const VkPipelineExecutableInfoKHR * pExecutableInfo, + uint32_t * pInternalRepresentationCount, + VkPipelineExecutableInternalRepresentationKHR * pInternalRepresentations ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPipelineExecutableInternalRepresentationsKHR( device, pExecutableInfo, pInternalRepresentationCount, pInternalRepresentations ); + } + + //=== VK_EXT_host_image_copy === + + VkResult vkCopyMemoryToImageEXT( VkDevice device, const VkCopyMemoryToImageInfoEXT * pCopyMemoryToImageInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCopyMemoryToImageEXT( device, pCopyMemoryToImageInfo ); + } + + VkResult vkCopyImageToMemoryEXT( VkDevice device, const VkCopyImageToMemoryInfoEXT * pCopyImageToMemoryInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCopyImageToMemoryEXT( device, pCopyImageToMemoryInfo ); + } + + VkResult vkCopyImageToImageEXT( VkDevice device, const VkCopyImageToImageInfoEXT * pCopyImageToImageInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCopyImageToImageEXT( device, pCopyImageToImageInfo ); + } + + VkResult vkTransitionImageLayoutEXT( VkDevice device, + uint32_t transitionCount, + const VkHostImageLayoutTransitionInfoEXT * pTransitions ) const VULKAN_HPP_NOEXCEPT + { + return ::vkTransitionImageLayoutEXT( device, transitionCount, pTransitions ); + } + + void vkGetImageSubresourceLayout2EXT( VkDevice device, + VkImage image, + const VkImageSubresource2KHR * pSubresource, + VkSubresourceLayout2KHR * pLayout ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetImageSubresourceLayout2EXT( device, image, pSubresource, pLayout ); + } + + //=== VK_KHR_map_memory2 === + + VkResult vkMapMemory2KHR( VkDevice device, const VkMemoryMapInfoKHR * pMemoryMapInfo, void ** ppData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkMapMemory2KHR( device, 
pMemoryMapInfo, ppData ); + } + + VkResult vkUnmapMemory2KHR( VkDevice device, const VkMemoryUnmapInfoKHR * pMemoryUnmapInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkUnmapMemory2KHR( device, pMemoryUnmapInfo ); + } + + //=== VK_EXT_swapchain_maintenance1 === + + VkResult vkReleaseSwapchainImagesEXT( VkDevice device, const VkReleaseSwapchainImagesInfoEXT * pReleaseInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkReleaseSwapchainImagesEXT( device, pReleaseInfo ); + } + + //=== VK_NV_device_generated_commands === + + void vkGetGeneratedCommandsMemoryRequirementsNV( VkDevice device, + const VkGeneratedCommandsMemoryRequirementsInfoNV * pInfo, + VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetGeneratedCommandsMemoryRequirementsNV( device, pInfo, pMemoryRequirements ); + } + + void vkCmdPreprocessGeneratedCommandsNV( VkCommandBuffer commandBuffer, + const VkGeneratedCommandsInfoNV * pGeneratedCommandsInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdPreprocessGeneratedCommandsNV( commandBuffer, pGeneratedCommandsInfo ); + } + + void vkCmdExecuteGeneratedCommandsNV( VkCommandBuffer commandBuffer, + VkBool32 isPreprocessed, + const VkGeneratedCommandsInfoNV * pGeneratedCommandsInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdExecuteGeneratedCommandsNV( commandBuffer, isPreprocessed, pGeneratedCommandsInfo ); + } + + void vkCmdBindPipelineShaderGroupNV( VkCommandBuffer commandBuffer, + VkPipelineBindPoint pipelineBindPoint, + VkPipeline pipeline, + uint32_t groupIndex ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBindPipelineShaderGroupNV( commandBuffer, pipelineBindPoint, pipeline, groupIndex ); + } + + VkResult vkCreateIndirectCommandsLayoutNV( VkDevice device, + const VkIndirectCommandsLayoutCreateInfoNV * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkIndirectCommandsLayoutNV * pIndirectCommandsLayout ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateIndirectCommandsLayoutNV( device, pCreateInfo, pAllocator, pIndirectCommandsLayout ); + } + + void vkDestroyIndirectCommandsLayoutNV( VkDevice device, + VkIndirectCommandsLayoutNV indirectCommandsLayout, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyIndirectCommandsLayoutNV( device, indirectCommandsLayout, pAllocator ); + } + + //=== VK_EXT_depth_bias_control === + + void vkCmdSetDepthBias2EXT( VkCommandBuffer commandBuffer, const VkDepthBiasInfoEXT * pDepthBiasInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDepthBias2EXT( commandBuffer, pDepthBiasInfo ); + } + + //=== VK_EXT_acquire_drm_display === + + VkResult vkAcquireDrmDisplayEXT( VkPhysicalDevice physicalDevice, int32_t drmFd, VkDisplayKHR display ) const VULKAN_HPP_NOEXCEPT + { + return ::vkAcquireDrmDisplayEXT( physicalDevice, drmFd, display ); + } + + VkResult vkGetDrmDisplayEXT( VkPhysicalDevice physicalDevice, int32_t drmFd, uint32_t connectorId, VkDisplayKHR * display ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDrmDisplayEXT( physicalDevice, drmFd, connectorId, display ); + } + + //=== VK_EXT_private_data === + + VkResult vkCreatePrivateDataSlotEXT( VkDevice device, + const VkPrivateDataSlotCreateInfo * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkPrivateDataSlot * pPrivateDataSlot ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreatePrivateDataSlotEXT( device, pCreateInfo, pAllocator, pPrivateDataSlot ); + } + + void vkDestroyPrivateDataSlotEXT( VkDevice device, VkPrivateDataSlot privateDataSlot, const VkAllocationCallbacks * pAllocator ) 
const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyPrivateDataSlotEXT( device, privateDataSlot, pAllocator ); + } + + VkResult vkSetPrivateDataEXT( VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlot privateDataSlot, uint64_t data ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkSetPrivateDataEXT( device, objectType, objectHandle, privateDataSlot, data ); + } + + void vkGetPrivateDataEXT( VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlot privateDataSlot, uint64_t * pData ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkGetPrivateDataEXT( device, objectType, objectHandle, privateDataSlot, pData ); + } + + //=== VK_KHR_video_encode_queue === + + VkResult + vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR( VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceVideoEncodeQualityLevelInfoKHR * pQualityLevelInfo, + VkVideoEncodeQualityLevelPropertiesKHR * pQualityLevelProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR( physicalDevice, pQualityLevelInfo, pQualityLevelProperties ); + } + + VkResult vkGetEncodedVideoSessionParametersKHR( VkDevice device, + const VkVideoEncodeSessionParametersGetInfoKHR * pVideoSessionParametersInfo, + VkVideoEncodeSessionParametersFeedbackInfoKHR * pFeedbackInfo, + size_t * pDataSize, + void * pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetEncodedVideoSessionParametersKHR( device, pVideoSessionParametersInfo, pFeedbackInfo, pDataSize, pData ); + } + + void vkCmdEncodeVideoKHR( VkCommandBuffer commandBuffer, const VkVideoEncodeInfoKHR * pEncodeInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdEncodeVideoKHR( commandBuffer, pEncodeInfo ); + } + +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_NV_cuda_kernel_launch === + + VkResult vkCreateCudaModuleNV( VkDevice device, + const VkCudaModuleCreateInfoNV * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkCudaModuleNV * pModule ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateCudaModuleNV( device, pCreateInfo, pAllocator, pModule ); + } + + VkResult vkGetCudaModuleCacheNV( VkDevice device, VkCudaModuleNV module, size_t * pCacheSize, void * pCacheData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetCudaModuleCacheNV( device, module, pCacheSize, pCacheData ); + } + + VkResult vkCreateCudaFunctionNV( VkDevice device, + const VkCudaFunctionCreateInfoNV * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkCudaFunctionNV * pFunction ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateCudaFunctionNV( device, pCreateInfo, pAllocator, pFunction ); + } + + void vkDestroyCudaModuleNV( VkDevice device, VkCudaModuleNV module, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyCudaModuleNV( device, module, pAllocator ); + } + + void vkDestroyCudaFunctionNV( VkDevice device, VkCudaFunctionNV function, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyCudaFunctionNV( device, function, pAllocator ); + } + + void vkCmdCudaLaunchKernelNV( VkCommandBuffer commandBuffer, const VkCudaLaunchInfoNV * pLaunchInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCudaLaunchKernelNV( commandBuffer, pLaunchInfo ); + } +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +# if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_objects === + + void vkExportMetalObjectsEXT( VkDevice device, VkExportMetalObjectsInfoEXT * pMetalObjectsInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkExportMetalObjectsEXT( device, 
pMetalObjectsInfo ); + } +# endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== VK_KHR_synchronization2 === + + void vkCmdSetEvent2KHR( VkCommandBuffer commandBuffer, VkEvent event, const VkDependencyInfo * pDependencyInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetEvent2KHR( commandBuffer, event, pDependencyInfo ); + } + + void vkCmdResetEvent2KHR( VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags2 stageMask ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdResetEvent2KHR( commandBuffer, event, stageMask ); + } + + void vkCmdWaitEvents2KHR( VkCommandBuffer commandBuffer, + uint32_t eventCount, + const VkEvent * pEvents, + const VkDependencyInfo * pDependencyInfos ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdWaitEvents2KHR( commandBuffer, eventCount, pEvents, pDependencyInfos ); + } + + void vkCmdPipelineBarrier2KHR( VkCommandBuffer commandBuffer, const VkDependencyInfo * pDependencyInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdPipelineBarrier2KHR( commandBuffer, pDependencyInfo ); + } + + void + vkCmdWriteTimestamp2KHR( VkCommandBuffer commandBuffer, VkPipelineStageFlags2 stage, VkQueryPool queryPool, uint32_t query ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdWriteTimestamp2KHR( commandBuffer, stage, queryPool, query ); + } + + VkResult vkQueueSubmit2KHR( VkQueue queue, uint32_t submitCount, const VkSubmitInfo2 * pSubmits, VkFence fence ) const VULKAN_HPP_NOEXCEPT + { + return ::vkQueueSubmit2KHR( queue, submitCount, pSubmits, fence ); + } + + //=== VK_EXT_descriptor_buffer === + + void vkGetDescriptorSetLayoutSizeEXT( VkDevice device, VkDescriptorSetLayout layout, VkDeviceSize * pLayoutSizeInBytes ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDescriptorSetLayoutSizeEXT( device, layout, pLayoutSizeInBytes ); + } + + void vkGetDescriptorSetLayoutBindingOffsetEXT( VkDevice device, + VkDescriptorSetLayout layout, + uint32_t binding, + VkDeviceSize * pOffset ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDescriptorSetLayoutBindingOffsetEXT( device, layout, binding, pOffset ); + } + + void vkGetDescriptorEXT( VkDevice device, const VkDescriptorGetInfoEXT * pDescriptorInfo, size_t dataSize, void * pDescriptor ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDescriptorEXT( device, pDescriptorInfo, dataSize, pDescriptor ); + } + + void vkCmdBindDescriptorBuffersEXT( VkCommandBuffer commandBuffer, + uint32_t bufferCount, + const VkDescriptorBufferBindingInfoEXT * pBindingInfos ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBindDescriptorBuffersEXT( commandBuffer, bufferCount, pBindingInfos ); + } + + void vkCmdSetDescriptorBufferOffsetsEXT( VkCommandBuffer commandBuffer, + VkPipelineBindPoint pipelineBindPoint, + VkPipelineLayout layout, + uint32_t firstSet, + uint32_t setCount, + const uint32_t * pBufferIndices, + const VkDeviceSize * pOffsets ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDescriptorBufferOffsetsEXT( commandBuffer, pipelineBindPoint, layout, firstSet, setCount, pBufferIndices, pOffsets ); + } + + void vkCmdBindDescriptorBufferEmbeddedSamplersEXT( VkCommandBuffer commandBuffer, + VkPipelineBindPoint pipelineBindPoint, + VkPipelineLayout layout, + uint32_t set ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBindDescriptorBufferEmbeddedSamplersEXT( commandBuffer, pipelineBindPoint, layout, set ); + } + + VkResult + vkGetBufferOpaqueCaptureDescriptorDataEXT( VkDevice device, const VkBufferCaptureDescriptorDataInfoEXT * pInfo, void * pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetBufferOpaqueCaptureDescriptorDataEXT( device, pInfo, pData ); 
+ } + + VkResult + vkGetImageOpaqueCaptureDescriptorDataEXT( VkDevice device, const VkImageCaptureDescriptorDataInfoEXT * pInfo, void * pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetImageOpaqueCaptureDescriptorDataEXT( device, pInfo, pData ); + } + + VkResult vkGetImageViewOpaqueCaptureDescriptorDataEXT( VkDevice device, + const VkImageViewCaptureDescriptorDataInfoEXT * pInfo, + void * pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetImageViewOpaqueCaptureDescriptorDataEXT( device, pInfo, pData ); + } + + VkResult vkGetSamplerOpaqueCaptureDescriptorDataEXT( VkDevice device, + const VkSamplerCaptureDescriptorDataInfoEXT * pInfo, + void * pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetSamplerOpaqueCaptureDescriptorDataEXT( device, pInfo, pData ); + } + + VkResult vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT( VkDevice device, + const VkAccelerationStructureCaptureDescriptorDataInfoEXT * pInfo, + void * pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT( device, pInfo, pData ); + } + + //=== VK_NV_fragment_shading_rate_enums === + + void vkCmdSetFragmentShadingRateEnumNV( VkCommandBuffer commandBuffer, + VkFragmentShadingRateNV shadingRate, + const VkFragmentShadingRateCombinerOpKHR combinerOps[2] ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetFragmentShadingRateEnumNV( commandBuffer, shadingRate, combinerOps ); + } + + //=== VK_EXT_mesh_shader === + + void vkCmdDrawMeshTasksEXT( VkCommandBuffer commandBuffer, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawMeshTasksEXT( commandBuffer, groupCountX, groupCountY, groupCountZ ); + } + + void vkCmdDrawMeshTasksIndirectEXT( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawMeshTasksIndirectEXT( commandBuffer, buffer, offset, drawCount, stride ); + } + + void vkCmdDrawMeshTasksIndirectCountEXT( VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawMeshTasksIndirectCountEXT( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); + } + + //=== VK_KHR_copy_commands2 === + + void vkCmdCopyBuffer2KHR( VkCommandBuffer commandBuffer, const VkCopyBufferInfo2 * pCopyBufferInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyBuffer2KHR( commandBuffer, pCopyBufferInfo ); + } + + void vkCmdCopyImage2KHR( VkCommandBuffer commandBuffer, const VkCopyImageInfo2 * pCopyImageInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyImage2KHR( commandBuffer, pCopyImageInfo ); + } + + void vkCmdCopyBufferToImage2KHR( VkCommandBuffer commandBuffer, const VkCopyBufferToImageInfo2 * pCopyBufferToImageInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyBufferToImage2KHR( commandBuffer, pCopyBufferToImageInfo ); + } + + void vkCmdCopyImageToBuffer2KHR( VkCommandBuffer commandBuffer, const VkCopyImageToBufferInfo2 * pCopyImageToBufferInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyImageToBuffer2KHR( commandBuffer, pCopyImageToBufferInfo ); + } + + void vkCmdBlitImage2KHR( VkCommandBuffer commandBuffer, const VkBlitImageInfo2 * pBlitImageInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBlitImage2KHR( commandBuffer, pBlitImageInfo ); + } + + void vkCmdResolveImage2KHR( VkCommandBuffer 
commandBuffer, const VkResolveImageInfo2 * pResolveImageInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdResolveImage2KHR( commandBuffer, pResolveImageInfo ); + } + + //=== VK_EXT_device_fault === + + VkResult vkGetDeviceFaultInfoEXT( VkDevice device, VkDeviceFaultCountsEXT * pFaultCounts, VkDeviceFaultInfoEXT * pFaultInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceFaultInfoEXT( device, pFaultCounts, pFaultInfo ); + } + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_NV_acquire_winrt_display === + + VkResult vkAcquireWinrtDisplayNV( VkPhysicalDevice physicalDevice, VkDisplayKHR display ) const VULKAN_HPP_NOEXCEPT + { + return ::vkAcquireWinrtDisplayNV( physicalDevice, display ); + } + + VkResult vkGetWinrtDisplayNV( VkPhysicalDevice physicalDevice, uint32_t deviceRelativeId, VkDisplayKHR * pDisplay ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetWinrtDisplayNV( physicalDevice, deviceRelativeId, pDisplay ); + } # endif /*VK_USE_PLATFORM_WIN32_KHR*/ # if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) - //=== VK_EXT_directfb_surface === + //=== VK_EXT_directfb_surface === - VkResult vkCreateDirectFBSurfaceEXT( VkInstance instance, - const VkDirectFBSurfaceCreateInfoEXT * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateDirectFBSurfaceEXT( instance, pCreateInfo, pAllocator, pSurface ); - } + VkResult vkCreateDirectFBSurfaceEXT( VkInstance instance, + const VkDirectFBSurfaceCreateInfoEXT * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateDirectFBSurfaceEXT( instance, pCreateInfo, pAllocator, pSurface ); + } - VkBool32 - vkGetPhysicalDeviceDirectFBPresentationSupportEXT( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, IDirectFB * dfb ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceDirectFBPresentationSupportEXT( physicalDevice, queueFamilyIndex, dfb ); - } + VkBool32 vkGetPhysicalDeviceDirectFBPresentationSupportEXT( VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + IDirectFB * dfb ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceDirectFBPresentationSupportEXT( physicalDevice, queueFamilyIndex, dfb ); + } # endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ - //=== VK_EXT_vertex_input_dynamic_state === + //=== VK_EXT_vertex_input_dynamic_state === - void vkCmdSetVertexInputEXT( VkCommandBuffer commandBuffer, - uint32_t vertexBindingDescriptionCount, - const VkVertexInputBindingDescription2EXT * pVertexBindingDescriptions, - uint32_t vertexAttributeDescriptionCount, - const VkVertexInputAttributeDescription2EXT * pVertexAttributeDescriptions ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetVertexInputEXT( - commandBuffer, vertexBindingDescriptionCount, pVertexBindingDescriptions, vertexAttributeDescriptionCount, pVertexAttributeDescriptions ); - } + void vkCmdSetVertexInputEXT( VkCommandBuffer commandBuffer, + uint32_t vertexBindingDescriptionCount, + const VkVertexInputBindingDescription2EXT * pVertexBindingDescriptions, + uint32_t vertexAttributeDescriptionCount, + const VkVertexInputAttributeDescription2EXT * pVertexAttributeDescriptions ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetVertexInputEXT( + commandBuffer, vertexBindingDescriptionCount, pVertexBindingDescriptions, vertexAttributeDescriptionCount, pVertexAttributeDescriptions ); + } # if defined( VK_USE_PLATFORM_FUCHSIA ) - //=== VK_FUCHSIA_external_memory === + //=== VK_FUCHSIA_external_memory === - 
VkResult vkGetMemoryZirconHandleFUCHSIA( VkDevice device, - const VkMemoryGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo, - zx_handle_t * pZirconHandle ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetMemoryZirconHandleFUCHSIA( device, pGetZirconHandleInfo, pZirconHandle ); - } + VkResult vkGetMemoryZirconHandleFUCHSIA( VkDevice device, + const VkMemoryGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo, + zx_handle_t * pZirconHandle ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetMemoryZirconHandleFUCHSIA( device, pGetZirconHandleInfo, pZirconHandle ); + } - VkResult vkGetMemoryZirconHandlePropertiesFUCHSIA( VkDevice device, - VkExternalMemoryHandleTypeFlagBits handleType, - zx_handle_t zirconHandle, - VkMemoryZirconHandlePropertiesFUCHSIA * pMemoryZirconHandleProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetMemoryZirconHandlePropertiesFUCHSIA( device, handleType, zirconHandle, pMemoryZirconHandleProperties ); - } + VkResult vkGetMemoryZirconHandlePropertiesFUCHSIA( VkDevice device, + VkExternalMemoryHandleTypeFlagBits handleType, + zx_handle_t zirconHandle, + VkMemoryZirconHandlePropertiesFUCHSIA * pMemoryZirconHandleProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetMemoryZirconHandlePropertiesFUCHSIA( device, handleType, zirconHandle, pMemoryZirconHandleProperties ); + } # endif /*VK_USE_PLATFORM_FUCHSIA*/ # if defined( VK_USE_PLATFORM_FUCHSIA ) - //=== VK_FUCHSIA_external_semaphore === + //=== VK_FUCHSIA_external_semaphore === - VkResult vkImportSemaphoreZirconHandleFUCHSIA( VkDevice device, - const VkImportSemaphoreZirconHandleInfoFUCHSIA * pImportSemaphoreZirconHandleInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkImportSemaphoreZirconHandleFUCHSIA( device, pImportSemaphoreZirconHandleInfo ); - } + VkResult + vkImportSemaphoreZirconHandleFUCHSIA( VkDevice device, + const VkImportSemaphoreZirconHandleInfoFUCHSIA * pImportSemaphoreZirconHandleInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkImportSemaphoreZirconHandleFUCHSIA( device, pImportSemaphoreZirconHandleInfo ); + } - VkResult vkGetSemaphoreZirconHandleFUCHSIA( VkDevice device, - const VkSemaphoreGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo, - zx_handle_t * pZirconHandle ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetSemaphoreZirconHandleFUCHSIA( device, pGetZirconHandleInfo, pZirconHandle ); - } + VkResult vkGetSemaphoreZirconHandleFUCHSIA( VkDevice device, + const VkSemaphoreGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo, + zx_handle_t * pZirconHandle ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetSemaphoreZirconHandleFUCHSIA( device, pGetZirconHandleInfo, pZirconHandle ); + } # endif /*VK_USE_PLATFORM_FUCHSIA*/ # if defined( VK_USE_PLATFORM_FUCHSIA ) - //=== VK_FUCHSIA_buffer_collection === - - VkResult vkCreateBufferCollectionFUCHSIA( VkDevice device, - const VkBufferCollectionCreateInfoFUCHSIA * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkBufferCollectionFUCHSIA * pCollection ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateBufferCollectionFUCHSIA( device, pCreateInfo, pAllocator, pCollection ); - } - - VkResult vkSetBufferCollectionImageConstraintsFUCHSIA( VkDevice device, - VkBufferCollectionFUCHSIA collection, - const VkImageConstraintsInfoFUCHSIA * pImageConstraintsInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkSetBufferCollectionImageConstraintsFUCHSIA( device, collection, pImageConstraintsInfo ); - } - - VkResult vkSetBufferCollectionBufferConstraintsFUCHSIA( VkDevice device, - VkBufferCollectionFUCHSIA collection, - const VkBufferConstraintsInfoFUCHSIA * 
pBufferConstraintsInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkSetBufferCollectionBufferConstraintsFUCHSIA( device, collection, pBufferConstraintsInfo ); - } - - void vkDestroyBufferCollectionFUCHSIA( VkDevice device, - VkBufferCollectionFUCHSIA collection, - const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyBufferCollectionFUCHSIA( device, collection, pAllocator ); - } - - VkResult vkGetBufferCollectionPropertiesFUCHSIA( VkDevice device, - VkBufferCollectionFUCHSIA collection, - VkBufferCollectionPropertiesFUCHSIA * pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetBufferCollectionPropertiesFUCHSIA( device, collection, pProperties ); - } -# endif /*VK_USE_PLATFORM_FUCHSIA*/ - - //=== VK_HUAWEI_subpass_shading === - - VkResult - vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI( VkDevice device, VkRenderPass renderpass, VkExtent2D * pMaxWorkgroupSize ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI( device, renderpass, pMaxWorkgroupSize ); - } - - void vkCmdSubpassShadingHUAWEI( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSubpassShadingHUAWEI( commandBuffer ); - } - - //=== VK_HUAWEI_invocation_mask === - - void vkCmdBindInvocationMaskHUAWEI( VkCommandBuffer commandBuffer, VkImageView imageView, VkImageLayout imageLayout ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBindInvocationMaskHUAWEI( commandBuffer, imageView, imageLayout ); - } - - //=== VK_NV_external_memory_rdma === - - VkResult vkGetMemoryRemoteAddressNV( VkDevice device, - const VkMemoryGetRemoteAddressInfoNV * pMemoryGetRemoteAddressInfo, - VkRemoteAddressNV * pAddress ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetMemoryRemoteAddressNV( device, pMemoryGetRemoteAddressInfo, pAddress ); - } - - //=== VK_EXT_pipeline_properties === - - VkResult - vkGetPipelinePropertiesEXT( VkDevice device, const VkPipelineInfoEXT * pPipelineInfo, VkBaseOutStructure * pPipelineProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPipelinePropertiesEXT( device, pPipelineInfo, pPipelineProperties ); - } - - //=== VK_EXT_extended_dynamic_state2 === - - void vkCmdSetPatchControlPointsEXT( VkCommandBuffer commandBuffer, uint32_t patchControlPoints ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetPatchControlPointsEXT( commandBuffer, patchControlPoints ); - } - - void vkCmdSetRasterizerDiscardEnableEXT( VkCommandBuffer commandBuffer, VkBool32 rasterizerDiscardEnable ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetRasterizerDiscardEnableEXT( commandBuffer, rasterizerDiscardEnable ); - } - - void vkCmdSetDepthBiasEnableEXT( VkCommandBuffer commandBuffer, VkBool32 depthBiasEnable ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetDepthBiasEnableEXT( commandBuffer, depthBiasEnable ); - } - - void vkCmdSetLogicOpEXT( VkCommandBuffer commandBuffer, VkLogicOp logicOp ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetLogicOpEXT( commandBuffer, logicOp ); - } - - void vkCmdSetPrimitiveRestartEnableEXT( VkCommandBuffer commandBuffer, VkBool32 primitiveRestartEnable ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetPrimitiveRestartEnableEXT( commandBuffer, primitiveRestartEnable ); - } - -# if defined( VK_USE_PLATFORM_SCREEN_QNX ) - //=== VK_QNX_screen_surface === - - VkResult vkCreateScreenSurfaceQNX( VkInstance instance, - const VkScreenSurfaceCreateInfoQNX * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateScreenSurfaceQNX( 
instance, pCreateInfo, pAllocator, pSurface ); - } - - VkBool32 vkGetPhysicalDeviceScreenPresentationSupportQNX( VkPhysicalDevice physicalDevice, - uint32_t queueFamilyIndex, - struct _screen_window * window ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceScreenPresentationSupportQNX( physicalDevice, queueFamilyIndex, window ); - } -# endif /*VK_USE_PLATFORM_SCREEN_QNX*/ - - //=== VK_EXT_color_write_enable === - - void vkCmdSetColorWriteEnableEXT( VkCommandBuffer commandBuffer, uint32_t attachmentCount, const VkBool32 * pColorWriteEnables ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetColorWriteEnableEXT( commandBuffer, attachmentCount, pColorWriteEnables ); - } - - //=== VK_KHR_ray_tracing_maintenance1 === - - void vkCmdTraceRaysIndirect2KHR( VkCommandBuffer commandBuffer, VkDeviceAddress indirectDeviceAddress ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdTraceRaysIndirect2KHR( commandBuffer, indirectDeviceAddress ); - } - - //=== VK_EXT_multi_draw === - - void vkCmdDrawMultiEXT( VkCommandBuffer commandBuffer, - uint32_t drawCount, - const VkMultiDrawInfoEXT * pVertexInfo, - uint32_t instanceCount, - uint32_t firstInstance, - uint32_t stride ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDrawMultiEXT( commandBuffer, drawCount, pVertexInfo, instanceCount, firstInstance, stride ); - } - - void vkCmdDrawMultiIndexedEXT( VkCommandBuffer commandBuffer, - uint32_t drawCount, - const VkMultiDrawIndexedInfoEXT * pIndexInfo, - uint32_t instanceCount, - uint32_t firstInstance, - uint32_t stride, - const int32_t * pVertexOffset ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDrawMultiIndexedEXT( commandBuffer, drawCount, pIndexInfo, instanceCount, firstInstance, stride, pVertexOffset ); - } - - //=== VK_EXT_opacity_micromap === - - VkResult vkCreateMicromapEXT( VkDevice device, - const VkMicromapCreateInfoEXT * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkMicromapEXT * pMicromap ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateMicromapEXT( device, pCreateInfo, pAllocator, pMicromap ); - } - - void vkDestroyMicromapEXT( VkDevice device, VkMicromapEXT micromap, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyMicromapEXT( device, micromap, pAllocator ); - } - - void vkCmdBuildMicromapsEXT( VkCommandBuffer commandBuffer, uint32_t infoCount, const VkMicromapBuildInfoEXT * pInfos ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBuildMicromapsEXT( commandBuffer, infoCount, pInfos ); - } - - VkResult vkBuildMicromapsEXT( VkDevice device, - VkDeferredOperationKHR deferredOperation, - uint32_t infoCount, - const VkMicromapBuildInfoEXT * pInfos ) const VULKAN_HPP_NOEXCEPT - { - return ::vkBuildMicromapsEXT( device, deferredOperation, infoCount, pInfos ); - } - - VkResult vkCopyMicromapEXT( VkDevice device, VkDeferredOperationKHR deferredOperation, const VkCopyMicromapInfoEXT * pInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCopyMicromapEXT( device, deferredOperation, pInfo ); - } - - VkResult vkCopyMicromapToMemoryEXT( VkDevice device, - VkDeferredOperationKHR deferredOperation, - const VkCopyMicromapToMemoryInfoEXT * pInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCopyMicromapToMemoryEXT( device, deferredOperation, pInfo ); - } - - VkResult vkCopyMemoryToMicromapEXT( VkDevice device, - VkDeferredOperationKHR deferredOperation, - const VkCopyMemoryToMicromapInfoEXT * pInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCopyMemoryToMicromapEXT( device, deferredOperation, pInfo ); - } - - VkResult 
vkWriteMicromapsPropertiesEXT( VkDevice device, - uint32_t micromapCount, - const VkMicromapEXT * pMicromaps, - VkQueryType queryType, - size_t dataSize, - void * pData, - size_t stride ) const VULKAN_HPP_NOEXCEPT - { - return ::vkWriteMicromapsPropertiesEXT( device, micromapCount, pMicromaps, queryType, dataSize, pData, stride ); - } - - void vkCmdCopyMicromapEXT( VkCommandBuffer commandBuffer, const VkCopyMicromapInfoEXT * pInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdCopyMicromapEXT( commandBuffer, pInfo ); - } - - void vkCmdCopyMicromapToMemoryEXT( VkCommandBuffer commandBuffer, const VkCopyMicromapToMemoryInfoEXT * pInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdCopyMicromapToMemoryEXT( commandBuffer, pInfo ); - } - - void vkCmdCopyMemoryToMicromapEXT( VkCommandBuffer commandBuffer, const VkCopyMemoryToMicromapInfoEXT * pInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdCopyMemoryToMicromapEXT( commandBuffer, pInfo ); - } - - void vkCmdWriteMicromapsPropertiesEXT( VkCommandBuffer commandBuffer, - uint32_t micromapCount, - const VkMicromapEXT * pMicromaps, - VkQueryType queryType, - VkQueryPool queryPool, - uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdWriteMicromapsPropertiesEXT( commandBuffer, micromapCount, pMicromaps, queryType, queryPool, firstQuery ); - } - - void vkGetDeviceMicromapCompatibilityEXT( VkDevice device, - const VkMicromapVersionInfoEXT * pVersionInfo, - VkAccelerationStructureCompatibilityKHR * pCompatibility ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDeviceMicromapCompatibilityEXT( device, pVersionInfo, pCompatibility ); - } - - void vkGetMicromapBuildSizesEXT( VkDevice device, - VkAccelerationStructureBuildTypeKHR buildType, - const VkMicromapBuildInfoEXT * pBuildInfo, - VkMicromapBuildSizesInfoEXT * pSizeInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetMicromapBuildSizesEXT( device, buildType, pBuildInfo, pSizeInfo ); - } - - //=== VK_HUAWEI_cluster_culling_shader === - - void vkCmdDrawClusterHUAWEI( VkCommandBuffer commandBuffer, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDrawClusterHUAWEI( commandBuffer, groupCountX, groupCountY, groupCountZ ); - } - - void vkCmdDrawClusterIndirectHUAWEI( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDrawClusterIndirectHUAWEI( commandBuffer, buffer, offset ); - } - - //=== VK_EXT_pageable_device_local_memory === - - void vkSetDeviceMemoryPriorityEXT( VkDevice device, VkDeviceMemory memory, float priority ) const VULKAN_HPP_NOEXCEPT - { - return ::vkSetDeviceMemoryPriorityEXT( device, memory, priority ); - } - - //=== VK_KHR_maintenance4 === - - void vkGetDeviceBufferMemoryRequirementsKHR( VkDevice device, - const VkDeviceBufferMemoryRequirements * pInfo, - VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDeviceBufferMemoryRequirementsKHR( device, pInfo, pMemoryRequirements ); - } - - void vkGetDeviceImageMemoryRequirementsKHR( VkDevice device, - const VkDeviceImageMemoryRequirements * pInfo, - VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDeviceImageMemoryRequirementsKHR( device, pInfo, pMemoryRequirements ); - } - - void vkGetDeviceImageSparseMemoryRequirementsKHR( VkDevice device, - const VkDeviceImageMemoryRequirements * pInfo, - uint32_t * pSparseMemoryRequirementCount, - VkSparseImageMemoryRequirements2 * pSparseMemoryRequirements ) const 
VULKAN_HPP_NOEXCEPT - { - return ::vkGetDeviceImageSparseMemoryRequirementsKHR( device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements ); - } - - //=== VK_VALVE_descriptor_set_host_mapping === - - void vkGetDescriptorSetLayoutHostMappingInfoVALVE( VkDevice device, - const VkDescriptorSetBindingReferenceVALVE * pBindingReference, - VkDescriptorSetLayoutHostMappingInfoVALVE * pHostMapping ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDescriptorSetLayoutHostMappingInfoVALVE( device, pBindingReference, pHostMapping ); - } - - void vkGetDescriptorSetHostMappingVALVE( VkDevice device, VkDescriptorSet descriptorSet, void ** ppData ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDescriptorSetHostMappingVALVE( device, descriptorSet, ppData ); - } - - //=== VK_NV_copy_memory_indirect === - - void vkCmdCopyMemoryIndirectNV( VkCommandBuffer commandBuffer, - VkDeviceAddress copyBufferAddress, - uint32_t copyCount, - uint32_t stride ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdCopyMemoryIndirectNV( commandBuffer, copyBufferAddress, copyCount, stride ); - } - - void vkCmdCopyMemoryToImageIndirectNV( VkCommandBuffer commandBuffer, - VkDeviceAddress copyBufferAddress, - uint32_t copyCount, - uint32_t stride, - VkImage dstImage, - VkImageLayout dstImageLayout, - const VkImageSubresourceLayers * pImageSubresources ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdCopyMemoryToImageIndirectNV( commandBuffer, copyBufferAddress, copyCount, stride, dstImage, dstImageLayout, pImageSubresources ); - } - - //=== VK_NV_memory_decompression === - - void vkCmdDecompressMemoryNV( VkCommandBuffer commandBuffer, - uint32_t decompressRegionCount, - const VkDecompressMemoryRegionNV * pDecompressMemoryRegions ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDecompressMemoryNV( commandBuffer, decompressRegionCount, pDecompressMemoryRegions ); - } - - void vkCmdDecompressMemoryIndirectCountNV( VkCommandBuffer commandBuffer, - VkDeviceAddress indirectCommandsAddress, - VkDeviceAddress indirectCommandsCountAddress, - uint32_t stride ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDecompressMemoryIndirectCountNV( commandBuffer, indirectCommandsAddress, indirectCommandsCountAddress, stride ); - } - - //=== VK_NV_device_generated_commands_compute === - - void vkGetPipelineIndirectMemoryRequirementsNV( VkDevice device, - const VkComputePipelineCreateInfo * pCreateInfo, - VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPipelineIndirectMemoryRequirementsNV( device, pCreateInfo, pMemoryRequirements ); - } - - void - vkCmdUpdatePipelineIndirectBufferNV( VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdUpdatePipelineIndirectBufferNV( commandBuffer, pipelineBindPoint, pipeline ); - } - - VkDeviceAddress vkGetPipelineIndirectDeviceAddressNV( VkDevice device, const VkPipelineIndirectDeviceAddressInfoNV * pInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPipelineIndirectDeviceAddressNV( device, pInfo ); - } - - //=== VK_EXT_extended_dynamic_state3 === - - void vkCmdSetDepthClampEnableEXT( VkCommandBuffer commandBuffer, VkBool32 depthClampEnable ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetDepthClampEnableEXT( commandBuffer, depthClampEnable ); - } - - void vkCmdSetPolygonModeEXT( VkCommandBuffer commandBuffer, VkPolygonMode polygonMode ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetPolygonModeEXT( commandBuffer, polygonMode ); - } - - void vkCmdSetRasterizationSamplesEXT( 
VkCommandBuffer commandBuffer, VkSampleCountFlagBits rasterizationSamples ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetRasterizationSamplesEXT( commandBuffer, rasterizationSamples ); - } - - void vkCmdSetSampleMaskEXT( VkCommandBuffer commandBuffer, VkSampleCountFlagBits samples, const VkSampleMask * pSampleMask ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetSampleMaskEXT( commandBuffer, samples, pSampleMask ); - } - - void vkCmdSetAlphaToCoverageEnableEXT( VkCommandBuffer commandBuffer, VkBool32 alphaToCoverageEnable ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetAlphaToCoverageEnableEXT( commandBuffer, alphaToCoverageEnable ); - } - - void vkCmdSetAlphaToOneEnableEXT( VkCommandBuffer commandBuffer, VkBool32 alphaToOneEnable ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetAlphaToOneEnableEXT( commandBuffer, alphaToOneEnable ); - } - - void vkCmdSetLogicOpEnableEXT( VkCommandBuffer commandBuffer, VkBool32 logicOpEnable ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetLogicOpEnableEXT( commandBuffer, logicOpEnable ); - } - - void vkCmdSetColorBlendEnableEXT( VkCommandBuffer commandBuffer, - uint32_t firstAttachment, - uint32_t attachmentCount, - const VkBool32 * pColorBlendEnables ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetColorBlendEnableEXT( commandBuffer, firstAttachment, attachmentCount, pColorBlendEnables ); - } - - void vkCmdSetColorBlendEquationEXT( VkCommandBuffer commandBuffer, - uint32_t firstAttachment, - uint32_t attachmentCount, - const VkColorBlendEquationEXT * pColorBlendEquations ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetColorBlendEquationEXT( commandBuffer, firstAttachment, attachmentCount, pColorBlendEquations ); - } - - void vkCmdSetColorWriteMaskEXT( VkCommandBuffer commandBuffer, - uint32_t firstAttachment, - uint32_t attachmentCount, - const VkColorComponentFlags * pColorWriteMasks ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetColorWriteMaskEXT( commandBuffer, firstAttachment, attachmentCount, pColorWriteMasks ); - } - - void vkCmdSetTessellationDomainOriginEXT( VkCommandBuffer commandBuffer, VkTessellationDomainOrigin domainOrigin ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetTessellationDomainOriginEXT( commandBuffer, domainOrigin ); - } - - void vkCmdSetRasterizationStreamEXT( VkCommandBuffer commandBuffer, uint32_t rasterizationStream ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetRasterizationStreamEXT( commandBuffer, rasterizationStream ); - } - - void vkCmdSetConservativeRasterizationModeEXT( VkCommandBuffer commandBuffer, - VkConservativeRasterizationModeEXT conservativeRasterizationMode ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetConservativeRasterizationModeEXT( commandBuffer, conservativeRasterizationMode ); - } - - void vkCmdSetExtraPrimitiveOverestimationSizeEXT( VkCommandBuffer commandBuffer, float extraPrimitiveOverestimationSize ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetExtraPrimitiveOverestimationSizeEXT( commandBuffer, extraPrimitiveOverestimationSize ); - } - - void vkCmdSetDepthClipEnableEXT( VkCommandBuffer commandBuffer, VkBool32 depthClipEnable ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetDepthClipEnableEXT( commandBuffer, depthClipEnable ); - } - - void vkCmdSetSampleLocationsEnableEXT( VkCommandBuffer commandBuffer, VkBool32 sampleLocationsEnable ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetSampleLocationsEnableEXT( commandBuffer, sampleLocationsEnable ); - } - - void vkCmdSetColorBlendAdvancedEXT( VkCommandBuffer commandBuffer, - uint32_t firstAttachment, - uint32_t 
attachmentCount, - const VkColorBlendAdvancedEXT * pColorBlendAdvanced ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetColorBlendAdvancedEXT( commandBuffer, firstAttachment, attachmentCount, pColorBlendAdvanced ); - } - - void vkCmdSetProvokingVertexModeEXT( VkCommandBuffer commandBuffer, VkProvokingVertexModeEXT provokingVertexMode ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetProvokingVertexModeEXT( commandBuffer, provokingVertexMode ); - } - - void vkCmdSetLineRasterizationModeEXT( VkCommandBuffer commandBuffer, VkLineRasterizationModeEXT lineRasterizationMode ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetLineRasterizationModeEXT( commandBuffer, lineRasterizationMode ); - } - - void vkCmdSetLineStippleEnableEXT( VkCommandBuffer commandBuffer, VkBool32 stippledLineEnable ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetLineStippleEnableEXT( commandBuffer, stippledLineEnable ); - } - - void vkCmdSetDepthClipNegativeOneToOneEXT( VkCommandBuffer commandBuffer, VkBool32 negativeOneToOne ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetDepthClipNegativeOneToOneEXT( commandBuffer, negativeOneToOne ); - } - - void vkCmdSetViewportWScalingEnableNV( VkCommandBuffer commandBuffer, VkBool32 viewportWScalingEnable ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetViewportWScalingEnableNV( commandBuffer, viewportWScalingEnable ); - } - - void vkCmdSetViewportSwizzleNV( VkCommandBuffer commandBuffer, - uint32_t firstViewport, - uint32_t viewportCount, - const VkViewportSwizzleNV * pViewportSwizzles ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetViewportSwizzleNV( commandBuffer, firstViewport, viewportCount, pViewportSwizzles ); - } - - void vkCmdSetCoverageToColorEnableNV( VkCommandBuffer commandBuffer, VkBool32 coverageToColorEnable ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetCoverageToColorEnableNV( commandBuffer, coverageToColorEnable ); - } - - void vkCmdSetCoverageToColorLocationNV( VkCommandBuffer commandBuffer, uint32_t coverageToColorLocation ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetCoverageToColorLocationNV( commandBuffer, coverageToColorLocation ); - } - - void vkCmdSetCoverageModulationModeNV( VkCommandBuffer commandBuffer, VkCoverageModulationModeNV coverageModulationMode ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetCoverageModulationModeNV( commandBuffer, coverageModulationMode ); - } - - void vkCmdSetCoverageModulationTableEnableNV( VkCommandBuffer commandBuffer, VkBool32 coverageModulationTableEnable ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetCoverageModulationTableEnableNV( commandBuffer, coverageModulationTableEnable ); - } - - void vkCmdSetCoverageModulationTableNV( VkCommandBuffer commandBuffer, - uint32_t coverageModulationTableCount, - const float * pCoverageModulationTable ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetCoverageModulationTableNV( commandBuffer, coverageModulationTableCount, pCoverageModulationTable ); - } - - void vkCmdSetShadingRateImageEnableNV( VkCommandBuffer commandBuffer, VkBool32 shadingRateImageEnable ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetShadingRateImageEnableNV( commandBuffer, shadingRateImageEnable ); - } - - void vkCmdSetRepresentativeFragmentTestEnableNV( VkCommandBuffer commandBuffer, VkBool32 representativeFragmentTestEnable ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetRepresentativeFragmentTestEnableNV( commandBuffer, representativeFragmentTestEnable ); - } - - void vkCmdSetCoverageReductionModeNV( VkCommandBuffer commandBuffer, VkCoverageReductionModeNV 
coverageReductionMode ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetCoverageReductionModeNV( commandBuffer, coverageReductionMode ); - } - - //=== VK_EXT_shader_module_identifier === - - void vkGetShaderModuleIdentifierEXT( VkDevice device, VkShaderModule shaderModule, VkShaderModuleIdentifierEXT * pIdentifier ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetShaderModuleIdentifierEXT( device, shaderModule, pIdentifier ); - } - - void vkGetShaderModuleCreateInfoIdentifierEXT( VkDevice device, - const VkShaderModuleCreateInfo * pCreateInfo, - VkShaderModuleIdentifierEXT * pIdentifier ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetShaderModuleCreateInfoIdentifierEXT( device, pCreateInfo, pIdentifier ); - } - - //=== VK_NV_optical_flow === - - VkResult vkGetPhysicalDeviceOpticalFlowImageFormatsNV( VkPhysicalDevice physicalDevice, - const VkOpticalFlowImageFormatInfoNV * pOpticalFlowImageFormatInfo, - uint32_t * pFormatCount, - VkOpticalFlowImageFormatPropertiesNV * pImageFormatProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceOpticalFlowImageFormatsNV( physicalDevice, pOpticalFlowImageFormatInfo, pFormatCount, pImageFormatProperties ); - } - - VkResult vkCreateOpticalFlowSessionNV( VkDevice device, - const VkOpticalFlowSessionCreateInfoNV * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkOpticalFlowSessionNV * pSession ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateOpticalFlowSessionNV( device, pCreateInfo, pAllocator, pSession ); - } - - void vkDestroyOpticalFlowSessionNV( VkDevice device, VkOpticalFlowSessionNV session, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyOpticalFlowSessionNV( device, session, pAllocator ); - } - - VkResult vkBindOpticalFlowSessionImageNV( VkDevice device, - VkOpticalFlowSessionNV session, - VkOpticalFlowSessionBindingPointNV bindingPoint, - VkImageView view, - VkImageLayout layout ) const VULKAN_HPP_NOEXCEPT - { - return ::vkBindOpticalFlowSessionImageNV( device, session, bindingPoint, view, layout ); - } - - void vkCmdOpticalFlowExecuteNV( VkCommandBuffer commandBuffer, - VkOpticalFlowSessionNV session, - const VkOpticalFlowExecuteInfoNV * pExecuteInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdOpticalFlowExecuteNV( commandBuffer, session, pExecuteInfo ); - } - - //=== VK_KHR_maintenance5 === - - void vkCmdBindIndexBuffer2KHR( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkDeviceSize size, VkIndexType indexType ) const - VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBindIndexBuffer2KHR( commandBuffer, buffer, offset, size, indexType ); - } - - void vkGetRenderingAreaGranularityKHR( VkDevice device, - const VkRenderingAreaInfoKHR * pRenderingAreaInfo, - VkExtent2D * pGranularity ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetRenderingAreaGranularityKHR( device, pRenderingAreaInfo, pGranularity ); - } - - void vkGetDeviceImageSubresourceLayoutKHR( VkDevice device, - const VkDeviceImageSubresourceInfoKHR * pInfo, - VkSubresourceLayout2KHR * pLayout ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDeviceImageSubresourceLayoutKHR( device, pInfo, pLayout ); - } - - void vkGetImageSubresourceLayout2KHR( VkDevice device, - VkImage image, - const VkImageSubresource2KHR * pSubresource, - VkSubresourceLayout2KHR * pLayout ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetImageSubresourceLayout2KHR( device, image, pSubresource, pLayout ); - } - - //=== VK_AMD_anti_lag === - - void vkAntiLagUpdateAMD( VkDevice device, const VkAntiLagDataAMD * pData ) const 
VULKAN_HPP_NOEXCEPT - { - return ::vkAntiLagUpdateAMD( device, pData ); - } - - //=== VK_EXT_shader_object === - - VkResult vkCreateShadersEXT( VkDevice device, - uint32_t createInfoCount, - const VkShaderCreateInfoEXT * pCreateInfos, - const VkAllocationCallbacks * pAllocator, - VkShaderEXT * pShaders ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateShadersEXT( device, createInfoCount, pCreateInfos, pAllocator, pShaders ); - } - - void vkDestroyShaderEXT( VkDevice device, VkShaderEXT shader, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyShaderEXT( device, shader, pAllocator ); - } - - VkResult vkGetShaderBinaryDataEXT( VkDevice device, VkShaderEXT shader, size_t * pDataSize, void * pData ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetShaderBinaryDataEXT( device, shader, pDataSize, pData ); - } - - void vkCmdBindShadersEXT( VkCommandBuffer commandBuffer, - uint32_t stageCount, - const VkShaderStageFlagBits * pStages, - const VkShaderEXT * pShaders ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBindShadersEXT( commandBuffer, stageCount, pStages, pShaders ); - } - - void vkCmdSetDepthClampRangeEXT( VkCommandBuffer commandBuffer, - VkDepthClampModeEXT depthClampMode, - const VkDepthClampRangeEXT * pDepthClampRange ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetDepthClampRangeEXT( commandBuffer, depthClampMode, pDepthClampRange ); - } - - //=== VK_KHR_pipeline_binary === - - VkResult vkCreatePipelineBinariesKHR( VkDevice device, - const VkPipelineBinaryCreateInfoKHR * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkPipelineBinaryHandlesInfoKHR * pBinaries ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreatePipelineBinariesKHR( device, pCreateInfo, pAllocator, pBinaries ); - } - - void vkDestroyPipelineBinaryKHR( VkDevice device, VkPipelineBinaryKHR pipelineBinary, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyPipelineBinaryKHR( device, pipelineBinary, pAllocator ); - } - - VkResult vkGetPipelineKeyKHR( VkDevice device, - const VkPipelineCreateInfoKHR * pPipelineCreateInfo, - VkPipelineBinaryKeyKHR * pPipelineKey ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPipelineKeyKHR( device, pPipelineCreateInfo, pPipelineKey ); - } - - VkResult vkGetPipelineBinaryDataKHR( VkDevice device, - const VkPipelineBinaryDataInfoKHR * pInfo, - VkPipelineBinaryKeyKHR * pPipelineBinaryKey, - size_t * pPipelineBinaryDataSize, - void * pPipelineBinaryData ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPipelineBinaryDataKHR( device, pInfo, pPipelineBinaryKey, pPipelineBinaryDataSize, pPipelineBinaryData ); - } - - VkResult vkReleaseCapturedPipelineDataKHR( VkDevice device, - const VkReleaseCapturedPipelineDataInfoKHR * pInfo, - const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkReleaseCapturedPipelineDataKHR( device, pInfo, pAllocator ); - } - - //=== VK_QCOM_tile_properties === - - VkResult vkGetFramebufferTilePropertiesQCOM( VkDevice device, - VkFramebuffer framebuffer, - uint32_t * pPropertiesCount, - VkTilePropertiesQCOM * pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetFramebufferTilePropertiesQCOM( device, framebuffer, pPropertiesCount, pProperties ); - } - - VkResult vkGetDynamicRenderingTilePropertiesQCOM( VkDevice device, - const VkRenderingInfo * pRenderingInfo, - VkTilePropertiesQCOM * pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDynamicRenderingTilePropertiesQCOM( device, pRenderingInfo, pProperties ); - } - - //=== 
VK_NV_low_latency2 === - - VkResult vkSetLatencySleepModeNV( VkDevice device, VkSwapchainKHR swapchain, const VkLatencySleepModeInfoNV * pSleepModeInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkSetLatencySleepModeNV( device, swapchain, pSleepModeInfo ); - } - - VkResult vkLatencySleepNV( VkDevice device, VkSwapchainKHR swapchain, const VkLatencySleepInfoNV * pSleepInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkLatencySleepNV( device, swapchain, pSleepInfo ); - } - - void vkSetLatencyMarkerNV( VkDevice device, VkSwapchainKHR swapchain, const VkSetLatencyMarkerInfoNV * pLatencyMarkerInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkSetLatencyMarkerNV( device, swapchain, pLatencyMarkerInfo ); - } - - void vkGetLatencyTimingsNV( VkDevice device, VkSwapchainKHR swapchain, VkGetLatencyMarkerInfoNV * pLatencyMarkerInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetLatencyTimingsNV( device, swapchain, pLatencyMarkerInfo ); - } - - void vkQueueNotifyOutOfBandNV( VkQueue queue, const VkOutOfBandQueueTypeInfoNV * pQueueTypeInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkQueueNotifyOutOfBandNV( queue, pQueueTypeInfo ); - } - - //=== VK_KHR_cooperative_matrix === - - VkResult vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR( VkPhysicalDevice physicalDevice, - uint32_t * pPropertyCount, - VkCooperativeMatrixPropertiesKHR * pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR( physicalDevice, pPropertyCount, pProperties ); - } - - //=== VK_EXT_attachment_feedback_loop_dynamic_state === - - void vkCmdSetAttachmentFeedbackLoopEnableEXT( VkCommandBuffer commandBuffer, VkImageAspectFlags aspectMask ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetAttachmentFeedbackLoopEnableEXT( commandBuffer, aspectMask ); - } - -# if defined( VK_USE_PLATFORM_SCREEN_QNX ) - //=== VK_QNX_external_memory_screen_buffer === - - VkResult vkGetScreenBufferPropertiesQNX( VkDevice device, - const struct _screen_buffer * buffer, - VkScreenBufferPropertiesQNX * pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetScreenBufferPropertiesQNX( device, buffer, pProperties ); - } -# endif /*VK_USE_PLATFORM_SCREEN_QNX*/ - - //=== VK_KHR_line_rasterization === - - void vkCmdSetLineStippleKHR( VkCommandBuffer commandBuffer, uint32_t lineStippleFactor, uint16_t lineStipplePattern ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetLineStippleKHR( commandBuffer, lineStippleFactor, lineStipplePattern ); - } - - //=== VK_KHR_calibrated_timestamps === - - VkResult vkGetPhysicalDeviceCalibrateableTimeDomainsKHR( VkPhysicalDevice physicalDevice, - uint32_t * pTimeDomainCount, - VkTimeDomainKHR * pTimeDomains ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceCalibrateableTimeDomainsKHR( physicalDevice, pTimeDomainCount, pTimeDomains ); - } - - VkResult vkGetCalibratedTimestampsKHR( VkDevice device, - uint32_t timestampCount, - const VkCalibratedTimestampInfoKHR * pTimestampInfos, - uint64_t * pTimestamps, - uint64_t * pMaxDeviation ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetCalibratedTimestampsKHR( device, timestampCount, pTimestampInfos, pTimestamps, pMaxDeviation ); - } - - //=== VK_KHR_maintenance6 === - - void vkCmdBindDescriptorSets2KHR( VkCommandBuffer commandBuffer, const VkBindDescriptorSetsInfoKHR * pBindDescriptorSetsInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBindDescriptorSets2KHR( commandBuffer, pBindDescriptorSetsInfo ); - } - - void vkCmdPushConstants2KHR( VkCommandBuffer commandBuffer, const VkPushConstantsInfoKHR * pPushConstantsInfo ) 
const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdPushConstants2KHR( commandBuffer, pPushConstantsInfo ); - } - - void vkCmdPushDescriptorSet2KHR( VkCommandBuffer commandBuffer, const VkPushDescriptorSetInfoKHR * pPushDescriptorSetInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdPushDescriptorSet2KHR( commandBuffer, pPushDescriptorSetInfo ); - } - - void vkCmdPushDescriptorSetWithTemplate2KHR( VkCommandBuffer commandBuffer, - const VkPushDescriptorSetWithTemplateInfoKHR * pPushDescriptorSetWithTemplateInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdPushDescriptorSetWithTemplate2KHR( commandBuffer, pPushDescriptorSetWithTemplateInfo ); - } - - void vkCmdSetDescriptorBufferOffsets2EXT( VkCommandBuffer commandBuffer, - const VkSetDescriptorBufferOffsetsInfoEXT * pSetDescriptorBufferOffsetsInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetDescriptorBufferOffsets2EXT( commandBuffer, pSetDescriptorBufferOffsetsInfo ); - } - - void vkCmdBindDescriptorBufferEmbeddedSamplers2EXT( - VkCommandBuffer commandBuffer, const VkBindDescriptorBufferEmbeddedSamplersInfoEXT * pBindDescriptorBufferEmbeddedSamplersInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBindDescriptorBufferEmbeddedSamplers2EXT( commandBuffer, pBindDescriptorBufferEmbeddedSamplersInfo ); - } - - //=== VK_EXT_device_generated_commands === - - void vkGetGeneratedCommandsMemoryRequirementsEXT( VkDevice device, - const VkGeneratedCommandsMemoryRequirementsInfoEXT * pInfo, - VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetGeneratedCommandsMemoryRequirementsEXT( device, pInfo, pMemoryRequirements ); - } - - void vkCmdPreprocessGeneratedCommandsEXT( VkCommandBuffer commandBuffer, - const VkGeneratedCommandsInfoEXT * pGeneratedCommandsInfo, - VkCommandBuffer stateCommandBuffer ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdPreprocessGeneratedCommandsEXT( commandBuffer, pGeneratedCommandsInfo, stateCommandBuffer ); - } - - void vkCmdExecuteGeneratedCommandsEXT( VkCommandBuffer commandBuffer, - VkBool32 isPreprocessed, - const VkGeneratedCommandsInfoEXT * pGeneratedCommandsInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdExecuteGeneratedCommandsEXT( commandBuffer, isPreprocessed, pGeneratedCommandsInfo ); - } - - VkResult vkCreateIndirectCommandsLayoutEXT( VkDevice device, - const VkIndirectCommandsLayoutCreateInfoEXT * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkIndirectCommandsLayoutEXT * pIndirectCommandsLayout ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateIndirectCommandsLayoutEXT( device, pCreateInfo, pAllocator, pIndirectCommandsLayout ); - } - - void vkDestroyIndirectCommandsLayoutEXT( VkDevice device, - VkIndirectCommandsLayoutEXT indirectCommandsLayout, + //=== VK_FUCHSIA_buffer_collection === + + VkResult vkCreateBufferCollectionFUCHSIA( VkDevice device, + const VkBufferCollectionCreateInfoFUCHSIA * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkBufferCollectionFUCHSIA * pCollection ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateBufferCollectionFUCHSIA( device, pCreateInfo, pAllocator, pCollection ); + } + + VkResult vkSetBufferCollectionImageConstraintsFUCHSIA( VkDevice device, + VkBufferCollectionFUCHSIA collection, + const VkImageConstraintsInfoFUCHSIA * pImageConstraintsInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkSetBufferCollectionImageConstraintsFUCHSIA( device, collection, pImageConstraintsInfo ); + } + + VkResult vkSetBufferCollectionBufferConstraintsFUCHSIA( VkDevice device, + VkBufferCollectionFUCHSIA collection, + 
const VkBufferConstraintsInfoFUCHSIA * pBufferConstraintsInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkSetBufferCollectionBufferConstraintsFUCHSIA( device, collection, pBufferConstraintsInfo ); + } + + void vkDestroyBufferCollectionFUCHSIA( VkDevice device, + VkBufferCollectionFUCHSIA collection, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyBufferCollectionFUCHSIA( device, collection, pAllocator ); + } + + VkResult vkGetBufferCollectionPropertiesFUCHSIA( VkDevice device, + VkBufferCollectionFUCHSIA collection, + VkBufferCollectionPropertiesFUCHSIA * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetBufferCollectionPropertiesFUCHSIA( device, collection, pProperties ); + } +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + + //=== VK_HUAWEI_subpass_shading === + + VkResult + vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI( VkDevice device, VkRenderPass renderpass, VkExtent2D * pMaxWorkgroupSize ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI( device, renderpass, pMaxWorkgroupSize ); + } + + void vkCmdSubpassShadingHUAWEI( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSubpassShadingHUAWEI( commandBuffer ); + } + + //=== VK_HUAWEI_invocation_mask === + + void vkCmdBindInvocationMaskHUAWEI( VkCommandBuffer commandBuffer, VkImageView imageView, VkImageLayout imageLayout ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBindInvocationMaskHUAWEI( commandBuffer, imageView, imageLayout ); + } + + //=== VK_NV_external_memory_rdma === + + VkResult vkGetMemoryRemoteAddressNV( VkDevice device, + const VkMemoryGetRemoteAddressInfoNV * pMemoryGetRemoteAddressInfo, + VkRemoteAddressNV * pAddress ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetMemoryRemoteAddressNV( device, pMemoryGetRemoteAddressInfo, pAddress ); + } + + //=== VK_EXT_pipeline_properties === + + VkResult vkGetPipelinePropertiesEXT( VkDevice device, + const VkPipelineInfoEXT * pPipelineInfo, + VkBaseOutStructure * pPipelineProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPipelinePropertiesEXT( device, pPipelineInfo, pPipelineProperties ); + } + + //=== VK_EXT_extended_dynamic_state2 === + + void vkCmdSetPatchControlPointsEXT( VkCommandBuffer commandBuffer, uint32_t patchControlPoints ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetPatchControlPointsEXT( commandBuffer, patchControlPoints ); + } + + void vkCmdSetRasterizerDiscardEnableEXT( VkCommandBuffer commandBuffer, VkBool32 rasterizerDiscardEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetRasterizerDiscardEnableEXT( commandBuffer, rasterizerDiscardEnable ); + } + + void vkCmdSetDepthBiasEnableEXT( VkCommandBuffer commandBuffer, VkBool32 depthBiasEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDepthBiasEnableEXT( commandBuffer, depthBiasEnable ); + } + + void vkCmdSetLogicOpEXT( VkCommandBuffer commandBuffer, VkLogicOp logicOp ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetLogicOpEXT( commandBuffer, logicOp ); + } + + void vkCmdSetPrimitiveRestartEnableEXT( VkCommandBuffer commandBuffer, VkBool32 primitiveRestartEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetPrimitiveRestartEnableEXT( commandBuffer, primitiveRestartEnable ); + } + +# if defined( VK_USE_PLATFORM_SCREEN_QNX ) + //=== VK_QNX_screen_surface === + + VkResult vkCreateScreenSurfaceQNX( VkInstance instance, + const VkScreenSurfaceCreateInfoQNX * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT + { + 
return ::vkCreateScreenSurfaceQNX( instance, pCreateInfo, pAllocator, pSurface ); + } + + VkBool32 vkGetPhysicalDeviceScreenPresentationSupportQNX( VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + struct _screen_window * window ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceScreenPresentationSupportQNX( physicalDevice, queueFamilyIndex, window ); + } +# endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + //=== VK_EXT_color_write_enable === + + void vkCmdSetColorWriteEnableEXT( VkCommandBuffer commandBuffer, uint32_t attachmentCount, const VkBool32 * pColorWriteEnables ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetColorWriteEnableEXT( commandBuffer, attachmentCount, pColorWriteEnables ); + } + + //=== VK_KHR_ray_tracing_maintenance1 === + + void vkCmdTraceRaysIndirect2KHR( VkCommandBuffer commandBuffer, VkDeviceAddress indirectDeviceAddress ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdTraceRaysIndirect2KHR( commandBuffer, indirectDeviceAddress ); + } + + //=== VK_EXT_multi_draw === + + void vkCmdDrawMultiEXT( VkCommandBuffer commandBuffer, + uint32_t drawCount, + const VkMultiDrawInfoEXT * pVertexInfo, + uint32_t instanceCount, + uint32_t firstInstance, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawMultiEXT( commandBuffer, drawCount, pVertexInfo, instanceCount, firstInstance, stride ); + } + + void vkCmdDrawMultiIndexedEXT( VkCommandBuffer commandBuffer, + uint32_t drawCount, + const VkMultiDrawIndexedInfoEXT * pIndexInfo, + uint32_t instanceCount, + uint32_t firstInstance, + uint32_t stride, + const int32_t * pVertexOffset ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawMultiIndexedEXT( commandBuffer, drawCount, pIndexInfo, instanceCount, firstInstance, stride, pVertexOffset ); + } + + //=== VK_EXT_opacity_micromap === + + VkResult vkCreateMicromapEXT( VkDevice device, + const VkMicromapCreateInfoEXT * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkMicromapEXT * pMicromap ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateMicromapEXT( device, pCreateInfo, pAllocator, pMicromap ); + } + + void vkDestroyMicromapEXT( VkDevice device, VkMicromapEXT micromap, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyMicromapEXT( device, micromap, pAllocator ); + } + + void vkCmdBuildMicromapsEXT( VkCommandBuffer commandBuffer, uint32_t infoCount, const VkMicromapBuildInfoEXT * pInfos ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBuildMicromapsEXT( commandBuffer, infoCount, pInfos ); + } + + VkResult vkBuildMicromapsEXT( VkDevice device, + VkDeferredOperationKHR deferredOperation, + uint32_t infoCount, + const VkMicromapBuildInfoEXT * pInfos ) const VULKAN_HPP_NOEXCEPT + { + return ::vkBuildMicromapsEXT( device, deferredOperation, infoCount, pInfos ); + } + + VkResult vkCopyMicromapEXT( VkDevice device, VkDeferredOperationKHR deferredOperation, const VkCopyMicromapInfoEXT * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCopyMicromapEXT( device, deferredOperation, pInfo ); + } + + VkResult vkCopyMicromapToMemoryEXT( VkDevice device, + VkDeferredOperationKHR deferredOperation, + const VkCopyMicromapToMemoryInfoEXT * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCopyMicromapToMemoryEXT( device, deferredOperation, pInfo ); + } + + VkResult vkCopyMemoryToMicromapEXT( VkDevice device, + VkDeferredOperationKHR deferredOperation, + const VkCopyMemoryToMicromapInfoEXT * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCopyMemoryToMicromapEXT( device, deferredOperation, pInfo ); + } + + 
VkResult vkWriteMicromapsPropertiesEXT( VkDevice device, + uint32_t micromapCount, + const VkMicromapEXT * pMicromaps, + VkQueryType queryType, + size_t dataSize, + void * pData, + size_t stride ) const VULKAN_HPP_NOEXCEPT + { + return ::vkWriteMicromapsPropertiesEXT( device, micromapCount, pMicromaps, queryType, dataSize, pData, stride ); + } + + void vkCmdCopyMicromapEXT( VkCommandBuffer commandBuffer, const VkCopyMicromapInfoEXT * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyMicromapEXT( commandBuffer, pInfo ); + } + + void vkCmdCopyMicromapToMemoryEXT( VkCommandBuffer commandBuffer, const VkCopyMicromapToMemoryInfoEXT * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyMicromapToMemoryEXT( commandBuffer, pInfo ); + } + + void vkCmdCopyMemoryToMicromapEXT( VkCommandBuffer commandBuffer, const VkCopyMemoryToMicromapInfoEXT * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyMemoryToMicromapEXT( commandBuffer, pInfo ); + } + + void vkCmdWriteMicromapsPropertiesEXT( VkCommandBuffer commandBuffer, + uint32_t micromapCount, + const VkMicromapEXT * pMicromaps, + VkQueryType queryType, + VkQueryPool queryPool, + uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdWriteMicromapsPropertiesEXT( commandBuffer, micromapCount, pMicromaps, queryType, queryPool, firstQuery ); + } + + void vkGetDeviceMicromapCompatibilityEXT( VkDevice device, + const VkMicromapVersionInfoEXT * pVersionInfo, + VkAccelerationStructureCompatibilityKHR * pCompatibility ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceMicromapCompatibilityEXT( device, pVersionInfo, pCompatibility ); + } + + void vkGetMicromapBuildSizesEXT( VkDevice device, + VkAccelerationStructureBuildTypeKHR buildType, + const VkMicromapBuildInfoEXT * pBuildInfo, + VkMicromapBuildSizesInfoEXT * pSizeInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetMicromapBuildSizesEXT( device, buildType, pBuildInfo, pSizeInfo ); + } + + //=== VK_HUAWEI_cluster_culling_shader === + + void vkCmdDrawClusterHUAWEI( VkCommandBuffer commandBuffer, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawClusterHUAWEI( commandBuffer, groupCountX, groupCountY, groupCountZ ); + } + + void vkCmdDrawClusterIndirectHUAWEI( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDrawClusterIndirectHUAWEI( commandBuffer, buffer, offset ); + } + + //=== VK_EXT_pageable_device_local_memory === + + void vkSetDeviceMemoryPriorityEXT( VkDevice device, VkDeviceMemory memory, float priority ) const VULKAN_HPP_NOEXCEPT + { + return ::vkSetDeviceMemoryPriorityEXT( device, memory, priority ); + } + + //=== VK_KHR_maintenance4 === + + void vkGetDeviceBufferMemoryRequirementsKHR( VkDevice device, + const VkDeviceBufferMemoryRequirements * pInfo, + VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceBufferMemoryRequirementsKHR( device, pInfo, pMemoryRequirements ); + } + + void vkGetDeviceImageMemoryRequirementsKHR( VkDevice device, + const VkDeviceImageMemoryRequirements * pInfo, + VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceImageMemoryRequirementsKHR( device, pInfo, pMemoryRequirements ); + } + + void vkGetDeviceImageSparseMemoryRequirementsKHR( VkDevice device, + const VkDeviceImageMemoryRequirements * pInfo, + uint32_t * pSparseMemoryRequirementCount, + VkSparseImageMemoryRequirements2 * pSparseMemoryRequirements ) const 
VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceImageSparseMemoryRequirementsKHR( device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements ); + } + + //=== VK_VALVE_descriptor_set_host_mapping === + + void vkGetDescriptorSetLayoutHostMappingInfoVALVE( VkDevice device, + const VkDescriptorSetBindingReferenceVALVE * pBindingReference, + VkDescriptorSetLayoutHostMappingInfoVALVE * pHostMapping ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDescriptorSetLayoutHostMappingInfoVALVE( device, pBindingReference, pHostMapping ); + } + + void vkGetDescriptorSetHostMappingVALVE( VkDevice device, VkDescriptorSet descriptorSet, void ** ppData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDescriptorSetHostMappingVALVE( device, descriptorSet, ppData ); + } + + //=== VK_NV_copy_memory_indirect === + + void vkCmdCopyMemoryIndirectNV( VkCommandBuffer commandBuffer, + VkDeviceAddress copyBufferAddress, + uint32_t copyCount, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyMemoryIndirectNV( commandBuffer, copyBufferAddress, copyCount, stride ); + } + + void vkCmdCopyMemoryToImageIndirectNV( VkCommandBuffer commandBuffer, + VkDeviceAddress copyBufferAddress, + uint32_t copyCount, + uint32_t stride, + VkImage dstImage, + VkImageLayout dstImageLayout, + const VkImageSubresourceLayers * pImageSubresources ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdCopyMemoryToImageIndirectNV( commandBuffer, copyBufferAddress, copyCount, stride, dstImage, dstImageLayout, pImageSubresources ); + } + + //=== VK_NV_memory_decompression === + + void vkCmdDecompressMemoryNV( VkCommandBuffer commandBuffer, + uint32_t decompressRegionCount, + const VkDecompressMemoryRegionNV * pDecompressMemoryRegions ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDecompressMemoryNV( commandBuffer, decompressRegionCount, pDecompressMemoryRegions ); + } + + void vkCmdDecompressMemoryIndirectCountNV( VkCommandBuffer commandBuffer, + VkDeviceAddress indirectCommandsAddress, + VkDeviceAddress indirectCommandsCountAddress, + uint32_t stride ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdDecompressMemoryIndirectCountNV( commandBuffer, indirectCommandsAddress, indirectCommandsCountAddress, stride ); + } + + //=== VK_NV_device_generated_commands_compute === + + void vkGetPipelineIndirectMemoryRequirementsNV( VkDevice device, + const VkComputePipelineCreateInfo * pCreateInfo, + VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPipelineIndirectMemoryRequirementsNV( device, pCreateInfo, pMemoryRequirements ); + } + + void vkCmdUpdatePipelineIndirectBufferNV( VkCommandBuffer commandBuffer, + VkPipelineBindPoint pipelineBindPoint, + VkPipeline pipeline ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdUpdatePipelineIndirectBufferNV( commandBuffer, pipelineBindPoint, pipeline ); + } + + VkDeviceAddress vkGetPipelineIndirectDeviceAddressNV( VkDevice device, const VkPipelineIndirectDeviceAddressInfoNV * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPipelineIndirectDeviceAddressNV( device, pInfo ); + } + + //=== VK_EXT_extended_dynamic_state3 === + + void vkCmdSetDepthClampEnableEXT( VkCommandBuffer commandBuffer, VkBool32 depthClampEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDepthClampEnableEXT( commandBuffer, depthClampEnable ); + } + + void vkCmdSetPolygonModeEXT( VkCommandBuffer commandBuffer, VkPolygonMode polygonMode ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetPolygonModeEXT( commandBuffer, polygonMode ); + } + + void vkCmdSetRasterizationSamplesEXT( 
VkCommandBuffer commandBuffer, VkSampleCountFlagBits rasterizationSamples ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetRasterizationSamplesEXT( commandBuffer, rasterizationSamples ); + } + + void vkCmdSetSampleMaskEXT( VkCommandBuffer commandBuffer, VkSampleCountFlagBits samples, const VkSampleMask * pSampleMask ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetSampleMaskEXT( commandBuffer, samples, pSampleMask ); + } + + void vkCmdSetAlphaToCoverageEnableEXT( VkCommandBuffer commandBuffer, VkBool32 alphaToCoverageEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetAlphaToCoverageEnableEXT( commandBuffer, alphaToCoverageEnable ); + } + + void vkCmdSetAlphaToOneEnableEXT( VkCommandBuffer commandBuffer, VkBool32 alphaToOneEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetAlphaToOneEnableEXT( commandBuffer, alphaToOneEnable ); + } + + void vkCmdSetLogicOpEnableEXT( VkCommandBuffer commandBuffer, VkBool32 logicOpEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetLogicOpEnableEXT( commandBuffer, logicOpEnable ); + } + + void vkCmdSetColorBlendEnableEXT( VkCommandBuffer commandBuffer, + uint32_t firstAttachment, + uint32_t attachmentCount, + const VkBool32 * pColorBlendEnables ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetColorBlendEnableEXT( commandBuffer, firstAttachment, attachmentCount, pColorBlendEnables ); + } + + void vkCmdSetColorBlendEquationEXT( VkCommandBuffer commandBuffer, + uint32_t firstAttachment, + uint32_t attachmentCount, + const VkColorBlendEquationEXT * pColorBlendEquations ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetColorBlendEquationEXT( commandBuffer, firstAttachment, attachmentCount, pColorBlendEquations ); + } + + void vkCmdSetColorWriteMaskEXT( VkCommandBuffer commandBuffer, + uint32_t firstAttachment, + uint32_t attachmentCount, + const VkColorComponentFlags * pColorWriteMasks ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetColorWriteMaskEXT( commandBuffer, firstAttachment, attachmentCount, pColorWriteMasks ); + } + + void vkCmdSetTessellationDomainOriginEXT( VkCommandBuffer commandBuffer, VkTessellationDomainOrigin domainOrigin ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetTessellationDomainOriginEXT( commandBuffer, domainOrigin ); + } + + void vkCmdSetRasterizationStreamEXT( VkCommandBuffer commandBuffer, uint32_t rasterizationStream ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetRasterizationStreamEXT( commandBuffer, rasterizationStream ); + } + + void vkCmdSetConservativeRasterizationModeEXT( VkCommandBuffer commandBuffer, + VkConservativeRasterizationModeEXT conservativeRasterizationMode ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetConservativeRasterizationModeEXT( commandBuffer, conservativeRasterizationMode ); + } + + void vkCmdSetExtraPrimitiveOverestimationSizeEXT( VkCommandBuffer commandBuffer, float extraPrimitiveOverestimationSize ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetExtraPrimitiveOverestimationSizeEXT( commandBuffer, extraPrimitiveOverestimationSize ); + } + + void vkCmdSetDepthClipEnableEXT( VkCommandBuffer commandBuffer, VkBool32 depthClipEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDepthClipEnableEXT( commandBuffer, depthClipEnable ); + } + + void vkCmdSetSampleLocationsEnableEXT( VkCommandBuffer commandBuffer, VkBool32 sampleLocationsEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetSampleLocationsEnableEXT( commandBuffer, sampleLocationsEnable ); + } + + void vkCmdSetColorBlendAdvancedEXT( VkCommandBuffer commandBuffer, + uint32_t firstAttachment, + uint32_t 
attachmentCount, + const VkColorBlendAdvancedEXT * pColorBlendAdvanced ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetColorBlendAdvancedEXT( commandBuffer, firstAttachment, attachmentCount, pColorBlendAdvanced ); + } + + void vkCmdSetProvokingVertexModeEXT( VkCommandBuffer commandBuffer, VkProvokingVertexModeEXT provokingVertexMode ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetProvokingVertexModeEXT( commandBuffer, provokingVertexMode ); + } + + void vkCmdSetLineRasterizationModeEXT( VkCommandBuffer commandBuffer, VkLineRasterizationModeEXT lineRasterizationMode ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetLineRasterizationModeEXT( commandBuffer, lineRasterizationMode ); + } + + void vkCmdSetLineStippleEnableEXT( VkCommandBuffer commandBuffer, VkBool32 stippledLineEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetLineStippleEnableEXT( commandBuffer, stippledLineEnable ); + } + + void vkCmdSetDepthClipNegativeOneToOneEXT( VkCommandBuffer commandBuffer, VkBool32 negativeOneToOne ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDepthClipNegativeOneToOneEXT( commandBuffer, negativeOneToOne ); + } + + void vkCmdSetViewportWScalingEnableNV( VkCommandBuffer commandBuffer, VkBool32 viewportWScalingEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetViewportWScalingEnableNV( commandBuffer, viewportWScalingEnable ); + } + + void vkCmdSetViewportSwizzleNV( VkCommandBuffer commandBuffer, + uint32_t firstViewport, + uint32_t viewportCount, + const VkViewportSwizzleNV * pViewportSwizzles ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetViewportSwizzleNV( commandBuffer, firstViewport, viewportCount, pViewportSwizzles ); + } + + void vkCmdSetCoverageToColorEnableNV( VkCommandBuffer commandBuffer, VkBool32 coverageToColorEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetCoverageToColorEnableNV( commandBuffer, coverageToColorEnable ); + } + + void vkCmdSetCoverageToColorLocationNV( VkCommandBuffer commandBuffer, uint32_t coverageToColorLocation ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetCoverageToColorLocationNV( commandBuffer, coverageToColorLocation ); + } + + void vkCmdSetCoverageModulationModeNV( VkCommandBuffer commandBuffer, VkCoverageModulationModeNV coverageModulationMode ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetCoverageModulationModeNV( commandBuffer, coverageModulationMode ); + } + + void vkCmdSetCoverageModulationTableEnableNV( VkCommandBuffer commandBuffer, VkBool32 coverageModulationTableEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetCoverageModulationTableEnableNV( commandBuffer, coverageModulationTableEnable ); + } + + void vkCmdSetCoverageModulationTableNV( VkCommandBuffer commandBuffer, + uint32_t coverageModulationTableCount, + const float * pCoverageModulationTable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetCoverageModulationTableNV( commandBuffer, coverageModulationTableCount, pCoverageModulationTable ); + } + + void vkCmdSetShadingRateImageEnableNV( VkCommandBuffer commandBuffer, VkBool32 shadingRateImageEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetShadingRateImageEnableNV( commandBuffer, shadingRateImageEnable ); + } + + void vkCmdSetRepresentativeFragmentTestEnableNV( VkCommandBuffer commandBuffer, VkBool32 representativeFragmentTestEnable ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetRepresentativeFragmentTestEnableNV( commandBuffer, representativeFragmentTestEnable ); + } + + void vkCmdSetCoverageReductionModeNV( VkCommandBuffer commandBuffer, VkCoverageReductionModeNV 
coverageReductionMode ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetCoverageReductionModeNV( commandBuffer, coverageReductionMode ); + } + + //=== VK_EXT_shader_module_identifier === + + void vkGetShaderModuleIdentifierEXT( VkDevice device, VkShaderModule shaderModule, VkShaderModuleIdentifierEXT * pIdentifier ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetShaderModuleIdentifierEXT( device, shaderModule, pIdentifier ); + } + + void vkGetShaderModuleCreateInfoIdentifierEXT( VkDevice device, + const VkShaderModuleCreateInfo * pCreateInfo, + VkShaderModuleIdentifierEXT * pIdentifier ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetShaderModuleCreateInfoIdentifierEXT( device, pCreateInfo, pIdentifier ); + } + + //=== VK_NV_optical_flow === + + VkResult vkGetPhysicalDeviceOpticalFlowImageFormatsNV( VkPhysicalDevice physicalDevice, + const VkOpticalFlowImageFormatInfoNV * pOpticalFlowImageFormatInfo, + uint32_t * pFormatCount, + VkOpticalFlowImageFormatPropertiesNV * pImageFormatProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceOpticalFlowImageFormatsNV( physicalDevice, pOpticalFlowImageFormatInfo, pFormatCount, pImageFormatProperties ); + } + + VkResult vkCreateOpticalFlowSessionNV( VkDevice device, + const VkOpticalFlowSessionCreateInfoNV * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkOpticalFlowSessionNV * pSession ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateOpticalFlowSessionNV( device, pCreateInfo, pAllocator, pSession ); + } + + void vkDestroyOpticalFlowSessionNV( VkDevice device, VkOpticalFlowSessionNV session, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyOpticalFlowSessionNV( device, session, pAllocator ); + } + + VkResult vkBindOpticalFlowSessionImageNV( VkDevice device, + VkOpticalFlowSessionNV session, + VkOpticalFlowSessionBindingPointNV bindingPoint, + VkImageView view, + VkImageLayout layout ) const VULKAN_HPP_NOEXCEPT + { + return ::vkBindOpticalFlowSessionImageNV( device, session, bindingPoint, view, layout ); + } + + void vkCmdOpticalFlowExecuteNV( VkCommandBuffer commandBuffer, + VkOpticalFlowSessionNV session, + const VkOpticalFlowExecuteInfoNV * pExecuteInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdOpticalFlowExecuteNV( commandBuffer, session, pExecuteInfo ); + } + + //=== VK_KHR_maintenance5 === + + void vkCmdBindIndexBuffer2KHR( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkDeviceSize size, VkIndexType indexType ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBindIndexBuffer2KHR( commandBuffer, buffer, offset, size, indexType ); + } + + void vkGetRenderingAreaGranularityKHR( VkDevice device, + const VkRenderingAreaInfoKHR * pRenderingAreaInfo, + VkExtent2D * pGranularity ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetRenderingAreaGranularityKHR( device, pRenderingAreaInfo, pGranularity ); + } + + void vkGetDeviceImageSubresourceLayoutKHR( VkDevice device, + const VkDeviceImageSubresourceInfoKHR * pInfo, + VkSubresourceLayout2KHR * pLayout ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceImageSubresourceLayoutKHR( device, pInfo, pLayout ); + } + + void vkGetImageSubresourceLayout2KHR( VkDevice device, + VkImage image, + const VkImageSubresource2KHR * pSubresource, + VkSubresourceLayout2KHR * pLayout ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetImageSubresourceLayout2KHR( device, image, pSubresource, pLayout ); + } + + //=== VK_AMD_anti_lag === + + void vkAntiLagUpdateAMD( VkDevice device, const VkAntiLagDataAMD * pData ) const 
VULKAN_HPP_NOEXCEPT + { + return ::vkAntiLagUpdateAMD( device, pData ); + } + + //=== VK_EXT_shader_object === + + VkResult vkCreateShadersEXT( VkDevice device, + uint32_t createInfoCount, + const VkShaderCreateInfoEXT * pCreateInfos, + const VkAllocationCallbacks * pAllocator, + VkShaderEXT * pShaders ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateShadersEXT( device, createInfoCount, pCreateInfos, pAllocator, pShaders ); + } + + void vkDestroyShaderEXT( VkDevice device, VkShaderEXT shader, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyShaderEXT( device, shader, pAllocator ); + } + + VkResult vkGetShaderBinaryDataEXT( VkDevice device, VkShaderEXT shader, size_t * pDataSize, void * pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetShaderBinaryDataEXT( device, shader, pDataSize, pData ); + } + + void vkCmdBindShadersEXT( VkCommandBuffer commandBuffer, + uint32_t stageCount, + const VkShaderStageFlagBits * pStages, + const VkShaderEXT * pShaders ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBindShadersEXT( commandBuffer, stageCount, pStages, pShaders ); + } + + void vkCmdSetDepthClampRangeEXT( VkCommandBuffer commandBuffer, + VkDepthClampModeEXT depthClampMode, + const VkDepthClampRangeEXT * pDepthClampRange ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDepthClampRangeEXT( commandBuffer, depthClampMode, pDepthClampRange ); + } + + //=== VK_KHR_pipeline_binary === + + VkResult vkCreatePipelineBinariesKHR( VkDevice device, + const VkPipelineBinaryCreateInfoKHR * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkPipelineBinaryHandlesInfoKHR * pBinaries ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreatePipelineBinariesKHR( device, pCreateInfo, pAllocator, pBinaries ); + } + + void vkDestroyPipelineBinaryKHR( VkDevice device, VkPipelineBinaryKHR pipelineBinary, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyPipelineBinaryKHR( device, pipelineBinary, pAllocator ); + } + + VkResult vkGetPipelineKeyKHR( VkDevice device, + const VkPipelineCreateInfoKHR * pPipelineCreateInfo, + VkPipelineBinaryKeyKHR * pPipelineKey ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPipelineKeyKHR( device, pPipelineCreateInfo, pPipelineKey ); + } + + VkResult vkGetPipelineBinaryDataKHR( VkDevice device, + const VkPipelineBinaryDataInfoKHR * pInfo, + VkPipelineBinaryKeyKHR * pPipelineBinaryKey, + size_t * pPipelineBinaryDataSize, + void * pPipelineBinaryData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPipelineBinaryDataKHR( device, pInfo, pPipelineBinaryKey, pPipelineBinaryDataSize, pPipelineBinaryData ); + } + + VkResult vkReleaseCapturedPipelineDataKHR( VkDevice device, + const VkReleaseCapturedPipelineDataInfoKHR * pInfo, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkReleaseCapturedPipelineDataKHR( device, pInfo, pAllocator ); + } + + //=== VK_QCOM_tile_properties === + + VkResult vkGetFramebufferTilePropertiesQCOM( VkDevice device, + VkFramebuffer framebuffer, + uint32_t * pPropertiesCount, + VkTilePropertiesQCOM * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetFramebufferTilePropertiesQCOM( device, framebuffer, pPropertiesCount, pProperties ); + } + + VkResult vkGetDynamicRenderingTilePropertiesQCOM( VkDevice device, + const VkRenderingInfo * pRenderingInfo, + VkTilePropertiesQCOM * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDynamicRenderingTilePropertiesQCOM( device, pRenderingInfo, pProperties ); + } + + //=== 
VK_NV_low_latency2 === + + VkResult vkSetLatencySleepModeNV( VkDevice device, VkSwapchainKHR swapchain, const VkLatencySleepModeInfoNV * pSleepModeInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkSetLatencySleepModeNV( device, swapchain, pSleepModeInfo ); + } + + VkResult vkLatencySleepNV( VkDevice device, VkSwapchainKHR swapchain, const VkLatencySleepInfoNV * pSleepInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkLatencySleepNV( device, swapchain, pSleepInfo ); + } + + void vkSetLatencyMarkerNV( VkDevice device, VkSwapchainKHR swapchain, const VkSetLatencyMarkerInfoNV * pLatencyMarkerInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkSetLatencyMarkerNV( device, swapchain, pLatencyMarkerInfo ); + } + + void vkGetLatencyTimingsNV( VkDevice device, VkSwapchainKHR swapchain, VkGetLatencyMarkerInfoNV * pLatencyMarkerInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetLatencyTimingsNV( device, swapchain, pLatencyMarkerInfo ); + } + + void vkQueueNotifyOutOfBandNV( VkQueue queue, const VkOutOfBandQueueTypeInfoNV * pQueueTypeInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkQueueNotifyOutOfBandNV( queue, pQueueTypeInfo ); + } + + //=== VK_KHR_cooperative_matrix === + + VkResult vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR( VkPhysicalDevice physicalDevice, + uint32_t * pPropertyCount, + VkCooperativeMatrixPropertiesKHR * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR( physicalDevice, pPropertyCount, pProperties ); + } + + //=== VK_EXT_attachment_feedback_loop_dynamic_state === + + void vkCmdSetAttachmentFeedbackLoopEnableEXT( VkCommandBuffer commandBuffer, VkImageAspectFlags aspectMask ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetAttachmentFeedbackLoopEnableEXT( commandBuffer, aspectMask ); + } + +# if defined( VK_USE_PLATFORM_SCREEN_QNX ) + //=== VK_QNX_external_memory_screen_buffer === + + VkResult vkGetScreenBufferPropertiesQNX( VkDevice device, + const struct _screen_buffer * buffer, + VkScreenBufferPropertiesQNX * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetScreenBufferPropertiesQNX( device, buffer, pProperties ); + } +# endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + //=== VK_KHR_line_rasterization === + + void vkCmdSetLineStippleKHR( VkCommandBuffer commandBuffer, uint32_t lineStippleFactor, uint16_t lineStipplePattern ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetLineStippleKHR( commandBuffer, lineStippleFactor, lineStipplePattern ); + } + + //=== VK_KHR_calibrated_timestamps === + + VkResult vkGetPhysicalDeviceCalibrateableTimeDomainsKHR( VkPhysicalDevice physicalDevice, + uint32_t * pTimeDomainCount, + VkTimeDomainKHR * pTimeDomains ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceCalibrateableTimeDomainsKHR( physicalDevice, pTimeDomainCount, pTimeDomains ); + } + + VkResult vkGetCalibratedTimestampsKHR( VkDevice device, + uint32_t timestampCount, + const VkCalibratedTimestampInfoKHR * pTimestampInfos, + uint64_t * pTimestamps, + uint64_t * pMaxDeviation ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetCalibratedTimestampsKHR( device, timestampCount, pTimestampInfos, pTimestamps, pMaxDeviation ); + } + + //=== VK_KHR_maintenance6 === + + void vkCmdBindDescriptorSets2KHR( VkCommandBuffer commandBuffer, const VkBindDescriptorSetsInfoKHR * pBindDescriptorSetsInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBindDescriptorSets2KHR( commandBuffer, pBindDescriptorSetsInfo ); + } + + void vkCmdPushConstants2KHR( VkCommandBuffer commandBuffer, const VkPushConstantsInfoKHR * pPushConstantsInfo ) 
const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdPushConstants2KHR( commandBuffer, pPushConstantsInfo ); + } + + void vkCmdPushDescriptorSet2KHR( VkCommandBuffer commandBuffer, const VkPushDescriptorSetInfoKHR * pPushDescriptorSetInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdPushDescriptorSet2KHR( commandBuffer, pPushDescriptorSetInfo ); + } + + void vkCmdPushDescriptorSetWithTemplate2KHR( VkCommandBuffer commandBuffer, + const VkPushDescriptorSetWithTemplateInfoKHR * pPushDescriptorSetWithTemplateInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdPushDescriptorSetWithTemplate2KHR( commandBuffer, pPushDescriptorSetWithTemplateInfo ); + } + + void vkCmdSetDescriptorBufferOffsets2EXT( VkCommandBuffer commandBuffer, + const VkSetDescriptorBufferOffsetsInfoEXT * pSetDescriptorBufferOffsetsInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetDescriptorBufferOffsets2EXT( commandBuffer, pSetDescriptorBufferOffsetsInfo ); + } + + void vkCmdBindDescriptorBufferEmbeddedSamplers2EXT( + VkCommandBuffer commandBuffer, + const VkBindDescriptorBufferEmbeddedSamplersInfoEXT * pBindDescriptorBufferEmbeddedSamplersInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBindDescriptorBufferEmbeddedSamplers2EXT( commandBuffer, pBindDescriptorBufferEmbeddedSamplersInfo ); + } + + //=== VK_EXT_device_generated_commands === + + void vkGetGeneratedCommandsMemoryRequirementsEXT( VkDevice device, + const VkGeneratedCommandsMemoryRequirementsInfoEXT * pInfo, + VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetGeneratedCommandsMemoryRequirementsEXT( device, pInfo, pMemoryRequirements ); + } + + void vkCmdPreprocessGeneratedCommandsEXT( VkCommandBuffer commandBuffer, + const VkGeneratedCommandsInfoEXT * pGeneratedCommandsInfo, + VkCommandBuffer stateCommandBuffer ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdPreprocessGeneratedCommandsEXT( commandBuffer, pGeneratedCommandsInfo, stateCommandBuffer ); + } + + void vkCmdExecuteGeneratedCommandsEXT( VkCommandBuffer commandBuffer, + VkBool32 isPreprocessed, + const VkGeneratedCommandsInfoEXT * pGeneratedCommandsInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdExecuteGeneratedCommandsEXT( commandBuffer, isPreprocessed, pGeneratedCommandsInfo ); + } + + VkResult vkCreateIndirectCommandsLayoutEXT( VkDevice device, + const VkIndirectCommandsLayoutCreateInfoEXT * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkIndirectCommandsLayoutEXT * pIndirectCommandsLayout ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateIndirectCommandsLayoutEXT( device, pCreateInfo, pAllocator, pIndirectCommandsLayout ); + } + + void vkDestroyIndirectCommandsLayoutEXT( VkDevice device, + VkIndirectCommandsLayoutEXT indirectCommandsLayout, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyIndirectCommandsLayoutEXT( device, indirectCommandsLayout, pAllocator ); + } + + VkResult vkCreateIndirectExecutionSetEXT( VkDevice device, + const VkIndirectExecutionSetCreateInfoEXT * pCreateInfo, + const VkAllocationCallbacks * pAllocator, + VkIndirectExecutionSetEXT * pIndirectExecutionSet ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCreateIndirectExecutionSetEXT( device, pCreateInfo, pAllocator, pIndirectExecutionSet ); + } + + void vkDestroyIndirectExecutionSetEXT( VkDevice device, + VkIndirectExecutionSetEXT indirectExecutionSet, + const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT + { + return ::vkDestroyIndirectExecutionSetEXT( device, indirectExecutionSet, pAllocator ); + } + + 
void vkUpdateIndirectExecutionSetPipelineEXT( VkDevice device, + VkIndirectExecutionSetEXT indirectExecutionSet, + uint32_t executionSetWriteCount, + const VkWriteIndirectExecutionSetPipelineEXT * pExecutionSetWrites ) const VULKAN_HPP_NOEXCEPT + { + return ::vkUpdateIndirectExecutionSetPipelineEXT( device, indirectExecutionSet, executionSetWriteCount, pExecutionSetWrites ); + } + + void vkUpdateIndirectExecutionSetShaderEXT( VkDevice device, + VkIndirectExecutionSetEXT indirectExecutionSet, + uint32_t executionSetWriteCount, + const VkWriteIndirectExecutionSetShaderEXT * pExecutionSetWrites ) const VULKAN_HPP_NOEXCEPT + { + return ::vkUpdateIndirectExecutionSetShaderEXT( device, indirectExecutionSet, executionSetWriteCount, pExecutionSetWrites ); + } + + //=== VK_NV_cooperative_matrix2 === + + VkResult vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV( + VkPhysicalDevice physicalDevice, uint32_t * pPropertyCount, VkCooperativeMatrixFlexibleDimensionsPropertiesNV * pProperties ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV( physicalDevice, pPropertyCount, pProperties ); + } + }; + + inline DispatchLoaderStatic & getDispatchLoaderStatic() { - return ::vkDestroyIndirectCommandsLayoutEXT( device, indirectCommandsLayout, pAllocator ); + static DispatchLoaderStatic dls; + return dls; } - - VkResult vkCreateIndirectExecutionSetEXT( VkDevice device, - const VkIndirectExecutionSetCreateInfoEXT * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkIndirectExecutionSetEXT * pIndirectExecutionSet ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateIndirectExecutionSetEXT( device, pCreateInfo, pAllocator, pIndirectExecutionSet ); - } - - void vkDestroyIndirectExecutionSetEXT( VkDevice device, - VkIndirectExecutionSetEXT indirectExecutionSet, - const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyIndirectExecutionSetEXT( device, indirectExecutionSet, pAllocator ); - } - - void vkUpdateIndirectExecutionSetPipelineEXT( VkDevice device, - VkIndirectExecutionSetEXT indirectExecutionSet, - uint32_t executionSetWriteCount, - const VkWriteIndirectExecutionSetPipelineEXT * pExecutionSetWrites ) const VULKAN_HPP_NOEXCEPT - { - return ::vkUpdateIndirectExecutionSetPipelineEXT( device, indirectExecutionSet, executionSetWriteCount, pExecutionSetWrites ); - } - - void vkUpdateIndirectExecutionSetShaderEXT( VkDevice device, - VkIndirectExecutionSetEXT indirectExecutionSet, - uint32_t executionSetWriteCount, - const VkWriteIndirectExecutionSetShaderEXT * pExecutionSetWrites ) const VULKAN_HPP_NOEXCEPT - { - return ::vkUpdateIndirectExecutionSetShaderEXT( device, indirectExecutionSet, executionSetWriteCount, pExecutionSetWrites ); - } - - //=== VK_NV_cooperative_matrix2 === - - VkResult vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV( - VkPhysicalDevice physicalDevice, uint32_t * pPropertyCount, VkCooperativeMatrixFlexibleDimensionsPropertiesNV * pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV( physicalDevice, pPropertyCount, pProperties ); - } - }; - - inline ::VULKAN_HPP_NAMESPACE::DispatchLoaderStatic & getDispatchLoaderStatic() - { - static ::VULKAN_HPP_NAMESPACE::DispatchLoaderStatic dls; - return dls; - } #endif + } // namespace detail #if ( 14 <= VULKAN_HPP_CPP_VERSION ) using std::exchange; #else @@ -6032,208 +6056,211 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_SMART_HANDLE 
) struct AllocationCallbacks; - template - class ObjectDestroy + namespace detail { - public: - ObjectDestroy() = default; - - ObjectDestroy( OwnerType owner, - Optional allocationCallbacks VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & dispatch VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) VULKAN_HPP_NOEXCEPT - : m_owner( owner ) - , m_allocationCallbacks( allocationCallbacks ) - , m_dispatch( &dispatch ) + template + class ObjectDestroy { - } + public: + ObjectDestroy() = default; - OwnerType getOwner() const VULKAN_HPP_NOEXCEPT + ObjectDestroy( OwnerType owner, + Optional allocationCallbacks VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & dispatch VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) VULKAN_HPP_NOEXCEPT + : m_owner( owner ) + , m_allocationCallbacks( allocationCallbacks ) + , m_dispatch( &dispatch ) + { + } + + OwnerType getOwner() const VULKAN_HPP_NOEXCEPT + { + return m_owner; + } + + Optional getAllocator() const VULKAN_HPP_NOEXCEPT + { + return m_allocationCallbacks; + } + + Dispatch const & getDispatch() const VULKAN_HPP_NOEXCEPT + { + return *m_dispatch; + } + + protected: + template + void destroy( T t ) VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( m_owner && m_dispatch ); + m_owner.destroy( t, m_allocationCallbacks, *m_dispatch ); + } + + private: + OwnerType m_owner = {}; + Optional m_allocationCallbacks = nullptr; + Dispatch const * m_dispatch = nullptr; + }; + + class NoParent; + + template + class ObjectDestroy { - return m_owner; - } + public: + ObjectDestroy() = default; - Optional getAllocator() const VULKAN_HPP_NOEXCEPT + ObjectDestroy( Optional allocationCallbacks, + Dispatch const & dispatch VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) VULKAN_HPP_NOEXCEPT + : m_allocationCallbacks( allocationCallbacks ) + , m_dispatch( &dispatch ) + { + } + + Optional getAllocator() const VULKAN_HPP_NOEXCEPT + { + return m_allocationCallbacks; + } + + Dispatch const & getDispatch() const VULKAN_HPP_NOEXCEPT + { + return *m_dispatch; + } + + protected: + template + void destroy( T t ) VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( m_dispatch ); + t.destroy( m_allocationCallbacks, *m_dispatch ); + } + + private: + Optional m_allocationCallbacks = nullptr; + Dispatch const * m_dispatch = nullptr; + }; + + template + class ObjectFree { - return m_allocationCallbacks; - } + public: + ObjectFree() = default; - Dispatch const & getDispatch() const VULKAN_HPP_NOEXCEPT + ObjectFree( OwnerType owner, + Optional allocationCallbacks VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + Dispatch const & dispatch VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) VULKAN_HPP_NOEXCEPT + : m_owner( owner ) + , m_allocationCallbacks( allocationCallbacks ) + , m_dispatch( &dispatch ) + { + } + + OwnerType getOwner() const VULKAN_HPP_NOEXCEPT + { + return m_owner; + } + + Optional getAllocator() const VULKAN_HPP_NOEXCEPT + { + return m_allocationCallbacks; + } + + Dispatch const & getDispatch() const VULKAN_HPP_NOEXCEPT + { + return *m_dispatch; + } + + protected: + template + void destroy( T t ) VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( m_owner && m_dispatch ); + ( m_owner.free )( t, m_allocationCallbacks, *m_dispatch ); + } + + private: + OwnerType m_owner = {}; + Optional m_allocationCallbacks = nullptr; + Dispatch const * m_dispatch = nullptr; + }; + + template + class ObjectRelease { - return *m_dispatch; - } + public: + ObjectRelease() = default; - protected: - template - void destroy( T t ) VULKAN_HPP_NOEXCEPT + ObjectRelease( OwnerType owner, Dispatch const & dispatch 
VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) VULKAN_HPP_NOEXCEPT + : m_owner( owner ) + , m_dispatch( &dispatch ) + { + } + + OwnerType getOwner() const VULKAN_HPP_NOEXCEPT + { + return m_owner; + } + + Dispatch const & getDispatch() const VULKAN_HPP_NOEXCEPT + { + return *m_dispatch; + } + + protected: + template + void destroy( T t ) VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( m_owner && m_dispatch ); + m_owner.release( t, *m_dispatch ); + } + + private: + OwnerType m_owner = {}; + Dispatch const * m_dispatch = nullptr; + }; + + template + class PoolFree { - VULKAN_HPP_ASSERT( m_owner && m_dispatch ); - m_owner.destroy( t, m_allocationCallbacks, *m_dispatch ); - } + public: + PoolFree() = default; - private: - OwnerType m_owner = {}; - Optional m_allocationCallbacks = nullptr; - Dispatch const * m_dispatch = nullptr; - }; + PoolFree( OwnerType owner, PoolType pool, Dispatch const & dispatch VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) VULKAN_HPP_NOEXCEPT + : m_owner( owner ) + , m_pool( pool ) + , m_dispatch( &dispatch ) + { + } - class NoParent; + OwnerType getOwner() const VULKAN_HPP_NOEXCEPT + { + return m_owner; + } - template - class ObjectDestroy - { - public: - ObjectDestroy() = default; + PoolType getPool() const VULKAN_HPP_NOEXCEPT + { + return m_pool; + } - ObjectDestroy( Optional allocationCallbacks, - Dispatch const & dispatch VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) VULKAN_HPP_NOEXCEPT - : m_allocationCallbacks( allocationCallbacks ) - , m_dispatch( &dispatch ) - { - } + Dispatch const & getDispatch() const VULKAN_HPP_NOEXCEPT + { + return *m_dispatch; + } - Optional getAllocator() const VULKAN_HPP_NOEXCEPT - { - return m_allocationCallbacks; - } + protected: + template + void destroy( T t ) VULKAN_HPP_NOEXCEPT + { + ( m_owner.free )( m_pool, t, *m_dispatch ); + } - Dispatch const & getDispatch() const VULKAN_HPP_NOEXCEPT - { - return *m_dispatch; - } - - protected: - template - void destroy( T t ) VULKAN_HPP_NOEXCEPT - { - VULKAN_HPP_ASSERT( m_dispatch ); - t.destroy( m_allocationCallbacks, *m_dispatch ); - } - - private: - Optional m_allocationCallbacks = nullptr; - Dispatch const * m_dispatch = nullptr; - }; - - template - class ObjectFree - { - public: - ObjectFree() = default; - - ObjectFree( OwnerType owner, - Optional allocationCallbacks VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - Dispatch const & dispatch VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) VULKAN_HPP_NOEXCEPT - : m_owner( owner ) - , m_allocationCallbacks( allocationCallbacks ) - , m_dispatch( &dispatch ) - { - } - - OwnerType getOwner() const VULKAN_HPP_NOEXCEPT - { - return m_owner; - } - - Optional getAllocator() const VULKAN_HPP_NOEXCEPT - { - return m_allocationCallbacks; - } - - Dispatch const & getDispatch() const VULKAN_HPP_NOEXCEPT - { - return *m_dispatch; - } - - protected: - template - void destroy( T t ) VULKAN_HPP_NOEXCEPT - { - VULKAN_HPP_ASSERT( m_owner && m_dispatch ); - ( m_owner.free )( t, m_allocationCallbacks, *m_dispatch ); - } - - private: - OwnerType m_owner = {}; - Optional m_allocationCallbacks = nullptr; - Dispatch const * m_dispatch = nullptr; - }; - - template - class ObjectRelease - { - public: - ObjectRelease() = default; - - ObjectRelease( OwnerType owner, Dispatch const & dispatch VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) VULKAN_HPP_NOEXCEPT - : m_owner( owner ) - , m_dispatch( &dispatch ) - { - } - - OwnerType getOwner() const VULKAN_HPP_NOEXCEPT - { - return m_owner; - } - - Dispatch const & getDispatch() const VULKAN_HPP_NOEXCEPT - { - return *m_dispatch; - } - - 
protected: - template - void destroy( T t ) VULKAN_HPP_NOEXCEPT - { - VULKAN_HPP_ASSERT( m_owner && m_dispatch ); - m_owner.release( t, *m_dispatch ); - } - - private: - OwnerType m_owner = {}; - Dispatch const * m_dispatch = nullptr; - }; - - template - class PoolFree - { - public: - PoolFree() = default; - - PoolFree( OwnerType owner, PoolType pool, Dispatch const & dispatch VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) VULKAN_HPP_NOEXCEPT - : m_owner( owner ) - , m_pool( pool ) - , m_dispatch( &dispatch ) - { - } - - OwnerType getOwner() const VULKAN_HPP_NOEXCEPT - { - return m_owner; - } - - PoolType getPool() const VULKAN_HPP_NOEXCEPT - { - return m_pool; - } - - Dispatch const & getDispatch() const VULKAN_HPP_NOEXCEPT - { - return *m_dispatch; - } - - protected: - template - void destroy( T t ) VULKAN_HPP_NOEXCEPT - { - ( m_owner.free )( m_pool, t, *m_dispatch ); - } - - private: - OwnerType m_owner = OwnerType(); - PoolType m_pool = PoolType(); - Dispatch const * m_dispatch = nullptr; - }; + private: + OwnerType m_owner = OwnerType(); + PoolType m_pool = PoolType(); + Dispatch const * m_dispatch = nullptr; + }; + } // namespace detail #endif // !VULKAN_HPP_NO_SMART_HANDLE //================== @@ -8683,6 +8710,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR_INLINE auto EXTDepthClampControlExtensionName = VK_EXT_DEPTH_CLAMP_CONTROL_EXTENSION_NAME; VULKAN_HPP_CONSTEXPR_INLINE auto EXTDepthClampControlSpecVersion = VK_EXT_DEPTH_CLAMP_CONTROL_SPEC_VERSION; + //=== VK_HUAWEI_hdr_vivid === + VULKAN_HPP_CONSTEXPR_INLINE auto HUAWEIHdrVividExtensionName = VK_HUAWEI_HDR_VIVID_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto HUAWEIHdrVividSpecVersion = VK_HUAWEI_HDR_VIVID_SPEC_VERSION; + //=== VK_NV_cooperative_matrix2 === VULKAN_HPP_CONSTEXPR_INLINE auto NVCooperativeMatrix2ExtensionName = VK_NV_COOPERATIVE_MATRIX_2_EXTENSION_NAME; VULKAN_HPP_CONSTEXPR_INLINE auto NVCooperativeMatrix2SpecVersion = VK_NV_COOPERATIVE_MATRIX_2_SPEC_VERSION; @@ -17072,6 +17103,34 @@ namespace VULKAN_HPP_NAMESPACE }; }; + //=== VK_HUAWEI_hdr_vivid === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + //=== VK_NV_cooperative_matrix2 === template <> struct StructExtends @@ -17102,3792 +17161,3827 @@ namespace VULKAN_HPP_NAMESPACE #endif // VULKAN_HPP_DISABLE_ENHANCED_MODE -#if VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL - class DynamicLoader + namespace detail { - public: -# ifdef VULKAN_HPP_NO_EXCEPTIONS - DynamicLoader( std::string const & vulkanLibraryName = {} ) VULKAN_HPP_NOEXCEPT -# else - DynamicLoader( std::string const & vulkanLibraryName = {} ) -# endif +#if VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL + class DynamicLoader { - if ( !vulkanLibraryName.empty() ) + public: +# ifdef VULKAN_HPP_NO_EXCEPTIONS + DynamicLoader( std::string const & vulkanLibraryName = {} ) VULKAN_HPP_NOEXCEPT +# else + DynamicLoader( std::string const & vulkanLibraryName = {} ) +# endif { + if ( !vulkanLibraryName.empty() ) + { # if defined( __unix__ ) || defined( __APPLE__ ) || defined( __QNX__ ) || defined( __Fuchsia__ ) - m_library = dlopen( vulkanLibraryName.c_str(), RTLD_NOW | RTLD_LOCAL ); + m_library = dlopen( vulkanLibraryName.c_str(), RTLD_NOW | RTLD_LOCAL ); # elif defined( _WIN32 ) - m_library = ::LoadLibraryA( vulkanLibraryName.c_str() ); + m_library = ::LoadLibraryA( vulkanLibraryName.c_str() ); # else # error unsupported platform # endif 
- } - else - { + } + else + { # if defined( __unix__ ) || defined( __QNX__ ) || defined( __Fuchsia__ ) - m_library = dlopen( "libvulkan.so", RTLD_NOW | RTLD_LOCAL ); - if ( m_library == nullptr ) - { - m_library = dlopen( "libvulkan.so.1", RTLD_NOW | RTLD_LOCAL ); - } + m_library = dlopen( "libvulkan.so", RTLD_NOW | RTLD_LOCAL ); + if ( m_library == nullptr ) + { + m_library = dlopen( "libvulkan.so.1", RTLD_NOW | RTLD_LOCAL ); + } # elif defined( __APPLE__ ) - m_library = dlopen( "libvulkan.dylib", RTLD_NOW | RTLD_LOCAL ); - if ( m_library == nullptr ) - { - m_library = dlopen( "libvulkan.1.dylib", RTLD_NOW | RTLD_LOCAL ); - } + m_library = dlopen( "libvulkan.dylib", RTLD_NOW | RTLD_LOCAL ); + if ( m_library == nullptr ) + { + m_library = dlopen( "libvulkan.1.dylib", RTLD_NOW | RTLD_LOCAL ); + } # elif defined( _WIN32 ) - m_library = ::LoadLibraryA( "vulkan-1.dll" ); + m_library = ::LoadLibraryA( "vulkan-1.dll" ); # else # error unsupported platform # endif - } + } # ifndef VULKAN_HPP_NO_EXCEPTIONS - if ( m_library == nullptr ) - { - // NOTE there should be an InitializationFailedError, but msvc insists on the symbol does not exist within the scope of this function. - throw std::runtime_error( "Failed to load vulkan library!" ); - } + if ( m_library == nullptr ) + { + // NOTE there should be an InitializationFailedError, but msvc insists on the symbol does not exist within the scope of this function. + throw std::runtime_error( "Failed to load vulkan library!" ); + } # endif - } + } - DynamicLoader( DynamicLoader const & ) = delete; + DynamicLoader( DynamicLoader const & ) = delete; - DynamicLoader( DynamicLoader && other ) VULKAN_HPP_NOEXCEPT : m_library( other.m_library ) - { - other.m_library = nullptr; - } + DynamicLoader( DynamicLoader && other ) VULKAN_HPP_NOEXCEPT : m_library( other.m_library ) + { + other.m_library = nullptr; + } - DynamicLoader & operator=( DynamicLoader const & ) = delete; + DynamicLoader & operator=( DynamicLoader const & ) = delete; - DynamicLoader & operator=( DynamicLoader && other ) VULKAN_HPP_NOEXCEPT - { - std::swap( m_library, other.m_library ); - return *this; - } + DynamicLoader & operator=( DynamicLoader && other ) VULKAN_HPP_NOEXCEPT + { + std::swap( m_library, other.m_library ); + return *this; + } - ~DynamicLoader() VULKAN_HPP_NOEXCEPT - { - if ( m_library ) + ~DynamicLoader() VULKAN_HPP_NOEXCEPT + { + if ( m_library ) + { +# if defined( __unix__ ) || defined( __APPLE__ ) || defined( __QNX__ ) || defined( __Fuchsia__ ) + dlclose( m_library ); +# elif defined( _WIN32 ) + ::FreeLibrary( m_library ); +# else +# error unsupported platform +# endif + } + } + + template + T getProcAddress( const char * function ) const VULKAN_HPP_NOEXCEPT { # if defined( __unix__ ) || defined( __APPLE__ ) || defined( __QNX__ ) || defined( __Fuchsia__ ) - dlclose( m_library ); + return (T)dlsym( m_library, function ); # elif defined( _WIN32 ) - ::FreeLibrary( m_library ); + return ( T )::GetProcAddress( m_library, function ); # else # error unsupported platform # endif } - } - template - T getProcAddress( const char * function ) const VULKAN_HPP_NOEXCEPT - { + bool success() const VULKAN_HPP_NOEXCEPT + { + return m_library != nullptr; + } + + private: # if defined( __unix__ ) || defined( __APPLE__ ) || defined( __QNX__ ) || defined( __Fuchsia__ ) - return (T)dlsym( m_library, function ); + void * m_library; # elif defined( _WIN32 ) - return ( T )::GetProcAddress( m_library, function ); + ::HINSTANCE m_library; # else # error unsupported platform # endif - } - - bool 
success() const VULKAN_HPP_NOEXCEPT - { - return m_library != nullptr; - } - - private: -# if defined( __unix__ ) || defined( __APPLE__ ) || defined( __QNX__ ) || defined( __Fuchsia__ ) - void * m_library; -# elif defined( _WIN32 ) - ::HINSTANCE m_library; -# else -# error unsupported platform -# endif - }; + }; #endif - using PFN_dummy = void ( * )(); + using PFN_dummy = void ( * )(); - class DispatchLoaderDynamic : public DispatchLoaderBase - { - public: - //=== VK_VERSION_1_0 === - PFN_vkCreateInstance vkCreateInstance = 0; - PFN_vkDestroyInstance vkDestroyInstance = 0; - PFN_vkEnumeratePhysicalDevices vkEnumeratePhysicalDevices = 0; - PFN_vkGetPhysicalDeviceFeatures vkGetPhysicalDeviceFeatures = 0; - PFN_vkGetPhysicalDeviceFormatProperties vkGetPhysicalDeviceFormatProperties = 0; - PFN_vkGetPhysicalDeviceImageFormatProperties vkGetPhysicalDeviceImageFormatProperties = 0; - PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties = 0; - PFN_vkGetPhysicalDeviceQueueFamilyProperties vkGetPhysicalDeviceQueueFamilyProperties = 0; - PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties = 0; - PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr = 0; - PFN_vkGetDeviceProcAddr vkGetDeviceProcAddr = 0; - PFN_vkCreateDevice vkCreateDevice = 0; - PFN_vkDestroyDevice vkDestroyDevice = 0; - PFN_vkEnumerateInstanceExtensionProperties vkEnumerateInstanceExtensionProperties = 0; - PFN_vkEnumerateDeviceExtensionProperties vkEnumerateDeviceExtensionProperties = 0; - PFN_vkEnumerateInstanceLayerProperties vkEnumerateInstanceLayerProperties = 0; - PFN_vkEnumerateDeviceLayerProperties vkEnumerateDeviceLayerProperties = 0; - PFN_vkGetDeviceQueue vkGetDeviceQueue = 0; - PFN_vkQueueSubmit vkQueueSubmit = 0; - PFN_vkQueueWaitIdle vkQueueWaitIdle = 0; - PFN_vkDeviceWaitIdle vkDeviceWaitIdle = 0; - PFN_vkAllocateMemory vkAllocateMemory = 0; - PFN_vkFreeMemory vkFreeMemory = 0; - PFN_vkMapMemory vkMapMemory = 0; - PFN_vkUnmapMemory vkUnmapMemory = 0; - PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges = 0; - PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges = 0; - PFN_vkGetDeviceMemoryCommitment vkGetDeviceMemoryCommitment = 0; - PFN_vkBindBufferMemory vkBindBufferMemory = 0; - PFN_vkBindImageMemory vkBindImageMemory = 0; - PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements = 0; - PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements = 0; - PFN_vkGetImageSparseMemoryRequirements vkGetImageSparseMemoryRequirements = 0; - PFN_vkGetPhysicalDeviceSparseImageFormatProperties vkGetPhysicalDeviceSparseImageFormatProperties = 0; - PFN_vkQueueBindSparse vkQueueBindSparse = 0; - PFN_vkCreateFence vkCreateFence = 0; - PFN_vkDestroyFence vkDestroyFence = 0; - PFN_vkResetFences vkResetFences = 0; - PFN_vkGetFenceStatus vkGetFenceStatus = 0; - PFN_vkWaitForFences vkWaitForFences = 0; - PFN_vkCreateSemaphore vkCreateSemaphore = 0; - PFN_vkDestroySemaphore vkDestroySemaphore = 0; - PFN_vkCreateEvent vkCreateEvent = 0; - PFN_vkDestroyEvent vkDestroyEvent = 0; - PFN_vkGetEventStatus vkGetEventStatus = 0; - PFN_vkSetEvent vkSetEvent = 0; - PFN_vkResetEvent vkResetEvent = 0; - PFN_vkCreateQueryPool vkCreateQueryPool = 0; - PFN_vkDestroyQueryPool vkDestroyQueryPool = 0; - PFN_vkGetQueryPoolResults vkGetQueryPoolResults = 0; - PFN_vkCreateBuffer vkCreateBuffer = 0; - PFN_vkDestroyBuffer vkDestroyBuffer = 0; - PFN_vkCreateBufferView vkCreateBufferView = 0; - PFN_vkDestroyBufferView vkDestroyBufferView = 0; - PFN_vkCreateImage vkCreateImage = 0; - PFN_vkDestroyImage 
vkDestroyImage = 0; - PFN_vkGetImageSubresourceLayout vkGetImageSubresourceLayout = 0; - PFN_vkCreateImageView vkCreateImageView = 0; - PFN_vkDestroyImageView vkDestroyImageView = 0; - PFN_vkCreateShaderModule vkCreateShaderModule = 0; - PFN_vkDestroyShaderModule vkDestroyShaderModule = 0; - PFN_vkCreatePipelineCache vkCreatePipelineCache = 0; - PFN_vkDestroyPipelineCache vkDestroyPipelineCache = 0; - PFN_vkGetPipelineCacheData vkGetPipelineCacheData = 0; - PFN_vkMergePipelineCaches vkMergePipelineCaches = 0; - PFN_vkCreateGraphicsPipelines vkCreateGraphicsPipelines = 0; - PFN_vkCreateComputePipelines vkCreateComputePipelines = 0; - PFN_vkDestroyPipeline vkDestroyPipeline = 0; - PFN_vkCreatePipelineLayout vkCreatePipelineLayout = 0; - PFN_vkDestroyPipelineLayout vkDestroyPipelineLayout = 0; - PFN_vkCreateSampler vkCreateSampler = 0; - PFN_vkDestroySampler vkDestroySampler = 0; - PFN_vkCreateDescriptorSetLayout vkCreateDescriptorSetLayout = 0; - PFN_vkDestroyDescriptorSetLayout vkDestroyDescriptorSetLayout = 0; - PFN_vkCreateDescriptorPool vkCreateDescriptorPool = 0; - PFN_vkDestroyDescriptorPool vkDestroyDescriptorPool = 0; - PFN_vkResetDescriptorPool vkResetDescriptorPool = 0; - PFN_vkAllocateDescriptorSets vkAllocateDescriptorSets = 0; - PFN_vkFreeDescriptorSets vkFreeDescriptorSets = 0; - PFN_vkUpdateDescriptorSets vkUpdateDescriptorSets = 0; - PFN_vkCreateFramebuffer vkCreateFramebuffer = 0; - PFN_vkDestroyFramebuffer vkDestroyFramebuffer = 0; - PFN_vkCreateRenderPass vkCreateRenderPass = 0; - PFN_vkDestroyRenderPass vkDestroyRenderPass = 0; - PFN_vkGetRenderAreaGranularity vkGetRenderAreaGranularity = 0; - PFN_vkCreateCommandPool vkCreateCommandPool = 0; - PFN_vkDestroyCommandPool vkDestroyCommandPool = 0; - PFN_vkResetCommandPool vkResetCommandPool = 0; - PFN_vkAllocateCommandBuffers vkAllocateCommandBuffers = 0; - PFN_vkFreeCommandBuffers vkFreeCommandBuffers = 0; - PFN_vkBeginCommandBuffer vkBeginCommandBuffer = 0; - PFN_vkEndCommandBuffer vkEndCommandBuffer = 0; - PFN_vkResetCommandBuffer vkResetCommandBuffer = 0; - PFN_vkCmdBindPipeline vkCmdBindPipeline = 0; - PFN_vkCmdSetViewport vkCmdSetViewport = 0; - PFN_vkCmdSetScissor vkCmdSetScissor = 0; - PFN_vkCmdSetLineWidth vkCmdSetLineWidth = 0; - PFN_vkCmdSetDepthBias vkCmdSetDepthBias = 0; - PFN_vkCmdSetBlendConstants vkCmdSetBlendConstants = 0; - PFN_vkCmdSetDepthBounds vkCmdSetDepthBounds = 0; - PFN_vkCmdSetStencilCompareMask vkCmdSetStencilCompareMask = 0; - PFN_vkCmdSetStencilWriteMask vkCmdSetStencilWriteMask = 0; - PFN_vkCmdSetStencilReference vkCmdSetStencilReference = 0; - PFN_vkCmdBindDescriptorSets vkCmdBindDescriptorSets = 0; - PFN_vkCmdBindIndexBuffer vkCmdBindIndexBuffer = 0; - PFN_vkCmdBindVertexBuffers vkCmdBindVertexBuffers = 0; - PFN_vkCmdDraw vkCmdDraw = 0; - PFN_vkCmdDrawIndexed vkCmdDrawIndexed = 0; - PFN_vkCmdDrawIndirect vkCmdDrawIndirect = 0; - PFN_vkCmdDrawIndexedIndirect vkCmdDrawIndexedIndirect = 0; - PFN_vkCmdDispatch vkCmdDispatch = 0; - PFN_vkCmdDispatchIndirect vkCmdDispatchIndirect = 0; - PFN_vkCmdCopyBuffer vkCmdCopyBuffer = 0; - PFN_vkCmdCopyImage vkCmdCopyImage = 0; - PFN_vkCmdBlitImage vkCmdBlitImage = 0; - PFN_vkCmdCopyBufferToImage vkCmdCopyBufferToImage = 0; - PFN_vkCmdCopyImageToBuffer vkCmdCopyImageToBuffer = 0; - PFN_vkCmdUpdateBuffer vkCmdUpdateBuffer = 0; - PFN_vkCmdFillBuffer vkCmdFillBuffer = 0; - PFN_vkCmdClearColorImage vkCmdClearColorImage = 0; - PFN_vkCmdClearDepthStencilImage vkCmdClearDepthStencilImage = 0; - PFN_vkCmdClearAttachments vkCmdClearAttachments = 0; - 
PFN_vkCmdResolveImage vkCmdResolveImage = 0; - PFN_vkCmdSetEvent vkCmdSetEvent = 0; - PFN_vkCmdResetEvent vkCmdResetEvent = 0; - PFN_vkCmdWaitEvents vkCmdWaitEvents = 0; - PFN_vkCmdPipelineBarrier vkCmdPipelineBarrier = 0; - PFN_vkCmdBeginQuery vkCmdBeginQuery = 0; - PFN_vkCmdEndQuery vkCmdEndQuery = 0; - PFN_vkCmdResetQueryPool vkCmdResetQueryPool = 0; - PFN_vkCmdWriteTimestamp vkCmdWriteTimestamp = 0; - PFN_vkCmdCopyQueryPoolResults vkCmdCopyQueryPoolResults = 0; - PFN_vkCmdPushConstants vkCmdPushConstants = 0; - PFN_vkCmdBeginRenderPass vkCmdBeginRenderPass = 0; - PFN_vkCmdNextSubpass vkCmdNextSubpass = 0; - PFN_vkCmdEndRenderPass vkCmdEndRenderPass = 0; - PFN_vkCmdExecuteCommands vkCmdExecuteCommands = 0; - - //=== VK_VERSION_1_1 === - PFN_vkEnumerateInstanceVersion vkEnumerateInstanceVersion = 0; - PFN_vkBindBufferMemory2 vkBindBufferMemory2 = 0; - PFN_vkBindImageMemory2 vkBindImageMemory2 = 0; - PFN_vkGetDeviceGroupPeerMemoryFeatures vkGetDeviceGroupPeerMemoryFeatures = 0; - PFN_vkCmdSetDeviceMask vkCmdSetDeviceMask = 0; - PFN_vkCmdDispatchBase vkCmdDispatchBase = 0; - PFN_vkEnumeratePhysicalDeviceGroups vkEnumeratePhysicalDeviceGroups = 0; - PFN_vkGetImageMemoryRequirements2 vkGetImageMemoryRequirements2 = 0; - PFN_vkGetBufferMemoryRequirements2 vkGetBufferMemoryRequirements2 = 0; - PFN_vkGetImageSparseMemoryRequirements2 vkGetImageSparseMemoryRequirements2 = 0; - PFN_vkGetPhysicalDeviceFeatures2 vkGetPhysicalDeviceFeatures2 = 0; - PFN_vkGetPhysicalDeviceProperties2 vkGetPhysicalDeviceProperties2 = 0; - PFN_vkGetPhysicalDeviceFormatProperties2 vkGetPhysicalDeviceFormatProperties2 = 0; - PFN_vkGetPhysicalDeviceImageFormatProperties2 vkGetPhysicalDeviceImageFormatProperties2 = 0; - PFN_vkGetPhysicalDeviceQueueFamilyProperties2 vkGetPhysicalDeviceQueueFamilyProperties2 = 0; - PFN_vkGetPhysicalDeviceMemoryProperties2 vkGetPhysicalDeviceMemoryProperties2 = 0; - PFN_vkGetPhysicalDeviceSparseImageFormatProperties2 vkGetPhysicalDeviceSparseImageFormatProperties2 = 0; - PFN_vkTrimCommandPool vkTrimCommandPool = 0; - PFN_vkGetDeviceQueue2 vkGetDeviceQueue2 = 0; - PFN_vkCreateSamplerYcbcrConversion vkCreateSamplerYcbcrConversion = 0; - PFN_vkDestroySamplerYcbcrConversion vkDestroySamplerYcbcrConversion = 0; - PFN_vkCreateDescriptorUpdateTemplate vkCreateDescriptorUpdateTemplate = 0; - PFN_vkDestroyDescriptorUpdateTemplate vkDestroyDescriptorUpdateTemplate = 0; - PFN_vkUpdateDescriptorSetWithTemplate vkUpdateDescriptorSetWithTemplate = 0; - PFN_vkGetPhysicalDeviceExternalBufferProperties vkGetPhysicalDeviceExternalBufferProperties = 0; - PFN_vkGetPhysicalDeviceExternalFenceProperties vkGetPhysicalDeviceExternalFenceProperties = 0; - PFN_vkGetPhysicalDeviceExternalSemaphoreProperties vkGetPhysicalDeviceExternalSemaphoreProperties = 0; - PFN_vkGetDescriptorSetLayoutSupport vkGetDescriptorSetLayoutSupport = 0; - - //=== VK_VERSION_1_2 === - PFN_vkCmdDrawIndirectCount vkCmdDrawIndirectCount = 0; - PFN_vkCmdDrawIndexedIndirectCount vkCmdDrawIndexedIndirectCount = 0; - PFN_vkCreateRenderPass2 vkCreateRenderPass2 = 0; - PFN_vkCmdBeginRenderPass2 vkCmdBeginRenderPass2 = 0; - PFN_vkCmdNextSubpass2 vkCmdNextSubpass2 = 0; - PFN_vkCmdEndRenderPass2 vkCmdEndRenderPass2 = 0; - PFN_vkResetQueryPool vkResetQueryPool = 0; - PFN_vkGetSemaphoreCounterValue vkGetSemaphoreCounterValue = 0; - PFN_vkWaitSemaphores vkWaitSemaphores = 0; - PFN_vkSignalSemaphore vkSignalSemaphore = 0; - PFN_vkGetBufferDeviceAddress vkGetBufferDeviceAddress = 0; - PFN_vkGetBufferOpaqueCaptureAddress vkGetBufferOpaqueCaptureAddress = 0; - 
PFN_vkGetDeviceMemoryOpaqueCaptureAddress vkGetDeviceMemoryOpaqueCaptureAddress = 0; - - //=== VK_VERSION_1_3 === - PFN_vkGetPhysicalDeviceToolProperties vkGetPhysicalDeviceToolProperties = 0; - PFN_vkCreatePrivateDataSlot vkCreatePrivateDataSlot = 0; - PFN_vkDestroyPrivateDataSlot vkDestroyPrivateDataSlot = 0; - PFN_vkSetPrivateData vkSetPrivateData = 0; - PFN_vkGetPrivateData vkGetPrivateData = 0; - PFN_vkCmdSetEvent2 vkCmdSetEvent2 = 0; - PFN_vkCmdResetEvent2 vkCmdResetEvent2 = 0; - PFN_vkCmdWaitEvents2 vkCmdWaitEvents2 = 0; - PFN_vkCmdPipelineBarrier2 vkCmdPipelineBarrier2 = 0; - PFN_vkCmdWriteTimestamp2 vkCmdWriteTimestamp2 = 0; - PFN_vkQueueSubmit2 vkQueueSubmit2 = 0; - PFN_vkCmdCopyBuffer2 vkCmdCopyBuffer2 = 0; - PFN_vkCmdCopyImage2 vkCmdCopyImage2 = 0; - PFN_vkCmdCopyBufferToImage2 vkCmdCopyBufferToImage2 = 0; - PFN_vkCmdCopyImageToBuffer2 vkCmdCopyImageToBuffer2 = 0; - PFN_vkCmdBlitImage2 vkCmdBlitImage2 = 0; - PFN_vkCmdResolveImage2 vkCmdResolveImage2 = 0; - PFN_vkCmdBeginRendering vkCmdBeginRendering = 0; - PFN_vkCmdEndRendering vkCmdEndRendering = 0; - PFN_vkCmdSetCullMode vkCmdSetCullMode = 0; - PFN_vkCmdSetFrontFace vkCmdSetFrontFace = 0; - PFN_vkCmdSetPrimitiveTopology vkCmdSetPrimitiveTopology = 0; - PFN_vkCmdSetViewportWithCount vkCmdSetViewportWithCount = 0; - PFN_vkCmdSetScissorWithCount vkCmdSetScissorWithCount = 0; - PFN_vkCmdBindVertexBuffers2 vkCmdBindVertexBuffers2 = 0; - PFN_vkCmdSetDepthTestEnable vkCmdSetDepthTestEnable = 0; - PFN_vkCmdSetDepthWriteEnable vkCmdSetDepthWriteEnable = 0; - PFN_vkCmdSetDepthCompareOp vkCmdSetDepthCompareOp = 0; - PFN_vkCmdSetDepthBoundsTestEnable vkCmdSetDepthBoundsTestEnable = 0; - PFN_vkCmdSetStencilTestEnable vkCmdSetStencilTestEnable = 0; - PFN_vkCmdSetStencilOp vkCmdSetStencilOp = 0; - PFN_vkCmdSetRasterizerDiscardEnable vkCmdSetRasterizerDiscardEnable = 0; - PFN_vkCmdSetDepthBiasEnable vkCmdSetDepthBiasEnable = 0; - PFN_vkCmdSetPrimitiveRestartEnable vkCmdSetPrimitiveRestartEnable = 0; - PFN_vkGetDeviceBufferMemoryRequirements vkGetDeviceBufferMemoryRequirements = 0; - PFN_vkGetDeviceImageMemoryRequirements vkGetDeviceImageMemoryRequirements = 0; - PFN_vkGetDeviceImageSparseMemoryRequirements vkGetDeviceImageSparseMemoryRequirements = 0; - - //=== VK_KHR_surface === - PFN_vkDestroySurfaceKHR vkDestroySurfaceKHR = 0; - PFN_vkGetPhysicalDeviceSurfaceSupportKHR vkGetPhysicalDeviceSurfaceSupportKHR = 0; - PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR vkGetPhysicalDeviceSurfaceCapabilitiesKHR = 0; - PFN_vkGetPhysicalDeviceSurfaceFormatsKHR vkGetPhysicalDeviceSurfaceFormatsKHR = 0; - PFN_vkGetPhysicalDeviceSurfacePresentModesKHR vkGetPhysicalDeviceSurfacePresentModesKHR = 0; - - //=== VK_KHR_swapchain === - PFN_vkCreateSwapchainKHR vkCreateSwapchainKHR = 0; - PFN_vkDestroySwapchainKHR vkDestroySwapchainKHR = 0; - PFN_vkGetSwapchainImagesKHR vkGetSwapchainImagesKHR = 0; - PFN_vkAcquireNextImageKHR vkAcquireNextImageKHR = 0; - PFN_vkQueuePresentKHR vkQueuePresentKHR = 0; - PFN_vkGetDeviceGroupPresentCapabilitiesKHR vkGetDeviceGroupPresentCapabilitiesKHR = 0; - PFN_vkGetDeviceGroupSurfacePresentModesKHR vkGetDeviceGroupSurfacePresentModesKHR = 0; - PFN_vkGetPhysicalDevicePresentRectanglesKHR vkGetPhysicalDevicePresentRectanglesKHR = 0; - PFN_vkAcquireNextImage2KHR vkAcquireNextImage2KHR = 0; - - //=== VK_KHR_display === - PFN_vkGetPhysicalDeviceDisplayPropertiesKHR vkGetPhysicalDeviceDisplayPropertiesKHR = 0; - PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR vkGetPhysicalDeviceDisplayPlanePropertiesKHR = 0; - 
PFN_vkGetDisplayPlaneSupportedDisplaysKHR vkGetDisplayPlaneSupportedDisplaysKHR = 0; - PFN_vkGetDisplayModePropertiesKHR vkGetDisplayModePropertiesKHR = 0; - PFN_vkCreateDisplayModeKHR vkCreateDisplayModeKHR = 0; - PFN_vkGetDisplayPlaneCapabilitiesKHR vkGetDisplayPlaneCapabilitiesKHR = 0; - PFN_vkCreateDisplayPlaneSurfaceKHR vkCreateDisplayPlaneSurfaceKHR = 0; - - //=== VK_KHR_display_swapchain === - PFN_vkCreateSharedSwapchainsKHR vkCreateSharedSwapchainsKHR = 0; - -#if defined( VK_USE_PLATFORM_XLIB_KHR ) - //=== VK_KHR_xlib_surface === - PFN_vkCreateXlibSurfaceKHR vkCreateXlibSurfaceKHR = 0; - PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR vkGetPhysicalDeviceXlibPresentationSupportKHR = 0; -#else - PFN_dummy vkCreateXlibSurfaceKHR_placeholder = 0; - PFN_dummy vkGetPhysicalDeviceXlibPresentationSupportKHR_placeholder = 0; -#endif /*VK_USE_PLATFORM_XLIB_KHR*/ - -#if defined( VK_USE_PLATFORM_XCB_KHR ) - //=== VK_KHR_xcb_surface === - PFN_vkCreateXcbSurfaceKHR vkCreateXcbSurfaceKHR = 0; - PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR vkGetPhysicalDeviceXcbPresentationSupportKHR = 0; -#else - PFN_dummy vkCreateXcbSurfaceKHR_placeholder = 0; - PFN_dummy vkGetPhysicalDeviceXcbPresentationSupportKHR_placeholder = 0; -#endif /*VK_USE_PLATFORM_XCB_KHR*/ - -#if defined( VK_USE_PLATFORM_WAYLAND_KHR ) - //=== VK_KHR_wayland_surface === - PFN_vkCreateWaylandSurfaceKHR vkCreateWaylandSurfaceKHR = 0; - PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR vkGetPhysicalDeviceWaylandPresentationSupportKHR = 0; -#else - PFN_dummy vkCreateWaylandSurfaceKHR_placeholder = 0; - PFN_dummy vkGetPhysicalDeviceWaylandPresentationSupportKHR_placeholder = 0; -#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ - -#if defined( VK_USE_PLATFORM_ANDROID_KHR ) - //=== VK_KHR_android_surface === - PFN_vkCreateAndroidSurfaceKHR vkCreateAndroidSurfaceKHR = 0; -#else - PFN_dummy vkCreateAndroidSurfaceKHR_placeholder = 0; -#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ - -#if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_KHR_win32_surface === - PFN_vkCreateWin32SurfaceKHR vkCreateWin32SurfaceKHR = 0; - PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR vkGetPhysicalDeviceWin32PresentationSupportKHR = 0; -#else - PFN_dummy vkCreateWin32SurfaceKHR_placeholder = 0; - PFN_dummy vkGetPhysicalDeviceWin32PresentationSupportKHR_placeholder = 0; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - //=== VK_EXT_debug_report === - PFN_vkCreateDebugReportCallbackEXT vkCreateDebugReportCallbackEXT = 0; - PFN_vkDestroyDebugReportCallbackEXT vkDestroyDebugReportCallbackEXT = 0; - PFN_vkDebugReportMessageEXT vkDebugReportMessageEXT = 0; - - //=== VK_EXT_debug_marker === - PFN_vkDebugMarkerSetObjectTagEXT vkDebugMarkerSetObjectTagEXT = 0; - PFN_vkDebugMarkerSetObjectNameEXT vkDebugMarkerSetObjectNameEXT = 0; - PFN_vkCmdDebugMarkerBeginEXT vkCmdDebugMarkerBeginEXT = 0; - PFN_vkCmdDebugMarkerEndEXT vkCmdDebugMarkerEndEXT = 0; - PFN_vkCmdDebugMarkerInsertEXT vkCmdDebugMarkerInsertEXT = 0; - - //=== VK_KHR_video_queue === - PFN_vkGetPhysicalDeviceVideoCapabilitiesKHR vkGetPhysicalDeviceVideoCapabilitiesKHR = 0; - PFN_vkGetPhysicalDeviceVideoFormatPropertiesKHR vkGetPhysicalDeviceVideoFormatPropertiesKHR = 0; - PFN_vkCreateVideoSessionKHR vkCreateVideoSessionKHR = 0; - PFN_vkDestroyVideoSessionKHR vkDestroyVideoSessionKHR = 0; - PFN_vkGetVideoSessionMemoryRequirementsKHR vkGetVideoSessionMemoryRequirementsKHR = 0; - PFN_vkBindVideoSessionMemoryKHR vkBindVideoSessionMemoryKHR = 0; - PFN_vkCreateVideoSessionParametersKHR vkCreateVideoSessionParametersKHR = 0; - 
PFN_vkUpdateVideoSessionParametersKHR vkUpdateVideoSessionParametersKHR = 0; - PFN_vkDestroyVideoSessionParametersKHR vkDestroyVideoSessionParametersKHR = 0; - PFN_vkCmdBeginVideoCodingKHR vkCmdBeginVideoCodingKHR = 0; - PFN_vkCmdEndVideoCodingKHR vkCmdEndVideoCodingKHR = 0; - PFN_vkCmdControlVideoCodingKHR vkCmdControlVideoCodingKHR = 0; - - //=== VK_KHR_video_decode_queue === - PFN_vkCmdDecodeVideoKHR vkCmdDecodeVideoKHR = 0; - - //=== VK_EXT_transform_feedback === - PFN_vkCmdBindTransformFeedbackBuffersEXT vkCmdBindTransformFeedbackBuffersEXT = 0; - PFN_vkCmdBeginTransformFeedbackEXT vkCmdBeginTransformFeedbackEXT = 0; - PFN_vkCmdEndTransformFeedbackEXT vkCmdEndTransformFeedbackEXT = 0; - PFN_vkCmdBeginQueryIndexedEXT vkCmdBeginQueryIndexedEXT = 0; - PFN_vkCmdEndQueryIndexedEXT vkCmdEndQueryIndexedEXT = 0; - PFN_vkCmdDrawIndirectByteCountEXT vkCmdDrawIndirectByteCountEXT = 0; - - //=== VK_NVX_binary_import === - PFN_vkCreateCuModuleNVX vkCreateCuModuleNVX = 0; - PFN_vkCreateCuFunctionNVX vkCreateCuFunctionNVX = 0; - PFN_vkDestroyCuModuleNVX vkDestroyCuModuleNVX = 0; - PFN_vkDestroyCuFunctionNVX vkDestroyCuFunctionNVX = 0; - PFN_vkCmdCuLaunchKernelNVX vkCmdCuLaunchKernelNVX = 0; - - //=== VK_NVX_image_view_handle === - PFN_vkGetImageViewHandleNVX vkGetImageViewHandleNVX = 0; - PFN_vkGetImageViewAddressNVX vkGetImageViewAddressNVX = 0; - - //=== VK_AMD_draw_indirect_count === - PFN_vkCmdDrawIndirectCountAMD vkCmdDrawIndirectCountAMD = 0; - PFN_vkCmdDrawIndexedIndirectCountAMD vkCmdDrawIndexedIndirectCountAMD = 0; - - //=== VK_AMD_shader_info === - PFN_vkGetShaderInfoAMD vkGetShaderInfoAMD = 0; - - //=== VK_KHR_dynamic_rendering === - PFN_vkCmdBeginRenderingKHR vkCmdBeginRenderingKHR = 0; - PFN_vkCmdEndRenderingKHR vkCmdEndRenderingKHR = 0; - -#if defined( VK_USE_PLATFORM_GGP ) - //=== VK_GGP_stream_descriptor_surface === - PFN_vkCreateStreamDescriptorSurfaceGGP vkCreateStreamDescriptorSurfaceGGP = 0; -#else - PFN_dummy vkCreateStreamDescriptorSurfaceGGP_placeholder = 0; -#endif /*VK_USE_PLATFORM_GGP*/ - - //=== VK_NV_external_memory_capabilities === - PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV vkGetPhysicalDeviceExternalImageFormatPropertiesNV = 0; - -#if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_NV_external_memory_win32 === - PFN_vkGetMemoryWin32HandleNV vkGetMemoryWin32HandleNV = 0; -#else - PFN_dummy vkGetMemoryWin32HandleNV_placeholder = 0; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - //=== VK_KHR_get_physical_device_properties2 === - PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR = 0; - PFN_vkGetPhysicalDeviceProperties2KHR vkGetPhysicalDeviceProperties2KHR = 0; - PFN_vkGetPhysicalDeviceFormatProperties2KHR vkGetPhysicalDeviceFormatProperties2KHR = 0; - PFN_vkGetPhysicalDeviceImageFormatProperties2KHR vkGetPhysicalDeviceImageFormatProperties2KHR = 0; - PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR vkGetPhysicalDeviceQueueFamilyProperties2KHR = 0; - PFN_vkGetPhysicalDeviceMemoryProperties2KHR vkGetPhysicalDeviceMemoryProperties2KHR = 0; - PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR vkGetPhysicalDeviceSparseImageFormatProperties2KHR = 0; - - //=== VK_KHR_device_group === - PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR vkGetDeviceGroupPeerMemoryFeaturesKHR = 0; - PFN_vkCmdSetDeviceMaskKHR vkCmdSetDeviceMaskKHR = 0; - PFN_vkCmdDispatchBaseKHR vkCmdDispatchBaseKHR = 0; - -#if defined( VK_USE_PLATFORM_VI_NN ) - //=== VK_NN_vi_surface === - PFN_vkCreateViSurfaceNN vkCreateViSurfaceNN = 0; -#else - PFN_dummy vkCreateViSurfaceNN_placeholder = 0; 
-#endif /*VK_USE_PLATFORM_VI_NN*/ - - //=== VK_KHR_maintenance1 === - PFN_vkTrimCommandPoolKHR vkTrimCommandPoolKHR = 0; - - //=== VK_KHR_device_group_creation === - PFN_vkEnumeratePhysicalDeviceGroupsKHR vkEnumeratePhysicalDeviceGroupsKHR = 0; - - //=== VK_KHR_external_memory_capabilities === - PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR vkGetPhysicalDeviceExternalBufferPropertiesKHR = 0; - -#if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_KHR_external_memory_win32 === - PFN_vkGetMemoryWin32HandleKHR vkGetMemoryWin32HandleKHR = 0; - PFN_vkGetMemoryWin32HandlePropertiesKHR vkGetMemoryWin32HandlePropertiesKHR = 0; -#else - PFN_dummy vkGetMemoryWin32HandleKHR_placeholder = 0; - PFN_dummy vkGetMemoryWin32HandlePropertiesKHR_placeholder = 0; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - //=== VK_KHR_external_memory_fd === - PFN_vkGetMemoryFdKHR vkGetMemoryFdKHR = 0; - PFN_vkGetMemoryFdPropertiesKHR vkGetMemoryFdPropertiesKHR = 0; - - //=== VK_KHR_external_semaphore_capabilities === - PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR vkGetPhysicalDeviceExternalSemaphorePropertiesKHR = 0; - -#if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_KHR_external_semaphore_win32 === - PFN_vkImportSemaphoreWin32HandleKHR vkImportSemaphoreWin32HandleKHR = 0; - PFN_vkGetSemaphoreWin32HandleKHR vkGetSemaphoreWin32HandleKHR = 0; -#else - PFN_dummy vkImportSemaphoreWin32HandleKHR_placeholder = 0; - PFN_dummy vkGetSemaphoreWin32HandleKHR_placeholder = 0; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - //=== VK_KHR_external_semaphore_fd === - PFN_vkImportSemaphoreFdKHR vkImportSemaphoreFdKHR = 0; - PFN_vkGetSemaphoreFdKHR vkGetSemaphoreFdKHR = 0; - - //=== VK_KHR_push_descriptor === - PFN_vkCmdPushDescriptorSetKHR vkCmdPushDescriptorSetKHR = 0; - PFN_vkCmdPushDescriptorSetWithTemplateKHR vkCmdPushDescriptorSetWithTemplateKHR = 0; - - //=== VK_EXT_conditional_rendering === - PFN_vkCmdBeginConditionalRenderingEXT vkCmdBeginConditionalRenderingEXT = 0; - PFN_vkCmdEndConditionalRenderingEXT vkCmdEndConditionalRenderingEXT = 0; - - //=== VK_KHR_descriptor_update_template === - PFN_vkCreateDescriptorUpdateTemplateKHR vkCreateDescriptorUpdateTemplateKHR = 0; - PFN_vkDestroyDescriptorUpdateTemplateKHR vkDestroyDescriptorUpdateTemplateKHR = 0; - PFN_vkUpdateDescriptorSetWithTemplateKHR vkUpdateDescriptorSetWithTemplateKHR = 0; - - //=== VK_NV_clip_space_w_scaling === - PFN_vkCmdSetViewportWScalingNV vkCmdSetViewportWScalingNV = 0; - - //=== VK_EXT_direct_mode_display === - PFN_vkReleaseDisplayEXT vkReleaseDisplayEXT = 0; - -#if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT ) - //=== VK_EXT_acquire_xlib_display === - PFN_vkAcquireXlibDisplayEXT vkAcquireXlibDisplayEXT = 0; - PFN_vkGetRandROutputDisplayEXT vkGetRandROutputDisplayEXT = 0; -#else - PFN_dummy vkAcquireXlibDisplayEXT_placeholder = 0; - PFN_dummy vkGetRandROutputDisplayEXT_placeholder = 0; -#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ - - //=== VK_EXT_display_surface_counter === - PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT vkGetPhysicalDeviceSurfaceCapabilities2EXT = 0; - - //=== VK_EXT_display_control === - PFN_vkDisplayPowerControlEXT vkDisplayPowerControlEXT = 0; - PFN_vkRegisterDeviceEventEXT vkRegisterDeviceEventEXT = 0; - PFN_vkRegisterDisplayEventEXT vkRegisterDisplayEventEXT = 0; - PFN_vkGetSwapchainCounterEXT vkGetSwapchainCounterEXT = 0; - - //=== VK_GOOGLE_display_timing === - PFN_vkGetRefreshCycleDurationGOOGLE vkGetRefreshCycleDurationGOOGLE = 0; - PFN_vkGetPastPresentationTimingGOOGLE vkGetPastPresentationTimingGOOGLE = 0; - - //=== 
VK_EXT_discard_rectangles === - PFN_vkCmdSetDiscardRectangleEXT vkCmdSetDiscardRectangleEXT = 0; - PFN_vkCmdSetDiscardRectangleEnableEXT vkCmdSetDiscardRectangleEnableEXT = 0; - PFN_vkCmdSetDiscardRectangleModeEXT vkCmdSetDiscardRectangleModeEXT = 0; - - //=== VK_EXT_hdr_metadata === - PFN_vkSetHdrMetadataEXT vkSetHdrMetadataEXT = 0; - - //=== VK_KHR_create_renderpass2 === - PFN_vkCreateRenderPass2KHR vkCreateRenderPass2KHR = 0; - PFN_vkCmdBeginRenderPass2KHR vkCmdBeginRenderPass2KHR = 0; - PFN_vkCmdNextSubpass2KHR vkCmdNextSubpass2KHR = 0; - PFN_vkCmdEndRenderPass2KHR vkCmdEndRenderPass2KHR = 0; - - //=== VK_KHR_shared_presentable_image === - PFN_vkGetSwapchainStatusKHR vkGetSwapchainStatusKHR = 0; - - //=== VK_KHR_external_fence_capabilities === - PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR vkGetPhysicalDeviceExternalFencePropertiesKHR = 0; - -#if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_KHR_external_fence_win32 === - PFN_vkImportFenceWin32HandleKHR vkImportFenceWin32HandleKHR = 0; - PFN_vkGetFenceWin32HandleKHR vkGetFenceWin32HandleKHR = 0; -#else - PFN_dummy vkImportFenceWin32HandleKHR_placeholder = 0; - PFN_dummy vkGetFenceWin32HandleKHR_placeholder = 0; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - //=== VK_KHR_external_fence_fd === - PFN_vkImportFenceFdKHR vkImportFenceFdKHR = 0; - PFN_vkGetFenceFdKHR vkGetFenceFdKHR = 0; - - //=== VK_KHR_performance_query === - PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR = 0; - PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR = 0; - PFN_vkAcquireProfilingLockKHR vkAcquireProfilingLockKHR = 0; - PFN_vkReleaseProfilingLockKHR vkReleaseProfilingLockKHR = 0; - - //=== VK_KHR_get_surface_capabilities2 === - PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR vkGetPhysicalDeviceSurfaceCapabilities2KHR = 0; - PFN_vkGetPhysicalDeviceSurfaceFormats2KHR vkGetPhysicalDeviceSurfaceFormats2KHR = 0; - - //=== VK_KHR_get_display_properties2 === - PFN_vkGetPhysicalDeviceDisplayProperties2KHR vkGetPhysicalDeviceDisplayProperties2KHR = 0; - PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR vkGetPhysicalDeviceDisplayPlaneProperties2KHR = 0; - PFN_vkGetDisplayModeProperties2KHR vkGetDisplayModeProperties2KHR = 0; - PFN_vkGetDisplayPlaneCapabilities2KHR vkGetDisplayPlaneCapabilities2KHR = 0; - -#if defined( VK_USE_PLATFORM_IOS_MVK ) - //=== VK_MVK_ios_surface === - PFN_vkCreateIOSSurfaceMVK vkCreateIOSSurfaceMVK = 0; -#else - PFN_dummy vkCreateIOSSurfaceMVK_placeholder = 0; -#endif /*VK_USE_PLATFORM_IOS_MVK*/ - -#if defined( VK_USE_PLATFORM_MACOS_MVK ) - //=== VK_MVK_macos_surface === - PFN_vkCreateMacOSSurfaceMVK vkCreateMacOSSurfaceMVK = 0; -#else - PFN_dummy vkCreateMacOSSurfaceMVK_placeholder = 0; -#endif /*VK_USE_PLATFORM_MACOS_MVK*/ - - //=== VK_EXT_debug_utils === - PFN_vkSetDebugUtilsObjectNameEXT vkSetDebugUtilsObjectNameEXT = 0; - PFN_vkSetDebugUtilsObjectTagEXT vkSetDebugUtilsObjectTagEXT = 0; - PFN_vkQueueBeginDebugUtilsLabelEXT vkQueueBeginDebugUtilsLabelEXT = 0; - PFN_vkQueueEndDebugUtilsLabelEXT vkQueueEndDebugUtilsLabelEXT = 0; - PFN_vkQueueInsertDebugUtilsLabelEXT vkQueueInsertDebugUtilsLabelEXT = 0; - PFN_vkCmdBeginDebugUtilsLabelEXT vkCmdBeginDebugUtilsLabelEXT = 0; - PFN_vkCmdEndDebugUtilsLabelEXT vkCmdEndDebugUtilsLabelEXT = 0; - PFN_vkCmdInsertDebugUtilsLabelEXT vkCmdInsertDebugUtilsLabelEXT = 0; - PFN_vkCreateDebugUtilsMessengerEXT vkCreateDebugUtilsMessengerEXT = 0; - 
PFN_vkDestroyDebugUtilsMessengerEXT vkDestroyDebugUtilsMessengerEXT = 0; - PFN_vkSubmitDebugUtilsMessageEXT vkSubmitDebugUtilsMessageEXT = 0; - -#if defined( VK_USE_PLATFORM_ANDROID_KHR ) - //=== VK_ANDROID_external_memory_android_hardware_buffer === - PFN_vkGetAndroidHardwareBufferPropertiesANDROID vkGetAndroidHardwareBufferPropertiesANDROID = 0; - PFN_vkGetMemoryAndroidHardwareBufferANDROID vkGetMemoryAndroidHardwareBufferANDROID = 0; -#else - PFN_dummy vkGetAndroidHardwareBufferPropertiesANDROID_placeholder = 0; - PFN_dummy vkGetMemoryAndroidHardwareBufferANDROID_placeholder = 0; -#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ - -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - //=== VK_AMDX_shader_enqueue === - PFN_vkCreateExecutionGraphPipelinesAMDX vkCreateExecutionGraphPipelinesAMDX = 0; - PFN_vkGetExecutionGraphPipelineScratchSizeAMDX vkGetExecutionGraphPipelineScratchSizeAMDX = 0; - PFN_vkGetExecutionGraphPipelineNodeIndexAMDX vkGetExecutionGraphPipelineNodeIndexAMDX = 0; - PFN_vkCmdInitializeGraphScratchMemoryAMDX vkCmdInitializeGraphScratchMemoryAMDX = 0; - PFN_vkCmdDispatchGraphAMDX vkCmdDispatchGraphAMDX = 0; - PFN_vkCmdDispatchGraphIndirectAMDX vkCmdDispatchGraphIndirectAMDX = 0; - PFN_vkCmdDispatchGraphIndirectCountAMDX vkCmdDispatchGraphIndirectCountAMDX = 0; -#else - PFN_dummy vkCreateExecutionGraphPipelinesAMDX_placeholder = 0; - PFN_dummy vkGetExecutionGraphPipelineScratchSizeAMDX_placeholder = 0; - PFN_dummy vkGetExecutionGraphPipelineNodeIndexAMDX_placeholder = 0; - PFN_dummy vkCmdInitializeGraphScratchMemoryAMDX_placeholder = 0; - PFN_dummy vkCmdDispatchGraphAMDX_placeholder = 0; - PFN_dummy vkCmdDispatchGraphIndirectAMDX_placeholder = 0; - PFN_dummy vkCmdDispatchGraphIndirectCountAMDX_placeholder = 0; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - - //=== VK_EXT_sample_locations === - PFN_vkCmdSetSampleLocationsEXT vkCmdSetSampleLocationsEXT = 0; - PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT vkGetPhysicalDeviceMultisamplePropertiesEXT = 0; - - //=== VK_KHR_get_memory_requirements2 === - PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR = 0; - PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR = 0; - PFN_vkGetImageSparseMemoryRequirements2KHR vkGetImageSparseMemoryRequirements2KHR = 0; - - //=== VK_KHR_acceleration_structure === - PFN_vkCreateAccelerationStructureKHR vkCreateAccelerationStructureKHR = 0; - PFN_vkDestroyAccelerationStructureKHR vkDestroyAccelerationStructureKHR = 0; - PFN_vkCmdBuildAccelerationStructuresKHR vkCmdBuildAccelerationStructuresKHR = 0; - PFN_vkCmdBuildAccelerationStructuresIndirectKHR vkCmdBuildAccelerationStructuresIndirectKHR = 0; - PFN_vkBuildAccelerationStructuresKHR vkBuildAccelerationStructuresKHR = 0; - PFN_vkCopyAccelerationStructureKHR vkCopyAccelerationStructureKHR = 0; - PFN_vkCopyAccelerationStructureToMemoryKHR vkCopyAccelerationStructureToMemoryKHR = 0; - PFN_vkCopyMemoryToAccelerationStructureKHR vkCopyMemoryToAccelerationStructureKHR = 0; - PFN_vkWriteAccelerationStructuresPropertiesKHR vkWriteAccelerationStructuresPropertiesKHR = 0; - PFN_vkCmdCopyAccelerationStructureKHR vkCmdCopyAccelerationStructureKHR = 0; - PFN_vkCmdCopyAccelerationStructureToMemoryKHR vkCmdCopyAccelerationStructureToMemoryKHR = 0; - PFN_vkCmdCopyMemoryToAccelerationStructureKHR vkCmdCopyMemoryToAccelerationStructureKHR = 0; - PFN_vkGetAccelerationStructureDeviceAddressKHR vkGetAccelerationStructureDeviceAddressKHR = 0; - PFN_vkCmdWriteAccelerationStructuresPropertiesKHR vkCmdWriteAccelerationStructuresPropertiesKHR = 0; - 
PFN_vkGetDeviceAccelerationStructureCompatibilityKHR vkGetDeviceAccelerationStructureCompatibilityKHR = 0; - PFN_vkGetAccelerationStructureBuildSizesKHR vkGetAccelerationStructureBuildSizesKHR = 0; - - //=== VK_KHR_ray_tracing_pipeline === - PFN_vkCmdTraceRaysKHR vkCmdTraceRaysKHR = 0; - PFN_vkCreateRayTracingPipelinesKHR vkCreateRayTracingPipelinesKHR = 0; - PFN_vkGetRayTracingShaderGroupHandlesKHR vkGetRayTracingShaderGroupHandlesKHR = 0; - PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR vkGetRayTracingCaptureReplayShaderGroupHandlesKHR = 0; - PFN_vkCmdTraceRaysIndirectKHR vkCmdTraceRaysIndirectKHR = 0; - PFN_vkGetRayTracingShaderGroupStackSizeKHR vkGetRayTracingShaderGroupStackSizeKHR = 0; - PFN_vkCmdSetRayTracingPipelineStackSizeKHR vkCmdSetRayTracingPipelineStackSizeKHR = 0; - - //=== VK_KHR_sampler_ycbcr_conversion === - PFN_vkCreateSamplerYcbcrConversionKHR vkCreateSamplerYcbcrConversionKHR = 0; - PFN_vkDestroySamplerYcbcrConversionKHR vkDestroySamplerYcbcrConversionKHR = 0; - - //=== VK_KHR_bind_memory2 === - PFN_vkBindBufferMemory2KHR vkBindBufferMemory2KHR = 0; - PFN_vkBindImageMemory2KHR vkBindImageMemory2KHR = 0; - - //=== VK_EXT_image_drm_format_modifier === - PFN_vkGetImageDrmFormatModifierPropertiesEXT vkGetImageDrmFormatModifierPropertiesEXT = 0; - - //=== VK_EXT_validation_cache === - PFN_vkCreateValidationCacheEXT vkCreateValidationCacheEXT = 0; - PFN_vkDestroyValidationCacheEXT vkDestroyValidationCacheEXT = 0; - PFN_vkMergeValidationCachesEXT vkMergeValidationCachesEXT = 0; - PFN_vkGetValidationCacheDataEXT vkGetValidationCacheDataEXT = 0; - - //=== VK_NV_shading_rate_image === - PFN_vkCmdBindShadingRateImageNV vkCmdBindShadingRateImageNV = 0; - PFN_vkCmdSetViewportShadingRatePaletteNV vkCmdSetViewportShadingRatePaletteNV = 0; - PFN_vkCmdSetCoarseSampleOrderNV vkCmdSetCoarseSampleOrderNV = 0; - - //=== VK_NV_ray_tracing === - PFN_vkCreateAccelerationStructureNV vkCreateAccelerationStructureNV = 0; - PFN_vkDestroyAccelerationStructureNV vkDestroyAccelerationStructureNV = 0; - PFN_vkGetAccelerationStructureMemoryRequirementsNV vkGetAccelerationStructureMemoryRequirementsNV = 0; - PFN_vkBindAccelerationStructureMemoryNV vkBindAccelerationStructureMemoryNV = 0; - PFN_vkCmdBuildAccelerationStructureNV vkCmdBuildAccelerationStructureNV = 0; - PFN_vkCmdCopyAccelerationStructureNV vkCmdCopyAccelerationStructureNV = 0; - PFN_vkCmdTraceRaysNV vkCmdTraceRaysNV = 0; - PFN_vkCreateRayTracingPipelinesNV vkCreateRayTracingPipelinesNV = 0; - PFN_vkGetRayTracingShaderGroupHandlesNV vkGetRayTracingShaderGroupHandlesNV = 0; - PFN_vkGetAccelerationStructureHandleNV vkGetAccelerationStructureHandleNV = 0; - PFN_vkCmdWriteAccelerationStructuresPropertiesNV vkCmdWriteAccelerationStructuresPropertiesNV = 0; - PFN_vkCompileDeferredNV vkCompileDeferredNV = 0; - - //=== VK_KHR_maintenance3 === - PFN_vkGetDescriptorSetLayoutSupportKHR vkGetDescriptorSetLayoutSupportKHR = 0; - - //=== VK_KHR_draw_indirect_count === - PFN_vkCmdDrawIndirectCountKHR vkCmdDrawIndirectCountKHR = 0; - PFN_vkCmdDrawIndexedIndirectCountKHR vkCmdDrawIndexedIndirectCountKHR = 0; - - //=== VK_EXT_external_memory_host === - PFN_vkGetMemoryHostPointerPropertiesEXT vkGetMemoryHostPointerPropertiesEXT = 0; - - //=== VK_AMD_buffer_marker === - PFN_vkCmdWriteBufferMarkerAMD vkCmdWriteBufferMarkerAMD = 0; - PFN_vkCmdWriteBufferMarker2AMD vkCmdWriteBufferMarker2AMD = 0; - - //=== VK_EXT_calibrated_timestamps === - PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT vkGetPhysicalDeviceCalibrateableTimeDomainsEXT = 0; - 
PFN_vkGetCalibratedTimestampsEXT vkGetCalibratedTimestampsEXT = 0; - - //=== VK_NV_mesh_shader === - PFN_vkCmdDrawMeshTasksNV vkCmdDrawMeshTasksNV = 0; - PFN_vkCmdDrawMeshTasksIndirectNV vkCmdDrawMeshTasksIndirectNV = 0; - PFN_vkCmdDrawMeshTasksIndirectCountNV vkCmdDrawMeshTasksIndirectCountNV = 0; - - //=== VK_NV_scissor_exclusive === - PFN_vkCmdSetExclusiveScissorEnableNV vkCmdSetExclusiveScissorEnableNV = 0; - PFN_vkCmdSetExclusiveScissorNV vkCmdSetExclusiveScissorNV = 0; - - //=== VK_NV_device_diagnostic_checkpoints === - PFN_vkCmdSetCheckpointNV vkCmdSetCheckpointNV = 0; - PFN_vkGetQueueCheckpointDataNV vkGetQueueCheckpointDataNV = 0; - PFN_vkGetQueueCheckpointData2NV vkGetQueueCheckpointData2NV = 0; - - //=== VK_KHR_timeline_semaphore === - PFN_vkGetSemaphoreCounterValueKHR vkGetSemaphoreCounterValueKHR = 0; - PFN_vkWaitSemaphoresKHR vkWaitSemaphoresKHR = 0; - PFN_vkSignalSemaphoreKHR vkSignalSemaphoreKHR = 0; - - //=== VK_INTEL_performance_query === - PFN_vkInitializePerformanceApiINTEL vkInitializePerformanceApiINTEL = 0; - PFN_vkUninitializePerformanceApiINTEL vkUninitializePerformanceApiINTEL = 0; - PFN_vkCmdSetPerformanceMarkerINTEL vkCmdSetPerformanceMarkerINTEL = 0; - PFN_vkCmdSetPerformanceStreamMarkerINTEL vkCmdSetPerformanceStreamMarkerINTEL = 0; - PFN_vkCmdSetPerformanceOverrideINTEL vkCmdSetPerformanceOverrideINTEL = 0; - PFN_vkAcquirePerformanceConfigurationINTEL vkAcquirePerformanceConfigurationINTEL = 0; - PFN_vkReleasePerformanceConfigurationINTEL vkReleasePerformanceConfigurationINTEL = 0; - PFN_vkQueueSetPerformanceConfigurationINTEL vkQueueSetPerformanceConfigurationINTEL = 0; - PFN_vkGetPerformanceParameterINTEL vkGetPerformanceParameterINTEL = 0; - - //=== VK_AMD_display_native_hdr === - PFN_vkSetLocalDimmingAMD vkSetLocalDimmingAMD = 0; - -#if defined( VK_USE_PLATFORM_FUCHSIA ) - //=== VK_FUCHSIA_imagepipe_surface === - PFN_vkCreateImagePipeSurfaceFUCHSIA vkCreateImagePipeSurfaceFUCHSIA = 0; -#else - PFN_dummy vkCreateImagePipeSurfaceFUCHSIA_placeholder = 0; -#endif /*VK_USE_PLATFORM_FUCHSIA*/ - -#if defined( VK_USE_PLATFORM_METAL_EXT ) - //=== VK_EXT_metal_surface === - PFN_vkCreateMetalSurfaceEXT vkCreateMetalSurfaceEXT = 0; -#else - PFN_dummy vkCreateMetalSurfaceEXT_placeholder = 0; -#endif /*VK_USE_PLATFORM_METAL_EXT*/ - - //=== VK_KHR_fragment_shading_rate === - PFN_vkGetPhysicalDeviceFragmentShadingRatesKHR vkGetPhysicalDeviceFragmentShadingRatesKHR = 0; - PFN_vkCmdSetFragmentShadingRateKHR vkCmdSetFragmentShadingRateKHR = 0; - - //=== VK_KHR_dynamic_rendering_local_read === - PFN_vkCmdSetRenderingAttachmentLocationsKHR vkCmdSetRenderingAttachmentLocationsKHR = 0; - PFN_vkCmdSetRenderingInputAttachmentIndicesKHR vkCmdSetRenderingInputAttachmentIndicesKHR = 0; - - //=== VK_EXT_buffer_device_address === - PFN_vkGetBufferDeviceAddressEXT vkGetBufferDeviceAddressEXT = 0; - - //=== VK_EXT_tooling_info === - PFN_vkGetPhysicalDeviceToolPropertiesEXT vkGetPhysicalDeviceToolPropertiesEXT = 0; - - //=== VK_KHR_present_wait === - PFN_vkWaitForPresentKHR vkWaitForPresentKHR = 0; - - //=== VK_NV_cooperative_matrix === - PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV vkGetPhysicalDeviceCooperativeMatrixPropertiesNV = 0; - - //=== VK_NV_coverage_reduction_mode === - PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV = 0; - -#if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_EXT_full_screen_exclusive === - PFN_vkGetPhysicalDeviceSurfacePresentModes2EXT 
vkGetPhysicalDeviceSurfacePresentModes2EXT = 0; - PFN_vkAcquireFullScreenExclusiveModeEXT vkAcquireFullScreenExclusiveModeEXT = 0; - PFN_vkReleaseFullScreenExclusiveModeEXT vkReleaseFullScreenExclusiveModeEXT = 0; - PFN_vkGetDeviceGroupSurfacePresentModes2EXT vkGetDeviceGroupSurfacePresentModes2EXT = 0; -#else - PFN_dummy vkGetPhysicalDeviceSurfacePresentModes2EXT_placeholder = 0; - PFN_dummy vkAcquireFullScreenExclusiveModeEXT_placeholder = 0; - PFN_dummy vkReleaseFullScreenExclusiveModeEXT_placeholder = 0; - PFN_dummy vkGetDeviceGroupSurfacePresentModes2EXT_placeholder = 0; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - //=== VK_EXT_headless_surface === - PFN_vkCreateHeadlessSurfaceEXT vkCreateHeadlessSurfaceEXT = 0; - - //=== VK_KHR_buffer_device_address === - PFN_vkGetBufferDeviceAddressKHR vkGetBufferDeviceAddressKHR = 0; - PFN_vkGetBufferOpaqueCaptureAddressKHR vkGetBufferOpaqueCaptureAddressKHR = 0; - PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR vkGetDeviceMemoryOpaqueCaptureAddressKHR = 0; - - //=== VK_EXT_line_rasterization === - PFN_vkCmdSetLineStippleEXT vkCmdSetLineStippleEXT = 0; - - //=== VK_EXT_host_query_reset === - PFN_vkResetQueryPoolEXT vkResetQueryPoolEXT = 0; - - //=== VK_EXT_extended_dynamic_state === - PFN_vkCmdSetCullModeEXT vkCmdSetCullModeEXT = 0; - PFN_vkCmdSetFrontFaceEXT vkCmdSetFrontFaceEXT = 0; - PFN_vkCmdSetPrimitiveTopologyEXT vkCmdSetPrimitiveTopologyEXT = 0; - PFN_vkCmdSetViewportWithCountEXT vkCmdSetViewportWithCountEXT = 0; - PFN_vkCmdSetScissorWithCountEXT vkCmdSetScissorWithCountEXT = 0; - PFN_vkCmdBindVertexBuffers2EXT vkCmdBindVertexBuffers2EXT = 0; - PFN_vkCmdSetDepthTestEnableEXT vkCmdSetDepthTestEnableEXT = 0; - PFN_vkCmdSetDepthWriteEnableEXT vkCmdSetDepthWriteEnableEXT = 0; - PFN_vkCmdSetDepthCompareOpEXT vkCmdSetDepthCompareOpEXT = 0; - PFN_vkCmdSetDepthBoundsTestEnableEXT vkCmdSetDepthBoundsTestEnableEXT = 0; - PFN_vkCmdSetStencilTestEnableEXT vkCmdSetStencilTestEnableEXT = 0; - PFN_vkCmdSetStencilOpEXT vkCmdSetStencilOpEXT = 0; - - //=== VK_KHR_deferred_host_operations === - PFN_vkCreateDeferredOperationKHR vkCreateDeferredOperationKHR = 0; - PFN_vkDestroyDeferredOperationKHR vkDestroyDeferredOperationKHR = 0; - PFN_vkGetDeferredOperationMaxConcurrencyKHR vkGetDeferredOperationMaxConcurrencyKHR = 0; - PFN_vkGetDeferredOperationResultKHR vkGetDeferredOperationResultKHR = 0; - PFN_vkDeferredOperationJoinKHR vkDeferredOperationJoinKHR = 0; - - //=== VK_KHR_pipeline_executable_properties === - PFN_vkGetPipelineExecutablePropertiesKHR vkGetPipelineExecutablePropertiesKHR = 0; - PFN_vkGetPipelineExecutableStatisticsKHR vkGetPipelineExecutableStatisticsKHR = 0; - PFN_vkGetPipelineExecutableInternalRepresentationsKHR vkGetPipelineExecutableInternalRepresentationsKHR = 0; - - //=== VK_EXT_host_image_copy === - PFN_vkCopyMemoryToImageEXT vkCopyMemoryToImageEXT = 0; - PFN_vkCopyImageToMemoryEXT vkCopyImageToMemoryEXT = 0; - PFN_vkCopyImageToImageEXT vkCopyImageToImageEXT = 0; - PFN_vkTransitionImageLayoutEXT vkTransitionImageLayoutEXT = 0; - PFN_vkGetImageSubresourceLayout2EXT vkGetImageSubresourceLayout2EXT = 0; - - //=== VK_KHR_map_memory2 === - PFN_vkMapMemory2KHR vkMapMemory2KHR = 0; - PFN_vkUnmapMemory2KHR vkUnmapMemory2KHR = 0; - - //=== VK_EXT_swapchain_maintenance1 === - PFN_vkReleaseSwapchainImagesEXT vkReleaseSwapchainImagesEXT = 0; - - //=== VK_NV_device_generated_commands === - PFN_vkGetGeneratedCommandsMemoryRequirementsNV vkGetGeneratedCommandsMemoryRequirementsNV = 0; - PFN_vkCmdPreprocessGeneratedCommandsNV vkCmdPreprocessGeneratedCommandsNV 
= 0; - PFN_vkCmdExecuteGeneratedCommandsNV vkCmdExecuteGeneratedCommandsNV = 0; - PFN_vkCmdBindPipelineShaderGroupNV vkCmdBindPipelineShaderGroupNV = 0; - PFN_vkCreateIndirectCommandsLayoutNV vkCreateIndirectCommandsLayoutNV = 0; - PFN_vkDestroyIndirectCommandsLayoutNV vkDestroyIndirectCommandsLayoutNV = 0; - - //=== VK_EXT_depth_bias_control === - PFN_vkCmdSetDepthBias2EXT vkCmdSetDepthBias2EXT = 0; - - //=== VK_EXT_acquire_drm_display === - PFN_vkAcquireDrmDisplayEXT vkAcquireDrmDisplayEXT = 0; - PFN_vkGetDrmDisplayEXT vkGetDrmDisplayEXT = 0; - - //=== VK_EXT_private_data === - PFN_vkCreatePrivateDataSlotEXT vkCreatePrivateDataSlotEXT = 0; - PFN_vkDestroyPrivateDataSlotEXT vkDestroyPrivateDataSlotEXT = 0; - PFN_vkSetPrivateDataEXT vkSetPrivateDataEXT = 0; - PFN_vkGetPrivateDataEXT vkGetPrivateDataEXT = 0; - - //=== VK_KHR_video_encode_queue === - PFN_vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR = 0; - PFN_vkGetEncodedVideoSessionParametersKHR vkGetEncodedVideoSessionParametersKHR = 0; - PFN_vkCmdEncodeVideoKHR vkCmdEncodeVideoKHR = 0; - -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - //=== VK_NV_cuda_kernel_launch === - PFN_vkCreateCudaModuleNV vkCreateCudaModuleNV = 0; - PFN_vkGetCudaModuleCacheNV vkGetCudaModuleCacheNV = 0; - PFN_vkCreateCudaFunctionNV vkCreateCudaFunctionNV = 0; - PFN_vkDestroyCudaModuleNV vkDestroyCudaModuleNV = 0; - PFN_vkDestroyCudaFunctionNV vkDestroyCudaFunctionNV = 0; - PFN_vkCmdCudaLaunchKernelNV vkCmdCudaLaunchKernelNV = 0; -#else - PFN_dummy vkCreateCudaModuleNV_placeholder = 0; - PFN_dummy vkGetCudaModuleCacheNV_placeholder = 0; - PFN_dummy vkCreateCudaFunctionNV_placeholder = 0; - PFN_dummy vkDestroyCudaModuleNV_placeholder = 0; - PFN_dummy vkDestroyCudaFunctionNV_placeholder = 0; - PFN_dummy vkCmdCudaLaunchKernelNV_placeholder = 0; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - -#if defined( VK_USE_PLATFORM_METAL_EXT ) - //=== VK_EXT_metal_objects === - PFN_vkExportMetalObjectsEXT vkExportMetalObjectsEXT = 0; -#else - PFN_dummy vkExportMetalObjectsEXT_placeholder = 0; -#endif /*VK_USE_PLATFORM_METAL_EXT*/ - - //=== VK_KHR_synchronization2 === - PFN_vkCmdSetEvent2KHR vkCmdSetEvent2KHR = 0; - PFN_vkCmdResetEvent2KHR vkCmdResetEvent2KHR = 0; - PFN_vkCmdWaitEvents2KHR vkCmdWaitEvents2KHR = 0; - PFN_vkCmdPipelineBarrier2KHR vkCmdPipelineBarrier2KHR = 0; - PFN_vkCmdWriteTimestamp2KHR vkCmdWriteTimestamp2KHR = 0; - PFN_vkQueueSubmit2KHR vkQueueSubmit2KHR = 0; - - //=== VK_EXT_descriptor_buffer === - PFN_vkGetDescriptorSetLayoutSizeEXT vkGetDescriptorSetLayoutSizeEXT = 0; - PFN_vkGetDescriptorSetLayoutBindingOffsetEXT vkGetDescriptorSetLayoutBindingOffsetEXT = 0; - PFN_vkGetDescriptorEXT vkGetDescriptorEXT = 0; - PFN_vkCmdBindDescriptorBuffersEXT vkCmdBindDescriptorBuffersEXT = 0; - PFN_vkCmdSetDescriptorBufferOffsetsEXT vkCmdSetDescriptorBufferOffsetsEXT = 0; - PFN_vkCmdBindDescriptorBufferEmbeddedSamplersEXT vkCmdBindDescriptorBufferEmbeddedSamplersEXT = 0; - PFN_vkGetBufferOpaqueCaptureDescriptorDataEXT vkGetBufferOpaqueCaptureDescriptorDataEXT = 0; - PFN_vkGetImageOpaqueCaptureDescriptorDataEXT vkGetImageOpaqueCaptureDescriptorDataEXT = 0; - PFN_vkGetImageViewOpaqueCaptureDescriptorDataEXT vkGetImageViewOpaqueCaptureDescriptorDataEXT = 0; - PFN_vkGetSamplerOpaqueCaptureDescriptorDataEXT vkGetSamplerOpaqueCaptureDescriptorDataEXT = 0; - PFN_vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT = 0; - - //=== VK_NV_fragment_shading_rate_enums === - 
PFN_vkCmdSetFragmentShadingRateEnumNV vkCmdSetFragmentShadingRateEnumNV = 0; - - //=== VK_EXT_mesh_shader === - PFN_vkCmdDrawMeshTasksEXT vkCmdDrawMeshTasksEXT = 0; - PFN_vkCmdDrawMeshTasksIndirectEXT vkCmdDrawMeshTasksIndirectEXT = 0; - PFN_vkCmdDrawMeshTasksIndirectCountEXT vkCmdDrawMeshTasksIndirectCountEXT = 0; - - //=== VK_KHR_copy_commands2 === - PFN_vkCmdCopyBuffer2KHR vkCmdCopyBuffer2KHR = 0; - PFN_vkCmdCopyImage2KHR vkCmdCopyImage2KHR = 0; - PFN_vkCmdCopyBufferToImage2KHR vkCmdCopyBufferToImage2KHR = 0; - PFN_vkCmdCopyImageToBuffer2KHR vkCmdCopyImageToBuffer2KHR = 0; - PFN_vkCmdBlitImage2KHR vkCmdBlitImage2KHR = 0; - PFN_vkCmdResolveImage2KHR vkCmdResolveImage2KHR = 0; - - //=== VK_EXT_device_fault === - PFN_vkGetDeviceFaultInfoEXT vkGetDeviceFaultInfoEXT = 0; - -#if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_NV_acquire_winrt_display === - PFN_vkAcquireWinrtDisplayNV vkAcquireWinrtDisplayNV = 0; - PFN_vkGetWinrtDisplayNV vkGetWinrtDisplayNV = 0; -#else - PFN_dummy vkAcquireWinrtDisplayNV_placeholder = 0; - PFN_dummy vkGetWinrtDisplayNV_placeholder = 0; -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - -#if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) - //=== VK_EXT_directfb_surface === - PFN_vkCreateDirectFBSurfaceEXT vkCreateDirectFBSurfaceEXT = 0; - PFN_vkGetPhysicalDeviceDirectFBPresentationSupportEXT vkGetPhysicalDeviceDirectFBPresentationSupportEXT = 0; -#else - PFN_dummy vkCreateDirectFBSurfaceEXT_placeholder = 0; - PFN_dummy vkGetPhysicalDeviceDirectFBPresentationSupportEXT_placeholder = 0; -#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ - - //=== VK_EXT_vertex_input_dynamic_state === - PFN_vkCmdSetVertexInputEXT vkCmdSetVertexInputEXT = 0; - -#if defined( VK_USE_PLATFORM_FUCHSIA ) - //=== VK_FUCHSIA_external_memory === - PFN_vkGetMemoryZirconHandleFUCHSIA vkGetMemoryZirconHandleFUCHSIA = 0; - PFN_vkGetMemoryZirconHandlePropertiesFUCHSIA vkGetMemoryZirconHandlePropertiesFUCHSIA = 0; -#else - PFN_dummy vkGetMemoryZirconHandleFUCHSIA_placeholder = 0; - PFN_dummy vkGetMemoryZirconHandlePropertiesFUCHSIA_placeholder = 0; -#endif /*VK_USE_PLATFORM_FUCHSIA*/ - -#if defined( VK_USE_PLATFORM_FUCHSIA ) - //=== VK_FUCHSIA_external_semaphore === - PFN_vkImportSemaphoreZirconHandleFUCHSIA vkImportSemaphoreZirconHandleFUCHSIA = 0; - PFN_vkGetSemaphoreZirconHandleFUCHSIA vkGetSemaphoreZirconHandleFUCHSIA = 0; -#else - PFN_dummy vkImportSemaphoreZirconHandleFUCHSIA_placeholder = 0; - PFN_dummy vkGetSemaphoreZirconHandleFUCHSIA_placeholder = 0; -#endif /*VK_USE_PLATFORM_FUCHSIA*/ - -#if defined( VK_USE_PLATFORM_FUCHSIA ) - //=== VK_FUCHSIA_buffer_collection === - PFN_vkCreateBufferCollectionFUCHSIA vkCreateBufferCollectionFUCHSIA = 0; - PFN_vkSetBufferCollectionImageConstraintsFUCHSIA vkSetBufferCollectionImageConstraintsFUCHSIA = 0; - PFN_vkSetBufferCollectionBufferConstraintsFUCHSIA vkSetBufferCollectionBufferConstraintsFUCHSIA = 0; - PFN_vkDestroyBufferCollectionFUCHSIA vkDestroyBufferCollectionFUCHSIA = 0; - PFN_vkGetBufferCollectionPropertiesFUCHSIA vkGetBufferCollectionPropertiesFUCHSIA = 0; -#else - PFN_dummy vkCreateBufferCollectionFUCHSIA_placeholder = 0; - PFN_dummy vkSetBufferCollectionImageConstraintsFUCHSIA_placeholder = 0; - PFN_dummy vkSetBufferCollectionBufferConstraintsFUCHSIA_placeholder = 0; - PFN_dummy vkDestroyBufferCollectionFUCHSIA_placeholder = 0; - PFN_dummy vkGetBufferCollectionPropertiesFUCHSIA_placeholder = 0; -#endif /*VK_USE_PLATFORM_FUCHSIA*/ - - //=== VK_HUAWEI_subpass_shading === - PFN_vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI 
vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI = 0; - PFN_vkCmdSubpassShadingHUAWEI vkCmdSubpassShadingHUAWEI = 0; - - //=== VK_HUAWEI_invocation_mask === - PFN_vkCmdBindInvocationMaskHUAWEI vkCmdBindInvocationMaskHUAWEI = 0; - - //=== VK_NV_external_memory_rdma === - PFN_vkGetMemoryRemoteAddressNV vkGetMemoryRemoteAddressNV = 0; - - //=== VK_EXT_pipeline_properties === - PFN_vkGetPipelinePropertiesEXT vkGetPipelinePropertiesEXT = 0; - - //=== VK_EXT_extended_dynamic_state2 === - PFN_vkCmdSetPatchControlPointsEXT vkCmdSetPatchControlPointsEXT = 0; - PFN_vkCmdSetRasterizerDiscardEnableEXT vkCmdSetRasterizerDiscardEnableEXT = 0; - PFN_vkCmdSetDepthBiasEnableEXT vkCmdSetDepthBiasEnableEXT = 0; - PFN_vkCmdSetLogicOpEXT vkCmdSetLogicOpEXT = 0; - PFN_vkCmdSetPrimitiveRestartEnableEXT vkCmdSetPrimitiveRestartEnableEXT = 0; - -#if defined( VK_USE_PLATFORM_SCREEN_QNX ) - //=== VK_QNX_screen_surface === - PFN_vkCreateScreenSurfaceQNX vkCreateScreenSurfaceQNX = 0; - PFN_vkGetPhysicalDeviceScreenPresentationSupportQNX vkGetPhysicalDeviceScreenPresentationSupportQNX = 0; -#else - PFN_dummy vkCreateScreenSurfaceQNX_placeholder = 0; - PFN_dummy vkGetPhysicalDeviceScreenPresentationSupportQNX_placeholder = 0; -#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ - - //=== VK_EXT_color_write_enable === - PFN_vkCmdSetColorWriteEnableEXT vkCmdSetColorWriteEnableEXT = 0; - - //=== VK_KHR_ray_tracing_maintenance1 === - PFN_vkCmdTraceRaysIndirect2KHR vkCmdTraceRaysIndirect2KHR = 0; - - //=== VK_EXT_multi_draw === - PFN_vkCmdDrawMultiEXT vkCmdDrawMultiEXT = 0; - PFN_vkCmdDrawMultiIndexedEXT vkCmdDrawMultiIndexedEXT = 0; - - //=== VK_EXT_opacity_micromap === - PFN_vkCreateMicromapEXT vkCreateMicromapEXT = 0; - PFN_vkDestroyMicromapEXT vkDestroyMicromapEXT = 0; - PFN_vkCmdBuildMicromapsEXT vkCmdBuildMicromapsEXT = 0; - PFN_vkBuildMicromapsEXT vkBuildMicromapsEXT = 0; - PFN_vkCopyMicromapEXT vkCopyMicromapEXT = 0; - PFN_vkCopyMicromapToMemoryEXT vkCopyMicromapToMemoryEXT = 0; - PFN_vkCopyMemoryToMicromapEXT vkCopyMemoryToMicromapEXT = 0; - PFN_vkWriteMicromapsPropertiesEXT vkWriteMicromapsPropertiesEXT = 0; - PFN_vkCmdCopyMicromapEXT vkCmdCopyMicromapEXT = 0; - PFN_vkCmdCopyMicromapToMemoryEXT vkCmdCopyMicromapToMemoryEXT = 0; - PFN_vkCmdCopyMemoryToMicromapEXT vkCmdCopyMemoryToMicromapEXT = 0; - PFN_vkCmdWriteMicromapsPropertiesEXT vkCmdWriteMicromapsPropertiesEXT = 0; - PFN_vkGetDeviceMicromapCompatibilityEXT vkGetDeviceMicromapCompatibilityEXT = 0; - PFN_vkGetMicromapBuildSizesEXT vkGetMicromapBuildSizesEXT = 0; - - //=== VK_HUAWEI_cluster_culling_shader === - PFN_vkCmdDrawClusterHUAWEI vkCmdDrawClusterHUAWEI = 0; - PFN_vkCmdDrawClusterIndirectHUAWEI vkCmdDrawClusterIndirectHUAWEI = 0; - - //=== VK_EXT_pageable_device_local_memory === - PFN_vkSetDeviceMemoryPriorityEXT vkSetDeviceMemoryPriorityEXT = 0; - - //=== VK_KHR_maintenance4 === - PFN_vkGetDeviceBufferMemoryRequirementsKHR vkGetDeviceBufferMemoryRequirementsKHR = 0; - PFN_vkGetDeviceImageMemoryRequirementsKHR vkGetDeviceImageMemoryRequirementsKHR = 0; - PFN_vkGetDeviceImageSparseMemoryRequirementsKHR vkGetDeviceImageSparseMemoryRequirementsKHR = 0; - - //=== VK_VALVE_descriptor_set_host_mapping === - PFN_vkGetDescriptorSetLayoutHostMappingInfoVALVE vkGetDescriptorSetLayoutHostMappingInfoVALVE = 0; - PFN_vkGetDescriptorSetHostMappingVALVE vkGetDescriptorSetHostMappingVALVE = 0; - - //=== VK_NV_copy_memory_indirect === - PFN_vkCmdCopyMemoryIndirectNV vkCmdCopyMemoryIndirectNV = 0; - PFN_vkCmdCopyMemoryToImageIndirectNV vkCmdCopyMemoryToImageIndirectNV = 0; - - //=== 
VK_NV_memory_decompression === - PFN_vkCmdDecompressMemoryNV vkCmdDecompressMemoryNV = 0; - PFN_vkCmdDecompressMemoryIndirectCountNV vkCmdDecompressMemoryIndirectCountNV = 0; - - //=== VK_NV_device_generated_commands_compute === - PFN_vkGetPipelineIndirectMemoryRequirementsNV vkGetPipelineIndirectMemoryRequirementsNV = 0; - PFN_vkCmdUpdatePipelineIndirectBufferNV vkCmdUpdatePipelineIndirectBufferNV = 0; - PFN_vkGetPipelineIndirectDeviceAddressNV vkGetPipelineIndirectDeviceAddressNV = 0; - - //=== VK_EXT_extended_dynamic_state3 === - PFN_vkCmdSetDepthClampEnableEXT vkCmdSetDepthClampEnableEXT = 0; - PFN_vkCmdSetPolygonModeEXT vkCmdSetPolygonModeEXT = 0; - PFN_vkCmdSetRasterizationSamplesEXT vkCmdSetRasterizationSamplesEXT = 0; - PFN_vkCmdSetSampleMaskEXT vkCmdSetSampleMaskEXT = 0; - PFN_vkCmdSetAlphaToCoverageEnableEXT vkCmdSetAlphaToCoverageEnableEXT = 0; - PFN_vkCmdSetAlphaToOneEnableEXT vkCmdSetAlphaToOneEnableEXT = 0; - PFN_vkCmdSetLogicOpEnableEXT vkCmdSetLogicOpEnableEXT = 0; - PFN_vkCmdSetColorBlendEnableEXT vkCmdSetColorBlendEnableEXT = 0; - PFN_vkCmdSetColorBlendEquationEXT vkCmdSetColorBlendEquationEXT = 0; - PFN_vkCmdSetColorWriteMaskEXT vkCmdSetColorWriteMaskEXT = 0; - PFN_vkCmdSetTessellationDomainOriginEXT vkCmdSetTessellationDomainOriginEXT = 0; - PFN_vkCmdSetRasterizationStreamEXT vkCmdSetRasterizationStreamEXT = 0; - PFN_vkCmdSetConservativeRasterizationModeEXT vkCmdSetConservativeRasterizationModeEXT = 0; - PFN_vkCmdSetExtraPrimitiveOverestimationSizeEXT vkCmdSetExtraPrimitiveOverestimationSizeEXT = 0; - PFN_vkCmdSetDepthClipEnableEXT vkCmdSetDepthClipEnableEXT = 0; - PFN_vkCmdSetSampleLocationsEnableEXT vkCmdSetSampleLocationsEnableEXT = 0; - PFN_vkCmdSetColorBlendAdvancedEXT vkCmdSetColorBlendAdvancedEXT = 0; - PFN_vkCmdSetProvokingVertexModeEXT vkCmdSetProvokingVertexModeEXT = 0; - PFN_vkCmdSetLineRasterizationModeEXT vkCmdSetLineRasterizationModeEXT = 0; - PFN_vkCmdSetLineStippleEnableEXT vkCmdSetLineStippleEnableEXT = 0; - PFN_vkCmdSetDepthClipNegativeOneToOneEXT vkCmdSetDepthClipNegativeOneToOneEXT = 0; - PFN_vkCmdSetViewportWScalingEnableNV vkCmdSetViewportWScalingEnableNV = 0; - PFN_vkCmdSetViewportSwizzleNV vkCmdSetViewportSwizzleNV = 0; - PFN_vkCmdSetCoverageToColorEnableNV vkCmdSetCoverageToColorEnableNV = 0; - PFN_vkCmdSetCoverageToColorLocationNV vkCmdSetCoverageToColorLocationNV = 0; - PFN_vkCmdSetCoverageModulationModeNV vkCmdSetCoverageModulationModeNV = 0; - PFN_vkCmdSetCoverageModulationTableEnableNV vkCmdSetCoverageModulationTableEnableNV = 0; - PFN_vkCmdSetCoverageModulationTableNV vkCmdSetCoverageModulationTableNV = 0; - PFN_vkCmdSetShadingRateImageEnableNV vkCmdSetShadingRateImageEnableNV = 0; - PFN_vkCmdSetRepresentativeFragmentTestEnableNV vkCmdSetRepresentativeFragmentTestEnableNV = 0; - PFN_vkCmdSetCoverageReductionModeNV vkCmdSetCoverageReductionModeNV = 0; - - //=== VK_EXT_shader_module_identifier === - PFN_vkGetShaderModuleIdentifierEXT vkGetShaderModuleIdentifierEXT = 0; - PFN_vkGetShaderModuleCreateInfoIdentifierEXT vkGetShaderModuleCreateInfoIdentifierEXT = 0; - - //=== VK_NV_optical_flow === - PFN_vkGetPhysicalDeviceOpticalFlowImageFormatsNV vkGetPhysicalDeviceOpticalFlowImageFormatsNV = 0; - PFN_vkCreateOpticalFlowSessionNV vkCreateOpticalFlowSessionNV = 0; - PFN_vkDestroyOpticalFlowSessionNV vkDestroyOpticalFlowSessionNV = 0; - PFN_vkBindOpticalFlowSessionImageNV vkBindOpticalFlowSessionImageNV = 0; - PFN_vkCmdOpticalFlowExecuteNV vkCmdOpticalFlowExecuteNV = 0; - - //=== VK_KHR_maintenance5 === - PFN_vkCmdBindIndexBuffer2KHR 
vkCmdBindIndexBuffer2KHR = 0; - PFN_vkGetRenderingAreaGranularityKHR vkGetRenderingAreaGranularityKHR = 0; - PFN_vkGetDeviceImageSubresourceLayoutKHR vkGetDeviceImageSubresourceLayoutKHR = 0; - PFN_vkGetImageSubresourceLayout2KHR vkGetImageSubresourceLayout2KHR = 0; - - //=== VK_AMD_anti_lag === - PFN_vkAntiLagUpdateAMD vkAntiLagUpdateAMD = 0; - - //=== VK_EXT_shader_object === - PFN_vkCreateShadersEXT vkCreateShadersEXT = 0; - PFN_vkDestroyShaderEXT vkDestroyShaderEXT = 0; - PFN_vkGetShaderBinaryDataEXT vkGetShaderBinaryDataEXT = 0; - PFN_vkCmdBindShadersEXT vkCmdBindShadersEXT = 0; - PFN_vkCmdSetDepthClampRangeEXT vkCmdSetDepthClampRangeEXT = 0; - - //=== VK_KHR_pipeline_binary === - PFN_vkCreatePipelineBinariesKHR vkCreatePipelineBinariesKHR = 0; - PFN_vkDestroyPipelineBinaryKHR vkDestroyPipelineBinaryKHR = 0; - PFN_vkGetPipelineKeyKHR vkGetPipelineKeyKHR = 0; - PFN_vkGetPipelineBinaryDataKHR vkGetPipelineBinaryDataKHR = 0; - PFN_vkReleaseCapturedPipelineDataKHR vkReleaseCapturedPipelineDataKHR = 0; - - //=== VK_QCOM_tile_properties === - PFN_vkGetFramebufferTilePropertiesQCOM vkGetFramebufferTilePropertiesQCOM = 0; - PFN_vkGetDynamicRenderingTilePropertiesQCOM vkGetDynamicRenderingTilePropertiesQCOM = 0; - - //=== VK_NV_low_latency2 === - PFN_vkSetLatencySleepModeNV vkSetLatencySleepModeNV = 0; - PFN_vkLatencySleepNV vkLatencySleepNV = 0; - PFN_vkSetLatencyMarkerNV vkSetLatencyMarkerNV = 0; - PFN_vkGetLatencyTimingsNV vkGetLatencyTimingsNV = 0; - PFN_vkQueueNotifyOutOfBandNV vkQueueNotifyOutOfBandNV = 0; - - //=== VK_KHR_cooperative_matrix === - PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR = 0; - - //=== VK_EXT_attachment_feedback_loop_dynamic_state === - PFN_vkCmdSetAttachmentFeedbackLoopEnableEXT vkCmdSetAttachmentFeedbackLoopEnableEXT = 0; - -#if defined( VK_USE_PLATFORM_SCREEN_QNX ) - //=== VK_QNX_external_memory_screen_buffer === - PFN_vkGetScreenBufferPropertiesQNX vkGetScreenBufferPropertiesQNX = 0; -#else - PFN_dummy vkGetScreenBufferPropertiesQNX_placeholder = 0; -#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ - - //=== VK_KHR_line_rasterization === - PFN_vkCmdSetLineStippleKHR vkCmdSetLineStippleKHR = 0; - - //=== VK_KHR_calibrated_timestamps === - PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsKHR vkGetPhysicalDeviceCalibrateableTimeDomainsKHR = 0; - PFN_vkGetCalibratedTimestampsKHR vkGetCalibratedTimestampsKHR = 0; - - //=== VK_KHR_maintenance6 === - PFN_vkCmdBindDescriptorSets2KHR vkCmdBindDescriptorSets2KHR = 0; - PFN_vkCmdPushConstants2KHR vkCmdPushConstants2KHR = 0; - PFN_vkCmdPushDescriptorSet2KHR vkCmdPushDescriptorSet2KHR = 0; - PFN_vkCmdPushDescriptorSetWithTemplate2KHR vkCmdPushDescriptorSetWithTemplate2KHR = 0; - PFN_vkCmdSetDescriptorBufferOffsets2EXT vkCmdSetDescriptorBufferOffsets2EXT = 0; - PFN_vkCmdBindDescriptorBufferEmbeddedSamplers2EXT vkCmdBindDescriptorBufferEmbeddedSamplers2EXT = 0; - - //=== VK_EXT_device_generated_commands === - PFN_vkGetGeneratedCommandsMemoryRequirementsEXT vkGetGeneratedCommandsMemoryRequirementsEXT = 0; - PFN_vkCmdPreprocessGeneratedCommandsEXT vkCmdPreprocessGeneratedCommandsEXT = 0; - PFN_vkCmdExecuteGeneratedCommandsEXT vkCmdExecuteGeneratedCommandsEXT = 0; - PFN_vkCreateIndirectCommandsLayoutEXT vkCreateIndirectCommandsLayoutEXT = 0; - PFN_vkDestroyIndirectCommandsLayoutEXT vkDestroyIndirectCommandsLayoutEXT = 0; - PFN_vkCreateIndirectExecutionSetEXT vkCreateIndirectExecutionSetEXT = 0; - PFN_vkDestroyIndirectExecutionSetEXT vkDestroyIndirectExecutionSetEXT = 0; - 
-    PFN_vkUpdateIndirectExecutionSetPipelineEXT vkUpdateIndirectExecutionSetPipelineEXT = 0;
-    PFN_vkUpdateIndirectExecutionSetShaderEXT vkUpdateIndirectExecutionSetShaderEXT = 0;
-
-    //=== VK_NV_cooperative_matrix2 ===
-    PFN_vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV = 0;
-
-  public:
-    DispatchLoaderDynamic() VULKAN_HPP_NOEXCEPT = default;
-    DispatchLoaderDynamic( DispatchLoaderDynamic const & rhs ) VULKAN_HPP_NOEXCEPT = default;
-
-    DispatchLoaderDynamic( PFN_vkGetInstanceProcAddr getInstanceProcAddr ) VULKAN_HPP_NOEXCEPT
+  class DispatchLoaderDynamic : public DispatchLoaderBase {
-      init( getInstanceProcAddr );
-    }
-
-    // This interface does not require a linked vulkan library.
-    DispatchLoaderDynamic( VkInstance instance,
-                           PFN_vkGetInstanceProcAddr getInstanceProcAddr,
-                           VkDevice device = {},
-                           PFN_vkGetDeviceProcAddr getDeviceProcAddr = nullptr ) VULKAN_HPP_NOEXCEPT
-    {
-      init( instance, getInstanceProcAddr, device, getDeviceProcAddr );
-    }
-
-    template <typename DynamicLoader>
-    void init()
-    {
-      static DynamicLoader dl;
-      init( dl );
-    }
-
-    template <typename DynamicLoader>
-    void init( DynamicLoader const & dl ) VULKAN_HPP_NOEXCEPT
-    {
-      PFN_vkGetInstanceProcAddr getInstanceProcAddr = dl.template getProcAddress<PFN_vkGetInstanceProcAddr>( "vkGetInstanceProcAddr" );
-      init( getInstanceProcAddr );
-    }
-
-    void init( PFN_vkGetInstanceProcAddr getInstanceProcAddr ) VULKAN_HPP_NOEXCEPT
-    {
-      VULKAN_HPP_ASSERT( getInstanceProcAddr );
-
-      vkGetInstanceProcAddr = getInstanceProcAddr;
-
+  public:
     //=== VK_VERSION_1_0 ===
-      vkCreateInstance = PFN_vkCreateInstance( vkGetInstanceProcAddr( NULL, "vkCreateInstance" ) );
-      vkEnumerateInstanceExtensionProperties =
-        PFN_vkEnumerateInstanceExtensionProperties( vkGetInstanceProcAddr( NULL, "vkEnumerateInstanceExtensionProperties" ) );
-      vkEnumerateInstanceLayerProperties = PFN_vkEnumerateInstanceLayerProperties( vkGetInstanceProcAddr( NULL, "vkEnumerateInstanceLayerProperties" ) );
+    PFN_vkCreateInstance vkCreateInstance = 0;
+    PFN_vkDestroyInstance vkDestroyInstance = 0;
+    PFN_vkEnumeratePhysicalDevices vkEnumeratePhysicalDevices = 0;
+    PFN_vkGetPhysicalDeviceFeatures vkGetPhysicalDeviceFeatures = 0;
+    PFN_vkGetPhysicalDeviceFormatProperties vkGetPhysicalDeviceFormatProperties = 0;
+    PFN_vkGetPhysicalDeviceImageFormatProperties vkGetPhysicalDeviceImageFormatProperties = 0;
+    PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties = 0;
+    PFN_vkGetPhysicalDeviceQueueFamilyProperties vkGetPhysicalDeviceQueueFamilyProperties = 0;
+    PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties = 0;
+    PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr = 0;
+    PFN_vkGetDeviceProcAddr vkGetDeviceProcAddr = 0;
+    PFN_vkCreateDevice vkCreateDevice = 0;
+    PFN_vkDestroyDevice vkDestroyDevice = 0;
+    PFN_vkEnumerateInstanceExtensionProperties vkEnumerateInstanceExtensionProperties = 0;
+    PFN_vkEnumerateDeviceExtensionProperties vkEnumerateDeviceExtensionProperties = 0;
+    PFN_vkEnumerateInstanceLayerProperties vkEnumerateInstanceLayerProperties = 0;
+    PFN_vkEnumerateDeviceLayerProperties vkEnumerateDeviceLayerProperties = 0;
+    PFN_vkGetDeviceQueue vkGetDeviceQueue = 0;
+    PFN_vkQueueSubmit vkQueueSubmit = 0;
+    PFN_vkQueueWaitIdle vkQueueWaitIdle = 0;
+    PFN_vkDeviceWaitIdle vkDeviceWaitIdle = 0;
+    PFN_vkAllocateMemory vkAllocateMemory = 0;
+    PFN_vkFreeMemory vkFreeMemory = 0;
+    PFN_vkMapMemory vkMapMemory = 0;
+    PFN_vkUnmapMemory vkUnmapMemory = 0;
+    PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges = 0;
+
PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges = 0; + PFN_vkGetDeviceMemoryCommitment vkGetDeviceMemoryCommitment = 0; + PFN_vkBindBufferMemory vkBindBufferMemory = 0; + PFN_vkBindImageMemory vkBindImageMemory = 0; + PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements = 0; + PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements = 0; + PFN_vkGetImageSparseMemoryRequirements vkGetImageSparseMemoryRequirements = 0; + PFN_vkGetPhysicalDeviceSparseImageFormatProperties vkGetPhysicalDeviceSparseImageFormatProperties = 0; + PFN_vkQueueBindSparse vkQueueBindSparse = 0; + PFN_vkCreateFence vkCreateFence = 0; + PFN_vkDestroyFence vkDestroyFence = 0; + PFN_vkResetFences vkResetFences = 0; + PFN_vkGetFenceStatus vkGetFenceStatus = 0; + PFN_vkWaitForFences vkWaitForFences = 0; + PFN_vkCreateSemaphore vkCreateSemaphore = 0; + PFN_vkDestroySemaphore vkDestroySemaphore = 0; + PFN_vkCreateEvent vkCreateEvent = 0; + PFN_vkDestroyEvent vkDestroyEvent = 0; + PFN_vkGetEventStatus vkGetEventStatus = 0; + PFN_vkSetEvent vkSetEvent = 0; + PFN_vkResetEvent vkResetEvent = 0; + PFN_vkCreateQueryPool vkCreateQueryPool = 0; + PFN_vkDestroyQueryPool vkDestroyQueryPool = 0; + PFN_vkGetQueryPoolResults vkGetQueryPoolResults = 0; + PFN_vkCreateBuffer vkCreateBuffer = 0; + PFN_vkDestroyBuffer vkDestroyBuffer = 0; + PFN_vkCreateBufferView vkCreateBufferView = 0; + PFN_vkDestroyBufferView vkDestroyBufferView = 0; + PFN_vkCreateImage vkCreateImage = 0; + PFN_vkDestroyImage vkDestroyImage = 0; + PFN_vkGetImageSubresourceLayout vkGetImageSubresourceLayout = 0; + PFN_vkCreateImageView vkCreateImageView = 0; + PFN_vkDestroyImageView vkDestroyImageView = 0; + PFN_vkCreateShaderModule vkCreateShaderModule = 0; + PFN_vkDestroyShaderModule vkDestroyShaderModule = 0; + PFN_vkCreatePipelineCache vkCreatePipelineCache = 0; + PFN_vkDestroyPipelineCache vkDestroyPipelineCache = 0; + PFN_vkGetPipelineCacheData vkGetPipelineCacheData = 0; + PFN_vkMergePipelineCaches vkMergePipelineCaches = 0; + PFN_vkCreateGraphicsPipelines vkCreateGraphicsPipelines = 0; + PFN_vkCreateComputePipelines vkCreateComputePipelines = 0; + PFN_vkDestroyPipeline vkDestroyPipeline = 0; + PFN_vkCreatePipelineLayout vkCreatePipelineLayout = 0; + PFN_vkDestroyPipelineLayout vkDestroyPipelineLayout = 0; + PFN_vkCreateSampler vkCreateSampler = 0; + PFN_vkDestroySampler vkDestroySampler = 0; + PFN_vkCreateDescriptorSetLayout vkCreateDescriptorSetLayout = 0; + PFN_vkDestroyDescriptorSetLayout vkDestroyDescriptorSetLayout = 0; + PFN_vkCreateDescriptorPool vkCreateDescriptorPool = 0; + PFN_vkDestroyDescriptorPool vkDestroyDescriptorPool = 0; + PFN_vkResetDescriptorPool vkResetDescriptorPool = 0; + PFN_vkAllocateDescriptorSets vkAllocateDescriptorSets = 0; + PFN_vkFreeDescriptorSets vkFreeDescriptorSets = 0; + PFN_vkUpdateDescriptorSets vkUpdateDescriptorSets = 0; + PFN_vkCreateFramebuffer vkCreateFramebuffer = 0; + PFN_vkDestroyFramebuffer vkDestroyFramebuffer = 0; + PFN_vkCreateRenderPass vkCreateRenderPass = 0; + PFN_vkDestroyRenderPass vkDestroyRenderPass = 0; + PFN_vkGetRenderAreaGranularity vkGetRenderAreaGranularity = 0; + PFN_vkCreateCommandPool vkCreateCommandPool = 0; + PFN_vkDestroyCommandPool vkDestroyCommandPool = 0; + PFN_vkResetCommandPool vkResetCommandPool = 0; + PFN_vkAllocateCommandBuffers vkAllocateCommandBuffers = 0; + PFN_vkFreeCommandBuffers vkFreeCommandBuffers = 0; + PFN_vkBeginCommandBuffer vkBeginCommandBuffer = 0; + PFN_vkEndCommandBuffer vkEndCommandBuffer = 0; + PFN_vkResetCommandBuffer vkResetCommandBuffer 
= 0; + PFN_vkCmdBindPipeline vkCmdBindPipeline = 0; + PFN_vkCmdSetViewport vkCmdSetViewport = 0; + PFN_vkCmdSetScissor vkCmdSetScissor = 0; + PFN_vkCmdSetLineWidth vkCmdSetLineWidth = 0; + PFN_vkCmdSetDepthBias vkCmdSetDepthBias = 0; + PFN_vkCmdSetBlendConstants vkCmdSetBlendConstants = 0; + PFN_vkCmdSetDepthBounds vkCmdSetDepthBounds = 0; + PFN_vkCmdSetStencilCompareMask vkCmdSetStencilCompareMask = 0; + PFN_vkCmdSetStencilWriteMask vkCmdSetStencilWriteMask = 0; + PFN_vkCmdSetStencilReference vkCmdSetStencilReference = 0; + PFN_vkCmdBindDescriptorSets vkCmdBindDescriptorSets = 0; + PFN_vkCmdBindIndexBuffer vkCmdBindIndexBuffer = 0; + PFN_vkCmdBindVertexBuffers vkCmdBindVertexBuffers = 0; + PFN_vkCmdDraw vkCmdDraw = 0; + PFN_vkCmdDrawIndexed vkCmdDrawIndexed = 0; + PFN_vkCmdDrawIndirect vkCmdDrawIndirect = 0; + PFN_vkCmdDrawIndexedIndirect vkCmdDrawIndexedIndirect = 0; + PFN_vkCmdDispatch vkCmdDispatch = 0; + PFN_vkCmdDispatchIndirect vkCmdDispatchIndirect = 0; + PFN_vkCmdCopyBuffer vkCmdCopyBuffer = 0; + PFN_vkCmdCopyImage vkCmdCopyImage = 0; + PFN_vkCmdBlitImage vkCmdBlitImage = 0; + PFN_vkCmdCopyBufferToImage vkCmdCopyBufferToImage = 0; + PFN_vkCmdCopyImageToBuffer vkCmdCopyImageToBuffer = 0; + PFN_vkCmdUpdateBuffer vkCmdUpdateBuffer = 0; + PFN_vkCmdFillBuffer vkCmdFillBuffer = 0; + PFN_vkCmdClearColorImage vkCmdClearColorImage = 0; + PFN_vkCmdClearDepthStencilImage vkCmdClearDepthStencilImage = 0; + PFN_vkCmdClearAttachments vkCmdClearAttachments = 0; + PFN_vkCmdResolveImage vkCmdResolveImage = 0; + PFN_vkCmdSetEvent vkCmdSetEvent = 0; + PFN_vkCmdResetEvent vkCmdResetEvent = 0; + PFN_vkCmdWaitEvents vkCmdWaitEvents = 0; + PFN_vkCmdPipelineBarrier vkCmdPipelineBarrier = 0; + PFN_vkCmdBeginQuery vkCmdBeginQuery = 0; + PFN_vkCmdEndQuery vkCmdEndQuery = 0; + PFN_vkCmdResetQueryPool vkCmdResetQueryPool = 0; + PFN_vkCmdWriteTimestamp vkCmdWriteTimestamp = 0; + PFN_vkCmdCopyQueryPoolResults vkCmdCopyQueryPoolResults = 0; + PFN_vkCmdPushConstants vkCmdPushConstants = 0; + PFN_vkCmdBeginRenderPass vkCmdBeginRenderPass = 0; + PFN_vkCmdNextSubpass vkCmdNextSubpass = 0; + PFN_vkCmdEndRenderPass vkCmdEndRenderPass = 0; + PFN_vkCmdExecuteCommands vkCmdExecuteCommands = 0; //=== VK_VERSION_1_1 === - vkEnumerateInstanceVersion = PFN_vkEnumerateInstanceVersion( vkGetInstanceProcAddr( NULL, "vkEnumerateInstanceVersion" ) ); - } - - // This interface does not require a linked vulkan library. 
-    void init( VkInstance instance,
-               PFN_vkGetInstanceProcAddr getInstanceProcAddr,
-               VkDevice device = {},
-               PFN_vkGetDeviceProcAddr /*getDeviceProcAddr*/ = nullptr ) VULKAN_HPP_NOEXCEPT
-    {
-      VULKAN_HPP_ASSERT( instance && getInstanceProcAddr );
-      vkGetInstanceProcAddr = getInstanceProcAddr;
-      init( VULKAN_HPP_NAMESPACE::Instance( instance ) );
-      if ( device )
-      {
-        init( VULKAN_HPP_NAMESPACE::Device( device ) );
-      }
-    }
-
-    void init( VULKAN_HPP_NAMESPACE::Instance instanceCpp ) VULKAN_HPP_NOEXCEPT
-    {
-      VkInstance instance = static_cast<VkInstance>( instanceCpp );
-
-      //=== VK_VERSION_1_0 ===
-      vkDestroyInstance = PFN_vkDestroyInstance( vkGetInstanceProcAddr( instance, "vkDestroyInstance" ) );
-      vkEnumeratePhysicalDevices = PFN_vkEnumeratePhysicalDevices( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDevices" ) );
-      vkGetPhysicalDeviceFeatures = PFN_vkGetPhysicalDeviceFeatures( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures" ) );
-      vkGetPhysicalDeviceFormatProperties = PFN_vkGetPhysicalDeviceFormatProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties" ) );
-      vkGetPhysicalDeviceImageFormatProperties =
-        PFN_vkGetPhysicalDeviceImageFormatProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties" ) );
-      vkGetPhysicalDeviceProperties = PFN_vkGetPhysicalDeviceProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties" ) );
-      vkGetPhysicalDeviceQueueFamilyProperties =
-        PFN_vkGetPhysicalDeviceQueueFamilyProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties" ) );
-      vkGetPhysicalDeviceMemoryProperties = PFN_vkGetPhysicalDeviceMemoryProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties" ) );
-      vkGetDeviceProcAddr = PFN_vkGetDeviceProcAddr( vkGetInstanceProcAddr( instance, "vkGetDeviceProcAddr" ) );
-      vkCreateDevice = PFN_vkCreateDevice( vkGetInstanceProcAddr( instance, "vkCreateDevice" ) );
-      vkDestroyDevice = PFN_vkDestroyDevice( vkGetInstanceProcAddr( instance, "vkDestroyDevice" ) );
-      vkEnumerateDeviceExtensionProperties =
-        PFN_vkEnumerateDeviceExtensionProperties( vkGetInstanceProcAddr( instance, "vkEnumerateDeviceExtensionProperties" ) );
-      vkEnumerateDeviceLayerProperties = PFN_vkEnumerateDeviceLayerProperties( vkGetInstanceProcAddr( instance, "vkEnumerateDeviceLayerProperties" ) );
-      vkGetDeviceQueue = PFN_vkGetDeviceQueue( vkGetInstanceProcAddr( instance, "vkGetDeviceQueue" ) );
-      vkQueueSubmit = PFN_vkQueueSubmit( vkGetInstanceProcAddr( instance, "vkQueueSubmit" ) );
-      vkQueueWaitIdle = PFN_vkQueueWaitIdle( vkGetInstanceProcAddr( instance, "vkQueueWaitIdle" ) );
-      vkDeviceWaitIdle = PFN_vkDeviceWaitIdle( vkGetInstanceProcAddr( instance, "vkDeviceWaitIdle" ) );
-      vkAllocateMemory = PFN_vkAllocateMemory( vkGetInstanceProcAddr( instance, "vkAllocateMemory" ) );
-      vkFreeMemory = PFN_vkFreeMemory( vkGetInstanceProcAddr( instance, "vkFreeMemory" ) );
-      vkMapMemory = PFN_vkMapMemory( vkGetInstanceProcAddr( instance, "vkMapMemory" ) );
-      vkUnmapMemory = PFN_vkUnmapMemory( vkGetInstanceProcAddr( instance, "vkUnmapMemory" ) );
-      vkFlushMappedMemoryRanges = PFN_vkFlushMappedMemoryRanges( vkGetInstanceProcAddr( instance, "vkFlushMappedMemoryRanges" ) );
-      vkInvalidateMappedMemoryRanges = PFN_vkInvalidateMappedMemoryRanges( vkGetInstanceProcAddr( instance, "vkInvalidateMappedMemoryRanges" ) );
-      vkGetDeviceMemoryCommitment = PFN_vkGetDeviceMemoryCommitment( vkGetInstanceProcAddr( instance, "vkGetDeviceMemoryCommitment" ) );
-      vkBindBufferMemory =
PFN_vkBindBufferMemory( vkGetInstanceProcAddr( instance, "vkBindBufferMemory" ) ); - vkBindImageMemory = PFN_vkBindImageMemory( vkGetInstanceProcAddr( instance, "vkBindImageMemory" ) ); - vkGetBufferMemoryRequirements = PFN_vkGetBufferMemoryRequirements( vkGetInstanceProcAddr( instance, "vkGetBufferMemoryRequirements" ) ); - vkGetImageMemoryRequirements = PFN_vkGetImageMemoryRequirements( vkGetInstanceProcAddr( instance, "vkGetImageMemoryRequirements" ) ); - vkGetImageSparseMemoryRequirements = PFN_vkGetImageSparseMemoryRequirements( vkGetInstanceProcAddr( instance, "vkGetImageSparseMemoryRequirements" ) ); - vkGetPhysicalDeviceSparseImageFormatProperties = - PFN_vkGetPhysicalDeviceSparseImageFormatProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties" ) ); - vkQueueBindSparse = PFN_vkQueueBindSparse( vkGetInstanceProcAddr( instance, "vkQueueBindSparse" ) ); - vkCreateFence = PFN_vkCreateFence( vkGetInstanceProcAddr( instance, "vkCreateFence" ) ); - vkDestroyFence = PFN_vkDestroyFence( vkGetInstanceProcAddr( instance, "vkDestroyFence" ) ); - vkResetFences = PFN_vkResetFences( vkGetInstanceProcAddr( instance, "vkResetFences" ) ); - vkGetFenceStatus = PFN_vkGetFenceStatus( vkGetInstanceProcAddr( instance, "vkGetFenceStatus" ) ); - vkWaitForFences = PFN_vkWaitForFences( vkGetInstanceProcAddr( instance, "vkWaitForFences" ) ); - vkCreateSemaphore = PFN_vkCreateSemaphore( vkGetInstanceProcAddr( instance, "vkCreateSemaphore" ) ); - vkDestroySemaphore = PFN_vkDestroySemaphore( vkGetInstanceProcAddr( instance, "vkDestroySemaphore" ) ); - vkCreateEvent = PFN_vkCreateEvent( vkGetInstanceProcAddr( instance, "vkCreateEvent" ) ); - vkDestroyEvent = PFN_vkDestroyEvent( vkGetInstanceProcAddr( instance, "vkDestroyEvent" ) ); - vkGetEventStatus = PFN_vkGetEventStatus( vkGetInstanceProcAddr( instance, "vkGetEventStatus" ) ); - vkSetEvent = PFN_vkSetEvent( vkGetInstanceProcAddr( instance, "vkSetEvent" ) ); - vkResetEvent = PFN_vkResetEvent( vkGetInstanceProcAddr( instance, "vkResetEvent" ) ); - vkCreateQueryPool = PFN_vkCreateQueryPool( vkGetInstanceProcAddr( instance, "vkCreateQueryPool" ) ); - vkDestroyQueryPool = PFN_vkDestroyQueryPool( vkGetInstanceProcAddr( instance, "vkDestroyQueryPool" ) ); - vkGetQueryPoolResults = PFN_vkGetQueryPoolResults( vkGetInstanceProcAddr( instance, "vkGetQueryPoolResults" ) ); - vkCreateBuffer = PFN_vkCreateBuffer( vkGetInstanceProcAddr( instance, "vkCreateBuffer" ) ); - vkDestroyBuffer = PFN_vkDestroyBuffer( vkGetInstanceProcAddr( instance, "vkDestroyBuffer" ) ); - vkCreateBufferView = PFN_vkCreateBufferView( vkGetInstanceProcAddr( instance, "vkCreateBufferView" ) ); - vkDestroyBufferView = PFN_vkDestroyBufferView( vkGetInstanceProcAddr( instance, "vkDestroyBufferView" ) ); - vkCreateImage = PFN_vkCreateImage( vkGetInstanceProcAddr( instance, "vkCreateImage" ) ); - vkDestroyImage = PFN_vkDestroyImage( vkGetInstanceProcAddr( instance, "vkDestroyImage" ) ); - vkGetImageSubresourceLayout = PFN_vkGetImageSubresourceLayout( vkGetInstanceProcAddr( instance, "vkGetImageSubresourceLayout" ) ); - vkCreateImageView = PFN_vkCreateImageView( vkGetInstanceProcAddr( instance, "vkCreateImageView" ) ); - vkDestroyImageView = PFN_vkDestroyImageView( vkGetInstanceProcAddr( instance, "vkDestroyImageView" ) ); - vkCreateShaderModule = PFN_vkCreateShaderModule( vkGetInstanceProcAddr( instance, "vkCreateShaderModule" ) ); - vkDestroyShaderModule = PFN_vkDestroyShaderModule( vkGetInstanceProcAddr( instance, "vkDestroyShaderModule" ) ); - vkCreatePipelineCache 
= PFN_vkCreatePipelineCache( vkGetInstanceProcAddr( instance, "vkCreatePipelineCache" ) ); - vkDestroyPipelineCache = PFN_vkDestroyPipelineCache( vkGetInstanceProcAddr( instance, "vkDestroyPipelineCache" ) ); - vkGetPipelineCacheData = PFN_vkGetPipelineCacheData( vkGetInstanceProcAddr( instance, "vkGetPipelineCacheData" ) ); - vkMergePipelineCaches = PFN_vkMergePipelineCaches( vkGetInstanceProcAddr( instance, "vkMergePipelineCaches" ) ); - vkCreateGraphicsPipelines = PFN_vkCreateGraphicsPipelines( vkGetInstanceProcAddr( instance, "vkCreateGraphicsPipelines" ) ); - vkCreateComputePipelines = PFN_vkCreateComputePipelines( vkGetInstanceProcAddr( instance, "vkCreateComputePipelines" ) ); - vkDestroyPipeline = PFN_vkDestroyPipeline( vkGetInstanceProcAddr( instance, "vkDestroyPipeline" ) ); - vkCreatePipelineLayout = PFN_vkCreatePipelineLayout( vkGetInstanceProcAddr( instance, "vkCreatePipelineLayout" ) ); - vkDestroyPipelineLayout = PFN_vkDestroyPipelineLayout( vkGetInstanceProcAddr( instance, "vkDestroyPipelineLayout" ) ); - vkCreateSampler = PFN_vkCreateSampler( vkGetInstanceProcAddr( instance, "vkCreateSampler" ) ); - vkDestroySampler = PFN_vkDestroySampler( vkGetInstanceProcAddr( instance, "vkDestroySampler" ) ); - vkCreateDescriptorSetLayout = PFN_vkCreateDescriptorSetLayout( vkGetInstanceProcAddr( instance, "vkCreateDescriptorSetLayout" ) ); - vkDestroyDescriptorSetLayout = PFN_vkDestroyDescriptorSetLayout( vkGetInstanceProcAddr( instance, "vkDestroyDescriptorSetLayout" ) ); - vkCreateDescriptorPool = PFN_vkCreateDescriptorPool( vkGetInstanceProcAddr( instance, "vkCreateDescriptorPool" ) ); - vkDestroyDescriptorPool = PFN_vkDestroyDescriptorPool( vkGetInstanceProcAddr( instance, "vkDestroyDescriptorPool" ) ); - vkResetDescriptorPool = PFN_vkResetDescriptorPool( vkGetInstanceProcAddr( instance, "vkResetDescriptorPool" ) ); - vkAllocateDescriptorSets = PFN_vkAllocateDescriptorSets( vkGetInstanceProcAddr( instance, "vkAllocateDescriptorSets" ) ); - vkFreeDescriptorSets = PFN_vkFreeDescriptorSets( vkGetInstanceProcAddr( instance, "vkFreeDescriptorSets" ) ); - vkUpdateDescriptorSets = PFN_vkUpdateDescriptorSets( vkGetInstanceProcAddr( instance, "vkUpdateDescriptorSets" ) ); - vkCreateFramebuffer = PFN_vkCreateFramebuffer( vkGetInstanceProcAddr( instance, "vkCreateFramebuffer" ) ); - vkDestroyFramebuffer = PFN_vkDestroyFramebuffer( vkGetInstanceProcAddr( instance, "vkDestroyFramebuffer" ) ); - vkCreateRenderPass = PFN_vkCreateRenderPass( vkGetInstanceProcAddr( instance, "vkCreateRenderPass" ) ); - vkDestroyRenderPass = PFN_vkDestroyRenderPass( vkGetInstanceProcAddr( instance, "vkDestroyRenderPass" ) ); - vkGetRenderAreaGranularity = PFN_vkGetRenderAreaGranularity( vkGetInstanceProcAddr( instance, "vkGetRenderAreaGranularity" ) ); - vkCreateCommandPool = PFN_vkCreateCommandPool( vkGetInstanceProcAddr( instance, "vkCreateCommandPool" ) ); - vkDestroyCommandPool = PFN_vkDestroyCommandPool( vkGetInstanceProcAddr( instance, "vkDestroyCommandPool" ) ); - vkResetCommandPool = PFN_vkResetCommandPool( vkGetInstanceProcAddr( instance, "vkResetCommandPool" ) ); - vkAllocateCommandBuffers = PFN_vkAllocateCommandBuffers( vkGetInstanceProcAddr( instance, "vkAllocateCommandBuffers" ) ); - vkFreeCommandBuffers = PFN_vkFreeCommandBuffers( vkGetInstanceProcAddr( instance, "vkFreeCommandBuffers" ) ); - vkBeginCommandBuffer = PFN_vkBeginCommandBuffer( vkGetInstanceProcAddr( instance, "vkBeginCommandBuffer" ) ); - vkEndCommandBuffer = PFN_vkEndCommandBuffer( vkGetInstanceProcAddr( instance, "vkEndCommandBuffer" ) 
); - vkResetCommandBuffer = PFN_vkResetCommandBuffer( vkGetInstanceProcAddr( instance, "vkResetCommandBuffer" ) ); - vkCmdBindPipeline = PFN_vkCmdBindPipeline( vkGetInstanceProcAddr( instance, "vkCmdBindPipeline" ) ); - vkCmdSetViewport = PFN_vkCmdSetViewport( vkGetInstanceProcAddr( instance, "vkCmdSetViewport" ) ); - vkCmdSetScissor = PFN_vkCmdSetScissor( vkGetInstanceProcAddr( instance, "vkCmdSetScissor" ) ); - vkCmdSetLineWidth = PFN_vkCmdSetLineWidth( vkGetInstanceProcAddr( instance, "vkCmdSetLineWidth" ) ); - vkCmdSetDepthBias = PFN_vkCmdSetDepthBias( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBias" ) ); - vkCmdSetBlendConstants = PFN_vkCmdSetBlendConstants( vkGetInstanceProcAddr( instance, "vkCmdSetBlendConstants" ) ); - vkCmdSetDepthBounds = PFN_vkCmdSetDepthBounds( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBounds" ) ); - vkCmdSetStencilCompareMask = PFN_vkCmdSetStencilCompareMask( vkGetInstanceProcAddr( instance, "vkCmdSetStencilCompareMask" ) ); - vkCmdSetStencilWriteMask = PFN_vkCmdSetStencilWriteMask( vkGetInstanceProcAddr( instance, "vkCmdSetStencilWriteMask" ) ); - vkCmdSetStencilReference = PFN_vkCmdSetStencilReference( vkGetInstanceProcAddr( instance, "vkCmdSetStencilReference" ) ); - vkCmdBindDescriptorSets = PFN_vkCmdBindDescriptorSets( vkGetInstanceProcAddr( instance, "vkCmdBindDescriptorSets" ) ); - vkCmdBindIndexBuffer = PFN_vkCmdBindIndexBuffer( vkGetInstanceProcAddr( instance, "vkCmdBindIndexBuffer" ) ); - vkCmdBindVertexBuffers = PFN_vkCmdBindVertexBuffers( vkGetInstanceProcAddr( instance, "vkCmdBindVertexBuffers" ) ); - vkCmdDraw = PFN_vkCmdDraw( vkGetInstanceProcAddr( instance, "vkCmdDraw" ) ); - vkCmdDrawIndexed = PFN_vkCmdDrawIndexed( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexed" ) ); - vkCmdDrawIndirect = PFN_vkCmdDrawIndirect( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirect" ) ); - vkCmdDrawIndexedIndirect = PFN_vkCmdDrawIndexedIndirect( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirect" ) ); - vkCmdDispatch = PFN_vkCmdDispatch( vkGetInstanceProcAddr( instance, "vkCmdDispatch" ) ); - vkCmdDispatchIndirect = PFN_vkCmdDispatchIndirect( vkGetInstanceProcAddr( instance, "vkCmdDispatchIndirect" ) ); - vkCmdCopyBuffer = PFN_vkCmdCopyBuffer( vkGetInstanceProcAddr( instance, "vkCmdCopyBuffer" ) ); - vkCmdCopyImage = PFN_vkCmdCopyImage( vkGetInstanceProcAddr( instance, "vkCmdCopyImage" ) ); - vkCmdBlitImage = PFN_vkCmdBlitImage( vkGetInstanceProcAddr( instance, "vkCmdBlitImage" ) ); - vkCmdCopyBufferToImage = PFN_vkCmdCopyBufferToImage( vkGetInstanceProcAddr( instance, "vkCmdCopyBufferToImage" ) ); - vkCmdCopyImageToBuffer = PFN_vkCmdCopyImageToBuffer( vkGetInstanceProcAddr( instance, "vkCmdCopyImageToBuffer" ) ); - vkCmdUpdateBuffer = PFN_vkCmdUpdateBuffer( vkGetInstanceProcAddr( instance, "vkCmdUpdateBuffer" ) ); - vkCmdFillBuffer = PFN_vkCmdFillBuffer( vkGetInstanceProcAddr( instance, "vkCmdFillBuffer" ) ); - vkCmdClearColorImage = PFN_vkCmdClearColorImage( vkGetInstanceProcAddr( instance, "vkCmdClearColorImage" ) ); - vkCmdClearDepthStencilImage = PFN_vkCmdClearDepthStencilImage( vkGetInstanceProcAddr( instance, "vkCmdClearDepthStencilImage" ) ); - vkCmdClearAttachments = PFN_vkCmdClearAttachments( vkGetInstanceProcAddr( instance, "vkCmdClearAttachments" ) ); - vkCmdResolveImage = PFN_vkCmdResolveImage( vkGetInstanceProcAddr( instance, "vkCmdResolveImage" ) ); - vkCmdSetEvent = PFN_vkCmdSetEvent( vkGetInstanceProcAddr( instance, "vkCmdSetEvent" ) ); - vkCmdResetEvent = PFN_vkCmdResetEvent( vkGetInstanceProcAddr( instance, 
"vkCmdResetEvent" ) ); - vkCmdWaitEvents = PFN_vkCmdWaitEvents( vkGetInstanceProcAddr( instance, "vkCmdWaitEvents" ) ); - vkCmdPipelineBarrier = PFN_vkCmdPipelineBarrier( vkGetInstanceProcAddr( instance, "vkCmdPipelineBarrier" ) ); - vkCmdBeginQuery = PFN_vkCmdBeginQuery( vkGetInstanceProcAddr( instance, "vkCmdBeginQuery" ) ); - vkCmdEndQuery = PFN_vkCmdEndQuery( vkGetInstanceProcAddr( instance, "vkCmdEndQuery" ) ); - vkCmdResetQueryPool = PFN_vkCmdResetQueryPool( vkGetInstanceProcAddr( instance, "vkCmdResetQueryPool" ) ); - vkCmdWriteTimestamp = PFN_vkCmdWriteTimestamp( vkGetInstanceProcAddr( instance, "vkCmdWriteTimestamp" ) ); - vkCmdCopyQueryPoolResults = PFN_vkCmdCopyQueryPoolResults( vkGetInstanceProcAddr( instance, "vkCmdCopyQueryPoolResults" ) ); - vkCmdPushConstants = PFN_vkCmdPushConstants( vkGetInstanceProcAddr( instance, "vkCmdPushConstants" ) ); - vkCmdBeginRenderPass = PFN_vkCmdBeginRenderPass( vkGetInstanceProcAddr( instance, "vkCmdBeginRenderPass" ) ); - vkCmdNextSubpass = PFN_vkCmdNextSubpass( vkGetInstanceProcAddr( instance, "vkCmdNextSubpass" ) ); - vkCmdEndRenderPass = PFN_vkCmdEndRenderPass( vkGetInstanceProcAddr( instance, "vkCmdEndRenderPass" ) ); - vkCmdExecuteCommands = PFN_vkCmdExecuteCommands( vkGetInstanceProcAddr( instance, "vkCmdExecuteCommands" ) ); - - //=== VK_VERSION_1_1 === - vkBindBufferMemory2 = PFN_vkBindBufferMemory2( vkGetInstanceProcAddr( instance, "vkBindBufferMemory2" ) ); - vkBindImageMemory2 = PFN_vkBindImageMemory2( vkGetInstanceProcAddr( instance, "vkBindImageMemory2" ) ); - vkGetDeviceGroupPeerMemoryFeatures = PFN_vkGetDeviceGroupPeerMemoryFeatures( vkGetInstanceProcAddr( instance, "vkGetDeviceGroupPeerMemoryFeatures" ) ); - vkCmdSetDeviceMask = PFN_vkCmdSetDeviceMask( vkGetInstanceProcAddr( instance, "vkCmdSetDeviceMask" ) ); - vkCmdDispatchBase = PFN_vkCmdDispatchBase( vkGetInstanceProcAddr( instance, "vkCmdDispatchBase" ) ); - vkEnumeratePhysicalDeviceGroups = PFN_vkEnumeratePhysicalDeviceGroups( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceGroups" ) ); - vkGetImageMemoryRequirements2 = PFN_vkGetImageMemoryRequirements2( vkGetInstanceProcAddr( instance, "vkGetImageMemoryRequirements2" ) ); - vkGetBufferMemoryRequirements2 = PFN_vkGetBufferMemoryRequirements2( vkGetInstanceProcAddr( instance, "vkGetBufferMemoryRequirements2" ) ); - vkGetImageSparseMemoryRequirements2 = PFN_vkGetImageSparseMemoryRequirements2( vkGetInstanceProcAddr( instance, "vkGetImageSparseMemoryRequirements2" ) ); - vkGetPhysicalDeviceFeatures2 = PFN_vkGetPhysicalDeviceFeatures2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures2" ) ); - vkGetPhysicalDeviceProperties2 = PFN_vkGetPhysicalDeviceProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties2" ) ); - vkGetPhysicalDeviceFormatProperties2 = - PFN_vkGetPhysicalDeviceFormatProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties2" ) ); - vkGetPhysicalDeviceImageFormatProperties2 = - PFN_vkGetPhysicalDeviceImageFormatProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties2" ) ); - vkGetPhysicalDeviceQueueFamilyProperties2 = - PFN_vkGetPhysicalDeviceQueueFamilyProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties2" ) ); - vkGetPhysicalDeviceMemoryProperties2 = - PFN_vkGetPhysicalDeviceMemoryProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties2" ) ); - vkGetPhysicalDeviceSparseImageFormatProperties2 = - PFN_vkGetPhysicalDeviceSparseImageFormatProperties2( 
vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties2" ) ); - vkTrimCommandPool = PFN_vkTrimCommandPool( vkGetInstanceProcAddr( instance, "vkTrimCommandPool" ) ); - vkGetDeviceQueue2 = PFN_vkGetDeviceQueue2( vkGetInstanceProcAddr( instance, "vkGetDeviceQueue2" ) ); - vkCreateSamplerYcbcrConversion = PFN_vkCreateSamplerYcbcrConversion( vkGetInstanceProcAddr( instance, "vkCreateSamplerYcbcrConversion" ) ); - vkDestroySamplerYcbcrConversion = PFN_vkDestroySamplerYcbcrConversion( vkGetInstanceProcAddr( instance, "vkDestroySamplerYcbcrConversion" ) ); - vkCreateDescriptorUpdateTemplate = PFN_vkCreateDescriptorUpdateTemplate( vkGetInstanceProcAddr( instance, "vkCreateDescriptorUpdateTemplate" ) ); - vkDestroyDescriptorUpdateTemplate = PFN_vkDestroyDescriptorUpdateTemplate( vkGetInstanceProcAddr( instance, "vkDestroyDescriptorUpdateTemplate" ) ); - vkUpdateDescriptorSetWithTemplate = PFN_vkUpdateDescriptorSetWithTemplate( vkGetInstanceProcAddr( instance, "vkUpdateDescriptorSetWithTemplate" ) ); - vkGetPhysicalDeviceExternalBufferProperties = - PFN_vkGetPhysicalDeviceExternalBufferProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalBufferProperties" ) ); - vkGetPhysicalDeviceExternalFenceProperties = - PFN_vkGetPhysicalDeviceExternalFenceProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalFenceProperties" ) ); - vkGetPhysicalDeviceExternalSemaphoreProperties = - PFN_vkGetPhysicalDeviceExternalSemaphoreProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalSemaphoreProperties" ) ); - vkGetDescriptorSetLayoutSupport = PFN_vkGetDescriptorSetLayoutSupport( vkGetInstanceProcAddr( instance, "vkGetDescriptorSetLayoutSupport" ) ); + PFN_vkEnumerateInstanceVersion vkEnumerateInstanceVersion = 0; + PFN_vkBindBufferMemory2 vkBindBufferMemory2 = 0; + PFN_vkBindImageMemory2 vkBindImageMemory2 = 0; + PFN_vkGetDeviceGroupPeerMemoryFeatures vkGetDeviceGroupPeerMemoryFeatures = 0; + PFN_vkCmdSetDeviceMask vkCmdSetDeviceMask = 0; + PFN_vkCmdDispatchBase vkCmdDispatchBase = 0; + PFN_vkEnumeratePhysicalDeviceGroups vkEnumeratePhysicalDeviceGroups = 0; + PFN_vkGetImageMemoryRequirements2 vkGetImageMemoryRequirements2 = 0; + PFN_vkGetBufferMemoryRequirements2 vkGetBufferMemoryRequirements2 = 0; + PFN_vkGetImageSparseMemoryRequirements2 vkGetImageSparseMemoryRequirements2 = 0; + PFN_vkGetPhysicalDeviceFeatures2 vkGetPhysicalDeviceFeatures2 = 0; + PFN_vkGetPhysicalDeviceProperties2 vkGetPhysicalDeviceProperties2 = 0; + PFN_vkGetPhysicalDeviceFormatProperties2 vkGetPhysicalDeviceFormatProperties2 = 0; + PFN_vkGetPhysicalDeviceImageFormatProperties2 vkGetPhysicalDeviceImageFormatProperties2 = 0; + PFN_vkGetPhysicalDeviceQueueFamilyProperties2 vkGetPhysicalDeviceQueueFamilyProperties2 = 0; + PFN_vkGetPhysicalDeviceMemoryProperties2 vkGetPhysicalDeviceMemoryProperties2 = 0; + PFN_vkGetPhysicalDeviceSparseImageFormatProperties2 vkGetPhysicalDeviceSparseImageFormatProperties2 = 0; + PFN_vkTrimCommandPool vkTrimCommandPool = 0; + PFN_vkGetDeviceQueue2 vkGetDeviceQueue2 = 0; + PFN_vkCreateSamplerYcbcrConversion vkCreateSamplerYcbcrConversion = 0; + PFN_vkDestroySamplerYcbcrConversion vkDestroySamplerYcbcrConversion = 0; + PFN_vkCreateDescriptorUpdateTemplate vkCreateDescriptorUpdateTemplate = 0; + PFN_vkDestroyDescriptorUpdateTemplate vkDestroyDescriptorUpdateTemplate = 0; + PFN_vkUpdateDescriptorSetWithTemplate vkUpdateDescriptorSetWithTemplate = 0; + PFN_vkGetPhysicalDeviceExternalBufferProperties vkGetPhysicalDeviceExternalBufferProperties 
= 0; + PFN_vkGetPhysicalDeviceExternalFenceProperties vkGetPhysicalDeviceExternalFenceProperties = 0; + PFN_vkGetPhysicalDeviceExternalSemaphoreProperties vkGetPhysicalDeviceExternalSemaphoreProperties = 0; + PFN_vkGetDescriptorSetLayoutSupport vkGetDescriptorSetLayoutSupport = 0; //=== VK_VERSION_1_2 === - vkCmdDrawIndirectCount = PFN_vkCmdDrawIndirectCount( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectCount" ) ); - vkCmdDrawIndexedIndirectCount = PFN_vkCmdDrawIndexedIndirectCount( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirectCount" ) ); - vkCreateRenderPass2 = PFN_vkCreateRenderPass2( vkGetInstanceProcAddr( instance, "vkCreateRenderPass2" ) ); - vkCmdBeginRenderPass2 = PFN_vkCmdBeginRenderPass2( vkGetInstanceProcAddr( instance, "vkCmdBeginRenderPass2" ) ); - vkCmdNextSubpass2 = PFN_vkCmdNextSubpass2( vkGetInstanceProcAddr( instance, "vkCmdNextSubpass2" ) ); - vkCmdEndRenderPass2 = PFN_vkCmdEndRenderPass2( vkGetInstanceProcAddr( instance, "vkCmdEndRenderPass2" ) ); - vkResetQueryPool = PFN_vkResetQueryPool( vkGetInstanceProcAddr( instance, "vkResetQueryPool" ) ); - vkGetSemaphoreCounterValue = PFN_vkGetSemaphoreCounterValue( vkGetInstanceProcAddr( instance, "vkGetSemaphoreCounterValue" ) ); - vkWaitSemaphores = PFN_vkWaitSemaphores( vkGetInstanceProcAddr( instance, "vkWaitSemaphores" ) ); - vkSignalSemaphore = PFN_vkSignalSemaphore( vkGetInstanceProcAddr( instance, "vkSignalSemaphore" ) ); - vkGetBufferDeviceAddress = PFN_vkGetBufferDeviceAddress( vkGetInstanceProcAddr( instance, "vkGetBufferDeviceAddress" ) ); - vkGetBufferOpaqueCaptureAddress = PFN_vkGetBufferOpaqueCaptureAddress( vkGetInstanceProcAddr( instance, "vkGetBufferOpaqueCaptureAddress" ) ); - vkGetDeviceMemoryOpaqueCaptureAddress = - PFN_vkGetDeviceMemoryOpaqueCaptureAddress( vkGetInstanceProcAddr( instance, "vkGetDeviceMemoryOpaqueCaptureAddress" ) ); + PFN_vkCmdDrawIndirectCount vkCmdDrawIndirectCount = 0; + PFN_vkCmdDrawIndexedIndirectCount vkCmdDrawIndexedIndirectCount = 0; + PFN_vkCreateRenderPass2 vkCreateRenderPass2 = 0; + PFN_vkCmdBeginRenderPass2 vkCmdBeginRenderPass2 = 0; + PFN_vkCmdNextSubpass2 vkCmdNextSubpass2 = 0; + PFN_vkCmdEndRenderPass2 vkCmdEndRenderPass2 = 0; + PFN_vkResetQueryPool vkResetQueryPool = 0; + PFN_vkGetSemaphoreCounterValue vkGetSemaphoreCounterValue = 0; + PFN_vkWaitSemaphores vkWaitSemaphores = 0; + PFN_vkSignalSemaphore vkSignalSemaphore = 0; + PFN_vkGetBufferDeviceAddress vkGetBufferDeviceAddress = 0; + PFN_vkGetBufferOpaqueCaptureAddress vkGetBufferOpaqueCaptureAddress = 0; + PFN_vkGetDeviceMemoryOpaqueCaptureAddress vkGetDeviceMemoryOpaqueCaptureAddress = 0; //=== VK_VERSION_1_3 === - vkGetPhysicalDeviceToolProperties = PFN_vkGetPhysicalDeviceToolProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceToolProperties" ) ); - vkCreatePrivateDataSlot = PFN_vkCreatePrivateDataSlot( vkGetInstanceProcAddr( instance, "vkCreatePrivateDataSlot" ) ); - vkDestroyPrivateDataSlot = PFN_vkDestroyPrivateDataSlot( vkGetInstanceProcAddr( instance, "vkDestroyPrivateDataSlot" ) ); - vkSetPrivateData = PFN_vkSetPrivateData( vkGetInstanceProcAddr( instance, "vkSetPrivateData" ) ); - vkGetPrivateData = PFN_vkGetPrivateData( vkGetInstanceProcAddr( instance, "vkGetPrivateData" ) ); - vkCmdSetEvent2 = PFN_vkCmdSetEvent2( vkGetInstanceProcAddr( instance, "vkCmdSetEvent2" ) ); - vkCmdResetEvent2 = PFN_vkCmdResetEvent2( vkGetInstanceProcAddr( instance, "vkCmdResetEvent2" ) ); - vkCmdWaitEvents2 = PFN_vkCmdWaitEvents2( vkGetInstanceProcAddr( instance, "vkCmdWaitEvents2" ) ); - 
vkCmdPipelineBarrier2 = PFN_vkCmdPipelineBarrier2( vkGetInstanceProcAddr( instance, "vkCmdPipelineBarrier2" ) ); - vkCmdWriteTimestamp2 = PFN_vkCmdWriteTimestamp2( vkGetInstanceProcAddr( instance, "vkCmdWriteTimestamp2" ) ); - vkQueueSubmit2 = PFN_vkQueueSubmit2( vkGetInstanceProcAddr( instance, "vkQueueSubmit2" ) ); - vkCmdCopyBuffer2 = PFN_vkCmdCopyBuffer2( vkGetInstanceProcAddr( instance, "vkCmdCopyBuffer2" ) ); - vkCmdCopyImage2 = PFN_vkCmdCopyImage2( vkGetInstanceProcAddr( instance, "vkCmdCopyImage2" ) ); - vkCmdCopyBufferToImage2 = PFN_vkCmdCopyBufferToImage2( vkGetInstanceProcAddr( instance, "vkCmdCopyBufferToImage2" ) ); - vkCmdCopyImageToBuffer2 = PFN_vkCmdCopyImageToBuffer2( vkGetInstanceProcAddr( instance, "vkCmdCopyImageToBuffer2" ) ); - vkCmdBlitImage2 = PFN_vkCmdBlitImage2( vkGetInstanceProcAddr( instance, "vkCmdBlitImage2" ) ); - vkCmdResolveImage2 = PFN_vkCmdResolveImage2( vkGetInstanceProcAddr( instance, "vkCmdResolveImage2" ) ); - vkCmdBeginRendering = PFN_vkCmdBeginRendering( vkGetInstanceProcAddr( instance, "vkCmdBeginRendering" ) ); - vkCmdEndRendering = PFN_vkCmdEndRendering( vkGetInstanceProcAddr( instance, "vkCmdEndRendering" ) ); - vkCmdSetCullMode = PFN_vkCmdSetCullMode( vkGetInstanceProcAddr( instance, "vkCmdSetCullMode" ) ); - vkCmdSetFrontFace = PFN_vkCmdSetFrontFace( vkGetInstanceProcAddr( instance, "vkCmdSetFrontFace" ) ); - vkCmdSetPrimitiveTopology = PFN_vkCmdSetPrimitiveTopology( vkGetInstanceProcAddr( instance, "vkCmdSetPrimitiveTopology" ) ); - vkCmdSetViewportWithCount = PFN_vkCmdSetViewportWithCount( vkGetInstanceProcAddr( instance, "vkCmdSetViewportWithCount" ) ); - vkCmdSetScissorWithCount = PFN_vkCmdSetScissorWithCount( vkGetInstanceProcAddr( instance, "vkCmdSetScissorWithCount" ) ); - vkCmdBindVertexBuffers2 = PFN_vkCmdBindVertexBuffers2( vkGetInstanceProcAddr( instance, "vkCmdBindVertexBuffers2" ) ); - vkCmdSetDepthTestEnable = PFN_vkCmdSetDepthTestEnable( vkGetInstanceProcAddr( instance, "vkCmdSetDepthTestEnable" ) ); - vkCmdSetDepthWriteEnable = PFN_vkCmdSetDepthWriteEnable( vkGetInstanceProcAddr( instance, "vkCmdSetDepthWriteEnable" ) ); - vkCmdSetDepthCompareOp = PFN_vkCmdSetDepthCompareOp( vkGetInstanceProcAddr( instance, "vkCmdSetDepthCompareOp" ) ); - vkCmdSetDepthBoundsTestEnable = PFN_vkCmdSetDepthBoundsTestEnable( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBoundsTestEnable" ) ); - vkCmdSetStencilTestEnable = PFN_vkCmdSetStencilTestEnable( vkGetInstanceProcAddr( instance, "vkCmdSetStencilTestEnable" ) ); - vkCmdSetStencilOp = PFN_vkCmdSetStencilOp( vkGetInstanceProcAddr( instance, "vkCmdSetStencilOp" ) ); - vkCmdSetRasterizerDiscardEnable = PFN_vkCmdSetRasterizerDiscardEnable( vkGetInstanceProcAddr( instance, "vkCmdSetRasterizerDiscardEnable" ) ); - vkCmdSetDepthBiasEnable = PFN_vkCmdSetDepthBiasEnable( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBiasEnable" ) ); - vkCmdSetPrimitiveRestartEnable = PFN_vkCmdSetPrimitiveRestartEnable( vkGetInstanceProcAddr( instance, "vkCmdSetPrimitiveRestartEnable" ) ); - vkGetDeviceBufferMemoryRequirements = PFN_vkGetDeviceBufferMemoryRequirements( vkGetInstanceProcAddr( instance, "vkGetDeviceBufferMemoryRequirements" ) ); - vkGetDeviceImageMemoryRequirements = PFN_vkGetDeviceImageMemoryRequirements( vkGetInstanceProcAddr( instance, "vkGetDeviceImageMemoryRequirements" ) ); - vkGetDeviceImageSparseMemoryRequirements = - PFN_vkGetDeviceImageSparseMemoryRequirements( vkGetInstanceProcAddr( instance, "vkGetDeviceImageSparseMemoryRequirements" ) ); + PFN_vkGetPhysicalDeviceToolProperties 
vkGetPhysicalDeviceToolProperties = 0; + PFN_vkCreatePrivateDataSlot vkCreatePrivateDataSlot = 0; + PFN_vkDestroyPrivateDataSlot vkDestroyPrivateDataSlot = 0; + PFN_vkSetPrivateData vkSetPrivateData = 0; + PFN_vkGetPrivateData vkGetPrivateData = 0; + PFN_vkCmdSetEvent2 vkCmdSetEvent2 = 0; + PFN_vkCmdResetEvent2 vkCmdResetEvent2 = 0; + PFN_vkCmdWaitEvents2 vkCmdWaitEvents2 = 0; + PFN_vkCmdPipelineBarrier2 vkCmdPipelineBarrier2 = 0; + PFN_vkCmdWriteTimestamp2 vkCmdWriteTimestamp2 = 0; + PFN_vkQueueSubmit2 vkQueueSubmit2 = 0; + PFN_vkCmdCopyBuffer2 vkCmdCopyBuffer2 = 0; + PFN_vkCmdCopyImage2 vkCmdCopyImage2 = 0; + PFN_vkCmdCopyBufferToImage2 vkCmdCopyBufferToImage2 = 0; + PFN_vkCmdCopyImageToBuffer2 vkCmdCopyImageToBuffer2 = 0; + PFN_vkCmdBlitImage2 vkCmdBlitImage2 = 0; + PFN_vkCmdResolveImage2 vkCmdResolveImage2 = 0; + PFN_vkCmdBeginRendering vkCmdBeginRendering = 0; + PFN_vkCmdEndRendering vkCmdEndRendering = 0; + PFN_vkCmdSetCullMode vkCmdSetCullMode = 0; + PFN_vkCmdSetFrontFace vkCmdSetFrontFace = 0; + PFN_vkCmdSetPrimitiveTopology vkCmdSetPrimitiveTopology = 0; + PFN_vkCmdSetViewportWithCount vkCmdSetViewportWithCount = 0; + PFN_vkCmdSetScissorWithCount vkCmdSetScissorWithCount = 0; + PFN_vkCmdBindVertexBuffers2 vkCmdBindVertexBuffers2 = 0; + PFN_vkCmdSetDepthTestEnable vkCmdSetDepthTestEnable = 0; + PFN_vkCmdSetDepthWriteEnable vkCmdSetDepthWriteEnable = 0; + PFN_vkCmdSetDepthCompareOp vkCmdSetDepthCompareOp = 0; + PFN_vkCmdSetDepthBoundsTestEnable vkCmdSetDepthBoundsTestEnable = 0; + PFN_vkCmdSetStencilTestEnable vkCmdSetStencilTestEnable = 0; + PFN_vkCmdSetStencilOp vkCmdSetStencilOp = 0; + PFN_vkCmdSetRasterizerDiscardEnable vkCmdSetRasterizerDiscardEnable = 0; + PFN_vkCmdSetDepthBiasEnable vkCmdSetDepthBiasEnable = 0; + PFN_vkCmdSetPrimitiveRestartEnable vkCmdSetPrimitiveRestartEnable = 0; + PFN_vkGetDeviceBufferMemoryRequirements vkGetDeviceBufferMemoryRequirements = 0; + PFN_vkGetDeviceImageMemoryRequirements vkGetDeviceImageMemoryRequirements = 0; + PFN_vkGetDeviceImageSparseMemoryRequirements vkGetDeviceImageSparseMemoryRequirements = 0; //=== VK_KHR_surface === - vkDestroySurfaceKHR = PFN_vkDestroySurfaceKHR( vkGetInstanceProcAddr( instance, "vkDestroySurfaceKHR" ) ); - vkGetPhysicalDeviceSurfaceSupportKHR = - PFN_vkGetPhysicalDeviceSurfaceSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceSupportKHR" ) ); - vkGetPhysicalDeviceSurfaceCapabilitiesKHR = - PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilitiesKHR" ) ); - vkGetPhysicalDeviceSurfaceFormatsKHR = - PFN_vkGetPhysicalDeviceSurfaceFormatsKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceFormatsKHR" ) ); - vkGetPhysicalDeviceSurfacePresentModesKHR = - PFN_vkGetPhysicalDeviceSurfacePresentModesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfacePresentModesKHR" ) ); + PFN_vkDestroySurfaceKHR vkDestroySurfaceKHR = 0; + PFN_vkGetPhysicalDeviceSurfaceSupportKHR vkGetPhysicalDeviceSurfaceSupportKHR = 0; + PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR vkGetPhysicalDeviceSurfaceCapabilitiesKHR = 0; + PFN_vkGetPhysicalDeviceSurfaceFormatsKHR vkGetPhysicalDeviceSurfaceFormatsKHR = 0; + PFN_vkGetPhysicalDeviceSurfacePresentModesKHR vkGetPhysicalDeviceSurfacePresentModesKHR = 0; //=== VK_KHR_swapchain === - vkCreateSwapchainKHR = PFN_vkCreateSwapchainKHR( vkGetInstanceProcAddr( instance, "vkCreateSwapchainKHR" ) ); - vkDestroySwapchainKHR = PFN_vkDestroySwapchainKHR( vkGetInstanceProcAddr( instance, 
"vkDestroySwapchainKHR" ) ); - vkGetSwapchainImagesKHR = PFN_vkGetSwapchainImagesKHR( vkGetInstanceProcAddr( instance, "vkGetSwapchainImagesKHR" ) ); - vkAcquireNextImageKHR = PFN_vkAcquireNextImageKHR( vkGetInstanceProcAddr( instance, "vkAcquireNextImageKHR" ) ); - vkQueuePresentKHR = PFN_vkQueuePresentKHR( vkGetInstanceProcAddr( instance, "vkQueuePresentKHR" ) ); - vkGetDeviceGroupPresentCapabilitiesKHR = - PFN_vkGetDeviceGroupPresentCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceGroupPresentCapabilitiesKHR" ) ); - vkGetDeviceGroupSurfacePresentModesKHR = - PFN_vkGetDeviceGroupSurfacePresentModesKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceGroupSurfacePresentModesKHR" ) ); - vkGetPhysicalDevicePresentRectanglesKHR = - PFN_vkGetPhysicalDevicePresentRectanglesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDevicePresentRectanglesKHR" ) ); - vkAcquireNextImage2KHR = PFN_vkAcquireNextImage2KHR( vkGetInstanceProcAddr( instance, "vkAcquireNextImage2KHR" ) ); + PFN_vkCreateSwapchainKHR vkCreateSwapchainKHR = 0; + PFN_vkDestroySwapchainKHR vkDestroySwapchainKHR = 0; + PFN_vkGetSwapchainImagesKHR vkGetSwapchainImagesKHR = 0; + PFN_vkAcquireNextImageKHR vkAcquireNextImageKHR = 0; + PFN_vkQueuePresentKHR vkQueuePresentKHR = 0; + PFN_vkGetDeviceGroupPresentCapabilitiesKHR vkGetDeviceGroupPresentCapabilitiesKHR = 0; + PFN_vkGetDeviceGroupSurfacePresentModesKHR vkGetDeviceGroupSurfacePresentModesKHR = 0; + PFN_vkGetPhysicalDevicePresentRectanglesKHR vkGetPhysicalDevicePresentRectanglesKHR = 0; + PFN_vkAcquireNextImage2KHR vkAcquireNextImage2KHR = 0; //=== VK_KHR_display === - vkGetPhysicalDeviceDisplayPropertiesKHR = - PFN_vkGetPhysicalDeviceDisplayPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPropertiesKHR" ) ); - vkGetPhysicalDeviceDisplayPlanePropertiesKHR = - PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPlanePropertiesKHR" ) ); - vkGetDisplayPlaneSupportedDisplaysKHR = - PFN_vkGetDisplayPlaneSupportedDisplaysKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneSupportedDisplaysKHR" ) ); - vkGetDisplayModePropertiesKHR = PFN_vkGetDisplayModePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayModePropertiesKHR" ) ); - vkCreateDisplayModeKHR = PFN_vkCreateDisplayModeKHR( vkGetInstanceProcAddr( instance, "vkCreateDisplayModeKHR" ) ); - vkGetDisplayPlaneCapabilitiesKHR = PFN_vkGetDisplayPlaneCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneCapabilitiesKHR" ) ); - vkCreateDisplayPlaneSurfaceKHR = PFN_vkCreateDisplayPlaneSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateDisplayPlaneSurfaceKHR" ) ); + PFN_vkGetPhysicalDeviceDisplayPropertiesKHR vkGetPhysicalDeviceDisplayPropertiesKHR = 0; + PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR vkGetPhysicalDeviceDisplayPlanePropertiesKHR = 0; + PFN_vkGetDisplayPlaneSupportedDisplaysKHR vkGetDisplayPlaneSupportedDisplaysKHR = 0; + PFN_vkGetDisplayModePropertiesKHR vkGetDisplayModePropertiesKHR = 0; + PFN_vkCreateDisplayModeKHR vkCreateDisplayModeKHR = 0; + PFN_vkGetDisplayPlaneCapabilitiesKHR vkGetDisplayPlaneCapabilitiesKHR = 0; + PFN_vkCreateDisplayPlaneSurfaceKHR vkCreateDisplayPlaneSurfaceKHR = 0; //=== VK_KHR_display_swapchain === - vkCreateSharedSwapchainsKHR = PFN_vkCreateSharedSwapchainsKHR( vkGetInstanceProcAddr( instance, "vkCreateSharedSwapchainsKHR" ) ); + PFN_vkCreateSharedSwapchainsKHR vkCreateSharedSwapchainsKHR = 0; #if defined( VK_USE_PLATFORM_XLIB_KHR ) //=== VK_KHR_xlib_surface === - 
vkCreateXlibSurfaceKHR = PFN_vkCreateXlibSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateXlibSurfaceKHR" ) ); - vkGetPhysicalDeviceXlibPresentationSupportKHR = - PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceXlibPresentationSupportKHR" ) ); + PFN_vkCreateXlibSurfaceKHR vkCreateXlibSurfaceKHR = 0; + PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR vkGetPhysicalDeviceXlibPresentationSupportKHR = 0; +#else + PFN_dummy vkCreateXlibSurfaceKHR_placeholder = 0; + PFN_dummy vkGetPhysicalDeviceXlibPresentationSupportKHR_placeholder = 0; #endif /*VK_USE_PLATFORM_XLIB_KHR*/ #if defined( VK_USE_PLATFORM_XCB_KHR ) //=== VK_KHR_xcb_surface === - vkCreateXcbSurfaceKHR = PFN_vkCreateXcbSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateXcbSurfaceKHR" ) ); - vkGetPhysicalDeviceXcbPresentationSupportKHR = - PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceXcbPresentationSupportKHR" ) ); + PFN_vkCreateXcbSurfaceKHR vkCreateXcbSurfaceKHR = 0; + PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR vkGetPhysicalDeviceXcbPresentationSupportKHR = 0; +#else + PFN_dummy vkCreateXcbSurfaceKHR_placeholder = 0; + PFN_dummy vkGetPhysicalDeviceXcbPresentationSupportKHR_placeholder = 0; #endif /*VK_USE_PLATFORM_XCB_KHR*/ #if defined( VK_USE_PLATFORM_WAYLAND_KHR ) //=== VK_KHR_wayland_surface === - vkCreateWaylandSurfaceKHR = PFN_vkCreateWaylandSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateWaylandSurfaceKHR" ) ); - vkGetPhysicalDeviceWaylandPresentationSupportKHR = - PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceWaylandPresentationSupportKHR" ) ); + PFN_vkCreateWaylandSurfaceKHR vkCreateWaylandSurfaceKHR = 0; + PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR vkGetPhysicalDeviceWaylandPresentationSupportKHR = 0; +#else + PFN_dummy vkCreateWaylandSurfaceKHR_placeholder = 0; + PFN_dummy vkGetPhysicalDeviceWaylandPresentationSupportKHR_placeholder = 0; #endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ #if defined( VK_USE_PLATFORM_ANDROID_KHR ) //=== VK_KHR_android_surface === - vkCreateAndroidSurfaceKHR = PFN_vkCreateAndroidSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateAndroidSurfaceKHR" ) ); + PFN_vkCreateAndroidSurfaceKHR vkCreateAndroidSurfaceKHR = 0; +#else + PFN_dummy vkCreateAndroidSurfaceKHR_placeholder = 0; #endif /*VK_USE_PLATFORM_ANDROID_KHR*/ #if defined( VK_USE_PLATFORM_WIN32_KHR ) //=== VK_KHR_win32_surface === - vkCreateWin32SurfaceKHR = PFN_vkCreateWin32SurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateWin32SurfaceKHR" ) ); - vkGetPhysicalDeviceWin32PresentationSupportKHR = - PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceWin32PresentationSupportKHR" ) ); + PFN_vkCreateWin32SurfaceKHR vkCreateWin32SurfaceKHR = 0; + PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR vkGetPhysicalDeviceWin32PresentationSupportKHR = 0; +#else + PFN_dummy vkCreateWin32SurfaceKHR_placeholder = 0; + PFN_dummy vkGetPhysicalDeviceWin32PresentationSupportKHR_placeholder = 0; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ //=== VK_EXT_debug_report === - vkCreateDebugReportCallbackEXT = PFN_vkCreateDebugReportCallbackEXT( vkGetInstanceProcAddr( instance, "vkCreateDebugReportCallbackEXT" ) ); - vkDestroyDebugReportCallbackEXT = PFN_vkDestroyDebugReportCallbackEXT( vkGetInstanceProcAddr( instance, "vkDestroyDebugReportCallbackEXT" ) ); - vkDebugReportMessageEXT = PFN_vkDebugReportMessageEXT( 
vkGetInstanceProcAddr( instance, "vkDebugReportMessageEXT" ) ); + PFN_vkCreateDebugReportCallbackEXT vkCreateDebugReportCallbackEXT = 0; + PFN_vkDestroyDebugReportCallbackEXT vkDestroyDebugReportCallbackEXT = 0; + PFN_vkDebugReportMessageEXT vkDebugReportMessageEXT = 0; //=== VK_EXT_debug_marker === - vkDebugMarkerSetObjectTagEXT = PFN_vkDebugMarkerSetObjectTagEXT( vkGetInstanceProcAddr( instance, "vkDebugMarkerSetObjectTagEXT" ) ); - vkDebugMarkerSetObjectNameEXT = PFN_vkDebugMarkerSetObjectNameEXT( vkGetInstanceProcAddr( instance, "vkDebugMarkerSetObjectNameEXT" ) ); - vkCmdDebugMarkerBeginEXT = PFN_vkCmdDebugMarkerBeginEXT( vkGetInstanceProcAddr( instance, "vkCmdDebugMarkerBeginEXT" ) ); - vkCmdDebugMarkerEndEXT = PFN_vkCmdDebugMarkerEndEXT( vkGetInstanceProcAddr( instance, "vkCmdDebugMarkerEndEXT" ) ); - vkCmdDebugMarkerInsertEXT = PFN_vkCmdDebugMarkerInsertEXT( vkGetInstanceProcAddr( instance, "vkCmdDebugMarkerInsertEXT" ) ); + PFN_vkDebugMarkerSetObjectTagEXT vkDebugMarkerSetObjectTagEXT = 0; + PFN_vkDebugMarkerSetObjectNameEXT vkDebugMarkerSetObjectNameEXT = 0; + PFN_vkCmdDebugMarkerBeginEXT vkCmdDebugMarkerBeginEXT = 0; + PFN_vkCmdDebugMarkerEndEXT vkCmdDebugMarkerEndEXT = 0; + PFN_vkCmdDebugMarkerInsertEXT vkCmdDebugMarkerInsertEXT = 0; //=== VK_KHR_video_queue === - vkGetPhysicalDeviceVideoCapabilitiesKHR = - PFN_vkGetPhysicalDeviceVideoCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceVideoCapabilitiesKHR" ) ); - vkGetPhysicalDeviceVideoFormatPropertiesKHR = - PFN_vkGetPhysicalDeviceVideoFormatPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceVideoFormatPropertiesKHR" ) ); - vkCreateVideoSessionKHR = PFN_vkCreateVideoSessionKHR( vkGetInstanceProcAddr( instance, "vkCreateVideoSessionKHR" ) ); - vkDestroyVideoSessionKHR = PFN_vkDestroyVideoSessionKHR( vkGetInstanceProcAddr( instance, "vkDestroyVideoSessionKHR" ) ); - vkGetVideoSessionMemoryRequirementsKHR = - PFN_vkGetVideoSessionMemoryRequirementsKHR( vkGetInstanceProcAddr( instance, "vkGetVideoSessionMemoryRequirementsKHR" ) ); - vkBindVideoSessionMemoryKHR = PFN_vkBindVideoSessionMemoryKHR( vkGetInstanceProcAddr( instance, "vkBindVideoSessionMemoryKHR" ) ); - vkCreateVideoSessionParametersKHR = PFN_vkCreateVideoSessionParametersKHR( vkGetInstanceProcAddr( instance, "vkCreateVideoSessionParametersKHR" ) ); - vkUpdateVideoSessionParametersKHR = PFN_vkUpdateVideoSessionParametersKHR( vkGetInstanceProcAddr( instance, "vkUpdateVideoSessionParametersKHR" ) ); - vkDestroyVideoSessionParametersKHR = PFN_vkDestroyVideoSessionParametersKHR( vkGetInstanceProcAddr( instance, "vkDestroyVideoSessionParametersKHR" ) ); - vkCmdBeginVideoCodingKHR = PFN_vkCmdBeginVideoCodingKHR( vkGetInstanceProcAddr( instance, "vkCmdBeginVideoCodingKHR" ) ); - vkCmdEndVideoCodingKHR = PFN_vkCmdEndVideoCodingKHR( vkGetInstanceProcAddr( instance, "vkCmdEndVideoCodingKHR" ) ); - vkCmdControlVideoCodingKHR = PFN_vkCmdControlVideoCodingKHR( vkGetInstanceProcAddr( instance, "vkCmdControlVideoCodingKHR" ) ); + PFN_vkGetPhysicalDeviceVideoCapabilitiesKHR vkGetPhysicalDeviceVideoCapabilitiesKHR = 0; + PFN_vkGetPhysicalDeviceVideoFormatPropertiesKHR vkGetPhysicalDeviceVideoFormatPropertiesKHR = 0; + PFN_vkCreateVideoSessionKHR vkCreateVideoSessionKHR = 0; + PFN_vkDestroyVideoSessionKHR vkDestroyVideoSessionKHR = 0; + PFN_vkGetVideoSessionMemoryRequirementsKHR vkGetVideoSessionMemoryRequirementsKHR = 0; + PFN_vkBindVideoSessionMemoryKHR vkBindVideoSessionMemoryKHR = 0; + PFN_vkCreateVideoSessionParametersKHR 
vkCreateVideoSessionParametersKHR = 0; + PFN_vkUpdateVideoSessionParametersKHR vkUpdateVideoSessionParametersKHR = 0; + PFN_vkDestroyVideoSessionParametersKHR vkDestroyVideoSessionParametersKHR = 0; + PFN_vkCmdBeginVideoCodingKHR vkCmdBeginVideoCodingKHR = 0; + PFN_vkCmdEndVideoCodingKHR vkCmdEndVideoCodingKHR = 0; + PFN_vkCmdControlVideoCodingKHR vkCmdControlVideoCodingKHR = 0; //=== VK_KHR_video_decode_queue === - vkCmdDecodeVideoKHR = PFN_vkCmdDecodeVideoKHR( vkGetInstanceProcAddr( instance, "vkCmdDecodeVideoKHR" ) ); + PFN_vkCmdDecodeVideoKHR vkCmdDecodeVideoKHR = 0; //=== VK_EXT_transform_feedback === - vkCmdBindTransformFeedbackBuffersEXT = - PFN_vkCmdBindTransformFeedbackBuffersEXT( vkGetInstanceProcAddr( instance, "vkCmdBindTransformFeedbackBuffersEXT" ) ); - vkCmdBeginTransformFeedbackEXT = PFN_vkCmdBeginTransformFeedbackEXT( vkGetInstanceProcAddr( instance, "vkCmdBeginTransformFeedbackEXT" ) ); - vkCmdEndTransformFeedbackEXT = PFN_vkCmdEndTransformFeedbackEXT( vkGetInstanceProcAddr( instance, "vkCmdEndTransformFeedbackEXT" ) ); - vkCmdBeginQueryIndexedEXT = PFN_vkCmdBeginQueryIndexedEXT( vkGetInstanceProcAddr( instance, "vkCmdBeginQueryIndexedEXT" ) ); - vkCmdEndQueryIndexedEXT = PFN_vkCmdEndQueryIndexedEXT( vkGetInstanceProcAddr( instance, "vkCmdEndQueryIndexedEXT" ) ); - vkCmdDrawIndirectByteCountEXT = PFN_vkCmdDrawIndirectByteCountEXT( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectByteCountEXT" ) ); + PFN_vkCmdBindTransformFeedbackBuffersEXT vkCmdBindTransformFeedbackBuffersEXT = 0; + PFN_vkCmdBeginTransformFeedbackEXT vkCmdBeginTransformFeedbackEXT = 0; + PFN_vkCmdEndTransformFeedbackEXT vkCmdEndTransformFeedbackEXT = 0; + PFN_vkCmdBeginQueryIndexedEXT vkCmdBeginQueryIndexedEXT = 0; + PFN_vkCmdEndQueryIndexedEXT vkCmdEndQueryIndexedEXT = 0; + PFN_vkCmdDrawIndirectByteCountEXT vkCmdDrawIndirectByteCountEXT = 0; //=== VK_NVX_binary_import === - vkCreateCuModuleNVX = PFN_vkCreateCuModuleNVX( vkGetInstanceProcAddr( instance, "vkCreateCuModuleNVX" ) ); - vkCreateCuFunctionNVX = PFN_vkCreateCuFunctionNVX( vkGetInstanceProcAddr( instance, "vkCreateCuFunctionNVX" ) ); - vkDestroyCuModuleNVX = PFN_vkDestroyCuModuleNVX( vkGetInstanceProcAddr( instance, "vkDestroyCuModuleNVX" ) ); - vkDestroyCuFunctionNVX = PFN_vkDestroyCuFunctionNVX( vkGetInstanceProcAddr( instance, "vkDestroyCuFunctionNVX" ) ); - vkCmdCuLaunchKernelNVX = PFN_vkCmdCuLaunchKernelNVX( vkGetInstanceProcAddr( instance, "vkCmdCuLaunchKernelNVX" ) ); + PFN_vkCreateCuModuleNVX vkCreateCuModuleNVX = 0; + PFN_vkCreateCuFunctionNVX vkCreateCuFunctionNVX = 0; + PFN_vkDestroyCuModuleNVX vkDestroyCuModuleNVX = 0; + PFN_vkDestroyCuFunctionNVX vkDestroyCuFunctionNVX = 0; + PFN_vkCmdCuLaunchKernelNVX vkCmdCuLaunchKernelNVX = 0; //=== VK_NVX_image_view_handle === - vkGetImageViewHandleNVX = PFN_vkGetImageViewHandleNVX( vkGetInstanceProcAddr( instance, "vkGetImageViewHandleNVX" ) ); - vkGetImageViewAddressNVX = PFN_vkGetImageViewAddressNVX( vkGetInstanceProcAddr( instance, "vkGetImageViewAddressNVX" ) ); + PFN_vkGetImageViewHandleNVX vkGetImageViewHandleNVX = 0; + PFN_vkGetImageViewAddressNVX vkGetImageViewAddressNVX = 0; //=== VK_AMD_draw_indirect_count === - vkCmdDrawIndirectCountAMD = PFN_vkCmdDrawIndirectCountAMD( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectCountAMD" ) ); - if ( !vkCmdDrawIndirectCount ) - vkCmdDrawIndirectCount = vkCmdDrawIndirectCountAMD; - vkCmdDrawIndexedIndirectCountAMD = PFN_vkCmdDrawIndexedIndirectCountAMD( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirectCountAMD" ) ); - if ( 
!vkCmdDrawIndexedIndirectCount ) - vkCmdDrawIndexedIndirectCount = vkCmdDrawIndexedIndirectCountAMD; + PFN_vkCmdDrawIndirectCountAMD vkCmdDrawIndirectCountAMD = 0; + PFN_vkCmdDrawIndexedIndirectCountAMD vkCmdDrawIndexedIndirectCountAMD = 0; //=== VK_AMD_shader_info === - vkGetShaderInfoAMD = PFN_vkGetShaderInfoAMD( vkGetInstanceProcAddr( instance, "vkGetShaderInfoAMD" ) ); + PFN_vkGetShaderInfoAMD vkGetShaderInfoAMD = 0; //=== VK_KHR_dynamic_rendering === - vkCmdBeginRenderingKHR = PFN_vkCmdBeginRenderingKHR( vkGetInstanceProcAddr( instance, "vkCmdBeginRenderingKHR" ) ); - if ( !vkCmdBeginRendering ) - vkCmdBeginRendering = vkCmdBeginRenderingKHR; - vkCmdEndRenderingKHR = PFN_vkCmdEndRenderingKHR( vkGetInstanceProcAddr( instance, "vkCmdEndRenderingKHR" ) ); - if ( !vkCmdEndRendering ) - vkCmdEndRendering = vkCmdEndRenderingKHR; + PFN_vkCmdBeginRenderingKHR vkCmdBeginRenderingKHR = 0; + PFN_vkCmdEndRenderingKHR vkCmdEndRenderingKHR = 0; #if defined( VK_USE_PLATFORM_GGP ) //=== VK_GGP_stream_descriptor_surface === - vkCreateStreamDescriptorSurfaceGGP = PFN_vkCreateStreamDescriptorSurfaceGGP( vkGetInstanceProcAddr( instance, "vkCreateStreamDescriptorSurfaceGGP" ) ); + PFN_vkCreateStreamDescriptorSurfaceGGP vkCreateStreamDescriptorSurfaceGGP = 0; +#else + PFN_dummy vkCreateStreamDescriptorSurfaceGGP_placeholder = 0; #endif /*VK_USE_PLATFORM_GGP*/ //=== VK_NV_external_memory_capabilities === - vkGetPhysicalDeviceExternalImageFormatPropertiesNV = - PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalImageFormatPropertiesNV" ) ); + PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV vkGetPhysicalDeviceExternalImageFormatPropertiesNV = 0; #if defined( VK_USE_PLATFORM_WIN32_KHR ) //=== VK_NV_external_memory_win32 === - vkGetMemoryWin32HandleNV = PFN_vkGetMemoryWin32HandleNV( vkGetInstanceProcAddr( instance, "vkGetMemoryWin32HandleNV" ) ); + PFN_vkGetMemoryWin32HandleNV vkGetMemoryWin32HandleNV = 0; +#else + PFN_dummy vkGetMemoryWin32HandleNV_placeholder = 0; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ //=== VK_KHR_get_physical_device_properties2 === - vkGetPhysicalDeviceFeatures2KHR = PFN_vkGetPhysicalDeviceFeatures2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures2KHR" ) ); - if ( !vkGetPhysicalDeviceFeatures2 ) - vkGetPhysicalDeviceFeatures2 = vkGetPhysicalDeviceFeatures2KHR; - vkGetPhysicalDeviceProperties2KHR = PFN_vkGetPhysicalDeviceProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties2KHR" ) ); - if ( !vkGetPhysicalDeviceProperties2 ) - vkGetPhysicalDeviceProperties2 = vkGetPhysicalDeviceProperties2KHR; - vkGetPhysicalDeviceFormatProperties2KHR = - PFN_vkGetPhysicalDeviceFormatProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties2KHR" ) ); - if ( !vkGetPhysicalDeviceFormatProperties2 ) - vkGetPhysicalDeviceFormatProperties2 = vkGetPhysicalDeviceFormatProperties2KHR; - vkGetPhysicalDeviceImageFormatProperties2KHR = - PFN_vkGetPhysicalDeviceImageFormatProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties2KHR" ) ); - if ( !vkGetPhysicalDeviceImageFormatProperties2 ) - vkGetPhysicalDeviceImageFormatProperties2 = vkGetPhysicalDeviceImageFormatProperties2KHR; - vkGetPhysicalDeviceQueueFamilyProperties2KHR = - PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties2KHR" ) ); - if ( !vkGetPhysicalDeviceQueueFamilyProperties2 ) - 
vkGetPhysicalDeviceQueueFamilyProperties2 = vkGetPhysicalDeviceQueueFamilyProperties2KHR; - vkGetPhysicalDeviceMemoryProperties2KHR = - PFN_vkGetPhysicalDeviceMemoryProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties2KHR" ) ); - if ( !vkGetPhysicalDeviceMemoryProperties2 ) - vkGetPhysicalDeviceMemoryProperties2 = vkGetPhysicalDeviceMemoryProperties2KHR; - vkGetPhysicalDeviceSparseImageFormatProperties2KHR = - PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties2KHR" ) ); - if ( !vkGetPhysicalDeviceSparseImageFormatProperties2 ) - vkGetPhysicalDeviceSparseImageFormatProperties2 = vkGetPhysicalDeviceSparseImageFormatProperties2KHR; + PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR = 0; + PFN_vkGetPhysicalDeviceProperties2KHR vkGetPhysicalDeviceProperties2KHR = 0; + PFN_vkGetPhysicalDeviceFormatProperties2KHR vkGetPhysicalDeviceFormatProperties2KHR = 0; + PFN_vkGetPhysicalDeviceImageFormatProperties2KHR vkGetPhysicalDeviceImageFormatProperties2KHR = 0; + PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR vkGetPhysicalDeviceQueueFamilyProperties2KHR = 0; + PFN_vkGetPhysicalDeviceMemoryProperties2KHR vkGetPhysicalDeviceMemoryProperties2KHR = 0; + PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR vkGetPhysicalDeviceSparseImageFormatProperties2KHR = 0; //=== VK_KHR_device_group === - vkGetDeviceGroupPeerMemoryFeaturesKHR = - PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceGroupPeerMemoryFeaturesKHR" ) ); - if ( !vkGetDeviceGroupPeerMemoryFeatures ) - vkGetDeviceGroupPeerMemoryFeatures = vkGetDeviceGroupPeerMemoryFeaturesKHR; - vkCmdSetDeviceMaskKHR = PFN_vkCmdSetDeviceMaskKHR( vkGetInstanceProcAddr( instance, "vkCmdSetDeviceMaskKHR" ) ); - if ( !vkCmdSetDeviceMask ) - vkCmdSetDeviceMask = vkCmdSetDeviceMaskKHR; - vkCmdDispatchBaseKHR = PFN_vkCmdDispatchBaseKHR( vkGetInstanceProcAddr( instance, "vkCmdDispatchBaseKHR" ) ); - if ( !vkCmdDispatchBase ) - vkCmdDispatchBase = vkCmdDispatchBaseKHR; + PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR vkGetDeviceGroupPeerMemoryFeaturesKHR = 0; + PFN_vkCmdSetDeviceMaskKHR vkCmdSetDeviceMaskKHR = 0; + PFN_vkCmdDispatchBaseKHR vkCmdDispatchBaseKHR = 0; #if defined( VK_USE_PLATFORM_VI_NN ) //=== VK_NN_vi_surface === - vkCreateViSurfaceNN = PFN_vkCreateViSurfaceNN( vkGetInstanceProcAddr( instance, "vkCreateViSurfaceNN" ) ); + PFN_vkCreateViSurfaceNN vkCreateViSurfaceNN = 0; +#else + PFN_dummy vkCreateViSurfaceNN_placeholder = 0; #endif /*VK_USE_PLATFORM_VI_NN*/ //=== VK_KHR_maintenance1 === - vkTrimCommandPoolKHR = PFN_vkTrimCommandPoolKHR( vkGetInstanceProcAddr( instance, "vkTrimCommandPoolKHR" ) ); - if ( !vkTrimCommandPool ) - vkTrimCommandPool = vkTrimCommandPoolKHR; + PFN_vkTrimCommandPoolKHR vkTrimCommandPoolKHR = 0; //=== VK_KHR_device_group_creation === - vkEnumeratePhysicalDeviceGroupsKHR = PFN_vkEnumeratePhysicalDeviceGroupsKHR( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceGroupsKHR" ) ); - if ( !vkEnumeratePhysicalDeviceGroups ) - vkEnumeratePhysicalDeviceGroups = vkEnumeratePhysicalDeviceGroupsKHR; + PFN_vkEnumeratePhysicalDeviceGroupsKHR vkEnumeratePhysicalDeviceGroupsKHR = 0; //=== VK_KHR_external_memory_capabilities === - vkGetPhysicalDeviceExternalBufferPropertiesKHR = - PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalBufferPropertiesKHR" ) ); - if ( !vkGetPhysicalDeviceExternalBufferProperties ) 
- vkGetPhysicalDeviceExternalBufferProperties = vkGetPhysicalDeviceExternalBufferPropertiesKHR; + PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR vkGetPhysicalDeviceExternalBufferPropertiesKHR = 0; #if defined( VK_USE_PLATFORM_WIN32_KHR ) //=== VK_KHR_external_memory_win32 === - vkGetMemoryWin32HandleKHR = PFN_vkGetMemoryWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkGetMemoryWin32HandleKHR" ) ); - vkGetMemoryWin32HandlePropertiesKHR = PFN_vkGetMemoryWin32HandlePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetMemoryWin32HandlePropertiesKHR" ) ); + PFN_vkGetMemoryWin32HandleKHR vkGetMemoryWin32HandleKHR = 0; + PFN_vkGetMemoryWin32HandlePropertiesKHR vkGetMemoryWin32HandlePropertiesKHR = 0; +#else + PFN_dummy vkGetMemoryWin32HandleKHR_placeholder = 0; + PFN_dummy vkGetMemoryWin32HandlePropertiesKHR_placeholder = 0; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ //=== VK_KHR_external_memory_fd === - vkGetMemoryFdKHR = PFN_vkGetMemoryFdKHR( vkGetInstanceProcAddr( instance, "vkGetMemoryFdKHR" ) ); - vkGetMemoryFdPropertiesKHR = PFN_vkGetMemoryFdPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetMemoryFdPropertiesKHR" ) ); + PFN_vkGetMemoryFdKHR vkGetMemoryFdKHR = 0; + PFN_vkGetMemoryFdPropertiesKHR vkGetMemoryFdPropertiesKHR = 0; //=== VK_KHR_external_semaphore_capabilities === - vkGetPhysicalDeviceExternalSemaphorePropertiesKHR = - PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalSemaphorePropertiesKHR" ) ); - if ( !vkGetPhysicalDeviceExternalSemaphoreProperties ) - vkGetPhysicalDeviceExternalSemaphoreProperties = vkGetPhysicalDeviceExternalSemaphorePropertiesKHR; + PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR vkGetPhysicalDeviceExternalSemaphorePropertiesKHR = 0; #if defined( VK_USE_PLATFORM_WIN32_KHR ) //=== VK_KHR_external_semaphore_win32 === - vkImportSemaphoreWin32HandleKHR = PFN_vkImportSemaphoreWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkImportSemaphoreWin32HandleKHR" ) ); - vkGetSemaphoreWin32HandleKHR = PFN_vkGetSemaphoreWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkGetSemaphoreWin32HandleKHR" ) ); + PFN_vkImportSemaphoreWin32HandleKHR vkImportSemaphoreWin32HandleKHR = 0; + PFN_vkGetSemaphoreWin32HandleKHR vkGetSemaphoreWin32HandleKHR = 0; +#else + PFN_dummy vkImportSemaphoreWin32HandleKHR_placeholder = 0; + PFN_dummy vkGetSemaphoreWin32HandleKHR_placeholder = 0; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ //=== VK_KHR_external_semaphore_fd === - vkImportSemaphoreFdKHR = PFN_vkImportSemaphoreFdKHR( vkGetInstanceProcAddr( instance, "vkImportSemaphoreFdKHR" ) ); - vkGetSemaphoreFdKHR = PFN_vkGetSemaphoreFdKHR( vkGetInstanceProcAddr( instance, "vkGetSemaphoreFdKHR" ) ); + PFN_vkImportSemaphoreFdKHR vkImportSemaphoreFdKHR = 0; + PFN_vkGetSemaphoreFdKHR vkGetSemaphoreFdKHR = 0; //=== VK_KHR_push_descriptor === - vkCmdPushDescriptorSetKHR = PFN_vkCmdPushDescriptorSetKHR( vkGetInstanceProcAddr( instance, "vkCmdPushDescriptorSetKHR" ) ); - vkCmdPushDescriptorSetWithTemplateKHR = - PFN_vkCmdPushDescriptorSetWithTemplateKHR( vkGetInstanceProcAddr( instance, "vkCmdPushDescriptorSetWithTemplateKHR" ) ); + PFN_vkCmdPushDescriptorSetKHR vkCmdPushDescriptorSetKHR = 0; + PFN_vkCmdPushDescriptorSetWithTemplateKHR vkCmdPushDescriptorSetWithTemplateKHR = 0; //=== VK_EXT_conditional_rendering === - vkCmdBeginConditionalRenderingEXT = PFN_vkCmdBeginConditionalRenderingEXT( vkGetInstanceProcAddr( instance, "vkCmdBeginConditionalRenderingEXT" ) ); - vkCmdEndConditionalRenderingEXT = PFN_vkCmdEndConditionalRenderingEXT( 
vkGetInstanceProcAddr( instance, "vkCmdEndConditionalRenderingEXT" ) ); + PFN_vkCmdBeginConditionalRenderingEXT vkCmdBeginConditionalRenderingEXT = 0; + PFN_vkCmdEndConditionalRenderingEXT vkCmdEndConditionalRenderingEXT = 0; //=== VK_KHR_descriptor_update_template === - vkCreateDescriptorUpdateTemplateKHR = PFN_vkCreateDescriptorUpdateTemplateKHR( vkGetInstanceProcAddr( instance, "vkCreateDescriptorUpdateTemplateKHR" ) ); - if ( !vkCreateDescriptorUpdateTemplate ) - vkCreateDescriptorUpdateTemplate = vkCreateDescriptorUpdateTemplateKHR; - vkDestroyDescriptorUpdateTemplateKHR = - PFN_vkDestroyDescriptorUpdateTemplateKHR( vkGetInstanceProcAddr( instance, "vkDestroyDescriptorUpdateTemplateKHR" ) ); - if ( !vkDestroyDescriptorUpdateTemplate ) - vkDestroyDescriptorUpdateTemplate = vkDestroyDescriptorUpdateTemplateKHR; - vkUpdateDescriptorSetWithTemplateKHR = - PFN_vkUpdateDescriptorSetWithTemplateKHR( vkGetInstanceProcAddr( instance, "vkUpdateDescriptorSetWithTemplateKHR" ) ); - if ( !vkUpdateDescriptorSetWithTemplate ) - vkUpdateDescriptorSetWithTemplate = vkUpdateDescriptorSetWithTemplateKHR; + PFN_vkCreateDescriptorUpdateTemplateKHR vkCreateDescriptorUpdateTemplateKHR = 0; + PFN_vkDestroyDescriptorUpdateTemplateKHR vkDestroyDescriptorUpdateTemplateKHR = 0; + PFN_vkUpdateDescriptorSetWithTemplateKHR vkUpdateDescriptorSetWithTemplateKHR = 0; //=== VK_NV_clip_space_w_scaling === - vkCmdSetViewportWScalingNV = PFN_vkCmdSetViewportWScalingNV( vkGetInstanceProcAddr( instance, "vkCmdSetViewportWScalingNV" ) ); + PFN_vkCmdSetViewportWScalingNV vkCmdSetViewportWScalingNV = 0; //=== VK_EXT_direct_mode_display === - vkReleaseDisplayEXT = PFN_vkReleaseDisplayEXT( vkGetInstanceProcAddr( instance, "vkReleaseDisplayEXT" ) ); + PFN_vkReleaseDisplayEXT vkReleaseDisplayEXT = 0; #if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT ) //=== VK_EXT_acquire_xlib_display === - vkAcquireXlibDisplayEXT = PFN_vkAcquireXlibDisplayEXT( vkGetInstanceProcAddr( instance, "vkAcquireXlibDisplayEXT" ) ); - vkGetRandROutputDisplayEXT = PFN_vkGetRandROutputDisplayEXT( vkGetInstanceProcAddr( instance, "vkGetRandROutputDisplayEXT" ) ); + PFN_vkAcquireXlibDisplayEXT vkAcquireXlibDisplayEXT = 0; + PFN_vkGetRandROutputDisplayEXT vkGetRandROutputDisplayEXT = 0; +#else + PFN_dummy vkAcquireXlibDisplayEXT_placeholder = 0; + PFN_dummy vkGetRandROutputDisplayEXT_placeholder = 0; #endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ //=== VK_EXT_display_surface_counter === - vkGetPhysicalDeviceSurfaceCapabilities2EXT = - PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilities2EXT" ) ); + PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT vkGetPhysicalDeviceSurfaceCapabilities2EXT = 0; //=== VK_EXT_display_control === - vkDisplayPowerControlEXT = PFN_vkDisplayPowerControlEXT( vkGetInstanceProcAddr( instance, "vkDisplayPowerControlEXT" ) ); - vkRegisterDeviceEventEXT = PFN_vkRegisterDeviceEventEXT( vkGetInstanceProcAddr( instance, "vkRegisterDeviceEventEXT" ) ); - vkRegisterDisplayEventEXT = PFN_vkRegisterDisplayEventEXT( vkGetInstanceProcAddr( instance, "vkRegisterDisplayEventEXT" ) ); - vkGetSwapchainCounterEXT = PFN_vkGetSwapchainCounterEXT( vkGetInstanceProcAddr( instance, "vkGetSwapchainCounterEXT" ) ); + PFN_vkDisplayPowerControlEXT vkDisplayPowerControlEXT = 0; + PFN_vkRegisterDeviceEventEXT vkRegisterDeviceEventEXT = 0; + PFN_vkRegisterDisplayEventEXT vkRegisterDisplayEventEXT = 0; + PFN_vkGetSwapchainCounterEXT vkGetSwapchainCounterEXT = 0; //=== VK_GOOGLE_display_timing === - 
vkGetRefreshCycleDurationGOOGLE = PFN_vkGetRefreshCycleDurationGOOGLE( vkGetInstanceProcAddr( instance, "vkGetRefreshCycleDurationGOOGLE" ) ); - vkGetPastPresentationTimingGOOGLE = PFN_vkGetPastPresentationTimingGOOGLE( vkGetInstanceProcAddr( instance, "vkGetPastPresentationTimingGOOGLE" ) ); + PFN_vkGetRefreshCycleDurationGOOGLE vkGetRefreshCycleDurationGOOGLE = 0; + PFN_vkGetPastPresentationTimingGOOGLE vkGetPastPresentationTimingGOOGLE = 0; //=== VK_EXT_discard_rectangles === - vkCmdSetDiscardRectangleEXT = PFN_vkCmdSetDiscardRectangleEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDiscardRectangleEXT" ) ); - vkCmdSetDiscardRectangleEnableEXT = PFN_vkCmdSetDiscardRectangleEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDiscardRectangleEnableEXT" ) ); - vkCmdSetDiscardRectangleModeEXT = PFN_vkCmdSetDiscardRectangleModeEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDiscardRectangleModeEXT" ) ); + PFN_vkCmdSetDiscardRectangleEXT vkCmdSetDiscardRectangleEXT = 0; + PFN_vkCmdSetDiscardRectangleEnableEXT vkCmdSetDiscardRectangleEnableEXT = 0; + PFN_vkCmdSetDiscardRectangleModeEXT vkCmdSetDiscardRectangleModeEXT = 0; //=== VK_EXT_hdr_metadata === - vkSetHdrMetadataEXT = PFN_vkSetHdrMetadataEXT( vkGetInstanceProcAddr( instance, "vkSetHdrMetadataEXT" ) ); + PFN_vkSetHdrMetadataEXT vkSetHdrMetadataEXT = 0; //=== VK_KHR_create_renderpass2 === - vkCreateRenderPass2KHR = PFN_vkCreateRenderPass2KHR( vkGetInstanceProcAddr( instance, "vkCreateRenderPass2KHR" ) ); - if ( !vkCreateRenderPass2 ) - vkCreateRenderPass2 = vkCreateRenderPass2KHR; - vkCmdBeginRenderPass2KHR = PFN_vkCmdBeginRenderPass2KHR( vkGetInstanceProcAddr( instance, "vkCmdBeginRenderPass2KHR" ) ); - if ( !vkCmdBeginRenderPass2 ) - vkCmdBeginRenderPass2 = vkCmdBeginRenderPass2KHR; - vkCmdNextSubpass2KHR = PFN_vkCmdNextSubpass2KHR( vkGetInstanceProcAddr( instance, "vkCmdNextSubpass2KHR" ) ); - if ( !vkCmdNextSubpass2 ) - vkCmdNextSubpass2 = vkCmdNextSubpass2KHR; - vkCmdEndRenderPass2KHR = PFN_vkCmdEndRenderPass2KHR( vkGetInstanceProcAddr( instance, "vkCmdEndRenderPass2KHR" ) ); - if ( !vkCmdEndRenderPass2 ) - vkCmdEndRenderPass2 = vkCmdEndRenderPass2KHR; + PFN_vkCreateRenderPass2KHR vkCreateRenderPass2KHR = 0; + PFN_vkCmdBeginRenderPass2KHR vkCmdBeginRenderPass2KHR = 0; + PFN_vkCmdNextSubpass2KHR vkCmdNextSubpass2KHR = 0; + PFN_vkCmdEndRenderPass2KHR vkCmdEndRenderPass2KHR = 0; //=== VK_KHR_shared_presentable_image === - vkGetSwapchainStatusKHR = PFN_vkGetSwapchainStatusKHR( vkGetInstanceProcAddr( instance, "vkGetSwapchainStatusKHR" ) ); + PFN_vkGetSwapchainStatusKHR vkGetSwapchainStatusKHR = 0; //=== VK_KHR_external_fence_capabilities === - vkGetPhysicalDeviceExternalFencePropertiesKHR = - PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalFencePropertiesKHR" ) ); - if ( !vkGetPhysicalDeviceExternalFenceProperties ) - vkGetPhysicalDeviceExternalFenceProperties = vkGetPhysicalDeviceExternalFencePropertiesKHR; + PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR vkGetPhysicalDeviceExternalFencePropertiesKHR = 0; #if defined( VK_USE_PLATFORM_WIN32_KHR ) //=== VK_KHR_external_fence_win32 === - vkImportFenceWin32HandleKHR = PFN_vkImportFenceWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkImportFenceWin32HandleKHR" ) ); - vkGetFenceWin32HandleKHR = PFN_vkGetFenceWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkGetFenceWin32HandleKHR" ) ); + PFN_vkImportFenceWin32HandleKHR vkImportFenceWin32HandleKHR = 0; + PFN_vkGetFenceWin32HandleKHR vkGetFenceWin32HandleKHR = 0; +#else + 
PFN_dummy vkImportFenceWin32HandleKHR_placeholder = 0; + PFN_dummy vkGetFenceWin32HandleKHR_placeholder = 0; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ //=== VK_KHR_external_fence_fd === - vkImportFenceFdKHR = PFN_vkImportFenceFdKHR( vkGetInstanceProcAddr( instance, "vkImportFenceFdKHR" ) ); - vkGetFenceFdKHR = PFN_vkGetFenceFdKHR( vkGetInstanceProcAddr( instance, "vkGetFenceFdKHR" ) ); + PFN_vkImportFenceFdKHR vkImportFenceFdKHR = 0; + PFN_vkGetFenceFdKHR vkGetFenceFdKHR = 0; //=== VK_KHR_performance_query === - vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR = PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( - vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR" ) ); - vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR = PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( - vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR" ) ); - vkAcquireProfilingLockKHR = PFN_vkAcquireProfilingLockKHR( vkGetInstanceProcAddr( instance, "vkAcquireProfilingLockKHR" ) ); - vkReleaseProfilingLockKHR = PFN_vkReleaseProfilingLockKHR( vkGetInstanceProcAddr( instance, "vkReleaseProfilingLockKHR" ) ); + PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR = 0; + PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR = 0; + PFN_vkAcquireProfilingLockKHR vkAcquireProfilingLockKHR = 0; + PFN_vkReleaseProfilingLockKHR vkReleaseProfilingLockKHR = 0; //=== VK_KHR_get_surface_capabilities2 === - vkGetPhysicalDeviceSurfaceCapabilities2KHR = - PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilities2KHR" ) ); - vkGetPhysicalDeviceSurfaceFormats2KHR = - PFN_vkGetPhysicalDeviceSurfaceFormats2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceFormats2KHR" ) ); + PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR vkGetPhysicalDeviceSurfaceCapabilities2KHR = 0; + PFN_vkGetPhysicalDeviceSurfaceFormats2KHR vkGetPhysicalDeviceSurfaceFormats2KHR = 0; //=== VK_KHR_get_display_properties2 === - vkGetPhysicalDeviceDisplayProperties2KHR = - PFN_vkGetPhysicalDeviceDisplayProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayProperties2KHR" ) ); - vkGetPhysicalDeviceDisplayPlaneProperties2KHR = - PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPlaneProperties2KHR" ) ); - vkGetDisplayModeProperties2KHR = PFN_vkGetDisplayModeProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetDisplayModeProperties2KHR" ) ); - vkGetDisplayPlaneCapabilities2KHR = PFN_vkGetDisplayPlaneCapabilities2KHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneCapabilities2KHR" ) ); + PFN_vkGetPhysicalDeviceDisplayProperties2KHR vkGetPhysicalDeviceDisplayProperties2KHR = 0; + PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR vkGetPhysicalDeviceDisplayPlaneProperties2KHR = 0; + PFN_vkGetDisplayModeProperties2KHR vkGetDisplayModeProperties2KHR = 0; + PFN_vkGetDisplayPlaneCapabilities2KHR vkGetDisplayPlaneCapabilities2KHR = 0; #if defined( VK_USE_PLATFORM_IOS_MVK ) //=== VK_MVK_ios_surface === - vkCreateIOSSurfaceMVK = PFN_vkCreateIOSSurfaceMVK( vkGetInstanceProcAddr( instance, "vkCreateIOSSurfaceMVK" ) ); + PFN_vkCreateIOSSurfaceMVK vkCreateIOSSurfaceMVK = 0; +#else + PFN_dummy vkCreateIOSSurfaceMVK_placeholder = 0; #endif 
/*VK_USE_PLATFORM_IOS_MVK*/ #if defined( VK_USE_PLATFORM_MACOS_MVK ) //=== VK_MVK_macos_surface === - vkCreateMacOSSurfaceMVK = PFN_vkCreateMacOSSurfaceMVK( vkGetInstanceProcAddr( instance, "vkCreateMacOSSurfaceMVK" ) ); + PFN_vkCreateMacOSSurfaceMVK vkCreateMacOSSurfaceMVK = 0; +#else + PFN_dummy vkCreateMacOSSurfaceMVK_placeholder = 0; #endif /*VK_USE_PLATFORM_MACOS_MVK*/ //=== VK_EXT_debug_utils === - vkSetDebugUtilsObjectNameEXT = PFN_vkSetDebugUtilsObjectNameEXT( vkGetInstanceProcAddr( instance, "vkSetDebugUtilsObjectNameEXT" ) ); - vkSetDebugUtilsObjectTagEXT = PFN_vkSetDebugUtilsObjectTagEXT( vkGetInstanceProcAddr( instance, "vkSetDebugUtilsObjectTagEXT" ) ); - vkQueueBeginDebugUtilsLabelEXT = PFN_vkQueueBeginDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkQueueBeginDebugUtilsLabelEXT" ) ); - vkQueueEndDebugUtilsLabelEXT = PFN_vkQueueEndDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkQueueEndDebugUtilsLabelEXT" ) ); - vkQueueInsertDebugUtilsLabelEXT = PFN_vkQueueInsertDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkQueueInsertDebugUtilsLabelEXT" ) ); - vkCmdBeginDebugUtilsLabelEXT = PFN_vkCmdBeginDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkCmdBeginDebugUtilsLabelEXT" ) ); - vkCmdEndDebugUtilsLabelEXT = PFN_vkCmdEndDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkCmdEndDebugUtilsLabelEXT" ) ); - vkCmdInsertDebugUtilsLabelEXT = PFN_vkCmdInsertDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkCmdInsertDebugUtilsLabelEXT" ) ); - vkCreateDebugUtilsMessengerEXT = PFN_vkCreateDebugUtilsMessengerEXT( vkGetInstanceProcAddr( instance, "vkCreateDebugUtilsMessengerEXT" ) ); - vkDestroyDebugUtilsMessengerEXT = PFN_vkDestroyDebugUtilsMessengerEXT( vkGetInstanceProcAddr( instance, "vkDestroyDebugUtilsMessengerEXT" ) ); - vkSubmitDebugUtilsMessageEXT = PFN_vkSubmitDebugUtilsMessageEXT( vkGetInstanceProcAddr( instance, "vkSubmitDebugUtilsMessageEXT" ) ); + PFN_vkSetDebugUtilsObjectNameEXT vkSetDebugUtilsObjectNameEXT = 0; + PFN_vkSetDebugUtilsObjectTagEXT vkSetDebugUtilsObjectTagEXT = 0; + PFN_vkQueueBeginDebugUtilsLabelEXT vkQueueBeginDebugUtilsLabelEXT = 0; + PFN_vkQueueEndDebugUtilsLabelEXT vkQueueEndDebugUtilsLabelEXT = 0; + PFN_vkQueueInsertDebugUtilsLabelEXT vkQueueInsertDebugUtilsLabelEXT = 0; + PFN_vkCmdBeginDebugUtilsLabelEXT vkCmdBeginDebugUtilsLabelEXT = 0; + PFN_vkCmdEndDebugUtilsLabelEXT vkCmdEndDebugUtilsLabelEXT = 0; + PFN_vkCmdInsertDebugUtilsLabelEXT vkCmdInsertDebugUtilsLabelEXT = 0; + PFN_vkCreateDebugUtilsMessengerEXT vkCreateDebugUtilsMessengerEXT = 0; + PFN_vkDestroyDebugUtilsMessengerEXT vkDestroyDebugUtilsMessengerEXT = 0; + PFN_vkSubmitDebugUtilsMessageEXT vkSubmitDebugUtilsMessageEXT = 0; #if defined( VK_USE_PLATFORM_ANDROID_KHR ) //=== VK_ANDROID_external_memory_android_hardware_buffer === - vkGetAndroidHardwareBufferPropertiesANDROID = - PFN_vkGetAndroidHardwareBufferPropertiesANDROID( vkGetInstanceProcAddr( instance, "vkGetAndroidHardwareBufferPropertiesANDROID" ) ); - vkGetMemoryAndroidHardwareBufferANDROID = - PFN_vkGetMemoryAndroidHardwareBufferANDROID( vkGetInstanceProcAddr( instance, "vkGetMemoryAndroidHardwareBufferANDROID" ) ); + PFN_vkGetAndroidHardwareBufferPropertiesANDROID vkGetAndroidHardwareBufferPropertiesANDROID = 0; + PFN_vkGetMemoryAndroidHardwareBufferANDROID vkGetMemoryAndroidHardwareBufferANDROID = 0; +#else + PFN_dummy vkGetAndroidHardwareBufferPropertiesANDROID_placeholder = 0; + PFN_dummy vkGetMemoryAndroidHardwareBufferANDROID_placeholder = 0; #endif /*VK_USE_PLATFORM_ANDROID_KHR*/ #if defined( 
VK_ENABLE_BETA_EXTENSIONS ) //=== VK_AMDX_shader_enqueue === - vkCreateExecutionGraphPipelinesAMDX = PFN_vkCreateExecutionGraphPipelinesAMDX( vkGetInstanceProcAddr( instance, "vkCreateExecutionGraphPipelinesAMDX" ) ); - vkGetExecutionGraphPipelineScratchSizeAMDX = - PFN_vkGetExecutionGraphPipelineScratchSizeAMDX( vkGetInstanceProcAddr( instance, "vkGetExecutionGraphPipelineScratchSizeAMDX" ) ); - vkGetExecutionGraphPipelineNodeIndexAMDX = - PFN_vkGetExecutionGraphPipelineNodeIndexAMDX( vkGetInstanceProcAddr( instance, "vkGetExecutionGraphPipelineNodeIndexAMDX" ) ); - vkCmdInitializeGraphScratchMemoryAMDX = - PFN_vkCmdInitializeGraphScratchMemoryAMDX( vkGetInstanceProcAddr( instance, "vkCmdInitializeGraphScratchMemoryAMDX" ) ); - vkCmdDispatchGraphAMDX = PFN_vkCmdDispatchGraphAMDX( vkGetInstanceProcAddr( instance, "vkCmdDispatchGraphAMDX" ) ); - vkCmdDispatchGraphIndirectAMDX = PFN_vkCmdDispatchGraphIndirectAMDX( vkGetInstanceProcAddr( instance, "vkCmdDispatchGraphIndirectAMDX" ) ); - vkCmdDispatchGraphIndirectCountAMDX = PFN_vkCmdDispatchGraphIndirectCountAMDX( vkGetInstanceProcAddr( instance, "vkCmdDispatchGraphIndirectCountAMDX" ) ); + PFN_vkCreateExecutionGraphPipelinesAMDX vkCreateExecutionGraphPipelinesAMDX = 0; + PFN_vkGetExecutionGraphPipelineScratchSizeAMDX vkGetExecutionGraphPipelineScratchSizeAMDX = 0; + PFN_vkGetExecutionGraphPipelineNodeIndexAMDX vkGetExecutionGraphPipelineNodeIndexAMDX = 0; + PFN_vkCmdInitializeGraphScratchMemoryAMDX vkCmdInitializeGraphScratchMemoryAMDX = 0; + PFN_vkCmdDispatchGraphAMDX vkCmdDispatchGraphAMDX = 0; + PFN_vkCmdDispatchGraphIndirectAMDX vkCmdDispatchGraphIndirectAMDX = 0; + PFN_vkCmdDispatchGraphIndirectCountAMDX vkCmdDispatchGraphIndirectCountAMDX = 0; +#else + PFN_dummy vkCreateExecutionGraphPipelinesAMDX_placeholder = 0; + PFN_dummy vkGetExecutionGraphPipelineScratchSizeAMDX_placeholder = 0; + PFN_dummy vkGetExecutionGraphPipelineNodeIndexAMDX_placeholder = 0; + PFN_dummy vkCmdInitializeGraphScratchMemoryAMDX_placeholder = 0; + PFN_dummy vkCmdDispatchGraphAMDX_placeholder = 0; + PFN_dummy vkCmdDispatchGraphIndirectAMDX_placeholder = 0; + PFN_dummy vkCmdDispatchGraphIndirectCountAMDX_placeholder = 0; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ //=== VK_EXT_sample_locations === - vkCmdSetSampleLocationsEXT = PFN_vkCmdSetSampleLocationsEXT( vkGetInstanceProcAddr( instance, "vkCmdSetSampleLocationsEXT" ) ); - vkGetPhysicalDeviceMultisamplePropertiesEXT = - PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMultisamplePropertiesEXT" ) ); + PFN_vkCmdSetSampleLocationsEXT vkCmdSetSampleLocationsEXT = 0; + PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT vkGetPhysicalDeviceMultisamplePropertiesEXT = 0; //=== VK_KHR_get_memory_requirements2 === - vkGetImageMemoryRequirements2KHR = PFN_vkGetImageMemoryRequirements2KHR( vkGetInstanceProcAddr( instance, "vkGetImageMemoryRequirements2KHR" ) ); - if ( !vkGetImageMemoryRequirements2 ) - vkGetImageMemoryRequirements2 = vkGetImageMemoryRequirements2KHR; - vkGetBufferMemoryRequirements2KHR = PFN_vkGetBufferMemoryRequirements2KHR( vkGetInstanceProcAddr( instance, "vkGetBufferMemoryRequirements2KHR" ) ); - if ( !vkGetBufferMemoryRequirements2 ) - vkGetBufferMemoryRequirements2 = vkGetBufferMemoryRequirements2KHR; - vkGetImageSparseMemoryRequirements2KHR = - PFN_vkGetImageSparseMemoryRequirements2KHR( vkGetInstanceProcAddr( instance, "vkGetImageSparseMemoryRequirements2KHR" ) ); - if ( !vkGetImageSparseMemoryRequirements2 ) - vkGetImageSparseMemoryRequirements2 = 
vkGetImageSparseMemoryRequirements2KHR; + PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR = 0; + PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR = 0; + PFN_vkGetImageSparseMemoryRequirements2KHR vkGetImageSparseMemoryRequirements2KHR = 0; //=== VK_KHR_acceleration_structure === - vkCreateAccelerationStructureKHR = PFN_vkCreateAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCreateAccelerationStructureKHR" ) ); - vkDestroyAccelerationStructureKHR = PFN_vkDestroyAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkDestroyAccelerationStructureKHR" ) ); - vkCmdBuildAccelerationStructuresKHR = PFN_vkCmdBuildAccelerationStructuresKHR( vkGetInstanceProcAddr( instance, "vkCmdBuildAccelerationStructuresKHR" ) ); - vkCmdBuildAccelerationStructuresIndirectKHR = - PFN_vkCmdBuildAccelerationStructuresIndirectKHR( vkGetInstanceProcAddr( instance, "vkCmdBuildAccelerationStructuresIndirectKHR" ) ); - vkBuildAccelerationStructuresKHR = PFN_vkBuildAccelerationStructuresKHR( vkGetInstanceProcAddr( instance, "vkBuildAccelerationStructuresKHR" ) ); - vkCopyAccelerationStructureKHR = PFN_vkCopyAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCopyAccelerationStructureKHR" ) ); - vkCopyAccelerationStructureToMemoryKHR = - PFN_vkCopyAccelerationStructureToMemoryKHR( vkGetInstanceProcAddr( instance, "vkCopyAccelerationStructureToMemoryKHR" ) ); - vkCopyMemoryToAccelerationStructureKHR = - PFN_vkCopyMemoryToAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCopyMemoryToAccelerationStructureKHR" ) ); - vkWriteAccelerationStructuresPropertiesKHR = - PFN_vkWriteAccelerationStructuresPropertiesKHR( vkGetInstanceProcAddr( instance, "vkWriteAccelerationStructuresPropertiesKHR" ) ); - vkCmdCopyAccelerationStructureKHR = PFN_vkCmdCopyAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCmdCopyAccelerationStructureKHR" ) ); - vkCmdCopyAccelerationStructureToMemoryKHR = - PFN_vkCmdCopyAccelerationStructureToMemoryKHR( vkGetInstanceProcAddr( instance, "vkCmdCopyAccelerationStructureToMemoryKHR" ) ); - vkCmdCopyMemoryToAccelerationStructureKHR = - PFN_vkCmdCopyMemoryToAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCmdCopyMemoryToAccelerationStructureKHR" ) ); - vkGetAccelerationStructureDeviceAddressKHR = - PFN_vkGetAccelerationStructureDeviceAddressKHR( vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureDeviceAddressKHR" ) ); - vkCmdWriteAccelerationStructuresPropertiesKHR = - PFN_vkCmdWriteAccelerationStructuresPropertiesKHR( vkGetInstanceProcAddr( instance, "vkCmdWriteAccelerationStructuresPropertiesKHR" ) ); - vkGetDeviceAccelerationStructureCompatibilityKHR = - PFN_vkGetDeviceAccelerationStructureCompatibilityKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceAccelerationStructureCompatibilityKHR" ) ); - vkGetAccelerationStructureBuildSizesKHR = - PFN_vkGetAccelerationStructureBuildSizesKHR( vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureBuildSizesKHR" ) ); + PFN_vkCreateAccelerationStructureKHR vkCreateAccelerationStructureKHR = 0; + PFN_vkDestroyAccelerationStructureKHR vkDestroyAccelerationStructureKHR = 0; + PFN_vkCmdBuildAccelerationStructuresKHR vkCmdBuildAccelerationStructuresKHR = 0; + PFN_vkCmdBuildAccelerationStructuresIndirectKHR vkCmdBuildAccelerationStructuresIndirectKHR = 0; + PFN_vkBuildAccelerationStructuresKHR vkBuildAccelerationStructuresKHR = 0; + PFN_vkCopyAccelerationStructureKHR vkCopyAccelerationStructureKHR = 0; + PFN_vkCopyAccelerationStructureToMemoryKHR 
vkCopyAccelerationStructureToMemoryKHR = 0; + PFN_vkCopyMemoryToAccelerationStructureKHR vkCopyMemoryToAccelerationStructureKHR = 0; + PFN_vkWriteAccelerationStructuresPropertiesKHR vkWriteAccelerationStructuresPropertiesKHR = 0; + PFN_vkCmdCopyAccelerationStructureKHR vkCmdCopyAccelerationStructureKHR = 0; + PFN_vkCmdCopyAccelerationStructureToMemoryKHR vkCmdCopyAccelerationStructureToMemoryKHR = 0; + PFN_vkCmdCopyMemoryToAccelerationStructureKHR vkCmdCopyMemoryToAccelerationStructureKHR = 0; + PFN_vkGetAccelerationStructureDeviceAddressKHR vkGetAccelerationStructureDeviceAddressKHR = 0; + PFN_vkCmdWriteAccelerationStructuresPropertiesKHR vkCmdWriteAccelerationStructuresPropertiesKHR = 0; + PFN_vkGetDeviceAccelerationStructureCompatibilityKHR vkGetDeviceAccelerationStructureCompatibilityKHR = 0; + PFN_vkGetAccelerationStructureBuildSizesKHR vkGetAccelerationStructureBuildSizesKHR = 0; //=== VK_KHR_ray_tracing_pipeline === - vkCmdTraceRaysKHR = PFN_vkCmdTraceRaysKHR( vkGetInstanceProcAddr( instance, "vkCmdTraceRaysKHR" ) ); - vkCreateRayTracingPipelinesKHR = PFN_vkCreateRayTracingPipelinesKHR( vkGetInstanceProcAddr( instance, "vkCreateRayTracingPipelinesKHR" ) ); - vkGetRayTracingShaderGroupHandlesKHR = - PFN_vkGetRayTracingShaderGroupHandlesKHR( vkGetInstanceProcAddr( instance, "vkGetRayTracingShaderGroupHandlesKHR" ) ); - vkGetRayTracingCaptureReplayShaderGroupHandlesKHR = - PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( vkGetInstanceProcAddr( instance, "vkGetRayTracingCaptureReplayShaderGroupHandlesKHR" ) ); - vkCmdTraceRaysIndirectKHR = PFN_vkCmdTraceRaysIndirectKHR( vkGetInstanceProcAddr( instance, "vkCmdTraceRaysIndirectKHR" ) ); - vkGetRayTracingShaderGroupStackSizeKHR = - PFN_vkGetRayTracingShaderGroupStackSizeKHR( vkGetInstanceProcAddr( instance, "vkGetRayTracingShaderGroupStackSizeKHR" ) ); - vkCmdSetRayTracingPipelineStackSizeKHR = - PFN_vkCmdSetRayTracingPipelineStackSizeKHR( vkGetInstanceProcAddr( instance, "vkCmdSetRayTracingPipelineStackSizeKHR" ) ); + PFN_vkCmdTraceRaysKHR vkCmdTraceRaysKHR = 0; + PFN_vkCreateRayTracingPipelinesKHR vkCreateRayTracingPipelinesKHR = 0; + PFN_vkGetRayTracingShaderGroupHandlesKHR vkGetRayTracingShaderGroupHandlesKHR = 0; + PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR vkGetRayTracingCaptureReplayShaderGroupHandlesKHR = 0; + PFN_vkCmdTraceRaysIndirectKHR vkCmdTraceRaysIndirectKHR = 0; + PFN_vkGetRayTracingShaderGroupStackSizeKHR vkGetRayTracingShaderGroupStackSizeKHR = 0; + PFN_vkCmdSetRayTracingPipelineStackSizeKHR vkCmdSetRayTracingPipelineStackSizeKHR = 0; //=== VK_KHR_sampler_ycbcr_conversion === - vkCreateSamplerYcbcrConversionKHR = PFN_vkCreateSamplerYcbcrConversionKHR( vkGetInstanceProcAddr( instance, "vkCreateSamplerYcbcrConversionKHR" ) ); - if ( !vkCreateSamplerYcbcrConversion ) - vkCreateSamplerYcbcrConversion = vkCreateSamplerYcbcrConversionKHR; - vkDestroySamplerYcbcrConversionKHR = PFN_vkDestroySamplerYcbcrConversionKHR( vkGetInstanceProcAddr( instance, "vkDestroySamplerYcbcrConversionKHR" ) ); - if ( !vkDestroySamplerYcbcrConversion ) - vkDestroySamplerYcbcrConversion = vkDestroySamplerYcbcrConversionKHR; + PFN_vkCreateSamplerYcbcrConversionKHR vkCreateSamplerYcbcrConversionKHR = 0; + PFN_vkDestroySamplerYcbcrConversionKHR vkDestroySamplerYcbcrConversionKHR = 0; //=== VK_KHR_bind_memory2 === - vkBindBufferMemory2KHR = PFN_vkBindBufferMemory2KHR( vkGetInstanceProcAddr( instance, "vkBindBufferMemory2KHR" ) ); - if ( !vkBindBufferMemory2 ) - vkBindBufferMemory2 = vkBindBufferMemory2KHR; - vkBindImageMemory2KHR = 
PFN_vkBindImageMemory2KHR( vkGetInstanceProcAddr( instance, "vkBindImageMemory2KHR" ) ); - if ( !vkBindImageMemory2 ) - vkBindImageMemory2 = vkBindImageMemory2KHR; + PFN_vkBindBufferMemory2KHR vkBindBufferMemory2KHR = 0; + PFN_vkBindImageMemory2KHR vkBindImageMemory2KHR = 0; //=== VK_EXT_image_drm_format_modifier === - vkGetImageDrmFormatModifierPropertiesEXT = - PFN_vkGetImageDrmFormatModifierPropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetImageDrmFormatModifierPropertiesEXT" ) ); + PFN_vkGetImageDrmFormatModifierPropertiesEXT vkGetImageDrmFormatModifierPropertiesEXT = 0; //=== VK_EXT_validation_cache === - vkCreateValidationCacheEXT = PFN_vkCreateValidationCacheEXT( vkGetInstanceProcAddr( instance, "vkCreateValidationCacheEXT" ) ); - vkDestroyValidationCacheEXT = PFN_vkDestroyValidationCacheEXT( vkGetInstanceProcAddr( instance, "vkDestroyValidationCacheEXT" ) ); - vkMergeValidationCachesEXT = PFN_vkMergeValidationCachesEXT( vkGetInstanceProcAddr( instance, "vkMergeValidationCachesEXT" ) ); - vkGetValidationCacheDataEXT = PFN_vkGetValidationCacheDataEXT( vkGetInstanceProcAddr( instance, "vkGetValidationCacheDataEXT" ) ); + PFN_vkCreateValidationCacheEXT vkCreateValidationCacheEXT = 0; + PFN_vkDestroyValidationCacheEXT vkDestroyValidationCacheEXT = 0; + PFN_vkMergeValidationCachesEXT vkMergeValidationCachesEXT = 0; + PFN_vkGetValidationCacheDataEXT vkGetValidationCacheDataEXT = 0; //=== VK_NV_shading_rate_image === - vkCmdBindShadingRateImageNV = PFN_vkCmdBindShadingRateImageNV( vkGetInstanceProcAddr( instance, "vkCmdBindShadingRateImageNV" ) ); - vkCmdSetViewportShadingRatePaletteNV = - PFN_vkCmdSetViewportShadingRatePaletteNV( vkGetInstanceProcAddr( instance, "vkCmdSetViewportShadingRatePaletteNV" ) ); - vkCmdSetCoarseSampleOrderNV = PFN_vkCmdSetCoarseSampleOrderNV( vkGetInstanceProcAddr( instance, "vkCmdSetCoarseSampleOrderNV" ) ); + PFN_vkCmdBindShadingRateImageNV vkCmdBindShadingRateImageNV = 0; + PFN_vkCmdSetViewportShadingRatePaletteNV vkCmdSetViewportShadingRatePaletteNV = 0; + PFN_vkCmdSetCoarseSampleOrderNV vkCmdSetCoarseSampleOrderNV = 0; //=== VK_NV_ray_tracing === - vkCreateAccelerationStructureNV = PFN_vkCreateAccelerationStructureNV( vkGetInstanceProcAddr( instance, "vkCreateAccelerationStructureNV" ) ); - vkDestroyAccelerationStructureNV = PFN_vkDestroyAccelerationStructureNV( vkGetInstanceProcAddr( instance, "vkDestroyAccelerationStructureNV" ) ); - vkGetAccelerationStructureMemoryRequirementsNV = - PFN_vkGetAccelerationStructureMemoryRequirementsNV( vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureMemoryRequirementsNV" ) ); - vkBindAccelerationStructureMemoryNV = PFN_vkBindAccelerationStructureMemoryNV( vkGetInstanceProcAddr( instance, "vkBindAccelerationStructureMemoryNV" ) ); - vkCmdBuildAccelerationStructureNV = PFN_vkCmdBuildAccelerationStructureNV( vkGetInstanceProcAddr( instance, "vkCmdBuildAccelerationStructureNV" ) ); - vkCmdCopyAccelerationStructureNV = PFN_vkCmdCopyAccelerationStructureNV( vkGetInstanceProcAddr( instance, "vkCmdCopyAccelerationStructureNV" ) ); - vkCmdTraceRaysNV = PFN_vkCmdTraceRaysNV( vkGetInstanceProcAddr( instance, "vkCmdTraceRaysNV" ) ); - vkCreateRayTracingPipelinesNV = PFN_vkCreateRayTracingPipelinesNV( vkGetInstanceProcAddr( instance, "vkCreateRayTracingPipelinesNV" ) ); - vkGetRayTracingShaderGroupHandlesNV = PFN_vkGetRayTracingShaderGroupHandlesNV( vkGetInstanceProcAddr( instance, "vkGetRayTracingShaderGroupHandlesNV" ) ); - if ( !vkGetRayTracingShaderGroupHandlesKHR ) - vkGetRayTracingShaderGroupHandlesKHR = 
vkGetRayTracingShaderGroupHandlesNV;
- vkGetAccelerationStructureHandleNV = PFN_vkGetAccelerationStructureHandleNV( vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureHandleNV" ) );
- vkCmdWriteAccelerationStructuresPropertiesNV =
- PFN_vkCmdWriteAccelerationStructuresPropertiesNV( vkGetInstanceProcAddr( instance, "vkCmdWriteAccelerationStructuresPropertiesNV" ) );
- vkCompileDeferredNV = PFN_vkCompileDeferredNV( vkGetInstanceProcAddr( instance, "vkCompileDeferredNV" ) );
+ PFN_vkCreateAccelerationStructureNV vkCreateAccelerationStructureNV = 0;
+ PFN_vkDestroyAccelerationStructureNV vkDestroyAccelerationStructureNV = 0;
+ PFN_vkGetAccelerationStructureMemoryRequirementsNV vkGetAccelerationStructureMemoryRequirementsNV = 0;
+ PFN_vkBindAccelerationStructureMemoryNV vkBindAccelerationStructureMemoryNV = 0;
+ PFN_vkCmdBuildAccelerationStructureNV vkCmdBuildAccelerationStructureNV = 0;
+ PFN_vkCmdCopyAccelerationStructureNV vkCmdCopyAccelerationStructureNV = 0;
+ PFN_vkCmdTraceRaysNV vkCmdTraceRaysNV = 0;
+ PFN_vkCreateRayTracingPipelinesNV vkCreateRayTracingPipelinesNV = 0;
+ PFN_vkGetRayTracingShaderGroupHandlesNV vkGetRayTracingShaderGroupHandlesNV = 0;
+ PFN_vkGetAccelerationStructureHandleNV vkGetAccelerationStructureHandleNV = 0;
+ PFN_vkCmdWriteAccelerationStructuresPropertiesNV vkCmdWriteAccelerationStructuresPropertiesNV = 0;
+ PFN_vkCompileDeferredNV vkCompileDeferredNV = 0;
 //=== VK_KHR_maintenance3 ===
- vkGetDescriptorSetLayoutSupportKHR = PFN_vkGetDescriptorSetLayoutSupportKHR( vkGetInstanceProcAddr( instance, "vkGetDescriptorSetLayoutSupportKHR" ) );
- if ( !vkGetDescriptorSetLayoutSupport )
- vkGetDescriptorSetLayoutSupport = vkGetDescriptorSetLayoutSupportKHR;
+ PFN_vkGetDescriptorSetLayoutSupportKHR vkGetDescriptorSetLayoutSupportKHR = 0;
 //=== VK_KHR_draw_indirect_count ===
- vkCmdDrawIndirectCountKHR = PFN_vkCmdDrawIndirectCountKHR( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectCountKHR" ) );
- if ( !vkCmdDrawIndirectCount )
- vkCmdDrawIndirectCount = vkCmdDrawIndirectCountKHR;
- vkCmdDrawIndexedIndirectCountKHR = PFN_vkCmdDrawIndexedIndirectCountKHR( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirectCountKHR" ) );
- if ( !vkCmdDrawIndexedIndirectCount )
- vkCmdDrawIndexedIndirectCount = vkCmdDrawIndexedIndirectCountKHR;
+ PFN_vkCmdDrawIndirectCountKHR vkCmdDrawIndirectCountKHR = 0;
+ PFN_vkCmdDrawIndexedIndirectCountKHR vkCmdDrawIndexedIndirectCountKHR = 0;
 //=== VK_EXT_external_memory_host ===
- vkGetMemoryHostPointerPropertiesEXT = PFN_vkGetMemoryHostPointerPropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetMemoryHostPointerPropertiesEXT" ) );
+ PFN_vkGetMemoryHostPointerPropertiesEXT vkGetMemoryHostPointerPropertiesEXT = 0;
 //=== VK_AMD_buffer_marker ===
- vkCmdWriteBufferMarkerAMD = PFN_vkCmdWriteBufferMarkerAMD( vkGetInstanceProcAddr( instance, "vkCmdWriteBufferMarkerAMD" ) );
- vkCmdWriteBufferMarker2AMD = PFN_vkCmdWriteBufferMarker2AMD( vkGetInstanceProcAddr( instance, "vkCmdWriteBufferMarker2AMD" ) );
+ PFN_vkCmdWriteBufferMarkerAMD vkCmdWriteBufferMarkerAMD = 0;
+ PFN_vkCmdWriteBufferMarker2AMD vkCmdWriteBufferMarker2AMD = 0;
 //=== VK_EXT_calibrated_timestamps ===
- vkGetPhysicalDeviceCalibrateableTimeDomainsEXT =
- PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCalibrateableTimeDomainsEXT" ) );
- if ( !vkGetPhysicalDeviceCalibrateableTimeDomainsKHR )
- vkGetPhysicalDeviceCalibrateableTimeDomainsKHR = vkGetPhysicalDeviceCalibrateableTimeDomainsEXT;
- vkGetCalibratedTimestampsEXT = PFN_vkGetCalibratedTimestampsEXT( vkGetInstanceProcAddr( instance, "vkGetCalibratedTimestampsEXT" ) );
- if ( !vkGetCalibratedTimestampsKHR )
- vkGetCalibratedTimestampsKHR = vkGetCalibratedTimestampsEXT;
+ PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT vkGetPhysicalDeviceCalibrateableTimeDomainsEXT = 0;
+ PFN_vkGetCalibratedTimestampsEXT vkGetCalibratedTimestampsEXT = 0;
 //=== VK_NV_mesh_shader ===
- vkCmdDrawMeshTasksNV = PFN_vkCmdDrawMeshTasksNV( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksNV" ) );
- vkCmdDrawMeshTasksIndirectNV = PFN_vkCmdDrawMeshTasksIndirectNV( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksIndirectNV" ) );
- vkCmdDrawMeshTasksIndirectCountNV = PFN_vkCmdDrawMeshTasksIndirectCountNV( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksIndirectCountNV" ) );
+ PFN_vkCmdDrawMeshTasksNV vkCmdDrawMeshTasksNV = 0;
+ PFN_vkCmdDrawMeshTasksIndirectNV vkCmdDrawMeshTasksIndirectNV = 0;
+ PFN_vkCmdDrawMeshTasksIndirectCountNV vkCmdDrawMeshTasksIndirectCountNV = 0;
 //=== VK_NV_scissor_exclusive ===
- vkCmdSetExclusiveScissorEnableNV = PFN_vkCmdSetExclusiveScissorEnableNV( vkGetInstanceProcAddr( instance, "vkCmdSetExclusiveScissorEnableNV" ) );
- vkCmdSetExclusiveScissorNV = PFN_vkCmdSetExclusiveScissorNV( vkGetInstanceProcAddr( instance, "vkCmdSetExclusiveScissorNV" ) );
+ PFN_vkCmdSetExclusiveScissorEnableNV vkCmdSetExclusiveScissorEnableNV = 0;
+ PFN_vkCmdSetExclusiveScissorNV vkCmdSetExclusiveScissorNV = 0;
 //=== VK_NV_device_diagnostic_checkpoints ===
- vkCmdSetCheckpointNV = PFN_vkCmdSetCheckpointNV( vkGetInstanceProcAddr( instance, "vkCmdSetCheckpointNV" ) );
- vkGetQueueCheckpointDataNV = PFN_vkGetQueueCheckpointDataNV( vkGetInstanceProcAddr( instance, "vkGetQueueCheckpointDataNV" ) );
- vkGetQueueCheckpointData2NV = PFN_vkGetQueueCheckpointData2NV( vkGetInstanceProcAddr( instance, "vkGetQueueCheckpointData2NV" ) );
+ PFN_vkCmdSetCheckpointNV vkCmdSetCheckpointNV = 0;
+ PFN_vkGetQueueCheckpointDataNV vkGetQueueCheckpointDataNV = 0;
+ PFN_vkGetQueueCheckpointData2NV vkGetQueueCheckpointData2NV = 0;
 //=== VK_KHR_timeline_semaphore ===
- vkGetSemaphoreCounterValueKHR = PFN_vkGetSemaphoreCounterValueKHR( vkGetInstanceProcAddr( instance, "vkGetSemaphoreCounterValueKHR" ) );
- if ( !vkGetSemaphoreCounterValue )
- vkGetSemaphoreCounterValue = vkGetSemaphoreCounterValueKHR;
- vkWaitSemaphoresKHR = PFN_vkWaitSemaphoresKHR( vkGetInstanceProcAddr( instance, "vkWaitSemaphoresKHR" ) );
- if ( !vkWaitSemaphores )
- vkWaitSemaphores = vkWaitSemaphoresKHR;
- vkSignalSemaphoreKHR = PFN_vkSignalSemaphoreKHR( vkGetInstanceProcAddr( instance, "vkSignalSemaphoreKHR" ) );
- if ( !vkSignalSemaphore )
- vkSignalSemaphore = vkSignalSemaphoreKHR;
+ PFN_vkGetSemaphoreCounterValueKHR vkGetSemaphoreCounterValueKHR = 0;
+ PFN_vkWaitSemaphoresKHR vkWaitSemaphoresKHR = 0;
+ PFN_vkSignalSemaphoreKHR vkSignalSemaphoreKHR = 0;
 //=== VK_INTEL_performance_query ===
- vkInitializePerformanceApiINTEL = PFN_vkInitializePerformanceApiINTEL( vkGetInstanceProcAddr( instance, "vkInitializePerformanceApiINTEL" ) );
- vkUninitializePerformanceApiINTEL = PFN_vkUninitializePerformanceApiINTEL( vkGetInstanceProcAddr( instance, "vkUninitializePerformanceApiINTEL" ) );
- vkCmdSetPerformanceMarkerINTEL = PFN_vkCmdSetPerformanceMarkerINTEL( vkGetInstanceProcAddr( instance, "vkCmdSetPerformanceMarkerINTEL" ) );
- vkCmdSetPerformanceStreamMarkerINTEL =
- PFN_vkCmdSetPerformanceStreamMarkerINTEL( vkGetInstanceProcAddr( instance, "vkCmdSetPerformanceStreamMarkerINTEL" ) );
- vkCmdSetPerformanceOverrideINTEL = PFN_vkCmdSetPerformanceOverrideINTEL( vkGetInstanceProcAddr( instance, "vkCmdSetPerformanceOverrideINTEL" ) );
- vkAcquirePerformanceConfigurationINTEL =
- PFN_vkAcquirePerformanceConfigurationINTEL( vkGetInstanceProcAddr( instance, "vkAcquirePerformanceConfigurationINTEL" ) );
- vkReleasePerformanceConfigurationINTEL =
- PFN_vkReleasePerformanceConfigurationINTEL( vkGetInstanceProcAddr( instance, "vkReleasePerformanceConfigurationINTEL" ) );
- vkQueueSetPerformanceConfigurationINTEL =
- PFN_vkQueueSetPerformanceConfigurationINTEL( vkGetInstanceProcAddr( instance, "vkQueueSetPerformanceConfigurationINTEL" ) );
- vkGetPerformanceParameterINTEL = PFN_vkGetPerformanceParameterINTEL( vkGetInstanceProcAddr( instance, "vkGetPerformanceParameterINTEL" ) );
+ PFN_vkInitializePerformanceApiINTEL vkInitializePerformanceApiINTEL = 0;
+ PFN_vkUninitializePerformanceApiINTEL vkUninitializePerformanceApiINTEL = 0;
+ PFN_vkCmdSetPerformanceMarkerINTEL vkCmdSetPerformanceMarkerINTEL = 0;
+ PFN_vkCmdSetPerformanceStreamMarkerINTEL vkCmdSetPerformanceStreamMarkerINTEL = 0;
+ PFN_vkCmdSetPerformanceOverrideINTEL vkCmdSetPerformanceOverrideINTEL = 0;
+ PFN_vkAcquirePerformanceConfigurationINTEL vkAcquirePerformanceConfigurationINTEL = 0;
+ PFN_vkReleasePerformanceConfigurationINTEL vkReleasePerformanceConfigurationINTEL = 0;
+ PFN_vkQueueSetPerformanceConfigurationINTEL vkQueueSetPerformanceConfigurationINTEL = 0;
+ PFN_vkGetPerformanceParameterINTEL vkGetPerformanceParameterINTEL = 0;
 //=== VK_AMD_display_native_hdr ===
- vkSetLocalDimmingAMD = PFN_vkSetLocalDimmingAMD( vkGetInstanceProcAddr( instance, "vkSetLocalDimmingAMD" ) );
+ PFN_vkSetLocalDimmingAMD vkSetLocalDimmingAMD = 0;
 #if defined( VK_USE_PLATFORM_FUCHSIA )
 //=== VK_FUCHSIA_imagepipe_surface ===
- vkCreateImagePipeSurfaceFUCHSIA = PFN_vkCreateImagePipeSurfaceFUCHSIA( vkGetInstanceProcAddr( instance, "vkCreateImagePipeSurfaceFUCHSIA" ) );
+ PFN_vkCreateImagePipeSurfaceFUCHSIA vkCreateImagePipeSurfaceFUCHSIA = 0;
+#else
+ PFN_dummy vkCreateImagePipeSurfaceFUCHSIA_placeholder = 0;
 #endif /*VK_USE_PLATFORM_FUCHSIA*/
 #if defined( VK_USE_PLATFORM_METAL_EXT )
 //=== VK_EXT_metal_surface ===
- vkCreateMetalSurfaceEXT = PFN_vkCreateMetalSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateMetalSurfaceEXT" ) );
+ PFN_vkCreateMetalSurfaceEXT vkCreateMetalSurfaceEXT = 0;
+#else
+ PFN_dummy vkCreateMetalSurfaceEXT_placeholder = 0;
 #endif /*VK_USE_PLATFORM_METAL_EXT*/
 //=== VK_KHR_fragment_shading_rate ===
- vkGetPhysicalDeviceFragmentShadingRatesKHR =
- PFN_vkGetPhysicalDeviceFragmentShadingRatesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFragmentShadingRatesKHR" ) );
- vkCmdSetFragmentShadingRateKHR = PFN_vkCmdSetFragmentShadingRateKHR( vkGetInstanceProcAddr( instance, "vkCmdSetFragmentShadingRateKHR" ) );
+ PFN_vkGetPhysicalDeviceFragmentShadingRatesKHR vkGetPhysicalDeviceFragmentShadingRatesKHR = 0;
+ PFN_vkCmdSetFragmentShadingRateKHR vkCmdSetFragmentShadingRateKHR = 0;
 //=== VK_KHR_dynamic_rendering_local_read ===
- vkCmdSetRenderingAttachmentLocationsKHR =
- PFN_vkCmdSetRenderingAttachmentLocationsKHR( vkGetInstanceProcAddr( instance, "vkCmdSetRenderingAttachmentLocationsKHR" ) );
- vkCmdSetRenderingInputAttachmentIndicesKHR =
- PFN_vkCmdSetRenderingInputAttachmentIndicesKHR( vkGetInstanceProcAddr( instance, "vkCmdSetRenderingInputAttachmentIndicesKHR" ) );
+ PFN_vkCmdSetRenderingAttachmentLocationsKHR vkCmdSetRenderingAttachmentLocationsKHR = 0;
+ PFN_vkCmdSetRenderingInputAttachmentIndicesKHR vkCmdSetRenderingInputAttachmentIndicesKHR = 0;
 //=== VK_EXT_buffer_device_address ===
- vkGetBufferDeviceAddressEXT = PFN_vkGetBufferDeviceAddressEXT( vkGetInstanceProcAddr( instance, "vkGetBufferDeviceAddressEXT" ) );
- if ( !vkGetBufferDeviceAddress )
- vkGetBufferDeviceAddress = vkGetBufferDeviceAddressEXT;
+ PFN_vkGetBufferDeviceAddressEXT vkGetBufferDeviceAddressEXT = 0;
 //=== VK_EXT_tooling_info ===
- vkGetPhysicalDeviceToolPropertiesEXT =
- PFN_vkGetPhysicalDeviceToolPropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceToolPropertiesEXT" ) );
- if ( !vkGetPhysicalDeviceToolProperties )
- vkGetPhysicalDeviceToolProperties = vkGetPhysicalDeviceToolPropertiesEXT;
+ PFN_vkGetPhysicalDeviceToolPropertiesEXT vkGetPhysicalDeviceToolPropertiesEXT = 0;
 //=== VK_KHR_present_wait ===
- vkWaitForPresentKHR = PFN_vkWaitForPresentKHR( vkGetInstanceProcAddr( instance, "vkWaitForPresentKHR" ) );
+ PFN_vkWaitForPresentKHR vkWaitForPresentKHR = 0;
 //=== VK_NV_cooperative_matrix ===
- vkGetPhysicalDeviceCooperativeMatrixPropertiesNV =
- PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCooperativeMatrixPropertiesNV" ) );
+ PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV vkGetPhysicalDeviceCooperativeMatrixPropertiesNV = 0;
 //=== VK_NV_coverage_reduction_mode ===
- vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV = PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(
- vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV" ) );
+ PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV = 0;
 #if defined( VK_USE_PLATFORM_WIN32_KHR )
 //=== VK_EXT_full_screen_exclusive ===
- vkGetPhysicalDeviceSurfacePresentModes2EXT =
- PFN_vkGetPhysicalDeviceSurfacePresentModes2EXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfacePresentModes2EXT" ) );
- vkAcquireFullScreenExclusiveModeEXT = PFN_vkAcquireFullScreenExclusiveModeEXT( vkGetInstanceProcAddr( instance, "vkAcquireFullScreenExclusiveModeEXT" ) );
- vkReleaseFullScreenExclusiveModeEXT = PFN_vkReleaseFullScreenExclusiveModeEXT( vkGetInstanceProcAddr( instance, "vkReleaseFullScreenExclusiveModeEXT" ) );
- vkGetDeviceGroupSurfacePresentModes2EXT =
- PFN_vkGetDeviceGroupSurfacePresentModes2EXT( vkGetInstanceProcAddr( instance, "vkGetDeviceGroupSurfacePresentModes2EXT" ) );
+ PFN_vkGetPhysicalDeviceSurfacePresentModes2EXT vkGetPhysicalDeviceSurfacePresentModes2EXT = 0;
+ PFN_vkAcquireFullScreenExclusiveModeEXT vkAcquireFullScreenExclusiveModeEXT = 0;
+ PFN_vkReleaseFullScreenExclusiveModeEXT vkReleaseFullScreenExclusiveModeEXT = 0;
+ PFN_vkGetDeviceGroupSurfacePresentModes2EXT vkGetDeviceGroupSurfacePresentModes2EXT = 0;
+#else
+ PFN_dummy vkGetPhysicalDeviceSurfacePresentModes2EXT_placeholder = 0;
+ PFN_dummy vkAcquireFullScreenExclusiveModeEXT_placeholder = 0;
+ PFN_dummy vkReleaseFullScreenExclusiveModeEXT_placeholder = 0;
+ PFN_dummy vkGetDeviceGroupSurfacePresentModes2EXT_placeholder = 0;
 #endif /*VK_USE_PLATFORM_WIN32_KHR*/
 //=== VK_EXT_headless_surface ===
- vkCreateHeadlessSurfaceEXT = PFN_vkCreateHeadlessSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateHeadlessSurfaceEXT" ) );
+ PFN_vkCreateHeadlessSurfaceEXT vkCreateHeadlessSurfaceEXT = 0;
 //=== VK_KHR_buffer_device_address ===
- vkGetBufferDeviceAddressKHR = PFN_vkGetBufferDeviceAddressKHR( vkGetInstanceProcAddr( instance, "vkGetBufferDeviceAddressKHR" ) );
- if ( !vkGetBufferDeviceAddress )
- vkGetBufferDeviceAddress = vkGetBufferDeviceAddressKHR;
- vkGetBufferOpaqueCaptureAddressKHR = PFN_vkGetBufferOpaqueCaptureAddressKHR( vkGetInstanceProcAddr( instance, "vkGetBufferOpaqueCaptureAddressKHR" ) );
- if ( !vkGetBufferOpaqueCaptureAddress )
- vkGetBufferOpaqueCaptureAddress = vkGetBufferOpaqueCaptureAddressKHR;
- vkGetDeviceMemoryOpaqueCaptureAddressKHR =
- PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceMemoryOpaqueCaptureAddressKHR" ) );
- if ( !vkGetDeviceMemoryOpaqueCaptureAddress )
- vkGetDeviceMemoryOpaqueCaptureAddress = vkGetDeviceMemoryOpaqueCaptureAddressKHR;
+ PFN_vkGetBufferDeviceAddressKHR vkGetBufferDeviceAddressKHR = 0;
+ PFN_vkGetBufferOpaqueCaptureAddressKHR vkGetBufferOpaqueCaptureAddressKHR = 0;
+ PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR vkGetDeviceMemoryOpaqueCaptureAddressKHR = 0;
 //=== VK_EXT_line_rasterization ===
- vkCmdSetLineStippleEXT = PFN_vkCmdSetLineStippleEXT( vkGetInstanceProcAddr( instance, "vkCmdSetLineStippleEXT" ) );
- if ( !vkCmdSetLineStippleKHR )
- vkCmdSetLineStippleKHR = vkCmdSetLineStippleEXT;
+ PFN_vkCmdSetLineStippleEXT vkCmdSetLineStippleEXT = 0;
 //=== VK_EXT_host_query_reset ===
- vkResetQueryPoolEXT = PFN_vkResetQueryPoolEXT( vkGetInstanceProcAddr( instance, "vkResetQueryPoolEXT" ) );
- if ( !vkResetQueryPool )
- vkResetQueryPool = vkResetQueryPoolEXT;
+ PFN_vkResetQueryPoolEXT vkResetQueryPoolEXT = 0;
 //=== VK_EXT_extended_dynamic_state ===
- vkCmdSetCullModeEXT = PFN_vkCmdSetCullModeEXT( vkGetInstanceProcAddr( instance, "vkCmdSetCullModeEXT" ) );
- if ( !vkCmdSetCullMode )
- vkCmdSetCullMode = vkCmdSetCullModeEXT;
- vkCmdSetFrontFaceEXT = PFN_vkCmdSetFrontFaceEXT( vkGetInstanceProcAddr( instance, "vkCmdSetFrontFaceEXT" ) );
- if ( !vkCmdSetFrontFace )
- vkCmdSetFrontFace = vkCmdSetFrontFaceEXT;
- vkCmdSetPrimitiveTopologyEXT = PFN_vkCmdSetPrimitiveTopologyEXT( vkGetInstanceProcAddr( instance, "vkCmdSetPrimitiveTopologyEXT" ) );
- if ( !vkCmdSetPrimitiveTopology )
- vkCmdSetPrimitiveTopology = vkCmdSetPrimitiveTopologyEXT;
- vkCmdSetViewportWithCountEXT = PFN_vkCmdSetViewportWithCountEXT( vkGetInstanceProcAddr( instance, "vkCmdSetViewportWithCountEXT" ) );
- if ( !vkCmdSetViewportWithCount )
- vkCmdSetViewportWithCount = vkCmdSetViewportWithCountEXT;
- vkCmdSetScissorWithCountEXT = PFN_vkCmdSetScissorWithCountEXT( vkGetInstanceProcAddr( instance, "vkCmdSetScissorWithCountEXT" ) );
- if ( !vkCmdSetScissorWithCount )
- vkCmdSetScissorWithCount = vkCmdSetScissorWithCountEXT;
- vkCmdBindVertexBuffers2EXT = PFN_vkCmdBindVertexBuffers2EXT( vkGetInstanceProcAddr( instance, "vkCmdBindVertexBuffers2EXT" ) );
- if ( !vkCmdBindVertexBuffers2 )
- vkCmdBindVertexBuffers2 = vkCmdBindVertexBuffers2EXT;
- vkCmdSetDepthTestEnableEXT = PFN_vkCmdSetDepthTestEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthTestEnableEXT" ) );
- if ( !vkCmdSetDepthTestEnable )
- vkCmdSetDepthTestEnable = vkCmdSetDepthTestEnableEXT;
- vkCmdSetDepthWriteEnableEXT = PFN_vkCmdSetDepthWriteEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthWriteEnableEXT" ) );
- if ( !vkCmdSetDepthWriteEnable )
- vkCmdSetDepthWriteEnable = vkCmdSetDepthWriteEnableEXT;
- vkCmdSetDepthCompareOpEXT = PFN_vkCmdSetDepthCompareOpEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthCompareOpEXT" ) );
- if ( !vkCmdSetDepthCompareOp )
- vkCmdSetDepthCompareOp = vkCmdSetDepthCompareOpEXT;
- vkCmdSetDepthBoundsTestEnableEXT = PFN_vkCmdSetDepthBoundsTestEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBoundsTestEnableEXT" ) );
- if ( !vkCmdSetDepthBoundsTestEnable )
- vkCmdSetDepthBoundsTestEnable = vkCmdSetDepthBoundsTestEnableEXT;
- vkCmdSetStencilTestEnableEXT = PFN_vkCmdSetStencilTestEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetStencilTestEnableEXT" ) );
- if ( !vkCmdSetStencilTestEnable )
- vkCmdSetStencilTestEnable = vkCmdSetStencilTestEnableEXT;
- vkCmdSetStencilOpEXT = PFN_vkCmdSetStencilOpEXT( vkGetInstanceProcAddr( instance, "vkCmdSetStencilOpEXT" ) );
- if ( !vkCmdSetStencilOp )
- vkCmdSetStencilOp = vkCmdSetStencilOpEXT;
+ PFN_vkCmdSetCullModeEXT vkCmdSetCullModeEXT = 0;
+ PFN_vkCmdSetFrontFaceEXT vkCmdSetFrontFaceEXT = 0;
+ PFN_vkCmdSetPrimitiveTopologyEXT vkCmdSetPrimitiveTopologyEXT = 0;
+ PFN_vkCmdSetViewportWithCountEXT vkCmdSetViewportWithCountEXT = 0;
+ PFN_vkCmdSetScissorWithCountEXT vkCmdSetScissorWithCountEXT = 0;
+ PFN_vkCmdBindVertexBuffers2EXT vkCmdBindVertexBuffers2EXT = 0;
+ PFN_vkCmdSetDepthTestEnableEXT vkCmdSetDepthTestEnableEXT = 0;
+ PFN_vkCmdSetDepthWriteEnableEXT vkCmdSetDepthWriteEnableEXT = 0;
+ PFN_vkCmdSetDepthCompareOpEXT vkCmdSetDepthCompareOpEXT = 0;
+ PFN_vkCmdSetDepthBoundsTestEnableEXT vkCmdSetDepthBoundsTestEnableEXT = 0;
+ PFN_vkCmdSetStencilTestEnableEXT vkCmdSetStencilTestEnableEXT = 0;
+ PFN_vkCmdSetStencilOpEXT vkCmdSetStencilOpEXT = 0;
 //=== VK_KHR_deferred_host_operations ===
- vkCreateDeferredOperationKHR = PFN_vkCreateDeferredOperationKHR( vkGetInstanceProcAddr( instance, "vkCreateDeferredOperationKHR" ) );
- vkDestroyDeferredOperationKHR = PFN_vkDestroyDeferredOperationKHR( vkGetInstanceProcAddr( instance, "vkDestroyDeferredOperationKHR" ) );
- vkGetDeferredOperationMaxConcurrencyKHR =
- PFN_vkGetDeferredOperationMaxConcurrencyKHR( vkGetInstanceProcAddr( instance, "vkGetDeferredOperationMaxConcurrencyKHR" ) );
- vkGetDeferredOperationResultKHR = PFN_vkGetDeferredOperationResultKHR( vkGetInstanceProcAddr( instance, "vkGetDeferredOperationResultKHR" ) );
- vkDeferredOperationJoinKHR = PFN_vkDeferredOperationJoinKHR( vkGetInstanceProcAddr( instance, "vkDeferredOperationJoinKHR" ) );
+ PFN_vkCreateDeferredOperationKHR vkCreateDeferredOperationKHR = 0;
+ PFN_vkDestroyDeferredOperationKHR vkDestroyDeferredOperationKHR = 0;
+ PFN_vkGetDeferredOperationMaxConcurrencyKHR vkGetDeferredOperationMaxConcurrencyKHR = 0;
+ PFN_vkGetDeferredOperationResultKHR vkGetDeferredOperationResultKHR = 0;
+ PFN_vkDeferredOperationJoinKHR vkDeferredOperationJoinKHR = 0;
 //=== VK_KHR_pipeline_executable_properties ===
- vkGetPipelineExecutablePropertiesKHR =
- PFN_vkGetPipelineExecutablePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPipelineExecutablePropertiesKHR" ) );
- vkGetPipelineExecutableStatisticsKHR =
- PFN_vkGetPipelineExecutableStatisticsKHR( vkGetInstanceProcAddr( instance, "vkGetPipelineExecutableStatisticsKHR" ) );
- vkGetPipelineExecutableInternalRepresentationsKHR =
- PFN_vkGetPipelineExecutableInternalRepresentationsKHR( vkGetInstanceProcAddr( instance, "vkGetPipelineExecutableInternalRepresentationsKHR" ) );
+ PFN_vkGetPipelineExecutablePropertiesKHR vkGetPipelineExecutablePropertiesKHR = 0;
+ PFN_vkGetPipelineExecutableStatisticsKHR vkGetPipelineExecutableStatisticsKHR = 0;
+ PFN_vkGetPipelineExecutableInternalRepresentationsKHR vkGetPipelineExecutableInternalRepresentationsKHR = 0;
 //=== VK_EXT_host_image_copy ===
- vkCopyMemoryToImageEXT = PFN_vkCopyMemoryToImageEXT( vkGetInstanceProcAddr( instance, "vkCopyMemoryToImageEXT" ) );
- vkCopyImageToMemoryEXT = PFN_vkCopyImageToMemoryEXT( vkGetInstanceProcAddr( instance, "vkCopyImageToMemoryEXT" ) );
- vkCopyImageToImageEXT = PFN_vkCopyImageToImageEXT( vkGetInstanceProcAddr( instance, "vkCopyImageToImageEXT" ) );
- vkTransitionImageLayoutEXT = PFN_vkTransitionImageLayoutEXT( vkGetInstanceProcAddr( instance, "vkTransitionImageLayoutEXT" ) );
- vkGetImageSubresourceLayout2EXT = PFN_vkGetImageSubresourceLayout2EXT( vkGetInstanceProcAddr( instance, "vkGetImageSubresourceLayout2EXT" ) );
- if ( !vkGetImageSubresourceLayout2KHR )
- vkGetImageSubresourceLayout2KHR = vkGetImageSubresourceLayout2EXT;
+ PFN_vkCopyMemoryToImageEXT vkCopyMemoryToImageEXT = 0;
+ PFN_vkCopyImageToMemoryEXT vkCopyImageToMemoryEXT = 0;
+ PFN_vkCopyImageToImageEXT vkCopyImageToImageEXT = 0;
+ PFN_vkTransitionImageLayoutEXT vkTransitionImageLayoutEXT = 0;
+ PFN_vkGetImageSubresourceLayout2EXT vkGetImageSubresourceLayout2EXT = 0;
 //=== VK_KHR_map_memory2 ===
- vkMapMemory2KHR = PFN_vkMapMemory2KHR( vkGetInstanceProcAddr( instance, "vkMapMemory2KHR" ) );
- vkUnmapMemory2KHR = PFN_vkUnmapMemory2KHR( vkGetInstanceProcAddr( instance, "vkUnmapMemory2KHR" ) );
+ PFN_vkMapMemory2KHR vkMapMemory2KHR = 0;
+ PFN_vkUnmapMemory2KHR vkUnmapMemory2KHR = 0;
 //=== VK_EXT_swapchain_maintenance1 ===
- vkReleaseSwapchainImagesEXT = PFN_vkReleaseSwapchainImagesEXT( vkGetInstanceProcAddr( instance, "vkReleaseSwapchainImagesEXT" ) );
+ PFN_vkReleaseSwapchainImagesEXT vkReleaseSwapchainImagesEXT = 0;
 //=== VK_NV_device_generated_commands ===
- vkGetGeneratedCommandsMemoryRequirementsNV =
- PFN_vkGetGeneratedCommandsMemoryRequirementsNV( vkGetInstanceProcAddr( instance, "vkGetGeneratedCommandsMemoryRequirementsNV" ) );
- vkCmdPreprocessGeneratedCommandsNV = PFN_vkCmdPreprocessGeneratedCommandsNV( vkGetInstanceProcAddr( instance, "vkCmdPreprocessGeneratedCommandsNV" ) );
- vkCmdExecuteGeneratedCommandsNV = PFN_vkCmdExecuteGeneratedCommandsNV( vkGetInstanceProcAddr( instance, "vkCmdExecuteGeneratedCommandsNV" ) );
- vkCmdBindPipelineShaderGroupNV = PFN_vkCmdBindPipelineShaderGroupNV( vkGetInstanceProcAddr( instance, "vkCmdBindPipelineShaderGroupNV" ) );
- vkCreateIndirectCommandsLayoutNV = PFN_vkCreateIndirectCommandsLayoutNV( vkGetInstanceProcAddr( instance, "vkCreateIndirectCommandsLayoutNV" ) );
- vkDestroyIndirectCommandsLayoutNV = PFN_vkDestroyIndirectCommandsLayoutNV( vkGetInstanceProcAddr( instance, "vkDestroyIndirectCommandsLayoutNV" ) );
+ PFN_vkGetGeneratedCommandsMemoryRequirementsNV vkGetGeneratedCommandsMemoryRequirementsNV = 0;
+ PFN_vkCmdPreprocessGeneratedCommandsNV vkCmdPreprocessGeneratedCommandsNV = 0;
+ PFN_vkCmdExecuteGeneratedCommandsNV vkCmdExecuteGeneratedCommandsNV = 0;
+ PFN_vkCmdBindPipelineShaderGroupNV vkCmdBindPipelineShaderGroupNV = 0;
+ PFN_vkCreateIndirectCommandsLayoutNV vkCreateIndirectCommandsLayoutNV = 0;
+ PFN_vkDestroyIndirectCommandsLayoutNV vkDestroyIndirectCommandsLayoutNV = 0;
 //=== VK_EXT_depth_bias_control ===
- vkCmdSetDepthBias2EXT = PFN_vkCmdSetDepthBias2EXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBias2EXT" ) );
+ PFN_vkCmdSetDepthBias2EXT vkCmdSetDepthBias2EXT = 0;
 //=== VK_EXT_acquire_drm_display ===
- vkAcquireDrmDisplayEXT = PFN_vkAcquireDrmDisplayEXT( vkGetInstanceProcAddr( instance, "vkAcquireDrmDisplayEXT" ) );
- vkGetDrmDisplayEXT = PFN_vkGetDrmDisplayEXT( vkGetInstanceProcAddr( instance, "vkGetDrmDisplayEXT" ) );
+ PFN_vkAcquireDrmDisplayEXT vkAcquireDrmDisplayEXT = 0;
+ PFN_vkGetDrmDisplayEXT vkGetDrmDisplayEXT = 0;
 //=== VK_EXT_private_data ===
- vkCreatePrivateDataSlotEXT = PFN_vkCreatePrivateDataSlotEXT( vkGetInstanceProcAddr( instance, "vkCreatePrivateDataSlotEXT" ) );
- if ( !vkCreatePrivateDataSlot )
- vkCreatePrivateDataSlot = vkCreatePrivateDataSlotEXT;
- vkDestroyPrivateDataSlotEXT = PFN_vkDestroyPrivateDataSlotEXT( vkGetInstanceProcAddr( instance, "vkDestroyPrivateDataSlotEXT" ) );
- if ( !vkDestroyPrivateDataSlot )
- vkDestroyPrivateDataSlot = vkDestroyPrivateDataSlotEXT;
- vkSetPrivateDataEXT = PFN_vkSetPrivateDataEXT( vkGetInstanceProcAddr( instance, "vkSetPrivateDataEXT" ) );
- if ( !vkSetPrivateData )
- vkSetPrivateData = vkSetPrivateDataEXT;
- vkGetPrivateDataEXT = PFN_vkGetPrivateDataEXT( vkGetInstanceProcAddr( instance, "vkGetPrivateDataEXT" ) );
- if ( !vkGetPrivateData )
- vkGetPrivateData = vkGetPrivateDataEXT;
+ PFN_vkCreatePrivateDataSlotEXT vkCreatePrivateDataSlotEXT = 0;
+ PFN_vkDestroyPrivateDataSlotEXT vkDestroyPrivateDataSlotEXT = 0;
+ PFN_vkSetPrivateDataEXT vkSetPrivateDataEXT = 0;
+ PFN_vkGetPrivateDataEXT vkGetPrivateDataEXT = 0;
 //=== VK_KHR_video_encode_queue ===
- vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR = PFN_vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR(
- vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR" ) );
- vkGetEncodedVideoSessionParametersKHR =
- PFN_vkGetEncodedVideoSessionParametersKHR( vkGetInstanceProcAddr( instance, "vkGetEncodedVideoSessionParametersKHR" ) );
- vkCmdEncodeVideoKHR = PFN_vkCmdEncodeVideoKHR( vkGetInstanceProcAddr( instance, "vkCmdEncodeVideoKHR" ) );
+ PFN_vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR = 0;
+ PFN_vkGetEncodedVideoSessionParametersKHR vkGetEncodedVideoSessionParametersKHR = 0;
+ PFN_vkCmdEncodeVideoKHR vkCmdEncodeVideoKHR = 0;
 #if defined( VK_ENABLE_BETA_EXTENSIONS )
 //=== VK_NV_cuda_kernel_launch ===
- vkCreateCudaModuleNV = PFN_vkCreateCudaModuleNV( vkGetInstanceProcAddr( instance, "vkCreateCudaModuleNV" ) );
- vkGetCudaModuleCacheNV = PFN_vkGetCudaModuleCacheNV( vkGetInstanceProcAddr( instance, "vkGetCudaModuleCacheNV" ) );
- vkCreateCudaFunctionNV = PFN_vkCreateCudaFunctionNV( vkGetInstanceProcAddr( instance, "vkCreateCudaFunctionNV" ) );
- vkDestroyCudaModuleNV = PFN_vkDestroyCudaModuleNV( vkGetInstanceProcAddr( instance, "vkDestroyCudaModuleNV" ) );
- vkDestroyCudaFunctionNV = PFN_vkDestroyCudaFunctionNV( vkGetInstanceProcAddr( instance, "vkDestroyCudaFunctionNV" ) );
- vkCmdCudaLaunchKernelNV = PFN_vkCmdCudaLaunchKernelNV( vkGetInstanceProcAddr( instance, "vkCmdCudaLaunchKernelNV" ) );
+ PFN_vkCreateCudaModuleNV vkCreateCudaModuleNV = 0;
+ PFN_vkGetCudaModuleCacheNV vkGetCudaModuleCacheNV = 0;
+ PFN_vkCreateCudaFunctionNV vkCreateCudaFunctionNV = 0;
+ PFN_vkDestroyCudaModuleNV vkDestroyCudaModuleNV = 0;
+ PFN_vkDestroyCudaFunctionNV vkDestroyCudaFunctionNV = 0;
+ PFN_vkCmdCudaLaunchKernelNV vkCmdCudaLaunchKernelNV = 0;
+#else
+ PFN_dummy vkCreateCudaModuleNV_placeholder = 0;
+ PFN_dummy vkGetCudaModuleCacheNV_placeholder = 0;
+ PFN_dummy vkCreateCudaFunctionNV_placeholder = 0;
+ PFN_dummy vkDestroyCudaModuleNV_placeholder = 0;
+ PFN_dummy vkDestroyCudaFunctionNV_placeholder = 0;
+ PFN_dummy vkCmdCudaLaunchKernelNV_placeholder = 0;
 #endif /*VK_ENABLE_BETA_EXTENSIONS*/
 #if defined( VK_USE_PLATFORM_METAL_EXT )
 //=== VK_EXT_metal_objects ===
- vkExportMetalObjectsEXT = PFN_vkExportMetalObjectsEXT( vkGetInstanceProcAddr( instance, "vkExportMetalObjectsEXT" ) );
+ PFN_vkExportMetalObjectsEXT vkExportMetalObjectsEXT = 0;
+#else
+ PFN_dummy vkExportMetalObjectsEXT_placeholder = 0;
 #endif /*VK_USE_PLATFORM_METAL_EXT*/
 //=== VK_KHR_synchronization2 ===
- vkCmdSetEvent2KHR = PFN_vkCmdSetEvent2KHR( vkGetInstanceProcAddr( instance, "vkCmdSetEvent2KHR" ) );
- if ( !vkCmdSetEvent2 )
- vkCmdSetEvent2 = vkCmdSetEvent2KHR;
- vkCmdResetEvent2KHR = PFN_vkCmdResetEvent2KHR( vkGetInstanceProcAddr( instance, "vkCmdResetEvent2KHR" ) );
- if ( !vkCmdResetEvent2 )
- vkCmdResetEvent2 = vkCmdResetEvent2KHR;
- vkCmdWaitEvents2KHR = PFN_vkCmdWaitEvents2KHR( vkGetInstanceProcAddr( instance, "vkCmdWaitEvents2KHR" ) );
- if ( !vkCmdWaitEvents2 )
- vkCmdWaitEvents2 = vkCmdWaitEvents2KHR;
- vkCmdPipelineBarrier2KHR = PFN_vkCmdPipelineBarrier2KHR( vkGetInstanceProcAddr( instance, "vkCmdPipelineBarrier2KHR" ) );
- if ( !vkCmdPipelineBarrier2 )
- vkCmdPipelineBarrier2 = vkCmdPipelineBarrier2KHR;
- vkCmdWriteTimestamp2KHR = PFN_vkCmdWriteTimestamp2KHR( vkGetInstanceProcAddr( instance, "vkCmdWriteTimestamp2KHR" ) );
- if ( !vkCmdWriteTimestamp2 )
- vkCmdWriteTimestamp2 = vkCmdWriteTimestamp2KHR;
- vkQueueSubmit2KHR = PFN_vkQueueSubmit2KHR( vkGetInstanceProcAddr( instance, "vkQueueSubmit2KHR" ) );
- if ( !vkQueueSubmit2 )
- vkQueueSubmit2 = vkQueueSubmit2KHR;
+ PFN_vkCmdSetEvent2KHR vkCmdSetEvent2KHR = 0;
+ PFN_vkCmdResetEvent2KHR vkCmdResetEvent2KHR = 0;
+ PFN_vkCmdWaitEvents2KHR vkCmdWaitEvents2KHR = 0;
+ PFN_vkCmdPipelineBarrier2KHR vkCmdPipelineBarrier2KHR = 0;
+ PFN_vkCmdWriteTimestamp2KHR vkCmdWriteTimestamp2KHR = 0;
+ PFN_vkQueueSubmit2KHR vkQueueSubmit2KHR = 0;
 //=== VK_EXT_descriptor_buffer ===
- vkGetDescriptorSetLayoutSizeEXT = PFN_vkGetDescriptorSetLayoutSizeEXT( vkGetInstanceProcAddr( instance, "vkGetDescriptorSetLayoutSizeEXT" ) );
- vkGetDescriptorSetLayoutBindingOffsetEXT =
- PFN_vkGetDescriptorSetLayoutBindingOffsetEXT( vkGetInstanceProcAddr( instance, "vkGetDescriptorSetLayoutBindingOffsetEXT" ) );
- vkGetDescriptorEXT = PFN_vkGetDescriptorEXT( vkGetInstanceProcAddr( instance, "vkGetDescriptorEXT" ) );
- vkCmdBindDescriptorBuffersEXT = PFN_vkCmdBindDescriptorBuffersEXT( vkGetInstanceProcAddr( instance, "vkCmdBindDescriptorBuffersEXT" ) );
- vkCmdSetDescriptorBufferOffsetsEXT = PFN_vkCmdSetDescriptorBufferOffsetsEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDescriptorBufferOffsetsEXT" ) );
- vkCmdBindDescriptorBufferEmbeddedSamplersEXT =
- PFN_vkCmdBindDescriptorBufferEmbeddedSamplersEXT( vkGetInstanceProcAddr( instance, "vkCmdBindDescriptorBufferEmbeddedSamplersEXT" ) );
- vkGetBufferOpaqueCaptureDescriptorDataEXT =
- PFN_vkGetBufferOpaqueCaptureDescriptorDataEXT( vkGetInstanceProcAddr( instance, "vkGetBufferOpaqueCaptureDescriptorDataEXT" ) );
- vkGetImageOpaqueCaptureDescriptorDataEXT =
- PFN_vkGetImageOpaqueCaptureDescriptorDataEXT( vkGetInstanceProcAddr( instance, "vkGetImageOpaqueCaptureDescriptorDataEXT" ) );
- vkGetImageViewOpaqueCaptureDescriptorDataEXT =
- PFN_vkGetImageViewOpaqueCaptureDescriptorDataEXT( vkGetInstanceProcAddr( instance, "vkGetImageViewOpaqueCaptureDescriptorDataEXT" ) );
- vkGetSamplerOpaqueCaptureDescriptorDataEXT =
- PFN_vkGetSamplerOpaqueCaptureDescriptorDataEXT( vkGetInstanceProcAddr( instance, "vkGetSamplerOpaqueCaptureDescriptorDataEXT" ) );
- vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT = PFN_vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT(
- vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT" ) );
+ PFN_vkGetDescriptorSetLayoutSizeEXT vkGetDescriptorSetLayoutSizeEXT = 0;
+ PFN_vkGetDescriptorSetLayoutBindingOffsetEXT vkGetDescriptorSetLayoutBindingOffsetEXT = 0;
+ PFN_vkGetDescriptorEXT vkGetDescriptorEXT = 0;
+ PFN_vkCmdBindDescriptorBuffersEXT vkCmdBindDescriptorBuffersEXT = 0;
+ PFN_vkCmdSetDescriptorBufferOffsetsEXT vkCmdSetDescriptorBufferOffsetsEXT = 0;
+ PFN_vkCmdBindDescriptorBufferEmbeddedSamplersEXT vkCmdBindDescriptorBufferEmbeddedSamplersEXT = 0;
+ PFN_vkGetBufferOpaqueCaptureDescriptorDataEXT vkGetBufferOpaqueCaptureDescriptorDataEXT = 0;
+ PFN_vkGetImageOpaqueCaptureDescriptorDataEXT vkGetImageOpaqueCaptureDescriptorDataEXT = 0;
+ PFN_vkGetImageViewOpaqueCaptureDescriptorDataEXT vkGetImageViewOpaqueCaptureDescriptorDataEXT = 0;
+ PFN_vkGetSamplerOpaqueCaptureDescriptorDataEXT vkGetSamplerOpaqueCaptureDescriptorDataEXT = 0;
+ PFN_vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT = 0;
 //=== VK_NV_fragment_shading_rate_enums ===
- vkCmdSetFragmentShadingRateEnumNV = PFN_vkCmdSetFragmentShadingRateEnumNV( vkGetInstanceProcAddr( instance, "vkCmdSetFragmentShadingRateEnumNV" ) );
+ PFN_vkCmdSetFragmentShadingRateEnumNV vkCmdSetFragmentShadingRateEnumNV = 0;
 //=== VK_EXT_mesh_shader ===
- vkCmdDrawMeshTasksEXT = PFN_vkCmdDrawMeshTasksEXT( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksEXT" ) );
- vkCmdDrawMeshTasksIndirectEXT = PFN_vkCmdDrawMeshTasksIndirectEXT( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksIndirectEXT" ) );
- vkCmdDrawMeshTasksIndirectCountEXT = PFN_vkCmdDrawMeshTasksIndirectCountEXT( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksIndirectCountEXT" ) );
+ PFN_vkCmdDrawMeshTasksEXT vkCmdDrawMeshTasksEXT = 0;
+ PFN_vkCmdDrawMeshTasksIndirectEXT vkCmdDrawMeshTasksIndirectEXT = 0;
+ PFN_vkCmdDrawMeshTasksIndirectCountEXT vkCmdDrawMeshTasksIndirectCountEXT = 0;
 //=== VK_KHR_copy_commands2 ===
- vkCmdCopyBuffer2KHR = PFN_vkCmdCopyBuffer2KHR( vkGetInstanceProcAddr( instance, "vkCmdCopyBuffer2KHR" ) );
- if ( !vkCmdCopyBuffer2 )
- vkCmdCopyBuffer2 = vkCmdCopyBuffer2KHR;
- vkCmdCopyImage2KHR = PFN_vkCmdCopyImage2KHR( vkGetInstanceProcAddr( instance, "vkCmdCopyImage2KHR" ) );
- if ( !vkCmdCopyImage2 )
- vkCmdCopyImage2 = vkCmdCopyImage2KHR;
- vkCmdCopyBufferToImage2KHR = PFN_vkCmdCopyBufferToImage2KHR( vkGetInstanceProcAddr( instance, "vkCmdCopyBufferToImage2KHR" ) );
- if ( !vkCmdCopyBufferToImage2 )
- vkCmdCopyBufferToImage2 = vkCmdCopyBufferToImage2KHR;
- vkCmdCopyImageToBuffer2KHR = PFN_vkCmdCopyImageToBuffer2KHR( vkGetInstanceProcAddr( instance, "vkCmdCopyImageToBuffer2KHR" ) );
- if ( !vkCmdCopyImageToBuffer2 )
- vkCmdCopyImageToBuffer2 = vkCmdCopyImageToBuffer2KHR;
- vkCmdBlitImage2KHR = PFN_vkCmdBlitImage2KHR( vkGetInstanceProcAddr( instance, "vkCmdBlitImage2KHR" ) );
- if ( !vkCmdBlitImage2 )
- vkCmdBlitImage2 = vkCmdBlitImage2KHR;
- vkCmdResolveImage2KHR = PFN_vkCmdResolveImage2KHR( vkGetInstanceProcAddr( instance, "vkCmdResolveImage2KHR" ) );
- if ( !vkCmdResolveImage2 )
- vkCmdResolveImage2 = vkCmdResolveImage2KHR;
+ PFN_vkCmdCopyBuffer2KHR vkCmdCopyBuffer2KHR = 0;
+ PFN_vkCmdCopyImage2KHR vkCmdCopyImage2KHR = 0;
+ PFN_vkCmdCopyBufferToImage2KHR vkCmdCopyBufferToImage2KHR = 0;
+ PFN_vkCmdCopyImageToBuffer2KHR vkCmdCopyImageToBuffer2KHR = 0;
+ PFN_vkCmdBlitImage2KHR vkCmdBlitImage2KHR = 0;
+ PFN_vkCmdResolveImage2KHR vkCmdResolveImage2KHR = 0;
 //=== VK_EXT_device_fault ===
- vkGetDeviceFaultInfoEXT = PFN_vkGetDeviceFaultInfoEXT( vkGetInstanceProcAddr( instance, "vkGetDeviceFaultInfoEXT" ) );
+ PFN_vkGetDeviceFaultInfoEXT vkGetDeviceFaultInfoEXT = 0;
 #if defined( VK_USE_PLATFORM_WIN32_KHR )
 //=== VK_NV_acquire_winrt_display ===
- vkAcquireWinrtDisplayNV = PFN_vkAcquireWinrtDisplayNV( vkGetInstanceProcAddr( instance, "vkAcquireWinrtDisplayNV" ) );
- vkGetWinrtDisplayNV = PFN_vkGetWinrtDisplayNV( vkGetInstanceProcAddr( instance, "vkGetWinrtDisplayNV" ) );
+ PFN_vkAcquireWinrtDisplayNV vkAcquireWinrtDisplayNV = 0;
+ PFN_vkGetWinrtDisplayNV vkGetWinrtDisplayNV = 0;
+#else
+ PFN_dummy vkAcquireWinrtDisplayNV_placeholder = 0;
+ PFN_dummy vkGetWinrtDisplayNV_placeholder = 0;
 #endif /*VK_USE_PLATFORM_WIN32_KHR*/
 #if defined( VK_USE_PLATFORM_DIRECTFB_EXT )
 //=== VK_EXT_directfb_surface ===
- vkCreateDirectFBSurfaceEXT = PFN_vkCreateDirectFBSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateDirectFBSurfaceEXT" ) );
- vkGetPhysicalDeviceDirectFBPresentationSupportEXT =
- PFN_vkGetPhysicalDeviceDirectFBPresentationSupportEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDirectFBPresentationSupportEXT" ) );
+ PFN_vkCreateDirectFBSurfaceEXT vkCreateDirectFBSurfaceEXT = 0;
+ PFN_vkGetPhysicalDeviceDirectFBPresentationSupportEXT vkGetPhysicalDeviceDirectFBPresentationSupportEXT = 0;
+#else
+ PFN_dummy vkCreateDirectFBSurfaceEXT_placeholder = 0;
+ PFN_dummy vkGetPhysicalDeviceDirectFBPresentationSupportEXT_placeholder = 0;
 #endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
 //=== VK_EXT_vertex_input_dynamic_state ===
- vkCmdSetVertexInputEXT = PFN_vkCmdSetVertexInputEXT( vkGetInstanceProcAddr( instance, "vkCmdSetVertexInputEXT" ) );
+ PFN_vkCmdSetVertexInputEXT vkCmdSetVertexInputEXT = 0;
 #if defined( VK_USE_PLATFORM_FUCHSIA )
 //=== VK_FUCHSIA_external_memory ===
- vkGetMemoryZirconHandleFUCHSIA = PFN_vkGetMemoryZirconHandleFUCHSIA( vkGetInstanceProcAddr( instance, "vkGetMemoryZirconHandleFUCHSIA" ) );
- vkGetMemoryZirconHandlePropertiesFUCHSIA =
- PFN_vkGetMemoryZirconHandlePropertiesFUCHSIA( vkGetInstanceProcAddr( instance, "vkGetMemoryZirconHandlePropertiesFUCHSIA" ) );
+ PFN_vkGetMemoryZirconHandleFUCHSIA vkGetMemoryZirconHandleFUCHSIA = 0;
+ PFN_vkGetMemoryZirconHandlePropertiesFUCHSIA vkGetMemoryZirconHandlePropertiesFUCHSIA = 0;
+#else
+ PFN_dummy vkGetMemoryZirconHandleFUCHSIA_placeholder = 0;
+ PFN_dummy vkGetMemoryZirconHandlePropertiesFUCHSIA_placeholder = 0;
 #endif /*VK_USE_PLATFORM_FUCHSIA*/
 #if defined( VK_USE_PLATFORM_FUCHSIA )
 //=== VK_FUCHSIA_external_semaphore ===
- vkImportSemaphoreZirconHandleFUCHSIA =
- PFN_vkImportSemaphoreZirconHandleFUCHSIA( vkGetInstanceProcAddr( instance, "vkImportSemaphoreZirconHandleFUCHSIA" ) );
- vkGetSemaphoreZirconHandleFUCHSIA = PFN_vkGetSemaphoreZirconHandleFUCHSIA( vkGetInstanceProcAddr( instance, "vkGetSemaphoreZirconHandleFUCHSIA" ) );
+ PFN_vkImportSemaphoreZirconHandleFUCHSIA vkImportSemaphoreZirconHandleFUCHSIA = 0;
+ PFN_vkGetSemaphoreZirconHandleFUCHSIA vkGetSemaphoreZirconHandleFUCHSIA = 0;
+#else
+ PFN_dummy vkImportSemaphoreZirconHandleFUCHSIA_placeholder = 0;
+ PFN_dummy vkGetSemaphoreZirconHandleFUCHSIA_placeholder = 0;
 #endif /*VK_USE_PLATFORM_FUCHSIA*/
 #if defined( VK_USE_PLATFORM_FUCHSIA )
 //=== VK_FUCHSIA_buffer_collection ===
- vkCreateBufferCollectionFUCHSIA = PFN_vkCreateBufferCollectionFUCHSIA( vkGetInstanceProcAddr( instance, "vkCreateBufferCollectionFUCHSIA" ) );
- vkSetBufferCollectionImageConstraintsFUCHSIA =
- PFN_vkSetBufferCollectionImageConstraintsFUCHSIA( vkGetInstanceProcAddr( instance, "vkSetBufferCollectionImageConstraintsFUCHSIA" ) );
- vkSetBufferCollectionBufferConstraintsFUCHSIA =
- PFN_vkSetBufferCollectionBufferConstraintsFUCHSIA( vkGetInstanceProcAddr( instance, "vkSetBufferCollectionBufferConstraintsFUCHSIA" ) );
- vkDestroyBufferCollectionFUCHSIA = PFN_vkDestroyBufferCollectionFUCHSIA( vkGetInstanceProcAddr( instance, "vkDestroyBufferCollectionFUCHSIA" ) );
- vkGetBufferCollectionPropertiesFUCHSIA =
- PFN_vkGetBufferCollectionPropertiesFUCHSIA( vkGetInstanceProcAddr( instance, "vkGetBufferCollectionPropertiesFUCHSIA" ) );
+ PFN_vkCreateBufferCollectionFUCHSIA vkCreateBufferCollectionFUCHSIA = 0;
+ PFN_vkSetBufferCollectionImageConstraintsFUCHSIA vkSetBufferCollectionImageConstraintsFUCHSIA = 0;
+ PFN_vkSetBufferCollectionBufferConstraintsFUCHSIA vkSetBufferCollectionBufferConstraintsFUCHSIA = 0;
+ PFN_vkDestroyBufferCollectionFUCHSIA vkDestroyBufferCollectionFUCHSIA = 0;
+ PFN_vkGetBufferCollectionPropertiesFUCHSIA vkGetBufferCollectionPropertiesFUCHSIA = 0;
+#else
+ PFN_dummy vkCreateBufferCollectionFUCHSIA_placeholder = 0;
+ PFN_dummy vkSetBufferCollectionImageConstraintsFUCHSIA_placeholder = 0;
+ PFN_dummy vkSetBufferCollectionBufferConstraintsFUCHSIA_placeholder = 0;
+ PFN_dummy vkDestroyBufferCollectionFUCHSIA_placeholder = 0;
+ PFN_dummy vkGetBufferCollectionPropertiesFUCHSIA_placeholder = 0;
 #endif /*VK_USE_PLATFORM_FUCHSIA*/
 //=== VK_HUAWEI_subpass_shading ===
- vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI =
- PFN_vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI( vkGetInstanceProcAddr( instance, "vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI" ) );
- vkCmdSubpassShadingHUAWEI = PFN_vkCmdSubpassShadingHUAWEI( vkGetInstanceProcAddr( instance, "vkCmdSubpassShadingHUAWEI" ) );
+ PFN_vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI = 0;
+ PFN_vkCmdSubpassShadingHUAWEI vkCmdSubpassShadingHUAWEI = 0;
 //=== VK_HUAWEI_invocation_mask ===
- vkCmdBindInvocationMaskHUAWEI = PFN_vkCmdBindInvocationMaskHUAWEI( vkGetInstanceProcAddr( instance, "vkCmdBindInvocationMaskHUAWEI" ) );
+ PFN_vkCmdBindInvocationMaskHUAWEI vkCmdBindInvocationMaskHUAWEI = 0;
 //=== VK_NV_external_memory_rdma ===
- vkGetMemoryRemoteAddressNV = PFN_vkGetMemoryRemoteAddressNV( vkGetInstanceProcAddr( instance, "vkGetMemoryRemoteAddressNV" ) );
+ PFN_vkGetMemoryRemoteAddressNV vkGetMemoryRemoteAddressNV = 0;
 //=== VK_EXT_pipeline_properties ===
- vkGetPipelinePropertiesEXT = PFN_vkGetPipelinePropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetPipelinePropertiesEXT" ) );
+ PFN_vkGetPipelinePropertiesEXT vkGetPipelinePropertiesEXT = 0;
 //=== VK_EXT_extended_dynamic_state2 ===
- vkCmdSetPatchControlPointsEXT = PFN_vkCmdSetPatchControlPointsEXT( vkGetInstanceProcAddr( instance, "vkCmdSetPatchControlPointsEXT" ) );
- vkCmdSetRasterizerDiscardEnableEXT = PFN_vkCmdSetRasterizerDiscardEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetRasterizerDiscardEnableEXT" ) );
- if ( !vkCmdSetRasterizerDiscardEnable )
- vkCmdSetRasterizerDiscardEnable = vkCmdSetRasterizerDiscardEnableEXT;
- vkCmdSetDepthBiasEnableEXT = PFN_vkCmdSetDepthBiasEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBiasEnableEXT" ) );
- if ( !vkCmdSetDepthBiasEnable )
- vkCmdSetDepthBiasEnable = vkCmdSetDepthBiasEnableEXT;
- vkCmdSetLogicOpEXT = PFN_vkCmdSetLogicOpEXT( vkGetInstanceProcAddr( instance, "vkCmdSetLogicOpEXT" ) );
- vkCmdSetPrimitiveRestartEnableEXT = PFN_vkCmdSetPrimitiveRestartEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetPrimitiveRestartEnableEXT" ) );
- if ( !vkCmdSetPrimitiveRestartEnable )
- vkCmdSetPrimitiveRestartEnable = vkCmdSetPrimitiveRestartEnableEXT;
+ PFN_vkCmdSetPatchControlPointsEXT vkCmdSetPatchControlPointsEXT = 0;
+ PFN_vkCmdSetRasterizerDiscardEnableEXT vkCmdSetRasterizerDiscardEnableEXT = 0;
+ PFN_vkCmdSetDepthBiasEnableEXT vkCmdSetDepthBiasEnableEXT = 0;
+ PFN_vkCmdSetLogicOpEXT vkCmdSetLogicOpEXT = 0;
+ PFN_vkCmdSetPrimitiveRestartEnableEXT vkCmdSetPrimitiveRestartEnableEXT = 0;
 #if defined( VK_USE_PLATFORM_SCREEN_QNX )
 //=== VK_QNX_screen_surface ===
- vkCreateScreenSurfaceQNX = PFN_vkCreateScreenSurfaceQNX( vkGetInstanceProcAddr( instance, "vkCreateScreenSurfaceQNX" ) );
- vkGetPhysicalDeviceScreenPresentationSupportQNX =
- PFN_vkGetPhysicalDeviceScreenPresentationSupportQNX( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceScreenPresentationSupportQNX" ) );
+ PFN_vkCreateScreenSurfaceQNX vkCreateScreenSurfaceQNX = 0;
+ PFN_vkGetPhysicalDeviceScreenPresentationSupportQNX vkGetPhysicalDeviceScreenPresentationSupportQNX = 0;
+#else
+ PFN_dummy vkCreateScreenSurfaceQNX_placeholder = 0;
+ PFN_dummy vkGetPhysicalDeviceScreenPresentationSupportQNX_placeholder = 0;
 #endif /*VK_USE_PLATFORM_SCREEN_QNX*/
 //=== VK_EXT_color_write_enable ===
- vkCmdSetColorWriteEnableEXT = PFN_vkCmdSetColorWriteEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetColorWriteEnableEXT" ) );
+ PFN_vkCmdSetColorWriteEnableEXT vkCmdSetColorWriteEnableEXT = 0;
 //=== VK_KHR_ray_tracing_maintenance1 ===
- vkCmdTraceRaysIndirect2KHR = PFN_vkCmdTraceRaysIndirect2KHR( vkGetInstanceProcAddr( instance, "vkCmdTraceRaysIndirect2KHR" ) );
+ PFN_vkCmdTraceRaysIndirect2KHR vkCmdTraceRaysIndirect2KHR = 0;
 //=== VK_EXT_multi_draw ===
- vkCmdDrawMultiEXT = PFN_vkCmdDrawMultiEXT( vkGetInstanceProcAddr( instance, "vkCmdDrawMultiEXT" ) );
- vkCmdDrawMultiIndexedEXT = PFN_vkCmdDrawMultiIndexedEXT( vkGetInstanceProcAddr( instance, "vkCmdDrawMultiIndexedEXT" ) );
+ PFN_vkCmdDrawMultiEXT vkCmdDrawMultiEXT = 0;
+ PFN_vkCmdDrawMultiIndexedEXT vkCmdDrawMultiIndexedEXT = 0;
 //=== VK_EXT_opacity_micromap ===
- vkCreateMicromapEXT = PFN_vkCreateMicromapEXT( vkGetInstanceProcAddr( instance, "vkCreateMicromapEXT" ) );
- vkDestroyMicromapEXT = PFN_vkDestroyMicromapEXT( vkGetInstanceProcAddr( instance, "vkDestroyMicromapEXT" ) );
- vkCmdBuildMicromapsEXT = PFN_vkCmdBuildMicromapsEXT( vkGetInstanceProcAddr( instance, "vkCmdBuildMicromapsEXT" ) );
- vkBuildMicromapsEXT = PFN_vkBuildMicromapsEXT( vkGetInstanceProcAddr( instance, "vkBuildMicromapsEXT" ) );
- vkCopyMicromapEXT = PFN_vkCopyMicromapEXT( vkGetInstanceProcAddr( instance, "vkCopyMicromapEXT" ) );
- vkCopyMicromapToMemoryEXT = PFN_vkCopyMicromapToMemoryEXT( vkGetInstanceProcAddr( instance, "vkCopyMicromapToMemoryEXT" ) );
- vkCopyMemoryToMicromapEXT = PFN_vkCopyMemoryToMicromapEXT( vkGetInstanceProcAddr( instance, "vkCopyMemoryToMicromapEXT" ) );
- vkWriteMicromapsPropertiesEXT = PFN_vkWriteMicromapsPropertiesEXT( vkGetInstanceProcAddr( instance, "vkWriteMicromapsPropertiesEXT" ) );
- vkCmdCopyMicromapEXT = PFN_vkCmdCopyMicromapEXT( vkGetInstanceProcAddr( instance, "vkCmdCopyMicromapEXT" ) );
- vkCmdCopyMicromapToMemoryEXT = PFN_vkCmdCopyMicromapToMemoryEXT( vkGetInstanceProcAddr( instance, "vkCmdCopyMicromapToMemoryEXT" ) );
- vkCmdCopyMemoryToMicromapEXT = PFN_vkCmdCopyMemoryToMicromapEXT( vkGetInstanceProcAddr( instance, "vkCmdCopyMemoryToMicromapEXT" ) );
- vkCmdWriteMicromapsPropertiesEXT = PFN_vkCmdWriteMicromapsPropertiesEXT( vkGetInstanceProcAddr( instance, "vkCmdWriteMicromapsPropertiesEXT" ) );
- vkGetDeviceMicromapCompatibilityEXT = PFN_vkGetDeviceMicromapCompatibilityEXT( vkGetInstanceProcAddr( instance, "vkGetDeviceMicromapCompatibilityEXT" ) );
- vkGetMicromapBuildSizesEXT = PFN_vkGetMicromapBuildSizesEXT( vkGetInstanceProcAddr( instance, "vkGetMicromapBuildSizesEXT" ) );
+ PFN_vkCreateMicromapEXT vkCreateMicromapEXT = 0;
+ PFN_vkDestroyMicromapEXT vkDestroyMicromapEXT = 0;
+ PFN_vkCmdBuildMicromapsEXT vkCmdBuildMicromapsEXT = 0;
+ PFN_vkBuildMicromapsEXT vkBuildMicromapsEXT = 0;
+ PFN_vkCopyMicromapEXT vkCopyMicromapEXT = 0;
+ PFN_vkCopyMicromapToMemoryEXT vkCopyMicromapToMemoryEXT = 0;
+ PFN_vkCopyMemoryToMicromapEXT vkCopyMemoryToMicromapEXT = 0;
+ PFN_vkWriteMicromapsPropertiesEXT vkWriteMicromapsPropertiesEXT = 0;
+ PFN_vkCmdCopyMicromapEXT vkCmdCopyMicromapEXT = 0;
+ PFN_vkCmdCopyMicromapToMemoryEXT vkCmdCopyMicromapToMemoryEXT = 0;
+ PFN_vkCmdCopyMemoryToMicromapEXT vkCmdCopyMemoryToMicromapEXT = 0;
+ PFN_vkCmdWriteMicromapsPropertiesEXT vkCmdWriteMicromapsPropertiesEXT = 0;
+ PFN_vkGetDeviceMicromapCompatibilityEXT vkGetDeviceMicromapCompatibilityEXT = 0;
+ PFN_vkGetMicromapBuildSizesEXT vkGetMicromapBuildSizesEXT = 0;
 //=== VK_HUAWEI_cluster_culling_shader ===
- vkCmdDrawClusterHUAWEI = PFN_vkCmdDrawClusterHUAWEI( vkGetInstanceProcAddr( instance, "vkCmdDrawClusterHUAWEI" ) );
- vkCmdDrawClusterIndirectHUAWEI = PFN_vkCmdDrawClusterIndirectHUAWEI( vkGetInstanceProcAddr( instance, "vkCmdDrawClusterIndirectHUAWEI" ) );
+ PFN_vkCmdDrawClusterHUAWEI vkCmdDrawClusterHUAWEI = 0;
+ PFN_vkCmdDrawClusterIndirectHUAWEI vkCmdDrawClusterIndirectHUAWEI = 0;
 //=== VK_EXT_pageable_device_local_memory ===
- vkSetDeviceMemoryPriorityEXT = PFN_vkSetDeviceMemoryPriorityEXT( vkGetInstanceProcAddr( instance, "vkSetDeviceMemoryPriorityEXT" ) );
+ PFN_vkSetDeviceMemoryPriorityEXT vkSetDeviceMemoryPriorityEXT = 0;
 //=== VK_KHR_maintenance4 ===
- vkGetDeviceBufferMemoryRequirementsKHR =
- PFN_vkGetDeviceBufferMemoryRequirementsKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceBufferMemoryRequirementsKHR" ) );
- if ( !vkGetDeviceBufferMemoryRequirements )
- vkGetDeviceBufferMemoryRequirements = vkGetDeviceBufferMemoryRequirementsKHR;
- vkGetDeviceImageMemoryRequirementsKHR =
- PFN_vkGetDeviceImageMemoryRequirementsKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceImageMemoryRequirementsKHR" ) );
- if ( !vkGetDeviceImageMemoryRequirements )
- vkGetDeviceImageMemoryRequirements = vkGetDeviceImageMemoryRequirementsKHR;
- vkGetDeviceImageSparseMemoryRequirementsKHR =
- PFN_vkGetDeviceImageSparseMemoryRequirementsKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceImageSparseMemoryRequirementsKHR" ) );
- if ( !vkGetDeviceImageSparseMemoryRequirements )
- vkGetDeviceImageSparseMemoryRequirements = vkGetDeviceImageSparseMemoryRequirementsKHR;
+ PFN_vkGetDeviceBufferMemoryRequirementsKHR vkGetDeviceBufferMemoryRequirementsKHR = 0;
+ PFN_vkGetDeviceImageMemoryRequirementsKHR vkGetDeviceImageMemoryRequirementsKHR = 0;
+ PFN_vkGetDeviceImageSparseMemoryRequirementsKHR vkGetDeviceImageSparseMemoryRequirementsKHR = 0;
 //=== VK_VALVE_descriptor_set_host_mapping ===
- vkGetDescriptorSetLayoutHostMappingInfoVALVE =
- PFN_vkGetDescriptorSetLayoutHostMappingInfoVALVE( vkGetInstanceProcAddr( instance, "vkGetDescriptorSetLayoutHostMappingInfoVALVE" ) );
- vkGetDescriptorSetHostMappingVALVE = PFN_vkGetDescriptorSetHostMappingVALVE( vkGetInstanceProcAddr( instance, "vkGetDescriptorSetHostMappingVALVE" ) );
+ PFN_vkGetDescriptorSetLayoutHostMappingInfoVALVE vkGetDescriptorSetLayoutHostMappingInfoVALVE = 0;
+ PFN_vkGetDescriptorSetHostMappingVALVE vkGetDescriptorSetHostMappingVALVE = 0;
 //=== VK_NV_copy_memory_indirect ===
- vkCmdCopyMemoryIndirectNV = PFN_vkCmdCopyMemoryIndirectNV( vkGetInstanceProcAddr( instance, "vkCmdCopyMemoryIndirectNV" ) );
- vkCmdCopyMemoryToImageIndirectNV = PFN_vkCmdCopyMemoryToImageIndirectNV( vkGetInstanceProcAddr( instance, "vkCmdCopyMemoryToImageIndirectNV" ) );
+ PFN_vkCmdCopyMemoryIndirectNV vkCmdCopyMemoryIndirectNV = 0;
+ PFN_vkCmdCopyMemoryToImageIndirectNV vkCmdCopyMemoryToImageIndirectNV = 0;
 //=== VK_NV_memory_decompression ===
- vkCmdDecompressMemoryNV = PFN_vkCmdDecompressMemoryNV( vkGetInstanceProcAddr( instance, "vkCmdDecompressMemoryNV" ) );
- vkCmdDecompressMemoryIndirectCountNV =
- PFN_vkCmdDecompressMemoryIndirectCountNV( vkGetInstanceProcAddr( instance, "vkCmdDecompressMemoryIndirectCountNV" ) );
+ PFN_vkCmdDecompressMemoryNV vkCmdDecompressMemoryNV = 0;
+ PFN_vkCmdDecompressMemoryIndirectCountNV vkCmdDecompressMemoryIndirectCountNV = 0;
 //=== VK_NV_device_generated_commands_compute ===
- vkGetPipelineIndirectMemoryRequirementsNV =
- PFN_vkGetPipelineIndirectMemoryRequirementsNV( vkGetInstanceProcAddr( instance, "vkGetPipelineIndirectMemoryRequirementsNV" ) );
- vkCmdUpdatePipelineIndirectBufferNV = PFN_vkCmdUpdatePipelineIndirectBufferNV( vkGetInstanceProcAddr( instance, "vkCmdUpdatePipelineIndirectBufferNV" ) );
- vkGetPipelineIndirectDeviceAddressNV =
- PFN_vkGetPipelineIndirectDeviceAddressNV( vkGetInstanceProcAddr( instance, "vkGetPipelineIndirectDeviceAddressNV" ) );
+ PFN_vkGetPipelineIndirectMemoryRequirementsNV vkGetPipelineIndirectMemoryRequirementsNV = 0;
+ PFN_vkCmdUpdatePipelineIndirectBufferNV vkCmdUpdatePipelineIndirectBufferNV = 0;
+ PFN_vkGetPipelineIndirectDeviceAddressNV vkGetPipelineIndirectDeviceAddressNV = 0;
 //=== VK_EXT_extended_dynamic_state3 ===
- vkCmdSetDepthClampEnableEXT = PFN_vkCmdSetDepthClampEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthClampEnableEXT" ) );
- vkCmdSetPolygonModeEXT = PFN_vkCmdSetPolygonModeEXT( vkGetInstanceProcAddr( instance, "vkCmdSetPolygonModeEXT" ) );
- vkCmdSetRasterizationSamplesEXT = PFN_vkCmdSetRasterizationSamplesEXT( vkGetInstanceProcAddr( instance, "vkCmdSetRasterizationSamplesEXT" ) );
- vkCmdSetSampleMaskEXT = PFN_vkCmdSetSampleMaskEXT( vkGetInstanceProcAddr( instance, "vkCmdSetSampleMaskEXT" ) );
- vkCmdSetAlphaToCoverageEnableEXT = PFN_vkCmdSetAlphaToCoverageEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetAlphaToCoverageEnableEXT" ) );
- vkCmdSetAlphaToOneEnableEXT = PFN_vkCmdSetAlphaToOneEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetAlphaToOneEnableEXT" ) );
- vkCmdSetLogicOpEnableEXT = PFN_vkCmdSetLogicOpEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetLogicOpEnableEXT" ) );
- vkCmdSetColorBlendEnableEXT = PFN_vkCmdSetColorBlendEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetColorBlendEnableEXT" ) );
- vkCmdSetColorBlendEquationEXT = PFN_vkCmdSetColorBlendEquationEXT( vkGetInstanceProcAddr( instance, "vkCmdSetColorBlendEquationEXT" ) );
- vkCmdSetColorWriteMaskEXT = PFN_vkCmdSetColorWriteMaskEXT( vkGetInstanceProcAddr( instance, "vkCmdSetColorWriteMaskEXT" ) );
- vkCmdSetTessellationDomainOriginEXT = PFN_vkCmdSetTessellationDomainOriginEXT( vkGetInstanceProcAddr( instance, "vkCmdSetTessellationDomainOriginEXT" ) );
- vkCmdSetRasterizationStreamEXT = PFN_vkCmdSetRasterizationStreamEXT( vkGetInstanceProcAddr( instance, "vkCmdSetRasterizationStreamEXT" ) );
- vkCmdSetConservativeRasterizationModeEXT =
- PFN_vkCmdSetConservativeRasterizationModeEXT( vkGetInstanceProcAddr( instance, "vkCmdSetConservativeRasterizationModeEXT" ) );
- vkCmdSetExtraPrimitiveOverestimationSizeEXT =
- PFN_vkCmdSetExtraPrimitiveOverestimationSizeEXT( vkGetInstanceProcAddr( instance, "vkCmdSetExtraPrimitiveOverestimationSizeEXT" ) );
- vkCmdSetDepthClipEnableEXT = PFN_vkCmdSetDepthClipEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthClipEnableEXT" ) );
- vkCmdSetSampleLocationsEnableEXT = PFN_vkCmdSetSampleLocationsEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetSampleLocationsEnableEXT" ) );
- vkCmdSetColorBlendAdvancedEXT = PFN_vkCmdSetColorBlendAdvancedEXT( vkGetInstanceProcAddr( instance, "vkCmdSetColorBlendAdvancedEXT" ) );
- vkCmdSetProvokingVertexModeEXT = PFN_vkCmdSetProvokingVertexModeEXT( vkGetInstanceProcAddr( instance, "vkCmdSetProvokingVertexModeEXT" ) );
- vkCmdSetLineRasterizationModeEXT = PFN_vkCmdSetLineRasterizationModeEXT( vkGetInstanceProcAddr( instance, "vkCmdSetLineRasterizationModeEXT" ) );
- vkCmdSetLineStippleEnableEXT = PFN_vkCmdSetLineStippleEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetLineStippleEnableEXT" ) );
- vkCmdSetDepthClipNegativeOneToOneEXT =
- PFN_vkCmdSetDepthClipNegativeOneToOneEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthClipNegativeOneToOneEXT" ) );
- vkCmdSetViewportWScalingEnableNV = PFN_vkCmdSetViewportWScalingEnableNV( vkGetInstanceProcAddr( instance, "vkCmdSetViewportWScalingEnableNV" ) );
- vkCmdSetViewportSwizzleNV = PFN_vkCmdSetViewportSwizzleNV( vkGetInstanceProcAddr( instance, "vkCmdSetViewportSwizzleNV" ) );
- vkCmdSetCoverageToColorEnableNV = PFN_vkCmdSetCoverageToColorEnableNV( vkGetInstanceProcAddr( instance, "vkCmdSetCoverageToColorEnableNV" ) );
- vkCmdSetCoverageToColorLocationNV = PFN_vkCmdSetCoverageToColorLocationNV( vkGetInstanceProcAddr( instance, "vkCmdSetCoverageToColorLocationNV" ) );
- vkCmdSetCoverageModulationModeNV = PFN_vkCmdSetCoverageModulationModeNV( vkGetInstanceProcAddr( instance, "vkCmdSetCoverageModulationModeNV" ) );
- vkCmdSetCoverageModulationTableEnableNV =
- PFN_vkCmdSetCoverageModulationTableEnableNV( vkGetInstanceProcAddr( instance, "vkCmdSetCoverageModulationTableEnableNV" ) );
- vkCmdSetCoverageModulationTableNV = PFN_vkCmdSetCoverageModulationTableNV( vkGetInstanceProcAddr( instance, "vkCmdSetCoverageModulationTableNV" ) );
- vkCmdSetShadingRateImageEnableNV = PFN_vkCmdSetShadingRateImageEnableNV( vkGetInstanceProcAddr( instance, "vkCmdSetShadingRateImageEnableNV" ) );
- vkCmdSetRepresentativeFragmentTestEnableNV =
- PFN_vkCmdSetRepresentativeFragmentTestEnableNV( vkGetInstanceProcAddr( instance, "vkCmdSetRepresentativeFragmentTestEnableNV" ) );
- vkCmdSetCoverageReductionModeNV = PFN_vkCmdSetCoverageReductionModeNV( vkGetInstanceProcAddr( instance, "vkCmdSetCoverageReductionModeNV" ) );
+ PFN_vkCmdSetDepthClampEnableEXT vkCmdSetDepthClampEnableEXT = 0;
+ PFN_vkCmdSetPolygonModeEXT vkCmdSetPolygonModeEXT = 0;
+ PFN_vkCmdSetRasterizationSamplesEXT vkCmdSetRasterizationSamplesEXT = 0;
+ PFN_vkCmdSetSampleMaskEXT vkCmdSetSampleMaskEXT = 0;
+ PFN_vkCmdSetAlphaToCoverageEnableEXT vkCmdSetAlphaToCoverageEnableEXT = 0;
+ PFN_vkCmdSetAlphaToOneEnableEXT vkCmdSetAlphaToOneEnableEXT = 0;
+ PFN_vkCmdSetLogicOpEnableEXT vkCmdSetLogicOpEnableEXT = 0;
+ PFN_vkCmdSetColorBlendEnableEXT vkCmdSetColorBlendEnableEXT = 0;
+ PFN_vkCmdSetColorBlendEquationEXT vkCmdSetColorBlendEquationEXT = 0;
+ PFN_vkCmdSetColorWriteMaskEXT vkCmdSetColorWriteMaskEXT = 0;
+ PFN_vkCmdSetTessellationDomainOriginEXT vkCmdSetTessellationDomainOriginEXT = 0;
+ PFN_vkCmdSetRasterizationStreamEXT vkCmdSetRasterizationStreamEXT = 0;
+ PFN_vkCmdSetConservativeRasterizationModeEXT vkCmdSetConservativeRasterizationModeEXT = 0;
+ PFN_vkCmdSetExtraPrimitiveOverestimationSizeEXT vkCmdSetExtraPrimitiveOverestimationSizeEXT = 0;
+ PFN_vkCmdSetDepthClipEnableEXT vkCmdSetDepthClipEnableEXT = 0;
+ PFN_vkCmdSetSampleLocationsEnableEXT vkCmdSetSampleLocationsEnableEXT = 0;
+ PFN_vkCmdSetColorBlendAdvancedEXT vkCmdSetColorBlendAdvancedEXT = 0;
+ PFN_vkCmdSetProvokingVertexModeEXT vkCmdSetProvokingVertexModeEXT = 0;
+ PFN_vkCmdSetLineRasterizationModeEXT vkCmdSetLineRasterizationModeEXT = 0;
+ PFN_vkCmdSetLineStippleEnableEXT vkCmdSetLineStippleEnableEXT = 0;
+ PFN_vkCmdSetDepthClipNegativeOneToOneEXT vkCmdSetDepthClipNegativeOneToOneEXT = 0;
+ PFN_vkCmdSetViewportWScalingEnableNV vkCmdSetViewportWScalingEnableNV = 0;
+ PFN_vkCmdSetViewportSwizzleNV vkCmdSetViewportSwizzleNV = 0;
+ PFN_vkCmdSetCoverageToColorEnableNV vkCmdSetCoverageToColorEnableNV = 0;
+ PFN_vkCmdSetCoverageToColorLocationNV vkCmdSetCoverageToColorLocationNV = 0;
+ PFN_vkCmdSetCoverageModulationModeNV vkCmdSetCoverageModulationModeNV = 0;
+ PFN_vkCmdSetCoverageModulationTableEnableNV vkCmdSetCoverageModulationTableEnableNV = 0;
+ PFN_vkCmdSetCoverageModulationTableNV vkCmdSetCoverageModulationTableNV = 0;
+ PFN_vkCmdSetShadingRateImageEnableNV vkCmdSetShadingRateImageEnableNV = 0;
+ PFN_vkCmdSetRepresentativeFragmentTestEnableNV vkCmdSetRepresentativeFragmentTestEnableNV = 0;
+ PFN_vkCmdSetCoverageReductionModeNV vkCmdSetCoverageReductionModeNV = 0;
 //=== VK_EXT_shader_module_identifier ===
- vkGetShaderModuleIdentifierEXT = PFN_vkGetShaderModuleIdentifierEXT( vkGetInstanceProcAddr( instance, "vkGetShaderModuleIdentifierEXT" ) );
- vkGetShaderModuleCreateInfoIdentifierEXT =
- PFN_vkGetShaderModuleCreateInfoIdentifierEXT( vkGetInstanceProcAddr( instance, "vkGetShaderModuleCreateInfoIdentifierEXT" ) );
+ PFN_vkGetShaderModuleIdentifierEXT vkGetShaderModuleIdentifierEXT = 0;
+ PFN_vkGetShaderModuleCreateInfoIdentifierEXT vkGetShaderModuleCreateInfoIdentifierEXT = 0;
 //=== VK_NV_optical_flow ===
- vkGetPhysicalDeviceOpticalFlowImageFormatsNV =
- PFN_vkGetPhysicalDeviceOpticalFlowImageFormatsNV( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceOpticalFlowImageFormatsNV" ) );
- vkCreateOpticalFlowSessionNV = PFN_vkCreateOpticalFlowSessionNV( vkGetInstanceProcAddr( instance, "vkCreateOpticalFlowSessionNV" ) );
- vkDestroyOpticalFlowSessionNV = PFN_vkDestroyOpticalFlowSessionNV( vkGetInstanceProcAddr( instance, "vkDestroyOpticalFlowSessionNV" ) );
- vkBindOpticalFlowSessionImageNV = PFN_vkBindOpticalFlowSessionImageNV( vkGetInstanceProcAddr( instance, "vkBindOpticalFlowSessionImageNV" ) );
- vkCmdOpticalFlowExecuteNV = PFN_vkCmdOpticalFlowExecuteNV( vkGetInstanceProcAddr( instance, "vkCmdOpticalFlowExecuteNV" ) );
+ PFN_vkGetPhysicalDeviceOpticalFlowImageFormatsNV vkGetPhysicalDeviceOpticalFlowImageFormatsNV = 0;
+ PFN_vkCreateOpticalFlowSessionNV vkCreateOpticalFlowSessionNV = 0;
+ PFN_vkDestroyOpticalFlowSessionNV vkDestroyOpticalFlowSessionNV = 0;
+ PFN_vkBindOpticalFlowSessionImageNV vkBindOpticalFlowSessionImageNV = 0;
+ PFN_vkCmdOpticalFlowExecuteNV vkCmdOpticalFlowExecuteNV = 0;
 //=== VK_KHR_maintenance5 ===
- vkCmdBindIndexBuffer2KHR = PFN_vkCmdBindIndexBuffer2KHR( vkGetInstanceProcAddr( instance, "vkCmdBindIndexBuffer2KHR" ) );
- vkGetRenderingAreaGranularityKHR = PFN_vkGetRenderingAreaGranularityKHR( vkGetInstanceProcAddr( instance, "vkGetRenderingAreaGranularityKHR" ) );
- vkGetDeviceImageSubresourceLayoutKHR =
- PFN_vkGetDeviceImageSubresourceLayoutKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceImageSubresourceLayoutKHR" ) );
- vkGetImageSubresourceLayout2KHR = PFN_vkGetImageSubresourceLayout2KHR( vkGetInstanceProcAddr( instance, "vkGetImageSubresourceLayout2KHR" ) );
+ PFN_vkCmdBindIndexBuffer2KHR vkCmdBindIndexBuffer2KHR = 0;
+ PFN_vkGetRenderingAreaGranularityKHR vkGetRenderingAreaGranularityKHR = 0;
+ PFN_vkGetDeviceImageSubresourceLayoutKHR vkGetDeviceImageSubresourceLayoutKHR = 0;
+ PFN_vkGetImageSubresourceLayout2KHR vkGetImageSubresourceLayout2KHR = 0;
 //=== VK_AMD_anti_lag ===
- vkAntiLagUpdateAMD = PFN_vkAntiLagUpdateAMD( vkGetInstanceProcAddr( instance, "vkAntiLagUpdateAMD" ) );
+ PFN_vkAntiLagUpdateAMD vkAntiLagUpdateAMD = 0;
 //=== VK_EXT_shader_object ===
- vkCreateShadersEXT = PFN_vkCreateShadersEXT( vkGetInstanceProcAddr( instance, "vkCreateShadersEXT" ) );
- vkDestroyShaderEXT = PFN_vkDestroyShaderEXT( vkGetInstanceProcAddr( instance, "vkDestroyShaderEXT" ) );
- vkGetShaderBinaryDataEXT = PFN_vkGetShaderBinaryDataEXT( vkGetInstanceProcAddr( instance, "vkGetShaderBinaryDataEXT" ) );
- vkCmdBindShadersEXT = PFN_vkCmdBindShadersEXT( vkGetInstanceProcAddr( instance, "vkCmdBindShadersEXT" ) );
- vkCmdSetDepthClampRangeEXT = PFN_vkCmdSetDepthClampRangeEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthClampRangeEXT" ) );
+ PFN_vkCreateShadersEXT vkCreateShadersEXT = 0;
+ PFN_vkDestroyShaderEXT vkDestroyShaderEXT = 0;
+ PFN_vkGetShaderBinaryDataEXT vkGetShaderBinaryDataEXT = 0;
+ PFN_vkCmdBindShadersEXT vkCmdBindShadersEXT = 0;
+ PFN_vkCmdSetDepthClampRangeEXT vkCmdSetDepthClampRangeEXT = 0;
 //=== VK_KHR_pipeline_binary ===
- vkCreatePipelineBinariesKHR = PFN_vkCreatePipelineBinariesKHR( vkGetInstanceProcAddr( instance, "vkCreatePipelineBinariesKHR" ) );
- vkDestroyPipelineBinaryKHR = PFN_vkDestroyPipelineBinaryKHR( vkGetInstanceProcAddr( instance, "vkDestroyPipelineBinaryKHR" ) );
- vkGetPipelineKeyKHR = PFN_vkGetPipelineKeyKHR( vkGetInstanceProcAddr( instance, "vkGetPipelineKeyKHR" ) );
- vkGetPipelineBinaryDataKHR = PFN_vkGetPipelineBinaryDataKHR( vkGetInstanceProcAddr( instance, "vkGetPipelineBinaryDataKHR" ) );
- vkReleaseCapturedPipelineDataKHR = PFN_vkReleaseCapturedPipelineDataKHR( vkGetInstanceProcAddr( instance, "vkReleaseCapturedPipelineDataKHR" ) );
+ PFN_vkCreatePipelineBinariesKHR vkCreatePipelineBinariesKHR = 0;
+ PFN_vkDestroyPipelineBinaryKHR vkDestroyPipelineBinaryKHR = 0;
+ PFN_vkGetPipelineKeyKHR vkGetPipelineKeyKHR = 0;
+ PFN_vkGetPipelineBinaryDataKHR vkGetPipelineBinaryDataKHR = 0;
+ PFN_vkReleaseCapturedPipelineDataKHR vkReleaseCapturedPipelineDataKHR = 0;
 //=== VK_QCOM_tile_properties ===
- vkGetFramebufferTilePropertiesQCOM = PFN_vkGetFramebufferTilePropertiesQCOM( vkGetInstanceProcAddr( instance, "vkGetFramebufferTilePropertiesQCOM" ) );
- vkGetDynamicRenderingTilePropertiesQCOM =
- PFN_vkGetDynamicRenderingTilePropertiesQCOM( vkGetInstanceProcAddr( instance, "vkGetDynamicRenderingTilePropertiesQCOM" ) );
+ PFN_vkGetFramebufferTilePropertiesQCOM vkGetFramebufferTilePropertiesQCOM = 0;
+ PFN_vkGetDynamicRenderingTilePropertiesQCOM vkGetDynamicRenderingTilePropertiesQCOM = 0;
 //=== VK_NV_low_latency2 ===
- vkSetLatencySleepModeNV = PFN_vkSetLatencySleepModeNV( vkGetInstanceProcAddr( instance, "vkSetLatencySleepModeNV" ) );
- vkLatencySleepNV = PFN_vkLatencySleepNV( vkGetInstanceProcAddr( instance, "vkLatencySleepNV" ) );
- vkSetLatencyMarkerNV = PFN_vkSetLatencyMarkerNV( vkGetInstanceProcAddr( instance, "vkSetLatencyMarkerNV" ) );
- vkGetLatencyTimingsNV = PFN_vkGetLatencyTimingsNV( vkGetInstanceProcAddr( instance, "vkGetLatencyTimingsNV" ) );
- vkQueueNotifyOutOfBandNV = PFN_vkQueueNotifyOutOfBandNV( vkGetInstanceProcAddr( instance, "vkQueueNotifyOutOfBandNV" ) );
+ PFN_vkSetLatencySleepModeNV vkSetLatencySleepModeNV = 0;
+ PFN_vkLatencySleepNV vkLatencySleepNV = 0;
+ PFN_vkSetLatencyMarkerNV vkSetLatencyMarkerNV = 0;
+ PFN_vkGetLatencyTimingsNV vkGetLatencyTimingsNV = 0;
+ PFN_vkQueueNotifyOutOfBandNV vkQueueNotifyOutOfBandNV = 0;
 //=== VK_KHR_cooperative_matrix ===
- vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR =
- PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR" ) );
+ PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR = 0;
 //=== VK_EXT_attachment_feedback_loop_dynamic_state ===
- vkCmdSetAttachmentFeedbackLoopEnableEXT =
- PFN_vkCmdSetAttachmentFeedbackLoopEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetAttachmentFeedbackLoopEnableEXT" ) );
+ PFN_vkCmdSetAttachmentFeedbackLoopEnableEXT vkCmdSetAttachmentFeedbackLoopEnableEXT = 0;
 #if defined( VK_USE_PLATFORM_SCREEN_QNX )
 //=== VK_QNX_external_memory_screen_buffer ===
- vkGetScreenBufferPropertiesQNX = PFN_vkGetScreenBufferPropertiesQNX( vkGetInstanceProcAddr( instance, "vkGetScreenBufferPropertiesQNX" ) );
+ PFN_vkGetScreenBufferPropertiesQNX vkGetScreenBufferPropertiesQNX = 0;
+#else
+ PFN_dummy vkGetScreenBufferPropertiesQNX_placeholder = 0;
 #endif /*VK_USE_PLATFORM_SCREEN_QNX*/
 //=== VK_KHR_line_rasterization ===
- vkCmdSetLineStippleKHR = PFN_vkCmdSetLineStippleKHR( vkGetInstanceProcAddr( instance, "vkCmdSetLineStippleKHR" ) );
+ PFN_vkCmdSetLineStippleKHR vkCmdSetLineStippleKHR = 0;
 //=== VK_KHR_calibrated_timestamps ===
- vkGetPhysicalDeviceCalibrateableTimeDomainsKHR =
- PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCalibrateableTimeDomainsKHR" ) );
- vkGetCalibratedTimestampsKHR = PFN_vkGetCalibratedTimestampsKHR( vkGetInstanceProcAddr( instance, "vkGetCalibratedTimestampsKHR" ) );
+ PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsKHR vkGetPhysicalDeviceCalibrateableTimeDomainsKHR = 0;
+ PFN_vkGetCalibratedTimestampsKHR vkGetCalibratedTimestampsKHR = 0;
 //=== VK_KHR_maintenance6 ===
- vkCmdBindDescriptorSets2KHR = PFN_vkCmdBindDescriptorSets2KHR( vkGetInstanceProcAddr( instance, "vkCmdBindDescriptorSets2KHR" ) );
- vkCmdPushConstants2KHR = PFN_vkCmdPushConstants2KHR( vkGetInstanceProcAddr( instance, "vkCmdPushConstants2KHR" ) );
- vkCmdPushDescriptorSet2KHR = PFN_vkCmdPushDescriptorSet2KHR( vkGetInstanceProcAddr( instance, "vkCmdPushDescriptorSet2KHR" ) );
- vkCmdPushDescriptorSetWithTemplate2KHR =
- PFN_vkCmdPushDescriptorSetWithTemplate2KHR( vkGetInstanceProcAddr( instance, "vkCmdPushDescriptorSetWithTemplate2KHR" ) );
- vkCmdSetDescriptorBufferOffsets2EXT = PFN_vkCmdSetDescriptorBufferOffsets2EXT( vkGetInstanceProcAddr( instance, "vkCmdSetDescriptorBufferOffsets2EXT" ) );
- vkCmdBindDescriptorBufferEmbeddedSamplers2EXT =
- PFN_vkCmdBindDescriptorBufferEmbeddedSamplers2EXT( vkGetInstanceProcAddr( instance, "vkCmdBindDescriptorBufferEmbeddedSamplers2EXT" ) );
+ PFN_vkCmdBindDescriptorSets2KHR vkCmdBindDescriptorSets2KHR = 0;
+ PFN_vkCmdPushConstants2KHR vkCmdPushConstants2KHR = 0;
+ PFN_vkCmdPushDescriptorSet2KHR vkCmdPushDescriptorSet2KHR = 0;
+ PFN_vkCmdPushDescriptorSetWithTemplate2KHR vkCmdPushDescriptorSetWithTemplate2KHR = 0;
+ PFN_vkCmdSetDescriptorBufferOffsets2EXT vkCmdSetDescriptorBufferOffsets2EXT = 0;
+ PFN_vkCmdBindDescriptorBufferEmbeddedSamplers2EXT vkCmdBindDescriptorBufferEmbeddedSamplers2EXT = 0;
 //=== VK_EXT_device_generated_commands ===
- vkGetGeneratedCommandsMemoryRequirementsEXT =
- PFN_vkGetGeneratedCommandsMemoryRequirementsEXT( vkGetInstanceProcAddr( instance, "vkGetGeneratedCommandsMemoryRequirementsEXT" ) );
- vkCmdPreprocessGeneratedCommandsEXT = PFN_vkCmdPreprocessGeneratedCommandsEXT( vkGetInstanceProcAddr( instance, "vkCmdPreprocessGeneratedCommandsEXT" ) );
- vkCmdExecuteGeneratedCommandsEXT = PFN_vkCmdExecuteGeneratedCommandsEXT( vkGetInstanceProcAddr( instance, "vkCmdExecuteGeneratedCommandsEXT" ) );
- vkCreateIndirectCommandsLayoutEXT = PFN_vkCreateIndirectCommandsLayoutEXT( vkGetInstanceProcAddr( instance, "vkCreateIndirectCommandsLayoutEXT" ) );
- vkDestroyIndirectCommandsLayoutEXT = PFN_vkDestroyIndirectCommandsLayoutEXT( vkGetInstanceProcAddr( instance, "vkDestroyIndirectCommandsLayoutEXT" ) );
- vkCreateIndirectExecutionSetEXT = PFN_vkCreateIndirectExecutionSetEXT( vkGetInstanceProcAddr( instance, "vkCreateIndirectExecutionSetEXT" ) );
- vkDestroyIndirectExecutionSetEXT = PFN_vkDestroyIndirectExecutionSetEXT( vkGetInstanceProcAddr( instance, "vkDestroyIndirectExecutionSetEXT" ) );
- vkUpdateIndirectExecutionSetPipelineEXT =
- PFN_vkUpdateIndirectExecutionSetPipelineEXT( vkGetInstanceProcAddr( instance, "vkUpdateIndirectExecutionSetPipelineEXT" ) );
- vkUpdateIndirectExecutionSetShaderEXT =
- PFN_vkUpdateIndirectExecutionSetShaderEXT( vkGetInstanceProcAddr( instance, "vkUpdateIndirectExecutionSetShaderEXT" ) );
+ PFN_vkGetGeneratedCommandsMemoryRequirementsEXT vkGetGeneratedCommandsMemoryRequirementsEXT = 0;
+ PFN_vkCmdPreprocessGeneratedCommandsEXT vkCmdPreprocessGeneratedCommandsEXT = 0;
+ PFN_vkCmdExecuteGeneratedCommandsEXT vkCmdExecuteGeneratedCommandsEXT = 0;
+ PFN_vkCreateIndirectCommandsLayoutEXT vkCreateIndirectCommandsLayoutEXT = 0;
+ PFN_vkDestroyIndirectCommandsLayoutEXT vkDestroyIndirectCommandsLayoutEXT = 0;
+ PFN_vkCreateIndirectExecutionSetEXT vkCreateIndirectExecutionSetEXT = 0;
+ PFN_vkDestroyIndirectExecutionSetEXT vkDestroyIndirectExecutionSetEXT = 0;
+ PFN_vkUpdateIndirectExecutionSetPipelineEXT vkUpdateIndirectExecutionSetPipelineEXT = 0;
+ PFN_vkUpdateIndirectExecutionSetShaderEXT vkUpdateIndirectExecutionSetShaderEXT = 0;
 //=== VK_NV_cooperative_matrix2 ===
- vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV = PFN_vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV(
- vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV" ) );
- }
+ PFN_vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV = 0;
- void init( VULKAN_HPP_NAMESPACE::Device deviceCpp ) VULKAN_HPP_NOEXCEPT
- {
- VkDevice device = static_cast<VkDevice>( deviceCpp );
+ public:
+ DispatchLoaderDynamic() VULKAN_HPP_NOEXCEPT = default;
+ DispatchLoaderDynamic( DispatchLoaderDynamic const & rhs ) VULKAN_HPP_NOEXCEPT = default;
- //=== VK_VERSION_1_0 ===
- vkGetDeviceProcAddr = PFN_vkGetDeviceProcAddr( vkGetDeviceProcAddr( device, "vkGetDeviceProcAddr" ) );
- vkDestroyDevice = PFN_vkDestroyDevice( vkGetDeviceProcAddr( device, "vkDestroyDevice" ) );
- vkGetDeviceQueue = PFN_vkGetDeviceQueue( vkGetDeviceProcAddr( device, "vkGetDeviceQueue" ) );
- vkQueueSubmit = PFN_vkQueueSubmit( vkGetDeviceProcAddr( device, "vkQueueSubmit" ) );
- vkQueueWaitIdle = PFN_vkQueueWaitIdle( vkGetDeviceProcAddr( device, "vkQueueWaitIdle" ) );
- vkDeviceWaitIdle = PFN_vkDeviceWaitIdle( vkGetDeviceProcAddr( device, "vkDeviceWaitIdle" ) );
- vkAllocateMemory = PFN_vkAllocateMemory( vkGetDeviceProcAddr( device, "vkAllocateMemory" ) );
- vkFreeMemory = PFN_vkFreeMemory( vkGetDeviceProcAddr( device, "vkFreeMemory" ) );
- vkMapMemory = PFN_vkMapMemory( vkGetDeviceProcAddr( device, "vkMapMemory" ) );
- vkUnmapMemory = PFN_vkUnmapMemory( vkGetDeviceProcAddr( device, "vkUnmapMemory" ) );
- vkFlushMappedMemoryRanges = PFN_vkFlushMappedMemoryRanges( vkGetDeviceProcAddr( device, "vkFlushMappedMemoryRanges" ) );
- vkInvalidateMappedMemoryRanges = PFN_vkInvalidateMappedMemoryRanges( vkGetDeviceProcAddr( device, "vkInvalidateMappedMemoryRanges" ) );
- vkGetDeviceMemoryCommitment = PFN_vkGetDeviceMemoryCommitment( vkGetDeviceProcAddr( device, "vkGetDeviceMemoryCommitment" ) );
- vkBindBufferMemory = PFN_vkBindBufferMemory( vkGetDeviceProcAddr( device, "vkBindBufferMemory" ) );
- vkBindImageMemory = PFN_vkBindImageMemory( vkGetDeviceProcAddr( device, "vkBindImageMemory" ) );
- vkGetBufferMemoryRequirements = PFN_vkGetBufferMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements" ) );
- vkGetImageMemoryRequirements = PFN_vkGetImageMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements" ) );
- vkGetImageSparseMemoryRequirements = PFN_vkGetImageSparseMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements" ) );
- vkQueueBindSparse = PFN_vkQueueBindSparse( vkGetDeviceProcAddr( device, "vkQueueBindSparse" ) );
- vkCreateFence = PFN_vkCreateFence( vkGetDeviceProcAddr( device, "vkCreateFence" ) );
- vkDestroyFence = PFN_vkDestroyFence( vkGetDeviceProcAddr( device, "vkDestroyFence" ) );
- vkResetFences = PFN_vkResetFences( vkGetDeviceProcAddr( device, "vkResetFences" ) );
- vkGetFenceStatus = PFN_vkGetFenceStatus( vkGetDeviceProcAddr( device, "vkGetFenceStatus" ) );
- vkWaitForFences = PFN_vkWaitForFences( vkGetDeviceProcAddr( device, "vkWaitForFences" ) );
- vkCreateSemaphore = PFN_vkCreateSemaphore( vkGetDeviceProcAddr( device, "vkCreateSemaphore" ) );
- vkDestroySemaphore = PFN_vkDestroySemaphore( vkGetDeviceProcAddr( device, "vkDestroySemaphore" ) );
- vkCreateEvent = PFN_vkCreateEvent( vkGetDeviceProcAddr( device, "vkCreateEvent" ) );
- vkDestroyEvent = PFN_vkDestroyEvent( vkGetDeviceProcAddr( device, "vkDestroyEvent" ) );
- vkGetEventStatus = PFN_vkGetEventStatus( vkGetDeviceProcAddr( device, "vkGetEventStatus" ) );
- vkSetEvent = PFN_vkSetEvent( vkGetDeviceProcAddr( device, "vkSetEvent" ) );
- vkResetEvent = PFN_vkResetEvent( vkGetDeviceProcAddr( device, "vkResetEvent" ) );
- vkCreateQueryPool = PFN_vkCreateQueryPool( vkGetDeviceProcAddr( device, "vkCreateQueryPool" ) );
- vkDestroyQueryPool = PFN_vkDestroyQueryPool( vkGetDeviceProcAddr( device, "vkDestroyQueryPool" ) );
- vkGetQueryPoolResults = PFN_vkGetQueryPoolResults( vkGetDeviceProcAddr( device, "vkGetQueryPoolResults" ) );
- vkCreateBuffer = PFN_vkCreateBuffer( vkGetDeviceProcAddr( device, "vkCreateBuffer" ) );
- vkDestroyBuffer = PFN_vkDestroyBuffer( vkGetDeviceProcAddr( device, "vkDestroyBuffer" ) );
- vkCreateBufferView = PFN_vkCreateBufferView( vkGetDeviceProcAddr( device, "vkCreateBufferView" ) );
- vkDestroyBufferView = PFN_vkDestroyBufferView( vkGetDeviceProcAddr( device, "vkDestroyBufferView" ) );
- vkCreateImage = PFN_vkCreateImage( vkGetDeviceProcAddr( device, "vkCreateImage" ) );
- vkDestroyImage = PFN_vkDestroyImage( vkGetDeviceProcAddr( device, "vkDestroyImage" ) );
- vkGetImageSubresourceLayout = PFN_vkGetImageSubresourceLayout( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout" ) );
- vkCreateImageView = PFN_vkCreateImageView( vkGetDeviceProcAddr( device, "vkCreateImageView" ) );
- vkDestroyImageView = PFN_vkDestroyImageView( vkGetDeviceProcAddr( device,
"vkDestroyImageView" ) ); - vkCreateShaderModule = PFN_vkCreateShaderModule( vkGetDeviceProcAddr( device, "vkCreateShaderModule" ) ); - vkDestroyShaderModule = PFN_vkDestroyShaderModule( vkGetDeviceProcAddr( device, "vkDestroyShaderModule" ) ); - vkCreatePipelineCache = PFN_vkCreatePipelineCache( vkGetDeviceProcAddr( device, "vkCreatePipelineCache" ) ); - vkDestroyPipelineCache = PFN_vkDestroyPipelineCache( vkGetDeviceProcAddr( device, "vkDestroyPipelineCache" ) ); - vkGetPipelineCacheData = PFN_vkGetPipelineCacheData( vkGetDeviceProcAddr( device, "vkGetPipelineCacheData" ) ); - vkMergePipelineCaches = PFN_vkMergePipelineCaches( vkGetDeviceProcAddr( device, "vkMergePipelineCaches" ) ); - vkCreateGraphicsPipelines = PFN_vkCreateGraphicsPipelines( vkGetDeviceProcAddr( device, "vkCreateGraphicsPipelines" ) ); - vkCreateComputePipelines = PFN_vkCreateComputePipelines( vkGetDeviceProcAddr( device, "vkCreateComputePipelines" ) ); - vkDestroyPipeline = PFN_vkDestroyPipeline( vkGetDeviceProcAddr( device, "vkDestroyPipeline" ) ); - vkCreatePipelineLayout = PFN_vkCreatePipelineLayout( vkGetDeviceProcAddr( device, "vkCreatePipelineLayout" ) ); - vkDestroyPipelineLayout = PFN_vkDestroyPipelineLayout( vkGetDeviceProcAddr( device, "vkDestroyPipelineLayout" ) ); - vkCreateSampler = PFN_vkCreateSampler( vkGetDeviceProcAddr( device, "vkCreateSampler" ) ); - vkDestroySampler = PFN_vkDestroySampler( vkGetDeviceProcAddr( device, "vkDestroySampler" ) ); - vkCreateDescriptorSetLayout = PFN_vkCreateDescriptorSetLayout( vkGetDeviceProcAddr( device, "vkCreateDescriptorSetLayout" ) ); - vkDestroyDescriptorSetLayout = PFN_vkDestroyDescriptorSetLayout( vkGetDeviceProcAddr( device, "vkDestroyDescriptorSetLayout" ) ); - vkCreateDescriptorPool = PFN_vkCreateDescriptorPool( vkGetDeviceProcAddr( device, "vkCreateDescriptorPool" ) ); - vkDestroyDescriptorPool = PFN_vkDestroyDescriptorPool( vkGetDeviceProcAddr( device, "vkDestroyDescriptorPool" ) ); - vkResetDescriptorPool = PFN_vkResetDescriptorPool( vkGetDeviceProcAddr( device, "vkResetDescriptorPool" ) ); - vkAllocateDescriptorSets = PFN_vkAllocateDescriptorSets( vkGetDeviceProcAddr( device, "vkAllocateDescriptorSets" ) ); - vkFreeDescriptorSets = PFN_vkFreeDescriptorSets( vkGetDeviceProcAddr( device, "vkFreeDescriptorSets" ) ); - vkUpdateDescriptorSets = PFN_vkUpdateDescriptorSets( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSets" ) ); - vkCreateFramebuffer = PFN_vkCreateFramebuffer( vkGetDeviceProcAddr( device, "vkCreateFramebuffer" ) ); - vkDestroyFramebuffer = PFN_vkDestroyFramebuffer( vkGetDeviceProcAddr( device, "vkDestroyFramebuffer" ) ); - vkCreateRenderPass = PFN_vkCreateRenderPass( vkGetDeviceProcAddr( device, "vkCreateRenderPass" ) ); - vkDestroyRenderPass = PFN_vkDestroyRenderPass( vkGetDeviceProcAddr( device, "vkDestroyRenderPass" ) ); - vkGetRenderAreaGranularity = PFN_vkGetRenderAreaGranularity( vkGetDeviceProcAddr( device, "vkGetRenderAreaGranularity" ) ); - vkCreateCommandPool = PFN_vkCreateCommandPool( vkGetDeviceProcAddr( device, "vkCreateCommandPool" ) ); - vkDestroyCommandPool = PFN_vkDestroyCommandPool( vkGetDeviceProcAddr( device, "vkDestroyCommandPool" ) ); - vkResetCommandPool = PFN_vkResetCommandPool( vkGetDeviceProcAddr( device, "vkResetCommandPool" ) ); - vkAllocateCommandBuffers = PFN_vkAllocateCommandBuffers( vkGetDeviceProcAddr( device, "vkAllocateCommandBuffers" ) ); - vkFreeCommandBuffers = PFN_vkFreeCommandBuffers( vkGetDeviceProcAddr( device, "vkFreeCommandBuffers" ) ); - vkBeginCommandBuffer = PFN_vkBeginCommandBuffer( 
vkGetDeviceProcAddr( device, "vkBeginCommandBuffer" ) ); - vkEndCommandBuffer = PFN_vkEndCommandBuffer( vkGetDeviceProcAddr( device, "vkEndCommandBuffer" ) ); - vkResetCommandBuffer = PFN_vkResetCommandBuffer( vkGetDeviceProcAddr( device, "vkResetCommandBuffer" ) ); - vkCmdBindPipeline = PFN_vkCmdBindPipeline( vkGetDeviceProcAddr( device, "vkCmdBindPipeline" ) ); - vkCmdSetViewport = PFN_vkCmdSetViewport( vkGetDeviceProcAddr( device, "vkCmdSetViewport" ) ); - vkCmdSetScissor = PFN_vkCmdSetScissor( vkGetDeviceProcAddr( device, "vkCmdSetScissor" ) ); - vkCmdSetLineWidth = PFN_vkCmdSetLineWidth( vkGetDeviceProcAddr( device, "vkCmdSetLineWidth" ) ); - vkCmdSetDepthBias = PFN_vkCmdSetDepthBias( vkGetDeviceProcAddr( device, "vkCmdSetDepthBias" ) ); - vkCmdSetBlendConstants = PFN_vkCmdSetBlendConstants( vkGetDeviceProcAddr( device, "vkCmdSetBlendConstants" ) ); - vkCmdSetDepthBounds = PFN_vkCmdSetDepthBounds( vkGetDeviceProcAddr( device, "vkCmdSetDepthBounds" ) ); - vkCmdSetStencilCompareMask = PFN_vkCmdSetStencilCompareMask( vkGetDeviceProcAddr( device, "vkCmdSetStencilCompareMask" ) ); - vkCmdSetStencilWriteMask = PFN_vkCmdSetStencilWriteMask( vkGetDeviceProcAddr( device, "vkCmdSetStencilWriteMask" ) ); - vkCmdSetStencilReference = PFN_vkCmdSetStencilReference( vkGetDeviceProcAddr( device, "vkCmdSetStencilReference" ) ); - vkCmdBindDescriptorSets = PFN_vkCmdBindDescriptorSets( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorSets" ) ); - vkCmdBindIndexBuffer = PFN_vkCmdBindIndexBuffer( vkGetDeviceProcAddr( device, "vkCmdBindIndexBuffer" ) ); - vkCmdBindVertexBuffers = PFN_vkCmdBindVertexBuffers( vkGetDeviceProcAddr( device, "vkCmdBindVertexBuffers" ) ); - vkCmdDraw = PFN_vkCmdDraw( vkGetDeviceProcAddr( device, "vkCmdDraw" ) ); - vkCmdDrawIndexed = PFN_vkCmdDrawIndexed( vkGetDeviceProcAddr( device, "vkCmdDrawIndexed" ) ); - vkCmdDrawIndirect = PFN_vkCmdDrawIndirect( vkGetDeviceProcAddr( device, "vkCmdDrawIndirect" ) ); - vkCmdDrawIndexedIndirect = PFN_vkCmdDrawIndexedIndirect( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirect" ) ); - vkCmdDispatch = PFN_vkCmdDispatch( vkGetDeviceProcAddr( device, "vkCmdDispatch" ) ); - vkCmdDispatchIndirect = PFN_vkCmdDispatchIndirect( vkGetDeviceProcAddr( device, "vkCmdDispatchIndirect" ) ); - vkCmdCopyBuffer = PFN_vkCmdCopyBuffer( vkGetDeviceProcAddr( device, "vkCmdCopyBuffer" ) ); - vkCmdCopyImage = PFN_vkCmdCopyImage( vkGetDeviceProcAddr( device, "vkCmdCopyImage" ) ); - vkCmdBlitImage = PFN_vkCmdBlitImage( vkGetDeviceProcAddr( device, "vkCmdBlitImage" ) ); - vkCmdCopyBufferToImage = PFN_vkCmdCopyBufferToImage( vkGetDeviceProcAddr( device, "vkCmdCopyBufferToImage" ) ); - vkCmdCopyImageToBuffer = PFN_vkCmdCopyImageToBuffer( vkGetDeviceProcAddr( device, "vkCmdCopyImageToBuffer" ) ); - vkCmdUpdateBuffer = PFN_vkCmdUpdateBuffer( vkGetDeviceProcAddr( device, "vkCmdUpdateBuffer" ) ); - vkCmdFillBuffer = PFN_vkCmdFillBuffer( vkGetDeviceProcAddr( device, "vkCmdFillBuffer" ) ); - vkCmdClearColorImage = PFN_vkCmdClearColorImage( vkGetDeviceProcAddr( device, "vkCmdClearColorImage" ) ); - vkCmdClearDepthStencilImage = PFN_vkCmdClearDepthStencilImage( vkGetDeviceProcAddr( device, "vkCmdClearDepthStencilImage" ) ); - vkCmdClearAttachments = PFN_vkCmdClearAttachments( vkGetDeviceProcAddr( device, "vkCmdClearAttachments" ) ); - vkCmdResolveImage = PFN_vkCmdResolveImage( vkGetDeviceProcAddr( device, "vkCmdResolveImage" ) ); - vkCmdSetEvent = PFN_vkCmdSetEvent( vkGetDeviceProcAddr( device, "vkCmdSetEvent" ) ); - vkCmdResetEvent = PFN_vkCmdResetEvent( vkGetDeviceProcAddr( 
device, "vkCmdResetEvent" ) ); - vkCmdWaitEvents = PFN_vkCmdWaitEvents( vkGetDeviceProcAddr( device, "vkCmdWaitEvents" ) ); - vkCmdPipelineBarrier = PFN_vkCmdPipelineBarrier( vkGetDeviceProcAddr( device, "vkCmdPipelineBarrier" ) ); - vkCmdBeginQuery = PFN_vkCmdBeginQuery( vkGetDeviceProcAddr( device, "vkCmdBeginQuery" ) ); - vkCmdEndQuery = PFN_vkCmdEndQuery( vkGetDeviceProcAddr( device, "vkCmdEndQuery" ) ); - vkCmdResetQueryPool = PFN_vkCmdResetQueryPool( vkGetDeviceProcAddr( device, "vkCmdResetQueryPool" ) ); - vkCmdWriteTimestamp = PFN_vkCmdWriteTimestamp( vkGetDeviceProcAddr( device, "vkCmdWriteTimestamp" ) ); - vkCmdCopyQueryPoolResults = PFN_vkCmdCopyQueryPoolResults( vkGetDeviceProcAddr( device, "vkCmdCopyQueryPoolResults" ) ); - vkCmdPushConstants = PFN_vkCmdPushConstants( vkGetDeviceProcAddr( device, "vkCmdPushConstants" ) ); - vkCmdBeginRenderPass = PFN_vkCmdBeginRenderPass( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass" ) ); - vkCmdNextSubpass = PFN_vkCmdNextSubpass( vkGetDeviceProcAddr( device, "vkCmdNextSubpass" ) ); - vkCmdEndRenderPass = PFN_vkCmdEndRenderPass( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass" ) ); - vkCmdExecuteCommands = PFN_vkCmdExecuteCommands( vkGetDeviceProcAddr( device, "vkCmdExecuteCommands" ) ); + DispatchLoaderDynamic( PFN_vkGetInstanceProcAddr getInstanceProcAddr ) VULKAN_HPP_NOEXCEPT + { + init( getInstanceProcAddr ); + } - //=== VK_VERSION_1_1 === - vkBindBufferMemory2 = PFN_vkBindBufferMemory2( vkGetDeviceProcAddr( device, "vkBindBufferMemory2" ) ); - vkBindImageMemory2 = PFN_vkBindImageMemory2( vkGetDeviceProcAddr( device, "vkBindImageMemory2" ) ); - vkGetDeviceGroupPeerMemoryFeatures = PFN_vkGetDeviceGroupPeerMemoryFeatures( vkGetDeviceProcAddr( device, "vkGetDeviceGroupPeerMemoryFeatures" ) ); - vkCmdSetDeviceMask = PFN_vkCmdSetDeviceMask( vkGetDeviceProcAddr( device, "vkCmdSetDeviceMask" ) ); - vkCmdDispatchBase = PFN_vkCmdDispatchBase( vkGetDeviceProcAddr( device, "vkCmdDispatchBase" ) ); - vkGetImageMemoryRequirements2 = PFN_vkGetImageMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements2" ) ); - vkGetBufferMemoryRequirements2 = PFN_vkGetBufferMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements2" ) ); - vkGetImageSparseMemoryRequirements2 = PFN_vkGetImageSparseMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements2" ) ); - vkTrimCommandPool = PFN_vkTrimCommandPool( vkGetDeviceProcAddr( device, "vkTrimCommandPool" ) ); - vkGetDeviceQueue2 = PFN_vkGetDeviceQueue2( vkGetDeviceProcAddr( device, "vkGetDeviceQueue2" ) ); - vkCreateSamplerYcbcrConversion = PFN_vkCreateSamplerYcbcrConversion( vkGetDeviceProcAddr( device, "vkCreateSamplerYcbcrConversion" ) ); - vkDestroySamplerYcbcrConversion = PFN_vkDestroySamplerYcbcrConversion( vkGetDeviceProcAddr( device, "vkDestroySamplerYcbcrConversion" ) ); - vkCreateDescriptorUpdateTemplate = PFN_vkCreateDescriptorUpdateTemplate( vkGetDeviceProcAddr( device, "vkCreateDescriptorUpdateTemplate" ) ); - vkDestroyDescriptorUpdateTemplate = PFN_vkDestroyDescriptorUpdateTemplate( vkGetDeviceProcAddr( device, "vkDestroyDescriptorUpdateTemplate" ) ); - vkUpdateDescriptorSetWithTemplate = PFN_vkUpdateDescriptorSetWithTemplate( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSetWithTemplate" ) ); - vkGetDescriptorSetLayoutSupport = PFN_vkGetDescriptorSetLayoutSupport( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutSupport" ) ); + // This interface does not require a linked vulkan library. 
+ DispatchLoaderDynamic( VkInstance instance, + PFN_vkGetInstanceProcAddr getInstanceProcAddr, + VkDevice device = {}, + PFN_vkGetDeviceProcAddr getDeviceProcAddr = nullptr ) VULKAN_HPP_NOEXCEPT + { + init( instance, getInstanceProcAddr, device, getDeviceProcAddr ); + } - //=== VK_VERSION_1_2 === - vkCmdDrawIndirectCount = PFN_vkCmdDrawIndirectCount( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCount" ) ); - vkCmdDrawIndexedIndirectCount = PFN_vkCmdDrawIndexedIndirectCount( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCount" ) ); - vkCreateRenderPass2 = PFN_vkCreateRenderPass2( vkGetDeviceProcAddr( device, "vkCreateRenderPass2" ) ); - vkCmdBeginRenderPass2 = PFN_vkCmdBeginRenderPass2( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass2" ) ); - vkCmdNextSubpass2 = PFN_vkCmdNextSubpass2( vkGetDeviceProcAddr( device, "vkCmdNextSubpass2" ) ); - vkCmdEndRenderPass2 = PFN_vkCmdEndRenderPass2( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass2" ) ); - vkResetQueryPool = PFN_vkResetQueryPool( vkGetDeviceProcAddr( device, "vkResetQueryPool" ) ); - vkGetSemaphoreCounterValue = PFN_vkGetSemaphoreCounterValue( vkGetDeviceProcAddr( device, "vkGetSemaphoreCounterValue" ) ); - vkWaitSemaphores = PFN_vkWaitSemaphores( vkGetDeviceProcAddr( device, "vkWaitSemaphores" ) ); - vkSignalSemaphore = PFN_vkSignalSemaphore( vkGetDeviceProcAddr( device, "vkSignalSemaphore" ) ); - vkGetBufferDeviceAddress = PFN_vkGetBufferDeviceAddress( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddress" ) ); - vkGetBufferOpaqueCaptureAddress = PFN_vkGetBufferOpaqueCaptureAddress( vkGetDeviceProcAddr( device, "vkGetBufferOpaqueCaptureAddress" ) ); - vkGetDeviceMemoryOpaqueCaptureAddress = - PFN_vkGetDeviceMemoryOpaqueCaptureAddress( vkGetDeviceProcAddr( device, "vkGetDeviceMemoryOpaqueCaptureAddress" ) ); + template + void init() + { + static DynamicLoader dl; + init( dl ); + } - //=== VK_VERSION_1_3 === - vkCreatePrivateDataSlot = PFN_vkCreatePrivateDataSlot( vkGetDeviceProcAddr( device, "vkCreatePrivateDataSlot" ) ); - vkDestroyPrivateDataSlot = PFN_vkDestroyPrivateDataSlot( vkGetDeviceProcAddr( device, "vkDestroyPrivateDataSlot" ) ); - vkSetPrivateData = PFN_vkSetPrivateData( vkGetDeviceProcAddr( device, "vkSetPrivateData" ) ); - vkGetPrivateData = PFN_vkGetPrivateData( vkGetDeviceProcAddr( device, "vkGetPrivateData" ) ); - vkCmdSetEvent2 = PFN_vkCmdSetEvent2( vkGetDeviceProcAddr( device, "vkCmdSetEvent2" ) ); - vkCmdResetEvent2 = PFN_vkCmdResetEvent2( vkGetDeviceProcAddr( device, "vkCmdResetEvent2" ) ); - vkCmdWaitEvents2 = PFN_vkCmdWaitEvents2( vkGetDeviceProcAddr( device, "vkCmdWaitEvents2" ) ); - vkCmdPipelineBarrier2 = PFN_vkCmdPipelineBarrier2( vkGetDeviceProcAddr( device, "vkCmdPipelineBarrier2" ) ); - vkCmdWriteTimestamp2 = PFN_vkCmdWriteTimestamp2( vkGetDeviceProcAddr( device, "vkCmdWriteTimestamp2" ) ); - vkQueueSubmit2 = PFN_vkQueueSubmit2( vkGetDeviceProcAddr( device, "vkQueueSubmit2" ) ); - vkCmdCopyBuffer2 = PFN_vkCmdCopyBuffer2( vkGetDeviceProcAddr( device, "vkCmdCopyBuffer2" ) ); - vkCmdCopyImage2 = PFN_vkCmdCopyImage2( vkGetDeviceProcAddr( device, "vkCmdCopyImage2" ) ); - vkCmdCopyBufferToImage2 = PFN_vkCmdCopyBufferToImage2( vkGetDeviceProcAddr( device, "vkCmdCopyBufferToImage2" ) ); - vkCmdCopyImageToBuffer2 = PFN_vkCmdCopyImageToBuffer2( vkGetDeviceProcAddr( device, "vkCmdCopyImageToBuffer2" ) ); - vkCmdBlitImage2 = PFN_vkCmdBlitImage2( vkGetDeviceProcAddr( device, "vkCmdBlitImage2" ) ); - vkCmdResolveImage2 = PFN_vkCmdResolveImage2( vkGetDeviceProcAddr( device, "vkCmdResolveImage2" ) ); - 
vkCmdBeginRendering = PFN_vkCmdBeginRendering( vkGetDeviceProcAddr( device, "vkCmdBeginRendering" ) ); - vkCmdEndRendering = PFN_vkCmdEndRendering( vkGetDeviceProcAddr( device, "vkCmdEndRendering" ) ); - vkCmdSetCullMode = PFN_vkCmdSetCullMode( vkGetDeviceProcAddr( device, "vkCmdSetCullMode" ) ); - vkCmdSetFrontFace = PFN_vkCmdSetFrontFace( vkGetDeviceProcAddr( device, "vkCmdSetFrontFace" ) ); - vkCmdSetPrimitiveTopology = PFN_vkCmdSetPrimitiveTopology( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveTopology" ) ); - vkCmdSetViewportWithCount = PFN_vkCmdSetViewportWithCount( vkGetDeviceProcAddr( device, "vkCmdSetViewportWithCount" ) ); - vkCmdSetScissorWithCount = PFN_vkCmdSetScissorWithCount( vkGetDeviceProcAddr( device, "vkCmdSetScissorWithCount" ) ); - vkCmdBindVertexBuffers2 = PFN_vkCmdBindVertexBuffers2( vkGetDeviceProcAddr( device, "vkCmdBindVertexBuffers2" ) ); - vkCmdSetDepthTestEnable = PFN_vkCmdSetDepthTestEnable( vkGetDeviceProcAddr( device, "vkCmdSetDepthTestEnable" ) ); - vkCmdSetDepthWriteEnable = PFN_vkCmdSetDepthWriteEnable( vkGetDeviceProcAddr( device, "vkCmdSetDepthWriteEnable" ) ); - vkCmdSetDepthCompareOp = PFN_vkCmdSetDepthCompareOp( vkGetDeviceProcAddr( device, "vkCmdSetDepthCompareOp" ) ); - vkCmdSetDepthBoundsTestEnable = PFN_vkCmdSetDepthBoundsTestEnable( vkGetDeviceProcAddr( device, "vkCmdSetDepthBoundsTestEnable" ) ); - vkCmdSetStencilTestEnable = PFN_vkCmdSetStencilTestEnable( vkGetDeviceProcAddr( device, "vkCmdSetStencilTestEnable" ) ); - vkCmdSetStencilOp = PFN_vkCmdSetStencilOp( vkGetDeviceProcAddr( device, "vkCmdSetStencilOp" ) ); - vkCmdSetRasterizerDiscardEnable = PFN_vkCmdSetRasterizerDiscardEnable( vkGetDeviceProcAddr( device, "vkCmdSetRasterizerDiscardEnable" ) ); - vkCmdSetDepthBiasEnable = PFN_vkCmdSetDepthBiasEnable( vkGetDeviceProcAddr( device, "vkCmdSetDepthBiasEnable" ) ); - vkCmdSetPrimitiveRestartEnable = PFN_vkCmdSetPrimitiveRestartEnable( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveRestartEnable" ) ); - vkGetDeviceBufferMemoryRequirements = PFN_vkGetDeviceBufferMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetDeviceBufferMemoryRequirements" ) ); - vkGetDeviceImageMemoryRequirements = PFN_vkGetDeviceImageMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetDeviceImageMemoryRequirements" ) ); - vkGetDeviceImageSparseMemoryRequirements = - PFN_vkGetDeviceImageSparseMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetDeviceImageSparseMemoryRequirements" ) ); + template + void init( DynamicLoader const & dl ) VULKAN_HPP_NOEXCEPT + { + PFN_vkGetInstanceProcAddr getInstanceProcAddr = dl.template getProcAddress( "vkGetInstanceProcAddr" ); + init( getInstanceProcAddr ); + } - //=== VK_KHR_swapchain === - vkCreateSwapchainKHR = PFN_vkCreateSwapchainKHR( vkGetDeviceProcAddr( device, "vkCreateSwapchainKHR" ) ); - vkDestroySwapchainKHR = PFN_vkDestroySwapchainKHR( vkGetDeviceProcAddr( device, "vkDestroySwapchainKHR" ) ); - vkGetSwapchainImagesKHR = PFN_vkGetSwapchainImagesKHR( vkGetDeviceProcAddr( device, "vkGetSwapchainImagesKHR" ) ); - vkAcquireNextImageKHR = PFN_vkAcquireNextImageKHR( vkGetDeviceProcAddr( device, "vkAcquireNextImageKHR" ) ); - vkQueuePresentKHR = PFN_vkQueuePresentKHR( vkGetDeviceProcAddr( device, "vkQueuePresentKHR" ) ); - vkGetDeviceGroupPresentCapabilitiesKHR = - PFN_vkGetDeviceGroupPresentCapabilitiesKHR( vkGetDeviceProcAddr( device, "vkGetDeviceGroupPresentCapabilitiesKHR" ) ); - vkGetDeviceGroupSurfacePresentModesKHR = - PFN_vkGetDeviceGroupSurfacePresentModesKHR( vkGetDeviceProcAddr( device, 
"vkGetDeviceGroupSurfacePresentModesKHR" ) ); - vkAcquireNextImage2KHR = PFN_vkAcquireNextImage2KHR( vkGetDeviceProcAddr( device, "vkAcquireNextImage2KHR" ) ); + void init( PFN_vkGetInstanceProcAddr getInstanceProcAddr ) VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getInstanceProcAddr ); - //=== VK_KHR_display_swapchain === - vkCreateSharedSwapchainsKHR = PFN_vkCreateSharedSwapchainsKHR( vkGetDeviceProcAddr( device, "vkCreateSharedSwapchainsKHR" ) ); + vkGetInstanceProcAddr = getInstanceProcAddr; - //=== VK_EXT_debug_marker === - vkDebugMarkerSetObjectTagEXT = PFN_vkDebugMarkerSetObjectTagEXT( vkGetDeviceProcAddr( device, "vkDebugMarkerSetObjectTagEXT" ) ); - vkDebugMarkerSetObjectNameEXT = PFN_vkDebugMarkerSetObjectNameEXT( vkGetDeviceProcAddr( device, "vkDebugMarkerSetObjectNameEXT" ) ); - vkCmdDebugMarkerBeginEXT = PFN_vkCmdDebugMarkerBeginEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerBeginEXT" ) ); - vkCmdDebugMarkerEndEXT = PFN_vkCmdDebugMarkerEndEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerEndEXT" ) ); - vkCmdDebugMarkerInsertEXT = PFN_vkCmdDebugMarkerInsertEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerInsertEXT" ) ); + //=== VK_VERSION_1_0 === + vkCreateInstance = PFN_vkCreateInstance( vkGetInstanceProcAddr( NULL, "vkCreateInstance" ) ); + vkEnumerateInstanceExtensionProperties = + PFN_vkEnumerateInstanceExtensionProperties( vkGetInstanceProcAddr( NULL, "vkEnumerateInstanceExtensionProperties" ) ); + vkEnumerateInstanceLayerProperties = PFN_vkEnumerateInstanceLayerProperties( vkGetInstanceProcAddr( NULL, "vkEnumerateInstanceLayerProperties" ) ); - //=== VK_KHR_video_queue === - vkCreateVideoSessionKHR = PFN_vkCreateVideoSessionKHR( vkGetDeviceProcAddr( device, "vkCreateVideoSessionKHR" ) ); - vkDestroyVideoSessionKHR = PFN_vkDestroyVideoSessionKHR( vkGetDeviceProcAddr( device, "vkDestroyVideoSessionKHR" ) ); - vkGetVideoSessionMemoryRequirementsKHR = - PFN_vkGetVideoSessionMemoryRequirementsKHR( vkGetDeviceProcAddr( device, "vkGetVideoSessionMemoryRequirementsKHR" ) ); - vkBindVideoSessionMemoryKHR = PFN_vkBindVideoSessionMemoryKHR( vkGetDeviceProcAddr( device, "vkBindVideoSessionMemoryKHR" ) ); - vkCreateVideoSessionParametersKHR = PFN_vkCreateVideoSessionParametersKHR( vkGetDeviceProcAddr( device, "vkCreateVideoSessionParametersKHR" ) ); - vkUpdateVideoSessionParametersKHR = PFN_vkUpdateVideoSessionParametersKHR( vkGetDeviceProcAddr( device, "vkUpdateVideoSessionParametersKHR" ) ); - vkDestroyVideoSessionParametersKHR = PFN_vkDestroyVideoSessionParametersKHR( vkGetDeviceProcAddr( device, "vkDestroyVideoSessionParametersKHR" ) ); - vkCmdBeginVideoCodingKHR = PFN_vkCmdBeginVideoCodingKHR( vkGetDeviceProcAddr( device, "vkCmdBeginVideoCodingKHR" ) ); - vkCmdEndVideoCodingKHR = PFN_vkCmdEndVideoCodingKHR( vkGetDeviceProcAddr( device, "vkCmdEndVideoCodingKHR" ) ); - vkCmdControlVideoCodingKHR = PFN_vkCmdControlVideoCodingKHR( vkGetDeviceProcAddr( device, "vkCmdControlVideoCodingKHR" ) ); + //=== VK_VERSION_1_1 === + vkEnumerateInstanceVersion = PFN_vkEnumerateInstanceVersion( vkGetInstanceProcAddr( NULL, "vkEnumerateInstanceVersion" ) ); + } - //=== VK_KHR_video_decode_queue === - vkCmdDecodeVideoKHR = PFN_vkCmdDecodeVideoKHR( vkGetDeviceProcAddr( device, "vkCmdDecodeVideoKHR" ) ); + // This interface does not require a linked vulkan library. 
+    void init( VkInstance instance,
+               PFN_vkGetInstanceProcAddr getInstanceProcAddr,
+               VkDevice device = {},
+               PFN_vkGetDeviceProcAddr /*getDeviceProcAddr*/ = nullptr ) VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( instance && getInstanceProcAddr );
+      vkGetInstanceProcAddr = getInstanceProcAddr;
+      init( VULKAN_HPP_NAMESPACE::Instance( instance ) );
+      if ( device )
+      {
+        init( VULKAN_HPP_NAMESPACE::Device( device ) );
+      }
+    }
-      //=== VK_EXT_transform_feedback ===
-      vkCmdBindTransformFeedbackBuffersEXT = PFN_vkCmdBindTransformFeedbackBuffersEXT( vkGetDeviceProcAddr( device, "vkCmdBindTransformFeedbackBuffersEXT" ) );
-      vkCmdBeginTransformFeedbackEXT = PFN_vkCmdBeginTransformFeedbackEXT( vkGetDeviceProcAddr( device, "vkCmdBeginTransformFeedbackEXT" ) );
-      vkCmdEndTransformFeedbackEXT = PFN_vkCmdEndTransformFeedbackEXT( vkGetDeviceProcAddr( device, "vkCmdEndTransformFeedbackEXT" ) );
-      vkCmdBeginQueryIndexedEXT = PFN_vkCmdBeginQueryIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdBeginQueryIndexedEXT" ) );
-      vkCmdEndQueryIndexedEXT = PFN_vkCmdEndQueryIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdEndQueryIndexedEXT" ) );
-      vkCmdDrawIndirectByteCountEXT = PFN_vkCmdDrawIndirectByteCountEXT( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectByteCountEXT" ) );
+    void init( VULKAN_HPP_NAMESPACE::Instance instanceCpp ) VULKAN_HPP_NOEXCEPT
+    {
+      VkInstance instance = static_cast<VkInstance>( instanceCpp );
-      //=== VK_NVX_binary_import ===
-      vkCreateCuModuleNVX = PFN_vkCreateCuModuleNVX( vkGetDeviceProcAddr( device, "vkCreateCuModuleNVX" ) );
-      vkCreateCuFunctionNVX = PFN_vkCreateCuFunctionNVX( vkGetDeviceProcAddr( device, "vkCreateCuFunctionNVX" ) );
-      vkDestroyCuModuleNVX = PFN_vkDestroyCuModuleNVX( vkGetDeviceProcAddr( device, "vkDestroyCuModuleNVX" ) );
-      vkDestroyCuFunctionNVX = PFN_vkDestroyCuFunctionNVX( vkGetDeviceProcAddr( device, "vkDestroyCuFunctionNVX" ) );
-      vkCmdCuLaunchKernelNVX = PFN_vkCmdCuLaunchKernelNVX( vkGetDeviceProcAddr( device, "vkCmdCuLaunchKernelNVX" ) );
+      //=== VK_VERSION_1_0 ===
+      vkDestroyInstance = PFN_vkDestroyInstance( vkGetInstanceProcAddr( instance, "vkDestroyInstance" ) );
+      vkEnumeratePhysicalDevices = PFN_vkEnumeratePhysicalDevices( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDevices" ) );
+      vkGetPhysicalDeviceFeatures = PFN_vkGetPhysicalDeviceFeatures( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures" ) );
+      vkGetPhysicalDeviceFormatProperties =
+        PFN_vkGetPhysicalDeviceFormatProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties" ) );
+      vkGetPhysicalDeviceImageFormatProperties =
+        PFN_vkGetPhysicalDeviceImageFormatProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties" ) );
+      vkGetPhysicalDeviceProperties = PFN_vkGetPhysicalDeviceProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties" ) );
+      vkGetPhysicalDeviceQueueFamilyProperties =
+        PFN_vkGetPhysicalDeviceQueueFamilyProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties" ) );
+      vkGetPhysicalDeviceMemoryProperties =
+        PFN_vkGetPhysicalDeviceMemoryProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties" ) );
+      vkGetDeviceProcAddr = PFN_vkGetDeviceProcAddr( vkGetInstanceProcAddr( instance, "vkGetDeviceProcAddr" ) );
+      vkCreateDevice = PFN_vkCreateDevice( vkGetInstanceProcAddr( instance, "vkCreateDevice" ) );
+      vkDestroyDevice = PFN_vkDestroyDevice( vkGetInstanceProcAddr( instance, "vkDestroyDevice" ) );
+      vkEnumerateDeviceExtensionProperties =
+
PFN_vkEnumerateDeviceExtensionProperties( vkGetInstanceProcAddr( instance, "vkEnumerateDeviceExtensionProperties" ) ); + vkEnumerateDeviceLayerProperties = PFN_vkEnumerateDeviceLayerProperties( vkGetInstanceProcAddr( instance, "vkEnumerateDeviceLayerProperties" ) ); + vkGetDeviceQueue = PFN_vkGetDeviceQueue( vkGetInstanceProcAddr( instance, "vkGetDeviceQueue" ) ); + vkQueueSubmit = PFN_vkQueueSubmit( vkGetInstanceProcAddr( instance, "vkQueueSubmit" ) ); + vkQueueWaitIdle = PFN_vkQueueWaitIdle( vkGetInstanceProcAddr( instance, "vkQueueWaitIdle" ) ); + vkDeviceWaitIdle = PFN_vkDeviceWaitIdle( vkGetInstanceProcAddr( instance, "vkDeviceWaitIdle" ) ); + vkAllocateMemory = PFN_vkAllocateMemory( vkGetInstanceProcAddr( instance, "vkAllocateMemory" ) ); + vkFreeMemory = PFN_vkFreeMemory( vkGetInstanceProcAddr( instance, "vkFreeMemory" ) ); + vkMapMemory = PFN_vkMapMemory( vkGetInstanceProcAddr( instance, "vkMapMemory" ) ); + vkUnmapMemory = PFN_vkUnmapMemory( vkGetInstanceProcAddr( instance, "vkUnmapMemory" ) ); + vkFlushMappedMemoryRanges = PFN_vkFlushMappedMemoryRanges( vkGetInstanceProcAddr( instance, "vkFlushMappedMemoryRanges" ) ); + vkInvalidateMappedMemoryRanges = PFN_vkInvalidateMappedMemoryRanges( vkGetInstanceProcAddr( instance, "vkInvalidateMappedMemoryRanges" ) ); + vkGetDeviceMemoryCommitment = PFN_vkGetDeviceMemoryCommitment( vkGetInstanceProcAddr( instance, "vkGetDeviceMemoryCommitment" ) ); + vkBindBufferMemory = PFN_vkBindBufferMemory( vkGetInstanceProcAddr( instance, "vkBindBufferMemory" ) ); + vkBindImageMemory = PFN_vkBindImageMemory( vkGetInstanceProcAddr( instance, "vkBindImageMemory" ) ); + vkGetBufferMemoryRequirements = PFN_vkGetBufferMemoryRequirements( vkGetInstanceProcAddr( instance, "vkGetBufferMemoryRequirements" ) ); + vkGetImageMemoryRequirements = PFN_vkGetImageMemoryRequirements( vkGetInstanceProcAddr( instance, "vkGetImageMemoryRequirements" ) ); + vkGetImageSparseMemoryRequirements = PFN_vkGetImageSparseMemoryRequirements( vkGetInstanceProcAddr( instance, "vkGetImageSparseMemoryRequirements" ) ); + vkGetPhysicalDeviceSparseImageFormatProperties = + PFN_vkGetPhysicalDeviceSparseImageFormatProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties" ) ); + vkQueueBindSparse = PFN_vkQueueBindSparse( vkGetInstanceProcAddr( instance, "vkQueueBindSparse" ) ); + vkCreateFence = PFN_vkCreateFence( vkGetInstanceProcAddr( instance, "vkCreateFence" ) ); + vkDestroyFence = PFN_vkDestroyFence( vkGetInstanceProcAddr( instance, "vkDestroyFence" ) ); + vkResetFences = PFN_vkResetFences( vkGetInstanceProcAddr( instance, "vkResetFences" ) ); + vkGetFenceStatus = PFN_vkGetFenceStatus( vkGetInstanceProcAddr( instance, "vkGetFenceStatus" ) ); + vkWaitForFences = PFN_vkWaitForFences( vkGetInstanceProcAddr( instance, "vkWaitForFences" ) ); + vkCreateSemaphore = PFN_vkCreateSemaphore( vkGetInstanceProcAddr( instance, "vkCreateSemaphore" ) ); + vkDestroySemaphore = PFN_vkDestroySemaphore( vkGetInstanceProcAddr( instance, "vkDestroySemaphore" ) ); + vkCreateEvent = PFN_vkCreateEvent( vkGetInstanceProcAddr( instance, "vkCreateEvent" ) ); + vkDestroyEvent = PFN_vkDestroyEvent( vkGetInstanceProcAddr( instance, "vkDestroyEvent" ) ); + vkGetEventStatus = PFN_vkGetEventStatus( vkGetInstanceProcAddr( instance, "vkGetEventStatus" ) ); + vkSetEvent = PFN_vkSetEvent( vkGetInstanceProcAddr( instance, "vkSetEvent" ) ); + vkResetEvent = PFN_vkResetEvent( vkGetInstanceProcAddr( instance, "vkResetEvent" ) ); + vkCreateQueryPool = PFN_vkCreateQueryPool( 
vkGetInstanceProcAddr( instance, "vkCreateQueryPool" ) ); + vkDestroyQueryPool = PFN_vkDestroyQueryPool( vkGetInstanceProcAddr( instance, "vkDestroyQueryPool" ) ); + vkGetQueryPoolResults = PFN_vkGetQueryPoolResults( vkGetInstanceProcAddr( instance, "vkGetQueryPoolResults" ) ); + vkCreateBuffer = PFN_vkCreateBuffer( vkGetInstanceProcAddr( instance, "vkCreateBuffer" ) ); + vkDestroyBuffer = PFN_vkDestroyBuffer( vkGetInstanceProcAddr( instance, "vkDestroyBuffer" ) ); + vkCreateBufferView = PFN_vkCreateBufferView( vkGetInstanceProcAddr( instance, "vkCreateBufferView" ) ); + vkDestroyBufferView = PFN_vkDestroyBufferView( vkGetInstanceProcAddr( instance, "vkDestroyBufferView" ) ); + vkCreateImage = PFN_vkCreateImage( vkGetInstanceProcAddr( instance, "vkCreateImage" ) ); + vkDestroyImage = PFN_vkDestroyImage( vkGetInstanceProcAddr( instance, "vkDestroyImage" ) ); + vkGetImageSubresourceLayout = PFN_vkGetImageSubresourceLayout( vkGetInstanceProcAddr( instance, "vkGetImageSubresourceLayout" ) ); + vkCreateImageView = PFN_vkCreateImageView( vkGetInstanceProcAddr( instance, "vkCreateImageView" ) ); + vkDestroyImageView = PFN_vkDestroyImageView( vkGetInstanceProcAddr( instance, "vkDestroyImageView" ) ); + vkCreateShaderModule = PFN_vkCreateShaderModule( vkGetInstanceProcAddr( instance, "vkCreateShaderModule" ) ); + vkDestroyShaderModule = PFN_vkDestroyShaderModule( vkGetInstanceProcAddr( instance, "vkDestroyShaderModule" ) ); + vkCreatePipelineCache = PFN_vkCreatePipelineCache( vkGetInstanceProcAddr( instance, "vkCreatePipelineCache" ) ); + vkDestroyPipelineCache = PFN_vkDestroyPipelineCache( vkGetInstanceProcAddr( instance, "vkDestroyPipelineCache" ) ); + vkGetPipelineCacheData = PFN_vkGetPipelineCacheData( vkGetInstanceProcAddr( instance, "vkGetPipelineCacheData" ) ); + vkMergePipelineCaches = PFN_vkMergePipelineCaches( vkGetInstanceProcAddr( instance, "vkMergePipelineCaches" ) ); + vkCreateGraphicsPipelines = PFN_vkCreateGraphicsPipelines( vkGetInstanceProcAddr( instance, "vkCreateGraphicsPipelines" ) ); + vkCreateComputePipelines = PFN_vkCreateComputePipelines( vkGetInstanceProcAddr( instance, "vkCreateComputePipelines" ) ); + vkDestroyPipeline = PFN_vkDestroyPipeline( vkGetInstanceProcAddr( instance, "vkDestroyPipeline" ) ); + vkCreatePipelineLayout = PFN_vkCreatePipelineLayout( vkGetInstanceProcAddr( instance, "vkCreatePipelineLayout" ) ); + vkDestroyPipelineLayout = PFN_vkDestroyPipelineLayout( vkGetInstanceProcAddr( instance, "vkDestroyPipelineLayout" ) ); + vkCreateSampler = PFN_vkCreateSampler( vkGetInstanceProcAddr( instance, "vkCreateSampler" ) ); + vkDestroySampler = PFN_vkDestroySampler( vkGetInstanceProcAddr( instance, "vkDestroySampler" ) ); + vkCreateDescriptorSetLayout = PFN_vkCreateDescriptorSetLayout( vkGetInstanceProcAddr( instance, "vkCreateDescriptorSetLayout" ) ); + vkDestroyDescriptorSetLayout = PFN_vkDestroyDescriptorSetLayout( vkGetInstanceProcAddr( instance, "vkDestroyDescriptorSetLayout" ) ); + vkCreateDescriptorPool = PFN_vkCreateDescriptorPool( vkGetInstanceProcAddr( instance, "vkCreateDescriptorPool" ) ); + vkDestroyDescriptorPool = PFN_vkDestroyDescriptorPool( vkGetInstanceProcAddr( instance, "vkDestroyDescriptorPool" ) ); + vkResetDescriptorPool = PFN_vkResetDescriptorPool( vkGetInstanceProcAddr( instance, "vkResetDescriptorPool" ) ); + vkAllocateDescriptorSets = PFN_vkAllocateDescriptorSets( vkGetInstanceProcAddr( instance, "vkAllocateDescriptorSets" ) ); + vkFreeDescriptorSets = PFN_vkFreeDescriptorSets( vkGetInstanceProcAddr( instance, "vkFreeDescriptorSets" ) ); 
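Every row of this table follows the same pattern: look the command up by name through vkGetInstanceProcAddr and cast the result to the matching PFN_* type; entry points the instance does not expose simply come back null. Spelled out by hand for a single command (assuming the Vulkan loader is linked so the vkGetInstanceProcAddr symbol is available; the helper name is hypothetical):

    #include <vulkan/vulkan.h>

    // Illustrative fragment: what one row of the table above does, written out by hand.
    bool hasGetPhysicalDeviceProperties2( VkInstance instance )
    {
        auto pfn = reinterpret_cast<PFN_vkGetPhysicalDeviceProperties2>(
            vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties2" ) );
        return pfn != nullptr; // null when the instance was created for an API version below 1.1
    }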
+ vkUpdateDescriptorSets = PFN_vkUpdateDescriptorSets( vkGetInstanceProcAddr( instance, "vkUpdateDescriptorSets" ) ); + vkCreateFramebuffer = PFN_vkCreateFramebuffer( vkGetInstanceProcAddr( instance, "vkCreateFramebuffer" ) ); + vkDestroyFramebuffer = PFN_vkDestroyFramebuffer( vkGetInstanceProcAddr( instance, "vkDestroyFramebuffer" ) ); + vkCreateRenderPass = PFN_vkCreateRenderPass( vkGetInstanceProcAddr( instance, "vkCreateRenderPass" ) ); + vkDestroyRenderPass = PFN_vkDestroyRenderPass( vkGetInstanceProcAddr( instance, "vkDestroyRenderPass" ) ); + vkGetRenderAreaGranularity = PFN_vkGetRenderAreaGranularity( vkGetInstanceProcAddr( instance, "vkGetRenderAreaGranularity" ) ); + vkCreateCommandPool = PFN_vkCreateCommandPool( vkGetInstanceProcAddr( instance, "vkCreateCommandPool" ) ); + vkDestroyCommandPool = PFN_vkDestroyCommandPool( vkGetInstanceProcAddr( instance, "vkDestroyCommandPool" ) ); + vkResetCommandPool = PFN_vkResetCommandPool( vkGetInstanceProcAddr( instance, "vkResetCommandPool" ) ); + vkAllocateCommandBuffers = PFN_vkAllocateCommandBuffers( vkGetInstanceProcAddr( instance, "vkAllocateCommandBuffers" ) ); + vkFreeCommandBuffers = PFN_vkFreeCommandBuffers( vkGetInstanceProcAddr( instance, "vkFreeCommandBuffers" ) ); + vkBeginCommandBuffer = PFN_vkBeginCommandBuffer( vkGetInstanceProcAddr( instance, "vkBeginCommandBuffer" ) ); + vkEndCommandBuffer = PFN_vkEndCommandBuffer( vkGetInstanceProcAddr( instance, "vkEndCommandBuffer" ) ); + vkResetCommandBuffer = PFN_vkResetCommandBuffer( vkGetInstanceProcAddr( instance, "vkResetCommandBuffer" ) ); + vkCmdBindPipeline = PFN_vkCmdBindPipeline( vkGetInstanceProcAddr( instance, "vkCmdBindPipeline" ) ); + vkCmdSetViewport = PFN_vkCmdSetViewport( vkGetInstanceProcAddr( instance, "vkCmdSetViewport" ) ); + vkCmdSetScissor = PFN_vkCmdSetScissor( vkGetInstanceProcAddr( instance, "vkCmdSetScissor" ) ); + vkCmdSetLineWidth = PFN_vkCmdSetLineWidth( vkGetInstanceProcAddr( instance, "vkCmdSetLineWidth" ) ); + vkCmdSetDepthBias = PFN_vkCmdSetDepthBias( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBias" ) ); + vkCmdSetBlendConstants = PFN_vkCmdSetBlendConstants( vkGetInstanceProcAddr( instance, "vkCmdSetBlendConstants" ) ); + vkCmdSetDepthBounds = PFN_vkCmdSetDepthBounds( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBounds" ) ); + vkCmdSetStencilCompareMask = PFN_vkCmdSetStencilCompareMask( vkGetInstanceProcAddr( instance, "vkCmdSetStencilCompareMask" ) ); + vkCmdSetStencilWriteMask = PFN_vkCmdSetStencilWriteMask( vkGetInstanceProcAddr( instance, "vkCmdSetStencilWriteMask" ) ); + vkCmdSetStencilReference = PFN_vkCmdSetStencilReference( vkGetInstanceProcAddr( instance, "vkCmdSetStencilReference" ) ); + vkCmdBindDescriptorSets = PFN_vkCmdBindDescriptorSets( vkGetInstanceProcAddr( instance, "vkCmdBindDescriptorSets" ) ); + vkCmdBindIndexBuffer = PFN_vkCmdBindIndexBuffer( vkGetInstanceProcAddr( instance, "vkCmdBindIndexBuffer" ) ); + vkCmdBindVertexBuffers = PFN_vkCmdBindVertexBuffers( vkGetInstanceProcAddr( instance, "vkCmdBindVertexBuffers" ) ); + vkCmdDraw = PFN_vkCmdDraw( vkGetInstanceProcAddr( instance, "vkCmdDraw" ) ); + vkCmdDrawIndexed = PFN_vkCmdDrawIndexed( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexed" ) ); + vkCmdDrawIndirect = PFN_vkCmdDrawIndirect( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirect" ) ); + vkCmdDrawIndexedIndirect = PFN_vkCmdDrawIndexedIndirect( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirect" ) ); + vkCmdDispatch = PFN_vkCmdDispatch( vkGetInstanceProcAddr( instance, "vkCmdDispatch" ) ); + 
vkCmdDispatchIndirect = PFN_vkCmdDispatchIndirect( vkGetInstanceProcAddr( instance, "vkCmdDispatchIndirect" ) ); + vkCmdCopyBuffer = PFN_vkCmdCopyBuffer( vkGetInstanceProcAddr( instance, "vkCmdCopyBuffer" ) ); + vkCmdCopyImage = PFN_vkCmdCopyImage( vkGetInstanceProcAddr( instance, "vkCmdCopyImage" ) ); + vkCmdBlitImage = PFN_vkCmdBlitImage( vkGetInstanceProcAddr( instance, "vkCmdBlitImage" ) ); + vkCmdCopyBufferToImage = PFN_vkCmdCopyBufferToImage( vkGetInstanceProcAddr( instance, "vkCmdCopyBufferToImage" ) ); + vkCmdCopyImageToBuffer = PFN_vkCmdCopyImageToBuffer( vkGetInstanceProcAddr( instance, "vkCmdCopyImageToBuffer" ) ); + vkCmdUpdateBuffer = PFN_vkCmdUpdateBuffer( vkGetInstanceProcAddr( instance, "vkCmdUpdateBuffer" ) ); + vkCmdFillBuffer = PFN_vkCmdFillBuffer( vkGetInstanceProcAddr( instance, "vkCmdFillBuffer" ) ); + vkCmdClearColorImage = PFN_vkCmdClearColorImage( vkGetInstanceProcAddr( instance, "vkCmdClearColorImage" ) ); + vkCmdClearDepthStencilImage = PFN_vkCmdClearDepthStencilImage( vkGetInstanceProcAddr( instance, "vkCmdClearDepthStencilImage" ) ); + vkCmdClearAttachments = PFN_vkCmdClearAttachments( vkGetInstanceProcAddr( instance, "vkCmdClearAttachments" ) ); + vkCmdResolveImage = PFN_vkCmdResolveImage( vkGetInstanceProcAddr( instance, "vkCmdResolveImage" ) ); + vkCmdSetEvent = PFN_vkCmdSetEvent( vkGetInstanceProcAddr( instance, "vkCmdSetEvent" ) ); + vkCmdResetEvent = PFN_vkCmdResetEvent( vkGetInstanceProcAddr( instance, "vkCmdResetEvent" ) ); + vkCmdWaitEvents = PFN_vkCmdWaitEvents( vkGetInstanceProcAddr( instance, "vkCmdWaitEvents" ) ); + vkCmdPipelineBarrier = PFN_vkCmdPipelineBarrier( vkGetInstanceProcAddr( instance, "vkCmdPipelineBarrier" ) ); + vkCmdBeginQuery = PFN_vkCmdBeginQuery( vkGetInstanceProcAddr( instance, "vkCmdBeginQuery" ) ); + vkCmdEndQuery = PFN_vkCmdEndQuery( vkGetInstanceProcAddr( instance, "vkCmdEndQuery" ) ); + vkCmdResetQueryPool = PFN_vkCmdResetQueryPool( vkGetInstanceProcAddr( instance, "vkCmdResetQueryPool" ) ); + vkCmdWriteTimestamp = PFN_vkCmdWriteTimestamp( vkGetInstanceProcAddr( instance, "vkCmdWriteTimestamp" ) ); + vkCmdCopyQueryPoolResults = PFN_vkCmdCopyQueryPoolResults( vkGetInstanceProcAddr( instance, "vkCmdCopyQueryPoolResults" ) ); + vkCmdPushConstants = PFN_vkCmdPushConstants( vkGetInstanceProcAddr( instance, "vkCmdPushConstants" ) ); + vkCmdBeginRenderPass = PFN_vkCmdBeginRenderPass( vkGetInstanceProcAddr( instance, "vkCmdBeginRenderPass" ) ); + vkCmdNextSubpass = PFN_vkCmdNextSubpass( vkGetInstanceProcAddr( instance, "vkCmdNextSubpass" ) ); + vkCmdEndRenderPass = PFN_vkCmdEndRenderPass( vkGetInstanceProcAddr( instance, "vkCmdEndRenderPass" ) ); + vkCmdExecuteCommands = PFN_vkCmdExecuteCommands( vkGetInstanceProcAddr( instance, "vkCmdExecuteCommands" ) ); - //=== VK_NVX_image_view_handle === - vkGetImageViewHandleNVX = PFN_vkGetImageViewHandleNVX( vkGetDeviceProcAddr( device, "vkGetImageViewHandleNVX" ) ); - vkGetImageViewAddressNVX = PFN_vkGetImageViewAddressNVX( vkGetDeviceProcAddr( device, "vkGetImageViewAddressNVX" ) ); + //=== VK_VERSION_1_1 === + vkBindBufferMemory2 = PFN_vkBindBufferMemory2( vkGetInstanceProcAddr( instance, "vkBindBufferMemory2" ) ); + vkBindImageMemory2 = PFN_vkBindImageMemory2( vkGetInstanceProcAddr( instance, "vkBindImageMemory2" ) ); + vkGetDeviceGroupPeerMemoryFeatures = PFN_vkGetDeviceGroupPeerMemoryFeatures( vkGetInstanceProcAddr( instance, "vkGetDeviceGroupPeerMemoryFeatures" ) ); + vkCmdSetDeviceMask = PFN_vkCmdSetDeviceMask( vkGetInstanceProcAddr( instance, "vkCmdSetDeviceMask" ) ); + 
vkCmdDispatchBase = PFN_vkCmdDispatchBase( vkGetInstanceProcAddr( instance, "vkCmdDispatchBase" ) ); + vkEnumeratePhysicalDeviceGroups = PFN_vkEnumeratePhysicalDeviceGroups( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceGroups" ) ); + vkGetImageMemoryRequirements2 = PFN_vkGetImageMemoryRequirements2( vkGetInstanceProcAddr( instance, "vkGetImageMemoryRequirements2" ) ); + vkGetBufferMemoryRequirements2 = PFN_vkGetBufferMemoryRequirements2( vkGetInstanceProcAddr( instance, "vkGetBufferMemoryRequirements2" ) ); + vkGetImageSparseMemoryRequirements2 = + PFN_vkGetImageSparseMemoryRequirements2( vkGetInstanceProcAddr( instance, "vkGetImageSparseMemoryRequirements2" ) ); + vkGetPhysicalDeviceFeatures2 = PFN_vkGetPhysicalDeviceFeatures2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures2" ) ); + vkGetPhysicalDeviceProperties2 = PFN_vkGetPhysicalDeviceProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties2" ) ); + vkGetPhysicalDeviceFormatProperties2 = + PFN_vkGetPhysicalDeviceFormatProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties2" ) ); + vkGetPhysicalDeviceImageFormatProperties2 = + PFN_vkGetPhysicalDeviceImageFormatProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties2" ) ); + vkGetPhysicalDeviceQueueFamilyProperties2 = + PFN_vkGetPhysicalDeviceQueueFamilyProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties2" ) ); + vkGetPhysicalDeviceMemoryProperties2 = + PFN_vkGetPhysicalDeviceMemoryProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties2" ) ); + vkGetPhysicalDeviceSparseImageFormatProperties2 = + PFN_vkGetPhysicalDeviceSparseImageFormatProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties2" ) ); + vkTrimCommandPool = PFN_vkTrimCommandPool( vkGetInstanceProcAddr( instance, "vkTrimCommandPool" ) ); + vkGetDeviceQueue2 = PFN_vkGetDeviceQueue2( vkGetInstanceProcAddr( instance, "vkGetDeviceQueue2" ) ); + vkCreateSamplerYcbcrConversion = PFN_vkCreateSamplerYcbcrConversion( vkGetInstanceProcAddr( instance, "vkCreateSamplerYcbcrConversion" ) ); + vkDestroySamplerYcbcrConversion = PFN_vkDestroySamplerYcbcrConversion( vkGetInstanceProcAddr( instance, "vkDestroySamplerYcbcrConversion" ) ); + vkCreateDescriptorUpdateTemplate = PFN_vkCreateDescriptorUpdateTemplate( vkGetInstanceProcAddr( instance, "vkCreateDescriptorUpdateTemplate" ) ); + vkDestroyDescriptorUpdateTemplate = PFN_vkDestroyDescriptorUpdateTemplate( vkGetInstanceProcAddr( instance, "vkDestroyDescriptorUpdateTemplate" ) ); + vkUpdateDescriptorSetWithTemplate = PFN_vkUpdateDescriptorSetWithTemplate( vkGetInstanceProcAddr( instance, "vkUpdateDescriptorSetWithTemplate" ) ); + vkGetPhysicalDeviceExternalBufferProperties = + PFN_vkGetPhysicalDeviceExternalBufferProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalBufferProperties" ) ); + vkGetPhysicalDeviceExternalFenceProperties = + PFN_vkGetPhysicalDeviceExternalFenceProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalFenceProperties" ) ); + vkGetPhysicalDeviceExternalSemaphoreProperties = + PFN_vkGetPhysicalDeviceExternalSemaphoreProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalSemaphoreProperties" ) ); + vkGetDescriptorSetLayoutSupport = PFN_vkGetDescriptorSetLayoutSupport( vkGetInstanceProcAddr( instance, "vkGetDescriptorSetLayoutSupport" ) ); - //=== VK_AMD_draw_indirect_count === - vkCmdDrawIndirectCountAMD = 
PFN_vkCmdDrawIndirectCountAMD( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCountAMD" ) ); - if ( !vkCmdDrawIndirectCount ) - vkCmdDrawIndirectCount = vkCmdDrawIndirectCountAMD; - vkCmdDrawIndexedIndirectCountAMD = PFN_vkCmdDrawIndexedIndirectCountAMD( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCountAMD" ) ); - if ( !vkCmdDrawIndexedIndirectCount ) - vkCmdDrawIndexedIndirectCount = vkCmdDrawIndexedIndirectCountAMD; + //=== VK_VERSION_1_2 === + vkCmdDrawIndirectCount = PFN_vkCmdDrawIndirectCount( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectCount" ) ); + vkCmdDrawIndexedIndirectCount = PFN_vkCmdDrawIndexedIndirectCount( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirectCount" ) ); + vkCreateRenderPass2 = PFN_vkCreateRenderPass2( vkGetInstanceProcAddr( instance, "vkCreateRenderPass2" ) ); + vkCmdBeginRenderPass2 = PFN_vkCmdBeginRenderPass2( vkGetInstanceProcAddr( instance, "vkCmdBeginRenderPass2" ) ); + vkCmdNextSubpass2 = PFN_vkCmdNextSubpass2( vkGetInstanceProcAddr( instance, "vkCmdNextSubpass2" ) ); + vkCmdEndRenderPass2 = PFN_vkCmdEndRenderPass2( vkGetInstanceProcAddr( instance, "vkCmdEndRenderPass2" ) ); + vkResetQueryPool = PFN_vkResetQueryPool( vkGetInstanceProcAddr( instance, "vkResetQueryPool" ) ); + vkGetSemaphoreCounterValue = PFN_vkGetSemaphoreCounterValue( vkGetInstanceProcAddr( instance, "vkGetSemaphoreCounterValue" ) ); + vkWaitSemaphores = PFN_vkWaitSemaphores( vkGetInstanceProcAddr( instance, "vkWaitSemaphores" ) ); + vkSignalSemaphore = PFN_vkSignalSemaphore( vkGetInstanceProcAddr( instance, "vkSignalSemaphore" ) ); + vkGetBufferDeviceAddress = PFN_vkGetBufferDeviceAddress( vkGetInstanceProcAddr( instance, "vkGetBufferDeviceAddress" ) ); + vkGetBufferOpaqueCaptureAddress = PFN_vkGetBufferOpaqueCaptureAddress( vkGetInstanceProcAddr( instance, "vkGetBufferOpaqueCaptureAddress" ) ); + vkGetDeviceMemoryOpaqueCaptureAddress = + PFN_vkGetDeviceMemoryOpaqueCaptureAddress( vkGetInstanceProcAddr( instance, "vkGetDeviceMemoryOpaqueCaptureAddress" ) ); - //=== VK_AMD_shader_info === - vkGetShaderInfoAMD = PFN_vkGetShaderInfoAMD( vkGetDeviceProcAddr( device, "vkGetShaderInfoAMD" ) ); + //=== VK_VERSION_1_3 === + vkGetPhysicalDeviceToolProperties = PFN_vkGetPhysicalDeviceToolProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceToolProperties" ) ); + vkCreatePrivateDataSlot = PFN_vkCreatePrivateDataSlot( vkGetInstanceProcAddr( instance, "vkCreatePrivateDataSlot" ) ); + vkDestroyPrivateDataSlot = PFN_vkDestroyPrivateDataSlot( vkGetInstanceProcAddr( instance, "vkDestroyPrivateDataSlot" ) ); + vkSetPrivateData = PFN_vkSetPrivateData( vkGetInstanceProcAddr( instance, "vkSetPrivateData" ) ); + vkGetPrivateData = PFN_vkGetPrivateData( vkGetInstanceProcAddr( instance, "vkGetPrivateData" ) ); + vkCmdSetEvent2 = PFN_vkCmdSetEvent2( vkGetInstanceProcAddr( instance, "vkCmdSetEvent2" ) ); + vkCmdResetEvent2 = PFN_vkCmdResetEvent2( vkGetInstanceProcAddr( instance, "vkCmdResetEvent2" ) ); + vkCmdWaitEvents2 = PFN_vkCmdWaitEvents2( vkGetInstanceProcAddr( instance, "vkCmdWaitEvents2" ) ); + vkCmdPipelineBarrier2 = PFN_vkCmdPipelineBarrier2( vkGetInstanceProcAddr( instance, "vkCmdPipelineBarrier2" ) ); + vkCmdWriteTimestamp2 = PFN_vkCmdWriteTimestamp2( vkGetInstanceProcAddr( instance, "vkCmdWriteTimestamp2" ) ); + vkQueueSubmit2 = PFN_vkQueueSubmit2( vkGetInstanceProcAddr( instance, "vkQueueSubmit2" ) ); + vkCmdCopyBuffer2 = PFN_vkCmdCopyBuffer2( vkGetInstanceProcAddr( instance, "vkCmdCopyBuffer2" ) ); + vkCmdCopyImage2 = PFN_vkCmdCopyImage2( vkGetInstanceProcAddr( 
instance, "vkCmdCopyImage2" ) ); + vkCmdCopyBufferToImage2 = PFN_vkCmdCopyBufferToImage2( vkGetInstanceProcAddr( instance, "vkCmdCopyBufferToImage2" ) ); + vkCmdCopyImageToBuffer2 = PFN_vkCmdCopyImageToBuffer2( vkGetInstanceProcAddr( instance, "vkCmdCopyImageToBuffer2" ) ); + vkCmdBlitImage2 = PFN_vkCmdBlitImage2( vkGetInstanceProcAddr( instance, "vkCmdBlitImage2" ) ); + vkCmdResolveImage2 = PFN_vkCmdResolveImage2( vkGetInstanceProcAddr( instance, "vkCmdResolveImage2" ) ); + vkCmdBeginRendering = PFN_vkCmdBeginRendering( vkGetInstanceProcAddr( instance, "vkCmdBeginRendering" ) ); + vkCmdEndRendering = PFN_vkCmdEndRendering( vkGetInstanceProcAddr( instance, "vkCmdEndRendering" ) ); + vkCmdSetCullMode = PFN_vkCmdSetCullMode( vkGetInstanceProcAddr( instance, "vkCmdSetCullMode" ) ); + vkCmdSetFrontFace = PFN_vkCmdSetFrontFace( vkGetInstanceProcAddr( instance, "vkCmdSetFrontFace" ) ); + vkCmdSetPrimitiveTopology = PFN_vkCmdSetPrimitiveTopology( vkGetInstanceProcAddr( instance, "vkCmdSetPrimitiveTopology" ) ); + vkCmdSetViewportWithCount = PFN_vkCmdSetViewportWithCount( vkGetInstanceProcAddr( instance, "vkCmdSetViewportWithCount" ) ); + vkCmdSetScissorWithCount = PFN_vkCmdSetScissorWithCount( vkGetInstanceProcAddr( instance, "vkCmdSetScissorWithCount" ) ); + vkCmdBindVertexBuffers2 = PFN_vkCmdBindVertexBuffers2( vkGetInstanceProcAddr( instance, "vkCmdBindVertexBuffers2" ) ); + vkCmdSetDepthTestEnable = PFN_vkCmdSetDepthTestEnable( vkGetInstanceProcAddr( instance, "vkCmdSetDepthTestEnable" ) ); + vkCmdSetDepthWriteEnable = PFN_vkCmdSetDepthWriteEnable( vkGetInstanceProcAddr( instance, "vkCmdSetDepthWriteEnable" ) ); + vkCmdSetDepthCompareOp = PFN_vkCmdSetDepthCompareOp( vkGetInstanceProcAddr( instance, "vkCmdSetDepthCompareOp" ) ); + vkCmdSetDepthBoundsTestEnable = PFN_vkCmdSetDepthBoundsTestEnable( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBoundsTestEnable" ) ); + vkCmdSetStencilTestEnable = PFN_vkCmdSetStencilTestEnable( vkGetInstanceProcAddr( instance, "vkCmdSetStencilTestEnable" ) ); + vkCmdSetStencilOp = PFN_vkCmdSetStencilOp( vkGetInstanceProcAddr( instance, "vkCmdSetStencilOp" ) ); + vkCmdSetRasterizerDiscardEnable = PFN_vkCmdSetRasterizerDiscardEnable( vkGetInstanceProcAddr( instance, "vkCmdSetRasterizerDiscardEnable" ) ); + vkCmdSetDepthBiasEnable = PFN_vkCmdSetDepthBiasEnable( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBiasEnable" ) ); + vkCmdSetPrimitiveRestartEnable = PFN_vkCmdSetPrimitiveRestartEnable( vkGetInstanceProcAddr( instance, "vkCmdSetPrimitiveRestartEnable" ) ); + vkGetDeviceBufferMemoryRequirements = + PFN_vkGetDeviceBufferMemoryRequirements( vkGetInstanceProcAddr( instance, "vkGetDeviceBufferMemoryRequirements" ) ); + vkGetDeviceImageMemoryRequirements = PFN_vkGetDeviceImageMemoryRequirements( vkGetInstanceProcAddr( instance, "vkGetDeviceImageMemoryRequirements" ) ); + vkGetDeviceImageSparseMemoryRequirements = + PFN_vkGetDeviceImageSparseMemoryRequirements( vkGetInstanceProcAddr( instance, "vkGetDeviceImageSparseMemoryRequirements" ) ); - //=== VK_KHR_dynamic_rendering === - vkCmdBeginRenderingKHR = PFN_vkCmdBeginRenderingKHR( vkGetDeviceProcAddr( device, "vkCmdBeginRenderingKHR" ) ); - if ( !vkCmdBeginRendering ) - vkCmdBeginRendering = vkCmdBeginRenderingKHR; - vkCmdEndRenderingKHR = PFN_vkCmdEndRenderingKHR( vkGetDeviceProcAddr( device, "vkCmdEndRenderingKHR" ) ); - if ( !vkCmdEndRendering ) - vkCmdEndRendering = vkCmdEndRenderingKHR; + //=== VK_KHR_surface === + vkDestroySurfaceKHR = PFN_vkDestroySurfaceKHR( vkGetInstanceProcAddr( instance, 
"vkDestroySurfaceKHR" ) ); + vkGetPhysicalDeviceSurfaceSupportKHR = + PFN_vkGetPhysicalDeviceSurfaceSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceSupportKHR" ) ); + vkGetPhysicalDeviceSurfaceCapabilitiesKHR = + PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilitiesKHR" ) ); + vkGetPhysicalDeviceSurfaceFormatsKHR = + PFN_vkGetPhysicalDeviceSurfaceFormatsKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceFormatsKHR" ) ); + vkGetPhysicalDeviceSurfacePresentModesKHR = + PFN_vkGetPhysicalDeviceSurfacePresentModesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfacePresentModesKHR" ) ); -#if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_NV_external_memory_win32 === - vkGetMemoryWin32HandleNV = PFN_vkGetMemoryWin32HandleNV( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandleNV" ) ); -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + //=== VK_KHR_swapchain === + vkCreateSwapchainKHR = PFN_vkCreateSwapchainKHR( vkGetInstanceProcAddr( instance, "vkCreateSwapchainKHR" ) ); + vkDestroySwapchainKHR = PFN_vkDestroySwapchainKHR( vkGetInstanceProcAddr( instance, "vkDestroySwapchainKHR" ) ); + vkGetSwapchainImagesKHR = PFN_vkGetSwapchainImagesKHR( vkGetInstanceProcAddr( instance, "vkGetSwapchainImagesKHR" ) ); + vkAcquireNextImageKHR = PFN_vkAcquireNextImageKHR( vkGetInstanceProcAddr( instance, "vkAcquireNextImageKHR" ) ); + vkQueuePresentKHR = PFN_vkQueuePresentKHR( vkGetInstanceProcAddr( instance, "vkQueuePresentKHR" ) ); + vkGetDeviceGroupPresentCapabilitiesKHR = + PFN_vkGetDeviceGroupPresentCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceGroupPresentCapabilitiesKHR" ) ); + vkGetDeviceGroupSurfacePresentModesKHR = + PFN_vkGetDeviceGroupSurfacePresentModesKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceGroupSurfacePresentModesKHR" ) ); + vkGetPhysicalDevicePresentRectanglesKHR = + PFN_vkGetPhysicalDevicePresentRectanglesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDevicePresentRectanglesKHR" ) ); + vkAcquireNextImage2KHR = PFN_vkAcquireNextImage2KHR( vkGetInstanceProcAddr( instance, "vkAcquireNextImage2KHR" ) ); - //=== VK_KHR_device_group === - vkGetDeviceGroupPeerMemoryFeaturesKHR = - PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR( vkGetDeviceProcAddr( device, "vkGetDeviceGroupPeerMemoryFeaturesKHR" ) ); - if ( !vkGetDeviceGroupPeerMemoryFeatures ) - vkGetDeviceGroupPeerMemoryFeatures = vkGetDeviceGroupPeerMemoryFeaturesKHR; - vkCmdSetDeviceMaskKHR = PFN_vkCmdSetDeviceMaskKHR( vkGetDeviceProcAddr( device, "vkCmdSetDeviceMaskKHR" ) ); - if ( !vkCmdSetDeviceMask ) - vkCmdSetDeviceMask = vkCmdSetDeviceMaskKHR; - vkCmdDispatchBaseKHR = PFN_vkCmdDispatchBaseKHR( vkGetDeviceProcAddr( device, "vkCmdDispatchBaseKHR" ) ); - if ( !vkCmdDispatchBase ) - vkCmdDispatchBase = vkCmdDispatchBaseKHR; + //=== VK_KHR_display === + vkGetPhysicalDeviceDisplayPropertiesKHR = + PFN_vkGetPhysicalDeviceDisplayPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPropertiesKHR" ) ); + vkGetPhysicalDeviceDisplayPlanePropertiesKHR = + PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPlanePropertiesKHR" ) ); + vkGetDisplayPlaneSupportedDisplaysKHR = + PFN_vkGetDisplayPlaneSupportedDisplaysKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneSupportedDisplaysKHR" ) ); + vkGetDisplayModePropertiesKHR = PFN_vkGetDisplayModePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayModePropertiesKHR" ) ); + 
vkCreateDisplayModeKHR = PFN_vkCreateDisplayModeKHR( vkGetInstanceProcAddr( instance, "vkCreateDisplayModeKHR" ) ); + vkGetDisplayPlaneCapabilitiesKHR = PFN_vkGetDisplayPlaneCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneCapabilitiesKHR" ) ); + vkCreateDisplayPlaneSurfaceKHR = PFN_vkCreateDisplayPlaneSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateDisplayPlaneSurfaceKHR" ) ); - //=== VK_KHR_maintenance1 === - vkTrimCommandPoolKHR = PFN_vkTrimCommandPoolKHR( vkGetDeviceProcAddr( device, "vkTrimCommandPoolKHR" ) ); - if ( !vkTrimCommandPool ) - vkTrimCommandPool = vkTrimCommandPoolKHR; + //=== VK_KHR_display_swapchain === + vkCreateSharedSwapchainsKHR = PFN_vkCreateSharedSwapchainsKHR( vkGetInstanceProcAddr( instance, "vkCreateSharedSwapchainsKHR" ) ); -#if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_KHR_external_memory_win32 === - vkGetMemoryWin32HandleKHR = PFN_vkGetMemoryWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandleKHR" ) ); - vkGetMemoryWin32HandlePropertiesKHR = PFN_vkGetMemoryWin32HandlePropertiesKHR( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandlePropertiesKHR" ) ); -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ +#if defined( VK_USE_PLATFORM_XLIB_KHR ) + //=== VK_KHR_xlib_surface === + vkCreateXlibSurfaceKHR = PFN_vkCreateXlibSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateXlibSurfaceKHR" ) ); + vkGetPhysicalDeviceXlibPresentationSupportKHR = + PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceXlibPresentationSupportKHR" ) ); +#endif /*VK_USE_PLATFORM_XLIB_KHR*/ - //=== VK_KHR_external_memory_fd === - vkGetMemoryFdKHR = PFN_vkGetMemoryFdKHR( vkGetDeviceProcAddr( device, "vkGetMemoryFdKHR" ) ); - vkGetMemoryFdPropertiesKHR = PFN_vkGetMemoryFdPropertiesKHR( vkGetDeviceProcAddr( device, "vkGetMemoryFdPropertiesKHR" ) ); +#if defined( VK_USE_PLATFORM_XCB_KHR ) + //=== VK_KHR_xcb_surface === + vkCreateXcbSurfaceKHR = PFN_vkCreateXcbSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateXcbSurfaceKHR" ) ); + vkGetPhysicalDeviceXcbPresentationSupportKHR = + PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceXcbPresentationSupportKHR" ) ); +#endif /*VK_USE_PLATFORM_XCB_KHR*/ -#if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_KHR_external_semaphore_win32 === - vkImportSemaphoreWin32HandleKHR = PFN_vkImportSemaphoreWin32HandleKHR( vkGetDeviceProcAddr( device, "vkImportSemaphoreWin32HandleKHR" ) ); - vkGetSemaphoreWin32HandleKHR = PFN_vkGetSemaphoreWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreWin32HandleKHR" ) ); -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - //=== VK_KHR_external_semaphore_fd === - vkImportSemaphoreFdKHR = PFN_vkImportSemaphoreFdKHR( vkGetDeviceProcAddr( device, "vkImportSemaphoreFdKHR" ) ); - vkGetSemaphoreFdKHR = PFN_vkGetSemaphoreFdKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreFdKHR" ) ); - - //=== VK_KHR_push_descriptor === - vkCmdPushDescriptorSetKHR = PFN_vkCmdPushDescriptorSetKHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetKHR" ) ); - vkCmdPushDescriptorSetWithTemplateKHR = - PFN_vkCmdPushDescriptorSetWithTemplateKHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetWithTemplateKHR" ) ); - - //=== VK_EXT_conditional_rendering === - vkCmdBeginConditionalRenderingEXT = PFN_vkCmdBeginConditionalRenderingEXT( vkGetDeviceProcAddr( device, "vkCmdBeginConditionalRenderingEXT" ) ); - vkCmdEndConditionalRenderingEXT = PFN_vkCmdEndConditionalRenderingEXT( vkGetDeviceProcAddr( device, 
"vkCmdEndConditionalRenderingEXT" ) ); - - //=== VK_KHR_descriptor_update_template === - vkCreateDescriptorUpdateTemplateKHR = PFN_vkCreateDescriptorUpdateTemplateKHR( vkGetDeviceProcAddr( device, "vkCreateDescriptorUpdateTemplateKHR" ) ); - if ( !vkCreateDescriptorUpdateTemplate ) - vkCreateDescriptorUpdateTemplate = vkCreateDescriptorUpdateTemplateKHR; - vkDestroyDescriptorUpdateTemplateKHR = PFN_vkDestroyDescriptorUpdateTemplateKHR( vkGetDeviceProcAddr( device, "vkDestroyDescriptorUpdateTemplateKHR" ) ); - if ( !vkDestroyDescriptorUpdateTemplate ) - vkDestroyDescriptorUpdateTemplate = vkDestroyDescriptorUpdateTemplateKHR; - vkUpdateDescriptorSetWithTemplateKHR = PFN_vkUpdateDescriptorSetWithTemplateKHR( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSetWithTemplateKHR" ) ); - if ( !vkUpdateDescriptorSetWithTemplate ) - vkUpdateDescriptorSetWithTemplate = vkUpdateDescriptorSetWithTemplateKHR; - - //=== VK_NV_clip_space_w_scaling === - vkCmdSetViewportWScalingNV = PFN_vkCmdSetViewportWScalingNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportWScalingNV" ) ); - - //=== VK_EXT_display_control === - vkDisplayPowerControlEXT = PFN_vkDisplayPowerControlEXT( vkGetDeviceProcAddr( device, "vkDisplayPowerControlEXT" ) ); - vkRegisterDeviceEventEXT = PFN_vkRegisterDeviceEventEXT( vkGetDeviceProcAddr( device, "vkRegisterDeviceEventEXT" ) ); - vkRegisterDisplayEventEXT = PFN_vkRegisterDisplayEventEXT( vkGetDeviceProcAddr( device, "vkRegisterDisplayEventEXT" ) ); - vkGetSwapchainCounterEXT = PFN_vkGetSwapchainCounterEXT( vkGetDeviceProcAddr( device, "vkGetSwapchainCounterEXT" ) ); - - //=== VK_GOOGLE_display_timing === - vkGetRefreshCycleDurationGOOGLE = PFN_vkGetRefreshCycleDurationGOOGLE( vkGetDeviceProcAddr( device, "vkGetRefreshCycleDurationGOOGLE" ) ); - vkGetPastPresentationTimingGOOGLE = PFN_vkGetPastPresentationTimingGOOGLE( vkGetDeviceProcAddr( device, "vkGetPastPresentationTimingGOOGLE" ) ); - - //=== VK_EXT_discard_rectangles === - vkCmdSetDiscardRectangleEXT = PFN_vkCmdSetDiscardRectangleEXT( vkGetDeviceProcAddr( device, "vkCmdSetDiscardRectangleEXT" ) ); - vkCmdSetDiscardRectangleEnableEXT = PFN_vkCmdSetDiscardRectangleEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDiscardRectangleEnableEXT" ) ); - vkCmdSetDiscardRectangleModeEXT = PFN_vkCmdSetDiscardRectangleModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetDiscardRectangleModeEXT" ) ); - - //=== VK_EXT_hdr_metadata === - vkSetHdrMetadataEXT = PFN_vkSetHdrMetadataEXT( vkGetDeviceProcAddr( device, "vkSetHdrMetadataEXT" ) ); - - //=== VK_KHR_create_renderpass2 === - vkCreateRenderPass2KHR = PFN_vkCreateRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCreateRenderPass2KHR" ) ); - if ( !vkCreateRenderPass2 ) - vkCreateRenderPass2 = vkCreateRenderPass2KHR; - vkCmdBeginRenderPass2KHR = PFN_vkCmdBeginRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass2KHR" ) ); - if ( !vkCmdBeginRenderPass2 ) - vkCmdBeginRenderPass2 = vkCmdBeginRenderPass2KHR; - vkCmdNextSubpass2KHR = PFN_vkCmdNextSubpass2KHR( vkGetDeviceProcAddr( device, "vkCmdNextSubpass2KHR" ) ); - if ( !vkCmdNextSubpass2 ) - vkCmdNextSubpass2 = vkCmdNextSubpass2KHR; - vkCmdEndRenderPass2KHR = PFN_vkCmdEndRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass2KHR" ) ); - if ( !vkCmdEndRenderPass2 ) - vkCmdEndRenderPass2 = vkCmdEndRenderPass2KHR; - - //=== VK_KHR_shared_presentable_image === - vkGetSwapchainStatusKHR = PFN_vkGetSwapchainStatusKHR( vkGetDeviceProcAddr( device, "vkGetSwapchainStatusKHR" ) ); - -#if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== 
VK_KHR_external_fence_win32 === - vkImportFenceWin32HandleKHR = PFN_vkImportFenceWin32HandleKHR( vkGetDeviceProcAddr( device, "vkImportFenceWin32HandleKHR" ) ); - vkGetFenceWin32HandleKHR = PFN_vkGetFenceWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetFenceWin32HandleKHR" ) ); -#endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - //=== VK_KHR_external_fence_fd === - vkImportFenceFdKHR = PFN_vkImportFenceFdKHR( vkGetDeviceProcAddr( device, "vkImportFenceFdKHR" ) ); - vkGetFenceFdKHR = PFN_vkGetFenceFdKHR( vkGetDeviceProcAddr( device, "vkGetFenceFdKHR" ) ); - - //=== VK_KHR_performance_query === - vkAcquireProfilingLockKHR = PFN_vkAcquireProfilingLockKHR( vkGetDeviceProcAddr( device, "vkAcquireProfilingLockKHR" ) ); - vkReleaseProfilingLockKHR = PFN_vkReleaseProfilingLockKHR( vkGetDeviceProcAddr( device, "vkReleaseProfilingLockKHR" ) ); - - //=== VK_EXT_debug_utils === - vkSetDebugUtilsObjectNameEXT = PFN_vkSetDebugUtilsObjectNameEXT( vkGetDeviceProcAddr( device, "vkSetDebugUtilsObjectNameEXT" ) ); - vkSetDebugUtilsObjectTagEXT = PFN_vkSetDebugUtilsObjectTagEXT( vkGetDeviceProcAddr( device, "vkSetDebugUtilsObjectTagEXT" ) ); - vkQueueBeginDebugUtilsLabelEXT = PFN_vkQueueBeginDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueBeginDebugUtilsLabelEXT" ) ); - vkQueueEndDebugUtilsLabelEXT = PFN_vkQueueEndDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueEndDebugUtilsLabelEXT" ) ); - vkQueueInsertDebugUtilsLabelEXT = PFN_vkQueueInsertDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueInsertDebugUtilsLabelEXT" ) ); - vkCmdBeginDebugUtilsLabelEXT = PFN_vkCmdBeginDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdBeginDebugUtilsLabelEXT" ) ); - vkCmdEndDebugUtilsLabelEXT = PFN_vkCmdEndDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdEndDebugUtilsLabelEXT" ) ); - vkCmdInsertDebugUtilsLabelEXT = PFN_vkCmdInsertDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdInsertDebugUtilsLabelEXT" ) ); +#if defined( VK_USE_PLATFORM_WAYLAND_KHR ) + //=== VK_KHR_wayland_surface === + vkCreateWaylandSurfaceKHR = PFN_vkCreateWaylandSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateWaylandSurfaceKHR" ) ); + vkGetPhysicalDeviceWaylandPresentationSupportKHR = + PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceWaylandPresentationSupportKHR" ) ); +#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ #if defined( VK_USE_PLATFORM_ANDROID_KHR ) - //=== VK_ANDROID_external_memory_android_hardware_buffer === - vkGetAndroidHardwareBufferPropertiesANDROID = - PFN_vkGetAndroidHardwareBufferPropertiesANDROID( vkGetDeviceProcAddr( device, "vkGetAndroidHardwareBufferPropertiesANDROID" ) ); - vkGetMemoryAndroidHardwareBufferANDROID = - PFN_vkGetMemoryAndroidHardwareBufferANDROID( vkGetDeviceProcAddr( device, "vkGetMemoryAndroidHardwareBufferANDROID" ) ); + //=== VK_KHR_android_surface === + vkCreateAndroidSurfaceKHR = PFN_vkCreateAndroidSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateAndroidSurfaceKHR" ) ); +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_win32_surface === + vkCreateWin32SurfaceKHR = PFN_vkCreateWin32SurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateWin32SurfaceKHR" ) ); + vkGetPhysicalDeviceWin32PresentationSupportKHR = + PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceWin32PresentationSupportKHR" ) ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_EXT_debug_report === + vkCreateDebugReportCallbackEXT = 
PFN_vkCreateDebugReportCallbackEXT( vkGetInstanceProcAddr( instance, "vkCreateDebugReportCallbackEXT" ) ); + vkDestroyDebugReportCallbackEXT = PFN_vkDestroyDebugReportCallbackEXT( vkGetInstanceProcAddr( instance, "vkDestroyDebugReportCallbackEXT" ) ); + vkDebugReportMessageEXT = PFN_vkDebugReportMessageEXT( vkGetInstanceProcAddr( instance, "vkDebugReportMessageEXT" ) ); + + //=== VK_EXT_debug_marker === + vkDebugMarkerSetObjectTagEXT = PFN_vkDebugMarkerSetObjectTagEXT( vkGetInstanceProcAddr( instance, "vkDebugMarkerSetObjectTagEXT" ) ); + vkDebugMarkerSetObjectNameEXT = PFN_vkDebugMarkerSetObjectNameEXT( vkGetInstanceProcAddr( instance, "vkDebugMarkerSetObjectNameEXT" ) ); + vkCmdDebugMarkerBeginEXT = PFN_vkCmdDebugMarkerBeginEXT( vkGetInstanceProcAddr( instance, "vkCmdDebugMarkerBeginEXT" ) ); + vkCmdDebugMarkerEndEXT = PFN_vkCmdDebugMarkerEndEXT( vkGetInstanceProcAddr( instance, "vkCmdDebugMarkerEndEXT" ) ); + vkCmdDebugMarkerInsertEXT = PFN_vkCmdDebugMarkerInsertEXT( vkGetInstanceProcAddr( instance, "vkCmdDebugMarkerInsertEXT" ) ); + + //=== VK_KHR_video_queue === + vkGetPhysicalDeviceVideoCapabilitiesKHR = + PFN_vkGetPhysicalDeviceVideoCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceVideoCapabilitiesKHR" ) ); + vkGetPhysicalDeviceVideoFormatPropertiesKHR = + PFN_vkGetPhysicalDeviceVideoFormatPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceVideoFormatPropertiesKHR" ) ); + vkCreateVideoSessionKHR = PFN_vkCreateVideoSessionKHR( vkGetInstanceProcAddr( instance, "vkCreateVideoSessionKHR" ) ); + vkDestroyVideoSessionKHR = PFN_vkDestroyVideoSessionKHR( vkGetInstanceProcAddr( instance, "vkDestroyVideoSessionKHR" ) ); + vkGetVideoSessionMemoryRequirementsKHR = + PFN_vkGetVideoSessionMemoryRequirementsKHR( vkGetInstanceProcAddr( instance, "vkGetVideoSessionMemoryRequirementsKHR" ) ); + vkBindVideoSessionMemoryKHR = PFN_vkBindVideoSessionMemoryKHR( vkGetInstanceProcAddr( instance, "vkBindVideoSessionMemoryKHR" ) ); + vkCreateVideoSessionParametersKHR = PFN_vkCreateVideoSessionParametersKHR( vkGetInstanceProcAddr( instance, "vkCreateVideoSessionParametersKHR" ) ); + vkUpdateVideoSessionParametersKHR = PFN_vkUpdateVideoSessionParametersKHR( vkGetInstanceProcAddr( instance, "vkUpdateVideoSessionParametersKHR" ) ); + vkDestroyVideoSessionParametersKHR = PFN_vkDestroyVideoSessionParametersKHR( vkGetInstanceProcAddr( instance, "vkDestroyVideoSessionParametersKHR" ) ); + vkCmdBeginVideoCodingKHR = PFN_vkCmdBeginVideoCodingKHR( vkGetInstanceProcAddr( instance, "vkCmdBeginVideoCodingKHR" ) ); + vkCmdEndVideoCodingKHR = PFN_vkCmdEndVideoCodingKHR( vkGetInstanceProcAddr( instance, "vkCmdEndVideoCodingKHR" ) ); + vkCmdControlVideoCodingKHR = PFN_vkCmdControlVideoCodingKHR( vkGetInstanceProcAddr( instance, "vkCmdControlVideoCodingKHR" ) ); + + //=== VK_KHR_video_decode_queue === + vkCmdDecodeVideoKHR = PFN_vkCmdDecodeVideoKHR( vkGetInstanceProcAddr( instance, "vkCmdDecodeVideoKHR" ) ); + + //=== VK_EXT_transform_feedback === + vkCmdBindTransformFeedbackBuffersEXT = + PFN_vkCmdBindTransformFeedbackBuffersEXT( vkGetInstanceProcAddr( instance, "vkCmdBindTransformFeedbackBuffersEXT" ) ); + vkCmdBeginTransformFeedbackEXT = PFN_vkCmdBeginTransformFeedbackEXT( vkGetInstanceProcAddr( instance, "vkCmdBeginTransformFeedbackEXT" ) ); + vkCmdEndTransformFeedbackEXT = PFN_vkCmdEndTransformFeedbackEXT( vkGetInstanceProcAddr( instance, "vkCmdEndTransformFeedbackEXT" ) ); + vkCmdBeginQueryIndexedEXT = PFN_vkCmdBeginQueryIndexedEXT( vkGetInstanceProcAddr( instance, 
"vkCmdBeginQueryIndexedEXT" ) ); + vkCmdEndQueryIndexedEXT = PFN_vkCmdEndQueryIndexedEXT( vkGetInstanceProcAddr( instance, "vkCmdEndQueryIndexedEXT" ) ); + vkCmdDrawIndirectByteCountEXT = PFN_vkCmdDrawIndirectByteCountEXT( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectByteCountEXT" ) ); + + //=== VK_NVX_binary_import === + vkCreateCuModuleNVX = PFN_vkCreateCuModuleNVX( vkGetInstanceProcAddr( instance, "vkCreateCuModuleNVX" ) ); + vkCreateCuFunctionNVX = PFN_vkCreateCuFunctionNVX( vkGetInstanceProcAddr( instance, "vkCreateCuFunctionNVX" ) ); + vkDestroyCuModuleNVX = PFN_vkDestroyCuModuleNVX( vkGetInstanceProcAddr( instance, "vkDestroyCuModuleNVX" ) ); + vkDestroyCuFunctionNVX = PFN_vkDestroyCuFunctionNVX( vkGetInstanceProcAddr( instance, "vkDestroyCuFunctionNVX" ) ); + vkCmdCuLaunchKernelNVX = PFN_vkCmdCuLaunchKernelNVX( vkGetInstanceProcAddr( instance, "vkCmdCuLaunchKernelNVX" ) ); + + //=== VK_NVX_image_view_handle === + vkGetImageViewHandleNVX = PFN_vkGetImageViewHandleNVX( vkGetInstanceProcAddr( instance, "vkGetImageViewHandleNVX" ) ); + vkGetImageViewAddressNVX = PFN_vkGetImageViewAddressNVX( vkGetInstanceProcAddr( instance, "vkGetImageViewAddressNVX" ) ); + + //=== VK_AMD_draw_indirect_count === + vkCmdDrawIndirectCountAMD = PFN_vkCmdDrawIndirectCountAMD( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectCountAMD" ) ); + if ( !vkCmdDrawIndirectCount ) + vkCmdDrawIndirectCount = vkCmdDrawIndirectCountAMD; + vkCmdDrawIndexedIndirectCountAMD = PFN_vkCmdDrawIndexedIndirectCountAMD( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirectCountAMD" ) ); + if ( !vkCmdDrawIndexedIndirectCount ) + vkCmdDrawIndexedIndirectCount = vkCmdDrawIndexedIndirectCountAMD; + + //=== VK_AMD_shader_info === + vkGetShaderInfoAMD = PFN_vkGetShaderInfoAMD( vkGetInstanceProcAddr( instance, "vkGetShaderInfoAMD" ) ); + + //=== VK_KHR_dynamic_rendering === + vkCmdBeginRenderingKHR = PFN_vkCmdBeginRenderingKHR( vkGetInstanceProcAddr( instance, "vkCmdBeginRenderingKHR" ) ); + if ( !vkCmdBeginRendering ) + vkCmdBeginRendering = vkCmdBeginRenderingKHR; + vkCmdEndRenderingKHR = PFN_vkCmdEndRenderingKHR( vkGetInstanceProcAddr( instance, "vkCmdEndRenderingKHR" ) ); + if ( !vkCmdEndRendering ) + vkCmdEndRendering = vkCmdEndRenderingKHR; + +#if defined( VK_USE_PLATFORM_GGP ) + //=== VK_GGP_stream_descriptor_surface === + vkCreateStreamDescriptorSurfaceGGP = PFN_vkCreateStreamDescriptorSurfaceGGP( vkGetInstanceProcAddr( instance, "vkCreateStreamDescriptorSurfaceGGP" ) ); +#endif /*VK_USE_PLATFORM_GGP*/ + + //=== VK_NV_external_memory_capabilities === + vkGetPhysicalDeviceExternalImageFormatPropertiesNV = + PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalImageFormatPropertiesNV" ) ); + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_NV_external_memory_win32 === + vkGetMemoryWin32HandleNV = PFN_vkGetMemoryWin32HandleNV( vkGetInstanceProcAddr( instance, "vkGetMemoryWin32HandleNV" ) ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_get_physical_device_properties2 === + vkGetPhysicalDeviceFeatures2KHR = PFN_vkGetPhysicalDeviceFeatures2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures2KHR" ) ); + if ( !vkGetPhysicalDeviceFeatures2 ) + vkGetPhysicalDeviceFeatures2 = vkGetPhysicalDeviceFeatures2KHR; + vkGetPhysicalDeviceProperties2KHR = PFN_vkGetPhysicalDeviceProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties2KHR" ) ); + if ( !vkGetPhysicalDeviceProperties2 ) + vkGetPhysicalDeviceProperties2 = 
vkGetPhysicalDeviceProperties2KHR; + vkGetPhysicalDeviceFormatProperties2KHR = + PFN_vkGetPhysicalDeviceFormatProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties2KHR" ) ); + if ( !vkGetPhysicalDeviceFormatProperties2 ) + vkGetPhysicalDeviceFormatProperties2 = vkGetPhysicalDeviceFormatProperties2KHR; + vkGetPhysicalDeviceImageFormatProperties2KHR = + PFN_vkGetPhysicalDeviceImageFormatProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties2KHR" ) ); + if ( !vkGetPhysicalDeviceImageFormatProperties2 ) + vkGetPhysicalDeviceImageFormatProperties2 = vkGetPhysicalDeviceImageFormatProperties2KHR; + vkGetPhysicalDeviceQueueFamilyProperties2KHR = + PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties2KHR" ) ); + if ( !vkGetPhysicalDeviceQueueFamilyProperties2 ) + vkGetPhysicalDeviceQueueFamilyProperties2 = vkGetPhysicalDeviceQueueFamilyProperties2KHR; + vkGetPhysicalDeviceMemoryProperties2KHR = + PFN_vkGetPhysicalDeviceMemoryProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties2KHR" ) ); + if ( !vkGetPhysicalDeviceMemoryProperties2 ) + vkGetPhysicalDeviceMemoryProperties2 = vkGetPhysicalDeviceMemoryProperties2KHR; + vkGetPhysicalDeviceSparseImageFormatProperties2KHR = + PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties2KHR" ) ); + if ( !vkGetPhysicalDeviceSparseImageFormatProperties2 ) + vkGetPhysicalDeviceSparseImageFormatProperties2 = vkGetPhysicalDeviceSparseImageFormatProperties2KHR; + + //=== VK_KHR_device_group === + vkGetDeviceGroupPeerMemoryFeaturesKHR = + PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceGroupPeerMemoryFeaturesKHR" ) ); + if ( !vkGetDeviceGroupPeerMemoryFeatures ) + vkGetDeviceGroupPeerMemoryFeatures = vkGetDeviceGroupPeerMemoryFeaturesKHR; + vkCmdSetDeviceMaskKHR = PFN_vkCmdSetDeviceMaskKHR( vkGetInstanceProcAddr( instance, "vkCmdSetDeviceMaskKHR" ) ); + if ( !vkCmdSetDeviceMask ) + vkCmdSetDeviceMask = vkCmdSetDeviceMaskKHR; + vkCmdDispatchBaseKHR = PFN_vkCmdDispatchBaseKHR( vkGetInstanceProcAddr( instance, "vkCmdDispatchBaseKHR" ) ); + if ( !vkCmdDispatchBase ) + vkCmdDispatchBase = vkCmdDispatchBaseKHR; + +#if defined( VK_USE_PLATFORM_VI_NN ) + //=== VK_NN_vi_surface === + vkCreateViSurfaceNN = PFN_vkCreateViSurfaceNN( vkGetInstanceProcAddr( instance, "vkCreateViSurfaceNN" ) ); +#endif /*VK_USE_PLATFORM_VI_NN*/ + + //=== VK_KHR_maintenance1 === + vkTrimCommandPoolKHR = PFN_vkTrimCommandPoolKHR( vkGetInstanceProcAddr( instance, "vkTrimCommandPoolKHR" ) ); + if ( !vkTrimCommandPool ) + vkTrimCommandPool = vkTrimCommandPoolKHR; + + //=== VK_KHR_device_group_creation === + vkEnumeratePhysicalDeviceGroupsKHR = PFN_vkEnumeratePhysicalDeviceGroupsKHR( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceGroupsKHR" ) ); + if ( !vkEnumeratePhysicalDeviceGroups ) + vkEnumeratePhysicalDeviceGroups = vkEnumeratePhysicalDeviceGroupsKHR; + + //=== VK_KHR_external_memory_capabilities === + vkGetPhysicalDeviceExternalBufferPropertiesKHR = + PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalBufferPropertiesKHR" ) ); + if ( !vkGetPhysicalDeviceExternalBufferProperties ) + vkGetPhysicalDeviceExternalBufferProperties = vkGetPhysicalDeviceExternalBufferPropertiesKHR; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== 
VK_KHR_external_memory_win32 === + vkGetMemoryWin32HandleKHR = PFN_vkGetMemoryWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkGetMemoryWin32HandleKHR" ) ); + vkGetMemoryWin32HandlePropertiesKHR = + PFN_vkGetMemoryWin32HandlePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetMemoryWin32HandlePropertiesKHR" ) ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_memory_fd === + vkGetMemoryFdKHR = PFN_vkGetMemoryFdKHR( vkGetInstanceProcAddr( instance, "vkGetMemoryFdKHR" ) ); + vkGetMemoryFdPropertiesKHR = PFN_vkGetMemoryFdPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetMemoryFdPropertiesKHR" ) ); + + //=== VK_KHR_external_semaphore_capabilities === + vkGetPhysicalDeviceExternalSemaphorePropertiesKHR = + PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalSemaphorePropertiesKHR" ) ); + if ( !vkGetPhysicalDeviceExternalSemaphoreProperties ) + vkGetPhysicalDeviceExternalSemaphoreProperties = vkGetPhysicalDeviceExternalSemaphorePropertiesKHR; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_semaphore_win32 === + vkImportSemaphoreWin32HandleKHR = PFN_vkImportSemaphoreWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkImportSemaphoreWin32HandleKHR" ) ); + vkGetSemaphoreWin32HandleKHR = PFN_vkGetSemaphoreWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkGetSemaphoreWin32HandleKHR" ) ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_semaphore_fd === + vkImportSemaphoreFdKHR = PFN_vkImportSemaphoreFdKHR( vkGetInstanceProcAddr( instance, "vkImportSemaphoreFdKHR" ) ); + vkGetSemaphoreFdKHR = PFN_vkGetSemaphoreFdKHR( vkGetInstanceProcAddr( instance, "vkGetSemaphoreFdKHR" ) ); + + //=== VK_KHR_push_descriptor === + vkCmdPushDescriptorSetKHR = PFN_vkCmdPushDescriptorSetKHR( vkGetInstanceProcAddr( instance, "vkCmdPushDescriptorSetKHR" ) ); + vkCmdPushDescriptorSetWithTemplateKHR = + PFN_vkCmdPushDescriptorSetWithTemplateKHR( vkGetInstanceProcAddr( instance, "vkCmdPushDescriptorSetWithTemplateKHR" ) ); + + //=== VK_EXT_conditional_rendering === + vkCmdBeginConditionalRenderingEXT = PFN_vkCmdBeginConditionalRenderingEXT( vkGetInstanceProcAddr( instance, "vkCmdBeginConditionalRenderingEXT" ) ); + vkCmdEndConditionalRenderingEXT = PFN_vkCmdEndConditionalRenderingEXT( vkGetInstanceProcAddr( instance, "vkCmdEndConditionalRenderingEXT" ) ); + + //=== VK_KHR_descriptor_update_template === + vkCreateDescriptorUpdateTemplateKHR = + PFN_vkCreateDescriptorUpdateTemplateKHR( vkGetInstanceProcAddr( instance, "vkCreateDescriptorUpdateTemplateKHR" ) ); + if ( !vkCreateDescriptorUpdateTemplate ) + vkCreateDescriptorUpdateTemplate = vkCreateDescriptorUpdateTemplateKHR; + vkDestroyDescriptorUpdateTemplateKHR = + PFN_vkDestroyDescriptorUpdateTemplateKHR( vkGetInstanceProcAddr( instance, "vkDestroyDescriptorUpdateTemplateKHR" ) ); + if ( !vkDestroyDescriptorUpdateTemplate ) + vkDestroyDescriptorUpdateTemplate = vkDestroyDescriptorUpdateTemplateKHR; + vkUpdateDescriptorSetWithTemplateKHR = + PFN_vkUpdateDescriptorSetWithTemplateKHR( vkGetInstanceProcAddr( instance, "vkUpdateDescriptorSetWithTemplateKHR" ) ); + if ( !vkUpdateDescriptorSetWithTemplate ) + vkUpdateDescriptorSetWithTemplate = vkUpdateDescriptorSetWithTemplateKHR; + + //=== VK_NV_clip_space_w_scaling === + vkCmdSetViewportWScalingNV = PFN_vkCmdSetViewportWScalingNV( vkGetInstanceProcAddr( instance, "vkCmdSetViewportWScalingNV" ) ); + + //=== VK_EXT_direct_mode_display === + vkReleaseDisplayEXT = PFN_vkReleaseDisplayEXT( 
vkGetInstanceProcAddr( instance, "vkReleaseDisplayEXT" ) ); + +#if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT ) + //=== VK_EXT_acquire_xlib_display === + vkAcquireXlibDisplayEXT = PFN_vkAcquireXlibDisplayEXT( vkGetInstanceProcAddr( instance, "vkAcquireXlibDisplayEXT" ) ); + vkGetRandROutputDisplayEXT = PFN_vkGetRandROutputDisplayEXT( vkGetInstanceProcAddr( instance, "vkGetRandROutputDisplayEXT" ) ); +#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ + + //=== VK_EXT_display_surface_counter === + vkGetPhysicalDeviceSurfaceCapabilities2EXT = + PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilities2EXT" ) ); + + //=== VK_EXT_display_control === + vkDisplayPowerControlEXT = PFN_vkDisplayPowerControlEXT( vkGetInstanceProcAddr( instance, "vkDisplayPowerControlEXT" ) ); + vkRegisterDeviceEventEXT = PFN_vkRegisterDeviceEventEXT( vkGetInstanceProcAddr( instance, "vkRegisterDeviceEventEXT" ) ); + vkRegisterDisplayEventEXT = PFN_vkRegisterDisplayEventEXT( vkGetInstanceProcAddr( instance, "vkRegisterDisplayEventEXT" ) ); + vkGetSwapchainCounterEXT = PFN_vkGetSwapchainCounterEXT( vkGetInstanceProcAddr( instance, "vkGetSwapchainCounterEXT" ) ); + + //=== VK_GOOGLE_display_timing === + vkGetRefreshCycleDurationGOOGLE = PFN_vkGetRefreshCycleDurationGOOGLE( vkGetInstanceProcAddr( instance, "vkGetRefreshCycleDurationGOOGLE" ) ); + vkGetPastPresentationTimingGOOGLE = PFN_vkGetPastPresentationTimingGOOGLE( vkGetInstanceProcAddr( instance, "vkGetPastPresentationTimingGOOGLE" ) ); + + //=== VK_EXT_discard_rectangles === + vkCmdSetDiscardRectangleEXT = PFN_vkCmdSetDiscardRectangleEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDiscardRectangleEXT" ) ); + vkCmdSetDiscardRectangleEnableEXT = PFN_vkCmdSetDiscardRectangleEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDiscardRectangleEnableEXT" ) ); + vkCmdSetDiscardRectangleModeEXT = PFN_vkCmdSetDiscardRectangleModeEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDiscardRectangleModeEXT" ) ); + + //=== VK_EXT_hdr_metadata === + vkSetHdrMetadataEXT = PFN_vkSetHdrMetadataEXT( vkGetInstanceProcAddr( instance, "vkSetHdrMetadataEXT" ) ); + + //=== VK_KHR_create_renderpass2 === + vkCreateRenderPass2KHR = PFN_vkCreateRenderPass2KHR( vkGetInstanceProcAddr( instance, "vkCreateRenderPass2KHR" ) ); + if ( !vkCreateRenderPass2 ) + vkCreateRenderPass2 = vkCreateRenderPass2KHR; + vkCmdBeginRenderPass2KHR = PFN_vkCmdBeginRenderPass2KHR( vkGetInstanceProcAddr( instance, "vkCmdBeginRenderPass2KHR" ) ); + if ( !vkCmdBeginRenderPass2 ) + vkCmdBeginRenderPass2 = vkCmdBeginRenderPass2KHR; + vkCmdNextSubpass2KHR = PFN_vkCmdNextSubpass2KHR( vkGetInstanceProcAddr( instance, "vkCmdNextSubpass2KHR" ) ); + if ( !vkCmdNextSubpass2 ) + vkCmdNextSubpass2 = vkCmdNextSubpass2KHR; + vkCmdEndRenderPass2KHR = PFN_vkCmdEndRenderPass2KHR( vkGetInstanceProcAddr( instance, "vkCmdEndRenderPass2KHR" ) ); + if ( !vkCmdEndRenderPass2 ) + vkCmdEndRenderPass2 = vkCmdEndRenderPass2KHR; + + //=== VK_KHR_shared_presentable_image === + vkGetSwapchainStatusKHR = PFN_vkGetSwapchainStatusKHR( vkGetInstanceProcAddr( instance, "vkGetSwapchainStatusKHR" ) ); + + //=== VK_KHR_external_fence_capabilities === + vkGetPhysicalDeviceExternalFencePropertiesKHR = + PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalFencePropertiesKHR" ) ); + if ( !vkGetPhysicalDeviceExternalFenceProperties ) + vkGetPhysicalDeviceExternalFenceProperties = vkGetPhysicalDeviceExternalFencePropertiesKHR; + +#if defined( 
VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_fence_win32 === + vkImportFenceWin32HandleKHR = PFN_vkImportFenceWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkImportFenceWin32HandleKHR" ) ); + vkGetFenceWin32HandleKHR = PFN_vkGetFenceWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkGetFenceWin32HandleKHR" ) ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_fence_fd === + vkImportFenceFdKHR = PFN_vkImportFenceFdKHR( vkGetInstanceProcAddr( instance, "vkImportFenceFdKHR" ) ); + vkGetFenceFdKHR = PFN_vkGetFenceFdKHR( vkGetInstanceProcAddr( instance, "vkGetFenceFdKHR" ) ); + + //=== VK_KHR_performance_query === + vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR = PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( + vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR" ) ); + vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR = PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR" ) ); + vkAcquireProfilingLockKHR = PFN_vkAcquireProfilingLockKHR( vkGetInstanceProcAddr( instance, "vkAcquireProfilingLockKHR" ) ); + vkReleaseProfilingLockKHR = PFN_vkReleaseProfilingLockKHR( vkGetInstanceProcAddr( instance, "vkReleaseProfilingLockKHR" ) ); + + //=== VK_KHR_get_surface_capabilities2 === + vkGetPhysicalDeviceSurfaceCapabilities2KHR = + PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilities2KHR" ) ); + vkGetPhysicalDeviceSurfaceFormats2KHR = + PFN_vkGetPhysicalDeviceSurfaceFormats2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceFormats2KHR" ) ); + + //=== VK_KHR_get_display_properties2 === + vkGetPhysicalDeviceDisplayProperties2KHR = + PFN_vkGetPhysicalDeviceDisplayProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayProperties2KHR" ) ); + vkGetPhysicalDeviceDisplayPlaneProperties2KHR = + PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPlaneProperties2KHR" ) ); + vkGetDisplayModeProperties2KHR = PFN_vkGetDisplayModeProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetDisplayModeProperties2KHR" ) ); + vkGetDisplayPlaneCapabilities2KHR = PFN_vkGetDisplayPlaneCapabilities2KHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneCapabilities2KHR" ) ); + +#if defined( VK_USE_PLATFORM_IOS_MVK ) + //=== VK_MVK_ios_surface === + vkCreateIOSSurfaceMVK = PFN_vkCreateIOSSurfaceMVK( vkGetInstanceProcAddr( instance, "vkCreateIOSSurfaceMVK" ) ); +#endif /*VK_USE_PLATFORM_IOS_MVK*/ + +#if defined( VK_USE_PLATFORM_MACOS_MVK ) + //=== VK_MVK_macos_surface === + vkCreateMacOSSurfaceMVK = PFN_vkCreateMacOSSurfaceMVK( vkGetInstanceProcAddr( instance, "vkCreateMacOSSurfaceMVK" ) ); +#endif /*VK_USE_PLATFORM_MACOS_MVK*/ + + //=== VK_EXT_debug_utils === + vkSetDebugUtilsObjectNameEXT = PFN_vkSetDebugUtilsObjectNameEXT( vkGetInstanceProcAddr( instance, "vkSetDebugUtilsObjectNameEXT" ) ); + vkSetDebugUtilsObjectTagEXT = PFN_vkSetDebugUtilsObjectTagEXT( vkGetInstanceProcAddr( instance, "vkSetDebugUtilsObjectTagEXT" ) ); + vkQueueBeginDebugUtilsLabelEXT = PFN_vkQueueBeginDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkQueueBeginDebugUtilsLabelEXT" ) ); + vkQueueEndDebugUtilsLabelEXT = PFN_vkQueueEndDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkQueueEndDebugUtilsLabelEXT" ) ); + vkQueueInsertDebugUtilsLabelEXT = 
PFN_vkQueueInsertDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkQueueInsertDebugUtilsLabelEXT" ) ); + vkCmdBeginDebugUtilsLabelEXT = PFN_vkCmdBeginDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkCmdBeginDebugUtilsLabelEXT" ) ); + vkCmdEndDebugUtilsLabelEXT = PFN_vkCmdEndDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkCmdEndDebugUtilsLabelEXT" ) ); + vkCmdInsertDebugUtilsLabelEXT = PFN_vkCmdInsertDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkCmdInsertDebugUtilsLabelEXT" ) ); + vkCreateDebugUtilsMessengerEXT = PFN_vkCreateDebugUtilsMessengerEXT( vkGetInstanceProcAddr( instance, "vkCreateDebugUtilsMessengerEXT" ) ); + vkDestroyDebugUtilsMessengerEXT = PFN_vkDestroyDebugUtilsMessengerEXT( vkGetInstanceProcAddr( instance, "vkDestroyDebugUtilsMessengerEXT" ) ); + vkSubmitDebugUtilsMessageEXT = PFN_vkSubmitDebugUtilsMessageEXT( vkGetInstanceProcAddr( instance, "vkSubmitDebugUtilsMessageEXT" ) ); + +#if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_ANDROID_external_memory_android_hardware_buffer === + vkGetAndroidHardwareBufferPropertiesANDROID = + PFN_vkGetAndroidHardwareBufferPropertiesANDROID( vkGetInstanceProcAddr( instance, "vkGetAndroidHardwareBufferPropertiesANDROID" ) ); + vkGetMemoryAndroidHardwareBufferANDROID = + PFN_vkGetMemoryAndroidHardwareBufferANDROID( vkGetInstanceProcAddr( instance, "vkGetMemoryAndroidHardwareBufferANDROID" ) ); #endif /*VK_USE_PLATFORM_ANDROID_KHR*/ #if defined( VK_ENABLE_BETA_EXTENSIONS ) - //=== VK_AMDX_shader_enqueue === - vkCreateExecutionGraphPipelinesAMDX = PFN_vkCreateExecutionGraphPipelinesAMDX( vkGetDeviceProcAddr( device, "vkCreateExecutionGraphPipelinesAMDX" ) ); - vkGetExecutionGraphPipelineScratchSizeAMDX = - PFN_vkGetExecutionGraphPipelineScratchSizeAMDX( vkGetDeviceProcAddr( device, "vkGetExecutionGraphPipelineScratchSizeAMDX" ) ); - vkGetExecutionGraphPipelineNodeIndexAMDX = - PFN_vkGetExecutionGraphPipelineNodeIndexAMDX( vkGetDeviceProcAddr( device, "vkGetExecutionGraphPipelineNodeIndexAMDX" ) ); - vkCmdInitializeGraphScratchMemoryAMDX = - PFN_vkCmdInitializeGraphScratchMemoryAMDX( vkGetDeviceProcAddr( device, "vkCmdInitializeGraphScratchMemoryAMDX" ) ); - vkCmdDispatchGraphAMDX = PFN_vkCmdDispatchGraphAMDX( vkGetDeviceProcAddr( device, "vkCmdDispatchGraphAMDX" ) ); - vkCmdDispatchGraphIndirectAMDX = PFN_vkCmdDispatchGraphIndirectAMDX( vkGetDeviceProcAddr( device, "vkCmdDispatchGraphIndirectAMDX" ) ); - vkCmdDispatchGraphIndirectCountAMDX = PFN_vkCmdDispatchGraphIndirectCountAMDX( vkGetDeviceProcAddr( device, "vkCmdDispatchGraphIndirectCountAMDX" ) ); + //=== VK_AMDX_shader_enqueue === + vkCreateExecutionGraphPipelinesAMDX = + PFN_vkCreateExecutionGraphPipelinesAMDX( vkGetInstanceProcAddr( instance, "vkCreateExecutionGraphPipelinesAMDX" ) ); + vkGetExecutionGraphPipelineScratchSizeAMDX = + PFN_vkGetExecutionGraphPipelineScratchSizeAMDX( vkGetInstanceProcAddr( instance, "vkGetExecutionGraphPipelineScratchSizeAMDX" ) ); + vkGetExecutionGraphPipelineNodeIndexAMDX = + PFN_vkGetExecutionGraphPipelineNodeIndexAMDX( vkGetInstanceProcAddr( instance, "vkGetExecutionGraphPipelineNodeIndexAMDX" ) ); + vkCmdInitializeGraphScratchMemoryAMDX = + PFN_vkCmdInitializeGraphScratchMemoryAMDX( vkGetInstanceProcAddr( instance, "vkCmdInitializeGraphScratchMemoryAMDX" ) ); + vkCmdDispatchGraphAMDX = PFN_vkCmdDispatchGraphAMDX( vkGetInstanceProcAddr( instance, "vkCmdDispatchGraphAMDX" ) ); + vkCmdDispatchGraphIndirectAMDX = PFN_vkCmdDispatchGraphIndirectAMDX( vkGetInstanceProcAddr( instance, "vkCmdDispatchGraphIndirectAMDX" ) ); + 
vkCmdDispatchGraphIndirectCountAMDX = + PFN_vkCmdDispatchGraphIndirectCountAMDX( vkGetInstanceProcAddr( instance, "vkCmdDispatchGraphIndirectCountAMDX" ) ); #endif /*VK_ENABLE_BETA_EXTENSIONS*/ - //=== VK_EXT_sample_locations === - vkCmdSetSampleLocationsEXT = PFN_vkCmdSetSampleLocationsEXT( vkGetDeviceProcAddr( device, "vkCmdSetSampleLocationsEXT" ) ); + //=== VK_EXT_sample_locations === + vkCmdSetSampleLocationsEXT = PFN_vkCmdSetSampleLocationsEXT( vkGetInstanceProcAddr( instance, "vkCmdSetSampleLocationsEXT" ) ); + vkGetPhysicalDeviceMultisamplePropertiesEXT = + PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMultisamplePropertiesEXT" ) ); - //=== VK_KHR_get_memory_requirements2 === - vkGetImageMemoryRequirements2KHR = PFN_vkGetImageMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements2KHR" ) ); - if ( !vkGetImageMemoryRequirements2 ) - vkGetImageMemoryRequirements2 = vkGetImageMemoryRequirements2KHR; - vkGetBufferMemoryRequirements2KHR = PFN_vkGetBufferMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements2KHR" ) ); - if ( !vkGetBufferMemoryRequirements2 ) - vkGetBufferMemoryRequirements2 = vkGetBufferMemoryRequirements2KHR; - vkGetImageSparseMemoryRequirements2KHR = - PFN_vkGetImageSparseMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements2KHR" ) ); - if ( !vkGetImageSparseMemoryRequirements2 ) - vkGetImageSparseMemoryRequirements2 = vkGetImageSparseMemoryRequirements2KHR; + //=== VK_KHR_get_memory_requirements2 === + vkGetImageMemoryRequirements2KHR = PFN_vkGetImageMemoryRequirements2KHR( vkGetInstanceProcAddr( instance, "vkGetImageMemoryRequirements2KHR" ) ); + if ( !vkGetImageMemoryRequirements2 ) + vkGetImageMemoryRequirements2 = vkGetImageMemoryRequirements2KHR; + vkGetBufferMemoryRequirements2KHR = PFN_vkGetBufferMemoryRequirements2KHR( vkGetInstanceProcAddr( instance, "vkGetBufferMemoryRequirements2KHR" ) ); + if ( !vkGetBufferMemoryRequirements2 ) + vkGetBufferMemoryRequirements2 = vkGetBufferMemoryRequirements2KHR; + vkGetImageSparseMemoryRequirements2KHR = + PFN_vkGetImageSparseMemoryRequirements2KHR( vkGetInstanceProcAddr( instance, "vkGetImageSparseMemoryRequirements2KHR" ) ); + if ( !vkGetImageSparseMemoryRequirements2 ) + vkGetImageSparseMemoryRequirements2 = vkGetImageSparseMemoryRequirements2KHR; - //=== VK_KHR_acceleration_structure === - vkCreateAccelerationStructureKHR = PFN_vkCreateAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCreateAccelerationStructureKHR" ) ); - vkDestroyAccelerationStructureKHR = PFN_vkDestroyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkDestroyAccelerationStructureKHR" ) ); - vkCmdBuildAccelerationStructuresKHR = PFN_vkCmdBuildAccelerationStructuresKHR( vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructuresKHR" ) ); - vkCmdBuildAccelerationStructuresIndirectKHR = - PFN_vkCmdBuildAccelerationStructuresIndirectKHR( vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructuresIndirectKHR" ) ); - vkBuildAccelerationStructuresKHR = PFN_vkBuildAccelerationStructuresKHR( vkGetDeviceProcAddr( device, "vkBuildAccelerationStructuresKHR" ) ); - vkCopyAccelerationStructureKHR = PFN_vkCopyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCopyAccelerationStructureKHR" ) ); - vkCopyAccelerationStructureToMemoryKHR = - PFN_vkCopyAccelerationStructureToMemoryKHR( vkGetDeviceProcAddr( device, "vkCopyAccelerationStructureToMemoryKHR" ) ); - 
vkCopyMemoryToAccelerationStructureKHR = - PFN_vkCopyMemoryToAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCopyMemoryToAccelerationStructureKHR" ) ); - vkWriteAccelerationStructuresPropertiesKHR = - PFN_vkWriteAccelerationStructuresPropertiesKHR( vkGetDeviceProcAddr( device, "vkWriteAccelerationStructuresPropertiesKHR" ) ); - vkCmdCopyAccelerationStructureKHR = PFN_vkCmdCopyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureKHR" ) ); - vkCmdCopyAccelerationStructureToMemoryKHR = - PFN_vkCmdCopyAccelerationStructureToMemoryKHR( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureToMemoryKHR" ) ); - vkCmdCopyMemoryToAccelerationStructureKHR = - PFN_vkCmdCopyMemoryToAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCmdCopyMemoryToAccelerationStructureKHR" ) ); - vkGetAccelerationStructureDeviceAddressKHR = - PFN_vkGetAccelerationStructureDeviceAddressKHR( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureDeviceAddressKHR" ) ); - vkCmdWriteAccelerationStructuresPropertiesKHR = - PFN_vkCmdWriteAccelerationStructuresPropertiesKHR( vkGetDeviceProcAddr( device, "vkCmdWriteAccelerationStructuresPropertiesKHR" ) ); - vkGetDeviceAccelerationStructureCompatibilityKHR = - PFN_vkGetDeviceAccelerationStructureCompatibilityKHR( vkGetDeviceProcAddr( device, "vkGetDeviceAccelerationStructureCompatibilityKHR" ) ); - vkGetAccelerationStructureBuildSizesKHR = - PFN_vkGetAccelerationStructureBuildSizesKHR( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureBuildSizesKHR" ) ); + //=== VK_KHR_acceleration_structure === + vkCreateAccelerationStructureKHR = PFN_vkCreateAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCreateAccelerationStructureKHR" ) ); + vkDestroyAccelerationStructureKHR = PFN_vkDestroyAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkDestroyAccelerationStructureKHR" ) ); + vkCmdBuildAccelerationStructuresKHR = + PFN_vkCmdBuildAccelerationStructuresKHR( vkGetInstanceProcAddr( instance, "vkCmdBuildAccelerationStructuresKHR" ) ); + vkCmdBuildAccelerationStructuresIndirectKHR = + PFN_vkCmdBuildAccelerationStructuresIndirectKHR( vkGetInstanceProcAddr( instance, "vkCmdBuildAccelerationStructuresIndirectKHR" ) ); + vkBuildAccelerationStructuresKHR = PFN_vkBuildAccelerationStructuresKHR( vkGetInstanceProcAddr( instance, "vkBuildAccelerationStructuresKHR" ) ); + vkCopyAccelerationStructureKHR = PFN_vkCopyAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCopyAccelerationStructureKHR" ) ); + vkCopyAccelerationStructureToMemoryKHR = + PFN_vkCopyAccelerationStructureToMemoryKHR( vkGetInstanceProcAddr( instance, "vkCopyAccelerationStructureToMemoryKHR" ) ); + vkCopyMemoryToAccelerationStructureKHR = + PFN_vkCopyMemoryToAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCopyMemoryToAccelerationStructureKHR" ) ); + vkWriteAccelerationStructuresPropertiesKHR = + PFN_vkWriteAccelerationStructuresPropertiesKHR( vkGetInstanceProcAddr( instance, "vkWriteAccelerationStructuresPropertiesKHR" ) ); + vkCmdCopyAccelerationStructureKHR = PFN_vkCmdCopyAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCmdCopyAccelerationStructureKHR" ) ); + vkCmdCopyAccelerationStructureToMemoryKHR = + PFN_vkCmdCopyAccelerationStructureToMemoryKHR( vkGetInstanceProcAddr( instance, "vkCmdCopyAccelerationStructureToMemoryKHR" ) ); + vkCmdCopyMemoryToAccelerationStructureKHR = + PFN_vkCmdCopyMemoryToAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCmdCopyMemoryToAccelerationStructureKHR" ) ); + 
vkGetAccelerationStructureDeviceAddressKHR = + PFN_vkGetAccelerationStructureDeviceAddressKHR( vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureDeviceAddressKHR" ) ); + vkCmdWriteAccelerationStructuresPropertiesKHR = + PFN_vkCmdWriteAccelerationStructuresPropertiesKHR( vkGetInstanceProcAddr( instance, "vkCmdWriteAccelerationStructuresPropertiesKHR" ) ); + vkGetDeviceAccelerationStructureCompatibilityKHR = + PFN_vkGetDeviceAccelerationStructureCompatibilityKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceAccelerationStructureCompatibilityKHR" ) ); + vkGetAccelerationStructureBuildSizesKHR = + PFN_vkGetAccelerationStructureBuildSizesKHR( vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureBuildSizesKHR" ) ); - //=== VK_KHR_ray_tracing_pipeline === - vkCmdTraceRaysKHR = PFN_vkCmdTraceRaysKHR( vkGetDeviceProcAddr( device, "vkCmdTraceRaysKHR" ) ); - vkCreateRayTracingPipelinesKHR = PFN_vkCreateRayTracingPipelinesKHR( vkGetDeviceProcAddr( device, "vkCreateRayTracingPipelinesKHR" ) ); - vkGetRayTracingShaderGroupHandlesKHR = PFN_vkGetRayTracingShaderGroupHandlesKHR( vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupHandlesKHR" ) ); - vkGetRayTracingCaptureReplayShaderGroupHandlesKHR = - PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( vkGetDeviceProcAddr( device, "vkGetRayTracingCaptureReplayShaderGroupHandlesKHR" ) ); - vkCmdTraceRaysIndirectKHR = PFN_vkCmdTraceRaysIndirectKHR( vkGetDeviceProcAddr( device, "vkCmdTraceRaysIndirectKHR" ) ); - vkGetRayTracingShaderGroupStackSizeKHR = - PFN_vkGetRayTracingShaderGroupStackSizeKHR( vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupStackSizeKHR" ) ); - vkCmdSetRayTracingPipelineStackSizeKHR = - PFN_vkCmdSetRayTracingPipelineStackSizeKHR( vkGetDeviceProcAddr( device, "vkCmdSetRayTracingPipelineStackSizeKHR" ) ); + //=== VK_KHR_ray_tracing_pipeline === + vkCmdTraceRaysKHR = PFN_vkCmdTraceRaysKHR( vkGetInstanceProcAddr( instance, "vkCmdTraceRaysKHR" ) ); + vkCreateRayTracingPipelinesKHR = PFN_vkCreateRayTracingPipelinesKHR( vkGetInstanceProcAddr( instance, "vkCreateRayTracingPipelinesKHR" ) ); + vkGetRayTracingShaderGroupHandlesKHR = + PFN_vkGetRayTracingShaderGroupHandlesKHR( vkGetInstanceProcAddr( instance, "vkGetRayTracingShaderGroupHandlesKHR" ) ); + vkGetRayTracingCaptureReplayShaderGroupHandlesKHR = + PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( vkGetInstanceProcAddr( instance, "vkGetRayTracingCaptureReplayShaderGroupHandlesKHR" ) ); + vkCmdTraceRaysIndirectKHR = PFN_vkCmdTraceRaysIndirectKHR( vkGetInstanceProcAddr( instance, "vkCmdTraceRaysIndirectKHR" ) ); + vkGetRayTracingShaderGroupStackSizeKHR = + PFN_vkGetRayTracingShaderGroupStackSizeKHR( vkGetInstanceProcAddr( instance, "vkGetRayTracingShaderGroupStackSizeKHR" ) ); + vkCmdSetRayTracingPipelineStackSizeKHR = + PFN_vkCmdSetRayTracingPipelineStackSizeKHR( vkGetInstanceProcAddr( instance, "vkCmdSetRayTracingPipelineStackSizeKHR" ) ); - //=== VK_KHR_sampler_ycbcr_conversion === - vkCreateSamplerYcbcrConversionKHR = PFN_vkCreateSamplerYcbcrConversionKHR( vkGetDeviceProcAddr( device, "vkCreateSamplerYcbcrConversionKHR" ) ); - if ( !vkCreateSamplerYcbcrConversion ) - vkCreateSamplerYcbcrConversion = vkCreateSamplerYcbcrConversionKHR; - vkDestroySamplerYcbcrConversionKHR = PFN_vkDestroySamplerYcbcrConversionKHR( vkGetDeviceProcAddr( device, "vkDestroySamplerYcbcrConversionKHR" ) ); - if ( !vkDestroySamplerYcbcrConversion ) - vkDestroySamplerYcbcrConversion = vkDestroySamplerYcbcrConversionKHR; + //=== VK_KHR_sampler_ycbcr_conversion === + 
vkCreateSamplerYcbcrConversionKHR = PFN_vkCreateSamplerYcbcrConversionKHR( vkGetInstanceProcAddr( instance, "vkCreateSamplerYcbcrConversionKHR" ) ); + if ( !vkCreateSamplerYcbcrConversion ) + vkCreateSamplerYcbcrConversion = vkCreateSamplerYcbcrConversionKHR; + vkDestroySamplerYcbcrConversionKHR = PFN_vkDestroySamplerYcbcrConversionKHR( vkGetInstanceProcAddr( instance, "vkDestroySamplerYcbcrConversionKHR" ) ); + if ( !vkDestroySamplerYcbcrConversion ) + vkDestroySamplerYcbcrConversion = vkDestroySamplerYcbcrConversionKHR; - //=== VK_KHR_bind_memory2 === - vkBindBufferMemory2KHR = PFN_vkBindBufferMemory2KHR( vkGetDeviceProcAddr( device, "vkBindBufferMemory2KHR" ) ); - if ( !vkBindBufferMemory2 ) - vkBindBufferMemory2 = vkBindBufferMemory2KHR; - vkBindImageMemory2KHR = PFN_vkBindImageMemory2KHR( vkGetDeviceProcAddr( device, "vkBindImageMemory2KHR" ) ); - if ( !vkBindImageMemory2 ) - vkBindImageMemory2 = vkBindImageMemory2KHR; + //=== VK_KHR_bind_memory2 === + vkBindBufferMemory2KHR = PFN_vkBindBufferMemory2KHR( vkGetInstanceProcAddr( instance, "vkBindBufferMemory2KHR" ) ); + if ( !vkBindBufferMemory2 ) + vkBindBufferMemory2 = vkBindBufferMemory2KHR; + vkBindImageMemory2KHR = PFN_vkBindImageMemory2KHR( vkGetInstanceProcAddr( instance, "vkBindImageMemory2KHR" ) ); + if ( !vkBindImageMemory2 ) + vkBindImageMemory2 = vkBindImageMemory2KHR; - //=== VK_EXT_image_drm_format_modifier === - vkGetImageDrmFormatModifierPropertiesEXT = - PFN_vkGetImageDrmFormatModifierPropertiesEXT( vkGetDeviceProcAddr( device, "vkGetImageDrmFormatModifierPropertiesEXT" ) ); + //=== VK_EXT_image_drm_format_modifier === + vkGetImageDrmFormatModifierPropertiesEXT = + PFN_vkGetImageDrmFormatModifierPropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetImageDrmFormatModifierPropertiesEXT" ) ); - //=== VK_EXT_validation_cache === - vkCreateValidationCacheEXT = PFN_vkCreateValidationCacheEXT( vkGetDeviceProcAddr( device, "vkCreateValidationCacheEXT" ) ); - vkDestroyValidationCacheEXT = PFN_vkDestroyValidationCacheEXT( vkGetDeviceProcAddr( device, "vkDestroyValidationCacheEXT" ) ); - vkMergeValidationCachesEXT = PFN_vkMergeValidationCachesEXT( vkGetDeviceProcAddr( device, "vkMergeValidationCachesEXT" ) ); - vkGetValidationCacheDataEXT = PFN_vkGetValidationCacheDataEXT( vkGetDeviceProcAddr( device, "vkGetValidationCacheDataEXT" ) ); + //=== VK_EXT_validation_cache === + vkCreateValidationCacheEXT = PFN_vkCreateValidationCacheEXT( vkGetInstanceProcAddr( instance, "vkCreateValidationCacheEXT" ) ); + vkDestroyValidationCacheEXT = PFN_vkDestroyValidationCacheEXT( vkGetInstanceProcAddr( instance, "vkDestroyValidationCacheEXT" ) ); + vkMergeValidationCachesEXT = PFN_vkMergeValidationCachesEXT( vkGetInstanceProcAddr( instance, "vkMergeValidationCachesEXT" ) ); + vkGetValidationCacheDataEXT = PFN_vkGetValidationCacheDataEXT( vkGetInstanceProcAddr( instance, "vkGetValidationCacheDataEXT" ) ); - //=== VK_NV_shading_rate_image === - vkCmdBindShadingRateImageNV = PFN_vkCmdBindShadingRateImageNV( vkGetDeviceProcAddr( device, "vkCmdBindShadingRateImageNV" ) ); - vkCmdSetViewportShadingRatePaletteNV = PFN_vkCmdSetViewportShadingRatePaletteNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportShadingRatePaletteNV" ) ); - vkCmdSetCoarseSampleOrderNV = PFN_vkCmdSetCoarseSampleOrderNV( vkGetDeviceProcAddr( device, "vkCmdSetCoarseSampleOrderNV" ) ); + //=== VK_NV_shading_rate_image === + vkCmdBindShadingRateImageNV = PFN_vkCmdBindShadingRateImageNV( vkGetInstanceProcAddr( instance, "vkCmdBindShadingRateImageNV" ) ); + 
vkCmdSetViewportShadingRatePaletteNV = + PFN_vkCmdSetViewportShadingRatePaletteNV( vkGetInstanceProcAddr( instance, "vkCmdSetViewportShadingRatePaletteNV" ) ); + vkCmdSetCoarseSampleOrderNV = PFN_vkCmdSetCoarseSampleOrderNV( vkGetInstanceProcAddr( instance, "vkCmdSetCoarseSampleOrderNV" ) ); - //=== VK_NV_ray_tracing === - vkCreateAccelerationStructureNV = PFN_vkCreateAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCreateAccelerationStructureNV" ) ); - vkDestroyAccelerationStructureNV = PFN_vkDestroyAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkDestroyAccelerationStructureNV" ) ); - vkGetAccelerationStructureMemoryRequirementsNV = - PFN_vkGetAccelerationStructureMemoryRequirementsNV( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureMemoryRequirementsNV" ) ); - vkBindAccelerationStructureMemoryNV = PFN_vkBindAccelerationStructureMemoryNV( vkGetDeviceProcAddr( device, "vkBindAccelerationStructureMemoryNV" ) ); - vkCmdBuildAccelerationStructureNV = PFN_vkCmdBuildAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructureNV" ) ); - vkCmdCopyAccelerationStructureNV = PFN_vkCmdCopyAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureNV" ) ); - vkCmdTraceRaysNV = PFN_vkCmdTraceRaysNV( vkGetDeviceProcAddr( device, "vkCmdTraceRaysNV" ) ); - vkCreateRayTracingPipelinesNV = PFN_vkCreateRayTracingPipelinesNV( vkGetDeviceProcAddr( device, "vkCreateRayTracingPipelinesNV" ) ); - vkGetRayTracingShaderGroupHandlesNV = PFN_vkGetRayTracingShaderGroupHandlesNV( vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupHandlesNV" ) ); - if ( !vkGetRayTracingShaderGroupHandlesKHR ) - vkGetRayTracingShaderGroupHandlesKHR = vkGetRayTracingShaderGroupHandlesNV; - vkGetAccelerationStructureHandleNV = PFN_vkGetAccelerationStructureHandleNV( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureHandleNV" ) ); - vkCmdWriteAccelerationStructuresPropertiesNV = - PFN_vkCmdWriteAccelerationStructuresPropertiesNV( vkGetDeviceProcAddr( device, "vkCmdWriteAccelerationStructuresPropertiesNV" ) ); - vkCompileDeferredNV = PFN_vkCompileDeferredNV( vkGetDeviceProcAddr( device, "vkCompileDeferredNV" ) ); + //=== VK_NV_ray_tracing === + vkCreateAccelerationStructureNV = PFN_vkCreateAccelerationStructureNV( vkGetInstanceProcAddr( instance, "vkCreateAccelerationStructureNV" ) ); + vkDestroyAccelerationStructureNV = PFN_vkDestroyAccelerationStructureNV( vkGetInstanceProcAddr( instance, "vkDestroyAccelerationStructureNV" ) ); + vkGetAccelerationStructureMemoryRequirementsNV = + PFN_vkGetAccelerationStructureMemoryRequirementsNV( vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureMemoryRequirementsNV" ) ); + vkBindAccelerationStructureMemoryNV = + PFN_vkBindAccelerationStructureMemoryNV( vkGetInstanceProcAddr( instance, "vkBindAccelerationStructureMemoryNV" ) ); + vkCmdBuildAccelerationStructureNV = PFN_vkCmdBuildAccelerationStructureNV( vkGetInstanceProcAddr( instance, "vkCmdBuildAccelerationStructureNV" ) ); + vkCmdCopyAccelerationStructureNV = PFN_vkCmdCopyAccelerationStructureNV( vkGetInstanceProcAddr( instance, "vkCmdCopyAccelerationStructureNV" ) ); + vkCmdTraceRaysNV = PFN_vkCmdTraceRaysNV( vkGetInstanceProcAddr( instance, "vkCmdTraceRaysNV" ) ); + vkCreateRayTracingPipelinesNV = PFN_vkCreateRayTracingPipelinesNV( vkGetInstanceProcAddr( instance, "vkCreateRayTracingPipelinesNV" ) ); + vkGetRayTracingShaderGroupHandlesNV = + PFN_vkGetRayTracingShaderGroupHandlesNV( vkGetInstanceProcAddr( instance, "vkGetRayTracingShaderGroupHandlesNV" ) 
); + if ( !vkGetRayTracingShaderGroupHandlesKHR ) + vkGetRayTracingShaderGroupHandlesKHR = vkGetRayTracingShaderGroupHandlesNV; + vkGetAccelerationStructureHandleNV = PFN_vkGetAccelerationStructureHandleNV( vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureHandleNV" ) ); + vkCmdWriteAccelerationStructuresPropertiesNV = + PFN_vkCmdWriteAccelerationStructuresPropertiesNV( vkGetInstanceProcAddr( instance, "vkCmdWriteAccelerationStructuresPropertiesNV" ) ); + vkCompileDeferredNV = PFN_vkCompileDeferredNV( vkGetInstanceProcAddr( instance, "vkCompileDeferredNV" ) ); - //=== VK_KHR_maintenance3 === - vkGetDescriptorSetLayoutSupportKHR = PFN_vkGetDescriptorSetLayoutSupportKHR( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutSupportKHR" ) ); - if ( !vkGetDescriptorSetLayoutSupport ) - vkGetDescriptorSetLayoutSupport = vkGetDescriptorSetLayoutSupportKHR; + //=== VK_KHR_maintenance3 === + vkGetDescriptorSetLayoutSupportKHR = PFN_vkGetDescriptorSetLayoutSupportKHR( vkGetInstanceProcAddr( instance, "vkGetDescriptorSetLayoutSupportKHR" ) ); + if ( !vkGetDescriptorSetLayoutSupport ) + vkGetDescriptorSetLayoutSupport = vkGetDescriptorSetLayoutSupportKHR; - //=== VK_KHR_draw_indirect_count === - vkCmdDrawIndirectCountKHR = PFN_vkCmdDrawIndirectCountKHR( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCountKHR" ) ); - if ( !vkCmdDrawIndirectCount ) - vkCmdDrawIndirectCount = vkCmdDrawIndirectCountKHR; - vkCmdDrawIndexedIndirectCountKHR = PFN_vkCmdDrawIndexedIndirectCountKHR( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCountKHR" ) ); - if ( !vkCmdDrawIndexedIndirectCount ) - vkCmdDrawIndexedIndirectCount = vkCmdDrawIndexedIndirectCountKHR; + //=== VK_KHR_draw_indirect_count === + vkCmdDrawIndirectCountKHR = PFN_vkCmdDrawIndirectCountKHR( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectCountKHR" ) ); + if ( !vkCmdDrawIndirectCount ) + vkCmdDrawIndirectCount = vkCmdDrawIndirectCountKHR; + vkCmdDrawIndexedIndirectCountKHR = PFN_vkCmdDrawIndexedIndirectCountKHR( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirectCountKHR" ) ); + if ( !vkCmdDrawIndexedIndirectCount ) + vkCmdDrawIndexedIndirectCount = vkCmdDrawIndexedIndirectCountKHR; - //=== VK_EXT_external_memory_host === - vkGetMemoryHostPointerPropertiesEXT = PFN_vkGetMemoryHostPointerPropertiesEXT( vkGetDeviceProcAddr( device, "vkGetMemoryHostPointerPropertiesEXT" ) ); + //=== VK_EXT_external_memory_host === + vkGetMemoryHostPointerPropertiesEXT = + PFN_vkGetMemoryHostPointerPropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetMemoryHostPointerPropertiesEXT" ) ); - //=== VK_AMD_buffer_marker === - vkCmdWriteBufferMarkerAMD = PFN_vkCmdWriteBufferMarkerAMD( vkGetDeviceProcAddr( device, "vkCmdWriteBufferMarkerAMD" ) ); - vkCmdWriteBufferMarker2AMD = PFN_vkCmdWriteBufferMarker2AMD( vkGetDeviceProcAddr( device, "vkCmdWriteBufferMarker2AMD" ) ); + //=== VK_AMD_buffer_marker === + vkCmdWriteBufferMarkerAMD = PFN_vkCmdWriteBufferMarkerAMD( vkGetInstanceProcAddr( instance, "vkCmdWriteBufferMarkerAMD" ) ); + vkCmdWriteBufferMarker2AMD = PFN_vkCmdWriteBufferMarker2AMD( vkGetInstanceProcAddr( instance, "vkCmdWriteBufferMarker2AMD" ) ); - //=== VK_EXT_calibrated_timestamps === - vkGetCalibratedTimestampsEXT = PFN_vkGetCalibratedTimestampsEXT( vkGetDeviceProcAddr( device, "vkGetCalibratedTimestampsEXT" ) ); - if ( !vkGetCalibratedTimestampsKHR ) - vkGetCalibratedTimestampsKHR = vkGetCalibratedTimestampsEXT; + //=== VK_EXT_calibrated_timestamps === + vkGetPhysicalDeviceCalibrateableTimeDomainsEXT = + 
PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCalibrateableTimeDomainsEXT" ) ); + if ( !vkGetPhysicalDeviceCalibrateableTimeDomainsKHR ) + vkGetPhysicalDeviceCalibrateableTimeDomainsKHR = vkGetPhysicalDeviceCalibrateableTimeDomainsEXT; + vkGetCalibratedTimestampsEXT = PFN_vkGetCalibratedTimestampsEXT( vkGetInstanceProcAddr( instance, "vkGetCalibratedTimestampsEXT" ) ); + if ( !vkGetCalibratedTimestampsKHR ) + vkGetCalibratedTimestampsKHR = vkGetCalibratedTimestampsEXT; - //=== VK_NV_mesh_shader === - vkCmdDrawMeshTasksNV = PFN_vkCmdDrawMeshTasksNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksNV" ) ); - vkCmdDrawMeshTasksIndirectNV = PFN_vkCmdDrawMeshTasksIndirectNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectNV" ) ); - vkCmdDrawMeshTasksIndirectCountNV = PFN_vkCmdDrawMeshTasksIndirectCountNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectCountNV" ) ); + //=== VK_NV_mesh_shader === + vkCmdDrawMeshTasksNV = PFN_vkCmdDrawMeshTasksNV( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksNV" ) ); + vkCmdDrawMeshTasksIndirectNV = PFN_vkCmdDrawMeshTasksIndirectNV( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksIndirectNV" ) ); + vkCmdDrawMeshTasksIndirectCountNV = PFN_vkCmdDrawMeshTasksIndirectCountNV( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksIndirectCountNV" ) ); - //=== VK_NV_scissor_exclusive === - vkCmdSetExclusiveScissorEnableNV = PFN_vkCmdSetExclusiveScissorEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetExclusiveScissorEnableNV" ) ); - vkCmdSetExclusiveScissorNV = PFN_vkCmdSetExclusiveScissorNV( vkGetDeviceProcAddr( device, "vkCmdSetExclusiveScissorNV" ) ); + //=== VK_NV_scissor_exclusive === + vkCmdSetExclusiveScissorEnableNV = PFN_vkCmdSetExclusiveScissorEnableNV( vkGetInstanceProcAddr( instance, "vkCmdSetExclusiveScissorEnableNV" ) ); + vkCmdSetExclusiveScissorNV = PFN_vkCmdSetExclusiveScissorNV( vkGetInstanceProcAddr( instance, "vkCmdSetExclusiveScissorNV" ) ); - //=== VK_NV_device_diagnostic_checkpoints === - vkCmdSetCheckpointNV = PFN_vkCmdSetCheckpointNV( vkGetDeviceProcAddr( device, "vkCmdSetCheckpointNV" ) ); - vkGetQueueCheckpointDataNV = PFN_vkGetQueueCheckpointDataNV( vkGetDeviceProcAddr( device, "vkGetQueueCheckpointDataNV" ) ); - vkGetQueueCheckpointData2NV = PFN_vkGetQueueCheckpointData2NV( vkGetDeviceProcAddr( device, "vkGetQueueCheckpointData2NV" ) ); + //=== VK_NV_device_diagnostic_checkpoints === + vkCmdSetCheckpointNV = PFN_vkCmdSetCheckpointNV( vkGetInstanceProcAddr( instance, "vkCmdSetCheckpointNV" ) ); + vkGetQueueCheckpointDataNV = PFN_vkGetQueueCheckpointDataNV( vkGetInstanceProcAddr( instance, "vkGetQueueCheckpointDataNV" ) ); + vkGetQueueCheckpointData2NV = PFN_vkGetQueueCheckpointData2NV( vkGetInstanceProcAddr( instance, "vkGetQueueCheckpointData2NV" ) ); - //=== VK_KHR_timeline_semaphore === - vkGetSemaphoreCounterValueKHR = PFN_vkGetSemaphoreCounterValueKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreCounterValueKHR" ) ); - if ( !vkGetSemaphoreCounterValue ) - vkGetSemaphoreCounterValue = vkGetSemaphoreCounterValueKHR; - vkWaitSemaphoresKHR = PFN_vkWaitSemaphoresKHR( vkGetDeviceProcAddr( device, "vkWaitSemaphoresKHR" ) ); - if ( !vkWaitSemaphores ) - vkWaitSemaphores = vkWaitSemaphoresKHR; - vkSignalSemaphoreKHR = PFN_vkSignalSemaphoreKHR( vkGetDeviceProcAddr( device, "vkSignalSemaphoreKHR" ) ); - if ( !vkSignalSemaphore ) - vkSignalSemaphore = vkSignalSemaphoreKHR; + //=== VK_KHR_timeline_semaphore === + vkGetSemaphoreCounterValueKHR = 
PFN_vkGetSemaphoreCounterValueKHR( vkGetInstanceProcAddr( instance, "vkGetSemaphoreCounterValueKHR" ) ); + if ( !vkGetSemaphoreCounterValue ) + vkGetSemaphoreCounterValue = vkGetSemaphoreCounterValueKHR; + vkWaitSemaphoresKHR = PFN_vkWaitSemaphoresKHR( vkGetInstanceProcAddr( instance, "vkWaitSemaphoresKHR" ) ); + if ( !vkWaitSemaphores ) + vkWaitSemaphores = vkWaitSemaphoresKHR; + vkSignalSemaphoreKHR = PFN_vkSignalSemaphoreKHR( vkGetInstanceProcAddr( instance, "vkSignalSemaphoreKHR" ) ); + if ( !vkSignalSemaphore ) + vkSignalSemaphore = vkSignalSemaphoreKHR; - //=== VK_INTEL_performance_query === - vkInitializePerformanceApiINTEL = PFN_vkInitializePerformanceApiINTEL( vkGetDeviceProcAddr( device, "vkInitializePerformanceApiINTEL" ) ); - vkUninitializePerformanceApiINTEL = PFN_vkUninitializePerformanceApiINTEL( vkGetDeviceProcAddr( device, "vkUninitializePerformanceApiINTEL" ) ); - vkCmdSetPerformanceMarkerINTEL = PFN_vkCmdSetPerformanceMarkerINTEL( vkGetDeviceProcAddr( device, "vkCmdSetPerformanceMarkerINTEL" ) ); - vkCmdSetPerformanceStreamMarkerINTEL = PFN_vkCmdSetPerformanceStreamMarkerINTEL( vkGetDeviceProcAddr( device, "vkCmdSetPerformanceStreamMarkerINTEL" ) ); - vkCmdSetPerformanceOverrideINTEL = PFN_vkCmdSetPerformanceOverrideINTEL( vkGetDeviceProcAddr( device, "vkCmdSetPerformanceOverrideINTEL" ) ); - vkAcquirePerformanceConfigurationINTEL = - PFN_vkAcquirePerformanceConfigurationINTEL( vkGetDeviceProcAddr( device, "vkAcquirePerformanceConfigurationINTEL" ) ); - vkReleasePerformanceConfigurationINTEL = - PFN_vkReleasePerformanceConfigurationINTEL( vkGetDeviceProcAddr( device, "vkReleasePerformanceConfigurationINTEL" ) ); - vkQueueSetPerformanceConfigurationINTEL = - PFN_vkQueueSetPerformanceConfigurationINTEL( vkGetDeviceProcAddr( device, "vkQueueSetPerformanceConfigurationINTEL" ) ); - vkGetPerformanceParameterINTEL = PFN_vkGetPerformanceParameterINTEL( vkGetDeviceProcAddr( device, "vkGetPerformanceParameterINTEL" ) ); + //=== VK_INTEL_performance_query === + vkInitializePerformanceApiINTEL = PFN_vkInitializePerformanceApiINTEL( vkGetInstanceProcAddr( instance, "vkInitializePerformanceApiINTEL" ) ); + vkUninitializePerformanceApiINTEL = PFN_vkUninitializePerformanceApiINTEL( vkGetInstanceProcAddr( instance, "vkUninitializePerformanceApiINTEL" ) ); + vkCmdSetPerformanceMarkerINTEL = PFN_vkCmdSetPerformanceMarkerINTEL( vkGetInstanceProcAddr( instance, "vkCmdSetPerformanceMarkerINTEL" ) ); + vkCmdSetPerformanceStreamMarkerINTEL = + PFN_vkCmdSetPerformanceStreamMarkerINTEL( vkGetInstanceProcAddr( instance, "vkCmdSetPerformanceStreamMarkerINTEL" ) ); + vkCmdSetPerformanceOverrideINTEL = PFN_vkCmdSetPerformanceOverrideINTEL( vkGetInstanceProcAddr( instance, "vkCmdSetPerformanceOverrideINTEL" ) ); + vkAcquirePerformanceConfigurationINTEL = + PFN_vkAcquirePerformanceConfigurationINTEL( vkGetInstanceProcAddr( instance, "vkAcquirePerformanceConfigurationINTEL" ) ); + vkReleasePerformanceConfigurationINTEL = + PFN_vkReleasePerformanceConfigurationINTEL( vkGetInstanceProcAddr( instance, "vkReleasePerformanceConfigurationINTEL" ) ); + vkQueueSetPerformanceConfigurationINTEL = + PFN_vkQueueSetPerformanceConfigurationINTEL( vkGetInstanceProcAddr( instance, "vkQueueSetPerformanceConfigurationINTEL" ) ); + vkGetPerformanceParameterINTEL = PFN_vkGetPerformanceParameterINTEL( vkGetInstanceProcAddr( instance, "vkGetPerformanceParameterINTEL" ) ); - //=== VK_AMD_display_native_hdr === - vkSetLocalDimmingAMD = PFN_vkSetLocalDimmingAMD( vkGetDeviceProcAddr( device, "vkSetLocalDimmingAMD" ) ); + //=== 
VK_AMD_display_native_hdr === + vkSetLocalDimmingAMD = PFN_vkSetLocalDimmingAMD( vkGetInstanceProcAddr( instance, "vkSetLocalDimmingAMD" ) ); - //=== VK_KHR_fragment_shading_rate === - vkCmdSetFragmentShadingRateKHR = PFN_vkCmdSetFragmentShadingRateKHR( vkGetDeviceProcAddr( device, "vkCmdSetFragmentShadingRateKHR" ) ); +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_imagepipe_surface === + vkCreateImagePipeSurfaceFUCHSIA = PFN_vkCreateImagePipeSurfaceFUCHSIA( vkGetInstanceProcAddr( instance, "vkCreateImagePipeSurfaceFUCHSIA" ) ); +#endif /*VK_USE_PLATFORM_FUCHSIA*/ - //=== VK_KHR_dynamic_rendering_local_read === - vkCmdSetRenderingAttachmentLocationsKHR = - PFN_vkCmdSetRenderingAttachmentLocationsKHR( vkGetDeviceProcAddr( device, "vkCmdSetRenderingAttachmentLocationsKHR" ) ); - vkCmdSetRenderingInputAttachmentIndicesKHR = - PFN_vkCmdSetRenderingInputAttachmentIndicesKHR( vkGetDeviceProcAddr( device, "vkCmdSetRenderingInputAttachmentIndicesKHR" ) ); +#if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_surface === + vkCreateMetalSurfaceEXT = PFN_vkCreateMetalSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateMetalSurfaceEXT" ) ); +#endif /*VK_USE_PLATFORM_METAL_EXT*/ - //=== VK_EXT_buffer_device_address === - vkGetBufferDeviceAddressEXT = PFN_vkGetBufferDeviceAddressEXT( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddressEXT" ) ); - if ( !vkGetBufferDeviceAddress ) - vkGetBufferDeviceAddress = vkGetBufferDeviceAddressEXT; + //=== VK_KHR_fragment_shading_rate === + vkGetPhysicalDeviceFragmentShadingRatesKHR = + PFN_vkGetPhysicalDeviceFragmentShadingRatesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFragmentShadingRatesKHR" ) ); + vkCmdSetFragmentShadingRateKHR = PFN_vkCmdSetFragmentShadingRateKHR( vkGetInstanceProcAddr( instance, "vkCmdSetFragmentShadingRateKHR" ) ); - //=== VK_KHR_present_wait === - vkWaitForPresentKHR = PFN_vkWaitForPresentKHR( vkGetDeviceProcAddr( device, "vkWaitForPresentKHR" ) ); + //=== VK_KHR_dynamic_rendering_local_read === + vkCmdSetRenderingAttachmentLocationsKHR = + PFN_vkCmdSetRenderingAttachmentLocationsKHR( vkGetInstanceProcAddr( instance, "vkCmdSetRenderingAttachmentLocationsKHR" ) ); + vkCmdSetRenderingInputAttachmentIndicesKHR = + PFN_vkCmdSetRenderingInputAttachmentIndicesKHR( vkGetInstanceProcAddr( instance, "vkCmdSetRenderingInputAttachmentIndicesKHR" ) ); + + //=== VK_EXT_buffer_device_address === + vkGetBufferDeviceAddressEXT = PFN_vkGetBufferDeviceAddressEXT( vkGetInstanceProcAddr( instance, "vkGetBufferDeviceAddressEXT" ) ); + if ( !vkGetBufferDeviceAddress ) + vkGetBufferDeviceAddress = vkGetBufferDeviceAddressEXT; + + //=== VK_EXT_tooling_info === + vkGetPhysicalDeviceToolPropertiesEXT = + PFN_vkGetPhysicalDeviceToolPropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceToolPropertiesEXT" ) ); + if ( !vkGetPhysicalDeviceToolProperties ) + vkGetPhysicalDeviceToolProperties = vkGetPhysicalDeviceToolPropertiesEXT; + + //=== VK_KHR_present_wait === + vkWaitForPresentKHR = PFN_vkWaitForPresentKHR( vkGetInstanceProcAddr( instance, "vkWaitForPresentKHR" ) ); + + //=== VK_NV_cooperative_matrix === + vkGetPhysicalDeviceCooperativeMatrixPropertiesNV = + PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCooperativeMatrixPropertiesNV" ) ); + + //=== VK_NV_coverage_reduction_mode === + vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV = PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( + vkGetInstanceProcAddr( instance, 
"vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV" ) ); #if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_EXT_full_screen_exclusive === - vkAcquireFullScreenExclusiveModeEXT = PFN_vkAcquireFullScreenExclusiveModeEXT( vkGetDeviceProcAddr( device, "vkAcquireFullScreenExclusiveModeEXT" ) ); - vkReleaseFullScreenExclusiveModeEXT = PFN_vkReleaseFullScreenExclusiveModeEXT( vkGetDeviceProcAddr( device, "vkReleaseFullScreenExclusiveModeEXT" ) ); - vkGetDeviceGroupSurfacePresentModes2EXT = - PFN_vkGetDeviceGroupSurfacePresentModes2EXT( vkGetDeviceProcAddr( device, "vkGetDeviceGroupSurfacePresentModes2EXT" ) ); + //=== VK_EXT_full_screen_exclusive === + vkGetPhysicalDeviceSurfacePresentModes2EXT = + PFN_vkGetPhysicalDeviceSurfacePresentModes2EXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfacePresentModes2EXT" ) ); + vkAcquireFullScreenExclusiveModeEXT = + PFN_vkAcquireFullScreenExclusiveModeEXT( vkGetInstanceProcAddr( instance, "vkAcquireFullScreenExclusiveModeEXT" ) ); + vkReleaseFullScreenExclusiveModeEXT = + PFN_vkReleaseFullScreenExclusiveModeEXT( vkGetInstanceProcAddr( instance, "vkReleaseFullScreenExclusiveModeEXT" ) ); + vkGetDeviceGroupSurfacePresentModes2EXT = + PFN_vkGetDeviceGroupSurfacePresentModes2EXT( vkGetInstanceProcAddr( instance, "vkGetDeviceGroupSurfacePresentModes2EXT" ) ); #endif /*VK_USE_PLATFORM_WIN32_KHR*/ - //=== VK_KHR_buffer_device_address === - vkGetBufferDeviceAddressKHR = PFN_vkGetBufferDeviceAddressKHR( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddressKHR" ) ); - if ( !vkGetBufferDeviceAddress ) - vkGetBufferDeviceAddress = vkGetBufferDeviceAddressKHR; - vkGetBufferOpaqueCaptureAddressKHR = PFN_vkGetBufferOpaqueCaptureAddressKHR( vkGetDeviceProcAddr( device, "vkGetBufferOpaqueCaptureAddressKHR" ) ); - if ( !vkGetBufferOpaqueCaptureAddress ) - vkGetBufferOpaqueCaptureAddress = vkGetBufferOpaqueCaptureAddressKHR; - vkGetDeviceMemoryOpaqueCaptureAddressKHR = - PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR( vkGetDeviceProcAddr( device, "vkGetDeviceMemoryOpaqueCaptureAddressKHR" ) ); - if ( !vkGetDeviceMemoryOpaqueCaptureAddress ) - vkGetDeviceMemoryOpaqueCaptureAddress = vkGetDeviceMemoryOpaqueCaptureAddressKHR; + //=== VK_EXT_headless_surface === + vkCreateHeadlessSurfaceEXT = PFN_vkCreateHeadlessSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateHeadlessSurfaceEXT" ) ); - //=== VK_EXT_line_rasterization === - vkCmdSetLineStippleEXT = PFN_vkCmdSetLineStippleEXT( vkGetDeviceProcAddr( device, "vkCmdSetLineStippleEXT" ) ); - if ( !vkCmdSetLineStippleKHR ) - vkCmdSetLineStippleKHR = vkCmdSetLineStippleEXT; + //=== VK_KHR_buffer_device_address === + vkGetBufferDeviceAddressKHR = PFN_vkGetBufferDeviceAddressKHR( vkGetInstanceProcAddr( instance, "vkGetBufferDeviceAddressKHR" ) ); + if ( !vkGetBufferDeviceAddress ) + vkGetBufferDeviceAddress = vkGetBufferDeviceAddressKHR; + vkGetBufferOpaqueCaptureAddressKHR = PFN_vkGetBufferOpaqueCaptureAddressKHR( vkGetInstanceProcAddr( instance, "vkGetBufferOpaqueCaptureAddressKHR" ) ); + if ( !vkGetBufferOpaqueCaptureAddress ) + vkGetBufferOpaqueCaptureAddress = vkGetBufferOpaqueCaptureAddressKHR; + vkGetDeviceMemoryOpaqueCaptureAddressKHR = + PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceMemoryOpaqueCaptureAddressKHR" ) ); + if ( !vkGetDeviceMemoryOpaqueCaptureAddress ) + vkGetDeviceMemoryOpaqueCaptureAddress = vkGetDeviceMemoryOpaqueCaptureAddressKHR; - //=== VK_EXT_host_query_reset === - vkResetQueryPoolEXT = PFN_vkResetQueryPoolEXT( vkGetDeviceProcAddr( 
device, "vkResetQueryPoolEXT" ) ); - if ( !vkResetQueryPool ) - vkResetQueryPool = vkResetQueryPoolEXT; + //=== VK_EXT_line_rasterization === + vkCmdSetLineStippleEXT = PFN_vkCmdSetLineStippleEXT( vkGetInstanceProcAddr( instance, "vkCmdSetLineStippleEXT" ) ); + if ( !vkCmdSetLineStippleKHR ) + vkCmdSetLineStippleKHR = vkCmdSetLineStippleEXT; - //=== VK_EXT_extended_dynamic_state === - vkCmdSetCullModeEXT = PFN_vkCmdSetCullModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetCullModeEXT" ) ); - if ( !vkCmdSetCullMode ) - vkCmdSetCullMode = vkCmdSetCullModeEXT; - vkCmdSetFrontFaceEXT = PFN_vkCmdSetFrontFaceEXT( vkGetDeviceProcAddr( device, "vkCmdSetFrontFaceEXT" ) ); - if ( !vkCmdSetFrontFace ) - vkCmdSetFrontFace = vkCmdSetFrontFaceEXT; - vkCmdSetPrimitiveTopologyEXT = PFN_vkCmdSetPrimitiveTopologyEXT( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveTopologyEXT" ) ); - if ( !vkCmdSetPrimitiveTopology ) - vkCmdSetPrimitiveTopology = vkCmdSetPrimitiveTopologyEXT; - vkCmdSetViewportWithCountEXT = PFN_vkCmdSetViewportWithCountEXT( vkGetDeviceProcAddr( device, "vkCmdSetViewportWithCountEXT" ) ); - if ( !vkCmdSetViewportWithCount ) - vkCmdSetViewportWithCount = vkCmdSetViewportWithCountEXT; - vkCmdSetScissorWithCountEXT = PFN_vkCmdSetScissorWithCountEXT( vkGetDeviceProcAddr( device, "vkCmdSetScissorWithCountEXT" ) ); - if ( !vkCmdSetScissorWithCount ) - vkCmdSetScissorWithCount = vkCmdSetScissorWithCountEXT; - vkCmdBindVertexBuffers2EXT = PFN_vkCmdBindVertexBuffers2EXT( vkGetDeviceProcAddr( device, "vkCmdBindVertexBuffers2EXT" ) ); - if ( !vkCmdBindVertexBuffers2 ) - vkCmdBindVertexBuffers2 = vkCmdBindVertexBuffers2EXT; - vkCmdSetDepthTestEnableEXT = PFN_vkCmdSetDepthTestEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthTestEnableEXT" ) ); - if ( !vkCmdSetDepthTestEnable ) - vkCmdSetDepthTestEnable = vkCmdSetDepthTestEnableEXT; - vkCmdSetDepthWriteEnableEXT = PFN_vkCmdSetDepthWriteEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthWriteEnableEXT" ) ); - if ( !vkCmdSetDepthWriteEnable ) - vkCmdSetDepthWriteEnable = vkCmdSetDepthWriteEnableEXT; - vkCmdSetDepthCompareOpEXT = PFN_vkCmdSetDepthCompareOpEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthCompareOpEXT" ) ); - if ( !vkCmdSetDepthCompareOp ) - vkCmdSetDepthCompareOp = vkCmdSetDepthCompareOpEXT; - vkCmdSetDepthBoundsTestEnableEXT = PFN_vkCmdSetDepthBoundsTestEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthBoundsTestEnableEXT" ) ); - if ( !vkCmdSetDepthBoundsTestEnable ) - vkCmdSetDepthBoundsTestEnable = vkCmdSetDepthBoundsTestEnableEXT; - vkCmdSetStencilTestEnableEXT = PFN_vkCmdSetStencilTestEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetStencilTestEnableEXT" ) ); - if ( !vkCmdSetStencilTestEnable ) - vkCmdSetStencilTestEnable = vkCmdSetStencilTestEnableEXT; - vkCmdSetStencilOpEXT = PFN_vkCmdSetStencilOpEXT( vkGetDeviceProcAddr( device, "vkCmdSetStencilOpEXT" ) ); - if ( !vkCmdSetStencilOp ) - vkCmdSetStencilOp = vkCmdSetStencilOpEXT; + //=== VK_EXT_host_query_reset === + vkResetQueryPoolEXT = PFN_vkResetQueryPoolEXT( vkGetInstanceProcAddr( instance, "vkResetQueryPoolEXT" ) ); + if ( !vkResetQueryPool ) + vkResetQueryPool = vkResetQueryPoolEXT; - //=== VK_KHR_deferred_host_operations === - vkCreateDeferredOperationKHR = PFN_vkCreateDeferredOperationKHR( vkGetDeviceProcAddr( device, "vkCreateDeferredOperationKHR" ) ); - vkDestroyDeferredOperationKHR = PFN_vkDestroyDeferredOperationKHR( vkGetDeviceProcAddr( device, "vkDestroyDeferredOperationKHR" ) ); - vkGetDeferredOperationMaxConcurrencyKHR = - 
PFN_vkGetDeferredOperationMaxConcurrencyKHR( vkGetDeviceProcAddr( device, "vkGetDeferredOperationMaxConcurrencyKHR" ) ); - vkGetDeferredOperationResultKHR = PFN_vkGetDeferredOperationResultKHR( vkGetDeviceProcAddr( device, "vkGetDeferredOperationResultKHR" ) ); - vkDeferredOperationJoinKHR = PFN_vkDeferredOperationJoinKHR( vkGetDeviceProcAddr( device, "vkDeferredOperationJoinKHR" ) ); + //=== VK_EXT_extended_dynamic_state === + vkCmdSetCullModeEXT = PFN_vkCmdSetCullModeEXT( vkGetInstanceProcAddr( instance, "vkCmdSetCullModeEXT" ) ); + if ( !vkCmdSetCullMode ) + vkCmdSetCullMode = vkCmdSetCullModeEXT; + vkCmdSetFrontFaceEXT = PFN_vkCmdSetFrontFaceEXT( vkGetInstanceProcAddr( instance, "vkCmdSetFrontFaceEXT" ) ); + if ( !vkCmdSetFrontFace ) + vkCmdSetFrontFace = vkCmdSetFrontFaceEXT; + vkCmdSetPrimitiveTopologyEXT = PFN_vkCmdSetPrimitiveTopologyEXT( vkGetInstanceProcAddr( instance, "vkCmdSetPrimitiveTopologyEXT" ) ); + if ( !vkCmdSetPrimitiveTopology ) + vkCmdSetPrimitiveTopology = vkCmdSetPrimitiveTopologyEXT; + vkCmdSetViewportWithCountEXT = PFN_vkCmdSetViewportWithCountEXT( vkGetInstanceProcAddr( instance, "vkCmdSetViewportWithCountEXT" ) ); + if ( !vkCmdSetViewportWithCount ) + vkCmdSetViewportWithCount = vkCmdSetViewportWithCountEXT; + vkCmdSetScissorWithCountEXT = PFN_vkCmdSetScissorWithCountEXT( vkGetInstanceProcAddr( instance, "vkCmdSetScissorWithCountEXT" ) ); + if ( !vkCmdSetScissorWithCount ) + vkCmdSetScissorWithCount = vkCmdSetScissorWithCountEXT; + vkCmdBindVertexBuffers2EXT = PFN_vkCmdBindVertexBuffers2EXT( vkGetInstanceProcAddr( instance, "vkCmdBindVertexBuffers2EXT" ) ); + if ( !vkCmdBindVertexBuffers2 ) + vkCmdBindVertexBuffers2 = vkCmdBindVertexBuffers2EXT; + vkCmdSetDepthTestEnableEXT = PFN_vkCmdSetDepthTestEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthTestEnableEXT" ) ); + if ( !vkCmdSetDepthTestEnable ) + vkCmdSetDepthTestEnable = vkCmdSetDepthTestEnableEXT; + vkCmdSetDepthWriteEnableEXT = PFN_vkCmdSetDepthWriteEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthWriteEnableEXT" ) ); + if ( !vkCmdSetDepthWriteEnable ) + vkCmdSetDepthWriteEnable = vkCmdSetDepthWriteEnableEXT; + vkCmdSetDepthCompareOpEXT = PFN_vkCmdSetDepthCompareOpEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthCompareOpEXT" ) ); + if ( !vkCmdSetDepthCompareOp ) + vkCmdSetDepthCompareOp = vkCmdSetDepthCompareOpEXT; + vkCmdSetDepthBoundsTestEnableEXT = PFN_vkCmdSetDepthBoundsTestEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBoundsTestEnableEXT" ) ); + if ( !vkCmdSetDepthBoundsTestEnable ) + vkCmdSetDepthBoundsTestEnable = vkCmdSetDepthBoundsTestEnableEXT; + vkCmdSetStencilTestEnableEXT = PFN_vkCmdSetStencilTestEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetStencilTestEnableEXT" ) ); + if ( !vkCmdSetStencilTestEnable ) + vkCmdSetStencilTestEnable = vkCmdSetStencilTestEnableEXT; + vkCmdSetStencilOpEXT = PFN_vkCmdSetStencilOpEXT( vkGetInstanceProcAddr( instance, "vkCmdSetStencilOpEXT" ) ); + if ( !vkCmdSetStencilOp ) + vkCmdSetStencilOp = vkCmdSetStencilOpEXT; - //=== VK_KHR_pipeline_executable_properties === - vkGetPipelineExecutablePropertiesKHR = PFN_vkGetPipelineExecutablePropertiesKHR( vkGetDeviceProcAddr( device, "vkGetPipelineExecutablePropertiesKHR" ) ); - vkGetPipelineExecutableStatisticsKHR = PFN_vkGetPipelineExecutableStatisticsKHR( vkGetDeviceProcAddr( device, "vkGetPipelineExecutableStatisticsKHR" ) ); - vkGetPipelineExecutableInternalRepresentationsKHR = - PFN_vkGetPipelineExecutableInternalRepresentationsKHR( vkGetDeviceProcAddr( device, 
"vkGetPipelineExecutableInternalRepresentationsKHR" ) ); + //=== VK_KHR_deferred_host_operations === + vkCreateDeferredOperationKHR = PFN_vkCreateDeferredOperationKHR( vkGetInstanceProcAddr( instance, "vkCreateDeferredOperationKHR" ) ); + vkDestroyDeferredOperationKHR = PFN_vkDestroyDeferredOperationKHR( vkGetInstanceProcAddr( instance, "vkDestroyDeferredOperationKHR" ) ); + vkGetDeferredOperationMaxConcurrencyKHR = + PFN_vkGetDeferredOperationMaxConcurrencyKHR( vkGetInstanceProcAddr( instance, "vkGetDeferredOperationMaxConcurrencyKHR" ) ); + vkGetDeferredOperationResultKHR = PFN_vkGetDeferredOperationResultKHR( vkGetInstanceProcAddr( instance, "vkGetDeferredOperationResultKHR" ) ); + vkDeferredOperationJoinKHR = PFN_vkDeferredOperationJoinKHR( vkGetInstanceProcAddr( instance, "vkDeferredOperationJoinKHR" ) ); - //=== VK_EXT_host_image_copy === - vkCopyMemoryToImageEXT = PFN_vkCopyMemoryToImageEXT( vkGetDeviceProcAddr( device, "vkCopyMemoryToImageEXT" ) ); - vkCopyImageToMemoryEXT = PFN_vkCopyImageToMemoryEXT( vkGetDeviceProcAddr( device, "vkCopyImageToMemoryEXT" ) ); - vkCopyImageToImageEXT = PFN_vkCopyImageToImageEXT( vkGetDeviceProcAddr( device, "vkCopyImageToImageEXT" ) ); - vkTransitionImageLayoutEXT = PFN_vkTransitionImageLayoutEXT( vkGetDeviceProcAddr( device, "vkTransitionImageLayoutEXT" ) ); - vkGetImageSubresourceLayout2EXT = PFN_vkGetImageSubresourceLayout2EXT( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout2EXT" ) ); - if ( !vkGetImageSubresourceLayout2KHR ) - vkGetImageSubresourceLayout2KHR = vkGetImageSubresourceLayout2EXT; + //=== VK_KHR_pipeline_executable_properties === + vkGetPipelineExecutablePropertiesKHR = + PFN_vkGetPipelineExecutablePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPipelineExecutablePropertiesKHR" ) ); + vkGetPipelineExecutableStatisticsKHR = + PFN_vkGetPipelineExecutableStatisticsKHR( vkGetInstanceProcAddr( instance, "vkGetPipelineExecutableStatisticsKHR" ) ); + vkGetPipelineExecutableInternalRepresentationsKHR = + PFN_vkGetPipelineExecutableInternalRepresentationsKHR( vkGetInstanceProcAddr( instance, "vkGetPipelineExecutableInternalRepresentationsKHR" ) ); - //=== VK_KHR_map_memory2 === - vkMapMemory2KHR = PFN_vkMapMemory2KHR( vkGetDeviceProcAddr( device, "vkMapMemory2KHR" ) ); - vkUnmapMemory2KHR = PFN_vkUnmapMemory2KHR( vkGetDeviceProcAddr( device, "vkUnmapMemory2KHR" ) ); + //=== VK_EXT_host_image_copy === + vkCopyMemoryToImageEXT = PFN_vkCopyMemoryToImageEXT( vkGetInstanceProcAddr( instance, "vkCopyMemoryToImageEXT" ) ); + vkCopyImageToMemoryEXT = PFN_vkCopyImageToMemoryEXT( vkGetInstanceProcAddr( instance, "vkCopyImageToMemoryEXT" ) ); + vkCopyImageToImageEXT = PFN_vkCopyImageToImageEXT( vkGetInstanceProcAddr( instance, "vkCopyImageToImageEXT" ) ); + vkTransitionImageLayoutEXT = PFN_vkTransitionImageLayoutEXT( vkGetInstanceProcAddr( instance, "vkTransitionImageLayoutEXT" ) ); + vkGetImageSubresourceLayout2EXT = PFN_vkGetImageSubresourceLayout2EXT( vkGetInstanceProcAddr( instance, "vkGetImageSubresourceLayout2EXT" ) ); + if ( !vkGetImageSubresourceLayout2KHR ) + vkGetImageSubresourceLayout2KHR = vkGetImageSubresourceLayout2EXT; - //=== VK_EXT_swapchain_maintenance1 === - vkReleaseSwapchainImagesEXT = PFN_vkReleaseSwapchainImagesEXT( vkGetDeviceProcAddr( device, "vkReleaseSwapchainImagesEXT" ) ); + //=== VK_KHR_map_memory2 === + vkMapMemory2KHR = PFN_vkMapMemory2KHR( vkGetInstanceProcAddr( instance, "vkMapMemory2KHR" ) ); + vkUnmapMemory2KHR = PFN_vkUnmapMemory2KHR( vkGetInstanceProcAddr( instance, "vkUnmapMemory2KHR" ) ); - //=== 
VK_NV_device_generated_commands === - vkGetGeneratedCommandsMemoryRequirementsNV = - PFN_vkGetGeneratedCommandsMemoryRequirementsNV( vkGetDeviceProcAddr( device, "vkGetGeneratedCommandsMemoryRequirementsNV" ) ); - vkCmdPreprocessGeneratedCommandsNV = PFN_vkCmdPreprocessGeneratedCommandsNV( vkGetDeviceProcAddr( device, "vkCmdPreprocessGeneratedCommandsNV" ) ); - vkCmdExecuteGeneratedCommandsNV = PFN_vkCmdExecuteGeneratedCommandsNV( vkGetDeviceProcAddr( device, "vkCmdExecuteGeneratedCommandsNV" ) ); - vkCmdBindPipelineShaderGroupNV = PFN_vkCmdBindPipelineShaderGroupNV( vkGetDeviceProcAddr( device, "vkCmdBindPipelineShaderGroupNV" ) ); - vkCreateIndirectCommandsLayoutNV = PFN_vkCreateIndirectCommandsLayoutNV( vkGetDeviceProcAddr( device, "vkCreateIndirectCommandsLayoutNV" ) ); - vkDestroyIndirectCommandsLayoutNV = PFN_vkDestroyIndirectCommandsLayoutNV( vkGetDeviceProcAddr( device, "vkDestroyIndirectCommandsLayoutNV" ) ); + //=== VK_EXT_swapchain_maintenance1 === + vkReleaseSwapchainImagesEXT = PFN_vkReleaseSwapchainImagesEXT( vkGetInstanceProcAddr( instance, "vkReleaseSwapchainImagesEXT" ) ); - //=== VK_EXT_depth_bias_control === - vkCmdSetDepthBias2EXT = PFN_vkCmdSetDepthBias2EXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthBias2EXT" ) ); + //=== VK_NV_device_generated_commands === + vkGetGeneratedCommandsMemoryRequirementsNV = + PFN_vkGetGeneratedCommandsMemoryRequirementsNV( vkGetInstanceProcAddr( instance, "vkGetGeneratedCommandsMemoryRequirementsNV" ) ); + vkCmdPreprocessGeneratedCommandsNV = PFN_vkCmdPreprocessGeneratedCommandsNV( vkGetInstanceProcAddr( instance, "vkCmdPreprocessGeneratedCommandsNV" ) ); + vkCmdExecuteGeneratedCommandsNV = PFN_vkCmdExecuteGeneratedCommandsNV( vkGetInstanceProcAddr( instance, "vkCmdExecuteGeneratedCommandsNV" ) ); + vkCmdBindPipelineShaderGroupNV = PFN_vkCmdBindPipelineShaderGroupNV( vkGetInstanceProcAddr( instance, "vkCmdBindPipelineShaderGroupNV" ) ); + vkCreateIndirectCommandsLayoutNV = PFN_vkCreateIndirectCommandsLayoutNV( vkGetInstanceProcAddr( instance, "vkCreateIndirectCommandsLayoutNV" ) ); + vkDestroyIndirectCommandsLayoutNV = PFN_vkDestroyIndirectCommandsLayoutNV( vkGetInstanceProcAddr( instance, "vkDestroyIndirectCommandsLayoutNV" ) ); - //=== VK_EXT_private_data === - vkCreatePrivateDataSlotEXT = PFN_vkCreatePrivateDataSlotEXT( vkGetDeviceProcAddr( device, "vkCreatePrivateDataSlotEXT" ) ); - if ( !vkCreatePrivateDataSlot ) - vkCreatePrivateDataSlot = vkCreatePrivateDataSlotEXT; - vkDestroyPrivateDataSlotEXT = PFN_vkDestroyPrivateDataSlotEXT( vkGetDeviceProcAddr( device, "vkDestroyPrivateDataSlotEXT" ) ); - if ( !vkDestroyPrivateDataSlot ) - vkDestroyPrivateDataSlot = vkDestroyPrivateDataSlotEXT; - vkSetPrivateDataEXT = PFN_vkSetPrivateDataEXT( vkGetDeviceProcAddr( device, "vkSetPrivateDataEXT" ) ); - if ( !vkSetPrivateData ) - vkSetPrivateData = vkSetPrivateDataEXT; - vkGetPrivateDataEXT = PFN_vkGetPrivateDataEXT( vkGetDeviceProcAddr( device, "vkGetPrivateDataEXT" ) ); - if ( !vkGetPrivateData ) - vkGetPrivateData = vkGetPrivateDataEXT; + //=== VK_EXT_depth_bias_control === + vkCmdSetDepthBias2EXT = PFN_vkCmdSetDepthBias2EXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBias2EXT" ) ); - //=== VK_KHR_video_encode_queue === - vkGetEncodedVideoSessionParametersKHR = - PFN_vkGetEncodedVideoSessionParametersKHR( vkGetDeviceProcAddr( device, "vkGetEncodedVideoSessionParametersKHR" ) ); - vkCmdEncodeVideoKHR = PFN_vkCmdEncodeVideoKHR( vkGetDeviceProcAddr( device, "vkCmdEncodeVideoKHR" ) ); + //=== VK_EXT_acquire_drm_display === + 
vkAcquireDrmDisplayEXT = PFN_vkAcquireDrmDisplayEXT( vkGetInstanceProcAddr( instance, "vkAcquireDrmDisplayEXT" ) ); + vkGetDrmDisplayEXT = PFN_vkGetDrmDisplayEXT( vkGetInstanceProcAddr( instance, "vkGetDrmDisplayEXT" ) ); + + //=== VK_EXT_private_data === + vkCreatePrivateDataSlotEXT = PFN_vkCreatePrivateDataSlotEXT( vkGetInstanceProcAddr( instance, "vkCreatePrivateDataSlotEXT" ) ); + if ( !vkCreatePrivateDataSlot ) + vkCreatePrivateDataSlot = vkCreatePrivateDataSlotEXT; + vkDestroyPrivateDataSlotEXT = PFN_vkDestroyPrivateDataSlotEXT( vkGetInstanceProcAddr( instance, "vkDestroyPrivateDataSlotEXT" ) ); + if ( !vkDestroyPrivateDataSlot ) + vkDestroyPrivateDataSlot = vkDestroyPrivateDataSlotEXT; + vkSetPrivateDataEXT = PFN_vkSetPrivateDataEXT( vkGetInstanceProcAddr( instance, "vkSetPrivateDataEXT" ) ); + if ( !vkSetPrivateData ) + vkSetPrivateData = vkSetPrivateDataEXT; + vkGetPrivateDataEXT = PFN_vkGetPrivateDataEXT( vkGetInstanceProcAddr( instance, "vkGetPrivateDataEXT" ) ); + if ( !vkGetPrivateData ) + vkGetPrivateData = vkGetPrivateDataEXT; + + //=== VK_KHR_video_encode_queue === + vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR = PFN_vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR" ) ); + vkGetEncodedVideoSessionParametersKHR = + PFN_vkGetEncodedVideoSessionParametersKHR( vkGetInstanceProcAddr( instance, "vkGetEncodedVideoSessionParametersKHR" ) ); + vkCmdEncodeVideoKHR = PFN_vkCmdEncodeVideoKHR( vkGetInstanceProcAddr( instance, "vkCmdEncodeVideoKHR" ) ); #if defined( VK_ENABLE_BETA_EXTENSIONS ) - //=== VK_NV_cuda_kernel_launch === - vkCreateCudaModuleNV = PFN_vkCreateCudaModuleNV( vkGetDeviceProcAddr( device, "vkCreateCudaModuleNV" ) ); - vkGetCudaModuleCacheNV = PFN_vkGetCudaModuleCacheNV( vkGetDeviceProcAddr( device, "vkGetCudaModuleCacheNV" ) ); - vkCreateCudaFunctionNV = PFN_vkCreateCudaFunctionNV( vkGetDeviceProcAddr( device, "vkCreateCudaFunctionNV" ) ); - vkDestroyCudaModuleNV = PFN_vkDestroyCudaModuleNV( vkGetDeviceProcAddr( device, "vkDestroyCudaModuleNV" ) ); - vkDestroyCudaFunctionNV = PFN_vkDestroyCudaFunctionNV( vkGetDeviceProcAddr( device, "vkDestroyCudaFunctionNV" ) ); - vkCmdCudaLaunchKernelNV = PFN_vkCmdCudaLaunchKernelNV( vkGetDeviceProcAddr( device, "vkCmdCudaLaunchKernelNV" ) ); + //=== VK_NV_cuda_kernel_launch === + vkCreateCudaModuleNV = PFN_vkCreateCudaModuleNV( vkGetInstanceProcAddr( instance, "vkCreateCudaModuleNV" ) ); + vkGetCudaModuleCacheNV = PFN_vkGetCudaModuleCacheNV( vkGetInstanceProcAddr( instance, "vkGetCudaModuleCacheNV" ) ); + vkCreateCudaFunctionNV = PFN_vkCreateCudaFunctionNV( vkGetInstanceProcAddr( instance, "vkCreateCudaFunctionNV" ) ); + vkDestroyCudaModuleNV = PFN_vkDestroyCudaModuleNV( vkGetInstanceProcAddr( instance, "vkDestroyCudaModuleNV" ) ); + vkDestroyCudaFunctionNV = PFN_vkDestroyCudaFunctionNV( vkGetInstanceProcAddr( instance, "vkDestroyCudaFunctionNV" ) ); + vkCmdCudaLaunchKernelNV = PFN_vkCmdCudaLaunchKernelNV( vkGetInstanceProcAddr( instance, "vkCmdCudaLaunchKernelNV" ) ); #endif /*VK_ENABLE_BETA_EXTENSIONS*/ #if defined( VK_USE_PLATFORM_METAL_EXT ) - //=== VK_EXT_metal_objects === - vkExportMetalObjectsEXT = PFN_vkExportMetalObjectsEXT( vkGetDeviceProcAddr( device, "vkExportMetalObjectsEXT" ) ); + //=== VK_EXT_metal_objects === + vkExportMetalObjectsEXT = PFN_vkExportMetalObjectsEXT( vkGetInstanceProcAddr( instance, "vkExportMetalObjectsEXT" ) ); #endif /*VK_USE_PLATFORM_METAL_EXT*/ - //=== VK_KHR_synchronization2 === - 
vkCmdSetEvent2KHR = PFN_vkCmdSetEvent2KHR( vkGetDeviceProcAddr( device, "vkCmdSetEvent2KHR" ) ); - if ( !vkCmdSetEvent2 ) - vkCmdSetEvent2 = vkCmdSetEvent2KHR; - vkCmdResetEvent2KHR = PFN_vkCmdResetEvent2KHR( vkGetDeviceProcAddr( device, "vkCmdResetEvent2KHR" ) ); - if ( !vkCmdResetEvent2 ) - vkCmdResetEvent2 = vkCmdResetEvent2KHR; - vkCmdWaitEvents2KHR = PFN_vkCmdWaitEvents2KHR( vkGetDeviceProcAddr( device, "vkCmdWaitEvents2KHR" ) ); - if ( !vkCmdWaitEvents2 ) - vkCmdWaitEvents2 = vkCmdWaitEvents2KHR; - vkCmdPipelineBarrier2KHR = PFN_vkCmdPipelineBarrier2KHR( vkGetDeviceProcAddr( device, "vkCmdPipelineBarrier2KHR" ) ); - if ( !vkCmdPipelineBarrier2 ) - vkCmdPipelineBarrier2 = vkCmdPipelineBarrier2KHR; - vkCmdWriteTimestamp2KHR = PFN_vkCmdWriteTimestamp2KHR( vkGetDeviceProcAddr( device, "vkCmdWriteTimestamp2KHR" ) ); - if ( !vkCmdWriteTimestamp2 ) - vkCmdWriteTimestamp2 = vkCmdWriteTimestamp2KHR; - vkQueueSubmit2KHR = PFN_vkQueueSubmit2KHR( vkGetDeviceProcAddr( device, "vkQueueSubmit2KHR" ) ); - if ( !vkQueueSubmit2 ) - vkQueueSubmit2 = vkQueueSubmit2KHR; + //=== VK_KHR_synchronization2 === + vkCmdSetEvent2KHR = PFN_vkCmdSetEvent2KHR( vkGetInstanceProcAddr( instance, "vkCmdSetEvent2KHR" ) ); + if ( !vkCmdSetEvent2 ) + vkCmdSetEvent2 = vkCmdSetEvent2KHR; + vkCmdResetEvent2KHR = PFN_vkCmdResetEvent2KHR( vkGetInstanceProcAddr( instance, "vkCmdResetEvent2KHR" ) ); + if ( !vkCmdResetEvent2 ) + vkCmdResetEvent2 = vkCmdResetEvent2KHR; + vkCmdWaitEvents2KHR = PFN_vkCmdWaitEvents2KHR( vkGetInstanceProcAddr( instance, "vkCmdWaitEvents2KHR" ) ); + if ( !vkCmdWaitEvents2 ) + vkCmdWaitEvents2 = vkCmdWaitEvents2KHR; + vkCmdPipelineBarrier2KHR = PFN_vkCmdPipelineBarrier2KHR( vkGetInstanceProcAddr( instance, "vkCmdPipelineBarrier2KHR" ) ); + if ( !vkCmdPipelineBarrier2 ) + vkCmdPipelineBarrier2 = vkCmdPipelineBarrier2KHR; + vkCmdWriteTimestamp2KHR = PFN_vkCmdWriteTimestamp2KHR( vkGetInstanceProcAddr( instance, "vkCmdWriteTimestamp2KHR" ) ); + if ( !vkCmdWriteTimestamp2 ) + vkCmdWriteTimestamp2 = vkCmdWriteTimestamp2KHR; + vkQueueSubmit2KHR = PFN_vkQueueSubmit2KHR( vkGetInstanceProcAddr( instance, "vkQueueSubmit2KHR" ) ); + if ( !vkQueueSubmit2 ) + vkQueueSubmit2 = vkQueueSubmit2KHR; - //=== VK_EXT_descriptor_buffer === - vkGetDescriptorSetLayoutSizeEXT = PFN_vkGetDescriptorSetLayoutSizeEXT( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutSizeEXT" ) ); - vkGetDescriptorSetLayoutBindingOffsetEXT = - PFN_vkGetDescriptorSetLayoutBindingOffsetEXT( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutBindingOffsetEXT" ) ); - vkGetDescriptorEXT = PFN_vkGetDescriptorEXT( vkGetDeviceProcAddr( device, "vkGetDescriptorEXT" ) ); - vkCmdBindDescriptorBuffersEXT = PFN_vkCmdBindDescriptorBuffersEXT( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorBuffersEXT" ) ); - vkCmdSetDescriptorBufferOffsetsEXT = PFN_vkCmdSetDescriptorBufferOffsetsEXT( vkGetDeviceProcAddr( device, "vkCmdSetDescriptorBufferOffsetsEXT" ) ); - vkCmdBindDescriptorBufferEmbeddedSamplersEXT = - PFN_vkCmdBindDescriptorBufferEmbeddedSamplersEXT( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorBufferEmbeddedSamplersEXT" ) ); - vkGetBufferOpaqueCaptureDescriptorDataEXT = - PFN_vkGetBufferOpaqueCaptureDescriptorDataEXT( vkGetDeviceProcAddr( device, "vkGetBufferOpaqueCaptureDescriptorDataEXT" ) ); - vkGetImageOpaqueCaptureDescriptorDataEXT = - PFN_vkGetImageOpaqueCaptureDescriptorDataEXT( vkGetDeviceProcAddr( device, "vkGetImageOpaqueCaptureDescriptorDataEXT" ) ); - vkGetImageViewOpaqueCaptureDescriptorDataEXT = - 
PFN_vkGetImageViewOpaqueCaptureDescriptorDataEXT( vkGetDeviceProcAddr( device, "vkGetImageViewOpaqueCaptureDescriptorDataEXT" ) ); - vkGetSamplerOpaqueCaptureDescriptorDataEXT = - PFN_vkGetSamplerOpaqueCaptureDescriptorDataEXT( vkGetDeviceProcAddr( device, "vkGetSamplerOpaqueCaptureDescriptorDataEXT" ) ); - vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT = PFN_vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT( - vkGetDeviceProcAddr( device, "vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT" ) ); + //=== VK_EXT_descriptor_buffer === + vkGetDescriptorSetLayoutSizeEXT = PFN_vkGetDescriptorSetLayoutSizeEXT( vkGetInstanceProcAddr( instance, "vkGetDescriptorSetLayoutSizeEXT" ) ); + vkGetDescriptorSetLayoutBindingOffsetEXT = + PFN_vkGetDescriptorSetLayoutBindingOffsetEXT( vkGetInstanceProcAddr( instance, "vkGetDescriptorSetLayoutBindingOffsetEXT" ) ); + vkGetDescriptorEXT = PFN_vkGetDescriptorEXT( vkGetInstanceProcAddr( instance, "vkGetDescriptorEXT" ) ); + vkCmdBindDescriptorBuffersEXT = PFN_vkCmdBindDescriptorBuffersEXT( vkGetInstanceProcAddr( instance, "vkCmdBindDescriptorBuffersEXT" ) ); + vkCmdSetDescriptorBufferOffsetsEXT = PFN_vkCmdSetDescriptorBufferOffsetsEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDescriptorBufferOffsetsEXT" ) ); + vkCmdBindDescriptorBufferEmbeddedSamplersEXT = + PFN_vkCmdBindDescriptorBufferEmbeddedSamplersEXT( vkGetInstanceProcAddr( instance, "vkCmdBindDescriptorBufferEmbeddedSamplersEXT" ) ); + vkGetBufferOpaqueCaptureDescriptorDataEXT = + PFN_vkGetBufferOpaqueCaptureDescriptorDataEXT( vkGetInstanceProcAddr( instance, "vkGetBufferOpaqueCaptureDescriptorDataEXT" ) ); + vkGetImageOpaqueCaptureDescriptorDataEXT = + PFN_vkGetImageOpaqueCaptureDescriptorDataEXT( vkGetInstanceProcAddr( instance, "vkGetImageOpaqueCaptureDescriptorDataEXT" ) ); + vkGetImageViewOpaqueCaptureDescriptorDataEXT = + PFN_vkGetImageViewOpaqueCaptureDescriptorDataEXT( vkGetInstanceProcAddr( instance, "vkGetImageViewOpaqueCaptureDescriptorDataEXT" ) ); + vkGetSamplerOpaqueCaptureDescriptorDataEXT = + PFN_vkGetSamplerOpaqueCaptureDescriptorDataEXT( vkGetInstanceProcAddr( instance, "vkGetSamplerOpaqueCaptureDescriptorDataEXT" ) ); + vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT = PFN_vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT( + vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT" ) ); - //=== VK_NV_fragment_shading_rate_enums === - vkCmdSetFragmentShadingRateEnumNV = PFN_vkCmdSetFragmentShadingRateEnumNV( vkGetDeviceProcAddr( device, "vkCmdSetFragmentShadingRateEnumNV" ) ); + //=== VK_NV_fragment_shading_rate_enums === + vkCmdSetFragmentShadingRateEnumNV = PFN_vkCmdSetFragmentShadingRateEnumNV( vkGetInstanceProcAddr( instance, "vkCmdSetFragmentShadingRateEnumNV" ) ); - //=== VK_EXT_mesh_shader === - vkCmdDrawMeshTasksEXT = PFN_vkCmdDrawMeshTasksEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksEXT" ) ); - vkCmdDrawMeshTasksIndirectEXT = PFN_vkCmdDrawMeshTasksIndirectEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectEXT" ) ); - vkCmdDrawMeshTasksIndirectCountEXT = PFN_vkCmdDrawMeshTasksIndirectCountEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectCountEXT" ) ); + //=== VK_EXT_mesh_shader === + vkCmdDrawMeshTasksEXT = PFN_vkCmdDrawMeshTasksEXT( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksEXT" ) ); + vkCmdDrawMeshTasksIndirectEXT = PFN_vkCmdDrawMeshTasksIndirectEXT( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksIndirectEXT" ) ); + vkCmdDrawMeshTasksIndirectCountEXT = 
PFN_vkCmdDrawMeshTasksIndirectCountEXT( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksIndirectCountEXT" ) ); - //=== VK_KHR_copy_commands2 === - vkCmdCopyBuffer2KHR = PFN_vkCmdCopyBuffer2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyBuffer2KHR" ) ); - if ( !vkCmdCopyBuffer2 ) - vkCmdCopyBuffer2 = vkCmdCopyBuffer2KHR; - vkCmdCopyImage2KHR = PFN_vkCmdCopyImage2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyImage2KHR" ) ); - if ( !vkCmdCopyImage2 ) - vkCmdCopyImage2 = vkCmdCopyImage2KHR; - vkCmdCopyBufferToImage2KHR = PFN_vkCmdCopyBufferToImage2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyBufferToImage2KHR" ) ); - if ( !vkCmdCopyBufferToImage2 ) - vkCmdCopyBufferToImage2 = vkCmdCopyBufferToImage2KHR; - vkCmdCopyImageToBuffer2KHR = PFN_vkCmdCopyImageToBuffer2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyImageToBuffer2KHR" ) ); - if ( !vkCmdCopyImageToBuffer2 ) - vkCmdCopyImageToBuffer2 = vkCmdCopyImageToBuffer2KHR; - vkCmdBlitImage2KHR = PFN_vkCmdBlitImage2KHR( vkGetDeviceProcAddr( device, "vkCmdBlitImage2KHR" ) ); - if ( !vkCmdBlitImage2 ) - vkCmdBlitImage2 = vkCmdBlitImage2KHR; - vkCmdResolveImage2KHR = PFN_vkCmdResolveImage2KHR( vkGetDeviceProcAddr( device, "vkCmdResolveImage2KHR" ) ); - if ( !vkCmdResolveImage2 ) - vkCmdResolveImage2 = vkCmdResolveImage2KHR; + //=== VK_KHR_copy_commands2 === + vkCmdCopyBuffer2KHR = PFN_vkCmdCopyBuffer2KHR( vkGetInstanceProcAddr( instance, "vkCmdCopyBuffer2KHR" ) ); + if ( !vkCmdCopyBuffer2 ) + vkCmdCopyBuffer2 = vkCmdCopyBuffer2KHR; + vkCmdCopyImage2KHR = PFN_vkCmdCopyImage2KHR( vkGetInstanceProcAddr( instance, "vkCmdCopyImage2KHR" ) ); + if ( !vkCmdCopyImage2 ) + vkCmdCopyImage2 = vkCmdCopyImage2KHR; + vkCmdCopyBufferToImage2KHR = PFN_vkCmdCopyBufferToImage2KHR( vkGetInstanceProcAddr( instance, "vkCmdCopyBufferToImage2KHR" ) ); + if ( !vkCmdCopyBufferToImage2 ) + vkCmdCopyBufferToImage2 = vkCmdCopyBufferToImage2KHR; + vkCmdCopyImageToBuffer2KHR = PFN_vkCmdCopyImageToBuffer2KHR( vkGetInstanceProcAddr( instance, "vkCmdCopyImageToBuffer2KHR" ) ); + if ( !vkCmdCopyImageToBuffer2 ) + vkCmdCopyImageToBuffer2 = vkCmdCopyImageToBuffer2KHR; + vkCmdBlitImage2KHR = PFN_vkCmdBlitImage2KHR( vkGetInstanceProcAddr( instance, "vkCmdBlitImage2KHR" ) ); + if ( !vkCmdBlitImage2 ) + vkCmdBlitImage2 = vkCmdBlitImage2KHR; + vkCmdResolveImage2KHR = PFN_vkCmdResolveImage2KHR( vkGetInstanceProcAddr( instance, "vkCmdResolveImage2KHR" ) ); + if ( !vkCmdResolveImage2 ) + vkCmdResolveImage2 = vkCmdResolveImage2KHR; - //=== VK_EXT_device_fault === - vkGetDeviceFaultInfoEXT = PFN_vkGetDeviceFaultInfoEXT( vkGetDeviceProcAddr( device, "vkGetDeviceFaultInfoEXT" ) ); + //=== VK_EXT_device_fault === + vkGetDeviceFaultInfoEXT = PFN_vkGetDeviceFaultInfoEXT( vkGetInstanceProcAddr( instance, "vkGetDeviceFaultInfoEXT" ) ); - //=== VK_EXT_vertex_input_dynamic_state === - vkCmdSetVertexInputEXT = PFN_vkCmdSetVertexInputEXT( vkGetDeviceProcAddr( device, "vkCmdSetVertexInputEXT" ) ); +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_NV_acquire_winrt_display === + vkAcquireWinrtDisplayNV = PFN_vkAcquireWinrtDisplayNV( vkGetInstanceProcAddr( instance, "vkAcquireWinrtDisplayNV" ) ); + vkGetWinrtDisplayNV = PFN_vkGetWinrtDisplayNV( vkGetInstanceProcAddr( instance, "vkGetWinrtDisplayNV" ) ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +#if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) + //=== VK_EXT_directfb_surface === + vkCreateDirectFBSurfaceEXT = PFN_vkCreateDirectFBSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateDirectFBSurfaceEXT" ) ); + vkGetPhysicalDeviceDirectFBPresentationSupportEXT = + 
PFN_vkGetPhysicalDeviceDirectFBPresentationSupportEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDirectFBPresentationSupportEXT" ) ); +#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ + + //=== VK_EXT_vertex_input_dynamic_state === + vkCmdSetVertexInputEXT = PFN_vkCmdSetVertexInputEXT( vkGetInstanceProcAddr( instance, "vkCmdSetVertexInputEXT" ) ); #if defined( VK_USE_PLATFORM_FUCHSIA ) - //=== VK_FUCHSIA_external_memory === - vkGetMemoryZirconHandleFUCHSIA = PFN_vkGetMemoryZirconHandleFUCHSIA( vkGetDeviceProcAddr( device, "vkGetMemoryZirconHandleFUCHSIA" ) ); - vkGetMemoryZirconHandlePropertiesFUCHSIA = - PFN_vkGetMemoryZirconHandlePropertiesFUCHSIA( vkGetDeviceProcAddr( device, "vkGetMemoryZirconHandlePropertiesFUCHSIA" ) ); + //=== VK_FUCHSIA_external_memory === + vkGetMemoryZirconHandleFUCHSIA = PFN_vkGetMemoryZirconHandleFUCHSIA( vkGetInstanceProcAddr( instance, "vkGetMemoryZirconHandleFUCHSIA" ) ); + vkGetMemoryZirconHandlePropertiesFUCHSIA = + PFN_vkGetMemoryZirconHandlePropertiesFUCHSIA( vkGetInstanceProcAddr( instance, "vkGetMemoryZirconHandlePropertiesFUCHSIA" ) ); #endif /*VK_USE_PLATFORM_FUCHSIA*/ #if defined( VK_USE_PLATFORM_FUCHSIA ) - //=== VK_FUCHSIA_external_semaphore === - vkImportSemaphoreZirconHandleFUCHSIA = PFN_vkImportSemaphoreZirconHandleFUCHSIA( vkGetDeviceProcAddr( device, "vkImportSemaphoreZirconHandleFUCHSIA" ) ); - vkGetSemaphoreZirconHandleFUCHSIA = PFN_vkGetSemaphoreZirconHandleFUCHSIA( vkGetDeviceProcAddr( device, "vkGetSemaphoreZirconHandleFUCHSIA" ) ); + //=== VK_FUCHSIA_external_semaphore === + vkImportSemaphoreZirconHandleFUCHSIA = + PFN_vkImportSemaphoreZirconHandleFUCHSIA( vkGetInstanceProcAddr( instance, "vkImportSemaphoreZirconHandleFUCHSIA" ) ); + vkGetSemaphoreZirconHandleFUCHSIA = PFN_vkGetSemaphoreZirconHandleFUCHSIA( vkGetInstanceProcAddr( instance, "vkGetSemaphoreZirconHandleFUCHSIA" ) ); #endif /*VK_USE_PLATFORM_FUCHSIA*/ #if defined( VK_USE_PLATFORM_FUCHSIA ) - //=== VK_FUCHSIA_buffer_collection === - vkCreateBufferCollectionFUCHSIA = PFN_vkCreateBufferCollectionFUCHSIA( vkGetDeviceProcAddr( device, "vkCreateBufferCollectionFUCHSIA" ) ); - vkSetBufferCollectionImageConstraintsFUCHSIA = - PFN_vkSetBufferCollectionImageConstraintsFUCHSIA( vkGetDeviceProcAddr( device, "vkSetBufferCollectionImageConstraintsFUCHSIA" ) ); - vkSetBufferCollectionBufferConstraintsFUCHSIA = - PFN_vkSetBufferCollectionBufferConstraintsFUCHSIA( vkGetDeviceProcAddr( device, "vkSetBufferCollectionBufferConstraintsFUCHSIA" ) ); - vkDestroyBufferCollectionFUCHSIA = PFN_vkDestroyBufferCollectionFUCHSIA( vkGetDeviceProcAddr( device, "vkDestroyBufferCollectionFUCHSIA" ) ); - vkGetBufferCollectionPropertiesFUCHSIA = - PFN_vkGetBufferCollectionPropertiesFUCHSIA( vkGetDeviceProcAddr( device, "vkGetBufferCollectionPropertiesFUCHSIA" ) ); + //=== VK_FUCHSIA_buffer_collection === + vkCreateBufferCollectionFUCHSIA = PFN_vkCreateBufferCollectionFUCHSIA( vkGetInstanceProcAddr( instance, "vkCreateBufferCollectionFUCHSIA" ) ); + vkSetBufferCollectionImageConstraintsFUCHSIA = + PFN_vkSetBufferCollectionImageConstraintsFUCHSIA( vkGetInstanceProcAddr( instance, "vkSetBufferCollectionImageConstraintsFUCHSIA" ) ); + vkSetBufferCollectionBufferConstraintsFUCHSIA = + PFN_vkSetBufferCollectionBufferConstraintsFUCHSIA( vkGetInstanceProcAddr( instance, "vkSetBufferCollectionBufferConstraintsFUCHSIA" ) ); + vkDestroyBufferCollectionFUCHSIA = PFN_vkDestroyBufferCollectionFUCHSIA( vkGetInstanceProcAddr( instance, "vkDestroyBufferCollectionFUCHSIA" ) ); + vkGetBufferCollectionPropertiesFUCHSIA = + 
PFN_vkGetBufferCollectionPropertiesFUCHSIA( vkGetInstanceProcAddr( instance, "vkGetBufferCollectionPropertiesFUCHSIA" ) ); #endif /*VK_USE_PLATFORM_FUCHSIA*/ - //=== VK_HUAWEI_subpass_shading === - vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI = - PFN_vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI( vkGetDeviceProcAddr( device, "vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI" ) ); - vkCmdSubpassShadingHUAWEI = PFN_vkCmdSubpassShadingHUAWEI( vkGetDeviceProcAddr( device, "vkCmdSubpassShadingHUAWEI" ) ); + //=== VK_HUAWEI_subpass_shading === + vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI = + PFN_vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI( vkGetInstanceProcAddr( instance, "vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI" ) ); + vkCmdSubpassShadingHUAWEI = PFN_vkCmdSubpassShadingHUAWEI( vkGetInstanceProcAddr( instance, "vkCmdSubpassShadingHUAWEI" ) ); - //=== VK_HUAWEI_invocation_mask === - vkCmdBindInvocationMaskHUAWEI = PFN_vkCmdBindInvocationMaskHUAWEI( vkGetDeviceProcAddr( device, "vkCmdBindInvocationMaskHUAWEI" ) ); + //=== VK_HUAWEI_invocation_mask === + vkCmdBindInvocationMaskHUAWEI = PFN_vkCmdBindInvocationMaskHUAWEI( vkGetInstanceProcAddr( instance, "vkCmdBindInvocationMaskHUAWEI" ) ); - //=== VK_NV_external_memory_rdma === - vkGetMemoryRemoteAddressNV = PFN_vkGetMemoryRemoteAddressNV( vkGetDeviceProcAddr( device, "vkGetMemoryRemoteAddressNV" ) ); + //=== VK_NV_external_memory_rdma === + vkGetMemoryRemoteAddressNV = PFN_vkGetMemoryRemoteAddressNV( vkGetInstanceProcAddr( instance, "vkGetMemoryRemoteAddressNV" ) ); - //=== VK_EXT_pipeline_properties === - vkGetPipelinePropertiesEXT = PFN_vkGetPipelinePropertiesEXT( vkGetDeviceProcAddr( device, "vkGetPipelinePropertiesEXT" ) ); + //=== VK_EXT_pipeline_properties === + vkGetPipelinePropertiesEXT = PFN_vkGetPipelinePropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetPipelinePropertiesEXT" ) ); - //=== VK_EXT_extended_dynamic_state2 === - vkCmdSetPatchControlPointsEXT = PFN_vkCmdSetPatchControlPointsEXT( vkGetDeviceProcAddr( device, "vkCmdSetPatchControlPointsEXT" ) ); - vkCmdSetRasterizerDiscardEnableEXT = PFN_vkCmdSetRasterizerDiscardEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetRasterizerDiscardEnableEXT" ) ); - if ( !vkCmdSetRasterizerDiscardEnable ) - vkCmdSetRasterizerDiscardEnable = vkCmdSetRasterizerDiscardEnableEXT; - vkCmdSetDepthBiasEnableEXT = PFN_vkCmdSetDepthBiasEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthBiasEnableEXT" ) ); - if ( !vkCmdSetDepthBiasEnable ) - vkCmdSetDepthBiasEnable = vkCmdSetDepthBiasEnableEXT; - vkCmdSetLogicOpEXT = PFN_vkCmdSetLogicOpEXT( vkGetDeviceProcAddr( device, "vkCmdSetLogicOpEXT" ) ); - vkCmdSetPrimitiveRestartEnableEXT = PFN_vkCmdSetPrimitiveRestartEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveRestartEnableEXT" ) ); - if ( !vkCmdSetPrimitiveRestartEnable ) - vkCmdSetPrimitiveRestartEnable = vkCmdSetPrimitiveRestartEnableEXT; - - //=== VK_EXT_color_write_enable === - vkCmdSetColorWriteEnableEXT = PFN_vkCmdSetColorWriteEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorWriteEnableEXT" ) ); - - //=== VK_KHR_ray_tracing_maintenance1 === - vkCmdTraceRaysIndirect2KHR = PFN_vkCmdTraceRaysIndirect2KHR( vkGetDeviceProcAddr( device, "vkCmdTraceRaysIndirect2KHR" ) ); - - //=== VK_EXT_multi_draw === - vkCmdDrawMultiEXT = PFN_vkCmdDrawMultiEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMultiEXT" ) ); - vkCmdDrawMultiIndexedEXT = PFN_vkCmdDrawMultiIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMultiIndexedEXT" ) ); - - //=== VK_EXT_opacity_micromap === - 
vkCreateMicromapEXT = PFN_vkCreateMicromapEXT( vkGetDeviceProcAddr( device, "vkCreateMicromapEXT" ) ); - vkDestroyMicromapEXT = PFN_vkDestroyMicromapEXT( vkGetDeviceProcAddr( device, "vkDestroyMicromapEXT" ) ); - vkCmdBuildMicromapsEXT = PFN_vkCmdBuildMicromapsEXT( vkGetDeviceProcAddr( device, "vkCmdBuildMicromapsEXT" ) ); - vkBuildMicromapsEXT = PFN_vkBuildMicromapsEXT( vkGetDeviceProcAddr( device, "vkBuildMicromapsEXT" ) ); - vkCopyMicromapEXT = PFN_vkCopyMicromapEXT( vkGetDeviceProcAddr( device, "vkCopyMicromapEXT" ) ); - vkCopyMicromapToMemoryEXT = PFN_vkCopyMicromapToMemoryEXT( vkGetDeviceProcAddr( device, "vkCopyMicromapToMemoryEXT" ) ); - vkCopyMemoryToMicromapEXT = PFN_vkCopyMemoryToMicromapEXT( vkGetDeviceProcAddr( device, "vkCopyMemoryToMicromapEXT" ) ); - vkWriteMicromapsPropertiesEXT = PFN_vkWriteMicromapsPropertiesEXT( vkGetDeviceProcAddr( device, "vkWriteMicromapsPropertiesEXT" ) ); - vkCmdCopyMicromapEXT = PFN_vkCmdCopyMicromapEXT( vkGetDeviceProcAddr( device, "vkCmdCopyMicromapEXT" ) ); - vkCmdCopyMicromapToMemoryEXT = PFN_vkCmdCopyMicromapToMemoryEXT( vkGetDeviceProcAddr( device, "vkCmdCopyMicromapToMemoryEXT" ) ); - vkCmdCopyMemoryToMicromapEXT = PFN_vkCmdCopyMemoryToMicromapEXT( vkGetDeviceProcAddr( device, "vkCmdCopyMemoryToMicromapEXT" ) ); - vkCmdWriteMicromapsPropertiesEXT = PFN_vkCmdWriteMicromapsPropertiesEXT( vkGetDeviceProcAddr( device, "vkCmdWriteMicromapsPropertiesEXT" ) ); - vkGetDeviceMicromapCompatibilityEXT = PFN_vkGetDeviceMicromapCompatibilityEXT( vkGetDeviceProcAddr( device, "vkGetDeviceMicromapCompatibilityEXT" ) ); - vkGetMicromapBuildSizesEXT = PFN_vkGetMicromapBuildSizesEXT( vkGetDeviceProcAddr( device, "vkGetMicromapBuildSizesEXT" ) ); - - //=== VK_HUAWEI_cluster_culling_shader === - vkCmdDrawClusterHUAWEI = PFN_vkCmdDrawClusterHUAWEI( vkGetDeviceProcAddr( device, "vkCmdDrawClusterHUAWEI" ) ); - vkCmdDrawClusterIndirectHUAWEI = PFN_vkCmdDrawClusterIndirectHUAWEI( vkGetDeviceProcAddr( device, "vkCmdDrawClusterIndirectHUAWEI" ) ); - - //=== VK_EXT_pageable_device_local_memory === - vkSetDeviceMemoryPriorityEXT = PFN_vkSetDeviceMemoryPriorityEXT( vkGetDeviceProcAddr( device, "vkSetDeviceMemoryPriorityEXT" ) ); - - //=== VK_KHR_maintenance4 === - vkGetDeviceBufferMemoryRequirementsKHR = - PFN_vkGetDeviceBufferMemoryRequirementsKHR( vkGetDeviceProcAddr( device, "vkGetDeviceBufferMemoryRequirementsKHR" ) ); - if ( !vkGetDeviceBufferMemoryRequirements ) - vkGetDeviceBufferMemoryRequirements = vkGetDeviceBufferMemoryRequirementsKHR; - vkGetDeviceImageMemoryRequirementsKHR = - PFN_vkGetDeviceImageMemoryRequirementsKHR( vkGetDeviceProcAddr( device, "vkGetDeviceImageMemoryRequirementsKHR" ) ); - if ( !vkGetDeviceImageMemoryRequirements ) - vkGetDeviceImageMemoryRequirements = vkGetDeviceImageMemoryRequirementsKHR; - vkGetDeviceImageSparseMemoryRequirementsKHR = - PFN_vkGetDeviceImageSparseMemoryRequirementsKHR( vkGetDeviceProcAddr( device, "vkGetDeviceImageSparseMemoryRequirementsKHR" ) ); - if ( !vkGetDeviceImageSparseMemoryRequirements ) - vkGetDeviceImageSparseMemoryRequirements = vkGetDeviceImageSparseMemoryRequirementsKHR; - - //=== VK_VALVE_descriptor_set_host_mapping === - vkGetDescriptorSetLayoutHostMappingInfoVALVE = - PFN_vkGetDescriptorSetLayoutHostMappingInfoVALVE( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutHostMappingInfoVALVE" ) ); - vkGetDescriptorSetHostMappingVALVE = PFN_vkGetDescriptorSetHostMappingVALVE( vkGetDeviceProcAddr( device, "vkGetDescriptorSetHostMappingVALVE" ) ); - - //=== VK_NV_copy_memory_indirect === - 
vkCmdCopyMemoryIndirectNV = PFN_vkCmdCopyMemoryIndirectNV( vkGetDeviceProcAddr( device, "vkCmdCopyMemoryIndirectNV" ) ); - vkCmdCopyMemoryToImageIndirectNV = PFN_vkCmdCopyMemoryToImageIndirectNV( vkGetDeviceProcAddr( device, "vkCmdCopyMemoryToImageIndirectNV" ) ); - - //=== VK_NV_memory_decompression === - vkCmdDecompressMemoryNV = PFN_vkCmdDecompressMemoryNV( vkGetDeviceProcAddr( device, "vkCmdDecompressMemoryNV" ) ); - vkCmdDecompressMemoryIndirectCountNV = PFN_vkCmdDecompressMemoryIndirectCountNV( vkGetDeviceProcAddr( device, "vkCmdDecompressMemoryIndirectCountNV" ) ); - - //=== VK_NV_device_generated_commands_compute === - vkGetPipelineIndirectMemoryRequirementsNV = - PFN_vkGetPipelineIndirectMemoryRequirementsNV( vkGetDeviceProcAddr( device, "vkGetPipelineIndirectMemoryRequirementsNV" ) ); - vkCmdUpdatePipelineIndirectBufferNV = PFN_vkCmdUpdatePipelineIndirectBufferNV( vkGetDeviceProcAddr( device, "vkCmdUpdatePipelineIndirectBufferNV" ) ); - vkGetPipelineIndirectDeviceAddressNV = PFN_vkGetPipelineIndirectDeviceAddressNV( vkGetDeviceProcAddr( device, "vkGetPipelineIndirectDeviceAddressNV" ) ); - - //=== VK_EXT_extended_dynamic_state3 === - vkCmdSetDepthClampEnableEXT = PFN_vkCmdSetDepthClampEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthClampEnableEXT" ) ); - vkCmdSetPolygonModeEXT = PFN_vkCmdSetPolygonModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetPolygonModeEXT" ) ); - vkCmdSetRasterizationSamplesEXT = PFN_vkCmdSetRasterizationSamplesEXT( vkGetDeviceProcAddr( device, "vkCmdSetRasterizationSamplesEXT" ) ); - vkCmdSetSampleMaskEXT = PFN_vkCmdSetSampleMaskEXT( vkGetDeviceProcAddr( device, "vkCmdSetSampleMaskEXT" ) ); - vkCmdSetAlphaToCoverageEnableEXT = PFN_vkCmdSetAlphaToCoverageEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetAlphaToCoverageEnableEXT" ) ); - vkCmdSetAlphaToOneEnableEXT = PFN_vkCmdSetAlphaToOneEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetAlphaToOneEnableEXT" ) ); - vkCmdSetLogicOpEnableEXT = PFN_vkCmdSetLogicOpEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetLogicOpEnableEXT" ) ); - vkCmdSetColorBlendEnableEXT = PFN_vkCmdSetColorBlendEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorBlendEnableEXT" ) ); - vkCmdSetColorBlendEquationEXT = PFN_vkCmdSetColorBlendEquationEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorBlendEquationEXT" ) ); - vkCmdSetColorWriteMaskEXT = PFN_vkCmdSetColorWriteMaskEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorWriteMaskEXT" ) ); - vkCmdSetTessellationDomainOriginEXT = PFN_vkCmdSetTessellationDomainOriginEXT( vkGetDeviceProcAddr( device, "vkCmdSetTessellationDomainOriginEXT" ) ); - vkCmdSetRasterizationStreamEXT = PFN_vkCmdSetRasterizationStreamEXT( vkGetDeviceProcAddr( device, "vkCmdSetRasterizationStreamEXT" ) ); - vkCmdSetConservativeRasterizationModeEXT = - PFN_vkCmdSetConservativeRasterizationModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetConservativeRasterizationModeEXT" ) ); - vkCmdSetExtraPrimitiveOverestimationSizeEXT = - PFN_vkCmdSetExtraPrimitiveOverestimationSizeEXT( vkGetDeviceProcAddr( device, "vkCmdSetExtraPrimitiveOverestimationSizeEXT" ) ); - vkCmdSetDepthClipEnableEXT = PFN_vkCmdSetDepthClipEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthClipEnableEXT" ) ); - vkCmdSetSampleLocationsEnableEXT = PFN_vkCmdSetSampleLocationsEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetSampleLocationsEnableEXT" ) ); - vkCmdSetColorBlendAdvancedEXT = PFN_vkCmdSetColorBlendAdvancedEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorBlendAdvancedEXT" ) ); - vkCmdSetProvokingVertexModeEXT = PFN_vkCmdSetProvokingVertexModeEXT( 
vkGetDeviceProcAddr( device, "vkCmdSetProvokingVertexModeEXT" ) ); - vkCmdSetLineRasterizationModeEXT = PFN_vkCmdSetLineRasterizationModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetLineRasterizationModeEXT" ) ); - vkCmdSetLineStippleEnableEXT = PFN_vkCmdSetLineStippleEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetLineStippleEnableEXT" ) ); - vkCmdSetDepthClipNegativeOneToOneEXT = PFN_vkCmdSetDepthClipNegativeOneToOneEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthClipNegativeOneToOneEXT" ) ); - vkCmdSetViewportWScalingEnableNV = PFN_vkCmdSetViewportWScalingEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportWScalingEnableNV" ) ); - vkCmdSetViewportSwizzleNV = PFN_vkCmdSetViewportSwizzleNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportSwizzleNV" ) ); - vkCmdSetCoverageToColorEnableNV = PFN_vkCmdSetCoverageToColorEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageToColorEnableNV" ) ); - vkCmdSetCoverageToColorLocationNV = PFN_vkCmdSetCoverageToColorLocationNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageToColorLocationNV" ) ); - vkCmdSetCoverageModulationModeNV = PFN_vkCmdSetCoverageModulationModeNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageModulationModeNV" ) ); - vkCmdSetCoverageModulationTableEnableNV = - PFN_vkCmdSetCoverageModulationTableEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageModulationTableEnableNV" ) ); - vkCmdSetCoverageModulationTableNV = PFN_vkCmdSetCoverageModulationTableNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageModulationTableNV" ) ); - vkCmdSetShadingRateImageEnableNV = PFN_vkCmdSetShadingRateImageEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetShadingRateImageEnableNV" ) ); - vkCmdSetRepresentativeFragmentTestEnableNV = - PFN_vkCmdSetRepresentativeFragmentTestEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetRepresentativeFragmentTestEnableNV" ) ); - vkCmdSetCoverageReductionModeNV = PFN_vkCmdSetCoverageReductionModeNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageReductionModeNV" ) ); - - //=== VK_EXT_shader_module_identifier === - vkGetShaderModuleIdentifierEXT = PFN_vkGetShaderModuleIdentifierEXT( vkGetDeviceProcAddr( device, "vkGetShaderModuleIdentifierEXT" ) ); - vkGetShaderModuleCreateInfoIdentifierEXT = - PFN_vkGetShaderModuleCreateInfoIdentifierEXT( vkGetDeviceProcAddr( device, "vkGetShaderModuleCreateInfoIdentifierEXT" ) ); - - //=== VK_NV_optical_flow === - vkCreateOpticalFlowSessionNV = PFN_vkCreateOpticalFlowSessionNV( vkGetDeviceProcAddr( device, "vkCreateOpticalFlowSessionNV" ) ); - vkDestroyOpticalFlowSessionNV = PFN_vkDestroyOpticalFlowSessionNV( vkGetDeviceProcAddr( device, "vkDestroyOpticalFlowSessionNV" ) ); - vkBindOpticalFlowSessionImageNV = PFN_vkBindOpticalFlowSessionImageNV( vkGetDeviceProcAddr( device, "vkBindOpticalFlowSessionImageNV" ) ); - vkCmdOpticalFlowExecuteNV = PFN_vkCmdOpticalFlowExecuteNV( vkGetDeviceProcAddr( device, "vkCmdOpticalFlowExecuteNV" ) ); - - //=== VK_KHR_maintenance5 === - vkCmdBindIndexBuffer2KHR = PFN_vkCmdBindIndexBuffer2KHR( vkGetDeviceProcAddr( device, "vkCmdBindIndexBuffer2KHR" ) ); - vkGetRenderingAreaGranularityKHR = PFN_vkGetRenderingAreaGranularityKHR( vkGetDeviceProcAddr( device, "vkGetRenderingAreaGranularityKHR" ) ); - vkGetDeviceImageSubresourceLayoutKHR = PFN_vkGetDeviceImageSubresourceLayoutKHR( vkGetDeviceProcAddr( device, "vkGetDeviceImageSubresourceLayoutKHR" ) ); - vkGetImageSubresourceLayout2KHR = PFN_vkGetImageSubresourceLayout2KHR( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout2KHR" ) ); - - //=== VK_AMD_anti_lag === - vkAntiLagUpdateAMD = 
PFN_vkAntiLagUpdateAMD( vkGetDeviceProcAddr( device, "vkAntiLagUpdateAMD" ) ); - - //=== VK_EXT_shader_object === - vkCreateShadersEXT = PFN_vkCreateShadersEXT( vkGetDeviceProcAddr( device, "vkCreateShadersEXT" ) ); - vkDestroyShaderEXT = PFN_vkDestroyShaderEXT( vkGetDeviceProcAddr( device, "vkDestroyShaderEXT" ) ); - vkGetShaderBinaryDataEXT = PFN_vkGetShaderBinaryDataEXT( vkGetDeviceProcAddr( device, "vkGetShaderBinaryDataEXT" ) ); - vkCmdBindShadersEXT = PFN_vkCmdBindShadersEXT( vkGetDeviceProcAddr( device, "vkCmdBindShadersEXT" ) ); - vkCmdSetDepthClampRangeEXT = PFN_vkCmdSetDepthClampRangeEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthClampRangeEXT" ) ); - - //=== VK_KHR_pipeline_binary === - vkCreatePipelineBinariesKHR = PFN_vkCreatePipelineBinariesKHR( vkGetDeviceProcAddr( device, "vkCreatePipelineBinariesKHR" ) ); - vkDestroyPipelineBinaryKHR = PFN_vkDestroyPipelineBinaryKHR( vkGetDeviceProcAddr( device, "vkDestroyPipelineBinaryKHR" ) ); - vkGetPipelineKeyKHR = PFN_vkGetPipelineKeyKHR( vkGetDeviceProcAddr( device, "vkGetPipelineKeyKHR" ) ); - vkGetPipelineBinaryDataKHR = PFN_vkGetPipelineBinaryDataKHR( vkGetDeviceProcAddr( device, "vkGetPipelineBinaryDataKHR" ) ); - vkReleaseCapturedPipelineDataKHR = PFN_vkReleaseCapturedPipelineDataKHR( vkGetDeviceProcAddr( device, "vkReleaseCapturedPipelineDataKHR" ) ); - - //=== VK_QCOM_tile_properties === - vkGetFramebufferTilePropertiesQCOM = PFN_vkGetFramebufferTilePropertiesQCOM( vkGetDeviceProcAddr( device, "vkGetFramebufferTilePropertiesQCOM" ) ); - vkGetDynamicRenderingTilePropertiesQCOM = - PFN_vkGetDynamicRenderingTilePropertiesQCOM( vkGetDeviceProcAddr( device, "vkGetDynamicRenderingTilePropertiesQCOM" ) ); - - //=== VK_NV_low_latency2 === - vkSetLatencySleepModeNV = PFN_vkSetLatencySleepModeNV( vkGetDeviceProcAddr( device, "vkSetLatencySleepModeNV" ) ); - vkLatencySleepNV = PFN_vkLatencySleepNV( vkGetDeviceProcAddr( device, "vkLatencySleepNV" ) ); - vkSetLatencyMarkerNV = PFN_vkSetLatencyMarkerNV( vkGetDeviceProcAddr( device, "vkSetLatencyMarkerNV" ) ); - vkGetLatencyTimingsNV = PFN_vkGetLatencyTimingsNV( vkGetDeviceProcAddr( device, "vkGetLatencyTimingsNV" ) ); - vkQueueNotifyOutOfBandNV = PFN_vkQueueNotifyOutOfBandNV( vkGetDeviceProcAddr( device, "vkQueueNotifyOutOfBandNV" ) ); - - //=== VK_EXT_attachment_feedback_loop_dynamic_state === - vkCmdSetAttachmentFeedbackLoopEnableEXT = - PFN_vkCmdSetAttachmentFeedbackLoopEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetAttachmentFeedbackLoopEnableEXT" ) ); + //=== VK_EXT_extended_dynamic_state2 === + vkCmdSetPatchControlPointsEXT = PFN_vkCmdSetPatchControlPointsEXT( vkGetInstanceProcAddr( instance, "vkCmdSetPatchControlPointsEXT" ) ); + vkCmdSetRasterizerDiscardEnableEXT = PFN_vkCmdSetRasterizerDiscardEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetRasterizerDiscardEnableEXT" ) ); + if ( !vkCmdSetRasterizerDiscardEnable ) + vkCmdSetRasterizerDiscardEnable = vkCmdSetRasterizerDiscardEnableEXT; + vkCmdSetDepthBiasEnableEXT = PFN_vkCmdSetDepthBiasEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBiasEnableEXT" ) ); + if ( !vkCmdSetDepthBiasEnable ) + vkCmdSetDepthBiasEnable = vkCmdSetDepthBiasEnableEXT; + vkCmdSetLogicOpEXT = PFN_vkCmdSetLogicOpEXT( vkGetInstanceProcAddr( instance, "vkCmdSetLogicOpEXT" ) ); + vkCmdSetPrimitiveRestartEnableEXT = PFN_vkCmdSetPrimitiveRestartEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetPrimitiveRestartEnableEXT" ) ); + if ( !vkCmdSetPrimitiveRestartEnable ) + vkCmdSetPrimitiveRestartEnable = vkCmdSetPrimitiveRestartEnableEXT; #if defined( 
VK_USE_PLATFORM_SCREEN_QNX ) - //=== VK_QNX_external_memory_screen_buffer === - vkGetScreenBufferPropertiesQNX = PFN_vkGetScreenBufferPropertiesQNX( vkGetDeviceProcAddr( device, "vkGetScreenBufferPropertiesQNX" ) ); + //=== VK_QNX_screen_surface === + vkCreateScreenSurfaceQNX = PFN_vkCreateScreenSurfaceQNX( vkGetInstanceProcAddr( instance, "vkCreateScreenSurfaceQNX" ) ); + vkGetPhysicalDeviceScreenPresentationSupportQNX = + PFN_vkGetPhysicalDeviceScreenPresentationSupportQNX( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceScreenPresentationSupportQNX" ) ); #endif /*VK_USE_PLATFORM_SCREEN_QNX*/ - //=== VK_KHR_line_rasterization === - vkCmdSetLineStippleKHR = PFN_vkCmdSetLineStippleKHR( vkGetDeviceProcAddr( device, "vkCmdSetLineStippleKHR" ) ); + //=== VK_EXT_color_write_enable === + vkCmdSetColorWriteEnableEXT = PFN_vkCmdSetColorWriteEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetColorWriteEnableEXT" ) ); - //=== VK_KHR_calibrated_timestamps === - vkGetCalibratedTimestampsKHR = PFN_vkGetCalibratedTimestampsKHR( vkGetDeviceProcAddr( device, "vkGetCalibratedTimestampsKHR" ) ); + //=== VK_KHR_ray_tracing_maintenance1 === + vkCmdTraceRaysIndirect2KHR = PFN_vkCmdTraceRaysIndirect2KHR( vkGetInstanceProcAddr( instance, "vkCmdTraceRaysIndirect2KHR" ) ); - //=== VK_KHR_maintenance6 === - vkCmdBindDescriptorSets2KHR = PFN_vkCmdBindDescriptorSets2KHR( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorSets2KHR" ) ); - vkCmdPushConstants2KHR = PFN_vkCmdPushConstants2KHR( vkGetDeviceProcAddr( device, "vkCmdPushConstants2KHR" ) ); - vkCmdPushDescriptorSet2KHR = PFN_vkCmdPushDescriptorSet2KHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSet2KHR" ) ); - vkCmdPushDescriptorSetWithTemplate2KHR = - PFN_vkCmdPushDescriptorSetWithTemplate2KHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetWithTemplate2KHR" ) ); - vkCmdSetDescriptorBufferOffsets2EXT = PFN_vkCmdSetDescriptorBufferOffsets2EXT( vkGetDeviceProcAddr( device, "vkCmdSetDescriptorBufferOffsets2EXT" ) ); - vkCmdBindDescriptorBufferEmbeddedSamplers2EXT = - PFN_vkCmdBindDescriptorBufferEmbeddedSamplers2EXT( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorBufferEmbeddedSamplers2EXT" ) ); + //=== VK_EXT_multi_draw === + vkCmdDrawMultiEXT = PFN_vkCmdDrawMultiEXT( vkGetInstanceProcAddr( instance, "vkCmdDrawMultiEXT" ) ); + vkCmdDrawMultiIndexedEXT = PFN_vkCmdDrawMultiIndexedEXT( vkGetInstanceProcAddr( instance, "vkCmdDrawMultiIndexedEXT" ) ); - //=== VK_EXT_device_generated_commands === - vkGetGeneratedCommandsMemoryRequirementsEXT = - PFN_vkGetGeneratedCommandsMemoryRequirementsEXT( vkGetDeviceProcAddr( device, "vkGetGeneratedCommandsMemoryRequirementsEXT" ) ); - vkCmdPreprocessGeneratedCommandsEXT = PFN_vkCmdPreprocessGeneratedCommandsEXT( vkGetDeviceProcAddr( device, "vkCmdPreprocessGeneratedCommandsEXT" ) ); - vkCmdExecuteGeneratedCommandsEXT = PFN_vkCmdExecuteGeneratedCommandsEXT( vkGetDeviceProcAddr( device, "vkCmdExecuteGeneratedCommandsEXT" ) ); - vkCreateIndirectCommandsLayoutEXT = PFN_vkCreateIndirectCommandsLayoutEXT( vkGetDeviceProcAddr( device, "vkCreateIndirectCommandsLayoutEXT" ) ); - vkDestroyIndirectCommandsLayoutEXT = PFN_vkDestroyIndirectCommandsLayoutEXT( vkGetDeviceProcAddr( device, "vkDestroyIndirectCommandsLayoutEXT" ) ); - vkCreateIndirectExecutionSetEXT = PFN_vkCreateIndirectExecutionSetEXT( vkGetDeviceProcAddr( device, "vkCreateIndirectExecutionSetEXT" ) ); - vkDestroyIndirectExecutionSetEXT = PFN_vkDestroyIndirectExecutionSetEXT( vkGetDeviceProcAddr( device, "vkDestroyIndirectExecutionSetEXT" ) ); - 
vkUpdateIndirectExecutionSetPipelineEXT = - PFN_vkUpdateIndirectExecutionSetPipelineEXT( vkGetDeviceProcAddr( device, "vkUpdateIndirectExecutionSetPipelineEXT" ) ); - vkUpdateIndirectExecutionSetShaderEXT = - PFN_vkUpdateIndirectExecutionSetShaderEXT( vkGetDeviceProcAddr( device, "vkUpdateIndirectExecutionSetShaderEXT" ) ); - } + //=== VK_EXT_opacity_micromap === + vkCreateMicromapEXT = PFN_vkCreateMicromapEXT( vkGetInstanceProcAddr( instance, "vkCreateMicromapEXT" ) ); + vkDestroyMicromapEXT = PFN_vkDestroyMicromapEXT( vkGetInstanceProcAddr( instance, "vkDestroyMicromapEXT" ) ); + vkCmdBuildMicromapsEXT = PFN_vkCmdBuildMicromapsEXT( vkGetInstanceProcAddr( instance, "vkCmdBuildMicromapsEXT" ) ); + vkBuildMicromapsEXT = PFN_vkBuildMicromapsEXT( vkGetInstanceProcAddr( instance, "vkBuildMicromapsEXT" ) ); + vkCopyMicromapEXT = PFN_vkCopyMicromapEXT( vkGetInstanceProcAddr( instance, "vkCopyMicromapEXT" ) ); + vkCopyMicromapToMemoryEXT = PFN_vkCopyMicromapToMemoryEXT( vkGetInstanceProcAddr( instance, "vkCopyMicromapToMemoryEXT" ) ); + vkCopyMemoryToMicromapEXT = PFN_vkCopyMemoryToMicromapEXT( vkGetInstanceProcAddr( instance, "vkCopyMemoryToMicromapEXT" ) ); + vkWriteMicromapsPropertiesEXT = PFN_vkWriteMicromapsPropertiesEXT( vkGetInstanceProcAddr( instance, "vkWriteMicromapsPropertiesEXT" ) ); + vkCmdCopyMicromapEXT = PFN_vkCmdCopyMicromapEXT( vkGetInstanceProcAddr( instance, "vkCmdCopyMicromapEXT" ) ); + vkCmdCopyMicromapToMemoryEXT = PFN_vkCmdCopyMicromapToMemoryEXT( vkGetInstanceProcAddr( instance, "vkCmdCopyMicromapToMemoryEXT" ) ); + vkCmdCopyMemoryToMicromapEXT = PFN_vkCmdCopyMemoryToMicromapEXT( vkGetInstanceProcAddr( instance, "vkCmdCopyMemoryToMicromapEXT" ) ); + vkCmdWriteMicromapsPropertiesEXT = PFN_vkCmdWriteMicromapsPropertiesEXT( vkGetInstanceProcAddr( instance, "vkCmdWriteMicromapsPropertiesEXT" ) ); + vkGetDeviceMicromapCompatibilityEXT = + PFN_vkGetDeviceMicromapCompatibilityEXT( vkGetInstanceProcAddr( instance, "vkGetDeviceMicromapCompatibilityEXT" ) ); + vkGetMicromapBuildSizesEXT = PFN_vkGetMicromapBuildSizesEXT( vkGetInstanceProcAddr( instance, "vkGetMicromapBuildSizesEXT" ) ); - template - void init( VULKAN_HPP_NAMESPACE::Instance const & instance, VULKAN_HPP_NAMESPACE::Device const & device, DynamicLoader const & dl ) VULKAN_HPP_NOEXCEPT - { - PFN_vkGetInstanceProcAddr getInstanceProcAddr = dl.template getProcAddress( "vkGetInstanceProcAddr" ); - PFN_vkGetDeviceProcAddr getDeviceProcAddr = dl.template getProcAddress( "vkGetDeviceProcAddr" ); - init( static_cast( instance ), getInstanceProcAddr, static_cast( device ), device ? 
getDeviceProcAddr : nullptr ); - } + //=== VK_HUAWEI_cluster_culling_shader === + vkCmdDrawClusterHUAWEI = PFN_vkCmdDrawClusterHUAWEI( vkGetInstanceProcAddr( instance, "vkCmdDrawClusterHUAWEI" ) ); + vkCmdDrawClusterIndirectHUAWEI = PFN_vkCmdDrawClusterIndirectHUAWEI( vkGetInstanceProcAddr( instance, "vkCmdDrawClusterIndirectHUAWEI" ) ); - template ( deviceCpp ); + + //=== VK_VERSION_1_0 === + vkGetDeviceProcAddr = PFN_vkGetDeviceProcAddr( vkGetDeviceProcAddr( device, "vkGetDeviceProcAddr" ) ); + vkDestroyDevice = PFN_vkDestroyDevice( vkGetDeviceProcAddr( device, "vkDestroyDevice" ) ); + vkGetDeviceQueue = PFN_vkGetDeviceQueue( vkGetDeviceProcAddr( device, "vkGetDeviceQueue" ) ); + vkQueueSubmit = PFN_vkQueueSubmit( vkGetDeviceProcAddr( device, "vkQueueSubmit" ) ); + vkQueueWaitIdle = PFN_vkQueueWaitIdle( vkGetDeviceProcAddr( device, "vkQueueWaitIdle" ) ); + vkDeviceWaitIdle = PFN_vkDeviceWaitIdle( vkGetDeviceProcAddr( device, "vkDeviceWaitIdle" ) ); + vkAllocateMemory = PFN_vkAllocateMemory( vkGetDeviceProcAddr( device, "vkAllocateMemory" ) ); + vkFreeMemory = PFN_vkFreeMemory( vkGetDeviceProcAddr( device, "vkFreeMemory" ) ); + vkMapMemory = PFN_vkMapMemory( vkGetDeviceProcAddr( device, "vkMapMemory" ) ); + vkUnmapMemory = PFN_vkUnmapMemory( vkGetDeviceProcAddr( device, "vkUnmapMemory" ) ); + vkFlushMappedMemoryRanges = PFN_vkFlushMappedMemoryRanges( vkGetDeviceProcAddr( device, "vkFlushMappedMemoryRanges" ) ); + vkInvalidateMappedMemoryRanges = PFN_vkInvalidateMappedMemoryRanges( vkGetDeviceProcAddr( device, "vkInvalidateMappedMemoryRanges" ) ); + vkGetDeviceMemoryCommitment = PFN_vkGetDeviceMemoryCommitment( vkGetDeviceProcAddr( device, "vkGetDeviceMemoryCommitment" ) ); + vkBindBufferMemory = PFN_vkBindBufferMemory( vkGetDeviceProcAddr( device, "vkBindBufferMemory" ) ); + vkBindImageMemory = PFN_vkBindImageMemory( vkGetDeviceProcAddr( device, "vkBindImageMemory" ) ); + vkGetBufferMemoryRequirements = PFN_vkGetBufferMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements" ) ); + vkGetImageMemoryRequirements = PFN_vkGetImageMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements" ) ); + vkGetImageSparseMemoryRequirements = PFN_vkGetImageSparseMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements" ) ); + vkQueueBindSparse = PFN_vkQueueBindSparse( vkGetDeviceProcAddr( device, "vkQueueBindSparse" ) ); + vkCreateFence = PFN_vkCreateFence( vkGetDeviceProcAddr( device, "vkCreateFence" ) ); + vkDestroyFence = PFN_vkDestroyFence( vkGetDeviceProcAddr( device, "vkDestroyFence" ) ); + vkResetFences = PFN_vkResetFences( vkGetDeviceProcAddr( device, "vkResetFences" ) ); + vkGetFenceStatus = PFN_vkGetFenceStatus( vkGetDeviceProcAddr( device, "vkGetFenceStatus" ) ); + vkWaitForFences = PFN_vkWaitForFences( vkGetDeviceProcAddr( device, "vkWaitForFences" ) ); + vkCreateSemaphore = PFN_vkCreateSemaphore( vkGetDeviceProcAddr( device, "vkCreateSemaphore" ) ); + vkDestroySemaphore = PFN_vkDestroySemaphore( vkGetDeviceProcAddr( device, "vkDestroySemaphore" ) ); + vkCreateEvent = PFN_vkCreateEvent( vkGetDeviceProcAddr( device, "vkCreateEvent" ) ); + vkDestroyEvent = PFN_vkDestroyEvent( vkGetDeviceProcAddr( device, "vkDestroyEvent" ) ); + vkGetEventStatus = PFN_vkGetEventStatus( vkGetDeviceProcAddr( device, "vkGetEventStatus" ) ); + vkSetEvent = PFN_vkSetEvent( vkGetDeviceProcAddr( device, "vkSetEvent" ) ); + vkResetEvent = PFN_vkResetEvent( vkGetDeviceProcAddr( device, "vkResetEvent" ) ); + vkCreateQueryPool = PFN_vkCreateQueryPool( 
vkGetDeviceProcAddr( device, "vkCreateQueryPool" ) ); + vkDestroyQueryPool = PFN_vkDestroyQueryPool( vkGetDeviceProcAddr( device, "vkDestroyQueryPool" ) ); + vkGetQueryPoolResults = PFN_vkGetQueryPoolResults( vkGetDeviceProcAddr( device, "vkGetQueryPoolResults" ) ); + vkCreateBuffer = PFN_vkCreateBuffer( vkGetDeviceProcAddr( device, "vkCreateBuffer" ) ); + vkDestroyBuffer = PFN_vkDestroyBuffer( vkGetDeviceProcAddr( device, "vkDestroyBuffer" ) ); + vkCreateBufferView = PFN_vkCreateBufferView( vkGetDeviceProcAddr( device, "vkCreateBufferView" ) ); + vkDestroyBufferView = PFN_vkDestroyBufferView( vkGetDeviceProcAddr( device, "vkDestroyBufferView" ) ); + vkCreateImage = PFN_vkCreateImage( vkGetDeviceProcAddr( device, "vkCreateImage" ) ); + vkDestroyImage = PFN_vkDestroyImage( vkGetDeviceProcAddr( device, "vkDestroyImage" ) ); + vkGetImageSubresourceLayout = PFN_vkGetImageSubresourceLayout( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout" ) ); + vkCreateImageView = PFN_vkCreateImageView( vkGetDeviceProcAddr( device, "vkCreateImageView" ) ); + vkDestroyImageView = PFN_vkDestroyImageView( vkGetDeviceProcAddr( device, "vkDestroyImageView" ) ); + vkCreateShaderModule = PFN_vkCreateShaderModule( vkGetDeviceProcAddr( device, "vkCreateShaderModule" ) ); + vkDestroyShaderModule = PFN_vkDestroyShaderModule( vkGetDeviceProcAddr( device, "vkDestroyShaderModule" ) ); + vkCreatePipelineCache = PFN_vkCreatePipelineCache( vkGetDeviceProcAddr( device, "vkCreatePipelineCache" ) ); + vkDestroyPipelineCache = PFN_vkDestroyPipelineCache( vkGetDeviceProcAddr( device, "vkDestroyPipelineCache" ) ); + vkGetPipelineCacheData = PFN_vkGetPipelineCacheData( vkGetDeviceProcAddr( device, "vkGetPipelineCacheData" ) ); + vkMergePipelineCaches = PFN_vkMergePipelineCaches( vkGetDeviceProcAddr( device, "vkMergePipelineCaches" ) ); + vkCreateGraphicsPipelines = PFN_vkCreateGraphicsPipelines( vkGetDeviceProcAddr( device, "vkCreateGraphicsPipelines" ) ); + vkCreateComputePipelines = PFN_vkCreateComputePipelines( vkGetDeviceProcAddr( device, "vkCreateComputePipelines" ) ); + vkDestroyPipeline = PFN_vkDestroyPipeline( vkGetDeviceProcAddr( device, "vkDestroyPipeline" ) ); + vkCreatePipelineLayout = PFN_vkCreatePipelineLayout( vkGetDeviceProcAddr( device, "vkCreatePipelineLayout" ) ); + vkDestroyPipelineLayout = PFN_vkDestroyPipelineLayout( vkGetDeviceProcAddr( device, "vkDestroyPipelineLayout" ) ); + vkCreateSampler = PFN_vkCreateSampler( vkGetDeviceProcAddr( device, "vkCreateSampler" ) ); + vkDestroySampler = PFN_vkDestroySampler( vkGetDeviceProcAddr( device, "vkDestroySampler" ) ); + vkCreateDescriptorSetLayout = PFN_vkCreateDescriptorSetLayout( vkGetDeviceProcAddr( device, "vkCreateDescriptorSetLayout" ) ); + vkDestroyDescriptorSetLayout = PFN_vkDestroyDescriptorSetLayout( vkGetDeviceProcAddr( device, "vkDestroyDescriptorSetLayout" ) ); + vkCreateDescriptorPool = PFN_vkCreateDescriptorPool( vkGetDeviceProcAddr( device, "vkCreateDescriptorPool" ) ); + vkDestroyDescriptorPool = PFN_vkDestroyDescriptorPool( vkGetDeviceProcAddr( device, "vkDestroyDescriptorPool" ) ); + vkResetDescriptorPool = PFN_vkResetDescriptorPool( vkGetDeviceProcAddr( device, "vkResetDescriptorPool" ) ); + vkAllocateDescriptorSets = PFN_vkAllocateDescriptorSets( vkGetDeviceProcAddr( device, "vkAllocateDescriptorSets" ) ); + vkFreeDescriptorSets = PFN_vkFreeDescriptorSets( vkGetDeviceProcAddr( device, "vkFreeDescriptorSets" ) ); + vkUpdateDescriptorSets = PFN_vkUpdateDescriptorSets( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSets" ) ); + 
vkCreateFramebuffer = PFN_vkCreateFramebuffer( vkGetDeviceProcAddr( device, "vkCreateFramebuffer" ) ); + vkDestroyFramebuffer = PFN_vkDestroyFramebuffer( vkGetDeviceProcAddr( device, "vkDestroyFramebuffer" ) ); + vkCreateRenderPass = PFN_vkCreateRenderPass( vkGetDeviceProcAddr( device, "vkCreateRenderPass" ) ); + vkDestroyRenderPass = PFN_vkDestroyRenderPass( vkGetDeviceProcAddr( device, "vkDestroyRenderPass" ) ); + vkGetRenderAreaGranularity = PFN_vkGetRenderAreaGranularity( vkGetDeviceProcAddr( device, "vkGetRenderAreaGranularity" ) ); + vkCreateCommandPool = PFN_vkCreateCommandPool( vkGetDeviceProcAddr( device, "vkCreateCommandPool" ) ); + vkDestroyCommandPool = PFN_vkDestroyCommandPool( vkGetDeviceProcAddr( device, "vkDestroyCommandPool" ) ); + vkResetCommandPool = PFN_vkResetCommandPool( vkGetDeviceProcAddr( device, "vkResetCommandPool" ) ); + vkAllocateCommandBuffers = PFN_vkAllocateCommandBuffers( vkGetDeviceProcAddr( device, "vkAllocateCommandBuffers" ) ); + vkFreeCommandBuffers = PFN_vkFreeCommandBuffers( vkGetDeviceProcAddr( device, "vkFreeCommandBuffers" ) ); + vkBeginCommandBuffer = PFN_vkBeginCommandBuffer( vkGetDeviceProcAddr( device, "vkBeginCommandBuffer" ) ); + vkEndCommandBuffer = PFN_vkEndCommandBuffer( vkGetDeviceProcAddr( device, "vkEndCommandBuffer" ) ); + vkResetCommandBuffer = PFN_vkResetCommandBuffer( vkGetDeviceProcAddr( device, "vkResetCommandBuffer" ) ); + vkCmdBindPipeline = PFN_vkCmdBindPipeline( vkGetDeviceProcAddr( device, "vkCmdBindPipeline" ) ); + vkCmdSetViewport = PFN_vkCmdSetViewport( vkGetDeviceProcAddr( device, "vkCmdSetViewport" ) ); + vkCmdSetScissor = PFN_vkCmdSetScissor( vkGetDeviceProcAddr( device, "vkCmdSetScissor" ) ); + vkCmdSetLineWidth = PFN_vkCmdSetLineWidth( vkGetDeviceProcAddr( device, "vkCmdSetLineWidth" ) ); + vkCmdSetDepthBias = PFN_vkCmdSetDepthBias( vkGetDeviceProcAddr( device, "vkCmdSetDepthBias" ) ); + vkCmdSetBlendConstants = PFN_vkCmdSetBlendConstants( vkGetDeviceProcAddr( device, "vkCmdSetBlendConstants" ) ); + vkCmdSetDepthBounds = PFN_vkCmdSetDepthBounds( vkGetDeviceProcAddr( device, "vkCmdSetDepthBounds" ) ); + vkCmdSetStencilCompareMask = PFN_vkCmdSetStencilCompareMask( vkGetDeviceProcAddr( device, "vkCmdSetStencilCompareMask" ) ); + vkCmdSetStencilWriteMask = PFN_vkCmdSetStencilWriteMask( vkGetDeviceProcAddr( device, "vkCmdSetStencilWriteMask" ) ); + vkCmdSetStencilReference = PFN_vkCmdSetStencilReference( vkGetDeviceProcAddr( device, "vkCmdSetStencilReference" ) ); + vkCmdBindDescriptorSets = PFN_vkCmdBindDescriptorSets( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorSets" ) ); + vkCmdBindIndexBuffer = PFN_vkCmdBindIndexBuffer( vkGetDeviceProcAddr( device, "vkCmdBindIndexBuffer" ) ); + vkCmdBindVertexBuffers = PFN_vkCmdBindVertexBuffers( vkGetDeviceProcAddr( device, "vkCmdBindVertexBuffers" ) ); + vkCmdDraw = PFN_vkCmdDraw( vkGetDeviceProcAddr( device, "vkCmdDraw" ) ); + vkCmdDrawIndexed = PFN_vkCmdDrawIndexed( vkGetDeviceProcAddr( device, "vkCmdDrawIndexed" ) ); + vkCmdDrawIndirect = PFN_vkCmdDrawIndirect( vkGetDeviceProcAddr( device, "vkCmdDrawIndirect" ) ); + vkCmdDrawIndexedIndirect = PFN_vkCmdDrawIndexedIndirect( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirect" ) ); + vkCmdDispatch = PFN_vkCmdDispatch( vkGetDeviceProcAddr( device, "vkCmdDispatch" ) ); + vkCmdDispatchIndirect = PFN_vkCmdDispatchIndirect( vkGetDeviceProcAddr( device, "vkCmdDispatchIndirect" ) ); + vkCmdCopyBuffer = PFN_vkCmdCopyBuffer( vkGetDeviceProcAddr( device, "vkCmdCopyBuffer" ) ); + vkCmdCopyImage = PFN_vkCmdCopyImage( 
vkGetDeviceProcAddr( device, "vkCmdCopyImage" ) ); + vkCmdBlitImage = PFN_vkCmdBlitImage( vkGetDeviceProcAddr( device, "vkCmdBlitImage" ) ); + vkCmdCopyBufferToImage = PFN_vkCmdCopyBufferToImage( vkGetDeviceProcAddr( device, "vkCmdCopyBufferToImage" ) ); + vkCmdCopyImageToBuffer = PFN_vkCmdCopyImageToBuffer( vkGetDeviceProcAddr( device, "vkCmdCopyImageToBuffer" ) ); + vkCmdUpdateBuffer = PFN_vkCmdUpdateBuffer( vkGetDeviceProcAddr( device, "vkCmdUpdateBuffer" ) ); + vkCmdFillBuffer = PFN_vkCmdFillBuffer( vkGetDeviceProcAddr( device, "vkCmdFillBuffer" ) ); + vkCmdClearColorImage = PFN_vkCmdClearColorImage( vkGetDeviceProcAddr( device, "vkCmdClearColorImage" ) ); + vkCmdClearDepthStencilImage = PFN_vkCmdClearDepthStencilImage( vkGetDeviceProcAddr( device, "vkCmdClearDepthStencilImage" ) ); + vkCmdClearAttachments = PFN_vkCmdClearAttachments( vkGetDeviceProcAddr( device, "vkCmdClearAttachments" ) ); + vkCmdResolveImage = PFN_vkCmdResolveImage( vkGetDeviceProcAddr( device, "vkCmdResolveImage" ) ); + vkCmdSetEvent = PFN_vkCmdSetEvent( vkGetDeviceProcAddr( device, "vkCmdSetEvent" ) ); + vkCmdResetEvent = PFN_vkCmdResetEvent( vkGetDeviceProcAddr( device, "vkCmdResetEvent" ) ); + vkCmdWaitEvents = PFN_vkCmdWaitEvents( vkGetDeviceProcAddr( device, "vkCmdWaitEvents" ) ); + vkCmdPipelineBarrier = PFN_vkCmdPipelineBarrier( vkGetDeviceProcAddr( device, "vkCmdPipelineBarrier" ) ); + vkCmdBeginQuery = PFN_vkCmdBeginQuery( vkGetDeviceProcAddr( device, "vkCmdBeginQuery" ) ); + vkCmdEndQuery = PFN_vkCmdEndQuery( vkGetDeviceProcAddr( device, "vkCmdEndQuery" ) ); + vkCmdResetQueryPool = PFN_vkCmdResetQueryPool( vkGetDeviceProcAddr( device, "vkCmdResetQueryPool" ) ); + vkCmdWriteTimestamp = PFN_vkCmdWriteTimestamp( vkGetDeviceProcAddr( device, "vkCmdWriteTimestamp" ) ); + vkCmdCopyQueryPoolResults = PFN_vkCmdCopyQueryPoolResults( vkGetDeviceProcAddr( device, "vkCmdCopyQueryPoolResults" ) ); + vkCmdPushConstants = PFN_vkCmdPushConstants( vkGetDeviceProcAddr( device, "vkCmdPushConstants" ) ); + vkCmdBeginRenderPass = PFN_vkCmdBeginRenderPass( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass" ) ); + vkCmdNextSubpass = PFN_vkCmdNextSubpass( vkGetDeviceProcAddr( device, "vkCmdNextSubpass" ) ); + vkCmdEndRenderPass = PFN_vkCmdEndRenderPass( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass" ) ); + vkCmdExecuteCommands = PFN_vkCmdExecuteCommands( vkGetDeviceProcAddr( device, "vkCmdExecuteCommands" ) ); + + //=== VK_VERSION_1_1 === + vkBindBufferMemory2 = PFN_vkBindBufferMemory2( vkGetDeviceProcAddr( device, "vkBindBufferMemory2" ) ); + vkBindImageMemory2 = PFN_vkBindImageMemory2( vkGetDeviceProcAddr( device, "vkBindImageMemory2" ) ); + vkGetDeviceGroupPeerMemoryFeatures = PFN_vkGetDeviceGroupPeerMemoryFeatures( vkGetDeviceProcAddr( device, "vkGetDeviceGroupPeerMemoryFeatures" ) ); + vkCmdSetDeviceMask = PFN_vkCmdSetDeviceMask( vkGetDeviceProcAddr( device, "vkCmdSetDeviceMask" ) ); + vkCmdDispatchBase = PFN_vkCmdDispatchBase( vkGetDeviceProcAddr( device, "vkCmdDispatchBase" ) ); + vkGetImageMemoryRequirements2 = PFN_vkGetImageMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements2" ) ); + vkGetBufferMemoryRequirements2 = PFN_vkGetBufferMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements2" ) ); + vkGetImageSparseMemoryRequirements2 = PFN_vkGetImageSparseMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements2" ) ); + vkTrimCommandPool = PFN_vkTrimCommandPool( vkGetDeviceProcAddr( device, "vkTrimCommandPool" ) ); + vkGetDeviceQueue2 = 
PFN_vkGetDeviceQueue2( vkGetDeviceProcAddr( device, "vkGetDeviceQueue2" ) ); + vkCreateSamplerYcbcrConversion = PFN_vkCreateSamplerYcbcrConversion( vkGetDeviceProcAddr( device, "vkCreateSamplerYcbcrConversion" ) ); + vkDestroySamplerYcbcrConversion = PFN_vkDestroySamplerYcbcrConversion( vkGetDeviceProcAddr( device, "vkDestroySamplerYcbcrConversion" ) ); + vkCreateDescriptorUpdateTemplate = PFN_vkCreateDescriptorUpdateTemplate( vkGetDeviceProcAddr( device, "vkCreateDescriptorUpdateTemplate" ) ); + vkDestroyDescriptorUpdateTemplate = PFN_vkDestroyDescriptorUpdateTemplate( vkGetDeviceProcAddr( device, "vkDestroyDescriptorUpdateTemplate" ) ); + vkUpdateDescriptorSetWithTemplate = PFN_vkUpdateDescriptorSetWithTemplate( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSetWithTemplate" ) ); + vkGetDescriptorSetLayoutSupport = PFN_vkGetDescriptorSetLayoutSupport( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutSupport" ) ); + + //=== VK_VERSION_1_2 === + vkCmdDrawIndirectCount = PFN_vkCmdDrawIndirectCount( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCount" ) ); + vkCmdDrawIndexedIndirectCount = PFN_vkCmdDrawIndexedIndirectCount( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCount" ) ); + vkCreateRenderPass2 = PFN_vkCreateRenderPass2( vkGetDeviceProcAddr( device, "vkCreateRenderPass2" ) ); + vkCmdBeginRenderPass2 = PFN_vkCmdBeginRenderPass2( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass2" ) ); + vkCmdNextSubpass2 = PFN_vkCmdNextSubpass2( vkGetDeviceProcAddr( device, "vkCmdNextSubpass2" ) ); + vkCmdEndRenderPass2 = PFN_vkCmdEndRenderPass2( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass2" ) ); + vkResetQueryPool = PFN_vkResetQueryPool( vkGetDeviceProcAddr( device, "vkResetQueryPool" ) ); + vkGetSemaphoreCounterValue = PFN_vkGetSemaphoreCounterValue( vkGetDeviceProcAddr( device, "vkGetSemaphoreCounterValue" ) ); + vkWaitSemaphores = PFN_vkWaitSemaphores( vkGetDeviceProcAddr( device, "vkWaitSemaphores" ) ); + vkSignalSemaphore = PFN_vkSignalSemaphore( vkGetDeviceProcAddr( device, "vkSignalSemaphore" ) ); + vkGetBufferDeviceAddress = PFN_vkGetBufferDeviceAddress( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddress" ) ); + vkGetBufferOpaqueCaptureAddress = PFN_vkGetBufferOpaqueCaptureAddress( vkGetDeviceProcAddr( device, "vkGetBufferOpaqueCaptureAddress" ) ); + vkGetDeviceMemoryOpaqueCaptureAddress = + PFN_vkGetDeviceMemoryOpaqueCaptureAddress( vkGetDeviceProcAddr( device, "vkGetDeviceMemoryOpaqueCaptureAddress" ) ); + + //=== VK_VERSION_1_3 === + vkCreatePrivateDataSlot = PFN_vkCreatePrivateDataSlot( vkGetDeviceProcAddr( device, "vkCreatePrivateDataSlot" ) ); + vkDestroyPrivateDataSlot = PFN_vkDestroyPrivateDataSlot( vkGetDeviceProcAddr( device, "vkDestroyPrivateDataSlot" ) ); + vkSetPrivateData = PFN_vkSetPrivateData( vkGetDeviceProcAddr( device, "vkSetPrivateData" ) ); + vkGetPrivateData = PFN_vkGetPrivateData( vkGetDeviceProcAddr( device, "vkGetPrivateData" ) ); + vkCmdSetEvent2 = PFN_vkCmdSetEvent2( vkGetDeviceProcAddr( device, "vkCmdSetEvent2" ) ); + vkCmdResetEvent2 = PFN_vkCmdResetEvent2( vkGetDeviceProcAddr( device, "vkCmdResetEvent2" ) ); + vkCmdWaitEvents2 = PFN_vkCmdWaitEvents2( vkGetDeviceProcAddr( device, "vkCmdWaitEvents2" ) ); + vkCmdPipelineBarrier2 = PFN_vkCmdPipelineBarrier2( vkGetDeviceProcAddr( device, "vkCmdPipelineBarrier2" ) ); + vkCmdWriteTimestamp2 = PFN_vkCmdWriteTimestamp2( vkGetDeviceProcAddr( device, "vkCmdWriteTimestamp2" ) ); + vkQueueSubmit2 = PFN_vkQueueSubmit2( vkGetDeviceProcAddr( device, "vkQueueSubmit2" ) ); + vkCmdCopyBuffer2 = 
PFN_vkCmdCopyBuffer2( vkGetDeviceProcAddr( device, "vkCmdCopyBuffer2" ) ); + vkCmdCopyImage2 = PFN_vkCmdCopyImage2( vkGetDeviceProcAddr( device, "vkCmdCopyImage2" ) ); + vkCmdCopyBufferToImage2 = PFN_vkCmdCopyBufferToImage2( vkGetDeviceProcAddr( device, "vkCmdCopyBufferToImage2" ) ); + vkCmdCopyImageToBuffer2 = PFN_vkCmdCopyImageToBuffer2( vkGetDeviceProcAddr( device, "vkCmdCopyImageToBuffer2" ) ); + vkCmdBlitImage2 = PFN_vkCmdBlitImage2( vkGetDeviceProcAddr( device, "vkCmdBlitImage2" ) ); + vkCmdResolveImage2 = PFN_vkCmdResolveImage2( vkGetDeviceProcAddr( device, "vkCmdResolveImage2" ) ); + vkCmdBeginRendering = PFN_vkCmdBeginRendering( vkGetDeviceProcAddr( device, "vkCmdBeginRendering" ) ); + vkCmdEndRendering = PFN_vkCmdEndRendering( vkGetDeviceProcAddr( device, "vkCmdEndRendering" ) ); + vkCmdSetCullMode = PFN_vkCmdSetCullMode( vkGetDeviceProcAddr( device, "vkCmdSetCullMode" ) ); + vkCmdSetFrontFace = PFN_vkCmdSetFrontFace( vkGetDeviceProcAddr( device, "vkCmdSetFrontFace" ) ); + vkCmdSetPrimitiveTopology = PFN_vkCmdSetPrimitiveTopology( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveTopology" ) ); + vkCmdSetViewportWithCount = PFN_vkCmdSetViewportWithCount( vkGetDeviceProcAddr( device, "vkCmdSetViewportWithCount" ) ); + vkCmdSetScissorWithCount = PFN_vkCmdSetScissorWithCount( vkGetDeviceProcAddr( device, "vkCmdSetScissorWithCount" ) ); + vkCmdBindVertexBuffers2 = PFN_vkCmdBindVertexBuffers2( vkGetDeviceProcAddr( device, "vkCmdBindVertexBuffers2" ) ); + vkCmdSetDepthTestEnable = PFN_vkCmdSetDepthTestEnable( vkGetDeviceProcAddr( device, "vkCmdSetDepthTestEnable" ) ); + vkCmdSetDepthWriteEnable = PFN_vkCmdSetDepthWriteEnable( vkGetDeviceProcAddr( device, "vkCmdSetDepthWriteEnable" ) ); + vkCmdSetDepthCompareOp = PFN_vkCmdSetDepthCompareOp( vkGetDeviceProcAddr( device, "vkCmdSetDepthCompareOp" ) ); + vkCmdSetDepthBoundsTestEnable = PFN_vkCmdSetDepthBoundsTestEnable( vkGetDeviceProcAddr( device, "vkCmdSetDepthBoundsTestEnable" ) ); + vkCmdSetStencilTestEnable = PFN_vkCmdSetStencilTestEnable( vkGetDeviceProcAddr( device, "vkCmdSetStencilTestEnable" ) ); + vkCmdSetStencilOp = PFN_vkCmdSetStencilOp( vkGetDeviceProcAddr( device, "vkCmdSetStencilOp" ) ); + vkCmdSetRasterizerDiscardEnable = PFN_vkCmdSetRasterizerDiscardEnable( vkGetDeviceProcAddr( device, "vkCmdSetRasterizerDiscardEnable" ) ); + vkCmdSetDepthBiasEnable = PFN_vkCmdSetDepthBiasEnable( vkGetDeviceProcAddr( device, "vkCmdSetDepthBiasEnable" ) ); + vkCmdSetPrimitiveRestartEnable = PFN_vkCmdSetPrimitiveRestartEnable( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveRestartEnable" ) ); + vkGetDeviceBufferMemoryRequirements = PFN_vkGetDeviceBufferMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetDeviceBufferMemoryRequirements" ) ); + vkGetDeviceImageMemoryRequirements = PFN_vkGetDeviceImageMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetDeviceImageMemoryRequirements" ) ); + vkGetDeviceImageSparseMemoryRequirements = + PFN_vkGetDeviceImageSparseMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetDeviceImageSparseMemoryRequirements" ) ); + + //=== VK_KHR_swapchain === + vkCreateSwapchainKHR = PFN_vkCreateSwapchainKHR( vkGetDeviceProcAddr( device, "vkCreateSwapchainKHR" ) ); + vkDestroySwapchainKHR = PFN_vkDestroySwapchainKHR( vkGetDeviceProcAddr( device, "vkDestroySwapchainKHR" ) ); + vkGetSwapchainImagesKHR = PFN_vkGetSwapchainImagesKHR( vkGetDeviceProcAddr( device, "vkGetSwapchainImagesKHR" ) ); + vkAcquireNextImageKHR = PFN_vkAcquireNextImageKHR( vkGetDeviceProcAddr( device, "vkAcquireNextImageKHR" ) ); + vkQueuePresentKHR 
= PFN_vkQueuePresentKHR( vkGetDeviceProcAddr( device, "vkQueuePresentKHR" ) ); + vkGetDeviceGroupPresentCapabilitiesKHR = + PFN_vkGetDeviceGroupPresentCapabilitiesKHR( vkGetDeviceProcAddr( device, "vkGetDeviceGroupPresentCapabilitiesKHR" ) ); + vkGetDeviceGroupSurfacePresentModesKHR = + PFN_vkGetDeviceGroupSurfacePresentModesKHR( vkGetDeviceProcAddr( device, "vkGetDeviceGroupSurfacePresentModesKHR" ) ); + vkAcquireNextImage2KHR = PFN_vkAcquireNextImage2KHR( vkGetDeviceProcAddr( device, "vkAcquireNextImage2KHR" ) ); + + //=== VK_KHR_display_swapchain === + vkCreateSharedSwapchainsKHR = PFN_vkCreateSharedSwapchainsKHR( vkGetDeviceProcAddr( device, "vkCreateSharedSwapchainsKHR" ) ); + + //=== VK_EXT_debug_marker === + vkDebugMarkerSetObjectTagEXT = PFN_vkDebugMarkerSetObjectTagEXT( vkGetDeviceProcAddr( device, "vkDebugMarkerSetObjectTagEXT" ) ); + vkDebugMarkerSetObjectNameEXT = PFN_vkDebugMarkerSetObjectNameEXT( vkGetDeviceProcAddr( device, "vkDebugMarkerSetObjectNameEXT" ) ); + vkCmdDebugMarkerBeginEXT = PFN_vkCmdDebugMarkerBeginEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerBeginEXT" ) ); + vkCmdDebugMarkerEndEXT = PFN_vkCmdDebugMarkerEndEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerEndEXT" ) ); + vkCmdDebugMarkerInsertEXT = PFN_vkCmdDebugMarkerInsertEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerInsertEXT" ) ); + + //=== VK_KHR_video_queue === + vkCreateVideoSessionKHR = PFN_vkCreateVideoSessionKHR( vkGetDeviceProcAddr( device, "vkCreateVideoSessionKHR" ) ); + vkDestroyVideoSessionKHR = PFN_vkDestroyVideoSessionKHR( vkGetDeviceProcAddr( device, "vkDestroyVideoSessionKHR" ) ); + vkGetVideoSessionMemoryRequirementsKHR = + PFN_vkGetVideoSessionMemoryRequirementsKHR( vkGetDeviceProcAddr( device, "vkGetVideoSessionMemoryRequirementsKHR" ) ); + vkBindVideoSessionMemoryKHR = PFN_vkBindVideoSessionMemoryKHR( vkGetDeviceProcAddr( device, "vkBindVideoSessionMemoryKHR" ) ); + vkCreateVideoSessionParametersKHR = PFN_vkCreateVideoSessionParametersKHR( vkGetDeviceProcAddr( device, "vkCreateVideoSessionParametersKHR" ) ); + vkUpdateVideoSessionParametersKHR = PFN_vkUpdateVideoSessionParametersKHR( vkGetDeviceProcAddr( device, "vkUpdateVideoSessionParametersKHR" ) ); + vkDestroyVideoSessionParametersKHR = PFN_vkDestroyVideoSessionParametersKHR( vkGetDeviceProcAddr( device, "vkDestroyVideoSessionParametersKHR" ) ); + vkCmdBeginVideoCodingKHR = PFN_vkCmdBeginVideoCodingKHR( vkGetDeviceProcAddr( device, "vkCmdBeginVideoCodingKHR" ) ); + vkCmdEndVideoCodingKHR = PFN_vkCmdEndVideoCodingKHR( vkGetDeviceProcAddr( device, "vkCmdEndVideoCodingKHR" ) ); + vkCmdControlVideoCodingKHR = PFN_vkCmdControlVideoCodingKHR( vkGetDeviceProcAddr( device, "vkCmdControlVideoCodingKHR" ) ); + + //=== VK_KHR_video_decode_queue === + vkCmdDecodeVideoKHR = PFN_vkCmdDecodeVideoKHR( vkGetDeviceProcAddr( device, "vkCmdDecodeVideoKHR" ) ); + + //=== VK_EXT_transform_feedback === + vkCmdBindTransformFeedbackBuffersEXT = + PFN_vkCmdBindTransformFeedbackBuffersEXT( vkGetDeviceProcAddr( device, "vkCmdBindTransformFeedbackBuffersEXT" ) ); + vkCmdBeginTransformFeedbackEXT = PFN_vkCmdBeginTransformFeedbackEXT( vkGetDeviceProcAddr( device, "vkCmdBeginTransformFeedbackEXT" ) ); + vkCmdEndTransformFeedbackEXT = PFN_vkCmdEndTransformFeedbackEXT( vkGetDeviceProcAddr( device, "vkCmdEndTransformFeedbackEXT" ) ); + vkCmdBeginQueryIndexedEXT = PFN_vkCmdBeginQueryIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdBeginQueryIndexedEXT" ) ); + vkCmdEndQueryIndexedEXT = PFN_vkCmdEndQueryIndexedEXT( vkGetDeviceProcAddr( device, 
"vkCmdEndQueryIndexedEXT" ) ); + vkCmdDrawIndirectByteCountEXT = PFN_vkCmdDrawIndirectByteCountEXT( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectByteCountEXT" ) ); + + //=== VK_NVX_binary_import === + vkCreateCuModuleNVX = PFN_vkCreateCuModuleNVX( vkGetDeviceProcAddr( device, "vkCreateCuModuleNVX" ) ); + vkCreateCuFunctionNVX = PFN_vkCreateCuFunctionNVX( vkGetDeviceProcAddr( device, "vkCreateCuFunctionNVX" ) ); + vkDestroyCuModuleNVX = PFN_vkDestroyCuModuleNVX( vkGetDeviceProcAddr( device, "vkDestroyCuModuleNVX" ) ); + vkDestroyCuFunctionNVX = PFN_vkDestroyCuFunctionNVX( vkGetDeviceProcAddr( device, "vkDestroyCuFunctionNVX" ) ); + vkCmdCuLaunchKernelNVX = PFN_vkCmdCuLaunchKernelNVX( vkGetDeviceProcAddr( device, "vkCmdCuLaunchKernelNVX" ) ); + + //=== VK_NVX_image_view_handle === + vkGetImageViewHandleNVX = PFN_vkGetImageViewHandleNVX( vkGetDeviceProcAddr( device, "vkGetImageViewHandleNVX" ) ); + vkGetImageViewAddressNVX = PFN_vkGetImageViewAddressNVX( vkGetDeviceProcAddr( device, "vkGetImageViewAddressNVX" ) ); + + //=== VK_AMD_draw_indirect_count === + vkCmdDrawIndirectCountAMD = PFN_vkCmdDrawIndirectCountAMD( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCountAMD" ) ); + if ( !vkCmdDrawIndirectCount ) + vkCmdDrawIndirectCount = vkCmdDrawIndirectCountAMD; + vkCmdDrawIndexedIndirectCountAMD = PFN_vkCmdDrawIndexedIndirectCountAMD( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCountAMD" ) ); + if ( !vkCmdDrawIndexedIndirectCount ) + vkCmdDrawIndexedIndirectCount = vkCmdDrawIndexedIndirectCountAMD; + + //=== VK_AMD_shader_info === + vkGetShaderInfoAMD = PFN_vkGetShaderInfoAMD( vkGetDeviceProcAddr( device, "vkGetShaderInfoAMD" ) ); + + //=== VK_KHR_dynamic_rendering === + vkCmdBeginRenderingKHR = PFN_vkCmdBeginRenderingKHR( vkGetDeviceProcAddr( device, "vkCmdBeginRenderingKHR" ) ); + if ( !vkCmdBeginRendering ) + vkCmdBeginRendering = vkCmdBeginRenderingKHR; + vkCmdEndRenderingKHR = PFN_vkCmdEndRenderingKHR( vkGetDeviceProcAddr( device, "vkCmdEndRenderingKHR" ) ); + if ( !vkCmdEndRendering ) + vkCmdEndRendering = vkCmdEndRenderingKHR; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_NV_external_memory_win32 === + vkGetMemoryWin32HandleNV = PFN_vkGetMemoryWin32HandleNV( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandleNV" ) ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_device_group === + vkGetDeviceGroupPeerMemoryFeaturesKHR = + PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR( vkGetDeviceProcAddr( device, "vkGetDeviceGroupPeerMemoryFeaturesKHR" ) ); + if ( !vkGetDeviceGroupPeerMemoryFeatures ) + vkGetDeviceGroupPeerMemoryFeatures = vkGetDeviceGroupPeerMemoryFeaturesKHR; + vkCmdSetDeviceMaskKHR = PFN_vkCmdSetDeviceMaskKHR( vkGetDeviceProcAddr( device, "vkCmdSetDeviceMaskKHR" ) ); + if ( !vkCmdSetDeviceMask ) + vkCmdSetDeviceMask = vkCmdSetDeviceMaskKHR; + vkCmdDispatchBaseKHR = PFN_vkCmdDispatchBaseKHR( vkGetDeviceProcAddr( device, "vkCmdDispatchBaseKHR" ) ); + if ( !vkCmdDispatchBase ) + vkCmdDispatchBase = vkCmdDispatchBaseKHR; + + //=== VK_KHR_maintenance1 === + vkTrimCommandPoolKHR = PFN_vkTrimCommandPoolKHR( vkGetDeviceProcAddr( device, "vkTrimCommandPoolKHR" ) ); + if ( !vkTrimCommandPool ) + vkTrimCommandPool = vkTrimCommandPoolKHR; + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_memory_win32 === + vkGetMemoryWin32HandleKHR = PFN_vkGetMemoryWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandleKHR" ) ); + vkGetMemoryWin32HandlePropertiesKHR = PFN_vkGetMemoryWin32HandlePropertiesKHR( vkGetDeviceProcAddr( device, 
"vkGetMemoryWin32HandlePropertiesKHR" ) ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_memory_fd === + vkGetMemoryFdKHR = PFN_vkGetMemoryFdKHR( vkGetDeviceProcAddr( device, "vkGetMemoryFdKHR" ) ); + vkGetMemoryFdPropertiesKHR = PFN_vkGetMemoryFdPropertiesKHR( vkGetDeviceProcAddr( device, "vkGetMemoryFdPropertiesKHR" ) ); + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_semaphore_win32 === + vkImportSemaphoreWin32HandleKHR = PFN_vkImportSemaphoreWin32HandleKHR( vkGetDeviceProcAddr( device, "vkImportSemaphoreWin32HandleKHR" ) ); + vkGetSemaphoreWin32HandleKHR = PFN_vkGetSemaphoreWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreWin32HandleKHR" ) ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_semaphore_fd === + vkImportSemaphoreFdKHR = PFN_vkImportSemaphoreFdKHR( vkGetDeviceProcAddr( device, "vkImportSemaphoreFdKHR" ) ); + vkGetSemaphoreFdKHR = PFN_vkGetSemaphoreFdKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreFdKHR" ) ); + + //=== VK_KHR_push_descriptor === + vkCmdPushDescriptorSetKHR = PFN_vkCmdPushDescriptorSetKHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetKHR" ) ); + vkCmdPushDescriptorSetWithTemplateKHR = + PFN_vkCmdPushDescriptorSetWithTemplateKHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetWithTemplateKHR" ) ); + + //=== VK_EXT_conditional_rendering === + vkCmdBeginConditionalRenderingEXT = PFN_vkCmdBeginConditionalRenderingEXT( vkGetDeviceProcAddr( device, "vkCmdBeginConditionalRenderingEXT" ) ); + vkCmdEndConditionalRenderingEXT = PFN_vkCmdEndConditionalRenderingEXT( vkGetDeviceProcAddr( device, "vkCmdEndConditionalRenderingEXT" ) ); + + //=== VK_KHR_descriptor_update_template === + vkCreateDescriptorUpdateTemplateKHR = PFN_vkCreateDescriptorUpdateTemplateKHR( vkGetDeviceProcAddr( device, "vkCreateDescriptorUpdateTemplateKHR" ) ); + if ( !vkCreateDescriptorUpdateTemplate ) + vkCreateDescriptorUpdateTemplate = vkCreateDescriptorUpdateTemplateKHR; + vkDestroyDescriptorUpdateTemplateKHR = + PFN_vkDestroyDescriptorUpdateTemplateKHR( vkGetDeviceProcAddr( device, "vkDestroyDescriptorUpdateTemplateKHR" ) ); + if ( !vkDestroyDescriptorUpdateTemplate ) + vkDestroyDescriptorUpdateTemplate = vkDestroyDescriptorUpdateTemplateKHR; + vkUpdateDescriptorSetWithTemplateKHR = + PFN_vkUpdateDescriptorSetWithTemplateKHR( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSetWithTemplateKHR" ) ); + if ( !vkUpdateDescriptorSetWithTemplate ) + vkUpdateDescriptorSetWithTemplate = vkUpdateDescriptorSetWithTemplateKHR; + + //=== VK_NV_clip_space_w_scaling === + vkCmdSetViewportWScalingNV = PFN_vkCmdSetViewportWScalingNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportWScalingNV" ) ); + + //=== VK_EXT_display_control === + vkDisplayPowerControlEXT = PFN_vkDisplayPowerControlEXT( vkGetDeviceProcAddr( device, "vkDisplayPowerControlEXT" ) ); + vkRegisterDeviceEventEXT = PFN_vkRegisterDeviceEventEXT( vkGetDeviceProcAddr( device, "vkRegisterDeviceEventEXT" ) ); + vkRegisterDisplayEventEXT = PFN_vkRegisterDisplayEventEXT( vkGetDeviceProcAddr( device, "vkRegisterDisplayEventEXT" ) ); + vkGetSwapchainCounterEXT = PFN_vkGetSwapchainCounterEXT( vkGetDeviceProcAddr( device, "vkGetSwapchainCounterEXT" ) ); + + //=== VK_GOOGLE_display_timing === + vkGetRefreshCycleDurationGOOGLE = PFN_vkGetRefreshCycleDurationGOOGLE( vkGetDeviceProcAddr( device, "vkGetRefreshCycleDurationGOOGLE" ) ); + vkGetPastPresentationTimingGOOGLE = PFN_vkGetPastPresentationTimingGOOGLE( vkGetDeviceProcAddr( device, "vkGetPastPresentationTimingGOOGLE" ) ); + + //=== 
VK_EXT_discard_rectangles === + vkCmdSetDiscardRectangleEXT = PFN_vkCmdSetDiscardRectangleEXT( vkGetDeviceProcAddr( device, "vkCmdSetDiscardRectangleEXT" ) ); + vkCmdSetDiscardRectangleEnableEXT = PFN_vkCmdSetDiscardRectangleEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDiscardRectangleEnableEXT" ) ); + vkCmdSetDiscardRectangleModeEXT = PFN_vkCmdSetDiscardRectangleModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetDiscardRectangleModeEXT" ) ); + + //=== VK_EXT_hdr_metadata === + vkSetHdrMetadataEXT = PFN_vkSetHdrMetadataEXT( vkGetDeviceProcAddr( device, "vkSetHdrMetadataEXT" ) ); + + //=== VK_KHR_create_renderpass2 === + vkCreateRenderPass2KHR = PFN_vkCreateRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCreateRenderPass2KHR" ) ); + if ( !vkCreateRenderPass2 ) + vkCreateRenderPass2 = vkCreateRenderPass2KHR; + vkCmdBeginRenderPass2KHR = PFN_vkCmdBeginRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass2KHR" ) ); + if ( !vkCmdBeginRenderPass2 ) + vkCmdBeginRenderPass2 = vkCmdBeginRenderPass2KHR; + vkCmdNextSubpass2KHR = PFN_vkCmdNextSubpass2KHR( vkGetDeviceProcAddr( device, "vkCmdNextSubpass2KHR" ) ); + if ( !vkCmdNextSubpass2 ) + vkCmdNextSubpass2 = vkCmdNextSubpass2KHR; + vkCmdEndRenderPass2KHR = PFN_vkCmdEndRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass2KHR" ) ); + if ( !vkCmdEndRenderPass2 ) + vkCmdEndRenderPass2 = vkCmdEndRenderPass2KHR; + + //=== VK_KHR_shared_presentable_image === + vkGetSwapchainStatusKHR = PFN_vkGetSwapchainStatusKHR( vkGetDeviceProcAddr( device, "vkGetSwapchainStatusKHR" ) ); + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_fence_win32 === + vkImportFenceWin32HandleKHR = PFN_vkImportFenceWin32HandleKHR( vkGetDeviceProcAddr( device, "vkImportFenceWin32HandleKHR" ) ); + vkGetFenceWin32HandleKHR = PFN_vkGetFenceWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetFenceWin32HandleKHR" ) ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_fence_fd === + vkImportFenceFdKHR = PFN_vkImportFenceFdKHR( vkGetDeviceProcAddr( device, "vkImportFenceFdKHR" ) ); + vkGetFenceFdKHR = PFN_vkGetFenceFdKHR( vkGetDeviceProcAddr( device, "vkGetFenceFdKHR" ) ); + + //=== VK_KHR_performance_query === + vkAcquireProfilingLockKHR = PFN_vkAcquireProfilingLockKHR( vkGetDeviceProcAddr( device, "vkAcquireProfilingLockKHR" ) ); + vkReleaseProfilingLockKHR = PFN_vkReleaseProfilingLockKHR( vkGetDeviceProcAddr( device, "vkReleaseProfilingLockKHR" ) ); + + //=== VK_EXT_debug_utils === + vkSetDebugUtilsObjectNameEXT = PFN_vkSetDebugUtilsObjectNameEXT( vkGetDeviceProcAddr( device, "vkSetDebugUtilsObjectNameEXT" ) ); + vkSetDebugUtilsObjectTagEXT = PFN_vkSetDebugUtilsObjectTagEXT( vkGetDeviceProcAddr( device, "vkSetDebugUtilsObjectTagEXT" ) ); + vkQueueBeginDebugUtilsLabelEXT = PFN_vkQueueBeginDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueBeginDebugUtilsLabelEXT" ) ); + vkQueueEndDebugUtilsLabelEXT = PFN_vkQueueEndDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueEndDebugUtilsLabelEXT" ) ); + vkQueueInsertDebugUtilsLabelEXT = PFN_vkQueueInsertDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueInsertDebugUtilsLabelEXT" ) ); + vkCmdBeginDebugUtilsLabelEXT = PFN_vkCmdBeginDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdBeginDebugUtilsLabelEXT" ) ); + vkCmdEndDebugUtilsLabelEXT = PFN_vkCmdEndDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdEndDebugUtilsLabelEXT" ) ); + vkCmdInsertDebugUtilsLabelEXT = PFN_vkCmdInsertDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdInsertDebugUtilsLabelEXT" ) ); + +#if 
defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_ANDROID_external_memory_android_hardware_buffer === + vkGetAndroidHardwareBufferPropertiesANDROID = + PFN_vkGetAndroidHardwareBufferPropertiesANDROID( vkGetDeviceProcAddr( device, "vkGetAndroidHardwareBufferPropertiesANDROID" ) ); + vkGetMemoryAndroidHardwareBufferANDROID = + PFN_vkGetMemoryAndroidHardwareBufferANDROID( vkGetDeviceProcAddr( device, "vkGetMemoryAndroidHardwareBufferANDROID" ) ); +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_AMDX_shader_enqueue === + vkCreateExecutionGraphPipelinesAMDX = PFN_vkCreateExecutionGraphPipelinesAMDX( vkGetDeviceProcAddr( device, "vkCreateExecutionGraphPipelinesAMDX" ) ); + vkGetExecutionGraphPipelineScratchSizeAMDX = + PFN_vkGetExecutionGraphPipelineScratchSizeAMDX( vkGetDeviceProcAddr( device, "vkGetExecutionGraphPipelineScratchSizeAMDX" ) ); + vkGetExecutionGraphPipelineNodeIndexAMDX = + PFN_vkGetExecutionGraphPipelineNodeIndexAMDX( vkGetDeviceProcAddr( device, "vkGetExecutionGraphPipelineNodeIndexAMDX" ) ); + vkCmdInitializeGraphScratchMemoryAMDX = + PFN_vkCmdInitializeGraphScratchMemoryAMDX( vkGetDeviceProcAddr( device, "vkCmdInitializeGraphScratchMemoryAMDX" ) ); + vkCmdDispatchGraphAMDX = PFN_vkCmdDispatchGraphAMDX( vkGetDeviceProcAddr( device, "vkCmdDispatchGraphAMDX" ) ); + vkCmdDispatchGraphIndirectAMDX = PFN_vkCmdDispatchGraphIndirectAMDX( vkGetDeviceProcAddr( device, "vkCmdDispatchGraphIndirectAMDX" ) ); + vkCmdDispatchGraphIndirectCountAMDX = PFN_vkCmdDispatchGraphIndirectCountAMDX( vkGetDeviceProcAddr( device, "vkCmdDispatchGraphIndirectCountAMDX" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_EXT_sample_locations === + vkCmdSetSampleLocationsEXT = PFN_vkCmdSetSampleLocationsEXT( vkGetDeviceProcAddr( device, "vkCmdSetSampleLocationsEXT" ) ); + + //=== VK_KHR_get_memory_requirements2 === + vkGetImageMemoryRequirements2KHR = PFN_vkGetImageMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements2KHR" ) ); + if ( !vkGetImageMemoryRequirements2 ) + vkGetImageMemoryRequirements2 = vkGetImageMemoryRequirements2KHR; + vkGetBufferMemoryRequirements2KHR = PFN_vkGetBufferMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements2KHR" ) ); + if ( !vkGetBufferMemoryRequirements2 ) + vkGetBufferMemoryRequirements2 = vkGetBufferMemoryRequirements2KHR; + vkGetImageSparseMemoryRequirements2KHR = + PFN_vkGetImageSparseMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements2KHR" ) ); + if ( !vkGetImageSparseMemoryRequirements2 ) + vkGetImageSparseMemoryRequirements2 = vkGetImageSparseMemoryRequirements2KHR; + + //=== VK_KHR_acceleration_structure === + vkCreateAccelerationStructureKHR = PFN_vkCreateAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCreateAccelerationStructureKHR" ) ); + vkDestroyAccelerationStructureKHR = PFN_vkDestroyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkDestroyAccelerationStructureKHR" ) ); + vkCmdBuildAccelerationStructuresKHR = PFN_vkCmdBuildAccelerationStructuresKHR( vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructuresKHR" ) ); + vkCmdBuildAccelerationStructuresIndirectKHR = + PFN_vkCmdBuildAccelerationStructuresIndirectKHR( vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructuresIndirectKHR" ) ); + vkBuildAccelerationStructuresKHR = PFN_vkBuildAccelerationStructuresKHR( vkGetDeviceProcAddr( device, "vkBuildAccelerationStructuresKHR" ) ); + vkCopyAccelerationStructureKHR = 
PFN_vkCopyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCopyAccelerationStructureKHR" ) ); + vkCopyAccelerationStructureToMemoryKHR = + PFN_vkCopyAccelerationStructureToMemoryKHR( vkGetDeviceProcAddr( device, "vkCopyAccelerationStructureToMemoryKHR" ) ); + vkCopyMemoryToAccelerationStructureKHR = + PFN_vkCopyMemoryToAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCopyMemoryToAccelerationStructureKHR" ) ); + vkWriteAccelerationStructuresPropertiesKHR = + PFN_vkWriteAccelerationStructuresPropertiesKHR( vkGetDeviceProcAddr( device, "vkWriteAccelerationStructuresPropertiesKHR" ) ); + vkCmdCopyAccelerationStructureKHR = PFN_vkCmdCopyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureKHR" ) ); + vkCmdCopyAccelerationStructureToMemoryKHR = + PFN_vkCmdCopyAccelerationStructureToMemoryKHR( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureToMemoryKHR" ) ); + vkCmdCopyMemoryToAccelerationStructureKHR = + PFN_vkCmdCopyMemoryToAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCmdCopyMemoryToAccelerationStructureKHR" ) ); + vkGetAccelerationStructureDeviceAddressKHR = + PFN_vkGetAccelerationStructureDeviceAddressKHR( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureDeviceAddressKHR" ) ); + vkCmdWriteAccelerationStructuresPropertiesKHR = + PFN_vkCmdWriteAccelerationStructuresPropertiesKHR( vkGetDeviceProcAddr( device, "vkCmdWriteAccelerationStructuresPropertiesKHR" ) ); + vkGetDeviceAccelerationStructureCompatibilityKHR = + PFN_vkGetDeviceAccelerationStructureCompatibilityKHR( vkGetDeviceProcAddr( device, "vkGetDeviceAccelerationStructureCompatibilityKHR" ) ); + vkGetAccelerationStructureBuildSizesKHR = + PFN_vkGetAccelerationStructureBuildSizesKHR( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureBuildSizesKHR" ) ); + + //=== VK_KHR_ray_tracing_pipeline === + vkCmdTraceRaysKHR = PFN_vkCmdTraceRaysKHR( vkGetDeviceProcAddr( device, "vkCmdTraceRaysKHR" ) ); + vkCreateRayTracingPipelinesKHR = PFN_vkCreateRayTracingPipelinesKHR( vkGetDeviceProcAddr( device, "vkCreateRayTracingPipelinesKHR" ) ); + vkGetRayTracingShaderGroupHandlesKHR = + PFN_vkGetRayTracingShaderGroupHandlesKHR( vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupHandlesKHR" ) ); + vkGetRayTracingCaptureReplayShaderGroupHandlesKHR = + PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( vkGetDeviceProcAddr( device, "vkGetRayTracingCaptureReplayShaderGroupHandlesKHR" ) ); + vkCmdTraceRaysIndirectKHR = PFN_vkCmdTraceRaysIndirectKHR( vkGetDeviceProcAddr( device, "vkCmdTraceRaysIndirectKHR" ) ); + vkGetRayTracingShaderGroupStackSizeKHR = + PFN_vkGetRayTracingShaderGroupStackSizeKHR( vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupStackSizeKHR" ) ); + vkCmdSetRayTracingPipelineStackSizeKHR = + PFN_vkCmdSetRayTracingPipelineStackSizeKHR( vkGetDeviceProcAddr( device, "vkCmdSetRayTracingPipelineStackSizeKHR" ) ); + + //=== VK_KHR_sampler_ycbcr_conversion === + vkCreateSamplerYcbcrConversionKHR = PFN_vkCreateSamplerYcbcrConversionKHR( vkGetDeviceProcAddr( device, "vkCreateSamplerYcbcrConversionKHR" ) ); + if ( !vkCreateSamplerYcbcrConversion ) + vkCreateSamplerYcbcrConversion = vkCreateSamplerYcbcrConversionKHR; + vkDestroySamplerYcbcrConversionKHR = PFN_vkDestroySamplerYcbcrConversionKHR( vkGetDeviceProcAddr( device, "vkDestroySamplerYcbcrConversionKHR" ) ); + if ( !vkDestroySamplerYcbcrConversion ) + vkDestroySamplerYcbcrConversion = vkDestroySamplerYcbcrConversionKHR; + + //=== VK_KHR_bind_memory2 === + vkBindBufferMemory2KHR = 
PFN_vkBindBufferMemory2KHR( vkGetDeviceProcAddr( device, "vkBindBufferMemory2KHR" ) ); + if ( !vkBindBufferMemory2 ) + vkBindBufferMemory2 = vkBindBufferMemory2KHR; + vkBindImageMemory2KHR = PFN_vkBindImageMemory2KHR( vkGetDeviceProcAddr( device, "vkBindImageMemory2KHR" ) ); + if ( !vkBindImageMemory2 ) + vkBindImageMemory2 = vkBindImageMemory2KHR; + + //=== VK_EXT_image_drm_format_modifier === + vkGetImageDrmFormatModifierPropertiesEXT = + PFN_vkGetImageDrmFormatModifierPropertiesEXT( vkGetDeviceProcAddr( device, "vkGetImageDrmFormatModifierPropertiesEXT" ) ); + + //=== VK_EXT_validation_cache === + vkCreateValidationCacheEXT = PFN_vkCreateValidationCacheEXT( vkGetDeviceProcAddr( device, "vkCreateValidationCacheEXT" ) ); + vkDestroyValidationCacheEXT = PFN_vkDestroyValidationCacheEXT( vkGetDeviceProcAddr( device, "vkDestroyValidationCacheEXT" ) ); + vkMergeValidationCachesEXT = PFN_vkMergeValidationCachesEXT( vkGetDeviceProcAddr( device, "vkMergeValidationCachesEXT" ) ); + vkGetValidationCacheDataEXT = PFN_vkGetValidationCacheDataEXT( vkGetDeviceProcAddr( device, "vkGetValidationCacheDataEXT" ) ); + + //=== VK_NV_shading_rate_image === + vkCmdBindShadingRateImageNV = PFN_vkCmdBindShadingRateImageNV( vkGetDeviceProcAddr( device, "vkCmdBindShadingRateImageNV" ) ); + vkCmdSetViewportShadingRatePaletteNV = + PFN_vkCmdSetViewportShadingRatePaletteNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportShadingRatePaletteNV" ) ); + vkCmdSetCoarseSampleOrderNV = PFN_vkCmdSetCoarseSampleOrderNV( vkGetDeviceProcAddr( device, "vkCmdSetCoarseSampleOrderNV" ) ); + + //=== VK_NV_ray_tracing === + vkCreateAccelerationStructureNV = PFN_vkCreateAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCreateAccelerationStructureNV" ) ); + vkDestroyAccelerationStructureNV = PFN_vkDestroyAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkDestroyAccelerationStructureNV" ) ); + vkGetAccelerationStructureMemoryRequirementsNV = + PFN_vkGetAccelerationStructureMemoryRequirementsNV( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureMemoryRequirementsNV" ) ); + vkBindAccelerationStructureMemoryNV = PFN_vkBindAccelerationStructureMemoryNV( vkGetDeviceProcAddr( device, "vkBindAccelerationStructureMemoryNV" ) ); + vkCmdBuildAccelerationStructureNV = PFN_vkCmdBuildAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructureNV" ) ); + vkCmdCopyAccelerationStructureNV = PFN_vkCmdCopyAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureNV" ) ); + vkCmdTraceRaysNV = PFN_vkCmdTraceRaysNV( vkGetDeviceProcAddr( device, "vkCmdTraceRaysNV" ) ); + vkCreateRayTracingPipelinesNV = PFN_vkCreateRayTracingPipelinesNV( vkGetDeviceProcAddr( device, "vkCreateRayTracingPipelinesNV" ) ); + vkGetRayTracingShaderGroupHandlesNV = PFN_vkGetRayTracingShaderGroupHandlesNV( vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupHandlesNV" ) ); + if ( !vkGetRayTracingShaderGroupHandlesKHR ) + vkGetRayTracingShaderGroupHandlesKHR = vkGetRayTracingShaderGroupHandlesNV; + vkGetAccelerationStructureHandleNV = PFN_vkGetAccelerationStructureHandleNV( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureHandleNV" ) ); + vkCmdWriteAccelerationStructuresPropertiesNV = + PFN_vkCmdWriteAccelerationStructuresPropertiesNV( vkGetDeviceProcAddr( device, "vkCmdWriteAccelerationStructuresPropertiesNV" ) ); + vkCompileDeferredNV = PFN_vkCompileDeferredNV( vkGetDeviceProcAddr( device, "vkCompileDeferredNV" ) ); + + //=== VK_KHR_maintenance3 === + vkGetDescriptorSetLayoutSupportKHR = 
PFN_vkGetDescriptorSetLayoutSupportKHR( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutSupportKHR" ) ); + if ( !vkGetDescriptorSetLayoutSupport ) + vkGetDescriptorSetLayoutSupport = vkGetDescriptorSetLayoutSupportKHR; + + //=== VK_KHR_draw_indirect_count === + vkCmdDrawIndirectCountKHR = PFN_vkCmdDrawIndirectCountKHR( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCountKHR" ) ); + if ( !vkCmdDrawIndirectCount ) + vkCmdDrawIndirectCount = vkCmdDrawIndirectCountKHR; + vkCmdDrawIndexedIndirectCountKHR = PFN_vkCmdDrawIndexedIndirectCountKHR( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCountKHR" ) ); + if ( !vkCmdDrawIndexedIndirectCount ) + vkCmdDrawIndexedIndirectCount = vkCmdDrawIndexedIndirectCountKHR; + + //=== VK_EXT_external_memory_host === + vkGetMemoryHostPointerPropertiesEXT = PFN_vkGetMemoryHostPointerPropertiesEXT( vkGetDeviceProcAddr( device, "vkGetMemoryHostPointerPropertiesEXT" ) ); + + //=== VK_AMD_buffer_marker === + vkCmdWriteBufferMarkerAMD = PFN_vkCmdWriteBufferMarkerAMD( vkGetDeviceProcAddr( device, "vkCmdWriteBufferMarkerAMD" ) ); + vkCmdWriteBufferMarker2AMD = PFN_vkCmdWriteBufferMarker2AMD( vkGetDeviceProcAddr( device, "vkCmdWriteBufferMarker2AMD" ) ); + + //=== VK_EXT_calibrated_timestamps === + vkGetCalibratedTimestampsEXT = PFN_vkGetCalibratedTimestampsEXT( vkGetDeviceProcAddr( device, "vkGetCalibratedTimestampsEXT" ) ); + if ( !vkGetCalibratedTimestampsKHR ) + vkGetCalibratedTimestampsKHR = vkGetCalibratedTimestampsEXT; + + //=== VK_NV_mesh_shader === + vkCmdDrawMeshTasksNV = PFN_vkCmdDrawMeshTasksNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksNV" ) ); + vkCmdDrawMeshTasksIndirectNV = PFN_vkCmdDrawMeshTasksIndirectNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectNV" ) ); + vkCmdDrawMeshTasksIndirectCountNV = PFN_vkCmdDrawMeshTasksIndirectCountNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectCountNV" ) ); + + //=== VK_NV_scissor_exclusive === + vkCmdSetExclusiveScissorEnableNV = PFN_vkCmdSetExclusiveScissorEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetExclusiveScissorEnableNV" ) ); + vkCmdSetExclusiveScissorNV = PFN_vkCmdSetExclusiveScissorNV( vkGetDeviceProcAddr( device, "vkCmdSetExclusiveScissorNV" ) ); + + //=== VK_NV_device_diagnostic_checkpoints === + vkCmdSetCheckpointNV = PFN_vkCmdSetCheckpointNV( vkGetDeviceProcAddr( device, "vkCmdSetCheckpointNV" ) ); + vkGetQueueCheckpointDataNV = PFN_vkGetQueueCheckpointDataNV( vkGetDeviceProcAddr( device, "vkGetQueueCheckpointDataNV" ) ); + vkGetQueueCheckpointData2NV = PFN_vkGetQueueCheckpointData2NV( vkGetDeviceProcAddr( device, "vkGetQueueCheckpointData2NV" ) ); + + //=== VK_KHR_timeline_semaphore === + vkGetSemaphoreCounterValueKHR = PFN_vkGetSemaphoreCounterValueKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreCounterValueKHR" ) ); + if ( !vkGetSemaphoreCounterValue ) + vkGetSemaphoreCounterValue = vkGetSemaphoreCounterValueKHR; + vkWaitSemaphoresKHR = PFN_vkWaitSemaphoresKHR( vkGetDeviceProcAddr( device, "vkWaitSemaphoresKHR" ) ); + if ( !vkWaitSemaphores ) + vkWaitSemaphores = vkWaitSemaphoresKHR; + vkSignalSemaphoreKHR = PFN_vkSignalSemaphoreKHR( vkGetDeviceProcAddr( device, "vkSignalSemaphoreKHR" ) ); + if ( !vkSignalSemaphore ) + vkSignalSemaphore = vkSignalSemaphoreKHR; + + //=== VK_INTEL_performance_query === + vkInitializePerformanceApiINTEL = PFN_vkInitializePerformanceApiINTEL( vkGetDeviceProcAddr( device, "vkInitializePerformanceApiINTEL" ) ); + vkUninitializePerformanceApiINTEL = PFN_vkUninitializePerformanceApiINTEL( vkGetDeviceProcAddr( device, 
"vkUninitializePerformanceApiINTEL" ) ); + vkCmdSetPerformanceMarkerINTEL = PFN_vkCmdSetPerformanceMarkerINTEL( vkGetDeviceProcAddr( device, "vkCmdSetPerformanceMarkerINTEL" ) ); + vkCmdSetPerformanceStreamMarkerINTEL = + PFN_vkCmdSetPerformanceStreamMarkerINTEL( vkGetDeviceProcAddr( device, "vkCmdSetPerformanceStreamMarkerINTEL" ) ); + vkCmdSetPerformanceOverrideINTEL = PFN_vkCmdSetPerformanceOverrideINTEL( vkGetDeviceProcAddr( device, "vkCmdSetPerformanceOverrideINTEL" ) ); + vkAcquirePerformanceConfigurationINTEL = + PFN_vkAcquirePerformanceConfigurationINTEL( vkGetDeviceProcAddr( device, "vkAcquirePerformanceConfigurationINTEL" ) ); + vkReleasePerformanceConfigurationINTEL = + PFN_vkReleasePerformanceConfigurationINTEL( vkGetDeviceProcAddr( device, "vkReleasePerformanceConfigurationINTEL" ) ); + vkQueueSetPerformanceConfigurationINTEL = + PFN_vkQueueSetPerformanceConfigurationINTEL( vkGetDeviceProcAddr( device, "vkQueueSetPerformanceConfigurationINTEL" ) ); + vkGetPerformanceParameterINTEL = PFN_vkGetPerformanceParameterINTEL( vkGetDeviceProcAddr( device, "vkGetPerformanceParameterINTEL" ) ); + + //=== VK_AMD_display_native_hdr === + vkSetLocalDimmingAMD = PFN_vkSetLocalDimmingAMD( vkGetDeviceProcAddr( device, "vkSetLocalDimmingAMD" ) ); + + //=== VK_KHR_fragment_shading_rate === + vkCmdSetFragmentShadingRateKHR = PFN_vkCmdSetFragmentShadingRateKHR( vkGetDeviceProcAddr( device, "vkCmdSetFragmentShadingRateKHR" ) ); + + //=== VK_KHR_dynamic_rendering_local_read === + vkCmdSetRenderingAttachmentLocationsKHR = + PFN_vkCmdSetRenderingAttachmentLocationsKHR( vkGetDeviceProcAddr( device, "vkCmdSetRenderingAttachmentLocationsKHR" ) ); + vkCmdSetRenderingInputAttachmentIndicesKHR = + PFN_vkCmdSetRenderingInputAttachmentIndicesKHR( vkGetDeviceProcAddr( device, "vkCmdSetRenderingInputAttachmentIndicesKHR" ) ); + + //=== VK_EXT_buffer_device_address === + vkGetBufferDeviceAddressEXT = PFN_vkGetBufferDeviceAddressEXT( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddressEXT" ) ); + if ( !vkGetBufferDeviceAddress ) + vkGetBufferDeviceAddress = vkGetBufferDeviceAddressEXT; + + //=== VK_KHR_present_wait === + vkWaitForPresentKHR = PFN_vkWaitForPresentKHR( vkGetDeviceProcAddr( device, "vkWaitForPresentKHR" ) ); + +#if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_EXT_full_screen_exclusive === + vkAcquireFullScreenExclusiveModeEXT = PFN_vkAcquireFullScreenExclusiveModeEXT( vkGetDeviceProcAddr( device, "vkAcquireFullScreenExclusiveModeEXT" ) ); + vkReleaseFullScreenExclusiveModeEXT = PFN_vkReleaseFullScreenExclusiveModeEXT( vkGetDeviceProcAddr( device, "vkReleaseFullScreenExclusiveModeEXT" ) ); + vkGetDeviceGroupSurfacePresentModes2EXT = + PFN_vkGetDeviceGroupSurfacePresentModes2EXT( vkGetDeviceProcAddr( device, "vkGetDeviceGroupSurfacePresentModes2EXT" ) ); +#endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_buffer_device_address === + vkGetBufferDeviceAddressKHR = PFN_vkGetBufferDeviceAddressKHR( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddressKHR" ) ); + if ( !vkGetBufferDeviceAddress ) + vkGetBufferDeviceAddress = vkGetBufferDeviceAddressKHR; + vkGetBufferOpaqueCaptureAddressKHR = PFN_vkGetBufferOpaqueCaptureAddressKHR( vkGetDeviceProcAddr( device, "vkGetBufferOpaqueCaptureAddressKHR" ) ); + if ( !vkGetBufferOpaqueCaptureAddress ) + vkGetBufferOpaqueCaptureAddress = vkGetBufferOpaqueCaptureAddressKHR; + vkGetDeviceMemoryOpaqueCaptureAddressKHR = + PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR( vkGetDeviceProcAddr( device, "vkGetDeviceMemoryOpaqueCaptureAddressKHR" ) ); + if ( 
!vkGetDeviceMemoryOpaqueCaptureAddress ) + vkGetDeviceMemoryOpaqueCaptureAddress = vkGetDeviceMemoryOpaqueCaptureAddressKHR; + + //=== VK_EXT_line_rasterization === + vkCmdSetLineStippleEXT = PFN_vkCmdSetLineStippleEXT( vkGetDeviceProcAddr( device, "vkCmdSetLineStippleEXT" ) ); + if ( !vkCmdSetLineStippleKHR ) + vkCmdSetLineStippleKHR = vkCmdSetLineStippleEXT; + + //=== VK_EXT_host_query_reset === + vkResetQueryPoolEXT = PFN_vkResetQueryPoolEXT( vkGetDeviceProcAddr( device, "vkResetQueryPoolEXT" ) ); + if ( !vkResetQueryPool ) + vkResetQueryPool = vkResetQueryPoolEXT; + + //=== VK_EXT_extended_dynamic_state === + vkCmdSetCullModeEXT = PFN_vkCmdSetCullModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetCullModeEXT" ) ); + if ( !vkCmdSetCullMode ) + vkCmdSetCullMode = vkCmdSetCullModeEXT; + vkCmdSetFrontFaceEXT = PFN_vkCmdSetFrontFaceEXT( vkGetDeviceProcAddr( device, "vkCmdSetFrontFaceEXT" ) ); + if ( !vkCmdSetFrontFace ) + vkCmdSetFrontFace = vkCmdSetFrontFaceEXT; + vkCmdSetPrimitiveTopologyEXT = PFN_vkCmdSetPrimitiveTopologyEXT( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveTopologyEXT" ) ); + if ( !vkCmdSetPrimitiveTopology ) + vkCmdSetPrimitiveTopology = vkCmdSetPrimitiveTopologyEXT; + vkCmdSetViewportWithCountEXT = PFN_vkCmdSetViewportWithCountEXT( vkGetDeviceProcAddr( device, "vkCmdSetViewportWithCountEXT" ) ); + if ( !vkCmdSetViewportWithCount ) + vkCmdSetViewportWithCount = vkCmdSetViewportWithCountEXT; + vkCmdSetScissorWithCountEXT = PFN_vkCmdSetScissorWithCountEXT( vkGetDeviceProcAddr( device, "vkCmdSetScissorWithCountEXT" ) ); + if ( !vkCmdSetScissorWithCount ) + vkCmdSetScissorWithCount = vkCmdSetScissorWithCountEXT; + vkCmdBindVertexBuffers2EXT = PFN_vkCmdBindVertexBuffers2EXT( vkGetDeviceProcAddr( device, "vkCmdBindVertexBuffers2EXT" ) ); + if ( !vkCmdBindVertexBuffers2 ) + vkCmdBindVertexBuffers2 = vkCmdBindVertexBuffers2EXT; + vkCmdSetDepthTestEnableEXT = PFN_vkCmdSetDepthTestEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthTestEnableEXT" ) ); + if ( !vkCmdSetDepthTestEnable ) + vkCmdSetDepthTestEnable = vkCmdSetDepthTestEnableEXT; + vkCmdSetDepthWriteEnableEXT = PFN_vkCmdSetDepthWriteEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthWriteEnableEXT" ) ); + if ( !vkCmdSetDepthWriteEnable ) + vkCmdSetDepthWriteEnable = vkCmdSetDepthWriteEnableEXT; + vkCmdSetDepthCompareOpEXT = PFN_vkCmdSetDepthCompareOpEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthCompareOpEXT" ) ); + if ( !vkCmdSetDepthCompareOp ) + vkCmdSetDepthCompareOp = vkCmdSetDepthCompareOpEXT; + vkCmdSetDepthBoundsTestEnableEXT = PFN_vkCmdSetDepthBoundsTestEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthBoundsTestEnableEXT" ) ); + if ( !vkCmdSetDepthBoundsTestEnable ) + vkCmdSetDepthBoundsTestEnable = vkCmdSetDepthBoundsTestEnableEXT; + vkCmdSetStencilTestEnableEXT = PFN_vkCmdSetStencilTestEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetStencilTestEnableEXT" ) ); + if ( !vkCmdSetStencilTestEnable ) + vkCmdSetStencilTestEnable = vkCmdSetStencilTestEnableEXT; + vkCmdSetStencilOpEXT = PFN_vkCmdSetStencilOpEXT( vkGetDeviceProcAddr( device, "vkCmdSetStencilOpEXT" ) ); + if ( !vkCmdSetStencilOp ) + vkCmdSetStencilOp = vkCmdSetStencilOpEXT; + + //=== VK_KHR_deferred_host_operations === + vkCreateDeferredOperationKHR = PFN_vkCreateDeferredOperationKHR( vkGetDeviceProcAddr( device, "vkCreateDeferredOperationKHR" ) ); + vkDestroyDeferredOperationKHR = PFN_vkDestroyDeferredOperationKHR( vkGetDeviceProcAddr( device, "vkDestroyDeferredOperationKHR" ) ); + vkGetDeferredOperationMaxConcurrencyKHR = + 
PFN_vkGetDeferredOperationMaxConcurrencyKHR( vkGetDeviceProcAddr( device, "vkGetDeferredOperationMaxConcurrencyKHR" ) ); + vkGetDeferredOperationResultKHR = PFN_vkGetDeferredOperationResultKHR( vkGetDeviceProcAddr( device, "vkGetDeferredOperationResultKHR" ) ); + vkDeferredOperationJoinKHR = PFN_vkDeferredOperationJoinKHR( vkGetDeviceProcAddr( device, "vkDeferredOperationJoinKHR" ) ); + + //=== VK_KHR_pipeline_executable_properties === + vkGetPipelineExecutablePropertiesKHR = + PFN_vkGetPipelineExecutablePropertiesKHR( vkGetDeviceProcAddr( device, "vkGetPipelineExecutablePropertiesKHR" ) ); + vkGetPipelineExecutableStatisticsKHR = + PFN_vkGetPipelineExecutableStatisticsKHR( vkGetDeviceProcAddr( device, "vkGetPipelineExecutableStatisticsKHR" ) ); + vkGetPipelineExecutableInternalRepresentationsKHR = + PFN_vkGetPipelineExecutableInternalRepresentationsKHR( vkGetDeviceProcAddr( device, "vkGetPipelineExecutableInternalRepresentationsKHR" ) ); + + //=== VK_EXT_host_image_copy === + vkCopyMemoryToImageEXT = PFN_vkCopyMemoryToImageEXT( vkGetDeviceProcAddr( device, "vkCopyMemoryToImageEXT" ) ); + vkCopyImageToMemoryEXT = PFN_vkCopyImageToMemoryEXT( vkGetDeviceProcAddr( device, "vkCopyImageToMemoryEXT" ) ); + vkCopyImageToImageEXT = PFN_vkCopyImageToImageEXT( vkGetDeviceProcAddr( device, "vkCopyImageToImageEXT" ) ); + vkTransitionImageLayoutEXT = PFN_vkTransitionImageLayoutEXT( vkGetDeviceProcAddr( device, "vkTransitionImageLayoutEXT" ) ); + vkGetImageSubresourceLayout2EXT = PFN_vkGetImageSubresourceLayout2EXT( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout2EXT" ) ); + if ( !vkGetImageSubresourceLayout2KHR ) + vkGetImageSubresourceLayout2KHR = vkGetImageSubresourceLayout2EXT; + + //=== VK_KHR_map_memory2 === + vkMapMemory2KHR = PFN_vkMapMemory2KHR( vkGetDeviceProcAddr( device, "vkMapMemory2KHR" ) ); + vkUnmapMemory2KHR = PFN_vkUnmapMemory2KHR( vkGetDeviceProcAddr( device, "vkUnmapMemory2KHR" ) ); + + //=== VK_EXT_swapchain_maintenance1 === + vkReleaseSwapchainImagesEXT = PFN_vkReleaseSwapchainImagesEXT( vkGetDeviceProcAddr( device, "vkReleaseSwapchainImagesEXT" ) ); + + //=== VK_NV_device_generated_commands === + vkGetGeneratedCommandsMemoryRequirementsNV = + PFN_vkGetGeneratedCommandsMemoryRequirementsNV( vkGetDeviceProcAddr( device, "vkGetGeneratedCommandsMemoryRequirementsNV" ) ); + vkCmdPreprocessGeneratedCommandsNV = PFN_vkCmdPreprocessGeneratedCommandsNV( vkGetDeviceProcAddr( device, "vkCmdPreprocessGeneratedCommandsNV" ) ); + vkCmdExecuteGeneratedCommandsNV = PFN_vkCmdExecuteGeneratedCommandsNV( vkGetDeviceProcAddr( device, "vkCmdExecuteGeneratedCommandsNV" ) ); + vkCmdBindPipelineShaderGroupNV = PFN_vkCmdBindPipelineShaderGroupNV( vkGetDeviceProcAddr( device, "vkCmdBindPipelineShaderGroupNV" ) ); + vkCreateIndirectCommandsLayoutNV = PFN_vkCreateIndirectCommandsLayoutNV( vkGetDeviceProcAddr( device, "vkCreateIndirectCommandsLayoutNV" ) ); + vkDestroyIndirectCommandsLayoutNV = PFN_vkDestroyIndirectCommandsLayoutNV( vkGetDeviceProcAddr( device, "vkDestroyIndirectCommandsLayoutNV" ) ); + + //=== VK_EXT_depth_bias_control === + vkCmdSetDepthBias2EXT = PFN_vkCmdSetDepthBias2EXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthBias2EXT" ) ); + + //=== VK_EXT_private_data === + vkCreatePrivateDataSlotEXT = PFN_vkCreatePrivateDataSlotEXT( vkGetDeviceProcAddr( device, "vkCreatePrivateDataSlotEXT" ) ); + if ( !vkCreatePrivateDataSlot ) + vkCreatePrivateDataSlot = vkCreatePrivateDataSlotEXT; + vkDestroyPrivateDataSlotEXT = PFN_vkDestroyPrivateDataSlotEXT( vkGetDeviceProcAddr( device, 
"vkDestroyPrivateDataSlotEXT" ) ); + if ( !vkDestroyPrivateDataSlot ) + vkDestroyPrivateDataSlot = vkDestroyPrivateDataSlotEXT; + vkSetPrivateDataEXT = PFN_vkSetPrivateDataEXT( vkGetDeviceProcAddr( device, "vkSetPrivateDataEXT" ) ); + if ( !vkSetPrivateData ) + vkSetPrivateData = vkSetPrivateDataEXT; + vkGetPrivateDataEXT = PFN_vkGetPrivateDataEXT( vkGetDeviceProcAddr( device, "vkGetPrivateDataEXT" ) ); + if ( !vkGetPrivateData ) + vkGetPrivateData = vkGetPrivateDataEXT; + + //=== VK_KHR_video_encode_queue === + vkGetEncodedVideoSessionParametersKHR = + PFN_vkGetEncodedVideoSessionParametersKHR( vkGetDeviceProcAddr( device, "vkGetEncodedVideoSessionParametersKHR" ) ); + vkCmdEncodeVideoKHR = PFN_vkCmdEncodeVideoKHR( vkGetDeviceProcAddr( device, "vkCmdEncodeVideoKHR" ) ); + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_NV_cuda_kernel_launch === + vkCreateCudaModuleNV = PFN_vkCreateCudaModuleNV( vkGetDeviceProcAddr( device, "vkCreateCudaModuleNV" ) ); + vkGetCudaModuleCacheNV = PFN_vkGetCudaModuleCacheNV( vkGetDeviceProcAddr( device, "vkGetCudaModuleCacheNV" ) ); + vkCreateCudaFunctionNV = PFN_vkCreateCudaFunctionNV( vkGetDeviceProcAddr( device, "vkCreateCudaFunctionNV" ) ); + vkDestroyCudaModuleNV = PFN_vkDestroyCudaModuleNV( vkGetDeviceProcAddr( device, "vkDestroyCudaModuleNV" ) ); + vkDestroyCudaFunctionNV = PFN_vkDestroyCudaFunctionNV( vkGetDeviceProcAddr( device, "vkDestroyCudaFunctionNV" ) ); + vkCmdCudaLaunchKernelNV = PFN_vkCmdCudaLaunchKernelNV( vkGetDeviceProcAddr( device, "vkCmdCudaLaunchKernelNV" ) ); +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_objects === + vkExportMetalObjectsEXT = PFN_vkExportMetalObjectsEXT( vkGetDeviceProcAddr( device, "vkExportMetalObjectsEXT" ) ); +#endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== VK_KHR_synchronization2 === + vkCmdSetEvent2KHR = PFN_vkCmdSetEvent2KHR( vkGetDeviceProcAddr( device, "vkCmdSetEvent2KHR" ) ); + if ( !vkCmdSetEvent2 ) + vkCmdSetEvent2 = vkCmdSetEvent2KHR; + vkCmdResetEvent2KHR = PFN_vkCmdResetEvent2KHR( vkGetDeviceProcAddr( device, "vkCmdResetEvent2KHR" ) ); + if ( !vkCmdResetEvent2 ) + vkCmdResetEvent2 = vkCmdResetEvent2KHR; + vkCmdWaitEvents2KHR = PFN_vkCmdWaitEvents2KHR( vkGetDeviceProcAddr( device, "vkCmdWaitEvents2KHR" ) ); + if ( !vkCmdWaitEvents2 ) + vkCmdWaitEvents2 = vkCmdWaitEvents2KHR; + vkCmdPipelineBarrier2KHR = PFN_vkCmdPipelineBarrier2KHR( vkGetDeviceProcAddr( device, "vkCmdPipelineBarrier2KHR" ) ); + if ( !vkCmdPipelineBarrier2 ) + vkCmdPipelineBarrier2 = vkCmdPipelineBarrier2KHR; + vkCmdWriteTimestamp2KHR = PFN_vkCmdWriteTimestamp2KHR( vkGetDeviceProcAddr( device, "vkCmdWriteTimestamp2KHR" ) ); + if ( !vkCmdWriteTimestamp2 ) + vkCmdWriteTimestamp2 = vkCmdWriteTimestamp2KHR; + vkQueueSubmit2KHR = PFN_vkQueueSubmit2KHR( vkGetDeviceProcAddr( device, "vkQueueSubmit2KHR" ) ); + if ( !vkQueueSubmit2 ) + vkQueueSubmit2 = vkQueueSubmit2KHR; + + //=== VK_EXT_descriptor_buffer === + vkGetDescriptorSetLayoutSizeEXT = PFN_vkGetDescriptorSetLayoutSizeEXT( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutSizeEXT" ) ); + vkGetDescriptorSetLayoutBindingOffsetEXT = + PFN_vkGetDescriptorSetLayoutBindingOffsetEXT( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutBindingOffsetEXT" ) ); + vkGetDescriptorEXT = PFN_vkGetDescriptorEXT( vkGetDeviceProcAddr( device, "vkGetDescriptorEXT" ) ); + vkCmdBindDescriptorBuffersEXT = PFN_vkCmdBindDescriptorBuffersEXT( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorBuffersEXT" ) ); + vkCmdSetDescriptorBufferOffsetsEXT = 
PFN_vkCmdSetDescriptorBufferOffsetsEXT( vkGetDeviceProcAddr( device, "vkCmdSetDescriptorBufferOffsetsEXT" ) ); + vkCmdBindDescriptorBufferEmbeddedSamplersEXT = + PFN_vkCmdBindDescriptorBufferEmbeddedSamplersEXT( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorBufferEmbeddedSamplersEXT" ) ); + vkGetBufferOpaqueCaptureDescriptorDataEXT = + PFN_vkGetBufferOpaqueCaptureDescriptorDataEXT( vkGetDeviceProcAddr( device, "vkGetBufferOpaqueCaptureDescriptorDataEXT" ) ); + vkGetImageOpaqueCaptureDescriptorDataEXT = + PFN_vkGetImageOpaqueCaptureDescriptorDataEXT( vkGetDeviceProcAddr( device, "vkGetImageOpaqueCaptureDescriptorDataEXT" ) ); + vkGetImageViewOpaqueCaptureDescriptorDataEXT = + PFN_vkGetImageViewOpaqueCaptureDescriptorDataEXT( vkGetDeviceProcAddr( device, "vkGetImageViewOpaqueCaptureDescriptorDataEXT" ) ); + vkGetSamplerOpaqueCaptureDescriptorDataEXT = + PFN_vkGetSamplerOpaqueCaptureDescriptorDataEXT( vkGetDeviceProcAddr( device, "vkGetSamplerOpaqueCaptureDescriptorDataEXT" ) ); + vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT = PFN_vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT( + vkGetDeviceProcAddr( device, "vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT" ) ); + + //=== VK_NV_fragment_shading_rate_enums === + vkCmdSetFragmentShadingRateEnumNV = PFN_vkCmdSetFragmentShadingRateEnumNV( vkGetDeviceProcAddr( device, "vkCmdSetFragmentShadingRateEnumNV" ) ); + + //=== VK_EXT_mesh_shader === + vkCmdDrawMeshTasksEXT = PFN_vkCmdDrawMeshTasksEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksEXT" ) ); + vkCmdDrawMeshTasksIndirectEXT = PFN_vkCmdDrawMeshTasksIndirectEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectEXT" ) ); + vkCmdDrawMeshTasksIndirectCountEXT = PFN_vkCmdDrawMeshTasksIndirectCountEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectCountEXT" ) ); + + //=== VK_KHR_copy_commands2 === + vkCmdCopyBuffer2KHR = PFN_vkCmdCopyBuffer2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyBuffer2KHR" ) ); + if ( !vkCmdCopyBuffer2 ) + vkCmdCopyBuffer2 = vkCmdCopyBuffer2KHR; + vkCmdCopyImage2KHR = PFN_vkCmdCopyImage2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyImage2KHR" ) ); + if ( !vkCmdCopyImage2 ) + vkCmdCopyImage2 = vkCmdCopyImage2KHR; + vkCmdCopyBufferToImage2KHR = PFN_vkCmdCopyBufferToImage2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyBufferToImage2KHR" ) ); + if ( !vkCmdCopyBufferToImage2 ) + vkCmdCopyBufferToImage2 = vkCmdCopyBufferToImage2KHR; + vkCmdCopyImageToBuffer2KHR = PFN_vkCmdCopyImageToBuffer2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyImageToBuffer2KHR" ) ); + if ( !vkCmdCopyImageToBuffer2 ) + vkCmdCopyImageToBuffer2 = vkCmdCopyImageToBuffer2KHR; + vkCmdBlitImage2KHR = PFN_vkCmdBlitImage2KHR( vkGetDeviceProcAddr( device, "vkCmdBlitImage2KHR" ) ); + if ( !vkCmdBlitImage2 ) + vkCmdBlitImage2 = vkCmdBlitImage2KHR; + vkCmdResolveImage2KHR = PFN_vkCmdResolveImage2KHR( vkGetDeviceProcAddr( device, "vkCmdResolveImage2KHR" ) ); + if ( !vkCmdResolveImage2 ) + vkCmdResolveImage2 = vkCmdResolveImage2KHR; + + //=== VK_EXT_device_fault === + vkGetDeviceFaultInfoEXT = PFN_vkGetDeviceFaultInfoEXT( vkGetDeviceProcAddr( device, "vkGetDeviceFaultInfoEXT" ) ); + + //=== VK_EXT_vertex_input_dynamic_state === + vkCmdSetVertexInputEXT = PFN_vkCmdSetVertexInputEXT( vkGetDeviceProcAddr( device, "vkCmdSetVertexInputEXT" ) ); + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_external_memory === + vkGetMemoryZirconHandleFUCHSIA = PFN_vkGetMemoryZirconHandleFUCHSIA( vkGetDeviceProcAddr( device, "vkGetMemoryZirconHandleFUCHSIA" ) ); + 
vkGetMemoryZirconHandlePropertiesFUCHSIA = + PFN_vkGetMemoryZirconHandlePropertiesFUCHSIA( vkGetDeviceProcAddr( device, "vkGetMemoryZirconHandlePropertiesFUCHSIA" ) ); +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_external_semaphore === + vkImportSemaphoreZirconHandleFUCHSIA = + PFN_vkImportSemaphoreZirconHandleFUCHSIA( vkGetDeviceProcAddr( device, "vkImportSemaphoreZirconHandleFUCHSIA" ) ); + vkGetSemaphoreZirconHandleFUCHSIA = PFN_vkGetSemaphoreZirconHandleFUCHSIA( vkGetDeviceProcAddr( device, "vkGetSemaphoreZirconHandleFUCHSIA" ) ); +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_buffer_collection === + vkCreateBufferCollectionFUCHSIA = PFN_vkCreateBufferCollectionFUCHSIA( vkGetDeviceProcAddr( device, "vkCreateBufferCollectionFUCHSIA" ) ); + vkSetBufferCollectionImageConstraintsFUCHSIA = + PFN_vkSetBufferCollectionImageConstraintsFUCHSIA( vkGetDeviceProcAddr( device, "vkSetBufferCollectionImageConstraintsFUCHSIA" ) ); + vkSetBufferCollectionBufferConstraintsFUCHSIA = + PFN_vkSetBufferCollectionBufferConstraintsFUCHSIA( vkGetDeviceProcAddr( device, "vkSetBufferCollectionBufferConstraintsFUCHSIA" ) ); + vkDestroyBufferCollectionFUCHSIA = PFN_vkDestroyBufferCollectionFUCHSIA( vkGetDeviceProcAddr( device, "vkDestroyBufferCollectionFUCHSIA" ) ); + vkGetBufferCollectionPropertiesFUCHSIA = + PFN_vkGetBufferCollectionPropertiesFUCHSIA( vkGetDeviceProcAddr( device, "vkGetBufferCollectionPropertiesFUCHSIA" ) ); +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + + //=== VK_HUAWEI_subpass_shading === + vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI = + PFN_vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI( vkGetDeviceProcAddr( device, "vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI" ) ); + vkCmdSubpassShadingHUAWEI = PFN_vkCmdSubpassShadingHUAWEI( vkGetDeviceProcAddr( device, "vkCmdSubpassShadingHUAWEI" ) ); + + //=== VK_HUAWEI_invocation_mask === + vkCmdBindInvocationMaskHUAWEI = PFN_vkCmdBindInvocationMaskHUAWEI( vkGetDeviceProcAddr( device, "vkCmdBindInvocationMaskHUAWEI" ) ); + + //=== VK_NV_external_memory_rdma === + vkGetMemoryRemoteAddressNV = PFN_vkGetMemoryRemoteAddressNV( vkGetDeviceProcAddr( device, "vkGetMemoryRemoteAddressNV" ) ); + + //=== VK_EXT_pipeline_properties === + vkGetPipelinePropertiesEXT = PFN_vkGetPipelinePropertiesEXT( vkGetDeviceProcAddr( device, "vkGetPipelinePropertiesEXT" ) ); + + //=== VK_EXT_extended_dynamic_state2 === + vkCmdSetPatchControlPointsEXT = PFN_vkCmdSetPatchControlPointsEXT( vkGetDeviceProcAddr( device, "vkCmdSetPatchControlPointsEXT" ) ); + vkCmdSetRasterizerDiscardEnableEXT = PFN_vkCmdSetRasterizerDiscardEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetRasterizerDiscardEnableEXT" ) ); + if ( !vkCmdSetRasterizerDiscardEnable ) + vkCmdSetRasterizerDiscardEnable = vkCmdSetRasterizerDiscardEnableEXT; + vkCmdSetDepthBiasEnableEXT = PFN_vkCmdSetDepthBiasEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthBiasEnableEXT" ) ); + if ( !vkCmdSetDepthBiasEnable ) + vkCmdSetDepthBiasEnable = vkCmdSetDepthBiasEnableEXT; + vkCmdSetLogicOpEXT = PFN_vkCmdSetLogicOpEXT( vkGetDeviceProcAddr( device, "vkCmdSetLogicOpEXT" ) ); + vkCmdSetPrimitiveRestartEnableEXT = PFN_vkCmdSetPrimitiveRestartEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveRestartEnableEXT" ) ); + if ( !vkCmdSetPrimitiveRestartEnable ) + vkCmdSetPrimitiveRestartEnable = vkCmdSetPrimitiveRestartEnableEXT; + + //=== VK_EXT_color_write_enable === + vkCmdSetColorWriteEnableEXT = PFN_vkCmdSetColorWriteEnableEXT( 
vkGetDeviceProcAddr( device, "vkCmdSetColorWriteEnableEXT" ) ); + + //=== VK_KHR_ray_tracing_maintenance1 === + vkCmdTraceRaysIndirect2KHR = PFN_vkCmdTraceRaysIndirect2KHR( vkGetDeviceProcAddr( device, "vkCmdTraceRaysIndirect2KHR" ) ); + + //=== VK_EXT_multi_draw === + vkCmdDrawMultiEXT = PFN_vkCmdDrawMultiEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMultiEXT" ) ); + vkCmdDrawMultiIndexedEXT = PFN_vkCmdDrawMultiIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMultiIndexedEXT" ) ); + + //=== VK_EXT_opacity_micromap === + vkCreateMicromapEXT = PFN_vkCreateMicromapEXT( vkGetDeviceProcAddr( device, "vkCreateMicromapEXT" ) ); + vkDestroyMicromapEXT = PFN_vkDestroyMicromapEXT( vkGetDeviceProcAddr( device, "vkDestroyMicromapEXT" ) ); + vkCmdBuildMicromapsEXT = PFN_vkCmdBuildMicromapsEXT( vkGetDeviceProcAddr( device, "vkCmdBuildMicromapsEXT" ) ); + vkBuildMicromapsEXT = PFN_vkBuildMicromapsEXT( vkGetDeviceProcAddr( device, "vkBuildMicromapsEXT" ) ); + vkCopyMicromapEXT = PFN_vkCopyMicromapEXT( vkGetDeviceProcAddr( device, "vkCopyMicromapEXT" ) ); + vkCopyMicromapToMemoryEXT = PFN_vkCopyMicromapToMemoryEXT( vkGetDeviceProcAddr( device, "vkCopyMicromapToMemoryEXT" ) ); + vkCopyMemoryToMicromapEXT = PFN_vkCopyMemoryToMicromapEXT( vkGetDeviceProcAddr( device, "vkCopyMemoryToMicromapEXT" ) ); + vkWriteMicromapsPropertiesEXT = PFN_vkWriteMicromapsPropertiesEXT( vkGetDeviceProcAddr( device, "vkWriteMicromapsPropertiesEXT" ) ); + vkCmdCopyMicromapEXT = PFN_vkCmdCopyMicromapEXT( vkGetDeviceProcAddr( device, "vkCmdCopyMicromapEXT" ) ); + vkCmdCopyMicromapToMemoryEXT = PFN_vkCmdCopyMicromapToMemoryEXT( vkGetDeviceProcAddr( device, "vkCmdCopyMicromapToMemoryEXT" ) ); + vkCmdCopyMemoryToMicromapEXT = PFN_vkCmdCopyMemoryToMicromapEXT( vkGetDeviceProcAddr( device, "vkCmdCopyMemoryToMicromapEXT" ) ); + vkCmdWriteMicromapsPropertiesEXT = PFN_vkCmdWriteMicromapsPropertiesEXT( vkGetDeviceProcAddr( device, "vkCmdWriteMicromapsPropertiesEXT" ) ); + vkGetDeviceMicromapCompatibilityEXT = PFN_vkGetDeviceMicromapCompatibilityEXT( vkGetDeviceProcAddr( device, "vkGetDeviceMicromapCompatibilityEXT" ) ); + vkGetMicromapBuildSizesEXT = PFN_vkGetMicromapBuildSizesEXT( vkGetDeviceProcAddr( device, "vkGetMicromapBuildSizesEXT" ) ); + + //=== VK_HUAWEI_cluster_culling_shader === + vkCmdDrawClusterHUAWEI = PFN_vkCmdDrawClusterHUAWEI( vkGetDeviceProcAddr( device, "vkCmdDrawClusterHUAWEI" ) ); + vkCmdDrawClusterIndirectHUAWEI = PFN_vkCmdDrawClusterIndirectHUAWEI( vkGetDeviceProcAddr( device, "vkCmdDrawClusterIndirectHUAWEI" ) ); + + //=== VK_EXT_pageable_device_local_memory === + vkSetDeviceMemoryPriorityEXT = PFN_vkSetDeviceMemoryPriorityEXT( vkGetDeviceProcAddr( device, "vkSetDeviceMemoryPriorityEXT" ) ); + + //=== VK_KHR_maintenance4 === + vkGetDeviceBufferMemoryRequirementsKHR = + PFN_vkGetDeviceBufferMemoryRequirementsKHR( vkGetDeviceProcAddr( device, "vkGetDeviceBufferMemoryRequirementsKHR" ) ); + if ( !vkGetDeviceBufferMemoryRequirements ) + vkGetDeviceBufferMemoryRequirements = vkGetDeviceBufferMemoryRequirementsKHR; + vkGetDeviceImageMemoryRequirementsKHR = + PFN_vkGetDeviceImageMemoryRequirementsKHR( vkGetDeviceProcAddr( device, "vkGetDeviceImageMemoryRequirementsKHR" ) ); + if ( !vkGetDeviceImageMemoryRequirements ) + vkGetDeviceImageMemoryRequirements = vkGetDeviceImageMemoryRequirementsKHR; + vkGetDeviceImageSparseMemoryRequirementsKHR = + PFN_vkGetDeviceImageSparseMemoryRequirementsKHR( vkGetDeviceProcAddr( device, "vkGetDeviceImageSparseMemoryRequirementsKHR" ) ); + if ( !vkGetDeviceImageSparseMemoryRequirements ) + 
vkGetDeviceImageSparseMemoryRequirements = vkGetDeviceImageSparseMemoryRequirementsKHR; + + //=== VK_VALVE_descriptor_set_host_mapping === + vkGetDescriptorSetLayoutHostMappingInfoVALVE = + PFN_vkGetDescriptorSetLayoutHostMappingInfoVALVE( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutHostMappingInfoVALVE" ) ); + vkGetDescriptorSetHostMappingVALVE = PFN_vkGetDescriptorSetHostMappingVALVE( vkGetDeviceProcAddr( device, "vkGetDescriptorSetHostMappingVALVE" ) ); + + //=== VK_NV_copy_memory_indirect === + vkCmdCopyMemoryIndirectNV = PFN_vkCmdCopyMemoryIndirectNV( vkGetDeviceProcAddr( device, "vkCmdCopyMemoryIndirectNV" ) ); + vkCmdCopyMemoryToImageIndirectNV = PFN_vkCmdCopyMemoryToImageIndirectNV( vkGetDeviceProcAddr( device, "vkCmdCopyMemoryToImageIndirectNV" ) ); + + //=== VK_NV_memory_decompression === + vkCmdDecompressMemoryNV = PFN_vkCmdDecompressMemoryNV( vkGetDeviceProcAddr( device, "vkCmdDecompressMemoryNV" ) ); + vkCmdDecompressMemoryIndirectCountNV = + PFN_vkCmdDecompressMemoryIndirectCountNV( vkGetDeviceProcAddr( device, "vkCmdDecompressMemoryIndirectCountNV" ) ); + + //=== VK_NV_device_generated_commands_compute === + vkGetPipelineIndirectMemoryRequirementsNV = + PFN_vkGetPipelineIndirectMemoryRequirementsNV( vkGetDeviceProcAddr( device, "vkGetPipelineIndirectMemoryRequirementsNV" ) ); + vkCmdUpdatePipelineIndirectBufferNV = PFN_vkCmdUpdatePipelineIndirectBufferNV( vkGetDeviceProcAddr( device, "vkCmdUpdatePipelineIndirectBufferNV" ) ); + vkGetPipelineIndirectDeviceAddressNV = + PFN_vkGetPipelineIndirectDeviceAddressNV( vkGetDeviceProcAddr( device, "vkGetPipelineIndirectDeviceAddressNV" ) ); + + //=== VK_EXT_extended_dynamic_state3 === + vkCmdSetDepthClampEnableEXT = PFN_vkCmdSetDepthClampEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthClampEnableEXT" ) ); + vkCmdSetPolygonModeEXT = PFN_vkCmdSetPolygonModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetPolygonModeEXT" ) ); + vkCmdSetRasterizationSamplesEXT = PFN_vkCmdSetRasterizationSamplesEXT( vkGetDeviceProcAddr( device, "vkCmdSetRasterizationSamplesEXT" ) ); + vkCmdSetSampleMaskEXT = PFN_vkCmdSetSampleMaskEXT( vkGetDeviceProcAddr( device, "vkCmdSetSampleMaskEXT" ) ); + vkCmdSetAlphaToCoverageEnableEXT = PFN_vkCmdSetAlphaToCoverageEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetAlphaToCoverageEnableEXT" ) ); + vkCmdSetAlphaToOneEnableEXT = PFN_vkCmdSetAlphaToOneEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetAlphaToOneEnableEXT" ) ); + vkCmdSetLogicOpEnableEXT = PFN_vkCmdSetLogicOpEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetLogicOpEnableEXT" ) ); + vkCmdSetColorBlendEnableEXT = PFN_vkCmdSetColorBlendEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorBlendEnableEXT" ) ); + vkCmdSetColorBlendEquationEXT = PFN_vkCmdSetColorBlendEquationEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorBlendEquationEXT" ) ); + vkCmdSetColorWriteMaskEXT = PFN_vkCmdSetColorWriteMaskEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorWriteMaskEXT" ) ); + vkCmdSetTessellationDomainOriginEXT = PFN_vkCmdSetTessellationDomainOriginEXT( vkGetDeviceProcAddr( device, "vkCmdSetTessellationDomainOriginEXT" ) ); + vkCmdSetRasterizationStreamEXT = PFN_vkCmdSetRasterizationStreamEXT( vkGetDeviceProcAddr( device, "vkCmdSetRasterizationStreamEXT" ) ); + vkCmdSetConservativeRasterizationModeEXT = + PFN_vkCmdSetConservativeRasterizationModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetConservativeRasterizationModeEXT" ) ); + vkCmdSetExtraPrimitiveOverestimationSizeEXT = + PFN_vkCmdSetExtraPrimitiveOverestimationSizeEXT( vkGetDeviceProcAddr( device, 
"vkCmdSetExtraPrimitiveOverestimationSizeEXT" ) ); + vkCmdSetDepthClipEnableEXT = PFN_vkCmdSetDepthClipEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthClipEnableEXT" ) ); + vkCmdSetSampleLocationsEnableEXT = PFN_vkCmdSetSampleLocationsEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetSampleLocationsEnableEXT" ) ); + vkCmdSetColorBlendAdvancedEXT = PFN_vkCmdSetColorBlendAdvancedEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorBlendAdvancedEXT" ) ); + vkCmdSetProvokingVertexModeEXT = PFN_vkCmdSetProvokingVertexModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetProvokingVertexModeEXT" ) ); + vkCmdSetLineRasterizationModeEXT = PFN_vkCmdSetLineRasterizationModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetLineRasterizationModeEXT" ) ); + vkCmdSetLineStippleEnableEXT = PFN_vkCmdSetLineStippleEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetLineStippleEnableEXT" ) ); + vkCmdSetDepthClipNegativeOneToOneEXT = + PFN_vkCmdSetDepthClipNegativeOneToOneEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthClipNegativeOneToOneEXT" ) ); + vkCmdSetViewportWScalingEnableNV = PFN_vkCmdSetViewportWScalingEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportWScalingEnableNV" ) ); + vkCmdSetViewportSwizzleNV = PFN_vkCmdSetViewportSwizzleNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportSwizzleNV" ) ); + vkCmdSetCoverageToColorEnableNV = PFN_vkCmdSetCoverageToColorEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageToColorEnableNV" ) ); + vkCmdSetCoverageToColorLocationNV = PFN_vkCmdSetCoverageToColorLocationNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageToColorLocationNV" ) ); + vkCmdSetCoverageModulationModeNV = PFN_vkCmdSetCoverageModulationModeNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageModulationModeNV" ) ); + vkCmdSetCoverageModulationTableEnableNV = + PFN_vkCmdSetCoverageModulationTableEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageModulationTableEnableNV" ) ); + vkCmdSetCoverageModulationTableNV = PFN_vkCmdSetCoverageModulationTableNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageModulationTableNV" ) ); + vkCmdSetShadingRateImageEnableNV = PFN_vkCmdSetShadingRateImageEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetShadingRateImageEnableNV" ) ); + vkCmdSetRepresentativeFragmentTestEnableNV = + PFN_vkCmdSetRepresentativeFragmentTestEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetRepresentativeFragmentTestEnableNV" ) ); + vkCmdSetCoverageReductionModeNV = PFN_vkCmdSetCoverageReductionModeNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageReductionModeNV" ) ); + + //=== VK_EXT_shader_module_identifier === + vkGetShaderModuleIdentifierEXT = PFN_vkGetShaderModuleIdentifierEXT( vkGetDeviceProcAddr( device, "vkGetShaderModuleIdentifierEXT" ) ); + vkGetShaderModuleCreateInfoIdentifierEXT = + PFN_vkGetShaderModuleCreateInfoIdentifierEXT( vkGetDeviceProcAddr( device, "vkGetShaderModuleCreateInfoIdentifierEXT" ) ); + + //=== VK_NV_optical_flow === + vkCreateOpticalFlowSessionNV = PFN_vkCreateOpticalFlowSessionNV( vkGetDeviceProcAddr( device, "vkCreateOpticalFlowSessionNV" ) ); + vkDestroyOpticalFlowSessionNV = PFN_vkDestroyOpticalFlowSessionNV( vkGetDeviceProcAddr( device, "vkDestroyOpticalFlowSessionNV" ) ); + vkBindOpticalFlowSessionImageNV = PFN_vkBindOpticalFlowSessionImageNV( vkGetDeviceProcAddr( device, "vkBindOpticalFlowSessionImageNV" ) ); + vkCmdOpticalFlowExecuteNV = PFN_vkCmdOpticalFlowExecuteNV( vkGetDeviceProcAddr( device, "vkCmdOpticalFlowExecuteNV" ) ); + + //=== VK_KHR_maintenance5 === + vkCmdBindIndexBuffer2KHR = PFN_vkCmdBindIndexBuffer2KHR( vkGetDeviceProcAddr( device, 
"vkCmdBindIndexBuffer2KHR" ) ); + vkGetRenderingAreaGranularityKHR = PFN_vkGetRenderingAreaGranularityKHR( vkGetDeviceProcAddr( device, "vkGetRenderingAreaGranularityKHR" ) ); + vkGetDeviceImageSubresourceLayoutKHR = + PFN_vkGetDeviceImageSubresourceLayoutKHR( vkGetDeviceProcAddr( device, "vkGetDeviceImageSubresourceLayoutKHR" ) ); + vkGetImageSubresourceLayout2KHR = PFN_vkGetImageSubresourceLayout2KHR( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout2KHR" ) ); + + //=== VK_AMD_anti_lag === + vkAntiLagUpdateAMD = PFN_vkAntiLagUpdateAMD( vkGetDeviceProcAddr( device, "vkAntiLagUpdateAMD" ) ); + + //=== VK_EXT_shader_object === + vkCreateShadersEXT = PFN_vkCreateShadersEXT( vkGetDeviceProcAddr( device, "vkCreateShadersEXT" ) ); + vkDestroyShaderEXT = PFN_vkDestroyShaderEXT( vkGetDeviceProcAddr( device, "vkDestroyShaderEXT" ) ); + vkGetShaderBinaryDataEXT = PFN_vkGetShaderBinaryDataEXT( vkGetDeviceProcAddr( device, "vkGetShaderBinaryDataEXT" ) ); + vkCmdBindShadersEXT = PFN_vkCmdBindShadersEXT( vkGetDeviceProcAddr( device, "vkCmdBindShadersEXT" ) ); + vkCmdSetDepthClampRangeEXT = PFN_vkCmdSetDepthClampRangeEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthClampRangeEXT" ) ); + + //=== VK_KHR_pipeline_binary === + vkCreatePipelineBinariesKHR = PFN_vkCreatePipelineBinariesKHR( vkGetDeviceProcAddr( device, "vkCreatePipelineBinariesKHR" ) ); + vkDestroyPipelineBinaryKHR = PFN_vkDestroyPipelineBinaryKHR( vkGetDeviceProcAddr( device, "vkDestroyPipelineBinaryKHR" ) ); + vkGetPipelineKeyKHR = PFN_vkGetPipelineKeyKHR( vkGetDeviceProcAddr( device, "vkGetPipelineKeyKHR" ) ); + vkGetPipelineBinaryDataKHR = PFN_vkGetPipelineBinaryDataKHR( vkGetDeviceProcAddr( device, "vkGetPipelineBinaryDataKHR" ) ); + vkReleaseCapturedPipelineDataKHR = PFN_vkReleaseCapturedPipelineDataKHR( vkGetDeviceProcAddr( device, "vkReleaseCapturedPipelineDataKHR" ) ); + + //=== VK_QCOM_tile_properties === + vkGetFramebufferTilePropertiesQCOM = PFN_vkGetFramebufferTilePropertiesQCOM( vkGetDeviceProcAddr( device, "vkGetFramebufferTilePropertiesQCOM" ) ); + vkGetDynamicRenderingTilePropertiesQCOM = + PFN_vkGetDynamicRenderingTilePropertiesQCOM( vkGetDeviceProcAddr( device, "vkGetDynamicRenderingTilePropertiesQCOM" ) ); + + //=== VK_NV_low_latency2 === + vkSetLatencySleepModeNV = PFN_vkSetLatencySleepModeNV( vkGetDeviceProcAddr( device, "vkSetLatencySleepModeNV" ) ); + vkLatencySleepNV = PFN_vkLatencySleepNV( vkGetDeviceProcAddr( device, "vkLatencySleepNV" ) ); + vkSetLatencyMarkerNV = PFN_vkSetLatencyMarkerNV( vkGetDeviceProcAddr( device, "vkSetLatencyMarkerNV" ) ); + vkGetLatencyTimingsNV = PFN_vkGetLatencyTimingsNV( vkGetDeviceProcAddr( device, "vkGetLatencyTimingsNV" ) ); + vkQueueNotifyOutOfBandNV = PFN_vkQueueNotifyOutOfBandNV( vkGetDeviceProcAddr( device, "vkQueueNotifyOutOfBandNV" ) ); + + //=== VK_EXT_attachment_feedback_loop_dynamic_state === + vkCmdSetAttachmentFeedbackLoopEnableEXT = + PFN_vkCmdSetAttachmentFeedbackLoopEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetAttachmentFeedbackLoopEnableEXT" ) ); + +#if defined( VK_USE_PLATFORM_SCREEN_QNX ) + //=== VK_QNX_external_memory_screen_buffer === + vkGetScreenBufferPropertiesQNX = PFN_vkGetScreenBufferPropertiesQNX( vkGetDeviceProcAddr( device, "vkGetScreenBufferPropertiesQNX" ) ); +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + //=== VK_KHR_line_rasterization === + vkCmdSetLineStippleKHR = PFN_vkCmdSetLineStippleKHR( vkGetDeviceProcAddr( device, "vkCmdSetLineStippleKHR" ) ); + + //=== VK_KHR_calibrated_timestamps === + vkGetCalibratedTimestampsKHR = 
PFN_vkGetCalibratedTimestampsKHR( vkGetDeviceProcAddr( device, "vkGetCalibratedTimestampsKHR" ) ); + + //=== VK_KHR_maintenance6 === + vkCmdBindDescriptorSets2KHR = PFN_vkCmdBindDescriptorSets2KHR( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorSets2KHR" ) ); + vkCmdPushConstants2KHR = PFN_vkCmdPushConstants2KHR( vkGetDeviceProcAddr( device, "vkCmdPushConstants2KHR" ) ); + vkCmdPushDescriptorSet2KHR = PFN_vkCmdPushDescriptorSet2KHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSet2KHR" ) ); + vkCmdPushDescriptorSetWithTemplate2KHR = + PFN_vkCmdPushDescriptorSetWithTemplate2KHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetWithTemplate2KHR" ) ); + vkCmdSetDescriptorBufferOffsets2EXT = PFN_vkCmdSetDescriptorBufferOffsets2EXT( vkGetDeviceProcAddr( device, "vkCmdSetDescriptorBufferOffsets2EXT" ) ); + vkCmdBindDescriptorBufferEmbeddedSamplers2EXT = + PFN_vkCmdBindDescriptorBufferEmbeddedSamplers2EXT( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorBufferEmbeddedSamplers2EXT" ) ); + + //=== VK_EXT_device_generated_commands === + vkGetGeneratedCommandsMemoryRequirementsEXT = + PFN_vkGetGeneratedCommandsMemoryRequirementsEXT( vkGetDeviceProcAddr( device, "vkGetGeneratedCommandsMemoryRequirementsEXT" ) ); + vkCmdPreprocessGeneratedCommandsEXT = PFN_vkCmdPreprocessGeneratedCommandsEXT( vkGetDeviceProcAddr( device, "vkCmdPreprocessGeneratedCommandsEXT" ) ); + vkCmdExecuteGeneratedCommandsEXT = PFN_vkCmdExecuteGeneratedCommandsEXT( vkGetDeviceProcAddr( device, "vkCmdExecuteGeneratedCommandsEXT" ) ); + vkCreateIndirectCommandsLayoutEXT = PFN_vkCreateIndirectCommandsLayoutEXT( vkGetDeviceProcAddr( device, "vkCreateIndirectCommandsLayoutEXT" ) ); + vkDestroyIndirectCommandsLayoutEXT = PFN_vkDestroyIndirectCommandsLayoutEXT( vkGetDeviceProcAddr( device, "vkDestroyIndirectCommandsLayoutEXT" ) ); + vkCreateIndirectExecutionSetEXT = PFN_vkCreateIndirectExecutionSetEXT( vkGetDeviceProcAddr( device, "vkCreateIndirectExecutionSetEXT" ) ); + vkDestroyIndirectExecutionSetEXT = PFN_vkDestroyIndirectExecutionSetEXT( vkGetDeviceProcAddr( device, "vkDestroyIndirectExecutionSetEXT" ) ); + vkUpdateIndirectExecutionSetPipelineEXT = + PFN_vkUpdateIndirectExecutionSetPipelineEXT( vkGetDeviceProcAddr( device, "vkUpdateIndirectExecutionSetPipelineEXT" ) ); + vkUpdateIndirectExecutionSetShaderEXT = + PFN_vkUpdateIndirectExecutionSetShaderEXT( vkGetDeviceProcAddr( device, "vkUpdateIndirectExecutionSetShaderEXT" ) ); + } + + template + void init( VULKAN_HPP_NAMESPACE::Instance const & instance, VULKAN_HPP_NAMESPACE::Device const & device, DynamicLoader const & dl ) VULKAN_HPP_NOEXCEPT + { + PFN_vkGetInstanceProcAddr getInstanceProcAddr = dl.template getProcAddress( "vkGetInstanceProcAddr" ); + PFN_vkGetDeviceProcAddr getDeviceProcAddr = dl.template getProcAddress( "vkGetDeviceProcAddr" ); + init( static_cast( instance ), getInstanceProcAddr, static_cast( device ), device ? 
getDeviceProcAddr : nullptr ); + } + + template - void init( VULKAN_HPP_NAMESPACE::Instance const & instance, VULKAN_HPP_NAMESPACE::Device const & device ) VULKAN_HPP_NOEXCEPT - { - static DynamicLoader dl; - init( instance, device, dl ); - } - }; + > + void init( VULKAN_HPP_NAMESPACE::Instance const & instance, VULKAN_HPP_NAMESPACE::Device const & device ) VULKAN_HPP_NOEXCEPT + { + static DynamicLoader dl; + init( instance, device, dl ); + } + }; + } // namespace detail } // namespace VULKAN_HPP_NAMESPACE #endif diff --git a/third_party/vulkan/vulkan_core.h b/third_party/vulkan/vulkan_core.h index b2973ff..d9cdd13 100644 --- a/third_party/vulkan/vulkan_core.h +++ b/third_party/vulkan/vulkan_core.h @@ -69,7 +69,7 @@ extern "C" { #define VK_API_VERSION_1_0 VK_MAKE_API_VERSION(0, 1, 0, 0)// Patch version should always be set to 0 // Version of this file -#define VK_HEADER_VERSION 300 +#define VK_HEADER_VERSION 301 // Complete version of this file #define VK_HEADER_VERSION_COMPLETE VK_MAKE_API_VERSION(0, 1, 3, VK_HEADER_VERSION) @@ -1163,6 +1163,8 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_IMAGE_ALIGNMENT_CONTROL_CREATE_INFO_MESA = 1000575002, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLAMP_CONTROL_FEATURES_EXT = 1000582000, VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_DEPTH_CLAMP_CONTROL_CREATE_INFO_EXT = 1000582001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HDR_VIVID_FEATURES_HUAWEI = 1000590000, + VK_STRUCTURE_TYPE_HDR_VIVID_DYNAMIC_METADATA_HUAWEI = 1000590001, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_2_FEATURES_NV = 1000593000, VK_STRUCTURE_TYPE_COOPERATIVE_MATRIX_FLEXIBLE_DIMENSIONS_PROPERTIES_NV = 1000593001, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_2_PROPERTIES_NV = 1000593002, @@ -19901,6 +19903,25 @@ typedef struct VkPipelineViewportDepthClampControlCreateInfoEXT { +// VK_HUAWEI_hdr_vivid is a preprocessor guard. Do not pass it to API calls. +#define VK_HUAWEI_hdr_vivid 1 +#define VK_HUAWEI_HDR_VIVID_SPEC_VERSION 1 +#define VK_HUAWEI_HDR_VIVID_EXTENSION_NAME "VK_HUAWEI_hdr_vivid" +typedef struct VkPhysicalDeviceHdrVividFeaturesHUAWEI { + VkStructureType sType; + void* pNext; + VkBool32 hdrVivid; +} VkPhysicalDeviceHdrVividFeaturesHUAWEI; + +typedef struct VkHdrVividDynamicMetadataHUAWEI { + VkStructureType sType; + const void* pNext; + size_t dynamicMetadataSize; + const void* pDynamicMetadata; +} VkHdrVividDynamicMetadataHUAWEI; + + + // VK_NV_cooperative_matrix2 is a preprocessor guard. Do not pass it to API calls. 
#define VK_NV_cooperative_matrix2 1 #define VK_NV_COOPERATIVE_MATRIX_2_SPEC_VERSION 1 diff --git a/third_party/vulkan/vulkan_enums.hpp b/third_party/vulkan/vulkan_enums.hpp index e3c323f..5209b44 100644 --- a/third_party/vulkan/vulkan_enums.hpp +++ b/third_party/vulkan/vulkan_enums.hpp @@ -1468,6 +1468,8 @@ namespace VULKAN_HPP_NAMESPACE eImageAlignmentControlCreateInfoMESA = VK_STRUCTURE_TYPE_IMAGE_ALIGNMENT_CONTROL_CREATE_INFO_MESA, ePhysicalDeviceDepthClampControlFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLAMP_CONTROL_FEATURES_EXT, ePipelineViewportDepthClampControlCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_DEPTH_CLAMP_CONTROL_CREATE_INFO_EXT, + ePhysicalDeviceHdrVividFeaturesHUAWEI = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HDR_VIVID_FEATURES_HUAWEI, + eHdrVividDynamicMetadataHUAWEI = VK_STRUCTURE_TYPE_HDR_VIVID_DYNAMIC_METADATA_HUAWEI, ePhysicalDeviceCooperativeMatrix2FeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_2_FEATURES_NV, eCooperativeMatrixFlexibleDimensionsPropertiesNV = VK_STRUCTURE_TYPE_COOPERATIVE_MATRIX_FLEXIBLE_DIMENSIONS_PROPERTIES_NV, ePhysicalDeviceCooperativeMatrix2PropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_2_PROPERTIES_NV diff --git a/third_party/vulkan/vulkan_extension_inspection.hpp b/third_party/vulkan/vulkan_extension_inspection.hpp index 73fb83d..3075472 100644 --- a/third_party/vulkan/vulkan_extension_inspection.hpp +++ b/third_party/vulkan/vulkan_extension_inspection.hpp @@ -454,6 +454,7 @@ namespace VULKAN_HPP_NAMESPACE "VK_EXT_device_generated_commands", "VK_MESA_image_alignment_control", "VK_EXT_depth_clamp_control", + "VK_HUAWEI_hdr_vivid", "VK_NV_cooperative_matrix2" }; return deviceExtensions; @@ -2362,6 +2363,16 @@ namespace VULKAN_HPP_NAMESPACE "VK_KHR_get_physical_device_properties2", } } }, { "VK_VERSION_1_1", { {} } } } }, + { "VK_HUAWEI_hdr_vivid", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", + { { + "VK_KHR_swapchain", + "VK_EXT_hdr_metadata", + } } } } }, { "VK_NV_cooperative_matrix2", { { "VK_VERSION_1_0", { { @@ -3145,7 +3156,7 @@ namespace VULKAN_HPP_NAMESPACE ( extension == "VK_KHR_maintenance7" ) || ( extension == "VK_NV_shader_atomic_float16_vector" ) || ( extension == "VK_EXT_shader_replicated_composites" ) || ( extension == "VK_NV_ray_tracing_validation" ) || ( extension == "VK_EXT_device_generated_commands" ) || ( extension == "VK_MESA_image_alignment_control" ) || - ( extension == "VK_EXT_depth_clamp_control" ) || ( extension == "VK_NV_cooperative_matrix2" ); + ( extension == "VK_EXT_depth_clamp_control" ) || ( extension == "VK_HUAWEI_hdr_vivid" ) || ( extension == "VK_NV_cooperative_matrix2" ); } VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 bool isInstanceExtension( std::string const & extension ) diff --git a/third_party/vulkan/vulkan_funcs.hpp b/third_party/vulkan/vulkan_funcs.hpp index dee5661..fe0174d 100644 --- a/third_party/vulkan/vulkan_funcs.hpp +++ b/third_party/vulkan/vulkan_funcs.hpp @@ -70,7 +70,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::createInstanceUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( instance, ObjectDestroy( allocator, d ) ) ); + result, UniqueHandle( instance, detail::ObjectDestroy( allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -490,7 +490,7 @@ namespace VULKAN_HPP_NAMESPACE 
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDeviceUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( device, ObjectDestroy( allocator, d ) ) ); + result, UniqueHandle( device, detail::ObjectDestroy( allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -998,7 +998,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateMemoryUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( memory, ObjectFree( *this, allocator, d ) ) ); + result, UniqueHandle( memory, detail::ObjectFree( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -1580,7 +1580,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createFenceUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( fence, ObjectDestroy( *this, allocator, d ) ) ); + result, UniqueHandle( fence, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -1779,7 +1779,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSemaphoreUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( semaphore, ObjectDestroy( *this, allocator, d ) ) ); + result, UniqueHandle( semaphore, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -1891,7 +1891,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createEventUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( event, ObjectDestroy( *this, allocator, d ) ) ); + result, UniqueHandle( event, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -2076,7 +2076,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createQueryPoolUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( queryPool, ObjectDestroy( *this, allocator, d ) ) ); + result, UniqueHandle( queryPool, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -2274,7 +2274,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( buffer, ObjectDestroy( *this, allocator, d ) ) ); + result, UniqueHandle( buffer, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -2388,7 +2388,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferViewUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( view, ObjectDestroy( *this, allocator, d ) ) ); + 
result, UniqueHandle( view, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -2500,7 +2500,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createImageUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( image, ObjectDestroy( *this, allocator, d ) ) ); + result, UniqueHandle( image, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -2647,7 +2647,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createImageViewUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( view, ObjectDestroy( *this, allocator, d ) ) ); + result, UniqueHandle( view, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -2763,7 +2763,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createShaderModuleUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( shaderModule, ObjectDestroy( *this, allocator, d ) ) ); + result, UniqueHandle( shaderModule, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -2879,7 +2879,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineCacheUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( pipelineCache, ObjectDestroy( *this, allocator, d ) ) ); + result, UniqueHandle( pipelineCache, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -3183,7 +3183,7 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); std::vector, PipelineAllocator> uniquePipelines; uniquePipelines.reserve( createInfos.size() ); - ObjectDestroy deleter( *this, allocator, d ); + detail::ObjectDestroy deleter( *this, allocator, d ); for ( auto const & pipeline : pipelines ) { uniquePipelines.push_back( UniqueHandle( pipeline, deleter ) ); @@ -3220,7 +3220,7 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); std::vector, PipelineAllocator> uniquePipelines( pipelineAllocator ); uniquePipelines.reserve( createInfos.size() ); - ObjectDestroy deleter( *this, allocator, d ); + detail::ObjectDestroy deleter( *this, allocator, d ); for ( auto const & pipeline : pipelines ) { uniquePipelines.push_back( UniqueHandle( pipeline, deleter ) ); @@ -3253,7 +3253,7 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); return ResultValue>( - result, UniqueHandle( pipeline, ObjectDestroy( *this, allocator, d ) ) ); + result, UniqueHandle( pipeline, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -3391,7 +3391,7 @@ namespace VULKAN_HPP_NAMESPACE { 
VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); std::vector, PipelineAllocator> uniquePipelines; uniquePipelines.reserve( createInfos.size() ); - ObjectDestroy deleter( *this, allocator, d ); + detail::ObjectDestroy deleter( *this, allocator, d ); for ( auto const & pipeline : pipelines ) { uniquePipelines.push_back( UniqueHandle( pipeline, deleter ) ); @@ -3428,7 +3428,7 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); std::vector, PipelineAllocator> uniquePipelines( pipelineAllocator ); uniquePipelines.reserve( createInfos.size() ); - ObjectDestroy deleter( *this, allocator, d ); + detail::ObjectDestroy deleter( *this, allocator, d ); for ( auto const & pipeline : pipelines ) { uniquePipelines.push_back( UniqueHandle( pipeline, deleter ) ); @@ -3461,7 +3461,7 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); return ResultValue>( - result, UniqueHandle( pipeline, ObjectDestroy( *this, allocator, d ) ) ); + result, UniqueHandle( pipeline, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -3577,7 +3577,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineLayoutUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( pipelineLayout, ObjectDestroy( *this, allocator, d ) ) ); + result, UniqueHandle( pipelineLayout, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -3689,7 +3689,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( sampler, ObjectDestroy( *this, allocator, d ) ) ); + result, UniqueHandle( sampler, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -3803,7 +3803,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorSetLayoutUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( setLayout, ObjectDestroy( *this, allocator, d ) ) ); + result, UniqueHandle( setLayout, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -3923,7 +3923,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorPoolUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( descriptorPool, ObjectDestroy( *this, allocator, d ) ) ); + result, UniqueHandle( descriptorPool, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -4080,7 +4080,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSetsUnique" ); std::vector, DescriptorSetAllocator> uniqueDescriptorSets; uniqueDescriptorSets.reserve( 
allocateInfo.descriptorSetCount ); - PoolFree deleter( *this, allocateInfo.descriptorPool, d ); + detail::PoolFree deleter( *this, allocateInfo.descriptorPool, d ); for ( auto const & descriptorSet : descriptorSets ) { uniqueDescriptorSets.push_back( UniqueHandle( descriptorSet, deleter ) ); @@ -4110,7 +4110,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSetsUnique" ); std::vector, DescriptorSetAllocator> uniqueDescriptorSets( descriptorSetAllocator ); uniqueDescriptorSets.reserve( allocateInfo.descriptorSetCount ); - PoolFree deleter( *this, allocateInfo.descriptorPool, d ); + detail::PoolFree deleter( *this, allocateInfo.descriptorPool, d ); for ( auto const & descriptorSet : descriptorSets ) { uniqueDescriptorSets.push_back( UniqueHandle( descriptorSet, deleter ) ); @@ -4270,7 +4270,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createFramebufferUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( framebuffer, ObjectDestroy( *this, allocator, d ) ) ); + result, UniqueHandle( framebuffer, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -4386,7 +4386,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPassUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( renderPass, ObjectDestroy( *this, allocator, d ) ) ); + result, UniqueHandle( renderPass, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -4528,7 +4528,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCommandPoolUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( commandPool, ObjectDestroy( *this, allocator, d ) ) ); + result, UniqueHandle( commandPool, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -4688,7 +4688,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffersUnique" ); std::vector, CommandBufferAllocator> uniqueCommandBuffers; uniqueCommandBuffers.reserve( allocateInfo.commandBufferCount ); - PoolFree deleter( *this, allocateInfo.commandPool, d ); + detail::PoolFree deleter( *this, allocateInfo.commandPool, d ); for ( auto const & commandBuffer : commandBuffers ) { uniqueCommandBuffers.push_back( UniqueHandle( commandBuffer, deleter ) ); @@ -4718,7 +4718,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffersUnique" ); std::vector, CommandBufferAllocator> uniqueCommandBuffers( commandBufferAllocator ); uniqueCommandBuffers.reserve( allocateInfo.commandBufferCount ); - PoolFree deleter( *this, allocateInfo.commandPool, d ); + detail::PoolFree deleter( *this, allocateInfo.commandPool, d ); for ( auto const & commandBuffer : commandBuffers ) { uniqueCommandBuffers.push_back( UniqueHandle( commandBuffer, deleter ) ); @@ -6787,7 +6787,8 @@ namespace VULKAN_HPP_NAMESPACE 
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversionUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( ycbcrConversion, ObjectDestroy( *this, allocator, d ) ) ); + result, + UniqueHandle( ycbcrConversion, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -6913,7 +6914,7 @@ namespace VULKAN_HPP_NAMESPACE return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, UniqueHandle( - descriptorUpdateTemplate, ObjectDestroy( *this, allocator, d ) ) ); + descriptorUpdateTemplate, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -7253,7 +7254,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2Unique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( renderPass, ObjectDestroy( *this, allocator, d ) ) ); + result, UniqueHandle( renderPass, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -7647,7 +7648,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createPrivateDataSlotUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( privateDataSlot, ObjectDestroy( *this, allocator, d ) ) ); + result, + UniqueHandle( privateDataSlot, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -8794,7 +8796,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSwapchainKHRUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( swapchain, ObjectDestroy( *this, allocator, d ) ) ); + result, UniqueHandle( swapchain, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -9578,7 +9580,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDisplayModeKHRUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( mode, ObjectDestroy( *this, allocator, d ) ) ); + result, UniqueHandle( mode, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -9673,7 +9675,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDisplayPlaneSurfaceKHRUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( surface, ObjectDestroy( *this, allocator, d ) ) ); + result, UniqueHandle( surface, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -9796,7 +9798,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHRUnique" ); std::vector, SwapchainKHRAllocator> uniqueSwapchains; uniqueSwapchains.reserve( createInfos.size() ); - 
ObjectDestroy deleter( *this, allocator, d ); + detail::ObjectDestroy deleter( *this, allocator, d ); for ( auto const & swapchain : swapchains ) { uniqueSwapchains.push_back( UniqueHandle( swapchain, deleter ) ); @@ -9830,7 +9832,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHRUnique" ); std::vector, SwapchainKHRAllocator> uniqueSwapchains( swapchainKHRAllocator ); uniqueSwapchains.reserve( createInfos.size() ); - ObjectDestroy deleter( *this, allocator, d ); + detail::ObjectDestroy deleter( *this, allocator, d ); for ( auto const & swapchain : swapchains ) { uniqueSwapchains.push_back( UniqueHandle( swapchain, deleter ) ); @@ -9859,7 +9861,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainKHRUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( swapchain, ObjectDestroy( *this, allocator, d ) ) ); + result, UniqueHandle( swapchain, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -9924,7 +9926,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createXlibSurfaceKHRUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( surface, ObjectDestroy( *this, allocator, d ) ) ); + result, UniqueHandle( surface, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -10016,7 +10018,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createXcbSurfaceKHRUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( surface, ObjectDestroy( *this, allocator, d ) ) ); + result, UniqueHandle( surface, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -10112,7 +10114,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createWaylandSurfaceKHRUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( surface, ObjectDestroy( *this, allocator, d ) ) ); + result, UniqueHandle( surface, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -10205,7 +10207,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createAndroidSurfaceKHRUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( surface, ObjectDestroy( *this, allocator, d ) ) ); + result, UniqueHandle( surface, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -10271,7 +10273,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createWin32SurfaceKHRUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( surface, ObjectDestroy( *this, allocator, d ) ) ); + result, UniqueHandle( surface, detail::ObjectDestroy( *this, 
allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -10344,7 +10346,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugReportCallbackEXTUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( callback, ObjectDestroy( *this, allocator, d ) ) ); + result, + UniqueHandle( callback, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -10765,7 +10768,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createVideoSessionKHRUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( videoSession, ObjectDestroy( *this, allocator, d ) ) ); + result, UniqueHandle( videoSession, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -11021,7 +11024,7 @@ namespace VULKAN_HPP_NAMESPACE return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, UniqueHandle( - videoSessionParameters, ObjectDestroy( *this, allocator, d ) ) ); + videoSessionParameters, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -11450,7 +11453,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCuModuleNVXUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( module, ObjectDestroy( *this, allocator, d ) ) ); + result, UniqueHandle( module, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -11512,7 +11515,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCuFunctionNVXUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( function, ObjectDestroy( *this, allocator, d ) ) ); + result, UniqueHandle( function, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -11946,7 +11949,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createStreamDescriptorSurfaceGGPUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( surface, ObjectDestroy( *this, allocator, d ) ) ); + result, UniqueHandle( surface, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -12618,7 +12621,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createViSurfaceNNUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( surface, ObjectDestroy( *this, allocator, d ) ) ); + result, UniqueHandle( surface, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -13218,7 +13221,7 @@ namespace VULKAN_HPP_NAMESPACE return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, UniqueHandle( - 
descriptorUpdateTemplate, ObjectDestroy( *this, allocator, d ) ) ); + descriptorUpdateTemplate, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -13413,7 +13416,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getRandROutputDisplayEXTUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( display, ObjectRelease( *this, d ) ) ); + result, UniqueHandle( display, detail::ObjectRelease( *this, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -13541,7 +13544,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::registerEventEXTUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( fence, ObjectDestroy( *this, allocator, d ) ) ); + result, UniqueHandle( fence, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -13609,7 +13612,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::registerDisplayEventEXTUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( fence, ObjectDestroy( *this, allocator, d ) ) ); + result, UniqueHandle( fence, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -13921,7 +13924,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2KHRUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( renderPass, ObjectDestroy( *this, allocator, d ) ) ); + result, UniqueHandle( renderPass, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -14981,7 +14984,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createIOSSurfaceMVKUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( surface, ObjectDestroy( *this, allocator, d ) ) ); + result, UniqueHandle( surface, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -15047,7 +15050,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createMacOSSurfaceMVKUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( surface, ObjectDestroy( *this, allocator, d ) ) ); + result, UniqueHandle( surface, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -15269,7 +15272,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugUtilsMessengerEXTUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( messenger, ObjectDestroy( *this, allocator, d ) ) ); + result, + UniqueHandle( messenger, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* 
VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -15588,7 +15592,7 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); std::vector, PipelineAllocator> uniquePipelines; uniquePipelines.reserve( createInfos.size() ); - ObjectDestroy deleter( *this, allocator, d ); + detail::ObjectDestroy deleter( *this, allocator, d ); for ( auto const & pipeline : pipelines ) { uniquePipelines.push_back( UniqueHandle( pipeline, deleter ) ); @@ -15626,7 +15630,7 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); std::vector, PipelineAllocator> uniquePipelines( pipelineAllocator ); uniquePipelines.reserve( createInfos.size() ); - ObjectDestroy deleter( *this, allocator, d ); + detail::ObjectDestroy deleter( *this, allocator, d ); for ( auto const & pipeline : pipelines ) { uniquePipelines.push_back( UniqueHandle( pipeline, deleter ) ); @@ -15659,7 +15663,7 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); return ResultValue>( - result, UniqueHandle( pipeline, ObjectDestroy( *this, allocator, d ) ) ); + result, UniqueHandle( pipeline, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -16113,9 +16117,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &accelerationStructure ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createAccelerationStructureKHRUnique" ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, - UniqueHandle( accelerationStructure, ObjectDestroy( *this, allocator, d ) ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, + UniqueHandle( + accelerationStructure, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -16947,7 +16951,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); std::vector, PipelineAllocator> uniquePipelines; uniquePipelines.reserve( createInfos.size() ); - ObjectDestroy deleter( *this, allocator, d ); + detail::ObjectDestroy deleter( *this, allocator, d ); for ( auto const & pipeline : pipelines ) { uniquePipelines.push_back( UniqueHandle( pipeline, deleter ) ); @@ -16990,7 +16994,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); std::vector, PipelineAllocator> uniquePipelines( pipelineAllocator ); uniquePipelines.reserve( createInfos.size() ); - ObjectDestroy deleter( *this, allocator, d ); + detail::ObjectDestroy deleter( *this, allocator, d ); for ( auto const & pipeline : pipelines ) { uniquePipelines.push_back( UniqueHandle( pipeline, deleter ) ); @@ -17028,7 +17032,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); return ResultValue>( - result, UniqueHandle( pipeline, ObjectDestroy( *this, allocator, d ) ) ); + result, UniqueHandle( pipeline, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -17264,7 +17268,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversionKHRUnique" ); 
return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( ycbcrConversion, ObjectDestroy( *this, allocator, d ) ) ); + result, + UniqueHandle( ycbcrConversion, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -17448,7 +17453,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createValidationCacheEXTUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( validationCache, ObjectDestroy( *this, allocator, d ) ) ); + result, + UniqueHandle( validationCache, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -17754,9 +17760,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &accelerationStructure ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createAccelerationStructureNVUnique" ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, - UniqueHandle( accelerationStructure, ObjectDestroy( *this, allocator, d ) ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, + UniqueHandle( + accelerationStructure, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -18132,7 +18138,7 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); std::vector, PipelineAllocator> uniquePipelines; uniquePipelines.reserve( createInfos.size() ); - ObjectDestroy deleter( *this, allocator, d ); + detail::ObjectDestroy deleter( *this, allocator, d ); for ( auto const & pipeline : pipelines ) { uniquePipelines.push_back( UniqueHandle( pipeline, deleter ) ); @@ -18169,7 +18175,7 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); std::vector, PipelineAllocator> uniquePipelines( pipelineAllocator ); uniquePipelines.reserve( createInfos.size() ); - ObjectDestroy deleter( *this, allocator, d ); + detail::ObjectDestroy deleter( *this, allocator, d ); for ( auto const & pipeline : pipelines ) { uniquePipelines.push_back( UniqueHandle( pipeline, deleter ) ); @@ -18202,7 +18208,7 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); return ResultValue>( - result, UniqueHandle( pipeline, ObjectDestroy( *this, allocator, d ) ) ); + result, UniqueHandle( pipeline, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -19209,7 +19215,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::acquirePerformanceConfigurationINTELUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( configuration, ObjectRelease( *this, d ) ) ); + result, + UniqueHandle( configuration, detail::ObjectRelease( *this, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -19397,7 +19404,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createImagePipeSurfaceFUCHSIAUnique" ); return 
VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( surface, ObjectDestroy( *this, allocator, d ) ) ); + result, UniqueHandle( surface, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -19463,7 +19470,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createMetalSurfaceEXTUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( surface, ObjectDestroy( *this, allocator, d ) ) ); + result, UniqueHandle( surface, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -20215,7 +20222,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createHeadlessSurfaceEXTUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( surface, ObjectDestroy( *this, allocator, d ) ) ); + result, UniqueHandle( surface, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -20558,7 +20565,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDeferredOperationKHRUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( deferredOperation, ObjectDestroy( *this, allocator, d ) ) ); + result, + UniqueHandle( deferredOperation, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -21416,7 +21424,7 @@ namespace VULKAN_HPP_NAMESPACE return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, UniqueHandle( - indirectCommandsLayout, ObjectDestroy( *this, allocator, d ) ) ); + indirectCommandsLayout, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -21577,7 +21585,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDrmDisplayEXTUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( display, ObjectRelease( *this, d ) ) ); + result, UniqueHandle( display, detail::ObjectRelease( *this, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -21641,7 +21649,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createPrivateDataSlotEXTUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( privateDataSlot, ObjectDestroy( *this, allocator, d ) ) ); + result, + UniqueHandle( privateDataSlot, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -22080,7 +22089,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCudaModuleNVUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( module, ObjectDestroy( *this, allocator, d ) ) ); + result, UniqueHandle( module, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* 
VULKAN_HPP_NO_SMART_HANDLE */ # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -22220,7 +22229,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCudaFunctionNVUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( function, ObjectDestroy( *this, allocator, d ) ) ); + result, UniqueHandle( function, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -23145,7 +23154,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getWinrtDisplayNVUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( display, ObjectRelease( *this, d ) ) ); + result, UniqueHandle( display, detail::ObjectRelease( *this, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -23211,7 +23220,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDirectFBSurfaceEXTUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( surface, ObjectDestroy( *this, allocator, d ) ) ); + result, UniqueHandle( surface, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -23478,7 +23487,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferCollectionFUCHSIAUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( collection, ObjectDestroy( *this, allocator, d ) ) ); + result, + UniqueHandle( collection, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -23855,7 +23865,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createScreenSurfaceQNXUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( surface, ObjectDestroy( *this, allocator, d ) ) ); + result, UniqueHandle( surface, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -24065,7 +24075,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createMicromapEXTUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( micromap, ObjectDestroy( *this, allocator, d ) ) ); + result, UniqueHandle( micromap, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -25594,7 +25604,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createOpticalFlowSessionNVUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( session, ObjectDestroy( *this, allocator, d ) ) ); + result, UniqueHandle( session, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -26018,7 +26028,7 @@ namespace VULKAN_HPP_NAMESPACE { 
VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncompatibleShaderBinaryEXT } ); std::vector, ShaderEXTAllocator> uniqueShaders; uniqueShaders.reserve( createInfos.size() ); - ObjectDestroy deleter( *this, allocator, d ); + detail::ObjectDestroy deleter( *this, allocator, d ); for ( auto const & shader : shaders ) { uniqueShaders.push_back( UniqueHandle( shader, deleter ) ); @@ -26053,7 +26063,7 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncompatibleShaderBinaryEXT } ); std::vector, ShaderEXTAllocator> uniqueShaders( shaderEXTAllocator ); uniqueShaders.reserve( createInfos.size() ); - ObjectDestroy deleter( *this, allocator, d ); + detail::ObjectDestroy deleter( *this, allocator, d ); for ( auto const & shader : shaders ) { uniqueShaders.push_back( UniqueHandle( shader, deleter ) ); @@ -26084,7 +26094,7 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncompatibleShaderBinaryEXT } ); return ResultValue>( - result, UniqueHandle( shader, ObjectDestroy( *this, allocator, d ) ) ); + result, UniqueHandle( shader, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -26474,7 +26484,7 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncomplete, VULKAN_HPP_NAMESPACE::Result::ePipelineBinaryMissingKHR } ); std::vector, PipelineBinaryKHRAllocator> uniquePipelineBinaries; uniquePipelineBinaries.reserve( pipelineBinaries.size() ); - ObjectDestroy deleter( *this, allocator, d ); + detail::ObjectDestroy deleter( *this, allocator, d ); for ( auto const & pipelineBinary : pipelineBinaries ) { uniquePipelineBinaries.push_back( UniqueHandle( pipelineBinary, deleter ) ); @@ -26541,7 +26551,7 @@ namespace VULKAN_HPP_NAMESPACE std::vector, PipelineBinaryKHRAllocator> uniquePipelineBinaries( pipelineBinaryKHRAllocator ); uniquePipelineBinaries.reserve( pipelineBinaries.size() ); - ObjectDestroy deleter( *this, allocator, d ); + detail::ObjectDestroy deleter( *this, allocator, d ); for ( auto const & pipelineBinary : pipelineBinaries ) { uniquePipelineBinaries.push_back( UniqueHandle( pipelineBinary, deleter ) ); @@ -27686,7 +27696,7 @@ namespace VULKAN_HPP_NAMESPACE return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, UniqueHandle( - indirectCommandsLayout, ObjectDestroy( *this, allocator, d ) ) ); + indirectCommandsLayout, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -27806,9 +27816,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &indirectExecutionSet ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createIndirectExecutionSetEXTUnique" ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, - UniqueHandle( indirectExecutionSet, ObjectDestroy( *this, allocator, d ) ) ); + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, + UniqueHandle( + indirectExecutionSet, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ diff --git a/third_party/vulkan/vulkan_handles.hpp b/third_party/vulkan/vulkan_handles.hpp index 4947796..4964e9b 100644 --- a/third_party/vulkan/vulkan_handles.hpp +++ b/third_party/vulkan/vulkan_handles.hpp @@ -1909,6 +1909,10 @@ 
namespace VULKAN_HPP_NAMESPACE struct PipelineViewportDepthClampControlCreateInfoEXT; struct DepthClampRangeEXT; + //=== VK_HUAWEI_hdr_vivid === + struct PhysicalDeviceHdrVividFeaturesHUAWEI; + struct HdrVividDynamicMetadataHUAWEI; + //=== VK_NV_cooperative_matrix2 === struct CooperativeMatrixFlexibleDimensionsPropertiesNV; struct PhysicalDeviceCooperativeMatrix2FeaturesNV; @@ -2031,7 +2035,7 @@ namespace VULKAN_HPP_NAMESPACE class UniqueHandleTraits { public: - using deleter = ObjectDestroy; + using deleter = detail::ObjectDestroy; }; using UniqueInstance = UniqueHandle; @@ -2040,7 +2044,7 @@ namespace VULKAN_HPP_NAMESPACE class UniqueHandleTraits { public: - using deleter = ObjectDestroy; + using deleter = detail::ObjectDestroy; }; using UniqueDevice = UniqueHandle; @@ -2049,7 +2053,7 @@ namespace VULKAN_HPP_NAMESPACE class UniqueHandleTraits { public: - using deleter = ObjectFree; + using deleter = detail::ObjectFree; }; using UniqueDeviceMemory = UniqueHandle; @@ -2058,7 +2062,7 @@ namespace VULKAN_HPP_NAMESPACE class UniqueHandleTraits { public: - using deleter = ObjectDestroy; + using deleter = detail::ObjectDestroy; }; using UniqueFence = UniqueHandle; @@ -2067,7 +2071,7 @@ namespace VULKAN_HPP_NAMESPACE class UniqueHandleTraits { public: - using deleter = ObjectDestroy; + using deleter = detail::ObjectDestroy; }; using UniqueSemaphore = UniqueHandle; @@ -2076,7 +2080,7 @@ namespace VULKAN_HPP_NAMESPACE class UniqueHandleTraits { public: - using deleter = ObjectDestroy; + using deleter = detail::ObjectDestroy; }; using UniqueEvent = UniqueHandle; @@ -2085,7 +2089,7 @@ namespace VULKAN_HPP_NAMESPACE class UniqueHandleTraits { public: - using deleter = ObjectDestroy; + using deleter = detail::ObjectDestroy; }; using UniqueQueryPool = UniqueHandle; @@ -2094,7 +2098,7 @@ namespace VULKAN_HPP_NAMESPACE class UniqueHandleTraits { public: - using deleter = ObjectDestroy; + using deleter = detail::ObjectDestroy; }; using UniqueBuffer = UniqueHandle; @@ -2103,7 +2107,7 @@ namespace VULKAN_HPP_NAMESPACE class UniqueHandleTraits { public: - using deleter = ObjectDestroy; + using deleter = detail::ObjectDestroy; }; using UniqueBufferView = UniqueHandle; @@ -2112,7 +2116,7 @@ namespace VULKAN_HPP_NAMESPACE class UniqueHandleTraits { public: - using deleter = ObjectDestroy; + using deleter = detail::ObjectDestroy; }; using UniqueImage = UniqueHandle; @@ -2121,7 +2125,7 @@ namespace VULKAN_HPP_NAMESPACE class UniqueHandleTraits { public: - using deleter = ObjectDestroy; + using deleter = detail::ObjectDestroy; }; using UniqueImageView = UniqueHandle; @@ -2130,7 +2134,7 @@ namespace VULKAN_HPP_NAMESPACE class UniqueHandleTraits { public: - using deleter = ObjectDestroy; + using deleter = detail::ObjectDestroy; }; using UniqueShaderModule = UniqueHandle; @@ -2139,7 +2143,7 @@ namespace VULKAN_HPP_NAMESPACE class UniqueHandleTraits { public: - using deleter = ObjectDestroy; + using deleter = detail::ObjectDestroy; }; using UniquePipelineCache = UniqueHandle; @@ -2148,7 +2152,7 @@ namespace VULKAN_HPP_NAMESPACE class UniqueHandleTraits { public: - using deleter = ObjectDestroy; + using deleter = detail::ObjectDestroy; }; using UniquePipeline = UniqueHandle; @@ -2157,7 +2161,7 @@ namespace VULKAN_HPP_NAMESPACE class UniqueHandleTraits { public: - using deleter = ObjectDestroy; + using deleter = detail::ObjectDestroy; }; using UniquePipelineLayout = UniqueHandle; @@ -2166,7 +2170,7 @@ namespace VULKAN_HPP_NAMESPACE class UniqueHandleTraits { public: - using deleter = ObjectDestroy; + using deleter = 
detail::ObjectDestroy; }; using UniqueSampler = UniqueHandle; @@ -2175,7 +2179,7 @@ namespace VULKAN_HPP_NAMESPACE class UniqueHandleTraits { public: - using deleter = ObjectDestroy; + using deleter = detail::ObjectDestroy; }; using UniqueDescriptorPool = UniqueHandle; @@ -2184,7 +2188,7 @@ namespace VULKAN_HPP_NAMESPACE class UniqueHandleTraits { public: - using deleter = PoolFree; + using deleter = detail::PoolFree; }; using UniqueDescriptorSet = UniqueHandle; @@ -2193,7 +2197,7 @@ namespace VULKAN_HPP_NAMESPACE class UniqueHandleTraits { public: - using deleter = ObjectDestroy; + using deleter = detail::ObjectDestroy; }; using UniqueDescriptorSetLayout = UniqueHandle; @@ -2202,7 +2206,7 @@ namespace VULKAN_HPP_NAMESPACE class UniqueHandleTraits { public: - using deleter = ObjectDestroy; + using deleter = detail::ObjectDestroy; }; using UniqueFramebuffer = UniqueHandle; @@ -2211,7 +2215,7 @@ namespace VULKAN_HPP_NAMESPACE class UniqueHandleTraits { public: - using deleter = ObjectDestroy; + using deleter = detail::ObjectDestroy; }; using UniqueRenderPass = UniqueHandle; @@ -2220,7 +2224,7 @@ namespace VULKAN_HPP_NAMESPACE class UniqueHandleTraits { public: - using deleter = ObjectDestroy; + using deleter = detail::ObjectDestroy; }; using UniqueCommandPool = UniqueHandle; @@ -2229,7 +2233,7 @@ namespace VULKAN_HPP_NAMESPACE class UniqueHandleTraits { public: - using deleter = PoolFree; + using deleter = detail::PoolFree; }; using UniqueCommandBuffer = UniqueHandle; @@ -2239,7 +2243,7 @@ namespace VULKAN_HPP_NAMESPACE class UniqueHandleTraits { public: - using deleter = ObjectDestroy; + using deleter = detail::ObjectDestroy; }; using UniqueSamplerYcbcrConversion = UniqueHandle; @@ -2249,7 +2253,7 @@ namespace VULKAN_HPP_NAMESPACE class UniqueHandleTraits { public: - using deleter = ObjectDestroy; + using deleter = detail::ObjectDestroy; }; using UniqueDescriptorUpdateTemplate = UniqueHandle; @@ -2260,7 +2264,7 @@ namespace VULKAN_HPP_NAMESPACE class UniqueHandleTraits { public: - using deleter = ObjectDestroy; + using deleter = detail::ObjectDestroy; }; using UniquePrivateDataSlot = UniqueHandle; @@ -2271,7 +2275,7 @@ namespace VULKAN_HPP_NAMESPACE class UniqueHandleTraits { public: - using deleter = ObjectDestroy; + using deleter = detail::ObjectDestroy; }; using UniqueSurfaceKHR = UniqueHandle; @@ -2281,7 +2285,7 @@ namespace VULKAN_HPP_NAMESPACE class UniqueHandleTraits { public: - using deleter = ObjectDestroy; + using deleter = detail::ObjectDestroy; }; using UniqueSwapchainKHR = UniqueHandle; @@ -2291,7 +2295,7 @@ namespace VULKAN_HPP_NAMESPACE class UniqueHandleTraits { public: - using deleter = ObjectDestroy; + using deleter = detail::ObjectDestroy; }; using UniqueDisplayKHR = UniqueHandle; @@ -2301,7 +2305,7 @@ namespace VULKAN_HPP_NAMESPACE class UniqueHandleTraits { public: - using deleter = ObjectDestroy; + using deleter = detail::ObjectDestroy; }; using UniqueDebugReportCallbackEXT = UniqueHandle; @@ -2311,7 +2315,7 @@ namespace VULKAN_HPP_NAMESPACE class UniqueHandleTraits { public: - using deleter = ObjectDestroy; + using deleter = detail::ObjectDestroy; }; using UniqueVideoSessionKHR = UniqueHandle; @@ -2320,7 +2324,7 @@ namespace VULKAN_HPP_NAMESPACE class UniqueHandleTraits { public: - using deleter = ObjectDestroy; + using deleter = detail::ObjectDestroy; }; using UniqueVideoSessionParametersKHR = UniqueHandle; @@ -2330,7 +2334,7 @@ namespace VULKAN_HPP_NAMESPACE class UniqueHandleTraits { public: - using deleter = ObjectDestroy; + using deleter = detail::ObjectDestroy; 
}; using UniqueCuModuleNVX = UniqueHandle; @@ -2339,7 +2343,7 @@ namespace VULKAN_HPP_NAMESPACE class UniqueHandleTraits { public: - using deleter = ObjectDestroy; + using deleter = detail::ObjectDestroy; }; using UniqueCuFunctionNVX = UniqueHandle; @@ -2349,7 +2353,7 @@ namespace VULKAN_HPP_NAMESPACE class UniqueHandleTraits { public: - using deleter = ObjectDestroy; + using deleter = detail::ObjectDestroy; }; using UniqueDebugUtilsMessengerEXT = UniqueHandle; @@ -2359,7 +2363,7 @@ namespace VULKAN_HPP_NAMESPACE class UniqueHandleTraits { public: - using deleter = ObjectDestroy; + using deleter = detail::ObjectDestroy; }; using UniqueAccelerationStructureKHR = UniqueHandle; @@ -2369,7 +2373,7 @@ namespace VULKAN_HPP_NAMESPACE class UniqueHandleTraits { public: - using deleter = ObjectDestroy; + using deleter = detail::ObjectDestroy; }; using UniqueValidationCacheEXT = UniqueHandle; @@ -2379,7 +2383,7 @@ namespace VULKAN_HPP_NAMESPACE class UniqueHandleTraits { public: - using deleter = ObjectDestroy; + using deleter = detail::ObjectDestroy; }; using UniqueAccelerationStructureNV = UniqueHandle; @@ -2389,7 +2393,7 @@ namespace VULKAN_HPP_NAMESPACE class UniqueHandleTraits { public: - using deleter = ObjectDestroy; + using deleter = detail::ObjectDestroy; }; using UniquePerformanceConfigurationINTEL = UniqueHandle; @@ -2399,7 +2403,7 @@ namespace VULKAN_HPP_NAMESPACE class UniqueHandleTraits { public: - using deleter = ObjectDestroy; + using deleter = detail::ObjectDestroy; }; using UniqueDeferredOperationKHR = UniqueHandle; @@ -2409,7 +2413,7 @@ namespace VULKAN_HPP_NAMESPACE class UniqueHandleTraits { public: - using deleter = ObjectDestroy; + using deleter = detail::ObjectDestroy; }; using UniqueIndirectCommandsLayoutNV = UniqueHandle; @@ -2420,7 +2424,7 @@ namespace VULKAN_HPP_NAMESPACE class UniqueHandleTraits { public: - using deleter = ObjectDestroy; + using deleter = detail::ObjectDestroy; }; using UniqueCudaModuleNV = UniqueHandle; @@ -2429,7 +2433,7 @@ namespace VULKAN_HPP_NAMESPACE class UniqueHandleTraits { public: - using deleter = ObjectDestroy; + using deleter = detail::ObjectDestroy; }; using UniqueCudaFunctionNV = UniqueHandle; @@ -2441,7 +2445,7 @@ namespace VULKAN_HPP_NAMESPACE class UniqueHandleTraits { public: - using deleter = ObjectDestroy; + using deleter = detail::ObjectDestroy; }; using UniqueBufferCollectionFUCHSIA = UniqueHandle; @@ -2452,7 +2456,7 @@ namespace VULKAN_HPP_NAMESPACE class UniqueHandleTraits { public: - using deleter = ObjectDestroy; + using deleter = detail::ObjectDestroy; }; using UniqueMicromapEXT = UniqueHandle; @@ -2462,7 +2466,7 @@ namespace VULKAN_HPP_NAMESPACE class UniqueHandleTraits { public: - using deleter = ObjectDestroy; + using deleter = detail::ObjectDestroy; }; using UniqueOpticalFlowSessionNV = UniqueHandle; @@ -2472,7 +2476,7 @@ namespace VULKAN_HPP_NAMESPACE class UniqueHandleTraits { public: - using deleter = ObjectDestroy; + using deleter = detail::ObjectDestroy; }; using UniqueShaderEXT = UniqueHandle; @@ -2482,7 +2486,7 @@ namespace VULKAN_HPP_NAMESPACE class UniqueHandleTraits { public: - using deleter = ObjectDestroy; + using deleter = detail::ObjectDestroy; }; using UniquePipelineBinaryKHR = UniqueHandle; @@ -2492,7 +2496,7 @@ namespace VULKAN_HPP_NAMESPACE class UniqueHandleTraits { public: - using deleter = ObjectDestroy; + using deleter = detail::ObjectDestroy; }; using UniqueIndirectCommandsLayoutEXT = UniqueHandle; @@ -2501,7 +2505,7 @@ namespace VULKAN_HPP_NAMESPACE class UniqueHandleTraits { public: - using deleter 
= ObjectDestroy; + using deleter = detail::ObjectDestroy; }; using UniqueIndirectExecutionSetEXT = UniqueHandle; diff --git a/third_party/vulkan/vulkan_hash.hpp b/third_party/vulkan/vulkan_hash.hpp index 61fe948..c53be12 100644 --- a/third_party/vulkan/vulkan_hash.hpp +++ b/third_party/vulkan/vulkan_hash.hpp @@ -5793,6 +5793,20 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::HdrVividDynamicMetadataHUAWEI const & hdrVividDynamicMetadataHUAWEI ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, hdrVividDynamicMetadataHUAWEI.sType ); + VULKAN_HPP_HASH_COMBINE( seed, hdrVividDynamicMetadataHUAWEI.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, hdrVividDynamicMetadataHUAWEI.dynamicMetadataSize ); + VULKAN_HPP_HASH_COMBINE( seed, hdrVividDynamicMetadataHUAWEI.pDynamicMetadata ); + return seed; + } + }; + template <> struct hash { @@ -9511,6 +9525,19 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceHdrVividFeaturesHUAWEI const & physicalDeviceHdrVividFeaturesHUAWEI ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceHdrVividFeaturesHUAWEI.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceHdrVividFeaturesHUAWEI.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceHdrVividFeaturesHUAWEI.hdrVivid ); + return seed; + } + }; + template <> struct hash { diff --git a/third_party/vulkan/vulkan_hpp_macros.hpp b/third_party/vulkan/vulkan_hpp_macros.hpp index 0ad04dd..aba3b9a 100644 --- a/third_party/vulkan/vulkan_hpp_macros.hpp +++ b/third_party/vulkan/vulkan_hpp_macros.hpp @@ -257,33 +257,40 @@ namespace VULKAN_HPP_NAMESPACE { - class DispatchLoaderDynamic; + namespace detail + { + class DispatchLoaderDynamic; + +#if !defined( VULKAN_HPP_DEFAULT_DISPATCHER ) +# if VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 + extern VULKAN_HPP_STORAGE_API DispatchLoaderDynamic defaultDispatchLoaderDynamic; +# endif +#endif + } // namespace detail } // namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_DEFAULT_DISPATCHER ) # if VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 -# define VULKAN_HPP_DEFAULT_DISPATCHER ::VULKAN_HPP_NAMESPACE::defaultDispatchLoaderDynamic -# define VULKAN_HPP_DEFAULT_DISPATCH_LOADER_DYNAMIC_STORAGE \ - namespace VULKAN_HPP_NAMESPACE \ - { \ - VULKAN_HPP_STORAGE_API ::VULKAN_HPP_NAMESPACE::DispatchLoaderDynamic defaultDispatchLoaderDynamic; \ +# define VULKAN_HPP_DEFAULT_DISPATCHER ::VULKAN_HPP_NAMESPACE::detail::defaultDispatchLoaderDynamic +# define VULKAN_HPP_DEFAULT_DISPATCH_LOADER_DYNAMIC_STORAGE \ + namespace VULKAN_HPP_NAMESPACE \ + { \ + namespace detail \ + { \ + VULKAN_HPP_STORAGE_API DispatchLoaderDynamic defaultDispatchLoaderDynamic; \ + } \ } - -namespace VULKAN_HPP_NAMESPACE -{ - extern VULKAN_HPP_STORAGE_API VULKAN_HPP_NAMESPACE::DispatchLoaderDynamic defaultDispatchLoaderDynamic; -} // namespace VULKAN_HPP_NAMESPACE # else -# define VULKAN_HPP_DEFAULT_DISPATCHER ::VULKAN_HPP_NAMESPACE::getDispatchLoaderStatic() +# define VULKAN_HPP_DEFAULT_DISPATCHER ::VULKAN_HPP_NAMESPACE::detail::getDispatchLoaderStatic() # define VULKAN_HPP_DEFAULT_DISPATCH_LOADER_DYNAMIC_STORAGE # endif #endif #if !defined( VULKAN_HPP_DEFAULT_DISPATCHER_TYPE ) # if VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 -# define VULKAN_HPP_DEFAULT_DISPATCHER_TYPE ::VULKAN_HPP_NAMESPACE::DispatchLoaderDynamic +# define VULKAN_HPP_DEFAULT_DISPATCHER_TYPE ::VULKAN_HPP_NAMESPACE::detail::DispatchLoaderDynamic # else -# define 
VULKAN_HPP_DEFAULT_DISPATCHER_TYPE ::VULKAN_HPP_NAMESPACE::DispatchLoaderStatic +# define VULKAN_HPP_DEFAULT_DISPATCHER_TYPE ::VULKAN_HPP_NAMESPACE::detail::DispatchLoaderStatic # endif #endif diff --git a/third_party/vulkan/vulkan_raii.hpp b/third_party/vulkan/vulkan_raii.hpp index e777293..d8ca95f 100644 --- a/third_party/vulkan/vulkan_raii.hpp +++ b/third_party/vulkan/vulkan_raii.hpp @@ -19,2636 +19,2645 @@ namespace VULKAN_HPP_NAMESPACE { namespace VULKAN_HPP_RAII_NAMESPACE { - template - class CreateReturnType + namespace detail { - public: + template + class CreateReturnType + { + public: # if defined( VULKAN_HPP_RAII_NO_EXCEPTIONS ) - using Type = VULKAN_HPP_EXPECTED; + using Type = VULKAN_HPP_EXPECTED; # else - using Type = T; + using Type = T; # endif - }; + }; - class ContextDispatcher : public DispatchLoaderBase - { - public: - ContextDispatcher( PFN_vkGetInstanceProcAddr getProcAddr ) - : vkGetInstanceProcAddr( getProcAddr ) - //=== VK_VERSION_1_0 === - , vkCreateInstance( PFN_vkCreateInstance( getProcAddr( NULL, "vkCreateInstance" ) ) ) - , vkEnumerateInstanceExtensionProperties( PFN_vkEnumerateInstanceExtensionProperties( getProcAddr( NULL, "vkEnumerateInstanceExtensionProperties" ) ) ) - , vkEnumerateInstanceLayerProperties( PFN_vkEnumerateInstanceLayerProperties( getProcAddr( NULL, "vkEnumerateInstanceLayerProperties" ) ) ) - //=== VK_VERSION_1_1 === - , vkEnumerateInstanceVersion( PFN_vkEnumerateInstanceVersion( getProcAddr( NULL, "vkEnumerateInstanceVersion" ) ) ) + using PFN_dummy = void ( * )(); + + class ContextDispatcher : public ::VULKAN_HPP_NAMESPACE::detail::DispatchLoaderBase { - } + public: + ContextDispatcher( PFN_vkGetInstanceProcAddr getProcAddr ) + : vkGetInstanceProcAddr( getProcAddr ) + //=== VK_VERSION_1_0 === + , vkCreateInstance( PFN_vkCreateInstance( getProcAddr( NULL, "vkCreateInstance" ) ) ) + , vkEnumerateInstanceExtensionProperties( + PFN_vkEnumerateInstanceExtensionProperties( getProcAddr( NULL, "vkEnumerateInstanceExtensionProperties" ) ) ) + , vkEnumerateInstanceLayerProperties( PFN_vkEnumerateInstanceLayerProperties( getProcAddr( NULL, "vkEnumerateInstanceLayerProperties" ) ) ) + //=== VK_VERSION_1_1 === + , vkEnumerateInstanceVersion( PFN_vkEnumerateInstanceVersion( getProcAddr( NULL, "vkEnumerateInstanceVersion" ) ) ) + { + } - public: - PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr = 0; + public: + PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr = 0; - //=== VK_VERSION_1_0 === - PFN_vkCreateInstance vkCreateInstance = 0; - PFN_vkEnumerateInstanceExtensionProperties vkEnumerateInstanceExtensionProperties = 0; - PFN_vkEnumerateInstanceLayerProperties vkEnumerateInstanceLayerProperties = 0; - - //=== VK_VERSION_1_1 === - PFN_vkEnumerateInstanceVersion vkEnumerateInstanceVersion = 0; - }; - - class InstanceDispatcher : public DispatchLoaderBase - { - public: - InstanceDispatcher( PFN_vkGetInstanceProcAddr getProcAddr, VkInstance instance ) : vkGetInstanceProcAddr( getProcAddr ) - { //=== VK_VERSION_1_0 === - vkDestroyInstance = PFN_vkDestroyInstance( vkGetInstanceProcAddr( instance, "vkDestroyInstance" ) ); - vkEnumeratePhysicalDevices = PFN_vkEnumeratePhysicalDevices( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDevices" ) ); - vkGetPhysicalDeviceFeatures = PFN_vkGetPhysicalDeviceFeatures( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures" ) ); - vkGetPhysicalDeviceFormatProperties = - PFN_vkGetPhysicalDeviceFormatProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties" ) ); - 
vkGetPhysicalDeviceImageFormatProperties = - PFN_vkGetPhysicalDeviceImageFormatProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties" ) ); - vkGetPhysicalDeviceProperties = PFN_vkGetPhysicalDeviceProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties" ) ); - vkGetPhysicalDeviceQueueFamilyProperties = - PFN_vkGetPhysicalDeviceQueueFamilyProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties" ) ); - vkGetPhysicalDeviceMemoryProperties = - PFN_vkGetPhysicalDeviceMemoryProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties" ) ); - vkCreateDevice = PFN_vkCreateDevice( vkGetInstanceProcAddr( instance, "vkCreateDevice" ) ); - vkEnumerateDeviceExtensionProperties = - PFN_vkEnumerateDeviceExtensionProperties( vkGetInstanceProcAddr( instance, "vkEnumerateDeviceExtensionProperties" ) ); - vkEnumerateDeviceLayerProperties = PFN_vkEnumerateDeviceLayerProperties( vkGetInstanceProcAddr( instance, "vkEnumerateDeviceLayerProperties" ) ); - vkGetPhysicalDeviceSparseImageFormatProperties = - PFN_vkGetPhysicalDeviceSparseImageFormatProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties" ) ); + PFN_vkCreateInstance vkCreateInstance = 0; + PFN_vkEnumerateInstanceExtensionProperties vkEnumerateInstanceExtensionProperties = 0; + PFN_vkEnumerateInstanceLayerProperties vkEnumerateInstanceLayerProperties = 0; //=== VK_VERSION_1_1 === - vkEnumeratePhysicalDeviceGroups = PFN_vkEnumeratePhysicalDeviceGroups( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceGroups" ) ); - vkGetPhysicalDeviceFeatures2 = PFN_vkGetPhysicalDeviceFeatures2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures2" ) ); - vkGetPhysicalDeviceProperties2 = PFN_vkGetPhysicalDeviceProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties2" ) ); - vkGetPhysicalDeviceFormatProperties2 = - PFN_vkGetPhysicalDeviceFormatProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties2" ) ); - vkGetPhysicalDeviceImageFormatProperties2 = - PFN_vkGetPhysicalDeviceImageFormatProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties2" ) ); - vkGetPhysicalDeviceQueueFamilyProperties2 = - PFN_vkGetPhysicalDeviceQueueFamilyProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties2" ) ); - vkGetPhysicalDeviceMemoryProperties2 = - PFN_vkGetPhysicalDeviceMemoryProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties2" ) ); - vkGetPhysicalDeviceSparseImageFormatProperties2 = - PFN_vkGetPhysicalDeviceSparseImageFormatProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties2" ) ); - vkGetPhysicalDeviceExternalBufferProperties = - PFN_vkGetPhysicalDeviceExternalBufferProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalBufferProperties" ) ); - vkGetPhysicalDeviceExternalFenceProperties = - PFN_vkGetPhysicalDeviceExternalFenceProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalFenceProperties" ) ); - vkGetPhysicalDeviceExternalSemaphoreProperties = - PFN_vkGetPhysicalDeviceExternalSemaphoreProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalSemaphoreProperties" ) ); + PFN_vkEnumerateInstanceVersion vkEnumerateInstanceVersion = 0; + }; + + class InstanceDispatcher : public ::VULKAN_HPP_NAMESPACE::detail::DispatchLoaderBase + { + public: + InstanceDispatcher( PFN_vkGetInstanceProcAddr 
getProcAddr, VkInstance instance ) : vkGetInstanceProcAddr( getProcAddr ) + { + //=== VK_VERSION_1_0 === + vkDestroyInstance = PFN_vkDestroyInstance( vkGetInstanceProcAddr( instance, "vkDestroyInstance" ) ); + vkEnumeratePhysicalDevices = PFN_vkEnumeratePhysicalDevices( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDevices" ) ); + vkGetPhysicalDeviceFeatures = PFN_vkGetPhysicalDeviceFeatures( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures" ) ); + vkGetPhysicalDeviceFormatProperties = + PFN_vkGetPhysicalDeviceFormatProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties" ) ); + vkGetPhysicalDeviceImageFormatProperties = + PFN_vkGetPhysicalDeviceImageFormatProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties" ) ); + vkGetPhysicalDeviceProperties = PFN_vkGetPhysicalDeviceProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties" ) ); + vkGetPhysicalDeviceQueueFamilyProperties = + PFN_vkGetPhysicalDeviceQueueFamilyProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties" ) ); + vkGetPhysicalDeviceMemoryProperties = + PFN_vkGetPhysicalDeviceMemoryProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties" ) ); + vkCreateDevice = PFN_vkCreateDevice( vkGetInstanceProcAddr( instance, "vkCreateDevice" ) ); + vkEnumerateDeviceExtensionProperties = + PFN_vkEnumerateDeviceExtensionProperties( vkGetInstanceProcAddr( instance, "vkEnumerateDeviceExtensionProperties" ) ); + vkEnumerateDeviceLayerProperties = PFN_vkEnumerateDeviceLayerProperties( vkGetInstanceProcAddr( instance, "vkEnumerateDeviceLayerProperties" ) ); + vkGetPhysicalDeviceSparseImageFormatProperties = + PFN_vkGetPhysicalDeviceSparseImageFormatProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties" ) ); + + //=== VK_VERSION_1_1 === + vkEnumeratePhysicalDeviceGroups = PFN_vkEnumeratePhysicalDeviceGroups( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceGroups" ) ); + vkGetPhysicalDeviceFeatures2 = PFN_vkGetPhysicalDeviceFeatures2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures2" ) ); + vkGetPhysicalDeviceProperties2 = PFN_vkGetPhysicalDeviceProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties2" ) ); + vkGetPhysicalDeviceFormatProperties2 = + PFN_vkGetPhysicalDeviceFormatProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties2" ) ); + vkGetPhysicalDeviceImageFormatProperties2 = + PFN_vkGetPhysicalDeviceImageFormatProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties2" ) ); + vkGetPhysicalDeviceQueueFamilyProperties2 = + PFN_vkGetPhysicalDeviceQueueFamilyProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties2" ) ); + vkGetPhysicalDeviceMemoryProperties2 = + PFN_vkGetPhysicalDeviceMemoryProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties2" ) ); + vkGetPhysicalDeviceSparseImageFormatProperties2 = + PFN_vkGetPhysicalDeviceSparseImageFormatProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties2" ) ); + vkGetPhysicalDeviceExternalBufferProperties = + PFN_vkGetPhysicalDeviceExternalBufferProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalBufferProperties" ) ); + vkGetPhysicalDeviceExternalFenceProperties = + PFN_vkGetPhysicalDeviceExternalFenceProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalFenceProperties" 
) ); + vkGetPhysicalDeviceExternalSemaphoreProperties = + PFN_vkGetPhysicalDeviceExternalSemaphoreProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalSemaphoreProperties" ) ); + + //=== VK_VERSION_1_3 === + vkGetPhysicalDeviceToolProperties = PFN_vkGetPhysicalDeviceToolProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceToolProperties" ) ); + + //=== VK_KHR_surface === + vkDestroySurfaceKHR = PFN_vkDestroySurfaceKHR( vkGetInstanceProcAddr( instance, "vkDestroySurfaceKHR" ) ); + vkGetPhysicalDeviceSurfaceSupportKHR = + PFN_vkGetPhysicalDeviceSurfaceSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceSupportKHR" ) ); + vkGetPhysicalDeviceSurfaceCapabilitiesKHR = + PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilitiesKHR" ) ); + vkGetPhysicalDeviceSurfaceFormatsKHR = + PFN_vkGetPhysicalDeviceSurfaceFormatsKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceFormatsKHR" ) ); + vkGetPhysicalDeviceSurfacePresentModesKHR = + PFN_vkGetPhysicalDeviceSurfacePresentModesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfacePresentModesKHR" ) ); + + //=== VK_KHR_swapchain === + vkGetPhysicalDevicePresentRectanglesKHR = + PFN_vkGetPhysicalDevicePresentRectanglesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDevicePresentRectanglesKHR" ) ); + + //=== VK_KHR_display === + vkGetPhysicalDeviceDisplayPropertiesKHR = + PFN_vkGetPhysicalDeviceDisplayPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPropertiesKHR" ) ); + vkGetPhysicalDeviceDisplayPlanePropertiesKHR = + PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPlanePropertiesKHR" ) ); + vkGetDisplayPlaneSupportedDisplaysKHR = + PFN_vkGetDisplayPlaneSupportedDisplaysKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneSupportedDisplaysKHR" ) ); + vkGetDisplayModePropertiesKHR = PFN_vkGetDisplayModePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayModePropertiesKHR" ) ); + vkCreateDisplayModeKHR = PFN_vkCreateDisplayModeKHR( vkGetInstanceProcAddr( instance, "vkCreateDisplayModeKHR" ) ); + vkGetDisplayPlaneCapabilitiesKHR = PFN_vkGetDisplayPlaneCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneCapabilitiesKHR" ) ); + vkCreateDisplayPlaneSurfaceKHR = PFN_vkCreateDisplayPlaneSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateDisplayPlaneSurfaceKHR" ) ); + +# if defined( VK_USE_PLATFORM_XLIB_KHR ) + //=== VK_KHR_xlib_surface === + vkCreateXlibSurfaceKHR = PFN_vkCreateXlibSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateXlibSurfaceKHR" ) ); + vkGetPhysicalDeviceXlibPresentationSupportKHR = + PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceXlibPresentationSupportKHR" ) ); +# endif /*VK_USE_PLATFORM_XLIB_KHR*/ + +# if defined( VK_USE_PLATFORM_XCB_KHR ) + //=== VK_KHR_xcb_surface === + vkCreateXcbSurfaceKHR = PFN_vkCreateXcbSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateXcbSurfaceKHR" ) ); + vkGetPhysicalDeviceXcbPresentationSupportKHR = + PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceXcbPresentationSupportKHR" ) ); +# endif /*VK_USE_PLATFORM_XCB_KHR*/ + +# if defined( VK_USE_PLATFORM_WAYLAND_KHR ) + //=== VK_KHR_wayland_surface === + vkCreateWaylandSurfaceKHR = PFN_vkCreateWaylandSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateWaylandSurfaceKHR" ) ); + 
vkGetPhysicalDeviceWaylandPresentationSupportKHR = + PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceWaylandPresentationSupportKHR" ) ); +# endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ + +# if defined( VK_USE_PLATFORM_ANDROID_KHR ) + //=== VK_KHR_android_surface === + vkCreateAndroidSurfaceKHR = PFN_vkCreateAndroidSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateAndroidSurfaceKHR" ) ); +# endif /*VK_USE_PLATFORM_ANDROID_KHR*/ + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_win32_surface === + vkCreateWin32SurfaceKHR = PFN_vkCreateWin32SurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateWin32SurfaceKHR" ) ); + vkGetPhysicalDeviceWin32PresentationSupportKHR = + PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceWin32PresentationSupportKHR" ) ); +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_EXT_debug_report === + vkCreateDebugReportCallbackEXT = PFN_vkCreateDebugReportCallbackEXT( vkGetInstanceProcAddr( instance, "vkCreateDebugReportCallbackEXT" ) ); + vkDestroyDebugReportCallbackEXT = PFN_vkDestroyDebugReportCallbackEXT( vkGetInstanceProcAddr( instance, "vkDestroyDebugReportCallbackEXT" ) ); + vkDebugReportMessageEXT = PFN_vkDebugReportMessageEXT( vkGetInstanceProcAddr( instance, "vkDebugReportMessageEXT" ) ); + + //=== VK_KHR_video_queue === + vkGetPhysicalDeviceVideoCapabilitiesKHR = + PFN_vkGetPhysicalDeviceVideoCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceVideoCapabilitiesKHR" ) ); + vkGetPhysicalDeviceVideoFormatPropertiesKHR = + PFN_vkGetPhysicalDeviceVideoFormatPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceVideoFormatPropertiesKHR" ) ); + +# if defined( VK_USE_PLATFORM_GGP ) + //=== VK_GGP_stream_descriptor_surface === + vkCreateStreamDescriptorSurfaceGGP = + PFN_vkCreateStreamDescriptorSurfaceGGP( vkGetInstanceProcAddr( instance, "vkCreateStreamDescriptorSurfaceGGP" ) ); +# endif /*VK_USE_PLATFORM_GGP*/ + + //=== VK_NV_external_memory_capabilities === + vkGetPhysicalDeviceExternalImageFormatPropertiesNV = + PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalImageFormatPropertiesNV" ) ); + + //=== VK_KHR_get_physical_device_properties2 === + vkGetPhysicalDeviceFeatures2KHR = PFN_vkGetPhysicalDeviceFeatures2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures2KHR" ) ); + if ( !vkGetPhysicalDeviceFeatures2 ) + vkGetPhysicalDeviceFeatures2 = vkGetPhysicalDeviceFeatures2KHR; + vkGetPhysicalDeviceProperties2KHR = PFN_vkGetPhysicalDeviceProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties2KHR" ) ); + if ( !vkGetPhysicalDeviceProperties2 ) + vkGetPhysicalDeviceProperties2 = vkGetPhysicalDeviceProperties2KHR; + vkGetPhysicalDeviceFormatProperties2KHR = + PFN_vkGetPhysicalDeviceFormatProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties2KHR" ) ); + if ( !vkGetPhysicalDeviceFormatProperties2 ) + vkGetPhysicalDeviceFormatProperties2 = vkGetPhysicalDeviceFormatProperties2KHR; + vkGetPhysicalDeviceImageFormatProperties2KHR = + PFN_vkGetPhysicalDeviceImageFormatProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties2KHR" ) ); + if ( !vkGetPhysicalDeviceImageFormatProperties2 ) + vkGetPhysicalDeviceImageFormatProperties2 = vkGetPhysicalDeviceImageFormatProperties2KHR; + vkGetPhysicalDeviceQueueFamilyProperties2KHR = + 
PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties2KHR" ) ); + if ( !vkGetPhysicalDeviceQueueFamilyProperties2 ) + vkGetPhysicalDeviceQueueFamilyProperties2 = vkGetPhysicalDeviceQueueFamilyProperties2KHR; + vkGetPhysicalDeviceMemoryProperties2KHR = + PFN_vkGetPhysicalDeviceMemoryProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties2KHR" ) ); + if ( !vkGetPhysicalDeviceMemoryProperties2 ) + vkGetPhysicalDeviceMemoryProperties2 = vkGetPhysicalDeviceMemoryProperties2KHR; + vkGetPhysicalDeviceSparseImageFormatProperties2KHR = + PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties2KHR" ) ); + if ( !vkGetPhysicalDeviceSparseImageFormatProperties2 ) + vkGetPhysicalDeviceSparseImageFormatProperties2 = vkGetPhysicalDeviceSparseImageFormatProperties2KHR; + +# if defined( VK_USE_PLATFORM_VI_NN ) + //=== VK_NN_vi_surface === + vkCreateViSurfaceNN = PFN_vkCreateViSurfaceNN( vkGetInstanceProcAddr( instance, "vkCreateViSurfaceNN" ) ); +# endif /*VK_USE_PLATFORM_VI_NN*/ + + //=== VK_KHR_device_group_creation === + vkEnumeratePhysicalDeviceGroupsKHR = + PFN_vkEnumeratePhysicalDeviceGroupsKHR( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceGroupsKHR" ) ); + if ( !vkEnumeratePhysicalDeviceGroups ) + vkEnumeratePhysicalDeviceGroups = vkEnumeratePhysicalDeviceGroupsKHR; + + //=== VK_KHR_external_memory_capabilities === + vkGetPhysicalDeviceExternalBufferPropertiesKHR = + PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalBufferPropertiesKHR" ) ); + if ( !vkGetPhysicalDeviceExternalBufferProperties ) + vkGetPhysicalDeviceExternalBufferProperties = vkGetPhysicalDeviceExternalBufferPropertiesKHR; + + //=== VK_KHR_external_semaphore_capabilities === + vkGetPhysicalDeviceExternalSemaphorePropertiesKHR = + PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalSemaphorePropertiesKHR" ) ); + if ( !vkGetPhysicalDeviceExternalSemaphoreProperties ) + vkGetPhysicalDeviceExternalSemaphoreProperties = vkGetPhysicalDeviceExternalSemaphorePropertiesKHR; + + //=== VK_EXT_direct_mode_display === + vkReleaseDisplayEXT = PFN_vkReleaseDisplayEXT( vkGetInstanceProcAddr( instance, "vkReleaseDisplayEXT" ) ); + +# if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT ) + //=== VK_EXT_acquire_xlib_display === + vkAcquireXlibDisplayEXT = PFN_vkAcquireXlibDisplayEXT( vkGetInstanceProcAddr( instance, "vkAcquireXlibDisplayEXT" ) ); + vkGetRandROutputDisplayEXT = PFN_vkGetRandROutputDisplayEXT( vkGetInstanceProcAddr( instance, "vkGetRandROutputDisplayEXT" ) ); +# endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ + + //=== VK_EXT_display_surface_counter === + vkGetPhysicalDeviceSurfaceCapabilities2EXT = + PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilities2EXT" ) ); + + //=== VK_KHR_external_fence_capabilities === + vkGetPhysicalDeviceExternalFencePropertiesKHR = + PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalFencePropertiesKHR" ) ); + if ( !vkGetPhysicalDeviceExternalFenceProperties ) + vkGetPhysicalDeviceExternalFenceProperties = vkGetPhysicalDeviceExternalFencePropertiesKHR; + + //=== VK_KHR_performance_query === + vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR = 
PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( + vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR" ) ); + vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR = PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR" ) ); + + //=== VK_KHR_get_surface_capabilities2 === + vkGetPhysicalDeviceSurfaceCapabilities2KHR = + PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilities2KHR" ) ); + vkGetPhysicalDeviceSurfaceFormats2KHR = + PFN_vkGetPhysicalDeviceSurfaceFormats2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceFormats2KHR" ) ); + + //=== VK_KHR_get_display_properties2 === + vkGetPhysicalDeviceDisplayProperties2KHR = + PFN_vkGetPhysicalDeviceDisplayProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayProperties2KHR" ) ); + vkGetPhysicalDeviceDisplayPlaneProperties2KHR = + PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPlaneProperties2KHR" ) ); + vkGetDisplayModeProperties2KHR = PFN_vkGetDisplayModeProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetDisplayModeProperties2KHR" ) ); + vkGetDisplayPlaneCapabilities2KHR = PFN_vkGetDisplayPlaneCapabilities2KHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneCapabilities2KHR" ) ); + +# if defined( VK_USE_PLATFORM_IOS_MVK ) + //=== VK_MVK_ios_surface === + vkCreateIOSSurfaceMVK = PFN_vkCreateIOSSurfaceMVK( vkGetInstanceProcAddr( instance, "vkCreateIOSSurfaceMVK" ) ); +# endif /*VK_USE_PLATFORM_IOS_MVK*/ + +# if defined( VK_USE_PLATFORM_MACOS_MVK ) + //=== VK_MVK_macos_surface === + vkCreateMacOSSurfaceMVK = PFN_vkCreateMacOSSurfaceMVK( vkGetInstanceProcAddr( instance, "vkCreateMacOSSurfaceMVK" ) ); +# endif /*VK_USE_PLATFORM_MACOS_MVK*/ + + //=== VK_EXT_debug_utils === + vkCreateDebugUtilsMessengerEXT = PFN_vkCreateDebugUtilsMessengerEXT( vkGetInstanceProcAddr( instance, "vkCreateDebugUtilsMessengerEXT" ) ); + vkDestroyDebugUtilsMessengerEXT = PFN_vkDestroyDebugUtilsMessengerEXT( vkGetInstanceProcAddr( instance, "vkDestroyDebugUtilsMessengerEXT" ) ); + vkSubmitDebugUtilsMessageEXT = PFN_vkSubmitDebugUtilsMessageEXT( vkGetInstanceProcAddr( instance, "vkSubmitDebugUtilsMessageEXT" ) ); + + //=== VK_EXT_sample_locations === + vkGetPhysicalDeviceMultisamplePropertiesEXT = + PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMultisamplePropertiesEXT" ) ); + + //=== VK_EXT_calibrated_timestamps === + vkGetPhysicalDeviceCalibrateableTimeDomainsEXT = + PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCalibrateableTimeDomainsEXT" ) ); + if ( !vkGetPhysicalDeviceCalibrateableTimeDomainsKHR ) + vkGetPhysicalDeviceCalibrateableTimeDomainsKHR = vkGetPhysicalDeviceCalibrateableTimeDomainsEXT; + +# if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_imagepipe_surface === + vkCreateImagePipeSurfaceFUCHSIA = PFN_vkCreateImagePipeSurfaceFUCHSIA( vkGetInstanceProcAddr( instance, "vkCreateImagePipeSurfaceFUCHSIA" ) ); +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + +# if defined( VK_USE_PLATFORM_METAL_EXT ) + //=== VK_EXT_metal_surface === + vkCreateMetalSurfaceEXT = PFN_vkCreateMetalSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateMetalSurfaceEXT" ) ); +# endif /*VK_USE_PLATFORM_METAL_EXT*/ + + //=== 
VK_KHR_fragment_shading_rate === + vkGetPhysicalDeviceFragmentShadingRatesKHR = + PFN_vkGetPhysicalDeviceFragmentShadingRatesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFragmentShadingRatesKHR" ) ); + + //=== VK_EXT_tooling_info === + vkGetPhysicalDeviceToolPropertiesEXT = + PFN_vkGetPhysicalDeviceToolPropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceToolPropertiesEXT" ) ); + if ( !vkGetPhysicalDeviceToolProperties ) + vkGetPhysicalDeviceToolProperties = vkGetPhysicalDeviceToolPropertiesEXT; + + //=== VK_NV_cooperative_matrix === + vkGetPhysicalDeviceCooperativeMatrixPropertiesNV = + PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCooperativeMatrixPropertiesNV" ) ); + + //=== VK_NV_coverage_reduction_mode === + vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV = PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV" ) ); + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_EXT_full_screen_exclusive === + vkGetPhysicalDeviceSurfacePresentModes2EXT = + PFN_vkGetPhysicalDeviceSurfacePresentModes2EXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfacePresentModes2EXT" ) ); +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_EXT_headless_surface === + vkCreateHeadlessSurfaceEXT = PFN_vkCreateHeadlessSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateHeadlessSurfaceEXT" ) ); + + //=== VK_EXT_acquire_drm_display === + vkAcquireDrmDisplayEXT = PFN_vkAcquireDrmDisplayEXT( vkGetInstanceProcAddr( instance, "vkAcquireDrmDisplayEXT" ) ); + vkGetDrmDisplayEXT = PFN_vkGetDrmDisplayEXT( vkGetInstanceProcAddr( instance, "vkGetDrmDisplayEXT" ) ); + + //=== VK_KHR_video_encode_queue === + vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR = PFN_vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR" ) ); + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_NV_acquire_winrt_display === + vkAcquireWinrtDisplayNV = PFN_vkAcquireWinrtDisplayNV( vkGetInstanceProcAddr( instance, "vkAcquireWinrtDisplayNV" ) ); + vkGetWinrtDisplayNV = PFN_vkGetWinrtDisplayNV( vkGetInstanceProcAddr( instance, "vkGetWinrtDisplayNV" ) ); +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + +# if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) + //=== VK_EXT_directfb_surface === + vkCreateDirectFBSurfaceEXT = PFN_vkCreateDirectFBSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateDirectFBSurfaceEXT" ) ); + vkGetPhysicalDeviceDirectFBPresentationSupportEXT = + PFN_vkGetPhysicalDeviceDirectFBPresentationSupportEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDirectFBPresentationSupportEXT" ) ); +# endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ + +# if defined( VK_USE_PLATFORM_SCREEN_QNX ) + //=== VK_QNX_screen_surface === + vkCreateScreenSurfaceQNX = PFN_vkCreateScreenSurfaceQNX( vkGetInstanceProcAddr( instance, "vkCreateScreenSurfaceQNX" ) ); + vkGetPhysicalDeviceScreenPresentationSupportQNX = + PFN_vkGetPhysicalDeviceScreenPresentationSupportQNX( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceScreenPresentationSupportQNX" ) ); +# endif /*VK_USE_PLATFORM_SCREEN_QNX*/ + + //=== VK_NV_optical_flow === + vkGetPhysicalDeviceOpticalFlowImageFormatsNV = + PFN_vkGetPhysicalDeviceOpticalFlowImageFormatsNV( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceOpticalFlowImageFormatsNV" ) ); + + //=== 
VK_KHR_cooperative_matrix === + vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR = + PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR" ) ); + + //=== VK_KHR_calibrated_timestamps === + vkGetPhysicalDeviceCalibrateableTimeDomainsKHR = + PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCalibrateableTimeDomainsKHR" ) ); + + //=== VK_NV_cooperative_matrix2 === + vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV = PFN_vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV( + vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV" ) ); + + vkGetDeviceProcAddr = PFN_vkGetDeviceProcAddr( vkGetInstanceProcAddr( instance, "vkGetDeviceProcAddr" ) ); + } + + public: + //=== VK_VERSION_1_0 === + PFN_vkDestroyInstance vkDestroyInstance = 0; + PFN_vkEnumeratePhysicalDevices vkEnumeratePhysicalDevices = 0; + PFN_vkGetPhysicalDeviceFeatures vkGetPhysicalDeviceFeatures = 0; + PFN_vkGetPhysicalDeviceFormatProperties vkGetPhysicalDeviceFormatProperties = 0; + PFN_vkGetPhysicalDeviceImageFormatProperties vkGetPhysicalDeviceImageFormatProperties = 0; + PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties = 0; + PFN_vkGetPhysicalDeviceQueueFamilyProperties vkGetPhysicalDeviceQueueFamilyProperties = 0; + PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties = 0; + PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr = 0; + PFN_vkCreateDevice vkCreateDevice = 0; + PFN_vkEnumerateDeviceExtensionProperties vkEnumerateDeviceExtensionProperties = 0; + PFN_vkEnumerateDeviceLayerProperties vkEnumerateDeviceLayerProperties = 0; + PFN_vkGetPhysicalDeviceSparseImageFormatProperties vkGetPhysicalDeviceSparseImageFormatProperties = 0; + + //=== VK_VERSION_1_1 === + PFN_vkEnumeratePhysicalDeviceGroups vkEnumeratePhysicalDeviceGroups = 0; + PFN_vkGetPhysicalDeviceFeatures2 vkGetPhysicalDeviceFeatures2 = 0; + PFN_vkGetPhysicalDeviceProperties2 vkGetPhysicalDeviceProperties2 = 0; + PFN_vkGetPhysicalDeviceFormatProperties2 vkGetPhysicalDeviceFormatProperties2 = 0; + PFN_vkGetPhysicalDeviceImageFormatProperties2 vkGetPhysicalDeviceImageFormatProperties2 = 0; + PFN_vkGetPhysicalDeviceQueueFamilyProperties2 vkGetPhysicalDeviceQueueFamilyProperties2 = 0; + PFN_vkGetPhysicalDeviceMemoryProperties2 vkGetPhysicalDeviceMemoryProperties2 = 0; + PFN_vkGetPhysicalDeviceSparseImageFormatProperties2 vkGetPhysicalDeviceSparseImageFormatProperties2 = 0; + PFN_vkGetPhysicalDeviceExternalBufferProperties vkGetPhysicalDeviceExternalBufferProperties = 0; + PFN_vkGetPhysicalDeviceExternalFenceProperties vkGetPhysicalDeviceExternalFenceProperties = 0; + PFN_vkGetPhysicalDeviceExternalSemaphoreProperties vkGetPhysicalDeviceExternalSemaphoreProperties = 0; //=== VK_VERSION_1_3 === - vkGetPhysicalDeviceToolProperties = PFN_vkGetPhysicalDeviceToolProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceToolProperties" ) ); + PFN_vkGetPhysicalDeviceToolProperties vkGetPhysicalDeviceToolProperties = 0; //=== VK_KHR_surface === - vkDestroySurfaceKHR = PFN_vkDestroySurfaceKHR( vkGetInstanceProcAddr( instance, "vkDestroySurfaceKHR" ) ); - vkGetPhysicalDeviceSurfaceSupportKHR = - PFN_vkGetPhysicalDeviceSurfaceSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceSupportKHR" ) ); - vkGetPhysicalDeviceSurfaceCapabilitiesKHR = - PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR( 
vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilitiesKHR" ) ); - vkGetPhysicalDeviceSurfaceFormatsKHR = - PFN_vkGetPhysicalDeviceSurfaceFormatsKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceFormatsKHR" ) ); - vkGetPhysicalDeviceSurfacePresentModesKHR = - PFN_vkGetPhysicalDeviceSurfacePresentModesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfacePresentModesKHR" ) ); + PFN_vkDestroySurfaceKHR vkDestroySurfaceKHR = 0; + PFN_vkGetPhysicalDeviceSurfaceSupportKHR vkGetPhysicalDeviceSurfaceSupportKHR = 0; + PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR vkGetPhysicalDeviceSurfaceCapabilitiesKHR = 0; + PFN_vkGetPhysicalDeviceSurfaceFormatsKHR vkGetPhysicalDeviceSurfaceFormatsKHR = 0; + PFN_vkGetPhysicalDeviceSurfacePresentModesKHR vkGetPhysicalDeviceSurfacePresentModesKHR = 0; //=== VK_KHR_swapchain === - vkGetPhysicalDevicePresentRectanglesKHR = - PFN_vkGetPhysicalDevicePresentRectanglesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDevicePresentRectanglesKHR" ) ); + PFN_vkGetPhysicalDevicePresentRectanglesKHR vkGetPhysicalDevicePresentRectanglesKHR = 0; //=== VK_KHR_display === - vkGetPhysicalDeviceDisplayPropertiesKHR = - PFN_vkGetPhysicalDeviceDisplayPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPropertiesKHR" ) ); - vkGetPhysicalDeviceDisplayPlanePropertiesKHR = - PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPlanePropertiesKHR" ) ); - vkGetDisplayPlaneSupportedDisplaysKHR = - PFN_vkGetDisplayPlaneSupportedDisplaysKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneSupportedDisplaysKHR" ) ); - vkGetDisplayModePropertiesKHR = PFN_vkGetDisplayModePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayModePropertiesKHR" ) ); - vkCreateDisplayModeKHR = PFN_vkCreateDisplayModeKHR( vkGetInstanceProcAddr( instance, "vkCreateDisplayModeKHR" ) ); - vkGetDisplayPlaneCapabilitiesKHR = PFN_vkGetDisplayPlaneCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneCapabilitiesKHR" ) ); - vkCreateDisplayPlaneSurfaceKHR = PFN_vkCreateDisplayPlaneSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateDisplayPlaneSurfaceKHR" ) ); + PFN_vkGetPhysicalDeviceDisplayPropertiesKHR vkGetPhysicalDeviceDisplayPropertiesKHR = 0; + PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR vkGetPhysicalDeviceDisplayPlanePropertiesKHR = 0; + PFN_vkGetDisplayPlaneSupportedDisplaysKHR vkGetDisplayPlaneSupportedDisplaysKHR = 0; + PFN_vkGetDisplayModePropertiesKHR vkGetDisplayModePropertiesKHR = 0; + PFN_vkCreateDisplayModeKHR vkCreateDisplayModeKHR = 0; + PFN_vkGetDisplayPlaneCapabilitiesKHR vkGetDisplayPlaneCapabilitiesKHR = 0; + PFN_vkCreateDisplayPlaneSurfaceKHR vkCreateDisplayPlaneSurfaceKHR = 0; # if defined( VK_USE_PLATFORM_XLIB_KHR ) //=== VK_KHR_xlib_surface === - vkCreateXlibSurfaceKHR = PFN_vkCreateXlibSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateXlibSurfaceKHR" ) ); - vkGetPhysicalDeviceXlibPresentationSupportKHR = - PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceXlibPresentationSupportKHR" ) ); + PFN_vkCreateXlibSurfaceKHR vkCreateXlibSurfaceKHR = 0; + PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR vkGetPhysicalDeviceXlibPresentationSupportKHR = 0; +# else + PFN_dummy vkCreateXlibSurfaceKHR_placeholder = 0; + PFN_dummy vkGetPhysicalDeviceXlibPresentationSupportKHR_placeholder = 0; # endif /*VK_USE_PLATFORM_XLIB_KHR*/ # if defined( VK_USE_PLATFORM_XCB_KHR ) //=== VK_KHR_xcb_surface === - 
vkCreateXcbSurfaceKHR = PFN_vkCreateXcbSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateXcbSurfaceKHR" ) ); - vkGetPhysicalDeviceXcbPresentationSupportKHR = - PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceXcbPresentationSupportKHR" ) ); + PFN_vkCreateXcbSurfaceKHR vkCreateXcbSurfaceKHR = 0; + PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR vkGetPhysicalDeviceXcbPresentationSupportKHR = 0; +# else + PFN_dummy vkCreateXcbSurfaceKHR_placeholder = 0; + PFN_dummy vkGetPhysicalDeviceXcbPresentationSupportKHR_placeholder = 0; # endif /*VK_USE_PLATFORM_XCB_KHR*/ # if defined( VK_USE_PLATFORM_WAYLAND_KHR ) //=== VK_KHR_wayland_surface === - vkCreateWaylandSurfaceKHR = PFN_vkCreateWaylandSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateWaylandSurfaceKHR" ) ); - vkGetPhysicalDeviceWaylandPresentationSupportKHR = - PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceWaylandPresentationSupportKHR" ) ); + PFN_vkCreateWaylandSurfaceKHR vkCreateWaylandSurfaceKHR = 0; + PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR vkGetPhysicalDeviceWaylandPresentationSupportKHR = 0; +# else + PFN_dummy vkCreateWaylandSurfaceKHR_placeholder = 0; + PFN_dummy vkGetPhysicalDeviceWaylandPresentationSupportKHR_placeholder = 0; # endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ # if defined( VK_USE_PLATFORM_ANDROID_KHR ) //=== VK_KHR_android_surface === - vkCreateAndroidSurfaceKHR = PFN_vkCreateAndroidSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateAndroidSurfaceKHR" ) ); + PFN_vkCreateAndroidSurfaceKHR vkCreateAndroidSurfaceKHR = 0; +# else + PFN_dummy vkCreateAndroidSurfaceKHR_placeholder = 0; # endif /*VK_USE_PLATFORM_ANDROID_KHR*/ # if defined( VK_USE_PLATFORM_WIN32_KHR ) //=== VK_KHR_win32_surface === - vkCreateWin32SurfaceKHR = PFN_vkCreateWin32SurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateWin32SurfaceKHR" ) ); - vkGetPhysicalDeviceWin32PresentationSupportKHR = - PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceWin32PresentationSupportKHR" ) ); + PFN_vkCreateWin32SurfaceKHR vkCreateWin32SurfaceKHR = 0; + PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR vkGetPhysicalDeviceWin32PresentationSupportKHR = 0; +# else + PFN_dummy vkCreateWin32SurfaceKHR_placeholder = 0; + PFN_dummy vkGetPhysicalDeviceWin32PresentationSupportKHR_placeholder = 0; # endif /*VK_USE_PLATFORM_WIN32_KHR*/ //=== VK_EXT_debug_report === - vkCreateDebugReportCallbackEXT = PFN_vkCreateDebugReportCallbackEXT( vkGetInstanceProcAddr( instance, "vkCreateDebugReportCallbackEXT" ) ); - vkDestroyDebugReportCallbackEXT = PFN_vkDestroyDebugReportCallbackEXT( vkGetInstanceProcAddr( instance, "vkDestroyDebugReportCallbackEXT" ) ); - vkDebugReportMessageEXT = PFN_vkDebugReportMessageEXT( vkGetInstanceProcAddr( instance, "vkDebugReportMessageEXT" ) ); + PFN_vkCreateDebugReportCallbackEXT vkCreateDebugReportCallbackEXT = 0; + PFN_vkDestroyDebugReportCallbackEXT vkDestroyDebugReportCallbackEXT = 0; + PFN_vkDebugReportMessageEXT vkDebugReportMessageEXT = 0; //=== VK_KHR_video_queue === - vkGetPhysicalDeviceVideoCapabilitiesKHR = - PFN_vkGetPhysicalDeviceVideoCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceVideoCapabilitiesKHR" ) ); - vkGetPhysicalDeviceVideoFormatPropertiesKHR = - PFN_vkGetPhysicalDeviceVideoFormatPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceVideoFormatPropertiesKHR" ) ); + PFN_vkGetPhysicalDeviceVideoCapabilitiesKHR 
vkGetPhysicalDeviceVideoCapabilitiesKHR = 0; + PFN_vkGetPhysicalDeviceVideoFormatPropertiesKHR vkGetPhysicalDeviceVideoFormatPropertiesKHR = 0; # if defined( VK_USE_PLATFORM_GGP ) //=== VK_GGP_stream_descriptor_surface === - vkCreateStreamDescriptorSurfaceGGP = PFN_vkCreateStreamDescriptorSurfaceGGP( vkGetInstanceProcAddr( instance, "vkCreateStreamDescriptorSurfaceGGP" ) ); + PFN_vkCreateStreamDescriptorSurfaceGGP vkCreateStreamDescriptorSurfaceGGP = 0; +# else + PFN_dummy vkCreateStreamDescriptorSurfaceGGP_placeholder = 0; # endif /*VK_USE_PLATFORM_GGP*/ //=== VK_NV_external_memory_capabilities === - vkGetPhysicalDeviceExternalImageFormatPropertiesNV = - PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalImageFormatPropertiesNV" ) ); + PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV vkGetPhysicalDeviceExternalImageFormatPropertiesNV = 0; //=== VK_KHR_get_physical_device_properties2 === - vkGetPhysicalDeviceFeatures2KHR = PFN_vkGetPhysicalDeviceFeatures2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures2KHR" ) ); - if ( !vkGetPhysicalDeviceFeatures2 ) - vkGetPhysicalDeviceFeatures2 = vkGetPhysicalDeviceFeatures2KHR; - vkGetPhysicalDeviceProperties2KHR = PFN_vkGetPhysicalDeviceProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties2KHR" ) ); - if ( !vkGetPhysicalDeviceProperties2 ) - vkGetPhysicalDeviceProperties2 = vkGetPhysicalDeviceProperties2KHR; - vkGetPhysicalDeviceFormatProperties2KHR = - PFN_vkGetPhysicalDeviceFormatProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties2KHR" ) ); - if ( !vkGetPhysicalDeviceFormatProperties2 ) - vkGetPhysicalDeviceFormatProperties2 = vkGetPhysicalDeviceFormatProperties2KHR; - vkGetPhysicalDeviceImageFormatProperties2KHR = - PFN_vkGetPhysicalDeviceImageFormatProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties2KHR" ) ); - if ( !vkGetPhysicalDeviceImageFormatProperties2 ) - vkGetPhysicalDeviceImageFormatProperties2 = vkGetPhysicalDeviceImageFormatProperties2KHR; - vkGetPhysicalDeviceQueueFamilyProperties2KHR = - PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties2KHR" ) ); - if ( !vkGetPhysicalDeviceQueueFamilyProperties2 ) - vkGetPhysicalDeviceQueueFamilyProperties2 = vkGetPhysicalDeviceQueueFamilyProperties2KHR; - vkGetPhysicalDeviceMemoryProperties2KHR = - PFN_vkGetPhysicalDeviceMemoryProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties2KHR" ) ); - if ( !vkGetPhysicalDeviceMemoryProperties2 ) - vkGetPhysicalDeviceMemoryProperties2 = vkGetPhysicalDeviceMemoryProperties2KHR; - vkGetPhysicalDeviceSparseImageFormatProperties2KHR = - PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties2KHR" ) ); - if ( !vkGetPhysicalDeviceSparseImageFormatProperties2 ) - vkGetPhysicalDeviceSparseImageFormatProperties2 = vkGetPhysicalDeviceSparseImageFormatProperties2KHR; + PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR = 0; + PFN_vkGetPhysicalDeviceProperties2KHR vkGetPhysicalDeviceProperties2KHR = 0; + PFN_vkGetPhysicalDeviceFormatProperties2KHR vkGetPhysicalDeviceFormatProperties2KHR = 0; + PFN_vkGetPhysicalDeviceImageFormatProperties2KHR vkGetPhysicalDeviceImageFormatProperties2KHR = 0; + PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR vkGetPhysicalDeviceQueueFamilyProperties2KHR = 
0; + PFN_vkGetPhysicalDeviceMemoryProperties2KHR vkGetPhysicalDeviceMemoryProperties2KHR = 0; + PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR vkGetPhysicalDeviceSparseImageFormatProperties2KHR = 0; # if defined( VK_USE_PLATFORM_VI_NN ) //=== VK_NN_vi_surface === - vkCreateViSurfaceNN = PFN_vkCreateViSurfaceNN( vkGetInstanceProcAddr( instance, "vkCreateViSurfaceNN" ) ); + PFN_vkCreateViSurfaceNN vkCreateViSurfaceNN = 0; +# else + PFN_dummy vkCreateViSurfaceNN_placeholder = 0; # endif /*VK_USE_PLATFORM_VI_NN*/ //=== VK_KHR_device_group_creation === - vkEnumeratePhysicalDeviceGroupsKHR = PFN_vkEnumeratePhysicalDeviceGroupsKHR( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceGroupsKHR" ) ); - if ( !vkEnumeratePhysicalDeviceGroups ) - vkEnumeratePhysicalDeviceGroups = vkEnumeratePhysicalDeviceGroupsKHR; + PFN_vkEnumeratePhysicalDeviceGroupsKHR vkEnumeratePhysicalDeviceGroupsKHR = 0; //=== VK_KHR_external_memory_capabilities === - vkGetPhysicalDeviceExternalBufferPropertiesKHR = - PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalBufferPropertiesKHR" ) ); - if ( !vkGetPhysicalDeviceExternalBufferProperties ) - vkGetPhysicalDeviceExternalBufferProperties = vkGetPhysicalDeviceExternalBufferPropertiesKHR; + PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR vkGetPhysicalDeviceExternalBufferPropertiesKHR = 0; //=== VK_KHR_external_semaphore_capabilities === - vkGetPhysicalDeviceExternalSemaphorePropertiesKHR = - PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalSemaphorePropertiesKHR" ) ); - if ( !vkGetPhysicalDeviceExternalSemaphoreProperties ) - vkGetPhysicalDeviceExternalSemaphoreProperties = vkGetPhysicalDeviceExternalSemaphorePropertiesKHR; + PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR vkGetPhysicalDeviceExternalSemaphorePropertiesKHR = 0; //=== VK_EXT_direct_mode_display === - vkReleaseDisplayEXT = PFN_vkReleaseDisplayEXT( vkGetInstanceProcAddr( instance, "vkReleaseDisplayEXT" ) ); + PFN_vkReleaseDisplayEXT vkReleaseDisplayEXT = 0; # if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT ) //=== VK_EXT_acquire_xlib_display === - vkAcquireXlibDisplayEXT = PFN_vkAcquireXlibDisplayEXT( vkGetInstanceProcAddr( instance, "vkAcquireXlibDisplayEXT" ) ); - vkGetRandROutputDisplayEXT = PFN_vkGetRandROutputDisplayEXT( vkGetInstanceProcAddr( instance, "vkGetRandROutputDisplayEXT" ) ); + PFN_vkAcquireXlibDisplayEXT vkAcquireXlibDisplayEXT = 0; + PFN_vkGetRandROutputDisplayEXT vkGetRandROutputDisplayEXT = 0; +# else + PFN_dummy vkAcquireXlibDisplayEXT_placeholder = 0; + PFN_dummy vkGetRandROutputDisplayEXT_placeholder = 0; # endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ //=== VK_EXT_display_surface_counter === - vkGetPhysicalDeviceSurfaceCapabilities2EXT = - PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilities2EXT" ) ); + PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT vkGetPhysicalDeviceSurfaceCapabilities2EXT = 0; //=== VK_KHR_external_fence_capabilities === - vkGetPhysicalDeviceExternalFencePropertiesKHR = - PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalFencePropertiesKHR" ) ); - if ( !vkGetPhysicalDeviceExternalFenceProperties ) - vkGetPhysicalDeviceExternalFenceProperties = vkGetPhysicalDeviceExternalFencePropertiesKHR; + PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR vkGetPhysicalDeviceExternalFencePropertiesKHR = 0; //=== 
VK_KHR_performance_query === - vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR = PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( - vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR" ) ); - vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR = PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( - vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR" ) ); + PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR = 0; + PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR = 0; //=== VK_KHR_get_surface_capabilities2 === - vkGetPhysicalDeviceSurfaceCapabilities2KHR = - PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilities2KHR" ) ); - vkGetPhysicalDeviceSurfaceFormats2KHR = - PFN_vkGetPhysicalDeviceSurfaceFormats2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceFormats2KHR" ) ); + PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR vkGetPhysicalDeviceSurfaceCapabilities2KHR = 0; + PFN_vkGetPhysicalDeviceSurfaceFormats2KHR vkGetPhysicalDeviceSurfaceFormats2KHR = 0; //=== VK_KHR_get_display_properties2 === - vkGetPhysicalDeviceDisplayProperties2KHR = - PFN_vkGetPhysicalDeviceDisplayProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayProperties2KHR" ) ); - vkGetPhysicalDeviceDisplayPlaneProperties2KHR = - PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPlaneProperties2KHR" ) ); - vkGetDisplayModeProperties2KHR = PFN_vkGetDisplayModeProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetDisplayModeProperties2KHR" ) ); - vkGetDisplayPlaneCapabilities2KHR = PFN_vkGetDisplayPlaneCapabilities2KHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneCapabilities2KHR" ) ); + PFN_vkGetPhysicalDeviceDisplayProperties2KHR vkGetPhysicalDeviceDisplayProperties2KHR = 0; + PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR vkGetPhysicalDeviceDisplayPlaneProperties2KHR = 0; + PFN_vkGetDisplayModeProperties2KHR vkGetDisplayModeProperties2KHR = 0; + PFN_vkGetDisplayPlaneCapabilities2KHR vkGetDisplayPlaneCapabilities2KHR = 0; # if defined( VK_USE_PLATFORM_IOS_MVK ) //=== VK_MVK_ios_surface === - vkCreateIOSSurfaceMVK = PFN_vkCreateIOSSurfaceMVK( vkGetInstanceProcAddr( instance, "vkCreateIOSSurfaceMVK" ) ); + PFN_vkCreateIOSSurfaceMVK vkCreateIOSSurfaceMVK = 0; +# else + PFN_dummy vkCreateIOSSurfaceMVK_placeholder = 0; # endif /*VK_USE_PLATFORM_IOS_MVK*/ # if defined( VK_USE_PLATFORM_MACOS_MVK ) //=== VK_MVK_macos_surface === - vkCreateMacOSSurfaceMVK = PFN_vkCreateMacOSSurfaceMVK( vkGetInstanceProcAddr( instance, "vkCreateMacOSSurfaceMVK" ) ); + PFN_vkCreateMacOSSurfaceMVK vkCreateMacOSSurfaceMVK = 0; +# else + PFN_dummy vkCreateMacOSSurfaceMVK_placeholder = 0; # endif /*VK_USE_PLATFORM_MACOS_MVK*/ //=== VK_EXT_debug_utils === - vkCreateDebugUtilsMessengerEXT = PFN_vkCreateDebugUtilsMessengerEXT( vkGetInstanceProcAddr( instance, "vkCreateDebugUtilsMessengerEXT" ) ); - vkDestroyDebugUtilsMessengerEXT = PFN_vkDestroyDebugUtilsMessengerEXT( vkGetInstanceProcAddr( instance, "vkDestroyDebugUtilsMessengerEXT" ) ); - vkSubmitDebugUtilsMessageEXT = PFN_vkSubmitDebugUtilsMessageEXT( vkGetInstanceProcAddr( instance, "vkSubmitDebugUtilsMessageEXT" ) ); + PFN_vkCreateDebugUtilsMessengerEXT 
vkCreateDebugUtilsMessengerEXT = 0; + PFN_vkDestroyDebugUtilsMessengerEXT vkDestroyDebugUtilsMessengerEXT = 0; + PFN_vkSubmitDebugUtilsMessageEXT vkSubmitDebugUtilsMessageEXT = 0; //=== VK_EXT_sample_locations === - vkGetPhysicalDeviceMultisamplePropertiesEXT = - PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMultisamplePropertiesEXT" ) ); + PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT vkGetPhysicalDeviceMultisamplePropertiesEXT = 0; //=== VK_EXT_calibrated_timestamps === - vkGetPhysicalDeviceCalibrateableTimeDomainsEXT = - PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCalibrateableTimeDomainsEXT" ) ); - if ( !vkGetPhysicalDeviceCalibrateableTimeDomainsKHR ) - vkGetPhysicalDeviceCalibrateableTimeDomainsKHR = vkGetPhysicalDeviceCalibrateableTimeDomainsEXT; + PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT vkGetPhysicalDeviceCalibrateableTimeDomainsEXT = 0; # if defined( VK_USE_PLATFORM_FUCHSIA ) //=== VK_FUCHSIA_imagepipe_surface === - vkCreateImagePipeSurfaceFUCHSIA = PFN_vkCreateImagePipeSurfaceFUCHSIA( vkGetInstanceProcAddr( instance, "vkCreateImagePipeSurfaceFUCHSIA" ) ); + PFN_vkCreateImagePipeSurfaceFUCHSIA vkCreateImagePipeSurfaceFUCHSIA = 0; +# else + PFN_dummy vkCreateImagePipeSurfaceFUCHSIA_placeholder = 0; # endif /*VK_USE_PLATFORM_FUCHSIA*/ # if defined( VK_USE_PLATFORM_METAL_EXT ) //=== VK_EXT_metal_surface === - vkCreateMetalSurfaceEXT = PFN_vkCreateMetalSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateMetalSurfaceEXT" ) ); + PFN_vkCreateMetalSurfaceEXT vkCreateMetalSurfaceEXT = 0; +# else + PFN_dummy vkCreateMetalSurfaceEXT_placeholder = 0; # endif /*VK_USE_PLATFORM_METAL_EXT*/ //=== VK_KHR_fragment_shading_rate === - vkGetPhysicalDeviceFragmentShadingRatesKHR = - PFN_vkGetPhysicalDeviceFragmentShadingRatesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFragmentShadingRatesKHR" ) ); + PFN_vkGetPhysicalDeviceFragmentShadingRatesKHR vkGetPhysicalDeviceFragmentShadingRatesKHR = 0; //=== VK_EXT_tooling_info === - vkGetPhysicalDeviceToolPropertiesEXT = - PFN_vkGetPhysicalDeviceToolPropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceToolPropertiesEXT" ) ); - if ( !vkGetPhysicalDeviceToolProperties ) - vkGetPhysicalDeviceToolProperties = vkGetPhysicalDeviceToolPropertiesEXT; + PFN_vkGetPhysicalDeviceToolPropertiesEXT vkGetPhysicalDeviceToolPropertiesEXT = 0; //=== VK_NV_cooperative_matrix === - vkGetPhysicalDeviceCooperativeMatrixPropertiesNV = - PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCooperativeMatrixPropertiesNV" ) ); + PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV vkGetPhysicalDeviceCooperativeMatrixPropertiesNV = 0; //=== VK_NV_coverage_reduction_mode === - vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV = PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( - vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV" ) ); + PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV = 0; # if defined( VK_USE_PLATFORM_WIN32_KHR ) //=== VK_EXT_full_screen_exclusive === - vkGetPhysicalDeviceSurfacePresentModes2EXT = - PFN_vkGetPhysicalDeviceSurfacePresentModes2EXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfacePresentModes2EXT" ) ); + PFN_vkGetPhysicalDeviceSurfacePresentModes2EXT 
vkGetPhysicalDeviceSurfacePresentModes2EXT = 0; +# else + PFN_dummy vkGetPhysicalDeviceSurfacePresentModes2EXT_placeholder = 0; # endif /*VK_USE_PLATFORM_WIN32_KHR*/ //=== VK_EXT_headless_surface === - vkCreateHeadlessSurfaceEXT = PFN_vkCreateHeadlessSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateHeadlessSurfaceEXT" ) ); + PFN_vkCreateHeadlessSurfaceEXT vkCreateHeadlessSurfaceEXT = 0; //=== VK_EXT_acquire_drm_display === - vkAcquireDrmDisplayEXT = PFN_vkAcquireDrmDisplayEXT( vkGetInstanceProcAddr( instance, "vkAcquireDrmDisplayEXT" ) ); - vkGetDrmDisplayEXT = PFN_vkGetDrmDisplayEXT( vkGetInstanceProcAddr( instance, "vkGetDrmDisplayEXT" ) ); + PFN_vkAcquireDrmDisplayEXT vkAcquireDrmDisplayEXT = 0; + PFN_vkGetDrmDisplayEXT vkGetDrmDisplayEXT = 0; //=== VK_KHR_video_encode_queue === - vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR = PFN_vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR( - vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR" ) ); + PFN_vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR = 0; # if defined( VK_USE_PLATFORM_WIN32_KHR ) //=== VK_NV_acquire_winrt_display === - vkAcquireWinrtDisplayNV = PFN_vkAcquireWinrtDisplayNV( vkGetInstanceProcAddr( instance, "vkAcquireWinrtDisplayNV" ) ); - vkGetWinrtDisplayNV = PFN_vkGetWinrtDisplayNV( vkGetInstanceProcAddr( instance, "vkGetWinrtDisplayNV" ) ); + PFN_vkAcquireWinrtDisplayNV vkAcquireWinrtDisplayNV = 0; + PFN_vkGetWinrtDisplayNV vkGetWinrtDisplayNV = 0; +# else + PFN_dummy vkAcquireWinrtDisplayNV_placeholder = 0; + PFN_dummy vkGetWinrtDisplayNV_placeholder = 0; # endif /*VK_USE_PLATFORM_WIN32_KHR*/ # if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) //=== VK_EXT_directfb_surface === - vkCreateDirectFBSurfaceEXT = PFN_vkCreateDirectFBSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateDirectFBSurfaceEXT" ) ); - vkGetPhysicalDeviceDirectFBPresentationSupportEXT = - PFN_vkGetPhysicalDeviceDirectFBPresentationSupportEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDirectFBPresentationSupportEXT" ) ); + PFN_vkCreateDirectFBSurfaceEXT vkCreateDirectFBSurfaceEXT = 0; + PFN_vkGetPhysicalDeviceDirectFBPresentationSupportEXT vkGetPhysicalDeviceDirectFBPresentationSupportEXT = 0; +# else + PFN_dummy vkCreateDirectFBSurfaceEXT_placeholder = 0; + PFN_dummy vkGetPhysicalDeviceDirectFBPresentationSupportEXT_placeholder = 0; # endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ # if defined( VK_USE_PLATFORM_SCREEN_QNX ) //=== VK_QNX_screen_surface === - vkCreateScreenSurfaceQNX = PFN_vkCreateScreenSurfaceQNX( vkGetInstanceProcAddr( instance, "vkCreateScreenSurfaceQNX" ) ); - vkGetPhysicalDeviceScreenPresentationSupportQNX = - PFN_vkGetPhysicalDeviceScreenPresentationSupportQNX( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceScreenPresentationSupportQNX" ) ); + PFN_vkCreateScreenSurfaceQNX vkCreateScreenSurfaceQNX = 0; + PFN_vkGetPhysicalDeviceScreenPresentationSupportQNX vkGetPhysicalDeviceScreenPresentationSupportQNX = 0; +# else + PFN_dummy vkCreateScreenSurfaceQNX_placeholder = 0; + PFN_dummy vkGetPhysicalDeviceScreenPresentationSupportQNX_placeholder = 0; # endif /*VK_USE_PLATFORM_SCREEN_QNX*/ //=== VK_NV_optical_flow === - vkGetPhysicalDeviceOpticalFlowImageFormatsNV = - PFN_vkGetPhysicalDeviceOpticalFlowImageFormatsNV( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceOpticalFlowImageFormatsNV" ) ); + PFN_vkGetPhysicalDeviceOpticalFlowImageFormatsNV vkGetPhysicalDeviceOpticalFlowImageFormatsNV = 0; //=== 
VK_KHR_cooperative_matrix === - vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR = - PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR" ) ); + PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR = 0; //=== VK_KHR_calibrated_timestamps === - vkGetPhysicalDeviceCalibrateableTimeDomainsKHR = - PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCalibrateableTimeDomainsKHR" ) ); + PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsKHR vkGetPhysicalDeviceCalibrateableTimeDomainsKHR = 0; //=== VK_NV_cooperative_matrix2 === - vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV = PFN_vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV( - vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV" ) ); + PFN_vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV = 0; - vkGetDeviceProcAddr = PFN_vkGetDeviceProcAddr( vkGetInstanceProcAddr( instance, "vkGetDeviceProcAddr" ) ); - } + PFN_vkGetDeviceProcAddr vkGetDeviceProcAddr = 0; + }; - public: - //=== VK_VERSION_1_0 === - PFN_vkDestroyInstance vkDestroyInstance = 0; - PFN_vkEnumeratePhysicalDevices vkEnumeratePhysicalDevices = 0; - PFN_vkGetPhysicalDeviceFeatures vkGetPhysicalDeviceFeatures = 0; - PFN_vkGetPhysicalDeviceFormatProperties vkGetPhysicalDeviceFormatProperties = 0; - PFN_vkGetPhysicalDeviceImageFormatProperties vkGetPhysicalDeviceImageFormatProperties = 0; - PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties = 0; - PFN_vkGetPhysicalDeviceQueueFamilyProperties vkGetPhysicalDeviceQueueFamilyProperties = 0; - PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties = 0; - PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr = 0; - PFN_vkCreateDevice vkCreateDevice = 0; - PFN_vkEnumerateDeviceExtensionProperties vkEnumerateDeviceExtensionProperties = 0; - PFN_vkEnumerateDeviceLayerProperties vkEnumerateDeviceLayerProperties = 0; - PFN_vkGetPhysicalDeviceSparseImageFormatProperties vkGetPhysicalDeviceSparseImageFormatProperties = 0; + class DeviceDispatcher : public ::VULKAN_HPP_NAMESPACE::detail::DispatchLoaderBase + { + public: + DeviceDispatcher( PFN_vkGetDeviceProcAddr getProcAddr, VkDevice device ) : vkGetDeviceProcAddr( getProcAddr ) + { + //=== VK_VERSION_1_0 === + vkGetDeviceProcAddr = PFN_vkGetDeviceProcAddr( vkGetDeviceProcAddr( device, "vkGetDeviceProcAddr" ) ); + vkDestroyDevice = PFN_vkDestroyDevice( vkGetDeviceProcAddr( device, "vkDestroyDevice" ) ); + vkGetDeviceQueue = PFN_vkGetDeviceQueue( vkGetDeviceProcAddr( device, "vkGetDeviceQueue" ) ); + vkQueueSubmit = PFN_vkQueueSubmit( vkGetDeviceProcAddr( device, "vkQueueSubmit" ) ); + vkQueueWaitIdle = PFN_vkQueueWaitIdle( vkGetDeviceProcAddr( device, "vkQueueWaitIdle" ) ); + vkDeviceWaitIdle = PFN_vkDeviceWaitIdle( vkGetDeviceProcAddr( device, "vkDeviceWaitIdle" ) ); + vkAllocateMemory = PFN_vkAllocateMemory( vkGetDeviceProcAddr( device, "vkAllocateMemory" ) ); + vkFreeMemory = PFN_vkFreeMemory( vkGetDeviceProcAddr( device, "vkFreeMemory" ) ); + vkMapMemory = PFN_vkMapMemory( vkGetDeviceProcAddr( device, "vkMapMemory" ) ); + vkUnmapMemory = PFN_vkUnmapMemory( vkGetDeviceProcAddr( device, "vkUnmapMemory" ) ); + vkFlushMappedMemoryRanges = PFN_vkFlushMappedMemoryRanges( vkGetDeviceProcAddr( device, 
"vkFlushMappedMemoryRanges" ) ); + vkInvalidateMappedMemoryRanges = PFN_vkInvalidateMappedMemoryRanges( vkGetDeviceProcAddr( device, "vkInvalidateMappedMemoryRanges" ) ); + vkGetDeviceMemoryCommitment = PFN_vkGetDeviceMemoryCommitment( vkGetDeviceProcAddr( device, "vkGetDeviceMemoryCommitment" ) ); + vkBindBufferMemory = PFN_vkBindBufferMemory( vkGetDeviceProcAddr( device, "vkBindBufferMemory" ) ); + vkBindImageMemory = PFN_vkBindImageMemory( vkGetDeviceProcAddr( device, "vkBindImageMemory" ) ); + vkGetBufferMemoryRequirements = PFN_vkGetBufferMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements" ) ); + vkGetImageMemoryRequirements = PFN_vkGetImageMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements" ) ); + vkGetImageSparseMemoryRequirements = PFN_vkGetImageSparseMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements" ) ); + vkQueueBindSparse = PFN_vkQueueBindSparse( vkGetDeviceProcAddr( device, "vkQueueBindSparse" ) ); + vkCreateFence = PFN_vkCreateFence( vkGetDeviceProcAddr( device, "vkCreateFence" ) ); + vkDestroyFence = PFN_vkDestroyFence( vkGetDeviceProcAddr( device, "vkDestroyFence" ) ); + vkResetFences = PFN_vkResetFences( vkGetDeviceProcAddr( device, "vkResetFences" ) ); + vkGetFenceStatus = PFN_vkGetFenceStatus( vkGetDeviceProcAddr( device, "vkGetFenceStatus" ) ); + vkWaitForFences = PFN_vkWaitForFences( vkGetDeviceProcAddr( device, "vkWaitForFences" ) ); + vkCreateSemaphore = PFN_vkCreateSemaphore( vkGetDeviceProcAddr( device, "vkCreateSemaphore" ) ); + vkDestroySemaphore = PFN_vkDestroySemaphore( vkGetDeviceProcAddr( device, "vkDestroySemaphore" ) ); + vkCreateEvent = PFN_vkCreateEvent( vkGetDeviceProcAddr( device, "vkCreateEvent" ) ); + vkDestroyEvent = PFN_vkDestroyEvent( vkGetDeviceProcAddr( device, "vkDestroyEvent" ) ); + vkGetEventStatus = PFN_vkGetEventStatus( vkGetDeviceProcAddr( device, "vkGetEventStatus" ) ); + vkSetEvent = PFN_vkSetEvent( vkGetDeviceProcAddr( device, "vkSetEvent" ) ); + vkResetEvent = PFN_vkResetEvent( vkGetDeviceProcAddr( device, "vkResetEvent" ) ); + vkCreateQueryPool = PFN_vkCreateQueryPool( vkGetDeviceProcAddr( device, "vkCreateQueryPool" ) ); + vkDestroyQueryPool = PFN_vkDestroyQueryPool( vkGetDeviceProcAddr( device, "vkDestroyQueryPool" ) ); + vkGetQueryPoolResults = PFN_vkGetQueryPoolResults( vkGetDeviceProcAddr( device, "vkGetQueryPoolResults" ) ); + vkCreateBuffer = PFN_vkCreateBuffer( vkGetDeviceProcAddr( device, "vkCreateBuffer" ) ); + vkDestroyBuffer = PFN_vkDestroyBuffer( vkGetDeviceProcAddr( device, "vkDestroyBuffer" ) ); + vkCreateBufferView = PFN_vkCreateBufferView( vkGetDeviceProcAddr( device, "vkCreateBufferView" ) ); + vkDestroyBufferView = PFN_vkDestroyBufferView( vkGetDeviceProcAddr( device, "vkDestroyBufferView" ) ); + vkCreateImage = PFN_vkCreateImage( vkGetDeviceProcAddr( device, "vkCreateImage" ) ); + vkDestroyImage = PFN_vkDestroyImage( vkGetDeviceProcAddr( device, "vkDestroyImage" ) ); + vkGetImageSubresourceLayout = PFN_vkGetImageSubresourceLayout( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout" ) ); + vkCreateImageView = PFN_vkCreateImageView( vkGetDeviceProcAddr( device, "vkCreateImageView" ) ); + vkDestroyImageView = PFN_vkDestroyImageView( vkGetDeviceProcAddr( device, "vkDestroyImageView" ) ); + vkCreateShaderModule = PFN_vkCreateShaderModule( vkGetDeviceProcAddr( device, "vkCreateShaderModule" ) ); + vkDestroyShaderModule = PFN_vkDestroyShaderModule( vkGetDeviceProcAddr( device, "vkDestroyShaderModule" ) ); + 
vkCreatePipelineCache = PFN_vkCreatePipelineCache( vkGetDeviceProcAddr( device, "vkCreatePipelineCache" ) ); + vkDestroyPipelineCache = PFN_vkDestroyPipelineCache( vkGetDeviceProcAddr( device, "vkDestroyPipelineCache" ) ); + vkGetPipelineCacheData = PFN_vkGetPipelineCacheData( vkGetDeviceProcAddr( device, "vkGetPipelineCacheData" ) ); + vkMergePipelineCaches = PFN_vkMergePipelineCaches( vkGetDeviceProcAddr( device, "vkMergePipelineCaches" ) ); + vkCreateGraphicsPipelines = PFN_vkCreateGraphicsPipelines( vkGetDeviceProcAddr( device, "vkCreateGraphicsPipelines" ) ); + vkCreateComputePipelines = PFN_vkCreateComputePipelines( vkGetDeviceProcAddr( device, "vkCreateComputePipelines" ) ); + vkDestroyPipeline = PFN_vkDestroyPipeline( vkGetDeviceProcAddr( device, "vkDestroyPipeline" ) ); + vkCreatePipelineLayout = PFN_vkCreatePipelineLayout( vkGetDeviceProcAddr( device, "vkCreatePipelineLayout" ) ); + vkDestroyPipelineLayout = PFN_vkDestroyPipelineLayout( vkGetDeviceProcAddr( device, "vkDestroyPipelineLayout" ) ); + vkCreateSampler = PFN_vkCreateSampler( vkGetDeviceProcAddr( device, "vkCreateSampler" ) ); + vkDestroySampler = PFN_vkDestroySampler( vkGetDeviceProcAddr( device, "vkDestroySampler" ) ); + vkCreateDescriptorSetLayout = PFN_vkCreateDescriptorSetLayout( vkGetDeviceProcAddr( device, "vkCreateDescriptorSetLayout" ) ); + vkDestroyDescriptorSetLayout = PFN_vkDestroyDescriptorSetLayout( vkGetDeviceProcAddr( device, "vkDestroyDescriptorSetLayout" ) ); + vkCreateDescriptorPool = PFN_vkCreateDescriptorPool( vkGetDeviceProcAddr( device, "vkCreateDescriptorPool" ) ); + vkDestroyDescriptorPool = PFN_vkDestroyDescriptorPool( vkGetDeviceProcAddr( device, "vkDestroyDescriptorPool" ) ); + vkResetDescriptorPool = PFN_vkResetDescriptorPool( vkGetDeviceProcAddr( device, "vkResetDescriptorPool" ) ); + vkAllocateDescriptorSets = PFN_vkAllocateDescriptorSets( vkGetDeviceProcAddr( device, "vkAllocateDescriptorSets" ) ); + vkFreeDescriptorSets = PFN_vkFreeDescriptorSets( vkGetDeviceProcAddr( device, "vkFreeDescriptorSets" ) ); + vkUpdateDescriptorSets = PFN_vkUpdateDescriptorSets( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSets" ) ); + vkCreateFramebuffer = PFN_vkCreateFramebuffer( vkGetDeviceProcAddr( device, "vkCreateFramebuffer" ) ); + vkDestroyFramebuffer = PFN_vkDestroyFramebuffer( vkGetDeviceProcAddr( device, "vkDestroyFramebuffer" ) ); + vkCreateRenderPass = PFN_vkCreateRenderPass( vkGetDeviceProcAddr( device, "vkCreateRenderPass" ) ); + vkDestroyRenderPass = PFN_vkDestroyRenderPass( vkGetDeviceProcAddr( device, "vkDestroyRenderPass" ) ); + vkGetRenderAreaGranularity = PFN_vkGetRenderAreaGranularity( vkGetDeviceProcAddr( device, "vkGetRenderAreaGranularity" ) ); + vkCreateCommandPool = PFN_vkCreateCommandPool( vkGetDeviceProcAddr( device, "vkCreateCommandPool" ) ); + vkDestroyCommandPool = PFN_vkDestroyCommandPool( vkGetDeviceProcAddr( device, "vkDestroyCommandPool" ) ); + vkResetCommandPool = PFN_vkResetCommandPool( vkGetDeviceProcAddr( device, "vkResetCommandPool" ) ); + vkAllocateCommandBuffers = PFN_vkAllocateCommandBuffers( vkGetDeviceProcAddr( device, "vkAllocateCommandBuffers" ) ); + vkFreeCommandBuffers = PFN_vkFreeCommandBuffers( vkGetDeviceProcAddr( device, "vkFreeCommandBuffers" ) ); + vkBeginCommandBuffer = PFN_vkBeginCommandBuffer( vkGetDeviceProcAddr( device, "vkBeginCommandBuffer" ) ); + vkEndCommandBuffer = PFN_vkEndCommandBuffer( vkGetDeviceProcAddr( device, "vkEndCommandBuffer" ) ); + vkResetCommandBuffer = PFN_vkResetCommandBuffer( vkGetDeviceProcAddr( device, 
"vkResetCommandBuffer" ) ); + vkCmdBindPipeline = PFN_vkCmdBindPipeline( vkGetDeviceProcAddr( device, "vkCmdBindPipeline" ) ); + vkCmdSetViewport = PFN_vkCmdSetViewport( vkGetDeviceProcAddr( device, "vkCmdSetViewport" ) ); + vkCmdSetScissor = PFN_vkCmdSetScissor( vkGetDeviceProcAddr( device, "vkCmdSetScissor" ) ); + vkCmdSetLineWidth = PFN_vkCmdSetLineWidth( vkGetDeviceProcAddr( device, "vkCmdSetLineWidth" ) ); + vkCmdSetDepthBias = PFN_vkCmdSetDepthBias( vkGetDeviceProcAddr( device, "vkCmdSetDepthBias" ) ); + vkCmdSetBlendConstants = PFN_vkCmdSetBlendConstants( vkGetDeviceProcAddr( device, "vkCmdSetBlendConstants" ) ); + vkCmdSetDepthBounds = PFN_vkCmdSetDepthBounds( vkGetDeviceProcAddr( device, "vkCmdSetDepthBounds" ) ); + vkCmdSetStencilCompareMask = PFN_vkCmdSetStencilCompareMask( vkGetDeviceProcAddr( device, "vkCmdSetStencilCompareMask" ) ); + vkCmdSetStencilWriteMask = PFN_vkCmdSetStencilWriteMask( vkGetDeviceProcAddr( device, "vkCmdSetStencilWriteMask" ) ); + vkCmdSetStencilReference = PFN_vkCmdSetStencilReference( vkGetDeviceProcAddr( device, "vkCmdSetStencilReference" ) ); + vkCmdBindDescriptorSets = PFN_vkCmdBindDescriptorSets( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorSets" ) ); + vkCmdBindIndexBuffer = PFN_vkCmdBindIndexBuffer( vkGetDeviceProcAddr( device, "vkCmdBindIndexBuffer" ) ); + vkCmdBindVertexBuffers = PFN_vkCmdBindVertexBuffers( vkGetDeviceProcAddr( device, "vkCmdBindVertexBuffers" ) ); + vkCmdDraw = PFN_vkCmdDraw( vkGetDeviceProcAddr( device, "vkCmdDraw" ) ); + vkCmdDrawIndexed = PFN_vkCmdDrawIndexed( vkGetDeviceProcAddr( device, "vkCmdDrawIndexed" ) ); + vkCmdDrawIndirect = PFN_vkCmdDrawIndirect( vkGetDeviceProcAddr( device, "vkCmdDrawIndirect" ) ); + vkCmdDrawIndexedIndirect = PFN_vkCmdDrawIndexedIndirect( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirect" ) ); + vkCmdDispatch = PFN_vkCmdDispatch( vkGetDeviceProcAddr( device, "vkCmdDispatch" ) ); + vkCmdDispatchIndirect = PFN_vkCmdDispatchIndirect( vkGetDeviceProcAddr( device, "vkCmdDispatchIndirect" ) ); + vkCmdCopyBuffer = PFN_vkCmdCopyBuffer( vkGetDeviceProcAddr( device, "vkCmdCopyBuffer" ) ); + vkCmdCopyImage = PFN_vkCmdCopyImage( vkGetDeviceProcAddr( device, "vkCmdCopyImage" ) ); + vkCmdBlitImage = PFN_vkCmdBlitImage( vkGetDeviceProcAddr( device, "vkCmdBlitImage" ) ); + vkCmdCopyBufferToImage = PFN_vkCmdCopyBufferToImage( vkGetDeviceProcAddr( device, "vkCmdCopyBufferToImage" ) ); + vkCmdCopyImageToBuffer = PFN_vkCmdCopyImageToBuffer( vkGetDeviceProcAddr( device, "vkCmdCopyImageToBuffer" ) ); + vkCmdUpdateBuffer = PFN_vkCmdUpdateBuffer( vkGetDeviceProcAddr( device, "vkCmdUpdateBuffer" ) ); + vkCmdFillBuffer = PFN_vkCmdFillBuffer( vkGetDeviceProcAddr( device, "vkCmdFillBuffer" ) ); + vkCmdClearColorImage = PFN_vkCmdClearColorImage( vkGetDeviceProcAddr( device, "vkCmdClearColorImage" ) ); + vkCmdClearDepthStencilImage = PFN_vkCmdClearDepthStencilImage( vkGetDeviceProcAddr( device, "vkCmdClearDepthStencilImage" ) ); + vkCmdClearAttachments = PFN_vkCmdClearAttachments( vkGetDeviceProcAddr( device, "vkCmdClearAttachments" ) ); + vkCmdResolveImage = PFN_vkCmdResolveImage( vkGetDeviceProcAddr( device, "vkCmdResolveImage" ) ); + vkCmdSetEvent = PFN_vkCmdSetEvent( vkGetDeviceProcAddr( device, "vkCmdSetEvent" ) ); + vkCmdResetEvent = PFN_vkCmdResetEvent( vkGetDeviceProcAddr( device, "vkCmdResetEvent" ) ); + vkCmdWaitEvents = PFN_vkCmdWaitEvents( vkGetDeviceProcAddr( device, "vkCmdWaitEvents" ) ); + vkCmdPipelineBarrier = PFN_vkCmdPipelineBarrier( vkGetDeviceProcAddr( device, "vkCmdPipelineBarrier" ) ); + 
vkCmdBeginQuery = PFN_vkCmdBeginQuery( vkGetDeviceProcAddr( device, "vkCmdBeginQuery" ) ); + vkCmdEndQuery = PFN_vkCmdEndQuery( vkGetDeviceProcAddr( device, "vkCmdEndQuery" ) ); + vkCmdResetQueryPool = PFN_vkCmdResetQueryPool( vkGetDeviceProcAddr( device, "vkCmdResetQueryPool" ) ); + vkCmdWriteTimestamp = PFN_vkCmdWriteTimestamp( vkGetDeviceProcAddr( device, "vkCmdWriteTimestamp" ) ); + vkCmdCopyQueryPoolResults = PFN_vkCmdCopyQueryPoolResults( vkGetDeviceProcAddr( device, "vkCmdCopyQueryPoolResults" ) ); + vkCmdPushConstants = PFN_vkCmdPushConstants( vkGetDeviceProcAddr( device, "vkCmdPushConstants" ) ); + vkCmdBeginRenderPass = PFN_vkCmdBeginRenderPass( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass" ) ); + vkCmdNextSubpass = PFN_vkCmdNextSubpass( vkGetDeviceProcAddr( device, "vkCmdNextSubpass" ) ); + vkCmdEndRenderPass = PFN_vkCmdEndRenderPass( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass" ) ); + vkCmdExecuteCommands = PFN_vkCmdExecuteCommands( vkGetDeviceProcAddr( device, "vkCmdExecuteCommands" ) ); - //=== VK_VERSION_1_1 === - PFN_vkEnumeratePhysicalDeviceGroups vkEnumeratePhysicalDeviceGroups = 0; - PFN_vkGetPhysicalDeviceFeatures2 vkGetPhysicalDeviceFeatures2 = 0; - PFN_vkGetPhysicalDeviceProperties2 vkGetPhysicalDeviceProperties2 = 0; - PFN_vkGetPhysicalDeviceFormatProperties2 vkGetPhysicalDeviceFormatProperties2 = 0; - PFN_vkGetPhysicalDeviceImageFormatProperties2 vkGetPhysicalDeviceImageFormatProperties2 = 0; - PFN_vkGetPhysicalDeviceQueueFamilyProperties2 vkGetPhysicalDeviceQueueFamilyProperties2 = 0; - PFN_vkGetPhysicalDeviceMemoryProperties2 vkGetPhysicalDeviceMemoryProperties2 = 0; - PFN_vkGetPhysicalDeviceSparseImageFormatProperties2 vkGetPhysicalDeviceSparseImageFormatProperties2 = 0; - PFN_vkGetPhysicalDeviceExternalBufferProperties vkGetPhysicalDeviceExternalBufferProperties = 0; - PFN_vkGetPhysicalDeviceExternalFenceProperties vkGetPhysicalDeviceExternalFenceProperties = 0; - PFN_vkGetPhysicalDeviceExternalSemaphoreProperties vkGetPhysicalDeviceExternalSemaphoreProperties = 0; + //=== VK_VERSION_1_1 === + vkBindBufferMemory2 = PFN_vkBindBufferMemory2( vkGetDeviceProcAddr( device, "vkBindBufferMemory2" ) ); + vkBindImageMemory2 = PFN_vkBindImageMemory2( vkGetDeviceProcAddr( device, "vkBindImageMemory2" ) ); + vkGetDeviceGroupPeerMemoryFeatures = PFN_vkGetDeviceGroupPeerMemoryFeatures( vkGetDeviceProcAddr( device, "vkGetDeviceGroupPeerMemoryFeatures" ) ); + vkCmdSetDeviceMask = PFN_vkCmdSetDeviceMask( vkGetDeviceProcAddr( device, "vkCmdSetDeviceMask" ) ); + vkCmdDispatchBase = PFN_vkCmdDispatchBase( vkGetDeviceProcAddr( device, "vkCmdDispatchBase" ) ); + vkGetImageMemoryRequirements2 = PFN_vkGetImageMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements2" ) ); + vkGetBufferMemoryRequirements2 = PFN_vkGetBufferMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements2" ) ); + vkGetImageSparseMemoryRequirements2 = PFN_vkGetImageSparseMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements2" ) ); + vkTrimCommandPool = PFN_vkTrimCommandPool( vkGetDeviceProcAddr( device, "vkTrimCommandPool" ) ); + vkGetDeviceQueue2 = PFN_vkGetDeviceQueue2( vkGetDeviceProcAddr( device, "vkGetDeviceQueue2" ) ); + vkCreateSamplerYcbcrConversion = PFN_vkCreateSamplerYcbcrConversion( vkGetDeviceProcAddr( device, "vkCreateSamplerYcbcrConversion" ) ); + vkDestroySamplerYcbcrConversion = PFN_vkDestroySamplerYcbcrConversion( vkGetDeviceProcAddr( device, "vkDestroySamplerYcbcrConversion" ) ); + 
vkCreateDescriptorUpdateTemplate = PFN_vkCreateDescriptorUpdateTemplate( vkGetDeviceProcAddr( device, "vkCreateDescriptorUpdateTemplate" ) ); + vkDestroyDescriptorUpdateTemplate = PFN_vkDestroyDescriptorUpdateTemplate( vkGetDeviceProcAddr( device, "vkDestroyDescriptorUpdateTemplate" ) ); + vkUpdateDescriptorSetWithTemplate = PFN_vkUpdateDescriptorSetWithTemplate( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSetWithTemplate" ) ); + vkGetDescriptorSetLayoutSupport = PFN_vkGetDescriptorSetLayoutSupport( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutSupport" ) ); - //=== VK_VERSION_1_3 === - PFN_vkGetPhysicalDeviceToolProperties vkGetPhysicalDeviceToolProperties = 0; + //=== VK_VERSION_1_2 === + vkCmdDrawIndirectCount = PFN_vkCmdDrawIndirectCount( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCount" ) ); + vkCmdDrawIndexedIndirectCount = PFN_vkCmdDrawIndexedIndirectCount( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCount" ) ); + vkCreateRenderPass2 = PFN_vkCreateRenderPass2( vkGetDeviceProcAddr( device, "vkCreateRenderPass2" ) ); + vkCmdBeginRenderPass2 = PFN_vkCmdBeginRenderPass2( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass2" ) ); + vkCmdNextSubpass2 = PFN_vkCmdNextSubpass2( vkGetDeviceProcAddr( device, "vkCmdNextSubpass2" ) ); + vkCmdEndRenderPass2 = PFN_vkCmdEndRenderPass2( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass2" ) ); + vkResetQueryPool = PFN_vkResetQueryPool( vkGetDeviceProcAddr( device, "vkResetQueryPool" ) ); + vkGetSemaphoreCounterValue = PFN_vkGetSemaphoreCounterValue( vkGetDeviceProcAddr( device, "vkGetSemaphoreCounterValue" ) ); + vkWaitSemaphores = PFN_vkWaitSemaphores( vkGetDeviceProcAddr( device, "vkWaitSemaphores" ) ); + vkSignalSemaphore = PFN_vkSignalSemaphore( vkGetDeviceProcAddr( device, "vkSignalSemaphore" ) ); + vkGetBufferDeviceAddress = PFN_vkGetBufferDeviceAddress( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddress" ) ); + vkGetBufferOpaqueCaptureAddress = PFN_vkGetBufferOpaqueCaptureAddress( vkGetDeviceProcAddr( device, "vkGetBufferOpaqueCaptureAddress" ) ); + vkGetDeviceMemoryOpaqueCaptureAddress = + PFN_vkGetDeviceMemoryOpaqueCaptureAddress( vkGetDeviceProcAddr( device, "vkGetDeviceMemoryOpaqueCaptureAddress" ) ); - //=== VK_KHR_surface === - PFN_vkDestroySurfaceKHR vkDestroySurfaceKHR = 0; - PFN_vkGetPhysicalDeviceSurfaceSupportKHR vkGetPhysicalDeviceSurfaceSupportKHR = 0; - PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR vkGetPhysicalDeviceSurfaceCapabilitiesKHR = 0; - PFN_vkGetPhysicalDeviceSurfaceFormatsKHR vkGetPhysicalDeviceSurfaceFormatsKHR = 0; - PFN_vkGetPhysicalDeviceSurfacePresentModesKHR vkGetPhysicalDeviceSurfacePresentModesKHR = 0; + //=== VK_VERSION_1_3 === + vkCreatePrivateDataSlot = PFN_vkCreatePrivateDataSlot( vkGetDeviceProcAddr( device, "vkCreatePrivateDataSlot" ) ); + vkDestroyPrivateDataSlot = PFN_vkDestroyPrivateDataSlot( vkGetDeviceProcAddr( device, "vkDestroyPrivateDataSlot" ) ); + vkSetPrivateData = PFN_vkSetPrivateData( vkGetDeviceProcAddr( device, "vkSetPrivateData" ) ); + vkGetPrivateData = PFN_vkGetPrivateData( vkGetDeviceProcAddr( device, "vkGetPrivateData" ) ); + vkCmdSetEvent2 = PFN_vkCmdSetEvent2( vkGetDeviceProcAddr( device, "vkCmdSetEvent2" ) ); + vkCmdResetEvent2 = PFN_vkCmdResetEvent2( vkGetDeviceProcAddr( device, "vkCmdResetEvent2" ) ); + vkCmdWaitEvents2 = PFN_vkCmdWaitEvents2( vkGetDeviceProcAddr( device, "vkCmdWaitEvents2" ) ); + vkCmdPipelineBarrier2 = PFN_vkCmdPipelineBarrier2( vkGetDeviceProcAddr( device, "vkCmdPipelineBarrier2" ) ); + vkCmdWriteTimestamp2 = 
PFN_vkCmdWriteTimestamp2( vkGetDeviceProcAddr( device, "vkCmdWriteTimestamp2" ) ); + vkQueueSubmit2 = PFN_vkQueueSubmit2( vkGetDeviceProcAddr( device, "vkQueueSubmit2" ) ); + vkCmdCopyBuffer2 = PFN_vkCmdCopyBuffer2( vkGetDeviceProcAddr( device, "vkCmdCopyBuffer2" ) ); + vkCmdCopyImage2 = PFN_vkCmdCopyImage2( vkGetDeviceProcAddr( device, "vkCmdCopyImage2" ) ); + vkCmdCopyBufferToImage2 = PFN_vkCmdCopyBufferToImage2( vkGetDeviceProcAddr( device, "vkCmdCopyBufferToImage2" ) ); + vkCmdCopyImageToBuffer2 = PFN_vkCmdCopyImageToBuffer2( vkGetDeviceProcAddr( device, "vkCmdCopyImageToBuffer2" ) ); + vkCmdBlitImage2 = PFN_vkCmdBlitImage2( vkGetDeviceProcAddr( device, "vkCmdBlitImage2" ) ); + vkCmdResolveImage2 = PFN_vkCmdResolveImage2( vkGetDeviceProcAddr( device, "vkCmdResolveImage2" ) ); + vkCmdBeginRendering = PFN_vkCmdBeginRendering( vkGetDeviceProcAddr( device, "vkCmdBeginRendering" ) ); + vkCmdEndRendering = PFN_vkCmdEndRendering( vkGetDeviceProcAddr( device, "vkCmdEndRendering" ) ); + vkCmdSetCullMode = PFN_vkCmdSetCullMode( vkGetDeviceProcAddr( device, "vkCmdSetCullMode" ) ); + vkCmdSetFrontFace = PFN_vkCmdSetFrontFace( vkGetDeviceProcAddr( device, "vkCmdSetFrontFace" ) ); + vkCmdSetPrimitiveTopology = PFN_vkCmdSetPrimitiveTopology( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveTopology" ) ); + vkCmdSetViewportWithCount = PFN_vkCmdSetViewportWithCount( vkGetDeviceProcAddr( device, "vkCmdSetViewportWithCount" ) ); + vkCmdSetScissorWithCount = PFN_vkCmdSetScissorWithCount( vkGetDeviceProcAddr( device, "vkCmdSetScissorWithCount" ) ); + vkCmdBindVertexBuffers2 = PFN_vkCmdBindVertexBuffers2( vkGetDeviceProcAddr( device, "vkCmdBindVertexBuffers2" ) ); + vkCmdSetDepthTestEnable = PFN_vkCmdSetDepthTestEnable( vkGetDeviceProcAddr( device, "vkCmdSetDepthTestEnable" ) ); + vkCmdSetDepthWriteEnable = PFN_vkCmdSetDepthWriteEnable( vkGetDeviceProcAddr( device, "vkCmdSetDepthWriteEnable" ) ); + vkCmdSetDepthCompareOp = PFN_vkCmdSetDepthCompareOp( vkGetDeviceProcAddr( device, "vkCmdSetDepthCompareOp" ) ); + vkCmdSetDepthBoundsTestEnable = PFN_vkCmdSetDepthBoundsTestEnable( vkGetDeviceProcAddr( device, "vkCmdSetDepthBoundsTestEnable" ) ); + vkCmdSetStencilTestEnable = PFN_vkCmdSetStencilTestEnable( vkGetDeviceProcAddr( device, "vkCmdSetStencilTestEnable" ) ); + vkCmdSetStencilOp = PFN_vkCmdSetStencilOp( vkGetDeviceProcAddr( device, "vkCmdSetStencilOp" ) ); + vkCmdSetRasterizerDiscardEnable = PFN_vkCmdSetRasterizerDiscardEnable( vkGetDeviceProcAddr( device, "vkCmdSetRasterizerDiscardEnable" ) ); + vkCmdSetDepthBiasEnable = PFN_vkCmdSetDepthBiasEnable( vkGetDeviceProcAddr( device, "vkCmdSetDepthBiasEnable" ) ); + vkCmdSetPrimitiveRestartEnable = PFN_vkCmdSetPrimitiveRestartEnable( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveRestartEnable" ) ); + vkGetDeviceBufferMemoryRequirements = PFN_vkGetDeviceBufferMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetDeviceBufferMemoryRequirements" ) ); + vkGetDeviceImageMemoryRequirements = PFN_vkGetDeviceImageMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetDeviceImageMemoryRequirements" ) ); + vkGetDeviceImageSparseMemoryRequirements = + PFN_vkGetDeviceImageSparseMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetDeviceImageSparseMemoryRequirements" ) ); - //=== VK_KHR_swapchain === - PFN_vkGetPhysicalDevicePresentRectanglesKHR vkGetPhysicalDevicePresentRectanglesKHR = 0; + //=== VK_KHR_swapchain === + vkCreateSwapchainKHR = PFN_vkCreateSwapchainKHR( vkGetDeviceProcAddr( device, "vkCreateSwapchainKHR" ) ); + vkDestroySwapchainKHR = 
PFN_vkDestroySwapchainKHR( vkGetDeviceProcAddr( device, "vkDestroySwapchainKHR" ) ); + vkGetSwapchainImagesKHR = PFN_vkGetSwapchainImagesKHR( vkGetDeviceProcAddr( device, "vkGetSwapchainImagesKHR" ) ); + vkAcquireNextImageKHR = PFN_vkAcquireNextImageKHR( vkGetDeviceProcAddr( device, "vkAcquireNextImageKHR" ) ); + vkQueuePresentKHR = PFN_vkQueuePresentKHR( vkGetDeviceProcAddr( device, "vkQueuePresentKHR" ) ); + vkGetDeviceGroupPresentCapabilitiesKHR = + PFN_vkGetDeviceGroupPresentCapabilitiesKHR( vkGetDeviceProcAddr( device, "vkGetDeviceGroupPresentCapabilitiesKHR" ) ); + vkGetDeviceGroupSurfacePresentModesKHR = + PFN_vkGetDeviceGroupSurfacePresentModesKHR( vkGetDeviceProcAddr( device, "vkGetDeviceGroupSurfacePresentModesKHR" ) ); + vkAcquireNextImage2KHR = PFN_vkAcquireNextImage2KHR( vkGetDeviceProcAddr( device, "vkAcquireNextImage2KHR" ) ); - //=== VK_KHR_display === - PFN_vkGetPhysicalDeviceDisplayPropertiesKHR vkGetPhysicalDeviceDisplayPropertiesKHR = 0; - PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR vkGetPhysicalDeviceDisplayPlanePropertiesKHR = 0; - PFN_vkGetDisplayPlaneSupportedDisplaysKHR vkGetDisplayPlaneSupportedDisplaysKHR = 0; - PFN_vkGetDisplayModePropertiesKHR vkGetDisplayModePropertiesKHR = 0; - PFN_vkCreateDisplayModeKHR vkCreateDisplayModeKHR = 0; - PFN_vkGetDisplayPlaneCapabilitiesKHR vkGetDisplayPlaneCapabilitiesKHR = 0; - PFN_vkCreateDisplayPlaneSurfaceKHR vkCreateDisplayPlaneSurfaceKHR = 0; + //=== VK_KHR_display_swapchain === + vkCreateSharedSwapchainsKHR = PFN_vkCreateSharedSwapchainsKHR( vkGetDeviceProcAddr( device, "vkCreateSharedSwapchainsKHR" ) ); -# if defined( VK_USE_PLATFORM_XLIB_KHR ) - //=== VK_KHR_xlib_surface === - PFN_vkCreateXlibSurfaceKHR vkCreateXlibSurfaceKHR = 0; - PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR vkGetPhysicalDeviceXlibPresentationSupportKHR = 0; -# else - PFN_dummy vkCreateXlibSurfaceKHR_placeholder = 0; - PFN_dummy vkGetPhysicalDeviceXlibPresentationSupportKHR_placeholder = 0; -# endif /*VK_USE_PLATFORM_XLIB_KHR*/ + //=== VK_EXT_debug_marker === + vkDebugMarkerSetObjectTagEXT = PFN_vkDebugMarkerSetObjectTagEXT( vkGetDeviceProcAddr( device, "vkDebugMarkerSetObjectTagEXT" ) ); + vkDebugMarkerSetObjectNameEXT = PFN_vkDebugMarkerSetObjectNameEXT( vkGetDeviceProcAddr( device, "vkDebugMarkerSetObjectNameEXT" ) ); + vkCmdDebugMarkerBeginEXT = PFN_vkCmdDebugMarkerBeginEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerBeginEXT" ) ); + vkCmdDebugMarkerEndEXT = PFN_vkCmdDebugMarkerEndEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerEndEXT" ) ); + vkCmdDebugMarkerInsertEXT = PFN_vkCmdDebugMarkerInsertEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerInsertEXT" ) ); -# if defined( VK_USE_PLATFORM_XCB_KHR ) - //=== VK_KHR_xcb_surface === - PFN_vkCreateXcbSurfaceKHR vkCreateXcbSurfaceKHR = 0; - PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR vkGetPhysicalDeviceXcbPresentationSupportKHR = 0; -# else - PFN_dummy vkCreateXcbSurfaceKHR_placeholder = 0; - PFN_dummy vkGetPhysicalDeviceXcbPresentationSupportKHR_placeholder = 0; -# endif /*VK_USE_PLATFORM_XCB_KHR*/ + //=== VK_KHR_video_queue === + vkCreateVideoSessionKHR = PFN_vkCreateVideoSessionKHR( vkGetDeviceProcAddr( device, "vkCreateVideoSessionKHR" ) ); + vkDestroyVideoSessionKHR = PFN_vkDestroyVideoSessionKHR( vkGetDeviceProcAddr( device, "vkDestroyVideoSessionKHR" ) ); + vkGetVideoSessionMemoryRequirementsKHR = + PFN_vkGetVideoSessionMemoryRequirementsKHR( vkGetDeviceProcAddr( device, "vkGetVideoSessionMemoryRequirementsKHR" ) ); + vkBindVideoSessionMemoryKHR = 
PFN_vkBindVideoSessionMemoryKHR( vkGetDeviceProcAddr( device, "vkBindVideoSessionMemoryKHR" ) ); + vkCreateVideoSessionParametersKHR = PFN_vkCreateVideoSessionParametersKHR( vkGetDeviceProcAddr( device, "vkCreateVideoSessionParametersKHR" ) ); + vkUpdateVideoSessionParametersKHR = PFN_vkUpdateVideoSessionParametersKHR( vkGetDeviceProcAddr( device, "vkUpdateVideoSessionParametersKHR" ) ); + vkDestroyVideoSessionParametersKHR = PFN_vkDestroyVideoSessionParametersKHR( vkGetDeviceProcAddr( device, "vkDestroyVideoSessionParametersKHR" ) ); + vkCmdBeginVideoCodingKHR = PFN_vkCmdBeginVideoCodingKHR( vkGetDeviceProcAddr( device, "vkCmdBeginVideoCodingKHR" ) ); + vkCmdEndVideoCodingKHR = PFN_vkCmdEndVideoCodingKHR( vkGetDeviceProcAddr( device, "vkCmdEndVideoCodingKHR" ) ); + vkCmdControlVideoCodingKHR = PFN_vkCmdControlVideoCodingKHR( vkGetDeviceProcAddr( device, "vkCmdControlVideoCodingKHR" ) ); -# if defined( VK_USE_PLATFORM_WAYLAND_KHR ) - //=== VK_KHR_wayland_surface === - PFN_vkCreateWaylandSurfaceKHR vkCreateWaylandSurfaceKHR = 0; - PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR vkGetPhysicalDeviceWaylandPresentationSupportKHR = 0; -# else - PFN_dummy vkCreateWaylandSurfaceKHR_placeholder = 0; - PFN_dummy vkGetPhysicalDeviceWaylandPresentationSupportKHR_placeholder = 0; -# endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ + //=== VK_KHR_video_decode_queue === + vkCmdDecodeVideoKHR = PFN_vkCmdDecodeVideoKHR( vkGetDeviceProcAddr( device, "vkCmdDecodeVideoKHR" ) ); + + //=== VK_EXT_transform_feedback === + vkCmdBindTransformFeedbackBuffersEXT = + PFN_vkCmdBindTransformFeedbackBuffersEXT( vkGetDeviceProcAddr( device, "vkCmdBindTransformFeedbackBuffersEXT" ) ); + vkCmdBeginTransformFeedbackEXT = PFN_vkCmdBeginTransformFeedbackEXT( vkGetDeviceProcAddr( device, "vkCmdBeginTransformFeedbackEXT" ) ); + vkCmdEndTransformFeedbackEXT = PFN_vkCmdEndTransformFeedbackEXT( vkGetDeviceProcAddr( device, "vkCmdEndTransformFeedbackEXT" ) ); + vkCmdBeginQueryIndexedEXT = PFN_vkCmdBeginQueryIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdBeginQueryIndexedEXT" ) ); + vkCmdEndQueryIndexedEXT = PFN_vkCmdEndQueryIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdEndQueryIndexedEXT" ) ); + vkCmdDrawIndirectByteCountEXT = PFN_vkCmdDrawIndirectByteCountEXT( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectByteCountEXT" ) ); + + //=== VK_NVX_binary_import === + vkCreateCuModuleNVX = PFN_vkCreateCuModuleNVX( vkGetDeviceProcAddr( device, "vkCreateCuModuleNVX" ) ); + vkCreateCuFunctionNVX = PFN_vkCreateCuFunctionNVX( vkGetDeviceProcAddr( device, "vkCreateCuFunctionNVX" ) ); + vkDestroyCuModuleNVX = PFN_vkDestroyCuModuleNVX( vkGetDeviceProcAddr( device, "vkDestroyCuModuleNVX" ) ); + vkDestroyCuFunctionNVX = PFN_vkDestroyCuFunctionNVX( vkGetDeviceProcAddr( device, "vkDestroyCuFunctionNVX" ) ); + vkCmdCuLaunchKernelNVX = PFN_vkCmdCuLaunchKernelNVX( vkGetDeviceProcAddr( device, "vkCmdCuLaunchKernelNVX" ) ); + + //=== VK_NVX_image_view_handle === + vkGetImageViewHandleNVX = PFN_vkGetImageViewHandleNVX( vkGetDeviceProcAddr( device, "vkGetImageViewHandleNVX" ) ); + vkGetImageViewAddressNVX = PFN_vkGetImageViewAddressNVX( vkGetDeviceProcAddr( device, "vkGetImageViewAddressNVX" ) ); + + //=== VK_AMD_draw_indirect_count === + vkCmdDrawIndirectCountAMD = PFN_vkCmdDrawIndirectCountAMD( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCountAMD" ) ); + if ( !vkCmdDrawIndirectCount ) + vkCmdDrawIndirectCount = vkCmdDrawIndirectCountAMD; + vkCmdDrawIndexedIndirectCountAMD = PFN_vkCmdDrawIndexedIndirectCountAMD( vkGetDeviceProcAddr( device, 
"vkCmdDrawIndexedIndirectCountAMD" ) ); + if ( !vkCmdDrawIndexedIndirectCount ) + vkCmdDrawIndexedIndirectCount = vkCmdDrawIndexedIndirectCountAMD; + + //=== VK_AMD_shader_info === + vkGetShaderInfoAMD = PFN_vkGetShaderInfoAMD( vkGetDeviceProcAddr( device, "vkGetShaderInfoAMD" ) ); + + //=== VK_KHR_dynamic_rendering === + vkCmdBeginRenderingKHR = PFN_vkCmdBeginRenderingKHR( vkGetDeviceProcAddr( device, "vkCmdBeginRenderingKHR" ) ); + if ( !vkCmdBeginRendering ) + vkCmdBeginRendering = vkCmdBeginRenderingKHR; + vkCmdEndRenderingKHR = PFN_vkCmdEndRenderingKHR( vkGetDeviceProcAddr( device, "vkCmdEndRenderingKHR" ) ); + if ( !vkCmdEndRendering ) + vkCmdEndRendering = vkCmdEndRenderingKHR; + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_NV_external_memory_win32 === + vkGetMemoryWin32HandleNV = PFN_vkGetMemoryWin32HandleNV( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandleNV" ) ); +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_device_group === + vkGetDeviceGroupPeerMemoryFeaturesKHR = + PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR( vkGetDeviceProcAddr( device, "vkGetDeviceGroupPeerMemoryFeaturesKHR" ) ); + if ( !vkGetDeviceGroupPeerMemoryFeatures ) + vkGetDeviceGroupPeerMemoryFeatures = vkGetDeviceGroupPeerMemoryFeaturesKHR; + vkCmdSetDeviceMaskKHR = PFN_vkCmdSetDeviceMaskKHR( vkGetDeviceProcAddr( device, "vkCmdSetDeviceMaskKHR" ) ); + if ( !vkCmdSetDeviceMask ) + vkCmdSetDeviceMask = vkCmdSetDeviceMaskKHR; + vkCmdDispatchBaseKHR = PFN_vkCmdDispatchBaseKHR( vkGetDeviceProcAddr( device, "vkCmdDispatchBaseKHR" ) ); + if ( !vkCmdDispatchBase ) + vkCmdDispatchBase = vkCmdDispatchBaseKHR; + + //=== VK_KHR_maintenance1 === + vkTrimCommandPoolKHR = PFN_vkTrimCommandPoolKHR( vkGetDeviceProcAddr( device, "vkTrimCommandPoolKHR" ) ); + if ( !vkTrimCommandPool ) + vkTrimCommandPool = vkTrimCommandPoolKHR; + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_memory_win32 === + vkGetMemoryWin32HandleKHR = PFN_vkGetMemoryWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandleKHR" ) ); + vkGetMemoryWin32HandlePropertiesKHR = PFN_vkGetMemoryWin32HandlePropertiesKHR( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandlePropertiesKHR" ) ); +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_memory_fd === + vkGetMemoryFdKHR = PFN_vkGetMemoryFdKHR( vkGetDeviceProcAddr( device, "vkGetMemoryFdKHR" ) ); + vkGetMemoryFdPropertiesKHR = PFN_vkGetMemoryFdPropertiesKHR( vkGetDeviceProcAddr( device, "vkGetMemoryFdPropertiesKHR" ) ); + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_semaphore_win32 === + vkImportSemaphoreWin32HandleKHR = PFN_vkImportSemaphoreWin32HandleKHR( vkGetDeviceProcAddr( device, "vkImportSemaphoreWin32HandleKHR" ) ); + vkGetSemaphoreWin32HandleKHR = PFN_vkGetSemaphoreWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreWin32HandleKHR" ) ); +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_semaphore_fd === + vkImportSemaphoreFdKHR = PFN_vkImportSemaphoreFdKHR( vkGetDeviceProcAddr( device, "vkImportSemaphoreFdKHR" ) ); + vkGetSemaphoreFdKHR = PFN_vkGetSemaphoreFdKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreFdKHR" ) ); + + //=== VK_KHR_push_descriptor === + vkCmdPushDescriptorSetKHR = PFN_vkCmdPushDescriptorSetKHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetKHR" ) ); + vkCmdPushDescriptorSetWithTemplateKHR = + PFN_vkCmdPushDescriptorSetWithTemplateKHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetWithTemplateKHR" ) ); + + //=== VK_EXT_conditional_rendering === + 
vkCmdBeginConditionalRenderingEXT = PFN_vkCmdBeginConditionalRenderingEXT( vkGetDeviceProcAddr( device, "vkCmdBeginConditionalRenderingEXT" ) ); + vkCmdEndConditionalRenderingEXT = PFN_vkCmdEndConditionalRenderingEXT( vkGetDeviceProcAddr( device, "vkCmdEndConditionalRenderingEXT" ) ); + + //=== VK_KHR_descriptor_update_template === + vkCreateDescriptorUpdateTemplateKHR = PFN_vkCreateDescriptorUpdateTemplateKHR( vkGetDeviceProcAddr( device, "vkCreateDescriptorUpdateTemplateKHR" ) ); + if ( !vkCreateDescriptorUpdateTemplate ) + vkCreateDescriptorUpdateTemplate = vkCreateDescriptorUpdateTemplateKHR; + vkDestroyDescriptorUpdateTemplateKHR = + PFN_vkDestroyDescriptorUpdateTemplateKHR( vkGetDeviceProcAddr( device, "vkDestroyDescriptorUpdateTemplateKHR" ) ); + if ( !vkDestroyDescriptorUpdateTemplate ) + vkDestroyDescriptorUpdateTemplate = vkDestroyDescriptorUpdateTemplateKHR; + vkUpdateDescriptorSetWithTemplateKHR = + PFN_vkUpdateDescriptorSetWithTemplateKHR( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSetWithTemplateKHR" ) ); + if ( !vkUpdateDescriptorSetWithTemplate ) + vkUpdateDescriptorSetWithTemplate = vkUpdateDescriptorSetWithTemplateKHR; + + //=== VK_NV_clip_space_w_scaling === + vkCmdSetViewportWScalingNV = PFN_vkCmdSetViewportWScalingNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportWScalingNV" ) ); + + //=== VK_EXT_display_control === + vkDisplayPowerControlEXT = PFN_vkDisplayPowerControlEXT( vkGetDeviceProcAddr( device, "vkDisplayPowerControlEXT" ) ); + vkRegisterDeviceEventEXT = PFN_vkRegisterDeviceEventEXT( vkGetDeviceProcAddr( device, "vkRegisterDeviceEventEXT" ) ); + vkRegisterDisplayEventEXT = PFN_vkRegisterDisplayEventEXT( vkGetDeviceProcAddr( device, "vkRegisterDisplayEventEXT" ) ); + vkGetSwapchainCounterEXT = PFN_vkGetSwapchainCounterEXT( vkGetDeviceProcAddr( device, "vkGetSwapchainCounterEXT" ) ); + + //=== VK_GOOGLE_display_timing === + vkGetRefreshCycleDurationGOOGLE = PFN_vkGetRefreshCycleDurationGOOGLE( vkGetDeviceProcAddr( device, "vkGetRefreshCycleDurationGOOGLE" ) ); + vkGetPastPresentationTimingGOOGLE = PFN_vkGetPastPresentationTimingGOOGLE( vkGetDeviceProcAddr( device, "vkGetPastPresentationTimingGOOGLE" ) ); + + //=== VK_EXT_discard_rectangles === + vkCmdSetDiscardRectangleEXT = PFN_vkCmdSetDiscardRectangleEXT( vkGetDeviceProcAddr( device, "vkCmdSetDiscardRectangleEXT" ) ); + vkCmdSetDiscardRectangleEnableEXT = PFN_vkCmdSetDiscardRectangleEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDiscardRectangleEnableEXT" ) ); + vkCmdSetDiscardRectangleModeEXT = PFN_vkCmdSetDiscardRectangleModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetDiscardRectangleModeEXT" ) ); + + //=== VK_EXT_hdr_metadata === + vkSetHdrMetadataEXT = PFN_vkSetHdrMetadataEXT( vkGetDeviceProcAddr( device, "vkSetHdrMetadataEXT" ) ); + + //=== VK_KHR_create_renderpass2 === + vkCreateRenderPass2KHR = PFN_vkCreateRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCreateRenderPass2KHR" ) ); + if ( !vkCreateRenderPass2 ) + vkCreateRenderPass2 = vkCreateRenderPass2KHR; + vkCmdBeginRenderPass2KHR = PFN_vkCmdBeginRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass2KHR" ) ); + if ( !vkCmdBeginRenderPass2 ) + vkCmdBeginRenderPass2 = vkCmdBeginRenderPass2KHR; + vkCmdNextSubpass2KHR = PFN_vkCmdNextSubpass2KHR( vkGetDeviceProcAddr( device, "vkCmdNextSubpass2KHR" ) ); + if ( !vkCmdNextSubpass2 ) + vkCmdNextSubpass2 = vkCmdNextSubpass2KHR; + vkCmdEndRenderPass2KHR = PFN_vkCmdEndRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass2KHR" ) ); + if ( !vkCmdEndRenderPass2 ) + vkCmdEndRenderPass2 = 
vkCmdEndRenderPass2KHR; + + //=== VK_KHR_shared_presentable_image === + vkGetSwapchainStatusKHR = PFN_vkGetSwapchainStatusKHR( vkGetDeviceProcAddr( device, "vkGetSwapchainStatusKHR" ) ); + +# if defined( VK_USE_PLATFORM_WIN32_KHR ) + //=== VK_KHR_external_fence_win32 === + vkImportFenceWin32HandleKHR = PFN_vkImportFenceWin32HandleKHR( vkGetDeviceProcAddr( device, "vkImportFenceWin32HandleKHR" ) ); + vkGetFenceWin32HandleKHR = PFN_vkGetFenceWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetFenceWin32HandleKHR" ) ); +# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + + //=== VK_KHR_external_fence_fd === + vkImportFenceFdKHR = PFN_vkImportFenceFdKHR( vkGetDeviceProcAddr( device, "vkImportFenceFdKHR" ) ); + vkGetFenceFdKHR = PFN_vkGetFenceFdKHR( vkGetDeviceProcAddr( device, "vkGetFenceFdKHR" ) ); + + //=== VK_KHR_performance_query === + vkAcquireProfilingLockKHR = PFN_vkAcquireProfilingLockKHR( vkGetDeviceProcAddr( device, "vkAcquireProfilingLockKHR" ) ); + vkReleaseProfilingLockKHR = PFN_vkReleaseProfilingLockKHR( vkGetDeviceProcAddr( device, "vkReleaseProfilingLockKHR" ) ); + + //=== VK_EXT_debug_utils === + vkSetDebugUtilsObjectNameEXT = PFN_vkSetDebugUtilsObjectNameEXT( vkGetDeviceProcAddr( device, "vkSetDebugUtilsObjectNameEXT" ) ); + vkSetDebugUtilsObjectTagEXT = PFN_vkSetDebugUtilsObjectTagEXT( vkGetDeviceProcAddr( device, "vkSetDebugUtilsObjectTagEXT" ) ); + vkQueueBeginDebugUtilsLabelEXT = PFN_vkQueueBeginDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueBeginDebugUtilsLabelEXT" ) ); + vkQueueEndDebugUtilsLabelEXT = PFN_vkQueueEndDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueEndDebugUtilsLabelEXT" ) ); + vkQueueInsertDebugUtilsLabelEXT = PFN_vkQueueInsertDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueInsertDebugUtilsLabelEXT" ) ); + vkCmdBeginDebugUtilsLabelEXT = PFN_vkCmdBeginDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdBeginDebugUtilsLabelEXT" ) ); + vkCmdEndDebugUtilsLabelEXT = PFN_vkCmdEndDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdEndDebugUtilsLabelEXT" ) ); + vkCmdInsertDebugUtilsLabelEXT = PFN_vkCmdInsertDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdInsertDebugUtilsLabelEXT" ) ); # if defined( VK_USE_PLATFORM_ANDROID_KHR ) - //=== VK_KHR_android_surface === - PFN_vkCreateAndroidSurfaceKHR vkCreateAndroidSurfaceKHR = 0; -# else - PFN_dummy vkCreateAndroidSurfaceKHR_placeholder = 0; + //=== VK_ANDROID_external_memory_android_hardware_buffer === + vkGetAndroidHardwareBufferPropertiesANDROID = + PFN_vkGetAndroidHardwareBufferPropertiesANDROID( vkGetDeviceProcAddr( device, "vkGetAndroidHardwareBufferPropertiesANDROID" ) ); + vkGetMemoryAndroidHardwareBufferANDROID = + PFN_vkGetMemoryAndroidHardwareBufferANDROID( vkGetDeviceProcAddr( device, "vkGetMemoryAndroidHardwareBufferANDROID" ) ); # endif /*VK_USE_PLATFORM_ANDROID_KHR*/ +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_AMDX_shader_enqueue === + vkCreateExecutionGraphPipelinesAMDX = PFN_vkCreateExecutionGraphPipelinesAMDX( vkGetDeviceProcAddr( device, "vkCreateExecutionGraphPipelinesAMDX" ) ); + vkGetExecutionGraphPipelineScratchSizeAMDX = + PFN_vkGetExecutionGraphPipelineScratchSizeAMDX( vkGetDeviceProcAddr( device, "vkGetExecutionGraphPipelineScratchSizeAMDX" ) ); + vkGetExecutionGraphPipelineNodeIndexAMDX = + PFN_vkGetExecutionGraphPipelineNodeIndexAMDX( vkGetDeviceProcAddr( device, "vkGetExecutionGraphPipelineNodeIndexAMDX" ) ); + vkCmdInitializeGraphScratchMemoryAMDX = + PFN_vkCmdInitializeGraphScratchMemoryAMDX( vkGetDeviceProcAddr( device, 
"vkCmdInitializeGraphScratchMemoryAMDX" ) ); + vkCmdDispatchGraphAMDX = PFN_vkCmdDispatchGraphAMDX( vkGetDeviceProcAddr( device, "vkCmdDispatchGraphAMDX" ) ); + vkCmdDispatchGraphIndirectAMDX = PFN_vkCmdDispatchGraphIndirectAMDX( vkGetDeviceProcAddr( device, "vkCmdDispatchGraphIndirectAMDX" ) ); + vkCmdDispatchGraphIndirectCountAMDX = PFN_vkCmdDispatchGraphIndirectCountAMDX( vkGetDeviceProcAddr( device, "vkCmdDispatchGraphIndirectCountAMDX" ) ); +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ + + //=== VK_EXT_sample_locations === + vkCmdSetSampleLocationsEXT = PFN_vkCmdSetSampleLocationsEXT( vkGetDeviceProcAddr( device, "vkCmdSetSampleLocationsEXT" ) ); + + //=== VK_KHR_get_memory_requirements2 === + vkGetImageMemoryRequirements2KHR = PFN_vkGetImageMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements2KHR" ) ); + if ( !vkGetImageMemoryRequirements2 ) + vkGetImageMemoryRequirements2 = vkGetImageMemoryRequirements2KHR; + vkGetBufferMemoryRequirements2KHR = PFN_vkGetBufferMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements2KHR" ) ); + if ( !vkGetBufferMemoryRequirements2 ) + vkGetBufferMemoryRequirements2 = vkGetBufferMemoryRequirements2KHR; + vkGetImageSparseMemoryRequirements2KHR = + PFN_vkGetImageSparseMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements2KHR" ) ); + if ( !vkGetImageSparseMemoryRequirements2 ) + vkGetImageSparseMemoryRequirements2 = vkGetImageSparseMemoryRequirements2KHR; + + //=== VK_KHR_acceleration_structure === + vkCreateAccelerationStructureKHR = PFN_vkCreateAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCreateAccelerationStructureKHR" ) ); + vkDestroyAccelerationStructureKHR = PFN_vkDestroyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkDestroyAccelerationStructureKHR" ) ); + vkCmdBuildAccelerationStructuresKHR = PFN_vkCmdBuildAccelerationStructuresKHR( vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructuresKHR" ) ); + vkCmdBuildAccelerationStructuresIndirectKHR = + PFN_vkCmdBuildAccelerationStructuresIndirectKHR( vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructuresIndirectKHR" ) ); + vkBuildAccelerationStructuresKHR = PFN_vkBuildAccelerationStructuresKHR( vkGetDeviceProcAddr( device, "vkBuildAccelerationStructuresKHR" ) ); + vkCopyAccelerationStructureKHR = PFN_vkCopyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCopyAccelerationStructureKHR" ) ); + vkCopyAccelerationStructureToMemoryKHR = + PFN_vkCopyAccelerationStructureToMemoryKHR( vkGetDeviceProcAddr( device, "vkCopyAccelerationStructureToMemoryKHR" ) ); + vkCopyMemoryToAccelerationStructureKHR = + PFN_vkCopyMemoryToAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCopyMemoryToAccelerationStructureKHR" ) ); + vkWriteAccelerationStructuresPropertiesKHR = + PFN_vkWriteAccelerationStructuresPropertiesKHR( vkGetDeviceProcAddr( device, "vkWriteAccelerationStructuresPropertiesKHR" ) ); + vkCmdCopyAccelerationStructureKHR = PFN_vkCmdCopyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureKHR" ) ); + vkCmdCopyAccelerationStructureToMemoryKHR = + PFN_vkCmdCopyAccelerationStructureToMemoryKHR( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureToMemoryKHR" ) ); + vkCmdCopyMemoryToAccelerationStructureKHR = + PFN_vkCmdCopyMemoryToAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCmdCopyMemoryToAccelerationStructureKHR" ) ); + vkGetAccelerationStructureDeviceAddressKHR = + PFN_vkGetAccelerationStructureDeviceAddressKHR( 
vkGetDeviceProcAddr( device, "vkGetAccelerationStructureDeviceAddressKHR" ) ); + vkCmdWriteAccelerationStructuresPropertiesKHR = + PFN_vkCmdWriteAccelerationStructuresPropertiesKHR( vkGetDeviceProcAddr( device, "vkCmdWriteAccelerationStructuresPropertiesKHR" ) ); + vkGetDeviceAccelerationStructureCompatibilityKHR = + PFN_vkGetDeviceAccelerationStructureCompatibilityKHR( vkGetDeviceProcAddr( device, "vkGetDeviceAccelerationStructureCompatibilityKHR" ) ); + vkGetAccelerationStructureBuildSizesKHR = + PFN_vkGetAccelerationStructureBuildSizesKHR( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureBuildSizesKHR" ) ); + + //=== VK_KHR_ray_tracing_pipeline === + vkCmdTraceRaysKHR = PFN_vkCmdTraceRaysKHR( vkGetDeviceProcAddr( device, "vkCmdTraceRaysKHR" ) ); + vkCreateRayTracingPipelinesKHR = PFN_vkCreateRayTracingPipelinesKHR( vkGetDeviceProcAddr( device, "vkCreateRayTracingPipelinesKHR" ) ); + vkGetRayTracingShaderGroupHandlesKHR = + PFN_vkGetRayTracingShaderGroupHandlesKHR( vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupHandlesKHR" ) ); + vkGetRayTracingCaptureReplayShaderGroupHandlesKHR = + PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( vkGetDeviceProcAddr( device, "vkGetRayTracingCaptureReplayShaderGroupHandlesKHR" ) ); + vkCmdTraceRaysIndirectKHR = PFN_vkCmdTraceRaysIndirectKHR( vkGetDeviceProcAddr( device, "vkCmdTraceRaysIndirectKHR" ) ); + vkGetRayTracingShaderGroupStackSizeKHR = + PFN_vkGetRayTracingShaderGroupStackSizeKHR( vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupStackSizeKHR" ) ); + vkCmdSetRayTracingPipelineStackSizeKHR = + PFN_vkCmdSetRayTracingPipelineStackSizeKHR( vkGetDeviceProcAddr( device, "vkCmdSetRayTracingPipelineStackSizeKHR" ) ); + + //=== VK_KHR_sampler_ycbcr_conversion === + vkCreateSamplerYcbcrConversionKHR = PFN_vkCreateSamplerYcbcrConversionKHR( vkGetDeviceProcAddr( device, "vkCreateSamplerYcbcrConversionKHR" ) ); + if ( !vkCreateSamplerYcbcrConversion ) + vkCreateSamplerYcbcrConversion = vkCreateSamplerYcbcrConversionKHR; + vkDestroySamplerYcbcrConversionKHR = PFN_vkDestroySamplerYcbcrConversionKHR( vkGetDeviceProcAddr( device, "vkDestroySamplerYcbcrConversionKHR" ) ); + if ( !vkDestroySamplerYcbcrConversion ) + vkDestroySamplerYcbcrConversion = vkDestroySamplerYcbcrConversionKHR; + + //=== VK_KHR_bind_memory2 === + vkBindBufferMemory2KHR = PFN_vkBindBufferMemory2KHR( vkGetDeviceProcAddr( device, "vkBindBufferMemory2KHR" ) ); + if ( !vkBindBufferMemory2 ) + vkBindBufferMemory2 = vkBindBufferMemory2KHR; + vkBindImageMemory2KHR = PFN_vkBindImageMemory2KHR( vkGetDeviceProcAddr( device, "vkBindImageMemory2KHR" ) ); + if ( !vkBindImageMemory2 ) + vkBindImageMemory2 = vkBindImageMemory2KHR; + + //=== VK_EXT_image_drm_format_modifier === + vkGetImageDrmFormatModifierPropertiesEXT = + PFN_vkGetImageDrmFormatModifierPropertiesEXT( vkGetDeviceProcAddr( device, "vkGetImageDrmFormatModifierPropertiesEXT" ) ); + + //=== VK_EXT_validation_cache === + vkCreateValidationCacheEXT = PFN_vkCreateValidationCacheEXT( vkGetDeviceProcAddr( device, "vkCreateValidationCacheEXT" ) ); + vkDestroyValidationCacheEXT = PFN_vkDestroyValidationCacheEXT( vkGetDeviceProcAddr( device, "vkDestroyValidationCacheEXT" ) ); + vkMergeValidationCachesEXT = PFN_vkMergeValidationCachesEXT( vkGetDeviceProcAddr( device, "vkMergeValidationCachesEXT" ) ); + vkGetValidationCacheDataEXT = PFN_vkGetValidationCacheDataEXT( vkGetDeviceProcAddr( device, "vkGetValidationCacheDataEXT" ) ); + + //=== VK_NV_shading_rate_image === + vkCmdBindShadingRateImageNV = 
PFN_vkCmdBindShadingRateImageNV( vkGetDeviceProcAddr( device, "vkCmdBindShadingRateImageNV" ) ); + vkCmdSetViewportShadingRatePaletteNV = + PFN_vkCmdSetViewportShadingRatePaletteNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportShadingRatePaletteNV" ) ); + vkCmdSetCoarseSampleOrderNV = PFN_vkCmdSetCoarseSampleOrderNV( vkGetDeviceProcAddr( device, "vkCmdSetCoarseSampleOrderNV" ) ); + + //=== VK_NV_ray_tracing === + vkCreateAccelerationStructureNV = PFN_vkCreateAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCreateAccelerationStructureNV" ) ); + vkDestroyAccelerationStructureNV = PFN_vkDestroyAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkDestroyAccelerationStructureNV" ) ); + vkGetAccelerationStructureMemoryRequirementsNV = + PFN_vkGetAccelerationStructureMemoryRequirementsNV( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureMemoryRequirementsNV" ) ); + vkBindAccelerationStructureMemoryNV = PFN_vkBindAccelerationStructureMemoryNV( vkGetDeviceProcAddr( device, "vkBindAccelerationStructureMemoryNV" ) ); + vkCmdBuildAccelerationStructureNV = PFN_vkCmdBuildAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructureNV" ) ); + vkCmdCopyAccelerationStructureNV = PFN_vkCmdCopyAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureNV" ) ); + vkCmdTraceRaysNV = PFN_vkCmdTraceRaysNV( vkGetDeviceProcAddr( device, "vkCmdTraceRaysNV" ) ); + vkCreateRayTracingPipelinesNV = PFN_vkCreateRayTracingPipelinesNV( vkGetDeviceProcAddr( device, "vkCreateRayTracingPipelinesNV" ) ); + vkGetRayTracingShaderGroupHandlesNV = PFN_vkGetRayTracingShaderGroupHandlesNV( vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupHandlesNV" ) ); + if ( !vkGetRayTracingShaderGroupHandlesKHR ) + vkGetRayTracingShaderGroupHandlesKHR = vkGetRayTracingShaderGroupHandlesNV; + vkGetAccelerationStructureHandleNV = PFN_vkGetAccelerationStructureHandleNV( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureHandleNV" ) ); + vkCmdWriteAccelerationStructuresPropertiesNV = + PFN_vkCmdWriteAccelerationStructuresPropertiesNV( vkGetDeviceProcAddr( device, "vkCmdWriteAccelerationStructuresPropertiesNV" ) ); + vkCompileDeferredNV = PFN_vkCompileDeferredNV( vkGetDeviceProcAddr( device, "vkCompileDeferredNV" ) ); + + //=== VK_KHR_maintenance3 === + vkGetDescriptorSetLayoutSupportKHR = PFN_vkGetDescriptorSetLayoutSupportKHR( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutSupportKHR" ) ); + if ( !vkGetDescriptorSetLayoutSupport ) + vkGetDescriptorSetLayoutSupport = vkGetDescriptorSetLayoutSupportKHR; + + //=== VK_KHR_draw_indirect_count === + vkCmdDrawIndirectCountKHR = PFN_vkCmdDrawIndirectCountKHR( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCountKHR" ) ); + if ( !vkCmdDrawIndirectCount ) + vkCmdDrawIndirectCount = vkCmdDrawIndirectCountKHR; + vkCmdDrawIndexedIndirectCountKHR = PFN_vkCmdDrawIndexedIndirectCountKHR( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCountKHR" ) ); + if ( !vkCmdDrawIndexedIndirectCount ) + vkCmdDrawIndexedIndirectCount = vkCmdDrawIndexedIndirectCountKHR; + + //=== VK_EXT_external_memory_host === + vkGetMemoryHostPointerPropertiesEXT = PFN_vkGetMemoryHostPointerPropertiesEXT( vkGetDeviceProcAddr( device, "vkGetMemoryHostPointerPropertiesEXT" ) ); + + //=== VK_AMD_buffer_marker === + vkCmdWriteBufferMarkerAMD = PFN_vkCmdWriteBufferMarkerAMD( vkGetDeviceProcAddr( device, "vkCmdWriteBufferMarkerAMD" ) ); + vkCmdWriteBufferMarker2AMD = PFN_vkCmdWriteBufferMarker2AMD( vkGetDeviceProcAddr( device, "vkCmdWriteBufferMarker2AMD" 
) ); + + //=== VK_EXT_calibrated_timestamps === + vkGetCalibratedTimestampsEXT = PFN_vkGetCalibratedTimestampsEXT( vkGetDeviceProcAddr( device, "vkGetCalibratedTimestampsEXT" ) ); + if ( !vkGetCalibratedTimestampsKHR ) + vkGetCalibratedTimestampsKHR = vkGetCalibratedTimestampsEXT; + + //=== VK_NV_mesh_shader === + vkCmdDrawMeshTasksNV = PFN_vkCmdDrawMeshTasksNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksNV" ) ); + vkCmdDrawMeshTasksIndirectNV = PFN_vkCmdDrawMeshTasksIndirectNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectNV" ) ); + vkCmdDrawMeshTasksIndirectCountNV = PFN_vkCmdDrawMeshTasksIndirectCountNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectCountNV" ) ); + + //=== VK_NV_scissor_exclusive === + vkCmdSetExclusiveScissorEnableNV = PFN_vkCmdSetExclusiveScissorEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetExclusiveScissorEnableNV" ) ); + vkCmdSetExclusiveScissorNV = PFN_vkCmdSetExclusiveScissorNV( vkGetDeviceProcAddr( device, "vkCmdSetExclusiveScissorNV" ) ); + + //=== VK_NV_device_diagnostic_checkpoints === + vkCmdSetCheckpointNV = PFN_vkCmdSetCheckpointNV( vkGetDeviceProcAddr( device, "vkCmdSetCheckpointNV" ) ); + vkGetQueueCheckpointDataNV = PFN_vkGetQueueCheckpointDataNV( vkGetDeviceProcAddr( device, "vkGetQueueCheckpointDataNV" ) ); + vkGetQueueCheckpointData2NV = PFN_vkGetQueueCheckpointData2NV( vkGetDeviceProcAddr( device, "vkGetQueueCheckpointData2NV" ) ); + + //=== VK_KHR_timeline_semaphore === + vkGetSemaphoreCounterValueKHR = PFN_vkGetSemaphoreCounterValueKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreCounterValueKHR" ) ); + if ( !vkGetSemaphoreCounterValue ) + vkGetSemaphoreCounterValue = vkGetSemaphoreCounterValueKHR; + vkWaitSemaphoresKHR = PFN_vkWaitSemaphoresKHR( vkGetDeviceProcAddr( device, "vkWaitSemaphoresKHR" ) ); + if ( !vkWaitSemaphores ) + vkWaitSemaphores = vkWaitSemaphoresKHR; + vkSignalSemaphoreKHR = PFN_vkSignalSemaphoreKHR( vkGetDeviceProcAddr( device, "vkSignalSemaphoreKHR" ) ); + if ( !vkSignalSemaphore ) + vkSignalSemaphore = vkSignalSemaphoreKHR; + + //=== VK_INTEL_performance_query === + vkInitializePerformanceApiINTEL = PFN_vkInitializePerformanceApiINTEL( vkGetDeviceProcAddr( device, "vkInitializePerformanceApiINTEL" ) ); + vkUninitializePerformanceApiINTEL = PFN_vkUninitializePerformanceApiINTEL( vkGetDeviceProcAddr( device, "vkUninitializePerformanceApiINTEL" ) ); + vkCmdSetPerformanceMarkerINTEL = PFN_vkCmdSetPerformanceMarkerINTEL( vkGetDeviceProcAddr( device, "vkCmdSetPerformanceMarkerINTEL" ) ); + vkCmdSetPerformanceStreamMarkerINTEL = + PFN_vkCmdSetPerformanceStreamMarkerINTEL( vkGetDeviceProcAddr( device, "vkCmdSetPerformanceStreamMarkerINTEL" ) ); + vkCmdSetPerformanceOverrideINTEL = PFN_vkCmdSetPerformanceOverrideINTEL( vkGetDeviceProcAddr( device, "vkCmdSetPerformanceOverrideINTEL" ) ); + vkAcquirePerformanceConfigurationINTEL = + PFN_vkAcquirePerformanceConfigurationINTEL( vkGetDeviceProcAddr( device, "vkAcquirePerformanceConfigurationINTEL" ) ); + vkReleasePerformanceConfigurationINTEL = + PFN_vkReleasePerformanceConfigurationINTEL( vkGetDeviceProcAddr( device, "vkReleasePerformanceConfigurationINTEL" ) ); + vkQueueSetPerformanceConfigurationINTEL = + PFN_vkQueueSetPerformanceConfigurationINTEL( vkGetDeviceProcAddr( device, "vkQueueSetPerformanceConfigurationINTEL" ) ); + vkGetPerformanceParameterINTEL = PFN_vkGetPerformanceParameterINTEL( vkGetDeviceProcAddr( device, "vkGetPerformanceParameterINTEL" ) ); + + //=== VK_AMD_display_native_hdr === + vkSetLocalDimmingAMD = PFN_vkSetLocalDimmingAMD( 
vkGetDeviceProcAddr( device, "vkSetLocalDimmingAMD" ) ); + + //=== VK_KHR_fragment_shading_rate === + vkCmdSetFragmentShadingRateKHR = PFN_vkCmdSetFragmentShadingRateKHR( vkGetDeviceProcAddr( device, "vkCmdSetFragmentShadingRateKHR" ) ); + + //=== VK_KHR_dynamic_rendering_local_read === + vkCmdSetRenderingAttachmentLocationsKHR = + PFN_vkCmdSetRenderingAttachmentLocationsKHR( vkGetDeviceProcAddr( device, "vkCmdSetRenderingAttachmentLocationsKHR" ) ); + vkCmdSetRenderingInputAttachmentIndicesKHR = + PFN_vkCmdSetRenderingInputAttachmentIndicesKHR( vkGetDeviceProcAddr( device, "vkCmdSetRenderingInputAttachmentIndicesKHR" ) ); + + //=== VK_EXT_buffer_device_address === + vkGetBufferDeviceAddressEXT = PFN_vkGetBufferDeviceAddressEXT( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddressEXT" ) ); + if ( !vkGetBufferDeviceAddress ) + vkGetBufferDeviceAddress = vkGetBufferDeviceAddressEXT; + + //=== VK_KHR_present_wait === + vkWaitForPresentKHR = PFN_vkWaitForPresentKHR( vkGetDeviceProcAddr( device, "vkWaitForPresentKHR" ) ); + # if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_KHR_win32_surface === - PFN_vkCreateWin32SurfaceKHR vkCreateWin32SurfaceKHR = 0; - PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR vkGetPhysicalDeviceWin32PresentationSupportKHR = 0; -# else - PFN_dummy vkCreateWin32SurfaceKHR_placeholder = 0; - PFN_dummy vkGetPhysicalDeviceWin32PresentationSupportKHR_placeholder = 0; + //=== VK_EXT_full_screen_exclusive === + vkAcquireFullScreenExclusiveModeEXT = PFN_vkAcquireFullScreenExclusiveModeEXT( vkGetDeviceProcAddr( device, "vkAcquireFullScreenExclusiveModeEXT" ) ); + vkReleaseFullScreenExclusiveModeEXT = PFN_vkReleaseFullScreenExclusiveModeEXT( vkGetDeviceProcAddr( device, "vkReleaseFullScreenExclusiveModeEXT" ) ); + vkGetDeviceGroupSurfacePresentModes2EXT = + PFN_vkGetDeviceGroupSurfacePresentModes2EXT( vkGetDeviceProcAddr( device, "vkGetDeviceGroupSurfacePresentModes2EXT" ) ); # endif /*VK_USE_PLATFORM_WIN32_KHR*/ - //=== VK_EXT_debug_report === - PFN_vkCreateDebugReportCallbackEXT vkCreateDebugReportCallbackEXT = 0; - PFN_vkDestroyDebugReportCallbackEXT vkDestroyDebugReportCallbackEXT = 0; - PFN_vkDebugReportMessageEXT vkDebugReportMessageEXT = 0; + //=== VK_KHR_buffer_device_address === + vkGetBufferDeviceAddressKHR = PFN_vkGetBufferDeviceAddressKHR( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddressKHR" ) ); + if ( !vkGetBufferDeviceAddress ) + vkGetBufferDeviceAddress = vkGetBufferDeviceAddressKHR; + vkGetBufferOpaqueCaptureAddressKHR = PFN_vkGetBufferOpaqueCaptureAddressKHR( vkGetDeviceProcAddr( device, "vkGetBufferOpaqueCaptureAddressKHR" ) ); + if ( !vkGetBufferOpaqueCaptureAddress ) + vkGetBufferOpaqueCaptureAddress = vkGetBufferOpaqueCaptureAddressKHR; + vkGetDeviceMemoryOpaqueCaptureAddressKHR = + PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR( vkGetDeviceProcAddr( device, "vkGetDeviceMemoryOpaqueCaptureAddressKHR" ) ); + if ( !vkGetDeviceMemoryOpaqueCaptureAddress ) + vkGetDeviceMemoryOpaqueCaptureAddress = vkGetDeviceMemoryOpaqueCaptureAddressKHR; - //=== VK_KHR_video_queue === - PFN_vkGetPhysicalDeviceVideoCapabilitiesKHR vkGetPhysicalDeviceVideoCapabilitiesKHR = 0; - PFN_vkGetPhysicalDeviceVideoFormatPropertiesKHR vkGetPhysicalDeviceVideoFormatPropertiesKHR = 0; + //=== VK_EXT_line_rasterization === + vkCmdSetLineStippleEXT = PFN_vkCmdSetLineStippleEXT( vkGetDeviceProcAddr( device, "vkCmdSetLineStippleEXT" ) ); + if ( !vkCmdSetLineStippleKHR ) + vkCmdSetLineStippleKHR = vkCmdSetLineStippleEXT; -# if defined( VK_USE_PLATFORM_GGP ) - //=== 
VK_GGP_stream_descriptor_surface === - PFN_vkCreateStreamDescriptorSurfaceGGP vkCreateStreamDescriptorSurfaceGGP = 0; -# else - PFN_dummy vkCreateStreamDescriptorSurfaceGGP_placeholder = 0; -# endif /*VK_USE_PLATFORM_GGP*/ + //=== VK_EXT_host_query_reset === + vkResetQueryPoolEXT = PFN_vkResetQueryPoolEXT( vkGetDeviceProcAddr( device, "vkResetQueryPoolEXT" ) ); + if ( !vkResetQueryPool ) + vkResetQueryPool = vkResetQueryPoolEXT; - //=== VK_NV_external_memory_capabilities === - PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV vkGetPhysicalDeviceExternalImageFormatPropertiesNV = 0; + //=== VK_EXT_extended_dynamic_state === + vkCmdSetCullModeEXT = PFN_vkCmdSetCullModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetCullModeEXT" ) ); + if ( !vkCmdSetCullMode ) + vkCmdSetCullMode = vkCmdSetCullModeEXT; + vkCmdSetFrontFaceEXT = PFN_vkCmdSetFrontFaceEXT( vkGetDeviceProcAddr( device, "vkCmdSetFrontFaceEXT" ) ); + if ( !vkCmdSetFrontFace ) + vkCmdSetFrontFace = vkCmdSetFrontFaceEXT; + vkCmdSetPrimitiveTopologyEXT = PFN_vkCmdSetPrimitiveTopologyEXT( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveTopologyEXT" ) ); + if ( !vkCmdSetPrimitiveTopology ) + vkCmdSetPrimitiveTopology = vkCmdSetPrimitiveTopologyEXT; + vkCmdSetViewportWithCountEXT = PFN_vkCmdSetViewportWithCountEXT( vkGetDeviceProcAddr( device, "vkCmdSetViewportWithCountEXT" ) ); + if ( !vkCmdSetViewportWithCount ) + vkCmdSetViewportWithCount = vkCmdSetViewportWithCountEXT; + vkCmdSetScissorWithCountEXT = PFN_vkCmdSetScissorWithCountEXT( vkGetDeviceProcAddr( device, "vkCmdSetScissorWithCountEXT" ) ); + if ( !vkCmdSetScissorWithCount ) + vkCmdSetScissorWithCount = vkCmdSetScissorWithCountEXT; + vkCmdBindVertexBuffers2EXT = PFN_vkCmdBindVertexBuffers2EXT( vkGetDeviceProcAddr( device, "vkCmdBindVertexBuffers2EXT" ) ); + if ( !vkCmdBindVertexBuffers2 ) + vkCmdBindVertexBuffers2 = vkCmdBindVertexBuffers2EXT; + vkCmdSetDepthTestEnableEXT = PFN_vkCmdSetDepthTestEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthTestEnableEXT" ) ); + if ( !vkCmdSetDepthTestEnable ) + vkCmdSetDepthTestEnable = vkCmdSetDepthTestEnableEXT; + vkCmdSetDepthWriteEnableEXT = PFN_vkCmdSetDepthWriteEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthWriteEnableEXT" ) ); + if ( !vkCmdSetDepthWriteEnable ) + vkCmdSetDepthWriteEnable = vkCmdSetDepthWriteEnableEXT; + vkCmdSetDepthCompareOpEXT = PFN_vkCmdSetDepthCompareOpEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthCompareOpEXT" ) ); + if ( !vkCmdSetDepthCompareOp ) + vkCmdSetDepthCompareOp = vkCmdSetDepthCompareOpEXT; + vkCmdSetDepthBoundsTestEnableEXT = PFN_vkCmdSetDepthBoundsTestEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthBoundsTestEnableEXT" ) ); + if ( !vkCmdSetDepthBoundsTestEnable ) + vkCmdSetDepthBoundsTestEnable = vkCmdSetDepthBoundsTestEnableEXT; + vkCmdSetStencilTestEnableEXT = PFN_vkCmdSetStencilTestEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetStencilTestEnableEXT" ) ); + if ( !vkCmdSetStencilTestEnable ) + vkCmdSetStencilTestEnable = vkCmdSetStencilTestEnableEXT; + vkCmdSetStencilOpEXT = PFN_vkCmdSetStencilOpEXT( vkGetDeviceProcAddr( device, "vkCmdSetStencilOpEXT" ) ); + if ( !vkCmdSetStencilOp ) + vkCmdSetStencilOp = vkCmdSetStencilOpEXT; - //=== VK_KHR_get_physical_device_properties2 === - PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR = 0; - PFN_vkGetPhysicalDeviceProperties2KHR vkGetPhysicalDeviceProperties2KHR = 0; - PFN_vkGetPhysicalDeviceFormatProperties2KHR vkGetPhysicalDeviceFormatProperties2KHR = 0; - PFN_vkGetPhysicalDeviceImageFormatProperties2KHR 
vkGetPhysicalDeviceImageFormatProperties2KHR = 0; - PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR vkGetPhysicalDeviceQueueFamilyProperties2KHR = 0; - PFN_vkGetPhysicalDeviceMemoryProperties2KHR vkGetPhysicalDeviceMemoryProperties2KHR = 0; - PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR vkGetPhysicalDeviceSparseImageFormatProperties2KHR = 0; + //=== VK_KHR_deferred_host_operations === + vkCreateDeferredOperationKHR = PFN_vkCreateDeferredOperationKHR( vkGetDeviceProcAddr( device, "vkCreateDeferredOperationKHR" ) ); + vkDestroyDeferredOperationKHR = PFN_vkDestroyDeferredOperationKHR( vkGetDeviceProcAddr( device, "vkDestroyDeferredOperationKHR" ) ); + vkGetDeferredOperationMaxConcurrencyKHR = + PFN_vkGetDeferredOperationMaxConcurrencyKHR( vkGetDeviceProcAddr( device, "vkGetDeferredOperationMaxConcurrencyKHR" ) ); + vkGetDeferredOperationResultKHR = PFN_vkGetDeferredOperationResultKHR( vkGetDeviceProcAddr( device, "vkGetDeferredOperationResultKHR" ) ); + vkDeferredOperationJoinKHR = PFN_vkDeferredOperationJoinKHR( vkGetDeviceProcAddr( device, "vkDeferredOperationJoinKHR" ) ); -# if defined( VK_USE_PLATFORM_VI_NN ) - //=== VK_NN_vi_surface === - PFN_vkCreateViSurfaceNN vkCreateViSurfaceNN = 0; -# else - PFN_dummy vkCreateViSurfaceNN_placeholder = 0; -# endif /*VK_USE_PLATFORM_VI_NN*/ + //=== VK_KHR_pipeline_executable_properties === + vkGetPipelineExecutablePropertiesKHR = + PFN_vkGetPipelineExecutablePropertiesKHR( vkGetDeviceProcAddr( device, "vkGetPipelineExecutablePropertiesKHR" ) ); + vkGetPipelineExecutableStatisticsKHR = + PFN_vkGetPipelineExecutableStatisticsKHR( vkGetDeviceProcAddr( device, "vkGetPipelineExecutableStatisticsKHR" ) ); + vkGetPipelineExecutableInternalRepresentationsKHR = + PFN_vkGetPipelineExecutableInternalRepresentationsKHR( vkGetDeviceProcAddr( device, "vkGetPipelineExecutableInternalRepresentationsKHR" ) ); - //=== VK_KHR_device_group_creation === - PFN_vkEnumeratePhysicalDeviceGroupsKHR vkEnumeratePhysicalDeviceGroupsKHR = 0; + //=== VK_EXT_host_image_copy === + vkCopyMemoryToImageEXT = PFN_vkCopyMemoryToImageEXT( vkGetDeviceProcAddr( device, "vkCopyMemoryToImageEXT" ) ); + vkCopyImageToMemoryEXT = PFN_vkCopyImageToMemoryEXT( vkGetDeviceProcAddr( device, "vkCopyImageToMemoryEXT" ) ); + vkCopyImageToImageEXT = PFN_vkCopyImageToImageEXT( vkGetDeviceProcAddr( device, "vkCopyImageToImageEXT" ) ); + vkTransitionImageLayoutEXT = PFN_vkTransitionImageLayoutEXT( vkGetDeviceProcAddr( device, "vkTransitionImageLayoutEXT" ) ); + vkGetImageSubresourceLayout2EXT = PFN_vkGetImageSubresourceLayout2EXT( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout2EXT" ) ); + if ( !vkGetImageSubresourceLayout2KHR ) + vkGetImageSubresourceLayout2KHR = vkGetImageSubresourceLayout2EXT; - //=== VK_KHR_external_memory_capabilities === - PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR vkGetPhysicalDeviceExternalBufferPropertiesKHR = 0; + //=== VK_KHR_map_memory2 === + vkMapMemory2KHR = PFN_vkMapMemory2KHR( vkGetDeviceProcAddr( device, "vkMapMemory2KHR" ) ); + vkUnmapMemory2KHR = PFN_vkUnmapMemory2KHR( vkGetDeviceProcAddr( device, "vkUnmapMemory2KHR" ) ); - //=== VK_KHR_external_semaphore_capabilities === - PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR vkGetPhysicalDeviceExternalSemaphorePropertiesKHR = 0; + //=== VK_EXT_swapchain_maintenance1 === + vkReleaseSwapchainImagesEXT = PFN_vkReleaseSwapchainImagesEXT( vkGetDeviceProcAddr( device, "vkReleaseSwapchainImagesEXT" ) ); - //=== VK_EXT_direct_mode_display === - PFN_vkReleaseDisplayEXT vkReleaseDisplayEXT = 0; + //=== 
VK_NV_device_generated_commands === + vkGetGeneratedCommandsMemoryRequirementsNV = + PFN_vkGetGeneratedCommandsMemoryRequirementsNV( vkGetDeviceProcAddr( device, "vkGetGeneratedCommandsMemoryRequirementsNV" ) ); + vkCmdPreprocessGeneratedCommandsNV = PFN_vkCmdPreprocessGeneratedCommandsNV( vkGetDeviceProcAddr( device, "vkCmdPreprocessGeneratedCommandsNV" ) ); + vkCmdExecuteGeneratedCommandsNV = PFN_vkCmdExecuteGeneratedCommandsNV( vkGetDeviceProcAddr( device, "vkCmdExecuteGeneratedCommandsNV" ) ); + vkCmdBindPipelineShaderGroupNV = PFN_vkCmdBindPipelineShaderGroupNV( vkGetDeviceProcAddr( device, "vkCmdBindPipelineShaderGroupNV" ) ); + vkCreateIndirectCommandsLayoutNV = PFN_vkCreateIndirectCommandsLayoutNV( vkGetDeviceProcAddr( device, "vkCreateIndirectCommandsLayoutNV" ) ); + vkDestroyIndirectCommandsLayoutNV = PFN_vkDestroyIndirectCommandsLayoutNV( vkGetDeviceProcAddr( device, "vkDestroyIndirectCommandsLayoutNV" ) ); -# if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT ) - //=== VK_EXT_acquire_xlib_display === - PFN_vkAcquireXlibDisplayEXT vkAcquireXlibDisplayEXT = 0; - PFN_vkGetRandROutputDisplayEXT vkGetRandROutputDisplayEXT = 0; -# else - PFN_dummy vkAcquireXlibDisplayEXT_placeholder = 0; - PFN_dummy vkGetRandROutputDisplayEXT_placeholder = 0; -# endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ + //=== VK_EXT_depth_bias_control === + vkCmdSetDepthBias2EXT = PFN_vkCmdSetDepthBias2EXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthBias2EXT" ) ); - //=== VK_EXT_display_surface_counter === - PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT vkGetPhysicalDeviceSurfaceCapabilities2EXT = 0; + //=== VK_EXT_private_data === + vkCreatePrivateDataSlotEXT = PFN_vkCreatePrivateDataSlotEXT( vkGetDeviceProcAddr( device, "vkCreatePrivateDataSlotEXT" ) ); + if ( !vkCreatePrivateDataSlot ) + vkCreatePrivateDataSlot = vkCreatePrivateDataSlotEXT; + vkDestroyPrivateDataSlotEXT = PFN_vkDestroyPrivateDataSlotEXT( vkGetDeviceProcAddr( device, "vkDestroyPrivateDataSlotEXT" ) ); + if ( !vkDestroyPrivateDataSlot ) + vkDestroyPrivateDataSlot = vkDestroyPrivateDataSlotEXT; + vkSetPrivateDataEXT = PFN_vkSetPrivateDataEXT( vkGetDeviceProcAddr( device, "vkSetPrivateDataEXT" ) ); + if ( !vkSetPrivateData ) + vkSetPrivateData = vkSetPrivateDataEXT; + vkGetPrivateDataEXT = PFN_vkGetPrivateDataEXT( vkGetDeviceProcAddr( device, "vkGetPrivateDataEXT" ) ); + if ( !vkGetPrivateData ) + vkGetPrivateData = vkGetPrivateDataEXT; - //=== VK_KHR_external_fence_capabilities === - PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR vkGetPhysicalDeviceExternalFencePropertiesKHR = 0; + //=== VK_KHR_video_encode_queue === + vkGetEncodedVideoSessionParametersKHR = + PFN_vkGetEncodedVideoSessionParametersKHR( vkGetDeviceProcAddr( device, "vkGetEncodedVideoSessionParametersKHR" ) ); + vkCmdEncodeVideoKHR = PFN_vkCmdEncodeVideoKHR( vkGetDeviceProcAddr( device, "vkCmdEncodeVideoKHR" ) ); - //=== VK_KHR_performance_query === - PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR = 0; - PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR = 0; - - //=== VK_KHR_get_surface_capabilities2 === - PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR vkGetPhysicalDeviceSurfaceCapabilities2KHR = 0; - PFN_vkGetPhysicalDeviceSurfaceFormats2KHR vkGetPhysicalDeviceSurfaceFormats2KHR = 0; - - //=== VK_KHR_get_display_properties2 === - PFN_vkGetPhysicalDeviceDisplayProperties2KHR vkGetPhysicalDeviceDisplayProperties2KHR = 0; - 
PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR vkGetPhysicalDeviceDisplayPlaneProperties2KHR = 0; - PFN_vkGetDisplayModeProperties2KHR vkGetDisplayModeProperties2KHR = 0; - PFN_vkGetDisplayPlaneCapabilities2KHR vkGetDisplayPlaneCapabilities2KHR = 0; - -# if defined( VK_USE_PLATFORM_IOS_MVK ) - //=== VK_MVK_ios_surface === - PFN_vkCreateIOSSurfaceMVK vkCreateIOSSurfaceMVK = 0; -# else - PFN_dummy vkCreateIOSSurfaceMVK_placeholder = 0; -# endif /*VK_USE_PLATFORM_IOS_MVK*/ - -# if defined( VK_USE_PLATFORM_MACOS_MVK ) - //=== VK_MVK_macos_surface === - PFN_vkCreateMacOSSurfaceMVK vkCreateMacOSSurfaceMVK = 0; -# else - PFN_dummy vkCreateMacOSSurfaceMVK_placeholder = 0; -# endif /*VK_USE_PLATFORM_MACOS_MVK*/ - - //=== VK_EXT_debug_utils === - PFN_vkCreateDebugUtilsMessengerEXT vkCreateDebugUtilsMessengerEXT = 0; - PFN_vkDestroyDebugUtilsMessengerEXT vkDestroyDebugUtilsMessengerEXT = 0; - PFN_vkSubmitDebugUtilsMessageEXT vkSubmitDebugUtilsMessageEXT = 0; - - //=== VK_EXT_sample_locations === - PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT vkGetPhysicalDeviceMultisamplePropertiesEXT = 0; - - //=== VK_EXT_calibrated_timestamps === - PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT vkGetPhysicalDeviceCalibrateableTimeDomainsEXT = 0; - -# if defined( VK_USE_PLATFORM_FUCHSIA ) - //=== VK_FUCHSIA_imagepipe_surface === - PFN_vkCreateImagePipeSurfaceFUCHSIA vkCreateImagePipeSurfaceFUCHSIA = 0; -# else - PFN_dummy vkCreateImagePipeSurfaceFUCHSIA_placeholder = 0; -# endif /*VK_USE_PLATFORM_FUCHSIA*/ +# if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_NV_cuda_kernel_launch === + vkCreateCudaModuleNV = PFN_vkCreateCudaModuleNV( vkGetDeviceProcAddr( device, "vkCreateCudaModuleNV" ) ); + vkGetCudaModuleCacheNV = PFN_vkGetCudaModuleCacheNV( vkGetDeviceProcAddr( device, "vkGetCudaModuleCacheNV" ) ); + vkCreateCudaFunctionNV = PFN_vkCreateCudaFunctionNV( vkGetDeviceProcAddr( device, "vkCreateCudaFunctionNV" ) ); + vkDestroyCudaModuleNV = PFN_vkDestroyCudaModuleNV( vkGetDeviceProcAddr( device, "vkDestroyCudaModuleNV" ) ); + vkDestroyCudaFunctionNV = PFN_vkDestroyCudaFunctionNV( vkGetDeviceProcAddr( device, "vkDestroyCudaFunctionNV" ) ); + vkCmdCudaLaunchKernelNV = PFN_vkCmdCudaLaunchKernelNV( vkGetDeviceProcAddr( device, "vkCmdCudaLaunchKernelNV" ) ); +# endif /*VK_ENABLE_BETA_EXTENSIONS*/ # if defined( VK_USE_PLATFORM_METAL_EXT ) - //=== VK_EXT_metal_surface === - PFN_vkCreateMetalSurfaceEXT vkCreateMetalSurfaceEXT = 0; -# else - PFN_dummy vkCreateMetalSurfaceEXT_placeholder = 0; + //=== VK_EXT_metal_objects === + vkExportMetalObjectsEXT = PFN_vkExportMetalObjectsEXT( vkGetDeviceProcAddr( device, "vkExportMetalObjectsEXT" ) ); # endif /*VK_USE_PLATFORM_METAL_EXT*/ - //=== VK_KHR_fragment_shading_rate === - PFN_vkGetPhysicalDeviceFragmentShadingRatesKHR vkGetPhysicalDeviceFragmentShadingRatesKHR = 0; + //=== VK_KHR_synchronization2 === + vkCmdSetEvent2KHR = PFN_vkCmdSetEvent2KHR( vkGetDeviceProcAddr( device, "vkCmdSetEvent2KHR" ) ); + if ( !vkCmdSetEvent2 ) + vkCmdSetEvent2 = vkCmdSetEvent2KHR; + vkCmdResetEvent2KHR = PFN_vkCmdResetEvent2KHR( vkGetDeviceProcAddr( device, "vkCmdResetEvent2KHR" ) ); + if ( !vkCmdResetEvent2 ) + vkCmdResetEvent2 = vkCmdResetEvent2KHR; + vkCmdWaitEvents2KHR = PFN_vkCmdWaitEvents2KHR( vkGetDeviceProcAddr( device, "vkCmdWaitEvents2KHR" ) ); + if ( !vkCmdWaitEvents2 ) + vkCmdWaitEvents2 = vkCmdWaitEvents2KHR; + vkCmdPipelineBarrier2KHR = PFN_vkCmdPipelineBarrier2KHR( vkGetDeviceProcAddr( device, "vkCmdPipelineBarrier2KHR" ) ); + if ( !vkCmdPipelineBarrier2 ) + 
vkCmdPipelineBarrier2 = vkCmdPipelineBarrier2KHR; + vkCmdWriteTimestamp2KHR = PFN_vkCmdWriteTimestamp2KHR( vkGetDeviceProcAddr( device, "vkCmdWriteTimestamp2KHR" ) ); + if ( !vkCmdWriteTimestamp2 ) + vkCmdWriteTimestamp2 = vkCmdWriteTimestamp2KHR; + vkQueueSubmit2KHR = PFN_vkQueueSubmit2KHR( vkGetDeviceProcAddr( device, "vkQueueSubmit2KHR" ) ); + if ( !vkQueueSubmit2 ) + vkQueueSubmit2 = vkQueueSubmit2KHR; - //=== VK_EXT_tooling_info === - PFN_vkGetPhysicalDeviceToolPropertiesEXT vkGetPhysicalDeviceToolPropertiesEXT = 0; + //=== VK_EXT_descriptor_buffer === + vkGetDescriptorSetLayoutSizeEXT = PFN_vkGetDescriptorSetLayoutSizeEXT( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutSizeEXT" ) ); + vkGetDescriptorSetLayoutBindingOffsetEXT = + PFN_vkGetDescriptorSetLayoutBindingOffsetEXT( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutBindingOffsetEXT" ) ); + vkGetDescriptorEXT = PFN_vkGetDescriptorEXT( vkGetDeviceProcAddr( device, "vkGetDescriptorEXT" ) ); + vkCmdBindDescriptorBuffersEXT = PFN_vkCmdBindDescriptorBuffersEXT( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorBuffersEXT" ) ); + vkCmdSetDescriptorBufferOffsetsEXT = PFN_vkCmdSetDescriptorBufferOffsetsEXT( vkGetDeviceProcAddr( device, "vkCmdSetDescriptorBufferOffsetsEXT" ) ); + vkCmdBindDescriptorBufferEmbeddedSamplersEXT = + PFN_vkCmdBindDescriptorBufferEmbeddedSamplersEXT( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorBufferEmbeddedSamplersEXT" ) ); + vkGetBufferOpaqueCaptureDescriptorDataEXT = + PFN_vkGetBufferOpaqueCaptureDescriptorDataEXT( vkGetDeviceProcAddr( device, "vkGetBufferOpaqueCaptureDescriptorDataEXT" ) ); + vkGetImageOpaqueCaptureDescriptorDataEXT = + PFN_vkGetImageOpaqueCaptureDescriptorDataEXT( vkGetDeviceProcAddr( device, "vkGetImageOpaqueCaptureDescriptorDataEXT" ) ); + vkGetImageViewOpaqueCaptureDescriptorDataEXT = + PFN_vkGetImageViewOpaqueCaptureDescriptorDataEXT( vkGetDeviceProcAddr( device, "vkGetImageViewOpaqueCaptureDescriptorDataEXT" ) ); + vkGetSamplerOpaqueCaptureDescriptorDataEXT = + PFN_vkGetSamplerOpaqueCaptureDescriptorDataEXT( vkGetDeviceProcAddr( device, "vkGetSamplerOpaqueCaptureDescriptorDataEXT" ) ); + vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT = PFN_vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT( + vkGetDeviceProcAddr( device, "vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT" ) ); - //=== VK_NV_cooperative_matrix === - PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV vkGetPhysicalDeviceCooperativeMatrixPropertiesNV = 0; + //=== VK_NV_fragment_shading_rate_enums === + vkCmdSetFragmentShadingRateEnumNV = PFN_vkCmdSetFragmentShadingRateEnumNV( vkGetDeviceProcAddr( device, "vkCmdSetFragmentShadingRateEnumNV" ) ); - //=== VK_NV_coverage_reduction_mode === - PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV = 0; + //=== VK_EXT_mesh_shader === + vkCmdDrawMeshTasksEXT = PFN_vkCmdDrawMeshTasksEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksEXT" ) ); + vkCmdDrawMeshTasksIndirectEXT = PFN_vkCmdDrawMeshTasksIndirectEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectEXT" ) ); + vkCmdDrawMeshTasksIndirectCountEXT = PFN_vkCmdDrawMeshTasksIndirectCountEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectCountEXT" ) ); -# if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_EXT_full_screen_exclusive === - PFN_vkGetPhysicalDeviceSurfacePresentModes2EXT vkGetPhysicalDeviceSurfacePresentModes2EXT = 0; -# else - PFN_dummy 
vkGetPhysicalDeviceSurfacePresentModes2EXT_placeholder = 0; -# endif /*VK_USE_PLATFORM_WIN32_KHR*/ + //=== VK_KHR_copy_commands2 === + vkCmdCopyBuffer2KHR = PFN_vkCmdCopyBuffer2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyBuffer2KHR" ) ); + if ( !vkCmdCopyBuffer2 ) + vkCmdCopyBuffer2 = vkCmdCopyBuffer2KHR; + vkCmdCopyImage2KHR = PFN_vkCmdCopyImage2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyImage2KHR" ) ); + if ( !vkCmdCopyImage2 ) + vkCmdCopyImage2 = vkCmdCopyImage2KHR; + vkCmdCopyBufferToImage2KHR = PFN_vkCmdCopyBufferToImage2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyBufferToImage2KHR" ) ); + if ( !vkCmdCopyBufferToImage2 ) + vkCmdCopyBufferToImage2 = vkCmdCopyBufferToImage2KHR; + vkCmdCopyImageToBuffer2KHR = PFN_vkCmdCopyImageToBuffer2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyImageToBuffer2KHR" ) ); + if ( !vkCmdCopyImageToBuffer2 ) + vkCmdCopyImageToBuffer2 = vkCmdCopyImageToBuffer2KHR; + vkCmdBlitImage2KHR = PFN_vkCmdBlitImage2KHR( vkGetDeviceProcAddr( device, "vkCmdBlitImage2KHR" ) ); + if ( !vkCmdBlitImage2 ) + vkCmdBlitImage2 = vkCmdBlitImage2KHR; + vkCmdResolveImage2KHR = PFN_vkCmdResolveImage2KHR( vkGetDeviceProcAddr( device, "vkCmdResolveImage2KHR" ) ); + if ( !vkCmdResolveImage2 ) + vkCmdResolveImage2 = vkCmdResolveImage2KHR; - //=== VK_EXT_headless_surface === - PFN_vkCreateHeadlessSurfaceEXT vkCreateHeadlessSurfaceEXT = 0; + //=== VK_EXT_device_fault === + vkGetDeviceFaultInfoEXT = PFN_vkGetDeviceFaultInfoEXT( vkGetDeviceProcAddr( device, "vkGetDeviceFaultInfoEXT" ) ); - //=== VK_EXT_acquire_drm_display === - PFN_vkAcquireDrmDisplayEXT vkAcquireDrmDisplayEXT = 0; - PFN_vkGetDrmDisplayEXT vkGetDrmDisplayEXT = 0; + //=== VK_EXT_vertex_input_dynamic_state === + vkCmdSetVertexInputEXT = PFN_vkCmdSetVertexInputEXT( vkGetDeviceProcAddr( device, "vkCmdSetVertexInputEXT" ) ); - //=== VK_KHR_video_encode_queue === - PFN_vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR = 0; +# if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_external_memory === + vkGetMemoryZirconHandleFUCHSIA = PFN_vkGetMemoryZirconHandleFUCHSIA( vkGetDeviceProcAddr( device, "vkGetMemoryZirconHandleFUCHSIA" ) ); + vkGetMemoryZirconHandlePropertiesFUCHSIA = + PFN_vkGetMemoryZirconHandlePropertiesFUCHSIA( vkGetDeviceProcAddr( device, "vkGetMemoryZirconHandlePropertiesFUCHSIA" ) ); +# endif /*VK_USE_PLATFORM_FUCHSIA*/ -# if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_NV_acquire_winrt_display === - PFN_vkAcquireWinrtDisplayNV vkAcquireWinrtDisplayNV = 0; - PFN_vkGetWinrtDisplayNV vkGetWinrtDisplayNV = 0; -# else - PFN_dummy vkAcquireWinrtDisplayNV_placeholder = 0; - PFN_dummy vkGetWinrtDisplayNV_placeholder = 0; -# endif /*VK_USE_PLATFORM_WIN32_KHR*/ +# if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_external_semaphore === + vkImportSemaphoreZirconHandleFUCHSIA = + PFN_vkImportSemaphoreZirconHandleFUCHSIA( vkGetDeviceProcAddr( device, "vkImportSemaphoreZirconHandleFUCHSIA" ) ); + vkGetSemaphoreZirconHandleFUCHSIA = PFN_vkGetSemaphoreZirconHandleFUCHSIA( vkGetDeviceProcAddr( device, "vkGetSemaphoreZirconHandleFUCHSIA" ) ); +# endif /*VK_USE_PLATFORM_FUCHSIA*/ -# if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) - //=== VK_EXT_directfb_surface === - PFN_vkCreateDirectFBSurfaceEXT vkCreateDirectFBSurfaceEXT = 0; - PFN_vkGetPhysicalDeviceDirectFBPresentationSupportEXT vkGetPhysicalDeviceDirectFBPresentationSupportEXT = 0; -# else - PFN_dummy vkCreateDirectFBSurfaceEXT_placeholder = 0; - PFN_dummy 
vkGetPhysicalDeviceDirectFBPresentationSupportEXT_placeholder = 0; -# endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ +# if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_buffer_collection === + vkCreateBufferCollectionFUCHSIA = PFN_vkCreateBufferCollectionFUCHSIA( vkGetDeviceProcAddr( device, "vkCreateBufferCollectionFUCHSIA" ) ); + vkSetBufferCollectionImageConstraintsFUCHSIA = + PFN_vkSetBufferCollectionImageConstraintsFUCHSIA( vkGetDeviceProcAddr( device, "vkSetBufferCollectionImageConstraintsFUCHSIA" ) ); + vkSetBufferCollectionBufferConstraintsFUCHSIA = + PFN_vkSetBufferCollectionBufferConstraintsFUCHSIA( vkGetDeviceProcAddr( device, "vkSetBufferCollectionBufferConstraintsFUCHSIA" ) ); + vkDestroyBufferCollectionFUCHSIA = PFN_vkDestroyBufferCollectionFUCHSIA( vkGetDeviceProcAddr( device, "vkDestroyBufferCollectionFUCHSIA" ) ); + vkGetBufferCollectionPropertiesFUCHSIA = + PFN_vkGetBufferCollectionPropertiesFUCHSIA( vkGetDeviceProcAddr( device, "vkGetBufferCollectionPropertiesFUCHSIA" ) ); +# endif /*VK_USE_PLATFORM_FUCHSIA*/ + + //=== VK_HUAWEI_subpass_shading === + vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI = + PFN_vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI( vkGetDeviceProcAddr( device, "vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI" ) ); + vkCmdSubpassShadingHUAWEI = PFN_vkCmdSubpassShadingHUAWEI( vkGetDeviceProcAddr( device, "vkCmdSubpassShadingHUAWEI" ) ); + + //=== VK_HUAWEI_invocation_mask === + vkCmdBindInvocationMaskHUAWEI = PFN_vkCmdBindInvocationMaskHUAWEI( vkGetDeviceProcAddr( device, "vkCmdBindInvocationMaskHUAWEI" ) ); + + //=== VK_NV_external_memory_rdma === + vkGetMemoryRemoteAddressNV = PFN_vkGetMemoryRemoteAddressNV( vkGetDeviceProcAddr( device, "vkGetMemoryRemoteAddressNV" ) ); + + //=== VK_EXT_pipeline_properties === + vkGetPipelinePropertiesEXT = PFN_vkGetPipelinePropertiesEXT( vkGetDeviceProcAddr( device, "vkGetPipelinePropertiesEXT" ) ); + + //=== VK_EXT_extended_dynamic_state2 === + vkCmdSetPatchControlPointsEXT = PFN_vkCmdSetPatchControlPointsEXT( vkGetDeviceProcAddr( device, "vkCmdSetPatchControlPointsEXT" ) ); + vkCmdSetRasterizerDiscardEnableEXT = PFN_vkCmdSetRasterizerDiscardEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetRasterizerDiscardEnableEXT" ) ); + if ( !vkCmdSetRasterizerDiscardEnable ) + vkCmdSetRasterizerDiscardEnable = vkCmdSetRasterizerDiscardEnableEXT; + vkCmdSetDepthBiasEnableEXT = PFN_vkCmdSetDepthBiasEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthBiasEnableEXT" ) ); + if ( !vkCmdSetDepthBiasEnable ) + vkCmdSetDepthBiasEnable = vkCmdSetDepthBiasEnableEXT; + vkCmdSetLogicOpEXT = PFN_vkCmdSetLogicOpEXT( vkGetDeviceProcAddr( device, "vkCmdSetLogicOpEXT" ) ); + vkCmdSetPrimitiveRestartEnableEXT = PFN_vkCmdSetPrimitiveRestartEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveRestartEnableEXT" ) ); + if ( !vkCmdSetPrimitiveRestartEnable ) + vkCmdSetPrimitiveRestartEnable = vkCmdSetPrimitiveRestartEnableEXT; + + //=== VK_EXT_color_write_enable === + vkCmdSetColorWriteEnableEXT = PFN_vkCmdSetColorWriteEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorWriteEnableEXT" ) ); + + //=== VK_KHR_ray_tracing_maintenance1 === + vkCmdTraceRaysIndirect2KHR = PFN_vkCmdTraceRaysIndirect2KHR( vkGetDeviceProcAddr( device, "vkCmdTraceRaysIndirect2KHR" ) ); + + //=== VK_EXT_multi_draw === + vkCmdDrawMultiEXT = PFN_vkCmdDrawMultiEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMultiEXT" ) ); + vkCmdDrawMultiIndexedEXT = PFN_vkCmdDrawMultiIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMultiIndexedEXT" ) ); + + //=== VK_EXT_opacity_micromap 
=== + vkCreateMicromapEXT = PFN_vkCreateMicromapEXT( vkGetDeviceProcAddr( device, "vkCreateMicromapEXT" ) ); + vkDestroyMicromapEXT = PFN_vkDestroyMicromapEXT( vkGetDeviceProcAddr( device, "vkDestroyMicromapEXT" ) ); + vkCmdBuildMicromapsEXT = PFN_vkCmdBuildMicromapsEXT( vkGetDeviceProcAddr( device, "vkCmdBuildMicromapsEXT" ) ); + vkBuildMicromapsEXT = PFN_vkBuildMicromapsEXT( vkGetDeviceProcAddr( device, "vkBuildMicromapsEXT" ) ); + vkCopyMicromapEXT = PFN_vkCopyMicromapEXT( vkGetDeviceProcAddr( device, "vkCopyMicromapEXT" ) ); + vkCopyMicromapToMemoryEXT = PFN_vkCopyMicromapToMemoryEXT( vkGetDeviceProcAddr( device, "vkCopyMicromapToMemoryEXT" ) ); + vkCopyMemoryToMicromapEXT = PFN_vkCopyMemoryToMicromapEXT( vkGetDeviceProcAddr( device, "vkCopyMemoryToMicromapEXT" ) ); + vkWriteMicromapsPropertiesEXT = PFN_vkWriteMicromapsPropertiesEXT( vkGetDeviceProcAddr( device, "vkWriteMicromapsPropertiesEXT" ) ); + vkCmdCopyMicromapEXT = PFN_vkCmdCopyMicromapEXT( vkGetDeviceProcAddr( device, "vkCmdCopyMicromapEXT" ) ); + vkCmdCopyMicromapToMemoryEXT = PFN_vkCmdCopyMicromapToMemoryEXT( vkGetDeviceProcAddr( device, "vkCmdCopyMicromapToMemoryEXT" ) ); + vkCmdCopyMemoryToMicromapEXT = PFN_vkCmdCopyMemoryToMicromapEXT( vkGetDeviceProcAddr( device, "vkCmdCopyMemoryToMicromapEXT" ) ); + vkCmdWriteMicromapsPropertiesEXT = PFN_vkCmdWriteMicromapsPropertiesEXT( vkGetDeviceProcAddr( device, "vkCmdWriteMicromapsPropertiesEXT" ) ); + vkGetDeviceMicromapCompatibilityEXT = PFN_vkGetDeviceMicromapCompatibilityEXT( vkGetDeviceProcAddr( device, "vkGetDeviceMicromapCompatibilityEXT" ) ); + vkGetMicromapBuildSizesEXT = PFN_vkGetMicromapBuildSizesEXT( vkGetDeviceProcAddr( device, "vkGetMicromapBuildSizesEXT" ) ); + + //=== VK_HUAWEI_cluster_culling_shader === + vkCmdDrawClusterHUAWEI = PFN_vkCmdDrawClusterHUAWEI( vkGetDeviceProcAddr( device, "vkCmdDrawClusterHUAWEI" ) ); + vkCmdDrawClusterIndirectHUAWEI = PFN_vkCmdDrawClusterIndirectHUAWEI( vkGetDeviceProcAddr( device, "vkCmdDrawClusterIndirectHUAWEI" ) ); + + //=== VK_EXT_pageable_device_local_memory === + vkSetDeviceMemoryPriorityEXT = PFN_vkSetDeviceMemoryPriorityEXT( vkGetDeviceProcAddr( device, "vkSetDeviceMemoryPriorityEXT" ) ); + + //=== VK_KHR_maintenance4 === + vkGetDeviceBufferMemoryRequirementsKHR = + PFN_vkGetDeviceBufferMemoryRequirementsKHR( vkGetDeviceProcAddr( device, "vkGetDeviceBufferMemoryRequirementsKHR" ) ); + if ( !vkGetDeviceBufferMemoryRequirements ) + vkGetDeviceBufferMemoryRequirements = vkGetDeviceBufferMemoryRequirementsKHR; + vkGetDeviceImageMemoryRequirementsKHR = + PFN_vkGetDeviceImageMemoryRequirementsKHR( vkGetDeviceProcAddr( device, "vkGetDeviceImageMemoryRequirementsKHR" ) ); + if ( !vkGetDeviceImageMemoryRequirements ) + vkGetDeviceImageMemoryRequirements = vkGetDeviceImageMemoryRequirementsKHR; + vkGetDeviceImageSparseMemoryRequirementsKHR = + PFN_vkGetDeviceImageSparseMemoryRequirementsKHR( vkGetDeviceProcAddr( device, "vkGetDeviceImageSparseMemoryRequirementsKHR" ) ); + if ( !vkGetDeviceImageSparseMemoryRequirements ) + vkGetDeviceImageSparseMemoryRequirements = vkGetDeviceImageSparseMemoryRequirementsKHR; + + //=== VK_VALVE_descriptor_set_host_mapping === + vkGetDescriptorSetLayoutHostMappingInfoVALVE = + PFN_vkGetDescriptorSetLayoutHostMappingInfoVALVE( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutHostMappingInfoVALVE" ) ); + vkGetDescriptorSetHostMappingVALVE = PFN_vkGetDescriptorSetHostMappingVALVE( vkGetDeviceProcAddr( device, "vkGetDescriptorSetHostMappingVALVE" ) ); + + //=== VK_NV_copy_memory_indirect === + 
vkCmdCopyMemoryIndirectNV = PFN_vkCmdCopyMemoryIndirectNV( vkGetDeviceProcAddr( device, "vkCmdCopyMemoryIndirectNV" ) ); + vkCmdCopyMemoryToImageIndirectNV = PFN_vkCmdCopyMemoryToImageIndirectNV( vkGetDeviceProcAddr( device, "vkCmdCopyMemoryToImageIndirectNV" ) ); + + //=== VK_NV_memory_decompression === + vkCmdDecompressMemoryNV = PFN_vkCmdDecompressMemoryNV( vkGetDeviceProcAddr( device, "vkCmdDecompressMemoryNV" ) ); + vkCmdDecompressMemoryIndirectCountNV = + PFN_vkCmdDecompressMemoryIndirectCountNV( vkGetDeviceProcAddr( device, "vkCmdDecompressMemoryIndirectCountNV" ) ); + + //=== VK_NV_device_generated_commands_compute === + vkGetPipelineIndirectMemoryRequirementsNV = + PFN_vkGetPipelineIndirectMemoryRequirementsNV( vkGetDeviceProcAddr( device, "vkGetPipelineIndirectMemoryRequirementsNV" ) ); + vkCmdUpdatePipelineIndirectBufferNV = PFN_vkCmdUpdatePipelineIndirectBufferNV( vkGetDeviceProcAddr( device, "vkCmdUpdatePipelineIndirectBufferNV" ) ); + vkGetPipelineIndirectDeviceAddressNV = + PFN_vkGetPipelineIndirectDeviceAddressNV( vkGetDeviceProcAddr( device, "vkGetPipelineIndirectDeviceAddressNV" ) ); + + //=== VK_EXT_extended_dynamic_state3 === + vkCmdSetDepthClampEnableEXT = PFN_vkCmdSetDepthClampEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthClampEnableEXT" ) ); + vkCmdSetPolygonModeEXT = PFN_vkCmdSetPolygonModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetPolygonModeEXT" ) ); + vkCmdSetRasterizationSamplesEXT = PFN_vkCmdSetRasterizationSamplesEXT( vkGetDeviceProcAddr( device, "vkCmdSetRasterizationSamplesEXT" ) ); + vkCmdSetSampleMaskEXT = PFN_vkCmdSetSampleMaskEXT( vkGetDeviceProcAddr( device, "vkCmdSetSampleMaskEXT" ) ); + vkCmdSetAlphaToCoverageEnableEXT = PFN_vkCmdSetAlphaToCoverageEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetAlphaToCoverageEnableEXT" ) ); + vkCmdSetAlphaToOneEnableEXT = PFN_vkCmdSetAlphaToOneEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetAlphaToOneEnableEXT" ) ); + vkCmdSetLogicOpEnableEXT = PFN_vkCmdSetLogicOpEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetLogicOpEnableEXT" ) ); + vkCmdSetColorBlendEnableEXT = PFN_vkCmdSetColorBlendEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorBlendEnableEXT" ) ); + vkCmdSetColorBlendEquationEXT = PFN_vkCmdSetColorBlendEquationEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorBlendEquationEXT" ) ); + vkCmdSetColorWriteMaskEXT = PFN_vkCmdSetColorWriteMaskEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorWriteMaskEXT" ) ); + vkCmdSetTessellationDomainOriginEXT = PFN_vkCmdSetTessellationDomainOriginEXT( vkGetDeviceProcAddr( device, "vkCmdSetTessellationDomainOriginEXT" ) ); + vkCmdSetRasterizationStreamEXT = PFN_vkCmdSetRasterizationStreamEXT( vkGetDeviceProcAddr( device, "vkCmdSetRasterizationStreamEXT" ) ); + vkCmdSetConservativeRasterizationModeEXT = + PFN_vkCmdSetConservativeRasterizationModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetConservativeRasterizationModeEXT" ) ); + vkCmdSetExtraPrimitiveOverestimationSizeEXT = + PFN_vkCmdSetExtraPrimitiveOverestimationSizeEXT( vkGetDeviceProcAddr( device, "vkCmdSetExtraPrimitiveOverestimationSizeEXT" ) ); + vkCmdSetDepthClipEnableEXT = PFN_vkCmdSetDepthClipEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthClipEnableEXT" ) ); + vkCmdSetSampleLocationsEnableEXT = PFN_vkCmdSetSampleLocationsEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetSampleLocationsEnableEXT" ) ); + vkCmdSetColorBlendAdvancedEXT = PFN_vkCmdSetColorBlendAdvancedEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorBlendAdvancedEXT" ) ); + vkCmdSetProvokingVertexModeEXT = PFN_vkCmdSetProvokingVertexModeEXT( 
vkGetDeviceProcAddr( device, "vkCmdSetProvokingVertexModeEXT" ) ); + vkCmdSetLineRasterizationModeEXT = PFN_vkCmdSetLineRasterizationModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetLineRasterizationModeEXT" ) ); + vkCmdSetLineStippleEnableEXT = PFN_vkCmdSetLineStippleEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetLineStippleEnableEXT" ) ); + vkCmdSetDepthClipNegativeOneToOneEXT = + PFN_vkCmdSetDepthClipNegativeOneToOneEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthClipNegativeOneToOneEXT" ) ); + vkCmdSetViewportWScalingEnableNV = PFN_vkCmdSetViewportWScalingEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportWScalingEnableNV" ) ); + vkCmdSetViewportSwizzleNV = PFN_vkCmdSetViewportSwizzleNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportSwizzleNV" ) ); + vkCmdSetCoverageToColorEnableNV = PFN_vkCmdSetCoverageToColorEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageToColorEnableNV" ) ); + vkCmdSetCoverageToColorLocationNV = PFN_vkCmdSetCoverageToColorLocationNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageToColorLocationNV" ) ); + vkCmdSetCoverageModulationModeNV = PFN_vkCmdSetCoverageModulationModeNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageModulationModeNV" ) ); + vkCmdSetCoverageModulationTableEnableNV = + PFN_vkCmdSetCoverageModulationTableEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageModulationTableEnableNV" ) ); + vkCmdSetCoverageModulationTableNV = PFN_vkCmdSetCoverageModulationTableNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageModulationTableNV" ) ); + vkCmdSetShadingRateImageEnableNV = PFN_vkCmdSetShadingRateImageEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetShadingRateImageEnableNV" ) ); + vkCmdSetRepresentativeFragmentTestEnableNV = + PFN_vkCmdSetRepresentativeFragmentTestEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetRepresentativeFragmentTestEnableNV" ) ); + vkCmdSetCoverageReductionModeNV = PFN_vkCmdSetCoverageReductionModeNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageReductionModeNV" ) ); + + //=== VK_EXT_shader_module_identifier === + vkGetShaderModuleIdentifierEXT = PFN_vkGetShaderModuleIdentifierEXT( vkGetDeviceProcAddr( device, "vkGetShaderModuleIdentifierEXT" ) ); + vkGetShaderModuleCreateInfoIdentifierEXT = + PFN_vkGetShaderModuleCreateInfoIdentifierEXT( vkGetDeviceProcAddr( device, "vkGetShaderModuleCreateInfoIdentifierEXT" ) ); + + //=== VK_NV_optical_flow === + vkCreateOpticalFlowSessionNV = PFN_vkCreateOpticalFlowSessionNV( vkGetDeviceProcAddr( device, "vkCreateOpticalFlowSessionNV" ) ); + vkDestroyOpticalFlowSessionNV = PFN_vkDestroyOpticalFlowSessionNV( vkGetDeviceProcAddr( device, "vkDestroyOpticalFlowSessionNV" ) ); + vkBindOpticalFlowSessionImageNV = PFN_vkBindOpticalFlowSessionImageNV( vkGetDeviceProcAddr( device, "vkBindOpticalFlowSessionImageNV" ) ); + vkCmdOpticalFlowExecuteNV = PFN_vkCmdOpticalFlowExecuteNV( vkGetDeviceProcAddr( device, "vkCmdOpticalFlowExecuteNV" ) ); + + //=== VK_KHR_maintenance5 === + vkCmdBindIndexBuffer2KHR = PFN_vkCmdBindIndexBuffer2KHR( vkGetDeviceProcAddr( device, "vkCmdBindIndexBuffer2KHR" ) ); + vkGetRenderingAreaGranularityKHR = PFN_vkGetRenderingAreaGranularityKHR( vkGetDeviceProcAddr( device, "vkGetRenderingAreaGranularityKHR" ) ); + vkGetDeviceImageSubresourceLayoutKHR = + PFN_vkGetDeviceImageSubresourceLayoutKHR( vkGetDeviceProcAddr( device, "vkGetDeviceImageSubresourceLayoutKHR" ) ); + vkGetImageSubresourceLayout2KHR = PFN_vkGetImageSubresourceLayout2KHR( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout2KHR" ) ); + + //=== VK_AMD_anti_lag === + vkAntiLagUpdateAMD = 
PFN_vkAntiLagUpdateAMD( vkGetDeviceProcAddr( device, "vkAntiLagUpdateAMD" ) ); + + //=== VK_EXT_shader_object === + vkCreateShadersEXT = PFN_vkCreateShadersEXT( vkGetDeviceProcAddr( device, "vkCreateShadersEXT" ) ); + vkDestroyShaderEXT = PFN_vkDestroyShaderEXT( vkGetDeviceProcAddr( device, "vkDestroyShaderEXT" ) ); + vkGetShaderBinaryDataEXT = PFN_vkGetShaderBinaryDataEXT( vkGetDeviceProcAddr( device, "vkGetShaderBinaryDataEXT" ) ); + vkCmdBindShadersEXT = PFN_vkCmdBindShadersEXT( vkGetDeviceProcAddr( device, "vkCmdBindShadersEXT" ) ); + vkCmdSetDepthClampRangeEXT = PFN_vkCmdSetDepthClampRangeEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthClampRangeEXT" ) ); + + //=== VK_KHR_pipeline_binary === + vkCreatePipelineBinariesKHR = PFN_vkCreatePipelineBinariesKHR( vkGetDeviceProcAddr( device, "vkCreatePipelineBinariesKHR" ) ); + vkDestroyPipelineBinaryKHR = PFN_vkDestroyPipelineBinaryKHR( vkGetDeviceProcAddr( device, "vkDestroyPipelineBinaryKHR" ) ); + vkGetPipelineKeyKHR = PFN_vkGetPipelineKeyKHR( vkGetDeviceProcAddr( device, "vkGetPipelineKeyKHR" ) ); + vkGetPipelineBinaryDataKHR = PFN_vkGetPipelineBinaryDataKHR( vkGetDeviceProcAddr( device, "vkGetPipelineBinaryDataKHR" ) ); + vkReleaseCapturedPipelineDataKHR = PFN_vkReleaseCapturedPipelineDataKHR( vkGetDeviceProcAddr( device, "vkReleaseCapturedPipelineDataKHR" ) ); + + //=== VK_QCOM_tile_properties === + vkGetFramebufferTilePropertiesQCOM = PFN_vkGetFramebufferTilePropertiesQCOM( vkGetDeviceProcAddr( device, "vkGetFramebufferTilePropertiesQCOM" ) ); + vkGetDynamicRenderingTilePropertiesQCOM = + PFN_vkGetDynamicRenderingTilePropertiesQCOM( vkGetDeviceProcAddr( device, "vkGetDynamicRenderingTilePropertiesQCOM" ) ); + + //=== VK_NV_low_latency2 === + vkSetLatencySleepModeNV = PFN_vkSetLatencySleepModeNV( vkGetDeviceProcAddr( device, "vkSetLatencySleepModeNV" ) ); + vkLatencySleepNV = PFN_vkLatencySleepNV( vkGetDeviceProcAddr( device, "vkLatencySleepNV" ) ); + vkSetLatencyMarkerNV = PFN_vkSetLatencyMarkerNV( vkGetDeviceProcAddr( device, "vkSetLatencyMarkerNV" ) ); + vkGetLatencyTimingsNV = PFN_vkGetLatencyTimingsNV( vkGetDeviceProcAddr( device, "vkGetLatencyTimingsNV" ) ); + vkQueueNotifyOutOfBandNV = PFN_vkQueueNotifyOutOfBandNV( vkGetDeviceProcAddr( device, "vkQueueNotifyOutOfBandNV" ) ); + + //=== VK_EXT_attachment_feedback_loop_dynamic_state === + vkCmdSetAttachmentFeedbackLoopEnableEXT = + PFN_vkCmdSetAttachmentFeedbackLoopEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetAttachmentFeedbackLoopEnableEXT" ) ); # if defined( VK_USE_PLATFORM_SCREEN_QNX ) - //=== VK_QNX_screen_surface === - PFN_vkCreateScreenSurfaceQNX vkCreateScreenSurfaceQNX = 0; - PFN_vkGetPhysicalDeviceScreenPresentationSupportQNX vkGetPhysicalDeviceScreenPresentationSupportQNX = 0; -# else - PFN_dummy vkCreateScreenSurfaceQNX_placeholder = 0; - PFN_dummy vkGetPhysicalDeviceScreenPresentationSupportQNX_placeholder = 0; + //=== VK_QNX_external_memory_screen_buffer === + vkGetScreenBufferPropertiesQNX = PFN_vkGetScreenBufferPropertiesQNX( vkGetDeviceProcAddr( device, "vkGetScreenBufferPropertiesQNX" ) ); # endif /*VK_USE_PLATFORM_SCREEN_QNX*/ - //=== VK_NV_optical_flow === - PFN_vkGetPhysicalDeviceOpticalFlowImageFormatsNV vkGetPhysicalDeviceOpticalFlowImageFormatsNV = 0; + //=== VK_KHR_line_rasterization === + vkCmdSetLineStippleKHR = PFN_vkCmdSetLineStippleKHR( vkGetDeviceProcAddr( device, "vkCmdSetLineStippleKHR" ) ); - //=== VK_KHR_cooperative_matrix === - PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR = 0; + //=== 
VK_KHR_calibrated_timestamps === + vkGetCalibratedTimestampsKHR = PFN_vkGetCalibratedTimestampsKHR( vkGetDeviceProcAddr( device, "vkGetCalibratedTimestampsKHR" ) ); - //=== VK_KHR_calibrated_timestamps === - PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsKHR vkGetPhysicalDeviceCalibrateableTimeDomainsKHR = 0; + //=== VK_KHR_maintenance6 === + vkCmdBindDescriptorSets2KHR = PFN_vkCmdBindDescriptorSets2KHR( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorSets2KHR" ) ); + vkCmdPushConstants2KHR = PFN_vkCmdPushConstants2KHR( vkGetDeviceProcAddr( device, "vkCmdPushConstants2KHR" ) ); + vkCmdPushDescriptorSet2KHR = PFN_vkCmdPushDescriptorSet2KHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSet2KHR" ) ); + vkCmdPushDescriptorSetWithTemplate2KHR = + PFN_vkCmdPushDescriptorSetWithTemplate2KHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetWithTemplate2KHR" ) ); + vkCmdSetDescriptorBufferOffsets2EXT = PFN_vkCmdSetDescriptorBufferOffsets2EXT( vkGetDeviceProcAddr( device, "vkCmdSetDescriptorBufferOffsets2EXT" ) ); + vkCmdBindDescriptorBufferEmbeddedSamplers2EXT = + PFN_vkCmdBindDescriptorBufferEmbeddedSamplers2EXT( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorBufferEmbeddedSamplers2EXT" ) ); - //=== VK_NV_cooperative_matrix2 === - PFN_vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV = 0; + //=== VK_EXT_device_generated_commands === + vkGetGeneratedCommandsMemoryRequirementsEXT = + PFN_vkGetGeneratedCommandsMemoryRequirementsEXT( vkGetDeviceProcAddr( device, "vkGetGeneratedCommandsMemoryRequirementsEXT" ) ); + vkCmdPreprocessGeneratedCommandsEXT = PFN_vkCmdPreprocessGeneratedCommandsEXT( vkGetDeviceProcAddr( device, "vkCmdPreprocessGeneratedCommandsEXT" ) ); + vkCmdExecuteGeneratedCommandsEXT = PFN_vkCmdExecuteGeneratedCommandsEXT( vkGetDeviceProcAddr( device, "vkCmdExecuteGeneratedCommandsEXT" ) ); + vkCreateIndirectCommandsLayoutEXT = PFN_vkCreateIndirectCommandsLayoutEXT( vkGetDeviceProcAddr( device, "vkCreateIndirectCommandsLayoutEXT" ) ); + vkDestroyIndirectCommandsLayoutEXT = PFN_vkDestroyIndirectCommandsLayoutEXT( vkGetDeviceProcAddr( device, "vkDestroyIndirectCommandsLayoutEXT" ) ); + vkCreateIndirectExecutionSetEXT = PFN_vkCreateIndirectExecutionSetEXT( vkGetDeviceProcAddr( device, "vkCreateIndirectExecutionSetEXT" ) ); + vkDestroyIndirectExecutionSetEXT = PFN_vkDestroyIndirectExecutionSetEXT( vkGetDeviceProcAddr( device, "vkDestroyIndirectExecutionSetEXT" ) ); + vkUpdateIndirectExecutionSetPipelineEXT = + PFN_vkUpdateIndirectExecutionSetPipelineEXT( vkGetDeviceProcAddr( device, "vkUpdateIndirectExecutionSetPipelineEXT" ) ); + vkUpdateIndirectExecutionSetShaderEXT = + PFN_vkUpdateIndirectExecutionSetShaderEXT( vkGetDeviceProcAddr( device, "vkUpdateIndirectExecutionSetShaderEXT" ) ); + } - PFN_vkGetDeviceProcAddr vkGetDeviceProcAddr = 0; - }; - - class DeviceDispatcher : public DispatchLoaderBase - { - public: - DeviceDispatcher( PFN_vkGetDeviceProcAddr getProcAddr, VkDevice device ) : vkGetDeviceProcAddr( getProcAddr ) - { + public: //=== VK_VERSION_1_0 === - vkGetDeviceProcAddr = PFN_vkGetDeviceProcAddr( vkGetDeviceProcAddr( device, "vkGetDeviceProcAddr" ) ); - vkDestroyDevice = PFN_vkDestroyDevice( vkGetDeviceProcAddr( device, "vkDestroyDevice" ) ); - vkGetDeviceQueue = PFN_vkGetDeviceQueue( vkGetDeviceProcAddr( device, "vkGetDeviceQueue" ) ); - vkQueueSubmit = PFN_vkQueueSubmit( vkGetDeviceProcAddr( device, "vkQueueSubmit" ) ); - vkQueueWaitIdle = PFN_vkQueueWaitIdle( vkGetDeviceProcAddr( 
device, "vkQueueWaitIdle" ) ); - vkDeviceWaitIdle = PFN_vkDeviceWaitIdle( vkGetDeviceProcAddr( device, "vkDeviceWaitIdle" ) ); - vkAllocateMemory = PFN_vkAllocateMemory( vkGetDeviceProcAddr( device, "vkAllocateMemory" ) ); - vkFreeMemory = PFN_vkFreeMemory( vkGetDeviceProcAddr( device, "vkFreeMemory" ) ); - vkMapMemory = PFN_vkMapMemory( vkGetDeviceProcAddr( device, "vkMapMemory" ) ); - vkUnmapMemory = PFN_vkUnmapMemory( vkGetDeviceProcAddr( device, "vkUnmapMemory" ) ); - vkFlushMappedMemoryRanges = PFN_vkFlushMappedMemoryRanges( vkGetDeviceProcAddr( device, "vkFlushMappedMemoryRanges" ) ); - vkInvalidateMappedMemoryRanges = PFN_vkInvalidateMappedMemoryRanges( vkGetDeviceProcAddr( device, "vkInvalidateMappedMemoryRanges" ) ); - vkGetDeviceMemoryCommitment = PFN_vkGetDeviceMemoryCommitment( vkGetDeviceProcAddr( device, "vkGetDeviceMemoryCommitment" ) ); - vkBindBufferMemory = PFN_vkBindBufferMemory( vkGetDeviceProcAddr( device, "vkBindBufferMemory" ) ); - vkBindImageMemory = PFN_vkBindImageMemory( vkGetDeviceProcAddr( device, "vkBindImageMemory" ) ); - vkGetBufferMemoryRequirements = PFN_vkGetBufferMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements" ) ); - vkGetImageMemoryRequirements = PFN_vkGetImageMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements" ) ); - vkGetImageSparseMemoryRequirements = PFN_vkGetImageSparseMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements" ) ); - vkQueueBindSparse = PFN_vkQueueBindSparse( vkGetDeviceProcAddr( device, "vkQueueBindSparse" ) ); - vkCreateFence = PFN_vkCreateFence( vkGetDeviceProcAddr( device, "vkCreateFence" ) ); - vkDestroyFence = PFN_vkDestroyFence( vkGetDeviceProcAddr( device, "vkDestroyFence" ) ); - vkResetFences = PFN_vkResetFences( vkGetDeviceProcAddr( device, "vkResetFences" ) ); - vkGetFenceStatus = PFN_vkGetFenceStatus( vkGetDeviceProcAddr( device, "vkGetFenceStatus" ) ); - vkWaitForFences = PFN_vkWaitForFences( vkGetDeviceProcAddr( device, "vkWaitForFences" ) ); - vkCreateSemaphore = PFN_vkCreateSemaphore( vkGetDeviceProcAddr( device, "vkCreateSemaphore" ) ); - vkDestroySemaphore = PFN_vkDestroySemaphore( vkGetDeviceProcAddr( device, "vkDestroySemaphore" ) ); - vkCreateEvent = PFN_vkCreateEvent( vkGetDeviceProcAddr( device, "vkCreateEvent" ) ); - vkDestroyEvent = PFN_vkDestroyEvent( vkGetDeviceProcAddr( device, "vkDestroyEvent" ) ); - vkGetEventStatus = PFN_vkGetEventStatus( vkGetDeviceProcAddr( device, "vkGetEventStatus" ) ); - vkSetEvent = PFN_vkSetEvent( vkGetDeviceProcAddr( device, "vkSetEvent" ) ); - vkResetEvent = PFN_vkResetEvent( vkGetDeviceProcAddr( device, "vkResetEvent" ) ); - vkCreateQueryPool = PFN_vkCreateQueryPool( vkGetDeviceProcAddr( device, "vkCreateQueryPool" ) ); - vkDestroyQueryPool = PFN_vkDestroyQueryPool( vkGetDeviceProcAddr( device, "vkDestroyQueryPool" ) ); - vkGetQueryPoolResults = PFN_vkGetQueryPoolResults( vkGetDeviceProcAddr( device, "vkGetQueryPoolResults" ) ); - vkCreateBuffer = PFN_vkCreateBuffer( vkGetDeviceProcAddr( device, "vkCreateBuffer" ) ); - vkDestroyBuffer = PFN_vkDestroyBuffer( vkGetDeviceProcAddr( device, "vkDestroyBuffer" ) ); - vkCreateBufferView = PFN_vkCreateBufferView( vkGetDeviceProcAddr( device, "vkCreateBufferView" ) ); - vkDestroyBufferView = PFN_vkDestroyBufferView( vkGetDeviceProcAddr( device, "vkDestroyBufferView" ) ); - vkCreateImage = PFN_vkCreateImage( vkGetDeviceProcAddr( device, "vkCreateImage" ) ); - vkDestroyImage = PFN_vkDestroyImage( vkGetDeviceProcAddr( device, "vkDestroyImage" ) ); - 
vkGetImageSubresourceLayout = PFN_vkGetImageSubresourceLayout( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout" ) ); - vkCreateImageView = PFN_vkCreateImageView( vkGetDeviceProcAddr( device, "vkCreateImageView" ) ); - vkDestroyImageView = PFN_vkDestroyImageView( vkGetDeviceProcAddr( device, "vkDestroyImageView" ) ); - vkCreateShaderModule = PFN_vkCreateShaderModule( vkGetDeviceProcAddr( device, "vkCreateShaderModule" ) ); - vkDestroyShaderModule = PFN_vkDestroyShaderModule( vkGetDeviceProcAddr( device, "vkDestroyShaderModule" ) ); - vkCreatePipelineCache = PFN_vkCreatePipelineCache( vkGetDeviceProcAddr( device, "vkCreatePipelineCache" ) ); - vkDestroyPipelineCache = PFN_vkDestroyPipelineCache( vkGetDeviceProcAddr( device, "vkDestroyPipelineCache" ) ); - vkGetPipelineCacheData = PFN_vkGetPipelineCacheData( vkGetDeviceProcAddr( device, "vkGetPipelineCacheData" ) ); - vkMergePipelineCaches = PFN_vkMergePipelineCaches( vkGetDeviceProcAddr( device, "vkMergePipelineCaches" ) ); - vkCreateGraphicsPipelines = PFN_vkCreateGraphicsPipelines( vkGetDeviceProcAddr( device, "vkCreateGraphicsPipelines" ) ); - vkCreateComputePipelines = PFN_vkCreateComputePipelines( vkGetDeviceProcAddr( device, "vkCreateComputePipelines" ) ); - vkDestroyPipeline = PFN_vkDestroyPipeline( vkGetDeviceProcAddr( device, "vkDestroyPipeline" ) ); - vkCreatePipelineLayout = PFN_vkCreatePipelineLayout( vkGetDeviceProcAddr( device, "vkCreatePipelineLayout" ) ); - vkDestroyPipelineLayout = PFN_vkDestroyPipelineLayout( vkGetDeviceProcAddr( device, "vkDestroyPipelineLayout" ) ); - vkCreateSampler = PFN_vkCreateSampler( vkGetDeviceProcAddr( device, "vkCreateSampler" ) ); - vkDestroySampler = PFN_vkDestroySampler( vkGetDeviceProcAddr( device, "vkDestroySampler" ) ); - vkCreateDescriptorSetLayout = PFN_vkCreateDescriptorSetLayout( vkGetDeviceProcAddr( device, "vkCreateDescriptorSetLayout" ) ); - vkDestroyDescriptorSetLayout = PFN_vkDestroyDescriptorSetLayout( vkGetDeviceProcAddr( device, "vkDestroyDescriptorSetLayout" ) ); - vkCreateDescriptorPool = PFN_vkCreateDescriptorPool( vkGetDeviceProcAddr( device, "vkCreateDescriptorPool" ) ); - vkDestroyDescriptorPool = PFN_vkDestroyDescriptorPool( vkGetDeviceProcAddr( device, "vkDestroyDescriptorPool" ) ); - vkResetDescriptorPool = PFN_vkResetDescriptorPool( vkGetDeviceProcAddr( device, "vkResetDescriptorPool" ) ); - vkAllocateDescriptorSets = PFN_vkAllocateDescriptorSets( vkGetDeviceProcAddr( device, "vkAllocateDescriptorSets" ) ); - vkFreeDescriptorSets = PFN_vkFreeDescriptorSets( vkGetDeviceProcAddr( device, "vkFreeDescriptorSets" ) ); - vkUpdateDescriptorSets = PFN_vkUpdateDescriptorSets( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSets" ) ); - vkCreateFramebuffer = PFN_vkCreateFramebuffer( vkGetDeviceProcAddr( device, "vkCreateFramebuffer" ) ); - vkDestroyFramebuffer = PFN_vkDestroyFramebuffer( vkGetDeviceProcAddr( device, "vkDestroyFramebuffer" ) ); - vkCreateRenderPass = PFN_vkCreateRenderPass( vkGetDeviceProcAddr( device, "vkCreateRenderPass" ) ); - vkDestroyRenderPass = PFN_vkDestroyRenderPass( vkGetDeviceProcAddr( device, "vkDestroyRenderPass" ) ); - vkGetRenderAreaGranularity = PFN_vkGetRenderAreaGranularity( vkGetDeviceProcAddr( device, "vkGetRenderAreaGranularity" ) ); - vkCreateCommandPool = PFN_vkCreateCommandPool( vkGetDeviceProcAddr( device, "vkCreateCommandPool" ) ); - vkDestroyCommandPool = PFN_vkDestroyCommandPool( vkGetDeviceProcAddr( device, "vkDestroyCommandPool" ) ); - vkResetCommandPool = PFN_vkResetCommandPool( vkGetDeviceProcAddr( device, 
"vkResetCommandPool" ) ); - vkAllocateCommandBuffers = PFN_vkAllocateCommandBuffers( vkGetDeviceProcAddr( device, "vkAllocateCommandBuffers" ) ); - vkFreeCommandBuffers = PFN_vkFreeCommandBuffers( vkGetDeviceProcAddr( device, "vkFreeCommandBuffers" ) ); - vkBeginCommandBuffer = PFN_vkBeginCommandBuffer( vkGetDeviceProcAddr( device, "vkBeginCommandBuffer" ) ); - vkEndCommandBuffer = PFN_vkEndCommandBuffer( vkGetDeviceProcAddr( device, "vkEndCommandBuffer" ) ); - vkResetCommandBuffer = PFN_vkResetCommandBuffer( vkGetDeviceProcAddr( device, "vkResetCommandBuffer" ) ); - vkCmdBindPipeline = PFN_vkCmdBindPipeline( vkGetDeviceProcAddr( device, "vkCmdBindPipeline" ) ); - vkCmdSetViewport = PFN_vkCmdSetViewport( vkGetDeviceProcAddr( device, "vkCmdSetViewport" ) ); - vkCmdSetScissor = PFN_vkCmdSetScissor( vkGetDeviceProcAddr( device, "vkCmdSetScissor" ) ); - vkCmdSetLineWidth = PFN_vkCmdSetLineWidth( vkGetDeviceProcAddr( device, "vkCmdSetLineWidth" ) ); - vkCmdSetDepthBias = PFN_vkCmdSetDepthBias( vkGetDeviceProcAddr( device, "vkCmdSetDepthBias" ) ); - vkCmdSetBlendConstants = PFN_vkCmdSetBlendConstants( vkGetDeviceProcAddr( device, "vkCmdSetBlendConstants" ) ); - vkCmdSetDepthBounds = PFN_vkCmdSetDepthBounds( vkGetDeviceProcAddr( device, "vkCmdSetDepthBounds" ) ); - vkCmdSetStencilCompareMask = PFN_vkCmdSetStencilCompareMask( vkGetDeviceProcAddr( device, "vkCmdSetStencilCompareMask" ) ); - vkCmdSetStencilWriteMask = PFN_vkCmdSetStencilWriteMask( vkGetDeviceProcAddr( device, "vkCmdSetStencilWriteMask" ) ); - vkCmdSetStencilReference = PFN_vkCmdSetStencilReference( vkGetDeviceProcAddr( device, "vkCmdSetStencilReference" ) ); - vkCmdBindDescriptorSets = PFN_vkCmdBindDescriptorSets( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorSets" ) ); - vkCmdBindIndexBuffer = PFN_vkCmdBindIndexBuffer( vkGetDeviceProcAddr( device, "vkCmdBindIndexBuffer" ) ); - vkCmdBindVertexBuffers = PFN_vkCmdBindVertexBuffers( vkGetDeviceProcAddr( device, "vkCmdBindVertexBuffers" ) ); - vkCmdDraw = PFN_vkCmdDraw( vkGetDeviceProcAddr( device, "vkCmdDraw" ) ); - vkCmdDrawIndexed = PFN_vkCmdDrawIndexed( vkGetDeviceProcAddr( device, "vkCmdDrawIndexed" ) ); - vkCmdDrawIndirect = PFN_vkCmdDrawIndirect( vkGetDeviceProcAddr( device, "vkCmdDrawIndirect" ) ); - vkCmdDrawIndexedIndirect = PFN_vkCmdDrawIndexedIndirect( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirect" ) ); - vkCmdDispatch = PFN_vkCmdDispatch( vkGetDeviceProcAddr( device, "vkCmdDispatch" ) ); - vkCmdDispatchIndirect = PFN_vkCmdDispatchIndirect( vkGetDeviceProcAddr( device, "vkCmdDispatchIndirect" ) ); - vkCmdCopyBuffer = PFN_vkCmdCopyBuffer( vkGetDeviceProcAddr( device, "vkCmdCopyBuffer" ) ); - vkCmdCopyImage = PFN_vkCmdCopyImage( vkGetDeviceProcAddr( device, "vkCmdCopyImage" ) ); - vkCmdBlitImage = PFN_vkCmdBlitImage( vkGetDeviceProcAddr( device, "vkCmdBlitImage" ) ); - vkCmdCopyBufferToImage = PFN_vkCmdCopyBufferToImage( vkGetDeviceProcAddr( device, "vkCmdCopyBufferToImage" ) ); - vkCmdCopyImageToBuffer = PFN_vkCmdCopyImageToBuffer( vkGetDeviceProcAddr( device, "vkCmdCopyImageToBuffer" ) ); - vkCmdUpdateBuffer = PFN_vkCmdUpdateBuffer( vkGetDeviceProcAddr( device, "vkCmdUpdateBuffer" ) ); - vkCmdFillBuffer = PFN_vkCmdFillBuffer( vkGetDeviceProcAddr( device, "vkCmdFillBuffer" ) ); - vkCmdClearColorImage = PFN_vkCmdClearColorImage( vkGetDeviceProcAddr( device, "vkCmdClearColorImage" ) ); - vkCmdClearDepthStencilImage = PFN_vkCmdClearDepthStencilImage( vkGetDeviceProcAddr( device, "vkCmdClearDepthStencilImage" ) ); - vkCmdClearAttachments = PFN_vkCmdClearAttachments( 
vkGetDeviceProcAddr( device, "vkCmdClearAttachments" ) ); - vkCmdResolveImage = PFN_vkCmdResolveImage( vkGetDeviceProcAddr( device, "vkCmdResolveImage" ) ); - vkCmdSetEvent = PFN_vkCmdSetEvent( vkGetDeviceProcAddr( device, "vkCmdSetEvent" ) ); - vkCmdResetEvent = PFN_vkCmdResetEvent( vkGetDeviceProcAddr( device, "vkCmdResetEvent" ) ); - vkCmdWaitEvents = PFN_vkCmdWaitEvents( vkGetDeviceProcAddr( device, "vkCmdWaitEvents" ) ); - vkCmdPipelineBarrier = PFN_vkCmdPipelineBarrier( vkGetDeviceProcAddr( device, "vkCmdPipelineBarrier" ) ); - vkCmdBeginQuery = PFN_vkCmdBeginQuery( vkGetDeviceProcAddr( device, "vkCmdBeginQuery" ) ); - vkCmdEndQuery = PFN_vkCmdEndQuery( vkGetDeviceProcAddr( device, "vkCmdEndQuery" ) ); - vkCmdResetQueryPool = PFN_vkCmdResetQueryPool( vkGetDeviceProcAddr( device, "vkCmdResetQueryPool" ) ); - vkCmdWriteTimestamp = PFN_vkCmdWriteTimestamp( vkGetDeviceProcAddr( device, "vkCmdWriteTimestamp" ) ); - vkCmdCopyQueryPoolResults = PFN_vkCmdCopyQueryPoolResults( vkGetDeviceProcAddr( device, "vkCmdCopyQueryPoolResults" ) ); - vkCmdPushConstants = PFN_vkCmdPushConstants( vkGetDeviceProcAddr( device, "vkCmdPushConstants" ) ); - vkCmdBeginRenderPass = PFN_vkCmdBeginRenderPass( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass" ) ); - vkCmdNextSubpass = PFN_vkCmdNextSubpass( vkGetDeviceProcAddr( device, "vkCmdNextSubpass" ) ); - vkCmdEndRenderPass = PFN_vkCmdEndRenderPass( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass" ) ); - vkCmdExecuteCommands = PFN_vkCmdExecuteCommands( vkGetDeviceProcAddr( device, "vkCmdExecuteCommands" ) ); + PFN_vkGetDeviceProcAddr vkGetDeviceProcAddr = 0; + PFN_vkDestroyDevice vkDestroyDevice = 0; + PFN_vkGetDeviceQueue vkGetDeviceQueue = 0; + PFN_vkQueueSubmit vkQueueSubmit = 0; + PFN_vkQueueWaitIdle vkQueueWaitIdle = 0; + PFN_vkDeviceWaitIdle vkDeviceWaitIdle = 0; + PFN_vkAllocateMemory vkAllocateMemory = 0; + PFN_vkFreeMemory vkFreeMemory = 0; + PFN_vkMapMemory vkMapMemory = 0; + PFN_vkUnmapMemory vkUnmapMemory = 0; + PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges = 0; + PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges = 0; + PFN_vkGetDeviceMemoryCommitment vkGetDeviceMemoryCommitment = 0; + PFN_vkBindBufferMemory vkBindBufferMemory = 0; + PFN_vkBindImageMemory vkBindImageMemory = 0; + PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements = 0; + PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements = 0; + PFN_vkGetImageSparseMemoryRequirements vkGetImageSparseMemoryRequirements = 0; + PFN_vkQueueBindSparse vkQueueBindSparse = 0; + PFN_vkCreateFence vkCreateFence = 0; + PFN_vkDestroyFence vkDestroyFence = 0; + PFN_vkResetFences vkResetFences = 0; + PFN_vkGetFenceStatus vkGetFenceStatus = 0; + PFN_vkWaitForFences vkWaitForFences = 0; + PFN_vkCreateSemaphore vkCreateSemaphore = 0; + PFN_vkDestroySemaphore vkDestroySemaphore = 0; + PFN_vkCreateEvent vkCreateEvent = 0; + PFN_vkDestroyEvent vkDestroyEvent = 0; + PFN_vkGetEventStatus vkGetEventStatus = 0; + PFN_vkSetEvent vkSetEvent = 0; + PFN_vkResetEvent vkResetEvent = 0; + PFN_vkCreateQueryPool vkCreateQueryPool = 0; + PFN_vkDestroyQueryPool vkDestroyQueryPool = 0; + PFN_vkGetQueryPoolResults vkGetQueryPoolResults = 0; + PFN_vkCreateBuffer vkCreateBuffer = 0; + PFN_vkDestroyBuffer vkDestroyBuffer = 0; + PFN_vkCreateBufferView vkCreateBufferView = 0; + PFN_vkDestroyBufferView vkDestroyBufferView = 0; + PFN_vkCreateImage vkCreateImage = 0; + PFN_vkDestroyImage vkDestroyImage = 0; + PFN_vkGetImageSubresourceLayout vkGetImageSubresourceLayout = 0; + 
PFN_vkCreateImageView vkCreateImageView = 0; + PFN_vkDestroyImageView vkDestroyImageView = 0; + PFN_vkCreateShaderModule vkCreateShaderModule = 0; + PFN_vkDestroyShaderModule vkDestroyShaderModule = 0; + PFN_vkCreatePipelineCache vkCreatePipelineCache = 0; + PFN_vkDestroyPipelineCache vkDestroyPipelineCache = 0; + PFN_vkGetPipelineCacheData vkGetPipelineCacheData = 0; + PFN_vkMergePipelineCaches vkMergePipelineCaches = 0; + PFN_vkCreateGraphicsPipelines vkCreateGraphicsPipelines = 0; + PFN_vkCreateComputePipelines vkCreateComputePipelines = 0; + PFN_vkDestroyPipeline vkDestroyPipeline = 0; + PFN_vkCreatePipelineLayout vkCreatePipelineLayout = 0; + PFN_vkDestroyPipelineLayout vkDestroyPipelineLayout = 0; + PFN_vkCreateSampler vkCreateSampler = 0; + PFN_vkDestroySampler vkDestroySampler = 0; + PFN_vkCreateDescriptorSetLayout vkCreateDescriptorSetLayout = 0; + PFN_vkDestroyDescriptorSetLayout vkDestroyDescriptorSetLayout = 0; + PFN_vkCreateDescriptorPool vkCreateDescriptorPool = 0; + PFN_vkDestroyDescriptorPool vkDestroyDescriptorPool = 0; + PFN_vkResetDescriptorPool vkResetDescriptorPool = 0; + PFN_vkAllocateDescriptorSets vkAllocateDescriptorSets = 0; + PFN_vkFreeDescriptorSets vkFreeDescriptorSets = 0; + PFN_vkUpdateDescriptorSets vkUpdateDescriptorSets = 0; + PFN_vkCreateFramebuffer vkCreateFramebuffer = 0; + PFN_vkDestroyFramebuffer vkDestroyFramebuffer = 0; + PFN_vkCreateRenderPass vkCreateRenderPass = 0; + PFN_vkDestroyRenderPass vkDestroyRenderPass = 0; + PFN_vkGetRenderAreaGranularity vkGetRenderAreaGranularity = 0; + PFN_vkCreateCommandPool vkCreateCommandPool = 0; + PFN_vkDestroyCommandPool vkDestroyCommandPool = 0; + PFN_vkResetCommandPool vkResetCommandPool = 0; + PFN_vkAllocateCommandBuffers vkAllocateCommandBuffers = 0; + PFN_vkFreeCommandBuffers vkFreeCommandBuffers = 0; + PFN_vkBeginCommandBuffer vkBeginCommandBuffer = 0; + PFN_vkEndCommandBuffer vkEndCommandBuffer = 0; + PFN_vkResetCommandBuffer vkResetCommandBuffer = 0; + PFN_vkCmdBindPipeline vkCmdBindPipeline = 0; + PFN_vkCmdSetViewport vkCmdSetViewport = 0; + PFN_vkCmdSetScissor vkCmdSetScissor = 0; + PFN_vkCmdSetLineWidth vkCmdSetLineWidth = 0; + PFN_vkCmdSetDepthBias vkCmdSetDepthBias = 0; + PFN_vkCmdSetBlendConstants vkCmdSetBlendConstants = 0; + PFN_vkCmdSetDepthBounds vkCmdSetDepthBounds = 0; + PFN_vkCmdSetStencilCompareMask vkCmdSetStencilCompareMask = 0; + PFN_vkCmdSetStencilWriteMask vkCmdSetStencilWriteMask = 0; + PFN_vkCmdSetStencilReference vkCmdSetStencilReference = 0; + PFN_vkCmdBindDescriptorSets vkCmdBindDescriptorSets = 0; + PFN_vkCmdBindIndexBuffer vkCmdBindIndexBuffer = 0; + PFN_vkCmdBindVertexBuffers vkCmdBindVertexBuffers = 0; + PFN_vkCmdDraw vkCmdDraw = 0; + PFN_vkCmdDrawIndexed vkCmdDrawIndexed = 0; + PFN_vkCmdDrawIndirect vkCmdDrawIndirect = 0; + PFN_vkCmdDrawIndexedIndirect vkCmdDrawIndexedIndirect = 0; + PFN_vkCmdDispatch vkCmdDispatch = 0; + PFN_vkCmdDispatchIndirect vkCmdDispatchIndirect = 0; + PFN_vkCmdCopyBuffer vkCmdCopyBuffer = 0; + PFN_vkCmdCopyImage vkCmdCopyImage = 0; + PFN_vkCmdBlitImage vkCmdBlitImage = 0; + PFN_vkCmdCopyBufferToImage vkCmdCopyBufferToImage = 0; + PFN_vkCmdCopyImageToBuffer vkCmdCopyImageToBuffer = 0; + PFN_vkCmdUpdateBuffer vkCmdUpdateBuffer = 0; + PFN_vkCmdFillBuffer vkCmdFillBuffer = 0; + PFN_vkCmdClearColorImage vkCmdClearColorImage = 0; + PFN_vkCmdClearDepthStencilImage vkCmdClearDepthStencilImage = 0; + PFN_vkCmdClearAttachments vkCmdClearAttachments = 0; + PFN_vkCmdResolveImage vkCmdResolveImage = 0; + PFN_vkCmdSetEvent vkCmdSetEvent = 0; + PFN_vkCmdResetEvent 
vkCmdResetEvent = 0; + PFN_vkCmdWaitEvents vkCmdWaitEvents = 0; + PFN_vkCmdPipelineBarrier vkCmdPipelineBarrier = 0; + PFN_vkCmdBeginQuery vkCmdBeginQuery = 0; + PFN_vkCmdEndQuery vkCmdEndQuery = 0; + PFN_vkCmdResetQueryPool vkCmdResetQueryPool = 0; + PFN_vkCmdWriteTimestamp vkCmdWriteTimestamp = 0; + PFN_vkCmdCopyQueryPoolResults vkCmdCopyQueryPoolResults = 0; + PFN_vkCmdPushConstants vkCmdPushConstants = 0; + PFN_vkCmdBeginRenderPass vkCmdBeginRenderPass = 0; + PFN_vkCmdNextSubpass vkCmdNextSubpass = 0; + PFN_vkCmdEndRenderPass vkCmdEndRenderPass = 0; + PFN_vkCmdExecuteCommands vkCmdExecuteCommands = 0; //=== VK_VERSION_1_1 === - vkBindBufferMemory2 = PFN_vkBindBufferMemory2( vkGetDeviceProcAddr( device, "vkBindBufferMemory2" ) ); - vkBindImageMemory2 = PFN_vkBindImageMemory2( vkGetDeviceProcAddr( device, "vkBindImageMemory2" ) ); - vkGetDeviceGroupPeerMemoryFeatures = PFN_vkGetDeviceGroupPeerMemoryFeatures( vkGetDeviceProcAddr( device, "vkGetDeviceGroupPeerMemoryFeatures" ) ); - vkCmdSetDeviceMask = PFN_vkCmdSetDeviceMask( vkGetDeviceProcAddr( device, "vkCmdSetDeviceMask" ) ); - vkCmdDispatchBase = PFN_vkCmdDispatchBase( vkGetDeviceProcAddr( device, "vkCmdDispatchBase" ) ); - vkGetImageMemoryRequirements2 = PFN_vkGetImageMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements2" ) ); - vkGetBufferMemoryRequirements2 = PFN_vkGetBufferMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements2" ) ); - vkGetImageSparseMemoryRequirements2 = PFN_vkGetImageSparseMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements2" ) ); - vkTrimCommandPool = PFN_vkTrimCommandPool( vkGetDeviceProcAddr( device, "vkTrimCommandPool" ) ); - vkGetDeviceQueue2 = PFN_vkGetDeviceQueue2( vkGetDeviceProcAddr( device, "vkGetDeviceQueue2" ) ); - vkCreateSamplerYcbcrConversion = PFN_vkCreateSamplerYcbcrConversion( vkGetDeviceProcAddr( device, "vkCreateSamplerYcbcrConversion" ) ); - vkDestroySamplerYcbcrConversion = PFN_vkDestroySamplerYcbcrConversion( vkGetDeviceProcAddr( device, "vkDestroySamplerYcbcrConversion" ) ); - vkCreateDescriptorUpdateTemplate = PFN_vkCreateDescriptorUpdateTemplate( vkGetDeviceProcAddr( device, "vkCreateDescriptorUpdateTemplate" ) ); - vkDestroyDescriptorUpdateTemplate = PFN_vkDestroyDescriptorUpdateTemplate( vkGetDeviceProcAddr( device, "vkDestroyDescriptorUpdateTemplate" ) ); - vkUpdateDescriptorSetWithTemplate = PFN_vkUpdateDescriptorSetWithTemplate( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSetWithTemplate" ) ); - vkGetDescriptorSetLayoutSupport = PFN_vkGetDescriptorSetLayoutSupport( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutSupport" ) ); + PFN_vkBindBufferMemory2 vkBindBufferMemory2 = 0; + PFN_vkBindImageMemory2 vkBindImageMemory2 = 0; + PFN_vkGetDeviceGroupPeerMemoryFeatures vkGetDeviceGroupPeerMemoryFeatures = 0; + PFN_vkCmdSetDeviceMask vkCmdSetDeviceMask = 0; + PFN_vkCmdDispatchBase vkCmdDispatchBase = 0; + PFN_vkGetImageMemoryRequirements2 vkGetImageMemoryRequirements2 = 0; + PFN_vkGetBufferMemoryRequirements2 vkGetBufferMemoryRequirements2 = 0; + PFN_vkGetImageSparseMemoryRequirements2 vkGetImageSparseMemoryRequirements2 = 0; + PFN_vkTrimCommandPool vkTrimCommandPool = 0; + PFN_vkGetDeviceQueue2 vkGetDeviceQueue2 = 0; + PFN_vkCreateSamplerYcbcrConversion vkCreateSamplerYcbcrConversion = 0; + PFN_vkDestroySamplerYcbcrConversion vkDestroySamplerYcbcrConversion = 0; + PFN_vkCreateDescriptorUpdateTemplate vkCreateDescriptorUpdateTemplate = 0; + PFN_vkDestroyDescriptorUpdateTemplate 
vkDestroyDescriptorUpdateTemplate = 0; + PFN_vkUpdateDescriptorSetWithTemplate vkUpdateDescriptorSetWithTemplate = 0; + PFN_vkGetDescriptorSetLayoutSupport vkGetDescriptorSetLayoutSupport = 0; //=== VK_VERSION_1_2 === - vkCmdDrawIndirectCount = PFN_vkCmdDrawIndirectCount( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCount" ) ); - vkCmdDrawIndexedIndirectCount = PFN_vkCmdDrawIndexedIndirectCount( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCount" ) ); - vkCreateRenderPass2 = PFN_vkCreateRenderPass2( vkGetDeviceProcAddr( device, "vkCreateRenderPass2" ) ); - vkCmdBeginRenderPass2 = PFN_vkCmdBeginRenderPass2( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass2" ) ); - vkCmdNextSubpass2 = PFN_vkCmdNextSubpass2( vkGetDeviceProcAddr( device, "vkCmdNextSubpass2" ) ); - vkCmdEndRenderPass2 = PFN_vkCmdEndRenderPass2( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass2" ) ); - vkResetQueryPool = PFN_vkResetQueryPool( vkGetDeviceProcAddr( device, "vkResetQueryPool" ) ); - vkGetSemaphoreCounterValue = PFN_vkGetSemaphoreCounterValue( vkGetDeviceProcAddr( device, "vkGetSemaphoreCounterValue" ) ); - vkWaitSemaphores = PFN_vkWaitSemaphores( vkGetDeviceProcAddr( device, "vkWaitSemaphores" ) ); - vkSignalSemaphore = PFN_vkSignalSemaphore( vkGetDeviceProcAddr( device, "vkSignalSemaphore" ) ); - vkGetBufferDeviceAddress = PFN_vkGetBufferDeviceAddress( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddress" ) ); - vkGetBufferOpaqueCaptureAddress = PFN_vkGetBufferOpaqueCaptureAddress( vkGetDeviceProcAddr( device, "vkGetBufferOpaqueCaptureAddress" ) ); - vkGetDeviceMemoryOpaqueCaptureAddress = - PFN_vkGetDeviceMemoryOpaqueCaptureAddress( vkGetDeviceProcAddr( device, "vkGetDeviceMemoryOpaqueCaptureAddress" ) ); + PFN_vkCmdDrawIndirectCount vkCmdDrawIndirectCount = 0; + PFN_vkCmdDrawIndexedIndirectCount vkCmdDrawIndexedIndirectCount = 0; + PFN_vkCreateRenderPass2 vkCreateRenderPass2 = 0; + PFN_vkCmdBeginRenderPass2 vkCmdBeginRenderPass2 = 0; + PFN_vkCmdNextSubpass2 vkCmdNextSubpass2 = 0; + PFN_vkCmdEndRenderPass2 vkCmdEndRenderPass2 = 0; + PFN_vkResetQueryPool vkResetQueryPool = 0; + PFN_vkGetSemaphoreCounterValue vkGetSemaphoreCounterValue = 0; + PFN_vkWaitSemaphores vkWaitSemaphores = 0; + PFN_vkSignalSemaphore vkSignalSemaphore = 0; + PFN_vkGetBufferDeviceAddress vkGetBufferDeviceAddress = 0; + PFN_vkGetBufferOpaqueCaptureAddress vkGetBufferOpaqueCaptureAddress = 0; + PFN_vkGetDeviceMemoryOpaqueCaptureAddress vkGetDeviceMemoryOpaqueCaptureAddress = 0; //=== VK_VERSION_1_3 === - vkCreatePrivateDataSlot = PFN_vkCreatePrivateDataSlot( vkGetDeviceProcAddr( device, "vkCreatePrivateDataSlot" ) ); - vkDestroyPrivateDataSlot = PFN_vkDestroyPrivateDataSlot( vkGetDeviceProcAddr( device, "vkDestroyPrivateDataSlot" ) ); - vkSetPrivateData = PFN_vkSetPrivateData( vkGetDeviceProcAddr( device, "vkSetPrivateData" ) ); - vkGetPrivateData = PFN_vkGetPrivateData( vkGetDeviceProcAddr( device, "vkGetPrivateData" ) ); - vkCmdSetEvent2 = PFN_vkCmdSetEvent2( vkGetDeviceProcAddr( device, "vkCmdSetEvent2" ) ); - vkCmdResetEvent2 = PFN_vkCmdResetEvent2( vkGetDeviceProcAddr( device, "vkCmdResetEvent2" ) ); - vkCmdWaitEvents2 = PFN_vkCmdWaitEvents2( vkGetDeviceProcAddr( device, "vkCmdWaitEvents2" ) ); - vkCmdPipelineBarrier2 = PFN_vkCmdPipelineBarrier2( vkGetDeviceProcAddr( device, "vkCmdPipelineBarrier2" ) ); - vkCmdWriteTimestamp2 = PFN_vkCmdWriteTimestamp2( vkGetDeviceProcAddr( device, "vkCmdWriteTimestamp2" ) ); - vkQueueSubmit2 = PFN_vkQueueSubmit2( vkGetDeviceProcAddr( device, "vkQueueSubmit2" ) ); - vkCmdCopyBuffer2 = 
PFN_vkCmdCopyBuffer2( vkGetDeviceProcAddr( device, "vkCmdCopyBuffer2" ) ); - vkCmdCopyImage2 = PFN_vkCmdCopyImage2( vkGetDeviceProcAddr( device, "vkCmdCopyImage2" ) ); - vkCmdCopyBufferToImage2 = PFN_vkCmdCopyBufferToImage2( vkGetDeviceProcAddr( device, "vkCmdCopyBufferToImage2" ) ); - vkCmdCopyImageToBuffer2 = PFN_vkCmdCopyImageToBuffer2( vkGetDeviceProcAddr( device, "vkCmdCopyImageToBuffer2" ) ); - vkCmdBlitImage2 = PFN_vkCmdBlitImage2( vkGetDeviceProcAddr( device, "vkCmdBlitImage2" ) ); - vkCmdResolveImage2 = PFN_vkCmdResolveImage2( vkGetDeviceProcAddr( device, "vkCmdResolveImage2" ) ); - vkCmdBeginRendering = PFN_vkCmdBeginRendering( vkGetDeviceProcAddr( device, "vkCmdBeginRendering" ) ); - vkCmdEndRendering = PFN_vkCmdEndRendering( vkGetDeviceProcAddr( device, "vkCmdEndRendering" ) ); - vkCmdSetCullMode = PFN_vkCmdSetCullMode( vkGetDeviceProcAddr( device, "vkCmdSetCullMode" ) ); - vkCmdSetFrontFace = PFN_vkCmdSetFrontFace( vkGetDeviceProcAddr( device, "vkCmdSetFrontFace" ) ); - vkCmdSetPrimitiveTopology = PFN_vkCmdSetPrimitiveTopology( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveTopology" ) ); - vkCmdSetViewportWithCount = PFN_vkCmdSetViewportWithCount( vkGetDeviceProcAddr( device, "vkCmdSetViewportWithCount" ) ); - vkCmdSetScissorWithCount = PFN_vkCmdSetScissorWithCount( vkGetDeviceProcAddr( device, "vkCmdSetScissorWithCount" ) ); - vkCmdBindVertexBuffers2 = PFN_vkCmdBindVertexBuffers2( vkGetDeviceProcAddr( device, "vkCmdBindVertexBuffers2" ) ); - vkCmdSetDepthTestEnable = PFN_vkCmdSetDepthTestEnable( vkGetDeviceProcAddr( device, "vkCmdSetDepthTestEnable" ) ); - vkCmdSetDepthWriteEnable = PFN_vkCmdSetDepthWriteEnable( vkGetDeviceProcAddr( device, "vkCmdSetDepthWriteEnable" ) ); - vkCmdSetDepthCompareOp = PFN_vkCmdSetDepthCompareOp( vkGetDeviceProcAddr( device, "vkCmdSetDepthCompareOp" ) ); - vkCmdSetDepthBoundsTestEnable = PFN_vkCmdSetDepthBoundsTestEnable( vkGetDeviceProcAddr( device, "vkCmdSetDepthBoundsTestEnable" ) ); - vkCmdSetStencilTestEnable = PFN_vkCmdSetStencilTestEnable( vkGetDeviceProcAddr( device, "vkCmdSetStencilTestEnable" ) ); - vkCmdSetStencilOp = PFN_vkCmdSetStencilOp( vkGetDeviceProcAddr( device, "vkCmdSetStencilOp" ) ); - vkCmdSetRasterizerDiscardEnable = PFN_vkCmdSetRasterizerDiscardEnable( vkGetDeviceProcAddr( device, "vkCmdSetRasterizerDiscardEnable" ) ); - vkCmdSetDepthBiasEnable = PFN_vkCmdSetDepthBiasEnable( vkGetDeviceProcAddr( device, "vkCmdSetDepthBiasEnable" ) ); - vkCmdSetPrimitiveRestartEnable = PFN_vkCmdSetPrimitiveRestartEnable( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveRestartEnable" ) ); - vkGetDeviceBufferMemoryRequirements = PFN_vkGetDeviceBufferMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetDeviceBufferMemoryRequirements" ) ); - vkGetDeviceImageMemoryRequirements = PFN_vkGetDeviceImageMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetDeviceImageMemoryRequirements" ) ); - vkGetDeviceImageSparseMemoryRequirements = - PFN_vkGetDeviceImageSparseMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetDeviceImageSparseMemoryRequirements" ) ); + PFN_vkCreatePrivateDataSlot vkCreatePrivateDataSlot = 0; + PFN_vkDestroyPrivateDataSlot vkDestroyPrivateDataSlot = 0; + PFN_vkSetPrivateData vkSetPrivateData = 0; + PFN_vkGetPrivateData vkGetPrivateData = 0; + PFN_vkCmdSetEvent2 vkCmdSetEvent2 = 0; + PFN_vkCmdResetEvent2 vkCmdResetEvent2 = 0; + PFN_vkCmdWaitEvents2 vkCmdWaitEvents2 = 0; + PFN_vkCmdPipelineBarrier2 vkCmdPipelineBarrier2 = 0; + PFN_vkCmdWriteTimestamp2 vkCmdWriteTimestamp2 = 0; + PFN_vkQueueSubmit2 vkQueueSubmit2 = 0; + 
PFN_vkCmdCopyBuffer2 vkCmdCopyBuffer2 = 0; + PFN_vkCmdCopyImage2 vkCmdCopyImage2 = 0; + PFN_vkCmdCopyBufferToImage2 vkCmdCopyBufferToImage2 = 0; + PFN_vkCmdCopyImageToBuffer2 vkCmdCopyImageToBuffer2 = 0; + PFN_vkCmdBlitImage2 vkCmdBlitImage2 = 0; + PFN_vkCmdResolveImage2 vkCmdResolveImage2 = 0; + PFN_vkCmdBeginRendering vkCmdBeginRendering = 0; + PFN_vkCmdEndRendering vkCmdEndRendering = 0; + PFN_vkCmdSetCullMode vkCmdSetCullMode = 0; + PFN_vkCmdSetFrontFace vkCmdSetFrontFace = 0; + PFN_vkCmdSetPrimitiveTopology vkCmdSetPrimitiveTopology = 0; + PFN_vkCmdSetViewportWithCount vkCmdSetViewportWithCount = 0; + PFN_vkCmdSetScissorWithCount vkCmdSetScissorWithCount = 0; + PFN_vkCmdBindVertexBuffers2 vkCmdBindVertexBuffers2 = 0; + PFN_vkCmdSetDepthTestEnable vkCmdSetDepthTestEnable = 0; + PFN_vkCmdSetDepthWriteEnable vkCmdSetDepthWriteEnable = 0; + PFN_vkCmdSetDepthCompareOp vkCmdSetDepthCompareOp = 0; + PFN_vkCmdSetDepthBoundsTestEnable vkCmdSetDepthBoundsTestEnable = 0; + PFN_vkCmdSetStencilTestEnable vkCmdSetStencilTestEnable = 0; + PFN_vkCmdSetStencilOp vkCmdSetStencilOp = 0; + PFN_vkCmdSetRasterizerDiscardEnable vkCmdSetRasterizerDiscardEnable = 0; + PFN_vkCmdSetDepthBiasEnable vkCmdSetDepthBiasEnable = 0; + PFN_vkCmdSetPrimitiveRestartEnable vkCmdSetPrimitiveRestartEnable = 0; + PFN_vkGetDeviceBufferMemoryRequirements vkGetDeviceBufferMemoryRequirements = 0; + PFN_vkGetDeviceImageMemoryRequirements vkGetDeviceImageMemoryRequirements = 0; + PFN_vkGetDeviceImageSparseMemoryRequirements vkGetDeviceImageSparseMemoryRequirements = 0; //=== VK_KHR_swapchain === - vkCreateSwapchainKHR = PFN_vkCreateSwapchainKHR( vkGetDeviceProcAddr( device, "vkCreateSwapchainKHR" ) ); - vkDestroySwapchainKHR = PFN_vkDestroySwapchainKHR( vkGetDeviceProcAddr( device, "vkDestroySwapchainKHR" ) ); - vkGetSwapchainImagesKHR = PFN_vkGetSwapchainImagesKHR( vkGetDeviceProcAddr( device, "vkGetSwapchainImagesKHR" ) ); - vkAcquireNextImageKHR = PFN_vkAcquireNextImageKHR( vkGetDeviceProcAddr( device, "vkAcquireNextImageKHR" ) ); - vkQueuePresentKHR = PFN_vkQueuePresentKHR( vkGetDeviceProcAddr( device, "vkQueuePresentKHR" ) ); - vkGetDeviceGroupPresentCapabilitiesKHR = - PFN_vkGetDeviceGroupPresentCapabilitiesKHR( vkGetDeviceProcAddr( device, "vkGetDeviceGroupPresentCapabilitiesKHR" ) ); - vkGetDeviceGroupSurfacePresentModesKHR = - PFN_vkGetDeviceGroupSurfacePresentModesKHR( vkGetDeviceProcAddr( device, "vkGetDeviceGroupSurfacePresentModesKHR" ) ); - vkAcquireNextImage2KHR = PFN_vkAcquireNextImage2KHR( vkGetDeviceProcAddr( device, "vkAcquireNextImage2KHR" ) ); + PFN_vkCreateSwapchainKHR vkCreateSwapchainKHR = 0; + PFN_vkDestroySwapchainKHR vkDestroySwapchainKHR = 0; + PFN_vkGetSwapchainImagesKHR vkGetSwapchainImagesKHR = 0; + PFN_vkAcquireNextImageKHR vkAcquireNextImageKHR = 0; + PFN_vkQueuePresentKHR vkQueuePresentKHR = 0; + PFN_vkGetDeviceGroupPresentCapabilitiesKHR vkGetDeviceGroupPresentCapabilitiesKHR = 0; + PFN_vkGetDeviceGroupSurfacePresentModesKHR vkGetDeviceGroupSurfacePresentModesKHR = 0; + PFN_vkAcquireNextImage2KHR vkAcquireNextImage2KHR = 0; //=== VK_KHR_display_swapchain === - vkCreateSharedSwapchainsKHR = PFN_vkCreateSharedSwapchainsKHR( vkGetDeviceProcAddr( device, "vkCreateSharedSwapchainsKHR" ) ); + PFN_vkCreateSharedSwapchainsKHR vkCreateSharedSwapchainsKHR = 0; //=== VK_EXT_debug_marker === - vkDebugMarkerSetObjectTagEXT = PFN_vkDebugMarkerSetObjectTagEXT( vkGetDeviceProcAddr( device, "vkDebugMarkerSetObjectTagEXT" ) ); - vkDebugMarkerSetObjectNameEXT = PFN_vkDebugMarkerSetObjectNameEXT( 
vkGetDeviceProcAddr( device, "vkDebugMarkerSetObjectNameEXT" ) ); - vkCmdDebugMarkerBeginEXT = PFN_vkCmdDebugMarkerBeginEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerBeginEXT" ) ); - vkCmdDebugMarkerEndEXT = PFN_vkCmdDebugMarkerEndEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerEndEXT" ) ); - vkCmdDebugMarkerInsertEXT = PFN_vkCmdDebugMarkerInsertEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerInsertEXT" ) ); + PFN_vkDebugMarkerSetObjectTagEXT vkDebugMarkerSetObjectTagEXT = 0; + PFN_vkDebugMarkerSetObjectNameEXT vkDebugMarkerSetObjectNameEXT = 0; + PFN_vkCmdDebugMarkerBeginEXT vkCmdDebugMarkerBeginEXT = 0; + PFN_vkCmdDebugMarkerEndEXT vkCmdDebugMarkerEndEXT = 0; + PFN_vkCmdDebugMarkerInsertEXT vkCmdDebugMarkerInsertEXT = 0; //=== VK_KHR_video_queue === - vkCreateVideoSessionKHR = PFN_vkCreateVideoSessionKHR( vkGetDeviceProcAddr( device, "vkCreateVideoSessionKHR" ) ); - vkDestroyVideoSessionKHR = PFN_vkDestroyVideoSessionKHR( vkGetDeviceProcAddr( device, "vkDestroyVideoSessionKHR" ) ); - vkGetVideoSessionMemoryRequirementsKHR = - PFN_vkGetVideoSessionMemoryRequirementsKHR( vkGetDeviceProcAddr( device, "vkGetVideoSessionMemoryRequirementsKHR" ) ); - vkBindVideoSessionMemoryKHR = PFN_vkBindVideoSessionMemoryKHR( vkGetDeviceProcAddr( device, "vkBindVideoSessionMemoryKHR" ) ); - vkCreateVideoSessionParametersKHR = PFN_vkCreateVideoSessionParametersKHR( vkGetDeviceProcAddr( device, "vkCreateVideoSessionParametersKHR" ) ); - vkUpdateVideoSessionParametersKHR = PFN_vkUpdateVideoSessionParametersKHR( vkGetDeviceProcAddr( device, "vkUpdateVideoSessionParametersKHR" ) ); - vkDestroyVideoSessionParametersKHR = PFN_vkDestroyVideoSessionParametersKHR( vkGetDeviceProcAddr( device, "vkDestroyVideoSessionParametersKHR" ) ); - vkCmdBeginVideoCodingKHR = PFN_vkCmdBeginVideoCodingKHR( vkGetDeviceProcAddr( device, "vkCmdBeginVideoCodingKHR" ) ); - vkCmdEndVideoCodingKHR = PFN_vkCmdEndVideoCodingKHR( vkGetDeviceProcAddr( device, "vkCmdEndVideoCodingKHR" ) ); - vkCmdControlVideoCodingKHR = PFN_vkCmdControlVideoCodingKHR( vkGetDeviceProcAddr( device, "vkCmdControlVideoCodingKHR" ) ); + PFN_vkCreateVideoSessionKHR vkCreateVideoSessionKHR = 0; + PFN_vkDestroyVideoSessionKHR vkDestroyVideoSessionKHR = 0; + PFN_vkGetVideoSessionMemoryRequirementsKHR vkGetVideoSessionMemoryRequirementsKHR = 0; + PFN_vkBindVideoSessionMemoryKHR vkBindVideoSessionMemoryKHR = 0; + PFN_vkCreateVideoSessionParametersKHR vkCreateVideoSessionParametersKHR = 0; + PFN_vkUpdateVideoSessionParametersKHR vkUpdateVideoSessionParametersKHR = 0; + PFN_vkDestroyVideoSessionParametersKHR vkDestroyVideoSessionParametersKHR = 0; + PFN_vkCmdBeginVideoCodingKHR vkCmdBeginVideoCodingKHR = 0; + PFN_vkCmdEndVideoCodingKHR vkCmdEndVideoCodingKHR = 0; + PFN_vkCmdControlVideoCodingKHR vkCmdControlVideoCodingKHR = 0; //=== VK_KHR_video_decode_queue === - vkCmdDecodeVideoKHR = PFN_vkCmdDecodeVideoKHR( vkGetDeviceProcAddr( device, "vkCmdDecodeVideoKHR" ) ); + PFN_vkCmdDecodeVideoKHR vkCmdDecodeVideoKHR = 0; //=== VK_EXT_transform_feedback === - vkCmdBindTransformFeedbackBuffersEXT = - PFN_vkCmdBindTransformFeedbackBuffersEXT( vkGetDeviceProcAddr( device, "vkCmdBindTransformFeedbackBuffersEXT" ) ); - vkCmdBeginTransformFeedbackEXT = PFN_vkCmdBeginTransformFeedbackEXT( vkGetDeviceProcAddr( device, "vkCmdBeginTransformFeedbackEXT" ) ); - vkCmdEndTransformFeedbackEXT = PFN_vkCmdEndTransformFeedbackEXT( vkGetDeviceProcAddr( device, "vkCmdEndTransformFeedbackEXT" ) ); - vkCmdBeginQueryIndexedEXT = PFN_vkCmdBeginQueryIndexedEXT( vkGetDeviceProcAddr( device, 
"vkCmdBeginQueryIndexedEXT" ) ); - vkCmdEndQueryIndexedEXT = PFN_vkCmdEndQueryIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdEndQueryIndexedEXT" ) ); - vkCmdDrawIndirectByteCountEXT = PFN_vkCmdDrawIndirectByteCountEXT( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectByteCountEXT" ) ); + PFN_vkCmdBindTransformFeedbackBuffersEXT vkCmdBindTransformFeedbackBuffersEXT = 0; + PFN_vkCmdBeginTransformFeedbackEXT vkCmdBeginTransformFeedbackEXT = 0; + PFN_vkCmdEndTransformFeedbackEXT vkCmdEndTransformFeedbackEXT = 0; + PFN_vkCmdBeginQueryIndexedEXT vkCmdBeginQueryIndexedEXT = 0; + PFN_vkCmdEndQueryIndexedEXT vkCmdEndQueryIndexedEXT = 0; + PFN_vkCmdDrawIndirectByteCountEXT vkCmdDrawIndirectByteCountEXT = 0; //=== VK_NVX_binary_import === - vkCreateCuModuleNVX = PFN_vkCreateCuModuleNVX( vkGetDeviceProcAddr( device, "vkCreateCuModuleNVX" ) ); - vkCreateCuFunctionNVX = PFN_vkCreateCuFunctionNVX( vkGetDeviceProcAddr( device, "vkCreateCuFunctionNVX" ) ); - vkDestroyCuModuleNVX = PFN_vkDestroyCuModuleNVX( vkGetDeviceProcAddr( device, "vkDestroyCuModuleNVX" ) ); - vkDestroyCuFunctionNVX = PFN_vkDestroyCuFunctionNVX( vkGetDeviceProcAddr( device, "vkDestroyCuFunctionNVX" ) ); - vkCmdCuLaunchKernelNVX = PFN_vkCmdCuLaunchKernelNVX( vkGetDeviceProcAddr( device, "vkCmdCuLaunchKernelNVX" ) ); + PFN_vkCreateCuModuleNVX vkCreateCuModuleNVX = 0; + PFN_vkCreateCuFunctionNVX vkCreateCuFunctionNVX = 0; + PFN_vkDestroyCuModuleNVX vkDestroyCuModuleNVX = 0; + PFN_vkDestroyCuFunctionNVX vkDestroyCuFunctionNVX = 0; + PFN_vkCmdCuLaunchKernelNVX vkCmdCuLaunchKernelNVX = 0; //=== VK_NVX_image_view_handle === - vkGetImageViewHandleNVX = PFN_vkGetImageViewHandleNVX( vkGetDeviceProcAddr( device, "vkGetImageViewHandleNVX" ) ); - vkGetImageViewAddressNVX = PFN_vkGetImageViewAddressNVX( vkGetDeviceProcAddr( device, "vkGetImageViewAddressNVX" ) ); + PFN_vkGetImageViewHandleNVX vkGetImageViewHandleNVX = 0; + PFN_vkGetImageViewAddressNVX vkGetImageViewAddressNVX = 0; //=== VK_AMD_draw_indirect_count === - vkCmdDrawIndirectCountAMD = PFN_vkCmdDrawIndirectCountAMD( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCountAMD" ) ); - if ( !vkCmdDrawIndirectCount ) - vkCmdDrawIndirectCount = vkCmdDrawIndirectCountAMD; - vkCmdDrawIndexedIndirectCountAMD = PFN_vkCmdDrawIndexedIndirectCountAMD( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCountAMD" ) ); - if ( !vkCmdDrawIndexedIndirectCount ) - vkCmdDrawIndexedIndirectCount = vkCmdDrawIndexedIndirectCountAMD; + PFN_vkCmdDrawIndirectCountAMD vkCmdDrawIndirectCountAMD = 0; + PFN_vkCmdDrawIndexedIndirectCountAMD vkCmdDrawIndexedIndirectCountAMD = 0; //=== VK_AMD_shader_info === - vkGetShaderInfoAMD = PFN_vkGetShaderInfoAMD( vkGetDeviceProcAddr( device, "vkGetShaderInfoAMD" ) ); + PFN_vkGetShaderInfoAMD vkGetShaderInfoAMD = 0; //=== VK_KHR_dynamic_rendering === - vkCmdBeginRenderingKHR = PFN_vkCmdBeginRenderingKHR( vkGetDeviceProcAddr( device, "vkCmdBeginRenderingKHR" ) ); - if ( !vkCmdBeginRendering ) - vkCmdBeginRendering = vkCmdBeginRenderingKHR; - vkCmdEndRenderingKHR = PFN_vkCmdEndRenderingKHR( vkGetDeviceProcAddr( device, "vkCmdEndRenderingKHR" ) ); - if ( !vkCmdEndRendering ) - vkCmdEndRendering = vkCmdEndRenderingKHR; + PFN_vkCmdBeginRenderingKHR vkCmdBeginRenderingKHR = 0; + PFN_vkCmdEndRenderingKHR vkCmdEndRenderingKHR = 0; # if defined( VK_USE_PLATFORM_WIN32_KHR ) //=== VK_NV_external_memory_win32 === - vkGetMemoryWin32HandleNV = PFN_vkGetMemoryWin32HandleNV( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandleNV" ) ); + PFN_vkGetMemoryWin32HandleNV vkGetMemoryWin32HandleNV = 0; +# 
else + PFN_dummy vkGetMemoryWin32HandleNV_placeholder = 0; # endif /*VK_USE_PLATFORM_WIN32_KHR*/ //=== VK_KHR_device_group === - vkGetDeviceGroupPeerMemoryFeaturesKHR = - PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR( vkGetDeviceProcAddr( device, "vkGetDeviceGroupPeerMemoryFeaturesKHR" ) ); - if ( !vkGetDeviceGroupPeerMemoryFeatures ) - vkGetDeviceGroupPeerMemoryFeatures = vkGetDeviceGroupPeerMemoryFeaturesKHR; - vkCmdSetDeviceMaskKHR = PFN_vkCmdSetDeviceMaskKHR( vkGetDeviceProcAddr( device, "vkCmdSetDeviceMaskKHR" ) ); - if ( !vkCmdSetDeviceMask ) - vkCmdSetDeviceMask = vkCmdSetDeviceMaskKHR; - vkCmdDispatchBaseKHR = PFN_vkCmdDispatchBaseKHR( vkGetDeviceProcAddr( device, "vkCmdDispatchBaseKHR" ) ); - if ( !vkCmdDispatchBase ) - vkCmdDispatchBase = vkCmdDispatchBaseKHR; + PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR vkGetDeviceGroupPeerMemoryFeaturesKHR = 0; + PFN_vkCmdSetDeviceMaskKHR vkCmdSetDeviceMaskKHR = 0; + PFN_vkCmdDispatchBaseKHR vkCmdDispatchBaseKHR = 0; //=== VK_KHR_maintenance1 === - vkTrimCommandPoolKHR = PFN_vkTrimCommandPoolKHR( vkGetDeviceProcAddr( device, "vkTrimCommandPoolKHR" ) ); - if ( !vkTrimCommandPool ) - vkTrimCommandPool = vkTrimCommandPoolKHR; + PFN_vkTrimCommandPoolKHR vkTrimCommandPoolKHR = 0; # if defined( VK_USE_PLATFORM_WIN32_KHR ) //=== VK_KHR_external_memory_win32 === - vkGetMemoryWin32HandleKHR = PFN_vkGetMemoryWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandleKHR" ) ); - vkGetMemoryWin32HandlePropertiesKHR = PFN_vkGetMemoryWin32HandlePropertiesKHR( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandlePropertiesKHR" ) ); + PFN_vkGetMemoryWin32HandleKHR vkGetMemoryWin32HandleKHR = 0; + PFN_vkGetMemoryWin32HandlePropertiesKHR vkGetMemoryWin32HandlePropertiesKHR = 0; +# else + PFN_dummy vkGetMemoryWin32HandleKHR_placeholder = 0; + PFN_dummy vkGetMemoryWin32HandlePropertiesKHR_placeholder = 0; # endif /*VK_USE_PLATFORM_WIN32_KHR*/ //=== VK_KHR_external_memory_fd === - vkGetMemoryFdKHR = PFN_vkGetMemoryFdKHR( vkGetDeviceProcAddr( device, "vkGetMemoryFdKHR" ) ); - vkGetMemoryFdPropertiesKHR = PFN_vkGetMemoryFdPropertiesKHR( vkGetDeviceProcAddr( device, "vkGetMemoryFdPropertiesKHR" ) ); + PFN_vkGetMemoryFdKHR vkGetMemoryFdKHR = 0; + PFN_vkGetMemoryFdPropertiesKHR vkGetMemoryFdPropertiesKHR = 0; # if defined( VK_USE_PLATFORM_WIN32_KHR ) //=== VK_KHR_external_semaphore_win32 === - vkImportSemaphoreWin32HandleKHR = PFN_vkImportSemaphoreWin32HandleKHR( vkGetDeviceProcAddr( device, "vkImportSemaphoreWin32HandleKHR" ) ); - vkGetSemaphoreWin32HandleKHR = PFN_vkGetSemaphoreWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreWin32HandleKHR" ) ); + PFN_vkImportSemaphoreWin32HandleKHR vkImportSemaphoreWin32HandleKHR = 0; + PFN_vkGetSemaphoreWin32HandleKHR vkGetSemaphoreWin32HandleKHR = 0; +# else + PFN_dummy vkImportSemaphoreWin32HandleKHR_placeholder = 0; + PFN_dummy vkGetSemaphoreWin32HandleKHR_placeholder = 0; # endif /*VK_USE_PLATFORM_WIN32_KHR*/ //=== VK_KHR_external_semaphore_fd === - vkImportSemaphoreFdKHR = PFN_vkImportSemaphoreFdKHR( vkGetDeviceProcAddr( device, "vkImportSemaphoreFdKHR" ) ); - vkGetSemaphoreFdKHR = PFN_vkGetSemaphoreFdKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreFdKHR" ) ); + PFN_vkImportSemaphoreFdKHR vkImportSemaphoreFdKHR = 0; + PFN_vkGetSemaphoreFdKHR vkGetSemaphoreFdKHR = 0; //=== VK_KHR_push_descriptor === - vkCmdPushDescriptorSetKHR = PFN_vkCmdPushDescriptorSetKHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetKHR" ) ); - vkCmdPushDescriptorSetWithTemplateKHR = - PFN_vkCmdPushDescriptorSetWithTemplateKHR( 
vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetWithTemplateKHR" ) ); + PFN_vkCmdPushDescriptorSetKHR vkCmdPushDescriptorSetKHR = 0; + PFN_vkCmdPushDescriptorSetWithTemplateKHR vkCmdPushDescriptorSetWithTemplateKHR = 0; //=== VK_EXT_conditional_rendering === - vkCmdBeginConditionalRenderingEXT = PFN_vkCmdBeginConditionalRenderingEXT( vkGetDeviceProcAddr( device, "vkCmdBeginConditionalRenderingEXT" ) ); - vkCmdEndConditionalRenderingEXT = PFN_vkCmdEndConditionalRenderingEXT( vkGetDeviceProcAddr( device, "vkCmdEndConditionalRenderingEXT" ) ); + PFN_vkCmdBeginConditionalRenderingEXT vkCmdBeginConditionalRenderingEXT = 0; + PFN_vkCmdEndConditionalRenderingEXT vkCmdEndConditionalRenderingEXT = 0; //=== VK_KHR_descriptor_update_template === - vkCreateDescriptorUpdateTemplateKHR = PFN_vkCreateDescriptorUpdateTemplateKHR( vkGetDeviceProcAddr( device, "vkCreateDescriptorUpdateTemplateKHR" ) ); - if ( !vkCreateDescriptorUpdateTemplate ) - vkCreateDescriptorUpdateTemplate = vkCreateDescriptorUpdateTemplateKHR; - vkDestroyDescriptorUpdateTemplateKHR = - PFN_vkDestroyDescriptorUpdateTemplateKHR( vkGetDeviceProcAddr( device, "vkDestroyDescriptorUpdateTemplateKHR" ) ); - if ( !vkDestroyDescriptorUpdateTemplate ) - vkDestroyDescriptorUpdateTemplate = vkDestroyDescriptorUpdateTemplateKHR; - vkUpdateDescriptorSetWithTemplateKHR = - PFN_vkUpdateDescriptorSetWithTemplateKHR( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSetWithTemplateKHR" ) ); - if ( !vkUpdateDescriptorSetWithTemplate ) - vkUpdateDescriptorSetWithTemplate = vkUpdateDescriptorSetWithTemplateKHR; + PFN_vkCreateDescriptorUpdateTemplateKHR vkCreateDescriptorUpdateTemplateKHR = 0; + PFN_vkDestroyDescriptorUpdateTemplateKHR vkDestroyDescriptorUpdateTemplateKHR = 0; + PFN_vkUpdateDescriptorSetWithTemplateKHR vkUpdateDescriptorSetWithTemplateKHR = 0; //=== VK_NV_clip_space_w_scaling === - vkCmdSetViewportWScalingNV = PFN_vkCmdSetViewportWScalingNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportWScalingNV" ) ); + PFN_vkCmdSetViewportWScalingNV vkCmdSetViewportWScalingNV = 0; //=== VK_EXT_display_control === - vkDisplayPowerControlEXT = PFN_vkDisplayPowerControlEXT( vkGetDeviceProcAddr( device, "vkDisplayPowerControlEXT" ) ); - vkRegisterDeviceEventEXT = PFN_vkRegisterDeviceEventEXT( vkGetDeviceProcAddr( device, "vkRegisterDeviceEventEXT" ) ); - vkRegisterDisplayEventEXT = PFN_vkRegisterDisplayEventEXT( vkGetDeviceProcAddr( device, "vkRegisterDisplayEventEXT" ) ); - vkGetSwapchainCounterEXT = PFN_vkGetSwapchainCounterEXT( vkGetDeviceProcAddr( device, "vkGetSwapchainCounterEXT" ) ); + PFN_vkDisplayPowerControlEXT vkDisplayPowerControlEXT = 0; + PFN_vkRegisterDeviceEventEXT vkRegisterDeviceEventEXT = 0; + PFN_vkRegisterDisplayEventEXT vkRegisterDisplayEventEXT = 0; + PFN_vkGetSwapchainCounterEXT vkGetSwapchainCounterEXT = 0; //=== VK_GOOGLE_display_timing === - vkGetRefreshCycleDurationGOOGLE = PFN_vkGetRefreshCycleDurationGOOGLE( vkGetDeviceProcAddr( device, "vkGetRefreshCycleDurationGOOGLE" ) ); - vkGetPastPresentationTimingGOOGLE = PFN_vkGetPastPresentationTimingGOOGLE( vkGetDeviceProcAddr( device, "vkGetPastPresentationTimingGOOGLE" ) ); + PFN_vkGetRefreshCycleDurationGOOGLE vkGetRefreshCycleDurationGOOGLE = 0; + PFN_vkGetPastPresentationTimingGOOGLE vkGetPastPresentationTimingGOOGLE = 0; //=== VK_EXT_discard_rectangles === - vkCmdSetDiscardRectangleEXT = PFN_vkCmdSetDiscardRectangleEXT( vkGetDeviceProcAddr( device, "vkCmdSetDiscardRectangleEXT" ) ); - vkCmdSetDiscardRectangleEnableEXT = PFN_vkCmdSetDiscardRectangleEnableEXT( 
vkGetDeviceProcAddr( device, "vkCmdSetDiscardRectangleEnableEXT" ) ); - vkCmdSetDiscardRectangleModeEXT = PFN_vkCmdSetDiscardRectangleModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetDiscardRectangleModeEXT" ) ); + PFN_vkCmdSetDiscardRectangleEXT vkCmdSetDiscardRectangleEXT = 0; + PFN_vkCmdSetDiscardRectangleEnableEXT vkCmdSetDiscardRectangleEnableEXT = 0; + PFN_vkCmdSetDiscardRectangleModeEXT vkCmdSetDiscardRectangleModeEXT = 0; //=== VK_EXT_hdr_metadata === - vkSetHdrMetadataEXT = PFN_vkSetHdrMetadataEXT( vkGetDeviceProcAddr( device, "vkSetHdrMetadataEXT" ) ); + PFN_vkSetHdrMetadataEXT vkSetHdrMetadataEXT = 0; //=== VK_KHR_create_renderpass2 === - vkCreateRenderPass2KHR = PFN_vkCreateRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCreateRenderPass2KHR" ) ); - if ( !vkCreateRenderPass2 ) - vkCreateRenderPass2 = vkCreateRenderPass2KHR; - vkCmdBeginRenderPass2KHR = PFN_vkCmdBeginRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass2KHR" ) ); - if ( !vkCmdBeginRenderPass2 ) - vkCmdBeginRenderPass2 = vkCmdBeginRenderPass2KHR; - vkCmdNextSubpass2KHR = PFN_vkCmdNextSubpass2KHR( vkGetDeviceProcAddr( device, "vkCmdNextSubpass2KHR" ) ); - if ( !vkCmdNextSubpass2 ) - vkCmdNextSubpass2 = vkCmdNextSubpass2KHR; - vkCmdEndRenderPass2KHR = PFN_vkCmdEndRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass2KHR" ) ); - if ( !vkCmdEndRenderPass2 ) - vkCmdEndRenderPass2 = vkCmdEndRenderPass2KHR; + PFN_vkCreateRenderPass2KHR vkCreateRenderPass2KHR = 0; + PFN_vkCmdBeginRenderPass2KHR vkCmdBeginRenderPass2KHR = 0; + PFN_vkCmdNextSubpass2KHR vkCmdNextSubpass2KHR = 0; + PFN_vkCmdEndRenderPass2KHR vkCmdEndRenderPass2KHR = 0; //=== VK_KHR_shared_presentable_image === - vkGetSwapchainStatusKHR = PFN_vkGetSwapchainStatusKHR( vkGetDeviceProcAddr( device, "vkGetSwapchainStatusKHR" ) ); + PFN_vkGetSwapchainStatusKHR vkGetSwapchainStatusKHR = 0; # if defined( VK_USE_PLATFORM_WIN32_KHR ) //=== VK_KHR_external_fence_win32 === - vkImportFenceWin32HandleKHR = PFN_vkImportFenceWin32HandleKHR( vkGetDeviceProcAddr( device, "vkImportFenceWin32HandleKHR" ) ); - vkGetFenceWin32HandleKHR = PFN_vkGetFenceWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetFenceWin32HandleKHR" ) ); + PFN_vkImportFenceWin32HandleKHR vkImportFenceWin32HandleKHR = 0; + PFN_vkGetFenceWin32HandleKHR vkGetFenceWin32HandleKHR = 0; +# else + PFN_dummy vkImportFenceWin32HandleKHR_placeholder = 0; + PFN_dummy vkGetFenceWin32HandleKHR_placeholder = 0; # endif /*VK_USE_PLATFORM_WIN32_KHR*/ //=== VK_KHR_external_fence_fd === - vkImportFenceFdKHR = PFN_vkImportFenceFdKHR( vkGetDeviceProcAddr( device, "vkImportFenceFdKHR" ) ); - vkGetFenceFdKHR = PFN_vkGetFenceFdKHR( vkGetDeviceProcAddr( device, "vkGetFenceFdKHR" ) ); + PFN_vkImportFenceFdKHR vkImportFenceFdKHR = 0; + PFN_vkGetFenceFdKHR vkGetFenceFdKHR = 0; //=== VK_KHR_performance_query === - vkAcquireProfilingLockKHR = PFN_vkAcquireProfilingLockKHR( vkGetDeviceProcAddr( device, "vkAcquireProfilingLockKHR" ) ); - vkReleaseProfilingLockKHR = PFN_vkReleaseProfilingLockKHR( vkGetDeviceProcAddr( device, "vkReleaseProfilingLockKHR" ) ); + PFN_vkAcquireProfilingLockKHR vkAcquireProfilingLockKHR = 0; + PFN_vkReleaseProfilingLockKHR vkReleaseProfilingLockKHR = 0; //=== VK_EXT_debug_utils === - vkSetDebugUtilsObjectNameEXT = PFN_vkSetDebugUtilsObjectNameEXT( vkGetDeviceProcAddr( device, "vkSetDebugUtilsObjectNameEXT" ) ); - vkSetDebugUtilsObjectTagEXT = PFN_vkSetDebugUtilsObjectTagEXT( vkGetDeviceProcAddr( device, "vkSetDebugUtilsObjectTagEXT" ) ); - vkQueueBeginDebugUtilsLabelEXT = 
PFN_vkQueueBeginDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueBeginDebugUtilsLabelEXT" ) ); - vkQueueEndDebugUtilsLabelEXT = PFN_vkQueueEndDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueEndDebugUtilsLabelEXT" ) ); - vkQueueInsertDebugUtilsLabelEXT = PFN_vkQueueInsertDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueInsertDebugUtilsLabelEXT" ) ); - vkCmdBeginDebugUtilsLabelEXT = PFN_vkCmdBeginDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdBeginDebugUtilsLabelEXT" ) ); - vkCmdEndDebugUtilsLabelEXT = PFN_vkCmdEndDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdEndDebugUtilsLabelEXT" ) ); - vkCmdInsertDebugUtilsLabelEXT = PFN_vkCmdInsertDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdInsertDebugUtilsLabelEXT" ) ); + PFN_vkSetDebugUtilsObjectNameEXT vkSetDebugUtilsObjectNameEXT = 0; + PFN_vkSetDebugUtilsObjectTagEXT vkSetDebugUtilsObjectTagEXT = 0; + PFN_vkQueueBeginDebugUtilsLabelEXT vkQueueBeginDebugUtilsLabelEXT = 0; + PFN_vkQueueEndDebugUtilsLabelEXT vkQueueEndDebugUtilsLabelEXT = 0; + PFN_vkQueueInsertDebugUtilsLabelEXT vkQueueInsertDebugUtilsLabelEXT = 0; + PFN_vkCmdBeginDebugUtilsLabelEXT vkCmdBeginDebugUtilsLabelEXT = 0; + PFN_vkCmdEndDebugUtilsLabelEXT vkCmdEndDebugUtilsLabelEXT = 0; + PFN_vkCmdInsertDebugUtilsLabelEXT vkCmdInsertDebugUtilsLabelEXT = 0; # if defined( VK_USE_PLATFORM_ANDROID_KHR ) //=== VK_ANDROID_external_memory_android_hardware_buffer === - vkGetAndroidHardwareBufferPropertiesANDROID = - PFN_vkGetAndroidHardwareBufferPropertiesANDROID( vkGetDeviceProcAddr( device, "vkGetAndroidHardwareBufferPropertiesANDROID" ) ); - vkGetMemoryAndroidHardwareBufferANDROID = - PFN_vkGetMemoryAndroidHardwareBufferANDROID( vkGetDeviceProcAddr( device, "vkGetMemoryAndroidHardwareBufferANDROID" ) ); + PFN_vkGetAndroidHardwareBufferPropertiesANDROID vkGetAndroidHardwareBufferPropertiesANDROID = 0; + PFN_vkGetMemoryAndroidHardwareBufferANDROID vkGetMemoryAndroidHardwareBufferANDROID = 0; +# else + PFN_dummy vkGetAndroidHardwareBufferPropertiesANDROID_placeholder = 0; + PFN_dummy vkGetMemoryAndroidHardwareBufferANDROID_placeholder = 0; # endif /*VK_USE_PLATFORM_ANDROID_KHR*/ # if defined( VK_ENABLE_BETA_EXTENSIONS ) //=== VK_AMDX_shader_enqueue === - vkCreateExecutionGraphPipelinesAMDX = PFN_vkCreateExecutionGraphPipelinesAMDX( vkGetDeviceProcAddr( device, "vkCreateExecutionGraphPipelinesAMDX" ) ); - vkGetExecutionGraphPipelineScratchSizeAMDX = - PFN_vkGetExecutionGraphPipelineScratchSizeAMDX( vkGetDeviceProcAddr( device, "vkGetExecutionGraphPipelineScratchSizeAMDX" ) ); - vkGetExecutionGraphPipelineNodeIndexAMDX = - PFN_vkGetExecutionGraphPipelineNodeIndexAMDX( vkGetDeviceProcAddr( device, "vkGetExecutionGraphPipelineNodeIndexAMDX" ) ); - vkCmdInitializeGraphScratchMemoryAMDX = - PFN_vkCmdInitializeGraphScratchMemoryAMDX( vkGetDeviceProcAddr( device, "vkCmdInitializeGraphScratchMemoryAMDX" ) ); - vkCmdDispatchGraphAMDX = PFN_vkCmdDispatchGraphAMDX( vkGetDeviceProcAddr( device, "vkCmdDispatchGraphAMDX" ) ); - vkCmdDispatchGraphIndirectAMDX = PFN_vkCmdDispatchGraphIndirectAMDX( vkGetDeviceProcAddr( device, "vkCmdDispatchGraphIndirectAMDX" ) ); - vkCmdDispatchGraphIndirectCountAMDX = PFN_vkCmdDispatchGraphIndirectCountAMDX( vkGetDeviceProcAddr( device, "vkCmdDispatchGraphIndirectCountAMDX" ) ); + PFN_vkCreateExecutionGraphPipelinesAMDX vkCreateExecutionGraphPipelinesAMDX = 0; + PFN_vkGetExecutionGraphPipelineScratchSizeAMDX vkGetExecutionGraphPipelineScratchSizeAMDX = 0; + PFN_vkGetExecutionGraphPipelineNodeIndexAMDX 
vkGetExecutionGraphPipelineNodeIndexAMDX = 0; + PFN_vkCmdInitializeGraphScratchMemoryAMDX vkCmdInitializeGraphScratchMemoryAMDX = 0; + PFN_vkCmdDispatchGraphAMDX vkCmdDispatchGraphAMDX = 0; + PFN_vkCmdDispatchGraphIndirectAMDX vkCmdDispatchGraphIndirectAMDX = 0; + PFN_vkCmdDispatchGraphIndirectCountAMDX vkCmdDispatchGraphIndirectCountAMDX = 0; +# else + PFN_dummy vkCreateExecutionGraphPipelinesAMDX_placeholder = 0; + PFN_dummy vkGetExecutionGraphPipelineScratchSizeAMDX_placeholder = 0; + PFN_dummy vkGetExecutionGraphPipelineNodeIndexAMDX_placeholder = 0; + PFN_dummy vkCmdInitializeGraphScratchMemoryAMDX_placeholder = 0; + PFN_dummy vkCmdDispatchGraphAMDX_placeholder = 0; + PFN_dummy vkCmdDispatchGraphIndirectAMDX_placeholder = 0; + PFN_dummy vkCmdDispatchGraphIndirectCountAMDX_placeholder = 0; # endif /*VK_ENABLE_BETA_EXTENSIONS*/ //=== VK_EXT_sample_locations === - vkCmdSetSampleLocationsEXT = PFN_vkCmdSetSampleLocationsEXT( vkGetDeviceProcAddr( device, "vkCmdSetSampleLocationsEXT" ) ); + PFN_vkCmdSetSampleLocationsEXT vkCmdSetSampleLocationsEXT = 0; //=== VK_KHR_get_memory_requirements2 === - vkGetImageMemoryRequirements2KHR = PFN_vkGetImageMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements2KHR" ) ); - if ( !vkGetImageMemoryRequirements2 ) - vkGetImageMemoryRequirements2 = vkGetImageMemoryRequirements2KHR; - vkGetBufferMemoryRequirements2KHR = PFN_vkGetBufferMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements2KHR" ) ); - if ( !vkGetBufferMemoryRequirements2 ) - vkGetBufferMemoryRequirements2 = vkGetBufferMemoryRequirements2KHR; - vkGetImageSparseMemoryRequirements2KHR = - PFN_vkGetImageSparseMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements2KHR" ) ); - if ( !vkGetImageSparseMemoryRequirements2 ) - vkGetImageSparseMemoryRequirements2 = vkGetImageSparseMemoryRequirements2KHR; + PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR = 0; + PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR = 0; + PFN_vkGetImageSparseMemoryRequirements2KHR vkGetImageSparseMemoryRequirements2KHR = 0; //=== VK_KHR_acceleration_structure === - vkCreateAccelerationStructureKHR = PFN_vkCreateAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCreateAccelerationStructureKHR" ) ); - vkDestroyAccelerationStructureKHR = PFN_vkDestroyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkDestroyAccelerationStructureKHR" ) ); - vkCmdBuildAccelerationStructuresKHR = PFN_vkCmdBuildAccelerationStructuresKHR( vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructuresKHR" ) ); - vkCmdBuildAccelerationStructuresIndirectKHR = - PFN_vkCmdBuildAccelerationStructuresIndirectKHR( vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructuresIndirectKHR" ) ); - vkBuildAccelerationStructuresKHR = PFN_vkBuildAccelerationStructuresKHR( vkGetDeviceProcAddr( device, "vkBuildAccelerationStructuresKHR" ) ); - vkCopyAccelerationStructureKHR = PFN_vkCopyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCopyAccelerationStructureKHR" ) ); - vkCopyAccelerationStructureToMemoryKHR = - PFN_vkCopyAccelerationStructureToMemoryKHR( vkGetDeviceProcAddr( device, "vkCopyAccelerationStructureToMemoryKHR" ) ); - vkCopyMemoryToAccelerationStructureKHR = - PFN_vkCopyMemoryToAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCopyMemoryToAccelerationStructureKHR" ) ); - vkWriteAccelerationStructuresPropertiesKHR = - PFN_vkWriteAccelerationStructuresPropertiesKHR( vkGetDeviceProcAddr( device, 
"vkWriteAccelerationStructuresPropertiesKHR" ) ); - vkCmdCopyAccelerationStructureKHR = PFN_vkCmdCopyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureKHR" ) ); - vkCmdCopyAccelerationStructureToMemoryKHR = - PFN_vkCmdCopyAccelerationStructureToMemoryKHR( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureToMemoryKHR" ) ); - vkCmdCopyMemoryToAccelerationStructureKHR = - PFN_vkCmdCopyMemoryToAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCmdCopyMemoryToAccelerationStructureKHR" ) ); - vkGetAccelerationStructureDeviceAddressKHR = - PFN_vkGetAccelerationStructureDeviceAddressKHR( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureDeviceAddressKHR" ) ); - vkCmdWriteAccelerationStructuresPropertiesKHR = - PFN_vkCmdWriteAccelerationStructuresPropertiesKHR( vkGetDeviceProcAddr( device, "vkCmdWriteAccelerationStructuresPropertiesKHR" ) ); - vkGetDeviceAccelerationStructureCompatibilityKHR = - PFN_vkGetDeviceAccelerationStructureCompatibilityKHR( vkGetDeviceProcAddr( device, "vkGetDeviceAccelerationStructureCompatibilityKHR" ) ); - vkGetAccelerationStructureBuildSizesKHR = - PFN_vkGetAccelerationStructureBuildSizesKHR( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureBuildSizesKHR" ) ); + PFN_vkCreateAccelerationStructureKHR vkCreateAccelerationStructureKHR = 0; + PFN_vkDestroyAccelerationStructureKHR vkDestroyAccelerationStructureKHR = 0; + PFN_vkCmdBuildAccelerationStructuresKHR vkCmdBuildAccelerationStructuresKHR = 0; + PFN_vkCmdBuildAccelerationStructuresIndirectKHR vkCmdBuildAccelerationStructuresIndirectKHR = 0; + PFN_vkBuildAccelerationStructuresKHR vkBuildAccelerationStructuresKHR = 0; + PFN_vkCopyAccelerationStructureKHR vkCopyAccelerationStructureKHR = 0; + PFN_vkCopyAccelerationStructureToMemoryKHR vkCopyAccelerationStructureToMemoryKHR = 0; + PFN_vkCopyMemoryToAccelerationStructureKHR vkCopyMemoryToAccelerationStructureKHR = 0; + PFN_vkWriteAccelerationStructuresPropertiesKHR vkWriteAccelerationStructuresPropertiesKHR = 0; + PFN_vkCmdCopyAccelerationStructureKHR vkCmdCopyAccelerationStructureKHR = 0; + PFN_vkCmdCopyAccelerationStructureToMemoryKHR vkCmdCopyAccelerationStructureToMemoryKHR = 0; + PFN_vkCmdCopyMemoryToAccelerationStructureKHR vkCmdCopyMemoryToAccelerationStructureKHR = 0; + PFN_vkGetAccelerationStructureDeviceAddressKHR vkGetAccelerationStructureDeviceAddressKHR = 0; + PFN_vkCmdWriteAccelerationStructuresPropertiesKHR vkCmdWriteAccelerationStructuresPropertiesKHR = 0; + PFN_vkGetDeviceAccelerationStructureCompatibilityKHR vkGetDeviceAccelerationStructureCompatibilityKHR = 0; + PFN_vkGetAccelerationStructureBuildSizesKHR vkGetAccelerationStructureBuildSizesKHR = 0; //=== VK_KHR_ray_tracing_pipeline === - vkCmdTraceRaysKHR = PFN_vkCmdTraceRaysKHR( vkGetDeviceProcAddr( device, "vkCmdTraceRaysKHR" ) ); - vkCreateRayTracingPipelinesKHR = PFN_vkCreateRayTracingPipelinesKHR( vkGetDeviceProcAddr( device, "vkCreateRayTracingPipelinesKHR" ) ); - vkGetRayTracingShaderGroupHandlesKHR = - PFN_vkGetRayTracingShaderGroupHandlesKHR( vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupHandlesKHR" ) ); - vkGetRayTracingCaptureReplayShaderGroupHandlesKHR = - PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( vkGetDeviceProcAddr( device, "vkGetRayTracingCaptureReplayShaderGroupHandlesKHR" ) ); - vkCmdTraceRaysIndirectKHR = PFN_vkCmdTraceRaysIndirectKHR( vkGetDeviceProcAddr( device, "vkCmdTraceRaysIndirectKHR" ) ); - vkGetRayTracingShaderGroupStackSizeKHR = - PFN_vkGetRayTracingShaderGroupStackSizeKHR( 
vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupStackSizeKHR" ) ); - vkCmdSetRayTracingPipelineStackSizeKHR = - PFN_vkCmdSetRayTracingPipelineStackSizeKHR( vkGetDeviceProcAddr( device, "vkCmdSetRayTracingPipelineStackSizeKHR" ) ); + PFN_vkCmdTraceRaysKHR vkCmdTraceRaysKHR = 0; + PFN_vkCreateRayTracingPipelinesKHR vkCreateRayTracingPipelinesKHR = 0; + PFN_vkGetRayTracingShaderGroupHandlesKHR vkGetRayTracingShaderGroupHandlesKHR = 0; + PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR vkGetRayTracingCaptureReplayShaderGroupHandlesKHR = 0; + PFN_vkCmdTraceRaysIndirectKHR vkCmdTraceRaysIndirectKHR = 0; + PFN_vkGetRayTracingShaderGroupStackSizeKHR vkGetRayTracingShaderGroupStackSizeKHR = 0; + PFN_vkCmdSetRayTracingPipelineStackSizeKHR vkCmdSetRayTracingPipelineStackSizeKHR = 0; //=== VK_KHR_sampler_ycbcr_conversion === - vkCreateSamplerYcbcrConversionKHR = PFN_vkCreateSamplerYcbcrConversionKHR( vkGetDeviceProcAddr( device, "vkCreateSamplerYcbcrConversionKHR" ) ); - if ( !vkCreateSamplerYcbcrConversion ) - vkCreateSamplerYcbcrConversion = vkCreateSamplerYcbcrConversionKHR; - vkDestroySamplerYcbcrConversionKHR = PFN_vkDestroySamplerYcbcrConversionKHR( vkGetDeviceProcAddr( device, "vkDestroySamplerYcbcrConversionKHR" ) ); - if ( !vkDestroySamplerYcbcrConversion ) - vkDestroySamplerYcbcrConversion = vkDestroySamplerYcbcrConversionKHR; + PFN_vkCreateSamplerYcbcrConversionKHR vkCreateSamplerYcbcrConversionKHR = 0; + PFN_vkDestroySamplerYcbcrConversionKHR vkDestroySamplerYcbcrConversionKHR = 0; //=== VK_KHR_bind_memory2 === - vkBindBufferMemory2KHR = PFN_vkBindBufferMemory2KHR( vkGetDeviceProcAddr( device, "vkBindBufferMemory2KHR" ) ); - if ( !vkBindBufferMemory2 ) - vkBindBufferMemory2 = vkBindBufferMemory2KHR; - vkBindImageMemory2KHR = PFN_vkBindImageMemory2KHR( vkGetDeviceProcAddr( device, "vkBindImageMemory2KHR" ) ); - if ( !vkBindImageMemory2 ) - vkBindImageMemory2 = vkBindImageMemory2KHR; + PFN_vkBindBufferMemory2KHR vkBindBufferMemory2KHR = 0; + PFN_vkBindImageMemory2KHR vkBindImageMemory2KHR = 0; //=== VK_EXT_image_drm_format_modifier === - vkGetImageDrmFormatModifierPropertiesEXT = - PFN_vkGetImageDrmFormatModifierPropertiesEXT( vkGetDeviceProcAddr( device, "vkGetImageDrmFormatModifierPropertiesEXT" ) ); + PFN_vkGetImageDrmFormatModifierPropertiesEXT vkGetImageDrmFormatModifierPropertiesEXT = 0; //=== VK_EXT_validation_cache === - vkCreateValidationCacheEXT = PFN_vkCreateValidationCacheEXT( vkGetDeviceProcAddr( device, "vkCreateValidationCacheEXT" ) ); - vkDestroyValidationCacheEXT = PFN_vkDestroyValidationCacheEXT( vkGetDeviceProcAddr( device, "vkDestroyValidationCacheEXT" ) ); - vkMergeValidationCachesEXT = PFN_vkMergeValidationCachesEXT( vkGetDeviceProcAddr( device, "vkMergeValidationCachesEXT" ) ); - vkGetValidationCacheDataEXT = PFN_vkGetValidationCacheDataEXT( vkGetDeviceProcAddr( device, "vkGetValidationCacheDataEXT" ) ); + PFN_vkCreateValidationCacheEXT vkCreateValidationCacheEXT = 0; + PFN_vkDestroyValidationCacheEXT vkDestroyValidationCacheEXT = 0; + PFN_vkMergeValidationCachesEXT vkMergeValidationCachesEXT = 0; + PFN_vkGetValidationCacheDataEXT vkGetValidationCacheDataEXT = 0; //=== VK_NV_shading_rate_image === - vkCmdBindShadingRateImageNV = PFN_vkCmdBindShadingRateImageNV( vkGetDeviceProcAddr( device, "vkCmdBindShadingRateImageNV" ) ); - vkCmdSetViewportShadingRatePaletteNV = - PFN_vkCmdSetViewportShadingRatePaletteNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportShadingRatePaletteNV" ) ); - vkCmdSetCoarseSampleOrderNV = PFN_vkCmdSetCoarseSampleOrderNV( 
vkGetDeviceProcAddr( device, "vkCmdSetCoarseSampleOrderNV" ) ); + PFN_vkCmdBindShadingRateImageNV vkCmdBindShadingRateImageNV = 0; + PFN_vkCmdSetViewportShadingRatePaletteNV vkCmdSetViewportShadingRatePaletteNV = 0; + PFN_vkCmdSetCoarseSampleOrderNV vkCmdSetCoarseSampleOrderNV = 0; //=== VK_NV_ray_tracing === - vkCreateAccelerationStructureNV = PFN_vkCreateAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCreateAccelerationStructureNV" ) ); - vkDestroyAccelerationStructureNV = PFN_vkDestroyAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkDestroyAccelerationStructureNV" ) ); - vkGetAccelerationStructureMemoryRequirementsNV = - PFN_vkGetAccelerationStructureMemoryRequirementsNV( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureMemoryRequirementsNV" ) ); - vkBindAccelerationStructureMemoryNV = PFN_vkBindAccelerationStructureMemoryNV( vkGetDeviceProcAddr( device, "vkBindAccelerationStructureMemoryNV" ) ); - vkCmdBuildAccelerationStructureNV = PFN_vkCmdBuildAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructureNV" ) ); - vkCmdCopyAccelerationStructureNV = PFN_vkCmdCopyAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureNV" ) ); - vkCmdTraceRaysNV = PFN_vkCmdTraceRaysNV( vkGetDeviceProcAddr( device, "vkCmdTraceRaysNV" ) ); - vkCreateRayTracingPipelinesNV = PFN_vkCreateRayTracingPipelinesNV( vkGetDeviceProcAddr( device, "vkCreateRayTracingPipelinesNV" ) ); - vkGetRayTracingShaderGroupHandlesNV = PFN_vkGetRayTracingShaderGroupHandlesNV( vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupHandlesNV" ) ); - if ( !vkGetRayTracingShaderGroupHandlesKHR ) - vkGetRayTracingShaderGroupHandlesKHR = vkGetRayTracingShaderGroupHandlesNV; - vkGetAccelerationStructureHandleNV = PFN_vkGetAccelerationStructureHandleNV( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureHandleNV" ) ); - vkCmdWriteAccelerationStructuresPropertiesNV = - PFN_vkCmdWriteAccelerationStructuresPropertiesNV( vkGetDeviceProcAddr( device, "vkCmdWriteAccelerationStructuresPropertiesNV" ) ); - vkCompileDeferredNV = PFN_vkCompileDeferredNV( vkGetDeviceProcAddr( device, "vkCompileDeferredNV" ) ); + PFN_vkCreateAccelerationStructureNV vkCreateAccelerationStructureNV = 0; + PFN_vkDestroyAccelerationStructureNV vkDestroyAccelerationStructureNV = 0; + PFN_vkGetAccelerationStructureMemoryRequirementsNV vkGetAccelerationStructureMemoryRequirementsNV = 0; + PFN_vkBindAccelerationStructureMemoryNV vkBindAccelerationStructureMemoryNV = 0; + PFN_vkCmdBuildAccelerationStructureNV vkCmdBuildAccelerationStructureNV = 0; + PFN_vkCmdCopyAccelerationStructureNV vkCmdCopyAccelerationStructureNV = 0; + PFN_vkCmdTraceRaysNV vkCmdTraceRaysNV = 0; + PFN_vkCreateRayTracingPipelinesNV vkCreateRayTracingPipelinesNV = 0; + PFN_vkGetRayTracingShaderGroupHandlesNV vkGetRayTracingShaderGroupHandlesNV = 0; + PFN_vkGetAccelerationStructureHandleNV vkGetAccelerationStructureHandleNV = 0; + PFN_vkCmdWriteAccelerationStructuresPropertiesNV vkCmdWriteAccelerationStructuresPropertiesNV = 0; + PFN_vkCompileDeferredNV vkCompileDeferredNV = 0; //=== VK_KHR_maintenance3 === - vkGetDescriptorSetLayoutSupportKHR = PFN_vkGetDescriptorSetLayoutSupportKHR( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutSupportKHR" ) ); - if ( !vkGetDescriptorSetLayoutSupport ) - vkGetDescriptorSetLayoutSupport = vkGetDescriptorSetLayoutSupportKHR; + PFN_vkGetDescriptorSetLayoutSupportKHR vkGetDescriptorSetLayoutSupportKHR = 0; //=== VK_KHR_draw_indirect_count === - vkCmdDrawIndirectCountKHR = 
PFN_vkCmdDrawIndirectCountKHR( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCountKHR" ) ); - if ( !vkCmdDrawIndirectCount ) - vkCmdDrawIndirectCount = vkCmdDrawIndirectCountKHR; - vkCmdDrawIndexedIndirectCountKHR = PFN_vkCmdDrawIndexedIndirectCountKHR( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCountKHR" ) ); - if ( !vkCmdDrawIndexedIndirectCount ) - vkCmdDrawIndexedIndirectCount = vkCmdDrawIndexedIndirectCountKHR; + PFN_vkCmdDrawIndirectCountKHR vkCmdDrawIndirectCountKHR = 0; + PFN_vkCmdDrawIndexedIndirectCountKHR vkCmdDrawIndexedIndirectCountKHR = 0; //=== VK_EXT_external_memory_host === - vkGetMemoryHostPointerPropertiesEXT = PFN_vkGetMemoryHostPointerPropertiesEXT( vkGetDeviceProcAddr( device, "vkGetMemoryHostPointerPropertiesEXT" ) ); + PFN_vkGetMemoryHostPointerPropertiesEXT vkGetMemoryHostPointerPropertiesEXT = 0; //=== VK_AMD_buffer_marker === - vkCmdWriteBufferMarkerAMD = PFN_vkCmdWriteBufferMarkerAMD( vkGetDeviceProcAddr( device, "vkCmdWriteBufferMarkerAMD" ) ); - vkCmdWriteBufferMarker2AMD = PFN_vkCmdWriteBufferMarker2AMD( vkGetDeviceProcAddr( device, "vkCmdWriteBufferMarker2AMD" ) ); + PFN_vkCmdWriteBufferMarkerAMD vkCmdWriteBufferMarkerAMD = 0; + PFN_vkCmdWriteBufferMarker2AMD vkCmdWriteBufferMarker2AMD = 0; //=== VK_EXT_calibrated_timestamps === - vkGetCalibratedTimestampsEXT = PFN_vkGetCalibratedTimestampsEXT( vkGetDeviceProcAddr( device, "vkGetCalibratedTimestampsEXT" ) ); - if ( !vkGetCalibratedTimestampsKHR ) - vkGetCalibratedTimestampsKHR = vkGetCalibratedTimestampsEXT; + PFN_vkGetCalibratedTimestampsEXT vkGetCalibratedTimestampsEXT = 0; //=== VK_NV_mesh_shader === - vkCmdDrawMeshTasksNV = PFN_vkCmdDrawMeshTasksNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksNV" ) ); - vkCmdDrawMeshTasksIndirectNV = PFN_vkCmdDrawMeshTasksIndirectNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectNV" ) ); - vkCmdDrawMeshTasksIndirectCountNV = PFN_vkCmdDrawMeshTasksIndirectCountNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectCountNV" ) ); + PFN_vkCmdDrawMeshTasksNV vkCmdDrawMeshTasksNV = 0; + PFN_vkCmdDrawMeshTasksIndirectNV vkCmdDrawMeshTasksIndirectNV = 0; + PFN_vkCmdDrawMeshTasksIndirectCountNV vkCmdDrawMeshTasksIndirectCountNV = 0; //=== VK_NV_scissor_exclusive === - vkCmdSetExclusiveScissorEnableNV = PFN_vkCmdSetExclusiveScissorEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetExclusiveScissorEnableNV" ) ); - vkCmdSetExclusiveScissorNV = PFN_vkCmdSetExclusiveScissorNV( vkGetDeviceProcAddr( device, "vkCmdSetExclusiveScissorNV" ) ); + PFN_vkCmdSetExclusiveScissorEnableNV vkCmdSetExclusiveScissorEnableNV = 0; + PFN_vkCmdSetExclusiveScissorNV vkCmdSetExclusiveScissorNV = 0; //=== VK_NV_device_diagnostic_checkpoints === - vkCmdSetCheckpointNV = PFN_vkCmdSetCheckpointNV( vkGetDeviceProcAddr( device, "vkCmdSetCheckpointNV" ) ); - vkGetQueueCheckpointDataNV = PFN_vkGetQueueCheckpointDataNV( vkGetDeviceProcAddr( device, "vkGetQueueCheckpointDataNV" ) ); - vkGetQueueCheckpointData2NV = PFN_vkGetQueueCheckpointData2NV( vkGetDeviceProcAddr( device, "vkGetQueueCheckpointData2NV" ) ); + PFN_vkCmdSetCheckpointNV vkCmdSetCheckpointNV = 0; + PFN_vkGetQueueCheckpointDataNV vkGetQueueCheckpointDataNV = 0; + PFN_vkGetQueueCheckpointData2NV vkGetQueueCheckpointData2NV = 0; //=== VK_KHR_timeline_semaphore === - vkGetSemaphoreCounterValueKHR = PFN_vkGetSemaphoreCounterValueKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreCounterValueKHR" ) ); - if ( !vkGetSemaphoreCounterValue ) - vkGetSemaphoreCounterValue = vkGetSemaphoreCounterValueKHR; - vkWaitSemaphoresKHR = 
PFN_vkWaitSemaphoresKHR( vkGetDeviceProcAddr( device, "vkWaitSemaphoresKHR" ) ); - if ( !vkWaitSemaphores ) - vkWaitSemaphores = vkWaitSemaphoresKHR; - vkSignalSemaphoreKHR = PFN_vkSignalSemaphoreKHR( vkGetDeviceProcAddr( device, "vkSignalSemaphoreKHR" ) ); - if ( !vkSignalSemaphore ) - vkSignalSemaphore = vkSignalSemaphoreKHR; + PFN_vkGetSemaphoreCounterValueKHR vkGetSemaphoreCounterValueKHR = 0; + PFN_vkWaitSemaphoresKHR vkWaitSemaphoresKHR = 0; + PFN_vkSignalSemaphoreKHR vkSignalSemaphoreKHR = 0; //=== VK_INTEL_performance_query === - vkInitializePerformanceApiINTEL = PFN_vkInitializePerformanceApiINTEL( vkGetDeviceProcAddr( device, "vkInitializePerformanceApiINTEL" ) ); - vkUninitializePerformanceApiINTEL = PFN_vkUninitializePerformanceApiINTEL( vkGetDeviceProcAddr( device, "vkUninitializePerformanceApiINTEL" ) ); - vkCmdSetPerformanceMarkerINTEL = PFN_vkCmdSetPerformanceMarkerINTEL( vkGetDeviceProcAddr( device, "vkCmdSetPerformanceMarkerINTEL" ) ); - vkCmdSetPerformanceStreamMarkerINTEL = - PFN_vkCmdSetPerformanceStreamMarkerINTEL( vkGetDeviceProcAddr( device, "vkCmdSetPerformanceStreamMarkerINTEL" ) ); - vkCmdSetPerformanceOverrideINTEL = PFN_vkCmdSetPerformanceOverrideINTEL( vkGetDeviceProcAddr( device, "vkCmdSetPerformanceOverrideINTEL" ) ); - vkAcquirePerformanceConfigurationINTEL = - PFN_vkAcquirePerformanceConfigurationINTEL( vkGetDeviceProcAddr( device, "vkAcquirePerformanceConfigurationINTEL" ) ); - vkReleasePerformanceConfigurationINTEL = - PFN_vkReleasePerformanceConfigurationINTEL( vkGetDeviceProcAddr( device, "vkReleasePerformanceConfigurationINTEL" ) ); - vkQueueSetPerformanceConfigurationINTEL = - PFN_vkQueueSetPerformanceConfigurationINTEL( vkGetDeviceProcAddr( device, "vkQueueSetPerformanceConfigurationINTEL" ) ); - vkGetPerformanceParameterINTEL = PFN_vkGetPerformanceParameterINTEL( vkGetDeviceProcAddr( device, "vkGetPerformanceParameterINTEL" ) ); + PFN_vkInitializePerformanceApiINTEL vkInitializePerformanceApiINTEL = 0; + PFN_vkUninitializePerformanceApiINTEL vkUninitializePerformanceApiINTEL = 0; + PFN_vkCmdSetPerformanceMarkerINTEL vkCmdSetPerformanceMarkerINTEL = 0; + PFN_vkCmdSetPerformanceStreamMarkerINTEL vkCmdSetPerformanceStreamMarkerINTEL = 0; + PFN_vkCmdSetPerformanceOverrideINTEL vkCmdSetPerformanceOverrideINTEL = 0; + PFN_vkAcquirePerformanceConfigurationINTEL vkAcquirePerformanceConfigurationINTEL = 0; + PFN_vkReleasePerformanceConfigurationINTEL vkReleasePerformanceConfigurationINTEL = 0; + PFN_vkQueueSetPerformanceConfigurationINTEL vkQueueSetPerformanceConfigurationINTEL = 0; + PFN_vkGetPerformanceParameterINTEL vkGetPerformanceParameterINTEL = 0; //=== VK_AMD_display_native_hdr === - vkSetLocalDimmingAMD = PFN_vkSetLocalDimmingAMD( vkGetDeviceProcAddr( device, "vkSetLocalDimmingAMD" ) ); + PFN_vkSetLocalDimmingAMD vkSetLocalDimmingAMD = 0; //=== VK_KHR_fragment_shading_rate === - vkCmdSetFragmentShadingRateKHR = PFN_vkCmdSetFragmentShadingRateKHR( vkGetDeviceProcAddr( device, "vkCmdSetFragmentShadingRateKHR" ) ); + PFN_vkCmdSetFragmentShadingRateKHR vkCmdSetFragmentShadingRateKHR = 0; //=== VK_KHR_dynamic_rendering_local_read === - vkCmdSetRenderingAttachmentLocationsKHR = - PFN_vkCmdSetRenderingAttachmentLocationsKHR( vkGetDeviceProcAddr( device, "vkCmdSetRenderingAttachmentLocationsKHR" ) ); - vkCmdSetRenderingInputAttachmentIndicesKHR = - PFN_vkCmdSetRenderingInputAttachmentIndicesKHR( vkGetDeviceProcAddr( device, "vkCmdSetRenderingInputAttachmentIndicesKHR" ) ); + PFN_vkCmdSetRenderingAttachmentLocationsKHR 
vkCmdSetRenderingAttachmentLocationsKHR = 0; + PFN_vkCmdSetRenderingInputAttachmentIndicesKHR vkCmdSetRenderingInputAttachmentIndicesKHR = 0; //=== VK_EXT_buffer_device_address === - vkGetBufferDeviceAddressEXT = PFN_vkGetBufferDeviceAddressEXT( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddressEXT" ) ); - if ( !vkGetBufferDeviceAddress ) - vkGetBufferDeviceAddress = vkGetBufferDeviceAddressEXT; + PFN_vkGetBufferDeviceAddressEXT vkGetBufferDeviceAddressEXT = 0; //=== VK_KHR_present_wait === - vkWaitForPresentKHR = PFN_vkWaitForPresentKHR( vkGetDeviceProcAddr( device, "vkWaitForPresentKHR" ) ); + PFN_vkWaitForPresentKHR vkWaitForPresentKHR = 0; # if defined( VK_USE_PLATFORM_WIN32_KHR ) //=== VK_EXT_full_screen_exclusive === - vkAcquireFullScreenExclusiveModeEXT = PFN_vkAcquireFullScreenExclusiveModeEXT( vkGetDeviceProcAddr( device, "vkAcquireFullScreenExclusiveModeEXT" ) ); - vkReleaseFullScreenExclusiveModeEXT = PFN_vkReleaseFullScreenExclusiveModeEXT( vkGetDeviceProcAddr( device, "vkReleaseFullScreenExclusiveModeEXT" ) ); - vkGetDeviceGroupSurfacePresentModes2EXT = - PFN_vkGetDeviceGroupSurfacePresentModes2EXT( vkGetDeviceProcAddr( device, "vkGetDeviceGroupSurfacePresentModes2EXT" ) ); + PFN_vkAcquireFullScreenExclusiveModeEXT vkAcquireFullScreenExclusiveModeEXT = 0; + PFN_vkReleaseFullScreenExclusiveModeEXT vkReleaseFullScreenExclusiveModeEXT = 0; + PFN_vkGetDeviceGroupSurfacePresentModes2EXT vkGetDeviceGroupSurfacePresentModes2EXT = 0; +# else + PFN_dummy vkAcquireFullScreenExclusiveModeEXT_placeholder = 0; + PFN_dummy vkReleaseFullScreenExclusiveModeEXT_placeholder = 0; + PFN_dummy vkGetDeviceGroupSurfacePresentModes2EXT_placeholder = 0; # endif /*VK_USE_PLATFORM_WIN32_KHR*/ //=== VK_KHR_buffer_device_address === - vkGetBufferDeviceAddressKHR = PFN_vkGetBufferDeviceAddressKHR( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddressKHR" ) ); - if ( !vkGetBufferDeviceAddress ) - vkGetBufferDeviceAddress = vkGetBufferDeviceAddressKHR; - vkGetBufferOpaqueCaptureAddressKHR = PFN_vkGetBufferOpaqueCaptureAddressKHR( vkGetDeviceProcAddr( device, "vkGetBufferOpaqueCaptureAddressKHR" ) ); - if ( !vkGetBufferOpaqueCaptureAddress ) - vkGetBufferOpaqueCaptureAddress = vkGetBufferOpaqueCaptureAddressKHR; - vkGetDeviceMemoryOpaqueCaptureAddressKHR = - PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR( vkGetDeviceProcAddr( device, "vkGetDeviceMemoryOpaqueCaptureAddressKHR" ) ); - if ( !vkGetDeviceMemoryOpaqueCaptureAddress ) - vkGetDeviceMemoryOpaqueCaptureAddress = vkGetDeviceMemoryOpaqueCaptureAddressKHR; + PFN_vkGetBufferDeviceAddressKHR vkGetBufferDeviceAddressKHR = 0; + PFN_vkGetBufferOpaqueCaptureAddressKHR vkGetBufferOpaqueCaptureAddressKHR = 0; + PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR vkGetDeviceMemoryOpaqueCaptureAddressKHR = 0; //=== VK_EXT_line_rasterization === - vkCmdSetLineStippleEXT = PFN_vkCmdSetLineStippleEXT( vkGetDeviceProcAddr( device, "vkCmdSetLineStippleEXT" ) ); - if ( !vkCmdSetLineStippleKHR ) - vkCmdSetLineStippleKHR = vkCmdSetLineStippleEXT; + PFN_vkCmdSetLineStippleEXT vkCmdSetLineStippleEXT = 0; //=== VK_EXT_host_query_reset === - vkResetQueryPoolEXT = PFN_vkResetQueryPoolEXT( vkGetDeviceProcAddr( device, "vkResetQueryPoolEXT" ) ); - if ( !vkResetQueryPool ) - vkResetQueryPool = vkResetQueryPoolEXT; + PFN_vkResetQueryPoolEXT vkResetQueryPoolEXT = 0; //=== VK_EXT_extended_dynamic_state === - vkCmdSetCullModeEXT = PFN_vkCmdSetCullModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetCullModeEXT" ) ); - if ( !vkCmdSetCullMode ) - vkCmdSetCullMode = vkCmdSetCullModeEXT; - 
vkCmdSetFrontFaceEXT = PFN_vkCmdSetFrontFaceEXT( vkGetDeviceProcAddr( device, "vkCmdSetFrontFaceEXT" ) ); - if ( !vkCmdSetFrontFace ) - vkCmdSetFrontFace = vkCmdSetFrontFaceEXT; - vkCmdSetPrimitiveTopologyEXT = PFN_vkCmdSetPrimitiveTopologyEXT( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveTopologyEXT" ) ); - if ( !vkCmdSetPrimitiveTopology ) - vkCmdSetPrimitiveTopology = vkCmdSetPrimitiveTopologyEXT; - vkCmdSetViewportWithCountEXT = PFN_vkCmdSetViewportWithCountEXT( vkGetDeviceProcAddr( device, "vkCmdSetViewportWithCountEXT" ) ); - if ( !vkCmdSetViewportWithCount ) - vkCmdSetViewportWithCount = vkCmdSetViewportWithCountEXT; - vkCmdSetScissorWithCountEXT = PFN_vkCmdSetScissorWithCountEXT( vkGetDeviceProcAddr( device, "vkCmdSetScissorWithCountEXT" ) ); - if ( !vkCmdSetScissorWithCount ) - vkCmdSetScissorWithCount = vkCmdSetScissorWithCountEXT; - vkCmdBindVertexBuffers2EXT = PFN_vkCmdBindVertexBuffers2EXT( vkGetDeviceProcAddr( device, "vkCmdBindVertexBuffers2EXT" ) ); - if ( !vkCmdBindVertexBuffers2 ) - vkCmdBindVertexBuffers2 = vkCmdBindVertexBuffers2EXT; - vkCmdSetDepthTestEnableEXT = PFN_vkCmdSetDepthTestEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthTestEnableEXT" ) ); - if ( !vkCmdSetDepthTestEnable ) - vkCmdSetDepthTestEnable = vkCmdSetDepthTestEnableEXT; - vkCmdSetDepthWriteEnableEXT = PFN_vkCmdSetDepthWriteEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthWriteEnableEXT" ) ); - if ( !vkCmdSetDepthWriteEnable ) - vkCmdSetDepthWriteEnable = vkCmdSetDepthWriteEnableEXT; - vkCmdSetDepthCompareOpEXT = PFN_vkCmdSetDepthCompareOpEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthCompareOpEXT" ) ); - if ( !vkCmdSetDepthCompareOp ) - vkCmdSetDepthCompareOp = vkCmdSetDepthCompareOpEXT; - vkCmdSetDepthBoundsTestEnableEXT = PFN_vkCmdSetDepthBoundsTestEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthBoundsTestEnableEXT" ) ); - if ( !vkCmdSetDepthBoundsTestEnable ) - vkCmdSetDepthBoundsTestEnable = vkCmdSetDepthBoundsTestEnableEXT; - vkCmdSetStencilTestEnableEXT = PFN_vkCmdSetStencilTestEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetStencilTestEnableEXT" ) ); - if ( !vkCmdSetStencilTestEnable ) - vkCmdSetStencilTestEnable = vkCmdSetStencilTestEnableEXT; - vkCmdSetStencilOpEXT = PFN_vkCmdSetStencilOpEXT( vkGetDeviceProcAddr( device, "vkCmdSetStencilOpEXT" ) ); - if ( !vkCmdSetStencilOp ) - vkCmdSetStencilOp = vkCmdSetStencilOpEXT; + PFN_vkCmdSetCullModeEXT vkCmdSetCullModeEXT = 0; + PFN_vkCmdSetFrontFaceEXT vkCmdSetFrontFaceEXT = 0; + PFN_vkCmdSetPrimitiveTopologyEXT vkCmdSetPrimitiveTopologyEXT = 0; + PFN_vkCmdSetViewportWithCountEXT vkCmdSetViewportWithCountEXT = 0; + PFN_vkCmdSetScissorWithCountEXT vkCmdSetScissorWithCountEXT = 0; + PFN_vkCmdBindVertexBuffers2EXT vkCmdBindVertexBuffers2EXT = 0; + PFN_vkCmdSetDepthTestEnableEXT vkCmdSetDepthTestEnableEXT = 0; + PFN_vkCmdSetDepthWriteEnableEXT vkCmdSetDepthWriteEnableEXT = 0; + PFN_vkCmdSetDepthCompareOpEXT vkCmdSetDepthCompareOpEXT = 0; + PFN_vkCmdSetDepthBoundsTestEnableEXT vkCmdSetDepthBoundsTestEnableEXT = 0; + PFN_vkCmdSetStencilTestEnableEXT vkCmdSetStencilTestEnableEXT = 0; + PFN_vkCmdSetStencilOpEXT vkCmdSetStencilOpEXT = 0; //=== VK_KHR_deferred_host_operations === - vkCreateDeferredOperationKHR = PFN_vkCreateDeferredOperationKHR( vkGetDeviceProcAddr( device, "vkCreateDeferredOperationKHR" ) ); - vkDestroyDeferredOperationKHR = PFN_vkDestroyDeferredOperationKHR( vkGetDeviceProcAddr( device, "vkDestroyDeferredOperationKHR" ) ); - vkGetDeferredOperationMaxConcurrencyKHR = - PFN_vkGetDeferredOperationMaxConcurrencyKHR( 
vkGetDeviceProcAddr( device, "vkGetDeferredOperationMaxConcurrencyKHR" ) ); - vkGetDeferredOperationResultKHR = PFN_vkGetDeferredOperationResultKHR( vkGetDeviceProcAddr( device, "vkGetDeferredOperationResultKHR" ) ); - vkDeferredOperationJoinKHR = PFN_vkDeferredOperationJoinKHR( vkGetDeviceProcAddr( device, "vkDeferredOperationJoinKHR" ) ); + PFN_vkCreateDeferredOperationKHR vkCreateDeferredOperationKHR = 0; + PFN_vkDestroyDeferredOperationKHR vkDestroyDeferredOperationKHR = 0; + PFN_vkGetDeferredOperationMaxConcurrencyKHR vkGetDeferredOperationMaxConcurrencyKHR = 0; + PFN_vkGetDeferredOperationResultKHR vkGetDeferredOperationResultKHR = 0; + PFN_vkDeferredOperationJoinKHR vkDeferredOperationJoinKHR = 0; //=== VK_KHR_pipeline_executable_properties === - vkGetPipelineExecutablePropertiesKHR = - PFN_vkGetPipelineExecutablePropertiesKHR( vkGetDeviceProcAddr( device, "vkGetPipelineExecutablePropertiesKHR" ) ); - vkGetPipelineExecutableStatisticsKHR = - PFN_vkGetPipelineExecutableStatisticsKHR( vkGetDeviceProcAddr( device, "vkGetPipelineExecutableStatisticsKHR" ) ); - vkGetPipelineExecutableInternalRepresentationsKHR = - PFN_vkGetPipelineExecutableInternalRepresentationsKHR( vkGetDeviceProcAddr( device, "vkGetPipelineExecutableInternalRepresentationsKHR" ) ); + PFN_vkGetPipelineExecutablePropertiesKHR vkGetPipelineExecutablePropertiesKHR = 0; + PFN_vkGetPipelineExecutableStatisticsKHR vkGetPipelineExecutableStatisticsKHR = 0; + PFN_vkGetPipelineExecutableInternalRepresentationsKHR vkGetPipelineExecutableInternalRepresentationsKHR = 0; //=== VK_EXT_host_image_copy === - vkCopyMemoryToImageEXT = PFN_vkCopyMemoryToImageEXT( vkGetDeviceProcAddr( device, "vkCopyMemoryToImageEXT" ) ); - vkCopyImageToMemoryEXT = PFN_vkCopyImageToMemoryEXT( vkGetDeviceProcAddr( device, "vkCopyImageToMemoryEXT" ) ); - vkCopyImageToImageEXT = PFN_vkCopyImageToImageEXT( vkGetDeviceProcAddr( device, "vkCopyImageToImageEXT" ) ); - vkTransitionImageLayoutEXT = PFN_vkTransitionImageLayoutEXT( vkGetDeviceProcAddr( device, "vkTransitionImageLayoutEXT" ) ); - vkGetImageSubresourceLayout2EXT = PFN_vkGetImageSubresourceLayout2EXT( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout2EXT" ) ); - if ( !vkGetImageSubresourceLayout2KHR ) - vkGetImageSubresourceLayout2KHR = vkGetImageSubresourceLayout2EXT; + PFN_vkCopyMemoryToImageEXT vkCopyMemoryToImageEXT = 0; + PFN_vkCopyImageToMemoryEXT vkCopyImageToMemoryEXT = 0; + PFN_vkCopyImageToImageEXT vkCopyImageToImageEXT = 0; + PFN_vkTransitionImageLayoutEXT vkTransitionImageLayoutEXT = 0; + PFN_vkGetImageSubresourceLayout2EXT vkGetImageSubresourceLayout2EXT = 0; //=== VK_KHR_map_memory2 === - vkMapMemory2KHR = PFN_vkMapMemory2KHR( vkGetDeviceProcAddr( device, "vkMapMemory2KHR" ) ); - vkUnmapMemory2KHR = PFN_vkUnmapMemory2KHR( vkGetDeviceProcAddr( device, "vkUnmapMemory2KHR" ) ); + PFN_vkMapMemory2KHR vkMapMemory2KHR = 0; + PFN_vkUnmapMemory2KHR vkUnmapMemory2KHR = 0; //=== VK_EXT_swapchain_maintenance1 === - vkReleaseSwapchainImagesEXT = PFN_vkReleaseSwapchainImagesEXT( vkGetDeviceProcAddr( device, "vkReleaseSwapchainImagesEXT" ) ); + PFN_vkReleaseSwapchainImagesEXT vkReleaseSwapchainImagesEXT = 0; //=== VK_NV_device_generated_commands === - vkGetGeneratedCommandsMemoryRequirementsNV = - PFN_vkGetGeneratedCommandsMemoryRequirementsNV( vkGetDeviceProcAddr( device, "vkGetGeneratedCommandsMemoryRequirementsNV" ) ); - vkCmdPreprocessGeneratedCommandsNV = PFN_vkCmdPreprocessGeneratedCommandsNV( vkGetDeviceProcAddr( device, "vkCmdPreprocessGeneratedCommandsNV" ) ); - 
vkCmdExecuteGeneratedCommandsNV = PFN_vkCmdExecuteGeneratedCommandsNV( vkGetDeviceProcAddr( device, "vkCmdExecuteGeneratedCommandsNV" ) ); - vkCmdBindPipelineShaderGroupNV = PFN_vkCmdBindPipelineShaderGroupNV( vkGetDeviceProcAddr( device, "vkCmdBindPipelineShaderGroupNV" ) ); - vkCreateIndirectCommandsLayoutNV = PFN_vkCreateIndirectCommandsLayoutNV( vkGetDeviceProcAddr( device, "vkCreateIndirectCommandsLayoutNV" ) ); - vkDestroyIndirectCommandsLayoutNV = PFN_vkDestroyIndirectCommandsLayoutNV( vkGetDeviceProcAddr( device, "vkDestroyIndirectCommandsLayoutNV" ) ); + PFN_vkGetGeneratedCommandsMemoryRequirementsNV vkGetGeneratedCommandsMemoryRequirementsNV = 0; + PFN_vkCmdPreprocessGeneratedCommandsNV vkCmdPreprocessGeneratedCommandsNV = 0; + PFN_vkCmdExecuteGeneratedCommandsNV vkCmdExecuteGeneratedCommandsNV = 0; + PFN_vkCmdBindPipelineShaderGroupNV vkCmdBindPipelineShaderGroupNV = 0; + PFN_vkCreateIndirectCommandsLayoutNV vkCreateIndirectCommandsLayoutNV = 0; + PFN_vkDestroyIndirectCommandsLayoutNV vkDestroyIndirectCommandsLayoutNV = 0; //=== VK_EXT_depth_bias_control === - vkCmdSetDepthBias2EXT = PFN_vkCmdSetDepthBias2EXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthBias2EXT" ) ); + PFN_vkCmdSetDepthBias2EXT vkCmdSetDepthBias2EXT = 0; //=== VK_EXT_private_data === - vkCreatePrivateDataSlotEXT = PFN_vkCreatePrivateDataSlotEXT( vkGetDeviceProcAddr( device, "vkCreatePrivateDataSlotEXT" ) ); - if ( !vkCreatePrivateDataSlot ) - vkCreatePrivateDataSlot = vkCreatePrivateDataSlotEXT; - vkDestroyPrivateDataSlotEXT = PFN_vkDestroyPrivateDataSlotEXT( vkGetDeviceProcAddr( device, "vkDestroyPrivateDataSlotEXT" ) ); - if ( !vkDestroyPrivateDataSlot ) - vkDestroyPrivateDataSlot = vkDestroyPrivateDataSlotEXT; - vkSetPrivateDataEXT = PFN_vkSetPrivateDataEXT( vkGetDeviceProcAddr( device, "vkSetPrivateDataEXT" ) ); - if ( !vkSetPrivateData ) - vkSetPrivateData = vkSetPrivateDataEXT; - vkGetPrivateDataEXT = PFN_vkGetPrivateDataEXT( vkGetDeviceProcAddr( device, "vkGetPrivateDataEXT" ) ); - if ( !vkGetPrivateData ) - vkGetPrivateData = vkGetPrivateDataEXT; + PFN_vkCreatePrivateDataSlotEXT vkCreatePrivateDataSlotEXT = 0; + PFN_vkDestroyPrivateDataSlotEXT vkDestroyPrivateDataSlotEXT = 0; + PFN_vkSetPrivateDataEXT vkSetPrivateDataEXT = 0; + PFN_vkGetPrivateDataEXT vkGetPrivateDataEXT = 0; //=== VK_KHR_video_encode_queue === - vkGetEncodedVideoSessionParametersKHR = - PFN_vkGetEncodedVideoSessionParametersKHR( vkGetDeviceProcAddr( device, "vkGetEncodedVideoSessionParametersKHR" ) ); - vkCmdEncodeVideoKHR = PFN_vkCmdEncodeVideoKHR( vkGetDeviceProcAddr( device, "vkCmdEncodeVideoKHR" ) ); + PFN_vkGetEncodedVideoSessionParametersKHR vkGetEncodedVideoSessionParametersKHR = 0; + PFN_vkCmdEncodeVideoKHR vkCmdEncodeVideoKHR = 0; # if defined( VK_ENABLE_BETA_EXTENSIONS ) //=== VK_NV_cuda_kernel_launch === - vkCreateCudaModuleNV = PFN_vkCreateCudaModuleNV( vkGetDeviceProcAddr( device, "vkCreateCudaModuleNV" ) ); - vkGetCudaModuleCacheNV = PFN_vkGetCudaModuleCacheNV( vkGetDeviceProcAddr( device, "vkGetCudaModuleCacheNV" ) ); - vkCreateCudaFunctionNV = PFN_vkCreateCudaFunctionNV( vkGetDeviceProcAddr( device, "vkCreateCudaFunctionNV" ) ); - vkDestroyCudaModuleNV = PFN_vkDestroyCudaModuleNV( vkGetDeviceProcAddr( device, "vkDestroyCudaModuleNV" ) ); - vkDestroyCudaFunctionNV = PFN_vkDestroyCudaFunctionNV( vkGetDeviceProcAddr( device, "vkDestroyCudaFunctionNV" ) ); - vkCmdCudaLaunchKernelNV = PFN_vkCmdCudaLaunchKernelNV( vkGetDeviceProcAddr( device, "vkCmdCudaLaunchKernelNV" ) ); + PFN_vkCreateCudaModuleNV vkCreateCudaModuleNV = 0; + 
PFN_vkGetCudaModuleCacheNV vkGetCudaModuleCacheNV = 0; + PFN_vkCreateCudaFunctionNV vkCreateCudaFunctionNV = 0; + PFN_vkDestroyCudaModuleNV vkDestroyCudaModuleNV = 0; + PFN_vkDestroyCudaFunctionNV vkDestroyCudaFunctionNV = 0; + PFN_vkCmdCudaLaunchKernelNV vkCmdCudaLaunchKernelNV = 0; +# else + PFN_dummy vkCreateCudaModuleNV_placeholder = 0; + PFN_dummy vkGetCudaModuleCacheNV_placeholder = 0; + PFN_dummy vkCreateCudaFunctionNV_placeholder = 0; + PFN_dummy vkDestroyCudaModuleNV_placeholder = 0; + PFN_dummy vkDestroyCudaFunctionNV_placeholder = 0; + PFN_dummy vkCmdCudaLaunchKernelNV_placeholder = 0; # endif /*VK_ENABLE_BETA_EXTENSIONS*/ # if defined( VK_USE_PLATFORM_METAL_EXT ) //=== VK_EXT_metal_objects === - vkExportMetalObjectsEXT = PFN_vkExportMetalObjectsEXT( vkGetDeviceProcAddr( device, "vkExportMetalObjectsEXT" ) ); + PFN_vkExportMetalObjectsEXT vkExportMetalObjectsEXT = 0; +# else + PFN_dummy vkExportMetalObjectsEXT_placeholder = 0; # endif /*VK_USE_PLATFORM_METAL_EXT*/ //=== VK_KHR_synchronization2 === - vkCmdSetEvent2KHR = PFN_vkCmdSetEvent2KHR( vkGetDeviceProcAddr( device, "vkCmdSetEvent2KHR" ) ); - if ( !vkCmdSetEvent2 ) - vkCmdSetEvent2 = vkCmdSetEvent2KHR; - vkCmdResetEvent2KHR = PFN_vkCmdResetEvent2KHR( vkGetDeviceProcAddr( device, "vkCmdResetEvent2KHR" ) ); - if ( !vkCmdResetEvent2 ) - vkCmdResetEvent2 = vkCmdResetEvent2KHR; - vkCmdWaitEvents2KHR = PFN_vkCmdWaitEvents2KHR( vkGetDeviceProcAddr( device, "vkCmdWaitEvents2KHR" ) ); - if ( !vkCmdWaitEvents2 ) - vkCmdWaitEvents2 = vkCmdWaitEvents2KHR; - vkCmdPipelineBarrier2KHR = PFN_vkCmdPipelineBarrier2KHR( vkGetDeviceProcAddr( device, "vkCmdPipelineBarrier2KHR" ) ); - if ( !vkCmdPipelineBarrier2 ) - vkCmdPipelineBarrier2 = vkCmdPipelineBarrier2KHR; - vkCmdWriteTimestamp2KHR = PFN_vkCmdWriteTimestamp2KHR( vkGetDeviceProcAddr( device, "vkCmdWriteTimestamp2KHR" ) ); - if ( !vkCmdWriteTimestamp2 ) - vkCmdWriteTimestamp2 = vkCmdWriteTimestamp2KHR; - vkQueueSubmit2KHR = PFN_vkQueueSubmit2KHR( vkGetDeviceProcAddr( device, "vkQueueSubmit2KHR" ) ); - if ( !vkQueueSubmit2 ) - vkQueueSubmit2 = vkQueueSubmit2KHR; + PFN_vkCmdSetEvent2KHR vkCmdSetEvent2KHR = 0; + PFN_vkCmdResetEvent2KHR vkCmdResetEvent2KHR = 0; + PFN_vkCmdWaitEvents2KHR vkCmdWaitEvents2KHR = 0; + PFN_vkCmdPipelineBarrier2KHR vkCmdPipelineBarrier2KHR = 0; + PFN_vkCmdWriteTimestamp2KHR vkCmdWriteTimestamp2KHR = 0; + PFN_vkQueueSubmit2KHR vkQueueSubmit2KHR = 0; //=== VK_EXT_descriptor_buffer === - vkGetDescriptorSetLayoutSizeEXT = PFN_vkGetDescriptorSetLayoutSizeEXT( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutSizeEXT" ) ); - vkGetDescriptorSetLayoutBindingOffsetEXT = - PFN_vkGetDescriptorSetLayoutBindingOffsetEXT( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutBindingOffsetEXT" ) ); - vkGetDescriptorEXT = PFN_vkGetDescriptorEXT( vkGetDeviceProcAddr( device, "vkGetDescriptorEXT" ) ); - vkCmdBindDescriptorBuffersEXT = PFN_vkCmdBindDescriptorBuffersEXT( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorBuffersEXT" ) ); - vkCmdSetDescriptorBufferOffsetsEXT = PFN_vkCmdSetDescriptorBufferOffsetsEXT( vkGetDeviceProcAddr( device, "vkCmdSetDescriptorBufferOffsetsEXT" ) ); - vkCmdBindDescriptorBufferEmbeddedSamplersEXT = - PFN_vkCmdBindDescriptorBufferEmbeddedSamplersEXT( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorBufferEmbeddedSamplersEXT" ) ); - vkGetBufferOpaqueCaptureDescriptorDataEXT = - PFN_vkGetBufferOpaqueCaptureDescriptorDataEXT( vkGetDeviceProcAddr( device, "vkGetBufferOpaqueCaptureDescriptorDataEXT" ) ); - vkGetImageOpaqueCaptureDescriptorDataEXT = - 
PFN_vkGetImageOpaqueCaptureDescriptorDataEXT( vkGetDeviceProcAddr( device, "vkGetImageOpaqueCaptureDescriptorDataEXT" ) ); - vkGetImageViewOpaqueCaptureDescriptorDataEXT = - PFN_vkGetImageViewOpaqueCaptureDescriptorDataEXT( vkGetDeviceProcAddr( device, "vkGetImageViewOpaqueCaptureDescriptorDataEXT" ) ); - vkGetSamplerOpaqueCaptureDescriptorDataEXT = - PFN_vkGetSamplerOpaqueCaptureDescriptorDataEXT( vkGetDeviceProcAddr( device, "vkGetSamplerOpaqueCaptureDescriptorDataEXT" ) ); - vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT = PFN_vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT( - vkGetDeviceProcAddr( device, "vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT" ) ); + PFN_vkGetDescriptorSetLayoutSizeEXT vkGetDescriptorSetLayoutSizeEXT = 0; + PFN_vkGetDescriptorSetLayoutBindingOffsetEXT vkGetDescriptorSetLayoutBindingOffsetEXT = 0; + PFN_vkGetDescriptorEXT vkGetDescriptorEXT = 0; + PFN_vkCmdBindDescriptorBuffersEXT vkCmdBindDescriptorBuffersEXT = 0; + PFN_vkCmdSetDescriptorBufferOffsetsEXT vkCmdSetDescriptorBufferOffsetsEXT = 0; + PFN_vkCmdBindDescriptorBufferEmbeddedSamplersEXT vkCmdBindDescriptorBufferEmbeddedSamplersEXT = 0; + PFN_vkGetBufferOpaqueCaptureDescriptorDataEXT vkGetBufferOpaqueCaptureDescriptorDataEXT = 0; + PFN_vkGetImageOpaqueCaptureDescriptorDataEXT vkGetImageOpaqueCaptureDescriptorDataEXT = 0; + PFN_vkGetImageViewOpaqueCaptureDescriptorDataEXT vkGetImageViewOpaqueCaptureDescriptorDataEXT = 0; + PFN_vkGetSamplerOpaqueCaptureDescriptorDataEXT vkGetSamplerOpaqueCaptureDescriptorDataEXT = 0; + PFN_vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT = 0; //=== VK_NV_fragment_shading_rate_enums === - vkCmdSetFragmentShadingRateEnumNV = PFN_vkCmdSetFragmentShadingRateEnumNV( vkGetDeviceProcAddr( device, "vkCmdSetFragmentShadingRateEnumNV" ) ); + PFN_vkCmdSetFragmentShadingRateEnumNV vkCmdSetFragmentShadingRateEnumNV = 0; //=== VK_EXT_mesh_shader === - vkCmdDrawMeshTasksEXT = PFN_vkCmdDrawMeshTasksEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksEXT" ) ); - vkCmdDrawMeshTasksIndirectEXT = PFN_vkCmdDrawMeshTasksIndirectEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectEXT" ) ); - vkCmdDrawMeshTasksIndirectCountEXT = PFN_vkCmdDrawMeshTasksIndirectCountEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectCountEXT" ) ); + PFN_vkCmdDrawMeshTasksEXT vkCmdDrawMeshTasksEXT = 0; + PFN_vkCmdDrawMeshTasksIndirectEXT vkCmdDrawMeshTasksIndirectEXT = 0; + PFN_vkCmdDrawMeshTasksIndirectCountEXT vkCmdDrawMeshTasksIndirectCountEXT = 0; //=== VK_KHR_copy_commands2 === - vkCmdCopyBuffer2KHR = PFN_vkCmdCopyBuffer2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyBuffer2KHR" ) ); - if ( !vkCmdCopyBuffer2 ) - vkCmdCopyBuffer2 = vkCmdCopyBuffer2KHR; - vkCmdCopyImage2KHR = PFN_vkCmdCopyImage2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyImage2KHR" ) ); - if ( !vkCmdCopyImage2 ) - vkCmdCopyImage2 = vkCmdCopyImage2KHR; - vkCmdCopyBufferToImage2KHR = PFN_vkCmdCopyBufferToImage2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyBufferToImage2KHR" ) ); - if ( !vkCmdCopyBufferToImage2 ) - vkCmdCopyBufferToImage2 = vkCmdCopyBufferToImage2KHR; - vkCmdCopyImageToBuffer2KHR = PFN_vkCmdCopyImageToBuffer2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyImageToBuffer2KHR" ) ); - if ( !vkCmdCopyImageToBuffer2 ) - vkCmdCopyImageToBuffer2 = vkCmdCopyImageToBuffer2KHR; - vkCmdBlitImage2KHR = PFN_vkCmdBlitImage2KHR( vkGetDeviceProcAddr( device, "vkCmdBlitImage2KHR" ) ); - if ( !vkCmdBlitImage2 ) - vkCmdBlitImage2 = 
vkCmdBlitImage2KHR; - vkCmdResolveImage2KHR = PFN_vkCmdResolveImage2KHR( vkGetDeviceProcAddr( device, "vkCmdResolveImage2KHR" ) ); - if ( !vkCmdResolveImage2 ) - vkCmdResolveImage2 = vkCmdResolveImage2KHR; + PFN_vkCmdCopyBuffer2KHR vkCmdCopyBuffer2KHR = 0; + PFN_vkCmdCopyImage2KHR vkCmdCopyImage2KHR = 0; + PFN_vkCmdCopyBufferToImage2KHR vkCmdCopyBufferToImage2KHR = 0; + PFN_vkCmdCopyImageToBuffer2KHR vkCmdCopyImageToBuffer2KHR = 0; + PFN_vkCmdBlitImage2KHR vkCmdBlitImage2KHR = 0; + PFN_vkCmdResolveImage2KHR vkCmdResolveImage2KHR = 0; //=== VK_EXT_device_fault === - vkGetDeviceFaultInfoEXT = PFN_vkGetDeviceFaultInfoEXT( vkGetDeviceProcAddr( device, "vkGetDeviceFaultInfoEXT" ) ); + PFN_vkGetDeviceFaultInfoEXT vkGetDeviceFaultInfoEXT = 0; //=== VK_EXT_vertex_input_dynamic_state === - vkCmdSetVertexInputEXT = PFN_vkCmdSetVertexInputEXT( vkGetDeviceProcAddr( device, "vkCmdSetVertexInputEXT" ) ); + PFN_vkCmdSetVertexInputEXT vkCmdSetVertexInputEXT = 0; # if defined( VK_USE_PLATFORM_FUCHSIA ) //=== VK_FUCHSIA_external_memory === - vkGetMemoryZirconHandleFUCHSIA = PFN_vkGetMemoryZirconHandleFUCHSIA( vkGetDeviceProcAddr( device, "vkGetMemoryZirconHandleFUCHSIA" ) ); - vkGetMemoryZirconHandlePropertiesFUCHSIA = - PFN_vkGetMemoryZirconHandlePropertiesFUCHSIA( vkGetDeviceProcAddr( device, "vkGetMemoryZirconHandlePropertiesFUCHSIA" ) ); + PFN_vkGetMemoryZirconHandleFUCHSIA vkGetMemoryZirconHandleFUCHSIA = 0; + PFN_vkGetMemoryZirconHandlePropertiesFUCHSIA vkGetMemoryZirconHandlePropertiesFUCHSIA = 0; +# else + PFN_dummy vkGetMemoryZirconHandleFUCHSIA_placeholder = 0; + PFN_dummy vkGetMemoryZirconHandlePropertiesFUCHSIA_placeholder = 0; # endif /*VK_USE_PLATFORM_FUCHSIA*/ # if defined( VK_USE_PLATFORM_FUCHSIA ) //=== VK_FUCHSIA_external_semaphore === - vkImportSemaphoreZirconHandleFUCHSIA = - PFN_vkImportSemaphoreZirconHandleFUCHSIA( vkGetDeviceProcAddr( device, "vkImportSemaphoreZirconHandleFUCHSIA" ) ); - vkGetSemaphoreZirconHandleFUCHSIA = PFN_vkGetSemaphoreZirconHandleFUCHSIA( vkGetDeviceProcAddr( device, "vkGetSemaphoreZirconHandleFUCHSIA" ) ); + PFN_vkImportSemaphoreZirconHandleFUCHSIA vkImportSemaphoreZirconHandleFUCHSIA = 0; + PFN_vkGetSemaphoreZirconHandleFUCHSIA vkGetSemaphoreZirconHandleFUCHSIA = 0; +# else + PFN_dummy vkImportSemaphoreZirconHandleFUCHSIA_placeholder = 0; + PFN_dummy vkGetSemaphoreZirconHandleFUCHSIA_placeholder = 0; # endif /*VK_USE_PLATFORM_FUCHSIA*/ # if defined( VK_USE_PLATFORM_FUCHSIA ) //=== VK_FUCHSIA_buffer_collection === - vkCreateBufferCollectionFUCHSIA = PFN_vkCreateBufferCollectionFUCHSIA( vkGetDeviceProcAddr( device, "vkCreateBufferCollectionFUCHSIA" ) ); - vkSetBufferCollectionImageConstraintsFUCHSIA = - PFN_vkSetBufferCollectionImageConstraintsFUCHSIA( vkGetDeviceProcAddr( device, "vkSetBufferCollectionImageConstraintsFUCHSIA" ) ); - vkSetBufferCollectionBufferConstraintsFUCHSIA = - PFN_vkSetBufferCollectionBufferConstraintsFUCHSIA( vkGetDeviceProcAddr( device, "vkSetBufferCollectionBufferConstraintsFUCHSIA" ) ); - vkDestroyBufferCollectionFUCHSIA = PFN_vkDestroyBufferCollectionFUCHSIA( vkGetDeviceProcAddr( device, "vkDestroyBufferCollectionFUCHSIA" ) ); - vkGetBufferCollectionPropertiesFUCHSIA = - PFN_vkGetBufferCollectionPropertiesFUCHSIA( vkGetDeviceProcAddr( device, "vkGetBufferCollectionPropertiesFUCHSIA" ) ); + PFN_vkCreateBufferCollectionFUCHSIA vkCreateBufferCollectionFUCHSIA = 0; + PFN_vkSetBufferCollectionImageConstraintsFUCHSIA vkSetBufferCollectionImageConstraintsFUCHSIA = 0; + PFN_vkSetBufferCollectionBufferConstraintsFUCHSIA 
vkSetBufferCollectionBufferConstraintsFUCHSIA = 0; + PFN_vkDestroyBufferCollectionFUCHSIA vkDestroyBufferCollectionFUCHSIA = 0; + PFN_vkGetBufferCollectionPropertiesFUCHSIA vkGetBufferCollectionPropertiesFUCHSIA = 0; +# else + PFN_dummy vkCreateBufferCollectionFUCHSIA_placeholder = 0; + PFN_dummy vkSetBufferCollectionImageConstraintsFUCHSIA_placeholder = 0; + PFN_dummy vkSetBufferCollectionBufferConstraintsFUCHSIA_placeholder = 0; + PFN_dummy vkDestroyBufferCollectionFUCHSIA_placeholder = 0; + PFN_dummy vkGetBufferCollectionPropertiesFUCHSIA_placeholder = 0; # endif /*VK_USE_PLATFORM_FUCHSIA*/ //=== VK_HUAWEI_subpass_shading === - vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI = - PFN_vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI( vkGetDeviceProcAddr( device, "vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI" ) ); - vkCmdSubpassShadingHUAWEI = PFN_vkCmdSubpassShadingHUAWEI( vkGetDeviceProcAddr( device, "vkCmdSubpassShadingHUAWEI" ) ); + PFN_vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI = 0; + PFN_vkCmdSubpassShadingHUAWEI vkCmdSubpassShadingHUAWEI = 0; //=== VK_HUAWEI_invocation_mask === - vkCmdBindInvocationMaskHUAWEI = PFN_vkCmdBindInvocationMaskHUAWEI( vkGetDeviceProcAddr( device, "vkCmdBindInvocationMaskHUAWEI" ) ); + PFN_vkCmdBindInvocationMaskHUAWEI vkCmdBindInvocationMaskHUAWEI = 0; //=== VK_NV_external_memory_rdma === - vkGetMemoryRemoteAddressNV = PFN_vkGetMemoryRemoteAddressNV( vkGetDeviceProcAddr( device, "vkGetMemoryRemoteAddressNV" ) ); + PFN_vkGetMemoryRemoteAddressNV vkGetMemoryRemoteAddressNV = 0; //=== VK_EXT_pipeline_properties === - vkGetPipelinePropertiesEXT = PFN_vkGetPipelinePropertiesEXT( vkGetDeviceProcAddr( device, "vkGetPipelinePropertiesEXT" ) ); + PFN_vkGetPipelinePropertiesEXT vkGetPipelinePropertiesEXT = 0; //=== VK_EXT_extended_dynamic_state2 === - vkCmdSetPatchControlPointsEXT = PFN_vkCmdSetPatchControlPointsEXT( vkGetDeviceProcAddr( device, "vkCmdSetPatchControlPointsEXT" ) ); - vkCmdSetRasterizerDiscardEnableEXT = PFN_vkCmdSetRasterizerDiscardEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetRasterizerDiscardEnableEXT" ) ); - if ( !vkCmdSetRasterizerDiscardEnable ) - vkCmdSetRasterizerDiscardEnable = vkCmdSetRasterizerDiscardEnableEXT; - vkCmdSetDepthBiasEnableEXT = PFN_vkCmdSetDepthBiasEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthBiasEnableEXT" ) ); - if ( !vkCmdSetDepthBiasEnable ) - vkCmdSetDepthBiasEnable = vkCmdSetDepthBiasEnableEXT; - vkCmdSetLogicOpEXT = PFN_vkCmdSetLogicOpEXT( vkGetDeviceProcAddr( device, "vkCmdSetLogicOpEXT" ) ); - vkCmdSetPrimitiveRestartEnableEXT = PFN_vkCmdSetPrimitiveRestartEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveRestartEnableEXT" ) ); - if ( !vkCmdSetPrimitiveRestartEnable ) - vkCmdSetPrimitiveRestartEnable = vkCmdSetPrimitiveRestartEnableEXT; + PFN_vkCmdSetPatchControlPointsEXT vkCmdSetPatchControlPointsEXT = 0; + PFN_vkCmdSetRasterizerDiscardEnableEXT vkCmdSetRasterizerDiscardEnableEXT = 0; + PFN_vkCmdSetDepthBiasEnableEXT vkCmdSetDepthBiasEnableEXT = 0; + PFN_vkCmdSetLogicOpEXT vkCmdSetLogicOpEXT = 0; + PFN_vkCmdSetPrimitiveRestartEnableEXT vkCmdSetPrimitiveRestartEnableEXT = 0; //=== VK_EXT_color_write_enable === - vkCmdSetColorWriteEnableEXT = PFN_vkCmdSetColorWriteEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorWriteEnableEXT" ) ); + PFN_vkCmdSetColorWriteEnableEXT vkCmdSetColorWriteEnableEXT = 0; //=== VK_KHR_ray_tracing_maintenance1 === - vkCmdTraceRaysIndirect2KHR = PFN_vkCmdTraceRaysIndirect2KHR( vkGetDeviceProcAddr( device, 
"vkCmdTraceRaysIndirect2KHR" ) ); + PFN_vkCmdTraceRaysIndirect2KHR vkCmdTraceRaysIndirect2KHR = 0; //=== VK_EXT_multi_draw === - vkCmdDrawMultiEXT = PFN_vkCmdDrawMultiEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMultiEXT" ) ); - vkCmdDrawMultiIndexedEXT = PFN_vkCmdDrawMultiIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMultiIndexedEXT" ) ); + PFN_vkCmdDrawMultiEXT vkCmdDrawMultiEXT = 0; + PFN_vkCmdDrawMultiIndexedEXT vkCmdDrawMultiIndexedEXT = 0; //=== VK_EXT_opacity_micromap === - vkCreateMicromapEXT = PFN_vkCreateMicromapEXT( vkGetDeviceProcAddr( device, "vkCreateMicromapEXT" ) ); - vkDestroyMicromapEXT = PFN_vkDestroyMicromapEXT( vkGetDeviceProcAddr( device, "vkDestroyMicromapEXT" ) ); - vkCmdBuildMicromapsEXT = PFN_vkCmdBuildMicromapsEXT( vkGetDeviceProcAddr( device, "vkCmdBuildMicromapsEXT" ) ); - vkBuildMicromapsEXT = PFN_vkBuildMicromapsEXT( vkGetDeviceProcAddr( device, "vkBuildMicromapsEXT" ) ); - vkCopyMicromapEXT = PFN_vkCopyMicromapEXT( vkGetDeviceProcAddr( device, "vkCopyMicromapEXT" ) ); - vkCopyMicromapToMemoryEXT = PFN_vkCopyMicromapToMemoryEXT( vkGetDeviceProcAddr( device, "vkCopyMicromapToMemoryEXT" ) ); - vkCopyMemoryToMicromapEXT = PFN_vkCopyMemoryToMicromapEXT( vkGetDeviceProcAddr( device, "vkCopyMemoryToMicromapEXT" ) ); - vkWriteMicromapsPropertiesEXT = PFN_vkWriteMicromapsPropertiesEXT( vkGetDeviceProcAddr( device, "vkWriteMicromapsPropertiesEXT" ) ); - vkCmdCopyMicromapEXT = PFN_vkCmdCopyMicromapEXT( vkGetDeviceProcAddr( device, "vkCmdCopyMicromapEXT" ) ); - vkCmdCopyMicromapToMemoryEXT = PFN_vkCmdCopyMicromapToMemoryEXT( vkGetDeviceProcAddr( device, "vkCmdCopyMicromapToMemoryEXT" ) ); - vkCmdCopyMemoryToMicromapEXT = PFN_vkCmdCopyMemoryToMicromapEXT( vkGetDeviceProcAddr( device, "vkCmdCopyMemoryToMicromapEXT" ) ); - vkCmdWriteMicromapsPropertiesEXT = PFN_vkCmdWriteMicromapsPropertiesEXT( vkGetDeviceProcAddr( device, "vkCmdWriteMicromapsPropertiesEXT" ) ); - vkGetDeviceMicromapCompatibilityEXT = PFN_vkGetDeviceMicromapCompatibilityEXT( vkGetDeviceProcAddr( device, "vkGetDeviceMicromapCompatibilityEXT" ) ); - vkGetMicromapBuildSizesEXT = PFN_vkGetMicromapBuildSizesEXT( vkGetDeviceProcAddr( device, "vkGetMicromapBuildSizesEXT" ) ); + PFN_vkCreateMicromapEXT vkCreateMicromapEXT = 0; + PFN_vkDestroyMicromapEXT vkDestroyMicromapEXT = 0; + PFN_vkCmdBuildMicromapsEXT vkCmdBuildMicromapsEXT = 0; + PFN_vkBuildMicromapsEXT vkBuildMicromapsEXT = 0; + PFN_vkCopyMicromapEXT vkCopyMicromapEXT = 0; + PFN_vkCopyMicromapToMemoryEXT vkCopyMicromapToMemoryEXT = 0; + PFN_vkCopyMemoryToMicromapEXT vkCopyMemoryToMicromapEXT = 0; + PFN_vkWriteMicromapsPropertiesEXT vkWriteMicromapsPropertiesEXT = 0; + PFN_vkCmdCopyMicromapEXT vkCmdCopyMicromapEXT = 0; + PFN_vkCmdCopyMicromapToMemoryEXT vkCmdCopyMicromapToMemoryEXT = 0; + PFN_vkCmdCopyMemoryToMicromapEXT vkCmdCopyMemoryToMicromapEXT = 0; + PFN_vkCmdWriteMicromapsPropertiesEXT vkCmdWriteMicromapsPropertiesEXT = 0; + PFN_vkGetDeviceMicromapCompatibilityEXT vkGetDeviceMicromapCompatibilityEXT = 0; + PFN_vkGetMicromapBuildSizesEXT vkGetMicromapBuildSizesEXT = 0; //=== VK_HUAWEI_cluster_culling_shader === - vkCmdDrawClusterHUAWEI = PFN_vkCmdDrawClusterHUAWEI( vkGetDeviceProcAddr( device, "vkCmdDrawClusterHUAWEI" ) ); - vkCmdDrawClusterIndirectHUAWEI = PFN_vkCmdDrawClusterIndirectHUAWEI( vkGetDeviceProcAddr( device, "vkCmdDrawClusterIndirectHUAWEI" ) ); + PFN_vkCmdDrawClusterHUAWEI vkCmdDrawClusterHUAWEI = 0; + PFN_vkCmdDrawClusterIndirectHUAWEI vkCmdDrawClusterIndirectHUAWEI = 0; //=== VK_EXT_pageable_device_local_memory === - 
vkSetDeviceMemoryPriorityEXT = PFN_vkSetDeviceMemoryPriorityEXT( vkGetDeviceProcAddr( device, "vkSetDeviceMemoryPriorityEXT" ) ); + PFN_vkSetDeviceMemoryPriorityEXT vkSetDeviceMemoryPriorityEXT = 0; //=== VK_KHR_maintenance4 === - vkGetDeviceBufferMemoryRequirementsKHR = - PFN_vkGetDeviceBufferMemoryRequirementsKHR( vkGetDeviceProcAddr( device, "vkGetDeviceBufferMemoryRequirementsKHR" ) ); - if ( !vkGetDeviceBufferMemoryRequirements ) - vkGetDeviceBufferMemoryRequirements = vkGetDeviceBufferMemoryRequirementsKHR; - vkGetDeviceImageMemoryRequirementsKHR = - PFN_vkGetDeviceImageMemoryRequirementsKHR( vkGetDeviceProcAddr( device, "vkGetDeviceImageMemoryRequirementsKHR" ) ); - if ( !vkGetDeviceImageMemoryRequirements ) - vkGetDeviceImageMemoryRequirements = vkGetDeviceImageMemoryRequirementsKHR; - vkGetDeviceImageSparseMemoryRequirementsKHR = - PFN_vkGetDeviceImageSparseMemoryRequirementsKHR( vkGetDeviceProcAddr( device, "vkGetDeviceImageSparseMemoryRequirementsKHR" ) ); - if ( !vkGetDeviceImageSparseMemoryRequirements ) - vkGetDeviceImageSparseMemoryRequirements = vkGetDeviceImageSparseMemoryRequirementsKHR; + PFN_vkGetDeviceBufferMemoryRequirementsKHR vkGetDeviceBufferMemoryRequirementsKHR = 0; + PFN_vkGetDeviceImageMemoryRequirementsKHR vkGetDeviceImageMemoryRequirementsKHR = 0; + PFN_vkGetDeviceImageSparseMemoryRequirementsKHR vkGetDeviceImageSparseMemoryRequirementsKHR = 0; //=== VK_VALVE_descriptor_set_host_mapping === - vkGetDescriptorSetLayoutHostMappingInfoVALVE = - PFN_vkGetDescriptorSetLayoutHostMappingInfoVALVE( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutHostMappingInfoVALVE" ) ); - vkGetDescriptorSetHostMappingVALVE = PFN_vkGetDescriptorSetHostMappingVALVE( vkGetDeviceProcAddr( device, "vkGetDescriptorSetHostMappingVALVE" ) ); + PFN_vkGetDescriptorSetLayoutHostMappingInfoVALVE vkGetDescriptorSetLayoutHostMappingInfoVALVE = 0; + PFN_vkGetDescriptorSetHostMappingVALVE vkGetDescriptorSetHostMappingVALVE = 0; //=== VK_NV_copy_memory_indirect === - vkCmdCopyMemoryIndirectNV = PFN_vkCmdCopyMemoryIndirectNV( vkGetDeviceProcAddr( device, "vkCmdCopyMemoryIndirectNV" ) ); - vkCmdCopyMemoryToImageIndirectNV = PFN_vkCmdCopyMemoryToImageIndirectNV( vkGetDeviceProcAddr( device, "vkCmdCopyMemoryToImageIndirectNV" ) ); + PFN_vkCmdCopyMemoryIndirectNV vkCmdCopyMemoryIndirectNV = 0; + PFN_vkCmdCopyMemoryToImageIndirectNV vkCmdCopyMemoryToImageIndirectNV = 0; //=== VK_NV_memory_decompression === - vkCmdDecompressMemoryNV = PFN_vkCmdDecompressMemoryNV( vkGetDeviceProcAddr( device, "vkCmdDecompressMemoryNV" ) ); - vkCmdDecompressMemoryIndirectCountNV = - PFN_vkCmdDecompressMemoryIndirectCountNV( vkGetDeviceProcAddr( device, "vkCmdDecompressMemoryIndirectCountNV" ) ); + PFN_vkCmdDecompressMemoryNV vkCmdDecompressMemoryNV = 0; + PFN_vkCmdDecompressMemoryIndirectCountNV vkCmdDecompressMemoryIndirectCountNV = 0; //=== VK_NV_device_generated_commands_compute === - vkGetPipelineIndirectMemoryRequirementsNV = - PFN_vkGetPipelineIndirectMemoryRequirementsNV( vkGetDeviceProcAddr( device, "vkGetPipelineIndirectMemoryRequirementsNV" ) ); - vkCmdUpdatePipelineIndirectBufferNV = PFN_vkCmdUpdatePipelineIndirectBufferNV( vkGetDeviceProcAddr( device, "vkCmdUpdatePipelineIndirectBufferNV" ) ); - vkGetPipelineIndirectDeviceAddressNV = - PFN_vkGetPipelineIndirectDeviceAddressNV( vkGetDeviceProcAddr( device, "vkGetPipelineIndirectDeviceAddressNV" ) ); + PFN_vkGetPipelineIndirectMemoryRequirementsNV vkGetPipelineIndirectMemoryRequirementsNV = 0; + PFN_vkCmdUpdatePipelineIndirectBufferNV 
vkCmdUpdatePipelineIndirectBufferNV = 0; + PFN_vkGetPipelineIndirectDeviceAddressNV vkGetPipelineIndirectDeviceAddressNV = 0; //=== VK_EXT_extended_dynamic_state3 === - vkCmdSetDepthClampEnableEXT = PFN_vkCmdSetDepthClampEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthClampEnableEXT" ) ); - vkCmdSetPolygonModeEXT = PFN_vkCmdSetPolygonModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetPolygonModeEXT" ) ); - vkCmdSetRasterizationSamplesEXT = PFN_vkCmdSetRasterizationSamplesEXT( vkGetDeviceProcAddr( device, "vkCmdSetRasterizationSamplesEXT" ) ); - vkCmdSetSampleMaskEXT = PFN_vkCmdSetSampleMaskEXT( vkGetDeviceProcAddr( device, "vkCmdSetSampleMaskEXT" ) ); - vkCmdSetAlphaToCoverageEnableEXT = PFN_vkCmdSetAlphaToCoverageEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetAlphaToCoverageEnableEXT" ) ); - vkCmdSetAlphaToOneEnableEXT = PFN_vkCmdSetAlphaToOneEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetAlphaToOneEnableEXT" ) ); - vkCmdSetLogicOpEnableEXT = PFN_vkCmdSetLogicOpEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetLogicOpEnableEXT" ) ); - vkCmdSetColorBlendEnableEXT = PFN_vkCmdSetColorBlendEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorBlendEnableEXT" ) ); - vkCmdSetColorBlendEquationEXT = PFN_vkCmdSetColorBlendEquationEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorBlendEquationEXT" ) ); - vkCmdSetColorWriteMaskEXT = PFN_vkCmdSetColorWriteMaskEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorWriteMaskEXT" ) ); - vkCmdSetTessellationDomainOriginEXT = PFN_vkCmdSetTessellationDomainOriginEXT( vkGetDeviceProcAddr( device, "vkCmdSetTessellationDomainOriginEXT" ) ); - vkCmdSetRasterizationStreamEXT = PFN_vkCmdSetRasterizationStreamEXT( vkGetDeviceProcAddr( device, "vkCmdSetRasterizationStreamEXT" ) ); - vkCmdSetConservativeRasterizationModeEXT = - PFN_vkCmdSetConservativeRasterizationModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetConservativeRasterizationModeEXT" ) ); - vkCmdSetExtraPrimitiveOverestimationSizeEXT = - PFN_vkCmdSetExtraPrimitiveOverestimationSizeEXT( vkGetDeviceProcAddr( device, "vkCmdSetExtraPrimitiveOverestimationSizeEXT" ) ); - vkCmdSetDepthClipEnableEXT = PFN_vkCmdSetDepthClipEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthClipEnableEXT" ) ); - vkCmdSetSampleLocationsEnableEXT = PFN_vkCmdSetSampleLocationsEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetSampleLocationsEnableEXT" ) ); - vkCmdSetColorBlendAdvancedEXT = PFN_vkCmdSetColorBlendAdvancedEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorBlendAdvancedEXT" ) ); - vkCmdSetProvokingVertexModeEXT = PFN_vkCmdSetProvokingVertexModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetProvokingVertexModeEXT" ) ); - vkCmdSetLineRasterizationModeEXT = PFN_vkCmdSetLineRasterizationModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetLineRasterizationModeEXT" ) ); - vkCmdSetLineStippleEnableEXT = PFN_vkCmdSetLineStippleEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetLineStippleEnableEXT" ) ); - vkCmdSetDepthClipNegativeOneToOneEXT = - PFN_vkCmdSetDepthClipNegativeOneToOneEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthClipNegativeOneToOneEXT" ) ); - vkCmdSetViewportWScalingEnableNV = PFN_vkCmdSetViewportWScalingEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportWScalingEnableNV" ) ); - vkCmdSetViewportSwizzleNV = PFN_vkCmdSetViewportSwizzleNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportSwizzleNV" ) ); - vkCmdSetCoverageToColorEnableNV = PFN_vkCmdSetCoverageToColorEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageToColorEnableNV" ) ); - vkCmdSetCoverageToColorLocationNV = PFN_vkCmdSetCoverageToColorLocationNV( 
vkGetDeviceProcAddr( device, "vkCmdSetCoverageToColorLocationNV" ) ); - vkCmdSetCoverageModulationModeNV = PFN_vkCmdSetCoverageModulationModeNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageModulationModeNV" ) ); - vkCmdSetCoverageModulationTableEnableNV = - PFN_vkCmdSetCoverageModulationTableEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageModulationTableEnableNV" ) ); - vkCmdSetCoverageModulationTableNV = PFN_vkCmdSetCoverageModulationTableNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageModulationTableNV" ) ); - vkCmdSetShadingRateImageEnableNV = PFN_vkCmdSetShadingRateImageEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetShadingRateImageEnableNV" ) ); - vkCmdSetRepresentativeFragmentTestEnableNV = - PFN_vkCmdSetRepresentativeFragmentTestEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetRepresentativeFragmentTestEnableNV" ) ); - vkCmdSetCoverageReductionModeNV = PFN_vkCmdSetCoverageReductionModeNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageReductionModeNV" ) ); + PFN_vkCmdSetDepthClampEnableEXT vkCmdSetDepthClampEnableEXT = 0; + PFN_vkCmdSetPolygonModeEXT vkCmdSetPolygonModeEXT = 0; + PFN_vkCmdSetRasterizationSamplesEXT vkCmdSetRasterizationSamplesEXT = 0; + PFN_vkCmdSetSampleMaskEXT vkCmdSetSampleMaskEXT = 0; + PFN_vkCmdSetAlphaToCoverageEnableEXT vkCmdSetAlphaToCoverageEnableEXT = 0; + PFN_vkCmdSetAlphaToOneEnableEXT vkCmdSetAlphaToOneEnableEXT = 0; + PFN_vkCmdSetLogicOpEnableEXT vkCmdSetLogicOpEnableEXT = 0; + PFN_vkCmdSetColorBlendEnableEXT vkCmdSetColorBlendEnableEXT = 0; + PFN_vkCmdSetColorBlendEquationEXT vkCmdSetColorBlendEquationEXT = 0; + PFN_vkCmdSetColorWriteMaskEXT vkCmdSetColorWriteMaskEXT = 0; + PFN_vkCmdSetTessellationDomainOriginEXT vkCmdSetTessellationDomainOriginEXT = 0; + PFN_vkCmdSetRasterizationStreamEXT vkCmdSetRasterizationStreamEXT = 0; + PFN_vkCmdSetConservativeRasterizationModeEXT vkCmdSetConservativeRasterizationModeEXT = 0; + PFN_vkCmdSetExtraPrimitiveOverestimationSizeEXT vkCmdSetExtraPrimitiveOverestimationSizeEXT = 0; + PFN_vkCmdSetDepthClipEnableEXT vkCmdSetDepthClipEnableEXT = 0; + PFN_vkCmdSetSampleLocationsEnableEXT vkCmdSetSampleLocationsEnableEXT = 0; + PFN_vkCmdSetColorBlendAdvancedEXT vkCmdSetColorBlendAdvancedEXT = 0; + PFN_vkCmdSetProvokingVertexModeEXT vkCmdSetProvokingVertexModeEXT = 0; + PFN_vkCmdSetLineRasterizationModeEXT vkCmdSetLineRasterizationModeEXT = 0; + PFN_vkCmdSetLineStippleEnableEXT vkCmdSetLineStippleEnableEXT = 0; + PFN_vkCmdSetDepthClipNegativeOneToOneEXT vkCmdSetDepthClipNegativeOneToOneEXT = 0; + PFN_vkCmdSetViewportWScalingEnableNV vkCmdSetViewportWScalingEnableNV = 0; + PFN_vkCmdSetViewportSwizzleNV vkCmdSetViewportSwizzleNV = 0; + PFN_vkCmdSetCoverageToColorEnableNV vkCmdSetCoverageToColorEnableNV = 0; + PFN_vkCmdSetCoverageToColorLocationNV vkCmdSetCoverageToColorLocationNV = 0; + PFN_vkCmdSetCoverageModulationModeNV vkCmdSetCoverageModulationModeNV = 0; + PFN_vkCmdSetCoverageModulationTableEnableNV vkCmdSetCoverageModulationTableEnableNV = 0; + PFN_vkCmdSetCoverageModulationTableNV vkCmdSetCoverageModulationTableNV = 0; + PFN_vkCmdSetShadingRateImageEnableNV vkCmdSetShadingRateImageEnableNV = 0; + PFN_vkCmdSetRepresentativeFragmentTestEnableNV vkCmdSetRepresentativeFragmentTestEnableNV = 0; + PFN_vkCmdSetCoverageReductionModeNV vkCmdSetCoverageReductionModeNV = 0; //=== VK_EXT_shader_module_identifier === - vkGetShaderModuleIdentifierEXT = PFN_vkGetShaderModuleIdentifierEXT( vkGetDeviceProcAddr( device, "vkGetShaderModuleIdentifierEXT" ) ); - vkGetShaderModuleCreateInfoIdentifierEXT = - 
PFN_vkGetShaderModuleCreateInfoIdentifierEXT( vkGetDeviceProcAddr( device, "vkGetShaderModuleCreateInfoIdentifierEXT" ) ); + PFN_vkGetShaderModuleIdentifierEXT vkGetShaderModuleIdentifierEXT = 0; + PFN_vkGetShaderModuleCreateInfoIdentifierEXT vkGetShaderModuleCreateInfoIdentifierEXT = 0; //=== VK_NV_optical_flow === - vkCreateOpticalFlowSessionNV = PFN_vkCreateOpticalFlowSessionNV( vkGetDeviceProcAddr( device, "vkCreateOpticalFlowSessionNV" ) ); - vkDestroyOpticalFlowSessionNV = PFN_vkDestroyOpticalFlowSessionNV( vkGetDeviceProcAddr( device, "vkDestroyOpticalFlowSessionNV" ) ); - vkBindOpticalFlowSessionImageNV = PFN_vkBindOpticalFlowSessionImageNV( vkGetDeviceProcAddr( device, "vkBindOpticalFlowSessionImageNV" ) ); - vkCmdOpticalFlowExecuteNV = PFN_vkCmdOpticalFlowExecuteNV( vkGetDeviceProcAddr( device, "vkCmdOpticalFlowExecuteNV" ) ); + PFN_vkCreateOpticalFlowSessionNV vkCreateOpticalFlowSessionNV = 0; + PFN_vkDestroyOpticalFlowSessionNV vkDestroyOpticalFlowSessionNV = 0; + PFN_vkBindOpticalFlowSessionImageNV vkBindOpticalFlowSessionImageNV = 0; + PFN_vkCmdOpticalFlowExecuteNV vkCmdOpticalFlowExecuteNV = 0; //=== VK_KHR_maintenance5 === - vkCmdBindIndexBuffer2KHR = PFN_vkCmdBindIndexBuffer2KHR( vkGetDeviceProcAddr( device, "vkCmdBindIndexBuffer2KHR" ) ); - vkGetRenderingAreaGranularityKHR = PFN_vkGetRenderingAreaGranularityKHR( vkGetDeviceProcAddr( device, "vkGetRenderingAreaGranularityKHR" ) ); - vkGetDeviceImageSubresourceLayoutKHR = - PFN_vkGetDeviceImageSubresourceLayoutKHR( vkGetDeviceProcAddr( device, "vkGetDeviceImageSubresourceLayoutKHR" ) ); - vkGetImageSubresourceLayout2KHR = PFN_vkGetImageSubresourceLayout2KHR( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout2KHR" ) ); + PFN_vkCmdBindIndexBuffer2KHR vkCmdBindIndexBuffer2KHR = 0; + PFN_vkGetRenderingAreaGranularityKHR vkGetRenderingAreaGranularityKHR = 0; + PFN_vkGetDeviceImageSubresourceLayoutKHR vkGetDeviceImageSubresourceLayoutKHR = 0; + PFN_vkGetImageSubresourceLayout2KHR vkGetImageSubresourceLayout2KHR = 0; //=== VK_AMD_anti_lag === - vkAntiLagUpdateAMD = PFN_vkAntiLagUpdateAMD( vkGetDeviceProcAddr( device, "vkAntiLagUpdateAMD" ) ); + PFN_vkAntiLagUpdateAMD vkAntiLagUpdateAMD = 0; //=== VK_EXT_shader_object === - vkCreateShadersEXT = PFN_vkCreateShadersEXT( vkGetDeviceProcAddr( device, "vkCreateShadersEXT" ) ); - vkDestroyShaderEXT = PFN_vkDestroyShaderEXT( vkGetDeviceProcAddr( device, "vkDestroyShaderEXT" ) ); - vkGetShaderBinaryDataEXT = PFN_vkGetShaderBinaryDataEXT( vkGetDeviceProcAddr( device, "vkGetShaderBinaryDataEXT" ) ); - vkCmdBindShadersEXT = PFN_vkCmdBindShadersEXT( vkGetDeviceProcAddr( device, "vkCmdBindShadersEXT" ) ); - vkCmdSetDepthClampRangeEXT = PFN_vkCmdSetDepthClampRangeEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthClampRangeEXT" ) ); + PFN_vkCreateShadersEXT vkCreateShadersEXT = 0; + PFN_vkDestroyShaderEXT vkDestroyShaderEXT = 0; + PFN_vkGetShaderBinaryDataEXT vkGetShaderBinaryDataEXT = 0; + PFN_vkCmdBindShadersEXT vkCmdBindShadersEXT = 0; + PFN_vkCmdSetDepthClampRangeEXT vkCmdSetDepthClampRangeEXT = 0; //=== VK_KHR_pipeline_binary === - vkCreatePipelineBinariesKHR = PFN_vkCreatePipelineBinariesKHR( vkGetDeviceProcAddr( device, "vkCreatePipelineBinariesKHR" ) ); - vkDestroyPipelineBinaryKHR = PFN_vkDestroyPipelineBinaryKHR( vkGetDeviceProcAddr( device, "vkDestroyPipelineBinaryKHR" ) ); - vkGetPipelineKeyKHR = PFN_vkGetPipelineKeyKHR( vkGetDeviceProcAddr( device, "vkGetPipelineKeyKHR" ) ); - vkGetPipelineBinaryDataKHR = PFN_vkGetPipelineBinaryDataKHR( vkGetDeviceProcAddr( device, 
"vkGetPipelineBinaryDataKHR" ) ); - vkReleaseCapturedPipelineDataKHR = PFN_vkReleaseCapturedPipelineDataKHR( vkGetDeviceProcAddr( device, "vkReleaseCapturedPipelineDataKHR" ) ); + PFN_vkCreatePipelineBinariesKHR vkCreatePipelineBinariesKHR = 0; + PFN_vkDestroyPipelineBinaryKHR vkDestroyPipelineBinaryKHR = 0; + PFN_vkGetPipelineKeyKHR vkGetPipelineKeyKHR = 0; + PFN_vkGetPipelineBinaryDataKHR vkGetPipelineBinaryDataKHR = 0; + PFN_vkReleaseCapturedPipelineDataKHR vkReleaseCapturedPipelineDataKHR = 0; //=== VK_QCOM_tile_properties === - vkGetFramebufferTilePropertiesQCOM = PFN_vkGetFramebufferTilePropertiesQCOM( vkGetDeviceProcAddr( device, "vkGetFramebufferTilePropertiesQCOM" ) ); - vkGetDynamicRenderingTilePropertiesQCOM = - PFN_vkGetDynamicRenderingTilePropertiesQCOM( vkGetDeviceProcAddr( device, "vkGetDynamicRenderingTilePropertiesQCOM" ) ); + PFN_vkGetFramebufferTilePropertiesQCOM vkGetFramebufferTilePropertiesQCOM = 0; + PFN_vkGetDynamicRenderingTilePropertiesQCOM vkGetDynamicRenderingTilePropertiesQCOM = 0; //=== VK_NV_low_latency2 === - vkSetLatencySleepModeNV = PFN_vkSetLatencySleepModeNV( vkGetDeviceProcAddr( device, "vkSetLatencySleepModeNV" ) ); - vkLatencySleepNV = PFN_vkLatencySleepNV( vkGetDeviceProcAddr( device, "vkLatencySleepNV" ) ); - vkSetLatencyMarkerNV = PFN_vkSetLatencyMarkerNV( vkGetDeviceProcAddr( device, "vkSetLatencyMarkerNV" ) ); - vkGetLatencyTimingsNV = PFN_vkGetLatencyTimingsNV( vkGetDeviceProcAddr( device, "vkGetLatencyTimingsNV" ) ); - vkQueueNotifyOutOfBandNV = PFN_vkQueueNotifyOutOfBandNV( vkGetDeviceProcAddr( device, "vkQueueNotifyOutOfBandNV" ) ); + PFN_vkSetLatencySleepModeNV vkSetLatencySleepModeNV = 0; + PFN_vkLatencySleepNV vkLatencySleepNV = 0; + PFN_vkSetLatencyMarkerNV vkSetLatencyMarkerNV = 0; + PFN_vkGetLatencyTimingsNV vkGetLatencyTimingsNV = 0; + PFN_vkQueueNotifyOutOfBandNV vkQueueNotifyOutOfBandNV = 0; //=== VK_EXT_attachment_feedback_loop_dynamic_state === - vkCmdSetAttachmentFeedbackLoopEnableEXT = - PFN_vkCmdSetAttachmentFeedbackLoopEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetAttachmentFeedbackLoopEnableEXT" ) ); + PFN_vkCmdSetAttachmentFeedbackLoopEnableEXT vkCmdSetAttachmentFeedbackLoopEnableEXT = 0; # if defined( VK_USE_PLATFORM_SCREEN_QNX ) //=== VK_QNX_external_memory_screen_buffer === - vkGetScreenBufferPropertiesQNX = PFN_vkGetScreenBufferPropertiesQNX( vkGetDeviceProcAddr( device, "vkGetScreenBufferPropertiesQNX" ) ); + PFN_vkGetScreenBufferPropertiesQNX vkGetScreenBufferPropertiesQNX = 0; +# else + PFN_dummy vkGetScreenBufferPropertiesQNX_placeholder = 0; # endif /*VK_USE_PLATFORM_SCREEN_QNX*/ //=== VK_KHR_line_rasterization === - vkCmdSetLineStippleKHR = PFN_vkCmdSetLineStippleKHR( vkGetDeviceProcAddr( device, "vkCmdSetLineStippleKHR" ) ); + PFN_vkCmdSetLineStippleKHR vkCmdSetLineStippleKHR = 0; //=== VK_KHR_calibrated_timestamps === - vkGetCalibratedTimestampsKHR = PFN_vkGetCalibratedTimestampsKHR( vkGetDeviceProcAddr( device, "vkGetCalibratedTimestampsKHR" ) ); + PFN_vkGetCalibratedTimestampsKHR vkGetCalibratedTimestampsKHR = 0; //=== VK_KHR_maintenance6 === - vkCmdBindDescriptorSets2KHR = PFN_vkCmdBindDescriptorSets2KHR( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorSets2KHR" ) ); - vkCmdPushConstants2KHR = PFN_vkCmdPushConstants2KHR( vkGetDeviceProcAddr( device, "vkCmdPushConstants2KHR" ) ); - vkCmdPushDescriptorSet2KHR = PFN_vkCmdPushDescriptorSet2KHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSet2KHR" ) ); - vkCmdPushDescriptorSetWithTemplate2KHR = - PFN_vkCmdPushDescriptorSetWithTemplate2KHR( 
vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetWithTemplate2KHR" ) ); - vkCmdSetDescriptorBufferOffsets2EXT = PFN_vkCmdSetDescriptorBufferOffsets2EXT( vkGetDeviceProcAddr( device, "vkCmdSetDescriptorBufferOffsets2EXT" ) ); - vkCmdBindDescriptorBufferEmbeddedSamplers2EXT = - PFN_vkCmdBindDescriptorBufferEmbeddedSamplers2EXT( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorBufferEmbeddedSamplers2EXT" ) ); + PFN_vkCmdBindDescriptorSets2KHR vkCmdBindDescriptorSets2KHR = 0; + PFN_vkCmdPushConstants2KHR vkCmdPushConstants2KHR = 0; + PFN_vkCmdPushDescriptorSet2KHR vkCmdPushDescriptorSet2KHR = 0; + PFN_vkCmdPushDescriptorSetWithTemplate2KHR vkCmdPushDescriptorSetWithTemplate2KHR = 0; + PFN_vkCmdSetDescriptorBufferOffsets2EXT vkCmdSetDescriptorBufferOffsets2EXT = 0; + PFN_vkCmdBindDescriptorBufferEmbeddedSamplers2EXT vkCmdBindDescriptorBufferEmbeddedSamplers2EXT = 0; //=== VK_EXT_device_generated_commands === - vkGetGeneratedCommandsMemoryRequirementsEXT = - PFN_vkGetGeneratedCommandsMemoryRequirementsEXT( vkGetDeviceProcAddr( device, "vkGetGeneratedCommandsMemoryRequirementsEXT" ) ); - vkCmdPreprocessGeneratedCommandsEXT = PFN_vkCmdPreprocessGeneratedCommandsEXT( vkGetDeviceProcAddr( device, "vkCmdPreprocessGeneratedCommandsEXT" ) ); - vkCmdExecuteGeneratedCommandsEXT = PFN_vkCmdExecuteGeneratedCommandsEXT( vkGetDeviceProcAddr( device, "vkCmdExecuteGeneratedCommandsEXT" ) ); - vkCreateIndirectCommandsLayoutEXT = PFN_vkCreateIndirectCommandsLayoutEXT( vkGetDeviceProcAddr( device, "vkCreateIndirectCommandsLayoutEXT" ) ); - vkDestroyIndirectCommandsLayoutEXT = PFN_vkDestroyIndirectCommandsLayoutEXT( vkGetDeviceProcAddr( device, "vkDestroyIndirectCommandsLayoutEXT" ) ); - vkCreateIndirectExecutionSetEXT = PFN_vkCreateIndirectExecutionSetEXT( vkGetDeviceProcAddr( device, "vkCreateIndirectExecutionSetEXT" ) ); - vkDestroyIndirectExecutionSetEXT = PFN_vkDestroyIndirectExecutionSetEXT( vkGetDeviceProcAddr( device, "vkDestroyIndirectExecutionSetEXT" ) ); - vkUpdateIndirectExecutionSetPipelineEXT = - PFN_vkUpdateIndirectExecutionSetPipelineEXT( vkGetDeviceProcAddr( device, "vkUpdateIndirectExecutionSetPipelineEXT" ) ); - vkUpdateIndirectExecutionSetShaderEXT = - PFN_vkUpdateIndirectExecutionSetShaderEXT( vkGetDeviceProcAddr( device, "vkUpdateIndirectExecutionSetShaderEXT" ) ); - } - - public: - //=== VK_VERSION_1_0 === - PFN_vkGetDeviceProcAddr vkGetDeviceProcAddr = 0; - PFN_vkDestroyDevice vkDestroyDevice = 0; - PFN_vkGetDeviceQueue vkGetDeviceQueue = 0; - PFN_vkQueueSubmit vkQueueSubmit = 0; - PFN_vkQueueWaitIdle vkQueueWaitIdle = 0; - PFN_vkDeviceWaitIdle vkDeviceWaitIdle = 0; - PFN_vkAllocateMemory vkAllocateMemory = 0; - PFN_vkFreeMemory vkFreeMemory = 0; - PFN_vkMapMemory vkMapMemory = 0; - PFN_vkUnmapMemory vkUnmapMemory = 0; - PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges = 0; - PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges = 0; - PFN_vkGetDeviceMemoryCommitment vkGetDeviceMemoryCommitment = 0; - PFN_vkBindBufferMemory vkBindBufferMemory = 0; - PFN_vkBindImageMemory vkBindImageMemory = 0; - PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements = 0; - PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements = 0; - PFN_vkGetImageSparseMemoryRequirements vkGetImageSparseMemoryRequirements = 0; - PFN_vkQueueBindSparse vkQueueBindSparse = 0; - PFN_vkCreateFence vkCreateFence = 0; - PFN_vkDestroyFence vkDestroyFence = 0; - PFN_vkResetFences vkResetFences = 0; - PFN_vkGetFenceStatus vkGetFenceStatus = 0; - PFN_vkWaitForFences vkWaitForFences = 0; - 
PFN_vkCreateSemaphore vkCreateSemaphore = 0; - PFN_vkDestroySemaphore vkDestroySemaphore = 0; - PFN_vkCreateEvent vkCreateEvent = 0; - PFN_vkDestroyEvent vkDestroyEvent = 0; - PFN_vkGetEventStatus vkGetEventStatus = 0; - PFN_vkSetEvent vkSetEvent = 0; - PFN_vkResetEvent vkResetEvent = 0; - PFN_vkCreateQueryPool vkCreateQueryPool = 0; - PFN_vkDestroyQueryPool vkDestroyQueryPool = 0; - PFN_vkGetQueryPoolResults vkGetQueryPoolResults = 0; - PFN_vkCreateBuffer vkCreateBuffer = 0; - PFN_vkDestroyBuffer vkDestroyBuffer = 0; - PFN_vkCreateBufferView vkCreateBufferView = 0; - PFN_vkDestroyBufferView vkDestroyBufferView = 0; - PFN_vkCreateImage vkCreateImage = 0; - PFN_vkDestroyImage vkDestroyImage = 0; - PFN_vkGetImageSubresourceLayout vkGetImageSubresourceLayout = 0; - PFN_vkCreateImageView vkCreateImageView = 0; - PFN_vkDestroyImageView vkDestroyImageView = 0; - PFN_vkCreateShaderModule vkCreateShaderModule = 0; - PFN_vkDestroyShaderModule vkDestroyShaderModule = 0; - PFN_vkCreatePipelineCache vkCreatePipelineCache = 0; - PFN_vkDestroyPipelineCache vkDestroyPipelineCache = 0; - PFN_vkGetPipelineCacheData vkGetPipelineCacheData = 0; - PFN_vkMergePipelineCaches vkMergePipelineCaches = 0; - PFN_vkCreateGraphicsPipelines vkCreateGraphicsPipelines = 0; - PFN_vkCreateComputePipelines vkCreateComputePipelines = 0; - PFN_vkDestroyPipeline vkDestroyPipeline = 0; - PFN_vkCreatePipelineLayout vkCreatePipelineLayout = 0; - PFN_vkDestroyPipelineLayout vkDestroyPipelineLayout = 0; - PFN_vkCreateSampler vkCreateSampler = 0; - PFN_vkDestroySampler vkDestroySampler = 0; - PFN_vkCreateDescriptorSetLayout vkCreateDescriptorSetLayout = 0; - PFN_vkDestroyDescriptorSetLayout vkDestroyDescriptorSetLayout = 0; - PFN_vkCreateDescriptorPool vkCreateDescriptorPool = 0; - PFN_vkDestroyDescriptorPool vkDestroyDescriptorPool = 0; - PFN_vkResetDescriptorPool vkResetDescriptorPool = 0; - PFN_vkAllocateDescriptorSets vkAllocateDescriptorSets = 0; - PFN_vkFreeDescriptorSets vkFreeDescriptorSets = 0; - PFN_vkUpdateDescriptorSets vkUpdateDescriptorSets = 0; - PFN_vkCreateFramebuffer vkCreateFramebuffer = 0; - PFN_vkDestroyFramebuffer vkDestroyFramebuffer = 0; - PFN_vkCreateRenderPass vkCreateRenderPass = 0; - PFN_vkDestroyRenderPass vkDestroyRenderPass = 0; - PFN_vkGetRenderAreaGranularity vkGetRenderAreaGranularity = 0; - PFN_vkCreateCommandPool vkCreateCommandPool = 0; - PFN_vkDestroyCommandPool vkDestroyCommandPool = 0; - PFN_vkResetCommandPool vkResetCommandPool = 0; - PFN_vkAllocateCommandBuffers vkAllocateCommandBuffers = 0; - PFN_vkFreeCommandBuffers vkFreeCommandBuffers = 0; - PFN_vkBeginCommandBuffer vkBeginCommandBuffer = 0; - PFN_vkEndCommandBuffer vkEndCommandBuffer = 0; - PFN_vkResetCommandBuffer vkResetCommandBuffer = 0; - PFN_vkCmdBindPipeline vkCmdBindPipeline = 0; - PFN_vkCmdSetViewport vkCmdSetViewport = 0; - PFN_vkCmdSetScissor vkCmdSetScissor = 0; - PFN_vkCmdSetLineWidth vkCmdSetLineWidth = 0; - PFN_vkCmdSetDepthBias vkCmdSetDepthBias = 0; - PFN_vkCmdSetBlendConstants vkCmdSetBlendConstants = 0; - PFN_vkCmdSetDepthBounds vkCmdSetDepthBounds = 0; - PFN_vkCmdSetStencilCompareMask vkCmdSetStencilCompareMask = 0; - PFN_vkCmdSetStencilWriteMask vkCmdSetStencilWriteMask = 0; - PFN_vkCmdSetStencilReference vkCmdSetStencilReference = 0; - PFN_vkCmdBindDescriptorSets vkCmdBindDescriptorSets = 0; - PFN_vkCmdBindIndexBuffer vkCmdBindIndexBuffer = 0; - PFN_vkCmdBindVertexBuffers vkCmdBindVertexBuffers = 0; - PFN_vkCmdDraw vkCmdDraw = 0; - PFN_vkCmdDrawIndexed vkCmdDrawIndexed = 0; - PFN_vkCmdDrawIndirect vkCmdDrawIndirect 
= 0; - PFN_vkCmdDrawIndexedIndirect vkCmdDrawIndexedIndirect = 0; - PFN_vkCmdDispatch vkCmdDispatch = 0; - PFN_vkCmdDispatchIndirect vkCmdDispatchIndirect = 0; - PFN_vkCmdCopyBuffer vkCmdCopyBuffer = 0; - PFN_vkCmdCopyImage vkCmdCopyImage = 0; - PFN_vkCmdBlitImage vkCmdBlitImage = 0; - PFN_vkCmdCopyBufferToImage vkCmdCopyBufferToImage = 0; - PFN_vkCmdCopyImageToBuffer vkCmdCopyImageToBuffer = 0; - PFN_vkCmdUpdateBuffer vkCmdUpdateBuffer = 0; - PFN_vkCmdFillBuffer vkCmdFillBuffer = 0; - PFN_vkCmdClearColorImage vkCmdClearColorImage = 0; - PFN_vkCmdClearDepthStencilImage vkCmdClearDepthStencilImage = 0; - PFN_vkCmdClearAttachments vkCmdClearAttachments = 0; - PFN_vkCmdResolveImage vkCmdResolveImage = 0; - PFN_vkCmdSetEvent vkCmdSetEvent = 0; - PFN_vkCmdResetEvent vkCmdResetEvent = 0; - PFN_vkCmdWaitEvents vkCmdWaitEvents = 0; - PFN_vkCmdPipelineBarrier vkCmdPipelineBarrier = 0; - PFN_vkCmdBeginQuery vkCmdBeginQuery = 0; - PFN_vkCmdEndQuery vkCmdEndQuery = 0; - PFN_vkCmdResetQueryPool vkCmdResetQueryPool = 0; - PFN_vkCmdWriteTimestamp vkCmdWriteTimestamp = 0; - PFN_vkCmdCopyQueryPoolResults vkCmdCopyQueryPoolResults = 0; - PFN_vkCmdPushConstants vkCmdPushConstants = 0; - PFN_vkCmdBeginRenderPass vkCmdBeginRenderPass = 0; - PFN_vkCmdNextSubpass vkCmdNextSubpass = 0; - PFN_vkCmdEndRenderPass vkCmdEndRenderPass = 0; - PFN_vkCmdExecuteCommands vkCmdExecuteCommands = 0; - - //=== VK_VERSION_1_1 === - PFN_vkBindBufferMemory2 vkBindBufferMemory2 = 0; - PFN_vkBindImageMemory2 vkBindImageMemory2 = 0; - PFN_vkGetDeviceGroupPeerMemoryFeatures vkGetDeviceGroupPeerMemoryFeatures = 0; - PFN_vkCmdSetDeviceMask vkCmdSetDeviceMask = 0; - PFN_vkCmdDispatchBase vkCmdDispatchBase = 0; - PFN_vkGetImageMemoryRequirements2 vkGetImageMemoryRequirements2 = 0; - PFN_vkGetBufferMemoryRequirements2 vkGetBufferMemoryRequirements2 = 0; - PFN_vkGetImageSparseMemoryRequirements2 vkGetImageSparseMemoryRequirements2 = 0; - PFN_vkTrimCommandPool vkTrimCommandPool = 0; - PFN_vkGetDeviceQueue2 vkGetDeviceQueue2 = 0; - PFN_vkCreateSamplerYcbcrConversion vkCreateSamplerYcbcrConversion = 0; - PFN_vkDestroySamplerYcbcrConversion vkDestroySamplerYcbcrConversion = 0; - PFN_vkCreateDescriptorUpdateTemplate vkCreateDescriptorUpdateTemplate = 0; - PFN_vkDestroyDescriptorUpdateTemplate vkDestroyDescriptorUpdateTemplate = 0; - PFN_vkUpdateDescriptorSetWithTemplate vkUpdateDescriptorSetWithTemplate = 0; - PFN_vkGetDescriptorSetLayoutSupport vkGetDescriptorSetLayoutSupport = 0; - - //=== VK_VERSION_1_2 === - PFN_vkCmdDrawIndirectCount vkCmdDrawIndirectCount = 0; - PFN_vkCmdDrawIndexedIndirectCount vkCmdDrawIndexedIndirectCount = 0; - PFN_vkCreateRenderPass2 vkCreateRenderPass2 = 0; - PFN_vkCmdBeginRenderPass2 vkCmdBeginRenderPass2 = 0; - PFN_vkCmdNextSubpass2 vkCmdNextSubpass2 = 0; - PFN_vkCmdEndRenderPass2 vkCmdEndRenderPass2 = 0; - PFN_vkResetQueryPool vkResetQueryPool = 0; - PFN_vkGetSemaphoreCounterValue vkGetSemaphoreCounterValue = 0; - PFN_vkWaitSemaphores vkWaitSemaphores = 0; - PFN_vkSignalSemaphore vkSignalSemaphore = 0; - PFN_vkGetBufferDeviceAddress vkGetBufferDeviceAddress = 0; - PFN_vkGetBufferOpaqueCaptureAddress vkGetBufferOpaqueCaptureAddress = 0; - PFN_vkGetDeviceMemoryOpaqueCaptureAddress vkGetDeviceMemoryOpaqueCaptureAddress = 0; - - //=== VK_VERSION_1_3 === - PFN_vkCreatePrivateDataSlot vkCreatePrivateDataSlot = 0; - PFN_vkDestroyPrivateDataSlot vkDestroyPrivateDataSlot = 0; - PFN_vkSetPrivateData vkSetPrivateData = 0; - PFN_vkGetPrivateData vkGetPrivateData = 0; - PFN_vkCmdSetEvent2 vkCmdSetEvent2 = 0; - 
PFN_vkCmdResetEvent2 vkCmdResetEvent2 = 0; - PFN_vkCmdWaitEvents2 vkCmdWaitEvents2 = 0; - PFN_vkCmdPipelineBarrier2 vkCmdPipelineBarrier2 = 0; - PFN_vkCmdWriteTimestamp2 vkCmdWriteTimestamp2 = 0; - PFN_vkQueueSubmit2 vkQueueSubmit2 = 0; - PFN_vkCmdCopyBuffer2 vkCmdCopyBuffer2 = 0; - PFN_vkCmdCopyImage2 vkCmdCopyImage2 = 0; - PFN_vkCmdCopyBufferToImage2 vkCmdCopyBufferToImage2 = 0; - PFN_vkCmdCopyImageToBuffer2 vkCmdCopyImageToBuffer2 = 0; - PFN_vkCmdBlitImage2 vkCmdBlitImage2 = 0; - PFN_vkCmdResolveImage2 vkCmdResolveImage2 = 0; - PFN_vkCmdBeginRendering vkCmdBeginRendering = 0; - PFN_vkCmdEndRendering vkCmdEndRendering = 0; - PFN_vkCmdSetCullMode vkCmdSetCullMode = 0; - PFN_vkCmdSetFrontFace vkCmdSetFrontFace = 0; - PFN_vkCmdSetPrimitiveTopology vkCmdSetPrimitiveTopology = 0; - PFN_vkCmdSetViewportWithCount vkCmdSetViewportWithCount = 0; - PFN_vkCmdSetScissorWithCount vkCmdSetScissorWithCount = 0; - PFN_vkCmdBindVertexBuffers2 vkCmdBindVertexBuffers2 = 0; - PFN_vkCmdSetDepthTestEnable vkCmdSetDepthTestEnable = 0; - PFN_vkCmdSetDepthWriteEnable vkCmdSetDepthWriteEnable = 0; - PFN_vkCmdSetDepthCompareOp vkCmdSetDepthCompareOp = 0; - PFN_vkCmdSetDepthBoundsTestEnable vkCmdSetDepthBoundsTestEnable = 0; - PFN_vkCmdSetStencilTestEnable vkCmdSetStencilTestEnable = 0; - PFN_vkCmdSetStencilOp vkCmdSetStencilOp = 0; - PFN_vkCmdSetRasterizerDiscardEnable vkCmdSetRasterizerDiscardEnable = 0; - PFN_vkCmdSetDepthBiasEnable vkCmdSetDepthBiasEnable = 0; - PFN_vkCmdSetPrimitiveRestartEnable vkCmdSetPrimitiveRestartEnable = 0; - PFN_vkGetDeviceBufferMemoryRequirements vkGetDeviceBufferMemoryRequirements = 0; - PFN_vkGetDeviceImageMemoryRequirements vkGetDeviceImageMemoryRequirements = 0; - PFN_vkGetDeviceImageSparseMemoryRequirements vkGetDeviceImageSparseMemoryRequirements = 0; - - //=== VK_KHR_swapchain === - PFN_vkCreateSwapchainKHR vkCreateSwapchainKHR = 0; - PFN_vkDestroySwapchainKHR vkDestroySwapchainKHR = 0; - PFN_vkGetSwapchainImagesKHR vkGetSwapchainImagesKHR = 0; - PFN_vkAcquireNextImageKHR vkAcquireNextImageKHR = 0; - PFN_vkQueuePresentKHR vkQueuePresentKHR = 0; - PFN_vkGetDeviceGroupPresentCapabilitiesKHR vkGetDeviceGroupPresentCapabilitiesKHR = 0; - PFN_vkGetDeviceGroupSurfacePresentModesKHR vkGetDeviceGroupSurfacePresentModesKHR = 0; - PFN_vkAcquireNextImage2KHR vkAcquireNextImage2KHR = 0; - - //=== VK_KHR_display_swapchain === - PFN_vkCreateSharedSwapchainsKHR vkCreateSharedSwapchainsKHR = 0; - - //=== VK_EXT_debug_marker === - PFN_vkDebugMarkerSetObjectTagEXT vkDebugMarkerSetObjectTagEXT = 0; - PFN_vkDebugMarkerSetObjectNameEXT vkDebugMarkerSetObjectNameEXT = 0; - PFN_vkCmdDebugMarkerBeginEXT vkCmdDebugMarkerBeginEXT = 0; - PFN_vkCmdDebugMarkerEndEXT vkCmdDebugMarkerEndEXT = 0; - PFN_vkCmdDebugMarkerInsertEXT vkCmdDebugMarkerInsertEXT = 0; - - //=== VK_KHR_video_queue === - PFN_vkCreateVideoSessionKHR vkCreateVideoSessionKHR = 0; - PFN_vkDestroyVideoSessionKHR vkDestroyVideoSessionKHR = 0; - PFN_vkGetVideoSessionMemoryRequirementsKHR vkGetVideoSessionMemoryRequirementsKHR = 0; - PFN_vkBindVideoSessionMemoryKHR vkBindVideoSessionMemoryKHR = 0; - PFN_vkCreateVideoSessionParametersKHR vkCreateVideoSessionParametersKHR = 0; - PFN_vkUpdateVideoSessionParametersKHR vkUpdateVideoSessionParametersKHR = 0; - PFN_vkDestroyVideoSessionParametersKHR vkDestroyVideoSessionParametersKHR = 0; - PFN_vkCmdBeginVideoCodingKHR vkCmdBeginVideoCodingKHR = 0; - PFN_vkCmdEndVideoCodingKHR vkCmdEndVideoCodingKHR = 0; - PFN_vkCmdControlVideoCodingKHR vkCmdControlVideoCodingKHR = 0; - - //=== 
VK_KHR_video_decode_queue === - PFN_vkCmdDecodeVideoKHR vkCmdDecodeVideoKHR = 0; - - //=== VK_EXT_transform_feedback === - PFN_vkCmdBindTransformFeedbackBuffersEXT vkCmdBindTransformFeedbackBuffersEXT = 0; - PFN_vkCmdBeginTransformFeedbackEXT vkCmdBeginTransformFeedbackEXT = 0; - PFN_vkCmdEndTransformFeedbackEXT vkCmdEndTransformFeedbackEXT = 0; - PFN_vkCmdBeginQueryIndexedEXT vkCmdBeginQueryIndexedEXT = 0; - PFN_vkCmdEndQueryIndexedEXT vkCmdEndQueryIndexedEXT = 0; - PFN_vkCmdDrawIndirectByteCountEXT vkCmdDrawIndirectByteCountEXT = 0; - - //=== VK_NVX_binary_import === - PFN_vkCreateCuModuleNVX vkCreateCuModuleNVX = 0; - PFN_vkCreateCuFunctionNVX vkCreateCuFunctionNVX = 0; - PFN_vkDestroyCuModuleNVX vkDestroyCuModuleNVX = 0; - PFN_vkDestroyCuFunctionNVX vkDestroyCuFunctionNVX = 0; - PFN_vkCmdCuLaunchKernelNVX vkCmdCuLaunchKernelNVX = 0; - - //=== VK_NVX_image_view_handle === - PFN_vkGetImageViewHandleNVX vkGetImageViewHandleNVX = 0; - PFN_vkGetImageViewAddressNVX vkGetImageViewAddressNVX = 0; - - //=== VK_AMD_draw_indirect_count === - PFN_vkCmdDrawIndirectCountAMD vkCmdDrawIndirectCountAMD = 0; - PFN_vkCmdDrawIndexedIndirectCountAMD vkCmdDrawIndexedIndirectCountAMD = 0; - - //=== VK_AMD_shader_info === - PFN_vkGetShaderInfoAMD vkGetShaderInfoAMD = 0; - - //=== VK_KHR_dynamic_rendering === - PFN_vkCmdBeginRenderingKHR vkCmdBeginRenderingKHR = 0; - PFN_vkCmdEndRenderingKHR vkCmdEndRenderingKHR = 0; - -# if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_NV_external_memory_win32 === - PFN_vkGetMemoryWin32HandleNV vkGetMemoryWin32HandleNV = 0; -# else - PFN_dummy vkGetMemoryWin32HandleNV_placeholder = 0; -# endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - //=== VK_KHR_device_group === - PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR vkGetDeviceGroupPeerMemoryFeaturesKHR = 0; - PFN_vkCmdSetDeviceMaskKHR vkCmdSetDeviceMaskKHR = 0; - PFN_vkCmdDispatchBaseKHR vkCmdDispatchBaseKHR = 0; - - //=== VK_KHR_maintenance1 === - PFN_vkTrimCommandPoolKHR vkTrimCommandPoolKHR = 0; - -# if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_KHR_external_memory_win32 === - PFN_vkGetMemoryWin32HandleKHR vkGetMemoryWin32HandleKHR = 0; - PFN_vkGetMemoryWin32HandlePropertiesKHR vkGetMemoryWin32HandlePropertiesKHR = 0; -# else - PFN_dummy vkGetMemoryWin32HandleKHR_placeholder = 0; - PFN_dummy vkGetMemoryWin32HandlePropertiesKHR_placeholder = 0; -# endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - //=== VK_KHR_external_memory_fd === - PFN_vkGetMemoryFdKHR vkGetMemoryFdKHR = 0; - PFN_vkGetMemoryFdPropertiesKHR vkGetMemoryFdPropertiesKHR = 0; - -# if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_KHR_external_semaphore_win32 === - PFN_vkImportSemaphoreWin32HandleKHR vkImportSemaphoreWin32HandleKHR = 0; - PFN_vkGetSemaphoreWin32HandleKHR vkGetSemaphoreWin32HandleKHR = 0; -# else - PFN_dummy vkImportSemaphoreWin32HandleKHR_placeholder = 0; - PFN_dummy vkGetSemaphoreWin32HandleKHR_placeholder = 0; -# endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - //=== VK_KHR_external_semaphore_fd === - PFN_vkImportSemaphoreFdKHR vkImportSemaphoreFdKHR = 0; - PFN_vkGetSemaphoreFdKHR vkGetSemaphoreFdKHR = 0; - - //=== VK_KHR_push_descriptor === - PFN_vkCmdPushDescriptorSetKHR vkCmdPushDescriptorSetKHR = 0; - PFN_vkCmdPushDescriptorSetWithTemplateKHR vkCmdPushDescriptorSetWithTemplateKHR = 0; - - //=== VK_EXT_conditional_rendering === - PFN_vkCmdBeginConditionalRenderingEXT vkCmdBeginConditionalRenderingEXT = 0; - PFN_vkCmdEndConditionalRenderingEXT vkCmdEndConditionalRenderingEXT = 0; - - //=== VK_KHR_descriptor_update_template === - 
PFN_vkCreateDescriptorUpdateTemplateKHR vkCreateDescriptorUpdateTemplateKHR = 0; - PFN_vkDestroyDescriptorUpdateTemplateKHR vkDestroyDescriptorUpdateTemplateKHR = 0; - PFN_vkUpdateDescriptorSetWithTemplateKHR vkUpdateDescriptorSetWithTemplateKHR = 0; - - //=== VK_NV_clip_space_w_scaling === - PFN_vkCmdSetViewportWScalingNV vkCmdSetViewportWScalingNV = 0; - - //=== VK_EXT_display_control === - PFN_vkDisplayPowerControlEXT vkDisplayPowerControlEXT = 0; - PFN_vkRegisterDeviceEventEXT vkRegisterDeviceEventEXT = 0; - PFN_vkRegisterDisplayEventEXT vkRegisterDisplayEventEXT = 0; - PFN_vkGetSwapchainCounterEXT vkGetSwapchainCounterEXT = 0; - - //=== VK_GOOGLE_display_timing === - PFN_vkGetRefreshCycleDurationGOOGLE vkGetRefreshCycleDurationGOOGLE = 0; - PFN_vkGetPastPresentationTimingGOOGLE vkGetPastPresentationTimingGOOGLE = 0; - - //=== VK_EXT_discard_rectangles === - PFN_vkCmdSetDiscardRectangleEXT vkCmdSetDiscardRectangleEXT = 0; - PFN_vkCmdSetDiscardRectangleEnableEXT vkCmdSetDiscardRectangleEnableEXT = 0; - PFN_vkCmdSetDiscardRectangleModeEXT vkCmdSetDiscardRectangleModeEXT = 0; - - //=== VK_EXT_hdr_metadata === - PFN_vkSetHdrMetadataEXT vkSetHdrMetadataEXT = 0; - - //=== VK_KHR_create_renderpass2 === - PFN_vkCreateRenderPass2KHR vkCreateRenderPass2KHR = 0; - PFN_vkCmdBeginRenderPass2KHR vkCmdBeginRenderPass2KHR = 0; - PFN_vkCmdNextSubpass2KHR vkCmdNextSubpass2KHR = 0; - PFN_vkCmdEndRenderPass2KHR vkCmdEndRenderPass2KHR = 0; - - //=== VK_KHR_shared_presentable_image === - PFN_vkGetSwapchainStatusKHR vkGetSwapchainStatusKHR = 0; - -# if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_KHR_external_fence_win32 === - PFN_vkImportFenceWin32HandleKHR vkImportFenceWin32HandleKHR = 0; - PFN_vkGetFenceWin32HandleKHR vkGetFenceWin32HandleKHR = 0; -# else - PFN_dummy vkImportFenceWin32HandleKHR_placeholder = 0; - PFN_dummy vkGetFenceWin32HandleKHR_placeholder = 0; -# endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - //=== VK_KHR_external_fence_fd === - PFN_vkImportFenceFdKHR vkImportFenceFdKHR = 0; - PFN_vkGetFenceFdKHR vkGetFenceFdKHR = 0; - - //=== VK_KHR_performance_query === - PFN_vkAcquireProfilingLockKHR vkAcquireProfilingLockKHR = 0; - PFN_vkReleaseProfilingLockKHR vkReleaseProfilingLockKHR = 0; - - //=== VK_EXT_debug_utils === - PFN_vkSetDebugUtilsObjectNameEXT vkSetDebugUtilsObjectNameEXT = 0; - PFN_vkSetDebugUtilsObjectTagEXT vkSetDebugUtilsObjectTagEXT = 0; - PFN_vkQueueBeginDebugUtilsLabelEXT vkQueueBeginDebugUtilsLabelEXT = 0; - PFN_vkQueueEndDebugUtilsLabelEXT vkQueueEndDebugUtilsLabelEXT = 0; - PFN_vkQueueInsertDebugUtilsLabelEXT vkQueueInsertDebugUtilsLabelEXT = 0; - PFN_vkCmdBeginDebugUtilsLabelEXT vkCmdBeginDebugUtilsLabelEXT = 0; - PFN_vkCmdEndDebugUtilsLabelEXT vkCmdEndDebugUtilsLabelEXT = 0; - PFN_vkCmdInsertDebugUtilsLabelEXT vkCmdInsertDebugUtilsLabelEXT = 0; - -# if defined( VK_USE_PLATFORM_ANDROID_KHR ) - //=== VK_ANDROID_external_memory_android_hardware_buffer === - PFN_vkGetAndroidHardwareBufferPropertiesANDROID vkGetAndroidHardwareBufferPropertiesANDROID = 0; - PFN_vkGetMemoryAndroidHardwareBufferANDROID vkGetMemoryAndroidHardwareBufferANDROID = 0; -# else - PFN_dummy vkGetAndroidHardwareBufferPropertiesANDROID_placeholder = 0; - PFN_dummy vkGetMemoryAndroidHardwareBufferANDROID_placeholder = 0; -# endif /*VK_USE_PLATFORM_ANDROID_KHR*/ - -# if defined( VK_ENABLE_BETA_EXTENSIONS ) - //=== VK_AMDX_shader_enqueue === - PFN_vkCreateExecutionGraphPipelinesAMDX vkCreateExecutionGraphPipelinesAMDX = 0; - PFN_vkGetExecutionGraphPipelineScratchSizeAMDX 
vkGetExecutionGraphPipelineScratchSizeAMDX = 0; - PFN_vkGetExecutionGraphPipelineNodeIndexAMDX vkGetExecutionGraphPipelineNodeIndexAMDX = 0; - PFN_vkCmdInitializeGraphScratchMemoryAMDX vkCmdInitializeGraphScratchMemoryAMDX = 0; - PFN_vkCmdDispatchGraphAMDX vkCmdDispatchGraphAMDX = 0; - PFN_vkCmdDispatchGraphIndirectAMDX vkCmdDispatchGraphIndirectAMDX = 0; - PFN_vkCmdDispatchGraphIndirectCountAMDX vkCmdDispatchGraphIndirectCountAMDX = 0; -# else - PFN_dummy vkCreateExecutionGraphPipelinesAMDX_placeholder = 0; - PFN_dummy vkGetExecutionGraphPipelineScratchSizeAMDX_placeholder = 0; - PFN_dummy vkGetExecutionGraphPipelineNodeIndexAMDX_placeholder = 0; - PFN_dummy vkCmdInitializeGraphScratchMemoryAMDX_placeholder = 0; - PFN_dummy vkCmdDispatchGraphAMDX_placeholder = 0; - PFN_dummy vkCmdDispatchGraphIndirectAMDX_placeholder = 0; - PFN_dummy vkCmdDispatchGraphIndirectCountAMDX_placeholder = 0; -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ - - //=== VK_EXT_sample_locations === - PFN_vkCmdSetSampleLocationsEXT vkCmdSetSampleLocationsEXT = 0; - - //=== VK_KHR_get_memory_requirements2 === - PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR = 0; - PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR = 0; - PFN_vkGetImageSparseMemoryRequirements2KHR vkGetImageSparseMemoryRequirements2KHR = 0; - - //=== VK_KHR_acceleration_structure === - PFN_vkCreateAccelerationStructureKHR vkCreateAccelerationStructureKHR = 0; - PFN_vkDestroyAccelerationStructureKHR vkDestroyAccelerationStructureKHR = 0; - PFN_vkCmdBuildAccelerationStructuresKHR vkCmdBuildAccelerationStructuresKHR = 0; - PFN_vkCmdBuildAccelerationStructuresIndirectKHR vkCmdBuildAccelerationStructuresIndirectKHR = 0; - PFN_vkBuildAccelerationStructuresKHR vkBuildAccelerationStructuresKHR = 0; - PFN_vkCopyAccelerationStructureKHR vkCopyAccelerationStructureKHR = 0; - PFN_vkCopyAccelerationStructureToMemoryKHR vkCopyAccelerationStructureToMemoryKHR = 0; - PFN_vkCopyMemoryToAccelerationStructureKHR vkCopyMemoryToAccelerationStructureKHR = 0; - PFN_vkWriteAccelerationStructuresPropertiesKHR vkWriteAccelerationStructuresPropertiesKHR = 0; - PFN_vkCmdCopyAccelerationStructureKHR vkCmdCopyAccelerationStructureKHR = 0; - PFN_vkCmdCopyAccelerationStructureToMemoryKHR vkCmdCopyAccelerationStructureToMemoryKHR = 0; - PFN_vkCmdCopyMemoryToAccelerationStructureKHR vkCmdCopyMemoryToAccelerationStructureKHR = 0; - PFN_vkGetAccelerationStructureDeviceAddressKHR vkGetAccelerationStructureDeviceAddressKHR = 0; - PFN_vkCmdWriteAccelerationStructuresPropertiesKHR vkCmdWriteAccelerationStructuresPropertiesKHR = 0; - PFN_vkGetDeviceAccelerationStructureCompatibilityKHR vkGetDeviceAccelerationStructureCompatibilityKHR = 0; - PFN_vkGetAccelerationStructureBuildSizesKHR vkGetAccelerationStructureBuildSizesKHR = 0; - - //=== VK_KHR_ray_tracing_pipeline === - PFN_vkCmdTraceRaysKHR vkCmdTraceRaysKHR = 0; - PFN_vkCreateRayTracingPipelinesKHR vkCreateRayTracingPipelinesKHR = 0; - PFN_vkGetRayTracingShaderGroupHandlesKHR vkGetRayTracingShaderGroupHandlesKHR = 0; - PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR vkGetRayTracingCaptureReplayShaderGroupHandlesKHR = 0; - PFN_vkCmdTraceRaysIndirectKHR vkCmdTraceRaysIndirectKHR = 0; - PFN_vkGetRayTracingShaderGroupStackSizeKHR vkGetRayTracingShaderGroupStackSizeKHR = 0; - PFN_vkCmdSetRayTracingPipelineStackSizeKHR vkCmdSetRayTracingPipelineStackSizeKHR = 0; - - //=== VK_KHR_sampler_ycbcr_conversion === - PFN_vkCreateSamplerYcbcrConversionKHR vkCreateSamplerYcbcrConversionKHR = 0; - 
PFN_vkDestroySamplerYcbcrConversionKHR vkDestroySamplerYcbcrConversionKHR = 0; - - //=== VK_KHR_bind_memory2 === - PFN_vkBindBufferMemory2KHR vkBindBufferMemory2KHR = 0; - PFN_vkBindImageMemory2KHR vkBindImageMemory2KHR = 0; - - //=== VK_EXT_image_drm_format_modifier === - PFN_vkGetImageDrmFormatModifierPropertiesEXT vkGetImageDrmFormatModifierPropertiesEXT = 0; - - //=== VK_EXT_validation_cache === - PFN_vkCreateValidationCacheEXT vkCreateValidationCacheEXT = 0; - PFN_vkDestroyValidationCacheEXT vkDestroyValidationCacheEXT = 0; - PFN_vkMergeValidationCachesEXT vkMergeValidationCachesEXT = 0; - PFN_vkGetValidationCacheDataEXT vkGetValidationCacheDataEXT = 0; - - //=== VK_NV_shading_rate_image === - PFN_vkCmdBindShadingRateImageNV vkCmdBindShadingRateImageNV = 0; - PFN_vkCmdSetViewportShadingRatePaletteNV vkCmdSetViewportShadingRatePaletteNV = 0; - PFN_vkCmdSetCoarseSampleOrderNV vkCmdSetCoarseSampleOrderNV = 0; - - //=== VK_NV_ray_tracing === - PFN_vkCreateAccelerationStructureNV vkCreateAccelerationStructureNV = 0; - PFN_vkDestroyAccelerationStructureNV vkDestroyAccelerationStructureNV = 0; - PFN_vkGetAccelerationStructureMemoryRequirementsNV vkGetAccelerationStructureMemoryRequirementsNV = 0; - PFN_vkBindAccelerationStructureMemoryNV vkBindAccelerationStructureMemoryNV = 0; - PFN_vkCmdBuildAccelerationStructureNV vkCmdBuildAccelerationStructureNV = 0; - PFN_vkCmdCopyAccelerationStructureNV vkCmdCopyAccelerationStructureNV = 0; - PFN_vkCmdTraceRaysNV vkCmdTraceRaysNV = 0; - PFN_vkCreateRayTracingPipelinesNV vkCreateRayTracingPipelinesNV = 0; - PFN_vkGetRayTracingShaderGroupHandlesNV vkGetRayTracingShaderGroupHandlesNV = 0; - PFN_vkGetAccelerationStructureHandleNV vkGetAccelerationStructureHandleNV = 0; - PFN_vkCmdWriteAccelerationStructuresPropertiesNV vkCmdWriteAccelerationStructuresPropertiesNV = 0; - PFN_vkCompileDeferredNV vkCompileDeferredNV = 0; - - //=== VK_KHR_maintenance3 === - PFN_vkGetDescriptorSetLayoutSupportKHR vkGetDescriptorSetLayoutSupportKHR = 0; - - //=== VK_KHR_draw_indirect_count === - PFN_vkCmdDrawIndirectCountKHR vkCmdDrawIndirectCountKHR = 0; - PFN_vkCmdDrawIndexedIndirectCountKHR vkCmdDrawIndexedIndirectCountKHR = 0; - - //=== VK_EXT_external_memory_host === - PFN_vkGetMemoryHostPointerPropertiesEXT vkGetMemoryHostPointerPropertiesEXT = 0; - - //=== VK_AMD_buffer_marker === - PFN_vkCmdWriteBufferMarkerAMD vkCmdWriteBufferMarkerAMD = 0; - PFN_vkCmdWriteBufferMarker2AMD vkCmdWriteBufferMarker2AMD = 0; - - //=== VK_EXT_calibrated_timestamps === - PFN_vkGetCalibratedTimestampsEXT vkGetCalibratedTimestampsEXT = 0; - - //=== VK_NV_mesh_shader === - PFN_vkCmdDrawMeshTasksNV vkCmdDrawMeshTasksNV = 0; - PFN_vkCmdDrawMeshTasksIndirectNV vkCmdDrawMeshTasksIndirectNV = 0; - PFN_vkCmdDrawMeshTasksIndirectCountNV vkCmdDrawMeshTasksIndirectCountNV = 0; - - //=== VK_NV_scissor_exclusive === - PFN_vkCmdSetExclusiveScissorEnableNV vkCmdSetExclusiveScissorEnableNV = 0; - PFN_vkCmdSetExclusiveScissorNV vkCmdSetExclusiveScissorNV = 0; - - //=== VK_NV_device_diagnostic_checkpoints === - PFN_vkCmdSetCheckpointNV vkCmdSetCheckpointNV = 0; - PFN_vkGetQueueCheckpointDataNV vkGetQueueCheckpointDataNV = 0; - PFN_vkGetQueueCheckpointData2NV vkGetQueueCheckpointData2NV = 0; - - //=== VK_KHR_timeline_semaphore === - PFN_vkGetSemaphoreCounterValueKHR vkGetSemaphoreCounterValueKHR = 0; - PFN_vkWaitSemaphoresKHR vkWaitSemaphoresKHR = 0; - PFN_vkSignalSemaphoreKHR vkSignalSemaphoreKHR = 0; - - //=== VK_INTEL_performance_query === - PFN_vkInitializePerformanceApiINTEL vkInitializePerformanceApiINTEL 
= 0; - PFN_vkUninitializePerformanceApiINTEL vkUninitializePerformanceApiINTEL = 0; - PFN_vkCmdSetPerformanceMarkerINTEL vkCmdSetPerformanceMarkerINTEL = 0; - PFN_vkCmdSetPerformanceStreamMarkerINTEL vkCmdSetPerformanceStreamMarkerINTEL = 0; - PFN_vkCmdSetPerformanceOverrideINTEL vkCmdSetPerformanceOverrideINTEL = 0; - PFN_vkAcquirePerformanceConfigurationINTEL vkAcquirePerformanceConfigurationINTEL = 0; - PFN_vkReleasePerformanceConfigurationINTEL vkReleasePerformanceConfigurationINTEL = 0; - PFN_vkQueueSetPerformanceConfigurationINTEL vkQueueSetPerformanceConfigurationINTEL = 0; - PFN_vkGetPerformanceParameterINTEL vkGetPerformanceParameterINTEL = 0; - - //=== VK_AMD_display_native_hdr === - PFN_vkSetLocalDimmingAMD vkSetLocalDimmingAMD = 0; - - //=== VK_KHR_fragment_shading_rate === - PFN_vkCmdSetFragmentShadingRateKHR vkCmdSetFragmentShadingRateKHR = 0; - - //=== VK_KHR_dynamic_rendering_local_read === - PFN_vkCmdSetRenderingAttachmentLocationsKHR vkCmdSetRenderingAttachmentLocationsKHR = 0; - PFN_vkCmdSetRenderingInputAttachmentIndicesKHR vkCmdSetRenderingInputAttachmentIndicesKHR = 0; - - //=== VK_EXT_buffer_device_address === - PFN_vkGetBufferDeviceAddressEXT vkGetBufferDeviceAddressEXT = 0; - - //=== VK_KHR_present_wait === - PFN_vkWaitForPresentKHR vkWaitForPresentKHR = 0; - -# if defined( VK_USE_PLATFORM_WIN32_KHR ) - //=== VK_EXT_full_screen_exclusive === - PFN_vkAcquireFullScreenExclusiveModeEXT vkAcquireFullScreenExclusiveModeEXT = 0; - PFN_vkReleaseFullScreenExclusiveModeEXT vkReleaseFullScreenExclusiveModeEXT = 0; - PFN_vkGetDeviceGroupSurfacePresentModes2EXT vkGetDeviceGroupSurfacePresentModes2EXT = 0; -# else - PFN_dummy vkAcquireFullScreenExclusiveModeEXT_placeholder = 0; - PFN_dummy vkReleaseFullScreenExclusiveModeEXT_placeholder = 0; - PFN_dummy vkGetDeviceGroupSurfacePresentModes2EXT_placeholder = 0; -# endif /*VK_USE_PLATFORM_WIN32_KHR*/ - - //=== VK_KHR_buffer_device_address === - PFN_vkGetBufferDeviceAddressKHR vkGetBufferDeviceAddressKHR = 0; - PFN_vkGetBufferOpaqueCaptureAddressKHR vkGetBufferOpaqueCaptureAddressKHR = 0; - PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR vkGetDeviceMemoryOpaqueCaptureAddressKHR = 0; - - //=== VK_EXT_line_rasterization === - PFN_vkCmdSetLineStippleEXT vkCmdSetLineStippleEXT = 0; - - //=== VK_EXT_host_query_reset === - PFN_vkResetQueryPoolEXT vkResetQueryPoolEXT = 0; - - //=== VK_EXT_extended_dynamic_state === - PFN_vkCmdSetCullModeEXT vkCmdSetCullModeEXT = 0; - PFN_vkCmdSetFrontFaceEXT vkCmdSetFrontFaceEXT = 0; - PFN_vkCmdSetPrimitiveTopologyEXT vkCmdSetPrimitiveTopologyEXT = 0; - PFN_vkCmdSetViewportWithCountEXT vkCmdSetViewportWithCountEXT = 0; - PFN_vkCmdSetScissorWithCountEXT vkCmdSetScissorWithCountEXT = 0; - PFN_vkCmdBindVertexBuffers2EXT vkCmdBindVertexBuffers2EXT = 0; - PFN_vkCmdSetDepthTestEnableEXT vkCmdSetDepthTestEnableEXT = 0; - PFN_vkCmdSetDepthWriteEnableEXT vkCmdSetDepthWriteEnableEXT = 0; - PFN_vkCmdSetDepthCompareOpEXT vkCmdSetDepthCompareOpEXT = 0; - PFN_vkCmdSetDepthBoundsTestEnableEXT vkCmdSetDepthBoundsTestEnableEXT = 0; - PFN_vkCmdSetStencilTestEnableEXT vkCmdSetStencilTestEnableEXT = 0; - PFN_vkCmdSetStencilOpEXT vkCmdSetStencilOpEXT = 0; - - //=== VK_KHR_deferred_host_operations === - PFN_vkCreateDeferredOperationKHR vkCreateDeferredOperationKHR = 0; - PFN_vkDestroyDeferredOperationKHR vkDestroyDeferredOperationKHR = 0; - PFN_vkGetDeferredOperationMaxConcurrencyKHR vkGetDeferredOperationMaxConcurrencyKHR = 0; - PFN_vkGetDeferredOperationResultKHR vkGetDeferredOperationResultKHR = 0; - 
PFN_vkDeferredOperationJoinKHR vkDeferredOperationJoinKHR = 0; - - //=== VK_KHR_pipeline_executable_properties === - PFN_vkGetPipelineExecutablePropertiesKHR vkGetPipelineExecutablePropertiesKHR = 0; - PFN_vkGetPipelineExecutableStatisticsKHR vkGetPipelineExecutableStatisticsKHR = 0; - PFN_vkGetPipelineExecutableInternalRepresentationsKHR vkGetPipelineExecutableInternalRepresentationsKHR = 0; - - //=== VK_EXT_host_image_copy === - PFN_vkCopyMemoryToImageEXT vkCopyMemoryToImageEXT = 0; - PFN_vkCopyImageToMemoryEXT vkCopyImageToMemoryEXT = 0; - PFN_vkCopyImageToImageEXT vkCopyImageToImageEXT = 0; - PFN_vkTransitionImageLayoutEXT vkTransitionImageLayoutEXT = 0; - PFN_vkGetImageSubresourceLayout2EXT vkGetImageSubresourceLayout2EXT = 0; - - //=== VK_KHR_map_memory2 === - PFN_vkMapMemory2KHR vkMapMemory2KHR = 0; - PFN_vkUnmapMemory2KHR vkUnmapMemory2KHR = 0; - - //=== VK_EXT_swapchain_maintenance1 === - PFN_vkReleaseSwapchainImagesEXT vkReleaseSwapchainImagesEXT = 0; - - //=== VK_NV_device_generated_commands === - PFN_vkGetGeneratedCommandsMemoryRequirementsNV vkGetGeneratedCommandsMemoryRequirementsNV = 0; - PFN_vkCmdPreprocessGeneratedCommandsNV vkCmdPreprocessGeneratedCommandsNV = 0; - PFN_vkCmdExecuteGeneratedCommandsNV vkCmdExecuteGeneratedCommandsNV = 0; - PFN_vkCmdBindPipelineShaderGroupNV vkCmdBindPipelineShaderGroupNV = 0; - PFN_vkCreateIndirectCommandsLayoutNV vkCreateIndirectCommandsLayoutNV = 0; - PFN_vkDestroyIndirectCommandsLayoutNV vkDestroyIndirectCommandsLayoutNV = 0; - - //=== VK_EXT_depth_bias_control === - PFN_vkCmdSetDepthBias2EXT vkCmdSetDepthBias2EXT = 0; - - //=== VK_EXT_private_data === - PFN_vkCreatePrivateDataSlotEXT vkCreatePrivateDataSlotEXT = 0; - PFN_vkDestroyPrivateDataSlotEXT vkDestroyPrivateDataSlotEXT = 0; - PFN_vkSetPrivateDataEXT vkSetPrivateDataEXT = 0; - PFN_vkGetPrivateDataEXT vkGetPrivateDataEXT = 0; - - //=== VK_KHR_video_encode_queue === - PFN_vkGetEncodedVideoSessionParametersKHR vkGetEncodedVideoSessionParametersKHR = 0; - PFN_vkCmdEncodeVideoKHR vkCmdEncodeVideoKHR = 0; - -# if defined( VK_ENABLE_BETA_EXTENSIONS ) - //=== VK_NV_cuda_kernel_launch === - PFN_vkCreateCudaModuleNV vkCreateCudaModuleNV = 0; - PFN_vkGetCudaModuleCacheNV vkGetCudaModuleCacheNV = 0; - PFN_vkCreateCudaFunctionNV vkCreateCudaFunctionNV = 0; - PFN_vkDestroyCudaModuleNV vkDestroyCudaModuleNV = 0; - PFN_vkDestroyCudaFunctionNV vkDestroyCudaFunctionNV = 0; - PFN_vkCmdCudaLaunchKernelNV vkCmdCudaLaunchKernelNV = 0; -# else - PFN_dummy vkCreateCudaModuleNV_placeholder = 0; - PFN_dummy vkGetCudaModuleCacheNV_placeholder = 0; - PFN_dummy vkCreateCudaFunctionNV_placeholder = 0; - PFN_dummy vkDestroyCudaModuleNV_placeholder = 0; - PFN_dummy vkDestroyCudaFunctionNV_placeholder = 0; - PFN_dummy vkCmdCudaLaunchKernelNV_placeholder = 0; -# endif /*VK_ENABLE_BETA_EXTENSIONS*/ - -# if defined( VK_USE_PLATFORM_METAL_EXT ) - //=== VK_EXT_metal_objects === - PFN_vkExportMetalObjectsEXT vkExportMetalObjectsEXT = 0; -# else - PFN_dummy vkExportMetalObjectsEXT_placeholder = 0; -# endif /*VK_USE_PLATFORM_METAL_EXT*/ - - //=== VK_KHR_synchronization2 === - PFN_vkCmdSetEvent2KHR vkCmdSetEvent2KHR = 0; - PFN_vkCmdResetEvent2KHR vkCmdResetEvent2KHR = 0; - PFN_vkCmdWaitEvents2KHR vkCmdWaitEvents2KHR = 0; - PFN_vkCmdPipelineBarrier2KHR vkCmdPipelineBarrier2KHR = 0; - PFN_vkCmdWriteTimestamp2KHR vkCmdWriteTimestamp2KHR = 0; - PFN_vkQueueSubmit2KHR vkQueueSubmit2KHR = 0; - - //=== VK_EXT_descriptor_buffer === - PFN_vkGetDescriptorSetLayoutSizeEXT vkGetDescriptorSetLayoutSizeEXT = 0; - 
PFN_vkGetDescriptorSetLayoutBindingOffsetEXT vkGetDescriptorSetLayoutBindingOffsetEXT = 0; - PFN_vkGetDescriptorEXT vkGetDescriptorEXT = 0; - PFN_vkCmdBindDescriptorBuffersEXT vkCmdBindDescriptorBuffersEXT = 0; - PFN_vkCmdSetDescriptorBufferOffsetsEXT vkCmdSetDescriptorBufferOffsetsEXT = 0; - PFN_vkCmdBindDescriptorBufferEmbeddedSamplersEXT vkCmdBindDescriptorBufferEmbeddedSamplersEXT = 0; - PFN_vkGetBufferOpaqueCaptureDescriptorDataEXT vkGetBufferOpaqueCaptureDescriptorDataEXT = 0; - PFN_vkGetImageOpaqueCaptureDescriptorDataEXT vkGetImageOpaqueCaptureDescriptorDataEXT = 0; - PFN_vkGetImageViewOpaqueCaptureDescriptorDataEXT vkGetImageViewOpaqueCaptureDescriptorDataEXT = 0; - PFN_vkGetSamplerOpaqueCaptureDescriptorDataEXT vkGetSamplerOpaqueCaptureDescriptorDataEXT = 0; - PFN_vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT = 0; - - //=== VK_NV_fragment_shading_rate_enums === - PFN_vkCmdSetFragmentShadingRateEnumNV vkCmdSetFragmentShadingRateEnumNV = 0; - - //=== VK_EXT_mesh_shader === - PFN_vkCmdDrawMeshTasksEXT vkCmdDrawMeshTasksEXT = 0; - PFN_vkCmdDrawMeshTasksIndirectEXT vkCmdDrawMeshTasksIndirectEXT = 0; - PFN_vkCmdDrawMeshTasksIndirectCountEXT vkCmdDrawMeshTasksIndirectCountEXT = 0; - - //=== VK_KHR_copy_commands2 === - PFN_vkCmdCopyBuffer2KHR vkCmdCopyBuffer2KHR = 0; - PFN_vkCmdCopyImage2KHR vkCmdCopyImage2KHR = 0; - PFN_vkCmdCopyBufferToImage2KHR vkCmdCopyBufferToImage2KHR = 0; - PFN_vkCmdCopyImageToBuffer2KHR vkCmdCopyImageToBuffer2KHR = 0; - PFN_vkCmdBlitImage2KHR vkCmdBlitImage2KHR = 0; - PFN_vkCmdResolveImage2KHR vkCmdResolveImage2KHR = 0; - - //=== VK_EXT_device_fault === - PFN_vkGetDeviceFaultInfoEXT vkGetDeviceFaultInfoEXT = 0; - - //=== VK_EXT_vertex_input_dynamic_state === - PFN_vkCmdSetVertexInputEXT vkCmdSetVertexInputEXT = 0; - -# if defined( VK_USE_PLATFORM_FUCHSIA ) - //=== VK_FUCHSIA_external_memory === - PFN_vkGetMemoryZirconHandleFUCHSIA vkGetMemoryZirconHandleFUCHSIA = 0; - PFN_vkGetMemoryZirconHandlePropertiesFUCHSIA vkGetMemoryZirconHandlePropertiesFUCHSIA = 0; -# else - PFN_dummy vkGetMemoryZirconHandleFUCHSIA_placeholder = 0; - PFN_dummy vkGetMemoryZirconHandlePropertiesFUCHSIA_placeholder = 0; -# endif /*VK_USE_PLATFORM_FUCHSIA*/ - -# if defined( VK_USE_PLATFORM_FUCHSIA ) - //=== VK_FUCHSIA_external_semaphore === - PFN_vkImportSemaphoreZirconHandleFUCHSIA vkImportSemaphoreZirconHandleFUCHSIA = 0; - PFN_vkGetSemaphoreZirconHandleFUCHSIA vkGetSemaphoreZirconHandleFUCHSIA = 0; -# else - PFN_dummy vkImportSemaphoreZirconHandleFUCHSIA_placeholder = 0; - PFN_dummy vkGetSemaphoreZirconHandleFUCHSIA_placeholder = 0; -# endif /*VK_USE_PLATFORM_FUCHSIA*/ - -# if defined( VK_USE_PLATFORM_FUCHSIA ) - //=== VK_FUCHSIA_buffer_collection === - PFN_vkCreateBufferCollectionFUCHSIA vkCreateBufferCollectionFUCHSIA = 0; - PFN_vkSetBufferCollectionImageConstraintsFUCHSIA vkSetBufferCollectionImageConstraintsFUCHSIA = 0; - PFN_vkSetBufferCollectionBufferConstraintsFUCHSIA vkSetBufferCollectionBufferConstraintsFUCHSIA = 0; - PFN_vkDestroyBufferCollectionFUCHSIA vkDestroyBufferCollectionFUCHSIA = 0; - PFN_vkGetBufferCollectionPropertiesFUCHSIA vkGetBufferCollectionPropertiesFUCHSIA = 0; -# else - PFN_dummy vkCreateBufferCollectionFUCHSIA_placeholder = 0; - PFN_dummy vkSetBufferCollectionImageConstraintsFUCHSIA_placeholder = 0; - PFN_dummy vkSetBufferCollectionBufferConstraintsFUCHSIA_placeholder = 0; - PFN_dummy vkDestroyBufferCollectionFUCHSIA_placeholder = 0; - PFN_dummy 
vkGetBufferCollectionPropertiesFUCHSIA_placeholder = 0; -# endif /*VK_USE_PLATFORM_FUCHSIA*/ - - //=== VK_HUAWEI_subpass_shading === - PFN_vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI = 0; - PFN_vkCmdSubpassShadingHUAWEI vkCmdSubpassShadingHUAWEI = 0; - - //=== VK_HUAWEI_invocation_mask === - PFN_vkCmdBindInvocationMaskHUAWEI vkCmdBindInvocationMaskHUAWEI = 0; - - //=== VK_NV_external_memory_rdma === - PFN_vkGetMemoryRemoteAddressNV vkGetMemoryRemoteAddressNV = 0; - - //=== VK_EXT_pipeline_properties === - PFN_vkGetPipelinePropertiesEXT vkGetPipelinePropertiesEXT = 0; - - //=== VK_EXT_extended_dynamic_state2 === - PFN_vkCmdSetPatchControlPointsEXT vkCmdSetPatchControlPointsEXT = 0; - PFN_vkCmdSetRasterizerDiscardEnableEXT vkCmdSetRasterizerDiscardEnableEXT = 0; - PFN_vkCmdSetDepthBiasEnableEXT vkCmdSetDepthBiasEnableEXT = 0; - PFN_vkCmdSetLogicOpEXT vkCmdSetLogicOpEXT = 0; - PFN_vkCmdSetPrimitiveRestartEnableEXT vkCmdSetPrimitiveRestartEnableEXT = 0; - - //=== VK_EXT_color_write_enable === - PFN_vkCmdSetColorWriteEnableEXT vkCmdSetColorWriteEnableEXT = 0; - - //=== VK_KHR_ray_tracing_maintenance1 === - PFN_vkCmdTraceRaysIndirect2KHR vkCmdTraceRaysIndirect2KHR = 0; - - //=== VK_EXT_multi_draw === - PFN_vkCmdDrawMultiEXT vkCmdDrawMultiEXT = 0; - PFN_vkCmdDrawMultiIndexedEXT vkCmdDrawMultiIndexedEXT = 0; - - //=== VK_EXT_opacity_micromap === - PFN_vkCreateMicromapEXT vkCreateMicromapEXT = 0; - PFN_vkDestroyMicromapEXT vkDestroyMicromapEXT = 0; - PFN_vkCmdBuildMicromapsEXT vkCmdBuildMicromapsEXT = 0; - PFN_vkBuildMicromapsEXT vkBuildMicromapsEXT = 0; - PFN_vkCopyMicromapEXT vkCopyMicromapEXT = 0; - PFN_vkCopyMicromapToMemoryEXT vkCopyMicromapToMemoryEXT = 0; - PFN_vkCopyMemoryToMicromapEXT vkCopyMemoryToMicromapEXT = 0; - PFN_vkWriteMicromapsPropertiesEXT vkWriteMicromapsPropertiesEXT = 0; - PFN_vkCmdCopyMicromapEXT vkCmdCopyMicromapEXT = 0; - PFN_vkCmdCopyMicromapToMemoryEXT vkCmdCopyMicromapToMemoryEXT = 0; - PFN_vkCmdCopyMemoryToMicromapEXT vkCmdCopyMemoryToMicromapEXT = 0; - PFN_vkCmdWriteMicromapsPropertiesEXT vkCmdWriteMicromapsPropertiesEXT = 0; - PFN_vkGetDeviceMicromapCompatibilityEXT vkGetDeviceMicromapCompatibilityEXT = 0; - PFN_vkGetMicromapBuildSizesEXT vkGetMicromapBuildSizesEXT = 0; - - //=== VK_HUAWEI_cluster_culling_shader === - PFN_vkCmdDrawClusterHUAWEI vkCmdDrawClusterHUAWEI = 0; - PFN_vkCmdDrawClusterIndirectHUAWEI vkCmdDrawClusterIndirectHUAWEI = 0; - - //=== VK_EXT_pageable_device_local_memory === - PFN_vkSetDeviceMemoryPriorityEXT vkSetDeviceMemoryPriorityEXT = 0; - - //=== VK_KHR_maintenance4 === - PFN_vkGetDeviceBufferMemoryRequirementsKHR vkGetDeviceBufferMemoryRequirementsKHR = 0; - PFN_vkGetDeviceImageMemoryRequirementsKHR vkGetDeviceImageMemoryRequirementsKHR = 0; - PFN_vkGetDeviceImageSparseMemoryRequirementsKHR vkGetDeviceImageSparseMemoryRequirementsKHR = 0; - - //=== VK_VALVE_descriptor_set_host_mapping === - PFN_vkGetDescriptorSetLayoutHostMappingInfoVALVE vkGetDescriptorSetLayoutHostMappingInfoVALVE = 0; - PFN_vkGetDescriptorSetHostMappingVALVE vkGetDescriptorSetHostMappingVALVE = 0; - - //=== VK_NV_copy_memory_indirect === - PFN_vkCmdCopyMemoryIndirectNV vkCmdCopyMemoryIndirectNV = 0; - PFN_vkCmdCopyMemoryToImageIndirectNV vkCmdCopyMemoryToImageIndirectNV = 0; - - //=== VK_NV_memory_decompression === - PFN_vkCmdDecompressMemoryNV vkCmdDecompressMemoryNV = 0; - PFN_vkCmdDecompressMemoryIndirectCountNV vkCmdDecompressMemoryIndirectCountNV = 0; - - //=== VK_NV_device_generated_commands_compute === - 
PFN_vkGetPipelineIndirectMemoryRequirementsNV vkGetPipelineIndirectMemoryRequirementsNV = 0; - PFN_vkCmdUpdatePipelineIndirectBufferNV vkCmdUpdatePipelineIndirectBufferNV = 0; - PFN_vkGetPipelineIndirectDeviceAddressNV vkGetPipelineIndirectDeviceAddressNV = 0; - - //=== VK_EXT_extended_dynamic_state3 === - PFN_vkCmdSetDepthClampEnableEXT vkCmdSetDepthClampEnableEXT = 0; - PFN_vkCmdSetPolygonModeEXT vkCmdSetPolygonModeEXT = 0; - PFN_vkCmdSetRasterizationSamplesEXT vkCmdSetRasterizationSamplesEXT = 0; - PFN_vkCmdSetSampleMaskEXT vkCmdSetSampleMaskEXT = 0; - PFN_vkCmdSetAlphaToCoverageEnableEXT vkCmdSetAlphaToCoverageEnableEXT = 0; - PFN_vkCmdSetAlphaToOneEnableEXT vkCmdSetAlphaToOneEnableEXT = 0; - PFN_vkCmdSetLogicOpEnableEXT vkCmdSetLogicOpEnableEXT = 0; - PFN_vkCmdSetColorBlendEnableEXT vkCmdSetColorBlendEnableEXT = 0; - PFN_vkCmdSetColorBlendEquationEXT vkCmdSetColorBlendEquationEXT = 0; - PFN_vkCmdSetColorWriteMaskEXT vkCmdSetColorWriteMaskEXT = 0; - PFN_vkCmdSetTessellationDomainOriginEXT vkCmdSetTessellationDomainOriginEXT = 0; - PFN_vkCmdSetRasterizationStreamEXT vkCmdSetRasterizationStreamEXT = 0; - PFN_vkCmdSetConservativeRasterizationModeEXT vkCmdSetConservativeRasterizationModeEXT = 0; - PFN_vkCmdSetExtraPrimitiveOverestimationSizeEXT vkCmdSetExtraPrimitiveOverestimationSizeEXT = 0; - PFN_vkCmdSetDepthClipEnableEXT vkCmdSetDepthClipEnableEXT = 0; - PFN_vkCmdSetSampleLocationsEnableEXT vkCmdSetSampleLocationsEnableEXT = 0; - PFN_vkCmdSetColorBlendAdvancedEXT vkCmdSetColorBlendAdvancedEXT = 0; - PFN_vkCmdSetProvokingVertexModeEXT vkCmdSetProvokingVertexModeEXT = 0; - PFN_vkCmdSetLineRasterizationModeEXT vkCmdSetLineRasterizationModeEXT = 0; - PFN_vkCmdSetLineStippleEnableEXT vkCmdSetLineStippleEnableEXT = 0; - PFN_vkCmdSetDepthClipNegativeOneToOneEXT vkCmdSetDepthClipNegativeOneToOneEXT = 0; - PFN_vkCmdSetViewportWScalingEnableNV vkCmdSetViewportWScalingEnableNV = 0; - PFN_vkCmdSetViewportSwizzleNV vkCmdSetViewportSwizzleNV = 0; - PFN_vkCmdSetCoverageToColorEnableNV vkCmdSetCoverageToColorEnableNV = 0; - PFN_vkCmdSetCoverageToColorLocationNV vkCmdSetCoverageToColorLocationNV = 0; - PFN_vkCmdSetCoverageModulationModeNV vkCmdSetCoverageModulationModeNV = 0; - PFN_vkCmdSetCoverageModulationTableEnableNV vkCmdSetCoverageModulationTableEnableNV = 0; - PFN_vkCmdSetCoverageModulationTableNV vkCmdSetCoverageModulationTableNV = 0; - PFN_vkCmdSetShadingRateImageEnableNV vkCmdSetShadingRateImageEnableNV = 0; - PFN_vkCmdSetRepresentativeFragmentTestEnableNV vkCmdSetRepresentativeFragmentTestEnableNV = 0; - PFN_vkCmdSetCoverageReductionModeNV vkCmdSetCoverageReductionModeNV = 0; - - //=== VK_EXT_shader_module_identifier === - PFN_vkGetShaderModuleIdentifierEXT vkGetShaderModuleIdentifierEXT = 0; - PFN_vkGetShaderModuleCreateInfoIdentifierEXT vkGetShaderModuleCreateInfoIdentifierEXT = 0; - - //=== VK_NV_optical_flow === - PFN_vkCreateOpticalFlowSessionNV vkCreateOpticalFlowSessionNV = 0; - PFN_vkDestroyOpticalFlowSessionNV vkDestroyOpticalFlowSessionNV = 0; - PFN_vkBindOpticalFlowSessionImageNV vkBindOpticalFlowSessionImageNV = 0; - PFN_vkCmdOpticalFlowExecuteNV vkCmdOpticalFlowExecuteNV = 0; - - //=== VK_KHR_maintenance5 === - PFN_vkCmdBindIndexBuffer2KHR vkCmdBindIndexBuffer2KHR = 0; - PFN_vkGetRenderingAreaGranularityKHR vkGetRenderingAreaGranularityKHR = 0; - PFN_vkGetDeviceImageSubresourceLayoutKHR vkGetDeviceImageSubresourceLayoutKHR = 0; - PFN_vkGetImageSubresourceLayout2KHR vkGetImageSubresourceLayout2KHR = 0; - - //=== VK_AMD_anti_lag === - PFN_vkAntiLagUpdateAMD vkAntiLagUpdateAMD = 0; 
- - //=== VK_EXT_shader_object === - PFN_vkCreateShadersEXT vkCreateShadersEXT = 0; - PFN_vkDestroyShaderEXT vkDestroyShaderEXT = 0; - PFN_vkGetShaderBinaryDataEXT vkGetShaderBinaryDataEXT = 0; - PFN_vkCmdBindShadersEXT vkCmdBindShadersEXT = 0; - PFN_vkCmdSetDepthClampRangeEXT vkCmdSetDepthClampRangeEXT = 0; - - //=== VK_KHR_pipeline_binary === - PFN_vkCreatePipelineBinariesKHR vkCreatePipelineBinariesKHR = 0; - PFN_vkDestroyPipelineBinaryKHR vkDestroyPipelineBinaryKHR = 0; - PFN_vkGetPipelineKeyKHR vkGetPipelineKeyKHR = 0; - PFN_vkGetPipelineBinaryDataKHR vkGetPipelineBinaryDataKHR = 0; - PFN_vkReleaseCapturedPipelineDataKHR vkReleaseCapturedPipelineDataKHR = 0; - - //=== VK_QCOM_tile_properties === - PFN_vkGetFramebufferTilePropertiesQCOM vkGetFramebufferTilePropertiesQCOM = 0; - PFN_vkGetDynamicRenderingTilePropertiesQCOM vkGetDynamicRenderingTilePropertiesQCOM = 0; - - //=== VK_NV_low_latency2 === - PFN_vkSetLatencySleepModeNV vkSetLatencySleepModeNV = 0; - PFN_vkLatencySleepNV vkLatencySleepNV = 0; - PFN_vkSetLatencyMarkerNV vkSetLatencyMarkerNV = 0; - PFN_vkGetLatencyTimingsNV vkGetLatencyTimingsNV = 0; - PFN_vkQueueNotifyOutOfBandNV vkQueueNotifyOutOfBandNV = 0; - - //=== VK_EXT_attachment_feedback_loop_dynamic_state === - PFN_vkCmdSetAttachmentFeedbackLoopEnableEXT vkCmdSetAttachmentFeedbackLoopEnableEXT = 0; - -# if defined( VK_USE_PLATFORM_SCREEN_QNX ) - //=== VK_QNX_external_memory_screen_buffer === - PFN_vkGetScreenBufferPropertiesQNX vkGetScreenBufferPropertiesQNX = 0; -# else - PFN_dummy vkGetScreenBufferPropertiesQNX_placeholder = 0; -# endif /*VK_USE_PLATFORM_SCREEN_QNX*/ - - //=== VK_KHR_line_rasterization === - PFN_vkCmdSetLineStippleKHR vkCmdSetLineStippleKHR = 0; - - //=== VK_KHR_calibrated_timestamps === - PFN_vkGetCalibratedTimestampsKHR vkGetCalibratedTimestampsKHR = 0; - - //=== VK_KHR_maintenance6 === - PFN_vkCmdBindDescriptorSets2KHR vkCmdBindDescriptorSets2KHR = 0; - PFN_vkCmdPushConstants2KHR vkCmdPushConstants2KHR = 0; - PFN_vkCmdPushDescriptorSet2KHR vkCmdPushDescriptorSet2KHR = 0; - PFN_vkCmdPushDescriptorSetWithTemplate2KHR vkCmdPushDescriptorSetWithTemplate2KHR = 0; - PFN_vkCmdSetDescriptorBufferOffsets2EXT vkCmdSetDescriptorBufferOffsets2EXT = 0; - PFN_vkCmdBindDescriptorBufferEmbeddedSamplers2EXT vkCmdBindDescriptorBufferEmbeddedSamplers2EXT = 0; - - //=== VK_EXT_device_generated_commands === - PFN_vkGetGeneratedCommandsMemoryRequirementsEXT vkGetGeneratedCommandsMemoryRequirementsEXT = 0; - PFN_vkCmdPreprocessGeneratedCommandsEXT vkCmdPreprocessGeneratedCommandsEXT = 0; - PFN_vkCmdExecuteGeneratedCommandsEXT vkCmdExecuteGeneratedCommandsEXT = 0; - PFN_vkCreateIndirectCommandsLayoutEXT vkCreateIndirectCommandsLayoutEXT = 0; - PFN_vkDestroyIndirectCommandsLayoutEXT vkDestroyIndirectCommandsLayoutEXT = 0; - PFN_vkCreateIndirectExecutionSetEXT vkCreateIndirectExecutionSetEXT = 0; - PFN_vkDestroyIndirectExecutionSetEXT vkDestroyIndirectExecutionSetEXT = 0; - PFN_vkUpdateIndirectExecutionSetPipelineEXT vkUpdateIndirectExecutionSetPipelineEXT = 0; - PFN_vkUpdateIndirectExecutionSetShaderEXT vkUpdateIndirectExecutionSetShaderEXT = 0; - }; + PFN_vkGetGeneratedCommandsMemoryRequirementsEXT vkGetGeneratedCommandsMemoryRequirementsEXT = 0; + PFN_vkCmdPreprocessGeneratedCommandsEXT vkCmdPreprocessGeneratedCommandsEXT = 0; + PFN_vkCmdExecuteGeneratedCommandsEXT vkCmdExecuteGeneratedCommandsEXT = 0; + PFN_vkCreateIndirectCommandsLayoutEXT vkCreateIndirectCommandsLayoutEXT = 0; + PFN_vkDestroyIndirectCommandsLayoutEXT vkDestroyIndirectCommandsLayoutEXT = 0; + 
PFN_vkCreateIndirectExecutionSetEXT vkCreateIndirectExecutionSetEXT = 0; + PFN_vkDestroyIndirectExecutionSetEXT vkDestroyIndirectExecutionSetEXT = 0; + PFN_vkUpdateIndirectExecutionSetPipelineEXT vkUpdateIndirectExecutionSetPipelineEXT = 0; + PFN_vkUpdateIndirectExecutionSetShaderEXT vkUpdateIndirectExecutionSetShaderEXT = 0; + }; + + } // namespace detail //======================================== //=== RAII HANDLE forward declarations === @@ -2766,11 +2775,11 @@ namespace VULKAN_HPP_NAMESPACE public: # if VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL Context() - : m_dispatcher( new VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::ContextDispatcher( + : m_dispatcher( new VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::ContextDispatcher( m_dynamicLoader.getProcAddress( "vkGetInstanceProcAddr" ) ) ) # else Context( PFN_vkGetInstanceProcAddr getInstanceProcAddr ) - : m_dispatcher( new VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::ContextDispatcher( getInstanceProcAddr ) ) + : m_dispatcher( new VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::ContextDispatcher( getInstanceProcAddr ) ) # endif { } @@ -2801,7 +2810,7 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::ContextDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::ContextDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return &*m_dispatcher; @@ -2817,9 +2826,10 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_VERSION_1_0 === - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - createInstance( VULKAN_HPP_NAMESPACE::InstanceCreateInfo const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createInstance( VULKAN_HPP_NAMESPACE::InstanceCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; VULKAN_HPP_NODISCARD std::vector @@ -2833,9 +2843,9 @@ namespace VULKAN_HPP_NAMESPACE private: # if VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL - VULKAN_HPP_NAMESPACE::DynamicLoader m_dynamicLoader; + VULKAN_HPP_NAMESPACE::detail::DynamicLoader m_dynamicLoader; # endif - std::unique_ptr m_dispatcher; + std::unique_ptr m_dispatcher; }; class Instance @@ -2863,8 +2873,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) : m_instance( instance ), m_allocator( static_cast( allocator ) ) { - m_dispatcher.reset( new VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher( context.getDispatcher()->vkGetInstanceProcAddr, - static_cast( m_instance ) ) ); + m_dispatcher.reset( new VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::InstanceDispatcher( context.getDispatcher()->vkGetInstanceProcAddr, + static_cast( m_instance ) ) ); } Instance( std::nullptr_t ) {} @@ -2925,7 +2935,7 @@ namespace VULKAN_HPP_NAMESPACE return VULKAN_HPP_NAMESPACE::exchange( m_instance, nullptr ); } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::InstanceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return &*m_dispatcher; @@ -2940,8 +2950,8 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_VERSION_1_0 === - VULKAN_HPP_NODISCARD - 
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType>::Type + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType< + std::vector>::Type enumeratePhysicalDevices() const; VULKAN_HPP_NODISCARD PFN_vkVoidFunction getProcAddr( const std::string & name ) const VULKAN_HPP_NOEXCEPT; @@ -2952,60 +2962,66 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_KHR_display === - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - createDisplayPlaneSurfaceKHR( VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createDisplayPlaneSurfaceKHR( VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; # if defined( VK_USE_PLATFORM_XLIB_KHR ) //=== VK_KHR_xlib_surface === - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - createXlibSurfaceKHR( VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createXlibSurfaceKHR( VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; # endif /*VK_USE_PLATFORM_XLIB_KHR*/ # if defined( VK_USE_PLATFORM_XCB_KHR ) //=== VK_KHR_xcb_surface === - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - createXcbSurfaceKHR( VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createXcbSurfaceKHR( VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; # endif /*VK_USE_PLATFORM_XCB_KHR*/ # if defined( VK_USE_PLATFORM_WAYLAND_KHR ) //=== VK_KHR_wayland_surface === - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - createWaylandSurfaceKHR( VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createWaylandSurfaceKHR( VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; # endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ # if defined( VK_USE_PLATFORM_ANDROID_KHR ) //=== VK_KHR_android_surface === - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - createAndroidSurfaceKHR( VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createAndroidSurfaceKHR( VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; # endif 
/*VK_USE_PLATFORM_ANDROID_KHR*/ # if defined( VK_USE_PLATFORM_WIN32_KHR ) //=== VK_KHR_win32_surface === - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - createWin32SurfaceKHR( VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createWin32SurfaceKHR( VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; # endif /*VK_USE_PLATFORM_WIN32_KHR*/ //=== VK_EXT_debug_report === VULKAN_HPP_NODISCARD - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type createDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; @@ -3021,18 +3037,20 @@ namespace VULKAN_HPP_NAMESPACE # if defined( VK_USE_PLATFORM_GGP ) //=== VK_GGP_stream_descriptor_surface === - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - createStreamDescriptorSurfaceGGP( VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createStreamDescriptorSurfaceGGP( VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; # endif /*VK_USE_PLATFORM_GGP*/ # if defined( VK_USE_PLATFORM_VI_NN ) //=== VK_NN_vi_surface === - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - createViSurfaceNN( VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createViSurfaceNN( VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; # endif /*VK_USE_PLATFORM_VI_NN*/ @@ -3043,25 +3061,27 @@ namespace VULKAN_HPP_NAMESPACE # if defined( VK_USE_PLATFORM_IOS_MVK ) //=== VK_MVK_ios_surface === - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - createIOSSurfaceMVK( VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createIOSSurfaceMVK( VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; # endif /*VK_USE_PLATFORM_IOS_MVK*/ # if defined( VK_USE_PLATFORM_MACOS_MVK ) //=== VK_MVK_macos_surface === - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - createMacOSSurfaceMVK( VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_NODISCARD + 
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createMacOSSurfaceMVK( VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; # endif /*VK_USE_PLATFORM_MACOS_MVK*/ //=== VK_EXT_debug_utils === VULKAN_HPP_NODISCARD - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type createDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; @@ -3073,50 +3093,55 @@ namespace VULKAN_HPP_NAMESPACE # if defined( VK_USE_PLATFORM_FUCHSIA ) //=== VK_FUCHSIA_imagepipe_surface === - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - createImagePipeSurfaceFUCHSIA( VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createImagePipeSurfaceFUCHSIA( VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; # endif /*VK_USE_PLATFORM_FUCHSIA*/ # if defined( VK_USE_PLATFORM_METAL_EXT ) //=== VK_EXT_metal_surface === - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - createMetalSurfaceEXT( VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createMetalSurfaceEXT( VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; # endif /*VK_USE_PLATFORM_METAL_EXT*/ //=== VK_EXT_headless_surface === - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - createHeadlessSurfaceEXT( VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createHeadlessSurfaceEXT( VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; # if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) //=== VK_EXT_directfb_surface === - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - createDirectFBSurfaceEXT( VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createDirectFBSurfaceEXT( VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; # endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ # if defined( VK_USE_PLATFORM_SCREEN_QNX ) //=== VK_QNX_screen_surface === - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - 
createScreenSurfaceQNX( VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createScreenSurfaceQNX( VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; # endif /*VK_USE_PLATFORM_SCREEN_QNX*/ private: - VULKAN_HPP_NAMESPACE::Instance m_instance = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - std::unique_ptr m_dispatcher; + VULKAN_HPP_NAMESPACE::Instance m_instance = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + std::unique_ptr m_dispatcher; }; class PhysicalDevice @@ -3191,7 +3216,7 @@ namespace VULKAN_HPP_NAMESPACE return VULKAN_HPP_NAMESPACE::exchange( m_physicalDevice, nullptr ); } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::InstanceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -3222,9 +3247,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties getMemoryProperties() const VULKAN_HPP_NOEXCEPT; - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - createDevice( VULKAN_HPP_NAMESPACE::DeviceCreateInfo const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createDevice( VULKAN_HPP_NAMESPACE::DeviceCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; VULKAN_HPP_NODISCARD std::vector @@ -3312,8 +3338,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD std::vector getDisplayPlanePropertiesKHR() const; - VULKAN_HPP_NODISCARD - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType>::Type + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType< + std::vector>::Type getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex ) const; # if defined( VK_USE_PLATFORM_XLIB_KHR ) @@ -3418,8 +3444,9 @@ namespace VULKAN_HPP_NAMESPACE void acquireXlibDisplayEXT( Display & dpy, VULKAN_HPP_NAMESPACE::DisplayKHR display ) const; - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - getRandROutputDisplayEXT( Display & dpy, RROutput rrOutput ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + getRandROutputDisplayEXT( Display & dpy, RROutput rrOutput ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; # endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ //=== VK_EXT_display_surface_counter === @@ -3500,8 +3527,9 @@ namespace VULKAN_HPP_NAMESPACE void acquireDrmDisplayEXT( int32_t drmFd, VULKAN_HPP_NAMESPACE::DisplayKHR display ) const; - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - getDrmDisplayEXT( int32_t drmFd, uint32_t connectorId ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + getDrmDisplayEXT( int32_t drmFd, uint32_t connectorId ) 
const VULKAN_HPP_RAII_CREATE_NOEXCEPT; //=== VK_KHR_video_encode_queue === @@ -3515,8 +3543,9 @@ namespace VULKAN_HPP_NAMESPACE # if defined( VK_USE_PLATFORM_WIN32_KHR ) //=== VK_NV_acquire_winrt_display === - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - getWinrtDisplayNV( uint32_t deviceRelativeId ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + getWinrtDisplayNV( uint32_t deviceRelativeId ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; # endif /*VK_USE_PLATFORM_WIN32_KHR*/ # if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) @@ -3552,8 +3581,8 @@ namespace VULKAN_HPP_NAMESPACE getCooperativeMatrixFlexibleDimensionsPropertiesNV() const; private: - VULKAN_HPP_NAMESPACE::PhysicalDevice m_physicalDevice = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::PhysicalDevice m_physicalDevice = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::InstanceDispatcher const * m_dispatcher = nullptr; }; class PhysicalDevices : public std::vector @@ -3606,8 +3635,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) : m_device( device ), m_allocator( static_cast( allocator ) ) { - m_dispatcher.reset( new VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher( physicalDevice.getDispatcher()->vkGetDeviceProcAddr, - static_cast( m_device ) ) ); + m_dispatcher.reset( new VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher( physicalDevice.getDispatcher()->vkGetDeviceProcAddr, + static_cast( m_device ) ) ); } Device( std::nullptr_t ) {} @@ -3668,7 +3697,7 @@ namespace VULKAN_HPP_NAMESPACE return VULKAN_HPP_NAMESPACE::exchange( m_device, nullptr ); } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return &*m_dispatcher; @@ -3685,13 +3714,14 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD PFN_vkVoidFunction getProcAddr( const std::string & name ) const VULKAN_HPP_NOEXCEPT; - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; void waitIdle() const; VULKAN_HPP_NODISCARD - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type allocateMemory( VULKAN_HPP_NAMESPACE::MemoryAllocateInfo const & allocateInfo, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; @@ -3700,9 +3730,10 @@ namespace VULKAN_HPP_NAMESPACE void invalidateMappedMemoryRanges( VULKAN_HPP_NAMESPACE::ArrayProxy const & memoryRanges ) const; - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - createFence( VULKAN_HPP_NAMESPACE::FenceCreateInfo const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_NODISCARD + 
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createFence( VULKAN_HPP_NAMESPACE::FenceCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; void resetFences( VULKAN_HPP_NAMESPACE::ArrayProxy const & fences ) const; @@ -3711,125 +3742,138 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Bool32 waitAll, uint64_t timeout ) const; - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - createSemaphore( VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const - VULKAN_HPP_RAII_CREATE_NOEXCEPT; - - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - createEvent( VULKAN_HPP_NAMESPACE::EventCreateInfo const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const - VULKAN_HPP_RAII_CREATE_NOEXCEPT; - - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - createQueryPool( VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const - VULKAN_HPP_RAII_CREATE_NOEXCEPT; - - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - createBuffer( VULKAN_HPP_NAMESPACE::BufferCreateInfo const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const - VULKAN_HPP_RAII_CREATE_NOEXCEPT; - - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - createBufferView( VULKAN_HPP_NAMESPACE::BufferViewCreateInfo const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const - VULKAN_HPP_RAII_CREATE_NOEXCEPT; - - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - createImage( VULKAN_HPP_NAMESPACE::ImageCreateInfo const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const - VULKAN_HPP_RAII_CREATE_NOEXCEPT; - - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - createImageView( VULKAN_HPP_NAMESPACE::ImageViewCreateInfo const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createSemaphore( VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; VULKAN_HPP_NODISCARD - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createEvent( VULKAN_HPP_NAMESPACE::EventCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; + + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createQueryPool( VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; + + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createBuffer( VULKAN_HPP_NAMESPACE::BufferCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; + + VULKAN_HPP_NODISCARD + 
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createBufferView( VULKAN_HPP_NAMESPACE::BufferViewCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; + + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createImage( VULKAN_HPP_NAMESPACE::ImageCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; + + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createImageView( VULKAN_HPP_NAMESPACE::ImageViewCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; + + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type createShaderModule( VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; VULKAN_HPP_NODISCARD - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type createPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; VULKAN_HPP_NODISCARD - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType>::Type + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType>::Type createGraphicsPipelines( VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type createGraphicsPipeline( VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; VULKAN_HPP_NODISCARD - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType>::Type + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType>::Type createComputePipelines( VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type createComputePipeline( VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; VULKAN_HPP_NODISCARD - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type createPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - createSampler( 
VULKAN_HPP_NAMESPACE::SamplerCreateInfo const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createSampler( VULKAN_HPP_NAMESPACE::SamplerCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; VULKAN_HPP_NODISCARD - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type createDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; VULKAN_HPP_NODISCARD - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type createDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; - VULKAN_HPP_NODISCARD - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType>::Type + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType< + std::vector>::Type allocateDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo const & allocateInfo ) const; void updateDescriptorSets( VULKAN_HPP_NAMESPACE::ArrayProxy const & descriptorWrites, VULKAN_HPP_NAMESPACE::ArrayProxy const & descriptorCopies ) const VULKAN_HPP_NOEXCEPT; - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - createFramebuffer( VULKAN_HPP_NAMESPACE::FramebufferCreateInfo const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const - VULKAN_HPP_RAII_CREATE_NOEXCEPT; - - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - createRenderPass( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const - VULKAN_HPP_RAII_CREATE_NOEXCEPT; - - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - createCommandPool( VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createFramebuffer( VULKAN_HPP_NAMESPACE::FramebufferCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; VULKAN_HPP_NODISCARD - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType>::Type + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createRenderPass( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; + + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createCommandPool( VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_RAII_CREATE_NOEXCEPT; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType< + std::vector>::Type allocateCommandBuffers( VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo const & allocateInfo ) 
const; //=== VK_VERSION_1_1 === @@ -3858,17 +3902,18 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD std::vector getImageSparseMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info ) const; - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - getQueue2( VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 const & queueInfo ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + getQueue2( VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 const & queueInfo ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; VULKAN_HPP_NODISCARD - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type createSamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; - VULKAN_HPP_NODISCARD - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType< + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DescriptorUpdateTemplate>::Type createDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; @@ -3882,9 +3927,10 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_VERSION_1_2 === - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - createRenderPass2( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createRenderPass2( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result waitSemaphores( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo & waitInfo, uint64_t timeout ) const; @@ -3902,7 +3948,7 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_VERSION_1_3 === VULKAN_HPP_NODISCARD - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type createPrivateDataSlot( VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; @@ -3936,7 +3982,7 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_KHR_swapchain === VULKAN_HPP_NODISCARD - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type createSwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; @@ -3951,13 +3997,13 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_KHR_display_swapchain === - VULKAN_HPP_NODISCARD - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType>::Type + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType< + std::vector>::Type createSharedSwapchainsKHR( VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, VULKAN_HPP_NAMESPACE::Optional 
allocator = nullptr ) const; VULKAN_HPP_NODISCARD - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type createSharedSwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; @@ -3971,26 +4017,27 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_KHR_video_queue === VULKAN_HPP_NODISCARD - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type createVideoSessionKHR( VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; - VULKAN_HPP_NODISCARD - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType< + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::VideoSessionParametersKHR>::Type createVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; //=== VK_NVX_binary_import === - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - createCuModuleNVX( VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createCuModuleNVX( VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; VULKAN_HPP_NODISCARD - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type createCuFunctionNVX( VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; @@ -4036,8 +4083,8 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_KHR_descriptor_update_template === - VULKAN_HPP_NODISCARD - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType< + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DescriptorUpdateTemplate>::Type createDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; @@ -4050,15 +4097,17 @@ namespace VULKAN_HPP_NAMESPACE void displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT & displayPowerInfo ) const; - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - registerEventEXT( VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT const & deviceEventInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + registerEventEXT( VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT const & deviceEventInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; - 
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DisplayKHR const & display, - VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT const & displayEventInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DisplayKHR const & display, + VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT const & displayEventInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; //=== VK_EXT_hdr_metadata === @@ -4068,9 +4117,10 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_KHR_create_renderpass2 === - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - createRenderPass2KHR( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createRenderPass2KHR( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; # if defined( VK_USE_PLATFORM_WIN32_KHR ) @@ -4117,17 +4167,18 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_AMDX_shader_enqueue === VULKAN_HPP_NODISCARD - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType>::Type + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType>::Type createExecutionGraphPipelinesAMDX( VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - createExecutionGraphPipelineAMDX( - VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, - VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createExecutionGraphPipelineAMDX( + VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, + VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; # endif /*VK_ENABLE_BETA_EXTENSIONS*/ //=== VK_KHR_get_memory_requirements2 === @@ -4151,8 +4202,8 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_KHR_acceleration_structure === - VULKAN_HPP_NODISCARD - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType< + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::AccelerationStructureKHR>::Type createAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; @@ -4201,24 +4252,25 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_KHR_ray_tracing_pipeline === VULKAN_HPP_NODISCARD - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType>::Type + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType>::Type createRayTracingPipelinesKHR( 
VULKAN_HPP_NAMESPACE::Optional const & deferredOperation, VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - createRayTracingPipelineKHR( - VULKAN_HPP_NAMESPACE::Optional const & deferredOperation, - VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, - VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createRayTracingPipelineKHR( + VULKAN_HPP_NAMESPACE::Optional const & deferredOperation, + VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, + VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; //=== VK_KHR_sampler_ycbcr_conversion === VULKAN_HPP_NODISCARD - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type createSamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; @@ -4236,15 +4288,15 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_EXT_validation_cache === VULKAN_HPP_NODISCARD - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type createValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; //=== VK_NV_ray_tracing === - VULKAN_HPP_NODISCARD - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType< + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::AccelerationStructureNV>::Type createAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; @@ -4260,12 +4312,13 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::ArrayProxy const & bindInfos ) const; VULKAN_HPP_NODISCARD - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType>::Type + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType>::Type createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type createRayTracingPipelineNV( VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const @@ -4305,8 +4358,8 @@ namespace VULKAN_HPP_NAMESPACE void uninitializePerformanceApiINTEL() const VULKAN_HPP_NOEXCEPT; - VULKAN_HPP_NODISCARD - 
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType< + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PerformanceConfigurationINTEL>::Type acquirePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL const & acquireInfo ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; @@ -4338,7 +4391,7 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_KHR_deferred_host_operations === VULKAN_HPP_NODISCARD - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type createDeferredOperationKHR( VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; @@ -4382,8 +4435,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain getGeneratedCommandsMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV & info ) const VULKAN_HPP_NOEXCEPT; - VULKAN_HPP_NODISCARD - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType< + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::IndirectCommandsLayoutNV>::Type createIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; @@ -4391,7 +4444,7 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_EXT_private_data === VULKAN_HPP_NODISCARD - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type createPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; @@ -4422,13 +4475,13 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_NV_cuda_kernel_launch === VULKAN_HPP_NODISCARD - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type createCudaModuleNV( VULKAN_HPP_NAMESPACE::CudaModuleCreateInfoNV const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; VULKAN_HPP_NODISCARD - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type createCudaFunctionNV( VULKAN_HPP_NAMESPACE::CudaFunctionCreateInfoNV const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; @@ -4493,8 +4546,8 @@ namespace VULKAN_HPP_NAMESPACE # if defined( VK_USE_PLATFORM_FUCHSIA ) //=== VK_FUCHSIA_buffer_collection === - VULKAN_HPP_NODISCARD - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType< + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::BufferCollectionFUCHSIA>::Type createBufferCollectionFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; @@ -4511,9 +4564,10 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_EXT_opacity_micromap === - VULKAN_HPP_NODISCARD 
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - createMicromapEXT( VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createMicromapEXT( VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result @@ -4592,7 +4646,7 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_NV_optical_flow === VULKAN_HPP_NODISCARD - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type createOpticalFlowSessionNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; @@ -4616,19 +4670,20 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_EXT_shader_object === VULKAN_HPP_NODISCARD - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType>::Type + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType>::Type createShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - createShaderEXT( VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT const & createInfo, - VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const + VULKAN_HPP_NODISCARD + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + createShaderEXT( VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT const & createInfo, + VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; //=== VK_KHR_pipeline_binary === - VULKAN_HPP_NODISCARD - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType>::Type + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType< + std::vector>::Type createPipelineBinariesKHR( VULKAN_HPP_NAMESPACE::PipelineBinaryCreateInfoKHR const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const; @@ -4673,22 +4728,22 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain getGeneratedCommandsMemoryRequirementsEXT( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoEXT & info ) const VULKAN_HPP_NOEXCEPT; - VULKAN_HPP_NODISCARD - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType< + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::IndirectCommandsLayoutEXT>::Type createIndirectCommandsLayoutEXT( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoEXT const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; - VULKAN_HPP_NODISCARD - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType< + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::IndirectExecutionSetEXT>::Type createIndirectExecutionSetEXT( VULKAN_HPP_NAMESPACE::IndirectExecutionSetCreateInfoEXT const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; private: 
- VULKAN_HPP_NAMESPACE::Device m_device = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - std::unique_ptr m_dispatcher; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + std::unique_ptr m_dispatcher; }; class AccelerationStructureKHR @@ -4790,7 +4845,7 @@ namespace VULKAN_HPP_NAMESPACE return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -4805,10 +4860,10 @@ namespace VULKAN_HPP_NAMESPACE } private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::AccelerationStructureKHR m_accelerationStructure = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureKHR m_accelerationStructure = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; class AccelerationStructureNV @@ -4910,7 +4965,7 @@ namespace VULKAN_HPP_NAMESPACE return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -4933,10 +4988,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD DataType getHandle() const; private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::AccelerationStructureNV m_accelerationStructure = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::AccelerationStructureNV m_accelerationStructure = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; class Buffer @@ -5037,7 +5092,7 @@ namespace VULKAN_HPP_NAMESPACE return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -5058,10 +5113,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements getMemoryRequirements() const VULKAN_HPP_NOEXCEPT; private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::Buffer m_buffer = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::Buffer m_buffer = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + 
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; # if defined( VK_USE_PLATFORM_FUCHSIA ) @@ -5164,7 +5219,7 @@ namespace VULKAN_HPP_NAMESPACE return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -5187,10 +5242,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA getProperties() const; private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA m_collection = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA m_collection = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; # endif /*VK_USE_PLATFORM_FUCHSIA*/ @@ -5292,7 +5347,7 @@ namespace VULKAN_HPP_NAMESPACE return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -5307,10 +5362,10 @@ namespace VULKAN_HPP_NAMESPACE } private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::BufferView m_bufferView = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::BufferView m_bufferView = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; class CommandPool @@ -5411,7 +5466,7 @@ namespace VULKAN_HPP_NAMESPACE return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -5438,10 +5493,10 @@ namespace VULKAN_HPP_NAMESPACE void trimKHR( VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::CommandPool m_commandPool = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::CommandPool m_commandPool = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; class CommandBuffer @@ -5528,7 +5583,7 @@ namespace VULKAN_HPP_NAMESPACE return m_device; } - 
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -6500,10 +6555,10 @@ namespace VULKAN_HPP_NAMESPACE const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoEXT & generatedCommandsInfo ) const VULKAN_HPP_NOEXCEPT; private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::CommandPool m_commandPool = {}; - VULKAN_HPP_NAMESPACE::CommandBuffer m_commandBuffer = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::CommandPool m_commandPool = {}; + VULKAN_HPP_NAMESPACE::CommandBuffer m_commandBuffer = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; class CommandBuffers : public std::vector @@ -6630,7 +6685,7 @@ namespace VULKAN_HPP_NAMESPACE return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -6645,10 +6700,10 @@ namespace VULKAN_HPP_NAMESPACE } private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::CuFunctionNVX m_function = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::CuFunctionNVX m_function = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; class CuModuleNVX @@ -6749,7 +6804,7 @@ namespace VULKAN_HPP_NAMESPACE return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -6764,10 +6819,10 @@ namespace VULKAN_HPP_NAMESPACE } private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::CuModuleNVX m_module = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::CuModuleNVX m_module = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; # if defined( VK_ENABLE_BETA_EXTENSIONS ) @@ -6869,7 +6924,7 @@ namespace VULKAN_HPP_NAMESPACE return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -6884,10 +6939,10 @@ namespace VULKAN_HPP_NAMESPACE } private: - 
VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::CudaFunctionNV m_function = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::CudaFunctionNV m_function = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; # endif /*VK_ENABLE_BETA_EXTENSIONS*/ @@ -6990,7 +7045,7 @@ namespace VULKAN_HPP_NAMESPACE return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -7009,10 +7064,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD std::vector getCache() const; private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::CudaModuleNV m_module = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::CudaModuleNV m_module = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; # endif /*VK_ENABLE_BETA_EXTENSIONS*/ @@ -7115,7 +7170,7 @@ namespace VULKAN_HPP_NAMESPACE return m_instance; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::InstanceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -7130,10 +7185,10 @@ namespace VULKAN_HPP_NAMESPACE } private: - VULKAN_HPP_NAMESPACE::Instance m_instance = {}; - VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT m_callback = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Instance m_instance = {}; + VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT m_callback = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::InstanceDispatcher const * m_dispatcher = nullptr; }; class DebugUtilsMessengerEXT @@ -7235,7 +7290,7 @@ namespace VULKAN_HPP_NAMESPACE return m_instance; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::InstanceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -7250,10 +7305,10 @@ namespace VULKAN_HPP_NAMESPACE } private: - VULKAN_HPP_NAMESPACE::Instance m_instance = {}; - VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT m_messenger = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Instance m_instance = {}; + 
VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT m_messenger = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::InstanceDispatcher const * m_dispatcher = nullptr; }; class DeferredOperationKHR @@ -7354,7 +7409,7 @@ namespace VULKAN_HPP_NAMESPACE return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -7377,10 +7432,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result join() const; private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::DeferredOperationKHR m_operation = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::DeferredOperationKHR m_operation = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; class DescriptorPool @@ -7482,7 +7537,7 @@ namespace VULKAN_HPP_NAMESPACE return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -7501,10 +7556,10 @@ namespace VULKAN_HPP_NAMESPACE void reset( VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::DescriptorPool m_descriptorPool = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::DescriptorPool m_descriptorPool = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; class DescriptorSet @@ -7593,7 +7648,7 @@ namespace VULKAN_HPP_NAMESPACE return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -7622,10 +7677,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD void * getHostMappingVALVE() const VULKAN_HPP_NOEXCEPT; private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::DescriptorPool m_descriptorPool = {}; - VULKAN_HPP_NAMESPACE::DescriptorSet m_descriptorSet = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::DescriptorPool m_descriptorPool = {}; + VULKAN_HPP_NAMESPACE::DescriptorSet m_descriptorSet = {}; + 
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; class DescriptorSets : public std::vector @@ -7753,7 +7808,7 @@ namespace VULKAN_HPP_NAMESPACE return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -7774,10 +7829,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DeviceSize getBindingOffsetEXT( uint32_t binding ) const VULKAN_HPP_NOEXCEPT; private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::DescriptorSetLayout m_descriptorSetLayout = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::DescriptorSetLayout m_descriptorSetLayout = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; class DescriptorUpdateTemplate @@ -7879,7 +7934,7 @@ namespace VULKAN_HPP_NAMESPACE return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -7894,10 +7949,10 @@ namespace VULKAN_HPP_NAMESPACE } private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate m_descriptorUpdateTemplate = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate m_descriptorUpdateTemplate = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; class DeviceMemory @@ -7998,7 +8053,7 @@ namespace VULKAN_HPP_NAMESPACE return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -8033,10 +8088,10 @@ namespace VULKAN_HPP_NAMESPACE void setPriorityEXT( float priority ) const VULKAN_HPP_NOEXCEPT; private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::DeviceMemory m_memory = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::DeviceMemory m_memory = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; class DisplayKHR @@ -8143,7 +8198,7 @@ namespace VULKAN_HPP_NAMESPACE return m_physicalDevice; } - 
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::InstanceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -8161,7 +8216,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD std::vector getModeProperties() const; VULKAN_HPP_NODISCARD - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type createMode( VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator = nullptr ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT; @@ -8176,9 +8231,9 @@ namespace VULKAN_HPP_NAMESPACE # endif /*VK_USE_PLATFORM_WIN32_KHR*/ private: - VULKAN_HPP_NAMESPACE::PhysicalDevice m_physicalDevice = {}; - VULKAN_HPP_NAMESPACE::DisplayKHR m_display = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::PhysicalDevice m_physicalDevice = {}; + VULKAN_HPP_NAMESPACE::DisplayKHR m_display = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::InstanceDispatcher const * m_dispatcher = nullptr; }; class DisplayKHRs : public std::vector @@ -8291,7 +8346,7 @@ namespace VULKAN_HPP_NAMESPACE return VULKAN_HPP_NAMESPACE::exchange( m_displayModeKHR, nullptr ); } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::InstanceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -8309,9 +8364,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR getDisplayPlaneCapabilities( uint32_t planeIndex ) const; private: - VULKAN_HPP_NAMESPACE::PhysicalDevice m_physicalDevice = {}; - VULKAN_HPP_NAMESPACE::DisplayModeKHR m_displayModeKHR = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::PhysicalDevice m_physicalDevice = {}; + VULKAN_HPP_NAMESPACE::DisplayModeKHR m_displayModeKHR = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::InstanceDispatcher const * m_dispatcher = nullptr; }; class Event @@ -8412,7 +8467,7 @@ namespace VULKAN_HPP_NAMESPACE return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -8435,10 +8490,10 @@ namespace VULKAN_HPP_NAMESPACE void reset() const; private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::Event m_event = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::Event m_event = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; class Fence @@ -8558,7 +8613,7 @@ namespace VULKAN_HPP_NAMESPACE return m_device; } - 
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -8577,10 +8632,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result getStatus() const; private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::Fence m_fence = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::Fence m_fence = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; class Framebuffer @@ -8681,7 +8736,7 @@ namespace VULKAN_HPP_NAMESPACE return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -8700,10 +8755,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD std::vector getTilePropertiesQCOM() const; private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::Framebuffer m_framebuffer = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::Framebuffer m_framebuffer = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; class Image @@ -8804,7 +8859,7 @@ namespace VULKAN_HPP_NAMESPACE return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -8852,10 +8907,10 @@ namespace VULKAN_HPP_NAMESPACE getSubresourceLayout2KHR( const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource ) const VULKAN_HPP_NOEXCEPT; private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::Image m_image = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::Image m_image = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; class ImageView @@ -8956,7 +9011,7 @@ namespace VULKAN_HPP_NAMESPACE return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -8975,10 
+9030,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX getAddressNVX() const; private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::ImageView m_imageView = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::ImageView m_imageView = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; class IndirectCommandsLayoutEXT @@ -9080,7 +9135,7 @@ namespace VULKAN_HPP_NAMESPACE return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -9095,10 +9150,10 @@ namespace VULKAN_HPP_NAMESPACE } private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT m_indirectCommandsLayout = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT m_indirectCommandsLayout = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; class IndirectCommandsLayoutNV @@ -9200,7 +9255,7 @@ namespace VULKAN_HPP_NAMESPACE return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -9215,10 +9270,10 @@ namespace VULKAN_HPP_NAMESPACE } private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV m_indirectCommandsLayout = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV m_indirectCommandsLayout = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; class IndirectExecutionSetEXT @@ -9320,7 +9375,7 @@ namespace VULKAN_HPP_NAMESPACE return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -9343,10 +9398,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NOEXCEPT; private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT m_indirectExecutionSet = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - 
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT m_indirectExecutionSet = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; class MicromapEXT @@ -9447,7 +9502,7 @@ namespace VULKAN_HPP_NAMESPACE return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -9462,10 +9517,10 @@ namespace VULKAN_HPP_NAMESPACE } private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::MicromapEXT m_micromap = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::MicromapEXT m_micromap = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; class OpticalFlowSessionNV @@ -9567,7 +9622,7 @@ namespace VULKAN_HPP_NAMESPACE return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -9588,10 +9643,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::ImageLayout layout ) const; private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV m_session = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV m_session = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; class PerformanceConfigurationINTEL @@ -9682,7 +9737,7 @@ namespace VULKAN_HPP_NAMESPACE return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -9696,9 +9751,9 @@ namespace VULKAN_HPP_NAMESPACE } private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL m_configuration = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL m_configuration = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; class PipelineCache @@ -9800,7 +9855,7 @@ namespace VULKAN_HPP_NAMESPACE return m_device; } - 
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -9821,10 +9876,10 @@ namespace VULKAN_HPP_NAMESPACE void merge( VULKAN_HPP_NAMESPACE::ArrayProxy const & srcCaches ) const; private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::PipelineCache m_pipelineCache = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::PipelineCache m_pipelineCache = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; class Pipeline @@ -9980,7 +10035,7 @@ namespace VULKAN_HPP_NAMESPACE return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -10037,11 +10092,11 @@ namespace VULKAN_HPP_NAMESPACE void compileDeferredNV( uint32_t shader ) const; private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::Pipeline m_pipeline = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::Result m_constructorSuccessCode = VULKAN_HPP_NAMESPACE::Result::eErrorUnknown; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::Pipeline m_pipeline = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::Result m_constructorSuccessCode = VULKAN_HPP_NAMESPACE::Result::eErrorUnknown; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; class Pipelines : public std::vector @@ -10216,7 +10271,7 @@ namespace VULKAN_HPP_NAMESPACE return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -10232,11 +10287,11 @@ namespace VULKAN_HPP_NAMESPACE } private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::PipelineBinaryKHR m_pipelineBinary = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::Result m_constructorSuccessCode = VULKAN_HPP_NAMESPACE::Result::eErrorUnknown; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::PipelineBinaryKHR m_pipelineBinary = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::Result m_constructorSuccessCode = VULKAN_HPP_NAMESPACE::Result::eErrorUnknown; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; class PipelineBinaryKHRs : 
public std::vector @@ -10365,7 +10420,7 @@ namespace VULKAN_HPP_NAMESPACE return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -10380,10 +10435,10 @@ namespace VULKAN_HPP_NAMESPACE } private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::PipelineLayout m_pipelineLayout = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::PipelineLayout m_pipelineLayout = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; class PrivateDataSlot @@ -10485,7 +10540,7 @@ namespace VULKAN_HPP_NAMESPACE return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -10500,10 +10555,10 @@ namespace VULKAN_HPP_NAMESPACE } private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::PrivateDataSlot m_privateDataSlot = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::PrivateDataSlot m_privateDataSlot = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; class QueryPool @@ -10604,7 +10659,7 @@ namespace VULKAN_HPP_NAMESPACE return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -10644,10 +10699,10 @@ namespace VULKAN_HPP_NAMESPACE void resetEXT( uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT; private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::QueryPool m_queryPool = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::QueryPool m_queryPool = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; class Queue @@ -10735,7 +10790,7 @@ namespace VULKAN_HPP_NAMESPACE return VULKAN_HPP_NAMESPACE::exchange( m_queue, nullptr ); } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() 
== VK_HEADER_VERSION ); return m_dispatcher; @@ -10794,8 +10849,8 @@ namespace VULKAN_HPP_NAMESPACE void notifyOutOfBandNV( const VULKAN_HPP_NAMESPACE::OutOfBandQueueTypeInfoNV & queueTypeInfo ) const VULKAN_HPP_NOEXCEPT; private: - VULKAN_HPP_NAMESPACE::Queue m_queue = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Queue m_queue = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; class RenderPass @@ -10905,7 +10960,7 @@ namespace VULKAN_HPP_NAMESPACE return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -10928,10 +10983,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Extent2D getSubpassShadingMaxWorkgroupSizeHUAWEI() const; private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::RenderPass m_renderPass = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::RenderPass m_renderPass = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; class Sampler @@ -11032,7 +11087,7 @@ namespace VULKAN_HPP_NAMESPACE return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -11047,10 +11102,10 @@ namespace VULKAN_HPP_NAMESPACE } private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::Sampler m_sampler = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::Sampler m_sampler = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; class SamplerYcbcrConversion @@ -11152,7 +11207,7 @@ namespace VULKAN_HPP_NAMESPACE return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -11167,10 +11222,10 @@ namespace VULKAN_HPP_NAMESPACE } private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion m_ycbcrConversion = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion m_ycbcrConversion = {}; + const 
VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; class Semaphore @@ -11271,7 +11326,7 @@ namespace VULKAN_HPP_NAMESPACE return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -11294,10 +11349,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD uint64_t getCounterValueKHR() const; private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::Semaphore m_semaphore = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::Semaphore m_semaphore = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; class ShaderEXT @@ -11409,7 +11464,7 @@ namespace VULKAN_HPP_NAMESPACE return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -11429,11 +11484,11 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD std::vector getBinaryData() const; private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::ShaderEXT m_shader = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::Result m_constructorSuccessCode = VULKAN_HPP_NAMESPACE::Result::eErrorUnknown; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::ShaderEXT m_shader = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::Result m_constructorSuccessCode = VULKAN_HPP_NAMESPACE::Result::eErrorUnknown; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; class ShaderEXTs : public std::vector @@ -11561,7 +11616,7 @@ namespace VULKAN_HPP_NAMESPACE return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -11580,10 +11635,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT getIdentifierEXT() const VULKAN_HPP_NOEXCEPT; private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::ShaderModule m_shaderModule = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::ShaderModule m_shaderModule = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + 
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; class SurfaceKHR @@ -11836,7 +11891,7 @@ namespace VULKAN_HPP_NAMESPACE return m_instance; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::InstanceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -11851,10 +11906,10 @@ namespace VULKAN_HPP_NAMESPACE } private: - VULKAN_HPP_NAMESPACE::Instance m_instance = {}; - VULKAN_HPP_NAMESPACE::SurfaceKHR m_surface = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Instance m_instance = {}; + VULKAN_HPP_NAMESPACE::SurfaceKHR m_surface = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::InstanceDispatcher const * m_dispatcher = nullptr; }; class SwapchainKHR @@ -11955,7 +12010,7 @@ namespace VULKAN_HPP_NAMESPACE return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -12019,10 +12074,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD std::vector getLatencyTimingsNV() const; private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::SwapchainKHR m_swapchain = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::SwapchainKHR m_swapchain = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; class SwapchainKHRs : public std::vector @@ -12151,7 +12206,7 @@ namespace VULKAN_HPP_NAMESPACE return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -12172,10 +12227,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD std::vector getData() const; private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::ValidationCacheEXT m_validationCache = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::ValidationCacheEXT m_validationCache = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; class VideoSessionKHR @@ -12277,7 +12332,7 @@ namespace VULKAN_HPP_NAMESPACE return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + 
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -12298,10 +12353,10 @@ namespace VULKAN_HPP_NAMESPACE void bindMemory( VULKAN_HPP_NAMESPACE::ArrayProxy const & bindSessionMemoryInfos ) const; private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::VideoSessionKHR m_videoSession = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::VideoSessionKHR m_videoSession = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; class VideoSessionParametersKHR @@ -12403,7 +12458,7 @@ namespace VULKAN_HPP_NAMESPACE return m_device; } - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * getDispatcher() const { VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION ); return m_dispatcher; @@ -12422,10 +12477,10 @@ namespace VULKAN_HPP_NAMESPACE void update( const VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR & updateInfo ) const; private: - VULKAN_HPP_NAMESPACE::Device m_device = {}; - VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR m_videoSessionParameters = {}; - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr; + VULKAN_HPP_NAMESPACE::Device m_device = {}; + VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR m_videoSessionParameters = {}; + const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; //=========================== @@ -12434,9 +12489,9 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_VERSION_1_0 === - VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - Context::createInstance( VULKAN_HPP_NAMESPACE::InstanceCreateInfo const & createInfo, + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Context::createInstance( VULKAN_HPP_NAMESPACE::InstanceCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT { VULKAN_HPP_NAMESPACE::Instance instance; @@ -12456,8 +12511,8 @@ namespace VULKAN_HPP_NAMESPACE return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance( *this, *reinterpret_cast( &instance ), allocator ); } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType>::Type + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType< + std::vector>::Type Instance::enumeratePhysicalDevices() const { std::vector physicalDevices; @@ -12601,7 +12656,7 @@ namespace VULKAN_HPP_NAMESPACE } VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type 
PhysicalDevice::createDevice( VULKAN_HPP_NAMESPACE::DeviceCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT @@ -12738,7 +12793,7 @@ namespace VULKAN_HPP_NAMESPACE } VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type Device::getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT { VULKAN_HPP_NAMESPACE::Queue queue; @@ -12773,9 +12828,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::waitIdle" ); } - VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - Device::allocateMemory( VULKAN_HPP_NAMESPACE::MemoryAllocateInfo const & allocateInfo, + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::allocateMemory( VULKAN_HPP_NAMESPACE::MemoryAllocateInfo const & allocateInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT { VULKAN_HPP_NAMESPACE::DeviceMemory memory; @@ -12969,7 +13024,7 @@ namespace VULKAN_HPP_NAMESPACE } VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type Device::createFence( VULKAN_HPP_NAMESPACE::FenceCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT { @@ -13025,9 +13080,9 @@ namespace VULKAN_HPP_NAMESPACE return static_cast( result ); } - VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - Device::createSemaphore( VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo const & createInfo, + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createSemaphore( VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT { VULKAN_HPP_NAMESPACE::Semaphore semaphore; @@ -13049,7 +13104,7 @@ namespace VULKAN_HPP_NAMESPACE } VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type Device::createEvent( VULKAN_HPP_NAMESPACE::EventCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT { @@ -13101,9 +13156,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Event::reset" ); } - VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - Device::createQueryPool( VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo const & createInfo, + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createQueryPool( VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT { VULKAN_HPP_NAMESPACE::QueryPool queryPool; @@ -13170,7 +13225,7 @@ namespace VULKAN_HPP_NAMESPACE } VULKAN_HPP_NODISCARD - 
VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type Device::createBuffer( VULKAN_HPP_NAMESPACE::BufferCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT { @@ -13192,9 +13247,9 @@ namespace VULKAN_HPP_NAMESPACE return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Buffer( *this, *reinterpret_cast( &buffer ), allocator ); } - VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - Device::createBufferView( VULKAN_HPP_NAMESPACE::BufferViewCreateInfo const & createInfo, + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createBufferView( VULKAN_HPP_NAMESPACE::BufferViewCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT { @@ -13217,7 +13272,7 @@ namespace VULKAN_HPP_NAMESPACE } VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type Device::createImage( VULKAN_HPP_NAMESPACE::ImageCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT { @@ -13253,9 +13308,9 @@ namespace VULKAN_HPP_NAMESPACE return layout; } - VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - Device::createImageView( VULKAN_HPP_NAMESPACE::ImageViewCreateInfo const & createInfo, + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createImageView( VULKAN_HPP_NAMESPACE::ImageViewCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT { VULKAN_HPP_NAMESPACE::ImageView view; @@ -13276,9 +13331,9 @@ namespace VULKAN_HPP_NAMESPACE return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::ImageView( *this, *reinterpret_cast( &view ), allocator ); } - VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - Device::createShaderModule( VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo const & createInfo, + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createShaderModule( VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT { @@ -13300,9 +13355,9 @@ namespace VULKAN_HPP_NAMESPACE return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::ShaderModule( *this, *reinterpret_cast( &shaderModule ), allocator ); } - VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - Device::createPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo const & createInfo, + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT { @@ -13364,7 +13419,7 @@ namespace VULKAN_HPP_NAMESPACE } VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE - 
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType>::Type + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType>::Type Device::createGraphicsPipelines( VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, @@ -13396,9 +13451,9 @@ namespace VULKAN_HPP_NAMESPACE return pipelinesRAII; } - VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - Device::createGraphicsPipeline( + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createGraphicsPipeline( VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT @@ -13424,7 +13479,7 @@ namespace VULKAN_HPP_NAMESPACE } VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType>::Type + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType>::Type Device::createComputePipelines( VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, @@ -13456,8 +13511,8 @@ namespace VULKAN_HPP_NAMESPACE return pipelinesRAII; } - VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type Device::createComputePipeline( VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const @@ -13483,9 +13538,9 @@ namespace VULKAN_HPP_NAMESPACE return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Pipeline( *this, *reinterpret_cast( &pipeline ), allocator, result ); } - VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - Device::createPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo const & createInfo, + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT { @@ -13507,9 +13562,9 @@ namespace VULKAN_HPP_NAMESPACE return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineLayout( *this, *reinterpret_cast( &pipelineLayout ), allocator ); } - VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - Device::createSampler( VULKAN_HPP_NAMESPACE::SamplerCreateInfo const & createInfo, + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createSampler( VULKAN_HPP_NAMESPACE::SamplerCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT { VULKAN_HPP_NAMESPACE::Sampler sampler; @@ -13531,7 +13586,7 @@ namespace VULKAN_HPP_NAMESPACE } VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type Device::createDescriptorSetLayout( 
VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT @@ -13554,9 +13609,9 @@ namespace VULKAN_HPP_NAMESPACE return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DescriptorSetLayout( *this, *reinterpret_cast( &setLayout ), allocator ); } - VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - Device::createDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo const & createInfo, + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT { @@ -13589,8 +13644,8 @@ namespace VULKAN_HPP_NAMESPACE static_cast( m_device ), static_cast( m_descriptorPool ), static_cast( flags ) ); } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType>::Type + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType< + std::vector>::Type Device::allocateDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo const & allocateInfo ) const { std::vector descriptorSets( allocateInfo.descriptorSetCount ); @@ -13630,9 +13685,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( descriptorCopies.data() ) ); } - VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - Device::createFramebuffer( VULKAN_HPP_NAMESPACE::FramebufferCreateInfo const & createInfo, + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createFramebuffer( VULKAN_HPP_NAMESPACE::FramebufferCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT { @@ -13654,9 +13709,9 @@ namespace VULKAN_HPP_NAMESPACE return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Framebuffer( *this, *reinterpret_cast( &framebuffer ), allocator ); } - VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - Device::createRenderPass( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo const & createInfo, + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createRenderPass( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT { @@ -13689,9 +13744,9 @@ namespace VULKAN_HPP_NAMESPACE return granularity; } - VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - Device::createCommandPool( VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo const & createInfo, + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createCommandPool( VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT { @@ -13722,8 +13777,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::CommandPool::reset" ); } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE - 
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType>::Type + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType< + std::vector>::Type Device::allocateCommandBuffers( VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo const & allocateInfo ) const { std::vector commandBuffers( allocateInfo.commandBufferCount ); @@ -14716,7 +14771,7 @@ namespace VULKAN_HPP_NAMESPACE } VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type Device::getQueue2( VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 const & queueInfo ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT { VULKAN_HPP_NAMESPACE::Queue queue; @@ -14727,7 +14782,7 @@ namespace VULKAN_HPP_NAMESPACE } VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type Device::createSamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT @@ -14752,7 +14807,7 @@ namespace VULKAN_HPP_NAMESPACE } VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type Device::createDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT @@ -14902,9 +14957,9 @@ namespace VULKAN_HPP_NAMESPACE stride ); } - VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - Device::createRenderPass2( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 const & createInfo, + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createRenderPass2( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT { @@ -15063,7 +15118,7 @@ namespace VULKAN_HPP_NAMESPACE } VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type Device::createPrivateDataSlot( VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT @@ -15583,9 +15638,9 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_KHR_swapchain === - VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - Device::createSwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR const & createInfo, + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createSwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT { @@ -15808,7 +15863,7 @@ namespace VULKAN_HPP_NAMESPACE } VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType>::Type + 
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType>::Type PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex ) const { std::vector displays; @@ -15873,9 +15928,9 @@ namespace VULKAN_HPP_NAMESPACE return properties; } - VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - DisplayKHR::createMode( VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR const & createInfo, + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + DisplayKHR::createMode( VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT { VULKAN_HPP_NAMESPACE::DisplayModeKHR mode; @@ -15913,9 +15968,9 @@ namespace VULKAN_HPP_NAMESPACE return capabilities; } - VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - Instance::createDisplayPlaneSurfaceKHR( VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR const & createInfo, + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Instance::createDisplayPlaneSurfaceKHR( VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT { @@ -15939,8 +15994,8 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_KHR_display_swapchain === - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType>::Type + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType< + std::vector>::Type Device::createSharedSwapchainsKHR( VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, VULKAN_HPP_NAMESPACE::Optional allocator ) const { @@ -15969,9 +16024,9 @@ namespace VULKAN_HPP_NAMESPACE return swapchainsRAII; } - VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - Device::createSharedSwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR const & createInfo, + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createSharedSwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT { @@ -15997,9 +16052,9 @@ namespace VULKAN_HPP_NAMESPACE # if defined( VK_USE_PLATFORM_XLIB_KHR ) //=== VK_KHR_xlib_surface === - VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - Instance::createXlibSurfaceKHR( VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR const & createInfo, + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Instance::createXlibSurfaceKHR( VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT { @@ -16037,9 +16092,9 @@ namespace VULKAN_HPP_NAMESPACE # if defined( VK_USE_PLATFORM_XCB_KHR ) //=== VK_KHR_xcb_surface === - VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - Instance::createXcbSurfaceKHR( VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR const & createInfo, + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + 
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Instance::createXcbSurfaceKHR( VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT { @@ -16077,9 +16132,9 @@ namespace VULKAN_HPP_NAMESPACE # if defined( VK_USE_PLATFORM_WAYLAND_KHR ) //=== VK_KHR_wayland_surface === - VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - Instance::createWaylandSurfaceKHR( VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR const & createInfo, + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Instance::createWaylandSurfaceKHR( VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT { @@ -16117,9 +16172,9 @@ namespace VULKAN_HPP_NAMESPACE # if defined( VK_USE_PLATFORM_ANDROID_KHR ) //=== VK_KHR_android_surface === - VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - Instance::createAndroidSurfaceKHR( VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR const & createInfo, + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Instance::createAndroidSurfaceKHR( VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT { @@ -16145,9 +16200,9 @@ namespace VULKAN_HPP_NAMESPACE # if defined( VK_USE_PLATFORM_WIN32_KHR ) //=== VK_KHR_win32_surface === - VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - Instance::createWin32SurfaceKHR( VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR const & createInfo, + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Instance::createWin32SurfaceKHR( VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT { @@ -16184,7 +16239,7 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_EXT_debug_report === VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type Instance::createDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT @@ -16343,7 +16398,7 @@ namespace VULKAN_HPP_NAMESPACE } VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type Device::createVideoSessionKHR( VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT @@ -16410,8 +16465,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::VideoSessionKHR::bindMemory" ); } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType< + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::VideoSessionParametersKHR>::Type Device::createVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT @@ -16595,9 +16650,9 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_NVX_binary_import === - VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - Device::createCuModuleNVX( VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX const & createInfo, + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createCuModuleNVX( VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT { @@ -16619,9 +16674,9 @@ namespace VULKAN_HPP_NAMESPACE return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CuModuleNVX( *this, *reinterpret_cast( &module ), allocator ); } - VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - Device::createCuFunctionNVX( VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX const & createInfo, + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createCuFunctionNVX( VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT { @@ -16774,9 +16829,9 @@ namespace VULKAN_HPP_NAMESPACE # if defined( VK_USE_PLATFORM_GGP ) //=== VK_GGP_stream_descriptor_surface === - VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - Instance::createStreamDescriptorSurfaceGGP( VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP const & createInfo, + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Instance::createStreamDescriptorSurfaceGGP( VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT { @@ -17108,9 +17163,9 @@ namespace VULKAN_HPP_NAMESPACE # if defined( VK_USE_PLATFORM_VI_NN ) //=== VK_NN_vi_surface === - VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - Instance::createViSurfaceNN( VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN const & createInfo, + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Instance::createViSurfaceNN( VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT { @@ -17381,7 +17436,7 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_KHR_descriptor_update_template === VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type Device::createDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT @@ -17457,9 +17512,9 @@ namespace VULKAN_HPP_NAMESPACE 
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::acquireXlibDisplayEXT" ); } - VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - PhysicalDevice::getRandROutputDisplayEXT( Display & dpy, RROutput rrOutput ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + PhysicalDevice::getRandROutputDisplayEXT( Display & dpy, RROutput rrOutput ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT { VULKAN_HPP_NAMESPACE::DisplayKHR display; VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkGetRandROutputDisplayEXT( @@ -17508,7 +17563,7 @@ namespace VULKAN_HPP_NAMESPACE } VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type Device::registerEventEXT( VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT const & deviceEventInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT @@ -17532,7 +17587,7 @@ namespace VULKAN_HPP_NAMESPACE } VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type Device::registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DisplayKHR const & display, VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT const & displayEventInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const @@ -17670,9 +17725,9 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_KHR_create_renderpass2 === - VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - Device::createRenderPass2KHR( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 const & createInfo, + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createRenderPass2KHR( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT { @@ -18099,9 +18154,9 @@ namespace VULKAN_HPP_NAMESPACE # if defined( VK_USE_PLATFORM_IOS_MVK ) //=== VK_MVK_ios_surface === - VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - Instance::createIOSSurfaceMVK( VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK const & createInfo, + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Instance::createIOSSurfaceMVK( VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT { @@ -18127,9 +18182,9 @@ namespace VULKAN_HPP_NAMESPACE # if defined( VK_USE_PLATFORM_MACOS_MVK ) //=== VK_MVK_macos_surface === - VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - Instance::createMacOSSurfaceMVK( VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK const & createInfo, + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Instance::createMacOSSurfaceMVK( VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const 
VULKAN_HPP_RAII_CREATE_NOEXCEPT { @@ -18217,7 +18272,7 @@ namespace VULKAN_HPP_NAMESPACE } VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type Instance::createDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT @@ -18307,7 +18362,7 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_AMDX_shader_enqueue === VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType>::Type + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType>::Type Device::createExecutionGraphPipelinesAMDX( VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, @@ -18339,9 +18394,9 @@ namespace VULKAN_HPP_NAMESPACE return pipelinesRAII; } - VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - Device::createExecutionGraphPipelineAMDX( + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createExecutionGraphPipelineAMDX( VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT @@ -18562,7 +18617,7 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_KHR_acceleration_structure === VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type Device::createAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT @@ -18905,7 +18960,7 @@ namespace VULKAN_HPP_NAMESPACE } VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType>::Type + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType>::Type Device::createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::Optional const & deferredOperation, VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, @@ -18940,9 +18995,9 @@ namespace VULKAN_HPP_NAMESPACE return pipelinesRAII; } - VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - Device::createRayTracingPipelineKHR( + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createRayTracingPipelineKHR( VULKAN_HPP_NAMESPACE::Optional const & deferredOperation, VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR const & createInfo, @@ -19089,7 +19144,7 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_KHR_sampler_ycbcr_conversion === VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type Device::createSamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT @@ -19166,7 +19221,7 @@ namespace VULKAN_HPP_NAMESPACE 
//=== VK_EXT_validation_cache === VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type Device::createValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT @@ -19268,7 +19323,7 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_NV_ray_tracing === VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type Device::createAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT @@ -19402,7 +19457,7 @@ namespace VULKAN_HPP_NAMESPACE } VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType>::Type + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType>::Type Device::createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, @@ -19434,9 +19489,9 @@ namespace VULKAN_HPP_NAMESPACE return pipelinesRAII; } - VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - Device::createRayTracingPipelineNV( + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createRayTracingPipelineNV( VULKAN_HPP_NAMESPACE::Optional const & pipelineCache, VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT @@ -19949,8 +20004,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceOverrideINTEL" ); } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType< + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PerformanceConfigurationINTEL>::Type Device::acquirePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL const & acquireInfo ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT { @@ -20008,9 +20063,9 @@ namespace VULKAN_HPP_NAMESPACE # if defined( VK_USE_PLATFORM_FUCHSIA ) //=== VK_FUCHSIA_imagepipe_surface === - VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - Instance::createImagePipeSurfaceFUCHSIA( VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA const & createInfo, + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Instance::createImagePipeSurfaceFUCHSIA( VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT { @@ -20036,9 +20091,9 @@ namespace VULKAN_HPP_NAMESPACE # if defined( VK_USE_PLATFORM_METAL_EXT ) //=== VK_EXT_metal_surface === - VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - Instance::createMetalSurfaceEXT( 
VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT const & createInfo, + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Instance::createMetalSurfaceEXT( VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT { @@ -20330,9 +20385,9 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_EXT_headless_surface === - VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - Instance::createHeadlessSurfaceEXT( VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT const & createInfo, + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Instance::createHeadlessSurfaceEXT( VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT { @@ -20555,7 +20610,7 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_KHR_deferred_host_operations === VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type Device::createDeferredOperationKHR( VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT { @@ -20889,7 +20944,7 @@ namespace VULKAN_HPP_NAMESPACE } VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type Device::createIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT @@ -20933,9 +20988,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::acquireDrmDisplayEXT" ); } - VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - PhysicalDevice::getDrmDisplayEXT( int32_t drmFd, uint32_t connectorId ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + PhysicalDevice::getDrmDisplayEXT( int32_t drmFd, uint32_t connectorId ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT { VULKAN_HPP_NAMESPACE::DisplayKHR display; VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkGetDrmDisplayEXT( @@ -20955,7 +21010,7 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_EXT_private_data === VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type Device::createPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT @@ -21134,9 +21189,9 @@ namespace VULKAN_HPP_NAMESPACE # if defined( VK_ENABLE_BETA_EXTENSIONS ) //=== VK_NV_cuda_kernel_launch === - VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - Device::createCudaModuleNV( VULKAN_HPP_NAMESPACE::CudaModuleCreateInfoNV const & createInfo, + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + 
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createCudaModuleNV( VULKAN_HPP_NAMESPACE::CudaModuleCreateInfoNV const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT { @@ -21185,9 +21240,9 @@ namespace VULKAN_HPP_NAMESPACE return cacheData; } - VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - Device::createCudaFunctionNV( VULKAN_HPP_NAMESPACE::CudaFunctionCreateInfoNV const & createInfo, + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createCudaFunctionNV( VULKAN_HPP_NAMESPACE::CudaFunctionCreateInfoNV const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT { @@ -21615,9 +21670,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::DisplayKHR::acquireWinrtNV" ); } - VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - PhysicalDevice::getWinrtDisplayNV( uint32_t deviceRelativeId ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + PhysicalDevice::getWinrtDisplayNV( uint32_t deviceRelativeId ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT { VULKAN_HPP_NAMESPACE::DisplayKHR display; VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkGetWinrtDisplayNV( @@ -21638,9 +21693,9 @@ namespace VULKAN_HPP_NAMESPACE # if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) //=== VK_EXT_directfb_surface === - VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - Instance::createDirectFBSurfaceEXT( VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT const & createInfo, + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Instance::createDirectFBSurfaceEXT( VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT { @@ -21759,7 +21814,7 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_FUCHSIA_buffer_collection === VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type Device::createBufferCollectionFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT @@ -21938,9 +21993,9 @@ namespace VULKAN_HPP_NAMESPACE # if defined( VK_USE_PLATFORM_SCREEN_QNX ) //=== VK_QNX_screen_surface === - VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - Instance::createScreenSurfaceQNX( VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX const & createInfo, + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Instance::createScreenSurfaceQNX( VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT { @@ -22031,9 +22086,9 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_EXT_opacity_micromap === - VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE 
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - Device::createMicromapEXT( VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT const & createInfo, + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createMicromapEXT( VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT { @@ -22878,7 +22933,7 @@ namespace VULKAN_HPP_NAMESPACE } VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type Device::createOpticalFlowSessionNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT @@ -23031,7 +23086,7 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_EXT_shader_object === VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType>::Type + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType>::Type Device::createShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & createInfos, VULKAN_HPP_NAMESPACE::Optional allocator ) const { @@ -23060,9 +23115,9 @@ namespace VULKAN_HPP_NAMESPACE return shadersRAII; } - VULKAN_HPP_NODISCARD - VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type - Device::createShaderEXT( VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT const & createInfo, + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type + Device::createShaderEXT( VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT { VULKAN_HPP_NAMESPACE::ShaderEXT shader; @@ -23145,8 +23200,8 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_KHR_pipeline_binary === - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType>::Type + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType< + std::vector>::Type Device::createPipelineBinariesKHR( VULKAN_HPP_NAMESPACE::PipelineBinaryCreateInfoKHR const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const { @@ -23628,8 +23683,8 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &generatedCommandsInfo ) ); } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType< + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::IndirectCommandsLayoutEXT>::Type Device::createIndirectCommandsLayoutEXT( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoEXT const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT @@ -23654,7 +23709,7 @@ namespace VULKAN_HPP_NAMESPACE } VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE - VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CreateReturnType::Type + VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type Device::createIndirectExecutionSetEXT( VULKAN_HPP_NAMESPACE::IndirectExecutionSetCreateInfoEXT const & createInfo, VULKAN_HPP_NAMESPACE::Optional allocator ) const VULKAN_HPP_RAII_CREATE_NOEXCEPT diff --git a/third_party/vulkan/vulkan_shared.hpp 
b/third_party/vulkan/vulkan_shared.hpp index fbb871b..7b96b6c 100644 --- a/third_party/vulkan/vulkan_shared.hpp +++ b/third_party/vulkan/vulkan_shared.hpp @@ -308,8 +308,8 @@ namespace VULKAN_HPP_NAMESPACE using BaseType::internalDestroy; }; - template - class SharedHandleTraits; + namespace detail + { // Silence the function cast warnings. # if defined( __GNUC__ ) && !defined( __clang__ ) && !defined( __INTEL_COMPILER ) @@ -317,150 +317,152 @@ namespace VULKAN_HPP_NAMESPACE # pragma GCC diagnostic ignored "-Wcast-function-type" # endif - template - class ObjectDestroyShared - { - public: - using DestructorType = typename SharedHandleTraits::DestructorType; - - template - using DestroyFunctionPointerType = - typename std::conditional::value, - void ( DestructorType::* )( HandleType, const AllocationCallbacks *, const Dispatcher & ) const, - void ( HandleType::* )( const AllocationCallbacks *, const Dispatcher & ) const>::type; - - using SelectorType = typename std::conditional::value, DestructorType, HandleType>::type; - - template - ObjectDestroyShared( Optional allocationCallbacks VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - const Dispatcher & dispatch VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) - : m_destroy( reinterpret_cast( static_cast>( &SelectorType::destroy ) ) ) - , m_dispatch( &dispatch ) - , m_allocationCallbacks( allocationCallbacks ) + template + class ObjectDestroyShared { - } + public: + using DestructorType = typename SharedHandleTraits::DestructorType; - public: - template - typename std::enable_if::value, void>::type destroy( DestructorType parent, HandleType handle ) const VULKAN_HPP_NOEXCEPT + template + using DestroyFunctionPointerType = + typename std::conditional::value, + void ( DestructorType::* )( HandleType, const AllocationCallbacks *, const Dispatcher & ) const, + void ( HandleType::* )( const AllocationCallbacks *, const Dispatcher & ) const>::type; + + using SelectorType = typename std::conditional::value, DestructorType, HandleType>::type; + + template + ObjectDestroyShared( Optional allocationCallbacks VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + const Dispatcher & dispatch VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) + : m_destroy( reinterpret_cast( static_cast>( &SelectorType::destroy ) ) ) + , m_dispatch( &dispatch ) + , m_allocationCallbacks( allocationCallbacks ) + { + } + + public: + template + typename std::enable_if::value, void>::type destroy( DestructorType parent, HandleType handle ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( m_destroy && m_dispatch ); + ( parent.*m_destroy )( handle, m_allocationCallbacks, *m_dispatch ); + } + + template + typename std::enable_if::value, void>::type destroy( HandleType handle ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( m_destroy && m_dispatch ); + ( handle.*m_destroy )( m_allocationCallbacks, *m_dispatch ); + } + + private: + DestroyFunctionPointerType m_destroy = nullptr; + const detail::DispatchLoaderBase * m_dispatch = nullptr; + Optional m_allocationCallbacks = nullptr; + }; + + template + class ObjectFreeShared { - VULKAN_HPP_ASSERT( m_destroy && m_dispatch ); - ( parent.*m_destroy )( handle, m_allocationCallbacks, *m_dispatch ); - } + public: + using DestructorType = typename SharedHandleTraits::DestructorType; - template - typename std::enable_if::value, void>::type destroy( HandleType handle ) const VULKAN_HPP_NOEXCEPT + template + using DestroyFunctionPointerType = void ( DestructorType::* )( HandleType, const AllocationCallbacks *, const Dispatcher & ) const; + + template + 
ObjectFreeShared( Optional allocationCallbacks VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, + const Dispatcher & dispatch VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) + : m_destroy( reinterpret_cast( static_cast>( &DestructorType::free ) ) ) + , m_dispatch( &dispatch ) + , m_allocationCallbacks( allocationCallbacks ) + { + } + + public: + void destroy( DestructorType parent, HandleType handle ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( m_destroy && m_dispatch ); + ( parent.*m_destroy )( handle, m_allocationCallbacks, *m_dispatch ); + } + + private: + DestroyFunctionPointerType m_destroy = nullptr; + const detail::DispatchLoaderBase * m_dispatch = nullptr; + Optional m_allocationCallbacks = nullptr; + }; + + template + class ObjectReleaseShared { - VULKAN_HPP_ASSERT( m_destroy && m_dispatch ); - ( handle.*m_destroy )( m_allocationCallbacks, *m_dispatch ); - } + public: + using DestructorType = typename SharedHandleTraits::DestructorType; - private: - DestroyFunctionPointerType m_destroy = nullptr; - const DispatchLoaderBase * m_dispatch = nullptr; - Optional m_allocationCallbacks = nullptr; - }; + template + using DestroyFunctionPointerType = void ( DestructorType::* )( HandleType, const Dispatcher & ) const; - template - class ObjectFreeShared - { - public: - using DestructorType = typename SharedHandleTraits::DestructorType; + template + ObjectReleaseShared( const Dispatcher & dispatch VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) + : m_destroy( reinterpret_cast( static_cast>( &DestructorType::release ) ) ) + , m_dispatch( &dispatch ) + { + } - template - using DestroyFunctionPointerType = void ( DestructorType::* )( HandleType, const AllocationCallbacks *, const Dispatcher & ) const; + public: + void destroy( DestructorType parent, HandleType handle ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( m_destroy && m_dispatch ); + ( parent.*m_destroy )( handle, *m_dispatch ); + } - template - ObjectFreeShared( Optional allocationCallbacks VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, - const Dispatcher & dispatch VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) - : m_destroy( reinterpret_cast( static_cast>( &DestructorType::free ) ) ) - , m_dispatch( &dispatch ) - , m_allocationCallbacks( allocationCallbacks ) + private: + DestroyFunctionPointerType m_destroy = nullptr; + const detail::DispatchLoaderBase * m_dispatch = nullptr; + }; + + template + class PoolFreeShared { - } + public: + using DestructorType = typename SharedHandleTraits::DestructorType; - public: - void destroy( DestructorType parent, HandleType handle ) const VULKAN_HPP_NOEXCEPT - { - VULKAN_HPP_ASSERT( m_destroy && m_dispatch ); - ( parent.*m_destroy )( handle, m_allocationCallbacks, *m_dispatch ); - } + using PoolTypeExport = PoolType; - private: - DestroyFunctionPointerType m_destroy = nullptr; - const DispatchLoaderBase * m_dispatch = nullptr; - Optional m_allocationCallbacks = nullptr; - }; + template + using ReturnType = decltype( std::declval().free( PoolType(), 0u, nullptr, Dispatcher() ) ); - template - class ObjectReleaseShared - { - public: - using DestructorType = typename SharedHandleTraits::DestructorType; + template + using DestroyFunctionPointerType = ReturnType ( DestructorType::* )( PoolType, uint32_t, const HandleType *, const Dispatcher & ) const; - template - using DestroyFunctionPointerType = void ( DestructorType::* )( HandleType, const Dispatcher & ) const; + PoolFreeShared() = default; - template - ObjectReleaseShared( const Dispatcher & dispatch VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) - : 
m_destroy( reinterpret_cast( static_cast>( &DestructorType::release ) ) ) - , m_dispatch( &dispatch ) - { - } + template + PoolFreeShared( SharedHandle pool, const Dispatcher & dispatch VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) + : m_destroy( reinterpret_cast( static_cast>( &DestructorType::free ) ) ) + , m_dispatch( &dispatch ) + , m_pool( std::move( pool ) ) + { + } - public: - void destroy( DestructorType parent, HandleType handle ) const VULKAN_HPP_NOEXCEPT - { - VULKAN_HPP_ASSERT( m_destroy && m_dispatch ); - ( parent.*m_destroy )( handle, *m_dispatch ); - } + public: + void destroy( DestructorType parent, HandleType handle ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( m_destroy && m_dispatch && m_pool ); + ( parent.*m_destroy )( m_pool.get(), 1u, &handle, *m_dispatch ); + } - private: - DestroyFunctionPointerType m_destroy = nullptr; - const DispatchLoaderBase * m_dispatch = nullptr; - }; - - template - class PoolFreeShared - { - public: - using DestructorType = typename SharedHandleTraits::DestructorType; - - using PoolTypeExport = PoolType; - - template - using ReturnType = decltype( std::declval().free( PoolType(), 0u, nullptr, Dispatcher() ) ); - - template - using DestroyFunctionPointerType = ReturnType ( DestructorType::* )( PoolType, uint32_t, const HandleType *, const Dispatcher & ) const; - - PoolFreeShared() = default; - - template - PoolFreeShared( SharedHandle pool, const Dispatcher & dispatch VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) - : m_destroy( reinterpret_cast( static_cast>( &DestructorType::free ) ) ) - , m_dispatch( &dispatch ) - , m_pool( std::move( pool ) ) - { - } - - public: - void destroy( DestructorType parent, HandleType handle ) const VULKAN_HPP_NOEXCEPT - { - VULKAN_HPP_ASSERT( m_destroy && m_dispatch && m_pool ); - ( parent.*m_destroy )( m_pool.get(), 1u, &handle, *m_dispatch ); - } - - private: - DestroyFunctionPointerType m_destroy = nullptr; - const DispatchLoaderBase * m_dispatch = nullptr; - SharedHandle m_pool{}; - }; + private: + DestroyFunctionPointerType m_destroy = nullptr; + const detail::DispatchLoaderBase * m_dispatch = nullptr; + SharedHandle m_pool{}; + }; # if defined( __GNUC__ ) && !defined( __clang__ ) && !defined( __INTEL_COMPILER ) # pragma GCC diagnostic pop # endif + } // namespace detail + //====================== //=== SHARED HANDLEs === //====================== @@ -471,7 +473,7 @@ namespace VULKAN_HPP_NAMESPACE { public: using DestructorType = NoDestructor; - using deleter = ObjectDestroyShared; + using deleter = detail::ObjectDestroyShared; }; using SharedInstance = SharedHandle; @@ -481,7 +483,7 @@ namespace VULKAN_HPP_NAMESPACE { public: using DestructorType = NoDestructor; - using deleter = ObjectDestroyShared; + using deleter = detail::ObjectDestroyShared; }; using SharedDevice = SharedHandle; @@ -491,7 +493,7 @@ namespace VULKAN_HPP_NAMESPACE { public: using DestructorType = Device; - using deleter = ObjectFreeShared; + using deleter = detail::ObjectFreeShared; }; using SharedDeviceMemory = SharedHandle; @@ -501,7 +503,7 @@ namespace VULKAN_HPP_NAMESPACE { public: using DestructorType = Device; - using deleter = ObjectDestroyShared; + using deleter = detail::ObjectDestroyShared; }; using SharedFence = SharedHandle; @@ -511,7 +513,7 @@ namespace VULKAN_HPP_NAMESPACE { public: using DestructorType = Device; - using deleter = ObjectDestroyShared; + using deleter = detail::ObjectDestroyShared; }; using SharedSemaphore = SharedHandle; @@ -521,7 +523,7 @@ namespace VULKAN_HPP_NAMESPACE { public: using DestructorType = 
Device; - using deleter = ObjectDestroyShared; + using deleter = detail::ObjectDestroyShared; }; using SharedEvent = SharedHandle; @@ -531,7 +533,7 @@ namespace VULKAN_HPP_NAMESPACE { public: using DestructorType = Device; - using deleter = ObjectDestroyShared; + using deleter = detail::ObjectDestroyShared; }; using SharedQueryPool = SharedHandle; @@ -541,7 +543,7 @@ namespace VULKAN_HPP_NAMESPACE { public: using DestructorType = Device; - using deleter = ObjectDestroyShared; + using deleter = detail::ObjectDestroyShared; }; using SharedBuffer = SharedHandle; @@ -551,7 +553,7 @@ namespace VULKAN_HPP_NAMESPACE { public: using DestructorType = Device; - using deleter = ObjectDestroyShared; + using deleter = detail::ObjectDestroyShared; }; using SharedBufferView = SharedHandle; @@ -561,7 +563,7 @@ namespace VULKAN_HPP_NAMESPACE { public: using DestructorType = Device; - using deleter = ObjectDestroyShared; + using deleter = detail::ObjectDestroyShared; }; using SharedImage = SharedHandle; @@ -571,7 +573,7 @@ namespace VULKAN_HPP_NAMESPACE { public: using DestructorType = Device; - using deleter = ObjectDestroyShared; + using deleter = detail::ObjectDestroyShared; }; using SharedImageView = SharedHandle; @@ -581,7 +583,7 @@ namespace VULKAN_HPP_NAMESPACE { public: using DestructorType = Device; - using deleter = ObjectDestroyShared; + using deleter = detail::ObjectDestroyShared; }; using SharedShaderModule = SharedHandle; @@ -591,7 +593,7 @@ namespace VULKAN_HPP_NAMESPACE { public: using DestructorType = Device; - using deleter = ObjectDestroyShared; + using deleter = detail::ObjectDestroyShared; }; using SharedPipelineCache = SharedHandle; @@ -601,7 +603,7 @@ namespace VULKAN_HPP_NAMESPACE { public: using DestructorType = Device; - using deleter = ObjectDestroyShared; + using deleter = detail::ObjectDestroyShared; }; using SharedPipeline = SharedHandle; @@ -611,7 +613,7 @@ namespace VULKAN_HPP_NAMESPACE { public: using DestructorType = Device; - using deleter = ObjectDestroyShared; + using deleter = detail::ObjectDestroyShared; }; using SharedPipelineLayout = SharedHandle; @@ -621,7 +623,7 @@ namespace VULKAN_HPP_NAMESPACE { public: using DestructorType = Device; - using deleter = ObjectDestroyShared; + using deleter = detail::ObjectDestroyShared; }; using SharedSampler = SharedHandle; @@ -631,7 +633,7 @@ namespace VULKAN_HPP_NAMESPACE { public: using DestructorType = Device; - using deleter = ObjectDestroyShared; + using deleter = detail::ObjectDestroyShared; }; using SharedDescriptorPool = SharedHandle; @@ -641,7 +643,7 @@ namespace VULKAN_HPP_NAMESPACE { public: using DestructorType = Device; - using deleter = PoolFreeShared; + using deleter = detail::PoolFreeShared; }; using SharedDescriptorSet = SharedHandle; @@ -651,7 +653,7 @@ namespace VULKAN_HPP_NAMESPACE { public: using DestructorType = Device; - using deleter = ObjectDestroyShared; + using deleter = detail::ObjectDestroyShared; }; using SharedDescriptorSetLayout = SharedHandle; @@ -661,7 +663,7 @@ namespace VULKAN_HPP_NAMESPACE { public: using DestructorType = Device; - using deleter = ObjectDestroyShared; + using deleter = detail::ObjectDestroyShared; }; using SharedFramebuffer = SharedHandle; @@ -671,7 +673,7 @@ namespace VULKAN_HPP_NAMESPACE { public: using DestructorType = Device; - using deleter = ObjectDestroyShared; + using deleter = detail::ObjectDestroyShared; }; using SharedRenderPass = SharedHandle; @@ -681,7 +683,7 @@ namespace VULKAN_HPP_NAMESPACE { public: using DestructorType = Device; - using deleter = 
ObjectDestroyShared; + using deleter = detail::ObjectDestroyShared; }; using SharedCommandPool = SharedHandle; @@ -691,7 +693,7 @@ namespace VULKAN_HPP_NAMESPACE { public: using DestructorType = Device; - using deleter = PoolFreeShared; + using deleter = detail::PoolFreeShared; }; using SharedCommandBuffer = SharedHandle; @@ -702,7 +704,7 @@ namespace VULKAN_HPP_NAMESPACE { public: using DestructorType = Device; - using deleter = ObjectDestroyShared; + using deleter = detail::ObjectDestroyShared; }; using SharedSamplerYcbcrConversion = SharedHandle; @@ -713,7 +715,7 @@ namespace VULKAN_HPP_NAMESPACE { public: using DestructorType = Device; - using deleter = ObjectDestroyShared; + using deleter = detail::ObjectDestroyShared; }; using SharedDescriptorUpdateTemplate = SharedHandle; @@ -725,7 +727,7 @@ namespace VULKAN_HPP_NAMESPACE { public: using DestructorType = Device; - using deleter = ObjectDestroyShared; + using deleter = detail::ObjectDestroyShared; }; using SharedPrivateDataSlot = SharedHandle; @@ -737,7 +739,7 @@ namespace VULKAN_HPP_NAMESPACE { public: using DestructorType = Instance; - using deleter = ObjectDestroyShared; + using deleter = detail::ObjectDestroyShared; }; using SharedSurfaceKHR = SharedHandle; @@ -748,7 +750,7 @@ namespace VULKAN_HPP_NAMESPACE { public: using DestructorType = Device; - using deleter = ObjectDestroyShared; + using deleter = detail::ObjectDestroyShared; }; using SharedSwapchainKHR = SharedHandle; @@ -759,7 +761,7 @@ namespace VULKAN_HPP_NAMESPACE { public: using DestructorType = PhysicalDevice; - using deleter = ObjectDestroyShared; + using deleter = detail::ObjectDestroyShared; }; using SharedDisplayKHR = SharedHandle; @@ -770,7 +772,7 @@ namespace VULKAN_HPP_NAMESPACE { public: using DestructorType = Instance; - using deleter = ObjectDestroyShared; + using deleter = detail::ObjectDestroyShared; }; using SharedDebugReportCallbackEXT = SharedHandle; @@ -781,7 +783,7 @@ namespace VULKAN_HPP_NAMESPACE { public: using DestructorType = Device; - using deleter = ObjectDestroyShared; + using deleter = detail::ObjectDestroyShared; }; using SharedVideoSessionKHR = SharedHandle; @@ -791,7 +793,7 @@ namespace VULKAN_HPP_NAMESPACE { public: using DestructorType = Device; - using deleter = ObjectDestroyShared; + using deleter = detail::ObjectDestroyShared; }; using SharedVideoSessionParametersKHR = SharedHandle; @@ -802,7 +804,7 @@ namespace VULKAN_HPP_NAMESPACE { public: using DestructorType = Device; - using deleter = ObjectDestroyShared; + using deleter = detail::ObjectDestroyShared; }; using SharedCuModuleNVX = SharedHandle; @@ -812,7 +814,7 @@ namespace VULKAN_HPP_NAMESPACE { public: using DestructorType = Device; - using deleter = ObjectDestroyShared; + using deleter = detail::ObjectDestroyShared; }; using SharedCuFunctionNVX = SharedHandle; @@ -823,7 +825,7 @@ namespace VULKAN_HPP_NAMESPACE { public: using DestructorType = Instance; - using deleter = ObjectDestroyShared; + using deleter = detail::ObjectDestroyShared; }; using SharedDebugUtilsMessengerEXT = SharedHandle; @@ -834,7 +836,7 @@ namespace VULKAN_HPP_NAMESPACE { public: using DestructorType = Device; - using deleter = ObjectDestroyShared; + using deleter = detail::ObjectDestroyShared; }; using SharedAccelerationStructureKHR = SharedHandle; @@ -845,7 +847,7 @@ namespace VULKAN_HPP_NAMESPACE { public: using DestructorType = Device; - using deleter = ObjectDestroyShared; + using deleter = detail::ObjectDestroyShared; }; using SharedValidationCacheEXT = SharedHandle; @@ -856,7 +858,7 @@ namespace 
VULKAN_HPP_NAMESPACE { public: using DestructorType = Device; - using deleter = ObjectDestroyShared; + using deleter = detail::ObjectDestroyShared; }; using SharedAccelerationStructureNV = SharedHandle; @@ -867,7 +869,7 @@ namespace VULKAN_HPP_NAMESPACE { public: using DestructorType = Device; - using deleter = ObjectDestroyShared; + using deleter = detail::ObjectDestroyShared; }; using SharedPerformanceConfigurationINTEL = SharedHandle; @@ -878,7 +880,7 @@ namespace VULKAN_HPP_NAMESPACE { public: using DestructorType = Device; - using deleter = ObjectDestroyShared; + using deleter = detail::ObjectDestroyShared; }; using SharedDeferredOperationKHR = SharedHandle; @@ -889,7 +891,7 @@ namespace VULKAN_HPP_NAMESPACE { public: using DestructorType = Device; - using deleter = ObjectDestroyShared; + using deleter = detail::ObjectDestroyShared; }; using SharedIndirectCommandsLayoutNV = SharedHandle; @@ -901,7 +903,7 @@ namespace VULKAN_HPP_NAMESPACE { public: using DestructorType = Device; - using deleter = ObjectDestroyShared; + using deleter = detail::ObjectDestroyShared; }; using SharedCudaModuleNV = SharedHandle; @@ -911,7 +913,7 @@ namespace VULKAN_HPP_NAMESPACE { public: using DestructorType = Device; - using deleter = ObjectDestroyShared; + using deleter = detail::ObjectDestroyShared; }; using SharedCudaFunctionNV = SharedHandle; @@ -924,7 +926,7 @@ namespace VULKAN_HPP_NAMESPACE { public: using DestructorType = Device; - using deleter = ObjectDestroyShared; + using deleter = detail::ObjectDestroyShared; }; using SharedBufferCollectionFUCHSIA = SharedHandle; @@ -936,7 +938,7 @@ namespace VULKAN_HPP_NAMESPACE { public: using DestructorType = Device; - using deleter = ObjectDestroyShared; + using deleter = detail::ObjectDestroyShared; }; using SharedMicromapEXT = SharedHandle; @@ -947,7 +949,7 @@ namespace VULKAN_HPP_NAMESPACE { public: using DestructorType = Device; - using deleter = ObjectDestroyShared; + using deleter = detail::ObjectDestroyShared; }; using SharedOpticalFlowSessionNV = SharedHandle; @@ -958,7 +960,7 @@ namespace VULKAN_HPP_NAMESPACE { public: using DestructorType = Device; - using deleter = ObjectDestroyShared; + using deleter = detail::ObjectDestroyShared; }; using SharedShaderEXT = SharedHandle; @@ -969,7 +971,7 @@ namespace VULKAN_HPP_NAMESPACE { public: using DestructorType = Device; - using deleter = ObjectDestroyShared; + using deleter = detail::ObjectDestroyShared; }; using SharedPipelineBinaryKHR = SharedHandle; @@ -980,7 +982,7 @@ namespace VULKAN_HPP_NAMESPACE { public: using DestructorType = Device; - using deleter = ObjectDestroyShared; + using deleter = detail::ObjectDestroyShared; }; using SharedIndirectCommandsLayoutEXT = SharedHandle; @@ -990,7 +992,7 @@ namespace VULKAN_HPP_NAMESPACE { public: using DestructorType = Device; - using deleter = ObjectDestroyShared; + using deleter = detail::ObjectDestroyShared; }; using SharedIndirectExecutionSetEXT = SharedHandle; diff --git a/third_party/vulkan/vulkan_static_assertions.hpp b/third_party/vulkan/vulkan_static_assertions.hpp index 7dbb47b..4fa9d6b 100644 --- a/third_party/vulkan/vulkan_static_assertions.hpp +++ b/third_party/vulkan/vulkan_static_assertions.hpp @@ -7984,6 +7984,21 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "DepthClampRangeEXT is not nothrow_move_constructible!" 
); +//=== VK_HUAWEI_hdr_vivid === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceHdrVividFeaturesHUAWEI ) == sizeof( VkPhysicalDeviceHdrVividFeaturesHUAWEI ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceHdrVividFeaturesHUAWEI is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::HdrVividDynamicMetadataHUAWEI ) == sizeof( VkHdrVividDynamicMetadataHUAWEI ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "HdrVividDynamicMetadataHUAWEI is not nothrow_move_constructible!" ); + //=== VK_NV_cooperative_matrix2 === VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CooperativeMatrixFlexibleDimensionsPropertiesNV ) == diff --git a/third_party/vulkan/vulkan_structs.hpp b/third_party/vulkan/vulkan_structs.hpp index 7b9b7c4..1bc9452 100644 --- a/third_party/vulkan/vulkan_structs.hpp +++ b/third_party/vulkan/vulkan_structs.hpp @@ -43711,6 +43711,130 @@ namespace VULKAN_HPP_NAMESPACE using Type = HdrMetadataEXT; }; + struct HdrVividDynamicMetadataHUAWEI + { + using NativeType = VkHdrVividDynamicMetadataHUAWEI; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eHdrVividDynamicMetadataHUAWEI; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + HdrVividDynamicMetadataHUAWEI( size_t dynamicMetadataSize_ = {}, const void * pDynamicMetadata_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , dynamicMetadataSize{ dynamicMetadataSize_ } + , pDynamicMetadata{ pDynamicMetadata_ } + { + } + + VULKAN_HPP_CONSTEXPR HdrVividDynamicMetadataHUAWEI( HdrVividDynamicMetadataHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + HdrVividDynamicMetadataHUAWEI( VkHdrVividDynamicMetadataHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT + : HdrVividDynamicMetadataHUAWEI( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + template + HdrVividDynamicMetadataHUAWEI( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & dynamicMetadata_, const void * pNext_ = nullptr ) + : pNext( pNext_ ), dynamicMetadataSize( dynamicMetadata_.size() * sizeof( T ) ), pDynamicMetadata( dynamicMetadata_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + HdrVividDynamicMetadataHUAWEI & operator=( HdrVividDynamicMetadataHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + HdrVividDynamicMetadataHUAWEI & operator=( VkHdrVividDynamicMetadataHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 HdrVividDynamicMetadataHUAWEI & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 HdrVividDynamicMetadataHUAWEI & setDynamicMetadataSize( size_t dynamicMetadataSize_ ) VULKAN_HPP_NOEXCEPT + { + dynamicMetadataSize = dynamicMetadataSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 HdrVividDynamicMetadataHUAWEI & setPDynamicMetadata( const void * pDynamicMetadata_ ) VULKAN_HPP_NOEXCEPT + { + 
pDynamicMetadata = pDynamicMetadata_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + template + HdrVividDynamicMetadataHUAWEI & setDynamicMetadata( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & dynamicMetadata_ ) VULKAN_HPP_NOEXCEPT + { + dynamicMetadataSize = dynamicMetadata_.size() * sizeof( T ); + pDynamicMetadata = dynamicMetadata_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkHdrVividDynamicMetadataHUAWEI const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkHdrVividDynamicMetadataHUAWEI &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, dynamicMetadataSize, pDynamicMetadata ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( HdrVividDynamicMetadataHUAWEI const & ) const = default; +#else + bool operator==( HdrVividDynamicMetadataHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( dynamicMetadataSize == rhs.dynamicMetadataSize ) && + ( pDynamicMetadata == rhs.pDynamicMetadata ); +# endif + } + + bool operator!=( HdrVividDynamicMetadataHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eHdrVividDynamicMetadataHUAWEI; + const void * pNext = {}; + size_t dynamicMetadataSize = {}; + const void * pDynamicMetadata = {}; + }; + + template <> + struct CppType + { + using Type = HdrVividDynamicMetadataHUAWEI; + }; + struct HeadlessSurfaceCreateInfoEXT { using NativeType = VkHeadlessSurfaceCreateInfoEXT; @@ -71438,6 +71562,102 @@ namespace VULKAN_HPP_NAMESPACE using PhysicalDeviceGroupPropertiesKHR = PhysicalDeviceGroupProperties; + struct PhysicalDeviceHdrVividFeaturesHUAWEI + { + using NativeType = VkPhysicalDeviceHdrVividFeaturesHUAWEI; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceHdrVividFeaturesHUAWEI; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceHdrVividFeaturesHUAWEI( VULKAN_HPP_NAMESPACE::Bool32 hdrVivid_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , hdrVivid{ hdrVivid_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceHdrVividFeaturesHUAWEI( PhysicalDeviceHdrVividFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceHdrVividFeaturesHUAWEI( VkPhysicalDeviceHdrVividFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceHdrVividFeaturesHUAWEI( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceHdrVividFeaturesHUAWEI & operator=( PhysicalDeviceHdrVividFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceHdrVividFeaturesHUAWEI & operator=( VkPhysicalDeviceHdrVividFeaturesHUAWEI const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHdrVividFeaturesHUAWEI & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return 
*this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHdrVividFeaturesHUAWEI & setHdrVivid( VULKAN_HPP_NAMESPACE::Bool32 hdrVivid_ ) VULKAN_HPP_NOEXCEPT + { + hdrVivid = hdrVivid_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceHdrVividFeaturesHUAWEI const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceHdrVividFeaturesHUAWEI &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, hdrVivid ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceHdrVividFeaturesHUAWEI const & ) const = default; +#else + bool operator==( PhysicalDeviceHdrVividFeaturesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( hdrVivid == rhs.hdrVivid ); +# endif + } + + bool operator!=( PhysicalDeviceHdrVividFeaturesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceHdrVividFeaturesHUAWEI; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 hdrVivid = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceHdrVividFeaturesHUAWEI; + }; + struct PhysicalDeviceHostImageCopyFeaturesEXT { using NativeType = VkPhysicalDeviceHostImageCopyFeaturesEXT; diff --git a/third_party/vulkan/vulkan_to_string.hpp b/third_party/vulkan/vulkan_to_string.hpp index c560196..1c00c12 100644 --- a/third_party/vulkan/vulkan_to_string.hpp +++ b/third_party/vulkan/vulkan_to_string.hpp @@ -4653,6 +4653,8 @@ namespace VULKAN_HPP_NAMESPACE case StructureType::eImageAlignmentControlCreateInfoMESA: return "ImageAlignmentControlCreateInfoMESA"; case StructureType::ePhysicalDeviceDepthClampControlFeaturesEXT: return "PhysicalDeviceDepthClampControlFeaturesEXT"; case StructureType::ePipelineViewportDepthClampControlCreateInfoEXT: return "PipelineViewportDepthClampControlCreateInfoEXT"; + case StructureType::ePhysicalDeviceHdrVividFeaturesHUAWEI: return "PhysicalDeviceHdrVividFeaturesHUAWEI"; + case StructureType::eHdrVividDynamicMetadataHUAWEI: return "HdrVividDynamicMetadataHUAWEI"; case StructureType::ePhysicalDeviceCooperativeMatrix2FeaturesNV: return "PhysicalDeviceCooperativeMatrix2FeaturesNV"; case StructureType::eCooperativeMatrixFlexibleDimensionsPropertiesNV: return "CooperativeMatrixFlexibleDimensionsPropertiesNV"; case StructureType::ePhysicalDeviceCooperativeMatrix2PropertiesNV: return "PhysicalDeviceCooperativeMatrix2PropertiesNV"; From 5bd66fd3623ba1d75b7ad599a65547cca0e08d10 Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Mon, 4 Nov 2024 21:21:46 +0100 Subject: [PATCH 069/131] adding clear color in mlx_clear_window --- example/main.c | 4 +-- includes/mlx.h | 4 +-- runtime/Includes/Core/Application.h | 2 +- runtime/Includes/Core/Application.inl | 4 +-- runtime/Includes/Core/Graphics.h | 2 +- runtime/Includes/Core/Graphics.inl | 10 ++++-- runtime/Includes/Graphics/Scene.h | 5 ++- .../Includes/Renderer/RenderPasses/Passes.h | 2 +- runtime/Sources/Core/Bridge.cpp | 9 ++++-- runtime/Sources/Graphics/Font.cpp | 8 ++--- runtime/Sources/Renderer/Image.cpp | 31 ++++++++++++++----- 
.../Sources/Renderer/RenderPasses/Passes.cpp | 6 ++-- runtime/Sources/Renderer/SceneRenderer.cpp | 2 +- runtime/Sources/Renderer/Swapchain.cpp | 6 ++-- third_party/kvf.h | 4 --- 15 files changed, 62 insertions(+), 37 deletions(-) diff --git a/example/main.c b/example/main.c index bac9e5a..c3fc308 100644 --- a/example/main.c +++ b/example/main.c @@ -17,7 +17,7 @@ int update(void* param) mlx_t* mlx = (mlx_t*)param; if(i == 200) - mlx_clear_window(mlx->mlx, mlx->win); + mlx_clear_window(mlx->mlx, mlx->win, 0xFF334D4D); if(i >= 250) mlx_set_font_scale(mlx->mlx, "default", 16.f); @@ -96,7 +96,7 @@ int key_hook(int key, void* param) mlx_mouse_hide(); break; case 6 : // (C)lear - mlx_clear_window(mlx->mlx, mlx->win); + mlx_clear_window(mlx->mlx, mlx->win, 0xFF334D4D); break; case 79 : // RIGHT KEY mlx_mouse_move(mlx->mlx, mlx->win, x + 10, y); diff --git a/includes/mlx.h b/includes/mlx.h index 6573599..374e7f1 100644 --- a/includes/mlx.h +++ b/includes/mlx.h @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/04 16:56:35 by maldavid #+# #+# */ -/* Updated: 2024/10/31 16:21:35 by maldavid ### ########.fr */ +/* Updated: 2024/11/04 21:09:59 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -325,7 +325,7 @@ MLX_API void mlx_set_font_scale(void* mlx, char* filepath, float scale); * * @return (void) */ -MLX_API void mlx_clear_window(void* mlx, void* win); +MLX_API void mlx_clear_window(void* mlx, void* win, int color); /** * @brief Destroys internal window diff --git a/runtime/Includes/Core/Application.h b/runtime/Includes/Core/Application.h index acb634a..9361b1b 100644 --- a/runtime/Includes/Core/Application.h +++ b/runtime/Includes/Core/Application.h @@ -27,7 +27,7 @@ namespace mlx inline void SetFPSCap(std::uint32_t fps) noexcept; inline Handle NewGraphicsSuport(std::size_t w, std::size_t h, const char* title, bool is_resizable); - inline void ClearGraphicsSupport(Handle win); + inline void ClearGraphicsSupport(Handle win, int color); inline void DestroyGraphicsSupport(Handle win); inline void SetGraphicsSupportPosition(Handle win, int x, int y); diff --git a/runtime/Includes/Core/Application.inl b/runtime/Includes/Core/Application.inl index 445cf15..41c164b 100644 --- a/runtime/Includes/Core/Application.inl +++ b/runtime/Includes/Core/Application.inl @@ -101,11 +101,11 @@ namespace mlx return static_cast(&m_graphics.back()->GetID()); } - void Application::ClearGraphicsSupport(Handle win) + void Application::ClearGraphicsSupport(Handle win, int color) { MLX_PROFILE_FUNCTION(); CHECK_WINDOW_PTR(win); - m_graphics[*static_cast(win)]->ResetRenderData(); + m_graphics[*static_cast(win)]->ResetRenderData(color); } void Application::DestroyGraphicsSupport(Handle win) diff --git a/runtime/Includes/Core/Graphics.h b/runtime/Includes/Core/Graphics.h index 92c17d9..4924e3a 100644 --- a/runtime/Includes/Core/Graphics.h +++ b/runtime/Includes/Core/Graphics.h @@ -22,7 +22,7 @@ namespace mlx void Render() noexcept; - inline void ResetRenderData() noexcept; + inline void ResetRenderData(int color) noexcept; inline void PixelPut(int x, int y, std::uint32_t color) noexcept; inline void StringPut(int x, int y, std::uint32_t color, std::string str); diff --git a/runtime/Includes/Core/Graphics.inl b/runtime/Includes/Core/Graphics.inl index edd7a30..b4ca15f 100644 --- a/runtime/Includes/Core/Graphics.inl +++ b/runtime/Includes/Core/Graphics.inl @@ -3,10 +3,16 @@ namespace mlx { - void 
GraphicsSupport::ResetRenderData() noexcept + void GraphicsSupport::ResetRenderData(int color) noexcept { MLX_PROFILE_FUNCTION(); - p_scene->ResetScene(); + Vec4f vec_color = { + static_cast((color & 0x000000FF)) / 255.0f, + static_cast((color & 0x0000FF00) >> 8) / 255.0f, + static_cast((color & 0x00FF0000) >> 16) / 255.0f, + static_cast((color & 0xFF000000) >> 24) / 255.0f + }; + p_scene->ResetScene(std::move(vec_color)); m_put_pixel_manager.ResetRenderData(); m_draw_layer = 0; m_pixelput_called = false; diff --git a/runtime/Includes/Graphics/Scene.h b/runtime/Includes/Graphics/Scene.h index 0d46488..fdb273b 100644 --- a/runtime/Includes/Graphics/Scene.h +++ b/runtime/Includes/Graphics/Scene.h @@ -7,6 +7,7 @@ #include #include #include +#include namespace mlx { @@ -29,7 +30,8 @@ namespace mlx void BringToFront(NonOwningPtr drawable); void BringToDrawLayer(NonOwningPtr drawable, std::uint64_t draw_layer); - inline void ResetScene() { m_drawables.clear(); } + inline void ResetScene(Vec4f clear) { m_drawables.clear(); m_clear_color = std::move(clear); } + inline const Vec4f& GetClearColor() const noexcept { return m_clear_color; } [[nodiscard]] MLX_FORCEINLINE const std::vector>& GetDrawables() const noexcept { return m_drawables; } [[nodiscard]] MLX_FORCEINLINE ViewerData& GetViewerData() noexcept { return m_viewer_data; } @@ -40,6 +42,7 @@ namespace mlx std::vector> m_drawables; ViewerData m_viewer_data; std::shared_ptr p_bound_font; + Vec4f m_clear_color = { 0.0f, 0.0f, 0.0f, 1.0f }; }; } diff --git a/runtime/Includes/Renderer/RenderPasses/Passes.h b/runtime/Includes/Renderer/RenderPasses/Passes.h index 56aa74d..45f084e 100644 --- a/runtime/Includes/Renderer/RenderPasses/Passes.h +++ b/runtime/Includes/Renderer/RenderPasses/Passes.h @@ -13,7 +13,7 @@ namespace mlx RenderPasses() = default; void Init(class Renderer& renderer); - void Pass(class Scene& scene, class Renderer& renderer); + void Pass(class Scene& scene, class Renderer& renderer, const Vec4f& clear_color); void Destroy(); ~RenderPasses() = default; diff --git a/runtime/Sources/Core/Bridge.cpp b/runtime/Sources/Core/Bridge.cpp index f842cf2..1811c40 100644 --- a/runtime/Sources/Core/Bridge.cpp +++ b/runtime/Sources/Core/Bridge.cpp @@ -264,10 +264,15 @@ extern "C" static_cast(mlx)->LoadFont(file, scale); } - void mlx_clear_window(void* mlx, void* win) + void mlx_clear_window(void* mlx, void* win, int color) { MLX_CHECK_APPLICATION_POINTER(mlx); - static_cast(mlx)->ClearGraphicsSupport(win); + unsigned char color_bits[4]; + color_bits[0] = (color & 0x00FF0000) >> 16; + color_bits[1] = (color & 0x0000FF00) >> 8; + color_bits[2] = (color & 0x000000FF); + color_bits[3] = (color & 0xFF000000) >> 24; + static_cast(mlx)->ClearGraphicsSupport(win, *reinterpret_cast(color_bits)); } void mlx_destroy_window(void* mlx, void* win) diff --git a/runtime/Sources/Graphics/Font.cpp b/runtime/Sources/Graphics/Font.cpp index b0b0a3d..339aa62 100644 --- a/runtime/Sources/Graphics/Font.cpp +++ b/runtime/Sources/Graphics/Font.cpp @@ -3,13 +3,13 @@ #include #include -#define STBRP_ASSERT(x) mlx::Assert(x, "internal stb assertion") +#define STBRP_ASSERT(x) mlx::Assert(x, "internal stb assertion " #x) #define STB_RECT_PACK_IMPLEMENTATION #include #define STB_TRUETYPE_IMPLEMENTATION -#define STB_malloc(x, u) ((void)(u), MemManager::Get().Malloc(x)) -#define STB_free(x, u) ((void)(u), MemManager::Get().Free(x)) +#define STB_malloc(x, u) ((void)(u), mlx::MemManager::Get().Malloc(x)) +#define STB_free(x, u) ((void)(u), mlx::MemManager::Get().Free(x)) #include 
namespace mlx @@ -43,7 +43,7 @@ namespace mlx stbtt_PackFontRange(&pc, std::get>(m_build_data).data(), 0, m_scale, 32, 96, m_cdata.data()); stbtt_PackEnd(&pc); - // TODO : find better solution + // TODO : find better solution; No, using VK_FORMAT_R8_SRGB does not work CPUBuffer vulkan_bitmap(RANGE * RANGE * 4); for(int i = 0, j = 0; i < RANGE * RANGE; i++, j += 4) { diff --git a/runtime/Sources/Renderer/Image.cpp b/runtime/Sources/Renderer/Image.cpp index 53ea351..7280831 100644 --- a/runtime/Sources/Renderer/Image.cpp +++ b/runtime/Sources/Renderer/Image.cpp @@ -1,10 +1,18 @@ #include #include #include +#include #include #include +#include #define STB_IMAGE_IMPLEMENTATION + +#define STBI_ASSERT(x) mlx::Assert(x, "internal stb assertion " #x) +#define STBI_MALLOC(x) (mlx::MemManager::Get().Malloc(x)) +#define STBI_REALLOC(p, x) (mlx::MemManager::Get().Realloc(p, x)) +#define STBI_FREE(x) (mlx::MemManager::Get().Free(x)) + #ifdef MLX_COMPILER_GCC #pragma GCC diagnostic push #pragma GCC diagnostic ignored "-Wstringop-overflow" @@ -264,7 +272,7 @@ namespace mlx MLX_PROFILE_FUNCTION(); std::string filename = file.string(); - if(file.stem() == "banana") + if(file.stem() == "terracotta.pie") Message("banana, banana, banana, banana, terracotta banana terracotta, terracotta pie"); if(!std::filesystem::exists(file)) @@ -274,21 +282,28 @@ namespace mlx } if(stbi_is_hdr(filename.c_str())) { - Error("Texture: unsupported image format % (HDR image)", file); + Error("Texture: unsupported image format from % (HDR image)", file); return nullptr; } - int dummy_w; - int dummy_h; - int channels; - std::uint8_t* data = stbi_load(filename.c_str(), (w == nullptr ? &dummy_w : w), (h == nullptr ? &dummy_h : h), &channels, 4); + Vec2i size; + int channels; + + std::uint8_t* data = stbi_load(filename.c_str(), &size.x, &size.y, &channels, STBI_rgb_alpha); CallOnExit defer([=]() { stbi_image_free(data); }); - CPUBuffer buffer((w == nullptr ? dummy_w : *w) * (h == nullptr ? dummy_h : *h) * 4); + Verify(channels == 4, "invalid channels number in image loaded (should be 4, was %)", channels); + + CPUBuffer buffer(size.x * size.y * 4); std::memcpy(buffer.GetData(), data, buffer.GetSize()); + if(w != nullptr) + *w = size.x; + if(h != nullptr) + *h = size.y; + Texture* texture; - try { texture = new Texture(std::move(buffer), (w == nullptr ? dummy_w : *w), (h == nullptr ? dummy_h : *h), VK_FORMAT_R8G8B8A8_SRGB, false, std::move(filename)); } + try { texture = new Texture(std::move(buffer), size.x, size.y, VK_FORMAT_R8G8B8A8_SRGB, false, std::move(filename)); } catch(...) 
{ return nullptr; } return texture; } diff --git a/runtime/Sources/Renderer/RenderPasses/Passes.cpp b/runtime/Sources/Renderer/RenderPasses/Passes.cpp index 2e0c76f..c094f84 100644 --- a/runtime/Sources/Renderer/RenderPasses/Passes.cpp +++ b/runtime/Sources/Renderer/RenderPasses/Passes.cpp @@ -9,7 +9,7 @@ namespace mlx { m_2Dpass.Init(); m_final.Init(); - func::function functor = [this, renderer](const EventBase& event) + func::function functor = [this, &renderer](const EventBase& event) { if(event.What() == Event::ResizeEventCode) { @@ -34,9 +34,9 @@ namespace mlx m_main_render_texture.TransitionLayout(VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL); } - void RenderPasses::Pass(Scene& scene, Renderer& renderer) + void RenderPasses::Pass(Scene& scene, Renderer& renderer, const Vec4f& clear_color) { - m_main_render_texture.Clear(renderer.GetActiveCommandBuffer(), Vec4f{ 0.0f, 0.0f, 0.0f, 1.0f }); + m_main_render_texture.Clear(renderer.GetActiveCommandBuffer(), clear_color); m_2Dpass.Pass(scene, renderer, m_main_render_texture); m_final.Pass(scene, renderer, m_main_render_texture); diff --git a/runtime/Sources/Renderer/SceneRenderer.cpp b/runtime/Sources/Renderer/SceneRenderer.cpp index c1939fd..2b5bcaa 100644 --- a/runtime/Sources/Renderer/SceneRenderer.cpp +++ b/runtime/Sources/Renderer/SceneRenderer.cpp @@ -15,7 +15,7 @@ namespace mlx void SceneRenderer::Render(Scene& scene, Renderer& renderer) { MLX_PROFILE_FUNCTION(); - m_passes.Pass(scene, renderer); + m_passes.Pass(scene, renderer, scene.GetClearColor()); } void SceneRenderer::Destroy() diff --git a/runtime/Sources/Renderer/Swapchain.cpp b/runtime/Sources/Renderer/Swapchain.cpp index 4c5af67..c6c7e96 100644 --- a/runtime/Sources/Renderer/Swapchain.cpp +++ b/runtime/Sources/Renderer/Swapchain.cpp @@ -57,7 +57,7 @@ namespace mlx for(Image& img : m_swapchain_images) img.DestroyImageView(); - // kvfDestroySwapchainKHR(RenderCore::Get().GetDevice(), m_swapchain); + kvfDestroySwapchainKHR(RenderCore::Get().GetDevice(), m_swapchain); RenderCore::Get().vkDestroySurfaceKHR(RenderCore::Get().GetInstance(), m_surface, nullptr); m_surface = VK_NULL_HANDLE; @@ -79,8 +79,8 @@ namespace mlx VkSwapchainKHR old_swapchain = m_swapchain; m_swapchain = kvfCreateSwapchainKHR(RenderCore::Get().GetDevice(), RenderCore::Get().GetPhysicalDevice(), m_surface, extent, VK_NULL_HANDLE, true); - // if(old_swapchain != VK_NULL_HANDLE) - // kvfDestroySwapchainKHR(RenderCore::Get().GetDevice(), old_swapchain); + if(old_swapchain != VK_NULL_HANDLE) + kvfDestroySwapchainKHR(RenderCore::Get().GetDevice(), old_swapchain); m_images_count = kvfGetSwapchainImagesCount(m_swapchain); m_min_images_count = kvfGetSwapchainMinImagesCount(m_swapchain); diff --git a/third_party/kvf.h b/third_party/kvf.h index 206c651..7080ab6 100755 --- a/third_party/kvf.h +++ b/third_party/kvf.h @@ -679,7 +679,6 @@ __KvfDevice* __kvfGetKvfDeviceFromVkCommandBuffer(VkCommandBuffer cmd) __kvf_internal_swapchains[__kvf_internal_swapchains_size].images_count = images_count; __kvf_internal_swapchains[__kvf_internal_swapchains_size].images_extent = extent; __kvf_internal_swapchains_size++; - printf("new size updated %zu, capacity %zu\n", __kvf_internal_swapchains_size, __kvf_internal_swapchains_capacity); } void __kvfDestroySwapchain(VkDevice device, VkSwapchainKHR swapchain) @@ -701,7 +700,6 @@ __KvfDevice* __kvfGetKvfDeviceFromVkCommandBuffer(VkCommandBuffer cmd) for(size_t j = i; j < __kvf_internal_swapchains_size - 1; j++) __kvf_internal_swapchains[j] = __kvf_internal_swapchains[j + 1]; 
__kvf_internal_swapchains_size--; - printf("new size delete %zu, capacity %zu\n", __kvf_internal_swapchains_size, __kvf_internal_swapchains_capacity); if(__kvf_internal_swapchains_size == 0) { KVF_FREE(__kvf_internal_swapchains); @@ -711,12 +709,10 @@ __KvfDevice* __kvfGetKvfDeviceFromVkCommandBuffer(VkCommandBuffer cmd) } } } -#include __KvfSwapchain* __kvfGetKvfSwapchainFromVkSwapchainKHR(VkSwapchainKHR swapchain) { KVF_ASSERT(swapchain != VK_NULL_HANDLE); - printf("size %zu, capacity %zu\n", __kvf_internal_swapchains_size, __kvf_internal_swapchains_capacity); for(size_t i = 0; i < __kvf_internal_swapchains_size; i++) { if(__kvf_internal_swapchains[i].swapchain == swapchain) From d1a5bceb033d152bc995e1caab1150c14a5cd286 Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Tue, 5 Nov 2024 11:50:40 +0100 Subject: [PATCH 070/131] fixing bug with image recreation --- Makefile | 4 ++- runtime/Includes/Core/Logs.inl | 14 ++++----- .../Includes/Renderer/RenderPasses/Passes.h | 2 +- runtime/Includes/Renderer/ScenesRenderer.h | 2 +- runtime/Sources/Core/Graphics.cpp | 4 +-- runtime/Sources/Core/Memory.cpp | 2 ++ runtime/Sources/Graphics/Mesh.cpp | 2 -- runtime/Sources/Renderer/Image.cpp | 11 ++++--- .../Sources/Renderer/Pipelines/Graphics.cpp | 7 ++--- runtime/Sources/Renderer/RenderCore.cpp | 6 +++- .../Renderer/RenderPasses/FinalPass.cpp | 2 +- .../Sources/Renderer/RenderPasses/Passes.cpp | 31 +++++++------------ runtime/Sources/Renderer/SceneRenderer.cpp | 4 +-- third_party/kvf.h | 5 +-- 14 files changed, 49 insertions(+), 47 deletions(-) diff --git a/Makefile b/Makefile index 7617caa..2ade2ba 100644 --- a/Makefile +++ b/Makefile @@ -28,7 +28,7 @@ SHADERS_SRCS = $(wildcard $(addsuffix /*.nzsl, $(SHADERS_DIR))) SPVS = $(SHADERS_SRCS:.nzsl=.spv.h) CXX = clang++ -CXXFLAGS = -std=c++20 -O3 -fPIC -Wall -Wextra -DSDL_MAIN_HANDLED +CXXFLAGS = -std=c++20 -fPIC -Wall -Wextra -DSDL_MAIN_HANDLED INCLUDES = -I./includes -I./runtime/Includes -I./runtime/Sources -I./third_party NZSLC = nzslc @@ -49,6 +49,8 @@ endif ifeq ($(DEBUG), true) CXXFLAGS += -g3 -D DEBUG LDFLAGS += -rdynamic +else + CXXFLAGS += -O3 endif ifeq ($(FORCE_INTEGRATED_GPU), true) diff --git a/runtime/Includes/Core/Logs.inl b/runtime/Includes/Core/Logs.inl index 744fbaa..54983cf 100644 --- a/runtime/Includes/Core/Logs.inl +++ b/runtime/Includes/Core/Logs.inl @@ -16,7 +16,7 @@ namespace mlx } catch(const std::exception& e) { - Logs::Report(LogType::Error, "formatter exception catched in the log printer : "s + e.what()); + Logs::Report(LogType::Error, line, file, function, "formatter exception catched in the log printer: "s + e.what()); } } @@ -32,7 +32,7 @@ namespace mlx } catch(const std::exception& e) { - Logs::Report(LogType::Error, "formatter exception catched in the log printer : "s + e.what()); + Logs::Report(LogType::Error, line, file, function, "formatter exception catched in the log printer: "s + e.what()); } } @@ -48,7 +48,7 @@ namespace mlx } catch(const std::exception& e) { - Logs::Report(LogType::Error, "formatter exception catched in the log printer : "s + e.what()); + Logs::Report(LogType::Error, line, file, function, "formatter exception catched in the log printer: "s + e.what()); } } @@ -64,7 +64,7 @@ namespace mlx } catch(const std::exception& e) { - Logs::Report(LogType::Error, "formatter exception catched in the log printer : "s + e.what()); + Logs::Report(LogType::Error, line, file, function, "formatter exception catched in the log printer: "s + e.what()); } } @@ -80,7 +80,7 @@ namespace mlx } catch(const std::exception& e) { - 
Logs::Report(LogType::Error, "formatter exception catched in the log printer : "s + e.what()); + Logs::Report(LogType::FatalError, line, file, function, "formatter exception catched in the log printer: "s + e.what()); } } @@ -98,7 +98,7 @@ namespace mlx } catch(const std::exception& e) { - Logs::Report(LogType::Error, "formatter exception catched in the log printer : "s + e.what()); + Logs::Report(LogType::FatalError, line, file, function, "formatter exception catched in the log printer: "s + e.what()); } } @@ -117,7 +117,7 @@ namespace mlx } catch(const std::exception& e) { - Logs::Report(LogType::Error, "formatter exception catched in the log printer : "s + e.what()); + Logs::Report(LogType::FatalError, line, file, function, "formatter exception catched in the log printer: "s + e.what()); } } #endif diff --git a/runtime/Includes/Renderer/RenderPasses/Passes.h b/runtime/Includes/Renderer/RenderPasses/Passes.h index 45f084e..a24bcc2 100644 --- a/runtime/Includes/Renderer/RenderPasses/Passes.h +++ b/runtime/Includes/Renderer/RenderPasses/Passes.h @@ -12,7 +12,7 @@ namespace mlx public: RenderPasses() = default; - void Init(class Renderer& renderer); + void Init(); void Pass(class Scene& scene, class Renderer& renderer, const Vec4f& clear_color); void Destroy(); diff --git a/runtime/Includes/Renderer/ScenesRenderer.h b/runtime/Includes/Renderer/ScenesRenderer.h index 36dcdad..03c391e 100644 --- a/runtime/Includes/Renderer/ScenesRenderer.h +++ b/runtime/Includes/Renderer/ScenesRenderer.h @@ -9,7 +9,7 @@ namespace mlx { public: SceneRenderer() = default; - void Init(class Renderer& renderer); + void Init(); void Render(class Scene& scene, class Renderer& renderer); // TODO : add RTT support void Destroy(); ~SceneRenderer() = default; diff --git a/runtime/Sources/Core/Graphics.cpp b/runtime/Sources/Core/Graphics.cpp index 9769bbf..66a70ed 100644 --- a/runtime/Sources/Core/Graphics.cpp +++ b/runtime/Sources/Core/Graphics.cpp @@ -12,7 +12,7 @@ namespace mlx MLX_PROFILE_FUNCTION(); // TODO : re-enable render targets m_renderer.Init(nullptr); - m_scene_renderer.Init(m_renderer); + m_scene_renderer.Init(); p_scene = std::make_unique(); } @@ -24,7 +24,7 @@ namespace mlx { MLX_PROFILE_FUNCTION(); m_renderer.Init(p_window.get()); - m_scene_renderer.Init(m_renderer); + m_scene_renderer.Init(); p_scene = std::make_unique(); } diff --git a/runtime/Sources/Core/Memory.cpp b/runtime/Sources/Core/Memory.cpp index 197a458..3a69d32 100644 --- a/runtime/Sources/Core/Memory.cpp +++ b/runtime/Sources/Core/Memory.cpp @@ -40,6 +40,8 @@ namespace mlx void MemManager::Free(void* ptr) { + if(ptr == nullptr) + return; auto it = std::find(s_blocks.begin(), s_blocks.end(), ptr); if(it == s_blocks.end()) { diff --git a/runtime/Sources/Graphics/Mesh.cpp b/runtime/Sources/Graphics/Mesh.cpp index cebfe54..751d12e 100644 --- a/runtime/Sources/Graphics/Mesh.cpp +++ b/runtime/Sources/Graphics/Mesh.cpp @@ -7,7 +7,6 @@ namespace mlx void Mesh::Draw(VkCommandBuffer cmd, std::size_t& drawcalls, std::size_t& polygondrawn) const noexcept { MLX_PROFILE_FUNCTION(); - #pragma omp parallel for for(std::size_t i = 0; i < m_sub_meshes.size(); i++) Draw(cmd, drawcalls, polygondrawn, i); } @@ -26,7 +25,6 @@ namespace mlx Mesh::~Mesh() { MLX_PROFILE_FUNCTION(); - #pragma omp parallel for for(auto& mesh : m_sub_meshes) { mesh.vbo.Destroy(); diff --git a/runtime/Sources/Renderer/Image.cpp b/runtime/Sources/Renderer/Image.cpp index 7280831..f4f1276 100644 --- a/runtime/Sources/Renderer/Image.cpp +++ b/runtime/Sources/Renderer/Image.cpp @@ -8,7 
+8,7 @@ #define STB_IMAGE_IMPLEMENTATION -#define STBI_ASSERT(x) mlx::Assert(x, "internal stb assertion " #x) +#define STBI_ASSERT(x) (mlx::Assert(x, "internal stb assertion " #x)) #define STBI_MALLOC(x) (mlx::MemManager::Get().Malloc(x)) #define STBI_REALLOC(p, x) (mlx::MemManager::Get().Realloc(p, x)) #define STBI_FREE(x) (mlx::MemManager::Get().Free(x)) @@ -162,6 +162,10 @@ namespace mlx #endif } m_image = VK_NULL_HANDLE; + m_layout = VK_IMAGE_LAYOUT_UNDEFINED; + m_width = 0; + m_height = 0; + m_is_multisampled = false; } void Texture::Init(CPUBuffer pixels, std::uint32_t width, std::uint32_t height, VkFormat format, bool is_multisampled, [[maybe_unused]] std::string_view debug_name) @@ -269,6 +273,7 @@ namespace mlx Texture* StbTextureLoad(const std::filesystem::path& file, int* w, int* h) { + using namespace std::literals; MLX_PROFILE_FUNCTION(); std::string filename = file.string(); @@ -289,11 +294,9 @@ namespace mlx Vec2i size; int channels; - std::uint8_t* data = stbi_load(filename.c_str(), &size.x, &size.y, &channels, STBI_rgb_alpha); + std::uint8_t* data = stbi_load(filename.c_str(), &size.x, &size.y, &channels, 4); CallOnExit defer([=]() { stbi_image_free(data); }); - Verify(channels == 4, "invalid channels number in image loaded (should be 4, was %)", channels); - CPUBuffer buffer(size.x * size.y * 4); std::memcpy(buffer.GetData(), data, buffer.GetSize()); diff --git a/runtime/Sources/Renderer/Pipelines/Graphics.cpp b/runtime/Sources/Renderer/Pipelines/Graphics.cpp index 52950f1..3a7e51d 100644 --- a/runtime/Sources/Renderer/Pipelines/Graphics.cpp +++ b/runtime/Sources/Renderer/Pipelines/Graphics.cpp @@ -103,7 +103,6 @@ namespace mlx scissor.extent = fb_extent; RenderCore::Get().vkCmdSetScissor(command_buffer, 0, 1, &scissor); - #pragma omp parallel for for(std::size_t i = 0; i < m_clears.size(); i++) { m_clears[i].color.float32[0] = clear[0]; @@ -128,7 +127,6 @@ namespace mlx MLX_PROFILE_FUNCTION(); p_vertex_shader.reset(); p_fragment_shader.reset(); - #pragma omp parallel for for(auto& fb : m_framebuffers) { kvfDestroyFramebuffer(RenderCore::Get().GetDevice(), fb); @@ -144,6 +142,9 @@ namespace mlx kvfDestroyPipeline(RenderCore::Get().GetDevice(), m_pipeline); m_pipeline = VK_NULL_HANDLE; DebugLog("Vulkan: graphics pipeline destroyed"); + p_renderer = nullptr; + m_clears.clear(); + m_attachments.clear(); } void GraphicPipeline::CreateFramebuffers(const std::vector>& render_targets, bool clear_attachments) @@ -157,7 +158,6 @@ namespace mlx attachment_views.push_back(p_renderer->GetSwapchain().GetSwapchainImages()[0].GetImageView()); } - #pragma omp parallel for for(NonOwningPtr image : render_targets) { attachments.push_back(kvfBuildAttachmentDescription((kvfIsDepthFormat(image->GetFormat()) ? 
KVF_IMAGE_DEPTH : KVF_IMAGE_COLOR), image->GetFormat(), image->GetLayout(), image->GetLayout(), clear_attachments, VK_SAMPLE_COUNT_1_BIT)); @@ -178,7 +178,6 @@ namespace mlx DebugLog("Vulkan: framebuffer created"); } } - #pragma omp parallel for for(NonOwningPtr image : render_targets) { m_framebuffers.push_back(kvfCreateFramebuffer(RenderCore::Get().GetDevice(), m_renderpass, attachment_views.data(), attachment_views.size(), { .width = image->GetWidth(), .height = image->GetHeight() })); diff --git a/runtime/Sources/Renderer/RenderCore.cpp b/runtime/Sources/Renderer/RenderCore.cpp index 3ea7461..e4abfaf 100644 --- a/runtime/Sources/Renderer/RenderCore.cpp +++ b/runtime/Sources/Renderer/RenderCore.cpp @@ -1,12 +1,16 @@ #include #include +#include #define KVF_IMPLEMENTATION #ifdef DEBUG #define KVF_ENABLE_VALIDATION_LAYERS #endif -#define KVF_ASSERT(x) mlx::Assert(x, #x) +#define KVF_ASSERT(x) (mlx::Assert(x, "internal kvf assertion " #x)) +#define KVF_MALLOC(x) (mlx::MemManager::Get().Malloc(x)) +#define KVF_REALLOC(p, x) (mlx::MemManager::Get().Realloc(p, x)) +#define KVF_FREE(x) (mlx::MemManager::Get().Free(x)) #if defined(MLX_COMPILER_GCC) || defined(MLX_COMPILER_CLANG) #pragma clang diagnostic push diff --git a/runtime/Sources/Renderer/RenderPasses/FinalPass.cpp b/runtime/Sources/Renderer/RenderPasses/FinalPass.cpp index 5775187..e59bb5e 100644 --- a/runtime/Sources/Renderer/RenderPasses/FinalPass.cpp +++ b/runtime/Sources/Renderer/RenderPasses/FinalPass.cpp @@ -20,7 +20,7 @@ namespace mlx ShaderLayout fragment_shader_layout( { { 0, - ShaderSetLayout({ + ShaderSetLayout({ { 0, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER } }) } diff --git a/runtime/Sources/Renderer/RenderPasses/Passes.cpp b/runtime/Sources/Renderer/RenderPasses/Passes.cpp index c094f84..f1ea588 100644 --- a/runtime/Sources/Renderer/RenderPasses/Passes.cpp +++ b/runtime/Sources/Renderer/RenderPasses/Passes.cpp @@ -5,37 +5,30 @@ namespace mlx { - void RenderPasses::Init(Renderer& renderer) + void RenderPasses::Init() { m_2Dpass.Init(); m_final.Init(); - func::function functor = [this, &renderer](const EventBase& event) + func::function functor = [this](const EventBase& event) { if(event.What() == Event::ResizeEventCode) - { m_main_render_texture.Destroy(); - auto extent = kvfGetSwapchainImagesSize(renderer.GetSwapchain().Get()); - #ifdef DEBUG - m_main_render_texture.Init({}, extent.width, extent.height, VK_FORMAT_R8G8B8A8_SRGB, false, "mlx_renderpasses_target"); - #else - m_main_render_texture.Init({}, extent.width, extent.height, VK_FORMAT_R8G8B8A8_SRGB, false, {}); - #endif - m_main_render_texture.TransitionLayout(VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL); - } }; EventBus::RegisterListener({ functor, "__MlxRenderPasses" }); - auto extent = kvfGetSwapchainImagesSize(renderer.GetSwapchain().Get()); - - #ifdef DEBUG - m_main_render_texture.Init({}, extent.width, extent.height, VK_FORMAT_R8G8B8A8_SRGB, false, "mlx_renderpasses_target"); - #else - m_main_render_texture.Init({}, extent.width, extent.height, VK_FORMAT_R8G8B8A8_SRGB, false, {}); - #endif - m_main_render_texture.TransitionLayout(VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL); } void RenderPasses::Pass(Scene& scene, Renderer& renderer, const Vec4f& clear_color) { + if(!m_main_render_texture.IsInit()) + { + auto extent = kvfGetSwapchainImagesSize(renderer.GetSwapchain().Get()); + #ifdef DEBUG + m_main_render_texture.Init({}, extent.width, extent.height, VK_FORMAT_R8G8B8A8_SRGB, false, "mlx_renderpasses_target"); + #else + m_main_render_texture.Init({}, extent.width, 
extent.height, VK_FORMAT_R8G8B8A8_SRGB, false, {}); + #endif + m_main_render_texture.TransitionLayout(VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL); + } m_main_render_texture.Clear(renderer.GetActiveCommandBuffer(), clear_color); m_2Dpass.Pass(scene, renderer, m_main_render_texture); diff --git a/runtime/Sources/Renderer/SceneRenderer.cpp b/runtime/Sources/Renderer/SceneRenderer.cpp index 2b5bcaa..98fa750 100644 --- a/runtime/Sources/Renderer/SceneRenderer.cpp +++ b/runtime/Sources/Renderer/SceneRenderer.cpp @@ -6,10 +6,10 @@ namespace mlx { - void SceneRenderer::Init(Renderer& renderer) + void SceneRenderer::Init() { MLX_PROFILE_FUNCTION(); - m_passes.Init(renderer); + m_passes.Init(); } void SceneRenderer::Render(Scene& scene, Renderer& renderer) diff --git a/third_party/kvf.h b/third_party/kvf.h index 7080ab6..fa6a1bd 100755 --- a/third_party/kvf.h +++ b/third_party/kvf.h @@ -40,7 +40,7 @@ * by using #define KVF_NO_EXIT_ON_FAILURE * * If you are using Volk or any other meta loader you must define KVF_IMPL_VK_NO_PROTOTYPES - * or VK_NO_PROTOTYPES before including this file to avoid conflicts with Vulkan prototypes. + * before including this file to avoid conflicts with Vulkan prototypes. * You will also need to pass the function pointers to kvf using dedicated functions. * * You can also #define KVF_ENABLE_VALIDATION_LAYERS to enable validation layers. @@ -236,7 +236,7 @@ void kvfCheckVk(VkResult result); #ifdef KVF_IMPL_VK_NO_PROTOTYPES #ifdef KVF_DEFINE_VULKAN_FUNCTION_PROTOTYPE - #undef KVF_DEFINE_VULKAN_FUNCTION_PROTOTYPE + #undef KVF_DEFINE_VULKAN_FUNCTION_PROTOTYPE #endif #define KVF_DEFINE_VULKAN_FUNCTION_PROTOTYPE(fn) PFN_##fn fn @@ -2122,6 +2122,7 @@ void kvfDestroyImageView(VkDevice device, VkImageView image_view) void kvfTransitionImageLayout(VkDevice device, VkImage image, KvfImageType type, VkCommandBuffer cmd, VkFormat format, VkImageLayout old_layout, VkImageLayout new_layout, bool is_single_time_cmd_buffer) { KVF_ASSERT(device != VK_NULL_HANDLE); + KVF_ASSERT(cmd != VK_NULL_HANDLE); if(new_layout == old_layout) return; From 8d879fb5bc15d68712e7744d24e77ba84a16bddb Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Tue, 5 Nov 2024 11:59:31 +0100 Subject: [PATCH 071/131] improving BringToLayer --- runtime/Includes/Graphics/Scene.h | 1 - runtime/Sources/Graphics/Scene.cpp | 16 +++------------- runtime/Sources/Renderer/RenderPasses/2DPass.cpp | 4 ++-- 3 files changed, 5 insertions(+), 16 deletions(-) diff --git a/runtime/Includes/Graphics/Scene.h b/runtime/Includes/Graphics/Scene.h index fdb273b..c4e6d27 100644 --- a/runtime/Includes/Graphics/Scene.h +++ b/runtime/Includes/Graphics/Scene.h @@ -27,7 +27,6 @@ namespace mlx inline void BindFont(std::shared_ptr font) { Verify((bool)font, "invalid fond pointer"); p_bound_font = font; } - void BringToFront(NonOwningPtr drawable); void BringToDrawLayer(NonOwningPtr drawable, std::uint64_t draw_layer); inline void ResetScene(Vec4f clear) { m_drawables.clear(); m_clear_color = std::move(clear); } diff --git a/runtime/Sources/Graphics/Scene.cpp b/runtime/Sources/Graphics/Scene.cpp index 8ed2986..dcc2f61 100644 --- a/runtime/Sources/Graphics/Scene.cpp +++ b/runtime/Sources/Graphics/Scene.cpp @@ -112,27 +112,17 @@ namespace mlx return ptr->GetText() == text && ptr->GetFont() == p_bound_font; } - void Scene::BringToFront(NonOwningPtr drawable) + void Scene::BringToDrawLayer(NonOwningPtr drawable, std::uint64_t draw_layer) { MLX_PROFILE_FUNCTION(); + if(draw_layer < m_drawables.size()) + return; auto it = std::find_if(m_drawables.begin(), 
m_drawables.end(), [&drawable](std::shared_ptr drawable_ptr) { return drawable_ptr.get() == drawable.Get(); }); if(it == m_drawables.end()) return; - std::rotate(it, it + 1, m_drawables.end()); - } - - void Scene::BringToDrawLayer(NonOwningPtr drawable, std::uint64_t draw_layer) - { - MLX_PROFILE_FUNCTION(); - auto it = std::find_if(m_drawables.begin(), m_drawables.end(), [&drawable](std::shared_ptr drawable_ptr) - { - return drawable_ptr.get() == drawable.Get(); - }); - if (m_drawables.size() > draw_layer) - return; std::swap(*it, *(m_drawables.begin() + draw_layer)); } } diff --git a/runtime/Sources/Renderer/RenderPasses/2DPass.cpp b/runtime/Sources/Renderer/RenderPasses/2DPass.cpp index a3ee4b8..09f4afe 100644 --- a/runtime/Sources/Renderer/RenderPasses/2DPass.cpp +++ b/runtime/Sources/Renderer/RenderPasses/2DPass.cpp @@ -94,7 +94,7 @@ namespace mlx const auto& drawables = scene.GetDrawables(); - for(auto drawable : drawables) + for(auto& drawable : drawables) { // Check every textures and update modified ones to GPU before starting the render pass if(!drawable->IsSetInit()) @@ -103,7 +103,7 @@ namespace mlx } m_pipeline.BindPipeline(cmd, 0, {}); - for(auto drawable : drawables) + for(auto& drawable : drawables) { SpriteData drawable_data; drawable_data.position = drawable->GetPosition(); From 71d1c20a27ce8a8c1a6a599e105ef02519820cb1 Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Tue, 5 Nov 2024 12:49:21 +0100 Subject: [PATCH 072/131] adding nesting to debug logs --- runtime/Includes/Core/Logs.h | 7 +++++- runtime/Includes/Core/Logs.inl | 2 +- ...{2DFragment.nzsl => Shader2DFragment.nzsl} | 0 ...DFragment.spv.h => Shader2DFragment.spv.h} | 0 .../{2DVertex.nzsl => Shader2DVertex.nzsl} | 0 .../{2DVertex.spv.h => Shader2DVertex.spv.h} | 0 ...ragment.nzsl => ShaderScreenFragment.nzsl} | 0 ...gment.spv.h => ShaderScreenFragment.spv.h} | 0 ...eenVertex.nzsl => ShaderScreenVertex.nzsl} | 0 ...nVertex.spv.h => ShaderScreenVertex.spv.h} | 0 runtime/Includes/Graphics/Scene.h | 2 -- runtime/Sources/Core/Logs.cpp | 24 ++++++++++++++++++- .../Sources/Renderer/Pipelines/Graphics.cpp | 4 ++++ runtime/Sources/Renderer/RenderCore.cpp | 4 ++++ .../Sources/Renderer/RenderPasses/2DPass.cpp | 4 ++-- .../Renderer/RenderPasses/FinalPass.cpp | 4 ++-- 16 files changed, 42 insertions(+), 9 deletions(-) rename runtime/Includes/Embedded/{2DFragment.nzsl => Shader2DFragment.nzsl} (100%) rename runtime/Includes/Embedded/{2DFragment.spv.h => Shader2DFragment.spv.h} (100%) rename runtime/Includes/Embedded/{2DVertex.nzsl => Shader2DVertex.nzsl} (100%) rename runtime/Includes/Embedded/{2DVertex.spv.h => Shader2DVertex.spv.h} (100%) rename runtime/Includes/Embedded/{ScreenFragment.nzsl => ShaderScreenFragment.nzsl} (100%) rename runtime/Includes/Embedded/{ScreenFragment.spv.h => ShaderScreenFragment.spv.h} (100%) rename runtime/Includes/Embedded/{ScreenVertex.nzsl => ShaderScreenVertex.nzsl} (100%) rename runtime/Includes/Embedded/{ScreenVertex.spv.h => ShaderScreenVertex.spv.h} (100%) diff --git a/runtime/Includes/Core/Logs.h b/runtime/Includes/Core/Logs.h index 4b3746b..5692546 100644 --- a/runtime/Includes/Core/Logs.h +++ b/runtime/Includes/Core/Logs.h @@ -30,11 +30,16 @@ namespace mlx static void Report(LogType type, std::string message); static void Report(LogType type, unsigned int line, std::string_view file, std::string_view function, std::string message); + static void BeginSection(); + static void EndSection(); ~Logs() = delete; + + private: + static std::uint32_t s_nesting; }; - #if defined(DEBUG) + #ifdef DEBUG 
template void Assert(bool cond, unsigned int line, std::string_view file, std::string_view function, std::string message, const Args&... args); #else diff --git a/runtime/Includes/Core/Logs.inl b/runtime/Includes/Core/Logs.inl index 54983cf..1275383 100644 --- a/runtime/Includes/Core/Logs.inl +++ b/runtime/Includes/Core/Logs.inl @@ -102,7 +102,7 @@ namespace mlx } } - #if defined(DEBUG) + #ifdef DEBUG template void Assert(bool cond, unsigned int line, std::string_view file, std::string_view function, std::string message, const Args&... args) { diff --git a/runtime/Includes/Embedded/2DFragment.nzsl b/runtime/Includes/Embedded/Shader2DFragment.nzsl similarity index 100% rename from runtime/Includes/Embedded/2DFragment.nzsl rename to runtime/Includes/Embedded/Shader2DFragment.nzsl diff --git a/runtime/Includes/Embedded/2DFragment.spv.h b/runtime/Includes/Embedded/Shader2DFragment.spv.h similarity index 100% rename from runtime/Includes/Embedded/2DFragment.spv.h rename to runtime/Includes/Embedded/Shader2DFragment.spv.h diff --git a/runtime/Includes/Embedded/2DVertex.nzsl b/runtime/Includes/Embedded/Shader2DVertex.nzsl similarity index 100% rename from runtime/Includes/Embedded/2DVertex.nzsl rename to runtime/Includes/Embedded/Shader2DVertex.nzsl diff --git a/runtime/Includes/Embedded/2DVertex.spv.h b/runtime/Includes/Embedded/Shader2DVertex.spv.h similarity index 100% rename from runtime/Includes/Embedded/2DVertex.spv.h rename to runtime/Includes/Embedded/Shader2DVertex.spv.h diff --git a/runtime/Includes/Embedded/ScreenFragment.nzsl b/runtime/Includes/Embedded/ShaderScreenFragment.nzsl similarity index 100% rename from runtime/Includes/Embedded/ScreenFragment.nzsl rename to runtime/Includes/Embedded/ShaderScreenFragment.nzsl diff --git a/runtime/Includes/Embedded/ScreenFragment.spv.h b/runtime/Includes/Embedded/ShaderScreenFragment.spv.h similarity index 100% rename from runtime/Includes/Embedded/ScreenFragment.spv.h rename to runtime/Includes/Embedded/ShaderScreenFragment.spv.h diff --git a/runtime/Includes/Embedded/ScreenVertex.nzsl b/runtime/Includes/Embedded/ShaderScreenVertex.nzsl similarity index 100% rename from runtime/Includes/Embedded/ScreenVertex.nzsl rename to runtime/Includes/Embedded/ShaderScreenVertex.nzsl diff --git a/runtime/Includes/Embedded/ScreenVertex.spv.h b/runtime/Includes/Embedded/ShaderScreenVertex.spv.h similarity index 100% rename from runtime/Includes/Embedded/ScreenVertex.spv.h rename to runtime/Includes/Embedded/ShaderScreenVertex.spv.h diff --git a/runtime/Includes/Graphics/Scene.h b/runtime/Includes/Graphics/Scene.h index c4e6d27..149ef7a 100644 --- a/runtime/Includes/Graphics/Scene.h +++ b/runtime/Includes/Graphics/Scene.h @@ -33,13 +33,11 @@ namespace mlx inline const Vec4f& GetClearColor() const noexcept { return m_clear_color; } [[nodiscard]] MLX_FORCEINLINE const std::vector>& GetDrawables() const noexcept { return m_drawables; } - [[nodiscard]] MLX_FORCEINLINE ViewerData& GetViewerData() noexcept { return m_viewer_data; } ~Scene() = default; private: std::vector> m_drawables; - ViewerData m_viewer_data; std::shared_ptr p_bound_font; Vec4f m_clear_color = { 0.0f, 0.0f, 0.0f, 1.0f }; }; diff --git a/runtime/Sources/Core/Logs.cpp b/runtime/Sources/Core/Logs.cpp index 37a301b..df87f31 100644 --- a/runtime/Sources/Core/Logs.cpp +++ b/runtime/Sources/Core/Logs.cpp @@ -12,6 +12,10 @@ namespace mlx }; } + std::uint32_t Logs::s_nesting = 0; + + constexpr int LOGS_TABS_WIDTH = 4; + void Logs::Report(LogType type, std::string message) { Report(type, 0, {}, {}, 
std::move(message)); @@ -38,7 +42,14 @@ namespace mlx switch(type) { - case LogType::Debug: std::cout << Ansi::blue << "[MLX Debug] " << Ansi::def << code_infos << message << '\n'; break; + case LogType::Debug: + { + std::cout << Ansi::blue << "[MLX Debug] " << Ansi::def << std::flush; + std::printf("%*s", s_nesting * LOGS_TABS_WIDTH, ""); + std::fflush(stdout); + std::cout << code_infos << message << std::endl; + break; + } case LogType::Message: std::cout << Ansi::blue << "[MLX Message] " << Ansi::def << code_infos << message << '\n'; break; case LogType::Warning: std::cout << Ansi::magenta << "[MLX Warning] " << Ansi::def << code_infos << message << '\n'; break; case LogType::Error: std::cerr << Ansi::red << "[MLX Error] " << Ansi::def << code_infos << message << '\n'; break; @@ -52,4 +63,15 @@ namespace mlx EventBus::Send("__MlxApplication", Internal::FatalErrorEvent{}); } } + + void Logs::BeginSection() + { + s_nesting++; + } + + void Logs::EndSection() + { + if(s_nesting > 0) + s_nesting--; + } } diff --git a/runtime/Sources/Renderer/Pipelines/Graphics.cpp b/runtime/Sources/Renderer/Pipelines/Graphics.cpp index 3a7e51d..8913043 100644 --- a/runtime/Sources/Renderer/Pipelines/Graphics.cpp +++ b/runtime/Sources/Renderer/Pipelines/Graphics.cpp @@ -133,15 +133,19 @@ namespace mlx DebugLog("Vulkan: framebuffer destroyed"); } m_framebuffers.clear(); + kvfDestroyPipelineLayout(RenderCore::Get().GetDevice(), m_pipeline_layout); m_pipeline_layout = VK_NULL_HANDLE; DebugLog("Vulkan: graphics pipeline layout destroyed"); + kvfDestroyRenderPass(RenderCore::Get().GetDevice(), m_renderpass); m_renderpass = VK_NULL_HANDLE; DebugLog("Vulkan: renderpass destroyed"); + kvfDestroyPipeline(RenderCore::Get().GetDevice(), m_pipeline); m_pipeline = VK_NULL_HANDLE; DebugLog("Vulkan: graphics pipeline destroyed"); + p_renderer = nullptr; m_clears.clear(); m_attachments.clear(); diff --git a/runtime/Sources/Renderer/RenderCore.cpp b/runtime/Sources/Renderer/RenderCore.cpp index e4abfaf..0d56361 100644 --- a/runtime/Sources/Renderer/RenderCore.cpp +++ b/runtime/Sources/Renderer/RenderCore.cpp @@ -56,6 +56,7 @@ namespace mlx return; s_instance = this; + Logs::BeginSection(); loader = std::make_unique(); LoadKVFGlobalVulkanFunctionPointers(); @@ -79,6 +80,7 @@ namespace mlx VkSurfaceKHR surface = window.CreateVulkanSurface(m_instance); + Logs::BeginSection(); m_physical_device = kvfPickGoodDefaultPhysicalDevice(m_instance, surface); // just for style @@ -94,10 +96,12 @@ namespace mlx loader->LoadDevice(m_device); LoadKVFDeviceVulkanFunctionPointers(); + Logs::EndSection(); vkDestroySurfaceKHR(m_instance, surface, nullptr); m_allocator.Init(); + Logs::EndSection(); } #undef MLX_LOAD_FUNCTION diff --git a/runtime/Sources/Renderer/RenderPasses/2DPass.cpp b/runtime/Sources/Renderer/RenderPasses/2DPass.cpp index 09f4afe..d38ddc9 100644 --- a/runtime/Sources/Renderer/RenderPasses/2DPass.cpp +++ b/runtime/Sources/Renderer/RenderPasses/2DPass.cpp @@ -28,7 +28,7 @@ namespace mlx }, { ShaderPushConstantLayout({ 0, sizeof(SpriteData) }) } ); std::vector vertex_shader_code = { - #include + #include }; p_vertex_shader = std::make_shared(vertex_shader_code, ShaderType::Vertex, std::move(vertex_shader_layout)); ShaderLayout fragment_shader_layout( @@ -41,7 +41,7 @@ namespace mlx }, {} ); std::vector fragment_shader_code = { - #include + #include }; p_fragment_shader = std::make_shared(fragment_shader_code, ShaderType::Fragment, std::move(fragment_shader_layout)); diff --git a/runtime/Sources/Renderer/RenderPasses/FinalPass.cpp 
b/runtime/Sources/Renderer/RenderPasses/FinalPass.cpp index e59bb5e..ed368f5 100644 --- a/runtime/Sources/Renderer/RenderPasses/FinalPass.cpp +++ b/runtime/Sources/Renderer/RenderPasses/FinalPass.cpp @@ -14,7 +14,7 @@ namespace mlx {}, {} ); std::vector vertex_shader_code = { - #include + #include }; p_vertex_shader = std::make_shared(vertex_shader_code, ShaderType::Vertex, std::move(vertex_shader_layout)); ShaderLayout fragment_shader_layout( @@ -27,7 +27,7 @@ namespace mlx }, {} ); std::vector fragment_shader_code = { - #include + #include }; p_fragment_shader = std::make_shared(fragment_shader_code, ShaderType::Fragment, std::move(fragment_shader_layout)); From 35b7408eab04d582f735360ca60c9f9d008541bb Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Wed, 13 Nov 2024 12:17:59 +0100 Subject: [PATCH 073/131] adding transformations --- example/main.c | 6 +- includes/mlx.h | 17 ++- runtime/Includes/Core/Application.h | 2 +- runtime/Includes/Core/Application.inl | 4 +- runtime/Includes/Core/Graphics.h | 2 +- runtime/Includes/Core/Graphics.inl | 9 +- runtime/Includes/Core/Logs.h | 5 - runtime/Includes/Embedded/Shader2DVertex.nzsl | 8 +- .../Includes/Embedded/Shader2DVertex.spv.h | 135 +++++++++--------- runtime/Includes/Graphics/Drawable.h | 15 +- runtime/Includes/Graphics/Scene.h | 2 +- runtime/Includes/PreCompiled.h | 4 +- runtime/Sources/Core/Bridge.cpp | 8 +- runtime/Sources/Core/Logs.cpp | 24 +--- runtime/Sources/Graphics/Scene.cpp | 9 +- runtime/Sources/Renderer/Memory.cpp | 1 - runtime/Sources/Renderer/RenderCore.cpp | 4 - .../Sources/Renderer/RenderPasses/2DPass.cpp | 16 ++- 18 files changed, 141 insertions(+), 130 deletions(-) diff --git a/example/main.c b/example/main.c index c3fc308..bb56a06 100644 --- a/example/main.c +++ b/example/main.c @@ -27,7 +27,7 @@ int update(void* param) mlx_string_put(mlx->mlx, mlx->win, 160, 120, 0xFFFF2066, "this text should be hidden"); mlx_put_image_to_window(mlx->mlx, mlx->win, mlx->logo_png, 100, 100); - mlx_put_image_to_window(mlx->mlx, mlx->win, mlx->logo_bmp, 220, 40); + mlx_transform_put_image_to_window(mlx->mlx, mlx->win, mlx->logo_bmp, 220, 40, 0.5f, 75.0f); mlx_put_image_to_window(mlx->mlx, mlx->win, mlx->img, 150, 60); mlx_set_font(mlx->mlx, "default"); @@ -40,7 +40,7 @@ int update(void* param) color += (color < 255); } - mlx_put_image_to_window(mlx->mlx, mlx->win, mlx->logo_jpg, 210, 150); + mlx_transform_put_image_to_window(mlx->mlx, mlx->win, mlx->logo_jpg, 210, 150, 2.0f, 0.0f); mlx_set_font_scale(mlx->mlx, "default", 8.f); mlx_string_put(mlx->mlx, mlx->win, 210, 175, 0xFFAF2BFF, "hidden"); @@ -131,7 +131,7 @@ int main(void) int dummy; mlx.mlx = mlx_init(); - mlx.win = mlx_new_resizable_window(mlx.mlx, 400, 400, "My window"); + mlx.win = mlx_new_window(mlx.mlx, 400, 400, "My window"); mlx_set_fps_goal(mlx.mlx, 60); diff --git a/includes/mlx.h b/includes/mlx.h index 374e7f1..9e3f6df 100644 --- a/includes/mlx.h +++ b/includes/mlx.h @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/04 16:56:35 by maldavid #+# #+# */ -/* Updated: 2024/11/04 21:09:59 by maldavid ### ########.fr */ +/* Updated: 2024/11/05 18:18:22 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -234,6 +234,21 @@ MLX_API void mlx_set_image_pixel(void* mlx, void* img, int x, int y, int color); */ MLX_API void mlx_put_image_to_window(void* mlx, void* win, void* img, int x, int y); +/** + * @brief Transform and put image to the given window + * + * @param mlx Internal MLX 
application + * @param win Internal window + * @param img Internal image + * @param x X coordinate + * @param y Y coordinate + * @param scale Scale of the image + * @param angle Rotation angle of the image (clockwise) + * + * @return (void) + */ +MLX_API void mlx_transform_put_image_to_window(void* mlx, void* win, void* img, int x, int y, float scale, float angle); + /** * @brief Destroys internal image * diff --git a/runtime/Includes/Core/Application.h b/runtime/Includes/Core/Application.h index 9361b1b..e2abe45 100644 --- a/runtime/Includes/Core/Application.h +++ b/runtime/Includes/Core/Application.h @@ -36,7 +36,7 @@ namespace mlx Handle NewTexture(int w, int h); Handle NewStbTexture(char* file, int* w, int* h); // stb textures are image files (png, jpg, bpm, ...) - inline void TexturePut(Handle win, Handle img, int x, int y); + inline void TexturePut(Handle win, Handle img, int x, int y, float scale, float angle); inline int GetTexturePixel(Handle img, int x, int y); inline void SetTexturePixel(Handle img, int x, int y, std::uint32_t color); void DestroyTexture(Handle ptr); diff --git a/runtime/Includes/Core/Application.inl b/runtime/Includes/Core/Application.inl index 41c164b..bc4ab5a 100644 --- a/runtime/Includes/Core/Application.inl +++ b/runtime/Includes/Core/Application.inl @@ -169,7 +169,7 @@ namespace mlx } } - void Application::TexturePut(Handle win, Handle img, int x, int y) + void Application::TexturePut(Handle win, Handle img, int x, int y, float scale, float angle) { MLX_PROFILE_FUNCTION(); CHECK_WINDOW_PTR(win); @@ -178,7 +178,7 @@ namespace mlx if(!texture->IsInit()) Error("trying to put a texture that has been destroyed"); else - m_graphics[*static_cast(win)]->TexturePut(texture, x, y); + m_graphics[*static_cast(win)]->TexturePut(texture, x, y, scale, angle); } int Application::GetTexturePixel(Handle img, int x, int y) diff --git a/runtime/Includes/Core/Graphics.h b/runtime/Includes/Core/Graphics.h index 4924e3a..d348000 100644 --- a/runtime/Includes/Core/Graphics.h +++ b/runtime/Includes/Core/Graphics.h @@ -26,7 +26,7 @@ namespace mlx inline void PixelPut(int x, int y, std::uint32_t color) noexcept; inline void StringPut(int x, int y, std::uint32_t color, std::string str); - inline void TexturePut(NonOwningPtr texture, int x, int y); + inline void TexturePut(NonOwningPtr texture, int x, int y, float scale, float angle); inline void TryEraseSpritesInScene(NonOwningPtr texture) noexcept; diff --git a/runtime/Includes/Core/Graphics.inl b/runtime/Includes/Core/Graphics.inl index b4ca15f..a3fd304 100644 --- a/runtime/Includes/Core/Graphics.inl +++ b/runtime/Includes/Core/Graphics.inl @@ -54,15 +54,16 @@ namespace mlx Text& new_text = p_scene->CreateText(str); new_text.SetPosition(Vec2f{ static_cast(x), static_cast(y) }); new_text.SetColor(std::move(vec_color)); + new_text.SetCenter(Vec2f{ 0.0f, 0.0f }); } else if(!p_scene->IsTextAtGivenDrawLayer(str, m_draw_layer)) p_scene->BringToDrawLayer(text.Get(), m_draw_layer); } - void GraphicsSupport::TexturePut(NonOwningPtr texture, int x, int y) + void GraphicsSupport::TexturePut(NonOwningPtr texture, int x, int y, float scale, float angle) { MLX_PROFILE_FUNCTION(); - NonOwningPtr sprite = p_scene->GetSpriteFromTextureAndPosition(texture, Vec2f{ static_cast(x), static_cast(y) }); + NonOwningPtr sprite = p_scene->GetSpriteFromTexturePositionScaleRotation(texture, Vec2f{ static_cast(x), static_cast(y) }, scale, angle); if(!sprite) { if(m_pixelput_called) @@ -72,7 +73,9 @@ namespace mlx } Sprite& new_sprite = 
p_scene->CreateSprite(texture); new_sprite.SetPosition(Vec2f{ static_cast(x), static_cast(y) }); - + new_sprite.SetScale(Vec2f{ scale, scale }); + new_sprite.SetRotation(angle); + new_sprite.SetCenter(Vec2f{ texture->GetWidth() / 2.0f, texture->GetHeight() / 2.0f }); } else if(!p_scene->IsTextureAtGivenDrawLayer(texture, m_draw_layer)) p_scene->BringToDrawLayer(sprite.Get(), m_draw_layer); diff --git a/runtime/Includes/Core/Logs.h b/runtime/Includes/Core/Logs.h index 5692546..821dcbe 100644 --- a/runtime/Includes/Core/Logs.h +++ b/runtime/Includes/Core/Logs.h @@ -30,13 +30,8 @@ namespace mlx static void Report(LogType type, std::string message); static void Report(LogType type, unsigned int line, std::string_view file, std::string_view function, std::string message); - static void BeginSection(); - static void EndSection(); ~Logs() = delete; - - private: - static std::uint32_t s_nesting; }; #ifdef DEBUG diff --git a/runtime/Includes/Embedded/Shader2DVertex.nzsl b/runtime/Includes/Embedded/Shader2DVertex.nzsl index 77def73..065d89b 100644 --- a/runtime/Includes/Embedded/Shader2DVertex.nzsl +++ b/runtime/Includes/Embedded/Shader2DVertex.nzsl @@ -21,8 +21,8 @@ struct ViewerData struct SpriteData { - color: vec4[f32], - position: vec2[f32] + model_matrix: mat4[f32], + color: vec4[f32] } external @@ -34,11 +34,11 @@ external [entry(vert)] fn main(input: VertIn) -> VertOut { - let position: vec4[f32] = vec4[f32](input.pos.xy + model.position, 1.0, 1.0); + let position: vec4[f32] = vec4[f32](input.pos.xy, 1.0, 1.0); input.uv *= -1.0; let output: VertOut; output.uv = input.uv; output.color = model.color; - output.pos = viewer_data.projection_matrix * position; + output.pos = viewer_data.projection_matrix * model.model_matrix * position; return output; } diff --git a/runtime/Includes/Embedded/Shader2DVertex.spv.h b/runtime/Includes/Embedded/Shader2DVertex.spv.h index de2efec..2d0f916 100644 --- a/runtime/Includes/Embedded/Shader2DVertex.spv.h +++ b/runtime/Includes/Embedded/Shader2DVertex.spv.h @@ -1,70 +1,71 @@ 3,2,35,7,0,0,1,0,39,0,0,0,70,0,0,0,0,0,0,0,17,0,2,0,1,0,0,0,14,0, 3,0,0,0,0,0,1,0,0,0,15,0,10,0,0,0,0,0,34,0,0,0,109,97,105,110,0,0,0,0, -14,0,0,0,19,0,0,0,25,0,0,0,27,0,0,0,28,0,0,0,3,0,3,0,0,0,0,0,100,0, +13,0,0,0,19,0,0,0,25,0,0,0,27,0,0,0,28,0,0,0,3,0,3,0,0,0,0,0,100,0, 0,0,5,0,5,0,4,0,0,0,86,105,101,119,101,114,68,97,116,97,0,0,6,0,8,0,4,0,0,0, -0,0,0,0,112,114,111,106,101,99,116,105,111,110,95,109,97,116,114,105,120,0,0,0,5,0,5,0,8,0, -0,0,83,112,114,105,116,101,68,97,116,97,0,0,6,0,5,0,8,0,0,0,0,0,0,0,99,111,108,111, -114,0,0,0,6,0,6,0,8,0,0,0,1,0,0,0,112,111,115,105,116,105,111,110,0,0,0,0,5,0, -4,0,22,0,0,0,86,101,114,116,73,110,0,0,6,0,4,0,22,0,0,0,0,0,0,0,112,111,115,0, -6,0,4,0,22,0,0,0,1,0,0,0,117,118,0,0,5,0,4,0,29,0,0,0,86,101,114,116,79,117, -116,0,6,0,5,0,29,0,0,0,0,0,0,0,99,111,108,111,114,0,0,0,6,0,4,0,29,0,0,0, -1,0,0,0,117,118,0,0,6,0,4,0,29,0,0,0,2,0,0,0,112,111,115,0,5,0,5,0,6,0, -0,0,118,105,101,119,101,114,95,100,97,116,97,0,5,0,4,0,10,0,0,0,109,111,100,101,108,0,0,0, -5,0,3,0,14,0,0,0,112,111,115,0,5,0,3,0,19,0,0,0,117,118,0,0,5,0,4,0,25,0, -0,0,99,111,108,111,114,0,0,0,5,0,3,0,27,0,0,0,117,118,0,0,5,0,5,0,28,0,0,0, -112,111,115,105,116,105,111,110,0,0,0,0,5,0,4,0,34,0,0,0,109,97,105,110,0,0,0,0,71,0, -4,0,6,0,0,0,33,0,0,0,0,0,0,0,71,0,4,0,6,0,0,0,34,0,0,0,0,0,0,0, -71,0,4,0,28,0,0,0,11,0,0,0,0,0,0,0,71,0,4,0,14,0,0,0,30,0,0,0,0,0, -0,0,71,0,4,0,19,0,0,0,30,0,0,0,1,0,0,0,71,0,4,0,25,0,0,0,30,0,0,0, -0,0,0,0,71,0,4,0,27,0,0,0,30,0,0,0,1,0,0,0,71,0,3,0,4,0,0,0,2,0, 
-0,0,72,0,4,0,4,0,0,0,0,0,0,0,5,0,0,0,72,0,5,0,4,0,0,0,0,0,0,0, -7,0,0,0,16,0,0,0,72,0,5,0,4,0,0,0,0,0,0,0,35,0,0,0,0,0,0,0,71,0, -3,0,8,0,0,0,2,0,0,0,72,0,5,0,8,0,0,0,0,0,0,0,35,0,0,0,0,0,0,0, -72,0,5,0,8,0,0,0,1,0,0,0,35,0,0,0,16,0,0,0,72,0,5,0,22,0,0,0,0,0, -0,0,35,0,0,0,0,0,0,0,72,0,5,0,22,0,0,0,1,0,0,0,35,0,0,0,16,0,0,0, -72,0,5,0,29,0,0,0,0,0,0,0,35,0,0,0,0,0,0,0,72,0,5,0,29,0,0,0,1,0, -0,0,35,0,0,0,16,0,0,0,72,0,5,0,29,0,0,0,2,0,0,0,35,0,0,0,32,0,0,0, -22,0,3,0,1,0,0,0,32,0,0,0,23,0,4,0,2,0,0,0,1,0,0,0,4,0,0,0,24,0, -4,0,3,0,0,0,2,0,0,0,4,0,0,0,30,0,3,0,4,0,0,0,3,0,0,0,32,0,4,0, -5,0,0,0,2,0,0,0,4,0,0,0,23,0,4,0,7,0,0,0,1,0,0,0,2,0,0,0,30,0, -4,0,8,0,0,0,2,0,0,0,7,0,0,0,32,0,4,0,9,0,0,0,9,0,0,0,8,0,0,0, -19,0,2,0,11,0,0,0,33,0,3,0,12,0,0,0,11,0,0,0,32,0,4,0,13,0,0,0,1,0, -0,0,2,0,0,0,21,0,4,0,15,0,0,0,32,0,0,0,1,0,0,0,43,0,4,0,15,0,0,0, -16,0,0,0,0,0,0,0,32,0,4,0,17,0,0,0,7,0,0,0,2,0,0,0,32,0,4,0,18,0, -0,0,1,0,0,0,7,0,0,0,43,0,4,0,15,0,0,0,20,0,0,0,1,0,0,0,32,0,4,0, -21,0,0,0,7,0,0,0,7,0,0,0,30,0,4,0,22,0,0,0,2,0,0,0,7,0,0,0,32,0, -4,0,23,0,0,0,7,0,0,0,22,0,0,0,32,0,4,0,24,0,0,0,3,0,0,0,2,0,0,0, -32,0,4,0,26,0,0,0,3,0,0,0,7,0,0,0,30,0,5,0,29,0,0,0,2,0,0,0,7,0, -0,0,2,0,0,0,43,0,4,0,1,0,0,0,30,0,0,0,0,0,128,63,43,0,4,0,1,0,0,0, -31,0,0,0,0,0,128,191,32,0,4,0,32,0,0,0,7,0,0,0,29,0,0,0,43,0,4,0,15,0, -0,0,33,0,0,0,2,0,0,0,32,0,4,0,44,0,0,0,9,0,0,0,7,0,0,0,32,0,4,0, -56,0,0,0,9,0,0,0,2,0,0,0,32,0,4,0,60,0,0,0,2,0,0,0,3,0,0,0,59,0, -4,0,5,0,0,0,6,0,0,0,2,0,0,0,59,0,4,0,9,0,0,0,10,0,0,0,9,0,0,0, -59,0,4,0,13,0,0,0,14,0,0,0,1,0,0,0,59,0,4,0,18,0,0,0,19,0,0,0,1,0, -0,0,59,0,4,0,24,0,0,0,25,0,0,0,3,0,0,0,59,0,4,0,26,0,0,0,27,0,0,0, -3,0,0,0,59,0,4,0,24,0,0,0,28,0,0,0,3,0,0,0,54,0,5,0,11,0,0,0,34,0, -0,0,0,0,0,0,12,0,0,0,248,0,2,0,35,0,0,0,59,0,4,0,17,0,0,0,36,0,0,0, -7,0,0,0,59,0,4,0,32,0,0,0,37,0,0,0,7,0,0,0,59,0,4,0,23,0,0,0,38,0, -0,0,7,0,0,0,65,0,5,0,17,0,0,0,39,0,0,0,38,0,0,0,16,0,0,0,63,0,3,0, -39,0,0,0,14,0,0,0,65,0,5,0,21,0,0,0,40,0,0,0,38,0,0,0,20,0,0,0,63,0, -3,0,40,0,0,0,19,0,0,0,65,0,5,0,17,0,0,0,41,0,0,0,38,0,0,0,16,0,0,0, -61,0,4,0,2,0,0,0,42,0,0,0,41,0,0,0,79,0,7,0,7,0,0,0,43,0,0,0,42,0, -0,0,42,0,0,0,0,0,0,0,1,0,0,0,65,0,5,0,44,0,0,0,45,0,0,0,10,0,0,0, -20,0,0,0,61,0,4,0,7,0,0,0,46,0,0,0,45,0,0,0,129,0,5,0,7,0,0,0,47,0, -0,0,43,0,0,0,46,0,0,0,80,0,6,0,2,0,0,0,48,0,0,0,47,0,0,0,30,0,0,0, -30,0,0,0,62,0,3,0,36,0,0,0,48,0,0,0,65,0,5,0,21,0,0,0,49,0,0,0,38,0, -0,0,20,0,0,0,61,0,4,0,7,0,0,0,50,0,0,0,49,0,0,0,142,0,5,0,7,0,0,0, -51,0,0,0,50,0,0,0,31,0,0,0,65,0,5,0,21,0,0,0,52,0,0,0,38,0,0,0,20,0, -0,0,62,0,3,0,52,0,0,0,51,0,0,0,65,0,5,0,21,0,0,0,53,0,0,0,38,0,0,0, -20,0,0,0,61,0,4,0,7,0,0,0,54,0,0,0,53,0,0,0,65,0,5,0,21,0,0,0,55,0, -0,0,37,0,0,0,20,0,0,0,62,0,3,0,55,0,0,0,54,0,0,0,65,0,5,0,56,0,0,0, -57,0,0,0,10,0,0,0,16,0,0,0,61,0,4,0,2,0,0,0,58,0,0,0,57,0,0,0,65,0, -5,0,17,0,0,0,59,0,0,0,37,0,0,0,16,0,0,0,62,0,3,0,59,0,0,0,58,0,0,0, -65,0,5,0,60,0,0,0,61,0,0,0,6,0,0,0,16,0,0,0,61,0,4,0,3,0,0,0,62,0, -0,0,61,0,0,0,61,0,4,0,2,0,0,0,63,0,0,0,36,0,0,0,145,0,5,0,2,0,0,0, -64,0,0,0,62,0,0,0,63,0,0,0,65,0,5,0,17,0,0,0,65,0,0,0,37,0,0,0,33,0, -0,0,62,0,3,0,65,0,0,0,64,0,0,0,61,0,4,0,29,0,0,0,66,0,0,0,37,0,0,0, -81,0,5,0,2,0,0,0,67,0,0,0,66,0,0,0,0,0,0,0,62,0,3,0,25,0,0,0,67,0, -0,0,81,0,5,0,7,0,0,0,68,0,0,0,66,0,0,0,1,0,0,0,62,0,3,0,27,0,0,0, -68,0,0,0,81,0,5,0,2,0,0,0,69,0,0,0,66,0,0,0,2,0,0,0,62,0,3,0,28,0, -0,0,69,0,0,0,253,0,1,0,56,0,1,0 +0,0,0,0,112,114,111,106,101,99,116,105,111,110,95,109,97,116,114,105,120,0,0,0,5,0,5,0,7,0, 
+0,0,83,112,114,105,116,101,68,97,116,97,0,0,6,0,7,0,7,0,0,0,0,0,0,0,109,111,100,101, +108,95,109,97,116,114,105,120,0,0,0,0,6,0,5,0,7,0,0,0,1,0,0,0,99,111,108,111,114,0, +0,0,5,0,4,0,22,0,0,0,86,101,114,116,73,110,0,0,6,0,4,0,22,0,0,0,0,0,0,0, +112,111,115,0,6,0,4,0,22,0,0,0,1,0,0,0,117,118,0,0,5,0,4,0,29,0,0,0,86,101, +114,116,79,117,116,0,6,0,5,0,29,0,0,0,0,0,0,0,99,111,108,111,114,0,0,0,6,0,4,0, +29,0,0,0,1,0,0,0,117,118,0,0,6,0,4,0,29,0,0,0,2,0,0,0,112,111,115,0,5,0, +5,0,6,0,0,0,118,105,101,119,101,114,95,100,97,116,97,0,5,0,4,0,9,0,0,0,109,111,100,101, +108,0,0,0,5,0,3,0,13,0,0,0,112,111,115,0,5,0,3,0,19,0,0,0,117,118,0,0,5,0, +4,0,25,0,0,0,99,111,108,111,114,0,0,0,5,0,3,0,27,0,0,0,117,118,0,0,5,0,5,0, +28,0,0,0,112,111,115,105,116,105,111,110,0,0,0,0,5,0,4,0,34,0,0,0,109,97,105,110,0,0, +0,0,71,0,4,0,6,0,0,0,33,0,0,0,0,0,0,0,71,0,4,0,6,0,0,0,34,0,0,0, +0,0,0,0,71,0,4,0,28,0,0,0,11,0,0,0,0,0,0,0,71,0,4,0,13,0,0,0,30,0, +0,0,0,0,0,0,71,0,4,0,19,0,0,0,30,0,0,0,1,0,0,0,71,0,4,0,25,0,0,0, +30,0,0,0,0,0,0,0,71,0,4,0,27,0,0,0,30,0,0,0,1,0,0,0,71,0,3,0,4,0, +0,0,2,0,0,0,72,0,4,0,4,0,0,0,0,0,0,0,5,0,0,0,72,0,5,0,4,0,0,0, +0,0,0,0,7,0,0,0,16,0,0,0,72,0,5,0,4,0,0,0,0,0,0,0,35,0,0,0,0,0, +0,0,71,0,3,0,7,0,0,0,2,0,0,0,72,0,4,0,7,0,0,0,0,0,0,0,5,0,0,0, +72,0,5,0,7,0,0,0,0,0,0,0,7,0,0,0,16,0,0,0,72,0,5,0,7,0,0,0,0,0, +0,0,35,0,0,0,0,0,0,0,72,0,5,0,7,0,0,0,1,0,0,0,35,0,0,0,64,0,0,0, +72,0,5,0,22,0,0,0,0,0,0,0,35,0,0,0,0,0,0,0,72,0,5,0,22,0,0,0,1,0, +0,0,35,0,0,0,16,0,0,0,72,0,5,0,29,0,0,0,0,0,0,0,35,0,0,0,0,0,0,0, +72,0,5,0,29,0,0,0,1,0,0,0,35,0,0,0,16,0,0,0,72,0,5,0,29,0,0,0,2,0, +0,0,35,0,0,0,32,0,0,0,22,0,3,0,1,0,0,0,32,0,0,0,23,0,4,0,2,0,0,0, +1,0,0,0,4,0,0,0,24,0,4,0,3,0,0,0,2,0,0,0,4,0,0,0,30,0,3,0,4,0, +0,0,3,0,0,0,32,0,4,0,5,0,0,0,2,0,0,0,4,0,0,0,30,0,4,0,7,0,0,0, +3,0,0,0,2,0,0,0,32,0,4,0,8,0,0,0,9,0,0,0,7,0,0,0,19,0,2,0,10,0, +0,0,33,0,3,0,11,0,0,0,10,0,0,0,32,0,4,0,12,0,0,0,1,0,0,0,2,0,0,0, +21,0,4,0,14,0,0,0,32,0,0,0,1,0,0,0,43,0,4,0,14,0,0,0,15,0,0,0,0,0, +0,0,32,0,4,0,16,0,0,0,7,0,0,0,2,0,0,0,23,0,4,0,17,0,0,0,1,0,0,0, +2,0,0,0,32,0,4,0,18,0,0,0,1,0,0,0,17,0,0,0,43,0,4,0,14,0,0,0,20,0, +0,0,1,0,0,0,32,0,4,0,21,0,0,0,7,0,0,0,17,0,0,0,30,0,4,0,22,0,0,0, +2,0,0,0,17,0,0,0,32,0,4,0,23,0,0,0,7,0,0,0,22,0,0,0,32,0,4,0,24,0, +0,0,3,0,0,0,2,0,0,0,32,0,4,0,26,0,0,0,3,0,0,0,17,0,0,0,30,0,5,0, +29,0,0,0,2,0,0,0,17,0,0,0,2,0,0,0,43,0,4,0,1,0,0,0,30,0,0,0,0,0, +128,63,43,0,4,0,1,0,0,0,31,0,0,0,0,0,128,191,32,0,4,0,32,0,0,0,7,0,0,0, +29,0,0,0,43,0,4,0,14,0,0,0,33,0,0,0,2,0,0,0,32,0,4,0,52,0,0,0,9,0, +0,0,2,0,0,0,32,0,4,0,56,0,0,0,2,0,0,0,3,0,0,0,32,0,4,0,59,0,0,0, +9,0,0,0,3,0,0,0,59,0,4,0,5,0,0,0,6,0,0,0,2,0,0,0,59,0,4,0,8,0, +0,0,9,0,0,0,9,0,0,0,59,0,4,0,12,0,0,0,13,0,0,0,1,0,0,0,59,0,4,0, +18,0,0,0,19,0,0,0,1,0,0,0,59,0,4,0,24,0,0,0,25,0,0,0,3,0,0,0,59,0, +4,0,26,0,0,0,27,0,0,0,3,0,0,0,59,0,4,0,24,0,0,0,28,0,0,0,3,0,0,0, +54,0,5,0,10,0,0,0,34,0,0,0,0,0,0,0,11,0,0,0,248,0,2,0,35,0,0,0,59,0, +4,0,16,0,0,0,36,0,0,0,7,0,0,0,59,0,4,0,32,0,0,0,37,0,0,0,7,0,0,0, +59,0,4,0,23,0,0,0,38,0,0,0,7,0,0,0,65,0,5,0,16,0,0,0,39,0,0,0,38,0, +0,0,15,0,0,0,63,0,3,0,39,0,0,0,13,0,0,0,65,0,5,0,21,0,0,0,40,0,0,0, +38,0,0,0,20,0,0,0,63,0,3,0,40,0,0,0,19,0,0,0,65,0,5,0,16,0,0,0,41,0, +0,0,38,0,0,0,15,0,0,0,61,0,4,0,2,0,0,0,42,0,0,0,41,0,0,0,79,0,7,0, +17,0,0,0,43,0,0,0,42,0,0,0,42,0,0,0,0,0,0,0,1,0,0,0,80,0,6,0,2,0, +0,0,44,0,0,0,43,0,0,0,30,0,0,0,30,0,0,0,62,0,3,0,36,0,0,0,44,0,0,0, +65,0,5,0,21,0,0,0,45,0,0,0,38,0,0,0,20,0,0,0,61,0,4,0,17,0,0,0,46,0, 
+0,0,45,0,0,0,142,0,5,0,17,0,0,0,47,0,0,0,46,0,0,0,31,0,0,0,65,0,5,0, +21,0,0,0,48,0,0,0,38,0,0,0,20,0,0,0,62,0,3,0,48,0,0,0,47,0,0,0,65,0, +5,0,21,0,0,0,49,0,0,0,38,0,0,0,20,0,0,0,61,0,4,0,17,0,0,0,50,0,0,0, +49,0,0,0,65,0,5,0,21,0,0,0,51,0,0,0,37,0,0,0,20,0,0,0,62,0,3,0,51,0, +0,0,50,0,0,0,65,0,5,0,52,0,0,0,53,0,0,0,9,0,0,0,20,0,0,0,61,0,4,0, +2,0,0,0,54,0,0,0,53,0,0,0,65,0,5,0,16,0,0,0,55,0,0,0,37,0,0,0,15,0, +0,0,62,0,3,0,55,0,0,0,54,0,0,0,65,0,5,0,56,0,0,0,57,0,0,0,6,0,0,0, +15,0,0,0,61,0,4,0,3,0,0,0,58,0,0,0,57,0,0,0,65,0,5,0,59,0,0,0,60,0, +0,0,9,0,0,0,15,0,0,0,61,0,4,0,3,0,0,0,61,0,0,0,60,0,0,0,146,0,5,0, +3,0,0,0,62,0,0,0,58,0,0,0,61,0,0,0,61,0,4,0,2,0,0,0,63,0,0,0,36,0, +0,0,145,0,5,0,2,0,0,0,64,0,0,0,62,0,0,0,63,0,0,0,65,0,5,0,16,0,0,0, +65,0,0,0,37,0,0,0,33,0,0,0,62,0,3,0,65,0,0,0,64,0,0,0,61,0,4,0,29,0, +0,0,66,0,0,0,37,0,0,0,81,0,5,0,2,0,0,0,67,0,0,0,66,0,0,0,0,0,0,0, +62,0,3,0,25,0,0,0,67,0,0,0,81,0,5,0,17,0,0,0,68,0,0,0,66,0,0,0,1,0, +0,0,62,0,3,0,27,0,0,0,68,0,0,0,81,0,5,0,2,0,0,0,69,0,0,0,66,0,0,0, +2,0,0,0,62,0,3,0,28,0,0,0,69,0,0,0,253,0,1,0,56,0,1,0 diff --git a/runtime/Includes/Graphics/Drawable.h b/runtime/Includes/Graphics/Drawable.h index 0fa4091..021700d 100644 --- a/runtime/Includes/Graphics/Drawable.h +++ b/runtime/Includes/Graphics/Drawable.h @@ -2,6 +2,8 @@ #define __MLX_DRAWABLE__ #include +#include +#include namespace mlx { @@ -12,13 +14,19 @@ namespace mlx public: inline Drawable(DrawableType type) : m_type(type) {} - inline void SetColor(Vec4f color) noexcept { m_color = color; } - inline void SetPosition(Vec2f position) noexcept { m_position = position; } + inline void SetColor(Vec4f color) noexcept { m_color = std::move(color); } + inline void SetPosition(Vec2f position) noexcept { m_position = std::move(position); } + inline void SetScale(Vec2f scale) noexcept { m_scale = std::move(scale); } + inline void SetRotation(float rotation) noexcept { m_rotation = EulerAnglesf{ 0.0f, 0.0f, rotation }; } + inline void SetCenter(Vec2f center) noexcept { m_center = std::move(center); } inline virtual void Update([[maybe_unused]] VkCommandBuffer cmd) {} [[nodiscard]] MLX_FORCEINLINE const Vec4f& GetColor() const noexcept { return m_color; } [[nodiscard]] MLX_FORCEINLINE const Vec2f& GetPosition() const noexcept { return m_position; } + [[nodiscard]] MLX_FORCEINLINE const Vec2f& GetScale() const noexcept { return m_scale; } + [[nodiscard]] MLX_FORCEINLINE const Quatf& GetRotation() const noexcept { return m_rotation; } + [[nodiscard]] MLX_FORCEINLINE const Vec2f& GetCenter() const noexcept { return m_center; } [[nodiscard]] MLX_FORCEINLINE std::shared_ptr GetMesh() const { return p_mesh; } [[nodiscard]] MLX_FORCEINLINE DrawableType GetType() const noexcept { return m_type; } @@ -38,8 +46,11 @@ namespace mlx protected: std::shared_ptr p_set; std::shared_ptr p_mesh; + Quatf m_rotation = Quatf::Identity(); Vec4f m_color = Vec4f{ 1.0f, 1.0f, 1.0f, 1.0f }; Vec2f m_position = Vec2f{ 0.0f, 0.0f }; + Vec2f m_scale = Vec2f{ 1.0f, 1.0f }; + Vec2f m_center; DrawableType m_type; }; } diff --git a/runtime/Includes/Graphics/Scene.h b/runtime/Includes/Graphics/Scene.h index 149ef7a..843d2b0 100644 --- a/runtime/Includes/Graphics/Scene.h +++ b/runtime/Includes/Graphics/Scene.h @@ -17,7 +17,7 @@ namespace mlx Scene() = default; Sprite& CreateSprite(NonOwningPtr texture) noexcept; - NonOwningPtr GetSpriteFromTextureAndPosition(NonOwningPtr texture, const Vec2f& position) const; + NonOwningPtr GetSpriteFromTexturePositionScaleRotation(NonOwningPtr texture, const Vec2f& position, float 
scale, float rotation) const; void TryEraseSpriteFromTexture(NonOwningPtr texture); bool IsTextureAtGivenDrawLayer(NonOwningPtr texture, std::uint64_t draw_layer) const; diff --git a/runtime/Includes/PreCompiled.h b/runtime/Includes/PreCompiled.h index 0330c84..0b597f6 100644 --- a/runtime/Includes/PreCompiled.h +++ b/runtime/Includes/PreCompiled.h @@ -9,7 +9,7 @@ #include #include -#include +#include #include #include @@ -61,7 +61,7 @@ #define VMA_STATIC_VULKAN_FUNCTIONS 0 #define VMA_DYNAMIC_VULKAN_FUNCTIONS 0 #define VMA_VULKAN_VERSION 1000000 -#define VMA_ASSERT(expr) ((void)0) +#define VMA_ASSERT(expr) ((void)0) // Because why not #ifdef MLX_COMPILER_CLANG #pragma clang diagnostic push diff --git a/runtime/Sources/Core/Bridge.cpp b/runtime/Sources/Core/Bridge.cpp index 1811c40..2588115 100644 --- a/runtime/Sources/Core/Bridge.cpp +++ b/runtime/Sources/Core/Bridge.cpp @@ -145,7 +145,13 @@ extern "C" void mlx_put_image_to_window(void* mlx, void* win, void* img, int x, int y) { MLX_CHECK_APPLICATION_POINTER(mlx); - static_cast(mlx)->TexturePut(win, img, x, y); + static_cast(mlx)->TexturePut(win, img, x, y, 1.0f, 0.0f); + } + + void mlx_transform_put_image_to_window(void* mlx, void* win, void* img, int x, int y, float scale, float angle) + { + MLX_CHECK_APPLICATION_POINTER(mlx); + static_cast(mlx)->TexturePut(win, img, x, y, scale, angle); } void mlx_destroy_image(void* mlx, void* img) diff --git a/runtime/Sources/Core/Logs.cpp b/runtime/Sources/Core/Logs.cpp index df87f31..47cbadf 100644 --- a/runtime/Sources/Core/Logs.cpp +++ b/runtime/Sources/Core/Logs.cpp @@ -12,10 +12,6 @@ namespace mlx }; } - std::uint32_t Logs::s_nesting = 0; - - constexpr int LOGS_TABS_WIDTH = 4; - void Logs::Report(LogType type, std::string message) { Report(type, 0, {}, {}, std::move(message)); @@ -42,14 +38,7 @@ namespace mlx switch(type) { - case LogType::Debug: - { - std::cout << Ansi::blue << "[MLX Debug] " << Ansi::def << std::flush; - std::printf("%*s", s_nesting * LOGS_TABS_WIDTH, ""); - std::fflush(stdout); - std::cout << code_infos << message << std::endl; - break; - } + case LogType::Debug: std::cout << Ansi::blue << "[MLX Debug] " << Ansi::def << code_infos << message << std::endl; break; case LogType::Message: std::cout << Ansi::blue << "[MLX Message] " << Ansi::def << code_infos << message << '\n'; break; case LogType::Warning: std::cout << Ansi::magenta << "[MLX Warning] " << Ansi::def << code_infos << message << '\n'; break; case LogType::Error: std::cerr << Ansi::red << "[MLX Error] " << Ansi::def << code_infos << message << '\n'; break; @@ -63,15 +52,4 @@ namespace mlx EventBus::Send("__MlxApplication", Internal::FatalErrorEvent{}); } } - - void Logs::BeginSection() - { - s_nesting++; - } - - void Logs::EndSection() - { - if(s_nesting > 0) - s_nesting--; - } } diff --git a/runtime/Sources/Graphics/Scene.cpp b/runtime/Sources/Graphics/Scene.cpp index dcc2f61..fba15a5 100644 --- a/runtime/Sources/Graphics/Scene.cpp +++ b/runtime/Sources/Graphics/Scene.cpp @@ -27,14 +27,17 @@ namespace mlx return *sprite; } - NonOwningPtr Scene::GetSpriteFromTextureAndPosition(NonOwningPtr texture, const Vec2f& position) const + NonOwningPtr Scene::GetSpriteFromTexturePositionScaleRotation(NonOwningPtr texture, const Vec2f& position, float scale, float rotation) const { MLX_PROFILE_FUNCTION(); - auto it = std::find_if(m_drawables.begin(), m_drawables.end(), [&texture, &position](std::shared_ptr drawable) + auto it = std::find_if(m_drawables.begin(), m_drawables.end(), [&texture, &position, scale, 
rotation](std::shared_ptr drawable) { if(!drawable || drawable->GetType() != DrawableType::Sprite) return false; - return static_cast(drawable.get())->GetTexture() == texture && drawable->GetPosition() == position; + return static_cast(drawable.get())->GetTexture() == texture && + drawable->GetPosition() == position && + drawable->GetScale() == Vec2f{ scale, scale } && + drawable->GetRotation() == EulerAnglesf{ 0.0f, 0.0f, rotation }; }); return static_cast(it != m_drawables.end() ? it->get() : nullptr); } diff --git a/runtime/Sources/Renderer/Memory.cpp b/runtime/Sources/Renderer/Memory.cpp index b4b041f..4f16f5a 100644 --- a/runtime/Sources/Renderer/Memory.cpp +++ b/runtime/Sources/Renderer/Memory.cpp @@ -1,4 +1,3 @@ -#include #include #define VMA_IMPLEMENTATION diff --git a/runtime/Sources/Renderer/RenderCore.cpp b/runtime/Sources/Renderer/RenderCore.cpp index 0d56361..e4abfaf 100644 --- a/runtime/Sources/Renderer/RenderCore.cpp +++ b/runtime/Sources/Renderer/RenderCore.cpp @@ -56,7 +56,6 @@ namespace mlx return; s_instance = this; - Logs::BeginSection(); loader = std::make_unique(); LoadKVFGlobalVulkanFunctionPointers(); @@ -80,7 +79,6 @@ namespace mlx VkSurfaceKHR surface = window.CreateVulkanSurface(m_instance); - Logs::BeginSection(); m_physical_device = kvfPickGoodDefaultPhysicalDevice(m_instance, surface); // just for style @@ -96,12 +94,10 @@ namespace mlx loader->LoadDevice(m_device); LoadKVFDeviceVulkanFunctionPointers(); - Logs::EndSection(); vkDestroySurfaceKHR(m_instance, surface, nullptr); m_allocator.Init(); - Logs::EndSection(); } #undef MLX_LOAD_FUNCTION diff --git a/runtime/Sources/Renderer/RenderPasses/2DPass.cpp b/runtime/Sources/Renderer/RenderPasses/2DPass.cpp index d38ddc9..a6a9e4e 100644 --- a/runtime/Sources/Renderer/RenderPasses/2DPass.cpp +++ b/runtime/Sources/Renderer/RenderPasses/2DPass.cpp @@ -8,10 +8,10 @@ namespace mlx { - struct SpriteData + struct DrawableData { + Mat4f model_matrix; Vec4f color; - Vec2f position; }; void Render2DPass::Init() @@ -25,7 +25,7 @@ namespace mlx { 0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER } }) } - }, { ShaderPushConstantLayout({ 0, sizeof(SpriteData) }) } + }, { ShaderPushConstantLayout({ 0, sizeof(DrawableData) }) } ); std::vector vertex_shader_code = { #include @@ -105,15 +105,19 @@ namespace mlx m_pipeline.BindPipeline(cmd, 0, {}); for(auto& drawable : drawables) { - SpriteData drawable_data; - drawable_data.position = drawable->GetPosition(); + DrawableData drawable_data; drawable_data.color = drawable->GetColor(); + drawable_data.model_matrix = Mat4f::Identity(); + drawable_data.model_matrix.SetTranslation(Vec3f{ drawable->GetPosition(), 0.0f }); + drawable_data.model_matrix.SetScale(Vec3f{ drawable->GetScale(), 1.0f }); + drawable_data.model_matrix.SetRotation(drawable->GetRotation()); + //drawable_data.model_matrix = Mat4f::Translate(-Vec3f{ drawable->GetCenter(), 0.0f }) * Mat4f::Rotate(drawable->GetRotation()) * drawable_data.model_matrix; drawable->Bind(frame_index, cmd); std::array sets = { p_viewer_data_set->GetSet(frame_index), drawable->GetSet(frame_index) }; - RenderCore::Get().vkCmdPushConstants(cmd, m_pipeline.GetPipelineLayout(), VK_SHADER_STAGE_VERTEX_BIT, 0, sizeof(SpriteData), &drawable_data); + RenderCore::Get().vkCmdPushConstants(cmd, m_pipeline.GetPipelineLayout(), VK_SHADER_STAGE_VERTEX_BIT, 0, sizeof(DrawableData), &drawable_data); RenderCore::Get().vkCmdBindDescriptorSets(cmd, m_pipeline.GetPipelineBindPoint(), m_pipeline.GetPipelineLayout(), 0, sets.size(), sets.data(), 0, nullptr); 
drawable->GetMesh()->Draw(cmd, renderer.GetDrawCallsCounterRef(), renderer.GetPolygonDrawnCounterRef()); From f27e5ef043ef21c91350e88404a2aab1fb668368 Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Wed, 13 Nov 2024 12:23:16 +0100 Subject: [PATCH 074/131] fixing linux build failure --- runtime/Sources/Graphics/Scene.cpp | 2 +- runtime/Sources/Renderer/Pipelines/Graphics.cpp | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/runtime/Sources/Graphics/Scene.cpp b/runtime/Sources/Graphics/Scene.cpp index fba15a5..c070e64 100644 --- a/runtime/Sources/Graphics/Scene.cpp +++ b/runtime/Sources/Graphics/Scene.cpp @@ -37,7 +37,7 @@ namespace mlx return static_cast(drawable.get())->GetTexture() == texture && drawable->GetPosition() == position && drawable->GetScale() == Vec2f{ scale, scale } && - drawable->GetRotation() == EulerAnglesf{ 0.0f, 0.0f, rotation }; + drawable->GetRotation().ToEulerAngles() == EulerAnglesf{ 0.0f, 0.0f, rotation }; }); return static_cast(it != m_drawables.end() ? it->get() : nullptr); } diff --git a/runtime/Sources/Renderer/Pipelines/Graphics.cpp b/runtime/Sources/Renderer/Pipelines/Graphics.cpp index 8913043..bdd5a59 100644 --- a/runtime/Sources/Renderer/Pipelines/Graphics.cpp +++ b/runtime/Sources/Renderer/Pipelines/Graphics.cpp @@ -132,7 +132,6 @@ namespace mlx kvfDestroyFramebuffer(RenderCore::Get().GetDevice(), fb); DebugLog("Vulkan: framebuffer destroyed"); } - m_framebuffers.clear(); kvfDestroyPipelineLayout(RenderCore::Get().GetDevice(), m_pipeline_layout); m_pipeline_layout = VK_NULL_HANDLE; @@ -149,6 +148,7 @@ namespace mlx p_renderer = nullptr; m_clears.clear(); m_attachments.clear(); + m_framebuffers.clear(); } void GraphicPipeline::CreateFramebuffers(const std::vector>& render_targets, bool clear_attachments) From f401b3694ac931c9578fed8f9ca598c7eb46c780 Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Thu, 14 Nov 2024 11:44:52 +0100 Subject: [PATCH 075/131] fixing rotation --- runtime/Includes/Core/Graphics.inl | 3 +-- runtime/Includes/Graphics/Drawable.h | 2 +- runtime/Includes/Maths/Mat4.h | 4 ++-- runtime/Includes/Maths/Quaternions.h | 9 +++++---- runtime/Sources/Renderer/RenderPasses/2DPass.cpp | 8 ++++---- 5 files changed, 13 insertions(+), 13 deletions(-) diff --git a/runtime/Includes/Core/Graphics.inl b/runtime/Includes/Core/Graphics.inl index a3fd304..b07350e 100644 --- a/runtime/Includes/Core/Graphics.inl +++ b/runtime/Includes/Core/Graphics.inl @@ -54,7 +54,6 @@ namespace mlx Text& new_text = p_scene->CreateText(str); new_text.SetPosition(Vec2f{ static_cast(x), static_cast(y) }); new_text.SetColor(std::move(vec_color)); - new_text.SetCenter(Vec2f{ 0.0f, 0.0f }); } else if(!p_scene->IsTextAtGivenDrawLayer(str, m_draw_layer)) p_scene->BringToDrawLayer(text.Get(), m_draw_layer); @@ -72,10 +71,10 @@ namespace mlx m_pixelput_called = false; } Sprite& new_sprite = p_scene->CreateSprite(texture); + new_sprite.SetCenter(Vec2f{ texture->GetWidth() / 2.0f, texture->GetHeight() / 2.0f }); new_sprite.SetPosition(Vec2f{ static_cast(x), static_cast(y) }); new_sprite.SetScale(Vec2f{ scale, scale }); new_sprite.SetRotation(angle); - new_sprite.SetCenter(Vec2f{ texture->GetWidth() / 2.0f, texture->GetHeight() / 2.0f }); } else if(!p_scene->IsTextureAtGivenDrawLayer(texture, m_draw_layer)) p_scene->BringToDrawLayer(sprite.Get(), m_draw_layer); diff --git a/runtime/Includes/Graphics/Drawable.h b/runtime/Includes/Graphics/Drawable.h index 021700d..803fd5f 100644 --- a/runtime/Includes/Graphics/Drawable.h +++ b/runtime/Includes/Graphics/Drawable.h @@ -50,7 +50,7 
@@ namespace mlx Vec4f m_color = Vec4f{ 1.0f, 1.0f, 1.0f, 1.0f }; Vec2f m_position = Vec2f{ 0.0f, 0.0f }; Vec2f m_scale = Vec2f{ 1.0f, 1.0f }; - Vec2f m_center; + Vec2f m_center = Vec2f{ 0.0f, 0.0f }; DrawableType m_type; }; } diff --git a/runtime/Includes/Maths/Mat4.h b/runtime/Includes/Maths/Mat4.h index 722f5cd..bc8adf9 100644 --- a/runtime/Includes/Maths/Mat4.h +++ b/runtime/Includes/Maths/Mat4.h @@ -95,8 +95,8 @@ namespace mlx static constexpr Mat4 ConcatenateTransform(const Mat4& left, const Mat4& right); static constexpr Mat4 Identity(); static constexpr Mat4 LookAt(const Vec3& eye, const Vec3& target, const Vec3& up = Vec3::Up()); - static constexpr Mat4 Ortho(T left, T right, T top, T bottom, T zNear = -1.0, T zFar = 1.0); - static Mat4 Perspective(RadianAngle angle, T ratio, T zNear, T zFar); + static constexpr Mat4 Ortho(T left, T right, T top, T bottom, T z_near = -1.0, T zFar = 1.0); + static Mat4 Perspective(RadianAngle angle, T ratio, T z_near, T z_far); static constexpr Mat4 Rotate(const Quat& rotation); static constexpr Mat4 Scale(const Vec3& scale); static constexpr Mat4 Translate(const Vec3& translation); diff --git a/runtime/Includes/Maths/Quaternions.h b/runtime/Includes/Maths/Quaternions.h index dfa1809..77b7b4e 100644 --- a/runtime/Includes/Maths/Quaternions.h +++ b/runtime/Includes/Maths/Quaternions.h @@ -20,10 +20,9 @@ namespace mlx template constexpr explicit Quat(const Quat& quat); constexpr Quat(const Quat&) = default; constexpr Quat(Quat&&) = default; - ~Quat() = default; RadianAngle AngleBetween(const Quat& vec) const; - constexpr bool ApproxEqual(const Quat& quat, T maxDifference = std::numeric_limits::epsilon()) const; + constexpr bool ApproxEqual(const Quat& quat, T max_difference = std::numeric_limits::epsilon()) const; Quat& ComputeW(); constexpr Quat& Conjugate(); @@ -68,16 +67,18 @@ namespace mlx constexpr bool operator>=(const Quat& quat) const; static RadianAngle AngleBetween(const Quat& lhs, const Quat& rhs); - static constexpr bool ApproxEqual(const Quat& lhs, const Quat& rhs, T maxDifference = std::numeric_limits::epsilon()); + static constexpr bool ApproxEqual(const Quat& lhs, const Quat& rhs, T max_difference = std::numeric_limits::epsilon()); static constexpr Quat Identity(); static constexpr Quat Lerp(const Quat& from, const Quat& to, T interpolation); static Quat LookAt(const Vec3& forward, const Vec3& up); static Quat Normalize(const Quat& quat, T* length = nullptr); static Quat RotationBetween(const Vec3& from, const Vec3& to); - static Quat RotateTowards(const Quat& from, const Quat& to, RadianAngle maxRotation); + static Quat RotateTowards(const Quat& from, const Quat& to, RadianAngle max_rotation); static Quat Mirror(Quat quat, const Vec3& axis); static Quat Slerp(const Quat& from, const Quat& to, T interpolation); static constexpr Quat Zero(); + + ~Quat() = default; }; using Quatd = Quat; diff --git a/runtime/Sources/Renderer/RenderPasses/2DPass.cpp b/runtime/Sources/Renderer/RenderPasses/2DPass.cpp index a6a9e4e..4a767ee 100644 --- a/runtime/Sources/Renderer/RenderPasses/2DPass.cpp +++ b/runtime/Sources/Renderer/RenderPasses/2DPass.cpp @@ -108,10 +108,10 @@ namespace mlx DrawableData drawable_data; drawable_data.color = drawable->GetColor(); drawable_data.model_matrix = Mat4f::Identity(); - drawable_data.model_matrix.SetTranslation(Vec3f{ drawable->GetPosition(), 0.0f }); - drawable_data.model_matrix.SetScale(Vec3f{ drawable->GetScale(), 1.0f }); - drawable_data.model_matrix.SetRotation(drawable->GetRotation()); - 
//drawable_data.model_matrix = Mat4f::Translate(-Vec3f{ drawable->GetCenter(), 0.0f }) * Mat4f::Rotate(drawable->GetRotation()) * drawable_data.model_matrix; + drawable_data.model_matrix.ApplyTranslation(Vec3f{ -drawable->GetCenter() / 2.0f, 0.0f }); + drawable_data.model_matrix.ApplyRotation(drawable->GetRotation()); + drawable_data.model_matrix.ApplyTranslation(Vec3f{ drawable->GetPosition() + drawable->GetCenter(), 0.0f }); + drawable_data.model_matrix.ApplyScale(Vec3f{ drawable->GetScale(), 1.0f }); drawable->Bind(frame_index, cmd); From c85a30e6695b1bb2ecc76ca76497ae5c990a2c8d Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Thu, 14 Nov 2024 17:38:44 +0100 Subject: [PATCH 076/131] working on rtt --- example/main.c | 15 +++++++++++++++ .../Includes/Renderer/RenderPasses/FinalPass.h | 2 +- runtime/Includes/Renderer/RenderPasses/Passes.h | 3 ++- runtime/Includes/Renderer/ScenesRenderer.h | 4 ++-- runtime/Sources/Core/Graphics.cpp | 5 ++--- .../Sources/Renderer/RenderPasses/FinalPass.cpp | 7 +++++-- .../Sources/Renderer/RenderPasses/Passes.cpp | 12 +++++++++--- runtime/Sources/Renderer/Renderer.cpp | 17 ++++++++++++----- runtime/Sources/Renderer/SceneRenderer.cpp | 4 ++-- 9 files changed, 50 insertions(+), 19 deletions(-) diff --git a/example/main.c b/example/main.c index bb56a06..819bb89 100644 --- a/example/main.c +++ b/example/main.c @@ -9,6 +9,8 @@ typedef struct void* logo_jpg; void* logo_bmp; void* img; + void* render_target; + void* render_target_win; } mlx_t; int update(void* param) @@ -50,6 +52,12 @@ int update(void* param) mlx_pixel_put(mlx->mlx, mlx->win, 220 + j, 160 + k, 0xFFFF0000); } + mlx_string_put(mlx->mlx, mlx->render_target_win, 20, 20, 0xFFAF2BFF, "cacaboudin"); + mlx_transform_put_image_to_window(mlx->mlx, mlx->render_target_win, mlx->logo_bmp, 100, 40, 0.5f, 75.0f); + mlx_put_image_to_window(mlx->mlx, mlx->render_target_win, mlx->img, 40, 60); + + mlx_put_image_to_window(mlx->mlx, mlx->win, mlx->render_target_win, 40, 60); + i++; return 0; } @@ -133,6 +141,9 @@ int main(void) mlx.mlx = mlx_init(); mlx.win = mlx_new_window(mlx.mlx, 400, 400, "My window"); + mlx.render_target = mlx_new_image(mlx.mlx, 200, 200); + mlx.render_target_win = mlx_new_window(mlx.mlx, 200, 200, (char*)mlx.render_target); + mlx_set_fps_goal(mlx.mlx, 60); mlx_on_event(mlx.mlx, mlx.win, MLX_KEYDOWN, key_hook, &mlx); @@ -161,6 +172,10 @@ int main(void) mlx_destroy_image(mlx.mlx, mlx.logo_bmp); mlx_destroy_image(mlx.mlx, mlx.img); mlx_destroy_window(mlx.mlx, mlx.win); + + mlx_destroy_window(mlx.mlx, mlx.render_target_win); + mlx_destroy_image(mlx.mlx, mlx.render_target); + mlx_destroy_display(mlx.mlx); return 0; diff --git a/runtime/Includes/Renderer/RenderPasses/FinalPass.h b/runtime/Includes/Renderer/RenderPasses/FinalPass.h index ead2c3a..1de1d0e 100644 --- a/runtime/Includes/Renderer/RenderPasses/FinalPass.h +++ b/runtime/Includes/Renderer/RenderPasses/FinalPass.h @@ -12,7 +12,7 @@ namespace mlx public: FinalPass() = default; void Init(); - void Pass(class Scene& scene, class Renderer& renderer, class Texture& render_target); + void Pass(class Scene& scene, class Renderer& renderer, class Texture& render_target, NonOwningPtr final_target); void Destroy(); ~FinalPass() = default; diff --git a/runtime/Includes/Renderer/RenderPasses/Passes.h b/runtime/Includes/Renderer/RenderPasses/Passes.h index a24bcc2..48dfc84 100644 --- a/runtime/Includes/Renderer/RenderPasses/Passes.h +++ b/runtime/Includes/Renderer/RenderPasses/Passes.h @@ -12,7 +12,7 @@ namespace mlx public: RenderPasses() = default; - void 
Init(); + void Init(NonOwningPtr render_target); void Pass(class Scene& scene, class Renderer& renderer, const Vec4f& clear_color); void Destroy(); @@ -22,6 +22,7 @@ namespace mlx Render2DPass m_2Dpass; FinalPass m_final; Texture m_main_render_texture; + NonOwningPtr p_render_target; }; } diff --git a/runtime/Includes/Renderer/ScenesRenderer.h b/runtime/Includes/Renderer/ScenesRenderer.h index 03c391e..2919d5f 100644 --- a/runtime/Includes/Renderer/ScenesRenderer.h +++ b/runtime/Includes/Renderer/ScenesRenderer.h @@ -9,8 +9,8 @@ namespace mlx { public: SceneRenderer() = default; - void Init(); - void Render(class Scene& scene, class Renderer& renderer); // TODO : add RTT support + void Init(NonOwningPtr render_target); + void Render(class Scene& scene, class Renderer& renderer); void Destroy(); ~SceneRenderer() = default; diff --git a/runtime/Sources/Core/Graphics.cpp b/runtime/Sources/Core/Graphics.cpp index 66a70ed..db79fa5 100644 --- a/runtime/Sources/Core/Graphics.cpp +++ b/runtime/Sources/Core/Graphics.cpp @@ -10,9 +10,8 @@ namespace mlx m_has_window(false) { MLX_PROFILE_FUNCTION(); - // TODO : re-enable render targets m_renderer.Init(nullptr); - m_scene_renderer.Init(); + m_scene_renderer.Init(render_target); p_scene = std::make_unique(); } @@ -24,7 +23,7 @@ namespace mlx { MLX_PROFILE_FUNCTION(); m_renderer.Init(p_window.get()); - m_scene_renderer.Init(); + m_scene_renderer.Init(nullptr); p_scene = std::make_unique(); } diff --git a/runtime/Sources/Renderer/RenderPasses/FinalPass.cpp b/runtime/Sources/Renderer/RenderPasses/FinalPass.cpp index ed368f5..c8dec0f 100644 --- a/runtime/Sources/Renderer/RenderPasses/FinalPass.cpp +++ b/runtime/Sources/Renderer/RenderPasses/FinalPass.cpp @@ -41,7 +41,7 @@ namespace mlx p_set = RenderCore::Get().GetDescriptorPoolManager().GetAvailablePool().RequestDescriptorSet(p_fragment_shader->GetShaderLayout().set_layouts[0].second, ShaderType::Fragment); } - void FinalPass::Pass([[maybe_unused]] Scene& scene, Renderer& renderer, Texture& render_target) + void FinalPass::Pass([[maybe_unused]] Scene& scene, Renderer& renderer, Texture& render_target, NonOwningPtr final_target) { MLX_PROFILE_FUNCTION(); if(m_pipeline.GetPipeline() == VK_NULL_HANDLE) @@ -49,7 +49,10 @@ namespace mlx GraphicPipelineDescriptor pipeline_descriptor; pipeline_descriptor.vertex_shader = p_vertex_shader; pipeline_descriptor.fragment_shader = p_fragment_shader; - pipeline_descriptor.renderer = &renderer; + if(final_target) + pipeline_descriptor.color_attachments = { final_target }; + else + pipeline_descriptor.renderer = &renderer; pipeline_descriptor.no_vertex_inputs = true; #ifdef DEBUG m_pipeline.Init(pipeline_descriptor, "mlx_final_pass"); diff --git a/runtime/Sources/Renderer/RenderPasses/Passes.cpp b/runtime/Sources/Renderer/RenderPasses/Passes.cpp index f1ea588..29bebbb 100644 --- a/runtime/Sources/Renderer/RenderPasses/Passes.cpp +++ b/runtime/Sources/Renderer/RenderPasses/Passes.cpp @@ -5,8 +5,10 @@ namespace mlx { - void RenderPasses::Init() + void RenderPasses::Init(NonOwningPtr render_target) { + p_render_target = render_target; + m_2Dpass.Init(); m_final.Init(); func::function functor = [this](const EventBase& event) @@ -21,7 +23,11 @@ namespace mlx { if(!m_main_render_texture.IsInit()) { - auto extent = kvfGetSwapchainImagesSize(renderer.GetSwapchain().Get()); + VkExtent2D extent; + if(p_render_target) + extent = VkExtent2D{ .width = p_render_target->GetWidth(), .height = p_render_target->GetHeight() }; + else + extent = 
kvfGetSwapchainImagesSize(renderer.GetSwapchain().Get()); #ifdef DEBUG m_main_render_texture.Init({}, extent.width, extent.height, VK_FORMAT_R8G8B8A8_SRGB, false, "mlx_renderpasses_target"); #else @@ -32,7 +38,7 @@ namespace mlx m_main_render_texture.Clear(renderer.GetActiveCommandBuffer(), clear_color); m_2Dpass.Pass(scene, renderer, m_main_render_texture); - m_final.Pass(scene, renderer, m_main_render_texture); + m_final.Pass(scene, renderer, m_main_render_texture, p_render_target); } void RenderPasses::Destroy() diff --git a/runtime/Sources/Renderer/Renderer.cpp b/runtime/Sources/Renderer/Renderer.cpp index 7014e20..9b9ea4e 100644 --- a/runtime/Sources/Renderer/Renderer.cpp +++ b/runtime/Sources/Renderer/Renderer.cpp @@ -19,7 +19,8 @@ namespace mlx { MLX_PROFILE_FUNCTION(); p_window = window; - m_swapchain.Init(p_window); + if(p_window) + m_swapchain.Init(p_window); for(std::size_t i = 0; i < MAX_FRAMES_IN_FLIGHT; i++) { m_image_available_semaphores[i] = kvfCreateSemaphore(RenderCore::Get().GetDevice()); @@ -37,7 +38,8 @@ namespace mlx { MLX_PROFILE_FUNCTION(); kvfWaitForFence(RenderCore::Get().GetDevice(), m_cmd_fences[m_current_frame_index]); - m_swapchain.AquireFrame(m_image_available_semaphores[m_current_frame_index]); + if(p_window) + m_swapchain.AquireFrame(m_image_available_semaphores[m_current_frame_index]); RenderCore::Get().vkResetCommandBuffer(m_cmd_buffers[m_current_frame_index], 0); kvfBeginCommandBuffer(m_cmd_buffers[m_current_frame_index], 0); m_drawcalls = 0; @@ -50,8 +52,12 @@ namespace mlx MLX_PROFILE_FUNCTION(); VkPipelineStageFlags wait_stages[] = { VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT }; kvfEndCommandBuffer(m_cmd_buffers[m_current_frame_index]); - kvfSubmitCommandBuffer(RenderCore::Get().GetDevice(), m_cmd_buffers[m_current_frame_index], KVF_GRAPHICS_QUEUE, m_render_finished_semaphores[m_current_frame_index], m_image_available_semaphores[m_current_frame_index], m_cmd_fences[m_current_frame_index], wait_stages); - m_swapchain.Present(m_render_finished_semaphores[m_current_frame_index]); + if(p_window) + kvfSubmitCommandBuffer(RenderCore::Get().GetDevice(), m_cmd_buffers[m_current_frame_index], KVF_GRAPHICS_QUEUE, m_render_finished_semaphores[m_current_frame_index], m_image_available_semaphores[m_current_frame_index], m_cmd_fences[m_current_frame_index], wait_stages); + else + kvfSubmitCommandBuffer(RenderCore::Get().GetDevice(), m_cmd_buffers[m_current_frame_index], KVF_GRAPHICS_QUEUE, VK_NULL_HANDLE, VK_NULL_HANDLE, m_cmd_fences[m_current_frame_index], wait_stages); + if(p_window) + m_swapchain.Present(m_render_finished_semaphores[m_current_frame_index]); m_current_frame_index = (m_current_frame_index + 1) % MAX_FRAMES_IN_FLIGHT; } @@ -68,6 +74,7 @@ namespace mlx kvfDestroyFence(RenderCore::Get().GetDevice(), m_cmd_fences[i]); DebugLog("Vulkan: fence destroyed"); } - m_swapchain.Destroy(); + if(p_window) + m_swapchain.Destroy(); } } diff --git a/runtime/Sources/Renderer/SceneRenderer.cpp b/runtime/Sources/Renderer/SceneRenderer.cpp index 98fa750..4daf819 100644 --- a/runtime/Sources/Renderer/SceneRenderer.cpp +++ b/runtime/Sources/Renderer/SceneRenderer.cpp @@ -6,10 +6,10 @@ namespace mlx { - void SceneRenderer::Init() + void SceneRenderer::Init(NonOwningPtr render_target) { MLX_PROFILE_FUNCTION(); - m_passes.Init(); + m_passes.Init(render_target); } void SceneRenderer::Render(Scene& scene, Renderer& renderer) From 50edf78e7ddf4e2ee6e1c85d96cd1c9b1d1843c2 Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Sun, 24 Nov 2024 00:41:48 +0000 Subject: [PATCH 077/131] 
[BOT] update dependencies --- .../vk_video/vulkan_video_codec_av1std.h | 4 +- .../vulkan_video_codec_av1std_encode.h | 143 + third_party/vulkan/vulkan.cppm | 66 + third_party/vulkan/vulkan.hpp | 361 +- third_party/vulkan/vulkan_core.h | 367 +- third_party/vulkan/vulkan_enums.hpp | 231 +- .../vulkan/vulkan_extension_inspection.hpp | 58 +- third_party/vulkan/vulkan_funcs.hpp | 233 ++ third_party/vulkan/vulkan_handles.hpp | 73 + third_party/vulkan/vulkan_hash.hpp | 434 +++ third_party/vulkan/vulkan_raii.hpp | 118 +- .../vulkan/vulkan_static_assertions.hpp | 195 ++ third_party/vulkan/vulkan_structs.hpp | 3079 +++++++++++++++++ third_party/vulkan/vulkan_to_string.hpp | 258 +- 14 files changed, 5524 insertions(+), 96 deletions(-) create mode 100644 third_party/vk_video/vulkan_video_codec_av1std_encode.h diff --git a/third_party/vk_video/vulkan_video_codec_av1std.h b/third_party/vk_video/vulkan_video_codec_av1std.h index 8ce283e..4fdced7 100644 --- a/third_party/vk_video/vulkan_video_codec_av1std.h +++ b/third_party/vk_video/vulkan_video_codec_av1std.h @@ -132,7 +132,7 @@ typedef enum StdVideoAV1FrameRestorationType { typedef enum StdVideoAV1ColorPrimaries { STD_VIDEO_AV1_COLOR_PRIMARIES_BT_709 = 1, - STD_VIDEO_AV1_COLOR_PRIMARIES_BT_UNSPECIFIED = 2, + STD_VIDEO_AV1_COLOR_PRIMARIES_UNSPECIFIED = 2, STD_VIDEO_AV1_COLOR_PRIMARIES_BT_470_M = 4, STD_VIDEO_AV1_COLOR_PRIMARIES_BT_470_B_G = 5, STD_VIDEO_AV1_COLOR_PRIMARIES_BT_601 = 6, @@ -144,6 +144,8 @@ typedef enum StdVideoAV1ColorPrimaries { STD_VIDEO_AV1_COLOR_PRIMARIES_SMPTE_432 = 12, STD_VIDEO_AV1_COLOR_PRIMARIES_EBU_3213 = 22, STD_VIDEO_AV1_COLOR_PRIMARIES_INVALID = 0x7FFFFFFF, + // STD_VIDEO_AV1_COLOR_PRIMARIES_BT_UNSPECIFIED is a deprecated alias + STD_VIDEO_AV1_COLOR_PRIMARIES_BT_UNSPECIFIED = STD_VIDEO_AV1_COLOR_PRIMARIES_UNSPECIFIED, STD_VIDEO_AV1_COLOR_PRIMARIES_MAX_ENUM = 0x7FFFFFFF } StdVideoAV1ColorPrimaries; diff --git a/third_party/vk_video/vulkan_video_codec_av1std_encode.h b/third_party/vk_video/vulkan_video_codec_av1std_encode.h new file mode 100644 index 0000000..87fc093 --- /dev/null +++ b/third_party/vk_video/vulkan_video_codec_av1std_encode.h @@ -0,0 +1,143 @@ +#ifndef VULKAN_VIDEO_CODEC_AV1STD_ENCODE_H_ +#define VULKAN_VIDEO_CODEC_AV1STD_ENCODE_H_ 1 + +/* +** Copyright 2015-2024 The Khronos Group Inc. +** +** SPDX-License-Identifier: Apache-2.0 +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. +** +*/ + + +#ifdef __cplusplus +extern "C" { +#endif + + + +// vulkan_video_codec_av1std_encode is a preprocessor guard. Do not pass it to API calls. 
+#define vulkan_video_codec_av1std_encode 1 +#include "vulkan_video_codec_av1std.h" + +#define VK_STD_VULKAN_VIDEO_CODEC_AV1_ENCODE_API_VERSION_1_0_0 VK_MAKE_VIDEO_STD_VERSION(1, 0, 0) + +#define VK_STD_VULKAN_VIDEO_CODEC_AV1_ENCODE_SPEC_VERSION VK_STD_VULKAN_VIDEO_CODEC_AV1_ENCODE_API_VERSION_1_0_0 +#define VK_STD_VULKAN_VIDEO_CODEC_AV1_ENCODE_EXTENSION_NAME "VK_STD_vulkan_video_codec_av1_encode" +typedef struct StdVideoEncodeAV1DecoderModelInfo { + uint8_t buffer_delay_length_minus_1; + uint8_t buffer_removal_time_length_minus_1; + uint8_t frame_presentation_time_length_minus_1; + uint8_t reserved1; + uint32_t num_units_in_decoding_tick; +} StdVideoEncodeAV1DecoderModelInfo; + +typedef struct StdVideoEncodeAV1ExtensionHeader { + uint8_t temporal_id; + uint8_t spatial_id; +} StdVideoEncodeAV1ExtensionHeader; + +typedef struct StdVideoEncodeAV1OperatingPointInfoFlags { + uint32_t decoder_model_present_for_this_op : 1; + uint32_t low_delay_mode_flag : 1; + uint32_t initial_display_delay_present_for_this_op : 1; + uint32_t reserved : 29; +} StdVideoEncodeAV1OperatingPointInfoFlags; + +typedef struct StdVideoEncodeAV1OperatingPointInfo { + StdVideoEncodeAV1OperatingPointInfoFlags flags; + uint16_t operating_point_idc; + uint8_t seq_level_idx; + uint8_t seq_tier; + uint32_t decoder_buffer_delay; + uint32_t encoder_buffer_delay; + uint8_t initial_display_delay_minus_1; +} StdVideoEncodeAV1OperatingPointInfo; + +typedef struct StdVideoEncodeAV1PictureInfoFlags { + uint32_t error_resilient_mode : 1; + uint32_t disable_cdf_update : 1; + uint32_t use_superres : 1; + uint32_t render_and_frame_size_different : 1; + uint32_t allow_screen_content_tools : 1; + uint32_t is_filter_switchable : 1; + uint32_t force_integer_mv : 1; + uint32_t frame_size_override_flag : 1; + uint32_t buffer_removal_time_present_flag : 1; + uint32_t allow_intrabc : 1; + uint32_t frame_refs_short_signaling : 1; + uint32_t allow_high_precision_mv : 1; + uint32_t is_motion_mode_switchable : 1; + uint32_t use_ref_frame_mvs : 1; + uint32_t disable_frame_end_update_cdf : 1; + uint32_t allow_warped_motion : 1; + uint32_t reduced_tx_set : 1; + uint32_t skip_mode_present : 1; + uint32_t delta_q_present : 1; + uint32_t delta_lf_present : 1; + uint32_t delta_lf_multi : 1; + uint32_t segmentation_enabled : 1; + uint32_t segmentation_update_map : 1; + uint32_t segmentation_temporal_update : 1; + uint32_t segmentation_update_data : 1; + uint32_t UsesLr : 1; + uint32_t usesChromaLr : 1; + uint32_t show_frame : 1; + uint32_t showable_frame : 1; + uint32_t reserved : 3; +} StdVideoEncodeAV1PictureInfoFlags; + +typedef struct StdVideoEncodeAV1PictureInfo { + StdVideoEncodeAV1PictureInfoFlags flags; + StdVideoAV1FrameType frame_type; + uint32_t frame_presentation_time; + uint32_t current_frame_id; + uint8_t order_hint; + uint8_t primary_ref_frame; + uint8_t refresh_frame_flags; + uint8_t coded_denom; + uint16_t render_width_minus_1; + uint16_t render_height_minus_1; + StdVideoAV1InterpolationFilter interpolation_filter; + StdVideoAV1TxMode TxMode; + uint8_t delta_q_res; + uint8_t delta_lf_res; + uint8_t ref_order_hint[STD_VIDEO_AV1_NUM_REF_FRAMES]; + int8_t ref_frame_idx[STD_VIDEO_AV1_REFS_PER_FRAME]; + uint8_t reserved1[3]; + uint32_t delta_frame_id_minus_1[STD_VIDEO_AV1_REFS_PER_FRAME]; + const StdVideoAV1TileInfo* pTileInfo; + const StdVideoAV1Quantization* pQuantization; + const StdVideoAV1Segmentation* pSegmentation; + const StdVideoAV1LoopFilter* pLoopFilter; + const StdVideoAV1CDEF* pCDEF; + const StdVideoAV1LoopRestoration* 
pLoopRestoration; + const StdVideoAV1GlobalMotion* pGlobalMotion; + const StdVideoEncodeAV1ExtensionHeader* pExtensionHeader; + const uint32_t* pBufferRemovalTimes; +} StdVideoEncodeAV1PictureInfo; + +typedef struct StdVideoEncodeAV1ReferenceInfoFlags { + uint32_t disable_frame_end_update_cdf : 1; + uint32_t segmentation_enabled : 1; + uint32_t reserved : 30; +} StdVideoEncodeAV1ReferenceInfoFlags; + +typedef struct StdVideoEncodeAV1ReferenceInfo { + StdVideoEncodeAV1ReferenceInfoFlags flags; + uint32_t RefFrameId; + StdVideoAV1FrameType frame_type; + uint8_t OrderHint; + uint8_t reserved1[3]; + const StdVideoEncodeAV1ExtensionHeader* pExtensionHeader; +} StdVideoEncodeAV1ReferenceInfo; + + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/third_party/vulkan/vulkan.cppm b/third_party/vulkan/vulkan.cppm index 65881c4..2af062c 100644 --- a/third_party/vulkan/vulkan.cppm +++ b/third_party/vulkan/vulkan.cppm @@ -872,6 +872,18 @@ export namespace VULKAN_HPP_NAMESPACE using VULKAN_HPP_NAMESPACE::ScopeKHR; using VULKAN_HPP_NAMESPACE::ScopeNV; + //=== VK_KHR_video_encode_av1 === + using VULKAN_HPP_NAMESPACE::VideoEncodeAV1CapabilityFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeAV1CapabilityFlagsKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeAV1PredictionModeKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeAV1RateControlFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeAV1RateControlFlagsKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeAV1RateControlGroupKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeAV1StdFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeAV1StdFlagsKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeAV1SuperblockSizeFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeAV1SuperblockSizeFlagsKHR; + //=== VK_QCOM_image_processing2 === using VULKAN_HPP_NAMESPACE::BlockMatchWindowCompareModeQCOM; @@ -889,6 +901,9 @@ export namespace VULKAN_HPP_NAMESPACE using VULKAN_HPP_NAMESPACE::TimeDomainEXT; using VULKAN_HPP_NAMESPACE::TimeDomainKHR; + //=== VK_NV_display_stereo === + using VULKAN_HPP_NAMESPACE::DisplaySurfaceStereoTypeNV; + //=== VK_KHR_maintenance7 === using VULKAN_HPP_NAMESPACE::PhysicalDeviceLayeredApiKHR; @@ -2498,6 +2513,10 @@ export namespace VULKAN_HPP_NAMESPACE using VULKAN_HPP_NAMESPACE::KHRVideoDecodeAv1SpecVersion; using VULKAN_HPP_NAMESPACE::MaxVideoAv1ReferencesPerFrameKHR; + //=== VK_KHR_video_encode_av1 === + using VULKAN_HPP_NAMESPACE::KHRVideoEncodeAv1ExtensionName; + using VULKAN_HPP_NAMESPACE::KHRVideoEncodeAv1SpecVersion; + //=== VK_KHR_video_maintenance1 === using VULKAN_HPP_NAMESPACE::KHRVideoMaintenance1ExtensionName; using VULKAN_HPP_NAMESPACE::KHRVideoMaintenance1SpecVersion; @@ -2572,6 +2591,14 @@ export namespace VULKAN_HPP_NAMESPACE using VULKAN_HPP_NAMESPACE::NVDescriptorPoolOverallocationExtensionName; using VULKAN_HPP_NAMESPACE::NVDescriptorPoolOverallocationSpecVersion; + //=== VK_NV_display_stereo === + using VULKAN_HPP_NAMESPACE::NVDisplayStereoExtensionName; + using VULKAN_HPP_NAMESPACE::NVDisplayStereoSpecVersion; + + //=== VK_KHR_video_encode_quantization_map === + using VULKAN_HPP_NAMESPACE::KHRVideoEncodeQuantizationMapExtensionName; + using VULKAN_HPP_NAMESPACE::KHRVideoEncodeQuantizationMapSpecVersion; + //=== VK_NV_raw_access_chains === using VULKAN_HPP_NAMESPACE::NVRawAccessChainsExtensionName; using VULKAN_HPP_NAMESPACE::NVRawAccessChainsSpecVersion; @@ -2620,6 +2647,10 @@ export namespace VULKAN_HPP_NAMESPACE using VULKAN_HPP_NAMESPACE::NVCooperativeMatrix2ExtensionName; using 
VULKAN_HPP_NAMESPACE::NVCooperativeMatrix2SpecVersion; + //=== VK_EXT_vertex_attribute_robustness === + using VULKAN_HPP_NAMESPACE::EXTVertexAttributeRobustnessExtensionName; + using VULKAN_HPP_NAMESPACE::EXTVertexAttributeRobustnessSpecVersion; + //======================== //=== CONSTEXPR VALUEs === //======================== @@ -3209,6 +3240,7 @@ export namespace VULKAN_HPP_NAMESPACE using VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX; using VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX; using VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX; + using VULKAN_HPP_NAMESPACE::CuModuleTexturingModeCreateInfoNVX; //=== VK_NVX_image_view_handle === using VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX; @@ -4400,6 +4432,21 @@ export namespace VULKAN_HPP_NAMESPACE using VULKAN_HPP_NAMESPACE::VideoDecodeAV1ProfileInfoKHR; using VULKAN_HPP_NAMESPACE::VideoDecodeAV1SessionParametersCreateInfoKHR; + //=== VK_KHR_video_encode_av1 === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoEncodeAV1FeaturesKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeAV1CapabilitiesKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeAV1DpbSlotInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeAV1FrameSizeKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeAV1GopRemainingFrameInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeAV1PictureInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeAV1ProfileInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeAV1QIndexKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeAV1QualityLevelPropertiesKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeAV1RateControlInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeAV1RateControlLayerInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeAV1SessionCreateInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeAV1SessionParametersCreateInfoKHR; + //=== VK_KHR_video_maintenance1 === using VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoMaintenance1FeaturesKHR; using VULKAN_HPP_NAMESPACE::VideoInlineQueryInfoKHR; @@ -4484,6 +4531,22 @@ export namespace VULKAN_HPP_NAMESPACE //=== VK_NV_descriptor_pool_overallocation === using VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorPoolOverallocationFeaturesNV; + //=== VK_NV_display_stereo === + using VULKAN_HPP_NAMESPACE::DisplayModeStereoPropertiesNV; + using VULKAN_HPP_NAMESPACE::DisplaySurfaceStereoCreateInfoNV; + + //=== VK_KHR_video_encode_quantization_map === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeAV1QuantizationMapCapabilitiesKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeH264QuantizationMapCapabilitiesKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeH265QuantizationMapCapabilitiesKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeQuantizationMapCapabilitiesKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeQuantizationMapInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoEncodeQuantizationMapSessionParametersCreateInfoKHR; + using VULKAN_HPP_NAMESPACE::VideoFormatAV1QuantizationMapPropertiesKHR; + using VULKAN_HPP_NAMESPACE::VideoFormatH265QuantizationMapPropertiesKHR; + using VULKAN_HPP_NAMESPACE::VideoFormatQuantizationMapPropertiesKHR; + //=== VK_NV_raw_access_chains === using VULKAN_HPP_NAMESPACE::PhysicalDeviceRawAccessChainsFeaturesNV; @@ -4553,6 +4616,9 @@ export namespace VULKAN_HPP_NAMESPACE using VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrix2FeaturesNV; using VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrix2PropertiesNV; + //=== VK_EXT_vertex_attribute_robustness === + using VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeRobustnessFeaturesEXT; + //=============== //=== 
HANDLEs === //=============== diff --git a/third_party/vulkan/vulkan.hpp b/third_party/vulkan/vulkan.hpp index 6ee70ed..e20d00f 100644 --- a/third_party/vulkan/vulkan.hpp +++ b/third_party/vulkan/vulkan.hpp @@ -63,7 +63,7 @@ extern "C" __declspec( dllimport ) FARPROC __stdcall GetProcAddress( HINSTANCE h # include #endif -static_assert( VK_HEADER_VERSION == 301, "Wrong VK_HEADER_VERSION!" ); +static_assert( VK_HEADER_VERSION == 302, "Wrong VK_HEADER_VERSION!" ); // includes through some other header // this results in major(x) being resolved to gnu_dev_major(x) @@ -980,7 +980,7 @@ namespace VULKAN_HPP_NAMESPACE #endif }; -#if !defined( VK_NO_PROTOTYPES ) +#if !defined( VK_NO_PROTOTYPES ) || ( defined( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC ) && ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 0 ) ) class DispatchLoaderStatic : public DispatchLoaderBase { public: @@ -2906,6 +2906,11 @@ namespace VULKAN_HPP_NAMESPACE return ::vkGetImageViewHandleNVX( device, pInfo ); } + uint64_t vkGetImageViewHandle64NVX( VkDevice device, const VkImageViewHandleInfoNVX * pInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetImageViewHandle64NVX( device, pInfo ); + } + VkResult vkGetImageViewAddressNVX( VkDevice device, VkImageView imageView, VkImageViewAddressPropertiesNVX * pProperties ) const VULKAN_HPP_NOEXCEPT { return ::vkGetImageViewAddressNVX( device, imageView, pProperties ); @@ -8596,6 +8601,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR_INLINE auto KHRVideoDecodeAv1ExtensionName = VK_KHR_VIDEO_DECODE_AV1_EXTENSION_NAME; VULKAN_HPP_CONSTEXPR_INLINE auto KHRVideoDecodeAv1SpecVersion = VK_KHR_VIDEO_DECODE_AV1_SPEC_VERSION; + //=== VK_KHR_video_encode_av1 === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRVideoEncodeAv1ExtensionName = VK_KHR_VIDEO_ENCODE_AV1_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRVideoEncodeAv1SpecVersion = VK_KHR_VIDEO_ENCODE_AV1_SPEC_VERSION; + //=== VK_KHR_video_maintenance1 === VULKAN_HPP_CONSTEXPR_INLINE auto KHRVideoMaintenance1ExtensionName = VK_KHR_VIDEO_MAINTENANCE_1_EXTENSION_NAME; VULKAN_HPP_CONSTEXPR_INLINE auto KHRVideoMaintenance1SpecVersion = VK_KHR_VIDEO_MAINTENANCE_1_SPEC_VERSION; @@ -8670,6 +8679,14 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR_INLINE auto NVDescriptorPoolOverallocationExtensionName = VK_NV_DESCRIPTOR_POOL_OVERALLOCATION_EXTENSION_NAME; VULKAN_HPP_CONSTEXPR_INLINE auto NVDescriptorPoolOverallocationSpecVersion = VK_NV_DESCRIPTOR_POOL_OVERALLOCATION_SPEC_VERSION; + //=== VK_NV_display_stereo === + VULKAN_HPP_CONSTEXPR_INLINE auto NVDisplayStereoExtensionName = VK_NV_DISPLAY_STEREO_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto NVDisplayStereoSpecVersion = VK_NV_DISPLAY_STEREO_SPEC_VERSION; + + //=== VK_KHR_video_encode_quantization_map === + VULKAN_HPP_CONSTEXPR_INLINE auto KHRVideoEncodeQuantizationMapExtensionName = VK_KHR_VIDEO_ENCODE_QUANTIZATION_MAP_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto KHRVideoEncodeQuantizationMapSpecVersion = VK_KHR_VIDEO_ENCODE_QUANTIZATION_MAP_SPEC_VERSION; + //=== VK_NV_raw_access_chains === VULKAN_HPP_CONSTEXPR_INLINE auto NVRawAccessChainsExtensionName = VK_NV_RAW_ACCESS_CHAINS_EXTENSION_NAME; VULKAN_HPP_CONSTEXPR_INLINE auto NVRawAccessChainsSpecVersion = VK_NV_RAW_ACCESS_CHAINS_SPEC_VERSION; @@ -8718,6 +8735,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR_INLINE auto NVCooperativeMatrix2ExtensionName = VK_NV_COOPERATIVE_MATRIX_2_EXTENSION_NAME; VULKAN_HPP_CONSTEXPR_INLINE auto NVCooperativeMatrix2SpecVersion = VK_NV_COOPERATIVE_MATRIX_2_SPEC_VERSION; + //=== 
VK_EXT_vertex_attribute_robustness === + VULKAN_HPP_CONSTEXPR_INLINE auto EXTVertexAttributeRobustnessExtensionName = VK_EXT_VERTEX_ATTRIBUTE_ROBUSTNESS_EXTENSION_NAME; + VULKAN_HPP_CONSTEXPR_INLINE auto EXTVertexAttributeRobustnessSpecVersion = VK_EXT_VERTEX_ATTRIBUTE_ROBUSTNESS_SPEC_VERSION; + } // namespace VULKAN_HPP_NAMESPACE // clang-format off @@ -10412,6 +10433,16 @@ namespace VULKAN_HPP_NAMESPACE }; }; + //=== VK_NVX_binary_import === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + //=== VK_KHR_video_encode_h264 === template <> struct StructExtends @@ -16355,6 +16386,133 @@ namespace VULKAN_HPP_NAMESPACE }; }; + //=== VK_KHR_video_encode_av1 === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + //=== VK_KHR_video_maintenance1 === template <> struct StructExtends @@ -16814,6 +16972,125 @@ namespace VULKAN_HPP_NAMESPACE }; }; + //=== VK_NV_display_stereo === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + //=== VK_KHR_video_encode_quantization_map === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + //=== VK_NV_raw_access_chains === template <> struct StructExtends @@ -17159,6 +17436,25 @@ namespace VULKAN_HPP_NAMESPACE }; }; + //=== VK_EXT_vertex_attribute_robustness === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + #endif // VULKAN_HPP_DISABLE_ENHANCED_MODE namespace detail @@ -17175,37 +17471,57 @@ namespace VULKAN_HPP_NAMESPACE { if ( !vulkanLibraryName.empty() ) { -# if defined( __unix__ 
) || defined( __APPLE__ ) || defined( __QNX__ ) || defined( __Fuchsia__ ) - m_library = dlopen( vulkanLibraryName.c_str(), RTLD_NOW | RTLD_LOCAL ); -# elif defined( _WIN32 ) +# if defined( _WIN32 ) m_library = ::LoadLibraryA( vulkanLibraryName.c_str() ); +# elif defined( __unix__ ) || defined( __APPLE__ ) || defined( __QNX__ ) || defined( __Fuchsia__ ) + m_library = dlopen( vulkanLibraryName.c_str(), RTLD_NOW | RTLD_LOCAL ); # else # error unsupported platform # endif } else { -# if defined( __unix__ ) || defined( __QNX__ ) || defined( __Fuchsia__ ) - m_library = dlopen( "libvulkan.so", RTLD_NOW | RTLD_LOCAL ); - if ( m_library == nullptr ) - { - m_library = dlopen( "libvulkan.so.1", RTLD_NOW | RTLD_LOCAL ); - } +# if defined( _WIN32 ) + m_library = ::LoadLibraryA( "vulkan-1.dll" ); # elif defined( __APPLE__ ) m_library = dlopen( "libvulkan.dylib", RTLD_NOW | RTLD_LOCAL ); - if ( m_library == nullptr ) + if ( !m_library ) { m_library = dlopen( "libvulkan.1.dylib", RTLD_NOW | RTLD_LOCAL ); } -# elif defined( _WIN32 ) - m_library = ::LoadLibraryA( "vulkan-1.dll" ); + if ( !m_library ) + { + m_library = dlopen( "libMoltenVK.dylib", RTLD_NOW | RTLD_LOCAL ); + } + // Add support for using Vulkan and MoltenVK in a Framework. App store rules for iOS + // strictly enforce no .dylib's. If they aren't found it just falls through + if ( !m_library ) + { + m_library = dlopen( "vulkan.framework/vulkan", RTLD_NOW | RTLD_LOCAL ); + } + if ( !m_library ) + { + m_library = dlopen( "MoltenVK.framework/MoltenVK", RTLD_NOW | RTLD_LOCAL ); + } + // modern versions of macOS don't search /usr/local/lib automatically contrary to what man dlopen says + // Vulkan SDK uses this as the system-wide installation location, so we're going to fallback to this if all else fails + if ( !m_library && ( getenv( "DYLD_FALLBACK_LIBRARY_PATH" ) == NULL ) ) + { + m_library = dlopen( "/usr/local/lib/libvulkan.dylib", RTLD_NOW | RTLD_LOCAL ); + } +# elif defined( __unix__ ) || defined( __QNX__ ) || defined( __Fuchsia__ ) + m_library = dlopen( "libvulkan.so", RTLD_NOW | RTLD_LOCAL ); + if ( !m_library ) + { + m_library = dlopen( "libvulkan.so.1", RTLD_NOW | RTLD_LOCAL ); + } # else # error unsupported platform # endif } # ifndef VULKAN_HPP_NO_EXCEPTIONS - if ( m_library == nullptr ) + if ( !m_library ) { // NOTE there should be an InitializationFailedError, but msvc insists on the symbol does not exist within the scope of this function. throw std::runtime_error( "Failed to load vulkan library!" 
); @@ -17616,8 +17932,9 @@ namespace VULKAN_HPP_NAMESPACE PFN_vkCmdCuLaunchKernelNVX vkCmdCuLaunchKernelNVX = 0; //=== VK_NVX_image_view_handle === - PFN_vkGetImageViewHandleNVX vkGetImageViewHandleNVX = 0; - PFN_vkGetImageViewAddressNVX vkGetImageViewAddressNVX = 0; + PFN_vkGetImageViewHandleNVX vkGetImageViewHandleNVX = 0; + PFN_vkGetImageViewHandle64NVX vkGetImageViewHandle64NVX = 0; + PFN_vkGetImageViewAddressNVX vkGetImageViewAddressNVX = 0; //=== VK_AMD_draw_indirect_count === PFN_vkCmdDrawIndirectCountAMD vkCmdDrawIndirectCountAMD = 0; @@ -18855,8 +19172,9 @@ namespace VULKAN_HPP_NAMESPACE vkCmdCuLaunchKernelNVX = PFN_vkCmdCuLaunchKernelNVX( vkGetInstanceProcAddr( instance, "vkCmdCuLaunchKernelNVX" ) ); //=== VK_NVX_image_view_handle === - vkGetImageViewHandleNVX = PFN_vkGetImageViewHandleNVX( vkGetInstanceProcAddr( instance, "vkGetImageViewHandleNVX" ) ); - vkGetImageViewAddressNVX = PFN_vkGetImageViewAddressNVX( vkGetInstanceProcAddr( instance, "vkGetImageViewAddressNVX" ) ); + vkGetImageViewHandleNVX = PFN_vkGetImageViewHandleNVX( vkGetInstanceProcAddr( instance, "vkGetImageViewHandleNVX" ) ); + vkGetImageViewHandle64NVX = PFN_vkGetImageViewHandle64NVX( vkGetInstanceProcAddr( instance, "vkGetImageViewHandle64NVX" ) ); + vkGetImageViewAddressNVX = PFN_vkGetImageViewAddressNVX( vkGetInstanceProcAddr( instance, "vkGetImageViewAddressNVX" ) ); //=== VK_AMD_draw_indirect_count === vkCmdDrawIndirectCountAMD = PFN_vkCmdDrawIndirectCountAMD( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectCountAMD" ) ); @@ -20136,8 +20454,9 @@ namespace VULKAN_HPP_NAMESPACE vkCmdCuLaunchKernelNVX = PFN_vkCmdCuLaunchKernelNVX( vkGetDeviceProcAddr( device, "vkCmdCuLaunchKernelNVX" ) ); //=== VK_NVX_image_view_handle === - vkGetImageViewHandleNVX = PFN_vkGetImageViewHandleNVX( vkGetDeviceProcAddr( device, "vkGetImageViewHandleNVX" ) ); - vkGetImageViewAddressNVX = PFN_vkGetImageViewAddressNVX( vkGetDeviceProcAddr( device, "vkGetImageViewAddressNVX" ) ); + vkGetImageViewHandleNVX = PFN_vkGetImageViewHandleNVX( vkGetDeviceProcAddr( device, "vkGetImageViewHandleNVX" ) ); + vkGetImageViewHandle64NVX = PFN_vkGetImageViewHandle64NVX( vkGetDeviceProcAddr( device, "vkGetImageViewHandle64NVX" ) ); + vkGetImageViewAddressNVX = PFN_vkGetImageViewAddressNVX( vkGetDeviceProcAddr( device, "vkGetImageViewAddressNVX" ) ); //=== VK_AMD_draw_indirect_count === vkCmdDrawIndirectCountAMD = PFN_vkCmdDrawIndirectCountAMD( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCountAMD" ) ); diff --git a/third_party/vulkan/vulkan_core.h b/third_party/vulkan/vulkan_core.h index d9cdd13..bdbf3f8 100644 --- a/third_party/vulkan/vulkan_core.h +++ b/third_party/vulkan/vulkan_core.h @@ -69,7 +69,7 @@ extern "C" { #define VK_API_VERSION_1_0 VK_MAKE_API_VERSION(0, 1, 0, 0)// Patch version should always be set to 0 // Version of this file -#define VK_HEADER_VERSION 301 +#define VK_HEADER_VERSION 302 // Complete version of this file #define VK_HEADER_VERSION_COMPLETE VK_MAKE_API_VERSION(0, 1, 3, VK_HEADER_VERSION) @@ -471,6 +471,7 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_CU_MODULE_CREATE_INFO_NVX = 1000029000, VK_STRUCTURE_TYPE_CU_FUNCTION_CREATE_INFO_NVX = 1000029001, VK_STRUCTURE_TYPE_CU_LAUNCH_INFO_NVX = 1000029002, + VK_STRUCTURE_TYPE_CU_MODULE_TEXTURING_MODE_CREATE_INFO_NVX = 1000029004, VK_STRUCTURE_TYPE_IMAGE_VIEW_HANDLE_INFO_NVX = 1000030000, VK_STRUCTURE_TYPE_IMAGE_VIEW_ADDRESS_PROPERTIES_NVX = 1000030001, VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_CAPABILITIES_KHR = 1000038000, @@ -1094,6 +1095,17 @@ typedef enum VkStructureType { 
VK_STRUCTURE_TYPE_VIDEO_DECODE_AV1_PROFILE_INFO_KHR = 1000512003, VK_STRUCTURE_TYPE_VIDEO_DECODE_AV1_SESSION_PARAMETERS_CREATE_INFO_KHR = 1000512004, VK_STRUCTURE_TYPE_VIDEO_DECODE_AV1_DPB_SLOT_INFO_KHR = 1000512005, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_AV1_CAPABILITIES_KHR = 1000513000, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_AV1_SESSION_PARAMETERS_CREATE_INFO_KHR = 1000513001, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_AV1_PICTURE_INFO_KHR = 1000513002, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_AV1_DPB_SLOT_INFO_KHR = 1000513003, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VIDEO_ENCODE_AV1_FEATURES_KHR = 1000513004, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_AV1_PROFILE_INFO_KHR = 1000513005, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_AV1_RATE_CONTROL_INFO_KHR = 1000513006, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_AV1_RATE_CONTROL_LAYER_INFO_KHR = 1000513007, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_AV1_QUALITY_LEVEL_PROPERTIES_KHR = 1000513008, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_AV1_SESSION_CREATE_INFO_KHR = 1000513009, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_AV1_GOP_REMAINING_FRAME_INFO_KHR = 1000513010, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VIDEO_MAINTENANCE_1_FEATURES_KHR = 1000515000, VK_STRUCTURE_TYPE_VIDEO_INLINE_QUERY_INFO_KHR = 1000515001, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PER_STAGE_DESCRIPTOR_SET_FEATURES_NV = 1000516000, @@ -1133,6 +1145,18 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_SET_DESCRIPTOR_BUFFER_OFFSETS_INFO_EXT = 1000545007, VK_STRUCTURE_TYPE_BIND_DESCRIPTOR_BUFFER_EMBEDDED_SAMPLERS_INFO_EXT = 1000545008, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_POOL_OVERALLOCATION_FEATURES_NV = 1000546000, + VK_STRUCTURE_TYPE_DISPLAY_SURFACE_STEREO_CREATE_INFO_NV = 1000551000, + VK_STRUCTURE_TYPE_DISPLAY_MODE_STEREO_PROPERTIES_NV = 1000551001, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_QUANTIZATION_MAP_CAPABILITIES_KHR = 1000553000, + VK_STRUCTURE_TYPE_VIDEO_FORMAT_QUANTIZATION_MAP_PROPERTIES_KHR = 1000553001, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_QUANTIZATION_MAP_INFO_KHR = 1000553002, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_QUANTIZATION_MAP_SESSION_PARAMETERS_CREATE_INFO_KHR = 1000553005, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VIDEO_ENCODE_QUANTIZATION_MAP_FEATURES_KHR = 1000553009, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_QUANTIZATION_MAP_CAPABILITIES_KHR = 1000553003, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_QUANTIZATION_MAP_CAPABILITIES_KHR = 1000553004, + VK_STRUCTURE_TYPE_VIDEO_FORMAT_H265_QUANTIZATION_MAP_PROPERTIES_KHR = 1000553006, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_AV1_QUANTIZATION_MAP_CAPABILITIES_KHR = 1000553007, + VK_STRUCTURE_TYPE_VIDEO_FORMAT_AV1_QUANTIZATION_MAP_PROPERTIES_KHR = 1000553008, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAW_ACCESS_CHAINS_FEATURES_NV = 1000555000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_RELAXED_EXTENDED_INSTRUCTION_FEATURES_KHR = 1000558000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMMAND_BUFFER_INHERITANCE_FEATURES_NV = 1000559000, @@ -1168,6 +1192,7 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_2_FEATURES_NV = 1000593000, VK_STRUCTURE_TYPE_COOPERATIVE_MATRIX_FLEXIBLE_DIMENSIONS_PROPERTIES_NV = 1000593001, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_2_PROPERTIES_NV = 1000593002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_ROBUSTNESS_FEATURES_EXT = 1000608000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETER_FEATURES = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES, // VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT is a deprecated alias @@ -1393,6 
+1418,7 @@ typedef enum VkImageLayout { VK_IMAGE_LAYOUT_VIDEO_ENCODE_SRC_KHR = 1000299001, VK_IMAGE_LAYOUT_VIDEO_ENCODE_DPB_KHR = 1000299002, VK_IMAGE_LAYOUT_ATTACHMENT_FEEDBACK_LOOP_OPTIMAL_EXT = 1000339000, + VK_IMAGE_LAYOUT_VIDEO_ENCODE_QUANTIZATION_MAP_KHR = 1000553000, VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL_KHR = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL, VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL_KHR = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL, VK_IMAGE_LAYOUT_SHADING_RATE_OPTIMAL_NV = VK_IMAGE_LAYOUT_FRAGMENT_SHADING_RATE_ATTACHMENT_OPTIMAL_KHR, @@ -2412,6 +2438,8 @@ typedef enum VkImageUsageFlagBits { VK_IMAGE_USAGE_INVOCATION_MASK_BIT_HUAWEI = 0x00040000, VK_IMAGE_USAGE_SAMPLE_WEIGHT_BIT_QCOM = 0x00100000, VK_IMAGE_USAGE_SAMPLE_BLOCK_MATCH_BIT_QCOM = 0x00200000, + VK_IMAGE_USAGE_VIDEO_ENCODE_QUANTIZATION_DELTA_MAP_BIT_KHR = 0x02000000, + VK_IMAGE_USAGE_VIDEO_ENCODE_EMPHASIS_MAP_BIT_KHR = 0x04000000, VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV = VK_IMAGE_USAGE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR, VK_IMAGE_USAGE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF } VkImageUsageFlagBits; @@ -6873,6 +6901,8 @@ static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_BOX_FILTER_SAMPLED_BIT static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_OPTICAL_FLOW_IMAGE_BIT_NV = 0x10000000000ULL; static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_OPTICAL_FLOW_VECTOR_BIT_NV = 0x20000000000ULL; static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_OPTICAL_FLOW_COST_BIT_NV = 0x40000000000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_VIDEO_ENCODE_QUANTIZATION_DELTA_MAP_BIT_KHR = 0x2000000000000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_VIDEO_ENCODE_EMPHASIS_MAP_BIT_KHR = 0x4000000000000ULL; typedef struct VkPhysicalDeviceVulkan13Features { VkStructureType sType; @@ -8084,6 +8114,7 @@ typedef enum VkVideoCodecOperationFlagBitsKHR { VK_VIDEO_CODEC_OPERATION_DECODE_H264_BIT_KHR = 0x00000001, VK_VIDEO_CODEC_OPERATION_DECODE_H265_BIT_KHR = 0x00000002, VK_VIDEO_CODEC_OPERATION_DECODE_AV1_BIT_KHR = 0x00000004, + VK_VIDEO_CODEC_OPERATION_ENCODE_AV1_BIT_KHR = 0x00040000, VK_VIDEO_CODEC_OPERATION_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF } VkVideoCodecOperationFlagBitsKHR; typedef VkFlags VkVideoCodecOperationFlagsKHR; @@ -8118,9 +8149,16 @@ typedef enum VkVideoSessionCreateFlagBitsKHR { VK_VIDEO_SESSION_CREATE_PROTECTED_CONTENT_BIT_KHR = 0x00000001, VK_VIDEO_SESSION_CREATE_ALLOW_ENCODE_PARAMETER_OPTIMIZATIONS_BIT_KHR = 0x00000002, VK_VIDEO_SESSION_CREATE_INLINE_QUERIES_BIT_KHR = 0x00000004, + VK_VIDEO_SESSION_CREATE_ALLOW_ENCODE_QUANTIZATION_DELTA_MAP_BIT_KHR = 0x00000008, + VK_VIDEO_SESSION_CREATE_ALLOW_ENCODE_EMPHASIS_MAP_BIT_KHR = 0x00000010, VK_VIDEO_SESSION_CREATE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF } VkVideoSessionCreateFlagBitsKHR; typedef VkFlags VkVideoSessionCreateFlagsKHR; + +typedef enum VkVideoSessionParametersCreateFlagBitsKHR { + VK_VIDEO_SESSION_PARAMETERS_CREATE_QUANTIZATION_MAP_COMPATIBLE_BIT_KHR = 0x00000001, + VK_VIDEO_SESSION_PARAMETERS_CREATE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoSessionParametersCreateFlagBitsKHR; typedef VkFlags VkVideoSessionParametersCreateFlagsKHR; typedef VkFlags VkVideoBeginCodingFlagsKHR; typedef VkFlags VkVideoEndCodingFlagsKHR; @@ -8423,6 +8461,7 @@ typedef enum VkVideoEncodeH264CapabilityFlagBitsKHR { VK_VIDEO_ENCODE_H264_CAPABILITY_PER_PICTURE_TYPE_MIN_MAX_QP_BIT_KHR = 0x00000040, VK_VIDEO_ENCODE_H264_CAPABILITY_PER_SLICE_CONSTANT_QP_BIT_KHR = 
0x00000080, VK_VIDEO_ENCODE_H264_CAPABILITY_GENERATE_PREFIX_NALU_BIT_KHR = 0x00000100, + VK_VIDEO_ENCODE_H264_CAPABILITY_MB_QP_DIFF_WRAPAROUND_BIT_KHR = 0x00000200, VK_VIDEO_ENCODE_H264_CAPABILITY_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF } VkVideoEncodeH264CapabilityFlagBitsKHR; typedef VkFlags VkVideoEncodeH264CapabilityFlagsKHR; @@ -8623,6 +8662,7 @@ typedef enum VkVideoEncodeH265CapabilityFlagBitsKHR { VK_VIDEO_ENCODE_H265_CAPABILITY_PER_SLICE_SEGMENT_CONSTANT_QP_BIT_KHR = 0x00000080, VK_VIDEO_ENCODE_H265_CAPABILITY_MULTIPLE_TILES_PER_SLICE_SEGMENT_BIT_KHR = 0x00000100, VK_VIDEO_ENCODE_H265_CAPABILITY_MULTIPLE_SLICE_SEGMENTS_PER_TILE_BIT_KHR = 0x00000200, + VK_VIDEO_ENCODE_H265_CAPABILITY_CU_QP_DIFF_WRAPAROUND_BIT_KHR = 0x00000400, VK_VIDEO_ENCODE_H265_CAPABILITY_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF } VkVideoEncodeH265CapabilityFlagBitsKHR; typedef VkFlags VkVideoEncodeH265CapabilityFlagsKHR; @@ -10679,11 +10719,19 @@ typedef enum VkVideoEncodeTuningModeKHR { VK_VIDEO_ENCODE_TUNING_MODE_LOSSLESS_KHR = 4, VK_VIDEO_ENCODE_TUNING_MODE_MAX_ENUM_KHR = 0x7FFFFFFF } VkVideoEncodeTuningModeKHR; + +typedef enum VkVideoEncodeFlagBitsKHR { + VK_VIDEO_ENCODE_WITH_QUANTIZATION_DELTA_MAP_BIT_KHR = 0x00000001, + VK_VIDEO_ENCODE_WITH_EMPHASIS_MAP_BIT_KHR = 0x00000002, + VK_VIDEO_ENCODE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoEncodeFlagBitsKHR; typedef VkFlags VkVideoEncodeFlagsKHR; typedef enum VkVideoEncodeCapabilityFlagBitsKHR { VK_VIDEO_ENCODE_CAPABILITY_PRECEDING_EXTERNALLY_ENCODED_BYTES_BIT_KHR = 0x00000001, VK_VIDEO_ENCODE_CAPABILITY_INSUFFICIENT_BITSTREAM_BUFFER_RANGE_DETECTION_BIT_KHR = 0x00000002, + VK_VIDEO_ENCODE_CAPABILITY_QUANTIZATION_DELTA_MAP_BIT_KHR = 0x00000004, + VK_VIDEO_ENCODE_CAPABILITY_EMPHASIS_MAP_BIT_KHR = 0x00000008, VK_VIDEO_ENCODE_CAPABILITY_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF } VkVideoEncodeCapabilityFlagBitsKHR; typedef VkFlags VkVideoEncodeCapabilityFlagsKHR; @@ -11588,6 +11636,200 @@ typedef struct VkVideoDecodeAV1DpbSlotInfoKHR { +// VK_KHR_video_encode_av1 is a preprocessor guard. Do not pass it to API calls. 
+#define VK_KHR_video_encode_av1 1 +#include "vk_video/vulkan_video_codec_av1std_encode.h" +#define VK_KHR_VIDEO_ENCODE_AV1_SPEC_VERSION 1 +#define VK_KHR_VIDEO_ENCODE_AV1_EXTENSION_NAME "VK_KHR_video_encode_av1" + +typedef enum VkVideoEncodeAV1PredictionModeKHR { + VK_VIDEO_ENCODE_AV1_PREDICTION_MODE_INTRA_ONLY_KHR = 0, + VK_VIDEO_ENCODE_AV1_PREDICTION_MODE_SINGLE_REFERENCE_KHR = 1, + VK_VIDEO_ENCODE_AV1_PREDICTION_MODE_UNIDIRECTIONAL_COMPOUND_KHR = 2, + VK_VIDEO_ENCODE_AV1_PREDICTION_MODE_BIDIRECTIONAL_COMPOUND_KHR = 3, + VK_VIDEO_ENCODE_AV1_PREDICTION_MODE_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoEncodeAV1PredictionModeKHR; + +typedef enum VkVideoEncodeAV1RateControlGroupKHR { + VK_VIDEO_ENCODE_AV1_RATE_CONTROL_GROUP_INTRA_KHR = 0, + VK_VIDEO_ENCODE_AV1_RATE_CONTROL_GROUP_PREDICTIVE_KHR = 1, + VK_VIDEO_ENCODE_AV1_RATE_CONTROL_GROUP_BIPREDICTIVE_KHR = 2, + VK_VIDEO_ENCODE_AV1_RATE_CONTROL_GROUP_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoEncodeAV1RateControlGroupKHR; + +typedef enum VkVideoEncodeAV1CapabilityFlagBitsKHR { + VK_VIDEO_ENCODE_AV1_CAPABILITY_PER_RATE_CONTROL_GROUP_MIN_MAX_Q_INDEX_BIT_KHR = 0x00000001, + VK_VIDEO_ENCODE_AV1_CAPABILITY_GENERATE_OBU_EXTENSION_HEADER_BIT_KHR = 0x00000002, + VK_VIDEO_ENCODE_AV1_CAPABILITY_PRIMARY_REFERENCE_CDF_ONLY_BIT_KHR = 0x00000004, + VK_VIDEO_ENCODE_AV1_CAPABILITY_FRAME_SIZE_OVERRIDE_BIT_KHR = 0x00000008, + VK_VIDEO_ENCODE_AV1_CAPABILITY_MOTION_VECTOR_SCALING_BIT_KHR = 0x00000010, + VK_VIDEO_ENCODE_AV1_CAPABILITY_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoEncodeAV1CapabilityFlagBitsKHR; +typedef VkFlags VkVideoEncodeAV1CapabilityFlagsKHR; + +typedef enum VkVideoEncodeAV1StdFlagBitsKHR { + VK_VIDEO_ENCODE_AV1_STD_UNIFORM_TILE_SPACING_FLAG_SET_BIT_KHR = 0x00000001, + VK_VIDEO_ENCODE_AV1_STD_SKIP_MODE_PRESENT_UNSET_BIT_KHR = 0x00000002, + VK_VIDEO_ENCODE_AV1_STD_PRIMARY_REF_FRAME_BIT_KHR = 0x00000004, + VK_VIDEO_ENCODE_AV1_STD_DELTA_Q_BIT_KHR = 0x00000008, + VK_VIDEO_ENCODE_AV1_STD_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoEncodeAV1StdFlagBitsKHR; +typedef VkFlags VkVideoEncodeAV1StdFlagsKHR; + +typedef enum VkVideoEncodeAV1SuperblockSizeFlagBitsKHR { + VK_VIDEO_ENCODE_AV1_SUPERBLOCK_SIZE_64_BIT_KHR = 0x00000001, + VK_VIDEO_ENCODE_AV1_SUPERBLOCK_SIZE_128_BIT_KHR = 0x00000002, + VK_VIDEO_ENCODE_AV1_SUPERBLOCK_SIZE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoEncodeAV1SuperblockSizeFlagBitsKHR; +typedef VkFlags VkVideoEncodeAV1SuperblockSizeFlagsKHR; + +typedef enum VkVideoEncodeAV1RateControlFlagBitsKHR { + VK_VIDEO_ENCODE_AV1_RATE_CONTROL_REGULAR_GOP_BIT_KHR = 0x00000001, + VK_VIDEO_ENCODE_AV1_RATE_CONTROL_TEMPORAL_LAYER_PATTERN_DYADIC_BIT_KHR = 0x00000002, + VK_VIDEO_ENCODE_AV1_RATE_CONTROL_REFERENCE_PATTERN_FLAT_BIT_KHR = 0x00000004, + VK_VIDEO_ENCODE_AV1_RATE_CONTROL_REFERENCE_PATTERN_DYADIC_BIT_KHR = 0x00000008, + VK_VIDEO_ENCODE_AV1_RATE_CONTROL_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoEncodeAV1RateControlFlagBitsKHR; +typedef VkFlags VkVideoEncodeAV1RateControlFlagsKHR; +typedef struct VkPhysicalDeviceVideoEncodeAV1FeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 videoEncodeAV1; +} VkPhysicalDeviceVideoEncodeAV1FeaturesKHR; + +typedef struct VkVideoEncodeAV1CapabilitiesKHR { + VkStructureType sType; + void* pNext; + VkVideoEncodeAV1CapabilityFlagsKHR flags; + StdVideoAV1Level maxLevel; + VkExtent2D codedPictureAlignment; + VkExtent2D maxTiles; + VkExtent2D minTileSize; + VkExtent2D maxTileSize; + VkVideoEncodeAV1SuperblockSizeFlagsKHR superblockSizes; + uint32_t maxSingleReferenceCount; + uint32_t singleReferenceNameMask; 
+ uint32_t maxUnidirectionalCompoundReferenceCount; + uint32_t maxUnidirectionalCompoundGroup1ReferenceCount; + uint32_t unidirectionalCompoundReferenceNameMask; + uint32_t maxBidirectionalCompoundReferenceCount; + uint32_t maxBidirectionalCompoundGroup1ReferenceCount; + uint32_t maxBidirectionalCompoundGroup2ReferenceCount; + uint32_t bidirectionalCompoundReferenceNameMask; + uint32_t maxTemporalLayerCount; + uint32_t maxSpatialLayerCount; + uint32_t maxOperatingPoints; + uint32_t minQIndex; + uint32_t maxQIndex; + VkBool32 prefersGopRemainingFrames; + VkBool32 requiresGopRemainingFrames; + VkVideoEncodeAV1StdFlagsKHR stdSyntaxFlags; +} VkVideoEncodeAV1CapabilitiesKHR; + +typedef struct VkVideoEncodeAV1QIndexKHR { + uint32_t intraQIndex; + uint32_t predictiveQIndex; + uint32_t bipredictiveQIndex; +} VkVideoEncodeAV1QIndexKHR; + +typedef struct VkVideoEncodeAV1QualityLevelPropertiesKHR { + VkStructureType sType; + void* pNext; + VkVideoEncodeAV1RateControlFlagsKHR preferredRateControlFlags; + uint32_t preferredGopFrameCount; + uint32_t preferredKeyFramePeriod; + uint32_t preferredConsecutiveBipredictiveFrameCount; + uint32_t preferredTemporalLayerCount; + VkVideoEncodeAV1QIndexKHR preferredConstantQIndex; + uint32_t preferredMaxSingleReferenceCount; + uint32_t preferredSingleReferenceNameMask; + uint32_t preferredMaxUnidirectionalCompoundReferenceCount; + uint32_t preferredMaxUnidirectionalCompoundGroup1ReferenceCount; + uint32_t preferredUnidirectionalCompoundReferenceNameMask; + uint32_t preferredMaxBidirectionalCompoundReferenceCount; + uint32_t preferredMaxBidirectionalCompoundGroup1ReferenceCount; + uint32_t preferredMaxBidirectionalCompoundGroup2ReferenceCount; + uint32_t preferredBidirectionalCompoundReferenceNameMask; +} VkVideoEncodeAV1QualityLevelPropertiesKHR; + +typedef struct VkVideoEncodeAV1SessionCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkBool32 useMaxLevel; + StdVideoAV1Level maxLevel; +} VkVideoEncodeAV1SessionCreateInfoKHR; + +typedef struct VkVideoEncodeAV1SessionParametersCreateInfoKHR { + VkStructureType sType; + const void* pNext; + const StdVideoAV1SequenceHeader* pStdSequenceHeader; + const StdVideoEncodeAV1DecoderModelInfo* pStdDecoderModelInfo; + uint32_t stdOperatingPointCount; + const StdVideoEncodeAV1OperatingPointInfo* pStdOperatingPoints; +} VkVideoEncodeAV1SessionParametersCreateInfoKHR; + +typedef struct VkVideoEncodeAV1PictureInfoKHR { + VkStructureType sType; + const void* pNext; + VkVideoEncodeAV1PredictionModeKHR predictionMode; + VkVideoEncodeAV1RateControlGroupKHR rateControlGroup; + uint32_t constantQIndex; + const StdVideoEncodeAV1PictureInfo* pStdPictureInfo; + int32_t referenceNameSlotIndices[VK_MAX_VIDEO_AV1_REFERENCES_PER_FRAME_KHR]; + VkBool32 primaryReferenceCdfOnly; + VkBool32 generateObuExtensionHeader; +} VkVideoEncodeAV1PictureInfoKHR; + +typedef struct VkVideoEncodeAV1DpbSlotInfoKHR { + VkStructureType sType; + const void* pNext; + const StdVideoEncodeAV1ReferenceInfo* pStdReferenceInfo; +} VkVideoEncodeAV1DpbSlotInfoKHR; + +typedef struct VkVideoEncodeAV1ProfileInfoKHR { + VkStructureType sType; + const void* pNext; + StdVideoAV1Profile stdProfile; +} VkVideoEncodeAV1ProfileInfoKHR; + +typedef struct VkVideoEncodeAV1FrameSizeKHR { + uint32_t intraFrameSize; + uint32_t predictiveFrameSize; + uint32_t bipredictiveFrameSize; +} VkVideoEncodeAV1FrameSizeKHR; + +typedef struct VkVideoEncodeAV1GopRemainingFrameInfoKHR { + VkStructureType sType; + const void* pNext; + VkBool32 useGopRemainingFrames; + uint32_t 
gopRemainingIntra; + uint32_t gopRemainingPredictive; + uint32_t gopRemainingBipredictive; +} VkVideoEncodeAV1GopRemainingFrameInfoKHR; + +typedef struct VkVideoEncodeAV1RateControlInfoKHR { + VkStructureType sType; + const void* pNext; + VkVideoEncodeAV1RateControlFlagsKHR flags; + uint32_t gopFrameCount; + uint32_t keyFramePeriod; + uint32_t consecutiveBipredictiveFrameCount; + uint32_t temporalLayerCount; +} VkVideoEncodeAV1RateControlInfoKHR; + +typedef struct VkVideoEncodeAV1RateControlLayerInfoKHR { + VkStructureType sType; + const void* pNext; + VkBool32 useMinQIndex; + VkVideoEncodeAV1QIndexKHR minQIndex; + VkBool32 useMaxQIndex; + VkVideoEncodeAV1QIndexKHR maxQIndex; + VkBool32 useMaxFrameSize; + VkVideoEncodeAV1FrameSizeKHR maxFrameSize; +} VkVideoEncodeAV1RateControlLayerInfoKHR; + + + // VK_KHR_video_maintenance1 is a preprocessor guard. Do not pass it to API calls. #define VK_KHR_video_maintenance1 1 #define VK_KHR_VIDEO_MAINTENANCE_1_SPEC_VERSION 1 @@ -11892,6 +12134,76 @@ VKAPI_ATTR void VKAPI_CALL vkCmdBindDescriptorBufferEmbeddedSamplers2EXT( #endif +// VK_KHR_video_encode_quantization_map is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_video_encode_quantization_map 1 +#define VK_KHR_VIDEO_ENCODE_QUANTIZATION_MAP_SPEC_VERSION 2 +#define VK_KHR_VIDEO_ENCODE_QUANTIZATION_MAP_EXTENSION_NAME "VK_KHR_video_encode_quantization_map" +typedef struct VkVideoEncodeQuantizationMapCapabilitiesKHR { + VkStructureType sType; + void* pNext; + VkExtent2D maxQuantizationMapExtent; +} VkVideoEncodeQuantizationMapCapabilitiesKHR; + +typedef struct VkVideoFormatQuantizationMapPropertiesKHR { + VkStructureType sType; + void* pNext; + VkExtent2D quantizationMapTexelSize; +} VkVideoFormatQuantizationMapPropertiesKHR; + +typedef struct VkVideoEncodeQuantizationMapInfoKHR { + VkStructureType sType; + const void* pNext; + VkImageView quantizationMap; + VkExtent2D quantizationMapExtent; +} VkVideoEncodeQuantizationMapInfoKHR; + +typedef struct VkVideoEncodeQuantizationMapSessionParametersCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkExtent2D quantizationMapTexelSize; +} VkVideoEncodeQuantizationMapSessionParametersCreateInfoKHR; + +typedef struct VkPhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 videoEncodeQuantizationMap; +} VkPhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR; + +typedef struct VkVideoEncodeH264QuantizationMapCapabilitiesKHR { + VkStructureType sType; + void* pNext; + int32_t minQpDelta; + int32_t maxQpDelta; +} VkVideoEncodeH264QuantizationMapCapabilitiesKHR; + +typedef struct VkVideoEncodeH265QuantizationMapCapabilitiesKHR { + VkStructureType sType; + void* pNext; + int32_t minQpDelta; + int32_t maxQpDelta; +} VkVideoEncodeH265QuantizationMapCapabilitiesKHR; + +typedef struct VkVideoFormatH265QuantizationMapPropertiesKHR { + VkStructureType sType; + void* pNext; + VkVideoEncodeH265CtbSizeFlagsKHR compatibleCtbSizes; +} VkVideoFormatH265QuantizationMapPropertiesKHR; + +typedef struct VkVideoEncodeAV1QuantizationMapCapabilitiesKHR { + VkStructureType sType; + void* pNext; + int32_t minQIndexDelta; + int32_t maxQIndexDelta; +} VkVideoEncodeAV1QuantizationMapCapabilitiesKHR; + +typedef struct VkVideoFormatAV1QuantizationMapPropertiesKHR { + VkStructureType sType; + void* pNext; + VkVideoEncodeAV1SuperblockSizeFlagsKHR compatibleSuperblockSizes; +} VkVideoFormatAV1QuantizationMapPropertiesKHR; + + + // VK_KHR_shader_relaxed_extended_instruction is a preprocessor guard. 
Do not pass it to API calls. #define VK_KHR_shader_relaxed_extended_instruction 1 #define VK_KHR_SHADER_RELAXED_EXTENDED_INSTRUCTION_SPEC_VERSION 1 @@ -12300,7 +12612,7 @@ VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndirectByteCountEXT( #define VK_NVX_binary_import 1 VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkCuModuleNVX) VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkCuFunctionNVX) -#define VK_NVX_BINARY_IMPORT_SPEC_VERSION 1 +#define VK_NVX_BINARY_IMPORT_SPEC_VERSION 2 #define VK_NVX_BINARY_IMPORT_EXTENSION_NAME "VK_NVX_binary_import" typedef struct VkCuModuleCreateInfoNVX { VkStructureType sType; @@ -12309,6 +12621,12 @@ typedef struct VkCuModuleCreateInfoNVX { const void* pData; } VkCuModuleCreateInfoNVX; +typedef struct VkCuModuleTexturingModeCreateInfoNVX { + VkStructureType sType; + const void* pNext; + VkBool32 use64bitTexturing; +} VkCuModuleTexturingModeCreateInfoNVX; + typedef struct VkCuFunctionCreateInfoNVX { VkStructureType sType; const void* pNext; @@ -12370,7 +12688,7 @@ VKAPI_ATTR void VKAPI_CALL vkCmdCuLaunchKernelNVX( // VK_NVX_image_view_handle is a preprocessor guard. Do not pass it to API calls. #define VK_NVX_image_view_handle 1 -#define VK_NVX_IMAGE_VIEW_HANDLE_SPEC_VERSION 2 +#define VK_NVX_IMAGE_VIEW_HANDLE_SPEC_VERSION 3 #define VK_NVX_IMAGE_VIEW_HANDLE_EXTENSION_NAME "VK_NVX_image_view_handle" typedef struct VkImageViewHandleInfoNVX { VkStructureType sType; @@ -12388,6 +12706,7 @@ typedef struct VkImageViewAddressPropertiesNVX { } VkImageViewAddressPropertiesNVX; typedef uint32_t (VKAPI_PTR *PFN_vkGetImageViewHandleNVX)(VkDevice device, const VkImageViewHandleInfoNVX* pInfo); +typedef uint64_t (VKAPI_PTR *PFN_vkGetImageViewHandle64NVX)(VkDevice device, const VkImageViewHandleInfoNVX* pInfo); typedef VkResult (VKAPI_PTR *PFN_vkGetImageViewAddressNVX)(VkDevice device, VkImageView imageView, VkImageViewAddressPropertiesNVX* pProperties); #ifndef VK_NO_PROTOTYPES @@ -12395,6 +12714,10 @@ VKAPI_ATTR uint32_t VKAPI_CALL vkGetImageViewHandleNVX( VkDevice device, const VkImageViewHandleInfoNVX* pInfo); +VKAPI_ATTR uint64_t VKAPI_CALL vkGetImageViewHandle64NVX( + VkDevice device, + const VkImageViewHandleInfoNVX* pInfo); + VKAPI_ATTR VkResult VKAPI_CALL vkGetImageViewAddressNVX( VkDevice device, VkImageView imageView, @@ -19520,6 +19843,32 @@ typedef struct VkPhysicalDeviceDescriptorPoolOverallocationFeaturesNV { +// VK_NV_display_stereo is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_display_stereo 1 +#define VK_NV_DISPLAY_STEREO_SPEC_VERSION 1 +#define VK_NV_DISPLAY_STEREO_EXTENSION_NAME "VK_NV_display_stereo" + +typedef enum VkDisplaySurfaceStereoTypeNV { + VK_DISPLAY_SURFACE_STEREO_TYPE_NONE_NV = 0, + VK_DISPLAY_SURFACE_STEREO_TYPE_ONBOARD_DIN_NV = 1, + VK_DISPLAY_SURFACE_STEREO_TYPE_HDMI_3D_NV = 2, + VK_DISPLAY_SURFACE_STEREO_TYPE_INBAND_DISPLAYPORT_NV = 3, + VK_DISPLAY_SURFACE_STEREO_TYPE_MAX_ENUM_NV = 0x7FFFFFFF +} VkDisplaySurfaceStereoTypeNV; +typedef struct VkDisplaySurfaceStereoCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkDisplaySurfaceStereoTypeNV stereoType; +} VkDisplaySurfaceStereoCreateInfoNV; + +typedef struct VkDisplayModeStereoPropertiesNV { + VkStructureType sType; + const void* pNext; + VkBool32 hdmi3DSupported; +} VkDisplayModeStereoPropertiesNV; + + + // VK_NV_raw_access_chains is a preprocessor guard. Do not pass it to API calls. 
#define VK_NV_raw_access_chains 1 #define VK_NV_RAW_ACCESS_CHAINS_SPEC_VERSION 1 @@ -19971,6 +20320,18 @@ VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceCooperativeMatrixFlexibleDimen #endif +// VK_EXT_vertex_attribute_robustness is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_vertex_attribute_robustness 1 +#define VK_EXT_VERTEX_ATTRIBUTE_ROBUSTNESS_SPEC_VERSION 1 +#define VK_EXT_VERTEX_ATTRIBUTE_ROBUSTNESS_EXTENSION_NAME "VK_EXT_vertex_attribute_robustness" +typedef struct VkPhysicalDeviceVertexAttributeRobustnessFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 vertexAttributeRobustness; +} VkPhysicalDeviceVertexAttributeRobustnessFeaturesEXT; + + + // VK_KHR_acceleration_structure is a preprocessor guard. Do not pass it to API calls. #define VK_KHR_acceleration_structure 1 #define VK_KHR_ACCELERATION_STRUCTURE_SPEC_VERSION 13 diff --git a/third_party/vulkan/vulkan_enums.hpp b/third_party/vulkan/vulkan_enums.hpp index 5209b44..ee1fdac 100644 --- a/third_party/vulkan/vulkan_enums.hpp +++ b/third_party/vulkan/vulkan_enums.hpp @@ -729,6 +729,7 @@ namespace VULKAN_HPP_NAMESPACE eCuModuleCreateInfoNVX = VK_STRUCTURE_TYPE_CU_MODULE_CREATE_INFO_NVX, eCuFunctionCreateInfoNVX = VK_STRUCTURE_TYPE_CU_FUNCTION_CREATE_INFO_NVX, eCuLaunchInfoNVX = VK_STRUCTURE_TYPE_CU_LAUNCH_INFO_NVX, + eCuModuleTexturingModeCreateInfoNVX = VK_STRUCTURE_TYPE_CU_MODULE_TEXTURING_MODE_CREATE_INFO_NVX, eImageViewHandleInfoNVX = VK_STRUCTURE_TYPE_IMAGE_VIEW_HANDLE_INFO_NVX, eImageViewAddressPropertiesNVX = VK_STRUCTURE_TYPE_IMAGE_VIEW_ADDRESS_PROPERTIES_NVX, eVideoEncodeH264CapabilitiesKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_CAPABILITIES_KHR, @@ -1390,6 +1391,17 @@ namespace VULKAN_HPP_NAMESPACE eVideoDecodeAv1ProfileInfoKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_AV1_PROFILE_INFO_KHR, eVideoDecodeAv1SessionParametersCreateInfoKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_AV1_SESSION_PARAMETERS_CREATE_INFO_KHR, eVideoDecodeAv1DpbSlotInfoKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_AV1_DPB_SLOT_INFO_KHR, + eVideoEncodeAv1CapabilitiesKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_AV1_CAPABILITIES_KHR, + eVideoEncodeAv1SessionParametersCreateInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_AV1_SESSION_PARAMETERS_CREATE_INFO_KHR, + eVideoEncodeAv1PictureInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_AV1_PICTURE_INFO_KHR, + eVideoEncodeAv1DpbSlotInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_AV1_DPB_SLOT_INFO_KHR, + ePhysicalDeviceVideoEncodeAv1FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VIDEO_ENCODE_AV1_FEATURES_KHR, + eVideoEncodeAv1ProfileInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_AV1_PROFILE_INFO_KHR, + eVideoEncodeAv1RateControlInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_AV1_RATE_CONTROL_INFO_KHR, + eVideoEncodeAv1RateControlLayerInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_AV1_RATE_CONTROL_LAYER_INFO_KHR, + eVideoEncodeAv1QualityLevelPropertiesKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_AV1_QUALITY_LEVEL_PROPERTIES_KHR, + eVideoEncodeAv1SessionCreateInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_AV1_SESSION_CREATE_INFO_KHR, + eVideoEncodeAv1GopRemainingFrameInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_AV1_GOP_REMAINING_FRAME_INFO_KHR, ePhysicalDeviceVideoMaintenance1FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VIDEO_MAINTENANCE_1_FEATURES_KHR, eVideoInlineQueryInfoKHR = VK_STRUCTURE_TYPE_VIDEO_INLINE_QUERY_INFO_KHR, ePhysicalDevicePerStageDescriptorSetFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PER_STAGE_DESCRIPTOR_SET_FEATURES_NV, @@ -1438,6 +1450,18 @@ namespace VULKAN_HPP_NAMESPACE eSetDescriptorBufferOffsetsInfoEXT = 
VK_STRUCTURE_TYPE_SET_DESCRIPTOR_BUFFER_OFFSETS_INFO_EXT, eBindDescriptorBufferEmbeddedSamplersInfoEXT = VK_STRUCTURE_TYPE_BIND_DESCRIPTOR_BUFFER_EMBEDDED_SAMPLERS_INFO_EXT, ePhysicalDeviceDescriptorPoolOverallocationFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_POOL_OVERALLOCATION_FEATURES_NV, + eDisplaySurfaceStereoCreateInfoNV = VK_STRUCTURE_TYPE_DISPLAY_SURFACE_STEREO_CREATE_INFO_NV, + eDisplayModeStereoPropertiesNV = VK_STRUCTURE_TYPE_DISPLAY_MODE_STEREO_PROPERTIES_NV, + eVideoEncodeQuantizationMapCapabilitiesKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_QUANTIZATION_MAP_CAPABILITIES_KHR, + eVideoFormatQuantizationMapPropertiesKHR = VK_STRUCTURE_TYPE_VIDEO_FORMAT_QUANTIZATION_MAP_PROPERTIES_KHR, + eVideoEncodeQuantizationMapInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_QUANTIZATION_MAP_INFO_KHR, + eVideoEncodeQuantizationMapSessionParametersCreateInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_QUANTIZATION_MAP_SESSION_PARAMETERS_CREATE_INFO_KHR, + ePhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VIDEO_ENCODE_QUANTIZATION_MAP_FEATURES_KHR, + eVideoEncodeH264QuantizationMapCapabilitiesKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_QUANTIZATION_MAP_CAPABILITIES_KHR, + eVideoEncodeH265QuantizationMapCapabilitiesKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_QUANTIZATION_MAP_CAPABILITIES_KHR, + eVideoFormatH265QuantizationMapPropertiesKHR = VK_STRUCTURE_TYPE_VIDEO_FORMAT_H265_QUANTIZATION_MAP_PROPERTIES_KHR, + eVideoEncodeAv1QuantizationMapCapabilitiesKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_AV1_QUANTIZATION_MAP_CAPABILITIES_KHR, + eVideoFormatAv1QuantizationMapPropertiesKHR = VK_STRUCTURE_TYPE_VIDEO_FORMAT_AV1_QUANTIZATION_MAP_PROPERTIES_KHR, ePhysicalDeviceRawAccessChainsFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAW_ACCESS_CHAINS_FEATURES_NV, ePhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_RELAXED_EXTENDED_INSTRUCTION_FEATURES_KHR, ePhysicalDeviceCommandBufferInheritanceFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMMAND_BUFFER_INHERITANCE_FEATURES_NV, @@ -1472,7 +1496,8 @@ namespace VULKAN_HPP_NAMESPACE eHdrVividDynamicMetadataHUAWEI = VK_STRUCTURE_TYPE_HDR_VIVID_DYNAMIC_METADATA_HUAWEI, ePhysicalDeviceCooperativeMatrix2FeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_2_FEATURES_NV, eCooperativeMatrixFlexibleDimensionsPropertiesNV = VK_STRUCTURE_TYPE_COOPERATIVE_MATRIX_FLEXIBLE_DIMENSIONS_PROPERTIES_NV, - ePhysicalDeviceCooperativeMatrix2PropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_2_PROPERTIES_NV + ePhysicalDeviceCooperativeMatrix2PropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_2_PROPERTIES_NV, + ePhysicalDeviceVertexAttributeRobustnessFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_ROBUSTNESS_FEATURES_EXT }; enum class PipelineCacheHeaderVersion @@ -1998,28 +2023,30 @@ namespace VULKAN_HPP_NAMESPACE enum class ImageUsageFlagBits : VkImageUsageFlags { - eTransferSrc = VK_IMAGE_USAGE_TRANSFER_SRC_BIT, - eTransferDst = VK_IMAGE_USAGE_TRANSFER_DST_BIT, - eSampled = VK_IMAGE_USAGE_SAMPLED_BIT, - eStorage = VK_IMAGE_USAGE_STORAGE_BIT, - eColorAttachment = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, - eDepthStencilAttachment = VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT, - eTransientAttachment = VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT, - eInputAttachment = VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT, - eVideoDecodeDstKHR = VK_IMAGE_USAGE_VIDEO_DECODE_DST_BIT_KHR, - eVideoDecodeSrcKHR = VK_IMAGE_USAGE_VIDEO_DECODE_SRC_BIT_KHR, - eVideoDecodeDpbKHR = 
VK_IMAGE_USAGE_VIDEO_DECODE_DPB_BIT_KHR, - eFragmentDensityMapEXT = VK_IMAGE_USAGE_FRAGMENT_DENSITY_MAP_BIT_EXT, - eFragmentShadingRateAttachmentKHR = VK_IMAGE_USAGE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR, - eShadingRateImageNV = VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV, - eHostTransferEXT = VK_IMAGE_USAGE_HOST_TRANSFER_BIT_EXT, - eVideoEncodeDstKHR = VK_IMAGE_USAGE_VIDEO_ENCODE_DST_BIT_KHR, - eVideoEncodeSrcKHR = VK_IMAGE_USAGE_VIDEO_ENCODE_SRC_BIT_KHR, - eVideoEncodeDpbKHR = VK_IMAGE_USAGE_VIDEO_ENCODE_DPB_BIT_KHR, - eAttachmentFeedbackLoopEXT = VK_IMAGE_USAGE_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT, - eInvocationMaskHUAWEI = VK_IMAGE_USAGE_INVOCATION_MASK_BIT_HUAWEI, - eSampleWeightQCOM = VK_IMAGE_USAGE_SAMPLE_WEIGHT_BIT_QCOM, - eSampleBlockMatchQCOM = VK_IMAGE_USAGE_SAMPLE_BLOCK_MATCH_BIT_QCOM + eTransferSrc = VK_IMAGE_USAGE_TRANSFER_SRC_BIT, + eTransferDst = VK_IMAGE_USAGE_TRANSFER_DST_BIT, + eSampled = VK_IMAGE_USAGE_SAMPLED_BIT, + eStorage = VK_IMAGE_USAGE_STORAGE_BIT, + eColorAttachment = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, + eDepthStencilAttachment = VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT, + eTransientAttachment = VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT, + eInputAttachment = VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT, + eVideoDecodeDstKHR = VK_IMAGE_USAGE_VIDEO_DECODE_DST_BIT_KHR, + eVideoDecodeSrcKHR = VK_IMAGE_USAGE_VIDEO_DECODE_SRC_BIT_KHR, + eVideoDecodeDpbKHR = VK_IMAGE_USAGE_VIDEO_DECODE_DPB_BIT_KHR, + eFragmentDensityMapEXT = VK_IMAGE_USAGE_FRAGMENT_DENSITY_MAP_BIT_EXT, + eFragmentShadingRateAttachmentKHR = VK_IMAGE_USAGE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR, + eShadingRateImageNV = VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV, + eHostTransferEXT = VK_IMAGE_USAGE_HOST_TRANSFER_BIT_EXT, + eVideoEncodeDstKHR = VK_IMAGE_USAGE_VIDEO_ENCODE_DST_BIT_KHR, + eVideoEncodeSrcKHR = VK_IMAGE_USAGE_VIDEO_ENCODE_SRC_BIT_KHR, + eVideoEncodeDpbKHR = VK_IMAGE_USAGE_VIDEO_ENCODE_DPB_BIT_KHR, + eAttachmentFeedbackLoopEXT = VK_IMAGE_USAGE_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT, + eInvocationMaskHUAWEI = VK_IMAGE_USAGE_INVOCATION_MASK_BIT_HUAWEI, + eSampleWeightQCOM = VK_IMAGE_USAGE_SAMPLE_WEIGHT_BIT_QCOM, + eSampleBlockMatchQCOM = VK_IMAGE_USAGE_SAMPLE_BLOCK_MATCH_BIT_QCOM, + eVideoEncodeQuantizationDeltaMapKHR = VK_IMAGE_USAGE_VIDEO_ENCODE_QUANTIZATION_DELTA_MAP_BIT_KHR, + eVideoEncodeEmphasisMapKHR = VK_IMAGE_USAGE_VIDEO_ENCODE_EMPHASIS_MAP_BIT_KHR }; using ImageUsageFlags = Flags; @@ -2035,7 +2062,8 @@ namespace VULKAN_HPP_NAMESPACE ImageUsageFlagBits::eVideoDecodeDpbKHR | ImageUsageFlagBits::eFragmentDensityMapEXT | ImageUsageFlagBits::eFragmentShadingRateAttachmentKHR | ImageUsageFlagBits::eHostTransferEXT | ImageUsageFlagBits::eVideoEncodeDstKHR | ImageUsageFlagBits::eVideoEncodeSrcKHR | ImageUsageFlagBits::eVideoEncodeDpbKHR | ImageUsageFlagBits::eAttachmentFeedbackLoopEXT | ImageUsageFlagBits::eInvocationMaskHUAWEI | - ImageUsageFlagBits::eSampleWeightQCOM | ImageUsageFlagBits::eSampleBlockMatchQCOM; + ImageUsageFlagBits::eSampleWeightQCOM | ImageUsageFlagBits::eSampleBlockMatchQCOM | ImageUsageFlagBits::eVideoEncodeQuantizationDeltaMapKHR | + ImageUsageFlagBits::eVideoEncodeEmphasisMapKHR; }; enum class InstanceCreateFlagBits : VkInstanceCreateFlags @@ -2591,7 +2619,8 @@ namespace VULKAN_HPP_NAMESPACE eVideoEncodeDstKHR = VK_IMAGE_LAYOUT_VIDEO_ENCODE_DST_KHR, eVideoEncodeSrcKHR = VK_IMAGE_LAYOUT_VIDEO_ENCODE_SRC_KHR, eVideoEncodeDpbKHR = VK_IMAGE_LAYOUT_VIDEO_ENCODE_DPB_KHR, - eAttachmentFeedbackLoopOptimalEXT = VK_IMAGE_LAYOUT_ATTACHMENT_FEEDBACK_LOOP_OPTIMAL_EXT + 
eAttachmentFeedbackLoopOptimalEXT = VK_IMAGE_LAYOUT_ATTACHMENT_FEEDBACK_LOOP_OPTIMAL_EXT, + eVideoEncodeQuantizationMapKHR = VK_IMAGE_LAYOUT_VIDEO_ENCODE_QUANTIZATION_MAP_KHR }; enum class ComponentSwizzle @@ -4453,7 +4482,9 @@ namespace VULKAN_HPP_NAMESPACE eBoxFilterSampledQCOM = VK_FORMAT_FEATURE_2_BOX_FILTER_SAMPLED_BIT_QCOM, eOpticalFlowImageNV = VK_FORMAT_FEATURE_2_OPTICAL_FLOW_IMAGE_BIT_NV, eOpticalFlowVectorNV = VK_FORMAT_FEATURE_2_OPTICAL_FLOW_VECTOR_BIT_NV, - eOpticalFlowCostNV = VK_FORMAT_FEATURE_2_OPTICAL_FLOW_COST_BIT_NV + eOpticalFlowCostNV = VK_FORMAT_FEATURE_2_OPTICAL_FLOW_COST_BIT_NV, + eVideoEncodeQuantizationDeltaMapKHR = VK_FORMAT_FEATURE_2_VIDEO_ENCODE_QUANTIZATION_DELTA_MAP_BIT_KHR, + eVideoEncodeEmphasisMapKHR = VK_FORMAT_FEATURE_2_VIDEO_ENCODE_EMPHASIS_MAP_BIT_KHR }; using FormatFeatureFlagBits2KHR = FormatFeatureFlagBits2; @@ -4480,7 +4511,8 @@ namespace VULKAN_HPP_NAMESPACE FormatFeatureFlagBits2::eFragmentShadingRateAttachmentKHR | FormatFeatureFlagBits2::eHostImageTransferEXT | FormatFeatureFlagBits2::eVideoEncodeInputKHR | FormatFeatureFlagBits2::eVideoEncodeDpbKHR | FormatFeatureFlagBits2::eLinearColorAttachmentNV | FormatFeatureFlagBits2::eWeightImageQCOM | FormatFeatureFlagBits2::eWeightSampledImageQCOM | FormatFeatureFlagBits2::eBlockMatchingQCOM | FormatFeatureFlagBits2::eBoxFilterSampledQCOM | - FormatFeatureFlagBits2::eOpticalFlowImageNV | FormatFeatureFlagBits2::eOpticalFlowVectorNV | FormatFeatureFlagBits2::eOpticalFlowCostNV; + FormatFeatureFlagBits2::eOpticalFlowImageNV | FormatFeatureFlagBits2::eOpticalFlowVectorNV | FormatFeatureFlagBits2::eOpticalFlowCostNV | + FormatFeatureFlagBits2::eVideoEncodeQuantizationDeltaMapKHR | FormatFeatureFlagBits2::eVideoEncodeEmphasisMapKHR; }; //=== VK_KHR_surface === @@ -4825,7 +4857,8 @@ namespace VULKAN_HPP_NAMESPACE eEncodeH265 = VK_VIDEO_CODEC_OPERATION_ENCODE_H265_BIT_KHR, eDecodeH264 = VK_VIDEO_CODEC_OPERATION_DECODE_H264_BIT_KHR, eDecodeH265 = VK_VIDEO_CODEC_OPERATION_DECODE_H265_BIT_KHR, - eDecodeAv1 = VK_VIDEO_CODEC_OPERATION_DECODE_AV1_BIT_KHR + eDecodeAv1 = VK_VIDEO_CODEC_OPERATION_DECODE_AV1_BIT_KHR, + eEncodeAv1 = VK_VIDEO_CODEC_OPERATION_ENCODE_AV1_BIT_KHR }; using VideoCodecOperationFlagsKHR = Flags; @@ -4836,7 +4869,8 @@ namespace VULKAN_HPP_NAMESPACE static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; static VULKAN_HPP_CONST_OR_CONSTEXPR VideoCodecOperationFlagsKHR allFlags = VideoCodecOperationFlagBitsKHR::eNone | VideoCodecOperationFlagBitsKHR::eEncodeH264 | VideoCodecOperationFlagBitsKHR::eEncodeH265 | - VideoCodecOperationFlagBitsKHR::eDecodeH264 | VideoCodecOperationFlagBitsKHR::eDecodeH265 | VideoCodecOperationFlagBitsKHR::eDecodeAv1; + VideoCodecOperationFlagBitsKHR::eDecodeH264 | VideoCodecOperationFlagBitsKHR::eDecodeH265 | VideoCodecOperationFlagBitsKHR::eDecodeAv1 | + VideoCodecOperationFlagBitsKHR::eEncodeAv1; }; enum class VideoChromaSubsamplingFlagBitsKHR : VkVideoChromaSubsamplingFlagsKHR @@ -4898,7 +4932,9 @@ namespace VULKAN_HPP_NAMESPACE { eProtectedContent = VK_VIDEO_SESSION_CREATE_PROTECTED_CONTENT_BIT_KHR, eAllowEncodeParameterOptimizations = VK_VIDEO_SESSION_CREATE_ALLOW_ENCODE_PARAMETER_OPTIMIZATIONS_BIT_KHR, - eInlineQueries = VK_VIDEO_SESSION_CREATE_INLINE_QUERIES_BIT_KHR + eInlineQueries = VK_VIDEO_SESSION_CREATE_INLINE_QUERIES_BIT_KHR, + eAllowEncodeQuantizationDeltaMap = VK_VIDEO_SESSION_CREATE_ALLOW_ENCODE_QUANTIZATION_DELTA_MAP_BIT_KHR, + eAllowEncodeEmphasisMap = VK_VIDEO_SESSION_CREATE_ALLOW_ENCODE_EMPHASIS_MAP_BIT_KHR }; using VideoSessionCreateFlagsKHR = 
Flags; @@ -4907,9 +4943,10 @@ namespace VULKAN_HPP_NAMESPACE struct FlagTraits { static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; - static VULKAN_HPP_CONST_OR_CONSTEXPR VideoSessionCreateFlagsKHR allFlags = VideoSessionCreateFlagBitsKHR::eProtectedContent | - VideoSessionCreateFlagBitsKHR::eAllowEncodeParameterOptimizations | - VideoSessionCreateFlagBitsKHR::eInlineQueries; + static VULKAN_HPP_CONST_OR_CONSTEXPR VideoSessionCreateFlagsKHR allFlags = + VideoSessionCreateFlagBitsKHR::eProtectedContent | VideoSessionCreateFlagBitsKHR::eAllowEncodeParameterOptimizations | + VideoSessionCreateFlagBitsKHR::eInlineQueries | VideoSessionCreateFlagBitsKHR::eAllowEncodeQuantizationDeltaMap | + VideoSessionCreateFlagBitsKHR::eAllowEncodeEmphasisMap; }; enum class VideoCodingControlFlagBitsKHR : VkVideoCodingControlFlagsKHR @@ -4939,6 +4976,7 @@ namespace VULKAN_HPP_NAMESPACE enum class VideoSessionParametersCreateFlagBitsKHR : VkVideoSessionParametersCreateFlagsKHR { + eQuantizationMapCompatible = VK_VIDEO_SESSION_PARAMETERS_CREATE_QUANTIZATION_MAP_COMPATIBLE_BIT_KHR }; using VideoSessionParametersCreateFlagsKHR = Flags; @@ -4947,7 +4985,7 @@ namespace VULKAN_HPP_NAMESPACE struct FlagTraits { static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; - static VULKAN_HPP_CONST_OR_CONSTEXPR VideoSessionParametersCreateFlagsKHR allFlags = {}; + static VULKAN_HPP_CONST_OR_CONSTEXPR VideoSessionParametersCreateFlagsKHR allFlags = VideoSessionParametersCreateFlagBitsKHR::eQuantizationMapCompatible; }; enum class VideoBeginCodingFlagBitsKHR : VkVideoBeginCodingFlagsKHR @@ -5052,7 +5090,8 @@ namespace VULKAN_HPP_NAMESPACE eBFrameInL1List = VK_VIDEO_ENCODE_H264_CAPABILITY_B_FRAME_IN_L1_LIST_BIT_KHR, ePerPictureTypeMinMaxQp = VK_VIDEO_ENCODE_H264_CAPABILITY_PER_PICTURE_TYPE_MIN_MAX_QP_BIT_KHR, ePerSliceConstantQp = VK_VIDEO_ENCODE_H264_CAPABILITY_PER_SLICE_CONSTANT_QP_BIT_KHR, - eGeneratePrefixNalu = VK_VIDEO_ENCODE_H264_CAPABILITY_GENERATE_PREFIX_NALU_BIT_KHR + eGeneratePrefixNalu = VK_VIDEO_ENCODE_H264_CAPABILITY_GENERATE_PREFIX_NALU_BIT_KHR, + eMbQpDiffWraparound = VK_VIDEO_ENCODE_H264_CAPABILITY_MB_QP_DIFF_WRAPAROUND_BIT_KHR }; using VideoEncodeH264CapabilityFlagsKHR = Flags; @@ -5066,7 +5105,7 @@ namespace VULKAN_HPP_NAMESPACE VideoEncodeH264CapabilityFlagBitsKHR::eRowUnalignedSlice | VideoEncodeH264CapabilityFlagBitsKHR::eDifferentSliceType | VideoEncodeH264CapabilityFlagBitsKHR::eBFrameInL0List | VideoEncodeH264CapabilityFlagBitsKHR::eBFrameInL1List | VideoEncodeH264CapabilityFlagBitsKHR::ePerPictureTypeMinMaxQp | VideoEncodeH264CapabilityFlagBitsKHR::ePerSliceConstantQp | - VideoEncodeH264CapabilityFlagBitsKHR::eGeneratePrefixNalu; + VideoEncodeH264CapabilityFlagBitsKHR::eGeneratePrefixNalu | VideoEncodeH264CapabilityFlagBitsKHR::eMbQpDiffWraparound; }; enum class VideoEncodeH264StdFlagBitsKHR : VkVideoEncodeH264StdFlagsKHR @@ -5146,7 +5185,8 @@ namespace VULKAN_HPP_NAMESPACE ePerPictureTypeMinMaxQp = VK_VIDEO_ENCODE_H265_CAPABILITY_PER_PICTURE_TYPE_MIN_MAX_QP_BIT_KHR, ePerSliceSegmentConstantQp = VK_VIDEO_ENCODE_H265_CAPABILITY_PER_SLICE_SEGMENT_CONSTANT_QP_BIT_KHR, eMultipleTilesPerSliceSegment = VK_VIDEO_ENCODE_H265_CAPABILITY_MULTIPLE_TILES_PER_SLICE_SEGMENT_BIT_KHR, - eMultipleSliceSegmentsPerTile = VK_VIDEO_ENCODE_H265_CAPABILITY_MULTIPLE_SLICE_SEGMENTS_PER_TILE_BIT_KHR + eMultipleSliceSegmentsPerTile = VK_VIDEO_ENCODE_H265_CAPABILITY_MULTIPLE_SLICE_SEGMENTS_PER_TILE_BIT_KHR, + eCuQpDiffWraparound = VK_VIDEO_ENCODE_H265_CAPABILITY_CU_QP_DIFF_WRAPAROUND_BIT_KHR }; using 
VideoEncodeH265CapabilityFlagsKHR = Flags; @@ -5160,7 +5200,8 @@ namespace VULKAN_HPP_NAMESPACE VideoEncodeH265CapabilityFlagBitsKHR::eRowUnalignedSliceSegment | VideoEncodeH265CapabilityFlagBitsKHR::eDifferentSliceSegmentType | VideoEncodeH265CapabilityFlagBitsKHR::eBFrameInL0List | VideoEncodeH265CapabilityFlagBitsKHR::eBFrameInL1List | VideoEncodeH265CapabilityFlagBitsKHR::ePerPictureTypeMinMaxQp | VideoEncodeH265CapabilityFlagBitsKHR::ePerSliceSegmentConstantQp | - VideoEncodeH265CapabilityFlagBitsKHR::eMultipleTilesPerSliceSegment | VideoEncodeH265CapabilityFlagBitsKHR::eMultipleSliceSegmentsPerTile; + VideoEncodeH265CapabilityFlagBitsKHR::eMultipleTilesPerSliceSegment | VideoEncodeH265CapabilityFlagBitsKHR::eMultipleSliceSegmentsPerTile | + VideoEncodeH265CapabilityFlagBitsKHR::eCuQpDiffWraparound; }; enum class VideoEncodeH265StdFlagBitsKHR : VkVideoEncodeH265StdFlagsKHR @@ -6330,7 +6371,9 @@ namespace VULKAN_HPP_NAMESPACE enum class VideoEncodeCapabilityFlagBitsKHR : VkVideoEncodeCapabilityFlagsKHR { ePrecedingExternallyEncodedBytes = VK_VIDEO_ENCODE_CAPABILITY_PRECEDING_EXTERNALLY_ENCODED_BYTES_BIT_KHR, - eInsufficientBitstreamBufferRangeDetection = VK_VIDEO_ENCODE_CAPABILITY_INSUFFICIENT_BITSTREAM_BUFFER_RANGE_DETECTION_BIT_KHR + eInsufficientBitstreamBufferRangeDetection = VK_VIDEO_ENCODE_CAPABILITY_INSUFFICIENT_BITSTREAM_BUFFER_RANGE_DETECTION_BIT_KHR, + eQuantizationDeltaMap = VK_VIDEO_ENCODE_CAPABILITY_QUANTIZATION_DELTA_MAP_BIT_KHR, + eEmphasisMap = VK_VIDEO_ENCODE_CAPABILITY_EMPHASIS_MAP_BIT_KHR }; using VideoEncodeCapabilityFlagsKHR = Flags; @@ -6340,7 +6383,8 @@ namespace VULKAN_HPP_NAMESPACE { static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; static VULKAN_HPP_CONST_OR_CONSTEXPR VideoEncodeCapabilityFlagsKHR allFlags = - VideoEncodeCapabilityFlagBitsKHR::ePrecedingExternallyEncodedBytes | VideoEncodeCapabilityFlagBitsKHR::eInsufficientBitstreamBufferRangeDetection; + VideoEncodeCapabilityFlagBitsKHR::ePrecedingExternallyEncodedBytes | VideoEncodeCapabilityFlagBitsKHR::eInsufficientBitstreamBufferRangeDetection | + VideoEncodeCapabilityFlagBitsKHR::eQuantizationDeltaMap | VideoEncodeCapabilityFlagBitsKHR::eEmphasisMap; }; enum class VideoEncodeFeedbackFlagBitsKHR : VkVideoEncodeFeedbackFlagsKHR @@ -6430,6 +6474,8 @@ namespace VULKAN_HPP_NAMESPACE enum class VideoEncodeFlagBitsKHR : VkVideoEncodeFlagsKHR { + eWithQuantizationDeltaMap = VK_VIDEO_ENCODE_WITH_QUANTIZATION_DELTA_MAP_BIT_KHR, + eWithEmphasisMap = VK_VIDEO_ENCODE_WITH_EMPHASIS_MAP_BIT_KHR }; using VideoEncodeFlagsKHR = Flags; @@ -6438,7 +6484,8 @@ namespace VULKAN_HPP_NAMESPACE struct FlagTraits { static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; - static VULKAN_HPP_CONST_OR_CONSTEXPR VideoEncodeFlagsKHR allFlags = {}; + static VULKAN_HPP_CONST_OR_CONSTEXPR VideoEncodeFlagsKHR allFlags = + VideoEncodeFlagBitsKHR::eWithQuantizationDeltaMap | VideoEncodeFlagBitsKHR::eWithEmphasisMap; }; enum class VideoEncodeRateControlFlagBitsKHR : VkVideoEncodeRateControlFlagsKHR @@ -7344,6 +7391,98 @@ namespace VULKAN_HPP_NAMESPACE }; using ComponentTypeNV = ComponentTypeKHR; + //=== VK_KHR_video_encode_av1 === + + enum class VideoEncodeAV1PredictionModeKHR + { + eIntraOnly = VK_VIDEO_ENCODE_AV1_PREDICTION_MODE_INTRA_ONLY_KHR, + eSingleReference = VK_VIDEO_ENCODE_AV1_PREDICTION_MODE_SINGLE_REFERENCE_KHR, + eUnidirectionalCompound = VK_VIDEO_ENCODE_AV1_PREDICTION_MODE_UNIDIRECTIONAL_COMPOUND_KHR, + eBidirectionalCompound = VK_VIDEO_ENCODE_AV1_PREDICTION_MODE_BIDIRECTIONAL_COMPOUND_KHR + }; + + enum class 
VideoEncodeAV1RateControlGroupKHR + { + eIntra = VK_VIDEO_ENCODE_AV1_RATE_CONTROL_GROUP_INTRA_KHR, + ePredictive = VK_VIDEO_ENCODE_AV1_RATE_CONTROL_GROUP_PREDICTIVE_KHR, + eBipredictive = VK_VIDEO_ENCODE_AV1_RATE_CONTROL_GROUP_BIPREDICTIVE_KHR + }; + + enum class VideoEncodeAV1CapabilityFlagBitsKHR : VkVideoEncodeAV1CapabilityFlagsKHR + { + ePerRateControlGroupMinMaxQIndex = VK_VIDEO_ENCODE_AV1_CAPABILITY_PER_RATE_CONTROL_GROUP_MIN_MAX_Q_INDEX_BIT_KHR, + eGenerateObuExtensionHeader = VK_VIDEO_ENCODE_AV1_CAPABILITY_GENERATE_OBU_EXTENSION_HEADER_BIT_KHR, + ePrimaryReferenceCdfOnly = VK_VIDEO_ENCODE_AV1_CAPABILITY_PRIMARY_REFERENCE_CDF_ONLY_BIT_KHR, + eFrameSizeOverride = VK_VIDEO_ENCODE_AV1_CAPABILITY_FRAME_SIZE_OVERRIDE_BIT_KHR, + eMotionVectorScaling = VK_VIDEO_ENCODE_AV1_CAPABILITY_MOTION_VECTOR_SCALING_BIT_KHR + }; + + using VideoEncodeAV1CapabilityFlagsKHR = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR VideoEncodeAV1CapabilityFlagsKHR allFlags = + VideoEncodeAV1CapabilityFlagBitsKHR::ePerRateControlGroupMinMaxQIndex | VideoEncodeAV1CapabilityFlagBitsKHR::eGenerateObuExtensionHeader | + VideoEncodeAV1CapabilityFlagBitsKHR::ePrimaryReferenceCdfOnly | VideoEncodeAV1CapabilityFlagBitsKHR::eFrameSizeOverride | + VideoEncodeAV1CapabilityFlagBitsKHR::eMotionVectorScaling; + }; + + enum class VideoEncodeAV1StdFlagBitsKHR : VkVideoEncodeAV1StdFlagsKHR + { + eUniformTileSpacingFlagSet = VK_VIDEO_ENCODE_AV1_STD_UNIFORM_TILE_SPACING_FLAG_SET_BIT_KHR, + eSkipModePresentUnset = VK_VIDEO_ENCODE_AV1_STD_SKIP_MODE_PRESENT_UNSET_BIT_KHR, + ePrimaryRefFrame = VK_VIDEO_ENCODE_AV1_STD_PRIMARY_REF_FRAME_BIT_KHR, + eDeltaQ = VK_VIDEO_ENCODE_AV1_STD_DELTA_Q_BIT_KHR + }; + + using VideoEncodeAV1StdFlagsKHR = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR VideoEncodeAV1StdFlagsKHR allFlags = + VideoEncodeAV1StdFlagBitsKHR::eUniformTileSpacingFlagSet | VideoEncodeAV1StdFlagBitsKHR::eSkipModePresentUnset | + VideoEncodeAV1StdFlagBitsKHR::ePrimaryRefFrame | VideoEncodeAV1StdFlagBitsKHR::eDeltaQ; + }; + + enum class VideoEncodeAV1SuperblockSizeFlagBitsKHR : VkVideoEncodeAV1SuperblockSizeFlagsKHR + { + e64 = VK_VIDEO_ENCODE_AV1_SUPERBLOCK_SIZE_64_BIT_KHR, + e128 = VK_VIDEO_ENCODE_AV1_SUPERBLOCK_SIZE_128_BIT_KHR + }; + + using VideoEncodeAV1SuperblockSizeFlagsKHR = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR VideoEncodeAV1SuperblockSizeFlagsKHR allFlags = + VideoEncodeAV1SuperblockSizeFlagBitsKHR::e64 | VideoEncodeAV1SuperblockSizeFlagBitsKHR::e128; + }; + + enum class VideoEncodeAV1RateControlFlagBitsKHR : VkVideoEncodeAV1RateControlFlagsKHR + { + eRegularGop = VK_VIDEO_ENCODE_AV1_RATE_CONTROL_REGULAR_GOP_BIT_KHR, + eTemporalLayerPatternDyadic = VK_VIDEO_ENCODE_AV1_RATE_CONTROL_TEMPORAL_LAYER_PATTERN_DYADIC_BIT_KHR, + eReferencePatternFlat = VK_VIDEO_ENCODE_AV1_RATE_CONTROL_REFERENCE_PATTERN_FLAT_BIT_KHR, + eReferencePatternDyadic = VK_VIDEO_ENCODE_AV1_RATE_CONTROL_REFERENCE_PATTERN_DYADIC_BIT_KHR + }; + + using VideoEncodeAV1RateControlFlagsKHR = Flags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR VideoEncodeAV1RateControlFlagsKHR allFlags = + VideoEncodeAV1RateControlFlagBitsKHR::eRegularGop | 
VideoEncodeAV1RateControlFlagBitsKHR::eTemporalLayerPatternDyadic | + VideoEncodeAV1RateControlFlagBitsKHR::eReferencePatternFlat | VideoEncodeAV1RateControlFlagBitsKHR::eReferencePatternDyadic; + }; + //=== VK_QCOM_image_processing2 === enum class BlockMatchWindowCompareModeQCOM @@ -7392,6 +7531,16 @@ namespace VULKAN_HPP_NAMESPACE }; using TimeDomainEXT = TimeDomainKHR; + //=== VK_NV_display_stereo === + + enum class DisplaySurfaceStereoTypeNV + { + eNone = VK_DISPLAY_SURFACE_STEREO_TYPE_NONE_NV, + eOnboardDin = VK_DISPLAY_SURFACE_STEREO_TYPE_ONBOARD_DIN_NV, + eHdmi3D = VK_DISPLAY_SURFACE_STEREO_TYPE_HDMI_3D_NV, + eInbandDisplayport = VK_DISPLAY_SURFACE_STEREO_TYPE_INBAND_DISPLAYPORT_NV + }; + //=== VK_KHR_maintenance7 === enum class PhysicalDeviceLayeredApiKHR diff --git a/third_party/vulkan/vulkan_extension_inspection.hpp b/third_party/vulkan/vulkan_extension_inspection.hpp index 3075472..bcd9d11 100644 --- a/third_party/vulkan/vulkan_extension_inspection.hpp +++ b/third_party/vulkan/vulkan_extension_inspection.hpp @@ -46,7 +46,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE std::map const & getDeprecatedExtensions() { - static std::map deprecatedExtensions = { + static const std::map deprecatedExtensions = { { "VK_EXT_debug_report", "VK_EXT_debug_utils" }, { "VK_NV_glsl_shader", "" }, { "VK_NV_dedicated_allocation", "VK_KHR_dedicated_allocation" }, @@ -76,7 +76,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE std::set const & getDeviceExtensions() { - static std::set deviceExtensions = { + static const std::set deviceExtensions = { "VK_KHR_swapchain", "VK_KHR_display_swapchain", "VK_NV_glsl_shader", @@ -424,6 +424,7 @@ namespace VULKAN_HPP_NAMESPACE "VK_QCOM_multiview_per_view_render_areas", "VK_KHR_compute_shader_derivatives", "VK_KHR_video_decode_av1", + "VK_KHR_video_encode_av1", "VK_KHR_video_maintenance1", "VK_NV_per_stage_descriptor_set", "VK_QCOM_image_processing2", @@ -444,6 +445,7 @@ namespace VULKAN_HPP_NAMESPACE "VK_KHR_shader_expect_assume", "VK_KHR_maintenance6", "VK_NV_descriptor_pool_overallocation", + "VK_KHR_video_encode_quantization_map", "VK_NV_raw_access_chains", "VK_KHR_shader_relaxed_extended_instruction", "VK_NV_command_buffer_inheritance", @@ -455,14 +457,15 @@ namespace VULKAN_HPP_NAMESPACE "VK_MESA_image_alignment_control", "VK_EXT_depth_clamp_control", "VK_HUAWEI_hdr_vivid", - "VK_NV_cooperative_matrix2" + "VK_NV_cooperative_matrix2", + "VK_EXT_vertex_attribute_robustness" }; return deviceExtensions; } VULKAN_HPP_INLINE std::set const & getInstanceExtensions() { - static std::set instanceExtensions = { + static const std::set instanceExtensions = { "VK_KHR_surface", "VK_KHR_display", #if defined( VK_USE_PLATFORM_XLIB_KHR ) @@ -529,15 +532,16 @@ namespace VULKAN_HPP_NAMESPACE "VK_KHR_portability_enumeration", "VK_GOOGLE_surfaceless_query", "VK_LUNARG_direct_driver_loading", - "VK_EXT_layer_settings" + "VK_EXT_layer_settings", + "VK_NV_display_stereo" }; return instanceExtensions; } VULKAN_HPP_INLINE std::map>> const & getExtensionDepends( std::string const & extension ) { - static std::map>> noDependencies; - static std::map>>> dependencies = { + static const std::map>> noDependencies; + static const std::map>>> dependencies = { { "VK_KHR_swapchain", { { "VK_VERSION_1_0", { { @@ -2249,6 +2253,11 @@ namespace VULKAN_HPP_NAMESPACE { { "VK_KHR_video_decode_queue", } } } } }, + { "VK_KHR_video_encode_av1", + { { "VK_VERSION_1_0", + { { + "VK_KHR_video_encode_queue", + } } } } }, { "VK_KHR_video_maintenance1", { { "VK_VERSION_1_0", { { @@ -2344,6 
+2353,18 @@ namespace VULKAN_HPP_NAMESPACE { "VK_VERSION_1_1", { {} } } } }, { "VK_KHR_maintenance6", { { "VK_VERSION_1_1", { {} } } } }, { "VK_NV_descriptor_pool_overallocation", { { "VK_VERSION_1_1", { {} } } } }, + { "VK_NV_display_stereo", + { { "VK_VERSION_1_0", + { { + "VK_KHR_display", + "VK_KHR_get_display_properties2", + } } } } }, + { "VK_KHR_video_encode_quantization_map", + { { "VK_VERSION_1_0", + { { + "VK_KHR_video_encode_queue", + "VK_KHR_format_feature_flags2", + } } } } }, { "VK_KHR_maintenance7", { { "VK_VERSION_1_1", { {} } } } }, { "VK_EXT_device_generated_commands", { { "VK_VERSION_1_0", @@ -2414,13 +2435,13 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE std::map const & getObsoletedExtensions() { - static std::map obsoletedExtensions = { { "VK_AMD_negative_viewport_height", "VK_KHR_maintenance1" } }; + static const std::map obsoletedExtensions = { { "VK_AMD_negative_viewport_height", "VK_KHR_maintenance1" } }; return obsoletedExtensions; } VULKAN_HPP_INLINE std::map const & getPromotedExtensions() { - static std::map promotedExtensions = { + static const std::map promotedExtensions = { { "VK_KHR_sampler_mirror_clamp_to_edge", "VK_VERSION_1_2" }, { "VK_EXT_debug_marker", "VK_EXT_debug_utils" }, { "VK_AMD_draw_indirect_count", "VK_KHR_draw_indirect_count" }, @@ -3141,8 +3162,8 @@ namespace VULKAN_HPP_NAMESPACE ( extension == "VK_ARM_shader_core_builtins" ) || ( extension == "VK_EXT_pipeline_library_group_handles" ) || ( extension == "VK_EXT_dynamic_rendering_unused_attachments" ) || ( extension == "VK_NV_low_latency2" ) || ( extension == "VK_KHR_cooperative_matrix" ) || ( extension == "VK_QCOM_multiview_per_view_render_areas" ) || - ( extension == "VK_KHR_compute_shader_derivatives" ) || ( extension == "VK_KHR_video_decode_av1" ) || ( extension == "VK_KHR_video_maintenance1" ) || - ( extension == "VK_NV_per_stage_descriptor_set" ) || ( extension == "VK_QCOM_image_processing2" ) || + ( extension == "VK_KHR_compute_shader_derivatives" ) || ( extension == "VK_KHR_video_decode_av1" ) || ( extension == "VK_KHR_video_encode_av1" ) || + ( extension == "VK_KHR_video_maintenance1" ) || ( extension == "VK_NV_per_stage_descriptor_set" ) || ( extension == "VK_QCOM_image_processing2" ) || ( extension == "VK_QCOM_filter_cubic_weights" ) || ( extension == "VK_QCOM_ycbcr_degamma" ) || ( extension == "VK_QCOM_filter_cubic_clamp" ) || ( extension == "VK_EXT_attachment_feedback_loop_dynamic_state" ) || ( extension == "VK_KHR_vertex_attribute_divisor" ) || ( extension == "VK_KHR_load_store_op_none" ) || ( extension == "VK_KHR_shader_float_controls2" ) @@ -3151,12 +3172,13 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VK_USE_PLATFORM_SCREEN_QNX*/ || ( extension == "VK_MSFT_layered_driver" ) || ( extension == "VK_KHR_index_type_uint8" ) || ( extension == "VK_KHR_line_rasterization" ) || ( extension == "VK_KHR_calibrated_timestamps" ) || ( extension == "VK_KHR_shader_expect_assume" ) || ( extension == "VK_KHR_maintenance6" ) || - ( extension == "VK_NV_descriptor_pool_overallocation" ) || ( extension == "VK_NV_raw_access_chains" ) || - ( extension == "VK_KHR_shader_relaxed_extended_instruction" ) || ( extension == "VK_NV_command_buffer_inheritance" ) || - ( extension == "VK_KHR_maintenance7" ) || ( extension == "VK_NV_shader_atomic_float16_vector" ) || - ( extension == "VK_EXT_shader_replicated_composites" ) || ( extension == "VK_NV_ray_tracing_validation" ) || - ( extension == "VK_EXT_device_generated_commands" ) || ( extension == "VK_MESA_image_alignment_control" ) || - ( extension == 
"VK_EXT_depth_clamp_control" ) || ( extension == "VK_HUAWEI_hdr_vivid" ) || ( extension == "VK_NV_cooperative_matrix2" ); + ( extension == "VK_NV_descriptor_pool_overallocation" ) || ( extension == "VK_KHR_video_encode_quantization_map" ) || + ( extension == "VK_NV_raw_access_chains" ) || ( extension == "VK_KHR_shader_relaxed_extended_instruction" ) || + ( extension == "VK_NV_command_buffer_inheritance" ) || ( extension == "VK_KHR_maintenance7" ) || + ( extension == "VK_NV_shader_atomic_float16_vector" ) || ( extension == "VK_EXT_shader_replicated_composites" ) || + ( extension == "VK_NV_ray_tracing_validation" ) || ( extension == "VK_EXT_device_generated_commands" ) || + ( extension == "VK_MESA_image_alignment_control" ) || ( extension == "VK_EXT_depth_clamp_control" ) || ( extension == "VK_HUAWEI_hdr_vivid" ) || + ( extension == "VK_NV_cooperative_matrix2" ) || ( extension == "VK_EXT_vertex_attribute_robustness" ); } VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 bool isInstanceExtension( std::string const & extension ) @@ -3216,7 +3238,7 @@ namespace VULKAN_HPP_NAMESPACE || ( extension == "VK_QNX_screen_surface" ) #endif /*VK_USE_PLATFORM_SCREEN_QNX*/ || ( extension == "VK_KHR_portability_enumeration" ) || ( extension == "VK_GOOGLE_surfaceless_query" ) || - ( extension == "VK_LUNARG_direct_driver_loading" ) || ( extension == "VK_EXT_layer_settings" ); + ( extension == "VK_LUNARG_direct_driver_loading" ) || ( extension == "VK_EXT_layer_settings" ) || ( extension == "VK_NV_display_stereo" ); } VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 bool isObsoletedExtension( std::string const & extension ) diff --git a/third_party/vulkan/vulkan_funcs.hpp b/third_party/vulkan/vulkan_funcs.hpp index fe0174d..25a480c 100644 --- a/third_party/vulkan/vulkan_funcs.hpp +++ b/third_party/vulkan/vulkan_funcs.hpp @@ -10709,6 +10709,106 @@ namespace VULKAN_HPP_NAMESPACE } return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( videoFormatProperties ) ); } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getVideoFormatPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR && + "Function requires " ); +# endif + + std::vector structureChains; + std::vector videoFormatProperties; + uint32_t videoFormatPropertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( + m_physicalDevice, reinterpret_cast( &videoFormatInfo ), &videoFormatPropertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && videoFormatPropertyCount ) + { + structureChains.resize( videoFormatPropertyCount ); + videoFormatProperties.resize( videoFormatPropertyCount ); + for ( uint32_t i = 0; i < videoFormatPropertyCount; i++ ) + { + videoFormatProperties[i].pNext = structureChains[i].template get().pNext; + } + result = static_cast( + d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( m_physicalDevice, + reinterpret_cast( &videoFormatInfo ), + &videoFormatPropertyCount, + reinterpret_cast( videoFormatProperties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING 
"::PhysicalDevice::getVideoFormatPropertiesKHR" ); + VULKAN_HPP_ASSERT( videoFormatPropertyCount <= videoFormatProperties.size() ); + if ( videoFormatPropertyCount < videoFormatProperties.size() ) + { + structureChains.resize( videoFormatPropertyCount ); + } + for ( uint32_t i = 0; i < videoFormatPropertyCount; i++ ) + { + structureChains[i].template get() = videoFormatProperties[i]; + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( structureChains ) ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getVideoFormatPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo, + StructureChainAllocator & structureChainAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR && + "Function requires " ); +# endif + + std::vector structureChains( structureChainAllocator ); + std::vector videoFormatProperties; + uint32_t videoFormatPropertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( + m_physicalDevice, reinterpret_cast( &videoFormatInfo ), &videoFormatPropertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && videoFormatPropertyCount ) + { + structureChains.resize( videoFormatPropertyCount ); + videoFormatProperties.resize( videoFormatPropertyCount ); + for ( uint32_t i = 0; i < videoFormatPropertyCount; i++ ) + { + videoFormatProperties[i].pNext = structureChains[i].template get().pNext; + } + result = static_cast( + d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( m_physicalDevice, + reinterpret_cast( &videoFormatInfo ), + &videoFormatPropertyCount, + reinterpret_cast( videoFormatProperties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoFormatPropertiesKHR" ); + VULKAN_HPP_ASSERT( videoFormatPropertyCount <= videoFormatProperties.size() ); + if ( videoFormatPropertyCount < videoFormatProperties.size() ) + { + structureChains.resize( videoFormatPropertyCount ); + } + for ( uint32_t i = 0; i < videoFormatPropertyCount; i++ ) + { + structureChains[i].template get() = videoFormatProperties[i]; + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( structureChains ) ); + } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template @@ -11676,6 +11776,30 @@ namespace VULKAN_HPP_NAMESPACE } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + template + VULKAN_HPP_INLINE uint64_t Device::getImageViewHandle64NVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX * pInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return d.vkGetImageViewHandle64NVX( static_cast( m_device ), reinterpret_cast( pInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE uint64_t Device::getImageViewHandle64NVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX & info, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetImageViewHandle64NVX && "Function requires " ); +# endif + + 
uint64_t result = d.vkGetImageViewHandle64NVX( m_device, reinterpret_cast( &info ) ); + + return result; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + template VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getImageViewAddressNVX( VULKAN_HPP_NAMESPACE::ImageView imageView, VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX * pProperties, @@ -14889,6 +15013,98 @@ namespace VULKAN_HPP_NAMESPACE } return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDisplayModeProperties2KHR && "Function requires " ); +# endif + + std::vector structureChains; + std::vector properties; + uint32_t propertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + d.vkGetDisplayModeProperties2KHR( m_physicalDevice, static_cast( display ), &propertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) + { + structureChains.resize( propertyCount ); + properties.resize( propertyCount ); + for ( uint32_t i = 0; i < propertyCount; i++ ) + { + properties[i].pNext = structureChains[i].template get().pNext; + } + result = static_cast( d.vkGetDisplayModeProperties2KHR( + m_physicalDevice, static_cast( display ), &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModeProperties2KHR" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + structureChains.resize( propertyCount ); + } + for ( uint32_t i = 0; i < propertyCount; i++ ) + { + structureChains[i].template get() = properties[i]; + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( structureChains ) ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, + StructureChainAllocator & structureChainAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDisplayModeProperties2KHR && "Function requires " ); +# endif + + std::vector structureChains( structureChainAllocator ); + std::vector properties; + uint32_t propertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + d.vkGetDisplayModeProperties2KHR( m_physicalDevice, static_cast( display ), &propertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) + { + structureChains.resize( propertyCount ); + properties.resize( propertyCount ); + for ( uint32_t i = 0; i < propertyCount; i++ ) + { + properties[i].pNext = structureChains[i].template get().pNext; + } + result = static_cast( d.vkGetDisplayModeProperties2KHR( + m_physicalDevice, static_cast( display ), &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + 
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModeProperties2KHR" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + structureChains.resize( propertyCount ); + } + for ( uint32_t i = 0; i < propertyCount; i++ ) + { + structureChains[i].template get() = properties[i]; + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( structureChains ) ); + } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template @@ -18553,6 +18769,23 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_EXT_calibrated_timestamps === + template + VULKAN_HPP_INLINE void CommandBuffer::writeBufferMarker2AMD( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage, + VULKAN_HPP_NAMESPACE::Buffer dstBuffer, + VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, + uint32_t marker, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdWriteBufferMarker2AMD( static_cast( m_commandBuffer ), + static_cast( stage ), + static_cast( dstBuffer ), + static_cast( dstOffset ), + marker ); + } + + //=== VK_EXT_calibrated_timestamps === + template VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getCalibrateableTimeDomainsEXT( uint32_t * pTimeDomainCount, VULKAN_HPP_NAMESPACE::TimeDomainKHR * pTimeDomains, diff --git a/third_party/vulkan/vulkan_handles.hpp b/third_party/vulkan/vulkan_handles.hpp index 4964e9b..d911ead 100644 --- a/third_party/vulkan/vulkan_handles.hpp +++ b/third_party/vulkan/vulkan_handles.hpp @@ -572,6 +572,7 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_NVX_binary_import === struct CuModuleCreateInfoNVX; + struct CuModuleTexturingModeCreateInfoNVX; struct CuFunctionCreateInfoNVX; struct CuLaunchInfoNVX; @@ -1765,6 +1766,21 @@ namespace VULKAN_HPP_NAMESPACE struct VideoDecodeAV1PictureInfoKHR; struct VideoDecodeAV1DpbSlotInfoKHR; + //=== VK_KHR_video_encode_av1 === + struct PhysicalDeviceVideoEncodeAV1FeaturesKHR; + struct VideoEncodeAV1CapabilitiesKHR; + struct VideoEncodeAV1QualityLevelPropertiesKHR; + struct VideoEncodeAV1SessionCreateInfoKHR; + struct VideoEncodeAV1SessionParametersCreateInfoKHR; + struct VideoEncodeAV1PictureInfoKHR; + struct VideoEncodeAV1DpbSlotInfoKHR; + struct VideoEncodeAV1ProfileInfoKHR; + struct VideoEncodeAV1QIndexKHR; + struct VideoEncodeAV1FrameSizeKHR; + struct VideoEncodeAV1GopRemainingFrameInfoKHR; + struct VideoEncodeAV1RateControlInfoKHR; + struct VideoEncodeAV1RateControlLayerInfoKHR; + //=== VK_KHR_video_maintenance1 === struct PhysicalDeviceVideoMaintenance1FeaturesKHR; struct VideoInlineQueryInfoKHR; @@ -1849,6 +1865,22 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_NV_descriptor_pool_overallocation === struct PhysicalDeviceDescriptorPoolOverallocationFeaturesNV; + //=== VK_NV_display_stereo === + struct DisplaySurfaceStereoCreateInfoNV; + struct DisplayModeStereoPropertiesNV; + + //=== VK_KHR_video_encode_quantization_map === + struct VideoEncodeQuantizationMapCapabilitiesKHR; + struct VideoFormatQuantizationMapPropertiesKHR; + struct VideoEncodeQuantizationMapInfoKHR; + struct VideoEncodeQuantizationMapSessionParametersCreateInfoKHR; + struct PhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR; + struct VideoEncodeH264QuantizationMapCapabilitiesKHR; + struct VideoEncodeH265QuantizationMapCapabilitiesKHR; + struct VideoFormatH265QuantizationMapPropertiesKHR; + struct VideoEncodeAV1QuantizationMapCapabilitiesKHR; + struct VideoFormatAV1QuantizationMapPropertiesKHR; + //=== 
VK_NV_raw_access_chains === struct PhysicalDeviceRawAccessChainsFeaturesNV; @@ -1918,6 +1950,9 @@ namespace VULKAN_HPP_NAMESPACE struct PhysicalDeviceCooperativeMatrix2FeaturesNV; struct PhysicalDeviceCooperativeMatrix2PropertiesNV; + //=== VK_EXT_vertex_attribute_robustness === + struct PhysicalDeviceVertexAttributeRobustnessFeaturesEXT; + //=================================== //=== HANDLE forward declarations === //=================================== @@ -12737,6 +12772,15 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + template + uint64_t getImageViewHandle64NVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX * pInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + uint64_t getImageViewHandle64NVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + template VULKAN_HPP_NODISCARD Result getImageViewAddressNVX( VULKAN_HPP_NAMESPACE::ImageView imageView, VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX * pProperties, @@ -16805,6 +16849,21 @@ namespace VULKAN_HPP_NAMESPACE getVideoFormatPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo, VideoFormatPropertiesKHRAllocator & videoFormatPropertiesKHRAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getVideoFormatPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getVideoFormatPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo, + StructureChainAllocator & structureChainAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ //=== VK_NV_external_memory_capabilities === @@ -17213,6 +17272,20 @@ namespace VULKAN_HPP_NAMESPACE getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, DisplayModeProperties2KHRAllocator & displayModeProperties2KHRAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + template , + typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, + typename std::enable_if::value, int>::type = 0> + VULKAN_HPP_NODISCARD typename ResultValueType>::type + getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, + StructureChainAllocator & structureChainAllocator, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template diff --git a/third_party/vulkan/vulkan_hash.hpp 
b/third_party/vulkan/vulkan_hash.hpp index c53be12..0d6d35c 100644 --- a/third_party/vulkan/vulkan_hash.hpp +++ b/third_party/vulkan/vulkan_hash.hpp @@ -2911,6 +2911,19 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::CuModuleTexturingModeCreateInfoNVX const & cuModuleTexturingModeCreateInfoNVX ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, cuModuleTexturingModeCreateInfoNVX.sType ); + VULKAN_HPP_HASH_COMBINE( seed, cuModuleTexturingModeCreateInfoNVX.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, cuModuleTexturingModeCreateInfoNVX.use64bitTexturing ); + return seed; + } + }; + # if defined( VK_ENABLE_BETA_EXTENSIONS ) template <> struct hash @@ -4304,6 +4317,19 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DisplayModeStereoPropertiesNV const & displayModeStereoPropertiesNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, displayModeStereoPropertiesNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, displayModeStereoPropertiesNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, displayModeStereoPropertiesNV.hdmi3DSupported ); + return seed; + } + }; + template <> struct hash { @@ -4470,6 +4496,19 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::DisplaySurfaceStereoCreateInfoNV const & displaySurfaceStereoCreateInfoNV ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, displaySurfaceStereoCreateInfoNV.sType ); + VULKAN_HPP_HASH_COMBINE( seed, displaySurfaceStereoCreateInfoNV.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, displaySurfaceStereoCreateInfoNV.stereoType ); + return seed; + } + }; + template <> struct hash { @@ -12774,6 +12813,21 @@ namespace std } }; + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeRobustnessFeaturesEXT const & physicalDeviceVertexAttributeRobustnessFeaturesEXT ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVertexAttributeRobustnessFeaturesEXT.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVertexAttributeRobustnessFeaturesEXT.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVertexAttributeRobustnessFeaturesEXT.vertexAttributeRobustness ); + return seed; + } + }; + template <> struct hash { @@ -12788,6 +12842,20 @@ namespace std } }; + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoEncodeAV1FeaturesKHR const & physicalDeviceVideoEncodeAV1FeaturesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVideoEncodeAV1FeaturesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVideoEncodeAV1FeaturesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVideoEncodeAV1FeaturesKHR.videoEncodeAV1 ); + return seed; + } + }; + template <> struct hash { @@ -12819,6 +12887,21 @@ namespace std } }; + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR const & physicalDeviceVideoEncodeQuantizationMapFeaturesKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVideoEncodeQuantizationMapFeaturesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVideoEncodeQuantizationMapFeaturesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, 
physicalDeviceVideoEncodeQuantizationMapFeaturesKHR.videoEncodeQuantizationMap ); + return seed; + } + }; + template <> struct hash { @@ -16648,6 +16731,243 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeAV1CapabilitiesKHR const & videoEncodeAV1CapabilitiesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1CapabilitiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1CapabilitiesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1CapabilitiesKHR.flags ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1CapabilitiesKHR.maxLevel ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1CapabilitiesKHR.codedPictureAlignment ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1CapabilitiesKHR.maxTiles ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1CapabilitiesKHR.minTileSize ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1CapabilitiesKHR.maxTileSize ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1CapabilitiesKHR.superblockSizes ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1CapabilitiesKHR.maxSingleReferenceCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1CapabilitiesKHR.singleReferenceNameMask ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1CapabilitiesKHR.maxUnidirectionalCompoundReferenceCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1CapabilitiesKHR.maxUnidirectionalCompoundGroup1ReferenceCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1CapabilitiesKHR.unidirectionalCompoundReferenceNameMask ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1CapabilitiesKHR.maxBidirectionalCompoundReferenceCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1CapabilitiesKHR.maxBidirectionalCompoundGroup1ReferenceCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1CapabilitiesKHR.maxBidirectionalCompoundGroup2ReferenceCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1CapabilitiesKHR.bidirectionalCompoundReferenceNameMask ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1CapabilitiesKHR.maxTemporalLayerCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1CapabilitiesKHR.maxSpatialLayerCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1CapabilitiesKHR.maxOperatingPoints ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1CapabilitiesKHR.minQIndex ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1CapabilitiesKHR.maxQIndex ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1CapabilitiesKHR.prefersGopRemainingFrames ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1CapabilitiesKHR.requiresGopRemainingFrames ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1CapabilitiesKHR.stdSyntaxFlags ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeAV1DpbSlotInfoKHR const & videoEncodeAV1DpbSlotInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1DpbSlotInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1DpbSlotInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1DpbSlotInfoKHR.pStdReferenceInfo ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeAV1FrameSizeKHR const & videoEncodeAV1FrameSizeKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1FrameSizeKHR.intraFrameSize ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1FrameSizeKHR.predictiveFrameSize ); + VULKAN_HPP_HASH_COMBINE( 
seed, videoEncodeAV1FrameSizeKHR.bipredictiveFrameSize ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::VideoEncodeAV1GopRemainingFrameInfoKHR const & videoEncodeAV1GopRemainingFrameInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1GopRemainingFrameInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1GopRemainingFrameInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1GopRemainingFrameInfoKHR.useGopRemainingFrames ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1GopRemainingFrameInfoKHR.gopRemainingIntra ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1GopRemainingFrameInfoKHR.gopRemainingPredictive ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1GopRemainingFrameInfoKHR.gopRemainingBipredictive ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeAV1PictureInfoKHR const & videoEncodeAV1PictureInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1PictureInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1PictureInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1PictureInfoKHR.predictionMode ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1PictureInfoKHR.rateControlGroup ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1PictureInfoKHR.constantQIndex ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1PictureInfoKHR.pStdPictureInfo ); + for ( size_t i = 0; i < VK_MAX_VIDEO_AV1_REFERENCES_PER_FRAME_KHR; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1PictureInfoKHR.referenceNameSlotIndices[i] ); + } + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1PictureInfoKHR.primaryReferenceCdfOnly ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1PictureInfoKHR.generateObuExtensionHeader ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeAV1ProfileInfoKHR const & videoEncodeAV1ProfileInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1ProfileInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1ProfileInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1ProfileInfoKHR.stdProfile ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeAV1QIndexKHR const & videoEncodeAV1QIndexKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1QIndexKHR.intraQIndex ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1QIndexKHR.predictiveQIndex ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1QIndexKHR.bipredictiveQIndex ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::VideoEncodeAV1QualityLevelPropertiesKHR const & videoEncodeAV1QualityLevelPropertiesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1QualityLevelPropertiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1QualityLevelPropertiesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1QualityLevelPropertiesKHR.preferredRateControlFlags ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1QualityLevelPropertiesKHR.preferredGopFrameCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1QualityLevelPropertiesKHR.preferredKeyFramePeriod ); + VULKAN_HPP_HASH_COMBINE( seed, 
videoEncodeAV1QualityLevelPropertiesKHR.preferredConsecutiveBipredictiveFrameCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1QualityLevelPropertiesKHR.preferredTemporalLayerCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1QualityLevelPropertiesKHR.preferredConstantQIndex ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1QualityLevelPropertiesKHR.preferredMaxSingleReferenceCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1QualityLevelPropertiesKHR.preferredSingleReferenceNameMask ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1QualityLevelPropertiesKHR.preferredMaxUnidirectionalCompoundReferenceCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1QualityLevelPropertiesKHR.preferredMaxUnidirectionalCompoundGroup1ReferenceCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1QualityLevelPropertiesKHR.preferredUnidirectionalCompoundReferenceNameMask ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1QualityLevelPropertiesKHR.preferredMaxBidirectionalCompoundReferenceCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1QualityLevelPropertiesKHR.preferredMaxBidirectionalCompoundGroup1ReferenceCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1QualityLevelPropertiesKHR.preferredMaxBidirectionalCompoundGroup2ReferenceCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1QualityLevelPropertiesKHR.preferredBidirectionalCompoundReferenceNameMask ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeAV1QuantizationMapCapabilitiesKHR const & videoEncodeAV1QuantizationMapCapabilitiesKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1QuantizationMapCapabilitiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1QuantizationMapCapabilitiesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1QuantizationMapCapabilitiesKHR.minQIndexDelta ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1QuantizationMapCapabilitiesKHR.maxQIndexDelta ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeAV1RateControlInfoKHR const & videoEncodeAV1RateControlInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1RateControlInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1RateControlInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1RateControlInfoKHR.flags ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1RateControlInfoKHR.gopFrameCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1RateControlInfoKHR.keyFramePeriod ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1RateControlInfoKHR.consecutiveBipredictiveFrameCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1RateControlInfoKHR.temporalLayerCount ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::VideoEncodeAV1RateControlLayerInfoKHR const & videoEncodeAV1RateControlLayerInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1RateControlLayerInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1RateControlLayerInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1RateControlLayerInfoKHR.useMinQIndex ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1RateControlLayerInfoKHR.minQIndex ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1RateControlLayerInfoKHR.useMaxQIndex ); + VULKAN_HPP_HASH_COMBINE( seed, 
videoEncodeAV1RateControlLayerInfoKHR.maxQIndex ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1RateControlLayerInfoKHR.useMaxFrameSize ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1RateControlLayerInfoKHR.maxFrameSize ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeAV1SessionCreateInfoKHR const & videoEncodeAV1SessionCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1SessionCreateInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1SessionCreateInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1SessionCreateInfoKHR.useMaxLevel ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1SessionCreateInfoKHR.maxLevel ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeAV1SessionParametersCreateInfoKHR const & videoEncodeAV1SessionParametersCreateInfoKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1SessionParametersCreateInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1SessionParametersCreateInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1SessionParametersCreateInfoKHR.pStdSequenceHeader ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1SessionParametersCreateInfoKHR.pStdDecoderModelInfo ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1SessionParametersCreateInfoKHR.stdOperatingPointCount ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeAV1SessionParametersCreateInfoKHR.pStdOperatingPoints ); + return seed; + } + }; + template <> struct hash { @@ -16813,6 +17133,21 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH264QuantizationMapCapabilitiesKHR const & videoEncodeH264QuantizationMapCapabilitiesKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264QuantizationMapCapabilitiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264QuantizationMapCapabilitiesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264QuantizationMapCapabilitiesKHR.minQpDelta ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH264QuantizationMapCapabilitiesKHR.maxQpDelta ); + return seed; + } + }; + template <> struct hash { @@ -17076,6 +17411,21 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeH265QuantizationMapCapabilitiesKHR const & videoEncodeH265QuantizationMapCapabilitiesKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265QuantizationMapCapabilitiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265QuantizationMapCapabilitiesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265QuantizationMapCapabilitiesKHR.minQpDelta ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeH265QuantizationMapCapabilitiesKHR.maxQpDelta ); + return seed; + } + }; + template <> struct hash { @@ -17245,6 +17595,48 @@ namespace std } }; + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::VideoEncodeQuantizationMapCapabilitiesKHR const & videoEncodeQuantizationMapCapabilitiesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeQuantizationMapCapabilitiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeQuantizationMapCapabilitiesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, 
videoEncodeQuantizationMapCapabilitiesKHR.maxQuantizationMapExtent ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeQuantizationMapInfoKHR const & videoEncodeQuantizationMapInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeQuantizationMapInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeQuantizationMapInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeQuantizationMapInfoKHR.quantizationMap ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeQuantizationMapInfoKHR.quantizationMapExtent ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoEncodeQuantizationMapSessionParametersCreateInfoKHR const & + videoEncodeQuantizationMapSessionParametersCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeQuantizationMapSessionParametersCreateInfoKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeQuantizationMapSessionParametersCreateInfoKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoEncodeQuantizationMapSessionParametersCreateInfoKHR.quantizationMapTexelSize ); + return seed; + } + }; + template <> struct hash { @@ -17335,6 +17727,34 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoFormatAV1QuantizationMapPropertiesKHR const & videoFormatAV1QuantizationMapPropertiesKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoFormatAV1QuantizationMapPropertiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoFormatAV1QuantizationMapPropertiesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoFormatAV1QuantizationMapPropertiesKHR.compatibleSuperblockSizes ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::VideoFormatH265QuantizationMapPropertiesKHR const & videoFormatH265QuantizationMapPropertiesKHR ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoFormatH265QuantizationMapPropertiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoFormatH265QuantizationMapPropertiesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoFormatH265QuantizationMapPropertiesKHR.compatibleCtbSizes ); + return seed; + } + }; + template <> struct hash { @@ -17353,6 +17773,20 @@ namespace std } }; + template <> + struct hash + { + std::size_t + operator()( VULKAN_HPP_NAMESPACE::VideoFormatQuantizationMapPropertiesKHR const & videoFormatQuantizationMapPropertiesKHR ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, videoFormatQuantizationMapPropertiesKHR.sType ); + VULKAN_HPP_HASH_COMBINE( seed, videoFormatQuantizationMapPropertiesKHR.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, videoFormatQuantizationMapPropertiesKHR.quantizationMapTexelSize ); + return seed; + } + }; + template <> struct hash { diff --git a/third_party/vulkan/vulkan_raii.hpp b/third_party/vulkan/vulkan_raii.hpp index d8ca95f..08010d5 100644 --- a/third_party/vulkan/vulkan_raii.hpp +++ b/third_party/vulkan/vulkan_raii.hpp @@ -926,8 +926,9 @@ namespace VULKAN_HPP_NAMESPACE vkCmdCuLaunchKernelNVX = PFN_vkCmdCuLaunchKernelNVX( vkGetDeviceProcAddr( device, "vkCmdCuLaunchKernelNVX" ) ); //=== VK_NVX_image_view_handle === - vkGetImageViewHandleNVX = PFN_vkGetImageViewHandleNVX( vkGetDeviceProcAddr( device, "vkGetImageViewHandleNVX" ) ); - vkGetImageViewAddressNVX = 
PFN_vkGetImageViewAddressNVX( vkGetDeviceProcAddr( device, "vkGetImageViewAddressNVX" ) ); + vkGetImageViewHandleNVX = PFN_vkGetImageViewHandleNVX( vkGetDeviceProcAddr( device, "vkGetImageViewHandleNVX" ) ); + vkGetImageViewHandle64NVX = PFN_vkGetImageViewHandle64NVX( vkGetDeviceProcAddr( device, "vkGetImageViewHandle64NVX" ) ); + vkGetImageViewAddressNVX = PFN_vkGetImageViewAddressNVX( vkGetDeviceProcAddr( device, "vkGetImageViewAddressNVX" ) ); //=== VK_AMD_draw_indirect_count === vkCmdDrawIndirectCountAMD = PFN_vkCmdDrawIndirectCountAMD( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCountAMD" ) ); @@ -1999,8 +2000,9 @@ namespace VULKAN_HPP_NAMESPACE PFN_vkCmdCuLaunchKernelNVX vkCmdCuLaunchKernelNVX = 0; //=== VK_NVX_image_view_handle === - PFN_vkGetImageViewHandleNVX vkGetImageViewHandleNVX = 0; - PFN_vkGetImageViewAddressNVX vkGetImageViewAddressNVX = 0; + PFN_vkGetImageViewHandleNVX vkGetImageViewHandleNVX = 0; + PFN_vkGetImageViewHandle64NVX vkGetImageViewHandle64NVX = 0; + PFN_vkGetImageViewAddressNVX vkGetImageViewAddressNVX = 0; //=== VK_AMD_draw_indirect_count === PFN_vkCmdDrawIndirectCountAMD vkCmdDrawIndirectCountAMD = 0; @@ -3381,6 +3383,10 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD std::vector getVideoFormatPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo ) const; + template + VULKAN_HPP_NODISCARD std::vector + getVideoFormatPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo ) const; + //=== VK_NV_external_memory_capabilities === VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV getExternalImageFormatPropertiesNV( @@ -4046,6 +4052,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD uint32_t getImageViewHandleNVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX & info ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD uint64_t getImageViewHandle64NVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX & info ) const VULKAN_HPP_NOEXCEPT; + //=== VK_KHR_device_group === VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags @@ -8224,6 +8232,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD std::vector getModeProperties2() const; + template + VULKAN_HPP_NODISCARD std::vector getModeProperties2() const; + # if defined( VK_USE_PLATFORM_WIN32_KHR ) //=== VK_NV_acquire_winrt_display === @@ -16397,6 +16408,52 @@ namespace VULKAN_HPP_NAMESPACE return videoFormatProperties; } + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector + PhysicalDevice::getVideoFormatPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceVideoFormatPropertiesKHR && + "Function requires " ); + + std::vector structureChains; + std::vector videoFormatProperties; + uint32_t videoFormatPropertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + getDispatcher()->vkGetPhysicalDeviceVideoFormatPropertiesKHR( static_cast( m_physicalDevice ), + reinterpret_cast( &videoFormatInfo ), + &videoFormatPropertyCount, + nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && videoFormatPropertyCount ) + { + structureChains.resize( videoFormatPropertyCount ); + videoFormatProperties.resize( videoFormatPropertyCount ); + for ( uint32_t i = 0; i < videoFormatPropertyCount; i++ ) + { + videoFormatProperties[i].pNext = structureChains[i].template get().pNext; + } + result = static_cast( + 
getDispatcher()->vkGetPhysicalDeviceVideoFormatPropertiesKHR( static_cast( m_physicalDevice ), + reinterpret_cast( &videoFormatInfo ), + &videoFormatPropertyCount, + reinterpret_cast( videoFormatProperties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoFormatPropertiesKHR" ); + VULKAN_HPP_ASSERT( videoFormatPropertyCount <= videoFormatProperties.size() ); + if ( videoFormatPropertyCount < videoFormatProperties.size() ) + { + structureChains.resize( videoFormatPropertyCount ); + } + for ( uint32_t i = 0; i < videoFormatPropertyCount; i++ ) + { + structureChains[i].template get() = videoFormatProperties[i]; + } + return structureChains; + } + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::CreateReturnType::Type Device::createVideoSessionKHR( VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR const & createInfo, @@ -16718,6 +16775,17 @@ namespace VULKAN_HPP_NAMESPACE return result; } + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t + Device::getImageViewHandle64NVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX & info ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageViewHandle64NVX && "Function requires " ); + + uint64_t result = + getDispatcher()->vkGetImageViewHandle64NVX( static_cast( m_device ), reinterpret_cast( &info ) ); + + return result; + } + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX ImageView::getAddressNVX() const { VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageViewAddressNVX && "Function requires " ); @@ -18135,6 +18203,48 @@ namespace VULKAN_HPP_NAMESPACE return properties; } + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector DisplayKHR::getModeProperties2() const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDisplayModeProperties2KHR && + "Function requires " ); + + std::vector structureChains; + std::vector properties; + uint32_t propertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( getDispatcher()->vkGetDisplayModeProperties2KHR( + static_cast( m_physicalDevice ), static_cast( m_display ), &propertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) + { + structureChains.resize( propertyCount ); + properties.resize( propertyCount ); + for ( uint32_t i = 0; i < propertyCount; i++ ) + { + properties[i].pNext = structureChains[i].template get().pNext; + } + result = static_cast( + getDispatcher()->vkGetDisplayModeProperties2KHR( static_cast( m_physicalDevice ), + static_cast( m_display ), + &propertyCount, + reinterpret_cast( properties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::DisplayKHR::getModeProperties2" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + structureChains.resize( propertyCount ); + } + for ( uint32_t i = 0; i < propertyCount; i++ ) + { + structureChains[i].template get() = properties[i]; + } + return structureChains; + } + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR PhysicalDevice::getDisplayPlaneCapabilities2KHR( const VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR & displayPlaneInfo ) const { diff --git a/third_party/vulkan/vulkan_static_assertions.hpp 
b/third_party/vulkan/vulkan_static_assertions.hpp index 4fa9d6b..fa1dde6 100644 --- a/third_party/vulkan/vulkan_static_assertions.hpp +++ b/third_party/vulkan/vulkan_static_assertions.hpp @@ -2236,6 +2236,13 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "CuModuleCreateInfoNVX is not nothrow_move_constructible!" ); +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CuModuleTexturingModeCreateInfoNVX ) == sizeof( VkCuModuleTexturingModeCreateInfoNVX ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "CuModuleTexturingModeCreateInfoNVX is not nothrow_move_constructible!" ); + VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX ) == sizeof( VkCuFunctionCreateInfoNVX ), "struct and wrapper have different size!" ); VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); @@ -7356,6 +7363,93 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "VideoDecodeAV1DpbSlotInfoKHR is not nothrow_move_constructible!" ); +//=== VK_KHR_video_encode_av1 === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoEncodeAV1FeaturesKHR ) == sizeof( VkPhysicalDeviceVideoEncodeAV1FeaturesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceVideoEncodeAV1FeaturesKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeAV1CapabilitiesKHR ) == sizeof( VkVideoEncodeAV1CapabilitiesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeAV1CapabilitiesKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeAV1QualityLevelPropertiesKHR ) == sizeof( VkVideoEncodeAV1QualityLevelPropertiesKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeAV1QualityLevelPropertiesKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeAV1SessionCreateInfoKHR ) == sizeof( VkVideoEncodeAV1SessionCreateInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeAV1SessionCreateInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeAV1SessionParametersCreateInfoKHR ) == + sizeof( VkVideoEncodeAV1SessionParametersCreateInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeAV1SessionParametersCreateInfoKHR is not nothrow_move_constructible!" 
); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeAV1PictureInfoKHR ) == sizeof( VkVideoEncodeAV1PictureInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeAV1PictureInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeAV1DpbSlotInfoKHR ) == sizeof( VkVideoEncodeAV1DpbSlotInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeAV1DpbSlotInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeAV1ProfileInfoKHR ) == sizeof( VkVideoEncodeAV1ProfileInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeAV1ProfileInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeAV1QIndexKHR ) == sizeof( VkVideoEncodeAV1QIndexKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeAV1QIndexKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeAV1FrameSizeKHR ) == sizeof( VkVideoEncodeAV1FrameSizeKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeAV1FrameSizeKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeAV1GopRemainingFrameInfoKHR ) == sizeof( VkVideoEncodeAV1GopRemainingFrameInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeAV1GopRemainingFrameInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeAV1RateControlInfoKHR ) == sizeof( VkVideoEncodeAV1RateControlInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeAV1RateControlInfoKHR is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeAV1RateControlLayerInfoKHR ) == sizeof( VkVideoEncodeAV1RateControlLayerInfoKHR ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VideoEncodeAV1RateControlLayerInfoKHR is not nothrow_move_constructible!" 
);
+
 //=== VK_KHR_video_maintenance1 ===
 
 VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoMaintenance1FeaturesKHR ) == sizeof( VkPhysicalDeviceVideoMaintenance1FeaturesKHR ),
@@ -7670,6 +7764,97 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value,
                           "PhysicalDeviceDescriptorPoolOverallocationFeaturesNV is not nothrow_move_constructible!" );
 
+//=== VK_NV_display_stereo ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplaySurfaceStereoCreateInfoNV ) == sizeof( VkDisplaySurfaceStereoCreateInfoNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DisplaySurfaceStereoCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplaySurfaceStereoCreateInfoNV>::value,
+                          "DisplaySurfaceStereoCreateInfoNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayModeStereoPropertiesNV ) == sizeof( VkDisplayModeStereoPropertiesNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DisplayModeStereoPropertiesNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplayModeStereoPropertiesNV>::value,
+                          "DisplayModeStereoPropertiesNV is not nothrow_move_constructible!" );
+
+//=== VK_KHR_video_encode_quantization_map ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeQuantizationMapCapabilitiesKHR ) == sizeof( VkVideoEncodeQuantizationMapCapabilitiesKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeQuantizationMapCapabilitiesKHR>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeQuantizationMapCapabilitiesKHR>::value,
+                          "VideoEncodeQuantizationMapCapabilitiesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoFormatQuantizationMapPropertiesKHR ) == sizeof( VkVideoFormatQuantizationMapPropertiesKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoFormatQuantizationMapPropertiesKHR>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoFormatQuantizationMapPropertiesKHR>::value,
+                          "VideoFormatQuantizationMapPropertiesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeQuantizationMapInfoKHR ) == sizeof( VkVideoEncodeQuantizationMapInfoKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeQuantizationMapInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeQuantizationMapInfoKHR>::value,
+                          "VideoEncodeQuantizationMapInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeQuantizationMapSessionParametersCreateInfoKHR ) ==
+                            sizeof( VkVideoEncodeQuantizationMapSessionParametersCreateInfoKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeQuantizationMapSessionParametersCreateInfoKHR>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeQuantizationMapSessionParametersCreateInfoKHR>::value,
+                          "VideoEncodeQuantizationMapSessionParametersCreateInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR ) ==
+                            sizeof( VkPhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR>::value,
+                          "PhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264QuantizationMapCapabilitiesKHR ) ==
+                            sizeof( VkVideoEncodeH264QuantizationMapCapabilitiesKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH264QuantizationMapCapabilitiesKHR>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH264QuantizationMapCapabilitiesKHR>::value,
+                          "VideoEncodeH264QuantizationMapCapabilitiesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265QuantizationMapCapabilitiesKHR ) ==
+                            sizeof( VkVideoEncodeH265QuantizationMapCapabilitiesKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH265QuantizationMapCapabilitiesKHR>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH265QuantizationMapCapabilitiesKHR>::value,
+                          "VideoEncodeH265QuantizationMapCapabilitiesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoFormatH265QuantizationMapPropertiesKHR ) ==
+                            sizeof( VkVideoFormatH265QuantizationMapPropertiesKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoFormatH265QuantizationMapPropertiesKHR>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoFormatH265QuantizationMapPropertiesKHR>::value,
+                          "VideoFormatH265QuantizationMapPropertiesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeAV1QuantizationMapCapabilitiesKHR ) ==
+                            sizeof( VkVideoEncodeAV1QuantizationMapCapabilitiesKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeAV1QuantizationMapCapabilitiesKHR>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeAV1QuantizationMapCapabilitiesKHR>::value,
+                          "VideoEncodeAV1QuantizationMapCapabilitiesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoFormatAV1QuantizationMapPropertiesKHR ) == sizeof( VkVideoFormatAV1QuantizationMapPropertiesKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoFormatAV1QuantizationMapPropertiesKHR>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoFormatAV1QuantizationMapPropertiesKHR>::value,
+                          "VideoFormatAV1QuantizationMapPropertiesKHR is not nothrow_move_constructible!" );
+
 //=== VK_NV_raw_access_chains ===
 
 VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRawAccessChainsFeaturesNV ) == sizeof( VkPhysicalDeviceRawAccessChainsFeaturesNV ),
@@ -8024,4 +8209,14 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value,
                           "PhysicalDeviceCooperativeMatrix2PropertiesNV is not nothrow_move_constructible!" );
 
+//=== VK_EXT_vertex_attribute_robustness ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeRobustnessFeaturesEXT ) ==
+                            sizeof( VkPhysicalDeviceVertexAttributeRobustnessFeaturesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeRobustnessFeaturesEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeRobustnessFeaturesEXT>::value,
+                          "PhysicalDeviceVertexAttributeRobustnessFeaturesEXT is not nothrow_move_constructible!"
); + #endif diff --git a/third_party/vulkan/vulkan_structs.hpp b/third_party/vulkan/vulkan_structs.hpp index 1bc9452..e2937f9 100644 --- a/third_party/vulkan/vulkan_structs.hpp +++ b/third_party/vulkan/vulkan_structs.hpp @@ -20500,6 +20500,103 @@ namespace VULKAN_HPP_NAMESPACE using Type = CuModuleCreateInfoNVX; }; + struct CuModuleTexturingModeCreateInfoNVX + { + using NativeType = VkCuModuleTexturingModeCreateInfoNVX; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCuModuleTexturingModeCreateInfoNVX; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR CuModuleTexturingModeCreateInfoNVX( VULKAN_HPP_NAMESPACE::Bool32 use64bitTexturing_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , use64bitTexturing{ use64bitTexturing_ } + { + } + + VULKAN_HPP_CONSTEXPR CuModuleTexturingModeCreateInfoNVX( CuModuleTexturingModeCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + CuModuleTexturingModeCreateInfoNVX( VkCuModuleTexturingModeCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT + : CuModuleTexturingModeCreateInfoNVX( *reinterpret_cast( &rhs ) ) + { + } + + CuModuleTexturingModeCreateInfoNVX & operator=( CuModuleTexturingModeCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + CuModuleTexturingModeCreateInfoNVX & operator=( VkCuModuleTexturingModeCreateInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 CuModuleTexturingModeCreateInfoNVX & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 CuModuleTexturingModeCreateInfoNVX & setUse64bitTexturing( VULKAN_HPP_NAMESPACE::Bool32 use64bitTexturing_ ) VULKAN_HPP_NOEXCEPT + { + use64bitTexturing = use64bitTexturing_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkCuModuleTexturingModeCreateInfoNVX const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkCuModuleTexturingModeCreateInfoNVX &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, use64bitTexturing ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( CuModuleTexturingModeCreateInfoNVX const & ) const = default; +#else + bool operator==( CuModuleTexturingModeCreateInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( use64bitTexturing == rhs.use64bitTexturing ); +# endif + } + + bool operator!=( CuModuleTexturingModeCreateInfoNVX const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCuModuleTexturingModeCreateInfoNVX; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 use64bitTexturing = {}; + }; + + template <> + struct CppType + { + using Type = CuModuleTexturingModeCreateInfoNVX; + }; + #if defined( VK_ENABLE_BETA_EXTENSIONS ) struct CudaFunctionCreateInfoNV { @@ -32271,6 +32368,88 @@ namespace VULKAN_HPP_NAMESPACE using Type = 
DisplayModeProperties2KHR; }; + struct DisplayModeStereoPropertiesNV + { + using NativeType = VkDisplayModeStereoPropertiesNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayModeStereoPropertiesNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DisplayModeStereoPropertiesNV( VULKAN_HPP_NAMESPACE::Bool32 hdmi3DSupported_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , hdmi3DSupported{ hdmi3DSupported_ } + { + } + + VULKAN_HPP_CONSTEXPR DisplayModeStereoPropertiesNV( DisplayModeStereoPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplayModeStereoPropertiesNV( VkDisplayModeStereoPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + : DisplayModeStereoPropertiesNV( *reinterpret_cast( &rhs ) ) + { + } + + DisplayModeStereoPropertiesNV & operator=( DisplayModeStereoPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DisplayModeStereoPropertiesNV & operator=( VkDisplayModeStereoPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkDisplayModeStereoPropertiesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDisplayModeStereoPropertiesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, hdmi3DSupported ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DisplayModeStereoPropertiesNV const & ) const = default; +#else + bool operator==( DisplayModeStereoPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( hdmi3DSupported == rhs.hdmi3DSupported ); +# endif + } + + bool operator!=( DisplayModeStereoPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayModeStereoPropertiesNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 hdmi3DSupported = {}; + }; + + template <> + struct CppType + { + using Type = DisplayModeStereoPropertiesNV; + }; + struct DisplayNativeHdrSurfaceCapabilitiesAMD { using NativeType = VkDisplayNativeHdrSurfaceCapabilitiesAMD; @@ -33384,6 +33563,104 @@ namespace VULKAN_HPP_NAMESPACE using Type = DisplaySurfaceCreateInfoKHR; }; + struct DisplaySurfaceStereoCreateInfoNV + { + using NativeType = VkDisplaySurfaceStereoCreateInfoNV; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplaySurfaceStereoCreateInfoNV; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + DisplaySurfaceStereoCreateInfoNV( VULKAN_HPP_NAMESPACE::DisplaySurfaceStereoTypeNV stereoType_ = VULKAN_HPP_NAMESPACE::DisplaySurfaceStereoTypeNV::eNone, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , stereoType{ stereoType_ } + { + } + + VULKAN_HPP_CONSTEXPR DisplaySurfaceStereoCreateInfoNV( DisplaySurfaceStereoCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DisplaySurfaceStereoCreateInfoNV( 
VkDisplaySurfaceStereoCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + : DisplaySurfaceStereoCreateInfoNV( *reinterpret_cast( &rhs ) ) + { + } + + DisplaySurfaceStereoCreateInfoNV & operator=( DisplaySurfaceStereoCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DisplaySurfaceStereoCreateInfoNV & operator=( VkDisplaySurfaceStereoCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceStereoCreateInfoNV & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceStereoCreateInfoNV & setStereoType( VULKAN_HPP_NAMESPACE::DisplaySurfaceStereoTypeNV stereoType_ ) VULKAN_HPP_NOEXCEPT + { + stereoType = stereoType_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkDisplaySurfaceStereoCreateInfoNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDisplaySurfaceStereoCreateInfoNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, stereoType ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DisplaySurfaceStereoCreateInfoNV const & ) const = default; +#else + bool operator==( DisplaySurfaceStereoCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( stereoType == rhs.stereoType ); +# endif + } + + bool operator!=( DisplaySurfaceStereoCreateInfoNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplaySurfaceStereoCreateInfoNV; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DisplaySurfaceStereoTypeNV stereoType = VULKAN_HPP_NAMESPACE::DisplaySurfaceStereoTypeNV::eNone; + }; + + template <> + struct CppType + { + using Type = DisplaySurfaceStereoCreateInfoNV; + }; + struct DrawIndexedIndirectCommand { using NativeType = VkDrawIndexedIndirectCommand; @@ -93290,6 +93567,106 @@ namespace VULKAN_HPP_NAMESPACE using Type = PhysicalDeviceVertexAttributeDivisorPropertiesKHR; }; + struct PhysicalDeviceVertexAttributeRobustnessFeaturesEXT + { + using NativeType = VkPhysicalDeviceVertexAttributeRobustnessFeaturesEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVertexAttributeRobustnessFeaturesEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexAttributeRobustnessFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeRobustness_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , vertexAttributeRobustness{ vertexAttributeRobustness_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceVertexAttributeRobustnessFeaturesEXT( PhysicalDeviceVertexAttributeRobustnessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceVertexAttributeRobustnessFeaturesEXT( VkPhysicalDeviceVertexAttributeRobustnessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : 
PhysicalDeviceVertexAttributeRobustnessFeaturesEXT( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceVertexAttributeRobustnessFeaturesEXT & + operator=( PhysicalDeviceVertexAttributeRobustnessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceVertexAttributeRobustnessFeaturesEXT & operator=( VkPhysicalDeviceVertexAttributeRobustnessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVertexAttributeRobustnessFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVertexAttributeRobustnessFeaturesEXT & + setVertexAttributeRobustness( VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeRobustness_ ) VULKAN_HPP_NOEXCEPT + { + vertexAttributeRobustness = vertexAttributeRobustness_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceVertexAttributeRobustnessFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceVertexAttributeRobustnessFeaturesEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, vertexAttributeRobustness ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceVertexAttributeRobustnessFeaturesEXT const & ) const = default; +#else + bool operator==( PhysicalDeviceVertexAttributeRobustnessFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( vertexAttributeRobustness == rhs.vertexAttributeRobustness ); +# endif + } + + bool operator!=( PhysicalDeviceVertexAttributeRobustnessFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVertexAttributeRobustnessFeaturesEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeRobustness = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceVertexAttributeRobustnessFeaturesEXT; + }; + struct PhysicalDeviceVertexInputDynamicStateFeaturesEXT { using NativeType = VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT; @@ -93389,6 +93766,103 @@ namespace VULKAN_HPP_NAMESPACE using Type = PhysicalDeviceVertexInputDynamicStateFeaturesEXT; }; + struct PhysicalDeviceVideoEncodeAV1FeaturesKHR + { + using NativeType = VkPhysicalDeviceVideoEncodeAV1FeaturesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVideoEncodeAv1FeaturesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceVideoEncodeAV1FeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 videoEncodeAV1_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , videoEncodeAV1{ videoEncodeAV1_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceVideoEncodeAV1FeaturesKHR( PhysicalDeviceVideoEncodeAV1FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + 
PhysicalDeviceVideoEncodeAV1FeaturesKHR( VkPhysicalDeviceVideoEncodeAV1FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceVideoEncodeAV1FeaturesKHR( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceVideoEncodeAV1FeaturesKHR & operator=( PhysicalDeviceVideoEncodeAV1FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceVideoEncodeAV1FeaturesKHR & operator=( VkPhysicalDeviceVideoEncodeAV1FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVideoEncodeAV1FeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVideoEncodeAV1FeaturesKHR & setVideoEncodeAV1( VULKAN_HPP_NAMESPACE::Bool32 videoEncodeAV1_ ) VULKAN_HPP_NOEXCEPT + { + videoEncodeAV1 = videoEncodeAV1_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceVideoEncodeAV1FeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceVideoEncodeAV1FeaturesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, videoEncodeAV1 ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceVideoEncodeAV1FeaturesKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceVideoEncodeAV1FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( videoEncodeAV1 == rhs.videoEncodeAV1 ); +# endif + } + + bool operator!=( PhysicalDeviceVideoEncodeAV1FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVideoEncodeAv1FeaturesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 videoEncodeAV1 = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceVideoEncodeAV1FeaturesKHR; + }; + struct VideoProfileInfoKHR { using NativeType = VkVideoProfileInfoKHR; @@ -93628,6 +94102,106 @@ namespace VULKAN_HPP_NAMESPACE using Type = PhysicalDeviceVideoEncodeQualityLevelInfoKHR; }; + struct PhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR + { + using NativeType = VkPhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 videoEncodeQuantizationMap_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , videoEncodeQuantizationMap{ videoEncodeQuantizationMap_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR( PhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR( VkPhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR const & 
rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR & + operator=( PhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR & operator=( VkPhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR & + setVideoEncodeQuantizationMap( VULKAN_HPP_NAMESPACE::Bool32 videoEncodeQuantizationMap_ ) VULKAN_HPP_NOEXCEPT + { + videoEncodeQuantizationMap = videoEncodeQuantizationMap_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, videoEncodeQuantizationMap ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR const & ) const = default; +#else + bool operator==( PhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( videoEncodeQuantizationMap == rhs.videoEncodeQuantizationMap ); +# endif + } + + bool operator!=( PhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 videoEncodeQuantizationMap = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR; + }; + struct PhysicalDeviceVideoFormatInfoKHR { using NativeType = VkPhysicalDeviceVideoFormatInfoKHR; @@ -124800,6 +125374,1797 @@ namespace VULKAN_HPP_NAMESPACE using Type = VideoDecodeUsageInfoKHR; }; + struct VideoEncodeAV1CapabilitiesKHR + { + using NativeType = VkVideoEncodeAV1CapabilitiesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeAv1CapabilitiesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeAV1CapabilitiesKHR( VULKAN_HPP_NAMESPACE::VideoEncodeAV1CapabilityFlagsKHR flags_ = {}, + StdVideoAV1Level maxLevel_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D codedPictureAlignment_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D maxTiles_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D minTileSize_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D maxTileSize_ = {}, + VULKAN_HPP_NAMESPACE::VideoEncodeAV1SuperblockSizeFlagsKHR superblockSizes_ = {}, 
+ uint32_t maxSingleReferenceCount_ = {}, + uint32_t singleReferenceNameMask_ = {}, + uint32_t maxUnidirectionalCompoundReferenceCount_ = {}, + uint32_t maxUnidirectionalCompoundGroup1ReferenceCount_ = {}, + uint32_t unidirectionalCompoundReferenceNameMask_ = {}, + uint32_t maxBidirectionalCompoundReferenceCount_ = {}, + uint32_t maxBidirectionalCompoundGroup1ReferenceCount_ = {}, + uint32_t maxBidirectionalCompoundGroup2ReferenceCount_ = {}, + uint32_t bidirectionalCompoundReferenceNameMask_ = {}, + uint32_t maxTemporalLayerCount_ = {}, + uint32_t maxSpatialLayerCount_ = {}, + uint32_t maxOperatingPoints_ = {}, + uint32_t minQIndex_ = {}, + uint32_t maxQIndex_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 prefersGopRemainingFrames_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 requiresGopRemainingFrames_ = {}, + VULKAN_HPP_NAMESPACE::VideoEncodeAV1StdFlagsKHR stdSyntaxFlags_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , maxLevel{ maxLevel_ } + , codedPictureAlignment{ codedPictureAlignment_ } + , maxTiles{ maxTiles_ } + , minTileSize{ minTileSize_ } + , maxTileSize{ maxTileSize_ } + , superblockSizes{ superblockSizes_ } + , maxSingleReferenceCount{ maxSingleReferenceCount_ } + , singleReferenceNameMask{ singleReferenceNameMask_ } + , maxUnidirectionalCompoundReferenceCount{ maxUnidirectionalCompoundReferenceCount_ } + , maxUnidirectionalCompoundGroup1ReferenceCount{ maxUnidirectionalCompoundGroup1ReferenceCount_ } + , unidirectionalCompoundReferenceNameMask{ unidirectionalCompoundReferenceNameMask_ } + , maxBidirectionalCompoundReferenceCount{ maxBidirectionalCompoundReferenceCount_ } + , maxBidirectionalCompoundGroup1ReferenceCount{ maxBidirectionalCompoundGroup1ReferenceCount_ } + , maxBidirectionalCompoundGroup2ReferenceCount{ maxBidirectionalCompoundGroup2ReferenceCount_ } + , bidirectionalCompoundReferenceNameMask{ bidirectionalCompoundReferenceNameMask_ } + , maxTemporalLayerCount{ maxTemporalLayerCount_ } + , maxSpatialLayerCount{ maxSpatialLayerCount_ } + , maxOperatingPoints{ maxOperatingPoints_ } + , minQIndex{ minQIndex_ } + , maxQIndex{ maxQIndex_ } + , prefersGopRemainingFrames{ prefersGopRemainingFrames_ } + , requiresGopRemainingFrames{ requiresGopRemainingFrames_ } + , stdSyntaxFlags{ stdSyntaxFlags_ } + { + } + + VULKAN_HPP_CONSTEXPR VideoEncodeAV1CapabilitiesKHR( VideoEncodeAV1CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeAV1CapabilitiesKHR( VkVideoEncodeAV1CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeAV1CapabilitiesKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoEncodeAV1CapabilitiesKHR & operator=( VideoEncodeAV1CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoEncodeAV1CapabilitiesKHR & operator=( VkVideoEncodeAV1CapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkVideoEncodeAV1CapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeAV1CapabilitiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + flags, + maxLevel, + codedPictureAlignment, + maxTiles, + minTileSize, + maxTileSize, + superblockSizes, + maxSingleReferenceCount, + singleReferenceNameMask, + 
maxUnidirectionalCompoundReferenceCount, + maxUnidirectionalCompoundGroup1ReferenceCount, + unidirectionalCompoundReferenceNameMask, + maxBidirectionalCompoundReferenceCount, + maxBidirectionalCompoundGroup1ReferenceCount, + maxBidirectionalCompoundGroup2ReferenceCount, + bidirectionalCompoundReferenceNameMask, + maxTemporalLayerCount, + maxSpatialLayerCount, + maxOperatingPoints, + minQIndex, + maxQIndex, + prefersGopRemainingFrames, + requiresGopRemainingFrames, + stdSyntaxFlags ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( VideoEncodeAV1CapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = flags <=> rhs.flags; cmp != 0 ) + return cmp; + if ( auto cmp = memcmp( &maxLevel, &rhs.maxLevel, sizeof( StdVideoAV1Level ) ); cmp != 0 ) + return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; + if ( auto cmp = codedPictureAlignment <=> rhs.codedPictureAlignment; cmp != 0 ) + return cmp; + if ( auto cmp = maxTiles <=> rhs.maxTiles; cmp != 0 ) + return cmp; + if ( auto cmp = minTileSize <=> rhs.minTileSize; cmp != 0 ) + return cmp; + if ( auto cmp = maxTileSize <=> rhs.maxTileSize; cmp != 0 ) + return cmp; + if ( auto cmp = superblockSizes <=> rhs.superblockSizes; cmp != 0 ) + return cmp; + if ( auto cmp = maxSingleReferenceCount <=> rhs.maxSingleReferenceCount; cmp != 0 ) + return cmp; + if ( auto cmp = singleReferenceNameMask <=> rhs.singleReferenceNameMask; cmp != 0 ) + return cmp; + if ( auto cmp = maxUnidirectionalCompoundReferenceCount <=> rhs.maxUnidirectionalCompoundReferenceCount; cmp != 0 ) + return cmp; + if ( auto cmp = maxUnidirectionalCompoundGroup1ReferenceCount <=> rhs.maxUnidirectionalCompoundGroup1ReferenceCount; cmp != 0 ) + return cmp; + if ( auto cmp = unidirectionalCompoundReferenceNameMask <=> rhs.unidirectionalCompoundReferenceNameMask; cmp != 0 ) + return cmp; + if ( auto cmp = maxBidirectionalCompoundReferenceCount <=> rhs.maxBidirectionalCompoundReferenceCount; cmp != 0 ) + return cmp; + if ( auto cmp = maxBidirectionalCompoundGroup1ReferenceCount <=> rhs.maxBidirectionalCompoundGroup1ReferenceCount; cmp != 0 ) + return cmp; + if ( auto cmp = maxBidirectionalCompoundGroup2ReferenceCount <=> rhs.maxBidirectionalCompoundGroup2ReferenceCount; cmp != 0 ) + return cmp; + if ( auto cmp = bidirectionalCompoundReferenceNameMask <=> rhs.bidirectionalCompoundReferenceNameMask; cmp != 0 ) + return cmp; + if ( auto cmp = maxTemporalLayerCount <=> rhs.maxTemporalLayerCount; cmp != 0 ) + return cmp; + if ( auto cmp = maxSpatialLayerCount <=> rhs.maxSpatialLayerCount; cmp != 0 ) + return cmp; + if ( auto cmp = maxOperatingPoints <=> rhs.maxOperatingPoints; cmp != 0 ) + return cmp; + if ( auto cmp = minQIndex <=> rhs.minQIndex; cmp != 0 ) + return cmp; + if ( auto cmp = maxQIndex <=> rhs.maxQIndex; cmp != 0 ) + return cmp; + if ( auto cmp = prefersGopRemainingFrames <=> rhs.prefersGopRemainingFrames; cmp != 0 ) + return cmp; + if ( auto cmp = requiresGopRemainingFrames <=> rhs.requiresGopRemainingFrames; cmp != 0 ) + return cmp; + if ( auto cmp = stdSyntaxFlags <=> rhs.stdSyntaxFlags; cmp != 0 ) + return cmp; + + return std::strong_ordering::equivalent; + } +#endif + + bool operator==( VideoEncodeAV1CapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && + ( memcmp( 
&maxLevel, &rhs.maxLevel, sizeof( StdVideoAV1Level ) ) == 0 ) && ( codedPictureAlignment == rhs.codedPictureAlignment ) && + ( maxTiles == rhs.maxTiles ) && ( minTileSize == rhs.minTileSize ) && ( maxTileSize == rhs.maxTileSize ) && + ( superblockSizes == rhs.superblockSizes ) && ( maxSingleReferenceCount == rhs.maxSingleReferenceCount ) && + ( singleReferenceNameMask == rhs.singleReferenceNameMask ) && + ( maxUnidirectionalCompoundReferenceCount == rhs.maxUnidirectionalCompoundReferenceCount ) && + ( maxUnidirectionalCompoundGroup1ReferenceCount == rhs.maxUnidirectionalCompoundGroup1ReferenceCount ) && + ( unidirectionalCompoundReferenceNameMask == rhs.unidirectionalCompoundReferenceNameMask ) && + ( maxBidirectionalCompoundReferenceCount == rhs.maxBidirectionalCompoundReferenceCount ) && + ( maxBidirectionalCompoundGroup1ReferenceCount == rhs.maxBidirectionalCompoundGroup1ReferenceCount ) && + ( maxBidirectionalCompoundGroup2ReferenceCount == rhs.maxBidirectionalCompoundGroup2ReferenceCount ) && + ( bidirectionalCompoundReferenceNameMask == rhs.bidirectionalCompoundReferenceNameMask ) && + ( maxTemporalLayerCount == rhs.maxTemporalLayerCount ) && ( maxSpatialLayerCount == rhs.maxSpatialLayerCount ) && + ( maxOperatingPoints == rhs.maxOperatingPoints ) && ( minQIndex == rhs.minQIndex ) && ( maxQIndex == rhs.maxQIndex ) && + ( prefersGopRemainingFrames == rhs.prefersGopRemainingFrames ) && ( requiresGopRemainingFrames == rhs.requiresGopRemainingFrames ) && + ( stdSyntaxFlags == rhs.stdSyntaxFlags ); + } + + bool operator!=( VideoEncodeAV1CapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeAv1CapabilitiesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeAV1CapabilityFlagsKHR flags = {}; + StdVideoAV1Level maxLevel = {}; + VULKAN_HPP_NAMESPACE::Extent2D codedPictureAlignment = {}; + VULKAN_HPP_NAMESPACE::Extent2D maxTiles = {}; + VULKAN_HPP_NAMESPACE::Extent2D minTileSize = {}; + VULKAN_HPP_NAMESPACE::Extent2D maxTileSize = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeAV1SuperblockSizeFlagsKHR superblockSizes = {}; + uint32_t maxSingleReferenceCount = {}; + uint32_t singleReferenceNameMask = {}; + uint32_t maxUnidirectionalCompoundReferenceCount = {}; + uint32_t maxUnidirectionalCompoundGroup1ReferenceCount = {}; + uint32_t unidirectionalCompoundReferenceNameMask = {}; + uint32_t maxBidirectionalCompoundReferenceCount = {}; + uint32_t maxBidirectionalCompoundGroup1ReferenceCount = {}; + uint32_t maxBidirectionalCompoundGroup2ReferenceCount = {}; + uint32_t bidirectionalCompoundReferenceNameMask = {}; + uint32_t maxTemporalLayerCount = {}; + uint32_t maxSpatialLayerCount = {}; + uint32_t maxOperatingPoints = {}; + uint32_t minQIndex = {}; + uint32_t maxQIndex = {}; + VULKAN_HPP_NAMESPACE::Bool32 prefersGopRemainingFrames = {}; + VULKAN_HPP_NAMESPACE::Bool32 requiresGopRemainingFrames = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeAV1StdFlagsKHR stdSyntaxFlags = {}; + }; + + template <> + struct CppType + { + using Type = VideoEncodeAV1CapabilitiesKHR; + }; + + struct VideoEncodeAV1DpbSlotInfoKHR + { + using NativeType = VkVideoEncodeAV1DpbSlotInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeAv1DpbSlotInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeAV1DpbSlotInfoKHR( const StdVideoEncodeAV1ReferenceInfo * pStdReferenceInfo_ 
= {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pStdReferenceInfo{ pStdReferenceInfo_ } + { + } + + VULKAN_HPP_CONSTEXPR VideoEncodeAV1DpbSlotInfoKHR( VideoEncodeAV1DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeAV1DpbSlotInfoKHR( VkVideoEncodeAV1DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeAV1DpbSlotInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoEncodeAV1DpbSlotInfoKHR & operator=( VideoEncodeAV1DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoEncodeAV1DpbSlotInfoKHR & operator=( VkVideoEncodeAV1DpbSlotInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1DpbSlotInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1DpbSlotInfoKHR & setPStdReferenceInfo( const StdVideoEncodeAV1ReferenceInfo * pStdReferenceInfo_ ) VULKAN_HPP_NOEXCEPT + { + pStdReferenceInfo = pStdReferenceInfo_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkVideoEncodeAV1DpbSlotInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeAV1DpbSlotInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pStdReferenceInfo ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeAV1DpbSlotInfoKHR const & ) const = default; +#else + bool operator==( VideoEncodeAV1DpbSlotInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pStdReferenceInfo == rhs.pStdReferenceInfo ); +# endif + } + + bool operator!=( VideoEncodeAV1DpbSlotInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeAv1DpbSlotInfoKHR; + const void * pNext = {}; + const StdVideoEncodeAV1ReferenceInfo * pStdReferenceInfo = {}; + }; + + template <> + struct CppType + { + using Type = VideoEncodeAV1DpbSlotInfoKHR; + }; + + struct VideoEncodeAV1FrameSizeKHR + { + using NativeType = VkVideoEncodeAV1FrameSizeKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + VideoEncodeAV1FrameSizeKHR( uint32_t intraFrameSize_ = {}, uint32_t predictiveFrameSize_ = {}, uint32_t bipredictiveFrameSize_ = {} ) VULKAN_HPP_NOEXCEPT + : intraFrameSize{ intraFrameSize_ } + , predictiveFrameSize{ predictiveFrameSize_ } + , bipredictiveFrameSize{ bipredictiveFrameSize_ } + { + } + + VULKAN_HPP_CONSTEXPR VideoEncodeAV1FrameSizeKHR( VideoEncodeAV1FrameSizeKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeAV1FrameSizeKHR( VkVideoEncodeAV1FrameSizeKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeAV1FrameSizeKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoEncodeAV1FrameSizeKHR & operator=( VideoEncodeAV1FrameSizeKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoEncodeAV1FrameSizeKHR & operator=( VkVideoEncodeAV1FrameSizeKHR const 
& rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1FrameSizeKHR & setIntraFrameSize( uint32_t intraFrameSize_ ) VULKAN_HPP_NOEXCEPT + { + intraFrameSize = intraFrameSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1FrameSizeKHR & setPredictiveFrameSize( uint32_t predictiveFrameSize_ ) VULKAN_HPP_NOEXCEPT + { + predictiveFrameSize = predictiveFrameSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1FrameSizeKHR & setBipredictiveFrameSize( uint32_t bipredictiveFrameSize_ ) VULKAN_HPP_NOEXCEPT + { + bipredictiveFrameSize = bipredictiveFrameSize_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkVideoEncodeAV1FrameSizeKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeAV1FrameSizeKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( intraFrameSize, predictiveFrameSize, bipredictiveFrameSize ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeAV1FrameSizeKHR const & ) const = default; +#else + bool operator==( VideoEncodeAV1FrameSizeKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( intraFrameSize == rhs.intraFrameSize ) && ( predictiveFrameSize == rhs.predictiveFrameSize ) && + ( bipredictiveFrameSize == rhs.bipredictiveFrameSize ); +# endif + } + + bool operator!=( VideoEncodeAV1FrameSizeKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t intraFrameSize = {}; + uint32_t predictiveFrameSize = {}; + uint32_t bipredictiveFrameSize = {}; + }; + + struct VideoEncodeAV1GopRemainingFrameInfoKHR + { + using NativeType = VkVideoEncodeAV1GopRemainingFrameInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeAv1GopRemainingFrameInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeAV1GopRemainingFrameInfoKHR( VULKAN_HPP_NAMESPACE::Bool32 useGopRemainingFrames_ = {}, + uint32_t gopRemainingIntra_ = {}, + uint32_t gopRemainingPredictive_ = {}, + uint32_t gopRemainingBipredictive_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , useGopRemainingFrames{ useGopRemainingFrames_ } + , gopRemainingIntra{ gopRemainingIntra_ } + , gopRemainingPredictive{ gopRemainingPredictive_ } + , gopRemainingBipredictive{ gopRemainingBipredictive_ } + { + } + + VULKAN_HPP_CONSTEXPR VideoEncodeAV1GopRemainingFrameInfoKHR( VideoEncodeAV1GopRemainingFrameInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeAV1GopRemainingFrameInfoKHR( VkVideoEncodeAV1GopRemainingFrameInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeAV1GopRemainingFrameInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoEncodeAV1GopRemainingFrameInfoKHR & operator=( VideoEncodeAV1GopRemainingFrameInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoEncodeAV1GopRemainingFrameInfoKHR & operator=( VkVideoEncodeAV1GopRemainingFrameInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = 
*reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1GopRemainingFrameInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1GopRemainingFrameInfoKHR & + setUseGopRemainingFrames( VULKAN_HPP_NAMESPACE::Bool32 useGopRemainingFrames_ ) VULKAN_HPP_NOEXCEPT + { + useGopRemainingFrames = useGopRemainingFrames_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1GopRemainingFrameInfoKHR & setGopRemainingIntra( uint32_t gopRemainingIntra_ ) VULKAN_HPP_NOEXCEPT + { + gopRemainingIntra = gopRemainingIntra_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1GopRemainingFrameInfoKHR & setGopRemainingPredictive( uint32_t gopRemainingPredictive_ ) VULKAN_HPP_NOEXCEPT + { + gopRemainingPredictive = gopRemainingPredictive_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1GopRemainingFrameInfoKHR & setGopRemainingBipredictive( uint32_t gopRemainingBipredictive_ ) VULKAN_HPP_NOEXCEPT + { + gopRemainingBipredictive = gopRemainingBipredictive_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkVideoEncodeAV1GopRemainingFrameInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeAV1GopRemainingFrameInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, useGopRemainingFrames, gopRemainingIntra, gopRemainingPredictive, gopRemainingBipredictive ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeAV1GopRemainingFrameInfoKHR const & ) const = default; +#else + bool operator==( VideoEncodeAV1GopRemainingFrameInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( useGopRemainingFrames == rhs.useGopRemainingFrames ) && + ( gopRemainingIntra == rhs.gopRemainingIntra ) && ( gopRemainingPredictive == rhs.gopRemainingPredictive ) && + ( gopRemainingBipredictive == rhs.gopRemainingBipredictive ); +# endif + } + + bool operator!=( VideoEncodeAV1GopRemainingFrameInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeAv1GopRemainingFrameInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 useGopRemainingFrames = {}; + uint32_t gopRemainingIntra = {}; + uint32_t gopRemainingPredictive = {}; + uint32_t gopRemainingBipredictive = {}; + }; + + template <> + struct CppType + { + using Type = VideoEncodeAV1GopRemainingFrameInfoKHR; + }; + + struct VideoEncodeAV1PictureInfoKHR + { + using NativeType = VkVideoEncodeAV1PictureInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeAv1PictureInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1PictureInfoKHR( + VULKAN_HPP_NAMESPACE::VideoEncodeAV1PredictionModeKHR predictionMode_ = VULKAN_HPP_NAMESPACE::VideoEncodeAV1PredictionModeKHR::eIntraOnly, + VULKAN_HPP_NAMESPACE::VideoEncodeAV1RateControlGroupKHR rateControlGroup_ 
= VULKAN_HPP_NAMESPACE::VideoEncodeAV1RateControlGroupKHR::eIntra, + uint32_t constantQIndex_ = {}, + const StdVideoEncodeAV1PictureInfo * pStdPictureInfo_ = {}, + std::array const & referenceNameSlotIndices_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 primaryReferenceCdfOnly_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 generateObuExtensionHeader_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , predictionMode{ predictionMode_ } + , rateControlGroup{ rateControlGroup_ } + , constantQIndex{ constantQIndex_ } + , pStdPictureInfo{ pStdPictureInfo_ } + , referenceNameSlotIndices{ referenceNameSlotIndices_ } + , primaryReferenceCdfOnly{ primaryReferenceCdfOnly_ } + , generateObuExtensionHeader{ generateObuExtensionHeader_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1PictureInfoKHR( VideoEncodeAV1PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeAV1PictureInfoKHR( VkVideoEncodeAV1PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeAV1PictureInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoEncodeAV1PictureInfoKHR & operator=( VideoEncodeAV1PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoEncodeAV1PictureInfoKHR & operator=( VkVideoEncodeAV1PictureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1PictureInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1PictureInfoKHR & + setPredictionMode( VULKAN_HPP_NAMESPACE::VideoEncodeAV1PredictionModeKHR predictionMode_ ) VULKAN_HPP_NOEXCEPT + { + predictionMode = predictionMode_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1PictureInfoKHR & + setRateControlGroup( VULKAN_HPP_NAMESPACE::VideoEncodeAV1RateControlGroupKHR rateControlGroup_ ) VULKAN_HPP_NOEXCEPT + { + rateControlGroup = rateControlGroup_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1PictureInfoKHR & setConstantQIndex( uint32_t constantQIndex_ ) VULKAN_HPP_NOEXCEPT + { + constantQIndex = constantQIndex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1PictureInfoKHR & setPStdPictureInfo( const StdVideoEncodeAV1PictureInfo * pStdPictureInfo_ ) VULKAN_HPP_NOEXCEPT + { + pStdPictureInfo = pStdPictureInfo_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1PictureInfoKHR & + setReferenceNameSlotIndices( std::array referenceNameSlotIndices_ ) VULKAN_HPP_NOEXCEPT + { + referenceNameSlotIndices = referenceNameSlotIndices_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1PictureInfoKHR & + setPrimaryReferenceCdfOnly( VULKAN_HPP_NAMESPACE::Bool32 primaryReferenceCdfOnly_ ) VULKAN_HPP_NOEXCEPT + { + primaryReferenceCdfOnly = primaryReferenceCdfOnly_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1PictureInfoKHR & + setGenerateObuExtensionHeader( VULKAN_HPP_NAMESPACE::Bool32 generateObuExtensionHeader_ ) VULKAN_HPP_NOEXCEPT + { + generateObuExtensionHeader = generateObuExtensionHeader_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkVideoEncodeAV1PictureInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeAV1PictureInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + 
auto +# else + std::tuple const &, + VULKAN_HPP_NAMESPACE::Bool32 const &, + VULKAN_HPP_NAMESPACE::Bool32 const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + predictionMode, + rateControlGroup, + constantQIndex, + pStdPictureInfo, + referenceNameSlotIndices, + primaryReferenceCdfOnly, + generateObuExtensionHeader ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeAV1PictureInfoKHR const & ) const = default; +#else + bool operator==( VideoEncodeAV1PictureInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( predictionMode == rhs.predictionMode ) && ( rateControlGroup == rhs.rateControlGroup ) && + ( constantQIndex == rhs.constantQIndex ) && ( pStdPictureInfo == rhs.pStdPictureInfo ) && + ( referenceNameSlotIndices == rhs.referenceNameSlotIndices ) && ( primaryReferenceCdfOnly == rhs.primaryReferenceCdfOnly ) && + ( generateObuExtensionHeader == rhs.generateObuExtensionHeader ); +# endif + } + + bool operator!=( VideoEncodeAV1PictureInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeAv1PictureInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeAV1PredictionModeKHR predictionMode = VULKAN_HPP_NAMESPACE::VideoEncodeAV1PredictionModeKHR::eIntraOnly; + VULKAN_HPP_NAMESPACE::VideoEncodeAV1RateControlGroupKHR rateControlGroup = VULKAN_HPP_NAMESPACE::VideoEncodeAV1RateControlGroupKHR::eIntra; + uint32_t constantQIndex = {}; + const StdVideoEncodeAV1PictureInfo * pStdPictureInfo = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D referenceNameSlotIndices = {}; + VULKAN_HPP_NAMESPACE::Bool32 primaryReferenceCdfOnly = {}; + VULKAN_HPP_NAMESPACE::Bool32 generateObuExtensionHeader = {}; + }; + + template <> + struct CppType + { + using Type = VideoEncodeAV1PictureInfoKHR; + }; + + struct VideoEncodeAV1ProfileInfoKHR + { + using NativeType = VkVideoEncodeAV1ProfileInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeAv1ProfileInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeAV1ProfileInfoKHR( StdVideoAV1Profile stdProfile_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , stdProfile{ stdProfile_ } + { + } + + VULKAN_HPP_CONSTEXPR VideoEncodeAV1ProfileInfoKHR( VideoEncodeAV1ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeAV1ProfileInfoKHR( VkVideoEncodeAV1ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeAV1ProfileInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoEncodeAV1ProfileInfoKHR & operator=( VideoEncodeAV1ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoEncodeAV1ProfileInfoKHR & operator=( VkVideoEncodeAV1ProfileInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1ProfileInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1ProfileInfoKHR & setStdProfile( StdVideoAV1Profile stdProfile_ ) VULKAN_HPP_NOEXCEPT + { + 
stdProfile = stdProfile_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkVideoEncodeAV1ProfileInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeAV1ProfileInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, stdProfile ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( VideoEncodeAV1ProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = memcmp( &stdProfile, &rhs.stdProfile, sizeof( StdVideoAV1Profile ) ); cmp != 0 ) + return ( cmp < 0 ) ? std::strong_ordering::less : std::strong_ordering::greater; + + return std::strong_ordering::equivalent; + } +#endif + + bool operator==( VideoEncodeAV1ProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( memcmp( &stdProfile, &rhs.stdProfile, sizeof( StdVideoAV1Profile ) ) == 0 ); + } + + bool operator!=( VideoEncodeAV1ProfileInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeAv1ProfileInfoKHR; + const void * pNext = {}; + StdVideoAV1Profile stdProfile = {}; + }; + + template <> + struct CppType + { + using Type = VideoEncodeAV1ProfileInfoKHR; + }; + + struct VideoEncodeAV1QIndexKHR + { + using NativeType = VkVideoEncodeAV1QIndexKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + VideoEncodeAV1QIndexKHR( uint32_t intraQIndex_ = {}, uint32_t predictiveQIndex_ = {}, uint32_t bipredictiveQIndex_ = {} ) VULKAN_HPP_NOEXCEPT + : intraQIndex{ intraQIndex_ } + , predictiveQIndex{ predictiveQIndex_ } + , bipredictiveQIndex{ bipredictiveQIndex_ } + { + } + + VULKAN_HPP_CONSTEXPR VideoEncodeAV1QIndexKHR( VideoEncodeAV1QIndexKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeAV1QIndexKHR( VkVideoEncodeAV1QIndexKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeAV1QIndexKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoEncodeAV1QIndexKHR & operator=( VideoEncodeAV1QIndexKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoEncodeAV1QIndexKHR & operator=( VkVideoEncodeAV1QIndexKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1QIndexKHR & setIntraQIndex( uint32_t intraQIndex_ ) VULKAN_HPP_NOEXCEPT + { + intraQIndex = intraQIndex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1QIndexKHR & setPredictiveQIndex( uint32_t predictiveQIndex_ ) VULKAN_HPP_NOEXCEPT + { + predictiveQIndex = predictiveQIndex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1QIndexKHR & setBipredictiveQIndex( uint32_t bipredictiveQIndex_ ) VULKAN_HPP_NOEXCEPT + { + bipredictiveQIndex = bipredictiveQIndex_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkVideoEncodeAV1QIndexKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeAV1QIndexKHR &() VULKAN_HPP_NOEXCEPT + { + return 
*reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( intraQIndex, predictiveQIndex, bipredictiveQIndex ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeAV1QIndexKHR const & ) const = default; +#else + bool operator==( VideoEncodeAV1QIndexKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( intraQIndex == rhs.intraQIndex ) && ( predictiveQIndex == rhs.predictiveQIndex ) && ( bipredictiveQIndex == rhs.bipredictiveQIndex ); +# endif + } + + bool operator!=( VideoEncodeAV1QIndexKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + uint32_t intraQIndex = {}; + uint32_t predictiveQIndex = {}; + uint32_t bipredictiveQIndex = {}; + }; + + struct VideoEncodeAV1QualityLevelPropertiesKHR + { + using NativeType = VkVideoEncodeAV1QualityLevelPropertiesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeAv1QualityLevelPropertiesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeAV1QualityLevelPropertiesKHR( VULKAN_HPP_NAMESPACE::VideoEncodeAV1RateControlFlagsKHR preferredRateControlFlags_ = {}, + uint32_t preferredGopFrameCount_ = {}, + uint32_t preferredKeyFramePeriod_ = {}, + uint32_t preferredConsecutiveBipredictiveFrameCount_ = {}, + uint32_t preferredTemporalLayerCount_ = {}, + VULKAN_HPP_NAMESPACE::VideoEncodeAV1QIndexKHR preferredConstantQIndex_ = {}, + uint32_t preferredMaxSingleReferenceCount_ = {}, + uint32_t preferredSingleReferenceNameMask_ = {}, + uint32_t preferredMaxUnidirectionalCompoundReferenceCount_ = {}, + uint32_t preferredMaxUnidirectionalCompoundGroup1ReferenceCount_ = {}, + uint32_t preferredUnidirectionalCompoundReferenceNameMask_ = {}, + uint32_t preferredMaxBidirectionalCompoundReferenceCount_ = {}, + uint32_t preferredMaxBidirectionalCompoundGroup1ReferenceCount_ = {}, + uint32_t preferredMaxBidirectionalCompoundGroup2ReferenceCount_ = {}, + uint32_t preferredBidirectionalCompoundReferenceNameMask_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , preferredRateControlFlags{ preferredRateControlFlags_ } + , preferredGopFrameCount{ preferredGopFrameCount_ } + , preferredKeyFramePeriod{ preferredKeyFramePeriod_ } + , preferredConsecutiveBipredictiveFrameCount{ preferredConsecutiveBipredictiveFrameCount_ } + , preferredTemporalLayerCount{ preferredTemporalLayerCount_ } + , preferredConstantQIndex{ preferredConstantQIndex_ } + , preferredMaxSingleReferenceCount{ preferredMaxSingleReferenceCount_ } + , preferredSingleReferenceNameMask{ preferredSingleReferenceNameMask_ } + , preferredMaxUnidirectionalCompoundReferenceCount{ preferredMaxUnidirectionalCompoundReferenceCount_ } + , preferredMaxUnidirectionalCompoundGroup1ReferenceCount{ preferredMaxUnidirectionalCompoundGroup1ReferenceCount_ } + , preferredUnidirectionalCompoundReferenceNameMask{ preferredUnidirectionalCompoundReferenceNameMask_ } + , preferredMaxBidirectionalCompoundReferenceCount{ preferredMaxBidirectionalCompoundReferenceCount_ } + , preferredMaxBidirectionalCompoundGroup1ReferenceCount{ preferredMaxBidirectionalCompoundGroup1ReferenceCount_ } + , preferredMaxBidirectionalCompoundGroup2ReferenceCount{ 
preferredMaxBidirectionalCompoundGroup2ReferenceCount_ } + , preferredBidirectionalCompoundReferenceNameMask{ preferredBidirectionalCompoundReferenceNameMask_ } + { + } + + VULKAN_HPP_CONSTEXPR VideoEncodeAV1QualityLevelPropertiesKHR( VideoEncodeAV1QualityLevelPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeAV1QualityLevelPropertiesKHR( VkVideoEncodeAV1QualityLevelPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeAV1QualityLevelPropertiesKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoEncodeAV1QualityLevelPropertiesKHR & operator=( VideoEncodeAV1QualityLevelPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoEncodeAV1QualityLevelPropertiesKHR & operator=( VkVideoEncodeAV1QualityLevelPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkVideoEncodeAV1QualityLevelPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeAV1QualityLevelPropertiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + preferredRateControlFlags, + preferredGopFrameCount, + preferredKeyFramePeriod, + preferredConsecutiveBipredictiveFrameCount, + preferredTemporalLayerCount, + preferredConstantQIndex, + preferredMaxSingleReferenceCount, + preferredSingleReferenceNameMask, + preferredMaxUnidirectionalCompoundReferenceCount, + preferredMaxUnidirectionalCompoundGroup1ReferenceCount, + preferredUnidirectionalCompoundReferenceNameMask, + preferredMaxBidirectionalCompoundReferenceCount, + preferredMaxBidirectionalCompoundGroup1ReferenceCount, + preferredMaxBidirectionalCompoundGroup2ReferenceCount, + preferredBidirectionalCompoundReferenceNameMask ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeAV1QualityLevelPropertiesKHR const & ) const = default; +#else + bool operator==( VideoEncodeAV1QualityLevelPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( preferredRateControlFlags == rhs.preferredRateControlFlags ) && + ( preferredGopFrameCount == rhs.preferredGopFrameCount ) && ( preferredKeyFramePeriod == rhs.preferredKeyFramePeriod ) && + ( preferredConsecutiveBipredictiveFrameCount == rhs.preferredConsecutiveBipredictiveFrameCount ) && + ( preferredTemporalLayerCount == rhs.preferredTemporalLayerCount ) && ( preferredConstantQIndex == rhs.preferredConstantQIndex ) && + ( preferredMaxSingleReferenceCount == rhs.preferredMaxSingleReferenceCount ) && + ( preferredSingleReferenceNameMask == rhs.preferredSingleReferenceNameMask ) && + ( preferredMaxUnidirectionalCompoundReferenceCount == rhs.preferredMaxUnidirectionalCompoundReferenceCount ) && + ( preferredMaxUnidirectionalCompoundGroup1ReferenceCount == rhs.preferredMaxUnidirectionalCompoundGroup1ReferenceCount ) && + ( preferredUnidirectionalCompoundReferenceNameMask == rhs.preferredUnidirectionalCompoundReferenceNameMask ) && + ( preferredMaxBidirectionalCompoundReferenceCount == rhs.preferredMaxBidirectionalCompoundReferenceCount ) && + ( preferredMaxBidirectionalCompoundGroup1ReferenceCount == 
rhs.preferredMaxBidirectionalCompoundGroup1ReferenceCount ) && + ( preferredMaxBidirectionalCompoundGroup2ReferenceCount == rhs.preferredMaxBidirectionalCompoundGroup2ReferenceCount ) && + ( preferredBidirectionalCompoundReferenceNameMask == rhs.preferredBidirectionalCompoundReferenceNameMask ); +# endif + } + + bool operator!=( VideoEncodeAV1QualityLevelPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeAv1QualityLevelPropertiesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeAV1RateControlFlagsKHR preferredRateControlFlags = {}; + uint32_t preferredGopFrameCount = {}; + uint32_t preferredKeyFramePeriod = {}; + uint32_t preferredConsecutiveBipredictiveFrameCount = {}; + uint32_t preferredTemporalLayerCount = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeAV1QIndexKHR preferredConstantQIndex = {}; + uint32_t preferredMaxSingleReferenceCount = {}; + uint32_t preferredSingleReferenceNameMask = {}; + uint32_t preferredMaxUnidirectionalCompoundReferenceCount = {}; + uint32_t preferredMaxUnidirectionalCompoundGroup1ReferenceCount = {}; + uint32_t preferredUnidirectionalCompoundReferenceNameMask = {}; + uint32_t preferredMaxBidirectionalCompoundReferenceCount = {}; + uint32_t preferredMaxBidirectionalCompoundGroup1ReferenceCount = {}; + uint32_t preferredMaxBidirectionalCompoundGroup2ReferenceCount = {}; + uint32_t preferredBidirectionalCompoundReferenceNameMask = {}; + }; + + template <> + struct CppType + { + using Type = VideoEncodeAV1QualityLevelPropertiesKHR; + }; + + struct VideoEncodeAV1QuantizationMapCapabilitiesKHR + { + using NativeType = VkVideoEncodeAV1QuantizationMapCapabilitiesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeAv1QuantizationMapCapabilitiesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + VideoEncodeAV1QuantizationMapCapabilitiesKHR( int32_t minQIndexDelta_ = {}, int32_t maxQIndexDelta_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , minQIndexDelta{ minQIndexDelta_ } + , maxQIndexDelta{ maxQIndexDelta_ } + { + } + + VULKAN_HPP_CONSTEXPR VideoEncodeAV1QuantizationMapCapabilitiesKHR( VideoEncodeAV1QuantizationMapCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeAV1QuantizationMapCapabilitiesKHR( VkVideoEncodeAV1QuantizationMapCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeAV1QuantizationMapCapabilitiesKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoEncodeAV1QuantizationMapCapabilitiesKHR & operator=( VideoEncodeAV1QuantizationMapCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoEncodeAV1QuantizationMapCapabilitiesKHR & operator=( VkVideoEncodeAV1QuantizationMapCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkVideoEncodeAV1QuantizationMapCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeAV1QuantizationMapCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, minQIndexDelta, maxQIndexDelta ); + } +#endif + +#if 
defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeAV1QuantizationMapCapabilitiesKHR const & ) const = default; +#else + bool operator==( VideoEncodeAV1QuantizationMapCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( minQIndexDelta == rhs.minQIndexDelta ) && ( maxQIndexDelta == rhs.maxQIndexDelta ); +# endif + } + + bool operator!=( VideoEncodeAV1QuantizationMapCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeAv1QuantizationMapCapabilitiesKHR; + void * pNext = {}; + int32_t minQIndexDelta = {}; + int32_t maxQIndexDelta = {}; + }; + + template <> + struct CppType + { + using Type = VideoEncodeAV1QuantizationMapCapabilitiesKHR; + }; + + struct VideoEncodeAV1RateControlInfoKHR + { + using NativeType = VkVideoEncodeAV1RateControlInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeAv1RateControlInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeAV1RateControlInfoKHR( VULKAN_HPP_NAMESPACE::VideoEncodeAV1RateControlFlagsKHR flags_ = {}, + uint32_t gopFrameCount_ = {}, + uint32_t keyFramePeriod_ = {}, + uint32_t consecutiveBipredictiveFrameCount_ = {}, + uint32_t temporalLayerCount_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , gopFrameCount{ gopFrameCount_ } + , keyFramePeriod{ keyFramePeriod_ } + , consecutiveBipredictiveFrameCount{ consecutiveBipredictiveFrameCount_ } + , temporalLayerCount{ temporalLayerCount_ } + { + } + + VULKAN_HPP_CONSTEXPR VideoEncodeAV1RateControlInfoKHR( VideoEncodeAV1RateControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeAV1RateControlInfoKHR( VkVideoEncodeAV1RateControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeAV1RateControlInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoEncodeAV1RateControlInfoKHR & operator=( VideoEncodeAV1RateControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoEncodeAV1RateControlInfoKHR & operator=( VkVideoEncodeAV1RateControlInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1RateControlInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1RateControlInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::VideoEncodeAV1RateControlFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + { + flags = flags_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1RateControlInfoKHR & setGopFrameCount( uint32_t gopFrameCount_ ) VULKAN_HPP_NOEXCEPT + { + gopFrameCount = gopFrameCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1RateControlInfoKHR & setKeyFramePeriod( uint32_t keyFramePeriod_ ) VULKAN_HPP_NOEXCEPT + { + keyFramePeriod = keyFramePeriod_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1RateControlInfoKHR & + setConsecutiveBipredictiveFrameCount( uint32_t consecutiveBipredictiveFrameCount_ ) VULKAN_HPP_NOEXCEPT + { + consecutiveBipredictiveFrameCount = 
consecutiveBipredictiveFrameCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1RateControlInfoKHR & setTemporalLayerCount( uint32_t temporalLayerCount_ ) VULKAN_HPP_NOEXCEPT + { + temporalLayerCount = temporalLayerCount_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkVideoEncodeAV1RateControlInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeAV1RateControlInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, gopFrameCount, keyFramePeriod, consecutiveBipredictiveFrameCount, temporalLayerCount ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeAV1RateControlInfoKHR const & ) const = default; +#else + bool operator==( VideoEncodeAV1RateControlInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( gopFrameCount == rhs.gopFrameCount ) && + ( keyFramePeriod == rhs.keyFramePeriod ) && ( consecutiveBipredictiveFrameCount == rhs.consecutiveBipredictiveFrameCount ) && + ( temporalLayerCount == rhs.temporalLayerCount ); +# endif + } + + bool operator!=( VideoEncodeAV1RateControlInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeAv1RateControlInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeAV1RateControlFlagsKHR flags = {}; + uint32_t gopFrameCount = {}; + uint32_t keyFramePeriod = {}; + uint32_t consecutiveBipredictiveFrameCount = {}; + uint32_t temporalLayerCount = {}; + }; + + template <> + struct CppType + { + using Type = VideoEncodeAV1RateControlInfoKHR; + }; + + struct VideoEncodeAV1RateControlLayerInfoKHR + { + using NativeType = VkVideoEncodeAV1RateControlLayerInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeAv1RateControlLayerInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeAV1RateControlLayerInfoKHR( VULKAN_HPP_NAMESPACE::Bool32 useMinQIndex_ = {}, + VULKAN_HPP_NAMESPACE::VideoEncodeAV1QIndexKHR minQIndex_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 useMaxQIndex_ = {}, + VULKAN_HPP_NAMESPACE::VideoEncodeAV1QIndexKHR maxQIndex_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 useMaxFrameSize_ = {}, + VULKAN_HPP_NAMESPACE::VideoEncodeAV1FrameSizeKHR maxFrameSize_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , useMinQIndex{ useMinQIndex_ } + , minQIndex{ minQIndex_ } + , useMaxQIndex{ useMaxQIndex_ } + , maxQIndex{ maxQIndex_ } + , useMaxFrameSize{ useMaxFrameSize_ } + , maxFrameSize{ maxFrameSize_ } + { + } + + VULKAN_HPP_CONSTEXPR VideoEncodeAV1RateControlLayerInfoKHR( VideoEncodeAV1RateControlLayerInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeAV1RateControlLayerInfoKHR( VkVideoEncodeAV1RateControlLayerInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeAV1RateControlLayerInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoEncodeAV1RateControlLayerInfoKHR & operator=( VideoEncodeAV1RateControlLayerInfoKHR 
const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoEncodeAV1RateControlLayerInfoKHR & operator=( VkVideoEncodeAV1RateControlLayerInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1RateControlLayerInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1RateControlLayerInfoKHR & setUseMinQIndex( VULKAN_HPP_NAMESPACE::Bool32 useMinQIndex_ ) VULKAN_HPP_NOEXCEPT + { + useMinQIndex = useMinQIndex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1RateControlLayerInfoKHR & + setMinQIndex( VULKAN_HPP_NAMESPACE::VideoEncodeAV1QIndexKHR const & minQIndex_ ) VULKAN_HPP_NOEXCEPT + { + minQIndex = minQIndex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1RateControlLayerInfoKHR & setUseMaxQIndex( VULKAN_HPP_NAMESPACE::Bool32 useMaxQIndex_ ) VULKAN_HPP_NOEXCEPT + { + useMaxQIndex = useMaxQIndex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1RateControlLayerInfoKHR & + setMaxQIndex( VULKAN_HPP_NAMESPACE::VideoEncodeAV1QIndexKHR const & maxQIndex_ ) VULKAN_HPP_NOEXCEPT + { + maxQIndex = maxQIndex_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1RateControlLayerInfoKHR & setUseMaxFrameSize( VULKAN_HPP_NAMESPACE::Bool32 useMaxFrameSize_ ) VULKAN_HPP_NOEXCEPT + { + useMaxFrameSize = useMaxFrameSize_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1RateControlLayerInfoKHR & + setMaxFrameSize( VULKAN_HPP_NAMESPACE::VideoEncodeAV1FrameSizeKHR const & maxFrameSize_ ) VULKAN_HPP_NOEXCEPT + { + maxFrameSize = maxFrameSize_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkVideoEncodeAV1RateControlLayerInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeAV1RateControlLayerInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, useMinQIndex, minQIndex, useMaxQIndex, maxQIndex, useMaxFrameSize, maxFrameSize ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeAV1RateControlLayerInfoKHR const & ) const = default; +#else + bool operator==( VideoEncodeAV1RateControlLayerInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( useMinQIndex == rhs.useMinQIndex ) && ( minQIndex == rhs.minQIndex ) && + ( useMaxQIndex == rhs.useMaxQIndex ) && ( maxQIndex == rhs.maxQIndex ) && ( useMaxFrameSize == rhs.useMaxFrameSize ) && + ( maxFrameSize == rhs.maxFrameSize ); +# endif + } + + bool operator!=( VideoEncodeAV1RateControlLayerInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeAv1RateControlLayerInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 useMinQIndex = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeAV1QIndexKHR minQIndex = {}; + VULKAN_HPP_NAMESPACE::Bool32 useMaxQIndex = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeAV1QIndexKHR maxQIndex = {}; + 
VULKAN_HPP_NAMESPACE::Bool32 useMaxFrameSize = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeAV1FrameSizeKHR maxFrameSize = {}; + }; + + template <> + struct CppType + { + using Type = VideoEncodeAV1RateControlLayerInfoKHR; + }; + + struct VideoEncodeAV1SessionCreateInfoKHR + { + using NativeType = VkVideoEncodeAV1SessionCreateInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeAv1SessionCreateInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeAV1SessionCreateInfoKHR( VULKAN_HPP_NAMESPACE::Bool32 useMaxLevel_ = {}, + StdVideoAV1Level maxLevel_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , useMaxLevel{ useMaxLevel_ } + , maxLevel{ maxLevel_ } + { + } + + VULKAN_HPP_CONSTEXPR VideoEncodeAV1SessionCreateInfoKHR( VideoEncodeAV1SessionCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeAV1SessionCreateInfoKHR( VkVideoEncodeAV1SessionCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeAV1SessionCreateInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoEncodeAV1SessionCreateInfoKHR & operator=( VideoEncodeAV1SessionCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoEncodeAV1SessionCreateInfoKHR & operator=( VkVideoEncodeAV1SessionCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1SessionCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1SessionCreateInfoKHR & setUseMaxLevel( VULKAN_HPP_NAMESPACE::Bool32 useMaxLevel_ ) VULKAN_HPP_NOEXCEPT + { + useMaxLevel = useMaxLevel_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1SessionCreateInfoKHR & setMaxLevel( StdVideoAV1Level maxLevel_ ) VULKAN_HPP_NOEXCEPT + { + maxLevel = maxLevel_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkVideoEncodeAV1SessionCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeAV1SessionCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, useMaxLevel, maxLevel ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + std::strong_ordering operator<=>( VideoEncodeAV1SessionCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) + return cmp; + if ( auto cmp = pNext <=> rhs.pNext; cmp != 0 ) + return cmp; + if ( auto cmp = useMaxLevel <=> rhs.useMaxLevel; cmp != 0 ) + return cmp; + if ( auto cmp = memcmp( &maxLevel, &rhs.maxLevel, sizeof( StdVideoAV1Level ) ); cmp != 0 ) + return ( cmp < 0 ) ? 
std::strong_ordering::less : std::strong_ordering::greater; + + return std::strong_ordering::equivalent; + } +#endif + + bool operator==( VideoEncodeAV1SessionCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( useMaxLevel == rhs.useMaxLevel ) && + ( memcmp( &maxLevel, &rhs.maxLevel, sizeof( StdVideoAV1Level ) ) == 0 ); + } + + bool operator!=( VideoEncodeAV1SessionCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeAv1SessionCreateInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 useMaxLevel = {}; + StdVideoAV1Level maxLevel = {}; + }; + + template <> + struct CppType + { + using Type = VideoEncodeAV1SessionCreateInfoKHR; + }; + + struct VideoEncodeAV1SessionParametersCreateInfoKHR + { + using NativeType = VkVideoEncodeAV1SessionParametersCreateInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeAv1SessionParametersCreateInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeAV1SessionParametersCreateInfoKHR( const StdVideoAV1SequenceHeader * pStdSequenceHeader_ = {}, + const StdVideoEncodeAV1DecoderModelInfo * pStdDecoderModelInfo_ = {}, + uint32_t stdOperatingPointCount_ = {}, + const StdVideoEncodeAV1OperatingPointInfo * pStdOperatingPoints_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , pStdSequenceHeader{ pStdSequenceHeader_ } + , pStdDecoderModelInfo{ pStdDecoderModelInfo_ } + , stdOperatingPointCount{ stdOperatingPointCount_ } + , pStdOperatingPoints{ pStdOperatingPoints_ } + { + } + + VULKAN_HPP_CONSTEXPR VideoEncodeAV1SessionParametersCreateInfoKHR( VideoEncodeAV1SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeAV1SessionParametersCreateInfoKHR( VkVideoEncodeAV1SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeAV1SessionParametersCreateInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoEncodeAV1SessionParametersCreateInfoKHR( + const StdVideoAV1SequenceHeader * pStdSequenceHeader_, + const StdVideoEncodeAV1DecoderModelInfo * pStdDecoderModelInfo_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stdOperatingPoints_, + const void * pNext_ = nullptr ) + : pNext( pNext_ ) + , pStdSequenceHeader( pStdSequenceHeader_ ) + , pStdDecoderModelInfo( pStdDecoderModelInfo_ ) + , stdOperatingPointCount( static_cast( stdOperatingPoints_.size() ) ) + , pStdOperatingPoints( stdOperatingPoints_.data() ) + { + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ + + VideoEncodeAV1SessionParametersCreateInfoKHR & operator=( VideoEncodeAV1SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoEncodeAV1SessionParametersCreateInfoKHR & operator=( VkVideoEncodeAV1SessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1SessionParametersCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1SessionParametersCreateInfoKHR & + setPStdSequenceHeader( const 
StdVideoAV1SequenceHeader * pStdSequenceHeader_ ) VULKAN_HPP_NOEXCEPT + { + pStdSequenceHeader = pStdSequenceHeader_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1SessionParametersCreateInfoKHR & + setPStdDecoderModelInfo( const StdVideoEncodeAV1DecoderModelInfo * pStdDecoderModelInfo_ ) VULKAN_HPP_NOEXCEPT + { + pStdDecoderModelInfo = pStdDecoderModelInfo_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1SessionParametersCreateInfoKHR & setStdOperatingPointCount( uint32_t stdOperatingPointCount_ ) VULKAN_HPP_NOEXCEPT + { + stdOperatingPointCount = stdOperatingPointCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeAV1SessionParametersCreateInfoKHR & + setPStdOperatingPoints( const StdVideoEncodeAV1OperatingPointInfo * pStdOperatingPoints_ ) VULKAN_HPP_NOEXCEPT + { + pStdOperatingPoints = pStdOperatingPoints_; + return *this; + } + +# if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) + VideoEncodeAV1SessionParametersCreateInfoKHR & setStdOperatingPoints( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & stdOperatingPoints_ ) VULKAN_HPP_NOEXCEPT + { + stdOperatingPointCount = static_cast( stdOperatingPoints_.size() ); + pStdOperatingPoints = stdOperatingPoints_.data(); + return *this; + } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkVideoEncodeAV1SessionParametersCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeAV1SessionParametersCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, pStdSequenceHeader, pStdDecoderModelInfo, stdOperatingPointCount, pStdOperatingPoints ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeAV1SessionParametersCreateInfoKHR const & ) const = default; +#else + bool operator==( VideoEncodeAV1SessionParametersCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( pStdSequenceHeader == rhs.pStdSequenceHeader ) && + ( pStdDecoderModelInfo == rhs.pStdDecoderModelInfo ) && ( stdOperatingPointCount == rhs.stdOperatingPointCount ) && + ( pStdOperatingPoints == rhs.pStdOperatingPoints ); +# endif + } + + bool operator!=( VideoEncodeAV1SessionParametersCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeAv1SessionParametersCreateInfoKHR; + const void * pNext = {}; + const StdVideoAV1SequenceHeader * pStdSequenceHeader = {}; + const StdVideoEncodeAV1DecoderModelInfo * pStdDecoderModelInfo = {}; + uint32_t stdOperatingPointCount = {}; + const StdVideoEncodeAV1OperatingPointInfo * pStdOperatingPoints = {}; + }; + + template <> + struct CppType + { + using Type = VideoEncodeAV1SessionParametersCreateInfoKHR; + }; + struct VideoEncodeCapabilitiesKHR { using NativeType = VkVideoEncodeCapabilitiesKHR; @@ -126015,6 +128380,92 @@ namespace VULKAN_HPP_NAMESPACE using Type = VideoEncodeH264QualityLevelPropertiesKHR; }; + struct VideoEncodeH264QuantizationMapCapabilitiesKHR + { + using NativeType = VkVideoEncodeH264QuantizationMapCapabilitiesKHR; + + static const bool 
allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH264QuantizationMapCapabilitiesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + VideoEncodeH264QuantizationMapCapabilitiesKHR( int32_t minQpDelta_ = {}, int32_t maxQpDelta_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , minQpDelta{ minQpDelta_ } + , maxQpDelta{ maxQpDelta_ } + { + } + + VULKAN_HPP_CONSTEXPR + VideoEncodeH264QuantizationMapCapabilitiesKHR( VideoEncodeH264QuantizationMapCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH264QuantizationMapCapabilitiesKHR( VkVideoEncodeH264QuantizationMapCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH264QuantizationMapCapabilitiesKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoEncodeH264QuantizationMapCapabilitiesKHR & operator=( VideoEncodeH264QuantizationMapCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoEncodeH264QuantizationMapCapabilitiesKHR & operator=( VkVideoEncodeH264QuantizationMapCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkVideoEncodeH264QuantizationMapCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeH264QuantizationMapCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, minQpDelta, maxQpDelta ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeH264QuantizationMapCapabilitiesKHR const & ) const = default; +#else + bool operator==( VideoEncodeH264QuantizationMapCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( minQpDelta == rhs.minQpDelta ) && ( maxQpDelta == rhs.maxQpDelta ); +# endif + } + + bool operator!=( VideoEncodeH264QuantizationMapCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH264QuantizationMapCapabilitiesKHR; + void * pNext = {}; + int32_t minQpDelta = {}; + int32_t maxQpDelta = {}; + }; + + template <> + struct CppType + { + using Type = VideoEncodeH264QuantizationMapCapabilitiesKHR; + }; + struct VideoEncodeH264RateControlInfoKHR { using NativeType = VkVideoEncodeH264RateControlInfoKHR; @@ -128027,6 +130478,92 @@ namespace VULKAN_HPP_NAMESPACE using Type = VideoEncodeH265QualityLevelPropertiesKHR; }; + struct VideoEncodeH265QuantizationMapCapabilitiesKHR + { + using NativeType = VkVideoEncodeH265QuantizationMapCapabilitiesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeH265QuantizationMapCapabilitiesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR + VideoEncodeH265QuantizationMapCapabilitiesKHR( int32_t minQpDelta_ = {}, int32_t maxQpDelta_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , minQpDelta{ minQpDelta_ } + , maxQpDelta{ maxQpDelta_ } + { + } + + 
VULKAN_HPP_CONSTEXPR + VideoEncodeH265QuantizationMapCapabilitiesKHR( VideoEncodeH265QuantizationMapCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeH265QuantizationMapCapabilitiesKHR( VkVideoEncodeH265QuantizationMapCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeH265QuantizationMapCapabilitiesKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoEncodeH265QuantizationMapCapabilitiesKHR & operator=( VideoEncodeH265QuantizationMapCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoEncodeH265QuantizationMapCapabilitiesKHR & operator=( VkVideoEncodeH265QuantizationMapCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkVideoEncodeH265QuantizationMapCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeH265QuantizationMapCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, minQpDelta, maxQpDelta ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeH265QuantizationMapCapabilitiesKHR const & ) const = default; +#else + bool operator==( VideoEncodeH265QuantizationMapCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( minQpDelta == rhs.minQpDelta ) && ( maxQpDelta == rhs.maxQpDelta ); +# endif + } + + bool operator!=( VideoEncodeH265QuantizationMapCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeH265QuantizationMapCapabilitiesKHR; + void * pNext = {}; + int32_t minQpDelta = {}; + int32_t maxQpDelta = {}; + }; + + template <> + struct CppType + { + using Type = VideoEncodeH265QuantizationMapCapabilitiesKHR; + }; + struct VideoEncodeH265RateControlInfoKHR { using NativeType = VkVideoEncodeH265RateControlInfoKHR; @@ -129419,6 +131956,299 @@ namespace VULKAN_HPP_NAMESPACE using Type = VideoEncodeQualityLevelPropertiesKHR; }; + struct VideoEncodeQuantizationMapCapabilitiesKHR + { + using NativeType = VkVideoEncodeQuantizationMapCapabilitiesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeQuantizationMapCapabilitiesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeQuantizationMapCapabilitiesKHR( VULKAN_HPP_NAMESPACE::Extent2D maxQuantizationMapExtent_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , maxQuantizationMapExtent{ maxQuantizationMapExtent_ } + { + } + + VULKAN_HPP_CONSTEXPR VideoEncodeQuantizationMapCapabilitiesKHR( VideoEncodeQuantizationMapCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeQuantizationMapCapabilitiesKHR( VkVideoEncodeQuantizationMapCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeQuantizationMapCapabilitiesKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoEncodeQuantizationMapCapabilitiesKHR & operator=( VideoEncodeQuantizationMapCapabilitiesKHR const & rhs ) 
VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoEncodeQuantizationMapCapabilitiesKHR & operator=( VkVideoEncodeQuantizationMapCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkVideoEncodeQuantizationMapCapabilitiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeQuantizationMapCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, maxQuantizationMapExtent ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeQuantizationMapCapabilitiesKHR const & ) const = default; +#else + bool operator==( VideoEncodeQuantizationMapCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxQuantizationMapExtent == rhs.maxQuantizationMapExtent ); +# endif + } + + bool operator!=( VideoEncodeQuantizationMapCapabilitiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeQuantizationMapCapabilitiesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Extent2D maxQuantizationMapExtent = {}; + }; + + template <> + struct CppType + { + using Type = VideoEncodeQuantizationMapCapabilitiesKHR; + }; + + struct VideoEncodeQuantizationMapInfoKHR + { + using NativeType = VkVideoEncodeQuantizationMapInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeQuantizationMapInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeQuantizationMapInfoKHR( VULKAN_HPP_NAMESPACE::ImageView quantizationMap_ = {}, + VULKAN_HPP_NAMESPACE::Extent2D quantizationMapExtent_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , quantizationMap{ quantizationMap_ } + , quantizationMapExtent{ quantizationMapExtent_ } + { + } + + VULKAN_HPP_CONSTEXPR VideoEncodeQuantizationMapInfoKHR( VideoEncodeQuantizationMapInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeQuantizationMapInfoKHR( VkVideoEncodeQuantizationMapInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeQuantizationMapInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoEncodeQuantizationMapInfoKHR & operator=( VideoEncodeQuantizationMapInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoEncodeQuantizationMapInfoKHR & operator=( VkVideoEncodeQuantizationMapInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeQuantizationMapInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeQuantizationMapInfoKHR & setQuantizationMap( VULKAN_HPP_NAMESPACE::ImageView quantizationMap_ ) VULKAN_HPP_NOEXCEPT + { + quantizationMap = quantizationMap_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 VideoEncodeQuantizationMapInfoKHR & + 
setQuantizationMapExtent( VULKAN_HPP_NAMESPACE::Extent2D const & quantizationMapExtent_ ) VULKAN_HPP_NOEXCEPT + { + quantizationMapExtent = quantizationMapExtent_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkVideoEncodeQuantizationMapInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeQuantizationMapInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std:: + tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, quantizationMap, quantizationMapExtent ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeQuantizationMapInfoKHR const & ) const = default; +#else + bool operator==( VideoEncodeQuantizationMapInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( quantizationMap == rhs.quantizationMap ) && + ( quantizationMapExtent == rhs.quantizationMapExtent ); +# endif + } + + bool operator!=( VideoEncodeQuantizationMapInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeQuantizationMapInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::ImageView quantizationMap = {}; + VULKAN_HPP_NAMESPACE::Extent2D quantizationMapExtent = {}; + }; + + template <> + struct CppType + { + using Type = VideoEncodeQuantizationMapInfoKHR; + }; + + struct VideoEncodeQuantizationMapSessionParametersCreateInfoKHR + { + using NativeType = VkVideoEncodeQuantizationMapSessionParametersCreateInfoKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoEncodeQuantizationMapSessionParametersCreateInfoKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoEncodeQuantizationMapSessionParametersCreateInfoKHR( VULKAN_HPP_NAMESPACE::Extent2D quantizationMapTexelSize_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , quantizationMapTexelSize{ quantizationMapTexelSize_ } + { + } + + VULKAN_HPP_CONSTEXPR VideoEncodeQuantizationMapSessionParametersCreateInfoKHR( VideoEncodeQuantizationMapSessionParametersCreateInfoKHR const & rhs ) + VULKAN_HPP_NOEXCEPT = default; + + VideoEncodeQuantizationMapSessionParametersCreateInfoKHR( VkVideoEncodeQuantizationMapSessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoEncodeQuantizationMapSessionParametersCreateInfoKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoEncodeQuantizationMapSessionParametersCreateInfoKHR & + operator=( VideoEncodeQuantizationMapSessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoEncodeQuantizationMapSessionParametersCreateInfoKHR & + operator=( VkVideoEncodeQuantizationMapSessionParametersCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 VideoEncodeQuantizationMapSessionParametersCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 
VideoEncodeQuantizationMapSessionParametersCreateInfoKHR & + setQuantizationMapTexelSize( VULKAN_HPP_NAMESPACE::Extent2D const & quantizationMapTexelSize_ ) VULKAN_HPP_NOEXCEPT + { + quantizationMapTexelSize = quantizationMapTexelSize_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkVideoEncodeQuantizationMapSessionParametersCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoEncodeQuantizationMapSessionParametersCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, quantizationMapTexelSize ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoEncodeQuantizationMapSessionParametersCreateInfoKHR const & ) const = default; +#else + bool operator==( VideoEncodeQuantizationMapSessionParametersCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( quantizationMapTexelSize == rhs.quantizationMapTexelSize ); +# endif + } + + bool operator!=( VideoEncodeQuantizationMapSessionParametersCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoEncodeQuantizationMapSessionParametersCreateInfoKHR; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::Extent2D quantizationMapTexelSize = {}; + }; + + template <> + struct CppType + { + using Type = VideoEncodeQuantizationMapSessionParametersCreateInfoKHR; + }; + struct VideoEncodeRateControlLayerInfoKHR { using NativeType = VkVideoEncodeRateControlLayerInfoKHR; @@ -130126,6 +132956,172 @@ namespace VULKAN_HPP_NAMESPACE using Type = VideoEndCodingInfoKHR; }; + struct VideoFormatAV1QuantizationMapPropertiesKHR + { + using NativeType = VkVideoFormatAV1QuantizationMapPropertiesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoFormatAv1QuantizationMapPropertiesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoFormatAV1QuantizationMapPropertiesKHR( VULKAN_HPP_NAMESPACE::VideoEncodeAV1SuperblockSizeFlagsKHR compatibleSuperblockSizes_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , compatibleSuperblockSizes{ compatibleSuperblockSizes_ } + { + } + + VULKAN_HPP_CONSTEXPR VideoFormatAV1QuantizationMapPropertiesKHR( VideoFormatAV1QuantizationMapPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoFormatAV1QuantizationMapPropertiesKHR( VkVideoFormatAV1QuantizationMapPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoFormatAV1QuantizationMapPropertiesKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoFormatAV1QuantizationMapPropertiesKHR & operator=( VideoFormatAV1QuantizationMapPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoFormatAV1QuantizationMapPropertiesKHR & operator=( VkVideoFormatAV1QuantizationMapPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkVideoFormatAV1QuantizationMapPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return 
*reinterpret_cast( this ); + } + + operator VkVideoFormatAV1QuantizationMapPropertiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, compatibleSuperblockSizes ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoFormatAV1QuantizationMapPropertiesKHR const & ) const = default; +#else + bool operator==( VideoFormatAV1QuantizationMapPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( compatibleSuperblockSizes == rhs.compatibleSuperblockSizes ); +# endif + } + + bool operator!=( VideoFormatAV1QuantizationMapPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoFormatAv1QuantizationMapPropertiesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeAV1SuperblockSizeFlagsKHR compatibleSuperblockSizes = {}; + }; + + template <> + struct CppType + { + using Type = VideoFormatAV1QuantizationMapPropertiesKHR; + }; + + struct VideoFormatH265QuantizationMapPropertiesKHR + { + using NativeType = VkVideoFormatH265QuantizationMapPropertiesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoFormatH265QuantizationMapPropertiesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoFormatH265QuantizationMapPropertiesKHR( VULKAN_HPP_NAMESPACE::VideoEncodeH265CtbSizeFlagsKHR compatibleCtbSizes_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , compatibleCtbSizes{ compatibleCtbSizes_ } + { + } + + VULKAN_HPP_CONSTEXPR VideoFormatH265QuantizationMapPropertiesKHR( VideoFormatH265QuantizationMapPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoFormatH265QuantizationMapPropertiesKHR( VkVideoFormatH265QuantizationMapPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoFormatH265QuantizationMapPropertiesKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoFormatH265QuantizationMapPropertiesKHR & operator=( VideoFormatH265QuantizationMapPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoFormatH265QuantizationMapPropertiesKHR & operator=( VkVideoFormatH265QuantizationMapPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkVideoFormatH265QuantizationMapPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoFormatH265QuantizationMapPropertiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, compatibleCtbSizes ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoFormatH265QuantizationMapPropertiesKHR const & ) const = default; +#else + bool operator==( VideoFormatH265QuantizationMapPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return 
this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( compatibleCtbSizes == rhs.compatibleCtbSizes ); +# endif + } + + bool operator!=( VideoFormatH265QuantizationMapPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoFormatH265QuantizationMapPropertiesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::VideoEncodeH265CtbSizeFlagsKHR compatibleCtbSizes = {}; + }; + + template <> + struct CppType + { + using Type = VideoFormatH265QuantizationMapPropertiesKHR; + }; + struct VideoFormatPropertiesKHR { using NativeType = VkVideoFormatPropertiesKHR; @@ -130233,6 +133229,89 @@ namespace VULKAN_HPP_NAMESPACE using Type = VideoFormatPropertiesKHR; }; + struct VideoFormatQuantizationMapPropertiesKHR + { + using NativeType = VkVideoFormatQuantizationMapPropertiesKHR; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eVideoFormatQuantizationMapPropertiesKHR; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR VideoFormatQuantizationMapPropertiesKHR( VULKAN_HPP_NAMESPACE::Extent2D quantizationMapTexelSize_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , quantizationMapTexelSize{ quantizationMapTexelSize_ } + { + } + + VULKAN_HPP_CONSTEXPR VideoFormatQuantizationMapPropertiesKHR( VideoFormatQuantizationMapPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + VideoFormatQuantizationMapPropertiesKHR( VkVideoFormatQuantizationMapPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + : VideoFormatQuantizationMapPropertiesKHR( *reinterpret_cast( &rhs ) ) + { + } + + VideoFormatQuantizationMapPropertiesKHR & operator=( VideoFormatQuantizationMapPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + VideoFormatQuantizationMapPropertiesKHR & operator=( VkVideoFormatQuantizationMapPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkVideoFormatQuantizationMapPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkVideoFormatQuantizationMapPropertiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, quantizationMapTexelSize ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( VideoFormatQuantizationMapPropertiesKHR const & ) const = default; +#else + bool operator==( VideoFormatQuantizationMapPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( quantizationMapTexelSize == rhs.quantizationMapTexelSize ); +# endif + } + + bool operator!=( VideoFormatQuantizationMapPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eVideoFormatQuantizationMapPropertiesKHR; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Extent2D quantizationMapTexelSize = {}; + }; + + template <> + struct CppType + { + using Type = 
VideoFormatQuantizationMapPropertiesKHR; + }; + struct VideoInlineQueryInfoKHR { using NativeType = VkVideoInlineQueryInfoKHR; diff --git a/third_party/vulkan/vulkan_to_string.hpp b/third_party/vulkan/vulkan_to_string.hpp index 1c00c12..d8f29ee 100644 --- a/third_party/vulkan/vulkan_to_string.hpp +++ b/third_party/vulkan/vulkan_to_string.hpp @@ -208,6 +208,10 @@ namespace VULKAN_HPP_NAMESPACE result += "SampleWeightQCOM | "; if ( value & ImageUsageFlagBits::eSampleBlockMatchQCOM ) result += "SampleBlockMatchQCOM | "; + if ( value & ImageUsageFlagBits::eVideoEncodeQuantizationDeltaMapKHR ) + result += "VideoEncodeQuantizationDeltaMapKHR | "; + if ( value & ImageUsageFlagBits::eVideoEncodeEmphasisMapKHR ) + result += "VideoEncodeEmphasisMapKHR | "; return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } @@ -1860,6 +1864,10 @@ namespace VULKAN_HPP_NAMESPACE result += "OpticalFlowVectorNV | "; if ( value & FormatFeatureFlagBits2::eOpticalFlowCostNV ) result += "OpticalFlowCostNV | "; + if ( value & FormatFeatureFlagBits2::eVideoEncodeQuantizationDeltaMapKHR ) + result += "VideoEncodeQuantizationDeltaMapKHR | "; + if ( value & FormatFeatureFlagBits2::eVideoEncodeEmphasisMapKHR ) + result += "VideoEncodeEmphasisMapKHR | "; return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } @@ -2065,6 +2073,8 @@ namespace VULKAN_HPP_NAMESPACE result += "DecodeH265 | "; if ( value & VideoCodecOperationFlagBitsKHR::eDecodeAv1 ) result += "DecodeAv1 | "; + if ( value & VideoCodecOperationFlagBitsKHR::eEncodeAv1 ) + result += "EncodeAv1 | "; return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } @@ -2129,13 +2139,24 @@ namespace VULKAN_HPP_NAMESPACE result += "AllowEncodeParameterOptimizations | "; if ( value & VideoSessionCreateFlagBitsKHR::eInlineQueries ) result += "InlineQueries | "; + if ( value & VideoSessionCreateFlagBitsKHR::eAllowEncodeQuantizationDeltaMap ) + result += "AllowEncodeQuantizationDeltaMap | "; + if ( value & VideoSessionCreateFlagBitsKHR::eAllowEncodeEmphasisMap ) + result += "AllowEncodeEmphasisMap | "; return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } - VULKAN_HPP_INLINE std::string to_string( VideoSessionParametersCreateFlagsKHR ) + VULKAN_HPP_INLINE std::string to_string( VideoSessionParametersCreateFlagsKHR value ) { - return "{}"; + if ( !value ) + return "{}"; + + std::string result; + if ( value & VideoSessionParametersCreateFlagBitsKHR::eQuantizationMapCompatible ) + result += "QuantizationMapCompatible | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } VULKAN_HPP_INLINE std::string to_string( VideoBeginCodingFlagsKHR ) @@ -2234,6 +2255,8 @@ namespace VULKAN_HPP_NAMESPACE result += "PerSliceConstantQp | "; if ( value & VideoEncodeH264CapabilityFlagBitsKHR::eGeneratePrefixNalu ) result += "GeneratePrefixNalu | "; + if ( value & VideoEncodeH264CapabilityFlagBitsKHR::eMbQpDiffWraparound ) + result += "MbQpDiffWraparound | "; return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } @@ -2336,6 +2359,8 @@ namespace VULKAN_HPP_NAMESPACE result += "MultipleTilesPerSliceSegment | "; if ( value & VideoEncodeH265CapabilityFlagBitsKHR::eMultipleSliceSegmentsPerTile ) result += "MultipleSliceSegmentsPerTile | "; + if ( value & VideoEncodeH265CapabilityFlagBitsKHR::eCuQpDiffWraparound ) + result += "CuQpDiffWraparound | "; return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } @@ -2915,9 +2940,18 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_KHR_video_encode_queue === - VULKAN_HPP_INLINE std::string to_string( VideoEncodeFlagsKHR ) + 
VULKAN_HPP_INLINE std::string to_string( VideoEncodeFlagsKHR value ) { - return "{}"; + if ( !value ) + return "{}"; + + std::string result; + if ( value & VideoEncodeFlagBitsKHR::eWithQuantizationDeltaMap ) + result += "WithQuantizationDeltaMap | "; + if ( value & VideoEncodeFlagBitsKHR::eWithEmphasisMap ) + result += "WithEmphasisMap | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } VULKAN_HPP_INLINE std::string to_string( VideoEncodeCapabilityFlagsKHR value ) @@ -2930,6 +2964,10 @@ namespace VULKAN_HPP_NAMESPACE result += "PrecedingExternallyEncodedBytes | "; if ( value & VideoEncodeCapabilityFlagBitsKHR::eInsufficientBitstreamBufferRangeDetection ) result += "InsufficientBitstreamBufferRangeDetection | "; + if ( value & VideoEncodeCapabilityFlagBitsKHR::eQuantizationDeltaMap ) + result += "QuantizationDeltaMap | "; + if ( value & VideoEncodeCapabilityFlagBitsKHR::eEmphasisMap ) + result += "EmphasisMap | "; return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } @@ -3549,6 +3587,78 @@ namespace VULKAN_HPP_NAMESPACE return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } + //=== VK_KHR_video_encode_av1 === + + VULKAN_HPP_INLINE std::string to_string( VideoEncodeAV1CapabilityFlagsKHR value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & VideoEncodeAV1CapabilityFlagBitsKHR::ePerRateControlGroupMinMaxQIndex ) + result += "PerRateControlGroupMinMaxQIndex | "; + if ( value & VideoEncodeAV1CapabilityFlagBitsKHR::eGenerateObuExtensionHeader ) + result += "GenerateObuExtensionHeader | "; + if ( value & VideoEncodeAV1CapabilityFlagBitsKHR::ePrimaryReferenceCdfOnly ) + result += "PrimaryReferenceCdfOnly | "; + if ( value & VideoEncodeAV1CapabilityFlagBitsKHR::eFrameSizeOverride ) + result += "FrameSizeOverride | "; + if ( value & VideoEncodeAV1CapabilityFlagBitsKHR::eMotionVectorScaling ) + result += "MotionVectorScaling | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( VideoEncodeAV1StdFlagsKHR value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & VideoEncodeAV1StdFlagBitsKHR::eUniformTileSpacingFlagSet ) + result += "UniformTileSpacingFlagSet | "; + if ( value & VideoEncodeAV1StdFlagBitsKHR::eSkipModePresentUnset ) + result += "SkipModePresentUnset | "; + if ( value & VideoEncodeAV1StdFlagBitsKHR::ePrimaryRefFrame ) + result += "PrimaryRefFrame | "; + if ( value & VideoEncodeAV1StdFlagBitsKHR::eDeltaQ ) + result += "DeltaQ | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( VideoEncodeAV1SuperblockSizeFlagsKHR value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & VideoEncodeAV1SuperblockSizeFlagBitsKHR::e64 ) + result += "64 | "; + if ( value & VideoEncodeAV1SuperblockSizeFlagBitsKHR::e128 ) + result += "128 | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( VideoEncodeAV1RateControlFlagsKHR value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & VideoEncodeAV1RateControlFlagBitsKHR::eRegularGop ) + result += "RegularGop | "; + if ( value & VideoEncodeAV1RateControlFlagBitsKHR::eTemporalLayerPatternDyadic ) + result += "TemporalLayerPatternDyadic | "; + if ( value & VideoEncodeAV1RateControlFlagBitsKHR::eReferencePatternFlat ) + result += "ReferencePatternFlat | "; + if ( value & VideoEncodeAV1RateControlFlagBitsKHR::eReferencePatternDyadic ) + result += 
"ReferencePatternDyadic | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + //=== VK_EXT_device_generated_commands === VULKAN_HPP_INLINE std::string to_string( IndirectCommandsLayoutUsageFlagsEXT value ) @@ -3935,6 +4045,7 @@ namespace VULKAN_HPP_NAMESPACE case StructureType::eCuModuleCreateInfoNVX: return "CuModuleCreateInfoNVX"; case StructureType::eCuFunctionCreateInfoNVX: return "CuFunctionCreateInfoNVX"; case StructureType::eCuLaunchInfoNVX: return "CuLaunchInfoNVX"; + case StructureType::eCuModuleTexturingModeCreateInfoNVX: return "CuModuleTexturingModeCreateInfoNVX"; case StructureType::eImageViewHandleInfoNVX: return "ImageViewHandleInfoNVX"; case StructureType::eImageViewAddressPropertiesNVX: return "ImageViewAddressPropertiesNVX"; case StructureType::eVideoEncodeH264CapabilitiesKHR: return "VideoEncodeH264CapabilitiesKHR"; @@ -4582,6 +4693,17 @@ namespace VULKAN_HPP_NAMESPACE case StructureType::eVideoDecodeAv1ProfileInfoKHR: return "VideoDecodeAv1ProfileInfoKHR"; case StructureType::eVideoDecodeAv1SessionParametersCreateInfoKHR: return "VideoDecodeAv1SessionParametersCreateInfoKHR"; case StructureType::eVideoDecodeAv1DpbSlotInfoKHR: return "VideoDecodeAv1DpbSlotInfoKHR"; + case StructureType::eVideoEncodeAv1CapabilitiesKHR: return "VideoEncodeAv1CapabilitiesKHR"; + case StructureType::eVideoEncodeAv1SessionParametersCreateInfoKHR: return "VideoEncodeAv1SessionParametersCreateInfoKHR"; + case StructureType::eVideoEncodeAv1PictureInfoKHR: return "VideoEncodeAv1PictureInfoKHR"; + case StructureType::eVideoEncodeAv1DpbSlotInfoKHR: return "VideoEncodeAv1DpbSlotInfoKHR"; + case StructureType::ePhysicalDeviceVideoEncodeAv1FeaturesKHR: return "PhysicalDeviceVideoEncodeAv1FeaturesKHR"; + case StructureType::eVideoEncodeAv1ProfileInfoKHR: return "VideoEncodeAv1ProfileInfoKHR"; + case StructureType::eVideoEncodeAv1RateControlInfoKHR: return "VideoEncodeAv1RateControlInfoKHR"; + case StructureType::eVideoEncodeAv1RateControlLayerInfoKHR: return "VideoEncodeAv1RateControlLayerInfoKHR"; + case StructureType::eVideoEncodeAv1QualityLevelPropertiesKHR: return "VideoEncodeAv1QualityLevelPropertiesKHR"; + case StructureType::eVideoEncodeAv1SessionCreateInfoKHR: return "VideoEncodeAv1SessionCreateInfoKHR"; + case StructureType::eVideoEncodeAv1GopRemainingFrameInfoKHR: return "VideoEncodeAv1GopRemainingFrameInfoKHR"; case StructureType::ePhysicalDeviceVideoMaintenance1FeaturesKHR: return "PhysicalDeviceVideoMaintenance1FeaturesKHR"; case StructureType::eVideoInlineQueryInfoKHR: return "VideoInlineQueryInfoKHR"; case StructureType::ePhysicalDevicePerStageDescriptorSetFeaturesNV: return "PhysicalDevicePerStageDescriptorSetFeaturesNV"; @@ -4623,6 +4745,18 @@ namespace VULKAN_HPP_NAMESPACE case StructureType::eSetDescriptorBufferOffsetsInfoEXT: return "SetDescriptorBufferOffsetsInfoEXT"; case StructureType::eBindDescriptorBufferEmbeddedSamplersInfoEXT: return "BindDescriptorBufferEmbeddedSamplersInfoEXT"; case StructureType::ePhysicalDeviceDescriptorPoolOverallocationFeaturesNV: return "PhysicalDeviceDescriptorPoolOverallocationFeaturesNV"; + case StructureType::eDisplaySurfaceStereoCreateInfoNV: return "DisplaySurfaceStereoCreateInfoNV"; + case StructureType::eDisplayModeStereoPropertiesNV: return "DisplayModeStereoPropertiesNV"; + case StructureType::eVideoEncodeQuantizationMapCapabilitiesKHR: return "VideoEncodeQuantizationMapCapabilitiesKHR"; + case StructureType::eVideoFormatQuantizationMapPropertiesKHR: return "VideoFormatQuantizationMapPropertiesKHR"; + case 
StructureType::eVideoEncodeQuantizationMapInfoKHR: return "VideoEncodeQuantizationMapInfoKHR"; + case StructureType::eVideoEncodeQuantizationMapSessionParametersCreateInfoKHR: return "VideoEncodeQuantizationMapSessionParametersCreateInfoKHR"; + case StructureType::ePhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR: return "PhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR"; + case StructureType::eVideoEncodeH264QuantizationMapCapabilitiesKHR: return "VideoEncodeH264QuantizationMapCapabilitiesKHR"; + case StructureType::eVideoEncodeH265QuantizationMapCapabilitiesKHR: return "VideoEncodeH265QuantizationMapCapabilitiesKHR"; + case StructureType::eVideoFormatH265QuantizationMapPropertiesKHR: return "VideoFormatH265QuantizationMapPropertiesKHR"; + case StructureType::eVideoEncodeAv1QuantizationMapCapabilitiesKHR: return "VideoEncodeAv1QuantizationMapCapabilitiesKHR"; + case StructureType::eVideoFormatAv1QuantizationMapPropertiesKHR: return "VideoFormatAv1QuantizationMapPropertiesKHR"; case StructureType::ePhysicalDeviceRawAccessChainsFeaturesNV: return "PhysicalDeviceRawAccessChainsFeaturesNV"; case StructureType::ePhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR: return "PhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR"; case StructureType::ePhysicalDeviceCommandBufferInheritanceFeaturesNV: return "PhysicalDeviceCommandBufferInheritanceFeaturesNV"; @@ -4658,6 +4792,7 @@ namespace VULKAN_HPP_NAMESPACE case StructureType::ePhysicalDeviceCooperativeMatrix2FeaturesNV: return "PhysicalDeviceCooperativeMatrix2FeaturesNV"; case StructureType::eCooperativeMatrixFlexibleDimensionsPropertiesNV: return "CooperativeMatrixFlexibleDimensionsPropertiesNV"; case StructureType::ePhysicalDeviceCooperativeMatrix2PropertiesNV: return "PhysicalDeviceCooperativeMatrix2PropertiesNV"; + case StructureType::ePhysicalDeviceVertexAttributeRobustnessFeaturesEXT: return "PhysicalDeviceVertexAttributeRobustnessFeaturesEXT"; default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } @@ -5126,6 +5261,8 @@ namespace VULKAN_HPP_NAMESPACE case ImageUsageFlagBits::eInvocationMaskHUAWEI: return "InvocationMaskHUAWEI"; case ImageUsageFlagBits::eSampleWeightQCOM: return "SampleWeightQCOM"; case ImageUsageFlagBits::eSampleBlockMatchQCOM: return "SampleBlockMatchQCOM"; + case ImageUsageFlagBits::eVideoEncodeQuantizationDeltaMapKHR: return "VideoEncodeQuantizationDeltaMapKHR"; + case ImageUsageFlagBits::eVideoEncodeEmphasisMapKHR: return "VideoEncodeEmphasisMapKHR"; default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } @@ -5517,6 +5654,7 @@ namespace VULKAN_HPP_NAMESPACE case ImageLayout::eVideoEncodeSrcKHR: return "VideoEncodeSrcKHR"; case ImageLayout::eVideoEncodeDpbKHR: return "VideoEncodeDpbKHR"; case ImageLayout::eAttachmentFeedbackLoopOptimalEXT: return "AttachmentFeedbackLoopOptimalEXT"; + case ImageLayout::eVideoEncodeQuantizationMapKHR: return "VideoEncodeQuantizationMapKHR"; default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } @@ -6891,6 +7029,8 @@ namespace VULKAN_HPP_NAMESPACE case FormatFeatureFlagBits2::eOpticalFlowImageNV: return "OpticalFlowImageNV"; case FormatFeatureFlagBits2::eOpticalFlowVectorNV: return "OpticalFlowVectorNV"; case FormatFeatureFlagBits2::eOpticalFlowCostNV: return "OpticalFlowCostNV"; + case FormatFeatureFlagBits2::eVideoEncodeQuantizationDeltaMapKHR: return "VideoEncodeQuantizationDeltaMapKHR"; + case 
FormatFeatureFlagBits2::eVideoEncodeEmphasisMapKHR: return "VideoEncodeEmphasisMapKHR"; default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } @@ -7152,6 +7292,7 @@ namespace VULKAN_HPP_NAMESPACE case VideoCodecOperationFlagBitsKHR::eDecodeH264: return "DecodeH264"; case VideoCodecOperationFlagBitsKHR::eDecodeH265: return "DecodeH265"; case VideoCodecOperationFlagBitsKHR::eDecodeAv1: return "DecodeAv1"; + case VideoCodecOperationFlagBitsKHR::eEncodeAv1: return "EncodeAv1"; default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } @@ -7198,6 +7339,8 @@ namespace VULKAN_HPP_NAMESPACE case VideoSessionCreateFlagBitsKHR::eProtectedContent: return "ProtectedContent"; case VideoSessionCreateFlagBitsKHR::eAllowEncodeParameterOptimizations: return "AllowEncodeParameterOptimizations"; case VideoSessionCreateFlagBitsKHR::eInlineQueries: return "InlineQueries"; + case VideoSessionCreateFlagBitsKHR::eAllowEncodeQuantizationDeltaMap: return "AllowEncodeQuantizationDeltaMap"; + case VideoSessionCreateFlagBitsKHR::eAllowEncodeEmphasisMap: return "AllowEncodeEmphasisMap"; default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } @@ -7225,9 +7368,13 @@ namespace VULKAN_HPP_NAMESPACE } } - VULKAN_HPP_INLINE std::string to_string( VideoSessionParametersCreateFlagBitsKHR ) + VULKAN_HPP_INLINE std::string to_string( VideoSessionParametersCreateFlagBitsKHR value ) { - return "(void)"; + switch ( value ) + { + case VideoSessionParametersCreateFlagBitsKHR::eQuantizationMapCompatible: return "QuantizationMapCompatible"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } } VULKAN_HPP_INLINE std::string to_string( VideoBeginCodingFlagBitsKHR ) @@ -7291,6 +7438,7 @@ namespace VULKAN_HPP_NAMESPACE case VideoEncodeH264CapabilityFlagBitsKHR::ePerPictureTypeMinMaxQp: return "PerPictureTypeMinMaxQp"; case VideoEncodeH264CapabilityFlagBitsKHR::ePerSliceConstantQp: return "PerSliceConstantQp"; case VideoEncodeH264CapabilityFlagBitsKHR::eGeneratePrefixNalu: return "GeneratePrefixNalu"; + case VideoEncodeH264CapabilityFlagBitsKHR::eMbQpDiffWraparound: return "MbQpDiffWraparound"; default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } @@ -7352,6 +7500,7 @@ namespace VULKAN_HPP_NAMESPACE case VideoEncodeH265CapabilityFlagBitsKHR::ePerSliceSegmentConstantQp: return "PerSliceSegmentConstantQp"; case VideoEncodeH265CapabilityFlagBitsKHR::eMultipleTilesPerSliceSegment: return "MultipleTilesPerSliceSegment"; case VideoEncodeH265CapabilityFlagBitsKHR::eMultipleSliceSegmentsPerTile: return "MultipleSliceSegmentsPerTile"; + case VideoEncodeH265CapabilityFlagBitsKHR::eCuQpDiffWraparound: return "CuQpDiffWraparound"; default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } @@ -8363,6 +8512,8 @@ namespace VULKAN_HPP_NAMESPACE { case VideoEncodeCapabilityFlagBitsKHR::ePrecedingExternallyEncodedBytes: return "PrecedingExternallyEncodedBytes"; case VideoEncodeCapabilityFlagBitsKHR::eInsufficientBitstreamBufferRangeDetection: return "InsufficientBitstreamBufferRangeDetection"; + case VideoEncodeCapabilityFlagBitsKHR::eQuantizationDeltaMap: return "QuantizationDeltaMap"; + case VideoEncodeCapabilityFlagBitsKHR::eEmphasisMap: return "EmphasisMap"; default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } @@ -8428,9 +8579,14 @@ 
namespace VULKAN_HPP_NAMESPACE } } - VULKAN_HPP_INLINE std::string to_string( VideoEncodeFlagBitsKHR ) + VULKAN_HPP_INLINE std::string to_string( VideoEncodeFlagBitsKHR value ) { - return "(void)"; + switch ( value ) + { + case VideoEncodeFlagBitsKHR::eWithQuantizationDeltaMap: return "WithQuantizationDeltaMap"; + case VideoEncodeFlagBitsKHR::eWithEmphasisMap: return "WithEmphasisMap"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } } VULKAN_HPP_INLINE std::string to_string( VideoEncodeRateControlFlagBitsKHR ) @@ -9149,6 +9305,78 @@ namespace VULKAN_HPP_NAMESPACE } } + //=== VK_KHR_video_encode_av1 === + + VULKAN_HPP_INLINE std::string to_string( VideoEncodeAV1PredictionModeKHR value ) + { + switch ( value ) + { + case VideoEncodeAV1PredictionModeKHR::eIntraOnly: return "IntraOnly"; + case VideoEncodeAV1PredictionModeKHR::eSingleReference: return "SingleReference"; + case VideoEncodeAV1PredictionModeKHR::eUnidirectionalCompound: return "UnidirectionalCompound"; + case VideoEncodeAV1PredictionModeKHR::eBidirectionalCompound: return "BidirectionalCompound"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( VideoEncodeAV1RateControlGroupKHR value ) + { + switch ( value ) + { + case VideoEncodeAV1RateControlGroupKHR::eIntra: return "Intra"; + case VideoEncodeAV1RateControlGroupKHR::ePredictive: return "Predictive"; + case VideoEncodeAV1RateControlGroupKHR::eBipredictive: return "Bipredictive"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( VideoEncodeAV1CapabilityFlagBitsKHR value ) + { + switch ( value ) + { + case VideoEncodeAV1CapabilityFlagBitsKHR::ePerRateControlGroupMinMaxQIndex: return "PerRateControlGroupMinMaxQIndex"; + case VideoEncodeAV1CapabilityFlagBitsKHR::eGenerateObuExtensionHeader: return "GenerateObuExtensionHeader"; + case VideoEncodeAV1CapabilityFlagBitsKHR::ePrimaryReferenceCdfOnly: return "PrimaryReferenceCdfOnly"; + case VideoEncodeAV1CapabilityFlagBitsKHR::eFrameSizeOverride: return "FrameSizeOverride"; + case VideoEncodeAV1CapabilityFlagBitsKHR::eMotionVectorScaling: return "MotionVectorScaling"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( VideoEncodeAV1StdFlagBitsKHR value ) + { + switch ( value ) + { + case VideoEncodeAV1StdFlagBitsKHR::eUniformTileSpacingFlagSet: return "UniformTileSpacingFlagSet"; + case VideoEncodeAV1StdFlagBitsKHR::eSkipModePresentUnset: return "SkipModePresentUnset"; + case VideoEncodeAV1StdFlagBitsKHR::ePrimaryRefFrame: return "PrimaryRefFrame"; + case VideoEncodeAV1StdFlagBitsKHR::eDeltaQ: return "DeltaQ"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( VideoEncodeAV1SuperblockSizeFlagBitsKHR value ) + { + switch ( value ) + { + case VideoEncodeAV1SuperblockSizeFlagBitsKHR::e64: return "64"; + case VideoEncodeAV1SuperblockSizeFlagBitsKHR::e128: return "128"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( VideoEncodeAV1RateControlFlagBitsKHR value ) + { + switch ( value ) + { + case VideoEncodeAV1RateControlFlagBitsKHR::eRegularGop: return "RegularGop"; + case 
VideoEncodeAV1RateControlFlagBitsKHR::eTemporalLayerPatternDyadic: return "TemporalLayerPatternDyadic"; + case VideoEncodeAV1RateControlFlagBitsKHR::eReferencePatternFlat: return "ReferencePatternFlat"; + case VideoEncodeAV1RateControlFlagBitsKHR::eReferencePatternDyadic: return "ReferencePatternDyadic"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + //=== VK_QCOM_image_processing2 === VULKAN_HPP_INLINE std::string to_string( BlockMatchWindowCompareModeQCOM value ) @@ -9215,6 +9443,20 @@ namespace VULKAN_HPP_NAMESPACE } } + //=== VK_NV_display_stereo === + + VULKAN_HPP_INLINE std::string to_string( DisplaySurfaceStereoTypeNV value ) + { + switch ( value ) + { + case DisplaySurfaceStereoTypeNV::eNone: return "None"; + case DisplaySurfaceStereoTypeNV::eOnboardDin: return "OnboardDin"; + case DisplaySurfaceStereoTypeNV::eHdmi3D: return "Hdmi3D"; + case DisplaySurfaceStereoTypeNV::eInbandDisplayport: return "InbandDisplayport"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + //=== VK_KHR_maintenance7 === VULKAN_HPP_INLINE std::string to_string( PhysicalDeviceLayeredApiKHR value ) From 17fcb85adc74364fe828cecd454b29a81a347f0d Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Tue, 26 Nov 2024 23:11:03 +0100 Subject: [PATCH 078/131] working on swapchains --- Makefile | 2 +- example/main.c | 18 +++++++++---- runtime/Includes/Renderer/RenderCore.h | 2 +- runtime/Includes/Renderer/Renderer.h | 3 +++ runtime/Sources/Core/Graphics.cpp | 2 +- runtime/Sources/Graphics/PutPixelManager.cpp | 13 ++++++--- .../Sources/Renderer/Pipelines/Graphics.cpp | 27 ++++++++++++++++--- .../Sources/Renderer/RenderPasses/2DPass.cpp | 2 +- .../Renderer/RenderPasses/FinalPass.cpp | 2 +- .../Sources/Renderer/RenderPasses/Passes.cpp | 2 +- runtime/Sources/Renderer/Renderer.cpp | 20 ++++++++++++-- runtime/Sources/Renderer/Swapchain.cpp | 20 +++++++------- 12 files changed, 84 insertions(+), 29 deletions(-) diff --git a/Makefile b/Makefile index 2ade2ba..f4d4a83 100644 --- a/Makefile +++ b/Makefile @@ -47,7 +47,7 @@ ifeq ($(OS), Darwin) endif ifeq ($(DEBUG), true) - CXXFLAGS += -g3 -D DEBUG + CXXFLAGS += -g3 -O0 -D DEBUG LDFLAGS += -rdynamic else CXXFLAGS += -O3 diff --git a/example/main.c b/example/main.c index 819bb89..84a524d 100644 --- a/example/main.c +++ b/example/main.c @@ -56,7 +56,14 @@ int update(void* param) mlx_transform_put_image_to_window(mlx->mlx, mlx->render_target_win, mlx->logo_bmp, 100, 40, 0.5f, 75.0f); mlx_put_image_to_window(mlx->mlx, mlx->render_target_win, mlx->img, 40, 60); - mlx_put_image_to_window(mlx->mlx, mlx->win, mlx->render_target_win, 40, 60); + for(int j = 0, color = 0; j < 200; j++) + { + mlx_pixel_put(mlx->mlx, mlx->render_target_win, j, j, 0xFFFF0000 + color); + mlx_pixel_put(mlx->mlx, mlx->render_target_win, 199 - j, j, 0xFF0000FF); + color += (color < 255); + } + + mlx_transform_put_image_to_window(mlx->mlx, mlx->win, mlx->render_target, 5, 250, 0.5f, 33.0f); i++; return 0; @@ -139,10 +146,14 @@ int main(void) int dummy; mlx.mlx = mlx_init(); - mlx.win = mlx_new_window(mlx.mlx, 400, 400, "My window"); + mlx.win = mlx_new_resizable_window(mlx.mlx, 400, 400, "My window"); + + mlx_get_screens_size(mlx.mlx, mlx.win, &w, &h); + printf("screen size : %dx%d\n", w, h); mlx.render_target = mlx_new_image(mlx.mlx, 200, 200); mlx.render_target_win = mlx_new_window(mlx.mlx, 200, 200, (char*)mlx.render_target); + mlx_clear_window(mlx.mlx, mlx.render_target_win, 0xFFC16868); 
 	mlx_set_fps_goal(mlx.mlx, 60);
@@ -164,9 +175,6 @@ int main(void)
 	mlx_loop_hook(mlx.mlx, update, &mlx);
 	mlx_loop(mlx.mlx);
 
-	mlx_get_screens_size(mlx.mlx, mlx.win, &w, &h);
-	printf("screen size : %dx%d\n", w, h);
-
 	mlx_destroy_image(mlx.mlx, mlx.logo_png);
 	mlx_destroy_image(mlx.mlx, mlx.logo_jpg);
 	mlx_destroy_image(mlx.mlx, mlx.logo_bmp);
diff --git a/runtime/Includes/Renderer/RenderCore.h b/runtime/Includes/Renderer/RenderCore.h
index 9f139c6..f6b126c 100644
--- a/runtime/Includes/Renderer/RenderCore.h
+++ b/runtime/Includes/Renderer/RenderCore.h
@@ -1,7 +1,7 @@
 #ifndef __MLX_RENDER_CORE__
 #define __MLX_RENDER_CORE__
 
-constexpr const int MAX_FRAMES_IN_FLIGHT = 3;
+constexpr const int MAX_FRAMES_IN_FLIGHT = 2;
 
 #include
 #include
diff --git a/runtime/Includes/Renderer/Renderer.h b/runtime/Includes/Renderer/Renderer.h
index 07f572b..0217a45 100644
--- a/runtime/Includes/Renderer/Renderer.h
+++ b/runtime/Includes/Renderer/Renderer.h
@@ -16,6 +16,7 @@ namespace mlx
 			Renderer() = default;
 
 			void Init(NonOwningPtr window);
+			void Init(NonOwningPtr render_target);
 
 			void BeginFrame();
 			void EndFrame();
@@ -28,6 +29,7 @@ namespace mlx
 			[[nodiscard]] inline std::size_t& GetPolygonDrawnCounterRef() noexcept { return m_polygons_drawn; }
 			[[nodiscard]] inline std::size_t GetCurrentFrameIndex() const noexcept { return m_current_frame_index; }
 			[[nodiscard]] inline NonOwningPtr GetWindow() const noexcept { return p_window; }
+			[[nodiscard]] inline NonOwningPtr GetRenderTarget() const noexcept { return p_render_target; }
 			[[nodiscard]] inline const Swapchain& GetSwapchain() const noexcept { return m_swapchain; }
 
 			void Destroy() noexcept;
@@ -41,6 +43,7 @@ namespace mlx
 			std::array<VkCommandBuffer, MAX_FRAMES_IN_FLIGHT> m_cmd_buffers;
 			std::array<VkFence, MAX_FRAMES_IN_FLIGHT> m_cmd_fences;
 			NonOwningPtr p_window;
+			NonOwningPtr p_render_target;
 			std::uint32_t m_current_frame_index = 0;
 			std::size_t m_polygons_drawn = 0;
 			std::size_t m_drawcalls = 0;
diff --git a/runtime/Sources/Core/Graphics.cpp b/runtime/Sources/Core/Graphics.cpp
index db79fa5..93721c4 100644
--- a/runtime/Sources/Core/Graphics.cpp
+++ b/runtime/Sources/Core/Graphics.cpp
@@ -10,7 +10,7 @@ namespace mlx
 		m_has_window(false)
 	{
 		MLX_PROFILE_FUNCTION();
-		m_renderer.Init(nullptr);
+		m_renderer.Init(render_target);
 		m_scene_renderer.Init(render_target);
 		p_scene = std::make_unique<Scene>();
 	}
diff --git a/runtime/Sources/Graphics/PutPixelManager.cpp b/runtime/Sources/Graphics/PutPixelManager.cpp
index 3266ef6..9d4d575 100644
--- a/runtime/Sources/Graphics/PutPixelManager.cpp
+++ b/runtime/Sources/Graphics/PutPixelManager.cpp
@@ -9,11 +9,18 @@ namespace mlx
 	{
 		Verify((bool)p_renderer, "invalid renderer pointer");
 
-		VkExtent2D swapchain_extent = kvfGetSwapchainImagesSize(p_renderer->GetSwapchain().Get());
+		VkExtent2D extent;
+		if(p_renderer->GetWindow())
+			extent = kvfGetSwapchainImagesSize(p_renderer->GetSwapchain().Get());
+		else if(p_renderer->GetRenderTarget())
+			extent = VkExtent2D{ .width = p_renderer->GetRenderTarget()->GetWidth(), .height = p_renderer->GetRenderTarget()->GetHeight() };
+		else
+			FatalError("a renderer was created without window nor render target attached (wtf)");
+
 		#ifdef DEBUG
-			auto res = m_textures.try_emplace(draw_layer, CPUBuffer{}, swapchain_extent.width, swapchain_extent.height, VK_FORMAT_R8G8B8A8_SRGB, false, "mlx_put_pixel_layer_" + std::to_string(draw_layer));
+			auto res = m_textures.try_emplace(draw_layer, CPUBuffer{}, extent.width, extent.height, VK_FORMAT_R8G8B8A8_SRGB, false, "mlx_put_pixel_layer_" + std::to_string(draw_layer));
 		#else
-			auto res = m_textures.try_emplace(draw_layer, CPUBuffer{}, swapchain_extent.width, swapchain_extent.height, VK_FORMAT_R8G8B8A8_SRGB, false, std::string_view{});
+			auto res = m_textures.try_emplace(draw_layer, CPUBuffer{}, extent.width, extent.height, VK_FORMAT_R8G8B8A8_SRGB, false, std::string_view{});
 		#endif
 		if(res.second)
 			res.first->second.Clear(VK_NULL_HANDLE, Vec4f{ 0.0f });
diff --git a/runtime/Sources/Renderer/Pipelines/Graphics.cpp b/runtime/Sources/Renderer/Pipelines/Graphics.cpp
index bdd5a59..4a8024f 100644
--- a/runtime/Sources/Renderer/Pipelines/Graphics.cpp
+++ b/runtime/Sources/Renderer/Pipelines/Graphics.cpp
@@ -132,6 +132,7 @@ namespace mlx
 			kvfDestroyFramebuffer(RenderCore::Get().GetDevice(), fb);
 			DebugLog("Vulkan: framebuffer destroyed");
 		}
+		m_framebuffers.clear();
 
 		kvfDestroyPipelineLayout(RenderCore::Get().GetDevice(), m_pipeline_layout);
 		m_pipeline_layout = VK_NULL_HANDLE;
@@ -148,7 +149,6 @@ namespace mlx
 		p_renderer = nullptr;
 		m_clears.clear();
 		m_attachments.clear();
-		m_framebuffers.clear();
 	}
 
 	void GraphicPipeline::CreateFramebuffers(const std::vector<NonOwningPtr<Image>>& render_targets, bool clear_attachments)
@@ -156,6 +156,8 @@ namespace mlx
 		MLX_PROFILE_FUNCTION();
 		std::vector<VkAttachmentDescription> attachments;
 		std::vector<VkImageView> attachment_views;
+		std::vector<VkSubpassDependency> dependencies;
+
 		if(p_renderer)
 		{
 			attachments.push_back(kvfBuildSwapchainAttachmentDescription(p_renderer->GetSwapchain().Get(), clear_attachments));
@@ -164,11 +166,30 @@
 		for(NonOwningPtr<Image> image : render_targets)
 		{
-			attachments.push_back(kvfBuildAttachmentDescription((kvfIsDepthFormat(image->GetFormat()) ? KVF_IMAGE_DEPTH : KVF_IMAGE_COLOR), image->GetFormat(), image->GetLayout(), image->GetLayout(), clear_attachments, VK_SAMPLE_COUNT_1_BIT));
+			attachments.push_back(kvfBuildAttachmentDescription(KVF_IMAGE_COLOR, image->GetFormat(), image->GetLayout(), image->GetLayout(), clear_attachments, VK_SAMPLE_COUNT_1_BIT));
 			attachment_views.push_back(image->GetImageView());
+#if 0
+			VkSubpassDependency& first_dependency = dependencies.emplace_back();
+			first_dependency.srcSubpass = VK_SUBPASS_EXTERNAL;
+			first_dependency.dstSubpass = 0;
+			first_dependency.srcStageMask = VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
+			first_dependency.dstStageMask = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
+			first_dependency.srcAccessMask = VK_ACCESS_SHADER_READ_BIT;
+			first_dependency.dstAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
+			first_dependency.dependencyFlags = VK_DEPENDENCY_BY_REGION_BIT;
+
+			VkSubpassDependency& second_dependency = dependencies.emplace_back();
+			second_dependency.srcSubpass = 0;
+			second_dependency.dstSubpass = VK_SUBPASS_EXTERNAL;
+			second_dependency.srcStageMask = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
+			second_dependency.dstStageMask = VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
+			second_dependency.srcAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
+			second_dependency.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;
+			second_dependency.dependencyFlags = VK_DEPENDENCY_BY_REGION_BIT;
+#endif
 		}
-		m_renderpass = kvfCreateRenderPass(RenderCore::Get().GetDevice(), attachments.data(), attachments.size(), GetPipelineBindPoint());
+		m_renderpass = kvfCreateRenderPassWithSubpassDependencies(RenderCore::Get().GetDevice(), attachments.data(), attachments.size(), GetPipelineBindPoint(), dependencies.data(), dependencies.size());
 		m_clears.clear();
 		m_clears.resize(attachments.size());
 		DebugLog("Vulkan: renderpass created");
diff --git a/runtime/Sources/Renderer/RenderPasses/2DPass.cpp b/runtime/Sources/Renderer/RenderPasses/2DPass.cpp
index 4a767ee..1f408c0 100644
--- 
a/runtime/Sources/Renderer/RenderPasses/2DPass.cpp +++ b/runtime/Sources/Renderer/RenderPasses/2DPass.cpp @@ -50,7 +50,7 @@ namespace mlx if(event.What() == Event::ResizeEventCode) m_pipeline.Destroy(); }; - EventBus::RegisterListener({ functor, "__MlxRender2DPass" }); + EventBus::RegisterListener({ functor, "mlx_2d_render_pass" }); p_viewer_data_set = RenderCore::Get().GetDescriptorPoolManager().GetAvailablePool().RequestDescriptorSet(p_vertex_shader->GetShaderLayout().set_layouts[0].second, ShaderType::Vertex); p_texture_set = RenderCore::Get().GetDescriptorPoolManager().GetAvailablePool().RequestDescriptorSet(p_fragment_shader->GetShaderLayout().set_layouts[0].second, ShaderType::Fragment); diff --git a/runtime/Sources/Renderer/RenderPasses/FinalPass.cpp b/runtime/Sources/Renderer/RenderPasses/FinalPass.cpp index c8dec0f..d9d674b 100644 --- a/runtime/Sources/Renderer/RenderPasses/FinalPass.cpp +++ b/runtime/Sources/Renderer/RenderPasses/FinalPass.cpp @@ -36,7 +36,7 @@ namespace mlx if(event.What() == Event::ResizeEventCode) m_pipeline.Destroy(); }; - EventBus::RegisterListener({ functor, "__MlxFinalPass" }); + EventBus::RegisterListener({ functor, "mlx_final_pass" }); p_set = RenderCore::Get().GetDescriptorPoolManager().GetAvailablePool().RequestDescriptorSet(p_fragment_shader->GetShaderLayout().set_layouts[0].second, ShaderType::Fragment); } diff --git a/runtime/Sources/Renderer/RenderPasses/Passes.cpp b/runtime/Sources/Renderer/RenderPasses/Passes.cpp index 29bebbb..41987d7 100644 --- a/runtime/Sources/Renderer/RenderPasses/Passes.cpp +++ b/runtime/Sources/Renderer/RenderPasses/Passes.cpp @@ -16,7 +16,7 @@ namespace mlx if(event.What() == Event::ResizeEventCode) m_main_render_texture.Destroy(); }; - EventBus::RegisterListener({ functor, "__MlxRenderPasses" }); + EventBus::RegisterListener({ functor, "mlx_render_passes" }); } void RenderPasses::Pass(Scene& scene, Renderer& renderer, const Vec4f& clear_color) diff --git a/runtime/Sources/Renderer/Renderer.cpp b/runtime/Sources/Renderer/Renderer.cpp index 9b9ea4e..8c18706 100644 --- a/runtime/Sources/Renderer/Renderer.cpp +++ b/runtime/Sources/Renderer/Renderer.cpp @@ -19,8 +19,24 @@ namespace mlx { MLX_PROFILE_FUNCTION(); p_window = window; - if(p_window) - m_swapchain.Init(p_window); + m_swapchain.Init(p_window); + for(std::size_t i = 0; i < MAX_FRAMES_IN_FLIGHT; i++) + { + m_image_available_semaphores[i] = kvfCreateSemaphore(RenderCore::Get().GetDevice()); + DebugLog("Vulkan: image available semaphore created"); + m_render_finished_semaphores[i] = kvfCreateSemaphore(RenderCore::Get().GetDevice()); + DebugLog("Vulkan: render finished semaphore created"); + m_cmd_buffers[i] = kvfCreateCommandBuffer(RenderCore::Get().GetDevice()); + DebugLog("Vulkan: command buffer created"); + m_cmd_fences[i] = kvfCreateFence(RenderCore::Get().GetDevice()); + DebugLog("Vulkan: fence created"); + } + } + + void Renderer::Init(NonOwningPtr render_target) + { + MLX_PROFILE_FUNCTION(); + p_render_target = render_target; for(std::size_t i = 0; i < MAX_FRAMES_IN_FLIGHT; i++) { m_image_available_semaphores[i] = kvfCreateSemaphore(RenderCore::Get().GetDevice()); diff --git a/runtime/Sources/Renderer/Swapchain.cpp b/runtime/Sources/Renderer/Swapchain.cpp index c6c7e96..2d6e694 100644 --- a/runtime/Sources/Renderer/Swapchain.cpp +++ b/runtime/Sources/Renderer/Swapchain.cpp @@ -19,8 +19,6 @@ namespace mlx void Swapchain::Init(NonOwningPtr window) { p_window = window; - m_surface = window->CreateVulkanSurface(RenderCore::Get().GetInstance()); - DebugLog("Vulkan: 
surface created"); CreateSwapchain(); } @@ -53,11 +51,16 @@ namespace mlx void Swapchain::Destroy() { + if(m_swapchain == VK_NULL_HANDLE) + return; RenderCore::Get().WaitDeviceIdle(); for(Image& img : m_swapchain_images) img.DestroyImageView(); + m_swapchain_images.clear(); kvfDestroySwapchainKHR(RenderCore::Get().GetDevice(), m_swapchain); + m_swapchain = VK_NULL_HANDLE; + DebugLog("Vulkan: swapchain destroyed"); RenderCore::Get().vkDestroySurfaceKHR(RenderCore::Get().GetInstance(), m_surface, nullptr); m_surface = VK_NULL_HANDLE; @@ -66,10 +69,6 @@ namespace mlx void Swapchain::CreateSwapchain() { - for(Image& img : m_swapchain_images) - img.DestroyImageView(); - m_swapchain_images.clear(); - VkExtent2D extent; do { @@ -77,10 +76,11 @@ namespace mlx extent = { size.x, size.y }; } while(extent.width == 0 || extent.height == 0); - VkSwapchainKHR old_swapchain = m_swapchain; - m_swapchain = kvfCreateSwapchainKHR(RenderCore::Get().GetDevice(), RenderCore::Get().GetPhysicalDevice(), m_surface, extent, VK_NULL_HANDLE, true); - if(old_swapchain != VK_NULL_HANDLE) - kvfDestroySwapchainKHR(RenderCore::Get().GetDevice(), old_swapchain); + Destroy(); + + m_surface = p_window->CreateVulkanSurface(RenderCore::Get().GetInstance()); + DebugLog("Vulkan: surface created"); + m_swapchain = kvfCreateSwapchainKHR(RenderCore::Get().GetDevice(), RenderCore::Get().GetPhysicalDevice(), m_surface, extent, VK_NULL_HANDLE, false); m_images_count = kvfGetSwapchainImagesCount(m_swapchain); m_min_images_count = kvfGetSwapchainMinImagesCount(m_swapchain); From e29f5e7eddece7d406ee6e50794404ab345b7c2c Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Sun, 1 Dec 2024 00:48:20 +0000 Subject: [PATCH 079/131] [BOT] update dependencies --- third_party/vulkan/vulkan_funcs.hpp | 17 ----------------- 1 file changed, 17 deletions(-) diff --git a/third_party/vulkan/vulkan_funcs.hpp b/third_party/vulkan/vulkan_funcs.hpp index 25a480c..14e59f8 100644 --- a/third_party/vulkan/vulkan_funcs.hpp +++ b/third_party/vulkan/vulkan_funcs.hpp @@ -18769,23 +18769,6 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_EXT_calibrated_timestamps === - template - VULKAN_HPP_INLINE void CommandBuffer::writeBufferMarker2AMD( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage, - VULKAN_HPP_NAMESPACE::Buffer dstBuffer, - VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, - uint32_t marker, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT - { - VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdWriteBufferMarker2AMD( static_cast( m_commandBuffer ), - static_cast( stage ), - static_cast( dstBuffer ), - static_cast( dstOffset ), - marker ); - } - - //=== VK_EXT_calibrated_timestamps === - template VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getCalibrateableTimeDomainsEXT( uint32_t * pTimeDomainCount, VULKAN_HPP_NAMESPACE::TimeDomainKHR * pTimeDomains, From ec2c444051f33057a1dd3af8475c649a14ac9b48 Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Wed, 4 Dec 2024 14:04:44 +0100 Subject: [PATCH 080/131] fixing window resizing --- runtime/Includes/Core/Application.inl | 2 +- .../Includes/Renderer/Pipelines/Graphics.h | 4 +- runtime/Sources/Core/Logs.cpp | 15 ++++-- runtime/Sources/Renderer/Descriptor.cpp | 10 +--- .../Sources/Renderer/Pipelines/Graphics.cpp | 54 ++++++++++++------- .../Sources/Renderer/RenderPasses/2DPass.cpp | 7 ++- .../Renderer/RenderPasses/FinalPass.cpp | 7 ++- runtime/Sources/Renderer/Swapchain.cpp | 3 +- third_party/kvf.h | 49 ++++++++++------- 9 files changed, 90 insertions(+), 61 deletions(-) diff --git 
a/runtime/Includes/Core/Application.inl b/runtime/Includes/Core/Application.inl
index bc4ab5a..bf2c9be 100644
--- a/runtime/Includes/Core/Application.inl
+++ b/runtime/Includes/Core/Application.inl
@@ -9,7 +9,7 @@
 			Error("invalid window ptr (NULL)"); \
 			return; \
 		} \
-		else if(std::find_if(m_graphics.begin(), m_graphics.end(), [win](const std::unique_ptr& gs){ return *static_cast(win) == gs->GetID(); }) == m_graphics.end()) \
+		else if(std::find_if(m_graphics.begin(), m_graphics.end(), [win](const std::unique_ptr& gs){ return gs && *static_cast(win) == gs->GetID(); }) == m_graphics.end()) \
 		{ \
 			Error("invalid window ptr"); \
 			return; \
diff --git a/runtime/Includes/Renderer/Pipelines/Graphics.h b/runtime/Includes/Renderer/Pipelines/Graphics.h
index 246494a..a474f80 100644
--- a/runtime/Includes/Renderer/Pipelines/Graphics.h
+++ b/runtime/Includes/Renderer/Pipelines/Graphics.h
@@ -24,8 +24,8 @@ namespace mlx
 			GraphicPipeline() = default;
 
 			void Init(const GraphicPipelineDescriptor& descriptor, std::string_view debug_name);
-			bool BindPipeline(VkCommandBuffer command_buffer, std::size_t framebuffer_index, std::array<float, 4> clear) noexcept;
-			void EndPipeline(VkCommandBuffer command_buffer) noexcept override;
+			bool BindPipeline(VkCommandBuffer cmd, std::size_t framebuffer_index, std::array<float, 4> clear) noexcept;
+			void EndPipeline(VkCommandBuffer cmd) noexcept override;
 			void Destroy() noexcept;
 
 			[[nodiscard]] inline VkPipeline GetPipeline() const override { return m_pipeline; }
diff --git a/runtime/Sources/Core/Logs.cpp b/runtime/Sources/Core/Logs.cpp
index 47cbadf..f61ec99 100644
--- a/runtime/Sources/Core/Logs.cpp
+++ b/runtime/Sources/Core/Logs.cpp
@@ -38,14 +38,19 @@ namespace mlx
 
 		switch(type)
 		{
-			case LogType::Debug: std::cout << Ansi::blue << "[MLX Debug] " << Ansi::def << code_infos << message << std::endl; break;
-			case LogType::Message: std::cout << Ansi::blue << "[MLX Message] " << Ansi::def << code_infos << message << '\n'; break;
-			case LogType::Warning: std::cout << Ansi::magenta << "[MLX Warning] " << Ansi::def << code_infos << message << '\n'; break;
-			case LogType::Error: std::cerr << Ansi::red << "[MLX Error] " << Ansi::def << code_infos << message << '\n'; break;
-			case LogType::FatalError: std::cerr << Ansi::red << "[MLX Fatal Error] " << Ansi::def << code_infos << message << '\n'; break;
+			case LogType::Debug: std::cout << Ansi::blue << "[MLX Debug] "; break;
+			case LogType::Message: std::cout << Ansi::blue << "[MLX Message] "; break;
+			case LogType::Warning: std::cout << Ansi::magenta << "[MLX Warning] "; break;
+			case LogType::Error: std::cerr << Ansi::red << "[MLX Error] "; break;
+			case LogType::FatalError: std::cerr << Ansi::red << "[MLX Fatal Error] "; break;
 
 			default: break;
 		}
+
+		const std::chrono::zoned_time current_time{ std::chrono::current_zone(), std::chrono::floor<std::chrono::seconds>(std::chrono::system_clock::now()) };
+
+		std::cout << Ansi::yellow << std::format("[{0:%H:%M:%S}] ", current_time) << Ansi::def << code_infos << message << std::endl;
+
 		if(type == LogType::FatalError)
 		{
 			std::cout << Ansi::bg_red << "Fatal Error: emergency exit" << Ansi::bg_def << std::endl;
diff --git a/runtime/Sources/Renderer/Descriptor.cpp b/runtime/Sources/Renderer/Descriptor.cpp
index b0240c1..db18106 100644
--- a/runtime/Sources/Renderer/Descriptor.cpp
+++ b/runtime/Sources/Renderer/Descriptor.cpp
@@ -26,17 +26,9 @@ namespace mlx
 	{
 		MLX_PROFILE_FUNCTION();
 		VkDescriptorPoolSize pool_sizes[] = {
-			{ VK_DESCRIPTOR_TYPE_SAMPLER, MAX_SETS_PER_POOL },
 			{ VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, MAX_SETS_PER_POOL },
-			{ 
VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, MAX_SETS_PER_POOL }, - { VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, MAX_SETS_PER_POOL }, - { VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER, MAX_SETS_PER_POOL }, - { VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER, MAX_SETS_PER_POOL }, { VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, MAX_SETS_PER_POOL }, - { VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, MAX_SETS_PER_POOL }, - { VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC, MAX_SETS_PER_POOL }, - { VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC, MAX_SETS_PER_POOL }, - { VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, MAX_SETS_PER_POOL } + { VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, MAX_SETS_PER_POOL } }; VkDescriptorPoolCreateInfo pool_info{}; diff --git a/runtime/Sources/Renderer/Pipelines/Graphics.cpp b/runtime/Sources/Renderer/Pipelines/Graphics.cpp index 4a8024f..9036e09 100644 --- a/runtime/Sources/Renderer/Pipelines/Graphics.cpp +++ b/runtime/Sources/Renderer/Pipelines/Graphics.cpp @@ -50,8 +50,10 @@ namespace mlx kvfGPipelineBuilderSetVertexInputs(builder, binding_description, attributes_description.data(), attributes_description.size()); } - m_pipeline = kvfCreateGraphicsPipeline(RenderCore::Get().GetDevice(), m_pipeline_layout, builder, m_renderpass); - DebugLog("Vulkan: graphics pipeline created"); + m_pipeline = kvfCreateGraphicsPipeline(RenderCore::Get().GetDevice(), VK_NULL_HANDLE, m_pipeline_layout, builder, m_renderpass); + #ifdef DEBUG + DebugLog("Vulkan: graphics pipeline created %", m_debug_name); + #endif kvfDestroyGPipelineBuilder(builder); #ifdef MLX_HAS_DEBUG_UTILS_FUNCTIONS @@ -82,10 +84,10 @@ namespace mlx #endif } - bool GraphicPipeline::BindPipeline(VkCommandBuffer command_buffer, std::size_t framebuffer_index, std::array clear) noexcept + bool GraphicPipeline::BindPipeline(VkCommandBuffer cmd, std::size_t framebuffer_index, std::array clear) noexcept { MLX_PROFILE_FUNCTION(); - TransitionAttachments(command_buffer); + TransitionAttachments(cmd); VkFramebuffer fb = m_framebuffers[framebuffer_index]; VkExtent2D fb_extent = kvfGetFramebufferSize(fb); @@ -96,12 +98,12 @@ namespace mlx viewport.height = fb_extent.height; viewport.minDepth = 0.0f; viewport.maxDepth = 1.0f; - RenderCore::Get().vkCmdSetViewport(command_buffer, 0, 1, &viewport); + RenderCore::Get().vkCmdSetViewport(cmd, 0, 1, &viewport); VkRect2D scissor{}; scissor.offset = { 0, 0 }; scissor.extent = fb_extent; - RenderCore::Get().vkCmdSetScissor(command_buffer, 0, 1, &scissor); + RenderCore::Get().vkCmdSetScissor(cmd, 0, 1, &scissor); for(std::size_t i = 0; i < m_clears.size(); i++) { @@ -111,15 +113,15 @@ namespace mlx m_clears[i].color.float32[3] = clear[3]; } - kvfBeginRenderPass(m_renderpass, command_buffer, fb, fb_extent, m_clears.data(), m_clears.size()); - RenderCore::Get().vkCmdBindPipeline(command_buffer, GetPipelineBindPoint(), GetPipeline()); + kvfBeginRenderPass(m_renderpass, cmd, fb, fb_extent, m_clears.data(), m_clears.size()); + RenderCore::Get().vkCmdBindPipeline(cmd, GetPipelineBindPoint(), GetPipeline()); return true; } - void GraphicPipeline::EndPipeline(VkCommandBuffer command_buffer) noexcept + void GraphicPipeline::EndPipeline(VkCommandBuffer cmd) noexcept { MLX_PROFILE_FUNCTION(); - RenderCore::Get().vkCmdEndRenderPass(command_buffer); + RenderCore::Get().vkCmdEndRenderPass(cmd); } void GraphicPipeline::Destroy() noexcept @@ -127,25 +129,33 @@ namespace mlx MLX_PROFILE_FUNCTION(); p_vertex_shader.reset(); p_fragment_shader.reset(); - for(auto& fb : m_framebuffers) + for(auto fb : m_framebuffers) { kvfDestroyFramebuffer(RenderCore::Get().GetDevice(), fb); - 
DebugLog("Vulkan: framebuffer destroyed"); + #ifdef DEBUG + DebugLog("Vulkan: framebuffer destroyed in %", m_debug_name); + #endif } m_framebuffers.clear(); kvfDestroyPipelineLayout(RenderCore::Get().GetDevice(), m_pipeline_layout); m_pipeline_layout = VK_NULL_HANDLE; - DebugLog("Vulkan: graphics pipeline layout destroyed"); + #ifdef DEBUG + DebugLog("Vulkan: graphics pipeline layout destroyed %", m_debug_name); + #endif kvfDestroyRenderPass(RenderCore::Get().GetDevice(), m_renderpass); m_renderpass = VK_NULL_HANDLE; - DebugLog("Vulkan: renderpass destroyed"); + #ifdef DEBUG + DebugLog("Vulkan: renderpass destroyed for %", m_debug_name); + #endif kvfDestroyPipeline(RenderCore::Get().GetDevice(), m_pipeline); m_pipeline = VK_NULL_HANDLE; - DebugLog("Vulkan: graphics pipeline destroyed"); - + #ifdef DEBUG + DebugLog("Vulkan: graphics pipeline destroyed %", m_debug_name); + #endif + p_renderer = nullptr; m_clears.clear(); m_attachments.clear(); @@ -192,7 +202,9 @@ namespace mlx m_renderpass = kvfCreateRenderPassWithSubpassDependencies(RenderCore::Get().GetDevice(), attachments.data(), attachments.size(), GetPipelineBindPoint(), dependencies.data(), dependencies.size()); m_clears.clear(); m_clears.resize(attachments.size()); - DebugLog("Vulkan: renderpass created"); + #ifdef DEBUG + DebugLog("Vulkan: renderpass created for %", m_debug_name); + #endif if(p_renderer) { @@ -200,13 +212,17 @@ namespace mlx { attachment_views[0] = image.GetImageView(); m_framebuffers.push_back(kvfCreateFramebuffer(RenderCore::Get().GetDevice(), m_renderpass, attachment_views.data(), attachment_views.size(), { .width = image.GetWidth(), .height = image.GetHeight() })); - DebugLog("Vulkan: framebuffer created"); + #ifdef DEBUG + DebugLog("Vulkan: framebuffer created for %", m_debug_name); + #endif } } for(NonOwningPtr image : render_targets) { m_framebuffers.push_back(kvfCreateFramebuffer(RenderCore::Get().GetDevice(), m_renderpass, attachment_views.data(), attachment_views.size(), { .width = image->GetWidth(), .height = image->GetHeight() })); - DebugLog("Vulkan: framebuffer created"); + #ifdef DEBUG + DebugLog("Vulkan: framebuffer created for %", m_debug_name); + #endif } } diff --git a/runtime/Sources/Renderer/RenderPasses/2DPass.cpp b/runtime/Sources/Renderer/RenderPasses/2DPass.cpp index 1f408c0..0d1de8e 100644 --- a/runtime/Sources/Renderer/RenderPasses/2DPass.cpp +++ b/runtime/Sources/Renderer/RenderPasses/2DPass.cpp @@ -50,7 +50,7 @@ namespace mlx if(event.What() == Event::ResizeEventCode) m_pipeline.Destroy(); }; - EventBus::RegisterListener({ functor, "mlx_2d_render_pass" }); + EventBus::RegisterListener({ functor, "mlx_2d_render_pass_" + std::to_string(reinterpret_cast(this)) }); p_viewer_data_set = RenderCore::Get().GetDescriptorPoolManager().GetAvailablePool().RequestDescriptorSet(p_vertex_shader->GetShaderLayout().set_layouts[0].second, ShaderType::Vertex); p_texture_set = RenderCore::Get().GetDescriptorPoolManager().GetAvailablePool().RequestDescriptorSet(p_fragment_shader->GetShaderLayout().set_layouts[0].second, ShaderType::Fragment); @@ -76,7 +76,10 @@ namespace mlx pipeline_descriptor.color_attachments = { &render_target }; pipeline_descriptor.clear_color_attachments = false; #ifdef DEBUG - m_pipeline.Init(pipeline_descriptor, "mlx_2D_pass"); + if(renderer.GetWindow()) + m_pipeline.Init(pipeline_descriptor, "mlx_2D_pass_" + renderer.GetWindow()->GetName()); + else + m_pipeline.Init(pipeline_descriptor, "mlx_2D_pass"); #else m_pipeline.Init(pipeline_descriptor, {}); #endif diff --git 
a/runtime/Sources/Renderer/RenderPasses/FinalPass.cpp b/runtime/Sources/Renderer/RenderPasses/FinalPass.cpp index d9d674b..0a897a8 100644 --- a/runtime/Sources/Renderer/RenderPasses/FinalPass.cpp +++ b/runtime/Sources/Renderer/RenderPasses/FinalPass.cpp @@ -36,7 +36,7 @@ namespace mlx if(event.What() == Event::ResizeEventCode) m_pipeline.Destroy(); }; - EventBus::RegisterListener({ functor, "mlx_final_pass" }); + EventBus::RegisterListener({ functor, "mlx_final_pass_" + std::to_string(reinterpret_cast(this)) }); p_set = RenderCore::Get().GetDescriptorPoolManager().GetAvailablePool().RequestDescriptorSet(p_fragment_shader->GetShaderLayout().set_layouts[0].second, ShaderType::Fragment); } @@ -55,7 +55,10 @@ namespace mlx pipeline_descriptor.renderer = &renderer; pipeline_descriptor.no_vertex_inputs = true; #ifdef DEBUG - m_pipeline.Init(pipeline_descriptor, "mlx_final_pass"); + if(final_target) + m_pipeline.Init(pipeline_descriptor, "mlx_final_pass"); + else + m_pipeline.Init(pipeline_descriptor, "mlx_final_pass_" + renderer.GetWindow()->GetName()); #else m_pipeline.Init(pipeline_descriptor, {}); #endif diff --git a/runtime/Sources/Renderer/Swapchain.cpp b/runtime/Sources/Renderer/Swapchain.cpp index 2d6e694..d598120 100644 --- a/runtime/Sources/Renderer/Swapchain.cpp +++ b/runtime/Sources/Renderer/Swapchain.cpp @@ -27,6 +27,7 @@ namespace mlx if(m_resize) { RenderCore::Get().WaitDeviceIdle(); + Destroy(); CreateSwapchain(); EventBus::SendBroadcast(Internal::ResizeEventBroadcast{}); } @@ -76,8 +77,6 @@ namespace mlx extent = { size.x, size.y }; } while(extent.width == 0 || extent.height == 0); - Destroy(); - m_surface = p_window->CreateVulkanSurface(RenderCore::Get().GetInstance()); DebugLog("Vulkan: surface created"); m_swapchain = kvfCreateSwapchainKHR(RenderCore::Get().GetDevice(), RenderCore::Get().GetPhysicalDevice(), m_surface, extent, VK_NULL_HANDLE, false); diff --git a/third_party/kvf.h b/third_party/kvf.h index fa6a1bd..be29a0b 100755 --- a/third_party/kvf.h +++ b/third_party/kvf.h @@ -229,7 +229,7 @@ void kvfGPipelineBuilderSetVertexInputs(KvfGraphicsPipelineBuilder* builder, VkV void kvfGPipelineBuilderAddShaderStage(KvfGraphicsPipelineBuilder* builder, VkShaderStageFlagBits stage, VkShaderModule module, const char* entry); void kvfGPipelineBuilderResetShaderStages(KvfGraphicsPipelineBuilder* builder); -VkPipeline kvfCreateGraphicsPipeline(VkDevice device, VkPipelineLayout layout, KvfGraphicsPipelineBuilder* builder, VkRenderPass pass); +VkPipeline kvfCreateGraphicsPipeline(VkDevice device, VkPipelineCache cache, VkPipelineLayout layout, KvfGraphicsPipelineBuilder* builder, VkRenderPass pass); void kvfDestroyPipeline(VkDevice device, VkPipeline pipeline); void kvfCheckVk(VkResult result); @@ -413,9 +413,9 @@ typedef struct __KvfDevice { VkSurfaceCapabilitiesKHR capabilities; VkSurfaceFormatKHR* formats; - VkPresentModeKHR* presentModes; + VkPresentModeKHR* present_modes; uint32_t formats_count; - uint32_t presentModes_count; + uint32_t present_modes_count; } __KvfSwapchainSupportInternal; typedef struct __KvfSwapchain @@ -619,6 +619,7 @@ void __kvfDestroyDevice(VkDevice device) if(__kvf_internal_devices_size == 0) { KVF_FREE(__kvf_internal_devices); + __kvf_internal_devices = NULL; __kvf_internal_devices_capacity = 0; } return; @@ -703,6 +704,7 @@ __KvfDevice* __kvfGetKvfDeviceFromVkCommandBuffer(VkCommandBuffer cmd) if(__kvf_internal_swapchains_size == 0) { KVF_FREE(__kvf_internal_swapchains); + __kvf_internal_swapchains = NULL; __kvf_internal_swapchains_capacity = 0; } return; 
@@ -718,7 +720,6 @@ __KvfDevice* __kvfGetKvfDeviceFromVkCommandBuffer(VkCommandBuffer cmd) if(__kvf_internal_swapchains[i].swapchain == swapchain) return &__kvf_internal_swapchains[i]; } - puts("not found"); return NULL; } #endif @@ -729,7 +730,7 @@ void __kvfAddFramebufferToArray(VkFramebuffer framebuffer, VkExtent2D extent) if(__kvf_internal_framebuffers_size == __kvf_internal_framebuffers_capacity) { // Resize the dynamic array if necessary - __kvf_internal_framebuffers_capacity += 2; + __kvf_internal_framebuffers_capacity += 5; __kvf_internal_framebuffers = (__KvfFramebuffer*)KVF_REALLOC(__kvf_internal_framebuffers, __kvf_internal_framebuffers_capacity * sizeof(__KvfFramebuffer)); } @@ -760,11 +761,13 @@ void __kvfDestroyFramebuffer(VkDevice device, VkFramebuffer framebuffer) if(__kvf_internal_framebuffers_size == 0) { KVF_FREE(__kvf_internal_framebuffers); + __kvf_internal_framebuffers = NULL; __kvf_internal_framebuffers_capacity = 0; } return; } } + KVF_ASSERT(false && "could not find framebuffer"); } __KvfFramebuffer* __kvfGetKvfFramebufferFromVkFramebuffer(VkFramebuffer framebuffer) @@ -1875,12 +1878,12 @@ void kvfDestroySemaphore(VkDevice device, VkSemaphore semaphore) KVF_GET_INSTANCE_FUNCTION(vkGetPhysicalDeviceSurfaceFormatsKHR)(physical, surface, &support.formats_count, support.formats); } - KVF_GET_INSTANCE_FUNCTION(vkGetPhysicalDeviceSurfacePresentModesKHR)(physical, surface, &support.presentModes_count, NULL); - if(support.presentModes_count != 0) + KVF_GET_INSTANCE_FUNCTION(vkGetPhysicalDeviceSurfacePresentModesKHR)(physical, surface, &support.present_modes_count, NULL); + if(support.present_modes_count != 0) { - support.presentModes = (VkPresentModeKHR*)KVF_MALLOC(sizeof(VkPresentModeKHR) * support.presentModes_count); - KVF_ASSERT(support.presentModes != NULL && "allocation failed :("); - KVF_GET_INSTANCE_FUNCTION(vkGetPhysicalDeviceSurfacePresentModesKHR)(physical, surface, &support.presentModes_count, support.presentModes); + support.present_modes = (VkPresentModeKHR*)KVF_MALLOC(sizeof(VkPresentModeKHR) * support.present_modes_count); + KVF_ASSERT(support.present_modes != NULL && "allocation failed :("); + KVF_GET_INSTANCE_FUNCTION(vkGetPhysicalDeviceSurfacePresentModesKHR)(physical, surface, &support.present_modes_count, support.present_modes); } return support; } @@ -1897,13 +1900,21 @@ void kvfDestroySemaphore(VkDevice device, VkSemaphore semaphore) VkPresentModeKHR __kvfChooseSwapPresentMode(__KvfSwapchainSupportInternal* support, bool try_vsync) { - if(try_vsync == false) - return VK_PRESENT_MODE_IMMEDIATE_KHR; - for(uint32_t i = 0; i < support->presentModes_count; i++) + if(try_vsync) + return VK_PRESENT_MODE_FIFO_KHR; + bool mailbox_supported = false; + bool immediate_supported = false; + for(uint32_t i = 0; i < support->present_modes_count; i++) { - if(support->presentModes[i] == VK_PRESENT_MODE_MAILBOX_KHR) - return support->presentModes[i]; + if(support->present_modes[i] == VK_PRESENT_MODE_MAILBOX_KHR) + mailbox_supported = true; + if(support->present_modes[i] == VK_PRESENT_MODE_IMMEDIATE_KHR) + immediate_supported = true; } + if(mailbox_supported) + return VK_PRESENT_MODE_MAILBOX_KHR; + if(immediate_supported) + return VK_PRESENT_MODE_IMMEDIATE_KHR; // Best mode for low latency return VK_PRESENT_MODE_FIFO_KHR; } @@ -1920,7 +1931,7 @@ void kvfDestroySemaphore(VkDevice device, VkSemaphore semaphore) __KvfSwapchainSupportInternal support = __kvfQuerySwapchainSupport(physical, surface); VkSurfaceFormatKHR surfaceFormat = __kvfChooseSwapSurfaceFormat(&support); - 
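The pointer-clearing lines added above for the device, swapchain, and framebuffer tracking arrays all follow one pattern: once an array shrinks to zero it is freed, and the stored pointer has to be reset so the next KVF_REALLOC starts from NULL instead of a dangling pointer. A standalone sketch of the idea (illustrative, not the kvf code itself):

#include <cstdlib>

// Sketch: release a dynamic array once it is empty and leave it safe to grow
// again. realloc(NULL, n) behaves like malloc(n), whereas realloc() on a
// pointer that was already freed is undefined behaviour.
struct Tracker
{
	void* data = nullptr;
	std::size_t size = 0;
	std::size_t capacity = 0;
};

static void ReleaseIfEmpty(Tracker& t)
{
	if(t.size != 0)
		return;
	std::free(t.data);
	t.data = nullptr;
	t.capacity = 0;
}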
VkPresentModeKHR presentMode = __kvfChooseSwapPresentMode(&support, try_vsync); + VkPresentModeKHR present_mode = __kvfChooseSwapPresentMode(&support, try_vsync); uint32_t image_count = support.capabilities.minImageCount + 1; if(support.capabilities.maxImageCount > 0 && image_count > support.capabilities.maxImageCount) @@ -1950,7 +1961,7 @@ void kvfDestroySemaphore(VkDevice device, VkSemaphore semaphore) createInfo.imageUsage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT; createInfo.preTransform = support.capabilities.currentTransform; createInfo.compositeAlpha = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR; - createInfo.presentMode = presentMode; + createInfo.presentMode = present_mode; createInfo.clipped = VK_TRUE; createInfo.oldSwapchain = old_swapchain; @@ -2968,7 +2979,7 @@ void kvfGPipelineBuilderResetShaderStages(KvfGraphicsPipelineBuilder* builder) builder->shader_stages_count = 0; } -VkPipeline kvfCreateGraphicsPipeline(VkDevice device, VkPipelineLayout layout, KvfGraphicsPipelineBuilder* builder, VkRenderPass pass) +VkPipeline kvfCreateGraphicsPipeline(VkDevice device, VkPipelineCache cache, VkPipelineLayout layout, KvfGraphicsPipelineBuilder* builder, VkRenderPass pass) { KVF_ASSERT(builder != NULL); KVF_ASSERT(device != VK_NULL_HANDLE); @@ -3020,7 +3031,7 @@ VkPipeline kvfCreateGraphicsPipeline(VkDevice device, VkPipelineLayout layout, K KVF_ASSERT(kvf_device != NULL); #endif VkPipeline pipeline; - __kvfCheckVk(KVF_GET_DEVICE_FUNCTION(vkCreateGraphicsPipelines)(device, VK_NULL_HANDLE, 1, &pipeline_info, NULL, &pipeline)); + __kvfCheckVk(KVF_GET_DEVICE_FUNCTION(vkCreateGraphicsPipelines)(device, cache, 1, &pipeline_info, NULL, &pipeline)); return pipeline; } From 5529bbd2e135d75a267a5524a5a943e457fdf129 Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Wed, 4 Dec 2024 14:19:36 +0100 Subject: [PATCH 081/131] adding missing include --- runtime/Includes/PreCompiled.h | 1 + runtime/Sources/Core/EventBus.cpp | 2 -- runtime/Sources/Renderer/RenderCore.cpp | 3 +++ 3 files changed, 4 insertions(+), 2 deletions(-) diff --git a/runtime/Includes/PreCompiled.h b/runtime/Includes/PreCompiled.h index 0b597f6..6b88100 100644 --- a/runtime/Includes/PreCompiled.h +++ b/runtime/Includes/PreCompiled.h @@ -36,6 +36,7 @@ #include #include #include +#include #include #include #include diff --git a/runtime/Sources/Core/EventBus.cpp b/runtime/Sources/Core/EventBus.cpp index d67711a..721796a 100644 --- a/runtime/Sources/Core/EventBus.cpp +++ b/runtime/Sources/Core/EventBus.cpp @@ -6,7 +6,6 @@ namespace mlx { void EventBus::Send(const std::string& listener_name, const EventBase& event) { - #pragma omp parallel for for(const EventListener& listener : s_listeners) { if(listener.GetName() == listener_name) @@ -20,7 +19,6 @@ namespace mlx void EventBus::SendBroadcast(const EventBase& event) { - #pragma omp parallel for for(const EventListener& listener : s_listeners) listener.Call(event); } diff --git a/runtime/Sources/Renderer/RenderCore.cpp b/runtime/Sources/Renderer/RenderCore.cpp index e4abfaf..f2b005c 100644 --- a/runtime/Sources/Renderer/RenderCore.cpp +++ b/runtime/Sources/Renderer/RenderCore.cpp @@ -14,9 +14,12 @@ #if defined(MLX_COMPILER_GCC) || defined(MLX_COMPILER_CLANG) #pragma clang diagnostic push + #pragma GCC diagnostic push #pragma clang diagnostic ignored "-Wmissing-field-initializers" + #pragma GCC diagnostic ignored "-Wmissing-field-initializers" #include #pragma clang diagnostic pop + #pragma GCC diagnostic pop #else #include #endif From cda4faa103ed6cd70bff3a821186b0dd66f7172e Mon Sep 17 00:00:00 2001 From: 
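Threading a VkPipelineCache through kvfCreateGraphicsPipeline lets repeated pipeline builds reuse previously compiled state, while passing VK_NULL_HANDLE keeps the previous behaviour. A hedged sketch of creating such a cache with the stock Vulkan API (everything outside the hunks above is illustrative):

#include <vulkan/vulkan.h>

// Sketch: create an empty pipeline cache once per device. The handle can then
// be passed as the new `cache` parameter of kvfCreateGraphicsPipeline;
// VK_NULL_HANDLE remains valid and simply disables caching.
static VkPipelineCache CreatePipelineCache(VkDevice device)
{
	VkPipelineCacheCreateInfo info{};
	info.sType = VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO;
	VkPipelineCache cache = VK_NULL_HANDLE;
	if(vkCreatePipelineCache(device, &info, nullptr, &cache) != VK_SUCCESS)
		return VK_NULL_HANDLE;
	return cache;
}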
Kbz-8 Date: Wed, 4 Dec 2024 14:30:09 +0100 Subject: [PATCH 082/131] adding compiler version info to makefile header --- Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Makefile b/Makefile index f4d4a83..0224cc8 100644 --- a/Makefile +++ b/Makefile @@ -138,7 +138,7 @@ $(NAME): $(OBJS) @printf "$(_BOLD)$(NAME)$(_RESET) compiled $(COLOR)$(_BOLD)successfully$(_RESET)\n" _printbuildinfos: - @printf "$(_PURPLE)$(_BOLD)MacroLibX $(_RESET)Compiling in $(_BOLD)$(MODE)$(_RESET) mode on $(_BOLD)$(OS)$(_RESET) | Using $(_BOLD)$(CXX)$(_RESET), flags: $(_BOLD)$(_ENABLEDFLAGS)$(_RESET)\n" + @printf "$(_PURPLE)$(_BOLD)MacroLibX $(_RESET)Compiling in $(_BOLD)$(MODE)$(_RESET) mode on $(_BOLD)$(OS)$(_RESET) | Using $(_BOLD)$(CXX) ($(shell $(CXX) --version | head -n 1))$(_RESET), flags: $(_BOLD)$(_ENABLEDFLAGS)$(_RESET)\n" debug: @$(MAKE) all DEBUG=true -j$(shell nproc) From 1bf92b3b2606e8d2a6fde6aea069fe7353dfd48c Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Wed, 4 Dec 2024 14:35:54 +0100 Subject: [PATCH 083/131] adding include check --- runtime/Includes/PreCompiled.h | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/runtime/Includes/PreCompiled.h b/runtime/Includes/PreCompiled.h index 6b88100..b4f64b6 100644 --- a/runtime/Includes/PreCompiled.h +++ b/runtime/Includes/PreCompiled.h @@ -36,7 +36,6 @@ #include #include #include -#include #include #include #include @@ -51,6 +50,15 @@ #include #include +// Experimentals +#if __has_include() + #include +#elif __has_include() + #include +#else + #error header not present in this STL +#endif + #ifndef MLX_PLAT_WINDOWS #include #endif From 37ea1b1cd4bfbf8ae1c7ae53761c3dca4e7b0857 Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Wed, 4 Dec 2024 15:04:51 +0100 Subject: [PATCH 084/131] enabling pre compiled header --- Makefile | 16 ++++++++++++++-- runtime/Includes/PreCompiled.h | 1 + runtime/Sources/Renderer/Descriptor.cpp | 1 - runtime/Sources/Renderer/RenderCore.cpp | 10 ++++++---- 4 files changed, 21 insertions(+), 7 deletions(-) diff --git a/Makefile b/Makefile index 0224cc8..cc39693 100644 --- a/Makefile +++ b/Makefile @@ -31,13 +31,19 @@ CXX = clang++ CXXFLAGS = -std=c++20 -fPIC -Wall -Wextra -DSDL_MAIN_HANDLED INCLUDES = -I./includes -I./runtime/Includes -I./runtime/Sources -I./third_party +CXXPCHFLAGS = + +PCH = runtime/Includes/PreCompiled.h +GCH = runtime/Includes/PreCompiled.h.gch + NZSLC = nzslc ifeq ($(TOOLCHAIN), gcc) CXX = g++ CXXFLAGS += -Wno-error=cpp else - CXXFLAGS += -Wno-error=#warning + CXXFLAGS += -Wno-error=#warning -include-pch $(GCH) + CXXPCHFLAGS = -xc++-header endif ifeq ($(OS), Darwin) @@ -108,7 +114,7 @@ ifeq ($(OBJS_TOTAL), 0) # To avoid division per 0 endif CURR_OBJ = 0 -$(OBJ_DIR)/%.o: %.cpp +$(OBJ_DIR)/%.o: %.cpp $(GCH) @mkdir -p $(dir $@) @$(eval CURR_OBJ=$(shell echo $$(( $(CURR_OBJ) + 1 )))) @$(eval PERCENT=$(shell echo $$(( $(CURR_OBJ) * 100 / $(OBJS_TOTAL) )))) @@ -132,6 +138,10 @@ CURR_SPV = 0 all: _printbuildinfos @$(MAKE) $(NAME) +$(GCH): + @printf "$(COLOR)($(_BOLD)%3s%%$(_RESET)$(COLOR)) $(_RESET)Compiling $(_BOLD)PreCompiled header$(_RESET)\n" "0" + @$(CXX) $(CXXPCHFLAGS) $(INCLUDES) $(PCH) -o $(GCH) + $(NAME): $(OBJS) @printf "Linking $(_BOLD)$(NAME)$(_RESET)\n" @$(CXX) -shared -o $(NAME) $(OBJS) $(LDFLAGS) @@ -151,6 +161,8 @@ shaders: clean-shaders $(SPVS) clean: @$(RM) $(OBJ_DIR) @printf "Cleaned $(_BOLD)$(OBJ_DIR)$(_RESET)\n" + @$(RM) $(GCH) + @printf "Cleaned $(_BOLD)$(GCH)$(_RESET)\n" fclean: clean @$(RM) $(NAME) diff --git a/runtime/Includes/PreCompiled.h 
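PATCH 083 guards an include that only some standard libraries ship; the header names themselves are cut off above. The idiom is the C++17 __has_include preprocessor check, sketched here with a placeholder header (purely illustrative, not necessarily the header the patch guards):

// Sketch of the detection idiom; <source_location> is only an example.
#if __has_include(<source_location>)
	#include <source_location>
#elif __has_include(<experimental/source_location>)
	#include <experimental/source_location>
#else
	#error "header not present in this STL"
#endif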
b/runtime/Includes/PreCompiled.h index b4f64b6..4a958a9 100644 --- a/runtime/Includes/PreCompiled.h +++ b/runtime/Includes/PreCompiled.h @@ -83,6 +83,7 @@ #pragma GCC diagnostic ignored "-Wmissing-field-initializers" #pragma GCC diagnostic ignored "-Wunused-parameter" #pragma GCC diagnostic ignored "-Wunused-variable" + #pragma GCC diagnostic ignored "-Wunused-function" #pragma GCC diagnostic ignored "-Wparentheses" #include #pragma GCC diagnostic pop diff --git a/runtime/Sources/Renderer/Descriptor.cpp b/runtime/Sources/Renderer/Descriptor.cpp index db18106..0954bd0 100644 --- a/runtime/Sources/Renderer/Descriptor.cpp +++ b/runtime/Sources/Renderer/Descriptor.cpp @@ -142,7 +142,6 @@ namespace mlx void DescriptorPoolManager::Destroy() { MLX_PROFILE_FUNCTION(); - #pragma omp parallel for for(auto& pool : m_pools) pool.Destroy(); m_pools.clear(); diff --git a/runtime/Sources/Renderer/RenderCore.cpp b/runtime/Sources/Renderer/RenderCore.cpp index f2b005c..e4e6aa4 100644 --- a/runtime/Sources/Renderer/RenderCore.cpp +++ b/runtime/Sources/Renderer/RenderCore.cpp @@ -12,14 +12,16 @@ #define KVF_REALLOC(p, x) (mlx::MemManager::Get().Realloc(p, x)) #define KVF_FREE(x) (mlx::MemManager::Get().Free(x)) -#if defined(MLX_COMPILER_GCC) || defined(MLX_COMPILER_CLANG) - #pragma clang diagnostic push +#if defined(MLX_COMPILER_GCC) #pragma GCC diagnostic push - #pragma clang diagnostic ignored "-Wmissing-field-initializers" #pragma GCC diagnostic ignored "-Wmissing-field-initializers" #include - #pragma clang diagnostic pop #pragma GCC diagnostic pop +#elif defined(MLX_COMPILER_CLANG) + #pragma clang diagnostic push + #pragma clang diagnostic ignored "-Wmissing-field-initializers" + #include + #pragma clang diagnostic pop #else #include #endif From fc74873bc612a469cdc248d0257741760f5d89e8 Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Wed, 4 Dec 2024 15:14:56 +0100 Subject: [PATCH 085/131] yes --- .gitignore | 1 + Makefile | 7 ++++--- runtime/Sources/Renderer/Memory.cpp | 1 - 3 files changed, 5 insertions(+), 4 deletions(-) diff --git a/.gitignore b/.gitignore index 5227471..82d65a8 100644 --- a/.gitignore +++ b/.gitignore @@ -12,6 +12,7 @@ *.ilk *.pdb *.gch +*.pch *.exe *vgcore *.gdb_history diff --git a/Makefile b/Makefile index cc39693..b08d23b 100644 --- a/Makefile +++ b/Makefile @@ -31,19 +31,20 @@ CXX = clang++ CXXFLAGS = -std=c++20 -fPIC -Wall -Wextra -DSDL_MAIN_HANDLED INCLUDES = -I./includes -I./runtime/Includes -I./runtime/Sources -I./third_party -CXXPCHFLAGS = +CXXPCHFLAGS = -xc++-header PCH = runtime/Includes/PreCompiled.h -GCH = runtime/Includes/PreCompiled.h.gch +GCH = NZSLC = nzslc ifeq ($(TOOLCHAIN), gcc) CXX = g++ + GCH = runtime/Includes/PreCompiled.h.gch CXXFLAGS += -Wno-error=cpp else + GCH = runtime/Includes/PreCompiled.h.pch CXXFLAGS += -Wno-error=#warning -include-pch $(GCH) - CXXPCHFLAGS = -xc++-header endif ifeq ($(OS), Darwin) diff --git a/runtime/Sources/Renderer/Memory.cpp b/runtime/Sources/Renderer/Memory.cpp index 4f16f5a..6243bab 100644 --- a/runtime/Sources/Renderer/Memory.cpp +++ b/runtime/Sources/Renderer/Memory.cpp @@ -1,5 +1,4 @@ #include - #define VMA_IMPLEMENTATION #ifdef MLX_COMPILER_CLANG #pragma clang diagnostic push From 841d140c694bc86ec3f7ca3ea1e7cc8dce0a7026 Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Wed, 4 Dec 2024 15:38:47 +0100 Subject: [PATCH 086/131] working on subpass dependencies --- Makefile | 2 +- runtime/Sources/Renderer/Descriptor.cpp | 3 -- .../Sources/Renderer/Pipelines/Graphics.cpp | 28 ++++++------------- 3 files changed, 10 insertions(+), 23 
deletions(-) diff --git a/Makefile b/Makefile index b08d23b..0f12752 100644 --- a/Makefile +++ b/Makefile @@ -36,7 +36,7 @@ CXXPCHFLAGS = -xc++-header PCH = runtime/Includes/PreCompiled.h GCH = -NZSLC = nzslc +NZRRC = nzslc ifeq ($(TOOLCHAIN), gcc) CXX = g++ diff --git a/runtime/Sources/Renderer/Descriptor.cpp b/runtime/Sources/Renderer/Descriptor.cpp index 0954bd0..6436770 100644 --- a/runtime/Sources/Renderer/Descriptor.cpp +++ b/runtime/Sources/Renderer/Descriptor.cpp @@ -119,10 +119,7 @@ namespace mlx return set == rhs_set; }); if(it == m_used_sets.end()) - { - Error("Vulkan: cannot return descriptor set to pool, invalid pool"); return; - } m_used_sets.erase(it); m_free_sets.push_back(set); } diff --git a/runtime/Sources/Renderer/Pipelines/Graphics.cpp b/runtime/Sources/Renderer/Pipelines/Graphics.cpp index 9036e09..d7fba28 100644 --- a/runtime/Sources/Renderer/Pipelines/Graphics.cpp +++ b/runtime/Sources/Renderer/Pipelines/Graphics.cpp @@ -178,27 +178,17 @@ namespace mlx { attachments.push_back(kvfBuildAttachmentDescription(KVF_IMAGE_COLOR, image->GetFormat(), image->GetLayout(), image->GetLayout(), clear_attachments, VK_SAMPLE_COUNT_1_BIT)); attachment_views.push_back(image->GetImageView()); -#if 0 - VkSubpassDependency& first_dependency = dependencies.emplace_back(); - first_dependency.srcSubpass = VK_SUBPASS_EXTERNAL; - first_dependency.dstSubpass = 0; - first_dependency.srcStageMask = VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT; - first_dependency.dstStageMask = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT; - first_dependency.srcAccessMask = VK_ACCESS_SHADER_READ_BIT; - first_dependency.dstAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT; - first_dependency.dependencyFlags = VK_DEPENDENCY_BY_REGION_BIT; - - VkSubpassDependency& second_dependency = dependencies.emplace_back(); - second_dependency.srcSubpass = 0; - second_dependency.dstSubpass = VK_SUBPASS_EXTERNAL; - second_dependency.srcStageMask = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT; - second_dependency.dstStageMask = VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT; - second_dependency.srcAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT; - second_dependency.dstAccessMask = VK_ACCESS_SHADER_READ_BIT; - second_dependency.dependencyFlags = VK_DEPENDENCY_BY_REGION_BIT; -#endif } + VkSubpassDependency& dependency = dependencies.emplace_back(); + dependency.srcSubpass = VK_SUBPASS_EXTERNAL; + dependency.dstSubpass = 0; + dependency.srcStageMask = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT; + dependency.dstStageMask = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT; + dependency.srcAccessMask = 0; + dependency.dstAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT; + dependency.dependencyFlags = 0; + m_renderpass = kvfCreateRenderPassWithSubpassDependencies(RenderCore::Get().GetDevice(), attachments.data(), attachments.size(), GetPipelineBindPoint(), dependencies.data(), dependencies.size()); m_clears.clear(); m_clears.resize(attachments.size()); From 9bb95be6798b5a22e7184128f2163e5a14d3f4b6 Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Wed, 4 Dec 2024 15:56:38 +0100 Subject: [PATCH 087/131] fetching deps --- third_party/vulkan/vulkan.cppm | 3320 +++++- third_party/vulkan/vulkan.hpp | 9740 ++--------------- third_party/vulkan/vulkan_core.h | 2238 ++-- third_party/vulkan/vulkan_enums.hpp | 1179 +- .../vulkan/vulkan_extension_inspection.hpp | 229 +- third_party/vulkan/vulkan_format_traits.hpp | 96 +- third_party/vulkan/vulkan_funcs.hpp | 4761 ++------ third_party/vulkan/vulkan_handles.hpp | 1581 +-- third_party/vulkan/vulkan_hash.hpp | 790 +- 
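The single subpass dependency added in PATCH 086 replaces the disabled #if 0 pair: it orders any earlier color-attachment output (VK_SUBPASS_EXTERNAL) before this pass's own color writes. Roughly the same ordering, expressed as a standalone barrier between two passes, would look like the sketch below (illustrative only, not code from the repository):

#include <vulkan/vulkan.h>

// Sketch: the external -> subpass 0 dependency above corresponds roughly to
// recording this barrier before beginning the render pass.
static void EquivalentBarrier(VkCommandBuffer cmd)
{
	VkMemoryBarrier barrier{};
	barrier.sType = VK_STRUCTURE_TYPE_MEMORY_BARRIER;
	barrier.srcAccessMask = 0;
	barrier.dstAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
	vkCmdPipelineBarrier(cmd,
		VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, // srcStageMask
		VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, // dstStageMask
		0,                                             // dependencyFlags
		1, &barrier, 0, nullptr, 0, nullptr);
}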
third_party/vulkan/vulkan_raii.hpp | 1027 +- .../vulkan/vulkan_static_assertions.hpp | 686 +- third_party/vulkan/vulkan_structs.hpp | 3683 ++++--- third_party/vulkan/vulkan_to_string.hpp | 839 +- third_party/vulkan/vulkan_video.hpp | 8 +- 14 files changed, 11395 insertions(+), 18782 deletions(-) diff --git a/third_party/vulkan/vulkan.cppm b/third_party/vulkan/vulkan.cppm index 3a04244..0ba8645 100644 --- a/third_party/vulkan/vulkan.cppm +++ b/third_party/vulkan/vulkan.cppm @@ -12,7 +12,7 @@ module; #include -#if defined( __cpp_lib_modules ) +#if defined( __cpp_lib_modules ) && !defined( VULKAN_HPP_ENABLE_STD_MODULE ) # define VULKAN_HPP_ENABLE_STD_MODULE #endif @@ -364,6 +364,34 @@ export namespace VULKAN_HPP_NAMESPACE using VULKAN_HPP_NAMESPACE::ToolPurposeFlags; using VULKAN_HPP_NAMESPACE::ToolPurposeFlagsEXT; + //=== VK_VERSION_1_4 === + using VULKAN_HPP_NAMESPACE::BufferUsageFlagBits2; + using VULKAN_HPP_NAMESPACE::BufferUsageFlagBits2KHR; + using VULKAN_HPP_NAMESPACE::BufferUsageFlags2; + using VULKAN_HPP_NAMESPACE::BufferUsageFlags2KHR; + using VULKAN_HPP_NAMESPACE::HostImageCopyFlagBits; + using VULKAN_HPP_NAMESPACE::HostImageCopyFlagBitsEXT; + using VULKAN_HPP_NAMESPACE::HostImageCopyFlags; + using VULKAN_HPP_NAMESPACE::HostImageCopyFlagsEXT; + using VULKAN_HPP_NAMESPACE::LineRasterizationMode; + using VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT; + using VULKAN_HPP_NAMESPACE::LineRasterizationModeKHR; + using VULKAN_HPP_NAMESPACE::MemoryUnmapFlagBits; + using VULKAN_HPP_NAMESPACE::MemoryUnmapFlagBitsKHR; + using VULKAN_HPP_NAMESPACE::MemoryUnmapFlags; + using VULKAN_HPP_NAMESPACE::MemoryUnmapFlagsKHR; + using VULKAN_HPP_NAMESPACE::PipelineCreateFlagBits2; + using VULKAN_HPP_NAMESPACE::PipelineCreateFlagBits2KHR; + using VULKAN_HPP_NAMESPACE::PipelineCreateFlags2; + using VULKAN_HPP_NAMESPACE::PipelineCreateFlags2KHR; + using VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior; + using VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT; + using VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehavior; + using VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehaviorEXT; + using VULKAN_HPP_NAMESPACE::QueueGlobalPriority; + using VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT; + using VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR; + //=== VK_KHR_surface === using VULKAN_HPP_NAMESPACE::ColorSpaceKHR; using VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR; @@ -505,10 +533,6 @@ export namespace VULKAN_HPP_NAMESPACE using VULKAN_HPP_NAMESPACE::ViSurfaceCreateFlagsNN; #endif /*VK_USE_PLATFORM_VI_NN*/ - //=== VK_EXT_pipeline_robustness === - using VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT; - using VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehaviorEXT; - //=== VK_EXT_conditional_rendering === using VULKAN_HPP_NAMESPACE::ConditionalRenderingFlagBitsEXT; using VULKAN_HPP_NAMESPACE::ConditionalRenderingFlagsEXT; @@ -630,10 +654,6 @@ export namespace VULKAN_HPP_NAMESPACE using VULKAN_HPP_NAMESPACE::PipelineCompilerControlFlagBitsAMD; using VULKAN_HPP_NAMESPACE::PipelineCompilerControlFlagsAMD; - //=== VK_KHR_global_priority === - using VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT; - using VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR; - //=== VK_AMD_memory_overallocation_behavior === using VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD; @@ -687,14 +707,6 @@ export namespace VULKAN_HPP_NAMESPACE //=== VK_KHR_pipeline_executable_properties === using VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR; - //=== VK_EXT_host_image_copy === - using 
VULKAN_HPP_NAMESPACE::HostImageCopyFlagBitsEXT; - using VULKAN_HPP_NAMESPACE::HostImageCopyFlagsEXT; - - //=== VK_KHR_map_memory2 === - using VULKAN_HPP_NAMESPACE::MemoryUnmapFlagBitsKHR; - using VULKAN_HPP_NAMESPACE::MemoryUnmapFlagsKHR; - //=== VK_EXT_surface_maintenance1 === using VULKAN_HPP_NAMESPACE::PresentGravityFlagBitsEXT; using VULKAN_HPP_NAMESPACE::PresentGravityFlagsEXT; @@ -841,12 +853,6 @@ export namespace VULKAN_HPP_NAMESPACE using VULKAN_HPP_NAMESPACE::OpticalFlowUsageFlagBitsNV; using VULKAN_HPP_NAMESPACE::OpticalFlowUsageFlagsNV; - //=== VK_KHR_maintenance5 === - using VULKAN_HPP_NAMESPACE::BufferUsageFlagBits2KHR; - using VULKAN_HPP_NAMESPACE::BufferUsageFlags2KHR; - using VULKAN_HPP_NAMESPACE::PipelineCreateFlagBits2KHR; - using VULKAN_HPP_NAMESPACE::PipelineCreateFlags2KHR; - //=== VK_AMD_anti_lag === using VULKAN_HPP_NAMESPACE::AntiLagModeAMD; using VULKAN_HPP_NAMESPACE::AntiLagStageAMD; @@ -893,20 +899,13 @@ export namespace VULKAN_HPP_NAMESPACE //=== VK_MSFT_layered_driver === using VULKAN_HPP_NAMESPACE::LayeredDriverUnderlyingApiMSFT; - //=== VK_KHR_line_rasterization === - using VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT; - using VULKAN_HPP_NAMESPACE::LineRasterizationModeKHR; - //=== VK_KHR_calibrated_timestamps === using VULKAN_HPP_NAMESPACE::TimeDomainEXT; using VULKAN_HPP_NAMESPACE::TimeDomainKHR; -<<<<<<< HEAD -======= //=== VK_NV_display_stereo === using VULKAN_HPP_NAMESPACE::DisplaySurfaceStereoTypeNV; ->>>>>>> indev //=== VK_KHR_maintenance7 === using VULKAN_HPP_NAMESPACE::PhysicalDeviceLayeredApiKHR; @@ -961,7 +960,7 @@ export namespace VULKAN_HPP_NAMESPACE using VULKAN_HPP_NAMESPACE::make_error_condition; using VULKAN_HPP_NAMESPACE::MemoryMapFailedError; using VULKAN_HPP_NAMESPACE::NativeWindowInUseKHRError; - using VULKAN_HPP_NAMESPACE::NotPermittedKHRError; + using VULKAN_HPP_NAMESPACE::NotPermittedError; using VULKAN_HPP_NAMESPACE::OutOfDateKHRError; using VULKAN_HPP_NAMESPACE::OutOfDeviceMemoryError; using VULKAN_HPP_NAMESPACE::OutOfHostMemoryError; @@ -1019,6 +1018,9 @@ export namespace VULKAN_HPP_NAMESPACE using VULKAN_HPP_NAMESPACE::MaxDriverInfoSize; using VULKAN_HPP_NAMESPACE::MaxDriverNameSize; + //=== VK_VERSION_1_4 === + using VULKAN_HPP_NAMESPACE::MaxGlobalPrioritySize; + //=== VK_KHR_surface === using VULKAN_HPP_NAMESPACE::KHRSurfaceExtensionName; using VULKAN_HPP_NAMESPACE::KHRSurfaceSpecVersion; @@ -2650,13 +2652,10 @@ export namespace VULKAN_HPP_NAMESPACE using VULKAN_HPP_NAMESPACE::NVCooperativeMatrix2ExtensionName; using VULKAN_HPP_NAMESPACE::NVCooperativeMatrix2SpecVersion; -<<<<<<< HEAD -======= //=== VK_EXT_vertex_attribute_robustness === using VULKAN_HPP_NAMESPACE::EXTVertexAttributeRobustnessExtensionName; using VULKAN_HPP_NAMESPACE::EXTVertexAttributeRobustnessSpecVersion; ->>>>>>> indev //======================== //=== CONSTEXPR VALUEs === //======================== @@ -2684,6 +2683,7 @@ export namespace VULKAN_HPP_NAMESPACE using VULKAN_HPP_NAMESPACE::ApiVersion11; using VULKAN_HPP_NAMESPACE::ApiVersion12; using VULKAN_HPP_NAMESPACE::ApiVersion13; + using VULKAN_HPP_NAMESPACE::ApiVersion14; using VULKAN_HPP_NAMESPACE::HeaderVersionComplete; //=============== @@ -3146,6 +3146,120 @@ export namespace VULKAN_HPP_NAMESPACE using VULKAN_HPP_NAMESPACE::WriteDescriptorSetInlineUniformBlock; using VULKAN_HPP_NAMESPACE::WriteDescriptorSetInlineUniformBlockEXT; + //=== VK_VERSION_1_4 === + using VULKAN_HPP_NAMESPACE::BindDescriptorSetsInfo; + using VULKAN_HPP_NAMESPACE::BindDescriptorSetsInfoKHR; + using 
VULKAN_HPP_NAMESPACE::BindMemoryStatus; + using VULKAN_HPP_NAMESPACE::BindMemoryStatusKHR; + using VULKAN_HPP_NAMESPACE::BufferUsageFlags2CreateInfo; + using VULKAN_HPP_NAMESPACE::BufferUsageFlags2CreateInfoKHR; + using VULKAN_HPP_NAMESPACE::CopyImageToImageInfo; + using VULKAN_HPP_NAMESPACE::CopyImageToImageInfoEXT; + using VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfo; + using VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfoEXT; + using VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfo; + using VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfoEXT; + using VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfo; + using VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfoKHR; + using VULKAN_HPP_NAMESPACE::DeviceQueueGlobalPriorityCreateInfo; + using VULKAN_HPP_NAMESPACE::DeviceQueueGlobalPriorityCreateInfoEXT; + using VULKAN_HPP_NAMESPACE::DeviceQueueGlobalPriorityCreateInfoKHR; + using VULKAN_HPP_NAMESPACE::HostImageCopyDevicePerformanceQuery; + using VULKAN_HPP_NAMESPACE::HostImageCopyDevicePerformanceQueryEXT; + using VULKAN_HPP_NAMESPACE::HostImageLayoutTransitionInfo; + using VULKAN_HPP_NAMESPACE::HostImageLayoutTransitionInfoEXT; + using VULKAN_HPP_NAMESPACE::ImageSubresource2; + using VULKAN_HPP_NAMESPACE::ImageSubresource2EXT; + using VULKAN_HPP_NAMESPACE::ImageSubresource2KHR; + using VULKAN_HPP_NAMESPACE::ImageToMemoryCopy; + using VULKAN_HPP_NAMESPACE::ImageToMemoryCopyEXT; + using VULKAN_HPP_NAMESPACE::MemoryMapInfo; + using VULKAN_HPP_NAMESPACE::MemoryMapInfoKHR; + using VULKAN_HPP_NAMESPACE::MemoryToImageCopy; + using VULKAN_HPP_NAMESPACE::MemoryToImageCopyEXT; + using VULKAN_HPP_NAMESPACE::MemoryUnmapInfo; + using VULKAN_HPP_NAMESPACE::MemoryUnmapInfoKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceDynamicRenderingLocalReadFeatures; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceDynamicRenderingLocalReadFeaturesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceGlobalPriorityQueryFeatures; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceGlobalPriorityQueryFeaturesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceGlobalPriorityQueryFeaturesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceHostImageCopyFeatures; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceHostImageCopyFeaturesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceHostImageCopyProperties; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceHostImageCopyPropertiesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceIndexTypeUint8Features; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceIndexTypeUint8FeaturesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceIndexTypeUint8FeaturesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationFeatures; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationFeaturesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationFeaturesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationProperties; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationPropertiesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationPropertiesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance5Features; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance5FeaturesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance5Properties; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance5PropertiesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance6Features; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance6FeaturesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance6Properties; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance6PropertiesKHR; + using 
VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineProtectedAccessFeatures; + using VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineProtectedAccessFeaturesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineRobustnessFeatures; + using VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineRobustnessFeaturesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineRobustnessProperties; + using VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineRobustnessPropertiesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDevicePushDescriptorProperties; + using VULKAN_HPP_NAMESPACE::PhysicalDevicePushDescriptorPropertiesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderExpectAssumeFeatures; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderExpectAssumeFeaturesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderFloatControls2Features; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderFloatControls2FeaturesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupRotateFeatures; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupRotateFeaturesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorFeatures; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorFeaturesEXT; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorFeaturesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorProperties; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorPropertiesKHR; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan14Features; + using VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan14Properties; + using VULKAN_HPP_NAMESPACE::PipelineCreateFlags2CreateInfo; + using VULKAN_HPP_NAMESPACE::PipelineCreateFlags2CreateInfoKHR; + using VULKAN_HPP_NAMESPACE::PipelineRasterizationLineStateCreateInfo; + using VULKAN_HPP_NAMESPACE::PipelineRasterizationLineStateCreateInfoEXT; + using VULKAN_HPP_NAMESPACE::PipelineRasterizationLineStateCreateInfoKHR; + using VULKAN_HPP_NAMESPACE::PipelineRobustnessCreateInfo; + using VULKAN_HPP_NAMESPACE::PipelineRobustnessCreateInfoEXT; + using VULKAN_HPP_NAMESPACE::PipelineVertexInputDivisorStateCreateInfo; + using VULKAN_HPP_NAMESPACE::PipelineVertexInputDivisorStateCreateInfoEXT; + using VULKAN_HPP_NAMESPACE::PipelineVertexInputDivisorStateCreateInfoKHR; + using VULKAN_HPP_NAMESPACE::PushConstantsInfo; + using VULKAN_HPP_NAMESPACE::PushConstantsInfoKHR; + using VULKAN_HPP_NAMESPACE::PushDescriptorSetInfo; + using VULKAN_HPP_NAMESPACE::PushDescriptorSetInfoKHR; + using VULKAN_HPP_NAMESPACE::PushDescriptorSetWithTemplateInfo; + using VULKAN_HPP_NAMESPACE::PushDescriptorSetWithTemplateInfoKHR; + using VULKAN_HPP_NAMESPACE::QueueFamilyGlobalPriorityProperties; + using VULKAN_HPP_NAMESPACE::QueueFamilyGlobalPriorityPropertiesEXT; + using VULKAN_HPP_NAMESPACE::QueueFamilyGlobalPriorityPropertiesKHR; + using VULKAN_HPP_NAMESPACE::RenderingAreaInfo; + using VULKAN_HPP_NAMESPACE::RenderingAreaInfoKHR; + using VULKAN_HPP_NAMESPACE::RenderingAttachmentLocationInfo; + using VULKAN_HPP_NAMESPACE::RenderingAttachmentLocationInfoKHR; + using VULKAN_HPP_NAMESPACE::RenderingInputAttachmentIndexInfo; + using VULKAN_HPP_NAMESPACE::RenderingInputAttachmentIndexInfoKHR; + using VULKAN_HPP_NAMESPACE::SubresourceHostMemcpySize; + using VULKAN_HPP_NAMESPACE::SubresourceHostMemcpySizeEXT; + using VULKAN_HPP_NAMESPACE::SubresourceLayout2; + using VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT; + using VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR; + using VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescription; + using 
VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT; + using VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionKHR; + //=== VK_KHR_surface === using VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR; using VULKAN_HPP_NAMESPACE::SurfaceFormatKHR; @@ -3341,11 +3455,6 @@ export namespace VULKAN_HPP_NAMESPACE using VULKAN_HPP_NAMESPACE::ImageViewASTCDecodeModeEXT; using VULKAN_HPP_NAMESPACE::PhysicalDeviceASTCDecodeFeaturesEXT; - //=== VK_EXT_pipeline_robustness === - using VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineRobustnessFeaturesEXT; - using VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineRobustnessPropertiesEXT; - using VULKAN_HPP_NAMESPACE::PipelineRobustnessCreateInfoEXT; - #if defined( VK_USE_PLATFORM_WIN32_KHR ) //=== VK_KHR_external_memory_win32 === using VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoKHR; @@ -3376,9 +3485,6 @@ export namespace VULKAN_HPP_NAMESPACE using VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR; using VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR; - //=== VK_KHR_push_descriptor === - using VULKAN_HPP_NAMESPACE::PhysicalDevicePushDescriptorPropertiesKHR; - //=== VK_EXT_conditional_rendering === using VULKAN_HPP_NAMESPACE::CommandBufferInheritanceConditionalRenderingInfoEXT; using VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT; @@ -3652,14 +3758,6 @@ export namespace VULKAN_HPP_NAMESPACE using VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersAddInfoKHR; using VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersCreateInfoKHR; - //=== VK_KHR_global_priority === - using VULKAN_HPP_NAMESPACE::DeviceQueueGlobalPriorityCreateInfoEXT; - using VULKAN_HPP_NAMESPACE::DeviceQueueGlobalPriorityCreateInfoKHR; - using VULKAN_HPP_NAMESPACE::PhysicalDeviceGlobalPriorityQueryFeaturesEXT; - using VULKAN_HPP_NAMESPACE::PhysicalDeviceGlobalPriorityQueryFeaturesKHR; - using VULKAN_HPP_NAMESPACE::QueueFamilyGlobalPriorityPropertiesEXT; - using VULKAN_HPP_NAMESPACE::QueueFamilyGlobalPriorityPropertiesKHR; - //=== VK_AMD_memory_overallocation_behavior === using VULKAN_HPP_NAMESPACE::DeviceMemoryOverallocationCreateInfoAMD; @@ -3740,11 +3838,6 @@ export namespace VULKAN_HPP_NAMESPACE //=== VK_AMD_device_coherent_memory === using VULKAN_HPP_NAMESPACE::PhysicalDeviceCoherentMemoryFeaturesAMD; - //=== VK_KHR_dynamic_rendering_local_read === - using VULKAN_HPP_NAMESPACE::PhysicalDeviceDynamicRenderingLocalReadFeaturesKHR; - using VULKAN_HPP_NAMESPACE::RenderingAttachmentLocationInfoKHR; - using VULKAN_HPP_NAMESPACE::RenderingInputAttachmentIndexInfoKHR; - //=== VK_EXT_shader_image_atomic_int64 === using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderImageAtomicInt64FeaturesEXT; @@ -3822,22 +3915,6 @@ export namespace VULKAN_HPP_NAMESPACE using VULKAN_HPP_NAMESPACE::PipelineInfoEXT; using VULKAN_HPP_NAMESPACE::PipelineInfoKHR; - //=== VK_EXT_host_image_copy === - using VULKAN_HPP_NAMESPACE::CopyImageToImageInfoEXT; - using VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfoEXT; - using VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfoEXT; - using VULKAN_HPP_NAMESPACE::HostImageCopyDevicePerformanceQueryEXT; - using VULKAN_HPP_NAMESPACE::HostImageLayoutTransitionInfoEXT; - using VULKAN_HPP_NAMESPACE::ImageToMemoryCopyEXT; - using VULKAN_HPP_NAMESPACE::MemoryToImageCopyEXT; - using VULKAN_HPP_NAMESPACE::PhysicalDeviceHostImageCopyFeaturesEXT; - using VULKAN_HPP_NAMESPACE::PhysicalDeviceHostImageCopyPropertiesEXT; - using VULKAN_HPP_NAMESPACE::SubresourceHostMemcpySizeEXT; - - //=== VK_KHR_map_memory2 === - using VULKAN_HPP_NAMESPACE::MemoryMapInfoKHR; - using 
VULKAN_HPP_NAMESPACE::MemoryUnmapInfoKHR; - //=== VK_EXT_map_memory_placed === using VULKAN_HPP_NAMESPACE::MemoryMapPlacedInfoEXT; using VULKAN_HPP_NAMESPACE::PhysicalDeviceMapMemoryPlacedFeaturesEXT; @@ -4199,9 +4276,6 @@ export namespace VULKAN_HPP_NAMESPACE //=== VK_ARM_shader_core_properties === using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCorePropertiesARM; - //=== VK_KHR_shader_subgroup_rotate === - using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupRotateFeaturesKHR; - //=== VK_ARM_scheduling_controls === using VULKAN_HPP_NAMESPACE::DeviceQueueShaderCoreControlCreateInfoARM; using VULKAN_HPP_NAMESPACE::PhysicalDeviceSchedulingControlsFeaturesARM; @@ -4312,9 +4386,6 @@ export namespace VULKAN_HPP_NAMESPACE //=== VK_EXT_legacy_dithering === using VULKAN_HPP_NAMESPACE::PhysicalDeviceLegacyDitheringFeaturesEXT; - //=== VK_EXT_pipeline_protected_access === - using VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineProtectedAccessFeaturesEXT; - #if defined( VK_USE_PLATFORM_ANDROID_KHR ) //=== VK_ANDROID_external_format_resolve === using VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatResolvePropertiesANDROID; @@ -4322,18 +4393,6 @@ export namespace VULKAN_HPP_NAMESPACE using VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFormatResolvePropertiesANDROID; #endif /*VK_USE_PLATFORM_ANDROID_KHR*/ - //=== VK_KHR_maintenance5 === - using VULKAN_HPP_NAMESPACE::BufferUsageFlags2CreateInfoKHR; - using VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfoKHR; - using VULKAN_HPP_NAMESPACE::ImageSubresource2EXT; - using VULKAN_HPP_NAMESPACE::ImageSubresource2KHR; - using VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance5FeaturesKHR; - using VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance5PropertiesKHR; - using VULKAN_HPP_NAMESPACE::PipelineCreateFlags2CreateInfoKHR; - using VULKAN_HPP_NAMESPACE::RenderingAreaInfoKHR; - using VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT; - using VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR; - //=== VK_AMD_anti_lag === using VULKAN_HPP_NAMESPACE::AntiLagDataAMD; using VULKAN_HPP_NAMESPACE::AntiLagPresentationInfoAMD; @@ -4480,18 +4539,6 @@ export namespace VULKAN_HPP_NAMESPACE //=== VK_EXT_attachment_feedback_loop_dynamic_state === using VULKAN_HPP_NAMESPACE::PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT; - //=== VK_KHR_vertex_attribute_divisor === - using VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorFeaturesEXT; - using VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorFeaturesKHR; - using VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorPropertiesKHR; - using VULKAN_HPP_NAMESPACE::PipelineVertexInputDivisorStateCreateInfoEXT; - using VULKAN_HPP_NAMESPACE::PipelineVertexInputDivisorStateCreateInfoKHR; - using VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT; - using VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionKHR; - - //=== VK_KHR_shader_float_controls2 === - using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderFloatControls2FeaturesKHR; - #if defined( VK_USE_PLATFORM_SCREEN_QNX ) //=== VK_QNX_external_memory_screen_buffer === using VULKAN_HPP_NAMESPACE::ExternalFormatQNX; @@ -4504,34 +4551,12 @@ export namespace VULKAN_HPP_NAMESPACE //=== VK_MSFT_layered_driver === using VULKAN_HPP_NAMESPACE::PhysicalDeviceLayeredDriverPropertiesMSFT; - //=== VK_KHR_index_type_uint8 === - using VULKAN_HPP_NAMESPACE::PhysicalDeviceIndexTypeUint8FeaturesEXT; - using VULKAN_HPP_NAMESPACE::PhysicalDeviceIndexTypeUint8FeaturesKHR; - - //=== VK_KHR_line_rasterization === - using 
VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationFeaturesEXT; - using VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationFeaturesKHR; - using VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationPropertiesEXT; - using VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationPropertiesKHR; - using VULKAN_HPP_NAMESPACE::PipelineRasterizationLineStateCreateInfoEXT; - using VULKAN_HPP_NAMESPACE::PipelineRasterizationLineStateCreateInfoKHR; - //=== VK_KHR_calibrated_timestamps === using VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT; using VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR; - //=== VK_KHR_shader_expect_assume === - using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderExpectAssumeFeaturesKHR; - //=== VK_KHR_maintenance6 === using VULKAN_HPP_NAMESPACE::BindDescriptorBufferEmbeddedSamplersInfoEXT; - using VULKAN_HPP_NAMESPACE::BindDescriptorSetsInfoKHR; - using VULKAN_HPP_NAMESPACE::BindMemoryStatusKHR; - using VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance6FeaturesKHR; - using VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance6PropertiesKHR; - using VULKAN_HPP_NAMESPACE::PushConstantsInfoKHR; - using VULKAN_HPP_NAMESPACE::PushDescriptorSetInfoKHR; - using VULKAN_HPP_NAMESPACE::PushDescriptorSetWithTemplateInfoKHR; using VULKAN_HPP_NAMESPACE::SetDescriptorBufferOffsetsInfoEXT; //=== VK_NV_descriptor_pool_overallocation === @@ -4622,12 +4647,9 @@ export namespace VULKAN_HPP_NAMESPACE using VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrix2FeaturesNV; using VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrix2PropertiesNV; -<<<<<<< HEAD -======= //=== VK_EXT_vertex_attribute_robustness === using VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeRobustnessFeaturesEXT; ->>>>>>> indev //=============== //=== HANDLEs === //=============== @@ -5152,3 +5174,3049 @@ export namespace VULKAN_HPP_NAMESPACE } // namespace VULKAN_HPP_RAII_NAMESPACE #endif } // namespace VULKAN_HPP_NAMESPACE + +export namespace std +{ + + //======================================= + //=== HASH specialization for Flags types === + //======================================= + + template + struct hash>; + + //======================================== + //=== HASH specializations for handles === + //======================================== + + //=== VK_VERSION_1_0 === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + + //=== VK_VERSION_1_1 === + template <> + struct hash; + template <> + struct hash; + + //=== VK_VERSION_1_3 === + template <> + struct hash; + + //=== VK_KHR_surface === + template <> + struct hash; + + //=== VK_KHR_swapchain === + template <> + struct hash; + + //=== VK_KHR_display === + template <> + struct hash; + template <> + struct hash; + + //=== VK_EXT_debug_report === + template <> + struct hash; + + //=== VK_KHR_video_queue === + template <> + struct hash; + template <> + struct hash; + + //=== VK_NVX_binary_import === + template 
<> + struct hash; + template <> + struct hash; + + //=== VK_EXT_debug_utils === + template <> + struct hash; + + //=== VK_KHR_acceleration_structure === + template <> + struct hash; + + //=== VK_EXT_validation_cache === + template <> + struct hash; + + //=== VK_NV_ray_tracing === + template <> + struct hash; + + //=== VK_INTEL_performance_query === + template <> + struct hash; + + //=== VK_KHR_deferred_host_operations === + template <> + struct hash; + + //=== VK_NV_device_generated_commands === + template <> + struct hash; + +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + //=== VK_NV_cuda_kernel_launch === + template <> + struct hash; + template <> + struct hash; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + +#if defined( VK_USE_PLATFORM_FUCHSIA ) + //=== VK_FUCHSIA_buffer_collection === + template <> + struct hash; +#endif /*VK_USE_PLATFORM_FUCHSIA*/ + + //=== VK_EXT_opacity_micromap === + template <> + struct hash; + + //=== VK_NV_optical_flow === + template <> + struct hash; + + //=== VK_EXT_shader_object === + template <> + struct hash; + + //=== VK_KHR_pipeline_binary === + template <> + struct hash; + + //=== VK_EXT_device_generated_commands === + template <> + struct hash; + template <> + struct hash; + + //======================================== + //=== HASH specializations for structs === + //======================================== + + //=== VK_VERSION_1_0 === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct 
hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + + //=== VK_VERSION_1_1 === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + + //=== VK_VERSION_1_2 === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template 
<> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + + //=== VK_VERSION_1_3 === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + + //=== VK_VERSION_1_4 === + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct hash; + template <> + struct 
hash<...>;
+
+  [vulkan_hash.hpp continues with forward declarations of the form `template <> struct hash<VULKAN_HPP_NAMESPACE::...>;`, one per Vulkan-Hpp handle and structure type, grouped under `//=== VK_... ===` banners for every extension from VK_KHR_surface through VK_EXT_vertex_attribute_robustness, with the platform-specific groups wrapped in the matching VK_USE_PLATFORM_* or VK_ENABLE_BETA_EXTENSIONS guards]
+
+} // namespace std
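[Note: these std::hash specializations exist so that Vulkan-Hpp wrapper types can be used as keys in std::unordered_map / std::unordered_set; vulkan_hash.hpp later defines each of the specializations it forward-declares here. A minimal sketch of the handle-type pattern, using vk::Fence purely as an illustrative example and not quoted from the vendored header:

#include <cstddef>
#include <functional>
#include <vulkan/vulkan.hpp>

namespace std
{
  // Hash a Vulkan-Hpp handle wrapper by hashing the underlying C handle it wraps.
  // This mirrors the pattern the vendored header uses; do not define it yourself
  // if you also include vulkan_hash.hpp, or the definitions will collide.
  template <>
  struct hash<VULKAN_HPP_NAMESPACE::Fence>
  {
    std::size_t operator()( VULKAN_HPP_NAMESPACE::Fence const & fence ) const VULKAN_HPP_NOEXCEPT
    {
      return std::hash<VkFence>{}( static_cast<VkFence>( fence ) );
    }
  };
} // namespace std
]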
diff --git a/third_party/vulkan/vulkan.hpp b/third_party/vulkan/vulkan.hpp
index 343bc8a..96c7cc8 100644
--- a/third_party/vulkan/vulkan.hpp
+++ b/third_party/vulkan/vulkan.hpp
[hunks @@ -22,30 +22,15 @@ and @@ -70,17 +55,15 @@ drop a duplicated copy of the standard-library #include block and of the VULKAN_HPP_DISABLE_ENHANCED_MODE / VULKAN_HPP_NO_EXCEPTIONS / VULKAN_HPP_HAS_SPACESHIP_OPERATOR / VULKAN_HPP_SUPPORT_SPAN include guards near the top of the header, keeping one copy of each, and bump the header version check:]
-static_assert( VK_HEADER_VERSION == 301, "Wrong VK_HEADER_VERSION!" );
+static_assert( VK_HEADER_VERSION == 303, "Wrong VK_HEADER_VERSION!" );
[hunks @@ -182,19 +165,6 @@ through @@ -710,8 +616,6 @@ drop duplicated copies of ArrayWrapper1D::copy(), of the ArrayWrapper1D relational operators (both the spaceship operator and the pre-C++20 <, <=, >, >=, ==, != overloads), of the std::string vs. char-array comparison operators, of several ArrayProxyNoTemporaries constructors, and of the StructureChain move-semantics comment]
[hunks @@ -1057,21 +961,11 @@ and @@ -1079,72 +973,40 @@ drop duplicated DispatchLoaderBase members inside namespace detail and widen the guard around the static dispatcher:]
-#if !defined( VK_NO_PROTOTYPES )
+#if !defined( VK_NO_PROTOTYPES ) || ( defined( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC ) && ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 0 ) )
   class DispatchLoaderStatic : public DispatchLoaderBase
   {
   public:
     //=== VK_VERSION_1_0 ===
[hunks from @@ -1152,16 +1014,6 @@ onwards drop duplicated DispatchLoaderStatic member definitions for the VK_VERSION_1_0 entry points, from vkCreateInstance and vkDestroyInstance down through vkCreateShaderModule, keeping a single definition of each; the removals continue below]
- void vkDestroyShaderModule( VkDevice
device, VkShaderModule shaderModule, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyShaderModule( device, shaderModule, pAllocator ); - } void vkDestroyShaderModule( VkDevice device, VkShaderModule shaderModule, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT { return ::vkDestroyShaderModule( device, shaderModule, pAllocator ); } - VkResult vkCreatePipelineCache( VkDevice device, - const VkPipelineCacheCreateInfo * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkPipelineCache * pPipelineCache ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreatePipelineCache( device, pCreateInfo, pAllocator, pPipelineCache ); - } VkResult vkCreatePipelineCache( VkDevice device, const VkPipelineCacheCreateInfo * pCreateInfo, const VkAllocationCallbacks * pAllocator, @@ -1811,44 +1372,22 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCreatePipelineCache( device, pCreateInfo, pAllocator, pPipelineCache ); } - void vkDestroyPipelineCache( VkDevice device, VkPipelineCache pipelineCache, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyPipelineCache( device, pipelineCache, pAllocator ); - } void vkDestroyPipelineCache( VkDevice device, VkPipelineCache pipelineCache, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT { return ::vkDestroyPipelineCache( device, pipelineCache, pAllocator ); } - VkResult vkGetPipelineCacheData( VkDevice device, VkPipelineCache pipelineCache, size_t * pDataSize, void * pData ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPipelineCacheData( device, pipelineCache, pDataSize, pData ); - } VkResult vkGetPipelineCacheData( VkDevice device, VkPipelineCache pipelineCache, size_t * pDataSize, void * pData ) const VULKAN_HPP_NOEXCEPT { return ::vkGetPipelineCacheData( device, pipelineCache, pDataSize, pData ); } - VkResult - vkMergePipelineCaches( VkDevice device, VkPipelineCache dstCache, uint32_t srcCacheCount, const VkPipelineCache * pSrcCaches ) const VULKAN_HPP_NOEXCEPT - { - return ::vkMergePipelineCaches( device, dstCache, srcCacheCount, pSrcCaches ); - } VkResult vkMergePipelineCaches( VkDevice device, VkPipelineCache dstCache, uint32_t srcCacheCount, const VkPipelineCache * pSrcCaches ) const VULKAN_HPP_NOEXCEPT { return ::vkMergePipelineCaches( device, dstCache, srcCacheCount, pSrcCaches ); } - VkResult vkCreateGraphicsPipelines( VkDevice device, - VkPipelineCache pipelineCache, - uint32_t createInfoCount, - const VkGraphicsPipelineCreateInfo * pCreateInfos, - const VkAllocationCallbacks * pAllocator, - VkPipeline * pPipelines ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateGraphicsPipelines( device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines ); - } VkResult vkCreateGraphicsPipelines( VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, @@ -1859,15 +1398,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCreateGraphicsPipelines( device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines ); } - VkResult vkCreateComputePipelines( VkDevice device, - VkPipelineCache pipelineCache, - uint32_t createInfoCount, - const VkComputePipelineCreateInfo * pCreateInfos, - const VkAllocationCallbacks * pAllocator, - VkPipeline * pPipelines ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateComputePipelines( device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines ); - } VkResult vkCreateComputePipelines( VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, @@ 
-1878,22 +1408,11 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCreateComputePipelines( device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines ); } - void vkDestroyPipeline( VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyPipeline( device, pipeline, pAllocator ); - } void vkDestroyPipeline( VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT { return ::vkDestroyPipeline( device, pipeline, pAllocator ); } - VkResult vkCreatePipelineLayout( VkDevice device, - const VkPipelineLayoutCreateInfo * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkPipelineLayout * pPipelineLayout ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreatePipelineLayout( device, pCreateInfo, pAllocator, pPipelineLayout ); - } VkResult vkCreatePipelineLayout( VkDevice device, const VkPipelineLayoutCreateInfo * pCreateInfo, const VkAllocationCallbacks * pAllocator, @@ -1902,22 +1421,11 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCreatePipelineLayout( device, pCreateInfo, pAllocator, pPipelineLayout ); } - void vkDestroyPipelineLayout( VkDevice device, VkPipelineLayout pipelineLayout, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyPipelineLayout( device, pipelineLayout, pAllocator ); - } void vkDestroyPipelineLayout( VkDevice device, VkPipelineLayout pipelineLayout, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT { return ::vkDestroyPipelineLayout( device, pipelineLayout, pAllocator ); } - VkResult vkCreateSampler( VkDevice device, - const VkSamplerCreateInfo * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkSampler * pSampler ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateSampler( device, pCreateInfo, pAllocator, pSampler ); - } VkResult vkCreateSampler( VkDevice device, const VkSamplerCreateInfo * pCreateInfo, const VkAllocationCallbacks * pAllocator, @@ -1926,22 +1434,11 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCreateSampler( device, pCreateInfo, pAllocator, pSampler ); } - void vkDestroySampler( VkDevice device, VkSampler sampler, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroySampler( device, sampler, pAllocator ); - } void vkDestroySampler( VkDevice device, VkSampler sampler, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT { return ::vkDestroySampler( device, sampler, pAllocator ); } - VkResult vkCreateDescriptorSetLayout( VkDevice device, - const VkDescriptorSetLayoutCreateInfo * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkDescriptorSetLayout * pSetLayout ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateDescriptorSetLayout( device, pCreateInfo, pAllocator, pSetLayout ); - } VkResult vkCreateDescriptorSetLayout( VkDevice device, const VkDescriptorSetLayoutCreateInfo * pCreateInfo, const VkAllocationCallbacks * pAllocator, @@ -1950,12 +1447,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCreateDescriptorSetLayout( device, pCreateInfo, pAllocator, pSetLayout ); } - void vkDestroyDescriptorSetLayout( VkDevice device, - VkDescriptorSetLayout descriptorSetLayout, - const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyDescriptorSetLayout( device, descriptorSetLayout, pAllocator ); - } void vkDestroyDescriptorSetLayout( VkDevice device, VkDescriptorSetLayout descriptorSetLayout, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT @@ 
-1963,13 +1454,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkDestroyDescriptorSetLayout( device, descriptorSetLayout, pAllocator ); } - VkResult vkCreateDescriptorPool( VkDevice device, - const VkDescriptorPoolCreateInfo * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkDescriptorPool * pDescriptorPool ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateDescriptorPool( device, pCreateInfo, pAllocator, pDescriptorPool ); - } VkResult vkCreateDescriptorPool( VkDevice device, const VkDescriptorPoolCreateInfo * pCreateInfo, const VkAllocationCallbacks * pAllocator, @@ -1978,30 +1462,16 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCreateDescriptorPool( device, pCreateInfo, pAllocator, pDescriptorPool ); } - void vkDestroyDescriptorPool( VkDevice device, VkDescriptorPool descriptorPool, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyDescriptorPool( device, descriptorPool, pAllocator ); - } void vkDestroyDescriptorPool( VkDevice device, VkDescriptorPool descriptorPool, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT { return ::vkDestroyDescriptorPool( device, descriptorPool, pAllocator ); } - VkResult vkResetDescriptorPool( VkDevice device, VkDescriptorPool descriptorPool, VkDescriptorPoolResetFlags flags ) const VULKAN_HPP_NOEXCEPT - { - return ::vkResetDescriptorPool( device, descriptorPool, flags ); - } VkResult vkResetDescriptorPool( VkDevice device, VkDescriptorPool descriptorPool, VkDescriptorPoolResetFlags flags ) const VULKAN_HPP_NOEXCEPT { return ::vkResetDescriptorPool( device, descriptorPool, flags ); } - VkResult vkAllocateDescriptorSets( VkDevice device, - const VkDescriptorSetAllocateInfo * pAllocateInfo, - VkDescriptorSet * pDescriptorSets ) const VULKAN_HPP_NOEXCEPT - { - return ::vkAllocateDescriptorSets( device, pAllocateInfo, pDescriptorSets ); - } VkResult vkAllocateDescriptorSets( VkDevice device, const VkDescriptorSetAllocateInfo * pAllocateInfo, VkDescriptorSet * pDescriptorSets ) const VULKAN_HPP_NOEXCEPT @@ -2009,13 +1479,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkAllocateDescriptorSets( device, pAllocateInfo, pDescriptorSets ); } - VkResult vkFreeDescriptorSets( VkDevice device, - VkDescriptorPool descriptorPool, - uint32_t descriptorSetCount, - const VkDescriptorSet * pDescriptorSets ) const VULKAN_HPP_NOEXCEPT - { - return ::vkFreeDescriptorSets( device, descriptorPool, descriptorSetCount, pDescriptorSets ); - } VkResult vkFreeDescriptorSets( VkDevice device, VkDescriptorPool descriptorPool, uint32_t descriptorSetCount, @@ -2024,14 +1487,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkFreeDescriptorSets( device, descriptorPool, descriptorSetCount, pDescriptorSets ); } - void vkUpdateDescriptorSets( VkDevice device, - uint32_t descriptorWriteCount, - const VkWriteDescriptorSet * pDescriptorWrites, - uint32_t descriptorCopyCount, - const VkCopyDescriptorSet * pDescriptorCopies ) const VULKAN_HPP_NOEXCEPT - { - return ::vkUpdateDescriptorSets( device, descriptorWriteCount, pDescriptorWrites, descriptorCopyCount, pDescriptorCopies ); - } void vkUpdateDescriptorSets( VkDevice device, uint32_t descriptorWriteCount, const VkWriteDescriptorSet * pDescriptorWrites, @@ -2041,13 +1496,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkUpdateDescriptorSets( device, descriptorWriteCount, pDescriptorWrites, descriptorCopyCount, pDescriptorCopies ); } - VkResult vkCreateFramebuffer( VkDevice device, - const VkFramebufferCreateInfo * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkFramebuffer * 
pFramebuffer ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateFramebuffer( device, pCreateInfo, pAllocator, pFramebuffer ); - } VkResult vkCreateFramebuffer( VkDevice device, const VkFramebufferCreateInfo * pCreateInfo, const VkAllocationCallbacks * pAllocator, @@ -2056,22 +1504,11 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCreateFramebuffer( device, pCreateInfo, pAllocator, pFramebuffer ); } - void vkDestroyFramebuffer( VkDevice device, VkFramebuffer framebuffer, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyFramebuffer( device, framebuffer, pAllocator ); - } void vkDestroyFramebuffer( VkDevice device, VkFramebuffer framebuffer, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT { return ::vkDestroyFramebuffer( device, framebuffer, pAllocator ); } - VkResult vkCreateRenderPass( VkDevice device, - const VkRenderPassCreateInfo * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkRenderPass * pRenderPass ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateRenderPass( device, pCreateInfo, pAllocator, pRenderPass ); - } VkResult vkCreateRenderPass( VkDevice device, const VkRenderPassCreateInfo * pCreateInfo, const VkAllocationCallbacks * pAllocator, @@ -2080,31 +1517,16 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCreateRenderPass( device, pCreateInfo, pAllocator, pRenderPass ); } - void vkDestroyRenderPass( VkDevice device, VkRenderPass renderPass, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyRenderPass( device, renderPass, pAllocator ); - } void vkDestroyRenderPass( VkDevice device, VkRenderPass renderPass, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT { return ::vkDestroyRenderPass( device, renderPass, pAllocator ); } - void vkGetRenderAreaGranularity( VkDevice device, VkRenderPass renderPass, VkExtent2D * pGranularity ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetRenderAreaGranularity( device, renderPass, pGranularity ); - } void vkGetRenderAreaGranularity( VkDevice device, VkRenderPass renderPass, VkExtent2D * pGranularity ) const VULKAN_HPP_NOEXCEPT { return ::vkGetRenderAreaGranularity( device, renderPass, pGranularity ); } - VkResult vkCreateCommandPool( VkDevice device, - const VkCommandPoolCreateInfo * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkCommandPool * pCommandPool ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateCommandPool( device, pCreateInfo, pAllocator, pCommandPool ); - } VkResult vkCreateCommandPool( VkDevice device, const VkCommandPoolCreateInfo * pCreateInfo, const VkAllocationCallbacks * pAllocator, @@ -2113,30 +1535,16 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCreateCommandPool( device, pCreateInfo, pAllocator, pCommandPool ); } - void vkDestroyCommandPool( VkDevice device, VkCommandPool commandPool, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyCommandPool( device, commandPool, pAllocator ); - } void vkDestroyCommandPool( VkDevice device, VkCommandPool commandPool, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT { return ::vkDestroyCommandPool( device, commandPool, pAllocator ); } - VkResult vkResetCommandPool( VkDevice device, VkCommandPool commandPool, VkCommandPoolResetFlags flags ) const VULKAN_HPP_NOEXCEPT - { - return ::vkResetCommandPool( device, commandPool, flags ); - } VkResult vkResetCommandPool( VkDevice device, VkCommandPool commandPool, VkCommandPoolResetFlags flags ) const VULKAN_HPP_NOEXCEPT { return 
::vkResetCommandPool( device, commandPool, flags ); } - VkResult vkAllocateCommandBuffers( VkDevice device, - const VkCommandBufferAllocateInfo * pAllocateInfo, - VkCommandBuffer * pCommandBuffers ) const VULKAN_HPP_NOEXCEPT - { - return ::vkAllocateCommandBuffers( device, pAllocateInfo, pCommandBuffers ); - } VkResult vkAllocateCommandBuffers( VkDevice device, const VkCommandBufferAllocateInfo * pAllocateInfo, VkCommandBuffer * pCommandBuffers ) const VULKAN_HPP_NOEXCEPT @@ -2144,13 +1552,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkAllocateCommandBuffers( device, pAllocateInfo, pCommandBuffers ); } - void vkFreeCommandBuffers( VkDevice device, - VkCommandPool commandPool, - uint32_t commandBufferCount, - const VkCommandBuffer * pCommandBuffers ) const VULKAN_HPP_NOEXCEPT - { - return ::vkFreeCommandBuffers( device, commandPool, commandBufferCount, pCommandBuffers ); - } void vkFreeCommandBuffers( VkDevice device, VkCommandPool commandPool, uint32_t commandBufferCount, @@ -2159,49 +1560,26 @@ namespace VULKAN_HPP_NAMESPACE return ::vkFreeCommandBuffers( device, commandPool, commandBufferCount, pCommandBuffers ); } - VkResult vkBeginCommandBuffer( VkCommandBuffer commandBuffer, const VkCommandBufferBeginInfo * pBeginInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkBeginCommandBuffer( commandBuffer, pBeginInfo ); - } VkResult vkBeginCommandBuffer( VkCommandBuffer commandBuffer, const VkCommandBufferBeginInfo * pBeginInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkBeginCommandBuffer( commandBuffer, pBeginInfo ); } - VkResult vkEndCommandBuffer( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT - { - return ::vkEndCommandBuffer( commandBuffer ); - } VkResult vkEndCommandBuffer( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT { return ::vkEndCommandBuffer( commandBuffer ); } - VkResult vkResetCommandBuffer( VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags ) const VULKAN_HPP_NOEXCEPT - { - return ::vkResetCommandBuffer( commandBuffer, flags ); - } VkResult vkResetCommandBuffer( VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags ) const VULKAN_HPP_NOEXCEPT { return ::vkResetCommandBuffer( commandBuffer, flags ); } - void vkCmdBindPipeline( VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBindPipeline( commandBuffer, pipelineBindPoint, pipeline ); - } void vkCmdBindPipeline( VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdBindPipeline( commandBuffer, pipelineBindPoint, pipeline ); } - void vkCmdSetViewport( VkCommandBuffer commandBuffer, - uint32_t firstViewport, - uint32_t viewportCount, - const VkViewport * pViewports ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetViewport( commandBuffer, firstViewport, viewportCount, pViewports ); - } void vkCmdSetViewport( VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, @@ -2210,31 +1588,16 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCmdSetViewport( commandBuffer, firstViewport, viewportCount, pViewports ); } - void vkCmdSetScissor( VkCommandBuffer commandBuffer, uint32_t firstScissor, uint32_t scissorCount, const VkRect2D * pScissors ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetScissor( commandBuffer, firstScissor, scissorCount, pScissors ); - } void vkCmdSetScissor( VkCommandBuffer commandBuffer, uint32_t firstScissor, uint32_t scissorCount, const VkRect2D * pScissors ) const VULKAN_HPP_NOEXCEPT { 
return ::vkCmdSetScissor( commandBuffer, firstScissor, scissorCount, pScissors ); } - void vkCmdSetLineWidth( VkCommandBuffer commandBuffer, float lineWidth ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetLineWidth( commandBuffer, lineWidth ); - } void vkCmdSetLineWidth( VkCommandBuffer commandBuffer, float lineWidth ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdSetLineWidth( commandBuffer, lineWidth ); } - void vkCmdSetDepthBias( VkCommandBuffer commandBuffer, - float depthBiasConstantFactor, - float depthBiasClamp, - float depthBiasSlopeFactor ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetDepthBias( commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor ); - } void vkCmdSetDepthBias( VkCommandBuffer commandBuffer, float depthBiasConstantFactor, float depthBiasClamp, @@ -2243,63 +1606,31 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCmdSetDepthBias( commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor ); } - void vkCmdSetBlendConstants( VkCommandBuffer commandBuffer, const float blendConstants[4] ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetBlendConstants( commandBuffer, blendConstants ); - } void vkCmdSetBlendConstants( VkCommandBuffer commandBuffer, const float blendConstants[4] ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdSetBlendConstants( commandBuffer, blendConstants ); } - void vkCmdSetDepthBounds( VkCommandBuffer commandBuffer, float minDepthBounds, float maxDepthBounds ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetDepthBounds( commandBuffer, minDepthBounds, maxDepthBounds ); - } void vkCmdSetDepthBounds( VkCommandBuffer commandBuffer, float minDepthBounds, float maxDepthBounds ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdSetDepthBounds( commandBuffer, minDepthBounds, maxDepthBounds ); } - void vkCmdSetStencilCompareMask( VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t compareMask ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetStencilCompareMask( commandBuffer, faceMask, compareMask ); - } void vkCmdSetStencilCompareMask( VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t compareMask ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdSetStencilCompareMask( commandBuffer, faceMask, compareMask ); } - void vkCmdSetStencilWriteMask( VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t writeMask ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetStencilWriteMask( commandBuffer, faceMask, writeMask ); - } void vkCmdSetStencilWriteMask( VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t writeMask ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdSetStencilWriteMask( commandBuffer, faceMask, writeMask ); } - void vkCmdSetStencilReference( VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t reference ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetStencilReference( commandBuffer, faceMask, reference ); - } void vkCmdSetStencilReference( VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t reference ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdSetStencilReference( commandBuffer, faceMask, reference ); } - void vkCmdBindDescriptorSets( VkCommandBuffer commandBuffer, - VkPipelineBindPoint pipelineBindPoint, - VkPipelineLayout layout, - uint32_t firstSet, - uint32_t descriptorSetCount, - const VkDescriptorSet * pDescriptorSets, - uint32_t dynamicOffsetCount, - const uint32_t * pDynamicOffsets ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBindDescriptorSets( - commandBuffer, pipelineBindPoint, layout, firstSet, descriptorSetCount, 
pDescriptorSets, dynamicOffsetCount, pDynamicOffsets ); - } void vkCmdBindDescriptorSets( VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, @@ -2313,23 +1644,11 @@ namespace VULKAN_HPP_NAMESPACE commandBuffer, pipelineBindPoint, layout, firstSet, descriptorSetCount, pDescriptorSets, dynamicOffsetCount, pDynamicOffsets ); } - void vkCmdBindIndexBuffer( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkIndexType indexType ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBindIndexBuffer( commandBuffer, buffer, offset, indexType ); - } void vkCmdBindIndexBuffer( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkIndexType indexType ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdBindIndexBuffer( commandBuffer, buffer, offset, indexType ); } - void vkCmdBindVertexBuffers( VkCommandBuffer commandBuffer, - uint32_t firstBinding, - uint32_t bindingCount, - const VkBuffer * pBuffers, - const VkDeviceSize * pOffsets ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBindVertexBuffers( commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets ); - } void vkCmdBindVertexBuffers( VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, @@ -2339,26 +1658,12 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCmdBindVertexBuffers( commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets ); } - void vkCmdDraw( VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance ) const - VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDraw( commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance ); - } void vkCmdDraw( VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdDraw( commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance ); } - void vkCmdDrawIndexed( VkCommandBuffer commandBuffer, - uint32_t indexCount, - uint32_t instanceCount, - uint32_t firstIndex, - int32_t vertexOffset, - uint32_t firstInstance ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDrawIndexed( commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance ); - } void vkCmdDrawIndexed( VkCommandBuffer commandBuffer, uint32_t indexCount, uint32_t instanceCount, @@ -2369,67 +1674,34 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCmdDrawIndexed( commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance ); } - void - vkCmdDrawIndirect( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDrawIndirect( commandBuffer, buffer, offset, drawCount, stride ); - } void vkCmdDrawIndirect( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdDrawIndirect( commandBuffer, buffer, offset, drawCount, stride ); } - void vkCmdDrawIndexedIndirect( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride ) const - VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDrawIndexedIndirect( commandBuffer, buffer, offset, drawCount, stride ); - } void vkCmdDrawIndexedIndirect( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdDrawIndexedIndirect( commandBuffer, buffer, offset, drawCount, stride ); } - void 
vkCmdDispatch( VkCommandBuffer commandBuffer, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDispatch( commandBuffer, groupCountX, groupCountY, groupCountZ ); - } void vkCmdDispatch( VkCommandBuffer commandBuffer, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdDispatch( commandBuffer, groupCountX, groupCountY, groupCountZ ); } - void vkCmdDispatchIndirect( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDispatchIndirect( commandBuffer, buffer, offset ); - } void vkCmdDispatchIndirect( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdDispatchIndirect( commandBuffer, buffer, offset ); } - void vkCmdCopyBuffer( VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferCopy * pRegions ) const - VULKAN_HPP_NOEXCEPT - { - return ::vkCmdCopyBuffer( commandBuffer, srcBuffer, dstBuffer, regionCount, pRegions ); - } void vkCmdCopyBuffer( VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferCopy * pRegions ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdCopyBuffer( commandBuffer, srcBuffer, dstBuffer, regionCount, pRegions ); } - void vkCmdCopyImage( VkCommandBuffer commandBuffer, - VkImage srcImage, - VkImageLayout srcImageLayout, - VkImage dstImage, - VkImageLayout dstImageLayout, - uint32_t regionCount, - const VkImageCopy * pRegions ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdCopyImage( commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions ); - } void vkCmdCopyImage( VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, @@ -2441,17 +1713,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCmdCopyImage( commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions ); } - void vkCmdBlitImage( VkCommandBuffer commandBuffer, - VkImage srcImage, - VkImageLayout srcImageLayout, - VkImage dstImage, - VkImageLayout dstImageLayout, - uint32_t regionCount, - const VkImageBlit * pRegions, - VkFilter filter ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBlitImage( commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions, filter ); - } void vkCmdBlitImage( VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, @@ -2464,15 +1725,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCmdBlitImage( commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions, filter ); } - void vkCmdCopyBufferToImage( VkCommandBuffer commandBuffer, - VkBuffer srcBuffer, - VkImage dstImage, - VkImageLayout dstImageLayout, - uint32_t regionCount, - const VkBufferImageCopy * pRegions ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdCopyBufferToImage( commandBuffer, srcBuffer, dstImage, dstImageLayout, regionCount, pRegions ); - } void vkCmdCopyBufferToImage( VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage, @@ -2483,15 +1735,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCmdCopyBufferToImage( commandBuffer, srcBuffer, dstImage, dstImageLayout, regionCount, pRegions ); } - void vkCmdCopyImageToBuffer( VkCommandBuffer commandBuffer, - VkImage srcImage, - VkImageLayout srcImageLayout, - VkBuffer dstBuffer, - uint32_t regionCount, - const VkBufferImageCopy * pRegions ) const VULKAN_HPP_NOEXCEPT - { - return 
::vkCmdCopyImageToBuffer( commandBuffer, srcImage, srcImageLayout, dstBuffer, regionCount, pRegions ); - } void vkCmdCopyImageToBuffer( VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, @@ -2502,37 +1745,18 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCmdCopyImageToBuffer( commandBuffer, srcImage, srcImageLayout, dstBuffer, regionCount, pRegions ); } - void vkCmdUpdateBuffer( VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize dataSize, const void * pData ) const - VULKAN_HPP_NOEXCEPT - { - return ::vkCmdUpdateBuffer( commandBuffer, dstBuffer, dstOffset, dataSize, pData ); - } void vkCmdUpdateBuffer( VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize dataSize, const void * pData ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdUpdateBuffer( commandBuffer, dstBuffer, dstOffset, dataSize, pData ); } - void - vkCmdFillBuffer( VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize size, uint32_t data ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdFillBuffer( commandBuffer, dstBuffer, dstOffset, size, data ); - } void vkCmdFillBuffer( VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize size, uint32_t data ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdFillBuffer( commandBuffer, dstBuffer, dstOffset, size, data ); } - void vkCmdClearColorImage( VkCommandBuffer commandBuffer, - VkImage image, - VkImageLayout imageLayout, - const VkClearColorValue * pColor, - uint32_t rangeCount, - const VkImageSubresourceRange * pRanges ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdClearColorImage( commandBuffer, image, imageLayout, pColor, rangeCount, pRanges ); - } void vkCmdClearColorImage( VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, @@ -2543,15 +1767,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCmdClearColorImage( commandBuffer, image, imageLayout, pColor, rangeCount, pRanges ); } - void vkCmdClearDepthStencilImage( VkCommandBuffer commandBuffer, - VkImage image, - VkImageLayout imageLayout, - const VkClearDepthStencilValue * pDepthStencil, - uint32_t rangeCount, - const VkImageSubresourceRange * pRanges ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdClearDepthStencilImage( commandBuffer, image, imageLayout, pDepthStencil, rangeCount, pRanges ); - } void vkCmdClearDepthStencilImage( VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, @@ -2562,14 +1777,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCmdClearDepthStencilImage( commandBuffer, image, imageLayout, pDepthStencil, rangeCount, pRanges ); } - void vkCmdClearAttachments( VkCommandBuffer commandBuffer, - uint32_t attachmentCount, - const VkClearAttachment * pAttachments, - uint32_t rectCount, - const VkClearRect * pRects ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdClearAttachments( commandBuffer, attachmentCount, pAttachments, rectCount, pRects ); - } void vkCmdClearAttachments( VkCommandBuffer commandBuffer, uint32_t attachmentCount, const VkClearAttachment * pAttachments, @@ -2579,16 +1786,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCmdClearAttachments( commandBuffer, attachmentCount, pAttachments, rectCount, pRects ); } - void vkCmdResolveImage( VkCommandBuffer commandBuffer, - VkImage srcImage, - VkImageLayout srcImageLayout, - VkImage dstImage, - VkImageLayout dstImageLayout, - uint32_t regionCount, - const VkImageResolve * pRegions ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdResolveImage( commandBuffer, srcImage, 
srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions ); - } void vkCmdResolveImage( VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, @@ -2600,48 +1797,16 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCmdResolveImage( commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions ); } - void vkCmdSetEvent( VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetEvent( commandBuffer, event, stageMask ); - } void vkCmdSetEvent( VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdSetEvent( commandBuffer, event, stageMask ); } - void vkCmdResetEvent( VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdResetEvent( commandBuffer, event, stageMask ); - } void vkCmdResetEvent( VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdResetEvent( commandBuffer, event, stageMask ); } - void vkCmdWaitEvents( VkCommandBuffer commandBuffer, - uint32_t eventCount, - const VkEvent * pEvents, - VkPipelineStageFlags srcStageMask, - VkPipelineStageFlags dstStageMask, - uint32_t memoryBarrierCount, - const VkMemoryBarrier * pMemoryBarriers, - uint32_t bufferMemoryBarrierCount, - const VkBufferMemoryBarrier * pBufferMemoryBarriers, - uint32_t imageMemoryBarrierCount, - const VkImageMemoryBarrier * pImageMemoryBarriers ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdWaitEvents( commandBuffer, - eventCount, - pEvents, - srcStageMask, - dstStageMask, - memoryBarrierCount, - pMemoryBarriers, - bufferMemoryBarrierCount, - pBufferMemoryBarriers, - imageMemoryBarrierCount, - pImageMemoryBarriers ); - } void vkCmdWaitEvents( VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent * pEvents, @@ -2667,28 +1832,6 @@ namespace VULKAN_HPP_NAMESPACE pImageMemoryBarriers ); } - void vkCmdPipelineBarrier( VkCommandBuffer commandBuffer, - VkPipelineStageFlags srcStageMask, - VkPipelineStageFlags dstStageMask, - VkDependencyFlags dependencyFlags, - uint32_t memoryBarrierCount, - const VkMemoryBarrier * pMemoryBarriers, - uint32_t bufferMemoryBarrierCount, - const VkBufferMemoryBarrier * pBufferMemoryBarriers, - uint32_t imageMemoryBarrierCount, - const VkImageMemoryBarrier * pImageMemoryBarriers ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdPipelineBarrier( commandBuffer, - srcStageMask, - dstStageMask, - dependencyFlags, - memoryBarrierCount, - pMemoryBarriers, - bufferMemoryBarrierCount, - pBufferMemoryBarriers, - imageMemoryBarrierCount, - pImageMemoryBarriers ); - } void vkCmdPipelineBarrier( VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, @@ -2712,40 +1855,21 @@ namespace VULKAN_HPP_NAMESPACE pImageMemoryBarriers ); } - void vkCmdBeginQuery( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBeginQuery( commandBuffer, queryPool, query, flags ); - } void vkCmdBeginQuery( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdBeginQuery( commandBuffer, queryPool, query, flags ); } - void vkCmdEndQuery( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdEndQuery( commandBuffer, 
queryPool, query ); - } void vkCmdEndQuery( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdEndQuery( commandBuffer, queryPool, query ); } - void vkCmdResetQueryPool( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdResetQueryPool( commandBuffer, queryPool, firstQuery, queryCount ); - } void vkCmdResetQueryPool( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdResetQueryPool( commandBuffer, queryPool, firstQuery, queryCount ); } - void vkCmdWriteTimestamp( VkCommandBuffer commandBuffer, - VkPipelineStageFlagBits pipelineStage, - VkQueryPool queryPool, - uint32_t query ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdWriteTimestamp( commandBuffer, pipelineStage, queryPool, query ); - } void vkCmdWriteTimestamp( VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage, VkQueryPool queryPool, @@ -2754,17 +1878,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCmdWriteTimestamp( commandBuffer, pipelineStage, queryPool, query ); } - void vkCmdCopyQueryPoolResults( VkCommandBuffer commandBuffer, - VkQueryPool queryPool, - uint32_t firstQuery, - uint32_t queryCount, - VkBuffer dstBuffer, - VkDeviceSize dstOffset, - VkDeviceSize stride, - VkQueryResultFlags flags ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdCopyQueryPoolResults( commandBuffer, queryPool, firstQuery, queryCount, dstBuffer, dstOffset, stride, flags ); - } void vkCmdCopyQueryPoolResults( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, @@ -2777,15 +1890,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCmdCopyQueryPoolResults( commandBuffer, queryPool, firstQuery, queryCount, dstBuffer, dstOffset, stride, flags ); } - void vkCmdPushConstants( VkCommandBuffer commandBuffer, - VkPipelineLayout layout, - VkShaderStageFlags stageFlags, - uint32_t offset, - uint32_t size, - const void * pValues ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdPushConstants( commandBuffer, layout, stageFlags, offset, size, pValues ); - } void vkCmdPushConstants( VkCommandBuffer commandBuffer, VkPipelineLayout layout, VkShaderStageFlags stageFlags, @@ -2796,12 +1900,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCmdPushConstants( commandBuffer, layout, stageFlags, offset, size, pValues ); } - void vkCmdBeginRenderPass( VkCommandBuffer commandBuffer, - const VkRenderPassBeginInfo * pRenderPassBegin, - VkSubpassContents contents ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBeginRenderPass( commandBuffer, pRenderPassBegin, contents ); - } void vkCmdBeginRenderPass( VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo * pRenderPassBegin, VkSubpassContents contents ) const VULKAN_HPP_NOEXCEPT @@ -2809,71 +1907,38 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCmdBeginRenderPass( commandBuffer, pRenderPassBegin, contents ); } - void vkCmdNextSubpass( VkCommandBuffer commandBuffer, VkSubpassContents contents ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdNextSubpass( commandBuffer, contents ); - } void vkCmdNextSubpass( VkCommandBuffer commandBuffer, VkSubpassContents contents ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdNextSubpass( commandBuffer, contents ); } - void vkCmdEndRenderPass( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdEndRenderPass( commandBuffer ); - } void vkCmdEndRenderPass( VkCommandBuffer commandBuffer ) const 
VULKAN_HPP_NOEXCEPT { return ::vkCmdEndRenderPass( commandBuffer ); } - void vkCmdExecuteCommands( VkCommandBuffer commandBuffer, uint32_t commandBufferCount, const VkCommandBuffer * pCommandBuffers ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdExecuteCommands( commandBuffer, commandBufferCount, pCommandBuffers ); - } void vkCmdExecuteCommands( VkCommandBuffer commandBuffer, uint32_t commandBufferCount, const VkCommandBuffer * pCommandBuffers ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdExecuteCommands( commandBuffer, commandBufferCount, pCommandBuffers ); } - //=== VK_VERSION_1_1 === //=== VK_VERSION_1_1 === - VkResult vkEnumerateInstanceVersion( uint32_t * pApiVersion ) const VULKAN_HPP_NOEXCEPT - { - return ::vkEnumerateInstanceVersion( pApiVersion ); - } VkResult vkEnumerateInstanceVersion( uint32_t * pApiVersion ) const VULKAN_HPP_NOEXCEPT { return ::vkEnumerateInstanceVersion( pApiVersion ); } - VkResult vkBindBufferMemory2( VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo * pBindInfos ) const VULKAN_HPP_NOEXCEPT - { - return ::vkBindBufferMemory2( device, bindInfoCount, pBindInfos ); - } VkResult vkBindBufferMemory2( VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo * pBindInfos ) const VULKAN_HPP_NOEXCEPT { return ::vkBindBufferMemory2( device, bindInfoCount, pBindInfos ); } - VkResult vkBindImageMemory2( VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo * pBindInfos ) const VULKAN_HPP_NOEXCEPT - { - return ::vkBindImageMemory2( device, bindInfoCount, pBindInfos ); - } VkResult vkBindImageMemory2( VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo * pBindInfos ) const VULKAN_HPP_NOEXCEPT { return ::vkBindImageMemory2( device, bindInfoCount, pBindInfos ); } - void vkGetDeviceGroupPeerMemoryFeatures( VkDevice device, - uint32_t heapIndex, - uint32_t localDeviceIndex, - uint32_t remoteDeviceIndex, - VkPeerMemoryFeatureFlags * pPeerMemoryFeatures ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDeviceGroupPeerMemoryFeatures( device, heapIndex, localDeviceIndex, remoteDeviceIndex, pPeerMemoryFeatures ); - } void vkGetDeviceGroupPeerMemoryFeatures( VkDevice device, uint32_t heapIndex, uint32_t localDeviceIndex, @@ -2883,25 +1948,11 @@ namespace VULKAN_HPP_NAMESPACE return ::vkGetDeviceGroupPeerMemoryFeatures( device, heapIndex, localDeviceIndex, remoteDeviceIndex, pPeerMemoryFeatures ); } - void vkCmdSetDeviceMask( VkCommandBuffer commandBuffer, uint32_t deviceMask ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetDeviceMask( commandBuffer, deviceMask ); - } void vkCmdSetDeviceMask( VkCommandBuffer commandBuffer, uint32_t deviceMask ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdSetDeviceMask( commandBuffer, deviceMask ); } - void vkCmdDispatchBase( VkCommandBuffer commandBuffer, - uint32_t baseGroupX, - uint32_t baseGroupY, - uint32_t baseGroupZ, - uint32_t groupCountX, - uint32_t groupCountY, - uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDispatchBase( commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ ); - } void vkCmdDispatchBase( VkCommandBuffer commandBuffer, uint32_t baseGroupX, uint32_t baseGroupY, @@ -2913,12 +1964,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCmdDispatchBase( commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ ); } - VkResult vkEnumeratePhysicalDeviceGroups( VkInstance instance, - uint32_t * pPhysicalDeviceGroupCount, - VkPhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties ) 
const VULKAN_HPP_NOEXCEPT - { - return ::vkEnumeratePhysicalDeviceGroups( instance, pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties ); - } VkResult vkEnumeratePhysicalDeviceGroups( VkInstance instance, uint32_t * pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties ) const VULKAN_HPP_NOEXCEPT @@ -2926,12 +1971,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkEnumeratePhysicalDeviceGroups( instance, pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties ); } - void vkGetImageMemoryRequirements2( VkDevice device, - const VkImageMemoryRequirementsInfo2 * pInfo, - VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetImageMemoryRequirements2( device, pInfo, pMemoryRequirements ); - } void vkGetImageMemoryRequirements2( VkDevice device, const VkImageMemoryRequirementsInfo2 * pInfo, VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT @@ -2939,12 +1978,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkGetImageMemoryRequirements2( device, pInfo, pMemoryRequirements ); } - void vkGetBufferMemoryRequirements2( VkDevice device, - const VkBufferMemoryRequirementsInfo2 * pInfo, - VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetBufferMemoryRequirements2( device, pInfo, pMemoryRequirements ); - } void vkGetBufferMemoryRequirements2( VkDevice device, const VkBufferMemoryRequirementsInfo2 * pInfo, VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT @@ -2952,13 +1985,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkGetBufferMemoryRequirements2( device, pInfo, pMemoryRequirements ); } - void vkGetImageSparseMemoryRequirements2( VkDevice device, - const VkImageSparseMemoryRequirementsInfo2 * pInfo, - uint32_t * pSparseMemoryRequirementCount, - VkSparseImageMemoryRequirements2 * pSparseMemoryRequirements ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetImageSparseMemoryRequirements2( device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements ); - } void vkGetImageSparseMemoryRequirements2( VkDevice device, const VkImageSparseMemoryRequirementsInfo2 * pInfo, uint32_t * pSparseMemoryRequirementCount, @@ -2967,30 +1993,16 @@ namespace VULKAN_HPP_NAMESPACE return ::vkGetImageSparseMemoryRequirements2( device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements ); } - void vkGetPhysicalDeviceFeatures2( VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2 * pFeatures ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceFeatures2( physicalDevice, pFeatures ); - } void vkGetPhysicalDeviceFeatures2( VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2 * pFeatures ) const VULKAN_HPP_NOEXCEPT { return ::vkGetPhysicalDeviceFeatures2( physicalDevice, pFeatures ); } - void vkGetPhysicalDeviceProperties2( VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2 * pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceProperties2( physicalDevice, pProperties ); - } void vkGetPhysicalDeviceProperties2( VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2 * pProperties ) const VULKAN_HPP_NOEXCEPT { return ::vkGetPhysicalDeviceProperties2( physicalDevice, pProperties ); } - void vkGetPhysicalDeviceFormatProperties2( VkPhysicalDevice physicalDevice, - VkFormat format, - VkFormatProperties2 * pFormatProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceFormatProperties2( physicalDevice, format, pFormatProperties ); - } void vkGetPhysicalDeviceFormatProperties2( 
VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties2 * pFormatProperties ) const VULKAN_HPP_NOEXCEPT @@ -2998,12 +2010,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkGetPhysicalDeviceFormatProperties2( physicalDevice, format, pFormatProperties ); } - VkResult vkGetPhysicalDeviceImageFormatProperties2( VkPhysicalDevice physicalDevice, - const VkPhysicalDeviceImageFormatInfo2 * pImageFormatInfo, - VkImageFormatProperties2 * pImageFormatProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceImageFormatProperties2( physicalDevice, pImageFormatInfo, pImageFormatProperties ); - } VkResult vkGetPhysicalDeviceImageFormatProperties2( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceImageFormatInfo2 * pImageFormatInfo, VkImageFormatProperties2 * pImageFormatProperties ) const VULKAN_HPP_NOEXCEPT @@ -3011,12 +2017,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkGetPhysicalDeviceImageFormatProperties2( physicalDevice, pImageFormatInfo, pImageFormatProperties ); } - void vkGetPhysicalDeviceQueueFamilyProperties2( VkPhysicalDevice physicalDevice, - uint32_t * pQueueFamilyPropertyCount, - VkQueueFamilyProperties2 * pQueueFamilyProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceQueueFamilyProperties2( physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties ); - } void vkGetPhysicalDeviceQueueFamilyProperties2( VkPhysicalDevice physicalDevice, uint32_t * pQueueFamilyPropertyCount, VkQueueFamilyProperties2 * pQueueFamilyProperties ) const VULKAN_HPP_NOEXCEPT @@ -3024,24 +2024,12 @@ namespace VULKAN_HPP_NAMESPACE return ::vkGetPhysicalDeviceQueueFamilyProperties2( physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties ); } - void vkGetPhysicalDeviceMemoryProperties2( VkPhysicalDevice physicalDevice, - VkPhysicalDeviceMemoryProperties2 * pMemoryProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceMemoryProperties2( physicalDevice, pMemoryProperties ); - } void vkGetPhysicalDeviceMemoryProperties2( VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties2 * pMemoryProperties ) const VULKAN_HPP_NOEXCEPT { return ::vkGetPhysicalDeviceMemoryProperties2( physicalDevice, pMemoryProperties ); } - void vkGetPhysicalDeviceSparseImageFormatProperties2( VkPhysicalDevice physicalDevice, - const VkPhysicalDeviceSparseImageFormatInfo2 * pFormatInfo, - uint32_t * pPropertyCount, - VkSparseImageFormatProperties2 * pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceSparseImageFormatProperties2( physicalDevice, pFormatInfo, pPropertyCount, pProperties ); - } void vkGetPhysicalDeviceSparseImageFormatProperties2( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSparseImageFormatInfo2 * pFormatInfo, uint32_t * pPropertyCount, @@ -3050,31 +2038,16 @@ namespace VULKAN_HPP_NAMESPACE return ::vkGetPhysicalDeviceSparseImageFormatProperties2( physicalDevice, pFormatInfo, pPropertyCount, pProperties ); } - void vkTrimCommandPool( VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags ) const VULKAN_HPP_NOEXCEPT - { - return ::vkTrimCommandPool( device, commandPool, flags ); - } void vkTrimCommandPool( VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags ) const VULKAN_HPP_NOEXCEPT { return ::vkTrimCommandPool( device, commandPool, flags ); } - void vkGetDeviceQueue2( VkDevice device, const VkDeviceQueueInfo2 * pQueueInfo, VkQueue * pQueue ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDeviceQueue2( device, pQueueInfo, pQueue ); - } void vkGetDeviceQueue2( 
VkDevice device, const VkDeviceQueueInfo2 * pQueueInfo, VkQueue * pQueue ) const VULKAN_HPP_NOEXCEPT { return ::vkGetDeviceQueue2( device, pQueueInfo, pQueue ); } - VkResult vkCreateSamplerYcbcrConversion( VkDevice device, - const VkSamplerYcbcrConversionCreateInfo * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkSamplerYcbcrConversion * pYcbcrConversion ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateSamplerYcbcrConversion( device, pCreateInfo, pAllocator, pYcbcrConversion ); - } VkResult vkCreateSamplerYcbcrConversion( VkDevice device, const VkSamplerYcbcrConversionCreateInfo * pCreateInfo, const VkAllocationCallbacks * pAllocator, @@ -3083,12 +2056,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCreateSamplerYcbcrConversion( device, pCreateInfo, pAllocator, pYcbcrConversion ); } - void vkDestroySamplerYcbcrConversion( VkDevice device, - VkSamplerYcbcrConversion ycbcrConversion, - const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroySamplerYcbcrConversion( device, ycbcrConversion, pAllocator ); - } void vkDestroySamplerYcbcrConversion( VkDevice device, VkSamplerYcbcrConversion ycbcrConversion, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT @@ -3096,13 +2063,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkDestroySamplerYcbcrConversion( device, ycbcrConversion, pAllocator ); } - VkResult vkCreateDescriptorUpdateTemplate( VkDevice device, - const VkDescriptorUpdateTemplateCreateInfo * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkDescriptorUpdateTemplate * pDescriptorUpdateTemplate ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateDescriptorUpdateTemplate( device, pCreateInfo, pAllocator, pDescriptorUpdateTemplate ); - } VkResult vkCreateDescriptorUpdateTemplate( VkDevice device, const VkDescriptorUpdateTemplateCreateInfo * pCreateInfo, const VkAllocationCallbacks * pAllocator, @@ -3111,12 +2071,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCreateDescriptorUpdateTemplate( device, pCreateInfo, pAllocator, pDescriptorUpdateTemplate ); } - void vkDestroyDescriptorUpdateTemplate( VkDevice device, - VkDescriptorUpdateTemplate descriptorUpdateTemplate, - const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyDescriptorUpdateTemplate( device, descriptorUpdateTemplate, pAllocator ); - } void vkDestroyDescriptorUpdateTemplate( VkDevice device, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT @@ -3124,13 +2078,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkDestroyDescriptorUpdateTemplate( device, descriptorUpdateTemplate, pAllocator ); } - void vkUpdateDescriptorSetWithTemplate( VkDevice device, - VkDescriptorSet descriptorSet, - VkDescriptorUpdateTemplate descriptorUpdateTemplate, - const void * pData ) const VULKAN_HPP_NOEXCEPT - { - return ::vkUpdateDescriptorSetWithTemplate( device, descriptorSet, descriptorUpdateTemplate, pData ); - } void vkUpdateDescriptorSetWithTemplate( VkDevice device, VkDescriptorSet descriptorSet, VkDescriptorUpdateTemplate descriptorUpdateTemplate, @@ -3139,12 +2086,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkUpdateDescriptorSetWithTemplate( device, descriptorSet, descriptorUpdateTemplate, pData ); } - void vkGetPhysicalDeviceExternalBufferProperties( VkPhysicalDevice physicalDevice, - const VkPhysicalDeviceExternalBufferInfo * pExternalBufferInfo, - VkExternalBufferProperties * pExternalBufferProperties ) const VULKAN_HPP_NOEXCEPT - { - return 
::vkGetPhysicalDeviceExternalBufferProperties( physicalDevice, pExternalBufferInfo, pExternalBufferProperties ); - } void vkGetPhysicalDeviceExternalBufferProperties( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalBufferInfo * pExternalBufferInfo, VkExternalBufferProperties * pExternalBufferProperties ) const VULKAN_HPP_NOEXCEPT @@ -3152,12 +2093,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkGetPhysicalDeviceExternalBufferProperties( physicalDevice, pExternalBufferInfo, pExternalBufferProperties ); } - void vkGetPhysicalDeviceExternalFenceProperties( VkPhysicalDevice physicalDevice, - const VkPhysicalDeviceExternalFenceInfo * pExternalFenceInfo, - VkExternalFenceProperties * pExternalFenceProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceExternalFenceProperties( physicalDevice, pExternalFenceInfo, pExternalFenceProperties ); - } void vkGetPhysicalDeviceExternalFenceProperties( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalFenceInfo * pExternalFenceInfo, VkExternalFenceProperties * pExternalFenceProperties ) const VULKAN_HPP_NOEXCEPT @@ -3165,12 +2100,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkGetPhysicalDeviceExternalFenceProperties( physicalDevice, pExternalFenceInfo, pExternalFenceProperties ); } - void vkGetPhysicalDeviceExternalSemaphoreProperties( VkPhysicalDevice physicalDevice, - const VkPhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo, - VkExternalSemaphoreProperties * pExternalSemaphoreProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceExternalSemaphoreProperties( physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties ); - } void vkGetPhysicalDeviceExternalSemaphoreProperties( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo, VkExternalSemaphoreProperties * pExternalSemaphoreProperties ) const VULKAN_HPP_NOEXCEPT @@ -3178,12 +2107,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkGetPhysicalDeviceExternalSemaphoreProperties( physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties ); } - void vkGetDescriptorSetLayoutSupport( VkDevice device, - const VkDescriptorSetLayoutCreateInfo * pCreateInfo, - VkDescriptorSetLayoutSupport * pSupport ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDescriptorSetLayoutSupport( device, pCreateInfo, pSupport ); - } void vkGetDescriptorSetLayoutSupport( VkDevice device, const VkDescriptorSetLayoutCreateInfo * pCreateInfo, VkDescriptorSetLayoutSupport * pSupport ) const VULKAN_HPP_NOEXCEPT @@ -3191,19 +2114,8 @@ namespace VULKAN_HPP_NAMESPACE return ::vkGetDescriptorSetLayoutSupport( device, pCreateInfo, pSupport ); } - //=== VK_VERSION_1_2 === //=== VK_VERSION_1_2 === - void vkCmdDrawIndirectCount( VkCommandBuffer commandBuffer, - VkBuffer buffer, - VkDeviceSize offset, - VkBuffer countBuffer, - VkDeviceSize countBufferOffset, - uint32_t maxDrawCount, - uint32_t stride ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDrawIndirectCount( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); - } void vkCmdDrawIndirectCount( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, @@ -3215,16 +2127,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCmdDrawIndirectCount( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); } - void vkCmdDrawIndexedIndirectCount( VkCommandBuffer commandBuffer, - VkBuffer buffer, - VkDeviceSize offset, - VkBuffer countBuffer, - VkDeviceSize countBufferOffset, - uint32_t 
maxDrawCount, - uint32_t stride ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDrawIndexedIndirectCount( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); - } void vkCmdDrawIndexedIndirectCount( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, @@ -3236,13 +2138,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCmdDrawIndexedIndirectCount( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); } - VkResult vkCreateRenderPass2( VkDevice device, - const VkRenderPassCreateInfo2 * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkRenderPass * pRenderPass ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateRenderPass2( device, pCreateInfo, pAllocator, pRenderPass ); - } VkResult vkCreateRenderPass2( VkDevice device, const VkRenderPassCreateInfo2 * pCreateInfo, const VkAllocationCallbacks * pAllocator, @@ -3251,12 +2146,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCreateRenderPass2( device, pCreateInfo, pAllocator, pRenderPass ); } - void vkCmdBeginRenderPass2( VkCommandBuffer commandBuffer, - const VkRenderPassBeginInfo * pRenderPassBegin, - const VkSubpassBeginInfo * pSubpassBeginInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBeginRenderPass2( commandBuffer, pRenderPassBegin, pSubpassBeginInfo ); - } void vkCmdBeginRenderPass2( VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo * pRenderPassBegin, const VkSubpassBeginInfo * pSubpassBeginInfo ) const VULKAN_HPP_NOEXCEPT @@ -3264,12 +2153,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCmdBeginRenderPass2( commandBuffer, pRenderPassBegin, pSubpassBeginInfo ); } - void vkCmdNextSubpass2( VkCommandBuffer commandBuffer, - const VkSubpassBeginInfo * pSubpassBeginInfo, - const VkSubpassEndInfo * pSubpassEndInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdNextSubpass2( commandBuffer, pSubpassBeginInfo, pSubpassEndInfo ); - } void vkCmdNextSubpass2( VkCommandBuffer commandBuffer, const VkSubpassBeginInfo * pSubpassBeginInfo, const VkSubpassEndInfo * pSubpassEndInfo ) const VULKAN_HPP_NOEXCEPT @@ -3277,87 +2160,48 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCmdNextSubpass2( commandBuffer, pSubpassBeginInfo, pSubpassEndInfo ); } - void vkCmdEndRenderPass2( VkCommandBuffer commandBuffer, const VkSubpassEndInfo * pSubpassEndInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdEndRenderPass2( commandBuffer, pSubpassEndInfo ); - } void vkCmdEndRenderPass2( VkCommandBuffer commandBuffer, const VkSubpassEndInfo * pSubpassEndInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdEndRenderPass2( commandBuffer, pSubpassEndInfo ); } - void vkResetQueryPool( VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT - { - return ::vkResetQueryPool( device, queryPool, firstQuery, queryCount ); - } void vkResetQueryPool( VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT { return ::vkResetQueryPool( device, queryPool, firstQuery, queryCount ); } - VkResult vkGetSemaphoreCounterValue( VkDevice device, VkSemaphore semaphore, uint64_t * pValue ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetSemaphoreCounterValue( device, semaphore, pValue ); - } VkResult vkGetSemaphoreCounterValue( VkDevice device, VkSemaphore semaphore, uint64_t * pValue ) const VULKAN_HPP_NOEXCEPT { return ::vkGetSemaphoreCounterValue( device, semaphore, pValue ); } - VkResult vkWaitSemaphores( VkDevice device, const VkSemaphoreWaitInfo * pWaitInfo, uint64_t timeout ) const 
VULKAN_HPP_NOEXCEPT - { - return ::vkWaitSemaphores( device, pWaitInfo, timeout ); - } VkResult vkWaitSemaphores( VkDevice device, const VkSemaphoreWaitInfo * pWaitInfo, uint64_t timeout ) const VULKAN_HPP_NOEXCEPT { return ::vkWaitSemaphores( device, pWaitInfo, timeout ); } - VkResult vkSignalSemaphore( VkDevice device, const VkSemaphoreSignalInfo * pSignalInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkSignalSemaphore( device, pSignalInfo ); - } VkResult vkSignalSemaphore( VkDevice device, const VkSemaphoreSignalInfo * pSignalInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkSignalSemaphore( device, pSignalInfo ); } - VkDeviceAddress vkGetBufferDeviceAddress( VkDevice device, const VkBufferDeviceAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetBufferDeviceAddress( device, pInfo ); - } VkDeviceAddress vkGetBufferDeviceAddress( VkDevice device, const VkBufferDeviceAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkGetBufferDeviceAddress( device, pInfo ); } - uint64_t vkGetBufferOpaqueCaptureAddress( VkDevice device, const VkBufferDeviceAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetBufferOpaqueCaptureAddress( device, pInfo ); - } uint64_t vkGetBufferOpaqueCaptureAddress( VkDevice device, const VkBufferDeviceAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkGetBufferOpaqueCaptureAddress( device, pInfo ); } - uint64_t vkGetDeviceMemoryOpaqueCaptureAddress( VkDevice device, const VkDeviceMemoryOpaqueCaptureAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDeviceMemoryOpaqueCaptureAddress( device, pInfo ); - } uint64_t vkGetDeviceMemoryOpaqueCaptureAddress( VkDevice device, const VkDeviceMemoryOpaqueCaptureAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkGetDeviceMemoryOpaqueCaptureAddress( device, pInfo ); } - //=== VK_VERSION_1_3 === //=== VK_VERSION_1_3 === - VkResult vkGetPhysicalDeviceToolProperties( VkPhysicalDevice physicalDevice, - uint32_t * pToolCount, - VkPhysicalDeviceToolProperties * pToolProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceToolProperties( physicalDevice, pToolCount, pToolProperties ); - } VkResult vkGetPhysicalDeviceToolProperties( VkPhysicalDevice physicalDevice, uint32_t * pToolCount, VkPhysicalDeviceToolProperties * pToolProperties ) const VULKAN_HPP_NOEXCEPT @@ -3365,13 +2209,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkGetPhysicalDeviceToolProperties( physicalDevice, pToolCount, pToolProperties ); } - VkResult vkCreatePrivateDataSlot( VkDevice device, - const VkPrivateDataSlotCreateInfo * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkPrivateDataSlot * pPrivateDataSlot ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreatePrivateDataSlot( device, pCreateInfo, pAllocator, pPrivateDataSlot ); - } VkResult vkCreatePrivateDataSlot( VkDevice device, const VkPrivateDataSlotCreateInfo * pCreateInfo, const VkAllocationCallbacks * pAllocator, @@ -3380,62 +2217,33 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCreatePrivateDataSlot( device, pCreateInfo, pAllocator, pPrivateDataSlot ); } - void vkDestroyPrivateDataSlot( VkDevice device, VkPrivateDataSlot privateDataSlot, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyPrivateDataSlot( device, privateDataSlot, pAllocator ); - } void vkDestroyPrivateDataSlot( VkDevice device, VkPrivateDataSlot privateDataSlot, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT { return ::vkDestroyPrivateDataSlot( device, privateDataSlot, pAllocator ); } - 
VkResult vkSetPrivateData( VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlot privateDataSlot, uint64_t data ) const - VULKAN_HPP_NOEXCEPT - { - return ::vkSetPrivateData( device, objectType, objectHandle, privateDataSlot, data ); - } VkResult vkSetPrivateData( VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlot privateDataSlot, uint64_t data ) const VULKAN_HPP_NOEXCEPT { return ::vkSetPrivateData( device, objectType, objectHandle, privateDataSlot, data ); } - void vkGetPrivateData( VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlot privateDataSlot, uint64_t * pData ) const - VULKAN_HPP_NOEXCEPT - { - return ::vkGetPrivateData( device, objectType, objectHandle, privateDataSlot, pData ); - } void vkGetPrivateData( VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlot privateDataSlot, uint64_t * pData ) const VULKAN_HPP_NOEXCEPT { return ::vkGetPrivateData( device, objectType, objectHandle, privateDataSlot, pData ); } - void vkCmdSetEvent2( VkCommandBuffer commandBuffer, VkEvent event, const VkDependencyInfo * pDependencyInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetEvent2( commandBuffer, event, pDependencyInfo ); - } void vkCmdSetEvent2( VkCommandBuffer commandBuffer, VkEvent event, const VkDependencyInfo * pDependencyInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdSetEvent2( commandBuffer, event, pDependencyInfo ); } - void vkCmdResetEvent2( VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags2 stageMask ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdResetEvent2( commandBuffer, event, stageMask ); - } void vkCmdResetEvent2( VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags2 stageMask ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdResetEvent2( commandBuffer, event, stageMask ); } - void vkCmdWaitEvents2( VkCommandBuffer commandBuffer, - uint32_t eventCount, - const VkEvent * pEvents, - const VkDependencyInfo * pDependencyInfos ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdWaitEvents2( commandBuffer, eventCount, pEvents, pDependencyInfos ); - } void vkCmdWaitEvents2( VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent * pEvents, @@ -3444,160 +2252,86 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCmdWaitEvents2( commandBuffer, eventCount, pEvents, pDependencyInfos ); } - void vkCmdPipelineBarrier2( VkCommandBuffer commandBuffer, const VkDependencyInfo * pDependencyInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdPipelineBarrier2( commandBuffer, pDependencyInfo ); - } void vkCmdPipelineBarrier2( VkCommandBuffer commandBuffer, const VkDependencyInfo * pDependencyInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdPipelineBarrier2( commandBuffer, pDependencyInfo ); } - void vkCmdWriteTimestamp2( VkCommandBuffer commandBuffer, VkPipelineStageFlags2 stage, VkQueryPool queryPool, uint32_t query ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdWriteTimestamp2( commandBuffer, stage, queryPool, query ); - } void vkCmdWriteTimestamp2( VkCommandBuffer commandBuffer, VkPipelineStageFlags2 stage, VkQueryPool queryPool, uint32_t query ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdWriteTimestamp2( commandBuffer, stage, queryPool, query ); } - VkResult vkQueueSubmit2( VkQueue queue, uint32_t submitCount, const VkSubmitInfo2 * pSubmits, VkFence fence ) const VULKAN_HPP_NOEXCEPT - { - return ::vkQueueSubmit2( queue, submitCount, pSubmits, fence ); - } VkResult vkQueueSubmit2( VkQueue queue, uint32_t submitCount, const VkSubmitInfo2 * 
pSubmits, VkFence fence ) const VULKAN_HPP_NOEXCEPT { return ::vkQueueSubmit2( queue, submitCount, pSubmits, fence ); } - void vkCmdCopyBuffer2( VkCommandBuffer commandBuffer, const VkCopyBufferInfo2 * pCopyBufferInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdCopyBuffer2( commandBuffer, pCopyBufferInfo ); - } void vkCmdCopyBuffer2( VkCommandBuffer commandBuffer, const VkCopyBufferInfo2 * pCopyBufferInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdCopyBuffer2( commandBuffer, pCopyBufferInfo ); } - void vkCmdCopyImage2( VkCommandBuffer commandBuffer, const VkCopyImageInfo2 * pCopyImageInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdCopyImage2( commandBuffer, pCopyImageInfo ); - } void vkCmdCopyImage2( VkCommandBuffer commandBuffer, const VkCopyImageInfo2 * pCopyImageInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdCopyImage2( commandBuffer, pCopyImageInfo ); } - void vkCmdCopyBufferToImage2( VkCommandBuffer commandBuffer, const VkCopyBufferToImageInfo2 * pCopyBufferToImageInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdCopyBufferToImage2( commandBuffer, pCopyBufferToImageInfo ); - } void vkCmdCopyBufferToImage2( VkCommandBuffer commandBuffer, const VkCopyBufferToImageInfo2 * pCopyBufferToImageInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdCopyBufferToImage2( commandBuffer, pCopyBufferToImageInfo ); } - void vkCmdCopyImageToBuffer2( VkCommandBuffer commandBuffer, const VkCopyImageToBufferInfo2 * pCopyImageToBufferInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdCopyImageToBuffer2( commandBuffer, pCopyImageToBufferInfo ); - } void vkCmdCopyImageToBuffer2( VkCommandBuffer commandBuffer, const VkCopyImageToBufferInfo2 * pCopyImageToBufferInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdCopyImageToBuffer2( commandBuffer, pCopyImageToBufferInfo ); } - void vkCmdBlitImage2( VkCommandBuffer commandBuffer, const VkBlitImageInfo2 * pBlitImageInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBlitImage2( commandBuffer, pBlitImageInfo ); - } void vkCmdBlitImage2( VkCommandBuffer commandBuffer, const VkBlitImageInfo2 * pBlitImageInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdBlitImage2( commandBuffer, pBlitImageInfo ); } - void vkCmdResolveImage2( VkCommandBuffer commandBuffer, const VkResolveImageInfo2 * pResolveImageInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdResolveImage2( commandBuffer, pResolveImageInfo ); - } void vkCmdResolveImage2( VkCommandBuffer commandBuffer, const VkResolveImageInfo2 * pResolveImageInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdResolveImage2( commandBuffer, pResolveImageInfo ); } - void vkCmdBeginRendering( VkCommandBuffer commandBuffer, const VkRenderingInfo * pRenderingInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBeginRendering( commandBuffer, pRenderingInfo ); - } void vkCmdBeginRendering( VkCommandBuffer commandBuffer, const VkRenderingInfo * pRenderingInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdBeginRendering( commandBuffer, pRenderingInfo ); } - void vkCmdEndRendering( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdEndRendering( commandBuffer ); - } void vkCmdEndRendering( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdEndRendering( commandBuffer ); } - void vkCmdSetCullMode( VkCommandBuffer commandBuffer, VkCullModeFlags cullMode ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetCullMode( commandBuffer, cullMode ); - } void vkCmdSetCullMode( VkCommandBuffer commandBuffer, VkCullModeFlags cullMode ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdSetCullMode( 
commandBuffer, cullMode ); } - void vkCmdSetFrontFace( VkCommandBuffer commandBuffer, VkFrontFace frontFace ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetFrontFace( commandBuffer, frontFace ); - } void vkCmdSetFrontFace( VkCommandBuffer commandBuffer, VkFrontFace frontFace ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdSetFrontFace( commandBuffer, frontFace ); } - void vkCmdSetPrimitiveTopology( VkCommandBuffer commandBuffer, VkPrimitiveTopology primitiveTopology ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetPrimitiveTopology( commandBuffer, primitiveTopology ); - } void vkCmdSetPrimitiveTopology( VkCommandBuffer commandBuffer, VkPrimitiveTopology primitiveTopology ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdSetPrimitiveTopology( commandBuffer, primitiveTopology ); } - void vkCmdSetViewportWithCount( VkCommandBuffer commandBuffer, uint32_t viewportCount, const VkViewport * pViewports ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetViewportWithCount( commandBuffer, viewportCount, pViewports ); - } void vkCmdSetViewportWithCount( VkCommandBuffer commandBuffer, uint32_t viewportCount, const VkViewport * pViewports ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdSetViewportWithCount( commandBuffer, viewportCount, pViewports ); } - void vkCmdSetScissorWithCount( VkCommandBuffer commandBuffer, uint32_t scissorCount, const VkRect2D * pScissors ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetScissorWithCount( commandBuffer, scissorCount, pScissors ); - } void vkCmdSetScissorWithCount( VkCommandBuffer commandBuffer, uint32_t scissorCount, const VkRect2D * pScissors ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdSetScissorWithCount( commandBuffer, scissorCount, pScissors ); } - void vkCmdBindVertexBuffers2( VkCommandBuffer commandBuffer, - uint32_t firstBinding, - uint32_t bindingCount, - const VkBuffer * pBuffers, - const VkDeviceSize * pOffsets, - const VkDeviceSize * pSizes, - const VkDeviceSize * pStrides ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBindVertexBuffers2( commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets, pSizes, pStrides ); - } void vkCmdBindVertexBuffers2( VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, @@ -3609,60 +2343,31 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCmdBindVertexBuffers2( commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets, pSizes, pStrides ); } - void vkCmdSetDepthTestEnable( VkCommandBuffer commandBuffer, VkBool32 depthTestEnable ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetDepthTestEnable( commandBuffer, depthTestEnable ); - } void vkCmdSetDepthTestEnable( VkCommandBuffer commandBuffer, VkBool32 depthTestEnable ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdSetDepthTestEnable( commandBuffer, depthTestEnable ); } - void vkCmdSetDepthWriteEnable( VkCommandBuffer commandBuffer, VkBool32 depthWriteEnable ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetDepthWriteEnable( commandBuffer, depthWriteEnable ); - } void vkCmdSetDepthWriteEnable( VkCommandBuffer commandBuffer, VkBool32 depthWriteEnable ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdSetDepthWriteEnable( commandBuffer, depthWriteEnable ); } - void vkCmdSetDepthCompareOp( VkCommandBuffer commandBuffer, VkCompareOp depthCompareOp ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetDepthCompareOp( commandBuffer, depthCompareOp ); - } void vkCmdSetDepthCompareOp( VkCommandBuffer commandBuffer, VkCompareOp depthCompareOp ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdSetDepthCompareOp( commandBuffer, depthCompareOp ); } - void 
vkCmdSetDepthBoundsTestEnable( VkCommandBuffer commandBuffer, VkBool32 depthBoundsTestEnable ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetDepthBoundsTestEnable( commandBuffer, depthBoundsTestEnable ); - } void vkCmdSetDepthBoundsTestEnable( VkCommandBuffer commandBuffer, VkBool32 depthBoundsTestEnable ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdSetDepthBoundsTestEnable( commandBuffer, depthBoundsTestEnable ); } - void vkCmdSetStencilTestEnable( VkCommandBuffer commandBuffer, VkBool32 stencilTestEnable ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetStencilTestEnable( commandBuffer, stencilTestEnable ); - } void vkCmdSetStencilTestEnable( VkCommandBuffer commandBuffer, VkBool32 stencilTestEnable ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdSetStencilTestEnable( commandBuffer, stencilTestEnable ); } - void vkCmdSetStencilOp( VkCommandBuffer commandBuffer, - VkStencilFaceFlags faceMask, - VkStencilOp failOp, - VkStencilOp passOp, - VkStencilOp depthFailOp, - VkCompareOp compareOp ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetStencilOp( commandBuffer, faceMask, failOp, passOp, depthFailOp, compareOp ); - } void vkCmdSetStencilOp( VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, VkStencilOp failOp, @@ -3673,39 +2378,21 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCmdSetStencilOp( commandBuffer, faceMask, failOp, passOp, depthFailOp, compareOp ); } - void vkCmdSetRasterizerDiscardEnable( VkCommandBuffer commandBuffer, VkBool32 rasterizerDiscardEnable ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetRasterizerDiscardEnable( commandBuffer, rasterizerDiscardEnable ); - } void vkCmdSetRasterizerDiscardEnable( VkCommandBuffer commandBuffer, VkBool32 rasterizerDiscardEnable ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdSetRasterizerDiscardEnable( commandBuffer, rasterizerDiscardEnable ); } - void vkCmdSetDepthBiasEnable( VkCommandBuffer commandBuffer, VkBool32 depthBiasEnable ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetDepthBiasEnable( commandBuffer, depthBiasEnable ); - } void vkCmdSetDepthBiasEnable( VkCommandBuffer commandBuffer, VkBool32 depthBiasEnable ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdSetDepthBiasEnable( commandBuffer, depthBiasEnable ); } - void vkCmdSetPrimitiveRestartEnable( VkCommandBuffer commandBuffer, VkBool32 primitiveRestartEnable ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetPrimitiveRestartEnable( commandBuffer, primitiveRestartEnable ); - } void vkCmdSetPrimitiveRestartEnable( VkCommandBuffer commandBuffer, VkBool32 primitiveRestartEnable ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdSetPrimitiveRestartEnable( commandBuffer, primitiveRestartEnable ); } - void vkGetDeviceBufferMemoryRequirements( VkDevice device, - const VkDeviceBufferMemoryRequirements * pInfo, - VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDeviceBufferMemoryRequirements( device, pInfo, pMemoryRequirements ); - } void vkGetDeviceBufferMemoryRequirements( VkDevice device, const VkDeviceBufferMemoryRequirements * pInfo, VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT @@ -3713,12 +2400,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkGetDeviceBufferMemoryRequirements( device, pInfo, pMemoryRequirements ); } - void vkGetDeviceImageMemoryRequirements( VkDevice device, - const VkDeviceImageMemoryRequirements * pInfo, - VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDeviceImageMemoryRequirements( device, pInfo, pMemoryRequirements ); - } void 
vkGetDeviceImageMemoryRequirements( VkDevice device, const VkDeviceImageMemoryRequirements * pInfo, VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT @@ -3733,33 +2414,130 @@ namespace VULKAN_HPP_NAMESPACE { return ::vkGetDeviceImageSparseMemoryRequirements( device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements ); } - void vkGetDeviceImageSparseMemoryRequirements( VkDevice device, - const VkDeviceImageMemoryRequirements * pInfo, - uint32_t * pSparseMemoryRequirementCount, - VkSparseImageMemoryRequirements2 * pSparseMemoryRequirements ) const VULKAN_HPP_NOEXCEPT + + //=== VK_VERSION_1_4 === + + void vkCmdSetLineStipple( VkCommandBuffer commandBuffer, uint32_t lineStippleFactor, uint16_t lineStipplePattern ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetDeviceImageSparseMemoryRequirements( device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements ); + return ::vkCmdSetLineStipple( commandBuffer, lineStippleFactor, lineStipplePattern ); + } + + VkResult vkMapMemory2( VkDevice device, const VkMemoryMapInfo * pMemoryMapInfo, void ** ppData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkMapMemory2( device, pMemoryMapInfo, ppData ); + } + + VkResult vkUnmapMemory2( VkDevice device, const VkMemoryUnmapInfo * pMemoryUnmapInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkUnmapMemory2( device, pMemoryUnmapInfo ); + } + + void vkCmdBindIndexBuffer2( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkDeviceSize size, VkIndexType indexType ) const + VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBindIndexBuffer2( commandBuffer, buffer, offset, size, indexType ); + } + + void vkGetRenderingAreaGranularity( VkDevice device, const VkRenderingAreaInfo * pRenderingAreaInfo, VkExtent2D * pGranularity ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetRenderingAreaGranularity( device, pRenderingAreaInfo, pGranularity ); + } + + void vkGetDeviceImageSubresourceLayout( VkDevice device, + const VkDeviceImageSubresourceInfo * pInfo, + VkSubresourceLayout2 * pLayout ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetDeviceImageSubresourceLayout( device, pInfo, pLayout ); + } + + void vkGetImageSubresourceLayout2( VkDevice device, + VkImage image, + const VkImageSubresource2 * pSubresource, + VkSubresourceLayout2 * pLayout ) const VULKAN_HPP_NOEXCEPT + { + return ::vkGetImageSubresourceLayout2( device, image, pSubresource, pLayout ); + } + + void vkCmdPushDescriptorSet( VkCommandBuffer commandBuffer, + VkPipelineBindPoint pipelineBindPoint, + VkPipelineLayout layout, + uint32_t set, + uint32_t descriptorWriteCount, + const VkWriteDescriptorSet * pDescriptorWrites ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdPushDescriptorSet( commandBuffer, pipelineBindPoint, layout, set, descriptorWriteCount, pDescriptorWrites ); + } + + void vkCmdPushDescriptorSetWithTemplate( VkCommandBuffer commandBuffer, + VkDescriptorUpdateTemplate descriptorUpdateTemplate, + VkPipelineLayout layout, + uint32_t set, + const void * pData ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdPushDescriptorSetWithTemplate( commandBuffer, descriptorUpdateTemplate, layout, set, pData ); + } + + void vkCmdSetRenderingAttachmentLocations( VkCommandBuffer commandBuffer, + const VkRenderingAttachmentLocationInfo * pLocationInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetRenderingAttachmentLocations( commandBuffer, pLocationInfo ); + } + + void vkCmdSetRenderingInputAttachmentIndices( VkCommandBuffer commandBuffer, + const VkRenderingInputAttachmentIndexInfo * pInputAttachmentIndexInfo ) 
const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdSetRenderingInputAttachmentIndices( commandBuffer, pInputAttachmentIndexInfo ); + } + + void vkCmdBindDescriptorSets2( VkCommandBuffer commandBuffer, const VkBindDescriptorSetsInfo * pBindDescriptorSetsInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdBindDescriptorSets2( commandBuffer, pBindDescriptorSetsInfo ); + } + + void vkCmdPushConstants2( VkCommandBuffer commandBuffer, const VkPushConstantsInfo * pPushConstantsInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdPushConstants2( commandBuffer, pPushConstantsInfo ); + } + + void vkCmdPushDescriptorSet2( VkCommandBuffer commandBuffer, const VkPushDescriptorSetInfo * pPushDescriptorSetInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdPushDescriptorSet2( commandBuffer, pPushDescriptorSetInfo ); + } + + void vkCmdPushDescriptorSetWithTemplate2( VkCommandBuffer commandBuffer, + const VkPushDescriptorSetWithTemplateInfo * pPushDescriptorSetWithTemplateInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCmdPushDescriptorSetWithTemplate2( commandBuffer, pPushDescriptorSetWithTemplateInfo ); + } + + VkResult vkCopyMemoryToImage( VkDevice device, const VkCopyMemoryToImageInfo * pCopyMemoryToImageInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCopyMemoryToImage( device, pCopyMemoryToImageInfo ); + } + + VkResult vkCopyImageToMemory( VkDevice device, const VkCopyImageToMemoryInfo * pCopyImageToMemoryInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCopyImageToMemory( device, pCopyImageToMemoryInfo ); + } + + VkResult vkCopyImageToImage( VkDevice device, const VkCopyImageToImageInfo * pCopyImageToImageInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCopyImageToImage( device, pCopyImageToImageInfo ); + } + + VkResult + vkTransitionImageLayout( VkDevice device, uint32_t transitionCount, const VkHostImageLayoutTransitionInfo * pTransitions ) const VULKAN_HPP_NOEXCEPT + { + return ::vkTransitionImageLayout( device, transitionCount, pTransitions ); } - //=== VK_KHR_surface === //=== VK_KHR_surface === - void vkDestroySurfaceKHR( VkInstance instance, VkSurfaceKHR surface, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroySurfaceKHR( instance, surface, pAllocator ); - } void vkDestroySurfaceKHR( VkInstance instance, VkSurfaceKHR surface, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT { return ::vkDestroySurfaceKHR( instance, surface, pAllocator ); } - VkResult vkGetPhysicalDeviceSurfaceSupportKHR( VkPhysicalDevice physicalDevice, - uint32_t queueFamilyIndex, - VkSurfaceKHR surface, - VkBool32 * pSupported ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceSurfaceSupportKHR( physicalDevice, queueFamilyIndex, surface, pSupported ); - } VkResult vkGetPhysicalDeviceSurfaceSupportKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, VkSurfaceKHR surface, @@ -3768,12 +2546,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkGetPhysicalDeviceSurfaceSupportKHR( physicalDevice, queueFamilyIndex, surface, pSupported ); } - VkResult vkGetPhysicalDeviceSurfaceCapabilitiesKHR( VkPhysicalDevice physicalDevice, - VkSurfaceKHR surface, - VkSurfaceCapabilitiesKHR * pSurfaceCapabilities ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceSurfaceCapabilitiesKHR( physicalDevice, surface, pSurfaceCapabilities ); - } VkResult vkGetPhysicalDeviceSurfaceCapabilitiesKHR( VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, VkSurfaceCapabilitiesKHR * pSurfaceCapabilities ) const VULKAN_HPP_NOEXCEPT @@ -3781,13 +2553,6 @@ namespace 
VULKAN_HPP_NAMESPACE return ::vkGetPhysicalDeviceSurfaceCapabilitiesKHR( physicalDevice, surface, pSurfaceCapabilities ); } - VkResult vkGetPhysicalDeviceSurfaceFormatsKHR( VkPhysicalDevice physicalDevice, - VkSurfaceKHR surface, - uint32_t * pSurfaceFormatCount, - VkSurfaceFormatKHR * pSurfaceFormats ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceSurfaceFormatsKHR( physicalDevice, surface, pSurfaceFormatCount, pSurfaceFormats ); - } VkResult vkGetPhysicalDeviceSurfaceFormatsKHR( VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t * pSurfaceFormatCount, @@ -3796,13 +2561,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkGetPhysicalDeviceSurfaceFormatsKHR( physicalDevice, surface, pSurfaceFormatCount, pSurfaceFormats ); } - VkResult vkGetPhysicalDeviceSurfacePresentModesKHR( VkPhysicalDevice physicalDevice, - VkSurfaceKHR surface, - uint32_t * pPresentModeCount, - VkPresentModeKHR * pPresentModes ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceSurfacePresentModesKHR( physicalDevice, surface, pPresentModeCount, pPresentModes ); - } VkResult vkGetPhysicalDeviceSurfacePresentModesKHR( VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t * pPresentModeCount, @@ -3811,16 +2569,8 @@ namespace VULKAN_HPP_NAMESPACE return ::vkGetPhysicalDeviceSurfacePresentModesKHR( physicalDevice, surface, pPresentModeCount, pPresentModes ); } - //=== VK_KHR_swapchain === //=== VK_KHR_swapchain === - VkResult vkCreateSwapchainKHR( VkDevice device, - const VkSwapchainCreateInfoKHR * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkSwapchainKHR * pSwapchain ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateSwapchainKHR( device, pCreateInfo, pAllocator, pSwapchain ); - } VkResult vkCreateSwapchainKHR( VkDevice device, const VkSwapchainCreateInfoKHR * pCreateInfo, const VkAllocationCallbacks * pAllocator, @@ -3829,22 +2579,11 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCreateSwapchainKHR( device, pCreateInfo, pAllocator, pSwapchain ); } - void vkDestroySwapchainKHR( VkDevice device, VkSwapchainKHR swapchain, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroySwapchainKHR( device, swapchain, pAllocator ); - } void vkDestroySwapchainKHR( VkDevice device, VkSwapchainKHR swapchain, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT { return ::vkDestroySwapchainKHR( device, swapchain, pAllocator ); } - VkResult vkGetSwapchainImagesKHR( VkDevice device, - VkSwapchainKHR swapchain, - uint32_t * pSwapchainImageCount, - VkImage * pSwapchainImages ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetSwapchainImagesKHR( device, swapchain, pSwapchainImageCount, pSwapchainImages ); - } VkResult vkGetSwapchainImagesKHR( VkDevice device, VkSwapchainKHR swapchain, uint32_t * pSwapchainImageCount, @@ -3853,55 +2592,29 @@ namespace VULKAN_HPP_NAMESPACE return ::vkGetSwapchainImagesKHR( device, swapchain, pSwapchainImageCount, pSwapchainImages ); } - VkResult vkAcquireNextImageKHR( - VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout, VkSemaphore semaphore, VkFence fence, uint32_t * pImageIndex ) const VULKAN_HPP_NOEXCEPT - { - return ::vkAcquireNextImageKHR( device, swapchain, timeout, semaphore, fence, pImageIndex ); - } VkResult vkAcquireNextImageKHR( VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout, VkSemaphore semaphore, VkFence fence, uint32_t * pImageIndex ) const VULKAN_HPP_NOEXCEPT { return ::vkAcquireNextImageKHR( device, swapchain, timeout, semaphore, fence, pImageIndex ); } - VkResult 
vkQueuePresentKHR( VkQueue queue, const VkPresentInfoKHR * pPresentInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkQueuePresentKHR( queue, pPresentInfo ); - } VkResult vkQueuePresentKHR( VkQueue queue, const VkPresentInfoKHR * pPresentInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkQueuePresentKHR( queue, pPresentInfo ); } - VkResult vkGetDeviceGroupPresentCapabilitiesKHR( VkDevice device, - VkDeviceGroupPresentCapabilitiesKHR * pDeviceGroupPresentCapabilities ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDeviceGroupPresentCapabilitiesKHR( device, pDeviceGroupPresentCapabilities ); - } VkResult vkGetDeviceGroupPresentCapabilitiesKHR( VkDevice device, VkDeviceGroupPresentCapabilitiesKHR * pDeviceGroupPresentCapabilities ) const VULKAN_HPP_NOEXCEPT { return ::vkGetDeviceGroupPresentCapabilitiesKHR( device, pDeviceGroupPresentCapabilities ); } - VkResult - vkGetDeviceGroupSurfacePresentModesKHR( VkDevice device, VkSurfaceKHR surface, VkDeviceGroupPresentModeFlagsKHR * pModes ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDeviceGroupSurfacePresentModesKHR( device, surface, pModes ); - } VkResult vkGetDeviceGroupSurfacePresentModesKHR( VkDevice device, VkSurfaceKHR surface, VkDeviceGroupPresentModeFlagsKHR * pModes ) const VULKAN_HPP_NOEXCEPT { return ::vkGetDeviceGroupSurfacePresentModesKHR( device, surface, pModes ); } - VkResult vkGetPhysicalDevicePresentRectanglesKHR( VkPhysicalDevice physicalDevice, - VkSurfaceKHR surface, - uint32_t * pRectCount, - VkRect2D * pRects ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDevicePresentRectanglesKHR( physicalDevice, surface, pRectCount, pRects ); - } VkResult vkGetPhysicalDevicePresentRectanglesKHR( VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t * pRectCount, @@ -3910,24 +2623,13 @@ namespace VULKAN_HPP_NAMESPACE return ::vkGetPhysicalDevicePresentRectanglesKHR( physicalDevice, surface, pRectCount, pRects ); } - VkResult vkAcquireNextImage2KHR( VkDevice device, const VkAcquireNextImageInfoKHR * pAcquireInfo, uint32_t * pImageIndex ) const VULKAN_HPP_NOEXCEPT - { - return ::vkAcquireNextImage2KHR( device, pAcquireInfo, pImageIndex ); - } VkResult vkAcquireNextImage2KHR( VkDevice device, const VkAcquireNextImageInfoKHR * pAcquireInfo, uint32_t * pImageIndex ) const VULKAN_HPP_NOEXCEPT { return ::vkAcquireNextImage2KHR( device, pAcquireInfo, pImageIndex ); } - //=== VK_KHR_display === //=== VK_KHR_display === - VkResult vkGetPhysicalDeviceDisplayPropertiesKHR( VkPhysicalDevice physicalDevice, - uint32_t * pPropertyCount, - VkDisplayPropertiesKHR * pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceDisplayPropertiesKHR( physicalDevice, pPropertyCount, pProperties ); - } VkResult vkGetPhysicalDeviceDisplayPropertiesKHR( VkPhysicalDevice physicalDevice, uint32_t * pPropertyCount, VkDisplayPropertiesKHR * pProperties ) const VULKAN_HPP_NOEXCEPT @@ -3935,12 +2637,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkGetPhysicalDeviceDisplayPropertiesKHR( physicalDevice, pPropertyCount, pProperties ); } - VkResult vkGetPhysicalDeviceDisplayPlanePropertiesKHR( VkPhysicalDevice physicalDevice, - uint32_t * pPropertyCount, - VkDisplayPlanePropertiesKHR * pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceDisplayPlanePropertiesKHR( physicalDevice, pPropertyCount, pProperties ); - } VkResult vkGetPhysicalDeviceDisplayPlanePropertiesKHR( VkPhysicalDevice physicalDevice, uint32_t * pPropertyCount, VkDisplayPlanePropertiesKHR * pProperties ) const VULKAN_HPP_NOEXCEPT @@ -3948,13 +2644,6 @@ namespace 
VULKAN_HPP_NAMESPACE return ::vkGetPhysicalDeviceDisplayPlanePropertiesKHR( physicalDevice, pPropertyCount, pProperties ); } - VkResult vkGetDisplayPlaneSupportedDisplaysKHR( VkPhysicalDevice physicalDevice, - uint32_t planeIndex, - uint32_t * pDisplayCount, - VkDisplayKHR * pDisplays ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDisplayPlaneSupportedDisplaysKHR( physicalDevice, planeIndex, pDisplayCount, pDisplays ); - } VkResult vkGetDisplayPlaneSupportedDisplaysKHR( VkPhysicalDevice physicalDevice, uint32_t planeIndex, uint32_t * pDisplayCount, @@ -3963,13 +2652,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkGetDisplayPlaneSupportedDisplaysKHR( physicalDevice, planeIndex, pDisplayCount, pDisplays ); } - VkResult vkGetDisplayModePropertiesKHR( VkPhysicalDevice physicalDevice, - VkDisplayKHR display, - uint32_t * pPropertyCount, - VkDisplayModePropertiesKHR * pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDisplayModePropertiesKHR( physicalDevice, display, pPropertyCount, pProperties ); - } VkResult vkGetDisplayModePropertiesKHR( VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t * pPropertyCount, @@ -3978,14 +2660,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkGetDisplayModePropertiesKHR( physicalDevice, display, pPropertyCount, pProperties ); } - VkResult vkCreateDisplayModeKHR( VkPhysicalDevice physicalDevice, - VkDisplayKHR display, - const VkDisplayModeCreateInfoKHR * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkDisplayModeKHR * pMode ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateDisplayModeKHR( physicalDevice, display, pCreateInfo, pAllocator, pMode ); - } VkResult vkCreateDisplayModeKHR( VkPhysicalDevice physicalDevice, VkDisplayKHR display, const VkDisplayModeCreateInfoKHR * pCreateInfo, @@ -3995,13 +2669,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCreateDisplayModeKHR( physicalDevice, display, pCreateInfo, pAllocator, pMode ); } - VkResult vkGetDisplayPlaneCapabilitiesKHR( VkPhysicalDevice physicalDevice, - VkDisplayModeKHR mode, - uint32_t planeIndex, - VkDisplayPlaneCapabilitiesKHR * pCapabilities ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDisplayPlaneCapabilitiesKHR( physicalDevice, mode, planeIndex, pCapabilities ); - } VkResult vkGetDisplayPlaneCapabilitiesKHR( VkPhysicalDevice physicalDevice, VkDisplayModeKHR mode, uint32_t planeIndex, @@ -4010,13 +2677,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkGetDisplayPlaneCapabilitiesKHR( physicalDevice, mode, planeIndex, pCapabilities ); } - VkResult vkCreateDisplayPlaneSurfaceKHR( VkInstance instance, - const VkDisplaySurfaceCreateInfoKHR * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateDisplayPlaneSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface ); - } VkResult vkCreateDisplayPlaneSurfaceKHR( VkInstance instance, const VkDisplaySurfaceCreateInfoKHR * pCreateInfo, const VkAllocationCallbacks * pAllocator, @@ -4025,7 +2685,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCreateDisplayPlaneSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface ); } - //=== VK_KHR_display_swapchain === //=== VK_KHR_display_swapchain === VkResult vkCreateSharedSwapchainsKHR( VkDevice device, @@ -4036,26 +2695,10 @@ namespace VULKAN_HPP_NAMESPACE { return ::vkCreateSharedSwapchainsKHR( device, swapchainCount, pCreateInfos, pAllocator, pSwapchains ); } - VkResult vkCreateSharedSwapchainsKHR( VkDevice device, - uint32_t swapchainCount, - const VkSwapchainCreateInfoKHR * pCreateInfos, - const 
VkAllocationCallbacks * pAllocator, - VkSwapchainKHR * pSwapchains ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateSharedSwapchainsKHR( device, swapchainCount, pCreateInfos, pAllocator, pSwapchains ); - } # if defined( VK_USE_PLATFORM_XLIB_KHR ) //=== VK_KHR_xlib_surface === - //=== VK_KHR_xlib_surface === - VkResult vkCreateXlibSurfaceKHR( VkInstance instance, - const VkXlibSurfaceCreateInfoKHR * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateXlibSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface ); - } VkResult vkCreateXlibSurfaceKHR( VkInstance instance, const VkXlibSurfaceCreateInfoKHR * pCreateInfo, const VkAllocationCallbacks * pAllocator, @@ -4071,26 +2714,11 @@ namespace VULKAN_HPP_NAMESPACE { return ::vkGetPhysicalDeviceXlibPresentationSupportKHR( physicalDevice, queueFamilyIndex, dpy, visualID ); } - VkBool32 vkGetPhysicalDeviceXlibPresentationSupportKHR( VkPhysicalDevice physicalDevice, - uint32_t queueFamilyIndex, - Display * dpy, - VisualID visualID ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceXlibPresentationSupportKHR( physicalDevice, queueFamilyIndex, dpy, visualID ); - } # endif /*VK_USE_PLATFORM_XLIB_KHR*/ # if defined( VK_USE_PLATFORM_XCB_KHR ) //=== VK_KHR_xcb_surface === - //=== VK_KHR_xcb_surface === - VkResult vkCreateXcbSurfaceKHR( VkInstance instance, - const VkXcbSurfaceCreateInfoKHR * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateXcbSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface ); - } VkResult vkCreateXcbSurfaceKHR( VkInstance instance, const VkXcbSurfaceCreateInfoKHR * pCreateInfo, const VkAllocationCallbacks * pAllocator, @@ -4106,26 +2734,11 @@ namespace VULKAN_HPP_NAMESPACE { return ::vkGetPhysicalDeviceXcbPresentationSupportKHR( physicalDevice, queueFamilyIndex, connection, visual_id ); } - VkBool32 vkGetPhysicalDeviceXcbPresentationSupportKHR( VkPhysicalDevice physicalDevice, - uint32_t queueFamilyIndex, - xcb_connection_t * connection, - xcb_visualid_t visual_id ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceXcbPresentationSupportKHR( physicalDevice, queueFamilyIndex, connection, visual_id ); - } # endif /*VK_USE_PLATFORM_XCB_KHR*/ # if defined( VK_USE_PLATFORM_WAYLAND_KHR ) //=== VK_KHR_wayland_surface === - //=== VK_KHR_wayland_surface === - VkResult vkCreateWaylandSurfaceKHR( VkInstance instance, - const VkWaylandSurfaceCreateInfoKHR * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateWaylandSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface ); - } VkResult vkCreateWaylandSurfaceKHR( VkInstance instance, const VkWaylandSurfaceCreateInfoKHR * pCreateInfo, const VkAllocationCallbacks * pAllocator, @@ -4140,17 +2753,10 @@ namespace VULKAN_HPP_NAMESPACE { return ::vkGetPhysicalDeviceWaylandPresentationSupportKHR( physicalDevice, queueFamilyIndex, display ); } - VkBool32 vkGetPhysicalDeviceWaylandPresentationSupportKHR( VkPhysicalDevice physicalDevice, - uint32_t queueFamilyIndex, - struct wl_display * display ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceWaylandPresentationSupportKHR( physicalDevice, queueFamilyIndex, display ); - } # endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ # if defined( VK_USE_PLATFORM_ANDROID_KHR ) //=== VK_KHR_android_surface === - //=== VK_KHR_android_surface === VkResult vkCreateAndroidSurfaceKHR( VkInstance 
instance, const VkAndroidSurfaceCreateInfoKHR * pCreateInfo, @@ -4159,26 +2765,11 @@ namespace VULKAN_HPP_NAMESPACE { return ::vkCreateAndroidSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface ); } - VkResult vkCreateAndroidSurfaceKHR( VkInstance instance, - const VkAndroidSurfaceCreateInfoKHR * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateAndroidSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface ); - } # endif /*VK_USE_PLATFORM_ANDROID_KHR*/ # if defined( VK_USE_PLATFORM_WIN32_KHR ) //=== VK_KHR_win32_surface === - //=== VK_KHR_win32_surface === - VkResult vkCreateWin32SurfaceKHR( VkInstance instance, - const VkWin32SurfaceCreateInfoKHR * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateWin32SurfaceKHR( instance, pCreateInfo, pAllocator, pSurface ); - } VkResult vkCreateWin32SurfaceKHR( VkInstance instance, const VkWin32SurfaceCreateInfoKHR * pCreateInfo, const VkAllocationCallbacks * pAllocator, @@ -4191,22 +2782,10 @@ namespace VULKAN_HPP_NAMESPACE { return ::vkGetPhysicalDeviceWin32PresentationSupportKHR( physicalDevice, queueFamilyIndex ); } - VkBool32 vkGetPhysicalDeviceWin32PresentationSupportKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceWin32PresentationSupportKHR( physicalDevice, queueFamilyIndex ); - } # endif /*VK_USE_PLATFORM_WIN32_KHR*/ //=== VK_EXT_debug_report === - //=== VK_EXT_debug_report === - VkResult vkCreateDebugReportCallbackEXT( VkInstance instance, - const VkDebugReportCallbackCreateInfoEXT * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkDebugReportCallbackEXT * pCallback ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateDebugReportCallbackEXT( instance, pCreateInfo, pAllocator, pCallback ); - } VkResult vkCreateDebugReportCallbackEXT( VkInstance instance, const VkDebugReportCallbackCreateInfoEXT * pCreateInfo, const VkAllocationCallbacks * pAllocator, @@ -4215,12 +2794,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCreateDebugReportCallbackEXT( instance, pCreateInfo, pAllocator, pCallback ); } - void vkDestroyDebugReportCallbackEXT( VkInstance instance, - VkDebugReportCallbackEXT callback, - const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyDebugReportCallbackEXT( instance, callback, pAllocator ); - } void vkDestroyDebugReportCallbackEXT( VkInstance instance, VkDebugReportCallbackEXT callback, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT @@ -4228,17 +2801,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkDestroyDebugReportCallbackEXT( instance, callback, pAllocator ); } - void vkDebugReportMessageEXT( VkInstance instance, - VkDebugReportFlagsEXT flags, - VkDebugReportObjectTypeEXT objectType, - uint64_t object, - size_t location, - int32_t messageCode, - const char * pLayerPrefix, - const char * pMessage ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDebugReportMessageEXT( instance, flags, objectType, object, location, messageCode, pLayerPrefix, pMessage ); - } void vkDebugReportMessageEXT( VkInstance instance, VkDebugReportFlagsEXT flags, VkDebugReportObjectTypeEXT objectType, @@ -4251,63 +2813,35 @@ namespace VULKAN_HPP_NAMESPACE return ::vkDebugReportMessageEXT( instance, flags, objectType, object, location, messageCode, pLayerPrefix, pMessage ); } - //=== VK_EXT_debug_marker === //=== VK_EXT_debug_marker === - VkResult 
vkDebugMarkerSetObjectTagEXT( VkDevice device, const VkDebugMarkerObjectTagInfoEXT * pTagInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDebugMarkerSetObjectTagEXT( device, pTagInfo ); - } VkResult vkDebugMarkerSetObjectTagEXT( VkDevice device, const VkDebugMarkerObjectTagInfoEXT * pTagInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkDebugMarkerSetObjectTagEXT( device, pTagInfo ); } - VkResult vkDebugMarkerSetObjectNameEXT( VkDevice device, const VkDebugMarkerObjectNameInfoEXT * pNameInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDebugMarkerSetObjectNameEXT( device, pNameInfo ); - } VkResult vkDebugMarkerSetObjectNameEXT( VkDevice device, const VkDebugMarkerObjectNameInfoEXT * pNameInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkDebugMarkerSetObjectNameEXT( device, pNameInfo ); } - void vkCmdDebugMarkerBeginEXT( VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT * pMarkerInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDebugMarkerBeginEXT( commandBuffer, pMarkerInfo ); - } void vkCmdDebugMarkerBeginEXT( VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT * pMarkerInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdDebugMarkerBeginEXT( commandBuffer, pMarkerInfo ); } - void vkCmdDebugMarkerEndEXT( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDebugMarkerEndEXT( commandBuffer ); - } void vkCmdDebugMarkerEndEXT( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdDebugMarkerEndEXT( commandBuffer ); } - void vkCmdDebugMarkerInsertEXT( VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT * pMarkerInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDebugMarkerInsertEXT( commandBuffer, pMarkerInfo ); - } void vkCmdDebugMarkerInsertEXT( VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT * pMarkerInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdDebugMarkerInsertEXT( commandBuffer, pMarkerInfo ); } - //=== VK_KHR_video_queue === //=== VK_KHR_video_queue === - VkResult vkGetPhysicalDeviceVideoCapabilitiesKHR( VkPhysicalDevice physicalDevice, - const VkVideoProfileInfoKHR * pVideoProfile, - VkVideoCapabilitiesKHR * pCapabilities ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceVideoCapabilitiesKHR( physicalDevice, pVideoProfile, pCapabilities ); - } VkResult vkGetPhysicalDeviceVideoCapabilitiesKHR( VkPhysicalDevice physicalDevice, const VkVideoProfileInfoKHR * pVideoProfile, VkVideoCapabilitiesKHR * pCapabilities ) const VULKAN_HPP_NOEXCEPT @@ -4315,13 +2849,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkGetPhysicalDeviceVideoCapabilitiesKHR( physicalDevice, pVideoProfile, pCapabilities ); } - VkResult vkGetPhysicalDeviceVideoFormatPropertiesKHR( VkPhysicalDevice physicalDevice, - const VkPhysicalDeviceVideoFormatInfoKHR * pVideoFormatInfo, - uint32_t * pVideoFormatPropertyCount, - VkVideoFormatPropertiesKHR * pVideoFormatProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceVideoFormatPropertiesKHR( physicalDevice, pVideoFormatInfo, pVideoFormatPropertyCount, pVideoFormatProperties ); - } VkResult vkGetPhysicalDeviceVideoFormatPropertiesKHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceVideoFormatInfoKHR * pVideoFormatInfo, uint32_t * pVideoFormatPropertyCount, @@ -4330,13 +2857,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkGetPhysicalDeviceVideoFormatPropertiesKHR( physicalDevice, pVideoFormatInfo, pVideoFormatPropertyCount, pVideoFormatProperties ); } - VkResult vkCreateVideoSessionKHR( VkDevice device, - const VkVideoSessionCreateInfoKHR * pCreateInfo, 
- const VkAllocationCallbacks * pAllocator, - VkVideoSessionKHR * pVideoSession ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateVideoSessionKHR( device, pCreateInfo, pAllocator, pVideoSession ); - } VkResult vkCreateVideoSessionKHR( VkDevice device, const VkVideoSessionCreateInfoKHR * pCreateInfo, const VkAllocationCallbacks * pAllocator, @@ -4345,22 +2865,11 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCreateVideoSessionKHR( device, pCreateInfo, pAllocator, pVideoSession ); } - void vkDestroyVideoSessionKHR( VkDevice device, VkVideoSessionKHR videoSession, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyVideoSessionKHR( device, videoSession, pAllocator ); - } void vkDestroyVideoSessionKHR( VkDevice device, VkVideoSessionKHR videoSession, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT { return ::vkDestroyVideoSessionKHR( device, videoSession, pAllocator ); } - VkResult vkGetVideoSessionMemoryRequirementsKHR( VkDevice device, - VkVideoSessionKHR videoSession, - uint32_t * pMemoryRequirementsCount, - VkVideoSessionMemoryRequirementsKHR * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetVideoSessionMemoryRequirementsKHR( device, videoSession, pMemoryRequirementsCount, pMemoryRequirements ); - } VkResult vkGetVideoSessionMemoryRequirementsKHR( VkDevice device, VkVideoSessionKHR videoSession, uint32_t * pMemoryRequirementsCount, @@ -4369,13 +2878,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkGetVideoSessionMemoryRequirementsKHR( device, videoSession, pMemoryRequirementsCount, pMemoryRequirements ); } - VkResult vkBindVideoSessionMemoryKHR( VkDevice device, - VkVideoSessionKHR videoSession, - uint32_t bindSessionMemoryInfoCount, - const VkBindVideoSessionMemoryInfoKHR * pBindSessionMemoryInfos ) const VULKAN_HPP_NOEXCEPT - { - return ::vkBindVideoSessionMemoryKHR( device, videoSession, bindSessionMemoryInfoCount, pBindSessionMemoryInfos ); - } VkResult vkBindVideoSessionMemoryKHR( VkDevice device, VkVideoSessionKHR videoSession, uint32_t bindSessionMemoryInfoCount, @@ -4384,13 +2886,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkBindVideoSessionMemoryKHR( device, videoSession, bindSessionMemoryInfoCount, pBindSessionMemoryInfos ); } - VkResult vkCreateVideoSessionParametersKHR( VkDevice device, - const VkVideoSessionParametersCreateInfoKHR * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkVideoSessionParametersKHR * pVideoSessionParameters ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateVideoSessionParametersKHR( device, pCreateInfo, pAllocator, pVideoSessionParameters ); - } VkResult vkCreateVideoSessionParametersKHR( VkDevice device, const VkVideoSessionParametersCreateInfoKHR * pCreateInfo, const VkAllocationCallbacks * pAllocator, @@ -4399,12 +2894,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCreateVideoSessionParametersKHR( device, pCreateInfo, pAllocator, pVideoSessionParameters ); } - VkResult vkUpdateVideoSessionParametersKHR( VkDevice device, - VkVideoSessionParametersKHR videoSessionParameters, - const VkVideoSessionParametersUpdateInfoKHR * pUpdateInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkUpdateVideoSessionParametersKHR( device, videoSessionParameters, pUpdateInfo ); - } VkResult vkUpdateVideoSessionParametersKHR( VkDevice device, VkVideoSessionParametersKHR videoSessionParameters, const VkVideoSessionParametersUpdateInfoKHR * pUpdateInfo ) const VULKAN_HPP_NOEXCEPT @@ -4412,12 +2901,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkUpdateVideoSessionParametersKHR( device, 
videoSessionParameters, pUpdateInfo ); } - void vkDestroyVideoSessionParametersKHR( VkDevice device, - VkVideoSessionParametersKHR videoSessionParameters, - const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyVideoSessionParametersKHR( device, videoSessionParameters, pAllocator ); - } void vkDestroyVideoSessionParametersKHR( VkDevice device, VkVideoSessionParametersKHR videoSessionParameters, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT @@ -4425,57 +2908,30 @@ namespace VULKAN_HPP_NAMESPACE return ::vkDestroyVideoSessionParametersKHR( device, videoSessionParameters, pAllocator ); } - void vkCmdBeginVideoCodingKHR( VkCommandBuffer commandBuffer, const VkVideoBeginCodingInfoKHR * pBeginInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBeginVideoCodingKHR( commandBuffer, pBeginInfo ); - } void vkCmdBeginVideoCodingKHR( VkCommandBuffer commandBuffer, const VkVideoBeginCodingInfoKHR * pBeginInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdBeginVideoCodingKHR( commandBuffer, pBeginInfo ); } - void vkCmdEndVideoCodingKHR( VkCommandBuffer commandBuffer, const VkVideoEndCodingInfoKHR * pEndCodingInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdEndVideoCodingKHR( commandBuffer, pEndCodingInfo ); - } void vkCmdEndVideoCodingKHR( VkCommandBuffer commandBuffer, const VkVideoEndCodingInfoKHR * pEndCodingInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdEndVideoCodingKHR( commandBuffer, pEndCodingInfo ); } - void vkCmdControlVideoCodingKHR( VkCommandBuffer commandBuffer, const VkVideoCodingControlInfoKHR * pCodingControlInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdControlVideoCodingKHR( commandBuffer, pCodingControlInfo ); - } void vkCmdControlVideoCodingKHR( VkCommandBuffer commandBuffer, const VkVideoCodingControlInfoKHR * pCodingControlInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdControlVideoCodingKHR( commandBuffer, pCodingControlInfo ); } - //=== VK_KHR_video_decode_queue === //=== VK_KHR_video_decode_queue === - void vkCmdDecodeVideoKHR( VkCommandBuffer commandBuffer, const VkVideoDecodeInfoKHR * pDecodeInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDecodeVideoKHR( commandBuffer, pDecodeInfo ); - } void vkCmdDecodeVideoKHR( VkCommandBuffer commandBuffer, const VkVideoDecodeInfoKHR * pDecodeInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdDecodeVideoKHR( commandBuffer, pDecodeInfo ); } - //=== VK_EXT_transform_feedback === //=== VK_EXT_transform_feedback === - void vkCmdBindTransformFeedbackBuffersEXT( VkCommandBuffer commandBuffer, - uint32_t firstBinding, - uint32_t bindingCount, - const VkBuffer * pBuffers, - const VkDeviceSize * pOffsets, - const VkDeviceSize * pSizes ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBindTransformFeedbackBuffersEXT( commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets, pSizes ); - } void vkCmdBindTransformFeedbackBuffersEXT( VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, @@ -4486,14 +2942,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCmdBindTransformFeedbackBuffersEXT( commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets, pSizes ); } - void vkCmdBeginTransformFeedbackEXT( VkCommandBuffer commandBuffer, - uint32_t firstCounterBuffer, - uint32_t counterBufferCount, - const VkBuffer * pCounterBuffers, - const VkDeviceSize * pCounterBufferOffsets ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBeginTransformFeedbackEXT( commandBuffer, firstCounterBuffer, counterBufferCount, pCounterBuffers, pCounterBufferOffsets ); - } void 
vkCmdBeginTransformFeedbackEXT( VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer, uint32_t counterBufferCount, @@ -4503,14 +2951,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCmdBeginTransformFeedbackEXT( commandBuffer, firstCounterBuffer, counterBufferCount, pCounterBuffers, pCounterBufferOffsets ); } - void vkCmdEndTransformFeedbackEXT( VkCommandBuffer commandBuffer, - uint32_t firstCounterBuffer, - uint32_t counterBufferCount, - const VkBuffer * pCounterBuffers, - const VkDeviceSize * pCounterBufferOffsets ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdEndTransformFeedbackEXT( commandBuffer, firstCounterBuffer, counterBufferCount, pCounterBuffers, pCounterBufferOffsets ); - } void vkCmdEndTransformFeedbackEXT( VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer, uint32_t counterBufferCount, @@ -4520,36 +2960,17 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCmdEndTransformFeedbackEXT( commandBuffer, firstCounterBuffer, counterBufferCount, pCounterBuffers, pCounterBufferOffsets ); } - void vkCmdBeginQueryIndexedEXT( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags, uint32_t index ) const - VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBeginQueryIndexedEXT( commandBuffer, queryPool, query, flags, index ); - } void vkCmdBeginQueryIndexedEXT( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags, uint32_t index ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdBeginQueryIndexedEXT( commandBuffer, queryPool, query, flags, index ); } - void vkCmdEndQueryIndexedEXT( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, uint32_t index ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdEndQueryIndexedEXT( commandBuffer, queryPool, query, index ); - } void vkCmdEndQueryIndexedEXT( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, uint32_t index ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdEndQueryIndexedEXT( commandBuffer, queryPool, query, index ); } - void vkCmdDrawIndirectByteCountEXT( VkCommandBuffer commandBuffer, - uint32_t instanceCount, - uint32_t firstInstance, - VkBuffer counterBuffer, - VkDeviceSize counterBufferOffset, - uint32_t counterOffset, - uint32_t vertexStride ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDrawIndirectByteCountEXT( commandBuffer, instanceCount, firstInstance, counterBuffer, counterBufferOffset, counterOffset, vertexStride ); - } void vkCmdDrawIndirectByteCountEXT( VkCommandBuffer commandBuffer, uint32_t instanceCount, uint32_t firstInstance, @@ -4561,16 +2982,8 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCmdDrawIndirectByteCountEXT( commandBuffer, instanceCount, firstInstance, counterBuffer, counterBufferOffset, counterOffset, vertexStride ); } - //=== VK_NVX_binary_import === //=== VK_NVX_binary_import === - VkResult vkCreateCuModuleNVX( VkDevice device, - const VkCuModuleCreateInfoNVX * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkCuModuleNVX * pModule ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateCuModuleNVX( device, pCreateInfo, pAllocator, pModule ); - } VkResult vkCreateCuModuleNVX( VkDevice device, const VkCuModuleCreateInfoNVX * pCreateInfo, const VkAllocationCallbacks * pAllocator, @@ -4579,13 +2992,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCreateCuModuleNVX( device, pCreateInfo, pAllocator, pModule ); } - VkResult vkCreateCuFunctionNVX( VkDevice device, - const VkCuFunctionCreateInfoNVX * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkCuFunctionNVX * pFunction ) const 
VULKAN_HPP_NOEXCEPT - { - return ::vkCreateCuFunctionNVX( device, pCreateInfo, pAllocator, pFunction ); - } VkResult vkCreateCuFunctionNVX( VkDevice device, const VkCuFunctionCreateInfoNVX * pCreateInfo, const VkAllocationCallbacks * pAllocator, @@ -4594,34 +3000,21 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCreateCuFunctionNVX( device, pCreateInfo, pAllocator, pFunction ); } - void vkDestroyCuModuleNVX( VkDevice device, VkCuModuleNVX module, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyCuModuleNVX( device, module, pAllocator ); - } void vkDestroyCuModuleNVX( VkDevice device, VkCuModuleNVX module, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT { return ::vkDestroyCuModuleNVX( device, module, pAllocator ); } - void vkDestroyCuFunctionNVX( VkDevice device, VkCuFunctionNVX function, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyCuFunctionNVX( device, function, pAllocator ); - } void vkDestroyCuFunctionNVX( VkDevice device, VkCuFunctionNVX function, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT { return ::vkDestroyCuFunctionNVX( device, function, pAllocator ); } - void vkCmdCuLaunchKernelNVX( VkCommandBuffer commandBuffer, const VkCuLaunchInfoNVX * pLaunchInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdCuLaunchKernelNVX( commandBuffer, pLaunchInfo ); - } void vkCmdCuLaunchKernelNVX( VkCommandBuffer commandBuffer, const VkCuLaunchInfoNVX * pLaunchInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdCuLaunchKernelNVX( commandBuffer, pLaunchInfo ); } - //=== VK_NVX_image_view_handle === //=== VK_NVX_image_view_handle === uint32_t vkGetImageViewHandleNVX( VkDevice device, const VkImageViewHandleInfoNVX * pInfo ) const VULKAN_HPP_NOEXCEPT @@ -4629,28 +3022,18 @@ namespace VULKAN_HPP_NAMESPACE return ::vkGetImageViewHandleNVX( device, pInfo ); } - VkResult vkGetImageViewAddressNVX( VkDevice device, VkImageView imageView, VkImageViewAddressPropertiesNVX * pProperties ) const VULKAN_HPP_NOEXCEPT + uint64_t vkGetImageViewHandle64NVX( VkDevice device, const VkImageViewHandleInfoNVX * pInfo ) const VULKAN_HPP_NOEXCEPT { - return ::vkGetImageViewAddressNVX( device, imageView, pProperties ); + return ::vkGetImageViewHandle64NVX( device, pInfo ); } + VkResult vkGetImageViewAddressNVX( VkDevice device, VkImageView imageView, VkImageViewAddressPropertiesNVX * pProperties ) const VULKAN_HPP_NOEXCEPT { return ::vkGetImageViewAddressNVX( device, imageView, pProperties ); } - //=== VK_AMD_draw_indirect_count === //=== VK_AMD_draw_indirect_count === - void vkCmdDrawIndirectCountAMD( VkCommandBuffer commandBuffer, - VkBuffer buffer, - VkDeviceSize offset, - VkBuffer countBuffer, - VkDeviceSize countBufferOffset, - uint32_t maxDrawCount, - uint32_t stride ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDrawIndirectCountAMD( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); - } void vkCmdDrawIndirectCountAMD( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, @@ -4662,16 +3045,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCmdDrawIndirectCountAMD( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); } - void vkCmdDrawIndexedIndirectCountAMD( VkCommandBuffer commandBuffer, - VkBuffer buffer, - VkDeviceSize offset, - VkBuffer countBuffer, - VkDeviceSize countBufferOffset, - uint32_t maxDrawCount, - uint32_t stride ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDrawIndexedIndirectCountAMD( 
commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); - } void vkCmdDrawIndexedIndirectCountAMD( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, @@ -4683,18 +3056,8 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCmdDrawIndexedIndirectCountAMD( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); } - //=== VK_AMD_shader_info === //=== VK_AMD_shader_info === - VkResult vkGetShaderInfoAMD( VkDevice device, - VkPipeline pipeline, - VkShaderStageFlagBits shaderStage, - VkShaderInfoTypeAMD infoType, - size_t * pInfoSize, - void * pInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetShaderInfoAMD( device, pipeline, shaderStage, infoType, pInfoSize, pInfo ); - } VkResult vkGetShaderInfoAMD( VkDevice device, VkPipeline pipeline, VkShaderStageFlagBits shaderStage, @@ -4705,13 +3068,8 @@ namespace VULKAN_HPP_NAMESPACE return ::vkGetShaderInfoAMD( device, pipeline, shaderStage, infoType, pInfoSize, pInfo ); } - //=== VK_KHR_dynamic_rendering === //=== VK_KHR_dynamic_rendering === - void vkCmdBeginRenderingKHR( VkCommandBuffer commandBuffer, const VkRenderingInfo * pRenderingInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBeginRenderingKHR( commandBuffer, pRenderingInfo ); - } void vkCmdBeginRenderingKHR( VkCommandBuffer commandBuffer, const VkRenderingInfo * pRenderingInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdBeginRenderingKHR( commandBuffer, pRenderingInfo ); @@ -4721,14 +3079,9 @@ namespace VULKAN_HPP_NAMESPACE { return ::vkCmdEndRenderingKHR( commandBuffer ); } - void vkCmdEndRenderingKHR( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdEndRenderingKHR( commandBuffer ); - } # if defined( VK_USE_PLATFORM_GGP ) //=== VK_GGP_stream_descriptor_surface === - //=== VK_GGP_stream_descriptor_surface === VkResult vkCreateStreamDescriptorSurfaceGGP( VkInstance instance, const VkStreamDescriptorSurfaceCreateInfoGGP * pCreateInfo, @@ -4737,17 +3090,9 @@ namespace VULKAN_HPP_NAMESPACE { return ::vkCreateStreamDescriptorSurfaceGGP( instance, pCreateInfo, pAllocator, pSurface ); } - VkResult vkCreateStreamDescriptorSurfaceGGP( VkInstance instance, - const VkStreamDescriptorSurfaceCreateInfoGGP * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateStreamDescriptorSurfaceGGP( instance, pCreateInfo, pAllocator, pSurface ); - } # endif /*VK_USE_PLATFORM_GGP*/ //=== VK_NV_external_memory_capabilities === - //=== VK_NV_external_memory_capabilities === VkResult vkGetPhysicalDeviceExternalImageFormatPropertiesNV( VkPhysicalDevice physicalDevice, @@ -4762,23 +3107,9 @@ namespace VULKAN_HPP_NAMESPACE return ::vkGetPhysicalDeviceExternalImageFormatPropertiesNV( physicalDevice, format, type, tiling, usage, flags, externalHandleType, pExternalImageFormatProperties ); } - VkResult - vkGetPhysicalDeviceExternalImageFormatPropertiesNV( VkPhysicalDevice physicalDevice, - VkFormat format, - VkImageType type, - VkImageTiling tiling, - VkImageUsageFlags usage, - VkImageCreateFlags flags, - VkExternalMemoryHandleTypeFlagsNV externalHandleType, - VkExternalImageFormatPropertiesNV * pExternalImageFormatProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceExternalImageFormatPropertiesNV( - physicalDevice, format, type, tiling, usage, flags, externalHandleType, pExternalImageFormatProperties ); - } # if defined( VK_USE_PLATFORM_WIN32_KHR ) //=== VK_NV_external_memory_win32 === - //=== 
VK_NV_external_memory_win32 === VkResult vkGetMemoryWin32HandleNV( VkDevice device, VkDeviceMemory memory, @@ -4787,42 +3118,20 @@ namespace VULKAN_HPP_NAMESPACE { return ::vkGetMemoryWin32HandleNV( device, memory, handleType, pHandle ); } - VkResult vkGetMemoryWin32HandleNV( VkDevice device, - VkDeviceMemory memory, - VkExternalMemoryHandleTypeFlagsNV handleType, - HANDLE * pHandle ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetMemoryWin32HandleNV( device, memory, handleType, pHandle ); - } # endif /*VK_USE_PLATFORM_WIN32_KHR*/ //=== VK_KHR_get_physical_device_properties2 === - //=== VK_KHR_get_physical_device_properties2 === - void vkGetPhysicalDeviceFeatures2KHR( VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2 * pFeatures ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceFeatures2KHR( physicalDevice, pFeatures ); - } void vkGetPhysicalDeviceFeatures2KHR( VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2 * pFeatures ) const VULKAN_HPP_NOEXCEPT { return ::vkGetPhysicalDeviceFeatures2KHR( physicalDevice, pFeatures ); } - void vkGetPhysicalDeviceProperties2KHR( VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2 * pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceProperties2KHR( physicalDevice, pProperties ); - } void vkGetPhysicalDeviceProperties2KHR( VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2 * pProperties ) const VULKAN_HPP_NOEXCEPT { return ::vkGetPhysicalDeviceProperties2KHR( physicalDevice, pProperties ); } - void vkGetPhysicalDeviceFormatProperties2KHR( VkPhysicalDevice physicalDevice, - VkFormat format, - VkFormatProperties2 * pFormatProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceFormatProperties2KHR( physicalDevice, format, pFormatProperties ); - } void vkGetPhysicalDeviceFormatProperties2KHR( VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties2 * pFormatProperties ) const VULKAN_HPP_NOEXCEPT @@ -4830,12 +3139,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkGetPhysicalDeviceFormatProperties2KHR( physicalDevice, format, pFormatProperties ); } - VkResult vkGetPhysicalDeviceImageFormatProperties2KHR( VkPhysicalDevice physicalDevice, - const VkPhysicalDeviceImageFormatInfo2 * pImageFormatInfo, - VkImageFormatProperties2 * pImageFormatProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceImageFormatProperties2KHR( physicalDevice, pImageFormatInfo, pImageFormatProperties ); - } VkResult vkGetPhysicalDeviceImageFormatProperties2KHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceImageFormatInfo2 * pImageFormatInfo, VkImageFormatProperties2 * pImageFormatProperties ) const VULKAN_HPP_NOEXCEPT @@ -4843,12 +3146,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkGetPhysicalDeviceImageFormatProperties2KHR( physicalDevice, pImageFormatInfo, pImageFormatProperties ); } - void vkGetPhysicalDeviceQueueFamilyProperties2KHR( VkPhysicalDevice physicalDevice, - uint32_t * pQueueFamilyPropertyCount, - VkQueueFamilyProperties2 * pQueueFamilyProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceQueueFamilyProperties2KHR( physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties ); - } void vkGetPhysicalDeviceQueueFamilyProperties2KHR( VkPhysicalDevice physicalDevice, uint32_t * pQueueFamilyPropertyCount, VkQueueFamilyProperties2 * pQueueFamilyProperties ) const VULKAN_HPP_NOEXCEPT @@ -4856,24 +3153,12 @@ namespace VULKAN_HPP_NAMESPACE return ::vkGetPhysicalDeviceQueueFamilyProperties2KHR( physicalDevice, pQueueFamilyPropertyCount, 
pQueueFamilyProperties ); } - void vkGetPhysicalDeviceMemoryProperties2KHR( VkPhysicalDevice physicalDevice, - VkPhysicalDeviceMemoryProperties2 * pMemoryProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceMemoryProperties2KHR( physicalDevice, pMemoryProperties ); - } void vkGetPhysicalDeviceMemoryProperties2KHR( VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties2 * pMemoryProperties ) const VULKAN_HPP_NOEXCEPT { return ::vkGetPhysicalDeviceMemoryProperties2KHR( physicalDevice, pMemoryProperties ); } - void vkGetPhysicalDeviceSparseImageFormatProperties2KHR( VkPhysicalDevice physicalDevice, - const VkPhysicalDeviceSparseImageFormatInfo2 * pFormatInfo, - uint32_t * pPropertyCount, - VkSparseImageFormatProperties2 * pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceSparseImageFormatProperties2KHR( physicalDevice, pFormatInfo, pPropertyCount, pProperties ); - } void vkGetPhysicalDeviceSparseImageFormatProperties2KHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSparseImageFormatInfo2 * pFormatInfo, uint32_t * pPropertyCount, @@ -4882,17 +3167,8 @@ namespace VULKAN_HPP_NAMESPACE return ::vkGetPhysicalDeviceSparseImageFormatProperties2KHR( physicalDevice, pFormatInfo, pPropertyCount, pProperties ); } - //=== VK_KHR_device_group === //=== VK_KHR_device_group === - void vkGetDeviceGroupPeerMemoryFeaturesKHR( VkDevice device, - uint32_t heapIndex, - uint32_t localDeviceIndex, - uint32_t remoteDeviceIndex, - VkPeerMemoryFeatureFlags * pPeerMemoryFeatures ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDeviceGroupPeerMemoryFeaturesKHR( device, heapIndex, localDeviceIndex, remoteDeviceIndex, pPeerMemoryFeatures ); - } void vkGetDeviceGroupPeerMemoryFeaturesKHR( VkDevice device, uint32_t heapIndex, uint32_t localDeviceIndex, @@ -4902,10 +3178,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkGetDeviceGroupPeerMemoryFeaturesKHR( device, heapIndex, localDeviceIndex, remoteDeviceIndex, pPeerMemoryFeatures ); } - void vkCmdSetDeviceMaskKHR( VkCommandBuffer commandBuffer, uint32_t deviceMask ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetDeviceMaskKHR( commandBuffer, deviceMask ); - } void vkCmdSetDeviceMaskKHR( VkCommandBuffer commandBuffer, uint32_t deviceMask ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdSetDeviceMaskKHR( commandBuffer, deviceMask ); @@ -4921,20 +3193,9 @@ namespace VULKAN_HPP_NAMESPACE { return ::vkCmdDispatchBaseKHR( commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ ); } - void vkCmdDispatchBaseKHR( VkCommandBuffer commandBuffer, - uint32_t baseGroupX, - uint32_t baseGroupY, - uint32_t baseGroupZ, - uint32_t groupCountX, - uint32_t groupCountY, - uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDispatchBaseKHR( commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ ); - } # if defined( VK_USE_PLATFORM_VI_NN ) //=== VK_NN_vi_surface === - //=== VK_NN_vi_surface === VkResult vkCreateViSurfaceNN( VkInstance instance, const VkViSurfaceCreateInfoNN * pCreateInfo, @@ -4943,36 +3204,17 @@ namespace VULKAN_HPP_NAMESPACE { return ::vkCreateViSurfaceNN( instance, pCreateInfo, pAllocator, pSurface ); } - VkResult vkCreateViSurfaceNN( VkInstance instance, - const VkViSurfaceCreateInfoNN * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateViSurfaceNN( instance, pCreateInfo, pAllocator, pSurface ); - } # endif /*VK_USE_PLATFORM_VI_NN*/ //=== 
VK_KHR_maintenance1 === - //=== VK_KHR_maintenance1 === - void vkTrimCommandPoolKHR( VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags ) const VULKAN_HPP_NOEXCEPT - { - return ::vkTrimCommandPoolKHR( device, commandPool, flags ); - } void vkTrimCommandPoolKHR( VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags ) const VULKAN_HPP_NOEXCEPT { return ::vkTrimCommandPoolKHR( device, commandPool, flags ); } - //=== VK_KHR_device_group_creation === //=== VK_KHR_device_group_creation === - VkResult vkEnumeratePhysicalDeviceGroupsKHR( VkInstance instance, - uint32_t * pPhysicalDeviceGroupCount, - VkPhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkEnumeratePhysicalDeviceGroupsKHR( instance, pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties ); - } VkResult vkEnumeratePhysicalDeviceGroupsKHR( VkInstance instance, uint32_t * pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties ) const VULKAN_HPP_NOEXCEPT @@ -4980,7 +3222,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkEnumeratePhysicalDeviceGroupsKHR( instance, pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties ); } - //=== VK_KHR_external_memory_capabilities === //=== VK_KHR_external_memory_capabilities === void vkGetPhysicalDeviceExternalBufferPropertiesKHR( VkPhysicalDevice physicalDevice, @@ -4989,22 +3230,10 @@ namespace VULKAN_HPP_NAMESPACE { return ::vkGetPhysicalDeviceExternalBufferPropertiesKHR( physicalDevice, pExternalBufferInfo, pExternalBufferProperties ); } - void vkGetPhysicalDeviceExternalBufferPropertiesKHR( VkPhysicalDevice physicalDevice, - const VkPhysicalDeviceExternalBufferInfo * pExternalBufferInfo, - VkExternalBufferProperties * pExternalBufferProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceExternalBufferPropertiesKHR( physicalDevice, pExternalBufferInfo, pExternalBufferProperties ); - } # if defined( VK_USE_PLATFORM_WIN32_KHR ) //=== VK_KHR_external_memory_win32 === - //=== VK_KHR_external_memory_win32 === - VkResult - vkGetMemoryWin32HandleKHR( VkDevice device, const VkMemoryGetWin32HandleInfoKHR * pGetWin32HandleInfo, HANDLE * pHandle ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetMemoryWin32HandleKHR( device, pGetWin32HandleInfo, pHandle ); - } VkResult vkGetMemoryWin32HandleKHR( VkDevice device, const VkMemoryGetWin32HandleInfoKHR * pGetWin32HandleInfo, HANDLE * pHandle ) const VULKAN_HPP_NOEXCEPT { @@ -5018,34 +3247,15 @@ namespace VULKAN_HPP_NAMESPACE { return ::vkGetMemoryWin32HandlePropertiesKHR( device, handleType, handle, pMemoryWin32HandleProperties ); } - VkResult vkGetMemoryWin32HandlePropertiesKHR( VkDevice device, - VkExternalMemoryHandleTypeFlagBits handleType, - HANDLE handle, - VkMemoryWin32HandlePropertiesKHR * pMemoryWin32HandleProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetMemoryWin32HandlePropertiesKHR( device, handleType, handle, pMemoryWin32HandleProperties ); - } # endif /*VK_USE_PLATFORM_WIN32_KHR*/ //=== VK_KHR_external_memory_fd === - //=== VK_KHR_external_memory_fd === - VkResult vkGetMemoryFdKHR( VkDevice device, const VkMemoryGetFdInfoKHR * pGetFdInfo, int * pFd ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetMemoryFdKHR( device, pGetFdInfo, pFd ); - } VkResult vkGetMemoryFdKHR( VkDevice device, const VkMemoryGetFdInfoKHR * pGetFdInfo, int * pFd ) const VULKAN_HPP_NOEXCEPT { return ::vkGetMemoryFdKHR( device, pGetFdInfo, pFd ); } - VkResult vkGetMemoryFdPropertiesKHR( VkDevice device, - 
VkExternalMemoryHandleTypeFlagBits handleType, - int fd, - VkMemoryFdPropertiesKHR * pMemoryFdProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetMemoryFdPropertiesKHR( device, handleType, fd, pMemoryFdProperties ); - } VkResult vkGetMemoryFdPropertiesKHR( VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, int fd, @@ -5054,7 +3264,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkGetMemoryFdPropertiesKHR( device, handleType, fd, pMemoryFdProperties ); } - //=== VK_KHR_external_semaphore_capabilities === //=== VK_KHR_external_semaphore_capabilities === void vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( VkPhysicalDevice physicalDevice, @@ -5063,22 +3272,10 @@ namespace VULKAN_HPP_NAMESPACE { return ::vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties ); } - void vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( VkPhysicalDevice physicalDevice, - const VkPhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo, - VkExternalSemaphoreProperties * pExternalSemaphoreProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties ); - } # if defined( VK_USE_PLATFORM_WIN32_KHR ) //=== VK_KHR_external_semaphore_win32 === - //=== VK_KHR_external_semaphore_win32 === - VkResult vkImportSemaphoreWin32HandleKHR( VkDevice device, - const VkImportSemaphoreWin32HandleInfoKHR * pImportSemaphoreWin32HandleInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkImportSemaphoreWin32HandleKHR( device, pImportSemaphoreWin32HandleInfo ); - } VkResult vkImportSemaphoreWin32HandleKHR( VkDevice device, const VkImportSemaphoreWin32HandleInfoKHR * pImportSemaphoreWin32HandleInfo ) const VULKAN_HPP_NOEXCEPT { @@ -5091,47 +3288,22 @@ namespace VULKAN_HPP_NAMESPACE { return ::vkGetSemaphoreWin32HandleKHR( device, pGetWin32HandleInfo, pHandle ); } - VkResult vkGetSemaphoreWin32HandleKHR( VkDevice device, - const VkSemaphoreGetWin32HandleInfoKHR * pGetWin32HandleInfo, - HANDLE * pHandle ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetSemaphoreWin32HandleKHR( device, pGetWin32HandleInfo, pHandle ); - } # endif /*VK_USE_PLATFORM_WIN32_KHR*/ //=== VK_KHR_external_semaphore_fd === - //=== VK_KHR_external_semaphore_fd === - VkResult vkImportSemaphoreFdKHR( VkDevice device, const VkImportSemaphoreFdInfoKHR * pImportSemaphoreFdInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkImportSemaphoreFdKHR( device, pImportSemaphoreFdInfo ); - } VkResult vkImportSemaphoreFdKHR( VkDevice device, const VkImportSemaphoreFdInfoKHR * pImportSemaphoreFdInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkImportSemaphoreFdKHR( device, pImportSemaphoreFdInfo ); } - VkResult vkGetSemaphoreFdKHR( VkDevice device, const VkSemaphoreGetFdInfoKHR * pGetFdInfo, int * pFd ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetSemaphoreFdKHR( device, pGetFdInfo, pFd ); - } VkResult vkGetSemaphoreFdKHR( VkDevice device, const VkSemaphoreGetFdInfoKHR * pGetFdInfo, int * pFd ) const VULKAN_HPP_NOEXCEPT { return ::vkGetSemaphoreFdKHR( device, pGetFdInfo, pFd ); } - //=== VK_KHR_push_descriptor === //=== VK_KHR_push_descriptor === - void vkCmdPushDescriptorSetKHR( VkCommandBuffer commandBuffer, - VkPipelineBindPoint pipelineBindPoint, - VkPipelineLayout layout, - uint32_t set, - uint32_t descriptorWriteCount, - const VkWriteDescriptorSet * pDescriptorWrites ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdPushDescriptorSetKHR( commandBuffer, pipelineBindPoint, layout, set, 
descriptorWriteCount, pDescriptorWrites ); - } void vkCmdPushDescriptorSetKHR( VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, @@ -5142,14 +3314,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCmdPushDescriptorSetKHR( commandBuffer, pipelineBindPoint, layout, set, descriptorWriteCount, pDescriptorWrites ); } - void vkCmdPushDescriptorSetWithTemplateKHR( VkCommandBuffer commandBuffer, - VkDescriptorUpdateTemplate descriptorUpdateTemplate, - VkPipelineLayout layout, - uint32_t set, - const void * pData ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdPushDescriptorSetWithTemplateKHR( commandBuffer, descriptorUpdateTemplate, layout, set, pData ); - } void vkCmdPushDescriptorSetWithTemplateKHR( VkCommandBuffer commandBuffer, VkDescriptorUpdateTemplate descriptorUpdateTemplate, VkPipelineLayout layout, @@ -5159,39 +3323,21 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCmdPushDescriptorSetWithTemplateKHR( commandBuffer, descriptorUpdateTemplate, layout, set, pData ); } - //=== VK_EXT_conditional_rendering === //=== VK_EXT_conditional_rendering === - void vkCmdBeginConditionalRenderingEXT( VkCommandBuffer commandBuffer, - const VkConditionalRenderingBeginInfoEXT * pConditionalRenderingBegin ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBeginConditionalRenderingEXT( commandBuffer, pConditionalRenderingBegin ); - } void vkCmdBeginConditionalRenderingEXT( VkCommandBuffer commandBuffer, const VkConditionalRenderingBeginInfoEXT * pConditionalRenderingBegin ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdBeginConditionalRenderingEXT( commandBuffer, pConditionalRenderingBegin ); } - void vkCmdEndConditionalRenderingEXT( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdEndConditionalRenderingEXT( commandBuffer ); - } void vkCmdEndConditionalRenderingEXT( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdEndConditionalRenderingEXT( commandBuffer ); } - //=== VK_KHR_descriptor_update_template === //=== VK_KHR_descriptor_update_template === - VkResult vkCreateDescriptorUpdateTemplateKHR( VkDevice device, - const VkDescriptorUpdateTemplateCreateInfo * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkDescriptorUpdateTemplate * pDescriptorUpdateTemplate ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateDescriptorUpdateTemplateKHR( device, pCreateInfo, pAllocator, pDescriptorUpdateTemplate ); - } VkResult vkCreateDescriptorUpdateTemplateKHR( VkDevice device, const VkDescriptorUpdateTemplateCreateInfo * pCreateInfo, const VkAllocationCallbacks * pAllocator, @@ -5200,12 +3346,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCreateDescriptorUpdateTemplateKHR( device, pCreateInfo, pAllocator, pDescriptorUpdateTemplate ); } - void vkDestroyDescriptorUpdateTemplateKHR( VkDevice device, - VkDescriptorUpdateTemplate descriptorUpdateTemplate, - const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyDescriptorUpdateTemplateKHR( device, descriptorUpdateTemplate, pAllocator ); - } void vkDestroyDescriptorUpdateTemplateKHR( VkDevice device, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT @@ -5213,13 +3353,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkDestroyDescriptorUpdateTemplateKHR( device, descriptorUpdateTemplate, pAllocator ); } - void vkUpdateDescriptorSetWithTemplateKHR( VkDevice device, - VkDescriptorSet descriptorSet, - VkDescriptorUpdateTemplate descriptorUpdateTemplate, - const void * pData ) 
const VULKAN_HPP_NOEXCEPT - { - return ::vkUpdateDescriptorSetWithTemplateKHR( device, descriptorSet, descriptorUpdateTemplate, pData ); - } void vkUpdateDescriptorSetWithTemplateKHR( VkDevice device, VkDescriptorSet descriptorSet, VkDescriptorUpdateTemplate descriptorUpdateTemplate, @@ -5228,16 +3361,8 @@ namespace VULKAN_HPP_NAMESPACE return ::vkUpdateDescriptorSetWithTemplateKHR( device, descriptorSet, descriptorUpdateTemplate, pData ); } - //=== VK_NV_clip_space_w_scaling === //=== VK_NV_clip_space_w_scaling === - void vkCmdSetViewportWScalingNV( VkCommandBuffer commandBuffer, - uint32_t firstViewport, - uint32_t viewportCount, - const VkViewportWScalingNV * pViewportWScalings ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetViewportWScalingNV( commandBuffer, firstViewport, viewportCount, pViewportWScalings ); - } void vkCmdSetViewportWScalingNV( VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, @@ -5246,26 +3371,16 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCmdSetViewportWScalingNV( commandBuffer, firstViewport, viewportCount, pViewportWScalings ); } - //=== VK_EXT_direct_mode_display === //=== VK_EXT_direct_mode_display === VkResult vkReleaseDisplayEXT( VkPhysicalDevice physicalDevice, VkDisplayKHR display ) const VULKAN_HPP_NOEXCEPT { return ::vkReleaseDisplayEXT( physicalDevice, display ); } - VkResult vkReleaseDisplayEXT( VkPhysicalDevice physicalDevice, VkDisplayKHR display ) const VULKAN_HPP_NOEXCEPT - { - return ::vkReleaseDisplayEXT( physicalDevice, display ); - } # if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT ) //=== VK_EXT_acquire_xlib_display === - //=== VK_EXT_acquire_xlib_display === - VkResult vkAcquireXlibDisplayEXT( VkPhysicalDevice physicalDevice, Display * dpy, VkDisplayKHR display ) const VULKAN_HPP_NOEXCEPT - { - return ::vkAcquireXlibDisplayEXT( physicalDevice, dpy, display ); - } VkResult vkAcquireXlibDisplayEXT( VkPhysicalDevice physicalDevice, Display * dpy, VkDisplayKHR display ) const VULKAN_HPP_NOEXCEPT { return ::vkAcquireXlibDisplayEXT( physicalDevice, dpy, display ); @@ -5276,22 +3391,10 @@ namespace VULKAN_HPP_NAMESPACE { return ::vkGetRandROutputDisplayEXT( physicalDevice, dpy, rrOutput, pDisplay ); } - VkResult - vkGetRandROutputDisplayEXT( VkPhysicalDevice physicalDevice, Display * dpy, RROutput rrOutput, VkDisplayKHR * pDisplay ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetRandROutputDisplayEXT( physicalDevice, dpy, rrOutput, pDisplay ); - } # endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ //=== VK_EXT_display_surface_counter === - //=== VK_EXT_display_surface_counter === - VkResult vkGetPhysicalDeviceSurfaceCapabilities2EXT( VkPhysicalDevice physicalDevice, - VkSurfaceKHR surface, - VkSurfaceCapabilities2EXT * pSurfaceCapabilities ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceSurfaceCapabilities2EXT( physicalDevice, surface, pSurfaceCapabilities ); - } VkResult vkGetPhysicalDeviceSurfaceCapabilities2EXT( VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, VkSurfaceCapabilities2EXT * pSurfaceCapabilities ) const VULKAN_HPP_NOEXCEPT @@ -5299,25 +3402,13 @@ namespace VULKAN_HPP_NAMESPACE return ::vkGetPhysicalDeviceSurfaceCapabilities2EXT( physicalDevice, surface, pSurfaceCapabilities ); } - //=== VK_EXT_display_control === //=== VK_EXT_display_control === - VkResult vkDisplayPowerControlEXT( VkDevice device, VkDisplayKHR display, const VkDisplayPowerInfoEXT * pDisplayPowerInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDisplayPowerControlEXT( device, display, pDisplayPowerInfo ); - } VkResult 
vkDisplayPowerControlEXT( VkDevice device, VkDisplayKHR display, const VkDisplayPowerInfoEXT * pDisplayPowerInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkDisplayPowerControlEXT( device, display, pDisplayPowerInfo ); } - VkResult vkRegisterDeviceEventEXT( VkDevice device, - const VkDeviceEventInfoEXT * pDeviceEventInfo, - const VkAllocationCallbacks * pAllocator, - VkFence * pFence ) const VULKAN_HPP_NOEXCEPT - { - return ::vkRegisterDeviceEventEXT( device, pDeviceEventInfo, pAllocator, pFence ); - } VkResult vkRegisterDeviceEventEXT( VkDevice device, const VkDeviceEventInfoEXT * pDeviceEventInfo, const VkAllocationCallbacks * pAllocator, @@ -5326,14 +3417,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkRegisterDeviceEventEXT( device, pDeviceEventInfo, pAllocator, pFence ); } - VkResult vkRegisterDisplayEventEXT( VkDevice device, - VkDisplayKHR display, - const VkDisplayEventInfoEXT * pDisplayEventInfo, - const VkAllocationCallbacks * pAllocator, - VkFence * pFence ) const VULKAN_HPP_NOEXCEPT - { - return ::vkRegisterDisplayEventEXT( device, display, pDisplayEventInfo, pAllocator, pFence ); - } VkResult vkRegisterDisplayEventEXT( VkDevice device, VkDisplayKHR display, const VkDisplayEventInfoEXT * pDisplayEventInfo, @@ -5343,13 +3426,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkRegisterDisplayEventEXT( device, display, pDisplayEventInfo, pAllocator, pFence ); } - VkResult vkGetSwapchainCounterEXT( VkDevice device, - VkSwapchainKHR swapchain, - VkSurfaceCounterFlagBitsEXT counter, - uint64_t * pCounterValue ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetSwapchainCounterEXT( device, swapchain, counter, pCounterValue ); - } VkResult vkGetSwapchainCounterEXT( VkDevice device, VkSwapchainKHR swapchain, VkSurfaceCounterFlagBitsEXT counter, @@ -5358,15 +3434,8 @@ namespace VULKAN_HPP_NAMESPACE return ::vkGetSwapchainCounterEXT( device, swapchain, counter, pCounterValue ); } - //=== VK_GOOGLE_display_timing === //=== VK_GOOGLE_display_timing === - VkResult vkGetRefreshCycleDurationGOOGLE( VkDevice device, - VkSwapchainKHR swapchain, - VkRefreshCycleDurationGOOGLE * pDisplayTimingProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetRefreshCycleDurationGOOGLE( device, swapchain, pDisplayTimingProperties ); - } VkResult vkGetRefreshCycleDurationGOOGLE( VkDevice device, VkSwapchainKHR swapchain, VkRefreshCycleDurationGOOGLE * pDisplayTimingProperties ) const VULKAN_HPP_NOEXCEPT @@ -5374,13 +3443,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkGetRefreshCycleDurationGOOGLE( device, swapchain, pDisplayTimingProperties ); } - VkResult vkGetPastPresentationTimingGOOGLE( VkDevice device, - VkSwapchainKHR swapchain, - uint32_t * pPresentationTimingCount, - VkPastPresentationTimingGOOGLE * pPresentationTimings ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPastPresentationTimingGOOGLE( device, swapchain, pPresentationTimingCount, pPresentationTimings ); - } VkResult vkGetPastPresentationTimingGOOGLE( VkDevice device, VkSwapchainKHR swapchain, uint32_t * pPresentationTimingCount, @@ -5389,16 +3451,8 @@ namespace VULKAN_HPP_NAMESPACE return ::vkGetPastPresentationTimingGOOGLE( device, swapchain, pPresentationTimingCount, pPresentationTimings ); } - //=== VK_EXT_discard_rectangles === //=== VK_EXT_discard_rectangles === - void vkCmdSetDiscardRectangleEXT( VkCommandBuffer commandBuffer, - uint32_t firstDiscardRectangle, - uint32_t discardRectangleCount, - const VkRect2D * pDiscardRectangles ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetDiscardRectangleEXT( commandBuffer, firstDiscardRectangle, 
discardRectangleCount, pDiscardRectangles ); - } void vkCmdSetDiscardRectangleEXT( VkCommandBuffer commandBuffer, uint32_t firstDiscardRectangle, uint32_t discardRectangleCount, @@ -5407,34 +3461,18 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCmdSetDiscardRectangleEXT( commandBuffer, firstDiscardRectangle, discardRectangleCount, pDiscardRectangles ); } - void vkCmdSetDiscardRectangleEnableEXT( VkCommandBuffer commandBuffer, VkBool32 discardRectangleEnable ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetDiscardRectangleEnableEXT( commandBuffer, discardRectangleEnable ); - } void vkCmdSetDiscardRectangleEnableEXT( VkCommandBuffer commandBuffer, VkBool32 discardRectangleEnable ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdSetDiscardRectangleEnableEXT( commandBuffer, discardRectangleEnable ); } - void vkCmdSetDiscardRectangleModeEXT( VkCommandBuffer commandBuffer, VkDiscardRectangleModeEXT discardRectangleMode ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetDiscardRectangleModeEXT( commandBuffer, discardRectangleMode ); - } void vkCmdSetDiscardRectangleModeEXT( VkCommandBuffer commandBuffer, VkDiscardRectangleModeEXT discardRectangleMode ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdSetDiscardRectangleModeEXT( commandBuffer, discardRectangleMode ); } - //=== VK_EXT_hdr_metadata === //=== VK_EXT_hdr_metadata === - void vkSetHdrMetadataEXT( VkDevice device, - uint32_t swapchainCount, - const VkSwapchainKHR * pSwapchains, - const VkHdrMetadataEXT * pMetadata ) const VULKAN_HPP_NOEXCEPT - { - return ::vkSetHdrMetadataEXT( device, swapchainCount, pSwapchains, pMetadata ); - } void vkSetHdrMetadataEXT( VkDevice device, uint32_t swapchainCount, const VkSwapchainKHR * pSwapchains, @@ -5443,16 +3481,8 @@ namespace VULKAN_HPP_NAMESPACE return ::vkSetHdrMetadataEXT( device, swapchainCount, pSwapchains, pMetadata ); } - //=== VK_KHR_create_renderpass2 === //=== VK_KHR_create_renderpass2 === - VkResult vkCreateRenderPass2KHR( VkDevice device, - const VkRenderPassCreateInfo2 * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkRenderPass * pRenderPass ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateRenderPass2KHR( device, pCreateInfo, pAllocator, pRenderPass ); - } VkResult vkCreateRenderPass2KHR( VkDevice device, const VkRenderPassCreateInfo2 * pCreateInfo, const VkAllocationCallbacks * pAllocator, @@ -5461,12 +3491,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCreateRenderPass2KHR( device, pCreateInfo, pAllocator, pRenderPass ); } - void vkCmdBeginRenderPass2KHR( VkCommandBuffer commandBuffer, - const VkRenderPassBeginInfo * pRenderPassBegin, - const VkSubpassBeginInfo * pSubpassBeginInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBeginRenderPass2KHR( commandBuffer, pRenderPassBegin, pSubpassBeginInfo ); - } void vkCmdBeginRenderPass2KHR( VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo * pRenderPassBegin, const VkSubpassBeginInfo * pSubpassBeginInfo ) const VULKAN_HPP_NOEXCEPT @@ -5474,12 +3498,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCmdBeginRenderPass2KHR( commandBuffer, pRenderPassBegin, pSubpassBeginInfo ); } - void vkCmdNextSubpass2KHR( VkCommandBuffer commandBuffer, - const VkSubpassBeginInfo * pSubpassBeginInfo, - const VkSubpassEndInfo * pSubpassEndInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdNextSubpass2KHR( commandBuffer, pSubpassBeginInfo, pSubpassEndInfo ); - } void vkCmdNextSubpass2KHR( VkCommandBuffer commandBuffer, const VkSubpassBeginInfo * pSubpassBeginInfo, const VkSubpassEndInfo * pSubpassEndInfo ) const VULKAN_HPP_NOEXCEPT @@ -5487,28 
+3505,18 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCmdNextSubpass2KHR( commandBuffer, pSubpassBeginInfo, pSubpassEndInfo ); } - void vkCmdEndRenderPass2KHR( VkCommandBuffer commandBuffer, const VkSubpassEndInfo * pSubpassEndInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdEndRenderPass2KHR( commandBuffer, pSubpassEndInfo ); - } void vkCmdEndRenderPass2KHR( VkCommandBuffer commandBuffer, const VkSubpassEndInfo * pSubpassEndInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdEndRenderPass2KHR( commandBuffer, pSubpassEndInfo ); } - //=== VK_KHR_shared_presentable_image === //=== VK_KHR_shared_presentable_image === - VkResult vkGetSwapchainStatusKHR( VkDevice device, VkSwapchainKHR swapchain ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetSwapchainStatusKHR( device, swapchain ); - } VkResult vkGetSwapchainStatusKHR( VkDevice device, VkSwapchainKHR swapchain ) const VULKAN_HPP_NOEXCEPT { return ::vkGetSwapchainStatusKHR( device, swapchain ); } - //=== VK_KHR_external_fence_capabilities === //=== VK_KHR_external_fence_capabilities === void vkGetPhysicalDeviceExternalFencePropertiesKHR( VkPhysicalDevice physicalDevice, @@ -5517,21 +3525,10 @@ namespace VULKAN_HPP_NAMESPACE { return ::vkGetPhysicalDeviceExternalFencePropertiesKHR( physicalDevice, pExternalFenceInfo, pExternalFenceProperties ); } - void vkGetPhysicalDeviceExternalFencePropertiesKHR( VkPhysicalDevice physicalDevice, - const VkPhysicalDeviceExternalFenceInfo * pExternalFenceInfo, - VkExternalFenceProperties * pExternalFenceProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceExternalFencePropertiesKHR( physicalDevice, pExternalFenceInfo, pExternalFenceProperties ); - } # if defined( VK_USE_PLATFORM_WIN32_KHR ) //=== VK_KHR_external_fence_win32 === - //=== VK_KHR_external_fence_win32 === - VkResult vkImportFenceWin32HandleKHR( VkDevice device, const VkImportFenceWin32HandleInfoKHR * pImportFenceWin32HandleInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkImportFenceWin32HandleKHR( device, pImportFenceWin32HandleInfo ); - } VkResult vkImportFenceWin32HandleKHR( VkDevice device, const VkImportFenceWin32HandleInfoKHR * pImportFenceWin32HandleInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkImportFenceWin32HandleKHR( device, pImportFenceWin32HandleInfo ); @@ -5541,46 +3538,22 @@ namespace VULKAN_HPP_NAMESPACE { return ::vkGetFenceWin32HandleKHR( device, pGetWin32HandleInfo, pHandle ); } - VkResult vkGetFenceWin32HandleKHR( VkDevice device, const VkFenceGetWin32HandleInfoKHR * pGetWin32HandleInfo, HANDLE * pHandle ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetFenceWin32HandleKHR( device, pGetWin32HandleInfo, pHandle ); - } # endif /*VK_USE_PLATFORM_WIN32_KHR*/ //=== VK_KHR_external_fence_fd === - //=== VK_KHR_external_fence_fd === - VkResult vkImportFenceFdKHR( VkDevice device, const VkImportFenceFdInfoKHR * pImportFenceFdInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkImportFenceFdKHR( device, pImportFenceFdInfo ); - } VkResult vkImportFenceFdKHR( VkDevice device, const VkImportFenceFdInfoKHR * pImportFenceFdInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkImportFenceFdKHR( device, pImportFenceFdInfo ); } - VkResult vkGetFenceFdKHR( VkDevice device, const VkFenceGetFdInfoKHR * pGetFdInfo, int * pFd ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetFenceFdKHR( device, pGetFdInfo, pFd ); - } VkResult vkGetFenceFdKHR( VkDevice device, const VkFenceGetFdInfoKHR * pGetFdInfo, int * pFd ) const VULKAN_HPP_NOEXCEPT { return ::vkGetFenceFdKHR( device, pGetFdInfo, pFd ); } - //=== VK_KHR_performance_query === //=== 
VK_KHR_performance_query === - VkResult - vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( VkPhysicalDevice physicalDevice, - uint32_t queueFamilyIndex, - uint32_t * pCounterCount, - VkPerformanceCounterKHR * pCounters, - VkPerformanceCounterDescriptionKHR * pCounterDescriptions ) const VULKAN_HPP_NOEXCEPT - { - return ::vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( - physicalDevice, queueFamilyIndex, pCounterCount, pCounters, pCounterDescriptions ); - } VkResult vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, @@ -5592,12 +3565,6 @@ namespace VULKAN_HPP_NAMESPACE physicalDevice, queueFamilyIndex, pCounterCount, pCounters, pCounterDescriptions ); } - void vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( VkPhysicalDevice physicalDevice, - const VkQueryPoolPerformanceCreateInfoKHR * pPerformanceQueryCreateInfo, - uint32_t * pNumPasses ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( physicalDevice, pPerformanceQueryCreateInfo, pNumPasses ); - } void vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( VkPhysicalDevice physicalDevice, const VkQueryPoolPerformanceCreateInfoKHR * pPerformanceQueryCreateInfo, uint32_t * pNumPasses ) const VULKAN_HPP_NOEXCEPT @@ -5605,33 +3572,18 @@ namespace VULKAN_HPP_NAMESPACE return ::vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( physicalDevice, pPerformanceQueryCreateInfo, pNumPasses ); } - VkResult vkAcquireProfilingLockKHR( VkDevice device, const VkAcquireProfilingLockInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkAcquireProfilingLockKHR( device, pInfo ); - } VkResult vkAcquireProfilingLockKHR( VkDevice device, const VkAcquireProfilingLockInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkAcquireProfilingLockKHR( device, pInfo ); } - void vkReleaseProfilingLockKHR( VkDevice device ) const VULKAN_HPP_NOEXCEPT - { - return ::vkReleaseProfilingLockKHR( device ); - } void vkReleaseProfilingLockKHR( VkDevice device ) const VULKAN_HPP_NOEXCEPT { return ::vkReleaseProfilingLockKHR( device ); } - //=== VK_KHR_get_surface_capabilities2 === //=== VK_KHR_get_surface_capabilities2 === - VkResult vkGetPhysicalDeviceSurfaceCapabilities2KHR( VkPhysicalDevice physicalDevice, - const VkPhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, - VkSurfaceCapabilities2KHR * pSurfaceCapabilities ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceSurfaceCapabilities2KHR( physicalDevice, pSurfaceInfo, pSurfaceCapabilities ); - } VkResult vkGetPhysicalDeviceSurfaceCapabilities2KHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, VkSurfaceCapabilities2KHR * pSurfaceCapabilities ) const VULKAN_HPP_NOEXCEPT @@ -5639,13 +3591,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkGetPhysicalDeviceSurfaceCapabilities2KHR( physicalDevice, pSurfaceInfo, pSurfaceCapabilities ); } - VkResult vkGetPhysicalDeviceSurfaceFormats2KHR( VkPhysicalDevice physicalDevice, - const VkPhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, - uint32_t * pSurfaceFormatCount, - VkSurfaceFormat2KHR * pSurfaceFormats ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceSurfaceFormats2KHR( physicalDevice, pSurfaceInfo, pSurfaceFormatCount, pSurfaceFormats ); - } VkResult vkGetPhysicalDeviceSurfaceFormats2KHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, uint32_t * pSurfaceFormatCount, @@ -5654,15 +3599,8 @@ namespace 
VULKAN_HPP_NAMESPACE return ::vkGetPhysicalDeviceSurfaceFormats2KHR( physicalDevice, pSurfaceInfo, pSurfaceFormatCount, pSurfaceFormats ); } - //=== VK_KHR_get_display_properties2 === //=== VK_KHR_get_display_properties2 === - VkResult vkGetPhysicalDeviceDisplayProperties2KHR( VkPhysicalDevice physicalDevice, - uint32_t * pPropertyCount, - VkDisplayProperties2KHR * pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceDisplayProperties2KHR( physicalDevice, pPropertyCount, pProperties ); - } VkResult vkGetPhysicalDeviceDisplayProperties2KHR( VkPhysicalDevice physicalDevice, uint32_t * pPropertyCount, VkDisplayProperties2KHR * pProperties ) const VULKAN_HPP_NOEXCEPT @@ -5670,12 +3608,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkGetPhysicalDeviceDisplayProperties2KHR( physicalDevice, pPropertyCount, pProperties ); } - VkResult vkGetPhysicalDeviceDisplayPlaneProperties2KHR( VkPhysicalDevice physicalDevice, - uint32_t * pPropertyCount, - VkDisplayPlaneProperties2KHR * pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceDisplayPlaneProperties2KHR( physicalDevice, pPropertyCount, pProperties ); - } VkResult vkGetPhysicalDeviceDisplayPlaneProperties2KHR( VkPhysicalDevice physicalDevice, uint32_t * pPropertyCount, VkDisplayPlaneProperties2KHR * pProperties ) const VULKAN_HPP_NOEXCEPT @@ -5683,13 +3615,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkGetPhysicalDeviceDisplayPlaneProperties2KHR( physicalDevice, pPropertyCount, pProperties ); } - VkResult vkGetDisplayModeProperties2KHR( VkPhysicalDevice physicalDevice, - VkDisplayKHR display, - uint32_t * pPropertyCount, - VkDisplayModeProperties2KHR * pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDisplayModeProperties2KHR( physicalDevice, display, pPropertyCount, pProperties ); - } VkResult vkGetDisplayModeProperties2KHR( VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t * pPropertyCount, @@ -5704,16 +3629,9 @@ namespace VULKAN_HPP_NAMESPACE { return ::vkGetDisplayPlaneCapabilities2KHR( physicalDevice, pDisplayPlaneInfo, pCapabilities ); } - VkResult vkGetDisplayPlaneCapabilities2KHR( VkPhysicalDevice physicalDevice, - const VkDisplayPlaneInfo2KHR * pDisplayPlaneInfo, - VkDisplayPlaneCapabilities2KHR * pCapabilities ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDisplayPlaneCapabilities2KHR( physicalDevice, pDisplayPlaneInfo, pCapabilities ); - } # if defined( VK_USE_PLATFORM_IOS_MVK ) //=== VK_MVK_ios_surface === - //=== VK_MVK_ios_surface === VkResult vkCreateIOSSurfaceMVK( VkInstance instance, const VkIOSSurfaceCreateInfoMVK * pCreateInfo, @@ -5722,18 +3640,10 @@ namespace VULKAN_HPP_NAMESPACE { return ::vkCreateIOSSurfaceMVK( instance, pCreateInfo, pAllocator, pSurface ); } - VkResult vkCreateIOSSurfaceMVK( VkInstance instance, - const VkIOSSurfaceCreateInfoMVK * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateIOSSurfaceMVK( instance, pCreateInfo, pAllocator, pSurface ); - } # endif /*VK_USE_PLATFORM_IOS_MVK*/ # if defined( VK_USE_PLATFORM_MACOS_MVK ) //=== VK_MVK_macos_surface === - //=== VK_MVK_macos_surface === VkResult vkCreateMacOSSurfaceMVK( VkInstance instance, const VkMacOSSurfaceCreateInfoMVK * pCreateInfo, @@ -5742,97 +3652,50 @@ namespace VULKAN_HPP_NAMESPACE { return ::vkCreateMacOSSurfaceMVK( instance, pCreateInfo, pAllocator, pSurface ); } - VkResult vkCreateMacOSSurfaceMVK( VkInstance instance, - const VkMacOSSurfaceCreateInfoMVK * pCreateInfo, - const VkAllocationCallbacks * 
pAllocator, - VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateMacOSSurfaceMVK( instance, pCreateInfo, pAllocator, pSurface ); - } # endif /*VK_USE_PLATFORM_MACOS_MVK*/ //=== VK_EXT_debug_utils === - //=== VK_EXT_debug_utils === - VkResult vkSetDebugUtilsObjectNameEXT( VkDevice device, const VkDebugUtilsObjectNameInfoEXT * pNameInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkSetDebugUtilsObjectNameEXT( device, pNameInfo ); - } VkResult vkSetDebugUtilsObjectNameEXT( VkDevice device, const VkDebugUtilsObjectNameInfoEXT * pNameInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkSetDebugUtilsObjectNameEXT( device, pNameInfo ); } - VkResult vkSetDebugUtilsObjectTagEXT( VkDevice device, const VkDebugUtilsObjectTagInfoEXT * pTagInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkSetDebugUtilsObjectTagEXT( device, pTagInfo ); - } VkResult vkSetDebugUtilsObjectTagEXT( VkDevice device, const VkDebugUtilsObjectTagInfoEXT * pTagInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkSetDebugUtilsObjectTagEXT( device, pTagInfo ); } - void vkQueueBeginDebugUtilsLabelEXT( VkQueue queue, const VkDebugUtilsLabelEXT * pLabelInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkQueueBeginDebugUtilsLabelEXT( queue, pLabelInfo ); - } void vkQueueBeginDebugUtilsLabelEXT( VkQueue queue, const VkDebugUtilsLabelEXT * pLabelInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkQueueBeginDebugUtilsLabelEXT( queue, pLabelInfo ); } - void vkQueueEndDebugUtilsLabelEXT( VkQueue queue ) const VULKAN_HPP_NOEXCEPT - { - return ::vkQueueEndDebugUtilsLabelEXT( queue ); - } void vkQueueEndDebugUtilsLabelEXT( VkQueue queue ) const VULKAN_HPP_NOEXCEPT { return ::vkQueueEndDebugUtilsLabelEXT( queue ); } - void vkQueueInsertDebugUtilsLabelEXT( VkQueue queue, const VkDebugUtilsLabelEXT * pLabelInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkQueueInsertDebugUtilsLabelEXT( queue, pLabelInfo ); - } void vkQueueInsertDebugUtilsLabelEXT( VkQueue queue, const VkDebugUtilsLabelEXT * pLabelInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkQueueInsertDebugUtilsLabelEXT( queue, pLabelInfo ); } - void vkCmdBeginDebugUtilsLabelEXT( VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT * pLabelInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBeginDebugUtilsLabelEXT( commandBuffer, pLabelInfo ); - } void vkCmdBeginDebugUtilsLabelEXT( VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT * pLabelInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdBeginDebugUtilsLabelEXT( commandBuffer, pLabelInfo ); } - void vkCmdEndDebugUtilsLabelEXT( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdEndDebugUtilsLabelEXT( commandBuffer ); - } void vkCmdEndDebugUtilsLabelEXT( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdEndDebugUtilsLabelEXT( commandBuffer ); } - void vkCmdInsertDebugUtilsLabelEXT( VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT * pLabelInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdInsertDebugUtilsLabelEXT( commandBuffer, pLabelInfo ); - } void vkCmdInsertDebugUtilsLabelEXT( VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT * pLabelInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdInsertDebugUtilsLabelEXT( commandBuffer, pLabelInfo ); } - VkResult vkCreateDebugUtilsMessengerEXT( VkInstance instance, - const VkDebugUtilsMessengerCreateInfoEXT * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkDebugUtilsMessengerEXT * pMessenger ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateDebugUtilsMessengerEXT( instance, pCreateInfo, pAllocator, pMessenger ); 
- } VkResult vkCreateDebugUtilsMessengerEXT( VkInstance instance, const VkDebugUtilsMessengerCreateInfoEXT * pCreateInfo, const VkAllocationCallbacks * pAllocator, @@ -5841,12 +3704,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCreateDebugUtilsMessengerEXT( instance, pCreateInfo, pAllocator, pMessenger ); } - void vkDestroyDebugUtilsMessengerEXT( VkInstance instance, - VkDebugUtilsMessengerEXT messenger, - const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyDebugUtilsMessengerEXT( instance, messenger, pAllocator ); - } void vkDestroyDebugUtilsMessengerEXT( VkInstance instance, VkDebugUtilsMessengerEXT messenger, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT @@ -5861,24 +3718,10 @@ namespace VULKAN_HPP_NAMESPACE { return ::vkSubmitDebugUtilsMessageEXT( instance, messageSeverity, messageTypes, pCallbackData ); } - void vkSubmitDebugUtilsMessageEXT( VkInstance instance, - VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, - VkDebugUtilsMessageTypeFlagsEXT messageTypes, - const VkDebugUtilsMessengerCallbackDataEXT * pCallbackData ) const VULKAN_HPP_NOEXCEPT - { - return ::vkSubmitDebugUtilsMessageEXT( instance, messageSeverity, messageTypes, pCallbackData ); - } # if defined( VK_USE_PLATFORM_ANDROID_KHR ) //=== VK_ANDROID_external_memory_android_hardware_buffer === - //=== VK_ANDROID_external_memory_android_hardware_buffer === - VkResult vkGetAndroidHardwareBufferPropertiesANDROID( VkDevice device, - const struct AHardwareBuffer * buffer, - VkAndroidHardwareBufferPropertiesANDROID * pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetAndroidHardwareBufferPropertiesANDROID( device, buffer, pProperties ); - } VkResult vkGetAndroidHardwareBufferPropertiesANDROID( VkDevice device, const struct AHardwareBuffer * buffer, VkAndroidHardwareBufferPropertiesANDROID * pProperties ) const VULKAN_HPP_NOEXCEPT @@ -5892,27 +3735,11 @@ namespace VULKAN_HPP_NAMESPACE { return ::vkGetMemoryAndroidHardwareBufferANDROID( device, pInfo, pBuffer ); } - VkResult vkGetMemoryAndroidHardwareBufferANDROID( VkDevice device, - const VkMemoryGetAndroidHardwareBufferInfoANDROID * pInfo, - struct AHardwareBuffer ** pBuffer ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetMemoryAndroidHardwareBufferANDROID( device, pInfo, pBuffer ); - } # endif /*VK_USE_PLATFORM_ANDROID_KHR*/ # if defined( VK_ENABLE_BETA_EXTENSIONS ) //=== VK_AMDX_shader_enqueue === - //=== VK_AMDX_shader_enqueue === - VkResult vkCreateExecutionGraphPipelinesAMDX( VkDevice device, - VkPipelineCache pipelineCache, - uint32_t createInfoCount, - const VkExecutionGraphPipelineCreateInfoAMDX * pCreateInfos, - const VkAllocationCallbacks * pAllocator, - VkPipeline * pPipelines ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateExecutionGraphPipelinesAMDX( device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines ); - } VkResult vkCreateExecutionGraphPipelinesAMDX( VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, @@ -5923,12 +3750,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCreateExecutionGraphPipelinesAMDX( device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines ); } - VkResult vkGetExecutionGraphPipelineScratchSizeAMDX( VkDevice device, - VkPipeline executionGraph, - VkExecutionGraphPipelineScratchSizeAMDX * pSizeInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetExecutionGraphPipelineScratchSizeAMDX( device, executionGraph, pSizeInfo ); - } VkResult vkGetExecutionGraphPipelineScratchSizeAMDX( VkDevice device, 
VkPipeline executionGraph, VkExecutionGraphPipelineScratchSizeAMDX * pSizeInfo ) const VULKAN_HPP_NOEXCEPT @@ -5936,13 +3757,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkGetExecutionGraphPipelineScratchSizeAMDX( device, executionGraph, pSizeInfo ); } - VkResult vkGetExecutionGraphPipelineNodeIndexAMDX( VkDevice device, - VkPipeline executionGraph, - const VkPipelineShaderStageNodeCreateInfoAMDX * pNodeInfo, - uint32_t * pNodeIndex ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetExecutionGraphPipelineNodeIndexAMDX( device, executionGraph, pNodeInfo, pNodeIndex ); - } VkResult vkGetExecutionGraphPipelineNodeIndexAMDX( VkDevice device, VkPipeline executionGraph, const VkPipelineShaderStageNodeCreateInfoAMDX * pNodeInfo, @@ -5951,13 +3765,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkGetExecutionGraphPipelineNodeIndexAMDX( device, executionGraph, pNodeInfo, pNodeIndex ); } - void vkCmdInitializeGraphScratchMemoryAMDX( VkCommandBuffer commandBuffer, - VkPipeline executionGraph, - VkDeviceAddress scratch, - VkDeviceSize scratchSize ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdInitializeGraphScratchMemoryAMDX( commandBuffer, executionGraph, scratch, scratchSize ); - } void vkCmdInitializeGraphScratchMemoryAMDX( VkCommandBuffer commandBuffer, VkPipeline executionGraph, VkDeviceAddress scratch, @@ -5966,13 +3773,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCmdInitializeGraphScratchMemoryAMDX( commandBuffer, executionGraph, scratch, scratchSize ); } - void vkCmdDispatchGraphAMDX( VkCommandBuffer commandBuffer, - VkDeviceAddress scratch, - VkDeviceSize scratchSize, - const VkDispatchGraphCountInfoAMDX * pCountInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDispatchGraphAMDX( commandBuffer, scratch, scratchSize, pCountInfo ); - } void vkCmdDispatchGraphAMDX( VkCommandBuffer commandBuffer, VkDeviceAddress scratch, VkDeviceSize scratchSize, @@ -5981,13 +3781,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCmdDispatchGraphAMDX( commandBuffer, scratch, scratchSize, pCountInfo ); } - void vkCmdDispatchGraphIndirectAMDX( VkCommandBuffer commandBuffer, - VkDeviceAddress scratch, - VkDeviceSize scratchSize, - const VkDispatchGraphCountInfoAMDX * pCountInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDispatchGraphIndirectAMDX( commandBuffer, scratch, scratchSize, pCountInfo ); - } void vkCmdDispatchGraphIndirectAMDX( VkCommandBuffer commandBuffer, VkDeviceAddress scratch, VkDeviceSize scratchSize, @@ -6003,33 +3796,15 @@ namespace VULKAN_HPP_NAMESPACE { return ::vkCmdDispatchGraphIndirectCountAMDX( commandBuffer, scratch, scratchSize, countInfo ); } - void vkCmdDispatchGraphIndirectCountAMDX( VkCommandBuffer commandBuffer, - VkDeviceAddress scratch, - VkDeviceSize scratchSize, - VkDeviceAddress countInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDispatchGraphIndirectCountAMDX( commandBuffer, scratch, scratchSize, countInfo ); - } # endif /*VK_ENABLE_BETA_EXTENSIONS*/ //=== VK_EXT_sample_locations === - //=== VK_EXT_sample_locations === - void vkCmdSetSampleLocationsEXT( VkCommandBuffer commandBuffer, const VkSampleLocationsInfoEXT * pSampleLocationsInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetSampleLocationsEXT( commandBuffer, pSampleLocationsInfo ); - } void vkCmdSetSampleLocationsEXT( VkCommandBuffer commandBuffer, const VkSampleLocationsInfoEXT * pSampleLocationsInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdSetSampleLocationsEXT( commandBuffer, pSampleLocationsInfo ); } - void vkGetPhysicalDeviceMultisamplePropertiesEXT( VkPhysicalDevice physicalDevice, - 
VkSampleCountFlagBits samples, - VkMultisamplePropertiesEXT * pMultisampleProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceMultisamplePropertiesEXT( physicalDevice, samples, pMultisampleProperties ); - } void vkGetPhysicalDeviceMultisamplePropertiesEXT( VkPhysicalDevice physicalDevice, VkSampleCountFlagBits samples, VkMultisamplePropertiesEXT * pMultisampleProperties ) const VULKAN_HPP_NOEXCEPT @@ -6037,15 +3812,8 @@ namespace VULKAN_HPP_NAMESPACE return ::vkGetPhysicalDeviceMultisamplePropertiesEXT( physicalDevice, samples, pMultisampleProperties ); } - //=== VK_KHR_get_memory_requirements2 === //=== VK_KHR_get_memory_requirements2 === - void vkGetImageMemoryRequirements2KHR( VkDevice device, - const VkImageMemoryRequirementsInfo2 * pInfo, - VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetImageMemoryRequirements2KHR( device, pInfo, pMemoryRequirements ); - } void vkGetImageMemoryRequirements2KHR( VkDevice device, const VkImageMemoryRequirementsInfo2 * pInfo, VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT @@ -6053,12 +3821,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkGetImageMemoryRequirements2KHR( device, pInfo, pMemoryRequirements ); } - void vkGetBufferMemoryRequirements2KHR( VkDevice device, - const VkBufferMemoryRequirementsInfo2 * pInfo, - VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetBufferMemoryRequirements2KHR( device, pInfo, pMemoryRequirements ); - } void vkGetBufferMemoryRequirements2KHR( VkDevice device, const VkBufferMemoryRequirementsInfo2 * pInfo, VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT @@ -6066,13 +3828,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkGetBufferMemoryRequirements2KHR( device, pInfo, pMemoryRequirements ); } - void vkGetImageSparseMemoryRequirements2KHR( VkDevice device, - const VkImageSparseMemoryRequirementsInfo2 * pInfo, - uint32_t * pSparseMemoryRequirementCount, - VkSparseImageMemoryRequirements2 * pSparseMemoryRequirements ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetImageSparseMemoryRequirements2KHR( device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements ); - } void vkGetImageSparseMemoryRequirements2KHR( VkDevice device, const VkImageSparseMemoryRequirementsInfo2 * pInfo, uint32_t * pSparseMemoryRequirementCount, @@ -6081,16 +3836,8 @@ namespace VULKAN_HPP_NAMESPACE return ::vkGetImageSparseMemoryRequirements2KHR( device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements ); } - //=== VK_KHR_acceleration_structure === //=== VK_KHR_acceleration_structure === - VkResult vkCreateAccelerationStructureKHR( VkDevice device, - const VkAccelerationStructureCreateInfoKHR * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkAccelerationStructureKHR * pAccelerationStructure ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateAccelerationStructureKHR( device, pCreateInfo, pAllocator, pAccelerationStructure ); - } VkResult vkCreateAccelerationStructureKHR( VkDevice device, const VkAccelerationStructureCreateInfoKHR * pCreateInfo, const VkAllocationCallbacks * pAllocator, @@ -6099,12 +3846,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCreateAccelerationStructureKHR( device, pCreateInfo, pAllocator, pAccelerationStructure ); } - void vkDestroyAccelerationStructureKHR( VkDevice device, - VkAccelerationStructureKHR accelerationStructure, - const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyAccelerationStructureKHR( 
device, accelerationStructure, pAllocator ); - } void vkDestroyAccelerationStructureKHR( VkDevice device, VkAccelerationStructureKHR accelerationStructure, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT @@ -6112,13 +3853,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkDestroyAccelerationStructureKHR( device, accelerationStructure, pAllocator ); } - void vkCmdBuildAccelerationStructuresKHR( VkCommandBuffer commandBuffer, - uint32_t infoCount, - const VkAccelerationStructureBuildGeometryInfoKHR * pInfos, - const VkAccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBuildAccelerationStructuresKHR( commandBuffer, infoCount, pInfos, ppBuildRangeInfos ); - } void vkCmdBuildAccelerationStructuresKHR( VkCommandBuffer commandBuffer, uint32_t infoCount, const VkAccelerationStructureBuildGeometryInfoKHR * pInfos, @@ -6127,16 +3861,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCmdBuildAccelerationStructuresKHR( commandBuffer, infoCount, pInfos, ppBuildRangeInfos ); } - void vkCmdBuildAccelerationStructuresIndirectKHR( VkCommandBuffer commandBuffer, - uint32_t infoCount, - const VkAccelerationStructureBuildGeometryInfoKHR * pInfos, - const VkDeviceAddress * pIndirectDeviceAddresses, - const uint32_t * pIndirectStrides, - const uint32_t * const * ppMaxPrimitiveCounts ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBuildAccelerationStructuresIndirectKHR( - commandBuffer, infoCount, pInfos, pIndirectDeviceAddresses, pIndirectStrides, ppMaxPrimitiveCounts ); - } void vkCmdBuildAccelerationStructuresIndirectKHR( VkCommandBuffer commandBuffer, uint32_t infoCount, const VkAccelerationStructureBuildGeometryInfoKHR * pInfos, @@ -6148,14 +3872,6 @@ namespace VULKAN_HPP_NAMESPACE commandBuffer, infoCount, pInfos, pIndirectDeviceAddresses, pIndirectStrides, ppMaxPrimitiveCounts ); } - VkResult vkBuildAccelerationStructuresKHR( VkDevice device, - VkDeferredOperationKHR deferredOperation, - uint32_t infoCount, - const VkAccelerationStructureBuildGeometryInfoKHR * pInfos, - const VkAccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos ) const VULKAN_HPP_NOEXCEPT - { - return ::vkBuildAccelerationStructuresKHR( device, deferredOperation, infoCount, pInfos, ppBuildRangeInfos ); - } VkResult vkBuildAccelerationStructuresKHR( VkDevice device, VkDeferredOperationKHR deferredOperation, uint32_t infoCount, @@ -6165,12 +3881,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkBuildAccelerationStructuresKHR( device, deferredOperation, infoCount, pInfos, ppBuildRangeInfos ); } - VkResult vkCopyAccelerationStructureKHR( VkDevice device, - VkDeferredOperationKHR deferredOperation, - const VkCopyAccelerationStructureInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCopyAccelerationStructureKHR( device, deferredOperation, pInfo ); - } VkResult vkCopyAccelerationStructureKHR( VkDevice device, VkDeferredOperationKHR deferredOperation, const VkCopyAccelerationStructureInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT @@ -6178,12 +3888,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCopyAccelerationStructureKHR( device, deferredOperation, pInfo ); } - VkResult vkCopyAccelerationStructureToMemoryKHR( VkDevice device, - VkDeferredOperationKHR deferredOperation, - const VkCopyAccelerationStructureToMemoryInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCopyAccelerationStructureToMemoryKHR( device, deferredOperation, pInfo ); - } VkResult vkCopyAccelerationStructureToMemoryKHR( VkDevice device, VkDeferredOperationKHR 
deferredOperation, const VkCopyAccelerationStructureToMemoryInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT @@ -6191,12 +3895,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCopyAccelerationStructureToMemoryKHR( device, deferredOperation, pInfo ); } - VkResult vkCopyMemoryToAccelerationStructureKHR( VkDevice device, - VkDeferredOperationKHR deferredOperation, - const VkCopyMemoryToAccelerationStructureInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCopyMemoryToAccelerationStructureKHR( device, deferredOperation, pInfo ); - } VkResult vkCopyMemoryToAccelerationStructureKHR( VkDevice device, VkDeferredOperationKHR deferredOperation, const VkCopyMemoryToAccelerationStructureInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT @@ -6204,16 +3902,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCopyMemoryToAccelerationStructureKHR( device, deferredOperation, pInfo ); } - VkResult vkWriteAccelerationStructuresPropertiesKHR( VkDevice device, - uint32_t accelerationStructureCount, - const VkAccelerationStructureKHR * pAccelerationStructures, - VkQueryType queryType, - size_t dataSize, - void * pData, - size_t stride ) const VULKAN_HPP_NOEXCEPT - { - return ::vkWriteAccelerationStructuresPropertiesKHR( device, accelerationStructureCount, pAccelerationStructures, queryType, dataSize, pData, stride ); - } VkResult vkWriteAccelerationStructuresPropertiesKHR( VkDevice device, uint32_t accelerationStructureCount, const VkAccelerationStructureKHR * pAccelerationStructures, @@ -6225,58 +3913,29 @@ namespace VULKAN_HPP_NAMESPACE return ::vkWriteAccelerationStructuresPropertiesKHR( device, accelerationStructureCount, pAccelerationStructures, queryType, dataSize, pData, stride ); } - void vkCmdCopyAccelerationStructureKHR( VkCommandBuffer commandBuffer, const VkCopyAccelerationStructureInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdCopyAccelerationStructureKHR( commandBuffer, pInfo ); - } void vkCmdCopyAccelerationStructureKHR( VkCommandBuffer commandBuffer, const VkCopyAccelerationStructureInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdCopyAccelerationStructureKHR( commandBuffer, pInfo ); } - void vkCmdCopyAccelerationStructureToMemoryKHR( VkCommandBuffer commandBuffer, - const VkCopyAccelerationStructureToMemoryInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdCopyAccelerationStructureToMemoryKHR( commandBuffer, pInfo ); - } void vkCmdCopyAccelerationStructureToMemoryKHR( VkCommandBuffer commandBuffer, const VkCopyAccelerationStructureToMemoryInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdCopyAccelerationStructureToMemoryKHR( commandBuffer, pInfo ); } - void vkCmdCopyMemoryToAccelerationStructureKHR( VkCommandBuffer commandBuffer, - const VkCopyMemoryToAccelerationStructureInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdCopyMemoryToAccelerationStructureKHR( commandBuffer, pInfo ); - } void vkCmdCopyMemoryToAccelerationStructureKHR( VkCommandBuffer commandBuffer, const VkCopyMemoryToAccelerationStructureInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdCopyMemoryToAccelerationStructureKHR( commandBuffer, pInfo ); } - VkDeviceAddress vkGetAccelerationStructureDeviceAddressKHR( VkDevice device, - const VkAccelerationStructureDeviceAddressInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetAccelerationStructureDeviceAddressKHR( device, pInfo ); - } VkDeviceAddress vkGetAccelerationStructureDeviceAddressKHR( VkDevice device, const VkAccelerationStructureDeviceAddressInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT { return 
::vkGetAccelerationStructureDeviceAddressKHR( device, pInfo ); } - void vkCmdWriteAccelerationStructuresPropertiesKHR( VkCommandBuffer commandBuffer, - uint32_t accelerationStructureCount, - const VkAccelerationStructureKHR * pAccelerationStructures, - VkQueryType queryType, - VkQueryPool queryPool, - uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdWriteAccelerationStructuresPropertiesKHR( - commandBuffer, accelerationStructureCount, pAccelerationStructures, queryType, queryPool, firstQuery ); - } void vkCmdWriteAccelerationStructuresPropertiesKHR( VkCommandBuffer commandBuffer, uint32_t accelerationStructureCount, const VkAccelerationStructureKHR * pAccelerationStructures, @@ -6288,12 +3947,6 @@ namespace VULKAN_HPP_NAMESPACE commandBuffer, accelerationStructureCount, pAccelerationStructures, queryType, queryPool, firstQuery ); } - void vkGetDeviceAccelerationStructureCompatibilityKHR( VkDevice device, - const VkAccelerationStructureVersionInfoKHR * pVersionInfo, - VkAccelerationStructureCompatibilityKHR * pCompatibility ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDeviceAccelerationStructureCompatibilityKHR( device, pVersionInfo, pCompatibility ); - } void vkGetDeviceAccelerationStructureCompatibilityKHR( VkDevice device, const VkAccelerationStructureVersionInfoKHR * pVersionInfo, VkAccelerationStructureCompatibilityKHR * pCompatibility ) const VULKAN_HPP_NOEXCEPT @@ -6301,14 +3954,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkGetDeviceAccelerationStructureCompatibilityKHR( device, pVersionInfo, pCompatibility ); } - void vkGetAccelerationStructureBuildSizesKHR( VkDevice device, - VkAccelerationStructureBuildTypeKHR buildType, - const VkAccelerationStructureBuildGeometryInfoKHR * pBuildInfo, - const uint32_t * pMaxPrimitiveCounts, - VkAccelerationStructureBuildSizesInfoKHR * pSizeInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetAccelerationStructureBuildSizesKHR( device, buildType, pBuildInfo, pMaxPrimitiveCounts, pSizeInfo ); - } void vkGetAccelerationStructureBuildSizesKHR( VkDevice device, VkAccelerationStructureBuildTypeKHR buildType, const VkAccelerationStructureBuildGeometryInfoKHR * pBuildInfo, @@ -6318,21 +3963,8 @@ namespace VULKAN_HPP_NAMESPACE return ::vkGetAccelerationStructureBuildSizesKHR( device, buildType, pBuildInfo, pMaxPrimitiveCounts, pSizeInfo ); } - //=== VK_KHR_ray_tracing_pipeline === //=== VK_KHR_ray_tracing_pipeline === - void vkCmdTraceRaysKHR( VkCommandBuffer commandBuffer, - const VkStridedDeviceAddressRegionKHR * pRaygenShaderBindingTable, - const VkStridedDeviceAddressRegionKHR * pMissShaderBindingTable, - const VkStridedDeviceAddressRegionKHR * pHitShaderBindingTable, - const VkStridedDeviceAddressRegionKHR * pCallableShaderBindingTable, - uint32_t width, - uint32_t height, - uint32_t depth ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdTraceRaysKHR( - commandBuffer, pRaygenShaderBindingTable, pMissShaderBindingTable, pHitShaderBindingTable, pCallableShaderBindingTable, width, height, depth ); - } void vkCmdTraceRaysKHR( VkCommandBuffer commandBuffer, const VkStridedDeviceAddressRegionKHR * pRaygenShaderBindingTable, const VkStridedDeviceAddressRegionKHR * pMissShaderBindingTable, @@ -6346,16 +3978,6 @@ namespace VULKAN_HPP_NAMESPACE commandBuffer, pRaygenShaderBindingTable, pMissShaderBindingTable, pHitShaderBindingTable, pCallableShaderBindingTable, width, height, depth ); } - VkResult vkCreateRayTracingPipelinesKHR( VkDevice device, - VkDeferredOperationKHR deferredOperation, - VkPipelineCache pipelineCache, - uint32_t 
createInfoCount, - const VkRayTracingPipelineCreateInfoKHR * pCreateInfos, - const VkAllocationCallbacks * pAllocator, - VkPipeline * pPipelines ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateRayTracingPipelinesKHR( device, deferredOperation, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines ); - } VkResult vkCreateRayTracingPipelinesKHR( VkDevice device, VkDeferredOperationKHR deferredOperation, VkPipelineCache pipelineCache, @@ -6367,38 +3989,18 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCreateRayTracingPipelinesKHR( device, deferredOperation, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines ); } - VkResult vkGetRayTracingShaderGroupHandlesKHR( - VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void * pData ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetRayTracingShaderGroupHandlesKHR( device, pipeline, firstGroup, groupCount, dataSize, pData ); - } VkResult vkGetRayTracingShaderGroupHandlesKHR( VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void * pData ) const VULKAN_HPP_NOEXCEPT { return ::vkGetRayTracingShaderGroupHandlesKHR( device, pipeline, firstGroup, groupCount, dataSize, pData ); } - VkResult vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( - VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void * pData ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( device, pipeline, firstGroup, groupCount, dataSize, pData ); - } VkResult vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void * pData ) const VULKAN_HPP_NOEXCEPT { return ::vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( device, pipeline, firstGroup, groupCount, dataSize, pData ); } - void vkCmdTraceRaysIndirectKHR( VkCommandBuffer commandBuffer, - const VkStridedDeviceAddressRegionKHR * pRaygenShaderBindingTable, - const VkStridedDeviceAddressRegionKHR * pMissShaderBindingTable, - const VkStridedDeviceAddressRegionKHR * pHitShaderBindingTable, - const VkStridedDeviceAddressRegionKHR * pCallableShaderBindingTable, - VkDeviceAddress indirectDeviceAddress ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdTraceRaysIndirectKHR( - commandBuffer, pRaygenShaderBindingTable, pMissShaderBindingTable, pHitShaderBindingTable, pCallableShaderBindingTable, indirectDeviceAddress ); - } void vkCmdTraceRaysIndirectKHR( VkCommandBuffer commandBuffer, const VkStridedDeviceAddressRegionKHR * pRaygenShaderBindingTable, const VkStridedDeviceAddressRegionKHR * pMissShaderBindingTable, @@ -6410,13 +4012,6 @@ namespace VULKAN_HPP_NAMESPACE commandBuffer, pRaygenShaderBindingTable, pMissShaderBindingTable, pHitShaderBindingTable, pCallableShaderBindingTable, indirectDeviceAddress ); } - VkDeviceSize vkGetRayTracingShaderGroupStackSizeKHR( VkDevice device, - VkPipeline pipeline, - uint32_t group, - VkShaderGroupShaderKHR groupShader ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetRayTracingShaderGroupStackSizeKHR( device, pipeline, group, groupShader ); - } VkDeviceSize vkGetRayTracingShaderGroupStackSizeKHR( VkDevice device, VkPipeline pipeline, uint32_t group, @@ -6425,25 +4020,13 @@ namespace VULKAN_HPP_NAMESPACE return ::vkGetRayTracingShaderGroupStackSizeKHR( device, pipeline, group, groupShader ); } - void vkCmdSetRayTracingPipelineStackSizeKHR( VkCommandBuffer commandBuffer, uint32_t pipelineStackSize ) const 
VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetRayTracingPipelineStackSizeKHR( commandBuffer, pipelineStackSize ); - } void vkCmdSetRayTracingPipelineStackSizeKHR( VkCommandBuffer commandBuffer, uint32_t pipelineStackSize ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdSetRayTracingPipelineStackSizeKHR( commandBuffer, pipelineStackSize ); } - //=== VK_KHR_sampler_ycbcr_conversion === //=== VK_KHR_sampler_ycbcr_conversion === - VkResult vkCreateSamplerYcbcrConversionKHR( VkDevice device, - const VkSamplerYcbcrConversionCreateInfo * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkSamplerYcbcrConversion * pYcbcrConversion ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateSamplerYcbcrConversionKHR( device, pCreateInfo, pAllocator, pYcbcrConversion ); - } VkResult vkCreateSamplerYcbcrConversionKHR( VkDevice device, const VkSamplerYcbcrConversionCreateInfo * pCreateInfo, const VkAllocationCallbacks * pAllocator, @@ -6452,12 +4035,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCreateSamplerYcbcrConversionKHR( device, pCreateInfo, pAllocator, pYcbcrConversion ); } - void vkDestroySamplerYcbcrConversionKHR( VkDevice device, - VkSamplerYcbcrConversion ycbcrConversion, - const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroySamplerYcbcrConversionKHR( device, ycbcrConversion, pAllocator ); - } void vkDestroySamplerYcbcrConversionKHR( VkDevice device, VkSamplerYcbcrConversion ycbcrConversion, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT @@ -6465,36 +4042,20 @@ namespace VULKAN_HPP_NAMESPACE return ::vkDestroySamplerYcbcrConversionKHR( device, ycbcrConversion, pAllocator ); } - //=== VK_KHR_bind_memory2 === //=== VK_KHR_bind_memory2 === - VkResult vkBindBufferMemory2KHR( VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo * pBindInfos ) const VULKAN_HPP_NOEXCEPT - { - return ::vkBindBufferMemory2KHR( device, bindInfoCount, pBindInfos ); - } VkResult vkBindBufferMemory2KHR( VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo * pBindInfos ) const VULKAN_HPP_NOEXCEPT { return ::vkBindBufferMemory2KHR( device, bindInfoCount, pBindInfos ); } - VkResult vkBindImageMemory2KHR( VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo * pBindInfos ) const VULKAN_HPP_NOEXCEPT - { - return ::vkBindImageMemory2KHR( device, bindInfoCount, pBindInfos ); - } VkResult vkBindImageMemory2KHR( VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo * pBindInfos ) const VULKAN_HPP_NOEXCEPT { return ::vkBindImageMemory2KHR( device, bindInfoCount, pBindInfos ); } - //=== VK_EXT_image_drm_format_modifier === //=== VK_EXT_image_drm_format_modifier === - VkResult vkGetImageDrmFormatModifierPropertiesEXT( VkDevice device, - VkImage image, - VkImageDrmFormatModifierPropertiesEXT * pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetImageDrmFormatModifierPropertiesEXT( device, image, pProperties ); - } VkResult vkGetImageDrmFormatModifierPropertiesEXT( VkDevice device, VkImage image, VkImageDrmFormatModifierPropertiesEXT * pProperties ) const VULKAN_HPP_NOEXCEPT @@ -6502,16 +4063,8 @@ namespace VULKAN_HPP_NAMESPACE return ::vkGetImageDrmFormatModifierPropertiesEXT( device, image, pProperties ); } - //=== VK_EXT_validation_cache === //=== VK_EXT_validation_cache === - VkResult vkCreateValidationCacheEXT( VkDevice device, - const VkValidationCacheCreateInfoEXT * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkValidationCacheEXT * pValidationCache ) const VULKAN_HPP_NOEXCEPT - { - 
return ::vkCreateValidationCacheEXT( device, pCreateInfo, pAllocator, pValidationCache ); - } VkResult vkCreateValidationCacheEXT( VkDevice device, const VkValidationCacheCreateInfoEXT * pCreateInfo, const VkAllocationCallbacks * pAllocator, @@ -6520,24 +4073,12 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCreateValidationCacheEXT( device, pCreateInfo, pAllocator, pValidationCache ); } - void - vkDestroyValidationCacheEXT( VkDevice device, VkValidationCacheEXT validationCache, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyValidationCacheEXT( device, validationCache, pAllocator ); - } void vkDestroyValidationCacheEXT( VkDevice device, VkValidationCacheEXT validationCache, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT { return ::vkDestroyValidationCacheEXT( device, validationCache, pAllocator ); } - VkResult vkMergeValidationCachesEXT( VkDevice device, - VkValidationCacheEXT dstCache, - uint32_t srcCacheCount, - const VkValidationCacheEXT * pSrcCaches ) const VULKAN_HPP_NOEXCEPT - { - return ::vkMergeValidationCachesEXT( device, dstCache, srcCacheCount, pSrcCaches ); - } VkResult vkMergeValidationCachesEXT( VkDevice device, VkValidationCacheEXT dstCache, uint32_t srcCacheCount, @@ -6546,34 +4087,18 @@ namespace VULKAN_HPP_NAMESPACE return ::vkMergeValidationCachesEXT( device, dstCache, srcCacheCount, pSrcCaches ); } - VkResult vkGetValidationCacheDataEXT( VkDevice device, VkValidationCacheEXT validationCache, size_t * pDataSize, void * pData ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetValidationCacheDataEXT( device, validationCache, pDataSize, pData ); - } VkResult vkGetValidationCacheDataEXT( VkDevice device, VkValidationCacheEXT validationCache, size_t * pDataSize, void * pData ) const VULKAN_HPP_NOEXCEPT { return ::vkGetValidationCacheDataEXT( device, validationCache, pDataSize, pData ); } - //=== VK_NV_shading_rate_image === //=== VK_NV_shading_rate_image === - void vkCmdBindShadingRateImageNV( VkCommandBuffer commandBuffer, VkImageView imageView, VkImageLayout imageLayout ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBindShadingRateImageNV( commandBuffer, imageView, imageLayout ); - } void vkCmdBindShadingRateImageNV( VkCommandBuffer commandBuffer, VkImageView imageView, VkImageLayout imageLayout ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdBindShadingRateImageNV( commandBuffer, imageView, imageLayout ); } - void vkCmdSetViewportShadingRatePaletteNV( VkCommandBuffer commandBuffer, - uint32_t firstViewport, - uint32_t viewportCount, - const VkShadingRatePaletteNV * pShadingRatePalettes ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetViewportShadingRatePaletteNV( commandBuffer, firstViewport, viewportCount, pShadingRatePalettes ); - } void vkCmdSetViewportShadingRatePaletteNV( VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, @@ -6582,13 +4107,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCmdSetViewportShadingRatePaletteNV( commandBuffer, firstViewport, viewportCount, pShadingRatePalettes ); } - void vkCmdSetCoarseSampleOrderNV( VkCommandBuffer commandBuffer, - VkCoarseSampleOrderTypeNV sampleOrderType, - uint32_t customSampleOrderCount, - const VkCoarseSampleOrderCustomNV * pCustomSampleOrders ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetCoarseSampleOrderNV( commandBuffer, sampleOrderType, customSampleOrderCount, pCustomSampleOrders ); - } void vkCmdSetCoarseSampleOrderNV( VkCommandBuffer commandBuffer, VkCoarseSampleOrderTypeNV sampleOrderType, uint32_t 
customSampleOrderCount, @@ -6597,16 +4115,8 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCmdSetCoarseSampleOrderNV( commandBuffer, sampleOrderType, customSampleOrderCount, pCustomSampleOrders ); } - //=== VK_NV_ray_tracing === //=== VK_NV_ray_tracing === - VkResult vkCreateAccelerationStructureNV( VkDevice device, - const VkAccelerationStructureCreateInfoNV * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkAccelerationStructureNV * pAccelerationStructure ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateAccelerationStructureNV( device, pCreateInfo, pAllocator, pAccelerationStructure ); - } VkResult vkCreateAccelerationStructureNV( VkDevice device, const VkAccelerationStructureCreateInfoNV * pCreateInfo, const VkAllocationCallbacks * pAllocator, @@ -6615,12 +4125,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCreateAccelerationStructureNV( device, pCreateInfo, pAllocator, pAccelerationStructure ); } - void vkDestroyAccelerationStructureNV( VkDevice device, - VkAccelerationStructureNV accelerationStructure, - const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyAccelerationStructureNV( device, accelerationStructure, pAllocator ); - } void vkDestroyAccelerationStructureNV( VkDevice device, VkAccelerationStructureNV accelerationStructure, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT @@ -6628,12 +4132,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkDestroyAccelerationStructureNV( device, accelerationStructure, pAllocator ); } - void vkGetAccelerationStructureMemoryRequirementsNV( VkDevice device, - const VkAccelerationStructureMemoryRequirementsInfoNV * pInfo, - VkMemoryRequirements2KHR * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetAccelerationStructureMemoryRequirementsNV( device, pInfo, pMemoryRequirements ); - } void vkGetAccelerationStructureMemoryRequirementsNV( VkDevice device, const VkAccelerationStructureMemoryRequirementsInfoNV * pInfo, VkMemoryRequirements2KHR * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT @@ -6641,12 +4139,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkGetAccelerationStructureMemoryRequirementsNV( device, pInfo, pMemoryRequirements ); } - VkResult vkBindAccelerationStructureMemoryNV( VkDevice device, - uint32_t bindInfoCount, - const VkBindAccelerationStructureMemoryInfoNV * pBindInfos ) const VULKAN_HPP_NOEXCEPT - { - return ::vkBindAccelerationStructureMemoryNV( device, bindInfoCount, pBindInfos ); - } VkResult vkBindAccelerationStructureMemoryNV( VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoNV * pBindInfos ) const VULKAN_HPP_NOEXCEPT @@ -6654,18 +4146,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkBindAccelerationStructureMemoryNV( device, bindInfoCount, pBindInfos ); } - void vkCmdBuildAccelerationStructureNV( VkCommandBuffer commandBuffer, - const VkAccelerationStructureInfoNV * pInfo, - VkBuffer instanceData, - VkDeviceSize instanceOffset, - VkBool32 update, - VkAccelerationStructureNV dst, - VkAccelerationStructureNV src, - VkBuffer scratch, - VkDeviceSize scratchOffset ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBuildAccelerationStructureNV( commandBuffer, pInfo, instanceData, instanceOffset, update, dst, src, scratch, scratchOffset ); - } void vkCmdBuildAccelerationStructureNV( VkCommandBuffer commandBuffer, const VkAccelerationStructureInfoNV * pInfo, VkBuffer instanceData, @@ -6679,13 +4159,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCmdBuildAccelerationStructureNV( commandBuffer, pInfo, instanceData, 
instanceOffset, update, dst, src, scratch, scratchOffset ); } - void vkCmdCopyAccelerationStructureNV( VkCommandBuffer commandBuffer, - VkAccelerationStructureNV dst, - VkAccelerationStructureNV src, - VkCopyAccelerationStructureModeKHR mode ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdCopyAccelerationStructureNV( commandBuffer, dst, src, mode ); - } void vkCmdCopyAccelerationStructureNV( VkCommandBuffer commandBuffer, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, @@ -6694,38 +4167,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCmdCopyAccelerationStructureNV( commandBuffer, dst, src, mode ); } - void vkCmdTraceRaysNV( VkCommandBuffer commandBuffer, - VkBuffer raygenShaderBindingTableBuffer, - VkDeviceSize raygenShaderBindingOffset, - VkBuffer missShaderBindingTableBuffer, - VkDeviceSize missShaderBindingOffset, - VkDeviceSize missShaderBindingStride, - VkBuffer hitShaderBindingTableBuffer, - VkDeviceSize hitShaderBindingOffset, - VkDeviceSize hitShaderBindingStride, - VkBuffer callableShaderBindingTableBuffer, - VkDeviceSize callableShaderBindingOffset, - VkDeviceSize callableShaderBindingStride, - uint32_t width, - uint32_t height, - uint32_t depth ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdTraceRaysNV( commandBuffer, - raygenShaderBindingTableBuffer, - raygenShaderBindingOffset, - missShaderBindingTableBuffer, - missShaderBindingOffset, - missShaderBindingStride, - hitShaderBindingTableBuffer, - hitShaderBindingOffset, - hitShaderBindingStride, - callableShaderBindingTableBuffer, - callableShaderBindingOffset, - callableShaderBindingStride, - width, - height, - depth ); - } void vkCmdTraceRaysNV( VkCommandBuffer commandBuffer, VkBuffer raygenShaderBindingTableBuffer, VkDeviceSize raygenShaderBindingOffset, @@ -6759,15 +4200,6 @@ namespace VULKAN_HPP_NAMESPACE depth ); } - VkResult vkCreateRayTracingPipelinesNV( VkDevice device, - VkPipelineCache pipelineCache, - uint32_t createInfoCount, - const VkRayTracingPipelineCreateInfoNV * pCreateInfos, - const VkAllocationCallbacks * pAllocator, - VkPipeline * pPipelines ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateRayTracingPipelinesNV( device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines ); - } VkResult vkCreateRayTracingPipelinesNV( VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, @@ -6778,24 +4210,12 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCreateRayTracingPipelinesNV( device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines ); } - VkResult vkGetRayTracingShaderGroupHandlesNV( - VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void * pData ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetRayTracingShaderGroupHandlesNV( device, pipeline, firstGroup, groupCount, dataSize, pData ); - } VkResult vkGetRayTracingShaderGroupHandlesNV( VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void * pData ) const VULKAN_HPP_NOEXCEPT { return ::vkGetRayTracingShaderGroupHandlesNV( device, pipeline, firstGroup, groupCount, dataSize, pData ); } - VkResult vkGetAccelerationStructureHandleNV( VkDevice device, - VkAccelerationStructureNV accelerationStructure, - size_t dataSize, - void * pData ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetAccelerationStructureHandleNV( device, accelerationStructure, dataSize, pData ); - } VkResult vkGetAccelerationStructureHandleNV( VkDevice device, VkAccelerationStructureNV accelerationStructure, size_t dataSize, @@ -6804,16 +4224,6 
@@ namespace VULKAN_HPP_NAMESPACE return ::vkGetAccelerationStructureHandleNV( device, accelerationStructure, dataSize, pData ); } - void vkCmdWriteAccelerationStructuresPropertiesNV( VkCommandBuffer commandBuffer, - uint32_t accelerationStructureCount, - const VkAccelerationStructureNV * pAccelerationStructures, - VkQueryType queryType, - VkQueryPool queryPool, - uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdWriteAccelerationStructuresPropertiesNV( - commandBuffer, accelerationStructureCount, pAccelerationStructures, queryType, queryPool, firstQuery ); - } void vkCmdWriteAccelerationStructuresPropertiesNV( VkCommandBuffer commandBuffer, uint32_t accelerationStructureCount, const VkAccelerationStructureNV * pAccelerationStructures, @@ -6825,24 +4235,13 @@ namespace VULKAN_HPP_NAMESPACE commandBuffer, accelerationStructureCount, pAccelerationStructures, queryType, queryPool, firstQuery ); } - VkResult vkCompileDeferredNV( VkDevice device, VkPipeline pipeline, uint32_t shader ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCompileDeferredNV( device, pipeline, shader ); - } VkResult vkCompileDeferredNV( VkDevice device, VkPipeline pipeline, uint32_t shader ) const VULKAN_HPP_NOEXCEPT { return ::vkCompileDeferredNV( device, pipeline, shader ); } - //=== VK_KHR_maintenance3 === //=== VK_KHR_maintenance3 === - void vkGetDescriptorSetLayoutSupportKHR( VkDevice device, - const VkDescriptorSetLayoutCreateInfo * pCreateInfo, - VkDescriptorSetLayoutSupport * pSupport ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDescriptorSetLayoutSupportKHR( device, pCreateInfo, pSupport ); - } void vkGetDescriptorSetLayoutSupportKHR( VkDevice device, const VkDescriptorSetLayoutCreateInfo * pCreateInfo, VkDescriptorSetLayoutSupport * pSupport ) const VULKAN_HPP_NOEXCEPT @@ -6850,19 +4249,8 @@ namespace VULKAN_HPP_NAMESPACE return ::vkGetDescriptorSetLayoutSupportKHR( device, pCreateInfo, pSupport ); } - //=== VK_KHR_draw_indirect_count === //=== VK_KHR_draw_indirect_count === - void vkCmdDrawIndirectCountKHR( VkCommandBuffer commandBuffer, - VkBuffer buffer, - VkDeviceSize offset, - VkBuffer countBuffer, - VkDeviceSize countBufferOffset, - uint32_t maxDrawCount, - uint32_t stride ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDrawIndirectCountKHR( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); - } void vkCmdDrawIndirectCountKHR( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, @@ -6874,16 +4262,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCmdDrawIndirectCountKHR( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); } - void vkCmdDrawIndexedIndirectCountKHR( VkCommandBuffer commandBuffer, - VkBuffer buffer, - VkDeviceSize offset, - VkBuffer countBuffer, - VkDeviceSize countBufferOffset, - uint32_t maxDrawCount, - uint32_t stride ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDrawIndexedIndirectCountKHR( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); - } void vkCmdDrawIndexedIndirectCountKHR( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, @@ -6895,16 +4273,8 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCmdDrawIndexedIndirectCountKHR( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); } - //=== VK_EXT_external_memory_host === //=== VK_EXT_external_memory_host === - VkResult vkGetMemoryHostPointerPropertiesEXT( VkDevice device, - VkExternalMemoryHandleTypeFlagBits handleType, - const void * pHostPointer, - 
VkMemoryHostPointerPropertiesEXT * pMemoryHostPointerProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetMemoryHostPointerPropertiesEXT( device, handleType, pHostPointer, pMemoryHostPointerProperties ); - } VkResult vkGetMemoryHostPointerPropertiesEXT( VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, const void * pHostPointer, @@ -6913,7 +4283,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkGetMemoryHostPointerPropertiesEXT( device, handleType, pHostPointer, pMemoryHostPointerProperties ); } - //=== VK_AMD_buffer_marker === //=== VK_AMD_buffer_marker === void vkCmdWriteBufferMarkerAMD( VkCommandBuffer commandBuffer, @@ -6925,35 +4294,14 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCmdWriteBufferMarkerAMD( commandBuffer, pipelineStage, dstBuffer, dstOffset, marker ); } - void vkCmdWriteBufferMarker2AMD( - VkCommandBuffer commandBuffer, VkPipelineStageFlags2 stage, VkBuffer dstBuffer, VkDeviceSize dstOffset, uint32_t marker ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdWriteBufferMarker2AMD( commandBuffer, stage, dstBuffer, dstOffset, marker ); - } - void vkCmdWriteBufferMarkerAMD( VkCommandBuffer commandBuffer, - VkPipelineStageFlagBits pipelineStage, - VkBuffer dstBuffer, - VkDeviceSize dstOffset, - uint32_t marker ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdWriteBufferMarkerAMD( commandBuffer, pipelineStage, dstBuffer, dstOffset, marker ); - } - void vkCmdWriteBufferMarker2AMD( VkCommandBuffer commandBuffer, VkPipelineStageFlags2 stage, VkBuffer dstBuffer, VkDeviceSize dstOffset, uint32_t marker ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdWriteBufferMarker2AMD( commandBuffer, stage, dstBuffer, dstOffset, marker ); } - //=== VK_EXT_calibrated_timestamps === //=== VK_EXT_calibrated_timestamps === - VkResult vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( VkPhysicalDevice physicalDevice, - uint32_t * pTimeDomainCount, - VkTimeDomainKHR * pTimeDomains ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( physicalDevice, pTimeDomainCount, pTimeDomains ); - } VkResult vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( VkPhysicalDevice physicalDevice, uint32_t * pTimeDomainCount, VkTimeDomainKHR * pTimeDomains ) const VULKAN_HPP_NOEXCEPT @@ -6961,14 +4309,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( physicalDevice, pTimeDomainCount, pTimeDomains ); } - VkResult vkGetCalibratedTimestampsEXT( VkDevice device, - uint32_t timestampCount, - const VkCalibratedTimestampInfoKHR * pTimestampInfos, - uint64_t * pTimestamps, - uint64_t * pMaxDeviation ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetCalibratedTimestampsEXT( device, timestampCount, pTimestampInfos, pTimestamps, pMaxDeviation ); - } VkResult vkGetCalibratedTimestampsEXT( VkDevice device, uint32_t timestampCount, const VkCalibratedTimestampInfoKHR * pTimestampInfos, @@ -6978,39 +4318,19 @@ namespace VULKAN_HPP_NAMESPACE return ::vkGetCalibratedTimestampsEXT( device, timestampCount, pTimestampInfos, pTimestamps, pMaxDeviation ); } - //=== VK_NV_mesh_shader === //=== VK_NV_mesh_shader === - void vkCmdDrawMeshTasksNV( VkCommandBuffer commandBuffer, uint32_t taskCount, uint32_t firstTask ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDrawMeshTasksNV( commandBuffer, taskCount, firstTask ); - } void vkCmdDrawMeshTasksNV( VkCommandBuffer commandBuffer, uint32_t taskCount, uint32_t firstTask ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdDrawMeshTasksNV( commandBuffer, taskCount, firstTask ); } - void vkCmdDrawMeshTasksIndirectNV( VkCommandBuffer 
commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride ) const - VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDrawMeshTasksIndirectNV( commandBuffer, buffer, offset, drawCount, stride ); - } void vkCmdDrawMeshTasksIndirectNV( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdDrawMeshTasksIndirectNV( commandBuffer, buffer, offset, drawCount, stride ); } - void vkCmdDrawMeshTasksIndirectCountNV( VkCommandBuffer commandBuffer, - VkBuffer buffer, - VkDeviceSize offset, - VkBuffer countBuffer, - VkDeviceSize countBufferOffset, - uint32_t maxDrawCount, - uint32_t stride ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDrawMeshTasksIndirectCountNV( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); - } void vkCmdDrawMeshTasksIndirectCountNV( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, @@ -7022,16 +4342,8 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCmdDrawMeshTasksIndirectCountNV( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); } - //=== VK_NV_scissor_exclusive === //=== VK_NV_scissor_exclusive === - void vkCmdSetExclusiveScissorEnableNV( VkCommandBuffer commandBuffer, - uint32_t firstExclusiveScissor, - uint32_t exclusiveScissorCount, - const VkBool32 * pExclusiveScissorEnables ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetExclusiveScissorEnableNV( commandBuffer, firstExclusiveScissor, exclusiveScissorCount, pExclusiveScissorEnables ); - } void vkCmdSetExclusiveScissorEnableNV( VkCommandBuffer commandBuffer, uint32_t firstExclusiveScissor, uint32_t exclusiveScissorCount, @@ -7040,13 +4352,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCmdSetExclusiveScissorEnableNV( commandBuffer, firstExclusiveScissor, exclusiveScissorCount, pExclusiveScissorEnables ); } - void vkCmdSetExclusiveScissorNV( VkCommandBuffer commandBuffer, - uint32_t firstExclusiveScissor, - uint32_t exclusiveScissorCount, - const VkRect2D * pExclusiveScissors ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetExclusiveScissorNV( commandBuffer, firstExclusiveScissor, exclusiveScissorCount, pExclusiveScissors ); - } void vkCmdSetExclusiveScissorNV( VkCommandBuffer commandBuffer, uint32_t firstExclusiveScissor, uint32_t exclusiveScissorCount, @@ -7055,13 +4360,8 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCmdSetExclusiveScissorNV( commandBuffer, firstExclusiveScissor, exclusiveScissorCount, pExclusiveScissors ); } - //=== VK_NV_device_diagnostic_checkpoints === //=== VK_NV_device_diagnostic_checkpoints === - void vkCmdSetCheckpointNV( VkCommandBuffer commandBuffer, const void * pCheckpointMarker ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetCheckpointNV( commandBuffer, pCheckpointMarker ); - } void vkCmdSetCheckpointNV( VkCommandBuffer commandBuffer, const void * pCheckpointMarker ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdSetCheckpointNV( commandBuffer, pCheckpointMarker ); @@ -7072,106 +4372,56 @@ namespace VULKAN_HPP_NAMESPACE return ::vkGetQueueCheckpointDataNV( queue, pCheckpointDataCount, pCheckpointData ); } - void vkGetQueueCheckpointData2NV( VkQueue queue, uint32_t * pCheckpointDataCount, VkCheckpointData2NV * pCheckpointData ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetQueueCheckpointData2NV( queue, pCheckpointDataCount, pCheckpointData ); - } - void vkGetQueueCheckpointDataNV( VkQueue queue, uint32_t * pCheckpointDataCount, VkCheckpointDataNV * pCheckpointData ) const 
VULKAN_HPP_NOEXCEPT - { - return ::vkGetQueueCheckpointDataNV( queue, pCheckpointDataCount, pCheckpointData ); - } - void vkGetQueueCheckpointData2NV( VkQueue queue, uint32_t * pCheckpointDataCount, VkCheckpointData2NV * pCheckpointData ) const VULKAN_HPP_NOEXCEPT { return ::vkGetQueueCheckpointData2NV( queue, pCheckpointDataCount, pCheckpointData ); } - //=== VK_KHR_timeline_semaphore === //=== VK_KHR_timeline_semaphore === - VkResult vkGetSemaphoreCounterValueKHR( VkDevice device, VkSemaphore semaphore, uint64_t * pValue ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetSemaphoreCounterValueKHR( device, semaphore, pValue ); - } VkResult vkGetSemaphoreCounterValueKHR( VkDevice device, VkSemaphore semaphore, uint64_t * pValue ) const VULKAN_HPP_NOEXCEPT { return ::vkGetSemaphoreCounterValueKHR( device, semaphore, pValue ); } - VkResult vkWaitSemaphoresKHR( VkDevice device, const VkSemaphoreWaitInfo * pWaitInfo, uint64_t timeout ) const VULKAN_HPP_NOEXCEPT - { - return ::vkWaitSemaphoresKHR( device, pWaitInfo, timeout ); - } VkResult vkWaitSemaphoresKHR( VkDevice device, const VkSemaphoreWaitInfo * pWaitInfo, uint64_t timeout ) const VULKAN_HPP_NOEXCEPT { return ::vkWaitSemaphoresKHR( device, pWaitInfo, timeout ); } - VkResult vkSignalSemaphoreKHR( VkDevice device, const VkSemaphoreSignalInfo * pSignalInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkSignalSemaphoreKHR( device, pSignalInfo ); - } VkResult vkSignalSemaphoreKHR( VkDevice device, const VkSemaphoreSignalInfo * pSignalInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkSignalSemaphoreKHR( device, pSignalInfo ); } - //=== VK_INTEL_performance_query === //=== VK_INTEL_performance_query === - VkResult vkInitializePerformanceApiINTEL( VkDevice device, const VkInitializePerformanceApiInfoINTEL * pInitializeInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkInitializePerformanceApiINTEL( device, pInitializeInfo ); - } VkResult vkInitializePerformanceApiINTEL( VkDevice device, const VkInitializePerformanceApiInfoINTEL * pInitializeInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkInitializePerformanceApiINTEL( device, pInitializeInfo ); } - void vkUninitializePerformanceApiINTEL( VkDevice device ) const VULKAN_HPP_NOEXCEPT - { - return ::vkUninitializePerformanceApiINTEL( device ); - } void vkUninitializePerformanceApiINTEL( VkDevice device ) const VULKAN_HPP_NOEXCEPT { return ::vkUninitializePerformanceApiINTEL( device ); } - VkResult vkCmdSetPerformanceMarkerINTEL( VkCommandBuffer commandBuffer, const VkPerformanceMarkerInfoINTEL * pMarkerInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetPerformanceMarkerINTEL( commandBuffer, pMarkerInfo ); - } VkResult vkCmdSetPerformanceMarkerINTEL( VkCommandBuffer commandBuffer, const VkPerformanceMarkerInfoINTEL * pMarkerInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdSetPerformanceMarkerINTEL( commandBuffer, pMarkerInfo ); } - VkResult vkCmdSetPerformanceStreamMarkerINTEL( VkCommandBuffer commandBuffer, - const VkPerformanceStreamMarkerInfoINTEL * pMarkerInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetPerformanceStreamMarkerINTEL( commandBuffer, pMarkerInfo ); - } VkResult vkCmdSetPerformanceStreamMarkerINTEL( VkCommandBuffer commandBuffer, const VkPerformanceStreamMarkerInfoINTEL * pMarkerInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdSetPerformanceStreamMarkerINTEL( commandBuffer, pMarkerInfo ); } - VkResult vkCmdSetPerformanceOverrideINTEL( VkCommandBuffer commandBuffer, const VkPerformanceOverrideInfoINTEL * pOverrideInfo ) const VULKAN_HPP_NOEXCEPT - { - return 
::vkCmdSetPerformanceOverrideINTEL( commandBuffer, pOverrideInfo ); - } VkResult vkCmdSetPerformanceOverrideINTEL( VkCommandBuffer commandBuffer, const VkPerformanceOverrideInfoINTEL * pOverrideInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdSetPerformanceOverrideINTEL( commandBuffer, pOverrideInfo ); } - VkResult vkAcquirePerformanceConfigurationINTEL( VkDevice device, - const VkPerformanceConfigurationAcquireInfoINTEL * pAcquireInfo, - VkPerformanceConfigurationINTEL * pConfiguration ) const VULKAN_HPP_NOEXCEPT - { - return ::vkAcquirePerformanceConfigurationINTEL( device, pAcquireInfo, pConfiguration ); - } VkResult vkAcquirePerformanceConfigurationINTEL( VkDevice device, const VkPerformanceConfigurationAcquireInfoINTEL * pAcquireInfo, VkPerformanceConfigurationINTEL * pConfiguration ) const VULKAN_HPP_NOEXCEPT @@ -7179,50 +4429,31 @@ namespace VULKAN_HPP_NAMESPACE return ::vkAcquirePerformanceConfigurationINTEL( device, pAcquireInfo, pConfiguration ); } - VkResult vkReleasePerformanceConfigurationINTEL( VkDevice device, VkPerformanceConfigurationINTEL configuration ) const VULKAN_HPP_NOEXCEPT - { - return ::vkReleasePerformanceConfigurationINTEL( device, configuration ); - } VkResult vkReleasePerformanceConfigurationINTEL( VkDevice device, VkPerformanceConfigurationINTEL configuration ) const VULKAN_HPP_NOEXCEPT { return ::vkReleasePerformanceConfigurationINTEL( device, configuration ); } - VkResult vkQueueSetPerformanceConfigurationINTEL( VkQueue queue, VkPerformanceConfigurationINTEL configuration ) const VULKAN_HPP_NOEXCEPT - { - return ::vkQueueSetPerformanceConfigurationINTEL( queue, configuration ); - } VkResult vkQueueSetPerformanceConfigurationINTEL( VkQueue queue, VkPerformanceConfigurationINTEL configuration ) const VULKAN_HPP_NOEXCEPT { return ::vkQueueSetPerformanceConfigurationINTEL( queue, configuration ); } - VkResult - vkGetPerformanceParameterINTEL( VkDevice device, VkPerformanceParameterTypeINTEL parameter, VkPerformanceValueINTEL * pValue ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPerformanceParameterINTEL( device, parameter, pValue ); - } VkResult vkGetPerformanceParameterINTEL( VkDevice device, VkPerformanceParameterTypeINTEL parameter, VkPerformanceValueINTEL * pValue ) const VULKAN_HPP_NOEXCEPT { return ::vkGetPerformanceParameterINTEL( device, parameter, pValue ); } - //=== VK_AMD_display_native_hdr === //=== VK_AMD_display_native_hdr === void vkSetLocalDimmingAMD( VkDevice device, VkSwapchainKHR swapChain, VkBool32 localDimmingEnable ) const VULKAN_HPP_NOEXCEPT { return ::vkSetLocalDimmingAMD( device, swapChain, localDimmingEnable ); } - void vkSetLocalDimmingAMD( VkDevice device, VkSwapchainKHR swapChain, VkBool32 localDimmingEnable ) const VULKAN_HPP_NOEXCEPT - { - return ::vkSetLocalDimmingAMD( device, swapChain, localDimmingEnable ); - } # if defined( VK_USE_PLATFORM_FUCHSIA ) //=== VK_FUCHSIA_imagepipe_surface === - //=== VK_FUCHSIA_imagepipe_surface === VkResult vkCreateImagePipeSurfaceFUCHSIA( VkInstance instance, const VkImagePipeSurfaceCreateInfoFUCHSIA * pCreateInfo, @@ -7231,18 +4462,10 @@ namespace VULKAN_HPP_NAMESPACE { return ::vkCreateImagePipeSurfaceFUCHSIA( instance, pCreateInfo, pAllocator, pSurface ); } - VkResult vkCreateImagePipeSurfaceFUCHSIA( VkInstance instance, - const VkImagePipeSurfaceCreateInfoFUCHSIA * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateImagePipeSurfaceFUCHSIA( instance, pCreateInfo, pAllocator, pSurface ); - } # endif 
/*VK_USE_PLATFORM_FUCHSIA*/ # if defined( VK_USE_PLATFORM_METAL_EXT ) //=== VK_EXT_metal_surface === - //=== VK_EXT_metal_surface === VkResult vkCreateMetalSurfaceEXT( VkInstance instance, const VkMetalSurfaceCreateInfoEXT * pCreateInfo, @@ -7251,24 +4474,10 @@ namespace VULKAN_HPP_NAMESPACE { return ::vkCreateMetalSurfaceEXT( instance, pCreateInfo, pAllocator, pSurface ); } - VkResult vkCreateMetalSurfaceEXT( VkInstance instance, - const VkMetalSurfaceCreateInfoEXT * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateMetalSurfaceEXT( instance, pCreateInfo, pAllocator, pSurface ); - } # endif /*VK_USE_PLATFORM_METAL_EXT*/ //=== VK_KHR_fragment_shading_rate === - //=== VK_KHR_fragment_shading_rate === - VkResult vkGetPhysicalDeviceFragmentShadingRatesKHR( VkPhysicalDevice physicalDevice, - uint32_t * pFragmentShadingRateCount, - VkPhysicalDeviceFragmentShadingRateKHR * pFragmentShadingRates ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceFragmentShadingRatesKHR( physicalDevice, pFragmentShadingRateCount, pFragmentShadingRates ); - } VkResult vkGetPhysicalDeviceFragmentShadingRatesKHR( VkPhysicalDevice physicalDevice, uint32_t * pFragmentShadingRateCount, VkPhysicalDeviceFragmentShadingRateKHR * pFragmentShadingRates ) const VULKAN_HPP_NOEXCEPT @@ -7276,12 +4485,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkGetPhysicalDeviceFragmentShadingRatesKHR( physicalDevice, pFragmentShadingRateCount, pFragmentShadingRates ); } - void vkCmdSetFragmentShadingRateKHR( VkCommandBuffer commandBuffer, - const VkExtent2D * pFragmentSize, - const VkFragmentShadingRateCombinerOpKHR combinerOps[2] ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetFragmentShadingRateKHR( commandBuffer, pFragmentSize, combinerOps ); - } void vkCmdSetFragmentShadingRateKHR( VkCommandBuffer commandBuffer, const VkExtent2D * pFragmentSize, const VkFragmentShadingRateCombinerOpKHR combinerOps[2] ) const VULKAN_HPP_NOEXCEPT @@ -7289,52 +4492,29 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCmdSetFragmentShadingRateKHR( commandBuffer, pFragmentSize, combinerOps ); } - //=== VK_KHR_dynamic_rendering_local_read === //=== VK_KHR_dynamic_rendering_local_read === - void vkCmdSetRenderingAttachmentLocationsKHR( VkCommandBuffer commandBuffer, - const VkRenderingAttachmentLocationInfoKHR * pLocationInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetRenderingAttachmentLocationsKHR( commandBuffer, pLocationInfo ); - } - void vkCmdSetRenderingAttachmentLocationsKHR( VkCommandBuffer commandBuffer, - const VkRenderingAttachmentLocationInfoKHR * pLocationInfo ) const VULKAN_HPP_NOEXCEPT + void vkCmdSetRenderingAttachmentLocationsKHR( VkCommandBuffer commandBuffer, + const VkRenderingAttachmentLocationInfo * pLocationInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdSetRenderingAttachmentLocationsKHR( commandBuffer, pLocationInfo ); } - void vkCmdSetRenderingInputAttachmentIndicesKHR( VkCommandBuffer commandBuffer, - const VkRenderingInputAttachmentIndexInfoKHR * pInputAttachmentIndexInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetRenderingInputAttachmentIndicesKHR( commandBuffer, pInputAttachmentIndexInfo ); - } - void vkCmdSetRenderingInputAttachmentIndicesKHR( VkCommandBuffer commandBuffer, - const VkRenderingInputAttachmentIndexInfoKHR * pInputAttachmentIndexInfo ) const VULKAN_HPP_NOEXCEPT + void vkCmdSetRenderingInputAttachmentIndicesKHR( VkCommandBuffer commandBuffer, + const VkRenderingInputAttachmentIndexInfo * 
pInputAttachmentIndexInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdSetRenderingInputAttachmentIndicesKHR( commandBuffer, pInputAttachmentIndexInfo ); } - //=== VK_EXT_buffer_device_address === //=== VK_EXT_buffer_device_address === - VkDeviceAddress vkGetBufferDeviceAddressEXT( VkDevice device, const VkBufferDeviceAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetBufferDeviceAddressEXT( device, pInfo ); - } VkDeviceAddress vkGetBufferDeviceAddressEXT( VkDevice device, const VkBufferDeviceAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkGetBufferDeviceAddressEXT( device, pInfo ); } - //=== VK_EXT_tooling_info === //=== VK_EXT_tooling_info === - VkResult vkGetPhysicalDeviceToolPropertiesEXT( VkPhysicalDevice physicalDevice, - uint32_t * pToolCount, - VkPhysicalDeviceToolProperties * pToolProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceToolPropertiesEXT( physicalDevice, pToolCount, pToolProperties ); - } VkResult vkGetPhysicalDeviceToolPropertiesEXT( VkPhysicalDevice physicalDevice, uint32_t * pToolCount, VkPhysicalDeviceToolProperties * pToolProperties ) const VULKAN_HPP_NOEXCEPT @@ -7342,27 +4522,15 @@ namespace VULKAN_HPP_NAMESPACE return ::vkGetPhysicalDeviceToolPropertiesEXT( physicalDevice, pToolCount, pToolProperties ); } - //=== VK_KHR_present_wait === //=== VK_KHR_present_wait === - VkResult vkWaitForPresentKHR( VkDevice device, VkSwapchainKHR swapchain, uint64_t presentId, uint64_t timeout ) const VULKAN_HPP_NOEXCEPT - { - return ::vkWaitForPresentKHR( device, swapchain, presentId, timeout ); - } VkResult vkWaitForPresentKHR( VkDevice device, VkSwapchainKHR swapchain, uint64_t presentId, uint64_t timeout ) const VULKAN_HPP_NOEXCEPT { return ::vkWaitForPresentKHR( device, swapchain, presentId, timeout ); } - //=== VK_NV_cooperative_matrix === //=== VK_NV_cooperative_matrix === - VkResult vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( VkPhysicalDevice physicalDevice, - uint32_t * pPropertyCount, - VkCooperativeMatrixPropertiesNV * pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( physicalDevice, pPropertyCount, pProperties ); - } VkResult vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( VkPhysicalDevice physicalDevice, uint32_t * pPropertyCount, VkCooperativeMatrixPropertiesNV * pProperties ) const VULKAN_HPP_NOEXCEPT @@ -7370,7 +4538,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( physicalDevice, pPropertyCount, pProperties ); } - //=== VK_NV_coverage_reduction_mode === //=== VK_NV_coverage_reduction_mode === VkResult vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( @@ -7378,23 +4545,10 @@ namespace VULKAN_HPP_NAMESPACE { return ::vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( physicalDevice, pCombinationCount, pCombinations ); } - VkResult vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( - VkPhysicalDevice physicalDevice, uint32_t * pCombinationCount, VkFramebufferMixedSamplesCombinationNV * pCombinations ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( physicalDevice, pCombinationCount, pCombinations ); - } # if defined( VK_USE_PLATFORM_WIN32_KHR ) //=== VK_EXT_full_screen_exclusive === - //=== VK_EXT_full_screen_exclusive === - VkResult vkGetPhysicalDeviceSurfacePresentModes2EXT( VkPhysicalDevice physicalDevice, - const VkPhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, - uint32_t * pPresentModeCount, - 
VkPresentModeKHR * pPresentModes ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceSurfacePresentModes2EXT( physicalDevice, pSurfaceInfo, pPresentModeCount, pPresentModes ); - } VkResult vkGetPhysicalDeviceSurfacePresentModes2EXT( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, uint32_t * pPresentModeCount, @@ -7403,19 +4557,11 @@ namespace VULKAN_HPP_NAMESPACE return ::vkGetPhysicalDeviceSurfacePresentModes2EXT( physicalDevice, pSurfaceInfo, pPresentModeCount, pPresentModes ); } - VkResult vkAcquireFullScreenExclusiveModeEXT( VkDevice device, VkSwapchainKHR swapchain ) const VULKAN_HPP_NOEXCEPT - { - return ::vkAcquireFullScreenExclusiveModeEXT( device, swapchain ); - } VkResult vkAcquireFullScreenExclusiveModeEXT( VkDevice device, VkSwapchainKHR swapchain ) const VULKAN_HPP_NOEXCEPT { return ::vkAcquireFullScreenExclusiveModeEXT( device, swapchain ); } - VkResult vkReleaseFullScreenExclusiveModeEXT( VkDevice device, VkSwapchainKHR swapchain ) const VULKAN_HPP_NOEXCEPT - { - return ::vkReleaseFullScreenExclusiveModeEXT( device, swapchain ); - } VkResult vkReleaseFullScreenExclusiveModeEXT( VkDevice device, VkSwapchainKHR swapchain ) const VULKAN_HPP_NOEXCEPT { return ::vkReleaseFullScreenExclusiveModeEXT( device, swapchain ); @@ -7427,24 +4573,10 @@ namespace VULKAN_HPP_NAMESPACE { return ::vkGetDeviceGroupSurfacePresentModes2EXT( device, pSurfaceInfo, pModes ); } - VkResult vkGetDeviceGroupSurfacePresentModes2EXT( VkDevice device, - const VkPhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, - VkDeviceGroupPresentModeFlagsKHR * pModes ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDeviceGroupSurfacePresentModes2EXT( device, pSurfaceInfo, pModes ); - } # endif /*VK_USE_PLATFORM_WIN32_KHR*/ //=== VK_EXT_headless_surface === - //=== VK_EXT_headless_surface === - VkResult vkCreateHeadlessSurfaceEXT( VkInstance instance, - const VkHeadlessSurfaceCreateInfoEXT * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateHeadlessSurfaceEXT( instance, pCreateInfo, pAllocator, pSurface ); - } VkResult vkCreateHeadlessSurfaceEXT( VkInstance instance, const VkHeadlessSurfaceCreateInfoEXT * pCreateInfo, const VkAllocationCallbacks * pAllocator, @@ -7453,118 +4585,64 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCreateHeadlessSurfaceEXT( instance, pCreateInfo, pAllocator, pSurface ); } - //=== VK_KHR_buffer_device_address === //=== VK_KHR_buffer_device_address === - VkDeviceAddress vkGetBufferDeviceAddressKHR( VkDevice device, const VkBufferDeviceAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetBufferDeviceAddressKHR( device, pInfo ); - } VkDeviceAddress vkGetBufferDeviceAddressKHR( VkDevice device, const VkBufferDeviceAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkGetBufferDeviceAddressKHR( device, pInfo ); } - uint64_t vkGetBufferOpaqueCaptureAddressKHR( VkDevice device, const VkBufferDeviceAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetBufferOpaqueCaptureAddressKHR( device, pInfo ); - } uint64_t vkGetBufferOpaqueCaptureAddressKHR( VkDevice device, const VkBufferDeviceAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkGetBufferOpaqueCaptureAddressKHR( device, pInfo ); } - uint64_t vkGetDeviceMemoryOpaqueCaptureAddressKHR( VkDevice device, const VkDeviceMemoryOpaqueCaptureAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDeviceMemoryOpaqueCaptureAddressKHR( device, pInfo ); - } uint64_t 
vkGetDeviceMemoryOpaqueCaptureAddressKHR( VkDevice device, const VkDeviceMemoryOpaqueCaptureAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkGetDeviceMemoryOpaqueCaptureAddressKHR( device, pInfo ); } - //=== VK_EXT_line_rasterization === //=== VK_EXT_line_rasterization === - void vkCmdSetLineStippleEXT( VkCommandBuffer commandBuffer, uint32_t lineStippleFactor, uint16_t lineStipplePattern ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetLineStippleEXT( commandBuffer, lineStippleFactor, lineStipplePattern ); - } void vkCmdSetLineStippleEXT( VkCommandBuffer commandBuffer, uint32_t lineStippleFactor, uint16_t lineStipplePattern ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdSetLineStippleEXT( commandBuffer, lineStippleFactor, lineStipplePattern ); } - //=== VK_EXT_host_query_reset === //=== VK_EXT_host_query_reset === - void vkResetQueryPoolEXT( VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT - { - return ::vkResetQueryPoolEXT( device, queryPool, firstQuery, queryCount ); - } void vkResetQueryPoolEXT( VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT { return ::vkResetQueryPoolEXT( device, queryPool, firstQuery, queryCount ); } - //=== VK_EXT_extended_dynamic_state === //=== VK_EXT_extended_dynamic_state === - void vkCmdSetCullModeEXT( VkCommandBuffer commandBuffer, VkCullModeFlags cullMode ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetCullModeEXT( commandBuffer, cullMode ); - } void vkCmdSetCullModeEXT( VkCommandBuffer commandBuffer, VkCullModeFlags cullMode ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdSetCullModeEXT( commandBuffer, cullMode ); } - void vkCmdSetFrontFaceEXT( VkCommandBuffer commandBuffer, VkFrontFace frontFace ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetFrontFaceEXT( commandBuffer, frontFace ); - } void vkCmdSetFrontFaceEXT( VkCommandBuffer commandBuffer, VkFrontFace frontFace ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdSetFrontFaceEXT( commandBuffer, frontFace ); } - void vkCmdSetPrimitiveTopologyEXT( VkCommandBuffer commandBuffer, VkPrimitiveTopology primitiveTopology ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetPrimitiveTopologyEXT( commandBuffer, primitiveTopology ); - } void vkCmdSetPrimitiveTopologyEXT( VkCommandBuffer commandBuffer, VkPrimitiveTopology primitiveTopology ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdSetPrimitiveTopologyEXT( commandBuffer, primitiveTopology ); } - void vkCmdSetViewportWithCountEXT( VkCommandBuffer commandBuffer, uint32_t viewportCount, const VkViewport * pViewports ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetViewportWithCountEXT( commandBuffer, viewportCount, pViewports ); - } void vkCmdSetViewportWithCountEXT( VkCommandBuffer commandBuffer, uint32_t viewportCount, const VkViewport * pViewports ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdSetViewportWithCountEXT( commandBuffer, viewportCount, pViewports ); } - void vkCmdSetScissorWithCountEXT( VkCommandBuffer commandBuffer, uint32_t scissorCount, const VkRect2D * pScissors ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetScissorWithCountEXT( commandBuffer, scissorCount, pScissors ); - } void vkCmdSetScissorWithCountEXT( VkCommandBuffer commandBuffer, uint32_t scissorCount, const VkRect2D * pScissors ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdSetScissorWithCountEXT( commandBuffer, scissorCount, pScissors ); } - void vkCmdBindVertexBuffers2EXT( VkCommandBuffer commandBuffer, - uint32_t firstBinding, - uint32_t bindingCount, - const 
VkBuffer * pBuffers, - const VkDeviceSize * pOffsets, - const VkDeviceSize * pSizes, - const VkDeviceSize * pStrides ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBindVertexBuffers2EXT( commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets, pSizes, pStrides ); - } void vkCmdBindVertexBuffers2EXT( VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, @@ -7576,60 +4654,31 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCmdBindVertexBuffers2EXT( commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets, pSizes, pStrides ); } - void vkCmdSetDepthTestEnableEXT( VkCommandBuffer commandBuffer, VkBool32 depthTestEnable ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetDepthTestEnableEXT( commandBuffer, depthTestEnable ); - } void vkCmdSetDepthTestEnableEXT( VkCommandBuffer commandBuffer, VkBool32 depthTestEnable ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdSetDepthTestEnableEXT( commandBuffer, depthTestEnable ); } - void vkCmdSetDepthWriteEnableEXT( VkCommandBuffer commandBuffer, VkBool32 depthWriteEnable ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetDepthWriteEnableEXT( commandBuffer, depthWriteEnable ); - } void vkCmdSetDepthWriteEnableEXT( VkCommandBuffer commandBuffer, VkBool32 depthWriteEnable ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdSetDepthWriteEnableEXT( commandBuffer, depthWriteEnable ); } - void vkCmdSetDepthCompareOpEXT( VkCommandBuffer commandBuffer, VkCompareOp depthCompareOp ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetDepthCompareOpEXT( commandBuffer, depthCompareOp ); - } void vkCmdSetDepthCompareOpEXT( VkCommandBuffer commandBuffer, VkCompareOp depthCompareOp ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdSetDepthCompareOpEXT( commandBuffer, depthCompareOp ); } - void vkCmdSetDepthBoundsTestEnableEXT( VkCommandBuffer commandBuffer, VkBool32 depthBoundsTestEnable ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetDepthBoundsTestEnableEXT( commandBuffer, depthBoundsTestEnable ); - } void vkCmdSetDepthBoundsTestEnableEXT( VkCommandBuffer commandBuffer, VkBool32 depthBoundsTestEnable ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdSetDepthBoundsTestEnableEXT( commandBuffer, depthBoundsTestEnable ); } - void vkCmdSetStencilTestEnableEXT( VkCommandBuffer commandBuffer, VkBool32 stencilTestEnable ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetStencilTestEnableEXT( commandBuffer, stencilTestEnable ); - } void vkCmdSetStencilTestEnableEXT( VkCommandBuffer commandBuffer, VkBool32 stencilTestEnable ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdSetStencilTestEnableEXT( commandBuffer, stencilTestEnable ); } - void vkCmdSetStencilOpEXT( VkCommandBuffer commandBuffer, - VkStencilFaceFlags faceMask, - VkStencilOp failOp, - VkStencilOp passOp, - VkStencilOp depthFailOp, - VkCompareOp compareOp ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetStencilOpEXT( commandBuffer, faceMask, failOp, passOp, depthFailOp, compareOp ); - } void vkCmdSetStencilOpEXT( VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, VkStencilOp failOp, @@ -7640,15 +4689,8 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCmdSetStencilOpEXT( commandBuffer, faceMask, failOp, passOp, depthFailOp, compareOp ); } - //=== VK_KHR_deferred_host_operations === //=== VK_KHR_deferred_host_operations === - VkResult vkCreateDeferredOperationKHR( VkDevice device, - const VkAllocationCallbacks * pAllocator, - VkDeferredOperationKHR * pDeferredOperation ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateDeferredOperationKHR( device, pAllocator, pDeferredOperation ); - } VkResult 
vkCreateDeferredOperationKHR( VkDevice device, const VkAllocationCallbacks * pAllocator, VkDeferredOperationKHR * pDeferredOperation ) const VULKAN_HPP_NOEXCEPT @@ -7656,54 +4698,29 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCreateDeferredOperationKHR( device, pAllocator, pDeferredOperation ); } - void - vkDestroyDeferredOperationKHR( VkDevice device, VkDeferredOperationKHR operation, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyDeferredOperationKHR( device, operation, pAllocator ); - } void vkDestroyDeferredOperationKHR( VkDevice device, VkDeferredOperationKHR operation, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT { return ::vkDestroyDeferredOperationKHR( device, operation, pAllocator ); } - uint32_t vkGetDeferredOperationMaxConcurrencyKHR( VkDevice device, VkDeferredOperationKHR operation ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDeferredOperationMaxConcurrencyKHR( device, operation ); - } uint32_t vkGetDeferredOperationMaxConcurrencyKHR( VkDevice device, VkDeferredOperationKHR operation ) const VULKAN_HPP_NOEXCEPT { return ::vkGetDeferredOperationMaxConcurrencyKHR( device, operation ); } - VkResult vkGetDeferredOperationResultKHR( VkDevice device, VkDeferredOperationKHR operation ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDeferredOperationResultKHR( device, operation ); - } VkResult vkGetDeferredOperationResultKHR( VkDevice device, VkDeferredOperationKHR operation ) const VULKAN_HPP_NOEXCEPT { return ::vkGetDeferredOperationResultKHR( device, operation ); } - VkResult vkDeferredOperationJoinKHR( VkDevice device, VkDeferredOperationKHR operation ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDeferredOperationJoinKHR( device, operation ); - } VkResult vkDeferredOperationJoinKHR( VkDevice device, VkDeferredOperationKHR operation ) const VULKAN_HPP_NOEXCEPT { return ::vkDeferredOperationJoinKHR( device, operation ); } - //=== VK_KHR_pipeline_executable_properties === //=== VK_KHR_pipeline_executable_properties === - VkResult vkGetPipelineExecutablePropertiesKHR( VkDevice device, - const VkPipelineInfoKHR * pPipelineInfo, - uint32_t * pExecutableCount, - VkPipelineExecutablePropertiesKHR * pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPipelineExecutablePropertiesKHR( device, pPipelineInfo, pExecutableCount, pProperties ); - } VkResult vkGetPipelineExecutablePropertiesKHR( VkDevice device, const VkPipelineInfoKHR * pPipelineInfo, uint32_t * pExecutableCount, @@ -7712,13 +4729,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkGetPipelineExecutablePropertiesKHR( device, pPipelineInfo, pExecutableCount, pProperties ); } - VkResult vkGetPipelineExecutableStatisticsKHR( VkDevice device, - const VkPipelineExecutableInfoKHR * pExecutableInfo, - uint32_t * pStatisticCount, - VkPipelineExecutableStatisticKHR * pStatistics ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPipelineExecutableStatisticsKHR( device, pExecutableInfo, pStatisticCount, pStatistics ); - } VkResult vkGetPipelineExecutableStatisticsKHR( VkDevice device, const VkPipelineExecutableInfoKHR * pExecutableInfo, uint32_t * pStatisticCount, @@ -7735,115 +4745,59 @@ namespace VULKAN_HPP_NAMESPACE { return ::vkGetPipelineExecutableInternalRepresentationsKHR( device, pExecutableInfo, pInternalRepresentationCount, pInternalRepresentations ); } + + //=== VK_EXT_host_image_copy === + + VkResult vkCopyMemoryToImageEXT( VkDevice device, const VkCopyMemoryToImageInfo * pCopyMemoryToImageInfo ) const VULKAN_HPP_NOEXCEPT + { + return 
::vkCopyMemoryToImageEXT( device, pCopyMemoryToImageInfo ); + } + + VkResult vkCopyImageToMemoryEXT( VkDevice device, const VkCopyImageToMemoryInfo * pCopyImageToMemoryInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCopyImageToMemoryEXT( device, pCopyImageToMemoryInfo ); + } + + VkResult vkCopyImageToImageEXT( VkDevice device, const VkCopyImageToImageInfo * pCopyImageToImageInfo ) const VULKAN_HPP_NOEXCEPT + { + return ::vkCopyImageToImageEXT( device, pCopyImageToImageInfo ); + } + VkResult - vkGetPipelineExecutableInternalRepresentationsKHR( VkDevice device, - const VkPipelineExecutableInfoKHR * pExecutableInfo, - uint32_t * pInternalRepresentationCount, - VkPipelineExecutableInternalRepresentationKHR * pInternalRepresentations ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPipelineExecutableInternalRepresentationsKHR( device, pExecutableInfo, pInternalRepresentationCount, pInternalRepresentations ); - } - - //=== VK_EXT_host_image_copy === - //=== VK_EXT_host_image_copy === - - VkResult vkCopyMemoryToImageEXT( VkDevice device, const VkCopyMemoryToImageInfoEXT * pCopyMemoryToImageInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCopyMemoryToImageEXT( device, pCopyMemoryToImageInfo ); - } - VkResult vkCopyMemoryToImageEXT( VkDevice device, const VkCopyMemoryToImageInfoEXT * pCopyMemoryToImageInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCopyMemoryToImageEXT( device, pCopyMemoryToImageInfo ); - } - - VkResult vkCopyImageToMemoryEXT( VkDevice device, const VkCopyImageToMemoryInfoEXT * pCopyImageToMemoryInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCopyImageToMemoryEXT( device, pCopyImageToMemoryInfo ); - } - VkResult vkCopyImageToMemoryEXT( VkDevice device, const VkCopyImageToMemoryInfoEXT * pCopyImageToMemoryInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCopyImageToMemoryEXT( device, pCopyImageToMemoryInfo ); - } - - VkResult vkCopyImageToImageEXT( VkDevice device, const VkCopyImageToImageInfoEXT * pCopyImageToImageInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCopyImageToImageEXT( device, pCopyImageToImageInfo ); - } - VkResult vkCopyImageToImageEXT( VkDevice device, const VkCopyImageToImageInfoEXT * pCopyImageToImageInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCopyImageToImageEXT( device, pCopyImageToImageInfo ); - } - - VkResult vkTransitionImageLayoutEXT( VkDevice device, - uint32_t transitionCount, - const VkHostImageLayoutTransitionInfoEXT * pTransitions ) const VULKAN_HPP_NOEXCEPT - { - return ::vkTransitionImageLayoutEXT( device, transitionCount, pTransitions ); - } - VkResult vkTransitionImageLayoutEXT( VkDevice device, - uint32_t transitionCount, - const VkHostImageLayoutTransitionInfoEXT * pTransitions ) const VULKAN_HPP_NOEXCEPT + vkTransitionImageLayoutEXT( VkDevice device, uint32_t transitionCount, const VkHostImageLayoutTransitionInfo * pTransitions ) const VULKAN_HPP_NOEXCEPT { return ::vkTransitionImageLayoutEXT( device, transitionCount, pTransitions ); } - void vkGetImageSubresourceLayout2EXT( VkDevice device, - VkImage image, - const VkImageSubresource2KHR * pSubresource, - VkSubresourceLayout2KHR * pLayout ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetImageSubresourceLayout2EXT( device, image, pSubresource, pLayout ); - } - void vkGetImageSubresourceLayout2EXT( VkDevice device, - VkImage image, - const VkImageSubresource2KHR * pSubresource, - VkSubresourceLayout2KHR * pLayout ) const VULKAN_HPP_NOEXCEPT + void vkGetImageSubresourceLayout2EXT( VkDevice device, + VkImage image, + const VkImageSubresource2 * pSubresource, + VkSubresourceLayout2 * 
pLayout ) const VULKAN_HPP_NOEXCEPT { return ::vkGetImageSubresourceLayout2EXT( device, image, pSubresource, pLayout ); } - //=== VK_KHR_map_memory2 === //=== VK_KHR_map_memory2 === - VkResult vkMapMemory2KHR( VkDevice device, const VkMemoryMapInfoKHR * pMemoryMapInfo, void ** ppData ) const VULKAN_HPP_NOEXCEPT - { - return ::vkMapMemory2KHR( device, pMemoryMapInfo, ppData ); - } - VkResult vkMapMemory2KHR( VkDevice device, const VkMemoryMapInfoKHR * pMemoryMapInfo, void ** ppData ) const VULKAN_HPP_NOEXCEPT + VkResult vkMapMemory2KHR( VkDevice device, const VkMemoryMapInfo * pMemoryMapInfo, void ** ppData ) const VULKAN_HPP_NOEXCEPT { return ::vkMapMemory2KHR( device, pMemoryMapInfo, ppData ); } - VkResult vkUnmapMemory2KHR( VkDevice device, const VkMemoryUnmapInfoKHR * pMemoryUnmapInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkUnmapMemory2KHR( device, pMemoryUnmapInfo ); - } - VkResult vkUnmapMemory2KHR( VkDevice device, const VkMemoryUnmapInfoKHR * pMemoryUnmapInfo ) const VULKAN_HPP_NOEXCEPT + VkResult vkUnmapMemory2KHR( VkDevice device, const VkMemoryUnmapInfo * pMemoryUnmapInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkUnmapMemory2KHR( device, pMemoryUnmapInfo ); } - //=== VK_EXT_swapchain_maintenance1 === //=== VK_EXT_swapchain_maintenance1 === - VkResult vkReleaseSwapchainImagesEXT( VkDevice device, const VkReleaseSwapchainImagesInfoEXT * pReleaseInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkReleaseSwapchainImagesEXT( device, pReleaseInfo ); - } VkResult vkReleaseSwapchainImagesEXT( VkDevice device, const VkReleaseSwapchainImagesInfoEXT * pReleaseInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkReleaseSwapchainImagesEXT( device, pReleaseInfo ); } - //=== VK_NV_device_generated_commands === //=== VK_NV_device_generated_commands === - void vkGetGeneratedCommandsMemoryRequirementsNV( VkDevice device, - const VkGeneratedCommandsMemoryRequirementsInfoNV * pInfo, - VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetGeneratedCommandsMemoryRequirementsNV( device, pInfo, pMemoryRequirements ); - } void vkGetGeneratedCommandsMemoryRequirementsNV( VkDevice device, const VkGeneratedCommandsMemoryRequirementsInfoNV * pInfo, VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT @@ -7851,23 +4805,12 @@ namespace VULKAN_HPP_NAMESPACE return ::vkGetGeneratedCommandsMemoryRequirementsNV( device, pInfo, pMemoryRequirements ); } - void vkCmdPreprocessGeneratedCommandsNV( VkCommandBuffer commandBuffer, - const VkGeneratedCommandsInfoNV * pGeneratedCommandsInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdPreprocessGeneratedCommandsNV( commandBuffer, pGeneratedCommandsInfo ); - } void vkCmdPreprocessGeneratedCommandsNV( VkCommandBuffer commandBuffer, const VkGeneratedCommandsInfoNV * pGeneratedCommandsInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdPreprocessGeneratedCommandsNV( commandBuffer, pGeneratedCommandsInfo ); } - void vkCmdExecuteGeneratedCommandsNV( VkCommandBuffer commandBuffer, - VkBool32 isPreprocessed, - const VkGeneratedCommandsInfoNV * pGeneratedCommandsInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdExecuteGeneratedCommandsNV( commandBuffer, isPreprocessed, pGeneratedCommandsInfo ); - } void vkCmdExecuteGeneratedCommandsNV( VkCommandBuffer commandBuffer, VkBool32 isPreprocessed, const VkGeneratedCommandsInfoNV * pGeneratedCommandsInfo ) const VULKAN_HPP_NOEXCEPT @@ -7875,13 +4818,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCmdExecuteGeneratedCommandsNV( commandBuffer, isPreprocessed, pGeneratedCommandsInfo ); } - 
void vkCmdBindPipelineShaderGroupNV( VkCommandBuffer commandBuffer, - VkPipelineBindPoint pipelineBindPoint, - VkPipeline pipeline, - uint32_t groupIndex ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBindPipelineShaderGroupNV( commandBuffer, pipelineBindPoint, pipeline, groupIndex ); - } void vkCmdBindPipelineShaderGroupNV( VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline, @@ -7890,13 +4826,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCmdBindPipelineShaderGroupNV( commandBuffer, pipelineBindPoint, pipeline, groupIndex ); } - VkResult vkCreateIndirectCommandsLayoutNV( VkDevice device, - const VkIndirectCommandsLayoutCreateInfoNV * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkIndirectCommandsLayoutNV * pIndirectCommandsLayout ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateIndirectCommandsLayoutNV( device, pCreateInfo, pAllocator, pIndirectCommandsLayout ); - } VkResult vkCreateIndirectCommandsLayoutNV( VkDevice device, const VkIndirectCommandsLayoutCreateInfoNV * pCreateInfo, const VkAllocationCallbacks * pAllocator, @@ -7905,12 +4834,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCreateIndirectCommandsLayoutNV( device, pCreateInfo, pAllocator, pIndirectCommandsLayout ); } - void vkDestroyIndirectCommandsLayoutNV( VkDevice device, - VkIndirectCommandsLayoutNV indirectCommandsLayout, - const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyIndirectCommandsLayoutNV( device, indirectCommandsLayout, pAllocator ); - } void vkDestroyIndirectCommandsLayoutNV( VkDevice device, VkIndirectCommandsLayoutNV indirectCommandsLayout, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT @@ -7918,49 +4841,27 @@ namespace VULKAN_HPP_NAMESPACE return ::vkDestroyIndirectCommandsLayoutNV( device, indirectCommandsLayout, pAllocator ); } - //=== VK_EXT_depth_bias_control === //=== VK_EXT_depth_bias_control === - void vkCmdSetDepthBias2EXT( VkCommandBuffer commandBuffer, const VkDepthBiasInfoEXT * pDepthBiasInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetDepthBias2EXT( commandBuffer, pDepthBiasInfo ); - } void vkCmdSetDepthBias2EXT( VkCommandBuffer commandBuffer, const VkDepthBiasInfoEXT * pDepthBiasInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdSetDepthBias2EXT( commandBuffer, pDepthBiasInfo ); } - //=== VK_EXT_acquire_drm_display === //=== VK_EXT_acquire_drm_display === - VkResult vkAcquireDrmDisplayEXT( VkPhysicalDevice physicalDevice, int32_t drmFd, VkDisplayKHR display ) const VULKAN_HPP_NOEXCEPT - { - return ::vkAcquireDrmDisplayEXT( physicalDevice, drmFd, display ); - } VkResult vkAcquireDrmDisplayEXT( VkPhysicalDevice physicalDevice, int32_t drmFd, VkDisplayKHR display ) const VULKAN_HPP_NOEXCEPT { return ::vkAcquireDrmDisplayEXT( physicalDevice, drmFd, display ); } - VkResult vkGetDrmDisplayEXT( VkPhysicalDevice physicalDevice, int32_t drmFd, uint32_t connectorId, VkDisplayKHR * display ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDrmDisplayEXT( physicalDevice, drmFd, connectorId, display ); - } VkResult vkGetDrmDisplayEXT( VkPhysicalDevice physicalDevice, int32_t drmFd, uint32_t connectorId, VkDisplayKHR * display ) const VULKAN_HPP_NOEXCEPT { return ::vkGetDrmDisplayEXT( physicalDevice, drmFd, connectorId, display ); } - //=== VK_EXT_private_data === //=== VK_EXT_private_data === - VkResult vkCreatePrivateDataSlotEXT( VkDevice device, - const VkPrivateDataSlotCreateInfo * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkPrivateDataSlot * pPrivateDataSlot ) const 
VULKAN_HPP_NOEXCEPT - { - return ::vkCreatePrivateDataSlotEXT( device, pCreateInfo, pAllocator, pPrivateDataSlot ); - } VkResult vkCreatePrivateDataSlotEXT( VkDevice device, const VkPrivateDataSlotCreateInfo * pCreateInfo, const VkAllocationCallbacks * pAllocator, @@ -7969,47 +4870,25 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCreatePrivateDataSlotEXT( device, pCreateInfo, pAllocator, pPrivateDataSlot ); } - void vkDestroyPrivateDataSlotEXT( VkDevice device, VkPrivateDataSlot privateDataSlot, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyPrivateDataSlotEXT( device, privateDataSlot, pAllocator ); - } void vkDestroyPrivateDataSlotEXT( VkDevice device, VkPrivateDataSlot privateDataSlot, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT { return ::vkDestroyPrivateDataSlotEXT( device, privateDataSlot, pAllocator ); } - VkResult vkSetPrivateDataEXT( VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlot privateDataSlot, uint64_t data ) const - VULKAN_HPP_NOEXCEPT - { - return ::vkSetPrivateDataEXT( device, objectType, objectHandle, privateDataSlot, data ); - } VkResult vkSetPrivateDataEXT( VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlot privateDataSlot, uint64_t data ) const VULKAN_HPP_NOEXCEPT { return ::vkSetPrivateDataEXT( device, objectType, objectHandle, privateDataSlot, data ); } - void vkGetPrivateDataEXT( VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlot privateDataSlot, uint64_t * pData ) const - VULKAN_HPP_NOEXCEPT - { - return ::vkGetPrivateDataEXT( device, objectType, objectHandle, privateDataSlot, pData ); - } void vkGetPrivateDataEXT( VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlot privateDataSlot, uint64_t * pData ) const VULKAN_HPP_NOEXCEPT { return ::vkGetPrivateDataEXT( device, objectType, objectHandle, privateDataSlot, pData ); } - //=== VK_KHR_video_encode_queue === //=== VK_KHR_video_encode_queue === - VkResult - vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR( VkPhysicalDevice physicalDevice, - const VkPhysicalDeviceVideoEncodeQualityLevelInfoKHR * pQualityLevelInfo, - VkVideoEncodeQualityLevelPropertiesKHR * pQualityLevelProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR( physicalDevice, pQualityLevelInfo, pQualityLevelProperties ); - } VkResult vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceVideoEncodeQualityLevelInfoKHR * pQualityLevelInfo, @@ -8018,14 +4897,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR( physicalDevice, pQualityLevelInfo, pQualityLevelProperties ); } - VkResult vkGetEncodedVideoSessionParametersKHR( VkDevice device, - const VkVideoEncodeSessionParametersGetInfoKHR * pVideoSessionParametersInfo, - VkVideoEncodeSessionParametersFeedbackInfoKHR * pFeedbackInfo, - size_t * pDataSize, - void * pData ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetEncodedVideoSessionParametersKHR( device, pVideoSessionParametersInfo, pFeedbackInfo, pDataSize, pData ); - } VkResult vkGetEncodedVideoSessionParametersKHR( VkDevice device, const VkVideoEncodeSessionParametersGetInfoKHR * pVideoSessionParametersInfo, VkVideoEncodeSessionParametersFeedbackInfoKHR * pFeedbackInfo, @@ -8039,22 +4910,10 @@ namespace VULKAN_HPP_NAMESPACE { return ::vkCmdEncodeVideoKHR( commandBuffer, pEncodeInfo ); } - void 
vkCmdEncodeVideoKHR( VkCommandBuffer commandBuffer, const VkVideoEncodeInfoKHR * pEncodeInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdEncodeVideoKHR( commandBuffer, pEncodeInfo ); - } # if defined( VK_ENABLE_BETA_EXTENSIONS ) //=== VK_NV_cuda_kernel_launch === - //=== VK_NV_cuda_kernel_launch === - VkResult vkCreateCudaModuleNV( VkDevice device, - const VkCudaModuleCreateInfoNV * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkCudaModuleNV * pModule ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateCudaModuleNV( device, pCreateInfo, pAllocator, pModule ); - } VkResult vkCreateCudaModuleNV( VkDevice device, const VkCudaModuleCreateInfoNV * pCreateInfo, const VkAllocationCallbacks * pAllocator, @@ -8063,22 +4922,11 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCreateCudaModuleNV( device, pCreateInfo, pAllocator, pModule ); } - VkResult vkGetCudaModuleCacheNV( VkDevice device, VkCudaModuleNV module, size_t * pCacheSize, void * pCacheData ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetCudaModuleCacheNV( device, module, pCacheSize, pCacheData ); - } VkResult vkGetCudaModuleCacheNV( VkDevice device, VkCudaModuleNV module, size_t * pCacheSize, void * pCacheData ) const VULKAN_HPP_NOEXCEPT { return ::vkGetCudaModuleCacheNV( device, module, pCacheSize, pCacheData ); } - VkResult vkCreateCudaFunctionNV( VkDevice device, - const VkCudaFunctionCreateInfoNV * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkCudaFunctionNV * pFunction ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateCudaFunctionNV( device, pCreateInfo, pAllocator, pFunction ); - } VkResult vkCreateCudaFunctionNV( VkDevice device, const VkCudaFunctionCreateInfoNV * pCreateInfo, const VkAllocationCallbacks * pAllocator, @@ -8087,19 +4935,11 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCreateCudaFunctionNV( device, pCreateInfo, pAllocator, pFunction ); } - void vkDestroyCudaModuleNV( VkDevice device, VkCudaModuleNV module, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyCudaModuleNV( device, module, pAllocator ); - } void vkDestroyCudaModuleNV( VkDevice device, VkCudaModuleNV module, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT { return ::vkDestroyCudaModuleNV( device, module, pAllocator ); } - void vkDestroyCudaFunctionNV( VkDevice device, VkCudaFunctionNV function, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyCudaFunctionNV( device, function, pAllocator ); - } void vkDestroyCudaFunctionNV( VkDevice device, VkCudaFunctionNV function, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT { return ::vkDestroyCudaFunctionNV( device, function, pAllocator ); @@ -8109,54 +4949,29 @@ namespace VULKAN_HPP_NAMESPACE { return ::vkCmdCudaLaunchKernelNV( commandBuffer, pLaunchInfo ); } - void vkCmdCudaLaunchKernelNV( VkCommandBuffer commandBuffer, const VkCudaLaunchInfoNV * pLaunchInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdCudaLaunchKernelNV( commandBuffer, pLaunchInfo ); - } # endif /*VK_ENABLE_BETA_EXTENSIONS*/ # if defined( VK_USE_PLATFORM_METAL_EXT ) //=== VK_EXT_metal_objects === - //=== VK_EXT_metal_objects === void vkExportMetalObjectsEXT( VkDevice device, VkExportMetalObjectsInfoEXT * pMetalObjectsInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkExportMetalObjectsEXT( device, pMetalObjectsInfo ); } - void vkExportMetalObjectsEXT( VkDevice device, VkExportMetalObjectsInfoEXT * pMetalObjectsInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkExportMetalObjectsEXT( device, 
pMetalObjectsInfo ); - } # endif /*VK_USE_PLATFORM_METAL_EXT*/ //=== VK_KHR_synchronization2 === - //=== VK_KHR_synchronization2 === - void vkCmdSetEvent2KHR( VkCommandBuffer commandBuffer, VkEvent event, const VkDependencyInfo * pDependencyInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetEvent2KHR( commandBuffer, event, pDependencyInfo ); - } void vkCmdSetEvent2KHR( VkCommandBuffer commandBuffer, VkEvent event, const VkDependencyInfo * pDependencyInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdSetEvent2KHR( commandBuffer, event, pDependencyInfo ); } - void vkCmdResetEvent2KHR( VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags2 stageMask ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdResetEvent2KHR( commandBuffer, event, stageMask ); - } void vkCmdResetEvent2KHR( VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags2 stageMask ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdResetEvent2KHR( commandBuffer, event, stageMask ); } - void vkCmdWaitEvents2KHR( VkCommandBuffer commandBuffer, - uint32_t eventCount, - const VkEvent * pEvents, - const VkDependencyInfo * pDependencyInfos ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdWaitEvents2KHR( commandBuffer, eventCount, pEvents, pDependencyInfos ); - } void vkCmdWaitEvents2KHR( VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent * pEvents, @@ -8165,54 +4980,29 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCmdWaitEvents2KHR( commandBuffer, eventCount, pEvents, pDependencyInfos ); } - void vkCmdPipelineBarrier2KHR( VkCommandBuffer commandBuffer, const VkDependencyInfo * pDependencyInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdPipelineBarrier2KHR( commandBuffer, pDependencyInfo ); - } void vkCmdPipelineBarrier2KHR( VkCommandBuffer commandBuffer, const VkDependencyInfo * pDependencyInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdPipelineBarrier2KHR( commandBuffer, pDependencyInfo ); } - void - vkCmdWriteTimestamp2KHR( VkCommandBuffer commandBuffer, VkPipelineStageFlags2 stage, VkQueryPool queryPool, uint32_t query ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdWriteTimestamp2KHR( commandBuffer, stage, queryPool, query ); - } void vkCmdWriteTimestamp2KHR( VkCommandBuffer commandBuffer, VkPipelineStageFlags2 stage, VkQueryPool queryPool, uint32_t query ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdWriteTimestamp2KHR( commandBuffer, stage, queryPool, query ); } - VkResult vkQueueSubmit2KHR( VkQueue queue, uint32_t submitCount, const VkSubmitInfo2 * pSubmits, VkFence fence ) const VULKAN_HPP_NOEXCEPT - { - return ::vkQueueSubmit2KHR( queue, submitCount, pSubmits, fence ); - } VkResult vkQueueSubmit2KHR( VkQueue queue, uint32_t submitCount, const VkSubmitInfo2 * pSubmits, VkFence fence ) const VULKAN_HPP_NOEXCEPT { return ::vkQueueSubmit2KHR( queue, submitCount, pSubmits, fence ); } - //=== VK_EXT_descriptor_buffer === //=== VK_EXT_descriptor_buffer === - void vkGetDescriptorSetLayoutSizeEXT( VkDevice device, VkDescriptorSetLayout layout, VkDeviceSize * pLayoutSizeInBytes ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDescriptorSetLayoutSizeEXT( device, layout, pLayoutSizeInBytes ); - } void vkGetDescriptorSetLayoutSizeEXT( VkDevice device, VkDescriptorSetLayout layout, VkDeviceSize * pLayoutSizeInBytes ) const VULKAN_HPP_NOEXCEPT { return ::vkGetDescriptorSetLayoutSizeEXT( device, layout, pLayoutSizeInBytes ); } - void vkGetDescriptorSetLayoutBindingOffsetEXT( VkDevice device, - VkDescriptorSetLayout layout, - uint32_t binding, - VkDeviceSize * pOffset ) const VULKAN_HPP_NOEXCEPT - { - return 
::vkGetDescriptorSetLayoutBindingOffsetEXT( device, layout, binding, pOffset ); - } void vkGetDescriptorSetLayoutBindingOffsetEXT( VkDevice device, VkDescriptorSetLayout layout, uint32_t binding, @@ -8221,21 +5011,11 @@ namespace VULKAN_HPP_NAMESPACE return ::vkGetDescriptorSetLayoutBindingOffsetEXT( device, layout, binding, pOffset ); } - void vkGetDescriptorEXT( VkDevice device, const VkDescriptorGetInfoEXT * pDescriptorInfo, size_t dataSize, void * pDescriptor ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDescriptorEXT( device, pDescriptorInfo, dataSize, pDescriptor ); - } void vkGetDescriptorEXT( VkDevice device, const VkDescriptorGetInfoEXT * pDescriptorInfo, size_t dataSize, void * pDescriptor ) const VULKAN_HPP_NOEXCEPT { return ::vkGetDescriptorEXT( device, pDescriptorInfo, dataSize, pDescriptor ); } - void vkCmdBindDescriptorBuffersEXT( VkCommandBuffer commandBuffer, - uint32_t bufferCount, - const VkDescriptorBufferBindingInfoEXT * pBindingInfos ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBindDescriptorBuffersEXT( commandBuffer, bufferCount, pBindingInfos ); - } void vkCmdBindDescriptorBuffersEXT( VkCommandBuffer commandBuffer, uint32_t bufferCount, const VkDescriptorBufferBindingInfoEXT * pBindingInfos ) const VULKAN_HPP_NOEXCEPT @@ -8243,16 +5023,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCmdBindDescriptorBuffersEXT( commandBuffer, bufferCount, pBindingInfos ); } - void vkCmdSetDescriptorBufferOffsetsEXT( VkCommandBuffer commandBuffer, - VkPipelineBindPoint pipelineBindPoint, - VkPipelineLayout layout, - uint32_t firstSet, - uint32_t setCount, - const uint32_t * pBufferIndices, - const VkDeviceSize * pOffsets ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetDescriptorBufferOffsetsEXT( commandBuffer, pipelineBindPoint, layout, firstSet, setCount, pBufferIndices, pOffsets ); - } void vkCmdSetDescriptorBufferOffsetsEXT( VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, @@ -8264,13 +5034,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCmdSetDescriptorBufferOffsetsEXT( commandBuffer, pipelineBindPoint, layout, firstSet, setCount, pBufferIndices, pOffsets ); } - void vkCmdBindDescriptorBufferEmbeddedSamplersEXT( VkCommandBuffer commandBuffer, - VkPipelineBindPoint pipelineBindPoint, - VkPipelineLayout layout, - uint32_t set ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBindDescriptorBufferEmbeddedSamplersEXT( commandBuffer, pipelineBindPoint, layout, set ); - } void vkCmdBindDescriptorBufferEmbeddedSamplersEXT( VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, @@ -8279,34 +5042,18 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCmdBindDescriptorBufferEmbeddedSamplersEXT( commandBuffer, pipelineBindPoint, layout, set ); } - VkResult - vkGetBufferOpaqueCaptureDescriptorDataEXT( VkDevice device, const VkBufferCaptureDescriptorDataInfoEXT * pInfo, void * pData ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetBufferOpaqueCaptureDescriptorDataEXT( device, pInfo, pData ); - } VkResult vkGetBufferOpaqueCaptureDescriptorDataEXT( VkDevice device, const VkBufferCaptureDescriptorDataInfoEXT * pInfo, void * pData ) const VULKAN_HPP_NOEXCEPT { return ::vkGetBufferOpaqueCaptureDescriptorDataEXT( device, pInfo, pData ); } - VkResult - vkGetImageOpaqueCaptureDescriptorDataEXT( VkDevice device, const VkImageCaptureDescriptorDataInfoEXT * pInfo, void * pData ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetImageOpaqueCaptureDescriptorDataEXT( device, pInfo, pData ); - } VkResult 
vkGetImageOpaqueCaptureDescriptorDataEXT( VkDevice device, const VkImageCaptureDescriptorDataInfoEXT * pInfo, void * pData ) const VULKAN_HPP_NOEXCEPT { return ::vkGetImageOpaqueCaptureDescriptorDataEXT( device, pInfo, pData ); } - VkResult vkGetImageViewOpaqueCaptureDescriptorDataEXT( VkDevice device, - const VkImageViewCaptureDescriptorDataInfoEXT * pInfo, - void * pData ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetImageViewOpaqueCaptureDescriptorDataEXT( device, pInfo, pData ); - } VkResult vkGetImageViewOpaqueCaptureDescriptorDataEXT( VkDevice device, const VkImageViewCaptureDescriptorDataInfoEXT * pInfo, void * pData ) const VULKAN_HPP_NOEXCEPT @@ -8314,12 +5061,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkGetImageViewOpaqueCaptureDescriptorDataEXT( device, pInfo, pData ); } - VkResult vkGetSamplerOpaqueCaptureDescriptorDataEXT( VkDevice device, - const VkSamplerCaptureDescriptorDataInfoEXT * pInfo, - void * pData ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetSamplerOpaqueCaptureDescriptorDataEXT( device, pInfo, pData ); - } VkResult vkGetSamplerOpaqueCaptureDescriptorDataEXT( VkDevice device, const VkSamplerCaptureDescriptorDataInfoEXT * pInfo, void * pData ) const VULKAN_HPP_NOEXCEPT @@ -8327,12 +5068,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkGetSamplerOpaqueCaptureDescriptorDataEXT( device, pInfo, pData ); } - VkResult vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT( VkDevice device, - const VkAccelerationStructureCaptureDescriptorDataInfoEXT * pInfo, - void * pData ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT( device, pInfo, pData ); - } VkResult vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT( VkDevice device, const VkAccelerationStructureCaptureDescriptorDataInfoEXT * pInfo, void * pData ) const VULKAN_HPP_NOEXCEPT @@ -8340,15 +5075,8 @@ namespace VULKAN_HPP_NAMESPACE return ::vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT( device, pInfo, pData ); } - //=== VK_NV_fragment_shading_rate_enums === //=== VK_NV_fragment_shading_rate_enums === - void vkCmdSetFragmentShadingRateEnumNV( VkCommandBuffer commandBuffer, - VkFragmentShadingRateNV shadingRate, - const VkFragmentShadingRateCombinerOpKHR combinerOps[2] ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetFragmentShadingRateEnumNV( commandBuffer, shadingRate, combinerOps ); - } void vkCmdSetFragmentShadingRateEnumNV( VkCommandBuffer commandBuffer, VkFragmentShadingRateNV shadingRate, const VkFragmentShadingRateCombinerOpKHR combinerOps[2] ) const VULKAN_HPP_NOEXCEPT @@ -8356,39 +5084,19 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCmdSetFragmentShadingRateEnumNV( commandBuffer, shadingRate, combinerOps ); } - //=== VK_EXT_mesh_shader === //=== VK_EXT_mesh_shader === - void vkCmdDrawMeshTasksEXT( VkCommandBuffer commandBuffer, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDrawMeshTasksEXT( commandBuffer, groupCountX, groupCountY, groupCountZ ); - } void vkCmdDrawMeshTasksEXT( VkCommandBuffer commandBuffer, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdDrawMeshTasksEXT( commandBuffer, groupCountX, groupCountY, groupCountZ ); } - void vkCmdDrawMeshTasksIndirectEXT( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride ) const - VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDrawMeshTasksIndirectEXT( commandBuffer, buffer, offset, drawCount, stride ); - } void 
vkCmdDrawMeshTasksIndirectEXT( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdDrawMeshTasksIndirectEXT( commandBuffer, buffer, offset, drawCount, stride ); } - void vkCmdDrawMeshTasksIndirectCountEXT( VkCommandBuffer commandBuffer, - VkBuffer buffer, - VkDeviceSize offset, - VkBuffer countBuffer, - VkDeviceSize countBufferOffset, - uint32_t maxDrawCount, - uint32_t stride ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDrawMeshTasksIndirectCountEXT( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); - } void vkCmdDrawMeshTasksIndirectCountEXT( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, @@ -8400,83 +5108,48 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCmdDrawMeshTasksIndirectCountEXT( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride ); } - //=== VK_KHR_copy_commands2 === //=== VK_KHR_copy_commands2 === - void vkCmdCopyBuffer2KHR( VkCommandBuffer commandBuffer, const VkCopyBufferInfo2 * pCopyBufferInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdCopyBuffer2KHR( commandBuffer, pCopyBufferInfo ); - } void vkCmdCopyBuffer2KHR( VkCommandBuffer commandBuffer, const VkCopyBufferInfo2 * pCopyBufferInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdCopyBuffer2KHR( commandBuffer, pCopyBufferInfo ); } - void vkCmdCopyImage2KHR( VkCommandBuffer commandBuffer, const VkCopyImageInfo2 * pCopyImageInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdCopyImage2KHR( commandBuffer, pCopyImageInfo ); - } void vkCmdCopyImage2KHR( VkCommandBuffer commandBuffer, const VkCopyImageInfo2 * pCopyImageInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdCopyImage2KHR( commandBuffer, pCopyImageInfo ); } - void vkCmdCopyBufferToImage2KHR( VkCommandBuffer commandBuffer, const VkCopyBufferToImageInfo2 * pCopyBufferToImageInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdCopyBufferToImage2KHR( commandBuffer, pCopyBufferToImageInfo ); - } void vkCmdCopyBufferToImage2KHR( VkCommandBuffer commandBuffer, const VkCopyBufferToImageInfo2 * pCopyBufferToImageInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdCopyBufferToImage2KHR( commandBuffer, pCopyBufferToImageInfo ); } - void vkCmdCopyImageToBuffer2KHR( VkCommandBuffer commandBuffer, const VkCopyImageToBufferInfo2 * pCopyImageToBufferInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdCopyImageToBuffer2KHR( commandBuffer, pCopyImageToBufferInfo ); - } void vkCmdCopyImageToBuffer2KHR( VkCommandBuffer commandBuffer, const VkCopyImageToBufferInfo2 * pCopyImageToBufferInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdCopyImageToBuffer2KHR( commandBuffer, pCopyImageToBufferInfo ); } - void vkCmdBlitImage2KHR( VkCommandBuffer commandBuffer, const VkBlitImageInfo2 * pBlitImageInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBlitImage2KHR( commandBuffer, pBlitImageInfo ); - } void vkCmdBlitImage2KHR( VkCommandBuffer commandBuffer, const VkBlitImageInfo2 * pBlitImageInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdBlitImage2KHR( commandBuffer, pBlitImageInfo ); } - void vkCmdResolveImage2KHR( VkCommandBuffer commandBuffer, const VkResolveImageInfo2 * pResolveImageInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdResolveImage2KHR( commandBuffer, pResolveImageInfo ); - } void vkCmdResolveImage2KHR( VkCommandBuffer commandBuffer, const VkResolveImageInfo2 * pResolveImageInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdResolveImage2KHR( commandBuffer, pResolveImageInfo ); } - //=== 
VK_EXT_device_fault === //=== VK_EXT_device_fault === VkResult vkGetDeviceFaultInfoEXT( VkDevice device, VkDeviceFaultCountsEXT * pFaultCounts, VkDeviceFaultInfoEXT * pFaultInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkGetDeviceFaultInfoEXT( device, pFaultCounts, pFaultInfo ); } - VkResult vkGetDeviceFaultInfoEXT( VkDevice device, VkDeviceFaultCountsEXT * pFaultCounts, VkDeviceFaultInfoEXT * pFaultInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDeviceFaultInfoEXT( device, pFaultCounts, pFaultInfo ); - } # if defined( VK_USE_PLATFORM_WIN32_KHR ) //=== VK_NV_acquire_winrt_display === - //=== VK_NV_acquire_winrt_display === - VkResult vkAcquireWinrtDisplayNV( VkPhysicalDevice physicalDevice, VkDisplayKHR display ) const VULKAN_HPP_NOEXCEPT - { - return ::vkAcquireWinrtDisplayNV( physicalDevice, display ); - } VkResult vkAcquireWinrtDisplayNV( VkPhysicalDevice physicalDevice, VkDisplayKHR display ) const VULKAN_HPP_NOEXCEPT { return ::vkAcquireWinrtDisplayNV( physicalDevice, display ); @@ -8486,23 +5159,11 @@ namespace VULKAN_HPP_NAMESPACE { return ::vkGetWinrtDisplayNV( physicalDevice, deviceRelativeId, pDisplay ); } - VkResult vkGetWinrtDisplayNV( VkPhysicalDevice physicalDevice, uint32_t deviceRelativeId, VkDisplayKHR * pDisplay ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetWinrtDisplayNV( physicalDevice, deviceRelativeId, pDisplay ); - } # endif /*VK_USE_PLATFORM_WIN32_KHR*/ # if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) //=== VK_EXT_directfb_surface === - //=== VK_EXT_directfb_surface === - VkResult vkCreateDirectFBSurfaceEXT( VkInstance instance, - const VkDirectFBSurfaceCreateInfoEXT * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateDirectFBSurfaceEXT( instance, pCreateInfo, pAllocator, pSurface ); - } VkResult vkCreateDirectFBSurfaceEXT( VkInstance instance, const VkDirectFBSurfaceCreateInfoEXT * pCreateInfo, const VkAllocationCallbacks * pAllocator, @@ -8517,16 +5178,9 @@ namespace VULKAN_HPP_NAMESPACE { return ::vkGetPhysicalDeviceDirectFBPresentationSupportEXT( physicalDevice, queueFamilyIndex, dfb ); } - VkBool32 vkGetPhysicalDeviceDirectFBPresentationSupportEXT( VkPhysicalDevice physicalDevice, - uint32_t queueFamilyIndex, - IDirectFB * dfb ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceDirectFBPresentationSupportEXT( physicalDevice, queueFamilyIndex, dfb ); - } # endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ //=== VK_EXT_vertex_input_dynamic_state === - //=== VK_EXT_vertex_input_dynamic_state === void vkCmdSetVertexInputEXT( VkCommandBuffer commandBuffer, uint32_t vertexBindingDescriptionCount, @@ -8537,26 +5191,10 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCmdSetVertexInputEXT( commandBuffer, vertexBindingDescriptionCount, pVertexBindingDescriptions, vertexAttributeDescriptionCount, pVertexAttributeDescriptions ); } - void vkCmdSetVertexInputEXT( VkCommandBuffer commandBuffer, - uint32_t vertexBindingDescriptionCount, - const VkVertexInputBindingDescription2EXT * pVertexBindingDescriptions, - uint32_t vertexAttributeDescriptionCount, - const VkVertexInputAttributeDescription2EXT * pVertexAttributeDescriptions ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetVertexInputEXT( - commandBuffer, vertexBindingDescriptionCount, pVertexBindingDescriptions, vertexAttributeDescriptionCount, pVertexAttributeDescriptions ); - } # if defined( VK_USE_PLATFORM_FUCHSIA ) //=== VK_FUCHSIA_external_memory === - //=== VK_FUCHSIA_external_memory === - VkResult vkGetMemoryZirconHandleFUCHSIA( 
VkDevice device, - const VkMemoryGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo, - zx_handle_t * pZirconHandle ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetMemoryZirconHandleFUCHSIA( device, pGetZirconHandleInfo, pZirconHandle ); - } VkResult vkGetMemoryZirconHandleFUCHSIA( VkDevice device, const VkMemoryGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo, zx_handle_t * pZirconHandle ) const VULKAN_HPP_NOEXCEPT @@ -8571,25 +5209,11 @@ namespace VULKAN_HPP_NAMESPACE { return ::vkGetMemoryZirconHandlePropertiesFUCHSIA( device, handleType, zirconHandle, pMemoryZirconHandleProperties ); } - VkResult vkGetMemoryZirconHandlePropertiesFUCHSIA( VkDevice device, - VkExternalMemoryHandleTypeFlagBits handleType, - zx_handle_t zirconHandle, - VkMemoryZirconHandlePropertiesFUCHSIA * pMemoryZirconHandleProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetMemoryZirconHandlePropertiesFUCHSIA( device, handleType, zirconHandle, pMemoryZirconHandleProperties ); - } # endif /*VK_USE_PLATFORM_FUCHSIA*/ # if defined( VK_USE_PLATFORM_FUCHSIA ) //=== VK_FUCHSIA_external_semaphore === - //=== VK_FUCHSIA_external_semaphore === - VkResult - vkImportSemaphoreZirconHandleFUCHSIA( VkDevice device, - const VkImportSemaphoreZirconHandleInfoFUCHSIA * pImportSemaphoreZirconHandleInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkImportSemaphoreZirconHandleFUCHSIA( device, pImportSemaphoreZirconHandleInfo ); - } VkResult vkImportSemaphoreZirconHandleFUCHSIA( VkDevice device, const VkImportSemaphoreZirconHandleInfoFUCHSIA * pImportSemaphoreZirconHandleInfo ) const VULKAN_HPP_NOEXCEPT @@ -8603,25 +5227,11 @@ namespace VULKAN_HPP_NAMESPACE { return ::vkGetSemaphoreZirconHandleFUCHSIA( device, pGetZirconHandleInfo, pZirconHandle ); } - VkResult vkGetSemaphoreZirconHandleFUCHSIA( VkDevice device, - const VkSemaphoreGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo, - zx_handle_t * pZirconHandle ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetSemaphoreZirconHandleFUCHSIA( device, pGetZirconHandleInfo, pZirconHandle ); - } # endif /*VK_USE_PLATFORM_FUCHSIA*/ # if defined( VK_USE_PLATFORM_FUCHSIA ) //=== VK_FUCHSIA_buffer_collection === - //=== VK_FUCHSIA_buffer_collection === - VkResult vkCreateBufferCollectionFUCHSIA( VkDevice device, - const VkBufferCollectionCreateInfoFUCHSIA * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkBufferCollectionFUCHSIA * pCollection ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateBufferCollectionFUCHSIA( device, pCreateInfo, pAllocator, pCollection ); - } VkResult vkCreateBufferCollectionFUCHSIA( VkDevice device, const VkBufferCollectionCreateInfoFUCHSIA * pCreateInfo, const VkAllocationCallbacks * pAllocator, @@ -8630,12 +5240,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCreateBufferCollectionFUCHSIA( device, pCreateInfo, pAllocator, pCollection ); } - VkResult vkSetBufferCollectionImageConstraintsFUCHSIA( VkDevice device, - VkBufferCollectionFUCHSIA collection, - const VkImageConstraintsInfoFUCHSIA * pImageConstraintsInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkSetBufferCollectionImageConstraintsFUCHSIA( device, collection, pImageConstraintsInfo ); - } VkResult vkSetBufferCollectionImageConstraintsFUCHSIA( VkDevice device, VkBufferCollectionFUCHSIA collection, const VkImageConstraintsInfoFUCHSIA * pImageConstraintsInfo ) const VULKAN_HPP_NOEXCEPT @@ -8643,12 +5247,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkSetBufferCollectionImageConstraintsFUCHSIA( device, collection, pImageConstraintsInfo ); } - VkResult vkSetBufferCollectionBufferConstraintsFUCHSIA( VkDevice 
device, - VkBufferCollectionFUCHSIA collection, - const VkBufferConstraintsInfoFUCHSIA * pBufferConstraintsInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkSetBufferCollectionBufferConstraintsFUCHSIA( device, collection, pBufferConstraintsInfo ); - } VkResult vkSetBufferCollectionBufferConstraintsFUCHSIA( VkDevice device, VkBufferCollectionFUCHSIA collection, const VkBufferConstraintsInfoFUCHSIA * pBufferConstraintsInfo ) const VULKAN_HPP_NOEXCEPT @@ -8656,12 +5254,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkSetBufferCollectionBufferConstraintsFUCHSIA( device, collection, pBufferConstraintsInfo ); } - void vkDestroyBufferCollectionFUCHSIA( VkDevice device, - VkBufferCollectionFUCHSIA collection, - const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyBufferCollectionFUCHSIA( device, collection, pAllocator ); - } void vkDestroyBufferCollectionFUCHSIA( VkDevice device, VkBufferCollectionFUCHSIA collection, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT @@ -8675,58 +5267,30 @@ namespace VULKAN_HPP_NAMESPACE { return ::vkGetBufferCollectionPropertiesFUCHSIA( device, collection, pProperties ); } - VkResult vkGetBufferCollectionPropertiesFUCHSIA( VkDevice device, - VkBufferCollectionFUCHSIA collection, - VkBufferCollectionPropertiesFUCHSIA * pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetBufferCollectionPropertiesFUCHSIA( device, collection, pProperties ); - } # endif /*VK_USE_PLATFORM_FUCHSIA*/ //=== VK_HUAWEI_subpass_shading === - //=== VK_HUAWEI_subpass_shading === - VkResult - vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI( VkDevice device, VkRenderPass renderpass, VkExtent2D * pMaxWorkgroupSize ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI( device, renderpass, pMaxWorkgroupSize ); - } VkResult vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI( VkDevice device, VkRenderPass renderpass, VkExtent2D * pMaxWorkgroupSize ) const VULKAN_HPP_NOEXCEPT { return ::vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI( device, renderpass, pMaxWorkgroupSize ); } - void vkCmdSubpassShadingHUAWEI( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSubpassShadingHUAWEI( commandBuffer ); - } void vkCmdSubpassShadingHUAWEI( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdSubpassShadingHUAWEI( commandBuffer ); } - //=== VK_HUAWEI_invocation_mask === //=== VK_HUAWEI_invocation_mask === - void vkCmdBindInvocationMaskHUAWEI( VkCommandBuffer commandBuffer, VkImageView imageView, VkImageLayout imageLayout ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBindInvocationMaskHUAWEI( commandBuffer, imageView, imageLayout ); - } void vkCmdBindInvocationMaskHUAWEI( VkCommandBuffer commandBuffer, VkImageView imageView, VkImageLayout imageLayout ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdBindInvocationMaskHUAWEI( commandBuffer, imageView, imageLayout ); } - //=== VK_NV_external_memory_rdma === //=== VK_NV_external_memory_rdma === - VkResult vkGetMemoryRemoteAddressNV( VkDevice device, - const VkMemoryGetRemoteAddressInfoNV * pMemoryGetRemoteAddressInfo, - VkRemoteAddressNV * pAddress ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetMemoryRemoteAddressNV( device, pMemoryGetRemoteAddressInfo, pAddress ); - } VkResult vkGetMemoryRemoteAddressNV( VkDevice device, const VkMemoryGetRemoteAddressInfoNV * pMemoryGetRemoteAddressInfo, VkRemoteAddressNV * pAddress ) const VULKAN_HPP_NOEXCEPT @@ -8734,15 +5298,8 @@ namespace VULKAN_HPP_NAMESPACE return 
::vkGetMemoryRemoteAddressNV( device, pMemoryGetRemoteAddressInfo, pAddress ); } - //=== VK_EXT_pipeline_properties === //=== VK_EXT_pipeline_properties === - VkResult vkGetPipelinePropertiesEXT( VkDevice device, - const VkPipelineInfoEXT * pPipelineInfo, - VkBaseOutStructure * pPipelineProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPipelinePropertiesEXT( device, pPipelineInfo, pPipelineProperties ); - } VkResult vkGetPipelinePropertiesEXT( VkDevice device, const VkPipelineInfoEXT * pPipelineInfo, VkBaseOutStructure * pPipelineProperties ) const VULKAN_HPP_NOEXCEPT @@ -8750,40 +5307,23 @@ namespace VULKAN_HPP_NAMESPACE return ::vkGetPipelinePropertiesEXT( device, pPipelineInfo, pPipelineProperties ); } - //=== VK_EXT_extended_dynamic_state2 === //=== VK_EXT_extended_dynamic_state2 === - void vkCmdSetPatchControlPointsEXT( VkCommandBuffer commandBuffer, uint32_t patchControlPoints ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetPatchControlPointsEXT( commandBuffer, patchControlPoints ); - } void vkCmdSetPatchControlPointsEXT( VkCommandBuffer commandBuffer, uint32_t patchControlPoints ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdSetPatchControlPointsEXT( commandBuffer, patchControlPoints ); } - void vkCmdSetRasterizerDiscardEnableEXT( VkCommandBuffer commandBuffer, VkBool32 rasterizerDiscardEnable ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetRasterizerDiscardEnableEXT( commandBuffer, rasterizerDiscardEnable ); - } void vkCmdSetRasterizerDiscardEnableEXT( VkCommandBuffer commandBuffer, VkBool32 rasterizerDiscardEnable ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdSetRasterizerDiscardEnableEXT( commandBuffer, rasterizerDiscardEnable ); } - void vkCmdSetDepthBiasEnableEXT( VkCommandBuffer commandBuffer, VkBool32 depthBiasEnable ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetDepthBiasEnableEXT( commandBuffer, depthBiasEnable ); - } void vkCmdSetDepthBiasEnableEXT( VkCommandBuffer commandBuffer, VkBool32 depthBiasEnable ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdSetDepthBiasEnableEXT( commandBuffer, depthBiasEnable ); } - void vkCmdSetLogicOpEXT( VkCommandBuffer commandBuffer, VkLogicOp logicOp ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetLogicOpEXT( commandBuffer, logicOp ); - } void vkCmdSetLogicOpEXT( VkCommandBuffer commandBuffer, VkLogicOp logicOp ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdSetLogicOpEXT( commandBuffer, logicOp ); @@ -8793,22 +5333,10 @@ namespace VULKAN_HPP_NAMESPACE { return ::vkCmdSetPrimitiveRestartEnableEXT( commandBuffer, primitiveRestartEnable ); } - void vkCmdSetPrimitiveRestartEnableEXT( VkCommandBuffer commandBuffer, VkBool32 primitiveRestartEnable ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetPrimitiveRestartEnableEXT( commandBuffer, primitiveRestartEnable ); - } # if defined( VK_USE_PLATFORM_SCREEN_QNX ) //=== VK_QNX_screen_surface === - //=== VK_QNX_screen_surface === - VkResult vkCreateScreenSurfaceQNX( VkInstance instance, - const VkScreenSurfaceCreateInfoQNX * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateScreenSurfaceQNX( instance, pCreateInfo, pAllocator, pSurface ); - } VkResult vkCreateScreenSurfaceQNX( VkInstance instance, const VkScreenSurfaceCreateInfoQNX * pCreateInfo, const VkAllocationCallbacks * pAllocator, @@ -8823,50 +5351,24 @@ namespace VULKAN_HPP_NAMESPACE { return ::vkGetPhysicalDeviceScreenPresentationSupportQNX( physicalDevice, queueFamilyIndex, window ); } - VkBool32 vkGetPhysicalDeviceScreenPresentationSupportQNX( 
VkPhysicalDevice physicalDevice, - uint32_t queueFamilyIndex, - struct _screen_window * window ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceScreenPresentationSupportQNX( physicalDevice, queueFamilyIndex, window ); - } # endif /*VK_USE_PLATFORM_SCREEN_QNX*/ //=== VK_EXT_color_write_enable === - //=== VK_EXT_color_write_enable === - void vkCmdSetColorWriteEnableEXT( VkCommandBuffer commandBuffer, uint32_t attachmentCount, const VkBool32 * pColorWriteEnables ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetColorWriteEnableEXT( commandBuffer, attachmentCount, pColorWriteEnables ); - } void vkCmdSetColorWriteEnableEXT( VkCommandBuffer commandBuffer, uint32_t attachmentCount, const VkBool32 * pColorWriteEnables ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdSetColorWriteEnableEXT( commandBuffer, attachmentCount, pColorWriteEnables ); } - //=== VK_KHR_ray_tracing_maintenance1 === //=== VK_KHR_ray_tracing_maintenance1 === - void vkCmdTraceRaysIndirect2KHR( VkCommandBuffer commandBuffer, VkDeviceAddress indirectDeviceAddress ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdTraceRaysIndirect2KHR( commandBuffer, indirectDeviceAddress ); - } void vkCmdTraceRaysIndirect2KHR( VkCommandBuffer commandBuffer, VkDeviceAddress indirectDeviceAddress ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdTraceRaysIndirect2KHR( commandBuffer, indirectDeviceAddress ); } - //=== VK_EXT_multi_draw === //=== VK_EXT_multi_draw === - void vkCmdDrawMultiEXT( VkCommandBuffer commandBuffer, - uint32_t drawCount, - const VkMultiDrawInfoEXT * pVertexInfo, - uint32_t instanceCount, - uint32_t firstInstance, - uint32_t stride ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDrawMultiEXT( commandBuffer, drawCount, pVertexInfo, instanceCount, firstInstance, stride ); - } void vkCmdDrawMultiEXT( VkCommandBuffer commandBuffer, uint32_t drawCount, const VkMultiDrawInfoEXT * pVertexInfo, @@ -8877,16 +5379,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCmdDrawMultiEXT( commandBuffer, drawCount, pVertexInfo, instanceCount, firstInstance, stride ); } - void vkCmdDrawMultiIndexedEXT( VkCommandBuffer commandBuffer, - uint32_t drawCount, - const VkMultiDrawIndexedInfoEXT * pIndexInfo, - uint32_t instanceCount, - uint32_t firstInstance, - uint32_t stride, - const int32_t * pVertexOffset ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDrawMultiIndexedEXT( commandBuffer, drawCount, pIndexInfo, instanceCount, firstInstance, stride, pVertexOffset ); - } void vkCmdDrawMultiIndexedEXT( VkCommandBuffer commandBuffer, uint32_t drawCount, const VkMultiDrawIndexedInfoEXT * pIndexInfo, @@ -8898,16 +5390,8 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCmdDrawMultiIndexedEXT( commandBuffer, drawCount, pIndexInfo, instanceCount, firstInstance, stride, pVertexOffset ); } - //=== VK_EXT_opacity_micromap === //=== VK_EXT_opacity_micromap === - VkResult vkCreateMicromapEXT( VkDevice device, - const VkMicromapCreateInfoEXT * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkMicromapEXT * pMicromap ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateMicromapEXT( device, pCreateInfo, pAllocator, pMicromap ); - } VkResult vkCreateMicromapEXT( VkDevice device, const VkMicromapCreateInfoEXT * pCreateInfo, const VkAllocationCallbacks * pAllocator, @@ -8916,31 +5400,16 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCreateMicromapEXT( device, pCreateInfo, pAllocator, pMicromap ); } - void vkDestroyMicromapEXT( VkDevice device, VkMicromapEXT micromap, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return 
::vkDestroyMicromapEXT( device, micromap, pAllocator ); - } void vkDestroyMicromapEXT( VkDevice device, VkMicromapEXT micromap, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT { return ::vkDestroyMicromapEXT( device, micromap, pAllocator ); } - void vkCmdBuildMicromapsEXT( VkCommandBuffer commandBuffer, uint32_t infoCount, const VkMicromapBuildInfoEXT * pInfos ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBuildMicromapsEXT( commandBuffer, infoCount, pInfos ); - } void vkCmdBuildMicromapsEXT( VkCommandBuffer commandBuffer, uint32_t infoCount, const VkMicromapBuildInfoEXT * pInfos ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdBuildMicromapsEXT( commandBuffer, infoCount, pInfos ); } - VkResult vkBuildMicromapsEXT( VkDevice device, - VkDeferredOperationKHR deferredOperation, - uint32_t infoCount, - const VkMicromapBuildInfoEXT * pInfos ) const VULKAN_HPP_NOEXCEPT - { - return ::vkBuildMicromapsEXT( device, deferredOperation, infoCount, pInfos ); - } VkResult vkBuildMicromapsEXT( VkDevice device, VkDeferredOperationKHR deferredOperation, uint32_t infoCount, @@ -8949,21 +5418,11 @@ namespace VULKAN_HPP_NAMESPACE return ::vkBuildMicromapsEXT( device, deferredOperation, infoCount, pInfos ); } - VkResult vkCopyMicromapEXT( VkDevice device, VkDeferredOperationKHR deferredOperation, const VkCopyMicromapInfoEXT * pInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCopyMicromapEXT( device, deferredOperation, pInfo ); - } VkResult vkCopyMicromapEXT( VkDevice device, VkDeferredOperationKHR deferredOperation, const VkCopyMicromapInfoEXT * pInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkCopyMicromapEXT( device, deferredOperation, pInfo ); } - VkResult vkCopyMicromapToMemoryEXT( VkDevice device, - VkDeferredOperationKHR deferredOperation, - const VkCopyMicromapToMemoryInfoEXT * pInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCopyMicromapToMemoryEXT( device, deferredOperation, pInfo ); - } VkResult vkCopyMicromapToMemoryEXT( VkDevice device, VkDeferredOperationKHR deferredOperation, const VkCopyMicromapToMemoryInfoEXT * pInfo ) const VULKAN_HPP_NOEXCEPT @@ -8971,12 +5430,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCopyMicromapToMemoryEXT( device, deferredOperation, pInfo ); } - VkResult vkCopyMemoryToMicromapEXT( VkDevice device, - VkDeferredOperationKHR deferredOperation, - const VkCopyMemoryToMicromapInfoEXT * pInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCopyMemoryToMicromapEXT( device, deferredOperation, pInfo ); - } VkResult vkCopyMemoryToMicromapEXT( VkDevice device, VkDeferredOperationKHR deferredOperation, const VkCopyMemoryToMicromapInfoEXT * pInfo ) const VULKAN_HPP_NOEXCEPT @@ -8984,16 +5437,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCopyMemoryToMicromapEXT( device, deferredOperation, pInfo ); } - VkResult vkWriteMicromapsPropertiesEXT( VkDevice device, - uint32_t micromapCount, - const VkMicromapEXT * pMicromaps, - VkQueryType queryType, - size_t dataSize, - void * pData, - size_t stride ) const VULKAN_HPP_NOEXCEPT - { - return ::vkWriteMicromapsPropertiesEXT( device, micromapCount, pMicromaps, queryType, dataSize, pData, stride ); - } VkResult vkWriteMicromapsPropertiesEXT( VkDevice device, uint32_t micromapCount, const VkMicromapEXT * pMicromaps, @@ -9005,42 +5448,21 @@ namespace VULKAN_HPP_NAMESPACE return ::vkWriteMicromapsPropertiesEXT( device, micromapCount, pMicromaps, queryType, dataSize, pData, stride ); } - void vkCmdCopyMicromapEXT( VkCommandBuffer commandBuffer, const VkCopyMicromapInfoEXT * pInfo ) const VULKAN_HPP_NOEXCEPT - { - return 
::vkCmdCopyMicromapEXT( commandBuffer, pInfo ); - } void vkCmdCopyMicromapEXT( VkCommandBuffer commandBuffer, const VkCopyMicromapInfoEXT * pInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdCopyMicromapEXT( commandBuffer, pInfo ); } - void vkCmdCopyMicromapToMemoryEXT( VkCommandBuffer commandBuffer, const VkCopyMicromapToMemoryInfoEXT * pInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdCopyMicromapToMemoryEXT( commandBuffer, pInfo ); - } void vkCmdCopyMicromapToMemoryEXT( VkCommandBuffer commandBuffer, const VkCopyMicromapToMemoryInfoEXT * pInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdCopyMicromapToMemoryEXT( commandBuffer, pInfo ); } - void vkCmdCopyMemoryToMicromapEXT( VkCommandBuffer commandBuffer, const VkCopyMemoryToMicromapInfoEXT * pInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdCopyMemoryToMicromapEXT( commandBuffer, pInfo ); - } void vkCmdCopyMemoryToMicromapEXT( VkCommandBuffer commandBuffer, const VkCopyMemoryToMicromapInfoEXT * pInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdCopyMemoryToMicromapEXT( commandBuffer, pInfo ); } - void vkCmdWriteMicromapsPropertiesEXT( VkCommandBuffer commandBuffer, - uint32_t micromapCount, - const VkMicromapEXT * pMicromaps, - VkQueryType queryType, - VkQueryPool queryPool, - uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdWriteMicromapsPropertiesEXT( commandBuffer, micromapCount, pMicromaps, queryType, queryPool, firstQuery ); - } void vkCmdWriteMicromapsPropertiesEXT( VkCommandBuffer commandBuffer, uint32_t micromapCount, const VkMicromapEXT * pMicromaps, @@ -9051,12 +5473,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCmdWriteMicromapsPropertiesEXT( commandBuffer, micromapCount, pMicromaps, queryType, queryPool, firstQuery ); } - void vkGetDeviceMicromapCompatibilityEXT( VkDevice device, - const VkMicromapVersionInfoEXT * pVersionInfo, - VkAccelerationStructureCompatibilityKHR * pCompatibility ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDeviceMicromapCompatibilityEXT( device, pVersionInfo, pCompatibility ); - } void vkGetDeviceMicromapCompatibilityEXT( VkDevice device, const VkMicromapVersionInfoEXT * pVersionInfo, VkAccelerationStructureCompatibilityKHR * pCompatibility ) const VULKAN_HPP_NOEXCEPT @@ -9064,13 +5480,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkGetDeviceMicromapCompatibilityEXT( device, pVersionInfo, pCompatibility ); } - void vkGetMicromapBuildSizesEXT( VkDevice device, - VkAccelerationStructureBuildTypeKHR buildType, - const VkMicromapBuildInfoEXT * pBuildInfo, - VkMicromapBuildSizesInfoEXT * pSizeInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetMicromapBuildSizesEXT( device, buildType, pBuildInfo, pSizeInfo ); - } void vkGetMicromapBuildSizesEXT( VkDevice device, VkAccelerationStructureBuildTypeKHR buildType, const VkMicromapBuildInfoEXT * pBuildInfo, @@ -9079,48 +5488,27 @@ namespace VULKAN_HPP_NAMESPACE return ::vkGetMicromapBuildSizesEXT( device, buildType, pBuildInfo, pSizeInfo ); } - //=== VK_HUAWEI_cluster_culling_shader === //=== VK_HUAWEI_cluster_culling_shader === - void vkCmdDrawClusterHUAWEI( VkCommandBuffer commandBuffer, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDrawClusterHUAWEI( commandBuffer, groupCountX, groupCountY, groupCountZ ); - } void vkCmdDrawClusterHUAWEI( VkCommandBuffer commandBuffer, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdDrawClusterHUAWEI( commandBuffer, groupCountX, groupCountY, groupCountZ ); } - void 
vkCmdDrawClusterIndirectHUAWEI( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDrawClusterIndirectHUAWEI( commandBuffer, buffer, offset ); - } void vkCmdDrawClusterIndirectHUAWEI( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdDrawClusterIndirectHUAWEI( commandBuffer, buffer, offset ); } - //=== VK_EXT_pageable_device_local_memory === //=== VK_EXT_pageable_device_local_memory === - void vkSetDeviceMemoryPriorityEXT( VkDevice device, VkDeviceMemory memory, float priority ) const VULKAN_HPP_NOEXCEPT - { - return ::vkSetDeviceMemoryPriorityEXT( device, memory, priority ); - } void vkSetDeviceMemoryPriorityEXT( VkDevice device, VkDeviceMemory memory, float priority ) const VULKAN_HPP_NOEXCEPT { return ::vkSetDeviceMemoryPriorityEXT( device, memory, priority ); } - //=== VK_KHR_maintenance4 === //=== VK_KHR_maintenance4 === - void vkGetDeviceBufferMemoryRequirementsKHR( VkDevice device, - const VkDeviceBufferMemoryRequirements * pInfo, - VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDeviceBufferMemoryRequirementsKHR( device, pInfo, pMemoryRequirements ); - } void vkGetDeviceBufferMemoryRequirementsKHR( VkDevice device, const VkDeviceBufferMemoryRequirements * pInfo, VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT @@ -9128,12 +5516,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkGetDeviceBufferMemoryRequirementsKHR( device, pInfo, pMemoryRequirements ); } - void vkGetDeviceImageMemoryRequirementsKHR( VkDevice device, - const VkDeviceImageMemoryRequirements * pInfo, - VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDeviceImageMemoryRequirementsKHR( device, pInfo, pMemoryRequirements ); - } void vkGetDeviceImageMemoryRequirementsKHR( VkDevice device, const VkDeviceImageMemoryRequirements * pInfo, VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT @@ -9141,13 +5523,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkGetDeviceImageMemoryRequirementsKHR( device, pInfo, pMemoryRequirements ); } - void vkGetDeviceImageSparseMemoryRequirementsKHR( VkDevice device, - const VkDeviceImageMemoryRequirements * pInfo, - uint32_t * pSparseMemoryRequirementCount, - VkSparseImageMemoryRequirements2 * pSparseMemoryRequirements ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDeviceImageSparseMemoryRequirementsKHR( device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements ); - } void vkGetDeviceImageSparseMemoryRequirementsKHR( VkDevice device, const VkDeviceImageMemoryRequirements * pInfo, uint32_t * pSparseMemoryRequirementCount, @@ -9156,15 +5531,8 @@ namespace VULKAN_HPP_NAMESPACE return ::vkGetDeviceImageSparseMemoryRequirementsKHR( device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements ); } - //=== VK_VALVE_descriptor_set_host_mapping === //=== VK_VALVE_descriptor_set_host_mapping === - void vkGetDescriptorSetLayoutHostMappingInfoVALVE( VkDevice device, - const VkDescriptorSetBindingReferenceVALVE * pBindingReference, - VkDescriptorSetLayoutHostMappingInfoVALVE * pHostMapping ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDescriptorSetLayoutHostMappingInfoVALVE( device, pBindingReference, pHostMapping ); - } void vkGetDescriptorSetLayoutHostMappingInfoVALVE( VkDevice device, const VkDescriptorSetBindingReferenceVALVE * pBindingReference, VkDescriptorSetLayoutHostMappingInfoVALVE * pHostMapping ) const 
VULKAN_HPP_NOEXCEPT @@ -9172,25 +5540,13 @@ namespace VULKAN_HPP_NAMESPACE return ::vkGetDescriptorSetLayoutHostMappingInfoVALVE( device, pBindingReference, pHostMapping ); } - void vkGetDescriptorSetHostMappingVALVE( VkDevice device, VkDescriptorSet descriptorSet, void ** ppData ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDescriptorSetHostMappingVALVE( device, descriptorSet, ppData ); - } void vkGetDescriptorSetHostMappingVALVE( VkDevice device, VkDescriptorSet descriptorSet, void ** ppData ) const VULKAN_HPP_NOEXCEPT { return ::vkGetDescriptorSetHostMappingVALVE( device, descriptorSet, ppData ); } - //=== VK_NV_copy_memory_indirect === //=== VK_NV_copy_memory_indirect === - void vkCmdCopyMemoryIndirectNV( VkCommandBuffer commandBuffer, - VkDeviceAddress copyBufferAddress, - uint32_t copyCount, - uint32_t stride ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdCopyMemoryIndirectNV( commandBuffer, copyBufferAddress, copyCount, stride ); - } void vkCmdCopyMemoryIndirectNV( VkCommandBuffer commandBuffer, VkDeviceAddress copyBufferAddress, uint32_t copyCount, @@ -9199,16 +5555,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCmdCopyMemoryIndirectNV( commandBuffer, copyBufferAddress, copyCount, stride ); } - void vkCmdCopyMemoryToImageIndirectNV( VkCommandBuffer commandBuffer, - VkDeviceAddress copyBufferAddress, - uint32_t copyCount, - uint32_t stride, - VkImage dstImage, - VkImageLayout dstImageLayout, - const VkImageSubresourceLayers * pImageSubresources ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdCopyMemoryToImageIndirectNV( commandBuffer, copyBufferAddress, copyCount, stride, dstImage, dstImageLayout, pImageSubresources ); - } void vkCmdCopyMemoryToImageIndirectNV( VkCommandBuffer commandBuffer, VkDeviceAddress copyBufferAddress, uint32_t copyCount, @@ -9220,15 +5566,8 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCmdCopyMemoryToImageIndirectNV( commandBuffer, copyBufferAddress, copyCount, stride, dstImage, dstImageLayout, pImageSubresources ); } - //=== VK_NV_memory_decompression === //=== VK_NV_memory_decompression === - void vkCmdDecompressMemoryNV( VkCommandBuffer commandBuffer, - uint32_t decompressRegionCount, - const VkDecompressMemoryRegionNV * pDecompressMemoryRegions ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDecompressMemoryNV( commandBuffer, decompressRegionCount, pDecompressMemoryRegions ); - } void vkCmdDecompressMemoryNV( VkCommandBuffer commandBuffer, uint32_t decompressRegionCount, const VkDecompressMemoryRegionNV * pDecompressMemoryRegions ) const VULKAN_HPP_NOEXCEPT @@ -9236,13 +5575,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCmdDecompressMemoryNV( commandBuffer, decompressRegionCount, pDecompressMemoryRegions ); } - void vkCmdDecompressMemoryIndirectCountNV( VkCommandBuffer commandBuffer, - VkDeviceAddress indirectCommandsAddress, - VkDeviceAddress indirectCommandsCountAddress, - uint32_t stride ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdDecompressMemoryIndirectCountNV( commandBuffer, indirectCommandsAddress, indirectCommandsCountAddress, stride ); - } void vkCmdDecompressMemoryIndirectCountNV( VkCommandBuffer commandBuffer, VkDeviceAddress indirectCommandsAddress, VkDeviceAddress indirectCommandsCountAddress, @@ -9251,15 +5583,8 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCmdDecompressMemoryIndirectCountNV( commandBuffer, indirectCommandsAddress, indirectCommandsCountAddress, stride ); } - //=== VK_NV_device_generated_commands_compute === //=== VK_NV_device_generated_commands_compute === - void vkGetPipelineIndirectMemoryRequirementsNV( VkDevice 
device, - const VkComputePipelineCreateInfo * pCreateInfo, - VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPipelineIndirectMemoryRequirementsNV( device, pCreateInfo, pMemoryRequirements ); - } void vkGetPipelineIndirectMemoryRequirementsNV( VkDevice device, const VkComputePipelineCreateInfo * pCreateInfo, VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT @@ -9267,12 +5592,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkGetPipelineIndirectMemoryRequirementsNV( device, pCreateInfo, pMemoryRequirements ); } - void vkCmdUpdatePipelineIndirectBufferNV( VkCommandBuffer commandBuffer, - VkPipelineBindPoint pipelineBindPoint, - VkPipeline pipeline ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdUpdatePipelineIndirectBufferNV( commandBuffer, pipelineBindPoint, pipeline ); - } void vkCmdUpdatePipelineIndirectBufferNV( VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline ) const VULKAN_HPP_NOEXCEPT @@ -9280,88 +5599,48 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCmdUpdatePipelineIndirectBufferNV( commandBuffer, pipelineBindPoint, pipeline ); } - VkDeviceAddress vkGetPipelineIndirectDeviceAddressNV( VkDevice device, const VkPipelineIndirectDeviceAddressInfoNV * pInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPipelineIndirectDeviceAddressNV( device, pInfo ); - } VkDeviceAddress vkGetPipelineIndirectDeviceAddressNV( VkDevice device, const VkPipelineIndirectDeviceAddressInfoNV * pInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkGetPipelineIndirectDeviceAddressNV( device, pInfo ); } - //=== VK_EXT_extended_dynamic_state3 === //=== VK_EXT_extended_dynamic_state3 === - void vkCmdSetDepthClampEnableEXT( VkCommandBuffer commandBuffer, VkBool32 depthClampEnable ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetDepthClampEnableEXT( commandBuffer, depthClampEnable ); - } void vkCmdSetDepthClampEnableEXT( VkCommandBuffer commandBuffer, VkBool32 depthClampEnable ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdSetDepthClampEnableEXT( commandBuffer, depthClampEnable ); } - void vkCmdSetPolygonModeEXT( VkCommandBuffer commandBuffer, VkPolygonMode polygonMode ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetPolygonModeEXT( commandBuffer, polygonMode ); - } void vkCmdSetPolygonModeEXT( VkCommandBuffer commandBuffer, VkPolygonMode polygonMode ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdSetPolygonModeEXT( commandBuffer, polygonMode ); } - void vkCmdSetRasterizationSamplesEXT( VkCommandBuffer commandBuffer, VkSampleCountFlagBits rasterizationSamples ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetRasterizationSamplesEXT( commandBuffer, rasterizationSamples ); - } void vkCmdSetRasterizationSamplesEXT( VkCommandBuffer commandBuffer, VkSampleCountFlagBits rasterizationSamples ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdSetRasterizationSamplesEXT( commandBuffer, rasterizationSamples ); } - void vkCmdSetSampleMaskEXT( VkCommandBuffer commandBuffer, VkSampleCountFlagBits samples, const VkSampleMask * pSampleMask ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetSampleMaskEXT( commandBuffer, samples, pSampleMask ); - } void vkCmdSetSampleMaskEXT( VkCommandBuffer commandBuffer, VkSampleCountFlagBits samples, const VkSampleMask * pSampleMask ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdSetSampleMaskEXT( commandBuffer, samples, pSampleMask ); } - void vkCmdSetAlphaToCoverageEnableEXT( VkCommandBuffer commandBuffer, VkBool32 alphaToCoverageEnable ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetAlphaToCoverageEnableEXT( 
commandBuffer, alphaToCoverageEnable ); - } void vkCmdSetAlphaToCoverageEnableEXT( VkCommandBuffer commandBuffer, VkBool32 alphaToCoverageEnable ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdSetAlphaToCoverageEnableEXT( commandBuffer, alphaToCoverageEnable ); } - void vkCmdSetAlphaToOneEnableEXT( VkCommandBuffer commandBuffer, VkBool32 alphaToOneEnable ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetAlphaToOneEnableEXT( commandBuffer, alphaToOneEnable ); - } void vkCmdSetAlphaToOneEnableEXT( VkCommandBuffer commandBuffer, VkBool32 alphaToOneEnable ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdSetAlphaToOneEnableEXT( commandBuffer, alphaToOneEnable ); } - void vkCmdSetLogicOpEnableEXT( VkCommandBuffer commandBuffer, VkBool32 logicOpEnable ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetLogicOpEnableEXT( commandBuffer, logicOpEnable ); - } void vkCmdSetLogicOpEnableEXT( VkCommandBuffer commandBuffer, VkBool32 logicOpEnable ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdSetLogicOpEnableEXT( commandBuffer, logicOpEnable ); } - void vkCmdSetColorBlendEnableEXT( VkCommandBuffer commandBuffer, - uint32_t firstAttachment, - uint32_t attachmentCount, - const VkBool32 * pColorBlendEnables ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetColorBlendEnableEXT( commandBuffer, firstAttachment, attachmentCount, pColorBlendEnables ); - } void vkCmdSetColorBlendEnableEXT( VkCommandBuffer commandBuffer, uint32_t firstAttachment, uint32_t attachmentCount, @@ -9370,13 +5649,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCmdSetColorBlendEnableEXT( commandBuffer, firstAttachment, attachmentCount, pColorBlendEnables ); } - void vkCmdSetColorBlendEquationEXT( VkCommandBuffer commandBuffer, - uint32_t firstAttachment, - uint32_t attachmentCount, - const VkColorBlendEquationEXT * pColorBlendEquations ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetColorBlendEquationEXT( commandBuffer, firstAttachment, attachmentCount, pColorBlendEquations ); - } void vkCmdSetColorBlendEquationEXT( VkCommandBuffer commandBuffer, uint32_t firstAttachment, uint32_t attachmentCount, @@ -9385,13 +5657,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCmdSetColorBlendEquationEXT( commandBuffer, firstAttachment, attachmentCount, pColorBlendEquations ); } - void vkCmdSetColorWriteMaskEXT( VkCommandBuffer commandBuffer, - uint32_t firstAttachment, - uint32_t attachmentCount, - const VkColorComponentFlags * pColorWriteMasks ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetColorWriteMaskEXT( commandBuffer, firstAttachment, attachmentCount, pColorWriteMasks ); - } void vkCmdSetColorWriteMaskEXT( VkCommandBuffer commandBuffer, uint32_t firstAttachment, uint32_t attachmentCount, @@ -9400,69 +5665,37 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCmdSetColorWriteMaskEXT( commandBuffer, firstAttachment, attachmentCount, pColorWriteMasks ); } - void vkCmdSetTessellationDomainOriginEXT( VkCommandBuffer commandBuffer, VkTessellationDomainOrigin domainOrigin ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetTessellationDomainOriginEXT( commandBuffer, domainOrigin ); - } void vkCmdSetTessellationDomainOriginEXT( VkCommandBuffer commandBuffer, VkTessellationDomainOrigin domainOrigin ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdSetTessellationDomainOriginEXT( commandBuffer, domainOrigin ); } - void vkCmdSetRasterizationStreamEXT( VkCommandBuffer commandBuffer, uint32_t rasterizationStream ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetRasterizationStreamEXT( commandBuffer, rasterizationStream ); - } void vkCmdSetRasterizationStreamEXT( 
VkCommandBuffer commandBuffer, uint32_t rasterizationStream ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdSetRasterizationStreamEXT( commandBuffer, rasterizationStream ); } - void vkCmdSetConservativeRasterizationModeEXT( VkCommandBuffer commandBuffer, - VkConservativeRasterizationModeEXT conservativeRasterizationMode ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetConservativeRasterizationModeEXT( commandBuffer, conservativeRasterizationMode ); - } void vkCmdSetConservativeRasterizationModeEXT( VkCommandBuffer commandBuffer, VkConservativeRasterizationModeEXT conservativeRasterizationMode ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdSetConservativeRasterizationModeEXT( commandBuffer, conservativeRasterizationMode ); } - void vkCmdSetExtraPrimitiveOverestimationSizeEXT( VkCommandBuffer commandBuffer, float extraPrimitiveOverestimationSize ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetExtraPrimitiveOverestimationSizeEXT( commandBuffer, extraPrimitiveOverestimationSize ); - } void vkCmdSetExtraPrimitiveOverestimationSizeEXT( VkCommandBuffer commandBuffer, float extraPrimitiveOverestimationSize ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdSetExtraPrimitiveOverestimationSizeEXT( commandBuffer, extraPrimitiveOverestimationSize ); } - void vkCmdSetDepthClipEnableEXT( VkCommandBuffer commandBuffer, VkBool32 depthClipEnable ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetDepthClipEnableEXT( commandBuffer, depthClipEnable ); - } void vkCmdSetDepthClipEnableEXT( VkCommandBuffer commandBuffer, VkBool32 depthClipEnable ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdSetDepthClipEnableEXT( commandBuffer, depthClipEnable ); } - void vkCmdSetSampleLocationsEnableEXT( VkCommandBuffer commandBuffer, VkBool32 sampleLocationsEnable ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetSampleLocationsEnableEXT( commandBuffer, sampleLocationsEnable ); - } void vkCmdSetSampleLocationsEnableEXT( VkCommandBuffer commandBuffer, VkBool32 sampleLocationsEnable ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdSetSampleLocationsEnableEXT( commandBuffer, sampleLocationsEnable ); } - void vkCmdSetColorBlendAdvancedEXT( VkCommandBuffer commandBuffer, - uint32_t firstAttachment, - uint32_t attachmentCount, - const VkColorBlendAdvancedEXT * pColorBlendAdvanced ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetColorBlendAdvancedEXT( commandBuffer, firstAttachment, attachmentCount, pColorBlendAdvanced ); - } void vkCmdSetColorBlendAdvancedEXT( VkCommandBuffer commandBuffer, uint32_t firstAttachment, uint32_t attachmentCount, @@ -9471,58 +5704,31 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCmdSetColorBlendAdvancedEXT( commandBuffer, firstAttachment, attachmentCount, pColorBlendAdvanced ); } - void vkCmdSetProvokingVertexModeEXT( VkCommandBuffer commandBuffer, VkProvokingVertexModeEXT provokingVertexMode ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetProvokingVertexModeEXT( commandBuffer, provokingVertexMode ); - } void vkCmdSetProvokingVertexModeEXT( VkCommandBuffer commandBuffer, VkProvokingVertexModeEXT provokingVertexMode ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdSetProvokingVertexModeEXT( commandBuffer, provokingVertexMode ); } - void vkCmdSetLineRasterizationModeEXT( VkCommandBuffer commandBuffer, VkLineRasterizationModeEXT lineRasterizationMode ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetLineRasterizationModeEXT( commandBuffer, lineRasterizationMode ); - } void vkCmdSetLineRasterizationModeEXT( VkCommandBuffer commandBuffer, VkLineRasterizationModeEXT lineRasterizationMode ) const VULKAN_HPP_NOEXCEPT { return 
::vkCmdSetLineRasterizationModeEXT( commandBuffer, lineRasterizationMode ); } - void vkCmdSetLineStippleEnableEXT( VkCommandBuffer commandBuffer, VkBool32 stippledLineEnable ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetLineStippleEnableEXT( commandBuffer, stippledLineEnable ); - } void vkCmdSetLineStippleEnableEXT( VkCommandBuffer commandBuffer, VkBool32 stippledLineEnable ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdSetLineStippleEnableEXT( commandBuffer, stippledLineEnable ); } - void vkCmdSetDepthClipNegativeOneToOneEXT( VkCommandBuffer commandBuffer, VkBool32 negativeOneToOne ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetDepthClipNegativeOneToOneEXT( commandBuffer, negativeOneToOne ); - } void vkCmdSetDepthClipNegativeOneToOneEXT( VkCommandBuffer commandBuffer, VkBool32 negativeOneToOne ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdSetDepthClipNegativeOneToOneEXT( commandBuffer, negativeOneToOne ); } - void vkCmdSetViewportWScalingEnableNV( VkCommandBuffer commandBuffer, VkBool32 viewportWScalingEnable ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetViewportWScalingEnableNV( commandBuffer, viewportWScalingEnable ); - } void vkCmdSetViewportWScalingEnableNV( VkCommandBuffer commandBuffer, VkBool32 viewportWScalingEnable ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdSetViewportWScalingEnableNV( commandBuffer, viewportWScalingEnable ); } - void vkCmdSetViewportSwizzleNV( VkCommandBuffer commandBuffer, - uint32_t firstViewport, - uint32_t viewportCount, - const VkViewportSwizzleNV * pViewportSwizzles ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetViewportSwizzleNV( commandBuffer, firstViewport, viewportCount, pViewportSwizzles ); - } void vkCmdSetViewportSwizzleNV( VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, @@ -9531,48 +5737,26 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCmdSetViewportSwizzleNV( commandBuffer, firstViewport, viewportCount, pViewportSwizzles ); } - void vkCmdSetCoverageToColorEnableNV( VkCommandBuffer commandBuffer, VkBool32 coverageToColorEnable ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetCoverageToColorEnableNV( commandBuffer, coverageToColorEnable ); - } void vkCmdSetCoverageToColorEnableNV( VkCommandBuffer commandBuffer, VkBool32 coverageToColorEnable ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdSetCoverageToColorEnableNV( commandBuffer, coverageToColorEnable ); } - void vkCmdSetCoverageToColorLocationNV( VkCommandBuffer commandBuffer, uint32_t coverageToColorLocation ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetCoverageToColorLocationNV( commandBuffer, coverageToColorLocation ); - } void vkCmdSetCoverageToColorLocationNV( VkCommandBuffer commandBuffer, uint32_t coverageToColorLocation ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdSetCoverageToColorLocationNV( commandBuffer, coverageToColorLocation ); } - void vkCmdSetCoverageModulationModeNV( VkCommandBuffer commandBuffer, VkCoverageModulationModeNV coverageModulationMode ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetCoverageModulationModeNV( commandBuffer, coverageModulationMode ); - } void vkCmdSetCoverageModulationModeNV( VkCommandBuffer commandBuffer, VkCoverageModulationModeNV coverageModulationMode ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdSetCoverageModulationModeNV( commandBuffer, coverageModulationMode ); } - void vkCmdSetCoverageModulationTableEnableNV( VkCommandBuffer commandBuffer, VkBool32 coverageModulationTableEnable ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetCoverageModulationTableEnableNV( commandBuffer, 
coverageModulationTableEnable ); - } void vkCmdSetCoverageModulationTableEnableNV( VkCommandBuffer commandBuffer, VkBool32 coverageModulationTableEnable ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdSetCoverageModulationTableEnableNV( commandBuffer, coverageModulationTableEnable ); } - void vkCmdSetCoverageModulationTableNV( VkCommandBuffer commandBuffer, - uint32_t coverageModulationTableCount, - const float * pCoverageModulationTable ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetCoverageModulationTableNV( commandBuffer, coverageModulationTableCount, pCoverageModulationTable ); - } void vkCmdSetCoverageModulationTableNV( VkCommandBuffer commandBuffer, uint32_t coverageModulationTableCount, const float * pCoverageModulationTable ) const VULKAN_HPP_NOEXCEPT @@ -9580,51 +5764,28 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCmdSetCoverageModulationTableNV( commandBuffer, coverageModulationTableCount, pCoverageModulationTable ); } - void vkCmdSetShadingRateImageEnableNV( VkCommandBuffer commandBuffer, VkBool32 shadingRateImageEnable ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetShadingRateImageEnableNV( commandBuffer, shadingRateImageEnable ); - } void vkCmdSetShadingRateImageEnableNV( VkCommandBuffer commandBuffer, VkBool32 shadingRateImageEnable ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdSetShadingRateImageEnableNV( commandBuffer, shadingRateImageEnable ); } - void vkCmdSetRepresentativeFragmentTestEnableNV( VkCommandBuffer commandBuffer, VkBool32 representativeFragmentTestEnable ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetRepresentativeFragmentTestEnableNV( commandBuffer, representativeFragmentTestEnable ); - } void vkCmdSetRepresentativeFragmentTestEnableNV( VkCommandBuffer commandBuffer, VkBool32 representativeFragmentTestEnable ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdSetRepresentativeFragmentTestEnableNV( commandBuffer, representativeFragmentTestEnable ); } - void vkCmdSetCoverageReductionModeNV( VkCommandBuffer commandBuffer, VkCoverageReductionModeNV coverageReductionMode ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetCoverageReductionModeNV( commandBuffer, coverageReductionMode ); - } void vkCmdSetCoverageReductionModeNV( VkCommandBuffer commandBuffer, VkCoverageReductionModeNV coverageReductionMode ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdSetCoverageReductionModeNV( commandBuffer, coverageReductionMode ); } - //=== VK_EXT_shader_module_identifier === //=== VK_EXT_shader_module_identifier === - void vkGetShaderModuleIdentifierEXT( VkDevice device, VkShaderModule shaderModule, VkShaderModuleIdentifierEXT * pIdentifier ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetShaderModuleIdentifierEXT( device, shaderModule, pIdentifier ); - } void vkGetShaderModuleIdentifierEXT( VkDevice device, VkShaderModule shaderModule, VkShaderModuleIdentifierEXT * pIdentifier ) const VULKAN_HPP_NOEXCEPT { return ::vkGetShaderModuleIdentifierEXT( device, shaderModule, pIdentifier ); } - void vkGetShaderModuleCreateInfoIdentifierEXT( VkDevice device, - const VkShaderModuleCreateInfo * pCreateInfo, - VkShaderModuleIdentifierEXT * pIdentifier ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetShaderModuleCreateInfoIdentifierEXT( device, pCreateInfo, pIdentifier ); - } void vkGetShaderModuleCreateInfoIdentifierEXT( VkDevice device, const VkShaderModuleCreateInfo * pCreateInfo, VkShaderModuleIdentifierEXT * pIdentifier ) const VULKAN_HPP_NOEXCEPT @@ -9632,16 +5793,8 @@ namespace VULKAN_HPP_NAMESPACE return ::vkGetShaderModuleCreateInfoIdentifierEXT( device, pCreateInfo, pIdentifier ); } - 
//=== VK_NV_optical_flow === //=== VK_NV_optical_flow === - VkResult vkGetPhysicalDeviceOpticalFlowImageFormatsNV( VkPhysicalDevice physicalDevice, - const VkOpticalFlowImageFormatInfoNV * pOpticalFlowImageFormatInfo, - uint32_t * pFormatCount, - VkOpticalFlowImageFormatPropertiesNV * pImageFormatProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceOpticalFlowImageFormatsNV( physicalDevice, pOpticalFlowImageFormatInfo, pFormatCount, pImageFormatProperties ); - } VkResult vkGetPhysicalDeviceOpticalFlowImageFormatsNV( VkPhysicalDevice physicalDevice, const VkOpticalFlowImageFormatInfoNV * pOpticalFlowImageFormatInfo, uint32_t * pFormatCount, @@ -9650,13 +5803,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkGetPhysicalDeviceOpticalFlowImageFormatsNV( physicalDevice, pOpticalFlowImageFormatInfo, pFormatCount, pImageFormatProperties ); } - VkResult vkCreateOpticalFlowSessionNV( VkDevice device, - const VkOpticalFlowSessionCreateInfoNV * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkOpticalFlowSessionNV * pSession ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateOpticalFlowSessionNV( device, pCreateInfo, pAllocator, pSession ); - } VkResult vkCreateOpticalFlowSessionNV( VkDevice device, const VkOpticalFlowSessionCreateInfoNV * pCreateInfo, const VkAllocationCallbacks * pAllocator, @@ -9665,23 +5811,11 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCreateOpticalFlowSessionNV( device, pCreateInfo, pAllocator, pSession ); } - void vkDestroyOpticalFlowSessionNV( VkDevice device, VkOpticalFlowSessionNV session, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyOpticalFlowSessionNV( device, session, pAllocator ); - } void vkDestroyOpticalFlowSessionNV( VkDevice device, VkOpticalFlowSessionNV session, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT { return ::vkDestroyOpticalFlowSessionNV( device, session, pAllocator ); } - VkResult vkBindOpticalFlowSessionImageNV( VkDevice device, - VkOpticalFlowSessionNV session, - VkOpticalFlowSessionBindingPointNV bindingPoint, - VkImageView view, - VkImageLayout layout ) const VULKAN_HPP_NOEXCEPT - { - return ::vkBindOpticalFlowSessionImageNV( device, session, bindingPoint, view, layout ); - } VkResult vkBindOpticalFlowSessionImageNV( VkDevice device, VkOpticalFlowSessionNV session, VkOpticalFlowSessionBindingPointNV bindingPoint, @@ -9691,12 +5825,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkBindOpticalFlowSessionImageNV( device, session, bindingPoint, view, layout ); } - void vkCmdOpticalFlowExecuteNV( VkCommandBuffer commandBuffer, - VkOpticalFlowSessionNV session, - const VkOpticalFlowExecuteInfoNV * pExecuteInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdOpticalFlowExecuteNV( commandBuffer, session, pExecuteInfo ); - } void vkCmdOpticalFlowExecuteNV( VkCommandBuffer commandBuffer, VkOpticalFlowSessionNV session, const VkOpticalFlowExecuteInfoNV * pExecuteInfo ) const VULKAN_HPP_NOEXCEPT @@ -9704,64 +5832,31 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCmdOpticalFlowExecuteNV( commandBuffer, session, pExecuteInfo ); } - //=== VK_KHR_maintenance5 === //=== VK_KHR_maintenance5 === - void vkCmdBindIndexBuffer2KHR( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkDeviceSize size, VkIndexType indexType ) const - VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBindIndexBuffer2KHR( commandBuffer, buffer, offset, size, indexType ); - } void vkCmdBindIndexBuffer2KHR( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkDeviceSize 
size, VkIndexType indexType ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdBindIndexBuffer2KHR( commandBuffer, buffer, offset, size, indexType ); } - void vkGetRenderingAreaGranularityKHR( VkDevice device, - const VkRenderingAreaInfoKHR * pRenderingAreaInfo, - VkExtent2D * pGranularity ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetRenderingAreaGranularityKHR( device, pRenderingAreaInfo, pGranularity ); - } - void vkGetRenderingAreaGranularityKHR( VkDevice device, - const VkRenderingAreaInfoKHR * pRenderingAreaInfo, - VkExtent2D * pGranularity ) const VULKAN_HPP_NOEXCEPT + void + vkGetRenderingAreaGranularityKHR( VkDevice device, const VkRenderingAreaInfo * pRenderingAreaInfo, VkExtent2D * pGranularity ) const VULKAN_HPP_NOEXCEPT { return ::vkGetRenderingAreaGranularityKHR( device, pRenderingAreaInfo, pGranularity ); } - void vkGetDeviceImageSubresourceLayoutKHR( VkDevice device, - const VkDeviceImageSubresourceInfoKHR * pInfo, - VkSubresourceLayout2KHR * pLayout ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDeviceImageSubresourceLayoutKHR( device, pInfo, pLayout ); - } - void vkGetDeviceImageSubresourceLayoutKHR( VkDevice device, - const VkDeviceImageSubresourceInfoKHR * pInfo, - VkSubresourceLayout2KHR * pLayout ) const VULKAN_HPP_NOEXCEPT + void vkGetDeviceImageSubresourceLayoutKHR( VkDevice device, + const VkDeviceImageSubresourceInfo * pInfo, + VkSubresourceLayout2 * pLayout ) const VULKAN_HPP_NOEXCEPT { return ::vkGetDeviceImageSubresourceLayoutKHR( device, pInfo, pLayout ); } - void vkGetImageSubresourceLayout2KHR( VkDevice device, - VkImage image, - const VkImageSubresource2KHR * pSubresource, - VkSubresourceLayout2KHR * pLayout ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetImageSubresourceLayout2KHR( device, image, pSubresource, pLayout ); - } - - //=== VK_AMD_anti_lag === - - void vkAntiLagUpdateAMD( VkDevice device, const VkAntiLagDataAMD * pData ) const VULKAN_HPP_NOEXCEPT - { - return ::vkAntiLagUpdateAMD( device, pData ); - } - void vkGetImageSubresourceLayout2KHR( VkDevice device, - VkImage image, - const VkImageSubresource2KHR * pSubresource, - VkSubresourceLayout2KHR * pLayout ) const VULKAN_HPP_NOEXCEPT + void vkGetImageSubresourceLayout2KHR( VkDevice device, + VkImage image, + const VkImageSubresource2 * pSubresource, + VkSubresourceLayout2 * pLayout ) const VULKAN_HPP_NOEXCEPT { return ::vkGetImageSubresourceLayout2KHR( device, image, pSubresource, pLayout ); } @@ -9773,17 +5868,8 @@ namespace VULKAN_HPP_NAMESPACE return ::vkAntiLagUpdateAMD( device, pData ); } - //=== VK_EXT_shader_object === //=== VK_EXT_shader_object === - VkResult vkCreateShadersEXT( VkDevice device, - uint32_t createInfoCount, - const VkShaderCreateInfoEXT * pCreateInfos, - const VkAllocationCallbacks * pAllocator, - VkShaderEXT * pShaders ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateShadersEXT( device, createInfoCount, pCreateInfos, pAllocator, pShaders ); - } VkResult vkCreateShadersEXT( VkDevice device, uint32_t createInfoCount, const VkShaderCreateInfoEXT * pCreateInfos, @@ -9793,19 +5879,11 @@ namespace VULKAN_HPP_NAMESPACE return ::vkCreateShadersEXT( device, createInfoCount, pCreateInfos, pAllocator, pShaders ); } - void vkDestroyShaderEXT( VkDevice device, VkShaderEXT shader, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyShaderEXT( device, shader, pAllocator ); - } void vkDestroyShaderEXT( VkDevice device, VkShaderEXT shader, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT { return ::vkDestroyShaderEXT( device, 
shader, pAllocator ); } - VkResult vkGetShaderBinaryDataEXT( VkDevice device, VkShaderEXT shader, size_t * pDataSize, void * pData ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetShaderBinaryDataEXT( device, shader, pDataSize, pData ); - } VkResult vkGetShaderBinaryDataEXT( VkDevice device, VkShaderEXT shader, size_t * pDataSize, void * pData ) const VULKAN_HPP_NOEXCEPT { return ::vkGetShaderBinaryDataEXT( device, shader, pDataSize, pData ); @@ -9857,58 +5935,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkGetPipelineBinaryDataKHR( device, pInfo, pPipelineBinaryKey, pPipelineBinaryDataSize, pPipelineBinaryData ); } - VkResult vkReleaseCapturedPipelineDataKHR( VkDevice device, - const VkReleaseCapturedPipelineDataInfoKHR * pInfo, - const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkReleaseCapturedPipelineDataKHR( device, pInfo, pAllocator ); - } - void vkCmdBindShadersEXT( VkCommandBuffer commandBuffer, - uint32_t stageCount, - const VkShaderStageFlagBits * pStages, - const VkShaderEXT * pShaders ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBindShadersEXT( commandBuffer, stageCount, pStages, pShaders ); - } - - void vkCmdSetDepthClampRangeEXT( VkCommandBuffer commandBuffer, - VkDepthClampModeEXT depthClampMode, - const VkDepthClampRangeEXT * pDepthClampRange ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetDepthClampRangeEXT( commandBuffer, depthClampMode, pDepthClampRange ); - } - - //=== VK_KHR_pipeline_binary === - - VkResult vkCreatePipelineBinariesKHR( VkDevice device, - const VkPipelineBinaryCreateInfoKHR * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkPipelineBinaryHandlesInfoKHR * pBinaries ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreatePipelineBinariesKHR( device, pCreateInfo, pAllocator, pBinaries ); - } - - void vkDestroyPipelineBinaryKHR( VkDevice device, VkPipelineBinaryKHR pipelineBinary, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyPipelineBinaryKHR( device, pipelineBinary, pAllocator ); - } - - VkResult vkGetPipelineKeyKHR( VkDevice device, - const VkPipelineCreateInfoKHR * pPipelineCreateInfo, - VkPipelineBinaryKeyKHR * pPipelineKey ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPipelineKeyKHR( device, pPipelineCreateInfo, pPipelineKey ); - } - - VkResult vkGetPipelineBinaryDataKHR( VkDevice device, - const VkPipelineBinaryDataInfoKHR * pInfo, - VkPipelineBinaryKeyKHR * pPipelineBinaryKey, - size_t * pPipelineBinaryDataSize, - void * pPipelineBinaryData ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPipelineBinaryDataKHR( device, pInfo, pPipelineBinaryKey, pPipelineBinaryDataSize, pPipelineBinaryData ); - } - VkResult vkReleaseCapturedPipelineDataKHR( VkDevice device, const VkReleaseCapturedPipelineDataInfoKHR * pInfo, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT @@ -9916,16 +5942,8 @@ namespace VULKAN_HPP_NAMESPACE return ::vkReleaseCapturedPipelineDataKHR( device, pInfo, pAllocator ); } - //=== VK_QCOM_tile_properties === //=== VK_QCOM_tile_properties === - VkResult vkGetFramebufferTilePropertiesQCOM( VkDevice device, - VkFramebuffer framebuffer, - uint32_t * pPropertiesCount, - VkTilePropertiesQCOM * pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetFramebufferTilePropertiesQCOM( device, framebuffer, pPropertiesCount, pProperties ); - } VkResult vkGetFramebufferTilePropertiesQCOM( VkDevice device, VkFramebuffer framebuffer, uint32_t * pPropertiesCount, @@ -9934,12 +5952,6 @@ namespace VULKAN_HPP_NAMESPACE return 
::vkGetFramebufferTilePropertiesQCOM( device, framebuffer, pPropertiesCount, pProperties ); } - VkResult vkGetDynamicRenderingTilePropertiesQCOM( VkDevice device, - const VkRenderingInfo * pRenderingInfo, - VkTilePropertiesQCOM * pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetDynamicRenderingTilePropertiesQCOM( device, pRenderingInfo, pProperties ); - } VkResult vkGetDynamicRenderingTilePropertiesQCOM( VkDevice device, const VkRenderingInfo * pRenderingInfo, VkTilePropertiesQCOM * pProperties ) const VULKAN_HPP_NOEXCEPT @@ -9947,63 +5959,35 @@ namespace VULKAN_HPP_NAMESPACE return ::vkGetDynamicRenderingTilePropertiesQCOM( device, pRenderingInfo, pProperties ); } - //=== VK_NV_low_latency2 === //=== VK_NV_low_latency2 === - VkResult vkSetLatencySleepModeNV( VkDevice device, VkSwapchainKHR swapchain, const VkLatencySleepModeInfoNV * pSleepModeInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkSetLatencySleepModeNV( device, swapchain, pSleepModeInfo ); - } VkResult vkSetLatencySleepModeNV( VkDevice device, VkSwapchainKHR swapchain, const VkLatencySleepModeInfoNV * pSleepModeInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkSetLatencySleepModeNV( device, swapchain, pSleepModeInfo ); } - VkResult vkLatencySleepNV( VkDevice device, VkSwapchainKHR swapchain, const VkLatencySleepInfoNV * pSleepInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkLatencySleepNV( device, swapchain, pSleepInfo ); - } VkResult vkLatencySleepNV( VkDevice device, VkSwapchainKHR swapchain, const VkLatencySleepInfoNV * pSleepInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkLatencySleepNV( device, swapchain, pSleepInfo ); } - void vkSetLatencyMarkerNV( VkDevice device, VkSwapchainKHR swapchain, const VkSetLatencyMarkerInfoNV * pLatencyMarkerInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkSetLatencyMarkerNV( device, swapchain, pLatencyMarkerInfo ); - } void vkSetLatencyMarkerNV( VkDevice device, VkSwapchainKHR swapchain, const VkSetLatencyMarkerInfoNV * pLatencyMarkerInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkSetLatencyMarkerNV( device, swapchain, pLatencyMarkerInfo ); } - void vkGetLatencyTimingsNV( VkDevice device, VkSwapchainKHR swapchain, VkGetLatencyMarkerInfoNV * pLatencyMarkerInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetLatencyTimingsNV( device, swapchain, pLatencyMarkerInfo ); - } void vkGetLatencyTimingsNV( VkDevice device, VkSwapchainKHR swapchain, VkGetLatencyMarkerInfoNV * pLatencyMarkerInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkGetLatencyTimingsNV( device, swapchain, pLatencyMarkerInfo ); } - void vkQueueNotifyOutOfBandNV( VkQueue queue, const VkOutOfBandQueueTypeInfoNV * pQueueTypeInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkQueueNotifyOutOfBandNV( queue, pQueueTypeInfo ); - } void vkQueueNotifyOutOfBandNV( VkQueue queue, const VkOutOfBandQueueTypeInfoNV * pQueueTypeInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkQueueNotifyOutOfBandNV( queue, pQueueTypeInfo ); } - //=== VK_KHR_cooperative_matrix === //=== VK_KHR_cooperative_matrix === - VkResult vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR( VkPhysicalDevice physicalDevice, - uint32_t * pPropertyCount, - VkCooperativeMatrixPropertiesKHR * pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR( physicalDevice, pPropertyCount, pProperties ); - } VkResult vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR( VkPhysicalDevice physicalDevice, uint32_t * pPropertyCount, VkCooperativeMatrixPropertiesKHR * pProperties ) const VULKAN_HPP_NOEXCEPT @@ -10011,21 +5995,15 @@ namespace 
VULKAN_HPP_NAMESPACE return ::vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR( physicalDevice, pPropertyCount, pProperties ); } - //=== VK_EXT_attachment_feedback_loop_dynamic_state === //=== VK_EXT_attachment_feedback_loop_dynamic_state === void vkCmdSetAttachmentFeedbackLoopEnableEXT( VkCommandBuffer commandBuffer, VkImageAspectFlags aspectMask ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdSetAttachmentFeedbackLoopEnableEXT( commandBuffer, aspectMask ); } - void vkCmdSetAttachmentFeedbackLoopEnableEXT( VkCommandBuffer commandBuffer, VkImageAspectFlags aspectMask ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetAttachmentFeedbackLoopEnableEXT( commandBuffer, aspectMask ); - } # if defined( VK_USE_PLATFORM_SCREEN_QNX ) //=== VK_QNX_external_memory_screen_buffer === - //=== VK_QNX_external_memory_screen_buffer === VkResult vkGetScreenBufferPropertiesQNX( VkDevice device, const struct _screen_buffer * buffer, @@ -10033,35 +6011,17 @@ namespace VULKAN_HPP_NAMESPACE { return ::vkGetScreenBufferPropertiesQNX( device, buffer, pProperties ); } - VkResult vkGetScreenBufferPropertiesQNX( VkDevice device, - const struct _screen_buffer * buffer, - VkScreenBufferPropertiesQNX * pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetScreenBufferPropertiesQNX( device, buffer, pProperties ); - } # endif /*VK_USE_PLATFORM_SCREEN_QNX*/ //=== VK_KHR_line_rasterization === - //=== VK_KHR_line_rasterization === - void vkCmdSetLineStippleKHR( VkCommandBuffer commandBuffer, uint32_t lineStippleFactor, uint16_t lineStipplePattern ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetLineStippleKHR( commandBuffer, lineStippleFactor, lineStipplePattern ); - } void vkCmdSetLineStippleKHR( VkCommandBuffer commandBuffer, uint32_t lineStippleFactor, uint16_t lineStipplePattern ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdSetLineStippleKHR( commandBuffer, lineStippleFactor, lineStipplePattern ); } - //=== VK_KHR_calibrated_timestamps === //=== VK_KHR_calibrated_timestamps === - VkResult vkGetPhysicalDeviceCalibrateableTimeDomainsKHR( VkPhysicalDevice physicalDevice, - uint32_t * pTimeDomainCount, - VkTimeDomainKHR * pTimeDomains ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceCalibrateableTimeDomainsKHR( physicalDevice, pTimeDomainCount, pTimeDomains ); - } VkResult vkGetPhysicalDeviceCalibrateableTimeDomainsKHR( VkPhysicalDevice physicalDevice, uint32_t * pTimeDomainCount, VkTimeDomainKHR * pTimeDomains ) const VULKAN_HPP_NOEXCEPT @@ -10069,14 +6029,6 @@ namespace VULKAN_HPP_NAMESPACE return ::vkGetPhysicalDeviceCalibrateableTimeDomainsKHR( physicalDevice, pTimeDomainCount, pTimeDomains ); } - VkResult vkGetCalibratedTimestampsKHR( VkDevice device, - uint32_t timestampCount, - const VkCalibratedTimestampInfoKHR * pTimestampInfos, - uint64_t * pTimestamps, - uint64_t * pMaxDeviation ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetCalibratedTimestampsKHR( device, timestampCount, pTimestampInfos, pTimestamps, pMaxDeviation ); - } VkResult vkGetCalibratedTimestampsKHR( VkDevice device, uint32_t timestampCount, const VkCalibratedTimestampInfoKHR * pTimestampInfos, @@ -10086,52 +6038,29 @@ namespace VULKAN_HPP_NAMESPACE return ::vkGetCalibratedTimestampsKHR( device, timestampCount, pTimestampInfos, pTimestamps, pMaxDeviation ); } - //=== VK_KHR_maintenance6 === //=== VK_KHR_maintenance6 === - void vkCmdBindDescriptorSets2KHR( VkCommandBuffer commandBuffer, const VkBindDescriptorSetsInfoKHR * pBindDescriptorSetsInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBindDescriptorSets2KHR( commandBuffer, 
pBindDescriptorSetsInfo ); - } - void vkCmdBindDescriptorSets2KHR( VkCommandBuffer commandBuffer, const VkBindDescriptorSetsInfoKHR * pBindDescriptorSetsInfo ) const VULKAN_HPP_NOEXCEPT + void vkCmdBindDescriptorSets2KHR( VkCommandBuffer commandBuffer, const VkBindDescriptorSetsInfo * pBindDescriptorSetsInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdBindDescriptorSets2KHR( commandBuffer, pBindDescriptorSetsInfo ); } - void vkCmdPushConstants2KHR( VkCommandBuffer commandBuffer, const VkPushConstantsInfoKHR * pPushConstantsInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdPushConstants2KHR( commandBuffer, pPushConstantsInfo ); - } - void vkCmdPushConstants2KHR( VkCommandBuffer commandBuffer, const VkPushConstantsInfoKHR * pPushConstantsInfo ) const VULKAN_HPP_NOEXCEPT + void vkCmdPushConstants2KHR( VkCommandBuffer commandBuffer, const VkPushConstantsInfo * pPushConstantsInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdPushConstants2KHR( commandBuffer, pPushConstantsInfo ); } - void vkCmdPushDescriptorSet2KHR( VkCommandBuffer commandBuffer, const VkPushDescriptorSetInfoKHR * pPushDescriptorSetInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdPushDescriptorSet2KHR( commandBuffer, pPushDescriptorSetInfo ); - } - void vkCmdPushDescriptorSet2KHR( VkCommandBuffer commandBuffer, const VkPushDescriptorSetInfoKHR * pPushDescriptorSetInfo ) const VULKAN_HPP_NOEXCEPT + void vkCmdPushDescriptorSet2KHR( VkCommandBuffer commandBuffer, const VkPushDescriptorSetInfo * pPushDescriptorSetInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdPushDescriptorSet2KHR( commandBuffer, pPushDescriptorSetInfo ); } - void vkCmdPushDescriptorSetWithTemplate2KHR( VkCommandBuffer commandBuffer, - const VkPushDescriptorSetWithTemplateInfoKHR * pPushDescriptorSetWithTemplateInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdPushDescriptorSetWithTemplate2KHR( commandBuffer, pPushDescriptorSetWithTemplateInfo ); - } - void vkCmdPushDescriptorSetWithTemplate2KHR( VkCommandBuffer commandBuffer, - const VkPushDescriptorSetWithTemplateInfoKHR * pPushDescriptorSetWithTemplateInfo ) const VULKAN_HPP_NOEXCEPT + void vkCmdPushDescriptorSetWithTemplate2KHR( VkCommandBuffer commandBuffer, + const VkPushDescriptorSetWithTemplateInfo * pPushDescriptorSetWithTemplateInfo ) const VULKAN_HPP_NOEXCEPT { return ::vkCmdPushDescriptorSetWithTemplate2KHR( commandBuffer, pPushDescriptorSetWithTemplateInfo ); } - void vkCmdSetDescriptorBufferOffsets2EXT( VkCommandBuffer commandBuffer, - const VkSetDescriptorBufferOffsetsInfoEXT * pSetDescriptorBufferOffsetsInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdSetDescriptorBufferOffsets2EXT( commandBuffer, pSetDescriptorBufferOffsetsInfo ); - } void vkCmdSetDescriptorBufferOffsets2EXT( VkCommandBuffer commandBuffer, const VkSetDescriptorBufferOffsetsInfoEXT * pSetDescriptorBufferOffsetsInfo ) const VULKAN_HPP_NOEXCEPT { @@ -10230,108 +6159,6 @@ namespace VULKAN_HPP_NAMESPACE } #endif - } // namespace detail -#if ( 14 <= VULKAN_HPP_CPP_VERSION ) - using std::exchange; -#else - template - VULKAN_HPP_CONSTEXPR_14 VULKAN_HPP_INLINE T exchange( T & obj, U && newValue ) - { - T oldValue = std::move( obj ); - obj = std::forward( newValue ); - return oldValue; - void vkCmdBindDescriptorBufferEmbeddedSamplers2EXT( - VkCommandBuffer commandBuffer, - const VkBindDescriptorBufferEmbeddedSamplersInfoEXT * pBindDescriptorBufferEmbeddedSamplersInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdBindDescriptorBufferEmbeddedSamplers2EXT( commandBuffer, pBindDescriptorBufferEmbeddedSamplersInfo ); - } - - //=== 
VK_EXT_device_generated_commands === - - void vkGetGeneratedCommandsMemoryRequirementsEXT( VkDevice device, - const VkGeneratedCommandsMemoryRequirementsInfoEXT * pInfo, - VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetGeneratedCommandsMemoryRequirementsEXT( device, pInfo, pMemoryRequirements ); - } - - void vkCmdPreprocessGeneratedCommandsEXT( VkCommandBuffer commandBuffer, - const VkGeneratedCommandsInfoEXT * pGeneratedCommandsInfo, - VkCommandBuffer stateCommandBuffer ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdPreprocessGeneratedCommandsEXT( commandBuffer, pGeneratedCommandsInfo, stateCommandBuffer ); - } - - void vkCmdExecuteGeneratedCommandsEXT( VkCommandBuffer commandBuffer, - VkBool32 isPreprocessed, - const VkGeneratedCommandsInfoEXT * pGeneratedCommandsInfo ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCmdExecuteGeneratedCommandsEXT( commandBuffer, isPreprocessed, pGeneratedCommandsInfo ); - } - - VkResult vkCreateIndirectCommandsLayoutEXT( VkDevice device, - const VkIndirectCommandsLayoutCreateInfoEXT * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkIndirectCommandsLayoutEXT * pIndirectCommandsLayout ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateIndirectCommandsLayoutEXT( device, pCreateInfo, pAllocator, pIndirectCommandsLayout ); - } - - void vkDestroyIndirectCommandsLayoutEXT( VkDevice device, - VkIndirectCommandsLayoutEXT indirectCommandsLayout, - const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyIndirectCommandsLayoutEXT( device, indirectCommandsLayout, pAllocator ); - } - - VkResult vkCreateIndirectExecutionSetEXT( VkDevice device, - const VkIndirectExecutionSetCreateInfoEXT * pCreateInfo, - const VkAllocationCallbacks * pAllocator, - VkIndirectExecutionSetEXT * pIndirectExecutionSet ) const VULKAN_HPP_NOEXCEPT - { - return ::vkCreateIndirectExecutionSetEXT( device, pCreateInfo, pAllocator, pIndirectExecutionSet ); - } - - void vkDestroyIndirectExecutionSetEXT( VkDevice device, - VkIndirectExecutionSetEXT indirectExecutionSet, - const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT - { - return ::vkDestroyIndirectExecutionSetEXT( device, indirectExecutionSet, pAllocator ); - } - - void vkUpdateIndirectExecutionSetPipelineEXT( VkDevice device, - VkIndirectExecutionSetEXT indirectExecutionSet, - uint32_t executionSetWriteCount, - const VkWriteIndirectExecutionSetPipelineEXT * pExecutionSetWrites ) const VULKAN_HPP_NOEXCEPT - { - return ::vkUpdateIndirectExecutionSetPipelineEXT( device, indirectExecutionSet, executionSetWriteCount, pExecutionSetWrites ); - } - - void vkUpdateIndirectExecutionSetShaderEXT( VkDevice device, - VkIndirectExecutionSetEXT indirectExecutionSet, - uint32_t executionSetWriteCount, - const VkWriteIndirectExecutionSetShaderEXT * pExecutionSetWrites ) const VULKAN_HPP_NOEXCEPT - { - return ::vkUpdateIndirectExecutionSetShaderEXT( device, indirectExecutionSet, executionSetWriteCount, pExecutionSetWrites ); - } - - //=== VK_NV_cooperative_matrix2 === - - VkResult vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV( - VkPhysicalDevice physicalDevice, uint32_t * pPropertyCount, VkCooperativeMatrixFlexibleDimensionsPropertiesNV * pProperties ) const VULKAN_HPP_NOEXCEPT - { - return ::vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV( physicalDevice, pPropertyCount, pProperties ); - } - }; - - inline DispatchLoaderStatic & getDispatchLoaderStatic() - { - static DispatchLoaderStatic dls; - return dls; - } 
The remaining hunks of this patch update the bundled Vulkan-Hpp header (vulkan.hpp). Most of the diff is de-duplication: the previous copy of the header carried a second, byte-identical copy of many declarations, and these hunks delete the duplicates of the detail::ObjectDestroy, ObjectDestroy<NoParent, Dispatch>, ObjectFree, ObjectRelease and PoolFree deleter classes, of the SystemError-derived exception classes, of the detail::ignore, createResultValueType and resultCheck helpers, of the duplicated constexpr constants (HeaderVersion, Use64BitPtrDefines, MaxPipelineBinaryKeySizeKHR, ...), of the per-extension ExtensionName / SpecVersion constants, and of one duplicated StructExtends specialization.

The rest of the diff bumps the header from a Vulkan 1.3 to a Vulkan 1.4 baseline. The exception type for VK_ERROR_NOT_PERMITTED loses its KHR suffix:

+      class NotPermittedError : public SystemError
+      {
+      public:
+        NotPermittedError( std::string const & message ) : SystemError( make_error_code( Result::eErrorNotPermitted ), message ) {}
+
+        NotPermittedError( char const * message ) : SystemError( make_error_code( Result::eErrorNotPermitted ), message ) {}
+      };

with the matching change in detail::throwResultException:

-          case Result::eErrorNotPermittedKHR: throw NotPermittedKHRError( message );
+          case Result::eErrorNotPermitted: throw NotPermittedError( message );

New constants are added and the header version is advanced:

+    //=== VK_VERSION_1_4 ===
+    VULKAN_HPP_CONSTEXPR_INLINE uint32_t MaxGlobalPrioritySize = VK_MAX_GLOBAL_PRIORITY_SIZE;

-    VULKAN_HPP_CONSTEXPR_INLINE auto HeaderVersionComplete = makeApiVersion( 0, 1, 3, VK_HEADER_VERSION );
+    VULKAN_HPP_CONSTEXPR_INLINE auto ApiVersion14          = makeApiVersion( 0, 1, 4, 0 );
+    VULKAN_HPP_CONSTEXPR_INLINE auto HeaderVersionComplete = makeApiVersion( 0, 1, 4, VK_HEADER_VERSION );

+    //=== VK_EXT_vertex_attribute_robustness ===
+    VULKAN_HPP_CONSTEXPR_INLINE auto EXTVertexAttributeRobustnessExtensionName = VK_EXT_VERTEX_ATTRIBUTE_ROBUSTNESS_EXTENSION_NAME;
+    VULKAN_HPP_CONSTEXPR_INLINE auto EXTVertexAttributeRobustnessSpecVersion   = VK_EXT_VERTEX_ATTRIBUTE_ROBUSTNESS_SPEC_VERSION;

A few StructExtends specializations are also retargeted (hunks @@ -13409,7 +8885,7 @@ through @@ -13436,7 +8912,7 @@), and the final hunk (@@ -14860,6 +10327,547 @@) appends a new "//=== VK_VERSION_1_4 ===" block of StructExtends specializations covering the structures promoted to core in Vulkan 1.4.
}; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + 
}; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + //=== VK_KHR_swapchain === template <> struct StructExtends @@ -15558,70 +11566,6 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_EXT_pipeline_robustness === - template <> - struct StructExtends - { - enum - { - value = true - }; - }; - - template <> - struct StructExtends - { - enum - { - value = true - }; - }; - - template <> - struct StructExtends - { - enum - { - value = true - }; - }; - - template <> - struct StructExtends - { - enum - { - value = true - }; - }; - - template <> - struct StructExtends - { - enum - { - value = true - }; - }; - - template <> - struct StructExtends - { - enum - { - value = true - }; - }; - - template <> - struct StructExtends - { - enum - { - value = true - }; - }; - # if defined( VK_USE_PLATFORM_WIN32_KHR ) //=== VK_KHR_external_memory_win32 === template <> @@ -15695,16 +11639,6 @@ namespace VULKAN_HPP_NAMESPACE }; # endif /*VK_USE_PLATFORM_WIN32_KHR*/ - //=== VK_KHR_push_descriptor === - template <> - struct StructExtends - { - enum - { - value = true - }; - }; - //=== VK_EXT_conditional_rendering === template <> struct StructExtends @@ -15810,33 +11744,6 @@ namespace VULKAN_HPP_NAMESPACE }; }; - template <> - struct StructExtends - { - enum - { - value = true - }; - }; - - template <> - struct StructExtends - { - enum - { - value = true - }; - }; - - template <> - struct StructExtends - { - enum - { - value = true - }; - }; - //=== VK_NV_viewport_swizzle === template <> struct StructExtends @@ -16170,25 +12077,6 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_AMD_mixed_attachment_samples === - template <> - struct StructExtends - { - enum - { - value = true - }; - }; - - template <> - struct StructExtends - { - enum - { - value = true - }; - }; - //=== VK_EXT_sample_locations === template <> struct StructExtends @@ -16752,43 +12640,6 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_KHR_global_priority === - template <> - struct StructExtends - { - enum - { - value = true - }; - }; - - template <> - struct StructExtends - { - enum - { - value = true - }; - }; - - template <> - struct StructExtends - { - enum - { - value = true - }; - }; - - template <> - struct StructExtends - { - enum - { - value = true - }; - }; - //=== VK_AMD_memory_overallocation_behavior === template <> struct StructExtends @@ -16915,15 +12766,6 @@ namespace VULKAN_HPP_NAMESPACE }; }; - template <> - struct StructExtends - { - enum - { - value = true - }; - }; - //=== VK_INTEL_shader_integer_functions2 === template <> struct StructExtends @@ -17037,15 +12879,6 @@ namespace VULKAN_HPP_NAMESPACE }; }; - template <> - struct StructExtends - { - enum - { - value = true - }; - }; - //=== VK_KHR_fragment_shading_rate === 
template <> struct StructExtends @@ -17101,15 +12934,6 @@ namespace VULKAN_HPP_NAMESPACE }; }; - template <> - struct StructExtends - { - enum - { - value = true - }; - }; - //=== VK_AMD_shader_core_properties2 === template <> struct StructExtends @@ -17139,61 +12963,6 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_KHR_dynamic_rendering_local_read === - template <> - struct StructExtends - { - enum - { - value = true - }; - }; - - template <> - struct StructExtends - { - enum - { - value = true - }; - }; - - template <> - struct StructExtends - { - enum - { - value = true - }; - }; - - template <> - struct StructExtends - { - enum - { - value = true - }; - }; - - template <> - struct StructExtends - { - enum - { - value = true - }; - }; - - template <> - struct StructExtends - { - enum - { - value = true - }; - }; - //=== VK_EXT_shader_image_atomic_int64 === template <> struct StructExtends @@ -17355,24 +13124,6 @@ namespace VULKAN_HPP_NAMESPACE }; }; - template <> - struct StructExtends - { - enum - { - value = true - }; - }; - - template <> - struct StructExtends - { - enum - { - value = true - }; - }; - //=== VK_KHR_present_wait === template <> struct StructExtends @@ -17628,52 +13379,6 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_EXT_host_image_copy === - template <> - struct StructExtends - { - enum - { - value = true - }; - }; - - template <> - struct StructExtends - { - enum - { - value = true - }; - }; - - template <> - struct StructExtends - { - enum - { - value = true - }; - }; - - template <> - struct StructExtends - { - enum - { - value = true - }; - }; - - template <> - struct StructExtends - { - enum - { - value = true - }; - }; - //=== VK_EXT_map_memory_placed === template <> struct StructExtends @@ -17703,7 +13408,7 @@ namespace VULKAN_HPP_NAMESPACE }; template <> - struct StructExtends + struct StructExtends { enum { @@ -18882,7 +14587,7 @@ namespace VULKAN_HPP_NAMESPACE }; template <> - struct StructExtends + struct StructExtends { enum { @@ -19089,25 +14794,6 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_EXT_present_mode_fifo_latest_ready === - template <> - struct StructExtends - { - enum - { - value = true - }; - }; - - template <> - struct StructExtends - { - enum - { - value = true - }; - }; - # if defined( VK_USE_PLATFORM_FUCHSIA ) //=== VK_FUCHSIA_external_memory === template <> @@ -19703,25 +15389,6 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_KHR_shader_subgroup_rotate === - template <> - struct StructExtends - { - enum - { - value = true - }; - }; - - template <> - struct StructExtends - { - enum - { - value = true - }; - }; - //=== VK_ARM_scheduling_controls === template <> struct StructExtends @@ -20411,25 +16078,6 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_EXT_pipeline_protected_access === - template <> - struct StructExtends - { - enum - { - value = true - }; - }; - - template <> - struct StructExtends - { - enum - { - value = true - }; - }; - # if defined( VK_USE_PLATFORM_ANDROID_KHR ) //=== VK_ANDROID_external_format_resolve === template <> @@ -20469,125 +16117,6 @@ namespace VULKAN_HPP_NAMESPACE }; # endif /*VK_USE_PLATFORM_ANDROID_KHR*/ - //=== VK_KHR_maintenance5 === - template <> - struct StructExtends - { - enum - { - value = true - }; - }; - - template <> - struct StructExtends - { - enum - { - value = true - }; - }; - - template <> - struct StructExtends - { - enum - { - value = true - }; - }; - - template <> - struct StructExtends - { - enum - { - value = true - }; - }; - - template <> - struct StructExtends - { - enum 
- { - value = true - }; - }; - - template <> - struct StructExtends - { - enum - { - value = true - }; - }; - - template <> - struct StructExtends - { - enum - { - value = true - }; - }; - - template <> - struct StructExtends - { - enum - { - value = true - }; - }; - - template <> - struct StructExtends - { - enum - { - value = true - }; - }; - - template <> - struct StructExtends - { - enum - { - value = true - }; - }; - - template <> - struct StructExtends - { - enum - { - value = true - }; - }; - - //=== VK_AMD_anti_lag === - template <> - struct StructExtends - { - enum - { - value = true - }; - }; - - template <> - struct StructExtends - { - enum - { - value = true - }; - }; - //=== VK_AMD_anti_lag === template <> struct StructExtends @@ -20718,70 +16247,6 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_KHR_pipeline_binary === - template <> - struct StructExtends - { - enum - { - value = true - }; - }; - - template <> - struct StructExtends - { - enum - { - value = true - }; - }; - - template <> - struct StructExtends - { - enum - { - value = true - }; - }; - - template <> - struct StructExtends - { - enum - { - value = true - }; - }; - - template <> - struct StructExtends - { - enum - { - value = true - }; - }; - - template <> - struct StructExtends - { - enum - { - value = true - }; - }; - - template <> - struct StructExtends - { - enum - { - value = true - }; - }; - //=== VK_QCOM_tile_properties === template <> struct StructExtends @@ -20969,34 +16434,6 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_EXT_legacy_vertex_attributes === - template <> - struct StructExtends - { - enum - { - value = true - }; - }; - - template <> - struct StructExtends - { - enum - { - value = true - }; - }; - - template <> - struct StructExtends - { - enum - { - value = true - }; - }; - //=== VK_EXT_layer_settings === template <> struct StructExtends @@ -21203,34 +16640,6 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_KHR_compute_shader_derivatives === - template <> - struct StructExtends - { - enum - { - value = true - }; - }; - - template <> - struct StructExtends - { - enum - { - value = true - }; - }; - - template <> - struct StructExtends - { - enum - { - value = true - }; - }; - //=== VK_KHR_video_decode_av1 === template <> struct StructExtends @@ -21609,62 +17018,6 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_KHR_vertex_attribute_divisor === - template <> - struct StructExtends - { - enum - { - value = true - }; - }; - - template <> - struct StructExtends - { - enum - { - value = true - }; - }; - - template <> - struct StructExtends - { - enum - { - value = true - }; - }; - - template <> - struct StructExtends - { - enum - { - value = true - }; - }; - - //=== VK_KHR_shader_float_controls2 === - template <> - struct StructExtends - { - enum - { - value = true - }; - }; - - template <> - struct StructExtends - { - enum - { - value = true - }; - }; - # if defined( VK_USE_PLATFORM_SCREEN_QNX ) //=== VK_QNX_external_memory_screen_buffer === template <> @@ -21732,127 +17085,6 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_KHR_index_type_uint8 === - template <> - struct StructExtends - { - enum - { - value = true - }; - }; - - template <> - struct StructExtends - { - enum - { - value = true - }; - }; - - //=== VK_KHR_line_rasterization === - template <> - struct StructExtends - { - enum - { - value = true - }; - }; - - template <> - struct StructExtends - { - enum - { - value = true - }; - }; - - template <> - struct StructExtends - { - enum - { - value = true - }; - }; - - template 
<> - struct StructExtends - { - enum - { - value = true - }; - }; - - //=== VK_KHR_shader_expect_assume === - template <> - struct StructExtends - { - enum - { - value = true - }; - }; - - template <> - struct StructExtends - { - enum - { - value = true - }; - }; - - //=== VK_KHR_maintenance6 === - template <> - struct StructExtends - { - enum - { - value = true - }; - }; - - template <> - struct StructExtends - { - enum - { - value = true - }; - }; - - template <> - struct StructExtends - { - enum - { - value = true - }; - }; - - template <> - struct StructExtends - { - enum - { - value = true - }; - }; - - template <> - struct StructExtends - { - enum - { - value = true - }; - }; - //=== VK_NV_descriptor_pool_overallocation === template <> struct StructExtends @@ -22094,90 +17326,6 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_KHR_shader_relaxed_extended_instruction === - template <> - struct StructExtends - { - enum - { - value = true - }; - }; - - template <> - struct StructExtends - { - enum - { - value = true - }; - }; - - //=== VK_NV_command_buffer_inheritance === - template <> - struct StructExtends - { - enum - { - value = true - }; - }; - - template <> - struct StructExtends - { - enum - { - value = true - }; - }; - - //=== VK_KHR_maintenance7 === - template <> - struct StructExtends - { - enum - { - value = true - }; - }; - - template <> - struct StructExtends - { - enum - { - value = true - }; - }; - - template <> - struct StructExtends - { - enum - { - value = true - }; - }; - - template <> - struct StructExtends - { - enum - { - value = true - }; - }; - - template <> - struct StructExtends - { - enum - { - value = true - }; - }; - //=== VK_NV_shader_atomic_float16_vector === template <> struct StructExtends @@ -22216,25 +17364,6 @@ namespace VULKAN_HPP_NAMESPACE }; }; - //=== VK_EXT_shader_replicated_composites === - template <> - struct StructExtends - { - enum - { - value = true - }; - }; - - template <> - struct StructExtends - { - enum - { - value = true - }; - }; - //=== VK_NV_ray_tracing_validation === template <> struct StructExtends @@ -22439,89 +17568,108 @@ namespace VULKAN_HPP_NAMESPACE }; }; + //=== VK_EXT_vertex_attribute_robustness === + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + + template <> + struct StructExtends + { + enum + { + value = true + }; + }; + #endif // VULKAN_HPP_DISABLE_ENHANCED_MODE namespace detail { - namespace detail - { #if VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL class DynamicLoader { public: - class DynamicLoader - { - public: # ifdef VULKAN_HPP_NO_EXCEPTIONS DynamicLoader( std::string const & vulkanLibraryName = {} ) VULKAN_HPP_NOEXCEPT - DynamicLoader( std::string const & vulkanLibraryName = {} ) VULKAN_HPP_NOEXCEPT # else DynamicLoader( std::string const & vulkanLibraryName = {} ) - DynamicLoader( std::string const & vulkanLibraryName = {} ) # endif { if ( !vulkanLibraryName.empty() ) { -# if defined( __unix__ ) || defined( __APPLE__ ) || defined( __QNX__ ) || defined( __Fuchsia__ ) - m_library = dlopen( vulkanLibraryName.c_str(), RTLD_NOW | RTLD_LOCAL ); -# elif defined( _WIN32 ) +# if defined( _WIN32 ) m_library = ::LoadLibraryA( vulkanLibraryName.c_str() ); +# elif defined( __unix__ ) || defined( __APPLE__ ) || defined( __QNX__ ) || defined( __Fuchsia__ ) + m_library = dlopen( vulkanLibraryName.c_str(), RTLD_NOW | RTLD_LOCAL ); # else # error unsupported platform # endif } else { -# if defined( __unix__ ) || defined( __QNX__ ) || defined( __Fuchsia__ ) - m_library = dlopen( "libvulkan.so", RTLD_NOW 
| RTLD_LOCAL ); - if ( m_library == nullptr ) - { - m_library = dlopen( "libvulkan.so.1", RTLD_NOW | RTLD_LOCAL ); - } +# if defined( _WIN32 ) + m_library = ::LoadLibraryA( "vulkan-1.dll" ); # elif defined( __APPLE__ ) m_library = dlopen( "libvulkan.dylib", RTLD_NOW | RTLD_LOCAL ); - if ( m_library == nullptr ) + if ( !m_library ) { m_library = dlopen( "libvulkan.1.dylib", RTLD_NOW | RTLD_LOCAL ); } -# elif defined( _WIN32 ) - m_library = ::LoadLibraryA( "vulkan-1.dll" ); + if ( !m_library ) + { + m_library = dlopen( "libMoltenVK.dylib", RTLD_NOW | RTLD_LOCAL ); + } + // Add support for using Vulkan and MoltenVK in a Framework. App store rules for iOS + // strictly enforce no .dylib's. If they aren't found it just falls through + if ( !m_library ) + { + m_library = dlopen( "vulkan.framework/vulkan", RTLD_NOW | RTLD_LOCAL ); + } + if ( !m_library ) + { + m_library = dlopen( "MoltenVK.framework/MoltenVK", RTLD_NOW | RTLD_LOCAL ); + } + // modern versions of macOS don't search /usr/local/lib automatically contrary to what man dlopen says + // Vulkan SDK uses this as the system-wide installation location, so we're going to fallback to this if all else fails + if ( !m_library && ( getenv( "DYLD_FALLBACK_LIBRARY_PATH" ) == NULL ) ) + { + m_library = dlopen( "/usr/local/lib/libvulkan.dylib", RTLD_NOW | RTLD_LOCAL ); + } +# elif defined( __unix__ ) || defined( __QNX__ ) || defined( __Fuchsia__ ) + m_library = dlopen( "libvulkan.so", RTLD_NOW | RTLD_LOCAL ); + if ( !m_library ) + { + m_library = dlopen( "libvulkan.so.1", RTLD_NOW | RTLD_LOCAL ); + } # else # error unsupported platform # endif } - } # ifndef VULKAN_HPP_NO_EXCEPTIONS - if ( m_library == nullptr ) + if ( !m_library ) { // NOTE there should be an InitializationFailedError, but msvc insists on the symbol does not exist within the scope of this function. throw std::runtime_error( "Failed to load vulkan library!" 
); } # endif } - } - DynamicLoader( DynamicLoader const & ) = delete; DynamicLoader( DynamicLoader const & ) = delete; - DynamicLoader( DynamicLoader && other ) VULKAN_HPP_NOEXCEPT : m_library( other.m_library ) - { - other.m_library = nullptr; - } DynamicLoader( DynamicLoader && other ) VULKAN_HPP_NOEXCEPT : m_library( other.m_library ) { other.m_library = nullptr; } - DynamicLoader & operator=( DynamicLoader const & ) = delete; DynamicLoader & operator=( DynamicLoader const & ) = delete; - DynamicLoader & operator=( DynamicLoader && other ) VULKAN_HPP_NOEXCEPT - { - std::swap( m_library, other.m_library ); - return *this; - } DynamicLoader & operator=( DynamicLoader && other ) VULKAN_HPP_NOEXCEPT { std::swap( m_library, other.m_library ); @@ -22532,68 +17680,45 @@ namespace VULKAN_HPP_NAMESPACE { if ( m_library ) { - ~DynamicLoader() VULKAN_HPP_NOEXCEPT - { - if ( m_library ) - { # if defined( __unix__ ) || defined( __APPLE__ ) || defined( __QNX__ ) || defined( __Fuchsia__ ) dlclose( m_library ); - dlclose( m_library ); # elif defined( _WIN32 ) ::FreeLibrary( m_library ); - ::FreeLibrary( m_library ); # else # error unsupported platform # endif } } - } - } template T getProcAddress( const char * function ) const VULKAN_HPP_NOEXCEPT { - template - T getProcAddress( const char * function ) const VULKAN_HPP_NOEXCEPT - { # if defined( __unix__ ) || defined( __APPLE__ ) || defined( __QNX__ ) || defined( __Fuchsia__ ) return (T)dlsym( m_library, function ); - return (T)dlsym( m_library, function ); # elif defined( _WIN32 ) return ( T )::GetProcAddress( m_library, function ); - return ( T )::GetProcAddress( m_library, function ); # else # error unsupported platform # endif } - } bool success() const VULKAN_HPP_NOEXCEPT { return m_library != nullptr; } - bool success() const VULKAN_HPP_NOEXCEPT - { - return m_library != nullptr; - } private: - private: # if defined( __unix__ ) || defined( __APPLE__ ) || defined( __QNX__ ) || defined( __Fuchsia__ ) void * m_library; - void * m_library; # elif defined( _WIN32 ) ::HINSTANCE m_library; - ::HINSTANCE m_library; # else # error unsupported platform # endif }; - }; #endif using PFN_dummy = void ( * )(); - using PFN_dummy = void ( * )(); class DispatchLoaderDynamic : public DispatchLoaderBase { @@ -22736,177 +17861,7 @@ namespace VULKAN_HPP_NAMESPACE PFN_vkCmdNextSubpass vkCmdNextSubpass = 0; PFN_vkCmdEndRenderPass vkCmdEndRenderPass = 0; PFN_vkCmdExecuteCommands vkCmdExecuteCommands = 0; - class DispatchLoaderDynamic : public DispatchLoaderBase - { - public: - //=== VK_VERSION_1_0 === - PFN_vkCreateInstance vkCreateInstance = 0; - PFN_vkDestroyInstance vkDestroyInstance = 0; - PFN_vkEnumeratePhysicalDevices vkEnumeratePhysicalDevices = 0; - PFN_vkGetPhysicalDeviceFeatures vkGetPhysicalDeviceFeatures = 0; - PFN_vkGetPhysicalDeviceFormatProperties vkGetPhysicalDeviceFormatProperties = 0; - PFN_vkGetPhysicalDeviceImageFormatProperties vkGetPhysicalDeviceImageFormatProperties = 0; - PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties = 0; - PFN_vkGetPhysicalDeviceQueueFamilyProperties vkGetPhysicalDeviceQueueFamilyProperties = 0; - PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties = 0; - PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr = 0; - PFN_vkGetDeviceProcAddr vkGetDeviceProcAddr = 0; - PFN_vkCreateDevice vkCreateDevice = 0; - PFN_vkDestroyDevice vkDestroyDevice = 0; - PFN_vkEnumerateInstanceExtensionProperties vkEnumerateInstanceExtensionProperties = 0; - PFN_vkEnumerateDeviceExtensionProperties 
vkEnumerateDeviceExtensionProperties = 0; - PFN_vkEnumerateInstanceLayerProperties vkEnumerateInstanceLayerProperties = 0; - PFN_vkEnumerateDeviceLayerProperties vkEnumerateDeviceLayerProperties = 0; - PFN_vkGetDeviceQueue vkGetDeviceQueue = 0; - PFN_vkQueueSubmit vkQueueSubmit = 0; - PFN_vkQueueWaitIdle vkQueueWaitIdle = 0; - PFN_vkDeviceWaitIdle vkDeviceWaitIdle = 0; - PFN_vkAllocateMemory vkAllocateMemory = 0; - PFN_vkFreeMemory vkFreeMemory = 0; - PFN_vkMapMemory vkMapMemory = 0; - PFN_vkUnmapMemory vkUnmapMemory = 0; - PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges = 0; - PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges = 0; - PFN_vkGetDeviceMemoryCommitment vkGetDeviceMemoryCommitment = 0; - PFN_vkBindBufferMemory vkBindBufferMemory = 0; - PFN_vkBindImageMemory vkBindImageMemory = 0; - PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements = 0; - PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements = 0; - PFN_vkGetImageSparseMemoryRequirements vkGetImageSparseMemoryRequirements = 0; - PFN_vkGetPhysicalDeviceSparseImageFormatProperties vkGetPhysicalDeviceSparseImageFormatProperties = 0; - PFN_vkQueueBindSparse vkQueueBindSparse = 0; - PFN_vkCreateFence vkCreateFence = 0; - PFN_vkDestroyFence vkDestroyFence = 0; - PFN_vkResetFences vkResetFences = 0; - PFN_vkGetFenceStatus vkGetFenceStatus = 0; - PFN_vkWaitForFences vkWaitForFences = 0; - PFN_vkCreateSemaphore vkCreateSemaphore = 0; - PFN_vkDestroySemaphore vkDestroySemaphore = 0; - PFN_vkCreateEvent vkCreateEvent = 0; - PFN_vkDestroyEvent vkDestroyEvent = 0; - PFN_vkGetEventStatus vkGetEventStatus = 0; - PFN_vkSetEvent vkSetEvent = 0; - PFN_vkResetEvent vkResetEvent = 0; - PFN_vkCreateQueryPool vkCreateQueryPool = 0; - PFN_vkDestroyQueryPool vkDestroyQueryPool = 0; - PFN_vkGetQueryPoolResults vkGetQueryPoolResults = 0; - PFN_vkCreateBuffer vkCreateBuffer = 0; - PFN_vkDestroyBuffer vkDestroyBuffer = 0; - PFN_vkCreateBufferView vkCreateBufferView = 0; - PFN_vkDestroyBufferView vkDestroyBufferView = 0; - PFN_vkCreateImage vkCreateImage = 0; - PFN_vkDestroyImage vkDestroyImage = 0; - PFN_vkGetImageSubresourceLayout vkGetImageSubresourceLayout = 0; - PFN_vkCreateImageView vkCreateImageView = 0; - PFN_vkDestroyImageView vkDestroyImageView = 0; - PFN_vkCreateShaderModule vkCreateShaderModule = 0; - PFN_vkDestroyShaderModule vkDestroyShaderModule = 0; - PFN_vkCreatePipelineCache vkCreatePipelineCache = 0; - PFN_vkDestroyPipelineCache vkDestroyPipelineCache = 0; - PFN_vkGetPipelineCacheData vkGetPipelineCacheData = 0; - PFN_vkMergePipelineCaches vkMergePipelineCaches = 0; - PFN_vkCreateGraphicsPipelines vkCreateGraphicsPipelines = 0; - PFN_vkCreateComputePipelines vkCreateComputePipelines = 0; - PFN_vkDestroyPipeline vkDestroyPipeline = 0; - PFN_vkCreatePipelineLayout vkCreatePipelineLayout = 0; - PFN_vkDestroyPipelineLayout vkDestroyPipelineLayout = 0; - PFN_vkCreateSampler vkCreateSampler = 0; - PFN_vkDestroySampler vkDestroySampler = 0; - PFN_vkCreateDescriptorSetLayout vkCreateDescriptorSetLayout = 0; - PFN_vkDestroyDescriptorSetLayout vkDestroyDescriptorSetLayout = 0; - PFN_vkCreateDescriptorPool vkCreateDescriptorPool = 0; - PFN_vkDestroyDescriptorPool vkDestroyDescriptorPool = 0; - PFN_vkResetDescriptorPool vkResetDescriptorPool = 0; - PFN_vkAllocateDescriptorSets vkAllocateDescriptorSets = 0; - PFN_vkFreeDescriptorSets vkFreeDescriptorSets = 0; - PFN_vkUpdateDescriptorSets vkUpdateDescriptorSets = 0; - PFN_vkCreateFramebuffer vkCreateFramebuffer = 0; - PFN_vkDestroyFramebuffer 
vkDestroyFramebuffer = 0; - PFN_vkCreateRenderPass vkCreateRenderPass = 0; - PFN_vkDestroyRenderPass vkDestroyRenderPass = 0; - PFN_vkGetRenderAreaGranularity vkGetRenderAreaGranularity = 0; - PFN_vkCreateCommandPool vkCreateCommandPool = 0; - PFN_vkDestroyCommandPool vkDestroyCommandPool = 0; - PFN_vkResetCommandPool vkResetCommandPool = 0; - PFN_vkAllocateCommandBuffers vkAllocateCommandBuffers = 0; - PFN_vkFreeCommandBuffers vkFreeCommandBuffers = 0; - PFN_vkBeginCommandBuffer vkBeginCommandBuffer = 0; - PFN_vkEndCommandBuffer vkEndCommandBuffer = 0; - PFN_vkResetCommandBuffer vkResetCommandBuffer = 0; - PFN_vkCmdBindPipeline vkCmdBindPipeline = 0; - PFN_vkCmdSetViewport vkCmdSetViewport = 0; - PFN_vkCmdSetScissor vkCmdSetScissor = 0; - PFN_vkCmdSetLineWidth vkCmdSetLineWidth = 0; - PFN_vkCmdSetDepthBias vkCmdSetDepthBias = 0; - PFN_vkCmdSetBlendConstants vkCmdSetBlendConstants = 0; - PFN_vkCmdSetDepthBounds vkCmdSetDepthBounds = 0; - PFN_vkCmdSetStencilCompareMask vkCmdSetStencilCompareMask = 0; - PFN_vkCmdSetStencilWriteMask vkCmdSetStencilWriteMask = 0; - PFN_vkCmdSetStencilReference vkCmdSetStencilReference = 0; - PFN_vkCmdBindDescriptorSets vkCmdBindDescriptorSets = 0; - PFN_vkCmdBindIndexBuffer vkCmdBindIndexBuffer = 0; - PFN_vkCmdBindVertexBuffers vkCmdBindVertexBuffers = 0; - PFN_vkCmdDraw vkCmdDraw = 0; - PFN_vkCmdDrawIndexed vkCmdDrawIndexed = 0; - PFN_vkCmdDrawIndirect vkCmdDrawIndirect = 0; - PFN_vkCmdDrawIndexedIndirect vkCmdDrawIndexedIndirect = 0; - PFN_vkCmdDispatch vkCmdDispatch = 0; - PFN_vkCmdDispatchIndirect vkCmdDispatchIndirect = 0; - PFN_vkCmdCopyBuffer vkCmdCopyBuffer = 0; - PFN_vkCmdCopyImage vkCmdCopyImage = 0; - PFN_vkCmdBlitImage vkCmdBlitImage = 0; - PFN_vkCmdCopyBufferToImage vkCmdCopyBufferToImage = 0; - PFN_vkCmdCopyImageToBuffer vkCmdCopyImageToBuffer = 0; - PFN_vkCmdUpdateBuffer vkCmdUpdateBuffer = 0; - PFN_vkCmdFillBuffer vkCmdFillBuffer = 0; - PFN_vkCmdClearColorImage vkCmdClearColorImage = 0; - PFN_vkCmdClearDepthStencilImage vkCmdClearDepthStencilImage = 0; - PFN_vkCmdClearAttachments vkCmdClearAttachments = 0; - PFN_vkCmdResolveImage vkCmdResolveImage = 0; - PFN_vkCmdSetEvent vkCmdSetEvent = 0; - PFN_vkCmdResetEvent vkCmdResetEvent = 0; - PFN_vkCmdWaitEvents vkCmdWaitEvents = 0; - PFN_vkCmdPipelineBarrier vkCmdPipelineBarrier = 0; - PFN_vkCmdBeginQuery vkCmdBeginQuery = 0; - PFN_vkCmdEndQuery vkCmdEndQuery = 0; - PFN_vkCmdResetQueryPool vkCmdResetQueryPool = 0; - PFN_vkCmdWriteTimestamp vkCmdWriteTimestamp = 0; - PFN_vkCmdCopyQueryPoolResults vkCmdCopyQueryPoolResults = 0; - PFN_vkCmdPushConstants vkCmdPushConstants = 0; - PFN_vkCmdBeginRenderPass vkCmdBeginRenderPass = 0; - PFN_vkCmdNextSubpass vkCmdNextSubpass = 0; - PFN_vkCmdEndRenderPass vkCmdEndRenderPass = 0; - PFN_vkCmdExecuteCommands vkCmdExecuteCommands = 0; - //=== VK_VERSION_1_1 === - PFN_vkEnumerateInstanceVersion vkEnumerateInstanceVersion = 0; - PFN_vkBindBufferMemory2 vkBindBufferMemory2 = 0; - PFN_vkBindImageMemory2 vkBindImageMemory2 = 0; - PFN_vkGetDeviceGroupPeerMemoryFeatures vkGetDeviceGroupPeerMemoryFeatures = 0; - PFN_vkCmdSetDeviceMask vkCmdSetDeviceMask = 0; - PFN_vkCmdDispatchBase vkCmdDispatchBase = 0; - PFN_vkEnumeratePhysicalDeviceGroups vkEnumeratePhysicalDeviceGroups = 0; - PFN_vkGetImageMemoryRequirements2 vkGetImageMemoryRequirements2 = 0; - PFN_vkGetBufferMemoryRequirements2 vkGetBufferMemoryRequirements2 = 0; - PFN_vkGetImageSparseMemoryRequirements2 vkGetImageSparseMemoryRequirements2 = 0; - PFN_vkGetPhysicalDeviceFeatures2 vkGetPhysicalDeviceFeatures2 = 0; - 
PFN_vkGetPhysicalDeviceProperties2 vkGetPhysicalDeviceProperties2 = 0; - PFN_vkGetPhysicalDeviceFormatProperties2 vkGetPhysicalDeviceFormatProperties2 = 0; - PFN_vkGetPhysicalDeviceImageFormatProperties2 vkGetPhysicalDeviceImageFormatProperties2 = 0; - PFN_vkGetPhysicalDeviceQueueFamilyProperties2 vkGetPhysicalDeviceQueueFamilyProperties2 = 0; - PFN_vkGetPhysicalDeviceMemoryProperties2 vkGetPhysicalDeviceMemoryProperties2 = 0; - PFN_vkGetPhysicalDeviceSparseImageFormatProperties2 vkGetPhysicalDeviceSparseImageFormatProperties2 = 0; - PFN_vkTrimCommandPool vkTrimCommandPool = 0; - PFN_vkGetDeviceQueue2 vkGetDeviceQueue2 = 0; - PFN_vkCreateSamplerYcbcrConversion vkCreateSamplerYcbcrConversion = 0; - PFN_vkDestroySamplerYcbcrConversion vkDestroySamplerYcbcrConversion = 0; - PFN_vkCreateDescriptorUpdateTemplate vkCreateDescriptorUpdateTemplate = 0; - PFN_vkDestroyDescriptorUpdateTemplate vkDestroyDescriptorUpdateTemplate = 0; - PFN_vkUpdateDescriptorSetWithTemplate vkUpdateDescriptorSetWithTemplate = 0; - PFN_vkGetPhysicalDeviceExternalBufferProperties vkGetPhysicalDeviceExternalBufferProperties = 0; - PFN_vkGetPhysicalDeviceExternalFenceProperties vkGetPhysicalDeviceExternalFenceProperties = 0; - PFN_vkGetPhysicalDeviceExternalSemaphoreProperties vkGetPhysicalDeviceExternalSemaphoreProperties = 0; - PFN_vkGetDescriptorSetLayoutSupport vkGetDescriptorSetLayoutSupport = 0; //=== VK_VERSION_1_1 === PFN_vkEnumerateInstanceVersion vkEnumerateInstanceVersion = 0; PFN_vkBindBufferMemory2 vkBindBufferMemory2 = 0; @@ -22937,20 +17892,6 @@ namespace VULKAN_HPP_NAMESPACE PFN_vkGetPhysicalDeviceExternalSemaphoreProperties vkGetPhysicalDeviceExternalSemaphoreProperties = 0; PFN_vkGetDescriptorSetLayoutSupport vkGetDescriptorSetLayoutSupport = 0; - //=== VK_VERSION_1_2 === - PFN_vkCmdDrawIndirectCount vkCmdDrawIndirectCount = 0; - PFN_vkCmdDrawIndexedIndirectCount vkCmdDrawIndexedIndirectCount = 0; - PFN_vkCreateRenderPass2 vkCreateRenderPass2 = 0; - PFN_vkCmdBeginRenderPass2 vkCmdBeginRenderPass2 = 0; - PFN_vkCmdNextSubpass2 vkCmdNextSubpass2 = 0; - PFN_vkCmdEndRenderPass2 vkCmdEndRenderPass2 = 0; - PFN_vkResetQueryPool vkResetQueryPool = 0; - PFN_vkGetSemaphoreCounterValue vkGetSemaphoreCounterValue = 0; - PFN_vkWaitSemaphores vkWaitSemaphores = 0; - PFN_vkSignalSemaphore vkSignalSemaphore = 0; - PFN_vkGetBufferDeviceAddress vkGetBufferDeviceAddress = 0; - PFN_vkGetBufferOpaqueCaptureAddress vkGetBufferOpaqueCaptureAddress = 0; - PFN_vkGetDeviceMemoryOpaqueCaptureAddress vkGetDeviceMemoryOpaqueCaptureAddress = 0; //=== VK_VERSION_1_2 === PFN_vkCmdDrawIndirectCount vkCmdDrawIndirectCount = 0; PFN_vkCmdDrawIndexedIndirectCount vkCmdDrawIndexedIndirectCount = 0; @@ -23004,51 +17945,28 @@ namespace VULKAN_HPP_NAMESPACE PFN_vkGetDeviceBufferMemoryRequirements vkGetDeviceBufferMemoryRequirements = 0; PFN_vkGetDeviceImageMemoryRequirements vkGetDeviceImageMemoryRequirements = 0; PFN_vkGetDeviceImageSparseMemoryRequirements vkGetDeviceImageSparseMemoryRequirements = 0; - //=== VK_VERSION_1_3 === - PFN_vkGetPhysicalDeviceToolProperties vkGetPhysicalDeviceToolProperties = 0; - PFN_vkCreatePrivateDataSlot vkCreatePrivateDataSlot = 0; - PFN_vkDestroyPrivateDataSlot vkDestroyPrivateDataSlot = 0; - PFN_vkSetPrivateData vkSetPrivateData = 0; - PFN_vkGetPrivateData vkGetPrivateData = 0; - PFN_vkCmdSetEvent2 vkCmdSetEvent2 = 0; - PFN_vkCmdResetEvent2 vkCmdResetEvent2 = 0; - PFN_vkCmdWaitEvents2 vkCmdWaitEvents2 = 0; - PFN_vkCmdPipelineBarrier2 vkCmdPipelineBarrier2 = 0; - PFN_vkCmdWriteTimestamp2 vkCmdWriteTimestamp2 = 
0; - PFN_vkQueueSubmit2 vkQueueSubmit2 = 0; - PFN_vkCmdCopyBuffer2 vkCmdCopyBuffer2 = 0; - PFN_vkCmdCopyImage2 vkCmdCopyImage2 = 0; - PFN_vkCmdCopyBufferToImage2 vkCmdCopyBufferToImage2 = 0; - PFN_vkCmdCopyImageToBuffer2 vkCmdCopyImageToBuffer2 = 0; - PFN_vkCmdBlitImage2 vkCmdBlitImage2 = 0; - PFN_vkCmdResolveImage2 vkCmdResolveImage2 = 0; - PFN_vkCmdBeginRendering vkCmdBeginRendering = 0; - PFN_vkCmdEndRendering vkCmdEndRendering = 0; - PFN_vkCmdSetCullMode vkCmdSetCullMode = 0; - PFN_vkCmdSetFrontFace vkCmdSetFrontFace = 0; - PFN_vkCmdSetPrimitiveTopology vkCmdSetPrimitiveTopology = 0; - PFN_vkCmdSetViewportWithCount vkCmdSetViewportWithCount = 0; - PFN_vkCmdSetScissorWithCount vkCmdSetScissorWithCount = 0; - PFN_vkCmdBindVertexBuffers2 vkCmdBindVertexBuffers2 = 0; - PFN_vkCmdSetDepthTestEnable vkCmdSetDepthTestEnable = 0; - PFN_vkCmdSetDepthWriteEnable vkCmdSetDepthWriteEnable = 0; - PFN_vkCmdSetDepthCompareOp vkCmdSetDepthCompareOp = 0; - PFN_vkCmdSetDepthBoundsTestEnable vkCmdSetDepthBoundsTestEnable = 0; - PFN_vkCmdSetStencilTestEnable vkCmdSetStencilTestEnable = 0; - PFN_vkCmdSetStencilOp vkCmdSetStencilOp = 0; - PFN_vkCmdSetRasterizerDiscardEnable vkCmdSetRasterizerDiscardEnable = 0; - PFN_vkCmdSetDepthBiasEnable vkCmdSetDepthBiasEnable = 0; - PFN_vkCmdSetPrimitiveRestartEnable vkCmdSetPrimitiveRestartEnable = 0; - PFN_vkGetDeviceBufferMemoryRequirements vkGetDeviceBufferMemoryRequirements = 0; - PFN_vkGetDeviceImageMemoryRequirements vkGetDeviceImageMemoryRequirements = 0; - PFN_vkGetDeviceImageSparseMemoryRequirements vkGetDeviceImageSparseMemoryRequirements = 0; - //=== VK_KHR_surface === - PFN_vkDestroySurfaceKHR vkDestroySurfaceKHR = 0; - PFN_vkGetPhysicalDeviceSurfaceSupportKHR vkGetPhysicalDeviceSurfaceSupportKHR = 0; - PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR vkGetPhysicalDeviceSurfaceCapabilitiesKHR = 0; - PFN_vkGetPhysicalDeviceSurfaceFormatsKHR vkGetPhysicalDeviceSurfaceFormatsKHR = 0; - PFN_vkGetPhysicalDeviceSurfacePresentModesKHR vkGetPhysicalDeviceSurfacePresentModesKHR = 0; + //=== VK_VERSION_1_4 === + PFN_vkCmdSetLineStipple vkCmdSetLineStipple = 0; + PFN_vkMapMemory2 vkMapMemory2 = 0; + PFN_vkUnmapMemory2 vkUnmapMemory2 = 0; + PFN_vkCmdBindIndexBuffer2 vkCmdBindIndexBuffer2 = 0; + PFN_vkGetRenderingAreaGranularity vkGetRenderingAreaGranularity = 0; + PFN_vkGetDeviceImageSubresourceLayout vkGetDeviceImageSubresourceLayout = 0; + PFN_vkGetImageSubresourceLayout2 vkGetImageSubresourceLayout2 = 0; + PFN_vkCmdPushDescriptorSet vkCmdPushDescriptorSet = 0; + PFN_vkCmdPushDescriptorSetWithTemplate vkCmdPushDescriptorSetWithTemplate = 0; + PFN_vkCmdSetRenderingAttachmentLocations vkCmdSetRenderingAttachmentLocations = 0; + PFN_vkCmdSetRenderingInputAttachmentIndices vkCmdSetRenderingInputAttachmentIndices = 0; + PFN_vkCmdBindDescriptorSets2 vkCmdBindDescriptorSets2 = 0; + PFN_vkCmdPushConstants2 vkCmdPushConstants2 = 0; + PFN_vkCmdPushDescriptorSet2 vkCmdPushDescriptorSet2 = 0; + PFN_vkCmdPushDescriptorSetWithTemplate2 vkCmdPushDescriptorSetWithTemplate2 = 0; + PFN_vkCopyMemoryToImage vkCopyMemoryToImage = 0; + PFN_vkCopyImageToMemory vkCopyImageToMemory = 0; + PFN_vkCopyImageToImage vkCopyImageToImage = 0; + PFN_vkTransitionImageLayout vkTransitionImageLayout = 0; + //=== VK_KHR_surface === PFN_vkDestroySurfaceKHR vkDestroySurfaceKHR = 0; PFN_vkGetPhysicalDeviceSurfaceSupportKHR vkGetPhysicalDeviceSurfaceSupportKHR = 0; @@ -23056,16 +17974,6 @@ namespace VULKAN_HPP_NAMESPACE PFN_vkGetPhysicalDeviceSurfaceFormatsKHR vkGetPhysicalDeviceSurfaceFormatsKHR = 0; 
PFN_vkGetPhysicalDeviceSurfacePresentModesKHR vkGetPhysicalDeviceSurfacePresentModesKHR = 0; - //=== VK_KHR_swapchain === - PFN_vkCreateSwapchainKHR vkCreateSwapchainKHR = 0; - PFN_vkDestroySwapchainKHR vkDestroySwapchainKHR = 0; - PFN_vkGetSwapchainImagesKHR vkGetSwapchainImagesKHR = 0; - PFN_vkAcquireNextImageKHR vkAcquireNextImageKHR = 0; - PFN_vkQueuePresentKHR vkQueuePresentKHR = 0; - PFN_vkGetDeviceGroupPresentCapabilitiesKHR vkGetDeviceGroupPresentCapabilitiesKHR = 0; - PFN_vkGetDeviceGroupSurfacePresentModesKHR vkGetDeviceGroupSurfacePresentModesKHR = 0; - PFN_vkGetPhysicalDevicePresentRectanglesKHR vkGetPhysicalDevicePresentRectanglesKHR = 0; - PFN_vkAcquireNextImage2KHR vkAcquireNextImage2KHR = 0; //=== VK_KHR_swapchain === PFN_vkCreateSwapchainKHR vkCreateSwapchainKHR = 0; PFN_vkDestroySwapchainKHR vkDestroySwapchainKHR = 0; @@ -23077,14 +17985,6 @@ namespace VULKAN_HPP_NAMESPACE PFN_vkGetPhysicalDevicePresentRectanglesKHR vkGetPhysicalDevicePresentRectanglesKHR = 0; PFN_vkAcquireNextImage2KHR vkAcquireNextImage2KHR = 0; - //=== VK_KHR_display === - PFN_vkGetPhysicalDeviceDisplayPropertiesKHR vkGetPhysicalDeviceDisplayPropertiesKHR = 0; - PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR vkGetPhysicalDeviceDisplayPlanePropertiesKHR = 0; - PFN_vkGetDisplayPlaneSupportedDisplaysKHR vkGetDisplayPlaneSupportedDisplaysKHR = 0; - PFN_vkGetDisplayModePropertiesKHR vkGetDisplayModePropertiesKHR = 0; - PFN_vkCreateDisplayModeKHR vkCreateDisplayModeKHR = 0; - PFN_vkGetDisplayPlaneCapabilitiesKHR vkGetDisplayPlaneCapabilitiesKHR = 0; - PFN_vkCreateDisplayPlaneSurfaceKHR vkCreateDisplayPlaneSurfaceKHR = 0; //=== VK_KHR_display === PFN_vkGetPhysicalDeviceDisplayPropertiesKHR vkGetPhysicalDeviceDisplayPropertiesKHR = 0; PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR vkGetPhysicalDeviceDisplayPlanePropertiesKHR = 0; @@ -23096,90 +17996,55 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_KHR_display_swapchain === PFN_vkCreateSharedSwapchainsKHR vkCreateSharedSwapchainsKHR = 0; - //=== VK_KHR_display_swapchain === - PFN_vkCreateSharedSwapchainsKHR vkCreateSharedSwapchainsKHR = 0; #if defined( VK_USE_PLATFORM_XLIB_KHR ) //=== VK_KHR_xlib_surface === PFN_vkCreateXlibSurfaceKHR vkCreateXlibSurfaceKHR = 0; PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR vkGetPhysicalDeviceXlibPresentationSupportKHR = 0; - //=== VK_KHR_xlib_surface === - PFN_vkCreateXlibSurfaceKHR vkCreateXlibSurfaceKHR = 0; - PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR vkGetPhysicalDeviceXlibPresentationSupportKHR = 0; #else PFN_dummy vkCreateXlibSurfaceKHR_placeholder = 0; PFN_dummy vkGetPhysicalDeviceXlibPresentationSupportKHR_placeholder = 0; - PFN_dummy vkCreateXlibSurfaceKHR_placeholder = 0; - PFN_dummy vkGetPhysicalDeviceXlibPresentationSupportKHR_placeholder = 0; #endif /*VK_USE_PLATFORM_XLIB_KHR*/ #if defined( VK_USE_PLATFORM_XCB_KHR ) //=== VK_KHR_xcb_surface === PFN_vkCreateXcbSurfaceKHR vkCreateXcbSurfaceKHR = 0; PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR vkGetPhysicalDeviceXcbPresentationSupportKHR = 0; - //=== VK_KHR_xcb_surface === - PFN_vkCreateXcbSurfaceKHR vkCreateXcbSurfaceKHR = 0; - PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR vkGetPhysicalDeviceXcbPresentationSupportKHR = 0; #else PFN_dummy vkCreateXcbSurfaceKHR_placeholder = 0; PFN_dummy vkGetPhysicalDeviceXcbPresentationSupportKHR_placeholder = 0; - PFN_dummy vkCreateXcbSurfaceKHR_placeholder = 0; - PFN_dummy vkGetPhysicalDeviceXcbPresentationSupportKHR_placeholder = 0; #endif /*VK_USE_PLATFORM_XCB_KHR*/ #if defined( VK_USE_PLATFORM_WAYLAND_KHR ) 
//=== VK_KHR_wayland_surface === PFN_vkCreateWaylandSurfaceKHR vkCreateWaylandSurfaceKHR = 0; PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR vkGetPhysicalDeviceWaylandPresentationSupportKHR = 0; - //=== VK_KHR_wayland_surface === - PFN_vkCreateWaylandSurfaceKHR vkCreateWaylandSurfaceKHR = 0; - PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR vkGetPhysicalDeviceWaylandPresentationSupportKHR = 0; #else PFN_dummy vkCreateWaylandSurfaceKHR_placeholder = 0; PFN_dummy vkGetPhysicalDeviceWaylandPresentationSupportKHR_placeholder = 0; - PFN_dummy vkCreateWaylandSurfaceKHR_placeholder = 0; - PFN_dummy vkGetPhysicalDeviceWaylandPresentationSupportKHR_placeholder = 0; #endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ #if defined( VK_USE_PLATFORM_ANDROID_KHR ) //=== VK_KHR_android_surface === PFN_vkCreateAndroidSurfaceKHR vkCreateAndroidSurfaceKHR = 0; - //=== VK_KHR_android_surface === - PFN_vkCreateAndroidSurfaceKHR vkCreateAndroidSurfaceKHR = 0; #else PFN_dummy vkCreateAndroidSurfaceKHR_placeholder = 0; - PFN_dummy vkCreateAndroidSurfaceKHR_placeholder = 0; #endif /*VK_USE_PLATFORM_ANDROID_KHR*/ #if defined( VK_USE_PLATFORM_WIN32_KHR ) //=== VK_KHR_win32_surface === PFN_vkCreateWin32SurfaceKHR vkCreateWin32SurfaceKHR = 0; PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR vkGetPhysicalDeviceWin32PresentationSupportKHR = 0; - //=== VK_KHR_win32_surface === - PFN_vkCreateWin32SurfaceKHR vkCreateWin32SurfaceKHR = 0; - PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR vkGetPhysicalDeviceWin32PresentationSupportKHR = 0; #else PFN_dummy vkCreateWin32SurfaceKHR_placeholder = 0; PFN_dummy vkGetPhysicalDeviceWin32PresentationSupportKHR_placeholder = 0; - PFN_dummy vkCreateWin32SurfaceKHR_placeholder = 0; - PFN_dummy vkGetPhysicalDeviceWin32PresentationSupportKHR_placeholder = 0; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ //=== VK_EXT_debug_report === PFN_vkCreateDebugReportCallbackEXT vkCreateDebugReportCallbackEXT = 0; PFN_vkDestroyDebugReportCallbackEXT vkDestroyDebugReportCallbackEXT = 0; PFN_vkDebugReportMessageEXT vkDebugReportMessageEXT = 0; - //=== VK_EXT_debug_report === - PFN_vkCreateDebugReportCallbackEXT vkCreateDebugReportCallbackEXT = 0; - PFN_vkDestroyDebugReportCallbackEXT vkDestroyDebugReportCallbackEXT = 0; - PFN_vkDebugReportMessageEXT vkDebugReportMessageEXT = 0; - //=== VK_EXT_debug_marker === - PFN_vkDebugMarkerSetObjectTagEXT vkDebugMarkerSetObjectTagEXT = 0; - PFN_vkDebugMarkerSetObjectNameEXT vkDebugMarkerSetObjectNameEXT = 0; - PFN_vkCmdDebugMarkerBeginEXT vkCmdDebugMarkerBeginEXT = 0; - PFN_vkCmdDebugMarkerEndEXT vkCmdDebugMarkerEndEXT = 0; - PFN_vkCmdDebugMarkerInsertEXT vkCmdDebugMarkerInsertEXT = 0; //=== VK_EXT_debug_marker === PFN_vkDebugMarkerSetObjectTagEXT vkDebugMarkerSetObjectTagEXT = 0; PFN_vkDebugMarkerSetObjectNameEXT vkDebugMarkerSetObjectNameEXT = 0; @@ -23187,19 +18052,6 @@ namespace VULKAN_HPP_NAMESPACE PFN_vkCmdDebugMarkerEndEXT vkCmdDebugMarkerEndEXT = 0; PFN_vkCmdDebugMarkerInsertEXT vkCmdDebugMarkerInsertEXT = 0; - //=== VK_KHR_video_queue === - PFN_vkGetPhysicalDeviceVideoCapabilitiesKHR vkGetPhysicalDeviceVideoCapabilitiesKHR = 0; - PFN_vkGetPhysicalDeviceVideoFormatPropertiesKHR vkGetPhysicalDeviceVideoFormatPropertiesKHR = 0; - PFN_vkCreateVideoSessionKHR vkCreateVideoSessionKHR = 0; - PFN_vkDestroyVideoSessionKHR vkDestroyVideoSessionKHR = 0; - PFN_vkGetVideoSessionMemoryRequirementsKHR vkGetVideoSessionMemoryRequirementsKHR = 0; - PFN_vkBindVideoSessionMemoryKHR vkBindVideoSessionMemoryKHR = 0; - PFN_vkCreateVideoSessionParametersKHR 
vkCreateVideoSessionParametersKHR = 0; - PFN_vkUpdateVideoSessionParametersKHR vkUpdateVideoSessionParametersKHR = 0; - PFN_vkDestroyVideoSessionParametersKHR vkDestroyVideoSessionParametersKHR = 0; - PFN_vkCmdBeginVideoCodingKHR vkCmdBeginVideoCodingKHR = 0; - PFN_vkCmdEndVideoCodingKHR vkCmdEndVideoCodingKHR = 0; - PFN_vkCmdControlVideoCodingKHR vkCmdControlVideoCodingKHR = 0; //=== VK_KHR_video_queue === PFN_vkGetPhysicalDeviceVideoCapabilitiesKHR vkGetPhysicalDeviceVideoCapabilitiesKHR = 0; PFN_vkGetPhysicalDeviceVideoFormatPropertiesKHR vkGetPhysicalDeviceVideoFormatPropertiesKHR = 0; @@ -23214,18 +18066,9 @@ namespace VULKAN_HPP_NAMESPACE PFN_vkCmdEndVideoCodingKHR vkCmdEndVideoCodingKHR = 0; PFN_vkCmdControlVideoCodingKHR vkCmdControlVideoCodingKHR = 0; - //=== VK_KHR_video_decode_queue === - PFN_vkCmdDecodeVideoKHR vkCmdDecodeVideoKHR = 0; //=== VK_KHR_video_decode_queue === PFN_vkCmdDecodeVideoKHR vkCmdDecodeVideoKHR = 0; - //=== VK_EXT_transform_feedback === - PFN_vkCmdBindTransformFeedbackBuffersEXT vkCmdBindTransformFeedbackBuffersEXT = 0; - PFN_vkCmdBeginTransformFeedbackEXT vkCmdBeginTransformFeedbackEXT = 0; - PFN_vkCmdEndTransformFeedbackEXT vkCmdEndTransformFeedbackEXT = 0; - PFN_vkCmdBeginQueryIndexedEXT vkCmdBeginQueryIndexedEXT = 0; - PFN_vkCmdEndQueryIndexedEXT vkCmdEndQueryIndexedEXT = 0; - PFN_vkCmdDrawIndirectByteCountEXT vkCmdDrawIndirectByteCountEXT = 0; //=== VK_EXT_transform_feedback === PFN_vkCmdBindTransformFeedbackBuffersEXT vkCmdBindTransformFeedbackBuffersEXT = 0; PFN_vkCmdBeginTransformFeedbackEXT vkCmdBeginTransformFeedbackEXT = 0; @@ -23234,12 +18077,6 @@ namespace VULKAN_HPP_NAMESPACE PFN_vkCmdEndQueryIndexedEXT vkCmdEndQueryIndexedEXT = 0; PFN_vkCmdDrawIndirectByteCountEXT vkCmdDrawIndirectByteCountEXT = 0; - //=== VK_NVX_binary_import === - PFN_vkCreateCuModuleNVX vkCreateCuModuleNVX = 0; - PFN_vkCreateCuFunctionNVX vkCreateCuFunctionNVX = 0; - PFN_vkDestroyCuModuleNVX vkDestroyCuModuleNVX = 0; - PFN_vkDestroyCuFunctionNVX vkDestroyCuFunctionNVX = 0; - PFN_vkCmdCuLaunchKernelNVX vkCmdCuLaunchKernelNVX = 0; //=== VK_NVX_binary_import === PFN_vkCreateCuModuleNVX vkCreateCuModuleNVX = 0; PFN_vkCreateCuFunctionNVX vkCreateCuFunctionNVX = 0; @@ -23248,51 +18085,36 @@ namespace VULKAN_HPP_NAMESPACE PFN_vkCmdCuLaunchKernelNVX vkCmdCuLaunchKernelNVX = 0; //=== VK_NVX_image_view_handle === - PFN_vkGetImageViewHandleNVX vkGetImageViewHandleNVX = 0; - PFN_vkGetImageViewAddressNVX vkGetImageViewAddressNVX = 0; + PFN_vkGetImageViewHandleNVX vkGetImageViewHandleNVX = 0; + PFN_vkGetImageViewHandle64NVX vkGetImageViewHandle64NVX = 0; + PFN_vkGetImageViewAddressNVX vkGetImageViewAddressNVX = 0; - //=== VK_AMD_draw_indirect_count === - PFN_vkCmdDrawIndirectCountAMD vkCmdDrawIndirectCountAMD = 0; - PFN_vkCmdDrawIndexedIndirectCountAMD vkCmdDrawIndexedIndirectCountAMD = 0; //=== VK_AMD_draw_indirect_count === PFN_vkCmdDrawIndirectCountAMD vkCmdDrawIndirectCountAMD = 0; PFN_vkCmdDrawIndexedIndirectCountAMD vkCmdDrawIndexedIndirectCountAMD = 0; - //=== VK_AMD_shader_info === - PFN_vkGetShaderInfoAMD vkGetShaderInfoAMD = 0; //=== VK_AMD_shader_info === PFN_vkGetShaderInfoAMD vkGetShaderInfoAMD = 0; //=== VK_KHR_dynamic_rendering === PFN_vkCmdBeginRenderingKHR vkCmdBeginRenderingKHR = 0; PFN_vkCmdEndRenderingKHR vkCmdEndRenderingKHR = 0; - //=== VK_KHR_dynamic_rendering === - PFN_vkCmdBeginRenderingKHR vkCmdBeginRenderingKHR = 0; - PFN_vkCmdEndRenderingKHR vkCmdEndRenderingKHR = 0; #if defined( VK_USE_PLATFORM_GGP ) //=== VK_GGP_stream_descriptor_surface === 
PFN_vkCreateStreamDescriptorSurfaceGGP vkCreateStreamDescriptorSurfaceGGP = 0; - //=== VK_GGP_stream_descriptor_surface === - PFN_vkCreateStreamDescriptorSurfaceGGP vkCreateStreamDescriptorSurfaceGGP = 0; #else PFN_dummy vkCreateStreamDescriptorSurfaceGGP_placeholder = 0; - PFN_dummy vkCreateStreamDescriptorSurfaceGGP_placeholder = 0; #endif /*VK_USE_PLATFORM_GGP*/ //=== VK_NV_external_memory_capabilities === PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV vkGetPhysicalDeviceExternalImageFormatPropertiesNV = 0; - //=== VK_NV_external_memory_capabilities === - PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV vkGetPhysicalDeviceExternalImageFormatPropertiesNV = 0; #if defined( VK_USE_PLATFORM_WIN32_KHR ) //=== VK_NV_external_memory_win32 === PFN_vkGetMemoryWin32HandleNV vkGetMemoryWin32HandleNV = 0; - //=== VK_NV_external_memory_win32 === - PFN_vkGetMemoryWin32HandleNV vkGetMemoryWin32HandleNV = 0; #else PFN_dummy vkGetMemoryWin32HandleNV_placeholder = 0; - PFN_dummy vkGetMemoryWin32HandleNV_placeholder = 0; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ //=== VK_KHR_get_physical_device_properties2 === @@ -23303,236 +18125,137 @@ namespace VULKAN_HPP_NAMESPACE PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR vkGetPhysicalDeviceQueueFamilyProperties2KHR = 0; PFN_vkGetPhysicalDeviceMemoryProperties2KHR vkGetPhysicalDeviceMemoryProperties2KHR = 0; PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR vkGetPhysicalDeviceSparseImageFormatProperties2KHR = 0; - //=== VK_KHR_get_physical_device_properties2 === - PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR = 0; - PFN_vkGetPhysicalDeviceProperties2KHR vkGetPhysicalDeviceProperties2KHR = 0; - PFN_vkGetPhysicalDeviceFormatProperties2KHR vkGetPhysicalDeviceFormatProperties2KHR = 0; - PFN_vkGetPhysicalDeviceImageFormatProperties2KHR vkGetPhysicalDeviceImageFormatProperties2KHR = 0; - PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR vkGetPhysicalDeviceQueueFamilyProperties2KHR = 0; - PFN_vkGetPhysicalDeviceMemoryProperties2KHR vkGetPhysicalDeviceMemoryProperties2KHR = 0; - PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR vkGetPhysicalDeviceSparseImageFormatProperties2KHR = 0; //=== VK_KHR_device_group === PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR vkGetDeviceGroupPeerMemoryFeaturesKHR = 0; PFN_vkCmdSetDeviceMaskKHR vkCmdSetDeviceMaskKHR = 0; PFN_vkCmdDispatchBaseKHR vkCmdDispatchBaseKHR = 0; - //=== VK_KHR_device_group === - PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR vkGetDeviceGroupPeerMemoryFeaturesKHR = 0; - PFN_vkCmdSetDeviceMaskKHR vkCmdSetDeviceMaskKHR = 0; - PFN_vkCmdDispatchBaseKHR vkCmdDispatchBaseKHR = 0; #if defined( VK_USE_PLATFORM_VI_NN ) //=== VK_NN_vi_surface === PFN_vkCreateViSurfaceNN vkCreateViSurfaceNN = 0; - //=== VK_NN_vi_surface === - PFN_vkCreateViSurfaceNN vkCreateViSurfaceNN = 0; #else PFN_dummy vkCreateViSurfaceNN_placeholder = 0; - PFN_dummy vkCreateViSurfaceNN_placeholder = 0; #endif /*VK_USE_PLATFORM_VI_NN*/ //=== VK_KHR_maintenance1 === PFN_vkTrimCommandPoolKHR vkTrimCommandPoolKHR = 0; - //=== VK_KHR_maintenance1 === - PFN_vkTrimCommandPoolKHR vkTrimCommandPoolKHR = 0; - //=== VK_KHR_device_group_creation === - PFN_vkEnumeratePhysicalDeviceGroupsKHR vkEnumeratePhysicalDeviceGroupsKHR = 0; //=== VK_KHR_device_group_creation === PFN_vkEnumeratePhysicalDeviceGroupsKHR vkEnumeratePhysicalDeviceGroupsKHR = 0; //=== VK_KHR_external_memory_capabilities === PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR vkGetPhysicalDeviceExternalBufferPropertiesKHR = 0; - //=== VK_KHR_external_memory_capabilities 
=== - PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR vkGetPhysicalDeviceExternalBufferPropertiesKHR = 0; #if defined( VK_USE_PLATFORM_WIN32_KHR ) //=== VK_KHR_external_memory_win32 === PFN_vkGetMemoryWin32HandleKHR vkGetMemoryWin32HandleKHR = 0; PFN_vkGetMemoryWin32HandlePropertiesKHR vkGetMemoryWin32HandlePropertiesKHR = 0; - //=== VK_KHR_external_memory_win32 === - PFN_vkGetMemoryWin32HandleKHR vkGetMemoryWin32HandleKHR = 0; - PFN_vkGetMemoryWin32HandlePropertiesKHR vkGetMemoryWin32HandlePropertiesKHR = 0; #else PFN_dummy vkGetMemoryWin32HandleKHR_placeholder = 0; PFN_dummy vkGetMemoryWin32HandlePropertiesKHR_placeholder = 0; - PFN_dummy vkGetMemoryWin32HandleKHR_placeholder = 0; - PFN_dummy vkGetMemoryWin32HandlePropertiesKHR_placeholder = 0; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ //=== VK_KHR_external_memory_fd === PFN_vkGetMemoryFdKHR vkGetMemoryFdKHR = 0; PFN_vkGetMemoryFdPropertiesKHR vkGetMemoryFdPropertiesKHR = 0; - //=== VK_KHR_external_memory_fd === - PFN_vkGetMemoryFdKHR vkGetMemoryFdKHR = 0; - PFN_vkGetMemoryFdPropertiesKHR vkGetMemoryFdPropertiesKHR = 0; //=== VK_KHR_external_semaphore_capabilities === PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR vkGetPhysicalDeviceExternalSemaphorePropertiesKHR = 0; - //=== VK_KHR_external_semaphore_capabilities === - PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR vkGetPhysicalDeviceExternalSemaphorePropertiesKHR = 0; #if defined( VK_USE_PLATFORM_WIN32_KHR ) //=== VK_KHR_external_semaphore_win32 === PFN_vkImportSemaphoreWin32HandleKHR vkImportSemaphoreWin32HandleKHR = 0; PFN_vkGetSemaphoreWin32HandleKHR vkGetSemaphoreWin32HandleKHR = 0; - //=== VK_KHR_external_semaphore_win32 === - PFN_vkImportSemaphoreWin32HandleKHR vkImportSemaphoreWin32HandleKHR = 0; - PFN_vkGetSemaphoreWin32HandleKHR vkGetSemaphoreWin32HandleKHR = 0; #else PFN_dummy vkImportSemaphoreWin32HandleKHR_placeholder = 0; PFN_dummy vkGetSemaphoreWin32HandleKHR_placeholder = 0; - PFN_dummy vkImportSemaphoreWin32HandleKHR_placeholder = 0; - PFN_dummy vkGetSemaphoreWin32HandleKHR_placeholder = 0; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ //=== VK_KHR_external_semaphore_fd === PFN_vkImportSemaphoreFdKHR vkImportSemaphoreFdKHR = 0; PFN_vkGetSemaphoreFdKHR vkGetSemaphoreFdKHR = 0; - //=== VK_KHR_external_semaphore_fd === - PFN_vkImportSemaphoreFdKHR vkImportSemaphoreFdKHR = 0; - PFN_vkGetSemaphoreFdKHR vkGetSemaphoreFdKHR = 0; - //=== VK_KHR_push_descriptor === - PFN_vkCmdPushDescriptorSetKHR vkCmdPushDescriptorSetKHR = 0; - PFN_vkCmdPushDescriptorSetWithTemplateKHR vkCmdPushDescriptorSetWithTemplateKHR = 0; //=== VK_KHR_push_descriptor === PFN_vkCmdPushDescriptorSetKHR vkCmdPushDescriptorSetKHR = 0; PFN_vkCmdPushDescriptorSetWithTemplateKHR vkCmdPushDescriptorSetWithTemplateKHR = 0; - //=== VK_EXT_conditional_rendering === - PFN_vkCmdBeginConditionalRenderingEXT vkCmdBeginConditionalRenderingEXT = 0; - PFN_vkCmdEndConditionalRenderingEXT vkCmdEndConditionalRenderingEXT = 0; //=== VK_EXT_conditional_rendering === PFN_vkCmdBeginConditionalRenderingEXT vkCmdBeginConditionalRenderingEXT = 0; PFN_vkCmdEndConditionalRenderingEXT vkCmdEndConditionalRenderingEXT = 0; - //=== VK_KHR_descriptor_update_template === - PFN_vkCreateDescriptorUpdateTemplateKHR vkCreateDescriptorUpdateTemplateKHR = 0; - PFN_vkDestroyDescriptorUpdateTemplateKHR vkDestroyDescriptorUpdateTemplateKHR = 0; - PFN_vkUpdateDescriptorSetWithTemplateKHR vkUpdateDescriptorSetWithTemplateKHR = 0; //=== VK_KHR_descriptor_update_template === PFN_vkCreateDescriptorUpdateTemplateKHR vkCreateDescriptorUpdateTemplateKHR 
= 0; PFN_vkDestroyDescriptorUpdateTemplateKHR vkDestroyDescriptorUpdateTemplateKHR = 0; PFN_vkUpdateDescriptorSetWithTemplateKHR vkUpdateDescriptorSetWithTemplateKHR = 0; - //=== VK_NV_clip_space_w_scaling === - PFN_vkCmdSetViewportWScalingNV vkCmdSetViewportWScalingNV = 0; //=== VK_NV_clip_space_w_scaling === PFN_vkCmdSetViewportWScalingNV vkCmdSetViewportWScalingNV = 0; //=== VK_EXT_direct_mode_display === PFN_vkReleaseDisplayEXT vkReleaseDisplayEXT = 0; - //=== VK_EXT_direct_mode_display === - PFN_vkReleaseDisplayEXT vkReleaseDisplayEXT = 0; #if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT ) //=== VK_EXT_acquire_xlib_display === PFN_vkAcquireXlibDisplayEXT vkAcquireXlibDisplayEXT = 0; PFN_vkGetRandROutputDisplayEXT vkGetRandROutputDisplayEXT = 0; - //=== VK_EXT_acquire_xlib_display === - PFN_vkAcquireXlibDisplayEXT vkAcquireXlibDisplayEXT = 0; - PFN_vkGetRandROutputDisplayEXT vkGetRandROutputDisplayEXT = 0; #else PFN_dummy vkAcquireXlibDisplayEXT_placeholder = 0; PFN_dummy vkGetRandROutputDisplayEXT_placeholder = 0; - PFN_dummy vkAcquireXlibDisplayEXT_placeholder = 0; - PFN_dummy vkGetRandROutputDisplayEXT_placeholder = 0; #endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ //=== VK_EXT_display_surface_counter === PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT vkGetPhysicalDeviceSurfaceCapabilities2EXT = 0; - //=== VK_EXT_display_surface_counter === - PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT vkGetPhysicalDeviceSurfaceCapabilities2EXT = 0; - //=== VK_EXT_display_control === - PFN_vkDisplayPowerControlEXT vkDisplayPowerControlEXT = 0; - PFN_vkRegisterDeviceEventEXT vkRegisterDeviceEventEXT = 0; - PFN_vkRegisterDisplayEventEXT vkRegisterDisplayEventEXT = 0; - PFN_vkGetSwapchainCounterEXT vkGetSwapchainCounterEXT = 0; //=== VK_EXT_display_control === PFN_vkDisplayPowerControlEXT vkDisplayPowerControlEXT = 0; PFN_vkRegisterDeviceEventEXT vkRegisterDeviceEventEXT = 0; PFN_vkRegisterDisplayEventEXT vkRegisterDisplayEventEXT = 0; PFN_vkGetSwapchainCounterEXT vkGetSwapchainCounterEXT = 0; - //=== VK_GOOGLE_display_timing === - PFN_vkGetRefreshCycleDurationGOOGLE vkGetRefreshCycleDurationGOOGLE = 0; - PFN_vkGetPastPresentationTimingGOOGLE vkGetPastPresentationTimingGOOGLE = 0; //=== VK_GOOGLE_display_timing === PFN_vkGetRefreshCycleDurationGOOGLE vkGetRefreshCycleDurationGOOGLE = 0; PFN_vkGetPastPresentationTimingGOOGLE vkGetPastPresentationTimingGOOGLE = 0; - //=== VK_EXT_discard_rectangles === - PFN_vkCmdSetDiscardRectangleEXT vkCmdSetDiscardRectangleEXT = 0; - PFN_vkCmdSetDiscardRectangleEnableEXT vkCmdSetDiscardRectangleEnableEXT = 0; - PFN_vkCmdSetDiscardRectangleModeEXT vkCmdSetDiscardRectangleModeEXT = 0; //=== VK_EXT_discard_rectangles === PFN_vkCmdSetDiscardRectangleEXT vkCmdSetDiscardRectangleEXT = 0; PFN_vkCmdSetDiscardRectangleEnableEXT vkCmdSetDiscardRectangleEnableEXT = 0; PFN_vkCmdSetDiscardRectangleModeEXT vkCmdSetDiscardRectangleModeEXT = 0; - //=== VK_EXT_hdr_metadata === - PFN_vkSetHdrMetadataEXT vkSetHdrMetadataEXT = 0; //=== VK_EXT_hdr_metadata === PFN_vkSetHdrMetadataEXT vkSetHdrMetadataEXT = 0; - //=== VK_KHR_create_renderpass2 === - PFN_vkCreateRenderPass2KHR vkCreateRenderPass2KHR = 0; - PFN_vkCmdBeginRenderPass2KHR vkCmdBeginRenderPass2KHR = 0; - PFN_vkCmdNextSubpass2KHR vkCmdNextSubpass2KHR = 0; - PFN_vkCmdEndRenderPass2KHR vkCmdEndRenderPass2KHR = 0; //=== VK_KHR_create_renderpass2 === PFN_vkCreateRenderPass2KHR vkCreateRenderPass2KHR = 0; PFN_vkCmdBeginRenderPass2KHR vkCmdBeginRenderPass2KHR = 0; PFN_vkCmdNextSubpass2KHR vkCmdNextSubpass2KHR = 0; PFN_vkCmdEndRenderPass2KHR 
vkCmdEndRenderPass2KHR = 0; - //=== VK_KHR_shared_presentable_image === - PFN_vkGetSwapchainStatusKHR vkGetSwapchainStatusKHR = 0; //=== VK_KHR_shared_presentable_image === PFN_vkGetSwapchainStatusKHR vkGetSwapchainStatusKHR = 0; //=== VK_KHR_external_fence_capabilities === PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR vkGetPhysicalDeviceExternalFencePropertiesKHR = 0; - //=== VK_KHR_external_fence_capabilities === - PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR vkGetPhysicalDeviceExternalFencePropertiesKHR = 0; #if defined( VK_USE_PLATFORM_WIN32_KHR ) //=== VK_KHR_external_fence_win32 === PFN_vkImportFenceWin32HandleKHR vkImportFenceWin32HandleKHR = 0; PFN_vkGetFenceWin32HandleKHR vkGetFenceWin32HandleKHR = 0; - //=== VK_KHR_external_fence_win32 === - PFN_vkImportFenceWin32HandleKHR vkImportFenceWin32HandleKHR = 0; - PFN_vkGetFenceWin32HandleKHR vkGetFenceWin32HandleKHR = 0; #else PFN_dummy vkImportFenceWin32HandleKHR_placeholder = 0; PFN_dummy vkGetFenceWin32HandleKHR_placeholder = 0; - PFN_dummy vkImportFenceWin32HandleKHR_placeholder = 0; - PFN_dummy vkGetFenceWin32HandleKHR_placeholder = 0; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ //=== VK_KHR_external_fence_fd === PFN_vkImportFenceFdKHR vkImportFenceFdKHR = 0; PFN_vkGetFenceFdKHR vkGetFenceFdKHR = 0; - //=== VK_KHR_external_fence_fd === - PFN_vkImportFenceFdKHR vkImportFenceFdKHR = 0; - PFN_vkGetFenceFdKHR vkGetFenceFdKHR = 0; - //=== VK_KHR_performance_query === - PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR = 0; - PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR = 0; - PFN_vkAcquireProfilingLockKHR vkAcquireProfilingLockKHR = 0; - PFN_vkReleaseProfilingLockKHR vkReleaseProfilingLockKHR = 0; //=== VK_KHR_performance_query === PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR = 0; PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR = 0; PFN_vkAcquireProfilingLockKHR vkAcquireProfilingLockKHR = 0; PFN_vkReleaseProfilingLockKHR vkReleaseProfilingLockKHR = 0; - //=== VK_KHR_get_surface_capabilities2 === - PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR vkGetPhysicalDeviceSurfaceCapabilities2KHR = 0; - PFN_vkGetPhysicalDeviceSurfaceFormats2KHR vkGetPhysicalDeviceSurfaceFormats2KHR = 0; //=== VK_KHR_get_surface_capabilities2 === PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR vkGetPhysicalDeviceSurfaceCapabilities2KHR = 0; PFN_vkGetPhysicalDeviceSurfaceFormats2KHR vkGetPhysicalDeviceSurfaceFormats2KHR = 0; @@ -23542,30 +18265,19 @@ namespace VULKAN_HPP_NAMESPACE PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR vkGetPhysicalDeviceDisplayPlaneProperties2KHR = 0; PFN_vkGetDisplayModeProperties2KHR vkGetDisplayModeProperties2KHR = 0; PFN_vkGetDisplayPlaneCapabilities2KHR vkGetDisplayPlaneCapabilities2KHR = 0; - //=== VK_KHR_get_display_properties2 === - PFN_vkGetPhysicalDeviceDisplayProperties2KHR vkGetPhysicalDeviceDisplayProperties2KHR = 0; - PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR vkGetPhysicalDeviceDisplayPlaneProperties2KHR = 0; - PFN_vkGetDisplayModeProperties2KHR vkGetDisplayModeProperties2KHR = 0; - PFN_vkGetDisplayPlaneCapabilities2KHR vkGetDisplayPlaneCapabilities2KHR = 0; #if defined( VK_USE_PLATFORM_IOS_MVK ) //=== VK_MVK_ios_surface === PFN_vkCreateIOSSurfaceMVK vkCreateIOSSurfaceMVK = 0; - //=== VK_MVK_ios_surface === - 
PFN_vkCreateIOSSurfaceMVK vkCreateIOSSurfaceMVK = 0; #else PFN_dummy vkCreateIOSSurfaceMVK_placeholder = 0; - PFN_dummy vkCreateIOSSurfaceMVK_placeholder = 0; #endif /*VK_USE_PLATFORM_IOS_MVK*/ #if defined( VK_USE_PLATFORM_MACOS_MVK ) //=== VK_MVK_macos_surface === PFN_vkCreateMacOSSurfaceMVK vkCreateMacOSSurfaceMVK = 0; - //=== VK_MVK_macos_surface === - PFN_vkCreateMacOSSurfaceMVK vkCreateMacOSSurfaceMVK = 0; #else PFN_dummy vkCreateMacOSSurfaceMVK_placeholder = 0; - PFN_dummy vkCreateMacOSSurfaceMVK_placeholder = 0; #endif /*VK_USE_PLATFORM_MACOS_MVK*/ //=== VK_EXT_debug_utils === @@ -23580,31 +18292,14 @@ namespace VULKAN_HPP_NAMESPACE PFN_vkCreateDebugUtilsMessengerEXT vkCreateDebugUtilsMessengerEXT = 0; PFN_vkDestroyDebugUtilsMessengerEXT vkDestroyDebugUtilsMessengerEXT = 0; PFN_vkSubmitDebugUtilsMessageEXT vkSubmitDebugUtilsMessageEXT = 0; - //=== VK_EXT_debug_utils === - PFN_vkSetDebugUtilsObjectNameEXT vkSetDebugUtilsObjectNameEXT = 0; - PFN_vkSetDebugUtilsObjectTagEXT vkSetDebugUtilsObjectTagEXT = 0; - PFN_vkQueueBeginDebugUtilsLabelEXT vkQueueBeginDebugUtilsLabelEXT = 0; - PFN_vkQueueEndDebugUtilsLabelEXT vkQueueEndDebugUtilsLabelEXT = 0; - PFN_vkQueueInsertDebugUtilsLabelEXT vkQueueInsertDebugUtilsLabelEXT = 0; - PFN_vkCmdBeginDebugUtilsLabelEXT vkCmdBeginDebugUtilsLabelEXT = 0; - PFN_vkCmdEndDebugUtilsLabelEXT vkCmdEndDebugUtilsLabelEXT = 0; - PFN_vkCmdInsertDebugUtilsLabelEXT vkCmdInsertDebugUtilsLabelEXT = 0; - PFN_vkCreateDebugUtilsMessengerEXT vkCreateDebugUtilsMessengerEXT = 0; - PFN_vkDestroyDebugUtilsMessengerEXT vkDestroyDebugUtilsMessengerEXT = 0; - PFN_vkSubmitDebugUtilsMessageEXT vkSubmitDebugUtilsMessageEXT = 0; #if defined( VK_USE_PLATFORM_ANDROID_KHR ) //=== VK_ANDROID_external_memory_android_hardware_buffer === PFN_vkGetAndroidHardwareBufferPropertiesANDROID vkGetAndroidHardwareBufferPropertiesANDROID = 0; PFN_vkGetMemoryAndroidHardwareBufferANDROID vkGetMemoryAndroidHardwareBufferANDROID = 0; - //=== VK_ANDROID_external_memory_android_hardware_buffer === - PFN_vkGetAndroidHardwareBufferPropertiesANDROID vkGetAndroidHardwareBufferPropertiesANDROID = 0; - PFN_vkGetMemoryAndroidHardwareBufferANDROID vkGetMemoryAndroidHardwareBufferANDROID = 0; #else PFN_dummy vkGetAndroidHardwareBufferPropertiesANDROID_placeholder = 0; PFN_dummy vkGetMemoryAndroidHardwareBufferANDROID_placeholder = 0; - PFN_dummy vkGetAndroidHardwareBufferPropertiesANDROID_placeholder = 0; - PFN_dummy vkGetMemoryAndroidHardwareBufferANDROID_placeholder = 0; #endif /*VK_USE_PLATFORM_ANDROID_KHR*/ #if defined( VK_ENABLE_BETA_EXTENSIONS ) @@ -23616,14 +18311,6 @@ namespace VULKAN_HPP_NAMESPACE PFN_vkCmdDispatchGraphAMDX vkCmdDispatchGraphAMDX = 0; PFN_vkCmdDispatchGraphIndirectAMDX vkCmdDispatchGraphIndirectAMDX = 0; PFN_vkCmdDispatchGraphIndirectCountAMDX vkCmdDispatchGraphIndirectCountAMDX = 0; - //=== VK_AMDX_shader_enqueue === - PFN_vkCreateExecutionGraphPipelinesAMDX vkCreateExecutionGraphPipelinesAMDX = 0; - PFN_vkGetExecutionGraphPipelineScratchSizeAMDX vkGetExecutionGraphPipelineScratchSizeAMDX = 0; - PFN_vkGetExecutionGraphPipelineNodeIndexAMDX vkGetExecutionGraphPipelineNodeIndexAMDX = 0; - PFN_vkCmdInitializeGraphScratchMemoryAMDX vkCmdInitializeGraphScratchMemoryAMDX = 0; - PFN_vkCmdDispatchGraphAMDX vkCmdDispatchGraphAMDX = 0; - PFN_vkCmdDispatchGraphIndirectAMDX vkCmdDispatchGraphIndirectAMDX = 0; - PFN_vkCmdDispatchGraphIndirectCountAMDX vkCmdDispatchGraphIndirectCountAMDX = 0; #else PFN_dummy vkCreateExecutionGraphPipelinesAMDX_placeholder = 0; PFN_dummy 
vkGetExecutionGraphPipelineScratchSizeAMDX_placeholder = 0; @@ -23632,48 +18319,17 @@ namespace VULKAN_HPP_NAMESPACE PFN_dummy vkCmdDispatchGraphAMDX_placeholder = 0; PFN_dummy vkCmdDispatchGraphIndirectAMDX_placeholder = 0; PFN_dummy vkCmdDispatchGraphIndirectCountAMDX_placeholder = 0; - PFN_dummy vkCreateExecutionGraphPipelinesAMDX_placeholder = 0; - PFN_dummy vkGetExecutionGraphPipelineScratchSizeAMDX_placeholder = 0; - PFN_dummy vkGetExecutionGraphPipelineNodeIndexAMDX_placeholder = 0; - PFN_dummy vkCmdInitializeGraphScratchMemoryAMDX_placeholder = 0; - PFN_dummy vkCmdDispatchGraphAMDX_placeholder = 0; - PFN_dummy vkCmdDispatchGraphIndirectAMDX_placeholder = 0; - PFN_dummy vkCmdDispatchGraphIndirectCountAMDX_placeholder = 0; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ //=== VK_EXT_sample_locations === PFN_vkCmdSetSampleLocationsEXT vkCmdSetSampleLocationsEXT = 0; PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT vkGetPhysicalDeviceMultisamplePropertiesEXT = 0; - //=== VK_EXT_sample_locations === - PFN_vkCmdSetSampleLocationsEXT vkCmdSetSampleLocationsEXT = 0; - PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT vkGetPhysicalDeviceMultisamplePropertiesEXT = 0; - //=== VK_KHR_get_memory_requirements2 === - PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR = 0; - PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR = 0; - PFN_vkGetImageSparseMemoryRequirements2KHR vkGetImageSparseMemoryRequirements2KHR = 0; //=== VK_KHR_get_memory_requirements2 === PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR = 0; PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR = 0; PFN_vkGetImageSparseMemoryRequirements2KHR vkGetImageSparseMemoryRequirements2KHR = 0; - //=== VK_KHR_acceleration_structure === - PFN_vkCreateAccelerationStructureKHR vkCreateAccelerationStructureKHR = 0; - PFN_vkDestroyAccelerationStructureKHR vkDestroyAccelerationStructureKHR = 0; - PFN_vkCmdBuildAccelerationStructuresKHR vkCmdBuildAccelerationStructuresKHR = 0; - PFN_vkCmdBuildAccelerationStructuresIndirectKHR vkCmdBuildAccelerationStructuresIndirectKHR = 0; - PFN_vkBuildAccelerationStructuresKHR vkBuildAccelerationStructuresKHR = 0; - PFN_vkCopyAccelerationStructureKHR vkCopyAccelerationStructureKHR = 0; - PFN_vkCopyAccelerationStructureToMemoryKHR vkCopyAccelerationStructureToMemoryKHR = 0; - PFN_vkCopyMemoryToAccelerationStructureKHR vkCopyMemoryToAccelerationStructureKHR = 0; - PFN_vkWriteAccelerationStructuresPropertiesKHR vkWriteAccelerationStructuresPropertiesKHR = 0; - PFN_vkCmdCopyAccelerationStructureKHR vkCmdCopyAccelerationStructureKHR = 0; - PFN_vkCmdCopyAccelerationStructureToMemoryKHR vkCmdCopyAccelerationStructureToMemoryKHR = 0; - PFN_vkCmdCopyMemoryToAccelerationStructureKHR vkCmdCopyMemoryToAccelerationStructureKHR = 0; - PFN_vkGetAccelerationStructureDeviceAddressKHR vkGetAccelerationStructureDeviceAddressKHR = 0; - PFN_vkCmdWriteAccelerationStructuresPropertiesKHR vkCmdWriteAccelerationStructuresPropertiesKHR = 0; - PFN_vkGetDeviceAccelerationStructureCompatibilityKHR vkGetDeviceAccelerationStructureCompatibilityKHR = 0; - PFN_vkGetAccelerationStructureBuildSizesKHR vkGetAccelerationStructureBuildSizesKHR = 0; //=== VK_KHR_acceleration_structure === PFN_vkCreateAccelerationStructureKHR vkCreateAccelerationStructureKHR = 0; PFN_vkDestroyAccelerationStructureKHR vkDestroyAccelerationStructureKHR = 0; @@ -23692,14 +18348,6 @@ namespace VULKAN_HPP_NAMESPACE PFN_vkGetDeviceAccelerationStructureCompatibilityKHR 
vkGetDeviceAccelerationStructureCompatibilityKHR = 0; PFN_vkGetAccelerationStructureBuildSizesKHR vkGetAccelerationStructureBuildSizesKHR = 0; - //=== VK_KHR_ray_tracing_pipeline === - PFN_vkCmdTraceRaysKHR vkCmdTraceRaysKHR = 0; - PFN_vkCreateRayTracingPipelinesKHR vkCreateRayTracingPipelinesKHR = 0; - PFN_vkGetRayTracingShaderGroupHandlesKHR vkGetRayTracingShaderGroupHandlesKHR = 0; - PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR vkGetRayTracingCaptureReplayShaderGroupHandlesKHR = 0; - PFN_vkCmdTraceRaysIndirectKHR vkCmdTraceRaysIndirectKHR = 0; - PFN_vkGetRayTracingShaderGroupStackSizeKHR vkGetRayTracingShaderGroupStackSizeKHR = 0; - PFN_vkCmdSetRayTracingPipelineStackSizeKHR vkCmdSetRayTracingPipelineStackSizeKHR = 0; //=== VK_KHR_ray_tracing_pipeline === PFN_vkCmdTraceRaysKHR vkCmdTraceRaysKHR = 0; PFN_vkCreateRayTracingPipelinesKHR vkCreateRayTracingPipelinesKHR = 0; @@ -23709,58 +18357,28 @@ namespace VULKAN_HPP_NAMESPACE PFN_vkGetRayTracingShaderGroupStackSizeKHR vkGetRayTracingShaderGroupStackSizeKHR = 0; PFN_vkCmdSetRayTracingPipelineStackSizeKHR vkCmdSetRayTracingPipelineStackSizeKHR = 0; - //=== VK_KHR_sampler_ycbcr_conversion === - PFN_vkCreateSamplerYcbcrConversionKHR vkCreateSamplerYcbcrConversionKHR = 0; - PFN_vkDestroySamplerYcbcrConversionKHR vkDestroySamplerYcbcrConversionKHR = 0; //=== VK_KHR_sampler_ycbcr_conversion === PFN_vkCreateSamplerYcbcrConversionKHR vkCreateSamplerYcbcrConversionKHR = 0; PFN_vkDestroySamplerYcbcrConversionKHR vkDestroySamplerYcbcrConversionKHR = 0; - //=== VK_KHR_bind_memory2 === - PFN_vkBindBufferMemory2KHR vkBindBufferMemory2KHR = 0; - PFN_vkBindImageMemory2KHR vkBindImageMemory2KHR = 0; //=== VK_KHR_bind_memory2 === PFN_vkBindBufferMemory2KHR vkBindBufferMemory2KHR = 0; PFN_vkBindImageMemory2KHR vkBindImageMemory2KHR = 0; - //=== VK_EXT_image_drm_format_modifier === - PFN_vkGetImageDrmFormatModifierPropertiesEXT vkGetImageDrmFormatModifierPropertiesEXT = 0; //=== VK_EXT_image_drm_format_modifier === PFN_vkGetImageDrmFormatModifierPropertiesEXT vkGetImageDrmFormatModifierPropertiesEXT = 0; - //=== VK_EXT_validation_cache === - PFN_vkCreateValidationCacheEXT vkCreateValidationCacheEXT = 0; - PFN_vkDestroyValidationCacheEXT vkDestroyValidationCacheEXT = 0; - PFN_vkMergeValidationCachesEXT vkMergeValidationCachesEXT = 0; - PFN_vkGetValidationCacheDataEXT vkGetValidationCacheDataEXT = 0; //=== VK_EXT_validation_cache === PFN_vkCreateValidationCacheEXT vkCreateValidationCacheEXT = 0; PFN_vkDestroyValidationCacheEXT vkDestroyValidationCacheEXT = 0; PFN_vkMergeValidationCachesEXT vkMergeValidationCachesEXT = 0; PFN_vkGetValidationCacheDataEXT vkGetValidationCacheDataEXT = 0; - //=== VK_NV_shading_rate_image === - PFN_vkCmdBindShadingRateImageNV vkCmdBindShadingRateImageNV = 0; - PFN_vkCmdSetViewportShadingRatePaletteNV vkCmdSetViewportShadingRatePaletteNV = 0; - PFN_vkCmdSetCoarseSampleOrderNV vkCmdSetCoarseSampleOrderNV = 0; //=== VK_NV_shading_rate_image === PFN_vkCmdBindShadingRateImageNV vkCmdBindShadingRateImageNV = 0; PFN_vkCmdSetViewportShadingRatePaletteNV vkCmdSetViewportShadingRatePaletteNV = 0; PFN_vkCmdSetCoarseSampleOrderNV vkCmdSetCoarseSampleOrderNV = 0; - //=== VK_NV_ray_tracing === - PFN_vkCreateAccelerationStructureNV vkCreateAccelerationStructureNV = 0; - PFN_vkDestroyAccelerationStructureNV vkDestroyAccelerationStructureNV = 0; - PFN_vkGetAccelerationStructureMemoryRequirementsNV vkGetAccelerationStructureMemoryRequirementsNV = 0; - PFN_vkBindAccelerationStructureMemoryNV vkBindAccelerationStructureMemoryNV = 0; - 
PFN_vkCmdBuildAccelerationStructureNV vkCmdBuildAccelerationStructureNV = 0; - PFN_vkCmdCopyAccelerationStructureNV vkCmdCopyAccelerationStructureNV = 0; - PFN_vkCmdTraceRaysNV vkCmdTraceRaysNV = 0; - PFN_vkCreateRayTracingPipelinesNV vkCreateRayTracingPipelinesNV = 0; - PFN_vkGetRayTracingShaderGroupHandlesNV vkGetRayTracingShaderGroupHandlesNV = 0; - PFN_vkGetAccelerationStructureHandleNV vkGetAccelerationStructureHandleNV = 0; - PFN_vkCmdWriteAccelerationStructuresPropertiesNV vkCmdWriteAccelerationStructuresPropertiesNV = 0; - PFN_vkCompileDeferredNV vkCompileDeferredNV = 0; //=== VK_NV_ray_tracing === PFN_vkCreateAccelerationStructureNV vkCreateAccelerationStructureNV = 0; PFN_vkDestroyAccelerationStructureNV vkDestroyAccelerationStructureNV = 0; @@ -23775,81 +18393,43 @@ namespace VULKAN_HPP_NAMESPACE PFN_vkCmdWriteAccelerationStructuresPropertiesNV vkCmdWriteAccelerationStructuresPropertiesNV = 0; PFN_vkCompileDeferredNV vkCompileDeferredNV = 0; - //=== VK_KHR_maintenance3 === - PFN_vkGetDescriptorSetLayoutSupportKHR vkGetDescriptorSetLayoutSupportKHR = 0; //=== VK_KHR_maintenance3 === PFN_vkGetDescriptorSetLayoutSupportKHR vkGetDescriptorSetLayoutSupportKHR = 0; - //=== VK_KHR_draw_indirect_count === - PFN_vkCmdDrawIndirectCountKHR vkCmdDrawIndirectCountKHR = 0; - PFN_vkCmdDrawIndexedIndirectCountKHR vkCmdDrawIndexedIndirectCountKHR = 0; //=== VK_KHR_draw_indirect_count === PFN_vkCmdDrawIndirectCountKHR vkCmdDrawIndirectCountKHR = 0; PFN_vkCmdDrawIndexedIndirectCountKHR vkCmdDrawIndexedIndirectCountKHR = 0; - //=== VK_EXT_external_memory_host === - PFN_vkGetMemoryHostPointerPropertiesEXT vkGetMemoryHostPointerPropertiesEXT = 0; //=== VK_EXT_external_memory_host === PFN_vkGetMemoryHostPointerPropertiesEXT vkGetMemoryHostPointerPropertiesEXT = 0; - //=== VK_AMD_buffer_marker === - PFN_vkCmdWriteBufferMarkerAMD vkCmdWriteBufferMarkerAMD = 0; - PFN_vkCmdWriteBufferMarker2AMD vkCmdWriteBufferMarker2AMD = 0; //=== VK_AMD_buffer_marker === PFN_vkCmdWriteBufferMarkerAMD vkCmdWriteBufferMarkerAMD = 0; PFN_vkCmdWriteBufferMarker2AMD vkCmdWriteBufferMarker2AMD = 0; - //=== VK_EXT_calibrated_timestamps === - PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT vkGetPhysicalDeviceCalibrateableTimeDomainsEXT = 0; - PFN_vkGetCalibratedTimestampsEXT vkGetCalibratedTimestampsEXT = 0; //=== VK_EXT_calibrated_timestamps === PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT vkGetPhysicalDeviceCalibrateableTimeDomainsEXT = 0; PFN_vkGetCalibratedTimestampsEXT vkGetCalibratedTimestampsEXT = 0; - //=== VK_NV_mesh_shader === - PFN_vkCmdDrawMeshTasksNV vkCmdDrawMeshTasksNV = 0; - PFN_vkCmdDrawMeshTasksIndirectNV vkCmdDrawMeshTasksIndirectNV = 0; - PFN_vkCmdDrawMeshTasksIndirectCountNV vkCmdDrawMeshTasksIndirectCountNV = 0; //=== VK_NV_mesh_shader === PFN_vkCmdDrawMeshTasksNV vkCmdDrawMeshTasksNV = 0; PFN_vkCmdDrawMeshTasksIndirectNV vkCmdDrawMeshTasksIndirectNV = 0; PFN_vkCmdDrawMeshTasksIndirectCountNV vkCmdDrawMeshTasksIndirectCountNV = 0; - //=== VK_NV_scissor_exclusive === - PFN_vkCmdSetExclusiveScissorEnableNV vkCmdSetExclusiveScissorEnableNV = 0; - PFN_vkCmdSetExclusiveScissorNV vkCmdSetExclusiveScissorNV = 0; //=== VK_NV_scissor_exclusive === PFN_vkCmdSetExclusiveScissorEnableNV vkCmdSetExclusiveScissorEnableNV = 0; PFN_vkCmdSetExclusiveScissorNV vkCmdSetExclusiveScissorNV = 0; - //=== VK_NV_device_diagnostic_checkpoints === - PFN_vkCmdSetCheckpointNV vkCmdSetCheckpointNV = 0; - PFN_vkGetQueueCheckpointDataNV vkGetQueueCheckpointDataNV = 0; - PFN_vkGetQueueCheckpointData2NV vkGetQueueCheckpointData2NV 
= 0; //=== VK_NV_device_diagnostic_checkpoints === PFN_vkCmdSetCheckpointNV vkCmdSetCheckpointNV = 0; PFN_vkGetQueueCheckpointDataNV vkGetQueueCheckpointDataNV = 0; PFN_vkGetQueueCheckpointData2NV vkGetQueueCheckpointData2NV = 0; - //=== VK_KHR_timeline_semaphore === - PFN_vkGetSemaphoreCounterValueKHR vkGetSemaphoreCounterValueKHR = 0; - PFN_vkWaitSemaphoresKHR vkWaitSemaphoresKHR = 0; - PFN_vkSignalSemaphoreKHR vkSignalSemaphoreKHR = 0; //=== VK_KHR_timeline_semaphore === PFN_vkGetSemaphoreCounterValueKHR vkGetSemaphoreCounterValueKHR = 0; PFN_vkWaitSemaphoresKHR vkWaitSemaphoresKHR = 0; PFN_vkSignalSemaphoreKHR vkSignalSemaphoreKHR = 0; - //=== VK_INTEL_performance_query === - PFN_vkInitializePerformanceApiINTEL vkInitializePerformanceApiINTEL = 0; - PFN_vkUninitializePerformanceApiINTEL vkUninitializePerformanceApiINTEL = 0; - PFN_vkCmdSetPerformanceMarkerINTEL vkCmdSetPerformanceMarkerINTEL = 0; - PFN_vkCmdSetPerformanceStreamMarkerINTEL vkCmdSetPerformanceStreamMarkerINTEL = 0; - PFN_vkCmdSetPerformanceOverrideINTEL vkCmdSetPerformanceOverrideINTEL = 0; - PFN_vkAcquirePerformanceConfigurationINTEL vkAcquirePerformanceConfigurationINTEL = 0; - PFN_vkReleasePerformanceConfigurationINTEL vkReleasePerformanceConfigurationINTEL = 0; - PFN_vkQueueSetPerformanceConfigurationINTEL vkQueueSetPerformanceConfigurationINTEL = 0; - PFN_vkGetPerformanceParameterINTEL vkGetPerformanceParameterINTEL = 0; //=== VK_INTEL_performance_query === PFN_vkInitializePerformanceApiINTEL vkInitializePerformanceApiINTEL = 0; PFN_vkUninitializePerformanceApiINTEL vkUninitializePerformanceApiINTEL = 0; @@ -23863,67 +18443,43 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_AMD_display_native_hdr === PFN_vkSetLocalDimmingAMD vkSetLocalDimmingAMD = 0; - //=== VK_AMD_display_native_hdr === - PFN_vkSetLocalDimmingAMD vkSetLocalDimmingAMD = 0; #if defined( VK_USE_PLATFORM_FUCHSIA ) //=== VK_FUCHSIA_imagepipe_surface === PFN_vkCreateImagePipeSurfaceFUCHSIA vkCreateImagePipeSurfaceFUCHSIA = 0; - //=== VK_FUCHSIA_imagepipe_surface === - PFN_vkCreateImagePipeSurfaceFUCHSIA vkCreateImagePipeSurfaceFUCHSIA = 0; #else PFN_dummy vkCreateImagePipeSurfaceFUCHSIA_placeholder = 0; - PFN_dummy vkCreateImagePipeSurfaceFUCHSIA_placeholder = 0; #endif /*VK_USE_PLATFORM_FUCHSIA*/ #if defined( VK_USE_PLATFORM_METAL_EXT ) //=== VK_EXT_metal_surface === PFN_vkCreateMetalSurfaceEXT vkCreateMetalSurfaceEXT = 0; - //=== VK_EXT_metal_surface === - PFN_vkCreateMetalSurfaceEXT vkCreateMetalSurfaceEXT = 0; #else PFN_dummy vkCreateMetalSurfaceEXT_placeholder = 0; - PFN_dummy vkCreateMetalSurfaceEXT_placeholder = 0; #endif /*VK_USE_PLATFORM_METAL_EXT*/ //=== VK_KHR_fragment_shading_rate === PFN_vkGetPhysicalDeviceFragmentShadingRatesKHR vkGetPhysicalDeviceFragmentShadingRatesKHR = 0; PFN_vkCmdSetFragmentShadingRateKHR vkCmdSetFragmentShadingRateKHR = 0; - //=== VK_KHR_fragment_shading_rate === - PFN_vkGetPhysicalDeviceFragmentShadingRatesKHR vkGetPhysicalDeviceFragmentShadingRatesKHR = 0; - PFN_vkCmdSetFragmentShadingRateKHR vkCmdSetFragmentShadingRateKHR = 0; - //=== VK_KHR_dynamic_rendering_local_read === - PFN_vkCmdSetRenderingAttachmentLocationsKHR vkCmdSetRenderingAttachmentLocationsKHR = 0; - PFN_vkCmdSetRenderingInputAttachmentIndicesKHR vkCmdSetRenderingInputAttachmentIndicesKHR = 0; //=== VK_KHR_dynamic_rendering_local_read === PFN_vkCmdSetRenderingAttachmentLocationsKHR vkCmdSetRenderingAttachmentLocationsKHR = 0; PFN_vkCmdSetRenderingInputAttachmentIndicesKHR vkCmdSetRenderingInputAttachmentIndicesKHR = 0; - //=== VK_EXT_buffer_device_address 
=== - PFN_vkGetBufferDeviceAddressEXT vkGetBufferDeviceAddressEXT = 0; //=== VK_EXT_buffer_device_address === PFN_vkGetBufferDeviceAddressEXT vkGetBufferDeviceAddressEXT = 0; - //=== VK_EXT_tooling_info === - PFN_vkGetPhysicalDeviceToolPropertiesEXT vkGetPhysicalDeviceToolPropertiesEXT = 0; //=== VK_EXT_tooling_info === PFN_vkGetPhysicalDeviceToolPropertiesEXT vkGetPhysicalDeviceToolPropertiesEXT = 0; - //=== VK_KHR_present_wait === - PFN_vkWaitForPresentKHR vkWaitForPresentKHR = 0; //=== VK_KHR_present_wait === PFN_vkWaitForPresentKHR vkWaitForPresentKHR = 0; - //=== VK_NV_cooperative_matrix === - PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV vkGetPhysicalDeviceCooperativeMatrixPropertiesNV = 0; //=== VK_NV_cooperative_matrix === PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV vkGetPhysicalDeviceCooperativeMatrixPropertiesNV = 0; //=== VK_NV_coverage_reduction_mode === PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV = 0; - //=== VK_NV_coverage_reduction_mode === - PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV = 0; #if defined( VK_USE_PLATFORM_WIN32_KHR ) //=== VK_EXT_full_screen_exclusive === @@ -23931,59 +18487,27 @@ namespace VULKAN_HPP_NAMESPACE PFN_vkAcquireFullScreenExclusiveModeEXT vkAcquireFullScreenExclusiveModeEXT = 0; PFN_vkReleaseFullScreenExclusiveModeEXT vkReleaseFullScreenExclusiveModeEXT = 0; PFN_vkGetDeviceGroupSurfacePresentModes2EXT vkGetDeviceGroupSurfacePresentModes2EXT = 0; - //=== VK_EXT_full_screen_exclusive === - PFN_vkGetPhysicalDeviceSurfacePresentModes2EXT vkGetPhysicalDeviceSurfacePresentModes2EXT = 0; - PFN_vkAcquireFullScreenExclusiveModeEXT vkAcquireFullScreenExclusiveModeEXT = 0; - PFN_vkReleaseFullScreenExclusiveModeEXT vkReleaseFullScreenExclusiveModeEXT = 0; - PFN_vkGetDeviceGroupSurfacePresentModes2EXT vkGetDeviceGroupSurfacePresentModes2EXT = 0; #else PFN_dummy vkGetPhysicalDeviceSurfacePresentModes2EXT_placeholder = 0; PFN_dummy vkAcquireFullScreenExclusiveModeEXT_placeholder = 0; PFN_dummy vkReleaseFullScreenExclusiveModeEXT_placeholder = 0; PFN_dummy vkGetDeviceGroupSurfacePresentModes2EXT_placeholder = 0; - PFN_dummy vkGetPhysicalDeviceSurfacePresentModes2EXT_placeholder = 0; - PFN_dummy vkAcquireFullScreenExclusiveModeEXT_placeholder = 0; - PFN_dummy vkReleaseFullScreenExclusiveModeEXT_placeholder = 0; - PFN_dummy vkGetDeviceGroupSurfacePresentModes2EXT_placeholder = 0; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ //=== VK_EXT_headless_surface === PFN_vkCreateHeadlessSurfaceEXT vkCreateHeadlessSurfaceEXT = 0; - //=== VK_EXT_headless_surface === - PFN_vkCreateHeadlessSurfaceEXT vkCreateHeadlessSurfaceEXT = 0; - //=== VK_KHR_buffer_device_address === - PFN_vkGetBufferDeviceAddressKHR vkGetBufferDeviceAddressKHR = 0; - PFN_vkGetBufferOpaqueCaptureAddressKHR vkGetBufferOpaqueCaptureAddressKHR = 0; - PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR vkGetDeviceMemoryOpaqueCaptureAddressKHR = 0; //=== VK_KHR_buffer_device_address === PFN_vkGetBufferDeviceAddressKHR vkGetBufferDeviceAddressKHR = 0; PFN_vkGetBufferOpaqueCaptureAddressKHR vkGetBufferOpaqueCaptureAddressKHR = 0; PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR vkGetDeviceMemoryOpaqueCaptureAddressKHR = 0; - //=== VK_EXT_line_rasterization === - PFN_vkCmdSetLineStippleEXT vkCmdSetLineStippleEXT = 0; //=== VK_EXT_line_rasterization === PFN_vkCmdSetLineStippleEXT vkCmdSetLineStippleEXT = 0; - //=== VK_EXT_host_query_reset 
=== - PFN_vkResetQueryPoolEXT vkResetQueryPoolEXT = 0; //=== VK_EXT_host_query_reset === PFN_vkResetQueryPoolEXT vkResetQueryPoolEXT = 0; - //=== VK_EXT_extended_dynamic_state === - PFN_vkCmdSetCullModeEXT vkCmdSetCullModeEXT = 0; - PFN_vkCmdSetFrontFaceEXT vkCmdSetFrontFaceEXT = 0; - PFN_vkCmdSetPrimitiveTopologyEXT vkCmdSetPrimitiveTopologyEXT = 0; - PFN_vkCmdSetViewportWithCountEXT vkCmdSetViewportWithCountEXT = 0; - PFN_vkCmdSetScissorWithCountEXT vkCmdSetScissorWithCountEXT = 0; - PFN_vkCmdBindVertexBuffers2EXT vkCmdBindVertexBuffers2EXT = 0; - PFN_vkCmdSetDepthTestEnableEXT vkCmdSetDepthTestEnableEXT = 0; - PFN_vkCmdSetDepthWriteEnableEXT vkCmdSetDepthWriteEnableEXT = 0; - PFN_vkCmdSetDepthCompareOpEXT vkCmdSetDepthCompareOpEXT = 0; - PFN_vkCmdSetDepthBoundsTestEnableEXT vkCmdSetDepthBoundsTestEnableEXT = 0; - PFN_vkCmdSetStencilTestEnableEXT vkCmdSetStencilTestEnableEXT = 0; - PFN_vkCmdSetStencilOpEXT vkCmdSetStencilOpEXT = 0; //=== VK_EXT_extended_dynamic_state === PFN_vkCmdSetCullModeEXT vkCmdSetCullModeEXT = 0; PFN_vkCmdSetFrontFaceEXT vkCmdSetFrontFaceEXT = 0; @@ -23998,12 +18522,6 @@ namespace VULKAN_HPP_NAMESPACE PFN_vkCmdSetStencilTestEnableEXT vkCmdSetStencilTestEnableEXT = 0; PFN_vkCmdSetStencilOpEXT vkCmdSetStencilOpEXT = 0; - //=== VK_KHR_deferred_host_operations === - PFN_vkCreateDeferredOperationKHR vkCreateDeferredOperationKHR = 0; - PFN_vkDestroyDeferredOperationKHR vkDestroyDeferredOperationKHR = 0; - PFN_vkGetDeferredOperationMaxConcurrencyKHR vkGetDeferredOperationMaxConcurrencyKHR = 0; - PFN_vkGetDeferredOperationResultKHR vkGetDeferredOperationResultKHR = 0; - PFN_vkDeferredOperationJoinKHR vkDeferredOperationJoinKHR = 0; //=== VK_KHR_deferred_host_operations === PFN_vkCreateDeferredOperationKHR vkCreateDeferredOperationKHR = 0; PFN_vkDestroyDeferredOperationKHR vkDestroyDeferredOperationKHR = 0; @@ -24011,21 +18529,11 @@ namespace VULKAN_HPP_NAMESPACE PFN_vkGetDeferredOperationResultKHR vkGetDeferredOperationResultKHR = 0; PFN_vkDeferredOperationJoinKHR vkDeferredOperationJoinKHR = 0; - //=== VK_KHR_pipeline_executable_properties === - PFN_vkGetPipelineExecutablePropertiesKHR vkGetPipelineExecutablePropertiesKHR = 0; - PFN_vkGetPipelineExecutableStatisticsKHR vkGetPipelineExecutableStatisticsKHR = 0; - PFN_vkGetPipelineExecutableInternalRepresentationsKHR vkGetPipelineExecutableInternalRepresentationsKHR = 0; //=== VK_KHR_pipeline_executable_properties === PFN_vkGetPipelineExecutablePropertiesKHR vkGetPipelineExecutablePropertiesKHR = 0; PFN_vkGetPipelineExecutableStatisticsKHR vkGetPipelineExecutableStatisticsKHR = 0; PFN_vkGetPipelineExecutableInternalRepresentationsKHR vkGetPipelineExecutableInternalRepresentationsKHR = 0; - //=== VK_EXT_host_image_copy === - PFN_vkCopyMemoryToImageEXT vkCopyMemoryToImageEXT = 0; - PFN_vkCopyImageToMemoryEXT vkCopyImageToMemoryEXT = 0; - PFN_vkCopyImageToImageEXT vkCopyImageToImageEXT = 0; - PFN_vkTransitionImageLayoutEXT vkTransitionImageLayoutEXT = 0; - PFN_vkGetImageSubresourceLayout2EXT vkGetImageSubresourceLayout2EXT = 0; //=== VK_EXT_host_image_copy === PFN_vkCopyMemoryToImageEXT vkCopyMemoryToImageEXT = 0; PFN_vkCopyImageToMemoryEXT vkCopyImageToMemoryEXT = 0; @@ -24033,25 +18541,13 @@ namespace VULKAN_HPP_NAMESPACE PFN_vkTransitionImageLayoutEXT vkTransitionImageLayoutEXT = 0; PFN_vkGetImageSubresourceLayout2EXT vkGetImageSubresourceLayout2EXT = 0; - //=== VK_KHR_map_memory2 === - PFN_vkMapMemory2KHR vkMapMemory2KHR = 0; - PFN_vkUnmapMemory2KHR vkUnmapMemory2KHR = 0; //=== VK_KHR_map_memory2 === PFN_vkMapMemory2KHR 
vkMapMemory2KHR = 0; PFN_vkUnmapMemory2KHR vkUnmapMemory2KHR = 0; - //=== VK_EXT_swapchain_maintenance1 === - PFN_vkReleaseSwapchainImagesEXT vkReleaseSwapchainImagesEXT = 0; //=== VK_EXT_swapchain_maintenance1 === PFN_vkReleaseSwapchainImagesEXT vkReleaseSwapchainImagesEXT = 0; - //=== VK_NV_device_generated_commands === - PFN_vkGetGeneratedCommandsMemoryRequirementsNV vkGetGeneratedCommandsMemoryRequirementsNV = 0; - PFN_vkCmdPreprocessGeneratedCommandsNV vkCmdPreprocessGeneratedCommandsNV = 0; - PFN_vkCmdExecuteGeneratedCommandsNV vkCmdExecuteGeneratedCommandsNV = 0; - PFN_vkCmdBindPipelineShaderGroupNV vkCmdBindPipelineShaderGroupNV = 0; - PFN_vkCreateIndirectCommandsLayoutNV vkCreateIndirectCommandsLayoutNV = 0; - PFN_vkDestroyIndirectCommandsLayoutNV vkDestroyIndirectCommandsLayoutNV = 0; //=== VK_NV_device_generated_commands === PFN_vkGetGeneratedCommandsMemoryRequirementsNV vkGetGeneratedCommandsMemoryRequirementsNV = 0; PFN_vkCmdPreprocessGeneratedCommandsNV vkCmdPreprocessGeneratedCommandsNV = 0; @@ -24060,23 +18556,13 @@ namespace VULKAN_HPP_NAMESPACE PFN_vkCreateIndirectCommandsLayoutNV vkCreateIndirectCommandsLayoutNV = 0; PFN_vkDestroyIndirectCommandsLayoutNV vkDestroyIndirectCommandsLayoutNV = 0; - //=== VK_EXT_depth_bias_control === - PFN_vkCmdSetDepthBias2EXT vkCmdSetDepthBias2EXT = 0; //=== VK_EXT_depth_bias_control === PFN_vkCmdSetDepthBias2EXT vkCmdSetDepthBias2EXT = 0; - //=== VK_EXT_acquire_drm_display === - PFN_vkAcquireDrmDisplayEXT vkAcquireDrmDisplayEXT = 0; - PFN_vkGetDrmDisplayEXT vkGetDrmDisplayEXT = 0; //=== VK_EXT_acquire_drm_display === PFN_vkAcquireDrmDisplayEXT vkAcquireDrmDisplayEXT = 0; PFN_vkGetDrmDisplayEXT vkGetDrmDisplayEXT = 0; - //=== VK_EXT_private_data === - PFN_vkCreatePrivateDataSlotEXT vkCreatePrivateDataSlotEXT = 0; - PFN_vkDestroyPrivateDataSlotEXT vkDestroyPrivateDataSlotEXT = 0; - PFN_vkSetPrivateDataEXT vkSetPrivateDataEXT = 0; - PFN_vkGetPrivateDataEXT vkGetPrivateDataEXT = 0; //=== VK_EXT_private_data === PFN_vkCreatePrivateDataSlotEXT vkCreatePrivateDataSlotEXT = 0; PFN_vkDestroyPrivateDataSlotEXT vkDestroyPrivateDataSlotEXT = 0; @@ -24087,10 +18573,6 @@ namespace VULKAN_HPP_NAMESPACE PFN_vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR = 0; PFN_vkGetEncodedVideoSessionParametersKHR vkGetEncodedVideoSessionParametersKHR = 0; PFN_vkCmdEncodeVideoKHR vkCmdEncodeVideoKHR = 0; - //=== VK_KHR_video_encode_queue === - PFN_vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR = 0; - PFN_vkGetEncodedVideoSessionParametersKHR vkGetEncodedVideoSessionParametersKHR = 0; - PFN_vkCmdEncodeVideoKHR vkCmdEncodeVideoKHR = 0; #if defined( VK_ENABLE_BETA_EXTENSIONS ) //=== VK_NV_cuda_kernel_launch === @@ -24100,13 +18582,6 @@ namespace VULKAN_HPP_NAMESPACE PFN_vkDestroyCudaModuleNV vkDestroyCudaModuleNV = 0; PFN_vkDestroyCudaFunctionNV vkDestroyCudaFunctionNV = 0; PFN_vkCmdCudaLaunchKernelNV vkCmdCudaLaunchKernelNV = 0; - //=== VK_NV_cuda_kernel_launch === - PFN_vkCreateCudaModuleNV vkCreateCudaModuleNV = 0; - PFN_vkGetCudaModuleCacheNV vkGetCudaModuleCacheNV = 0; - PFN_vkCreateCudaFunctionNV vkCreateCudaFunctionNV = 0; - PFN_vkDestroyCudaModuleNV vkDestroyCudaModuleNV = 0; - PFN_vkDestroyCudaFunctionNV vkDestroyCudaFunctionNV = 0; - PFN_vkCmdCudaLaunchKernelNV vkCmdCudaLaunchKernelNV = 0; #else PFN_dummy vkCreateCudaModuleNV_placeholder = 0; PFN_dummy vkGetCudaModuleCacheNV_placeholder = 0; @@ -24114,22 +18589,13 @@ namespace 
VULKAN_HPP_NAMESPACE PFN_dummy vkDestroyCudaModuleNV_placeholder = 0; PFN_dummy vkDestroyCudaFunctionNV_placeholder = 0; PFN_dummy vkCmdCudaLaunchKernelNV_placeholder = 0; - PFN_dummy vkCreateCudaModuleNV_placeholder = 0; - PFN_dummy vkGetCudaModuleCacheNV_placeholder = 0; - PFN_dummy vkCreateCudaFunctionNV_placeholder = 0; - PFN_dummy vkDestroyCudaModuleNV_placeholder = 0; - PFN_dummy vkDestroyCudaFunctionNV_placeholder = 0; - PFN_dummy vkCmdCudaLaunchKernelNV_placeholder = 0; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ #if defined( VK_USE_PLATFORM_METAL_EXT ) //=== VK_EXT_metal_objects === PFN_vkExportMetalObjectsEXT vkExportMetalObjectsEXT = 0; - //=== VK_EXT_metal_objects === - PFN_vkExportMetalObjectsEXT vkExportMetalObjectsEXT = 0; #else PFN_dummy vkExportMetalObjectsEXT_placeholder = 0; - PFN_dummy vkExportMetalObjectsEXT_placeholder = 0; #endif /*VK_USE_PLATFORM_METAL_EXT*/ //=== VK_KHR_synchronization2 === @@ -24139,26 +18605,7 @@ namespace VULKAN_HPP_NAMESPACE PFN_vkCmdPipelineBarrier2KHR vkCmdPipelineBarrier2KHR = 0; PFN_vkCmdWriteTimestamp2KHR vkCmdWriteTimestamp2KHR = 0; PFN_vkQueueSubmit2KHR vkQueueSubmit2KHR = 0; - //=== VK_KHR_synchronization2 === - PFN_vkCmdSetEvent2KHR vkCmdSetEvent2KHR = 0; - PFN_vkCmdResetEvent2KHR vkCmdResetEvent2KHR = 0; - PFN_vkCmdWaitEvents2KHR vkCmdWaitEvents2KHR = 0; - PFN_vkCmdPipelineBarrier2KHR vkCmdPipelineBarrier2KHR = 0; - PFN_vkCmdWriteTimestamp2KHR vkCmdWriteTimestamp2KHR = 0; - PFN_vkQueueSubmit2KHR vkQueueSubmit2KHR = 0; - //=== VK_EXT_descriptor_buffer === - PFN_vkGetDescriptorSetLayoutSizeEXT vkGetDescriptorSetLayoutSizeEXT = 0; - PFN_vkGetDescriptorSetLayoutBindingOffsetEXT vkGetDescriptorSetLayoutBindingOffsetEXT = 0; - PFN_vkGetDescriptorEXT vkGetDescriptorEXT = 0; - PFN_vkCmdBindDescriptorBuffersEXT vkCmdBindDescriptorBuffersEXT = 0; - PFN_vkCmdSetDescriptorBufferOffsetsEXT vkCmdSetDescriptorBufferOffsetsEXT = 0; - PFN_vkCmdBindDescriptorBufferEmbeddedSamplersEXT vkCmdBindDescriptorBufferEmbeddedSamplersEXT = 0; - PFN_vkGetBufferOpaqueCaptureDescriptorDataEXT vkGetBufferOpaqueCaptureDescriptorDataEXT = 0; - PFN_vkGetImageOpaqueCaptureDescriptorDataEXT vkGetImageOpaqueCaptureDescriptorDataEXT = 0; - PFN_vkGetImageViewOpaqueCaptureDescriptorDataEXT vkGetImageViewOpaqueCaptureDescriptorDataEXT = 0; - PFN_vkGetSamplerOpaqueCaptureDescriptorDataEXT vkGetSamplerOpaqueCaptureDescriptorDataEXT = 0; - PFN_vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT = 0; //=== VK_EXT_descriptor_buffer === PFN_vkGetDescriptorSetLayoutSizeEXT vkGetDescriptorSetLayoutSizeEXT = 0; PFN_vkGetDescriptorSetLayoutBindingOffsetEXT vkGetDescriptorSetLayoutBindingOffsetEXT = 0; @@ -24172,27 +18619,14 @@ namespace VULKAN_HPP_NAMESPACE PFN_vkGetSamplerOpaqueCaptureDescriptorDataEXT vkGetSamplerOpaqueCaptureDescriptorDataEXT = 0; PFN_vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT = 0; - //=== VK_NV_fragment_shading_rate_enums === - PFN_vkCmdSetFragmentShadingRateEnumNV vkCmdSetFragmentShadingRateEnumNV = 0; //=== VK_NV_fragment_shading_rate_enums === PFN_vkCmdSetFragmentShadingRateEnumNV vkCmdSetFragmentShadingRateEnumNV = 0; - //=== VK_EXT_mesh_shader === - PFN_vkCmdDrawMeshTasksEXT vkCmdDrawMeshTasksEXT = 0; - PFN_vkCmdDrawMeshTasksIndirectEXT vkCmdDrawMeshTasksIndirectEXT = 0; - PFN_vkCmdDrawMeshTasksIndirectCountEXT vkCmdDrawMeshTasksIndirectCountEXT = 0; //=== VK_EXT_mesh_shader === PFN_vkCmdDrawMeshTasksEXT vkCmdDrawMeshTasksEXT = 0; 
PFN_vkCmdDrawMeshTasksIndirectEXT vkCmdDrawMeshTasksIndirectEXT = 0; PFN_vkCmdDrawMeshTasksIndirectCountEXT vkCmdDrawMeshTasksIndirectCountEXT = 0; - //=== VK_KHR_copy_commands2 === - PFN_vkCmdCopyBuffer2KHR vkCmdCopyBuffer2KHR = 0; - PFN_vkCmdCopyImage2KHR vkCmdCopyImage2KHR = 0; - PFN_vkCmdCopyBufferToImage2KHR vkCmdCopyBufferToImage2KHR = 0; - PFN_vkCmdCopyImageToBuffer2KHR vkCmdCopyImageToBuffer2KHR = 0; - PFN_vkCmdBlitImage2KHR vkCmdBlitImage2KHR = 0; - PFN_vkCmdResolveImage2KHR vkCmdResolveImage2KHR = 0; //=== VK_KHR_copy_commands2 === PFN_vkCmdCopyBuffer2KHR vkCmdCopyBuffer2KHR = 0; PFN_vkCmdCopyImage2KHR vkCmdCopyImage2KHR = 0; @@ -24203,68 +18637,44 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_EXT_device_fault === PFN_vkGetDeviceFaultInfoEXT vkGetDeviceFaultInfoEXT = 0; - //=== VK_EXT_device_fault === - PFN_vkGetDeviceFaultInfoEXT vkGetDeviceFaultInfoEXT = 0; #if defined( VK_USE_PLATFORM_WIN32_KHR ) //=== VK_NV_acquire_winrt_display === PFN_vkAcquireWinrtDisplayNV vkAcquireWinrtDisplayNV = 0; PFN_vkGetWinrtDisplayNV vkGetWinrtDisplayNV = 0; - //=== VK_NV_acquire_winrt_display === - PFN_vkAcquireWinrtDisplayNV vkAcquireWinrtDisplayNV = 0; - PFN_vkGetWinrtDisplayNV vkGetWinrtDisplayNV = 0; #else PFN_dummy vkAcquireWinrtDisplayNV_placeholder = 0; PFN_dummy vkGetWinrtDisplayNV_placeholder = 0; - PFN_dummy vkAcquireWinrtDisplayNV_placeholder = 0; - PFN_dummy vkGetWinrtDisplayNV_placeholder = 0; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ #if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) //=== VK_EXT_directfb_surface === PFN_vkCreateDirectFBSurfaceEXT vkCreateDirectFBSurfaceEXT = 0; PFN_vkGetPhysicalDeviceDirectFBPresentationSupportEXT vkGetPhysicalDeviceDirectFBPresentationSupportEXT = 0; - //=== VK_EXT_directfb_surface === - PFN_vkCreateDirectFBSurfaceEXT vkCreateDirectFBSurfaceEXT = 0; - PFN_vkGetPhysicalDeviceDirectFBPresentationSupportEXT vkGetPhysicalDeviceDirectFBPresentationSupportEXT = 0; #else PFN_dummy vkCreateDirectFBSurfaceEXT_placeholder = 0; PFN_dummy vkGetPhysicalDeviceDirectFBPresentationSupportEXT_placeholder = 0; - PFN_dummy vkCreateDirectFBSurfaceEXT_placeholder = 0; - PFN_dummy vkGetPhysicalDeviceDirectFBPresentationSupportEXT_placeholder = 0; #endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ //=== VK_EXT_vertex_input_dynamic_state === PFN_vkCmdSetVertexInputEXT vkCmdSetVertexInputEXT = 0; - //=== VK_EXT_vertex_input_dynamic_state === - PFN_vkCmdSetVertexInputEXT vkCmdSetVertexInputEXT = 0; #if defined( VK_USE_PLATFORM_FUCHSIA ) //=== VK_FUCHSIA_external_memory === PFN_vkGetMemoryZirconHandleFUCHSIA vkGetMemoryZirconHandleFUCHSIA = 0; PFN_vkGetMemoryZirconHandlePropertiesFUCHSIA vkGetMemoryZirconHandlePropertiesFUCHSIA = 0; - //=== VK_FUCHSIA_external_memory === - PFN_vkGetMemoryZirconHandleFUCHSIA vkGetMemoryZirconHandleFUCHSIA = 0; - PFN_vkGetMemoryZirconHandlePropertiesFUCHSIA vkGetMemoryZirconHandlePropertiesFUCHSIA = 0; #else PFN_dummy vkGetMemoryZirconHandleFUCHSIA_placeholder = 0; PFN_dummy vkGetMemoryZirconHandlePropertiesFUCHSIA_placeholder = 0; - PFN_dummy vkGetMemoryZirconHandleFUCHSIA_placeholder = 0; - PFN_dummy vkGetMemoryZirconHandlePropertiesFUCHSIA_placeholder = 0; #endif /*VK_USE_PLATFORM_FUCHSIA*/ #if defined( VK_USE_PLATFORM_FUCHSIA ) //=== VK_FUCHSIA_external_semaphore === PFN_vkImportSemaphoreZirconHandleFUCHSIA vkImportSemaphoreZirconHandleFUCHSIA = 0; PFN_vkGetSemaphoreZirconHandleFUCHSIA vkGetSemaphoreZirconHandleFUCHSIA = 0; - //=== VK_FUCHSIA_external_semaphore === - PFN_vkImportSemaphoreZirconHandleFUCHSIA vkImportSemaphoreZirconHandleFUCHSIA = 0; - 
PFN_vkGetSemaphoreZirconHandleFUCHSIA vkGetSemaphoreZirconHandleFUCHSIA = 0; #else PFN_dummy vkImportSemaphoreZirconHandleFUCHSIA_placeholder = 0; PFN_dummy vkGetSemaphoreZirconHandleFUCHSIA_placeholder = 0; - PFN_dummy vkImportSemaphoreZirconHandleFUCHSIA_placeholder = 0; - PFN_dummy vkGetSemaphoreZirconHandleFUCHSIA_placeholder = 0; #endif /*VK_USE_PLATFORM_FUCHSIA*/ #if defined( VK_USE_PLATFORM_FUCHSIA ) @@ -24274,44 +18684,24 @@ namespace VULKAN_HPP_NAMESPACE PFN_vkSetBufferCollectionBufferConstraintsFUCHSIA vkSetBufferCollectionBufferConstraintsFUCHSIA = 0; PFN_vkDestroyBufferCollectionFUCHSIA vkDestroyBufferCollectionFUCHSIA = 0; PFN_vkGetBufferCollectionPropertiesFUCHSIA vkGetBufferCollectionPropertiesFUCHSIA = 0; - //=== VK_FUCHSIA_buffer_collection === - PFN_vkCreateBufferCollectionFUCHSIA vkCreateBufferCollectionFUCHSIA = 0; - PFN_vkSetBufferCollectionImageConstraintsFUCHSIA vkSetBufferCollectionImageConstraintsFUCHSIA = 0; - PFN_vkSetBufferCollectionBufferConstraintsFUCHSIA vkSetBufferCollectionBufferConstraintsFUCHSIA = 0; - PFN_vkDestroyBufferCollectionFUCHSIA vkDestroyBufferCollectionFUCHSIA = 0; - PFN_vkGetBufferCollectionPropertiesFUCHSIA vkGetBufferCollectionPropertiesFUCHSIA = 0; #else PFN_dummy vkCreateBufferCollectionFUCHSIA_placeholder = 0; PFN_dummy vkSetBufferCollectionImageConstraintsFUCHSIA_placeholder = 0; PFN_dummy vkSetBufferCollectionBufferConstraintsFUCHSIA_placeholder = 0; PFN_dummy vkDestroyBufferCollectionFUCHSIA_placeholder = 0; PFN_dummy vkGetBufferCollectionPropertiesFUCHSIA_placeholder = 0; - PFN_dummy vkCreateBufferCollectionFUCHSIA_placeholder = 0; - PFN_dummy vkSetBufferCollectionImageConstraintsFUCHSIA_placeholder = 0; - PFN_dummy vkSetBufferCollectionBufferConstraintsFUCHSIA_placeholder = 0; - PFN_dummy vkDestroyBufferCollectionFUCHSIA_placeholder = 0; - PFN_dummy vkGetBufferCollectionPropertiesFUCHSIA_placeholder = 0; #endif /*VK_USE_PLATFORM_FUCHSIA*/ //=== VK_HUAWEI_subpass_shading === PFN_vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI = 0; PFN_vkCmdSubpassShadingHUAWEI vkCmdSubpassShadingHUAWEI = 0; - //=== VK_HUAWEI_subpass_shading === - PFN_vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI = 0; - PFN_vkCmdSubpassShadingHUAWEI vkCmdSubpassShadingHUAWEI = 0; - //=== VK_HUAWEI_invocation_mask === - PFN_vkCmdBindInvocationMaskHUAWEI vkCmdBindInvocationMaskHUAWEI = 0; //=== VK_HUAWEI_invocation_mask === PFN_vkCmdBindInvocationMaskHUAWEI vkCmdBindInvocationMaskHUAWEI = 0; - //=== VK_NV_external_memory_rdma === - PFN_vkGetMemoryRemoteAddressNV vkGetMemoryRemoteAddressNV = 0; //=== VK_NV_external_memory_rdma === PFN_vkGetMemoryRemoteAddressNV vkGetMemoryRemoteAddressNV = 0; - //=== VK_EXT_pipeline_properties === - PFN_vkGetPipelinePropertiesEXT vkGetPipelinePropertiesEXT = 0; //=== VK_EXT_pipeline_properties === PFN_vkGetPipelinePropertiesEXT vkGetPipelinePropertiesEXT = 0; @@ -24321,59 +18711,26 @@ namespace VULKAN_HPP_NAMESPACE PFN_vkCmdSetDepthBiasEnableEXT vkCmdSetDepthBiasEnableEXT = 0; PFN_vkCmdSetLogicOpEXT vkCmdSetLogicOpEXT = 0; PFN_vkCmdSetPrimitiveRestartEnableEXT vkCmdSetPrimitiveRestartEnableEXT = 0; - //=== VK_EXT_extended_dynamic_state2 === - PFN_vkCmdSetPatchControlPointsEXT vkCmdSetPatchControlPointsEXT = 0; - PFN_vkCmdSetRasterizerDiscardEnableEXT vkCmdSetRasterizerDiscardEnableEXT = 0; - PFN_vkCmdSetDepthBiasEnableEXT vkCmdSetDepthBiasEnableEXT = 0; - PFN_vkCmdSetLogicOpEXT vkCmdSetLogicOpEXT = 0; - PFN_vkCmdSetPrimitiveRestartEnableEXT 
vkCmdSetPrimitiveRestartEnableEXT = 0; #if defined( VK_USE_PLATFORM_SCREEN_QNX ) //=== VK_QNX_screen_surface === PFN_vkCreateScreenSurfaceQNX vkCreateScreenSurfaceQNX = 0; PFN_vkGetPhysicalDeviceScreenPresentationSupportQNX vkGetPhysicalDeviceScreenPresentationSupportQNX = 0; - //=== VK_QNX_screen_surface === - PFN_vkCreateScreenSurfaceQNX vkCreateScreenSurfaceQNX = 0; - PFN_vkGetPhysicalDeviceScreenPresentationSupportQNX vkGetPhysicalDeviceScreenPresentationSupportQNX = 0; #else PFN_dummy vkCreateScreenSurfaceQNX_placeholder = 0; PFN_dummy vkGetPhysicalDeviceScreenPresentationSupportQNX_placeholder = 0; - PFN_dummy vkCreateScreenSurfaceQNX_placeholder = 0; - PFN_dummy vkGetPhysicalDeviceScreenPresentationSupportQNX_placeholder = 0; #endif /*VK_USE_PLATFORM_SCREEN_QNX*/ //=== VK_EXT_color_write_enable === PFN_vkCmdSetColorWriteEnableEXT vkCmdSetColorWriteEnableEXT = 0; - //=== VK_EXT_color_write_enable === - PFN_vkCmdSetColorWriteEnableEXT vkCmdSetColorWriteEnableEXT = 0; - //=== VK_KHR_ray_tracing_maintenance1 === - PFN_vkCmdTraceRaysIndirect2KHR vkCmdTraceRaysIndirect2KHR = 0; //=== VK_KHR_ray_tracing_maintenance1 === PFN_vkCmdTraceRaysIndirect2KHR vkCmdTraceRaysIndirect2KHR = 0; - //=== VK_EXT_multi_draw === - PFN_vkCmdDrawMultiEXT vkCmdDrawMultiEXT = 0; - PFN_vkCmdDrawMultiIndexedEXT vkCmdDrawMultiIndexedEXT = 0; //=== VK_EXT_multi_draw === PFN_vkCmdDrawMultiEXT vkCmdDrawMultiEXT = 0; PFN_vkCmdDrawMultiIndexedEXT vkCmdDrawMultiIndexedEXT = 0; - //=== VK_EXT_opacity_micromap === - PFN_vkCreateMicromapEXT vkCreateMicromapEXT = 0; - PFN_vkDestroyMicromapEXT vkDestroyMicromapEXT = 0; - PFN_vkCmdBuildMicromapsEXT vkCmdBuildMicromapsEXT = 0; - PFN_vkBuildMicromapsEXT vkBuildMicromapsEXT = 0; - PFN_vkCopyMicromapEXT vkCopyMicromapEXT = 0; - PFN_vkCopyMicromapToMemoryEXT vkCopyMicromapToMemoryEXT = 0; - PFN_vkCopyMemoryToMicromapEXT vkCopyMemoryToMicromapEXT = 0; - PFN_vkWriteMicromapsPropertiesEXT vkWriteMicromapsPropertiesEXT = 0; - PFN_vkCmdCopyMicromapEXT vkCmdCopyMicromapEXT = 0; - PFN_vkCmdCopyMicromapToMemoryEXT vkCmdCopyMicromapToMemoryEXT = 0; - PFN_vkCmdCopyMemoryToMicromapEXT vkCmdCopyMemoryToMicromapEXT = 0; - PFN_vkCmdWriteMicromapsPropertiesEXT vkCmdWriteMicromapsPropertiesEXT = 0; - PFN_vkGetDeviceMicromapCompatibilityEXT vkGetDeviceMicromapCompatibilityEXT = 0; - PFN_vkGetMicromapBuildSizesEXT vkGetMicromapBuildSizesEXT = 0; //=== VK_EXT_opacity_micromap === PFN_vkCreateMicromapEXT vkCreateMicromapEXT = 0; PFN_vkDestroyMicromapEXT vkDestroyMicromapEXT = 0; @@ -24390,89 +18747,35 @@ namespace VULKAN_HPP_NAMESPACE PFN_vkGetDeviceMicromapCompatibilityEXT vkGetDeviceMicromapCompatibilityEXT = 0; PFN_vkGetMicromapBuildSizesEXT vkGetMicromapBuildSizesEXT = 0; - //=== VK_HUAWEI_cluster_culling_shader === - PFN_vkCmdDrawClusterHUAWEI vkCmdDrawClusterHUAWEI = 0; - PFN_vkCmdDrawClusterIndirectHUAWEI vkCmdDrawClusterIndirectHUAWEI = 0; //=== VK_HUAWEI_cluster_culling_shader === PFN_vkCmdDrawClusterHUAWEI vkCmdDrawClusterHUAWEI = 0; PFN_vkCmdDrawClusterIndirectHUAWEI vkCmdDrawClusterIndirectHUAWEI = 0; - //=== VK_EXT_pageable_device_local_memory === - PFN_vkSetDeviceMemoryPriorityEXT vkSetDeviceMemoryPriorityEXT = 0; //=== VK_EXT_pageable_device_local_memory === PFN_vkSetDeviceMemoryPriorityEXT vkSetDeviceMemoryPriorityEXT = 0; - //=== VK_KHR_maintenance4 === - PFN_vkGetDeviceBufferMemoryRequirementsKHR vkGetDeviceBufferMemoryRequirementsKHR = 0; - PFN_vkGetDeviceImageMemoryRequirementsKHR vkGetDeviceImageMemoryRequirementsKHR = 0; - PFN_vkGetDeviceImageSparseMemoryRequirementsKHR 
vkGetDeviceImageSparseMemoryRequirementsKHR = 0; //=== VK_KHR_maintenance4 === PFN_vkGetDeviceBufferMemoryRequirementsKHR vkGetDeviceBufferMemoryRequirementsKHR = 0; PFN_vkGetDeviceImageMemoryRequirementsKHR vkGetDeviceImageMemoryRequirementsKHR = 0; PFN_vkGetDeviceImageSparseMemoryRequirementsKHR vkGetDeviceImageSparseMemoryRequirementsKHR = 0; - //=== VK_VALVE_descriptor_set_host_mapping === - PFN_vkGetDescriptorSetLayoutHostMappingInfoVALVE vkGetDescriptorSetLayoutHostMappingInfoVALVE = 0; - PFN_vkGetDescriptorSetHostMappingVALVE vkGetDescriptorSetHostMappingVALVE = 0; //=== VK_VALVE_descriptor_set_host_mapping === PFN_vkGetDescriptorSetLayoutHostMappingInfoVALVE vkGetDescriptorSetLayoutHostMappingInfoVALVE = 0; PFN_vkGetDescriptorSetHostMappingVALVE vkGetDescriptorSetHostMappingVALVE = 0; - //=== VK_NV_copy_memory_indirect === - PFN_vkCmdCopyMemoryIndirectNV vkCmdCopyMemoryIndirectNV = 0; - PFN_vkCmdCopyMemoryToImageIndirectNV vkCmdCopyMemoryToImageIndirectNV = 0; //=== VK_NV_copy_memory_indirect === PFN_vkCmdCopyMemoryIndirectNV vkCmdCopyMemoryIndirectNV = 0; PFN_vkCmdCopyMemoryToImageIndirectNV vkCmdCopyMemoryToImageIndirectNV = 0; - //=== VK_NV_memory_decompression === - PFN_vkCmdDecompressMemoryNV vkCmdDecompressMemoryNV = 0; - PFN_vkCmdDecompressMemoryIndirectCountNV vkCmdDecompressMemoryIndirectCountNV = 0; //=== VK_NV_memory_decompression === PFN_vkCmdDecompressMemoryNV vkCmdDecompressMemoryNV = 0; PFN_vkCmdDecompressMemoryIndirectCountNV vkCmdDecompressMemoryIndirectCountNV = 0; - //=== VK_NV_device_generated_commands_compute === - PFN_vkGetPipelineIndirectMemoryRequirementsNV vkGetPipelineIndirectMemoryRequirementsNV = 0; - PFN_vkCmdUpdatePipelineIndirectBufferNV vkCmdUpdatePipelineIndirectBufferNV = 0; - PFN_vkGetPipelineIndirectDeviceAddressNV vkGetPipelineIndirectDeviceAddressNV = 0; //=== VK_NV_device_generated_commands_compute === PFN_vkGetPipelineIndirectMemoryRequirementsNV vkGetPipelineIndirectMemoryRequirementsNV = 0; PFN_vkCmdUpdatePipelineIndirectBufferNV vkCmdUpdatePipelineIndirectBufferNV = 0; PFN_vkGetPipelineIndirectDeviceAddressNV vkGetPipelineIndirectDeviceAddressNV = 0; - //=== VK_EXT_extended_dynamic_state3 === - PFN_vkCmdSetDepthClampEnableEXT vkCmdSetDepthClampEnableEXT = 0; - PFN_vkCmdSetPolygonModeEXT vkCmdSetPolygonModeEXT = 0; - PFN_vkCmdSetRasterizationSamplesEXT vkCmdSetRasterizationSamplesEXT = 0; - PFN_vkCmdSetSampleMaskEXT vkCmdSetSampleMaskEXT = 0; - PFN_vkCmdSetAlphaToCoverageEnableEXT vkCmdSetAlphaToCoverageEnableEXT = 0; - PFN_vkCmdSetAlphaToOneEnableEXT vkCmdSetAlphaToOneEnableEXT = 0; - PFN_vkCmdSetLogicOpEnableEXT vkCmdSetLogicOpEnableEXT = 0; - PFN_vkCmdSetColorBlendEnableEXT vkCmdSetColorBlendEnableEXT = 0; - PFN_vkCmdSetColorBlendEquationEXT vkCmdSetColorBlendEquationEXT = 0; - PFN_vkCmdSetColorWriteMaskEXT vkCmdSetColorWriteMaskEXT = 0; - PFN_vkCmdSetTessellationDomainOriginEXT vkCmdSetTessellationDomainOriginEXT = 0; - PFN_vkCmdSetRasterizationStreamEXT vkCmdSetRasterizationStreamEXT = 0; - PFN_vkCmdSetConservativeRasterizationModeEXT vkCmdSetConservativeRasterizationModeEXT = 0; - PFN_vkCmdSetExtraPrimitiveOverestimationSizeEXT vkCmdSetExtraPrimitiveOverestimationSizeEXT = 0; - PFN_vkCmdSetDepthClipEnableEXT vkCmdSetDepthClipEnableEXT = 0; - PFN_vkCmdSetSampleLocationsEnableEXT vkCmdSetSampleLocationsEnableEXT = 0; - PFN_vkCmdSetColorBlendAdvancedEXT vkCmdSetColorBlendAdvancedEXT = 0; - PFN_vkCmdSetProvokingVertexModeEXT vkCmdSetProvokingVertexModeEXT = 0; - PFN_vkCmdSetLineRasterizationModeEXT vkCmdSetLineRasterizationModeEXT = 0; - 
PFN_vkCmdSetLineStippleEnableEXT vkCmdSetLineStippleEnableEXT = 0; - PFN_vkCmdSetDepthClipNegativeOneToOneEXT vkCmdSetDepthClipNegativeOneToOneEXT = 0; - PFN_vkCmdSetViewportWScalingEnableNV vkCmdSetViewportWScalingEnableNV = 0; - PFN_vkCmdSetViewportSwizzleNV vkCmdSetViewportSwizzleNV = 0; - PFN_vkCmdSetCoverageToColorEnableNV vkCmdSetCoverageToColorEnableNV = 0; - PFN_vkCmdSetCoverageToColorLocationNV vkCmdSetCoverageToColorLocationNV = 0; - PFN_vkCmdSetCoverageModulationModeNV vkCmdSetCoverageModulationModeNV = 0; - PFN_vkCmdSetCoverageModulationTableEnableNV vkCmdSetCoverageModulationTableEnableNV = 0; - PFN_vkCmdSetCoverageModulationTableNV vkCmdSetCoverageModulationTableNV = 0; - PFN_vkCmdSetShadingRateImageEnableNV vkCmdSetShadingRateImageEnableNV = 0; - PFN_vkCmdSetRepresentativeFragmentTestEnableNV vkCmdSetRepresentativeFragmentTestEnableNV = 0; - PFN_vkCmdSetCoverageReductionModeNV vkCmdSetCoverageReductionModeNV = 0; //=== VK_EXT_extended_dynamic_state3 === PFN_vkCmdSetDepthClampEnableEXT vkCmdSetDepthClampEnableEXT = 0; PFN_vkCmdSetPolygonModeEXT vkCmdSetPolygonModeEXT = 0; @@ -24506,19 +18809,10 @@ namespace VULKAN_HPP_NAMESPACE PFN_vkCmdSetRepresentativeFragmentTestEnableNV vkCmdSetRepresentativeFragmentTestEnableNV = 0; PFN_vkCmdSetCoverageReductionModeNV vkCmdSetCoverageReductionModeNV = 0; - //=== VK_EXT_shader_module_identifier === - PFN_vkGetShaderModuleIdentifierEXT vkGetShaderModuleIdentifierEXT = 0; - PFN_vkGetShaderModuleCreateInfoIdentifierEXT vkGetShaderModuleCreateInfoIdentifierEXT = 0; //=== VK_EXT_shader_module_identifier === PFN_vkGetShaderModuleIdentifierEXT vkGetShaderModuleIdentifierEXT = 0; PFN_vkGetShaderModuleCreateInfoIdentifierEXT vkGetShaderModuleCreateInfoIdentifierEXT = 0; - //=== VK_NV_optical_flow === - PFN_vkGetPhysicalDeviceOpticalFlowImageFormatsNV vkGetPhysicalDeviceOpticalFlowImageFormatsNV = 0; - PFN_vkCreateOpticalFlowSessionNV vkCreateOpticalFlowSessionNV = 0; - PFN_vkDestroyOpticalFlowSessionNV vkDestroyOpticalFlowSessionNV = 0; - PFN_vkBindOpticalFlowSessionImageNV vkBindOpticalFlowSessionImageNV = 0; - PFN_vkCmdOpticalFlowExecuteNV vkCmdOpticalFlowExecuteNV = 0; //=== VK_NV_optical_flow === PFN_vkGetPhysicalDeviceOpticalFlowImageFormatsNV vkGetPhysicalDeviceOpticalFlowImageFormatsNV = 0; PFN_vkCreateOpticalFlowSessionNV vkCreateOpticalFlowSessionNV = 0; @@ -24532,14 +18826,6 @@ namespace VULKAN_HPP_NAMESPACE PFN_vkGetDeviceImageSubresourceLayoutKHR vkGetDeviceImageSubresourceLayoutKHR = 0; PFN_vkGetImageSubresourceLayout2KHR vkGetImageSubresourceLayout2KHR = 0; - //=== VK_AMD_anti_lag === - PFN_vkAntiLagUpdateAMD vkAntiLagUpdateAMD = 0; - //=== VK_KHR_maintenance5 === - PFN_vkCmdBindIndexBuffer2KHR vkCmdBindIndexBuffer2KHR = 0; - PFN_vkGetRenderingAreaGranularityKHR vkGetRenderingAreaGranularityKHR = 0; - PFN_vkGetDeviceImageSubresourceLayoutKHR vkGetDeviceImageSubresourceLayoutKHR = 0; - PFN_vkGetImageSubresourceLayout2KHR vkGetImageSubresourceLayout2KHR = 0; - //=== VK_AMD_anti_lag === PFN_vkAntiLagUpdateAMD vkAntiLagUpdateAMD = 0; @@ -24556,33 +18842,11 @@ namespace VULKAN_HPP_NAMESPACE PFN_vkGetPipelineKeyKHR vkGetPipelineKeyKHR = 0; PFN_vkGetPipelineBinaryDataKHR vkGetPipelineBinaryDataKHR = 0; PFN_vkReleaseCapturedPipelineDataKHR vkReleaseCapturedPipelineDataKHR = 0; - //=== VK_EXT_shader_object === - PFN_vkCreateShadersEXT vkCreateShadersEXT = 0; - PFN_vkDestroyShaderEXT vkDestroyShaderEXT = 0; - PFN_vkGetShaderBinaryDataEXT vkGetShaderBinaryDataEXT = 0; - PFN_vkCmdBindShadersEXT vkCmdBindShadersEXT = 0; - 
PFN_vkCmdSetDepthClampRangeEXT vkCmdSetDepthClampRangeEXT = 0; - //=== VK_KHR_pipeline_binary === - PFN_vkCreatePipelineBinariesKHR vkCreatePipelineBinariesKHR = 0; - PFN_vkDestroyPipelineBinaryKHR vkDestroyPipelineBinaryKHR = 0; - PFN_vkGetPipelineKeyKHR vkGetPipelineKeyKHR = 0; - PFN_vkGetPipelineBinaryDataKHR vkGetPipelineBinaryDataKHR = 0; - PFN_vkReleaseCapturedPipelineDataKHR vkReleaseCapturedPipelineDataKHR = 0; - - //=== VK_QCOM_tile_properties === - PFN_vkGetFramebufferTilePropertiesQCOM vkGetFramebufferTilePropertiesQCOM = 0; - PFN_vkGetDynamicRenderingTilePropertiesQCOM vkGetDynamicRenderingTilePropertiesQCOM = 0; //=== VK_QCOM_tile_properties === PFN_vkGetFramebufferTilePropertiesQCOM vkGetFramebufferTilePropertiesQCOM = 0; PFN_vkGetDynamicRenderingTilePropertiesQCOM vkGetDynamicRenderingTilePropertiesQCOM = 0; - //=== VK_NV_low_latency2 === - PFN_vkSetLatencySleepModeNV vkSetLatencySleepModeNV = 0; - PFN_vkLatencySleepNV vkLatencySleepNV = 0; - PFN_vkSetLatencyMarkerNV vkSetLatencyMarkerNV = 0; - PFN_vkGetLatencyTimingsNV vkGetLatencyTimingsNV = 0; - PFN_vkQueueNotifyOutOfBandNV vkQueueNotifyOutOfBandNV = 0; //=== VK_NV_low_latency2 === PFN_vkSetLatencySleepModeNV vkSetLatencySleepModeNV = 0; PFN_vkLatencySleepNV vkLatencySleepNV = 0; @@ -24590,45 +18854,26 @@ namespace VULKAN_HPP_NAMESPACE PFN_vkGetLatencyTimingsNV vkGetLatencyTimingsNV = 0; PFN_vkQueueNotifyOutOfBandNV vkQueueNotifyOutOfBandNV = 0; - //=== VK_KHR_cooperative_matrix === - PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR = 0; //=== VK_KHR_cooperative_matrix === PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR = 0; //=== VK_EXT_attachment_feedback_loop_dynamic_state === PFN_vkCmdSetAttachmentFeedbackLoopEnableEXT vkCmdSetAttachmentFeedbackLoopEnableEXT = 0; - //=== VK_EXT_attachment_feedback_loop_dynamic_state === - PFN_vkCmdSetAttachmentFeedbackLoopEnableEXT vkCmdSetAttachmentFeedbackLoopEnableEXT = 0; #if defined( VK_USE_PLATFORM_SCREEN_QNX ) //=== VK_QNX_external_memory_screen_buffer === PFN_vkGetScreenBufferPropertiesQNX vkGetScreenBufferPropertiesQNX = 0; - //=== VK_QNX_external_memory_screen_buffer === - PFN_vkGetScreenBufferPropertiesQNX vkGetScreenBufferPropertiesQNX = 0; #else PFN_dummy vkGetScreenBufferPropertiesQNX_placeholder = 0; - PFN_dummy vkGetScreenBufferPropertiesQNX_placeholder = 0; #endif /*VK_USE_PLATFORM_SCREEN_QNX*/ //=== VK_KHR_line_rasterization === PFN_vkCmdSetLineStippleKHR vkCmdSetLineStippleKHR = 0; - //=== VK_KHR_line_rasterization === - PFN_vkCmdSetLineStippleKHR vkCmdSetLineStippleKHR = 0; - //=== VK_KHR_calibrated_timestamps === - PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsKHR vkGetPhysicalDeviceCalibrateableTimeDomainsKHR = 0; - PFN_vkGetCalibratedTimestampsKHR vkGetCalibratedTimestampsKHR = 0; //=== VK_KHR_calibrated_timestamps === PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsKHR vkGetPhysicalDeviceCalibrateableTimeDomainsKHR = 0; PFN_vkGetCalibratedTimestampsKHR vkGetCalibratedTimestampsKHR = 0; - //=== VK_KHR_maintenance6 === - PFN_vkCmdBindDescriptorSets2KHR vkCmdBindDescriptorSets2KHR = 0; - PFN_vkCmdPushConstants2KHR vkCmdPushConstants2KHR = 0; - PFN_vkCmdPushDescriptorSet2KHR vkCmdPushDescriptorSet2KHR = 0; - PFN_vkCmdPushDescriptorSetWithTemplate2KHR vkCmdPushDescriptorSetWithTemplate2KHR = 0; - PFN_vkCmdSetDescriptorBufferOffsets2EXT vkCmdSetDescriptorBufferOffsets2EXT = 0; - PFN_vkCmdBindDescriptorBufferEmbeddedSamplers2EXT 
vkCmdBindDescriptorBufferEmbeddedSamplers2EXT = 0; //=== VK_KHR_maintenance6 === PFN_vkCmdBindDescriptorSets2KHR vkCmdBindDescriptorSets2KHR = 0; PFN_vkCmdPushConstants2KHR vkCmdPushConstants2KHR = 0; @@ -24654,41 +18899,12 @@ namespace VULKAN_HPP_NAMESPACE public: DispatchLoaderDynamic() VULKAN_HPP_NOEXCEPT = default; DispatchLoaderDynamic( DispatchLoaderDynamic const & rhs ) VULKAN_HPP_NOEXCEPT = default; - //=== VK_EXT_device_generated_commands === - PFN_vkGetGeneratedCommandsMemoryRequirementsEXT vkGetGeneratedCommandsMemoryRequirementsEXT = 0; - PFN_vkCmdPreprocessGeneratedCommandsEXT vkCmdPreprocessGeneratedCommandsEXT = 0; - PFN_vkCmdExecuteGeneratedCommandsEXT vkCmdExecuteGeneratedCommandsEXT = 0; - PFN_vkCreateIndirectCommandsLayoutEXT vkCreateIndirectCommandsLayoutEXT = 0; - PFN_vkDestroyIndirectCommandsLayoutEXT vkDestroyIndirectCommandsLayoutEXT = 0; - PFN_vkCreateIndirectExecutionSetEXT vkCreateIndirectExecutionSetEXT = 0; - PFN_vkDestroyIndirectExecutionSetEXT vkDestroyIndirectExecutionSetEXT = 0; - PFN_vkUpdateIndirectExecutionSetPipelineEXT vkUpdateIndirectExecutionSetPipelineEXT = 0; - PFN_vkUpdateIndirectExecutionSetShaderEXT vkUpdateIndirectExecutionSetShaderEXT = 0; - //=== VK_NV_cooperative_matrix2 === - PFN_vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV = 0; - - public: - DispatchLoaderDynamic() VULKAN_HPP_NOEXCEPT = default; - DispatchLoaderDynamic( DispatchLoaderDynamic const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - DispatchLoaderDynamic( PFN_vkGetInstanceProcAddr getInstanceProcAddr ) VULKAN_HPP_NOEXCEPT - { - init( getInstanceProcAddr ); - } DispatchLoaderDynamic( PFN_vkGetInstanceProcAddr getInstanceProcAddr ) VULKAN_HPP_NOEXCEPT { init( getInstanceProcAddr ); } - // This interface does not require a linked vulkan library. - DispatchLoaderDynamic( VkInstance instance, - PFN_vkGetInstanceProcAddr getInstanceProcAddr, - VkDevice device = {}, - PFN_vkGetDeviceProcAddr getDeviceProcAddr = nullptr ) VULKAN_HPP_NOEXCEPT - { - init( instance, getInstanceProcAddr, device, getDeviceProcAddr ); - } // This interface does not require a linked vulkan library. 
DispatchLoaderDynamic( VkInstance instance, PFN_vkGetInstanceProcAddr getInstanceProcAddr, @@ -24699,30 +18915,16 @@ namespace VULKAN_HPP_NAMESPACE } template void init() - { - static DynamicLoader dl; - init( dl ); - } - > - void init() { static DynamicLoader dl; init( dl ); } - template - void init( DynamicLoader const & dl ) VULKAN_HPP_NOEXCEPT - { - PFN_vkGetInstanceProcAddr getInstanceProcAddr = dl.template getProcAddress( "vkGetInstanceProcAddr" ); - init( getInstanceProcAddr ); - } template void init( DynamicLoader const & dl ) VULKAN_HPP_NOEXCEPT { @@ -24730,21 +18932,12 @@ namespace VULKAN_HPP_NAMESPACE init( getInstanceProcAddr ); } - void init( PFN_vkGetInstanceProcAddr getInstanceProcAddr ) VULKAN_HPP_NOEXCEPT - { - VULKAN_HPP_ASSERT( getInstanceProcAddr ); void init( PFN_vkGetInstanceProcAddr getInstanceProcAddr ) VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( getInstanceProcAddr ); - vkGetInstanceProcAddr = getInstanceProcAddr; vkGetInstanceProcAddr = getInstanceProcAddr; - //=== VK_VERSION_1_0 === - vkCreateInstance = PFN_vkCreateInstance( vkGetInstanceProcAddr( NULL, "vkCreateInstance" ) ); - vkEnumerateInstanceExtensionProperties = - PFN_vkEnumerateInstanceExtensionProperties( vkGetInstanceProcAddr( NULL, "vkEnumerateInstanceExtensionProperties" ) ); - vkEnumerateInstanceLayerProperties = PFN_vkEnumerateInstanceLayerProperties( vkGetInstanceProcAddr( NULL, "vkEnumerateInstanceLayerProperties" ) ); //=== VK_VERSION_1_0 === vkCreateInstance = PFN_vkCreateInstance( vkGetInstanceProcAddr( NULL, "vkCreateInstance" ) ); vkEnumerateInstanceExtensionProperties = @@ -24754,24 +18947,7 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_VERSION_1_1 === vkEnumerateInstanceVersion = PFN_vkEnumerateInstanceVersion( vkGetInstanceProcAddr( NULL, "vkEnumerateInstanceVersion" ) ); } - //=== VK_VERSION_1_1 === - vkEnumerateInstanceVersion = PFN_vkEnumerateInstanceVersion( vkGetInstanceProcAddr( NULL, "vkEnumerateInstanceVersion" ) ); - } - // This interface does not require a linked vulkan library. - void init( VkInstance instance, - PFN_vkGetInstanceProcAddr getInstanceProcAddr, - VkDevice device = {}, - PFN_vkGetDeviceProcAddr /*getDeviceProcAddr*/ = nullptr ) VULKAN_HPP_NOEXCEPT - { - VULKAN_HPP_ASSERT( instance && getInstanceProcAddr ); - vkGetInstanceProcAddr = getInstanceProcAddr; - init( VULKAN_HPP_NAMESPACE::Instance( instance ) ); - if ( device ) - { - init( VULKAN_HPP_NAMESPACE::Device( device ) ); - } - } // This interface does not require a linked vulkan library. 
void init( VkInstance instance, PFN_vkGetInstanceProcAddr getInstanceProcAddr, @@ -24787,9 +18963,6 @@ namespace VULKAN_HPP_NAMESPACE } } - void init( VULKAN_HPP_NAMESPACE::Instance instanceCpp ) VULKAN_HPP_NOEXCEPT - { - VkInstance instance = static_cast( instanceCpp ); void init( VULKAN_HPP_NAMESPACE::Instance instanceCpp ) VULKAN_HPP_NOEXCEPT { VkInstance instance = static_cast( instanceCpp ); @@ -24934,184 +19107,7 @@ namespace VULKAN_HPP_NAMESPACE vkCmdNextSubpass = PFN_vkCmdNextSubpass( vkGetInstanceProcAddr( instance, "vkCmdNextSubpass" ) ); vkCmdEndRenderPass = PFN_vkCmdEndRenderPass( vkGetInstanceProcAddr( instance, "vkCmdEndRenderPass" ) ); vkCmdExecuteCommands = PFN_vkCmdExecuteCommands( vkGetInstanceProcAddr( instance, "vkCmdExecuteCommands" ) ); - //=== VK_VERSION_1_0 === - vkDestroyInstance = PFN_vkDestroyInstance( vkGetInstanceProcAddr( instance, "vkDestroyInstance" ) ); - vkEnumeratePhysicalDevices = PFN_vkEnumeratePhysicalDevices( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDevices" ) ); - vkGetPhysicalDeviceFeatures = PFN_vkGetPhysicalDeviceFeatures( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures" ) ); - vkGetPhysicalDeviceFormatProperties = - PFN_vkGetPhysicalDeviceFormatProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties" ) ); - vkGetPhysicalDeviceImageFormatProperties = - PFN_vkGetPhysicalDeviceImageFormatProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties" ) ); - vkGetPhysicalDeviceProperties = PFN_vkGetPhysicalDeviceProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties" ) ); - vkGetPhysicalDeviceQueueFamilyProperties = - PFN_vkGetPhysicalDeviceQueueFamilyProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties" ) ); - vkGetPhysicalDeviceMemoryProperties = - PFN_vkGetPhysicalDeviceMemoryProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties" ) ); - vkGetDeviceProcAddr = PFN_vkGetDeviceProcAddr( vkGetInstanceProcAddr( instance, "vkGetDeviceProcAddr" ) ); - vkCreateDevice = PFN_vkCreateDevice( vkGetInstanceProcAddr( instance, "vkCreateDevice" ) ); - vkDestroyDevice = PFN_vkDestroyDevice( vkGetInstanceProcAddr( instance, "vkDestroyDevice" ) ); - vkEnumerateDeviceExtensionProperties = - PFN_vkEnumerateDeviceExtensionProperties( vkGetInstanceProcAddr( instance, "vkEnumerateDeviceExtensionProperties" ) ); - vkEnumerateDeviceLayerProperties = PFN_vkEnumerateDeviceLayerProperties( vkGetInstanceProcAddr( instance, "vkEnumerateDeviceLayerProperties" ) ); - vkGetDeviceQueue = PFN_vkGetDeviceQueue( vkGetInstanceProcAddr( instance, "vkGetDeviceQueue" ) ); - vkQueueSubmit = PFN_vkQueueSubmit( vkGetInstanceProcAddr( instance, "vkQueueSubmit" ) ); - vkQueueWaitIdle = PFN_vkQueueWaitIdle( vkGetInstanceProcAddr( instance, "vkQueueWaitIdle" ) ); - vkDeviceWaitIdle = PFN_vkDeviceWaitIdle( vkGetInstanceProcAddr( instance, "vkDeviceWaitIdle" ) ); - vkAllocateMemory = PFN_vkAllocateMemory( vkGetInstanceProcAddr( instance, "vkAllocateMemory" ) ); - vkFreeMemory = PFN_vkFreeMemory( vkGetInstanceProcAddr( instance, "vkFreeMemory" ) ); - vkMapMemory = PFN_vkMapMemory( vkGetInstanceProcAddr( instance, "vkMapMemory" ) ); - vkUnmapMemory = PFN_vkUnmapMemory( vkGetInstanceProcAddr( instance, "vkUnmapMemory" ) ); - vkFlushMappedMemoryRanges = PFN_vkFlushMappedMemoryRanges( vkGetInstanceProcAddr( instance, "vkFlushMappedMemoryRanges" ) ); - vkInvalidateMappedMemoryRanges = PFN_vkInvalidateMappedMemoryRanges( vkGetInstanceProcAddr( 
instance, "vkInvalidateMappedMemoryRanges" ) ); - vkGetDeviceMemoryCommitment = PFN_vkGetDeviceMemoryCommitment( vkGetInstanceProcAddr( instance, "vkGetDeviceMemoryCommitment" ) ); - vkBindBufferMemory = PFN_vkBindBufferMemory( vkGetInstanceProcAddr( instance, "vkBindBufferMemory" ) ); - vkBindImageMemory = PFN_vkBindImageMemory( vkGetInstanceProcAddr( instance, "vkBindImageMemory" ) ); - vkGetBufferMemoryRequirements = PFN_vkGetBufferMemoryRequirements( vkGetInstanceProcAddr( instance, "vkGetBufferMemoryRequirements" ) ); - vkGetImageMemoryRequirements = PFN_vkGetImageMemoryRequirements( vkGetInstanceProcAddr( instance, "vkGetImageMemoryRequirements" ) ); - vkGetImageSparseMemoryRequirements = PFN_vkGetImageSparseMemoryRequirements( vkGetInstanceProcAddr( instance, "vkGetImageSparseMemoryRequirements" ) ); - vkGetPhysicalDeviceSparseImageFormatProperties = - PFN_vkGetPhysicalDeviceSparseImageFormatProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties" ) ); - vkQueueBindSparse = PFN_vkQueueBindSparse( vkGetInstanceProcAddr( instance, "vkQueueBindSparse" ) ); - vkCreateFence = PFN_vkCreateFence( vkGetInstanceProcAddr( instance, "vkCreateFence" ) ); - vkDestroyFence = PFN_vkDestroyFence( vkGetInstanceProcAddr( instance, "vkDestroyFence" ) ); - vkResetFences = PFN_vkResetFences( vkGetInstanceProcAddr( instance, "vkResetFences" ) ); - vkGetFenceStatus = PFN_vkGetFenceStatus( vkGetInstanceProcAddr( instance, "vkGetFenceStatus" ) ); - vkWaitForFences = PFN_vkWaitForFences( vkGetInstanceProcAddr( instance, "vkWaitForFences" ) ); - vkCreateSemaphore = PFN_vkCreateSemaphore( vkGetInstanceProcAddr( instance, "vkCreateSemaphore" ) ); - vkDestroySemaphore = PFN_vkDestroySemaphore( vkGetInstanceProcAddr( instance, "vkDestroySemaphore" ) ); - vkCreateEvent = PFN_vkCreateEvent( vkGetInstanceProcAddr( instance, "vkCreateEvent" ) ); - vkDestroyEvent = PFN_vkDestroyEvent( vkGetInstanceProcAddr( instance, "vkDestroyEvent" ) ); - vkGetEventStatus = PFN_vkGetEventStatus( vkGetInstanceProcAddr( instance, "vkGetEventStatus" ) ); - vkSetEvent = PFN_vkSetEvent( vkGetInstanceProcAddr( instance, "vkSetEvent" ) ); - vkResetEvent = PFN_vkResetEvent( vkGetInstanceProcAddr( instance, "vkResetEvent" ) ); - vkCreateQueryPool = PFN_vkCreateQueryPool( vkGetInstanceProcAddr( instance, "vkCreateQueryPool" ) ); - vkDestroyQueryPool = PFN_vkDestroyQueryPool( vkGetInstanceProcAddr( instance, "vkDestroyQueryPool" ) ); - vkGetQueryPoolResults = PFN_vkGetQueryPoolResults( vkGetInstanceProcAddr( instance, "vkGetQueryPoolResults" ) ); - vkCreateBuffer = PFN_vkCreateBuffer( vkGetInstanceProcAddr( instance, "vkCreateBuffer" ) ); - vkDestroyBuffer = PFN_vkDestroyBuffer( vkGetInstanceProcAddr( instance, "vkDestroyBuffer" ) ); - vkCreateBufferView = PFN_vkCreateBufferView( vkGetInstanceProcAddr( instance, "vkCreateBufferView" ) ); - vkDestroyBufferView = PFN_vkDestroyBufferView( vkGetInstanceProcAddr( instance, "vkDestroyBufferView" ) ); - vkCreateImage = PFN_vkCreateImage( vkGetInstanceProcAddr( instance, "vkCreateImage" ) ); - vkDestroyImage = PFN_vkDestroyImage( vkGetInstanceProcAddr( instance, "vkDestroyImage" ) ); - vkGetImageSubresourceLayout = PFN_vkGetImageSubresourceLayout( vkGetInstanceProcAddr( instance, "vkGetImageSubresourceLayout" ) ); - vkCreateImageView = PFN_vkCreateImageView( vkGetInstanceProcAddr( instance, "vkCreateImageView" ) ); - vkDestroyImageView = PFN_vkDestroyImageView( vkGetInstanceProcAddr( instance, "vkDestroyImageView" ) ); - vkCreateShaderModule = 
PFN_vkCreateShaderModule( vkGetInstanceProcAddr( instance, "vkCreateShaderModule" ) ); - vkDestroyShaderModule = PFN_vkDestroyShaderModule( vkGetInstanceProcAddr( instance, "vkDestroyShaderModule" ) ); - vkCreatePipelineCache = PFN_vkCreatePipelineCache( vkGetInstanceProcAddr( instance, "vkCreatePipelineCache" ) ); - vkDestroyPipelineCache = PFN_vkDestroyPipelineCache( vkGetInstanceProcAddr( instance, "vkDestroyPipelineCache" ) ); - vkGetPipelineCacheData = PFN_vkGetPipelineCacheData( vkGetInstanceProcAddr( instance, "vkGetPipelineCacheData" ) ); - vkMergePipelineCaches = PFN_vkMergePipelineCaches( vkGetInstanceProcAddr( instance, "vkMergePipelineCaches" ) ); - vkCreateGraphicsPipelines = PFN_vkCreateGraphicsPipelines( vkGetInstanceProcAddr( instance, "vkCreateGraphicsPipelines" ) ); - vkCreateComputePipelines = PFN_vkCreateComputePipelines( vkGetInstanceProcAddr( instance, "vkCreateComputePipelines" ) ); - vkDestroyPipeline = PFN_vkDestroyPipeline( vkGetInstanceProcAddr( instance, "vkDestroyPipeline" ) ); - vkCreatePipelineLayout = PFN_vkCreatePipelineLayout( vkGetInstanceProcAddr( instance, "vkCreatePipelineLayout" ) ); - vkDestroyPipelineLayout = PFN_vkDestroyPipelineLayout( vkGetInstanceProcAddr( instance, "vkDestroyPipelineLayout" ) ); - vkCreateSampler = PFN_vkCreateSampler( vkGetInstanceProcAddr( instance, "vkCreateSampler" ) ); - vkDestroySampler = PFN_vkDestroySampler( vkGetInstanceProcAddr( instance, "vkDestroySampler" ) ); - vkCreateDescriptorSetLayout = PFN_vkCreateDescriptorSetLayout( vkGetInstanceProcAddr( instance, "vkCreateDescriptorSetLayout" ) ); - vkDestroyDescriptorSetLayout = PFN_vkDestroyDescriptorSetLayout( vkGetInstanceProcAddr( instance, "vkDestroyDescriptorSetLayout" ) ); - vkCreateDescriptorPool = PFN_vkCreateDescriptorPool( vkGetInstanceProcAddr( instance, "vkCreateDescriptorPool" ) ); - vkDestroyDescriptorPool = PFN_vkDestroyDescriptorPool( vkGetInstanceProcAddr( instance, "vkDestroyDescriptorPool" ) ); - vkResetDescriptorPool = PFN_vkResetDescriptorPool( vkGetInstanceProcAddr( instance, "vkResetDescriptorPool" ) ); - vkAllocateDescriptorSets = PFN_vkAllocateDescriptorSets( vkGetInstanceProcAddr( instance, "vkAllocateDescriptorSets" ) ); - vkFreeDescriptorSets = PFN_vkFreeDescriptorSets( vkGetInstanceProcAddr( instance, "vkFreeDescriptorSets" ) ); - vkUpdateDescriptorSets = PFN_vkUpdateDescriptorSets( vkGetInstanceProcAddr( instance, "vkUpdateDescriptorSets" ) ); - vkCreateFramebuffer = PFN_vkCreateFramebuffer( vkGetInstanceProcAddr( instance, "vkCreateFramebuffer" ) ); - vkDestroyFramebuffer = PFN_vkDestroyFramebuffer( vkGetInstanceProcAddr( instance, "vkDestroyFramebuffer" ) ); - vkCreateRenderPass = PFN_vkCreateRenderPass( vkGetInstanceProcAddr( instance, "vkCreateRenderPass" ) ); - vkDestroyRenderPass = PFN_vkDestroyRenderPass( vkGetInstanceProcAddr( instance, "vkDestroyRenderPass" ) ); - vkGetRenderAreaGranularity = PFN_vkGetRenderAreaGranularity( vkGetInstanceProcAddr( instance, "vkGetRenderAreaGranularity" ) ); - vkCreateCommandPool = PFN_vkCreateCommandPool( vkGetInstanceProcAddr( instance, "vkCreateCommandPool" ) ); - vkDestroyCommandPool = PFN_vkDestroyCommandPool( vkGetInstanceProcAddr( instance, "vkDestroyCommandPool" ) ); - vkResetCommandPool = PFN_vkResetCommandPool( vkGetInstanceProcAddr( instance, "vkResetCommandPool" ) ); - vkAllocateCommandBuffers = PFN_vkAllocateCommandBuffers( vkGetInstanceProcAddr( instance, "vkAllocateCommandBuffers" ) ); - vkFreeCommandBuffers = PFN_vkFreeCommandBuffers( vkGetInstanceProcAddr( instance, 
"vkFreeCommandBuffers" ) ); - vkBeginCommandBuffer = PFN_vkBeginCommandBuffer( vkGetInstanceProcAddr( instance, "vkBeginCommandBuffer" ) ); - vkEndCommandBuffer = PFN_vkEndCommandBuffer( vkGetInstanceProcAddr( instance, "vkEndCommandBuffer" ) ); - vkResetCommandBuffer = PFN_vkResetCommandBuffer( vkGetInstanceProcAddr( instance, "vkResetCommandBuffer" ) ); - vkCmdBindPipeline = PFN_vkCmdBindPipeline( vkGetInstanceProcAddr( instance, "vkCmdBindPipeline" ) ); - vkCmdSetViewport = PFN_vkCmdSetViewport( vkGetInstanceProcAddr( instance, "vkCmdSetViewport" ) ); - vkCmdSetScissor = PFN_vkCmdSetScissor( vkGetInstanceProcAddr( instance, "vkCmdSetScissor" ) ); - vkCmdSetLineWidth = PFN_vkCmdSetLineWidth( vkGetInstanceProcAddr( instance, "vkCmdSetLineWidth" ) ); - vkCmdSetDepthBias = PFN_vkCmdSetDepthBias( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBias" ) ); - vkCmdSetBlendConstants = PFN_vkCmdSetBlendConstants( vkGetInstanceProcAddr( instance, "vkCmdSetBlendConstants" ) ); - vkCmdSetDepthBounds = PFN_vkCmdSetDepthBounds( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBounds" ) ); - vkCmdSetStencilCompareMask = PFN_vkCmdSetStencilCompareMask( vkGetInstanceProcAddr( instance, "vkCmdSetStencilCompareMask" ) ); - vkCmdSetStencilWriteMask = PFN_vkCmdSetStencilWriteMask( vkGetInstanceProcAddr( instance, "vkCmdSetStencilWriteMask" ) ); - vkCmdSetStencilReference = PFN_vkCmdSetStencilReference( vkGetInstanceProcAddr( instance, "vkCmdSetStencilReference" ) ); - vkCmdBindDescriptorSets = PFN_vkCmdBindDescriptorSets( vkGetInstanceProcAddr( instance, "vkCmdBindDescriptorSets" ) ); - vkCmdBindIndexBuffer = PFN_vkCmdBindIndexBuffer( vkGetInstanceProcAddr( instance, "vkCmdBindIndexBuffer" ) ); - vkCmdBindVertexBuffers = PFN_vkCmdBindVertexBuffers( vkGetInstanceProcAddr( instance, "vkCmdBindVertexBuffers" ) ); - vkCmdDraw = PFN_vkCmdDraw( vkGetInstanceProcAddr( instance, "vkCmdDraw" ) ); - vkCmdDrawIndexed = PFN_vkCmdDrawIndexed( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexed" ) ); - vkCmdDrawIndirect = PFN_vkCmdDrawIndirect( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirect" ) ); - vkCmdDrawIndexedIndirect = PFN_vkCmdDrawIndexedIndirect( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirect" ) ); - vkCmdDispatch = PFN_vkCmdDispatch( vkGetInstanceProcAddr( instance, "vkCmdDispatch" ) ); - vkCmdDispatchIndirect = PFN_vkCmdDispatchIndirect( vkGetInstanceProcAddr( instance, "vkCmdDispatchIndirect" ) ); - vkCmdCopyBuffer = PFN_vkCmdCopyBuffer( vkGetInstanceProcAddr( instance, "vkCmdCopyBuffer" ) ); - vkCmdCopyImage = PFN_vkCmdCopyImage( vkGetInstanceProcAddr( instance, "vkCmdCopyImage" ) ); - vkCmdBlitImage = PFN_vkCmdBlitImage( vkGetInstanceProcAddr( instance, "vkCmdBlitImage" ) ); - vkCmdCopyBufferToImage = PFN_vkCmdCopyBufferToImage( vkGetInstanceProcAddr( instance, "vkCmdCopyBufferToImage" ) ); - vkCmdCopyImageToBuffer = PFN_vkCmdCopyImageToBuffer( vkGetInstanceProcAddr( instance, "vkCmdCopyImageToBuffer" ) ); - vkCmdUpdateBuffer = PFN_vkCmdUpdateBuffer( vkGetInstanceProcAddr( instance, "vkCmdUpdateBuffer" ) ); - vkCmdFillBuffer = PFN_vkCmdFillBuffer( vkGetInstanceProcAddr( instance, "vkCmdFillBuffer" ) ); - vkCmdClearColorImage = PFN_vkCmdClearColorImage( vkGetInstanceProcAddr( instance, "vkCmdClearColorImage" ) ); - vkCmdClearDepthStencilImage = PFN_vkCmdClearDepthStencilImage( vkGetInstanceProcAddr( instance, "vkCmdClearDepthStencilImage" ) ); - vkCmdClearAttachments = PFN_vkCmdClearAttachments( vkGetInstanceProcAddr( instance, "vkCmdClearAttachments" ) ); - vkCmdResolveImage = 
PFN_vkCmdResolveImage( vkGetInstanceProcAddr( instance, "vkCmdResolveImage" ) ); - vkCmdSetEvent = PFN_vkCmdSetEvent( vkGetInstanceProcAddr( instance, "vkCmdSetEvent" ) ); - vkCmdResetEvent = PFN_vkCmdResetEvent( vkGetInstanceProcAddr( instance, "vkCmdResetEvent" ) ); - vkCmdWaitEvents = PFN_vkCmdWaitEvents( vkGetInstanceProcAddr( instance, "vkCmdWaitEvents" ) ); - vkCmdPipelineBarrier = PFN_vkCmdPipelineBarrier( vkGetInstanceProcAddr( instance, "vkCmdPipelineBarrier" ) ); - vkCmdBeginQuery = PFN_vkCmdBeginQuery( vkGetInstanceProcAddr( instance, "vkCmdBeginQuery" ) ); - vkCmdEndQuery = PFN_vkCmdEndQuery( vkGetInstanceProcAddr( instance, "vkCmdEndQuery" ) ); - vkCmdResetQueryPool = PFN_vkCmdResetQueryPool( vkGetInstanceProcAddr( instance, "vkCmdResetQueryPool" ) ); - vkCmdWriteTimestamp = PFN_vkCmdWriteTimestamp( vkGetInstanceProcAddr( instance, "vkCmdWriteTimestamp" ) ); - vkCmdCopyQueryPoolResults = PFN_vkCmdCopyQueryPoolResults( vkGetInstanceProcAddr( instance, "vkCmdCopyQueryPoolResults" ) ); - vkCmdPushConstants = PFN_vkCmdPushConstants( vkGetInstanceProcAddr( instance, "vkCmdPushConstants" ) ); - vkCmdBeginRenderPass = PFN_vkCmdBeginRenderPass( vkGetInstanceProcAddr( instance, "vkCmdBeginRenderPass" ) ); - vkCmdNextSubpass = PFN_vkCmdNextSubpass( vkGetInstanceProcAddr( instance, "vkCmdNextSubpass" ) ); - vkCmdEndRenderPass = PFN_vkCmdEndRenderPass( vkGetInstanceProcAddr( instance, "vkCmdEndRenderPass" ) ); - vkCmdExecuteCommands = PFN_vkCmdExecuteCommands( vkGetInstanceProcAddr( instance, "vkCmdExecuteCommands" ) ); - //=== VK_VERSION_1_1 === - vkBindBufferMemory2 = PFN_vkBindBufferMemory2( vkGetInstanceProcAddr( instance, "vkBindBufferMemory2" ) ); - vkBindImageMemory2 = PFN_vkBindImageMemory2( vkGetInstanceProcAddr( instance, "vkBindImageMemory2" ) ); - vkGetDeviceGroupPeerMemoryFeatures = PFN_vkGetDeviceGroupPeerMemoryFeatures( vkGetInstanceProcAddr( instance, "vkGetDeviceGroupPeerMemoryFeatures" ) ); - vkCmdSetDeviceMask = PFN_vkCmdSetDeviceMask( vkGetInstanceProcAddr( instance, "vkCmdSetDeviceMask" ) ); - vkCmdDispatchBase = PFN_vkCmdDispatchBase( vkGetInstanceProcAddr( instance, "vkCmdDispatchBase" ) ); - vkEnumeratePhysicalDeviceGroups = PFN_vkEnumeratePhysicalDeviceGroups( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceGroups" ) ); - vkGetImageMemoryRequirements2 = PFN_vkGetImageMemoryRequirements2( vkGetInstanceProcAddr( instance, "vkGetImageMemoryRequirements2" ) ); - vkGetBufferMemoryRequirements2 = PFN_vkGetBufferMemoryRequirements2( vkGetInstanceProcAddr( instance, "vkGetBufferMemoryRequirements2" ) ); - vkGetImageSparseMemoryRequirements2 = - PFN_vkGetImageSparseMemoryRequirements2( vkGetInstanceProcAddr( instance, "vkGetImageSparseMemoryRequirements2" ) ); - vkGetPhysicalDeviceFeatures2 = PFN_vkGetPhysicalDeviceFeatures2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures2" ) ); - vkGetPhysicalDeviceProperties2 = PFN_vkGetPhysicalDeviceProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties2" ) ); - vkGetPhysicalDeviceFormatProperties2 = - PFN_vkGetPhysicalDeviceFormatProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties2" ) ); - vkGetPhysicalDeviceImageFormatProperties2 = - PFN_vkGetPhysicalDeviceImageFormatProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties2" ) ); - vkGetPhysicalDeviceQueueFamilyProperties2 = - PFN_vkGetPhysicalDeviceQueueFamilyProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties2" ) ); - 
vkGetPhysicalDeviceMemoryProperties2 = - PFN_vkGetPhysicalDeviceMemoryProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties2" ) ); - vkGetPhysicalDeviceSparseImageFormatProperties2 = - PFN_vkGetPhysicalDeviceSparseImageFormatProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties2" ) ); - vkTrimCommandPool = PFN_vkTrimCommandPool( vkGetInstanceProcAddr( instance, "vkTrimCommandPool" ) ); - vkGetDeviceQueue2 = PFN_vkGetDeviceQueue2( vkGetInstanceProcAddr( instance, "vkGetDeviceQueue2" ) ); - vkCreateSamplerYcbcrConversion = PFN_vkCreateSamplerYcbcrConversion( vkGetInstanceProcAddr( instance, "vkCreateSamplerYcbcrConversion" ) ); - vkDestroySamplerYcbcrConversion = PFN_vkDestroySamplerYcbcrConversion( vkGetInstanceProcAddr( instance, "vkDestroySamplerYcbcrConversion" ) ); - vkCreateDescriptorUpdateTemplate = PFN_vkCreateDescriptorUpdateTemplate( vkGetInstanceProcAddr( instance, "vkCreateDescriptorUpdateTemplate" ) ); - vkDestroyDescriptorUpdateTemplate = PFN_vkDestroyDescriptorUpdateTemplate( vkGetInstanceProcAddr( instance, "vkDestroyDescriptorUpdateTemplate" ) ); - vkUpdateDescriptorSetWithTemplate = PFN_vkUpdateDescriptorSetWithTemplate( vkGetInstanceProcAddr( instance, "vkUpdateDescriptorSetWithTemplate" ) ); - vkGetPhysicalDeviceExternalBufferProperties = - PFN_vkGetPhysicalDeviceExternalBufferProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalBufferProperties" ) ); - vkGetPhysicalDeviceExternalFenceProperties = - PFN_vkGetPhysicalDeviceExternalFenceProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalFenceProperties" ) ); - vkGetPhysicalDeviceExternalSemaphoreProperties = - PFN_vkGetPhysicalDeviceExternalSemaphoreProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalSemaphoreProperties" ) ); - vkGetDescriptorSetLayoutSupport = PFN_vkGetDescriptorSetLayoutSupport( vkGetInstanceProcAddr( instance, "vkGetDescriptorSetLayoutSupport" ) ); //=== VK_VERSION_1_1 === vkBindBufferMemory2 = PFN_vkBindBufferMemory2( vkGetInstanceProcAddr( instance, "vkBindBufferMemory2" ) ); vkBindImageMemory2 = PFN_vkBindImageMemory2( vkGetInstanceProcAddr( instance, "vkBindImageMemory2" ) ); @@ -25150,21 +19146,6 @@ namespace VULKAN_HPP_NAMESPACE PFN_vkGetPhysicalDeviceExternalSemaphoreProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalSemaphoreProperties" ) ); vkGetDescriptorSetLayoutSupport = PFN_vkGetDescriptorSetLayoutSupport( vkGetInstanceProcAddr( instance, "vkGetDescriptorSetLayoutSupport" ) ); - //=== VK_VERSION_1_2 === - vkCmdDrawIndirectCount = PFN_vkCmdDrawIndirectCount( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectCount" ) ); - vkCmdDrawIndexedIndirectCount = PFN_vkCmdDrawIndexedIndirectCount( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirectCount" ) ); - vkCreateRenderPass2 = PFN_vkCreateRenderPass2( vkGetInstanceProcAddr( instance, "vkCreateRenderPass2" ) ); - vkCmdBeginRenderPass2 = PFN_vkCmdBeginRenderPass2( vkGetInstanceProcAddr( instance, "vkCmdBeginRenderPass2" ) ); - vkCmdNextSubpass2 = PFN_vkCmdNextSubpass2( vkGetInstanceProcAddr( instance, "vkCmdNextSubpass2" ) ); - vkCmdEndRenderPass2 = PFN_vkCmdEndRenderPass2( vkGetInstanceProcAddr( instance, "vkCmdEndRenderPass2" ) ); - vkResetQueryPool = PFN_vkResetQueryPool( vkGetInstanceProcAddr( instance, "vkResetQueryPool" ) ); - vkGetSemaphoreCounterValue = PFN_vkGetSemaphoreCounterValue( vkGetInstanceProcAddr( instance, "vkGetSemaphoreCounterValue" ) ); - vkWaitSemaphores = 
PFN_vkWaitSemaphores( vkGetInstanceProcAddr( instance, "vkWaitSemaphores" ) ); - vkSignalSemaphore = PFN_vkSignalSemaphore( vkGetInstanceProcAddr( instance, "vkSignalSemaphore" ) ); - vkGetBufferDeviceAddress = PFN_vkGetBufferDeviceAddress( vkGetInstanceProcAddr( instance, "vkGetBufferDeviceAddress" ) ); - vkGetBufferOpaqueCaptureAddress = PFN_vkGetBufferOpaqueCaptureAddress( vkGetInstanceProcAddr( instance, "vkGetBufferOpaqueCaptureAddress" ) ); - vkGetDeviceMemoryOpaqueCaptureAddress = - PFN_vkGetDeviceMemoryOpaqueCaptureAddress( vkGetInstanceProcAddr( instance, "vkGetDeviceMemoryOpaqueCaptureAddress" ) ); //=== VK_VERSION_1_2 === vkCmdDrawIndirectCount = PFN_vkCmdDrawIndirectCount( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectCount" ) ); vkCmdDrawIndexedIndirectCount = PFN_vkCmdDrawIndexedIndirectCount( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirectCount" ) ); @@ -25221,59 +19202,33 @@ namespace VULKAN_HPP_NAMESPACE vkGetDeviceImageMemoryRequirements = PFN_vkGetDeviceImageMemoryRequirements( vkGetInstanceProcAddr( instance, "vkGetDeviceImageMemoryRequirements" ) ); vkGetDeviceImageSparseMemoryRequirements = PFN_vkGetDeviceImageSparseMemoryRequirements( vkGetInstanceProcAddr( instance, "vkGetDeviceImageSparseMemoryRequirements" ) ); - //=== VK_VERSION_1_3 === - vkGetPhysicalDeviceToolProperties = PFN_vkGetPhysicalDeviceToolProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceToolProperties" ) ); - vkCreatePrivateDataSlot = PFN_vkCreatePrivateDataSlot( vkGetInstanceProcAddr( instance, "vkCreatePrivateDataSlot" ) ); - vkDestroyPrivateDataSlot = PFN_vkDestroyPrivateDataSlot( vkGetInstanceProcAddr( instance, "vkDestroyPrivateDataSlot" ) ); - vkSetPrivateData = PFN_vkSetPrivateData( vkGetInstanceProcAddr( instance, "vkSetPrivateData" ) ); - vkGetPrivateData = PFN_vkGetPrivateData( vkGetInstanceProcAddr( instance, "vkGetPrivateData" ) ); - vkCmdSetEvent2 = PFN_vkCmdSetEvent2( vkGetInstanceProcAddr( instance, "vkCmdSetEvent2" ) ); - vkCmdResetEvent2 = PFN_vkCmdResetEvent2( vkGetInstanceProcAddr( instance, "vkCmdResetEvent2" ) ); - vkCmdWaitEvents2 = PFN_vkCmdWaitEvents2( vkGetInstanceProcAddr( instance, "vkCmdWaitEvents2" ) ); - vkCmdPipelineBarrier2 = PFN_vkCmdPipelineBarrier2( vkGetInstanceProcAddr( instance, "vkCmdPipelineBarrier2" ) ); - vkCmdWriteTimestamp2 = PFN_vkCmdWriteTimestamp2( vkGetInstanceProcAddr( instance, "vkCmdWriteTimestamp2" ) ); - vkQueueSubmit2 = PFN_vkQueueSubmit2( vkGetInstanceProcAddr( instance, "vkQueueSubmit2" ) ); - vkCmdCopyBuffer2 = PFN_vkCmdCopyBuffer2( vkGetInstanceProcAddr( instance, "vkCmdCopyBuffer2" ) ); - vkCmdCopyImage2 = PFN_vkCmdCopyImage2( vkGetInstanceProcAddr( instance, "vkCmdCopyImage2" ) ); - vkCmdCopyBufferToImage2 = PFN_vkCmdCopyBufferToImage2( vkGetInstanceProcAddr( instance, "vkCmdCopyBufferToImage2" ) ); - vkCmdCopyImageToBuffer2 = PFN_vkCmdCopyImageToBuffer2( vkGetInstanceProcAddr( instance, "vkCmdCopyImageToBuffer2" ) ); - vkCmdBlitImage2 = PFN_vkCmdBlitImage2( vkGetInstanceProcAddr( instance, "vkCmdBlitImage2" ) ); - vkCmdResolveImage2 = PFN_vkCmdResolveImage2( vkGetInstanceProcAddr( instance, "vkCmdResolveImage2" ) ); - vkCmdBeginRendering = PFN_vkCmdBeginRendering( vkGetInstanceProcAddr( instance, "vkCmdBeginRendering" ) ); - vkCmdEndRendering = PFN_vkCmdEndRendering( vkGetInstanceProcAddr( instance, "vkCmdEndRendering" ) ); - vkCmdSetCullMode = PFN_vkCmdSetCullMode( vkGetInstanceProcAddr( instance, "vkCmdSetCullMode" ) ); - vkCmdSetFrontFace = PFN_vkCmdSetFrontFace( vkGetInstanceProcAddr( instance, 
"vkCmdSetFrontFace" ) ); - vkCmdSetPrimitiveTopology = PFN_vkCmdSetPrimitiveTopology( vkGetInstanceProcAddr( instance, "vkCmdSetPrimitiveTopology" ) ); - vkCmdSetViewportWithCount = PFN_vkCmdSetViewportWithCount( vkGetInstanceProcAddr( instance, "vkCmdSetViewportWithCount" ) ); - vkCmdSetScissorWithCount = PFN_vkCmdSetScissorWithCount( vkGetInstanceProcAddr( instance, "vkCmdSetScissorWithCount" ) ); - vkCmdBindVertexBuffers2 = PFN_vkCmdBindVertexBuffers2( vkGetInstanceProcAddr( instance, "vkCmdBindVertexBuffers2" ) ); - vkCmdSetDepthTestEnable = PFN_vkCmdSetDepthTestEnable( vkGetInstanceProcAddr( instance, "vkCmdSetDepthTestEnable" ) ); - vkCmdSetDepthWriteEnable = PFN_vkCmdSetDepthWriteEnable( vkGetInstanceProcAddr( instance, "vkCmdSetDepthWriteEnable" ) ); - vkCmdSetDepthCompareOp = PFN_vkCmdSetDepthCompareOp( vkGetInstanceProcAddr( instance, "vkCmdSetDepthCompareOp" ) ); - vkCmdSetDepthBoundsTestEnable = PFN_vkCmdSetDepthBoundsTestEnable( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBoundsTestEnable" ) ); - vkCmdSetStencilTestEnable = PFN_vkCmdSetStencilTestEnable( vkGetInstanceProcAddr( instance, "vkCmdSetStencilTestEnable" ) ); - vkCmdSetStencilOp = PFN_vkCmdSetStencilOp( vkGetInstanceProcAddr( instance, "vkCmdSetStencilOp" ) ); - vkCmdSetRasterizerDiscardEnable = PFN_vkCmdSetRasterizerDiscardEnable( vkGetInstanceProcAddr( instance, "vkCmdSetRasterizerDiscardEnable" ) ); - vkCmdSetDepthBiasEnable = PFN_vkCmdSetDepthBiasEnable( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBiasEnable" ) ); - vkCmdSetPrimitiveRestartEnable = PFN_vkCmdSetPrimitiveRestartEnable( vkGetInstanceProcAddr( instance, "vkCmdSetPrimitiveRestartEnable" ) ); - vkGetDeviceBufferMemoryRequirements = - PFN_vkGetDeviceBufferMemoryRequirements( vkGetInstanceProcAddr( instance, "vkGetDeviceBufferMemoryRequirements" ) ); - vkGetDeviceImageMemoryRequirements = PFN_vkGetDeviceImageMemoryRequirements( vkGetInstanceProcAddr( instance, "vkGetDeviceImageMemoryRequirements" ) ); - vkGetDeviceImageSparseMemoryRequirements = - PFN_vkGetDeviceImageSparseMemoryRequirements( vkGetInstanceProcAddr( instance, "vkGetDeviceImageSparseMemoryRequirements" ) ); + + //=== VK_VERSION_1_4 === + vkCmdSetLineStipple = PFN_vkCmdSetLineStipple( vkGetInstanceProcAddr( instance, "vkCmdSetLineStipple" ) ); + vkMapMemory2 = PFN_vkMapMemory2( vkGetInstanceProcAddr( instance, "vkMapMemory2" ) ); + vkUnmapMemory2 = PFN_vkUnmapMemory2( vkGetInstanceProcAddr( instance, "vkUnmapMemory2" ) ); + vkCmdBindIndexBuffer2 = PFN_vkCmdBindIndexBuffer2( vkGetInstanceProcAddr( instance, "vkCmdBindIndexBuffer2" ) ); + vkGetRenderingAreaGranularity = PFN_vkGetRenderingAreaGranularity( vkGetInstanceProcAddr( instance, "vkGetRenderingAreaGranularity" ) ); + vkGetDeviceImageSubresourceLayout = PFN_vkGetDeviceImageSubresourceLayout( vkGetInstanceProcAddr( instance, "vkGetDeviceImageSubresourceLayout" ) ); + vkGetImageSubresourceLayout2 = PFN_vkGetImageSubresourceLayout2( vkGetInstanceProcAddr( instance, "vkGetImageSubresourceLayout2" ) ); + vkCmdPushDescriptorSet = PFN_vkCmdPushDescriptorSet( vkGetInstanceProcAddr( instance, "vkCmdPushDescriptorSet" ) ); + vkCmdPushDescriptorSetWithTemplate = PFN_vkCmdPushDescriptorSetWithTemplate( vkGetInstanceProcAddr( instance, "vkCmdPushDescriptorSetWithTemplate" ) ); + vkCmdSetRenderingAttachmentLocations = + PFN_vkCmdSetRenderingAttachmentLocations( vkGetInstanceProcAddr( instance, "vkCmdSetRenderingAttachmentLocations" ) ); + vkCmdSetRenderingInputAttachmentIndices = + PFN_vkCmdSetRenderingInputAttachmentIndices( 
vkGetInstanceProcAddr( instance, "vkCmdSetRenderingInputAttachmentIndices" ) ); + vkCmdBindDescriptorSets2 = PFN_vkCmdBindDescriptorSets2( vkGetInstanceProcAddr( instance, "vkCmdBindDescriptorSets2" ) ); + vkCmdPushConstants2 = PFN_vkCmdPushConstants2( vkGetInstanceProcAddr( instance, "vkCmdPushConstants2" ) ); + vkCmdPushDescriptorSet2 = PFN_vkCmdPushDescriptorSet2( vkGetInstanceProcAddr( instance, "vkCmdPushDescriptorSet2" ) ); + vkCmdPushDescriptorSetWithTemplate2 = + PFN_vkCmdPushDescriptorSetWithTemplate2( vkGetInstanceProcAddr( instance, "vkCmdPushDescriptorSetWithTemplate2" ) ); + vkCopyMemoryToImage = PFN_vkCopyMemoryToImage( vkGetInstanceProcAddr( instance, "vkCopyMemoryToImage" ) ); + vkCopyImageToMemory = PFN_vkCopyImageToMemory( vkGetInstanceProcAddr( instance, "vkCopyImageToMemory" ) ); + vkCopyImageToImage = PFN_vkCopyImageToImage( vkGetInstanceProcAddr( instance, "vkCopyImageToImage" ) ); + vkTransitionImageLayout = PFN_vkTransitionImageLayout( vkGetInstanceProcAddr( instance, "vkTransitionImageLayout" ) ); //=== VK_KHR_surface === vkDestroySurfaceKHR = PFN_vkDestroySurfaceKHR( vkGetInstanceProcAddr( instance, "vkDestroySurfaceKHR" ) ); - vkGetPhysicalDeviceSurfaceSupportKHR = - PFN_vkGetPhysicalDeviceSurfaceSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceSupportKHR" ) ); - vkGetPhysicalDeviceSurfaceCapabilitiesKHR = - PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilitiesKHR" ) ); - vkGetPhysicalDeviceSurfaceFormatsKHR = - PFN_vkGetPhysicalDeviceSurfaceFormatsKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceFormatsKHR" ) ); - vkGetPhysicalDeviceSurfacePresentModesKHR = - PFN_vkGetPhysicalDeviceSurfacePresentModesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfacePresentModesKHR" ) ); - //=== VK_KHR_surface === - vkDestroySurfaceKHR = PFN_vkDestroySurfaceKHR( vkGetInstanceProcAddr( instance, "vkDestroySurfaceKHR" ) ); vkGetPhysicalDeviceSurfaceSupportKHR = PFN_vkGetPhysicalDeviceSurfaceSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceSupportKHR" ) ); vkGetPhysicalDeviceSurfaceCapabilitiesKHR = @@ -25296,31 +19251,7 @@ namespace VULKAN_HPP_NAMESPACE vkGetPhysicalDevicePresentRectanglesKHR = PFN_vkGetPhysicalDevicePresentRectanglesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDevicePresentRectanglesKHR" ) ); vkAcquireNextImage2KHR = PFN_vkAcquireNextImage2KHR( vkGetInstanceProcAddr( instance, "vkAcquireNextImage2KHR" ) ); - //=== VK_KHR_swapchain === - vkCreateSwapchainKHR = PFN_vkCreateSwapchainKHR( vkGetInstanceProcAddr( instance, "vkCreateSwapchainKHR" ) ); - vkDestroySwapchainKHR = PFN_vkDestroySwapchainKHR( vkGetInstanceProcAddr( instance, "vkDestroySwapchainKHR" ) ); - vkGetSwapchainImagesKHR = PFN_vkGetSwapchainImagesKHR( vkGetInstanceProcAddr( instance, "vkGetSwapchainImagesKHR" ) ); - vkAcquireNextImageKHR = PFN_vkAcquireNextImageKHR( vkGetInstanceProcAddr( instance, "vkAcquireNextImageKHR" ) ); - vkQueuePresentKHR = PFN_vkQueuePresentKHR( vkGetInstanceProcAddr( instance, "vkQueuePresentKHR" ) ); - vkGetDeviceGroupPresentCapabilitiesKHR = - PFN_vkGetDeviceGroupPresentCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceGroupPresentCapabilitiesKHR" ) ); - vkGetDeviceGroupSurfacePresentModesKHR = - PFN_vkGetDeviceGroupSurfacePresentModesKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceGroupSurfacePresentModesKHR" ) ); - vkGetPhysicalDevicePresentRectanglesKHR = - PFN_vkGetPhysicalDevicePresentRectanglesKHR( 
vkGetInstanceProcAddr( instance, "vkGetPhysicalDevicePresentRectanglesKHR" ) ); - vkAcquireNextImage2KHR = PFN_vkAcquireNextImage2KHR( vkGetInstanceProcAddr( instance, "vkAcquireNextImage2KHR" ) ); - //=== VK_KHR_display === - vkGetPhysicalDeviceDisplayPropertiesKHR = - PFN_vkGetPhysicalDeviceDisplayPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPropertiesKHR" ) ); - vkGetPhysicalDeviceDisplayPlanePropertiesKHR = - PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPlanePropertiesKHR" ) ); - vkGetDisplayPlaneSupportedDisplaysKHR = - PFN_vkGetDisplayPlaneSupportedDisplaysKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneSupportedDisplaysKHR" ) ); - vkGetDisplayModePropertiesKHR = PFN_vkGetDisplayModePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayModePropertiesKHR" ) ); - vkCreateDisplayModeKHR = PFN_vkCreateDisplayModeKHR( vkGetInstanceProcAddr( instance, "vkCreateDisplayModeKHR" ) ); - vkGetDisplayPlaneCapabilitiesKHR = PFN_vkGetDisplayPlaneCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneCapabilitiesKHR" ) ); - vkCreateDisplayPlaneSurfaceKHR = PFN_vkCreateDisplayPlaneSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateDisplayPlaneSurfaceKHR" ) ); //=== VK_KHR_display === vkGetPhysicalDeviceDisplayPropertiesKHR = PFN_vkGetPhysicalDeviceDisplayPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPropertiesKHR" ) ); @@ -25335,18 +19266,12 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_KHR_display_swapchain === vkCreateSharedSwapchainsKHR = PFN_vkCreateSharedSwapchainsKHR( vkGetInstanceProcAddr( instance, "vkCreateSharedSwapchainsKHR" ) ); - //=== VK_KHR_display_swapchain === - vkCreateSharedSwapchainsKHR = PFN_vkCreateSharedSwapchainsKHR( vkGetInstanceProcAddr( instance, "vkCreateSharedSwapchainsKHR" ) ); #if defined( VK_USE_PLATFORM_XLIB_KHR ) //=== VK_KHR_xlib_surface === vkCreateXlibSurfaceKHR = PFN_vkCreateXlibSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateXlibSurfaceKHR" ) ); vkGetPhysicalDeviceXlibPresentationSupportKHR = PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceXlibPresentationSupportKHR" ) ); - //=== VK_KHR_xlib_surface === - vkCreateXlibSurfaceKHR = PFN_vkCreateXlibSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateXlibSurfaceKHR" ) ); - vkGetPhysicalDeviceXlibPresentationSupportKHR = - PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceXlibPresentationSupportKHR" ) ); #endif /*VK_USE_PLATFORM_XLIB_KHR*/ #if defined( VK_USE_PLATFORM_XCB_KHR ) @@ -25354,10 +19279,6 @@ namespace VULKAN_HPP_NAMESPACE vkCreateXcbSurfaceKHR = PFN_vkCreateXcbSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateXcbSurfaceKHR" ) ); vkGetPhysicalDeviceXcbPresentationSupportKHR = PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceXcbPresentationSupportKHR" ) ); - //=== VK_KHR_xcb_surface === - vkCreateXcbSurfaceKHR = PFN_vkCreateXcbSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateXcbSurfaceKHR" ) ); - vkGetPhysicalDeviceXcbPresentationSupportKHR = - PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceXcbPresentationSupportKHR" ) ); #endif /*VK_USE_PLATFORM_XCB_KHR*/ #if defined( VK_USE_PLATFORM_WAYLAND_KHR ) @@ -25365,17 +19286,11 @@ namespace VULKAN_HPP_NAMESPACE vkCreateWaylandSurfaceKHR = PFN_vkCreateWaylandSurfaceKHR( 
vkGetInstanceProcAddr( instance, "vkCreateWaylandSurfaceKHR" ) ); vkGetPhysicalDeviceWaylandPresentationSupportKHR = PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceWaylandPresentationSupportKHR" ) ); - //=== VK_KHR_wayland_surface === - vkCreateWaylandSurfaceKHR = PFN_vkCreateWaylandSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateWaylandSurfaceKHR" ) ); - vkGetPhysicalDeviceWaylandPresentationSupportKHR = - PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceWaylandPresentationSupportKHR" ) ); #endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ #if defined( VK_USE_PLATFORM_ANDROID_KHR ) //=== VK_KHR_android_surface === vkCreateAndroidSurfaceKHR = PFN_vkCreateAndroidSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateAndroidSurfaceKHR" ) ); - //=== VK_KHR_android_surface === - vkCreateAndroidSurfaceKHR = PFN_vkCreateAndroidSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateAndroidSurfaceKHR" ) ); #endif /*VK_USE_PLATFORM_ANDROID_KHR*/ #if defined( VK_USE_PLATFORM_WIN32_KHR ) @@ -25383,27 +19298,13 @@ namespace VULKAN_HPP_NAMESPACE vkCreateWin32SurfaceKHR = PFN_vkCreateWin32SurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateWin32SurfaceKHR" ) ); vkGetPhysicalDeviceWin32PresentationSupportKHR = PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceWin32PresentationSupportKHR" ) ); - //=== VK_KHR_win32_surface === - vkCreateWin32SurfaceKHR = PFN_vkCreateWin32SurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateWin32SurfaceKHR" ) ); - vkGetPhysicalDeviceWin32PresentationSupportKHR = - PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceWin32PresentationSupportKHR" ) ); #endif /*VK_USE_PLATFORM_WIN32_KHR*/ //=== VK_EXT_debug_report === vkCreateDebugReportCallbackEXT = PFN_vkCreateDebugReportCallbackEXT( vkGetInstanceProcAddr( instance, "vkCreateDebugReportCallbackEXT" ) ); vkDestroyDebugReportCallbackEXT = PFN_vkDestroyDebugReportCallbackEXT( vkGetInstanceProcAddr( instance, "vkDestroyDebugReportCallbackEXT" ) ); vkDebugReportMessageEXT = PFN_vkDebugReportMessageEXT( vkGetInstanceProcAddr( instance, "vkDebugReportMessageEXT" ) ); - //=== VK_EXT_debug_report === - vkCreateDebugReportCallbackEXT = PFN_vkCreateDebugReportCallbackEXT( vkGetInstanceProcAddr( instance, "vkCreateDebugReportCallbackEXT" ) ); - vkDestroyDebugReportCallbackEXT = PFN_vkDestroyDebugReportCallbackEXT( vkGetInstanceProcAddr( instance, "vkDestroyDebugReportCallbackEXT" ) ); - vkDebugReportMessageEXT = PFN_vkDebugReportMessageEXT( vkGetInstanceProcAddr( instance, "vkDebugReportMessageEXT" ) ); - //=== VK_EXT_debug_marker === - vkDebugMarkerSetObjectTagEXT = PFN_vkDebugMarkerSetObjectTagEXT( vkGetInstanceProcAddr( instance, "vkDebugMarkerSetObjectTagEXT" ) ); - vkDebugMarkerSetObjectNameEXT = PFN_vkDebugMarkerSetObjectNameEXT( vkGetInstanceProcAddr( instance, "vkDebugMarkerSetObjectNameEXT" ) ); - vkCmdDebugMarkerBeginEXT = PFN_vkCmdDebugMarkerBeginEXT( vkGetInstanceProcAddr( instance, "vkCmdDebugMarkerBeginEXT" ) ); - vkCmdDebugMarkerEndEXT = PFN_vkCmdDebugMarkerEndEXT( vkGetInstanceProcAddr( instance, "vkCmdDebugMarkerEndEXT" ) ); - vkCmdDebugMarkerInsertEXT = PFN_vkCmdDebugMarkerInsertEXT( vkGetInstanceProcAddr( instance, "vkCmdDebugMarkerInsertEXT" ) ); //=== VK_EXT_debug_marker === vkDebugMarkerSetObjectTagEXT = PFN_vkDebugMarkerSetObjectTagEXT( vkGetInstanceProcAddr( instance, 
"vkDebugMarkerSetObjectTagEXT" ) ); vkDebugMarkerSetObjectNameEXT = PFN_vkDebugMarkerSetObjectNameEXT( vkGetInstanceProcAddr( instance, "vkDebugMarkerSetObjectNameEXT" ) ); @@ -25411,22 +19312,6 @@ namespace VULKAN_HPP_NAMESPACE vkCmdDebugMarkerEndEXT = PFN_vkCmdDebugMarkerEndEXT( vkGetInstanceProcAddr( instance, "vkCmdDebugMarkerEndEXT" ) ); vkCmdDebugMarkerInsertEXT = PFN_vkCmdDebugMarkerInsertEXT( vkGetInstanceProcAddr( instance, "vkCmdDebugMarkerInsertEXT" ) ); - //=== VK_KHR_video_queue === - vkGetPhysicalDeviceVideoCapabilitiesKHR = - PFN_vkGetPhysicalDeviceVideoCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceVideoCapabilitiesKHR" ) ); - vkGetPhysicalDeviceVideoFormatPropertiesKHR = - PFN_vkGetPhysicalDeviceVideoFormatPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceVideoFormatPropertiesKHR" ) ); - vkCreateVideoSessionKHR = PFN_vkCreateVideoSessionKHR( vkGetInstanceProcAddr( instance, "vkCreateVideoSessionKHR" ) ); - vkDestroyVideoSessionKHR = PFN_vkDestroyVideoSessionKHR( vkGetInstanceProcAddr( instance, "vkDestroyVideoSessionKHR" ) ); - vkGetVideoSessionMemoryRequirementsKHR = - PFN_vkGetVideoSessionMemoryRequirementsKHR( vkGetInstanceProcAddr( instance, "vkGetVideoSessionMemoryRequirementsKHR" ) ); - vkBindVideoSessionMemoryKHR = PFN_vkBindVideoSessionMemoryKHR( vkGetInstanceProcAddr( instance, "vkBindVideoSessionMemoryKHR" ) ); - vkCreateVideoSessionParametersKHR = PFN_vkCreateVideoSessionParametersKHR( vkGetInstanceProcAddr( instance, "vkCreateVideoSessionParametersKHR" ) ); - vkUpdateVideoSessionParametersKHR = PFN_vkUpdateVideoSessionParametersKHR( vkGetInstanceProcAddr( instance, "vkUpdateVideoSessionParametersKHR" ) ); - vkDestroyVideoSessionParametersKHR = PFN_vkDestroyVideoSessionParametersKHR( vkGetInstanceProcAddr( instance, "vkDestroyVideoSessionParametersKHR" ) ); - vkCmdBeginVideoCodingKHR = PFN_vkCmdBeginVideoCodingKHR( vkGetInstanceProcAddr( instance, "vkCmdBeginVideoCodingKHR" ) ); - vkCmdEndVideoCodingKHR = PFN_vkCmdEndVideoCodingKHR( vkGetInstanceProcAddr( instance, "vkCmdEndVideoCodingKHR" ) ); - vkCmdControlVideoCodingKHR = PFN_vkCmdControlVideoCodingKHR( vkGetInstanceProcAddr( instance, "vkCmdControlVideoCodingKHR" ) ); //=== VK_KHR_video_queue === vkGetPhysicalDeviceVideoCapabilitiesKHR = PFN_vkGetPhysicalDeviceVideoCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceVideoCapabilitiesKHR" ) ); @@ -25444,19 +19329,9 @@ namespace VULKAN_HPP_NAMESPACE vkCmdEndVideoCodingKHR = PFN_vkCmdEndVideoCodingKHR( vkGetInstanceProcAddr( instance, "vkCmdEndVideoCodingKHR" ) ); vkCmdControlVideoCodingKHR = PFN_vkCmdControlVideoCodingKHR( vkGetInstanceProcAddr( instance, "vkCmdControlVideoCodingKHR" ) ); - //=== VK_KHR_video_decode_queue === - vkCmdDecodeVideoKHR = PFN_vkCmdDecodeVideoKHR( vkGetInstanceProcAddr( instance, "vkCmdDecodeVideoKHR" ) ); //=== VK_KHR_video_decode_queue === vkCmdDecodeVideoKHR = PFN_vkCmdDecodeVideoKHR( vkGetInstanceProcAddr( instance, "vkCmdDecodeVideoKHR" ) ); - //=== VK_EXT_transform_feedback === - vkCmdBindTransformFeedbackBuffersEXT = - PFN_vkCmdBindTransformFeedbackBuffersEXT( vkGetInstanceProcAddr( instance, "vkCmdBindTransformFeedbackBuffersEXT" ) ); - vkCmdBeginTransformFeedbackEXT = PFN_vkCmdBeginTransformFeedbackEXT( vkGetInstanceProcAddr( instance, "vkCmdBeginTransformFeedbackEXT" ) ); - vkCmdEndTransformFeedbackEXT = PFN_vkCmdEndTransformFeedbackEXT( vkGetInstanceProcAddr( instance, "vkCmdEndTransformFeedbackEXT" ) ); - vkCmdBeginQueryIndexedEXT = PFN_vkCmdBeginQueryIndexedEXT( 
vkGetInstanceProcAddr( instance, "vkCmdBeginQueryIndexedEXT" ) ); - vkCmdEndQueryIndexedEXT = PFN_vkCmdEndQueryIndexedEXT( vkGetInstanceProcAddr( instance, "vkCmdEndQueryIndexedEXT" ) ); - vkCmdDrawIndirectByteCountEXT = PFN_vkCmdDrawIndirectByteCountEXT( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectByteCountEXT" ) ); //=== VK_EXT_transform_feedback === vkCmdBindTransformFeedbackBuffersEXT = PFN_vkCmdBindTransformFeedbackBuffersEXT( vkGetInstanceProcAddr( instance, "vkCmdBindTransformFeedbackBuffersEXT" ) ); @@ -25466,12 +19341,6 @@ namespace VULKAN_HPP_NAMESPACE vkCmdEndQueryIndexedEXT = PFN_vkCmdEndQueryIndexedEXT( vkGetInstanceProcAddr( instance, "vkCmdEndQueryIndexedEXT" ) ); vkCmdDrawIndirectByteCountEXT = PFN_vkCmdDrawIndirectByteCountEXT( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectByteCountEXT" ) ); - //=== VK_NVX_binary_import === - vkCreateCuModuleNVX = PFN_vkCreateCuModuleNVX( vkGetInstanceProcAddr( instance, "vkCreateCuModuleNVX" ) ); - vkCreateCuFunctionNVX = PFN_vkCreateCuFunctionNVX( vkGetInstanceProcAddr( instance, "vkCreateCuFunctionNVX" ) ); - vkDestroyCuModuleNVX = PFN_vkDestroyCuModuleNVX( vkGetInstanceProcAddr( instance, "vkDestroyCuModuleNVX" ) ); - vkDestroyCuFunctionNVX = PFN_vkDestroyCuFunctionNVX( vkGetInstanceProcAddr( instance, "vkDestroyCuFunctionNVX" ) ); - vkCmdCuLaunchKernelNVX = PFN_vkCmdCuLaunchKernelNVX( vkGetInstanceProcAddr( instance, "vkCmdCuLaunchKernelNVX" ) ); //=== VK_NVX_binary_import === vkCreateCuModuleNVX = PFN_vkCreateCuModuleNVX( vkGetInstanceProcAddr( instance, "vkCreateCuModuleNVX" ) ); vkCreateCuFunctionNVX = PFN_vkCreateCuFunctionNVX( vkGetInstanceProcAddr( instance, "vkCreateCuFunctionNVX" ) ); @@ -25480,16 +19349,10 @@ namespace VULKAN_HPP_NAMESPACE vkCmdCuLaunchKernelNVX = PFN_vkCmdCuLaunchKernelNVX( vkGetInstanceProcAddr( instance, "vkCmdCuLaunchKernelNVX" ) ); //=== VK_NVX_image_view_handle === - vkGetImageViewHandleNVX = PFN_vkGetImageViewHandleNVX( vkGetInstanceProcAddr( instance, "vkGetImageViewHandleNVX" ) ); - vkGetImageViewAddressNVX = PFN_vkGetImageViewAddressNVX( vkGetInstanceProcAddr( instance, "vkGetImageViewAddressNVX" ) ); + vkGetImageViewHandleNVX = PFN_vkGetImageViewHandleNVX( vkGetInstanceProcAddr( instance, "vkGetImageViewHandleNVX" ) ); + vkGetImageViewHandle64NVX = PFN_vkGetImageViewHandle64NVX( vkGetInstanceProcAddr( instance, "vkGetImageViewHandle64NVX" ) ); + vkGetImageViewAddressNVX = PFN_vkGetImageViewAddressNVX( vkGetInstanceProcAddr( instance, "vkGetImageViewAddressNVX" ) ); - //=== VK_AMD_draw_indirect_count === - vkCmdDrawIndirectCountAMD = PFN_vkCmdDrawIndirectCountAMD( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectCountAMD" ) ); - if ( !vkCmdDrawIndirectCount ) - vkCmdDrawIndirectCount = vkCmdDrawIndirectCountAMD; - vkCmdDrawIndexedIndirectCountAMD = PFN_vkCmdDrawIndexedIndirectCountAMD( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirectCountAMD" ) ); - if ( !vkCmdDrawIndexedIndirectCount ) - vkCmdDrawIndexedIndirectCount = vkCmdDrawIndexedIndirectCountAMD; //=== VK_AMD_draw_indirect_count === vkCmdDrawIndirectCountAMD = PFN_vkCmdDrawIndirectCountAMD( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectCountAMD" ) ); if ( !vkCmdDrawIndirectCount ) @@ -25500,8 +19363,6 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_AMD_shader_info === vkGetShaderInfoAMD = PFN_vkGetShaderInfoAMD( vkGetInstanceProcAddr( instance, "vkGetShaderInfoAMD" ) ); - //=== VK_AMD_shader_info === - vkGetShaderInfoAMD = PFN_vkGetShaderInfoAMD( vkGetInstanceProcAddr( instance, "vkGetShaderInfoAMD" ) ); //=== 
VK_KHR_dynamic_rendering === vkCmdBeginRenderingKHR = PFN_vkCmdBeginRenderingKHR( vkGetInstanceProcAddr( instance, "vkCmdBeginRenderingKHR" ) ); @@ -25510,33 +19371,19 @@ namespace VULKAN_HPP_NAMESPACE vkCmdEndRenderingKHR = PFN_vkCmdEndRenderingKHR( vkGetInstanceProcAddr( instance, "vkCmdEndRenderingKHR" ) ); if ( !vkCmdEndRendering ) vkCmdEndRendering = vkCmdEndRenderingKHR; - //=== VK_KHR_dynamic_rendering === - vkCmdBeginRenderingKHR = PFN_vkCmdBeginRenderingKHR( vkGetInstanceProcAddr( instance, "vkCmdBeginRenderingKHR" ) ); - if ( !vkCmdBeginRendering ) - vkCmdBeginRendering = vkCmdBeginRenderingKHR; - vkCmdEndRenderingKHR = PFN_vkCmdEndRenderingKHR( vkGetInstanceProcAddr( instance, "vkCmdEndRenderingKHR" ) ); - if ( !vkCmdEndRendering ) - vkCmdEndRendering = vkCmdEndRenderingKHR; #if defined( VK_USE_PLATFORM_GGP ) //=== VK_GGP_stream_descriptor_surface === vkCreateStreamDescriptorSurfaceGGP = PFN_vkCreateStreamDescriptorSurfaceGGP( vkGetInstanceProcAddr( instance, "vkCreateStreamDescriptorSurfaceGGP" ) ); - //=== VK_GGP_stream_descriptor_surface === - vkCreateStreamDescriptorSurfaceGGP = PFN_vkCreateStreamDescriptorSurfaceGGP( vkGetInstanceProcAddr( instance, "vkCreateStreamDescriptorSurfaceGGP" ) ); #endif /*VK_USE_PLATFORM_GGP*/ //=== VK_NV_external_memory_capabilities === vkGetPhysicalDeviceExternalImageFormatPropertiesNV = PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalImageFormatPropertiesNV" ) ); - //=== VK_NV_external_memory_capabilities === - vkGetPhysicalDeviceExternalImageFormatPropertiesNV = - PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalImageFormatPropertiesNV" ) ); #if defined( VK_USE_PLATFORM_WIN32_KHR ) //=== VK_NV_external_memory_win32 === vkGetMemoryWin32HandleNV = PFN_vkGetMemoryWin32HandleNV( vkGetInstanceProcAddr( instance, "vkGetMemoryWin32HandleNV" ) ); - //=== VK_NV_external_memory_win32 === - vkGetMemoryWin32HandleNV = PFN_vkGetMemoryWin32HandleNV( vkGetInstanceProcAddr( instance, "vkGetMemoryWin32HandleNV" ) ); #endif /*VK_USE_PLATFORM_WIN32_KHR*/ //=== VK_KHR_get_physical_device_properties2 === @@ -25544,33 +19391,6 @@ namespace VULKAN_HPP_NAMESPACE if ( !vkGetPhysicalDeviceFeatures2 ) vkGetPhysicalDeviceFeatures2 = vkGetPhysicalDeviceFeatures2KHR; vkGetPhysicalDeviceProperties2KHR = PFN_vkGetPhysicalDeviceProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties2KHR" ) ); - if ( !vkGetPhysicalDeviceProperties2 ) - vkGetPhysicalDeviceProperties2 = vkGetPhysicalDeviceProperties2KHR; - vkGetPhysicalDeviceFormatProperties2KHR = - PFN_vkGetPhysicalDeviceFormatProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties2KHR" ) ); - if ( !vkGetPhysicalDeviceFormatProperties2 ) - vkGetPhysicalDeviceFormatProperties2 = vkGetPhysicalDeviceFormatProperties2KHR; - vkGetPhysicalDeviceImageFormatProperties2KHR = - PFN_vkGetPhysicalDeviceImageFormatProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties2KHR" ) ); - if ( !vkGetPhysicalDeviceImageFormatProperties2 ) - vkGetPhysicalDeviceImageFormatProperties2 = vkGetPhysicalDeviceImageFormatProperties2KHR; - vkGetPhysicalDeviceQueueFamilyProperties2KHR = - PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties2KHR" ) ); - if ( !vkGetPhysicalDeviceQueueFamilyProperties2 ) - vkGetPhysicalDeviceQueueFamilyProperties2 = 
vkGetPhysicalDeviceQueueFamilyProperties2KHR; - vkGetPhysicalDeviceMemoryProperties2KHR = - PFN_vkGetPhysicalDeviceMemoryProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties2KHR" ) ); - if ( !vkGetPhysicalDeviceMemoryProperties2 ) - vkGetPhysicalDeviceMemoryProperties2 = vkGetPhysicalDeviceMemoryProperties2KHR; - vkGetPhysicalDeviceSparseImageFormatProperties2KHR = - PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties2KHR" ) ); - if ( !vkGetPhysicalDeviceSparseImageFormatProperties2 ) - vkGetPhysicalDeviceSparseImageFormatProperties2 = vkGetPhysicalDeviceSparseImageFormatProperties2KHR; - //=== VK_KHR_get_physical_device_properties2 === - vkGetPhysicalDeviceFeatures2KHR = PFN_vkGetPhysicalDeviceFeatures2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures2KHR" ) ); - if ( !vkGetPhysicalDeviceFeatures2 ) - vkGetPhysicalDeviceFeatures2 = vkGetPhysicalDeviceFeatures2KHR; - vkGetPhysicalDeviceProperties2KHR = PFN_vkGetPhysicalDeviceProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties2KHR" ) ); if ( !vkGetPhysicalDeviceProperties2 ) vkGetPhysicalDeviceProperties2 = vkGetPhysicalDeviceProperties2KHR; vkGetPhysicalDeviceFormatProperties2KHR = @@ -25605,40 +19425,19 @@ namespace VULKAN_HPP_NAMESPACE vkCmdDispatchBaseKHR = PFN_vkCmdDispatchBaseKHR( vkGetInstanceProcAddr( instance, "vkCmdDispatchBaseKHR" ) ); if ( !vkCmdDispatchBase ) vkCmdDispatchBase = vkCmdDispatchBaseKHR; - //=== VK_KHR_device_group === - vkGetDeviceGroupPeerMemoryFeaturesKHR = - PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceGroupPeerMemoryFeaturesKHR" ) ); - if ( !vkGetDeviceGroupPeerMemoryFeatures ) - vkGetDeviceGroupPeerMemoryFeatures = vkGetDeviceGroupPeerMemoryFeaturesKHR; - vkCmdSetDeviceMaskKHR = PFN_vkCmdSetDeviceMaskKHR( vkGetInstanceProcAddr( instance, "vkCmdSetDeviceMaskKHR" ) ); - if ( !vkCmdSetDeviceMask ) - vkCmdSetDeviceMask = vkCmdSetDeviceMaskKHR; - vkCmdDispatchBaseKHR = PFN_vkCmdDispatchBaseKHR( vkGetInstanceProcAddr( instance, "vkCmdDispatchBaseKHR" ) ); - if ( !vkCmdDispatchBase ) - vkCmdDispatchBase = vkCmdDispatchBaseKHR; #if defined( VK_USE_PLATFORM_VI_NN ) //=== VK_NN_vi_surface === vkCreateViSurfaceNN = PFN_vkCreateViSurfaceNN( vkGetInstanceProcAddr( instance, "vkCreateViSurfaceNN" ) ); - //=== VK_NN_vi_surface === - vkCreateViSurfaceNN = PFN_vkCreateViSurfaceNN( vkGetInstanceProcAddr( instance, "vkCreateViSurfaceNN" ) ); #endif /*VK_USE_PLATFORM_VI_NN*/ //=== VK_KHR_maintenance1 === vkTrimCommandPoolKHR = PFN_vkTrimCommandPoolKHR( vkGetInstanceProcAddr( instance, "vkTrimCommandPoolKHR" ) ); - if ( !vkTrimCommandPool ) - vkTrimCommandPool = vkTrimCommandPoolKHR; - //=== VK_KHR_maintenance1 === - vkTrimCommandPoolKHR = PFN_vkTrimCommandPoolKHR( vkGetInstanceProcAddr( instance, "vkTrimCommandPoolKHR" ) ); if ( !vkTrimCommandPool ) vkTrimCommandPool = vkTrimCommandPoolKHR; //=== VK_KHR_device_group_creation === vkEnumeratePhysicalDeviceGroupsKHR = PFN_vkEnumeratePhysicalDeviceGroupsKHR( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceGroupsKHR" ) ); - if ( !vkEnumeratePhysicalDeviceGroups ) - vkEnumeratePhysicalDeviceGroups = vkEnumeratePhysicalDeviceGroupsKHR; - //=== VK_KHR_device_group_creation === - vkEnumeratePhysicalDeviceGroupsKHR = PFN_vkEnumeratePhysicalDeviceGroupsKHR( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceGroupsKHR" ) ); if ( !vkEnumeratePhysicalDeviceGroups ) 
vkEnumeratePhysicalDeviceGroups = vkEnumeratePhysicalDeviceGroupsKHR; @@ -25647,86 +19446,47 @@ namespace VULKAN_HPP_NAMESPACE PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalBufferPropertiesKHR" ) ); if ( !vkGetPhysicalDeviceExternalBufferProperties ) vkGetPhysicalDeviceExternalBufferProperties = vkGetPhysicalDeviceExternalBufferPropertiesKHR; - //=== VK_KHR_external_memory_capabilities === - vkGetPhysicalDeviceExternalBufferPropertiesKHR = - PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalBufferPropertiesKHR" ) ); - if ( !vkGetPhysicalDeviceExternalBufferProperties ) - vkGetPhysicalDeviceExternalBufferProperties = vkGetPhysicalDeviceExternalBufferPropertiesKHR; #if defined( VK_USE_PLATFORM_WIN32_KHR ) //=== VK_KHR_external_memory_win32 === vkGetMemoryWin32HandleKHR = PFN_vkGetMemoryWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkGetMemoryWin32HandleKHR" ) ); vkGetMemoryWin32HandlePropertiesKHR = PFN_vkGetMemoryWin32HandlePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetMemoryWin32HandlePropertiesKHR" ) ); - //=== VK_KHR_external_memory_win32 === - vkGetMemoryWin32HandleKHR = PFN_vkGetMemoryWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkGetMemoryWin32HandleKHR" ) ); - vkGetMemoryWin32HandlePropertiesKHR = - PFN_vkGetMemoryWin32HandlePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetMemoryWin32HandlePropertiesKHR" ) ); #endif /*VK_USE_PLATFORM_WIN32_KHR*/ //=== VK_KHR_external_memory_fd === vkGetMemoryFdKHR = PFN_vkGetMemoryFdKHR( vkGetInstanceProcAddr( instance, "vkGetMemoryFdKHR" ) ); vkGetMemoryFdPropertiesKHR = PFN_vkGetMemoryFdPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetMemoryFdPropertiesKHR" ) ); - //=== VK_KHR_external_memory_fd === - vkGetMemoryFdKHR = PFN_vkGetMemoryFdKHR( vkGetInstanceProcAddr( instance, "vkGetMemoryFdKHR" ) ); - vkGetMemoryFdPropertiesKHR = PFN_vkGetMemoryFdPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetMemoryFdPropertiesKHR" ) ); //=== VK_KHR_external_semaphore_capabilities === vkGetPhysicalDeviceExternalSemaphorePropertiesKHR = PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalSemaphorePropertiesKHR" ) ); if ( !vkGetPhysicalDeviceExternalSemaphoreProperties ) vkGetPhysicalDeviceExternalSemaphoreProperties = vkGetPhysicalDeviceExternalSemaphorePropertiesKHR; - //=== VK_KHR_external_semaphore_capabilities === - vkGetPhysicalDeviceExternalSemaphorePropertiesKHR = - PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalSemaphorePropertiesKHR" ) ); - if ( !vkGetPhysicalDeviceExternalSemaphoreProperties ) - vkGetPhysicalDeviceExternalSemaphoreProperties = vkGetPhysicalDeviceExternalSemaphorePropertiesKHR; #if defined( VK_USE_PLATFORM_WIN32_KHR ) //=== VK_KHR_external_semaphore_win32 === vkImportSemaphoreWin32HandleKHR = PFN_vkImportSemaphoreWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkImportSemaphoreWin32HandleKHR" ) ); vkGetSemaphoreWin32HandleKHR = PFN_vkGetSemaphoreWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkGetSemaphoreWin32HandleKHR" ) ); - //=== VK_KHR_external_semaphore_win32 === - vkImportSemaphoreWin32HandleKHR = PFN_vkImportSemaphoreWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkImportSemaphoreWin32HandleKHR" ) ); - vkGetSemaphoreWin32HandleKHR = PFN_vkGetSemaphoreWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkGetSemaphoreWin32HandleKHR" ) ); 
#endif /*VK_USE_PLATFORM_WIN32_KHR*/ //=== VK_KHR_external_semaphore_fd === vkImportSemaphoreFdKHR = PFN_vkImportSemaphoreFdKHR( vkGetInstanceProcAddr( instance, "vkImportSemaphoreFdKHR" ) ); vkGetSemaphoreFdKHR = PFN_vkGetSemaphoreFdKHR( vkGetInstanceProcAddr( instance, "vkGetSemaphoreFdKHR" ) ); - //=== VK_KHR_external_semaphore_fd === - vkImportSemaphoreFdKHR = PFN_vkImportSemaphoreFdKHR( vkGetInstanceProcAddr( instance, "vkImportSemaphoreFdKHR" ) ); - vkGetSemaphoreFdKHR = PFN_vkGetSemaphoreFdKHR( vkGetInstanceProcAddr( instance, "vkGetSemaphoreFdKHR" ) ); //=== VK_KHR_push_descriptor === vkCmdPushDescriptorSetKHR = PFN_vkCmdPushDescriptorSetKHR( vkGetInstanceProcAddr( instance, "vkCmdPushDescriptorSetKHR" ) ); + if ( !vkCmdPushDescriptorSet ) + vkCmdPushDescriptorSet = vkCmdPushDescriptorSetKHR; vkCmdPushDescriptorSetWithTemplateKHR = PFN_vkCmdPushDescriptorSetWithTemplateKHR( vkGetInstanceProcAddr( instance, "vkCmdPushDescriptorSetWithTemplateKHR" ) ); - //=== VK_KHR_push_descriptor === - vkCmdPushDescriptorSetKHR = PFN_vkCmdPushDescriptorSetKHR( vkGetInstanceProcAddr( instance, "vkCmdPushDescriptorSetKHR" ) ); - vkCmdPushDescriptorSetWithTemplateKHR = - PFN_vkCmdPushDescriptorSetWithTemplateKHR( vkGetInstanceProcAddr( instance, "vkCmdPushDescriptorSetWithTemplateKHR" ) ); + if ( !vkCmdPushDescriptorSetWithTemplate ) + vkCmdPushDescriptorSetWithTemplate = vkCmdPushDescriptorSetWithTemplateKHR; //=== VK_EXT_conditional_rendering === vkCmdBeginConditionalRenderingEXT = PFN_vkCmdBeginConditionalRenderingEXT( vkGetInstanceProcAddr( instance, "vkCmdBeginConditionalRenderingEXT" ) ); vkCmdEndConditionalRenderingEXT = PFN_vkCmdEndConditionalRenderingEXT( vkGetInstanceProcAddr( instance, "vkCmdEndConditionalRenderingEXT" ) ); - //=== VK_EXT_conditional_rendering === - vkCmdBeginConditionalRenderingEXT = PFN_vkCmdBeginConditionalRenderingEXT( vkGetInstanceProcAddr( instance, "vkCmdBeginConditionalRenderingEXT" ) ); - vkCmdEndConditionalRenderingEXT = PFN_vkCmdEndConditionalRenderingEXT( vkGetInstanceProcAddr( instance, "vkCmdEndConditionalRenderingEXT" ) ); - //=== VK_KHR_descriptor_update_template === - vkCreateDescriptorUpdateTemplateKHR = - PFN_vkCreateDescriptorUpdateTemplateKHR( vkGetInstanceProcAddr( instance, "vkCreateDescriptorUpdateTemplateKHR" ) ); - if ( !vkCreateDescriptorUpdateTemplate ) - vkCreateDescriptorUpdateTemplate = vkCreateDescriptorUpdateTemplateKHR; - vkDestroyDescriptorUpdateTemplateKHR = - PFN_vkDestroyDescriptorUpdateTemplateKHR( vkGetInstanceProcAddr( instance, "vkDestroyDescriptorUpdateTemplateKHR" ) ); - if ( !vkDestroyDescriptorUpdateTemplate ) - vkDestroyDescriptorUpdateTemplate = vkDestroyDescriptorUpdateTemplateKHR; - vkUpdateDescriptorSetWithTemplateKHR = - PFN_vkUpdateDescriptorSetWithTemplateKHR( vkGetInstanceProcAddr( instance, "vkUpdateDescriptorSetWithTemplateKHR" ) ); - if ( !vkUpdateDescriptorSetWithTemplate ) - vkUpdateDescriptorSetWithTemplate = vkUpdateDescriptorSetWithTemplateKHR; //=== VK_KHR_descriptor_update_template === vkCreateDescriptorUpdateTemplateKHR = PFN_vkCreateDescriptorUpdateTemplateKHR( vkGetInstanceProcAddr( instance, "vkCreateDescriptorUpdateTemplateKHR" ) ); @@ -25743,27 +19503,17 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_NV_clip_space_w_scaling === vkCmdSetViewportWScalingNV = PFN_vkCmdSetViewportWScalingNV( vkGetInstanceProcAddr( instance, "vkCmdSetViewportWScalingNV" ) ); - //=== VK_NV_clip_space_w_scaling === - vkCmdSetViewportWScalingNV = PFN_vkCmdSetViewportWScalingNV( vkGetInstanceProcAddr( instance, 
"vkCmdSetViewportWScalingNV" ) ); //=== VK_EXT_direct_mode_display === vkReleaseDisplayEXT = PFN_vkReleaseDisplayEXT( vkGetInstanceProcAddr( instance, "vkReleaseDisplayEXT" ) ); - //=== VK_EXT_direct_mode_display === - vkReleaseDisplayEXT = PFN_vkReleaseDisplayEXT( vkGetInstanceProcAddr( instance, "vkReleaseDisplayEXT" ) ); #if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT ) //=== VK_EXT_acquire_xlib_display === vkAcquireXlibDisplayEXT = PFN_vkAcquireXlibDisplayEXT( vkGetInstanceProcAddr( instance, "vkAcquireXlibDisplayEXT" ) ); vkGetRandROutputDisplayEXT = PFN_vkGetRandROutputDisplayEXT( vkGetInstanceProcAddr( instance, "vkGetRandROutputDisplayEXT" ) ); - //=== VK_EXT_acquire_xlib_display === - vkAcquireXlibDisplayEXT = PFN_vkAcquireXlibDisplayEXT( vkGetInstanceProcAddr( instance, "vkAcquireXlibDisplayEXT" ) ); - vkGetRandROutputDisplayEXT = PFN_vkGetRandROutputDisplayEXT( vkGetInstanceProcAddr( instance, "vkGetRandROutputDisplayEXT" ) ); #endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ //=== VK_EXT_display_surface_counter === - vkGetPhysicalDeviceSurfaceCapabilities2EXT = - PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilities2EXT" ) ); - //=== VK_EXT_display_surface_counter === vkGetPhysicalDeviceSurfaceCapabilities2EXT = PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilities2EXT" ) ); @@ -25772,46 +19522,19 @@ namespace VULKAN_HPP_NAMESPACE vkRegisterDeviceEventEXT = PFN_vkRegisterDeviceEventEXT( vkGetInstanceProcAddr( instance, "vkRegisterDeviceEventEXT" ) ); vkRegisterDisplayEventEXT = PFN_vkRegisterDisplayEventEXT( vkGetInstanceProcAddr( instance, "vkRegisterDisplayEventEXT" ) ); vkGetSwapchainCounterEXT = PFN_vkGetSwapchainCounterEXT( vkGetInstanceProcAddr( instance, "vkGetSwapchainCounterEXT" ) ); - //=== VK_EXT_display_control === - vkDisplayPowerControlEXT = PFN_vkDisplayPowerControlEXT( vkGetInstanceProcAddr( instance, "vkDisplayPowerControlEXT" ) ); - vkRegisterDeviceEventEXT = PFN_vkRegisterDeviceEventEXT( vkGetInstanceProcAddr( instance, "vkRegisterDeviceEventEXT" ) ); - vkRegisterDisplayEventEXT = PFN_vkRegisterDisplayEventEXT( vkGetInstanceProcAddr( instance, "vkRegisterDisplayEventEXT" ) ); - vkGetSwapchainCounterEXT = PFN_vkGetSwapchainCounterEXT( vkGetInstanceProcAddr( instance, "vkGetSwapchainCounterEXT" ) ); - //=== VK_GOOGLE_display_timing === - vkGetRefreshCycleDurationGOOGLE = PFN_vkGetRefreshCycleDurationGOOGLE( vkGetInstanceProcAddr( instance, "vkGetRefreshCycleDurationGOOGLE" ) ); - vkGetPastPresentationTimingGOOGLE = PFN_vkGetPastPresentationTimingGOOGLE( vkGetInstanceProcAddr( instance, "vkGetPastPresentationTimingGOOGLE" ) ); //=== VK_GOOGLE_display_timing === vkGetRefreshCycleDurationGOOGLE = PFN_vkGetRefreshCycleDurationGOOGLE( vkGetInstanceProcAddr( instance, "vkGetRefreshCycleDurationGOOGLE" ) ); vkGetPastPresentationTimingGOOGLE = PFN_vkGetPastPresentationTimingGOOGLE( vkGetInstanceProcAddr( instance, "vkGetPastPresentationTimingGOOGLE" ) ); - //=== VK_EXT_discard_rectangles === - vkCmdSetDiscardRectangleEXT = PFN_vkCmdSetDiscardRectangleEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDiscardRectangleEXT" ) ); - vkCmdSetDiscardRectangleEnableEXT = PFN_vkCmdSetDiscardRectangleEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDiscardRectangleEnableEXT" ) ); - vkCmdSetDiscardRectangleModeEXT = PFN_vkCmdSetDiscardRectangleModeEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDiscardRectangleModeEXT" ) ); //=== VK_EXT_discard_rectangles === 
vkCmdSetDiscardRectangleEXT = PFN_vkCmdSetDiscardRectangleEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDiscardRectangleEXT" ) ); vkCmdSetDiscardRectangleEnableEXT = PFN_vkCmdSetDiscardRectangleEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDiscardRectangleEnableEXT" ) ); vkCmdSetDiscardRectangleModeEXT = PFN_vkCmdSetDiscardRectangleModeEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDiscardRectangleModeEXT" ) ); - //=== VK_EXT_hdr_metadata === - vkSetHdrMetadataEXT = PFN_vkSetHdrMetadataEXT( vkGetInstanceProcAddr( instance, "vkSetHdrMetadataEXT" ) ); //=== VK_EXT_hdr_metadata === vkSetHdrMetadataEXT = PFN_vkSetHdrMetadataEXT( vkGetInstanceProcAddr( instance, "vkSetHdrMetadataEXT" ) ); - //=== VK_KHR_create_renderpass2 === - vkCreateRenderPass2KHR = PFN_vkCreateRenderPass2KHR( vkGetInstanceProcAddr( instance, "vkCreateRenderPass2KHR" ) ); - if ( !vkCreateRenderPass2 ) - vkCreateRenderPass2 = vkCreateRenderPass2KHR; - vkCmdBeginRenderPass2KHR = PFN_vkCmdBeginRenderPass2KHR( vkGetInstanceProcAddr( instance, "vkCmdBeginRenderPass2KHR" ) ); - if ( !vkCmdBeginRenderPass2 ) - vkCmdBeginRenderPass2 = vkCmdBeginRenderPass2KHR; - vkCmdNextSubpass2KHR = PFN_vkCmdNextSubpass2KHR( vkGetInstanceProcAddr( instance, "vkCmdNextSubpass2KHR" ) ); - if ( !vkCmdNextSubpass2 ) - vkCmdNextSubpass2 = vkCmdNextSubpass2KHR; - vkCmdEndRenderPass2KHR = PFN_vkCmdEndRenderPass2KHR( vkGetInstanceProcAddr( instance, "vkCmdEndRenderPass2KHR" ) ); - if ( !vkCmdEndRenderPass2 ) - vkCmdEndRenderPass2 = vkCmdEndRenderPass2KHR; //=== VK_KHR_create_renderpass2 === vkCreateRenderPass2KHR = PFN_vkCreateRenderPass2KHR( vkGetInstanceProcAddr( instance, "vkCreateRenderPass2KHR" ) ); if ( !vkCreateRenderPass2 ) @@ -25828,43 +19551,23 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_KHR_shared_presentable_image === vkGetSwapchainStatusKHR = PFN_vkGetSwapchainStatusKHR( vkGetInstanceProcAddr( instance, "vkGetSwapchainStatusKHR" ) ); - //=== VK_KHR_shared_presentable_image === - vkGetSwapchainStatusKHR = PFN_vkGetSwapchainStatusKHR( vkGetInstanceProcAddr( instance, "vkGetSwapchainStatusKHR" ) ); //=== VK_KHR_external_fence_capabilities === vkGetPhysicalDeviceExternalFencePropertiesKHR = PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalFencePropertiesKHR" ) ); if ( !vkGetPhysicalDeviceExternalFenceProperties ) vkGetPhysicalDeviceExternalFenceProperties = vkGetPhysicalDeviceExternalFencePropertiesKHR; - //=== VK_KHR_external_fence_capabilities === - vkGetPhysicalDeviceExternalFencePropertiesKHR = - PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalFencePropertiesKHR" ) ); - if ( !vkGetPhysicalDeviceExternalFenceProperties ) - vkGetPhysicalDeviceExternalFenceProperties = vkGetPhysicalDeviceExternalFencePropertiesKHR; #if defined( VK_USE_PLATFORM_WIN32_KHR ) //=== VK_KHR_external_fence_win32 === vkImportFenceWin32HandleKHR = PFN_vkImportFenceWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkImportFenceWin32HandleKHR" ) ); vkGetFenceWin32HandleKHR = PFN_vkGetFenceWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkGetFenceWin32HandleKHR" ) ); - //=== VK_KHR_external_fence_win32 === - vkImportFenceWin32HandleKHR = PFN_vkImportFenceWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkImportFenceWin32HandleKHR" ) ); - vkGetFenceWin32HandleKHR = PFN_vkGetFenceWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkGetFenceWin32HandleKHR" ) ); #endif /*VK_USE_PLATFORM_WIN32_KHR*/ //=== VK_KHR_external_fence_fd === 
vkImportFenceFdKHR = PFN_vkImportFenceFdKHR( vkGetInstanceProcAddr( instance, "vkImportFenceFdKHR" ) ); vkGetFenceFdKHR = PFN_vkGetFenceFdKHR( vkGetInstanceProcAddr( instance, "vkGetFenceFdKHR" ) ); - //=== VK_KHR_external_fence_fd === - vkImportFenceFdKHR = PFN_vkImportFenceFdKHR( vkGetInstanceProcAddr( instance, "vkImportFenceFdKHR" ) ); - vkGetFenceFdKHR = PFN_vkGetFenceFdKHR( vkGetInstanceProcAddr( instance, "vkGetFenceFdKHR" ) ); - //=== VK_KHR_performance_query === - vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR = PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( - vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR" ) ); - vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR = PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( - vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR" ) ); - vkAcquireProfilingLockKHR = PFN_vkAcquireProfilingLockKHR( vkGetInstanceProcAddr( instance, "vkAcquireProfilingLockKHR" ) ); - vkReleaseProfilingLockKHR = PFN_vkReleaseProfilingLockKHR( vkGetInstanceProcAddr( instance, "vkReleaseProfilingLockKHR" ) ); //=== VK_KHR_performance_query === vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR = PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR" ) ); @@ -25873,11 +19576,6 @@ namespace VULKAN_HPP_NAMESPACE vkAcquireProfilingLockKHR = PFN_vkAcquireProfilingLockKHR( vkGetInstanceProcAddr( instance, "vkAcquireProfilingLockKHR" ) ); vkReleaseProfilingLockKHR = PFN_vkReleaseProfilingLockKHR( vkGetInstanceProcAddr( instance, "vkReleaseProfilingLockKHR" ) ); - //=== VK_KHR_get_surface_capabilities2 === - vkGetPhysicalDeviceSurfaceCapabilities2KHR = - PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilities2KHR" ) ); - vkGetPhysicalDeviceSurfaceFormats2KHR = - PFN_vkGetPhysicalDeviceSurfaceFormats2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceFormats2KHR" ) ); //=== VK_KHR_get_surface_capabilities2 === vkGetPhysicalDeviceSurfaceCapabilities2KHR = PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilities2KHR" ) ); @@ -25891,26 +19589,15 @@ namespace VULKAN_HPP_NAMESPACE PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPlaneProperties2KHR" ) ); vkGetDisplayModeProperties2KHR = PFN_vkGetDisplayModeProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetDisplayModeProperties2KHR" ) ); vkGetDisplayPlaneCapabilities2KHR = PFN_vkGetDisplayPlaneCapabilities2KHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneCapabilities2KHR" ) ); - //=== VK_KHR_get_display_properties2 === - vkGetPhysicalDeviceDisplayProperties2KHR = - PFN_vkGetPhysicalDeviceDisplayProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayProperties2KHR" ) ); - vkGetPhysicalDeviceDisplayPlaneProperties2KHR = - PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPlaneProperties2KHR" ) ); - vkGetDisplayModeProperties2KHR = PFN_vkGetDisplayModeProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetDisplayModeProperties2KHR" ) ); - vkGetDisplayPlaneCapabilities2KHR = PFN_vkGetDisplayPlaneCapabilities2KHR( vkGetInstanceProcAddr( instance, 
"vkGetDisplayPlaneCapabilities2KHR" ) ); #if defined( VK_USE_PLATFORM_IOS_MVK ) //=== VK_MVK_ios_surface === vkCreateIOSSurfaceMVK = PFN_vkCreateIOSSurfaceMVK( vkGetInstanceProcAddr( instance, "vkCreateIOSSurfaceMVK" ) ); - //=== VK_MVK_ios_surface === - vkCreateIOSSurfaceMVK = PFN_vkCreateIOSSurfaceMVK( vkGetInstanceProcAddr( instance, "vkCreateIOSSurfaceMVK" ) ); #endif /*VK_USE_PLATFORM_IOS_MVK*/ #if defined( VK_USE_PLATFORM_MACOS_MVK ) //=== VK_MVK_macos_surface === vkCreateMacOSSurfaceMVK = PFN_vkCreateMacOSSurfaceMVK( vkGetInstanceProcAddr( instance, "vkCreateMacOSSurfaceMVK" ) ); - //=== VK_MVK_macos_surface === - vkCreateMacOSSurfaceMVK = PFN_vkCreateMacOSSurfaceMVK( vkGetInstanceProcAddr( instance, "vkCreateMacOSSurfaceMVK" ) ); #endif /*VK_USE_PLATFORM_MACOS_MVK*/ //=== VK_EXT_debug_utils === @@ -25925,18 +19612,6 @@ namespace VULKAN_HPP_NAMESPACE vkCreateDebugUtilsMessengerEXT = PFN_vkCreateDebugUtilsMessengerEXT( vkGetInstanceProcAddr( instance, "vkCreateDebugUtilsMessengerEXT" ) ); vkDestroyDebugUtilsMessengerEXT = PFN_vkDestroyDebugUtilsMessengerEXT( vkGetInstanceProcAddr( instance, "vkDestroyDebugUtilsMessengerEXT" ) ); vkSubmitDebugUtilsMessageEXT = PFN_vkSubmitDebugUtilsMessageEXT( vkGetInstanceProcAddr( instance, "vkSubmitDebugUtilsMessageEXT" ) ); - //=== VK_EXT_debug_utils === - vkSetDebugUtilsObjectNameEXT = PFN_vkSetDebugUtilsObjectNameEXT( vkGetInstanceProcAddr( instance, "vkSetDebugUtilsObjectNameEXT" ) ); - vkSetDebugUtilsObjectTagEXT = PFN_vkSetDebugUtilsObjectTagEXT( vkGetInstanceProcAddr( instance, "vkSetDebugUtilsObjectTagEXT" ) ); - vkQueueBeginDebugUtilsLabelEXT = PFN_vkQueueBeginDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkQueueBeginDebugUtilsLabelEXT" ) ); - vkQueueEndDebugUtilsLabelEXT = PFN_vkQueueEndDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkQueueEndDebugUtilsLabelEXT" ) ); - vkQueueInsertDebugUtilsLabelEXT = PFN_vkQueueInsertDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkQueueInsertDebugUtilsLabelEXT" ) ); - vkCmdBeginDebugUtilsLabelEXT = PFN_vkCmdBeginDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkCmdBeginDebugUtilsLabelEXT" ) ); - vkCmdEndDebugUtilsLabelEXT = PFN_vkCmdEndDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkCmdEndDebugUtilsLabelEXT" ) ); - vkCmdInsertDebugUtilsLabelEXT = PFN_vkCmdInsertDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkCmdInsertDebugUtilsLabelEXT" ) ); - vkCreateDebugUtilsMessengerEXT = PFN_vkCreateDebugUtilsMessengerEXT( vkGetInstanceProcAddr( instance, "vkCreateDebugUtilsMessengerEXT" ) ); - vkDestroyDebugUtilsMessengerEXT = PFN_vkDestroyDebugUtilsMessengerEXT( vkGetInstanceProcAddr( instance, "vkDestroyDebugUtilsMessengerEXT" ) ); - vkSubmitDebugUtilsMessageEXT = PFN_vkSubmitDebugUtilsMessageEXT( vkGetInstanceProcAddr( instance, "vkSubmitDebugUtilsMessageEXT" ) ); #if defined( VK_USE_PLATFORM_ANDROID_KHR ) //=== VK_ANDROID_external_memory_android_hardware_buffer === @@ -25944,11 +19619,6 @@ namespace VULKAN_HPP_NAMESPACE PFN_vkGetAndroidHardwareBufferPropertiesANDROID( vkGetInstanceProcAddr( instance, "vkGetAndroidHardwareBufferPropertiesANDROID" ) ); vkGetMemoryAndroidHardwareBufferANDROID = PFN_vkGetMemoryAndroidHardwareBufferANDROID( vkGetInstanceProcAddr( instance, "vkGetMemoryAndroidHardwareBufferANDROID" ) ); - //=== VK_ANDROID_external_memory_android_hardware_buffer === - vkGetAndroidHardwareBufferPropertiesANDROID = - PFN_vkGetAndroidHardwareBufferPropertiesANDROID( vkGetInstanceProcAddr( instance, "vkGetAndroidHardwareBufferPropertiesANDROID" ) ); - 
vkGetMemoryAndroidHardwareBufferANDROID = - PFN_vkGetMemoryAndroidHardwareBufferANDROID( vkGetInstanceProcAddr( instance, "vkGetMemoryAndroidHardwareBufferANDROID" ) ); #endif /*VK_USE_PLATFORM_ANDROID_KHR*/ #if defined( VK_ENABLE_BETA_EXTENSIONS ) @@ -25965,27 +19635,10 @@ namespace VULKAN_HPP_NAMESPACE vkCmdDispatchGraphIndirectAMDX = PFN_vkCmdDispatchGraphIndirectAMDX( vkGetInstanceProcAddr( instance, "vkCmdDispatchGraphIndirectAMDX" ) ); vkCmdDispatchGraphIndirectCountAMDX = PFN_vkCmdDispatchGraphIndirectCountAMDX( vkGetInstanceProcAddr( instance, "vkCmdDispatchGraphIndirectCountAMDX" ) ); - //=== VK_AMDX_shader_enqueue === - vkCreateExecutionGraphPipelinesAMDX = - PFN_vkCreateExecutionGraphPipelinesAMDX( vkGetInstanceProcAddr( instance, "vkCreateExecutionGraphPipelinesAMDX" ) ); - vkGetExecutionGraphPipelineScratchSizeAMDX = - PFN_vkGetExecutionGraphPipelineScratchSizeAMDX( vkGetInstanceProcAddr( instance, "vkGetExecutionGraphPipelineScratchSizeAMDX" ) ); - vkGetExecutionGraphPipelineNodeIndexAMDX = - PFN_vkGetExecutionGraphPipelineNodeIndexAMDX( vkGetInstanceProcAddr( instance, "vkGetExecutionGraphPipelineNodeIndexAMDX" ) ); - vkCmdInitializeGraphScratchMemoryAMDX = - PFN_vkCmdInitializeGraphScratchMemoryAMDX( vkGetInstanceProcAddr( instance, "vkCmdInitializeGraphScratchMemoryAMDX" ) ); - vkCmdDispatchGraphAMDX = PFN_vkCmdDispatchGraphAMDX( vkGetInstanceProcAddr( instance, "vkCmdDispatchGraphAMDX" ) ); - vkCmdDispatchGraphIndirectAMDX = PFN_vkCmdDispatchGraphIndirectAMDX( vkGetInstanceProcAddr( instance, "vkCmdDispatchGraphIndirectAMDX" ) ); - vkCmdDispatchGraphIndirectCountAMDX = - PFN_vkCmdDispatchGraphIndirectCountAMDX( vkGetInstanceProcAddr( instance, "vkCmdDispatchGraphIndirectCountAMDX" ) ); #endif /*VK_ENABLE_BETA_EXTENSIONS*/ //=== VK_EXT_sample_locations === vkCmdSetSampleLocationsEXT = PFN_vkCmdSetSampleLocationsEXT( vkGetInstanceProcAddr( instance, "vkCmdSetSampleLocationsEXT" ) ); - vkGetPhysicalDeviceMultisamplePropertiesEXT = - PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMultisamplePropertiesEXT" ) ); - //=== VK_EXT_sample_locations === - vkCmdSetSampleLocationsEXT = PFN_vkCmdSetSampleLocationsEXT( vkGetInstanceProcAddr( instance, "vkCmdSetSampleLocationsEXT" ) ); vkGetPhysicalDeviceMultisamplePropertiesEXT = PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMultisamplePropertiesEXT" ) ); @@ -25994,17 +19647,6 @@ namespace VULKAN_HPP_NAMESPACE if ( !vkGetImageMemoryRequirements2 ) vkGetImageMemoryRequirements2 = vkGetImageMemoryRequirements2KHR; vkGetBufferMemoryRequirements2KHR = PFN_vkGetBufferMemoryRequirements2KHR( vkGetInstanceProcAddr( instance, "vkGetBufferMemoryRequirements2KHR" ) ); - if ( !vkGetBufferMemoryRequirements2 ) - vkGetBufferMemoryRequirements2 = vkGetBufferMemoryRequirements2KHR; - vkGetImageSparseMemoryRequirements2KHR = - PFN_vkGetImageSparseMemoryRequirements2KHR( vkGetInstanceProcAddr( instance, "vkGetImageSparseMemoryRequirements2KHR" ) ); - if ( !vkGetImageSparseMemoryRequirements2 ) - vkGetImageSparseMemoryRequirements2 = vkGetImageSparseMemoryRequirements2KHR; - //=== VK_KHR_get_memory_requirements2 === - vkGetImageMemoryRequirements2KHR = PFN_vkGetImageMemoryRequirements2KHR( vkGetInstanceProcAddr( instance, "vkGetImageMemoryRequirements2KHR" ) ); - if ( !vkGetImageMemoryRequirements2 ) - vkGetImageMemoryRequirements2 = vkGetImageMemoryRequirements2KHR; - vkGetBufferMemoryRequirements2KHR = PFN_vkGetBufferMemoryRequirements2KHR( 
vkGetInstanceProcAddr( instance, "vkGetBufferMemoryRequirements2KHR" ) ); if ( !vkGetBufferMemoryRequirements2 ) vkGetBufferMemoryRequirements2 = vkGetBufferMemoryRequirements2KHR; vkGetImageSparseMemoryRequirements2KHR = @@ -26028,34 +19670,6 @@ namespace VULKAN_HPP_NAMESPACE vkWriteAccelerationStructuresPropertiesKHR = PFN_vkWriteAccelerationStructuresPropertiesKHR( vkGetInstanceProcAddr( instance, "vkWriteAccelerationStructuresPropertiesKHR" ) ); vkCmdCopyAccelerationStructureKHR = PFN_vkCmdCopyAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCmdCopyAccelerationStructureKHR" ) ); - vkCmdCopyAccelerationStructureToMemoryKHR = - PFN_vkCmdCopyAccelerationStructureToMemoryKHR( vkGetInstanceProcAddr( instance, "vkCmdCopyAccelerationStructureToMemoryKHR" ) ); - vkCmdCopyMemoryToAccelerationStructureKHR = - PFN_vkCmdCopyMemoryToAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCmdCopyMemoryToAccelerationStructureKHR" ) ); - vkGetAccelerationStructureDeviceAddressKHR = - PFN_vkGetAccelerationStructureDeviceAddressKHR( vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureDeviceAddressKHR" ) ); - vkCmdWriteAccelerationStructuresPropertiesKHR = - PFN_vkCmdWriteAccelerationStructuresPropertiesKHR( vkGetInstanceProcAddr( instance, "vkCmdWriteAccelerationStructuresPropertiesKHR" ) ); - vkGetDeviceAccelerationStructureCompatibilityKHR = - PFN_vkGetDeviceAccelerationStructureCompatibilityKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceAccelerationStructureCompatibilityKHR" ) ); - vkGetAccelerationStructureBuildSizesKHR = - PFN_vkGetAccelerationStructureBuildSizesKHR( vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureBuildSizesKHR" ) ); - //=== VK_KHR_acceleration_structure === - vkCreateAccelerationStructureKHR = PFN_vkCreateAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCreateAccelerationStructureKHR" ) ); - vkDestroyAccelerationStructureKHR = PFN_vkDestroyAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkDestroyAccelerationStructureKHR" ) ); - vkCmdBuildAccelerationStructuresKHR = - PFN_vkCmdBuildAccelerationStructuresKHR( vkGetInstanceProcAddr( instance, "vkCmdBuildAccelerationStructuresKHR" ) ); - vkCmdBuildAccelerationStructuresIndirectKHR = - PFN_vkCmdBuildAccelerationStructuresIndirectKHR( vkGetInstanceProcAddr( instance, "vkCmdBuildAccelerationStructuresIndirectKHR" ) ); - vkBuildAccelerationStructuresKHR = PFN_vkBuildAccelerationStructuresKHR( vkGetInstanceProcAddr( instance, "vkBuildAccelerationStructuresKHR" ) ); - vkCopyAccelerationStructureKHR = PFN_vkCopyAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCopyAccelerationStructureKHR" ) ); - vkCopyAccelerationStructureToMemoryKHR = - PFN_vkCopyAccelerationStructureToMemoryKHR( vkGetInstanceProcAddr( instance, "vkCopyAccelerationStructureToMemoryKHR" ) ); - vkCopyMemoryToAccelerationStructureKHR = - PFN_vkCopyMemoryToAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCopyMemoryToAccelerationStructureKHR" ) ); - vkWriteAccelerationStructuresPropertiesKHR = - PFN_vkWriteAccelerationStructuresPropertiesKHR( vkGetInstanceProcAddr( instance, "vkWriteAccelerationStructuresPropertiesKHR" ) ); - vkCmdCopyAccelerationStructureKHR = PFN_vkCmdCopyAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCmdCopyAccelerationStructureKHR" ) ); vkCmdCopyAccelerationStructureToMemoryKHR = PFN_vkCmdCopyAccelerationStructureToMemoryKHR( vkGetInstanceProcAddr( instance, "vkCmdCopyAccelerationStructureToMemoryKHR" ) ); vkCmdCopyMemoryToAccelerationStructureKHR = @@ 
-26077,18 +19691,6 @@ namespace VULKAN_HPP_NAMESPACE vkGetRayTracingCaptureReplayShaderGroupHandlesKHR = PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( vkGetInstanceProcAddr( instance, "vkGetRayTracingCaptureReplayShaderGroupHandlesKHR" ) ); vkCmdTraceRaysIndirectKHR = PFN_vkCmdTraceRaysIndirectKHR( vkGetInstanceProcAddr( instance, "vkCmdTraceRaysIndirectKHR" ) ); - vkGetRayTracingShaderGroupStackSizeKHR = - PFN_vkGetRayTracingShaderGroupStackSizeKHR( vkGetInstanceProcAddr( instance, "vkGetRayTracingShaderGroupStackSizeKHR" ) ); - vkCmdSetRayTracingPipelineStackSizeKHR = - PFN_vkCmdSetRayTracingPipelineStackSizeKHR( vkGetInstanceProcAddr( instance, "vkCmdSetRayTracingPipelineStackSizeKHR" ) ); - //=== VK_KHR_ray_tracing_pipeline === - vkCmdTraceRaysKHR = PFN_vkCmdTraceRaysKHR( vkGetInstanceProcAddr( instance, "vkCmdTraceRaysKHR" ) ); - vkCreateRayTracingPipelinesKHR = PFN_vkCreateRayTracingPipelinesKHR( vkGetInstanceProcAddr( instance, "vkCreateRayTracingPipelinesKHR" ) ); - vkGetRayTracingShaderGroupHandlesKHR = - PFN_vkGetRayTracingShaderGroupHandlesKHR( vkGetInstanceProcAddr( instance, "vkGetRayTracingShaderGroupHandlesKHR" ) ); - vkGetRayTracingCaptureReplayShaderGroupHandlesKHR = - PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( vkGetInstanceProcAddr( instance, "vkGetRayTracingCaptureReplayShaderGroupHandlesKHR" ) ); - vkCmdTraceRaysIndirectKHR = PFN_vkCmdTraceRaysIndirectKHR( vkGetInstanceProcAddr( instance, "vkCmdTraceRaysIndirectKHR" ) ); vkGetRayTracingShaderGroupStackSizeKHR = PFN_vkGetRayTracingShaderGroupStackSizeKHR( vkGetInstanceProcAddr( instance, "vkGetRayTracingShaderGroupStackSizeKHR" ) ); vkCmdSetRayTracingPipelineStackSizeKHR = @@ -26099,13 +19701,6 @@ namespace VULKAN_HPP_NAMESPACE if ( !vkCreateSamplerYcbcrConversion ) vkCreateSamplerYcbcrConversion = vkCreateSamplerYcbcrConversionKHR; vkDestroySamplerYcbcrConversionKHR = PFN_vkDestroySamplerYcbcrConversionKHR( vkGetInstanceProcAddr( instance, "vkDestroySamplerYcbcrConversionKHR" ) ); - if ( !vkDestroySamplerYcbcrConversion ) - vkDestroySamplerYcbcrConversion = vkDestroySamplerYcbcrConversionKHR; - //=== VK_KHR_sampler_ycbcr_conversion === - vkCreateSamplerYcbcrConversionKHR = PFN_vkCreateSamplerYcbcrConversionKHR( vkGetInstanceProcAddr( instance, "vkCreateSamplerYcbcrConversionKHR" ) ); - if ( !vkCreateSamplerYcbcrConversion ) - vkCreateSamplerYcbcrConversion = vkCreateSamplerYcbcrConversionKHR; - vkDestroySamplerYcbcrConversionKHR = PFN_vkDestroySamplerYcbcrConversionKHR( vkGetInstanceProcAddr( instance, "vkDestroySamplerYcbcrConversionKHR" ) ); if ( !vkDestroySamplerYcbcrConversion ) vkDestroySamplerYcbcrConversion = vkDestroySamplerYcbcrConversionKHR; @@ -26114,20 +19709,10 @@ namespace VULKAN_HPP_NAMESPACE if ( !vkBindBufferMemory2 ) vkBindBufferMemory2 = vkBindBufferMemory2KHR; vkBindImageMemory2KHR = PFN_vkBindImageMemory2KHR( vkGetInstanceProcAddr( instance, "vkBindImageMemory2KHR" ) ); - if ( !vkBindImageMemory2 ) - vkBindImageMemory2 = vkBindImageMemory2KHR; - //=== VK_KHR_bind_memory2 === - vkBindBufferMemory2KHR = PFN_vkBindBufferMemory2KHR( vkGetInstanceProcAddr( instance, "vkBindBufferMemory2KHR" ) ); - if ( !vkBindBufferMemory2 ) - vkBindBufferMemory2 = vkBindBufferMemory2KHR; - vkBindImageMemory2KHR = PFN_vkBindImageMemory2KHR( vkGetInstanceProcAddr( instance, "vkBindImageMemory2KHR" ) ); if ( !vkBindImageMemory2 ) vkBindImageMemory2 = vkBindImageMemory2KHR; //=== VK_EXT_image_drm_format_modifier === - vkGetImageDrmFormatModifierPropertiesEXT = - 
PFN_vkGetImageDrmFormatModifierPropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetImageDrmFormatModifierPropertiesEXT" ) ); - //=== VK_EXT_image_drm_format_modifier === vkGetImageDrmFormatModifierPropertiesEXT = PFN_vkGetImageDrmFormatModifierPropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetImageDrmFormatModifierPropertiesEXT" ) ); @@ -26136,42 +19721,13 @@ namespace VULKAN_HPP_NAMESPACE vkDestroyValidationCacheEXT = PFN_vkDestroyValidationCacheEXT( vkGetInstanceProcAddr( instance, "vkDestroyValidationCacheEXT" ) ); vkMergeValidationCachesEXT = PFN_vkMergeValidationCachesEXT( vkGetInstanceProcAddr( instance, "vkMergeValidationCachesEXT" ) ); vkGetValidationCacheDataEXT = PFN_vkGetValidationCacheDataEXT( vkGetInstanceProcAddr( instance, "vkGetValidationCacheDataEXT" ) ); - //=== VK_EXT_validation_cache === - vkCreateValidationCacheEXT = PFN_vkCreateValidationCacheEXT( vkGetInstanceProcAddr( instance, "vkCreateValidationCacheEXT" ) ); - vkDestroyValidationCacheEXT = PFN_vkDestroyValidationCacheEXT( vkGetInstanceProcAddr( instance, "vkDestroyValidationCacheEXT" ) ); - vkMergeValidationCachesEXT = PFN_vkMergeValidationCachesEXT( vkGetInstanceProcAddr( instance, "vkMergeValidationCachesEXT" ) ); - vkGetValidationCacheDataEXT = PFN_vkGetValidationCacheDataEXT( vkGetInstanceProcAddr( instance, "vkGetValidationCacheDataEXT" ) ); - //=== VK_NV_shading_rate_image === - vkCmdBindShadingRateImageNV = PFN_vkCmdBindShadingRateImageNV( vkGetInstanceProcAddr( instance, "vkCmdBindShadingRateImageNV" ) ); - vkCmdSetViewportShadingRatePaletteNV = - PFN_vkCmdSetViewportShadingRatePaletteNV( vkGetInstanceProcAddr( instance, "vkCmdSetViewportShadingRatePaletteNV" ) ); - vkCmdSetCoarseSampleOrderNV = PFN_vkCmdSetCoarseSampleOrderNV( vkGetInstanceProcAddr( instance, "vkCmdSetCoarseSampleOrderNV" ) ); //=== VK_NV_shading_rate_image === vkCmdBindShadingRateImageNV = PFN_vkCmdBindShadingRateImageNV( vkGetInstanceProcAddr( instance, "vkCmdBindShadingRateImageNV" ) ); vkCmdSetViewportShadingRatePaletteNV = PFN_vkCmdSetViewportShadingRatePaletteNV( vkGetInstanceProcAddr( instance, "vkCmdSetViewportShadingRatePaletteNV" ) ); vkCmdSetCoarseSampleOrderNV = PFN_vkCmdSetCoarseSampleOrderNV( vkGetInstanceProcAddr( instance, "vkCmdSetCoarseSampleOrderNV" ) ); - //=== VK_NV_ray_tracing === - vkCreateAccelerationStructureNV = PFN_vkCreateAccelerationStructureNV( vkGetInstanceProcAddr( instance, "vkCreateAccelerationStructureNV" ) ); - vkDestroyAccelerationStructureNV = PFN_vkDestroyAccelerationStructureNV( vkGetInstanceProcAddr( instance, "vkDestroyAccelerationStructureNV" ) ); - vkGetAccelerationStructureMemoryRequirementsNV = - PFN_vkGetAccelerationStructureMemoryRequirementsNV( vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureMemoryRequirementsNV" ) ); - vkBindAccelerationStructureMemoryNV = - PFN_vkBindAccelerationStructureMemoryNV( vkGetInstanceProcAddr( instance, "vkBindAccelerationStructureMemoryNV" ) ); - vkCmdBuildAccelerationStructureNV = PFN_vkCmdBuildAccelerationStructureNV( vkGetInstanceProcAddr( instance, "vkCmdBuildAccelerationStructureNV" ) ); - vkCmdCopyAccelerationStructureNV = PFN_vkCmdCopyAccelerationStructureNV( vkGetInstanceProcAddr( instance, "vkCmdCopyAccelerationStructureNV" ) ); - vkCmdTraceRaysNV = PFN_vkCmdTraceRaysNV( vkGetInstanceProcAddr( instance, "vkCmdTraceRaysNV" ) ); - vkCreateRayTracingPipelinesNV = PFN_vkCreateRayTracingPipelinesNV( vkGetInstanceProcAddr( instance, "vkCreateRayTracingPipelinesNV" ) ); - vkGetRayTracingShaderGroupHandlesNV = - 
PFN_vkGetRayTracingShaderGroupHandlesNV( vkGetInstanceProcAddr( instance, "vkGetRayTracingShaderGroupHandlesNV" ) ); - if ( !vkGetRayTracingShaderGroupHandlesKHR ) - vkGetRayTracingShaderGroupHandlesKHR = vkGetRayTracingShaderGroupHandlesNV; - vkGetAccelerationStructureHandleNV = PFN_vkGetAccelerationStructureHandleNV( vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureHandleNV" ) ); - vkCmdWriteAccelerationStructuresPropertiesNV = - PFN_vkCmdWriteAccelerationStructuresPropertiesNV( vkGetInstanceProcAddr( instance, "vkCmdWriteAccelerationStructuresPropertiesNV" ) ); - vkCompileDeferredNV = PFN_vkCompileDeferredNV( vkGetInstanceProcAddr( instance, "vkCompileDeferredNV" ) ); //=== VK_NV_ray_tracing === vkCreateAccelerationStructureNV = PFN_vkCreateAccelerationStructureNV( vkGetInstanceProcAddr( instance, "vkCreateAccelerationStructureNV" ) ); vkDestroyAccelerationStructureNV = PFN_vkDestroyAccelerationStructureNV( vkGetInstanceProcAddr( instance, "vkDestroyAccelerationStructureNV" ) ); @@ -26192,10 +19748,6 @@ namespace VULKAN_HPP_NAMESPACE PFN_vkCmdWriteAccelerationStructuresPropertiesNV( vkGetInstanceProcAddr( instance, "vkCmdWriteAccelerationStructuresPropertiesNV" ) ); vkCompileDeferredNV = PFN_vkCompileDeferredNV( vkGetInstanceProcAddr( instance, "vkCompileDeferredNV" ) ); - //=== VK_KHR_maintenance3 === - vkGetDescriptorSetLayoutSupportKHR = PFN_vkGetDescriptorSetLayoutSupportKHR( vkGetInstanceProcAddr( instance, "vkGetDescriptorSetLayoutSupportKHR" ) ); - if ( !vkGetDescriptorSetLayoutSupport ) - vkGetDescriptorSetLayoutSupport = vkGetDescriptorSetLayoutSupportKHR; //=== VK_KHR_maintenance3 === vkGetDescriptorSetLayoutSupportKHR = PFN_vkGetDescriptorSetLayoutSupportKHR( vkGetInstanceProcAddr( instance, "vkGetDescriptorSetLayoutSupportKHR" ) ); if ( !vkGetDescriptorSetLayoutSupport ) @@ -26206,38 +19758,17 @@ namespace VULKAN_HPP_NAMESPACE if ( !vkCmdDrawIndirectCount ) vkCmdDrawIndirectCount = vkCmdDrawIndirectCountKHR; vkCmdDrawIndexedIndirectCountKHR = PFN_vkCmdDrawIndexedIndirectCountKHR( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirectCountKHR" ) ); - if ( !vkCmdDrawIndexedIndirectCount ) - vkCmdDrawIndexedIndirectCount = vkCmdDrawIndexedIndirectCountKHR; - //=== VK_KHR_draw_indirect_count === - vkCmdDrawIndirectCountKHR = PFN_vkCmdDrawIndirectCountKHR( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectCountKHR" ) ); - if ( !vkCmdDrawIndirectCount ) - vkCmdDrawIndirectCount = vkCmdDrawIndirectCountKHR; - vkCmdDrawIndexedIndirectCountKHR = PFN_vkCmdDrawIndexedIndirectCountKHR( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirectCountKHR" ) ); if ( !vkCmdDrawIndexedIndirectCount ) vkCmdDrawIndexedIndirectCount = vkCmdDrawIndexedIndirectCountKHR; //=== VK_EXT_external_memory_host === - vkGetMemoryHostPointerPropertiesEXT = - PFN_vkGetMemoryHostPointerPropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetMemoryHostPointerPropertiesEXT" ) ); - //=== VK_EXT_external_memory_host === vkGetMemoryHostPointerPropertiesEXT = PFN_vkGetMemoryHostPointerPropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetMemoryHostPointerPropertiesEXT" ) ); //=== VK_AMD_buffer_marker === vkCmdWriteBufferMarkerAMD = PFN_vkCmdWriteBufferMarkerAMD( vkGetInstanceProcAddr( instance, "vkCmdWriteBufferMarkerAMD" ) ); vkCmdWriteBufferMarker2AMD = PFN_vkCmdWriteBufferMarker2AMD( vkGetInstanceProcAddr( instance, "vkCmdWriteBufferMarker2AMD" ) ); - //=== VK_AMD_buffer_marker === - vkCmdWriteBufferMarkerAMD = PFN_vkCmdWriteBufferMarkerAMD( vkGetInstanceProcAddr( instance, 
"vkCmdWriteBufferMarkerAMD" ) ); - vkCmdWriteBufferMarker2AMD = PFN_vkCmdWriteBufferMarker2AMD( vkGetInstanceProcAddr( instance, "vkCmdWriteBufferMarker2AMD" ) ); - //=== VK_EXT_calibrated_timestamps === - vkGetPhysicalDeviceCalibrateableTimeDomainsEXT = - PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCalibrateableTimeDomainsEXT" ) ); - if ( !vkGetPhysicalDeviceCalibrateableTimeDomainsKHR ) - vkGetPhysicalDeviceCalibrateableTimeDomainsKHR = vkGetPhysicalDeviceCalibrateableTimeDomainsEXT; - vkGetCalibratedTimestampsEXT = PFN_vkGetCalibratedTimestampsEXT( vkGetInstanceProcAddr( instance, "vkGetCalibratedTimestampsEXT" ) ); - if ( !vkGetCalibratedTimestampsKHR ) - vkGetCalibratedTimestampsKHR = vkGetCalibratedTimestampsEXT; //=== VK_EXT_calibrated_timestamps === vkGetPhysicalDeviceCalibrateableTimeDomainsEXT = PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCalibrateableTimeDomainsEXT" ) ); @@ -26251,37 +19782,16 @@ namespace VULKAN_HPP_NAMESPACE vkCmdDrawMeshTasksNV = PFN_vkCmdDrawMeshTasksNV( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksNV" ) ); vkCmdDrawMeshTasksIndirectNV = PFN_vkCmdDrawMeshTasksIndirectNV( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksIndirectNV" ) ); vkCmdDrawMeshTasksIndirectCountNV = PFN_vkCmdDrawMeshTasksIndirectCountNV( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksIndirectCountNV" ) ); - //=== VK_NV_mesh_shader === - vkCmdDrawMeshTasksNV = PFN_vkCmdDrawMeshTasksNV( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksNV" ) ); - vkCmdDrawMeshTasksIndirectNV = PFN_vkCmdDrawMeshTasksIndirectNV( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksIndirectNV" ) ); - vkCmdDrawMeshTasksIndirectCountNV = PFN_vkCmdDrawMeshTasksIndirectCountNV( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksIndirectCountNV" ) ); - //=== VK_NV_scissor_exclusive === - vkCmdSetExclusiveScissorEnableNV = PFN_vkCmdSetExclusiveScissorEnableNV( vkGetInstanceProcAddr( instance, "vkCmdSetExclusiveScissorEnableNV" ) ); - vkCmdSetExclusiveScissorNV = PFN_vkCmdSetExclusiveScissorNV( vkGetInstanceProcAddr( instance, "vkCmdSetExclusiveScissorNV" ) ); //=== VK_NV_scissor_exclusive === vkCmdSetExclusiveScissorEnableNV = PFN_vkCmdSetExclusiveScissorEnableNV( vkGetInstanceProcAddr( instance, "vkCmdSetExclusiveScissorEnableNV" ) ); vkCmdSetExclusiveScissorNV = PFN_vkCmdSetExclusiveScissorNV( vkGetInstanceProcAddr( instance, "vkCmdSetExclusiveScissorNV" ) ); - //=== VK_NV_device_diagnostic_checkpoints === - vkCmdSetCheckpointNV = PFN_vkCmdSetCheckpointNV( vkGetInstanceProcAddr( instance, "vkCmdSetCheckpointNV" ) ); - vkGetQueueCheckpointDataNV = PFN_vkGetQueueCheckpointDataNV( vkGetInstanceProcAddr( instance, "vkGetQueueCheckpointDataNV" ) ); - vkGetQueueCheckpointData2NV = PFN_vkGetQueueCheckpointData2NV( vkGetInstanceProcAddr( instance, "vkGetQueueCheckpointData2NV" ) ); //=== VK_NV_device_diagnostic_checkpoints === vkCmdSetCheckpointNV = PFN_vkCmdSetCheckpointNV( vkGetInstanceProcAddr( instance, "vkCmdSetCheckpointNV" ) ); vkGetQueueCheckpointDataNV = PFN_vkGetQueueCheckpointDataNV( vkGetInstanceProcAddr( instance, "vkGetQueueCheckpointDataNV" ) ); vkGetQueueCheckpointData2NV = PFN_vkGetQueueCheckpointData2NV( vkGetInstanceProcAddr( instance, "vkGetQueueCheckpointData2NV" ) ); - //=== VK_KHR_timeline_semaphore === - vkGetSemaphoreCounterValueKHR = PFN_vkGetSemaphoreCounterValueKHR( vkGetInstanceProcAddr( instance, "vkGetSemaphoreCounterValueKHR" ) ); - if ( 
!vkGetSemaphoreCounterValue ) - vkGetSemaphoreCounterValue = vkGetSemaphoreCounterValueKHR; - vkWaitSemaphoresKHR = PFN_vkWaitSemaphoresKHR( vkGetInstanceProcAddr( instance, "vkWaitSemaphoresKHR" ) ); - if ( !vkWaitSemaphores ) - vkWaitSemaphores = vkWaitSemaphoresKHR; - vkSignalSemaphoreKHR = PFN_vkSignalSemaphoreKHR( vkGetInstanceProcAddr( instance, "vkSignalSemaphoreKHR" ) ); - if ( !vkSignalSemaphore ) - vkSignalSemaphore = vkSignalSemaphoreKHR; //=== VK_KHR_timeline_semaphore === vkGetSemaphoreCounterValueKHR = PFN_vkGetSemaphoreCounterValueKHR( vkGetInstanceProcAddr( instance, "vkGetSemaphoreCounterValueKHR" ) ); if ( !vkGetSemaphoreCounterValue ) @@ -26307,75 +19817,41 @@ namespace VULKAN_HPP_NAMESPACE vkQueueSetPerformanceConfigurationINTEL = PFN_vkQueueSetPerformanceConfigurationINTEL( vkGetInstanceProcAddr( instance, "vkQueueSetPerformanceConfigurationINTEL" ) ); vkGetPerformanceParameterINTEL = PFN_vkGetPerformanceParameterINTEL( vkGetInstanceProcAddr( instance, "vkGetPerformanceParameterINTEL" ) ); - //=== VK_INTEL_performance_query === - vkInitializePerformanceApiINTEL = PFN_vkInitializePerformanceApiINTEL( vkGetInstanceProcAddr( instance, "vkInitializePerformanceApiINTEL" ) ); - vkUninitializePerformanceApiINTEL = PFN_vkUninitializePerformanceApiINTEL( vkGetInstanceProcAddr( instance, "vkUninitializePerformanceApiINTEL" ) ); - vkCmdSetPerformanceMarkerINTEL = PFN_vkCmdSetPerformanceMarkerINTEL( vkGetInstanceProcAddr( instance, "vkCmdSetPerformanceMarkerINTEL" ) ); - vkCmdSetPerformanceStreamMarkerINTEL = - PFN_vkCmdSetPerformanceStreamMarkerINTEL( vkGetInstanceProcAddr( instance, "vkCmdSetPerformanceStreamMarkerINTEL" ) ); - vkCmdSetPerformanceOverrideINTEL = PFN_vkCmdSetPerformanceOverrideINTEL( vkGetInstanceProcAddr( instance, "vkCmdSetPerformanceOverrideINTEL" ) ); - vkAcquirePerformanceConfigurationINTEL = - PFN_vkAcquirePerformanceConfigurationINTEL( vkGetInstanceProcAddr( instance, "vkAcquirePerformanceConfigurationINTEL" ) ); - vkReleasePerformanceConfigurationINTEL = - PFN_vkReleasePerformanceConfigurationINTEL( vkGetInstanceProcAddr( instance, "vkReleasePerformanceConfigurationINTEL" ) ); - vkQueueSetPerformanceConfigurationINTEL = - PFN_vkQueueSetPerformanceConfigurationINTEL( vkGetInstanceProcAddr( instance, "vkQueueSetPerformanceConfigurationINTEL" ) ); - vkGetPerformanceParameterINTEL = PFN_vkGetPerformanceParameterINTEL( vkGetInstanceProcAddr( instance, "vkGetPerformanceParameterINTEL" ) ); //=== VK_AMD_display_native_hdr === vkSetLocalDimmingAMD = PFN_vkSetLocalDimmingAMD( vkGetInstanceProcAddr( instance, "vkSetLocalDimmingAMD" ) ); - //=== VK_AMD_display_native_hdr === - vkSetLocalDimmingAMD = PFN_vkSetLocalDimmingAMD( vkGetInstanceProcAddr( instance, "vkSetLocalDimmingAMD" ) ); #if defined( VK_USE_PLATFORM_FUCHSIA ) //=== VK_FUCHSIA_imagepipe_surface === vkCreateImagePipeSurfaceFUCHSIA = PFN_vkCreateImagePipeSurfaceFUCHSIA( vkGetInstanceProcAddr( instance, "vkCreateImagePipeSurfaceFUCHSIA" ) ); - //=== VK_FUCHSIA_imagepipe_surface === - vkCreateImagePipeSurfaceFUCHSIA = PFN_vkCreateImagePipeSurfaceFUCHSIA( vkGetInstanceProcAddr( instance, "vkCreateImagePipeSurfaceFUCHSIA" ) ); #endif /*VK_USE_PLATFORM_FUCHSIA*/ #if defined( VK_USE_PLATFORM_METAL_EXT ) //=== VK_EXT_metal_surface === vkCreateMetalSurfaceEXT = PFN_vkCreateMetalSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateMetalSurfaceEXT" ) ); - //=== VK_EXT_metal_surface === - vkCreateMetalSurfaceEXT = PFN_vkCreateMetalSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateMetalSurfaceEXT" ) ); #endif 
/*VK_USE_PLATFORM_METAL_EXT*/ //=== VK_KHR_fragment_shading_rate === vkGetPhysicalDeviceFragmentShadingRatesKHR = PFN_vkGetPhysicalDeviceFragmentShadingRatesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFragmentShadingRatesKHR" ) ); vkCmdSetFragmentShadingRateKHR = PFN_vkCmdSetFragmentShadingRateKHR( vkGetInstanceProcAddr( instance, "vkCmdSetFragmentShadingRateKHR" ) ); - //=== VK_KHR_fragment_shading_rate === - vkGetPhysicalDeviceFragmentShadingRatesKHR = - PFN_vkGetPhysicalDeviceFragmentShadingRatesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFragmentShadingRatesKHR" ) ); - vkCmdSetFragmentShadingRateKHR = PFN_vkCmdSetFragmentShadingRateKHR( vkGetInstanceProcAddr( instance, "vkCmdSetFragmentShadingRateKHR" ) ); //=== VK_KHR_dynamic_rendering_local_read === vkCmdSetRenderingAttachmentLocationsKHR = PFN_vkCmdSetRenderingAttachmentLocationsKHR( vkGetInstanceProcAddr( instance, "vkCmdSetRenderingAttachmentLocationsKHR" ) ); + if ( !vkCmdSetRenderingAttachmentLocations ) + vkCmdSetRenderingAttachmentLocations = vkCmdSetRenderingAttachmentLocationsKHR; vkCmdSetRenderingInputAttachmentIndicesKHR = PFN_vkCmdSetRenderingInputAttachmentIndicesKHR( vkGetInstanceProcAddr( instance, "vkCmdSetRenderingInputAttachmentIndicesKHR" ) ); - //=== VK_KHR_dynamic_rendering_local_read === - vkCmdSetRenderingAttachmentLocationsKHR = - PFN_vkCmdSetRenderingAttachmentLocationsKHR( vkGetInstanceProcAddr( instance, "vkCmdSetRenderingAttachmentLocationsKHR" ) ); - vkCmdSetRenderingInputAttachmentIndicesKHR = - PFN_vkCmdSetRenderingInputAttachmentIndicesKHR( vkGetInstanceProcAddr( instance, "vkCmdSetRenderingInputAttachmentIndicesKHR" ) ); + if ( !vkCmdSetRenderingInputAttachmentIndices ) + vkCmdSetRenderingInputAttachmentIndices = vkCmdSetRenderingInputAttachmentIndicesKHR; //=== VK_EXT_buffer_device_address === vkGetBufferDeviceAddressEXT = PFN_vkGetBufferDeviceAddressEXT( vkGetInstanceProcAddr( instance, "vkGetBufferDeviceAddressEXT" ) ); - if ( !vkGetBufferDeviceAddress ) - vkGetBufferDeviceAddress = vkGetBufferDeviceAddressEXT; - //=== VK_EXT_buffer_device_address === - vkGetBufferDeviceAddressEXT = PFN_vkGetBufferDeviceAddressEXT( vkGetInstanceProcAddr( instance, "vkGetBufferDeviceAddressEXT" ) ); if ( !vkGetBufferDeviceAddress ) vkGetBufferDeviceAddress = vkGetBufferDeviceAddressEXT; //=== VK_EXT_tooling_info === - vkGetPhysicalDeviceToolPropertiesEXT = - PFN_vkGetPhysicalDeviceToolPropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceToolPropertiesEXT" ) ); - if ( !vkGetPhysicalDeviceToolProperties ) - vkGetPhysicalDeviceToolProperties = vkGetPhysicalDeviceToolPropertiesEXT; - //=== VK_EXT_tooling_info === vkGetPhysicalDeviceToolPropertiesEXT = PFN_vkGetPhysicalDeviceToolPropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceToolPropertiesEXT" ) ); if ( !vkGetPhysicalDeviceToolProperties ) @@ -26383,12 +19859,7 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_KHR_present_wait === vkWaitForPresentKHR = PFN_vkWaitForPresentKHR( vkGetInstanceProcAddr( instance, "vkWaitForPresentKHR" ) ); - //=== VK_KHR_present_wait === - vkWaitForPresentKHR = PFN_vkWaitForPresentKHR( vkGetInstanceProcAddr( instance, "vkWaitForPresentKHR" ) ); - //=== VK_NV_cooperative_matrix === - vkGetPhysicalDeviceCooperativeMatrixPropertiesNV = - PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCooperativeMatrixPropertiesNV" ) ); //=== VK_NV_cooperative_matrix === vkGetPhysicalDeviceCooperativeMatrixPropertiesNV = 
PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCooperativeMatrixPropertiesNV" ) ); @@ -26396,9 +19867,6 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_NV_coverage_reduction_mode === vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV = PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV" ) ); - //=== VK_NV_coverage_reduction_mode === - vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV = PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( - vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV" ) ); #if defined( VK_USE_PLATFORM_WIN32_KHR ) //=== VK_EXT_full_screen_exclusive === @@ -26410,33 +19878,11 @@ namespace VULKAN_HPP_NAMESPACE PFN_vkReleaseFullScreenExclusiveModeEXT( vkGetInstanceProcAddr( instance, "vkReleaseFullScreenExclusiveModeEXT" ) ); vkGetDeviceGroupSurfacePresentModes2EXT = PFN_vkGetDeviceGroupSurfacePresentModes2EXT( vkGetInstanceProcAddr( instance, "vkGetDeviceGroupSurfacePresentModes2EXT" ) ); - //=== VK_EXT_full_screen_exclusive === - vkGetPhysicalDeviceSurfacePresentModes2EXT = - PFN_vkGetPhysicalDeviceSurfacePresentModes2EXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfacePresentModes2EXT" ) ); - vkAcquireFullScreenExclusiveModeEXT = - PFN_vkAcquireFullScreenExclusiveModeEXT( vkGetInstanceProcAddr( instance, "vkAcquireFullScreenExclusiveModeEXT" ) ); - vkReleaseFullScreenExclusiveModeEXT = - PFN_vkReleaseFullScreenExclusiveModeEXT( vkGetInstanceProcAddr( instance, "vkReleaseFullScreenExclusiveModeEXT" ) ); - vkGetDeviceGroupSurfacePresentModes2EXT = - PFN_vkGetDeviceGroupSurfacePresentModes2EXT( vkGetInstanceProcAddr( instance, "vkGetDeviceGroupSurfacePresentModes2EXT" ) ); #endif /*VK_USE_PLATFORM_WIN32_KHR*/ //=== VK_EXT_headless_surface === vkCreateHeadlessSurfaceEXT = PFN_vkCreateHeadlessSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateHeadlessSurfaceEXT" ) ); - //=== VK_EXT_headless_surface === - vkCreateHeadlessSurfaceEXT = PFN_vkCreateHeadlessSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateHeadlessSurfaceEXT" ) ); - //=== VK_KHR_buffer_device_address === - vkGetBufferDeviceAddressKHR = PFN_vkGetBufferDeviceAddressKHR( vkGetInstanceProcAddr( instance, "vkGetBufferDeviceAddressKHR" ) ); - if ( !vkGetBufferDeviceAddress ) - vkGetBufferDeviceAddress = vkGetBufferDeviceAddressKHR; - vkGetBufferOpaqueCaptureAddressKHR = PFN_vkGetBufferOpaqueCaptureAddressKHR( vkGetInstanceProcAddr( instance, "vkGetBufferOpaqueCaptureAddressKHR" ) ); - if ( !vkGetBufferOpaqueCaptureAddress ) - vkGetBufferOpaqueCaptureAddress = vkGetBufferOpaqueCaptureAddressKHR; - vkGetDeviceMemoryOpaqueCaptureAddressKHR = - PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceMemoryOpaqueCaptureAddressKHR" ) ); - if ( !vkGetDeviceMemoryOpaqueCaptureAddress ) - vkGetDeviceMemoryOpaqueCaptureAddress = vkGetDeviceMemoryOpaqueCaptureAddressKHR; //=== VK_KHR_buffer_device_address === vkGetBufferDeviceAddressKHR = PFN_vkGetBufferDeviceAddressKHR( vkGetInstanceProcAddr( instance, "vkGetBufferDeviceAddressKHR" ) ); if ( !vkGetBufferDeviceAddress ) @@ -26451,19 +19897,11 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_EXT_line_rasterization === vkCmdSetLineStippleEXT = PFN_vkCmdSetLineStippleEXT( vkGetInstanceProcAddr( instance, "vkCmdSetLineStippleEXT" ) ); - if ( !vkCmdSetLineStippleKHR 
) - vkCmdSetLineStippleKHR = vkCmdSetLineStippleEXT; - //=== VK_EXT_line_rasterization === - vkCmdSetLineStippleEXT = PFN_vkCmdSetLineStippleEXT( vkGetInstanceProcAddr( instance, "vkCmdSetLineStippleEXT" ) ); - if ( !vkCmdSetLineStippleKHR ) - vkCmdSetLineStippleKHR = vkCmdSetLineStippleEXT; + if ( !vkCmdSetLineStipple ) + vkCmdSetLineStipple = vkCmdSetLineStippleEXT; //=== VK_EXT_host_query_reset === vkResetQueryPoolEXT = PFN_vkResetQueryPoolEXT( vkGetInstanceProcAddr( instance, "vkResetQueryPoolEXT" ) ); - if ( !vkResetQueryPool ) - vkResetQueryPool = vkResetQueryPoolEXT; - //=== VK_EXT_host_query_reset === - vkResetQueryPoolEXT = PFN_vkResetQueryPoolEXT( vkGetInstanceProcAddr( instance, "vkResetQueryPoolEXT" ) ); if ( !vkResetQueryPool ) vkResetQueryPool = vkResetQueryPoolEXT; @@ -26502,43 +19940,6 @@ namespace VULKAN_HPP_NAMESPACE if ( !vkCmdSetStencilTestEnable ) vkCmdSetStencilTestEnable = vkCmdSetStencilTestEnableEXT; vkCmdSetStencilOpEXT = PFN_vkCmdSetStencilOpEXT( vkGetInstanceProcAddr( instance, "vkCmdSetStencilOpEXT" ) ); - if ( !vkCmdSetStencilOp ) - vkCmdSetStencilOp = vkCmdSetStencilOpEXT; - //=== VK_EXT_extended_dynamic_state === - vkCmdSetCullModeEXT = PFN_vkCmdSetCullModeEXT( vkGetInstanceProcAddr( instance, "vkCmdSetCullModeEXT" ) ); - if ( !vkCmdSetCullMode ) - vkCmdSetCullMode = vkCmdSetCullModeEXT; - vkCmdSetFrontFaceEXT = PFN_vkCmdSetFrontFaceEXT( vkGetInstanceProcAddr( instance, "vkCmdSetFrontFaceEXT" ) ); - if ( !vkCmdSetFrontFace ) - vkCmdSetFrontFace = vkCmdSetFrontFaceEXT; - vkCmdSetPrimitiveTopologyEXT = PFN_vkCmdSetPrimitiveTopologyEXT( vkGetInstanceProcAddr( instance, "vkCmdSetPrimitiveTopologyEXT" ) ); - if ( !vkCmdSetPrimitiveTopology ) - vkCmdSetPrimitiveTopology = vkCmdSetPrimitiveTopologyEXT; - vkCmdSetViewportWithCountEXT = PFN_vkCmdSetViewportWithCountEXT( vkGetInstanceProcAddr( instance, "vkCmdSetViewportWithCountEXT" ) ); - if ( !vkCmdSetViewportWithCount ) - vkCmdSetViewportWithCount = vkCmdSetViewportWithCountEXT; - vkCmdSetScissorWithCountEXT = PFN_vkCmdSetScissorWithCountEXT( vkGetInstanceProcAddr( instance, "vkCmdSetScissorWithCountEXT" ) ); - if ( !vkCmdSetScissorWithCount ) - vkCmdSetScissorWithCount = vkCmdSetScissorWithCountEXT; - vkCmdBindVertexBuffers2EXT = PFN_vkCmdBindVertexBuffers2EXT( vkGetInstanceProcAddr( instance, "vkCmdBindVertexBuffers2EXT" ) ); - if ( !vkCmdBindVertexBuffers2 ) - vkCmdBindVertexBuffers2 = vkCmdBindVertexBuffers2EXT; - vkCmdSetDepthTestEnableEXT = PFN_vkCmdSetDepthTestEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthTestEnableEXT" ) ); - if ( !vkCmdSetDepthTestEnable ) - vkCmdSetDepthTestEnable = vkCmdSetDepthTestEnableEXT; - vkCmdSetDepthWriteEnableEXT = PFN_vkCmdSetDepthWriteEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthWriteEnableEXT" ) ); - if ( !vkCmdSetDepthWriteEnable ) - vkCmdSetDepthWriteEnable = vkCmdSetDepthWriteEnableEXT; - vkCmdSetDepthCompareOpEXT = PFN_vkCmdSetDepthCompareOpEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthCompareOpEXT" ) ); - if ( !vkCmdSetDepthCompareOp ) - vkCmdSetDepthCompareOp = vkCmdSetDepthCompareOpEXT; - vkCmdSetDepthBoundsTestEnableEXT = PFN_vkCmdSetDepthBoundsTestEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBoundsTestEnableEXT" ) ); - if ( !vkCmdSetDepthBoundsTestEnable ) - vkCmdSetDepthBoundsTestEnable = vkCmdSetDepthBoundsTestEnableEXT; - vkCmdSetStencilTestEnableEXT = PFN_vkCmdSetStencilTestEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetStencilTestEnableEXT" ) ); - if ( !vkCmdSetStencilTestEnable ) - vkCmdSetStencilTestEnable = 
vkCmdSetStencilTestEnableEXT; - vkCmdSetStencilOpEXT = PFN_vkCmdSetStencilOpEXT( vkGetInstanceProcAddr( instance, "vkCmdSetStencilOpEXT" ) ); if ( !vkCmdSetStencilOp ) vkCmdSetStencilOp = vkCmdSetStencilOpEXT; @@ -26549,21 +19950,7 @@ namespace VULKAN_HPP_NAMESPACE PFN_vkGetDeferredOperationMaxConcurrencyKHR( vkGetInstanceProcAddr( instance, "vkGetDeferredOperationMaxConcurrencyKHR" ) ); vkGetDeferredOperationResultKHR = PFN_vkGetDeferredOperationResultKHR( vkGetInstanceProcAddr( instance, "vkGetDeferredOperationResultKHR" ) ); vkDeferredOperationJoinKHR = PFN_vkDeferredOperationJoinKHR( vkGetInstanceProcAddr( instance, "vkDeferredOperationJoinKHR" ) ); - //=== VK_KHR_deferred_host_operations === - vkCreateDeferredOperationKHR = PFN_vkCreateDeferredOperationKHR( vkGetInstanceProcAddr( instance, "vkCreateDeferredOperationKHR" ) ); - vkDestroyDeferredOperationKHR = PFN_vkDestroyDeferredOperationKHR( vkGetInstanceProcAddr( instance, "vkDestroyDeferredOperationKHR" ) ); - vkGetDeferredOperationMaxConcurrencyKHR = - PFN_vkGetDeferredOperationMaxConcurrencyKHR( vkGetInstanceProcAddr( instance, "vkGetDeferredOperationMaxConcurrencyKHR" ) ); - vkGetDeferredOperationResultKHR = PFN_vkGetDeferredOperationResultKHR( vkGetInstanceProcAddr( instance, "vkGetDeferredOperationResultKHR" ) ); - vkDeferredOperationJoinKHR = PFN_vkDeferredOperationJoinKHR( vkGetInstanceProcAddr( instance, "vkDeferredOperationJoinKHR" ) ); - //=== VK_KHR_pipeline_executable_properties === - vkGetPipelineExecutablePropertiesKHR = - PFN_vkGetPipelineExecutablePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPipelineExecutablePropertiesKHR" ) ); - vkGetPipelineExecutableStatisticsKHR = - PFN_vkGetPipelineExecutableStatisticsKHR( vkGetInstanceProcAddr( instance, "vkGetPipelineExecutableStatisticsKHR" ) ); - vkGetPipelineExecutableInternalRepresentationsKHR = - PFN_vkGetPipelineExecutableInternalRepresentationsKHR( vkGetInstanceProcAddr( instance, "vkGetPipelineExecutableInternalRepresentationsKHR" ) ); //=== VK_KHR_pipeline_executable_properties === vkGetPipelineExecutablePropertiesKHR = PFN_vkGetPipelineExecutablePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPipelineExecutablePropertiesKHR" ) ); @@ -26573,42 +19960,33 @@ namespace VULKAN_HPP_NAMESPACE PFN_vkGetPipelineExecutableInternalRepresentationsKHR( vkGetInstanceProcAddr( instance, "vkGetPipelineExecutableInternalRepresentationsKHR" ) ); //=== VK_EXT_host_image_copy === - vkCopyMemoryToImageEXT = PFN_vkCopyMemoryToImageEXT( vkGetInstanceProcAddr( instance, "vkCopyMemoryToImageEXT" ) ); - vkCopyImageToMemoryEXT = PFN_vkCopyImageToMemoryEXT( vkGetInstanceProcAddr( instance, "vkCopyImageToMemoryEXT" ) ); - vkCopyImageToImageEXT = PFN_vkCopyImageToImageEXT( vkGetInstanceProcAddr( instance, "vkCopyImageToImageEXT" ) ); - vkTransitionImageLayoutEXT = PFN_vkTransitionImageLayoutEXT( vkGetInstanceProcAddr( instance, "vkTransitionImageLayoutEXT" ) ); + vkCopyMemoryToImageEXT = PFN_vkCopyMemoryToImageEXT( vkGetInstanceProcAddr( instance, "vkCopyMemoryToImageEXT" ) ); + if ( !vkCopyMemoryToImage ) + vkCopyMemoryToImage = vkCopyMemoryToImageEXT; + vkCopyImageToMemoryEXT = PFN_vkCopyImageToMemoryEXT( vkGetInstanceProcAddr( instance, "vkCopyImageToMemoryEXT" ) ); + if ( !vkCopyImageToMemory ) + vkCopyImageToMemory = vkCopyImageToMemoryEXT; + vkCopyImageToImageEXT = PFN_vkCopyImageToImageEXT( vkGetInstanceProcAddr( instance, "vkCopyImageToImageEXT" ) ); + if ( !vkCopyImageToImage ) + vkCopyImageToImage = vkCopyImageToImageEXT; + vkTransitionImageLayoutEXT = 
PFN_vkTransitionImageLayoutEXT( vkGetInstanceProcAddr( instance, "vkTransitionImageLayoutEXT" ) ); + if ( !vkTransitionImageLayout ) + vkTransitionImageLayout = vkTransitionImageLayoutEXT; vkGetImageSubresourceLayout2EXT = PFN_vkGetImageSubresourceLayout2EXT( vkGetInstanceProcAddr( instance, "vkGetImageSubresourceLayout2EXT" ) ); - if ( !vkGetImageSubresourceLayout2KHR ) - vkGetImageSubresourceLayout2KHR = vkGetImageSubresourceLayout2EXT; - //=== VK_EXT_host_image_copy === - vkCopyMemoryToImageEXT = PFN_vkCopyMemoryToImageEXT( vkGetInstanceProcAddr( instance, "vkCopyMemoryToImageEXT" ) ); - vkCopyImageToMemoryEXT = PFN_vkCopyImageToMemoryEXT( vkGetInstanceProcAddr( instance, "vkCopyImageToMemoryEXT" ) ); - vkCopyImageToImageEXT = PFN_vkCopyImageToImageEXT( vkGetInstanceProcAddr( instance, "vkCopyImageToImageEXT" ) ); - vkTransitionImageLayoutEXT = PFN_vkTransitionImageLayoutEXT( vkGetInstanceProcAddr( instance, "vkTransitionImageLayoutEXT" ) ); - vkGetImageSubresourceLayout2EXT = PFN_vkGetImageSubresourceLayout2EXT( vkGetInstanceProcAddr( instance, "vkGetImageSubresourceLayout2EXT" ) ); - if ( !vkGetImageSubresourceLayout2KHR ) - vkGetImageSubresourceLayout2KHR = vkGetImageSubresourceLayout2EXT; + if ( !vkGetImageSubresourceLayout2 ) + vkGetImageSubresourceLayout2 = vkGetImageSubresourceLayout2EXT; //=== VK_KHR_map_memory2 === - vkMapMemory2KHR = PFN_vkMapMemory2KHR( vkGetInstanceProcAddr( instance, "vkMapMemory2KHR" ) ); - vkUnmapMemory2KHR = PFN_vkUnmapMemory2KHR( vkGetInstanceProcAddr( instance, "vkUnmapMemory2KHR" ) ); - //=== VK_KHR_map_memory2 === - vkMapMemory2KHR = PFN_vkMapMemory2KHR( vkGetInstanceProcAddr( instance, "vkMapMemory2KHR" ) ); + vkMapMemory2KHR = PFN_vkMapMemory2KHR( vkGetInstanceProcAddr( instance, "vkMapMemory2KHR" ) ); + if ( !vkMapMemory2 ) + vkMapMemory2 = vkMapMemory2KHR; vkUnmapMemory2KHR = PFN_vkUnmapMemory2KHR( vkGetInstanceProcAddr( instance, "vkUnmapMemory2KHR" ) ); + if ( !vkUnmapMemory2 ) + vkUnmapMemory2 = vkUnmapMemory2KHR; - //=== VK_EXT_swapchain_maintenance1 === - vkReleaseSwapchainImagesEXT = PFN_vkReleaseSwapchainImagesEXT( vkGetInstanceProcAddr( instance, "vkReleaseSwapchainImagesEXT" ) ); //=== VK_EXT_swapchain_maintenance1 === vkReleaseSwapchainImagesEXT = PFN_vkReleaseSwapchainImagesEXT( vkGetInstanceProcAddr( instance, "vkReleaseSwapchainImagesEXT" ) ); - //=== VK_NV_device_generated_commands === - vkGetGeneratedCommandsMemoryRequirementsNV = - PFN_vkGetGeneratedCommandsMemoryRequirementsNV( vkGetInstanceProcAddr( instance, "vkGetGeneratedCommandsMemoryRequirementsNV" ) ); - vkCmdPreprocessGeneratedCommandsNV = PFN_vkCmdPreprocessGeneratedCommandsNV( vkGetInstanceProcAddr( instance, "vkCmdPreprocessGeneratedCommandsNV" ) ); - vkCmdExecuteGeneratedCommandsNV = PFN_vkCmdExecuteGeneratedCommandsNV( vkGetInstanceProcAddr( instance, "vkCmdExecuteGeneratedCommandsNV" ) ); - vkCmdBindPipelineShaderGroupNV = PFN_vkCmdBindPipelineShaderGroupNV( vkGetInstanceProcAddr( instance, "vkCmdBindPipelineShaderGroupNV" ) ); - vkCreateIndirectCommandsLayoutNV = PFN_vkCreateIndirectCommandsLayoutNV( vkGetInstanceProcAddr( instance, "vkCreateIndirectCommandsLayoutNV" ) ); - vkDestroyIndirectCommandsLayoutNV = PFN_vkDestroyIndirectCommandsLayoutNV( vkGetInstanceProcAddr( instance, "vkDestroyIndirectCommandsLayoutNV" ) ); //=== VK_NV_device_generated_commands === vkGetGeneratedCommandsMemoryRequirementsNV = PFN_vkGetGeneratedCommandsMemoryRequirementsNV( vkGetInstanceProcAddr( instance, "vkGetGeneratedCommandsMemoryRequirementsNV" ) ); @@ -26618,31 +19996,13 @@ 
namespace VULKAN_HPP_NAMESPACE vkCreateIndirectCommandsLayoutNV = PFN_vkCreateIndirectCommandsLayoutNV( vkGetInstanceProcAddr( instance, "vkCreateIndirectCommandsLayoutNV" ) ); vkDestroyIndirectCommandsLayoutNV = PFN_vkDestroyIndirectCommandsLayoutNV( vkGetInstanceProcAddr( instance, "vkDestroyIndirectCommandsLayoutNV" ) ); - //=== VK_EXT_depth_bias_control === - vkCmdSetDepthBias2EXT = PFN_vkCmdSetDepthBias2EXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBias2EXT" ) ); //=== VK_EXT_depth_bias_control === vkCmdSetDepthBias2EXT = PFN_vkCmdSetDepthBias2EXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBias2EXT" ) ); - //=== VK_EXT_acquire_drm_display === - vkAcquireDrmDisplayEXT = PFN_vkAcquireDrmDisplayEXT( vkGetInstanceProcAddr( instance, "vkAcquireDrmDisplayEXT" ) ); - vkGetDrmDisplayEXT = PFN_vkGetDrmDisplayEXT( vkGetInstanceProcAddr( instance, "vkGetDrmDisplayEXT" ) ); //=== VK_EXT_acquire_drm_display === vkAcquireDrmDisplayEXT = PFN_vkAcquireDrmDisplayEXT( vkGetInstanceProcAddr( instance, "vkAcquireDrmDisplayEXT" ) ); vkGetDrmDisplayEXT = PFN_vkGetDrmDisplayEXT( vkGetInstanceProcAddr( instance, "vkGetDrmDisplayEXT" ) ); - //=== VK_EXT_private_data === - vkCreatePrivateDataSlotEXT = PFN_vkCreatePrivateDataSlotEXT( vkGetInstanceProcAddr( instance, "vkCreatePrivateDataSlotEXT" ) ); - if ( !vkCreatePrivateDataSlot ) - vkCreatePrivateDataSlot = vkCreatePrivateDataSlotEXT; - vkDestroyPrivateDataSlotEXT = PFN_vkDestroyPrivateDataSlotEXT( vkGetInstanceProcAddr( instance, "vkDestroyPrivateDataSlotEXT" ) ); - if ( !vkDestroyPrivateDataSlot ) - vkDestroyPrivateDataSlot = vkDestroyPrivateDataSlotEXT; - vkSetPrivateDataEXT = PFN_vkSetPrivateDataEXT( vkGetInstanceProcAddr( instance, "vkSetPrivateDataEXT" ) ); - if ( !vkSetPrivateData ) - vkSetPrivateData = vkSetPrivateDataEXT; - vkGetPrivateDataEXT = PFN_vkGetPrivateDataEXT( vkGetInstanceProcAddr( instance, "vkGetPrivateDataEXT" ) ); - if ( !vkGetPrivateData ) - vkGetPrivateData = vkGetPrivateDataEXT; //=== VK_EXT_private_data === vkCreatePrivateDataSlotEXT = PFN_vkCreatePrivateDataSlotEXT( vkGetInstanceProcAddr( instance, "vkCreatePrivateDataSlotEXT" ) ); if ( !vkCreatePrivateDataSlot ) @@ -26663,12 +20023,6 @@ namespace VULKAN_HPP_NAMESPACE vkGetEncodedVideoSessionParametersKHR = PFN_vkGetEncodedVideoSessionParametersKHR( vkGetInstanceProcAddr( instance, "vkGetEncodedVideoSessionParametersKHR" ) ); vkCmdEncodeVideoKHR = PFN_vkCmdEncodeVideoKHR( vkGetInstanceProcAddr( instance, "vkCmdEncodeVideoKHR" ) ); - //=== VK_KHR_video_encode_queue === - vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR = PFN_vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR( - vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR" ) ); - vkGetEncodedVideoSessionParametersKHR = - PFN_vkGetEncodedVideoSessionParametersKHR( vkGetInstanceProcAddr( instance, "vkGetEncodedVideoSessionParametersKHR" ) ); - vkCmdEncodeVideoKHR = PFN_vkCmdEncodeVideoKHR( vkGetInstanceProcAddr( instance, "vkCmdEncodeVideoKHR" ) ); #if defined( VK_ENABLE_BETA_EXTENSIONS ) //=== VK_NV_cuda_kernel_launch === @@ -26678,20 +20032,11 @@ namespace VULKAN_HPP_NAMESPACE vkDestroyCudaModuleNV = PFN_vkDestroyCudaModuleNV( vkGetInstanceProcAddr( instance, "vkDestroyCudaModuleNV" ) ); vkDestroyCudaFunctionNV = PFN_vkDestroyCudaFunctionNV( vkGetInstanceProcAddr( instance, "vkDestroyCudaFunctionNV" ) ); vkCmdCudaLaunchKernelNV = PFN_vkCmdCudaLaunchKernelNV( vkGetInstanceProcAddr( instance, "vkCmdCudaLaunchKernelNV" ) ); - //=== VK_NV_cuda_kernel_launch === - 
vkCreateCudaModuleNV = PFN_vkCreateCudaModuleNV( vkGetInstanceProcAddr( instance, "vkCreateCudaModuleNV" ) ); - vkGetCudaModuleCacheNV = PFN_vkGetCudaModuleCacheNV( vkGetInstanceProcAddr( instance, "vkGetCudaModuleCacheNV" ) ); - vkCreateCudaFunctionNV = PFN_vkCreateCudaFunctionNV( vkGetInstanceProcAddr( instance, "vkCreateCudaFunctionNV" ) ); - vkDestroyCudaModuleNV = PFN_vkDestroyCudaModuleNV( vkGetInstanceProcAddr( instance, "vkDestroyCudaModuleNV" ) ); - vkDestroyCudaFunctionNV = PFN_vkDestroyCudaFunctionNV( vkGetInstanceProcAddr( instance, "vkDestroyCudaFunctionNV" ) ); - vkCmdCudaLaunchKernelNV = PFN_vkCmdCudaLaunchKernelNV( vkGetInstanceProcAddr( instance, "vkCmdCudaLaunchKernelNV" ) ); #endif /*VK_ENABLE_BETA_EXTENSIONS*/ #if defined( VK_USE_PLATFORM_METAL_EXT ) //=== VK_EXT_metal_objects === vkExportMetalObjectsEXT = PFN_vkExportMetalObjectsEXT( vkGetInstanceProcAddr( instance, "vkExportMetalObjectsEXT" ) ); - //=== VK_EXT_metal_objects === - vkExportMetalObjectsEXT = PFN_vkExportMetalObjectsEXT( vkGetInstanceProcAddr( instance, "vkExportMetalObjectsEXT" ) ); #endif /*VK_USE_PLATFORM_METAL_EXT*/ //=== VK_KHR_synchronization2 === @@ -26711,25 +20056,6 @@ namespace VULKAN_HPP_NAMESPACE if ( !vkCmdWriteTimestamp2 ) vkCmdWriteTimestamp2 = vkCmdWriteTimestamp2KHR; vkQueueSubmit2KHR = PFN_vkQueueSubmit2KHR( vkGetInstanceProcAddr( instance, "vkQueueSubmit2KHR" ) ); - if ( !vkQueueSubmit2 ) - vkQueueSubmit2 = vkQueueSubmit2KHR; - //=== VK_KHR_synchronization2 === - vkCmdSetEvent2KHR = PFN_vkCmdSetEvent2KHR( vkGetInstanceProcAddr( instance, "vkCmdSetEvent2KHR" ) ); - if ( !vkCmdSetEvent2 ) - vkCmdSetEvent2 = vkCmdSetEvent2KHR; - vkCmdResetEvent2KHR = PFN_vkCmdResetEvent2KHR( vkGetInstanceProcAddr( instance, "vkCmdResetEvent2KHR" ) ); - if ( !vkCmdResetEvent2 ) - vkCmdResetEvent2 = vkCmdResetEvent2KHR; - vkCmdWaitEvents2KHR = PFN_vkCmdWaitEvents2KHR( vkGetInstanceProcAddr( instance, "vkCmdWaitEvents2KHR" ) ); - if ( !vkCmdWaitEvents2 ) - vkCmdWaitEvents2 = vkCmdWaitEvents2KHR; - vkCmdPipelineBarrier2KHR = PFN_vkCmdPipelineBarrier2KHR( vkGetInstanceProcAddr( instance, "vkCmdPipelineBarrier2KHR" ) ); - if ( !vkCmdPipelineBarrier2 ) - vkCmdPipelineBarrier2 = vkCmdPipelineBarrier2KHR; - vkCmdWriteTimestamp2KHR = PFN_vkCmdWriteTimestamp2KHR( vkGetInstanceProcAddr( instance, "vkCmdWriteTimestamp2KHR" ) ); - if ( !vkCmdWriteTimestamp2 ) - vkCmdWriteTimestamp2 = vkCmdWriteTimestamp2KHR; - vkQueueSubmit2KHR = PFN_vkQueueSubmit2KHR( vkGetInstanceProcAddr( instance, "vkQueueSubmit2KHR" ) ); if ( !vkQueueSubmit2 ) vkQueueSubmit2 = vkQueueSubmit2KHR; @@ -26740,25 +20066,6 @@ namespace VULKAN_HPP_NAMESPACE vkGetDescriptorEXT = PFN_vkGetDescriptorEXT( vkGetInstanceProcAddr( instance, "vkGetDescriptorEXT" ) ); vkCmdBindDescriptorBuffersEXT = PFN_vkCmdBindDescriptorBuffersEXT( vkGetInstanceProcAddr( instance, "vkCmdBindDescriptorBuffersEXT" ) ); vkCmdSetDescriptorBufferOffsetsEXT = PFN_vkCmdSetDescriptorBufferOffsetsEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDescriptorBufferOffsetsEXT" ) ); - vkCmdBindDescriptorBufferEmbeddedSamplersEXT = - PFN_vkCmdBindDescriptorBufferEmbeddedSamplersEXT( vkGetInstanceProcAddr( instance, "vkCmdBindDescriptorBufferEmbeddedSamplersEXT" ) ); - vkGetBufferOpaqueCaptureDescriptorDataEXT = - PFN_vkGetBufferOpaqueCaptureDescriptorDataEXT( vkGetInstanceProcAddr( instance, "vkGetBufferOpaqueCaptureDescriptorDataEXT" ) ); - vkGetImageOpaqueCaptureDescriptorDataEXT = - PFN_vkGetImageOpaqueCaptureDescriptorDataEXT( vkGetInstanceProcAddr( instance, 
"vkGetImageOpaqueCaptureDescriptorDataEXT" ) ); - vkGetImageViewOpaqueCaptureDescriptorDataEXT = - PFN_vkGetImageViewOpaqueCaptureDescriptorDataEXT( vkGetInstanceProcAddr( instance, "vkGetImageViewOpaqueCaptureDescriptorDataEXT" ) ); - vkGetSamplerOpaqueCaptureDescriptorDataEXT = - PFN_vkGetSamplerOpaqueCaptureDescriptorDataEXT( vkGetInstanceProcAddr( instance, "vkGetSamplerOpaqueCaptureDescriptorDataEXT" ) ); - vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT = PFN_vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT( - vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT" ) ); - //=== VK_EXT_descriptor_buffer === - vkGetDescriptorSetLayoutSizeEXT = PFN_vkGetDescriptorSetLayoutSizeEXT( vkGetInstanceProcAddr( instance, "vkGetDescriptorSetLayoutSizeEXT" ) ); - vkGetDescriptorSetLayoutBindingOffsetEXT = - PFN_vkGetDescriptorSetLayoutBindingOffsetEXT( vkGetInstanceProcAddr( instance, "vkGetDescriptorSetLayoutBindingOffsetEXT" ) ); - vkGetDescriptorEXT = PFN_vkGetDescriptorEXT( vkGetInstanceProcAddr( instance, "vkGetDescriptorEXT" ) ); - vkCmdBindDescriptorBuffersEXT = PFN_vkCmdBindDescriptorBuffersEXT( vkGetInstanceProcAddr( instance, "vkCmdBindDescriptorBuffersEXT" ) ); - vkCmdSetDescriptorBufferOffsetsEXT = PFN_vkCmdSetDescriptorBufferOffsetsEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDescriptorBufferOffsetsEXT" ) ); vkCmdBindDescriptorBufferEmbeddedSamplersEXT = PFN_vkCmdBindDescriptorBufferEmbeddedSamplersEXT( vkGetInstanceProcAddr( instance, "vkCmdBindDescriptorBufferEmbeddedSamplersEXT" ) ); vkGetBufferOpaqueCaptureDescriptorDataEXT = @@ -26774,37 +20081,12 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_NV_fragment_shading_rate_enums === vkCmdSetFragmentShadingRateEnumNV = PFN_vkCmdSetFragmentShadingRateEnumNV( vkGetInstanceProcAddr( instance, "vkCmdSetFragmentShadingRateEnumNV" ) ); - //=== VK_NV_fragment_shading_rate_enums === - vkCmdSetFragmentShadingRateEnumNV = PFN_vkCmdSetFragmentShadingRateEnumNV( vkGetInstanceProcAddr( instance, "vkCmdSetFragmentShadingRateEnumNV" ) ); - //=== VK_EXT_mesh_shader === - vkCmdDrawMeshTasksEXT = PFN_vkCmdDrawMeshTasksEXT( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksEXT" ) ); - vkCmdDrawMeshTasksIndirectEXT = PFN_vkCmdDrawMeshTasksIndirectEXT( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksIndirectEXT" ) ); - vkCmdDrawMeshTasksIndirectCountEXT = PFN_vkCmdDrawMeshTasksIndirectCountEXT( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksIndirectCountEXT" ) ); //=== VK_EXT_mesh_shader === vkCmdDrawMeshTasksEXT = PFN_vkCmdDrawMeshTasksEXT( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksEXT" ) ); vkCmdDrawMeshTasksIndirectEXT = PFN_vkCmdDrawMeshTasksIndirectEXT( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksIndirectEXT" ) ); vkCmdDrawMeshTasksIndirectCountEXT = PFN_vkCmdDrawMeshTasksIndirectCountEXT( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksIndirectCountEXT" ) ); - //=== VK_KHR_copy_commands2 === - vkCmdCopyBuffer2KHR = PFN_vkCmdCopyBuffer2KHR( vkGetInstanceProcAddr( instance, "vkCmdCopyBuffer2KHR" ) ); - if ( !vkCmdCopyBuffer2 ) - vkCmdCopyBuffer2 = vkCmdCopyBuffer2KHR; - vkCmdCopyImage2KHR = PFN_vkCmdCopyImage2KHR( vkGetInstanceProcAddr( instance, "vkCmdCopyImage2KHR" ) ); - if ( !vkCmdCopyImage2 ) - vkCmdCopyImage2 = vkCmdCopyImage2KHR; - vkCmdCopyBufferToImage2KHR = PFN_vkCmdCopyBufferToImage2KHR( vkGetInstanceProcAddr( instance, "vkCmdCopyBufferToImage2KHR" ) ); - if ( !vkCmdCopyBufferToImage2 ) - vkCmdCopyBufferToImage2 = vkCmdCopyBufferToImage2KHR; - 
vkCmdCopyImageToBuffer2KHR = PFN_vkCmdCopyImageToBuffer2KHR( vkGetInstanceProcAddr( instance, "vkCmdCopyImageToBuffer2KHR" ) ); - if ( !vkCmdCopyImageToBuffer2 ) - vkCmdCopyImageToBuffer2 = vkCmdCopyImageToBuffer2KHR; - vkCmdBlitImage2KHR = PFN_vkCmdBlitImage2KHR( vkGetInstanceProcAddr( instance, "vkCmdBlitImage2KHR" ) ); - if ( !vkCmdBlitImage2 ) - vkCmdBlitImage2 = vkCmdBlitImage2KHR; - vkCmdResolveImage2KHR = PFN_vkCmdResolveImage2KHR( vkGetInstanceProcAddr( instance, "vkCmdResolveImage2KHR" ) ); - if ( !vkCmdResolveImage2 ) - vkCmdResolveImage2 = vkCmdResolveImage2KHR; //=== VK_KHR_copy_commands2 === vkCmdCopyBuffer2KHR = PFN_vkCmdCopyBuffer2KHR( vkGetInstanceProcAddr( instance, "vkCmdCopyBuffer2KHR" ) ); if ( !vkCmdCopyBuffer2 ) @@ -26827,16 +20109,11 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_EXT_device_fault === vkGetDeviceFaultInfoEXT = PFN_vkGetDeviceFaultInfoEXT( vkGetInstanceProcAddr( instance, "vkGetDeviceFaultInfoEXT" ) ); - //=== VK_EXT_device_fault === - vkGetDeviceFaultInfoEXT = PFN_vkGetDeviceFaultInfoEXT( vkGetInstanceProcAddr( instance, "vkGetDeviceFaultInfoEXT" ) ); #if defined( VK_USE_PLATFORM_WIN32_KHR ) //=== VK_NV_acquire_winrt_display === vkAcquireWinrtDisplayNV = PFN_vkAcquireWinrtDisplayNV( vkGetInstanceProcAddr( instance, "vkAcquireWinrtDisplayNV" ) ); vkGetWinrtDisplayNV = PFN_vkGetWinrtDisplayNV( vkGetInstanceProcAddr( instance, "vkGetWinrtDisplayNV" ) ); - //=== VK_NV_acquire_winrt_display === - vkAcquireWinrtDisplayNV = PFN_vkAcquireWinrtDisplayNV( vkGetInstanceProcAddr( instance, "vkAcquireWinrtDisplayNV" ) ); - vkGetWinrtDisplayNV = PFN_vkGetWinrtDisplayNV( vkGetInstanceProcAddr( instance, "vkGetWinrtDisplayNV" ) ); #endif /*VK_USE_PLATFORM_WIN32_KHR*/ #if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) @@ -26844,26 +20121,16 @@ namespace VULKAN_HPP_NAMESPACE vkCreateDirectFBSurfaceEXT = PFN_vkCreateDirectFBSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateDirectFBSurfaceEXT" ) ); vkGetPhysicalDeviceDirectFBPresentationSupportEXT = PFN_vkGetPhysicalDeviceDirectFBPresentationSupportEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDirectFBPresentationSupportEXT" ) ); - //=== VK_EXT_directfb_surface === - vkCreateDirectFBSurfaceEXT = PFN_vkCreateDirectFBSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateDirectFBSurfaceEXT" ) ); - vkGetPhysicalDeviceDirectFBPresentationSupportEXT = - PFN_vkGetPhysicalDeviceDirectFBPresentationSupportEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDirectFBPresentationSupportEXT" ) ); #endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ //=== VK_EXT_vertex_input_dynamic_state === vkCmdSetVertexInputEXT = PFN_vkCmdSetVertexInputEXT( vkGetInstanceProcAddr( instance, "vkCmdSetVertexInputEXT" ) ); - //=== VK_EXT_vertex_input_dynamic_state === - vkCmdSetVertexInputEXT = PFN_vkCmdSetVertexInputEXT( vkGetInstanceProcAddr( instance, "vkCmdSetVertexInputEXT" ) ); #if defined( VK_USE_PLATFORM_FUCHSIA ) //=== VK_FUCHSIA_external_memory === vkGetMemoryZirconHandleFUCHSIA = PFN_vkGetMemoryZirconHandleFUCHSIA( vkGetInstanceProcAddr( instance, "vkGetMemoryZirconHandleFUCHSIA" ) ); vkGetMemoryZirconHandlePropertiesFUCHSIA = PFN_vkGetMemoryZirconHandlePropertiesFUCHSIA( vkGetInstanceProcAddr( instance, "vkGetMemoryZirconHandlePropertiesFUCHSIA" ) ); - //=== VK_FUCHSIA_external_memory === - vkGetMemoryZirconHandleFUCHSIA = PFN_vkGetMemoryZirconHandleFUCHSIA( vkGetInstanceProcAddr( instance, "vkGetMemoryZirconHandleFUCHSIA" ) ); - vkGetMemoryZirconHandlePropertiesFUCHSIA = - PFN_vkGetMemoryZirconHandlePropertiesFUCHSIA( vkGetInstanceProcAddr( 
instance, "vkGetMemoryZirconHandlePropertiesFUCHSIA" ) ); #endif /*VK_USE_PLATFORM_FUCHSIA*/ #if defined( VK_USE_PLATFORM_FUCHSIA ) @@ -26871,10 +20138,6 @@ namespace VULKAN_HPP_NAMESPACE vkImportSemaphoreZirconHandleFUCHSIA = PFN_vkImportSemaphoreZirconHandleFUCHSIA( vkGetInstanceProcAddr( instance, "vkImportSemaphoreZirconHandleFUCHSIA" ) ); vkGetSemaphoreZirconHandleFUCHSIA = PFN_vkGetSemaphoreZirconHandleFUCHSIA( vkGetInstanceProcAddr( instance, "vkGetSemaphoreZirconHandleFUCHSIA" ) ); - //=== VK_FUCHSIA_external_semaphore === - vkImportSemaphoreZirconHandleFUCHSIA = - PFN_vkImportSemaphoreZirconHandleFUCHSIA( vkGetInstanceProcAddr( instance, "vkImportSemaphoreZirconHandleFUCHSIA" ) ); - vkGetSemaphoreZirconHandleFUCHSIA = PFN_vkGetSemaphoreZirconHandleFUCHSIA( vkGetInstanceProcAddr( instance, "vkGetSemaphoreZirconHandleFUCHSIA" ) ); #endif /*VK_USE_PLATFORM_FUCHSIA*/ #if defined( VK_USE_PLATFORM_FUCHSIA ) @@ -26887,38 +20150,19 @@ namespace VULKAN_HPP_NAMESPACE vkDestroyBufferCollectionFUCHSIA = PFN_vkDestroyBufferCollectionFUCHSIA( vkGetInstanceProcAddr( instance, "vkDestroyBufferCollectionFUCHSIA" ) ); vkGetBufferCollectionPropertiesFUCHSIA = PFN_vkGetBufferCollectionPropertiesFUCHSIA( vkGetInstanceProcAddr( instance, "vkGetBufferCollectionPropertiesFUCHSIA" ) ); - //=== VK_FUCHSIA_buffer_collection === - vkCreateBufferCollectionFUCHSIA = PFN_vkCreateBufferCollectionFUCHSIA( vkGetInstanceProcAddr( instance, "vkCreateBufferCollectionFUCHSIA" ) ); - vkSetBufferCollectionImageConstraintsFUCHSIA = - PFN_vkSetBufferCollectionImageConstraintsFUCHSIA( vkGetInstanceProcAddr( instance, "vkSetBufferCollectionImageConstraintsFUCHSIA" ) ); - vkSetBufferCollectionBufferConstraintsFUCHSIA = - PFN_vkSetBufferCollectionBufferConstraintsFUCHSIA( vkGetInstanceProcAddr( instance, "vkSetBufferCollectionBufferConstraintsFUCHSIA" ) ); - vkDestroyBufferCollectionFUCHSIA = PFN_vkDestroyBufferCollectionFUCHSIA( vkGetInstanceProcAddr( instance, "vkDestroyBufferCollectionFUCHSIA" ) ); - vkGetBufferCollectionPropertiesFUCHSIA = - PFN_vkGetBufferCollectionPropertiesFUCHSIA( vkGetInstanceProcAddr( instance, "vkGetBufferCollectionPropertiesFUCHSIA" ) ); #endif /*VK_USE_PLATFORM_FUCHSIA*/ //=== VK_HUAWEI_subpass_shading === vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI = PFN_vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI( vkGetInstanceProcAddr( instance, "vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI" ) ); vkCmdSubpassShadingHUAWEI = PFN_vkCmdSubpassShadingHUAWEI( vkGetInstanceProcAddr( instance, "vkCmdSubpassShadingHUAWEI" ) ); - //=== VK_HUAWEI_subpass_shading === - vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI = - PFN_vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI( vkGetInstanceProcAddr( instance, "vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI" ) ); - vkCmdSubpassShadingHUAWEI = PFN_vkCmdSubpassShadingHUAWEI( vkGetInstanceProcAddr( instance, "vkCmdSubpassShadingHUAWEI" ) ); - //=== VK_HUAWEI_invocation_mask === - vkCmdBindInvocationMaskHUAWEI = PFN_vkCmdBindInvocationMaskHUAWEI( vkGetInstanceProcAddr( instance, "vkCmdBindInvocationMaskHUAWEI" ) ); //=== VK_HUAWEI_invocation_mask === vkCmdBindInvocationMaskHUAWEI = PFN_vkCmdBindInvocationMaskHUAWEI( vkGetInstanceProcAddr( instance, "vkCmdBindInvocationMaskHUAWEI" ) ); - //=== VK_NV_external_memory_rdma === - vkGetMemoryRemoteAddressNV = PFN_vkGetMemoryRemoteAddressNV( vkGetInstanceProcAddr( instance, "vkGetMemoryRemoteAddressNV" ) ); //=== VK_NV_external_memory_rdma === vkGetMemoryRemoteAddressNV = PFN_vkGetMemoryRemoteAddressNV( vkGetInstanceProcAddr( 
instance, "vkGetMemoryRemoteAddressNV" ) ); - //=== VK_EXT_pipeline_properties === - vkGetPipelinePropertiesEXT = PFN_vkGetPipelinePropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetPipelinePropertiesEXT" ) ); //=== VK_EXT_pipeline_properties === vkGetPipelinePropertiesEXT = PFN_vkGetPipelinePropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetPipelinePropertiesEXT" ) ); @@ -26934,63 +20178,24 @@ namespace VULKAN_HPP_NAMESPACE vkCmdSetPrimitiveRestartEnableEXT = PFN_vkCmdSetPrimitiveRestartEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetPrimitiveRestartEnableEXT" ) ); if ( !vkCmdSetPrimitiveRestartEnable ) vkCmdSetPrimitiveRestartEnable = vkCmdSetPrimitiveRestartEnableEXT; - //=== VK_EXT_extended_dynamic_state2 === - vkCmdSetPatchControlPointsEXT = PFN_vkCmdSetPatchControlPointsEXT( vkGetInstanceProcAddr( instance, "vkCmdSetPatchControlPointsEXT" ) ); - vkCmdSetRasterizerDiscardEnableEXT = PFN_vkCmdSetRasterizerDiscardEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetRasterizerDiscardEnableEXT" ) ); - if ( !vkCmdSetRasterizerDiscardEnable ) - vkCmdSetRasterizerDiscardEnable = vkCmdSetRasterizerDiscardEnableEXT; - vkCmdSetDepthBiasEnableEXT = PFN_vkCmdSetDepthBiasEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBiasEnableEXT" ) ); - if ( !vkCmdSetDepthBiasEnable ) - vkCmdSetDepthBiasEnable = vkCmdSetDepthBiasEnableEXT; - vkCmdSetLogicOpEXT = PFN_vkCmdSetLogicOpEXT( vkGetInstanceProcAddr( instance, "vkCmdSetLogicOpEXT" ) ); - vkCmdSetPrimitiveRestartEnableEXT = PFN_vkCmdSetPrimitiveRestartEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetPrimitiveRestartEnableEXT" ) ); - if ( !vkCmdSetPrimitiveRestartEnable ) - vkCmdSetPrimitiveRestartEnable = vkCmdSetPrimitiveRestartEnableEXT; #if defined( VK_USE_PLATFORM_SCREEN_QNX ) //=== VK_QNX_screen_surface === vkCreateScreenSurfaceQNX = PFN_vkCreateScreenSurfaceQNX( vkGetInstanceProcAddr( instance, "vkCreateScreenSurfaceQNX" ) ); vkGetPhysicalDeviceScreenPresentationSupportQNX = PFN_vkGetPhysicalDeviceScreenPresentationSupportQNX( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceScreenPresentationSupportQNX" ) ); - //=== VK_QNX_screen_surface === - vkCreateScreenSurfaceQNX = PFN_vkCreateScreenSurfaceQNX( vkGetInstanceProcAddr( instance, "vkCreateScreenSurfaceQNX" ) ); - vkGetPhysicalDeviceScreenPresentationSupportQNX = - PFN_vkGetPhysicalDeviceScreenPresentationSupportQNX( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceScreenPresentationSupportQNX" ) ); #endif /*VK_USE_PLATFORM_SCREEN_QNX*/ //=== VK_EXT_color_write_enable === vkCmdSetColorWriteEnableEXT = PFN_vkCmdSetColorWriteEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetColorWriteEnableEXT" ) ); - //=== VK_EXT_color_write_enable === - vkCmdSetColorWriteEnableEXT = PFN_vkCmdSetColorWriteEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetColorWriteEnableEXT" ) ); - //=== VK_KHR_ray_tracing_maintenance1 === - vkCmdTraceRaysIndirect2KHR = PFN_vkCmdTraceRaysIndirect2KHR( vkGetInstanceProcAddr( instance, "vkCmdTraceRaysIndirect2KHR" ) ); //=== VK_KHR_ray_tracing_maintenance1 === vkCmdTraceRaysIndirect2KHR = PFN_vkCmdTraceRaysIndirect2KHR( vkGetInstanceProcAddr( instance, "vkCmdTraceRaysIndirect2KHR" ) ); - //=== VK_EXT_multi_draw === - vkCmdDrawMultiEXT = PFN_vkCmdDrawMultiEXT( vkGetInstanceProcAddr( instance, "vkCmdDrawMultiEXT" ) ); - vkCmdDrawMultiIndexedEXT = PFN_vkCmdDrawMultiIndexedEXT( vkGetInstanceProcAddr( instance, "vkCmdDrawMultiIndexedEXT" ) ); //=== VK_EXT_multi_draw === vkCmdDrawMultiEXT = PFN_vkCmdDrawMultiEXT( vkGetInstanceProcAddr( instance, 
"vkCmdDrawMultiEXT" ) ); vkCmdDrawMultiIndexedEXT = PFN_vkCmdDrawMultiIndexedEXT( vkGetInstanceProcAddr( instance, "vkCmdDrawMultiIndexedEXT" ) ); - //=== VK_EXT_opacity_micromap === - vkCreateMicromapEXT = PFN_vkCreateMicromapEXT( vkGetInstanceProcAddr( instance, "vkCreateMicromapEXT" ) ); - vkDestroyMicromapEXT = PFN_vkDestroyMicromapEXT( vkGetInstanceProcAddr( instance, "vkDestroyMicromapEXT" ) ); - vkCmdBuildMicromapsEXT = PFN_vkCmdBuildMicromapsEXT( vkGetInstanceProcAddr( instance, "vkCmdBuildMicromapsEXT" ) ); - vkBuildMicromapsEXT = PFN_vkBuildMicromapsEXT( vkGetInstanceProcAddr( instance, "vkBuildMicromapsEXT" ) ); - vkCopyMicromapEXT = PFN_vkCopyMicromapEXT( vkGetInstanceProcAddr( instance, "vkCopyMicromapEXT" ) ); - vkCopyMicromapToMemoryEXT = PFN_vkCopyMicromapToMemoryEXT( vkGetInstanceProcAddr( instance, "vkCopyMicromapToMemoryEXT" ) ); - vkCopyMemoryToMicromapEXT = PFN_vkCopyMemoryToMicromapEXT( vkGetInstanceProcAddr( instance, "vkCopyMemoryToMicromapEXT" ) ); - vkWriteMicromapsPropertiesEXT = PFN_vkWriteMicromapsPropertiesEXT( vkGetInstanceProcAddr( instance, "vkWriteMicromapsPropertiesEXT" ) ); - vkCmdCopyMicromapEXT = PFN_vkCmdCopyMicromapEXT( vkGetInstanceProcAddr( instance, "vkCmdCopyMicromapEXT" ) ); - vkCmdCopyMicromapToMemoryEXT = PFN_vkCmdCopyMicromapToMemoryEXT( vkGetInstanceProcAddr( instance, "vkCmdCopyMicromapToMemoryEXT" ) ); - vkCmdCopyMemoryToMicromapEXT = PFN_vkCmdCopyMemoryToMicromapEXT( vkGetInstanceProcAddr( instance, "vkCmdCopyMemoryToMicromapEXT" ) ); - vkCmdWriteMicromapsPropertiesEXT = PFN_vkCmdWriteMicromapsPropertiesEXT( vkGetInstanceProcAddr( instance, "vkCmdWriteMicromapsPropertiesEXT" ) ); - vkGetDeviceMicromapCompatibilityEXT = - PFN_vkGetDeviceMicromapCompatibilityEXT( vkGetInstanceProcAddr( instance, "vkGetDeviceMicromapCompatibilityEXT" ) ); - vkGetMicromapBuildSizesEXT = PFN_vkGetMicromapBuildSizesEXT( vkGetInstanceProcAddr( instance, "vkGetMicromapBuildSizesEXT" ) ); //=== VK_EXT_opacity_micromap === vkCreateMicromapEXT = PFN_vkCreateMicromapEXT( vkGetInstanceProcAddr( instance, "vkCreateMicromapEXT" ) ); vkDestroyMicromapEXT = PFN_vkDestroyMicromapEXT( vkGetInstanceProcAddr( instance, "vkDestroyMicromapEXT" ) ); @@ -27008,31 +20213,13 @@ namespace VULKAN_HPP_NAMESPACE PFN_vkGetDeviceMicromapCompatibilityEXT( vkGetInstanceProcAddr( instance, "vkGetDeviceMicromapCompatibilityEXT" ) ); vkGetMicromapBuildSizesEXT = PFN_vkGetMicromapBuildSizesEXT( vkGetInstanceProcAddr( instance, "vkGetMicromapBuildSizesEXT" ) ); - //=== VK_HUAWEI_cluster_culling_shader === - vkCmdDrawClusterHUAWEI = PFN_vkCmdDrawClusterHUAWEI( vkGetInstanceProcAddr( instance, "vkCmdDrawClusterHUAWEI" ) ); - vkCmdDrawClusterIndirectHUAWEI = PFN_vkCmdDrawClusterIndirectHUAWEI( vkGetInstanceProcAddr( instance, "vkCmdDrawClusterIndirectHUAWEI" ) ); //=== VK_HUAWEI_cluster_culling_shader === vkCmdDrawClusterHUAWEI = PFN_vkCmdDrawClusterHUAWEI( vkGetInstanceProcAddr( instance, "vkCmdDrawClusterHUAWEI" ) ); vkCmdDrawClusterIndirectHUAWEI = PFN_vkCmdDrawClusterIndirectHUAWEI( vkGetInstanceProcAddr( instance, "vkCmdDrawClusterIndirectHUAWEI" ) ); - //=== VK_EXT_pageable_device_local_memory === - vkSetDeviceMemoryPriorityEXT = PFN_vkSetDeviceMemoryPriorityEXT( vkGetInstanceProcAddr( instance, "vkSetDeviceMemoryPriorityEXT" ) ); //=== VK_EXT_pageable_device_local_memory === vkSetDeviceMemoryPriorityEXT = PFN_vkSetDeviceMemoryPriorityEXT( vkGetInstanceProcAddr( instance, "vkSetDeviceMemoryPriorityEXT" ) ); - //=== VK_KHR_maintenance4 === - vkGetDeviceBufferMemoryRequirementsKHR = - 
PFN_vkGetDeviceBufferMemoryRequirementsKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceBufferMemoryRequirementsKHR" ) ); - if ( !vkGetDeviceBufferMemoryRequirements ) - vkGetDeviceBufferMemoryRequirements = vkGetDeviceBufferMemoryRequirementsKHR; - vkGetDeviceImageMemoryRequirementsKHR = - PFN_vkGetDeviceImageMemoryRequirementsKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceImageMemoryRequirementsKHR" ) ); - if ( !vkGetDeviceImageMemoryRequirements ) - vkGetDeviceImageMemoryRequirements = vkGetDeviceImageMemoryRequirementsKHR; - vkGetDeviceImageSparseMemoryRequirementsKHR = - PFN_vkGetDeviceImageSparseMemoryRequirementsKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceImageSparseMemoryRequirementsKHR" ) ); - if ( !vkGetDeviceImageSparseMemoryRequirements ) - vkGetDeviceImageSparseMemoryRequirements = vkGetDeviceImageSparseMemoryRequirementsKHR; //=== VK_KHR_maintenance4 === vkGetDeviceBufferMemoryRequirementsKHR = PFN_vkGetDeviceBufferMemoryRequirementsKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceBufferMemoryRequirementsKHR" ) ); @@ -27051,35 +20238,17 @@ namespace VULKAN_HPP_NAMESPACE vkGetDescriptorSetLayoutHostMappingInfoVALVE = PFN_vkGetDescriptorSetLayoutHostMappingInfoVALVE( vkGetInstanceProcAddr( instance, "vkGetDescriptorSetLayoutHostMappingInfoVALVE" ) ); vkGetDescriptorSetHostMappingVALVE = PFN_vkGetDescriptorSetHostMappingVALVE( vkGetInstanceProcAddr( instance, "vkGetDescriptorSetHostMappingVALVE" ) ); - //=== VK_VALVE_descriptor_set_host_mapping === - vkGetDescriptorSetLayoutHostMappingInfoVALVE = - PFN_vkGetDescriptorSetLayoutHostMappingInfoVALVE( vkGetInstanceProcAddr( instance, "vkGetDescriptorSetLayoutHostMappingInfoVALVE" ) ); - vkGetDescriptorSetHostMappingVALVE = PFN_vkGetDescriptorSetHostMappingVALVE( vkGetInstanceProcAddr( instance, "vkGetDescriptorSetHostMappingVALVE" ) ); - //=== VK_NV_copy_memory_indirect === - vkCmdCopyMemoryIndirectNV = PFN_vkCmdCopyMemoryIndirectNV( vkGetInstanceProcAddr( instance, "vkCmdCopyMemoryIndirectNV" ) ); - vkCmdCopyMemoryToImageIndirectNV = PFN_vkCmdCopyMemoryToImageIndirectNV( vkGetInstanceProcAddr( instance, "vkCmdCopyMemoryToImageIndirectNV" ) ); //=== VK_NV_copy_memory_indirect === vkCmdCopyMemoryIndirectNV = PFN_vkCmdCopyMemoryIndirectNV( vkGetInstanceProcAddr( instance, "vkCmdCopyMemoryIndirectNV" ) ); vkCmdCopyMemoryToImageIndirectNV = PFN_vkCmdCopyMemoryToImageIndirectNV( vkGetInstanceProcAddr( instance, "vkCmdCopyMemoryToImageIndirectNV" ) ); - //=== VK_NV_memory_decompression === - vkCmdDecompressMemoryNV = PFN_vkCmdDecompressMemoryNV( vkGetInstanceProcAddr( instance, "vkCmdDecompressMemoryNV" ) ); - vkCmdDecompressMemoryIndirectCountNV = - PFN_vkCmdDecompressMemoryIndirectCountNV( vkGetInstanceProcAddr( instance, "vkCmdDecompressMemoryIndirectCountNV" ) ); //=== VK_NV_memory_decompression === vkCmdDecompressMemoryNV = PFN_vkCmdDecompressMemoryNV( vkGetInstanceProcAddr( instance, "vkCmdDecompressMemoryNV" ) ); vkCmdDecompressMemoryIndirectCountNV = PFN_vkCmdDecompressMemoryIndirectCountNV( vkGetInstanceProcAddr( instance, "vkCmdDecompressMemoryIndirectCountNV" ) ); //=== VK_NV_device_generated_commands_compute === - vkGetPipelineIndirectMemoryRequirementsNV = - PFN_vkGetPipelineIndirectMemoryRequirementsNV( vkGetInstanceProcAddr( instance, "vkGetPipelineIndirectMemoryRequirementsNV" ) ); - vkCmdUpdatePipelineIndirectBufferNV = - PFN_vkCmdUpdatePipelineIndirectBufferNV( vkGetInstanceProcAddr( instance, "vkCmdUpdatePipelineIndirectBufferNV" ) ); - vkGetPipelineIndirectDeviceAddressNV = - 
PFN_vkGetPipelineIndirectDeviceAddressNV( vkGetInstanceProcAddr( instance, "vkGetPipelineIndirectDeviceAddressNV" ) ); - //=== VK_NV_device_generated_commands_compute === vkGetPipelineIndirectMemoryRequirementsNV = PFN_vkGetPipelineIndirectMemoryRequirementsNV( vkGetInstanceProcAddr( instance, "vkGetPipelineIndirectMemoryRequirementsNV" ) ); vkCmdUpdatePipelineIndirectBufferNV = @@ -27125,49 +20294,7 @@ namespace VULKAN_HPP_NAMESPACE vkCmdSetRepresentativeFragmentTestEnableNV = PFN_vkCmdSetRepresentativeFragmentTestEnableNV( vkGetInstanceProcAddr( instance, "vkCmdSetRepresentativeFragmentTestEnableNV" ) ); vkCmdSetCoverageReductionModeNV = PFN_vkCmdSetCoverageReductionModeNV( vkGetInstanceProcAddr( instance, "vkCmdSetCoverageReductionModeNV" ) ); - //=== VK_EXT_extended_dynamic_state3 === - vkCmdSetDepthClampEnableEXT = PFN_vkCmdSetDepthClampEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthClampEnableEXT" ) ); - vkCmdSetPolygonModeEXT = PFN_vkCmdSetPolygonModeEXT( vkGetInstanceProcAddr( instance, "vkCmdSetPolygonModeEXT" ) ); - vkCmdSetRasterizationSamplesEXT = PFN_vkCmdSetRasterizationSamplesEXT( vkGetInstanceProcAddr( instance, "vkCmdSetRasterizationSamplesEXT" ) ); - vkCmdSetSampleMaskEXT = PFN_vkCmdSetSampleMaskEXT( vkGetInstanceProcAddr( instance, "vkCmdSetSampleMaskEXT" ) ); - vkCmdSetAlphaToCoverageEnableEXT = PFN_vkCmdSetAlphaToCoverageEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetAlphaToCoverageEnableEXT" ) ); - vkCmdSetAlphaToOneEnableEXT = PFN_vkCmdSetAlphaToOneEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetAlphaToOneEnableEXT" ) ); - vkCmdSetLogicOpEnableEXT = PFN_vkCmdSetLogicOpEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetLogicOpEnableEXT" ) ); - vkCmdSetColorBlendEnableEXT = PFN_vkCmdSetColorBlendEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetColorBlendEnableEXT" ) ); - vkCmdSetColorBlendEquationEXT = PFN_vkCmdSetColorBlendEquationEXT( vkGetInstanceProcAddr( instance, "vkCmdSetColorBlendEquationEXT" ) ); - vkCmdSetColorWriteMaskEXT = PFN_vkCmdSetColorWriteMaskEXT( vkGetInstanceProcAddr( instance, "vkCmdSetColorWriteMaskEXT" ) ); - vkCmdSetTessellationDomainOriginEXT = - PFN_vkCmdSetTessellationDomainOriginEXT( vkGetInstanceProcAddr( instance, "vkCmdSetTessellationDomainOriginEXT" ) ); - vkCmdSetRasterizationStreamEXT = PFN_vkCmdSetRasterizationStreamEXT( vkGetInstanceProcAddr( instance, "vkCmdSetRasterizationStreamEXT" ) ); - vkCmdSetConservativeRasterizationModeEXT = - PFN_vkCmdSetConservativeRasterizationModeEXT( vkGetInstanceProcAddr( instance, "vkCmdSetConservativeRasterizationModeEXT" ) ); - vkCmdSetExtraPrimitiveOverestimationSizeEXT = - PFN_vkCmdSetExtraPrimitiveOverestimationSizeEXT( vkGetInstanceProcAddr( instance, "vkCmdSetExtraPrimitiveOverestimationSizeEXT" ) ); - vkCmdSetDepthClipEnableEXT = PFN_vkCmdSetDepthClipEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthClipEnableEXT" ) ); - vkCmdSetSampleLocationsEnableEXT = PFN_vkCmdSetSampleLocationsEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetSampleLocationsEnableEXT" ) ); - vkCmdSetColorBlendAdvancedEXT = PFN_vkCmdSetColorBlendAdvancedEXT( vkGetInstanceProcAddr( instance, "vkCmdSetColorBlendAdvancedEXT" ) ); - vkCmdSetProvokingVertexModeEXT = PFN_vkCmdSetProvokingVertexModeEXT( vkGetInstanceProcAddr( instance, "vkCmdSetProvokingVertexModeEXT" ) ); - vkCmdSetLineRasterizationModeEXT = PFN_vkCmdSetLineRasterizationModeEXT( vkGetInstanceProcAddr( instance, "vkCmdSetLineRasterizationModeEXT" ) ); - vkCmdSetLineStippleEnableEXT = PFN_vkCmdSetLineStippleEnableEXT( 
vkGetInstanceProcAddr( instance, "vkCmdSetLineStippleEnableEXT" ) ); - vkCmdSetDepthClipNegativeOneToOneEXT = - PFN_vkCmdSetDepthClipNegativeOneToOneEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthClipNegativeOneToOneEXT" ) ); - vkCmdSetViewportWScalingEnableNV = PFN_vkCmdSetViewportWScalingEnableNV( vkGetInstanceProcAddr( instance, "vkCmdSetViewportWScalingEnableNV" ) ); - vkCmdSetViewportSwizzleNV = PFN_vkCmdSetViewportSwizzleNV( vkGetInstanceProcAddr( instance, "vkCmdSetViewportSwizzleNV" ) ); - vkCmdSetCoverageToColorEnableNV = PFN_vkCmdSetCoverageToColorEnableNV( vkGetInstanceProcAddr( instance, "vkCmdSetCoverageToColorEnableNV" ) ); - vkCmdSetCoverageToColorLocationNV = PFN_vkCmdSetCoverageToColorLocationNV( vkGetInstanceProcAddr( instance, "vkCmdSetCoverageToColorLocationNV" ) ); - vkCmdSetCoverageModulationModeNV = PFN_vkCmdSetCoverageModulationModeNV( vkGetInstanceProcAddr( instance, "vkCmdSetCoverageModulationModeNV" ) ); - vkCmdSetCoverageModulationTableEnableNV = - PFN_vkCmdSetCoverageModulationTableEnableNV( vkGetInstanceProcAddr( instance, "vkCmdSetCoverageModulationTableEnableNV" ) ); - vkCmdSetCoverageModulationTableNV = PFN_vkCmdSetCoverageModulationTableNV( vkGetInstanceProcAddr( instance, "vkCmdSetCoverageModulationTableNV" ) ); - vkCmdSetShadingRateImageEnableNV = PFN_vkCmdSetShadingRateImageEnableNV( vkGetInstanceProcAddr( instance, "vkCmdSetShadingRateImageEnableNV" ) ); - vkCmdSetRepresentativeFragmentTestEnableNV = - PFN_vkCmdSetRepresentativeFragmentTestEnableNV( vkGetInstanceProcAddr( instance, "vkCmdSetRepresentativeFragmentTestEnableNV" ) ); - vkCmdSetCoverageReductionModeNV = PFN_vkCmdSetCoverageReductionModeNV( vkGetInstanceProcAddr( instance, "vkCmdSetCoverageReductionModeNV" ) ); - //=== VK_EXT_shader_module_identifier === - vkGetShaderModuleIdentifierEXT = PFN_vkGetShaderModuleIdentifierEXT( vkGetInstanceProcAddr( instance, "vkGetShaderModuleIdentifierEXT" ) ); - vkGetShaderModuleCreateInfoIdentifierEXT = - PFN_vkGetShaderModuleCreateInfoIdentifierEXT( vkGetInstanceProcAddr( instance, "vkGetShaderModuleCreateInfoIdentifierEXT" ) ); //=== VK_EXT_shader_module_identifier === vkGetShaderModuleIdentifierEXT = PFN_vkGetShaderModuleIdentifierEXT( vkGetInstanceProcAddr( instance, "vkGetShaderModuleIdentifierEXT" ) ); vkGetShaderModuleCreateInfoIdentifierEXT = @@ -27180,29 +20307,21 @@ namespace VULKAN_HPP_NAMESPACE vkDestroyOpticalFlowSessionNV = PFN_vkDestroyOpticalFlowSessionNV( vkGetInstanceProcAddr( instance, "vkDestroyOpticalFlowSessionNV" ) ); vkBindOpticalFlowSessionImageNV = PFN_vkBindOpticalFlowSessionImageNV( vkGetInstanceProcAddr( instance, "vkBindOpticalFlowSessionImageNV" ) ); vkCmdOpticalFlowExecuteNV = PFN_vkCmdOpticalFlowExecuteNV( vkGetInstanceProcAddr( instance, "vkCmdOpticalFlowExecuteNV" ) ); - //=== VK_NV_optical_flow === - vkGetPhysicalDeviceOpticalFlowImageFormatsNV = - PFN_vkGetPhysicalDeviceOpticalFlowImageFormatsNV( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceOpticalFlowImageFormatsNV" ) ); - vkCreateOpticalFlowSessionNV = PFN_vkCreateOpticalFlowSessionNV( vkGetInstanceProcAddr( instance, "vkCreateOpticalFlowSessionNV" ) ); - vkDestroyOpticalFlowSessionNV = PFN_vkDestroyOpticalFlowSessionNV( vkGetInstanceProcAddr( instance, "vkDestroyOpticalFlowSessionNV" ) ); - vkBindOpticalFlowSessionImageNV = PFN_vkBindOpticalFlowSessionImageNV( vkGetInstanceProcAddr( instance, "vkBindOpticalFlowSessionImageNV" ) ); - vkCmdOpticalFlowExecuteNV = PFN_vkCmdOpticalFlowExecuteNV( vkGetInstanceProcAddr( instance, "vkCmdOpticalFlowExecuteNV" ) 
); //=== VK_KHR_maintenance5 === - vkCmdBindIndexBuffer2KHR = PFN_vkCmdBindIndexBuffer2KHR( vkGetInstanceProcAddr( instance, "vkCmdBindIndexBuffer2KHR" ) ); - vkGetRenderingAreaGranularityKHR = PFN_vkGetRenderingAreaGranularityKHR( vkGetInstanceProcAddr( instance, "vkGetRenderingAreaGranularityKHR" ) ); - vkGetDeviceImageSubresourceLayoutKHR = - PFN_vkGetDeviceImageSubresourceLayoutKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceImageSubresourceLayoutKHR" ) ); - vkGetImageSubresourceLayout2KHR = PFN_vkGetImageSubresourceLayout2KHR( vkGetInstanceProcAddr( instance, "vkGetImageSubresourceLayout2KHR" ) ); - - //=== VK_AMD_anti_lag === - vkAntiLagUpdateAMD = PFN_vkAntiLagUpdateAMD( vkGetInstanceProcAddr( instance, "vkAntiLagUpdateAMD" ) ); - //=== VK_KHR_maintenance5 === - vkCmdBindIndexBuffer2KHR = PFN_vkCmdBindIndexBuffer2KHR( vkGetInstanceProcAddr( instance, "vkCmdBindIndexBuffer2KHR" ) ); + vkCmdBindIndexBuffer2KHR = PFN_vkCmdBindIndexBuffer2KHR( vkGetInstanceProcAddr( instance, "vkCmdBindIndexBuffer2KHR" ) ); + if ( !vkCmdBindIndexBuffer2 ) + vkCmdBindIndexBuffer2 = vkCmdBindIndexBuffer2KHR; vkGetRenderingAreaGranularityKHR = PFN_vkGetRenderingAreaGranularityKHR( vkGetInstanceProcAddr( instance, "vkGetRenderingAreaGranularityKHR" ) ); + if ( !vkGetRenderingAreaGranularity ) + vkGetRenderingAreaGranularity = vkGetRenderingAreaGranularityKHR; vkGetDeviceImageSubresourceLayoutKHR = PFN_vkGetDeviceImageSubresourceLayoutKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceImageSubresourceLayoutKHR" ) ); + if ( !vkGetDeviceImageSubresourceLayout ) + vkGetDeviceImageSubresourceLayout = vkGetDeviceImageSubresourceLayoutKHR; vkGetImageSubresourceLayout2KHR = PFN_vkGetImageSubresourceLayout2KHR( vkGetInstanceProcAddr( instance, "vkGetImageSubresourceLayout2KHR" ) ); + if ( !vkGetImageSubresourceLayout2 ) + vkGetImageSubresourceLayout2 = vkGetImageSubresourceLayout2KHR; //=== VK_AMD_anti_lag === vkAntiLagUpdateAMD = PFN_vkAntiLagUpdateAMD( vkGetInstanceProcAddr( instance, "vkAntiLagUpdateAMD" ) ); @@ -27214,19 +20333,6 @@ namespace VULKAN_HPP_NAMESPACE vkCmdBindShadersEXT = PFN_vkCmdBindShadersEXT( vkGetInstanceProcAddr( instance, "vkCmdBindShadersEXT" ) ); vkCmdSetDepthClampRangeEXT = PFN_vkCmdSetDepthClampRangeEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthClampRangeEXT" ) ); - //=== VK_KHR_pipeline_binary === - vkCreatePipelineBinariesKHR = PFN_vkCreatePipelineBinariesKHR( vkGetInstanceProcAddr( instance, "vkCreatePipelineBinariesKHR" ) ); - vkDestroyPipelineBinaryKHR = PFN_vkDestroyPipelineBinaryKHR( vkGetInstanceProcAddr( instance, "vkDestroyPipelineBinaryKHR" ) ); - vkGetPipelineKeyKHR = PFN_vkGetPipelineKeyKHR( vkGetInstanceProcAddr( instance, "vkGetPipelineKeyKHR" ) ); - vkGetPipelineBinaryDataKHR = PFN_vkGetPipelineBinaryDataKHR( vkGetInstanceProcAddr( instance, "vkGetPipelineBinaryDataKHR" ) ); - vkReleaseCapturedPipelineDataKHR = PFN_vkReleaseCapturedPipelineDataKHR( vkGetInstanceProcAddr( instance, "vkReleaseCapturedPipelineDataKHR" ) ); - //=== VK_EXT_shader_object === - vkCreateShadersEXT = PFN_vkCreateShadersEXT( vkGetInstanceProcAddr( instance, "vkCreateShadersEXT" ) ); - vkDestroyShaderEXT = PFN_vkDestroyShaderEXT( vkGetInstanceProcAddr( instance, "vkDestroyShaderEXT" ) ); - vkGetShaderBinaryDataEXT = PFN_vkGetShaderBinaryDataEXT( vkGetInstanceProcAddr( instance, "vkGetShaderBinaryDataEXT" ) ); - vkCmdBindShadersEXT = PFN_vkCmdBindShadersEXT( vkGetInstanceProcAddr( instance, "vkCmdBindShadersEXT" ) ); - vkCmdSetDepthClampRangeEXT = PFN_vkCmdSetDepthClampRangeEXT( 
vkGetInstanceProcAddr( instance, "vkCmdSetDepthClampRangeEXT" ) ); - //=== VK_KHR_pipeline_binary === vkCreatePipelineBinariesKHR = PFN_vkCreatePipelineBinariesKHR( vkGetInstanceProcAddr( instance, "vkCreatePipelineBinariesKHR" ) ); vkDestroyPipelineBinaryKHR = PFN_vkDestroyPipelineBinaryKHR( vkGetInstanceProcAddr( instance, "vkDestroyPipelineBinaryKHR" ) ); @@ -27234,10 +20340,6 @@ namespace VULKAN_HPP_NAMESPACE vkGetPipelineBinaryDataKHR = PFN_vkGetPipelineBinaryDataKHR( vkGetInstanceProcAddr( instance, "vkGetPipelineBinaryDataKHR" ) ); vkReleaseCapturedPipelineDataKHR = PFN_vkReleaseCapturedPipelineDataKHR( vkGetInstanceProcAddr( instance, "vkReleaseCapturedPipelineDataKHR" ) ); - //=== VK_QCOM_tile_properties === - vkGetFramebufferTilePropertiesQCOM = PFN_vkGetFramebufferTilePropertiesQCOM( vkGetInstanceProcAddr( instance, "vkGetFramebufferTilePropertiesQCOM" ) ); - vkGetDynamicRenderingTilePropertiesQCOM = - PFN_vkGetDynamicRenderingTilePropertiesQCOM( vkGetInstanceProcAddr( instance, "vkGetDynamicRenderingTilePropertiesQCOM" ) ); //=== VK_QCOM_tile_properties === vkGetFramebufferTilePropertiesQCOM = PFN_vkGetFramebufferTilePropertiesQCOM( vkGetInstanceProcAddr( instance, "vkGetFramebufferTilePropertiesQCOM" ) ); vkGetDynamicRenderingTilePropertiesQCOM = @@ -27249,16 +20351,7 @@ namespace VULKAN_HPP_NAMESPACE vkSetLatencyMarkerNV = PFN_vkSetLatencyMarkerNV( vkGetInstanceProcAddr( instance, "vkSetLatencyMarkerNV" ) ); vkGetLatencyTimingsNV = PFN_vkGetLatencyTimingsNV( vkGetInstanceProcAddr( instance, "vkGetLatencyTimingsNV" ) ); vkQueueNotifyOutOfBandNV = PFN_vkQueueNotifyOutOfBandNV( vkGetInstanceProcAddr( instance, "vkQueueNotifyOutOfBandNV" ) ); - //=== VK_NV_low_latency2 === - vkSetLatencySleepModeNV = PFN_vkSetLatencySleepModeNV( vkGetInstanceProcAddr( instance, "vkSetLatencySleepModeNV" ) ); - vkLatencySleepNV = PFN_vkLatencySleepNV( vkGetInstanceProcAddr( instance, "vkLatencySleepNV" ) ); - vkSetLatencyMarkerNV = PFN_vkSetLatencyMarkerNV( vkGetInstanceProcAddr( instance, "vkSetLatencyMarkerNV" ) ); - vkGetLatencyTimingsNV = PFN_vkGetLatencyTimingsNV( vkGetInstanceProcAddr( instance, "vkGetLatencyTimingsNV" ) ); - vkQueueNotifyOutOfBandNV = PFN_vkQueueNotifyOutOfBandNV( vkGetInstanceProcAddr( instance, "vkQueueNotifyOutOfBandNV" ) ); - //=== VK_KHR_cooperative_matrix === - vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR = - PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR" ) ); //=== VK_KHR_cooperative_matrix === vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR = PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR" ) ); @@ -27266,26 +20359,17 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_EXT_attachment_feedback_loop_dynamic_state === vkCmdSetAttachmentFeedbackLoopEnableEXT = PFN_vkCmdSetAttachmentFeedbackLoopEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetAttachmentFeedbackLoopEnableEXT" ) ); - //=== VK_EXT_attachment_feedback_loop_dynamic_state === - vkCmdSetAttachmentFeedbackLoopEnableEXT = - PFN_vkCmdSetAttachmentFeedbackLoopEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetAttachmentFeedbackLoopEnableEXT" ) ); #if defined( VK_USE_PLATFORM_SCREEN_QNX ) //=== VK_QNX_external_memory_screen_buffer === vkGetScreenBufferPropertiesQNX = PFN_vkGetScreenBufferPropertiesQNX( vkGetInstanceProcAddr( instance, "vkGetScreenBufferPropertiesQNX" ) ); - //=== VK_QNX_external_memory_screen_buffer === - 
vkGetScreenBufferPropertiesQNX = PFN_vkGetScreenBufferPropertiesQNX( vkGetInstanceProcAddr( instance, "vkGetScreenBufferPropertiesQNX" ) ); #endif /*VK_USE_PLATFORM_SCREEN_QNX*/ //=== VK_KHR_line_rasterization === vkCmdSetLineStippleKHR = PFN_vkCmdSetLineStippleKHR( vkGetInstanceProcAddr( instance, "vkCmdSetLineStippleKHR" ) ); - //=== VK_KHR_line_rasterization === - vkCmdSetLineStippleKHR = PFN_vkCmdSetLineStippleKHR( vkGetInstanceProcAddr( instance, "vkCmdSetLineStippleKHR" ) ); + if ( !vkCmdSetLineStipple ) + vkCmdSetLineStipple = vkCmdSetLineStippleKHR; - //=== VK_KHR_calibrated_timestamps === - vkGetPhysicalDeviceCalibrateableTimeDomainsKHR = - PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCalibrateableTimeDomainsKHR" ) ); - vkGetCalibratedTimestampsKHR = PFN_vkGetCalibratedTimestampsKHR( vkGetInstanceProcAddr( instance, "vkGetCalibratedTimestampsKHR" ) ); //=== VK_KHR_calibrated_timestamps === vkGetPhysicalDeviceCalibrateableTimeDomainsKHR = PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCalibrateableTimeDomainsKHR" ) ); @@ -27293,40 +20377,18 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_KHR_maintenance6 === vkCmdBindDescriptorSets2KHR = PFN_vkCmdBindDescriptorSets2KHR( vkGetInstanceProcAddr( instance, "vkCmdBindDescriptorSets2KHR" ) ); - vkCmdPushConstants2KHR = PFN_vkCmdPushConstants2KHR( vkGetInstanceProcAddr( instance, "vkCmdPushConstants2KHR" ) ); - vkCmdPushDescriptorSet2KHR = PFN_vkCmdPushDescriptorSet2KHR( vkGetInstanceProcAddr( instance, "vkCmdPushDescriptorSet2KHR" ) ); - vkCmdPushDescriptorSetWithTemplate2KHR = - PFN_vkCmdPushDescriptorSetWithTemplate2KHR( vkGetInstanceProcAddr( instance, "vkCmdPushDescriptorSetWithTemplate2KHR" ) ); - vkCmdSetDescriptorBufferOffsets2EXT = - PFN_vkCmdSetDescriptorBufferOffsets2EXT( vkGetInstanceProcAddr( instance, "vkCmdSetDescriptorBufferOffsets2EXT" ) ); - vkCmdBindDescriptorBufferEmbeddedSamplers2EXT = - PFN_vkCmdBindDescriptorBufferEmbeddedSamplers2EXT( vkGetInstanceProcAddr( instance, "vkCmdBindDescriptorBufferEmbeddedSamplers2EXT" ) ); - - //=== VK_EXT_device_generated_commands === - vkGetGeneratedCommandsMemoryRequirementsEXT = - PFN_vkGetGeneratedCommandsMemoryRequirementsEXT( vkGetInstanceProcAddr( instance, "vkGetGeneratedCommandsMemoryRequirementsEXT" ) ); - vkCmdPreprocessGeneratedCommandsEXT = - PFN_vkCmdPreprocessGeneratedCommandsEXT( vkGetInstanceProcAddr( instance, "vkCmdPreprocessGeneratedCommandsEXT" ) ); - vkCmdExecuteGeneratedCommandsEXT = PFN_vkCmdExecuteGeneratedCommandsEXT( vkGetInstanceProcAddr( instance, "vkCmdExecuteGeneratedCommandsEXT" ) ); - vkCreateIndirectCommandsLayoutEXT = PFN_vkCreateIndirectCommandsLayoutEXT( vkGetInstanceProcAddr( instance, "vkCreateIndirectCommandsLayoutEXT" ) ); - vkDestroyIndirectCommandsLayoutEXT = PFN_vkDestroyIndirectCommandsLayoutEXT( vkGetInstanceProcAddr( instance, "vkDestroyIndirectCommandsLayoutEXT" ) ); - vkCreateIndirectExecutionSetEXT = PFN_vkCreateIndirectExecutionSetEXT( vkGetInstanceProcAddr( instance, "vkCreateIndirectExecutionSetEXT" ) ); - vkDestroyIndirectExecutionSetEXT = PFN_vkDestroyIndirectExecutionSetEXT( vkGetInstanceProcAddr( instance, "vkDestroyIndirectExecutionSetEXT" ) ); - vkUpdateIndirectExecutionSetPipelineEXT = - PFN_vkUpdateIndirectExecutionSetPipelineEXT( vkGetInstanceProcAddr( instance, "vkUpdateIndirectExecutionSetPipelineEXT" ) ); - vkUpdateIndirectExecutionSetShaderEXT = - PFN_vkUpdateIndirectExecutionSetShaderEXT( 
vkGetInstanceProcAddr( instance, "vkUpdateIndirectExecutionSetShaderEXT" ) ); - - //=== VK_NV_cooperative_matrix2 === - vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV = PFN_vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV( - vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV" ) ); - } - //=== VK_KHR_maintenance6 === - vkCmdBindDescriptorSets2KHR = PFN_vkCmdBindDescriptorSets2KHR( vkGetInstanceProcAddr( instance, "vkCmdBindDescriptorSets2KHR" ) ); - vkCmdPushConstants2KHR = PFN_vkCmdPushConstants2KHR( vkGetInstanceProcAddr( instance, "vkCmdPushConstants2KHR" ) ); - vkCmdPushDescriptorSet2KHR = PFN_vkCmdPushDescriptorSet2KHR( vkGetInstanceProcAddr( instance, "vkCmdPushDescriptorSet2KHR" ) ); + if ( !vkCmdBindDescriptorSets2 ) + vkCmdBindDescriptorSets2 = vkCmdBindDescriptorSets2KHR; + vkCmdPushConstants2KHR = PFN_vkCmdPushConstants2KHR( vkGetInstanceProcAddr( instance, "vkCmdPushConstants2KHR" ) ); + if ( !vkCmdPushConstants2 ) + vkCmdPushConstants2 = vkCmdPushConstants2KHR; + vkCmdPushDescriptorSet2KHR = PFN_vkCmdPushDescriptorSet2KHR( vkGetInstanceProcAddr( instance, "vkCmdPushDescriptorSet2KHR" ) ); + if ( !vkCmdPushDescriptorSet2 ) + vkCmdPushDescriptorSet2 = vkCmdPushDescriptorSet2KHR; vkCmdPushDescriptorSetWithTemplate2KHR = PFN_vkCmdPushDescriptorSetWithTemplate2KHR( vkGetInstanceProcAddr( instance, "vkCmdPushDescriptorSetWithTemplate2KHR" ) ); + if ( !vkCmdPushDescriptorSetWithTemplate2 ) + vkCmdPushDescriptorSetWithTemplate2 = vkCmdPushDescriptorSetWithTemplate2KHR; vkCmdSetDescriptorBufferOffsets2EXT = PFN_vkCmdSetDescriptorBufferOffsets2EXT( vkGetInstanceProcAddr( instance, "vkCmdSetDescriptorBufferOffsets2EXT" ) ); vkCmdBindDescriptorBufferEmbeddedSamplers2EXT = @@ -27352,9 +20414,6 @@ namespace VULKAN_HPP_NAMESPACE vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV" ) ); } - void init( VULKAN_HPP_NAMESPACE::Device deviceCpp ) VULKAN_HPP_NOEXCEPT - { - VkDevice device = static_cast( deviceCpp ); void init( VULKAN_HPP_NAMESPACE::Device deviceCpp ) VULKAN_HPP_NOEXCEPT { VkDevice device = static_cast( deviceCpp ); @@ -27481,146 +20540,7 @@ namespace VULKAN_HPP_NAMESPACE vkCmdNextSubpass = PFN_vkCmdNextSubpass( vkGetDeviceProcAddr( device, "vkCmdNextSubpass" ) ); vkCmdEndRenderPass = PFN_vkCmdEndRenderPass( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass" ) ); vkCmdExecuteCommands = PFN_vkCmdExecuteCommands( vkGetDeviceProcAddr( device, "vkCmdExecuteCommands" ) ); - //=== VK_VERSION_1_0 === - vkGetDeviceProcAddr = PFN_vkGetDeviceProcAddr( vkGetDeviceProcAddr( device, "vkGetDeviceProcAddr" ) ); - vkDestroyDevice = PFN_vkDestroyDevice( vkGetDeviceProcAddr( device, "vkDestroyDevice" ) ); - vkGetDeviceQueue = PFN_vkGetDeviceQueue( vkGetDeviceProcAddr( device, "vkGetDeviceQueue" ) ); - vkQueueSubmit = PFN_vkQueueSubmit( vkGetDeviceProcAddr( device, "vkQueueSubmit" ) ); - vkQueueWaitIdle = PFN_vkQueueWaitIdle( vkGetDeviceProcAddr( device, "vkQueueWaitIdle" ) ); - vkDeviceWaitIdle = PFN_vkDeviceWaitIdle( vkGetDeviceProcAddr( device, "vkDeviceWaitIdle" ) ); - vkAllocateMemory = PFN_vkAllocateMemory( vkGetDeviceProcAddr( device, "vkAllocateMemory" ) ); - vkFreeMemory = PFN_vkFreeMemory( vkGetDeviceProcAddr( device, "vkFreeMemory" ) ); - vkMapMemory = PFN_vkMapMemory( vkGetDeviceProcAddr( device, "vkMapMemory" ) ); - vkUnmapMemory = PFN_vkUnmapMemory( vkGetDeviceProcAddr( device, "vkUnmapMemory" ) ); - vkFlushMappedMemoryRanges = 
PFN_vkFlushMappedMemoryRanges( vkGetDeviceProcAddr( device, "vkFlushMappedMemoryRanges" ) ); - vkInvalidateMappedMemoryRanges = PFN_vkInvalidateMappedMemoryRanges( vkGetDeviceProcAddr( device, "vkInvalidateMappedMemoryRanges" ) ); - vkGetDeviceMemoryCommitment = PFN_vkGetDeviceMemoryCommitment( vkGetDeviceProcAddr( device, "vkGetDeviceMemoryCommitment" ) ); - vkBindBufferMemory = PFN_vkBindBufferMemory( vkGetDeviceProcAddr( device, "vkBindBufferMemory" ) ); - vkBindImageMemory = PFN_vkBindImageMemory( vkGetDeviceProcAddr( device, "vkBindImageMemory" ) ); - vkGetBufferMemoryRequirements = PFN_vkGetBufferMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements" ) ); - vkGetImageMemoryRequirements = PFN_vkGetImageMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements" ) ); - vkGetImageSparseMemoryRequirements = PFN_vkGetImageSparseMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements" ) ); - vkQueueBindSparse = PFN_vkQueueBindSparse( vkGetDeviceProcAddr( device, "vkQueueBindSparse" ) ); - vkCreateFence = PFN_vkCreateFence( vkGetDeviceProcAddr( device, "vkCreateFence" ) ); - vkDestroyFence = PFN_vkDestroyFence( vkGetDeviceProcAddr( device, "vkDestroyFence" ) ); - vkResetFences = PFN_vkResetFences( vkGetDeviceProcAddr( device, "vkResetFences" ) ); - vkGetFenceStatus = PFN_vkGetFenceStatus( vkGetDeviceProcAddr( device, "vkGetFenceStatus" ) ); - vkWaitForFences = PFN_vkWaitForFences( vkGetDeviceProcAddr( device, "vkWaitForFences" ) ); - vkCreateSemaphore = PFN_vkCreateSemaphore( vkGetDeviceProcAddr( device, "vkCreateSemaphore" ) ); - vkDestroySemaphore = PFN_vkDestroySemaphore( vkGetDeviceProcAddr( device, "vkDestroySemaphore" ) ); - vkCreateEvent = PFN_vkCreateEvent( vkGetDeviceProcAddr( device, "vkCreateEvent" ) ); - vkDestroyEvent = PFN_vkDestroyEvent( vkGetDeviceProcAddr( device, "vkDestroyEvent" ) ); - vkGetEventStatus = PFN_vkGetEventStatus( vkGetDeviceProcAddr( device, "vkGetEventStatus" ) ); - vkSetEvent = PFN_vkSetEvent( vkGetDeviceProcAddr( device, "vkSetEvent" ) ); - vkResetEvent = PFN_vkResetEvent( vkGetDeviceProcAddr( device, "vkResetEvent" ) ); - vkCreateQueryPool = PFN_vkCreateQueryPool( vkGetDeviceProcAddr( device, "vkCreateQueryPool" ) ); - vkDestroyQueryPool = PFN_vkDestroyQueryPool( vkGetDeviceProcAddr( device, "vkDestroyQueryPool" ) ); - vkGetQueryPoolResults = PFN_vkGetQueryPoolResults( vkGetDeviceProcAddr( device, "vkGetQueryPoolResults" ) ); - vkCreateBuffer = PFN_vkCreateBuffer( vkGetDeviceProcAddr( device, "vkCreateBuffer" ) ); - vkDestroyBuffer = PFN_vkDestroyBuffer( vkGetDeviceProcAddr( device, "vkDestroyBuffer" ) ); - vkCreateBufferView = PFN_vkCreateBufferView( vkGetDeviceProcAddr( device, "vkCreateBufferView" ) ); - vkDestroyBufferView = PFN_vkDestroyBufferView( vkGetDeviceProcAddr( device, "vkDestroyBufferView" ) ); - vkCreateImage = PFN_vkCreateImage( vkGetDeviceProcAddr( device, "vkCreateImage" ) ); - vkDestroyImage = PFN_vkDestroyImage( vkGetDeviceProcAddr( device, "vkDestroyImage" ) ); - vkGetImageSubresourceLayout = PFN_vkGetImageSubresourceLayout( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout" ) ); - vkCreateImageView = PFN_vkCreateImageView( vkGetDeviceProcAddr( device, "vkCreateImageView" ) ); - vkDestroyImageView = PFN_vkDestroyImageView( vkGetDeviceProcAddr( device, "vkDestroyImageView" ) ); - vkCreateShaderModule = PFN_vkCreateShaderModule( vkGetDeviceProcAddr( device, "vkCreateShaderModule" ) ); - vkDestroyShaderModule = PFN_vkDestroyShaderModule( 
vkGetDeviceProcAddr( device, "vkDestroyShaderModule" ) ); - vkCreatePipelineCache = PFN_vkCreatePipelineCache( vkGetDeviceProcAddr( device, "vkCreatePipelineCache" ) ); - vkDestroyPipelineCache = PFN_vkDestroyPipelineCache( vkGetDeviceProcAddr( device, "vkDestroyPipelineCache" ) ); - vkGetPipelineCacheData = PFN_vkGetPipelineCacheData( vkGetDeviceProcAddr( device, "vkGetPipelineCacheData" ) ); - vkMergePipelineCaches = PFN_vkMergePipelineCaches( vkGetDeviceProcAddr( device, "vkMergePipelineCaches" ) ); - vkCreateGraphicsPipelines = PFN_vkCreateGraphicsPipelines( vkGetDeviceProcAddr( device, "vkCreateGraphicsPipelines" ) ); - vkCreateComputePipelines = PFN_vkCreateComputePipelines( vkGetDeviceProcAddr( device, "vkCreateComputePipelines" ) ); - vkDestroyPipeline = PFN_vkDestroyPipeline( vkGetDeviceProcAddr( device, "vkDestroyPipeline" ) ); - vkCreatePipelineLayout = PFN_vkCreatePipelineLayout( vkGetDeviceProcAddr( device, "vkCreatePipelineLayout" ) ); - vkDestroyPipelineLayout = PFN_vkDestroyPipelineLayout( vkGetDeviceProcAddr( device, "vkDestroyPipelineLayout" ) ); - vkCreateSampler = PFN_vkCreateSampler( vkGetDeviceProcAddr( device, "vkCreateSampler" ) ); - vkDestroySampler = PFN_vkDestroySampler( vkGetDeviceProcAddr( device, "vkDestroySampler" ) ); - vkCreateDescriptorSetLayout = PFN_vkCreateDescriptorSetLayout( vkGetDeviceProcAddr( device, "vkCreateDescriptorSetLayout" ) ); - vkDestroyDescriptorSetLayout = PFN_vkDestroyDescriptorSetLayout( vkGetDeviceProcAddr( device, "vkDestroyDescriptorSetLayout" ) ); - vkCreateDescriptorPool = PFN_vkCreateDescriptorPool( vkGetDeviceProcAddr( device, "vkCreateDescriptorPool" ) ); - vkDestroyDescriptorPool = PFN_vkDestroyDescriptorPool( vkGetDeviceProcAddr( device, "vkDestroyDescriptorPool" ) ); - vkResetDescriptorPool = PFN_vkResetDescriptorPool( vkGetDeviceProcAddr( device, "vkResetDescriptorPool" ) ); - vkAllocateDescriptorSets = PFN_vkAllocateDescriptorSets( vkGetDeviceProcAddr( device, "vkAllocateDescriptorSets" ) ); - vkFreeDescriptorSets = PFN_vkFreeDescriptorSets( vkGetDeviceProcAddr( device, "vkFreeDescriptorSets" ) ); - vkUpdateDescriptorSets = PFN_vkUpdateDescriptorSets( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSets" ) ); - vkCreateFramebuffer = PFN_vkCreateFramebuffer( vkGetDeviceProcAddr( device, "vkCreateFramebuffer" ) ); - vkDestroyFramebuffer = PFN_vkDestroyFramebuffer( vkGetDeviceProcAddr( device, "vkDestroyFramebuffer" ) ); - vkCreateRenderPass = PFN_vkCreateRenderPass( vkGetDeviceProcAddr( device, "vkCreateRenderPass" ) ); - vkDestroyRenderPass = PFN_vkDestroyRenderPass( vkGetDeviceProcAddr( device, "vkDestroyRenderPass" ) ); - vkGetRenderAreaGranularity = PFN_vkGetRenderAreaGranularity( vkGetDeviceProcAddr( device, "vkGetRenderAreaGranularity" ) ); - vkCreateCommandPool = PFN_vkCreateCommandPool( vkGetDeviceProcAddr( device, "vkCreateCommandPool" ) ); - vkDestroyCommandPool = PFN_vkDestroyCommandPool( vkGetDeviceProcAddr( device, "vkDestroyCommandPool" ) ); - vkResetCommandPool = PFN_vkResetCommandPool( vkGetDeviceProcAddr( device, "vkResetCommandPool" ) ); - vkAllocateCommandBuffers = PFN_vkAllocateCommandBuffers( vkGetDeviceProcAddr( device, "vkAllocateCommandBuffers" ) ); - vkFreeCommandBuffers = PFN_vkFreeCommandBuffers( vkGetDeviceProcAddr( device, "vkFreeCommandBuffers" ) ); - vkBeginCommandBuffer = PFN_vkBeginCommandBuffer( vkGetDeviceProcAddr( device, "vkBeginCommandBuffer" ) ); - vkEndCommandBuffer = PFN_vkEndCommandBuffer( vkGetDeviceProcAddr( device, "vkEndCommandBuffer" ) ); - vkResetCommandBuffer = 
PFN_vkResetCommandBuffer( vkGetDeviceProcAddr( device, "vkResetCommandBuffer" ) ); - vkCmdBindPipeline = PFN_vkCmdBindPipeline( vkGetDeviceProcAddr( device, "vkCmdBindPipeline" ) ); - vkCmdSetViewport = PFN_vkCmdSetViewport( vkGetDeviceProcAddr( device, "vkCmdSetViewport" ) ); - vkCmdSetScissor = PFN_vkCmdSetScissor( vkGetDeviceProcAddr( device, "vkCmdSetScissor" ) ); - vkCmdSetLineWidth = PFN_vkCmdSetLineWidth( vkGetDeviceProcAddr( device, "vkCmdSetLineWidth" ) ); - vkCmdSetDepthBias = PFN_vkCmdSetDepthBias( vkGetDeviceProcAddr( device, "vkCmdSetDepthBias" ) ); - vkCmdSetBlendConstants = PFN_vkCmdSetBlendConstants( vkGetDeviceProcAddr( device, "vkCmdSetBlendConstants" ) ); - vkCmdSetDepthBounds = PFN_vkCmdSetDepthBounds( vkGetDeviceProcAddr( device, "vkCmdSetDepthBounds" ) ); - vkCmdSetStencilCompareMask = PFN_vkCmdSetStencilCompareMask( vkGetDeviceProcAddr( device, "vkCmdSetStencilCompareMask" ) ); - vkCmdSetStencilWriteMask = PFN_vkCmdSetStencilWriteMask( vkGetDeviceProcAddr( device, "vkCmdSetStencilWriteMask" ) ); - vkCmdSetStencilReference = PFN_vkCmdSetStencilReference( vkGetDeviceProcAddr( device, "vkCmdSetStencilReference" ) ); - vkCmdBindDescriptorSets = PFN_vkCmdBindDescriptorSets( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorSets" ) ); - vkCmdBindIndexBuffer = PFN_vkCmdBindIndexBuffer( vkGetDeviceProcAddr( device, "vkCmdBindIndexBuffer" ) ); - vkCmdBindVertexBuffers = PFN_vkCmdBindVertexBuffers( vkGetDeviceProcAddr( device, "vkCmdBindVertexBuffers" ) ); - vkCmdDraw = PFN_vkCmdDraw( vkGetDeviceProcAddr( device, "vkCmdDraw" ) ); - vkCmdDrawIndexed = PFN_vkCmdDrawIndexed( vkGetDeviceProcAddr( device, "vkCmdDrawIndexed" ) ); - vkCmdDrawIndirect = PFN_vkCmdDrawIndirect( vkGetDeviceProcAddr( device, "vkCmdDrawIndirect" ) ); - vkCmdDrawIndexedIndirect = PFN_vkCmdDrawIndexedIndirect( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirect" ) ); - vkCmdDispatch = PFN_vkCmdDispatch( vkGetDeviceProcAddr( device, "vkCmdDispatch" ) ); - vkCmdDispatchIndirect = PFN_vkCmdDispatchIndirect( vkGetDeviceProcAddr( device, "vkCmdDispatchIndirect" ) ); - vkCmdCopyBuffer = PFN_vkCmdCopyBuffer( vkGetDeviceProcAddr( device, "vkCmdCopyBuffer" ) ); - vkCmdCopyImage = PFN_vkCmdCopyImage( vkGetDeviceProcAddr( device, "vkCmdCopyImage" ) ); - vkCmdBlitImage = PFN_vkCmdBlitImage( vkGetDeviceProcAddr( device, "vkCmdBlitImage" ) ); - vkCmdCopyBufferToImage = PFN_vkCmdCopyBufferToImage( vkGetDeviceProcAddr( device, "vkCmdCopyBufferToImage" ) ); - vkCmdCopyImageToBuffer = PFN_vkCmdCopyImageToBuffer( vkGetDeviceProcAddr( device, "vkCmdCopyImageToBuffer" ) ); - vkCmdUpdateBuffer = PFN_vkCmdUpdateBuffer( vkGetDeviceProcAddr( device, "vkCmdUpdateBuffer" ) ); - vkCmdFillBuffer = PFN_vkCmdFillBuffer( vkGetDeviceProcAddr( device, "vkCmdFillBuffer" ) ); - vkCmdClearColorImage = PFN_vkCmdClearColorImage( vkGetDeviceProcAddr( device, "vkCmdClearColorImage" ) ); - vkCmdClearDepthStencilImage = PFN_vkCmdClearDepthStencilImage( vkGetDeviceProcAddr( device, "vkCmdClearDepthStencilImage" ) ); - vkCmdClearAttachments = PFN_vkCmdClearAttachments( vkGetDeviceProcAddr( device, "vkCmdClearAttachments" ) ); - vkCmdResolveImage = PFN_vkCmdResolveImage( vkGetDeviceProcAddr( device, "vkCmdResolveImage" ) ); - vkCmdSetEvent = PFN_vkCmdSetEvent( vkGetDeviceProcAddr( device, "vkCmdSetEvent" ) ); - vkCmdResetEvent = PFN_vkCmdResetEvent( vkGetDeviceProcAddr( device, "vkCmdResetEvent" ) ); - vkCmdWaitEvents = PFN_vkCmdWaitEvents( vkGetDeviceProcAddr( device, "vkCmdWaitEvents" ) ); - vkCmdPipelineBarrier = PFN_vkCmdPipelineBarrier( 
vkGetDeviceProcAddr( device, "vkCmdPipelineBarrier" ) ); - vkCmdBeginQuery = PFN_vkCmdBeginQuery( vkGetDeviceProcAddr( device, "vkCmdBeginQuery" ) ); - vkCmdEndQuery = PFN_vkCmdEndQuery( vkGetDeviceProcAddr( device, "vkCmdEndQuery" ) ); - vkCmdResetQueryPool = PFN_vkCmdResetQueryPool( vkGetDeviceProcAddr( device, "vkCmdResetQueryPool" ) ); - vkCmdWriteTimestamp = PFN_vkCmdWriteTimestamp( vkGetDeviceProcAddr( device, "vkCmdWriteTimestamp" ) ); - vkCmdCopyQueryPoolResults = PFN_vkCmdCopyQueryPoolResults( vkGetDeviceProcAddr( device, "vkCmdCopyQueryPoolResults" ) ); - vkCmdPushConstants = PFN_vkCmdPushConstants( vkGetDeviceProcAddr( device, "vkCmdPushConstants" ) ); - vkCmdBeginRenderPass = PFN_vkCmdBeginRenderPass( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass" ) ); - vkCmdNextSubpass = PFN_vkCmdNextSubpass( vkGetDeviceProcAddr( device, "vkCmdNextSubpass" ) ); - vkCmdEndRenderPass = PFN_vkCmdEndRenderPass( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass" ) ); - vkCmdExecuteCommands = PFN_vkCmdExecuteCommands( vkGetDeviceProcAddr( device, "vkCmdExecuteCommands" ) ); - //=== VK_VERSION_1_1 === - vkBindBufferMemory2 = PFN_vkBindBufferMemory2( vkGetDeviceProcAddr( device, "vkBindBufferMemory2" ) ); - vkBindImageMemory2 = PFN_vkBindImageMemory2( vkGetDeviceProcAddr( device, "vkBindImageMemory2" ) ); - vkGetDeviceGroupPeerMemoryFeatures = PFN_vkGetDeviceGroupPeerMemoryFeatures( vkGetDeviceProcAddr( device, "vkGetDeviceGroupPeerMemoryFeatures" ) ); - vkCmdSetDeviceMask = PFN_vkCmdSetDeviceMask( vkGetDeviceProcAddr( device, "vkCmdSetDeviceMask" ) ); - vkCmdDispatchBase = PFN_vkCmdDispatchBase( vkGetDeviceProcAddr( device, "vkCmdDispatchBase" ) ); - vkGetImageMemoryRequirements2 = PFN_vkGetImageMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements2" ) ); - vkGetBufferMemoryRequirements2 = PFN_vkGetBufferMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements2" ) ); - vkGetImageSparseMemoryRequirements2 = PFN_vkGetImageSparseMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements2" ) ); - vkTrimCommandPool = PFN_vkTrimCommandPool( vkGetDeviceProcAddr( device, "vkTrimCommandPool" ) ); - vkGetDeviceQueue2 = PFN_vkGetDeviceQueue2( vkGetDeviceProcAddr( device, "vkGetDeviceQueue2" ) ); - vkCreateSamplerYcbcrConversion = PFN_vkCreateSamplerYcbcrConversion( vkGetDeviceProcAddr( device, "vkCreateSamplerYcbcrConversion" ) ); - vkDestroySamplerYcbcrConversion = PFN_vkDestroySamplerYcbcrConversion( vkGetDeviceProcAddr( device, "vkDestroySamplerYcbcrConversion" ) ); - vkCreateDescriptorUpdateTemplate = PFN_vkCreateDescriptorUpdateTemplate( vkGetDeviceProcAddr( device, "vkCreateDescriptorUpdateTemplate" ) ); - vkDestroyDescriptorUpdateTemplate = PFN_vkDestroyDescriptorUpdateTemplate( vkGetDeviceProcAddr( device, "vkDestroyDescriptorUpdateTemplate" ) ); - vkUpdateDescriptorSetWithTemplate = PFN_vkUpdateDescriptorSetWithTemplate( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSetWithTemplate" ) ); - vkGetDescriptorSetLayoutSupport = PFN_vkGetDescriptorSetLayoutSupport( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutSupport" ) ); //=== VK_VERSION_1_1 === vkBindBufferMemory2 = PFN_vkBindBufferMemory2( vkGetDeviceProcAddr( device, "vkBindBufferMemory2" ) ); vkBindImageMemory2 = PFN_vkBindImageMemory2( vkGetDeviceProcAddr( device, "vkBindImageMemory2" ) ); @@ -27639,21 +20559,6 @@ namespace VULKAN_HPP_NAMESPACE vkUpdateDescriptorSetWithTemplate = PFN_vkUpdateDescriptorSetWithTemplate( vkGetDeviceProcAddr( device, 
"vkUpdateDescriptorSetWithTemplate" ) ); vkGetDescriptorSetLayoutSupport = PFN_vkGetDescriptorSetLayoutSupport( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutSupport" ) ); - //=== VK_VERSION_1_2 === - vkCmdDrawIndirectCount = PFN_vkCmdDrawIndirectCount( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCount" ) ); - vkCmdDrawIndexedIndirectCount = PFN_vkCmdDrawIndexedIndirectCount( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCount" ) ); - vkCreateRenderPass2 = PFN_vkCreateRenderPass2( vkGetDeviceProcAddr( device, "vkCreateRenderPass2" ) ); - vkCmdBeginRenderPass2 = PFN_vkCmdBeginRenderPass2( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass2" ) ); - vkCmdNextSubpass2 = PFN_vkCmdNextSubpass2( vkGetDeviceProcAddr( device, "vkCmdNextSubpass2" ) ); - vkCmdEndRenderPass2 = PFN_vkCmdEndRenderPass2( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass2" ) ); - vkResetQueryPool = PFN_vkResetQueryPool( vkGetDeviceProcAddr( device, "vkResetQueryPool" ) ); - vkGetSemaphoreCounterValue = PFN_vkGetSemaphoreCounterValue( vkGetDeviceProcAddr( device, "vkGetSemaphoreCounterValue" ) ); - vkWaitSemaphores = PFN_vkWaitSemaphores( vkGetDeviceProcAddr( device, "vkWaitSemaphores" ) ); - vkSignalSemaphore = PFN_vkSignalSemaphore( vkGetDeviceProcAddr( device, "vkSignalSemaphore" ) ); - vkGetBufferDeviceAddress = PFN_vkGetBufferDeviceAddress( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddress" ) ); - vkGetBufferOpaqueCaptureAddress = PFN_vkGetBufferOpaqueCaptureAddress( vkGetDeviceProcAddr( device, "vkGetBufferOpaqueCaptureAddress" ) ); - vkGetDeviceMemoryOpaqueCaptureAddress = - PFN_vkGetDeviceMemoryOpaqueCaptureAddress( vkGetDeviceProcAddr( device, "vkGetDeviceMemoryOpaqueCaptureAddress" ) ); //=== VK_VERSION_1_2 === vkCmdDrawIndirectCount = PFN_vkCmdDrawIndirectCount( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCount" ) ); vkCmdDrawIndexedIndirectCount = PFN_vkCmdDrawIndexedIndirectCount( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCount" ) ); @@ -27706,58 +20611,32 @@ namespace VULKAN_HPP_NAMESPACE vkCmdSetPrimitiveRestartEnable = PFN_vkCmdSetPrimitiveRestartEnable( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveRestartEnable" ) ); vkGetDeviceBufferMemoryRequirements = PFN_vkGetDeviceBufferMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetDeviceBufferMemoryRequirements" ) ); vkGetDeviceImageMemoryRequirements = PFN_vkGetDeviceImageMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetDeviceImageMemoryRequirements" ) ); - vkGetDeviceImageSparseMemoryRequirements = - PFN_vkGetDeviceImageSparseMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetDeviceImageSparseMemoryRequirements" ) ); - //=== VK_VERSION_1_3 === - vkCreatePrivateDataSlot = PFN_vkCreatePrivateDataSlot( vkGetDeviceProcAddr( device, "vkCreatePrivateDataSlot" ) ); - vkDestroyPrivateDataSlot = PFN_vkDestroyPrivateDataSlot( vkGetDeviceProcAddr( device, "vkDestroyPrivateDataSlot" ) ); - vkSetPrivateData = PFN_vkSetPrivateData( vkGetDeviceProcAddr( device, "vkSetPrivateData" ) ); - vkGetPrivateData = PFN_vkGetPrivateData( vkGetDeviceProcAddr( device, "vkGetPrivateData" ) ); - vkCmdSetEvent2 = PFN_vkCmdSetEvent2( vkGetDeviceProcAddr( device, "vkCmdSetEvent2" ) ); - vkCmdResetEvent2 = PFN_vkCmdResetEvent2( vkGetDeviceProcAddr( device, "vkCmdResetEvent2" ) ); - vkCmdWaitEvents2 = PFN_vkCmdWaitEvents2( vkGetDeviceProcAddr( device, "vkCmdWaitEvents2" ) ); - vkCmdPipelineBarrier2 = PFN_vkCmdPipelineBarrier2( vkGetDeviceProcAddr( device, "vkCmdPipelineBarrier2" ) ); - vkCmdWriteTimestamp2 = PFN_vkCmdWriteTimestamp2( 
vkGetDeviceProcAddr( device, "vkCmdWriteTimestamp2" ) ); - vkQueueSubmit2 = PFN_vkQueueSubmit2( vkGetDeviceProcAddr( device, "vkQueueSubmit2" ) ); - vkCmdCopyBuffer2 = PFN_vkCmdCopyBuffer2( vkGetDeviceProcAddr( device, "vkCmdCopyBuffer2" ) ); - vkCmdCopyImage2 = PFN_vkCmdCopyImage2( vkGetDeviceProcAddr( device, "vkCmdCopyImage2" ) ); - vkCmdCopyBufferToImage2 = PFN_vkCmdCopyBufferToImage2( vkGetDeviceProcAddr( device, "vkCmdCopyBufferToImage2" ) ); - vkCmdCopyImageToBuffer2 = PFN_vkCmdCopyImageToBuffer2( vkGetDeviceProcAddr( device, "vkCmdCopyImageToBuffer2" ) ); - vkCmdBlitImage2 = PFN_vkCmdBlitImage2( vkGetDeviceProcAddr( device, "vkCmdBlitImage2" ) ); - vkCmdResolveImage2 = PFN_vkCmdResolveImage2( vkGetDeviceProcAddr( device, "vkCmdResolveImage2" ) ); - vkCmdBeginRendering = PFN_vkCmdBeginRendering( vkGetDeviceProcAddr( device, "vkCmdBeginRendering" ) ); - vkCmdEndRendering = PFN_vkCmdEndRendering( vkGetDeviceProcAddr( device, "vkCmdEndRendering" ) ); - vkCmdSetCullMode = PFN_vkCmdSetCullMode( vkGetDeviceProcAddr( device, "vkCmdSetCullMode" ) ); - vkCmdSetFrontFace = PFN_vkCmdSetFrontFace( vkGetDeviceProcAddr( device, "vkCmdSetFrontFace" ) ); - vkCmdSetPrimitiveTopology = PFN_vkCmdSetPrimitiveTopology( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveTopology" ) ); - vkCmdSetViewportWithCount = PFN_vkCmdSetViewportWithCount( vkGetDeviceProcAddr( device, "vkCmdSetViewportWithCount" ) ); - vkCmdSetScissorWithCount = PFN_vkCmdSetScissorWithCount( vkGetDeviceProcAddr( device, "vkCmdSetScissorWithCount" ) ); - vkCmdBindVertexBuffers2 = PFN_vkCmdBindVertexBuffers2( vkGetDeviceProcAddr( device, "vkCmdBindVertexBuffers2" ) ); - vkCmdSetDepthTestEnable = PFN_vkCmdSetDepthTestEnable( vkGetDeviceProcAddr( device, "vkCmdSetDepthTestEnable" ) ); - vkCmdSetDepthWriteEnable = PFN_vkCmdSetDepthWriteEnable( vkGetDeviceProcAddr( device, "vkCmdSetDepthWriteEnable" ) ); - vkCmdSetDepthCompareOp = PFN_vkCmdSetDepthCompareOp( vkGetDeviceProcAddr( device, "vkCmdSetDepthCompareOp" ) ); - vkCmdSetDepthBoundsTestEnable = PFN_vkCmdSetDepthBoundsTestEnable( vkGetDeviceProcAddr( device, "vkCmdSetDepthBoundsTestEnable" ) ); - vkCmdSetStencilTestEnable = PFN_vkCmdSetStencilTestEnable( vkGetDeviceProcAddr( device, "vkCmdSetStencilTestEnable" ) ); - vkCmdSetStencilOp = PFN_vkCmdSetStencilOp( vkGetDeviceProcAddr( device, "vkCmdSetStencilOp" ) ); - vkCmdSetRasterizerDiscardEnable = PFN_vkCmdSetRasterizerDiscardEnable( vkGetDeviceProcAddr( device, "vkCmdSetRasterizerDiscardEnable" ) ); - vkCmdSetDepthBiasEnable = PFN_vkCmdSetDepthBiasEnable( vkGetDeviceProcAddr( device, "vkCmdSetDepthBiasEnable" ) ); - vkCmdSetPrimitiveRestartEnable = PFN_vkCmdSetPrimitiveRestartEnable( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveRestartEnable" ) ); - vkGetDeviceBufferMemoryRequirements = PFN_vkGetDeviceBufferMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetDeviceBufferMemoryRequirements" ) ); - vkGetDeviceImageMemoryRequirements = PFN_vkGetDeviceImageMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetDeviceImageMemoryRequirements" ) ); vkGetDeviceImageSparseMemoryRequirements = PFN_vkGetDeviceImageSparseMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetDeviceImageSparseMemoryRequirements" ) ); - //=== VK_KHR_swapchain === - vkCreateSwapchainKHR = PFN_vkCreateSwapchainKHR( vkGetDeviceProcAddr( device, "vkCreateSwapchainKHR" ) ); - vkDestroySwapchainKHR = PFN_vkDestroySwapchainKHR( vkGetDeviceProcAddr( device, "vkDestroySwapchainKHR" ) ); - vkGetSwapchainImagesKHR = PFN_vkGetSwapchainImagesKHR( vkGetDeviceProcAddr( device, 
"vkGetSwapchainImagesKHR" ) ); - vkAcquireNextImageKHR = PFN_vkAcquireNextImageKHR( vkGetDeviceProcAddr( device, "vkAcquireNextImageKHR" ) ); - vkQueuePresentKHR = PFN_vkQueuePresentKHR( vkGetDeviceProcAddr( device, "vkQueuePresentKHR" ) ); - vkGetDeviceGroupPresentCapabilitiesKHR = - PFN_vkGetDeviceGroupPresentCapabilitiesKHR( vkGetDeviceProcAddr( device, "vkGetDeviceGroupPresentCapabilitiesKHR" ) ); - vkGetDeviceGroupSurfacePresentModesKHR = - PFN_vkGetDeviceGroupSurfacePresentModesKHR( vkGetDeviceProcAddr( device, "vkGetDeviceGroupSurfacePresentModesKHR" ) ); - vkAcquireNextImage2KHR = PFN_vkAcquireNextImage2KHR( vkGetDeviceProcAddr( device, "vkAcquireNextImage2KHR" ) ); + //=== VK_VERSION_1_4 === + vkCmdSetLineStipple = PFN_vkCmdSetLineStipple( vkGetDeviceProcAddr( device, "vkCmdSetLineStipple" ) ); + vkMapMemory2 = PFN_vkMapMemory2( vkGetDeviceProcAddr( device, "vkMapMemory2" ) ); + vkUnmapMemory2 = PFN_vkUnmapMemory2( vkGetDeviceProcAddr( device, "vkUnmapMemory2" ) ); + vkCmdBindIndexBuffer2 = PFN_vkCmdBindIndexBuffer2( vkGetDeviceProcAddr( device, "vkCmdBindIndexBuffer2" ) ); + vkGetRenderingAreaGranularity = PFN_vkGetRenderingAreaGranularity( vkGetDeviceProcAddr( device, "vkGetRenderingAreaGranularity" ) ); + vkGetDeviceImageSubresourceLayout = PFN_vkGetDeviceImageSubresourceLayout( vkGetDeviceProcAddr( device, "vkGetDeviceImageSubresourceLayout" ) ); + vkGetImageSubresourceLayout2 = PFN_vkGetImageSubresourceLayout2( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout2" ) ); + vkCmdPushDescriptorSet = PFN_vkCmdPushDescriptorSet( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSet" ) ); + vkCmdPushDescriptorSetWithTemplate = PFN_vkCmdPushDescriptorSetWithTemplate( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetWithTemplate" ) ); + vkCmdSetRenderingAttachmentLocations = + PFN_vkCmdSetRenderingAttachmentLocations( vkGetDeviceProcAddr( device, "vkCmdSetRenderingAttachmentLocations" ) ); + vkCmdSetRenderingInputAttachmentIndices = + PFN_vkCmdSetRenderingInputAttachmentIndices( vkGetDeviceProcAddr( device, "vkCmdSetRenderingInputAttachmentIndices" ) ); + vkCmdBindDescriptorSets2 = PFN_vkCmdBindDescriptorSets2( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorSets2" ) ); + vkCmdPushConstants2 = PFN_vkCmdPushConstants2( vkGetDeviceProcAddr( device, "vkCmdPushConstants2" ) ); + vkCmdPushDescriptorSet2 = PFN_vkCmdPushDescriptorSet2( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSet2" ) ); + vkCmdPushDescriptorSetWithTemplate2 = PFN_vkCmdPushDescriptorSetWithTemplate2( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetWithTemplate2" ) ); + vkCopyMemoryToImage = PFN_vkCopyMemoryToImage( vkGetDeviceProcAddr( device, "vkCopyMemoryToImage" ) ); + vkCopyImageToMemory = PFN_vkCopyImageToMemory( vkGetDeviceProcAddr( device, "vkCopyImageToMemory" ) ); + vkCopyImageToImage = PFN_vkCopyImageToImage( vkGetDeviceProcAddr( device, "vkCopyImageToImage" ) ); + vkTransitionImageLayout = PFN_vkTransitionImageLayout( vkGetDeviceProcAddr( device, "vkTransitionImageLayout" ) ); + //=== VK_KHR_swapchain === vkCreateSwapchainKHR = PFN_vkCreateSwapchainKHR( vkGetDeviceProcAddr( device, "vkCreateSwapchainKHR" ) ); vkDestroySwapchainKHR = PFN_vkDestroySwapchainKHR( vkGetDeviceProcAddr( device, "vkDestroySwapchainKHR" ) ); @@ -27770,17 +20649,9 @@ namespace VULKAN_HPP_NAMESPACE PFN_vkGetDeviceGroupSurfacePresentModesKHR( vkGetDeviceProcAddr( device, "vkGetDeviceGroupSurfacePresentModesKHR" ) ); vkAcquireNextImage2KHR = PFN_vkAcquireNextImage2KHR( vkGetDeviceProcAddr( device, "vkAcquireNextImage2KHR" ) 
); - //=== VK_KHR_display_swapchain === - vkCreateSharedSwapchainsKHR = PFN_vkCreateSharedSwapchainsKHR( vkGetDeviceProcAddr( device, "vkCreateSharedSwapchainsKHR" ) ); //=== VK_KHR_display_swapchain === vkCreateSharedSwapchainsKHR = PFN_vkCreateSharedSwapchainsKHR( vkGetDeviceProcAddr( device, "vkCreateSharedSwapchainsKHR" ) ); - //=== VK_EXT_debug_marker === - vkDebugMarkerSetObjectTagEXT = PFN_vkDebugMarkerSetObjectTagEXT( vkGetDeviceProcAddr( device, "vkDebugMarkerSetObjectTagEXT" ) ); - vkDebugMarkerSetObjectNameEXT = PFN_vkDebugMarkerSetObjectNameEXT( vkGetDeviceProcAddr( device, "vkDebugMarkerSetObjectNameEXT" ) ); - vkCmdDebugMarkerBeginEXT = PFN_vkCmdDebugMarkerBeginEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerBeginEXT" ) ); - vkCmdDebugMarkerEndEXT = PFN_vkCmdDebugMarkerEndEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerEndEXT" ) ); - vkCmdDebugMarkerInsertEXT = PFN_vkCmdDebugMarkerInsertEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerInsertEXT" ) ); //=== VK_EXT_debug_marker === vkDebugMarkerSetObjectTagEXT = PFN_vkDebugMarkerSetObjectTagEXT( vkGetDeviceProcAddr( device, "vkDebugMarkerSetObjectTagEXT" ) ); vkDebugMarkerSetObjectNameEXT = PFN_vkDebugMarkerSetObjectNameEXT( vkGetDeviceProcAddr( device, "vkDebugMarkerSetObjectNameEXT" ) ); @@ -27788,18 +20659,6 @@ namespace VULKAN_HPP_NAMESPACE vkCmdDebugMarkerEndEXT = PFN_vkCmdDebugMarkerEndEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerEndEXT" ) ); vkCmdDebugMarkerInsertEXT = PFN_vkCmdDebugMarkerInsertEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerInsertEXT" ) ); - //=== VK_KHR_video_queue === - vkCreateVideoSessionKHR = PFN_vkCreateVideoSessionKHR( vkGetDeviceProcAddr( device, "vkCreateVideoSessionKHR" ) ); - vkDestroyVideoSessionKHR = PFN_vkDestroyVideoSessionKHR( vkGetDeviceProcAddr( device, "vkDestroyVideoSessionKHR" ) ); - vkGetVideoSessionMemoryRequirementsKHR = - PFN_vkGetVideoSessionMemoryRequirementsKHR( vkGetDeviceProcAddr( device, "vkGetVideoSessionMemoryRequirementsKHR" ) ); - vkBindVideoSessionMemoryKHR = PFN_vkBindVideoSessionMemoryKHR( vkGetDeviceProcAddr( device, "vkBindVideoSessionMemoryKHR" ) ); - vkCreateVideoSessionParametersKHR = PFN_vkCreateVideoSessionParametersKHR( vkGetDeviceProcAddr( device, "vkCreateVideoSessionParametersKHR" ) ); - vkUpdateVideoSessionParametersKHR = PFN_vkUpdateVideoSessionParametersKHR( vkGetDeviceProcAddr( device, "vkUpdateVideoSessionParametersKHR" ) ); - vkDestroyVideoSessionParametersKHR = PFN_vkDestroyVideoSessionParametersKHR( vkGetDeviceProcAddr( device, "vkDestroyVideoSessionParametersKHR" ) ); - vkCmdBeginVideoCodingKHR = PFN_vkCmdBeginVideoCodingKHR( vkGetDeviceProcAddr( device, "vkCmdBeginVideoCodingKHR" ) ); - vkCmdEndVideoCodingKHR = PFN_vkCmdEndVideoCodingKHR( vkGetDeviceProcAddr( device, "vkCmdEndVideoCodingKHR" ) ); - vkCmdControlVideoCodingKHR = PFN_vkCmdControlVideoCodingKHR( vkGetDeviceProcAddr( device, "vkCmdControlVideoCodingKHR" ) ); //=== VK_KHR_video_queue === vkCreateVideoSessionKHR = PFN_vkCreateVideoSessionKHR( vkGetDeviceProcAddr( device, "vkCreateVideoSessionKHR" ) ); vkDestroyVideoSessionKHR = PFN_vkDestroyVideoSessionKHR( vkGetDeviceProcAddr( device, "vkDestroyVideoSessionKHR" ) ); @@ -27813,19 +20672,9 @@ namespace VULKAN_HPP_NAMESPACE vkCmdEndVideoCodingKHR = PFN_vkCmdEndVideoCodingKHR( vkGetDeviceProcAddr( device, "vkCmdEndVideoCodingKHR" ) ); vkCmdControlVideoCodingKHR = PFN_vkCmdControlVideoCodingKHR( vkGetDeviceProcAddr( device, "vkCmdControlVideoCodingKHR" ) ); - //=== VK_KHR_video_decode_queue === - vkCmdDecodeVideoKHR = 
PFN_vkCmdDecodeVideoKHR( vkGetDeviceProcAddr( device, "vkCmdDecodeVideoKHR" ) ); //=== VK_KHR_video_decode_queue === vkCmdDecodeVideoKHR = PFN_vkCmdDecodeVideoKHR( vkGetDeviceProcAddr( device, "vkCmdDecodeVideoKHR" ) ); - //=== VK_EXT_transform_feedback === - vkCmdBindTransformFeedbackBuffersEXT = - PFN_vkCmdBindTransformFeedbackBuffersEXT( vkGetDeviceProcAddr( device, "vkCmdBindTransformFeedbackBuffersEXT" ) ); - vkCmdBeginTransformFeedbackEXT = PFN_vkCmdBeginTransformFeedbackEXT( vkGetDeviceProcAddr( device, "vkCmdBeginTransformFeedbackEXT" ) ); - vkCmdEndTransformFeedbackEXT = PFN_vkCmdEndTransformFeedbackEXT( vkGetDeviceProcAddr( device, "vkCmdEndTransformFeedbackEXT" ) ); - vkCmdBeginQueryIndexedEXT = PFN_vkCmdBeginQueryIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdBeginQueryIndexedEXT" ) ); - vkCmdEndQueryIndexedEXT = PFN_vkCmdEndQueryIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdEndQueryIndexedEXT" ) ); - vkCmdDrawIndirectByteCountEXT = PFN_vkCmdDrawIndirectByteCountEXT( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectByteCountEXT" ) ); //=== VK_EXT_transform_feedback === vkCmdBindTransformFeedbackBuffersEXT = PFN_vkCmdBindTransformFeedbackBuffersEXT( vkGetDeviceProcAddr( device, "vkCmdBindTransformFeedbackBuffersEXT" ) ); @@ -27835,12 +20684,6 @@ namespace VULKAN_HPP_NAMESPACE vkCmdEndQueryIndexedEXT = PFN_vkCmdEndQueryIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdEndQueryIndexedEXT" ) ); vkCmdDrawIndirectByteCountEXT = PFN_vkCmdDrawIndirectByteCountEXT( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectByteCountEXT" ) ); - //=== VK_NVX_binary_import === - vkCreateCuModuleNVX = PFN_vkCreateCuModuleNVX( vkGetDeviceProcAddr( device, "vkCreateCuModuleNVX" ) ); - vkCreateCuFunctionNVX = PFN_vkCreateCuFunctionNVX( vkGetDeviceProcAddr( device, "vkCreateCuFunctionNVX" ) ); - vkDestroyCuModuleNVX = PFN_vkDestroyCuModuleNVX( vkGetDeviceProcAddr( device, "vkDestroyCuModuleNVX" ) ); - vkDestroyCuFunctionNVX = PFN_vkDestroyCuFunctionNVX( vkGetDeviceProcAddr( device, "vkDestroyCuFunctionNVX" ) ); - vkCmdCuLaunchKernelNVX = PFN_vkCmdCuLaunchKernelNVX( vkGetDeviceProcAddr( device, "vkCmdCuLaunchKernelNVX" ) ); //=== VK_NVX_binary_import === vkCreateCuModuleNVX = PFN_vkCreateCuModuleNVX( vkGetDeviceProcAddr( device, "vkCreateCuModuleNVX" ) ); vkCreateCuFunctionNVX = PFN_vkCreateCuFunctionNVX( vkGetDeviceProcAddr( device, "vkCreateCuFunctionNVX" ) ); @@ -27849,16 +20692,10 @@ namespace VULKAN_HPP_NAMESPACE vkCmdCuLaunchKernelNVX = PFN_vkCmdCuLaunchKernelNVX( vkGetDeviceProcAddr( device, "vkCmdCuLaunchKernelNVX" ) ); //=== VK_NVX_image_view_handle === - vkGetImageViewHandleNVX = PFN_vkGetImageViewHandleNVX( vkGetDeviceProcAddr( device, "vkGetImageViewHandleNVX" ) ); - vkGetImageViewAddressNVX = PFN_vkGetImageViewAddressNVX( vkGetDeviceProcAddr( device, "vkGetImageViewAddressNVX" ) ); + vkGetImageViewHandleNVX = PFN_vkGetImageViewHandleNVX( vkGetDeviceProcAddr( device, "vkGetImageViewHandleNVX" ) ); + vkGetImageViewHandle64NVX = PFN_vkGetImageViewHandle64NVX( vkGetDeviceProcAddr( device, "vkGetImageViewHandle64NVX" ) ); + vkGetImageViewAddressNVX = PFN_vkGetImageViewAddressNVX( vkGetDeviceProcAddr( device, "vkGetImageViewAddressNVX" ) ); - //=== VK_AMD_draw_indirect_count === - vkCmdDrawIndirectCountAMD = PFN_vkCmdDrawIndirectCountAMD( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCountAMD" ) ); - if ( !vkCmdDrawIndirectCount ) - vkCmdDrawIndirectCount = vkCmdDrawIndirectCountAMD; - vkCmdDrawIndexedIndirectCountAMD = PFN_vkCmdDrawIndexedIndirectCountAMD( vkGetDeviceProcAddr( device, 
"vkCmdDrawIndexedIndirectCountAMD" ) ); - if ( !vkCmdDrawIndexedIndirectCount ) - vkCmdDrawIndexedIndirectCount = vkCmdDrawIndexedIndirectCountAMD; //=== VK_AMD_draw_indirect_count === vkCmdDrawIndirectCountAMD = PFN_vkCmdDrawIndirectCountAMD( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCountAMD" ) ); if ( !vkCmdDrawIndirectCount ) @@ -27869,8 +20706,6 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_AMD_shader_info === vkGetShaderInfoAMD = PFN_vkGetShaderInfoAMD( vkGetDeviceProcAddr( device, "vkGetShaderInfoAMD" ) ); - //=== VK_AMD_shader_info === - vkGetShaderInfoAMD = PFN_vkGetShaderInfoAMD( vkGetDeviceProcAddr( device, "vkGetShaderInfoAMD" ) ); //=== VK_KHR_dynamic_rendering === vkCmdBeginRenderingKHR = PFN_vkCmdBeginRenderingKHR( vkGetDeviceProcAddr( device, "vkCmdBeginRenderingKHR" ) ); @@ -27879,19 +20714,10 @@ namespace VULKAN_HPP_NAMESPACE vkCmdEndRenderingKHR = PFN_vkCmdEndRenderingKHR( vkGetDeviceProcAddr( device, "vkCmdEndRenderingKHR" ) ); if ( !vkCmdEndRendering ) vkCmdEndRendering = vkCmdEndRenderingKHR; - //=== VK_KHR_dynamic_rendering === - vkCmdBeginRenderingKHR = PFN_vkCmdBeginRenderingKHR( vkGetDeviceProcAddr( device, "vkCmdBeginRenderingKHR" ) ); - if ( !vkCmdBeginRendering ) - vkCmdBeginRendering = vkCmdBeginRenderingKHR; - vkCmdEndRenderingKHR = PFN_vkCmdEndRenderingKHR( vkGetDeviceProcAddr( device, "vkCmdEndRenderingKHR" ) ); - if ( !vkCmdEndRendering ) - vkCmdEndRendering = vkCmdEndRenderingKHR; #if defined( VK_USE_PLATFORM_WIN32_KHR ) //=== VK_NV_external_memory_win32 === vkGetMemoryWin32HandleNV = PFN_vkGetMemoryWin32HandleNV( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandleNV" ) ); - //=== VK_NV_external_memory_win32 === - vkGetMemoryWin32HandleNV = PFN_vkGetMemoryWin32HandleNV( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandleNV" ) ); #endif /*VK_USE_PLATFORM_WIN32_KHR*/ //=== VK_KHR_device_group === @@ -27903,17 +20729,6 @@ namespace VULKAN_HPP_NAMESPACE if ( !vkCmdSetDeviceMask ) vkCmdSetDeviceMask = vkCmdSetDeviceMaskKHR; vkCmdDispatchBaseKHR = PFN_vkCmdDispatchBaseKHR( vkGetDeviceProcAddr( device, "vkCmdDispatchBaseKHR" ) ); - if ( !vkCmdDispatchBase ) - vkCmdDispatchBase = vkCmdDispatchBaseKHR; - //=== VK_KHR_device_group === - vkGetDeviceGroupPeerMemoryFeaturesKHR = - PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR( vkGetDeviceProcAddr( device, "vkGetDeviceGroupPeerMemoryFeaturesKHR" ) ); - if ( !vkGetDeviceGroupPeerMemoryFeatures ) - vkGetDeviceGroupPeerMemoryFeatures = vkGetDeviceGroupPeerMemoryFeaturesKHR; - vkCmdSetDeviceMaskKHR = PFN_vkCmdSetDeviceMaskKHR( vkGetDeviceProcAddr( device, "vkCmdSetDeviceMaskKHR" ) ); - if ( !vkCmdSetDeviceMask ) - vkCmdSetDeviceMask = vkCmdSetDeviceMaskKHR; - vkCmdDispatchBaseKHR = PFN_vkCmdDispatchBaseKHR( vkGetDeviceProcAddr( device, "vkCmdDispatchBaseKHR" ) ); if ( !vkCmdDispatchBase ) vkCmdDispatchBase = vkCmdDispatchBaseKHR; @@ -27921,71 +20736,40 @@ namespace VULKAN_HPP_NAMESPACE vkTrimCommandPoolKHR = PFN_vkTrimCommandPoolKHR( vkGetDeviceProcAddr( device, "vkTrimCommandPoolKHR" ) ); if ( !vkTrimCommandPool ) vkTrimCommandPool = vkTrimCommandPoolKHR; - //=== VK_KHR_maintenance1 === - vkTrimCommandPoolKHR = PFN_vkTrimCommandPoolKHR( vkGetDeviceProcAddr( device, "vkTrimCommandPoolKHR" ) ); - if ( !vkTrimCommandPool ) - vkTrimCommandPool = vkTrimCommandPoolKHR; #if defined( VK_USE_PLATFORM_WIN32_KHR ) //=== VK_KHR_external_memory_win32 === vkGetMemoryWin32HandleKHR = PFN_vkGetMemoryWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandleKHR" ) ); vkGetMemoryWin32HandlePropertiesKHR = 
PFN_vkGetMemoryWin32HandlePropertiesKHR( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandlePropertiesKHR" ) ); - //=== VK_KHR_external_memory_win32 === - vkGetMemoryWin32HandleKHR = PFN_vkGetMemoryWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandleKHR" ) ); - vkGetMemoryWin32HandlePropertiesKHR = PFN_vkGetMemoryWin32HandlePropertiesKHR( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandlePropertiesKHR" ) ); #endif /*VK_USE_PLATFORM_WIN32_KHR*/ //=== VK_KHR_external_memory_fd === vkGetMemoryFdKHR = PFN_vkGetMemoryFdKHR( vkGetDeviceProcAddr( device, "vkGetMemoryFdKHR" ) ); vkGetMemoryFdPropertiesKHR = PFN_vkGetMemoryFdPropertiesKHR( vkGetDeviceProcAddr( device, "vkGetMemoryFdPropertiesKHR" ) ); - //=== VK_KHR_external_memory_fd === - vkGetMemoryFdKHR = PFN_vkGetMemoryFdKHR( vkGetDeviceProcAddr( device, "vkGetMemoryFdKHR" ) ); - vkGetMemoryFdPropertiesKHR = PFN_vkGetMemoryFdPropertiesKHR( vkGetDeviceProcAddr( device, "vkGetMemoryFdPropertiesKHR" ) ); #if defined( VK_USE_PLATFORM_WIN32_KHR ) //=== VK_KHR_external_semaphore_win32 === vkImportSemaphoreWin32HandleKHR = PFN_vkImportSemaphoreWin32HandleKHR( vkGetDeviceProcAddr( device, "vkImportSemaphoreWin32HandleKHR" ) ); vkGetSemaphoreWin32HandleKHR = PFN_vkGetSemaphoreWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreWin32HandleKHR" ) ); - //=== VK_KHR_external_semaphore_win32 === - vkImportSemaphoreWin32HandleKHR = PFN_vkImportSemaphoreWin32HandleKHR( vkGetDeviceProcAddr( device, "vkImportSemaphoreWin32HandleKHR" ) ); - vkGetSemaphoreWin32HandleKHR = PFN_vkGetSemaphoreWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreWin32HandleKHR" ) ); #endif /*VK_USE_PLATFORM_WIN32_KHR*/ //=== VK_KHR_external_semaphore_fd === vkImportSemaphoreFdKHR = PFN_vkImportSemaphoreFdKHR( vkGetDeviceProcAddr( device, "vkImportSemaphoreFdKHR" ) ); vkGetSemaphoreFdKHR = PFN_vkGetSemaphoreFdKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreFdKHR" ) ); - //=== VK_KHR_external_semaphore_fd === - vkImportSemaphoreFdKHR = PFN_vkImportSemaphoreFdKHR( vkGetDeviceProcAddr( device, "vkImportSemaphoreFdKHR" ) ); - vkGetSemaphoreFdKHR = PFN_vkGetSemaphoreFdKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreFdKHR" ) ); //=== VK_KHR_push_descriptor === vkCmdPushDescriptorSetKHR = PFN_vkCmdPushDescriptorSetKHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetKHR" ) ); + if ( !vkCmdPushDescriptorSet ) + vkCmdPushDescriptorSet = vkCmdPushDescriptorSetKHR; vkCmdPushDescriptorSetWithTemplateKHR = PFN_vkCmdPushDescriptorSetWithTemplateKHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetWithTemplateKHR" ) ); - //=== VK_KHR_push_descriptor === - vkCmdPushDescriptorSetKHR = PFN_vkCmdPushDescriptorSetKHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetKHR" ) ); - vkCmdPushDescriptorSetWithTemplateKHR = - PFN_vkCmdPushDescriptorSetWithTemplateKHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetWithTemplateKHR" ) ); + if ( !vkCmdPushDescriptorSetWithTemplate ) + vkCmdPushDescriptorSetWithTemplate = vkCmdPushDescriptorSetWithTemplateKHR; //=== VK_EXT_conditional_rendering === vkCmdBeginConditionalRenderingEXT = PFN_vkCmdBeginConditionalRenderingEXT( vkGetDeviceProcAddr( device, "vkCmdBeginConditionalRenderingEXT" ) ); vkCmdEndConditionalRenderingEXT = PFN_vkCmdEndConditionalRenderingEXT( vkGetDeviceProcAddr( device, "vkCmdEndConditionalRenderingEXT" ) ); - //=== VK_EXT_conditional_rendering === - vkCmdBeginConditionalRenderingEXT = PFN_vkCmdBeginConditionalRenderingEXT( vkGetDeviceProcAddr( device, "vkCmdBeginConditionalRenderingEXT" ) ); - 
vkCmdEndConditionalRenderingEXT = PFN_vkCmdEndConditionalRenderingEXT( vkGetDeviceProcAddr( device, "vkCmdEndConditionalRenderingEXT" ) ); - //=== VK_KHR_descriptor_update_template === - vkCreateDescriptorUpdateTemplateKHR = PFN_vkCreateDescriptorUpdateTemplateKHR( vkGetDeviceProcAddr( device, "vkCreateDescriptorUpdateTemplateKHR" ) ); - if ( !vkCreateDescriptorUpdateTemplate ) - vkCreateDescriptorUpdateTemplate = vkCreateDescriptorUpdateTemplateKHR; - vkDestroyDescriptorUpdateTemplateKHR = - PFN_vkDestroyDescriptorUpdateTemplateKHR( vkGetDeviceProcAddr( device, "vkDestroyDescriptorUpdateTemplateKHR" ) ); - if ( !vkDestroyDescriptorUpdateTemplate ) - vkDestroyDescriptorUpdateTemplate = vkDestroyDescriptorUpdateTemplateKHR; - vkUpdateDescriptorSetWithTemplateKHR = - PFN_vkUpdateDescriptorSetWithTemplateKHR( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSetWithTemplateKHR" ) ); - if ( !vkUpdateDescriptorSetWithTemplate ) - vkUpdateDescriptorSetWithTemplate = vkUpdateDescriptorSetWithTemplateKHR; //=== VK_KHR_descriptor_update_template === vkCreateDescriptorUpdateTemplateKHR = PFN_vkCreateDescriptorUpdateTemplateKHR( vkGetDeviceProcAddr( device, "vkCreateDescriptorUpdateTemplateKHR" ) ); if ( !vkCreateDescriptorUpdateTemplate ) @@ -28001,54 +20785,25 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_NV_clip_space_w_scaling === vkCmdSetViewportWScalingNV = PFN_vkCmdSetViewportWScalingNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportWScalingNV" ) ); - //=== VK_NV_clip_space_w_scaling === - vkCmdSetViewportWScalingNV = PFN_vkCmdSetViewportWScalingNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportWScalingNV" ) ); - //=== VK_EXT_display_control === - vkDisplayPowerControlEXT = PFN_vkDisplayPowerControlEXT( vkGetDeviceProcAddr( device, "vkDisplayPowerControlEXT" ) ); - vkRegisterDeviceEventEXT = PFN_vkRegisterDeviceEventEXT( vkGetDeviceProcAddr( device, "vkRegisterDeviceEventEXT" ) ); - vkRegisterDisplayEventEXT = PFN_vkRegisterDisplayEventEXT( vkGetDeviceProcAddr( device, "vkRegisterDisplayEventEXT" ) ); - vkGetSwapchainCounterEXT = PFN_vkGetSwapchainCounterEXT( vkGetDeviceProcAddr( device, "vkGetSwapchainCounterEXT" ) ); //=== VK_EXT_display_control === vkDisplayPowerControlEXT = PFN_vkDisplayPowerControlEXT( vkGetDeviceProcAddr( device, "vkDisplayPowerControlEXT" ) ); vkRegisterDeviceEventEXT = PFN_vkRegisterDeviceEventEXT( vkGetDeviceProcAddr( device, "vkRegisterDeviceEventEXT" ) ); vkRegisterDisplayEventEXT = PFN_vkRegisterDisplayEventEXT( vkGetDeviceProcAddr( device, "vkRegisterDisplayEventEXT" ) ); vkGetSwapchainCounterEXT = PFN_vkGetSwapchainCounterEXT( vkGetDeviceProcAddr( device, "vkGetSwapchainCounterEXT" ) ); - //=== VK_GOOGLE_display_timing === - vkGetRefreshCycleDurationGOOGLE = PFN_vkGetRefreshCycleDurationGOOGLE( vkGetDeviceProcAddr( device, "vkGetRefreshCycleDurationGOOGLE" ) ); - vkGetPastPresentationTimingGOOGLE = PFN_vkGetPastPresentationTimingGOOGLE( vkGetDeviceProcAddr( device, "vkGetPastPresentationTimingGOOGLE" ) ); //=== VK_GOOGLE_display_timing === vkGetRefreshCycleDurationGOOGLE = PFN_vkGetRefreshCycleDurationGOOGLE( vkGetDeviceProcAddr( device, "vkGetRefreshCycleDurationGOOGLE" ) ); vkGetPastPresentationTimingGOOGLE = PFN_vkGetPastPresentationTimingGOOGLE( vkGetDeviceProcAddr( device, "vkGetPastPresentationTimingGOOGLE" ) ); - //=== VK_EXT_discard_rectangles === - vkCmdSetDiscardRectangleEXT = PFN_vkCmdSetDiscardRectangleEXT( vkGetDeviceProcAddr( device, "vkCmdSetDiscardRectangleEXT" ) ); - vkCmdSetDiscardRectangleEnableEXT = PFN_vkCmdSetDiscardRectangleEnableEXT( 
vkGetDeviceProcAddr( device, "vkCmdSetDiscardRectangleEnableEXT" ) ); - vkCmdSetDiscardRectangleModeEXT = PFN_vkCmdSetDiscardRectangleModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetDiscardRectangleModeEXT" ) ); //=== VK_EXT_discard_rectangles === vkCmdSetDiscardRectangleEXT = PFN_vkCmdSetDiscardRectangleEXT( vkGetDeviceProcAddr( device, "vkCmdSetDiscardRectangleEXT" ) ); vkCmdSetDiscardRectangleEnableEXT = PFN_vkCmdSetDiscardRectangleEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDiscardRectangleEnableEXT" ) ); vkCmdSetDiscardRectangleModeEXT = PFN_vkCmdSetDiscardRectangleModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetDiscardRectangleModeEXT" ) ); - //=== VK_EXT_hdr_metadata === - vkSetHdrMetadataEXT = PFN_vkSetHdrMetadataEXT( vkGetDeviceProcAddr( device, "vkSetHdrMetadataEXT" ) ); //=== VK_EXT_hdr_metadata === vkSetHdrMetadataEXT = PFN_vkSetHdrMetadataEXT( vkGetDeviceProcAddr( device, "vkSetHdrMetadataEXT" ) ); - //=== VK_KHR_create_renderpass2 === - vkCreateRenderPass2KHR = PFN_vkCreateRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCreateRenderPass2KHR" ) ); - if ( !vkCreateRenderPass2 ) - vkCreateRenderPass2 = vkCreateRenderPass2KHR; - vkCmdBeginRenderPass2KHR = PFN_vkCmdBeginRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass2KHR" ) ); - if ( !vkCmdBeginRenderPass2 ) - vkCmdBeginRenderPass2 = vkCmdBeginRenderPass2KHR; - vkCmdNextSubpass2KHR = PFN_vkCmdNextSubpass2KHR( vkGetDeviceProcAddr( device, "vkCmdNextSubpass2KHR" ) ); - if ( !vkCmdNextSubpass2 ) - vkCmdNextSubpass2 = vkCmdNextSubpass2KHR; - vkCmdEndRenderPass2KHR = PFN_vkCmdEndRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass2KHR" ) ); - if ( !vkCmdEndRenderPass2 ) - vkCmdEndRenderPass2 = vkCmdEndRenderPass2KHR; //=== VK_KHR_create_renderpass2 === vkCreateRenderPass2KHR = PFN_vkCreateRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCreateRenderPass2KHR" ) ); if ( !vkCreateRenderPass2 ) @@ -28065,28 +20820,17 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_KHR_shared_presentable_image === vkGetSwapchainStatusKHR = PFN_vkGetSwapchainStatusKHR( vkGetDeviceProcAddr( device, "vkGetSwapchainStatusKHR" ) ); - //=== VK_KHR_shared_presentable_image === - vkGetSwapchainStatusKHR = PFN_vkGetSwapchainStatusKHR( vkGetDeviceProcAddr( device, "vkGetSwapchainStatusKHR" ) ); #if defined( VK_USE_PLATFORM_WIN32_KHR ) //=== VK_KHR_external_fence_win32 === vkImportFenceWin32HandleKHR = PFN_vkImportFenceWin32HandleKHR( vkGetDeviceProcAddr( device, "vkImportFenceWin32HandleKHR" ) ); vkGetFenceWin32HandleKHR = PFN_vkGetFenceWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetFenceWin32HandleKHR" ) ); - //=== VK_KHR_external_fence_win32 === - vkImportFenceWin32HandleKHR = PFN_vkImportFenceWin32HandleKHR( vkGetDeviceProcAddr( device, "vkImportFenceWin32HandleKHR" ) ); - vkGetFenceWin32HandleKHR = PFN_vkGetFenceWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetFenceWin32HandleKHR" ) ); #endif /*VK_USE_PLATFORM_WIN32_KHR*/ //=== VK_KHR_external_fence_fd === vkImportFenceFdKHR = PFN_vkImportFenceFdKHR( vkGetDeviceProcAddr( device, "vkImportFenceFdKHR" ) ); vkGetFenceFdKHR = PFN_vkGetFenceFdKHR( vkGetDeviceProcAddr( device, "vkGetFenceFdKHR" ) ); - //=== VK_KHR_external_fence_fd === - vkImportFenceFdKHR = PFN_vkImportFenceFdKHR( vkGetDeviceProcAddr( device, "vkImportFenceFdKHR" ) ); - vkGetFenceFdKHR = PFN_vkGetFenceFdKHR( vkGetDeviceProcAddr( device, "vkGetFenceFdKHR" ) ); - //=== VK_KHR_performance_query === - vkAcquireProfilingLockKHR = PFN_vkAcquireProfilingLockKHR( vkGetDeviceProcAddr( device, "vkAcquireProfilingLockKHR" ) ); - 
vkReleaseProfilingLockKHR = PFN_vkReleaseProfilingLockKHR( vkGetDeviceProcAddr( device, "vkReleaseProfilingLockKHR" ) ); //=== VK_KHR_performance_query === vkAcquireProfilingLockKHR = PFN_vkAcquireProfilingLockKHR( vkGetDeviceProcAddr( device, "vkAcquireProfilingLockKHR" ) ); vkReleaseProfilingLockKHR = PFN_vkReleaseProfilingLockKHR( vkGetDeviceProcAddr( device, "vkReleaseProfilingLockKHR" ) ); @@ -28100,15 +20844,6 @@ namespace VULKAN_HPP_NAMESPACE vkCmdBeginDebugUtilsLabelEXT = PFN_vkCmdBeginDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdBeginDebugUtilsLabelEXT" ) ); vkCmdEndDebugUtilsLabelEXT = PFN_vkCmdEndDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdEndDebugUtilsLabelEXT" ) ); vkCmdInsertDebugUtilsLabelEXT = PFN_vkCmdInsertDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdInsertDebugUtilsLabelEXT" ) ); - //=== VK_EXT_debug_utils === - vkSetDebugUtilsObjectNameEXT = PFN_vkSetDebugUtilsObjectNameEXT( vkGetDeviceProcAddr( device, "vkSetDebugUtilsObjectNameEXT" ) ); - vkSetDebugUtilsObjectTagEXT = PFN_vkSetDebugUtilsObjectTagEXT( vkGetDeviceProcAddr( device, "vkSetDebugUtilsObjectTagEXT" ) ); - vkQueueBeginDebugUtilsLabelEXT = PFN_vkQueueBeginDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueBeginDebugUtilsLabelEXT" ) ); - vkQueueEndDebugUtilsLabelEXT = PFN_vkQueueEndDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueEndDebugUtilsLabelEXT" ) ); - vkQueueInsertDebugUtilsLabelEXT = PFN_vkQueueInsertDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueInsertDebugUtilsLabelEXT" ) ); - vkCmdBeginDebugUtilsLabelEXT = PFN_vkCmdBeginDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdBeginDebugUtilsLabelEXT" ) ); - vkCmdEndDebugUtilsLabelEXT = PFN_vkCmdEndDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdEndDebugUtilsLabelEXT" ) ); - vkCmdInsertDebugUtilsLabelEXT = PFN_vkCmdInsertDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdInsertDebugUtilsLabelEXT" ) ); #if defined( VK_USE_PLATFORM_ANDROID_KHR ) //=== VK_ANDROID_external_memory_android_hardware_buffer === @@ -28116,11 +20851,6 @@ namespace VULKAN_HPP_NAMESPACE PFN_vkGetAndroidHardwareBufferPropertiesANDROID( vkGetDeviceProcAddr( device, "vkGetAndroidHardwareBufferPropertiesANDROID" ) ); vkGetMemoryAndroidHardwareBufferANDROID = PFN_vkGetMemoryAndroidHardwareBufferANDROID( vkGetDeviceProcAddr( device, "vkGetMemoryAndroidHardwareBufferANDROID" ) ); - //=== VK_ANDROID_external_memory_android_hardware_buffer === - vkGetAndroidHardwareBufferPropertiesANDROID = - PFN_vkGetAndroidHardwareBufferPropertiesANDROID( vkGetDeviceProcAddr( device, "vkGetAndroidHardwareBufferPropertiesANDROID" ) ); - vkGetMemoryAndroidHardwareBufferANDROID = - PFN_vkGetMemoryAndroidHardwareBufferANDROID( vkGetDeviceProcAddr( device, "vkGetMemoryAndroidHardwareBufferANDROID" ) ); #endif /*VK_USE_PLATFORM_ANDROID_KHR*/ #if defined( VK_ENABLE_BETA_EXTENSIONS ) @@ -28135,35 +20865,11 @@ namespace VULKAN_HPP_NAMESPACE vkCmdDispatchGraphAMDX = PFN_vkCmdDispatchGraphAMDX( vkGetDeviceProcAddr( device, "vkCmdDispatchGraphAMDX" ) ); vkCmdDispatchGraphIndirectAMDX = PFN_vkCmdDispatchGraphIndirectAMDX( vkGetDeviceProcAddr( device, "vkCmdDispatchGraphIndirectAMDX" ) ); vkCmdDispatchGraphIndirectCountAMDX = PFN_vkCmdDispatchGraphIndirectCountAMDX( vkGetDeviceProcAddr( device, "vkCmdDispatchGraphIndirectCountAMDX" ) ); - //=== VK_AMDX_shader_enqueue === - vkCreateExecutionGraphPipelinesAMDX = PFN_vkCreateExecutionGraphPipelinesAMDX( vkGetDeviceProcAddr( device, "vkCreateExecutionGraphPipelinesAMDX" ) ); - 
vkGetExecutionGraphPipelineScratchSizeAMDX = - PFN_vkGetExecutionGraphPipelineScratchSizeAMDX( vkGetDeviceProcAddr( device, "vkGetExecutionGraphPipelineScratchSizeAMDX" ) ); - vkGetExecutionGraphPipelineNodeIndexAMDX = - PFN_vkGetExecutionGraphPipelineNodeIndexAMDX( vkGetDeviceProcAddr( device, "vkGetExecutionGraphPipelineNodeIndexAMDX" ) ); - vkCmdInitializeGraphScratchMemoryAMDX = - PFN_vkCmdInitializeGraphScratchMemoryAMDX( vkGetDeviceProcAddr( device, "vkCmdInitializeGraphScratchMemoryAMDX" ) ); - vkCmdDispatchGraphAMDX = PFN_vkCmdDispatchGraphAMDX( vkGetDeviceProcAddr( device, "vkCmdDispatchGraphAMDX" ) ); - vkCmdDispatchGraphIndirectAMDX = PFN_vkCmdDispatchGraphIndirectAMDX( vkGetDeviceProcAddr( device, "vkCmdDispatchGraphIndirectAMDX" ) ); - vkCmdDispatchGraphIndirectCountAMDX = PFN_vkCmdDispatchGraphIndirectCountAMDX( vkGetDeviceProcAddr( device, "vkCmdDispatchGraphIndirectCountAMDX" ) ); #endif /*VK_ENABLE_BETA_EXTENSIONS*/ //=== VK_EXT_sample_locations === vkCmdSetSampleLocationsEXT = PFN_vkCmdSetSampleLocationsEXT( vkGetDeviceProcAddr( device, "vkCmdSetSampleLocationsEXT" ) ); - //=== VK_EXT_sample_locations === - vkCmdSetSampleLocationsEXT = PFN_vkCmdSetSampleLocationsEXT( vkGetDeviceProcAddr( device, "vkCmdSetSampleLocationsEXT" ) ); - //=== VK_KHR_get_memory_requirements2 === - vkGetImageMemoryRequirements2KHR = PFN_vkGetImageMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements2KHR" ) ); - if ( !vkGetImageMemoryRequirements2 ) - vkGetImageMemoryRequirements2 = vkGetImageMemoryRequirements2KHR; - vkGetBufferMemoryRequirements2KHR = PFN_vkGetBufferMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements2KHR" ) ); - if ( !vkGetBufferMemoryRequirements2 ) - vkGetBufferMemoryRequirements2 = vkGetBufferMemoryRequirements2KHR; - vkGetImageSparseMemoryRequirements2KHR = - PFN_vkGetImageSparseMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements2KHR" ) ); - if ( !vkGetImageSparseMemoryRequirements2 ) - vkGetImageSparseMemoryRequirements2 = vkGetImageSparseMemoryRequirements2KHR; //=== VK_KHR_get_memory_requirements2 === vkGetImageMemoryRequirements2KHR = PFN_vkGetImageMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements2KHR" ) ); if ( !vkGetImageMemoryRequirements2 ) @@ -28191,33 +20897,6 @@ namespace VULKAN_HPP_NAMESPACE vkWriteAccelerationStructuresPropertiesKHR = PFN_vkWriteAccelerationStructuresPropertiesKHR( vkGetDeviceProcAddr( device, "vkWriteAccelerationStructuresPropertiesKHR" ) ); vkCmdCopyAccelerationStructureKHR = PFN_vkCmdCopyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureKHR" ) ); - vkCmdCopyAccelerationStructureToMemoryKHR = - PFN_vkCmdCopyAccelerationStructureToMemoryKHR( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureToMemoryKHR" ) ); - vkCmdCopyMemoryToAccelerationStructureKHR = - PFN_vkCmdCopyMemoryToAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCmdCopyMemoryToAccelerationStructureKHR" ) ); - vkGetAccelerationStructureDeviceAddressKHR = - PFN_vkGetAccelerationStructureDeviceAddressKHR( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureDeviceAddressKHR" ) ); - vkCmdWriteAccelerationStructuresPropertiesKHR = - PFN_vkCmdWriteAccelerationStructuresPropertiesKHR( vkGetDeviceProcAddr( device, "vkCmdWriteAccelerationStructuresPropertiesKHR" ) ); - vkGetDeviceAccelerationStructureCompatibilityKHR = - PFN_vkGetDeviceAccelerationStructureCompatibilityKHR( vkGetDeviceProcAddr( device, 
"vkGetDeviceAccelerationStructureCompatibilityKHR" ) ); - vkGetAccelerationStructureBuildSizesKHR = - PFN_vkGetAccelerationStructureBuildSizesKHR( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureBuildSizesKHR" ) ); - //=== VK_KHR_acceleration_structure === - vkCreateAccelerationStructureKHR = PFN_vkCreateAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCreateAccelerationStructureKHR" ) ); - vkDestroyAccelerationStructureKHR = PFN_vkDestroyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkDestroyAccelerationStructureKHR" ) ); - vkCmdBuildAccelerationStructuresKHR = PFN_vkCmdBuildAccelerationStructuresKHR( vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructuresKHR" ) ); - vkCmdBuildAccelerationStructuresIndirectKHR = - PFN_vkCmdBuildAccelerationStructuresIndirectKHR( vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructuresIndirectKHR" ) ); - vkBuildAccelerationStructuresKHR = PFN_vkBuildAccelerationStructuresKHR( vkGetDeviceProcAddr( device, "vkBuildAccelerationStructuresKHR" ) ); - vkCopyAccelerationStructureKHR = PFN_vkCopyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCopyAccelerationStructureKHR" ) ); - vkCopyAccelerationStructureToMemoryKHR = - PFN_vkCopyAccelerationStructureToMemoryKHR( vkGetDeviceProcAddr( device, "vkCopyAccelerationStructureToMemoryKHR" ) ); - vkCopyMemoryToAccelerationStructureKHR = - PFN_vkCopyMemoryToAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCopyMemoryToAccelerationStructureKHR" ) ); - vkWriteAccelerationStructuresPropertiesKHR = - PFN_vkWriteAccelerationStructuresPropertiesKHR( vkGetDeviceProcAddr( device, "vkWriteAccelerationStructuresPropertiesKHR" ) ); - vkCmdCopyAccelerationStructureKHR = PFN_vkCmdCopyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureKHR" ) ); vkCmdCopyAccelerationStructureToMemoryKHR = PFN_vkCmdCopyAccelerationStructureToMemoryKHR( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureToMemoryKHR" ) ); vkCmdCopyMemoryToAccelerationStructureKHR = @@ -28239,18 +20918,6 @@ namespace VULKAN_HPP_NAMESPACE vkGetRayTracingCaptureReplayShaderGroupHandlesKHR = PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( vkGetDeviceProcAddr( device, "vkGetRayTracingCaptureReplayShaderGroupHandlesKHR" ) ); vkCmdTraceRaysIndirectKHR = PFN_vkCmdTraceRaysIndirectKHR( vkGetDeviceProcAddr( device, "vkCmdTraceRaysIndirectKHR" ) ); - vkGetRayTracingShaderGroupStackSizeKHR = - PFN_vkGetRayTracingShaderGroupStackSizeKHR( vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupStackSizeKHR" ) ); - vkCmdSetRayTracingPipelineStackSizeKHR = - PFN_vkCmdSetRayTracingPipelineStackSizeKHR( vkGetDeviceProcAddr( device, "vkCmdSetRayTracingPipelineStackSizeKHR" ) ); - //=== VK_KHR_ray_tracing_pipeline === - vkCmdTraceRaysKHR = PFN_vkCmdTraceRaysKHR( vkGetDeviceProcAddr( device, "vkCmdTraceRaysKHR" ) ); - vkCreateRayTracingPipelinesKHR = PFN_vkCreateRayTracingPipelinesKHR( vkGetDeviceProcAddr( device, "vkCreateRayTracingPipelinesKHR" ) ); - vkGetRayTracingShaderGroupHandlesKHR = - PFN_vkGetRayTracingShaderGroupHandlesKHR( vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupHandlesKHR" ) ); - vkGetRayTracingCaptureReplayShaderGroupHandlesKHR = - PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( vkGetDeviceProcAddr( device, "vkGetRayTracingCaptureReplayShaderGroupHandlesKHR" ) ); - vkCmdTraceRaysIndirectKHR = PFN_vkCmdTraceRaysIndirectKHR( vkGetDeviceProcAddr( device, "vkCmdTraceRaysIndirectKHR" ) ); vkGetRayTracingShaderGroupStackSizeKHR = 
PFN_vkGetRayTracingShaderGroupStackSizeKHR( vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupStackSizeKHR" ) ); vkCmdSetRayTracingPipelineStackSizeKHR = @@ -28261,13 +20928,6 @@ namespace VULKAN_HPP_NAMESPACE if ( !vkCreateSamplerYcbcrConversion ) vkCreateSamplerYcbcrConversion = vkCreateSamplerYcbcrConversionKHR; vkDestroySamplerYcbcrConversionKHR = PFN_vkDestroySamplerYcbcrConversionKHR( vkGetDeviceProcAddr( device, "vkDestroySamplerYcbcrConversionKHR" ) ); - if ( !vkDestroySamplerYcbcrConversion ) - vkDestroySamplerYcbcrConversion = vkDestroySamplerYcbcrConversionKHR; - //=== VK_KHR_sampler_ycbcr_conversion === - vkCreateSamplerYcbcrConversionKHR = PFN_vkCreateSamplerYcbcrConversionKHR( vkGetDeviceProcAddr( device, "vkCreateSamplerYcbcrConversionKHR" ) ); - if ( !vkCreateSamplerYcbcrConversion ) - vkCreateSamplerYcbcrConversion = vkCreateSamplerYcbcrConversionKHR; - vkDestroySamplerYcbcrConversionKHR = PFN_vkDestroySamplerYcbcrConversionKHR( vkGetDeviceProcAddr( device, "vkDestroySamplerYcbcrConversionKHR" ) ); if ( !vkDestroySamplerYcbcrConversion ) vkDestroySamplerYcbcrConversion = vkDestroySamplerYcbcrConversionKHR; @@ -28276,20 +20936,10 @@ namespace VULKAN_HPP_NAMESPACE if ( !vkBindBufferMemory2 ) vkBindBufferMemory2 = vkBindBufferMemory2KHR; vkBindImageMemory2KHR = PFN_vkBindImageMemory2KHR( vkGetDeviceProcAddr( device, "vkBindImageMemory2KHR" ) ); - if ( !vkBindImageMemory2 ) - vkBindImageMemory2 = vkBindImageMemory2KHR; - //=== VK_KHR_bind_memory2 === - vkBindBufferMemory2KHR = PFN_vkBindBufferMemory2KHR( vkGetDeviceProcAddr( device, "vkBindBufferMemory2KHR" ) ); - if ( !vkBindBufferMemory2 ) - vkBindBufferMemory2 = vkBindBufferMemory2KHR; - vkBindImageMemory2KHR = PFN_vkBindImageMemory2KHR( vkGetDeviceProcAddr( device, "vkBindImageMemory2KHR" ) ); if ( !vkBindImageMemory2 ) vkBindImageMemory2 = vkBindImageMemory2KHR; //=== VK_EXT_image_drm_format_modifier === - vkGetImageDrmFormatModifierPropertiesEXT = - PFN_vkGetImageDrmFormatModifierPropertiesEXT( vkGetDeviceProcAddr( device, "vkGetImageDrmFormatModifierPropertiesEXT" ) ); - //=== VK_EXT_image_drm_format_modifier === vkGetImageDrmFormatModifierPropertiesEXT = PFN_vkGetImageDrmFormatModifierPropertiesEXT( vkGetDeviceProcAddr( device, "vkGetImageDrmFormatModifierPropertiesEXT" ) ); @@ -28298,40 +20948,13 @@ namespace VULKAN_HPP_NAMESPACE vkDestroyValidationCacheEXT = PFN_vkDestroyValidationCacheEXT( vkGetDeviceProcAddr( device, "vkDestroyValidationCacheEXT" ) ); vkMergeValidationCachesEXT = PFN_vkMergeValidationCachesEXT( vkGetDeviceProcAddr( device, "vkMergeValidationCachesEXT" ) ); vkGetValidationCacheDataEXT = PFN_vkGetValidationCacheDataEXT( vkGetDeviceProcAddr( device, "vkGetValidationCacheDataEXT" ) ); - //=== VK_EXT_validation_cache === - vkCreateValidationCacheEXT = PFN_vkCreateValidationCacheEXT( vkGetDeviceProcAddr( device, "vkCreateValidationCacheEXT" ) ); - vkDestroyValidationCacheEXT = PFN_vkDestroyValidationCacheEXT( vkGetDeviceProcAddr( device, "vkDestroyValidationCacheEXT" ) ); - vkMergeValidationCachesEXT = PFN_vkMergeValidationCachesEXT( vkGetDeviceProcAddr( device, "vkMergeValidationCachesEXT" ) ); - vkGetValidationCacheDataEXT = PFN_vkGetValidationCacheDataEXT( vkGetDeviceProcAddr( device, "vkGetValidationCacheDataEXT" ) ); - //=== VK_NV_shading_rate_image === - vkCmdBindShadingRateImageNV = PFN_vkCmdBindShadingRateImageNV( vkGetDeviceProcAddr( device, "vkCmdBindShadingRateImageNV" ) ); - vkCmdSetViewportShadingRatePaletteNV = - PFN_vkCmdSetViewportShadingRatePaletteNV( vkGetDeviceProcAddr( device, 
"vkCmdSetViewportShadingRatePaletteNV" ) ); - vkCmdSetCoarseSampleOrderNV = PFN_vkCmdSetCoarseSampleOrderNV( vkGetDeviceProcAddr( device, "vkCmdSetCoarseSampleOrderNV" ) ); //=== VK_NV_shading_rate_image === vkCmdBindShadingRateImageNV = PFN_vkCmdBindShadingRateImageNV( vkGetDeviceProcAddr( device, "vkCmdBindShadingRateImageNV" ) ); vkCmdSetViewportShadingRatePaletteNV = PFN_vkCmdSetViewportShadingRatePaletteNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportShadingRatePaletteNV" ) ); vkCmdSetCoarseSampleOrderNV = PFN_vkCmdSetCoarseSampleOrderNV( vkGetDeviceProcAddr( device, "vkCmdSetCoarseSampleOrderNV" ) ); - //=== VK_NV_ray_tracing === - vkCreateAccelerationStructureNV = PFN_vkCreateAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCreateAccelerationStructureNV" ) ); - vkDestroyAccelerationStructureNV = PFN_vkDestroyAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkDestroyAccelerationStructureNV" ) ); - vkGetAccelerationStructureMemoryRequirementsNV = - PFN_vkGetAccelerationStructureMemoryRequirementsNV( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureMemoryRequirementsNV" ) ); - vkBindAccelerationStructureMemoryNV = PFN_vkBindAccelerationStructureMemoryNV( vkGetDeviceProcAddr( device, "vkBindAccelerationStructureMemoryNV" ) ); - vkCmdBuildAccelerationStructureNV = PFN_vkCmdBuildAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructureNV" ) ); - vkCmdCopyAccelerationStructureNV = PFN_vkCmdCopyAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureNV" ) ); - vkCmdTraceRaysNV = PFN_vkCmdTraceRaysNV( vkGetDeviceProcAddr( device, "vkCmdTraceRaysNV" ) ); - vkCreateRayTracingPipelinesNV = PFN_vkCreateRayTracingPipelinesNV( vkGetDeviceProcAddr( device, "vkCreateRayTracingPipelinesNV" ) ); - vkGetRayTracingShaderGroupHandlesNV = PFN_vkGetRayTracingShaderGroupHandlesNV( vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupHandlesNV" ) ); - if ( !vkGetRayTracingShaderGroupHandlesKHR ) - vkGetRayTracingShaderGroupHandlesKHR = vkGetRayTracingShaderGroupHandlesNV; - vkGetAccelerationStructureHandleNV = PFN_vkGetAccelerationStructureHandleNV( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureHandleNV" ) ); - vkCmdWriteAccelerationStructuresPropertiesNV = - PFN_vkCmdWriteAccelerationStructuresPropertiesNV( vkGetDeviceProcAddr( device, "vkCmdWriteAccelerationStructuresPropertiesNV" ) ); - vkCompileDeferredNV = PFN_vkCompileDeferredNV( vkGetDeviceProcAddr( device, "vkCompileDeferredNV" ) ); //=== VK_NV_ray_tracing === vkCreateAccelerationStructureNV = PFN_vkCreateAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCreateAccelerationStructureNV" ) ); vkDestroyAccelerationStructureNV = PFN_vkDestroyAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkDestroyAccelerationStructureNV" ) ); @@ -28350,10 +20973,6 @@ namespace VULKAN_HPP_NAMESPACE PFN_vkCmdWriteAccelerationStructuresPropertiesNV( vkGetDeviceProcAddr( device, "vkCmdWriteAccelerationStructuresPropertiesNV" ) ); vkCompileDeferredNV = PFN_vkCompileDeferredNV( vkGetDeviceProcAddr( device, "vkCompileDeferredNV" ) ); - //=== VK_KHR_maintenance3 === - vkGetDescriptorSetLayoutSupportKHR = PFN_vkGetDescriptorSetLayoutSupportKHR( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutSupportKHR" ) ); - if ( !vkGetDescriptorSetLayoutSupport ) - vkGetDescriptorSetLayoutSupport = vkGetDescriptorSetLayoutSupportKHR; //=== VK_KHR_maintenance3 === vkGetDescriptorSetLayoutSupportKHR = PFN_vkGetDescriptorSetLayoutSupportKHR( vkGetDeviceProcAddr( device, 
"vkGetDescriptorSetLayoutSupportKHR" ) ); if ( !vkGetDescriptorSetLayoutSupport ) @@ -28364,32 +20983,16 @@ namespace VULKAN_HPP_NAMESPACE if ( !vkCmdDrawIndirectCount ) vkCmdDrawIndirectCount = vkCmdDrawIndirectCountKHR; vkCmdDrawIndexedIndirectCountKHR = PFN_vkCmdDrawIndexedIndirectCountKHR( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCountKHR" ) ); - if ( !vkCmdDrawIndexedIndirectCount ) - vkCmdDrawIndexedIndirectCount = vkCmdDrawIndexedIndirectCountKHR; - //=== VK_KHR_draw_indirect_count === - vkCmdDrawIndirectCountKHR = PFN_vkCmdDrawIndirectCountKHR( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCountKHR" ) ); - if ( !vkCmdDrawIndirectCount ) - vkCmdDrawIndirectCount = vkCmdDrawIndirectCountKHR; - vkCmdDrawIndexedIndirectCountKHR = PFN_vkCmdDrawIndexedIndirectCountKHR( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCountKHR" ) ); if ( !vkCmdDrawIndexedIndirectCount ) vkCmdDrawIndexedIndirectCount = vkCmdDrawIndexedIndirectCountKHR; //=== VK_EXT_external_memory_host === vkGetMemoryHostPointerPropertiesEXT = PFN_vkGetMemoryHostPointerPropertiesEXT( vkGetDeviceProcAddr( device, "vkGetMemoryHostPointerPropertiesEXT" ) ); - //=== VK_EXT_external_memory_host === - vkGetMemoryHostPointerPropertiesEXT = PFN_vkGetMemoryHostPointerPropertiesEXT( vkGetDeviceProcAddr( device, "vkGetMemoryHostPointerPropertiesEXT" ) ); - //=== VK_AMD_buffer_marker === - vkCmdWriteBufferMarkerAMD = PFN_vkCmdWriteBufferMarkerAMD( vkGetDeviceProcAddr( device, "vkCmdWriteBufferMarkerAMD" ) ); - vkCmdWriteBufferMarker2AMD = PFN_vkCmdWriteBufferMarker2AMD( vkGetDeviceProcAddr( device, "vkCmdWriteBufferMarker2AMD" ) ); //=== VK_AMD_buffer_marker === vkCmdWriteBufferMarkerAMD = PFN_vkCmdWriteBufferMarkerAMD( vkGetDeviceProcAddr( device, "vkCmdWriteBufferMarkerAMD" ) ); vkCmdWriteBufferMarker2AMD = PFN_vkCmdWriteBufferMarker2AMD( vkGetDeviceProcAddr( device, "vkCmdWriteBufferMarker2AMD" ) ); - //=== VK_EXT_calibrated_timestamps === - vkGetCalibratedTimestampsEXT = PFN_vkGetCalibratedTimestampsEXT( vkGetDeviceProcAddr( device, "vkGetCalibratedTimestampsEXT" ) ); - if ( !vkGetCalibratedTimestampsKHR ) - vkGetCalibratedTimestampsKHR = vkGetCalibratedTimestampsEXT; //=== VK_EXT_calibrated_timestamps === vkGetCalibratedTimestampsEXT = PFN_vkGetCalibratedTimestampsEXT( vkGetDeviceProcAddr( device, "vkGetCalibratedTimestampsEXT" ) ); if ( !vkGetCalibratedTimestampsKHR ) @@ -28399,37 +21002,16 @@ namespace VULKAN_HPP_NAMESPACE vkCmdDrawMeshTasksNV = PFN_vkCmdDrawMeshTasksNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksNV" ) ); vkCmdDrawMeshTasksIndirectNV = PFN_vkCmdDrawMeshTasksIndirectNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectNV" ) ); vkCmdDrawMeshTasksIndirectCountNV = PFN_vkCmdDrawMeshTasksIndirectCountNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectCountNV" ) ); - //=== VK_NV_mesh_shader === - vkCmdDrawMeshTasksNV = PFN_vkCmdDrawMeshTasksNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksNV" ) ); - vkCmdDrawMeshTasksIndirectNV = PFN_vkCmdDrawMeshTasksIndirectNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectNV" ) ); - vkCmdDrawMeshTasksIndirectCountNV = PFN_vkCmdDrawMeshTasksIndirectCountNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectCountNV" ) ); - //=== VK_NV_scissor_exclusive === - vkCmdSetExclusiveScissorEnableNV = PFN_vkCmdSetExclusiveScissorEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetExclusiveScissorEnableNV" ) ); - vkCmdSetExclusiveScissorNV = PFN_vkCmdSetExclusiveScissorNV( vkGetDeviceProcAddr( device, "vkCmdSetExclusiveScissorNV" ) ); 
//=== VK_NV_scissor_exclusive === vkCmdSetExclusiveScissorEnableNV = PFN_vkCmdSetExclusiveScissorEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetExclusiveScissorEnableNV" ) ); vkCmdSetExclusiveScissorNV = PFN_vkCmdSetExclusiveScissorNV( vkGetDeviceProcAddr( device, "vkCmdSetExclusiveScissorNV" ) ); - //=== VK_NV_device_diagnostic_checkpoints === - vkCmdSetCheckpointNV = PFN_vkCmdSetCheckpointNV( vkGetDeviceProcAddr( device, "vkCmdSetCheckpointNV" ) ); - vkGetQueueCheckpointDataNV = PFN_vkGetQueueCheckpointDataNV( vkGetDeviceProcAddr( device, "vkGetQueueCheckpointDataNV" ) ); - vkGetQueueCheckpointData2NV = PFN_vkGetQueueCheckpointData2NV( vkGetDeviceProcAddr( device, "vkGetQueueCheckpointData2NV" ) ); //=== VK_NV_device_diagnostic_checkpoints === vkCmdSetCheckpointNV = PFN_vkCmdSetCheckpointNV( vkGetDeviceProcAddr( device, "vkCmdSetCheckpointNV" ) ); vkGetQueueCheckpointDataNV = PFN_vkGetQueueCheckpointDataNV( vkGetDeviceProcAddr( device, "vkGetQueueCheckpointDataNV" ) ); vkGetQueueCheckpointData2NV = PFN_vkGetQueueCheckpointData2NV( vkGetDeviceProcAddr( device, "vkGetQueueCheckpointData2NV" ) ); - //=== VK_KHR_timeline_semaphore === - vkGetSemaphoreCounterValueKHR = PFN_vkGetSemaphoreCounterValueKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreCounterValueKHR" ) ); - if ( !vkGetSemaphoreCounterValue ) - vkGetSemaphoreCounterValue = vkGetSemaphoreCounterValueKHR; - vkWaitSemaphoresKHR = PFN_vkWaitSemaphoresKHR( vkGetDeviceProcAddr( device, "vkWaitSemaphoresKHR" ) ); - if ( !vkWaitSemaphores ) - vkWaitSemaphores = vkWaitSemaphoresKHR; - vkSignalSemaphoreKHR = PFN_vkSignalSemaphoreKHR( vkGetDeviceProcAddr( device, "vkSignalSemaphoreKHR" ) ); - if ( !vkSignalSemaphore ) - vkSignalSemaphore = vkSignalSemaphoreKHR; //=== VK_KHR_timeline_semaphore === vkGetSemaphoreCounterValueKHR = PFN_vkGetSemaphoreCounterValueKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreCounterValueKHR" ) ); if ( !vkGetSemaphoreCounterValue ) @@ -28455,55 +21037,30 @@ namespace VULKAN_HPP_NAMESPACE vkQueueSetPerformanceConfigurationINTEL = PFN_vkQueueSetPerformanceConfigurationINTEL( vkGetDeviceProcAddr( device, "vkQueueSetPerformanceConfigurationINTEL" ) ); vkGetPerformanceParameterINTEL = PFN_vkGetPerformanceParameterINTEL( vkGetDeviceProcAddr( device, "vkGetPerformanceParameterINTEL" ) ); - //=== VK_INTEL_performance_query === - vkInitializePerformanceApiINTEL = PFN_vkInitializePerformanceApiINTEL( vkGetDeviceProcAddr( device, "vkInitializePerformanceApiINTEL" ) ); - vkUninitializePerformanceApiINTEL = PFN_vkUninitializePerformanceApiINTEL( vkGetDeviceProcAddr( device, "vkUninitializePerformanceApiINTEL" ) ); - vkCmdSetPerformanceMarkerINTEL = PFN_vkCmdSetPerformanceMarkerINTEL( vkGetDeviceProcAddr( device, "vkCmdSetPerformanceMarkerINTEL" ) ); - vkCmdSetPerformanceStreamMarkerINTEL = - PFN_vkCmdSetPerformanceStreamMarkerINTEL( vkGetDeviceProcAddr( device, "vkCmdSetPerformanceStreamMarkerINTEL" ) ); - vkCmdSetPerformanceOverrideINTEL = PFN_vkCmdSetPerformanceOverrideINTEL( vkGetDeviceProcAddr( device, "vkCmdSetPerformanceOverrideINTEL" ) ); - vkAcquirePerformanceConfigurationINTEL = - PFN_vkAcquirePerformanceConfigurationINTEL( vkGetDeviceProcAddr( device, "vkAcquirePerformanceConfigurationINTEL" ) ); - vkReleasePerformanceConfigurationINTEL = - PFN_vkReleasePerformanceConfigurationINTEL( vkGetDeviceProcAddr( device, "vkReleasePerformanceConfigurationINTEL" ) ); - vkQueueSetPerformanceConfigurationINTEL = - PFN_vkQueueSetPerformanceConfigurationINTEL( vkGetDeviceProcAddr( device, 
"vkQueueSetPerformanceConfigurationINTEL" ) ); - vkGetPerformanceParameterINTEL = PFN_vkGetPerformanceParameterINTEL( vkGetDeviceProcAddr( device, "vkGetPerformanceParameterINTEL" ) ); - //=== VK_AMD_display_native_hdr === - vkSetLocalDimmingAMD = PFN_vkSetLocalDimmingAMD( vkGetDeviceProcAddr( device, "vkSetLocalDimmingAMD" ) ); //=== VK_AMD_display_native_hdr === vkSetLocalDimmingAMD = PFN_vkSetLocalDimmingAMD( vkGetDeviceProcAddr( device, "vkSetLocalDimmingAMD" ) ); - //=== VK_KHR_fragment_shading_rate === - vkCmdSetFragmentShadingRateKHR = PFN_vkCmdSetFragmentShadingRateKHR( vkGetDeviceProcAddr( device, "vkCmdSetFragmentShadingRateKHR" ) ); //=== VK_KHR_fragment_shading_rate === vkCmdSetFragmentShadingRateKHR = PFN_vkCmdSetFragmentShadingRateKHR( vkGetDeviceProcAddr( device, "vkCmdSetFragmentShadingRateKHR" ) ); //=== VK_KHR_dynamic_rendering_local_read === vkCmdSetRenderingAttachmentLocationsKHR = PFN_vkCmdSetRenderingAttachmentLocationsKHR( vkGetDeviceProcAddr( device, "vkCmdSetRenderingAttachmentLocationsKHR" ) ); + if ( !vkCmdSetRenderingAttachmentLocations ) + vkCmdSetRenderingAttachmentLocations = vkCmdSetRenderingAttachmentLocationsKHR; vkCmdSetRenderingInputAttachmentIndicesKHR = PFN_vkCmdSetRenderingInputAttachmentIndicesKHR( vkGetDeviceProcAddr( device, "vkCmdSetRenderingInputAttachmentIndicesKHR" ) ); - //=== VK_KHR_dynamic_rendering_local_read === - vkCmdSetRenderingAttachmentLocationsKHR = - PFN_vkCmdSetRenderingAttachmentLocationsKHR( vkGetDeviceProcAddr( device, "vkCmdSetRenderingAttachmentLocationsKHR" ) ); - vkCmdSetRenderingInputAttachmentIndicesKHR = - PFN_vkCmdSetRenderingInputAttachmentIndicesKHR( vkGetDeviceProcAddr( device, "vkCmdSetRenderingInputAttachmentIndicesKHR" ) ); + if ( !vkCmdSetRenderingInputAttachmentIndices ) + vkCmdSetRenderingInputAttachmentIndices = vkCmdSetRenderingInputAttachmentIndicesKHR; //=== VK_EXT_buffer_device_address === vkGetBufferDeviceAddressEXT = PFN_vkGetBufferDeviceAddressEXT( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddressEXT" ) ); - if ( !vkGetBufferDeviceAddress ) - vkGetBufferDeviceAddress = vkGetBufferDeviceAddressEXT; - //=== VK_EXT_buffer_device_address === - vkGetBufferDeviceAddressEXT = PFN_vkGetBufferDeviceAddressEXT( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddressEXT" ) ); if ( !vkGetBufferDeviceAddress ) vkGetBufferDeviceAddress = vkGetBufferDeviceAddressEXT; //=== VK_KHR_present_wait === vkWaitForPresentKHR = PFN_vkWaitForPresentKHR( vkGetDeviceProcAddr( device, "vkWaitForPresentKHR" ) ); - //=== VK_KHR_present_wait === - vkWaitForPresentKHR = PFN_vkWaitForPresentKHR( vkGetDeviceProcAddr( device, "vkWaitForPresentKHR" ) ); #if defined( VK_USE_PLATFORM_WIN32_KHR ) //=== VK_EXT_full_screen_exclusive === @@ -28511,11 +21068,6 @@ namespace VULKAN_HPP_NAMESPACE vkReleaseFullScreenExclusiveModeEXT = PFN_vkReleaseFullScreenExclusiveModeEXT( vkGetDeviceProcAddr( device, "vkReleaseFullScreenExclusiveModeEXT" ) ); vkGetDeviceGroupSurfacePresentModes2EXT = PFN_vkGetDeviceGroupSurfacePresentModes2EXT( vkGetDeviceProcAddr( device, "vkGetDeviceGroupSurfacePresentModes2EXT" ) ); - //=== VK_EXT_full_screen_exclusive === - vkAcquireFullScreenExclusiveModeEXT = PFN_vkAcquireFullScreenExclusiveModeEXT( vkGetDeviceProcAddr( device, "vkAcquireFullScreenExclusiveModeEXT" ) ); - vkReleaseFullScreenExclusiveModeEXT = PFN_vkReleaseFullScreenExclusiveModeEXT( vkGetDeviceProcAddr( device, "vkReleaseFullScreenExclusiveModeEXT" ) ); - vkGetDeviceGroupSurfacePresentModes2EXT = - PFN_vkGetDeviceGroupSurfacePresentModes2EXT( 
vkGetDeviceProcAddr( device, "vkGetDeviceGroupSurfacePresentModes2EXT" ) ); #endif /*VK_USE_PLATFORM_WIN32_KHR*/ //=== VK_KHR_buffer_device_address === @@ -28523,17 +21075,6 @@ namespace VULKAN_HPP_NAMESPACE if ( !vkGetBufferDeviceAddress ) vkGetBufferDeviceAddress = vkGetBufferDeviceAddressKHR; vkGetBufferOpaqueCaptureAddressKHR = PFN_vkGetBufferOpaqueCaptureAddressKHR( vkGetDeviceProcAddr( device, "vkGetBufferOpaqueCaptureAddressKHR" ) ); - if ( !vkGetBufferOpaqueCaptureAddress ) - vkGetBufferOpaqueCaptureAddress = vkGetBufferOpaqueCaptureAddressKHR; - vkGetDeviceMemoryOpaqueCaptureAddressKHR = - PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR( vkGetDeviceProcAddr( device, "vkGetDeviceMemoryOpaqueCaptureAddressKHR" ) ); - if ( !vkGetDeviceMemoryOpaqueCaptureAddress ) - vkGetDeviceMemoryOpaqueCaptureAddress = vkGetDeviceMemoryOpaqueCaptureAddressKHR; - //=== VK_KHR_buffer_device_address === - vkGetBufferDeviceAddressKHR = PFN_vkGetBufferDeviceAddressKHR( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddressKHR" ) ); - if ( !vkGetBufferDeviceAddress ) - vkGetBufferDeviceAddress = vkGetBufferDeviceAddressKHR; - vkGetBufferOpaqueCaptureAddressKHR = PFN_vkGetBufferOpaqueCaptureAddressKHR( vkGetDeviceProcAddr( device, "vkGetBufferOpaqueCaptureAddressKHR" ) ); if ( !vkGetBufferOpaqueCaptureAddress ) vkGetBufferOpaqueCaptureAddress = vkGetBufferOpaqueCaptureAddressKHR; vkGetDeviceMemoryOpaqueCaptureAddressKHR = @@ -28543,19 +21084,11 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_EXT_line_rasterization === vkCmdSetLineStippleEXT = PFN_vkCmdSetLineStippleEXT( vkGetDeviceProcAddr( device, "vkCmdSetLineStippleEXT" ) ); - if ( !vkCmdSetLineStippleKHR ) - vkCmdSetLineStippleKHR = vkCmdSetLineStippleEXT; - //=== VK_EXT_line_rasterization === - vkCmdSetLineStippleEXT = PFN_vkCmdSetLineStippleEXT( vkGetDeviceProcAddr( device, "vkCmdSetLineStippleEXT" ) ); - if ( !vkCmdSetLineStippleKHR ) - vkCmdSetLineStippleKHR = vkCmdSetLineStippleEXT; + if ( !vkCmdSetLineStipple ) + vkCmdSetLineStipple = vkCmdSetLineStippleEXT; //=== VK_EXT_host_query_reset === vkResetQueryPoolEXT = PFN_vkResetQueryPoolEXT( vkGetDeviceProcAddr( device, "vkResetQueryPoolEXT" ) ); - if ( !vkResetQueryPool ) - vkResetQueryPool = vkResetQueryPoolEXT; - //=== VK_EXT_host_query_reset === - vkResetQueryPoolEXT = PFN_vkResetQueryPoolEXT( vkGetDeviceProcAddr( device, "vkResetQueryPoolEXT" ) ); if ( !vkResetQueryPool ) vkResetQueryPool = vkResetQueryPoolEXT; @@ -28594,43 +21127,6 @@ namespace VULKAN_HPP_NAMESPACE if ( !vkCmdSetStencilTestEnable ) vkCmdSetStencilTestEnable = vkCmdSetStencilTestEnableEXT; vkCmdSetStencilOpEXT = PFN_vkCmdSetStencilOpEXT( vkGetDeviceProcAddr( device, "vkCmdSetStencilOpEXT" ) ); - if ( !vkCmdSetStencilOp ) - vkCmdSetStencilOp = vkCmdSetStencilOpEXT; - //=== VK_EXT_extended_dynamic_state === - vkCmdSetCullModeEXT = PFN_vkCmdSetCullModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetCullModeEXT" ) ); - if ( !vkCmdSetCullMode ) - vkCmdSetCullMode = vkCmdSetCullModeEXT; - vkCmdSetFrontFaceEXT = PFN_vkCmdSetFrontFaceEXT( vkGetDeviceProcAddr( device, "vkCmdSetFrontFaceEXT" ) ); - if ( !vkCmdSetFrontFace ) - vkCmdSetFrontFace = vkCmdSetFrontFaceEXT; - vkCmdSetPrimitiveTopologyEXT = PFN_vkCmdSetPrimitiveTopologyEXT( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveTopologyEXT" ) ); - if ( !vkCmdSetPrimitiveTopology ) - vkCmdSetPrimitiveTopology = vkCmdSetPrimitiveTopologyEXT; - vkCmdSetViewportWithCountEXT = PFN_vkCmdSetViewportWithCountEXT( vkGetDeviceProcAddr( device, "vkCmdSetViewportWithCountEXT" ) ); - if ( 
!vkCmdSetViewportWithCount ) - vkCmdSetViewportWithCount = vkCmdSetViewportWithCountEXT; - vkCmdSetScissorWithCountEXT = PFN_vkCmdSetScissorWithCountEXT( vkGetDeviceProcAddr( device, "vkCmdSetScissorWithCountEXT" ) ); - if ( !vkCmdSetScissorWithCount ) - vkCmdSetScissorWithCount = vkCmdSetScissorWithCountEXT; - vkCmdBindVertexBuffers2EXT = PFN_vkCmdBindVertexBuffers2EXT( vkGetDeviceProcAddr( device, "vkCmdBindVertexBuffers2EXT" ) ); - if ( !vkCmdBindVertexBuffers2 ) - vkCmdBindVertexBuffers2 = vkCmdBindVertexBuffers2EXT; - vkCmdSetDepthTestEnableEXT = PFN_vkCmdSetDepthTestEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthTestEnableEXT" ) ); - if ( !vkCmdSetDepthTestEnable ) - vkCmdSetDepthTestEnable = vkCmdSetDepthTestEnableEXT; - vkCmdSetDepthWriteEnableEXT = PFN_vkCmdSetDepthWriteEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthWriteEnableEXT" ) ); - if ( !vkCmdSetDepthWriteEnable ) - vkCmdSetDepthWriteEnable = vkCmdSetDepthWriteEnableEXT; - vkCmdSetDepthCompareOpEXT = PFN_vkCmdSetDepthCompareOpEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthCompareOpEXT" ) ); - if ( !vkCmdSetDepthCompareOp ) - vkCmdSetDepthCompareOp = vkCmdSetDepthCompareOpEXT; - vkCmdSetDepthBoundsTestEnableEXT = PFN_vkCmdSetDepthBoundsTestEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthBoundsTestEnableEXT" ) ); - if ( !vkCmdSetDepthBoundsTestEnable ) - vkCmdSetDepthBoundsTestEnable = vkCmdSetDepthBoundsTestEnableEXT; - vkCmdSetStencilTestEnableEXT = PFN_vkCmdSetStencilTestEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetStencilTestEnableEXT" ) ); - if ( !vkCmdSetStencilTestEnable ) - vkCmdSetStencilTestEnable = vkCmdSetStencilTestEnableEXT; - vkCmdSetStencilOpEXT = PFN_vkCmdSetStencilOpEXT( vkGetDeviceProcAddr( device, "vkCmdSetStencilOpEXT" ) ); if ( !vkCmdSetStencilOp ) vkCmdSetStencilOp = vkCmdSetStencilOpEXT; @@ -28641,21 +21137,7 @@ namespace VULKAN_HPP_NAMESPACE PFN_vkGetDeferredOperationMaxConcurrencyKHR( vkGetDeviceProcAddr( device, "vkGetDeferredOperationMaxConcurrencyKHR" ) ); vkGetDeferredOperationResultKHR = PFN_vkGetDeferredOperationResultKHR( vkGetDeviceProcAddr( device, "vkGetDeferredOperationResultKHR" ) ); vkDeferredOperationJoinKHR = PFN_vkDeferredOperationJoinKHR( vkGetDeviceProcAddr( device, "vkDeferredOperationJoinKHR" ) ); - //=== VK_KHR_deferred_host_operations === - vkCreateDeferredOperationKHR = PFN_vkCreateDeferredOperationKHR( vkGetDeviceProcAddr( device, "vkCreateDeferredOperationKHR" ) ); - vkDestroyDeferredOperationKHR = PFN_vkDestroyDeferredOperationKHR( vkGetDeviceProcAddr( device, "vkDestroyDeferredOperationKHR" ) ); - vkGetDeferredOperationMaxConcurrencyKHR = - PFN_vkGetDeferredOperationMaxConcurrencyKHR( vkGetDeviceProcAddr( device, "vkGetDeferredOperationMaxConcurrencyKHR" ) ); - vkGetDeferredOperationResultKHR = PFN_vkGetDeferredOperationResultKHR( vkGetDeviceProcAddr( device, "vkGetDeferredOperationResultKHR" ) ); - vkDeferredOperationJoinKHR = PFN_vkDeferredOperationJoinKHR( vkGetDeviceProcAddr( device, "vkDeferredOperationJoinKHR" ) ); - //=== VK_KHR_pipeline_executable_properties === - vkGetPipelineExecutablePropertiesKHR = - PFN_vkGetPipelineExecutablePropertiesKHR( vkGetDeviceProcAddr( device, "vkGetPipelineExecutablePropertiesKHR" ) ); - vkGetPipelineExecutableStatisticsKHR = - PFN_vkGetPipelineExecutableStatisticsKHR( vkGetDeviceProcAddr( device, "vkGetPipelineExecutableStatisticsKHR" ) ); - vkGetPipelineExecutableInternalRepresentationsKHR = - PFN_vkGetPipelineExecutableInternalRepresentationsKHR( vkGetDeviceProcAddr( device, 
"vkGetPipelineExecutableInternalRepresentationsKHR" ) ); //=== VK_KHR_pipeline_executable_properties === vkGetPipelineExecutablePropertiesKHR = PFN_vkGetPipelineExecutablePropertiesKHR( vkGetDeviceProcAddr( device, "vkGetPipelineExecutablePropertiesKHR" ) ); @@ -28665,42 +21147,33 @@ namespace VULKAN_HPP_NAMESPACE PFN_vkGetPipelineExecutableInternalRepresentationsKHR( vkGetDeviceProcAddr( device, "vkGetPipelineExecutableInternalRepresentationsKHR" ) ); //=== VK_EXT_host_image_copy === - vkCopyMemoryToImageEXT = PFN_vkCopyMemoryToImageEXT( vkGetDeviceProcAddr( device, "vkCopyMemoryToImageEXT" ) ); - vkCopyImageToMemoryEXT = PFN_vkCopyImageToMemoryEXT( vkGetDeviceProcAddr( device, "vkCopyImageToMemoryEXT" ) ); - vkCopyImageToImageEXT = PFN_vkCopyImageToImageEXT( vkGetDeviceProcAddr( device, "vkCopyImageToImageEXT" ) ); - vkTransitionImageLayoutEXT = PFN_vkTransitionImageLayoutEXT( vkGetDeviceProcAddr( device, "vkTransitionImageLayoutEXT" ) ); + vkCopyMemoryToImageEXT = PFN_vkCopyMemoryToImageEXT( vkGetDeviceProcAddr( device, "vkCopyMemoryToImageEXT" ) ); + if ( !vkCopyMemoryToImage ) + vkCopyMemoryToImage = vkCopyMemoryToImageEXT; + vkCopyImageToMemoryEXT = PFN_vkCopyImageToMemoryEXT( vkGetDeviceProcAddr( device, "vkCopyImageToMemoryEXT" ) ); + if ( !vkCopyImageToMemory ) + vkCopyImageToMemory = vkCopyImageToMemoryEXT; + vkCopyImageToImageEXT = PFN_vkCopyImageToImageEXT( vkGetDeviceProcAddr( device, "vkCopyImageToImageEXT" ) ); + if ( !vkCopyImageToImage ) + vkCopyImageToImage = vkCopyImageToImageEXT; + vkTransitionImageLayoutEXT = PFN_vkTransitionImageLayoutEXT( vkGetDeviceProcAddr( device, "vkTransitionImageLayoutEXT" ) ); + if ( !vkTransitionImageLayout ) + vkTransitionImageLayout = vkTransitionImageLayoutEXT; vkGetImageSubresourceLayout2EXT = PFN_vkGetImageSubresourceLayout2EXT( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout2EXT" ) ); - if ( !vkGetImageSubresourceLayout2KHR ) - vkGetImageSubresourceLayout2KHR = vkGetImageSubresourceLayout2EXT; - //=== VK_EXT_host_image_copy === - vkCopyMemoryToImageEXT = PFN_vkCopyMemoryToImageEXT( vkGetDeviceProcAddr( device, "vkCopyMemoryToImageEXT" ) ); - vkCopyImageToMemoryEXT = PFN_vkCopyImageToMemoryEXT( vkGetDeviceProcAddr( device, "vkCopyImageToMemoryEXT" ) ); - vkCopyImageToImageEXT = PFN_vkCopyImageToImageEXT( vkGetDeviceProcAddr( device, "vkCopyImageToImageEXT" ) ); - vkTransitionImageLayoutEXT = PFN_vkTransitionImageLayoutEXT( vkGetDeviceProcAddr( device, "vkTransitionImageLayoutEXT" ) ); - vkGetImageSubresourceLayout2EXT = PFN_vkGetImageSubresourceLayout2EXT( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout2EXT" ) ); - if ( !vkGetImageSubresourceLayout2KHR ) - vkGetImageSubresourceLayout2KHR = vkGetImageSubresourceLayout2EXT; + if ( !vkGetImageSubresourceLayout2 ) + vkGetImageSubresourceLayout2 = vkGetImageSubresourceLayout2EXT; //=== VK_KHR_map_memory2 === - vkMapMemory2KHR = PFN_vkMapMemory2KHR( vkGetDeviceProcAddr( device, "vkMapMemory2KHR" ) ); - vkUnmapMemory2KHR = PFN_vkUnmapMemory2KHR( vkGetDeviceProcAddr( device, "vkUnmapMemory2KHR" ) ); - //=== VK_KHR_map_memory2 === - vkMapMemory2KHR = PFN_vkMapMemory2KHR( vkGetDeviceProcAddr( device, "vkMapMemory2KHR" ) ); + vkMapMemory2KHR = PFN_vkMapMemory2KHR( vkGetDeviceProcAddr( device, "vkMapMemory2KHR" ) ); + if ( !vkMapMemory2 ) + vkMapMemory2 = vkMapMemory2KHR; vkUnmapMemory2KHR = PFN_vkUnmapMemory2KHR( vkGetDeviceProcAddr( device, "vkUnmapMemory2KHR" ) ); + if ( !vkUnmapMemory2 ) + vkUnmapMemory2 = vkUnmapMemory2KHR; - //=== VK_EXT_swapchain_maintenance1 === - 
vkReleaseSwapchainImagesEXT = PFN_vkReleaseSwapchainImagesEXT( vkGetDeviceProcAddr( device, "vkReleaseSwapchainImagesEXT" ) ); //=== VK_EXT_swapchain_maintenance1 === vkReleaseSwapchainImagesEXT = PFN_vkReleaseSwapchainImagesEXT( vkGetDeviceProcAddr( device, "vkReleaseSwapchainImagesEXT" ) ); - //=== VK_NV_device_generated_commands === - vkGetGeneratedCommandsMemoryRequirementsNV = - PFN_vkGetGeneratedCommandsMemoryRequirementsNV( vkGetDeviceProcAddr( device, "vkGetGeneratedCommandsMemoryRequirementsNV" ) ); - vkCmdPreprocessGeneratedCommandsNV = PFN_vkCmdPreprocessGeneratedCommandsNV( vkGetDeviceProcAddr( device, "vkCmdPreprocessGeneratedCommandsNV" ) ); - vkCmdExecuteGeneratedCommandsNV = PFN_vkCmdExecuteGeneratedCommandsNV( vkGetDeviceProcAddr( device, "vkCmdExecuteGeneratedCommandsNV" ) ); - vkCmdBindPipelineShaderGroupNV = PFN_vkCmdBindPipelineShaderGroupNV( vkGetDeviceProcAddr( device, "vkCmdBindPipelineShaderGroupNV" ) ); - vkCreateIndirectCommandsLayoutNV = PFN_vkCreateIndirectCommandsLayoutNV( vkGetDeviceProcAddr( device, "vkCreateIndirectCommandsLayoutNV" ) ); - vkDestroyIndirectCommandsLayoutNV = PFN_vkDestroyIndirectCommandsLayoutNV( vkGetDeviceProcAddr( device, "vkDestroyIndirectCommandsLayoutNV" ) ); //=== VK_NV_device_generated_commands === vkGetGeneratedCommandsMemoryRequirementsNV = PFN_vkGetGeneratedCommandsMemoryRequirementsNV( vkGetDeviceProcAddr( device, "vkGetGeneratedCommandsMemoryRequirementsNV" ) ); @@ -28710,24 +21183,9 @@ namespace VULKAN_HPP_NAMESPACE vkCreateIndirectCommandsLayoutNV = PFN_vkCreateIndirectCommandsLayoutNV( vkGetDeviceProcAddr( device, "vkCreateIndirectCommandsLayoutNV" ) ); vkDestroyIndirectCommandsLayoutNV = PFN_vkDestroyIndirectCommandsLayoutNV( vkGetDeviceProcAddr( device, "vkDestroyIndirectCommandsLayoutNV" ) ); - //=== VK_EXT_depth_bias_control === - vkCmdSetDepthBias2EXT = PFN_vkCmdSetDepthBias2EXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthBias2EXT" ) ); //=== VK_EXT_depth_bias_control === vkCmdSetDepthBias2EXT = PFN_vkCmdSetDepthBias2EXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthBias2EXT" ) ); - //=== VK_EXT_private_data === - vkCreatePrivateDataSlotEXT = PFN_vkCreatePrivateDataSlotEXT( vkGetDeviceProcAddr( device, "vkCreatePrivateDataSlotEXT" ) ); - if ( !vkCreatePrivateDataSlot ) - vkCreatePrivateDataSlot = vkCreatePrivateDataSlotEXT; - vkDestroyPrivateDataSlotEXT = PFN_vkDestroyPrivateDataSlotEXT( vkGetDeviceProcAddr( device, "vkDestroyPrivateDataSlotEXT" ) ); - if ( !vkDestroyPrivateDataSlot ) - vkDestroyPrivateDataSlot = vkDestroyPrivateDataSlotEXT; - vkSetPrivateDataEXT = PFN_vkSetPrivateDataEXT( vkGetDeviceProcAddr( device, "vkSetPrivateDataEXT" ) ); - if ( !vkSetPrivateData ) - vkSetPrivateData = vkSetPrivateDataEXT; - vkGetPrivateDataEXT = PFN_vkGetPrivateDataEXT( vkGetDeviceProcAddr( device, "vkGetPrivateDataEXT" ) ); - if ( !vkGetPrivateData ) - vkGetPrivateData = vkGetPrivateDataEXT; //=== VK_EXT_private_data === vkCreatePrivateDataSlotEXT = PFN_vkCreatePrivateDataSlotEXT( vkGetDeviceProcAddr( device, "vkCreatePrivateDataSlotEXT" ) ); if ( !vkCreatePrivateDataSlot ) @@ -28746,10 +21204,6 @@ namespace VULKAN_HPP_NAMESPACE vkGetEncodedVideoSessionParametersKHR = PFN_vkGetEncodedVideoSessionParametersKHR( vkGetDeviceProcAddr( device, "vkGetEncodedVideoSessionParametersKHR" ) ); vkCmdEncodeVideoKHR = PFN_vkCmdEncodeVideoKHR( vkGetDeviceProcAddr( device, "vkCmdEncodeVideoKHR" ) ); - //=== VK_KHR_video_encode_queue === - vkGetEncodedVideoSessionParametersKHR = - PFN_vkGetEncodedVideoSessionParametersKHR( vkGetDeviceProcAddr( 
device, "vkGetEncodedVideoSessionParametersKHR" ) ); - vkCmdEncodeVideoKHR = PFN_vkCmdEncodeVideoKHR( vkGetDeviceProcAddr( device, "vkCmdEncodeVideoKHR" ) ); #if defined( VK_ENABLE_BETA_EXTENSIONS ) //=== VK_NV_cuda_kernel_launch === @@ -28759,20 +21213,11 @@ namespace VULKAN_HPP_NAMESPACE vkDestroyCudaModuleNV = PFN_vkDestroyCudaModuleNV( vkGetDeviceProcAddr( device, "vkDestroyCudaModuleNV" ) ); vkDestroyCudaFunctionNV = PFN_vkDestroyCudaFunctionNV( vkGetDeviceProcAddr( device, "vkDestroyCudaFunctionNV" ) ); vkCmdCudaLaunchKernelNV = PFN_vkCmdCudaLaunchKernelNV( vkGetDeviceProcAddr( device, "vkCmdCudaLaunchKernelNV" ) ); - //=== VK_NV_cuda_kernel_launch === - vkCreateCudaModuleNV = PFN_vkCreateCudaModuleNV( vkGetDeviceProcAddr( device, "vkCreateCudaModuleNV" ) ); - vkGetCudaModuleCacheNV = PFN_vkGetCudaModuleCacheNV( vkGetDeviceProcAddr( device, "vkGetCudaModuleCacheNV" ) ); - vkCreateCudaFunctionNV = PFN_vkCreateCudaFunctionNV( vkGetDeviceProcAddr( device, "vkCreateCudaFunctionNV" ) ); - vkDestroyCudaModuleNV = PFN_vkDestroyCudaModuleNV( vkGetDeviceProcAddr( device, "vkDestroyCudaModuleNV" ) ); - vkDestroyCudaFunctionNV = PFN_vkDestroyCudaFunctionNV( vkGetDeviceProcAddr( device, "vkDestroyCudaFunctionNV" ) ); - vkCmdCudaLaunchKernelNV = PFN_vkCmdCudaLaunchKernelNV( vkGetDeviceProcAddr( device, "vkCmdCudaLaunchKernelNV" ) ); #endif /*VK_ENABLE_BETA_EXTENSIONS*/ #if defined( VK_USE_PLATFORM_METAL_EXT ) //=== VK_EXT_metal_objects === vkExportMetalObjectsEXT = PFN_vkExportMetalObjectsEXT( vkGetDeviceProcAddr( device, "vkExportMetalObjectsEXT" ) ); - //=== VK_EXT_metal_objects === - vkExportMetalObjectsEXT = PFN_vkExportMetalObjectsEXT( vkGetDeviceProcAddr( device, "vkExportMetalObjectsEXT" ) ); #endif /*VK_USE_PLATFORM_METAL_EXT*/ //=== VK_KHR_synchronization2 === @@ -28792,25 +21237,6 @@ namespace VULKAN_HPP_NAMESPACE if ( !vkCmdWriteTimestamp2 ) vkCmdWriteTimestamp2 = vkCmdWriteTimestamp2KHR; vkQueueSubmit2KHR = PFN_vkQueueSubmit2KHR( vkGetDeviceProcAddr( device, "vkQueueSubmit2KHR" ) ); - if ( !vkQueueSubmit2 ) - vkQueueSubmit2 = vkQueueSubmit2KHR; - //=== VK_KHR_synchronization2 === - vkCmdSetEvent2KHR = PFN_vkCmdSetEvent2KHR( vkGetDeviceProcAddr( device, "vkCmdSetEvent2KHR" ) ); - if ( !vkCmdSetEvent2 ) - vkCmdSetEvent2 = vkCmdSetEvent2KHR; - vkCmdResetEvent2KHR = PFN_vkCmdResetEvent2KHR( vkGetDeviceProcAddr( device, "vkCmdResetEvent2KHR" ) ); - if ( !vkCmdResetEvent2 ) - vkCmdResetEvent2 = vkCmdResetEvent2KHR; - vkCmdWaitEvents2KHR = PFN_vkCmdWaitEvents2KHR( vkGetDeviceProcAddr( device, "vkCmdWaitEvents2KHR" ) ); - if ( !vkCmdWaitEvents2 ) - vkCmdWaitEvents2 = vkCmdWaitEvents2KHR; - vkCmdPipelineBarrier2KHR = PFN_vkCmdPipelineBarrier2KHR( vkGetDeviceProcAddr( device, "vkCmdPipelineBarrier2KHR" ) ); - if ( !vkCmdPipelineBarrier2 ) - vkCmdPipelineBarrier2 = vkCmdPipelineBarrier2KHR; - vkCmdWriteTimestamp2KHR = PFN_vkCmdWriteTimestamp2KHR( vkGetDeviceProcAddr( device, "vkCmdWriteTimestamp2KHR" ) ); - if ( !vkCmdWriteTimestamp2 ) - vkCmdWriteTimestamp2 = vkCmdWriteTimestamp2KHR; - vkQueueSubmit2KHR = PFN_vkQueueSubmit2KHR( vkGetDeviceProcAddr( device, "vkQueueSubmit2KHR" ) ); if ( !vkQueueSubmit2 ) vkQueueSubmit2 = vkQueueSubmit2KHR; @@ -28821,25 +21247,6 @@ namespace VULKAN_HPP_NAMESPACE vkGetDescriptorEXT = PFN_vkGetDescriptorEXT( vkGetDeviceProcAddr( device, "vkGetDescriptorEXT" ) ); vkCmdBindDescriptorBuffersEXT = PFN_vkCmdBindDescriptorBuffersEXT( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorBuffersEXT" ) ); vkCmdSetDescriptorBufferOffsetsEXT = 
PFN_vkCmdSetDescriptorBufferOffsetsEXT( vkGetDeviceProcAddr( device, "vkCmdSetDescriptorBufferOffsetsEXT" ) ); - vkCmdBindDescriptorBufferEmbeddedSamplersEXT = - PFN_vkCmdBindDescriptorBufferEmbeddedSamplersEXT( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorBufferEmbeddedSamplersEXT" ) ); - vkGetBufferOpaqueCaptureDescriptorDataEXT = - PFN_vkGetBufferOpaqueCaptureDescriptorDataEXT( vkGetDeviceProcAddr( device, "vkGetBufferOpaqueCaptureDescriptorDataEXT" ) ); - vkGetImageOpaqueCaptureDescriptorDataEXT = - PFN_vkGetImageOpaqueCaptureDescriptorDataEXT( vkGetDeviceProcAddr( device, "vkGetImageOpaqueCaptureDescriptorDataEXT" ) ); - vkGetImageViewOpaqueCaptureDescriptorDataEXT = - PFN_vkGetImageViewOpaqueCaptureDescriptorDataEXT( vkGetDeviceProcAddr( device, "vkGetImageViewOpaqueCaptureDescriptorDataEXT" ) ); - vkGetSamplerOpaqueCaptureDescriptorDataEXT = - PFN_vkGetSamplerOpaqueCaptureDescriptorDataEXT( vkGetDeviceProcAddr( device, "vkGetSamplerOpaqueCaptureDescriptorDataEXT" ) ); - vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT = PFN_vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT( - vkGetDeviceProcAddr( device, "vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT" ) ); - //=== VK_EXT_descriptor_buffer === - vkGetDescriptorSetLayoutSizeEXT = PFN_vkGetDescriptorSetLayoutSizeEXT( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutSizeEXT" ) ); - vkGetDescriptorSetLayoutBindingOffsetEXT = - PFN_vkGetDescriptorSetLayoutBindingOffsetEXT( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutBindingOffsetEXT" ) ); - vkGetDescriptorEXT = PFN_vkGetDescriptorEXT( vkGetDeviceProcAddr( device, "vkGetDescriptorEXT" ) ); - vkCmdBindDescriptorBuffersEXT = PFN_vkCmdBindDescriptorBuffersEXT( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorBuffersEXT" ) ); - vkCmdSetDescriptorBufferOffsetsEXT = PFN_vkCmdSetDescriptorBufferOffsetsEXT( vkGetDeviceProcAddr( device, "vkCmdSetDescriptorBufferOffsetsEXT" ) ); vkCmdBindDescriptorBufferEmbeddedSamplersEXT = PFN_vkCmdBindDescriptorBufferEmbeddedSamplersEXT( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorBufferEmbeddedSamplersEXT" ) ); vkGetBufferOpaqueCaptureDescriptorDataEXT = @@ -28855,37 +21262,12 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_NV_fragment_shading_rate_enums === vkCmdSetFragmentShadingRateEnumNV = PFN_vkCmdSetFragmentShadingRateEnumNV( vkGetDeviceProcAddr( device, "vkCmdSetFragmentShadingRateEnumNV" ) ); - //=== VK_NV_fragment_shading_rate_enums === - vkCmdSetFragmentShadingRateEnumNV = PFN_vkCmdSetFragmentShadingRateEnumNV( vkGetDeviceProcAddr( device, "vkCmdSetFragmentShadingRateEnumNV" ) ); - //=== VK_EXT_mesh_shader === - vkCmdDrawMeshTasksEXT = PFN_vkCmdDrawMeshTasksEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksEXT" ) ); - vkCmdDrawMeshTasksIndirectEXT = PFN_vkCmdDrawMeshTasksIndirectEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectEXT" ) ); - vkCmdDrawMeshTasksIndirectCountEXT = PFN_vkCmdDrawMeshTasksIndirectCountEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectCountEXT" ) ); //=== VK_EXT_mesh_shader === vkCmdDrawMeshTasksEXT = PFN_vkCmdDrawMeshTasksEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksEXT" ) ); vkCmdDrawMeshTasksIndirectEXT = PFN_vkCmdDrawMeshTasksIndirectEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectEXT" ) ); vkCmdDrawMeshTasksIndirectCountEXT = PFN_vkCmdDrawMeshTasksIndirectCountEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectCountEXT" ) ); - //=== VK_KHR_copy_commands2 === - vkCmdCopyBuffer2KHR = PFN_vkCmdCopyBuffer2KHR( 
vkGetDeviceProcAddr( device, "vkCmdCopyBuffer2KHR" ) ); - if ( !vkCmdCopyBuffer2 ) - vkCmdCopyBuffer2 = vkCmdCopyBuffer2KHR; - vkCmdCopyImage2KHR = PFN_vkCmdCopyImage2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyImage2KHR" ) ); - if ( !vkCmdCopyImage2 ) - vkCmdCopyImage2 = vkCmdCopyImage2KHR; - vkCmdCopyBufferToImage2KHR = PFN_vkCmdCopyBufferToImage2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyBufferToImage2KHR" ) ); - if ( !vkCmdCopyBufferToImage2 ) - vkCmdCopyBufferToImage2 = vkCmdCopyBufferToImage2KHR; - vkCmdCopyImageToBuffer2KHR = PFN_vkCmdCopyImageToBuffer2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyImageToBuffer2KHR" ) ); - if ( !vkCmdCopyImageToBuffer2 ) - vkCmdCopyImageToBuffer2 = vkCmdCopyImageToBuffer2KHR; - vkCmdBlitImage2KHR = PFN_vkCmdBlitImage2KHR( vkGetDeviceProcAddr( device, "vkCmdBlitImage2KHR" ) ); - if ( !vkCmdBlitImage2 ) - vkCmdBlitImage2 = vkCmdBlitImage2KHR; - vkCmdResolveImage2KHR = PFN_vkCmdResolveImage2KHR( vkGetDeviceProcAddr( device, "vkCmdResolveImage2KHR" ) ); - if ( !vkCmdResolveImage2 ) - vkCmdResolveImage2 = vkCmdResolveImage2KHR; //=== VK_KHR_copy_commands2 === vkCmdCopyBuffer2KHR = PFN_vkCmdCopyBuffer2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyBuffer2KHR" ) ); if ( !vkCmdCopyBuffer2 ) @@ -28908,23 +21290,15 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_EXT_device_fault === vkGetDeviceFaultInfoEXT = PFN_vkGetDeviceFaultInfoEXT( vkGetDeviceProcAddr( device, "vkGetDeviceFaultInfoEXT" ) ); - //=== VK_EXT_device_fault === - vkGetDeviceFaultInfoEXT = PFN_vkGetDeviceFaultInfoEXT( vkGetDeviceProcAddr( device, "vkGetDeviceFaultInfoEXT" ) ); //=== VK_EXT_vertex_input_dynamic_state === vkCmdSetVertexInputEXT = PFN_vkCmdSetVertexInputEXT( vkGetDeviceProcAddr( device, "vkCmdSetVertexInputEXT" ) ); - //=== VK_EXT_vertex_input_dynamic_state === - vkCmdSetVertexInputEXT = PFN_vkCmdSetVertexInputEXT( vkGetDeviceProcAddr( device, "vkCmdSetVertexInputEXT" ) ); #if defined( VK_USE_PLATFORM_FUCHSIA ) //=== VK_FUCHSIA_external_memory === vkGetMemoryZirconHandleFUCHSIA = PFN_vkGetMemoryZirconHandleFUCHSIA( vkGetDeviceProcAddr( device, "vkGetMemoryZirconHandleFUCHSIA" ) ); vkGetMemoryZirconHandlePropertiesFUCHSIA = PFN_vkGetMemoryZirconHandlePropertiesFUCHSIA( vkGetDeviceProcAddr( device, "vkGetMemoryZirconHandlePropertiesFUCHSIA" ) ); - //=== VK_FUCHSIA_external_memory === - vkGetMemoryZirconHandleFUCHSIA = PFN_vkGetMemoryZirconHandleFUCHSIA( vkGetDeviceProcAddr( device, "vkGetMemoryZirconHandleFUCHSIA" ) ); - vkGetMemoryZirconHandlePropertiesFUCHSIA = - PFN_vkGetMemoryZirconHandlePropertiesFUCHSIA( vkGetDeviceProcAddr( device, "vkGetMemoryZirconHandlePropertiesFUCHSIA" ) ); #endif /*VK_USE_PLATFORM_FUCHSIA*/ #if defined( VK_USE_PLATFORM_FUCHSIA ) @@ -28932,10 +21306,6 @@ namespace VULKAN_HPP_NAMESPACE vkImportSemaphoreZirconHandleFUCHSIA = PFN_vkImportSemaphoreZirconHandleFUCHSIA( vkGetDeviceProcAddr( device, "vkImportSemaphoreZirconHandleFUCHSIA" ) ); vkGetSemaphoreZirconHandleFUCHSIA = PFN_vkGetSemaphoreZirconHandleFUCHSIA( vkGetDeviceProcAddr( device, "vkGetSemaphoreZirconHandleFUCHSIA" ) ); - //=== VK_FUCHSIA_external_semaphore === - vkImportSemaphoreZirconHandleFUCHSIA = - PFN_vkImportSemaphoreZirconHandleFUCHSIA( vkGetDeviceProcAddr( device, "vkImportSemaphoreZirconHandleFUCHSIA" ) ); - vkGetSemaphoreZirconHandleFUCHSIA = PFN_vkGetSemaphoreZirconHandleFUCHSIA( vkGetDeviceProcAddr( device, "vkGetSemaphoreZirconHandleFUCHSIA" ) ); #endif /*VK_USE_PLATFORM_FUCHSIA*/ #if defined( VK_USE_PLATFORM_FUCHSIA ) @@ -28948,53 +21318,22 @@ namespace VULKAN_HPP_NAMESPACE 
vkDestroyBufferCollectionFUCHSIA = PFN_vkDestroyBufferCollectionFUCHSIA( vkGetDeviceProcAddr( device, "vkDestroyBufferCollectionFUCHSIA" ) ); vkGetBufferCollectionPropertiesFUCHSIA = PFN_vkGetBufferCollectionPropertiesFUCHSIA( vkGetDeviceProcAddr( device, "vkGetBufferCollectionPropertiesFUCHSIA" ) ); - //=== VK_FUCHSIA_buffer_collection === - vkCreateBufferCollectionFUCHSIA = PFN_vkCreateBufferCollectionFUCHSIA( vkGetDeviceProcAddr( device, "vkCreateBufferCollectionFUCHSIA" ) ); - vkSetBufferCollectionImageConstraintsFUCHSIA = - PFN_vkSetBufferCollectionImageConstraintsFUCHSIA( vkGetDeviceProcAddr( device, "vkSetBufferCollectionImageConstraintsFUCHSIA" ) ); - vkSetBufferCollectionBufferConstraintsFUCHSIA = - PFN_vkSetBufferCollectionBufferConstraintsFUCHSIA( vkGetDeviceProcAddr( device, "vkSetBufferCollectionBufferConstraintsFUCHSIA" ) ); - vkDestroyBufferCollectionFUCHSIA = PFN_vkDestroyBufferCollectionFUCHSIA( vkGetDeviceProcAddr( device, "vkDestroyBufferCollectionFUCHSIA" ) ); - vkGetBufferCollectionPropertiesFUCHSIA = - PFN_vkGetBufferCollectionPropertiesFUCHSIA( vkGetDeviceProcAddr( device, "vkGetBufferCollectionPropertiesFUCHSIA" ) ); #endif /*VK_USE_PLATFORM_FUCHSIA*/ //=== VK_HUAWEI_subpass_shading === vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI = PFN_vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI( vkGetDeviceProcAddr( device, "vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI" ) ); vkCmdSubpassShadingHUAWEI = PFN_vkCmdSubpassShadingHUAWEI( vkGetDeviceProcAddr( device, "vkCmdSubpassShadingHUAWEI" ) ); - //=== VK_HUAWEI_subpass_shading === - vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI = - PFN_vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI( vkGetDeviceProcAddr( device, "vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI" ) ); - vkCmdSubpassShadingHUAWEI = PFN_vkCmdSubpassShadingHUAWEI( vkGetDeviceProcAddr( device, "vkCmdSubpassShadingHUAWEI" ) ); - //=== VK_HUAWEI_invocation_mask === - vkCmdBindInvocationMaskHUAWEI = PFN_vkCmdBindInvocationMaskHUAWEI( vkGetDeviceProcAddr( device, "vkCmdBindInvocationMaskHUAWEI" ) ); //=== VK_HUAWEI_invocation_mask === vkCmdBindInvocationMaskHUAWEI = PFN_vkCmdBindInvocationMaskHUAWEI( vkGetDeviceProcAddr( device, "vkCmdBindInvocationMaskHUAWEI" ) ); - //=== VK_NV_external_memory_rdma === - vkGetMemoryRemoteAddressNV = PFN_vkGetMemoryRemoteAddressNV( vkGetDeviceProcAddr( device, "vkGetMemoryRemoteAddressNV" ) ); //=== VK_NV_external_memory_rdma === vkGetMemoryRemoteAddressNV = PFN_vkGetMemoryRemoteAddressNV( vkGetDeviceProcAddr( device, "vkGetMemoryRemoteAddressNV" ) ); - //=== VK_EXT_pipeline_properties === - vkGetPipelinePropertiesEXT = PFN_vkGetPipelinePropertiesEXT( vkGetDeviceProcAddr( device, "vkGetPipelinePropertiesEXT" ) ); //=== VK_EXT_pipeline_properties === vkGetPipelinePropertiesEXT = PFN_vkGetPipelinePropertiesEXT( vkGetDeviceProcAddr( device, "vkGetPipelinePropertiesEXT" ) ); - //=== VK_EXT_extended_dynamic_state2 === - vkCmdSetPatchControlPointsEXT = PFN_vkCmdSetPatchControlPointsEXT( vkGetDeviceProcAddr( device, "vkCmdSetPatchControlPointsEXT" ) ); - vkCmdSetRasterizerDiscardEnableEXT = PFN_vkCmdSetRasterizerDiscardEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetRasterizerDiscardEnableEXT" ) ); - if ( !vkCmdSetRasterizerDiscardEnable ) - vkCmdSetRasterizerDiscardEnable = vkCmdSetRasterizerDiscardEnableEXT; - vkCmdSetDepthBiasEnableEXT = PFN_vkCmdSetDepthBiasEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthBiasEnableEXT" ) ); - if ( !vkCmdSetDepthBiasEnable ) - vkCmdSetDepthBiasEnable = vkCmdSetDepthBiasEnableEXT; - 
vkCmdSetLogicOpEXT = PFN_vkCmdSetLogicOpEXT( vkGetDeviceProcAddr( device, "vkCmdSetLogicOpEXT" ) ); - vkCmdSetPrimitiveRestartEnableEXT = PFN_vkCmdSetPrimitiveRestartEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveRestartEnableEXT" ) ); - if ( !vkCmdSetPrimitiveRestartEnable ) - vkCmdSetPrimitiveRestartEnable = vkCmdSetPrimitiveRestartEnableEXT; //=== VK_EXT_extended_dynamic_state2 === vkCmdSetPatchControlPointsEXT = PFN_vkCmdSetPatchControlPointsEXT( vkGetDeviceProcAddr( device, "vkCmdSetPatchControlPointsEXT" ) ); vkCmdSetRasterizerDiscardEnableEXT = PFN_vkCmdSetRasterizerDiscardEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetRasterizerDiscardEnableEXT" ) ); @@ -29010,36 +21349,14 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_EXT_color_write_enable === vkCmdSetColorWriteEnableEXT = PFN_vkCmdSetColorWriteEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorWriteEnableEXT" ) ); - //=== VK_EXT_color_write_enable === - vkCmdSetColorWriteEnableEXT = PFN_vkCmdSetColorWriteEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorWriteEnableEXT" ) ); - //=== VK_KHR_ray_tracing_maintenance1 === - vkCmdTraceRaysIndirect2KHR = PFN_vkCmdTraceRaysIndirect2KHR( vkGetDeviceProcAddr( device, "vkCmdTraceRaysIndirect2KHR" ) ); //=== VK_KHR_ray_tracing_maintenance1 === vkCmdTraceRaysIndirect2KHR = PFN_vkCmdTraceRaysIndirect2KHR( vkGetDeviceProcAddr( device, "vkCmdTraceRaysIndirect2KHR" ) ); - //=== VK_EXT_multi_draw === - vkCmdDrawMultiEXT = PFN_vkCmdDrawMultiEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMultiEXT" ) ); - vkCmdDrawMultiIndexedEXT = PFN_vkCmdDrawMultiIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMultiIndexedEXT" ) ); //=== VK_EXT_multi_draw === vkCmdDrawMultiEXT = PFN_vkCmdDrawMultiEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMultiEXT" ) ); vkCmdDrawMultiIndexedEXT = PFN_vkCmdDrawMultiIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMultiIndexedEXT" ) ); - //=== VK_EXT_opacity_micromap === - vkCreateMicromapEXT = PFN_vkCreateMicromapEXT( vkGetDeviceProcAddr( device, "vkCreateMicromapEXT" ) ); - vkDestroyMicromapEXT = PFN_vkDestroyMicromapEXT( vkGetDeviceProcAddr( device, "vkDestroyMicromapEXT" ) ); - vkCmdBuildMicromapsEXT = PFN_vkCmdBuildMicromapsEXT( vkGetDeviceProcAddr( device, "vkCmdBuildMicromapsEXT" ) ); - vkBuildMicromapsEXT = PFN_vkBuildMicromapsEXT( vkGetDeviceProcAddr( device, "vkBuildMicromapsEXT" ) ); - vkCopyMicromapEXT = PFN_vkCopyMicromapEXT( vkGetDeviceProcAddr( device, "vkCopyMicromapEXT" ) ); - vkCopyMicromapToMemoryEXT = PFN_vkCopyMicromapToMemoryEXT( vkGetDeviceProcAddr( device, "vkCopyMicromapToMemoryEXT" ) ); - vkCopyMemoryToMicromapEXT = PFN_vkCopyMemoryToMicromapEXT( vkGetDeviceProcAddr( device, "vkCopyMemoryToMicromapEXT" ) ); - vkWriteMicromapsPropertiesEXT = PFN_vkWriteMicromapsPropertiesEXT( vkGetDeviceProcAddr( device, "vkWriteMicromapsPropertiesEXT" ) ); - vkCmdCopyMicromapEXT = PFN_vkCmdCopyMicromapEXT( vkGetDeviceProcAddr( device, "vkCmdCopyMicromapEXT" ) ); - vkCmdCopyMicromapToMemoryEXT = PFN_vkCmdCopyMicromapToMemoryEXT( vkGetDeviceProcAddr( device, "vkCmdCopyMicromapToMemoryEXT" ) ); - vkCmdCopyMemoryToMicromapEXT = PFN_vkCmdCopyMemoryToMicromapEXT( vkGetDeviceProcAddr( device, "vkCmdCopyMemoryToMicromapEXT" ) ); - vkCmdWriteMicromapsPropertiesEXT = PFN_vkCmdWriteMicromapsPropertiesEXT( vkGetDeviceProcAddr( device, "vkCmdWriteMicromapsPropertiesEXT" ) ); - vkGetDeviceMicromapCompatibilityEXT = PFN_vkGetDeviceMicromapCompatibilityEXT( vkGetDeviceProcAddr( device, "vkGetDeviceMicromapCompatibilityEXT" ) ); - vkGetMicromapBuildSizesEXT = 
PFN_vkGetMicromapBuildSizesEXT( vkGetDeviceProcAddr( device, "vkGetMicromapBuildSizesEXT" ) ); //=== VK_EXT_opacity_micromap === vkCreateMicromapEXT = PFN_vkCreateMicromapEXT( vkGetDeviceProcAddr( device, "vkCreateMicromapEXT" ) ); vkDestroyMicromapEXT = PFN_vkDestroyMicromapEXT( vkGetDeviceProcAddr( device, "vkDestroyMicromapEXT" ) ); @@ -29056,31 +21373,13 @@ namespace VULKAN_HPP_NAMESPACE vkGetDeviceMicromapCompatibilityEXT = PFN_vkGetDeviceMicromapCompatibilityEXT( vkGetDeviceProcAddr( device, "vkGetDeviceMicromapCompatibilityEXT" ) ); vkGetMicromapBuildSizesEXT = PFN_vkGetMicromapBuildSizesEXT( vkGetDeviceProcAddr( device, "vkGetMicromapBuildSizesEXT" ) ); - //=== VK_HUAWEI_cluster_culling_shader === - vkCmdDrawClusterHUAWEI = PFN_vkCmdDrawClusterHUAWEI( vkGetDeviceProcAddr( device, "vkCmdDrawClusterHUAWEI" ) ); - vkCmdDrawClusterIndirectHUAWEI = PFN_vkCmdDrawClusterIndirectHUAWEI( vkGetDeviceProcAddr( device, "vkCmdDrawClusterIndirectHUAWEI" ) ); //=== VK_HUAWEI_cluster_culling_shader === vkCmdDrawClusterHUAWEI = PFN_vkCmdDrawClusterHUAWEI( vkGetDeviceProcAddr( device, "vkCmdDrawClusterHUAWEI" ) ); vkCmdDrawClusterIndirectHUAWEI = PFN_vkCmdDrawClusterIndirectHUAWEI( vkGetDeviceProcAddr( device, "vkCmdDrawClusterIndirectHUAWEI" ) ); - //=== VK_EXT_pageable_device_local_memory === - vkSetDeviceMemoryPriorityEXT = PFN_vkSetDeviceMemoryPriorityEXT( vkGetDeviceProcAddr( device, "vkSetDeviceMemoryPriorityEXT" ) ); //=== VK_EXT_pageable_device_local_memory === vkSetDeviceMemoryPriorityEXT = PFN_vkSetDeviceMemoryPriorityEXT( vkGetDeviceProcAddr( device, "vkSetDeviceMemoryPriorityEXT" ) ); - //=== VK_KHR_maintenance4 === - vkGetDeviceBufferMemoryRequirementsKHR = - PFN_vkGetDeviceBufferMemoryRequirementsKHR( vkGetDeviceProcAddr( device, "vkGetDeviceBufferMemoryRequirementsKHR" ) ); - if ( !vkGetDeviceBufferMemoryRequirements ) - vkGetDeviceBufferMemoryRequirements = vkGetDeviceBufferMemoryRequirementsKHR; - vkGetDeviceImageMemoryRequirementsKHR = - PFN_vkGetDeviceImageMemoryRequirementsKHR( vkGetDeviceProcAddr( device, "vkGetDeviceImageMemoryRequirementsKHR" ) ); - if ( !vkGetDeviceImageMemoryRequirements ) - vkGetDeviceImageMemoryRequirements = vkGetDeviceImageMemoryRequirementsKHR; - vkGetDeviceImageSparseMemoryRequirementsKHR = - PFN_vkGetDeviceImageSparseMemoryRequirementsKHR( vkGetDeviceProcAddr( device, "vkGetDeviceImageSparseMemoryRequirementsKHR" ) ); - if ( !vkGetDeviceImageSparseMemoryRequirements ) - vkGetDeviceImageSparseMemoryRequirements = vkGetDeviceImageSparseMemoryRequirementsKHR; //=== VK_KHR_maintenance4 === vkGetDeviceBufferMemoryRequirementsKHR = PFN_vkGetDeviceBufferMemoryRequirementsKHR( vkGetDeviceProcAddr( device, "vkGetDeviceBufferMemoryRequirementsKHR" ) ); @@ -29099,22 +21398,11 @@ namespace VULKAN_HPP_NAMESPACE vkGetDescriptorSetLayoutHostMappingInfoVALVE = PFN_vkGetDescriptorSetLayoutHostMappingInfoVALVE( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutHostMappingInfoVALVE" ) ); vkGetDescriptorSetHostMappingVALVE = PFN_vkGetDescriptorSetHostMappingVALVE( vkGetDeviceProcAddr( device, "vkGetDescriptorSetHostMappingVALVE" ) ); - //=== VK_VALVE_descriptor_set_host_mapping === - vkGetDescriptorSetLayoutHostMappingInfoVALVE = - PFN_vkGetDescriptorSetLayoutHostMappingInfoVALVE( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutHostMappingInfoVALVE" ) ); - vkGetDescriptorSetHostMappingVALVE = PFN_vkGetDescriptorSetHostMappingVALVE( vkGetDeviceProcAddr( device, "vkGetDescriptorSetHostMappingVALVE" ) ); - //=== VK_NV_copy_memory_indirect === - 
vkCmdCopyMemoryIndirectNV = PFN_vkCmdCopyMemoryIndirectNV( vkGetDeviceProcAddr( device, "vkCmdCopyMemoryIndirectNV" ) ); - vkCmdCopyMemoryToImageIndirectNV = PFN_vkCmdCopyMemoryToImageIndirectNV( vkGetDeviceProcAddr( device, "vkCmdCopyMemoryToImageIndirectNV" ) ); //=== VK_NV_copy_memory_indirect === vkCmdCopyMemoryIndirectNV = PFN_vkCmdCopyMemoryIndirectNV( vkGetDeviceProcAddr( device, "vkCmdCopyMemoryIndirectNV" ) ); vkCmdCopyMemoryToImageIndirectNV = PFN_vkCmdCopyMemoryToImageIndirectNV( vkGetDeviceProcAddr( device, "vkCmdCopyMemoryToImageIndirectNV" ) ); - //=== VK_NV_memory_decompression === - vkCmdDecompressMemoryNV = PFN_vkCmdDecompressMemoryNV( vkGetDeviceProcAddr( device, "vkCmdDecompressMemoryNV" ) ); - vkCmdDecompressMemoryIndirectCountNV = - PFN_vkCmdDecompressMemoryIndirectCountNV( vkGetDeviceProcAddr( device, "vkCmdDecompressMemoryIndirectCountNV" ) ); //=== VK_NV_memory_decompression === vkCmdDecompressMemoryNV = PFN_vkCmdDecompressMemoryNV( vkGetDeviceProcAddr( device, "vkCmdDecompressMemoryNV" ) ); vkCmdDecompressMemoryIndirectCountNV = @@ -29124,12 +21412,6 @@ namespace VULKAN_HPP_NAMESPACE vkGetPipelineIndirectMemoryRequirementsNV = PFN_vkGetPipelineIndirectMemoryRequirementsNV( vkGetDeviceProcAddr( device, "vkGetPipelineIndirectMemoryRequirementsNV" ) ); vkCmdUpdatePipelineIndirectBufferNV = PFN_vkCmdUpdatePipelineIndirectBufferNV( vkGetDeviceProcAddr( device, "vkCmdUpdatePipelineIndirectBufferNV" ) ); - vkGetPipelineIndirectDeviceAddressNV = - PFN_vkGetPipelineIndirectDeviceAddressNV( vkGetDeviceProcAddr( device, "vkGetPipelineIndirectDeviceAddressNV" ) ); - //=== VK_NV_device_generated_commands_compute === - vkGetPipelineIndirectMemoryRequirementsNV = - PFN_vkGetPipelineIndirectMemoryRequirementsNV( vkGetDeviceProcAddr( device, "vkGetPipelineIndirectMemoryRequirementsNV" ) ); - vkCmdUpdatePipelineIndirectBufferNV = PFN_vkCmdUpdatePipelineIndirectBufferNV( vkGetDeviceProcAddr( device, "vkCmdUpdatePipelineIndirectBufferNV" ) ); vkGetPipelineIndirectDeviceAddressNV = PFN_vkGetPipelineIndirectDeviceAddressNV( vkGetDeviceProcAddr( device, "vkGetPipelineIndirectDeviceAddressNV" ) ); @@ -29170,48 +21452,7 @@ namespace VULKAN_HPP_NAMESPACE vkCmdSetRepresentativeFragmentTestEnableNV = PFN_vkCmdSetRepresentativeFragmentTestEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetRepresentativeFragmentTestEnableNV" ) ); vkCmdSetCoverageReductionModeNV = PFN_vkCmdSetCoverageReductionModeNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageReductionModeNV" ) ); - //=== VK_EXT_extended_dynamic_state3 === - vkCmdSetDepthClampEnableEXT = PFN_vkCmdSetDepthClampEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthClampEnableEXT" ) ); - vkCmdSetPolygonModeEXT = PFN_vkCmdSetPolygonModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetPolygonModeEXT" ) ); - vkCmdSetRasterizationSamplesEXT = PFN_vkCmdSetRasterizationSamplesEXT( vkGetDeviceProcAddr( device, "vkCmdSetRasterizationSamplesEXT" ) ); - vkCmdSetSampleMaskEXT = PFN_vkCmdSetSampleMaskEXT( vkGetDeviceProcAddr( device, "vkCmdSetSampleMaskEXT" ) ); - vkCmdSetAlphaToCoverageEnableEXT = PFN_vkCmdSetAlphaToCoverageEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetAlphaToCoverageEnableEXT" ) ); - vkCmdSetAlphaToOneEnableEXT = PFN_vkCmdSetAlphaToOneEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetAlphaToOneEnableEXT" ) ); - vkCmdSetLogicOpEnableEXT = PFN_vkCmdSetLogicOpEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetLogicOpEnableEXT" ) ); - vkCmdSetColorBlendEnableEXT = PFN_vkCmdSetColorBlendEnableEXT( vkGetDeviceProcAddr( device, 
"vkCmdSetColorBlendEnableEXT" ) ); - vkCmdSetColorBlendEquationEXT = PFN_vkCmdSetColorBlendEquationEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorBlendEquationEXT" ) ); - vkCmdSetColorWriteMaskEXT = PFN_vkCmdSetColorWriteMaskEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorWriteMaskEXT" ) ); - vkCmdSetTessellationDomainOriginEXT = PFN_vkCmdSetTessellationDomainOriginEXT( vkGetDeviceProcAddr( device, "vkCmdSetTessellationDomainOriginEXT" ) ); - vkCmdSetRasterizationStreamEXT = PFN_vkCmdSetRasterizationStreamEXT( vkGetDeviceProcAddr( device, "vkCmdSetRasterizationStreamEXT" ) ); - vkCmdSetConservativeRasterizationModeEXT = - PFN_vkCmdSetConservativeRasterizationModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetConservativeRasterizationModeEXT" ) ); - vkCmdSetExtraPrimitiveOverestimationSizeEXT = - PFN_vkCmdSetExtraPrimitiveOverestimationSizeEXT( vkGetDeviceProcAddr( device, "vkCmdSetExtraPrimitiveOverestimationSizeEXT" ) ); - vkCmdSetDepthClipEnableEXT = PFN_vkCmdSetDepthClipEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthClipEnableEXT" ) ); - vkCmdSetSampleLocationsEnableEXT = PFN_vkCmdSetSampleLocationsEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetSampleLocationsEnableEXT" ) ); - vkCmdSetColorBlendAdvancedEXT = PFN_vkCmdSetColorBlendAdvancedEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorBlendAdvancedEXT" ) ); - vkCmdSetProvokingVertexModeEXT = PFN_vkCmdSetProvokingVertexModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetProvokingVertexModeEXT" ) ); - vkCmdSetLineRasterizationModeEXT = PFN_vkCmdSetLineRasterizationModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetLineRasterizationModeEXT" ) ); - vkCmdSetLineStippleEnableEXT = PFN_vkCmdSetLineStippleEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetLineStippleEnableEXT" ) ); - vkCmdSetDepthClipNegativeOneToOneEXT = - PFN_vkCmdSetDepthClipNegativeOneToOneEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthClipNegativeOneToOneEXT" ) ); - vkCmdSetViewportWScalingEnableNV = PFN_vkCmdSetViewportWScalingEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportWScalingEnableNV" ) ); - vkCmdSetViewportSwizzleNV = PFN_vkCmdSetViewportSwizzleNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportSwizzleNV" ) ); - vkCmdSetCoverageToColorEnableNV = PFN_vkCmdSetCoverageToColorEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageToColorEnableNV" ) ); - vkCmdSetCoverageToColorLocationNV = PFN_vkCmdSetCoverageToColorLocationNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageToColorLocationNV" ) ); - vkCmdSetCoverageModulationModeNV = PFN_vkCmdSetCoverageModulationModeNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageModulationModeNV" ) ); - vkCmdSetCoverageModulationTableEnableNV = - PFN_vkCmdSetCoverageModulationTableEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageModulationTableEnableNV" ) ); - vkCmdSetCoverageModulationTableNV = PFN_vkCmdSetCoverageModulationTableNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageModulationTableNV" ) ); - vkCmdSetShadingRateImageEnableNV = PFN_vkCmdSetShadingRateImageEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetShadingRateImageEnableNV" ) ); - vkCmdSetRepresentativeFragmentTestEnableNV = - PFN_vkCmdSetRepresentativeFragmentTestEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetRepresentativeFragmentTestEnableNV" ) ); - vkCmdSetCoverageReductionModeNV = PFN_vkCmdSetCoverageReductionModeNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageReductionModeNV" ) ); - //=== VK_EXT_shader_module_identifier === - vkGetShaderModuleIdentifierEXT = PFN_vkGetShaderModuleIdentifierEXT( vkGetDeviceProcAddr( device, 
"vkGetShaderModuleIdentifierEXT" ) ); - vkGetShaderModuleCreateInfoIdentifierEXT = - PFN_vkGetShaderModuleCreateInfoIdentifierEXT( vkGetDeviceProcAddr( device, "vkGetShaderModuleCreateInfoIdentifierEXT" ) ); //=== VK_EXT_shader_module_identifier === vkGetShaderModuleIdentifierEXT = PFN_vkGetShaderModuleIdentifierEXT( vkGetDeviceProcAddr( device, "vkGetShaderModuleIdentifierEXT" ) ); vkGetShaderModuleCreateInfoIdentifierEXT = @@ -29222,27 +21463,21 @@ namespace VULKAN_HPP_NAMESPACE vkDestroyOpticalFlowSessionNV = PFN_vkDestroyOpticalFlowSessionNV( vkGetDeviceProcAddr( device, "vkDestroyOpticalFlowSessionNV" ) ); vkBindOpticalFlowSessionImageNV = PFN_vkBindOpticalFlowSessionImageNV( vkGetDeviceProcAddr( device, "vkBindOpticalFlowSessionImageNV" ) ); vkCmdOpticalFlowExecuteNV = PFN_vkCmdOpticalFlowExecuteNV( vkGetDeviceProcAddr( device, "vkCmdOpticalFlowExecuteNV" ) ); - //=== VK_NV_optical_flow === - vkCreateOpticalFlowSessionNV = PFN_vkCreateOpticalFlowSessionNV( vkGetDeviceProcAddr( device, "vkCreateOpticalFlowSessionNV" ) ); - vkDestroyOpticalFlowSessionNV = PFN_vkDestroyOpticalFlowSessionNV( vkGetDeviceProcAddr( device, "vkDestroyOpticalFlowSessionNV" ) ); - vkBindOpticalFlowSessionImageNV = PFN_vkBindOpticalFlowSessionImageNV( vkGetDeviceProcAddr( device, "vkBindOpticalFlowSessionImageNV" ) ); - vkCmdOpticalFlowExecuteNV = PFN_vkCmdOpticalFlowExecuteNV( vkGetDeviceProcAddr( device, "vkCmdOpticalFlowExecuteNV" ) ); //=== VK_KHR_maintenance5 === - vkCmdBindIndexBuffer2KHR = PFN_vkCmdBindIndexBuffer2KHR( vkGetDeviceProcAddr( device, "vkCmdBindIndexBuffer2KHR" ) ); - vkGetRenderingAreaGranularityKHR = PFN_vkGetRenderingAreaGranularityKHR( vkGetDeviceProcAddr( device, "vkGetRenderingAreaGranularityKHR" ) ); - vkGetDeviceImageSubresourceLayoutKHR = - PFN_vkGetDeviceImageSubresourceLayoutKHR( vkGetDeviceProcAddr( device, "vkGetDeviceImageSubresourceLayoutKHR" ) ); - vkGetImageSubresourceLayout2KHR = PFN_vkGetImageSubresourceLayout2KHR( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout2KHR" ) ); - - //=== VK_AMD_anti_lag === - vkAntiLagUpdateAMD = PFN_vkAntiLagUpdateAMD( vkGetDeviceProcAddr( device, "vkAntiLagUpdateAMD" ) ); - //=== VK_KHR_maintenance5 === - vkCmdBindIndexBuffer2KHR = PFN_vkCmdBindIndexBuffer2KHR( vkGetDeviceProcAddr( device, "vkCmdBindIndexBuffer2KHR" ) ); + vkCmdBindIndexBuffer2KHR = PFN_vkCmdBindIndexBuffer2KHR( vkGetDeviceProcAddr( device, "vkCmdBindIndexBuffer2KHR" ) ); + if ( !vkCmdBindIndexBuffer2 ) + vkCmdBindIndexBuffer2 = vkCmdBindIndexBuffer2KHR; vkGetRenderingAreaGranularityKHR = PFN_vkGetRenderingAreaGranularityKHR( vkGetDeviceProcAddr( device, "vkGetRenderingAreaGranularityKHR" ) ); + if ( !vkGetRenderingAreaGranularity ) + vkGetRenderingAreaGranularity = vkGetRenderingAreaGranularityKHR; vkGetDeviceImageSubresourceLayoutKHR = PFN_vkGetDeviceImageSubresourceLayoutKHR( vkGetDeviceProcAddr( device, "vkGetDeviceImageSubresourceLayoutKHR" ) ); + if ( !vkGetDeviceImageSubresourceLayout ) + vkGetDeviceImageSubresourceLayout = vkGetDeviceImageSubresourceLayoutKHR; vkGetImageSubresourceLayout2KHR = PFN_vkGetImageSubresourceLayout2KHR( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout2KHR" ) ); + if ( !vkGetImageSubresourceLayout2 ) + vkGetImageSubresourceLayout2 = vkGetImageSubresourceLayout2KHR; //=== VK_AMD_anti_lag === vkAntiLagUpdateAMD = PFN_vkAntiLagUpdateAMD( vkGetDeviceProcAddr( device, "vkAntiLagUpdateAMD" ) ); @@ -29254,19 +21489,6 @@ namespace VULKAN_HPP_NAMESPACE vkCmdBindShadersEXT = PFN_vkCmdBindShadersEXT( vkGetDeviceProcAddr( device, 
"vkCmdBindShadersEXT" ) ); vkCmdSetDepthClampRangeEXT = PFN_vkCmdSetDepthClampRangeEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthClampRangeEXT" ) ); - //=== VK_KHR_pipeline_binary === - vkCreatePipelineBinariesKHR = PFN_vkCreatePipelineBinariesKHR( vkGetDeviceProcAddr( device, "vkCreatePipelineBinariesKHR" ) ); - vkDestroyPipelineBinaryKHR = PFN_vkDestroyPipelineBinaryKHR( vkGetDeviceProcAddr( device, "vkDestroyPipelineBinaryKHR" ) ); - vkGetPipelineKeyKHR = PFN_vkGetPipelineKeyKHR( vkGetDeviceProcAddr( device, "vkGetPipelineKeyKHR" ) ); - vkGetPipelineBinaryDataKHR = PFN_vkGetPipelineBinaryDataKHR( vkGetDeviceProcAddr( device, "vkGetPipelineBinaryDataKHR" ) ); - vkReleaseCapturedPipelineDataKHR = PFN_vkReleaseCapturedPipelineDataKHR( vkGetDeviceProcAddr( device, "vkReleaseCapturedPipelineDataKHR" ) ); - //=== VK_EXT_shader_object === - vkCreateShadersEXT = PFN_vkCreateShadersEXT( vkGetDeviceProcAddr( device, "vkCreateShadersEXT" ) ); - vkDestroyShaderEXT = PFN_vkDestroyShaderEXT( vkGetDeviceProcAddr( device, "vkDestroyShaderEXT" ) ); - vkGetShaderBinaryDataEXT = PFN_vkGetShaderBinaryDataEXT( vkGetDeviceProcAddr( device, "vkGetShaderBinaryDataEXT" ) ); - vkCmdBindShadersEXT = PFN_vkCmdBindShadersEXT( vkGetDeviceProcAddr( device, "vkCmdBindShadersEXT" ) ); - vkCmdSetDepthClampRangeEXT = PFN_vkCmdSetDepthClampRangeEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthClampRangeEXT" ) ); - //=== VK_KHR_pipeline_binary === vkCreatePipelineBinariesKHR = PFN_vkCreatePipelineBinariesKHR( vkGetDeviceProcAddr( device, "vkCreatePipelineBinariesKHR" ) ); vkDestroyPipelineBinaryKHR = PFN_vkDestroyPipelineBinaryKHR( vkGetDeviceProcAddr( device, "vkDestroyPipelineBinaryKHR" ) ); @@ -29274,10 +21496,6 @@ namespace VULKAN_HPP_NAMESPACE vkGetPipelineBinaryDataKHR = PFN_vkGetPipelineBinaryDataKHR( vkGetDeviceProcAddr( device, "vkGetPipelineBinaryDataKHR" ) ); vkReleaseCapturedPipelineDataKHR = PFN_vkReleaseCapturedPipelineDataKHR( vkGetDeviceProcAddr( device, "vkReleaseCapturedPipelineDataKHR" ) ); - //=== VK_QCOM_tile_properties === - vkGetFramebufferTilePropertiesQCOM = PFN_vkGetFramebufferTilePropertiesQCOM( vkGetDeviceProcAddr( device, "vkGetFramebufferTilePropertiesQCOM" ) ); - vkGetDynamicRenderingTilePropertiesQCOM = - PFN_vkGetDynamicRenderingTilePropertiesQCOM( vkGetDeviceProcAddr( device, "vkGetDynamicRenderingTilePropertiesQCOM" ) ); //=== VK_QCOM_tile_properties === vkGetFramebufferTilePropertiesQCOM = PFN_vkGetFramebufferTilePropertiesQCOM( vkGetDeviceProcAddr( device, "vkGetFramebufferTilePropertiesQCOM" ) ); vkGetDynamicRenderingTilePropertiesQCOM = @@ -29289,67 +21507,38 @@ namespace VULKAN_HPP_NAMESPACE vkSetLatencyMarkerNV = PFN_vkSetLatencyMarkerNV( vkGetDeviceProcAddr( device, "vkSetLatencyMarkerNV" ) ); vkGetLatencyTimingsNV = PFN_vkGetLatencyTimingsNV( vkGetDeviceProcAddr( device, "vkGetLatencyTimingsNV" ) ); vkQueueNotifyOutOfBandNV = PFN_vkQueueNotifyOutOfBandNV( vkGetDeviceProcAddr( device, "vkQueueNotifyOutOfBandNV" ) ); - //=== VK_NV_low_latency2 === - vkSetLatencySleepModeNV = PFN_vkSetLatencySleepModeNV( vkGetDeviceProcAddr( device, "vkSetLatencySleepModeNV" ) ); - vkLatencySleepNV = PFN_vkLatencySleepNV( vkGetDeviceProcAddr( device, "vkLatencySleepNV" ) ); - vkSetLatencyMarkerNV = PFN_vkSetLatencyMarkerNV( vkGetDeviceProcAddr( device, "vkSetLatencyMarkerNV" ) ); - vkGetLatencyTimingsNV = PFN_vkGetLatencyTimingsNV( vkGetDeviceProcAddr( device, "vkGetLatencyTimingsNV" ) ); - vkQueueNotifyOutOfBandNV = PFN_vkQueueNotifyOutOfBandNV( vkGetDeviceProcAddr( device, 
"vkQueueNotifyOutOfBandNV" ) ); //=== VK_EXT_attachment_feedback_loop_dynamic_state === vkCmdSetAttachmentFeedbackLoopEnableEXT = PFN_vkCmdSetAttachmentFeedbackLoopEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetAttachmentFeedbackLoopEnableEXT" ) ); - //=== VK_EXT_attachment_feedback_loop_dynamic_state === - vkCmdSetAttachmentFeedbackLoopEnableEXT = - PFN_vkCmdSetAttachmentFeedbackLoopEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetAttachmentFeedbackLoopEnableEXT" ) ); #if defined( VK_USE_PLATFORM_SCREEN_QNX ) //=== VK_QNX_external_memory_screen_buffer === vkGetScreenBufferPropertiesQNX = PFN_vkGetScreenBufferPropertiesQNX( vkGetDeviceProcAddr( device, "vkGetScreenBufferPropertiesQNX" ) ); - //=== VK_QNX_external_memory_screen_buffer === - vkGetScreenBufferPropertiesQNX = PFN_vkGetScreenBufferPropertiesQNX( vkGetDeviceProcAddr( device, "vkGetScreenBufferPropertiesQNX" ) ); #endif /*VK_USE_PLATFORM_SCREEN_QNX*/ //=== VK_KHR_line_rasterization === vkCmdSetLineStippleKHR = PFN_vkCmdSetLineStippleKHR( vkGetDeviceProcAddr( device, "vkCmdSetLineStippleKHR" ) ); - //=== VK_KHR_line_rasterization === - vkCmdSetLineStippleKHR = PFN_vkCmdSetLineStippleKHR( vkGetDeviceProcAddr( device, "vkCmdSetLineStippleKHR" ) ); + if ( !vkCmdSetLineStipple ) + vkCmdSetLineStipple = vkCmdSetLineStippleKHR; - //=== VK_KHR_calibrated_timestamps === - vkGetCalibratedTimestampsKHR = PFN_vkGetCalibratedTimestampsKHR( vkGetDeviceProcAddr( device, "vkGetCalibratedTimestampsKHR" ) ); //=== VK_KHR_calibrated_timestamps === vkGetCalibratedTimestampsKHR = PFN_vkGetCalibratedTimestampsKHR( vkGetDeviceProcAddr( device, "vkGetCalibratedTimestampsKHR" ) ); //=== VK_KHR_maintenance6 === vkCmdBindDescriptorSets2KHR = PFN_vkCmdBindDescriptorSets2KHR( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorSets2KHR" ) ); - vkCmdPushConstants2KHR = PFN_vkCmdPushConstants2KHR( vkGetDeviceProcAddr( device, "vkCmdPushConstants2KHR" ) ); - vkCmdPushDescriptorSet2KHR = PFN_vkCmdPushDescriptorSet2KHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSet2KHR" ) ); - vkCmdPushDescriptorSetWithTemplate2KHR = - PFN_vkCmdPushDescriptorSetWithTemplate2KHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetWithTemplate2KHR" ) ); - vkCmdSetDescriptorBufferOffsets2EXT = PFN_vkCmdSetDescriptorBufferOffsets2EXT( vkGetDeviceProcAddr( device, "vkCmdSetDescriptorBufferOffsets2EXT" ) ); - vkCmdBindDescriptorBufferEmbeddedSamplers2EXT = - PFN_vkCmdBindDescriptorBufferEmbeddedSamplers2EXT( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorBufferEmbeddedSamplers2EXT" ) ); - - //=== VK_EXT_device_generated_commands === - vkGetGeneratedCommandsMemoryRequirementsEXT = - PFN_vkGetGeneratedCommandsMemoryRequirementsEXT( vkGetDeviceProcAddr( device, "vkGetGeneratedCommandsMemoryRequirementsEXT" ) ); - vkCmdPreprocessGeneratedCommandsEXT = PFN_vkCmdPreprocessGeneratedCommandsEXT( vkGetDeviceProcAddr( device, "vkCmdPreprocessGeneratedCommandsEXT" ) ); - vkCmdExecuteGeneratedCommandsEXT = PFN_vkCmdExecuteGeneratedCommandsEXT( vkGetDeviceProcAddr( device, "vkCmdExecuteGeneratedCommandsEXT" ) ); - vkCreateIndirectCommandsLayoutEXT = PFN_vkCreateIndirectCommandsLayoutEXT( vkGetDeviceProcAddr( device, "vkCreateIndirectCommandsLayoutEXT" ) ); - vkDestroyIndirectCommandsLayoutEXT = PFN_vkDestroyIndirectCommandsLayoutEXT( vkGetDeviceProcAddr( device, "vkDestroyIndirectCommandsLayoutEXT" ) ); - vkCreateIndirectExecutionSetEXT = PFN_vkCreateIndirectExecutionSetEXT( vkGetDeviceProcAddr( device, "vkCreateIndirectExecutionSetEXT" ) ); - vkDestroyIndirectExecutionSetEXT = 
PFN_vkDestroyIndirectExecutionSetEXT( vkGetDeviceProcAddr( device, "vkDestroyIndirectExecutionSetEXT" ) ); - vkUpdateIndirectExecutionSetPipelineEXT = - PFN_vkUpdateIndirectExecutionSetPipelineEXT( vkGetDeviceProcAddr( device, "vkUpdateIndirectExecutionSetPipelineEXT" ) ); - vkUpdateIndirectExecutionSetShaderEXT = - PFN_vkUpdateIndirectExecutionSetShaderEXT( vkGetDeviceProcAddr( device, "vkUpdateIndirectExecutionSetShaderEXT" ) ); - } - //=== VK_KHR_maintenance6 === - vkCmdBindDescriptorSets2KHR = PFN_vkCmdBindDescriptorSets2KHR( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorSets2KHR" ) ); - vkCmdPushConstants2KHR = PFN_vkCmdPushConstants2KHR( vkGetDeviceProcAddr( device, "vkCmdPushConstants2KHR" ) ); - vkCmdPushDescriptorSet2KHR = PFN_vkCmdPushDescriptorSet2KHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSet2KHR" ) ); + if ( !vkCmdBindDescriptorSets2 ) + vkCmdBindDescriptorSets2 = vkCmdBindDescriptorSets2KHR; + vkCmdPushConstants2KHR = PFN_vkCmdPushConstants2KHR( vkGetDeviceProcAddr( device, "vkCmdPushConstants2KHR" ) ); + if ( !vkCmdPushConstants2 ) + vkCmdPushConstants2 = vkCmdPushConstants2KHR; + vkCmdPushDescriptorSet2KHR = PFN_vkCmdPushDescriptorSet2KHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSet2KHR" ) ); + if ( !vkCmdPushDescriptorSet2 ) + vkCmdPushDescriptorSet2 = vkCmdPushDescriptorSet2KHR; vkCmdPushDescriptorSetWithTemplate2KHR = PFN_vkCmdPushDescriptorSetWithTemplate2KHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetWithTemplate2KHR" ) ); + if ( !vkCmdPushDescriptorSetWithTemplate2 ) + vkCmdPushDescriptorSetWithTemplate2 = vkCmdPushDescriptorSetWithTemplate2KHR; vkCmdSetDescriptorBufferOffsets2EXT = PFN_vkCmdSetDescriptorBufferOffsets2EXT( vkGetDeviceProcAddr( device, "vkCmdSetDescriptorBufferOffsets2EXT" ) ); vkCmdBindDescriptorBufferEmbeddedSamplers2EXT = PFN_vkCmdBindDescriptorBufferEmbeddedSamplers2EXT( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorBufferEmbeddedSamplers2EXT" ) ); @@ -29369,13 +21558,6 @@ namespace VULKAN_HPP_NAMESPACE PFN_vkUpdateIndirectExecutionSetShaderEXT( vkGetDeviceProcAddr( device, "vkUpdateIndirectExecutionSetShaderEXT" ) ); } - template - void init( VULKAN_HPP_NAMESPACE::Instance const & instance, VULKAN_HPP_NAMESPACE::Device const & device, DynamicLoader const & dl ) VULKAN_HPP_NOEXCEPT - { - PFN_vkGetInstanceProcAddr getInstanceProcAddr = dl.template getProcAddress( "vkGetInstanceProcAddr" ); - PFN_vkGetDeviceProcAddr getDeviceProcAddr = dl.template getProcAddress( "vkGetDeviceProcAddr" ); - init( static_cast( instance ), getInstanceProcAddr, static_cast( device ), device ? 
getDeviceProcAddr : nullptr ); - } template void init( VULKAN_HPP_NAMESPACE::Instance const & instance, VULKAN_HPP_NAMESPACE::Device const & device, DynamicLoader const & dl ) VULKAN_HPP_NOEXCEPT { @@ -29385,10 +21567,8 @@ namespace VULKAN_HPP_NAMESPACE } template void init( VULKAN_HPP_NAMESPACE::Instance const & instance, VULKAN_HPP_NAMESPACE::Device const & device ) VULKAN_HPP_NOEXCEPT @@ -29397,14 +21577,6 @@ namespace VULKAN_HPP_NAMESPACE init( instance, device, dl ); } }; - } // namespace detail - > - void init( VULKAN_HPP_NAMESPACE::Instance const & instance, VULKAN_HPP_NAMESPACE::Device const & device ) VULKAN_HPP_NOEXCEPT - { - static DynamicLoader dl; - init( instance, device, dl ); - } - }; } // namespace detail } // namespace VULKAN_HPP_NAMESPACE #endif diff --git a/third_party/vulkan/vulkan_core.h b/third_party/vulkan/vulkan_core.h index e647c87..4e716da 100644 --- a/third_party/vulkan/vulkan_core.h +++ b/third_party/vulkan/vulkan_core.h @@ -69,28 +69,24 @@ extern "C" { #define VK_API_VERSION_1_0 VK_MAKE_API_VERSION(0, 1, 0, 0)// Patch version should always be set to 0 // Version of this file -#define VK_HEADER_VERSION 301 +#define VK_HEADER_VERSION 303 // Complete version of this file -#define VK_HEADER_VERSION_COMPLETE VK_MAKE_API_VERSION(0, 1, 3, VK_HEADER_VERSION) +#define VK_HEADER_VERSION_COMPLETE VK_MAKE_API_VERSION(0, 1, 4, VK_HEADER_VERSION) -// VK_MAKE_VERSION is deprecated, but no reason was given in the API XML // VK_MAKE_VERSION is deprecated, but no reason was given in the API XML // DEPRECATED: This define is deprecated. VK_MAKE_API_VERSION should be used instead. #define VK_MAKE_VERSION(major, minor, patch) \ ((((uint32_t)(major)) << 22U) | (((uint32_t)(minor)) << 12U) | ((uint32_t)(patch))) -// VK_VERSION_MAJOR is deprecated, but no reason was given in the API XML // VK_VERSION_MAJOR is deprecated, but no reason was given in the API XML // DEPRECATED: This define is deprecated. VK_API_VERSION_MAJOR should be used instead. #define VK_VERSION_MAJOR(version) ((uint32_t)(version) >> 22U) -// VK_VERSION_MINOR is deprecated, but no reason was given in the API XML // VK_VERSION_MINOR is deprecated, but no reason was given in the API XML // DEPRECATED: This define is deprecated. VK_API_VERSION_MINOR should be used instead. #define VK_VERSION_MINOR(version) (((uint32_t)(version) >> 12U) & 0x3FFU) -// VK_VERSION_PATCH is deprecated, but no reason was given in the API XML // VK_VERSION_PATCH is deprecated, but no reason was given in the API XML // DEPRECATED: This define is deprecated. VK_API_VERSION_PATCH should be used instead. 
#define VK_VERSION_PATCH(version) ((uint32_t)(version) & 0xFFFU) @@ -170,6 +166,7 @@ typedef enum VkResult { VK_ERROR_FRAGMENTATION = -1000161000, VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS = -1000257000, VK_PIPELINE_COMPILE_REQUIRED = 1000297000, + VK_ERROR_NOT_PERMITTED = -1000174001, VK_ERROR_SURFACE_LOST_KHR = -1000000000, VK_ERROR_NATIVE_WINDOW_IN_USE_KHR = -1000000001, VK_SUBOPTIMAL_KHR = 1000001003, @@ -184,7 +181,6 @@ typedef enum VkResult { VK_ERROR_VIDEO_PROFILE_CODEC_NOT_SUPPORTED_KHR = -1000023004, VK_ERROR_VIDEO_STD_VERSION_NOT_SUPPORTED_KHR = -1000023005, VK_ERROR_INVALID_DRM_FORMAT_MODIFIER_PLANE_LAYOUT_EXT = -1000158000, - VK_ERROR_NOT_PERMITTED_KHR = -1000174001, VK_ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT = -1000255000, VK_THREAD_IDLE_KHR = 1000268000, VK_THREAD_DONE_KHR = 1000268001, @@ -195,17 +191,15 @@ typedef enum VkResult { VK_INCOMPATIBLE_SHADER_BINARY_EXT = 1000482000, VK_PIPELINE_BINARY_MISSING_KHR = 1000483000, VK_ERROR_NOT_ENOUGH_SPACE_KHR = -1000483000, - VK_PIPELINE_BINARY_MISSING_KHR = 1000483000, - VK_ERROR_NOT_ENOUGH_SPACE_KHR = -1000483000, VK_ERROR_OUT_OF_POOL_MEMORY_KHR = VK_ERROR_OUT_OF_POOL_MEMORY, VK_ERROR_INVALID_EXTERNAL_HANDLE_KHR = VK_ERROR_INVALID_EXTERNAL_HANDLE, VK_ERROR_FRAGMENTATION_EXT = VK_ERROR_FRAGMENTATION, - VK_ERROR_NOT_PERMITTED_EXT = VK_ERROR_NOT_PERMITTED_KHR, + VK_ERROR_NOT_PERMITTED_EXT = VK_ERROR_NOT_PERMITTED, + VK_ERROR_NOT_PERMITTED_KHR = VK_ERROR_NOT_PERMITTED, VK_ERROR_INVALID_DEVICE_ADDRESS_EXT = VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS, VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS_KHR = VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS, VK_PIPELINE_COMPILE_REQUIRED_EXT = VK_PIPELINE_COMPILE_REQUIRED, VK_ERROR_PIPELINE_COMPILE_REQUIRED_EXT = VK_PIPELINE_COMPILE_REQUIRED, - // VK_ERROR_INCOMPATIBLE_SHADER_BINARY_EXT is a deprecated alias // VK_ERROR_INCOMPATIBLE_SHADER_BINARY_EXT is a deprecated alias VK_ERROR_INCOMPATIBLE_SHADER_BINARY_EXT = VK_INCOMPATIBLE_SHADER_BINARY_EXT, VK_RESULT_MAX_ENUM = 0x7FFFFFFF @@ -428,6 +422,56 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_PROPERTIES = 1000413001, VK_STRUCTURE_TYPE_DEVICE_BUFFER_MEMORY_REQUIREMENTS = 1000413002, VK_STRUCTURE_TYPE_DEVICE_IMAGE_MEMORY_REQUIREMENTS = 1000413003, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_4_FEATURES = 55, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_4_PROPERTIES = 56, + VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO = 1000174000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GLOBAL_PRIORITY_QUERY_FEATURES = 1000388000, + VK_STRUCTURE_TYPE_QUEUE_FAMILY_GLOBAL_PRIORITY_PROPERTIES = 1000388001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_ROTATE_FEATURES = 1000416000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT_CONTROLS_2_FEATURES = 1000528000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_EXPECT_ASSUME_FEATURES = 1000544000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES = 1000259000, + VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO = 1000259001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES = 1000259002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES = 1000525000, + VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO = 1000190001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES = 1000190002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES = 1000265000, + VK_STRUCTURE_TYPE_MEMORY_MAP_INFO = 1000271000, + VK_STRUCTURE_TYPE_MEMORY_UNMAP_INFO = 1000271001, + 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_5_FEATURES = 1000470000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_5_PROPERTIES = 1000470001, + VK_STRUCTURE_TYPE_RENDERING_AREA_INFO = 1000470003, + VK_STRUCTURE_TYPE_DEVICE_IMAGE_SUBRESOURCE_INFO = 1000470004, + VK_STRUCTURE_TYPE_SUBRESOURCE_LAYOUT_2 = 1000338002, + VK_STRUCTURE_TYPE_IMAGE_SUBRESOURCE_2 = 1000338003, + VK_STRUCTURE_TYPE_PIPELINE_CREATE_FLAGS_2_CREATE_INFO = 1000470005, + VK_STRUCTURE_TYPE_BUFFER_USAGE_FLAGS_2_CREATE_INFO = 1000470006, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES = 1000080000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DYNAMIC_RENDERING_LOCAL_READ_FEATURES = 1000232000, + VK_STRUCTURE_TYPE_RENDERING_ATTACHMENT_LOCATION_INFO = 1000232001, + VK_STRUCTURE_TYPE_RENDERING_INPUT_ATTACHMENT_INDEX_INFO = 1000232002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_6_FEATURES = 1000545000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_6_PROPERTIES = 1000545001, + VK_STRUCTURE_TYPE_BIND_MEMORY_STATUS = 1000545002, + VK_STRUCTURE_TYPE_BIND_DESCRIPTOR_SETS_INFO = 1000545003, + VK_STRUCTURE_TYPE_PUSH_CONSTANTS_INFO = 1000545004, + VK_STRUCTURE_TYPE_PUSH_DESCRIPTOR_SET_INFO = 1000545005, + VK_STRUCTURE_TYPE_PUSH_DESCRIPTOR_SET_WITH_TEMPLATE_INFO = 1000545006, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_PROTECTED_ACCESS_FEATURES = 1000466000, + VK_STRUCTURE_TYPE_PIPELINE_ROBUSTNESS_CREATE_INFO = 1000068000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_ROBUSTNESS_FEATURES = 1000068001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_ROBUSTNESS_PROPERTIES = 1000068002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_IMAGE_COPY_FEATURES = 1000270000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_IMAGE_COPY_PROPERTIES = 1000270001, + VK_STRUCTURE_TYPE_MEMORY_TO_IMAGE_COPY = 1000270002, + VK_STRUCTURE_TYPE_IMAGE_TO_MEMORY_COPY = 1000270003, + VK_STRUCTURE_TYPE_COPY_IMAGE_TO_MEMORY_INFO = 1000270004, + VK_STRUCTURE_TYPE_COPY_MEMORY_TO_IMAGE_INFO = 1000270005, + VK_STRUCTURE_TYPE_HOST_IMAGE_LAYOUT_TRANSITION_INFO = 1000270006, + VK_STRUCTURE_TYPE_COPY_IMAGE_TO_IMAGE_INFO = 1000270007, + VK_STRUCTURE_TYPE_SUBRESOURCE_HOST_MEMCPY_SIZE = 1000270008, + VK_STRUCTURE_TYPE_HOST_IMAGE_COPY_DEVICE_PERFORMANCE_QUERY = 1000270009, VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR = 1000001000, VK_STRUCTURE_TYPE_PRESENT_INFO_KHR = 1000001001, VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_CAPABILITIES_KHR = 1000060007, @@ -527,9 +571,6 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_VI_SURFACE_CREATE_INFO_NN = 1000062000, VK_STRUCTURE_TYPE_IMAGE_VIEW_ASTC_DECODE_MODE_EXT = 1000067000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ASTC_DECODE_FEATURES_EXT = 1000067001, - VK_STRUCTURE_TYPE_PIPELINE_ROBUSTNESS_CREATE_INFO_EXT = 1000068000, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_ROBUSTNESS_FEATURES_EXT = 1000068001, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_ROBUSTNESS_PROPERTIES_EXT = 1000068002, VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_KHR = 1000073000, VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_KHR = 1000073001, VK_STRUCTURE_TYPE_MEMORY_WIN32_HANDLE_PROPERTIES_KHR = 1000073002, @@ -544,7 +585,6 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_SEMAPHORE_GET_WIN32_HANDLE_INFO_KHR = 1000078003, VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR = 1000079000, VK_STRUCTURE_TYPE_SEMAPHORE_GET_FD_INFO_KHR = 1000079001, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR = 1000080000, VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_CONDITIONAL_RENDERING_INFO_EXT = 1000081000, 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONDITIONAL_RENDERING_FEATURES_EXT = 1000081001, VK_STRUCTURE_TYPE_CONDITIONAL_RENDERING_BEGIN_INFO_EXT = 1000081002, @@ -558,7 +598,6 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_PRESENT_TIMES_INFO_GOOGLE = 1000092000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_ATTRIBUTES_PROPERTIES_NVX = 1000097000, VK_STRUCTURE_TYPE_MULTIVIEW_PER_VIEW_ATTRIBUTES_INFO_NVX = 1000044009, - VK_STRUCTURE_TYPE_MULTIVIEW_PER_VIEW_ATTRIBUTES_INFO_NVX = 1000044009, VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SWIZZLE_STATE_CREATE_INFO_NV = 1000098000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DISCARD_RECTANGLE_PROPERTIES_EXT = 1000099000, VK_STRUCTURE_TYPE_PIPELINE_DISCARD_RECTANGLE_STATE_CREATE_INFO_EXT = 1000099001, @@ -618,7 +657,6 @@ typedef enum VkStructureType { #ifdef VK_ENABLE_BETA_EXTENSIONS VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_NODE_CREATE_INFO_AMDX = 1000134004, #endif - VK_STRUCTURE_TYPE_ATTACHMENT_SAMPLE_COUNT_INFO_AMD = 1000044008, VK_STRUCTURE_TYPE_ATTACHMENT_SAMPLE_COUNT_INFO_AMD = 1000044008, VK_STRUCTURE_TYPE_SAMPLE_LOCATIONS_INFO_EXT = 1000143000, VK_STRUCTURE_TYPE_RENDER_PASS_SAMPLE_LOCATIONS_BEGIN_INFO_EXT = 1000143001, @@ -698,9 +736,6 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_PROFILE_INFO_KHR = 1000187003, VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_PICTURE_INFO_KHR = 1000187004, VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_DPB_SLOT_INFO_KHR = 1000187005, - VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_KHR = 1000174000, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GLOBAL_PRIORITY_QUERY_FEATURES_KHR = 1000388000, - VK_STRUCTURE_TYPE_QUEUE_FAMILY_GLOBAL_PRIORITY_PROPERTIES_KHR = 1000388001, VK_STRUCTURE_TYPE_DEVICE_MEMORY_OVERALLOCATION_CREATE_INFO_AMD = 1000189000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_EXT = 1000190000, VK_STRUCTURE_TYPE_PRESENT_FRAME_TOKEN_GGP = 1000191000, @@ -713,8 +748,6 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_NV = 1000206001, VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_2_NV = 1000314008, VK_STRUCTURE_TYPE_CHECKPOINT_DATA_2_NV = 1000314009, - VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_2_NV = 1000314008, - VK_STRUCTURE_TYPE_CHECKPOINT_DATA_2_NV = 1000314009, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_FUNCTIONS_2_FEATURES_INTEL = 1000209000, VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_QUERY_CREATE_INFO_INTEL = 1000210000, VK_STRUCTURE_TYPE_INITIALIZE_PERFORMANCE_API_INFO_INTEL = 1000210001, @@ -731,19 +764,14 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_PROPERTIES_EXT = 1000218001, VK_STRUCTURE_TYPE_RENDER_PASS_FRAGMENT_DENSITY_MAP_CREATE_INFO_EXT = 1000218002, VK_STRUCTURE_TYPE_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_INFO_EXT = 1000044007, - VK_STRUCTURE_TYPE_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_INFO_EXT = 1000044007, VK_STRUCTURE_TYPE_FRAGMENT_SHADING_RATE_ATTACHMENT_INFO_KHR = 1000226000, VK_STRUCTURE_TYPE_PIPELINE_FRAGMENT_SHADING_RATE_STATE_CREATE_INFO_KHR = 1000226001, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_PROPERTIES_KHR = 1000226002, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_FEATURES_KHR = 1000226003, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_KHR = 1000226004, VK_STRUCTURE_TYPE_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_INFO_KHR = 1000044006, - VK_STRUCTURE_TYPE_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_INFO_KHR = 1000044006, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_2_AMD = 1000227000, 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COHERENT_MEMORY_FEATURES_AMD = 1000229000, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DYNAMIC_RENDERING_LOCAL_READ_FEATURES_KHR = 1000232000, - VK_STRUCTURE_TYPE_RENDERING_ATTACHMENT_LOCATION_INFO_KHR = 1000232001, - VK_STRUCTURE_TYPE_RENDERING_INPUT_ATTACHMENT_INDEX_INFO_KHR = 1000232002, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_IMAGE_ATOMIC_INT64_FEATURES_EXT = 1000234000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_QUAD_CONTROL_FEATURES_KHR = 1000235000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT = 1000237000, @@ -778,18 +806,6 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INFO_KHR = 1000269003, VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_STATISTIC_KHR = 1000269004, VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INTERNAL_REPRESENTATION_KHR = 1000269005, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_IMAGE_COPY_FEATURES_EXT = 1000270000, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_IMAGE_COPY_PROPERTIES_EXT = 1000270001, - VK_STRUCTURE_TYPE_MEMORY_TO_IMAGE_COPY_EXT = 1000270002, - VK_STRUCTURE_TYPE_IMAGE_TO_MEMORY_COPY_EXT = 1000270003, - VK_STRUCTURE_TYPE_COPY_IMAGE_TO_MEMORY_INFO_EXT = 1000270004, - VK_STRUCTURE_TYPE_COPY_MEMORY_TO_IMAGE_INFO_EXT = 1000270005, - VK_STRUCTURE_TYPE_HOST_IMAGE_LAYOUT_TRANSITION_INFO_EXT = 1000270006, - VK_STRUCTURE_TYPE_COPY_IMAGE_TO_IMAGE_INFO_EXT = 1000270007, - VK_STRUCTURE_TYPE_SUBRESOURCE_HOST_MEMCPY_SIZE_EXT = 1000270008, - VK_STRUCTURE_TYPE_HOST_IMAGE_COPY_DEVICE_PERFORMANCE_QUERY_EXT = 1000270009, - VK_STRUCTURE_TYPE_MEMORY_MAP_INFO_KHR = 1000271000, - VK_STRUCTURE_TYPE_MEMORY_UNMAP_INFO_KHR = 1000271001, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAP_MEMORY_PLACED_FEATURES_EXT = 1000272000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAP_MEMORY_PLACED_PROPERTIES_EXT = 1000272001, VK_STRUCTURE_TYPE_MEMORY_MAP_PLACED_INFO_EXT = 1000272002, @@ -917,7 +933,6 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_DEPTH_CLIP_CONTROL_CREATE_INFO_EXT = 1000355001, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIMITIVE_TOPOLOGY_LIST_RESTART_FEATURES_EXT = 1000356000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENT_MODE_FIFO_LATEST_READY_FEATURES_EXT = 1000361000, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENT_MODE_FIFO_LATEST_READY_FEATURES_EXT = 1000361000, VK_STRUCTURE_TYPE_IMPORT_MEMORY_ZIRCON_HANDLE_INFO_FUCHSIA = 1000364000, VK_STRUCTURE_TYPE_MEMORY_ZIRCON_HANDLE_PROPERTIES_FUCHSIA = 1000364001, VK_STRUCTURE_TYPE_MEMORY_GET_ZIRCON_HANDLE_INFO_FUCHSIA = 1000364002, @@ -985,7 +1000,6 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_SAMPLER_BORDER_COLOR_COMPONENT_MAPPING_CREATE_INFO_EXT = 1000411001, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PAGEABLE_DEVICE_LOCAL_MEMORY_FEATURES_EXT = 1000412000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_ARM = 1000415000, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_ROTATE_FEATURES_KHR = 1000416000, VK_STRUCTURE_TYPE_DEVICE_QUEUE_SHADER_CORE_CONTROL_CREATE_INFO_ARM = 1000417000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCHEDULING_CONTROLS_FEATURES_ARM = 1000417001, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCHEDULING_CONTROLS_PROPERTIES_ARM = 1000417002, @@ -1041,21 +1055,9 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_OPTICAL_FLOW_EXECUTE_INFO_NV = 1000464005, VK_STRUCTURE_TYPE_OPTICAL_FLOW_SESSION_CREATE_PRIVATE_DATA_INFO_NV = 1000464010, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LEGACY_DITHERING_FEATURES_EXT = 1000465000, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_PROTECTED_ACCESS_FEATURES_EXT = 1000466000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FORMAT_RESOLVE_FEATURES_ANDROID = 1000468000, 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FORMAT_RESOLVE_PROPERTIES_ANDROID = 1000468001, VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_RESOLVE_PROPERTIES_ANDROID = 1000468002, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_5_FEATURES_KHR = 1000470000, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_5_PROPERTIES_KHR = 1000470001, - VK_STRUCTURE_TYPE_RENDERING_AREA_INFO_KHR = 1000470003, - VK_STRUCTURE_TYPE_DEVICE_IMAGE_SUBRESOURCE_INFO_KHR = 1000470004, - VK_STRUCTURE_TYPE_SUBRESOURCE_LAYOUT_2_KHR = 1000338002, - VK_STRUCTURE_TYPE_IMAGE_SUBRESOURCE_2_KHR = 1000338003, - VK_STRUCTURE_TYPE_PIPELINE_CREATE_FLAGS_2_CREATE_INFO_KHR = 1000470005, - VK_STRUCTURE_TYPE_BUFFER_USAGE_FLAGS_2_CREATE_INFO_KHR = 1000470006, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ANTI_LAG_FEATURES_AMD = 1000476000, - VK_STRUCTURE_TYPE_ANTI_LAG_DATA_AMD = 1000476001, - VK_STRUCTURE_TYPE_ANTI_LAG_PRESENTATION_INFO_AMD = 1000476002, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ANTI_LAG_FEATURES_AMD = 1000476000, VK_STRUCTURE_TYPE_ANTI_LAG_DATA_AMD = 1000476001, VK_STRUCTURE_TYPE_ANTI_LAG_PRESENTATION_INFO_AMD = 1000476002, @@ -1073,16 +1075,6 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_PIPELINE_CREATE_INFO_KHR = 1000483007, VK_STRUCTURE_TYPE_DEVICE_PIPELINE_BINARY_INTERNAL_CACHE_CONTROL_KHR = 1000483008, VK_STRUCTURE_TYPE_PIPELINE_BINARY_HANDLES_INFO_KHR = 1000483009, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_BINARY_FEATURES_KHR = 1000483000, - VK_STRUCTURE_TYPE_PIPELINE_BINARY_CREATE_INFO_KHR = 1000483001, - VK_STRUCTURE_TYPE_PIPELINE_BINARY_INFO_KHR = 1000483002, - VK_STRUCTURE_TYPE_PIPELINE_BINARY_KEY_KHR = 1000483003, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_BINARY_PROPERTIES_KHR = 1000483004, - VK_STRUCTURE_TYPE_RELEASE_CAPTURED_PIPELINE_DATA_INFO_KHR = 1000483005, - VK_STRUCTURE_TYPE_PIPELINE_BINARY_DATA_INFO_KHR = 1000483006, - VK_STRUCTURE_TYPE_PIPELINE_CREATE_INFO_KHR = 1000483007, - VK_STRUCTURE_TYPE_DEVICE_PIPELINE_BINARY_INTERNAL_CACHE_CONTROL_KHR = 1000483008, - VK_STRUCTURE_TYPE_PIPELINE_BINARY_HANDLES_INFO_KHR = 1000483009, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TILE_PROPERTIES_FEATURES_QCOM = 1000484000, VK_STRUCTURE_TYPE_TILE_PROPERTIES_QCOM = 1000484001, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_AMIGO_PROFILING_FEATURES_SEC = 1000485000, @@ -1096,8 +1088,6 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_EXT = 1000351002, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LEGACY_VERTEX_ATTRIBUTES_FEATURES_EXT = 1000495000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LEGACY_VERTEX_ATTRIBUTES_PROPERTIES_EXT = 1000495001, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LEGACY_VERTEX_ATTRIBUTES_FEATURES_EXT = 1000495000, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LEGACY_VERTEX_ATTRIBUTES_PROPERTIES_EXT = 1000495001, VK_STRUCTURE_TYPE_LAYER_SETTINGS_CREATE_INFO_EXT = 1000496000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_BUILTINS_FEATURES_ARM = 1000497000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_BUILTINS_PROPERTIES_ARM = 1000497001, @@ -1119,8 +1109,6 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_MULTIVIEW_PER_VIEW_RENDER_AREAS_RENDER_PASS_BEGIN_INFO_QCOM = 1000510001, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_FEATURES_KHR = 1000201000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_PROPERTIES_KHR = 1000511000, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_FEATURES_KHR = 1000201000, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_PROPERTIES_KHR = 1000511000, VK_STRUCTURE_TYPE_VIDEO_DECODE_AV1_CAPABILITIES_KHR = 1000512000, 
VK_STRUCTURE_TYPE_VIDEO_DECODE_AV1_PICTURE_INFO_KHR = 1000512001, VK_STRUCTURE_TYPE_VIDEO_DECODE_AV1_PROFILE_INFO_KHR = 1000512003, @@ -1150,29 +1138,13 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_YCBCR_DEGAMMA_CREATE_INFO_QCOM = 1000520001, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUBIC_CLAMP_FEATURES_QCOM = 1000521000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ATTACHMENT_FEEDBACK_LOOP_DYNAMIC_STATE_FEATURES_EXT = 1000524000, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_KHR = 1000525000, - VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_KHR = 1000190001, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_KHR = 1000190002, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT_CONTROLS_2_FEATURES_KHR = 1000528000, VK_STRUCTURE_TYPE_SCREEN_BUFFER_PROPERTIES_QNX = 1000529000, VK_STRUCTURE_TYPE_SCREEN_BUFFER_FORMAT_PROPERTIES_QNX = 1000529001, VK_STRUCTURE_TYPE_IMPORT_SCREEN_BUFFER_INFO_QNX = 1000529002, VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_QNX = 1000529003, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_SCREEN_BUFFER_FEATURES_QNX = 1000529004, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LAYERED_DRIVER_PROPERTIES_MSFT = 1000530000, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_KHR = 1000265000, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_KHR = 1000259000, - VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_KHR = 1000259001, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES_KHR = 1000259002, VK_STRUCTURE_TYPE_CALIBRATED_TIMESTAMP_INFO_KHR = 1000184000, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_EXPECT_ASSUME_FEATURES_KHR = 1000544000, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_6_FEATURES_KHR = 1000545000, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_6_PROPERTIES_KHR = 1000545001, - VK_STRUCTURE_TYPE_BIND_MEMORY_STATUS_KHR = 1000545002, - VK_STRUCTURE_TYPE_BIND_DESCRIPTOR_SETS_INFO_KHR = 1000545003, - VK_STRUCTURE_TYPE_PUSH_CONSTANTS_INFO_KHR = 1000545004, - VK_STRUCTURE_TYPE_PUSH_DESCRIPTOR_SET_INFO_KHR = 1000545005, - VK_STRUCTURE_TYPE_PUSH_DESCRIPTOR_SET_WITH_TEMPLATE_INFO_KHR = 1000545006, VK_STRUCTURE_TYPE_SET_DESCRIPTOR_BUFFER_OFFSETS_INFO_EXT = 1000545007, VK_STRUCTURE_TYPE_BIND_DESCRIPTOR_BUFFER_EMBEDDED_SAMPLERS_INFO_EXT = 1000545008, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_POOL_OVERALLOCATION_FEATURES_NV = 1000546000, @@ -1196,16 +1168,8 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LAYERED_API_PROPERTIES_LIST_KHR = 1000562002, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LAYERED_API_PROPERTIES_KHR = 1000562003, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LAYERED_API_VULKAN_PROPERTIES_KHR = 1000562004, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_RELAXED_EXTENDED_INSTRUCTION_FEATURES_KHR = 1000558000, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMMAND_BUFFER_INHERITANCE_FEATURES_NV = 1000559000, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_7_FEATURES_KHR = 1000562000, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_7_PROPERTIES_KHR = 1000562001, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LAYERED_API_PROPERTIES_LIST_KHR = 1000562002, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LAYERED_API_PROPERTIES_KHR = 1000562003, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LAYERED_API_VULKAN_PROPERTIES_KHR = 1000562004, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_FLOAT16_VECTOR_FEATURES_NV = 1000563000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_REPLICATED_COMPOSITES_FEATURES_EXT = 1000564000, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_REPLICATED_COMPOSITES_FEATURES_EXT = 
1000564000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_VALIDATION_FEATURES_NV = 1000568000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_FEATURES_EXT = 1000572000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_PROPERTIES_EXT = 1000572001, @@ -1231,9 +1195,9 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_2_FEATURES_NV = 1000593000, VK_STRUCTURE_TYPE_COOPERATIVE_MATRIX_FLEXIBLE_DIMENSIONS_PROPERTIES_NV = 1000593001, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_2_PROPERTIES_NV = 1000593002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_ROBUSTNESS_FEATURES_EXT = 1000608000, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETER_FEATURES = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES, - // VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT is a deprecated alias // VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT is a deprecated alias VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT = VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT, VK_STRUCTURE_TYPE_RENDERING_INFO_KHR = VK_STRUCTURE_TYPE_RENDERING_INFO, @@ -1261,6 +1225,9 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO_KHR = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO, VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO_KHR = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXTURE_COMPRESSION_ASTC_HDR_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXTURE_COMPRESSION_ASTC_HDR_FEATURES, + VK_STRUCTURE_TYPE_PIPELINE_ROBUSTNESS_CREATE_INFO_EXT = VK_STRUCTURE_TYPE_PIPELINE_ROBUSTNESS_CREATE_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_ROBUSTNESS_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_ROBUSTNESS_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_ROBUSTNESS_PROPERTIES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_ROBUSTNESS_PROPERTIES, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES, VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO, @@ -1274,11 +1241,11 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO, VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES_KHR = VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES, VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT16_INT8_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES, VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO, - // VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES2_EXT is a deprecated alias // VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES2_EXT is a 
deprecated alias VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES2_EXT = VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_EXT, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES, @@ -1316,7 +1283,6 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2_KHR = VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2, VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO, VK_STRUCTURE_TYPE_ATTACHMENT_SAMPLE_COUNT_INFO_NV = VK_STRUCTURE_TYPE_ATTACHMENT_SAMPLE_COUNT_INFO_AMD, - VK_STRUCTURE_TYPE_ATTACHMENT_SAMPLE_COUNT_INFO_NV = VK_STRUCTURE_TYPE_ATTACHMENT_SAMPLE_COUNT_INFO_AMD, VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO, VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO_KHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO, VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO_KHR = VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO, @@ -1332,20 +1298,22 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT_EXT = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES, VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT_KHR = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT, - VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_EXT = VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_KHR, + VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_EXT = VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES, VK_STRUCTURE_TYPE_CALIBRATED_TIMESTAMP_INFO_EXT = VK_STRUCTURE_TYPE_CALIBRATED_TIMESTAMP_INFO_KHR, - VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_EXT = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_KHR, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_KHR, + VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GLOBAL_PRIORITY_QUERY_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GLOBAL_PRIORITY_QUERY_FEATURES, + VK_STRUCTURE_TYPE_QUEUE_FAMILY_GLOBAL_PRIORITY_PROPERTIES_KHR = VK_STRUCTURE_TYPE_QUEUE_FAMILY_GLOBAL_PRIORITY_PROPERTIES, + VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_EXT = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES, VK_STRUCTURE_TYPE_PIPELINE_CREATION_FEEDBACK_CREATE_INFO_EXT = VK_STRUCTURE_TYPE_PIPELINE_CREATION_FEEDBACK_CREATE_INFO, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES_KHR = 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES, VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE_KHR = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_FEATURES_NV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_FEATURES_KHR, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_FEATURES_NV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_FEATURES_KHR, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_NV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_KHR, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES, @@ -1353,7 +1321,6 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO_KHR = VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO, VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO_KHR = VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO, VK_STRUCTURE_TYPE_SEMAPHORE_SIGNAL_INFO_KHR = VK_STRUCTURE_TYPE_SEMAPHORE_SIGNAL_INFO, - // VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO_INTEL is a deprecated alias // VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO_INTEL is a deprecated alias VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO_INTEL = VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_QUERY_CREATE_INFO_INTEL, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES, @@ -1362,6 +1329,9 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES, VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO_EXT = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DYNAMIC_RENDERING_LOCAL_READ_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DYNAMIC_RENDERING_LOCAL_READ_FEATURES, + VK_STRUCTURE_TYPE_RENDERING_ATTACHMENT_LOCATION_INFO_KHR = VK_STRUCTURE_TYPE_RENDERING_ATTACHMENT_LOCATION_INFO, + VK_STRUCTURE_TYPE_RENDERING_INPUT_ATTACHMENT_INDEX_INFO_KHR = VK_STRUCTURE_TYPE_RENDERING_INPUT_ATTACHMENT_INDEX_INFO, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES, VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_STENCIL_LAYOUT_KHR = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_STENCIL_LAYOUT, VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT_KHR = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT, @@ -1375,11 +1345,23 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_BUFFER_OPAQUE_CAPTURE_ADDRESS_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_BUFFER_OPAQUE_CAPTURE_ADDRESS_CREATE_INFO, VK_STRUCTURE_TYPE_MEMORY_OPAQUE_CAPTURE_ADDRESS_ALLOCATE_INFO_KHR = VK_STRUCTURE_TYPE_MEMORY_OPAQUE_CAPTURE_ADDRESS_ALLOCATE_INFO, VK_STRUCTURE_TYPE_DEVICE_MEMORY_OPAQUE_CAPTURE_ADDRESS_INFO_KHR = VK_STRUCTURE_TYPE_DEVICE_MEMORY_OPAQUE_CAPTURE_ADDRESS_INFO, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_EXT = 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_KHR, - VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_EXT = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_KHR, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES_KHR, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES, + VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_EXT = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_KHR, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_IMAGE_COPY_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_IMAGE_COPY_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_IMAGE_COPY_PROPERTIES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_IMAGE_COPY_PROPERTIES, + VK_STRUCTURE_TYPE_MEMORY_TO_IMAGE_COPY_EXT = VK_STRUCTURE_TYPE_MEMORY_TO_IMAGE_COPY, + VK_STRUCTURE_TYPE_IMAGE_TO_MEMORY_COPY_EXT = VK_STRUCTURE_TYPE_IMAGE_TO_MEMORY_COPY, + VK_STRUCTURE_TYPE_COPY_IMAGE_TO_MEMORY_INFO_EXT = VK_STRUCTURE_TYPE_COPY_IMAGE_TO_MEMORY_INFO, + VK_STRUCTURE_TYPE_COPY_MEMORY_TO_IMAGE_INFO_EXT = VK_STRUCTURE_TYPE_COPY_MEMORY_TO_IMAGE_INFO, + VK_STRUCTURE_TYPE_HOST_IMAGE_LAYOUT_TRANSITION_INFO_EXT = VK_STRUCTURE_TYPE_HOST_IMAGE_LAYOUT_TRANSITION_INFO, + VK_STRUCTURE_TYPE_COPY_IMAGE_TO_IMAGE_INFO_EXT = VK_STRUCTURE_TYPE_COPY_IMAGE_TO_IMAGE_INFO, + VK_STRUCTURE_TYPE_SUBRESOURCE_HOST_MEMCPY_SIZE_EXT = VK_STRUCTURE_TYPE_SUBRESOURCE_HOST_MEMCPY_SIZE, + VK_STRUCTURE_TYPE_HOST_IMAGE_COPY_DEVICE_PERFORMANCE_QUERY_EXT = VK_STRUCTURE_TYPE_HOST_IMAGE_COPY_DEVICE_PERFORMANCE_QUERY, + VK_STRUCTURE_TYPE_MEMORY_MAP_INFO_KHR = VK_STRUCTURE_TYPE_MEMORY_MAP_INFO, + VK_STRUCTURE_TYPE_MEMORY_UNMAP_INFO_KHR = VK_STRUCTURE_TYPE_MEMORY_UNMAP_INFO, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_FEATURES, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_PROPERTIES, @@ -1409,20 +1391,46 @@ typedef enum VkStructureType { VK_STRUCTURE_TYPE_IMAGE_BLIT_2_KHR = VK_STRUCTURE_TYPE_IMAGE_BLIT_2, VK_STRUCTURE_TYPE_BUFFER_IMAGE_COPY_2_KHR = VK_STRUCTURE_TYPE_BUFFER_IMAGE_COPY_2, VK_STRUCTURE_TYPE_IMAGE_RESOLVE_2_KHR = VK_STRUCTURE_TYPE_IMAGE_RESOLVE_2, - VK_STRUCTURE_TYPE_SUBRESOURCE_LAYOUT_2_EXT = VK_STRUCTURE_TYPE_SUBRESOURCE_LAYOUT_2_KHR, - VK_STRUCTURE_TYPE_IMAGE_SUBRESOURCE_2_EXT = VK_STRUCTURE_TYPE_IMAGE_SUBRESOURCE_2_KHR, + VK_STRUCTURE_TYPE_SUBRESOURCE_LAYOUT_2_EXT = VK_STRUCTURE_TYPE_SUBRESOURCE_LAYOUT_2, + VK_STRUCTURE_TYPE_IMAGE_SUBRESOURCE_2_EXT = VK_STRUCTURE_TYPE_IMAGE_SUBRESOURCE_2, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_FEATURES_ARM = 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_FEATURES_EXT, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MUTABLE_DESCRIPTOR_TYPE_FEATURES_VALVE = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MUTABLE_DESCRIPTOR_TYPE_FEATURES_EXT, VK_STRUCTURE_TYPE_MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_VALVE = VK_STRUCTURE_TYPE_MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_EXT, VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_3_KHR = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_3, VK_STRUCTURE_TYPE_PIPELINE_INFO_EXT = VK_STRUCTURE_TYPE_PIPELINE_INFO_KHR, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GLOBAL_PRIORITY_QUERY_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GLOBAL_PRIORITY_QUERY_FEATURES_KHR, - VK_STRUCTURE_TYPE_QUEUE_FAMILY_GLOBAL_PRIORITY_PROPERTIES_EXT = VK_STRUCTURE_TYPE_QUEUE_FAMILY_GLOBAL_PRIORITY_PROPERTIES_KHR, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GLOBAL_PRIORITY_QUERY_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GLOBAL_PRIORITY_QUERY_FEATURES, + VK_STRUCTURE_TYPE_QUEUE_FAMILY_GLOBAL_PRIORITY_PROPERTIES_EXT = VK_STRUCTURE_TYPE_QUEUE_FAMILY_GLOBAL_PRIORITY_PROPERTIES, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_FEATURES, VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_PROPERTIES, VK_STRUCTURE_TYPE_DEVICE_BUFFER_MEMORY_REQUIREMENTS_KHR = VK_STRUCTURE_TYPE_DEVICE_BUFFER_MEMORY_REQUIREMENTS, VK_STRUCTURE_TYPE_DEVICE_IMAGE_MEMORY_REQUIREMENTS_KHR = VK_STRUCTURE_TYPE_DEVICE_IMAGE_MEMORY_REQUIREMENTS, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_ROTATE_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_ROTATE_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_PROTECTED_ACCESS_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_PROTECTED_ACCESS_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_5_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_5_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_5_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_5_PROPERTIES, + VK_STRUCTURE_TYPE_RENDERING_AREA_INFO_KHR = VK_STRUCTURE_TYPE_RENDERING_AREA_INFO, + VK_STRUCTURE_TYPE_DEVICE_IMAGE_SUBRESOURCE_INFO_KHR = VK_STRUCTURE_TYPE_DEVICE_IMAGE_SUBRESOURCE_INFO, + VK_STRUCTURE_TYPE_SUBRESOURCE_LAYOUT_2_KHR = VK_STRUCTURE_TYPE_SUBRESOURCE_LAYOUT_2, + VK_STRUCTURE_TYPE_IMAGE_SUBRESOURCE_2_KHR = VK_STRUCTURE_TYPE_IMAGE_SUBRESOURCE_2, + VK_STRUCTURE_TYPE_PIPELINE_CREATE_FLAGS_2_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_PIPELINE_CREATE_FLAGS_2_CREATE_INFO, + VK_STRUCTURE_TYPE_BUFFER_USAGE_FLAGS_2_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_BUFFER_USAGE_FLAGS_2_CREATE_INFO, VK_STRUCTURE_TYPE_SHADER_REQUIRED_SUBGROUP_SIZE_CREATE_INFO_EXT = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES, + VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT_CONTROLS_2_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT_CONTROLS_2_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES, + 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES, + VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_EXPECT_ASSUME_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_EXPECT_ASSUME_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_6_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_6_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_6_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_6_PROPERTIES, + VK_STRUCTURE_TYPE_BIND_MEMORY_STATUS_KHR = VK_STRUCTURE_TYPE_BIND_MEMORY_STATUS, + VK_STRUCTURE_TYPE_BIND_DESCRIPTOR_SETS_INFO_KHR = VK_STRUCTURE_TYPE_BIND_DESCRIPTOR_SETS_INFO, + VK_STRUCTURE_TYPE_PUSH_CONSTANTS_INFO_KHR = VK_STRUCTURE_TYPE_PUSH_CONSTANTS_INFO, + VK_STRUCTURE_TYPE_PUSH_DESCRIPTOR_SET_INFO_KHR = VK_STRUCTURE_TYPE_PUSH_DESCRIPTOR_SET_INFO, + VK_STRUCTURE_TYPE_PUSH_DESCRIPTOR_SET_WITH_TEMPLATE_INFO_KHR = VK_STRUCTURE_TYPE_PUSH_DESCRIPTOR_SET_WITH_TEMPLATE_INFO, VK_STRUCTURE_TYPE_MAX_ENUM = 0x7FFFFFFF } VkStructureType; @@ -1449,6 +1457,7 @@ typedef enum VkImageLayout { VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL = 1000241003, VK_IMAGE_LAYOUT_READ_ONLY_OPTIMAL = 1000314000, VK_IMAGE_LAYOUT_ATTACHMENT_OPTIMAL = 1000314001, + VK_IMAGE_LAYOUT_RENDERING_LOCAL_READ = 1000232000, VK_IMAGE_LAYOUT_PRESENT_SRC_KHR = 1000001002, VK_IMAGE_LAYOUT_VIDEO_DECODE_DST_KHR = 1000024000, VK_IMAGE_LAYOUT_VIDEO_DECODE_SRC_KHR = 1000024001, @@ -1456,7 +1465,6 @@ typedef enum VkImageLayout { VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR = 1000111000, VK_IMAGE_LAYOUT_FRAGMENT_DENSITY_MAP_OPTIMAL_EXT = 1000218000, VK_IMAGE_LAYOUT_FRAGMENT_SHADING_RATE_ATTACHMENT_OPTIMAL_KHR = 1000164003, - VK_IMAGE_LAYOUT_RENDERING_LOCAL_READ_KHR = 1000232000, VK_IMAGE_LAYOUT_VIDEO_ENCODE_DST_KHR = 1000299000, VK_IMAGE_LAYOUT_VIDEO_ENCODE_SRC_KHR = 1000299001, VK_IMAGE_LAYOUT_VIDEO_ENCODE_DPB_KHR = 1000299002, @@ -1465,6 +1473,7 @@ typedef enum VkImageLayout { VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL_KHR = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL, VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL_KHR = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL, VK_IMAGE_LAYOUT_SHADING_RATE_OPTIMAL_NV = VK_IMAGE_LAYOUT_FRAGMENT_SHADING_RATE_ATTACHMENT_OPTIMAL_KHR, + VK_IMAGE_LAYOUT_RENDERING_LOCAL_READ_KHR = VK_IMAGE_LAYOUT_RENDERING_LOCAL_READ, VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL, VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL, VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR = VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL, @@ -1529,9 +1538,6 @@ typedef enum VkObjectType { VK_OBJECT_TYPE_PIPELINE_BINARY_KHR = 1000483000, VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_EXT = 1000572000, VK_OBJECT_TYPE_INDIRECT_EXECUTION_SET_EXT = 1000572001, - VK_OBJECT_TYPE_PIPELINE_BINARY_KHR = 1000483000, - VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_EXT = 1000572000, - VK_OBJECT_TYPE_INDIRECT_EXECUTION_SET_EXT = 1000572001, VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR = VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE, VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR = VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION, VK_OBJECT_TYPE_PRIVATE_DATA_SLOT_EXT = 
VK_OBJECT_TYPE_PRIVATE_DATA_SLOT, @@ -1539,7 +1545,6 @@ typedef enum VkObjectType { } VkObjectType; typedef enum VkVendorId { - VK_VENDOR_ID_KHRONOS = 0x10000, VK_VENDOR_ID_KHRONOS = 0x10000, VK_VENDOR_ID_VIV = 0x10001, VK_VENDOR_ID_VSI = 0x10002, @@ -1805,6 +1810,8 @@ typedef enum VkFormat { VK_FORMAT_ASTC_10x10_SFLOAT_BLOCK = 1000066011, VK_FORMAT_ASTC_12x10_SFLOAT_BLOCK = 1000066012, VK_FORMAT_ASTC_12x12_SFLOAT_BLOCK = 1000066013, + VK_FORMAT_A1B5G5R5_UNORM_PACK16 = 1000470000, + VK_FORMAT_A8_UNORM = 1000470001, VK_FORMAT_PVRTC1_2BPP_UNORM_BLOCK_IMG = 1000054000, VK_FORMAT_PVRTC1_4BPP_UNORM_BLOCK_IMG = 1000054001, VK_FORMAT_PVRTC2_2BPP_UNORM_BLOCK_IMG = 1000054002, @@ -1814,9 +1821,6 @@ typedef enum VkFormat { VK_FORMAT_PVRTC2_2BPP_SRGB_BLOCK_IMG = 1000054006, VK_FORMAT_PVRTC2_4BPP_SRGB_BLOCK_IMG = 1000054007, VK_FORMAT_R16G16_SFIXED5_NV = 1000464000, - VK_FORMAT_R16G16_SFIXED5_NV = 1000464000, - VK_FORMAT_A1B5G5R5_UNORM_PACK16_KHR = 1000470000, - VK_FORMAT_A8_UNORM_KHR = 1000470001, VK_FORMAT_ASTC_4x4_SFLOAT_BLOCK_EXT = VK_FORMAT_ASTC_4x4_SFLOAT_BLOCK, VK_FORMAT_ASTC_5x4_SFLOAT_BLOCK_EXT = VK_FORMAT_ASTC_5x4_SFLOAT_BLOCK, VK_FORMAT_ASTC_5x5_SFLOAT_BLOCK_EXT = VK_FORMAT_ASTC_5x5_SFLOAT_BLOCK, @@ -1873,8 +1877,8 @@ typedef enum VkFormat { VK_FORMAT_A4B4G4R4_UNORM_PACK16_EXT = VK_FORMAT_A4B4G4R4_UNORM_PACK16, // VK_FORMAT_R16G16_S10_5_NV is a deprecated alias VK_FORMAT_R16G16_S10_5_NV = VK_FORMAT_R16G16_SFIXED5_NV, - // VK_FORMAT_R16G16_S10_5_NV is a deprecated alias - VK_FORMAT_R16G16_S10_5_NV = VK_FORMAT_R16G16_SFIXED5_NV, + VK_FORMAT_A1B5G5R5_UNORM_PACK16_KHR = VK_FORMAT_A1B5G5R5_UNORM_PACK16, + VK_FORMAT_A8_UNORM_KHR = VK_FORMAT_A8_UNORM, VK_FORMAT_MAX_ENUM = 0x7FFFFFFF } VkFormat; @@ -2065,6 +2069,7 @@ typedef enum VkDynamicState { VK_DYNAMIC_STATE_RASTERIZER_DISCARD_ENABLE = 1000377001, VK_DYNAMIC_STATE_DEPTH_BIAS_ENABLE = 1000377002, VK_DYNAMIC_STATE_PRIMITIVE_RESTART_ENABLE = 1000377004, + VK_DYNAMIC_STATE_LINE_STIPPLE = 1000259000, VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV = 1000087000, VK_DYNAMIC_STATE_DISCARD_RECTANGLE_EXT = 1000099000, VK_DYNAMIC_STATE_DISCARD_RECTANGLE_ENABLE_EXT = 1000099001, @@ -2112,10 +2117,8 @@ typedef enum VkDynamicState { VK_DYNAMIC_STATE_REPRESENTATIVE_FRAGMENT_TEST_ENABLE_NV = 1000455031, VK_DYNAMIC_STATE_COVERAGE_REDUCTION_MODE_NV = 1000455032, VK_DYNAMIC_STATE_ATTACHMENT_FEEDBACK_LOOP_ENABLE_EXT = 1000524000, - VK_DYNAMIC_STATE_LINE_STIPPLE_KHR = 1000259000, VK_DYNAMIC_STATE_DEPTH_CLAMP_RANGE_EXT = 1000582000, - VK_DYNAMIC_STATE_DEPTH_CLAMP_RANGE_EXT = 1000582000, - VK_DYNAMIC_STATE_LINE_STIPPLE_EXT = VK_DYNAMIC_STATE_LINE_STIPPLE_KHR, + VK_DYNAMIC_STATE_LINE_STIPPLE_EXT = VK_DYNAMIC_STATE_LINE_STIPPLE, VK_DYNAMIC_STATE_CULL_MODE_EXT = VK_DYNAMIC_STATE_CULL_MODE, VK_DYNAMIC_STATE_FRONT_FACE_EXT = VK_DYNAMIC_STATE_FRONT_FACE, VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY_EXT = VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY, @@ -2131,6 +2134,7 @@ typedef enum VkDynamicState { VK_DYNAMIC_STATE_RASTERIZER_DISCARD_ENABLE_EXT = VK_DYNAMIC_STATE_RASTERIZER_DISCARD_ENABLE, VK_DYNAMIC_STATE_DEPTH_BIAS_ENABLE_EXT = VK_DYNAMIC_STATE_DEPTH_BIAS_ENABLE, VK_DYNAMIC_STATE_PRIMITIVE_RESTART_ENABLE_EXT = VK_DYNAMIC_STATE_PRIMITIVE_RESTART_ENABLE, + VK_DYNAMIC_STATE_LINE_STIPPLE_KHR = VK_DYNAMIC_STATE_LINE_STIPPLE, VK_DYNAMIC_STATE_MAX_ENUM = 0x7FFFFFFF } VkDynamicState; @@ -2227,7 +2231,6 @@ typedef enum VkSamplerAddressMode { VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE = 2, VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER = 3, VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE = 4, - // 
VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE_KHR is a deprecated alias // VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE_KHR is a deprecated alias VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE_KHR = VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE, VK_SAMPLER_ADDRESS_MODE_MAX_ENUM = 0x7FFFFFFF @@ -2266,8 +2269,9 @@ typedef enum VkAttachmentLoadOp { VK_ATTACHMENT_LOAD_OP_LOAD = 0, VK_ATTACHMENT_LOAD_OP_CLEAR = 1, VK_ATTACHMENT_LOAD_OP_DONT_CARE = 2, - VK_ATTACHMENT_LOAD_OP_NONE_KHR = 1000400000, - VK_ATTACHMENT_LOAD_OP_NONE_EXT = VK_ATTACHMENT_LOAD_OP_NONE_KHR, + VK_ATTACHMENT_LOAD_OP_NONE = 1000400000, + VK_ATTACHMENT_LOAD_OP_NONE_EXT = VK_ATTACHMENT_LOAD_OP_NONE, + VK_ATTACHMENT_LOAD_OP_NONE_KHR = VK_ATTACHMENT_LOAD_OP_NONE, VK_ATTACHMENT_LOAD_OP_MAX_ENUM = 0x7FFFFFFF } VkAttachmentLoadOp; @@ -2302,10 +2306,11 @@ typedef enum VkCommandBufferLevel { typedef enum VkIndexType { VK_INDEX_TYPE_UINT16 = 0, VK_INDEX_TYPE_UINT32 = 1, + VK_INDEX_TYPE_UINT8 = 1000265000, VK_INDEX_TYPE_NONE_KHR = 1000165000, - VK_INDEX_TYPE_UINT8_KHR = 1000265000, VK_INDEX_TYPE_NONE_NV = VK_INDEX_TYPE_NONE_KHR, - VK_INDEX_TYPE_UINT8_EXT = VK_INDEX_TYPE_UINT8_KHR, + VK_INDEX_TYPE_UINT8_EXT = VK_INDEX_TYPE_UINT8, + VK_INDEX_TYPE_UINT8_KHR = VK_INDEX_TYPE_UINT8, VK_INDEX_TYPE_MAX_ENUM = 0x7FFFFFFF } VkIndexType; @@ -2314,8 +2319,6 @@ typedef enum VkSubpassContents { VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS = 1, VK_SUBPASS_CONTENTS_INLINE_AND_SECONDARY_COMMAND_BUFFERS_KHR = 1000451000, VK_SUBPASS_CONTENTS_INLINE_AND_SECONDARY_COMMAND_BUFFERS_EXT = VK_SUBPASS_CONTENTS_INLINE_AND_SECONDARY_COMMAND_BUFFERS_KHR, - VK_SUBPASS_CONTENTS_INLINE_AND_SECONDARY_COMMAND_BUFFERS_KHR = 1000451000, - VK_SUBPASS_CONTENTS_INLINE_AND_SECONDARY_COMMAND_BUFFERS_EXT = VK_SUBPASS_CONTENTS_INLINE_AND_SECONDARY_COMMAND_BUFFERS_KHR, VK_SUBPASS_CONTENTS_MAX_ENUM = 0x7FFFFFFF } VkSubpassContents; @@ -2355,8 +2358,6 @@ typedef enum VkAccessFlagBits { VK_ACCESS_NONE_KHR = VK_ACCESS_NONE, VK_ACCESS_COMMAND_PREPROCESS_READ_BIT_EXT = VK_ACCESS_COMMAND_PREPROCESS_READ_BIT_NV, VK_ACCESS_COMMAND_PREPROCESS_WRITE_BIT_EXT = VK_ACCESS_COMMAND_PREPROCESS_WRITE_BIT_NV, - VK_ACCESS_COMMAND_PREPROCESS_READ_BIT_EXT = VK_ACCESS_COMMAND_PREPROCESS_READ_BIT_NV, - VK_ACCESS_COMMAND_PREPROCESS_WRITE_BIT_EXT = VK_ACCESS_COMMAND_PREPROCESS_WRITE_BIT_NV, VK_ACCESS_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF } VkAccessFlagBits; typedef VkFlags VkAccessFlags; @@ -2481,12 +2482,12 @@ typedef enum VkImageUsageFlagBits { VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT = 0x00000020, VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT = 0x00000040, VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT = 0x00000080, + VK_IMAGE_USAGE_HOST_TRANSFER_BIT = 0x00400000, VK_IMAGE_USAGE_VIDEO_DECODE_DST_BIT_KHR = 0x00000400, VK_IMAGE_USAGE_VIDEO_DECODE_SRC_BIT_KHR = 0x00000800, VK_IMAGE_USAGE_VIDEO_DECODE_DPB_BIT_KHR = 0x00001000, VK_IMAGE_USAGE_FRAGMENT_DENSITY_MAP_BIT_EXT = 0x00000200, VK_IMAGE_USAGE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR = 0x00000100, - VK_IMAGE_USAGE_HOST_TRANSFER_BIT_EXT = 0x00400000, VK_IMAGE_USAGE_VIDEO_ENCODE_DST_BIT_KHR = 0x00002000, VK_IMAGE_USAGE_VIDEO_ENCODE_SRC_BIT_KHR = 0x00004000, VK_IMAGE_USAGE_VIDEO_ENCODE_DPB_BIT_KHR = 0x00008000, @@ -2497,6 +2498,7 @@ typedef enum VkImageUsageFlagBits { VK_IMAGE_USAGE_VIDEO_ENCODE_QUANTIZATION_DELTA_MAP_BIT_KHR = 0x02000000, VK_IMAGE_USAGE_VIDEO_ENCODE_EMPHASIS_MAP_BIT_KHR = 0x04000000, VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV = VK_IMAGE_USAGE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR, + VK_IMAGE_USAGE_HOST_TRANSFER_BIT_EXT = VK_IMAGE_USAGE_HOST_TRANSFER_BIT, 
VK_IMAGE_USAGE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF } VkImageUsageFlagBits; typedef VkFlags VkImageUsageFlags; @@ -2584,7 +2586,6 @@ typedef enum VkPipelineStageFlagBits { VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV = VK_PIPELINE_STAGE_MESH_SHADER_BIT_EXT, VK_PIPELINE_STAGE_NONE_KHR = VK_PIPELINE_STAGE_NONE, VK_PIPELINE_STAGE_COMMAND_PREPROCESS_BIT_EXT = VK_PIPELINE_STAGE_COMMAND_PREPROCESS_BIT_NV, - VK_PIPELINE_STAGE_COMMAND_PREPROCESS_BIT_EXT = VK_PIPELINE_STAGE_COMMAND_PREPROCESS_BIT_NV, VK_PIPELINE_STAGE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF } VkPipelineStageFlagBits; typedef VkFlags VkPipelineStageFlags; @@ -2737,6 +2738,8 @@ typedef enum VkPipelineCreateFlagBits { VK_PIPELINE_CREATE_DISPATCH_BASE_BIT = 0x00000010, VK_PIPELINE_CREATE_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT = 0x00000100, VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT = 0x00000200, + VK_PIPELINE_CREATE_NO_PROTECTED_ACCESS_BIT = 0x08000000, + VK_PIPELINE_CREATE_PROTECTED_ACCESS_ONLY_BIT = 0x40000000, VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_ANY_HIT_SHADERS_BIT_KHR = 0x00004000, VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_CLOSEST_HIT_SHADERS_BIT_KHR = 0x00008000, VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_MISS_SHADERS_BIT_KHR = 0x00010000, @@ -2747,8 +2750,6 @@ typedef enum VkPipelineCreateFlagBits { VK_PIPELINE_CREATE_DEFER_COMPILE_BIT_NV = 0x00000020, VK_PIPELINE_CREATE_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT = 0x00400000, VK_PIPELINE_CREATE_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR = 0x00200000, - VK_PIPELINE_CREATE_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT = 0x00400000, - VK_PIPELINE_CREATE_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR = 0x00200000, VK_PIPELINE_CREATE_CAPTURE_STATISTICS_BIT_KHR = 0x00000040, VK_PIPELINE_CREATE_CAPTURE_INTERNAL_REPRESENTATIONS_BIT_KHR = 0x00000080, VK_PIPELINE_CREATE_INDIRECT_BINDABLE_BIT_NV = 0x00040000, @@ -2763,21 +2764,17 @@ typedef enum VkPipelineCreateFlagBits { #ifdef VK_ENABLE_BETA_EXTENSIONS VK_PIPELINE_CREATE_RAY_TRACING_DISPLACEMENT_MICROMAP_BIT_NV = 0x10000000, #endif - VK_PIPELINE_CREATE_NO_PROTECTED_ACCESS_BIT_EXT = 0x08000000, - VK_PIPELINE_CREATE_PROTECTED_ACCESS_ONLY_BIT_EXT = 0x40000000, VK_PIPELINE_CREATE_DISPATCH_BASE = VK_PIPELINE_CREATE_DISPATCH_BASE_BIT, VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT_KHR = VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT, VK_PIPELINE_CREATE_DISPATCH_BASE_KHR = VK_PIPELINE_CREATE_DISPATCH_BASE, // VK_PIPELINE_RASTERIZATION_STATE_CREATE_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT is a deprecated alias VK_PIPELINE_RASTERIZATION_STATE_CREATE_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT = VK_PIPELINE_CREATE_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT, - // VK_PIPELINE_RASTERIZATION_STATE_CREATE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR is a deprecated alias - VK_PIPELINE_RASTERIZATION_STATE_CREATE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR = VK_PIPELINE_CREATE_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR, - // VK_PIPELINE_RASTERIZATION_STATE_CREATE_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT is a deprecated alias - VK_PIPELINE_RASTERIZATION_STATE_CREATE_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT = VK_PIPELINE_CREATE_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT, // VK_PIPELINE_RASTERIZATION_STATE_CREATE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR is a deprecated alias VK_PIPELINE_RASTERIZATION_STATE_CREATE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR = VK_PIPELINE_CREATE_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR, VK_PIPELINE_CREATE_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT_EXT = 
VK_PIPELINE_CREATE_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT, VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT_EXT = VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT, + VK_PIPELINE_CREATE_NO_PROTECTED_ACCESS_BIT_EXT = VK_PIPELINE_CREATE_NO_PROTECTED_ACCESS_BIT, + VK_PIPELINE_CREATE_PROTECTED_ACCESS_ONLY_BIT_EXT = VK_PIPELINE_CREATE_PROTECTED_ACCESS_ONLY_BIT, VK_PIPELINE_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF } VkPipelineCreateFlagBits; typedef VkFlags VkPipelineCreateFlags; @@ -2885,12 +2882,13 @@ typedef VkFlags VkDescriptorPoolResetFlags; typedef enum VkDescriptorSetLayoutCreateFlagBits { VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT = 0x00000002, - VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR = 0x00000001, + VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT = 0x00000001, VK_DESCRIPTOR_SET_LAYOUT_CREATE_DESCRIPTOR_BUFFER_BIT_EXT = 0x00000010, VK_DESCRIPTOR_SET_LAYOUT_CREATE_EMBEDDED_IMMUTABLE_SAMPLERS_BIT_EXT = 0x00000020, VK_DESCRIPTOR_SET_LAYOUT_CREATE_INDIRECT_BINDABLE_BIT_NV = 0x00000080, VK_DESCRIPTOR_SET_LAYOUT_CREATE_HOST_ONLY_POOL_BIT_EXT = 0x00000004, VK_DESCRIPTOR_SET_LAYOUT_CREATE_PER_STAGE_BIT_NV = 0x00000040, + VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR = VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT, VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT = VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT, VK_DESCRIPTOR_SET_LAYOUT_CREATE_HOST_ONLY_POOL_BIT_VALVE = VK_DESCRIPTOR_SET_LAYOUT_CREATE_HOST_ONLY_POOL_BIT_EXT, VK_DESCRIPTOR_SET_LAYOUT_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF @@ -2981,7 +2979,6 @@ typedef enum VkStencilFaceFlagBits { VK_STENCIL_FACE_FRONT_BIT = 0x00000001, VK_STENCIL_FACE_BACK_BIT = 0x00000002, VK_STENCIL_FACE_FRONT_AND_BACK = 0x00000003, - // VK_STENCIL_FRONT_AND_BACK is a deprecated alias // VK_STENCIL_FRONT_AND_BACK is a deprecated alias VK_STENCIL_FRONT_AND_BACK = VK_STENCIL_FACE_FRONT_AND_BACK, VK_STENCIL_FACE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF @@ -3394,10 +3391,8 @@ typedef struct VkDeviceCreateInfo { uint32_t queueCreateInfoCount; const VkDeviceQueueCreateInfo* pQueueCreateInfos; // enabledLayerCount is deprecated and should not be used - // enabledLayerCount is deprecated and should not be used uint32_t enabledLayerCount; // ppEnabledLayerNames is deprecated and should not be used - // ppEnabledLayerNames is deprecated and should not be used const char* const* ppEnabledLayerNames; uint32_t enabledExtensionCount; const char* const* ppEnabledExtensionNames; @@ -5111,7 +5106,8 @@ typedef enum VkChromaLocation { typedef enum VkDescriptorUpdateTemplateType { VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET = 0, - VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR = 1, + VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS = 1, + VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS, VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET_KHR = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET, VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_MAX_ENUM = 0x7FFFFFFF } VkDescriptorUpdateTemplateType; @@ -5125,9 +5121,11 @@ typedef enum VkSubgroupFeatureFlagBits { VK_SUBGROUP_FEATURE_SHUFFLE_RELATIVE_BIT = 0x00000020, VK_SUBGROUP_FEATURE_CLUSTERED_BIT = 0x00000040, VK_SUBGROUP_FEATURE_QUAD_BIT = 0x00000080, + VK_SUBGROUP_FEATURE_ROTATE_BIT = 0x00000200, + VK_SUBGROUP_FEATURE_ROTATE_CLUSTERED_BIT = 0x00000400, VK_SUBGROUP_FEATURE_PARTITIONED_BIT_NV = 0x00000100, - VK_SUBGROUP_FEATURE_ROTATE_BIT_KHR = 0x00000200, - VK_SUBGROUP_FEATURE_ROTATE_CLUSTERED_BIT_KHR = 
0x00000400, + VK_SUBGROUP_FEATURE_ROTATE_BIT_KHR = VK_SUBGROUP_FEATURE_ROTATE_BIT, + VK_SUBGROUP_FEATURE_ROTATE_CLUSTERED_BIT_KHR = VK_SUBGROUP_FEATURE_ROTATE_CLUSTERED_BIT, VK_SUBGROUP_FEATURE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF } VkSubgroupFeatureFlagBits; typedef VkFlags VkSubgroupFeatureFlags; @@ -5957,8 +5955,6 @@ typedef enum VkDriverId { VK_DRIVER_ID_IMAGINATION_OPEN_SOURCE_MESA = 25, VK_DRIVER_ID_MESA_HONEYKRISP = 26, VK_DRIVER_ID_RESERVED_27 = 27, - VK_DRIVER_ID_MESA_HONEYKRISP = 26, - VK_DRIVER_ID_RESERVED_27 = 27, VK_DRIVER_ID_AMD_PROPRIETARY_KHR = VK_DRIVER_ID_AMD_PROPRIETARY, VK_DRIVER_ID_AMD_OPEN_SOURCE_KHR = VK_DRIVER_ID_AMD_OPEN_SOURCE, VK_DRIVER_ID_MESA_RADV_KHR = VK_DRIVER_ID_MESA_RADV, @@ -6723,64 +6719,63 @@ typedef VkFlags64 VkPipelineStageFlags2; // Flag bits for VkPipelineStageFlagBits2 typedef VkFlags64 VkPipelineStageFlagBits2; static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_NONE = 0ULL; -static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_NONE_KHR = 0ULL; static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT = 0x00000001ULL; -static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT_KHR = 0x00000001ULL; static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_DRAW_INDIRECT_BIT = 0x00000002ULL; -static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_DRAW_INDIRECT_BIT_KHR = 0x00000002ULL; static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_VERTEX_INPUT_BIT = 0x00000004ULL; -static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_VERTEX_INPUT_BIT_KHR = 0x00000004ULL; static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_VERTEX_SHADER_BIT = 0x00000008ULL; -static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_VERTEX_SHADER_BIT_KHR = 0x00000008ULL; static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_TESSELLATION_CONTROL_SHADER_BIT = 0x00000010ULL; -static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_TESSELLATION_CONTROL_SHADER_BIT_KHR = 0x00000010ULL; static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_TESSELLATION_EVALUATION_SHADER_BIT = 0x00000020ULL; -static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_TESSELLATION_EVALUATION_SHADER_BIT_KHR = 0x00000020ULL; static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_GEOMETRY_SHADER_BIT = 0x00000040ULL; -static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_GEOMETRY_SHADER_BIT_KHR = 0x00000040ULL; static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_FRAGMENT_SHADER_BIT = 0x00000080ULL; -static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_FRAGMENT_SHADER_BIT_KHR = 0x00000080ULL; static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_EARLY_FRAGMENT_TESTS_BIT = 0x00000100ULL; -static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_EARLY_FRAGMENT_TESTS_BIT_KHR = 0x00000100ULL; static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_LATE_FRAGMENT_TESTS_BIT = 0x00000200ULL; -static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_LATE_FRAGMENT_TESTS_BIT_KHR = 0x00000200ULL; static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT = 0x00000400ULL; -static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT_KHR = 0x00000400ULL; static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_COMPUTE_SHADER_BIT = 0x00000800ULL; -static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_COMPUTE_SHADER_BIT_KHR = 0x00000800ULL; static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_ALL_TRANSFER_BIT = 0x00001000ULL; -static const VkPipelineStageFlagBits2 
VK_PIPELINE_STAGE_2_ALL_TRANSFER_BIT_KHR = 0x00001000ULL; static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_TRANSFER_BIT = 0x00001000ULL; -static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR = 0x00001000ULL; static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_BOTTOM_OF_PIPE_BIT = 0x00002000ULL; -static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_BOTTOM_OF_PIPE_BIT_KHR = 0x00002000ULL; static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_HOST_BIT = 0x00004000ULL; -static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_HOST_BIT_KHR = 0x00004000ULL; static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT = 0x00008000ULL; -static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR = 0x00008000ULL; static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT = 0x00010000ULL; -static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR = 0x00010000ULL; static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_COPY_BIT = 0x100000000ULL; -static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_COPY_BIT_KHR = 0x100000000ULL; static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_RESOLVE_BIT = 0x200000000ULL; -static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_RESOLVE_BIT_KHR = 0x200000000ULL; static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_BLIT_BIT = 0x400000000ULL; -static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_BLIT_BIT_KHR = 0x400000000ULL; static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_CLEAR_BIT = 0x800000000ULL; -static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_CLEAR_BIT_KHR = 0x800000000ULL; static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_INDEX_INPUT_BIT = 0x1000000000ULL; -static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_INDEX_INPUT_BIT_KHR = 0x1000000000ULL; static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_VERTEX_ATTRIBUTE_INPUT_BIT = 0x2000000000ULL; -static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_VERTEX_ATTRIBUTE_INPUT_BIT_KHR = 0x2000000000ULL; static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_PRE_RASTERIZATION_SHADERS_BIT = 0x4000000000ULL; -static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_PRE_RASTERIZATION_SHADERS_BIT_KHR = 0x4000000000ULL; static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_VIDEO_DECODE_BIT_KHR = 0x04000000ULL; static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_VIDEO_ENCODE_BIT_KHR = 0x08000000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_NONE_KHR = 0ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT_KHR = 0x00000001ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_DRAW_INDIRECT_BIT_KHR = 0x00000002ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_VERTEX_INPUT_BIT_KHR = 0x00000004ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_VERTEX_SHADER_BIT_KHR = 0x00000008ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_TESSELLATION_CONTROL_SHADER_BIT_KHR = 0x00000010ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_TESSELLATION_EVALUATION_SHADER_BIT_KHR = 0x00000020ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_GEOMETRY_SHADER_BIT_KHR = 0x00000040ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_FRAGMENT_SHADER_BIT_KHR = 0x00000080ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_EARLY_FRAGMENT_TESTS_BIT_KHR = 0x00000100ULL; +static const VkPipelineStageFlagBits2 
VK_PIPELINE_STAGE_2_LATE_FRAGMENT_TESTS_BIT_KHR = 0x00000200ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT_KHR = 0x00000400ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_COMPUTE_SHADER_BIT_KHR = 0x00000800ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_ALL_TRANSFER_BIT_KHR = 0x00001000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR = 0x00001000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_BOTTOM_OF_PIPE_BIT_KHR = 0x00002000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_HOST_BIT_KHR = 0x00004000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR = 0x00008000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR = 0x00010000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_COPY_BIT_KHR = 0x100000000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_RESOLVE_BIT_KHR = 0x200000000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_BLIT_BIT_KHR = 0x400000000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_CLEAR_BIT_KHR = 0x800000000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_INDEX_INPUT_BIT_KHR = 0x1000000000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_VERTEX_ATTRIBUTE_INPUT_BIT_KHR = 0x2000000000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_PRE_RASTERIZATION_SHADERS_BIT_KHR = 0x4000000000ULL; static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_TRANSFORM_FEEDBACK_BIT_EXT = 0x01000000ULL; static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_CONDITIONAL_RENDERING_BIT_EXT = 0x00040000ULL; static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_COMMAND_PREPROCESS_BIT_NV = 0x00020000ULL; static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_COMMAND_PREPROCESS_BIT_EXT = 0x00020000ULL; -static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_COMMAND_PREPROCESS_BIT_EXT = 0x00020000ULL; static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR = 0x00400000ULL; static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_SHADING_RATE_IMAGE_BIT_NV = 0x00400000ULL; static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_KHR = 0x02000000ULL; @@ -6794,7 +6789,6 @@ static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_TASK_SHADER_BIT_EXT = static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_MESH_SHADER_BIT_EXT = 0x00100000ULL; static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_SUBPASS_SHADER_BIT_HUAWEI = 0x8000000000ULL; // VK_PIPELINE_STAGE_2_SUBPASS_SHADING_BIT_HUAWEI is a deprecated alias -// VK_PIPELINE_STAGE_2_SUBPASS_SHADING_BIT_HUAWEI is a deprecated alias static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_SUBPASS_SHADING_BIT_HUAWEI = 0x8000000000ULL; static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_INVOCATION_MASK_BIT_HUAWEI = 0x10000000000ULL; static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_COPY_BIT_KHR = 0x10000000ULL; @@ -6807,51 +6801,51 @@ typedef VkFlags64 VkAccessFlags2; // Flag bits for VkAccessFlagBits2 typedef VkFlags64 VkAccessFlagBits2; static const VkAccessFlagBits2 VK_ACCESS_2_NONE = 0ULL; -static const VkAccessFlagBits2 VK_ACCESS_2_NONE_KHR = 0ULL; static const VkAccessFlagBits2 VK_ACCESS_2_INDIRECT_COMMAND_READ_BIT = 0x00000001ULL; -static const VkAccessFlagBits2 VK_ACCESS_2_INDIRECT_COMMAND_READ_BIT_KHR = 
0x00000001ULL; static const VkAccessFlagBits2 VK_ACCESS_2_INDEX_READ_BIT = 0x00000002ULL; -static const VkAccessFlagBits2 VK_ACCESS_2_INDEX_READ_BIT_KHR = 0x00000002ULL; static const VkAccessFlagBits2 VK_ACCESS_2_VERTEX_ATTRIBUTE_READ_BIT = 0x00000004ULL; -static const VkAccessFlagBits2 VK_ACCESS_2_VERTEX_ATTRIBUTE_READ_BIT_KHR = 0x00000004ULL; static const VkAccessFlagBits2 VK_ACCESS_2_UNIFORM_READ_BIT = 0x00000008ULL; -static const VkAccessFlagBits2 VK_ACCESS_2_UNIFORM_READ_BIT_KHR = 0x00000008ULL; static const VkAccessFlagBits2 VK_ACCESS_2_INPUT_ATTACHMENT_READ_BIT = 0x00000010ULL; -static const VkAccessFlagBits2 VK_ACCESS_2_INPUT_ATTACHMENT_READ_BIT_KHR = 0x00000010ULL; static const VkAccessFlagBits2 VK_ACCESS_2_SHADER_READ_BIT = 0x00000020ULL; -static const VkAccessFlagBits2 VK_ACCESS_2_SHADER_READ_BIT_KHR = 0x00000020ULL; static const VkAccessFlagBits2 VK_ACCESS_2_SHADER_WRITE_BIT = 0x00000040ULL; -static const VkAccessFlagBits2 VK_ACCESS_2_SHADER_WRITE_BIT_KHR = 0x00000040ULL; static const VkAccessFlagBits2 VK_ACCESS_2_COLOR_ATTACHMENT_READ_BIT = 0x00000080ULL; -static const VkAccessFlagBits2 VK_ACCESS_2_COLOR_ATTACHMENT_READ_BIT_KHR = 0x00000080ULL; static const VkAccessFlagBits2 VK_ACCESS_2_COLOR_ATTACHMENT_WRITE_BIT = 0x00000100ULL; -static const VkAccessFlagBits2 VK_ACCESS_2_COLOR_ATTACHMENT_WRITE_BIT_KHR = 0x00000100ULL; static const VkAccessFlagBits2 VK_ACCESS_2_DEPTH_STENCIL_ATTACHMENT_READ_BIT = 0x00000200ULL; -static const VkAccessFlagBits2 VK_ACCESS_2_DEPTH_STENCIL_ATTACHMENT_READ_BIT_KHR = 0x00000200ULL; static const VkAccessFlagBits2 VK_ACCESS_2_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT = 0x00000400ULL; -static const VkAccessFlagBits2 VK_ACCESS_2_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT_KHR = 0x00000400ULL; static const VkAccessFlagBits2 VK_ACCESS_2_TRANSFER_READ_BIT = 0x00000800ULL; -static const VkAccessFlagBits2 VK_ACCESS_2_TRANSFER_READ_BIT_KHR = 0x00000800ULL; static const VkAccessFlagBits2 VK_ACCESS_2_TRANSFER_WRITE_BIT = 0x00001000ULL; -static const VkAccessFlagBits2 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR = 0x00001000ULL; static const VkAccessFlagBits2 VK_ACCESS_2_HOST_READ_BIT = 0x00002000ULL; -static const VkAccessFlagBits2 VK_ACCESS_2_HOST_READ_BIT_KHR = 0x00002000ULL; static const VkAccessFlagBits2 VK_ACCESS_2_HOST_WRITE_BIT = 0x00004000ULL; -static const VkAccessFlagBits2 VK_ACCESS_2_HOST_WRITE_BIT_KHR = 0x00004000ULL; static const VkAccessFlagBits2 VK_ACCESS_2_MEMORY_READ_BIT = 0x00008000ULL; -static const VkAccessFlagBits2 VK_ACCESS_2_MEMORY_READ_BIT_KHR = 0x00008000ULL; static const VkAccessFlagBits2 VK_ACCESS_2_MEMORY_WRITE_BIT = 0x00010000ULL; -static const VkAccessFlagBits2 VK_ACCESS_2_MEMORY_WRITE_BIT_KHR = 0x00010000ULL; static const VkAccessFlagBits2 VK_ACCESS_2_SHADER_SAMPLED_READ_BIT = 0x100000000ULL; -static const VkAccessFlagBits2 VK_ACCESS_2_SHADER_SAMPLED_READ_BIT_KHR = 0x100000000ULL; static const VkAccessFlagBits2 VK_ACCESS_2_SHADER_STORAGE_READ_BIT = 0x200000000ULL; -static const VkAccessFlagBits2 VK_ACCESS_2_SHADER_STORAGE_READ_BIT_KHR = 0x200000000ULL; static const VkAccessFlagBits2 VK_ACCESS_2_SHADER_STORAGE_WRITE_BIT = 0x400000000ULL; -static const VkAccessFlagBits2 VK_ACCESS_2_SHADER_STORAGE_WRITE_BIT_KHR = 0x400000000ULL; static const VkAccessFlagBits2 VK_ACCESS_2_VIDEO_DECODE_READ_BIT_KHR = 0x800000000ULL; static const VkAccessFlagBits2 VK_ACCESS_2_VIDEO_DECODE_WRITE_BIT_KHR = 0x1000000000ULL; static const VkAccessFlagBits2 VK_ACCESS_2_VIDEO_ENCODE_READ_BIT_KHR = 0x2000000000ULL; static const VkAccessFlagBits2 VK_ACCESS_2_VIDEO_ENCODE_WRITE_BIT_KHR = 
0x4000000000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_NONE_KHR = 0ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_INDIRECT_COMMAND_READ_BIT_KHR = 0x00000001ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_INDEX_READ_BIT_KHR = 0x00000002ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_VERTEX_ATTRIBUTE_READ_BIT_KHR = 0x00000004ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_UNIFORM_READ_BIT_KHR = 0x00000008ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_INPUT_ATTACHMENT_READ_BIT_KHR = 0x00000010ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_SHADER_READ_BIT_KHR = 0x00000020ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_SHADER_WRITE_BIT_KHR = 0x00000040ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_COLOR_ATTACHMENT_READ_BIT_KHR = 0x00000080ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_COLOR_ATTACHMENT_WRITE_BIT_KHR = 0x00000100ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_DEPTH_STENCIL_ATTACHMENT_READ_BIT_KHR = 0x00000200ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT_KHR = 0x00000400ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_TRANSFER_READ_BIT_KHR = 0x00000800ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR = 0x00001000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_HOST_READ_BIT_KHR = 0x00002000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_HOST_WRITE_BIT_KHR = 0x00004000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_MEMORY_READ_BIT_KHR = 0x00008000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_MEMORY_WRITE_BIT_KHR = 0x00010000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_SHADER_SAMPLED_READ_BIT_KHR = 0x100000000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_SHADER_STORAGE_READ_BIT_KHR = 0x200000000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_SHADER_STORAGE_WRITE_BIT_KHR = 0x400000000ULL; static const VkAccessFlagBits2 VK_ACCESS_2_TRANSFORM_FEEDBACK_WRITE_BIT_EXT = 0x02000000ULL; static const VkAccessFlagBits2 VK_ACCESS_2_TRANSFORM_FEEDBACK_COUNTER_READ_BIT_EXT = 0x04000000ULL; static const VkAccessFlagBits2 VK_ACCESS_2_TRANSFORM_FEEDBACK_COUNTER_WRITE_BIT_EXT = 0x08000000ULL; @@ -6860,8 +6854,6 @@ static const VkAccessFlagBits2 VK_ACCESS_2_COMMAND_PREPROCESS_READ_BIT_NV = 0x00 static const VkAccessFlagBits2 VK_ACCESS_2_COMMAND_PREPROCESS_WRITE_BIT_NV = 0x00040000ULL; static const VkAccessFlagBits2 VK_ACCESS_2_COMMAND_PREPROCESS_READ_BIT_EXT = 0x00020000ULL; static const VkAccessFlagBits2 VK_ACCESS_2_COMMAND_PREPROCESS_WRITE_BIT_EXT = 0x00040000ULL; -static const VkAccessFlagBits2 VK_ACCESS_2_COMMAND_PREPROCESS_READ_BIT_EXT = 0x00020000ULL; -static const VkAccessFlagBits2 VK_ACCESS_2_COMMAND_PREPROCESS_WRITE_BIT_EXT = 0x00040000ULL; static const VkAccessFlagBits2 VK_ACCESS_2_FRAGMENT_SHADING_RATE_ATTACHMENT_READ_BIT_KHR = 0x00800000ULL; static const VkAccessFlagBits2 VK_ACCESS_2_SHADING_RATE_IMAGE_READ_BIT_NV = 0x00800000ULL; static const VkAccessFlagBits2 VK_ACCESS_2_ACCELERATION_STRUCTURE_READ_BIT_KHR = 0x00200000ULL; @@ -6892,12 +6884,10 @@ typedef enum VkRenderingFlagBits { VK_RENDERING_RESUMING_BIT = 0x00000004, VK_RENDERING_ENABLE_LEGACY_DITHERING_BIT_EXT = 0x00000008, VK_RENDERING_CONTENTS_INLINE_BIT_KHR = 0x00000010, - VK_RENDERING_CONTENTS_INLINE_BIT_KHR = 0x00000010, VK_RENDERING_CONTENTS_SECONDARY_COMMAND_BUFFERS_BIT_KHR = VK_RENDERING_CONTENTS_SECONDARY_COMMAND_BUFFERS_BIT, VK_RENDERING_SUSPENDING_BIT_KHR = VK_RENDERING_SUSPENDING_BIT, VK_RENDERING_RESUMING_BIT_KHR = VK_RENDERING_RESUMING_BIT, VK_RENDERING_CONTENTS_INLINE_BIT_EXT = VK_RENDERING_CONTENTS_INLINE_BIT_KHR, - 
VK_RENDERING_CONTENTS_INLINE_BIT_EXT = VK_RENDERING_CONTENTS_INLINE_BIT_KHR, VK_RENDERING_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF } VkRenderingFlagBits; typedef VkFlags VkRenderingFlags; @@ -6906,59 +6896,33 @@ typedef VkFlags64 VkFormatFeatureFlags2; // Flag bits for VkFormatFeatureFlagBits2 typedef VkFlags64 VkFormatFeatureFlagBits2; static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_BIT = 0x00000001ULL; -static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_BIT_KHR = 0x00000001ULL; static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_STORAGE_IMAGE_BIT = 0x00000002ULL; -static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_STORAGE_IMAGE_BIT_KHR = 0x00000002ULL; static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_STORAGE_IMAGE_ATOMIC_BIT = 0x00000004ULL; -static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_STORAGE_IMAGE_ATOMIC_BIT_KHR = 0x00000004ULL; static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_UNIFORM_TEXEL_BUFFER_BIT = 0x00000008ULL; -static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_UNIFORM_TEXEL_BUFFER_BIT_KHR = 0x00000008ULL; static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_STORAGE_TEXEL_BUFFER_BIT = 0x00000010ULL; -static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_STORAGE_TEXEL_BUFFER_BIT_KHR = 0x00000010ULL; static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_STORAGE_TEXEL_BUFFER_ATOMIC_BIT = 0x00000020ULL; -static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_STORAGE_TEXEL_BUFFER_ATOMIC_BIT_KHR = 0x00000020ULL; static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_VERTEX_BUFFER_BIT = 0x00000040ULL; -static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_VERTEX_BUFFER_BIT_KHR = 0x00000040ULL; static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_COLOR_ATTACHMENT_BIT = 0x00000080ULL; -static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_COLOR_ATTACHMENT_BIT_KHR = 0x00000080ULL; static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_COLOR_ATTACHMENT_BLEND_BIT = 0x00000100ULL; -static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_COLOR_ATTACHMENT_BLEND_BIT_KHR = 0x00000100ULL; static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_DEPTH_STENCIL_ATTACHMENT_BIT = 0x00000200ULL; -static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_DEPTH_STENCIL_ATTACHMENT_BIT_KHR = 0x00000200ULL; static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_BLIT_SRC_BIT = 0x00000400ULL; -static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_BLIT_SRC_BIT_KHR = 0x00000400ULL; static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_BLIT_DST_BIT = 0x00000800ULL; -static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_BLIT_DST_BIT_KHR = 0x00000800ULL; static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_LINEAR_BIT = 0x00001000ULL; -static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_LINEAR_BIT_KHR = 0x00001000ULL; -static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_CUBIC_BIT = 0x00002000ULL; -static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT = 0x00002000ULL; static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_TRANSFER_SRC_BIT = 0x00004000ULL; -static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_TRANSFER_SRC_BIT_KHR = 0x00004000ULL; static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_TRANSFER_DST_BIT = 0x00008000ULL; -static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_TRANSFER_DST_BIT_KHR = 0x00008000ULL; static const 
VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_MINMAX_BIT = 0x00010000ULL; -static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_MINMAX_BIT_KHR = 0x00010000ULL; static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_MIDPOINT_CHROMA_SAMPLES_BIT = 0x00020000ULL; -static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_MIDPOINT_CHROMA_SAMPLES_BIT_KHR = 0x00020000ULL; static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT = 0x00040000ULL; -static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT_KHR = 0x00040000ULL; static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT = 0x00080000ULL; -static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT_KHR = 0x00080000ULL; static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT = 0x00100000ULL; -static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT_KHR = 0x00100000ULL; static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT = 0x00200000ULL; -static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT_KHR = 0x00200000ULL; static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_DISJOINT_BIT = 0x00400000ULL; -static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_DISJOINT_BIT_KHR = 0x00400000ULL; static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_COSITED_CHROMA_SAMPLES_BIT = 0x00800000ULL; -static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_COSITED_CHROMA_SAMPLES_BIT_KHR = 0x00800000ULL; static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_STORAGE_READ_WITHOUT_FORMAT_BIT = 0x80000000ULL; -static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_STORAGE_READ_WITHOUT_FORMAT_BIT_KHR = 0x80000000ULL; static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_STORAGE_WRITE_WITHOUT_FORMAT_BIT = 0x100000000ULL; -static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_STORAGE_WRITE_WITHOUT_FORMAT_BIT_KHR = 0x100000000ULL; static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_DEPTH_COMPARISON_BIT = 0x200000000ULL; -static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_DEPTH_COMPARISON_BIT_KHR = 0x200000000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_CUBIC_BIT = 0x00002000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_HOST_IMAGE_TRANSFER_BIT = 0x400000000000ULL; static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_VIDEO_DECODE_OUTPUT_BIT_KHR = 0x02000000ULL; static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_VIDEO_DECODE_DPB_BIT_KHR = 0x04000000ULL; static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_ACCELERATION_STRUCTURE_VERTEX_BUFFER_BIT_KHR = 0x20000000ULL; @@ -6967,6 +6931,33 @@ static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_FRAGMENT_SHADING_RATE_ static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_HOST_IMAGE_TRANSFER_BIT_EXT = 0x400000000000ULL; static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_VIDEO_ENCODE_INPUT_BIT_KHR = 0x08000000ULL; static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_VIDEO_ENCODE_DPB_BIT_KHR = 
0x10000000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_BIT_KHR = 0x00000001ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_STORAGE_IMAGE_BIT_KHR = 0x00000002ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_STORAGE_IMAGE_ATOMIC_BIT_KHR = 0x00000004ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_UNIFORM_TEXEL_BUFFER_BIT_KHR = 0x00000008ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_STORAGE_TEXEL_BUFFER_BIT_KHR = 0x00000010ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_STORAGE_TEXEL_BUFFER_ATOMIC_BIT_KHR = 0x00000020ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_VERTEX_BUFFER_BIT_KHR = 0x00000040ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_COLOR_ATTACHMENT_BIT_KHR = 0x00000080ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_COLOR_ATTACHMENT_BLEND_BIT_KHR = 0x00000100ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_DEPTH_STENCIL_ATTACHMENT_BIT_KHR = 0x00000200ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_BLIT_SRC_BIT_KHR = 0x00000400ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_BLIT_DST_BIT_KHR = 0x00000800ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_LINEAR_BIT_KHR = 0x00001000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_TRANSFER_SRC_BIT_KHR = 0x00004000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_TRANSFER_DST_BIT_KHR = 0x00008000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_MIDPOINT_CHROMA_SAMPLES_BIT_KHR = 0x00020000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT_KHR = 0x00040000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT_KHR = 0x00080000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT_KHR = 0x00100000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT_KHR = 0x00200000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_DISJOINT_BIT_KHR = 0x00400000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_COSITED_CHROMA_SAMPLES_BIT_KHR = 0x00800000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_STORAGE_READ_WITHOUT_FORMAT_BIT_KHR = 0x80000000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_STORAGE_WRITE_WITHOUT_FORMAT_BIT_KHR = 0x100000000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_DEPTH_COMPARISON_BIT_KHR = 0x200000000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_MINMAX_BIT_KHR = 0x00010000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT = 0x00002000ULL; static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_LINEAR_COLOR_ATTACHMENT_BIT_NV = 0x4000000000ULL; static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_WEIGHT_IMAGE_BIT_QCOM = 0x400000000ULL; static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_WEIGHT_SAMPLED_IMAGE_BIT_QCOM = 0x800000000ULL; @@ -7730,6 +7721,731 @@ VKAPI_ATTR void VKAPI_CALL vkGetDeviceImageSparseMemoryRequirements( #endif +// VK_VERSION_1_4 is a preprocessor guard. Do not pass it to API calls. 
+#define VK_VERSION_1_4 1 +// Vulkan 1.4 version number +#define VK_API_VERSION_1_4 VK_MAKE_API_VERSION(0, 1, 4, 0)// Patch version should always be set to 0 + +#define VK_MAX_GLOBAL_PRIORITY_SIZE 16U + +typedef enum VkPipelineRobustnessBufferBehavior { + VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_DEVICE_DEFAULT = 0, + VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_DISABLED = 1, + VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_ROBUST_BUFFER_ACCESS = 2, + VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_ROBUST_BUFFER_ACCESS_2 = 3, + VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_DEVICE_DEFAULT_EXT = VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_DEVICE_DEFAULT, + VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_DISABLED_EXT = VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_DISABLED, + VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_ROBUST_BUFFER_ACCESS_EXT = VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_ROBUST_BUFFER_ACCESS, + VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_ROBUST_BUFFER_ACCESS_2_EXT = VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_ROBUST_BUFFER_ACCESS_2, + VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_MAX_ENUM = 0x7FFFFFFF +} VkPipelineRobustnessBufferBehavior; + +typedef enum VkPipelineRobustnessImageBehavior { + VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_DEVICE_DEFAULT = 0, + VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_DISABLED = 1, + VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_ROBUST_IMAGE_ACCESS = 2, + VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_ROBUST_IMAGE_ACCESS_2 = 3, + VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_DEVICE_DEFAULT_EXT = VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_DEVICE_DEFAULT, + VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_DISABLED_EXT = VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_DISABLED, + VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_ROBUST_IMAGE_ACCESS_EXT = VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_ROBUST_IMAGE_ACCESS, + VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_ROBUST_IMAGE_ACCESS_2_EXT = VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_ROBUST_IMAGE_ACCESS_2, + VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_MAX_ENUM = 0x7FFFFFFF +} VkPipelineRobustnessImageBehavior; + +typedef enum VkQueueGlobalPriority { + VK_QUEUE_GLOBAL_PRIORITY_LOW = 128, + VK_QUEUE_GLOBAL_PRIORITY_MEDIUM = 256, + VK_QUEUE_GLOBAL_PRIORITY_HIGH = 512, + VK_QUEUE_GLOBAL_PRIORITY_REALTIME = 1024, + VK_QUEUE_GLOBAL_PRIORITY_LOW_EXT = VK_QUEUE_GLOBAL_PRIORITY_LOW, + VK_QUEUE_GLOBAL_PRIORITY_MEDIUM_EXT = VK_QUEUE_GLOBAL_PRIORITY_MEDIUM, + VK_QUEUE_GLOBAL_PRIORITY_HIGH_EXT = VK_QUEUE_GLOBAL_PRIORITY_HIGH, + VK_QUEUE_GLOBAL_PRIORITY_REALTIME_EXT = VK_QUEUE_GLOBAL_PRIORITY_REALTIME, + VK_QUEUE_GLOBAL_PRIORITY_LOW_KHR = VK_QUEUE_GLOBAL_PRIORITY_LOW, + VK_QUEUE_GLOBAL_PRIORITY_MEDIUM_KHR = VK_QUEUE_GLOBAL_PRIORITY_MEDIUM, + VK_QUEUE_GLOBAL_PRIORITY_HIGH_KHR = VK_QUEUE_GLOBAL_PRIORITY_HIGH, + VK_QUEUE_GLOBAL_PRIORITY_REALTIME_KHR = VK_QUEUE_GLOBAL_PRIORITY_REALTIME, + VK_QUEUE_GLOBAL_PRIORITY_MAX_ENUM = 0x7FFFFFFF +} VkQueueGlobalPriority; + +typedef enum VkLineRasterizationMode { + VK_LINE_RASTERIZATION_MODE_DEFAULT = 0, + VK_LINE_RASTERIZATION_MODE_RECTANGULAR = 1, + VK_LINE_RASTERIZATION_MODE_BRESENHAM = 2, + VK_LINE_RASTERIZATION_MODE_RECTANGULAR_SMOOTH = 3, + VK_LINE_RASTERIZATION_MODE_DEFAULT_EXT = VK_LINE_RASTERIZATION_MODE_DEFAULT, + VK_LINE_RASTERIZATION_MODE_RECTANGULAR_EXT = VK_LINE_RASTERIZATION_MODE_RECTANGULAR, + VK_LINE_RASTERIZATION_MODE_BRESENHAM_EXT = VK_LINE_RASTERIZATION_MODE_BRESENHAM, + VK_LINE_RASTERIZATION_MODE_RECTANGULAR_SMOOTH_EXT = VK_LINE_RASTERIZATION_MODE_RECTANGULAR_SMOOTH, + VK_LINE_RASTERIZATION_MODE_DEFAULT_KHR = VK_LINE_RASTERIZATION_MODE_DEFAULT, + VK_LINE_RASTERIZATION_MODE_RECTANGULAR_KHR = VK_LINE_RASTERIZATION_MODE_RECTANGULAR, 
+ VK_LINE_RASTERIZATION_MODE_BRESENHAM_KHR = VK_LINE_RASTERIZATION_MODE_BRESENHAM, + VK_LINE_RASTERIZATION_MODE_RECTANGULAR_SMOOTH_KHR = VK_LINE_RASTERIZATION_MODE_RECTANGULAR_SMOOTH, + VK_LINE_RASTERIZATION_MODE_MAX_ENUM = 0x7FFFFFFF +} VkLineRasterizationMode; + +typedef enum VkMemoryUnmapFlagBits { + VK_MEMORY_UNMAP_RESERVE_BIT_EXT = 0x00000001, + VK_MEMORY_UNMAP_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkMemoryUnmapFlagBits; +typedef VkFlags VkMemoryUnmapFlags; +typedef VkFlags64 VkPipelineCreateFlags2; + +// Flag bits for VkPipelineCreateFlagBits2 +typedef VkFlags64 VkPipelineCreateFlagBits2; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_DISABLE_OPTIMIZATION_BIT = 0x00000001ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_ALLOW_DERIVATIVES_BIT = 0x00000002ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_DERIVATIVE_BIT = 0x00000004ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_VIEW_INDEX_FROM_DEVICE_INDEX_BIT = 0x00000008ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_DISPATCH_BASE_BIT = 0x00000010ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT = 0x00000100ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_EARLY_RETURN_ON_FAILURE_BIT = 0x00000200ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_NO_PROTECTED_ACCESS_BIT = 0x08000000ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_PROTECTED_ACCESS_ONLY_BIT = 0x40000000ULL; +#ifdef VK_ENABLE_BETA_EXTENSIONS +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_EXECUTION_GRAPH_BIT_AMDX = 0x100000000ULL; +#endif +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_ENABLE_LEGACY_DITHERING_BIT_EXT = 0x400000000ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_DISABLE_OPTIMIZATION_BIT_KHR = 0x00000001ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_ALLOW_DERIVATIVES_BIT_KHR = 0x00000002ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_DERIVATIVE_BIT_KHR = 0x00000004ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_VIEW_INDEX_FROM_DEVICE_INDEX_BIT_KHR = 0x00000008ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_DISPATCH_BASE_BIT_KHR = 0x00000010ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_DEFER_COMPILE_BIT_NV = 0x00000020ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_CAPTURE_STATISTICS_BIT_KHR = 0x00000040ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_CAPTURE_INTERNAL_REPRESENTATIONS_BIT_KHR = 0x00000080ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT_KHR = 0x00000100ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_EARLY_RETURN_ON_FAILURE_BIT_KHR = 0x00000200ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_LINK_TIME_OPTIMIZATION_BIT_EXT = 0x00000400ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_RETAIN_LINK_TIME_OPTIMIZATION_INFO_BIT_EXT = 0x00800000ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_LIBRARY_BIT_KHR = 0x00000800ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_RAY_TRACING_SKIP_TRIANGLES_BIT_KHR = 0x00001000ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_RAY_TRACING_SKIP_AABBS_BIT_KHR = 0x00002000ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_RAY_TRACING_NO_NULL_ANY_HIT_SHADERS_BIT_KHR = 0x00004000ULL; +static const 
VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_RAY_TRACING_NO_NULL_CLOSEST_HIT_SHADERS_BIT_KHR = 0x00008000ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_RAY_TRACING_NO_NULL_MISS_SHADERS_BIT_KHR = 0x00010000ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_RAY_TRACING_NO_NULL_INTERSECTION_SHADERS_BIT_KHR = 0x00020000ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_RAY_TRACING_SHADER_GROUP_HANDLE_CAPTURE_REPLAY_BIT_KHR = 0x00080000ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_INDIRECT_BINDABLE_BIT_NV = 0x00040000ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_RAY_TRACING_ALLOW_MOTION_BIT_NV = 0x00100000ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR = 0x00200000ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT = 0x00400000ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_RAY_TRACING_OPACITY_MICROMAP_BIT_EXT = 0x01000000ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_COLOR_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT = 0x02000000ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_DEPTH_STENCIL_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT = 0x04000000ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_NO_PROTECTED_ACCESS_BIT_EXT = 0x08000000ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_PROTECTED_ACCESS_ONLY_BIT_EXT = 0x40000000ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_RAY_TRACING_DISPLACEMENT_MICROMAP_BIT_NV = 0x10000000ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_DESCRIPTOR_BUFFER_BIT_EXT = 0x20000000ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_CAPTURE_DATA_BIT_KHR = 0x80000000ULL; +static const VkPipelineCreateFlagBits2 VK_PIPELINE_CREATE_2_INDIRECT_BINDABLE_BIT_EXT = 0x4000000000ULL; + +typedef VkFlags64 VkBufferUsageFlags2; + +// Flag bits for VkBufferUsageFlagBits2 +typedef VkFlags64 VkBufferUsageFlagBits2; +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_TRANSFER_SRC_BIT = 0x00000001ULL; +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_TRANSFER_DST_BIT = 0x00000002ULL; +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_UNIFORM_TEXEL_BUFFER_BIT = 0x00000004ULL; +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_STORAGE_TEXEL_BUFFER_BIT = 0x00000008ULL; +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_UNIFORM_BUFFER_BIT = 0x00000010ULL; +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_STORAGE_BUFFER_BIT = 0x00000020ULL; +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_INDEX_BUFFER_BIT = 0x00000040ULL; +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_VERTEX_BUFFER_BIT = 0x00000080ULL; +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_INDIRECT_BUFFER_BIT = 0x00000100ULL; +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_SHADER_DEVICE_ADDRESS_BIT = 0x00020000ULL; +#ifdef VK_ENABLE_BETA_EXTENSIONS +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_EXECUTION_GRAPH_SCRATCH_BIT_AMDX = 0x02000000ULL; +#endif +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_TRANSFER_SRC_BIT_KHR = 0x00000001ULL; +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_TRANSFER_DST_BIT_KHR = 0x00000002ULL; +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_UNIFORM_TEXEL_BUFFER_BIT_KHR = 0x00000004ULL; +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_STORAGE_TEXEL_BUFFER_BIT_KHR = 0x00000008ULL; +static 
const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_UNIFORM_BUFFER_BIT_KHR = 0x00000010ULL; +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_STORAGE_BUFFER_BIT_KHR = 0x00000020ULL; +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_INDEX_BUFFER_BIT_KHR = 0x00000040ULL; +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_VERTEX_BUFFER_BIT_KHR = 0x00000080ULL; +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_INDIRECT_BUFFER_BIT_KHR = 0x00000100ULL; +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_CONDITIONAL_RENDERING_BIT_EXT = 0x00000200ULL; +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_SHADER_BINDING_TABLE_BIT_KHR = 0x00000400ULL; +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_RAY_TRACING_BIT_NV = 0x00000400ULL; +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_TRANSFORM_FEEDBACK_BUFFER_BIT_EXT = 0x00000800ULL; +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_TRANSFORM_FEEDBACK_COUNTER_BUFFER_BIT_EXT = 0x00001000ULL; +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_VIDEO_DECODE_SRC_BIT_KHR = 0x00002000ULL; +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_VIDEO_DECODE_DST_BIT_KHR = 0x00004000ULL; +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_VIDEO_ENCODE_DST_BIT_KHR = 0x00008000ULL; +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_VIDEO_ENCODE_SRC_BIT_KHR = 0x00010000ULL; +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_SHADER_DEVICE_ADDRESS_BIT_KHR = 0x00020000ULL; +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_ACCELERATION_STRUCTURE_BUILD_INPUT_READ_ONLY_BIT_KHR = 0x00080000ULL; +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_ACCELERATION_STRUCTURE_STORAGE_BIT_KHR = 0x00100000ULL; +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_SAMPLER_DESCRIPTOR_BUFFER_BIT_EXT = 0x00200000ULL; +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_RESOURCE_DESCRIPTOR_BUFFER_BIT_EXT = 0x00400000ULL; +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_PUSH_DESCRIPTORS_DESCRIPTOR_BUFFER_BIT_EXT = 0x04000000ULL; +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_MICROMAP_BUILD_INPUT_READ_ONLY_BIT_EXT = 0x00800000ULL; +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_MICROMAP_STORAGE_BIT_EXT = 0x01000000ULL; +static const VkBufferUsageFlagBits2 VK_BUFFER_USAGE_2_PREPROCESS_BUFFER_BIT_EXT = 0x80000000ULL; + + +typedef enum VkHostImageCopyFlagBits { + VK_HOST_IMAGE_COPY_MEMCPY = 0x00000001, + VK_HOST_IMAGE_COPY_MEMCPY_EXT = VK_HOST_IMAGE_COPY_MEMCPY, + VK_HOST_IMAGE_COPY_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkHostImageCopyFlagBits; +typedef VkFlags VkHostImageCopyFlags; +typedef struct VkPhysicalDeviceVulkan14Features { + VkStructureType sType; + void* pNext; + VkBool32 globalPriorityQuery; + VkBool32 shaderSubgroupRotate; + VkBool32 shaderSubgroupRotateClustered; + VkBool32 shaderFloatControls2; + VkBool32 shaderExpectAssume; + VkBool32 rectangularLines; + VkBool32 bresenhamLines; + VkBool32 smoothLines; + VkBool32 stippledRectangularLines; + VkBool32 stippledBresenhamLines; + VkBool32 stippledSmoothLines; + VkBool32 vertexAttributeInstanceRateDivisor; + VkBool32 vertexAttributeInstanceRateZeroDivisor; + VkBool32 indexTypeUint8; + VkBool32 dynamicRenderingLocalRead; + VkBool32 maintenance5; + VkBool32 maintenance6; + VkBool32 pipelineProtectedAccess; + VkBool32 pipelineRobustness; + VkBool32 hostImageCopy; + VkBool32 pushDescriptor; +} VkPhysicalDeviceVulkan14Features; + +typedef struct VkPhysicalDeviceVulkan14Properties { + VkStructureType sType; + void* pNext; + uint32_t lineSubPixelPrecisionBits; 
+ uint32_t maxVertexAttribDivisor; + VkBool32 supportsNonZeroFirstInstance; + uint32_t maxPushDescriptors; + VkBool32 dynamicRenderingLocalReadDepthStencilAttachments; + VkBool32 dynamicRenderingLocalReadMultisampledAttachments; + VkBool32 earlyFragmentMultisampleCoverageAfterSampleCounting; + VkBool32 earlyFragmentSampleMaskTestBeforeSampleCounting; + VkBool32 depthStencilSwizzleOneSupport; + VkBool32 polygonModePointSize; + VkBool32 nonStrictSinglePixelWideLinesUseParallelogram; + VkBool32 nonStrictWideLinesUseParallelogram; + VkBool32 blockTexelViewCompatibleMultipleLayers; + uint32_t maxCombinedImageSamplerDescriptorCount; + VkBool32 fragmentShadingRateClampCombinerInputs; + VkPipelineRobustnessBufferBehavior defaultRobustnessStorageBuffers; + VkPipelineRobustnessBufferBehavior defaultRobustnessUniformBuffers; + VkPipelineRobustnessBufferBehavior defaultRobustnessVertexInputs; + VkPipelineRobustnessImageBehavior defaultRobustnessImages; + uint32_t copySrcLayoutCount; + VkImageLayout* pCopySrcLayouts; + uint32_t copyDstLayoutCount; + VkImageLayout* pCopyDstLayouts; + uint8_t optimalTilingLayoutUUID[VK_UUID_SIZE]; + VkBool32 identicalMemoryTypeRequirements; +} VkPhysicalDeviceVulkan14Properties; + +typedef struct VkDeviceQueueGlobalPriorityCreateInfo { + VkStructureType sType; + const void* pNext; + VkQueueGlobalPriority globalPriority; +} VkDeviceQueueGlobalPriorityCreateInfo; + +typedef struct VkPhysicalDeviceGlobalPriorityQueryFeatures { + VkStructureType sType; + void* pNext; + VkBool32 globalPriorityQuery; +} VkPhysicalDeviceGlobalPriorityQueryFeatures; + +typedef struct VkQueueFamilyGlobalPriorityProperties { + VkStructureType sType; + void* pNext; + uint32_t priorityCount; + VkQueueGlobalPriority priorities[VK_MAX_GLOBAL_PRIORITY_SIZE]; +} VkQueueFamilyGlobalPriorityProperties; + +typedef struct VkPhysicalDeviceShaderSubgroupRotateFeatures { + VkStructureType sType; + void* pNext; + VkBool32 shaderSubgroupRotate; + VkBool32 shaderSubgroupRotateClustered; +} VkPhysicalDeviceShaderSubgroupRotateFeatures; + +typedef struct VkPhysicalDeviceShaderFloatControls2Features { + VkStructureType sType; + void* pNext; + VkBool32 shaderFloatControls2; +} VkPhysicalDeviceShaderFloatControls2Features; + +typedef struct VkPhysicalDeviceShaderExpectAssumeFeatures { + VkStructureType sType; + void* pNext; + VkBool32 shaderExpectAssume; +} VkPhysicalDeviceShaderExpectAssumeFeatures; + +typedef struct VkPhysicalDeviceLineRasterizationFeatures { + VkStructureType sType; + void* pNext; + VkBool32 rectangularLines; + VkBool32 bresenhamLines; + VkBool32 smoothLines; + VkBool32 stippledRectangularLines; + VkBool32 stippledBresenhamLines; + VkBool32 stippledSmoothLines; +} VkPhysicalDeviceLineRasterizationFeatures; + +typedef struct VkPhysicalDeviceLineRasterizationProperties { + VkStructureType sType; + void* pNext; + uint32_t lineSubPixelPrecisionBits; +} VkPhysicalDeviceLineRasterizationProperties; + +typedef struct VkPipelineRasterizationLineStateCreateInfo { + VkStructureType sType; + const void* pNext; + VkLineRasterizationMode lineRasterizationMode; + VkBool32 stippledLineEnable; + uint32_t lineStippleFactor; + uint16_t lineStipplePattern; +} VkPipelineRasterizationLineStateCreateInfo; + +typedef struct VkPhysicalDeviceVertexAttributeDivisorProperties { + VkStructureType sType; + void* pNext; + uint32_t maxVertexAttribDivisor; + VkBool32 supportsNonZeroFirstInstance; +} VkPhysicalDeviceVertexAttributeDivisorProperties; + +typedef struct VkVertexInputBindingDivisorDescription { + uint32_t binding; + 
uint32_t divisor; +} VkVertexInputBindingDivisorDescription; + +typedef struct VkPipelineVertexInputDivisorStateCreateInfo { + VkStructureType sType; + const void* pNext; + uint32_t vertexBindingDivisorCount; + const VkVertexInputBindingDivisorDescription* pVertexBindingDivisors; +} VkPipelineVertexInputDivisorStateCreateInfo; + +typedef struct VkPhysicalDeviceVertexAttributeDivisorFeatures { + VkStructureType sType; + void* pNext; + VkBool32 vertexAttributeInstanceRateDivisor; + VkBool32 vertexAttributeInstanceRateZeroDivisor; +} VkPhysicalDeviceVertexAttributeDivisorFeatures; + +typedef struct VkPhysicalDeviceIndexTypeUint8Features { + VkStructureType sType; + void* pNext; + VkBool32 indexTypeUint8; +} VkPhysicalDeviceIndexTypeUint8Features; + +typedef struct VkMemoryMapInfo { + VkStructureType sType; + const void* pNext; + VkMemoryMapFlags flags; + VkDeviceMemory memory; + VkDeviceSize offset; + VkDeviceSize size; +} VkMemoryMapInfo; + +typedef struct VkMemoryUnmapInfo { + VkStructureType sType; + const void* pNext; + VkMemoryUnmapFlags flags; + VkDeviceMemory memory; +} VkMemoryUnmapInfo; + +typedef struct VkPhysicalDeviceMaintenance5Features { + VkStructureType sType; + void* pNext; + VkBool32 maintenance5; +} VkPhysicalDeviceMaintenance5Features; + +typedef struct VkPhysicalDeviceMaintenance5Properties { + VkStructureType sType; + void* pNext; + VkBool32 earlyFragmentMultisampleCoverageAfterSampleCounting; + VkBool32 earlyFragmentSampleMaskTestBeforeSampleCounting; + VkBool32 depthStencilSwizzleOneSupport; + VkBool32 polygonModePointSize; + VkBool32 nonStrictSinglePixelWideLinesUseParallelogram; + VkBool32 nonStrictWideLinesUseParallelogram; +} VkPhysicalDeviceMaintenance5Properties; + +typedef struct VkRenderingAreaInfo { + VkStructureType sType; + const void* pNext; + uint32_t viewMask; + uint32_t colorAttachmentCount; + const VkFormat* pColorAttachmentFormats; + VkFormat depthAttachmentFormat; + VkFormat stencilAttachmentFormat; +} VkRenderingAreaInfo; + +typedef struct VkImageSubresource2 { + VkStructureType sType; + void* pNext; + VkImageSubresource imageSubresource; +} VkImageSubresource2; + +typedef struct VkDeviceImageSubresourceInfo { + VkStructureType sType; + const void* pNext; + const VkImageCreateInfo* pCreateInfo; + const VkImageSubresource2* pSubresource; +} VkDeviceImageSubresourceInfo; + +typedef struct VkSubresourceLayout2 { + VkStructureType sType; + void* pNext; + VkSubresourceLayout subresourceLayout; +} VkSubresourceLayout2; + +typedef struct VkPipelineCreateFlags2CreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineCreateFlags2 flags; +} VkPipelineCreateFlags2CreateInfo; + +typedef struct VkBufferUsageFlags2CreateInfo { + VkStructureType sType; + const void* pNext; + VkBufferUsageFlags2 usage; +} VkBufferUsageFlags2CreateInfo; + +typedef struct VkPhysicalDevicePushDescriptorProperties { + VkStructureType sType; + void* pNext; + uint32_t maxPushDescriptors; +} VkPhysicalDevicePushDescriptorProperties; + +typedef struct VkPhysicalDeviceDynamicRenderingLocalReadFeatures { + VkStructureType sType; + void* pNext; + VkBool32 dynamicRenderingLocalRead; +} VkPhysicalDeviceDynamicRenderingLocalReadFeatures; + +typedef struct VkRenderingAttachmentLocationInfo { + VkStructureType sType; + const void* pNext; + uint32_t colorAttachmentCount; + const uint32_t* pColorAttachmentLocations; +} VkRenderingAttachmentLocationInfo; + +typedef struct VkRenderingInputAttachmentIndexInfo { + VkStructureType sType; + const void* pNext; + uint32_t colorAttachmentCount; 
+ const uint32_t* pColorAttachmentInputIndices; + const uint32_t* pDepthInputAttachmentIndex; + const uint32_t* pStencilInputAttachmentIndex; +} VkRenderingInputAttachmentIndexInfo; + +typedef struct VkPhysicalDeviceMaintenance6Features { + VkStructureType sType; + void* pNext; + VkBool32 maintenance6; +} VkPhysicalDeviceMaintenance6Features; + +typedef struct VkPhysicalDeviceMaintenance6Properties { + VkStructureType sType; + void* pNext; + VkBool32 blockTexelViewCompatibleMultipleLayers; + uint32_t maxCombinedImageSamplerDescriptorCount; + VkBool32 fragmentShadingRateClampCombinerInputs; +} VkPhysicalDeviceMaintenance6Properties; + +typedef struct VkBindMemoryStatus { + VkStructureType sType; + const void* pNext; + VkResult* pResult; +} VkBindMemoryStatus; + +typedef struct VkBindDescriptorSetsInfo { + VkStructureType sType; + const void* pNext; + VkShaderStageFlags stageFlags; + VkPipelineLayout layout; + uint32_t firstSet; + uint32_t descriptorSetCount; + const VkDescriptorSet* pDescriptorSets; + uint32_t dynamicOffsetCount; + const uint32_t* pDynamicOffsets; +} VkBindDescriptorSetsInfo; + +typedef struct VkPushConstantsInfo { + VkStructureType sType; + const void* pNext; + VkPipelineLayout layout; + VkShaderStageFlags stageFlags; + uint32_t offset; + uint32_t size; + const void* pValues; +} VkPushConstantsInfo; + +typedef struct VkPushDescriptorSetInfo { + VkStructureType sType; + const void* pNext; + VkShaderStageFlags stageFlags; + VkPipelineLayout layout; + uint32_t set; + uint32_t descriptorWriteCount; + const VkWriteDescriptorSet* pDescriptorWrites; +} VkPushDescriptorSetInfo; + +typedef struct VkPushDescriptorSetWithTemplateInfo { + VkStructureType sType; + const void* pNext; + VkDescriptorUpdateTemplate descriptorUpdateTemplate; + VkPipelineLayout layout; + uint32_t set; + const void* pData; +} VkPushDescriptorSetWithTemplateInfo; + +typedef struct VkPhysicalDevicePipelineProtectedAccessFeatures { + VkStructureType sType; + void* pNext; + VkBool32 pipelineProtectedAccess; +} VkPhysicalDevicePipelineProtectedAccessFeatures; + +typedef struct VkPhysicalDevicePipelineRobustnessFeatures { + VkStructureType sType; + void* pNext; + VkBool32 pipelineRobustness; +} VkPhysicalDevicePipelineRobustnessFeatures; + +typedef struct VkPhysicalDevicePipelineRobustnessProperties { + VkStructureType sType; + void* pNext; + VkPipelineRobustnessBufferBehavior defaultRobustnessStorageBuffers; + VkPipelineRobustnessBufferBehavior defaultRobustnessUniformBuffers; + VkPipelineRobustnessBufferBehavior defaultRobustnessVertexInputs; + VkPipelineRobustnessImageBehavior defaultRobustnessImages; +} VkPhysicalDevicePipelineRobustnessProperties; + +typedef struct VkPipelineRobustnessCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineRobustnessBufferBehavior storageBuffers; + VkPipelineRobustnessBufferBehavior uniformBuffers; + VkPipelineRobustnessBufferBehavior vertexInputs; + VkPipelineRobustnessImageBehavior images; +} VkPipelineRobustnessCreateInfo; + +typedef struct VkPhysicalDeviceHostImageCopyFeatures { + VkStructureType sType; + void* pNext; + VkBool32 hostImageCopy; +} VkPhysicalDeviceHostImageCopyFeatures; + +typedef struct VkPhysicalDeviceHostImageCopyProperties { + VkStructureType sType; + void* pNext; + uint32_t copySrcLayoutCount; + VkImageLayout* pCopySrcLayouts; + uint32_t copyDstLayoutCount; + VkImageLayout* pCopyDstLayouts; + uint8_t optimalTilingLayoutUUID[VK_UUID_SIZE]; + VkBool32 identicalMemoryTypeRequirements; +} VkPhysicalDeviceHostImageCopyProperties; + +typedef 
struct VkMemoryToImageCopy { + VkStructureType sType; + const void* pNext; + const void* pHostPointer; + uint32_t memoryRowLength; + uint32_t memoryImageHeight; + VkImageSubresourceLayers imageSubresource; + VkOffset3D imageOffset; + VkExtent3D imageExtent; +} VkMemoryToImageCopy; + +typedef struct VkImageToMemoryCopy { + VkStructureType sType; + const void* pNext; + void* pHostPointer; + uint32_t memoryRowLength; + uint32_t memoryImageHeight; + VkImageSubresourceLayers imageSubresource; + VkOffset3D imageOffset; + VkExtent3D imageExtent; +} VkImageToMemoryCopy; + +typedef struct VkCopyMemoryToImageInfo { + VkStructureType sType; + const void* pNext; + VkHostImageCopyFlags flags; + VkImage dstImage; + VkImageLayout dstImageLayout; + uint32_t regionCount; + const VkMemoryToImageCopy* pRegions; +} VkCopyMemoryToImageInfo; + +typedef struct VkCopyImageToMemoryInfo { + VkStructureType sType; + const void* pNext; + VkHostImageCopyFlags flags; + VkImage srcImage; + VkImageLayout srcImageLayout; + uint32_t regionCount; + const VkImageToMemoryCopy* pRegions; +} VkCopyImageToMemoryInfo; + +typedef struct VkCopyImageToImageInfo { + VkStructureType sType; + const void* pNext; + VkHostImageCopyFlags flags; + VkImage srcImage; + VkImageLayout srcImageLayout; + VkImage dstImage; + VkImageLayout dstImageLayout; + uint32_t regionCount; + const VkImageCopy2* pRegions; +} VkCopyImageToImageInfo; + +typedef struct VkHostImageLayoutTransitionInfo { + VkStructureType sType; + const void* pNext; + VkImage image; + VkImageLayout oldLayout; + VkImageLayout newLayout; + VkImageSubresourceRange subresourceRange; +} VkHostImageLayoutTransitionInfo; + +typedef struct VkSubresourceHostMemcpySize { + VkStructureType sType; + void* pNext; + VkDeviceSize size; +} VkSubresourceHostMemcpySize; + +typedef struct VkHostImageCopyDevicePerformanceQuery { + VkStructureType sType; + void* pNext; + VkBool32 optimalDeviceAccess; + VkBool32 identicalMemoryLayout; +} VkHostImageCopyDevicePerformanceQuery; + +typedef void (VKAPI_PTR *PFN_vkCmdSetLineStipple)(VkCommandBuffer commandBuffer, uint32_t lineStippleFactor, uint16_t lineStipplePattern); +typedef VkResult (VKAPI_PTR *PFN_vkMapMemory2)(VkDevice device, const VkMemoryMapInfo* pMemoryMapInfo, void** ppData); +typedef VkResult (VKAPI_PTR *PFN_vkUnmapMemory2)(VkDevice device, const VkMemoryUnmapInfo* pMemoryUnmapInfo); +typedef void (VKAPI_PTR *PFN_vkCmdBindIndexBuffer2)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkDeviceSize size, VkIndexType indexType); +typedef void (VKAPI_PTR *PFN_vkGetRenderingAreaGranularity)(VkDevice device, const VkRenderingAreaInfo* pRenderingAreaInfo, VkExtent2D* pGranularity); +typedef void (VKAPI_PTR *PFN_vkGetDeviceImageSubresourceLayout)(VkDevice device, const VkDeviceImageSubresourceInfo* pInfo, VkSubresourceLayout2* pLayout); +typedef void (VKAPI_PTR *PFN_vkGetImageSubresourceLayout2)(VkDevice device, VkImage image, const VkImageSubresource2* pSubresource, VkSubresourceLayout2* pLayout); +typedef void (VKAPI_PTR *PFN_vkCmdPushDescriptorSet)(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount, const VkWriteDescriptorSet* pDescriptorWrites); +typedef void (VKAPI_PTR *PFN_vkCmdPushDescriptorSetWithTemplate)(VkCommandBuffer commandBuffer, VkDescriptorUpdateTemplate descriptorUpdateTemplate, VkPipelineLayout layout, uint32_t set, const void* pData); +typedef void (VKAPI_PTR *PFN_vkCmdSetRenderingAttachmentLocations)(VkCommandBuffer 
commandBuffer, const VkRenderingAttachmentLocationInfo* pLocationInfo); +typedef void (VKAPI_PTR *PFN_vkCmdSetRenderingInputAttachmentIndices)(VkCommandBuffer commandBuffer, const VkRenderingInputAttachmentIndexInfo* pInputAttachmentIndexInfo); +typedef void (VKAPI_PTR *PFN_vkCmdBindDescriptorSets2)(VkCommandBuffer commandBuffer, const VkBindDescriptorSetsInfo* pBindDescriptorSetsInfo); +typedef void (VKAPI_PTR *PFN_vkCmdPushConstants2)(VkCommandBuffer commandBuffer, const VkPushConstantsInfo* pPushConstantsInfo); +typedef void (VKAPI_PTR *PFN_vkCmdPushDescriptorSet2)(VkCommandBuffer commandBuffer, const VkPushDescriptorSetInfo* pPushDescriptorSetInfo); +typedef void (VKAPI_PTR *PFN_vkCmdPushDescriptorSetWithTemplate2)(VkCommandBuffer commandBuffer, const VkPushDescriptorSetWithTemplateInfo* pPushDescriptorSetWithTemplateInfo); +typedef VkResult (VKAPI_PTR *PFN_vkCopyMemoryToImage)(VkDevice device, const VkCopyMemoryToImageInfo* pCopyMemoryToImageInfo); +typedef VkResult (VKAPI_PTR *PFN_vkCopyImageToMemory)(VkDevice device, const VkCopyImageToMemoryInfo* pCopyImageToMemoryInfo); +typedef VkResult (VKAPI_PTR *PFN_vkCopyImageToImage)(VkDevice device, const VkCopyImageToImageInfo* pCopyImageToImageInfo); +typedef VkResult (VKAPI_PTR *PFN_vkTransitionImageLayout)(VkDevice device, uint32_t transitionCount, const VkHostImageLayoutTransitionInfo* pTransitions); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdSetLineStipple( + VkCommandBuffer commandBuffer, + uint32_t lineStippleFactor, + uint16_t lineStipplePattern); + +VKAPI_ATTR VkResult VKAPI_CALL vkMapMemory2( + VkDevice device, + const VkMemoryMapInfo* pMemoryMapInfo, + void** ppData); + +VKAPI_ATTR VkResult VKAPI_CALL vkUnmapMemory2( + VkDevice device, + const VkMemoryUnmapInfo* pMemoryUnmapInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdBindIndexBuffer2( + VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkDeviceSize size, + VkIndexType indexType); + +VKAPI_ATTR void VKAPI_CALL vkGetRenderingAreaGranularity( + VkDevice device, + const VkRenderingAreaInfo* pRenderingAreaInfo, + VkExtent2D* pGranularity); + +VKAPI_ATTR void VKAPI_CALL vkGetDeviceImageSubresourceLayout( + VkDevice device, + const VkDeviceImageSubresourceInfo* pInfo, + VkSubresourceLayout2* pLayout); + +VKAPI_ATTR void VKAPI_CALL vkGetImageSubresourceLayout2( + VkDevice device, + VkImage image, + const VkImageSubresource2* pSubresource, + VkSubresourceLayout2* pLayout); + +VKAPI_ATTR void VKAPI_CALL vkCmdPushDescriptorSet( + VkCommandBuffer commandBuffer, + VkPipelineBindPoint pipelineBindPoint, + VkPipelineLayout layout, + uint32_t set, + uint32_t descriptorWriteCount, + const VkWriteDescriptorSet* pDescriptorWrites); + +VKAPI_ATTR void VKAPI_CALL vkCmdPushDescriptorSetWithTemplate( + VkCommandBuffer commandBuffer, + VkDescriptorUpdateTemplate descriptorUpdateTemplate, + VkPipelineLayout layout, + uint32_t set, + const void* pData); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetRenderingAttachmentLocations( + VkCommandBuffer commandBuffer, + const VkRenderingAttachmentLocationInfo* pLocationInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetRenderingInputAttachmentIndices( + VkCommandBuffer commandBuffer, + const VkRenderingInputAttachmentIndexInfo* pInputAttachmentIndexInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdBindDescriptorSets2( + VkCommandBuffer commandBuffer, + const VkBindDescriptorSetsInfo* pBindDescriptorSetsInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdPushConstants2( + VkCommandBuffer commandBuffer, + const VkPushConstantsInfo* pPushConstantsInfo); + 
+VKAPI_ATTR void VKAPI_CALL vkCmdPushDescriptorSet2( + VkCommandBuffer commandBuffer, + const VkPushDescriptorSetInfo* pPushDescriptorSetInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdPushDescriptorSetWithTemplate2( + VkCommandBuffer commandBuffer, + const VkPushDescriptorSetWithTemplateInfo* pPushDescriptorSetWithTemplateInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkCopyMemoryToImage( + VkDevice device, + const VkCopyMemoryToImageInfo* pCopyMemoryToImageInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkCopyImageToMemory( + VkDevice device, + const VkCopyImageToMemoryInfo* pCopyImageToMemoryInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkCopyImageToImage( + VkDevice device, + const VkCopyImageToImageInfo* pCopyImageToImageInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkTransitionImageLayout( + VkDevice device, + uint32_t transitionCount, + const VkHostImageLayoutTransitionInfo* pTransitions); +#endif + + // VK_KHR_surface is a preprocessor guard. Do not pass it to API calls. #define VK_KHR_surface 1 VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkSurfaceKHR) @@ -7744,7 +8460,6 @@ typedef enum VkPresentModeKHR { VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR = 1000111000, VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR = 1000111001, VK_PRESENT_MODE_FIFO_LATEST_READY_EXT = 1000361000, - VK_PRESENT_MODE_FIFO_LATEST_READY_EXT = 1000361000, VK_PRESENT_MODE_MAX_ENUM_KHR = 0x7FFFFFFF } VkPresentModeKHR; @@ -7758,7 +8473,6 @@ typedef enum VkColorSpaceKHR { VK_COLOR_SPACE_BT709_NONLINEAR_EXT = 1000104006, VK_COLOR_SPACE_BT2020_LINEAR_EXT = 1000104007, VK_COLOR_SPACE_HDR10_ST2084_EXT = 1000104008, - // VK_COLOR_SPACE_DOLBYVISION_EXT is deprecated, but no reason was given in the API XML // VK_COLOR_SPACE_DOLBYVISION_EXT is deprecated, but no reason was given in the API XML VK_COLOR_SPACE_DOLBYVISION_EXT = 1000104009, VK_COLOR_SPACE_HDR10_HLG_EXT = 1000104010, @@ -7767,10 +8481,8 @@ typedef enum VkColorSpaceKHR { VK_COLOR_SPACE_PASS_THROUGH_EXT = 1000104013, VK_COLOR_SPACE_EXTENDED_SRGB_NONLINEAR_EXT = 1000104014, VK_COLOR_SPACE_DISPLAY_NATIVE_AMD = 1000213000, - // VK_COLORSPACE_SRGB_NONLINEAR_KHR is a deprecated alias // VK_COLORSPACE_SRGB_NONLINEAR_KHR is a deprecated alias VK_COLORSPACE_SRGB_NONLINEAR_KHR = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR, - // VK_COLOR_SPACE_DCI_P3_LINEAR_EXT is a deprecated alias // VK_COLOR_SPACE_DCI_P3_LINEAR_EXT is a deprecated alias VK_COLOR_SPACE_DCI_P3_LINEAR_EXT = VK_COLOR_SPACE_DISPLAY_P3_LINEAR_EXT, VK_COLOR_SPACE_MAX_ENUM_KHR = 0x7FFFFFFF @@ -9181,10 +9893,8 @@ VKAPI_ATTR void VKAPI_CALL vkCmdDispatchBaseKHR( #define VK_KHR_MAINTENANCE_1_SPEC_VERSION 2 #define VK_KHR_MAINTENANCE_1_EXTENSION_NAME "VK_KHR_maintenance1" // VK_KHR_MAINTENANCE1_SPEC_VERSION is a deprecated alias -// VK_KHR_MAINTENANCE1_SPEC_VERSION is a deprecated alias #define VK_KHR_MAINTENANCE1_SPEC_VERSION VK_KHR_MAINTENANCE_1_SPEC_VERSION // VK_KHR_MAINTENANCE1_EXTENSION_NAME is a deprecated alias -// VK_KHR_MAINTENANCE1_EXTENSION_NAME is a deprecated alias #define VK_KHR_MAINTENANCE1_EXTENSION_NAME VK_KHR_MAINTENANCE_1_EXTENSION_NAME typedef VkCommandPoolTrimFlags VkCommandPoolTrimFlagsKHR; @@ -9383,11 +10093,7 @@ VKAPI_ATTR VkResult VKAPI_CALL vkGetSemaphoreFdKHR( #define VK_KHR_push_descriptor 1 #define VK_KHR_PUSH_DESCRIPTOR_SPEC_VERSION 2 #define VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME "VK_KHR_push_descriptor" -typedef struct VkPhysicalDevicePushDescriptorPropertiesKHR { - VkStructureType sType; - void* pNext; - uint32_t maxPushDescriptors; -} VkPhysicalDevicePushDescriptorPropertiesKHR; +typedef VkPhysicalDevicePushDescriptorProperties 
VkPhysicalDevicePushDescriptorPropertiesKHR; typedef void (VKAPI_PTR *PFN_vkCmdPushDescriptorSetKHR)(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount, const VkWriteDescriptorSet* pDescriptorWrites); typedef void (VKAPI_PTR *PFN_vkCmdPushDescriptorSetWithTemplateKHR)(VkCommandBuffer commandBuffer, VkDescriptorUpdateTemplate descriptorUpdateTemplate, VkPipelineLayout layout, uint32_t set, const void* pData); @@ -9666,13 +10372,10 @@ typedef enum VkPerformanceCounterScopeKHR { VK_PERFORMANCE_COUNTER_SCOPE_COMMAND_BUFFER_KHR = 0, VK_PERFORMANCE_COUNTER_SCOPE_RENDER_PASS_KHR = 1, VK_PERFORMANCE_COUNTER_SCOPE_COMMAND_KHR = 2, - // VK_QUERY_SCOPE_COMMAND_BUFFER_KHR is a deprecated alias // VK_QUERY_SCOPE_COMMAND_BUFFER_KHR is a deprecated alias VK_QUERY_SCOPE_COMMAND_BUFFER_KHR = VK_PERFORMANCE_COUNTER_SCOPE_COMMAND_BUFFER_KHR, - // VK_QUERY_SCOPE_RENDER_PASS_KHR is a deprecated alias // VK_QUERY_SCOPE_RENDER_PASS_KHR is a deprecated alias VK_QUERY_SCOPE_RENDER_PASS_KHR = VK_PERFORMANCE_COUNTER_SCOPE_RENDER_PASS_KHR, - // VK_QUERY_SCOPE_COMMAND_KHR is a deprecated alias // VK_QUERY_SCOPE_COMMAND_KHR is a deprecated alias VK_QUERY_SCOPE_COMMAND_KHR = VK_PERFORMANCE_COUNTER_SCOPE_COMMAND_KHR, VK_PERFORMANCE_COUNTER_SCOPE_MAX_ENUM_KHR = 0x7FFFFFFF @@ -9691,10 +10394,8 @@ typedef enum VkPerformanceCounterStorageKHR { typedef enum VkPerformanceCounterDescriptionFlagBitsKHR { VK_PERFORMANCE_COUNTER_DESCRIPTION_PERFORMANCE_IMPACTING_BIT_KHR = 0x00000001, VK_PERFORMANCE_COUNTER_DESCRIPTION_CONCURRENTLY_IMPACTED_BIT_KHR = 0x00000002, - // VK_PERFORMANCE_COUNTER_DESCRIPTION_PERFORMANCE_IMPACTING_KHR is a deprecated alias // VK_PERFORMANCE_COUNTER_DESCRIPTION_PERFORMANCE_IMPACTING_KHR is a deprecated alias VK_PERFORMANCE_COUNTER_DESCRIPTION_PERFORMANCE_IMPACTING_KHR = VK_PERFORMANCE_COUNTER_DESCRIPTION_PERFORMANCE_IMPACTING_BIT_KHR, - // VK_PERFORMANCE_COUNTER_DESCRIPTION_CONCURRENTLY_IMPACTED_KHR is a deprecated alias // VK_PERFORMANCE_COUNTER_DESCRIPTION_CONCURRENTLY_IMPACTED_KHR is a deprecated alias VK_PERFORMANCE_COUNTER_DESCRIPTION_CONCURRENTLY_IMPACTED_KHR = VK_PERFORMANCE_COUNTER_DESCRIPTION_CONCURRENTLY_IMPACTED_BIT_KHR, VK_PERFORMANCE_COUNTER_DESCRIPTION_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF @@ -9798,10 +10499,8 @@ VKAPI_ATTR void VKAPI_CALL vkReleaseProfilingLockKHR( #define VK_KHR_MAINTENANCE_2_SPEC_VERSION 1 #define VK_KHR_MAINTENANCE_2_EXTENSION_NAME "VK_KHR_maintenance2" // VK_KHR_MAINTENANCE2_SPEC_VERSION is a deprecated alias -// VK_KHR_MAINTENANCE2_SPEC_VERSION is a deprecated alias #define VK_KHR_MAINTENANCE2_SPEC_VERSION VK_KHR_MAINTENANCE_2_SPEC_VERSION // VK_KHR_MAINTENANCE2_EXTENSION_NAME is a deprecated alias -// VK_KHR_MAINTENANCE2_EXTENSION_NAME is a deprecated alias #define VK_KHR_MAINTENANCE2_EXTENSION_NAME VK_KHR_MAINTENANCE_2_EXTENSION_NAME typedef VkPointClippingBehavior VkPointClippingBehaviorKHR; @@ -10069,10 +10768,8 @@ VKAPI_ATTR VkResult VKAPI_CALL vkBindImageMemory2KHR( #define VK_KHR_MAINTENANCE_3_SPEC_VERSION 1 #define VK_KHR_MAINTENANCE_3_EXTENSION_NAME "VK_KHR_maintenance3" // VK_KHR_MAINTENANCE3_SPEC_VERSION is a deprecated alias -// VK_KHR_MAINTENANCE3_SPEC_VERSION is a deprecated alias #define VK_KHR_MAINTENANCE3_SPEC_VERSION VK_KHR_MAINTENANCE_3_SPEC_VERSION // VK_KHR_MAINTENANCE3_EXTENSION_NAME is a deprecated alias -// VK_KHR_MAINTENANCE3_EXTENSION_NAME is a deprecated alias #define VK_KHR_MAINTENANCE3_EXTENSION_NAME VK_KHR_MAINTENANCE_3_EXTENSION_NAME typedef 
VkPhysicalDeviceMaintenance3Properties VkPhysicalDeviceMaintenance3PropertiesKHR; @@ -10208,39 +10905,16 @@ typedef struct VkVideoDecodeH265DpbSlotInfoKHR { // VK_KHR_global_priority is a preprocessor guard. Do not pass it to API calls. #define VK_KHR_global_priority 1 -#define VK_MAX_GLOBAL_PRIORITY_SIZE_KHR 16U #define VK_KHR_GLOBAL_PRIORITY_SPEC_VERSION 1 #define VK_KHR_GLOBAL_PRIORITY_EXTENSION_NAME "VK_KHR_global_priority" +#define VK_MAX_GLOBAL_PRIORITY_SIZE_KHR VK_MAX_GLOBAL_PRIORITY_SIZE +typedef VkQueueGlobalPriority VkQueueGlobalPriorityKHR; -typedef enum VkQueueGlobalPriorityKHR { - VK_QUEUE_GLOBAL_PRIORITY_LOW_KHR = 128, - VK_QUEUE_GLOBAL_PRIORITY_MEDIUM_KHR = 256, - VK_QUEUE_GLOBAL_PRIORITY_HIGH_KHR = 512, - VK_QUEUE_GLOBAL_PRIORITY_REALTIME_KHR = 1024, - VK_QUEUE_GLOBAL_PRIORITY_LOW_EXT = VK_QUEUE_GLOBAL_PRIORITY_LOW_KHR, - VK_QUEUE_GLOBAL_PRIORITY_MEDIUM_EXT = VK_QUEUE_GLOBAL_PRIORITY_MEDIUM_KHR, - VK_QUEUE_GLOBAL_PRIORITY_HIGH_EXT = VK_QUEUE_GLOBAL_PRIORITY_HIGH_KHR, - VK_QUEUE_GLOBAL_PRIORITY_REALTIME_EXT = VK_QUEUE_GLOBAL_PRIORITY_REALTIME_KHR, - VK_QUEUE_GLOBAL_PRIORITY_MAX_ENUM_KHR = 0x7FFFFFFF -} VkQueueGlobalPriorityKHR; -typedef struct VkDeviceQueueGlobalPriorityCreateInfoKHR { - VkStructureType sType; - const void* pNext; - VkQueueGlobalPriorityKHR globalPriority; -} VkDeviceQueueGlobalPriorityCreateInfoKHR; +typedef VkDeviceQueueGlobalPriorityCreateInfo VkDeviceQueueGlobalPriorityCreateInfoKHR; -typedef struct VkPhysicalDeviceGlobalPriorityQueryFeaturesKHR { - VkStructureType sType; - void* pNext; - VkBool32 globalPriorityQuery; -} VkPhysicalDeviceGlobalPriorityQueryFeaturesKHR; +typedef VkPhysicalDeviceGlobalPriorityQueryFeatures VkPhysicalDeviceGlobalPriorityQueryFeaturesKHR; -typedef struct VkQueueFamilyGlobalPriorityPropertiesKHR { - VkStructureType sType; - void* pNext; - uint32_t priorityCount; - VkQueueGlobalPriorityKHR priorities[VK_MAX_GLOBAL_PRIORITY_SIZE_KHR]; -} VkQueueFamilyGlobalPriorityPropertiesKHR; +typedef VkQueueFamilyGlobalPriorityProperties VkQueueFamilyGlobalPriorityPropertiesKHR; @@ -10419,14 +11093,6 @@ typedef struct VkRenderingFragmentShadingRateAttachmentInfoKHR { VkExtent2D shadingRateAttachmentTexelSize; } VkRenderingFragmentShadingRateAttachmentInfoKHR; -typedef struct VkRenderingFragmentShadingRateAttachmentInfoKHR { - VkStructureType sType; - const void* pNext; - VkImageView imageView; - VkImageLayout imageLayout; - VkExtent2D shadingRateAttachmentTexelSize; -} VkRenderingFragmentShadingRateAttachmentInfoKHR; - typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceFragmentShadingRatesKHR)(VkPhysicalDevice physicalDevice, uint32_t* pFragmentShadingRateCount, VkPhysicalDeviceFragmentShadingRateKHR* pFragmentShadingRates); typedef void (VKAPI_PTR *PFN_vkCmdSetFragmentShadingRateKHR)(VkCommandBuffer commandBuffer, const VkExtent2D* pFragmentSize, const VkFragmentShadingRateCombinerOpKHR combinerOps[2]); @@ -10447,41 +11113,23 @@ VKAPI_ATTR void VKAPI_CALL vkCmdSetFragmentShadingRateKHR( #define VK_KHR_dynamic_rendering_local_read 1 #define VK_KHR_DYNAMIC_RENDERING_LOCAL_READ_SPEC_VERSION 1 #define VK_KHR_DYNAMIC_RENDERING_LOCAL_READ_EXTENSION_NAME "VK_KHR_dynamic_rendering_local_read" -typedef struct VkPhysicalDeviceDynamicRenderingLocalReadFeaturesKHR { - VkStructureType sType; - void* pNext; - VkBool32 dynamicRenderingLocalRead; -} VkPhysicalDeviceDynamicRenderingLocalReadFeaturesKHR; +typedef VkPhysicalDeviceDynamicRenderingLocalReadFeatures VkPhysicalDeviceDynamicRenderingLocalReadFeaturesKHR; -typedef struct 
VkRenderingAttachmentLocationInfoKHR { - VkStructureType sType; - const void* pNext; - uint32_t colorAttachmentCount; - const uint32_t* pColorAttachmentLocations; -} VkRenderingAttachmentLocationInfoKHR; +typedef VkRenderingAttachmentLocationInfo VkRenderingAttachmentLocationInfoKHR; -typedef struct VkRenderingInputAttachmentIndexInfoKHR { - VkStructureType sType; - const void* pNext; - uint32_t colorAttachmentCount; - const uint32_t* pColorAttachmentInputIndices; - const uint32_t* pDepthInputAttachmentIndex; - const uint32_t* pStencilInputAttachmentIndex; -} VkRenderingInputAttachmentIndexInfoKHR; +typedef VkRenderingInputAttachmentIndexInfo VkRenderingInputAttachmentIndexInfoKHR; -typedef void (VKAPI_PTR *PFN_vkCmdSetRenderingAttachmentLocationsKHR)(VkCommandBuffer commandBuffer, const VkRenderingAttachmentLocationInfoKHR* pLocationInfo); -typedef void (VKAPI_PTR *PFN_vkCmdSetRenderingInputAttachmentIndicesKHR)(VkCommandBuffer commandBuffer, const VkRenderingInputAttachmentIndexInfoKHR* pInputAttachmentIndexInfo); -typedef void (VKAPI_PTR *PFN_vkCmdSetRenderingInputAttachmentIndicesKHR)(VkCommandBuffer commandBuffer, const VkRenderingInputAttachmentIndexInfoKHR* pInputAttachmentIndexInfo); +typedef void (VKAPI_PTR *PFN_vkCmdSetRenderingAttachmentLocationsKHR)(VkCommandBuffer commandBuffer, const VkRenderingAttachmentLocationInfo* pLocationInfo); +typedef void (VKAPI_PTR *PFN_vkCmdSetRenderingInputAttachmentIndicesKHR)(VkCommandBuffer commandBuffer, const VkRenderingInputAttachmentIndexInfo* pInputAttachmentIndexInfo); #ifndef VK_NO_PROTOTYPES VKAPI_ATTR void VKAPI_CALL vkCmdSetRenderingAttachmentLocationsKHR( VkCommandBuffer commandBuffer, - const VkRenderingAttachmentLocationInfoKHR* pLocationInfo); + const VkRenderingAttachmentLocationInfo* pLocationInfo); VKAPI_ATTR void VKAPI_CALL vkCmdSetRenderingInputAttachmentIndicesKHR( VkCommandBuffer commandBuffer, - const VkRenderingInputAttachmentIndexInfoKHR* pInputAttachmentIndexInfo); - const VkRenderingInputAttachmentIndexInfoKHR* pInputAttachmentIndexInfo); + const VkRenderingInputAttachmentIndexInfo* pInputAttachmentIndexInfo); #endif @@ -10720,40 +11368,26 @@ VKAPI_ATTR VkResult VKAPI_CALL vkGetPipelineExecutableInternalRepresentationsKHR #define VK_KHR_map_memory2 1 #define VK_KHR_MAP_MEMORY_2_SPEC_VERSION 1 #define VK_KHR_MAP_MEMORY_2_EXTENSION_NAME "VK_KHR_map_memory2" +typedef VkMemoryUnmapFlagBits VkMemoryUnmapFlagBitsKHR; -typedef enum VkMemoryUnmapFlagBitsKHR { - VK_MEMORY_UNMAP_RESERVE_BIT_EXT = 0x00000001, - VK_MEMORY_UNMAP_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF -} VkMemoryUnmapFlagBitsKHR; -typedef VkFlags VkMemoryUnmapFlagsKHR; -typedef struct VkMemoryMapInfoKHR { - VkStructureType sType; - const void* pNext; - VkMemoryMapFlags flags; - VkDeviceMemory memory; - VkDeviceSize offset; - VkDeviceSize size; -} VkMemoryMapInfoKHR; +typedef VkMemoryUnmapFlags VkMemoryUnmapFlagsKHR; -typedef struct VkMemoryUnmapInfoKHR { - VkStructureType sType; - const void* pNext; - VkMemoryUnmapFlagsKHR flags; - VkDeviceMemory memory; -} VkMemoryUnmapInfoKHR; +typedef VkMemoryMapInfo VkMemoryMapInfoKHR; -typedef VkResult (VKAPI_PTR *PFN_vkMapMemory2KHR)(VkDevice device, const VkMemoryMapInfoKHR* pMemoryMapInfo, void** ppData); -typedef VkResult (VKAPI_PTR *PFN_vkUnmapMemory2KHR)(VkDevice device, const VkMemoryUnmapInfoKHR* pMemoryUnmapInfo); +typedef VkMemoryUnmapInfo VkMemoryUnmapInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkMapMemory2KHR)(VkDevice device, const VkMemoryMapInfo* pMemoryMapInfo, void** ppData); +typedef VkResult (VKAPI_PTR 
*PFN_vkUnmapMemory2KHR)(VkDevice device, const VkMemoryUnmapInfo* pMemoryUnmapInfo); #ifndef VK_NO_PROTOTYPES VKAPI_ATTR VkResult VKAPI_CALL vkMapMemory2KHR( VkDevice device, - const VkMemoryMapInfoKHR* pMemoryMapInfo, + const VkMemoryMapInfo* pMemoryMapInfo, void** ppData); VKAPI_ATTR VkResult VKAPI_CALL vkUnmapMemory2KHR( VkDevice device, - const VkMemoryUnmapInfoKHR* pMemoryUnmapInfo); + const VkMemoryUnmapInfo* pMemoryUnmapInfo); #endif @@ -10818,6 +11452,12 @@ typedef enum VkVideoEncodeTuningModeKHR { VK_VIDEO_ENCODE_TUNING_MODE_LOSSLESS_KHR = 4, VK_VIDEO_ENCODE_TUNING_MODE_MAX_ENUM_KHR = 0x7FFFFFFF } VkVideoEncodeTuningModeKHR; + +typedef enum VkVideoEncodeFlagBitsKHR { + VK_VIDEO_ENCODE_WITH_QUANTIZATION_DELTA_MAP_BIT_KHR = 0x00000001, + VK_VIDEO_ENCODE_WITH_EMPHASIS_MAP_BIT_KHR = 0x00000002, + VK_VIDEO_ENCODE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoEncodeFlagBitsKHR; typedef VkFlags VkVideoEncodeFlagsKHR; typedef enum VkVideoEncodeCapabilityFlagBitsKHR { @@ -11261,12 +11901,7 @@ VKAPI_ATTR void VKAPI_CALL vkGetDeviceImageSparseMemoryRequirementsKHR( #define VK_KHR_shader_subgroup_rotate 1 #define VK_KHR_SHADER_SUBGROUP_ROTATE_SPEC_VERSION 2 #define VK_KHR_SHADER_SUBGROUP_ROTATE_EXTENSION_NAME "VK_KHR_shader_subgroup_rotate" -typedef struct VkPhysicalDeviceShaderSubgroupRotateFeaturesKHR { - VkStructureType sType; - void* pNext; - VkBool32 shaderSubgroupRotate; - VkBool32 shaderSubgroupRotateClustered; -} VkPhysicalDeviceShaderSubgroupRotateFeaturesKHR; +typedef VkPhysicalDeviceShaderSubgroupRotateFeatures VkPhysicalDeviceShaderSubgroupRotateFeaturesKHR; @@ -11286,154 +11921,34 @@ typedef struct VkPhysicalDeviceShaderMaximalReconvergenceFeaturesKHR { #define VK_KHR_maintenance5 1 #define VK_KHR_MAINTENANCE_5_SPEC_VERSION 1 #define VK_KHR_MAINTENANCE_5_EXTENSION_NAME "VK_KHR_maintenance5" -typedef VkFlags64 VkPipelineCreateFlags2KHR; +typedef VkPipelineCreateFlags2 VkPipelineCreateFlags2KHR; -// Flag bits for VkPipelineCreateFlagBits2KHR -typedef VkFlags64 VkPipelineCreateFlagBits2KHR; -static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_DISABLE_OPTIMIZATION_BIT_KHR = 0x00000001ULL; -static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_ALLOW_DERIVATIVES_BIT_KHR = 0x00000002ULL; -static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_DERIVATIVE_BIT_KHR = 0x00000004ULL; -#ifdef VK_ENABLE_BETA_EXTENSIONS -static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_EXECUTION_GRAPH_BIT_AMDX = 0x100000000ULL; -#endif -static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_ENABLE_LEGACY_DITHERING_BIT_EXT = 0x400000000ULL; -#ifdef VK_ENABLE_BETA_EXTENSIONS -static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_EXECUTION_GRAPH_BIT_AMDX = 0x100000000ULL; -#endif -static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_ENABLE_LEGACY_DITHERING_BIT_EXT = 0x400000000ULL; -static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_VIEW_INDEX_FROM_DEVICE_INDEX_BIT_KHR = 0x00000008ULL; -static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_DISPATCH_BASE_BIT_KHR = 0x00000010ULL; -static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_DEFER_COMPILE_BIT_NV = 0x00000020ULL; -static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_CAPTURE_STATISTICS_BIT_KHR = 0x00000040ULL; -static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_CAPTURE_INTERNAL_REPRESENTATIONS_BIT_KHR = 0x00000080ULL; -static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT_KHR = 0x00000100ULL; 
-static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_EARLY_RETURN_ON_FAILURE_BIT_KHR = 0x00000200ULL; -static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_LINK_TIME_OPTIMIZATION_BIT_EXT = 0x00000400ULL; -static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_RETAIN_LINK_TIME_OPTIMIZATION_INFO_BIT_EXT = 0x00800000ULL; -static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_LIBRARY_BIT_KHR = 0x00000800ULL; -static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_RAY_TRACING_SKIP_TRIANGLES_BIT_KHR = 0x00001000ULL; -static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_RAY_TRACING_SKIP_AABBS_BIT_KHR = 0x00002000ULL; -static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_RAY_TRACING_NO_NULL_ANY_HIT_SHADERS_BIT_KHR = 0x00004000ULL; -static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_RAY_TRACING_NO_NULL_CLOSEST_HIT_SHADERS_BIT_KHR = 0x00008000ULL; -static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_RAY_TRACING_NO_NULL_MISS_SHADERS_BIT_KHR = 0x00010000ULL; -static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_RAY_TRACING_NO_NULL_INTERSECTION_SHADERS_BIT_KHR = 0x00020000ULL; -static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_RAY_TRACING_SHADER_GROUP_HANDLE_CAPTURE_REPLAY_BIT_KHR = 0x00080000ULL; -static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_INDIRECT_BINDABLE_BIT_NV = 0x00040000ULL; -static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_RAY_TRACING_ALLOW_MOTION_BIT_NV = 0x00100000ULL; -static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR = 0x00200000ULL; -static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT = 0x00400000ULL; -static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_RAY_TRACING_OPACITY_MICROMAP_BIT_EXT = 0x01000000ULL; -static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_COLOR_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT = 0x02000000ULL; -static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_DEPTH_STENCIL_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT = 0x04000000ULL; -static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_NO_PROTECTED_ACCESS_BIT_EXT = 0x08000000ULL; -static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_PROTECTED_ACCESS_ONLY_BIT_EXT = 0x40000000ULL; -static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_RAY_TRACING_DISPLACEMENT_MICROMAP_BIT_NV = 0x10000000ULL; -static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_DESCRIPTOR_BUFFER_BIT_EXT = 0x20000000ULL; -static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_CAPTURE_DATA_BIT_KHR = 0x80000000ULL; -static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_INDIRECT_BINDABLE_BIT_EXT = 0x4000000000ULL; -static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_CAPTURE_DATA_BIT_KHR = 0x80000000ULL; -static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_INDIRECT_BINDABLE_BIT_EXT = 0x4000000000ULL; +typedef VkPipelineCreateFlagBits2 VkPipelineCreateFlagBits2KHR; -typedef VkFlags64 VkBufferUsageFlags2KHR; +typedef VkBufferUsageFlags2 VkBufferUsageFlags2KHR; -// Flag bits for VkBufferUsageFlagBits2KHR -typedef VkFlags64 VkBufferUsageFlagBits2KHR; -static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_TRANSFER_SRC_BIT_KHR = 0x00000001ULL; -static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_TRANSFER_DST_BIT_KHR = 0x00000002ULL; -static const VkBufferUsageFlagBits2KHR 
VK_BUFFER_USAGE_2_UNIFORM_TEXEL_BUFFER_BIT_KHR = 0x00000004ULL; -static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_STORAGE_TEXEL_BUFFER_BIT_KHR = 0x00000008ULL; -static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_UNIFORM_BUFFER_BIT_KHR = 0x00000010ULL; -static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_STORAGE_BUFFER_BIT_KHR = 0x00000020ULL; -static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_INDEX_BUFFER_BIT_KHR = 0x00000040ULL; -static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_VERTEX_BUFFER_BIT_KHR = 0x00000080ULL; -static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_INDIRECT_BUFFER_BIT_KHR = 0x00000100ULL; -#ifdef VK_ENABLE_BETA_EXTENSIONS -#ifdef VK_ENABLE_BETA_EXTENSIONS -static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_EXECUTION_GRAPH_SCRATCH_BIT_AMDX = 0x02000000ULL; -#endif -#endif -static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_CONDITIONAL_RENDERING_BIT_EXT = 0x00000200ULL; -static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_SHADER_BINDING_TABLE_BIT_KHR = 0x00000400ULL; -static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_RAY_TRACING_BIT_NV = 0x00000400ULL; -static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_TRANSFORM_FEEDBACK_BUFFER_BIT_EXT = 0x00000800ULL; -static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_TRANSFORM_FEEDBACK_COUNTER_BUFFER_BIT_EXT = 0x00001000ULL; -static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_VIDEO_DECODE_SRC_BIT_KHR = 0x00002000ULL; -static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_VIDEO_DECODE_DST_BIT_KHR = 0x00004000ULL; -static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_VIDEO_ENCODE_DST_BIT_KHR = 0x00008000ULL; -static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_VIDEO_ENCODE_SRC_BIT_KHR = 0x00010000ULL; -static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_SHADER_DEVICE_ADDRESS_BIT_KHR = 0x00020000ULL; -static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_ACCELERATION_STRUCTURE_BUILD_INPUT_READ_ONLY_BIT_KHR = 0x00080000ULL; -static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_ACCELERATION_STRUCTURE_STORAGE_BIT_KHR = 0x00100000ULL; -static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_SAMPLER_DESCRIPTOR_BUFFER_BIT_EXT = 0x00200000ULL; -static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_RESOURCE_DESCRIPTOR_BUFFER_BIT_EXT = 0x00400000ULL; -static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_PUSH_DESCRIPTORS_DESCRIPTOR_BUFFER_BIT_EXT = 0x04000000ULL; -static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_MICROMAP_BUILD_INPUT_READ_ONLY_BIT_EXT = 0x00800000ULL; -static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_MICROMAP_STORAGE_BIT_EXT = 0x01000000ULL; -static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_PREPROCESS_BUFFER_BIT_EXT = 0x80000000ULL; -static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_PREPROCESS_BUFFER_BIT_EXT = 0x80000000ULL; +typedef VkBufferUsageFlagBits2 VkBufferUsageFlagBits2KHR; -typedef struct VkPhysicalDeviceMaintenance5FeaturesKHR { - VkStructureType sType; - void* pNext; - VkBool32 maintenance5; -} VkPhysicalDeviceMaintenance5FeaturesKHR; +typedef VkPhysicalDeviceMaintenance5Features VkPhysicalDeviceMaintenance5FeaturesKHR; -typedef struct VkPhysicalDeviceMaintenance5PropertiesKHR { - VkStructureType sType; - void* pNext; - VkBool32 earlyFragmentMultisampleCoverageAfterSampleCounting; - VkBool32 earlyFragmentSampleMaskTestBeforeSampleCounting; - VkBool32 depthStencilSwizzleOneSupport; - VkBool32 polygonModePointSize; - VkBool32 nonStrictSinglePixelWideLinesUseParallelogram; - 
VkBool32 nonStrictWideLinesUseParallelogram; -} VkPhysicalDeviceMaintenance5PropertiesKHR; +typedef VkPhysicalDeviceMaintenance5Properties VkPhysicalDeviceMaintenance5PropertiesKHR; -typedef struct VkRenderingAreaInfoKHR { - VkStructureType sType; - const void* pNext; - uint32_t viewMask; - uint32_t colorAttachmentCount; - const VkFormat* pColorAttachmentFormats; - VkFormat depthAttachmentFormat; - VkFormat stencilAttachmentFormat; -} VkRenderingAreaInfoKHR; +typedef VkRenderingAreaInfo VkRenderingAreaInfoKHR; -typedef struct VkImageSubresource2KHR { - VkStructureType sType; - void* pNext; - VkImageSubresource imageSubresource; -} VkImageSubresource2KHR; +typedef VkDeviceImageSubresourceInfo VkDeviceImageSubresourceInfoKHR; -typedef struct VkDeviceImageSubresourceInfoKHR { - VkStructureType sType; - const void* pNext; - const VkImageCreateInfo* pCreateInfo; - const VkImageSubresource2KHR* pSubresource; -} VkDeviceImageSubresourceInfoKHR; +typedef VkImageSubresource2 VkImageSubresource2KHR; -typedef struct VkSubresourceLayout2KHR { - VkStructureType sType; - void* pNext; - VkSubresourceLayout subresourceLayout; -} VkSubresourceLayout2KHR; +typedef VkSubresourceLayout2 VkSubresourceLayout2KHR; -typedef struct VkPipelineCreateFlags2CreateInfoKHR { - VkStructureType sType; - const void* pNext; - VkPipelineCreateFlags2KHR flags; -} VkPipelineCreateFlags2CreateInfoKHR; +typedef VkPipelineCreateFlags2CreateInfo VkPipelineCreateFlags2CreateInfoKHR; -typedef struct VkBufferUsageFlags2CreateInfoKHR { - VkStructureType sType; - const void* pNext; - VkBufferUsageFlags2KHR usage; -} VkBufferUsageFlags2CreateInfoKHR; +typedef VkBufferUsageFlags2CreateInfo VkBufferUsageFlags2CreateInfoKHR; typedef void (VKAPI_PTR *PFN_vkCmdBindIndexBuffer2KHR)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkDeviceSize size, VkIndexType indexType); -typedef void (VKAPI_PTR *PFN_vkGetRenderingAreaGranularityKHR)(VkDevice device, const VkRenderingAreaInfoKHR* pRenderingAreaInfo, VkExtent2D* pGranularity); -typedef void (VKAPI_PTR *PFN_vkGetDeviceImageSubresourceLayoutKHR)(VkDevice device, const VkDeviceImageSubresourceInfoKHR* pInfo, VkSubresourceLayout2KHR* pLayout); -typedef void (VKAPI_PTR *PFN_vkGetImageSubresourceLayout2KHR)(VkDevice device, VkImage image, const VkImageSubresource2KHR* pSubresource, VkSubresourceLayout2KHR* pLayout); +typedef void (VKAPI_PTR *PFN_vkGetRenderingAreaGranularityKHR)(VkDevice device, const VkRenderingAreaInfo* pRenderingAreaInfo, VkExtent2D* pGranularity); +typedef void (VKAPI_PTR *PFN_vkGetDeviceImageSubresourceLayoutKHR)(VkDevice device, const VkDeviceImageSubresourceInfo* pInfo, VkSubresourceLayout2* pLayout); +typedef void (VKAPI_PTR *PFN_vkGetImageSubresourceLayout2KHR)(VkDevice device, VkImage image, const VkImageSubresource2* pSubresource, VkSubresourceLayout2* pLayout); #ifndef VK_NO_PROTOTYPES VKAPI_ATTR void VKAPI_CALL vkCmdBindIndexBuffer2KHR( @@ -11445,19 +11960,19 @@ VKAPI_ATTR void VKAPI_CALL vkCmdBindIndexBuffer2KHR( VKAPI_ATTR void VKAPI_CALL vkGetRenderingAreaGranularityKHR( VkDevice device, - const VkRenderingAreaInfoKHR* pRenderingAreaInfo, + const VkRenderingAreaInfo* pRenderingAreaInfo, VkExtent2D* pGranularity); VKAPI_ATTR void VKAPI_CALL vkGetDeviceImageSubresourceLayoutKHR( VkDevice device, - const VkDeviceImageSubresourceInfoKHR* pInfo, - VkSubresourceLayout2KHR* pLayout); + const VkDeviceImageSubresourceInfo* pInfo, + VkSubresourceLayout2* pLayout); VKAPI_ATTR void VKAPI_CALL vkGetImageSubresourceLayout2KHR( VkDevice device, VkImage image, - 
const VkImageSubresource2KHR* pSubresource, - VkSubresourceLayout2KHR* pLayout); + const VkImageSubresource2* pSubresource, + VkSubresourceLayout2* pLayout); #endif @@ -11595,128 +12110,6 @@ VKAPI_ATTR VkResult VKAPI_CALL vkReleaseCapturedPipelineDataKHR( #endif -// VK_KHR_pipeline_binary is a preprocessor guard. Do not pass it to API calls. -#define VK_KHR_pipeline_binary 1 -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkPipelineBinaryKHR) -#define VK_MAX_PIPELINE_BINARY_KEY_SIZE_KHR 32U -#define VK_KHR_PIPELINE_BINARY_SPEC_VERSION 1 -#define VK_KHR_PIPELINE_BINARY_EXTENSION_NAME "VK_KHR_pipeline_binary" -typedef struct VkPhysicalDevicePipelineBinaryFeaturesKHR { - VkStructureType sType; - void* pNext; - VkBool32 pipelineBinaries; -} VkPhysicalDevicePipelineBinaryFeaturesKHR; - -typedef struct VkPhysicalDevicePipelineBinaryPropertiesKHR { - VkStructureType sType; - void* pNext; - VkBool32 pipelineBinaryInternalCache; - VkBool32 pipelineBinaryInternalCacheControl; - VkBool32 pipelineBinaryPrefersInternalCache; - VkBool32 pipelineBinaryPrecompiledInternalCache; - VkBool32 pipelineBinaryCompressedData; -} VkPhysicalDevicePipelineBinaryPropertiesKHR; - -typedef struct VkDevicePipelineBinaryInternalCacheControlKHR { - VkStructureType sType; - const void* pNext; - VkBool32 disableInternalCache; -} VkDevicePipelineBinaryInternalCacheControlKHR; - -typedef struct VkPipelineBinaryKeyKHR { - VkStructureType sType; - void* pNext; - uint32_t keySize; - uint8_t key[VK_MAX_PIPELINE_BINARY_KEY_SIZE_KHR]; -} VkPipelineBinaryKeyKHR; - -typedef struct VkPipelineBinaryDataKHR { - size_t dataSize; - void* pData; -} VkPipelineBinaryDataKHR; - -typedef struct VkPipelineBinaryKeysAndDataKHR { - uint32_t binaryCount; - const VkPipelineBinaryKeyKHR* pPipelineBinaryKeys; - const VkPipelineBinaryDataKHR* pPipelineBinaryData; -} VkPipelineBinaryKeysAndDataKHR; - -typedef struct VkPipelineCreateInfoKHR { - VkStructureType sType; - void* pNext; -} VkPipelineCreateInfoKHR; - -typedef struct VkPipelineBinaryCreateInfoKHR { - VkStructureType sType; - const void* pNext; - const VkPipelineBinaryKeysAndDataKHR* pKeysAndDataInfo; - VkPipeline pipeline; - const VkPipelineCreateInfoKHR* pPipelineCreateInfo; -} VkPipelineBinaryCreateInfoKHR; - -typedef struct VkPipelineBinaryInfoKHR { - VkStructureType sType; - const void* pNext; - uint32_t binaryCount; - const VkPipelineBinaryKHR* pPipelineBinaries; -} VkPipelineBinaryInfoKHR; - -typedef struct VkReleaseCapturedPipelineDataInfoKHR { - VkStructureType sType; - void* pNext; - VkPipeline pipeline; -} VkReleaseCapturedPipelineDataInfoKHR; - -typedef struct VkPipelineBinaryDataInfoKHR { - VkStructureType sType; - void* pNext; - VkPipelineBinaryKHR pipelineBinary; -} VkPipelineBinaryDataInfoKHR; - -typedef struct VkPipelineBinaryHandlesInfoKHR { - VkStructureType sType; - const void* pNext; - uint32_t pipelineBinaryCount; - VkPipelineBinaryKHR* pPipelineBinaries; -} VkPipelineBinaryHandlesInfoKHR; - -typedef VkResult (VKAPI_PTR *PFN_vkCreatePipelineBinariesKHR)(VkDevice device, const VkPipelineBinaryCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPipelineBinaryHandlesInfoKHR* pBinaries); -typedef void (VKAPI_PTR *PFN_vkDestroyPipelineBinaryKHR)(VkDevice device, VkPipelineBinaryKHR pipelineBinary, const VkAllocationCallbacks* pAllocator); -typedef VkResult (VKAPI_PTR *PFN_vkGetPipelineKeyKHR)(VkDevice device, const VkPipelineCreateInfoKHR* pPipelineCreateInfo, VkPipelineBinaryKeyKHR* pPipelineKey); -typedef VkResult (VKAPI_PTR *PFN_vkGetPipelineBinaryDataKHR)(VkDevice 
device, const VkPipelineBinaryDataInfoKHR* pInfo, VkPipelineBinaryKeyKHR* pPipelineBinaryKey, size_t* pPipelineBinaryDataSize, void* pPipelineBinaryData); -typedef VkResult (VKAPI_PTR *PFN_vkReleaseCapturedPipelineDataKHR)(VkDevice device, const VkReleaseCapturedPipelineDataInfoKHR* pInfo, const VkAllocationCallbacks* pAllocator); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkCreatePipelineBinariesKHR( - VkDevice device, - const VkPipelineBinaryCreateInfoKHR* pCreateInfo, - const VkAllocationCallbacks* pAllocator, - VkPipelineBinaryHandlesInfoKHR* pBinaries); - -VKAPI_ATTR void VKAPI_CALL vkDestroyPipelineBinaryKHR( - VkDevice device, - VkPipelineBinaryKHR pipelineBinary, - const VkAllocationCallbacks* pAllocator); - -VKAPI_ATTR VkResult VKAPI_CALL vkGetPipelineKeyKHR( - VkDevice device, - const VkPipelineCreateInfoKHR* pPipelineCreateInfo, - VkPipelineBinaryKeyKHR* pPipelineKey); - -VKAPI_ATTR VkResult VKAPI_CALL vkGetPipelineBinaryDataKHR( - VkDevice device, - const VkPipelineBinaryDataInfoKHR* pInfo, - VkPipelineBinaryKeyKHR* pPipelineBinaryKey, - size_t* pPipelineBinaryDataSize, - void* pPipelineBinaryData); - -VKAPI_ATTR VkResult VKAPI_CALL vkReleaseCapturedPipelineDataKHR( - VkDevice device, - const VkReleaseCapturedPipelineDataInfoKHR* pInfo, - const VkAllocationCallbacks* pAllocator); -#endif - - // VK_KHR_cooperative_matrix is a preprocessor guard. Do not pass it to API calls. #define VK_KHR_cooperative_matrix 1 #define VK_KHR_COOPERATIVE_MATRIX_SPEC_VERSION 2 @@ -11815,25 +12208,6 @@ typedef struct VkPhysicalDeviceComputeShaderDerivativesPropertiesKHR { -// VK_KHR_compute_shader_derivatives is a preprocessor guard. Do not pass it to API calls. -#define VK_KHR_compute_shader_derivatives 1 -#define VK_KHR_COMPUTE_SHADER_DERIVATIVES_SPEC_VERSION 1 -#define VK_KHR_COMPUTE_SHADER_DERIVATIVES_EXTENSION_NAME "VK_KHR_compute_shader_derivatives" -typedef struct VkPhysicalDeviceComputeShaderDerivativesFeaturesKHR { - VkStructureType sType; - void* pNext; - VkBool32 computeDerivativeGroupQuads; - VkBool32 computeDerivativeGroupLinear; -} VkPhysicalDeviceComputeShaderDerivativesFeaturesKHR; - -typedef struct VkPhysicalDeviceComputeShaderDerivativesPropertiesKHR { - VkStructureType sType; - void* pNext; - VkBool32 meshAndTaskShaderDerivatives; -} VkPhysicalDeviceComputeShaderDerivativesPropertiesKHR; - - - // VK_KHR_video_decode_av1 is a preprocessor guard. Do not pass it to API calls. 
#define VK_KHR_video_decode_av1 1 #include "vk_video/vulkan_video_codec_av1std.h" @@ -12097,31 +12471,13 @@ typedef struct VkVideoInlineQueryInfoKHR { #define VK_KHR_vertex_attribute_divisor 1 #define VK_KHR_VERTEX_ATTRIBUTE_DIVISOR_SPEC_VERSION 1 #define VK_KHR_VERTEX_ATTRIBUTE_DIVISOR_EXTENSION_NAME "VK_KHR_vertex_attribute_divisor" -typedef struct VkPhysicalDeviceVertexAttributeDivisorPropertiesKHR { - VkStructureType sType; - void* pNext; - uint32_t maxVertexAttribDivisor; - VkBool32 supportsNonZeroFirstInstance; -} VkPhysicalDeviceVertexAttributeDivisorPropertiesKHR; +typedef VkPhysicalDeviceVertexAttributeDivisorProperties VkPhysicalDeviceVertexAttributeDivisorPropertiesKHR; -typedef struct VkVertexInputBindingDivisorDescriptionKHR { - uint32_t binding; - uint32_t divisor; -} VkVertexInputBindingDivisorDescriptionKHR; +typedef VkVertexInputBindingDivisorDescription VkVertexInputBindingDivisorDescriptionKHR; -typedef struct VkPipelineVertexInputDivisorStateCreateInfoKHR { - VkStructureType sType; - const void* pNext; - uint32_t vertexBindingDivisorCount; - const VkVertexInputBindingDivisorDescriptionKHR* pVertexBindingDivisors; -} VkPipelineVertexInputDivisorStateCreateInfoKHR; +typedef VkPipelineVertexInputDivisorStateCreateInfo VkPipelineVertexInputDivisorStateCreateInfoKHR; -typedef struct VkPhysicalDeviceVertexAttributeDivisorFeaturesKHR { - VkStructureType sType; - void* pNext; - VkBool32 vertexAttributeInstanceRateDivisor; - VkBool32 vertexAttributeInstanceRateZeroDivisor; -} VkPhysicalDeviceVertexAttributeDivisorFeaturesKHR; +typedef VkPhysicalDeviceVertexAttributeDivisorFeatures VkPhysicalDeviceVertexAttributeDivisorFeaturesKHR; @@ -12135,11 +12491,7 @@ typedef struct VkPhysicalDeviceVertexAttributeDivisorFeaturesKHR { #define VK_KHR_shader_float_controls2 1 #define VK_KHR_SHADER_FLOAT_CONTROLS_2_SPEC_VERSION 1 #define VK_KHR_SHADER_FLOAT_CONTROLS_2_EXTENSION_NAME "VK_KHR_shader_float_controls2" -typedef struct VkPhysicalDeviceShaderFloatControls2FeaturesKHR { - VkStructureType sType; - void* pNext; - VkBool32 shaderFloatControls2; -} VkPhysicalDeviceShaderFloatControls2FeaturesKHR; +typedef VkPhysicalDeviceShaderFloatControls2Features VkPhysicalDeviceShaderFloatControls2FeaturesKHR; @@ -12147,11 +12499,7 @@ typedef struct VkPhysicalDeviceShaderFloatControls2FeaturesKHR { #define VK_KHR_index_type_uint8 1 #define VK_KHR_INDEX_TYPE_UINT8_SPEC_VERSION 1 #define VK_KHR_INDEX_TYPE_UINT8_EXTENSION_NAME "VK_KHR_index_type_uint8" -typedef struct VkPhysicalDeviceIndexTypeUint8FeaturesKHR { - VkStructureType sType; - void* pNext; - VkBool32 indexTypeUint8; -} VkPhysicalDeviceIndexTypeUint8FeaturesKHR; +typedef VkPhysicalDeviceIndexTypeUint8Features VkPhysicalDeviceIndexTypeUint8FeaturesKHR; @@ -12159,43 +12507,13 @@ typedef struct VkPhysicalDeviceIndexTypeUint8FeaturesKHR { #define VK_KHR_line_rasterization 1 #define VK_KHR_LINE_RASTERIZATION_SPEC_VERSION 1 #define VK_KHR_LINE_RASTERIZATION_EXTENSION_NAME "VK_KHR_line_rasterization" +typedef VkLineRasterizationMode VkLineRasterizationModeKHR; -typedef enum VkLineRasterizationModeKHR { - VK_LINE_RASTERIZATION_MODE_DEFAULT_KHR = 0, - VK_LINE_RASTERIZATION_MODE_RECTANGULAR_KHR = 1, - VK_LINE_RASTERIZATION_MODE_BRESENHAM_KHR = 2, - VK_LINE_RASTERIZATION_MODE_RECTANGULAR_SMOOTH_KHR = 3, - VK_LINE_RASTERIZATION_MODE_DEFAULT_EXT = VK_LINE_RASTERIZATION_MODE_DEFAULT_KHR, - VK_LINE_RASTERIZATION_MODE_RECTANGULAR_EXT = VK_LINE_RASTERIZATION_MODE_RECTANGULAR_KHR, - VK_LINE_RASTERIZATION_MODE_BRESENHAM_EXT = 
VK_LINE_RASTERIZATION_MODE_BRESENHAM_KHR, - VK_LINE_RASTERIZATION_MODE_RECTANGULAR_SMOOTH_EXT = VK_LINE_RASTERIZATION_MODE_RECTANGULAR_SMOOTH_KHR, - VK_LINE_RASTERIZATION_MODE_MAX_ENUM_KHR = 0x7FFFFFFF -} VkLineRasterizationModeKHR; -typedef struct VkPhysicalDeviceLineRasterizationFeaturesKHR { - VkStructureType sType; - void* pNext; - VkBool32 rectangularLines; - VkBool32 bresenhamLines; - VkBool32 smoothLines; - VkBool32 stippledRectangularLines; - VkBool32 stippledBresenhamLines; - VkBool32 stippledSmoothLines; -} VkPhysicalDeviceLineRasterizationFeaturesKHR; +typedef VkPhysicalDeviceLineRasterizationFeatures VkPhysicalDeviceLineRasterizationFeaturesKHR; -typedef struct VkPhysicalDeviceLineRasterizationPropertiesKHR { - VkStructureType sType; - void* pNext; - uint32_t lineSubPixelPrecisionBits; -} VkPhysicalDeviceLineRasterizationPropertiesKHR; +typedef VkPhysicalDeviceLineRasterizationProperties VkPhysicalDeviceLineRasterizationPropertiesKHR; -typedef struct VkPipelineRasterizationLineStateCreateInfoKHR { - VkStructureType sType; - const void* pNext; - VkLineRasterizationModeKHR lineRasterizationMode; - VkBool32 stippledLineEnable; - uint32_t lineStippleFactor; - uint16_t lineStipplePattern; -} VkPipelineRasterizationLineStateCreateInfoKHR; +typedef VkPipelineRasterizationLineStateCreateInfo VkPipelineRasterizationLineStateCreateInfoKHR; typedef void (VKAPI_PTR *PFN_vkCmdSetLineStippleKHR)(VkCommandBuffer commandBuffer, uint32_t lineStippleFactor, uint16_t lineStipplePattern); @@ -12251,11 +12569,7 @@ VKAPI_ATTR VkResult VKAPI_CALL vkGetCalibratedTimestampsKHR( #define VK_KHR_shader_expect_assume 1 #define VK_KHR_SHADER_EXPECT_ASSUME_SPEC_VERSION 1 #define VK_KHR_SHADER_EXPECT_ASSUME_EXTENSION_NAME "VK_KHR_shader_expect_assume" -typedef struct VkPhysicalDeviceShaderExpectAssumeFeaturesKHR { - VkStructureType sType; - void* pNext; - VkBool32 shaderExpectAssume; -} VkPhysicalDeviceShaderExpectAssumeFeaturesKHR; +typedef VkPhysicalDeviceShaderExpectAssumeFeatures VkPhysicalDeviceShaderExpectAssumeFeaturesKHR; @@ -12263,66 +12577,19 @@ typedef struct VkPhysicalDeviceShaderExpectAssumeFeaturesKHR { #define VK_KHR_maintenance6 1 #define VK_KHR_MAINTENANCE_6_SPEC_VERSION 1 #define VK_KHR_MAINTENANCE_6_EXTENSION_NAME "VK_KHR_maintenance6" -typedef struct VkPhysicalDeviceMaintenance6FeaturesKHR { - VkStructureType sType; - void* pNext; - VkBool32 maintenance6; -} VkPhysicalDeviceMaintenance6FeaturesKHR; +typedef VkPhysicalDeviceMaintenance6Features VkPhysicalDeviceMaintenance6FeaturesKHR; -typedef struct VkPhysicalDeviceMaintenance6PropertiesKHR { - VkStructureType sType; - void* pNext; - VkBool32 blockTexelViewCompatibleMultipleLayers; - uint32_t maxCombinedImageSamplerDescriptorCount; - VkBool32 fragmentShadingRateClampCombinerInputs; -} VkPhysicalDeviceMaintenance6PropertiesKHR; +typedef VkPhysicalDeviceMaintenance6Properties VkPhysicalDeviceMaintenance6PropertiesKHR; -typedef struct VkBindMemoryStatusKHR { - VkStructureType sType; - const void* pNext; - VkResult* pResult; -} VkBindMemoryStatusKHR; +typedef VkBindMemoryStatus VkBindMemoryStatusKHR; -typedef struct VkBindDescriptorSetsInfoKHR { - VkStructureType sType; - const void* pNext; - VkShaderStageFlags stageFlags; - VkPipelineLayout layout; - uint32_t firstSet; - uint32_t descriptorSetCount; - const VkDescriptorSet* pDescriptorSets; - uint32_t dynamicOffsetCount; - const uint32_t* pDynamicOffsets; -} VkBindDescriptorSetsInfoKHR; +typedef VkBindDescriptorSetsInfo VkBindDescriptorSetsInfoKHR; -typedef struct VkPushConstantsInfoKHR { - 
VkStructureType sType; - const void* pNext; - VkPipelineLayout layout; - VkShaderStageFlags stageFlags; - uint32_t offset; - uint32_t size; - const void* pValues; -} VkPushConstantsInfoKHR; +typedef VkPushConstantsInfo VkPushConstantsInfoKHR; -typedef struct VkPushDescriptorSetInfoKHR { - VkStructureType sType; - const void* pNext; - VkShaderStageFlags stageFlags; - VkPipelineLayout layout; - uint32_t set; - uint32_t descriptorWriteCount; - const VkWriteDescriptorSet* pDescriptorWrites; -} VkPushDescriptorSetInfoKHR; +typedef VkPushDescriptorSetInfo VkPushDescriptorSetInfoKHR; -typedef struct VkPushDescriptorSetWithTemplateInfoKHR { - VkStructureType sType; - const void* pNext; - VkDescriptorUpdateTemplate descriptorUpdateTemplate; - VkPipelineLayout layout; - uint32_t set; - const void* pData; -} VkPushDescriptorSetWithTemplateInfoKHR; +typedef VkPushDescriptorSetWithTemplateInfo VkPushDescriptorSetWithTemplateInfoKHR; typedef struct VkSetDescriptorBufferOffsetsInfoEXT { VkStructureType sType; @@ -12343,29 +12610,29 @@ typedef struct VkBindDescriptorBufferEmbeddedSamplersInfoEXT { uint32_t set; } VkBindDescriptorBufferEmbeddedSamplersInfoEXT; -typedef void (VKAPI_PTR *PFN_vkCmdBindDescriptorSets2KHR)(VkCommandBuffer commandBuffer, const VkBindDescriptorSetsInfoKHR* pBindDescriptorSetsInfo); -typedef void (VKAPI_PTR *PFN_vkCmdPushConstants2KHR)(VkCommandBuffer commandBuffer, const VkPushConstantsInfoKHR* pPushConstantsInfo); -typedef void (VKAPI_PTR *PFN_vkCmdPushDescriptorSet2KHR)(VkCommandBuffer commandBuffer, const VkPushDescriptorSetInfoKHR* pPushDescriptorSetInfo); -typedef void (VKAPI_PTR *PFN_vkCmdPushDescriptorSetWithTemplate2KHR)(VkCommandBuffer commandBuffer, const VkPushDescriptorSetWithTemplateInfoKHR* pPushDescriptorSetWithTemplateInfo); +typedef void (VKAPI_PTR *PFN_vkCmdBindDescriptorSets2KHR)(VkCommandBuffer commandBuffer, const VkBindDescriptorSetsInfo* pBindDescriptorSetsInfo); +typedef void (VKAPI_PTR *PFN_vkCmdPushConstants2KHR)(VkCommandBuffer commandBuffer, const VkPushConstantsInfo* pPushConstantsInfo); +typedef void (VKAPI_PTR *PFN_vkCmdPushDescriptorSet2KHR)(VkCommandBuffer commandBuffer, const VkPushDescriptorSetInfo* pPushDescriptorSetInfo); +typedef void (VKAPI_PTR *PFN_vkCmdPushDescriptorSetWithTemplate2KHR)(VkCommandBuffer commandBuffer, const VkPushDescriptorSetWithTemplateInfo* pPushDescriptorSetWithTemplateInfo); typedef void (VKAPI_PTR *PFN_vkCmdSetDescriptorBufferOffsets2EXT)(VkCommandBuffer commandBuffer, const VkSetDescriptorBufferOffsetsInfoEXT* pSetDescriptorBufferOffsetsInfo); typedef void (VKAPI_PTR *PFN_vkCmdBindDescriptorBufferEmbeddedSamplers2EXT)(VkCommandBuffer commandBuffer, const VkBindDescriptorBufferEmbeddedSamplersInfoEXT* pBindDescriptorBufferEmbeddedSamplersInfo); #ifndef VK_NO_PROTOTYPES VKAPI_ATTR void VKAPI_CALL vkCmdBindDescriptorSets2KHR( VkCommandBuffer commandBuffer, - const VkBindDescriptorSetsInfoKHR* pBindDescriptorSetsInfo); + const VkBindDescriptorSetsInfo* pBindDescriptorSetsInfo); VKAPI_ATTR void VKAPI_CALL vkCmdPushConstants2KHR( VkCommandBuffer commandBuffer, - const VkPushConstantsInfoKHR* pPushConstantsInfo); + const VkPushConstantsInfo* pPushConstantsInfo); VKAPI_ATTR void VKAPI_CALL vkCmdPushDescriptorSet2KHR( VkCommandBuffer commandBuffer, - const VkPushDescriptorSetInfoKHR* pPushDescriptorSetInfo); + const VkPushDescriptorSetInfo* pPushDescriptorSetInfo); VKAPI_ATTR void VKAPI_CALL vkCmdPushDescriptorSetWithTemplate2KHR( VkCommandBuffer commandBuffer, - const VkPushDescriptorSetWithTemplateInfoKHR* 
pPushDescriptorSetWithTemplateInfo); + const VkPushDescriptorSetWithTemplateInfo* pPushDescriptorSetWithTemplateInfo); VKAPI_ATTR void VKAPI_CALL vkCmdSetDescriptorBufferOffsets2EXT( VkCommandBuffer commandBuffer, @@ -12377,6 +12644,76 @@ VKAPI_ATTR void VKAPI_CALL vkCmdBindDescriptorBufferEmbeddedSamplers2EXT( #endif +// VK_KHR_video_encode_quantization_map is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_video_encode_quantization_map 1 +#define VK_KHR_VIDEO_ENCODE_QUANTIZATION_MAP_SPEC_VERSION 2 +#define VK_KHR_VIDEO_ENCODE_QUANTIZATION_MAP_EXTENSION_NAME "VK_KHR_video_encode_quantization_map" +typedef struct VkVideoEncodeQuantizationMapCapabilitiesKHR { + VkStructureType sType; + void* pNext; + VkExtent2D maxQuantizationMapExtent; +} VkVideoEncodeQuantizationMapCapabilitiesKHR; + +typedef struct VkVideoFormatQuantizationMapPropertiesKHR { + VkStructureType sType; + void* pNext; + VkExtent2D quantizationMapTexelSize; +} VkVideoFormatQuantizationMapPropertiesKHR; + +typedef struct VkVideoEncodeQuantizationMapInfoKHR { + VkStructureType sType; + const void* pNext; + VkImageView quantizationMap; + VkExtent2D quantizationMapExtent; +} VkVideoEncodeQuantizationMapInfoKHR; + +typedef struct VkVideoEncodeQuantizationMapSessionParametersCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkExtent2D quantizationMapTexelSize; +} VkVideoEncodeQuantizationMapSessionParametersCreateInfoKHR; + +typedef struct VkPhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 videoEncodeQuantizationMap; +} VkPhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR; + +typedef struct VkVideoEncodeH264QuantizationMapCapabilitiesKHR { + VkStructureType sType; + void* pNext; + int32_t minQpDelta; + int32_t maxQpDelta; +} VkVideoEncodeH264QuantizationMapCapabilitiesKHR; + +typedef struct VkVideoEncodeH265QuantizationMapCapabilitiesKHR { + VkStructureType sType; + void* pNext; + int32_t minQpDelta; + int32_t maxQpDelta; +} VkVideoEncodeH265QuantizationMapCapabilitiesKHR; + +typedef struct VkVideoFormatH265QuantizationMapPropertiesKHR { + VkStructureType sType; + void* pNext; + VkVideoEncodeH265CtbSizeFlagsKHR compatibleCtbSizes; +} VkVideoFormatH265QuantizationMapPropertiesKHR; + +typedef struct VkVideoEncodeAV1QuantizationMapCapabilitiesKHR { + VkStructureType sType; + void* pNext; + int32_t minQIndexDelta; + int32_t maxQIndexDelta; +} VkVideoEncodeAV1QuantizationMapCapabilitiesKHR; + +typedef struct VkVideoFormatAV1QuantizationMapPropertiesKHR { + VkStructureType sType; + void* pNext; + VkVideoEncodeAV1SuperblockSizeFlagsKHR compatibleSuperblockSizes; +} VkVideoFormatAV1QuantizationMapPropertiesKHR; + + + // VK_KHR_shader_relaxed_extended_instruction is a preprocessor guard. Do not pass it to API calls. 
#define VK_KHR_shader_relaxed_extended_instruction 1 #define VK_KHR_SHADER_RELAXED_EXTENDED_INSTRUCTION_SPEC_VERSION 1 @@ -12493,10 +12830,8 @@ typedef enum VkDebugReportObjectTypeEXT { VK_DEBUG_REPORT_OBJECT_TYPE_CUDA_MODULE_NV_EXT = 1000307000, VK_DEBUG_REPORT_OBJECT_TYPE_CUDA_FUNCTION_NV_EXT = 1000307001, VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_COLLECTION_FUCHSIA_EXT = 1000366000, - // VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_EXT is a deprecated alias // VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_EXT is a deprecated alias VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_EXT = VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT_EXT, - // VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT is a deprecated alias // VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT is a deprecated alias VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT = VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT, VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR_EXT = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_EXT, @@ -13146,45 +13481,15 @@ typedef struct VkPhysicalDeviceASTCDecodeFeaturesEXT { #define VK_EXT_pipeline_robustness 1 #define VK_EXT_PIPELINE_ROBUSTNESS_SPEC_VERSION 1 #define VK_EXT_PIPELINE_ROBUSTNESS_EXTENSION_NAME "VK_EXT_pipeline_robustness" +typedef VkPipelineRobustnessBufferBehavior VkPipelineRobustnessBufferBehaviorEXT; -typedef enum VkPipelineRobustnessBufferBehaviorEXT { - VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_DEVICE_DEFAULT_EXT = 0, - VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_DISABLED_EXT = 1, - VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_ROBUST_BUFFER_ACCESS_EXT = 2, - VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_ROBUST_BUFFER_ACCESS_2_EXT = 3, - VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_MAX_ENUM_EXT = 0x7FFFFFFF -} VkPipelineRobustnessBufferBehaviorEXT; +typedef VkPipelineRobustnessImageBehavior VkPipelineRobustnessImageBehaviorEXT; -typedef enum VkPipelineRobustnessImageBehaviorEXT { - VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_DEVICE_DEFAULT_EXT = 0, - VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_DISABLED_EXT = 1, - VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_ROBUST_IMAGE_ACCESS_EXT = 2, - VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_ROBUST_IMAGE_ACCESS_2_EXT = 3, - VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_MAX_ENUM_EXT = 0x7FFFFFFF -} VkPipelineRobustnessImageBehaviorEXT; -typedef struct VkPhysicalDevicePipelineRobustnessFeaturesEXT { - VkStructureType sType; - void* pNext; - VkBool32 pipelineRobustness; -} VkPhysicalDevicePipelineRobustnessFeaturesEXT; +typedef VkPhysicalDevicePipelineRobustnessFeatures VkPhysicalDevicePipelineRobustnessFeaturesEXT; -typedef struct VkPhysicalDevicePipelineRobustnessPropertiesEXT { - VkStructureType sType; - void* pNext; - VkPipelineRobustnessBufferBehaviorEXT defaultRobustnessStorageBuffers; - VkPipelineRobustnessBufferBehaviorEXT defaultRobustnessUniformBuffers; - VkPipelineRobustnessBufferBehaviorEXT defaultRobustnessVertexInputs; - VkPipelineRobustnessImageBehaviorEXT defaultRobustnessImages; -} VkPhysicalDevicePipelineRobustnessPropertiesEXT; +typedef VkPhysicalDevicePipelineRobustnessProperties VkPhysicalDevicePipelineRobustnessPropertiesEXT; -typedef struct VkPipelineRobustnessCreateInfoEXT { - VkStructureType sType; - const void* pNext; - VkPipelineRobustnessBufferBehaviorEXT storageBuffers; - VkPipelineRobustnessBufferBehaviorEXT uniformBuffers; - VkPipelineRobustnessBufferBehaviorEXT vertexInputs; - VkPipelineRobustnessImageBehaviorEXT images; -} VkPipelineRobustnessCreateInfoEXT; +typedef VkPipelineRobustnessCreateInfo VkPipelineRobustnessCreateInfoEXT; @@ -13280,7 +13585,6 @@ 
VKAPI_ATTR VkResult VKAPI_CALL vkReleaseDisplayEXT( typedef enum VkSurfaceCounterFlagBitsEXT { VK_SURFACE_COUNTER_VBLANK_BIT_EXT = 0x00000001, - // VK_SURFACE_COUNTER_VBLANK_EXT is a deprecated alias // VK_SURFACE_COUNTER_VBLANK_EXT is a deprecated alias VK_SURFACE_COUNTER_VBLANK_EXT = VK_SURFACE_COUNTER_VBLANK_BIT_EXT, VK_SURFACE_COUNTER_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF @@ -13451,10 +13755,8 @@ VKAPI_ATTR VkResult VKAPI_CALL vkGetPastPresentationTimingGOOGLE( #define VK_NV_VIEWPORT_ARRAY_2_SPEC_VERSION 1 #define VK_NV_VIEWPORT_ARRAY_2_EXTENSION_NAME "VK_NV_viewport_array2" // VK_NV_VIEWPORT_ARRAY2_SPEC_VERSION is a deprecated alias -// VK_NV_VIEWPORT_ARRAY2_SPEC_VERSION is a deprecated alias #define VK_NV_VIEWPORT_ARRAY2_SPEC_VERSION VK_NV_VIEWPORT_ARRAY_2_SPEC_VERSION // VK_NV_VIEWPORT_ARRAY2_EXTENSION_NAME is a deprecated alias -// VK_NV_VIEWPORT_ARRAY2_EXTENSION_NAME is a deprecated alias #define VK_NV_VIEWPORT_ARRAY2_EXTENSION_NAME VK_NV_VIEWPORT_ARRAY_2_EXTENSION_NAME @@ -13475,13 +13777,6 @@ typedef struct VkMultiviewPerViewAttributesInfoNVX { VkBool32 perViewAttributesPositionXOnly; } VkMultiviewPerViewAttributesInfoNVX; -typedef struct VkMultiviewPerViewAttributesInfoNVX { - VkStructureType sType; - const void* pNext; - VkBool32 perViewAttributes; - VkBool32 perViewAttributesPositionXOnly; -} VkMultiviewPerViewAttributesInfoNVX; - // VK_NV_viewport_swizzle is a preprocessor guard. Do not pass it to API calls. @@ -13624,14 +13919,12 @@ typedef struct VkPipelineRasterizationDepthClipStateCreateInfoEXT { // VK_EXT_swapchain_colorspace is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_swapchain_colorspace 1 #define VK_EXT_SWAPCHAIN_COLOR_SPACE_SPEC_VERSION 5 -#define VK_EXT_SWAPCHAIN_COLOR_SPACE_SPEC_VERSION 5 #define VK_EXT_SWAPCHAIN_COLOR_SPACE_EXTENSION_NAME "VK_EXT_swapchain_colorspace" // VK_EXT_hdr_metadata is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_hdr_metadata 1 #define VK_EXT_HDR_METADATA_SPEC_VERSION 3 -#define VK_EXT_HDR_METADATA_SPEC_VERSION 3 #define VK_EXT_HDR_METADATA_EXTENSION_NAME "VK_EXT_hdr_metadata" typedef struct VkXYColorEXT { float x; @@ -13860,14 +14153,6 @@ typedef struct VkAttachmentSampleCountInfoAMD { VkSampleCountFlagBits depthStencilAttachmentSamples; } VkAttachmentSampleCountInfoAMD; -typedef struct VkAttachmentSampleCountInfoAMD { - VkStructureType sType; - const void* pNext; - uint32_t colorAttachmentCount; - const VkSampleCountFlagBits* pColorAttachmentSamples; - VkSampleCountFlagBits depthStencilAttachmentSamples; -} VkAttachmentSampleCountInfoAMD; - // VK_AMD_shader_fragment_mask is a preprocessor guard. Do not pass it to API calls. @@ -14049,8 +14334,6 @@ typedef struct VkPipelineCoverageModulationStateCreateInfoNV { typedef VkAttachmentSampleCountInfoAMD VkAttachmentSampleCountInfoNV; -typedef VkAttachmentSampleCountInfoAMD VkAttachmentSampleCountInfoNV; - // VK_NV_fill_rectangle is a preprocessor guard. Do not pass it to API calls. 
@@ -14763,9 +15046,9 @@ typedef struct VkFilterCubicImageViewImageFormatPropertiesEXT { #define VK_EXT_global_priority 1 #define VK_EXT_GLOBAL_PRIORITY_SPEC_VERSION 2 #define VK_EXT_GLOBAL_PRIORITY_EXTENSION_NAME "VK_EXT_global_priority" -typedef VkQueueGlobalPriorityKHR VkQueueGlobalPriorityEXT; +typedef VkQueueGlobalPriority VkQueueGlobalPriorityEXT; -typedef VkDeviceQueueGlobalPriorityCreateInfoKHR VkDeviceQueueGlobalPriorityCreateInfoEXT; +typedef VkDeviceQueueGlobalPriorityCreateInfo VkDeviceQueueGlobalPriorityCreateInfoEXT; @@ -14809,7 +15092,6 @@ VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryHostPointerPropertiesEXT( #define VK_AMD_BUFFER_MARKER_EXTENSION_NAME "VK_AMD_buffer_marker" typedef void (VKAPI_PTR *PFN_vkCmdWriteBufferMarkerAMD)(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage, VkBuffer dstBuffer, VkDeviceSize dstOffset, uint32_t marker); typedef void (VKAPI_PTR *PFN_vkCmdWriteBufferMarker2AMD)(VkCommandBuffer commandBuffer, VkPipelineStageFlags2 stage, VkBuffer dstBuffer, VkDeviceSize dstOffset, uint32_t marker); -typedef void (VKAPI_PTR *PFN_vkCmdWriteBufferMarker2AMD)(VkCommandBuffer commandBuffer, VkPipelineStageFlags2 stage, VkBuffer dstBuffer, VkDeviceSize dstOffset, uint32_t marker); #ifndef VK_NO_PROTOTYPES VKAPI_ATTR void VKAPI_CALL vkCmdWriteBufferMarkerAMD( @@ -14819,13 +15101,6 @@ VKAPI_ATTR void VKAPI_CALL vkCmdWriteBufferMarkerAMD( VkDeviceSize dstOffset, uint32_t marker); -VKAPI_ATTR void VKAPI_CALL vkCmdWriteBufferMarker2AMD( - VkCommandBuffer commandBuffer, - VkPipelineStageFlags2 stage, - VkBuffer dstBuffer, - VkDeviceSize dstOffset, - uint32_t marker); - VKAPI_ATTR void VKAPI_CALL vkCmdWriteBufferMarker2AMD( VkCommandBuffer commandBuffer, VkPipelineStageFlags2 stage, @@ -14932,11 +15207,11 @@ typedef struct VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT { uint32_t maxVertexAttribDivisor; } VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT; -typedef VkVertexInputBindingDivisorDescriptionKHR VkVertexInputBindingDivisorDescriptionEXT; +typedef VkVertexInputBindingDivisorDescription VkVertexInputBindingDivisorDescriptionEXT; -typedef VkPipelineVertexInputDivisorStateCreateInfoKHR VkPipelineVertexInputDivisorStateCreateInfoEXT; +typedef VkPipelineVertexInputDivisorStateCreateInfo VkPipelineVertexInputDivisorStateCreateInfoEXT; -typedef VkPhysicalDeviceVertexAttributeDivisorFeaturesKHR VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT; +typedef VkPhysicalDeviceVertexAttributeDivisorFeatures VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT; @@ -14965,7 +15240,6 @@ typedef VkPipelineCreationFeedback VkPipelineCreationFeedbackEXT; #define VK_NV_COMPUTE_SHADER_DERIVATIVES_SPEC_VERSION 1 #define VK_NV_COMPUTE_SHADER_DERIVATIVES_EXTENSION_NAME "VK_NV_compute_shader_derivatives" typedef VkPhysicalDeviceComputeShaderDerivativesFeaturesKHR VkPhysicalDeviceComputeShaderDerivativesFeaturesNV; -typedef VkPhysicalDeviceComputeShaderDerivativesFeaturesKHR VkPhysicalDeviceComputeShaderDerivativesFeaturesNV; @@ -15116,23 +15390,9 @@ typedef struct VkCheckpointData2NV { void* pCheckpointMarker; } VkCheckpointData2NV; -typedef struct VkQueueFamilyCheckpointProperties2NV { - VkStructureType sType; - void* pNext; - VkPipelineStageFlags2 checkpointExecutionStageMask; -} VkQueueFamilyCheckpointProperties2NV; - -typedef struct VkCheckpointData2NV { - VkStructureType sType; - void* pNext; - VkPipelineStageFlags2 stage; - void* pCheckpointMarker; -} VkCheckpointData2NV; - typedef void (VKAPI_PTR *PFN_vkCmdSetCheckpointNV)(VkCommandBuffer commandBuffer, const 
void* pCheckpointMarker); typedef void (VKAPI_PTR *PFN_vkGetQueueCheckpointDataNV)(VkQueue queue, uint32_t* pCheckpointDataCount, VkCheckpointDataNV* pCheckpointData); typedef void (VKAPI_PTR *PFN_vkGetQueueCheckpointData2NV)(VkQueue queue, uint32_t* pCheckpointDataCount, VkCheckpointData2NV* pCheckpointData); -typedef void (VKAPI_PTR *PFN_vkGetQueueCheckpointData2NV)(VkQueue queue, uint32_t* pCheckpointDataCount, VkCheckpointData2NV* pCheckpointData); #ifndef VK_NO_PROTOTYPES VKAPI_ATTR void VKAPI_CALL vkCmdSetCheckpointNV( @@ -15144,11 +15404,6 @@ VKAPI_ATTR void VKAPI_CALL vkGetQueueCheckpointDataNV( uint32_t* pCheckpointDataCount, VkCheckpointDataNV* pCheckpointData); -VKAPI_ATTR void VKAPI_CALL vkGetQueueCheckpointData2NV( - VkQueue queue, - uint32_t* pCheckpointDataCount, - VkCheckpointData2NV* pCheckpointData); - VKAPI_ATTR void VKAPI_CALL vkGetQueueCheckpointData2NV( VkQueue queue, uint32_t* pCheckpointDataCount, @@ -15381,13 +15636,6 @@ typedef struct VkRenderingFragmentDensityMapAttachmentInfoEXT { VkImageLayout imageLayout; } VkRenderingFragmentDensityMapAttachmentInfoEXT; -typedef struct VkRenderingFragmentDensityMapAttachmentInfoEXT { - VkStructureType sType; - const void* pNext; - VkImageView imageView; - VkImageLayout imageLayout; -} VkRenderingFragmentDensityMapAttachmentInfoEXT; - // VK_EXT_scalar_block_layout is a preprocessor guard. Do not pass it to API calls. @@ -15403,10 +15651,8 @@ typedef VkPhysicalDeviceScalarBlockLayoutFeatures VkPhysicalDeviceScalarBlockLay #define VK_GOOGLE_HLSL_FUNCTIONALITY_1_SPEC_VERSION 1 #define VK_GOOGLE_HLSL_FUNCTIONALITY_1_EXTENSION_NAME "VK_GOOGLE_hlsl_functionality1" // VK_GOOGLE_HLSL_FUNCTIONALITY1_SPEC_VERSION is a deprecated alias -// VK_GOOGLE_HLSL_FUNCTIONALITY1_SPEC_VERSION is a deprecated alias #define VK_GOOGLE_HLSL_FUNCTIONALITY1_SPEC_VERSION VK_GOOGLE_HLSL_FUNCTIONALITY_1_SPEC_VERSION // VK_GOOGLE_HLSL_FUNCTIONALITY1_EXTENSION_NAME is a deprecated alias -// VK_GOOGLE_HLSL_FUNCTIONALITY1_EXTENSION_NAME is a deprecated alias #define VK_GOOGLE_HLSL_FUNCTIONALITY1_EXTENSION_NAME VK_GOOGLE_HLSL_FUNCTIONALITY_1_EXTENSION_NAME @@ -15780,13 +16026,13 @@ VKAPI_ATTR VkResult VKAPI_CALL vkCreateHeadlessSurfaceEXT( #define VK_EXT_line_rasterization 1 #define VK_EXT_LINE_RASTERIZATION_SPEC_VERSION 1 #define VK_EXT_LINE_RASTERIZATION_EXTENSION_NAME "VK_EXT_line_rasterization" -typedef VkLineRasterizationModeKHR VkLineRasterizationModeEXT; +typedef VkLineRasterizationMode VkLineRasterizationModeEXT; -typedef VkPhysicalDeviceLineRasterizationFeaturesKHR VkPhysicalDeviceLineRasterizationFeaturesEXT; +typedef VkPhysicalDeviceLineRasterizationFeatures VkPhysicalDeviceLineRasterizationFeaturesEXT; -typedef VkPhysicalDeviceLineRasterizationPropertiesKHR VkPhysicalDeviceLineRasterizationPropertiesEXT; +typedef VkPhysicalDeviceLineRasterizationProperties VkPhysicalDeviceLineRasterizationPropertiesEXT; -typedef VkPipelineRasterizationLineStateCreateInfoKHR VkPipelineRasterizationLineStateCreateInfoEXT; +typedef VkPipelineRasterizationLineStateCreateInfo VkPipelineRasterizationLineStateCreateInfoEXT; typedef void (VKAPI_PTR *PFN_vkCmdSetLineStippleEXT)(VkCommandBuffer commandBuffer, uint32_t lineStippleFactor, uint16_t lineStipplePattern); @@ -15842,7 +16088,7 @@ VKAPI_ATTR void VKAPI_CALL vkResetQueryPoolEXT( #define VK_EXT_index_type_uint8 1 #define VK_EXT_INDEX_TYPE_UINT8_SPEC_VERSION 1 #define VK_EXT_INDEX_TYPE_UINT8_EXTENSION_NAME "VK_EXT_index_type_uint8" -typedef VkPhysicalDeviceIndexTypeUint8FeaturesKHR 
VkPhysicalDeviceIndexTypeUint8FeaturesEXT; +typedef VkPhysicalDeviceIndexTypeUint8Features VkPhysicalDeviceIndexTypeUint8FeaturesEXT; @@ -15935,138 +16181,63 @@ VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilOpEXT( #define VK_EXT_host_image_copy 1 #define VK_EXT_HOST_IMAGE_COPY_SPEC_VERSION 1 #define VK_EXT_HOST_IMAGE_COPY_EXTENSION_NAME "VK_EXT_host_image_copy" +typedef VkHostImageCopyFlagBits VkHostImageCopyFlagBitsEXT; -typedef enum VkHostImageCopyFlagBitsEXT { - VK_HOST_IMAGE_COPY_MEMCPY_EXT = 0x00000001, - VK_HOST_IMAGE_COPY_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF -} VkHostImageCopyFlagBitsEXT; -typedef VkFlags VkHostImageCopyFlagsEXT; -typedef struct VkPhysicalDeviceHostImageCopyFeaturesEXT { - VkStructureType sType; - void* pNext; - VkBool32 hostImageCopy; -} VkPhysicalDeviceHostImageCopyFeaturesEXT; +typedef VkHostImageCopyFlags VkHostImageCopyFlagsEXT; -typedef struct VkPhysicalDeviceHostImageCopyPropertiesEXT { - VkStructureType sType; - void* pNext; - uint32_t copySrcLayoutCount; - VkImageLayout* pCopySrcLayouts; - uint32_t copyDstLayoutCount; - VkImageLayout* pCopyDstLayouts; - uint8_t optimalTilingLayoutUUID[VK_UUID_SIZE]; - VkBool32 identicalMemoryTypeRequirements; -} VkPhysicalDeviceHostImageCopyPropertiesEXT; +typedef VkPhysicalDeviceHostImageCopyFeatures VkPhysicalDeviceHostImageCopyFeaturesEXT; -typedef struct VkMemoryToImageCopyEXT { - VkStructureType sType; - const void* pNext; - const void* pHostPointer; - uint32_t memoryRowLength; - uint32_t memoryImageHeight; - VkImageSubresourceLayers imageSubresource; - VkOffset3D imageOffset; - VkExtent3D imageExtent; -} VkMemoryToImageCopyEXT; +typedef VkPhysicalDeviceHostImageCopyProperties VkPhysicalDeviceHostImageCopyPropertiesEXT; -typedef struct VkImageToMemoryCopyEXT { - VkStructureType sType; - const void* pNext; - void* pHostPointer; - uint32_t memoryRowLength; - uint32_t memoryImageHeight; - VkImageSubresourceLayers imageSubresource; - VkOffset3D imageOffset; - VkExtent3D imageExtent; -} VkImageToMemoryCopyEXT; +typedef VkMemoryToImageCopy VkMemoryToImageCopyEXT; -typedef struct VkCopyMemoryToImageInfoEXT { - VkStructureType sType; - const void* pNext; - VkHostImageCopyFlagsEXT flags; - VkImage dstImage; - VkImageLayout dstImageLayout; - uint32_t regionCount; - const VkMemoryToImageCopyEXT* pRegions; -} VkCopyMemoryToImageInfoEXT; +typedef VkImageToMemoryCopy VkImageToMemoryCopyEXT; -typedef struct VkCopyImageToMemoryInfoEXT { - VkStructureType sType; - const void* pNext; - VkHostImageCopyFlagsEXT flags; - VkImage srcImage; - VkImageLayout srcImageLayout; - uint32_t regionCount; - const VkImageToMemoryCopyEXT* pRegions; -} VkCopyImageToMemoryInfoEXT; +typedef VkCopyMemoryToImageInfo VkCopyMemoryToImageInfoEXT; -typedef struct VkCopyImageToImageInfoEXT { - VkStructureType sType; - const void* pNext; - VkHostImageCopyFlagsEXT flags; - VkImage srcImage; - VkImageLayout srcImageLayout; - VkImage dstImage; - VkImageLayout dstImageLayout; - uint32_t regionCount; - const VkImageCopy2* pRegions; -} VkCopyImageToImageInfoEXT; +typedef VkCopyImageToMemoryInfo VkCopyImageToMemoryInfoEXT; -typedef struct VkHostImageLayoutTransitionInfoEXT { - VkStructureType sType; - const void* pNext; - VkImage image; - VkImageLayout oldLayout; - VkImageLayout newLayout; - VkImageSubresourceRange subresourceRange; -} VkHostImageLayoutTransitionInfoEXT; +typedef VkCopyImageToImageInfo VkCopyImageToImageInfoEXT; -typedef struct VkSubresourceHostMemcpySizeEXT { - VkStructureType sType; - void* pNext; - VkDeviceSize size; -} VkSubresourceHostMemcpySizeEXT; 
+typedef VkHostImageLayoutTransitionInfo VkHostImageLayoutTransitionInfoEXT; -typedef struct VkHostImageCopyDevicePerformanceQueryEXT { - VkStructureType sType; - void* pNext; - VkBool32 optimalDeviceAccess; - VkBool32 identicalMemoryLayout; -} VkHostImageCopyDevicePerformanceQueryEXT; +typedef VkSubresourceHostMemcpySize VkSubresourceHostMemcpySizeEXT; -typedef VkSubresourceLayout2KHR VkSubresourceLayout2EXT; +typedef VkHostImageCopyDevicePerformanceQuery VkHostImageCopyDevicePerformanceQueryEXT; -typedef VkImageSubresource2KHR VkImageSubresource2EXT; +typedef VkSubresourceLayout2 VkSubresourceLayout2EXT; -typedef VkResult (VKAPI_PTR *PFN_vkCopyMemoryToImageEXT)(VkDevice device, const VkCopyMemoryToImageInfoEXT* pCopyMemoryToImageInfo); -typedef VkResult (VKAPI_PTR *PFN_vkCopyImageToMemoryEXT)(VkDevice device, const VkCopyImageToMemoryInfoEXT* pCopyImageToMemoryInfo); -typedef VkResult (VKAPI_PTR *PFN_vkCopyImageToImageEXT)(VkDevice device, const VkCopyImageToImageInfoEXT* pCopyImageToImageInfo); -typedef VkResult (VKAPI_PTR *PFN_vkTransitionImageLayoutEXT)(VkDevice device, uint32_t transitionCount, const VkHostImageLayoutTransitionInfoEXT* pTransitions); -typedef void (VKAPI_PTR *PFN_vkGetImageSubresourceLayout2EXT)(VkDevice device, VkImage image, const VkImageSubresource2KHR* pSubresource, VkSubresourceLayout2KHR* pLayout); +typedef VkImageSubresource2 VkImageSubresource2EXT; + +typedef VkResult (VKAPI_PTR *PFN_vkCopyMemoryToImageEXT)(VkDevice device, const VkCopyMemoryToImageInfo* pCopyMemoryToImageInfo); +typedef VkResult (VKAPI_PTR *PFN_vkCopyImageToMemoryEXT)(VkDevice device, const VkCopyImageToMemoryInfo* pCopyImageToMemoryInfo); +typedef VkResult (VKAPI_PTR *PFN_vkCopyImageToImageEXT)(VkDevice device, const VkCopyImageToImageInfo* pCopyImageToImageInfo); +typedef VkResult (VKAPI_PTR *PFN_vkTransitionImageLayoutEXT)(VkDevice device, uint32_t transitionCount, const VkHostImageLayoutTransitionInfo* pTransitions); +typedef void (VKAPI_PTR *PFN_vkGetImageSubresourceLayout2EXT)(VkDevice device, VkImage image, const VkImageSubresource2* pSubresource, VkSubresourceLayout2* pLayout); #ifndef VK_NO_PROTOTYPES VKAPI_ATTR VkResult VKAPI_CALL vkCopyMemoryToImageEXT( VkDevice device, - const VkCopyMemoryToImageInfoEXT* pCopyMemoryToImageInfo); + const VkCopyMemoryToImageInfo* pCopyMemoryToImageInfo); VKAPI_ATTR VkResult VKAPI_CALL vkCopyImageToMemoryEXT( VkDevice device, - const VkCopyImageToMemoryInfoEXT* pCopyImageToMemoryInfo); + const VkCopyImageToMemoryInfo* pCopyImageToMemoryInfo); VKAPI_ATTR VkResult VKAPI_CALL vkCopyImageToImageEXT( VkDevice device, - const VkCopyImageToImageInfoEXT* pCopyImageToImageInfo); + const VkCopyImageToImageInfo* pCopyImageToImageInfo); VKAPI_ATTR VkResult VKAPI_CALL vkTransitionImageLayoutEXT( VkDevice device, uint32_t transitionCount, - const VkHostImageLayoutTransitionInfoEXT* pTransitions); + const VkHostImageLayoutTransitionInfo* pTransitions); VKAPI_ATTR void VKAPI_CALL vkGetImageSubresourceLayout2EXT( VkDevice device, VkImage image, - const VkImageSubresource2KHR* pSubresource, - VkSubresourceLayout2KHR* pLayout); + const VkImageSubresource2* pSubresource, + VkSubresourceLayout2* pLayout); #endif @@ -16924,7 +17095,6 @@ typedef struct VkDescriptorAddressInfoEXT { typedef struct VkDescriptorBufferBindingInfoEXT { VkStructureType sType; const void* pNext; - const void* pNext; VkDeviceAddress address; VkBufferUsageFlags usage; } VkDescriptorBufferBindingInfoEXT; @@ -16932,7 +17102,6 @@ typedef struct VkDescriptorBufferBindingInfoEXT { typedef struct 
VkDescriptorBufferBindingPushDescriptorBufferHandleEXT { VkStructureType sType; const void* pNext; - const void* pNext; VkBuffer buffer; } VkDescriptorBufferBindingPushDescriptorBufferHandleEXT; @@ -17680,18 +17849,6 @@ typedef struct VkPhysicalDevicePresentModeFifoLatestReadyFeaturesEXT { -// VK_EXT_present_mode_fifo_latest_ready is a preprocessor guard. Do not pass it to API calls. -#define VK_EXT_present_mode_fifo_latest_ready 1 -#define VK_EXT_PRESENT_MODE_FIFO_LATEST_READY_SPEC_VERSION 1 -#define VK_EXT_PRESENT_MODE_FIFO_LATEST_READY_EXTENSION_NAME "VK_EXT_present_mode_fifo_latest_ready" -typedef struct VkPhysicalDevicePresentModeFifoLatestReadyFeaturesEXT { - VkStructureType sType; - void* pNext; - VkBool32 presentModeFifoLatestReady; -} VkPhysicalDevicePresentModeFifoLatestReadyFeaturesEXT; - - - // VK_HUAWEI_subpass_shading is a preprocessor guard. Do not pass it to API calls. #define VK_HUAWEI_subpass_shading 1 #define VK_HUAWEI_SUBPASS_SHADING_SPEC_VERSION 3 @@ -17948,10 +18105,10 @@ typedef struct VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT { #define VK_EXT_global_priority_query 1 #define VK_EXT_GLOBAL_PRIORITY_QUERY_SPEC_VERSION 1 #define VK_EXT_GLOBAL_PRIORITY_QUERY_EXTENSION_NAME "VK_EXT_global_priority_query" -#define VK_MAX_GLOBAL_PRIORITY_SIZE_EXT VK_MAX_GLOBAL_PRIORITY_SIZE_KHR -typedef VkPhysicalDeviceGlobalPriorityQueryFeaturesKHR VkPhysicalDeviceGlobalPriorityQueryFeaturesEXT; +#define VK_MAX_GLOBAL_PRIORITY_SIZE_EXT VK_MAX_GLOBAL_PRIORITY_SIZE +typedef VkPhysicalDeviceGlobalPriorityQueryFeatures VkPhysicalDeviceGlobalPriorityQueryFeaturesEXT; -typedef VkQueueFamilyGlobalPriorityPropertiesKHR VkQueueFamilyGlobalPriorityPropertiesEXT; +typedef VkQueueFamilyGlobalPriorityProperties VkQueueFamilyGlobalPriorityPropertiesEXT; @@ -18589,7 +18746,6 @@ typedef struct VkRenderPassStripeSubmitInfoARM { // VK_QCOM_fragment_density_map_offset is a preprocessor guard. Do not pass it to API calls. #define VK_QCOM_fragment_density_map_offset 1 #define VK_QCOM_FRAGMENT_DENSITY_MAP_OFFSET_SPEC_VERSION 2 -#define VK_QCOM_FRAGMENT_DENSITY_MAP_OFFSET_SPEC_VERSION 2 #define VK_QCOM_FRAGMENT_DENSITY_MAP_OFFSET_EXTENSION_NAME "VK_QCOM_fragment_density_map_offset" typedef struct VkPhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM { VkStructureType sType; @@ -19401,7 +19557,6 @@ VKAPI_ATTR void VKAPI_CALL vkCmdOpticalFlowExecuteNV( // VK_EXT_legacy_dithering is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_legacy_dithering 1 #define VK_EXT_LEGACY_DITHERING_SPEC_VERSION 2 -#define VK_EXT_LEGACY_DITHERING_SPEC_VERSION 2 #define VK_EXT_LEGACY_DITHERING_EXTENSION_NAME "VK_EXT_legacy_dithering" typedef struct VkPhysicalDeviceLegacyDitheringFeaturesEXT { VkStructureType sType; @@ -19415,61 +19570,10 @@ typedef struct VkPhysicalDeviceLegacyDitheringFeaturesEXT { #define VK_EXT_pipeline_protected_access 1 #define VK_EXT_PIPELINE_PROTECTED_ACCESS_SPEC_VERSION 1 #define VK_EXT_PIPELINE_PROTECTED_ACCESS_EXTENSION_NAME "VK_EXT_pipeline_protected_access" -typedef struct VkPhysicalDevicePipelineProtectedAccessFeaturesEXT { - VkStructureType sType; - void* pNext; - VkBool32 pipelineProtectedAccess; -} VkPhysicalDevicePipelineProtectedAccessFeaturesEXT; +typedef VkPhysicalDevicePipelineProtectedAccessFeatures VkPhysicalDevicePipelineProtectedAccessFeaturesEXT; -// VK_AMD_anti_lag is a preprocessor guard. Do not pass it to API calls. 
-#define VK_AMD_anti_lag 1 -#define VK_AMD_ANTI_LAG_SPEC_VERSION 1 -#define VK_AMD_ANTI_LAG_EXTENSION_NAME "VK_AMD_anti_lag" - -typedef enum VkAntiLagModeAMD { - VK_ANTI_LAG_MODE_DRIVER_CONTROL_AMD = 0, - VK_ANTI_LAG_MODE_ON_AMD = 1, - VK_ANTI_LAG_MODE_OFF_AMD = 2, - VK_ANTI_LAG_MODE_MAX_ENUM_AMD = 0x7FFFFFFF -} VkAntiLagModeAMD; - -typedef enum VkAntiLagStageAMD { - VK_ANTI_LAG_STAGE_INPUT_AMD = 0, - VK_ANTI_LAG_STAGE_PRESENT_AMD = 1, - VK_ANTI_LAG_STAGE_MAX_ENUM_AMD = 0x7FFFFFFF -} VkAntiLagStageAMD; -typedef struct VkPhysicalDeviceAntiLagFeaturesAMD { - VkStructureType sType; - void* pNext; - VkBool32 antiLag; -} VkPhysicalDeviceAntiLagFeaturesAMD; - -typedef struct VkAntiLagPresentationInfoAMD { - VkStructureType sType; - void* pNext; - VkAntiLagStageAMD stage; - uint64_t frameIndex; -} VkAntiLagPresentationInfoAMD; - -typedef struct VkAntiLagDataAMD { - VkStructureType sType; - const void* pNext; - VkAntiLagModeAMD mode; - uint32_t maxFPS; - const VkAntiLagPresentationInfoAMD* pPresentationInfo; -} VkAntiLagDataAMD; - -typedef void (VKAPI_PTR *PFN_vkAntiLagUpdateAMD)(VkDevice device, const VkAntiLagDataAMD* pData); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkAntiLagUpdateAMD( - VkDevice device, - const VkAntiLagDataAMD* pData); -#endif - - // VK_AMD_anti_lag is a preprocessor guard. Do not pass it to API calls. #define VK_AMD_anti_lag 1 #define VK_AMD_ANTI_LAG_SPEC_VERSION 1 @@ -19535,12 +19639,6 @@ typedef enum VkDepthClampModeEXT { VK_DEPTH_CLAMP_MODE_MAX_ENUM_EXT = 0x7FFFFFFF } VkDepthClampModeEXT; -typedef enum VkDepthClampModeEXT { - VK_DEPTH_CLAMP_MODE_VIEWPORT_RANGE_EXT = 0, - VK_DEPTH_CLAMP_MODE_USER_DEFINED_RANGE_EXT = 1, - VK_DEPTH_CLAMP_MODE_MAX_ENUM_EXT = 0x7FFFFFFF -} VkDepthClampModeEXT; - typedef enum VkShaderCreateFlagBitsEXT { VK_SHADER_CREATE_LINK_STAGE_BIT_EXT = 0x00000001, VK_SHADER_CREATE_ALLOW_VARYING_SUBGROUP_SIZE_BIT_EXT = 0x00000002, @@ -19550,7 +19648,6 @@ typedef enum VkShaderCreateFlagBitsEXT { VK_SHADER_CREATE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_EXT = 0x00000020, VK_SHADER_CREATE_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT = 0x00000040, VK_SHADER_CREATE_INDIRECT_BINDABLE_BIT_EXT = 0x00000080, - VK_SHADER_CREATE_INDIRECT_BINDABLE_BIT_EXT = 0x00000080, VK_SHADER_CREATE_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF } VkShaderCreateFlagBitsEXT; typedef VkFlags VkShaderCreateFlagsEXT; @@ -19591,17 +19688,11 @@ typedef struct VkDepthClampRangeEXT { float maxDepthClamp; } VkDepthClampRangeEXT; -typedef struct VkDepthClampRangeEXT { - float minDepthClamp; - float maxDepthClamp; -} VkDepthClampRangeEXT; - typedef VkResult (VKAPI_PTR *PFN_vkCreateShadersEXT)(VkDevice device, uint32_t createInfoCount, const VkShaderCreateInfoEXT* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkShaderEXT* pShaders); typedef void (VKAPI_PTR *PFN_vkDestroyShaderEXT)(VkDevice device, VkShaderEXT shader, const VkAllocationCallbacks* pAllocator); typedef VkResult (VKAPI_PTR *PFN_vkGetShaderBinaryDataEXT)(VkDevice device, VkShaderEXT shader, size_t* pDataSize, void* pData); typedef void (VKAPI_PTR *PFN_vkCmdBindShadersEXT)(VkCommandBuffer commandBuffer, uint32_t stageCount, const VkShaderStageFlagBits* pStages, const VkShaderEXT* pShaders); typedef void (VKAPI_PTR *PFN_vkCmdSetDepthClampRangeEXT)(VkCommandBuffer commandBuffer, VkDepthClampModeEXT depthClampMode, const VkDepthClampRangeEXT* pDepthClampRange); -typedef void (VKAPI_PTR *PFN_vkCmdSetDepthClampRangeEXT)(VkCommandBuffer commandBuffer, VkDepthClampModeEXT depthClampMode, const VkDepthClampRangeEXT* pDepthClampRange); 
#ifndef VK_NO_PROTOTYPES VKAPI_ATTR VkResult VKAPI_CALL vkCreateShadersEXT( @@ -19628,11 +19719,6 @@ VKAPI_ATTR void VKAPI_CALL vkCmdBindShadersEXT( const VkShaderStageFlagBits* pStages, const VkShaderEXT* pShaders); -VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthClampRangeEXT( - VkCommandBuffer commandBuffer, - VkDepthClampModeEXT depthClampMode, - const VkDepthClampRangeEXT* pDepthClampRange); - VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthClampRangeEXT( VkCommandBuffer commandBuffer, VkDepthClampModeEXT depthClampMode, @@ -19774,24 +19860,6 @@ typedef struct VkPhysicalDeviceLegacyVertexAttributesPropertiesEXT { -// VK_EXT_legacy_vertex_attributes is a preprocessor guard. Do not pass it to API calls. -#define VK_EXT_legacy_vertex_attributes 1 -#define VK_EXT_LEGACY_VERTEX_ATTRIBUTES_SPEC_VERSION 1 -#define VK_EXT_LEGACY_VERTEX_ATTRIBUTES_EXTENSION_NAME "VK_EXT_legacy_vertex_attributes" -typedef struct VkPhysicalDeviceLegacyVertexAttributesFeaturesEXT { - VkStructureType sType; - void* pNext; - VkBool32 legacyVertexAttributes; -} VkPhysicalDeviceLegacyVertexAttributesFeaturesEXT; - -typedef struct VkPhysicalDeviceLegacyVertexAttributesPropertiesEXT { - VkStructureType sType; - void* pNext; - VkBool32 nativeUnalignedPerformance; -} VkPhysicalDeviceLegacyVertexAttributesPropertiesEXT; - - - // VK_EXT_layer_settings is a preprocessor guard. Do not pass it to API calls. #define VK_EXT_layer_settings 1 #define VK_EXT_LAYER_SETTINGS_SPEC_VERSION 2 @@ -20226,18 +20294,6 @@ typedef struct VkPhysicalDeviceCommandBufferInheritanceFeaturesNV { -// VK_NV_command_buffer_inheritance is a preprocessor guard. Do not pass it to API calls. -#define VK_NV_command_buffer_inheritance 1 -#define VK_NV_COMMAND_BUFFER_INHERITANCE_SPEC_VERSION 1 -#define VK_NV_COMMAND_BUFFER_INHERITANCE_EXTENSION_NAME "VK_NV_command_buffer_inheritance" -typedef struct VkPhysicalDeviceCommandBufferInheritanceFeaturesNV { - VkStructureType sType; - void* pNext; - VkBool32 commandBufferInheritance; -} VkPhysicalDeviceCommandBufferInheritanceFeaturesNV; - - - // VK_NV_shader_atomic_float16_vector is a preprocessor guard. Do not pass it to API calls. #define VK_NV_shader_atomic_float16_vector 1 #define VK_NV_SHADER_ATOMIC_FLOAT16_VECTOR_SPEC_VERSION 1 @@ -20262,18 +20318,6 @@ typedef struct VkPhysicalDeviceShaderReplicatedCompositesFeaturesEXT { -// VK_EXT_shader_replicated_composites is a preprocessor guard. Do not pass it to API calls. -#define VK_EXT_shader_replicated_composites 1 -#define VK_EXT_SHADER_REPLICATED_COMPOSITES_SPEC_VERSION 1 -#define VK_EXT_SHADER_REPLICATED_COMPOSITES_EXTENSION_NAME "VK_EXT_shader_replicated_composites" -typedef struct VkPhysicalDeviceShaderReplicatedCompositesFeaturesEXT { - VkStructureType sType; - void* pNext; - VkBool32 shaderReplicatedComposites; -} VkPhysicalDeviceShaderReplicatedCompositesFeaturesEXT; - - - // VK_NV_ray_tracing_validation is a preprocessor guard. Do not pass it to API calls. #define VK_NV_ray_tracing_validation 1 #define VK_NV_RAY_TRACING_VALIDATION_SPEC_VERSION 1 @@ -20677,6 +20721,18 @@ VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceCooperativeMatrixFlexibleDimen #endif +// VK_EXT_vertex_attribute_robustness is a preprocessor guard. Do not pass it to API calls. 
+#define VK_EXT_vertex_attribute_robustness 1 +#define VK_EXT_VERTEX_ATTRIBUTE_ROBUSTNESS_SPEC_VERSION 1 +#define VK_EXT_VERTEX_ATTRIBUTE_ROBUSTNESS_EXTENSION_NAME "VK_EXT_vertex_attribute_robustness" +typedef struct VkPhysicalDeviceVertexAttributeRobustnessFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 vertexAttributeRobustness; +} VkPhysicalDeviceVertexAttributeRobustnessFeaturesEXT; + + + // VK_KHR_acceleration_structure is a preprocessor guard. Do not pass it to API calls. #define VK_KHR_acceleration_structure 1 #define VK_KHR_ACCELERATION_STRUCTURE_SPEC_VERSION 13 diff --git a/third_party/vulkan/vulkan_enums.hpp b/third_party/vulkan/vulkan_enums.hpp index 22ea829..c7de355 100644 --- a/third_party/vulkan/vulkan_enums.hpp +++ b/third_party/vulkan/vulkan_enums.hpp @@ -11,9 +11,6 @@ // include-what-you-use: make sure, vulkan.hpp is used by code-completers // IWYU pragma: private; include "vulkan.hpp" -// include-what-you-use: make sure, vulkan.hpp is used by code-completers -// IWYU pragma: private; include "vulkan.hpp" - namespace VULKAN_HPP_NAMESPACE { template @@ -261,6 +258,9 @@ namespace VULKAN_HPP_NAMESPACE ePipelineCompileRequired = VK_PIPELINE_COMPILE_REQUIRED, eErrorPipelineCompileRequiredEXT = VK_ERROR_PIPELINE_COMPILE_REQUIRED_EXT, ePipelineCompileRequiredEXT = VK_PIPELINE_COMPILE_REQUIRED_EXT, + eErrorNotPermitted = VK_ERROR_NOT_PERMITTED, + eErrorNotPermittedEXT = VK_ERROR_NOT_PERMITTED_EXT, + eErrorNotPermittedKHR = VK_ERROR_NOT_PERMITTED_KHR, eErrorSurfaceLostKHR = VK_ERROR_SURFACE_LOST_KHR, eErrorNativeWindowInUseKHR = VK_ERROR_NATIVE_WINDOW_IN_USE_KHR, eSuboptimalKHR = VK_SUBOPTIMAL_KHR, @@ -275,8 +275,6 @@ namespace VULKAN_HPP_NAMESPACE eErrorVideoProfileCodecNotSupportedKHR = VK_ERROR_VIDEO_PROFILE_CODEC_NOT_SUPPORTED_KHR, eErrorVideoStdVersionNotSupportedKHR = VK_ERROR_VIDEO_STD_VERSION_NOT_SUPPORTED_KHR, eErrorInvalidDrmFormatModifierPlaneLayoutEXT = VK_ERROR_INVALID_DRM_FORMAT_MODIFIER_PLANE_LAYOUT_EXT, - eErrorNotPermittedKHR = VK_ERROR_NOT_PERMITTED_KHR, - eErrorNotPermittedEXT = VK_ERROR_NOT_PERMITTED_EXT, #if defined( VK_USE_PLATFORM_WIN32_KHR ) eErrorFullScreenExclusiveModeLostEXT = VK_ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT, #endif /*VK_USE_PLATFORM_WIN32_KHR*/ @@ -290,9 +288,6 @@ namespace VULKAN_HPP_NAMESPACE eErrorIncompatibleShaderBinaryEXT = VK_ERROR_INCOMPATIBLE_SHADER_BINARY_EXT, ePipelineBinaryMissingKHR = VK_PIPELINE_BINARY_MISSING_KHR, eErrorNotEnoughSpaceKHR = VK_ERROR_NOT_ENOUGH_SPACE_KHR - eErrorIncompatibleShaderBinaryEXT = VK_ERROR_INCOMPATIBLE_SHADER_BINARY_EXT, - ePipelineBinaryMissingKHR = VK_PIPELINE_BINARY_MISSING_KHR, - eErrorNotEnoughSpaceKHR = VK_ERROR_NOT_ENOUGH_SPACE_KHR }; enum class StructureType @@ -674,6 +669,115 @@ namespace VULKAN_HPP_NAMESPACE eDeviceBufferMemoryRequirementsKHR = VK_STRUCTURE_TYPE_DEVICE_BUFFER_MEMORY_REQUIREMENTS_KHR, eDeviceImageMemoryRequirements = VK_STRUCTURE_TYPE_DEVICE_IMAGE_MEMORY_REQUIREMENTS, eDeviceImageMemoryRequirementsKHR = VK_STRUCTURE_TYPE_DEVICE_IMAGE_MEMORY_REQUIREMENTS_KHR, + ePhysicalDeviceVulkan14Features = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_4_FEATURES, + ePhysicalDeviceVulkan14Properties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_4_PROPERTIES, + eDeviceQueueGlobalPriorityCreateInfo = VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO, + eDeviceQueueGlobalPriorityCreateInfoEXT = VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_EXT, + eDeviceQueueGlobalPriorityCreateInfoKHR = VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_KHR, + 
ePhysicalDeviceGlobalPriorityQueryFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GLOBAL_PRIORITY_QUERY_FEATURES, + ePhysicalDeviceGlobalPriorityQueryFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GLOBAL_PRIORITY_QUERY_FEATURES_EXT, + ePhysicalDeviceGlobalPriorityQueryFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GLOBAL_PRIORITY_QUERY_FEATURES_KHR, + eQueueFamilyGlobalPriorityProperties = VK_STRUCTURE_TYPE_QUEUE_FAMILY_GLOBAL_PRIORITY_PROPERTIES, + eQueueFamilyGlobalPriorityPropertiesEXT = VK_STRUCTURE_TYPE_QUEUE_FAMILY_GLOBAL_PRIORITY_PROPERTIES_EXT, + eQueueFamilyGlobalPriorityPropertiesKHR = VK_STRUCTURE_TYPE_QUEUE_FAMILY_GLOBAL_PRIORITY_PROPERTIES_KHR, + ePhysicalDeviceShaderSubgroupRotateFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_ROTATE_FEATURES, + ePhysicalDeviceShaderSubgroupRotateFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_ROTATE_FEATURES_KHR, + ePhysicalDeviceShaderFloatControls2Features = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT_CONTROLS_2_FEATURES, + ePhysicalDeviceShaderFloatControls2FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT_CONTROLS_2_FEATURES_KHR, + ePhysicalDeviceShaderExpectAssumeFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_EXPECT_ASSUME_FEATURES, + ePhysicalDeviceShaderExpectAssumeFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_EXPECT_ASSUME_FEATURES_KHR, + ePhysicalDeviceLineRasterizationFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES, + ePhysicalDeviceLineRasterizationFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_EXT, + ePhysicalDeviceLineRasterizationFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_KHR, + ePipelineRasterizationLineStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO, + ePipelineRasterizationLineStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_EXT, + ePipelineRasterizationLineStateCreateInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_KHR, + ePhysicalDeviceLineRasterizationProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES, + ePhysicalDeviceLineRasterizationPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES_EXT, + ePhysicalDeviceLineRasterizationPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES_KHR, + ePhysicalDeviceVertexAttributeDivisorProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES, + ePhysicalDeviceVertexAttributeDivisorPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_KHR, + ePipelineVertexInputDivisorStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO, + ePipelineVertexInputDivisorStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_EXT, + ePipelineVertexInputDivisorStateCreateInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_KHR, + ePhysicalDeviceVertexAttributeDivisorFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES, + ePhysicalDeviceVertexAttributeDivisorFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_EXT, + ePhysicalDeviceVertexAttributeDivisorFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_KHR, + ePhysicalDeviceIndexTypeUint8Features = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES, + ePhysicalDeviceIndexTypeUint8FeaturesEXT = 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_EXT, + ePhysicalDeviceIndexTypeUint8FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_KHR, + eMemoryMapInfo = VK_STRUCTURE_TYPE_MEMORY_MAP_INFO, + eMemoryMapInfoKHR = VK_STRUCTURE_TYPE_MEMORY_MAP_INFO_KHR, + eMemoryUnmapInfo = VK_STRUCTURE_TYPE_MEMORY_UNMAP_INFO, + eMemoryUnmapInfoKHR = VK_STRUCTURE_TYPE_MEMORY_UNMAP_INFO_KHR, + ePhysicalDeviceMaintenance5Features = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_5_FEATURES, + ePhysicalDeviceMaintenance5FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_5_FEATURES_KHR, + ePhysicalDeviceMaintenance5Properties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_5_PROPERTIES, + ePhysicalDeviceMaintenance5PropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_5_PROPERTIES_KHR, + eRenderingAreaInfo = VK_STRUCTURE_TYPE_RENDERING_AREA_INFO, + eRenderingAreaInfoKHR = VK_STRUCTURE_TYPE_RENDERING_AREA_INFO_KHR, + eDeviceImageSubresourceInfo = VK_STRUCTURE_TYPE_DEVICE_IMAGE_SUBRESOURCE_INFO, + eDeviceImageSubresourceInfoKHR = VK_STRUCTURE_TYPE_DEVICE_IMAGE_SUBRESOURCE_INFO_KHR, + eSubresourceLayout2 = VK_STRUCTURE_TYPE_SUBRESOURCE_LAYOUT_2, + eSubresourceLayout2EXT = VK_STRUCTURE_TYPE_SUBRESOURCE_LAYOUT_2_EXT, + eSubresourceLayout2KHR = VK_STRUCTURE_TYPE_SUBRESOURCE_LAYOUT_2_KHR, + eImageSubresource2 = VK_STRUCTURE_TYPE_IMAGE_SUBRESOURCE_2, + eImageSubresource2EXT = VK_STRUCTURE_TYPE_IMAGE_SUBRESOURCE_2_EXT, + eImageSubresource2KHR = VK_STRUCTURE_TYPE_IMAGE_SUBRESOURCE_2_KHR, + ePipelineCreateFlags2CreateInfo = VK_STRUCTURE_TYPE_PIPELINE_CREATE_FLAGS_2_CREATE_INFO, + ePipelineCreateFlags2CreateInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_CREATE_FLAGS_2_CREATE_INFO_KHR, + eBufferUsageFlags2CreateInfo = VK_STRUCTURE_TYPE_BUFFER_USAGE_FLAGS_2_CREATE_INFO, + eBufferUsageFlags2CreateInfoKHR = VK_STRUCTURE_TYPE_BUFFER_USAGE_FLAGS_2_CREATE_INFO_KHR, + ePhysicalDevicePushDescriptorProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES, + ePhysicalDevicePushDescriptorPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR, + ePhysicalDeviceDynamicRenderingLocalReadFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DYNAMIC_RENDERING_LOCAL_READ_FEATURES, + ePhysicalDeviceDynamicRenderingLocalReadFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DYNAMIC_RENDERING_LOCAL_READ_FEATURES_KHR, + eRenderingAttachmentLocationInfo = VK_STRUCTURE_TYPE_RENDERING_ATTACHMENT_LOCATION_INFO, + eRenderingAttachmentLocationInfoKHR = VK_STRUCTURE_TYPE_RENDERING_ATTACHMENT_LOCATION_INFO_KHR, + eRenderingInputAttachmentIndexInfo = VK_STRUCTURE_TYPE_RENDERING_INPUT_ATTACHMENT_INDEX_INFO, + eRenderingInputAttachmentIndexInfoKHR = VK_STRUCTURE_TYPE_RENDERING_INPUT_ATTACHMENT_INDEX_INFO_KHR, + ePhysicalDeviceMaintenance6Features = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_6_FEATURES, + ePhysicalDeviceMaintenance6FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_6_FEATURES_KHR, + ePhysicalDeviceMaintenance6Properties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_6_PROPERTIES, + ePhysicalDeviceMaintenance6PropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_6_PROPERTIES_KHR, + eBindMemoryStatus = VK_STRUCTURE_TYPE_BIND_MEMORY_STATUS, + eBindMemoryStatusKHR = VK_STRUCTURE_TYPE_BIND_MEMORY_STATUS_KHR, + eBindDescriptorSetsInfo = VK_STRUCTURE_TYPE_BIND_DESCRIPTOR_SETS_INFO, + eBindDescriptorSetsInfoKHR = VK_STRUCTURE_TYPE_BIND_DESCRIPTOR_SETS_INFO_KHR, + ePushConstantsInfo = VK_STRUCTURE_TYPE_PUSH_CONSTANTS_INFO, + ePushConstantsInfoKHR = 
VK_STRUCTURE_TYPE_PUSH_CONSTANTS_INFO_KHR, + ePushDescriptorSetInfo = VK_STRUCTURE_TYPE_PUSH_DESCRIPTOR_SET_INFO, + ePushDescriptorSetInfoKHR = VK_STRUCTURE_TYPE_PUSH_DESCRIPTOR_SET_INFO_KHR, + ePushDescriptorSetWithTemplateInfo = VK_STRUCTURE_TYPE_PUSH_DESCRIPTOR_SET_WITH_TEMPLATE_INFO, + ePushDescriptorSetWithTemplateInfoKHR = VK_STRUCTURE_TYPE_PUSH_DESCRIPTOR_SET_WITH_TEMPLATE_INFO_KHR, + ePhysicalDevicePipelineProtectedAccessFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_PROTECTED_ACCESS_FEATURES, + ePhysicalDevicePipelineProtectedAccessFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_PROTECTED_ACCESS_FEATURES_EXT, + ePipelineRobustnessCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_ROBUSTNESS_CREATE_INFO, + ePipelineRobustnessCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_ROBUSTNESS_CREATE_INFO_EXT, + ePhysicalDevicePipelineRobustnessFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_ROBUSTNESS_FEATURES, + ePhysicalDevicePipelineRobustnessFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_ROBUSTNESS_FEATURES_EXT, + ePhysicalDevicePipelineRobustnessProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_ROBUSTNESS_PROPERTIES, + ePhysicalDevicePipelineRobustnessPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_ROBUSTNESS_PROPERTIES_EXT, + ePhysicalDeviceHostImageCopyFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_IMAGE_COPY_FEATURES, + ePhysicalDeviceHostImageCopyFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_IMAGE_COPY_FEATURES_EXT, + ePhysicalDeviceHostImageCopyProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_IMAGE_COPY_PROPERTIES, + ePhysicalDeviceHostImageCopyPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_IMAGE_COPY_PROPERTIES_EXT, + eMemoryToImageCopy = VK_STRUCTURE_TYPE_MEMORY_TO_IMAGE_COPY, + eMemoryToImageCopyEXT = VK_STRUCTURE_TYPE_MEMORY_TO_IMAGE_COPY_EXT, + eImageToMemoryCopy = VK_STRUCTURE_TYPE_IMAGE_TO_MEMORY_COPY, + eImageToMemoryCopyEXT = VK_STRUCTURE_TYPE_IMAGE_TO_MEMORY_COPY_EXT, + eCopyImageToMemoryInfo = VK_STRUCTURE_TYPE_COPY_IMAGE_TO_MEMORY_INFO, + eCopyImageToMemoryInfoEXT = VK_STRUCTURE_TYPE_COPY_IMAGE_TO_MEMORY_INFO_EXT, + eCopyMemoryToImageInfo = VK_STRUCTURE_TYPE_COPY_MEMORY_TO_IMAGE_INFO, + eCopyMemoryToImageInfoEXT = VK_STRUCTURE_TYPE_COPY_MEMORY_TO_IMAGE_INFO_EXT, + eHostImageLayoutTransitionInfo = VK_STRUCTURE_TYPE_HOST_IMAGE_LAYOUT_TRANSITION_INFO, + eHostImageLayoutTransitionInfoEXT = VK_STRUCTURE_TYPE_HOST_IMAGE_LAYOUT_TRANSITION_INFO_EXT, + eCopyImageToImageInfo = VK_STRUCTURE_TYPE_COPY_IMAGE_TO_IMAGE_INFO, + eCopyImageToImageInfoEXT = VK_STRUCTURE_TYPE_COPY_IMAGE_TO_IMAGE_INFO_EXT, + eSubresourceHostMemcpySize = VK_STRUCTURE_TYPE_SUBRESOURCE_HOST_MEMCPY_SIZE, + eSubresourceHostMemcpySizeEXT = VK_STRUCTURE_TYPE_SUBRESOURCE_HOST_MEMCPY_SIZE_EXT, + eHostImageCopyDevicePerformanceQuery = VK_STRUCTURE_TYPE_HOST_IMAGE_COPY_DEVICE_PERFORMANCE_QUERY, + eHostImageCopyDevicePerformanceQueryEXT = VK_STRUCTURE_TYPE_HOST_IMAGE_COPY_DEVICE_PERFORMANCE_QUERY_EXT, eSwapchainCreateInfoKHR = VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR, ePresentInfoKHR = VK_STRUCTURE_TYPE_PRESENT_INFO_KHR, eDeviceGroupPresentCapabilitiesKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_CAPABILITIES_KHR, @@ -788,11 +892,8 @@ namespace VULKAN_HPP_NAMESPACE #if defined( VK_USE_PLATFORM_VI_NN ) eViSurfaceCreateInfoNN = VK_STRUCTURE_TYPE_VI_SURFACE_CREATE_INFO_NN, #endif /*VK_USE_PLATFORM_VI_NN*/ - eImageViewAstcDecodeModeEXT = VK_STRUCTURE_TYPE_IMAGE_VIEW_ASTC_DECODE_MODE_EXT, - ePhysicalDeviceAstcDecodeFeaturesEXT = 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ASTC_DECODE_FEATURES_EXT, - ePipelineRobustnessCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_ROBUSTNESS_CREATE_INFO_EXT, - ePhysicalDevicePipelineRobustnessFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_ROBUSTNESS_FEATURES_EXT, - ePhysicalDevicePipelineRobustnessPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_ROBUSTNESS_PROPERTIES_EXT, + eImageViewAstcDecodeModeEXT = VK_STRUCTURE_TYPE_IMAGE_VIEW_ASTC_DECODE_MODE_EXT, + ePhysicalDeviceAstcDecodeFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ASTC_DECODE_FEATURES_EXT, #if defined( VK_USE_PLATFORM_WIN32_KHR ) eImportMemoryWin32HandleInfoKHR = VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_KHR, eExportMemoryWin32HandleInfoKHR = VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_KHR, @@ -811,7 +912,6 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VK_USE_PLATFORM_WIN32_KHR*/ eImportSemaphoreFdInfoKHR = VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR, eSemaphoreGetFdInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_GET_FD_INFO_KHR, - ePhysicalDevicePushDescriptorPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR, eCommandBufferInheritanceConditionalRenderingInfoEXT = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_CONDITIONAL_RENDERING_INFO_EXT, ePhysicalDeviceConditionalRenderingFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONDITIONAL_RENDERING_FEATURES_EXT, eConditionalRenderingBeginInfoEXT = VK_STRUCTURE_TYPE_CONDITIONAL_RENDERING_BEGIN_INFO_EXT, @@ -825,7 +925,6 @@ namespace VULKAN_HPP_NAMESPACE ePresentTimesInfoGOOGLE = VK_STRUCTURE_TYPE_PRESENT_TIMES_INFO_GOOGLE, ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_ATTRIBUTES_PROPERTIES_NVX, eMultiviewPerViewAttributesInfoNVX = VK_STRUCTURE_TYPE_MULTIVIEW_PER_VIEW_ATTRIBUTES_INFO_NVX, - eMultiviewPerViewAttributesInfoNVX = VK_STRUCTURE_TYPE_MULTIVIEW_PER_VIEW_ATTRIBUTES_INFO_NVX, ePipelineViewportSwizzleStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SWIZZLE_STATE_CREATE_INFO_NV, ePhysicalDeviceDiscardRectanglePropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DISCARD_RECTANGLE_PROPERTIES_EXT, ePipelineDiscardRectangleStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_DISCARD_RECTANGLE_STATE_CREATE_INFO_EXT, @@ -885,8 +984,6 @@ namespace VULKAN_HPP_NAMESPACE eExecutionGraphPipelineCreateInfoAMDX = VK_STRUCTURE_TYPE_EXECUTION_GRAPH_PIPELINE_CREATE_INFO_AMDX, ePipelineShaderStageNodeCreateInfoAMDX = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_NODE_CREATE_INFO_AMDX, #endif /*VK_ENABLE_BETA_EXTENSIONS*/ - eAttachmentSampleCountInfoAMD = VK_STRUCTURE_TYPE_ATTACHMENT_SAMPLE_COUNT_INFO_AMD, - eAttachmentSampleCountInfoNV = VK_STRUCTURE_TYPE_ATTACHMENT_SAMPLE_COUNT_INFO_NV, eAttachmentSampleCountInfoAMD = VK_STRUCTURE_TYPE_ATTACHMENT_SAMPLE_COUNT_INFO_AMD, eAttachmentSampleCountInfoNV = VK_STRUCTURE_TYPE_ATTACHMENT_SAMPLE_COUNT_INFO_NV, eSampleLocationsInfoEXT = VK_STRUCTURE_TYPE_SAMPLE_LOCATIONS_INFO_EXT, @@ -965,14 +1062,6 @@ namespace VULKAN_HPP_NAMESPACE eVideoDecodeH265ProfileInfoKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_PROFILE_INFO_KHR, eVideoDecodeH265PictureInfoKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_PICTURE_INFO_KHR, eVideoDecodeH265DpbSlotInfoKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_DPB_SLOT_INFO_KHR, - eDeviceQueueGlobalPriorityCreateInfoKHR = VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_KHR, - eDeviceQueueGlobalPriorityCreateInfoEXT = VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_EXT, - eDeviceQueueGlobalPriorityCreateInfoKHR = 
VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_KHR, - eDeviceQueueGlobalPriorityCreateInfoEXT = VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_EXT, - ePhysicalDeviceGlobalPriorityQueryFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GLOBAL_PRIORITY_QUERY_FEATURES_KHR, - ePhysicalDeviceGlobalPriorityQueryFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GLOBAL_PRIORITY_QUERY_FEATURES_EXT, - eQueueFamilyGlobalPriorityPropertiesKHR = VK_STRUCTURE_TYPE_QUEUE_FAMILY_GLOBAL_PRIORITY_PROPERTIES_KHR, - eQueueFamilyGlobalPriorityPropertiesEXT = VK_STRUCTURE_TYPE_QUEUE_FAMILY_GLOBAL_PRIORITY_PROPERTIES_EXT, eDeviceMemoryOverallocationCreateInfoAMD = VK_STRUCTURE_TYPE_DEVICE_MEMORY_OVERALLOCATION_CREATE_INFO_AMD, ePhysicalDeviceVertexAttributeDivisorPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_EXT, #if defined( VK_USE_PLATFORM_GGP ) @@ -987,8 +1076,6 @@ namespace VULKAN_HPP_NAMESPACE eQueueFamilyCheckpointPropertiesNV = VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_NV, eQueueFamilyCheckpointProperties2NV = VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_2_NV, eCheckpointData2NV = VK_STRUCTURE_TYPE_CHECKPOINT_DATA_2_NV, - eQueueFamilyCheckpointProperties2NV = VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_2_NV, - eCheckpointData2NV = VK_STRUCTURE_TYPE_CHECKPOINT_DATA_2_NV, ePhysicalDeviceShaderIntegerFunctions2FeaturesINTEL = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_FUNCTIONS_2_FEATURES_INTEL, eQueryPoolPerformanceQueryCreateInfoINTEL = VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_QUERY_CREATE_INFO_INTEL, eQueryPoolCreateInfoINTEL = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO_INTEL, @@ -1010,19 +1097,14 @@ namespace VULKAN_HPP_NAMESPACE ePhysicalDeviceFragmentDensityMapPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_PROPERTIES_EXT, eRenderPassFragmentDensityMapCreateInfoEXT = VK_STRUCTURE_TYPE_RENDER_PASS_FRAGMENT_DENSITY_MAP_CREATE_INFO_EXT, eRenderingFragmentDensityMapAttachmentInfoEXT = VK_STRUCTURE_TYPE_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_INFO_EXT, - eRenderingFragmentDensityMapAttachmentInfoEXT = VK_STRUCTURE_TYPE_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_INFO_EXT, eFragmentShadingRateAttachmentInfoKHR = VK_STRUCTURE_TYPE_FRAGMENT_SHADING_RATE_ATTACHMENT_INFO_KHR, ePipelineFragmentShadingRateStateCreateInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_FRAGMENT_SHADING_RATE_STATE_CREATE_INFO_KHR, ePhysicalDeviceFragmentShadingRatePropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_PROPERTIES_KHR, ePhysicalDeviceFragmentShadingRateFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_FEATURES_KHR, ePhysicalDeviceFragmentShadingRateKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_KHR, eRenderingFragmentShadingRateAttachmentInfoKHR = VK_STRUCTURE_TYPE_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_INFO_KHR, - eRenderingFragmentShadingRateAttachmentInfoKHR = VK_STRUCTURE_TYPE_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_INFO_KHR, ePhysicalDeviceShaderCoreProperties2AMD = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_2_AMD, ePhysicalDeviceCoherentMemoryFeaturesAMD = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COHERENT_MEMORY_FEATURES_AMD, - ePhysicalDeviceDynamicRenderingLocalReadFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DYNAMIC_RENDERING_LOCAL_READ_FEATURES_KHR, - eRenderingAttachmentLocationInfoKHR = VK_STRUCTURE_TYPE_RENDERING_ATTACHMENT_LOCATION_INFO_KHR, - eRenderingInputAttachmentIndexInfoKHR = 
VK_STRUCTURE_TYPE_RENDERING_INPUT_ATTACHMENT_INDEX_INFO_KHR, ePhysicalDeviceShaderImageAtomicInt64FeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_IMAGE_ATOMIC_INT64_FEATURES_EXT, ePhysicalDeviceShaderQuadControlFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_QUAD_CONTROL_FEATURES_KHR, ePhysicalDeviceMemoryBudgetPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT, @@ -1061,18 +1143,6 @@ namespace VULKAN_HPP_NAMESPACE ePipelineExecutableInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INFO_KHR, ePipelineExecutableStatisticKHR = VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_STATISTIC_KHR, ePipelineExecutableInternalRepresentationKHR = VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INTERNAL_REPRESENTATION_KHR, - ePhysicalDeviceHostImageCopyFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_IMAGE_COPY_FEATURES_EXT, - ePhysicalDeviceHostImageCopyPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_IMAGE_COPY_PROPERTIES_EXT, - eMemoryToImageCopyEXT = VK_STRUCTURE_TYPE_MEMORY_TO_IMAGE_COPY_EXT, - eImageToMemoryCopyEXT = VK_STRUCTURE_TYPE_IMAGE_TO_MEMORY_COPY_EXT, - eCopyImageToMemoryInfoEXT = VK_STRUCTURE_TYPE_COPY_IMAGE_TO_MEMORY_INFO_EXT, - eCopyMemoryToImageInfoEXT = VK_STRUCTURE_TYPE_COPY_MEMORY_TO_IMAGE_INFO_EXT, - eHostImageLayoutTransitionInfoEXT = VK_STRUCTURE_TYPE_HOST_IMAGE_LAYOUT_TRANSITION_INFO_EXT, - eCopyImageToImageInfoEXT = VK_STRUCTURE_TYPE_COPY_IMAGE_TO_IMAGE_INFO_EXT, - eSubresourceHostMemcpySizeEXT = VK_STRUCTURE_TYPE_SUBRESOURCE_HOST_MEMCPY_SIZE_EXT, - eHostImageCopyDevicePerformanceQueryEXT = VK_STRUCTURE_TYPE_HOST_IMAGE_COPY_DEVICE_PERFORMANCE_QUERY_EXT, - eMemoryMapInfoKHR = VK_STRUCTURE_TYPE_MEMORY_MAP_INFO_KHR, - eMemoryUnmapInfoKHR = VK_STRUCTURE_TYPE_MEMORY_UNMAP_INFO_KHR, ePhysicalDeviceMapMemoryPlacedFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAP_MEMORY_PLACED_FEATURES_EXT, ePhysicalDeviceMapMemoryPlacedPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAP_MEMORY_PLACED_PROPERTIES_EXT, eMemoryMapPlacedInfoEXT = VK_STRUCTURE_TYPE_MEMORY_MAP_PLACED_INFO_EXT, @@ -1194,49 +1264,6 @@ namespace VULKAN_HPP_NAMESPACE eDeviceFaultCountsEXT = VK_STRUCTURE_TYPE_DEVICE_FAULT_COUNTS_EXT, eDeviceFaultInfoEXT = VK_STRUCTURE_TYPE_DEVICE_FAULT_INFO_EXT, ePhysicalDeviceRgba10X6FormatsFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RGBA10X6_FORMATS_FEATURES_EXT, - ePhysicalDeviceDescriptorBufferPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_BUFFER_PROPERTIES_EXT, - ePhysicalDeviceDescriptorBufferDensityMapPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_BUFFER_DENSITY_MAP_PROPERTIES_EXT, - ePhysicalDeviceDescriptorBufferFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_BUFFER_FEATURES_EXT, - eDescriptorAddressInfoEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_ADDRESS_INFO_EXT, - eDescriptorGetInfoEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_GET_INFO_EXT, - eBufferCaptureDescriptorDataInfoEXT = VK_STRUCTURE_TYPE_BUFFER_CAPTURE_DESCRIPTOR_DATA_INFO_EXT, - eImageCaptureDescriptorDataInfoEXT = VK_STRUCTURE_TYPE_IMAGE_CAPTURE_DESCRIPTOR_DATA_INFO_EXT, - eImageViewCaptureDescriptorDataInfoEXT = VK_STRUCTURE_TYPE_IMAGE_VIEW_CAPTURE_DESCRIPTOR_DATA_INFO_EXT, - eSamplerCaptureDescriptorDataInfoEXT = VK_STRUCTURE_TYPE_SAMPLER_CAPTURE_DESCRIPTOR_DATA_INFO_EXT, - eOpaqueCaptureDescriptorDataCreateInfoEXT = VK_STRUCTURE_TYPE_OPAQUE_CAPTURE_DESCRIPTOR_DATA_CREATE_INFO_EXT, - eDescriptorBufferBindingInfoEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_BUFFER_BINDING_INFO_EXT, - eDescriptorBufferBindingPushDescriptorBufferHandleEXT = 
VK_STRUCTURE_TYPE_DESCRIPTOR_BUFFER_BINDING_PUSH_DESCRIPTOR_BUFFER_HANDLE_EXT, - eAccelerationStructureCaptureDescriptorDataInfoEXT = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CAPTURE_DESCRIPTOR_DATA_INFO_EXT, - ePhysicalDeviceGraphicsPipelineLibraryFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GRAPHICS_PIPELINE_LIBRARY_FEATURES_EXT, - ePhysicalDeviceGraphicsPipelineLibraryPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GRAPHICS_PIPELINE_LIBRARY_PROPERTIES_EXT, - eGraphicsPipelineLibraryCreateInfoEXT = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_LIBRARY_CREATE_INFO_EXT, - ePhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_EARLY_AND_LATE_FRAGMENT_TESTS_FEATURES_AMD, - ePhysicalDeviceFragmentShaderBarycentricFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_KHR, - ePhysicalDeviceFragmentShaderBarycentricFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_NV, - ePhysicalDeviceFragmentShaderBarycentricPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_PROPERTIES_KHR, - ePhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_UNIFORM_CONTROL_FLOW_FEATURES_KHR, - ePhysicalDeviceFragmentShadingRateEnumsPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_ENUMS_PROPERTIES_NV, - ePhysicalDeviceFragmentShadingRateEnumsFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_ENUMS_FEATURES_NV, - ePipelineFragmentShadingRateEnumStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_FRAGMENT_SHADING_RATE_ENUM_STATE_CREATE_INFO_NV, - eAccelerationStructureGeometryMotionTrianglesDataNV = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_MOTION_TRIANGLES_DATA_NV, - ePhysicalDeviceRayTracingMotionBlurFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_MOTION_BLUR_FEATURES_NV, - eAccelerationStructureMotionInfoNV = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MOTION_INFO_NV, - ePhysicalDeviceMeshShaderFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_FEATURES_EXT, - ePhysicalDeviceMeshShaderPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_PROPERTIES_EXT, - ePhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_YCBCR_2_PLANE_444_FORMATS_FEATURES_EXT, - ePhysicalDeviceFragmentDensityMap2FeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_2_FEATURES_EXT, - ePhysicalDeviceFragmentDensityMap2PropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_2_PROPERTIES_EXT, - eCopyCommandTransformInfoQCOM = VK_STRUCTURE_TYPE_COPY_COMMAND_TRANSFORM_INFO_QCOM, - ePhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_WORKGROUP_MEMORY_EXPLICIT_LAYOUT_FEATURES_KHR, - ePhysicalDeviceImageCompressionControlFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_COMPRESSION_CONTROL_FEATURES_EXT, - eImageCompressionControlEXT = VK_STRUCTURE_TYPE_IMAGE_COMPRESSION_CONTROL_EXT, - eImageCompressionPropertiesEXT = VK_STRUCTURE_TYPE_IMAGE_COMPRESSION_PROPERTIES_EXT, - ePhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ATTACHMENT_FEEDBACK_LOOP_LAYOUT_FEATURES_EXT, - ePhysicalDevice4444FormatsFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_4444_FORMATS_FEATURES_EXT, - ePhysicalDeviceFaultFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FAULT_FEATURES_EXT, - eDeviceFaultCountsEXT = VK_STRUCTURE_TYPE_DEVICE_FAULT_COUNTS_EXT, - eDeviceFaultInfoEXT = 
VK_STRUCTURE_TYPE_DEVICE_FAULT_INFO_EXT, - ePhysicalDeviceRgba10X6FormatsFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RGBA10X6_FORMATS_FEATURES_EXT, #if defined( VK_USE_PLATFORM_DIRECTFB_EXT ) eDirectfbSurfaceCreateInfoEXT = VK_STRUCTURE_TYPE_DIRECTFB_SURFACE_CREATE_INFO_EXT, #endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/ @@ -1250,7 +1277,6 @@ namespace VULKAN_HPP_NAMESPACE ePipelineViewportDepthClipControlCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_DEPTH_CLIP_CONTROL_CREATE_INFO_EXT, ePhysicalDevicePrimitiveTopologyListRestartFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIMITIVE_TOPOLOGY_LIST_RESTART_FEATURES_EXT, ePhysicalDevicePresentModeFifoLatestReadyFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENT_MODE_FIFO_LATEST_READY_FEATURES_EXT, - ePhysicalDevicePresentModeFifoLatestReadyFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENT_MODE_FIFO_LATEST_READY_FEATURES_EXT, #if defined( VK_USE_PLATFORM_FUCHSIA ) eImportMemoryZirconHandleInfoFUCHSIA = VK_STRUCTURE_TYPE_IMPORT_MEMORY_ZIRCON_HANDLE_INFO_FUCHSIA, eMemoryZirconHandlePropertiesFUCHSIA = VK_STRUCTURE_TYPE_MEMORY_ZIRCON_HANDLE_PROPERTIES_FUCHSIA, @@ -1318,7 +1344,6 @@ namespace VULKAN_HPP_NAMESPACE eSamplerBorderColorComponentMappingCreateInfoEXT = VK_STRUCTURE_TYPE_SAMPLER_BORDER_COLOR_COMPONENT_MAPPING_CREATE_INFO_EXT, ePhysicalDevicePageableDeviceLocalMemoryFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PAGEABLE_DEVICE_LOCAL_MEMORY_FEATURES_EXT, ePhysicalDeviceShaderCorePropertiesARM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_ARM, - ePhysicalDeviceShaderSubgroupRotateFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_ROTATE_FEATURES_KHR, eDeviceQueueShaderCoreControlCreateInfoARM = VK_STRUCTURE_TYPE_DEVICE_QUEUE_SHADER_CORE_CONTROL_CREATE_INFO_ARM, ePhysicalDeviceSchedulingControlsFeaturesARM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCHEDULING_CONTROLS_FEATURES_ARM, ePhysicalDeviceSchedulingControlsPropertiesARM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCHEDULING_CONTROLS_PROPERTIES_ARM, @@ -1375,94 +1400,11 @@ namespace VULKAN_HPP_NAMESPACE eOpticalFlowExecuteInfoNV = VK_STRUCTURE_TYPE_OPTICAL_FLOW_EXECUTE_INFO_NV, eOpticalFlowSessionCreatePrivateDataInfoNV = VK_STRUCTURE_TYPE_OPTICAL_FLOW_SESSION_CREATE_PRIVATE_DATA_INFO_NV, ePhysicalDeviceLegacyDitheringFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LEGACY_DITHERING_FEATURES_EXT, - ePhysicalDevicePipelineProtectedAccessFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_PROTECTED_ACCESS_FEATURES_EXT, - ePhysicalDeviceClusterCullingShaderFeaturesHUAWEI = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CLUSTER_CULLING_SHADER_FEATURES_HUAWEI, - ePhysicalDeviceClusterCullingShaderPropertiesHUAWEI = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CLUSTER_CULLING_SHADER_PROPERTIES_HUAWEI, - ePhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CLUSTER_CULLING_SHADER_VRS_FEATURES_HUAWEI, - ePhysicalDeviceBorderColorSwizzleFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BORDER_COLOR_SWIZZLE_FEATURES_EXT, - eSamplerBorderColorComponentMappingCreateInfoEXT = VK_STRUCTURE_TYPE_SAMPLER_BORDER_COLOR_COMPONENT_MAPPING_CREATE_INFO_EXT, - ePhysicalDevicePageableDeviceLocalMemoryFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PAGEABLE_DEVICE_LOCAL_MEMORY_FEATURES_EXT, - ePhysicalDeviceShaderCorePropertiesARM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_ARM, - ePhysicalDeviceShaderSubgroupRotateFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_ROTATE_FEATURES_KHR, - eDeviceQueueShaderCoreControlCreateInfoARM = 
VK_STRUCTURE_TYPE_DEVICE_QUEUE_SHADER_CORE_CONTROL_CREATE_INFO_ARM, - ePhysicalDeviceSchedulingControlsFeaturesARM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCHEDULING_CONTROLS_FEATURES_ARM, - ePhysicalDeviceSchedulingControlsPropertiesARM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCHEDULING_CONTROLS_PROPERTIES_ARM, - ePhysicalDeviceImageSlicedViewOf3DFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_SLICED_VIEW_OF_3D_FEATURES_EXT, - eImageViewSlicedCreateInfoEXT = VK_STRUCTURE_TYPE_IMAGE_VIEW_SLICED_CREATE_INFO_EXT, - ePhysicalDeviceDescriptorSetHostMappingFeaturesVALVE = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_SET_HOST_MAPPING_FEATURES_VALVE, - eDescriptorSetBindingReferenceVALVE = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_BINDING_REFERENCE_VALVE, - eDescriptorSetLayoutHostMappingInfoVALVE = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_HOST_MAPPING_INFO_VALVE, - ePhysicalDeviceDepthClampZeroOneFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLAMP_ZERO_ONE_FEATURES_EXT, - ePhysicalDeviceNonSeamlessCubeMapFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_NON_SEAMLESS_CUBE_MAP_FEATURES_EXT, - ePhysicalDeviceRenderPassStripedFeaturesARM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RENDER_PASS_STRIPED_FEATURES_ARM, - ePhysicalDeviceRenderPassStripedPropertiesARM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RENDER_PASS_STRIPED_PROPERTIES_ARM, - eRenderPassStripeBeginInfoARM = VK_STRUCTURE_TYPE_RENDER_PASS_STRIPE_BEGIN_INFO_ARM, - eRenderPassStripeInfoARM = VK_STRUCTURE_TYPE_RENDER_PASS_STRIPE_INFO_ARM, - eRenderPassStripeSubmitInfoARM = VK_STRUCTURE_TYPE_RENDER_PASS_STRIPE_SUBMIT_INFO_ARM, - ePhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_OFFSET_FEATURES_QCOM, - ePhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_OFFSET_PROPERTIES_QCOM, - eSubpassFragmentDensityMapOffsetEndInfoQCOM = VK_STRUCTURE_TYPE_SUBPASS_FRAGMENT_DENSITY_MAP_OFFSET_END_INFO_QCOM, - ePhysicalDeviceCopyMemoryIndirectFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COPY_MEMORY_INDIRECT_FEATURES_NV, - ePhysicalDeviceCopyMemoryIndirectPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COPY_MEMORY_INDIRECT_PROPERTIES_NV, - ePhysicalDeviceMemoryDecompressionFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_DECOMPRESSION_FEATURES_NV, - ePhysicalDeviceMemoryDecompressionPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_DECOMPRESSION_PROPERTIES_NV, - ePhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_COMPUTE_FEATURES_NV, - eComputePipelineIndirectBufferInfoNV = VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_INDIRECT_BUFFER_INFO_NV, - ePipelineIndirectDeviceAddressInfoNV = VK_STRUCTURE_TYPE_PIPELINE_INDIRECT_DEVICE_ADDRESS_INFO_NV, - ePhysicalDeviceLinearColorAttachmentFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINEAR_COLOR_ATTACHMENT_FEATURES_NV, - ePhysicalDeviceShaderMaximalReconvergenceFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_MAXIMAL_RECONVERGENCE_FEATURES_KHR, - ePhysicalDeviceImageCompressionControlSwapchainFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_COMPRESSION_CONTROL_SWAPCHAIN_FEATURES_EXT, - ePhysicalDeviceImageProcessingFeaturesQCOM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_PROCESSING_FEATURES_QCOM, - ePhysicalDeviceImageProcessingPropertiesQCOM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_PROCESSING_PROPERTIES_QCOM, - eImageViewSampleWeightCreateInfoQCOM = VK_STRUCTURE_TYPE_IMAGE_VIEW_SAMPLE_WEIGHT_CREATE_INFO_QCOM, - 
ePhysicalDeviceNestedCommandBufferFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_NESTED_COMMAND_BUFFER_FEATURES_EXT, - ePhysicalDeviceNestedCommandBufferPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_NESTED_COMMAND_BUFFER_PROPERTIES_EXT, - eExternalMemoryAcquireUnmodifiedEXT = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_ACQUIRE_UNMODIFIED_EXT, - ePhysicalDeviceExtendedDynamicState3FeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_3_FEATURES_EXT, - ePhysicalDeviceExtendedDynamicState3PropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_3_PROPERTIES_EXT, - ePhysicalDeviceSubpassMergeFeedbackFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBPASS_MERGE_FEEDBACK_FEATURES_EXT, - eRenderPassCreationControlEXT = VK_STRUCTURE_TYPE_RENDER_PASS_CREATION_CONTROL_EXT, - eRenderPassCreationFeedbackCreateInfoEXT = VK_STRUCTURE_TYPE_RENDER_PASS_CREATION_FEEDBACK_CREATE_INFO_EXT, - eRenderPassSubpassFeedbackCreateInfoEXT = VK_STRUCTURE_TYPE_RENDER_PASS_SUBPASS_FEEDBACK_CREATE_INFO_EXT, - eDirectDriverLoadingInfoLUNARG = VK_STRUCTURE_TYPE_DIRECT_DRIVER_LOADING_INFO_LUNARG, - eDirectDriverLoadingListLUNARG = VK_STRUCTURE_TYPE_DIRECT_DRIVER_LOADING_LIST_LUNARG, - ePhysicalDeviceShaderModuleIdentifierFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_MODULE_IDENTIFIER_FEATURES_EXT, - ePhysicalDeviceShaderModuleIdentifierPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_MODULE_IDENTIFIER_PROPERTIES_EXT, - ePipelineShaderStageModuleIdentifierCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_MODULE_IDENTIFIER_CREATE_INFO_EXT, - eShaderModuleIdentifierEXT = VK_STRUCTURE_TYPE_SHADER_MODULE_IDENTIFIER_EXT, - ePhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_FEATURES_EXT, - ePhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_FEATURES_ARM, - ePhysicalDeviceOpticalFlowFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_OPTICAL_FLOW_FEATURES_NV, - ePhysicalDeviceOpticalFlowPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_OPTICAL_FLOW_PROPERTIES_NV, - eOpticalFlowImageFormatInfoNV = VK_STRUCTURE_TYPE_OPTICAL_FLOW_IMAGE_FORMAT_INFO_NV, - eOpticalFlowImageFormatPropertiesNV = VK_STRUCTURE_TYPE_OPTICAL_FLOW_IMAGE_FORMAT_PROPERTIES_NV, - eOpticalFlowSessionCreateInfoNV = VK_STRUCTURE_TYPE_OPTICAL_FLOW_SESSION_CREATE_INFO_NV, - eOpticalFlowExecuteInfoNV = VK_STRUCTURE_TYPE_OPTICAL_FLOW_EXECUTE_INFO_NV, - eOpticalFlowSessionCreatePrivateDataInfoNV = VK_STRUCTURE_TYPE_OPTICAL_FLOW_SESSION_CREATE_PRIVATE_DATA_INFO_NV, - ePhysicalDeviceLegacyDitheringFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LEGACY_DITHERING_FEATURES_EXT, - ePhysicalDevicePipelineProtectedAccessFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_PROTECTED_ACCESS_FEATURES_EXT, #if defined( VK_USE_PLATFORM_ANDROID_KHR ) ePhysicalDeviceExternalFormatResolveFeaturesANDROID = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FORMAT_RESOLVE_FEATURES_ANDROID, ePhysicalDeviceExternalFormatResolvePropertiesANDROID = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FORMAT_RESOLVE_PROPERTIES_ANDROID, eAndroidHardwareBufferFormatResolvePropertiesANDROID = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_RESOLVE_PROPERTIES_ANDROID, #endif /*VK_USE_PLATFORM_ANDROID_KHR*/ - ePhysicalDeviceMaintenance5FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_5_FEATURES_KHR, - ePhysicalDeviceMaintenance5PropertiesKHR = 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_5_PROPERTIES_KHR, - eRenderingAreaInfoKHR = VK_STRUCTURE_TYPE_RENDERING_AREA_INFO_KHR, - eDeviceImageSubresourceInfoKHR = VK_STRUCTURE_TYPE_DEVICE_IMAGE_SUBRESOURCE_INFO_KHR, - eSubresourceLayout2KHR = VK_STRUCTURE_TYPE_SUBRESOURCE_LAYOUT_2_KHR, - eSubresourceLayout2EXT = VK_STRUCTURE_TYPE_SUBRESOURCE_LAYOUT_2_EXT, - eImageSubresource2KHR = VK_STRUCTURE_TYPE_IMAGE_SUBRESOURCE_2_KHR, - eImageSubresource2EXT = VK_STRUCTURE_TYPE_IMAGE_SUBRESOURCE_2_EXT, - eSubresourceLayout2KHR = VK_STRUCTURE_TYPE_SUBRESOURCE_LAYOUT_2_KHR, - eSubresourceLayout2EXT = VK_STRUCTURE_TYPE_SUBRESOURCE_LAYOUT_2_EXT, - eImageSubresource2KHR = VK_STRUCTURE_TYPE_IMAGE_SUBRESOURCE_2_KHR, - eImageSubresource2EXT = VK_STRUCTURE_TYPE_IMAGE_SUBRESOURCE_2_EXT, - ePipelineCreateFlags2CreateInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_CREATE_FLAGS_2_CREATE_INFO_KHR, - eBufferUsageFlags2CreateInfoKHR = VK_STRUCTURE_TYPE_BUFFER_USAGE_FLAGS_2_CREATE_INFO_KHR, - ePhysicalDeviceAntiLagFeaturesAMD = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ANTI_LAG_FEATURES_AMD, - eAntiLagDataAMD = VK_STRUCTURE_TYPE_ANTI_LAG_DATA_AMD, - eAntiLagPresentationInfoAMD = VK_STRUCTURE_TYPE_ANTI_LAG_PRESENTATION_INFO_AMD, ePhysicalDeviceAntiLagFeaturesAMD = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ANTI_LAG_FEATURES_AMD, eAntiLagDataAMD = VK_STRUCTURE_TYPE_ANTI_LAG_DATA_AMD, eAntiLagPresentationInfoAMD = VK_STRUCTURE_TYPE_ANTI_LAG_PRESENTATION_INFO_AMD, @@ -1480,16 +1422,6 @@ namespace VULKAN_HPP_NAMESPACE ePipelineCreateInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_CREATE_INFO_KHR, eDevicePipelineBinaryInternalCacheControlKHR = VK_STRUCTURE_TYPE_DEVICE_PIPELINE_BINARY_INTERNAL_CACHE_CONTROL_KHR, ePipelineBinaryHandlesInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_BINARY_HANDLES_INFO_KHR, - ePhysicalDevicePipelineBinaryFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_BINARY_FEATURES_KHR, - ePipelineBinaryCreateInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_BINARY_CREATE_INFO_KHR, - ePipelineBinaryInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_BINARY_INFO_KHR, - ePipelineBinaryKeyKHR = VK_STRUCTURE_TYPE_PIPELINE_BINARY_KEY_KHR, - ePhysicalDevicePipelineBinaryPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_BINARY_PROPERTIES_KHR, - eReleaseCapturedPipelineDataInfoKHR = VK_STRUCTURE_TYPE_RELEASE_CAPTURED_PIPELINE_DATA_INFO_KHR, - ePipelineBinaryDataInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_BINARY_DATA_INFO_KHR, - ePipelineCreateInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_CREATE_INFO_KHR, - eDevicePipelineBinaryInternalCacheControlKHR = VK_STRUCTURE_TYPE_DEVICE_PIPELINE_BINARY_INTERNAL_CACHE_CONTROL_KHR, - ePipelineBinaryHandlesInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_BINARY_HANDLES_INFO_KHR, ePhysicalDeviceTilePropertiesFeaturesQCOM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TILE_PROPERTIES_FEATURES_QCOM, eTilePropertiesQCOM = VK_STRUCTURE_TYPE_TILE_PROPERTIES_QCOM, ePhysicalDeviceAmigoProfilingFeaturesSEC = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_AMIGO_PROFILING_FEATURES_SEC, @@ -1505,12 +1437,6 @@ namespace VULKAN_HPP_NAMESPACE eMutableDescriptorTypeCreateInfoVALVE = VK_STRUCTURE_TYPE_MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_VALVE, ePhysicalDeviceLegacyVertexAttributesFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LEGACY_VERTEX_ATTRIBUTES_FEATURES_EXT, ePhysicalDeviceLegacyVertexAttributesPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LEGACY_VERTEX_ATTRIBUTES_PROPERTIES_EXT, - ePhysicalDeviceMutableDescriptorTypeFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MUTABLE_DESCRIPTOR_TYPE_FEATURES_EXT, - ePhysicalDeviceMutableDescriptorTypeFeaturesVALVE = 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MUTABLE_DESCRIPTOR_TYPE_FEATURES_VALVE, - eMutableDescriptorTypeCreateInfoEXT = VK_STRUCTURE_TYPE_MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_EXT, - eMutableDescriptorTypeCreateInfoVALVE = VK_STRUCTURE_TYPE_MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_VALVE, - ePhysicalDeviceLegacyVertexAttributesFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LEGACY_VERTEX_ATTRIBUTES_FEATURES_EXT, - ePhysicalDeviceLegacyVertexAttributesPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LEGACY_VERTEX_ATTRIBUTES_PROPERTIES_EXT, eLayerSettingsCreateInfoEXT = VK_STRUCTURE_TYPE_LAYER_SETTINGS_CREATE_INFO_EXT, ePhysicalDeviceShaderCoreBuiltinsFeaturesARM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_BUILTINS_FEATURES_ARM, ePhysicalDeviceShaderCoreBuiltinsPropertiesARM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_BUILTINS_PROPERTIES_ARM, @@ -1533,9 +1459,6 @@ namespace VULKAN_HPP_NAMESPACE ePhysicalDeviceComputeShaderDerivativesFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_FEATURES_KHR, ePhysicalDeviceComputeShaderDerivativesFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_FEATURES_NV, ePhysicalDeviceComputeShaderDerivativesPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_PROPERTIES_KHR, - ePhysicalDeviceComputeShaderDerivativesFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_FEATURES_KHR, - ePhysicalDeviceComputeShaderDerivativesFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_FEATURES_NV, - ePhysicalDeviceComputeShaderDerivativesPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_PROPERTIES_KHR, eVideoDecodeAv1CapabilitiesKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_AV1_CAPABILITIES_KHR, eVideoDecodeAv1PictureInfoKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_AV1_PICTURE_INFO_KHR, eVideoDecodeAv1ProfileInfoKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_AV1_PROFILE_INFO_KHR, @@ -1565,16 +1488,6 @@ namespace VULKAN_HPP_NAMESPACE eSamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_YCBCR_DEGAMMA_CREATE_INFO_QCOM, ePhysicalDeviceCubicClampFeaturesQCOM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUBIC_CLAMP_FEATURES_QCOM, ePhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ATTACHMENT_FEEDBACK_LOOP_DYNAMIC_STATE_FEATURES_EXT, - ePhysicalDeviceVertexAttributeDivisorPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_KHR, - ePipelineVertexInputDivisorStateCreateInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_KHR, - ePipelineVertexInputDivisorStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_EXT, - ePhysicalDeviceVertexAttributeDivisorFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_KHR, - ePhysicalDeviceVertexAttributeDivisorFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_EXT, - ePipelineVertexInputDivisorStateCreateInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_KHR, - ePipelineVertexInputDivisorStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_EXT, - ePhysicalDeviceVertexAttributeDivisorFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_KHR, - ePhysicalDeviceVertexAttributeDivisorFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_EXT, - ePhysicalDeviceShaderFloatControls2FeaturesKHR = 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT_CONTROLS_2_FEATURES_KHR, #if defined( VK_USE_PLATFORM_SCREEN_QNX ) eScreenBufferPropertiesQNX = VK_STRUCTURE_TYPE_SCREEN_BUFFER_PROPERTIES_QNX, eScreenBufferFormatPropertiesQNX = VK_STRUCTURE_TYPE_SCREEN_BUFFER_FORMAT_PROPERTIES_QNX, @@ -1583,27 +1496,23 @@ namespace VULKAN_HPP_NAMESPACE ePhysicalDeviceExternalMemoryScreenBufferFeaturesQNX = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_SCREEN_BUFFER_FEATURES_QNX, #endif /*VK_USE_PLATFORM_SCREEN_QNX*/ ePhysicalDeviceLayeredDriverPropertiesMSFT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LAYERED_DRIVER_PROPERTIES_MSFT, - ePhysicalDeviceIndexTypeUint8FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_KHR, - ePhysicalDeviceIndexTypeUint8FeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_EXT, - ePhysicalDeviceLineRasterizationFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_KHR, - ePhysicalDeviceLineRasterizationFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_EXT, - ePipelineRasterizationLineStateCreateInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_KHR, - ePipelineRasterizationLineStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_EXT, - ePhysicalDeviceLineRasterizationPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES_KHR, - ePhysicalDeviceLineRasterizationPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES_EXT, eCalibratedTimestampInfoKHR = VK_STRUCTURE_TYPE_CALIBRATED_TIMESTAMP_INFO_KHR, eCalibratedTimestampInfoEXT = VK_STRUCTURE_TYPE_CALIBRATED_TIMESTAMP_INFO_EXT, - ePhysicalDeviceShaderExpectAssumeFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_EXPECT_ASSUME_FEATURES_KHR, - ePhysicalDeviceMaintenance6FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_6_FEATURES_KHR, - ePhysicalDeviceMaintenance6PropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_6_PROPERTIES_KHR, - eBindMemoryStatusKHR = VK_STRUCTURE_TYPE_BIND_MEMORY_STATUS_KHR, - eBindDescriptorSetsInfoKHR = VK_STRUCTURE_TYPE_BIND_DESCRIPTOR_SETS_INFO_KHR, - ePushConstantsInfoKHR = VK_STRUCTURE_TYPE_PUSH_CONSTANTS_INFO_KHR, - ePushDescriptorSetInfoKHR = VK_STRUCTURE_TYPE_PUSH_DESCRIPTOR_SET_INFO_KHR, - ePushDescriptorSetWithTemplateInfoKHR = VK_STRUCTURE_TYPE_PUSH_DESCRIPTOR_SET_WITH_TEMPLATE_INFO_KHR, eSetDescriptorBufferOffsetsInfoEXT = VK_STRUCTURE_TYPE_SET_DESCRIPTOR_BUFFER_OFFSETS_INFO_EXT, eBindDescriptorBufferEmbeddedSamplersInfoEXT = VK_STRUCTURE_TYPE_BIND_DESCRIPTOR_BUFFER_EMBEDDED_SAMPLERS_INFO_EXT, ePhysicalDeviceDescriptorPoolOverallocationFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_POOL_OVERALLOCATION_FEATURES_NV, + eDisplaySurfaceStereoCreateInfoNV = VK_STRUCTURE_TYPE_DISPLAY_SURFACE_STEREO_CREATE_INFO_NV, + eDisplayModeStereoPropertiesNV = VK_STRUCTURE_TYPE_DISPLAY_MODE_STEREO_PROPERTIES_NV, + eVideoEncodeQuantizationMapCapabilitiesKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_QUANTIZATION_MAP_CAPABILITIES_KHR, + eVideoFormatQuantizationMapPropertiesKHR = VK_STRUCTURE_TYPE_VIDEO_FORMAT_QUANTIZATION_MAP_PROPERTIES_KHR, + eVideoEncodeQuantizationMapInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_QUANTIZATION_MAP_INFO_KHR, + eVideoEncodeQuantizationMapSessionParametersCreateInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_QUANTIZATION_MAP_SESSION_PARAMETERS_CREATE_INFO_KHR, + ePhysicalDeviceVideoEncodeQuantizationMapFeaturesKHR = 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VIDEO_ENCODE_QUANTIZATION_MAP_FEATURES_KHR, + eVideoEncodeH264QuantizationMapCapabilitiesKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_QUANTIZATION_MAP_CAPABILITIES_KHR, + eVideoEncodeH265QuantizationMapCapabilitiesKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_QUANTIZATION_MAP_CAPABILITIES_KHR, + eVideoFormatH265QuantizationMapPropertiesKHR = VK_STRUCTURE_TYPE_VIDEO_FORMAT_H265_QUANTIZATION_MAP_PROPERTIES_KHR, + eVideoEncodeAv1QuantizationMapCapabilitiesKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_AV1_QUANTIZATION_MAP_CAPABILITIES_KHR, + eVideoFormatAv1QuantizationMapPropertiesKHR = VK_STRUCTURE_TYPE_VIDEO_FORMAT_AV1_QUANTIZATION_MAP_PROPERTIES_KHR, ePhysicalDeviceRawAccessChainsFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAW_ACCESS_CHAINS_FEATURES_NV, ePhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_RELAXED_EXTENDED_INSTRUCTION_FEATURES_KHR, ePhysicalDeviceCommandBufferInheritanceFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMMAND_BUFFER_INHERITANCE_FEATURES_NV, @@ -1638,7 +1547,8 @@ namespace VULKAN_HPP_NAMESPACE eHdrVividDynamicMetadataHUAWEI = VK_STRUCTURE_TYPE_HDR_VIVID_DYNAMIC_METADATA_HUAWEI, ePhysicalDeviceCooperativeMatrix2FeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_2_FEATURES_NV, eCooperativeMatrixFlexibleDimensionsPropertiesNV = VK_STRUCTURE_TYPE_COOPERATIVE_MATRIX_FLEXIBLE_DIMENSIONS_PROPERTIES_NV, - ePhysicalDeviceCooperativeMatrix2PropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_2_PROPERTIES_NV + ePhysicalDeviceCooperativeMatrix2PropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_2_PROPERTIES_NV, + ePhysicalDeviceVertexAttributeRobustnessFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_ROBUSTNESS_FEATURES_EXT }; enum class PipelineCacheHeaderVersion @@ -1709,17 +1619,10 @@ namespace VULKAN_HPP_NAMESPACE ePipelineBinaryKHR = VK_OBJECT_TYPE_PIPELINE_BINARY_KHR, eIndirectCommandsLayoutEXT = VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_EXT, eIndirectExecutionSetEXT = VK_OBJECT_TYPE_INDIRECT_EXECUTION_SET_EXT - eMicromapEXT = VK_OBJECT_TYPE_MICROMAP_EXT, - eOpticalFlowSessionNV = VK_OBJECT_TYPE_OPTICAL_FLOW_SESSION_NV, - eShaderEXT = VK_OBJECT_TYPE_SHADER_EXT, - ePipelineBinaryKHR = VK_OBJECT_TYPE_PIPELINE_BINARY_KHR, - eIndirectCommandsLayoutEXT = VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_EXT, - eIndirectExecutionSetEXT = VK_OBJECT_TYPE_INDIRECT_EXECUTION_SET_EXT }; enum class VendorId { - eKhronos = VK_VENDOR_ID_KHRONOS, eKhronos = VK_VENDOR_ID_KHRONOS, eVIV = VK_VENDOR_ID_VIV, eVSI = VK_VENDOR_ID_VSI, @@ -2025,6 +1928,10 @@ namespace VULKAN_HPP_NAMESPACE eAstc12x10SfloatBlockEXT = VK_FORMAT_ASTC_12x10_SFLOAT_BLOCK_EXT, eAstc12x12SfloatBlock = VK_FORMAT_ASTC_12x12_SFLOAT_BLOCK, eAstc12x12SfloatBlockEXT = VK_FORMAT_ASTC_12x12_SFLOAT_BLOCK_EXT, + eA1B5G5R5UnormPack16 = VK_FORMAT_A1B5G5R5_UNORM_PACK16, + eA1B5G5R5UnormPack16KHR = VK_FORMAT_A1B5G5R5_UNORM_PACK16_KHR, + eA8Unorm = VK_FORMAT_A8_UNORM, + eA8UnormKHR = VK_FORMAT_A8_UNORM_KHR, ePvrtc12BppUnormBlockIMG = VK_FORMAT_PVRTC1_2BPP_UNORM_BLOCK_IMG, ePvrtc14BppUnormBlockIMG = VK_FORMAT_PVRTC1_4BPP_UNORM_BLOCK_IMG, ePvrtc22BppUnormBlockIMG = VK_FORMAT_PVRTC2_2BPP_UNORM_BLOCK_IMG, @@ -2034,10 +1941,7 @@ namespace VULKAN_HPP_NAMESPACE ePvrtc22BppSrgbBlockIMG = VK_FORMAT_PVRTC2_2BPP_SRGB_BLOCK_IMG, ePvrtc24BppSrgbBlockIMG = VK_FORMAT_PVRTC2_4BPP_SRGB_BLOCK_IMG, eR16G16Sfixed5NV = VK_FORMAT_R16G16_SFIXED5_NV, - eR16G16Sfixed5NV = VK_FORMAT_R16G16_SFIXED5_NV, - eR16G16S105NV = 
VK_FORMAT_R16G16_S10_5_NV, - eA1B5G5R5UnormPack16KHR = VK_FORMAT_A1B5G5R5_UNORM_PACK16_KHR, - eA8UnormKHR = VK_FORMAT_A8_UNORM_KHR + eR16G16S105NV = VK_FORMAT_R16G16_S10_5_NV }; enum class FormatFeatureFlagBits : VkFormatFeatureFlags @@ -2082,8 +1986,6 @@ namespace VULKAN_HPP_NAMESPACE eAccelerationStructureVertexBufferKHR = VK_FORMAT_FEATURE_ACCELERATION_STRUCTURE_VERTEX_BUFFER_BIT_KHR, eSampledImageFilterCubicEXT = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT, eSampledImageFilterCubicIMG = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_IMG, - eSampledImageFilterCubicEXT = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT, - eSampledImageFilterCubicIMG = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_IMG, eFragmentDensityMapEXT = VK_FORMAT_FEATURE_FRAGMENT_DENSITY_MAP_BIT_EXT, eFragmentShadingRateAttachmentKHR = VK_FORMAT_FEATURE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR, eVideoEncodeInputKHR = VK_FORMAT_FEATURE_VIDEO_ENCODE_INPUT_BIT_KHR, @@ -2110,10 +2012,6 @@ namespace VULKAN_HPP_NAMESPACE FormatFeatureFlagBits::eVideoDecodeDpbKHR | FormatFeatureFlagBits::eAccelerationStructureVertexBufferKHR | FormatFeatureFlagBits::eSampledImageFilterCubicEXT | FormatFeatureFlagBits::eFragmentDensityMapEXT | FormatFeatureFlagBits::eFragmentShadingRateAttachmentKHR | FormatFeatureFlagBits::eVideoEncodeInputKHR | FormatFeatureFlagBits::eVideoEncodeDpbKHR; - FormatFeatureFlagBits::eCositedChromaSamples | FormatFeatureFlagBits::eSampledImageFilterMinmax | FormatFeatureFlagBits::eVideoDecodeOutputKHR | - FormatFeatureFlagBits::eVideoDecodeDpbKHR | FormatFeatureFlagBits::eAccelerationStructureVertexBufferKHR | - FormatFeatureFlagBits::eSampledImageFilterCubicEXT | FormatFeatureFlagBits::eFragmentDensityMapEXT | - FormatFeatureFlagBits::eFragmentShadingRateAttachmentKHR | FormatFeatureFlagBits::eVideoEncodeInputKHR | FormatFeatureFlagBits::eVideoEncodeDpbKHR; }; enum class ImageCreateFlagBits : VkImageCreateFlags @@ -2178,28 +2076,31 @@ namespace VULKAN_HPP_NAMESPACE enum class ImageUsageFlagBits : VkImageUsageFlags { - eTransferSrc = VK_IMAGE_USAGE_TRANSFER_SRC_BIT, - eTransferDst = VK_IMAGE_USAGE_TRANSFER_DST_BIT, - eSampled = VK_IMAGE_USAGE_SAMPLED_BIT, - eStorage = VK_IMAGE_USAGE_STORAGE_BIT, - eColorAttachment = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, - eDepthStencilAttachment = VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT, - eTransientAttachment = VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT, - eInputAttachment = VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT, - eVideoDecodeDstKHR = VK_IMAGE_USAGE_VIDEO_DECODE_DST_BIT_KHR, - eVideoDecodeSrcKHR = VK_IMAGE_USAGE_VIDEO_DECODE_SRC_BIT_KHR, - eVideoDecodeDpbKHR = VK_IMAGE_USAGE_VIDEO_DECODE_DPB_BIT_KHR, - eFragmentDensityMapEXT = VK_IMAGE_USAGE_FRAGMENT_DENSITY_MAP_BIT_EXT, - eFragmentShadingRateAttachmentKHR = VK_IMAGE_USAGE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR, - eShadingRateImageNV = VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV, - eHostTransferEXT = VK_IMAGE_USAGE_HOST_TRANSFER_BIT_EXT, - eVideoEncodeDstKHR = VK_IMAGE_USAGE_VIDEO_ENCODE_DST_BIT_KHR, - eVideoEncodeSrcKHR = VK_IMAGE_USAGE_VIDEO_ENCODE_SRC_BIT_KHR, - eVideoEncodeDpbKHR = VK_IMAGE_USAGE_VIDEO_ENCODE_DPB_BIT_KHR, - eAttachmentFeedbackLoopEXT = VK_IMAGE_USAGE_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT, - eInvocationMaskHUAWEI = VK_IMAGE_USAGE_INVOCATION_MASK_BIT_HUAWEI, - eSampleWeightQCOM = VK_IMAGE_USAGE_SAMPLE_WEIGHT_BIT_QCOM, - eSampleBlockMatchQCOM = VK_IMAGE_USAGE_SAMPLE_BLOCK_MATCH_BIT_QCOM + eTransferSrc = VK_IMAGE_USAGE_TRANSFER_SRC_BIT, + eTransferDst = VK_IMAGE_USAGE_TRANSFER_DST_BIT, + eSampled = 
VK_IMAGE_USAGE_SAMPLED_BIT, + eStorage = VK_IMAGE_USAGE_STORAGE_BIT, + eColorAttachment = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, + eDepthStencilAttachment = VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT, + eTransientAttachment = VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT, + eInputAttachment = VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT, + eHostTransfer = VK_IMAGE_USAGE_HOST_TRANSFER_BIT, + eHostTransferEXT = VK_IMAGE_USAGE_HOST_TRANSFER_BIT_EXT, + eVideoDecodeDstKHR = VK_IMAGE_USAGE_VIDEO_DECODE_DST_BIT_KHR, + eVideoDecodeSrcKHR = VK_IMAGE_USAGE_VIDEO_DECODE_SRC_BIT_KHR, + eVideoDecodeDpbKHR = VK_IMAGE_USAGE_VIDEO_DECODE_DPB_BIT_KHR, + eFragmentDensityMapEXT = VK_IMAGE_USAGE_FRAGMENT_DENSITY_MAP_BIT_EXT, + eFragmentShadingRateAttachmentKHR = VK_IMAGE_USAGE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR, + eShadingRateImageNV = VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV, + eVideoEncodeDstKHR = VK_IMAGE_USAGE_VIDEO_ENCODE_DST_BIT_KHR, + eVideoEncodeSrcKHR = VK_IMAGE_USAGE_VIDEO_ENCODE_SRC_BIT_KHR, + eVideoEncodeDpbKHR = VK_IMAGE_USAGE_VIDEO_ENCODE_DPB_BIT_KHR, + eAttachmentFeedbackLoopEXT = VK_IMAGE_USAGE_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT, + eInvocationMaskHUAWEI = VK_IMAGE_USAGE_INVOCATION_MASK_BIT_HUAWEI, + eSampleWeightQCOM = VK_IMAGE_USAGE_SAMPLE_WEIGHT_BIT_QCOM, + eSampleBlockMatchQCOM = VK_IMAGE_USAGE_SAMPLE_BLOCK_MATCH_BIT_QCOM, + eVideoEncodeQuantizationDeltaMapKHR = VK_IMAGE_USAGE_VIDEO_ENCODE_QUANTIZATION_DELTA_MAP_BIT_KHR, + eVideoEncodeEmphasisMapKHR = VK_IMAGE_USAGE_VIDEO_ENCODE_EMPHASIS_MAP_BIT_KHR }; using ImageUsageFlags = Flags; @@ -2211,10 +2112,9 @@ namespace VULKAN_HPP_NAMESPACE static VULKAN_HPP_CONST_OR_CONSTEXPR ImageUsageFlags allFlags = ImageUsageFlagBits::eTransferSrc | ImageUsageFlagBits::eTransferDst | ImageUsageFlagBits::eSampled | ImageUsageFlagBits::eStorage | ImageUsageFlagBits::eColorAttachment | ImageUsageFlagBits::eDepthStencilAttachment | ImageUsageFlagBits::eTransientAttachment | - ImageUsageFlagBits::eInputAttachment | ImageUsageFlagBits::eVideoDecodeDstKHR | ImageUsageFlagBits::eVideoDecodeSrcKHR | - ImageUsageFlagBits::eVideoDecodeDpbKHR | ImageUsageFlagBits::eFragmentDensityMapEXT | ImageUsageFlagBits::eFragmentShadingRateAttachmentKHR | - ImageUsageFlagBits::eVideoDecodeDpbKHR | ImageUsageFlagBits::eFragmentDensityMapEXT | ImageUsageFlagBits::eFragmentShadingRateAttachmentKHR | - ImageUsageFlagBits::eHostTransferEXT | ImageUsageFlagBits::eVideoEncodeDstKHR | ImageUsageFlagBits::eVideoEncodeSrcKHR | + ImageUsageFlagBits::eInputAttachment | ImageUsageFlagBits::eHostTransfer | ImageUsageFlagBits::eVideoDecodeDstKHR | + ImageUsageFlagBits::eVideoDecodeSrcKHR | ImageUsageFlagBits::eVideoDecodeDpbKHR | ImageUsageFlagBits::eFragmentDensityMapEXT | + ImageUsageFlagBits::eFragmentShadingRateAttachmentKHR | ImageUsageFlagBits::eVideoEncodeDstKHR | ImageUsageFlagBits::eVideoEncodeSrcKHR | ImageUsageFlagBits::eVideoEncodeDpbKHR | ImageUsageFlagBits::eAttachmentFeedbackLoopEXT | ImageUsageFlagBits::eInvocationMaskHUAWEI | ImageUsageFlagBits::eSampleWeightQCOM | ImageUsageFlagBits::eSampleBlockMatchQCOM | ImageUsageFlagBits::eVideoEncodeQuantizationDeltaMapKHR | ImageUsageFlagBits::eVideoEncodeEmphasisMapKHR; @@ -2398,18 +2298,14 @@ namespace VULKAN_HPP_NAMESPACE eRayTracingShaderKHR = VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_KHR, eRayTracingShaderNV = VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_NV, eFragmentDensityProcessEXT = VK_PIPELINE_STAGE_FRAGMENT_DENSITY_PROCESS_BIT_EXT, - eFragmentDensityProcessEXT = VK_PIPELINE_STAGE_FRAGMENT_DENSITY_PROCESS_BIT_EXT, 
eFragmentShadingRateAttachmentKHR = VK_PIPELINE_STAGE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR, eShadingRateImageNV = VK_PIPELINE_STAGE_SHADING_RATE_IMAGE_BIT_NV, eCommandPreprocessNV = VK_PIPELINE_STAGE_COMMAND_PREPROCESS_BIT_NV, eCommandPreprocessEXT = VK_PIPELINE_STAGE_COMMAND_PREPROCESS_BIT_EXT, - eCommandPreprocessNV = VK_PIPELINE_STAGE_COMMAND_PREPROCESS_BIT_NV, - eCommandPreprocessEXT = VK_PIPELINE_STAGE_COMMAND_PREPROCESS_BIT_EXT, eTaskShaderEXT = VK_PIPELINE_STAGE_TASK_SHADER_BIT_EXT, eTaskShaderNV = VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV, eMeshShaderEXT = VK_PIPELINE_STAGE_MESH_SHADER_BIT_EXT, eMeshShaderNV = VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV - eMeshShaderNV = VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV }; using PipelineStageFlags = Flags; @@ -2428,9 +2324,6 @@ namespace VULKAN_HPP_NAMESPACE PipelineStageFlagBits::eAccelerationStructureBuildKHR | PipelineStageFlagBits::eRayTracingShaderKHR | PipelineStageFlagBits::eFragmentDensityProcessEXT | PipelineStageFlagBits::eFragmentShadingRateAttachmentKHR | PipelineStageFlagBits::eCommandPreprocessNV | PipelineStageFlagBits::eTaskShaderEXT | PipelineStageFlagBits::eMeshShaderEXT; - PipelineStageFlagBits::eAccelerationStructureBuildKHR | PipelineStageFlagBits::eRayTracingShaderKHR | PipelineStageFlagBits::eFragmentDensityProcessEXT | - PipelineStageFlagBits::eFragmentShadingRateAttachmentKHR | PipelineStageFlagBits::eCommandPreprocessNV | PipelineStageFlagBits::eTaskShaderEXT | - PipelineStageFlagBits::eMeshShaderEXT; }; enum class MemoryMapFlagBits : VkMemoryMapFlags @@ -2768,16 +2661,16 @@ namespace VULKAN_HPP_NAMESPACE eReadOnlyOptimalKHR = VK_IMAGE_LAYOUT_READ_ONLY_OPTIMAL_KHR, eAttachmentOptimal = VK_IMAGE_LAYOUT_ATTACHMENT_OPTIMAL, eAttachmentOptimalKHR = VK_IMAGE_LAYOUT_ATTACHMENT_OPTIMAL_KHR, + eRenderingLocalRead = VK_IMAGE_LAYOUT_RENDERING_LOCAL_READ, + eRenderingLocalReadKHR = VK_IMAGE_LAYOUT_RENDERING_LOCAL_READ_KHR, ePresentSrcKHR = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR, eVideoDecodeDstKHR = VK_IMAGE_LAYOUT_VIDEO_DECODE_DST_KHR, eVideoDecodeSrcKHR = VK_IMAGE_LAYOUT_VIDEO_DECODE_SRC_KHR, eVideoDecodeDpbKHR = VK_IMAGE_LAYOUT_VIDEO_DECODE_DPB_KHR, eSharedPresentKHR = VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR, eFragmentDensityMapOptimalEXT = VK_IMAGE_LAYOUT_FRAGMENT_DENSITY_MAP_OPTIMAL_EXT, - eFragmentDensityMapOptimalEXT = VK_IMAGE_LAYOUT_FRAGMENT_DENSITY_MAP_OPTIMAL_EXT, eFragmentShadingRateAttachmentOptimalKHR = VK_IMAGE_LAYOUT_FRAGMENT_SHADING_RATE_ATTACHMENT_OPTIMAL_KHR, eShadingRateOptimalNV = VK_IMAGE_LAYOUT_SHADING_RATE_OPTIMAL_NV, - eRenderingLocalReadKHR = VK_IMAGE_LAYOUT_RENDERING_LOCAL_READ_KHR, eVideoEncodeDstKHR = VK_IMAGE_LAYOUT_VIDEO_ENCODE_DST_KHR, eVideoEncodeSrcKHR = VK_IMAGE_LAYOUT_VIDEO_ENCODE_SRC_KHR, eVideoEncodeDpbKHR = VK_IMAGE_LAYOUT_VIDEO_ENCODE_DPB_KHR, @@ -3020,6 +2913,9 @@ namespace VULKAN_HPP_NAMESPACE eDepthBiasEnableEXT = VK_DYNAMIC_STATE_DEPTH_BIAS_ENABLE_EXT, ePrimitiveRestartEnable = VK_DYNAMIC_STATE_PRIMITIVE_RESTART_ENABLE, ePrimitiveRestartEnableEXT = VK_DYNAMIC_STATE_PRIMITIVE_RESTART_ENABLE_EXT, + eLineStipple = VK_DYNAMIC_STATE_LINE_STIPPLE, + eLineStippleEXT = VK_DYNAMIC_STATE_LINE_STIPPLE_EXT, + eLineStippleKHR = VK_DYNAMIC_STATE_LINE_STIPPLE_KHR, eViewportWScalingNV = VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV, eDiscardRectangleEXT = VK_DYNAMIC_STATE_DISCARD_RECTANGLE_EXT, eDiscardRectangleEnableEXT = VK_DYNAMIC_STATE_DISCARD_RECTANGLE_ENABLE_EXT, @@ -3067,12 +2963,6 @@ namespace VULKAN_HPP_NAMESPACE eRepresentativeFragmentTestEnableNV = VK_DYNAMIC_STATE_REPRESENTATIVE_FRAGMENT_TEST_ENABLE_NV, 
eCoverageReductionModeNV = VK_DYNAMIC_STATE_COVERAGE_REDUCTION_MODE_NV, eAttachmentFeedbackLoopEnableEXT = VK_DYNAMIC_STATE_ATTACHMENT_FEEDBACK_LOOP_ENABLE_EXT, - eLineStippleKHR = VK_DYNAMIC_STATE_LINE_STIPPLE_KHR, - eLineStippleEXT = VK_DYNAMIC_STATE_LINE_STIPPLE_EXT, - eDepthClampRangeEXT = VK_DYNAMIC_STATE_DEPTH_CLAMP_RANGE_EXT - eAttachmentFeedbackLoopEnableEXT = VK_DYNAMIC_STATE_ATTACHMENT_FEEDBACK_LOOP_ENABLE_EXT, - eLineStippleKHR = VK_DYNAMIC_STATE_LINE_STIPPLE_KHR, - eLineStippleEXT = VK_DYNAMIC_STATE_LINE_STIPPLE_EXT, eDepthClampRangeEXT = VK_DYNAMIC_STATE_DEPTH_CLAMP_RANGE_EXT }; @@ -3115,6 +3005,10 @@ namespace VULKAN_HPP_NAMESPACE eFailOnPipelineCompileRequiredEXT = VK_PIPELINE_CREATE_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT_EXT, eEarlyReturnOnFailure = VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT, eEarlyReturnOnFailureEXT = VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT_EXT, + eNoProtectedAccess = VK_PIPELINE_CREATE_NO_PROTECTED_ACCESS_BIT, + eNoProtectedAccessEXT = VK_PIPELINE_CREATE_NO_PROTECTED_ACCESS_BIT_EXT, + eProtectedAccessOnly = VK_PIPELINE_CREATE_PROTECTED_ACCESS_ONLY_BIT, + eProtectedAccessOnlyEXT = VK_PIPELINE_CREATE_PROTECTED_ACCESS_ONLY_BIT_EXT, eRayTracingNoNullAnyHitShadersKHR = VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_ANY_HIT_SHADERS_BIT_KHR, eRayTracingNoNullClosestHitShadersKHR = VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_CLOSEST_HIT_SHADERS_BIT_KHR, eRayTracingNoNullMissShadersKHR = VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_MISS_SHADERS_BIT_KHR, @@ -3127,10 +3021,6 @@ namespace VULKAN_HPP_NAMESPACE eVkPipelineRasterizationStateCreateFragmentDensityMapAttachmentEXT = VK_PIPELINE_RASTERIZATION_STATE_CREATE_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT, eRenderingFragmentShadingRateAttachmentKHR = VK_PIPELINE_CREATE_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR, eVkPipelineRasterizationStateCreateFragmentShadingRateAttachmentKHR = VK_PIPELINE_RASTERIZATION_STATE_CREATE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR, - eRenderingFragmentDensityMapAttachmentEXT = VK_PIPELINE_CREATE_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT, - eVkPipelineRasterizationStateCreateFragmentDensityMapAttachmentEXT = VK_PIPELINE_RASTERIZATION_STATE_CREATE_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT, - eRenderingFragmentShadingRateAttachmentKHR = VK_PIPELINE_CREATE_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR, - eVkPipelineRasterizationStateCreateFragmentShadingRateAttachmentKHR = VK_PIPELINE_RASTERIZATION_STATE_CREATE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR, eCaptureStatisticsKHR = VK_PIPELINE_CREATE_CAPTURE_STATISTICS_BIT_KHR, eCaptureInternalRepresentationsKHR = VK_PIPELINE_CREATE_CAPTURE_INTERNAL_REPRESENTATIONS_BIT_KHR, eIndirectBindableNV = VK_PIPELINE_CREATE_INDIRECT_BINDABLE_BIT_NV, @@ -3143,10 +3033,8 @@ namespace VULKAN_HPP_NAMESPACE eDepthStencilAttachmentFeedbackLoopEXT = VK_PIPELINE_CREATE_DEPTH_STENCIL_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT, eRayTracingOpacityMicromapEXT = VK_PIPELINE_CREATE_RAY_TRACING_OPACITY_MICROMAP_BIT_EXT, #if defined( VK_ENABLE_BETA_EXTENSIONS ) - eRayTracingDisplacementMicromapNV = VK_PIPELINE_CREATE_RAY_TRACING_DISPLACEMENT_MICROMAP_BIT_NV, + eRayTracingDisplacementMicromapNV = VK_PIPELINE_CREATE_RAY_TRACING_DISPLACEMENT_MICROMAP_BIT_NV #endif /*VK_ENABLE_BETA_EXTENSIONS*/ - eNoProtectedAccessEXT = VK_PIPELINE_CREATE_NO_PROTECTED_ACCESS_BIT_EXT, - eProtectedAccessOnlyEXT = VK_PIPELINE_CREATE_PROTECTED_ACCESS_ONLY_BIT_EXT }; using PipelineCreateFlags = Flags; @@ -3158,29 +3046,21 @@ namespace VULKAN_HPP_NAMESPACE static VULKAN_HPP_CONST_OR_CONSTEXPR 
PipelineCreateFlags allFlags = PipelineCreateFlagBits::eDisableOptimization | PipelineCreateFlagBits::eAllowDerivatives | PipelineCreateFlagBits::eDerivative | PipelineCreateFlagBits::eViewIndexFromDeviceIndex | PipelineCreateFlagBits::eDispatchBase | PipelineCreateFlagBits::eFailOnPipelineCompileRequired | - PipelineCreateFlagBits::eEarlyReturnOnFailure | PipelineCreateFlagBits::eRayTracingNoNullAnyHitShadersKHR | - PipelineCreateFlagBits::eEarlyReturnOnFailure | PipelineCreateFlagBits::eRayTracingNoNullAnyHitShadersKHR | - PipelineCreateFlagBits::eRayTracingNoNullClosestHitShadersKHR | PipelineCreateFlagBits::eRayTracingNoNullMissShadersKHR | - PipelineCreateFlagBits::eRayTracingNoNullIntersectionShadersKHR | PipelineCreateFlagBits::eRayTracingSkipTrianglesKHR | - PipelineCreateFlagBits::eRayTracingSkipAabbsKHR | PipelineCreateFlagBits::eRayTracingShaderGroupHandleCaptureReplayKHR | - PipelineCreateFlagBits::eDeferCompileNV | PipelineCreateFlagBits::eRenderingFragmentDensityMapAttachmentEXT | - PipelineCreateFlagBits::eRenderingFragmentShadingRateAttachmentKHR | PipelineCreateFlagBits::eCaptureStatisticsKHR | - PipelineCreateFlagBits::eCaptureInternalRepresentationsKHR | PipelineCreateFlagBits::eIndirectBindableNV | PipelineCreateFlagBits::eLibraryKHR | - PipelineCreateFlagBits::eDescriptorBufferEXT | PipelineCreateFlagBits::eRetainLinkTimeOptimizationInfoEXT | - PipelineCreateFlagBits::eLinkTimeOptimizationEXT | PipelineCreateFlagBits::eRayTracingAllowMotionNV | - PipelineCreateFlagBits::eColorAttachmentFeedbackLoopEXT | PipelineCreateFlagBits::eDepthStencilAttachmentFeedbackLoopEXT | - PipelineCreateFlagBits::eRayTracingOpacityMicromapEXT - PipelineCreateFlagBits::eDeferCompileNV | PipelineCreateFlagBits::eRenderingFragmentDensityMapAttachmentEXT | - PipelineCreateFlagBits::eRenderingFragmentShadingRateAttachmentKHR | PipelineCreateFlagBits::eCaptureStatisticsKHR | - PipelineCreateFlagBits::eCaptureInternalRepresentationsKHR | PipelineCreateFlagBits::eIndirectBindableNV | PipelineCreateFlagBits::eLibraryKHR | - PipelineCreateFlagBits::eDescriptorBufferEXT | PipelineCreateFlagBits::eRetainLinkTimeOptimizationInfoEXT | + PipelineCreateFlagBits::eEarlyReturnOnFailure | PipelineCreateFlagBits::eNoProtectedAccess | PipelineCreateFlagBits::eProtectedAccessOnly | + PipelineCreateFlagBits::eRayTracingNoNullAnyHitShadersKHR | PipelineCreateFlagBits::eRayTracingNoNullClosestHitShadersKHR | + PipelineCreateFlagBits::eRayTracingNoNullMissShadersKHR | PipelineCreateFlagBits::eRayTracingNoNullIntersectionShadersKHR | + PipelineCreateFlagBits::eRayTracingSkipTrianglesKHR | PipelineCreateFlagBits::eRayTracingSkipAabbsKHR | + PipelineCreateFlagBits::eRayTracingShaderGroupHandleCaptureReplayKHR | PipelineCreateFlagBits::eDeferCompileNV | + PipelineCreateFlagBits::eRenderingFragmentDensityMapAttachmentEXT | PipelineCreateFlagBits::eRenderingFragmentShadingRateAttachmentKHR | + PipelineCreateFlagBits::eCaptureStatisticsKHR | PipelineCreateFlagBits::eCaptureInternalRepresentationsKHR | PipelineCreateFlagBits::eIndirectBindableNV | + PipelineCreateFlagBits::eLibraryKHR | PipelineCreateFlagBits::eDescriptorBufferEXT | PipelineCreateFlagBits::eRetainLinkTimeOptimizationInfoEXT | PipelineCreateFlagBits::eLinkTimeOptimizationEXT | PipelineCreateFlagBits::eRayTracingAllowMotionNV | PipelineCreateFlagBits::eColorAttachmentFeedbackLoopEXT | PipelineCreateFlagBits::eDepthStencilAttachmentFeedbackLoopEXT | PipelineCreateFlagBits::eRayTracingOpacityMicromapEXT #if defined( VK_ENABLE_BETA_EXTENSIONS ) | 
PipelineCreateFlagBits::eRayTracingDisplacementMicromapNV #endif /*VK_ENABLE_BETA_EXTENSIONS*/ - | PipelineCreateFlagBits::eNoProtectedAccessEXT | PipelineCreateFlagBits::eProtectedAccessOnlyEXT; + ; }; enum class PipelineShaderStageCreateFlagBits : VkPipelineShaderStageCreateFlags @@ -3508,11 +3388,11 @@ namespace VULKAN_HPP_NAMESPACE { eUpdateAfterBindPool = VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT, eUpdateAfterBindPoolEXT = VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT, + ePushDescriptor = VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT, ePushDescriptorKHR = VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR, eDescriptorBufferEXT = VK_DESCRIPTOR_SET_LAYOUT_CREATE_DESCRIPTOR_BUFFER_BIT_EXT, eEmbeddedImmutableSamplersEXT = VK_DESCRIPTOR_SET_LAYOUT_CREATE_EMBEDDED_IMMUTABLE_SAMPLERS_BIT_EXT, eIndirectBindableNV = VK_DESCRIPTOR_SET_LAYOUT_CREATE_INDIRECT_BINDABLE_BIT_NV, - eIndirectBindableNV = VK_DESCRIPTOR_SET_LAYOUT_CREATE_INDIRECT_BINDABLE_BIT_NV, eHostOnlyPoolEXT = VK_DESCRIPTOR_SET_LAYOUT_CREATE_HOST_ONLY_POOL_BIT_EXT, eHostOnlyPoolVALVE = VK_DESCRIPTOR_SET_LAYOUT_CREATE_HOST_ONLY_POOL_BIT_VALVE, ePerStageNV = VK_DESCRIPTOR_SET_LAYOUT_CREATE_PER_STAGE_BIT_NV @@ -3525,10 +3405,9 @@ namespace VULKAN_HPP_NAMESPACE { static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; static VULKAN_HPP_CONST_OR_CONSTEXPR DescriptorSetLayoutCreateFlags allFlags = - DescriptorSetLayoutCreateFlagBits::eUpdateAfterBindPool | DescriptorSetLayoutCreateFlagBits::ePushDescriptorKHR | + DescriptorSetLayoutCreateFlagBits::eUpdateAfterBindPool | DescriptorSetLayoutCreateFlagBits::ePushDescriptor | DescriptorSetLayoutCreateFlagBits::eDescriptorBufferEXT | DescriptorSetLayoutCreateFlagBits::eEmbeddedImmutableSamplersEXT | DescriptorSetLayoutCreateFlagBits::eIndirectBindableNV | DescriptorSetLayoutCreateFlagBits::eHostOnlyPoolEXT | - DescriptorSetLayoutCreateFlagBits::eIndirectBindableNV | DescriptorSetLayoutCreateFlagBits::eHostOnlyPoolEXT | DescriptorSetLayoutCreateFlagBits::ePerStageNV; }; @@ -3553,9 +3432,6 @@ namespace VULKAN_HPP_NAMESPACE eBlockMatchImageQCOM = VK_DESCRIPTOR_TYPE_BLOCK_MATCH_IMAGE_QCOM, eMutableEXT = VK_DESCRIPTOR_TYPE_MUTABLE_EXT, eMutableVALVE = VK_DESCRIPTOR_TYPE_MUTABLE_VALVE - eBlockMatchImageQCOM = VK_DESCRIPTOR_TYPE_BLOCK_MATCH_IMAGE_QCOM, - eMutableEXT = VK_DESCRIPTOR_TYPE_MUTABLE_EXT, - eMutableVALVE = VK_DESCRIPTOR_TYPE_MUTABLE_VALVE }; enum class DescriptorPoolResetFlagBits : VkDescriptorPoolResetFlags @@ -3602,16 +3478,12 @@ namespace VULKAN_HPP_NAMESPACE eAccelerationStructureWriteKHR = VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_KHR, eAccelerationStructureWriteNV = VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_NV, eFragmentDensityMapReadEXT = VK_ACCESS_FRAGMENT_DENSITY_MAP_READ_BIT_EXT, - eFragmentDensityMapReadEXT = VK_ACCESS_FRAGMENT_DENSITY_MAP_READ_BIT_EXT, eFragmentShadingRateAttachmentReadKHR = VK_ACCESS_FRAGMENT_SHADING_RATE_ATTACHMENT_READ_BIT_KHR, eShadingRateImageReadNV = VK_ACCESS_SHADING_RATE_IMAGE_READ_BIT_NV, eCommandPreprocessReadNV = VK_ACCESS_COMMAND_PREPROCESS_READ_BIT_NV, eCommandPreprocessReadEXT = VK_ACCESS_COMMAND_PREPROCESS_READ_BIT_EXT, eCommandPreprocessWriteNV = VK_ACCESS_COMMAND_PREPROCESS_WRITE_BIT_NV, eCommandPreprocessWriteEXT = VK_ACCESS_COMMAND_PREPROCESS_WRITE_BIT_EXT - eCommandPreprocessReadEXT = VK_ACCESS_COMMAND_PREPROCESS_READ_BIT_EXT, - eCommandPreprocessWriteNV = VK_ACCESS_COMMAND_PREPROCESS_WRITE_BIT_NV, - eCommandPreprocessWriteEXT = VK_ACCESS_COMMAND_PREPROCESS_WRITE_BIT_EXT }; using 
AccessFlags = Flags; @@ -3629,8 +3501,6 @@ namespace VULKAN_HPP_NAMESPACE AccessFlagBits::eTransformFeedbackCounterWriteEXT | AccessFlagBits::eConditionalRenderingReadEXT | AccessFlagBits::eColorAttachmentReadNoncoherentEXT | AccessFlagBits::eAccelerationStructureReadKHR | AccessFlagBits::eAccelerationStructureWriteKHR | AccessFlagBits::eFragmentDensityMapReadEXT | AccessFlagBits::eFragmentShadingRateAttachmentReadKHR | AccessFlagBits::eCommandPreprocessReadNV | AccessFlagBits::eCommandPreprocessWriteNV; - AccessFlagBits::eAccelerationStructureReadKHR | AccessFlagBits::eAccelerationStructureWriteKHR | AccessFlagBits::eFragmentDensityMapReadEXT | - AccessFlagBits::eFragmentShadingRateAttachmentReadKHR | AccessFlagBits::eCommandPreprocessReadNV | AccessFlagBits::eCommandPreprocessWriteNV; }; enum class AttachmentDescriptionFlagBits : VkAttachmentDescriptionFlags @@ -3652,8 +3522,9 @@ namespace VULKAN_HPP_NAMESPACE eLoad = VK_ATTACHMENT_LOAD_OP_LOAD, eClear = VK_ATTACHMENT_LOAD_OP_CLEAR, eDontCare = VK_ATTACHMENT_LOAD_OP_DONT_CARE, - eNoneKHR = VK_ATTACHMENT_LOAD_OP_NONE_KHR, - eNoneEXT = VK_ATTACHMENT_LOAD_OP_NONE_EXT + eNone = VK_ATTACHMENT_LOAD_OP_NONE, + eNoneEXT = VK_ATTACHMENT_LOAD_OP_NONE_EXT, + eNoneKHR = VK_ATTACHMENT_LOAD_OP_NONE_KHR }; enum class AttachmentStoreOp @@ -3841,10 +3712,11 @@ namespace VULKAN_HPP_NAMESPACE { eUint16 = VK_INDEX_TYPE_UINT16, eUint32 = VK_INDEX_TYPE_UINT32, - eNoneKHR = VK_INDEX_TYPE_NONE_KHR, - eNoneNV = VK_INDEX_TYPE_NONE_NV, + eUint8 = VK_INDEX_TYPE_UINT8, + eUint8EXT = VK_INDEX_TYPE_UINT8_EXT, eUint8KHR = VK_INDEX_TYPE_UINT8_KHR, - eUint8EXT = VK_INDEX_TYPE_UINT8_EXT + eNoneKHR = VK_INDEX_TYPE_NONE_KHR, + eNoneNV = VK_INDEX_TYPE_NONE_NV }; //========================= @@ -3883,56 +3755,11 @@ namespace VULKAN_HPP_NAMESPACE template <> struct IndexTypeValue { - static VULKAN_HPP_CONST_OR_CONSTEXPR IndexType value = IndexType::eUint8KHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR IndexType value = IndexType::eUint8; }; template <> - struct CppType - { - using Type = uint8_t; - }; - - //========================= - //=== Index Type Traits === - //========================= - - template - struct IndexTypeValue - { - }; - - template <> - struct IndexTypeValue - { - static VULKAN_HPP_CONST_OR_CONSTEXPR IndexType value = IndexType::eUint16; - }; - - template <> - struct CppType - { - using Type = uint16_t; - }; - - template <> - struct IndexTypeValue - { - static VULKAN_HPP_CONST_OR_CONSTEXPR IndexType value = IndexType::eUint32; - }; - - template <> - struct CppType - { - using Type = uint32_t; - }; - - template <> - struct IndexTypeValue - { - static VULKAN_HPP_CONST_OR_CONSTEXPR IndexType value = IndexType::eUint8KHR; - }; - - template <> - struct CppType + struct CppType { using Type = uint8_t; }; @@ -3960,7 +3787,6 @@ namespace VULKAN_HPP_NAMESPACE eInline = VK_SUBPASS_CONTENTS_INLINE, eSecondaryCommandBuffers = VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS, eInlineAndSecondaryCommandBuffersKHR = VK_SUBPASS_CONTENTS_INLINE_AND_SECONDARY_COMMAND_BUFFERS_KHR, - eInlineAndSecondaryCommandBuffersKHR = VK_SUBPASS_CONTENTS_INLINE_AND_SECONDARY_COMMAND_BUFFERS_KHR, eInlineAndSecondaryCommandBuffersEXT = VK_SUBPASS_CONTENTS_INLINE_AND_SECONDARY_COMMAND_BUFFERS_EXT }; @@ -3976,9 +3802,11 @@ namespace VULKAN_HPP_NAMESPACE eShuffleRelative = VK_SUBGROUP_FEATURE_SHUFFLE_RELATIVE_BIT, eClustered = VK_SUBGROUP_FEATURE_CLUSTERED_BIT, eQuad = VK_SUBGROUP_FEATURE_QUAD_BIT, - ePartitionedNV = VK_SUBGROUP_FEATURE_PARTITIONED_BIT_NV, + eRotate = VK_SUBGROUP_FEATURE_ROTATE_BIT, 
eRotateKHR = VK_SUBGROUP_FEATURE_ROTATE_BIT_KHR, - eRotateClusteredKHR = VK_SUBGROUP_FEATURE_ROTATE_CLUSTERED_BIT_KHR + eRotateClustered = VK_SUBGROUP_FEATURE_ROTATE_CLUSTERED_BIT, + eRotateClusteredKHR = VK_SUBGROUP_FEATURE_ROTATE_CLUSTERED_BIT_KHR, + ePartitionedNV = VK_SUBGROUP_FEATURE_PARTITIONED_BIT_NV }; using SubgroupFeatureFlags = Flags; @@ -3990,7 +3818,7 @@ namespace VULKAN_HPP_NAMESPACE static VULKAN_HPP_CONST_OR_CONSTEXPR SubgroupFeatureFlags allFlags = SubgroupFeatureFlagBits::eBasic | SubgroupFeatureFlagBits::eVote | SubgroupFeatureFlagBits::eArithmetic | SubgroupFeatureFlagBits::eBallot | SubgroupFeatureFlagBits::eShuffle | SubgroupFeatureFlagBits::eShuffleRelative | SubgroupFeatureFlagBits::eClustered | SubgroupFeatureFlagBits::eQuad | - SubgroupFeatureFlagBits::ePartitionedNV | SubgroupFeatureFlagBits::eRotateKHR | SubgroupFeatureFlagBits::eRotateClusteredKHR; + SubgroupFeatureFlagBits::eRotate | SubgroupFeatureFlagBits::eRotateClustered | SubgroupFeatureFlagBits::ePartitionedNV; }; enum class PeerMemoryFeatureFlagBits : VkPeerMemoryFeatureFlags @@ -4086,8 +3914,8 @@ namespace VULKAN_HPP_NAMESPACE enum class DescriptorUpdateTemplateType { - eDescriptorSet = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET, - ePushDescriptorsKHR = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR + eDescriptorSet = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET, + ePushDescriptors = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS }; using DescriptorUpdateTemplateTypeKHR = DescriptorUpdateTemplateType; @@ -4324,8 +4152,6 @@ namespace VULKAN_HPP_NAMESPACE eImaginationOpenSourceMESA = VK_DRIVER_ID_IMAGINATION_OPEN_SOURCE_MESA, eMesaHoneykrisp = VK_DRIVER_ID_MESA_HONEYKRISP, eReserved27 = VK_DRIVER_ID_RESERVED_27 - eMesaHoneykrisp = VK_DRIVER_ID_MESA_HONEYKRISP, - eReserved27 = VK_DRIVER_ID_RESERVED_27 }; using DriverIdKHR = DriverId; @@ -4513,7 +4339,6 @@ namespace VULKAN_HPP_NAMESPACE eConditionalRenderingEXT = VK_PIPELINE_STAGE_2_CONDITIONAL_RENDERING_BIT_EXT, eCommandPreprocessNV = VK_PIPELINE_STAGE_2_COMMAND_PREPROCESS_BIT_NV, eCommandPreprocessEXT = VK_PIPELINE_STAGE_2_COMMAND_PREPROCESS_BIT_EXT, - eCommandPreprocessEXT = VK_PIPELINE_STAGE_2_COMMAND_PREPROCESS_BIT_EXT, eFragmentShadingRateAttachmentKHR = VK_PIPELINE_STAGE_2_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR, eShadingRateImageNV = VK_PIPELINE_STAGE_2_SHADING_RATE_IMAGE_BIT_NV, eAccelerationStructureBuildKHR = VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_KHR, @@ -4592,10 +4417,8 @@ namespace VULKAN_HPP_NAMESPACE eConditionalRenderingReadEXT = VK_ACCESS_2_CONDITIONAL_RENDERING_READ_BIT_EXT, eCommandPreprocessReadNV = VK_ACCESS_2_COMMAND_PREPROCESS_READ_BIT_NV, eCommandPreprocessReadEXT = VK_ACCESS_2_COMMAND_PREPROCESS_READ_BIT_EXT, - eCommandPreprocessReadEXT = VK_ACCESS_2_COMMAND_PREPROCESS_READ_BIT_EXT, eCommandPreprocessWriteNV = VK_ACCESS_2_COMMAND_PREPROCESS_WRITE_BIT_NV, eCommandPreprocessWriteEXT = VK_ACCESS_2_COMMAND_PREPROCESS_WRITE_BIT_EXT, - eCommandPreprocessWriteEXT = VK_ACCESS_2_COMMAND_PREPROCESS_WRITE_BIT_EXT, eFragmentShadingRateAttachmentReadKHR = VK_ACCESS_2_FRAGMENT_SHADING_RATE_ATTACHMENT_READ_BIT_KHR, eShadingRateImageReadNV = VK_ACCESS_2_SHADING_RATE_IMAGE_READ_BIT_NV, eAccelerationStructureReadKHR = VK_ACCESS_2_ACCELERATION_STRUCTURE_READ_BIT_KHR, @@ -4661,9 +4484,6 @@ namespace VULKAN_HPP_NAMESPACE eEnableLegacyDitheringEXT = VK_RENDERING_ENABLE_LEGACY_DITHERING_BIT_EXT, eContentsInlineKHR = VK_RENDERING_CONTENTS_INLINE_BIT_KHR, eContentsInlineEXT = VK_RENDERING_CONTENTS_INLINE_BIT_EXT - 
eEnableLegacyDitheringEXT = VK_RENDERING_ENABLE_LEGACY_DITHERING_BIT_EXT, - eContentsInlineKHR = VK_RENDERING_CONTENTS_INLINE_BIT_KHR, - eContentsInlineEXT = VK_RENDERING_CONTENTS_INLINE_BIT_EXT }; using RenderingFlagBitsKHR = RenderingFlagBits; @@ -4677,8 +4497,6 @@ namespace VULKAN_HPP_NAMESPACE static VULKAN_HPP_CONST_OR_CONSTEXPR RenderingFlags allFlags = RenderingFlagBits::eContentsSecondaryCommandBuffers | RenderingFlagBits::eSuspending | RenderingFlagBits::eResuming | RenderingFlagBits::eEnableLegacyDitheringEXT | RenderingFlagBits::eContentsInlineKHR; - RenderingFlagBits::eResuming | RenderingFlagBits::eEnableLegacyDitheringEXT | - RenderingFlagBits::eContentsInlineKHR; }; enum class FormatFeatureFlagBits2 : VkFormatFeatureFlags2 @@ -4696,8 +4514,6 @@ namespace VULKAN_HPP_NAMESPACE eBlitSrc = VK_FORMAT_FEATURE_2_BLIT_SRC_BIT, eBlitDst = VK_FORMAT_FEATURE_2_BLIT_DST_BIT, eSampledImageFilterLinear = VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_LINEAR_BIT, - eSampledImageFilterCubic = VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_CUBIC_BIT, - eSampledImageFilterCubicEXT = VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT, eTransferSrc = VK_FORMAT_FEATURE_2_TRANSFER_SRC_BIT, eTransferDst = VK_FORMAT_FEATURE_2_TRANSFER_DST_BIT, eSampledImageFilterMinmax = VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_MINMAX_BIT, @@ -4712,12 +4528,15 @@ namespace VULKAN_HPP_NAMESPACE eStorageReadWithoutFormat = VK_FORMAT_FEATURE_2_STORAGE_READ_WITHOUT_FORMAT_BIT, eStorageWriteWithoutFormat = VK_FORMAT_FEATURE_2_STORAGE_WRITE_WITHOUT_FORMAT_BIT, eSampledImageDepthComparison = VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_DEPTH_COMPARISON_BIT, + eSampledImageFilterCubic = VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_CUBIC_BIT, + eSampledImageFilterCubicEXT = VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT, + eHostImageTransfer = VK_FORMAT_FEATURE_2_HOST_IMAGE_TRANSFER_BIT, + eHostImageTransferEXT = VK_FORMAT_FEATURE_2_HOST_IMAGE_TRANSFER_BIT_EXT, eVideoDecodeOutputKHR = VK_FORMAT_FEATURE_2_VIDEO_DECODE_OUTPUT_BIT_KHR, eVideoDecodeDpbKHR = VK_FORMAT_FEATURE_2_VIDEO_DECODE_DPB_BIT_KHR, eAccelerationStructureVertexBufferKHR = VK_FORMAT_FEATURE_2_ACCELERATION_STRUCTURE_VERTEX_BUFFER_BIT_KHR, eFragmentDensityMapEXT = VK_FORMAT_FEATURE_2_FRAGMENT_DENSITY_MAP_BIT_EXT, eFragmentShadingRateAttachmentKHR = VK_FORMAT_FEATURE_2_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR, - eHostImageTransferEXT = VK_FORMAT_FEATURE_2_HOST_IMAGE_TRANSFER_BIT_EXT, eVideoEncodeInputKHR = VK_FORMAT_FEATURE_2_VIDEO_ENCODE_INPUT_BIT_KHR, eVideoEncodeDpbKHR = VK_FORMAT_FEATURE_2_VIDEO_ENCODE_DPB_BIT_KHR, eLinearColorAttachmentNV = VK_FORMAT_FEATURE_2_LINEAR_COLOR_ATTACHMENT_BIT_NV, @@ -4745,19 +4564,230 @@ namespace VULKAN_HPP_NAMESPACE FormatFeatureFlagBits2::eUniformTexelBuffer | FormatFeatureFlagBits2::eStorageTexelBuffer | FormatFeatureFlagBits2::eStorageTexelBufferAtomic | FormatFeatureFlagBits2::eVertexBuffer | FormatFeatureFlagBits2::eColorAttachment | FormatFeatureFlagBits2::eColorAttachmentBlend | FormatFeatureFlagBits2::eDepthStencilAttachment | FormatFeatureFlagBits2::eBlitSrc | FormatFeatureFlagBits2::eBlitDst | - FormatFeatureFlagBits2::eSampledImageFilterLinear | FormatFeatureFlagBits2::eSampledImageFilterCubic | FormatFeatureFlagBits2::eTransferSrc | - FormatFeatureFlagBits2::eTransferDst | FormatFeatureFlagBits2::eSampledImageFilterMinmax | FormatFeatureFlagBits2::eMidpointChromaSamples | + FormatFeatureFlagBits2::eSampledImageFilterLinear | FormatFeatureFlagBits2::eTransferSrc | FormatFeatureFlagBits2::eTransferDst | + 
FormatFeatureFlagBits2::eSampledImageFilterMinmax | FormatFeatureFlagBits2::eMidpointChromaSamples | FormatFeatureFlagBits2::eSampledImageYcbcrConversionLinearFilter | FormatFeatureFlagBits2::eSampledImageYcbcrConversionSeparateReconstructionFilter | FormatFeatureFlagBits2::eSampledImageYcbcrConversionChromaReconstructionExplicit | FormatFeatureFlagBits2::eSampledImageYcbcrConversionChromaReconstructionExplicitForceable | FormatFeatureFlagBits2::eDisjoint | FormatFeatureFlagBits2::eCositedChromaSamples | FormatFeatureFlagBits2::eStorageReadWithoutFormat | FormatFeatureFlagBits2::eStorageWriteWithoutFormat | - FormatFeatureFlagBits2::eSampledImageDepthComparison | FormatFeatureFlagBits2::eVideoDecodeOutputKHR | FormatFeatureFlagBits2::eVideoDecodeDpbKHR | + FormatFeatureFlagBits2::eSampledImageDepthComparison | FormatFeatureFlagBits2::eSampledImageFilterCubic | FormatFeatureFlagBits2::eHostImageTransfer | + FormatFeatureFlagBits2::eVideoDecodeOutputKHR | FormatFeatureFlagBits2::eVideoDecodeDpbKHR | FormatFeatureFlagBits2::eAccelerationStructureVertexBufferKHR | FormatFeatureFlagBits2::eFragmentDensityMapEXT | - FormatFeatureFlagBits2::eFragmentShadingRateAttachmentKHR | FormatFeatureFlagBits2::eHostImageTransferEXT | FormatFeatureFlagBits2::eVideoEncodeInputKHR | - FormatFeatureFlagBits2::eVideoEncodeDpbKHR | FormatFeatureFlagBits2::eLinearColorAttachmentNV | FormatFeatureFlagBits2::eWeightImageQCOM | - FormatFeatureFlagBits2::eWeightSampledImageQCOM | FormatFeatureFlagBits2::eBlockMatchingQCOM | FormatFeatureFlagBits2::eBoxFilterSampledQCOM | - FormatFeatureFlagBits2::eOpticalFlowImageNV | FormatFeatureFlagBits2::eOpticalFlowVectorNV | FormatFeatureFlagBits2::eOpticalFlowCostNV | - FormatFeatureFlagBits2::eVideoEncodeQuantizationDeltaMapKHR | FormatFeatureFlagBits2::eVideoEncodeEmphasisMapKHR; + FormatFeatureFlagBits2::eFragmentShadingRateAttachmentKHR | FormatFeatureFlagBits2::eVideoEncodeInputKHR | FormatFeatureFlagBits2::eVideoEncodeDpbKHR | + FormatFeatureFlagBits2::eLinearColorAttachmentNV | FormatFeatureFlagBits2::eWeightImageQCOM | FormatFeatureFlagBits2::eWeightSampledImageQCOM | + FormatFeatureFlagBits2::eBlockMatchingQCOM | FormatFeatureFlagBits2::eBoxFilterSampledQCOM | FormatFeatureFlagBits2::eOpticalFlowImageNV | + FormatFeatureFlagBits2::eOpticalFlowVectorNV | FormatFeatureFlagBits2::eOpticalFlowCostNV | FormatFeatureFlagBits2::eVideoEncodeQuantizationDeltaMapKHR | + FormatFeatureFlagBits2::eVideoEncodeEmphasisMapKHR; + }; + + //=== VK_VERSION_1_4 === + + enum class QueueGlobalPriority + { + eLow = VK_QUEUE_GLOBAL_PRIORITY_LOW, + eLowKHR = VK_QUEUE_GLOBAL_PRIORITY_LOW_KHR, + eMedium = VK_QUEUE_GLOBAL_PRIORITY_MEDIUM, + eMediumKHR = VK_QUEUE_GLOBAL_PRIORITY_MEDIUM_KHR, + eHigh = VK_QUEUE_GLOBAL_PRIORITY_HIGH, + eHighKHR = VK_QUEUE_GLOBAL_PRIORITY_HIGH_KHR, + eRealtime = VK_QUEUE_GLOBAL_PRIORITY_REALTIME, + eRealtimeKHR = VK_QUEUE_GLOBAL_PRIORITY_REALTIME_KHR + }; + using QueueGlobalPriorityEXT = QueueGlobalPriority; + using QueueGlobalPriorityKHR = QueueGlobalPriority; + + enum class LineRasterizationMode + { + eDefault = VK_LINE_RASTERIZATION_MODE_DEFAULT, + eDefaultKHR = VK_LINE_RASTERIZATION_MODE_DEFAULT_KHR, + eRectangular = VK_LINE_RASTERIZATION_MODE_RECTANGULAR, + eRectangularKHR = VK_LINE_RASTERIZATION_MODE_RECTANGULAR_KHR, + eBresenham = VK_LINE_RASTERIZATION_MODE_BRESENHAM, + eBresenhamKHR = VK_LINE_RASTERIZATION_MODE_BRESENHAM_KHR, + eRectangularSmooth = VK_LINE_RASTERIZATION_MODE_RECTANGULAR_SMOOTH, + eRectangularSmoothKHR = 
VK_LINE_RASTERIZATION_MODE_RECTANGULAR_SMOOTH_KHR + }; + using LineRasterizationModeEXT = LineRasterizationMode; + using LineRasterizationModeKHR = LineRasterizationMode; + + enum class MemoryUnmapFlagBits : VkMemoryUnmapFlags + { + eReserveEXT = VK_MEMORY_UNMAP_RESERVE_BIT_EXT + }; + using MemoryUnmapFlagBitsKHR = MemoryUnmapFlagBits; + + using MemoryUnmapFlags = Flags; + using MemoryUnmapFlagsKHR = MemoryUnmapFlags; + + template <> + struct FlagTraits + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; + static VULKAN_HPP_CONST_OR_CONSTEXPR MemoryUnmapFlags allFlags = MemoryUnmapFlagBits::eReserveEXT; + }; + + enum class PipelineCreateFlagBits2 : VkPipelineCreateFlags2 + { + eDisableOptimization = VK_PIPELINE_CREATE_2_DISABLE_OPTIMIZATION_BIT, + eAllowDerivatives = VK_PIPELINE_CREATE_2_ALLOW_DERIVATIVES_BIT, + eDerivative = VK_PIPELINE_CREATE_2_DERIVATIVE_BIT, + eViewIndexFromDeviceIndex = VK_PIPELINE_CREATE_2_VIEW_INDEX_FROM_DEVICE_INDEX_BIT, + eDispatchBase = VK_PIPELINE_CREATE_2_DISPATCH_BASE_BIT, + eFailOnPipelineCompileRequired = VK_PIPELINE_CREATE_2_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT, + eEarlyReturnOnFailure = VK_PIPELINE_CREATE_2_EARLY_RETURN_ON_FAILURE_BIT, + eNoProtectedAccess = VK_PIPELINE_CREATE_2_NO_PROTECTED_ACCESS_BIT, + eNoProtectedAccessEXT = VK_PIPELINE_CREATE_2_NO_PROTECTED_ACCESS_BIT_EXT, + eProtectedAccessOnly = VK_PIPELINE_CREATE_2_PROTECTED_ACCESS_ONLY_BIT, + eProtectedAccessOnlyEXT = VK_PIPELINE_CREATE_2_PROTECTED_ACCESS_ONLY_BIT_EXT, +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + eExecutionGraphAMDX = VK_PIPELINE_CREATE_2_EXECUTION_GRAPH_BIT_AMDX, +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + eEnableLegacyDitheringEXT = VK_PIPELINE_CREATE_2_ENABLE_LEGACY_DITHERING_BIT_EXT, + eDeferCompileNV = VK_PIPELINE_CREATE_2_DEFER_COMPILE_BIT_NV, + eCaptureStatisticsKHR = VK_PIPELINE_CREATE_2_CAPTURE_STATISTICS_BIT_KHR, + eCaptureInternalRepresentationsKHR = VK_PIPELINE_CREATE_2_CAPTURE_INTERNAL_REPRESENTATIONS_BIT_KHR, + eLinkTimeOptimizationEXT = VK_PIPELINE_CREATE_2_LINK_TIME_OPTIMIZATION_BIT_EXT, + eRetainLinkTimeOptimizationInfoEXT = VK_PIPELINE_CREATE_2_RETAIN_LINK_TIME_OPTIMIZATION_INFO_BIT_EXT, + eLibraryKHR = VK_PIPELINE_CREATE_2_LIBRARY_BIT_KHR, + eRayTracingSkipTrianglesKHR = VK_PIPELINE_CREATE_2_RAY_TRACING_SKIP_TRIANGLES_BIT_KHR, + eRayTracingSkipAabbsKHR = VK_PIPELINE_CREATE_2_RAY_TRACING_SKIP_AABBS_BIT_KHR, + eRayTracingNoNullAnyHitShadersKHR = VK_PIPELINE_CREATE_2_RAY_TRACING_NO_NULL_ANY_HIT_SHADERS_BIT_KHR, + eRayTracingNoNullClosestHitShadersKHR = VK_PIPELINE_CREATE_2_RAY_TRACING_NO_NULL_CLOSEST_HIT_SHADERS_BIT_KHR, + eRayTracingNoNullMissShadersKHR = VK_PIPELINE_CREATE_2_RAY_TRACING_NO_NULL_MISS_SHADERS_BIT_KHR, + eRayTracingNoNullIntersectionShadersKHR = VK_PIPELINE_CREATE_2_RAY_TRACING_NO_NULL_INTERSECTION_SHADERS_BIT_KHR, + eRayTracingShaderGroupHandleCaptureReplayKHR = VK_PIPELINE_CREATE_2_RAY_TRACING_SHADER_GROUP_HANDLE_CAPTURE_REPLAY_BIT_KHR, + eIndirectBindableNV = VK_PIPELINE_CREATE_2_INDIRECT_BINDABLE_BIT_NV, + eRayTracingAllowMotionNV = VK_PIPELINE_CREATE_2_RAY_TRACING_ALLOW_MOTION_BIT_NV, + eRenderingFragmentShadingRateAttachmentKHR = VK_PIPELINE_CREATE_2_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR, + eRenderingFragmentDensityMapAttachmentEXT = VK_PIPELINE_CREATE_2_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT, + eRayTracingOpacityMicromapEXT = VK_PIPELINE_CREATE_2_RAY_TRACING_OPACITY_MICROMAP_BIT_EXT, + eColorAttachmentFeedbackLoopEXT = VK_PIPELINE_CREATE_2_COLOR_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT, + 
+    eDepthStencilAttachmentFeedbackLoopEXT = VK_PIPELINE_CREATE_2_DEPTH_STENCIL_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT,
+    eRayTracingDisplacementMicromapNV = VK_PIPELINE_CREATE_2_RAY_TRACING_DISPLACEMENT_MICROMAP_BIT_NV,
+    eDescriptorBufferEXT = VK_PIPELINE_CREATE_2_DESCRIPTOR_BUFFER_BIT_EXT,
+    eCaptureDataKHR = VK_PIPELINE_CREATE_2_CAPTURE_DATA_BIT_KHR,
+    eIndirectBindableEXT = VK_PIPELINE_CREATE_2_INDIRECT_BINDABLE_BIT_EXT
+  };
+  using PipelineCreateFlagBits2KHR = PipelineCreateFlagBits2;
+
+  using PipelineCreateFlags2 = Flags<PipelineCreateFlagBits2>;
+  using PipelineCreateFlags2KHR = PipelineCreateFlags2;
+
+  template <>
+  struct FlagTraits<PipelineCreateFlagBits2>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineCreateFlags2 allFlags =
+      PipelineCreateFlagBits2::eDisableOptimization | PipelineCreateFlagBits2::eAllowDerivatives | PipelineCreateFlagBits2::eDerivative |
+      PipelineCreateFlagBits2::eViewIndexFromDeviceIndex | PipelineCreateFlagBits2::eDispatchBase | PipelineCreateFlagBits2::eFailOnPipelineCompileRequired |
+      PipelineCreateFlagBits2::eEarlyReturnOnFailure | PipelineCreateFlagBits2::eNoProtectedAccess | PipelineCreateFlagBits2::eProtectedAccessOnly
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+      | PipelineCreateFlagBits2::eExecutionGraphAMDX
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+      | PipelineCreateFlagBits2::eEnableLegacyDitheringEXT | PipelineCreateFlagBits2::eDeferCompileNV | PipelineCreateFlagBits2::eCaptureStatisticsKHR |
+      PipelineCreateFlagBits2::eCaptureInternalRepresentationsKHR | PipelineCreateFlagBits2::eLinkTimeOptimizationEXT |
+      PipelineCreateFlagBits2::eRetainLinkTimeOptimizationInfoEXT | PipelineCreateFlagBits2::eLibraryKHR |
+      PipelineCreateFlagBits2::eRayTracingSkipTrianglesKHR | PipelineCreateFlagBits2::eRayTracingSkipAabbsKHR |
+      PipelineCreateFlagBits2::eRayTracingNoNullAnyHitShadersKHR | PipelineCreateFlagBits2::eRayTracingNoNullClosestHitShadersKHR |
+      PipelineCreateFlagBits2::eRayTracingNoNullMissShadersKHR | PipelineCreateFlagBits2::eRayTracingNoNullIntersectionShadersKHR |
+      PipelineCreateFlagBits2::eRayTracingShaderGroupHandleCaptureReplayKHR | PipelineCreateFlagBits2::eIndirectBindableNV |
+      PipelineCreateFlagBits2::eRayTracingAllowMotionNV | PipelineCreateFlagBits2::eRenderingFragmentShadingRateAttachmentKHR |
+      PipelineCreateFlagBits2::eRenderingFragmentDensityMapAttachmentEXT | PipelineCreateFlagBits2::eRayTracingOpacityMicromapEXT |
+      PipelineCreateFlagBits2::eColorAttachmentFeedbackLoopEXT | PipelineCreateFlagBits2::eDepthStencilAttachmentFeedbackLoopEXT |
+      PipelineCreateFlagBits2::eRayTracingDisplacementMicromapNV | PipelineCreateFlagBits2::eDescriptorBufferEXT | PipelineCreateFlagBits2::eCaptureDataKHR |
+      PipelineCreateFlagBits2::eIndirectBindableEXT;
+  };
+
+  enum class BufferUsageFlagBits2 : VkBufferUsageFlags2
+  {
+    eTransferSrc = VK_BUFFER_USAGE_2_TRANSFER_SRC_BIT,
+    eTransferDst = VK_BUFFER_USAGE_2_TRANSFER_DST_BIT,
+    eUniformTexelBuffer = VK_BUFFER_USAGE_2_UNIFORM_TEXEL_BUFFER_BIT,
+    eStorageTexelBuffer = VK_BUFFER_USAGE_2_STORAGE_TEXEL_BUFFER_BIT,
+    eUniformBuffer = VK_BUFFER_USAGE_2_UNIFORM_BUFFER_BIT,
+    eStorageBuffer = VK_BUFFER_USAGE_2_STORAGE_BUFFER_BIT,
+    eIndexBuffer = VK_BUFFER_USAGE_2_INDEX_BUFFER_BIT,
+    eVertexBuffer = VK_BUFFER_USAGE_2_VERTEX_BUFFER_BIT,
+    eIndirectBuffer = VK_BUFFER_USAGE_2_INDIRECT_BUFFER_BIT,
+    eShaderDeviceAddress = VK_BUFFER_USAGE_2_SHADER_DEVICE_ADDRESS_BIT,
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+    eExecutionGraphScratchAMDX = VK_BUFFER_USAGE_2_EXECUTION_GRAPH_SCRATCH_BIT_AMDX,
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+    eConditionalRenderingEXT = VK_BUFFER_USAGE_2_CONDITIONAL_RENDERING_BIT_EXT,
+    eShaderBindingTableKHR = VK_BUFFER_USAGE_2_SHADER_BINDING_TABLE_BIT_KHR,
+    eRayTracingNV = VK_BUFFER_USAGE_2_RAY_TRACING_BIT_NV,
+    eTransformFeedbackBufferEXT = VK_BUFFER_USAGE_2_TRANSFORM_FEEDBACK_BUFFER_BIT_EXT,
+    eTransformFeedbackCounterBufferEXT = VK_BUFFER_USAGE_2_TRANSFORM_FEEDBACK_COUNTER_BUFFER_BIT_EXT,
+    eVideoDecodeSrcKHR = VK_BUFFER_USAGE_2_VIDEO_DECODE_SRC_BIT_KHR,
+    eVideoDecodeDstKHR = VK_BUFFER_USAGE_2_VIDEO_DECODE_DST_BIT_KHR,
+    eVideoEncodeDstKHR = VK_BUFFER_USAGE_2_VIDEO_ENCODE_DST_BIT_KHR,
+    eVideoEncodeSrcKHR = VK_BUFFER_USAGE_2_VIDEO_ENCODE_SRC_BIT_KHR,
+    eAccelerationStructureBuildInputReadOnlyKHR = VK_BUFFER_USAGE_2_ACCELERATION_STRUCTURE_BUILD_INPUT_READ_ONLY_BIT_KHR,
+    eAccelerationStructureStorageKHR = VK_BUFFER_USAGE_2_ACCELERATION_STRUCTURE_STORAGE_BIT_KHR,
+    eSamplerDescriptorBufferEXT = VK_BUFFER_USAGE_2_SAMPLER_DESCRIPTOR_BUFFER_BIT_EXT,
+    eResourceDescriptorBufferEXT = VK_BUFFER_USAGE_2_RESOURCE_DESCRIPTOR_BUFFER_BIT_EXT,
+    ePushDescriptorsDescriptorBufferEXT = VK_BUFFER_USAGE_2_PUSH_DESCRIPTORS_DESCRIPTOR_BUFFER_BIT_EXT,
+    eMicromapBuildInputReadOnlyEXT = VK_BUFFER_USAGE_2_MICROMAP_BUILD_INPUT_READ_ONLY_BIT_EXT,
+    eMicromapStorageEXT = VK_BUFFER_USAGE_2_MICROMAP_STORAGE_BIT_EXT,
+    ePreprocessBufferEXT = VK_BUFFER_USAGE_2_PREPROCESS_BUFFER_BIT_EXT
+  };
+  using BufferUsageFlagBits2KHR = BufferUsageFlagBits2;
+
+  using BufferUsageFlags2 = Flags<BufferUsageFlagBits2>;
+  using BufferUsageFlags2KHR = BufferUsageFlags2;
+
+  template <>
+  struct FlagTraits<BufferUsageFlagBits2>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR BufferUsageFlags2 allFlags =
+      BufferUsageFlagBits2::eTransferSrc | BufferUsageFlagBits2::eTransferDst | BufferUsageFlagBits2::eUniformTexelBuffer |
+      BufferUsageFlagBits2::eStorageTexelBuffer | BufferUsageFlagBits2::eUniformBuffer | BufferUsageFlagBits2::eStorageBuffer |
+      BufferUsageFlagBits2::eIndexBuffer | BufferUsageFlagBits2::eVertexBuffer | BufferUsageFlagBits2::eIndirectBuffer |
+      BufferUsageFlagBits2::eShaderDeviceAddress
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+      | BufferUsageFlagBits2::eExecutionGraphScratchAMDX
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+      | BufferUsageFlagBits2::eConditionalRenderingEXT | BufferUsageFlagBits2::eShaderBindingTableKHR | BufferUsageFlagBits2::eTransformFeedbackBufferEXT |
+      BufferUsageFlagBits2::eTransformFeedbackCounterBufferEXT | BufferUsageFlagBits2::eVideoDecodeSrcKHR | BufferUsageFlagBits2::eVideoDecodeDstKHR |
+      BufferUsageFlagBits2::eVideoEncodeDstKHR | BufferUsageFlagBits2::eVideoEncodeSrcKHR | BufferUsageFlagBits2::eAccelerationStructureBuildInputReadOnlyKHR |
+      BufferUsageFlagBits2::eAccelerationStructureStorageKHR | BufferUsageFlagBits2::eSamplerDescriptorBufferEXT |
+      BufferUsageFlagBits2::eResourceDescriptorBufferEXT | BufferUsageFlagBits2::ePushDescriptorsDescriptorBufferEXT |
+      BufferUsageFlagBits2::eMicromapBuildInputReadOnlyEXT | BufferUsageFlagBits2::eMicromapStorageEXT | BufferUsageFlagBits2::ePreprocessBufferEXT;
+  };
+
+  enum class PipelineRobustnessBufferBehavior
+  {
+    eDeviceDefault = VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_DEVICE_DEFAULT,
+    eDisabled = VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_DISABLED,
+    eRobustBufferAccess = VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_ROBUST_BUFFER_ACCESS,
+    eRobustBufferAccess2 = VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_ROBUST_BUFFER_ACCESS_2
+  };
+  using PipelineRobustnessBufferBehaviorEXT = PipelineRobustnessBufferBehavior;
+
+  enum class PipelineRobustnessImageBehavior
+  {
+    eDeviceDefault = VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_DEVICE_DEFAULT,
+    eDisabled = VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_DISABLED,
+    eRobustImageAccess = VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_ROBUST_IMAGE_ACCESS,
+    eRobustImageAccess2 = VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_ROBUST_IMAGE_ACCESS_2
+  };
+  using PipelineRobustnessImageBehaviorEXT = PipelineRobustnessImageBehavior;
+
+  enum class HostImageCopyFlagBits : VkHostImageCopyFlags
+  {
+    eMemcpy = VK_HOST_IMAGE_COPY_MEMCPY
+  };
+  using HostImageCopyFlagBitsEXT = HostImageCopyFlagBits;
+
+  using HostImageCopyFlags = Flags<HostImageCopyFlagBits>;
+  using HostImageCopyFlagsEXT = HostImageCopyFlags;
+
+  template <>
+  struct FlagTraits<HostImageCopyFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR HostImageCopyFlags allFlags = HostImageCopyFlagBits::eMemcpy;
   };

   //=== VK_KHR_surface ===
@@ -4796,8 +4826,6 @@ namespace VULKAN_HPP_NAMESPACE
     eSharedDemandRefresh = VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR,
     eSharedContinuousRefresh = VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR,
     eFifoLatestReadyEXT = VK_PRESENT_MODE_FIFO_LATEST_READY_EXT
-    eSharedContinuousRefresh = VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR,
-    eFifoLatestReadyEXT = VK_PRESENT_MODE_FIFO_LATEST_READY_EXT
   };

   enum class ColorSpaceKHR
@@ -4820,13 +4848,6 @@ namespace VULKAN_HPP_NAMESPACE
     ePassThroughEXT = VK_COLOR_SPACE_PASS_THROUGH_EXT,
     eExtendedSrgbNonlinearEXT = VK_COLOR_SPACE_EXTENDED_SRGB_NONLINEAR_EXT,
     eDisplayNativeAMD = VK_COLOR_SPACE_DISPLAY_NATIVE_AMD
-    eDolbyvisionEXT VULKAN_HPP_DEPRECATED_17( "eDolbyvisionEXT is deprecated, but no reason was given in the API XML" ) = VK_COLOR_SPACE_DOLBYVISION_EXT,
-    eHdr10HlgEXT = VK_COLOR_SPACE_HDR10_HLG_EXT,
-    eAdobergbLinearEXT = VK_COLOR_SPACE_ADOBERGB_LINEAR_EXT,
-    eAdobergbNonlinearEXT = VK_COLOR_SPACE_ADOBERGB_NONLINEAR_EXT,
-    ePassThroughEXT = VK_COLOR_SPACE_PASS_THROUGH_EXT,
-    eExtendedSrgbNonlinearEXT = VK_COLOR_SPACE_EXTENDED_SRGB_NONLINEAR_EXT,
-    eDisplayNativeAMD = VK_COLOR_SPACE_DISPLAY_NATIVE_AMD
   };

   enum class CompositeAlphaFlagBitsKHR : VkCompositeAlphaFlagsKHR
@@ -5669,24 +5690,6 @@ namespace VULKAN_HPP_NAMESPACE
   };
 #endif /*VK_USE_PLATFORM_VI_NN*/

-  //=== VK_EXT_pipeline_robustness ===
-
-  enum class PipelineRobustnessBufferBehaviorEXT
-  {
-    eDeviceDefault = VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_DEVICE_DEFAULT_EXT,
-    eDisabled = VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_DISABLED_EXT,
-    eRobustBufferAccess = VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_ROBUST_BUFFER_ACCESS_EXT,
-    eRobustBufferAccess2 = VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_ROBUST_BUFFER_ACCESS_2_EXT
-  };
-
-  enum class PipelineRobustnessImageBehaviorEXT
-  {
-    eDeviceDefault = VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_DEVICE_DEFAULT_EXT,
-    eDisabled = VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_DISABLED_EXT,
-    eRobustImageAccess = VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_ROBUST_IMAGE_ACCESS_EXT,
-    eRobustImageAccess2 = VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_ROBUST_IMAGE_ACCESS_2_EXT
-  };
-
   //=== VK_EXT_conditional_rendering ===

   enum class ConditionalRenderingFlagBitsEXT : VkConditionalRenderingFlagsEXT
@@ -6268,17 +6271,6 @@ namespace VULKAN_HPP_NAMESPACE
     static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineCompilerControlFlagsAMD allFlags = {};
   };

-  //=== VK_KHR_global_priority ===
-
-  enum class QueueGlobalPriorityKHR
-  {
-    eLow = VK_QUEUE_GLOBAL_PRIORITY_LOW_KHR,
-    eMedium = VK_QUEUE_GLOBAL_PRIORITY_MEDIUM_KHR,
-    eHigh = VK_QUEUE_GLOBAL_PRIORITY_HIGH_KHR,
-    eRealtime = VK_QUEUE_GLOBAL_PRIORITY_REALTIME_KHR
-  };
-  using QueueGlobalPriorityEXT = QueueGlobalPriorityKHR;
-
   //=== VK_AMD_memory_overallocation_behavior ===

   enum class MemoryOverallocationBehaviorAMD
@@ -6470,38 +6462,6 @@ namespace VULKAN_HPP_NAMESPACE
     eFloat64 = VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_FLOAT64_KHR
   };

-  //=== VK_EXT_host_image_copy ===
-
-  enum class HostImageCopyFlagBitsEXT : VkHostImageCopyFlagsEXT
-  {
-    eMemcpy = VK_HOST_IMAGE_COPY_MEMCPY_EXT
-  };
-
-  using HostImageCopyFlagsEXT = Flags<HostImageCopyFlagBitsEXT>;
-
-  template <>
-  struct FlagTraits<HostImageCopyFlagBitsEXT>
-  {
-    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR HostImageCopyFlagsEXT allFlags = HostImageCopyFlagBitsEXT::eMemcpy;
-  };
-
-  //=== VK_KHR_map_memory2 ===
-
-  enum class MemoryUnmapFlagBitsKHR : VkMemoryUnmapFlagsKHR
-  {
-    eReserveEXT = VK_MEMORY_UNMAP_RESERVE_BIT_EXT
-  };
-
-  using MemoryUnmapFlagsKHR = Flags<MemoryUnmapFlagBitsKHR>;
-
-  template <>
-  struct FlagTraits<MemoryUnmapFlagBitsKHR>
-  {
-    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR MemoryUnmapFlagsKHR allFlags = MemoryUnmapFlagBitsKHR::eReserveEXT;
-  };
-
   //=== VK_EXT_surface_maintenance1 ===

   enum class PresentScalingFlagBitsEXT : VkPresentScalingFlagsEXT
@@ -6625,7 +6585,9 @@ namespace VULKAN_HPP_NAMESPACE
   enum class VideoEncodeCapabilityFlagBitsKHR : VkVideoEncodeCapabilityFlagsKHR
   {
     ePrecedingExternallyEncodedBytes = VK_VIDEO_ENCODE_CAPABILITY_PRECEDING_EXTERNALLY_ENCODED_BYTES_BIT_KHR,
-    eInsufficientBitstreamBufferRangeDetection = VK_VIDEO_ENCODE_CAPABILITY_INSUFFICIENT_BITSTREAM_BUFFER_RANGE_DETECTION_BIT_KHR
+    eInsufficientBitstreamBufferRangeDetection = VK_VIDEO_ENCODE_CAPABILITY_INSUFFICIENT_BITSTREAM_BUFFER_RANGE_DETECTION_BIT_KHR,
+    eQuantizationDeltaMap = VK_VIDEO_ENCODE_CAPABILITY_QUANTIZATION_DELTA_MAP_BIT_KHR,
+    eEmphasisMap = VK_VIDEO_ENCODE_CAPABILITY_EMPHASIS_MAP_BIT_KHR
   };

   using VideoEncodeCapabilityFlagsKHR = Flags<VideoEncodeCapabilityFlagBitsKHR>;
@@ -6635,7 +6597,8 @@
   {
     static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
     static VULKAN_HPP_CONST_OR_CONSTEXPR VideoEncodeCapabilityFlagsKHR allFlags =
-      VideoEncodeCapabilityFlagBitsKHR::ePrecedingExternallyEncodedBytes | VideoEncodeCapabilityFlagBitsKHR::eInsufficientBitstreamBufferRangeDetection;
+      VideoEncodeCapabilityFlagBitsKHR::ePrecedingExternallyEncodedBytes | VideoEncodeCapabilityFlagBitsKHR::eInsufficientBitstreamBufferRangeDetection |
+      VideoEncodeCapabilityFlagBitsKHR::eQuantizationDeltaMap | VideoEncodeCapabilityFlagBitsKHR::eEmphasisMap;
   };

   enum class VideoEncodeFeedbackFlagBitsKHR : VkVideoEncodeFeedbackFlagsKHR
@@ -6643,9 +6606,6 @@
     eBitstreamBufferOffset = VK_VIDEO_ENCODE_FEEDBACK_BITSTREAM_BUFFER_OFFSET_BIT_KHR,
     eBitstreamBytesWritten = VK_VIDEO_ENCODE_FEEDBACK_BITSTREAM_BYTES_WRITTEN_BIT_KHR,
     eBitstreamHasOverrides = VK_VIDEO_ENCODE_FEEDBACK_BITSTREAM_HAS_OVERRIDES_BIT_KHR
-    eBitstreamBufferOffset = VK_VIDEO_ENCODE_FEEDBACK_BITSTREAM_BUFFER_OFFSET_BIT_KHR,
-    eBitstreamBytesWritten = VK_VIDEO_ENCODE_FEEDBACK_BITSTREAM_BYTES_WRITTEN_BIT_KHR,
-    eBitstreamHasOverrides = VK_VIDEO_ENCODE_FEEDBACK_BITSTREAM_HAS_OVERRIDES_BIT_KHR
   };

   using VideoEncodeFeedbackFlagsKHR = Flags<VideoEncodeFeedbackFlagBitsKHR>;
@@ -6657,9 +6617,6 @@
   {
     static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
     static VULKAN_HPP_CONST_OR_CONSTEXPR VideoEncodeFeedbackFlagsKHR allFlags = VideoEncodeFeedbackFlagBitsKHR::eBitstreamBufferOffset | VideoEncodeFeedbackFlagBitsKHR::eBitstreamBytesWritten | VideoEncodeFeedbackFlagBitsKHR::eBitstreamHasOverrides;
-    static VULKAN_HPP_CONST_OR_CONSTEXPR VideoEncodeFeedbackFlagsKHR allFlags = 
VideoEncodeFeedbackFlagBitsKHR::eBitstreamBufferOffset | - VideoEncodeFeedbackFlagBitsKHR::eBitstreamBytesWritten | - VideoEncodeFeedbackFlagBitsKHR::eBitstreamHasOverrides; }; enum class VideoEncodeUsageFlagBitsKHR : VkVideoEncodeUsageFlagsKHR @@ -7331,174 +7288,6 @@ namespace VULKAN_HPP_NAMESPACE static VULKAN_HPP_CONST_OR_CONSTEXPR OpticalFlowExecuteFlagsNV allFlags = OpticalFlowExecuteFlagBitsNV::eDisableTemporalHints; }; - //=== VK_KHR_maintenance5 === - - enum class PipelineCreateFlagBits2KHR : VkPipelineCreateFlags2KHR - { - eDisableOptimization = VK_PIPELINE_CREATE_2_DISABLE_OPTIMIZATION_BIT_KHR, - eAllowDerivatives = VK_PIPELINE_CREATE_2_ALLOW_DERIVATIVES_BIT_KHR, - eDerivative = VK_PIPELINE_CREATE_2_DERIVATIVE_BIT_KHR, -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - eExecutionGraphAMDX = VK_PIPELINE_CREATE_2_EXECUTION_GRAPH_BIT_AMDX, -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - eEnableLegacyDitheringEXT = VK_PIPELINE_CREATE_2_ENABLE_LEGACY_DITHERING_BIT_EXT, - eDisableOptimization = VK_PIPELINE_CREATE_2_DISABLE_OPTIMIZATION_BIT_KHR, - eAllowDerivatives = VK_PIPELINE_CREATE_2_ALLOW_DERIVATIVES_BIT_KHR, - eDerivative = VK_PIPELINE_CREATE_2_DERIVATIVE_BIT_KHR, -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - eExecutionGraphAMDX = VK_PIPELINE_CREATE_2_EXECUTION_GRAPH_BIT_AMDX, -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - eEnableLegacyDitheringEXT = VK_PIPELINE_CREATE_2_ENABLE_LEGACY_DITHERING_BIT_EXT, - eViewIndexFromDeviceIndex = VK_PIPELINE_CREATE_2_VIEW_INDEX_FROM_DEVICE_INDEX_BIT_KHR, - eDispatchBase = VK_PIPELINE_CREATE_2_DISPATCH_BASE_BIT_KHR, - eDeferCompileNV = VK_PIPELINE_CREATE_2_DEFER_COMPILE_BIT_NV, - eCaptureStatistics = VK_PIPELINE_CREATE_2_CAPTURE_STATISTICS_BIT_KHR, - eCaptureInternalRepresentations = VK_PIPELINE_CREATE_2_CAPTURE_INTERNAL_REPRESENTATIONS_BIT_KHR, - eFailOnPipelineCompileRequired = VK_PIPELINE_CREATE_2_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT_KHR, - eEarlyReturnOnFailure = VK_PIPELINE_CREATE_2_EARLY_RETURN_ON_FAILURE_BIT_KHR, - eLinkTimeOptimizationEXT = VK_PIPELINE_CREATE_2_LINK_TIME_OPTIMIZATION_BIT_EXT, - eRetainLinkTimeOptimizationInfoEXT = VK_PIPELINE_CREATE_2_RETAIN_LINK_TIME_OPTIMIZATION_INFO_BIT_EXT, - eLibrary = VK_PIPELINE_CREATE_2_LIBRARY_BIT_KHR, - eRayTracingSkipTriangles = VK_PIPELINE_CREATE_2_RAY_TRACING_SKIP_TRIANGLES_BIT_KHR, - eRayTracingSkipAabbs = VK_PIPELINE_CREATE_2_RAY_TRACING_SKIP_AABBS_BIT_KHR, - eRayTracingNoNullAnyHitShaders = VK_PIPELINE_CREATE_2_RAY_TRACING_NO_NULL_ANY_HIT_SHADERS_BIT_KHR, - eRayTracingNoNullClosestHitShaders = VK_PIPELINE_CREATE_2_RAY_TRACING_NO_NULL_CLOSEST_HIT_SHADERS_BIT_KHR, - eRayTracingNoNullMissShaders = VK_PIPELINE_CREATE_2_RAY_TRACING_NO_NULL_MISS_SHADERS_BIT_KHR, - eRayTracingNoNullIntersectionShaders = VK_PIPELINE_CREATE_2_RAY_TRACING_NO_NULL_INTERSECTION_SHADERS_BIT_KHR, - eRayTracingShaderGroupHandleCaptureReplay = VK_PIPELINE_CREATE_2_RAY_TRACING_SHADER_GROUP_HANDLE_CAPTURE_REPLAY_BIT_KHR, - eIndirectBindableNV = VK_PIPELINE_CREATE_2_INDIRECT_BINDABLE_BIT_NV, - eRayTracingAllowMotionNV = VK_PIPELINE_CREATE_2_RAY_TRACING_ALLOW_MOTION_BIT_NV, - eRenderingFragmentShadingRateAttachment = VK_PIPELINE_CREATE_2_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR, - eRenderingFragmentDensityMapAttachmentEXT = VK_PIPELINE_CREATE_2_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT, - eRayTracingOpacityMicromapEXT = VK_PIPELINE_CREATE_2_RAY_TRACING_OPACITY_MICROMAP_BIT_EXT, - eColorAttachmentFeedbackLoopEXT = VK_PIPELINE_CREATE_2_COLOR_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT, - eDepthStencilAttachmentFeedbackLoopEXT = 
VK_PIPELINE_CREATE_2_DEPTH_STENCIL_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT, - eNoProtectedAccessEXT = VK_PIPELINE_CREATE_2_NO_PROTECTED_ACCESS_BIT_EXT, - eProtectedAccessOnlyEXT = VK_PIPELINE_CREATE_2_PROTECTED_ACCESS_ONLY_BIT_EXT, - eRayTracingDisplacementMicromapNV = VK_PIPELINE_CREATE_2_RAY_TRACING_DISPLACEMENT_MICROMAP_BIT_NV, - eDescriptorBufferEXT = VK_PIPELINE_CREATE_2_DESCRIPTOR_BUFFER_BIT_EXT, - eCaptureData = VK_PIPELINE_CREATE_2_CAPTURE_DATA_BIT_KHR, - eIndirectBindableEXT = VK_PIPELINE_CREATE_2_INDIRECT_BINDABLE_BIT_EXT - eDescriptorBufferEXT = VK_PIPELINE_CREATE_2_DESCRIPTOR_BUFFER_BIT_EXT, - eCaptureData = VK_PIPELINE_CREATE_2_CAPTURE_DATA_BIT_KHR, - eIndirectBindableEXT = VK_PIPELINE_CREATE_2_INDIRECT_BINDABLE_BIT_EXT - }; - - using PipelineCreateFlags2KHR = Flags; - - template <> - struct FlagTraits - { - static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; - static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineCreateFlags2KHR allFlags = - PipelineCreateFlagBits2KHR::eDisableOptimization | PipelineCreateFlagBits2KHR::eAllowDerivatives | PipelineCreateFlagBits2KHR::eDerivative -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - | PipelineCreateFlagBits2KHR::eExecutionGraphAMDX -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - | PipelineCreateFlagBits2KHR::eEnableLegacyDitheringEXT | PipelineCreateFlagBits2KHR::eViewIndexFromDeviceIndex | - PipelineCreateFlagBits2KHR::eDispatchBase | PipelineCreateFlagBits2KHR::eDeferCompileNV | PipelineCreateFlagBits2KHR::eCaptureStatistics | - PipelineCreateFlagBits2KHR::eCaptureInternalRepresentations | PipelineCreateFlagBits2KHR::eFailOnPipelineCompileRequired | - PipelineCreateFlagBits2KHR::eEarlyReturnOnFailure | PipelineCreateFlagBits2KHR::eLinkTimeOptimizationEXT | - PipelineCreateFlagBits2KHR::eRetainLinkTimeOptimizationInfoEXT | PipelineCreateFlagBits2KHR::eLibrary | - PipelineCreateFlagBits2KHR::eRayTracingSkipTriangles | PipelineCreateFlagBits2KHR::eRayTracingSkipAabbs | - PipelineCreateFlagBits2KHR::eDisableOptimization | PipelineCreateFlagBits2KHR::eAllowDerivatives | PipelineCreateFlagBits2KHR::eDerivative -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - | PipelineCreateFlagBits2KHR::eExecutionGraphAMDX -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - | PipelineCreateFlagBits2KHR::eEnableLegacyDitheringEXT | PipelineCreateFlagBits2KHR::eViewIndexFromDeviceIndex | - PipelineCreateFlagBits2KHR::eDispatchBase | PipelineCreateFlagBits2KHR::eDeferCompileNV | PipelineCreateFlagBits2KHR::eCaptureStatistics | - PipelineCreateFlagBits2KHR::eCaptureInternalRepresentations | PipelineCreateFlagBits2KHR::eFailOnPipelineCompileRequired | - PipelineCreateFlagBits2KHR::eEarlyReturnOnFailure | PipelineCreateFlagBits2KHR::eLinkTimeOptimizationEXT | - PipelineCreateFlagBits2KHR::eRetainLinkTimeOptimizationInfoEXT | PipelineCreateFlagBits2KHR::eLibrary | - PipelineCreateFlagBits2KHR::eRayTracingSkipTriangles | PipelineCreateFlagBits2KHR::eRayTracingSkipAabbs | - PipelineCreateFlagBits2KHR::eRayTracingNoNullAnyHitShaders | PipelineCreateFlagBits2KHR::eRayTracingNoNullClosestHitShaders | - PipelineCreateFlagBits2KHR::eRayTracingNoNullMissShaders | PipelineCreateFlagBits2KHR::eRayTracingNoNullIntersectionShaders | - PipelineCreateFlagBits2KHR::eRayTracingShaderGroupHandleCaptureReplay | PipelineCreateFlagBits2KHR::eIndirectBindableNV | - PipelineCreateFlagBits2KHR::eRayTracingAllowMotionNV | PipelineCreateFlagBits2KHR::eRenderingFragmentShadingRateAttachment | - PipelineCreateFlagBits2KHR::eRenderingFragmentDensityMapAttachmentEXT | PipelineCreateFlagBits2KHR::eRayTracingOpacityMicromapEXT 
| - PipelineCreateFlagBits2KHR::eColorAttachmentFeedbackLoopEXT | PipelineCreateFlagBits2KHR::eDepthStencilAttachmentFeedbackLoopEXT | - PipelineCreateFlagBits2KHR::eNoProtectedAccessEXT | PipelineCreateFlagBits2KHR::eProtectedAccessOnlyEXT | - PipelineCreateFlagBits2KHR::eRayTracingDisplacementMicromapNV | PipelineCreateFlagBits2KHR::eDescriptorBufferEXT | - PipelineCreateFlagBits2KHR::eCaptureData | PipelineCreateFlagBits2KHR::eIndirectBindableEXT; - PipelineCreateFlagBits2KHR::eRayTracingDisplacementMicromapNV | PipelineCreateFlagBits2KHR::eDescriptorBufferEXT | - PipelineCreateFlagBits2KHR::eCaptureData | PipelineCreateFlagBits2KHR::eIndirectBindableEXT; - }; - - enum class BufferUsageFlagBits2KHR : VkBufferUsageFlags2KHR - { - eTransferSrc = VK_BUFFER_USAGE_2_TRANSFER_SRC_BIT_KHR, - eTransferDst = VK_BUFFER_USAGE_2_TRANSFER_DST_BIT_KHR, - eUniformTexelBuffer = VK_BUFFER_USAGE_2_UNIFORM_TEXEL_BUFFER_BIT_KHR, - eStorageTexelBuffer = VK_BUFFER_USAGE_2_STORAGE_TEXEL_BUFFER_BIT_KHR, - eUniformBuffer = VK_BUFFER_USAGE_2_UNIFORM_BUFFER_BIT_KHR, - eStorageBuffer = VK_BUFFER_USAGE_2_STORAGE_BUFFER_BIT_KHR, - eIndexBuffer = VK_BUFFER_USAGE_2_INDEX_BUFFER_BIT_KHR, - eVertexBuffer = VK_BUFFER_USAGE_2_VERTEX_BUFFER_BIT_KHR, - eIndirectBuffer = VK_BUFFER_USAGE_2_INDIRECT_BUFFER_BIT_KHR, -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - eExecutionGraphScratchAMDX = VK_BUFFER_USAGE_2_EXECUTION_GRAPH_SCRATCH_BIT_AMDX, -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - eConditionalRenderingEXT = VK_BUFFER_USAGE_2_CONDITIONAL_RENDERING_BIT_EXT, - eShaderBindingTable = VK_BUFFER_USAGE_2_SHADER_BINDING_TABLE_BIT_KHR, - eRayTracingNV = VK_BUFFER_USAGE_2_RAY_TRACING_BIT_NV, - eTransformFeedbackBufferEXT = VK_BUFFER_USAGE_2_TRANSFORM_FEEDBACK_BUFFER_BIT_EXT, - eTransformFeedbackCounterBufferEXT = VK_BUFFER_USAGE_2_TRANSFORM_FEEDBACK_COUNTER_BUFFER_BIT_EXT, - eVideoDecodeSrc = VK_BUFFER_USAGE_2_VIDEO_DECODE_SRC_BIT_KHR, - eVideoDecodeDst = VK_BUFFER_USAGE_2_VIDEO_DECODE_DST_BIT_KHR, - eVideoEncodeDst = VK_BUFFER_USAGE_2_VIDEO_ENCODE_DST_BIT_KHR, - eVideoEncodeSrc = VK_BUFFER_USAGE_2_VIDEO_ENCODE_SRC_BIT_KHR, - eShaderDeviceAddress = VK_BUFFER_USAGE_2_SHADER_DEVICE_ADDRESS_BIT_KHR, - eAccelerationStructureBuildInputReadOnly = VK_BUFFER_USAGE_2_ACCELERATION_STRUCTURE_BUILD_INPUT_READ_ONLY_BIT_KHR, - eAccelerationStructureStorage = VK_BUFFER_USAGE_2_ACCELERATION_STRUCTURE_STORAGE_BIT_KHR, - eSamplerDescriptorBufferEXT = VK_BUFFER_USAGE_2_SAMPLER_DESCRIPTOR_BUFFER_BIT_EXT, - eResourceDescriptorBufferEXT = VK_BUFFER_USAGE_2_RESOURCE_DESCRIPTOR_BUFFER_BIT_EXT, - ePushDescriptorsDescriptorBufferEXT = VK_BUFFER_USAGE_2_PUSH_DESCRIPTORS_DESCRIPTOR_BUFFER_BIT_EXT, - eMicromapBuildInputReadOnlyEXT = VK_BUFFER_USAGE_2_MICROMAP_BUILD_INPUT_READ_ONLY_BIT_EXT, - eMicromapStorageEXT = VK_BUFFER_USAGE_2_MICROMAP_STORAGE_BIT_EXT, - ePreprocessBufferEXT = VK_BUFFER_USAGE_2_PREPROCESS_BUFFER_BIT_EXT - eMicromapStorageEXT = VK_BUFFER_USAGE_2_MICROMAP_STORAGE_BIT_EXT, - ePreprocessBufferEXT = VK_BUFFER_USAGE_2_PREPROCESS_BUFFER_BIT_EXT - }; - - using BufferUsageFlags2KHR = Flags; - - template <> - struct FlagTraits - { - static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; - static VULKAN_HPP_CONST_OR_CONSTEXPR BufferUsageFlags2KHR allFlags = - BufferUsageFlagBits2KHR::eTransferSrc | BufferUsageFlagBits2KHR::eTransferDst | BufferUsageFlagBits2KHR::eUniformTexelBuffer | - BufferUsageFlagBits2KHR::eStorageTexelBuffer | BufferUsageFlagBits2KHR::eUniformBuffer | BufferUsageFlagBits2KHR::eStorageBuffer | - BufferUsageFlagBits2KHR::eIndexBuffer | 
BufferUsageFlagBits2KHR::eVertexBuffer | BufferUsageFlagBits2KHR::eIndirectBuffer -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - | BufferUsageFlagBits2KHR::eExecutionGraphScratchAMDX -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - | BufferUsageFlagBits2KHR::eConditionalRenderingEXT | BufferUsageFlagBits2KHR::eShaderBindingTable | - BufferUsageFlagBits2KHR::eTransformFeedbackBufferEXT | BufferUsageFlagBits2KHR::eTransformFeedbackCounterBufferEXT | - BufferUsageFlagBits2KHR::eVideoDecodeSrc | BufferUsageFlagBits2KHR::eVideoDecodeDst | BufferUsageFlagBits2KHR::eVideoEncodeDst | - BufferUsageFlagBits2KHR::eVideoEncodeSrc | BufferUsageFlagBits2KHR::eShaderDeviceAddress | - BufferUsageFlagBits2KHR::eAccelerationStructureBuildInputReadOnly | BufferUsageFlagBits2KHR::eAccelerationStructureStorage | - BufferUsageFlagBits2KHR::eSamplerDescriptorBufferEXT | BufferUsageFlagBits2KHR::eResourceDescriptorBufferEXT | - BufferUsageFlagBits2KHR::ePushDescriptorsDescriptorBufferEXT | BufferUsageFlagBits2KHR::eMicromapBuildInputReadOnlyEXT | - BufferUsageFlagBits2KHR::eMicromapStorageEXT | BufferUsageFlagBits2KHR::ePreprocessBufferEXT; - }; - - //=== VK_AMD_anti_lag === - - enum class AntiLagModeAMD - { - eDriverControl = VK_ANTI_LAG_MODE_DRIVER_CONTROL_AMD, - eOn = VK_ANTI_LAG_MODE_ON_AMD, - eOff = VK_ANTI_LAG_MODE_OFF_AMD - }; - - enum class AntiLagStageAMD - { - eInput = VK_ANTI_LAG_STAGE_INPUT_AMD, - ePresent = VK_ANTI_LAG_STAGE_PRESENT_AMD - BufferUsageFlagBits2KHR::eMicromapStorageEXT | BufferUsageFlagBits2KHR::ePreprocessBufferEXT; - }; - //=== VK_AMD_anti_lag === enum class AntiLagModeAMD @@ -7526,8 +7315,6 @@ namespace VULKAN_HPP_NAMESPACE eFragmentShadingRateAttachment = VK_SHADER_CREATE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_EXT, eFragmentDensityMapAttachment = VK_SHADER_CREATE_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT, eIndirectBindable = VK_SHADER_CREATE_INDIRECT_BINDABLE_BIT_EXT - eFragmentDensityMapAttachment = VK_SHADER_CREATE_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT, - eIndirectBindable = VK_SHADER_CREATE_INDIRECT_BINDABLE_BIT_EXT }; using ShaderCreateFlagsEXT = Flags; @@ -7540,7 +7327,6 @@ namespace VULKAN_HPP_NAMESPACE ShaderCreateFlagBitsEXT::eLinkStage | ShaderCreateFlagBitsEXT::eAllowVaryingSubgroupSize | ShaderCreateFlagBitsEXT::eRequireFullSubgroups | ShaderCreateFlagBitsEXT::eNoTaskShader | ShaderCreateFlagBitsEXT::eDispatchBase | ShaderCreateFlagBitsEXT::eFragmentShadingRateAttachment | ShaderCreateFlagBitsEXT::eFragmentDensityMapAttachment | ShaderCreateFlagBitsEXT::eIndirectBindable; - ShaderCreateFlagBitsEXT::eFragmentDensityMapAttachment | ShaderCreateFlagBitsEXT::eIndirectBindable; }; enum class ShaderCodeTypeEXT @@ -7640,75 +7426,6 @@ namespace VULKAN_HPP_NAMESPACE } } - //================================= - //=== Layer Setting Type Traits === - //================================= - - template <> - struct CppType - { - using Type = vk::Bool32; - }; - - template <> - struct CppType - { - using Type = int32_t; - }; - - template <> - struct CppType - { - using Type = int64_t; - }; - - template <> - struct CppType - { - using Type = uint32_t; - }; - - template <> - struct CppType - { - using Type = uint64_t; - }; - - template <> - struct CppType - { - using Type = float; - }; - - template <> - struct CppType - { - using Type = double; - }; - - template <> - struct CppType - { - using Type = char *; - }; - - template - bool isSameType( LayerSettingTypeEXT layerSettingType ) - { - switch ( layerSettingType ) - { - case LayerSettingTypeEXT::eBool32: return std::is_same::value; - case 
LayerSettingTypeEXT::eInt32: return std::is_same::value; - case LayerSettingTypeEXT::eInt64: return std::is_same::value; - case LayerSettingTypeEXT::eUint32: return std::is_same::value; - case LayerSettingTypeEXT::eUint64: return std::is_same::value; - case LayerSettingTypeEXT::eFloat32: return std::is_same::value; - case LayerSettingTypeEXT::eFloat64: return std::is_same::value; - case LayerSettingTypeEXT::eString: return std::is_same::value; - default: return false; - } - } - //=== VK_NV_low_latency2 === enum class LatencyMarkerNV @@ -7878,17 +7595,6 @@ namespace VULKAN_HPP_NAMESPACE eD3D12 = VK_LAYERED_DRIVER_UNDERLYING_API_D3D12_MSFT }; - //=== VK_KHR_line_rasterization === - - enum class LineRasterizationModeKHR - { - eDefault = VK_LINE_RASTERIZATION_MODE_DEFAULT_KHR, - eRectangular = VK_LINE_RASTERIZATION_MODE_RECTANGULAR_KHR, - eBresenham = VK_LINE_RASTERIZATION_MODE_BRESENHAM_KHR, - eRectangularSmooth = VK_LINE_RASTERIZATION_MODE_RECTANGULAR_SMOOTH_KHR - }; - using LineRasterizationModeEXT = LineRasterizationModeKHR; - //=== VK_KHR_calibrated_timestamps === enum class TimeDomainKHR @@ -7900,6 +7606,16 @@ namespace VULKAN_HPP_NAMESPACE }; using TimeDomainEXT = TimeDomainKHR; + //=== VK_NV_display_stereo === + + enum class DisplaySurfaceStereoTypeNV + { + eNone = VK_DISPLAY_SURFACE_STEREO_TYPE_NONE_NV, + eOnboardDin = VK_DISPLAY_SURFACE_STEREO_TYPE_ONBOARD_DIN_NV, + eHdmi3D = VK_DISPLAY_SURFACE_STEREO_TYPE_HDMI_3D_NV, + eInbandDisplayport = VK_DISPLAY_SURFACE_STEREO_TYPE_INBAND_DISPLAYPORT_NV + }; + //=== VK_KHR_maintenance7 === enum class PhysicalDeviceLayeredApiKHR @@ -7972,19 +7688,6 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_EXT_depth_clamp_control === - enum class DepthClampModeEXT - { - eViewportRange = VK_DEPTH_CLAMP_MODE_VIEWPORT_RANGE_EXT, - eUserDefinedRange = VK_DEPTH_CLAMP_MODE_USER_DEFINED_RANGE_EXT - struct FlagTraits - { - static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true; - static VULKAN_HPP_CONST_OR_CONSTEXPR IndirectCommandsInputModeFlagsEXT allFlags = - IndirectCommandsInputModeFlagBitsEXT::eVulkanIndexBuffer | IndirectCommandsInputModeFlagBitsEXT::eDxgiIndexBuffer; - }; - - //=== VK_EXT_depth_clamp_control === - enum class DepthClampModeEXT { eViewportRange = VK_DEPTH_CLAMP_MODE_VIEWPORT_RANGE_EXT, @@ -8120,16 +7823,6 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_EXT_device_generated_commands === case VULKAN_HPP_NAMESPACE::ObjectType::eIndirectCommandsLayoutEXT: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; case VULKAN_HPP_NAMESPACE::ObjectType::eIndirectExecutionSetEXT: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; - case VULKAN_HPP_NAMESPACE::ObjectType::eShaderEXT: - return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; - - //=== VK_KHR_pipeline_binary === - case VULKAN_HPP_NAMESPACE::ObjectType::ePipelineBinaryKHR: - return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; - - //=== VK_EXT_device_generated_commands === - case VULKAN_HPP_NAMESPACE::ObjectType::eIndirectCommandsLayoutEXT: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; - case VULKAN_HPP_NAMESPACE::ObjectType::eIndirectExecutionSetEXT: return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; default: VULKAN_HPP_ASSERT( false && "unknown ObjectType" ); return VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown; } diff --git a/third_party/vulkan/vulkan_extension_inspection.hpp b/third_party/vulkan/vulkan_extension_inspection.hpp index c86bbf4..4829143 100644 --- 
a/third_party/vulkan/vulkan_extension_inspection.hpp +++ b/third_party/vulkan/vulkan_extension_inspection.hpp @@ -8,15 +8,6 @@ #ifndef VULKAN_EXTENSION_INSPECTION_HPP #define VULKAN_EXTENSION_INSPECTION_HPP -#if defined( VULKAN_HPP_ENABLE_STD_MODULE ) && defined( VULKAN_HPP_STD_MODULE ) -import VULKAN_HPP_STD_MODULE; -#else -# include -# include -# include -# include -# include -#endif #if defined( VULKAN_HPP_ENABLE_STD_MODULE ) && defined( VULKAN_HPP_STD_MODULE ) import VULKAN_HPP_STD_MODULE; #else @@ -77,7 +68,6 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VK_USE_PLATFORM_MACOS_MVK*/ { "VK_AMD_gpu_shader_int16", "VK_KHR_shader_float16_int8" }, { "VK_NV_ray_tracing", "VK_KHR_ray_tracing_pipeline" }, - { "VK_NV_ray_tracing", "VK_KHR_ray_tracing_pipeline" }, { "VK_EXT_buffer_device_address", "VK_KHR_buffer_device_address" }, { "VK_EXT_validation_features", "VK_EXT_layer_settings" } }; @@ -357,7 +347,6 @@ namespace VULKAN_HPP_NAMESPACE "VK_EXT_primitive_topology_list_restart", "VK_KHR_format_feature_flags2", "VK_EXT_present_mode_fifo_latest_ready", - "VK_EXT_present_mode_fifo_latest_ready", #if defined( VK_USE_PLATFORM_FUCHSIA ) "VK_FUCHSIA_external_memory", "VK_FUCHSIA_external_semaphore", @@ -417,11 +406,9 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VK_USE_PLATFORM_ANDROID_KHR*/ "VK_KHR_maintenance5", "VK_AMD_anti_lag", - "VK_AMD_anti_lag", "VK_KHR_ray_tracing_position_fetch", "VK_EXT_shader_object", "VK_KHR_pipeline_binary", - "VK_KHR_pipeline_binary", "VK_QCOM_tile_properties", "VK_SEC_amigo_profiling", "VK_QCOM_multiview_per_view_viewports", @@ -429,7 +416,6 @@ namespace VULKAN_HPP_NAMESPACE "VK_NV_extended_sparse_address_space", "VK_EXT_mutable_descriptor_type", "VK_EXT_legacy_vertex_attributes", - "VK_EXT_legacy_vertex_attributes", "VK_ARM_shader_core_builtins", "VK_EXT_pipeline_library_group_handles", "VK_EXT_dynamic_rendering_unused_attachments", @@ -437,7 +423,6 @@ namespace VULKAN_HPP_NAMESPACE "VK_KHR_cooperative_matrix", "VK_QCOM_multiview_per_view_render_areas", "VK_KHR_compute_shader_derivatives", - "VK_KHR_compute_shader_derivatives", "VK_KHR_video_decode_av1", "VK_KHR_video_encode_av1", "VK_KHR_video_maintenance1", @@ -465,9 +450,6 @@ namespace VULKAN_HPP_NAMESPACE "VK_KHR_shader_relaxed_extended_instruction", "VK_NV_command_buffer_inheritance", "VK_KHR_maintenance7", - "VK_KHR_shader_relaxed_extended_instruction", - "VK_NV_command_buffer_inheritance", - "VK_KHR_maintenance7", "VK_NV_shader_atomic_float16_vector", "VK_EXT_shader_replicated_composites", "VK_NV_ray_tracing_validation", @@ -475,7 +457,8 @@ namespace VULKAN_HPP_NAMESPACE "VK_MESA_image_alignment_control", "VK_EXT_depth_clamp_control", "VK_HUAWEI_hdr_vivid", - "VK_NV_cooperative_matrix2" + "VK_NV_cooperative_matrix2", + "VK_EXT_vertex_attribute_robustness" }; return deviceExtensions; } @@ -1020,12 +1003,9 @@ namespace VULKAN_HPP_NAMESPACE "VK_KHR_synchronization2", "VK_KHR_spirv_1_4", "VK_EXT_extended_dynamic_state", - "VK_KHR_spirv_1_4", - "VK_EXT_extended_dynamic_state", } } }, { "VK_VERSION_1_3", { { - "VK_KHR_maintenance5", "VK_KHR_maintenance5", "VK_KHR_pipeline_library", } } } } }, @@ -1537,7 +1517,8 @@ namespace VULKAN_HPP_NAMESPACE { { "VK_VERSION_1_0", { { "VK_KHR_map_memory2", - } } } } }, + } } }, + { "VK_VERSION_1_4", { {} } } } }, { "VK_EXT_shader_atomic_float2", { { "VK_VERSION_1_0", { { @@ -1857,11 +1838,6 @@ namespace VULKAN_HPP_NAMESPACE { { "VK_KHR_swapchain", } } } } }, - { "VK_EXT_present_mode_fifo_latest_ready", - { { "VK_VERSION_1_0", - { { - "VK_KHR_swapchain", - } } } } }, #if defined( 
VK_USE_PLATFORM_FUCHSIA ) { "VK_FUCHSIA_external_memory", { { "VK_VERSION_1_0", @@ -2201,11 +2177,6 @@ namespace VULKAN_HPP_NAMESPACE { { "VK_KHR_maintenance5", } } } } }, - { "VK_KHR_pipeline_binary", - { { "VK_VERSION_1_0", - { { - "VK_KHR_maintenance5", - } } } } }, { "VK_QCOM_tile_properties", { { "VK_VERSION_1_0", { { @@ -2239,11 +2210,6 @@ namespace VULKAN_HPP_NAMESPACE { { "VK_EXT_vertex_input_dynamic_state", } } } } }, - { "VK_EXT_legacy_vertex_attributes", - { { "VK_VERSION_1_0", - { { - "VK_EXT_vertex_input_dynamic_state", - } } } } }, { "VK_ARM_shader_core_builtins", { { "VK_VERSION_1_0", { { @@ -2283,11 +2249,6 @@ namespace VULKAN_HPP_NAMESPACE { { "VK_KHR_get_physical_device_properties2", } } } } }, - { "VK_KHR_compute_shader_derivatives", - { { "VK_VERSION_1_0", - { { - "VK_KHR_get_physical_device_properties2", - } } } } }, { "VK_KHR_video_decode_av1", { { "VK_VERSION_1_0", { { @@ -2307,7 +2268,8 @@ namespace VULKAN_HPP_NAMESPACE { { "VK_VERSION_1_0", { { "VK_KHR_maintenance6", - } } } } }, + } } }, + { "VK_VERSION_1_4", { {} } } } }, { "VK_QCOM_image_processing2", { { "VK_VERSION_1_0", { { @@ -2393,6 +2355,18 @@ namespace VULKAN_HPP_NAMESPACE { "VK_VERSION_1_1", { {} } } } }, { "VK_KHR_maintenance6", { { "VK_VERSION_1_1", { {} } } } }, { "VK_NV_descriptor_pool_overallocation", { { "VK_VERSION_1_1", { {} } } } }, + { "VK_NV_display_stereo", + { { "VK_VERSION_1_0", + { { + "VK_KHR_display", + "VK_KHR_get_display_properties2", + } } } } }, + { "VK_KHR_video_encode_quantization_map", + { { "VK_VERSION_1_0", + { { + "VK_KHR_video_encode_queue", + "VK_KHR_format_feature_flags2", + } } } } }, { "VK_KHR_maintenance7", { { "VK_VERSION_1_1", { {} } } } }, { "VK_EXT_device_generated_commands", { { "VK_VERSION_1_0", @@ -2436,7 +2410,7 @@ namespace VULKAN_HPP_NAMESPACE std::string const & extension ) { #if !defined( NDEBUG ) - static std::set versions = { "VK_VERSION_1_0", "VK_VERSION_1_1", "VK_VERSION_1_2", "VK_VERSION_1_3" }; + static std::set versions = { "VK_VERSION_1_0", "VK_VERSION_1_1", "VK_VERSION_1_2", "VK_VERSION_1_3", "VK_VERSION_1_4" }; assert( versions.find( version ) != versions.end() ); #endif static std::vector> noDependencies; @@ -2482,12 +2456,14 @@ namespace VULKAN_HPP_NAMESPACE { "VK_KHR_device_group", "VK_VERSION_1_1" }, { "VK_KHR_shader_draw_parameters", "VK_VERSION_1_1" }, { "VK_EXT_texture_compression_astc_hdr", "VK_VERSION_1_3" }, + { "VK_EXT_pipeline_robustness", "VK_VERSION_1_4" }, { "VK_KHR_maintenance1", "VK_VERSION_1_1" }, { "VK_KHR_device_group_creation", "VK_VERSION_1_1" }, { "VK_KHR_external_memory_capabilities", "VK_VERSION_1_1" }, { "VK_KHR_external_memory", "VK_VERSION_1_1" }, { "VK_KHR_external_semaphore_capabilities", "VK_VERSION_1_1" }, { "VK_KHR_external_semaphore", "VK_VERSION_1_1" }, + { "VK_KHR_push_descriptor", "VK_VERSION_1_4" }, { "VK_KHR_shader_float16_int8", "VK_VERSION_1_2" }, { "VK_KHR_16bit_storage", "VK_VERSION_1_1" }, { "VK_KHR_descriptor_update_template", "VK_VERSION_1_1" }, @@ -2515,19 +2491,20 @@ namespace VULKAN_HPP_NAMESPACE { "VK_KHR_8bit_storage", "VK_VERSION_1_2" }, { "VK_KHR_shader_atomic_int64", "VK_VERSION_1_2" }, { "VK_EXT_calibrated_timestamps", "VK_KHR_calibrated_timestamps" }, + { "VK_KHR_global_priority", "VK_VERSION_1_4" }, { "VK_EXT_vertex_attribute_divisor", "VK_KHR_vertex_attribute_divisor" }, { "VK_EXT_pipeline_creation_feedback", "VK_VERSION_1_3" }, { "VK_KHR_driver_properties", "VK_VERSION_1_2" }, { "VK_KHR_shader_float_controls", "VK_VERSION_1_2" }, { "VK_KHR_depth_stencil_resolve", "VK_VERSION_1_2" }, { 
"VK_NV_compute_shader_derivatives", "VK_KHR_compute_shader_derivatives" }, - { "VK_NV_compute_shader_derivatives", "VK_KHR_compute_shader_derivatives" }, { "VK_NV_fragment_shader_barycentric", "VK_KHR_fragment_shader_barycentric" }, { "VK_KHR_timeline_semaphore", "VK_VERSION_1_2" }, { "VK_KHR_vulkan_memory_model", "VK_VERSION_1_2" }, { "VK_KHR_shader_terminate_invocation", "VK_VERSION_1_3" }, { "VK_EXT_scalar_block_layout", "VK_VERSION_1_2" }, { "VK_EXT_subgroup_size_control", "VK_VERSION_1_3" }, + { "VK_KHR_dynamic_rendering_local_read", "VK_VERSION_1_4" }, { "VK_KHR_spirv_1_4", "VK_VERSION_1_2" }, { "VK_KHR_separate_depth_stencil_layouts", "VK_VERSION_1_2" }, { "VK_EXT_tooling_info", "VK_VERSION_1_3" }, @@ -2538,6 +2515,8 @@ namespace VULKAN_HPP_NAMESPACE { "VK_EXT_host_query_reset", "VK_VERSION_1_2" }, { "VK_EXT_index_type_uint8", "VK_KHR_index_type_uint8" }, { "VK_EXT_extended_dynamic_state", "VK_VERSION_1_3" }, + { "VK_EXT_host_image_copy", "VK_VERSION_1_4" }, + { "VK_KHR_map_memory2", "VK_VERSION_1_4" }, { "VK_EXT_shader_demote_to_helper_invocation", "VK_VERSION_1_3" }, { "VK_KHR_shader_integer_dot_product", "VK_VERSION_1_3" }, { "VK_EXT_texel_buffer_alignment", "VK_VERSION_1_3" }, @@ -2556,7 +2535,17 @@ namespace VULKAN_HPP_NAMESPACE { "VK_EXT_extended_dynamic_state2", "VK_VERSION_1_3" }, { "VK_EXT_global_priority_query", "VK_KHR_global_priority" }, { "VK_EXT_load_store_op_none", "VK_KHR_load_store_op_none" }, - { "VK_KHR_maintenance4", "VK_VERSION_1_3" } + { "VK_KHR_maintenance4", "VK_VERSION_1_3" }, + { "VK_KHR_shader_subgroup_rotate", "VK_VERSION_1_4" }, + { "VK_EXT_pipeline_protected_access", "VK_VERSION_1_4" }, + { "VK_KHR_maintenance5", "VK_VERSION_1_4" }, + { "VK_KHR_vertex_attribute_divisor", "VK_VERSION_1_4" }, + { "VK_KHR_load_store_op_none", "VK_VERSION_1_4" }, + { "VK_KHR_shader_float_controls2", "VK_VERSION_1_4" }, + { "VK_KHR_index_type_uint8", "VK_VERSION_1_4" }, + { "VK_KHR_line_rasterization", "VK_VERSION_1_4" }, + { "VK_KHR_shader_expect_assume", "VK_VERSION_1_4" }, + { "VK_KHR_maintenance6", "VK_VERSION_1_4" } }; return promotedExtensions; } @@ -2629,10 +2618,6 @@ namespace VULKAN_HPP_NAMESPACE { return "VK_KHR_ray_tracing_pipeline"; } - if ( extension == "VK_NV_ray_tracing" ) - { - return "VK_KHR_ray_tracing_pipeline"; - } if ( extension == "VK_EXT_buffer_device_address" ) { return "VK_KHR_buffer_device_address"; @@ -2697,6 +2682,10 @@ namespace VULKAN_HPP_NAMESPACE { return "VK_VERSION_1_3"; } + if ( extension == "VK_EXT_pipeline_robustness" ) + { + return "VK_VERSION_1_4"; + } if ( extension == "VK_KHR_maintenance1" ) { return "VK_VERSION_1_1"; @@ -2721,6 +2710,10 @@ namespace VULKAN_HPP_NAMESPACE { return "VK_VERSION_1_1"; } + if ( extension == "VK_KHR_push_descriptor" ) + { + return "VK_VERSION_1_4"; + } if ( extension == "VK_KHR_shader_float16_int8" ) { return "VK_VERSION_1_2"; @@ -2829,6 +2822,10 @@ namespace VULKAN_HPP_NAMESPACE { return "VK_KHR_calibrated_timestamps"; } + if ( extension == "VK_KHR_global_priority" ) + { + return "VK_VERSION_1_4"; + } if ( extension == "VK_EXT_vertex_attribute_divisor" ) { return "VK_KHR_vertex_attribute_divisor"; @@ -2853,10 +2850,6 @@ namespace VULKAN_HPP_NAMESPACE { return "VK_KHR_compute_shader_derivatives"; } - if ( extension == "VK_NV_compute_shader_derivatives" ) - { - return "VK_KHR_compute_shader_derivatives"; - } if ( extension == "VK_NV_fragment_shader_barycentric" ) { return "VK_KHR_fragment_shader_barycentric"; @@ -2881,6 +2874,10 @@ namespace VULKAN_HPP_NAMESPACE { return "VK_VERSION_1_3"; } + if ( extension 
== "VK_KHR_dynamic_rendering_local_read" ) + { + return "VK_VERSION_1_4"; + } if ( extension == "VK_KHR_spirv_1_4" ) { return "VK_VERSION_1_2"; @@ -2921,6 +2918,14 @@ namespace VULKAN_HPP_NAMESPACE { return "VK_VERSION_1_3"; } + if ( extension == "VK_EXT_host_image_copy" ) + { + return "VK_VERSION_1_4"; + } + if ( extension == "VK_KHR_map_memory2" ) + { + return "VK_VERSION_1_4"; + } if ( extension == "VK_EXT_shader_demote_to_helper_invocation" ) { return "VK_VERSION_1_3"; @@ -2997,6 +3002,46 @@ namespace VULKAN_HPP_NAMESPACE { return "VK_VERSION_1_3"; } + if ( extension == "VK_KHR_shader_subgroup_rotate" ) + { + return "VK_VERSION_1_4"; + } + if ( extension == "VK_EXT_pipeline_protected_access" ) + { + return "VK_VERSION_1_4"; + } + if ( extension == "VK_KHR_maintenance5" ) + { + return "VK_VERSION_1_4"; + } + if ( extension == "VK_KHR_vertex_attribute_divisor" ) + { + return "VK_VERSION_1_4"; + } + if ( extension == "VK_KHR_load_store_op_none" ) + { + return "VK_VERSION_1_4"; + } + if ( extension == "VK_KHR_shader_float_controls2" ) + { + return "VK_VERSION_1_4"; + } + if ( extension == "VK_KHR_index_type_uint8" ) + { + return "VK_VERSION_1_4"; + } + if ( extension == "VK_KHR_line_rasterization" ) + { + return "VK_VERSION_1_4"; + } + if ( extension == "VK_KHR_shader_expect_assume" ) + { + return "VK_VERSION_1_4"; + } + if ( extension == "VK_KHR_maintenance6" ) + { + return "VK_VERSION_1_4"; + } return ""; } @@ -3017,8 +3062,6 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VK_USE_PLATFORM_MACOS_MVK*/ ( extension == "VK_AMD_gpu_shader_int16" ) || ( extension == "VK_NV_ray_tracing" ) || ( extension == "VK_EXT_buffer_device_address" ) || ( extension == "VK_EXT_validation_features" ); - ( extension == "VK_AMD_gpu_shader_int16" ) || ( extension == "VK_NV_ray_tracing" ) || ( extension == "VK_EXT_buffer_device_address" ) || - ( extension == "VK_EXT_validation_features" ); } VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 bool isDeviceExtension( std::string const & extension ) @@ -3163,7 +3206,6 @@ namespace VULKAN_HPP_NAMESPACE ( extension == "VK_EXT_physical_device_drm" ) || ( extension == "VK_EXT_device_address_binding_report" ) || ( extension == "VK_EXT_depth_clip_control" ) || ( extension == "VK_EXT_primitive_topology_list_restart" ) || ( extension == "VK_KHR_format_feature_flags2" ) || ( extension == "VK_EXT_present_mode_fifo_latest_ready" ) - ( extension == "VK_KHR_format_feature_flags2" ) || ( extension == "VK_EXT_present_mode_fifo_latest_ready" ) #if defined( VK_USE_PLATFORM_FUCHSIA ) || ( extension == "VK_FUCHSIA_external_memory" ) || ( extension == "VK_FUCHSIA_external_semaphore" ) || ( extension == "VK_FUCHSIA_buffer_collection" ) #endif /*VK_USE_PLATFORM_FUCHSIA*/ @@ -3194,9 +3236,6 @@ namespace VULKAN_HPP_NAMESPACE #if defined( VK_USE_PLATFORM_ANDROID_KHR ) || ( extension == "VK_ANDROID_external_format_resolve" ) #endif /*VK_USE_PLATFORM_ANDROID_KHR*/ - || ( extension == "VK_KHR_maintenance5" ) || ( extension == "VK_AMD_anti_lag" ) || ( extension == "VK_KHR_ray_tracing_position_fetch" ) || - ( extension == "VK_EXT_shader_object" ) || ( extension == "VK_KHR_pipeline_binary" ) || ( extension == "VK_QCOM_tile_properties" ) || - ( extension == "VK_SEC_amigo_profiling" ) || ( extension == "VK_QCOM_multiview_per_view_viewports" ) || || ( extension == "VK_KHR_maintenance5" ) || ( extension == "VK_AMD_anti_lag" ) || ( extension == "VK_KHR_ray_tracing_position_fetch" ) || ( extension == "VK_EXT_shader_object" ) || ( extension == "VK_KHR_pipeline_binary" ) || ( extension == "VK_QCOM_tile_properties" ) 
|| ( extension == "VK_SEC_amigo_profiling" ) || ( extension == "VK_QCOM_multiview_per_view_viewports" ) || @@ -3205,8 +3244,8 @@ namespace VULKAN_HPP_NAMESPACE ( extension == "VK_ARM_shader_core_builtins" ) || ( extension == "VK_EXT_pipeline_library_group_handles" ) || ( extension == "VK_EXT_dynamic_rendering_unused_attachments" ) || ( extension == "VK_NV_low_latency2" ) || ( extension == "VK_KHR_cooperative_matrix" ) || ( extension == "VK_QCOM_multiview_per_view_render_areas" ) || - ( extension == "VK_KHR_compute_shader_derivatives" ) || ( extension == "VK_KHR_video_decode_av1" ) || ( extension == "VK_KHR_video_maintenance1" ) || - ( extension == "VK_NV_per_stage_descriptor_set" ) || ( extension == "VK_QCOM_image_processing2" ) || + ( extension == "VK_KHR_compute_shader_derivatives" ) || ( extension == "VK_KHR_video_decode_av1" ) || ( extension == "VK_KHR_video_encode_av1" ) || + ( extension == "VK_KHR_video_maintenance1" ) || ( extension == "VK_NV_per_stage_descriptor_set" ) || ( extension == "VK_QCOM_image_processing2" ) || ( extension == "VK_QCOM_filter_cubic_weights" ) || ( extension == "VK_QCOM_ycbcr_degamma" ) || ( extension == "VK_QCOM_filter_cubic_clamp" ) || ( extension == "VK_EXT_attachment_feedback_loop_dynamic_state" ) || ( extension == "VK_KHR_vertex_attribute_divisor" ) || ( extension == "VK_KHR_load_store_op_none" ) || ( extension == "VK_KHR_shader_float_controls2" ) @@ -3215,12 +3254,13 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VK_USE_PLATFORM_SCREEN_QNX*/ || ( extension == "VK_MSFT_layered_driver" ) || ( extension == "VK_KHR_index_type_uint8" ) || ( extension == "VK_KHR_line_rasterization" ) || ( extension == "VK_KHR_calibrated_timestamps" ) || ( extension == "VK_KHR_shader_expect_assume" ) || ( extension == "VK_KHR_maintenance6" ) || - ( extension == "VK_NV_descriptor_pool_overallocation" ) || ( extension == "VK_NV_raw_access_chains" ) || - ( extension == "VK_KHR_shader_relaxed_extended_instruction" ) || ( extension == "VK_NV_command_buffer_inheritance" ) || - ( extension == "VK_KHR_maintenance7" ) || ( extension == "VK_NV_shader_atomic_float16_vector" ) || - ( extension == "VK_EXT_shader_replicated_composites" ) || ( extension == "VK_NV_ray_tracing_validation" ) || - ( extension == "VK_EXT_device_generated_commands" ) || ( extension == "VK_MESA_image_alignment_control" ) || - ( extension == "VK_EXT_depth_clamp_control" ) || ( extension == "VK_HUAWEI_hdr_vivid" ) || ( extension == "VK_NV_cooperative_matrix2" ); + ( extension == "VK_NV_descriptor_pool_overallocation" ) || ( extension == "VK_KHR_video_encode_quantization_map" ) || + ( extension == "VK_NV_raw_access_chains" ) || ( extension == "VK_KHR_shader_relaxed_extended_instruction" ) || + ( extension == "VK_NV_command_buffer_inheritance" ) || ( extension == "VK_KHR_maintenance7" ) || + ( extension == "VK_NV_shader_atomic_float16_vector" ) || ( extension == "VK_EXT_shader_replicated_composites" ) || + ( extension == "VK_NV_ray_tracing_validation" ) || ( extension == "VK_EXT_device_generated_commands" ) || + ( extension == "VK_MESA_image_alignment_control" ) || ( extension == "VK_EXT_depth_clamp_control" ) || ( extension == "VK_HUAWEI_hdr_vivid" ) || + ( extension == "VK_NV_cooperative_matrix2" ) || ( extension == "VK_EXT_vertex_attribute_robustness" ); } VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 bool isInstanceExtension( std::string const & extension ) @@ -3297,40 +3337,31 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VK_USE_PLATFORM_WIN32_KHR*/ ( extension == "VK_KHR_get_physical_device_properties2" ) || ( extension == 
"VK_KHR_device_group" ) || ( extension == "VK_KHR_shader_draw_parameters" ) || ( extension == "VK_EXT_texture_compression_astc_hdr" ) || - ( extension == "VK_KHR_maintenance1" ) || ( extension == "VK_KHR_device_group_creation" ) || + ( extension == "VK_EXT_pipeline_robustness" ) || ( extension == "VK_KHR_maintenance1" ) || ( extension == "VK_KHR_device_group_creation" ) || ( extension == "VK_KHR_external_memory_capabilities" ) || ( extension == "VK_KHR_external_memory" ) || ( extension == "VK_KHR_external_semaphore_capabilities" ) || ( extension == "VK_KHR_external_semaphore" ) || - ( extension == "VK_KHR_shader_float16_int8" ) || ( extension == "VK_KHR_16bit_storage" ) || ( extension == "VK_KHR_descriptor_update_template" ) || - ( extension == "VK_KHR_imageless_framebuffer" ) || ( extension == "VK_KHR_create_renderpass2" ) || - ( extension == "VK_KHR_external_fence_capabilities" ) || ( extension == "VK_KHR_external_fence" ) || ( extension == "VK_KHR_maintenance2" ) || - ( extension == "VK_KHR_variable_pointers" ) || ( extension == "VK_KHR_dedicated_allocation" ) || ( extension == "VK_EXT_sampler_filter_minmax" ) || - ( extension == "VK_KHR_storage_buffer_storage_class" ) || ( extension == "VK_EXT_inline_uniform_block" ) || - ( extension == "VK_KHR_relaxed_block_layout" ) || ( extension == "VK_KHR_get_memory_requirements2" ) || - ( extension == "VK_KHR_image_format_list" ) || ( extension == "VK_KHR_sampler_ycbcr_conversion" ) || ( extension == "VK_KHR_bind_memory2" ) || - ( extension == "VK_EXT_descriptor_indexing" ) || ( extension == "VK_EXT_shader_viewport_index_layer" ) || ( extension == "VK_KHR_maintenance3" ) || - ( extension == "VK_KHR_draw_indirect_count" ) || ( extension == "VK_EXT_global_priority" ) || - ( extension == "VK_KHR_shader_subgroup_extended_types" ) || ( extension == "VK_KHR_8bit_storage" ) || - ( extension == "VK_KHR_shader_atomic_int64" ) || ( extension == "VK_EXT_calibrated_timestamps" ) || + ( extension == "VK_KHR_push_descriptor" ) || ( extension == "VK_KHR_shader_float16_int8" ) || ( extension == "VK_KHR_16bit_storage" ) || + ( extension == "VK_KHR_descriptor_update_template" ) || ( extension == "VK_KHR_imageless_framebuffer" ) || + ( extension == "VK_KHR_create_renderpass2" ) || ( extension == "VK_KHR_external_fence_capabilities" ) || ( extension == "VK_KHR_external_fence" ) || + ( extension == "VK_KHR_maintenance2" ) || ( extension == "VK_KHR_variable_pointers" ) || ( extension == "VK_KHR_dedicated_allocation" ) || + ( extension == "VK_EXT_sampler_filter_minmax" ) || ( extension == "VK_KHR_storage_buffer_storage_class" ) || + ( extension == "VK_EXT_inline_uniform_block" ) || ( extension == "VK_KHR_relaxed_block_layout" ) || + ( extension == "VK_KHR_get_memory_requirements2" ) || ( extension == "VK_KHR_image_format_list" ) || + ( extension == "VK_KHR_sampler_ycbcr_conversion" ) || ( extension == "VK_KHR_bind_memory2" ) || ( extension == "VK_EXT_descriptor_indexing" ) || + ( extension == "VK_EXT_shader_viewport_index_layer" ) || ( extension == "VK_KHR_maintenance3" ) || ( extension == "VK_KHR_draw_indirect_count" ) || + ( extension == "VK_EXT_global_priority" ) || ( extension == "VK_KHR_shader_subgroup_extended_types" ) || ( extension == "VK_KHR_8bit_storage" ) || + ( extension == "VK_KHR_shader_atomic_int64" ) || ( extension == "VK_EXT_calibrated_timestamps" ) || ( extension == "VK_KHR_global_priority" ) || ( extension == "VK_EXT_vertex_attribute_divisor" ) || ( extension == "VK_EXT_pipeline_creation_feedback" ) || ( extension == "VK_KHR_driver_properties" ) || ( 
extension == "VK_KHR_shader_float_controls" ) || ( extension == "VK_KHR_depth_stencil_resolve" ) || ( extension == "VK_NV_compute_shader_derivatives" ) || ( extension == "VK_NV_fragment_shader_barycentric" ) || ( extension == "VK_KHR_timeline_semaphore" ) || ( extension == "VK_KHR_vulkan_memory_model" ) || ( extension == "VK_KHR_shader_terminate_invocation" ) || ( extension == "VK_EXT_scalar_block_layout" ) || - ( extension == "VK_EXT_subgroup_size_control" ) || ( extension == "VK_KHR_spirv_1_4" ) || ( extension == "VK_KHR_separate_depth_stencil_layouts" ) || - ( extension == "VK_EXT_tooling_info" ) || ( extension == "VK_EXT_separate_stencil_usage" ) || - ( extension == "VK_KHR_uniform_buffer_standard_layout" ) || ( extension == "VK_KHR_buffer_device_address" ) || - ( extension == "VK_EXT_line_rasterization" ) || ( extension == "VK_EXT_host_query_reset" ) || ( extension == "VK_EXT_index_type_uint8" ) || - ( extension == "VK_EXT_extended_dynamic_state" ) || ( extension == "VK_EXT_shader_demote_to_helper_invocation" ) || - ( extension == "VK_KHR_shader_integer_dot_product" ) || ( extension == "VK_EXT_texel_buffer_alignment" ) || - ( extension == "VK_KHR_shader_non_semantic_info" ) || ( extension == "VK_EXT_private_data" ) || - ( extension == "VK_NV_compute_shader_derivatives" ) || ( extension == "VK_NV_fragment_shader_barycentric" ) || - ( extension == "VK_KHR_timeline_semaphore" ) || ( extension == "VK_KHR_vulkan_memory_model" ) || - ( extension == "VK_KHR_shader_terminate_invocation" ) || ( extension == "VK_EXT_scalar_block_layout" ) || - ( extension == "VK_EXT_subgroup_size_control" ) || ( extension == "VK_KHR_spirv_1_4" ) || ( extension == "VK_KHR_separate_depth_stencil_layouts" ) || - ( extension == "VK_EXT_tooling_info" ) || ( extension == "VK_EXT_separate_stencil_usage" ) || - ( extension == "VK_KHR_uniform_buffer_standard_layout" ) || ( extension == "VK_KHR_buffer_device_address" ) || - ( extension == "VK_EXT_line_rasterization" ) || ( extension == "VK_EXT_host_query_reset" ) || ( extension == "VK_EXT_index_type_uint8" ) || - ( extension == "VK_EXT_extended_dynamic_state" ) || ( extension == "VK_EXT_shader_demote_to_helper_invocation" ) || + ( extension == "VK_EXT_subgroup_size_control" ) || ( extension == "VK_KHR_dynamic_rendering_local_read" ) || ( extension == "VK_KHR_spirv_1_4" ) || + ( extension == "VK_KHR_separate_depth_stencil_layouts" ) || ( extension == "VK_EXT_tooling_info" ) || + ( extension == "VK_EXT_separate_stencil_usage" ) || ( extension == "VK_KHR_uniform_buffer_standard_layout" ) || + ( extension == "VK_KHR_buffer_device_address" ) || ( extension == "VK_EXT_line_rasterization" ) || ( extension == "VK_EXT_host_query_reset" ) || + ( extension == "VK_EXT_index_type_uint8" ) || ( extension == "VK_EXT_extended_dynamic_state" ) || ( extension == "VK_EXT_host_image_copy" ) || + ( extension == "VK_KHR_map_memory2" ) || ( extension == "VK_EXT_shader_demote_to_helper_invocation" ) || ( extension == "VK_KHR_shader_integer_dot_product" ) || ( extension == "VK_EXT_texel_buffer_alignment" ) || ( extension == "VK_KHR_shader_non_semantic_info" ) || ( extension == "VK_EXT_private_data" ) || ( extension == "VK_EXT_pipeline_creation_cache_control" ) || ( extension == "VK_KHR_synchronization2" ) || @@ -3338,7 +3369,11 @@ namespace VULKAN_HPP_NAMESPACE ( extension == "VK_EXT_image_robustness" ) || ( extension == "VK_KHR_copy_commands2" ) || ( extension == "VK_EXT_4444_formats" ) || ( extension == "VK_ARM_rasterization_order_attachment_access" ) || ( extension == 
"VK_VALVE_mutable_descriptor_type" ) || ( extension == "VK_KHR_format_feature_flags2" ) || ( extension == "VK_EXT_extended_dynamic_state2" ) || - ( extension == "VK_EXT_global_priority_query" ) || ( extension == "VK_EXT_load_store_op_none" ) || ( extension == "VK_KHR_maintenance4" ); + ( extension == "VK_EXT_global_priority_query" ) || ( extension == "VK_EXT_load_store_op_none" ) || ( extension == "VK_KHR_maintenance4" ) || + ( extension == "VK_KHR_shader_subgroup_rotate" ) || ( extension == "VK_EXT_pipeline_protected_access" ) || ( extension == "VK_KHR_maintenance5" ) || + ( extension == "VK_KHR_vertex_attribute_divisor" ) || ( extension == "VK_KHR_load_store_op_none" ) || + ( extension == "VK_KHR_shader_float_controls2" ) || ( extension == "VK_KHR_index_type_uint8" ) || ( extension == "VK_KHR_line_rasterization" ) || + ( extension == "VK_KHR_shader_expect_assume" ) || ( extension == "VK_KHR_maintenance6" ); } } // namespace VULKAN_HPP_NAMESPACE diff --git a/third_party/vulkan/vulkan_format_traits.hpp b/third_party/vulkan/vulkan_format_traits.hpp index 25790fd..75d9d80 100644 --- a/third_party/vulkan/vulkan_format_traits.hpp +++ b/third_party/vulkan/vulkan_format_traits.hpp @@ -354,6 +354,8 @@ namespace VULKAN_HPP_NAMESPACE case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SfloatBlock: return 16; case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SfloatBlock: return 16; case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SfloatBlock: return 16; + case VULKAN_HPP_NAMESPACE::Format::eA1B5G5R5UnormPack16: return 2; + case VULKAN_HPP_NAMESPACE::Format::eA8Unorm: return 1; case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppUnormBlockIMG: return 8; case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppUnormBlockIMG: return 8; case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppUnormBlockIMG: return 8; @@ -363,8 +365,6 @@ namespace VULKAN_HPP_NAMESPACE case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppSrgbBlockIMG: return 8; case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppSrgbBlockIMG: return 8; case VULKAN_HPP_NAMESPACE::Format::eR16G16Sfixed5NV: return 4; - case VULKAN_HPP_NAMESPACE::Format::eA1B5G5R5UnormPack16KHR: return 2; - case VULKAN_HPP_NAMESPACE::Format::eA8UnormKHR: return 1; default: VULKAN_HPP_ASSERT( false ); return 0; } @@ -613,6 +613,8 @@ namespace VULKAN_HPP_NAMESPACE case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SfloatBlock: return "ASTC_10x10"; case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SfloatBlock: return "ASTC_12x10"; case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SfloatBlock: return "ASTC_12x12"; + case VULKAN_HPP_NAMESPACE::Format::eA1B5G5R5UnormPack16: return "16-bit"; + case VULKAN_HPP_NAMESPACE::Format::eA8Unorm: return "8-bit alpha"; case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppUnormBlockIMG: return "PVRTC1_2BPP"; case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppUnormBlockIMG: return "PVRTC1_4BPP"; case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppUnormBlockIMG: return "PVRTC2_2BPP"; @@ -622,8 +624,6 @@ namespace VULKAN_HPP_NAMESPACE case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppSrgbBlockIMG: return "PVRTC2_2BPP"; case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppSrgbBlockIMG: return "PVRTC2_4BPP"; case VULKAN_HPP_NAMESPACE::Format::eR16G16Sfixed5NV: return "32-bit"; - case VULKAN_HPP_NAMESPACE::Format::eA1B5G5R5UnormPack16KHR: return "16-bit"; - case VULKAN_HPP_NAMESPACE::Format::eA8UnormKHR: return "8-bit alpha"; default: VULKAN_HPP_ASSERT( false ); return ""; } @@ -2005,14 +2005,7 @@ namespace VULKAN_HPP_NAMESPACE case 3: return 4; default: VULKAN_HPP_ASSERT( false ); return 0; } - case 
VULKAN_HPP_NAMESPACE::Format::eR16G16Sfixed5NV: - switch ( component ) - { - case 0: return 16; - case 1: return 16; - default: VULKAN_HPP_ASSERT( false ); return 0; - } - case VULKAN_HPP_NAMESPACE::Format::eA1B5G5R5UnormPack16KHR: + case VULKAN_HPP_NAMESPACE::Format::eA1B5G5R5UnormPack16: switch ( component ) { case 0: return 1; @@ -2021,12 +2014,19 @@ namespace VULKAN_HPP_NAMESPACE case 3: return 5; default: VULKAN_HPP_ASSERT( false ); return 0; } - case VULKAN_HPP_NAMESPACE::Format::eA8UnormKHR: + case VULKAN_HPP_NAMESPACE::Format::eA8Unorm: switch ( component ) { case 0: return 8; default: VULKAN_HPP_ASSERT( false ); return 0; } + case VULKAN_HPP_NAMESPACE::Format::eR16G16Sfixed5NV: + switch ( component ) + { + case 0: return 16; + case 1: return 16; + default: VULKAN_HPP_ASSERT( false ); return 0; + } default: return 0; } @@ -2275,6 +2275,8 @@ namespace VULKAN_HPP_NAMESPACE case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SfloatBlock: return 4; case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SfloatBlock: return 4; case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SfloatBlock: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA1B5G5R5UnormPack16: return 4; + case VULKAN_HPP_NAMESPACE::Format::eA8Unorm: return 1; case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppUnormBlockIMG: return 4; case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppUnormBlockIMG: return 4; case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppUnormBlockIMG: return 4; @@ -2284,8 +2286,6 @@ namespace VULKAN_HPP_NAMESPACE case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppSrgbBlockIMG: return 4; case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppSrgbBlockIMG: return 4; case VULKAN_HPP_NAMESPACE::Format::eR16G16Sfixed5NV: return 2; - case VULKAN_HPP_NAMESPACE::Format::eA1B5G5R5UnormPack16KHR: return 4; - case VULKAN_HPP_NAMESPACE::Format::eA8UnormKHR: return 1; default: return 0; } @@ -4227,6 +4227,21 @@ namespace VULKAN_HPP_NAMESPACE case 3: return "A"; default: VULKAN_HPP_ASSERT( false ); return ""; } + case VULKAN_HPP_NAMESPACE::Format::eA1B5G5R5UnormPack16: + switch ( component ) + { + case 0: return "A"; + case 1: return "B"; + case 2: return "G"; + case 3: return "R"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eA8Unorm: + switch ( component ) + { + case 0: return "A"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppUnormBlockIMG: switch ( component ) { @@ -4306,21 +4321,6 @@ namespace VULKAN_HPP_NAMESPACE case 1: return "G"; default: VULKAN_HPP_ASSERT( false ); return ""; } - case VULKAN_HPP_NAMESPACE::Format::eA1B5G5R5UnormPack16KHR: - switch ( component ) - { - case 0: return "A"; - case 1: return "B"; - case 2: return "G"; - case 3: return "R"; - default: VULKAN_HPP_ASSERT( false ); return ""; - } - case VULKAN_HPP_NAMESPACE::Format::eA8UnormKHR: - switch ( component ) - { - case 0: return "A"; - default: VULKAN_HPP_ASSERT( false ); return ""; - } default: return ""; } @@ -6262,6 +6262,21 @@ namespace VULKAN_HPP_NAMESPACE case 3: return "SFLOAT"; default: VULKAN_HPP_ASSERT( false ); return ""; } + case VULKAN_HPP_NAMESPACE::Format::eA1B5G5R5UnormPack16: + switch ( component ) + { + case 0: return "UNORM"; + case 1: return "UNORM"; + case 2: return "UNORM"; + case 3: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } + case VULKAN_HPP_NAMESPACE::Format::eA8Unorm: + switch ( component ) + { + case 0: return "UNORM"; + default: VULKAN_HPP_ASSERT( false ); return ""; + } case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppUnormBlockIMG: switch 
( component ) { @@ -6341,21 +6356,6 @@ namespace VULKAN_HPP_NAMESPACE case 1: return "SFIXED5"; default: VULKAN_HPP_ASSERT( false ); return ""; } - case VULKAN_HPP_NAMESPACE::Format::eA1B5G5R5UnormPack16KHR: - switch ( component ) - { - case 0: return "UNORM"; - case 1: return "UNORM"; - case 2: return "UNORM"; - case 3: return "UNORM"; - default: VULKAN_HPP_ASSERT( false ); return ""; - } - case VULKAN_HPP_NAMESPACE::Format::eA8UnormKHR: - switch ( component ) - { - case 0: return "UNORM"; - default: VULKAN_HPP_ASSERT( false ); return ""; - } default: return ""; } @@ -6796,7 +6796,7 @@ namespace VULKAN_HPP_NAMESPACE case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane444Unorm3Pack16: return 16; case VULKAN_HPP_NAMESPACE::Format::eA4R4G4B4UnormPack16: return 16; case VULKAN_HPP_NAMESPACE::Format::eA4B4G4R4UnormPack16: return 16; - case VULKAN_HPP_NAMESPACE::Format::eA1B5G5R5UnormPack16KHR: return 16; + case VULKAN_HPP_NAMESPACE::Format::eA1B5G5R5UnormPack16: return 16; default: return 0; } @@ -7649,6 +7649,8 @@ namespace VULKAN_HPP_NAMESPACE case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SfloatBlock: return 100; case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SfloatBlock: return 120; case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SfloatBlock: return 144; + case VULKAN_HPP_NAMESPACE::Format::eA1B5G5R5UnormPack16: return 1; + case VULKAN_HPP_NAMESPACE::Format::eA8Unorm: return 1; case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppUnormBlockIMG: return 1; case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppUnormBlockIMG: return 1; case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppUnormBlockIMG: return 1; @@ -7658,8 +7660,6 @@ namespace VULKAN_HPP_NAMESPACE case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppSrgbBlockIMG: return 1; case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppSrgbBlockIMG: return 1; case VULKAN_HPP_NAMESPACE::Format::eR16G16Sfixed5NV: return 1; - case VULKAN_HPP_NAMESPACE::Format::eA1B5G5R5UnormPack16KHR: return 1; - case VULKAN_HPP_NAMESPACE::Format::eA8UnormKHR: return 1; default: VULKAN_HPP_ASSERT( false ); return 0; } diff --git a/third_party/vulkan/vulkan_funcs.hpp b/third_party/vulkan/vulkan_funcs.hpp index 5773452..f7ecce5 100644 --- a/third_party/vulkan/vulkan_funcs.hpp +++ b/third_party/vulkan/vulkan_funcs.hpp @@ -11,9 +11,6 @@ // include-what-you-use: make sure, vulkan.hpp is used by code-completers // IWYU pragma: private; include "vulkan.hpp" -// include-what-you-use: make sure, vulkan.hpp is used by code-completers -// IWYU pragma: private; include "vulkan.hpp" - namespace VULKAN_HPP_NAMESPACE { @@ -51,10 +48,8 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &instance ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::createInstance" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::createInstance" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( instance ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( instance ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -73,12 +68,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &instance ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::createInstanceUnique" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::createInstanceUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, UniqueHandle( instance, 
detail::ObjectDestroy( allocator, d ) ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( instance, detail::ObjectDestroy( allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -88,7 +80,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkDestroyInstance( static_cast( m_instance ), reinterpret_cast( pAllocator ) ); - d.vkDestroyInstance( static_cast( m_instance ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -113,14 +104,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( d.vkEnumeratePhysicalDevices( static_cast( m_instance ), pPhysicalDeviceCount, reinterpret_cast( pPhysicalDevices ) ) ); - return static_cast( - d.vkEnumeratePhysicalDevices( static_cast( m_instance ), pPhysicalDeviceCount, reinterpret_cast( pPhysicalDevices ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template ::value, int>::type> template ::value, int>::type> @@ -146,14 +132,12 @@ namespace VULKAN_HPP_NAMESPACE } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDevices" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDevices" ); VULKAN_HPP_ASSERT( physicalDeviceCount <= physicalDevices.size() ); if ( physicalDeviceCount < physicalDevices.size() ) { physicalDevices.resize( physicalDeviceCount ); } return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( physicalDevices ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( physicalDevices ) ); } template ( m_physicalDevice ), reinterpret_cast( pFeatures ) ); - d.vkGetPhysicalDeviceFeatures( static_cast( m_physicalDevice ), reinterpret_cast( pFeatures ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -225,8 +206,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkGetPhysicalDeviceFormatProperties( static_cast( m_physicalDevice ), static_cast( format ), reinterpret_cast( pFormatProperties ) ); - d.vkGetPhysicalDeviceFormatProperties( - static_cast( m_physicalDevice ), static_cast( format ), reinterpret_cast( pFormatProperties ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -256,7 +235,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetPhysicalDeviceImageFormatProperties( static_cast( m_physicalDevice ), return static_cast( d.vkGetPhysicalDeviceImageFormatProperties( static_cast( m_physicalDevice ), static_cast( format ), static_cast( type ), @@ -291,10 +269,8 @@ namespace VULKAN_HPP_NAMESPACE static_cast( flags ), reinterpret_cast( &imageFormatProperties ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( imageFormatProperties ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( imageFormatProperties ) ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -304,7 +280,6 @@ 
namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkGetPhysicalDeviceProperties( static_cast( m_physicalDevice ), reinterpret_cast( pProperties ) ); - d.vkGetPhysicalDeviceProperties( static_cast( m_physicalDevice ), reinterpret_cast( pProperties ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -332,14 +307,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkGetPhysicalDeviceQueueFamilyProperties( static_cast( m_physicalDevice ), pQueueFamilyPropertyCount, reinterpret_cast( pQueueFamilyProperties ) ); - static_cast( m_physicalDevice ), pQueueFamilyPropertyCount, reinterpret_cast( pQueueFamilyProperties ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template < - typename QueueFamilyPropertiesAllocator, - typename Dispatch, - typename std::enable_if::value, int>::type> template < typename QueueFamilyPropertiesAllocator, typename Dispatch, @@ -402,8 +372,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkGetPhysicalDeviceMemoryProperties( static_cast( m_physicalDevice ), reinterpret_cast( pMemoryProperties ) ); - d.vkGetPhysicalDeviceMemoryProperties( static_cast( m_physicalDevice ), - reinterpret_cast( pMemoryProperties ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -428,7 +396,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return d.vkGetInstanceProcAddr( static_cast( m_instance ), pName ); - return d.vkGetInstanceProcAddr( static_cast( m_instance ), pName ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -451,7 +418,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return d.vkGetDeviceProcAddr( static_cast( m_device ), pName ); - return d.vkGetDeviceProcAddr( static_cast( m_device ), pName ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -476,7 +442,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateDevice( static_cast( m_physicalDevice ), return static_cast( d.vkCreateDevice( static_cast( m_physicalDevice ), reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), @@ -500,10 +465,8 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &device ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDevice" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDevice" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( device ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( device ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -525,12 +488,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &device ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDeviceUnique" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDeviceUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, UniqueHandle( device, detail::ObjectDestroy( allocator, d ) ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( device, detail::ObjectDestroy( allocator, d ) ) ); } # 
endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -540,7 +500,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkDestroyDevice( static_cast( m_device ), reinterpret_cast( pAllocator ) ); - d.vkDestroyDevice( static_cast( m_device ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -569,10 +528,6 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template < - typename ExtensionPropertiesAllocator, - typename Dispatch, - typename std::enable_if::value, int>::type> template < typename ExtensionPropertiesAllocator, typename Dispatch, @@ -600,14 +555,12 @@ namespace VULKAN_HPP_NAMESPACE } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceExtensionProperties" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceExtensionProperties" ); VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); if ( propertyCount < properties.size() ) { properties.resize( propertyCount ); } return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); } template < @@ -639,14 +592,12 @@ namespace VULKAN_HPP_NAMESPACE } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceExtensionProperties" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceExtensionProperties" ); VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); if ( propertyCount < properties.size() ) { properties.resize( propertyCount ); } return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -659,15 +610,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( d.vkEnumerateDeviceExtensionProperties( static_cast( m_physicalDevice ), pLayerName, pPropertyCount, reinterpret_cast( pProperties ) ) ); - return static_cast( d.vkEnumerateDeviceExtensionProperties( - static_cast( m_physicalDevice ), pLayerName, pPropertyCount, reinterpret_cast( pProperties ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template < - typename ExtensionPropertiesAllocator, - typename Dispatch, - typename std::enable_if::value, int>::type> template < typename ExtensionPropertiesAllocator, typename Dispatch, @@ -695,14 +640,12 @@ namespace VULKAN_HPP_NAMESPACE } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceExtensionProperties" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceExtensionProperties" ); VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); if ( propertyCount < properties.size() ) { properties.resize( propertyCount ); } return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); } template 
< @@ -734,14 +677,12 @@ namespace VULKAN_HPP_NAMESPACE } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceExtensionProperties" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceExtensionProperties" ); VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); if ( propertyCount < properties.size() ) { properties.resize( propertyCount ); } return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -755,9 +696,6 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template ::value, int>::type> template ::value, int>::type> @@ -783,14 +721,12 @@ namespace VULKAN_HPP_NAMESPACE } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceLayerProperties" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceLayerProperties" ); VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); if ( propertyCount < properties.size() ) { properties.resize( propertyCount ); } return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); } template ( d.vkEnumerateDeviceLayerProperties( static_cast( m_physicalDevice ), pPropertyCount, reinterpret_cast( pProperties ) ) ); - return static_cast( d.vkEnumerateDeviceLayerProperties( - static_cast( m_physicalDevice ), pPropertyCount, reinterpret_cast( pProperties ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template ::value, int>::type> template ::value, int>::type> @@ -870,14 +799,12 @@ namespace VULKAN_HPP_NAMESPACE } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceLayerProperties" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceLayerProperties" ); VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); if ( propertyCount < properties.size() ) { properties.resize( propertyCount ); } return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); } template ( m_device ), queueFamilyIndex, queueIndex, reinterpret_cast( pQueue ) ); - d.vkGetDeviceQueue( static_cast( m_device ), queueFamilyIndex, queueIndex, reinterpret_cast( pQueue ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -951,8 +875,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( d.vkQueueSubmit( static_cast( m_queue ), submitCount, reinterpret_cast( pSubmits ), static_cast( fence ) ) ); - return static_cast( - d.vkQueueSubmit( static_cast( m_queue ), submitCount, reinterpret_cast( pSubmits ), static_cast( fence ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -968,10 +890,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkQueueSubmit( m_queue, submits.size(), reinterpret_cast( 
submits.data() ), static_cast( fence ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::submit" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::submit" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -981,7 +901,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( d.vkQueueWaitIdle( static_cast( m_queue ) ) ); - return static_cast( d.vkQueueWaitIdle( static_cast( m_queue ) ) ); } #else template @@ -994,10 +913,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkQueueWaitIdle( m_queue ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::waitIdle" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::waitIdle" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ @@ -1007,7 +924,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( d.vkDeviceWaitIdle( static_cast( m_device ) ) ); - return static_cast( d.vkDeviceWaitIdle( static_cast( m_device ) ) ); } #else template @@ -1020,10 +936,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkDeviceWaitIdle( m_device ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::waitIdle" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::waitIdle" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ @@ -1034,7 +948,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkAllocateMemory( static_cast( m_device ), return static_cast( d.vkAllocateMemory( static_cast( m_device ), reinterpret_cast( pAllocateInfo ), reinterpret_cast( pAllocator ), @@ -1060,10 +973,8 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &memory ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateMemory" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateMemory" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( memory ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( memory ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -1085,12 +996,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &memory ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateMemoryUnique" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateMemoryUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, UniqueHandle( memory, detail::ObjectFree( *this, allocator, d ) ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - 
result, UniqueHandle( memory, detail::ObjectFree( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -1102,7 +1010,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkFreeMemory( static_cast( m_device ), static_cast( memory ), reinterpret_cast( pAllocator ) ); - d.vkFreeMemory( static_cast( m_device ), static_cast( memory ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -1129,7 +1036,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkFreeMemory( static_cast( m_device ), static_cast( memory ), reinterpret_cast( pAllocator ) ); - d.vkFreeMemory( static_cast( m_device ), static_cast( memory ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -1158,7 +1064,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkMapMemory( static_cast( m_device ), return static_cast( d.vkMapMemory( static_cast( m_device ), static_cast( memory ), static_cast( offset ), @@ -1188,10 +1093,8 @@ namespace VULKAN_HPP_NAMESPACE static_cast( flags ), &pData ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::mapMemory" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::mapMemory" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( pData ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( pData ) ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -1200,7 +1103,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkUnmapMemory( static_cast( m_device ), static_cast( memory ) ); - d.vkUnmapMemory( static_cast( m_device ), static_cast( memory ) ); } template @@ -1211,8 +1113,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( d.vkFlushMappedMemoryRanges( static_cast( m_device ), memoryRangeCount, reinterpret_cast( pMemoryRanges ) ) ); - return static_cast( - d.vkFlushMappedMemoryRanges( static_cast( m_device ), memoryRangeCount, reinterpret_cast( pMemoryRanges ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -1229,10 +1129,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkFlushMappedMemoryRanges( m_device, memoryRanges.size(), reinterpret_cast( memoryRanges.data() ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::flushMappedMemoryRanges" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::flushMappedMemoryRanges" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -1244,7 +1142,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( d.vkInvalidateMappedMemoryRanges( static_cast( m_device ), memoryRangeCount, reinterpret_cast( pMemoryRanges ) ) ); - d.vkInvalidateMappedMemoryRanges( static_cast( m_device ), memoryRangeCount, reinterpret_cast( pMemoryRanges ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -1261,10 +1158,8 @@ namespace 
VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkInvalidateMappedMemoryRanges( m_device, memoryRanges.size(), reinterpret_cast( memoryRanges.data() ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::invalidateMappedMemoryRanges" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::invalidateMappedMemoryRanges" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -1276,8 +1171,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkGetDeviceMemoryCommitment( static_cast( m_device ), static_cast( memory ), reinterpret_cast( pCommittedMemoryInBytes ) ); - d.vkGetDeviceMemoryCommitment( - static_cast( m_device ), static_cast( memory ), reinterpret_cast( pCommittedMemoryInBytes ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -1307,8 +1200,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( d.vkBindBufferMemory( static_cast( m_device ), static_cast( buffer ), static_cast( memory ), static_cast( memoryOffset ) ) ); - return static_cast( d.vkBindBufferMemory( - static_cast( m_device ), static_cast( buffer ), static_cast( memory ), static_cast( memoryOffset ) ) ); } #else template @@ -1323,10 +1214,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkBindBufferMemory( m_device, static_cast( buffer ), static_cast( memory ), static_cast( memoryOffset ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ @@ -1340,8 +1229,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( d.vkBindImageMemory( static_cast( m_device ), static_cast( image ), static_cast( memory ), static_cast( memoryOffset ) ) ); - return static_cast( d.vkBindImageMemory( - static_cast( m_device ), static_cast( image ), static_cast( memory ), static_cast( memoryOffset ) ) ); } #else template @@ -1356,10 +1243,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkBindImageMemory( m_device, static_cast( image ), static_cast( memory ), static_cast( memoryOffset ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ @@ -1371,8 +1256,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkGetBufferMemoryRequirements( static_cast( m_device ), static_cast( buffer ), reinterpret_cast( pMemoryRequirements ) ); - d.vkGetBufferMemoryRequirements( - static_cast( m_device ), static_cast( buffer ), reinterpret_cast( pMemoryRequirements ) ); } #ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -1400,8 +1283,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkGetImageMemoryRequirements( static_cast( m_device ), static_cast( image ), reinterpret_cast( pMemoryRequirements ) ); - d.vkGetImageMemoryRequirements( - static_cast( m_device ), static_cast( image ), reinterpret_cast( pMemoryRequirements ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -1428,7 +1309,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetImageSparseMemoryRequirements( static_cast( m_device ), d.vkGetImageSparseMemoryRequirements( static_cast( m_device ), static_cast( image ), pSparseMemoryRequirementCount, @@ -1436,11 +1316,6 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template ::value, - int>::type> template ( m_physicalDevice ), d.vkGetPhysicalDeviceSparseImageFormatProperties( static_cast( m_physicalDevice ), static_cast( format ), static_cast( type ), @@ -1528,11 +1402,6 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template < - typename SparseImageFormatPropertiesAllocator, - typename Dispatch, - typename std::enable_if::value, - int>::type> template < typename SparseImageFormatPropertiesAllocator, typename Dispatch, @@ -1638,8 +1507,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( d.vkQueueBindSparse( static_cast( m_queue ), bindInfoCount, reinterpret_cast( pBindInfo ), static_cast( fence ) ) ); - return static_cast( d.vkQueueBindSparse( - static_cast( m_queue ), bindInfoCount, reinterpret_cast( pBindInfo ), static_cast( fence ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -1655,10 +1522,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkQueueBindSparse( m_queue, bindInfo.size(), reinterpret_cast( bindInfo.data() ), static_cast( fence ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::bindSparse" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::bindSparse" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -1669,7 +1534,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateFence( static_cast( m_device ), return static_cast( d.vkCreateFence( static_cast( m_device ), reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), @@ -1693,10 +1557,8 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &fence ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createFence" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createFence" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( fence ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( fence ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -1716,12 +1578,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &fence ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, 
VULKAN_HPP_NAMESPACE_STRING "::Device::createFenceUnique" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createFenceUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, UniqueHandle( fence, detail::ObjectDestroy( *this, allocator, d ) ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( fence, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -1733,7 +1592,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkDestroyFence( static_cast( m_device ), static_cast( fence ), reinterpret_cast( pAllocator ) ); - d.vkDestroyFence( static_cast( m_device ), static_cast( fence ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -1760,7 +1618,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkDestroyFence( static_cast( m_device ), static_cast( fence ), reinterpret_cast( pAllocator ) ); - d.vkDestroyFence( static_cast( m_device ), static_cast( fence ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -1787,7 +1644,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( d.vkResetFences( static_cast( m_device ), fenceCount, reinterpret_cast( pFences ) ) ); - return static_cast( d.vkResetFences( static_cast( m_device ), fenceCount, reinterpret_cast( pFences ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -1803,10 +1659,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkResetFences( m_device, fences.size(), reinterpret_cast( fences.data() ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::resetFences" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::resetFences" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -1816,7 +1670,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( d.vkGetFenceStatus( static_cast( m_device ), static_cast( fence ) ) ); - return static_cast( d.vkGetFenceStatus( static_cast( m_device ), static_cast( fence ) ) ); } #else template @@ -1828,7 +1681,6 @@ namespace VULKAN_HPP_NAMESPACE # endif VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetFenceStatus( m_device, static_cast( fence ) ) ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceStatus", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } ); @@ -1846,8 +1698,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( d.vkWaitForFences( static_cast( m_device ), fenceCount, reinterpret_cast( pFences ), static_cast( waitAll ), timeout ) ); - return static_cast( d.vkWaitForFences( - static_cast( m_device ), fenceCount, reinterpret_cast( pFences ), static_cast( waitAll ), timeout ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -1865,7 +1715,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkWaitForFences( 
m_device, fences.size(), reinterpret_cast( fences.data() ), static_cast( waitAll ), timeout ) ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::waitForFences", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout } ); @@ -1880,7 +1729,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateSemaphore( static_cast( m_device ), return static_cast( d.vkCreateSemaphore( static_cast( m_device ), reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), @@ -1906,10 +1754,8 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &semaphore ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSemaphore" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSemaphore" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( semaphore ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( semaphore ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -1931,12 +1777,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &semaphore ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSemaphoreUnique" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSemaphoreUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, UniqueHandle( semaphore, detail::ObjectDestroy( *this, allocator, d ) ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( semaphore, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -1949,8 +1792,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkDestroySemaphore( static_cast( m_device ), static_cast( semaphore ), reinterpret_cast( pAllocator ) ); - d.vkDestroySemaphore( - static_cast( m_device ), static_cast( semaphore ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -1978,8 +1819,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkDestroySemaphore( static_cast( m_device ), static_cast( semaphore ), reinterpret_cast( pAllocator ) ); - d.vkDestroySemaphore( - static_cast( m_device ), static_cast( semaphore ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -2006,7 +1845,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateEvent( static_cast( m_device ), return static_cast( d.vkCreateEvent( static_cast( m_device ), reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), @@ -2030,10 +1868,8 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &event ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createEvent" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createEvent" ); return 
VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( event ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( event ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -2053,12 +1889,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &event ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createEventUnique" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createEventUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, UniqueHandle( event, detail::ObjectDestroy( *this, allocator, d ) ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( event, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -2070,7 +1903,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkDestroyEvent( static_cast( m_device ), static_cast( event ), reinterpret_cast( pAllocator ) ); - d.vkDestroyEvent( static_cast( m_device ), static_cast( event ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -2097,7 +1929,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkDestroyEvent( static_cast( m_device ), static_cast( event ), reinterpret_cast( pAllocator ) ); - d.vkDestroyEvent( static_cast( m_device ), static_cast( event ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -2123,7 +1954,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( d.vkGetEventStatus( static_cast( m_device ), static_cast( event ) ) ); - return static_cast( d.vkGetEventStatus( static_cast( m_device ), static_cast( event ) ) ); } #else template @@ -2135,7 +1965,6 @@ namespace VULKAN_HPP_NAMESPACE # endif VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetEventStatus( m_device, static_cast( event ) ) ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getEventStatus", { VULKAN_HPP_NAMESPACE::Result::eEventSet, VULKAN_HPP_NAMESPACE::Result::eEventReset } ); @@ -2149,7 +1978,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( d.vkSetEvent( static_cast( m_device ), static_cast( event ) ) ); - return static_cast( d.vkSetEvent( static_cast( m_device ), static_cast( event ) ) ); } #else template @@ -2163,10 +1991,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkSetEvent( m_device, static_cast( event ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setEvent" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setEvent" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ @@ -2176,7 +2002,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( d.vkResetEvent( static_cast( m_device ), static_cast( event ) ) ); - return static_cast( d.vkResetEvent( static_cast( m_device ), static_cast( event ) 
) ); } #else template @@ -2189,10 +2014,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkResetEvent( m_device, static_cast( event ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::resetEvent" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::resetEvent" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ @@ -2203,7 +2026,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateQueryPool( static_cast( m_device ), return static_cast( d.vkCreateQueryPool( static_cast( m_device ), reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), @@ -2229,10 +2051,8 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &queryPool ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createQueryPool" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createQueryPool" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( queryPool ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( queryPool ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -2254,12 +2074,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &queryPool ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createQueryPoolUnique" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createQueryPoolUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, UniqueHandle( queryPool, detail::ObjectDestroy( *this, allocator, d ) ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( queryPool, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -2272,8 +2089,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkDestroyQueryPool( static_cast( m_device ), static_cast( queryPool ), reinterpret_cast( pAllocator ) ); - d.vkDestroyQueryPool( - static_cast( m_device ), static_cast( queryPool ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -2301,8 +2116,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkDestroyQueryPool( static_cast( m_device ), static_cast( queryPool ), reinterpret_cast( pAllocator ) ); - d.vkDestroyQueryPool( - static_cast( m_device ), static_cast( queryPool ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -2333,7 +2146,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetQueryPoolResults( static_cast( m_device ), return static_cast( d.vkGetQueryPoolResults( static_cast( m_device ), static_cast( queryPool ), firstQuery, @@ -2345,10 +2157,6 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template ::value, int>::type> template >( result, std::move( data 
) ); } @@ -2409,7 +2214,6 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &data ), static_cast( stride ), static_cast( flags ) ) ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getQueryPoolResult", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } ); @@ -2424,7 +2228,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateBuffer( static_cast( m_device ), return static_cast( d.vkCreateBuffer( static_cast( m_device ), reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), @@ -2448,10 +2251,8 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &buffer ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createBuffer" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createBuffer" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( buffer ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( buffer ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -2471,12 +2272,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &buffer ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferUnique" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, UniqueHandle( buffer, detail::ObjectDestroy( *this, allocator, d ) ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( buffer, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -2488,7 +2286,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkDestroyBuffer( static_cast( m_device ), static_cast( buffer ), reinterpret_cast( pAllocator ) ); - d.vkDestroyBuffer( static_cast( m_device ), static_cast( buffer ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -2515,7 +2312,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkDestroyBuffer( static_cast( m_device ), static_cast( buffer ), reinterpret_cast( pAllocator ) ); - d.vkDestroyBuffer( static_cast( m_device ), static_cast( buffer ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -2542,7 +2338,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateBufferView( static_cast( m_device ), return static_cast( d.vkCreateBufferView( static_cast( m_device ), reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), @@ -2568,10 +2363,8 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &view ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferView" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferView" ); return 
VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( view ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( view ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -2593,12 +2386,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &view ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferViewUnique" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferViewUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, UniqueHandle( view, detail::ObjectDestroy( *this, allocator, d ) ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( view, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -2611,8 +2401,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkDestroyBufferView( static_cast( m_device ), static_cast( bufferView ), reinterpret_cast( pAllocator ) ); - d.vkDestroyBufferView( - static_cast( m_device ), static_cast( bufferView ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -2640,8 +2428,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkDestroyBufferView( static_cast( m_device ), static_cast( bufferView ), reinterpret_cast( pAllocator ) ); - d.vkDestroyBufferView( - static_cast( m_device ), static_cast( bufferView ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -2668,7 +2454,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateImage( static_cast( m_device ), return static_cast( d.vkCreateImage( static_cast( m_device ), reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), @@ -2692,10 +2477,8 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &image ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createImage" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createImage" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( image ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( image ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -2715,12 +2498,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &image ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createImageUnique" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createImageUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, UniqueHandle( image, detail::ObjectDestroy( *this, allocator, d ) ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( image, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -2732,7 +2512,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkDestroyImage( static_cast( m_device ), static_cast( image ), 
reinterpret_cast( pAllocator ) ); - d.vkDestroyImage( static_cast( m_device ), static_cast( image ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -2759,7 +2538,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkDestroyImage( static_cast( m_device ), static_cast( image ), reinterpret_cast( pAllocator ) ); - d.vkDestroyImage( static_cast( m_device ), static_cast( image ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -2786,7 +2564,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetImageSubresourceLayout( static_cast( m_device ), d.vkGetImageSubresourceLayout( static_cast( m_device ), static_cast( image ), reinterpret_cast( pSubresource ), @@ -2820,7 +2597,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateImageView( static_cast( m_device ), return static_cast( d.vkCreateImageView( static_cast( m_device ), reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), @@ -2846,10 +2622,8 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &view ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createImageView" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createImageView" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( view ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( view ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -2871,12 +2645,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &view ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createImageViewUnique" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createImageViewUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, UniqueHandle( view, detail::ObjectDestroy( *this, allocator, d ) ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( view, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -2889,8 +2660,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkDestroyImageView( static_cast( m_device ), static_cast( imageView ), reinterpret_cast( pAllocator ) ); - d.vkDestroyImageView( - static_cast( m_device ), static_cast( imageView ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -2918,8 +2687,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkDestroyImageView( static_cast( m_device ), static_cast( imageView ), reinterpret_cast( pAllocator ) ); - d.vkDestroyImageView( - static_cast( m_device ), static_cast( imageView ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -2946,7 +2713,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateShaderModule( static_cast( m_device ), 
return static_cast( d.vkCreateShaderModule( static_cast( m_device ), reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), @@ -2972,10 +2738,8 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &shaderModule ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createShaderModule" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createShaderModule" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( shaderModule ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( shaderModule ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -2997,12 +2761,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &shaderModule ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createShaderModuleUnique" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createShaderModuleUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, UniqueHandle( shaderModule, detail::ObjectDestroy( *this, allocator, d ) ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( shaderModule, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -3015,8 +2776,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkDestroyShaderModule( static_cast( m_device ), static_cast( shaderModule ), reinterpret_cast( pAllocator ) ); - d.vkDestroyShaderModule( - static_cast( m_device ), static_cast( shaderModule ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -3044,8 +2803,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkDestroyShaderModule( static_cast( m_device ), static_cast( shaderModule ), reinterpret_cast( pAllocator ) ); - d.vkDestroyShaderModule( - static_cast( m_device ), static_cast( shaderModule ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -3072,7 +2829,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreatePipelineCache( static_cast( m_device ), return static_cast( d.vkCreatePipelineCache( static_cast( m_device ), reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), @@ -3098,10 +2854,8 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &pipelineCache ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineCache" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineCache" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( pipelineCache ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( pipelineCache ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -3123,12 +2877,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &pipelineCache ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineCacheUnique" ); - 
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineCacheUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, UniqueHandle( pipelineCache, detail::ObjectDestroy( *this, allocator, d ) ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( pipelineCache, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -3141,8 +2892,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkDestroyPipelineCache( static_cast( m_device ), static_cast( pipelineCache ), reinterpret_cast( pAllocator ) ); - d.vkDestroyPipelineCache( - static_cast( m_device ), static_cast( pipelineCache ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -3170,8 +2919,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkDestroyPipelineCache( static_cast( m_device ), static_cast( pipelineCache ), reinterpret_cast( pAllocator ) ); - d.vkDestroyPipelineCache( - static_cast( m_device ), static_cast( pipelineCache ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -3200,14 +2947,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( d.vkGetPipelineCacheData( static_cast( m_device ), static_cast( pipelineCache ), pDataSize, pData ) ); - return static_cast( - d.vkGetPipelineCacheData( static_cast( m_device ), static_cast( pipelineCache ), pDataSize, pData ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template ::value, int>::type> template ::value, int>::type> @@ -3234,14 +2976,12 @@ namespace VULKAN_HPP_NAMESPACE } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineCacheData" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineCacheData" ); VULKAN_HPP_ASSERT( dataSize <= data.size() ); if ( dataSize < data.size() ) { data.resize( dataSize ); } return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); } template ( d.vkMergePipelineCaches( static_cast( m_device ), static_cast( dstCache ), srcCacheCount, reinterpret_cast( pSrcCaches ) ) ); - return static_cast( d.vkMergePipelineCaches( - static_cast( m_device ), static_cast( dstCache ), srcCacheCount, reinterpret_cast( pSrcCaches ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -3309,10 +3045,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkMergePipelineCaches( m_device, static_cast( dstCache ), srcCaches.size(), reinterpret_cast( srcCaches.data() ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::mergePipelineCaches" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::mergePipelineCaches" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -3325,7 +3059,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION 
); - return static_cast( d.vkCreateGraphicsPipelines( static_cast( m_device ), return static_cast( d.vkCreateGraphicsPipelines( static_cast( m_device ), static_cast( pipelineCache ), createInfoCount, @@ -3335,9 +3068,6 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template ::value, int>::type> template ::value, int>::type> @@ -3363,9 +3093,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelines", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, - VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelines", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); return ResultValue>( result, std::move( pipelines ) ); } @@ -3396,9 +3123,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelines", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, - VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelines", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); return ResultValue>( result, std::move( pipelines ) ); } @@ -3426,18 +3150,11 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipeline", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, - VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipeline", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); return ResultValue( result, std::move( pipeline ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE - template < - typename Dispatch, - typename PipelineAllocator, - typename std::enable_if>::value, int>::type> template < typename Dispatch, typename PipelineAllocator, @@ -3464,13 +3181,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelinesUnique", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, - VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelinesUnique", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); std::vector, PipelineAllocator> uniquePipelines; uniquePipelines.reserve( createInfos.size() ); detail::ObjectDestroy deleter( *this, allocator, d ); - detail::ObjectDestroy deleter( *this, allocator, d ); for ( auto const & pipeline : pipelines ) { uniquePipelines.push_back( UniqueHandle( pipeline, deleter ) ); @@ -3505,13 +3218,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelinesUnique", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, - VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelinesUnique", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, 
VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); std::vector, PipelineAllocator> uniquePipelines( pipelineAllocator ); uniquePipelines.reserve( createInfos.size() ); detail::ObjectDestroy deleter( *this, allocator, d ); - detail::ObjectDestroy deleter( *this, allocator, d ); for ( auto const & pipeline : pipelines ) { uniquePipelines.push_back( UniqueHandle( pipeline, deleter ) ); @@ -3542,13 +3251,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelineUnique", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, - VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelineUnique", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); return ResultValue>( result, UniqueHandle( pipeline, detail::ObjectDestroy( *this, allocator, d ) ) ); - result, UniqueHandle( pipeline, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -3562,7 +3267,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateComputePipelines( static_cast( m_device ), return static_cast( d.vkCreateComputePipelines( static_cast( m_device ), static_cast( pipelineCache ), createInfoCount, @@ -3572,9 +3276,6 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template ::value, int>::type> template ::value, int>::type> @@ -3600,9 +3301,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelines", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, - VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelines", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); return ResultValue>( result, std::move( pipelines ) ); } @@ -3633,9 +3331,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelines", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, - VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelines", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); return ResultValue>( result, std::move( pipelines ) ); } @@ -3663,18 +3358,11 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipeline", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, - VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipeline", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); return ResultValue( result, std::move( pipeline ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE - template < - typename Dispatch, - typename PipelineAllocator, - typename std::enable_if>::value, int>::type> template < typename Dispatch, 
typename PipelineAllocator, @@ -3701,13 +3389,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelinesUnique", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, - VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelinesUnique", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); std::vector, PipelineAllocator> uniquePipelines; uniquePipelines.reserve( createInfos.size() ); detail::ObjectDestroy deleter( *this, allocator, d ); - detail::ObjectDestroy deleter( *this, allocator, d ); for ( auto const & pipeline : pipelines ) { uniquePipelines.push_back( UniqueHandle( pipeline, deleter ) ); @@ -3742,13 +3426,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelinesUnique", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, - VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelinesUnique", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); std::vector, PipelineAllocator> uniquePipelines( pipelineAllocator ); uniquePipelines.reserve( createInfos.size() ); detail::ObjectDestroy deleter( *this, allocator, d ); - detail::ObjectDestroy deleter( *this, allocator, d ); for ( auto const & pipeline : pipelines ) { uniquePipelines.push_back( UniqueHandle( pipeline, deleter ) ); @@ -3779,13 +3459,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelineUnique", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, - VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelineUnique", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); return ResultValue>( result, UniqueHandle( pipeline, detail::ObjectDestroy( *this, allocator, d ) ) ); - result, UniqueHandle( pipeline, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -3798,8 +3474,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkDestroyPipeline( static_cast( m_device ), static_cast( pipeline ), reinterpret_cast( pAllocator ) ); - d.vkDestroyPipeline( - static_cast( m_device ), static_cast( pipeline ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -3827,8 +3501,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkDestroyPipeline( static_cast( m_device ), static_cast( pipeline ), reinterpret_cast( pAllocator ) ); - d.vkDestroyPipeline( - static_cast( m_device ), static_cast( pipeline ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -3855,7 +3527,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreatePipelineLayout( static_cast( m_device ), return static_cast( d.vkCreatePipelineLayout( static_cast( m_device ), 
reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), @@ -3881,10 +3552,8 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &pipelineLayout ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineLayout" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineLayout" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( pipelineLayout ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( pipelineLayout ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -3906,12 +3575,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &pipelineLayout ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineLayoutUnique" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineLayoutUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, UniqueHandle( pipelineLayout, detail::ObjectDestroy( *this, allocator, d ) ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( pipelineLayout, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -3924,8 +3590,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkDestroyPipelineLayout( static_cast( m_device ), static_cast( pipelineLayout ), reinterpret_cast( pAllocator ) ); - d.vkDestroyPipelineLayout( - static_cast( m_device ), static_cast( pipelineLayout ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -3953,8 +3617,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkDestroyPipelineLayout( static_cast( m_device ), static_cast( pipelineLayout ), reinterpret_cast( pAllocator ) ); - d.vkDestroyPipelineLayout( - static_cast( m_device ), static_cast( pipelineLayout ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -3981,7 +3643,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateSampler( static_cast( m_device ), return static_cast( d.vkCreateSampler( static_cast( m_device ), reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), @@ -4005,10 +3666,8 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &sampler ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSampler" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSampler" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( sampler ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( sampler ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -4028,12 +3687,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &sampler ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerUnique" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerUnique" ); 
return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, UniqueHandle( sampler, detail::ObjectDestroy( *this, allocator, d ) ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( sampler, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -4045,7 +3701,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkDestroySampler( static_cast( m_device ), static_cast( sampler ), reinterpret_cast( pAllocator ) ); - d.vkDestroySampler( static_cast( m_device ), static_cast( sampler ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -4072,7 +3727,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkDestroySampler( static_cast( m_device ), static_cast( sampler ), reinterpret_cast( pAllocator ) ); - d.vkDestroySampler( static_cast( m_device ), static_cast( sampler ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -4099,7 +3753,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateDescriptorSetLayout( static_cast( m_device ), return static_cast( d.vkCreateDescriptorSetLayout( static_cast( m_device ), reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), @@ -4125,10 +3778,8 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &setLayout ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorSetLayout" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorSetLayout" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( setLayout ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( setLayout ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -4150,12 +3801,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &setLayout ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorSetLayoutUnique" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorSetLayoutUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, UniqueHandle( setLayout, detail::ObjectDestroy( *this, allocator, d ) ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( setLayout, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -4169,9 +3817,6 @@ namespace VULKAN_HPP_NAMESPACE d.vkDestroyDescriptorSetLayout( static_cast( m_device ), static_cast( descriptorSetLayout ), reinterpret_cast( pAllocator ) ); - d.vkDestroyDescriptorSetLayout( static_cast( m_device ), - static_cast( descriptorSetLayout ), - reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -4201,9 +3846,6 @@ namespace VULKAN_HPP_NAMESPACE d.vkDestroyDescriptorSetLayout( static_cast( m_device ), static_cast( descriptorSetLayout ), reinterpret_cast( pAllocator ) ); - d.vkDestroyDescriptorSetLayout( static_cast( m_device ), - static_cast( descriptorSetLayout ), - reinterpret_cast( pAllocator ) ); } 
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -4231,7 +3873,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateDescriptorPool( static_cast( m_device ), return static_cast( d.vkCreateDescriptorPool( static_cast( m_device ), reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), @@ -4257,10 +3898,8 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &descriptorPool ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorPool" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorPool" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( descriptorPool ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( descriptorPool ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -4282,12 +3921,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &descriptorPool ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorPoolUnique" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorPoolUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, UniqueHandle( descriptorPool, detail::ObjectDestroy( *this, allocator, d ) ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( descriptorPool, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -4300,8 +3936,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkDestroyDescriptorPool( static_cast( m_device ), static_cast( descriptorPool ), reinterpret_cast( pAllocator ) ); - d.vkDestroyDescriptorPool( - static_cast( m_device ), static_cast( descriptorPool ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -4329,8 +3963,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkDestroyDescriptorPool( static_cast( m_device ), static_cast( descriptorPool ), reinterpret_cast( pAllocator ) ); - d.vkDestroyDescriptorPool( - static_cast( m_device ), static_cast( descriptorPool ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -4359,8 +3991,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( d.vkResetDescriptorPool( static_cast( m_device ), static_cast( descriptorPool ), static_cast( flags ) ) ); - return static_cast( d.vkResetDescriptorPool( - static_cast( m_device ), static_cast( descriptorPool ), static_cast( flags ) ) ); } #else template @@ -4386,15 +4016,9 @@ namespace VULKAN_HPP_NAMESPACE return static_cast( d.vkAllocateDescriptorSets( static_cast( m_device ), reinterpret_cast( pAllocateInfo ), reinterpret_cast( pDescriptorSets ) ) ); - return static_cast( d.vkAllocateDescriptorSets( static_cast( m_device ), - reinterpret_cast( pAllocateInfo ), - reinterpret_cast( pDescriptorSets ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template ::value, int>::type> template ::value, int>::type> @@ -4410,10 +4034,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = 
static_cast( d.vkAllocateDescriptorSets( m_device, reinterpret_cast( &allocateInfo ), reinterpret_cast( descriptorSets.data() ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSets" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSets" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( descriptorSets ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( descriptorSets ) ); } template ( d.vkAllocateDescriptorSets( m_device, reinterpret_cast( &allocateInfo ), reinterpret_cast( descriptorSets.data() ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSets" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSets" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( descriptorSets ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( descriptorSets ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE - template < - typename Dispatch, - typename DescriptorSetAllocator, - typename std::enable_if>::value, - int>::type> template < typename Dispatch, typename DescriptorSetAllocator, @@ -4463,17 +4078,14 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkAllocateDescriptorSets( m_device, reinterpret_cast( &allocateInfo ), reinterpret_cast( descriptorSets.data() ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSetsUnique" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSetsUnique" ); std::vector, DescriptorSetAllocator> uniqueDescriptorSets; uniqueDescriptorSets.reserve( allocateInfo.descriptorSetCount ); detail::PoolFree deleter( *this, allocateInfo.descriptorPool, d ); - detail::PoolFree deleter( *this, allocateInfo.descriptorPool, d ); for ( auto const & descriptorSet : descriptorSets ) { uniqueDescriptorSets.push_back( UniqueHandle( descriptorSet, deleter ) ); } return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( uniqueDescriptorSets ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( uniqueDescriptorSets ) ); } template < @@ -4496,17 +4108,14 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkAllocateDescriptorSets( m_device, reinterpret_cast( &allocateInfo ), reinterpret_cast( descriptorSets.data() ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSetsUnique" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSetsUnique" ); std::vector, DescriptorSetAllocator> uniqueDescriptorSets( descriptorSetAllocator ); uniqueDescriptorSets.reserve( allocateInfo.descriptorSetCount ); detail::PoolFree deleter( *this, allocateInfo.descriptorPool, d ); - detail::PoolFree deleter( *this, allocateInfo.descriptorPool, d ); for ( auto const & descriptorSet : descriptorSets ) { uniqueDescriptorSets.push_back( UniqueHandle( descriptorSet, deleter ) ); } return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( uniqueDescriptorSets ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( uniqueDescriptorSets ) ); } # 
endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -4522,10 +4131,6 @@ namespace VULKAN_HPP_NAMESPACE static_cast( descriptorPool ), descriptorSetCount, reinterpret_cast( pDescriptorSets ) ) ); - return static_cast( d.vkFreeDescriptorSets( static_cast( m_device ), - static_cast( descriptorPool ), - descriptorSetCount, - reinterpret_cast( pDescriptorSets ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -4555,10 +4160,6 @@ namespace VULKAN_HPP_NAMESPACE static_cast( descriptorPool ), descriptorSetCount, reinterpret_cast( pDescriptorSets ) ) ); - return static_cast( d.vkFreeDescriptorSets( static_cast( m_device ), - static_cast( descriptorPool ), - descriptorSetCount, - reinterpret_cast( pDescriptorSets ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -4585,7 +4186,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkUpdateDescriptorSets( static_cast( m_device ), d.vkUpdateDescriptorSets( static_cast( m_device ), descriptorWriteCount, reinterpret_cast( pDescriptorWrites ), @@ -4620,7 +4220,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateFramebuffer( static_cast( m_device ), return static_cast( d.vkCreateFramebuffer( static_cast( m_device ), reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), @@ -4646,10 +4245,8 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &framebuffer ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createFramebuffer" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createFramebuffer" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( framebuffer ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( framebuffer ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -4671,12 +4268,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &framebuffer ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createFramebufferUnique" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createFramebufferUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, UniqueHandle( framebuffer, detail::ObjectDestroy( *this, allocator, d ) ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( framebuffer, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -4689,8 +4283,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkDestroyFramebuffer( static_cast( m_device ), static_cast( framebuffer ), reinterpret_cast( pAllocator ) ); - d.vkDestroyFramebuffer( - static_cast( m_device ), static_cast( framebuffer ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -4718,8 +4310,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkDestroyFramebuffer( static_cast( m_device ), static_cast( framebuffer ), reinterpret_cast( pAllocator ) ); - d.vkDestroyFramebuffer( - static_cast( m_device ), static_cast( 
framebuffer ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -4746,7 +4336,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateRenderPass( static_cast( m_device ), return static_cast( d.vkCreateRenderPass( static_cast( m_device ), reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), @@ -4772,10 +4361,8 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &renderPass ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( renderPass ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( renderPass ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -4797,12 +4384,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &renderPass ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPassUnique" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPassUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, UniqueHandle( renderPass, detail::ObjectDestroy( *this, allocator, d ) ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( renderPass, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -4815,8 +4399,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkDestroyRenderPass( static_cast( m_device ), static_cast( renderPass ), reinterpret_cast( pAllocator ) ); - d.vkDestroyRenderPass( - static_cast( m_device ), static_cast( renderPass ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -4844,8 +4426,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkDestroyRenderPass( static_cast( m_device ), static_cast( renderPass ), reinterpret_cast( pAllocator ) ); - d.vkDestroyRenderPass( - static_cast( m_device ), static_cast( renderPass ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -4872,7 +4452,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkGetRenderAreaGranularity( static_cast( m_device ), static_cast( renderPass ), reinterpret_cast( pGranularity ) ); - d.vkGetRenderAreaGranularity( static_cast( m_device ), static_cast( renderPass ), reinterpret_cast( pGranularity ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -4899,7 +4478,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateCommandPool( static_cast( m_device ), return static_cast( d.vkCreateCommandPool( static_cast( m_device ), reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), @@ -4925,10 +4503,8 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &commandPool ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, 
VULKAN_HPP_NAMESPACE_STRING "::Device::createCommandPool" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCommandPool" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( commandPool ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( commandPool ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -4950,12 +4526,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &commandPool ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCommandPoolUnique" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCommandPoolUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, UniqueHandle( commandPool, detail::ObjectDestroy( *this, allocator, d ) ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( commandPool, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -4968,8 +4541,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkDestroyCommandPool( static_cast( m_device ), static_cast( commandPool ), reinterpret_cast( pAllocator ) ); - d.vkDestroyCommandPool( - static_cast( m_device ), static_cast( commandPool ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -4997,8 +4568,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkDestroyCommandPool( static_cast( m_device ), static_cast( commandPool ), reinterpret_cast( pAllocator ) ); - d.vkDestroyCommandPool( - static_cast( m_device ), static_cast( commandPool ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -5027,8 +4596,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( d.vkResetCommandPool( static_cast( m_device ), static_cast( commandPool ), static_cast( flags ) ) ); - return static_cast( - d.vkResetCommandPool( static_cast( m_device ), static_cast( commandPool ), static_cast( flags ) ) ); } #else template @@ -5043,10 +4610,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkResetCommandPool( m_device, static_cast( commandPool ), static_cast( flags ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::resetCommandPool" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::resetCommandPool" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ @@ -5059,15 +4624,9 @@ namespace VULKAN_HPP_NAMESPACE return static_cast( d.vkAllocateCommandBuffers( static_cast( m_device ), reinterpret_cast( pAllocateInfo ), reinterpret_cast( pCommandBuffers ) ) ); - return static_cast( d.vkAllocateCommandBuffers( static_cast( m_device ), - reinterpret_cast( pAllocateInfo ), - reinterpret_cast( pCommandBuffers ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template ::value, int>::type> template ::value, int>::type> @@ -5083,10 +4642,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkAllocateCommandBuffers( m_device, reinterpret_cast( 
&allocateInfo ), reinterpret_cast( commandBuffers.data() ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffers" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffers" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( commandBuffers ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( commandBuffers ) ); } template ( d.vkAllocateCommandBuffers( m_device, reinterpret_cast( &allocateInfo ), reinterpret_cast( commandBuffers.data() ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffers" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffers" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( commandBuffers ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( commandBuffers ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE - template < - typename Dispatch, - typename CommandBufferAllocator, - typename std::enable_if>::value, - int>::type> template < typename Dispatch, typename CommandBufferAllocator, @@ -5136,17 +4686,14 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkAllocateCommandBuffers( m_device, reinterpret_cast( &allocateInfo ), reinterpret_cast( commandBuffers.data() ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffersUnique" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffersUnique" ); std::vector, CommandBufferAllocator> uniqueCommandBuffers; uniqueCommandBuffers.reserve( allocateInfo.commandBufferCount ); detail::PoolFree deleter( *this, allocateInfo.commandPool, d ); - detail::PoolFree deleter( *this, allocateInfo.commandPool, d ); for ( auto const & commandBuffer : commandBuffers ) { uniqueCommandBuffers.push_back( UniqueHandle( commandBuffer, deleter ) ); } return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( uniqueCommandBuffers ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( uniqueCommandBuffers ) ); } template < @@ -5169,17 +4716,14 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkAllocateCommandBuffers( m_device, reinterpret_cast( &allocateInfo ), reinterpret_cast( commandBuffers.data() ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffersUnique" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffersUnique" ); std::vector, CommandBufferAllocator> uniqueCommandBuffers( commandBufferAllocator ); uniqueCommandBuffers.reserve( allocateInfo.commandBufferCount ); detail::PoolFree deleter( *this, allocateInfo.commandPool, d ); - detail::PoolFree deleter( *this, allocateInfo.commandPool, d ); for ( auto const & commandBuffer : commandBuffers ) { uniqueCommandBuffers.push_back( UniqueHandle( commandBuffer, deleter ) ); } return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( uniqueCommandBuffers ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( uniqueCommandBuffers ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE 
*/ @@ -5195,10 +4739,6 @@ namespace VULKAN_HPP_NAMESPACE static_cast( commandPool ), commandBufferCount, reinterpret_cast( pCommandBuffers ) ); - d.vkFreeCommandBuffers( static_cast( m_device ), - static_cast( commandPool ), - commandBufferCount, - reinterpret_cast( pCommandBuffers ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -5228,10 +4768,6 @@ namespace VULKAN_HPP_NAMESPACE static_cast( commandPool ), commandBufferCount, reinterpret_cast( pCommandBuffers ) ); - d.vkFreeCommandBuffers( static_cast( m_device ), - static_cast( commandPool ), - commandBufferCount, - reinterpret_cast( pCommandBuffers ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -5257,8 +4793,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( d.vkBeginCommandBuffer( static_cast( m_commandBuffer ), reinterpret_cast( pBeginInfo ) ) ); - return static_cast( - d.vkBeginCommandBuffer( static_cast( m_commandBuffer ), reinterpret_cast( pBeginInfo ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -5274,10 +4808,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkBeginCommandBuffer( m_commandBuffer, reinterpret_cast( &beginInfo ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::begin" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::begin" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -5287,7 +4819,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( d.vkEndCommandBuffer( static_cast( m_commandBuffer ) ) ); - return static_cast( d.vkEndCommandBuffer( static_cast( m_commandBuffer ) ) ); } #else template @@ -5300,10 +4831,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkEndCommandBuffer( m_commandBuffer ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::end" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::end" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ @@ -5314,7 +4843,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( d.vkResetCommandBuffer( static_cast( m_commandBuffer ), static_cast( flags ) ) ); - return static_cast( d.vkResetCommandBuffer( static_cast( m_commandBuffer ), static_cast( flags ) ) ); } #else template @@ -5328,10 +4856,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkResetCommandBuffer( m_commandBuffer, static_cast( flags ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::reset" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::reset" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ @@ -5343,8 +4869,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdBindPipeline( static_cast( 
m_commandBuffer ), static_cast( pipelineBindPoint ), static_cast( pipeline ) ); - d.vkCmdBindPipeline( - static_cast( m_commandBuffer ), static_cast( pipelineBindPoint ), static_cast( pipeline ) ); } template @@ -5355,7 +4879,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdSetViewport( static_cast( m_commandBuffer ), firstViewport, viewportCount, reinterpret_cast( pViewports ) ); - d.vkCmdSetViewport( static_cast( m_commandBuffer ), firstViewport, viewportCount, reinterpret_cast( pViewports ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -5381,7 +4904,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdSetScissor( static_cast( m_commandBuffer ), firstScissor, scissorCount, reinterpret_cast( pScissors ) ); - d.vkCmdSetScissor( static_cast( m_commandBuffer ), firstScissor, scissorCount, reinterpret_cast( pScissors ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -5404,7 +4926,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdSetLineWidth( static_cast( m_commandBuffer ), lineWidth ); - d.vkCmdSetLineWidth( static_cast( m_commandBuffer ), lineWidth ); } template @@ -5413,7 +4934,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdSetDepthBias( static_cast( m_commandBuffer ), depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor ); - d.vkCmdSetDepthBias( static_cast( m_commandBuffer ), depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor ); } template @@ -5421,7 +4941,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdSetBlendConstants( static_cast( m_commandBuffer ), blendConstants ); - d.vkCmdSetBlendConstants( static_cast( m_commandBuffer ), blendConstants ); } template @@ -5429,7 +4948,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdSetDepthBounds( static_cast( m_commandBuffer ), minDepthBounds, maxDepthBounds ); - d.vkCmdSetDepthBounds( static_cast( m_commandBuffer ), minDepthBounds, maxDepthBounds ); } template @@ -5438,7 +4956,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdSetStencilCompareMask( static_cast( m_commandBuffer ), static_cast( faceMask ), compareMask ); - d.vkCmdSetStencilCompareMask( static_cast( m_commandBuffer ), static_cast( faceMask ), compareMask ); } template @@ -5447,7 +4964,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdSetStencilWriteMask( static_cast( m_commandBuffer ), static_cast( faceMask ), writeMask ); - d.vkCmdSetStencilWriteMask( static_cast( m_commandBuffer ), static_cast( faceMask ), writeMask ); } template @@ -5456,7 +4972,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdSetStencilReference( static_cast( m_commandBuffer ), static_cast( faceMask ), reference ); - d.vkCmdSetStencilReference( static_cast( m_commandBuffer ), static_cast( faceMask ), reference ); } template @@ -5470,7 +4985,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdBindDescriptorSets( static_cast( m_commandBuffer ), d.vkCmdBindDescriptorSets( static_cast( m_commandBuffer ), static_cast( pipelineBindPoint ), 
static_cast( layout ), @@ -5517,10 +5031,6 @@ namespace VULKAN_HPP_NAMESPACE static_cast( buffer ), static_cast( offset ), static_cast( indexType ) ); - d.vkCmdBindIndexBuffer( static_cast( m_commandBuffer ), - static_cast( buffer ), - static_cast( offset ), - static_cast( indexType ) ); } template @@ -5536,11 +5046,6 @@ namespace VULKAN_HPP_NAMESPACE bindingCount, reinterpret_cast( pBuffers ), reinterpret_cast( pOffsets ) ); - d.vkCmdBindVertexBuffers( static_cast( m_commandBuffer ), - firstBinding, - bindingCount, - reinterpret_cast( pBuffers ), - reinterpret_cast( pOffsets ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -5577,7 +5082,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdDraw( static_cast( m_commandBuffer ), vertexCount, instanceCount, firstVertex, firstInstance ); - d.vkCmdDraw( static_cast( m_commandBuffer ), vertexCount, instanceCount, firstVertex, firstInstance ); } template @@ -5590,7 +5094,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdDrawIndexed( static_cast( m_commandBuffer ), indexCount, instanceCount, firstIndex, vertexOffset, firstInstance ); - d.vkCmdDrawIndexed( static_cast( m_commandBuffer ), indexCount, instanceCount, firstIndex, vertexOffset, firstInstance ); } template @@ -5603,8 +5106,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdDrawIndirect( static_cast( m_commandBuffer ), static_cast( buffer ), static_cast( offset ), drawCount, stride ); - d.vkCmdDrawIndirect( - static_cast( m_commandBuffer ), static_cast( buffer ), static_cast( offset ), drawCount, stride ); } template @@ -5617,8 +5118,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdDrawIndexedIndirect( static_cast( m_commandBuffer ), static_cast( buffer ), static_cast( offset ), drawCount, stride ); - d.vkCmdDrawIndexedIndirect( - static_cast( m_commandBuffer ), static_cast( buffer ), static_cast( offset ), drawCount, stride ); } template @@ -5627,7 +5126,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdDispatch( static_cast( m_commandBuffer ), groupCountX, groupCountY, groupCountZ ); - d.vkCmdDispatch( static_cast( m_commandBuffer ), groupCountX, groupCountY, groupCountZ ); } template @@ -5637,7 +5135,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdDispatchIndirect( static_cast( m_commandBuffer ), static_cast( buffer ), static_cast( offset ) ); - d.vkCmdDispatchIndirect( static_cast( m_commandBuffer ), static_cast( buffer ), static_cast( offset ) ); } template @@ -5648,7 +5145,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdCopyBuffer( static_cast( m_commandBuffer ), d.vkCmdCopyBuffer( static_cast( m_commandBuffer ), static_cast( srcBuffer ), static_cast( dstBuffer ), @@ -5686,7 +5182,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdCopyImage( static_cast( m_commandBuffer ), d.vkCmdCopyImage( static_cast( m_commandBuffer ), static_cast( srcImage ), static_cast( srcImageLayout ), @@ -5731,7 +5226,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( 
d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdBlitImage( static_cast( m_commandBuffer ), d.vkCmdBlitImage( static_cast( m_commandBuffer ), static_cast( srcImage ), static_cast( srcImageLayout ), @@ -5777,7 +5271,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdCopyBufferToImage( static_cast( m_commandBuffer ), d.vkCmdCopyBufferToImage( static_cast( m_commandBuffer ), static_cast( srcBuffer ), static_cast( dstImage ), @@ -5817,7 +5310,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdCopyImageToBuffer( static_cast( m_commandBuffer ), d.vkCmdCopyImageToBuffer( static_cast( m_commandBuffer ), static_cast( srcImage ), static_cast( srcImageLayout ), @@ -5861,11 +5353,6 @@ namespace VULKAN_HPP_NAMESPACE static_cast( dstOffset ), static_cast( dataSize ), pData ); - d.vkCmdUpdateBuffer( static_cast( m_commandBuffer ), - static_cast( dstBuffer ), - static_cast( dstOffset ), - static_cast( dataSize ), - pData ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -5901,11 +5388,6 @@ namespace VULKAN_HPP_NAMESPACE static_cast( dstOffset ), static_cast( size ), data ); - d.vkCmdFillBuffer( static_cast( m_commandBuffer ), - static_cast( dstBuffer ), - static_cast( dstOffset ), - static_cast( size ), - data ); } template @@ -5917,7 +5399,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdClearColorImage( static_cast( m_commandBuffer ), d.vkCmdClearColorImage( static_cast( m_commandBuffer ), static_cast( image ), static_cast( imageLayout ), @@ -5957,7 +5438,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdClearDepthStencilImage( static_cast( m_commandBuffer ), d.vkCmdClearDepthStencilImage( static_cast( m_commandBuffer ), static_cast( image ), static_cast( imageLayout ), @@ -5997,7 +5477,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdClearAttachments( static_cast( m_commandBuffer ), d.vkCmdClearAttachments( static_cast( m_commandBuffer ), attachmentCount, reinterpret_cast( pAttachments ), @@ -6034,7 +5513,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdResolveImage( static_cast( m_commandBuffer ), d.vkCmdResolveImage( static_cast( m_commandBuffer ), static_cast( srcImage ), static_cast( srcImageLayout ), @@ -6075,7 +5553,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdSetEvent( static_cast( m_commandBuffer ), static_cast( event ), static_cast( stageMask ) ); - d.vkCmdSetEvent( static_cast( m_commandBuffer ), static_cast( event ), static_cast( stageMask ) ); } template @@ -6085,7 +5562,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdResetEvent( static_cast( m_commandBuffer ), static_cast( event ), static_cast( stageMask ) ); - d.vkCmdResetEvent( static_cast( m_commandBuffer ), static_cast( event ), static_cast( stageMask ) ); } template @@ -6102,7 +5578,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const 
VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdWaitEvents( static_cast( m_commandBuffer ), d.vkCmdWaitEvents( static_cast( m_commandBuffer ), eventCount, reinterpret_cast( pEvents ), @@ -6159,7 +5634,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdPipelineBarrier( static_cast( m_commandBuffer ), d.vkCmdPipelineBarrier( static_cast( m_commandBuffer ), static_cast( srcStageMask ), static_cast( dstStageMask ), @@ -6210,8 +5684,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdBeginQuery( static_cast( m_commandBuffer ), static_cast( queryPool ), query, static_cast( flags ) ); - d.vkCmdBeginQuery( - static_cast( m_commandBuffer ), static_cast( queryPool ), query, static_cast( flags ) ); } template @@ -6219,7 +5691,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdEndQuery( static_cast( m_commandBuffer ), static_cast( queryPool ), query ); - d.vkCmdEndQuery( static_cast( m_commandBuffer ), static_cast( queryPool ), query ); } template @@ -6230,7 +5701,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdResetQueryPool( static_cast( m_commandBuffer ), static_cast( queryPool ), firstQuery, queryCount ); - d.vkCmdResetQueryPool( static_cast( m_commandBuffer ), static_cast( queryPool ), firstQuery, queryCount ); } template @@ -6242,8 +5712,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdWriteTimestamp( static_cast( m_commandBuffer ), static_cast( pipelineStage ), static_cast( queryPool ), query ); - d.vkCmdWriteTimestamp( - static_cast( m_commandBuffer ), static_cast( pipelineStage ), static_cast( queryPool ), query ); } template @@ -6257,7 +5725,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdCopyQueryPoolResults( static_cast( m_commandBuffer ), d.vkCmdCopyQueryPoolResults( static_cast( m_commandBuffer ), static_cast( queryPool ), firstQuery, @@ -6283,12 +5750,6 @@ namespace VULKAN_HPP_NAMESPACE offset, size, pValues ); - d.vkCmdPushConstants( static_cast( m_commandBuffer ), - static_cast( layout ), - static_cast( stageFlags ), - offset, - size, - pValues ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -6322,9 +5783,6 @@ namespace VULKAN_HPP_NAMESPACE d.vkCmdBeginRenderPass( static_cast( m_commandBuffer ), reinterpret_cast( pRenderPassBegin ), static_cast( contents ) ); - d.vkCmdBeginRenderPass( static_cast( m_commandBuffer ), - reinterpret_cast( pRenderPassBegin ), - static_cast( contents ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -6347,7 +5805,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdNextSubpass( static_cast( m_commandBuffer ), static_cast( contents ) ); - d.vkCmdNextSubpass( static_cast( m_commandBuffer ), static_cast( contents ) ); } template @@ -6355,7 +5812,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdEndRenderPass( static_cast( m_commandBuffer ) ); - d.vkCmdEndRenderPass( static_cast( m_commandBuffer ) ); } template @@ -6365,7 +5821,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); 
d.vkCmdExecuteCommands( static_cast( m_commandBuffer ), commandBufferCount, reinterpret_cast( pCommandBuffers ) ); - d.vkCmdExecuteCommands( static_cast( m_commandBuffer ), commandBufferCount, reinterpret_cast( pCommandBuffers ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -6403,10 +5858,8 @@ namespace VULKAN_HPP_NAMESPACE uint32_t apiVersion; VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkEnumerateInstanceVersion( &apiVersion ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceVersion" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceVersion" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( apiVersion ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( apiVersion ) ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -6418,8 +5871,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( d.vkBindBufferMemory2( static_cast( m_device ), bindInfoCount, reinterpret_cast( pBindInfos ) ) ); - return static_cast( - d.vkBindBufferMemory2( static_cast( m_device ), bindInfoCount, reinterpret_cast( pBindInfos ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -6435,10 +5886,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkBindBufferMemory2( m_device, bindInfos.size(), reinterpret_cast( bindInfos.data() ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory2" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory2" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -6450,8 +5899,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( d.vkBindImageMemory2( static_cast( m_device ), bindInfoCount, reinterpret_cast( pBindInfos ) ) ); - return static_cast( - d.vkBindImageMemory2( static_cast( m_device ), bindInfoCount, reinterpret_cast( pBindInfos ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -6467,10 +5914,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkBindImageMemory2( m_device, bindInfos.size(), reinterpret_cast( bindInfos.data() ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory2" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory2" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -6484,7 +5929,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkGetDeviceGroupPeerMemoryFeatures( static_cast( m_device ), heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast( pPeerMemoryFeatures ) ); - static_cast( m_device ), heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast( pPeerMemoryFeatures ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -6511,7 +5955,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdSetDeviceMask( static_cast( m_commandBuffer ), 
deviceMask ); - d.vkCmdSetDeviceMask( static_cast( m_commandBuffer ), deviceMask ); } template @@ -6525,7 +5968,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdDispatchBase( static_cast( m_commandBuffer ), baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ ); - d.vkCmdDispatchBase( static_cast( m_commandBuffer ), baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ ); } template @@ -6538,17 +5980,9 @@ namespace VULKAN_HPP_NAMESPACE return static_cast( d.vkEnumeratePhysicalDeviceGroups( static_cast( m_instance ), pPhysicalDeviceGroupCount, reinterpret_cast( pPhysicalDeviceGroupProperties ) ) ); - return static_cast( d.vkEnumeratePhysicalDeviceGroups( static_cast( m_instance ), - pPhysicalDeviceGroupCount, - reinterpret_cast( pPhysicalDeviceGroupProperties ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template ::value, - int>::type> template ( m_device ), reinterpret_cast( pInfo ), reinterpret_cast( pMemoryRequirements ) ); - d.vkGetImageMemoryRequirements2( static_cast( m_device ), - reinterpret_cast( pInfo ), - reinterpret_cast( pMemoryRequirements ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -6689,9 +6116,6 @@ namespace VULKAN_HPP_NAMESPACE d.vkGetBufferMemoryRequirements2( static_cast( m_device ), reinterpret_cast( pInfo ), reinterpret_cast( pMemoryRequirements ) ); - d.vkGetBufferMemoryRequirements2( static_cast( m_device ), - reinterpret_cast( pInfo ), - reinterpret_cast( pMemoryRequirements ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -6738,7 +6162,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetImageSparseMemoryRequirements2( static_cast( m_device ), d.vkGetImageSparseMemoryRequirements2( static_cast( m_device ), reinterpret_cast( pInfo ), pSparseMemoryRequirementCount, @@ -6746,11 +6169,6 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template ::value, - int>::type> template ( m_physicalDevice ), reinterpret_cast( pFeatures ) ); - d.vkGetPhysicalDeviceFeatures2( static_cast( m_physicalDevice ), reinterpret_cast( pFeatures ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -6868,7 +6285,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkGetPhysicalDeviceProperties2( static_cast( m_physicalDevice ), reinterpret_cast( pProperties ) ); - d.vkGetPhysicalDeviceProperties2( static_cast( m_physicalDevice ), reinterpret_cast( pProperties ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -6914,8 +6330,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkGetPhysicalDeviceFormatProperties2( static_cast( m_physicalDevice ), static_cast( format ), reinterpret_cast( pFormatProperties ) ); - d.vkGetPhysicalDeviceFormatProperties2( - static_cast( m_physicalDevice ), static_cast( format ), reinterpret_cast( pFormatProperties ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -6960,7 +6374,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetPhysicalDeviceImageFormatProperties2( static_cast( m_physicalDevice ), return static_cast( d.vkGetPhysicalDeviceImageFormatProperties2( static_cast( m_physicalDevice ), reinterpret_cast( pImageFormatInfo ), reinterpret_cast( pImageFormatProperties ) ) ); @@ -6983,10 
+6396,8 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &imageFormatInfo ), reinterpret_cast( &imageFormatProperties ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( imageFormatProperties ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( imageFormatProperties ) ); } template @@ -7006,10 +6417,8 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &imageFormatInfo ), reinterpret_cast( &imageFormatProperties ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( structureChain ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( structureChain ) ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -7021,14 +6430,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkGetPhysicalDeviceQueueFamilyProperties2( static_cast( m_physicalDevice ), pQueueFamilyPropertyCount, reinterpret_cast( pQueueFamilyProperties ) ); - static_cast( m_physicalDevice ), pQueueFamilyPropertyCount, reinterpret_cast( pQueueFamilyProperties ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template < - typename QueueFamilyProperties2Allocator, - typename Dispatch, - typename std::enable_if::value, int>::type> template < typename QueueFamilyProperties2Allocator, typename Dispatch, @@ -7085,10 +6489,6 @@ namespace VULKAN_HPP_NAMESPACE return queueFamilyProperties; } - template ::value, int>::type> template ( m_physicalDevice ), reinterpret_cast( pMemoryProperties ) ); - d.vkGetPhysicalDeviceMemoryProperties2( static_cast( m_physicalDevice ), - reinterpret_cast( pMemoryProperties ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -7220,7 +6618,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetPhysicalDeviceSparseImageFormatProperties2( static_cast( m_physicalDevice ), d.vkGetPhysicalDeviceSparseImageFormatProperties2( static_cast( m_physicalDevice ), reinterpret_cast( pFormatInfo ), pPropertyCount, @@ -7228,11 +6625,6 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template < - typename SparseImageFormatProperties2Allocator, - typename Dispatch, - typename std::enable_if::value, - int>::type> template < typename SparseImageFormatProperties2Allocator, typename Dispatch, @@ -7307,7 +6699,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkTrimCommandPool( static_cast( m_device ), static_cast( commandPool ), static_cast( flags ) ); - d.vkTrimCommandPool( static_cast( m_device ), static_cast( commandPool ), static_cast( flags ) ); } template @@ -7317,7 +6708,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkGetDeviceQueue2( static_cast( m_device ), reinterpret_cast( pQueueInfo ), reinterpret_cast( pQueue ) ); - d.vkGetDeviceQueue2( static_cast( m_device ), reinterpret_cast( 
pQueueInfo ), reinterpret_cast( pQueue ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -7345,7 +6735,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateSamplerYcbcrConversion( static_cast( m_device ), return static_cast( d.vkCreateSamplerYcbcrConversion( static_cast( m_device ), reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), @@ -7372,10 +6761,8 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &ycbcrConversion ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversion" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversion" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( ycbcrConversion ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( ycbcrConversion ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -7398,14 +6785,10 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &ycbcrConversion ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversionUnique" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversionUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, UniqueHandle( ycbcrConversion, detail::ObjectDestroy( *this, allocator, d ) ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, - UniqueHandle( ycbcrConversion, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -7419,9 +6802,6 @@ namespace VULKAN_HPP_NAMESPACE d.vkDestroySamplerYcbcrConversion( static_cast( m_device ), static_cast( ycbcrConversion ), reinterpret_cast( pAllocator ) ); - d.vkDestroySamplerYcbcrConversion( static_cast( m_device ), - static_cast( ycbcrConversion ), - reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -7452,9 +6832,6 @@ namespace VULKAN_HPP_NAMESPACE d.vkDestroySamplerYcbcrConversion( static_cast( m_device ), static_cast( ycbcrConversion ), reinterpret_cast( pAllocator ) ); - d.vkDestroySamplerYcbcrConversion( static_cast( m_device ), - static_cast( ycbcrConversion ), - reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -7484,7 +6861,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateDescriptorUpdateTemplate( static_cast( m_device ), return static_cast( d.vkCreateDescriptorUpdateTemplate( static_cast( m_device ), reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), @@ -7511,10 +6887,8 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &descriptorUpdateTemplate ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplate" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplate" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( descriptorUpdateTemplate ) ); - return 
VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( descriptorUpdateTemplate ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -7537,14 +6911,10 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &descriptorUpdateTemplate ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplateUnique" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplateUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, UniqueHandle( descriptorUpdateTemplate, detail::ObjectDestroy( *this, allocator, d ) ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, - UniqueHandle( - descriptorUpdateTemplate, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -7558,9 +6928,6 @@ namespace VULKAN_HPP_NAMESPACE d.vkDestroyDescriptorUpdateTemplate( static_cast( m_device ), static_cast( descriptorUpdateTemplate ), reinterpret_cast( pAllocator ) ); - d.vkDestroyDescriptorUpdateTemplate( static_cast( m_device ), - static_cast( descriptorUpdateTemplate ), - reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -7591,9 +6958,6 @@ namespace VULKAN_HPP_NAMESPACE d.vkDestroyDescriptorUpdateTemplate( static_cast( m_device ), static_cast( descriptorUpdateTemplate ), reinterpret_cast( pAllocator ) ); - d.vkDestroyDescriptorUpdateTemplate( static_cast( m_device ), - static_cast( descriptorUpdateTemplate ), - reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -7626,10 +6990,6 @@ namespace VULKAN_HPP_NAMESPACE static_cast( descriptorSet ), static_cast( descriptorUpdateTemplate ), pData ); - d.vkUpdateDescriptorSetWithTemplate( static_cast( m_device ), - static_cast( descriptorSet ), - static_cast( descriptorUpdateTemplate ), - pData ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -7658,7 +7018,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetPhysicalDeviceExternalBufferProperties( static_cast( m_physicalDevice ), d.vkGetPhysicalDeviceExternalBufferProperties( static_cast( m_physicalDevice ), reinterpret_cast( pExternalBufferInfo ), reinterpret_cast( pExternalBufferProperties ) ); @@ -7691,7 +7050,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetPhysicalDeviceExternalFenceProperties( static_cast( m_physicalDevice ), d.vkGetPhysicalDeviceExternalFenceProperties( static_cast( m_physicalDevice ), reinterpret_cast( pExternalFenceInfo ), reinterpret_cast( pExternalFenceProperties ) ); @@ -7725,7 +7083,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetPhysicalDeviceExternalSemaphoreProperties( static_cast( m_physicalDevice ), d.vkGetPhysicalDeviceExternalSemaphoreProperties( static_cast( m_physicalDevice ), reinterpret_cast( pExternalSemaphoreInfo ), reinterpret_cast( pExternalSemaphoreProperties ) ); @@ -7761,9 +7118,6 @@ namespace VULKAN_HPP_NAMESPACE d.vkGetDescriptorSetLayoutSupport( static_cast( m_device ), reinterpret_cast( pCreateInfo ), reinterpret_cast( pSupport ) ); - d.vkGetDescriptorSetLayoutSupport( static_cast( m_device ), 
- reinterpret_cast( pCreateInfo ), - reinterpret_cast( pSupport ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -7815,7 +7169,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdDrawIndirectCount( static_cast( m_commandBuffer ), d.vkCmdDrawIndirectCount( static_cast( m_commandBuffer ), static_cast( buffer ), static_cast( offset ), @@ -7835,7 +7188,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdDrawIndexedIndirectCount( static_cast( m_commandBuffer ), d.vkCmdDrawIndexedIndirectCount( static_cast( m_commandBuffer ), static_cast( buffer ), static_cast( offset ), @@ -7852,7 +7204,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateRenderPass2( static_cast( m_device ), return static_cast( d.vkCreateRenderPass2( static_cast( m_device ), reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), @@ -7878,10 +7229,8 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &renderPass ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( renderPass ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( renderPass ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -7903,12 +7252,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &renderPass ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2Unique" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2Unique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, UniqueHandle( renderPass, detail::ObjectDestroy( *this, allocator, d ) ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( renderPass, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -7922,9 +7268,6 @@ namespace VULKAN_HPP_NAMESPACE d.vkCmdBeginRenderPass2( static_cast( m_commandBuffer ), reinterpret_cast( pRenderPassBegin ), reinterpret_cast( pSubpassBeginInfo ) ); - d.vkCmdBeginRenderPass2( static_cast( m_commandBuffer ), - reinterpret_cast( pRenderPassBegin ), - reinterpret_cast( pSubpassBeginInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -7952,9 +7295,6 @@ namespace VULKAN_HPP_NAMESPACE d.vkCmdNextSubpass2( static_cast( m_commandBuffer ), reinterpret_cast( pSubpassBeginInfo ), reinterpret_cast( pSubpassEndInfo ) ); - d.vkCmdNextSubpass2( static_cast( m_commandBuffer ), - reinterpret_cast( pSubpassBeginInfo ), - reinterpret_cast( pSubpassEndInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -7979,7 +7319,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdEndRenderPass2( static_cast( m_commandBuffer ), reinterpret_cast( pSubpassEndInfo ) ); - d.vkCmdEndRenderPass2( static_cast( m_commandBuffer ), reinterpret_cast( pSubpassEndInfo 
) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -8002,7 +7341,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkResetQueryPool( static_cast( m_device ), static_cast( queryPool ), firstQuery, queryCount ); - d.vkResetQueryPool( static_cast( m_device ), static_cast( queryPool ), firstQuery, queryCount ); } template @@ -8012,7 +7350,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( d.vkGetSemaphoreCounterValue( static_cast( m_device ), static_cast( semaphore ), pValue ) ); - return static_cast( d.vkGetSemaphoreCounterValue( static_cast( m_device ), static_cast( semaphore ), pValue ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -8029,10 +7366,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetSemaphoreCounterValue( m_device, static_cast( semaphore ), &value ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreCounterValue" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreCounterValue" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( value ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( value ) ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -8043,7 +7378,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( d.vkWaitSemaphores( static_cast( m_device ), reinterpret_cast( pWaitInfo ), timeout ) ); - return static_cast( d.vkWaitSemaphores( static_cast( m_device ), reinterpret_cast( pWaitInfo ), timeout ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -8058,7 +7392,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkWaitSemaphores( m_device, reinterpret_cast( &waitInfo ), timeout ) ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::waitSemaphores", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout } ); @@ -8072,7 +7405,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( d.vkSignalSemaphore( static_cast( m_device ), reinterpret_cast( pSignalInfo ) ) ); - return static_cast( d.vkSignalSemaphore( static_cast( m_device ), reinterpret_cast( pSignalInfo ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -8088,10 +7420,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkSignalSemaphore( m_device, reinterpret_cast( &signalInfo ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::signalSemaphore" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::signalSemaphore" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -8102,8 +7432,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( d.vkGetBufferDeviceAddress( static_cast( m_device ), reinterpret_cast( pInfo ) ) ); - return static_cast( - d.vkGetBufferDeviceAddress( static_cast( m_device ), reinterpret_cast( pInfo ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ 
-8129,7 +7457,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return d.vkGetBufferOpaqueCaptureAddress( static_cast( m_device ), reinterpret_cast( pInfo ) ); - return d.vkGetBufferOpaqueCaptureAddress( static_cast( m_device ), reinterpret_cast( pInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -8156,8 +7483,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return d.vkGetDeviceMemoryOpaqueCaptureAddress( static_cast( m_device ), reinterpret_cast( pInfo ) ); - return d.vkGetDeviceMemoryOpaqueCaptureAddress( static_cast( m_device ), - reinterpret_cast( pInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -8187,16 +7512,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( d.vkGetPhysicalDeviceToolProperties( static_cast( m_physicalDevice ), pToolCount, reinterpret_cast( pToolProperties ) ) ); - return static_cast( d.vkGetPhysicalDeviceToolProperties( - static_cast( m_physicalDevice ), pToolCount, reinterpret_cast( pToolProperties ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template < - typename PhysicalDeviceToolPropertiesAllocator, - typename Dispatch, - typename std::enable_if::value, - int>::type> template < typename PhysicalDeviceToolPropertiesAllocator, typename Dispatch, @@ -8226,14 +7544,12 @@ namespace VULKAN_HPP_NAMESPACE } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolProperties" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolProperties" ); VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() ); if ( toolCount < toolProperties.size() ) { toolProperties.resize( toolCount ); } return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( toolProperties ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( toolProperties ) ); } template < @@ -8266,14 +7582,12 @@ namespace VULKAN_HPP_NAMESPACE } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolProperties" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolProperties" ); VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() ); if ( toolCount < toolProperties.size() ) { toolProperties.resize( toolCount ); } return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( toolProperties ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( toolProperties ) ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -8284,7 +7598,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreatePrivateDataSlot( static_cast( m_device ), return static_cast( d.vkCreatePrivateDataSlot( static_cast( m_device ), reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), @@ -8310,10 +7623,8 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &privateDataSlot ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createPrivateDataSlot" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, 
VULKAN_HPP_NAMESPACE_STRING "::Device::createPrivateDataSlot" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( privateDataSlot ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( privateDataSlot ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -8335,14 +7646,10 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &privateDataSlot ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createPrivateDataSlotUnique" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createPrivateDataSlotUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, UniqueHandle( privateDataSlot, detail::ObjectDestroy( *this, allocator, d ) ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, - UniqueHandle( privateDataSlot, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -8355,8 +7662,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkDestroyPrivateDataSlot( static_cast( m_device ), static_cast( privateDataSlot ), reinterpret_cast( pAllocator ) ); - d.vkDestroyPrivateDataSlot( - static_cast( m_device ), static_cast( privateDataSlot ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -8385,8 +7690,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkDestroyPrivateDataSlot( static_cast( m_device ), static_cast( privateDataSlot ), reinterpret_cast( pAllocator ) ); - d.vkDestroyPrivateDataSlot( - static_cast( m_device ), static_cast( privateDataSlot ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -8418,8 +7721,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( d.vkSetPrivateData( static_cast( m_device ), static_cast( objectType_ ), objectHandle, static_cast( privateDataSlot ), data ) ); - return static_cast( d.vkSetPrivateData( - static_cast( m_device ), static_cast( objectType_ ), objectHandle, static_cast( privateDataSlot ), data ) ); } #else template @@ -8437,10 +7738,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkSetPrivateData( m_device, static_cast( objectType_ ), objectHandle, static_cast( privateDataSlot ), data ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setPrivateData" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setPrivateData" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ @@ -8454,8 +7753,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkGetPrivateData( static_cast( m_device ), static_cast( objectType_ ), objectHandle, static_cast( privateDataSlot ), pData ); - d.vkGetPrivateData( - static_cast( m_device ), static_cast( objectType_ ), objectHandle, static_cast( privateDataSlot ), pData ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -8485,8 +7782,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdSetEvent2( static_cast( m_commandBuffer ), 
static_cast( event ), reinterpret_cast( pDependencyInfo ) ); - d.vkCmdSetEvent2( - static_cast( m_commandBuffer ), static_cast( event ), reinterpret_cast( pDependencyInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -8511,7 +7806,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdResetEvent2( static_cast( m_commandBuffer ), static_cast( event ), static_cast( stageMask ) ); - d.vkCmdResetEvent2( static_cast( m_commandBuffer ), static_cast( event ), static_cast( stageMask ) ); } template @@ -8525,10 +7819,6 @@ namespace VULKAN_HPP_NAMESPACE eventCount, reinterpret_cast( pEvents ), reinterpret_cast( pDependencyInfos ) ); - d.vkCmdWaitEvents2( static_cast( m_commandBuffer ), - eventCount, - reinterpret_cast( pEvents ), - reinterpret_cast( pDependencyInfos ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -8563,7 +7853,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdPipelineBarrier2( static_cast( m_commandBuffer ), reinterpret_cast( pDependencyInfo ) ); - d.vkCmdPipelineBarrier2( static_cast( m_commandBuffer ), reinterpret_cast( pDependencyInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -8589,8 +7878,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdWriteTimestamp2( static_cast( m_commandBuffer ), static_cast( stage ), static_cast( queryPool ), query ); - d.vkCmdWriteTimestamp2( - static_cast( m_commandBuffer ), static_cast( stage ), static_cast( queryPool ), query ); } template @@ -8602,8 +7889,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( d.vkQueueSubmit2( static_cast( m_queue ), submitCount, reinterpret_cast( pSubmits ), static_cast( fence ) ) ); - return static_cast( - d.vkQueueSubmit2( static_cast( m_queue ), submitCount, reinterpret_cast( pSubmits ), static_cast( fence ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -8619,10 +7904,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkQueueSubmit2( m_queue, submits.size(), reinterpret_cast( submits.data() ), static_cast( fence ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::submit2" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::submit2" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -8632,7 +7915,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdCopyBuffer2( static_cast( m_commandBuffer ), reinterpret_cast( pCopyBufferInfo ) ); - d.vkCmdCopyBuffer2( static_cast( m_commandBuffer ), reinterpret_cast( pCopyBufferInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -8654,7 +7936,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdCopyImage2( static_cast( m_commandBuffer ), reinterpret_cast( pCopyImageInfo ) ); - d.vkCmdCopyImage2( static_cast( m_commandBuffer ), reinterpret_cast( pCopyImageInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -8676,7 +7957,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdCopyBufferToImage2( static_cast( m_commandBuffer ), reinterpret_cast( pCopyBufferToImageInfo ) ); - 
d.vkCmdCopyBufferToImage2( static_cast( m_commandBuffer ), reinterpret_cast( pCopyBufferToImageInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -8699,7 +7979,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdCopyImageToBuffer2( static_cast( m_commandBuffer ), reinterpret_cast( pCopyImageToBufferInfo ) ); - d.vkCmdCopyImageToBuffer2( static_cast( m_commandBuffer ), reinterpret_cast( pCopyImageToBufferInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -8721,7 +8000,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdBlitImage2( static_cast( m_commandBuffer ), reinterpret_cast( pBlitImageInfo ) ); - d.vkCmdBlitImage2( static_cast( m_commandBuffer ), reinterpret_cast( pBlitImageInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -8743,7 +8021,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdResolveImage2( static_cast( m_commandBuffer ), reinterpret_cast( pResolveImageInfo ) ); - d.vkCmdResolveImage2( static_cast( m_commandBuffer ), reinterpret_cast( pResolveImageInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -8766,7 +8043,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdBeginRendering( static_cast( m_commandBuffer ), reinterpret_cast( pRenderingInfo ) ); - d.vkCmdBeginRendering( static_cast( m_commandBuffer ), reinterpret_cast( pRenderingInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -8788,7 +8064,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdEndRendering( static_cast( m_commandBuffer ) ); - d.vkCmdEndRendering( static_cast( m_commandBuffer ) ); } template @@ -8796,7 +8071,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdSetCullMode( static_cast( m_commandBuffer ), static_cast( cullMode ) ); - d.vkCmdSetCullMode( static_cast( m_commandBuffer ), static_cast( cullMode ) ); } template @@ -8804,7 +8078,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdSetFrontFace( static_cast( m_commandBuffer ), static_cast( frontFace ) ); - d.vkCmdSetFrontFace( static_cast( m_commandBuffer ), static_cast( frontFace ) ); } template @@ -8813,7 +8086,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdSetPrimitiveTopology( static_cast( m_commandBuffer ), static_cast( primitiveTopology ) ); - d.vkCmdSetPrimitiveTopology( static_cast( m_commandBuffer ), static_cast( primitiveTopology ) ); } template @@ -8823,7 +8095,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdSetViewportWithCount( static_cast( m_commandBuffer ), viewportCount, reinterpret_cast( pViewports ) ); - d.vkCmdSetViewportWithCount( static_cast( m_commandBuffer ), viewportCount, reinterpret_cast( pViewports ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -8847,7 +8118,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdSetScissorWithCount( static_cast( m_commandBuffer ), scissorCount, reinterpret_cast( pScissors ) ); - d.vkCmdSetScissorWithCount( static_cast( m_commandBuffer ), scissorCount, reinterpret_cast( pScissors ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -8875,7 +8145,6 @@ namespace VULKAN_HPP_NAMESPACE 
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdBindVertexBuffers2( static_cast( m_commandBuffer ), d.vkCmdBindVertexBuffers2( static_cast( m_commandBuffer ), firstBinding, bindingCount, @@ -8933,7 +8202,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdSetDepthTestEnable( static_cast( m_commandBuffer ), static_cast( depthTestEnable ) ); - d.vkCmdSetDepthTestEnable( static_cast( m_commandBuffer ), static_cast( depthTestEnable ) ); } template @@ -8941,7 +8209,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdSetDepthWriteEnable( static_cast( m_commandBuffer ), static_cast( depthWriteEnable ) ); - d.vkCmdSetDepthWriteEnable( static_cast( m_commandBuffer ), static_cast( depthWriteEnable ) ); } template @@ -8949,7 +8216,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdSetDepthCompareOp( static_cast( m_commandBuffer ), static_cast( depthCompareOp ) ); - d.vkCmdSetDepthCompareOp( static_cast( m_commandBuffer ), static_cast( depthCompareOp ) ); } template @@ -8958,7 +8224,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdSetDepthBoundsTestEnable( static_cast( m_commandBuffer ), static_cast( depthBoundsTestEnable ) ); - d.vkCmdSetDepthBoundsTestEnable( static_cast( m_commandBuffer ), static_cast( depthBoundsTestEnable ) ); } template @@ -8966,7 +8231,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdSetStencilTestEnable( static_cast( m_commandBuffer ), static_cast( stencilTestEnable ) ); - d.vkCmdSetStencilTestEnable( static_cast( m_commandBuffer ), static_cast( stencilTestEnable ) ); } template @@ -8978,7 +8242,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetStencilOp( static_cast( m_commandBuffer ), d.vkCmdSetStencilOp( static_cast( m_commandBuffer ), static_cast( faceMask ), static_cast( failOp ), @@ -8993,7 +8256,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdSetRasterizerDiscardEnable( static_cast( m_commandBuffer ), static_cast( rasterizerDiscardEnable ) ); - d.vkCmdSetRasterizerDiscardEnable( static_cast( m_commandBuffer ), static_cast( rasterizerDiscardEnable ) ); } template @@ -9001,7 +8263,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdSetDepthBiasEnable( static_cast( m_commandBuffer ), static_cast( depthBiasEnable ) ); - d.vkCmdSetDepthBiasEnable( static_cast( m_commandBuffer ), static_cast( depthBiasEnable ) ); } template @@ -9010,7 +8271,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdSetPrimitiveRestartEnable( static_cast( m_commandBuffer ), static_cast( primitiveRestartEnable ) ); - d.vkCmdSetPrimitiveRestartEnable( static_cast( m_commandBuffer ), static_cast( primitiveRestartEnable ) ); } template @@ -9022,9 +8282,6 @@ namespace VULKAN_HPP_NAMESPACE d.vkGetDeviceBufferMemoryRequirements( static_cast( m_device ), reinterpret_cast( pInfo ), reinterpret_cast( pMemoryRequirements ) ); - d.vkGetDeviceBufferMemoryRequirements( static_cast( m_device ), - reinterpret_cast( pInfo ), - reinterpret_cast( 
pMemoryRequirements ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -9073,9 +8330,6 @@ namespace VULKAN_HPP_NAMESPACE d.vkGetDeviceImageMemoryRequirements( static_cast( m_device ), reinterpret_cast( pInfo ), reinterpret_cast( pMemoryRequirements ) ); - d.vkGetDeviceImageMemoryRequirements( static_cast( m_device ), - reinterpret_cast( pInfo ), - reinterpret_cast( pMemoryRequirements ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -9122,7 +8376,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetDeviceImageSparseMemoryRequirements( static_cast( m_device ), d.vkGetDeviceImageSparseMemoryRequirements( static_cast( m_device ), reinterpret_cast( pInfo ), pSparseMemoryRequirementCount, @@ -9130,11 +8383,6 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template ::value, - int>::type> template + VULKAN_HPP_INLINE void CommandBuffer::setLineStipple( uint32_t lineStippleFactor, uint16_t lineStipplePattern, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetLineStipple( static_cast( m_commandBuffer ), lineStippleFactor, lineStipplePattern ); + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::mapMemory2( const VULKAN_HPP_NAMESPACE::MemoryMapInfo * pMemoryMapInfo, + void ** ppData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkMapMemory2( static_cast( m_device ), reinterpret_cast( pMemoryMapInfo ), ppData ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type Device::mapMemory2( const VULKAN_HPP_NAMESPACE::MemoryMapInfo & memoryMapInfo, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkMapMemory2 && "Function requires or " ); +# endif + + void * pData; + VULKAN_HPP_NAMESPACE::Result result = + static_cast( d.vkMapMemory2( m_device, reinterpret_cast( &memoryMapInfo ), &pData ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::mapMemory2" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( pData ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::unmapMemory2( const VULKAN_HPP_NAMESPACE::MemoryUnmapInfo * pMemoryUnmapInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkUnmapMemory2( static_cast( m_device ), reinterpret_cast( pMemoryUnmapInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE typename ResultValueType::type Device::unmapMemory2( const VULKAN_HPP_NAMESPACE::MemoryUnmapInfo & memoryUnmapInfo, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkUnmapMemory2 && "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::Result result = + static_cast( d.vkUnmapMemory2( m_device, reinterpret_cast( &memoryUnmapInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::unmapMemory2" ); + + return 
VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void CommandBuffer::bindIndexBuffer2( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::DeviceSize size, + VULKAN_HPP_NAMESPACE::IndexType indexType, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdBindIndexBuffer2( static_cast( m_commandBuffer ), + static_cast( buffer ), + static_cast( offset ), + static_cast( size ), + static_cast( indexType ) ); + } + + template + VULKAN_HPP_INLINE void Device::getRenderingAreaGranularity( const VULKAN_HPP_NAMESPACE::RenderingAreaInfo * pRenderingAreaInfo, + VULKAN_HPP_NAMESPACE::Extent2D * pGranularity, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetRenderingAreaGranularity( + static_cast( m_device ), reinterpret_cast( pRenderingAreaInfo ), reinterpret_cast( pGranularity ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Extent2D + Device::getRenderingAreaGranularity( const VULKAN_HPP_NAMESPACE::RenderingAreaInfo & renderingAreaInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetRenderingAreaGranularity && "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::Extent2D granularity; + d.vkGetRenderingAreaGranularity( + m_device, reinterpret_cast( &renderingAreaInfo ), reinterpret_cast( &granularity ) ); + + return granularity; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void Device::getImageSubresourceLayout( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfo * pInfo, + VULKAN_HPP_NAMESPACE::SubresourceLayout2 * pLayout, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetDeviceImageSubresourceLayout( + static_cast( m_device ), reinterpret_cast( pInfo ), reinterpret_cast( pLayout ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout2 + Device::getImageSubresourceLayout( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfo & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDeviceImageSubresourceLayout && + "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::SubresourceLayout2 layout; + d.vkGetDeviceImageSubresourceLayout( + m_device, reinterpret_cast( &info ), reinterpret_cast( &layout ) ); + + return layout; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + Device::getImageSubresourceLayout( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfo & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDeviceImageSubresourceLayout && + "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::SubresourceLayout2 & layout = structureChain.template 
get(); + d.vkGetDeviceImageSubresourceLayout( + m_device, reinterpret_cast( &info ), reinterpret_cast( &layout ) ); + + return structureChain; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void Device::getImageSubresourceLayout2( VULKAN_HPP_NAMESPACE::Image image, + const VULKAN_HPP_NAMESPACE::ImageSubresource2 * pSubresource, + VULKAN_HPP_NAMESPACE::SubresourceLayout2 * pLayout, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkGetImageSubresourceLayout2( static_cast( m_device ), + static_cast( image ), + reinterpret_cast( pSubresource ), + reinterpret_cast( pLayout ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout2 Device::getImageSubresourceLayout2( + VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource2 & subresource, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( + d.vkGetImageSubresourceLayout2 && + "Function requires or or or " ); +# endif + + VULKAN_HPP_NAMESPACE::SubresourceLayout2 layout; + d.vkGetImageSubresourceLayout2( m_device, + static_cast( image ), + reinterpret_cast( &subresource ), + reinterpret_cast( &layout ) ); + + return layout; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain Device::getImageSubresourceLayout2( + VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource2 & subresource, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( + d.vkGetImageSubresourceLayout2 && + "Function requires or or or " ); +# endif + + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::SubresourceLayout2 & layout = structureChain.template get(); + d.vkGetImageSubresourceLayout2( m_device, + static_cast( image ), + reinterpret_cast( &subresource ), + reinterpret_cast( &layout ) ); + + return structureChain; + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSet( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t set, + uint32_t descriptorWriteCount, + const VULKAN_HPP_NAMESPACE::WriteDescriptorSet * pDescriptorWrites, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdPushDescriptorSet( static_cast( m_commandBuffer ), + static_cast( pipelineBindPoint ), + static_cast( layout ), + set, + descriptorWriteCount, + reinterpret_cast( pDescriptorWrites ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + CommandBuffer::pushDescriptorSet( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t set, + VULKAN_HPP_NAMESPACE::ArrayProxy const & descriptorWrites, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdPushDescriptorSet && "Function requires or " ); +# endif + + d.vkCmdPushDescriptorSet( m_commandBuffer, + static_cast( 
pipelineBindPoint ), + static_cast( layout ), + set, + descriptorWrites.size(), + reinterpret_cast( descriptorWrites.data() ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t set, + const void * pData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdPushDescriptorSetWithTemplate( static_cast( m_commandBuffer ), + static_cast( descriptorUpdateTemplate ), + static_cast( layout ), + set, + pData ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t set, + DataType const & data, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( + d.vkCmdPushDescriptorSetWithTemplate && + "Function requires or or " ); +# endif + + d.vkCmdPushDescriptorSetWithTemplate( m_commandBuffer, + static_cast( descriptorUpdateTemplate ), + static_cast( layout ), + set, + reinterpret_cast( &data ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void CommandBuffer::setRenderingAttachmentLocations( const VULKAN_HPP_NAMESPACE::RenderingAttachmentLocationInfo * pLocationInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetRenderingAttachmentLocations( static_cast( m_commandBuffer ), + reinterpret_cast( pLocationInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void CommandBuffer::setRenderingAttachmentLocations( const VULKAN_HPP_NAMESPACE::RenderingAttachmentLocationInfo & locationInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdSetRenderingAttachmentLocations && + "Function requires or " ); +# endif + + d.vkCmdSetRenderingAttachmentLocations( m_commandBuffer, reinterpret_cast( &locationInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_INLINE void + CommandBuffer::setRenderingInputAttachmentIndices( const VULKAN_HPP_NAMESPACE::RenderingInputAttachmentIndexInfo * pInputAttachmentIndexInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + d.vkCmdSetRenderingInputAttachmentIndices( static_cast( m_commandBuffer ), + reinterpret_cast( pInputAttachmentIndexInfo ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_INLINE void + CommandBuffer::setRenderingInputAttachmentIndices( const VULKAN_HPP_NAMESPACE::RenderingInputAttachmentIndexInfo & inputAttachmentIndexInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdSetRenderingInputAttachmentIndices && + "Function requires or " ); +# endif + + d.vkCmdSetRenderingInputAttachmentIndices( m_commandBuffer, reinterpret_cast( &inputAttachmentIndexInfo ) ); 
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets2( const VULKAN_HPP_NAMESPACE::BindDescriptorSetsInfo * pBindDescriptorSetsInfo,
+                                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdBindDescriptorSets2( static_cast<VkCommandBuffer>( m_commandBuffer ),
+                                reinterpret_cast<const VkBindDescriptorSetsInfo *>( pBindDescriptorSetsInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets2( const VULKAN_HPP_NAMESPACE::BindDescriptorSetsInfo & bindDescriptorSetsInfo,
+                                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
+    VULKAN_HPP_ASSERT( d.vkCmdBindDescriptorSets2 && "Function requires <VK_VERSION_1_4> or <VK_KHR_maintenance6>" );
+# endif
+
+    d.vkCmdBindDescriptorSets2( m_commandBuffer, reinterpret_cast<const VkBindDescriptorSetsInfo *>( &bindDescriptorSetsInfo ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::pushConstants2( const VULKAN_HPP_NAMESPACE::PushConstantsInfo * pPushConstantsInfo,
+                                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdPushConstants2( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkPushConstantsInfo *>( pPushConstantsInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::pushConstants2( const VULKAN_HPP_NAMESPACE::PushConstantsInfo & pushConstantsInfo,
+                                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
+    VULKAN_HPP_ASSERT( d.vkCmdPushConstants2 && "Function requires <VK_VERSION_1_4> or <VK_KHR_maintenance6>" );
+# endif
+
+    d.vkCmdPushConstants2( m_commandBuffer, reinterpret_cast<const VkPushConstantsInfo *>( &pushConstantsInfo ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSet2( const VULKAN_HPP_NAMESPACE::PushDescriptorSetInfo * pPushDescriptorSetInfo,
+                                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdPushDescriptorSet2( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkPushDescriptorSetInfo *>( pPushDescriptorSetInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSet2( const VULKAN_HPP_NAMESPACE::PushDescriptorSetInfo & pushDescriptorSetInfo,
+                                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 )
+    VULKAN_HPP_ASSERT( d.vkCmdPushDescriptorSet2 && "Function requires <VK_VERSION_1_4> or <VK_KHR_maintenance6>" );
+# endif
+
+    d.vkCmdPushDescriptorSet2( m_commandBuffer, reinterpret_cast<const VkPushDescriptorSetInfo *>( &pushDescriptorSetInfo ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void
+    CommandBuffer::pushDescriptorSetWithTemplate2( const VULKAN_HPP_NAMESPACE::PushDescriptorSetWithTemplateInfo * pPushDescriptorSetWithTemplateInfo,
+                                                   Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdPushDescriptorSetWithTemplate2( static_cast<VkCommandBuffer>( m_commandBuffer ),
+                                           reinterpret_cast<const VkPushDescriptorSetWithTemplateInfo *>( pPushDescriptorSetWithTemplateInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void
+    CommandBuffer::pushDescriptorSetWithTemplate2( const 
VULKAN_HPP_NAMESPACE::PushDescriptorSetWithTemplateInfo & pushDescriptorSetWithTemplateInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCmdPushDescriptorSetWithTemplate2 && + "Function requires or " ); +# endif + + d.vkCmdPushDescriptorSetWithTemplate2( m_commandBuffer, + reinterpret_cast( &pushDescriptorSetWithTemplateInfo ) ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyMemoryToImage( const VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfo * pCopyMemoryToImageInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkCopyMemoryToImage( static_cast( m_device ), reinterpret_cast( pCopyMemoryToImageInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::copyMemoryToImage( const VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfo & copyMemoryToImageInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCopyMemoryToImage && "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCopyMemoryToImage( m_device, reinterpret_cast( ©MemoryToImageInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::copyMemoryToImage" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyImageToMemory( const VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfo * pCopyImageToMemoryInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkCopyImageToMemory( static_cast( m_device ), reinterpret_cast( pCopyImageToMemoryInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::copyImageToMemory( const VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfo & copyImageToMemoryInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCopyImageToMemory && "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkCopyImageToMemory( m_device, reinterpret_cast( ©ImageToMemoryInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::copyImageToMemory" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyImageToImage( const VULKAN_HPP_NAMESPACE::CopyImageToImageInfo * pCopyImageToImageInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( + d.vkCopyImageToImage( static_cast( m_device ), reinterpret_cast( pCopyImageToImageInfo ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + 
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::copyImageToImage( const VULKAN_HPP_NAMESPACE::CopyImageToImageInfo & copyImageToImageInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkCopyImageToImage && "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::Result result = + static_cast( d.vkCopyImageToImage( m_device, reinterpret_cast( ©ImageToImageInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::copyImageToImage" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::transitionImageLayout( uint32_t transitionCount, + const VULKAN_HPP_NAMESPACE::HostImageLayoutTransitionInfo * pTransitions, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); + return static_cast( d.vkTransitionImageLayout( + static_cast( m_device ), transitionCount, reinterpret_cast( pTransitions ) ) ); + } + +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type + Device::transitionImageLayout( VULKAN_HPP_NAMESPACE::ArrayProxy const & transitions, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkTransitionImageLayout && "Function requires or " ); +# endif + + VULKAN_HPP_NAMESPACE::Result result = static_cast( + d.vkTransitionImageLayout( m_device, transitions.size(), reinterpret_cast( transitions.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::transitionImageLayout" ); + + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); + } +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + //=== VK_KHR_surface === template @@ -9213,8 +9002,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkDestroySurfaceKHR( static_cast( m_instance ), static_cast( surface ), reinterpret_cast( pAllocator ) ); - d.vkDestroySurfaceKHR( - static_cast( m_instance ), static_cast( surface ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -9242,8 +9029,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkDestroySurfaceKHR( static_cast( m_instance ), static_cast( surface ), reinterpret_cast( pAllocator ) ); - d.vkDestroySurfaceKHR( - static_cast( m_instance ), static_cast( surface ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -9272,7 +9057,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( d.vkGetPhysicalDeviceSurfaceSupportKHR( static_cast( m_physicalDevice ), queueFamilyIndex, static_cast( surface ), reinterpret_cast( pSupported ) ) ); - static_cast( m_physicalDevice ), queueFamilyIndex, static_cast( surface ), reinterpret_cast( pSupported ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -9289,10 +9073,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetPhysicalDeviceSurfaceSupportKHR( m_physicalDevice, queueFamilyIndex, static_cast( 
surface ), reinterpret_cast( &supported ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceSupportKHR" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceSupportKHR" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( supported ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( supported ) ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -9305,9 +9087,6 @@ namespace VULKAN_HPP_NAMESPACE return static_cast( d.vkGetPhysicalDeviceSurfaceCapabilitiesKHR( static_cast( m_physicalDevice ), static_cast( surface ), reinterpret_cast( pSurfaceCapabilities ) ) ); - return static_cast( d.vkGetPhysicalDeviceSurfaceCapabilitiesKHR( static_cast( m_physicalDevice ), - static_cast( surface ), - reinterpret_cast( pSurfaceCapabilities ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -9324,10 +9103,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetPhysicalDeviceSurfaceCapabilitiesKHR( m_physicalDevice, static_cast( surface ), reinterpret_cast( &surfaceCapabilities ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilitiesKHR" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilitiesKHR" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surfaceCapabilities ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surfaceCapabilities ) ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -9342,16 +9119,9 @@ namespace VULKAN_HPP_NAMESPACE static_cast( surface ), pSurfaceFormatCount, reinterpret_cast( pSurfaceFormats ) ) ); - return static_cast( d.vkGetPhysicalDeviceSurfaceFormatsKHR( static_cast( m_physicalDevice ), - static_cast( surface ), - pSurfaceFormatCount, - reinterpret_cast( pSurfaceFormats ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template ::value, int>::type> template ::value, int>::type> @@ -9378,14 +9148,12 @@ namespace VULKAN_HPP_NAMESPACE } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormatsKHR" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormatsKHR" ); VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() ); if ( surfaceFormatCount < surfaceFormats.size() ) { surfaceFormats.resize( surfaceFormatCount ); } return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surfaceFormats ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surfaceFormats ) ); } template ( surface ), pPresentModeCount, reinterpret_cast( pPresentModes ) ) ); - return static_cast( d.vkGetPhysicalDeviceSurfacePresentModesKHR( static_cast( m_physicalDevice ), - static_cast( surface ), - pPresentModeCount, - reinterpret_cast( pPresentModes ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template ::value, int>::type> template ::value, int>::type> @@ -9474,14 +9233,12 @@ namespace VULKAN_HPP_NAMESPACE } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModesKHR" ); - 
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModesKHR" ); VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() ); if ( presentModeCount < presentModes.size() ) { presentModes.resize( presentModeCount ); } return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( presentModes ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( presentModes ) ); } template ( d.vkCreateSwapchainKHR( static_cast( m_device ), return static_cast( d.vkCreateSwapchainKHR( static_cast( m_device ), reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), @@ -9558,10 +9312,8 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &swapchain ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSwapchainKHR" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSwapchainKHR" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( swapchain ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( swapchain ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -9583,12 +9335,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &swapchain ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSwapchainKHRUnique" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSwapchainKHRUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, UniqueHandle( swapchain, detail::ObjectDestroy( *this, allocator, d ) ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( swapchain, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -9601,8 +9350,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkDestroySwapchainKHR( static_cast( m_device ), static_cast( swapchain ), reinterpret_cast( pAllocator ) ); - d.vkDestroySwapchainKHR( - static_cast( m_device ), static_cast( swapchain ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -9630,8 +9377,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkDestroySwapchainKHR( static_cast( m_device ), static_cast( swapchain ), reinterpret_cast( pAllocator ) ); - d.vkDestroySwapchainKHR( - static_cast( m_device ), static_cast( swapchain ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -9660,14 +9405,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( d.vkGetSwapchainImagesKHR( static_cast( m_device ), static_cast( swapchain ), pSwapchainImageCount, reinterpret_cast( pSwapchainImages ) ) ); - return static_cast( d.vkGetSwapchainImagesKHR( - static_cast( m_device ), static_cast( swapchain ), pSwapchainImageCount, reinterpret_cast( pSwapchainImages ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template ::value, int>::type> template ::value, int>::type> @@ -9694,14 +9434,12 @@ namespace VULKAN_HPP_NAMESPACE } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING 
"::Device::getSwapchainImagesKHR" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainImagesKHR" ); VULKAN_HPP_ASSERT( swapchainImageCount <= swapchainImages.size() ); if ( swapchainImageCount < swapchainImages.size() ) { swapchainImages.resize( swapchainImageCount ); } return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( swapchainImages ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( swapchainImages ) ); } template ( semaphore ), static_cast( fence ), pImageIndex ) ); - return static_cast( d.vkAcquireNextImageKHR( static_cast( m_device ), - static_cast( swapchain ), - timeout, - static_cast( semaphore ), - static_cast( fence ), - pImageIndex ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -9786,12 +9516,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result::eTimeout, VULKAN_HPP_NAMESPACE::Result::eNotReady, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, - VULKAN_HPP_NAMESPACE_STRING "::Device::acquireNextImageKHR", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, - VULKAN_HPP_NAMESPACE::Result::eTimeout, - VULKAN_HPP_NAMESPACE::Result::eNotReady, - VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } ); return ResultValue( result, std::move( imageIndex ) ); } @@ -9803,7 +9527,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( d.vkQueuePresentKHR( static_cast( m_queue ), reinterpret_cast( pPresentInfo ) ) ); - return static_cast( d.vkQueuePresentKHR( static_cast( m_queue ), reinterpret_cast( pPresentInfo ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -9818,7 +9541,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkQueuePresentKHR( m_queue, reinterpret_cast( &presentInfo ) ) ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::presentKHR", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } ); @@ -9833,8 +9555,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( d.vkGetDeviceGroupPresentCapabilitiesKHR( static_cast( m_device ), reinterpret_cast( pDeviceGroupPresentCapabilities ) ) ); - return static_cast( d.vkGetDeviceGroupPresentCapabilitiesKHR( - static_cast( m_device ), reinterpret_cast( pDeviceGroupPresentCapabilities ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -9852,10 +9572,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetDeviceGroupPresentCapabilitiesKHR( m_device, reinterpret_cast( &deviceGroupPresentCapabilities ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupPresentCapabilitiesKHR" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupPresentCapabilitiesKHR" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( deviceGroupPresentCapabilities ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( deviceGroupPresentCapabilities ) ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -9867,7 +9585,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( d.vkGetDeviceGroupSurfacePresentModesKHR( static_cast( m_device ), 
static_cast( surface ), reinterpret_cast( pModes ) ) ); - static_cast( m_device ), static_cast( surface ), reinterpret_cast( pModes ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -9885,10 +9602,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetDeviceGroupSurfacePresentModesKHR( m_device, static_cast( surface ), reinterpret_cast( &modes ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupSurfacePresentModesKHR" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupSurfacePresentModesKHR" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( modes ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( modes ) ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -9901,14 +9616,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( d.vkGetPhysicalDevicePresentRectanglesKHR( static_cast( m_physicalDevice ), static_cast( surface ), pRectCount, reinterpret_cast( pRects ) ) ); - return static_cast( d.vkGetPhysicalDevicePresentRectanglesKHR( - static_cast( m_physicalDevice ), static_cast( surface ), pRectCount, reinterpret_cast( pRects ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template ::value, int>::type> template ::value, int>::type> @@ -9936,14 +9646,12 @@ namespace VULKAN_HPP_NAMESPACE } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getPresentRectanglesKHR" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getPresentRectanglesKHR" ); VULKAN_HPP_ASSERT( rectCount <= rects.size() ); if ( rectCount < rects.size() ) { rects.resize( rectCount ); } return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( rects ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( rects ) ); } template ( d.vkAcquireNextImage2KHR( static_cast( m_device ), reinterpret_cast( pAcquireInfo ), pImageIndex ) ); - return static_cast( - d.vkAcquireNextImage2KHR( static_cast( m_device ), reinterpret_cast( pAcquireInfo ), pImageIndex ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -10015,12 +9719,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result::eTimeout, VULKAN_HPP_NAMESPACE::Result::eNotReady, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, - VULKAN_HPP_NAMESPACE_STRING "::Device::acquireNextImage2KHR", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, - VULKAN_HPP_NAMESPACE::Result::eTimeout, - VULKAN_HPP_NAMESPACE::Result::eNotReady, - VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } ); return ResultValue( result, std::move( imageIndex ) ); } @@ -10036,15 +9734,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( d.vkGetPhysicalDeviceDisplayPropertiesKHR( static_cast( m_physicalDevice ), pPropertyCount, reinterpret_cast( pProperties ) ) ); - return static_cast( d.vkGetPhysicalDeviceDisplayPropertiesKHR( - static_cast( m_physicalDevice ), pPropertyCount, reinterpret_cast( pProperties ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template < - typename DisplayPropertiesKHRAllocator, - typename Dispatch, - typename std::enable_if::value, int>::type> template < typename 
DisplayPropertiesKHRAllocator, typename Dispatch, @@ -10071,14 +9763,12 @@ namespace VULKAN_HPP_NAMESPACE } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPropertiesKHR" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPropertiesKHR" ); VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); if ( propertyCount < properties.size() ) { properties.resize( propertyCount ); } return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); } template < @@ -10107,14 +9797,12 @@ namespace VULKAN_HPP_NAMESPACE } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPropertiesKHR" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPropertiesKHR" ); VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); if ( propertyCount < properties.size() ) { properties.resize( propertyCount ); } return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -10126,16 +9814,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( static_cast( m_physicalDevice ), pPropertyCount, reinterpret_cast( pProperties ) ) ); - return static_cast( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( - static_cast( m_physicalDevice ), pPropertyCount, reinterpret_cast( pProperties ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template < - typename DisplayPlanePropertiesKHRAllocator, - typename Dispatch, - typename std::enable_if::value, - int>::type> template < typename DisplayPlanePropertiesKHRAllocator, typename Dispatch, @@ -10164,14 +9845,12 @@ namespace VULKAN_HPP_NAMESPACE } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlanePropertiesKHR" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlanePropertiesKHR" ); VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); if ( propertyCount < properties.size() ) { properties.resize( propertyCount ); } return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); } template < @@ -10202,14 +9881,12 @@ namespace VULKAN_HPP_NAMESPACE } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlanePropertiesKHR" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlanePropertiesKHR" ); VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); if ( propertyCount < properties.size() ) { properties.resize( propertyCount ); } return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); 
- return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -10222,14 +9899,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( d.vkGetDisplayPlaneSupportedDisplaysKHR( static_cast( m_physicalDevice ), planeIndex, pDisplayCount, reinterpret_cast( pDisplays ) ) ); - return static_cast( d.vkGetDisplayPlaneSupportedDisplaysKHR( - static_cast( m_physicalDevice ), planeIndex, pDisplayCount, reinterpret_cast( pDisplays ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template ::value, int>::type> template ::value, int>::type> @@ -10255,14 +9927,12 @@ namespace VULKAN_HPP_NAMESPACE } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR" ); VULKAN_HPP_ASSERT( displayCount <= displays.size() ); if ( displayCount < displays.size() ) { displays.resize( displayCount ); } return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( displays ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( displays ) ); } template ( display ), pPropertyCount, reinterpret_cast( pProperties ) ) ); - return static_cast( d.vkGetDisplayModePropertiesKHR( static_cast( m_physicalDevice ), - static_cast( display ), - pPropertyCount, - reinterpret_cast( pProperties ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template ::value, - int>::type> template ::value, @@ -10351,14 +10011,12 @@ namespace VULKAN_HPP_NAMESPACE } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModePropertiesKHR" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModePropertiesKHR" ); VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); if ( propertyCount < properties.size() ) { properties.resize( propertyCount ); } return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); } template ( d.vkCreateDisplayModeKHR( static_cast( m_physicalDevice ), return static_cast( d.vkCreateDisplayModeKHR( static_cast( m_physicalDevice ), static_cast( display ), reinterpret_cast( pCreateInfo ), @@ -10439,10 +10094,8 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &mode ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDisplayModeKHR" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDisplayModeKHR" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( mode ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( mode ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -10466,12 +10119,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &mode ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDisplayModeKHRUnique" ); - 
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDisplayModeKHRUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, UniqueHandle( mode, detail::ObjectDestroy( *this, allocator, d ) ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( mode, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -10488,10 +10138,6 @@ namespace VULKAN_HPP_NAMESPACE static_cast( mode ), planeIndex, reinterpret_cast( pCapabilities ) ) ); - return static_cast( d.vkGetDisplayPlaneCapabilitiesKHR( static_cast( m_physicalDevice ), - static_cast( mode ), - planeIndex, - reinterpret_cast( pCapabilities ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -10508,10 +10154,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetDisplayPlaneCapabilitiesKHR( m_physicalDevice, static_cast( mode ), planeIndex, reinterpret_cast( &capabilities ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneCapabilitiesKHR" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneCapabilitiesKHR" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( capabilities ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( capabilities ) ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -10522,7 +10166,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateDisplayPlaneSurfaceKHR( static_cast( m_instance ), return static_cast( d.vkCreateDisplayPlaneSurfaceKHR( static_cast( m_instance ), reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), @@ -10548,10 +10191,8 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDisplayPlaneSurfaceKHR" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDisplayPlaneSurfaceKHR" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surface ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surface ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -10573,12 +10214,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDisplayPlaneSurfaceKHRUnique" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDisplayPlaneSurfaceKHRUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, UniqueHandle( surface, detail::ObjectDestroy( *this, allocator, d ) ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( surface, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -10593,7 +10231,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return 
static_cast( d.vkCreateSharedSwapchainsKHR( static_cast( m_device ), return static_cast( d.vkCreateSharedSwapchainsKHR( static_cast( m_device ), swapchainCount, reinterpret_cast( pCreateInfos ), @@ -10602,9 +10239,6 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template ::value, int>::type> template ::value, int>::type> @@ -10626,10 +10260,8 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( swapchains.data() ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHR" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHR" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( swapchains ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( swapchains ) ); } template ( static_cast( allocator ) ), reinterpret_cast( swapchains.data() ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHR" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHR" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( swapchains ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( swapchains ) ); } template @@ -10679,17 +10309,11 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &swapchain ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainKHR" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainKHR" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( swapchain ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( swapchain ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE - template >::value, - int>::type> template >::value, @@ -10713,17 +10337,14 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( swapchains.data() ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHRUnique" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHRUnique" ); std::vector, SwapchainKHRAllocator> uniqueSwapchains; uniqueSwapchains.reserve( createInfos.size() ); detail::ObjectDestroy deleter( *this, allocator, d ); - detail::ObjectDestroy deleter( *this, allocator, d ); for ( auto const & swapchain : swapchains ) { uniqueSwapchains.push_back( UniqueHandle( swapchain, deleter ) ); } return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( uniqueSwapchains ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( uniqueSwapchains ) ); } template ( static_cast( allocator ) ), reinterpret_cast( swapchains.data() ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHRUnique" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHRUnique" ); std::vector, SwapchainKHRAllocator> uniqueSwapchains( swapchainKHRAllocator ); uniqueSwapchains.reserve( createInfos.size() ); detail::ObjectDestroy deleter( 
*this, allocator, d ); - detail::ObjectDestroy deleter( *this, allocator, d ); for ( auto const & swapchain : swapchains ) { uniqueSwapchains.push_back( UniqueHandle( swapchain, deleter ) ); } return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( uniqueSwapchains ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( uniqueSwapchains ) ); } template @@ -10782,12 +10400,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &swapchain ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainKHRUnique" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainKHRUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, UniqueHandle( swapchain, detail::ObjectDestroy( *this, allocator, d ) ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( swapchain, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -10802,7 +10417,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateXlibSurfaceKHR( static_cast( m_instance ), return static_cast( d.vkCreateXlibSurfaceKHR( static_cast( m_instance ), reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), @@ -10828,10 +10442,8 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createXlibSurfaceKHR" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createXlibSurfaceKHR" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surface ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surface ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -10853,12 +10465,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createXlibSurfaceKHRUnique" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createXlibSurfaceKHRUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, UniqueHandle( surface, detail::ObjectDestroy( *this, allocator, d ) ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( surface, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -10870,8 +10479,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( d.vkGetPhysicalDeviceXlibPresentationSupportKHR( static_cast( m_physicalDevice ), queueFamilyIndex, dpy, visualID ) ); - return static_cast( - d.vkGetPhysicalDeviceXlibPresentationSupportKHR( static_cast( m_physicalDevice ), queueFamilyIndex, dpy, visualID ) ); } # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -10902,7 +10509,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( 
d.vkCreateXcbSurfaceKHR( static_cast( m_instance ), return static_cast( d.vkCreateXcbSurfaceKHR( static_cast( m_instance ), reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), @@ -10928,10 +10534,8 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createXcbSurfaceKHR" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createXcbSurfaceKHR" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surface ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surface ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -10953,12 +10557,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createXcbSurfaceKHRUnique" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createXcbSurfaceKHRUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, UniqueHandle( surface, detail::ObjectDestroy( *this, allocator, d ) ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( surface, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -10972,8 +10573,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( d.vkGetPhysicalDeviceXcbPresentationSupportKHR( static_cast( m_physicalDevice ), queueFamilyIndex, connection, visual_id ) ); - return static_cast( - d.vkGetPhysicalDeviceXcbPresentationSupportKHR( static_cast( m_physicalDevice ), queueFamilyIndex, connection, visual_id ) ); } # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -11006,7 +10605,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateWaylandSurfaceKHR( static_cast( m_instance ), return static_cast( d.vkCreateWaylandSurfaceKHR( static_cast( m_instance ), reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), @@ -11032,10 +10630,8 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createWaylandSurfaceKHR" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createWaylandSurfaceKHR" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surface ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surface ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -11057,12 +10653,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createWaylandSurfaceKHRUnique" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createWaylandSurfaceKHRUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, UniqueHandle( surface, detail::ObjectDestroy( *this, allocator, d ) ) ); - return 
VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( surface, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -11075,8 +10668,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( d.vkGetPhysicalDeviceWaylandPresentationSupportKHR( static_cast( m_physicalDevice ), queueFamilyIndex, display ) ); - return static_cast( - d.vkGetPhysicalDeviceWaylandPresentationSupportKHR( static_cast( m_physicalDevice ), queueFamilyIndex, display ) ); } # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -11107,7 +10698,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateAndroidSurfaceKHR( static_cast( m_instance ), return static_cast( d.vkCreateAndroidSurfaceKHR( static_cast( m_instance ), reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), @@ -11133,10 +10723,8 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createAndroidSurfaceKHR" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createAndroidSurfaceKHR" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surface ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surface ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -11158,12 +10746,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createAndroidSurfaceKHRUnique" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createAndroidSurfaceKHRUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, UniqueHandle( surface, detail::ObjectDestroy( *this, allocator, d ) ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( surface, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -11179,7 +10764,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateWin32SurfaceKHR( static_cast( m_instance ), return static_cast( d.vkCreateWin32SurfaceKHR( static_cast( m_instance ), reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), @@ -11205,10 +10789,8 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createWin32SurfaceKHR" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createWin32SurfaceKHR" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surface ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surface ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -11230,12 +10812,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); 
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createWin32SurfaceKHRUnique" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createWin32SurfaceKHRUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, UniqueHandle( surface, detail::ObjectDestroy( *this, allocator, d ) ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( surface, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -11245,7 +10824,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( d.vkGetPhysicalDeviceWin32PresentationSupportKHR( static_cast( m_physicalDevice ), queueFamilyIndex ) ); - return static_cast( d.vkGetPhysicalDeviceWin32PresentationSupportKHR( static_cast( m_physicalDevice ), queueFamilyIndex ) ); } #endif /*VK_USE_PLATFORM_WIN32_KHR*/ @@ -11259,7 +10837,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateDebugReportCallbackEXT( static_cast( m_instance ), return static_cast( d.vkCreateDebugReportCallbackEXT( static_cast( m_instance ), reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), @@ -11285,10 +10862,8 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &callback ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugReportCallbackEXT" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugReportCallbackEXT" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( callback ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( callback ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -11310,14 +10885,10 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &callback ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugReportCallbackEXTUnique" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugReportCallbackEXTUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, UniqueHandle( callback, detail::ObjectDestroy( *this, allocator, d ) ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, - UniqueHandle( callback, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -11330,7 +10901,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkDestroyDebugReportCallbackEXT( static_cast( m_instance ), static_cast( callback ), reinterpret_cast( pAllocator ) ); - static_cast( m_instance ), static_cast( callback ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -11359,7 +10929,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkDestroyDebugReportCallbackEXT( static_cast( m_instance ), static_cast( callback ), reinterpret_cast( pAllocator ) ); - static_cast( m_instance ), static_cast( callback ), reinterpret_cast( pAllocator ) ); } #ifndef 
VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -11391,7 +10960,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDebugReportMessageEXT( static_cast( m_instance ), d.vkDebugReportMessageEXT( static_cast( m_instance ), static_cast( flags ), static_cast( objectType_ ), @@ -11438,8 +11006,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( d.vkDebugMarkerSetObjectTagEXT( static_cast( m_device ), reinterpret_cast( pTagInfo ) ) ); - return static_cast( - d.vkDebugMarkerSetObjectTagEXT( static_cast( m_device ), reinterpret_cast( pTagInfo ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -11455,10 +11021,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkDebugMarkerSetObjectTagEXT( m_device, reinterpret_cast( &tagInfo ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::debugMarkerSetObjectTagEXT" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::debugMarkerSetObjectTagEXT" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -11469,8 +11033,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( d.vkDebugMarkerSetObjectNameEXT( static_cast( m_device ), reinterpret_cast( pNameInfo ) ) ); - return static_cast( - d.vkDebugMarkerSetObjectNameEXT( static_cast( m_device ), reinterpret_cast( pNameInfo ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -11486,10 +11048,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkDebugMarkerSetObjectNameEXT( m_device, reinterpret_cast( &nameInfo ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::debugMarkerSetObjectNameEXT" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::debugMarkerSetObjectNameEXT" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -11499,7 +11059,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdDebugMarkerBeginEXT( static_cast( m_commandBuffer ), reinterpret_cast( pMarkerInfo ) ); - d.vkCmdDebugMarkerBeginEXT( static_cast( m_commandBuffer ), reinterpret_cast( pMarkerInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -11521,7 +11080,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdDebugMarkerEndEXT( static_cast( m_commandBuffer ) ); - d.vkCmdDebugMarkerEndEXT( static_cast( m_commandBuffer ) ); } template @@ -11530,7 +11088,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdDebugMarkerInsertEXT( static_cast( m_commandBuffer ), reinterpret_cast( pMarkerInfo ) ); - d.vkCmdDebugMarkerInsertEXT( static_cast( m_commandBuffer ), reinterpret_cast( pMarkerInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -11558,9 +11115,6 @@ namespace VULKAN_HPP_NAMESPACE return static_cast( d.vkGetPhysicalDeviceVideoCapabilitiesKHR( static_cast( m_physicalDevice ), reinterpret_cast( pVideoProfile ), 
reinterpret_cast( pCapabilities ) ) ); - return static_cast( d.vkGetPhysicalDeviceVideoCapabilitiesKHR( static_cast( m_physicalDevice ), - reinterpret_cast( pVideoProfile ), - reinterpret_cast( pCapabilities ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -11577,10 +11131,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetPhysicalDeviceVideoCapabilitiesKHR( m_physicalDevice, reinterpret_cast( &videoProfile ), reinterpret_cast( &capabilities ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoCapabilitiesKHR" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoCapabilitiesKHR" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( capabilities ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( capabilities ) ); } template @@ -11597,10 +11149,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetPhysicalDeviceVideoCapabilitiesKHR( m_physicalDevice, reinterpret_cast( &videoProfile ), reinterpret_cast( &capabilities ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoCapabilitiesKHR" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoCapabilitiesKHR" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( structureChain ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( structureChain ) ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -11612,7 +11162,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( static_cast( m_physicalDevice ), return static_cast( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( static_cast( m_physicalDevice ), reinterpret_cast( pVideoFormatInfo ), pVideoFormatPropertyCount, @@ -11620,10 +11169,6 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template ::value, - int>::type> template ::value, @@ -11656,14 +11201,12 @@ namespace VULKAN_HPP_NAMESPACE } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoFormatPropertiesKHR" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoFormatPropertiesKHR" ); VULKAN_HPP_ASSERT( videoFormatPropertyCount <= videoFormatProperties.size() ); if ( videoFormatPropertyCount < videoFormatProperties.size() ) { videoFormatProperties.resize( videoFormatPropertyCount ); } return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( videoFormatProperties ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( videoFormatProperties ) ); } template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getVideoFormatPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR && + 
"Function requires " ); +# endif + + std::vector structureChains; + std::vector videoFormatProperties; + uint32_t videoFormatPropertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( + m_physicalDevice, reinterpret_cast( &videoFormatInfo ), &videoFormatPropertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && videoFormatPropertyCount ) + { + structureChains.resize( videoFormatPropertyCount ); + videoFormatProperties.resize( videoFormatPropertyCount ); + for ( uint32_t i = 0; i < videoFormatPropertyCount; i++ ) + { + videoFormatProperties[i].pNext = structureChains[i].template get().pNext; + } + result = static_cast( + d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( m_physicalDevice, + reinterpret_cast( &videoFormatInfo ), + &videoFormatPropertyCount, + reinterpret_cast( videoFormatProperties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoFormatPropertiesKHR" ); + VULKAN_HPP_ASSERT( videoFormatPropertyCount <= videoFormatProperties.size() ); + if ( videoFormatPropertyCount < videoFormatProperties.size() ) + { + structureChains.resize( videoFormatPropertyCount ); + } + for ( uint32_t i = 0; i < videoFormatPropertyCount; i++ ) + { + structureChains[i].template get() = videoFormatProperties[i]; + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( structureChains ) ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getVideoFormatPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo, + StructureChainAllocator & structureChainAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR && + "Function requires " ); +# endif + + std::vector structureChains( structureChainAllocator ); + std::vector videoFormatProperties; + uint32_t videoFormatPropertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( + m_physicalDevice, reinterpret_cast( &videoFormatInfo ), &videoFormatPropertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && videoFormatPropertyCount ) + { + structureChains.resize( videoFormatPropertyCount ); + videoFormatProperties.resize( videoFormatPropertyCount ); + for ( uint32_t i = 0; i < videoFormatPropertyCount; i++ ) + { + videoFormatProperties[i].pNext = structureChains[i].template get().pNext; + } + result = static_cast( + d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( m_physicalDevice, + reinterpret_cast( &videoFormatInfo ), + &videoFormatPropertyCount, + reinterpret_cast( videoFormatProperties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoFormatPropertiesKHR" ); + VULKAN_HPP_ASSERT( videoFormatPropertyCount <= videoFormatProperties.size() ); + if ( videoFormatPropertyCount < videoFormatProperties.size() ) + { + structureChains.resize( videoFormatPropertyCount ); + } + for ( uint32_t i = 0; i < videoFormatPropertyCount; i++ ) + { + structureChains[i].template get() = 
videoFormatProperties[i]; + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( structureChains ) ); + } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template @@ -11717,7 +11359,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateVideoSessionKHR( static_cast( m_device ), return static_cast( d.vkCreateVideoSessionKHR( static_cast( m_device ), reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), @@ -11743,10 +11384,8 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &videoSession ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createVideoSessionKHR" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createVideoSessionKHR" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( videoSession ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( videoSession ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -11768,12 +11407,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &videoSession ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createVideoSessionKHRUnique" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createVideoSessionKHRUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, UniqueHandle( videoSession, detail::ObjectDestroy( *this, allocator, d ) ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( videoSession, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -11786,8 +11422,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkDestroyVideoSessionKHR( static_cast( m_device ), static_cast( videoSession ), reinterpret_cast( pAllocator ) ); - d.vkDestroyVideoSessionKHR( - static_cast( m_device ), static_cast( videoSession ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -11816,8 +11450,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkDestroyVideoSessionKHR( static_cast( m_device ), static_cast( videoSession ), reinterpret_cast( pAllocator ) ); - d.vkDestroyVideoSessionKHR( - static_cast( m_device ), static_cast( videoSession ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -11846,7 +11478,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetVideoSessionMemoryRequirementsKHR( static_cast( m_device ), return static_cast( d.vkGetVideoSessionMemoryRequirementsKHR( static_cast( m_device ), static_cast( videoSession ), pMemoryRequirementsCount, @@ -11854,11 +11485,6 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template ::value, - int>::type> template ( d.vkBindVideoSessionMemoryKHR( static_cast( m_device ), return static_cast( d.vkBindVideoSessionMemoryKHR( static_cast( m_device ), static_cast( videoSession ), bindSessionMemoryInfoCount, @@ -11976,10 +11601,8 @@ namespace VULKAN_HPP_NAMESPACE 
bindSessionMemoryInfos.size(), reinterpret_cast( bindSessionMemoryInfos.data() ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindVideoSessionMemoryKHR" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindVideoSessionMemoryKHR" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -11991,7 +11614,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateVideoSessionParametersKHR( static_cast( m_device ), return static_cast( d.vkCreateVideoSessionParametersKHR( static_cast( m_device ), reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), @@ -12017,10 +11639,8 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &videoSessionParameters ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createVideoSessionParametersKHR" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createVideoSessionParametersKHR" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( videoSessionParameters ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( videoSessionParameters ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -12042,14 +11662,10 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &videoSessionParameters ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createVideoSessionParametersKHRUnique" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createVideoSessionParametersKHRUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, UniqueHandle( videoSessionParameters, detail::ObjectDestroy( *this, allocator, d ) ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, - UniqueHandle( - videoSessionParameters, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -12061,7 +11677,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkUpdateVideoSessionParametersKHR( static_cast( m_device ), return static_cast( d.vkUpdateVideoSessionParametersKHR( static_cast( m_device ), static_cast( videoSessionParameters ), reinterpret_cast( pUpdateInfo ) ) ); @@ -12084,10 +11699,8 @@ namespace VULKAN_HPP_NAMESPACE static_cast( videoSessionParameters ), reinterpret_cast( &updateInfo ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::updateVideoSessionParametersKHR" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::updateVideoSessionParametersKHR" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -12100,9 +11713,6 @@ namespace VULKAN_HPP_NAMESPACE d.vkDestroyVideoSessionParametersKHR( static_cast( m_device ), static_cast( videoSessionParameters ), 
reinterpret_cast( pAllocator ) ); - d.vkDestroyVideoSessionParametersKHR( static_cast( m_device ), - static_cast( videoSessionParameters ), - reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -12132,9 +11742,6 @@ namespace VULKAN_HPP_NAMESPACE d.vkDestroyVideoSessionParametersKHR( static_cast( m_device ), static_cast( videoSessionParameters ), reinterpret_cast( pAllocator ) ); - d.vkDestroyVideoSessionParametersKHR( static_cast( m_device ), - static_cast( videoSessionParameters ), - reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -12161,7 +11768,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdBeginVideoCodingKHR( static_cast( m_commandBuffer ), reinterpret_cast( pBeginInfo ) ); - d.vkCmdBeginVideoCodingKHR( static_cast( m_commandBuffer ), reinterpret_cast( pBeginInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -12184,7 +11790,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdEndVideoCodingKHR( static_cast( m_commandBuffer ), reinterpret_cast( pEndCodingInfo ) ); - d.vkCmdEndVideoCodingKHR( static_cast( m_commandBuffer ), reinterpret_cast( pEndCodingInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -12208,8 +11813,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdControlVideoCodingKHR( static_cast( m_commandBuffer ), reinterpret_cast( pCodingControlInfo ) ); - d.vkCmdControlVideoCodingKHR( static_cast( m_commandBuffer ), - reinterpret_cast( pCodingControlInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -12234,7 +11837,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdDecodeVideoKHR( static_cast( m_commandBuffer ), reinterpret_cast( pDecodeInfo ) ); - d.vkCmdDecodeVideoKHR( static_cast( m_commandBuffer ), reinterpret_cast( pDecodeInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -12262,7 +11864,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdBindTransformFeedbackBuffersEXT( static_cast( m_commandBuffer ), d.vkCmdBindTransformFeedbackBuffersEXT( static_cast( m_commandBuffer ), firstBinding, bindingCount, @@ -12315,7 +11916,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdBeginTransformFeedbackEXT( static_cast( m_commandBuffer ), d.vkCmdBeginTransformFeedbackEXT( static_cast( m_commandBuffer ), firstCounterBuffer, counterBufferCount, @@ -12360,7 +11960,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdEndTransformFeedbackEXT( static_cast( m_commandBuffer ), d.vkCmdEndTransformFeedbackEXT( static_cast( m_commandBuffer ), firstCounterBuffer, counterBufferCount, @@ -12407,8 +12006,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdBeginQueryIndexedEXT( static_cast( m_commandBuffer ), static_cast( queryPool ), query, static_cast( flags ), index ); - d.vkCmdBeginQueryIndexedEXT( - static_cast( m_commandBuffer ), static_cast( queryPool ), query, static_cast( flags ), index ); } template @@ -12417,7 +12014,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == 
VK_HEADER_VERSION ); d.vkCmdEndQueryIndexedEXT( static_cast( m_commandBuffer ), static_cast( queryPool ), query, index ); - d.vkCmdEndQueryIndexedEXT( static_cast( m_commandBuffer ), static_cast( queryPool ), query, index ); } template @@ -12430,7 +12026,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdDrawIndirectByteCountEXT( static_cast( m_commandBuffer ), d.vkCmdDrawIndirectByteCountEXT( static_cast( m_commandBuffer ), instanceCount, firstInstance, @@ -12449,7 +12044,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateCuModuleNVX( static_cast( m_device ), return static_cast( d.vkCreateCuModuleNVX( static_cast( m_device ), reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), @@ -12475,10 +12069,8 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &module ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCuModuleNVX" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCuModuleNVX" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( module ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( module ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -12500,12 +12092,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &module ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCuModuleNVXUnique" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCuModuleNVXUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, UniqueHandle( module, detail::ObjectDestroy( *this, allocator, d ) ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( module, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -12517,7 +12106,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateCuFunctionNVX( static_cast( m_device ), return static_cast( d.vkCreateCuFunctionNVX( static_cast( m_device ), reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), @@ -12543,10 +12131,8 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &function ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCuFunctionNVX" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCuFunctionNVX" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( function ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( function ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -12568,12 +12154,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &function ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createCuFunctionNVXUnique" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, 
VULKAN_HPP_NAMESPACE_STRING "::Device::createCuFunctionNVXUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, UniqueHandle( function, detail::ObjectDestroy( *this, allocator, d ) ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( function, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -12586,8 +12169,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkDestroyCuModuleNVX( static_cast( m_device ), static_cast( module ), reinterpret_cast( pAllocator ) ); - d.vkDestroyCuModuleNVX( - static_cast( m_device ), static_cast( module ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -12615,8 +12196,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkDestroyCuModuleNVX( static_cast( m_device ), static_cast( module ), reinterpret_cast( pAllocator ) ); - d.vkDestroyCuModuleNVX( - static_cast( m_device ), static_cast( module ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -12644,8 +12223,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkDestroyCuFunctionNVX( static_cast( m_device ), static_cast( function ), reinterpret_cast( pAllocator ) ); - d.vkDestroyCuFunctionNVX( - static_cast( m_device ), static_cast( function ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -12673,8 +12250,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkDestroyCuFunctionNVX( static_cast( m_device ), static_cast( function ), reinterpret_cast( pAllocator ) ); - d.vkDestroyCuFunctionNVX( - static_cast( m_device ), static_cast( function ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -12700,7 +12275,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdCuLaunchKernelNVX( static_cast( m_commandBuffer ), reinterpret_cast( pLaunchInfo ) ); - d.vkCmdCuLaunchKernelNVX( static_cast( m_commandBuffer ), reinterpret_cast( pLaunchInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -12725,7 +12299,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return d.vkGetImageViewHandleNVX( static_cast( m_device ), reinterpret_cast( pInfo ) ); - return d.vkGetImageViewHandleNVX( static_cast( m_device ), reinterpret_cast( pInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -12776,8 +12349,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( d.vkGetImageViewAddressNVX( static_cast( m_device ), static_cast( imageView ), reinterpret_cast( pProperties ) ) ); - return static_cast( d.vkGetImageViewAddressNVX( - static_cast( m_device ), static_cast( imageView ), reinterpret_cast( pProperties ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -12794,10 +12365,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetImageViewAddressNVX( m_device, static_cast( imageView ), reinterpret_cast( &properties ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getImageViewAddressNVX" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING 
"::Device::getImageViewAddressNVX" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -12813,7 +12382,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdDrawIndirectCountAMD( static_cast( m_commandBuffer ), d.vkCmdDrawIndirectCountAMD( static_cast( m_commandBuffer ), static_cast( buffer ), static_cast( offset ), @@ -12833,7 +12401,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdDrawIndexedIndirectCountAMD( static_cast( m_commandBuffer ), d.vkCmdDrawIndexedIndirectCountAMD( static_cast( m_commandBuffer ), static_cast( buffer ), static_cast( offset ), @@ -12854,7 +12421,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetShaderInfoAMD( static_cast( m_device ), return static_cast( d.vkGetShaderInfoAMD( static_cast( m_device ), static_cast( pipeline ), static_cast( shaderStage ), @@ -12864,9 +12430,6 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template ::value, int>::type> template ::value, int>::type> @@ -12904,14 +12467,12 @@ namespace VULKAN_HPP_NAMESPACE } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getShaderInfoAMD" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getShaderInfoAMD" ); VULKAN_HPP_ASSERT( infoSize <= info.size() ); if ( infoSize < info.size() ) { info.resize( infoSize ); } return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( info ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( info ) ); } template ( m_commandBuffer ), reinterpret_cast( pRenderingInfo ) ); - d.vkCmdBeginRenderingKHR( static_cast( m_commandBuffer ), reinterpret_cast( pRenderingInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -12993,7 +12551,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdEndRenderingKHR( static_cast( m_commandBuffer ) ); - d.vkCmdEndRenderingKHR( static_cast( m_commandBuffer ) ); } #if defined( VK_USE_PLATFORM_GGP ) @@ -13007,7 +12564,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateStreamDescriptorSurfaceGGP( static_cast( m_instance ), return static_cast( d.vkCreateStreamDescriptorSurfaceGGP( static_cast( m_instance ), reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), @@ -13033,10 +12589,8 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createStreamDescriptorSurfaceGGP" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createStreamDescriptorSurfaceGGP" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surface ) ); - return 
VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surface ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -13058,12 +12612,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createStreamDescriptorSurfaceGGPUnique" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createStreamDescriptorSurfaceGGPUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, UniqueHandle( surface, detail::ObjectDestroy( *this, allocator, d ) ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( surface, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -13084,7 +12635,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( - d.vkGetPhysicalDeviceExternalImageFormatPropertiesNV( static_cast( m_physicalDevice ), d.vkGetPhysicalDeviceExternalImageFormatPropertiesNV( static_cast( m_physicalDevice ), static_cast( format ), static_cast( type ), @@ -13123,10 +12673,8 @@ namespace VULKAN_HPP_NAMESPACE static_cast( externalHandleType ), reinterpret_cast( &externalImageFormatProperties ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getExternalImageFormatPropertiesNV" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getExternalImageFormatPropertiesNV" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( externalImageFormatProperties ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( externalImageFormatProperties ) ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -13142,8 +12690,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( d.vkGetMemoryWin32HandleNV( static_cast( m_device ), static_cast( memory ), static_cast( handleType ), pHandle ) ); - return static_cast( d.vkGetMemoryWin32HandleNV( - static_cast( m_device ), static_cast( memory ), static_cast( handleType ), pHandle ) ); } # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -13160,10 +12706,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetMemoryWin32HandleNV( m_device, static_cast( memory ), static_cast( handleType ), &handle ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandleNV" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandleNV" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( handle ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( handle ) ); } # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ #endif /*VK_USE_PLATFORM_WIN32_KHR*/ @@ -13176,7 +12720,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkGetPhysicalDeviceFeatures2KHR( static_cast( m_physicalDevice ), reinterpret_cast( pFeatures ) ); - d.vkGetPhysicalDeviceFeatures2KHR( static_cast( m_physicalDevice ), reinterpret_cast( pFeatures ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -13220,7 +12763,6 @@ namespace VULKAN_HPP_NAMESPACE { 
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkGetPhysicalDeviceProperties2KHR( static_cast( m_physicalDevice ), reinterpret_cast( pProperties ) ); - d.vkGetPhysicalDeviceProperties2KHR( static_cast( m_physicalDevice ), reinterpret_cast( pProperties ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -13266,7 +12808,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkGetPhysicalDeviceFormatProperties2KHR( static_cast( m_physicalDevice ), static_cast( format ), reinterpret_cast( pFormatProperties ) ); - static_cast( m_physicalDevice ), static_cast( format ), reinterpret_cast( pFormatProperties ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -13313,7 +12854,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetPhysicalDeviceImageFormatProperties2KHR( static_cast( m_physicalDevice ), return static_cast( d.vkGetPhysicalDeviceImageFormatProperties2KHR( static_cast( m_physicalDevice ), reinterpret_cast( pImageFormatInfo ), reinterpret_cast( pImageFormatProperties ) ) ); @@ -13336,10 +12876,8 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &imageFormatInfo ), reinterpret_cast( &imageFormatProperties ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2KHR" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2KHR" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( imageFormatProperties ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( imageFormatProperties ) ); } template @@ -13359,10 +12897,8 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &imageFormatInfo ), reinterpret_cast( &imageFormatProperties ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2KHR" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2KHR" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( structureChain ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( structureChain ) ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -13374,14 +12910,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( static_cast( m_physicalDevice ), pQueueFamilyPropertyCount, reinterpret_cast( pQueueFamilyProperties ) ); - static_cast( m_physicalDevice ), pQueueFamilyPropertyCount, reinterpret_cast( pQueueFamilyProperties ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template < - typename QueueFamilyProperties2Allocator, - typename Dispatch, - typename std::enable_if::value, int>::type> template < typename QueueFamilyProperties2Allocator, typename Dispatch, @@ -13438,10 +12969,6 @@ namespace VULKAN_HPP_NAMESPACE return queueFamilyProperties; } - template ::value, int>::type> template ( m_physicalDevice ), reinterpret_cast( pMemoryProperties ) ); - d.vkGetPhysicalDeviceMemoryProperties2KHR( static_cast( m_physicalDevice ), - reinterpret_cast( pMemoryProperties ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -13573,7 +13098,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { 
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( static_cast( m_physicalDevice ), d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( static_cast( m_physicalDevice ), reinterpret_cast( pFormatInfo ), pPropertyCount, @@ -13581,11 +13105,6 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template < - typename SparseImageFormatProperties2Allocator, - typename Dispatch, - typename std::enable_if::value, - int>::type> template < typename SparseImageFormatProperties2Allocator, typename Dispatch, @@ -13666,7 +13185,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkGetDeviceGroupPeerMemoryFeaturesKHR( static_cast( m_device ), heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast( pPeerMemoryFeatures ) ); - static_cast( m_device ), heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast( pPeerMemoryFeatures ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -13693,7 +13211,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdSetDeviceMaskKHR( static_cast( m_commandBuffer ), deviceMask ); - d.vkCmdSetDeviceMaskKHR( static_cast( m_commandBuffer ), deviceMask ); } template @@ -13707,7 +13224,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdDispatchBaseKHR( static_cast( m_commandBuffer ), baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ ); - d.vkCmdDispatchBaseKHR( static_cast( m_commandBuffer ), baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ ); } #if defined( VK_USE_PLATFORM_VI_NN ) @@ -13720,7 +13236,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateViSurfaceNN( static_cast( m_instance ), return static_cast( d.vkCreateViSurfaceNN( static_cast( m_instance ), reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), @@ -13746,10 +13261,8 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createViSurfaceNN" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createViSurfaceNN" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surface ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surface ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -13771,12 +13284,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createViSurfaceNNUnique" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createViSurfaceNNUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, UniqueHandle( surface, detail::ObjectDestroy( *this, allocator, d ) ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( surface, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -13791,7 +13301,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( 
d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkTrimCommandPoolKHR( static_cast( m_device ), static_cast( commandPool ), static_cast( flags ) ); - d.vkTrimCommandPoolKHR( static_cast( m_device ), static_cast( commandPool ), static_cast( flags ) ); } //=== VK_KHR_device_group_creation === @@ -13806,17 +13315,9 @@ namespace VULKAN_HPP_NAMESPACE return static_cast( d.vkEnumeratePhysicalDeviceGroupsKHR( static_cast( m_instance ), pPhysicalDeviceGroupCount, reinterpret_cast( pPhysicalDeviceGroupProperties ) ) ); - return static_cast( d.vkEnumeratePhysicalDeviceGroupsKHR( static_cast( m_instance ), - pPhysicalDeviceGroupCount, - reinterpret_cast( pPhysicalDeviceGroupProperties ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template ::value, - int>::type> template ( m_physicalDevice ), d.vkGetPhysicalDeviceExternalBufferPropertiesKHR( static_cast( m_physicalDevice ), reinterpret_cast( pExternalBufferInfo ), reinterpret_cast( pExternalBufferProperties ) ); @@ -13943,8 +13439,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( d.vkGetMemoryWin32HandleKHR( static_cast( m_device ), reinterpret_cast( pGetWin32HandleInfo ), pHandle ) ); - return static_cast( d.vkGetMemoryWin32HandleKHR( - static_cast( m_device ), reinterpret_cast( pGetWin32HandleInfo ), pHandle ) ); } # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -13961,10 +13455,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetMemoryWin32HandleKHR( m_device, reinterpret_cast( &getWin32HandleInfo ), &handle ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandleKHR" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandleKHR" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( handle ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( handle ) ); } # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -13976,7 +13468,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetMemoryWin32HandlePropertiesKHR( static_cast( m_device ), return static_cast( d.vkGetMemoryWin32HandlePropertiesKHR( static_cast( m_device ), static_cast( handleType ), handle, @@ -14000,10 +13491,8 @@ namespace VULKAN_HPP_NAMESPACE handle, reinterpret_cast( &memoryWin32HandleProperties ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandlePropertiesKHR" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandlePropertiesKHR" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( memoryWin32HandleProperties ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( memoryWin32HandleProperties ) ); } # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ #endif /*VK_USE_PLATFORM_WIN32_KHR*/ @@ -14017,7 +13506,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( d.vkGetMemoryFdKHR( static_cast( m_device ), reinterpret_cast( pGetFdInfo ), pFd ) ); - return static_cast( d.vkGetMemoryFdKHR( static_cast( m_device ), reinterpret_cast( pGetFdInfo ), pFd ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -14034,10 +13522,8 @@ namespace 
VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetMemoryFdKHR( m_device, reinterpret_cast( &getFdInfo ), &fd ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryFdKHR" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryFdKHR" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( fd ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( fd ) ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -14052,10 +13538,6 @@ namespace VULKAN_HPP_NAMESPACE static_cast( handleType ), fd, reinterpret_cast( pMemoryFdProperties ) ) ); - return static_cast( d.vkGetMemoryFdPropertiesKHR( static_cast( m_device ), - static_cast( handleType ), - fd, - reinterpret_cast( pMemoryFdProperties ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -14072,10 +13554,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetMemoryFdPropertiesKHR( m_device, static_cast( handleType ), fd, reinterpret_cast( &memoryFdProperties ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryFdPropertiesKHR" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryFdPropertiesKHR" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( memoryFdProperties ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( memoryFdProperties ) ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -14088,7 +13568,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( static_cast( m_physicalDevice ), d.vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( static_cast( m_physicalDevice ), reinterpret_cast( pExternalSemaphoreInfo ), reinterpret_cast( pExternalSemaphoreProperties ) ); @@ -14125,8 +13604,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( d.vkImportSemaphoreWin32HandleKHR( static_cast( m_device ), reinterpret_cast( pImportSemaphoreWin32HandleInfo ) ) ); - return static_cast( d.vkImportSemaphoreWin32HandleKHR( - static_cast( m_device ), reinterpret_cast( pImportSemaphoreWin32HandleInfo ) ) ); } # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -14143,10 +13620,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkImportSemaphoreWin32HandleKHR( m_device, reinterpret_cast( &importSemaphoreWin32HandleInfo ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreWin32HandleKHR" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreWin32HandleKHR" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -14157,8 +13632,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( d.vkGetSemaphoreWin32HandleKHR( static_cast( m_device ), reinterpret_cast( pGetWin32HandleInfo ), pHandle ) ); - return static_cast( d.vkGetSemaphoreWin32HandleKHR( - static_cast( m_device ), reinterpret_cast( pGetWin32HandleInfo ), pHandle 
) ); } # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -14175,10 +13648,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetSemaphoreWin32HandleKHR( m_device, reinterpret_cast( &getWin32HandleInfo ), &handle ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreWin32HandleKHR" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreWin32HandleKHR" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( handle ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( handle ) ); } # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ #endif /*VK_USE_PLATFORM_WIN32_KHR*/ @@ -14192,8 +13663,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( d.vkImportSemaphoreFdKHR( static_cast( m_device ), reinterpret_cast( pImportSemaphoreFdInfo ) ) ); - return static_cast( - d.vkImportSemaphoreFdKHR( static_cast( m_device ), reinterpret_cast( pImportSemaphoreFdInfo ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -14209,10 +13678,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkImportSemaphoreFdKHR( m_device, reinterpret_cast( &importSemaphoreFdInfo ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreFdKHR" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreFdKHR" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -14224,8 +13691,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( d.vkGetSemaphoreFdKHR( static_cast( m_device ), reinterpret_cast( pGetFdInfo ), pFd ) ); - return static_cast( - d.vkGetSemaphoreFdKHR( static_cast( m_device ), reinterpret_cast( pGetFdInfo ), pFd ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -14242,10 +13707,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetSemaphoreFdKHR( m_device, reinterpret_cast( &getFdInfo ), &fd ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreFdKHR" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreFdKHR" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( fd ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( fd ) ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -14260,7 +13723,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdPushDescriptorSetKHR( static_cast( m_commandBuffer ), d.vkCmdPushDescriptorSetKHR( static_cast( m_commandBuffer ), static_cast( pipelineBindPoint ), static_cast( layout ), @@ -14280,7 +13742,7 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) - VULKAN_HPP_ASSERT( d.vkCmdPushDescriptorSetKHR && "Function requires " ); + VULKAN_HPP_ASSERT( d.vkCmdPushDescriptorSetKHR && "Function requires or " ); # endif d.vkCmdPushDescriptorSetKHR( m_commandBuffer, @@ -14305,11 
+13767,6 @@ namespace VULKAN_HPP_NAMESPACE static_cast( layout ), set, pData ); - d.vkCmdPushDescriptorSetWithTemplateKHR( static_cast( m_commandBuffer ), - static_cast( descriptorUpdateTemplate ), - static_cast( layout ), - set, - pData ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -14322,8 +13779,9 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) - VULKAN_HPP_ASSERT( d.vkCmdPushDescriptorSetWithTemplateKHR && - "Function requires or " ); + VULKAN_HPP_ASSERT( + d.vkCmdPushDescriptorSetWithTemplateKHR && + "Function requires or or " ); # endif d.vkCmdPushDescriptorSetWithTemplateKHR( m_commandBuffer, @@ -14343,8 +13801,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdBeginConditionalRenderingEXT( static_cast( m_commandBuffer ), reinterpret_cast( pConditionalRenderingBegin ) ); - d.vkCmdBeginConditionalRenderingEXT( static_cast( m_commandBuffer ), - reinterpret_cast( pConditionalRenderingBegin ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -14366,7 +13822,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdEndConditionalRenderingEXT( static_cast( m_commandBuffer ) ); - d.vkCmdEndConditionalRenderingEXT( static_cast( m_commandBuffer ) ); } //=== VK_KHR_descriptor_update_template === @@ -14379,7 +13834,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateDescriptorUpdateTemplateKHR( static_cast( m_device ), return static_cast( d.vkCreateDescriptorUpdateTemplateKHR( static_cast( m_device ), reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), @@ -14406,10 +13860,8 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &descriptorUpdateTemplate ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplateKHR" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplateKHR" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( descriptorUpdateTemplate ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( descriptorUpdateTemplate ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -14432,14 +13884,10 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &descriptorUpdateTemplate ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplateKHRUnique" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplateKHRUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, UniqueHandle( descriptorUpdateTemplate, detail::ObjectDestroy( *this, allocator, d ) ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, - UniqueHandle( - descriptorUpdateTemplate, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -14453,9 +13901,6 @@ namespace VULKAN_HPP_NAMESPACE d.vkDestroyDescriptorUpdateTemplateKHR( static_cast( m_device ), static_cast( descriptorUpdateTemplate ), reinterpret_cast( pAllocator ) ); - 
d.vkDestroyDescriptorUpdateTemplateKHR( static_cast( m_device ), - static_cast( descriptorUpdateTemplate ), - reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -14488,10 +13933,6 @@ namespace VULKAN_HPP_NAMESPACE static_cast( descriptorSet ), static_cast( descriptorUpdateTemplate ), pData ); - d.vkUpdateDescriptorSetWithTemplateKHR( static_cast( m_device ), - static_cast( descriptorSet ), - static_cast( descriptorUpdateTemplate ), - pData ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -14525,8 +13966,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdSetViewportWScalingNV( static_cast( m_commandBuffer ), firstViewport, viewportCount, reinterpret_cast( pViewportWScalings ) ); - d.vkCmdSetViewportWScalingNV( - static_cast( m_commandBuffer ), firstViewport, viewportCount, reinterpret_cast( pViewportWScalings ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -14554,7 +13993,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( d.vkReleaseDisplayEXT( static_cast( m_physicalDevice ), static_cast( display ) ) ); - return static_cast( d.vkReleaseDisplayEXT( static_cast( m_physicalDevice ), static_cast( display ) ) ); } #else template @@ -14579,7 +14017,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( d.vkAcquireXlibDisplayEXT( static_cast( m_physicalDevice ), dpy, static_cast( display ) ) ); - return static_cast( d.vkAcquireXlibDisplayEXT( static_cast( m_physicalDevice ), dpy, static_cast( display ) ) ); } # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -14595,10 +14032,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkAcquireXlibDisplayEXT( m_physicalDevice, &dpy, static_cast( display ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::acquireXlibDisplayEXT" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::acquireXlibDisplayEXT" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -14611,8 +14046,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( d.vkGetRandROutputDisplayEXT( static_cast( m_physicalDevice ), dpy, rrOutput, reinterpret_cast( pDisplay ) ) ); - return static_cast( - d.vkGetRandROutputDisplayEXT( static_cast( m_physicalDevice ), dpy, rrOutput, reinterpret_cast( pDisplay ) ) ); } # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -14629,10 +14062,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetRandROutputDisplayEXT( m_physicalDevice, &dpy, rrOutput, reinterpret_cast( &display ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getRandROutputDisplayEXT" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getRandROutputDisplayEXT" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( display ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( display ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -14649,12 +14080,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( 
d.vkGetRandROutputDisplayEXT( m_physicalDevice, &dpy, rrOutput, reinterpret_cast( &display ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getRandROutputDisplayEXTUnique" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getRandROutputDisplayEXTUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, UniqueHandle( display, detail::ObjectRelease( *this, d ) ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( display, detail::ObjectRelease( *this, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -14672,9 +14100,6 @@ namespace VULKAN_HPP_NAMESPACE return static_cast( d.vkGetPhysicalDeviceSurfaceCapabilities2EXT( static_cast( m_physicalDevice ), static_cast( surface ), reinterpret_cast( pSurfaceCapabilities ) ) ); - return static_cast( d.vkGetPhysicalDeviceSurfaceCapabilities2EXT( static_cast( m_physicalDevice ), - static_cast( surface ), - reinterpret_cast( pSurfaceCapabilities ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -14692,10 +14117,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetPhysicalDeviceSurfaceCapabilities2EXT( m_physicalDevice, static_cast( surface ), reinterpret_cast( &surfaceCapabilities ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2EXT" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2EXT" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surfaceCapabilities ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surfaceCapabilities ) ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -14709,8 +14132,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( d.vkDisplayPowerControlEXT( static_cast( m_device ), static_cast( display ), reinterpret_cast( pDisplayPowerInfo ) ) ); - return static_cast( d.vkDisplayPowerControlEXT( - static_cast( m_device ), static_cast( display ), reinterpret_cast( pDisplayPowerInfo ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -14727,10 +14148,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkDisplayPowerControlEXT( m_device, static_cast( display ), reinterpret_cast( &displayPowerInfo ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::displayPowerControlEXT" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::displayPowerControlEXT" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -14741,7 +14160,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkRegisterDeviceEventEXT( static_cast( m_device ), return static_cast( d.vkRegisterDeviceEventEXT( static_cast( m_device ), reinterpret_cast( pDeviceEventInfo ), reinterpret_cast( pAllocator ), @@ -14767,10 +14185,8 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &fence ) ) ); 
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::registerEventEXT" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::registerEventEXT" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( fence ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( fence ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -14792,12 +14208,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &fence ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::registerEventEXTUnique" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::registerEventEXTUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, UniqueHandle( fence, detail::ObjectDestroy( *this, allocator, d ) ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( fence, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -14810,7 +14223,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkRegisterDisplayEventEXT( static_cast( m_device ), return static_cast( d.vkRegisterDisplayEventEXT( static_cast( m_device ), static_cast( display ), reinterpret_cast( pDisplayEventInfo ), @@ -14839,10 +14251,8 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &fence ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::registerDisplayEventEXT" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::registerDisplayEventEXT" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( fence ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( fence ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -14866,12 +14276,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &fence ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::registerDisplayEventEXTUnique" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::registerDisplayEventEXTUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, UniqueHandle( fence, detail::ObjectDestroy( *this, allocator, d ) ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( fence, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -14885,8 +14292,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( d.vkGetSwapchainCounterEXT( static_cast( m_device ), static_cast( swapchain ), static_cast( counter ), pCounterValue ) ); - return static_cast( d.vkGetSwapchainCounterEXT( - static_cast( m_device ), static_cast( swapchain ), static_cast( counter ), pCounterValue ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -14903,10 +14308,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetSwapchainCounterEXT( m_device, static_cast( swapchain ), static_cast( 
counter ), &counterValue ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainCounterEXT" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainCounterEXT" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( counterValue ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( counterValue ) ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -14922,9 +14325,6 @@ namespace VULKAN_HPP_NAMESPACE return static_cast( d.vkGetRefreshCycleDurationGOOGLE( static_cast( m_device ), static_cast( swapchain ), reinterpret_cast( pDisplayTimingProperties ) ) ); - return static_cast( d.vkGetRefreshCycleDurationGOOGLE( static_cast( m_device ), - static_cast( swapchain ), - reinterpret_cast( pDisplayTimingProperties ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -14941,10 +14341,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetRefreshCycleDurationGOOGLE( m_device, static_cast( swapchain ), reinterpret_cast( &displayTimingProperties ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getRefreshCycleDurationGOOGLE" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getRefreshCycleDurationGOOGLE" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( displayTimingProperties ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( displayTimingProperties ) ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -14956,7 +14354,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetPastPresentationTimingGOOGLE( static_cast( m_device ), return static_cast( d.vkGetPastPresentationTimingGOOGLE( static_cast( m_device ), static_cast( swapchain ), pPresentationTimingCount, @@ -14964,11 +14361,6 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template < - typename PastPresentationTimingGOOGLEAllocator, - typename Dispatch, - typename std::enable_if::value, - int>::type> template < typename PastPresentationTimingGOOGLEAllocator, typename Dispatch, @@ -15001,14 +14393,12 @@ namespace VULKAN_HPP_NAMESPACE } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPastPresentationTimingGOOGLE" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPastPresentationTimingGOOGLE" ); VULKAN_HPP_ASSERT( presentationTimingCount <= presentationTimings.size() ); if ( presentationTimingCount < presentationTimings.size() ) { presentationTimings.resize( presentationTimingCount ); } return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( presentationTimings ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( presentationTimings ) ); } template < @@ -15046,14 +14436,12 @@ namespace VULKAN_HPP_NAMESPACE } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPastPresentationTimingGOOGLE" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPastPresentationTimingGOOGLE" ); 
VULKAN_HPP_ASSERT( presentationTimingCount <= presentationTimings.size() ); if ( presentationTimingCount < presentationTimings.size() ) { presentationTimings.resize( presentationTimingCount ); } return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( presentationTimings ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( presentationTimings ) ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -15068,8 +14456,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdSetDiscardRectangleEXT( static_cast( m_commandBuffer ), firstDiscardRectangle, discardRectangleCount, reinterpret_cast( pDiscardRectangles ) ); - d.vkCmdSetDiscardRectangleEXT( - static_cast( m_commandBuffer ), firstDiscardRectangle, discardRectangleCount, reinterpret_cast( pDiscardRectangles ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -15094,7 +14480,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdSetDiscardRectangleEnableEXT( static_cast( m_commandBuffer ), static_cast( discardRectangleEnable ) ); - d.vkCmdSetDiscardRectangleEnableEXT( static_cast( m_commandBuffer ), static_cast( discardRectangleEnable ) ); } template @@ -15103,7 +14488,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdSetDiscardRectangleModeEXT( static_cast( m_commandBuffer ), static_cast( discardRectangleMode ) ); - d.vkCmdSetDiscardRectangleModeEXT( static_cast( m_commandBuffer ), static_cast( discardRectangleMode ) ); } //=== VK_EXT_hdr_metadata === @@ -15119,10 +14503,6 @@ namespace VULKAN_HPP_NAMESPACE swapchainCount, reinterpret_cast( pSwapchains ), reinterpret_cast( pMetadata ) ); - d.vkSetHdrMetadataEXT( static_cast( m_device ), - swapchainCount, - reinterpret_cast( pSwapchains ), - reinterpret_cast( pMetadata ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -15160,7 +14540,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateRenderPass2KHR( static_cast( m_device ), return static_cast( d.vkCreateRenderPass2KHR( static_cast( m_device ), reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), @@ -15186,10 +14565,8 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &renderPass ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2KHR" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2KHR" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( renderPass ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( renderPass ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -15211,12 +14588,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &renderPass ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2KHRUnique" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2KHRUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, UniqueHandle( renderPass, detail::ObjectDestroy( *this, allocator, d ) ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, 
UniqueHandle( renderPass, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -15230,9 +14604,6 @@ namespace VULKAN_HPP_NAMESPACE d.vkCmdBeginRenderPass2KHR( static_cast( m_commandBuffer ), reinterpret_cast( pRenderPassBegin ), reinterpret_cast( pSubpassBeginInfo ) ); - d.vkCmdBeginRenderPass2KHR( static_cast( m_commandBuffer ), - reinterpret_cast( pRenderPassBegin ), - reinterpret_cast( pSubpassBeginInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -15260,9 +14631,6 @@ namespace VULKAN_HPP_NAMESPACE d.vkCmdNextSubpass2KHR( static_cast( m_commandBuffer ), reinterpret_cast( pSubpassBeginInfo ), reinterpret_cast( pSubpassEndInfo ) ); - d.vkCmdNextSubpass2KHR( static_cast( m_commandBuffer ), - reinterpret_cast( pSubpassBeginInfo ), - reinterpret_cast( pSubpassEndInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -15287,7 +14655,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdEndRenderPass2KHR( static_cast( m_commandBuffer ), reinterpret_cast( pSubpassEndInfo ) ); - d.vkCmdEndRenderPass2KHR( static_cast( m_commandBuffer ), reinterpret_cast( pSubpassEndInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -15313,7 +14680,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( d.vkGetSwapchainStatusKHR( static_cast( m_device ), static_cast( swapchain ) ) ); - return static_cast( d.vkGetSwapchainStatusKHR( static_cast( m_device ), static_cast( swapchain ) ) ); } #else template @@ -15330,9 +14696,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainStatusKHR", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, - VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainStatusKHR", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } ); return static_cast( result ); } @@ -15346,7 +14709,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetPhysicalDeviceExternalFencePropertiesKHR( static_cast( m_physicalDevice ), d.vkGetPhysicalDeviceExternalFencePropertiesKHR( static_cast( m_physicalDevice ), reinterpret_cast( pExternalFenceInfo ), reinterpret_cast( pExternalFenceProperties ) ); @@ -15383,8 +14745,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( d.vkImportFenceWin32HandleKHR( static_cast( m_device ), reinterpret_cast( pImportFenceWin32HandleInfo ) ) ); - return static_cast( d.vkImportFenceWin32HandleKHR( static_cast( m_device ), - reinterpret_cast( pImportFenceWin32HandleInfo ) ) ); } # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -15400,10 +14760,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkImportFenceWin32HandleKHR( m_device, reinterpret_cast( &importFenceWin32HandleInfo ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importFenceWin32HandleKHR" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importFenceWin32HandleKHR" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); - return 
VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -15415,7 +14773,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( d.vkGetFenceWin32HandleKHR( static_cast( m_device ), reinterpret_cast( pGetWin32HandleInfo ), pHandle ) ); - d.vkGetFenceWin32HandleKHR( static_cast( m_device ), reinterpret_cast( pGetWin32HandleInfo ), pHandle ) ); } # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -15432,10 +14789,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetFenceWin32HandleKHR( m_device, reinterpret_cast( &getWin32HandleInfo ), &handle ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceWin32HandleKHR" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceWin32HandleKHR" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( handle ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( handle ) ); } # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ #endif /*VK_USE_PLATFORM_WIN32_KHR*/ @@ -15449,8 +14804,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( d.vkImportFenceFdKHR( static_cast( m_device ), reinterpret_cast( pImportFenceFdInfo ) ) ); - return static_cast( - d.vkImportFenceFdKHR( static_cast( m_device ), reinterpret_cast( pImportFenceFdInfo ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -15466,10 +14819,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkImportFenceFdKHR( m_device, reinterpret_cast( &importFenceFdInfo ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importFenceFdKHR" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importFenceFdKHR" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -15480,7 +14831,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( d.vkGetFenceFdKHR( static_cast( m_device ), reinterpret_cast( pGetFdInfo ), pFd ) ); - return static_cast( d.vkGetFenceFdKHR( static_cast( m_device ), reinterpret_cast( pGetFdInfo ), pFd ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -15497,10 +14847,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetFenceFdKHR( m_device, reinterpret_cast( &getFdInfo ), &fd ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceFdKHR" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceFdKHR" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( fd ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( fd ) ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -15516,7 +14864,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( - d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( static_cast( m_physicalDevice ), d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( static_cast( m_physicalDevice ), 
queueFamilyIndex, pCounterCount, @@ -15525,13 +14872,6 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template ::value && - std::is_same::value, - int>::type> template ( m_physicalDevice ), reinterpret_cast( pPerformanceQueryCreateInfo ), pNumPasses ); - d.vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( static_cast( m_physicalDevice ), - reinterpret_cast( pPerformanceQueryCreateInfo ), - pNumPasses ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -15684,8 +15017,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( d.vkAcquireProfilingLockKHR( static_cast( m_device ), reinterpret_cast( pInfo ) ) ); - return static_cast( - d.vkAcquireProfilingLockKHR( static_cast( m_device ), reinterpret_cast( pInfo ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -15701,10 +15032,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkAcquireProfilingLockKHR( m_device, reinterpret_cast( &info ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::acquireProfilingLockKHR" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::acquireProfilingLockKHR" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -15713,7 +15042,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkReleaseProfilingLockKHR( static_cast( m_device ) ); - d.vkReleaseProfilingLockKHR( static_cast( m_device ) ); } //=== VK_KHR_get_surface_capabilities2 === @@ -15725,7 +15053,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( static_cast( m_physicalDevice ), return static_cast( d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( static_cast( m_physicalDevice ), reinterpret_cast( pSurfaceInfo ), reinterpret_cast( pSurfaceCapabilities ) ) ); @@ -15748,10 +15075,8 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &surfaceInfo ), reinterpret_cast( &surfaceCapabilities ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2KHR" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2KHR" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surfaceCapabilities ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surfaceCapabilities ) ); } template @@ -15771,10 +15096,8 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &surfaceInfo ), reinterpret_cast( &surfaceCapabilities ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2KHR" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2KHR" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( structureChain ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( structureChain ) ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -15785,7 +15108,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const 
VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetPhysicalDeviceSurfaceFormats2KHR( static_cast( m_physicalDevice ), return static_cast( d.vkGetPhysicalDeviceSurfaceFormats2KHR( static_cast( m_physicalDevice ), reinterpret_cast( pSurfaceInfo ), pSurfaceFormatCount, @@ -15793,9 +15115,6 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template ::value, int>::type> template ::value, int>::type> @@ -15826,14 +15145,12 @@ namespace VULKAN_HPP_NAMESPACE } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormats2KHR" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormats2KHR" ); VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() ); if ( surfaceFormatCount < surfaceFormats.size() ) { surfaceFormats.resize( surfaceFormatCount ); } return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surfaceFormats ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surfaceFormats ) ); } template ::value, int>::type> template () = surfaceFormats[i]; } return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( structureChains ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( structureChains ) ); } template () = surfaceFormats[i]; } return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( structureChains ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( structureChains ) ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -15997,15 +15304,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( d.vkGetPhysicalDeviceDisplayProperties2KHR( static_cast( m_physicalDevice ), pPropertyCount, reinterpret_cast( pProperties ) ) ); - return static_cast( d.vkGetPhysicalDeviceDisplayProperties2KHR( - static_cast( m_physicalDevice ), pPropertyCount, reinterpret_cast( pProperties ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template < - typename DisplayProperties2KHRAllocator, - typename Dispatch, - typename std::enable_if::value, int>::type> template < typename DisplayProperties2KHRAllocator, typename Dispatch, @@ -16034,14 +15335,12 @@ namespace VULKAN_HPP_NAMESPACE } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayProperties2KHR" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayProperties2KHR" ); VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); if ( propertyCount < properties.size() ) { properties.resize( propertyCount ); } return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); } template < @@ -16072,14 +15371,12 @@ namespace VULKAN_HPP_NAMESPACE } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayProperties2KHR" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayProperties2KHR" ); 
VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); if ( propertyCount < properties.size() ) { properties.resize( propertyCount ); } return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -16091,16 +15388,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( static_cast( m_physicalDevice ), pPropertyCount, reinterpret_cast( pProperties ) ) ); - return static_cast( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( - static_cast( m_physicalDevice ), pPropertyCount, reinterpret_cast( pProperties ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template < - typename DisplayPlaneProperties2KHRAllocator, - typename Dispatch, - typename std::enable_if::value, - int>::type> template < typename DisplayPlaneProperties2KHRAllocator, typename Dispatch, @@ -16130,14 +15420,12 @@ namespace VULKAN_HPP_NAMESPACE } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneProperties2KHR" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneProperties2KHR" ); VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); if ( propertyCount < properties.size() ) { properties.resize( propertyCount ); } return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); } template < @@ -16169,14 +15457,12 @@ namespace VULKAN_HPP_NAMESPACE } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneProperties2KHR" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneProperties2KHR" ); VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); if ( propertyCount < properties.size() ) { properties.resize( propertyCount ); } return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -16191,18 +15477,9 @@ namespace VULKAN_HPP_NAMESPACE static_cast( display ), pPropertyCount, reinterpret_cast( pProperties ) ) ); - return static_cast( d.vkGetDisplayModeProperties2KHR( static_cast( m_physicalDevice ), - static_cast( display ), - pPropertyCount, - reinterpret_cast( pProperties ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template < - typename DisplayModeProperties2KHRAllocator, - typename Dispatch, - typename std::enable_if::value, - int>::type> template < typename DisplayModeProperties2KHRAllocator, typename Dispatch, @@ -16232,14 +15509,12 @@ namespace VULKAN_HPP_NAMESPACE } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModeProperties2KHR" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModeProperties2KHR" ); VULKAN_HPP_ASSERT( propertyCount <= 
properties.size() ); if ( propertyCount < properties.size() ) { properties.resize( propertyCount ); } return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); } template < @@ -16273,7 +15548,6 @@ namespace VULKAN_HPP_NAMESPACE } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModeProperties2KHR" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModeProperties2KHR" ); VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); if ( propertyCount < properties.size() ) { @@ -16281,6 +15555,98 @@ namespace VULKAN_HPP_NAMESPACE } return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDisplayModeProperties2KHR && "Function requires " ); +# endif + + std::vector structureChains; + std::vector properties; + uint32_t propertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + d.vkGetDisplayModeProperties2KHR( m_physicalDevice, static_cast( display ), &propertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) + { + structureChains.resize( propertyCount ); + properties.resize( propertyCount ); + for ( uint32_t i = 0; i < propertyCount; i++ ) + { + properties[i].pNext = structureChains[i].template get().pNext; + } + result = static_cast( d.vkGetDisplayModeProperties2KHR( + m_physicalDevice, static_cast( display ), &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModeProperties2KHR" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + structureChains.resize( propertyCount ); + } + for ( uint32_t i = 0; i < propertyCount; i++ ) + { + structureChains[i].template get() = properties[i]; + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( structureChains ) ); + } + + template ::value, int>::type> + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type + PhysicalDevice::getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, + StructureChainAllocator & structureChainAllocator, + Dispatch const & d ) const + { + VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); +# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) + VULKAN_HPP_ASSERT( d.vkGetDisplayModeProperties2KHR && "Function requires " ); +# endif + + std::vector structureChains( structureChainAllocator ); + std::vector properties; + uint32_t propertyCount; + VULKAN_HPP_NAMESPACE::Result result; + do + { + result = static_cast( + d.vkGetDisplayModeProperties2KHR( m_physicalDevice, static_cast( display ), &propertyCount, nullptr ) ); + if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) + { + 
structureChains.resize( propertyCount ); + properties.resize( propertyCount ); + for ( uint32_t i = 0; i < propertyCount; i++ ) + { + properties[i].pNext = structureChains[i].template get().pNext; + } + result = static_cast( d.vkGetDisplayModeProperties2KHR( + m_physicalDevice, static_cast( display ), &propertyCount, reinterpret_cast( properties.data() ) ) ); + } + } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModeProperties2KHR" ); + VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); + if ( propertyCount < properties.size() ) + { + structureChains.resize( propertyCount ); + } + for ( uint32_t i = 0; i < propertyCount; i++ ) + { + structureChains[i].template get() = properties[i]; + } + return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( structureChains ) ); + } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template @@ -16290,7 +15656,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetDisplayPlaneCapabilities2KHR( static_cast( m_physicalDevice ), return static_cast( d.vkGetDisplayPlaneCapabilities2KHR( static_cast( m_physicalDevice ), reinterpret_cast( pDisplayPlaneInfo ), reinterpret_cast( pCapabilities ) ) ); @@ -16312,10 +15677,8 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &displayPlaneInfo ), reinterpret_cast( &capabilities ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneCapabilities2KHR" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneCapabilities2KHR" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( capabilities ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( capabilities ) ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -16329,7 +15692,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateIOSSurfaceMVK( static_cast( m_instance ), return static_cast( d.vkCreateIOSSurfaceMVK( static_cast( m_instance ), reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), @@ -16355,10 +15717,8 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createIOSSurfaceMVK" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createIOSSurfaceMVK" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surface ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surface ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -16380,12 +15740,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createIOSSurfaceMVKUnique" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createIOSSurfaceMVKUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, UniqueHandle( surface, detail::ObjectDestroy( *this, 
allocator, d ) ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( surface, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -16401,7 +15758,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateMacOSSurfaceMVK( static_cast( m_instance ), return static_cast( d.vkCreateMacOSSurfaceMVK( static_cast( m_instance ), reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), @@ -16427,10 +15783,8 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createMacOSSurfaceMVK" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createMacOSSurfaceMVK" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surface ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surface ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -16452,12 +15806,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createMacOSSurfaceMVKUnique" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createMacOSSurfaceMVKUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, UniqueHandle( surface, detail::ObjectDestroy( *this, allocator, d ) ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( surface, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -16472,8 +15823,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( d.vkSetDebugUtilsObjectNameEXT( static_cast( m_device ), reinterpret_cast( pNameInfo ) ) ); - return static_cast( - d.vkSetDebugUtilsObjectNameEXT( static_cast( m_device ), reinterpret_cast( pNameInfo ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -16489,10 +15838,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkSetDebugUtilsObjectNameEXT( m_device, reinterpret_cast( &nameInfo ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setDebugUtilsObjectNameEXT" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setDebugUtilsObjectNameEXT" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -16503,8 +15850,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( d.vkSetDebugUtilsObjectTagEXT( static_cast( m_device ), reinterpret_cast( pTagInfo ) ) ); - return static_cast( - d.vkSetDebugUtilsObjectTagEXT( static_cast( m_device ), reinterpret_cast( pTagInfo ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -16520,10 +15865,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkSetDebugUtilsObjectTagEXT( m_device, 
reinterpret_cast( &tagInfo ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setDebugUtilsObjectTagEXT" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setDebugUtilsObjectTagEXT" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -16533,7 +15876,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkQueueBeginDebugUtilsLabelEXT( static_cast( m_queue ), reinterpret_cast( pLabelInfo ) ); - d.vkQueueBeginDebugUtilsLabelEXT( static_cast( m_queue ), reinterpret_cast( pLabelInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -16555,7 +15897,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkQueueEndDebugUtilsLabelEXT( static_cast( m_queue ) ); - d.vkQueueEndDebugUtilsLabelEXT( static_cast( m_queue ) ); } template @@ -16564,7 +15905,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkQueueInsertDebugUtilsLabelEXT( static_cast( m_queue ), reinterpret_cast( pLabelInfo ) ); - d.vkQueueInsertDebugUtilsLabelEXT( static_cast( m_queue ), reinterpret_cast( pLabelInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -16587,7 +15927,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdBeginDebugUtilsLabelEXT( static_cast( m_commandBuffer ), reinterpret_cast( pLabelInfo ) ); - d.vkCmdBeginDebugUtilsLabelEXT( static_cast( m_commandBuffer ), reinterpret_cast( pLabelInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -16609,7 +15948,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdEndDebugUtilsLabelEXT( static_cast( m_commandBuffer ) ); - d.vkCmdEndDebugUtilsLabelEXT( static_cast( m_commandBuffer ) ); } template @@ -16618,7 +15956,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdInsertDebugUtilsLabelEXT( static_cast( m_commandBuffer ), reinterpret_cast( pLabelInfo ) ); - d.vkCmdInsertDebugUtilsLabelEXT( static_cast( m_commandBuffer ), reinterpret_cast( pLabelInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -16643,7 +15980,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateDebugUtilsMessengerEXT( static_cast( m_instance ), return static_cast( d.vkCreateDebugUtilsMessengerEXT( static_cast( m_instance ), reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), @@ -16669,10 +16005,8 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &messenger ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugUtilsMessengerEXT" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugUtilsMessengerEXT" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( messenger ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( messenger ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -16694,14 +16028,10 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( static_cast( allocator ) ), 
reinterpret_cast( &messenger ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugUtilsMessengerEXTUnique" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugUtilsMessengerEXTUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, UniqueHandle( messenger, detail::ObjectDestroy( *this, allocator, d ) ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, - UniqueHandle( messenger, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -16715,9 +16045,6 @@ namespace VULKAN_HPP_NAMESPACE d.vkDestroyDebugUtilsMessengerEXT( static_cast( m_instance ), static_cast( messenger ), reinterpret_cast( pAllocator ) ); - d.vkDestroyDebugUtilsMessengerEXT( static_cast( m_instance ), - static_cast( messenger ), - reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -16747,9 +16074,6 @@ namespace VULKAN_HPP_NAMESPACE d.vkDestroyDebugUtilsMessengerEXT( static_cast( m_instance ), static_cast( messenger ), reinterpret_cast( pAllocator ) ); - d.vkDestroyDebugUtilsMessengerEXT( static_cast( m_instance ), - static_cast( messenger ), - reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -16777,7 +16101,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkSubmitDebugUtilsMessageEXT( static_cast( m_instance ), d.vkSubmitDebugUtilsMessageEXT( static_cast( m_instance ), static_cast( messageSeverity ), static_cast( messageTypes ), @@ -16815,8 +16138,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( d.vkGetAndroidHardwareBufferPropertiesANDROID( static_cast( m_device ), buffer, reinterpret_cast( pProperties ) ) ); - return static_cast( d.vkGetAndroidHardwareBufferPropertiesANDROID( - static_cast( m_device ), buffer, reinterpret_cast( pProperties ) ) ); } # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -16834,10 +16155,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetAndroidHardwareBufferPropertiesANDROID( m_device, &buffer, reinterpret_cast( &properties ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getAndroidHardwareBufferPropertiesANDROID" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getAndroidHardwareBufferPropertiesANDROID" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); } template @@ -16856,10 +16175,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetAndroidHardwareBufferPropertiesANDROID( m_device, &buffer, reinterpret_cast( &properties ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getAndroidHardwareBufferPropertiesANDROID" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getAndroidHardwareBufferPropertiesANDROID" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( structureChain ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( structureChain ) ); } # endif /* 
VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -16872,8 +16189,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( d.vkGetMemoryAndroidHardwareBufferANDROID( static_cast( m_device ), reinterpret_cast( pInfo ), pBuffer ) ); - return static_cast( d.vkGetMemoryAndroidHardwareBufferANDROID( - static_cast( m_device ), reinterpret_cast( pInfo ), pBuffer ) ); } # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -16891,10 +16206,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetMemoryAndroidHardwareBufferANDROID( m_device, reinterpret_cast( &info ), &buffer ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryAndroidHardwareBufferANDROID" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryAndroidHardwareBufferANDROID" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( buffer ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( buffer ) ); } # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ #endif /*VK_USE_PLATFORM_ANDROID_KHR*/ @@ -16912,7 +16225,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateExecutionGraphPipelinesAMDX( static_cast( m_device ), return static_cast( d.vkCreateExecutionGraphPipelinesAMDX( static_cast( m_device ), static_cast( pipelineCache ), createInfoCount, @@ -16922,9 +16234,6 @@ namespace VULKAN_HPP_NAMESPACE } # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template ::value, int>::type> template ::value, int>::type> @@ -16950,9 +16259,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createExecutionGraphPipelinesAMDX", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, - VULKAN_HPP_NAMESPACE_STRING "::Device::createExecutionGraphPipelinesAMDX", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); return ResultValue>( result, std::move( pipelines ) ); } @@ -16983,9 +16289,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createExecutionGraphPipelinesAMDX", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, - VULKAN_HPP_NAMESPACE_STRING "::Device::createExecutionGraphPipelinesAMDX", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); return ResultValue>( result, std::move( pipelines ) ); } @@ -17013,18 +16316,11 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createExecutionGraphPipelineAMDX", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, - VULKAN_HPP_NAMESPACE_STRING "::Device::createExecutionGraphPipelineAMDX", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); return ResultValue( result, std::move( pipeline ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE - template < - typename Dispatch, 
- typename PipelineAllocator, - typename std::enable_if>::value, int>::type> template < typename Dispatch, typename PipelineAllocator, @@ -17052,13 +16348,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createExecutionGraphPipelinesAMDXUnique", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, - VULKAN_HPP_NAMESPACE_STRING "::Device::createExecutionGraphPipelinesAMDXUnique", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); std::vector, PipelineAllocator> uniquePipelines; uniquePipelines.reserve( createInfos.size() ); detail::ObjectDestroy deleter( *this, allocator, d ); - detail::ObjectDestroy deleter( *this, allocator, d ); for ( auto const & pipeline : pipelines ) { uniquePipelines.push_back( UniqueHandle( pipeline, deleter ) ); @@ -17094,13 +16386,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createExecutionGraphPipelinesAMDXUnique", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, - VULKAN_HPP_NAMESPACE_STRING "::Device::createExecutionGraphPipelinesAMDXUnique", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); std::vector, PipelineAllocator> uniquePipelines( pipelineAllocator ); uniquePipelines.reserve( createInfos.size() ); detail::ObjectDestroy deleter( *this, allocator, d ); - detail::ObjectDestroy deleter( *this, allocator, d ); for ( auto const & pipeline : pipelines ) { uniquePipelines.push_back( UniqueHandle( pipeline, deleter ) ); @@ -17131,13 +16419,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createExecutionGraphPipelineAMDXUnique", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, - VULKAN_HPP_NAMESPACE_STRING "::Device::createExecutionGraphPipelineAMDXUnique", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); return ResultValue>( result, UniqueHandle( pipeline, detail::ObjectDestroy( *this, allocator, d ) ) ); - result, UniqueHandle( pipeline, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -17152,9 +16436,6 @@ namespace VULKAN_HPP_NAMESPACE return static_cast( d.vkGetExecutionGraphPipelineScratchSizeAMDX( static_cast( m_device ), static_cast( executionGraph ), reinterpret_cast( pSizeInfo ) ) ); - return static_cast( d.vkGetExecutionGraphPipelineScratchSizeAMDX( static_cast( m_device ), - static_cast( executionGraph ), - reinterpret_cast( pSizeInfo ) ) ); } # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -17172,10 +16453,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetExecutionGraphPipelineScratchSizeAMDX( m_device, static_cast( executionGraph ), reinterpret_cast( &sizeInfo ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getExecutionGraphPipelineScratchSizeAMDX" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING 
"::Device::getExecutionGraphPipelineScratchSizeAMDX" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( sizeInfo ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( sizeInfo ) ); } # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -17191,10 +16470,6 @@ namespace VULKAN_HPP_NAMESPACE static_cast( executionGraph ), reinterpret_cast( pNodeInfo ), pNodeIndex ) ); - return static_cast( d.vkGetExecutionGraphPipelineNodeIndexAMDX( static_cast( m_device ), - static_cast( executionGraph ), - reinterpret_cast( pNodeInfo ), - pNodeIndex ) ); } # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -17211,17 +16486,12 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetExecutionGraphPipelineNodeIndexAMDX( m_device, static_cast( executionGraph ), reinterpret_cast( &nodeInfo ), &nodeIndex ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getExecutionGraphPipelineNodeIndexAMDX" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getExecutionGraphPipelineNodeIndexAMDX" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( nodeIndex ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( nodeIndex ) ); } # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_INLINE void CommandBuffer::initializeGraphScratchMemoryAMDX( VULKAN_HPP_NAMESPACE::Pipeline executionGraph, - VULKAN_HPP_NAMESPACE::DeviceAddress scratch, - VULKAN_HPP_NAMESPACE::DeviceSize scratchSize, VULKAN_HPP_INLINE void CommandBuffer::initializeGraphScratchMemoryAMDX( VULKAN_HPP_NAMESPACE::Pipeline executionGraph, VULKAN_HPP_NAMESPACE::DeviceAddress scratch, VULKAN_HPP_NAMESPACE::DeviceSize scratchSize, @@ -17232,15 +16502,10 @@ namespace VULKAN_HPP_NAMESPACE static_cast( executionGraph ), static_cast( scratch ), static_cast( scratchSize ) ); - d.vkCmdInitializeGraphScratchMemoryAMDX( static_cast( m_commandBuffer ), - static_cast( executionGraph ), - static_cast( scratch ), - static_cast( scratchSize ) ); } template VULKAN_HPP_INLINE void CommandBuffer::dispatchGraphAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch, - VULKAN_HPP_NAMESPACE::DeviceSize scratchSize, VULKAN_HPP_NAMESPACE::DeviceSize scratchSize, const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX * pCountInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT @@ -17250,16 +16515,11 @@ namespace VULKAN_HPP_NAMESPACE static_cast( scratch ), static_cast( scratchSize ), reinterpret_cast( pCountInfo ) ); - d.vkCmdDispatchGraphAMDX( static_cast( m_commandBuffer ), - static_cast( scratch ), - static_cast( scratchSize ), - reinterpret_cast( pCountInfo ) ); } # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template VULKAN_HPP_INLINE void CommandBuffer::dispatchGraphAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch, - VULKAN_HPP_NAMESPACE::DeviceSize scratchSize, VULKAN_HPP_NAMESPACE::DeviceSize scratchSize, const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX & countInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT @@ -17273,16 +16533,11 @@ namespace VULKAN_HPP_NAMESPACE static_cast( scratch ), static_cast( scratchSize ), reinterpret_cast( &countInfo ) ); - d.vkCmdDispatchGraphAMDX( m_commandBuffer, - static_cast( scratch ), - static_cast( scratchSize ), - reinterpret_cast( &countInfo ) ); } # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void CommandBuffer::dispatchGraphIndirectAMDX( 
[Vulkan-Hpp header bundled with the project, hunks @@ -17292,16 +16547,11 @@ through @@ -21212,10 +20028,8 @@: the diff text for this span did not survive extraction (the angle-bracket template arguments were stripped and line breaks collapsed), so only what is recoverable from the hunks is kept here.
Every hunk in this range deletes stray duplicated statements from the vendored generated header: duplicated parameter declarations, duplicated vkCmd*/vkGet*/vkCreate* dispatch calls, duplicated VULKAN_HPP_NAMESPACE::detail::resultCheck(...) and createResultValueType(...) lines, duplicated SFINAE template-parameter declarations, and whole duplicated definitions of CommandBuffer::writeBufferMarker2AMD and Queue::getCheckpointData2NV.
The affected wrappers are the AMDX dispatch-graph commands (dispatchGraphIndirectAMDX, dispatchGraphIndirectCountAMDX), setSampleLocationsEXT and getMultisamplePropertiesEXT, the getImage/getBuffer/getImageSparseMemoryRequirements2KHR queries, the VK_KHR_acceleration_structure and ray-tracing pipeline entry points (create/destroy/build/copy acceleration structures, traceRaysKHR, createRayTracingPipelinesKHR and their Unique variants, shader-group handle queries), createSamplerYcbcrConversionKHR, bindBufferMemory2KHR and bindImageMemory2KHR, getImageDrmFormatModifierPropertiesEXT, the validation-cache functions, the NV shading-rate-image, acceleration-structure, ray-tracing and checkpoint functions, getDescriptorSetLayoutSupportKHR, the indirect-count draw commands, getMemoryHostPointerPropertiesEXT, writeBufferMarkerAMD, the calibrated-timestamp helpers, the NV mesh-shader and exclusive-scissor commands, the timeline-semaphore KHR aliases, and the INTEL performance-query functions.]
VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ @@ -21227,8 +20041,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( d.vkQueueSetPerformanceConfigurationINTEL( static_cast( m_queue ), static_cast( configuration ) ) ); - return static_cast( - d.vkQueueSetPerformanceConfigurationINTEL( static_cast( m_queue ), static_cast( configuration ) ) ); } #else template @@ -21244,10 +20056,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkQueueSetPerformanceConfigurationINTEL( m_queue, static_cast( configuration ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::setPerformanceConfigurationINTEL" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::setPerformanceConfigurationINTEL" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ @@ -21259,7 +20069,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( d.vkGetPerformanceParameterINTEL( static_cast( m_device ), static_cast( parameter ), reinterpret_cast( pValue ) ) ); - static_cast( m_device ), static_cast( parameter ), reinterpret_cast( pValue ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -21276,10 +20085,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetPerformanceParameterINTEL( m_device, static_cast( parameter ), reinterpret_cast( &value ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPerformanceParameterINTEL" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPerformanceParameterINTEL" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( value ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( value ) ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -21292,7 +20099,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkSetLocalDimmingAMD( static_cast( m_device ), static_cast( swapChain ), static_cast( localDimmingEnable ) ); - d.vkSetLocalDimmingAMD( static_cast( m_device ), static_cast( swapChain ), static_cast( localDimmingEnable ) ); } #if defined( VK_USE_PLATFORM_FUCHSIA ) @@ -21306,7 +20112,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateImagePipeSurfaceFUCHSIA( static_cast( m_instance ), return static_cast( d.vkCreateImagePipeSurfaceFUCHSIA( static_cast( m_instance ), reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), @@ -21332,10 +20137,8 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createImagePipeSurfaceFUCHSIA" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createImagePipeSurfaceFUCHSIA" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surface ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( 
surface ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -21357,12 +20160,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createImagePipeSurfaceFUCHSIAUnique" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createImagePipeSurfaceFUCHSIAUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, UniqueHandle( surface, detail::ObjectDestroy( *this, allocator, d ) ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( surface, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -21378,7 +20178,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateMetalSurfaceEXT( static_cast( m_instance ), return static_cast( d.vkCreateMetalSurfaceEXT( static_cast( m_instance ), reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), @@ -21404,10 +20203,8 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createMetalSurfaceEXT" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createMetalSurfaceEXT" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surface ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surface ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -21429,12 +20226,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createMetalSurfaceEXTUnique" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createMetalSurfaceEXTUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, UniqueHandle( surface, detail::ObjectDestroy( *this, allocator, d ) ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( surface, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -21453,18 +20247,9 @@ namespace VULKAN_HPP_NAMESPACE d.vkGetPhysicalDeviceFragmentShadingRatesKHR( static_cast( m_physicalDevice ), pFragmentShadingRateCount, reinterpret_cast( pFragmentShadingRates ) ) ); - return static_cast( - d.vkGetPhysicalDeviceFragmentShadingRatesKHR( static_cast( m_physicalDevice ), - pFragmentShadingRateCount, - reinterpret_cast( pFragmentShadingRates ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template ::value, - int>::type> template ( m_commandBuffer ), reinterpret_cast( pFragmentSize ), reinterpret_cast( combinerOps ) ); - d.vkCmdSetFragmentShadingRateKHR( static_cast( m_commandBuffer ), - reinterpret_cast( pFragmentSize ), - reinterpret_cast( combinerOps ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -21581,65 +20359,52 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_KHR_dynamic_rendering_local_read === template - VULKAN_HPP_INLINE void CommandBuffer::setRenderingAttachmentLocationsKHR( const 
VULKAN_HPP_NAMESPACE::RenderingAttachmentLocationInfoKHR * pLocationInfo, + VULKAN_HPP_INLINE void CommandBuffer::setRenderingAttachmentLocationsKHR( const VULKAN_HPP_NAMESPACE::RenderingAttachmentLocationInfo * pLocationInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdSetRenderingAttachmentLocationsKHR( static_cast( m_commandBuffer ), - reinterpret_cast( pLocationInfo ) ); - d.vkCmdSetRenderingAttachmentLocationsKHR( static_cast( m_commandBuffer ), - reinterpret_cast( pLocationInfo ) ); + reinterpret_cast( pLocationInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void CommandBuffer::setRenderingAttachmentLocationsKHR( const VULKAN_HPP_NAMESPACE::RenderingAttachmentLocationInfoKHR & locationInfo, + VULKAN_HPP_INLINE void CommandBuffer::setRenderingAttachmentLocationsKHR( const VULKAN_HPP_NAMESPACE::RenderingAttachmentLocationInfo & locationInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) VULKAN_HPP_ASSERT( d.vkCmdSetRenderingAttachmentLocationsKHR && - "Function requires " ); + "Function requires or " ); # endif - d.vkCmdSetRenderingAttachmentLocationsKHR( m_commandBuffer, reinterpret_cast( &locationInfo ) ); + d.vkCmdSetRenderingAttachmentLocationsKHR( m_commandBuffer, reinterpret_cast( &locationInfo ) ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void - CommandBuffer::setRenderingInputAttachmentIndicesKHR( const VULKAN_HPP_NAMESPACE::RenderingInputAttachmentIndexInfoKHR * pInputAttachmentIndexInfo, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT - VULKAN_HPP_INLINE void - CommandBuffer::setRenderingInputAttachmentIndicesKHR( const VULKAN_HPP_NAMESPACE::RenderingInputAttachmentIndexInfoKHR * pInputAttachmentIndexInfo, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + CommandBuffer::setRenderingInputAttachmentIndicesKHR( const VULKAN_HPP_NAMESPACE::RenderingInputAttachmentIndexInfo * pInputAttachmentIndexInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdSetRenderingInputAttachmentIndicesKHR( static_cast( m_commandBuffer ), - reinterpret_cast( pInputAttachmentIndexInfo ) ); - d.vkCmdSetRenderingInputAttachmentIndicesKHR( static_cast( m_commandBuffer ), - reinterpret_cast( pInputAttachmentIndexInfo ) ); + reinterpret_cast( pInputAttachmentIndexInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template VULKAN_HPP_INLINE void - CommandBuffer::setRenderingInputAttachmentIndicesKHR( const VULKAN_HPP_NAMESPACE::RenderingInputAttachmentIndexInfoKHR & inputAttachmentIndexInfo, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT - VULKAN_HPP_INLINE void - CommandBuffer::setRenderingInputAttachmentIndicesKHR( const VULKAN_HPP_NAMESPACE::RenderingInputAttachmentIndexInfoKHR & inputAttachmentIndexInfo, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + CommandBuffer::setRenderingInputAttachmentIndicesKHR( const VULKAN_HPP_NAMESPACE::RenderingInputAttachmentIndexInfo & inputAttachmentIndexInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) VULKAN_HPP_ASSERT( d.vkCmdSetRenderingInputAttachmentIndicesKHR && - "Function requires " ); + "Function requires or " ); # endif - d.vkCmdSetRenderingInputAttachmentIndicesKHR( m_commandBuffer, - 
reinterpret_cast( &inputAttachmentIndexInfo ) ); - d.vkCmdSetRenderingInputAttachmentIndicesKHR( m_commandBuffer, - reinterpret_cast( &inputAttachmentIndexInfo ) ); + d.vkCmdSetRenderingInputAttachmentIndicesKHR( m_commandBuffer, reinterpret_cast( &inputAttachmentIndexInfo ) ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -21652,8 +20417,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( d.vkGetBufferDeviceAddressEXT( static_cast( m_device ), reinterpret_cast( pInfo ) ) ); - return static_cast( - d.vkGetBufferDeviceAddressEXT( static_cast( m_device ), reinterpret_cast( pInfo ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -21683,16 +20446,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( d.vkGetPhysicalDeviceToolPropertiesEXT( static_cast( m_physicalDevice ), pToolCount, reinterpret_cast( pToolProperties ) ) ); - return static_cast( d.vkGetPhysicalDeviceToolPropertiesEXT( - static_cast( m_physicalDevice ), pToolCount, reinterpret_cast( pToolProperties ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template < - typename PhysicalDeviceToolPropertiesAllocator, - typename Dispatch, - typename std::enable_if::value, - int>::type> template < typename PhysicalDeviceToolPropertiesAllocator, typename Dispatch, @@ -21722,14 +20478,12 @@ namespace VULKAN_HPP_NAMESPACE } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolPropertiesEXT" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolPropertiesEXT" ); VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() ); if ( toolCount < toolProperties.size() ) { toolProperties.resize( toolCount ); } return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( toolProperties ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( toolProperties ) ); } template < @@ -21762,14 +20516,12 @@ namespace VULKAN_HPP_NAMESPACE } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolPropertiesEXT" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolPropertiesEXT" ); VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() ); if ( toolCount < toolProperties.size() ) { toolProperties.resize( toolCount ); } return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( toolProperties ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( toolProperties ) ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -21784,7 +20536,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( d.vkWaitForPresentKHR( static_cast( m_device ), static_cast( swapchain ), presentId, timeout ) ); - return static_cast( d.vkWaitForPresentKHR( static_cast( m_device ), static_cast( swapchain ), presentId, timeout ) ); } #else template @@ -21802,10 +20553,6 @@ namespace VULKAN_HPP_NAMESPACE result, VULKAN_HPP_NAMESPACE_STRING "::Device::waitForPresentKHR", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( - result, - 
VULKAN_HPP_NAMESPACE_STRING "::Device::waitForPresentKHR", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } ); return static_cast( result ); } @@ -21820,15 +20567,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( static_cast( m_physicalDevice ), pPropertyCount, reinterpret_cast( pProperties ) ) ); - static_cast( m_physicalDevice ), pPropertyCount, reinterpret_cast( pProperties ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template ::value, - int>::type> template ( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( static_cast( m_physicalDevice ), pCombinationCount, reinterpret_cast( pCombinations ) ) ); - static_cast( m_physicalDevice ), pCombinationCount, reinterpret_cast( pCombinations ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template ::value, - int>::type> template ( d.vkGetPhysicalDeviceSurfacePresentModes2EXT( static_cast( m_physicalDevice ), return static_cast( d.vkGetPhysicalDeviceSurfacePresentModes2EXT( static_cast( m_physicalDevice ), reinterpret_cast( pSurfaceInfo ), pPresentModeCount, @@ -22030,9 +20756,6 @@ namespace VULKAN_HPP_NAMESPACE } # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template ::value, int>::type> template ::value, int>::type> @@ -22063,14 +20786,12 @@ namespace VULKAN_HPP_NAMESPACE } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModes2EXT" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModes2EXT" ); VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() ); if ( presentModeCount < presentModes.size() ) { presentModes.resize( presentModeCount ); } return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( presentModes ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( presentModes ) ); } template ( d.vkAcquireFullScreenExclusiveModeEXT( static_cast( m_device ), static_cast( swapchain ) ) ); - return static_cast( d.vkAcquireFullScreenExclusiveModeEXT( static_cast( m_device ), static_cast( swapchain ) ) ); } # else template @@ -22138,10 +20856,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkAcquireFullScreenExclusiveModeEXT( m_device, static_cast( swapchain ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::acquireFullScreenExclusiveModeEXT" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::acquireFullScreenExclusiveModeEXT" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ @@ -22152,7 +20868,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( d.vkReleaseFullScreenExclusiveModeEXT( static_cast( m_device ), static_cast( swapchain ) ) ); - return static_cast( d.vkReleaseFullScreenExclusiveModeEXT( static_cast( m_device ), static_cast( swapchain ) ) ); } # else template @@ -22167,10 +20882,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkReleaseFullScreenExclusiveModeEXT( m_device, 
static_cast( swapchain ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::releaseFullScreenExclusiveModeEXT" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::releaseFullScreenExclusiveModeEXT" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ @@ -22184,9 +20897,6 @@ namespace VULKAN_HPP_NAMESPACE return static_cast( d.vkGetDeviceGroupSurfacePresentModes2EXT( static_cast( m_device ), reinterpret_cast( pSurfaceInfo ), reinterpret_cast( pModes ) ) ); - return static_cast( d.vkGetDeviceGroupSurfacePresentModes2EXT( static_cast( m_device ), - reinterpret_cast( pSurfaceInfo ), - reinterpret_cast( pModes ) ) ); } # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -22204,10 +20914,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetDeviceGroupSurfacePresentModes2EXT( m_device, reinterpret_cast( &surfaceInfo ), reinterpret_cast( &modes ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupSurfacePresentModes2EXT" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupSurfacePresentModes2EXT" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( modes ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( modes ) ); } # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ #endif /*VK_USE_PLATFORM_WIN32_KHR*/ @@ -22221,7 +20929,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateHeadlessSurfaceEXT( static_cast( m_instance ), return static_cast( d.vkCreateHeadlessSurfaceEXT( static_cast( m_instance ), reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), @@ -22247,10 +20954,8 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createHeadlessSurfaceEXT" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createHeadlessSurfaceEXT" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surface ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( surface ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -22272,12 +20977,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &surface ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createHeadlessSurfaceEXTUnique" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Instance::createHeadlessSurfaceEXTUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, UniqueHandle( surface, detail::ObjectDestroy( *this, allocator, d ) ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( surface, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -22291,8 +20993,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( 
d.vkGetBufferDeviceAddressKHR( static_cast( m_device ), reinterpret_cast( pInfo ) ) ); - return static_cast( - d.vkGetBufferDeviceAddressKHR( static_cast( m_device ), reinterpret_cast( pInfo ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -22318,7 +21018,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return d.vkGetBufferOpaqueCaptureAddressKHR( static_cast( m_device ), reinterpret_cast( pInfo ) ); - return d.vkGetBufferOpaqueCaptureAddressKHR( static_cast( m_device ), reinterpret_cast( pInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -22345,8 +21044,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return d.vkGetDeviceMemoryOpaqueCaptureAddressKHR( static_cast( m_device ), reinterpret_cast( pInfo ) ); - return d.vkGetDeviceMemoryOpaqueCaptureAddressKHR( static_cast( m_device ), - reinterpret_cast( pInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -22374,7 +21071,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdSetLineStippleEXT( static_cast( m_commandBuffer ), lineStippleFactor, lineStipplePattern ); - d.vkCmdSetLineStippleEXT( static_cast( m_commandBuffer ), lineStippleFactor, lineStipplePattern ); } //=== VK_EXT_host_query_reset === @@ -22387,7 +21083,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkResetQueryPoolEXT( static_cast( m_device ), static_cast( queryPool ), firstQuery, queryCount ); - d.vkResetQueryPoolEXT( static_cast( m_device ), static_cast( queryPool ), firstQuery, queryCount ); } //=== VK_EXT_extended_dynamic_state === @@ -22397,7 +21092,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdSetCullModeEXT( static_cast( m_commandBuffer ), static_cast( cullMode ) ); - d.vkCmdSetCullModeEXT( static_cast( m_commandBuffer ), static_cast( cullMode ) ); } template @@ -22405,7 +21099,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdSetFrontFaceEXT( static_cast( m_commandBuffer ), static_cast( frontFace ) ); - d.vkCmdSetFrontFaceEXT( static_cast( m_commandBuffer ), static_cast( frontFace ) ); } template @@ -22414,7 +21107,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdSetPrimitiveTopologyEXT( static_cast( m_commandBuffer ), static_cast( primitiveTopology ) ); - d.vkCmdSetPrimitiveTopologyEXT( static_cast( m_commandBuffer ), static_cast( primitiveTopology ) ); } template @@ -22424,7 +21116,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdSetViewportWithCountEXT( static_cast( m_commandBuffer ), viewportCount, reinterpret_cast( pViewports ) ); - d.vkCmdSetViewportWithCountEXT( static_cast( m_commandBuffer ), viewportCount, reinterpret_cast( pViewports ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -22448,7 +21139,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdSetScissorWithCountEXT( static_cast( m_commandBuffer ), scissorCount, reinterpret_cast( pScissors ) ); - d.vkCmdSetScissorWithCountEXT( static_cast( m_commandBuffer ), scissorCount, reinterpret_cast( pScissors ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -22476,7 +21166,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { 
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdBindVertexBuffers2EXT( static_cast( m_commandBuffer ), d.vkCmdBindVertexBuffers2EXT( static_cast( m_commandBuffer ), firstBinding, bindingCount, @@ -22534,7 +21223,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdSetDepthTestEnableEXT( static_cast( m_commandBuffer ), static_cast( depthTestEnable ) ); - d.vkCmdSetDepthTestEnableEXT( static_cast( m_commandBuffer ), static_cast( depthTestEnable ) ); } template @@ -22542,7 +21230,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdSetDepthWriteEnableEXT( static_cast( m_commandBuffer ), static_cast( depthWriteEnable ) ); - d.vkCmdSetDepthWriteEnableEXT( static_cast( m_commandBuffer ), static_cast( depthWriteEnable ) ); } template @@ -22550,7 +21237,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdSetDepthCompareOpEXT( static_cast( m_commandBuffer ), static_cast( depthCompareOp ) ); - d.vkCmdSetDepthCompareOpEXT( static_cast( m_commandBuffer ), static_cast( depthCompareOp ) ); } template @@ -22559,7 +21245,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdSetDepthBoundsTestEnableEXT( static_cast( m_commandBuffer ), static_cast( depthBoundsTestEnable ) ); - d.vkCmdSetDepthBoundsTestEnableEXT( static_cast( m_commandBuffer ), static_cast( depthBoundsTestEnable ) ); } template @@ -22567,7 +21252,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdSetStencilTestEnableEXT( static_cast( m_commandBuffer ), static_cast( stencilTestEnable ) ); - d.vkCmdSetStencilTestEnableEXT( static_cast( m_commandBuffer ), static_cast( stencilTestEnable ) ); } template @@ -22579,7 +21263,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetStencilOpEXT( static_cast( m_commandBuffer ), d.vkCmdSetStencilOpEXT( static_cast( m_commandBuffer ), static_cast( faceMask ), static_cast( failOp ), @@ -22599,9 +21282,6 @@ namespace VULKAN_HPP_NAMESPACE return static_cast( d.vkCreateDeferredOperationKHR( static_cast( m_device ), reinterpret_cast( pAllocator ), reinterpret_cast( pDeferredOperation ) ) ); - return static_cast( d.vkCreateDeferredOperationKHR( static_cast( m_device ), - reinterpret_cast( pAllocator ), - reinterpret_cast( pDeferredOperation ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -22620,10 +21300,8 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &deferredOperation ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDeferredOperationKHR" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDeferredOperationKHR" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( deferredOperation ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( deferredOperation ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -22642,14 +21320,10 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &deferredOperation ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDeferredOperationKHRUnique" ); - 
VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createDeferredOperationKHRUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, UniqueHandle( deferredOperation, detail::ObjectDestroy( *this, allocator, d ) ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, - UniqueHandle( deferredOperation, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -22662,7 +21336,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkDestroyDeferredOperationKHR( static_cast( m_device ), static_cast( operation ), reinterpret_cast( pAllocator ) ); - static_cast( m_device ), static_cast( operation ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -22691,7 +21364,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkDestroyDeferredOperationKHR( static_cast( m_device ), static_cast( operation ), reinterpret_cast( pAllocator ) ); - static_cast( m_device ), static_cast( operation ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -22718,7 +21390,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return d.vkGetDeferredOperationMaxConcurrencyKHR( static_cast( m_device ), static_cast( operation ) ); - return d.vkGetDeferredOperationMaxConcurrencyKHR( static_cast( m_device ), static_cast( operation ) ); } #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -22728,7 +21399,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( d.vkGetDeferredOperationResultKHR( static_cast( m_device ), static_cast( operation ) ) ); - return static_cast( d.vkGetDeferredOperationResultKHR( static_cast( m_device ), static_cast( operation ) ) ); } #else template @@ -22754,7 +21424,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( d.vkDeferredOperationJoinKHR( static_cast( m_device ), static_cast( operation ) ) ); - return static_cast( d.vkDeferredOperationJoinKHR( static_cast( m_device ), static_cast( operation ) ) ); } #else template @@ -22772,10 +21441,6 @@ namespace VULKAN_HPP_NAMESPACE result, VULKAN_HPP_NAMESPACE_STRING "::Device::deferredOperationJoinKHR", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eThreadDoneKHR, VULKAN_HPP_NAMESPACE::Result::eThreadIdleKHR } ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( - result, - VULKAN_HPP_NAMESPACE_STRING "::Device::deferredOperationJoinKHR", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eThreadDoneKHR, VULKAN_HPP_NAMESPACE::Result::eThreadIdleKHR } ); return static_cast( result ); } @@ -22790,7 +21455,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetPipelineExecutablePropertiesKHR( static_cast( m_device ), return static_cast( d.vkGetPipelineExecutablePropertiesKHR( static_cast( m_device ), reinterpret_cast( pPipelineInfo ), pExecutableCount, @@ -22798,11 +21462,6 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template ::value, - int>::type> template ( d.vkGetPipelineExecutableStatisticsKHR( static_cast( m_device ), return static_cast( 
d.vkGetPipelineExecutableStatisticsKHR( static_cast( m_device ), reinterpret_cast( pExecutableInfo ), pStatisticCount, @@ -22909,11 +21563,6 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template ::value, - int>::type> template ( - d.vkGetPipelineExecutableInternalRepresentationsKHR( static_cast( m_device ), d.vkGetPipelineExecutableInternalRepresentationsKHR( static_cast( m_device ), reinterpret_cast( pExecutableInfo ), pInternalRepresentationCount, @@ -23021,11 +21665,6 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template ::value, - int>::type> template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyMemoryToImageEXT( const VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfoEXT * pCopyMemoryToImageInfo, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyMemoryToImageEXT( const VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfo * pCopyMemoryToImageInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( - d.vkCopyMemoryToImageEXT( static_cast( m_device ), reinterpret_cast( pCopyMemoryToImageInfo ) ) ); - return static_cast( - d.vkCopyMemoryToImageEXT( static_cast( m_device ), reinterpret_cast( pCopyMemoryToImageInfo ) ) ); + d.vkCopyMemoryToImageEXT( static_cast( m_device ), reinterpret_cast( pCopyMemoryToImageInfo ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type - Device::copyMemoryToImageEXT( const VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfoEXT & copyMemoryToImageInfo, Dispatch const & d ) const + Device::copyMemoryToImageEXT( const VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfo & copyMemoryToImageInfo, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) - VULKAN_HPP_ASSERT( d.vkCopyMemoryToImageEXT && "Function requires " ); + VULKAN_HPP_ASSERT( d.vkCopyMemoryToImageEXT && "Function requires or " ); # endif VULKAN_HPP_NAMESPACE::Result result = static_cast( - d.vkCopyMemoryToImageEXT( m_device, reinterpret_cast( ©MemoryToImageInfo ) ) ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::copyMemoryToImageEXT" ); + d.vkCopyMemoryToImageEXT( m_device, reinterpret_cast( ©MemoryToImageInfo ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::copyMemoryToImageEXT" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyImageToMemoryEXT( const VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfoEXT * pCopyImageToMemoryInfo, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyImageToMemoryEXT( const VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfo * pCopyImageToMemoryInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( - d.vkCopyImageToMemoryEXT( static_cast( m_device ), reinterpret_cast( pCopyImageToMemoryInfo ) ) ); - return static_cast( - d.vkCopyImageToMemoryEXT( static_cast( m_device ), reinterpret_cast( pCopyImageToMemoryInfo ) ) ); + d.vkCopyImageToMemoryEXT( static_cast( m_device 
), reinterpret_cast( pCopyImageToMemoryInfo ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type - Device::copyImageToMemoryEXT( const VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfoEXT & copyImageToMemoryInfo, Dispatch const & d ) const + Device::copyImageToMemoryEXT( const VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfo & copyImageToMemoryInfo, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) - VULKAN_HPP_ASSERT( d.vkCopyImageToMemoryEXT && "Function requires " ); + VULKAN_HPP_ASSERT( d.vkCopyImageToMemoryEXT && "Function requires or " ); # endif VULKAN_HPP_NAMESPACE::Result result = static_cast( - d.vkCopyImageToMemoryEXT( m_device, reinterpret_cast( ©ImageToMemoryInfo ) ) ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::copyImageToMemoryEXT" ); + d.vkCopyImageToMemoryEXT( m_device, reinterpret_cast( ©ImageToMemoryInfo ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::copyImageToMemoryEXT" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyImageToImageEXT( const VULKAN_HPP_NAMESPACE::CopyImageToImageInfoEXT * pCopyImageToImageInfo, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyImageToImageEXT( const VULKAN_HPP_NAMESPACE::CopyImageToImageInfo * pCopyImageToImageInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( - d.vkCopyImageToImageEXT( static_cast( m_device ), reinterpret_cast( pCopyImageToImageInfo ) ) ); - return static_cast( - d.vkCopyImageToImageEXT( static_cast( m_device ), reinterpret_cast( pCopyImageToImageInfo ) ) ); + d.vkCopyImageToImageEXT( static_cast( m_device ), reinterpret_cast( pCopyImageToImageInfo ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type - Device::copyImageToImageEXT( const VULKAN_HPP_NAMESPACE::CopyImageToImageInfoEXT & copyImageToImageInfo, Dispatch const & d ) const + Device::copyImageToImageEXT( const VULKAN_HPP_NAMESPACE::CopyImageToImageInfo & copyImageToImageInfo, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) - VULKAN_HPP_ASSERT( d.vkCopyImageToImageEXT && "Function requires " ); + VULKAN_HPP_ASSERT( d.vkCopyImageToImageEXT && "Function requires or " ); # endif VULKAN_HPP_NAMESPACE::Result result = static_cast( - d.vkCopyImageToImageEXT( m_device, reinterpret_cast( ©ImageToImageInfo ) ) ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::copyImageToImageEXT" ); + d.vkCopyImageToImageEXT( m_device, reinterpret_cast( ©ImageToImageInfo ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::copyImageToImageEXT" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 
Result Device::transitionImageLayoutEXT( uint32_t transitionCount, - const VULKAN_HPP_NAMESPACE::HostImageLayoutTransitionInfoEXT * pTransitions, + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::transitionImageLayoutEXT( uint32_t transitionCount, + const VULKAN_HPP_NAMESPACE::HostImageLayoutTransitionInfo * pTransitions, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( d.vkTransitionImageLayoutEXT( - static_cast( m_device ), transitionCount, reinterpret_cast( pTransitions ) ) ); - return static_cast( d.vkTransitionImageLayoutEXT( - static_cast( m_device ), transitionCount, reinterpret_cast( pTransitions ) ) ); + static_cast( m_device ), transitionCount, reinterpret_cast( pTransitions ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType::type - Device::transitionImageLayoutEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & transitions, - Dispatch const & d ) const + Device::transitionImageLayoutEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & transitions, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) - VULKAN_HPP_ASSERT( d.vkTransitionImageLayoutEXT && "Function requires " ); + VULKAN_HPP_ASSERT( d.vkTransitionImageLayoutEXT && "Function requires or " ); # endif VULKAN_HPP_NAMESPACE::Result result = static_cast( - d.vkTransitionImageLayoutEXT( m_device, transitions.size(), reinterpret_cast( transitions.data() ) ) ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::transitionImageLayoutEXT" ); + d.vkTransitionImageLayoutEXT( m_device, transitions.size(), reinterpret_cast( transitions.data() ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::transitionImageLayoutEXT" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_INLINE void Device::getImageSubresourceLayout2EXT( VULKAN_HPP_NAMESPACE::Image image, - const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR * pSubresource, - VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR * pLayout, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void Device::getImageSubresourceLayout2EXT( VULKAN_HPP_NAMESPACE::Image image, + const VULKAN_HPP_NAMESPACE::ImageSubresource2 * pSubresource, + VULKAN_HPP_NAMESPACE::SubresourceLayout2 * pLayout, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetImageSubresourceLayout2EXT( static_cast( m_device ), d.vkGetImageSubresourceLayout2EXT( static_cast( m_device ), static_cast( image ), - reinterpret_cast( pSubresource ), - reinterpret_cast( pLayout ) ); + reinterpret_cast( pSubresource ), + reinterpret_cast( pLayout ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR Device::getImageSubresourceLayout2EXT( - VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout2 Device::getImageSubresourceLayout2EXT( + VULKAN_HPP_NAMESPACE::Image image, const 
VULKAN_HPP_NAMESPACE::ImageSubresource2 & subresource, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) VULKAN_HPP_ASSERT( d.vkGetImageSubresourceLayout2EXT && - "Function requires or or " ); + "Function requires or or or " ); # endif - VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR layout; + VULKAN_HPP_NAMESPACE::SubresourceLayout2 layout; d.vkGetImageSubresourceLayout2EXT( m_device, static_cast( image ), - reinterpret_cast( &subresource ), - reinterpret_cast( &layout ) ); + reinterpret_cast( &subresource ), + reinterpret_cast( &layout ) ); return layout; } template VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain Device::getImageSubresourceLayout2EXT( - VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource2 & subresource, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) VULKAN_HPP_ASSERT( d.vkGetImageSubresourceLayout2EXT && - "Function requires or or " ); + "Function requires or or or " ); # endif VULKAN_HPP_NAMESPACE::StructureChain structureChain; - VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR & layout = structureChain.template get(); + VULKAN_HPP_NAMESPACE::SubresourceLayout2 & layout = structureChain.template get(); d.vkGetImageSubresourceLayout2EXT( m_device, static_cast( image ), - reinterpret_cast( &subresource ), - reinterpret_cast( &layout ) ); + reinterpret_cast( &subresource ), + reinterpret_cast( &layout ) ); return structureChain; } @@ -23306,64 +21924,56 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_KHR_map_memory2 === template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::mapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryMapInfoKHR * pMemoryMapInfo, - void ** ppData, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::mapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryMapInfo * pMemoryMapInfo, + void ** ppData, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( - d.vkMapMemory2KHR( static_cast( m_device ), reinterpret_cast( pMemoryMapInfo ), ppData ) ); - return static_cast( - d.vkMapMemory2KHR( static_cast( m_device ), reinterpret_cast( pMemoryMapInfo ), ppData ) ); + return static_cast( d.vkMapMemory2KHR( static_cast( m_device ), reinterpret_cast( pMemoryMapInfo ), ppData ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type - Device::mapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryMapInfoKHR & memoryMapInfo, Dispatch const & d ) const + Device::mapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryMapInfo & memoryMapInfo, Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) - VULKAN_HPP_ASSERT( d.vkMapMemory2KHR && "Function requires " ); + VULKAN_HPP_ASSERT( d.vkMapMemory2KHR && "Function requires or " ); # endif void * pData; VULKAN_HPP_NAMESPACE::Result result = - static_cast( d.vkMapMemory2KHR( m_device, reinterpret_cast( &memoryMapInfo ), &pData ) ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING 
"::Device::mapMemory2KHR" ); + static_cast( d.vkMapMemory2KHR( m_device, reinterpret_cast( &memoryMapInfo ), &pData ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::mapMemory2KHR" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( pData ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( pData ) ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::unmapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryUnmapInfoKHR * pMemoryUnmapInfo, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::unmapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryUnmapInfo * pMemoryUnmapInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkUnmapMemory2KHR( static_cast( m_device ), reinterpret_cast( pMemoryUnmapInfo ) ) ); - return static_cast( d.vkUnmapMemory2KHR( static_cast( m_device ), reinterpret_cast( pMemoryUnmapInfo ) ) ); + return static_cast( d.vkUnmapMemory2KHR( static_cast( m_device ), reinterpret_cast( pMemoryUnmapInfo ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE typename ResultValueType::type Device::unmapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryUnmapInfoKHR & memoryUnmapInfo, - Dispatch const & d ) const + VULKAN_HPP_INLINE typename ResultValueType::type Device::unmapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryUnmapInfo & memoryUnmapInfo, + Dispatch const & d ) const { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) - VULKAN_HPP_ASSERT( d.vkUnmapMemory2KHR && "Function requires " ); + VULKAN_HPP_ASSERT( d.vkUnmapMemory2KHR && "Function requires or " ); # endif VULKAN_HPP_NAMESPACE::Result result = - static_cast( d.vkUnmapMemory2KHR( m_device, reinterpret_cast( &memoryUnmapInfo ) ) ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::unmapMemory2KHR" ); + static_cast( d.vkUnmapMemory2KHR( m_device, reinterpret_cast( &memoryUnmapInfo ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::unmapMemory2KHR" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -23376,8 +21986,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( d.vkReleaseSwapchainImagesEXT( static_cast( m_device ), reinterpret_cast( pReleaseInfo ) ) ); - return static_cast( - d.vkReleaseSwapchainImagesEXT( static_cast( m_device ), reinterpret_cast( pReleaseInfo ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -23393,10 +22001,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkReleaseSwapchainImagesEXT( m_device, reinterpret_cast( &releaseInfo ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::releaseSwapchainImagesEXT" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::releaseSwapchainImagesEXT" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -23408,7 
+22014,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetGeneratedCommandsMemoryRequirementsNV( static_cast( m_device ), d.vkGetGeneratedCommandsMemoryRequirementsNV( static_cast( m_device ), reinterpret_cast( pInfo ), reinterpret_cast( pMemoryRequirements ) ); @@ -23462,8 +22067,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdPreprocessGeneratedCommandsNV( static_cast( m_commandBuffer ), reinterpret_cast( pGeneratedCommandsInfo ) ); - d.vkCmdPreprocessGeneratedCommandsNV( static_cast( m_commandBuffer ), - reinterpret_cast( pGeneratedCommandsInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -23489,9 +22092,6 @@ namespace VULKAN_HPP_NAMESPACE d.vkCmdExecuteGeneratedCommandsNV( static_cast( m_commandBuffer ), static_cast( isPreprocessed ), reinterpret_cast( pGeneratedCommandsInfo ) ); - d.vkCmdExecuteGeneratedCommandsNV( static_cast( m_commandBuffer ), - static_cast( isPreprocessed ), - reinterpret_cast( pGeneratedCommandsInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -23519,8 +22119,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdBindPipelineShaderGroupNV( static_cast( m_commandBuffer ), static_cast( pipelineBindPoint ), static_cast( pipeline ), groupIndex ); - d.vkCmdBindPipelineShaderGroupNV( - static_cast( m_commandBuffer ), static_cast( pipelineBindPoint ), static_cast( pipeline ), groupIndex ); } template @@ -23531,7 +22129,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateIndirectCommandsLayoutNV( static_cast( m_device ), return static_cast( d.vkCreateIndirectCommandsLayoutNV( static_cast( m_device ), reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), @@ -23557,10 +22154,8 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &indirectCommandsLayout ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createIndirectCommandsLayoutNV" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createIndirectCommandsLayoutNV" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( indirectCommandsLayout ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( indirectCommandsLayout ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -23582,14 +22177,10 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &indirectCommandsLayout ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createIndirectCommandsLayoutNVUnique" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createIndirectCommandsLayoutNVUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, UniqueHandle( indirectCommandsLayout, detail::ObjectDestroy( *this, allocator, d ) ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, - UniqueHandle( - indirectCommandsLayout, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -23603,9 +22194,6 @@ namespace VULKAN_HPP_NAMESPACE d.vkDestroyIndirectCommandsLayoutNV( 
static_cast( m_device ), static_cast( indirectCommandsLayout ), reinterpret_cast( pAllocator ) ); - d.vkDestroyIndirectCommandsLayoutNV( static_cast( m_device ), - static_cast( indirectCommandsLayout ), - reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -23635,9 +22223,6 @@ namespace VULKAN_HPP_NAMESPACE d.vkDestroyIndirectCommandsLayoutNV( static_cast( m_device ), static_cast( indirectCommandsLayout ), reinterpret_cast( pAllocator ) ); - d.vkDestroyIndirectCommandsLayoutNV( static_cast( m_device ), - static_cast( indirectCommandsLayout ), - reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -23666,7 +22251,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdSetDepthBias2EXT( static_cast( m_commandBuffer ), reinterpret_cast( pDepthBiasInfo ) ); - d.vkCmdSetDepthBias2EXT( static_cast( m_commandBuffer ), reinterpret_cast( pDepthBiasInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -23693,7 +22277,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( d.vkAcquireDrmDisplayEXT( static_cast( m_physicalDevice ), drmFd, static_cast( display ) ) ); - return static_cast( d.vkAcquireDrmDisplayEXT( static_cast( m_physicalDevice ), drmFd, static_cast( display ) ) ); } #else template @@ -23708,10 +22291,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkAcquireDrmDisplayEXT( m_physicalDevice, drmFd, static_cast( display ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::acquireDrmDisplayEXT" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::acquireDrmDisplayEXT" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ @@ -23724,8 +22305,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( d.vkGetDrmDisplayEXT( static_cast( m_physicalDevice ), drmFd, connectorId, reinterpret_cast( display ) ) ); - return static_cast( - d.vkGetDrmDisplayEXT( static_cast( m_physicalDevice ), drmFd, connectorId, reinterpret_cast( display ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -23742,10 +22321,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetDrmDisplayEXT( m_physicalDevice, drmFd, connectorId, reinterpret_cast( &display ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDrmDisplayEXT" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDrmDisplayEXT" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( display ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( display ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -23762,12 +22339,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetDrmDisplayEXT( m_physicalDevice, drmFd, connectorId, reinterpret_cast( &display ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDrmDisplayEXTUnique" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDrmDisplayEXTUnique" ); return 
VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, UniqueHandle( display, detail::ObjectRelease( *this, d ) ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( display, detail::ObjectRelease( *this, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -23781,7 +22355,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreatePrivateDataSlotEXT( static_cast( m_device ), return static_cast( d.vkCreatePrivateDataSlotEXT( static_cast( m_device ), reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), @@ -23807,10 +22380,8 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &privateDataSlot ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createPrivateDataSlotEXT" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createPrivateDataSlotEXT" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( privateDataSlot ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( privateDataSlot ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -23832,14 +22403,10 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &privateDataSlot ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createPrivateDataSlotEXTUnique" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createPrivateDataSlotEXTUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, UniqueHandle( privateDataSlot, detail::ObjectDestroy( *this, allocator, d ) ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, - UniqueHandle( privateDataSlot, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -23852,8 +22419,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkDestroyPrivateDataSlotEXT( static_cast( m_device ), static_cast( privateDataSlot ), reinterpret_cast( pAllocator ) ); - d.vkDestroyPrivateDataSlotEXT( - static_cast( m_device ), static_cast( privateDataSlot ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -23885,8 +22450,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( d.vkSetPrivateDataEXT( static_cast( m_device ), static_cast( objectType_ ), objectHandle, static_cast( privateDataSlot ), data ) ); - return static_cast( d.vkSetPrivateDataEXT( - static_cast( m_device ), static_cast( objectType_ ), objectHandle, static_cast( privateDataSlot ), data ) ); } #else template @@ -23904,10 +22467,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkSetPrivateDataEXT( m_device, static_cast( objectType_ ), objectHandle, static_cast( privateDataSlot ), data ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setPrivateDataEXT" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setPrivateDataEXT" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); - return 
VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ @@ -23921,8 +22482,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkGetPrivateDataEXT( static_cast( m_device ), static_cast( objectType_ ), objectHandle, static_cast( privateDataSlot ), pData ); - d.vkGetPrivateDataEXT( - static_cast( m_device ), static_cast( objectType_ ), objectHandle, static_cast( privateDataSlot ), pData ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -23954,7 +22513,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( - d.vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR( static_cast( m_physicalDevice ), d.vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR( static_cast( m_physicalDevice ), reinterpret_cast( pQualityLevelInfo ), reinterpret_cast( pQualityLevelProperties ) ) ); @@ -23978,10 +22536,8 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &qualityLevelInfo ), reinterpret_cast( &qualityLevelProperties ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoEncodeQualityLevelPropertiesKHR" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoEncodeQualityLevelPropertiesKHR" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( qualityLevelProperties ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( qualityLevelProperties ) ); } template @@ -24003,10 +22559,8 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &qualityLevelInfo ), reinterpret_cast( &qualityLevelProperties ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoEncodeQualityLevelPropertiesKHR" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoEncodeQualityLevelPropertiesKHR" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( structureChain ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( structureChain ) ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -24020,7 +22574,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( - d.vkGetEncodedVideoSessionParametersKHR( static_cast( m_device ), d.vkGetEncodedVideoSessionParametersKHR( static_cast( m_device ), reinterpret_cast( pVideoSessionParametersInfo ), reinterpret_cast( pFeedbackInfo ), @@ -24029,9 +22582,6 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template ::value, int>::type> template ::value, int>::type> @@ -24070,10 +22620,8 @@ namespace VULKAN_HPP_NAMESPACE } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getEncodedVideoSessionParametersKHR" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getEncodedVideoSessionParametersKHR" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data_ ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data_ ) ); } template ::value, int>::type> template ( m_commandBuffer ), reinterpret_cast( pEncodeInfo ) ); - d.vkCmdEncodeVideoKHR( static_cast( m_commandBuffer ), reinterpret_cast( 
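For orientation, this is roughly how the VK_KHR_map_memory2 wrappers summarized above are called from application code. A minimal sketch only, assuming a device created with the extension enabled and a populated default dispatcher; the helper name touchAllocation and the fill pattern are illustrative, not something this patch adds.

	#include <cstring>
	#include <vulkan/vulkan.hpp>

	// Map a host-visible allocation through vkMapMemory2KHR, write to it, then
	// unmap it through vkUnmapMemory2KHR, using the enhanced-mode vulkan.hpp wrappers.
	void touchAllocation( vk::Device device, vk::DeviceMemory memory, vk::DeviceSize size )
	{
		vk::MemoryMapInfoKHR map_info{};
		map_info.memory = memory;
		map_info.offset = 0;
		map_info.size   = size;

		// Enhanced mode returns the mapped pointer directly (or throws on error).
		void* data = device.mapMemory2KHR( map_info );
		std::memset( data, 0xCD, static_cast<std::size_t>( size ) );

		vk::MemoryUnmapInfoKHR unmap_info{};
		unmap_info.memory = memory;
		device.unmapMemory2KHR( unmap_info );
	}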
[Hunks condensed: the same re-wrapping and de-duplication continues through the wrappers for:
  VK_NV_cuda_kernel_launch   - vkCreateCudaModuleNV (and Unique variant), vkGetCudaModuleCacheNV, vkCreateCudaFunctionNV (and Unique variant), vkDestroyCudaModuleNV, vkDestroyCudaFunctionNV, vkCmdCudaLaunchKernelNV
  VK_EXT_metal_objects       - vkExportMetalObjectsEXT
  VK_KHR_synchronization2    - vkCmdSetEvent2KHR, vkCmdResetEvent2KHR, vkCmdWaitEvents2KHR, vkCmdPipelineBarrier2KHR, vkCmdWriteTimestamp2KHR, vkQueueSubmit2KHR
  VK_EXT_descriptor_buffer   - vkGetDescriptorSetLayoutSizeEXT, vkGetDescriptorSetLayoutBindingOffsetEXT, vkGetDescriptorEXT, vkCmdBindDescriptorBuffersEXT, vkCmdSetDescriptorBufferOffsetsEXT, vkCmdBindDescriptorBufferEmbeddedSamplersEXT, and the vkGet*OpaqueCaptureDescriptorDataEXT family (buffer, image, image view, sampler, acceleration structure)]
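The synchronization2 wrappers listed above are the ones a renderer calls when recording barriers. A minimal sketch of a single image layout transition, assuming VK_KHR_synchronization2 is enabled and its entry points are loaded; the helper name transitionToTransferDst is illustrative.

	#include <vulkan/vulkan.hpp>

	// Record one image layout transition with vkCmdPipelineBarrier2KHR via the
	// enhanced-mode wrapper. Queue family indices are left equal, so no
	// ownership transfer is performed.
	void transitionToTransferDst( vk::CommandBuffer cmd, vk::Image image )
	{
		vk::ImageMemoryBarrier2KHR barrier{};
		barrier.srcStageMask     = vk::PipelineStageFlagBits2KHR::eTopOfPipe;
		barrier.srcAccessMask    = {};
		barrier.dstStageMask     = vk::PipelineStageFlagBits2KHR::eTransfer;
		barrier.dstAccessMask    = vk::AccessFlagBits2KHR::eTransferWrite;
		barrier.oldLayout        = vk::ImageLayout::eUndefined;
		barrier.newLayout        = vk::ImageLayout::eTransferDstOptimal;
		barrier.image            = image;
		barrier.subresourceRange = { vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 };

		vk::DependencyInfoKHR dep{};
		dep.imageMemoryBarrierCount = 1;
		dep.pImageMemoryBarriers    = &barrier;

		cmd.pipelineBarrier2KHR( dep ); // wraps vkCmdPipelineBarrier2KHR
	}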
[Hunks condensed: the remaining changes in this range apply the same cleanup to the wrappers for:
  VK_NV_fragment_shading_rate_enums - vkCmdSetFragmentShadingRateEnumNV
  VK_EXT_mesh_shader               - vkCmdDrawMeshTasksEXT, vkCmdDrawMeshTasksIndirectEXT, vkCmdDrawMeshTasksIndirectCountEXT
  VK_KHR_copy_commands2            - vkCmdCopyBuffer2KHR, vkCmdCopyImage2KHR, vkCmdCopyBufferToImage2KHR, vkCmdCopyImageToBuffer2KHR, vkCmdBlitImage2KHR, vkCmdResolveImage2KHR
  VK_EXT_device_fault              - vkGetDeviceFaultInfoEXT
  VK_NV_acquire_winrt_display      - vkAcquireWinrtDisplayNV, vkGetWinrtDisplayNV (and Unique variant)
  VK_EXT_directfb_surface          - vkCreateDirectFBSurfaceEXT (and Unique variant), vkGetPhysicalDeviceDirectFBPresentationSupportEXT
  VK_EXT_vertex_input_dynamic_state - vkCmdSetVertexInputEXT
  VK_FUCHSIA external memory / semaphore / buffer collection - vkGetMemoryZirconHandleFUCHSIA, vkGetMemoryZirconHandlePropertiesFUCHSIA, vkImportSemaphoreZirconHandleFUCHSIA, vkGetSemaphoreZirconHandleFUCHSIA, vkCreateBufferCollectionFUCHSIA (and Unique variant), vkSetBufferCollectionImageConstraintsFUCHSIA, vkSetBufferCollectionBufferConstraintsFUCHSIA, vkDestroyBufferCollectionFUCHSIA, vkGetBufferCollectionPropertiesFUCHSIA
  VK_HUAWEI_subpass_shading / VK_HUAWEI_invocation_mask - vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI, vkCmdSubpassShadingHUAWEI, vkCmdBindInvocationMaskHUAWEI
  VK_NV_external_memory_rdma       - vkGetMemoryRemoteAddressNV
  VK_EXT_pipeline_properties       - vkGetPipelinePropertiesEXT
  VK_EXT_extended_dynamic_state2   - vkCmdSetPatchControlPointsEXT, vkCmdSetRasterizerDiscardEnableEXT, vkCmdSetDepthBiasEnableEXT, vkCmdSetLogicOpEXT, vkCmdSetPrimitiveRestartEnableEXT
  VK_QNX_screen_surface            - vkCreateScreenSurfaceQNX (and Unique variant), vkGetPhysicalDeviceScreenPresentationSupportQNX
  VK_EXT_color_write_enable        - vkCmdSetColorWriteEnableEXT
  VK_KHR_ray_tracing_maintenance1  - vkCmdTraceRaysIndirect2KHR
  VK_EXT_multi_draw                - vkCmdDrawMultiEXT, vkCmdDrawMultiIndexedEXT
  VK_EXT_opacity_micromap          - vkCreateMicromapEXT (and Unique variant), vkDestroyMicromapEXT, vkCmdBuildMicromapsEXT, vkBuildMicromapsEXT, vkCopyMicromapEXT, vkCopyMicromapToMemoryEXT, vkCopyMemoryToMicromapEXT, vkWriteMicromapsPropertiesEXT]
VULKAN_HPP_NAMESPACE_STRING "::Device::writeMicromapsPropertiesEXT" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); } template @@ -26814,10 +25118,8 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( &data ), stride ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::writeMicromapsPropertyEXT" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::writeMicromapsPropertyEXT" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -26826,7 +25128,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdCopyMicromapEXT( static_cast( m_commandBuffer ), reinterpret_cast( pInfo ) ); - d.vkCmdCopyMicromapEXT( static_cast( m_commandBuffer ), reinterpret_cast( pInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -26848,7 +25149,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdCopyMicromapToMemoryEXT( static_cast( m_commandBuffer ), reinterpret_cast( pInfo ) ); - d.vkCmdCopyMicromapToMemoryEXT( static_cast( m_commandBuffer ), reinterpret_cast( pInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -26871,7 +25171,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdCopyMemoryToMicromapEXT( static_cast( m_commandBuffer ), reinterpret_cast( pInfo ) ); - d.vkCmdCopyMemoryToMicromapEXT( static_cast( m_commandBuffer ), reinterpret_cast( pInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -26897,7 +25196,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdWriteMicromapsPropertiesEXT( static_cast( m_commandBuffer ), d.vkCmdWriteMicromapsPropertiesEXT( static_cast( m_commandBuffer ), micromapCount, reinterpret_cast( pMicromaps ), @@ -26935,7 +25233,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetDeviceMicromapCompatibilityEXT( static_cast( m_device ), d.vkGetDeviceMicromapCompatibilityEXT( static_cast( m_device ), reinterpret_cast( pVersionInfo ), reinterpret_cast( pCompatibility ) ); @@ -26967,7 +25264,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetMicromapBuildSizesEXT( static_cast( m_device ), d.vkGetMicromapBuildSizesEXT( static_cast( m_device ), static_cast( buildType ), reinterpret_cast( pBuildInfo ), @@ -27004,7 +25300,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdDrawClusterHUAWEI( static_cast( m_commandBuffer ), groupCountX, groupCountY, groupCountZ ); - d.vkCmdDrawClusterHUAWEI( static_cast( m_commandBuffer ), groupCountX, groupCountY, groupCountZ ); } template @@ -27014,7 +25309,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdDrawClusterIndirectHUAWEI( static_cast( m_commandBuffer ), static_cast( buffer ), static_cast( offset ) ); - d.vkCmdDrawClusterIndirectHUAWEI( 
static_cast( m_commandBuffer ), static_cast( buffer ), static_cast( offset ) ); } //=== VK_EXT_pageable_device_local_memory === @@ -27024,7 +25318,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkSetDeviceMemoryPriorityEXT( static_cast( m_device ), static_cast( memory ), priority ); - d.vkSetDeviceMemoryPriorityEXT( static_cast( m_device ), static_cast( memory ), priority ); } //=== VK_KHR_maintenance4 === @@ -27038,9 +25331,6 @@ namespace VULKAN_HPP_NAMESPACE d.vkGetDeviceBufferMemoryRequirementsKHR( static_cast( m_device ), reinterpret_cast( pInfo ), reinterpret_cast( pMemoryRequirements ) ); - d.vkGetDeviceBufferMemoryRequirementsKHR( static_cast( m_device ), - reinterpret_cast( pInfo ), - reinterpret_cast( pMemoryRequirements ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -27089,9 +25379,6 @@ namespace VULKAN_HPP_NAMESPACE d.vkGetDeviceImageMemoryRequirementsKHR( static_cast( m_device ), reinterpret_cast( pInfo ), reinterpret_cast( pMemoryRequirements ) ); - d.vkGetDeviceImageMemoryRequirementsKHR( static_cast( m_device ), - reinterpret_cast( pInfo ), - reinterpret_cast( pMemoryRequirements ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -27138,7 +25425,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetDeviceImageSparseMemoryRequirementsKHR( static_cast( m_device ), d.vkGetDeviceImageSparseMemoryRequirementsKHR( static_cast( m_device ), reinterpret_cast( pInfo ), pSparseMemoryRequirementCount, @@ -27146,11 +25432,6 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template ::value, - int>::type> template ( m_device ), d.vkGetDescriptorSetLayoutHostMappingInfoVALVE( static_cast( m_device ), reinterpret_cast( pBindingReference ), reinterpret_cast( pHostMapping ) ); @@ -27260,7 +25540,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkGetDescriptorSetHostMappingVALVE( static_cast( m_device ), static_cast( descriptorSet ), ppData ); - d.vkGetDescriptorSetHostMappingVALVE( static_cast( m_device ), static_cast( descriptorSet ), ppData ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -27291,7 +25570,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdCopyMemoryIndirectNV( static_cast( m_commandBuffer ), static_cast( copyBufferAddress ), copyCount, stride ); - d.vkCmdCopyMemoryIndirectNV( static_cast( m_commandBuffer ), static_cast( copyBufferAddress ), copyCount, stride ); } template @@ -27304,7 +25582,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdCopyMemoryToImageIndirectNV( static_cast( m_commandBuffer ), d.vkCmdCopyMemoryToImageIndirectNV( static_cast( m_commandBuffer ), static_cast( copyBufferAddress ), copyCount, @@ -27350,9 +25627,6 @@ namespace VULKAN_HPP_NAMESPACE d.vkCmdDecompressMemoryNV( static_cast( m_commandBuffer ), decompressRegionCount, reinterpret_cast( pDecompressMemoryRegions ) ); - d.vkCmdDecompressMemoryNV( static_cast( m_commandBuffer ), - decompressRegionCount, - reinterpret_cast( pDecompressMemoryRegions ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -27382,10 +25656,6 @@ namespace VULKAN_HPP_NAMESPACE static_cast( indirectCommandsAddress ), static_cast( indirectCommandsCountAddress ), stride ); - d.vkCmdDecompressMemoryIndirectCountNV( 
static_cast( m_commandBuffer ), - static_cast( indirectCommandsAddress ), - static_cast( indirectCommandsCountAddress ), - stride ); } //=== VK_NV_device_generated_commands_compute === @@ -27399,9 +25669,6 @@ namespace VULKAN_HPP_NAMESPACE d.vkGetPipelineIndirectMemoryRequirementsNV( static_cast( m_device ), reinterpret_cast( pCreateInfo ), reinterpret_cast( pMemoryRequirements ) ); - d.vkGetPipelineIndirectMemoryRequirementsNV( static_cast( m_device ), - reinterpret_cast( pCreateInfo ), - reinterpret_cast( pMemoryRequirements ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -27451,8 +25718,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdUpdatePipelineIndirectBufferNV( static_cast( m_commandBuffer ), static_cast( pipelineBindPoint ), static_cast( pipeline ) ); - d.vkCmdUpdatePipelineIndirectBufferNV( - static_cast( m_commandBuffer ), static_cast( pipelineBindPoint ), static_cast( pipeline ) ); } template @@ -27462,7 +25727,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( d.vkGetPipelineIndirectDeviceAddressNV( static_cast( m_device ), reinterpret_cast( pInfo ) ) ); - d.vkGetPipelineIndirectDeviceAddressNV( static_cast( m_device ), reinterpret_cast( pInfo ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -27489,7 +25753,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdSetDepthClampEnableEXT( static_cast( m_commandBuffer ), static_cast( depthClampEnable ) ); - d.vkCmdSetDepthClampEnableEXT( static_cast( m_commandBuffer ), static_cast( depthClampEnable ) ); } template @@ -27497,7 +25760,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdSetPolygonModeEXT( static_cast( m_commandBuffer ), static_cast( polygonMode ) ); - d.vkCmdSetPolygonModeEXT( static_cast( m_commandBuffer ), static_cast( polygonMode ) ); } template @@ -27506,7 +25768,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdSetRasterizationSamplesEXT( static_cast( m_commandBuffer ), static_cast( rasterizationSamples ) ); - d.vkCmdSetRasterizationSamplesEXT( static_cast( m_commandBuffer ), static_cast( rasterizationSamples ) ); } template @@ -27517,8 +25778,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdSetSampleMaskEXT( static_cast( m_commandBuffer ), static_cast( samples ), reinterpret_cast( pSampleMask ) ); - d.vkCmdSetSampleMaskEXT( - static_cast( m_commandBuffer ), static_cast( samples ), reinterpret_cast( pSampleMask ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -27550,7 +25809,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdSetAlphaToCoverageEnableEXT( static_cast( m_commandBuffer ), static_cast( alphaToCoverageEnable ) ); - d.vkCmdSetAlphaToCoverageEnableEXT( static_cast( m_commandBuffer ), static_cast( alphaToCoverageEnable ) ); } template @@ -27558,7 +25816,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdSetAlphaToOneEnableEXT( static_cast( m_commandBuffer ), static_cast( alphaToOneEnable ) ); - d.vkCmdSetAlphaToOneEnableEXT( static_cast( m_commandBuffer ), static_cast( alphaToOneEnable ) ); } template @@ -27566,7 +25823,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION 
); d.vkCmdSetLogicOpEnableEXT( static_cast( m_commandBuffer ), static_cast( logicOpEnable ) ); - d.vkCmdSetLogicOpEnableEXT( static_cast( m_commandBuffer ), static_cast( logicOpEnable ) ); } template @@ -27578,8 +25834,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdSetColorBlendEnableEXT( static_cast( m_commandBuffer ), firstAttachment, attachmentCount, reinterpret_cast( pColorBlendEnables ) ); - d.vkCmdSetColorBlendEnableEXT( - static_cast( m_commandBuffer ), firstAttachment, attachmentCount, reinterpret_cast( pColorBlendEnables ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -27609,10 +25863,6 @@ namespace VULKAN_HPP_NAMESPACE firstAttachment, attachmentCount, reinterpret_cast( pColorBlendEquations ) ); - d.vkCmdSetColorBlendEquationEXT( static_cast( m_commandBuffer ), - firstAttachment, - attachmentCount, - reinterpret_cast( pColorBlendEquations ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -27642,8 +25892,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdSetColorWriteMaskEXT( static_cast( m_commandBuffer ), firstAttachment, attachmentCount, reinterpret_cast( pColorWriteMasks ) ); - d.vkCmdSetColorWriteMaskEXT( - static_cast( m_commandBuffer ), firstAttachment, attachmentCount, reinterpret_cast( pColorWriteMasks ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -27670,7 +25918,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdSetTessellationDomainOriginEXT( static_cast( m_commandBuffer ), static_cast( domainOrigin ) ); - d.vkCmdSetTessellationDomainOriginEXT( static_cast( m_commandBuffer ), static_cast( domainOrigin ) ); } template @@ -27678,7 +25925,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdSetRasterizationStreamEXT( static_cast( m_commandBuffer ), rasterizationStream ); - d.vkCmdSetRasterizationStreamEXT( static_cast( m_commandBuffer ), rasterizationStream ); } template @@ -27689,8 +25935,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdSetConservativeRasterizationModeEXT( static_cast( m_commandBuffer ), static_cast( conservativeRasterizationMode ) ); - d.vkCmdSetConservativeRasterizationModeEXT( static_cast( m_commandBuffer ), - static_cast( conservativeRasterizationMode ) ); } template @@ -27699,7 +25943,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdSetExtraPrimitiveOverestimationSizeEXT( static_cast( m_commandBuffer ), extraPrimitiveOverestimationSize ); - d.vkCmdSetExtraPrimitiveOverestimationSizeEXT( static_cast( m_commandBuffer ), extraPrimitiveOverestimationSize ); } template @@ -27707,7 +25950,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdSetDepthClipEnableEXT( static_cast( m_commandBuffer ), static_cast( depthClipEnable ) ); - d.vkCmdSetDepthClipEnableEXT( static_cast( m_commandBuffer ), static_cast( depthClipEnable ) ); } template @@ -27716,7 +25958,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdSetSampleLocationsEnableEXT( static_cast( m_commandBuffer ), static_cast( sampleLocationsEnable ) ); - d.vkCmdSetSampleLocationsEnableEXT( static_cast( m_commandBuffer ), static_cast( sampleLocationsEnable ) ); } template @@ -27730,10 +25971,6 @@ namespace VULKAN_HPP_NAMESPACE 
firstAttachment, attachmentCount, reinterpret_cast( pColorBlendAdvanced ) ); - d.vkCmdSetColorBlendAdvancedEXT( static_cast( m_commandBuffer ), - firstAttachment, - attachmentCount, - reinterpret_cast( pColorBlendAdvanced ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -27760,7 +25997,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdSetProvokingVertexModeEXT( static_cast( m_commandBuffer ), static_cast( provokingVertexMode ) ); - d.vkCmdSetProvokingVertexModeEXT( static_cast( m_commandBuffer ), static_cast( provokingVertexMode ) ); } template @@ -27769,7 +26005,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdSetLineRasterizationModeEXT( static_cast( m_commandBuffer ), static_cast( lineRasterizationMode ) ); - d.vkCmdSetLineRasterizationModeEXT( static_cast( m_commandBuffer ), static_cast( lineRasterizationMode ) ); } template @@ -27777,7 +26012,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdSetLineStippleEnableEXT( static_cast( m_commandBuffer ), static_cast( stippledLineEnable ) ); - d.vkCmdSetLineStippleEnableEXT( static_cast( m_commandBuffer ), static_cast( stippledLineEnable ) ); } template @@ -27786,7 +26020,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdSetDepthClipNegativeOneToOneEXT( static_cast( m_commandBuffer ), static_cast( negativeOneToOne ) ); - d.vkCmdSetDepthClipNegativeOneToOneEXT( static_cast( m_commandBuffer ), static_cast( negativeOneToOne ) ); } template @@ -27795,7 +26028,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdSetViewportWScalingEnableNV( static_cast( m_commandBuffer ), static_cast( viewportWScalingEnable ) ); - d.vkCmdSetViewportWScalingEnableNV( static_cast( m_commandBuffer ), static_cast( viewportWScalingEnable ) ); } template @@ -27807,8 +26039,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdSetViewportSwizzleNV( static_cast( m_commandBuffer ), firstViewport, viewportCount, reinterpret_cast( pViewportSwizzles ) ); - d.vkCmdSetViewportSwizzleNV( - static_cast( m_commandBuffer ), firstViewport, viewportCount, reinterpret_cast( pViewportSwizzles ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -27835,7 +26065,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdSetCoverageToColorEnableNV( static_cast( m_commandBuffer ), static_cast( coverageToColorEnable ) ); - d.vkCmdSetCoverageToColorEnableNV( static_cast( m_commandBuffer ), static_cast( coverageToColorEnable ) ); } template @@ -27843,7 +26072,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdSetCoverageToColorLocationNV( static_cast( m_commandBuffer ), coverageToColorLocation ); - d.vkCmdSetCoverageToColorLocationNV( static_cast( m_commandBuffer ), coverageToColorLocation ); } template @@ -27852,7 +26080,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdSetCoverageModulationModeNV( static_cast( m_commandBuffer ), static_cast( coverageModulationMode ) ); - d.vkCmdSetCoverageModulationModeNV( static_cast( m_commandBuffer ), static_cast( coverageModulationMode ) ); } template @@ -27861,7 +26088,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( 
d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdSetCoverageModulationTableEnableNV( static_cast( m_commandBuffer ), static_cast( coverageModulationTableEnable ) ); - d.vkCmdSetCoverageModulationTableEnableNV( static_cast( m_commandBuffer ), static_cast( coverageModulationTableEnable ) ); } template @@ -27871,7 +26097,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdSetCoverageModulationTableNV( static_cast( m_commandBuffer ), coverageModulationTableCount, pCoverageModulationTable ); - d.vkCmdSetCoverageModulationTableNV( static_cast( m_commandBuffer ), coverageModulationTableCount, pCoverageModulationTable ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -27895,7 +26120,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdSetShadingRateImageEnableNV( static_cast( m_commandBuffer ), static_cast( shadingRateImageEnable ) ); - d.vkCmdSetShadingRateImageEnableNV( static_cast( m_commandBuffer ), static_cast( shadingRateImageEnable ) ); } template @@ -27904,7 +26128,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdSetRepresentativeFragmentTestEnableNV( static_cast( m_commandBuffer ), static_cast( representativeFragmentTestEnable ) ); - d.vkCmdSetRepresentativeFragmentTestEnableNV( static_cast( m_commandBuffer ), static_cast( representativeFragmentTestEnable ) ); } template @@ -27913,7 +26136,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdSetCoverageReductionModeNV( static_cast( m_commandBuffer ), static_cast( coverageReductionMode ) ); - d.vkCmdSetCoverageReductionModeNV( static_cast( m_commandBuffer ), static_cast( coverageReductionMode ) ); } //=== VK_EXT_shader_module_identifier === @@ -27926,8 +26148,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkGetShaderModuleIdentifierEXT( static_cast( m_device ), static_cast( shaderModule ), reinterpret_cast( pIdentifier ) ); - d.vkGetShaderModuleIdentifierEXT( - static_cast( m_device ), static_cast( shaderModule ), reinterpret_cast( pIdentifier ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -27956,9 +26176,6 @@ namespace VULKAN_HPP_NAMESPACE d.vkGetShaderModuleCreateInfoIdentifierEXT( static_cast( m_device ), reinterpret_cast( pCreateInfo ), reinterpret_cast( pIdentifier ) ); - d.vkGetShaderModuleCreateInfoIdentifierEXT( static_cast( m_device ), - reinterpret_cast( pCreateInfo ), - reinterpret_cast( pIdentifier ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -27992,7 +26209,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( - d.vkGetPhysicalDeviceOpticalFlowImageFormatsNV( static_cast( m_physicalDevice ), d.vkGetPhysicalDeviceOpticalFlowImageFormatsNV( static_cast( m_physicalDevice ), reinterpret_cast( pOpticalFlowImageFormatInfo ), pFormatCount, @@ -28000,11 +26216,6 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template ::value, - int>::type> template ( d.vkCreateOpticalFlowSessionNV( static_cast( m_device ), return static_cast( d.vkCreateOpticalFlowSessionNV( static_cast( m_device ), reinterpret_cast( pCreateInfo ), reinterpret_cast( pAllocator ), @@ -28129,10 +26335,8 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &session ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, 
VULKAN_HPP_NAMESPACE_STRING "::Device::createOpticalFlowSessionNV" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createOpticalFlowSessionNV" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( session ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( session ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE @@ -28154,12 +26358,9 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( static_cast( allocator ) ), reinterpret_cast( &session ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createOpticalFlowSessionNVUnique" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createOpticalFlowSessionNVUnique" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, UniqueHandle( session, detail::ObjectDestroy( *this, allocator, d ) ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( - result, UniqueHandle( session, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -28172,8 +26373,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkDestroyOpticalFlowSessionNV( static_cast( m_device ), static_cast( session ), reinterpret_cast( pAllocator ) ); - d.vkDestroyOpticalFlowSessionNV( - static_cast( m_device ), static_cast( session ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -28202,8 +26401,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkDestroyOpticalFlowSessionNV( static_cast( m_device ), static_cast( session ), reinterpret_cast( pAllocator ) ); - d.vkDestroyOpticalFlowSessionNV( - static_cast( m_device ), static_cast( session ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -28233,7 +26430,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkBindOpticalFlowSessionImageNV( static_cast( m_device ), return static_cast( d.vkBindOpticalFlowSessionImageNV( static_cast( m_device ), static_cast( session ), static_cast( bindingPoint ), @@ -28261,10 +26457,8 @@ namespace VULKAN_HPP_NAMESPACE static_cast( view ), static_cast( layout ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindOpticalFlowSessionImageNV" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindOpticalFlowSessionImageNV" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ @@ -28277,9 +26471,6 @@ namespace VULKAN_HPP_NAMESPACE d.vkCmdOpticalFlowExecuteNV( static_cast( m_commandBuffer ), static_cast( session ), reinterpret_cast( pExecuteInfo ) ); - d.vkCmdOpticalFlowExecuteNV( static_cast( m_commandBuffer ), - static_cast( session ), - reinterpret_cast( pExecuteInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -28308,7 +26499,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdBindIndexBuffer2KHR( static_cast( m_commandBuffer ), d.vkCmdBindIndexBuffer2KHR( static_cast( m_commandBuffer ), 
static_cast( buffer ), static_cast( offset ), @@ -28317,138 +26507,131 @@ namespace VULKAN_HPP_NAMESPACE } template - VULKAN_HPP_INLINE void Device::getRenderingAreaGranularityKHR( const VULKAN_HPP_NAMESPACE::RenderingAreaInfoKHR * pRenderingAreaInfo, - VULKAN_HPP_NAMESPACE::Extent2D * pGranularity, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void Device::getRenderingAreaGranularityKHR( const VULKAN_HPP_NAMESPACE::RenderingAreaInfo * pRenderingAreaInfo, + VULKAN_HPP_NAMESPACE::Extent2D * pGranularity, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetRenderingAreaGranularityKHR( static_cast( m_device ), - reinterpret_cast( pRenderingAreaInfo ), - reinterpret_cast( pGranularity ) ); - d.vkGetRenderingAreaGranularityKHR( static_cast( m_device ), - reinterpret_cast( pRenderingAreaInfo ), - reinterpret_cast( pGranularity ) ); + d.vkGetRenderingAreaGranularityKHR( + static_cast( m_device ), reinterpret_cast( pRenderingAreaInfo ), reinterpret_cast( pGranularity ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Extent2D - Device::getRenderingAreaGranularityKHR( const VULKAN_HPP_NAMESPACE::RenderingAreaInfoKHR & renderingAreaInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + Device::getRenderingAreaGranularityKHR( const VULKAN_HPP_NAMESPACE::RenderingAreaInfo & renderingAreaInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) - VULKAN_HPP_ASSERT( d.vkGetRenderingAreaGranularityKHR && "Function requires " ); + VULKAN_HPP_ASSERT( d.vkGetRenderingAreaGranularityKHR && "Function requires or " ); # endif VULKAN_HPP_NAMESPACE::Extent2D granularity; d.vkGetRenderingAreaGranularityKHR( - m_device, reinterpret_cast( &renderingAreaInfo ), reinterpret_cast( &granularity ) ); + m_device, reinterpret_cast( &renderingAreaInfo ), reinterpret_cast( &granularity ) ); return granularity; } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_INLINE void Device::getImageSubresourceLayoutKHR( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfoKHR * pInfo, - VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR * pLayout, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void Device::getImageSubresourceLayoutKHR( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfo * pInfo, + VULKAN_HPP_NAMESPACE::SubresourceLayout2 * pLayout, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetDeviceImageSubresourceLayoutKHR( static_cast( m_device ), - reinterpret_cast( pInfo ), - reinterpret_cast( pLayout ) ); - d.vkGetDeviceImageSubresourceLayoutKHR( static_cast( m_device ), - reinterpret_cast( pInfo ), - reinterpret_cast( pLayout ) ); + d.vkGetDeviceImageSubresourceLayoutKHR( + static_cast( m_device ), reinterpret_cast( pInfo ), reinterpret_cast( pLayout ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR - Device::getImageSubresourceLayoutKHR( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfoKHR & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout2 + Device::getImageSubresourceLayoutKHR( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfo & info, Dispatch const & d ) 
const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) - VULKAN_HPP_ASSERT( d.vkGetDeviceImageSubresourceLayoutKHR && "Function requires " ); + VULKAN_HPP_ASSERT( d.vkGetDeviceImageSubresourceLayoutKHR && + "Function requires or " ); # endif - VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR layout; + VULKAN_HPP_NAMESPACE::SubresourceLayout2 layout; d.vkGetDeviceImageSubresourceLayoutKHR( - m_device, reinterpret_cast( &info ), reinterpret_cast( &layout ) ); + m_device, reinterpret_cast( &info ), reinterpret_cast( &layout ) ); return layout; } template VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain - Device::getImageSubresourceLayoutKHR( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfoKHR & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + Device::getImageSubresourceLayoutKHR( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfo & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) - VULKAN_HPP_ASSERT( d.vkGetDeviceImageSubresourceLayoutKHR && "Function requires " ); + VULKAN_HPP_ASSERT( d.vkGetDeviceImageSubresourceLayoutKHR && + "Function requires or " ); # endif VULKAN_HPP_NAMESPACE::StructureChain structureChain; - VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR & layout = structureChain.template get(); + VULKAN_HPP_NAMESPACE::SubresourceLayout2 & layout = structureChain.template get(); d.vkGetDeviceImageSubresourceLayoutKHR( - m_device, reinterpret_cast( &info ), reinterpret_cast( &layout ) ); + m_device, reinterpret_cast( &info ), reinterpret_cast( &layout ) ); return structureChain; } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_INLINE void Device::getImageSubresourceLayout2KHR( VULKAN_HPP_NAMESPACE::Image image, - const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR * pSubresource, - VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR * pLayout, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void Device::getImageSubresourceLayout2KHR( VULKAN_HPP_NAMESPACE::Image image, + const VULKAN_HPP_NAMESPACE::ImageSubresource2 * pSubresource, + VULKAN_HPP_NAMESPACE::SubresourceLayout2 * pLayout, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetImageSubresourceLayout2KHR( static_cast( m_device ), d.vkGetImageSubresourceLayout2KHR( static_cast( m_device ), static_cast( image ), - reinterpret_cast( pSubresource ), - reinterpret_cast( pLayout ) ); + reinterpret_cast( pSubresource ), + reinterpret_cast( pLayout ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR Device::getImageSubresourceLayout2KHR( - VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout2 Device::getImageSubresourceLayout2KHR( + VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource2 & subresource, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) VULKAN_HPP_ASSERT( d.vkGetImageSubresourceLayout2KHR && - "Function requires or or " ); + "Function requires or or or " ); # endif - 
VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR layout; + VULKAN_HPP_NAMESPACE::SubresourceLayout2 layout; d.vkGetImageSubresourceLayout2KHR( m_device, static_cast( image ), - reinterpret_cast( &subresource ), - reinterpret_cast( &layout ) ); + reinterpret_cast( &subresource ), + reinterpret_cast( &layout ) ); return layout; } template VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain Device::getImageSubresourceLayout2KHR( - VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource2 & subresource, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) VULKAN_HPP_ASSERT( d.vkGetImageSubresourceLayout2KHR && - "Function requires or or " ); + "Function requires or or or " ); # endif VULKAN_HPP_NAMESPACE::StructureChain structureChain; - VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR & layout = structureChain.template get(); + VULKAN_HPP_NAMESPACE::SubresourceLayout2 & layout = structureChain.template get(); d.vkGetImageSubresourceLayout2KHR( m_device, static_cast( image ), - reinterpret_cast( &subresource ), - reinterpret_cast( &layout ) ); + reinterpret_cast( &subresource ), + reinterpret_cast( &layout ) ); return structureChain; } @@ -28463,28 +26646,6 @@ namespace VULKAN_HPP_NAMESPACE d.vkAntiLagUpdateAMD( static_cast( m_device ), reinterpret_cast( pData ) ); } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::antiLagUpdateAMD( const VULKAN_HPP_NAMESPACE::AntiLagDataAMD & data, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT - { - VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); -# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) - VULKAN_HPP_ASSERT( d.vkAntiLagUpdateAMD && "Function requires " ); -# endif - - d.vkAntiLagUpdateAMD( m_device, reinterpret_cast( &data ) ); - } -#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - - //=== VK_AMD_anti_lag === - - template - VULKAN_HPP_INLINE void Device::antiLagUpdateAMD( const VULKAN_HPP_NAMESPACE::AntiLagDataAMD * pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT - { - VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkAntiLagUpdateAMD( static_cast( m_device ), reinterpret_cast( pData ) ); - } - #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template VULKAN_HPP_INLINE void Device::antiLagUpdateAMD( const VULKAN_HPP_NAMESPACE::AntiLagDataAMD & data, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT @@ -28508,7 +26669,6 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateShadersEXT( static_cast( m_device ), return static_cast( d.vkCreateShadersEXT( static_cast( m_device ), createInfoCount, reinterpret_cast( pCreateInfos ), @@ -28517,9 +26677,6 @@ namespace VULKAN_HPP_NAMESPACE } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template ::value, int>::type> template ::value, int>::type> @@ -28543,9 +26700,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createShadersEXT", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncompatibleShaderBinaryEXT } ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, - VULKAN_HPP_NAMESPACE_STRING "::Device::createShadersEXT", - { 
VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncompatibleShaderBinaryEXT } ); return ResultValue>( result, std::move( shaders ) ); } @@ -28574,9 +26728,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createShadersEXT", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncompatibleShaderBinaryEXT } ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, - VULKAN_HPP_NAMESPACE_STRING "::Device::createShadersEXT", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncompatibleShaderBinaryEXT } ); return ResultValue>( result, std::move( shaders ) ); } @@ -28602,18 +26753,11 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createShaderEXT", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncompatibleShaderBinaryEXT } ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, - VULKAN_HPP_NAMESPACE_STRING "::Device::createShaderEXT", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncompatibleShaderBinaryEXT } ); return ResultValue( result, std::move( shader ) ); } # ifndef VULKAN_HPP_NO_SMART_HANDLE - template < - typename Dispatch, - typename ShaderEXTAllocator, - typename std::enable_if>::value, int>::type> template < typename Dispatch, typename ShaderEXTAllocator, @@ -28638,13 +26782,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createShadersEXTUnique", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncompatibleShaderBinaryEXT } ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, - VULKAN_HPP_NAMESPACE_STRING "::Device::createShadersEXTUnique", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncompatibleShaderBinaryEXT } ); std::vector, ShaderEXTAllocator> uniqueShaders; uniqueShaders.reserve( createInfos.size() ); detail::ObjectDestroy deleter( *this, allocator, d ); - detail::ObjectDestroy deleter( *this, allocator, d ); for ( auto const & shader : shaders ) { uniqueShaders.push_back( UniqueHandle( shader, deleter ) ); @@ -28677,13 +26817,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createShadersEXTUnique", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncompatibleShaderBinaryEXT } ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, - VULKAN_HPP_NAMESPACE_STRING "::Device::createShadersEXTUnique", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncompatibleShaderBinaryEXT } ); std::vector, ShaderEXTAllocator> uniqueShaders( shaderEXTAllocator ); uniqueShaders.reserve( createInfos.size() ); detail::ObjectDestroy deleter( *this, allocator, d ); - detail::ObjectDestroy deleter( *this, allocator, d ); for ( auto const & shader : shaders ) { uniqueShaders.push_back( UniqueHandle( shader, deleter ) ); @@ -28712,13 +26848,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createShaderEXTUnique", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncompatibleShaderBinaryEXT } ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, - VULKAN_HPP_NAMESPACE_STRING "::Device::createShaderEXTUnique", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, 
VULKAN_HPP_NAMESPACE::Result::eIncompatibleShaderBinaryEXT } ); return ResultValue>( result, UniqueHandle( shader, detail::ObjectDestroy( *this, allocator, d ) ) ); - result, UniqueHandle( shader, detail::ObjectDestroy( *this, allocator, d ) ) ); } # endif /* VULKAN_HPP_NO_SMART_HANDLE */ #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -28731,8 +26863,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkDestroyShaderEXT( static_cast( m_device ), static_cast( shader ), reinterpret_cast( pAllocator ) ); - d.vkDestroyShaderEXT( - static_cast( m_device ), static_cast( shader ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -28760,8 +26890,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkDestroyShaderEXT( static_cast( m_device ), static_cast( shader ), reinterpret_cast( pAllocator ) ); - d.vkDestroyShaderEXT( - static_cast( m_device ), static_cast( shader ), reinterpret_cast( pAllocator ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -28787,13 +26915,9 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( d.vkGetShaderBinaryDataEXT( static_cast( m_device ), static_cast( shader ), pDataSize, pData ) ); - return static_cast( d.vkGetShaderBinaryDataEXT( static_cast( m_device ), static_cast( shader ), pDataSize, pData ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template ::value, int>::type> template ::value, int>::type> @@ -28819,14 +26943,12 @@ namespace VULKAN_HPP_NAMESPACE } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getShaderBinaryDataEXT" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getShaderBinaryDataEXT" ); VULKAN_HPP_ASSERT( dataSize <= data.size() ); if ( dataSize < data.size() ) { data.resize( dataSize ); } return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data ) ); } template ( pStages ), reinterpret_cast( pShaders ) ); - d.vkCmdBindShadersEXT( static_cast( m_commandBuffer ), - stageCount, - reinterpret_cast( pStages ), - reinterpret_cast( pShaders ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -29395,511 +27511,6 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( pAllocator ) ) ); } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::releaseCapturedPipelineDataKHR( const VULKAN_HPP_NAMESPACE::ReleaseCapturedPipelineDataInfoKHR & info, - Optional allocator, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT - { - VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); -# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) - VULKAN_HPP_ASSERT( d.vkReleaseCapturedPipelineDataKHR && "Function requires " ); -# endif - - d.vkReleaseCapturedPipelineDataKHR( - m_device, - reinterpret_cast( &info ), - reinterpret_cast( static_cast( allocator ) ) ); - } -#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - - template - VULKAN_HPP_INLINE void CommandBuffer::setDepthClampRangeEXT( VULKAN_HPP_NAMESPACE::DepthClampModeEXT depthClampMode, - const VULKAN_HPP_NAMESPACE::DepthClampRangeEXT * pDepthClampRange, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT - { - VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdSetDepthClampRangeEXT( static_cast( 
m_commandBuffer ), - static_cast( depthClampMode ), - reinterpret_cast( pDepthClampRange ) ); - } - -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::setDepthClampRangeEXT( VULKAN_HPP_NAMESPACE::DepthClampModeEXT depthClampMode, - Optional depthClampRange, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT - { - VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); -# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) - VULKAN_HPP_ASSERT( d.vkCmdSetDepthClampRangeEXT && - "Function requires or " ); -# endif - - d.vkCmdSetDepthClampRangeEXT( - m_commandBuffer, - static_cast( depthClampMode ), - reinterpret_cast( static_cast( depthClampRange ) ) ); - } -#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - - //=== VK_KHR_pipeline_binary === - - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createPipelineBinariesKHR( const VULKAN_HPP_NAMESPACE::PipelineBinaryCreateInfoKHR * pCreateInfo, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - VULKAN_HPP_NAMESPACE::PipelineBinaryHandlesInfoKHR * pBinaries, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT - { - VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreatePipelineBinariesKHR( static_cast( m_device ), - reinterpret_cast( pCreateInfo ), - reinterpret_cast( pAllocator ), - reinterpret_cast( pBinaries ) ) ); - } - -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template ::value, int>::type> - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> - Device::createPipelineBinariesKHR( const VULKAN_HPP_NAMESPACE::PipelineBinaryCreateInfoKHR & createInfo, - Optional allocator, - Dispatch const & d ) const - { - VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); -# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) - VULKAN_HPP_ASSERT( d.vkCreatePipelineBinariesKHR && "Function requires " ); -# endif - - std::vector pipelineBinaries; - VULKAN_HPP_NAMESPACE::PipelineBinaryHandlesInfoKHR binaries; - VULKAN_HPP_NAMESPACE::Result result; - if ( createInfo.pKeysAndDataInfo ) - { - VULKAN_HPP_ASSERT( !createInfo.pipeline && !createInfo.pPipelineCreateInfo ); - pipelineBinaries.resize( createInfo.pKeysAndDataInfo->binaryCount ); - binaries.pipelineBinaryCount = createInfo.pKeysAndDataInfo->binaryCount; - binaries.pPipelineBinaries = pipelineBinaries.data(); - result = static_cast( d.vkCreatePipelineBinariesKHR( - m_device, - reinterpret_cast( &createInfo ), - reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &binaries ) ) ); - } - else - { - VULKAN_HPP_ASSERT( !createInfo.pipeline ^ !createInfo.pPipelineCreateInfo ); - result = static_cast( d.vkCreatePipelineBinariesKHR( - m_device, - reinterpret_cast( &createInfo ), - reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &binaries ) ) ); - if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) - { - pipelineBinaries.resize( binaries.pipelineBinaryCount ); - binaries.pPipelineBinaries = pipelineBinaries.data(); - result = static_cast( d.vkCreatePipelineBinariesKHR( - m_device, - reinterpret_cast( &createInfo ), - reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &binaries ) ) ); - } - } - - VULKAN_HPP_NAMESPACE::detail::resultCheck( - result, - VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineBinariesKHR", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncomplete, VULKAN_HPP_NAMESPACE::Result::ePipelineBinaryMissingKHR } ); - - return ResultValue>( result, std::move( pipelineBinaries ) ); - } - - template 
::value, int>::type> - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue> - Device::createPipelineBinariesKHR( const VULKAN_HPP_NAMESPACE::PipelineBinaryCreateInfoKHR & createInfo, - Optional allocator, - PipelineBinaryKHRAllocator & pipelineBinaryKHRAllocator, - Dispatch const & d ) const - { - VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); -# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) - VULKAN_HPP_ASSERT( d.vkCreatePipelineBinariesKHR && "Function requires " ); -# endif - - std::vector pipelineBinaries( pipelineBinaryKHRAllocator ); - VULKAN_HPP_NAMESPACE::PipelineBinaryHandlesInfoKHR binaries; - VULKAN_HPP_NAMESPACE::Result result; - if ( createInfo.pKeysAndDataInfo ) - { - VULKAN_HPP_ASSERT( !createInfo.pipeline && !createInfo.pPipelineCreateInfo ); - pipelineBinaries.resize( createInfo.pKeysAndDataInfo->binaryCount ); - binaries.pipelineBinaryCount = createInfo.pKeysAndDataInfo->binaryCount; - binaries.pPipelineBinaries = pipelineBinaries.data(); - result = static_cast( d.vkCreatePipelineBinariesKHR( - m_device, - reinterpret_cast( &createInfo ), - reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &binaries ) ) ); - } - else - { - VULKAN_HPP_ASSERT( !createInfo.pipeline ^ !createInfo.pPipelineCreateInfo ); - result = static_cast( d.vkCreatePipelineBinariesKHR( - m_device, - reinterpret_cast( &createInfo ), - reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &binaries ) ) ); - if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) - { - pipelineBinaries.resize( binaries.pipelineBinaryCount ); - binaries.pPipelineBinaries = pipelineBinaries.data(); - result = static_cast( d.vkCreatePipelineBinariesKHR( - m_device, - reinterpret_cast( &createInfo ), - reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &binaries ) ) ); - } - } - - VULKAN_HPP_NAMESPACE::detail::resultCheck( - result, - VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineBinariesKHR", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncomplete, VULKAN_HPP_NAMESPACE::Result::ePipelineBinaryMissingKHR } ); - - return ResultValue>( result, std::move( pipelineBinaries ) ); - } - -# ifndef VULKAN_HPP_NO_SMART_HANDLE - template >::value, - int>::type> - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue, PipelineBinaryKHRAllocator>> - Device::createPipelineBinariesKHRUnique( const VULKAN_HPP_NAMESPACE::PipelineBinaryCreateInfoKHR & createInfo, - Optional allocator, - Dispatch const & d ) const - { - VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); -# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) - VULKAN_HPP_ASSERT( d.vkCreatePipelineBinariesKHR && "Function requires " ); -# endif - - std::vector pipelineBinaries; - VULKAN_HPP_NAMESPACE::PipelineBinaryHandlesInfoKHR binaries; - VULKAN_HPP_NAMESPACE::Result result; - if ( createInfo.pKeysAndDataInfo ) - { - VULKAN_HPP_ASSERT( !createInfo.pipeline && !createInfo.pPipelineCreateInfo ); - pipelineBinaries.resize( createInfo.pKeysAndDataInfo->binaryCount ); - binaries.pipelineBinaryCount = createInfo.pKeysAndDataInfo->binaryCount; - binaries.pPipelineBinaries = pipelineBinaries.data(); - result = static_cast( d.vkCreatePipelineBinariesKHR( - m_device, - reinterpret_cast( &createInfo ), - reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &binaries ) ) ); - } - else - { - VULKAN_HPP_ASSERT( !createInfo.pipeline ^ !createInfo.pPipelineCreateInfo ); - result = static_cast( d.vkCreatePipelineBinariesKHR( - m_device, - reinterpret_cast( &createInfo ), - 
reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &binaries ) ) ); - if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) - { - pipelineBinaries.resize( binaries.pipelineBinaryCount ); - binaries.pPipelineBinaries = pipelineBinaries.data(); - result = static_cast( d.vkCreatePipelineBinariesKHR( - m_device, - reinterpret_cast( &createInfo ), - reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &binaries ) ) ); - } - } - - VULKAN_HPP_NAMESPACE::detail::resultCheck( - result, - VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineBinariesKHRUnique", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncomplete, VULKAN_HPP_NAMESPACE::Result::ePipelineBinaryMissingKHR } ); - std::vector, PipelineBinaryKHRAllocator> uniquePipelineBinaries; - uniquePipelineBinaries.reserve( pipelineBinaries.size() ); - detail::ObjectDestroy deleter( *this, allocator, d ); - for ( auto const & pipelineBinary : pipelineBinaries ) - { - uniquePipelineBinaries.push_back( UniqueHandle( pipelineBinary, deleter ) ); - } - return ResultValue, PipelineBinaryKHRAllocator>>( - result, std::move( uniquePipelineBinaries ) ); - } - - template >::value, - int>::type> - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue, PipelineBinaryKHRAllocator>> - Device::createPipelineBinariesKHRUnique( const VULKAN_HPP_NAMESPACE::PipelineBinaryCreateInfoKHR & createInfo, - Optional allocator, - PipelineBinaryKHRAllocator & pipelineBinaryKHRAllocator, - Dispatch const & d ) const - { - VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); -# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) - VULKAN_HPP_ASSERT( d.vkCreatePipelineBinariesKHR && "Function requires " ); -# endif - - std::vector pipelineBinaries; - VULKAN_HPP_NAMESPACE::PipelineBinaryHandlesInfoKHR binaries; - VULKAN_HPP_NAMESPACE::Result result; - if ( createInfo.pKeysAndDataInfo ) - { - VULKAN_HPP_ASSERT( !createInfo.pipeline && !createInfo.pPipelineCreateInfo ); - pipelineBinaries.resize( createInfo.pKeysAndDataInfo->binaryCount ); - binaries.pipelineBinaryCount = createInfo.pKeysAndDataInfo->binaryCount; - binaries.pPipelineBinaries = pipelineBinaries.data(); - result = static_cast( d.vkCreatePipelineBinariesKHR( - m_device, - reinterpret_cast( &createInfo ), - reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &binaries ) ) ); - } - else - { - VULKAN_HPP_ASSERT( !createInfo.pipeline ^ !createInfo.pPipelineCreateInfo ); - result = static_cast( d.vkCreatePipelineBinariesKHR( - m_device, - reinterpret_cast( &createInfo ), - reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &binaries ) ) ); - if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) - { - pipelineBinaries.resize( binaries.pipelineBinaryCount ); - binaries.pPipelineBinaries = pipelineBinaries.data(); - result = static_cast( d.vkCreatePipelineBinariesKHR( - m_device, - reinterpret_cast( &createInfo ), - reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &binaries ) ) ); - } - } - - VULKAN_HPP_NAMESPACE::detail::resultCheck( - result, - VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineBinariesKHRUnique", - { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncomplete, VULKAN_HPP_NAMESPACE::Result::ePipelineBinaryMissingKHR } ); - std::vector, PipelineBinaryKHRAllocator> uniquePipelineBinaries( - pipelineBinaryKHRAllocator ); - uniquePipelineBinaries.reserve( pipelineBinaries.size() ); - detail::ObjectDestroy deleter( *this, allocator, d ); - for ( auto const & pipelineBinary : 
pipelineBinaries ) - { - uniquePipelineBinaries.push_back( UniqueHandle( pipelineBinary, deleter ) ); - } - return ResultValue, PipelineBinaryKHRAllocator>>( - result, std::move( uniquePipelineBinaries ) ); - } -# endif /* VULKAN_HPP_NO_SMART_HANDLE */ -#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - - template - VULKAN_HPP_INLINE void Device::destroyPipelineBinaryKHR( VULKAN_HPP_NAMESPACE::PipelineBinaryKHR pipelineBinary, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT - { - VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyPipelineBinaryKHR( - static_cast( m_device ), static_cast( pipelineBinary ), reinterpret_cast( pAllocator ) ); - } - -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroyPipelineBinaryKHR( VULKAN_HPP_NAMESPACE::PipelineBinaryKHR pipelineBinary, - Optional allocator, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT - { - VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); -# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) - VULKAN_HPP_ASSERT( d.vkDestroyPipelineBinaryKHR && "Function requires " ); -# endif - - d.vkDestroyPipelineBinaryKHR( - m_device, - static_cast( pipelineBinary ), - reinterpret_cast( static_cast( allocator ) ) ); - } -#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineBinaryKHR pipelineBinary, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT - { - VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyPipelineBinaryKHR( - static_cast( m_device ), static_cast( pipelineBinary ), reinterpret_cast( pAllocator ) ); - } - -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineBinaryKHR pipelineBinary, - Optional allocator, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT - { - VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); -# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) - VULKAN_HPP_ASSERT( d.vkDestroyPipelineBinaryKHR && "Function requires " ); -# endif - - d.vkDestroyPipelineBinaryKHR( - m_device, - static_cast( pipelineBinary ), - reinterpret_cast( static_cast( allocator ) ) ); - } -#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getPipelineKeyKHR( const VULKAN_HPP_NAMESPACE::PipelineCreateInfoKHR * pPipelineCreateInfo, - VULKAN_HPP_NAMESPACE::PipelineBinaryKeyKHR * pPipelineKey, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT - { - VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetPipelineKeyKHR( static_cast( m_device ), - reinterpret_cast( pPipelineCreateInfo ), - reinterpret_cast( pPipelineKey ) ) ); - } - -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type - Device::getPipelineKeyKHR( Optional pipelineCreateInfo, Dispatch const & d ) const - { - VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); -# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) - VULKAN_HPP_ASSERT( d.vkGetPipelineKeyKHR && "Function requires " ); -# endif - - VULKAN_HPP_NAMESPACE::PipelineBinaryKeyKHR pipelineKey; - VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetPipelineKeyKHR( - m_device, - reinterpret_cast( static_cast( pipelineCreateInfo ) ), - reinterpret_cast( 
&pipelineKey ) ) ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineKeyKHR" ); - - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( pipelineKey ) ); - } -#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getPipelineBinaryDataKHR( const VULKAN_HPP_NAMESPACE::PipelineBinaryDataInfoKHR * pInfo, - VULKAN_HPP_NAMESPACE::PipelineBinaryKeyKHR * pPipelineBinaryKey, - size_t * pPipelineBinaryDataSize, - void * pPipelineBinaryData, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT - { - VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetPipelineBinaryDataKHR( static_cast( m_device ), - reinterpret_cast( pInfo ), - reinterpret_cast( pPipelineBinaryKey ), - pPipelineBinaryDataSize, - pPipelineBinaryData ) ); - } - -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template ::value, int>::type> - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE - typename ResultValueType>>::type - Device::getPipelineBinaryDataKHR( const VULKAN_HPP_NAMESPACE::PipelineBinaryDataInfoKHR & info, Dispatch const & d ) const - { - VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); -# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) - VULKAN_HPP_ASSERT( d.vkGetPipelineBinaryDataKHR && "Function requires " ); -# endif - - std::pair> data_; - VULKAN_HPP_NAMESPACE::PipelineBinaryKeyKHR & pipelineBinaryKey = data_.first; - std::vector & pipelineBinaryData = data_.second; - size_t pipelineBinaryDataSize; - VULKAN_HPP_NAMESPACE::Result result = - static_cast( d.vkGetPipelineBinaryDataKHR( m_device, - reinterpret_cast( &info ), - reinterpret_cast( &pipelineBinaryKey ), - &pipelineBinaryDataSize, - nullptr ) ); - if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) - { - pipelineBinaryData.resize( pipelineBinaryDataSize ); - result = static_cast( d.vkGetPipelineBinaryDataKHR( m_device, - reinterpret_cast( &info ), - reinterpret_cast( &pipelineBinaryKey ), - &pipelineBinaryDataSize, - reinterpret_cast( pipelineBinaryData.data() ) ) ); - } - - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineBinaryDataKHR" ); - - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data_ ) ); - } - - template ::value, int>::type> - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE - typename ResultValueType>>::type - Device::getPipelineBinaryDataKHR( const VULKAN_HPP_NAMESPACE::PipelineBinaryDataInfoKHR & info, - Uint8_tAllocator & uint8_tAllocator, - Dispatch const & d ) const - { - VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); -# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) - VULKAN_HPP_ASSERT( d.vkGetPipelineBinaryDataKHR && "Function requires " ); -# endif - - std::pair> data_( - std::piecewise_construct, std::forward_as_tuple(), std::forward_as_tuple( uint8_tAllocator ) ); - VULKAN_HPP_NAMESPACE::PipelineBinaryKeyKHR & pipelineBinaryKey = data_.first; - std::vector & pipelineBinaryData = data_.second; - size_t pipelineBinaryDataSize; - VULKAN_HPP_NAMESPACE::Result result = - static_cast( d.vkGetPipelineBinaryDataKHR( m_device, - reinterpret_cast( &info ), - reinterpret_cast( &pipelineBinaryKey ), - &pipelineBinaryDataSize, - nullptr ) ); - if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) - { - pipelineBinaryData.resize( pipelineBinaryDataSize ); - result = static_cast( d.vkGetPipelineBinaryDataKHR( m_device, - reinterpret_cast( &info ), - reinterpret_cast( 
&pipelineBinaryKey ), - &pipelineBinaryDataSize, - reinterpret_cast( pipelineBinaryData.data() ) ) ); - } - - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineBinaryDataKHR" ); - - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data_ ) ); - } -#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - - template - VULKAN_HPP_INLINE Result Device::releaseCapturedPipelineDataKHR( const VULKAN_HPP_NAMESPACE::ReleaseCapturedPipelineDataInfoKHR * pInfo, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT - { - VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkReleaseCapturedPipelineDataKHR( static_cast( m_device ), - reinterpret_cast( pInfo ), - reinterpret_cast( pAllocator ) ) ); - } - #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template VULKAN_HPP_INLINE void Device::releaseCapturedPipelineDataKHR( const VULKAN_HPP_NAMESPACE::ReleaseCapturedPipelineDataInfoKHR & info, @@ -29931,16 +27542,9 @@ namespace VULKAN_HPP_NAMESPACE static_cast( framebuffer ), pPropertiesCount, reinterpret_cast( pProperties ) ) ); - return static_cast( d.vkGetFramebufferTilePropertiesQCOM( static_cast( m_device ), - static_cast( framebuffer ), - pPropertiesCount, - reinterpret_cast( pProperties ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template ::value, int>::type> template ::value, int>::type> @@ -30021,9 +27625,6 @@ namespace VULKAN_HPP_NAMESPACE return static_cast( d.vkGetDynamicRenderingTilePropertiesQCOM( static_cast( m_device ), reinterpret_cast( pRenderingInfo ), reinterpret_cast( pProperties ) ) ); - return static_cast( d.vkGetDynamicRenderingTilePropertiesQCOM( static_cast( m_device ), - reinterpret_cast( pRenderingInfo ), - reinterpret_cast( pProperties ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -30054,8 +27655,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( d.vkSetLatencySleepModeNV( static_cast( m_device ), static_cast( swapchain ), reinterpret_cast( pSleepModeInfo ) ) ); - return static_cast( d.vkSetLatencySleepModeNV( - static_cast( m_device ), static_cast( swapchain ), reinterpret_cast( pSleepModeInfo ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -30072,10 +27671,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkSetLatencySleepModeNV( m_device, static_cast( swapchain ), reinterpret_cast( &sleepModeInfo ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setLatencySleepModeNV" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setLatencySleepModeNV" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -30087,8 +27684,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( d.vkLatencySleepNV( static_cast( m_device ), static_cast( swapchain ), reinterpret_cast( pSleepInfo ) ) ); - return static_cast( d.vkLatencySleepNV( - static_cast( m_device ), static_cast( swapchain ), reinterpret_cast( pSleepInfo ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -30114,8 +27709,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkSetLatencyMarkerNV( static_cast( 
m_device ), static_cast( swapchain ), reinterpret_cast( pLatencyMarkerInfo ) ); - d.vkSetLatencyMarkerNV( - static_cast( m_device ), static_cast( swapchain ), reinterpret_cast( pLatencyMarkerInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -30141,18 +27734,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkGetLatencyTimingsNV( static_cast( m_device ), static_cast( swapchain ), reinterpret_cast( pLatencyMarkerInfo ) ); - d.vkGetLatencyTimingsNV( - static_cast( m_device ), static_cast( swapchain ), reinterpret_cast( pLatencyMarkerInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template < - typename LatencyTimingsFrameReportNVAllocator, - typename Dispatch, - typename std::enable_if::value, - int>::type> - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector - Device::getLatencyTimingsNV( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const template < typename LatencyTimingsFrameReportNVAllocator, typename Dispatch, @@ -30166,11 +27750,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.vkGetLatencyTimingsNV && "Function requires " ); # endif - std::vector timings; - VULKAN_HPP_NAMESPACE::GetLatencyMarkerInfoNV latencyMarkerInfo; - d.vkGetLatencyTimingsNV( m_device, static_cast( swapchain ), reinterpret_cast( &latencyMarkerInfo ) ); - timings.resize( latencyMarkerInfo.timingCount ); - latencyMarkerInfo.pTimings = timings.data(); std::vector timings; VULKAN_HPP_NAMESPACE::GetLatencyMarkerInfoNV latencyMarkerInfo; d.vkGetLatencyTimingsNV( m_device, static_cast( swapchain ), reinterpret_cast( &latencyMarkerInfo ) ); @@ -30181,32 +27760,6 @@ namespace VULKAN_HPP_NAMESPACE return timings; } - template < - typename LatencyTimingsFrameReportNVAllocator, - typename Dispatch, - typename std::enable_if::value, - int>::type> - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector - Device::getLatencyTimingsNV( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, - LatencyTimingsFrameReportNVAllocator & latencyTimingsFrameReportNVAllocator, - Dispatch const & d ) const - { - VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); -# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) - VULKAN_HPP_ASSERT( d.vkGetLatencyTimingsNV && "Function requires " ); -# endif - - std::vector timings( latencyTimingsFrameReportNVAllocator ); - VULKAN_HPP_NAMESPACE::GetLatencyMarkerInfoNV latencyMarkerInfo; - d.vkGetLatencyTimingsNV( m_device, static_cast( swapchain ), reinterpret_cast( &latencyMarkerInfo ) ); - timings.resize( latencyMarkerInfo.timingCount ); - latencyMarkerInfo.pTimings = timings.data(); - d.vkGetLatencyTimingsNV( m_device, static_cast( swapchain ), reinterpret_cast( &latencyMarkerInfo ) ); - - return timings; - return timings; - } - template < typename LatencyTimingsFrameReportNVAllocator, typename Dispatch, @@ -30239,7 +27792,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkQueueNotifyOutOfBandNV( static_cast( m_queue ), reinterpret_cast( pQueueTypeInfo ) ); - d.vkQueueNotifyOutOfBandNV( static_cast( m_queue ), reinterpret_cast( pQueueTypeInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -30265,15 +27817,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR( static_cast( m_physicalDevice ), pPropertyCount, reinterpret_cast( pProperties ) ) ); - static_cast( m_physicalDevice ), pPropertyCount, reinterpret_cast( pProperties ) ) ); } 
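
// A minimal usage sketch (not part of the patch above) of the enhanced-mode wrapper whose
// duplicated statements this hunk removes. It assumes a valid vk::PhysicalDevice, that
// VK_KHR_cooperative_matrix is supported, that exceptions are enabled (the default), and
// that the default dispatcher can resolve the extension entry point; `physicalDevice` and
// `listCooperativeMatrixShapes` are placeholder names used only for illustration.
#include <vulkan/vulkan.hpp>

inline void listCooperativeMatrixShapes( vk::PhysicalDevice physicalDevice )
{
    // The enhanced-mode overload performs the count / VK_INCOMPLETE retry loop internally
    // and returns the complete property list, unlike the raw-pointer overload being
    // de-duplicated in the hunk above.
    std::vector<vk::CooperativeMatrixPropertiesKHR> properties =
        physicalDevice.getCooperativeMatrixPropertiesKHR();

    for ( vk::CooperativeMatrixPropertiesKHR const & p : properties )
    {
        // Each entry describes one supported MxNxK matrix shape and its component types.
        (void)p;
    }
}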
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template ::value, - int>::type> template ( m_commandBuffer ), static_cast( aspectMask ) ); - d.vkCmdSetAttachmentFeedbackLoopEnableEXT( static_cast( m_commandBuffer ), static_cast( aspectMask ) ); } #if defined( VK_USE_PLATFORM_SCREEN_QNX ) @@ -30377,8 +27918,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( d.vkGetScreenBufferPropertiesQNX( static_cast( m_device ), buffer, reinterpret_cast( pProperties ) ) ); - return static_cast( - d.vkGetScreenBufferPropertiesQNX( static_cast( m_device ), buffer, reinterpret_cast( pProperties ) ) ); } # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -30395,10 +27934,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetScreenBufferPropertiesQNX( m_device, &buffer, reinterpret_cast( &properties ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getScreenBufferPropertiesQNX" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getScreenBufferPropertiesQNX" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); } template @@ -30415,10 +27952,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetScreenBufferPropertiesQNX( m_device, &buffer, reinterpret_cast( &properties ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getScreenBufferPropertiesQNX" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getScreenBufferPropertiesQNX" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( structureChain ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( structureChain ) ); } # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ #endif /*VK_USE_PLATFORM_SCREEN_QNX*/ @@ -30431,7 +27966,6 @@ namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdSetLineStippleKHR( static_cast( m_commandBuffer ), lineStippleFactor, lineStipplePattern ); - d.vkCmdSetLineStippleKHR( static_cast( m_commandBuffer ), lineStippleFactor, lineStipplePattern ); } //=== VK_KHR_calibrated_timestamps === @@ -30444,14 +27978,9 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); return static_cast( d.vkGetPhysicalDeviceCalibrateableTimeDomainsKHR( static_cast( m_physicalDevice ), pTimeDomainCount, reinterpret_cast( pTimeDomains ) ) ); - return static_cast( d.vkGetPhysicalDeviceCalibrateableTimeDomainsKHR( - static_cast( m_physicalDevice ), pTimeDomainCount, reinterpret_cast( pTimeDomains ) ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template ::value, int>::type> template ::value, int>::type> @@ -30478,14 +28007,12 @@ namespace VULKAN_HPP_NAMESPACE } } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCalibrateableTimeDomainsKHR" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCalibrateableTimeDomainsKHR" ); VULKAN_HPP_ASSERT( timeDomainCount <= timeDomains.size() ); if ( timeDomainCount < timeDomains.size() ) { timeDomains.resize( timeDomainCount ); } return 
VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( timeDomains ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( timeDomains ) ); } template ( pTimestampInfos ), pTimestamps, pMaxDeviation ) ); - return static_cast( d.vkGetCalibratedTimestampsKHR( static_cast( m_device ), - timestampCount, - reinterpret_cast( pTimestampInfos ), - pTimestamps, - pMaxDeviation ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template ::value, int>::type> template ::value, int>::type> @@ -30569,10 +28086,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetCalibratedTimestampsKHR( m_device, timestampInfos.size(), reinterpret_cast( timestampInfos.data() ), timestamps.data(), &maxDeviation ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampsKHR" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampsKHR" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data_ ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data_ ) ); } template ( d.vkGetCalibratedTimestampsKHR( m_device, timestampInfos.size(), reinterpret_cast( timestampInfos.data() ), timestamps.data(), &maxDeviation ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampsKHR" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampsKHR" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data_ ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data_ ) ); } template @@ -30618,112 +28131,105 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkGetCalibratedTimestampsKHR( m_device, 1, reinterpret_cast( ×tampInfo ), ×tamp, &maxDeviation ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampKHR" ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampKHR" ); return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data_ ) ); - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( data_ ) ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ //=== VK_KHR_maintenance6 === template - VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets2KHR( const VULKAN_HPP_NAMESPACE::BindDescriptorSetsInfoKHR * pBindDescriptorSetsInfo, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets2KHR( const VULKAN_HPP_NAMESPACE::BindDescriptorSetsInfo * pBindDescriptorSetsInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdBindDescriptorSets2KHR( static_cast( m_commandBuffer ), - reinterpret_cast( pBindDescriptorSetsInfo ) ); - d.vkCmdBindDescriptorSets2KHR( static_cast( m_commandBuffer ), - reinterpret_cast( pBindDescriptorSetsInfo ) ); + reinterpret_cast( pBindDescriptorSetsInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets2KHR( const VULKAN_HPP_NAMESPACE::BindDescriptorSetsInfoKHR & bindDescriptorSetsInfo, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void 
CommandBuffer::bindDescriptorSets2KHR( const VULKAN_HPP_NAMESPACE::BindDescriptorSetsInfo & bindDescriptorSetsInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) - VULKAN_HPP_ASSERT( d.vkCmdBindDescriptorSets2KHR && "Function requires " ); + VULKAN_HPP_ASSERT( d.vkCmdBindDescriptorSets2KHR && "Function requires or " ); # endif - d.vkCmdBindDescriptorSets2KHR( m_commandBuffer, reinterpret_cast( &bindDescriptorSetsInfo ) ); + d.vkCmdBindDescriptorSets2KHR( m_commandBuffer, reinterpret_cast( &bindDescriptorSetsInfo ) ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_INLINE void CommandBuffer::pushConstants2KHR( const VULKAN_HPP_NAMESPACE::PushConstantsInfoKHR * pPushConstantsInfo, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::pushConstants2KHR( const VULKAN_HPP_NAMESPACE::PushConstantsInfo * pPushConstantsInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdPushConstants2KHR( static_cast( m_commandBuffer ), reinterpret_cast( pPushConstantsInfo ) ); - d.vkCmdPushConstants2KHR( static_cast( m_commandBuffer ), reinterpret_cast( pPushConstantsInfo ) ); + d.vkCmdPushConstants2KHR( static_cast( m_commandBuffer ), reinterpret_cast( pPushConstantsInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void CommandBuffer::pushConstants2KHR( const VULKAN_HPP_NAMESPACE::PushConstantsInfoKHR & pushConstantsInfo, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::pushConstants2KHR( const VULKAN_HPP_NAMESPACE::PushConstantsInfo & pushConstantsInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) - VULKAN_HPP_ASSERT( d.vkCmdPushConstants2KHR && "Function requires " ); + VULKAN_HPP_ASSERT( d.vkCmdPushConstants2KHR && "Function requires or " ); # endif - d.vkCmdPushConstants2KHR( m_commandBuffer, reinterpret_cast( &pushConstantsInfo ) ); + d.vkCmdPushConstants2KHR( m_commandBuffer, reinterpret_cast( &pushConstantsInfo ) ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSet2KHR( const VULKAN_HPP_NAMESPACE::PushDescriptorSetInfoKHR * pPushDescriptorSetInfo, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSet2KHR( const VULKAN_HPP_NAMESPACE::PushDescriptorSetInfo * pPushDescriptorSetInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdPushDescriptorSet2KHR( static_cast( m_commandBuffer ), - reinterpret_cast( pPushDescriptorSetInfo ) ); - d.vkCmdPushDescriptorSet2KHR( static_cast( m_commandBuffer ), - reinterpret_cast( pPushDescriptorSetInfo ) ); + reinterpret_cast( pPushDescriptorSetInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSet2KHR( const VULKAN_HPP_NAMESPACE::PushDescriptorSetInfoKHR & pushDescriptorSetInfo, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSet2KHR( const VULKAN_HPP_NAMESPACE::PushDescriptorSetInfo & pushDescriptorSetInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); # 
if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) - VULKAN_HPP_ASSERT( d.vkCmdPushDescriptorSet2KHR && "Function requires " ); + VULKAN_HPP_ASSERT( d.vkCmdPushDescriptorSet2KHR && "Function requires or " ); # endif - d.vkCmdPushDescriptorSet2KHR( m_commandBuffer, reinterpret_cast( &pushDescriptorSetInfo ) ); + d.vkCmdPushDescriptorSet2KHR( m_commandBuffer, reinterpret_cast( &pushDescriptorSetInfo ) ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template VULKAN_HPP_INLINE void - CommandBuffer::pushDescriptorSetWithTemplate2KHR( const VULKAN_HPP_NAMESPACE::PushDescriptorSetWithTemplateInfoKHR * pPushDescriptorSetWithTemplateInfo, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + CommandBuffer::pushDescriptorSetWithTemplate2KHR( const VULKAN_HPP_NAMESPACE::PushDescriptorSetWithTemplateInfo * pPushDescriptorSetWithTemplateInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdPushDescriptorSetWithTemplate2KHR( static_cast( m_commandBuffer ), - d.vkCmdPushDescriptorSetWithTemplate2KHR( static_cast( m_commandBuffer ), - reinterpret_cast( pPushDescriptorSetWithTemplateInfo ) ); + reinterpret_cast( pPushDescriptorSetWithTemplateInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template VULKAN_HPP_INLINE void - CommandBuffer::pushDescriptorSetWithTemplate2KHR( const VULKAN_HPP_NAMESPACE::PushDescriptorSetWithTemplateInfoKHR & pushDescriptorSetWithTemplateInfo, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT + CommandBuffer::pushDescriptorSetWithTemplate2KHR( const VULKAN_HPP_NAMESPACE::PushDescriptorSetWithTemplateInfo & pushDescriptorSetWithTemplateInfo, + Dispatch const & d ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); # if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) - VULKAN_HPP_ASSERT( d.vkCmdPushDescriptorSetWithTemplate2KHR && "Function requires " ); + VULKAN_HPP_ASSERT( d.vkCmdPushDescriptorSetWithTemplate2KHR && + "Function requires or " ); # endif d.vkCmdPushDescriptorSetWithTemplate2KHR( m_commandBuffer, - reinterpret_cast( &pushDescriptorSetWithTemplateInfo ) ); + reinterpret_cast( &pushDescriptorSetWithTemplateInfo ) ); } #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -30735,8 +28241,6 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); d.vkCmdSetDescriptorBufferOffsets2EXT( static_cast( m_commandBuffer ), reinterpret_cast( pSetDescriptorBufferOffsetsInfo ) ); - d.vkCmdSetDescriptorBufferOffsets2EXT( static_cast( m_commandBuffer ), - reinterpret_cast( pSetDescriptorBufferOffsetsInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -30763,8 +28267,6 @@ namespace VULKAN_HPP_NAMESPACE d.vkCmdBindDescriptorBufferEmbeddedSamplers2EXT( static_cast( m_commandBuffer ), reinterpret_cast( pBindDescriptorBufferEmbeddedSamplersInfo ) ); - static_cast( m_commandBuffer ), - reinterpret_cast( pBindDescriptorBufferEmbeddedSamplersInfo ) ); } #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE @@ -31217,519 +28719,6 @@ namespace VULKAN_HPP_NAMESPACE reinterpret_cast( pProperties ) ) ); } -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template ::value, - int>::type> - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType< - std::vector>::type - PhysicalDevice::getCooperativeMatrixFlexibleDimensionsPropertiesNV( Dispatch const & d ) const - { - VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); -# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) - VULKAN_HPP_ASSERT( 
d.vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV && - "Function requires " ); -# endif - - std::vector properties; - uint32_t propertyCount; - VULKAN_HPP_NAMESPACE::Result result; - do - { - result = static_cast( - d.vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV( m_physicalDevice, &propertyCount, nullptr ) ); - if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) - { - properties.resize( propertyCount ); - result = static_cast( d.vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV( - m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); - } - } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixFlexibleDimensionsPropertiesNV" ); - VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); - if ( propertyCount < properties.size() ) - { - properties.resize( propertyCount ); - } - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); - } - - template ::value, - int>::type> - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType< - std::vector>::type - PhysicalDevice::getCooperativeMatrixFlexibleDimensionsPropertiesNV( - CooperativeMatrixFlexibleDimensionsPropertiesNVAllocator & cooperativeMatrixFlexibleDimensionsPropertiesNVAllocator, Dispatch const & d ) const - { - VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); -# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) - VULKAN_HPP_ASSERT( d.vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV && - "Function requires " ); -# endif - - std::vector properties( - cooperativeMatrixFlexibleDimensionsPropertiesNVAllocator ); - uint32_t propertyCount; - VULKAN_HPP_NAMESPACE::Result result; - do - { - result = static_cast( - d.vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV( m_physicalDevice, &propertyCount, nullptr ) ); - if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && propertyCount ) - { - properties.resize( propertyCount ); - result = static_cast( d.vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV( - m_physicalDevice, &propertyCount, reinterpret_cast( properties.data() ) ) ); - } - } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixFlexibleDimensionsPropertiesNV" ); - VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); - if ( propertyCount < properties.size() ) - { - properties.resize( propertyCount ); - } - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( properties ) ); - } -#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - - //=== VK_EXT_device_generated_commands === - - template - VULKAN_HPP_INLINE void Device::getGeneratedCommandsMemoryRequirementsEXT( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoEXT * pInfo, - VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT - { - VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkGetGeneratedCommandsMemoryRequirementsEXT( static_cast( m_device ), - reinterpret_cast( pInfo ), - reinterpret_cast( pMemoryRequirements ) ); - } - -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 - 
Device::getGeneratedCommandsMemoryRequirementsEXT( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoEXT & info, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT - { - VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); -# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) - VULKAN_HPP_ASSERT( d.vkGetGeneratedCommandsMemoryRequirementsEXT && - "Function requires " ); -# endif - - VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; - d.vkGetGeneratedCommandsMemoryRequirementsEXT( m_device, - reinterpret_cast( &info ), - reinterpret_cast( &memoryRequirements ) ); - - return memoryRequirements; - } - - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain - Device::getGeneratedCommandsMemoryRequirementsEXT( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoEXT & info, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT - { - VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); -# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) - VULKAN_HPP_ASSERT( d.vkGetGeneratedCommandsMemoryRequirementsEXT && - "Function requires " ); -# endif - - VULKAN_HPP_NAMESPACE::StructureChain structureChain; - VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get(); - d.vkGetGeneratedCommandsMemoryRequirementsEXT( m_device, - reinterpret_cast( &info ), - reinterpret_cast( &memoryRequirements ) ); - - return structureChain; - } -#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - - template - VULKAN_HPP_INLINE void CommandBuffer::preprocessGeneratedCommandsEXT( const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoEXT * pGeneratedCommandsInfo, - VULKAN_HPP_NAMESPACE::CommandBuffer stateCommandBuffer, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT - { - VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdPreprocessGeneratedCommandsEXT( static_cast( m_commandBuffer ), - reinterpret_cast( pGeneratedCommandsInfo ), - static_cast( stateCommandBuffer ) ); - } - -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::preprocessGeneratedCommandsEXT( const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoEXT & generatedCommandsInfo, - VULKAN_HPP_NAMESPACE::CommandBuffer stateCommandBuffer, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT - { - VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); -# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) - VULKAN_HPP_ASSERT( d.vkCmdPreprocessGeneratedCommandsEXT && "Function requires " ); -# endif - - d.vkCmdPreprocessGeneratedCommandsEXT( - m_commandBuffer, reinterpret_cast( &generatedCommandsInfo ), static_cast( stateCommandBuffer ) ); - } -#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - - template - VULKAN_HPP_INLINE void CommandBuffer::executeGeneratedCommandsEXT( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, - const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoEXT * pGeneratedCommandsInfo, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT - { - VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkCmdExecuteGeneratedCommandsEXT( static_cast( m_commandBuffer ), - static_cast( isPreprocessed ), - reinterpret_cast( pGeneratedCommandsInfo ) ); - } - -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void CommandBuffer::executeGeneratedCommandsEXT( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, - const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoEXT & generatedCommandsInfo, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT - { - VULKAN_HPP_ASSERT( 
d.getVkHeaderVersion() == VK_HEADER_VERSION ); -# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) - VULKAN_HPP_ASSERT( d.vkCmdExecuteGeneratedCommandsEXT && "Function requires " ); -# endif - - d.vkCmdExecuteGeneratedCommandsEXT( - m_commandBuffer, static_cast( isPreprocessed ), reinterpret_cast( &generatedCommandsInfo ) ); - } -#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result - Device::createIndirectCommandsLayoutEXT( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoEXT * pCreateInfo, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT * pIndirectCommandsLayout, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT - { - VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateIndirectCommandsLayoutEXT( static_cast( m_device ), - reinterpret_cast( pCreateInfo ), - reinterpret_cast( pAllocator ), - reinterpret_cast( pIndirectCommandsLayout ) ) ); - } - -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type - Device::createIndirectCommandsLayoutEXT( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoEXT & createInfo, - Optional allocator, - Dispatch const & d ) const - { - VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); -# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) - VULKAN_HPP_ASSERT( d.vkCreateIndirectCommandsLayoutEXT && "Function requires " ); -# endif - - VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT indirectCommandsLayout; - VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateIndirectCommandsLayoutEXT( - m_device, - reinterpret_cast( &createInfo ), - reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &indirectCommandsLayout ) ) ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createIndirectCommandsLayoutEXT" ); - - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( indirectCommandsLayout ) ); - } - -# ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type - Device::createIndirectCommandsLayoutEXTUnique( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoEXT & createInfo, - Optional allocator, - Dispatch const & d ) const - { - VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); -# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) - VULKAN_HPP_ASSERT( d.vkCreateIndirectCommandsLayoutEXT && "Function requires " ); -# endif - - VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT indirectCommandsLayout; - VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateIndirectCommandsLayoutEXT( - m_device, - reinterpret_cast( &createInfo ), - reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &indirectCommandsLayout ) ) ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createIndirectCommandsLayoutEXTUnique" ); - - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, - UniqueHandle( - indirectCommandsLayout, detail::ObjectDestroy( *this, allocator, d ) ) ); - } -# endif /* VULKAN_HPP_NO_SMART_HANDLE */ -#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - - template - VULKAN_HPP_INLINE void Device::destroyIndirectCommandsLayoutEXT( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT indirectCommandsLayout, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - Dispatch const & d 
) const VULKAN_HPP_NOEXCEPT - { - VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyIndirectCommandsLayoutEXT( static_cast( m_device ), - static_cast( indirectCommandsLayout ), - reinterpret_cast( pAllocator ) ); - } - -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroyIndirectCommandsLayoutEXT( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT indirectCommandsLayout, - Optional allocator, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT - { - VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); -# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) - VULKAN_HPP_ASSERT( d.vkDestroyIndirectCommandsLayoutEXT && "Function requires " ); -# endif - - d.vkDestroyIndirectCommandsLayoutEXT( - m_device, - static_cast( indirectCommandsLayout ), - reinterpret_cast( static_cast( allocator ) ) ); - } -#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT indirectCommandsLayout, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT - { - VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyIndirectCommandsLayoutEXT( static_cast( m_device ), - static_cast( indirectCommandsLayout ), - reinterpret_cast( pAllocator ) ); - } - -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutEXT indirectCommandsLayout, - Optional allocator, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT - { - VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); -# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) - VULKAN_HPP_ASSERT( d.vkDestroyIndirectCommandsLayoutEXT && "Function requires " ); -# endif - - d.vkDestroyIndirectCommandsLayoutEXT( - m_device, - static_cast( indirectCommandsLayout ), - reinterpret_cast( static_cast( allocator ) ) ); - } -#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result - Device::createIndirectExecutionSetEXT( const VULKAN_HPP_NAMESPACE::IndirectExecutionSetCreateInfoEXT * pCreateInfo, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT * pIndirectExecutionSet, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT - { - VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkCreateIndirectExecutionSetEXT( static_cast( m_device ), - reinterpret_cast( pCreateInfo ), - reinterpret_cast( pAllocator ), - reinterpret_cast( pIndirectExecutionSet ) ) ); - } - -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType::type - Device::createIndirectExecutionSetEXT( const VULKAN_HPP_NAMESPACE::IndirectExecutionSetCreateInfoEXT & createInfo, - Optional allocator, - Dispatch const & d ) const - { - VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); -# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) - VULKAN_HPP_ASSERT( d.vkCreateIndirectExecutionSetEXT && "Function requires " ); -# endif - - VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT indirectExecutionSet; - VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateIndirectExecutionSetEXT( - m_device, - reinterpret_cast( &createInfo ), - reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &indirectExecutionSet ) ) ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, 
VULKAN_HPP_NAMESPACE_STRING "::Device::createIndirectExecutionSetEXT" ); - - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, std::move( indirectExecutionSet ) ); - } - -# ifndef VULKAN_HPP_NO_SMART_HANDLE - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType>::type - Device::createIndirectExecutionSetEXTUnique( const VULKAN_HPP_NAMESPACE::IndirectExecutionSetCreateInfoEXT & createInfo, - Optional allocator, - Dispatch const & d ) const - { - VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); -# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) - VULKAN_HPP_ASSERT( d.vkCreateIndirectExecutionSetEXT && "Function requires " ); -# endif - - VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT indirectExecutionSet; - VULKAN_HPP_NAMESPACE::Result result = static_cast( d.vkCreateIndirectExecutionSetEXT( - m_device, - reinterpret_cast( &createInfo ), - reinterpret_cast( static_cast( allocator ) ), - reinterpret_cast( &indirectExecutionSet ) ) ); - VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::createIndirectExecutionSetEXTUnique" ); - - return VULKAN_HPP_NAMESPACE::detail::createResultValueType( result, - UniqueHandle( - indirectExecutionSet, detail::ObjectDestroy( *this, allocator, d ) ) ); - } -# endif /* VULKAN_HPP_NO_SMART_HANDLE */ -#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - - template - VULKAN_HPP_INLINE void Device::destroyIndirectExecutionSetEXT( VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT indirectExecutionSet, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT - { - VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyIndirectExecutionSetEXT( static_cast( m_device ), - static_cast( indirectExecutionSet ), - reinterpret_cast( pAllocator ) ); - } - -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroyIndirectExecutionSetEXT( VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT indirectExecutionSet, - Optional allocator, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT - { - VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); -# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) - VULKAN_HPP_ASSERT( d.vkDestroyIndirectExecutionSetEXT && "Function requires " ); -# endif - - d.vkDestroyIndirectExecutionSetEXT( - m_device, - static_cast( indirectExecutionSet ), - reinterpret_cast( static_cast( allocator ) ) ); - } -#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT indirectExecutionSet, - const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT - { - VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkDestroyIndirectExecutionSetEXT( static_cast( m_device ), - static_cast( indirectExecutionSet ), - reinterpret_cast( pAllocator ) ); - } - -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT indirectExecutionSet, - Optional allocator, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT - { - VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); -# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) - VULKAN_HPP_ASSERT( d.vkDestroyIndirectExecutionSetEXT && "Function requires " ); -# endif - - d.vkDestroyIndirectExecutionSetEXT( - m_device, - static_cast( indirectExecutionSet ), - reinterpret_cast( static_cast( 
allocator ) ) ); - } -#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - - template - VULKAN_HPP_INLINE void Device::updateIndirectExecutionSetPipelineEXT( VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT indirectExecutionSet, - uint32_t executionSetWriteCount, - const VULKAN_HPP_NAMESPACE::WriteIndirectExecutionSetPipelineEXT * pExecutionSetWrites, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT - { - VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkUpdateIndirectExecutionSetPipelineEXT( static_cast( m_device ), - static_cast( indirectExecutionSet ), - executionSetWriteCount, - reinterpret_cast( pExecutionSetWrites ) ); - } - -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::updateIndirectExecutionSetPipelineEXT( - VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT indirectExecutionSet, - VULKAN_HPP_NAMESPACE::ArrayProxy const & executionSetWrites, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT - { - VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); -# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) - VULKAN_HPP_ASSERT( d.vkUpdateIndirectExecutionSetPipelineEXT && - "Function requires " ); -# endif - - d.vkUpdateIndirectExecutionSetPipelineEXT( m_device, - static_cast( indirectExecutionSet ), - executionSetWrites.size(), - reinterpret_cast( executionSetWrites.data() ) ); - } -#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - - template - VULKAN_HPP_INLINE void Device::updateIndirectExecutionSetShaderEXT( VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT indirectExecutionSet, - uint32_t executionSetWriteCount, - const VULKAN_HPP_NAMESPACE::WriteIndirectExecutionSetShaderEXT * pExecutionSetWrites, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT - { - VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - d.vkUpdateIndirectExecutionSetShaderEXT( static_cast( m_device ), - static_cast( indirectExecutionSet ), - executionSetWriteCount, - reinterpret_cast( pExecutionSetWrites ) ); - } - -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_INLINE void Device::updateIndirectExecutionSetShaderEXT( - VULKAN_HPP_NAMESPACE::IndirectExecutionSetEXT indirectExecutionSet, - VULKAN_HPP_NAMESPACE::ArrayProxy const & executionSetWrites, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT - { - VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); -# if ( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1 ) - VULKAN_HPP_ASSERT( d.vkUpdateIndirectExecutionSetShaderEXT && - "Function requires " ); -# endif - - d.vkUpdateIndirectExecutionSetShaderEXT( m_device, - static_cast( indirectExecutionSet ), - executionSetWrites.size(), - reinterpret_cast( executionSetWrites.data() ) ); - } -#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - - //=== VK_NV_cooperative_matrix2 === - - template - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result - PhysicalDevice::getCooperativeMatrixFlexibleDimensionsPropertiesNV( uint32_t * pPropertyCount, - VULKAN_HPP_NAMESPACE::CooperativeMatrixFlexibleDimensionsPropertiesNV * pProperties, - Dispatch const & d ) const VULKAN_HPP_NOEXCEPT - { - VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); - return static_cast( d.vkGetPhysicalDeviceCooperativeMatrixFlexibleDimensionsPropertiesNV( - static_cast( m_physicalDevice ), - pPropertyCount, - reinterpret_cast( pProperties ) ) ); - } - #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template >>>>>> indev //=================================== //=== HANDLE forward declarations === //=================================== @@ -2605,25 +2627,6 @@ namespace 
VULKAN_HPP_NAMESPACE return *this; } -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( SurfaceKHR const & ) const = default; -#else - bool operator==( SurfaceKHR const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_surfaceKHR == rhs.m_surfaceKHR; - } - - bool operator!=( SurfaceKHR const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_surfaceKHR != rhs.m_surfaceKHR; - } - - bool operator<( SurfaceKHR const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_surfaceKHR < rhs.m_surfaceKHR; - } -#endif - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSurfaceKHR() const VULKAN_HPP_NOEXCEPT { return m_surfaceKHR; @@ -2721,25 +2724,6 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( DebugReportCallbackEXT const & ) const = default; -#else - bool operator==( DebugReportCallbackEXT const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_debugReportCallbackEXT == rhs.m_debugReportCallbackEXT; - } - - bool operator!=( DebugReportCallbackEXT const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_debugReportCallbackEXT != rhs.m_debugReportCallbackEXT; - } - - bool operator<( DebugReportCallbackEXT const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_debugReportCallbackEXT < rhs.m_debugReportCallbackEXT; - } -#endif - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDebugReportCallbackEXT() const VULKAN_HPP_NOEXCEPT { return m_debugReportCallbackEXT; @@ -2837,25 +2821,6 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( DebugUtilsMessengerEXT const & ) const = default; -#else - bool operator==( DebugUtilsMessengerEXT const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_debugUtilsMessengerEXT == rhs.m_debugUtilsMessengerEXT; - } - - bool operator!=( DebugUtilsMessengerEXT const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_debugUtilsMessengerEXT != rhs.m_debugUtilsMessengerEXT; - } - - bool operator<( DebugUtilsMessengerEXT const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_debugUtilsMessengerEXT < rhs.m_debugUtilsMessengerEXT; - } -#endif - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDebugUtilsMessengerEXT() const VULKAN_HPP_NOEXCEPT { return m_debugUtilsMessengerEXT; @@ -2941,25 +2906,6 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( DisplayKHR const & ) const = default; -#else - bool operator==( DisplayKHR const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_displayKHR == rhs.m_displayKHR; - } - - bool operator!=( DisplayKHR const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_displayKHR != rhs.m_displayKHR; - } - - bool operator<( DisplayKHR const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_displayKHR < rhs.m_displayKHR; - } -#endif - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDisplayKHR() const VULKAN_HPP_NOEXCEPT { return m_displayKHR; @@ -3051,25 +2997,6 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( SwapchainKHR const & ) const = default; -#else - bool operator==( SwapchainKHR const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_swapchainKHR == rhs.m_swapchainKHR; - } - - bool operator!=( SwapchainKHR const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_swapchainKHR != rhs.m_swapchainKHR; - } - - bool operator<( SwapchainKHR const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_swapchainKHR < rhs.m_swapchainKHR; - } -#endif - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSwapchainKHR() const 
VULKAN_HPP_NOEXCEPT { return m_swapchainKHR; @@ -3161,25 +3088,6 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( Semaphore const & ) const = default; -#else - bool operator==( Semaphore const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_semaphore == rhs.m_semaphore; - } - - bool operator!=( Semaphore const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_semaphore != rhs.m_semaphore; - } - - bool operator<( Semaphore const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_semaphore < rhs.m_semaphore; - } -#endif - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSemaphore() const VULKAN_HPP_NOEXCEPT { return m_semaphore; @@ -3271,25 +3179,6 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( Fence const & ) const = default; -#else - bool operator==( Fence const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_fence == rhs.m_fence; - } - - bool operator!=( Fence const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_fence != rhs.m_fence; - } - - bool operator<( Fence const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_fence < rhs.m_fence; - } -#endif - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkFence() const VULKAN_HPP_NOEXCEPT { return m_fence; @@ -3387,25 +3276,6 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PerformanceConfigurationINTEL const & ) const = default; -#else - bool operator==( PerformanceConfigurationINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_performanceConfigurationINTEL == rhs.m_performanceConfigurationINTEL; - } - - bool operator!=( PerformanceConfigurationINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_performanceConfigurationINTEL != rhs.m_performanceConfigurationINTEL; - } - - bool operator<( PerformanceConfigurationINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_performanceConfigurationINTEL < rhs.m_performanceConfigurationINTEL; - } -#endif - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPerformanceConfigurationINTEL() const VULKAN_HPP_NOEXCEPT { return m_performanceConfigurationINTEL; @@ -3491,25 +3361,6 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( QueryPool const & ) const = default; -#else - bool operator==( QueryPool const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_queryPool == rhs.m_queryPool; - } - - bool operator!=( QueryPool const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_queryPool != rhs.m_queryPool; - } - - bool operator<( QueryPool const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_queryPool < rhs.m_queryPool; - } -#endif - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkQueryPool() const VULKAN_HPP_NOEXCEPT { return m_queryPool; @@ -3601,25 +3452,6 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( Buffer const & ) const = default; -#else - bool operator==( Buffer const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_buffer == rhs.m_buffer; - } - - bool operator!=( Buffer const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_buffer != rhs.m_buffer; - } - - bool operator<( Buffer const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_buffer < rhs.m_buffer; - } -#endif - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkBuffer() const VULKAN_HPP_NOEXCEPT { return m_buffer; @@ -3711,25 +3543,6 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if defined( 
VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PipelineLayout const & ) const = default; -#else - bool operator==( PipelineLayout const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_pipelineLayout == rhs.m_pipelineLayout; - } - - bool operator!=( PipelineLayout const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_pipelineLayout != rhs.m_pipelineLayout; - } - - bool operator<( PipelineLayout const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_pipelineLayout < rhs.m_pipelineLayout; - } -#endif - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPipelineLayout() const VULKAN_HPP_NOEXCEPT { return m_pipelineLayout; @@ -3821,25 +3634,6 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( DescriptorSet const & ) const = default; -#else - bool operator==( DescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_descriptorSet == rhs.m_descriptorSet; - } - - bool operator!=( DescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_descriptorSet != rhs.m_descriptorSet; - } - - bool operator<( DescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_descriptorSet < rhs.m_descriptorSet; - } -#endif - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDescriptorSet() const VULKAN_HPP_NOEXCEPT { return m_descriptorSet; @@ -3931,25 +3725,6 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( ImageView const & ) const = default; -#else - bool operator==( ImageView const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_imageView == rhs.m_imageView; - } - - bool operator!=( ImageView const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_imageView != rhs.m_imageView; - } - - bool operator<( ImageView const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_imageView < rhs.m_imageView; - } -#endif - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkImageView() const VULKAN_HPP_NOEXCEPT { return m_imageView; @@ -4041,25 +3816,6 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( Pipeline const & ) const = default; -#else - bool operator==( Pipeline const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_pipeline == rhs.m_pipeline; - } - - bool operator!=( Pipeline const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_pipeline != rhs.m_pipeline; - } - - bool operator<( Pipeline const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_pipeline < rhs.m_pipeline; - } -#endif - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPipeline() const VULKAN_HPP_NOEXCEPT { return m_pipeline; @@ -4151,25 +3907,6 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( ShaderEXT const & ) const = default; -#else - bool operator==( ShaderEXT const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_shaderEXT == rhs.m_shaderEXT; - } - - bool operator!=( ShaderEXT const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_shaderEXT != rhs.m_shaderEXT; - } - - bool operator<( ShaderEXT const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_shaderEXT < rhs.m_shaderEXT; - } -#endif - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkShaderEXT() const VULKAN_HPP_NOEXCEPT { return m_shaderEXT; @@ -4255,25 +3992,6 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( Image const & ) const = default; -#else - bool operator==( Image const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_image == rhs.m_image; - } - - 
bool operator!=( Image const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_image != rhs.m_image; - } - - bool operator<( Image const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_image < rhs.m_image; - } -#endif - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkImage() const VULKAN_HPP_NOEXCEPT { return m_image; @@ -4371,25 +4089,6 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( AccelerationStructureNV const & ) const = default; -#else - bool operator==( AccelerationStructureNV const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_accelerationStructureNV == rhs.m_accelerationStructureNV; - } - - bool operator!=( AccelerationStructureNV const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_accelerationStructureNV != rhs.m_accelerationStructureNV; - } - - bool operator<( AccelerationStructureNV const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_accelerationStructureNV < rhs.m_accelerationStructureNV; - } -#endif - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkAccelerationStructureNV() const VULKAN_HPP_NOEXCEPT { return m_accelerationStructureNV; @@ -4487,25 +4186,6 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( OpticalFlowSessionNV const & ) const = default; -#else - bool operator==( OpticalFlowSessionNV const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_opticalFlowSessionNV == rhs.m_opticalFlowSessionNV; - } - - bool operator!=( OpticalFlowSessionNV const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_opticalFlowSessionNV != rhs.m_opticalFlowSessionNV; - } - - bool operator<( OpticalFlowSessionNV const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_opticalFlowSessionNV < rhs.m_opticalFlowSessionNV; - } -#endif - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkOpticalFlowSessionNV() const VULKAN_HPP_NOEXCEPT { return m_opticalFlowSessionNV; @@ -4597,25 +4277,6 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( DescriptorUpdateTemplate const & ) const = default; -#else - bool operator==( DescriptorUpdateTemplate const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_descriptorUpdateTemplate == rhs.m_descriptorUpdateTemplate; - } - - bool operator!=( DescriptorUpdateTemplate const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_descriptorUpdateTemplate != rhs.m_descriptorUpdateTemplate; - } - - bool operator<( DescriptorUpdateTemplate const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_descriptorUpdateTemplate < rhs.m_descriptorUpdateTemplate; - } -#endif - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDescriptorUpdateTemplate() const VULKAN_HPP_NOEXCEPT { return m_descriptorUpdateTemplate; @@ -4709,25 +4370,6 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( Event const & ) const = default; -#else - bool operator==( Event const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_event == rhs.m_event; - } - - bool operator!=( Event const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_event != rhs.m_event; - } - - bool operator<( Event const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_event < rhs.m_event; - } -#endif - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkEvent() const VULKAN_HPP_NOEXCEPT { return m_event; @@ -4825,25 +4467,6 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( AccelerationStructureKHR const & ) const = default; -#else - bool 
operator==( AccelerationStructureKHR const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_accelerationStructureKHR == rhs.m_accelerationStructureKHR; - } - - bool operator!=( AccelerationStructureKHR const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_accelerationStructureKHR != rhs.m_accelerationStructureKHR; - } - - bool operator<( AccelerationStructureKHR const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_accelerationStructureKHR < rhs.m_accelerationStructureKHR; - } -#endif - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkAccelerationStructureKHR() const VULKAN_HPP_NOEXCEPT { return m_accelerationStructureKHR; @@ -4935,25 +4558,6 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( MicromapEXT const & ) const = default; -#else - bool operator==( MicromapEXT const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_micromapEXT == rhs.m_micromapEXT; - } - - bool operator!=( MicromapEXT const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_micromapEXT != rhs.m_micromapEXT; - } - - bool operator<( MicromapEXT const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_micromapEXT < rhs.m_micromapEXT; - } -#endif - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkMicromapEXT() const VULKAN_HPP_NOEXCEPT { return m_micromapEXT; @@ -5037,25 +4641,6 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( CommandBuffer const & ) const = default; -#else - bool operator==( CommandBuffer const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_commandBuffer == rhs.m_commandBuffer; - } - - bool operator!=( CommandBuffer const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_commandBuffer != rhs.m_commandBuffer; - } - - bool operator<( CommandBuffer const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_commandBuffer < rhs.m_commandBuffer; - } -#endif - //=== VK_VERSION_1_0 === template @@ -5787,6 +5372,105 @@ namespace VULKAN_HPP_NAMESPACE void setPrimitiveRestartEnable( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + //=== VK_VERSION_1_4 === + + template + void setLineStipple( uint32_t lineStippleFactor, + uint16_t lineStipplePattern, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void bindIndexBuffer2( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::DeviceSize size, + VULKAN_HPP_NAMESPACE::IndexType indexType, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + + template + void pushDescriptorSet( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t set, + uint32_t descriptorWriteCount, + const VULKAN_HPP_NAMESPACE::WriteDescriptorSet * pDescriptorWrites, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void pushDescriptorSet( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t set, + VULKAN_HPP_NAMESPACE::ArrayProxy const & descriptorWrites, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void pushDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + 
VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t set, + const void * pData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void pushDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t set, + DataType const & data, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void setRenderingAttachmentLocations( const VULKAN_HPP_NAMESPACE::RenderingAttachmentLocationInfo * pLocationInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setRenderingAttachmentLocations( const VULKAN_HPP_NAMESPACE::RenderingAttachmentLocationInfo & locationInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void setRenderingInputAttachmentIndices( const VULKAN_HPP_NAMESPACE::RenderingInputAttachmentIndexInfo * pInputAttachmentIndexInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void setRenderingInputAttachmentIndices( const VULKAN_HPP_NAMESPACE::RenderingInputAttachmentIndexInfo & inputAttachmentIndexInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void bindDescriptorSets2( const VULKAN_HPP_NAMESPACE::BindDescriptorSetsInfo * pBindDescriptorSetsInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void bindDescriptorSets2( const VULKAN_HPP_NAMESPACE::BindDescriptorSetsInfo & bindDescriptorSetsInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void pushConstants2( const VULKAN_HPP_NAMESPACE::PushConstantsInfo * pPushConstantsInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void pushConstants2( const VULKAN_HPP_NAMESPACE::PushConstantsInfo & pushConstantsInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void pushDescriptorSet2( const VULKAN_HPP_NAMESPACE::PushDescriptorSetInfo * pPushDescriptorSetInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void pushDescriptorSet2( const VULKAN_HPP_NAMESPACE::PushDescriptorSetInfo & pushDescriptorSetInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void pushDescriptorSetWithTemplate2( const VULKAN_HPP_NAMESPACE::PushDescriptorSetWithTemplateInfo * pPushDescriptorSetWithTemplateInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + void pushDescriptorSetWithTemplate2( const VULKAN_HPP_NAMESPACE::PushDescriptorSetWithTemplateInfo & 
pushDescriptorSetWithTemplateInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + //=== VK_EXT_debug_marker === template @@ -6528,20 +6212,20 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_KHR_dynamic_rendering_local_read === template - void setRenderingAttachmentLocationsKHR( const VULKAN_HPP_NAMESPACE::RenderingAttachmentLocationInfoKHR * pLocationInfo, + void setRenderingAttachmentLocationsKHR( const VULKAN_HPP_NAMESPACE::RenderingAttachmentLocationInfo * pLocationInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - void setRenderingAttachmentLocationsKHR( const VULKAN_HPP_NAMESPACE::RenderingAttachmentLocationInfoKHR & locationInfo, + void setRenderingAttachmentLocationsKHR( const VULKAN_HPP_NAMESPACE::RenderingAttachmentLocationInfo & locationInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - void setRenderingInputAttachmentIndicesKHR( const VULKAN_HPP_NAMESPACE::RenderingInputAttachmentIndexInfoKHR * pInputAttachmentIndexInfo, + void setRenderingInputAttachmentIndicesKHR( const VULKAN_HPP_NAMESPACE::RenderingInputAttachmentIndexInfo * pInputAttachmentIndexInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - void setRenderingInputAttachmentIndicesKHR( const VULKAN_HPP_NAMESPACE::RenderingInputAttachmentIndexInfoKHR & inputAttachmentIndexInfo, + void setRenderingInputAttachmentIndicesKHR( const VULKAN_HPP_NAMESPACE::RenderingInputAttachmentIndexInfo & inputAttachmentIndexInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -7322,38 +7006,38 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_KHR_maintenance6 === template - void bindDescriptorSets2KHR( const VULKAN_HPP_NAMESPACE::BindDescriptorSetsInfoKHR * pBindDescriptorSetsInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + void bindDescriptorSets2KHR( const VULKAN_HPP_NAMESPACE::BindDescriptorSetsInfo * pBindDescriptorSetsInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - void bindDescriptorSets2KHR( const VULKAN_HPP_NAMESPACE::BindDescriptorSetsInfoKHR & bindDescriptorSetsInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + void bindDescriptorSets2KHR( const VULKAN_HPP_NAMESPACE::BindDescriptorSetsInfo & bindDescriptorSetsInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - void pushConstants2KHR( const VULKAN_HPP_NAMESPACE::PushConstantsInfoKHR * pPushConstantsInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + void pushConstants2KHR( const VULKAN_HPP_NAMESPACE::PushConstantsInfo * pPushConstantsInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - void pushConstants2KHR( const VULKAN_HPP_NAMESPACE::PushConstantsInfoKHR & pushConstantsInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + void 
pushConstants2KHR( const VULKAN_HPP_NAMESPACE::PushConstantsInfo & pushConstantsInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - void pushDescriptorSet2KHR( const VULKAN_HPP_NAMESPACE::PushDescriptorSetInfoKHR * pPushDescriptorSetInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + void pushDescriptorSet2KHR( const VULKAN_HPP_NAMESPACE::PushDescriptorSetInfo * pPushDescriptorSetInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - void pushDescriptorSet2KHR( const VULKAN_HPP_NAMESPACE::PushDescriptorSetInfoKHR & pushDescriptorSetInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + void pushDescriptorSet2KHR( const VULKAN_HPP_NAMESPACE::PushDescriptorSetInfo & pushDescriptorSetInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - void pushDescriptorSetWithTemplate2KHR( const VULKAN_HPP_NAMESPACE::PushDescriptorSetWithTemplateInfoKHR * pPushDescriptorSetWithTemplateInfo, + void pushDescriptorSetWithTemplate2KHR( const VULKAN_HPP_NAMESPACE::PushDescriptorSetWithTemplateInfo * pPushDescriptorSetWithTemplateInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - void pushDescriptorSetWithTemplate2KHR( const VULKAN_HPP_NAMESPACE::PushDescriptorSetWithTemplateInfoKHR & pushDescriptorSetWithTemplateInfo, + void pushDescriptorSetWithTemplate2KHR( const VULKAN_HPP_NAMESPACE::PushDescriptorSetWithTemplateInfo & pushDescriptorSetWithTemplateInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ @@ -7492,25 +7176,6 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( DeviceMemory const & ) const = default; -#else - bool operator==( DeviceMemory const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_deviceMemory == rhs.m_deviceMemory; - } - - bool operator!=( DeviceMemory const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_deviceMemory != rhs.m_deviceMemory; - } - - bool operator<( DeviceMemory const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_deviceMemory < rhs.m_deviceMemory; - } -#endif - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDeviceMemory() const VULKAN_HPP_NOEXCEPT { return m_deviceMemory; @@ -7602,25 +7267,6 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( VideoSessionKHR const & ) const = default; -#else - bool operator==( VideoSessionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_videoSessionKHR == rhs.m_videoSessionKHR; - } - - bool operator!=( VideoSessionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_videoSessionKHR != rhs.m_videoSessionKHR; - } - - bool operator<( VideoSessionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_videoSessionKHR < rhs.m_videoSessionKHR; - } -#endif - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkVideoSessionKHR() const VULKAN_HPP_NOEXCEPT { return m_videoSessionKHR; @@ -7712,25 +7358,6 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( DeferredOperationKHR const & ) 
const = default; -#else - bool operator==( DeferredOperationKHR const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_deferredOperationKHR == rhs.m_deferredOperationKHR; - } - - bool operator!=( DeferredOperationKHR const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_deferredOperationKHR != rhs.m_deferredOperationKHR; - } - - bool operator<( DeferredOperationKHR const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_deferredOperationKHR < rhs.m_deferredOperationKHR; - } -#endif - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDeferredOperationKHR() const VULKAN_HPP_NOEXCEPT { return m_deferredOperationKHR; @@ -7823,25 +7450,6 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( BufferCollectionFUCHSIA const & ) const = default; -# else - bool operator==( BufferCollectionFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_bufferCollectionFUCHSIA == rhs.m_bufferCollectionFUCHSIA; - } - - bool operator!=( BufferCollectionFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_bufferCollectionFUCHSIA != rhs.m_bufferCollectionFUCHSIA; - } - - bool operator<( BufferCollectionFUCHSIA const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_bufferCollectionFUCHSIA < rhs.m_bufferCollectionFUCHSIA; - } -# endif - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkBufferCollectionFUCHSIA() const VULKAN_HPP_NOEXCEPT { return m_bufferCollectionFUCHSIA; @@ -7934,25 +7542,6 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( BufferView const & ) const = default; -#else - bool operator==( BufferView const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_bufferView == rhs.m_bufferView; - } - - bool operator!=( BufferView const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_bufferView != rhs.m_bufferView; - } - - bool operator<( BufferView const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_bufferView < rhs.m_bufferView; - } -#endif - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkBufferView() const VULKAN_HPP_NOEXCEPT { return m_bufferView; @@ -8044,25 +7633,6 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( CommandPool const & ) const = default; -#else - bool operator==( CommandPool const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_commandPool == rhs.m_commandPool; - } - - bool operator!=( CommandPool const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_commandPool != rhs.m_commandPool; - } - - bool operator<( CommandPool const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_commandPool < rhs.m_commandPool; - } -#endif - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkCommandPool() const VULKAN_HPP_NOEXCEPT { return m_commandPool; @@ -8154,25 +7724,6 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PipelineCache const & ) const = default; -#else - bool operator==( PipelineCache const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_pipelineCache == rhs.m_pipelineCache; - } - - bool operator!=( PipelineCache const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_pipelineCache != rhs.m_pipelineCache; - } - - bool operator<( PipelineCache const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_pipelineCache < rhs.m_pipelineCache; - } -#endif - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPipelineCache() const VULKAN_HPP_NOEXCEPT { return m_pipelineCache; @@ -8264,25 +7815,6 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if defined( 
VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( CuFunctionNVX const & ) const = default; -#else - bool operator==( CuFunctionNVX const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_cuFunctionNVX == rhs.m_cuFunctionNVX; - } - - bool operator!=( CuFunctionNVX const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_cuFunctionNVX != rhs.m_cuFunctionNVX; - } - - bool operator<( CuFunctionNVX const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_cuFunctionNVX < rhs.m_cuFunctionNVX; - } -#endif - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkCuFunctionNVX() const VULKAN_HPP_NOEXCEPT { return m_cuFunctionNVX; @@ -8374,25 +7906,6 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( CuModuleNVX const & ) const = default; -#else - bool operator==( CuModuleNVX const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_cuModuleNVX == rhs.m_cuModuleNVX; - } - - bool operator!=( CuModuleNVX const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_cuModuleNVX != rhs.m_cuModuleNVX; - } - - bool operator<( CuModuleNVX const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_cuModuleNVX < rhs.m_cuModuleNVX; - } -#endif - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkCuModuleNVX() const VULKAN_HPP_NOEXCEPT { return m_cuModuleNVX; @@ -8485,25 +7998,6 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( CudaFunctionNV const & ) const = default; -# else - bool operator==( CudaFunctionNV const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_cudaFunctionNV == rhs.m_cudaFunctionNV; - } - - bool operator!=( CudaFunctionNV const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_cudaFunctionNV != rhs.m_cudaFunctionNV; - } - - bool operator<( CudaFunctionNV const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_cudaFunctionNV < rhs.m_cudaFunctionNV; - } -# endif - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkCudaFunctionNV() const VULKAN_HPP_NOEXCEPT { return m_cudaFunctionNV; @@ -8597,25 +8091,6 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( CudaModuleNV const & ) const = default; -# else - bool operator==( CudaModuleNV const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_cudaModuleNV == rhs.m_cudaModuleNV; - } - - bool operator!=( CudaModuleNV const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_cudaModuleNV != rhs.m_cudaModuleNV; - } - - bool operator<( CudaModuleNV const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_cudaModuleNV < rhs.m_cudaModuleNV; - } -# endif - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkCudaModuleNV() const VULKAN_HPP_NOEXCEPT { return m_cudaModuleNV; @@ -8708,25 +8183,6 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( DescriptorPool const & ) const = default; -#else - bool operator==( DescriptorPool const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_descriptorPool == rhs.m_descriptorPool; - } - - bool operator!=( DescriptorPool const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_descriptorPool != rhs.m_descriptorPool; - } - - bool operator<( DescriptorPool const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_descriptorPool < rhs.m_descriptorPool; - } -#endif - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDescriptorPool() const VULKAN_HPP_NOEXCEPT { return m_descriptorPool; @@ -8824,25 +8280,6 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( 
DescriptorSetLayout const & ) const = default; -#else - bool operator==( DescriptorSetLayout const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_descriptorSetLayout == rhs.m_descriptorSetLayout; - } - - bool operator!=( DescriptorSetLayout const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_descriptorSetLayout != rhs.m_descriptorSetLayout; - } - - bool operator<( DescriptorSetLayout const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_descriptorSetLayout < rhs.m_descriptorSetLayout; - } -#endif - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDescriptorSetLayout() const VULKAN_HPP_NOEXCEPT { return m_descriptorSetLayout; @@ -8934,25 +8371,6 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( Framebuffer const & ) const = default; -#else - bool operator==( Framebuffer const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_framebuffer == rhs.m_framebuffer; - } - - bool operator!=( Framebuffer const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_framebuffer != rhs.m_framebuffer; - } - - bool operator<( Framebuffer const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_framebuffer < rhs.m_framebuffer; - } -#endif - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkFramebuffer() const VULKAN_HPP_NOEXCEPT { return m_framebuffer; @@ -9050,25 +8468,6 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( IndirectCommandsLayoutEXT const & ) const = default; -#else - bool operator==( IndirectCommandsLayoutEXT const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_indirectCommandsLayoutEXT == rhs.m_indirectCommandsLayoutEXT; - } - - bool operator!=( IndirectCommandsLayoutEXT const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_indirectCommandsLayoutEXT != rhs.m_indirectCommandsLayoutEXT; - } - - bool operator<( IndirectCommandsLayoutEXT const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_indirectCommandsLayoutEXT < rhs.m_indirectCommandsLayoutEXT; - } -#endif - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkIndirectCommandsLayoutEXT() const VULKAN_HPP_NOEXCEPT { return m_indirectCommandsLayoutEXT; @@ -9160,25 +8559,6 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( IndirectCommandsLayoutNV const & ) const = default; -#else - bool operator==( IndirectCommandsLayoutNV const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_indirectCommandsLayoutNV == rhs.m_indirectCommandsLayoutNV; - } - - bool operator!=( IndirectCommandsLayoutNV const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_indirectCommandsLayoutNV != rhs.m_indirectCommandsLayoutNV; - } - - bool operator<( IndirectCommandsLayoutNV const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_indirectCommandsLayoutNV < rhs.m_indirectCommandsLayoutNV; - } -#endif - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkIndirectCommandsLayoutNV() const VULKAN_HPP_NOEXCEPT { return m_indirectCommandsLayoutNV; @@ -9270,25 +8650,6 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( IndirectExecutionSetEXT const & ) const = default; -#else - bool operator==( IndirectExecutionSetEXT const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_indirectExecutionSetEXT == rhs.m_indirectExecutionSetEXT; - } - - bool operator!=( IndirectExecutionSetEXT const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_indirectExecutionSetEXT != rhs.m_indirectExecutionSetEXT; - } - - bool operator<( IndirectExecutionSetEXT const & rhs ) 
const VULKAN_HPP_NOEXCEPT - { - return m_indirectExecutionSetEXT < rhs.m_indirectExecutionSetEXT; - } -#endif - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkIndirectExecutionSetEXT() const VULKAN_HPP_NOEXCEPT { return m_indirectExecutionSetEXT; @@ -9374,25 +8735,6 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PrivateDataSlot const & ) const = default; -#else - bool operator==( PrivateDataSlot const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_privateDataSlot == rhs.m_privateDataSlot; - } - - bool operator!=( PrivateDataSlot const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_privateDataSlot != rhs.m_privateDataSlot; - } - - bool operator<( PrivateDataSlot const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_privateDataSlot < rhs.m_privateDataSlot; - } -#endif - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPrivateDataSlot() const VULKAN_HPP_NOEXCEPT { return m_privateDataSlot; @@ -9480,25 +8822,6 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( RenderPass const & ) const = default; -#else - bool operator==( RenderPass const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_renderPass == rhs.m_renderPass; - } - - bool operator!=( RenderPass const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_renderPass != rhs.m_renderPass; - } - - bool operator<( RenderPass const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_renderPass < rhs.m_renderPass; - } -#endif - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkRenderPass() const VULKAN_HPP_NOEXCEPT { return m_renderPass; @@ -9590,25 +8913,6 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( Sampler const & ) const = default; -#else - bool operator==( Sampler const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_sampler == rhs.m_sampler; - } - - bool operator!=( Sampler const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_sampler != rhs.m_sampler; - } - - bool operator<( Sampler const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_sampler < rhs.m_sampler; - } -#endif - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSampler() const VULKAN_HPP_NOEXCEPT { return m_sampler; @@ -9706,25 +9010,6 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( SamplerYcbcrConversion const & ) const = default; -#else - bool operator==( SamplerYcbcrConversion const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_samplerYcbcrConversion == rhs.m_samplerYcbcrConversion; - } - - bool operator!=( SamplerYcbcrConversion const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_samplerYcbcrConversion != rhs.m_samplerYcbcrConversion; - } - - bool operator<( SamplerYcbcrConversion const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_samplerYcbcrConversion < rhs.m_samplerYcbcrConversion; - } -#endif - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSamplerYcbcrConversion() const VULKAN_HPP_NOEXCEPT { return m_samplerYcbcrConversion; @@ -9818,25 +9103,6 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( ShaderModule const & ) const = default; -#else - bool operator==( ShaderModule const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_shaderModule == rhs.m_shaderModule; - } - - bool operator!=( ShaderModule const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_shaderModule != rhs.m_shaderModule; - } - - bool operator<( ShaderModule const & rhs ) const 
VULKAN_HPP_NOEXCEPT - { - return m_shaderModule < rhs.m_shaderModule; - } -#endif - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkShaderModule() const VULKAN_HPP_NOEXCEPT { return m_shaderModule; @@ -9932,25 +9198,6 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( ValidationCacheEXT const & ) const = default; -#else - bool operator==( ValidationCacheEXT const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_validationCacheEXT == rhs.m_validationCacheEXT; - } - - bool operator!=( ValidationCacheEXT const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_validationCacheEXT != rhs.m_validationCacheEXT; - } - - bool operator<( ValidationCacheEXT const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_validationCacheEXT < rhs.m_validationCacheEXT; - } -#endif - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkValidationCacheEXT() const VULKAN_HPP_NOEXCEPT { return m_validationCacheEXT; @@ -10048,25 +9295,6 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( VideoSessionParametersKHR const & ) const = default; -#else - bool operator==( VideoSessionParametersKHR const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_videoSessionParametersKHR == rhs.m_videoSessionParametersKHR; - } - - bool operator!=( VideoSessionParametersKHR const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_videoSessionParametersKHR != rhs.m_videoSessionParametersKHR; - } - - bool operator<( VideoSessionParametersKHR const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_videoSessionParametersKHR < rhs.m_videoSessionParametersKHR; - } -#endif - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkVideoSessionParametersKHR() const VULKAN_HPP_NOEXCEPT { return m_videoSessionParametersKHR; @@ -10152,25 +9380,6 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PipelineBinaryKHR const & ) const = default; -#else - bool operator==( PipelineBinaryKHR const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_pipelineBinaryKHR == rhs.m_pipelineBinaryKHR; - } - - bool operator!=( PipelineBinaryKHR const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_pipelineBinaryKHR != rhs.m_pipelineBinaryKHR; - } - - bool operator<( PipelineBinaryKHR const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_pipelineBinaryKHR < rhs.m_pipelineBinaryKHR; - } -#endif - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPipelineBinaryKHR() const VULKAN_HPP_NOEXCEPT { return m_pipelineBinaryKHR; @@ -10254,25 +9463,6 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( Queue const & ) const = default; -#else - bool operator==( Queue const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_queue == rhs.m_queue; - } - - bool operator!=( Queue const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_queue != rhs.m_queue; - } - - bool operator<( Queue const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_queue < rhs.m_queue; - } -#endif - //=== VK_VERSION_1_0 === template @@ -10526,25 +9716,6 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( Device const & ) const = default; -#else - bool operator==( Device const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_device == rhs.m_device; - } - - bool operator!=( Device const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_device != rhs.m_device; - } - - bool operator<( Device const & rhs ) const VULKAN_HPP_NOEXCEPT - 
{ - return m_device < rhs.m_device; - } -#endif - //=== VK_VERSION_1_0 === template @@ -12354,6 +11525,112 @@ namespace VULKAN_HPP_NAMESPACE Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + //=== VK_VERSION_1_4 === + + template + VULKAN_HPP_NODISCARD Result mapMemory2( const VULKAN_HPP_NAMESPACE::MemoryMapInfo * pMemoryMapInfo, + void ** ppData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD typename ResultValueType::type mapMemory2( const VULKAN_HPP_NAMESPACE::MemoryMapInfo & memoryMapInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD Result unmapMemory2( const VULKAN_HPP_NAMESPACE::MemoryUnmapInfo * pMemoryUnmapInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + typename ResultValueType::type unmapMemory2( const VULKAN_HPP_NAMESPACE::MemoryUnmapInfo & memoryUnmapInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void getRenderingAreaGranularity( const VULKAN_HPP_NAMESPACE::RenderingAreaInfo * pRenderingAreaInfo, + VULKAN_HPP_NAMESPACE::Extent2D * pGranularity, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Extent2D + getRenderingAreaGranularity( const VULKAN_HPP_NAMESPACE::RenderingAreaInfo & renderingAreaInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void getImageSubresourceLayout( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfo * pInfo, + VULKAN_HPP_NAMESPACE::SubresourceLayout2 * pLayout, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SubresourceLayout2 + getImageSubresourceLayout( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfo & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getImageSubresourceLayout( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfo & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void getImageSubresourceLayout2( VULKAN_HPP_NAMESPACE::Image image, + const VULKAN_HPP_NAMESPACE::ImageSubresource2 * pSubresource, + VULKAN_HPP_NAMESPACE::SubresourceLayout2 * pLayout, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SubresourceLayout2 + getImageSubresourceLayout2( VULKAN_HPP_NAMESPACE::Image image, + const VULKAN_HPP_NAMESPACE::ImageSubresource2 & subresource, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getImageSubresourceLayout2( VULKAN_HPP_NAMESPACE::Image image, + const 
VULKAN_HPP_NAMESPACE::ImageSubresource2 & subresource, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD Result copyMemoryToImage( const VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfo * pCopyMemoryToImageInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + copyMemoryToImage( const VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfo & copyMemoryToImageInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD Result copyImageToMemory( const VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfo * pCopyImageToMemoryInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + copyImageToMemory( const VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfo & copyImageToMemoryInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD Result copyImageToImage( const VULKAN_HPP_NAMESPACE::CopyImageToImageInfo * pCopyImageToImageInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + copyImageToImage( const VULKAN_HPP_NAMESPACE::CopyImageToImageInfo & copyImageToImageInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD Result transitionImageLayout( uint32_t transitionCount, + const VULKAN_HPP_NAMESPACE::HostImageLayoutTransitionInfo * pTransitions, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + transitionImageLayout( VULKAN_HPP_NAMESPACE::ArrayProxy const & transitions, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + //=== VK_KHR_swapchain === template @@ -14443,83 +13720,83 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_EXT_host_image_copy === template - VULKAN_HPP_NODISCARD Result copyMemoryToImageEXT( const VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfoEXT * pCopyMemoryToImageInfo, + VULKAN_HPP_NODISCARD Result copyMemoryToImageEXT( const VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfo * pCopyMemoryToImageInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type - copyMemoryToImageEXT( const VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfoEXT & copyMemoryToImageInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + copyMemoryToImageEXT( const VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfo & copyMemoryToImageInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD Result copyImageToMemoryEXT( const 
VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfoEXT * pCopyImageToMemoryInfo, + VULKAN_HPP_NODISCARD Result copyImageToMemoryEXT( const VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfo * pCopyImageToMemoryInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type - copyImageToMemoryEXT( const VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfoEXT & copyImageToMemoryInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + copyImageToMemoryEXT( const VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfo & copyImageToMemoryInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD Result copyImageToImageEXT( const VULKAN_HPP_NAMESPACE::CopyImageToImageInfoEXT * pCopyImageToImageInfo, + VULKAN_HPP_NODISCARD Result copyImageToImageEXT( const VULKAN_HPP_NAMESPACE::CopyImageToImageInfo * pCopyImageToImageInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type - copyImageToImageEXT( const VULKAN_HPP_NAMESPACE::CopyImageToImageInfoEXT & copyImageToImageInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + copyImageToImageEXT( const VULKAN_HPP_NAMESPACE::CopyImageToImageInfo & copyImageToImageInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD Result transitionImageLayoutEXT( uint32_t transitionCount, - const VULKAN_HPP_NAMESPACE::HostImageLayoutTransitionInfoEXT * pTransitions, + VULKAN_HPP_NODISCARD Result transitionImageLayoutEXT( uint32_t transitionCount, + const VULKAN_HPP_NAMESPACE::HostImageLayoutTransitionInfo * pTransitions, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type - transitionImageLayoutEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & transitions, + transitionImageLayoutEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & transitions, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - void getImageSubresourceLayout2EXT( VULKAN_HPP_NAMESPACE::Image image, - const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR * pSubresource, - VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR * pLayout, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + void getImageSubresourceLayout2EXT( VULKAN_HPP_NAMESPACE::Image image, + const VULKAN_HPP_NAMESPACE::ImageSubresource2 * pSubresource, + VULKAN_HPP_NAMESPACE::SubresourceLayout2 * pLayout, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR - getImageSubresourceLayout2EXT( VULKAN_HPP_NAMESPACE::Image image, - const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SubresourceLayout2 + getImageSubresourceLayout2EXT( VULKAN_HPP_NAMESPACE::Image image, + const 
VULKAN_HPP_NAMESPACE::ImageSubresource2 & subresource, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; template VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain - getImageSubresourceLayout2EXT( VULKAN_HPP_NAMESPACE::Image image, - const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + getImageSubresourceLayout2EXT( VULKAN_HPP_NAMESPACE::Image image, + const VULKAN_HPP_NAMESPACE::ImageSubresource2 & subresource, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ //=== VK_KHR_map_memory2 === template - VULKAN_HPP_NODISCARD Result mapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryMapInfoKHR * pMemoryMapInfo, - void ** ppData, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD Result mapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryMapInfo * pMemoryMapInfo, + void ** ppData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD typename ResultValueType::type mapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryMapInfoKHR & memoryMapInfo, + VULKAN_HPP_NODISCARD typename ResultValueType::type mapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryMapInfo & memoryMapInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - VULKAN_HPP_NODISCARD Result unmapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryUnmapInfoKHR * pMemoryUnmapInfo, + VULKAN_HPP_NODISCARD Result unmapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryUnmapInfo * pMemoryUnmapInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - typename ResultValueType::type unmapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryUnmapInfoKHR & memoryUnmapInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; + typename ResultValueType::type unmapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryUnmapInfo & memoryUnmapInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ //=== VK_EXT_swapchain_maintenance1 === @@ -15448,47 +14725,47 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_KHR_maintenance5 === template - void getRenderingAreaGranularityKHR( const VULKAN_HPP_NAMESPACE::RenderingAreaInfoKHR * pRenderingAreaInfo, - VULKAN_HPP_NAMESPACE::Extent2D * pGranularity, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + void getRenderingAreaGranularityKHR( const VULKAN_HPP_NAMESPACE::RenderingAreaInfo * pRenderingAreaInfo, + VULKAN_HPP_NAMESPACE::Extent2D * pGranularity, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Extent2D - getRenderingAreaGranularityKHR( const VULKAN_HPP_NAMESPACE::RenderingAreaInfoKHR & renderingAreaInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + getRenderingAreaGranularityKHR( const VULKAN_HPP_NAMESPACE::RenderingAreaInfo & renderingAreaInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE 
*/ template - void getImageSubresourceLayoutKHR( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfoKHR * pInfo, - VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR * pLayout, + void getImageSubresourceLayoutKHR( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfo * pInfo, + VULKAN_HPP_NAMESPACE::SubresourceLayout2 * pLayout, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR - getImageSubresourceLayoutKHR( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfoKHR & info, + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SubresourceLayout2 + getImageSubresourceLayoutKHR( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfo & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; template VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain - getImageSubresourceLayoutKHR( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfoKHR & info, + getImageSubresourceLayoutKHR( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfo & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ template - void getImageSubresourceLayout2KHR( VULKAN_HPP_NAMESPACE::Image image, - const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR * pSubresource, - VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR * pLayout, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + void getImageSubresourceLayout2KHR( VULKAN_HPP_NAMESPACE::Image image, + const VULKAN_HPP_NAMESPACE::ImageSubresource2 * pSubresource, + VULKAN_HPP_NAMESPACE::SubresourceLayout2 * pLayout, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE template - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR - getImageSubresourceLayout2KHR( VULKAN_HPP_NAMESPACE::Image image, - const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SubresourceLayout2 + getImageSubresourceLayout2KHR( VULKAN_HPP_NAMESPACE::Image image, + const VULKAN_HPP_NAMESPACE::ImageSubresource2 & subresource, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; template VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain - getImageSubresourceLayout2KHR( VULKAN_HPP_NAMESPACE::Image image, - const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + getImageSubresourceLayout2KHR( VULKAN_HPP_NAMESPACE::Image image, + const VULKAN_HPP_NAMESPACE::ImageSubresource2 & subresource, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ //=== VK_AMD_anti_lag === @@ -16067,25 +15344,6 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( DisplayModeKHR const & ) const = default; -#else - bool operator==( DisplayModeKHR const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_displayModeKHR == rhs.m_displayModeKHR; - } - - bool operator!=( DisplayModeKHR const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_displayModeKHR != rhs.m_displayModeKHR; - } - - bool operator<( 
DisplayModeKHR const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_displayModeKHR < rhs.m_displayModeKHR; - } -#endif - VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDisplayModeKHR() const VULKAN_HPP_NOEXCEPT { return m_displayModeKHR; @@ -16175,25 +15433,6 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDevice const & ) const = default; -#else - bool operator==( PhysicalDevice const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_physicalDevice == rhs.m_physicalDevice; - } - - bool operator!=( PhysicalDevice const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_physicalDevice != rhs.m_physicalDevice; - } - - bool operator<( PhysicalDevice const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_physicalDevice < rhs.m_physicalDevice; - } -#endif - //=== VK_VERSION_1_0 === template @@ -17761,25 +17000,6 @@ namespace VULKAN_HPP_NAMESPACE return *this; } -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( Instance const & ) const = default; -#else - bool operator==( Instance const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_instance == rhs.m_instance; - } - - bool operator!=( Instance const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_instance != rhs.m_instance; - } - - bool operator<( Instance const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return m_instance < rhs.m_instance; - } -#endif - //=== VK_VERSION_1_0 === template @@ -18487,29 +17707,4 @@ namespace VULKAN_HPP_NAMESPACE #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ } // namespace VULKAN_HPP_NAMESPACE - -// operators to compare vk::-handles with nullptr -template -typename std::enable_if::value, bool>::type operator==( const T & v, std::nullptr_t ) -{ - return !v; -} - -template -typename std::enable_if::value, bool>::type operator==( std::nullptr_t, const T & v ) -{ - return !v; -} - -template -typename std::enable_if::value, bool>::type operator!=( const T & v, std::nullptr_t ) -{ - return v; -} - -template -typename std::enable_if::value, bool>::type operator!=( std::nullptr_t, const T & v ) -{ - return v; -} #endif diff --git a/third_party/vulkan/vulkan_hash.hpp b/third_party/vulkan/vulkan_hash.hpp index 0d6d35c..0f54c3f 100644 --- a/third_party/vulkan/vulkan_hash.hpp +++ b/third_party/vulkan/vulkan_hash.hpp @@ -1394,20 +1394,20 @@ namespace std }; template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::BindDescriptorSetsInfoKHR const & bindDescriptorSetsInfoKHR ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::BindDescriptorSetsInfo const & bindDescriptorSetsInfo ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, bindDescriptorSetsInfoKHR.sType ); - VULKAN_HPP_HASH_COMBINE( seed, bindDescriptorSetsInfoKHR.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, bindDescriptorSetsInfoKHR.stageFlags ); - VULKAN_HPP_HASH_COMBINE( seed, bindDescriptorSetsInfoKHR.layout ); - VULKAN_HPP_HASH_COMBINE( seed, bindDescriptorSetsInfoKHR.firstSet ); - VULKAN_HPP_HASH_COMBINE( seed, bindDescriptorSetsInfoKHR.descriptorSetCount ); - VULKAN_HPP_HASH_COMBINE( seed, bindDescriptorSetsInfoKHR.pDescriptorSets ); - VULKAN_HPP_HASH_COMBINE( seed, bindDescriptorSetsInfoKHR.dynamicOffsetCount ); - VULKAN_HPP_HASH_COMBINE( seed, bindDescriptorSetsInfoKHR.pDynamicOffsets ); + VULKAN_HPP_HASH_COMBINE( seed, bindDescriptorSetsInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, bindDescriptorSetsInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, bindDescriptorSetsInfo.stageFlags ); + 
VULKAN_HPP_HASH_COMBINE( seed, bindDescriptorSetsInfo.layout ); + VULKAN_HPP_HASH_COMBINE( seed, bindDescriptorSetsInfo.firstSet ); + VULKAN_HPP_HASH_COMBINE( seed, bindDescriptorSetsInfo.descriptorSetCount ); + VULKAN_HPP_HASH_COMBINE( seed, bindDescriptorSetsInfo.pDescriptorSets ); + VULKAN_HPP_HASH_COMBINE( seed, bindDescriptorSetsInfo.dynamicOffsetCount ); + VULKAN_HPP_HASH_COMBINE( seed, bindDescriptorSetsInfo.pDynamicOffsets ); return seed; } }; @@ -1521,14 +1521,14 @@ namespace std }; template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::BindMemoryStatusKHR const & bindMemoryStatusKHR ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::BindMemoryStatus const & bindMemoryStatus ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, bindMemoryStatusKHR.sType ); - VULKAN_HPP_HASH_COMBINE( seed, bindMemoryStatusKHR.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, bindMemoryStatusKHR.pResult ); + VULKAN_HPP_HASH_COMBINE( seed, bindMemoryStatus.sType ); + VULKAN_HPP_HASH_COMBINE( seed, bindMemoryStatus.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, bindMemoryStatus.pResult ); return seed; } }; @@ -2108,14 +2108,14 @@ namespace std }; template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::BufferUsageFlags2CreateInfoKHR const & bufferUsageFlags2CreateInfoKHR ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::BufferUsageFlags2CreateInfo const & bufferUsageFlags2CreateInfo ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, bufferUsageFlags2CreateInfoKHR.sType ); - VULKAN_HPP_HASH_COMBINE( seed, bufferUsageFlags2CreateInfoKHR.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, bufferUsageFlags2CreateInfoKHR.usage ); + VULKAN_HPP_HASH_COMBINE( seed, bufferUsageFlags2CreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, bufferUsageFlags2CreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, bufferUsageFlags2CreateInfo.usage ); return seed; } }; @@ -2724,55 +2724,55 @@ namespace std }; template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::CopyImageToImageInfoEXT const & copyImageToImageInfoEXT ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::CopyImageToImageInfo const & copyImageToImageInfo ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, copyImageToImageInfoEXT.sType ); - VULKAN_HPP_HASH_COMBINE( seed, copyImageToImageInfoEXT.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, copyImageToImageInfoEXT.flags ); - VULKAN_HPP_HASH_COMBINE( seed, copyImageToImageInfoEXT.srcImage ); - VULKAN_HPP_HASH_COMBINE( seed, copyImageToImageInfoEXT.srcImageLayout ); - VULKAN_HPP_HASH_COMBINE( seed, copyImageToImageInfoEXT.dstImage ); - VULKAN_HPP_HASH_COMBINE( seed, copyImageToImageInfoEXT.dstImageLayout ); - VULKAN_HPP_HASH_COMBINE( seed, copyImageToImageInfoEXT.regionCount ); - VULKAN_HPP_HASH_COMBINE( seed, copyImageToImageInfoEXT.pRegions ); + VULKAN_HPP_HASH_COMBINE( seed, copyImageToImageInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, copyImageToImageInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, copyImageToImageInfo.flags ); + VULKAN_HPP_HASH_COMBINE( seed, copyImageToImageInfo.srcImage ); + VULKAN_HPP_HASH_COMBINE( seed, copyImageToImageInfo.srcImageLayout ); + VULKAN_HPP_HASH_COMBINE( seed, copyImageToImageInfo.dstImage ); + VULKAN_HPP_HASH_COMBINE( seed, copyImageToImageInfo.dstImageLayout ); + VULKAN_HPP_HASH_COMBINE( seed, copyImageToImageInfo.regionCount 
); + VULKAN_HPP_HASH_COMBINE( seed, copyImageToImageInfo.pRegions ); return seed; } }; template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageToMemoryCopyEXT const & imageToMemoryCopyEXT ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageToMemoryCopy const & imageToMemoryCopy ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, imageToMemoryCopyEXT.sType ); - VULKAN_HPP_HASH_COMBINE( seed, imageToMemoryCopyEXT.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, imageToMemoryCopyEXT.pHostPointer ); - VULKAN_HPP_HASH_COMBINE( seed, imageToMemoryCopyEXT.memoryRowLength ); - VULKAN_HPP_HASH_COMBINE( seed, imageToMemoryCopyEXT.memoryImageHeight ); - VULKAN_HPP_HASH_COMBINE( seed, imageToMemoryCopyEXT.imageSubresource ); - VULKAN_HPP_HASH_COMBINE( seed, imageToMemoryCopyEXT.imageOffset ); - VULKAN_HPP_HASH_COMBINE( seed, imageToMemoryCopyEXT.imageExtent ); + VULKAN_HPP_HASH_COMBINE( seed, imageToMemoryCopy.sType ); + VULKAN_HPP_HASH_COMBINE( seed, imageToMemoryCopy.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, imageToMemoryCopy.pHostPointer ); + VULKAN_HPP_HASH_COMBINE( seed, imageToMemoryCopy.memoryRowLength ); + VULKAN_HPP_HASH_COMBINE( seed, imageToMemoryCopy.memoryImageHeight ); + VULKAN_HPP_HASH_COMBINE( seed, imageToMemoryCopy.imageSubresource ); + VULKAN_HPP_HASH_COMBINE( seed, imageToMemoryCopy.imageOffset ); + VULKAN_HPP_HASH_COMBINE( seed, imageToMemoryCopy.imageExtent ); return seed; } }; template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfoEXT const & copyImageToMemoryInfoEXT ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfo const & copyImageToMemoryInfo ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, copyImageToMemoryInfoEXT.sType ); - VULKAN_HPP_HASH_COMBINE( seed, copyImageToMemoryInfoEXT.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, copyImageToMemoryInfoEXT.flags ); - VULKAN_HPP_HASH_COMBINE( seed, copyImageToMemoryInfoEXT.srcImage ); - VULKAN_HPP_HASH_COMBINE( seed, copyImageToMemoryInfoEXT.srcImageLayout ); - VULKAN_HPP_HASH_COMBINE( seed, copyImageToMemoryInfoEXT.regionCount ); - VULKAN_HPP_HASH_COMBINE( seed, copyImageToMemoryInfoEXT.pRegions ); + VULKAN_HPP_HASH_COMBINE( seed, copyImageToMemoryInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, copyImageToMemoryInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, copyImageToMemoryInfo.flags ); + VULKAN_HPP_HASH_COMBINE( seed, copyImageToMemoryInfo.srcImage ); + VULKAN_HPP_HASH_COMBINE( seed, copyImageToMemoryInfo.srcImageLayout ); + VULKAN_HPP_HASH_COMBINE( seed, copyImageToMemoryInfo.regionCount ); + VULKAN_HPP_HASH_COMBINE( seed, copyImageToMemoryInfo.pRegions ); return seed; } }; @@ -2807,36 +2807,36 @@ namespace std }; template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::MemoryToImageCopyEXT const & memoryToImageCopyEXT ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::MemoryToImageCopy const & memoryToImageCopy ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, memoryToImageCopyEXT.sType ); - VULKAN_HPP_HASH_COMBINE( seed, memoryToImageCopyEXT.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, memoryToImageCopyEXT.pHostPointer ); - VULKAN_HPP_HASH_COMBINE( seed, memoryToImageCopyEXT.memoryRowLength ); - VULKAN_HPP_HASH_COMBINE( seed, memoryToImageCopyEXT.memoryImageHeight ); - VULKAN_HPP_HASH_COMBINE( seed, 
memoryToImageCopyEXT.imageSubresource ); - VULKAN_HPP_HASH_COMBINE( seed, memoryToImageCopyEXT.imageOffset ); - VULKAN_HPP_HASH_COMBINE( seed, memoryToImageCopyEXT.imageExtent ); + VULKAN_HPP_HASH_COMBINE( seed, memoryToImageCopy.sType ); + VULKAN_HPP_HASH_COMBINE( seed, memoryToImageCopy.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, memoryToImageCopy.pHostPointer ); + VULKAN_HPP_HASH_COMBINE( seed, memoryToImageCopy.memoryRowLength ); + VULKAN_HPP_HASH_COMBINE( seed, memoryToImageCopy.memoryImageHeight ); + VULKAN_HPP_HASH_COMBINE( seed, memoryToImageCopy.imageSubresource ); + VULKAN_HPP_HASH_COMBINE( seed, memoryToImageCopy.imageOffset ); + VULKAN_HPP_HASH_COMBINE( seed, memoryToImageCopy.imageExtent ); return seed; } }; template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfoEXT const & copyMemoryToImageInfoEXT ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfo const & copyMemoryToImageInfo ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, copyMemoryToImageInfoEXT.sType ); - VULKAN_HPP_HASH_COMBINE( seed, copyMemoryToImageInfoEXT.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, copyMemoryToImageInfoEXT.flags ); - VULKAN_HPP_HASH_COMBINE( seed, copyMemoryToImageInfoEXT.dstImage ); - VULKAN_HPP_HASH_COMBINE( seed, copyMemoryToImageInfoEXT.dstImageLayout ); - VULKAN_HPP_HASH_COMBINE( seed, copyMemoryToImageInfoEXT.regionCount ); - VULKAN_HPP_HASH_COMBINE( seed, copyMemoryToImageInfoEXT.pRegions ); + VULKAN_HPP_HASH_COMBINE( seed, copyMemoryToImageInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, copyMemoryToImageInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, copyMemoryToImageInfo.flags ); + VULKAN_HPP_HASH_COMBINE( seed, copyMemoryToImageInfo.dstImage ); + VULKAN_HPP_HASH_COMBINE( seed, copyMemoryToImageInfo.dstImageLayout ); + VULKAN_HPP_HASH_COMBINE( seed, copyMemoryToImageInfo.regionCount ); + VULKAN_HPP_HASH_COMBINE( seed, copyMemoryToImageInfo.pRegions ); return seed; } }; @@ -4052,28 +4052,28 @@ namespace std }; template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageSubresource2KHR const & imageSubresource2KHR ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::ImageSubresource2 const & imageSubresource2 ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, imageSubresource2KHR.sType ); - VULKAN_HPP_HASH_COMBINE( seed, imageSubresource2KHR.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, imageSubresource2KHR.imageSubresource ); + VULKAN_HPP_HASH_COMBINE( seed, imageSubresource2.sType ); + VULKAN_HPP_HASH_COMBINE( seed, imageSubresource2.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, imageSubresource2.imageSubresource ); return seed; } }; template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfoKHR const & deviceImageSubresourceInfoKHR ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfo const & deviceImageSubresourceInfo ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, deviceImageSubresourceInfoKHR.sType ); - VULKAN_HPP_HASH_COMBINE( seed, deviceImageSubresourceInfoKHR.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, deviceImageSubresourceInfoKHR.pCreateInfo ); - VULKAN_HPP_HASH_COMBINE( seed, deviceImageSubresourceInfoKHR.pSubresource ); + VULKAN_HPP_HASH_COMBINE( seed, deviceImageSubresourceInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, 
deviceImageSubresourceInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, deviceImageSubresourceInfo.pCreateInfo ); + VULKAN_HPP_HASH_COMBINE( seed, deviceImageSubresourceInfo.pSubresource ); return seed; } }; @@ -4152,15 +4152,14 @@ namespace std }; template <> - struct hash + struct hash { - std::size_t - operator()( VULKAN_HPP_NAMESPACE::DeviceQueueGlobalPriorityCreateInfoKHR const & deviceQueueGlobalPriorityCreateInfoKHR ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::DeviceQueueGlobalPriorityCreateInfo const & deviceQueueGlobalPriorityCreateInfo ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, deviceQueueGlobalPriorityCreateInfoKHR.sType ); - VULKAN_HPP_HASH_COMBINE( seed, deviceQueueGlobalPriorityCreateInfoKHR.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, deviceQueueGlobalPriorityCreateInfoKHR.globalPriority ); + VULKAN_HPP_HASH_COMBINE( seed, deviceQueueGlobalPriorityCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, deviceQueueGlobalPriorityCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, deviceQueueGlobalPriorityCreateInfo.globalPriority ); return seed; } }; @@ -5860,32 +5859,31 @@ namespace std }; template <> - struct hash + struct hash { - std::size_t - operator()( VULKAN_HPP_NAMESPACE::HostImageCopyDevicePerformanceQueryEXT const & hostImageCopyDevicePerformanceQueryEXT ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::HostImageCopyDevicePerformanceQuery const & hostImageCopyDevicePerformanceQuery ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, hostImageCopyDevicePerformanceQueryEXT.sType ); - VULKAN_HPP_HASH_COMBINE( seed, hostImageCopyDevicePerformanceQueryEXT.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, hostImageCopyDevicePerformanceQueryEXT.optimalDeviceAccess ); - VULKAN_HPP_HASH_COMBINE( seed, hostImageCopyDevicePerformanceQueryEXT.identicalMemoryLayout ); + VULKAN_HPP_HASH_COMBINE( seed, hostImageCopyDevicePerformanceQuery.sType ); + VULKAN_HPP_HASH_COMBINE( seed, hostImageCopyDevicePerformanceQuery.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, hostImageCopyDevicePerformanceQuery.optimalDeviceAccess ); + VULKAN_HPP_HASH_COMBINE( seed, hostImageCopyDevicePerformanceQuery.identicalMemoryLayout ); return seed; } }; template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::HostImageLayoutTransitionInfoEXT const & hostImageLayoutTransitionInfoEXT ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::HostImageLayoutTransitionInfo const & hostImageLayoutTransitionInfo ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, hostImageLayoutTransitionInfoEXT.sType ); - VULKAN_HPP_HASH_COMBINE( seed, hostImageLayoutTransitionInfoEXT.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, hostImageLayoutTransitionInfoEXT.image ); - VULKAN_HPP_HASH_COMBINE( seed, hostImageLayoutTransitionInfoEXT.oldLayout ); - VULKAN_HPP_HASH_COMBINE( seed, hostImageLayoutTransitionInfoEXT.newLayout ); - VULKAN_HPP_HASH_COMBINE( seed, hostImageLayoutTransitionInfoEXT.subresourceRange ); + VULKAN_HPP_HASH_COMBINE( seed, hostImageLayoutTransitionInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, hostImageLayoutTransitionInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, hostImageLayoutTransitionInfo.image ); + VULKAN_HPP_HASH_COMBINE( seed, hostImageLayoutTransitionInfo.oldLayout ); + VULKAN_HPP_HASH_COMBINE( seed, hostImageLayoutTransitionInfo.newLayout ); + VULKAN_HPP_HASH_COMBINE( seed, 
hostImageLayoutTransitionInfo.subresourceRange ); return seed; } }; @@ -7205,17 +7203,17 @@ namespace std }; template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::MemoryMapInfoKHR const & memoryMapInfoKHR ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::MemoryMapInfo const & memoryMapInfo ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, memoryMapInfoKHR.sType ); - VULKAN_HPP_HASH_COMBINE( seed, memoryMapInfoKHR.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, memoryMapInfoKHR.flags ); - VULKAN_HPP_HASH_COMBINE( seed, memoryMapInfoKHR.memory ); - VULKAN_HPP_HASH_COMBINE( seed, memoryMapInfoKHR.offset ); - VULKAN_HPP_HASH_COMBINE( seed, memoryMapInfoKHR.size ); + VULKAN_HPP_HASH_COMBINE( seed, memoryMapInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, memoryMapInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, memoryMapInfo.flags ); + VULKAN_HPP_HASH_COMBINE( seed, memoryMapInfo.memory ); + VULKAN_HPP_HASH_COMBINE( seed, memoryMapInfo.offset ); + VULKAN_HPP_HASH_COMBINE( seed, memoryMapInfo.size ); return seed; } }; @@ -7299,15 +7297,15 @@ namespace std }; template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::MemoryUnmapInfoKHR const & memoryUnmapInfoKHR ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::MemoryUnmapInfo const & memoryUnmapInfo ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, memoryUnmapInfoKHR.sType ); - VULKAN_HPP_HASH_COMBINE( seed, memoryUnmapInfoKHR.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, memoryUnmapInfoKHR.flags ); - VULKAN_HPP_HASH_COMBINE( seed, memoryUnmapInfoKHR.memory ); + VULKAN_HPP_HASH_COMBINE( seed, memoryUnmapInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, memoryUnmapInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, memoryUnmapInfo.flags ); + VULKAN_HPP_HASH_COMBINE( seed, memoryUnmapInfo.memory ); return seed; } }; @@ -8907,16 +8905,15 @@ namespace std }; template <> - struct hash + struct hash { - std::size_t - operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceDynamicRenderingLocalReadFeaturesKHR const & physicalDeviceDynamicRenderingLocalReadFeaturesKHR ) const - VULKAN_HPP_NOEXCEPT + std::size_t operator()( + VULKAN_HPP_NAMESPACE::PhysicalDeviceDynamicRenderingLocalReadFeatures const & physicalDeviceDynamicRenderingLocalReadFeatures ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDynamicRenderingLocalReadFeaturesKHR.sType ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDynamicRenderingLocalReadFeaturesKHR.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDynamicRenderingLocalReadFeaturesKHR.dynamicRenderingLocalRead ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDynamicRenderingLocalReadFeatures.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDynamicRenderingLocalReadFeatures.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceDynamicRenderingLocalReadFeatures.dynamicRenderingLocalRead ); return seed; } }; @@ -9503,15 +9500,15 @@ namespace std }; template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceGlobalPriorityQueryFeaturesKHR const & physicalDeviceGlobalPriorityQueryFeaturesKHR ) const - VULKAN_HPP_NOEXCEPT + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceGlobalPriorityQueryFeatures const & physicalDeviceGlobalPriorityQueryFeatures ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, 
physicalDeviceGlobalPriorityQueryFeaturesKHR.sType ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceGlobalPriorityQueryFeaturesKHR.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceGlobalPriorityQueryFeaturesKHR.globalPriorityQuery ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceGlobalPriorityQueryFeatures.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceGlobalPriorityQueryFeatures.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceGlobalPriorityQueryFeatures.globalPriorityQuery ); return seed; } }; @@ -9578,37 +9575,36 @@ namespace std }; template <> - struct hash + struct hash { - std::size_t - operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceHostImageCopyFeaturesEXT const & physicalDeviceHostImageCopyFeaturesEXT ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceHostImageCopyFeatures const & physicalDeviceHostImageCopyFeatures ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceHostImageCopyFeaturesEXT.sType ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceHostImageCopyFeaturesEXT.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceHostImageCopyFeaturesEXT.hostImageCopy ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceHostImageCopyFeatures.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceHostImageCopyFeatures.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceHostImageCopyFeatures.hostImageCopy ); return seed; } }; template <> - struct hash + struct hash { std::size_t - operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceHostImageCopyPropertiesEXT const & physicalDeviceHostImageCopyPropertiesEXT ) const VULKAN_HPP_NOEXCEPT + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceHostImageCopyProperties const & physicalDeviceHostImageCopyProperties ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceHostImageCopyPropertiesEXT.sType ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceHostImageCopyPropertiesEXT.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceHostImageCopyPropertiesEXT.copySrcLayoutCount ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceHostImageCopyPropertiesEXT.pCopySrcLayouts ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceHostImageCopyPropertiesEXT.copyDstLayoutCount ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceHostImageCopyPropertiesEXT.pCopyDstLayouts ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceHostImageCopyProperties.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceHostImageCopyProperties.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceHostImageCopyProperties.copySrcLayoutCount ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceHostImageCopyProperties.pCopySrcLayouts ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceHostImageCopyProperties.copyDstLayoutCount ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceHostImageCopyProperties.pCopyDstLayouts ); for ( size_t i = 0; i < VK_UUID_SIZE; ++i ) { - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceHostImageCopyPropertiesEXT.optimalTilingLayoutUUID[i] ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceHostImageCopyProperties.optimalTilingLayoutUUID[i] ); } - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceHostImageCopyPropertiesEXT.identicalMemoryTypeRequirements ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceHostImageCopyProperties.identicalMemoryTypeRequirements ); return seed; } }; @@ -9889,15 +9885,14 @@ namespace std }; template <> - struct hash + struct hash { - std::size_t - operator()( 
VULKAN_HPP_NAMESPACE::PhysicalDeviceIndexTypeUint8FeaturesKHR const & physicalDeviceIndexTypeUint8FeaturesKHR ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceIndexTypeUint8Features const & physicalDeviceIndexTypeUint8Features ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceIndexTypeUint8FeaturesKHR.sType ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceIndexTypeUint8FeaturesKHR.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceIndexTypeUint8FeaturesKHR.indexTypeUint8 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceIndexTypeUint8Features.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceIndexTypeUint8Features.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceIndexTypeUint8Features.indexTypeUint8 ); return seed; } }; @@ -10256,34 +10251,34 @@ namespace std }; template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationFeaturesKHR const & physicalDeviceLineRasterizationFeaturesKHR ) const - VULKAN_HPP_NOEXCEPT + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationFeatures const & physicalDeviceLineRasterizationFeatures ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLineRasterizationFeaturesKHR.sType ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLineRasterizationFeaturesKHR.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLineRasterizationFeaturesKHR.rectangularLines ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLineRasterizationFeaturesKHR.bresenhamLines ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLineRasterizationFeaturesKHR.smoothLines ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLineRasterizationFeaturesKHR.stippledRectangularLines ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLineRasterizationFeaturesKHR.stippledBresenhamLines ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLineRasterizationFeaturesKHR.stippledSmoothLines ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLineRasterizationFeatures.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLineRasterizationFeatures.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLineRasterizationFeatures.rectangularLines ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLineRasterizationFeatures.bresenhamLines ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLineRasterizationFeatures.smoothLines ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLineRasterizationFeatures.stippledRectangularLines ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLineRasterizationFeatures.stippledBresenhamLines ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLineRasterizationFeatures.stippledSmoothLines ); return seed; } }; template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationPropertiesKHR const & physicalDeviceLineRasterizationPropertiesKHR ) const - VULKAN_HPP_NOEXCEPT + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationProperties const & physicalDeviceLineRasterizationProperties ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLineRasterizationPropertiesKHR.sType ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLineRasterizationPropertiesKHR.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLineRasterizationPropertiesKHR.lineSubPixelPrecisionBits ); + VULKAN_HPP_HASH_COMBINE( seed, 
physicalDeviceLineRasterizationProperties.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLineRasterizationProperties.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceLineRasterizationProperties.lineSubPixelPrecisionBits ); return seed; } }; @@ -10343,64 +10338,60 @@ namespace std }; template <> - struct hash + struct hash { - std::size_t - operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance5FeaturesKHR const & physicalDeviceMaintenance5FeaturesKHR ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance5Features const & physicalDeviceMaintenance5Features ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance5FeaturesKHR.sType ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance5FeaturesKHR.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance5FeaturesKHR.maintenance5 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance5Features.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance5Features.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance5Features.maintenance5 ); return seed; } }; template <> - struct hash + struct hash { - std::size_t - operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance5PropertiesKHR const & physicalDeviceMaintenance5PropertiesKHR ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance5Properties const & physicalDeviceMaintenance5Properties ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance5PropertiesKHR.sType ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance5PropertiesKHR.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance5PropertiesKHR.earlyFragmentMultisampleCoverageAfterSampleCounting ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance5PropertiesKHR.earlyFragmentSampleMaskTestBeforeSampleCounting ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance5PropertiesKHR.depthStencilSwizzleOneSupport ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance5PropertiesKHR.polygonModePointSize ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance5PropertiesKHR.nonStrictSinglePixelWideLinesUseParallelogram ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance5PropertiesKHR.nonStrictWideLinesUseParallelogram ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance5Properties.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance5Properties.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance5Properties.earlyFragmentMultisampleCoverageAfterSampleCounting ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance5Properties.earlyFragmentSampleMaskTestBeforeSampleCounting ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance5Properties.depthStencilSwizzleOneSupport ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance5Properties.polygonModePointSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance5Properties.nonStrictSinglePixelWideLinesUseParallelogram ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance5Properties.nonStrictWideLinesUseParallelogram ); return seed; } }; template <> - struct hash + struct hash { - std::size_t - operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance6FeaturesKHR const & physicalDeviceMaintenance6FeaturesKHR ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( 
VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance6Features const & physicalDeviceMaintenance6Features ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance6FeaturesKHR.sType ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance6FeaturesKHR.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance6FeaturesKHR.maintenance6 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance6Features.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance6Features.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance6Features.maintenance6 ); return seed; } }; template <> - struct hash + struct hash { - std::size_t - operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance6PropertiesKHR const & physicalDeviceMaintenance6PropertiesKHR ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance6Properties const & physicalDeviceMaintenance6Properties ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance6PropertiesKHR.sType ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance6PropertiesKHR.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance6PropertiesKHR.blockTexelViewCompatibleMultipleLayers ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance6PropertiesKHR.maxCombinedImageSamplerDescriptorCount ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance6PropertiesKHR.fragmentShadingRateClampCombinerInputs ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance6Properties.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance6Properties.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance6Properties.blockTexelViewCompatibleMultipleLayers ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance6Properties.maxCombinedImageSamplerDescriptorCount ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceMaintenance6Properties.fragmentShadingRateClampCombinerInputs ); return seed; } }; @@ -11086,46 +11077,46 @@ namespace std }; template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineProtectedAccessFeaturesEXT const & physicalDevicePipelineProtectedAccessFeaturesEXT ) - const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineProtectedAccessFeatures const & physicalDevicePipelineProtectedAccessFeatures ) const + VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineProtectedAccessFeaturesEXT.sType ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineProtectedAccessFeaturesEXT.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineProtectedAccessFeaturesEXT.pipelineProtectedAccess ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineProtectedAccessFeatures.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineProtectedAccessFeatures.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineProtectedAccessFeatures.pipelineProtectedAccess ); return seed; } }; template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineRobustnessFeaturesEXT const & physicalDevicePipelineRobustnessFeaturesEXT ) const - VULKAN_HPP_NOEXCEPT + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineRobustnessFeatures const & physicalDevicePipelineRobustnessFeatures ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - 
VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineRobustnessFeaturesEXT.sType ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineRobustnessFeaturesEXT.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineRobustnessFeaturesEXT.pipelineRobustness ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineRobustnessFeatures.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineRobustnessFeatures.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineRobustnessFeatures.pipelineRobustness ); return seed; } }; template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineRobustnessPropertiesEXT const & physicalDevicePipelineRobustnessPropertiesEXT ) const + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineRobustnessProperties const & physicalDevicePipelineRobustnessProperties ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineRobustnessPropertiesEXT.sType ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineRobustnessPropertiesEXT.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineRobustnessPropertiesEXT.defaultRobustnessStorageBuffers ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineRobustnessPropertiesEXT.defaultRobustnessUniformBuffers ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineRobustnessPropertiesEXT.defaultRobustnessVertexInputs ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineRobustnessPropertiesEXT.defaultRobustnessImages ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineRobustnessProperties.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineRobustnessProperties.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineRobustnessProperties.defaultRobustnessStorageBuffers ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineRobustnessProperties.defaultRobustnessUniformBuffers ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineRobustnessProperties.defaultRobustnessVertexInputs ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePipelineRobustnessProperties.defaultRobustnessImages ); return seed; } }; @@ -11348,15 +11339,15 @@ namespace std }; template <> - struct hash + struct hash { std::size_t - operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePushDescriptorPropertiesKHR const & physicalDevicePushDescriptorPropertiesKHR ) const VULKAN_HPP_NOEXCEPT + operator()( VULKAN_HPP_NAMESPACE::PhysicalDevicePushDescriptorProperties const & physicalDevicePushDescriptorProperties ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePushDescriptorPropertiesKHR.sType ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePushDescriptorPropertiesKHR.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePushDescriptorPropertiesKHR.maxPushDescriptors ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePushDescriptorProperties.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePushDescriptorProperties.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDevicePushDescriptorProperties.maxPushDescriptors ); return seed; } }; @@ -12024,15 +12015,15 @@ namespace std # endif /*VK_ENABLE_BETA_EXTENSIONS*/ template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderExpectAssumeFeaturesKHR const & physicalDeviceShaderExpectAssumeFeaturesKHR ) const - VULKAN_HPP_NOEXCEPT + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderExpectAssumeFeatures const & 
physicalDeviceShaderExpectAssumeFeatures ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderExpectAssumeFeaturesKHR.sType ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderExpectAssumeFeaturesKHR.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderExpectAssumeFeaturesKHR.shaderExpectAssume ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderExpectAssumeFeatures.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderExpectAssumeFeatures.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderExpectAssumeFeatures.shaderExpectAssume ); return seed; } }; @@ -12053,15 +12044,15 @@ namespace std }; template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderFloatControls2FeaturesKHR const & physicalDeviceShaderFloatControls2FeaturesKHR ) const + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderFloatControls2Features const & physicalDeviceShaderFloatControls2Features ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderFloatControls2FeaturesKHR.sType ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderFloatControls2FeaturesKHR.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderFloatControls2FeaturesKHR.shaderFloatControls2 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderFloatControls2Features.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderFloatControls2Features.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderFloatControls2Features.shaderFloatControls2 ); return seed; } }; @@ -12333,16 +12324,16 @@ namespace std }; template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupRotateFeaturesKHR const & physicalDeviceShaderSubgroupRotateFeaturesKHR ) const + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupRotateFeatures const & physicalDeviceShaderSubgroupRotateFeatures ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderSubgroupRotateFeaturesKHR.sType ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderSubgroupRotateFeaturesKHR.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderSubgroupRotateFeaturesKHR.shaderSubgroupRotate ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderSubgroupRotateFeaturesKHR.shaderSubgroupRotateClustered ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderSubgroupRotateFeatures.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderSubgroupRotateFeatures.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderSubgroupRotateFeatures.shaderSubgroupRotate ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceShaderSubgroupRotateFeatures.shaderSubgroupRotateClustered ); return seed; } }; @@ -12770,16 +12761,31 @@ namespace std }; template <> - struct hash + struct hash { - std::size_t operator()( - VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorFeaturesKHR const & physicalDeviceVertexAttributeDivisorFeaturesKHR ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorFeatures const & physicalDeviceVertexAttributeDivisorFeatures ) const + VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVertexAttributeDivisorFeaturesKHR.sType ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVertexAttributeDivisorFeaturesKHR.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, 
physicalDeviceVertexAttributeDivisorFeaturesKHR.vertexAttributeInstanceRateDivisor ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVertexAttributeDivisorFeaturesKHR.vertexAttributeInstanceRateZeroDivisor ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVertexAttributeDivisorFeatures.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVertexAttributeDivisorFeatures.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVertexAttributeDivisorFeatures.vertexAttributeInstanceRateDivisor ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVertexAttributeDivisorFeatures.vertexAttributeInstanceRateZeroDivisor ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorProperties const & physicalDeviceVertexAttributeDivisorProperties ) const + VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVertexAttributeDivisorProperties.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVertexAttributeDivisorProperties.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVertexAttributeDivisorProperties.maxVertexAttribDivisor ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVertexAttributeDivisorProperties.supportsNonZeroFirstInstance ); return seed; } }; @@ -12798,21 +12804,6 @@ namespace std } }; - template <> - struct hash - { - std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorPropertiesKHR const & physicalDeviceVertexAttributeDivisorPropertiesKHR ) - const VULKAN_HPP_NOEXCEPT - { - std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVertexAttributeDivisorPropertiesKHR.sType ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVertexAttributeDivisorPropertiesKHR.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVertexAttributeDivisorPropertiesKHR.maxVertexAttribDivisor ); - VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVertexAttributeDivisorPropertiesKHR.supportsNonZeroFirstInstance ); - return seed; - } - }; - template <> struct hash { @@ -13202,6 +13193,79 @@ namespace std } }; + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan14Features const & physicalDeviceVulkan14Features ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Features.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Features.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Features.globalPriorityQuery ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Features.shaderSubgroupRotate ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Features.shaderSubgroupRotateClustered ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Features.shaderFloatControls2 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Features.shaderExpectAssume ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Features.rectangularLines ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Features.bresenhamLines ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Features.smoothLines ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Features.stippledRectangularLines ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Features.stippledBresenhamLines ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Features.stippledSmoothLines ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Features.vertexAttributeInstanceRateDivisor ); + VULKAN_HPP_HASH_COMBINE( seed, 
physicalDeviceVulkan14Features.vertexAttributeInstanceRateZeroDivisor ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Features.indexTypeUint8 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Features.dynamicRenderingLocalRead ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Features.maintenance5 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Features.maintenance6 ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Features.pipelineProtectedAccess ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Features.pipelineRobustness ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Features.hostImageCopy ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Features.pushDescriptor ); + return seed; + } + }; + + template <> + struct hash + { + std::size_t operator()( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan14Properties const & physicalDeviceVulkan14Properties ) const VULKAN_HPP_NOEXCEPT + { + std::size_t seed = 0; + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Properties.sType ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Properties.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Properties.lineSubPixelPrecisionBits ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Properties.maxVertexAttribDivisor ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Properties.supportsNonZeroFirstInstance ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Properties.maxPushDescriptors ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Properties.dynamicRenderingLocalReadDepthStencilAttachments ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Properties.dynamicRenderingLocalReadMultisampledAttachments ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Properties.earlyFragmentMultisampleCoverageAfterSampleCounting ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Properties.earlyFragmentSampleMaskTestBeforeSampleCounting ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Properties.depthStencilSwizzleOneSupport ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Properties.polygonModePointSize ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Properties.nonStrictSinglePixelWideLinesUseParallelogram ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Properties.nonStrictWideLinesUseParallelogram ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Properties.blockTexelViewCompatibleMultipleLayers ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Properties.maxCombinedImageSamplerDescriptorCount ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Properties.fragmentShadingRateClampCombinerInputs ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Properties.defaultRobustnessStorageBuffers ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Properties.defaultRobustnessUniformBuffers ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Properties.defaultRobustnessVertexInputs ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Properties.defaultRobustnessImages ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Properties.copySrcLayoutCount ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Properties.pCopySrcLayouts ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Properties.copyDstLayoutCount ); + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Properties.pCopyDstLayouts ); + for ( size_t i = 0; i < VK_UUID_SIZE; ++i ) + { + VULKAN_HPP_HASH_COMBINE( seed, 
physicalDeviceVulkan14Properties.optimalTilingLayoutUUID[i] ); + } + VULKAN_HPP_HASH_COMBINE( seed, physicalDeviceVulkan14Properties.identicalMemoryTypeRequirements ); + return seed; + } + }; + template <> struct hash { @@ -13528,14 +13592,14 @@ namespace std }; template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineCreateFlags2CreateInfoKHR const & pipelineCreateFlags2CreateInfoKHR ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineCreateFlags2CreateInfo const & pipelineCreateFlags2CreateInfo ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, pipelineCreateFlags2CreateInfoKHR.sType ); - VULKAN_HPP_HASH_COMBINE( seed, pipelineCreateFlags2CreateInfoKHR.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, pipelineCreateFlags2CreateInfoKHR.flags ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineCreateFlags2CreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineCreateFlags2CreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineCreateFlags2CreateInfo.flags ); return seed; } }; @@ -13774,18 +13838,18 @@ namespace std }; template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineRasterizationLineStateCreateInfoKHR const & pipelineRasterizationLineStateCreateInfoKHR ) const - VULKAN_HPP_NOEXCEPT + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PipelineRasterizationLineStateCreateInfo const & pipelineRasterizationLineStateCreateInfo ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationLineStateCreateInfoKHR.sType ); - VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationLineStateCreateInfoKHR.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationLineStateCreateInfoKHR.lineRasterizationMode ); - VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationLineStateCreateInfoKHR.stippledLineEnable ); - VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationLineStateCreateInfoKHR.lineStippleFactor ); - VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationLineStateCreateInfoKHR.lineStipplePattern ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationLineStateCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationLineStateCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationLineStateCreateInfo.lineRasterizationMode ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationLineStateCreateInfo.stippledLineEnable ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationLineStateCreateInfo.lineStippleFactor ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineRasterizationLineStateCreateInfo.lineStipplePattern ); return seed; } }; @@ -13866,17 +13930,17 @@ namespace std }; template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineRobustnessCreateInfoEXT const & pipelineRobustnessCreateInfoEXT ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineRobustnessCreateInfo const & pipelineRobustnessCreateInfo ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, pipelineRobustnessCreateInfoEXT.sType ); - VULKAN_HPP_HASH_COMBINE( seed, pipelineRobustnessCreateInfoEXT.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, pipelineRobustnessCreateInfoEXT.storageBuffers ); - VULKAN_HPP_HASH_COMBINE( seed, pipelineRobustnessCreateInfoEXT.uniformBuffers ); - VULKAN_HPP_HASH_COMBINE( seed, pipelineRobustnessCreateInfoEXT.vertexInputs ); - VULKAN_HPP_HASH_COMBINE( seed, pipelineRobustnessCreateInfoEXT.images ); + 
VULKAN_HPP_HASH_COMBINE( seed, pipelineRobustnessCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineRobustnessCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineRobustnessCreateInfo.storageBuffers ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineRobustnessCreateInfo.uniformBuffers ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineRobustnessCreateInfo.vertexInputs ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineRobustnessCreateInfo.images ); return seed; } }; @@ -13960,29 +14024,28 @@ namespace std }; template <> - struct hash + struct hash { - std::size_t - operator()( VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionKHR const & vertexInputBindingDivisorDescriptionKHR ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescription const & vertexInputBindingDivisorDescription ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, vertexInputBindingDivisorDescriptionKHR.binding ); - VULKAN_HPP_HASH_COMBINE( seed, vertexInputBindingDivisorDescriptionKHR.divisor ); + VULKAN_HPP_HASH_COMBINE( seed, vertexInputBindingDivisorDescription.binding ); + VULKAN_HPP_HASH_COMBINE( seed, vertexInputBindingDivisorDescription.divisor ); return seed; } }; template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::PipelineVertexInputDivisorStateCreateInfoKHR const & pipelineVertexInputDivisorStateCreateInfoKHR ) const - VULKAN_HPP_NOEXCEPT + std::size_t + operator()( VULKAN_HPP_NAMESPACE::PipelineVertexInputDivisorStateCreateInfo const & pipelineVertexInputDivisorStateCreateInfo ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, pipelineVertexInputDivisorStateCreateInfoKHR.sType ); - VULKAN_HPP_HASH_COMBINE( seed, pipelineVertexInputDivisorStateCreateInfoKHR.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, pipelineVertexInputDivisorStateCreateInfoKHR.vertexBindingDivisorCount ); - VULKAN_HPP_HASH_COMBINE( seed, pipelineVertexInputDivisorStateCreateInfoKHR.pVertexBindingDivisors ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineVertexInputDivisorStateCreateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineVertexInputDivisorStateCreateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineVertexInputDivisorStateCreateInfo.vertexBindingDivisorCount ); + VULKAN_HPP_HASH_COMBINE( seed, pipelineVertexInputDivisorStateCreateInfo.pVertexBindingDivisors ); return seed; } }; @@ -14273,18 +14336,18 @@ namespace std }; template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::PushConstantsInfoKHR const & pushConstantsInfoKHR ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::PushConstantsInfo const & pushConstantsInfo ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, pushConstantsInfoKHR.sType ); - VULKAN_HPP_HASH_COMBINE( seed, pushConstantsInfoKHR.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, pushConstantsInfoKHR.layout ); - VULKAN_HPP_HASH_COMBINE( seed, pushConstantsInfoKHR.stageFlags ); - VULKAN_HPP_HASH_COMBINE( seed, pushConstantsInfoKHR.offset ); - VULKAN_HPP_HASH_COMBINE( seed, pushConstantsInfoKHR.size ); - VULKAN_HPP_HASH_COMBINE( seed, pushConstantsInfoKHR.pValues ); + VULKAN_HPP_HASH_COMBINE( seed, pushConstantsInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pushConstantsInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pushConstantsInfo.layout ); + VULKAN_HPP_HASH_COMBINE( seed, pushConstantsInfo.stageFlags ); + VULKAN_HPP_HASH_COMBINE( seed, pushConstantsInfo.offset ); + 
VULKAN_HPP_HASH_COMBINE( seed, pushConstantsInfo.size ); + VULKAN_HPP_HASH_COMBINE( seed, pushConstantsInfo.pValues ); return seed; } }; @@ -14310,34 +14373,34 @@ namespace std }; template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::PushDescriptorSetInfoKHR const & pushDescriptorSetInfoKHR ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::PushDescriptorSetInfo const & pushDescriptorSetInfo ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, pushDescriptorSetInfoKHR.sType ); - VULKAN_HPP_HASH_COMBINE( seed, pushDescriptorSetInfoKHR.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, pushDescriptorSetInfoKHR.stageFlags ); - VULKAN_HPP_HASH_COMBINE( seed, pushDescriptorSetInfoKHR.layout ); - VULKAN_HPP_HASH_COMBINE( seed, pushDescriptorSetInfoKHR.set ); - VULKAN_HPP_HASH_COMBINE( seed, pushDescriptorSetInfoKHR.descriptorWriteCount ); - VULKAN_HPP_HASH_COMBINE( seed, pushDescriptorSetInfoKHR.pDescriptorWrites ); + VULKAN_HPP_HASH_COMBINE( seed, pushDescriptorSetInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pushDescriptorSetInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pushDescriptorSetInfo.stageFlags ); + VULKAN_HPP_HASH_COMBINE( seed, pushDescriptorSetInfo.layout ); + VULKAN_HPP_HASH_COMBINE( seed, pushDescriptorSetInfo.set ); + VULKAN_HPP_HASH_COMBINE( seed, pushDescriptorSetInfo.descriptorWriteCount ); + VULKAN_HPP_HASH_COMBINE( seed, pushDescriptorSetInfo.pDescriptorWrites ); return seed; } }; template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::PushDescriptorSetWithTemplateInfoKHR const & pushDescriptorSetWithTemplateInfoKHR ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::PushDescriptorSetWithTemplateInfo const & pushDescriptorSetWithTemplateInfo ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, pushDescriptorSetWithTemplateInfoKHR.sType ); - VULKAN_HPP_HASH_COMBINE( seed, pushDescriptorSetWithTemplateInfoKHR.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, pushDescriptorSetWithTemplateInfoKHR.descriptorUpdateTemplate ); - VULKAN_HPP_HASH_COMBINE( seed, pushDescriptorSetWithTemplateInfoKHR.layout ); - VULKAN_HPP_HASH_COMBINE( seed, pushDescriptorSetWithTemplateInfoKHR.set ); - VULKAN_HPP_HASH_COMBINE( seed, pushDescriptorSetWithTemplateInfoKHR.pData ); + VULKAN_HPP_HASH_COMBINE( seed, pushDescriptorSetWithTemplateInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, pushDescriptorSetWithTemplateInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, pushDescriptorSetWithTemplateInfo.descriptorUpdateTemplate ); + VULKAN_HPP_HASH_COMBINE( seed, pushDescriptorSetWithTemplateInfo.layout ); + VULKAN_HPP_HASH_COMBINE( seed, pushDescriptorSetWithTemplateInfo.set ); + VULKAN_HPP_HASH_COMBINE( seed, pushDescriptorSetWithTemplateInfo.pData ); return seed; } }; @@ -14441,18 +14504,17 @@ namespace std }; template <> - struct hash + struct hash { - std::size_t - operator()( VULKAN_HPP_NAMESPACE::QueueFamilyGlobalPriorityPropertiesKHR const & queueFamilyGlobalPriorityPropertiesKHR ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::QueueFamilyGlobalPriorityProperties const & queueFamilyGlobalPriorityProperties ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, queueFamilyGlobalPriorityPropertiesKHR.sType ); - VULKAN_HPP_HASH_COMBINE( seed, queueFamilyGlobalPriorityPropertiesKHR.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, queueFamilyGlobalPriorityPropertiesKHR.priorityCount ); - for ( 
size_t i = 0; i < VK_MAX_GLOBAL_PRIORITY_SIZE_KHR; ++i ) + VULKAN_HPP_HASH_COMBINE( seed, queueFamilyGlobalPriorityProperties.sType ); + VULKAN_HPP_HASH_COMBINE( seed, queueFamilyGlobalPriorityProperties.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, queueFamilyGlobalPriorityProperties.priorityCount ); + for ( size_t i = 0; i < VK_MAX_GLOBAL_PRIORITY_SIZE; ++i ) { - VULKAN_HPP_HASH_COMBINE( seed, queueFamilyGlobalPriorityPropertiesKHR.priorities[i] ); + VULKAN_HPP_HASH_COMBINE( seed, queueFamilyGlobalPriorityProperties.priorities[i] ); } return seed; } @@ -15012,32 +15074,32 @@ namespace std }; template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::RenderingAreaInfoKHR const & renderingAreaInfoKHR ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::RenderingAreaInfo const & renderingAreaInfo ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, renderingAreaInfoKHR.sType ); - VULKAN_HPP_HASH_COMBINE( seed, renderingAreaInfoKHR.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, renderingAreaInfoKHR.viewMask ); - VULKAN_HPP_HASH_COMBINE( seed, renderingAreaInfoKHR.colorAttachmentCount ); - VULKAN_HPP_HASH_COMBINE( seed, renderingAreaInfoKHR.pColorAttachmentFormats ); - VULKAN_HPP_HASH_COMBINE( seed, renderingAreaInfoKHR.depthAttachmentFormat ); - VULKAN_HPP_HASH_COMBINE( seed, renderingAreaInfoKHR.stencilAttachmentFormat ); + VULKAN_HPP_HASH_COMBINE( seed, renderingAreaInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, renderingAreaInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, renderingAreaInfo.viewMask ); + VULKAN_HPP_HASH_COMBINE( seed, renderingAreaInfo.colorAttachmentCount ); + VULKAN_HPP_HASH_COMBINE( seed, renderingAreaInfo.pColorAttachmentFormats ); + VULKAN_HPP_HASH_COMBINE( seed, renderingAreaInfo.depthAttachmentFormat ); + VULKAN_HPP_HASH_COMBINE( seed, renderingAreaInfo.stencilAttachmentFormat ); return seed; } }; template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::RenderingAttachmentLocationInfoKHR const & renderingAttachmentLocationInfoKHR ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::RenderingAttachmentLocationInfo const & renderingAttachmentLocationInfo ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, renderingAttachmentLocationInfoKHR.sType ); - VULKAN_HPP_HASH_COMBINE( seed, renderingAttachmentLocationInfoKHR.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, renderingAttachmentLocationInfoKHR.colorAttachmentCount ); - VULKAN_HPP_HASH_COMBINE( seed, renderingAttachmentLocationInfoKHR.pColorAttachmentLocations ); + VULKAN_HPP_HASH_COMBINE( seed, renderingAttachmentLocationInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, renderingAttachmentLocationInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, renderingAttachmentLocationInfo.colorAttachmentCount ); + VULKAN_HPP_HASH_COMBINE( seed, renderingAttachmentLocationInfo.pColorAttachmentLocations ); return seed; } }; @@ -15094,17 +15156,17 @@ namespace std }; template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::RenderingInputAttachmentIndexInfoKHR const & renderingInputAttachmentIndexInfoKHR ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::RenderingInputAttachmentIndexInfo const & renderingInputAttachmentIndexInfo ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, renderingInputAttachmentIndexInfoKHR.sType ); - VULKAN_HPP_HASH_COMBINE( seed, 
renderingInputAttachmentIndexInfoKHR.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, renderingInputAttachmentIndexInfoKHR.colorAttachmentCount ); - VULKAN_HPP_HASH_COMBINE( seed, renderingInputAttachmentIndexInfoKHR.pColorAttachmentInputIndices ); - VULKAN_HPP_HASH_COMBINE( seed, renderingInputAttachmentIndexInfoKHR.pDepthInputAttachmentIndex ); - VULKAN_HPP_HASH_COMBINE( seed, renderingInputAttachmentIndexInfoKHR.pStencilInputAttachmentIndex ); + VULKAN_HPP_HASH_COMBINE( seed, renderingInputAttachmentIndexInfo.sType ); + VULKAN_HPP_HASH_COMBINE( seed, renderingInputAttachmentIndexInfo.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, renderingInputAttachmentIndexInfo.colorAttachmentCount ); + VULKAN_HPP_HASH_COMBINE( seed, renderingInputAttachmentIndexInfo.pColorAttachmentInputIndices ); + VULKAN_HPP_HASH_COMBINE( seed, renderingInputAttachmentIndexInfo.pDepthInputAttachmentIndex ); + VULKAN_HPP_HASH_COMBINE( seed, renderingInputAttachmentIndexInfo.pStencilInputAttachmentIndex ); return seed; } }; @@ -15815,27 +15877,27 @@ namespace std }; template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::SubresourceHostMemcpySizeEXT const & subresourceHostMemcpySizeEXT ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::SubresourceHostMemcpySize const & subresourceHostMemcpySize ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, subresourceHostMemcpySizeEXT.sType ); - VULKAN_HPP_HASH_COMBINE( seed, subresourceHostMemcpySizeEXT.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, subresourceHostMemcpySizeEXT.size ); + VULKAN_HPP_HASH_COMBINE( seed, subresourceHostMemcpySize.sType ); + VULKAN_HPP_HASH_COMBINE( seed, subresourceHostMemcpySize.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, subresourceHostMemcpySize.size ); return seed; } }; template <> - struct hash + struct hash { - std::size_t operator()( VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR const & subresourceLayout2KHR ) const VULKAN_HPP_NOEXCEPT + std::size_t operator()( VULKAN_HPP_NAMESPACE::SubresourceLayout2 const & subresourceLayout2 ) const VULKAN_HPP_NOEXCEPT { std::size_t seed = 0; - VULKAN_HPP_HASH_COMBINE( seed, subresourceLayout2KHR.sType ); - VULKAN_HPP_HASH_COMBINE( seed, subresourceLayout2KHR.pNext ); - VULKAN_HPP_HASH_COMBINE( seed, subresourceLayout2KHR.subresourceLayout ); + VULKAN_HPP_HASH_COMBINE( seed, subresourceLayout2.sType ); + VULKAN_HPP_HASH_COMBINE( seed, subresourceLayout2.pNext ); + VULKAN_HPP_HASH_COMBINE( seed, subresourceLayout2.subresourceLayout ); return seed; } }; diff --git a/third_party/vulkan/vulkan_raii.hpp b/third_party/vulkan/vulkan_raii.hpp index c5c1386..3cf1e0e 100644 --- a/third_party/vulkan/vulkan_raii.hpp +++ b/third_party/vulkan/vulkan_raii.hpp @@ -871,6 +871,29 @@ namespace VULKAN_HPP_NAMESPACE vkGetDeviceImageSparseMemoryRequirements = PFN_vkGetDeviceImageSparseMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetDeviceImageSparseMemoryRequirements" ) ); + //=== VK_VERSION_1_4 === + vkCmdSetLineStipple = PFN_vkCmdSetLineStipple( vkGetDeviceProcAddr( device, "vkCmdSetLineStipple" ) ); + vkMapMemory2 = PFN_vkMapMemory2( vkGetDeviceProcAddr( device, "vkMapMemory2" ) ); + vkUnmapMemory2 = PFN_vkUnmapMemory2( vkGetDeviceProcAddr( device, "vkUnmapMemory2" ) ); + vkCmdBindIndexBuffer2 = PFN_vkCmdBindIndexBuffer2( vkGetDeviceProcAddr( device, "vkCmdBindIndexBuffer2" ) ); + vkGetRenderingAreaGranularity = PFN_vkGetRenderingAreaGranularity( vkGetDeviceProcAddr( device, "vkGetRenderingAreaGranularity" ) ); + 
vkGetDeviceImageSubresourceLayout = PFN_vkGetDeviceImageSubresourceLayout( vkGetDeviceProcAddr( device, "vkGetDeviceImageSubresourceLayout" ) ); + vkGetImageSubresourceLayout2 = PFN_vkGetImageSubresourceLayout2( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout2" ) ); + vkCmdPushDescriptorSet = PFN_vkCmdPushDescriptorSet( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSet" ) ); + vkCmdPushDescriptorSetWithTemplate = PFN_vkCmdPushDescriptorSetWithTemplate( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetWithTemplate" ) ); + vkCmdSetRenderingAttachmentLocations = + PFN_vkCmdSetRenderingAttachmentLocations( vkGetDeviceProcAddr( device, "vkCmdSetRenderingAttachmentLocations" ) ); + vkCmdSetRenderingInputAttachmentIndices = + PFN_vkCmdSetRenderingInputAttachmentIndices( vkGetDeviceProcAddr( device, "vkCmdSetRenderingInputAttachmentIndices" ) ); + vkCmdBindDescriptorSets2 = PFN_vkCmdBindDescriptorSets2( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorSets2" ) ); + vkCmdPushConstants2 = PFN_vkCmdPushConstants2( vkGetDeviceProcAddr( device, "vkCmdPushConstants2" ) ); + vkCmdPushDescriptorSet2 = PFN_vkCmdPushDescriptorSet2( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSet2" ) ); + vkCmdPushDescriptorSetWithTemplate2 = PFN_vkCmdPushDescriptorSetWithTemplate2( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetWithTemplate2" ) ); + vkCopyMemoryToImage = PFN_vkCopyMemoryToImage( vkGetDeviceProcAddr( device, "vkCopyMemoryToImage" ) ); + vkCopyImageToMemory = PFN_vkCopyImageToMemory( vkGetDeviceProcAddr( device, "vkCopyImageToMemory" ) ); + vkCopyImageToImage = PFN_vkCopyImageToImage( vkGetDeviceProcAddr( device, "vkCopyImageToImage" ) ); + vkTransitionImageLayout = PFN_vkTransitionImageLayout( vkGetDeviceProcAddr( device, "vkTransitionImageLayout" ) ); + //=== VK_KHR_swapchain === vkCreateSwapchainKHR = PFN_vkCreateSwapchainKHR( vkGetDeviceProcAddr( device, "vkCreateSwapchainKHR" ) ); vkDestroySwapchainKHR = PFN_vkDestroySwapchainKHR( vkGetDeviceProcAddr( device, "vkDestroySwapchainKHR" ) ); @@ -926,14 +949,9 @@ namespace VULKAN_HPP_NAMESPACE vkCmdCuLaunchKernelNVX = PFN_vkCmdCuLaunchKernelNVX( vkGetDeviceProcAddr( device, "vkCmdCuLaunchKernelNVX" ) ); //=== VK_NVX_image_view_handle === -<<<<<<< HEAD - vkGetImageViewHandleNVX = PFN_vkGetImageViewHandleNVX( vkGetDeviceProcAddr( device, "vkGetImageViewHandleNVX" ) ); - vkGetImageViewAddressNVX = PFN_vkGetImageViewAddressNVX( vkGetDeviceProcAddr( device, "vkGetImageViewAddressNVX" ) ); -======= vkGetImageViewHandleNVX = PFN_vkGetImageViewHandleNVX( vkGetDeviceProcAddr( device, "vkGetImageViewHandleNVX" ) ); vkGetImageViewHandle64NVX = PFN_vkGetImageViewHandle64NVX( vkGetDeviceProcAddr( device, "vkGetImageViewHandle64NVX" ) ); vkGetImageViewAddressNVX = PFN_vkGetImageViewAddressNVX( vkGetDeviceProcAddr( device, "vkGetImageViewAddressNVX" ) ); ->>>>>>> indev //=== VK_AMD_draw_indirect_count === vkCmdDrawIndirectCountAMD = PFN_vkCmdDrawIndirectCountAMD( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCountAMD" ) ); @@ -998,8 +1016,12 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_KHR_push_descriptor === vkCmdPushDescriptorSetKHR = PFN_vkCmdPushDescriptorSetKHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetKHR" ) ); + if ( !vkCmdPushDescriptorSet ) + vkCmdPushDescriptorSet = vkCmdPushDescriptorSetKHR; vkCmdPushDescriptorSetWithTemplateKHR = PFN_vkCmdPushDescriptorSetWithTemplateKHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetWithTemplateKHR" ) ); + if ( !vkCmdPushDescriptorSetWithTemplate ) + vkCmdPushDescriptorSetWithTemplate = 
vkCmdPushDescriptorSetWithTemplateKHR; //=== VK_EXT_conditional_rendering === vkCmdBeginConditionalRenderingEXT = PFN_vkCmdBeginConditionalRenderingEXT( vkGetDeviceProcAddr( device, "vkCmdBeginConditionalRenderingEXT" ) ); @@ -1282,8 +1304,12 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_KHR_dynamic_rendering_local_read === vkCmdSetRenderingAttachmentLocationsKHR = PFN_vkCmdSetRenderingAttachmentLocationsKHR( vkGetDeviceProcAddr( device, "vkCmdSetRenderingAttachmentLocationsKHR" ) ); + if ( !vkCmdSetRenderingAttachmentLocations ) + vkCmdSetRenderingAttachmentLocations = vkCmdSetRenderingAttachmentLocationsKHR; vkCmdSetRenderingInputAttachmentIndicesKHR = PFN_vkCmdSetRenderingInputAttachmentIndicesKHR( vkGetDeviceProcAddr( device, "vkCmdSetRenderingInputAttachmentIndicesKHR" ) ); + if ( !vkCmdSetRenderingInputAttachmentIndices ) + vkCmdSetRenderingInputAttachmentIndices = vkCmdSetRenderingInputAttachmentIndicesKHR; //=== VK_EXT_buffer_device_address === vkGetBufferDeviceAddressEXT = PFN_vkGetBufferDeviceAddressEXT( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddressEXT" ) ); @@ -1315,8 +1341,8 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_EXT_line_rasterization === vkCmdSetLineStippleEXT = PFN_vkCmdSetLineStippleEXT( vkGetDeviceProcAddr( device, "vkCmdSetLineStippleEXT" ) ); - if ( !vkCmdSetLineStippleKHR ) - vkCmdSetLineStippleKHR = vkCmdSetLineStippleEXT; + if ( !vkCmdSetLineStipple ) + vkCmdSetLineStipple = vkCmdSetLineStippleEXT; //=== VK_EXT_host_query_reset === vkResetQueryPoolEXT = PFN_vkResetQueryPoolEXT( vkGetDeviceProcAddr( device, "vkResetQueryPoolEXT" ) ); @@ -1378,17 +1404,29 @@ namespace VULKAN_HPP_NAMESPACE PFN_vkGetPipelineExecutableInternalRepresentationsKHR( vkGetDeviceProcAddr( device, "vkGetPipelineExecutableInternalRepresentationsKHR" ) ); //=== VK_EXT_host_image_copy === - vkCopyMemoryToImageEXT = PFN_vkCopyMemoryToImageEXT( vkGetDeviceProcAddr( device, "vkCopyMemoryToImageEXT" ) ); - vkCopyImageToMemoryEXT = PFN_vkCopyImageToMemoryEXT( vkGetDeviceProcAddr( device, "vkCopyImageToMemoryEXT" ) ); - vkCopyImageToImageEXT = PFN_vkCopyImageToImageEXT( vkGetDeviceProcAddr( device, "vkCopyImageToImageEXT" ) ); - vkTransitionImageLayoutEXT = PFN_vkTransitionImageLayoutEXT( vkGetDeviceProcAddr( device, "vkTransitionImageLayoutEXT" ) ); + vkCopyMemoryToImageEXT = PFN_vkCopyMemoryToImageEXT( vkGetDeviceProcAddr( device, "vkCopyMemoryToImageEXT" ) ); + if ( !vkCopyMemoryToImage ) + vkCopyMemoryToImage = vkCopyMemoryToImageEXT; + vkCopyImageToMemoryEXT = PFN_vkCopyImageToMemoryEXT( vkGetDeviceProcAddr( device, "vkCopyImageToMemoryEXT" ) ); + if ( !vkCopyImageToMemory ) + vkCopyImageToMemory = vkCopyImageToMemoryEXT; + vkCopyImageToImageEXT = PFN_vkCopyImageToImageEXT( vkGetDeviceProcAddr( device, "vkCopyImageToImageEXT" ) ); + if ( !vkCopyImageToImage ) + vkCopyImageToImage = vkCopyImageToImageEXT; + vkTransitionImageLayoutEXT = PFN_vkTransitionImageLayoutEXT( vkGetDeviceProcAddr( device, "vkTransitionImageLayoutEXT" ) ); + if ( !vkTransitionImageLayout ) + vkTransitionImageLayout = vkTransitionImageLayoutEXT; vkGetImageSubresourceLayout2EXT = PFN_vkGetImageSubresourceLayout2EXT( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout2EXT" ) ); - if ( !vkGetImageSubresourceLayout2KHR ) - vkGetImageSubresourceLayout2KHR = vkGetImageSubresourceLayout2EXT; + if ( !vkGetImageSubresourceLayout2 ) + vkGetImageSubresourceLayout2 = vkGetImageSubresourceLayout2EXT; //=== VK_KHR_map_memory2 === - vkMapMemory2KHR = PFN_vkMapMemory2KHR( vkGetDeviceProcAddr( device, "vkMapMemory2KHR" ) ); + 
vkMapMemory2KHR = PFN_vkMapMemory2KHR( vkGetDeviceProcAddr( device, "vkMapMemory2KHR" ) ); + if ( !vkMapMemory2 ) + vkMapMemory2 = vkMapMemory2KHR; vkUnmapMemory2KHR = PFN_vkUnmapMemory2KHR( vkGetDeviceProcAddr( device, "vkUnmapMemory2KHR" ) ); + if ( !vkUnmapMemory2 ) + vkUnmapMemory2 = vkUnmapMemory2KHR; //=== VK_EXT_swapchain_maintenance1 === vkReleaseSwapchainImagesEXT = PFN_vkReleaseSwapchainImagesEXT( vkGetDeviceProcAddr( device, "vkReleaseSwapchainImagesEXT" ) ); @@ -1684,11 +1722,19 @@ namespace VULKAN_HPP_NAMESPACE vkCmdOpticalFlowExecuteNV = PFN_vkCmdOpticalFlowExecuteNV( vkGetDeviceProcAddr( device, "vkCmdOpticalFlowExecuteNV" ) ); //=== VK_KHR_maintenance5 === - vkCmdBindIndexBuffer2KHR = PFN_vkCmdBindIndexBuffer2KHR( vkGetDeviceProcAddr( device, "vkCmdBindIndexBuffer2KHR" ) ); + vkCmdBindIndexBuffer2KHR = PFN_vkCmdBindIndexBuffer2KHR( vkGetDeviceProcAddr( device, "vkCmdBindIndexBuffer2KHR" ) ); + if ( !vkCmdBindIndexBuffer2 ) + vkCmdBindIndexBuffer2 = vkCmdBindIndexBuffer2KHR; vkGetRenderingAreaGranularityKHR = PFN_vkGetRenderingAreaGranularityKHR( vkGetDeviceProcAddr( device, "vkGetRenderingAreaGranularityKHR" ) ); + if ( !vkGetRenderingAreaGranularity ) + vkGetRenderingAreaGranularity = vkGetRenderingAreaGranularityKHR; vkGetDeviceImageSubresourceLayoutKHR = PFN_vkGetDeviceImageSubresourceLayoutKHR( vkGetDeviceProcAddr( device, "vkGetDeviceImageSubresourceLayoutKHR" ) ); + if ( !vkGetDeviceImageSubresourceLayout ) + vkGetDeviceImageSubresourceLayout = vkGetDeviceImageSubresourceLayoutKHR; vkGetImageSubresourceLayout2KHR = PFN_vkGetImageSubresourceLayout2KHR( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout2KHR" ) ); + if ( !vkGetImageSubresourceLayout2 ) + vkGetImageSubresourceLayout2 = vkGetImageSubresourceLayout2KHR; //=== VK_AMD_anti_lag === vkAntiLagUpdateAMD = PFN_vkAntiLagUpdateAMD( vkGetDeviceProcAddr( device, "vkAntiLagUpdateAMD" ) ); @@ -1730,16 +1776,26 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_KHR_line_rasterization === vkCmdSetLineStippleKHR = PFN_vkCmdSetLineStippleKHR( vkGetDeviceProcAddr( device, "vkCmdSetLineStippleKHR" ) ); + if ( !vkCmdSetLineStipple ) + vkCmdSetLineStipple = vkCmdSetLineStippleKHR; //=== VK_KHR_calibrated_timestamps === vkGetCalibratedTimestampsKHR = PFN_vkGetCalibratedTimestampsKHR( vkGetDeviceProcAddr( device, "vkGetCalibratedTimestampsKHR" ) ); //=== VK_KHR_maintenance6 === vkCmdBindDescriptorSets2KHR = PFN_vkCmdBindDescriptorSets2KHR( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorSets2KHR" ) ); - vkCmdPushConstants2KHR = PFN_vkCmdPushConstants2KHR( vkGetDeviceProcAddr( device, "vkCmdPushConstants2KHR" ) ); - vkCmdPushDescriptorSet2KHR = PFN_vkCmdPushDescriptorSet2KHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSet2KHR" ) ); + if ( !vkCmdBindDescriptorSets2 ) + vkCmdBindDescriptorSets2 = vkCmdBindDescriptorSets2KHR; + vkCmdPushConstants2KHR = PFN_vkCmdPushConstants2KHR( vkGetDeviceProcAddr( device, "vkCmdPushConstants2KHR" ) ); + if ( !vkCmdPushConstants2 ) + vkCmdPushConstants2 = vkCmdPushConstants2KHR; + vkCmdPushDescriptorSet2KHR = PFN_vkCmdPushDescriptorSet2KHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSet2KHR" ) ); + if ( !vkCmdPushDescriptorSet2 ) + vkCmdPushDescriptorSet2 = vkCmdPushDescriptorSet2KHR; vkCmdPushDescriptorSetWithTemplate2KHR = PFN_vkCmdPushDescriptorSetWithTemplate2KHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetWithTemplate2KHR" ) ); + if ( !vkCmdPushDescriptorSetWithTemplate2 ) + vkCmdPushDescriptorSetWithTemplate2 = vkCmdPushDescriptorSetWithTemplate2KHR; 
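      // --- Illustrative sketch, not part of the generated header -------------------
      // The hunks above all follow one promotion pattern: a command that became core
      // in Vulkan 1.4 is loaded under its core name first, and if the driver only
      // exposes the KHR alias, that address is reused for the core pointer (the two
      // aliases share one signature). A minimal standalone version of the idea could
      // look as follows; loadBindDescriptorSets2 and its parameters are hypothetical
      // and assume headers that already define the Vulkan 1.4 entry points.
      #include <vulkan/vulkan.h>

      static void loadBindDescriptorSets2( VkDevice                          device,
                                           PFN_vkCmdBindDescriptorSets2 &    core,
                                           PFN_vkCmdBindDescriptorSets2KHR & alias )
      {
        // query both the core and the extension symbol from the device
        core  = reinterpret_cast<PFN_vkCmdBindDescriptorSets2>( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorSets2" ) );
        alias = reinterpret_cast<PFN_vkCmdBindDescriptorSets2KHR>( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorSets2KHR" ) );
        if ( !core )
          core = alias;  // fall back to the extension entry point on pre-1.4 drivers
      }
      // ------------------------------------------------------------------------------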
vkCmdSetDescriptorBufferOffsets2EXT = PFN_vkCmdSetDescriptorBufferOffsets2EXT( vkGetDeviceProcAddr( device, "vkCmdSetDescriptorBufferOffsets2EXT" ) ); vkCmdBindDescriptorBufferEmbeddedSamplers2EXT = PFN_vkCmdBindDescriptorBufferEmbeddedSamplers2EXT( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorBufferEmbeddedSamplers2EXT" ) ); @@ -1954,6 +2010,27 @@ namespace VULKAN_HPP_NAMESPACE PFN_vkGetDeviceImageMemoryRequirements vkGetDeviceImageMemoryRequirements = 0; PFN_vkGetDeviceImageSparseMemoryRequirements vkGetDeviceImageSparseMemoryRequirements = 0; + //=== VK_VERSION_1_4 === + PFN_vkCmdSetLineStipple vkCmdSetLineStipple = 0; + PFN_vkMapMemory2 vkMapMemory2 = 0; + PFN_vkUnmapMemory2 vkUnmapMemory2 = 0; + PFN_vkCmdBindIndexBuffer2 vkCmdBindIndexBuffer2 = 0; + PFN_vkGetRenderingAreaGranularity vkGetRenderingAreaGranularity = 0; + PFN_vkGetDeviceImageSubresourceLayout vkGetDeviceImageSubresourceLayout = 0; + PFN_vkGetImageSubresourceLayout2 vkGetImageSubresourceLayout2 = 0; + PFN_vkCmdPushDescriptorSet vkCmdPushDescriptorSet = 0; + PFN_vkCmdPushDescriptorSetWithTemplate vkCmdPushDescriptorSetWithTemplate = 0; + PFN_vkCmdSetRenderingAttachmentLocations vkCmdSetRenderingAttachmentLocations = 0; + PFN_vkCmdSetRenderingInputAttachmentIndices vkCmdSetRenderingInputAttachmentIndices = 0; + PFN_vkCmdBindDescriptorSets2 vkCmdBindDescriptorSets2 = 0; + PFN_vkCmdPushConstants2 vkCmdPushConstants2 = 0; + PFN_vkCmdPushDescriptorSet2 vkCmdPushDescriptorSet2 = 0; + PFN_vkCmdPushDescriptorSetWithTemplate2 vkCmdPushDescriptorSetWithTemplate2 = 0; + PFN_vkCopyMemoryToImage vkCopyMemoryToImage = 0; + PFN_vkCopyImageToMemory vkCopyImageToMemory = 0; + PFN_vkCopyImageToImage vkCopyImageToImage = 0; + PFN_vkTransitionImageLayout vkTransitionImageLayout = 0; + //=== VK_KHR_swapchain === PFN_vkCreateSwapchainKHR vkCreateSwapchainKHR = 0; PFN_vkDestroySwapchainKHR vkDestroySwapchainKHR = 0; @@ -2005,14 +2082,9 @@ namespace VULKAN_HPP_NAMESPACE PFN_vkCmdCuLaunchKernelNVX vkCmdCuLaunchKernelNVX = 0; //=== VK_NVX_image_view_handle === -<<<<<<< HEAD - PFN_vkGetImageViewHandleNVX vkGetImageViewHandleNVX = 0; - PFN_vkGetImageViewAddressNVX vkGetImageViewAddressNVX = 0; -======= PFN_vkGetImageViewHandleNVX vkGetImageViewHandleNVX = 0; PFN_vkGetImageViewHandle64NVX vkGetImageViewHandle64NVX = 0; PFN_vkGetImageViewAddressNVX vkGetImageViewAddressNVX = 0; ->>>>>>> indev //=== VK_AMD_draw_indirect_count === PFN_vkCmdDrawIndirectCountAMD vkCmdDrawIndirectCountAMD = 0; @@ -2782,6 +2854,12 @@ namespace VULKAN_HPP_NAMESPACE //=== RAII HANDLES === //==================== + template + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = false; + }; + class Context { public: @@ -3156,6 +3234,12 @@ namespace VULKAN_HPP_NAMESPACE std::unique_ptr m_dispatcher; }; + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + class PhysicalDevice { public: @@ -3601,6 +3685,12 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::InstanceDispatcher const * m_dispatcher = nullptr; }; + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + class PhysicalDevices : public std::vector { public: @@ -3995,6 +4085,30 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD std::vector getImageSparseMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const; + //=== VK_VERSION_1_4 === + + VULKAN_HPP_NODISCARD void * mapMemory2( const 
VULKAN_HPP_NAMESPACE::MemoryMapInfo & memoryMapInfo ) const; + + void unmapMemory2( const VULKAN_HPP_NAMESPACE::MemoryUnmapInfo & memoryUnmapInfo ) const; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Extent2D + getRenderingAreaGranularity( const VULKAN_HPP_NAMESPACE::RenderingAreaInfo & renderingAreaInfo ) const VULKAN_HPP_NOEXCEPT; + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SubresourceLayout2 + getImageSubresourceLayout( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfo & info ) const VULKAN_HPP_NOEXCEPT; + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getImageSubresourceLayout( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfo & info ) const VULKAN_HPP_NOEXCEPT; + + void copyMemoryToImage( const VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfo & copyMemoryToImageInfo ) const; + + void copyImageToMemory( const VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfo & copyImageToMemoryInfo ) const; + + void copyImageToImage( const VULKAN_HPP_NAMESPACE::CopyImageToImageInfo & copyImageToImageInfo ) const; + + void transitionImageLayout( VULKAN_HPP_NAMESPACE::ArrayProxy const & transitions ) const; + //=== VK_KHR_swapchain === VULKAN_HPP_NODISCARD @@ -4426,19 +4540,19 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_EXT_host_image_copy === - void copyMemoryToImageEXT( const VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfoEXT & copyMemoryToImageInfo ) const; + void copyMemoryToImageEXT( const VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfo & copyMemoryToImageInfo ) const; - void copyImageToMemoryEXT( const VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfoEXT & copyImageToMemoryInfo ) const; + void copyImageToMemoryEXT( const VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfo & copyImageToMemoryInfo ) const; - void copyImageToImageEXT( const VULKAN_HPP_NAMESPACE::CopyImageToImageInfoEXT & copyImageToImageInfo ) const; + void copyImageToImageEXT( const VULKAN_HPP_NAMESPACE::CopyImageToImageInfo & copyImageToImageInfo ) const; - void transitionImageLayoutEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & transitions ) const; + void transitionImageLayoutEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & transitions ) const; //=== VK_KHR_map_memory2 === - VULKAN_HPP_NODISCARD void * mapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryMapInfoKHR & memoryMapInfo ) const; + VULKAN_HPP_NODISCARD void * mapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryMapInfo & memoryMapInfo ) const; - void unmapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryUnmapInfoKHR & memoryUnmapInfo ) const; + void unmapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryUnmapInfo & memoryUnmapInfo ) const; //=== VK_EXT_swapchain_maintenance1 === @@ -4672,14 +4786,14 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_KHR_maintenance5 === VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Extent2D - getRenderingAreaGranularityKHR( const VULKAN_HPP_NAMESPACE::RenderingAreaInfoKHR & renderingAreaInfo ) const VULKAN_HPP_NOEXCEPT; + getRenderingAreaGranularityKHR( const VULKAN_HPP_NAMESPACE::RenderingAreaInfo & renderingAreaInfo ) const VULKAN_HPP_NOEXCEPT; - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR - getImageSubresourceLayoutKHR( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfoKHR & info ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SubresourceLayout2 + getImageSubresourceLayoutKHR( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfo & info ) const VULKAN_HPP_NOEXCEPT; template VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain - getImageSubresourceLayoutKHR( const 
VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfoKHR & info ) const VULKAN_HPP_NOEXCEPT; + getImageSubresourceLayoutKHR( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfo & info ) const VULKAN_HPP_NOEXCEPT; //=== VK_AMD_anti_lag === @@ -4764,6 +4878,12 @@ namespace VULKAN_HPP_NAMESPACE std::unique_ptr m_dispatcher; }; + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + class AccelerationStructureKHR { public: @@ -4884,6 +5004,12 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + class AccelerationStructureNV { public: @@ -5012,6 +5138,12 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + class Buffer { public: @@ -5137,6 +5269,12 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + # if defined( VK_USE_PLATFORM_FUCHSIA ) class BufferCollectionFUCHSIA { @@ -5265,6 +5403,13 @@ namespace VULKAN_HPP_NAMESPACE const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; + + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + # endif /*VK_USE_PLATFORM_FUCHSIA*/ class BufferView @@ -5386,6 +5531,12 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + class CommandPool { public: @@ -5517,6 +5668,12 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + class CommandBuffer { public: @@ -5891,6 +6048,41 @@ namespace VULKAN_HPP_NAMESPACE void setPrimitiveRestartEnable( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable ) const VULKAN_HPP_NOEXCEPT; + //=== VK_VERSION_1_4 === + + void setLineStipple( uint32_t lineStippleFactor, uint16_t lineStipplePattern ) const VULKAN_HPP_NOEXCEPT; + + void bindIndexBuffer2( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::DeviceSize size, + VULKAN_HPP_NAMESPACE::IndexType indexType ) const VULKAN_HPP_NOEXCEPT; + + void pushDescriptorSet( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t set, + VULKAN_HPP_NAMESPACE::ArrayProxy const & descriptorWrites ) const + VULKAN_HPP_NOEXCEPT; + + template + void pushDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t set, + DataType const & data ) const VULKAN_HPP_NOEXCEPT; + + void setRenderingAttachmentLocations( const 
VULKAN_HPP_NAMESPACE::RenderingAttachmentLocationInfo & locationInfo ) const VULKAN_HPP_NOEXCEPT; + + void setRenderingInputAttachmentIndices( const VULKAN_HPP_NAMESPACE::RenderingInputAttachmentIndexInfo & inputAttachmentIndexInfo ) const + VULKAN_HPP_NOEXCEPT; + + void bindDescriptorSets2( const VULKAN_HPP_NAMESPACE::BindDescriptorSetsInfo & bindDescriptorSetsInfo ) const VULKAN_HPP_NOEXCEPT; + + void pushConstants2( const VULKAN_HPP_NAMESPACE::PushConstantsInfo & pushConstantsInfo ) const VULKAN_HPP_NOEXCEPT; + + void pushDescriptorSet2( const VULKAN_HPP_NAMESPACE::PushDescriptorSetInfo & pushDescriptorSetInfo ) const VULKAN_HPP_NOEXCEPT; + + void pushDescriptorSetWithTemplate2( const VULKAN_HPP_NAMESPACE::PushDescriptorSetWithTemplateInfo & pushDescriptorSetWithTemplateInfo ) const + VULKAN_HPP_NOEXCEPT; + //=== VK_EXT_debug_marker === void debugMarkerBeginEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT & markerInfo ) const VULKAN_HPP_NOEXCEPT; @@ -6216,9 +6408,9 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_KHR_dynamic_rendering_local_read === - void setRenderingAttachmentLocationsKHR( const VULKAN_HPP_NAMESPACE::RenderingAttachmentLocationInfoKHR & locationInfo ) const VULKAN_HPP_NOEXCEPT; + void setRenderingAttachmentLocationsKHR( const VULKAN_HPP_NAMESPACE::RenderingAttachmentLocationInfo & locationInfo ) const VULKAN_HPP_NOEXCEPT; - void setRenderingInputAttachmentIndicesKHR( const VULKAN_HPP_NAMESPACE::RenderingInputAttachmentIndexInfoKHR & inputAttachmentIndexInfo ) const + void setRenderingInputAttachmentIndicesKHR( const VULKAN_HPP_NAMESPACE::RenderingInputAttachmentIndexInfo & inputAttachmentIndexInfo ) const VULKAN_HPP_NOEXCEPT; //=== VK_EXT_line_rasterization === @@ -6549,13 +6741,13 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_KHR_maintenance6 === - void bindDescriptorSets2KHR( const VULKAN_HPP_NAMESPACE::BindDescriptorSetsInfoKHR & bindDescriptorSetsInfo ) const VULKAN_HPP_NOEXCEPT; + void bindDescriptorSets2KHR( const VULKAN_HPP_NAMESPACE::BindDescriptorSetsInfo & bindDescriptorSetsInfo ) const VULKAN_HPP_NOEXCEPT; - void pushConstants2KHR( const VULKAN_HPP_NAMESPACE::PushConstantsInfoKHR & pushConstantsInfo ) const VULKAN_HPP_NOEXCEPT; + void pushConstants2KHR( const VULKAN_HPP_NAMESPACE::PushConstantsInfo & pushConstantsInfo ) const VULKAN_HPP_NOEXCEPT; - void pushDescriptorSet2KHR( const VULKAN_HPP_NAMESPACE::PushDescriptorSetInfoKHR & pushDescriptorSetInfo ) const VULKAN_HPP_NOEXCEPT; + void pushDescriptorSet2KHR( const VULKAN_HPP_NAMESPACE::PushDescriptorSetInfo & pushDescriptorSetInfo ) const VULKAN_HPP_NOEXCEPT; - void pushDescriptorSetWithTemplate2KHR( const VULKAN_HPP_NAMESPACE::PushDescriptorSetWithTemplateInfoKHR & pushDescriptorSetWithTemplateInfo ) const + void pushDescriptorSetWithTemplate2KHR( const VULKAN_HPP_NAMESPACE::PushDescriptorSetWithTemplateInfo & pushDescriptorSetWithTemplateInfo ) const VULKAN_HPP_NOEXCEPT; void setDescriptorBufferOffsets2EXT( const VULKAN_HPP_NAMESPACE::SetDescriptorBufferOffsetsInfoEXT & setDescriptorBufferOffsetsInfo ) const @@ -6579,6 +6771,12 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + class CommandBuffers : public std::vector { public: @@ -6724,6 +6922,12 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; + template 
<> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + class CuModuleNVX { public: @@ -6843,6 +7047,12 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + # if defined( VK_ENABLE_BETA_EXTENSIONS ) class CudaFunctionNV { @@ -6962,6 +7172,13 @@ namespace VULKAN_HPP_NAMESPACE const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; + + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + # endif /*VK_ENABLE_BETA_EXTENSIONS*/ # if defined( VK_ENABLE_BETA_EXTENSIONS ) @@ -7087,6 +7304,13 @@ namespace VULKAN_HPP_NAMESPACE const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {}; VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; + + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + # endif /*VK_ENABLE_BETA_EXTENSIONS*/ class DebugReportCallbackEXT @@ -7209,6 +7433,12 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::InstanceDispatcher const * m_dispatcher = nullptr; }; + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + class DebugUtilsMessengerEXT { public: @@ -7329,6 +7559,12 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::InstanceDispatcher const * m_dispatcher = nullptr; }; + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + class DeferredOperationKHR { public: @@ -7456,6 +7692,12 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + class DescriptorPool { public: @@ -7580,6 +7822,12 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + class DescriptorSet { public: @@ -7701,6 +7949,12 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + class DescriptorSets : public std::vector { public: @@ -7853,6 +8107,12 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + class DescriptorUpdateTemplate { public: @@ -7973,6 +8233,12 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + class DeviceMemory { public: @@ -8112,6 +8378,12 @@ 
namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + class DisplayKHR { public: @@ -8257,6 +8529,12 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::InstanceDispatcher const * m_dispatcher = nullptr; }; + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + class DisplayKHRs : public std::vector { public: @@ -8390,6 +8668,12 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::InstanceDispatcher const * m_dispatcher = nullptr; }; + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + class Event { public: @@ -8517,6 +8801,12 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + class Fence { public: @@ -8659,6 +8949,12 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + class Framebuffer { public: @@ -8782,6 +9078,12 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + class Image { public: @@ -8905,27 +9207,36 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SubresourceLayout getSubresourceLayout( const VULKAN_HPP_NAMESPACE::ImageSubresource & subresource ) const VULKAN_HPP_NOEXCEPT; + //=== VK_VERSION_1_4 === + + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SubresourceLayout2 + getSubresourceLayout2( const VULKAN_HPP_NAMESPACE::ImageSubresource2 & subresource ) const VULKAN_HPP_NOEXCEPT; + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getSubresourceLayout2( const VULKAN_HPP_NAMESPACE::ImageSubresource2 & subresource ) const VULKAN_HPP_NOEXCEPT; + //=== VK_EXT_image_drm_format_modifier === VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT getDrmFormatModifierPropertiesEXT() const; //=== VK_EXT_host_image_copy === - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR - getSubresourceLayout2EXT( const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SubresourceLayout2 + getSubresourceLayout2EXT( const VULKAN_HPP_NAMESPACE::ImageSubresource2 & subresource ) const VULKAN_HPP_NOEXCEPT; template VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain - getSubresourceLayout2EXT( const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource ) const VULKAN_HPP_NOEXCEPT; + getSubresourceLayout2EXT( const VULKAN_HPP_NAMESPACE::ImageSubresource2 & subresource ) const VULKAN_HPP_NOEXCEPT; //=== VK_KHR_maintenance5 === - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR - getSubresourceLayout2KHR( const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource ) const VULKAN_HPP_NOEXCEPT; + VULKAN_HPP_NODISCARD 
VULKAN_HPP_NAMESPACE::SubresourceLayout2 + getSubresourceLayout2KHR( const VULKAN_HPP_NAMESPACE::ImageSubresource2 & subresource ) const VULKAN_HPP_NOEXCEPT; template VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain - getSubresourceLayout2KHR( const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource ) const VULKAN_HPP_NOEXCEPT; + getSubresourceLayout2KHR( const VULKAN_HPP_NAMESPACE::ImageSubresource2 & subresource ) const VULKAN_HPP_NOEXCEPT; private: VULKAN_HPP_NAMESPACE::Device m_device = {}; @@ -8934,6 +9245,12 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + class ImageView { public: @@ -9057,6 +9374,12 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + class IndirectCommandsLayoutEXT { public: @@ -9177,6 +9500,12 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + class IndirectCommandsLayoutNV { public: @@ -9297,6 +9626,12 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + class IndirectExecutionSetEXT { public: @@ -9425,6 +9760,12 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + class MicromapEXT { public: @@ -9544,6 +9885,12 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + class OpticalFlowSessionNV { public: @@ -9670,6 +10017,12 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + class PerformanceConfigurationINTEL { public: @@ -9777,6 +10130,12 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + class PipelineCache { public: @@ -9903,6 +10262,12 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + class Pipeline { public: @@ -10120,6 +10485,12 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; + template <> + struct isVulkanRAIIHandleType + { + 
static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + class Pipelines : public std::vector { public: @@ -10315,6 +10686,12 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + class PipelineBinaryKHRs : public std::vector { public: @@ -10462,6 +10839,12 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + class PrivateDataSlot { public: @@ -10582,6 +10965,12 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + class QueryPool { public: @@ -10726,6 +11115,12 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + class Queue { public: @@ -10874,6 +11269,12 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + class RenderPass { public: @@ -11010,6 +11411,12 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + class Sampler { public: @@ -11129,6 +11536,12 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + class SamplerYcbcrConversion { public: @@ -11249,6 +11662,12 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + class Semaphore { public: @@ -11376,6 +11795,12 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + class ShaderEXT { public: @@ -11512,6 +11937,12 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + class ShaderEXTs : public std::vector { public: @@ -11662,6 +12093,12 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + class SurfaceKHR { public: @@ -11933,6 
+12370,12 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::InstanceDispatcher const * m_dispatcher = nullptr; }; + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + class SwapchainKHR { public: @@ -12101,6 +12544,12 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + class SwapchainKHRs : public std::vector { public: @@ -12254,6 +12703,12 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + class VideoSessionKHR { public: @@ -12380,6 +12835,12 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + class VideoSessionParametersKHR { public: @@ -12504,6 +12965,63 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::detail::DeviceDispatcher const * m_dispatcher = nullptr; }; + template <> + struct isVulkanRAIIHandleType + { + static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true; + }; + + // operators to compare vk::raii-handles +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + template ::value, bool>::type = 0> + auto operator<=>( T const & a, T const & b ) VULKAN_HPP_NOEXCEPT + { + return *a <=> *b; + } +# else + template ::value, bool>::type = 0> + bool operator==( T const & a, T const & b ) VULKAN_HPP_NOEXCEPT + { + return *a == *b; + } + + template ::value, bool>::type = 0> + bool operator!=( T const & a, T const & b ) VULKAN_HPP_NOEXCEPT + { + return *a != *b; + } + + template ::value, bool>::type = 0> + bool operator<( T const & a, T const & b ) VULKAN_HPP_NOEXCEPT + { + return *a < *b; + } +# endif + + template ::value, bool>::type = 0> + bool operator==( const T & v, std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + return !*v; + } + + template ::value, bool>::type = 0> + bool operator==( std::nullptr_t, const T & v ) VULKAN_HPP_NOEXCEPT + { + return !*v; + } + + template ::value, bool>::type = 0> + bool operator!=( const T & v, std::nullptr_t ) VULKAN_HPP_NOEXCEPT + { + return *v; + } + + template ::value, bool>::type = 0> + bool operator!=( std::nullptr_t, const T & v ) VULKAN_HPP_NOEXCEPT + { + return *v; + } + //=========================== //=== COMMAND Definitions === //=========================== @@ -15560,6 +16078,257 @@ namespace VULKAN_HPP_NAMESPACE return sparseMemoryRequirements; } + //=== VK_VERSION_1_4 === + + VULKAN_HPP_INLINE void CommandBuffer::setLineStipple( uint32_t lineStippleFactor, uint16_t lineStipplePattern ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetLineStipple && + "Function requires or or " ); + + getDispatcher()->vkCmdSetLineStipple( static_cast( m_commandBuffer ), lineStippleFactor, lineStipplePattern ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE void * Device::mapMemory2( const VULKAN_HPP_NAMESPACE::MemoryMapInfo & memoryMapInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkMapMemory2 && "Function requires or " ); + + void * pData; + VULKAN_HPP_NAMESPACE::Result result = 
static_cast( + getDispatcher()->vkMapMemory2( static_cast( m_device ), reinterpret_cast( &memoryMapInfo ), &pData ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::mapMemory2" ); + + return pData; + } + + VULKAN_HPP_INLINE void Device::unmapMemory2( const VULKAN_HPP_NAMESPACE::MemoryUnmapInfo & memoryUnmapInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkUnmapMemory2 && "Function requires or " ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkUnmapMemory2( static_cast( m_device ), reinterpret_cast( &memoryUnmapInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::unmapMemory2" ); + } + + VULKAN_HPP_INLINE void CommandBuffer::bindIndexBuffer2( VULKAN_HPP_NAMESPACE::Buffer buffer, + VULKAN_HPP_NAMESPACE::DeviceSize offset, + VULKAN_HPP_NAMESPACE::DeviceSize size, + VULKAN_HPP_NAMESPACE::IndexType indexType ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindIndexBuffer2 && "Function requires or " ); + + getDispatcher()->vkCmdBindIndexBuffer2( static_cast( m_commandBuffer ), + static_cast( buffer ), + static_cast( offset ), + static_cast( size ), + static_cast( indexType ) ); + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Extent2D + Device::getRenderingAreaGranularity( const VULKAN_HPP_NAMESPACE::RenderingAreaInfo & renderingAreaInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetRenderingAreaGranularity && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::Extent2D granularity; + getDispatcher()->vkGetRenderingAreaGranularity( static_cast( m_device ), + reinterpret_cast( &renderingAreaInfo ), + reinterpret_cast( &granularity ) ); + + return granularity; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout2 + Device::getImageSubresourceLayout( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfo & info ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceImageSubresourceLayout && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::SubresourceLayout2 layout; + getDispatcher()->vkGetDeviceImageSubresourceLayout( static_cast( m_device ), + reinterpret_cast( &info ), + reinterpret_cast( &layout ) ); + + return layout; + } + + template + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + Device::getImageSubresourceLayout( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfo & info ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceImageSubresourceLayout && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::SubresourceLayout2 & layout = structureChain.template get(); + getDispatcher()->vkGetDeviceImageSubresourceLayout( static_cast( m_device ), + reinterpret_cast( &info ), + reinterpret_cast( &layout ) ); + + return structureChain; + } + + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout2 + Image::getSubresourceLayout2( const VULKAN_HPP_NAMESPACE::ImageSubresource2 & subresource ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetImageSubresourceLayout2 && + "Function requires or or or " ); + + VULKAN_HPP_NAMESPACE::SubresourceLayout2 layout; + getDispatcher()->vkGetImageSubresourceLayout2( static_cast( m_device ), + static_cast( m_image ), + reinterpret_cast( &subresource ), + reinterpret_cast( &layout ) ); + + return layout; + } + + template + 
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain + Image::getSubresourceLayout2( const VULKAN_HPP_NAMESPACE::ImageSubresource2 & subresource ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkGetImageSubresourceLayout2 && + "Function requires or or or " ); + + VULKAN_HPP_NAMESPACE::StructureChain structureChain; + VULKAN_HPP_NAMESPACE::SubresourceLayout2 & layout = structureChain.template get(); + getDispatcher()->vkGetImageSubresourceLayout2( static_cast( m_device ), + static_cast( m_image ), + reinterpret_cast( &subresource ), + reinterpret_cast( &layout ) ); + + return structureChain; + } + + VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSet( + VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t set, + VULKAN_HPP_NAMESPACE::ArrayProxy const & descriptorWrites ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdPushDescriptorSet && "Function requires or " ); + + getDispatcher()->vkCmdPushDescriptorSet( static_cast( m_commandBuffer ), + static_cast( pipelineBindPoint ), + static_cast( layout ), + set, + descriptorWrites.size(), + reinterpret_cast( descriptorWrites.data() ) ); + } + + template + VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, + VULKAN_HPP_NAMESPACE::PipelineLayout layout, + uint32_t set, + DataType const & data ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( + getDispatcher()->vkCmdPushDescriptorSetWithTemplate && + "Function requires or or " ); + + getDispatcher()->vkCmdPushDescriptorSetWithTemplate( static_cast( m_commandBuffer ), + static_cast( descriptorUpdateTemplate ), + static_cast( layout ), + set, + reinterpret_cast( &data ) ); + } + + VULKAN_HPP_INLINE void + CommandBuffer::setRenderingAttachmentLocations( const VULKAN_HPP_NAMESPACE::RenderingAttachmentLocationInfo & locationInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetRenderingAttachmentLocations && + "Function requires or " ); + + getDispatcher()->vkCmdSetRenderingAttachmentLocations( static_cast( m_commandBuffer ), + reinterpret_cast( &locationInfo ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::setRenderingInputAttachmentIndices( + const VULKAN_HPP_NAMESPACE::RenderingInputAttachmentIndexInfo & inputAttachmentIndexInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetRenderingInputAttachmentIndices && + "Function requires or " ); + + getDispatcher()->vkCmdSetRenderingInputAttachmentIndices( static_cast( m_commandBuffer ), + reinterpret_cast( &inputAttachmentIndexInfo ) ); + } + + VULKAN_HPP_INLINE void + CommandBuffer::bindDescriptorSets2( const VULKAN_HPP_NAMESPACE::BindDescriptorSetsInfo & bindDescriptorSetsInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindDescriptorSets2 && + "Function requires or " ); + + getDispatcher()->vkCmdBindDescriptorSets2( static_cast( m_commandBuffer ), + reinterpret_cast( &bindDescriptorSetsInfo ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::pushConstants2( const VULKAN_HPP_NAMESPACE::PushConstantsInfo & pushConstantsInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdPushConstants2 && "Function requires or " ); + + getDispatcher()->vkCmdPushConstants2( static_cast( m_commandBuffer ), + reinterpret_cast( &pushConstantsInfo ) ); + } + + VULKAN_HPP_INLINE void + CommandBuffer::pushDescriptorSet2( const 
VULKAN_HPP_NAMESPACE::PushDescriptorSetInfo & pushDescriptorSetInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdPushDescriptorSet2 && "Function requires or " ); + + getDispatcher()->vkCmdPushDescriptorSet2( static_cast( m_commandBuffer ), + reinterpret_cast( &pushDescriptorSetInfo ) ); + } + + VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetWithTemplate2( + const VULKAN_HPP_NAMESPACE::PushDescriptorSetWithTemplateInfo & pushDescriptorSetWithTemplateInfo ) const VULKAN_HPP_NOEXCEPT + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdPushDescriptorSetWithTemplate2 && + "Function requires or " ); + + getDispatcher()->vkCmdPushDescriptorSetWithTemplate2( + static_cast( m_commandBuffer ), reinterpret_cast( &pushDescriptorSetWithTemplateInfo ) ); + } + + VULKAN_HPP_INLINE void Device::copyMemoryToImage( const VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfo & copyMemoryToImageInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCopyMemoryToImage && "Function requires or " ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCopyMemoryToImage( + static_cast( m_device ), reinterpret_cast( ©MemoryToImageInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::copyMemoryToImage" ); + } + + VULKAN_HPP_INLINE void Device::copyImageToMemory( const VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfo & copyImageToMemoryInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCopyImageToMemory && "Function requires or " ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCopyImageToMemory( + static_cast( m_device ), reinterpret_cast( ©ImageToMemoryInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::copyImageToMemory" ); + } + + VULKAN_HPP_INLINE void Device::copyImageToImage( const VULKAN_HPP_NAMESPACE::CopyImageToImageInfo & copyImageToImageInfo ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkCopyImageToImage && "Function requires or " ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( + getDispatcher()->vkCopyImageToImage( static_cast( m_device ), reinterpret_cast( ©ImageToImageInfo ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::copyImageToImage" ); + } + + VULKAN_HPP_INLINE void + Device::transitionImageLayout( VULKAN_HPP_NAMESPACE::ArrayProxy const & transitions ) const + { + VULKAN_HPP_ASSERT( getDispatcher()->vkTransitionImageLayout && + "Function requires or " ); + + VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkTransitionImageLayout( + static_cast( m_device ), transitions.size(), reinterpret_cast( transitions.data() ) ) ); + VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::transitionImageLayout" ); + } + //=== VK_KHR_surface === VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32 PhysicalDevice::getSurfaceSupportKHR( uint32_t queueFamilyIndex, @@ -17465,7 +18234,8 @@ namespace VULKAN_HPP_NAMESPACE uint32_t set, VULKAN_HPP_NAMESPACE::ArrayProxy const & descriptorWrites ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdPushDescriptorSetKHR && "Function requires " ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdPushDescriptorSetKHR && + "Function requires or " ); getDispatcher()->vkCmdPushDescriptorSetKHR( static_cast( m_commandBuffer ), static_cast( pipelineBindPoint ), @@ -17481,8 +18251,9 @@ namespace VULKAN_HPP_NAMESPACE uint32_t set, DataType const & data ) const 
VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdPushDescriptorSetWithTemplateKHR && - "Function requires or " ); + VULKAN_HPP_ASSERT( + getDispatcher()->vkCmdPushDescriptorSetWithTemplateKHR && + "Function requires or or " ); getDispatcher()->vkCmdPushDescriptorSetWithTemplateKHR( static_cast( m_commandBuffer ), static_cast( descriptorUpdateTemplate ), @@ -20283,24 +21054,24 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_KHR_dynamic_rendering_local_read === - VULKAN_HPP_INLINE void CommandBuffer::setRenderingAttachmentLocationsKHR( - const VULKAN_HPP_NAMESPACE::RenderingAttachmentLocationInfoKHR & locationInfo ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void + CommandBuffer::setRenderingAttachmentLocationsKHR( const VULKAN_HPP_NAMESPACE::RenderingAttachmentLocationInfo & locationInfo ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetRenderingAttachmentLocationsKHR && - "Function requires " ); + "Function requires or " ); getDispatcher()->vkCmdSetRenderingAttachmentLocationsKHR( static_cast( m_commandBuffer ), - reinterpret_cast( &locationInfo ) ); + reinterpret_cast( &locationInfo ) ); } VULKAN_HPP_INLINE void CommandBuffer::setRenderingInputAttachmentIndicesKHR( - const VULKAN_HPP_NAMESPACE::RenderingInputAttachmentIndexInfoKHR & inputAttachmentIndexInfo ) const VULKAN_HPP_NOEXCEPT + const VULKAN_HPP_NAMESPACE::RenderingInputAttachmentIndexInfo & inputAttachmentIndexInfo ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetRenderingInputAttachmentIndicesKHR && - "Function requires " ); + "Function requires or " ); - getDispatcher()->vkCmdSetRenderingInputAttachmentIndicesKHR( - static_cast( m_commandBuffer ), reinterpret_cast( &inputAttachmentIndexInfo ) ); + getDispatcher()->vkCmdSetRenderingInputAttachmentIndicesKHR( static_cast( m_commandBuffer ), + reinterpret_cast( &inputAttachmentIndexInfo ) ); } //=== VK_EXT_buffer_device_address === @@ -20573,7 +21344,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE void CommandBuffer::setLineStippleEXT( uint32_t lineStippleFactor, uint16_t lineStipplePattern ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetLineStippleEXT && - "Function requires or " ); + "Function requires or or " ); getDispatcher()->vkCmdSetLineStippleEXT( static_cast( m_commandBuffer ), lineStippleFactor, lineStipplePattern ); } @@ -20891,97 +21662,98 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_EXT_host_image_copy === - VULKAN_HPP_INLINE void Device::copyMemoryToImageEXT( const VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfoEXT & copyMemoryToImageInfo ) const + VULKAN_HPP_INLINE void Device::copyMemoryToImageEXT( const VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfo & copyMemoryToImageInfo ) const { - VULKAN_HPP_ASSERT( getDispatcher()->vkCopyMemoryToImageEXT && "Function requires " ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCopyMemoryToImageEXT && "Function requires or " ); VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCopyMemoryToImageEXT( - static_cast( m_device ), reinterpret_cast( ©MemoryToImageInfo ) ) ); + static_cast( m_device ), reinterpret_cast( ©MemoryToImageInfo ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::copyMemoryToImageEXT" ); } - VULKAN_HPP_INLINE void Device::copyImageToMemoryEXT( const VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfoEXT & copyImageToMemoryInfo ) const + VULKAN_HPP_INLINE void Device::copyImageToMemoryEXT( const VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfo & copyImageToMemoryInfo ) const { - 
VULKAN_HPP_ASSERT( getDispatcher()->vkCopyImageToMemoryEXT && "Function requires " ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCopyImageToMemoryEXT && "Function requires or " ); VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCopyImageToMemoryEXT( - static_cast( m_device ), reinterpret_cast( ©ImageToMemoryInfo ) ) ); + static_cast( m_device ), reinterpret_cast( ©ImageToMemoryInfo ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::copyImageToMemoryEXT" ); } - VULKAN_HPP_INLINE void Device::copyImageToImageEXT( const VULKAN_HPP_NAMESPACE::CopyImageToImageInfoEXT & copyImageToImageInfo ) const + VULKAN_HPP_INLINE void Device::copyImageToImageEXT( const VULKAN_HPP_NAMESPACE::CopyImageToImageInfo & copyImageToImageInfo ) const { - VULKAN_HPP_ASSERT( getDispatcher()->vkCopyImageToImageEXT && "Function requires " ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCopyImageToImageEXT && "Function requires or " ); VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkCopyImageToImageEXT( - static_cast( m_device ), reinterpret_cast( ©ImageToImageInfo ) ) ); + static_cast( m_device ), reinterpret_cast( ©ImageToImageInfo ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::copyImageToImageEXT" ); } - VULKAN_HPP_INLINE void Device::transitionImageLayoutEXT( - VULKAN_HPP_NAMESPACE::ArrayProxy const & transitions ) const + VULKAN_HPP_INLINE void + Device::transitionImageLayoutEXT( VULKAN_HPP_NAMESPACE::ArrayProxy const & transitions ) const { - VULKAN_HPP_ASSERT( getDispatcher()->vkTransitionImageLayoutEXT && "Function requires " ); + VULKAN_HPP_ASSERT( getDispatcher()->vkTransitionImageLayoutEXT && + "Function requires or " ); VULKAN_HPP_NAMESPACE::Result result = static_cast( getDispatcher()->vkTransitionImageLayoutEXT( - static_cast( m_device ), transitions.size(), reinterpret_cast( transitions.data() ) ) ); + static_cast( m_device ), transitions.size(), reinterpret_cast( transitions.data() ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::transitionImageLayoutEXT" ); } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR - Image::getSubresourceLayout2EXT( const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout2 + Image::getSubresourceLayout2EXT( const VULKAN_HPP_NAMESPACE::ImageSubresource2 & subresource ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageSubresourceLayout2EXT && - "Function requires or or " ); + "Function requires or or or " ); - VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR layout; + VULKAN_HPP_NAMESPACE::SubresourceLayout2 layout; getDispatcher()->vkGetImageSubresourceLayout2EXT( static_cast( m_device ), static_cast( m_image ), - reinterpret_cast( &subresource ), - reinterpret_cast( &layout ) ); + reinterpret_cast( &subresource ), + reinterpret_cast( &layout ) ); return layout; } template VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain - Image::getSubresourceLayout2EXT( const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource ) const VULKAN_HPP_NOEXCEPT + Image::getSubresourceLayout2EXT( const VULKAN_HPP_NAMESPACE::ImageSubresource2 & subresource ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageSubresourceLayout2EXT && - "Function requires or or " ); + "Function requires or or or " ); 
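    // --- Illustrative sketch, not part of the generated header ---------------------
    // With host image copy promoted to core, the EXT commands above now take the
    // suffix-less structure types. A minimal host-to-image upload through the RAII
    // wrapper could look like this; 'device' (vk::raii::Device), 'image'
    // (vk::raii::Image), 'pixels', 'width' and 'height' are hypothetical, and the
    // image is assumed to already be in a layout the implementation allows as a
    // host-copy destination.
    vk::MemoryToImageCopy region{};
    region.pHostPointer     = pixels.data();  // tightly packed host data
    region.imageSubresource = { vk::ImageAspectFlagBits::eColor, 0, 0, 1 };
    region.imageExtent      = vk::Extent3D{ width, height, 1 };

    vk::CopyMemoryToImageInfo copyInfo{};
    copyInfo.dstImage       = *image;
    copyInfo.dstImageLayout = vk::ImageLayout::eGeneral;
    copyInfo.regionCount    = 1;
    copyInfo.pRegions       = &region;

    device.copyMemoryToImageEXT( copyInfo );  // or copyMemoryToImage() on a Vulkan 1.4 device
    // --------------------------------------------------------------------------------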
VULKAN_HPP_NAMESPACE::StructureChain structureChain; - VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR & layout = structureChain.template get(); + VULKAN_HPP_NAMESPACE::SubresourceLayout2 & layout = structureChain.template get(); getDispatcher()->vkGetImageSubresourceLayout2EXT( static_cast( m_device ), static_cast( m_image ), - reinterpret_cast( &subresource ), - reinterpret_cast( &layout ) ); + reinterpret_cast( &subresource ), + reinterpret_cast( &layout ) ); return structureChain; } //=== VK_KHR_map_memory2 === - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE void * Device::mapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryMapInfoKHR & memoryMapInfo ) const + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE void * Device::mapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryMapInfo & memoryMapInfo ) const { - VULKAN_HPP_ASSERT( getDispatcher()->vkMapMemory2KHR && "Function requires " ); + VULKAN_HPP_ASSERT( getDispatcher()->vkMapMemory2KHR && "Function requires or " ); void * pData; VULKAN_HPP_NAMESPACE::Result result = static_cast( - getDispatcher()->vkMapMemory2KHR( static_cast( m_device ), reinterpret_cast( &memoryMapInfo ), &pData ) ); + getDispatcher()->vkMapMemory2KHR( static_cast( m_device ), reinterpret_cast( &memoryMapInfo ), &pData ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::mapMemory2KHR" ); return pData; } - VULKAN_HPP_INLINE void Device::unmapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryUnmapInfoKHR & memoryUnmapInfo ) const + VULKAN_HPP_INLINE void Device::unmapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryUnmapInfo & memoryUnmapInfo ) const { - VULKAN_HPP_ASSERT( getDispatcher()->vkUnmapMemory2KHR && "Function requires " ); + VULKAN_HPP_ASSERT( getDispatcher()->vkUnmapMemory2KHR && "Function requires or " ); VULKAN_HPP_NAMESPACE::Result result = static_cast( - getDispatcher()->vkUnmapMemory2KHR( static_cast( m_device ), reinterpret_cast( &memoryUnmapInfo ) ) ); + getDispatcher()->vkUnmapMemory2KHR( static_cast( m_device ), reinterpret_cast( &memoryUnmapInfo ) ) ); VULKAN_HPP_NAMESPACE::detail::resultCheck( result, VULKAN_HPP_NAMESPACE_STRING "::Device::unmapMemory2KHR" ); } @@ -23108,7 +23880,8 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DeviceSize size, VULKAN_HPP_NAMESPACE::IndexType indexType ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindIndexBuffer2KHR && "Function requires " ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindIndexBuffer2KHR && + "Function requires or " ); getDispatcher()->vkCmdBindIndexBuffer2KHR( static_cast( m_commandBuffer ), static_cast( buffer ), @@ -23118,78 +23891,79 @@ namespace VULKAN_HPP_NAMESPACE } VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Extent2D - Device::getRenderingAreaGranularityKHR( const VULKAN_HPP_NAMESPACE::RenderingAreaInfoKHR & renderingAreaInfo ) const VULKAN_HPP_NOEXCEPT + Device::getRenderingAreaGranularityKHR( const VULKAN_HPP_NAMESPACE::RenderingAreaInfo & renderingAreaInfo ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkGetRenderingAreaGranularityKHR && "Function requires " ); + VULKAN_HPP_ASSERT( getDispatcher()->vkGetRenderingAreaGranularityKHR && + "Function requires or " ); VULKAN_HPP_NAMESPACE::Extent2D granularity; getDispatcher()->vkGetRenderingAreaGranularityKHR( static_cast( m_device ), - reinterpret_cast( &renderingAreaInfo ), + reinterpret_cast( &renderingAreaInfo ), reinterpret_cast( &granularity ) ); return granularity; } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE 
VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR - Device::getImageSubresourceLayoutKHR( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfoKHR & info ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout2 + Device::getImageSubresourceLayoutKHR( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfo & info ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceImageSubresourceLayoutKHR && - "Function requires " ); + "Function requires or " ); - VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR layout; + VULKAN_HPP_NAMESPACE::SubresourceLayout2 layout; getDispatcher()->vkGetDeviceImageSubresourceLayoutKHR( static_cast( m_device ), - reinterpret_cast( &info ), - reinterpret_cast( &layout ) ); + reinterpret_cast( &info ), + reinterpret_cast( &layout ) ); return layout; } template VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain - Device::getImageSubresourceLayoutKHR( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfoKHR & info ) const VULKAN_HPP_NOEXCEPT + Device::getImageSubresourceLayoutKHR( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfo & info ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceImageSubresourceLayoutKHR && - "Function requires " ); + "Function requires or " ); VULKAN_HPP_NAMESPACE::StructureChain structureChain; - VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR & layout = structureChain.template get(); + VULKAN_HPP_NAMESPACE::SubresourceLayout2 & layout = structureChain.template get(); getDispatcher()->vkGetDeviceImageSubresourceLayoutKHR( static_cast( m_device ), - reinterpret_cast( &info ), - reinterpret_cast( &layout ) ); + reinterpret_cast( &info ), + reinterpret_cast( &layout ) ); return structureChain; } - VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR - Image::getSubresourceLayout2KHR( const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout2 + Image::getSubresourceLayout2KHR( const VULKAN_HPP_NAMESPACE::ImageSubresource2 & subresource ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageSubresourceLayout2KHR && - "Function requires or or " ); + "Function requires or or or " ); - VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR layout; + VULKAN_HPP_NAMESPACE::SubresourceLayout2 layout; getDispatcher()->vkGetImageSubresourceLayout2KHR( static_cast( m_device ), static_cast( m_image ), - reinterpret_cast( &subresource ), - reinterpret_cast( &layout ) ); + reinterpret_cast( &subresource ), + reinterpret_cast( &layout ) ); return layout; } template VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain - Image::getSubresourceLayout2KHR( const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource ) const VULKAN_HPP_NOEXCEPT + Image::getSubresourceLayout2KHR( const VULKAN_HPP_NAMESPACE::ImageSubresource2 & subresource ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageSubresourceLayout2KHR && - "Function requires or or " ); + "Function requires or or or " ); VULKAN_HPP_NAMESPACE::StructureChain structureChain; - VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR & layout = structureChain.template get(); + VULKAN_HPP_NAMESPACE::SubresourceLayout2 & layout = structureChain.template get(); getDispatcher()->vkGetImageSubresourceLayout2KHR( static_cast( m_device ), static_cast( m_image ), - reinterpret_cast( &subresource ), - reinterpret_cast( 
&layout ) ); + reinterpret_cast( &subresource ), + reinterpret_cast( &layout ) ); return structureChain; } @@ -23616,7 +24390,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_INLINE void CommandBuffer::setLineStippleKHR( uint32_t lineStippleFactor, uint16_t lineStipplePattern ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetLineStippleKHR && - "Function requires or " ); + "Function requires or or " ); getDispatcher()->vkCmdSetLineStippleKHR( static_cast( m_commandBuffer ), lineStippleFactor, lineStipplePattern ); } @@ -23691,40 +24465,41 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_KHR_maintenance6 === VULKAN_HPP_INLINE void - CommandBuffer::bindDescriptorSets2KHR( const VULKAN_HPP_NAMESPACE::BindDescriptorSetsInfoKHR & bindDescriptorSetsInfo ) const VULKAN_HPP_NOEXCEPT + CommandBuffer::bindDescriptorSets2KHR( const VULKAN_HPP_NAMESPACE::BindDescriptorSetsInfo & bindDescriptorSetsInfo ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindDescriptorSets2KHR && "Function requires " ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindDescriptorSets2KHR && + "Function requires or " ); getDispatcher()->vkCmdBindDescriptorSets2KHR( static_cast( m_commandBuffer ), - reinterpret_cast( &bindDescriptorSetsInfo ) ); + reinterpret_cast( &bindDescriptorSetsInfo ) ); } - VULKAN_HPP_INLINE void CommandBuffer::pushConstants2KHR( const VULKAN_HPP_NAMESPACE::PushConstantsInfoKHR & pushConstantsInfo ) const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_INLINE void CommandBuffer::pushConstants2KHR( const VULKAN_HPP_NAMESPACE::PushConstantsInfo & pushConstantsInfo ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdPushConstants2KHR && "Function requires " ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdPushConstants2KHR && "Function requires or " ); getDispatcher()->vkCmdPushConstants2KHR( static_cast( m_commandBuffer ), - reinterpret_cast( &pushConstantsInfo ) ); + reinterpret_cast( &pushConstantsInfo ) ); } VULKAN_HPP_INLINE void - CommandBuffer::pushDescriptorSet2KHR( const VULKAN_HPP_NAMESPACE::PushDescriptorSetInfoKHR & pushDescriptorSetInfo ) const VULKAN_HPP_NOEXCEPT + CommandBuffer::pushDescriptorSet2KHR( const VULKAN_HPP_NAMESPACE::PushDescriptorSetInfo & pushDescriptorSetInfo ) const VULKAN_HPP_NOEXCEPT { - VULKAN_HPP_ASSERT( getDispatcher()->vkCmdPushDescriptorSet2KHR && "Function requires " ); + VULKAN_HPP_ASSERT( getDispatcher()->vkCmdPushDescriptorSet2KHR && + "Function requires or " ); getDispatcher()->vkCmdPushDescriptorSet2KHR( static_cast( m_commandBuffer ), - reinterpret_cast( &pushDescriptorSetInfo ) ); + reinterpret_cast( &pushDescriptorSetInfo ) ); } VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetWithTemplate2KHR( - const VULKAN_HPP_NAMESPACE::PushDescriptorSetWithTemplateInfoKHR & pushDescriptorSetWithTemplateInfo ) const VULKAN_HPP_NOEXCEPT + const VULKAN_HPP_NAMESPACE::PushDescriptorSetWithTemplateInfo & pushDescriptorSetWithTemplateInfo ) const VULKAN_HPP_NOEXCEPT { VULKAN_HPP_ASSERT( getDispatcher()->vkCmdPushDescriptorSetWithTemplate2KHR && - "Function requires " ); + "Function requires or " ); getDispatcher()->vkCmdPushDescriptorSetWithTemplate2KHR( - static_cast( m_commandBuffer ), - reinterpret_cast( &pushDescriptorSetWithTemplateInfo ) ); + static_cast( m_commandBuffer ), reinterpret_cast( &pushDescriptorSetWithTemplateInfo ) ); } VULKAN_HPP_INLINE void CommandBuffer::setDescriptorBufferOffsets2EXT( diff --git a/third_party/vulkan/vulkan_static_assertions.hpp b/third_party/vulkan/vulkan_static_assertions.hpp index 
a11f982..108374c 100644 --- a/third_party/vulkan/vulkan_static_assertions.hpp +++ b/third_party/vulkan/vulkan_static_assertions.hpp @@ -1811,6 +1811,333 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "DeviceImageMemoryRequirements is not nothrow_move_constructible!" ); +//=== VK_VERSION_1_4 === + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan14Features ) == sizeof( VkPhysicalDeviceVulkan14Features ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceVulkan14Features is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan14Properties ) == sizeof( VkPhysicalDeviceVulkan14Properties ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceVulkan14Properties is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceQueueGlobalPriorityCreateInfo ) == sizeof( VkDeviceQueueGlobalPriorityCreateInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DeviceQueueGlobalPriorityCreateInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceGlobalPriorityQueryFeatures ) == sizeof( VkPhysicalDeviceGlobalPriorityQueryFeatures ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceGlobalPriorityQueryFeatures is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::QueueFamilyGlobalPriorityProperties ) == sizeof( VkQueueFamilyGlobalPriorityProperties ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "QueueFamilyGlobalPriorityProperties is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupRotateFeatures ) == sizeof( VkPhysicalDeviceShaderSubgroupRotateFeatures ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceShaderSubgroupRotateFeatures is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderFloatControls2Features ) == sizeof( VkPhysicalDeviceShaderFloatControls2Features ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceShaderFloatControls2Features is not nothrow_move_constructible!" 
); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderExpectAssumeFeatures ) == sizeof( VkPhysicalDeviceShaderExpectAssumeFeatures ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceShaderExpectAssumeFeatures is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationFeatures ) == sizeof( VkPhysicalDeviceLineRasterizationFeatures ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceLineRasterizationFeatures is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationProperties ) == sizeof( VkPhysicalDeviceLineRasterizationProperties ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceLineRasterizationProperties is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineRasterizationLineStateCreateInfo ) == sizeof( VkPipelineRasterizationLineStateCreateInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PipelineRasterizationLineStateCreateInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorProperties ) == + sizeof( VkPhysicalDeviceVertexAttributeDivisorProperties ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceVertexAttributeDivisorProperties is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescription ) == sizeof( VkVertexInputBindingDivisorDescription ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "VertexInputBindingDivisorDescription is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineVertexInputDivisorStateCreateInfo ) == sizeof( VkPipelineVertexInputDivisorStateCreateInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PipelineVertexInputDivisorStateCreateInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorFeatures ) == + sizeof( VkPhysicalDeviceVertexAttributeDivisorFeatures ), + "struct and wrapper have different size!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceVertexAttributeDivisorFeatures is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceIndexTypeUint8Features ) == sizeof( VkPhysicalDeviceIndexTypeUint8Features ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceIndexTypeUint8Features is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryMapInfo ) == sizeof( VkMemoryMapInfo ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, "MemoryMapInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryUnmapInfo ) == sizeof( VkMemoryUnmapInfo ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "MemoryUnmapInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance5Features ) == sizeof( VkPhysicalDeviceMaintenance5Features ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceMaintenance5Features is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance5Properties ) == sizeof( VkPhysicalDeviceMaintenance5Properties ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceMaintenance5Properties is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderingAreaInfo ) == sizeof( VkRenderingAreaInfo ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "RenderingAreaInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfo ) == sizeof( VkDeviceImageSubresourceInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "DeviceImageSubresourceInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageSubresource2 ) == sizeof( VkImageSubresource2 ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ImageSubresource2 is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubresourceLayout2 ) == sizeof( VkSubresourceLayout2 ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SubresourceLayout2 is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineCreateFlags2CreateInfo ) == sizeof( VkPipelineCreateFlags2CreateInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PipelineCreateFlags2CreateInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferUsageFlags2CreateInfo ) == sizeof( VkBufferUsageFlags2CreateInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "BufferUsageFlags2CreateInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePushDescriptorProperties ) == sizeof( VkPhysicalDevicePushDescriptorProperties ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDevicePushDescriptorProperties is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDynamicRenderingLocalReadFeatures ) == + sizeof( VkPhysicalDeviceDynamicRenderingLocalReadFeatures ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceDynamicRenderingLocalReadFeatures is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderingAttachmentLocationInfo ) == sizeof( VkRenderingAttachmentLocationInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "RenderingAttachmentLocationInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderingInputAttachmentIndexInfo ) == sizeof( VkRenderingInputAttachmentIndexInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "RenderingInputAttachmentIndexInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance6Features ) == sizeof( VkPhysicalDeviceMaintenance6Features ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceMaintenance6Features is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance6Properties ) == sizeof( VkPhysicalDeviceMaintenance6Properties ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceMaintenance6Properties is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindMemoryStatus ) == sizeof( VkBindMemoryStatus ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "BindMemoryStatus is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindDescriptorSetsInfo ) == sizeof( VkBindDescriptorSetsInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "BindDescriptorSetsInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PushConstantsInfo ) == sizeof( VkPushConstantsInfo ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PushConstantsInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PushDescriptorSetInfo ) == sizeof( VkPushDescriptorSetInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PushDescriptorSetInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PushDescriptorSetWithTemplateInfo ) == sizeof( VkPushDescriptorSetWithTemplateInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PushDescriptorSetWithTemplateInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineProtectedAccessFeatures ) == + sizeof( VkPhysicalDevicePipelineProtectedAccessFeatures ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDevicePipelineProtectedAccessFeatures is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineRobustnessFeatures ) == sizeof( VkPhysicalDevicePipelineRobustnessFeatures ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDevicePipelineRobustnessFeatures is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineRobustnessProperties ) == sizeof( VkPhysicalDevicePipelineRobustnessProperties ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDevicePipelineRobustnessProperties is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineRobustnessCreateInfo ) == sizeof( VkPipelineRobustnessCreateInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PipelineRobustnessCreateInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceHostImageCopyFeatures ) == sizeof( VkPhysicalDeviceHostImageCopyFeatures ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceHostImageCopyFeatures is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceHostImageCopyProperties ) == sizeof( VkPhysicalDeviceHostImageCopyProperties ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "PhysicalDeviceHostImageCopyProperties is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryToImageCopy ) == sizeof( VkMemoryToImageCopy ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "MemoryToImageCopy is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageToMemoryCopy ) == sizeof( VkImageToMemoryCopy ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "ImageToMemoryCopy is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfo ) == sizeof( VkCopyMemoryToImageInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "CopyMemoryToImageInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfo ) == sizeof( VkCopyImageToMemoryInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "CopyImageToMemoryInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyImageToImageInfo ) == sizeof( VkCopyImageToImageInfo ), "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "CopyImageToImageInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::HostImageLayoutTransitionInfo ) == sizeof( VkHostImageLayoutTransitionInfo ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "HostImageLayoutTransitionInfo is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubresourceHostMemcpySize ) == sizeof( VkSubresourceHostMemcpySize ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "SubresourceHostMemcpySize is not nothrow_move_constructible!" ); + +VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::HostImageCopyDevicePerformanceQuery ) == sizeof( VkHostImageCopyDevicePerformanceQuery ), + "struct and wrapper have different size!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, + "struct wrapper is not a standard layout!" ); +VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, + "HostImageCopyDevicePerformanceQuery is not nothrow_move_constructible!" ); + //=== VK_KHR_surface === VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SurfaceKHR ) == sizeof( VkSurfaceKHR ), "handle and wrapper have different size!" ); @@ -2648,30 +2975,6 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "PhysicalDeviceASTCDecodeFeaturesEXT is not nothrow_move_constructible!" ); -//=== VK_EXT_pipeline_robustness === - -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineRobustnessFeaturesEXT ) == - sizeof( VkPhysicalDevicePipelineRobustnessFeaturesEXT ), - "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, - "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "PhysicalDevicePipelineRobustnessFeaturesEXT is not nothrow_move_constructible!" ); - -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineRobustnessPropertiesEXT ) == - sizeof( VkPhysicalDevicePipelineRobustnessPropertiesEXT ), - "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, - "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "PhysicalDevicePipelineRobustnessPropertiesEXT is not nothrow_move_constructible!" ); - -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineRobustnessCreateInfoEXT ) == sizeof( VkPipelineRobustnessCreateInfoEXT ), - "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" 
); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "PipelineRobustnessCreateInfoEXT is not nothrow_move_constructible!" ); - #if defined( VK_USE_PLATFORM_WIN32_KHR ) //=== VK_KHR_external_memory_win32 === @@ -2772,15 +3075,6 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "SemaphoreGetFdInfoKHR is not nothrow_move_constructible!" ); -//=== VK_KHR_push_descriptor === - -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePushDescriptorPropertiesKHR ) == sizeof( VkPhysicalDevicePushDescriptorPropertiesKHR ), - "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, - "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "PhysicalDevicePushDescriptorPropertiesKHR is not nothrow_move_constructible!" ); - //=== VK_EXT_conditional_rendering === VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT ) == sizeof( VkConditionalRenderingBeginInfoEXT ), @@ -3998,30 +4292,6 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "VideoDecodeH265DpbSlotInfoKHR is not nothrow_move_constructible!" ); -//=== VK_KHR_global_priority === - -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceQueueGlobalPriorityCreateInfoKHR ) == sizeof( VkDeviceQueueGlobalPriorityCreateInfoKHR ), - "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, - "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "DeviceQueueGlobalPriorityCreateInfoKHR is not nothrow_move_constructible!" ); - -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceGlobalPriorityQueryFeaturesKHR ) == - sizeof( VkPhysicalDeviceGlobalPriorityQueryFeaturesKHR ), - "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, - "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "PhysicalDeviceGlobalPriorityQueryFeaturesKHR is not nothrow_move_constructible!" ); - -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::QueueFamilyGlobalPriorityPropertiesKHR ) == sizeof( VkQueueFamilyGlobalPriorityPropertiesKHR ), - "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, - "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "QueueFamilyGlobalPriorityPropertiesKHR is not nothrow_move_constructible!" ); - //=== VK_AMD_memory_overallocation_behavior === VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceMemoryOverallocationCreateInfoAMD ) == sizeof( VkDeviceMemoryOverallocationCreateInfoAMD ), @@ -4337,30 +4607,6 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "PhysicalDeviceCoherentMemoryFeaturesAMD is not nothrow_move_constructible!" ); -//=== VK_KHR_dynamic_rendering_local_read === - -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDynamicRenderingLocalReadFeaturesKHR ) == - sizeof( VkPhysicalDeviceDynamicRenderingLocalReadFeaturesKHR ), - "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, - "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "PhysicalDeviceDynamicRenderingLocalReadFeaturesKHR is not nothrow_move_constructible!" 
); - -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderingAttachmentLocationInfoKHR ) == sizeof( VkRenderingAttachmentLocationInfoKHR ), - "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, - "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "RenderingAttachmentLocationInfoKHR is not nothrow_move_constructible!" ); - -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderingInputAttachmentIndexInfoKHR ) == sizeof( VkRenderingInputAttachmentIndexInfoKHR ), - "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, - "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "RenderingInputAttachmentIndexInfoKHR is not nothrow_move_constructible!" ); - //=== VK_EXT_shader_image_atomic_int64 === VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderImageAtomicInt64FeaturesEXT ) == @@ -4651,81 +4897,6 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "PipelineExecutableInternalRepresentationKHR is not nothrow_move_constructible!" ); -//=== VK_EXT_host_image_copy === - -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceHostImageCopyFeaturesEXT ) == sizeof( VkPhysicalDeviceHostImageCopyFeaturesEXT ), - "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, - "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "PhysicalDeviceHostImageCopyFeaturesEXT is not nothrow_move_constructible!" ); - -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceHostImageCopyPropertiesEXT ) == sizeof( VkPhysicalDeviceHostImageCopyPropertiesEXT ), - "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, - "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "PhysicalDeviceHostImageCopyPropertiesEXT is not nothrow_move_constructible!" ); - -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryToImageCopyEXT ) == sizeof( VkMemoryToImageCopyEXT ), "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "MemoryToImageCopyEXT is not nothrow_move_constructible!" ); - -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageToMemoryCopyEXT ) == sizeof( VkImageToMemoryCopyEXT ), "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "ImageToMemoryCopyEXT is not nothrow_move_constructible!" ); - -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfoEXT ) == sizeof( VkCopyMemoryToImageInfoEXT ), - "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "CopyMemoryToImageInfoEXT is not nothrow_move_constructible!" 
); - -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfoEXT ) == sizeof( VkCopyImageToMemoryInfoEXT ), - "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "CopyImageToMemoryInfoEXT is not nothrow_move_constructible!" ); - -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyImageToImageInfoEXT ) == sizeof( VkCopyImageToImageInfoEXT ), - "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "CopyImageToImageInfoEXT is not nothrow_move_constructible!" ); - -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::HostImageLayoutTransitionInfoEXT ) == sizeof( VkHostImageLayoutTransitionInfoEXT ), - "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "HostImageLayoutTransitionInfoEXT is not nothrow_move_constructible!" ); - -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubresourceHostMemcpySizeEXT ) == sizeof( VkSubresourceHostMemcpySizeEXT ), - "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "SubresourceHostMemcpySizeEXT is not nothrow_move_constructible!" ); - -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::HostImageCopyDevicePerformanceQueryEXT ) == sizeof( VkHostImageCopyDevicePerformanceQueryEXT ), - "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, - "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "HostImageCopyDevicePerformanceQueryEXT is not nothrow_move_constructible!" ); - -//=== VK_KHR_map_memory2 === - -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryMapInfoKHR ) == sizeof( VkMemoryMapInfoKHR ), "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "MemoryMapInfoKHR is not nothrow_move_constructible!" ); - -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryUnmapInfoKHR ) == sizeof( VkMemoryUnmapInfoKHR ), "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "MemoryUnmapInfoKHR is not nothrow_move_constructible!" ); - //=== VK_EXT_map_memory_placed === VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMapMemoryPlacedFeaturesEXT ) == sizeof( VkPhysicalDeviceMapMemoryPlacedFeaturesEXT ), @@ -6314,16 +6485,6 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "PhysicalDeviceShaderCorePropertiesARM is not nothrow_move_constructible!" 
); -//=== VK_KHR_shader_subgroup_rotate === - -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupRotateFeaturesKHR ) == - sizeof( VkPhysicalDeviceShaderSubgroupRotateFeaturesKHR ), - "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, - "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "PhysicalDeviceShaderSubgroupRotateFeaturesKHR is not nothrow_move_constructible!" ); - //=== VK_ARM_scheduling_controls === VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceQueueShaderCoreControlCreateInfoARM ) == sizeof( VkDeviceQueueShaderCoreControlCreateInfoARM ), @@ -6823,16 +6984,6 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "PhysicalDeviceLegacyDitheringFeaturesEXT is not nothrow_move_constructible!" ); -//=== VK_EXT_pipeline_protected_access === - -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineProtectedAccessFeaturesEXT ) == - sizeof( VkPhysicalDevicePipelineProtectedAccessFeaturesEXT ), - "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, - "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "PhysicalDevicePipelineProtectedAccessFeaturesEXT is not nothrow_move_constructible!" ); - #if defined( VK_USE_PLATFORM_ANDROID_KHR ) //=== VK_ANDROID_external_format_resolve === @@ -6861,56 +7012,6 @@ VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "PhysicalDeviceMaintenance5FeaturesKHR is not nothrow_move_constructible!" ); - -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance5PropertiesKHR ) == sizeof( VkPhysicalDeviceMaintenance5PropertiesKHR ), - "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, - "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "PhysicalDeviceMaintenance5PropertiesKHR is not nothrow_move_constructible!" ); - -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderingAreaInfoKHR ) == sizeof( VkRenderingAreaInfoKHR ), "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "RenderingAreaInfoKHR is not nothrow_move_constructible!" ); - -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfoKHR ) == sizeof( VkDeviceImageSubresourceInfoKHR ), - "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "DeviceImageSubresourceInfoKHR is not nothrow_move_constructible!" ); - -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageSubresource2KHR ) == sizeof( VkImageSubresource2KHR ), "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "ImageSubresource2KHR is not nothrow_move_constructible!" 
); - -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR ) == sizeof( VkSubresourceLayout2KHR ), - "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "SubresourceLayout2KHR is not nothrow_move_constructible!" ); - -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineCreateFlags2CreateInfoKHR ) == sizeof( VkPipelineCreateFlags2CreateInfoKHR ), - "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "PipelineCreateFlags2CreateInfoKHR is not nothrow_move_constructible!" ); - -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferUsageFlags2CreateInfoKHR ) == sizeof( VkBufferUsageFlags2CreateInfoKHR ), - "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "BufferUsageFlags2CreateInfoKHR is not nothrow_move_constructible!" ); - //=== VK_AMD_anti_lag === VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceAntiLagFeaturesAMD ) == sizeof( VkPhysicalDeviceAntiLagFeaturesAMD ), @@ -7556,49 +7657,6 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT is not nothrow_move_constructible!" ); -//=== VK_KHR_vertex_attribute_divisor === - -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorPropertiesKHR ) == - sizeof( VkPhysicalDeviceVertexAttributeDivisorPropertiesKHR ), - "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, - "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "PhysicalDeviceVertexAttributeDivisorPropertiesKHR is not nothrow_move_constructible!" ); - -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionKHR ) == sizeof( VkVertexInputBindingDivisorDescriptionKHR ), - "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, - "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "VertexInputBindingDivisorDescriptionKHR is not nothrow_move_constructible!" ); - -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineVertexInputDivisorStateCreateInfoKHR ) == - sizeof( VkPipelineVertexInputDivisorStateCreateInfoKHR ), - "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, - "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "PipelineVertexInputDivisorStateCreateInfoKHR is not nothrow_move_constructible!" ); - -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorFeaturesKHR ) == - sizeof( VkPhysicalDeviceVertexAttributeDivisorFeaturesKHR ), - "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, - "struct wrapper is not a standard layout!" 
); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "PhysicalDeviceVertexAttributeDivisorFeaturesKHR is not nothrow_move_constructible!" ); - -//=== VK_KHR_shader_float_controls2 === - -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderFloatControls2FeaturesKHR ) == - sizeof( VkPhysicalDeviceShaderFloatControls2FeaturesKHR ), - "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, - "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "PhysicalDeviceShaderFloatControls2FeaturesKHR is not nothrow_move_constructible!" ); - #if defined( VK_USE_PLATFORM_SCREEN_QNX ) //=== VK_QNX_external_memory_screen_buffer === @@ -7643,40 +7701,6 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "PhysicalDeviceLayeredDriverPropertiesMSFT is not nothrow_move_constructible!" ); -//=== VK_KHR_index_type_uint8 === - -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceIndexTypeUint8FeaturesKHR ) == sizeof( VkPhysicalDeviceIndexTypeUint8FeaturesKHR ), - "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, - "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "PhysicalDeviceIndexTypeUint8FeaturesKHR is not nothrow_move_constructible!" ); - -//=== VK_KHR_line_rasterization === - -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationFeaturesKHR ) == sizeof( VkPhysicalDeviceLineRasterizationFeaturesKHR ), - "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, - "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "PhysicalDeviceLineRasterizationFeaturesKHR is not nothrow_move_constructible!" ); - -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationPropertiesKHR ) == - sizeof( VkPhysicalDeviceLineRasterizationPropertiesKHR ), - "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, - "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "PhysicalDeviceLineRasterizationPropertiesKHR is not nothrow_move_constructible!" ); - -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineRasterizationLineStateCreateInfoKHR ) == - sizeof( VkPipelineRasterizationLineStateCreateInfoKHR ), - "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, - "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "PipelineRasterizationLineStateCreateInfoKHR is not nothrow_move_constructible!" ); - //=== VK_KHR_calibrated_timestamps === VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoKHR ) == sizeof( VkCalibratedTimestampInfoKHR ), @@ -7685,61 +7709,8 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "CalibratedTimestampInfoKHR is not nothrow_move_constructible!" ); -//=== VK_KHR_shader_expect_assume === - -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderExpectAssumeFeaturesKHR ) == - sizeof( VkPhysicalDeviceShaderExpectAssumeFeaturesKHR ), - "struct and wrapper have different size!" 
); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, - "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "PhysicalDeviceShaderExpectAssumeFeaturesKHR is not nothrow_move_constructible!" ); - //=== VK_KHR_maintenance6 === -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance6FeaturesKHR ) == sizeof( VkPhysicalDeviceMaintenance6FeaturesKHR ), - "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, - "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "PhysicalDeviceMaintenance6FeaturesKHR is not nothrow_move_constructible!" ); - -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance6PropertiesKHR ) == sizeof( VkPhysicalDeviceMaintenance6PropertiesKHR ), - "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, - "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "PhysicalDeviceMaintenance6PropertiesKHR is not nothrow_move_constructible!" ); - -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindMemoryStatusKHR ) == sizeof( VkBindMemoryStatusKHR ), "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "BindMemoryStatusKHR is not nothrow_move_constructible!" ); - -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindDescriptorSetsInfoKHR ) == sizeof( VkBindDescriptorSetsInfoKHR ), - "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "BindDescriptorSetsInfoKHR is not nothrow_move_constructible!" ); - -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PushConstantsInfoKHR ) == sizeof( VkPushConstantsInfoKHR ), "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "PushConstantsInfoKHR is not nothrow_move_constructible!" ); - -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PushDescriptorSetInfoKHR ) == sizeof( VkPushDescriptorSetInfoKHR ), - "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "PushDescriptorSetInfoKHR is not nothrow_move_constructible!" ); - -VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PushDescriptorSetWithTemplateInfoKHR ) == sizeof( VkPushDescriptorSetWithTemplateInfoKHR ), - "struct and wrapper have different size!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, - "struct wrapper is not a standard layout!" ); -VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible::value, - "PushDescriptorSetWithTemplateInfoKHR is not nothrow_move_constructible!" ); - VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SetDescriptorBufferOffsetsInfoEXT ) == sizeof( VkSetDescriptorBufferOffsetsInfoEXT ), "struct and wrapper have different size!" 
); VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "struct wrapper is not a standard layout!" ); @@ -8209,8 +8180,6 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "PhysicalDeviceCooperativeMatrix2PropertiesNV is not nothrow_move_constructible!" ); -<<<<<<< HEAD -======= //=== VK_EXT_vertex_attribute_robustness === VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeRobustnessFeaturesEXT ) == @@ -8221,5 +8190,4 @@ VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout::value, "PhysicalDeviceVertexAttributeRobustnessFeaturesEXT is not nothrow_move_constructible!" ); ->>>>>>> indev #endif diff --git a/third_party/vulkan/vulkan_structs.hpp b/third_party/vulkan/vulkan_structs.hpp index e2937f9..054482c 100644 --- a/third_party/vulkan/vulkan_structs.hpp +++ b/third_party/vulkan/vulkan_structs.hpp @@ -7839,22 +7839,22 @@ namespace VULKAN_HPP_NAMESPACE using Type = BindDescriptorBufferEmbeddedSamplersInfoEXT; }; - struct BindDescriptorSetsInfoKHR + struct BindDescriptorSetsInfo { - using NativeType = VkBindDescriptorSetsInfoKHR; + using NativeType = VkBindDescriptorSetsInfo; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindDescriptorSetsInfoKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindDescriptorSetsInfo; #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR BindDescriptorSetsInfoKHR( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ = {}, - VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, - uint32_t firstSet_ = {}, - uint32_t descriptorSetCount_ = {}, - const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets_ = {}, - uint32_t dynamicOffsetCount_ = {}, - const uint32_t * pDynamicOffsets_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR BindDescriptorSetsInfo( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ = {}, + VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, + uint32_t firstSet_ = {}, + uint32_t descriptorSetCount_ = {}, + const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets_ = {}, + uint32_t dynamicOffsetCount_ = {}, + const uint32_t * pDynamicOffsets_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , stageFlags{ stageFlags_ } , layout{ layout_ } @@ -7866,20 +7866,20 @@ namespace VULKAN_HPP_NAMESPACE { } - VULKAN_HPP_CONSTEXPR BindDescriptorSetsInfoKHR( BindDescriptorSetsInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR BindDescriptorSetsInfo( BindDescriptorSetsInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - BindDescriptorSetsInfoKHR( VkBindDescriptorSetsInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : BindDescriptorSetsInfoKHR( *reinterpret_cast( &rhs ) ) + BindDescriptorSetsInfo( VkBindDescriptorSetsInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : BindDescriptorSetsInfo( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - BindDescriptorSetsInfoKHR( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_, - VULKAN_HPP_NAMESPACE::PipelineLayout layout_, - uint32_t firstSet_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & descriptorSets_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & dynamicOffsets_ = {}, - const void * pNext_ = nullptr ) + BindDescriptorSetsInfo( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_, + VULKAN_HPP_NAMESPACE::PipelineLayout layout_, + uint32_t firstSet_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & 
descriptorSets_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & dynamicOffsets_ = {}, + const void * pNext_ = nullptr ) : pNext( pNext_ ) , stageFlags( stageFlags_ ) , layout( layout_ ) @@ -7892,54 +7892,54 @@ namespace VULKAN_HPP_NAMESPACE } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - BindDescriptorSetsInfoKHR & operator=( BindDescriptorSetsInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + BindDescriptorSetsInfo & operator=( BindDescriptorSetsInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - BindDescriptorSetsInfoKHR & operator=( VkBindDescriptorSetsInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + BindDescriptorSetsInfo & operator=( VkBindDescriptorSetsInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 BindDescriptorSetsInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 BindDescriptorSetsInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 BindDescriptorSetsInfoKHR & setStageFlags( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 BindDescriptorSetsInfo & setStageFlags( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ ) VULKAN_HPP_NOEXCEPT { stageFlags = stageFlags_; return *this; } - VULKAN_HPP_CONSTEXPR_14 BindDescriptorSetsInfoKHR & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 BindDescriptorSetsInfo & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT { layout = layout_; return *this; } - VULKAN_HPP_CONSTEXPR_14 BindDescriptorSetsInfoKHR & setFirstSet( uint32_t firstSet_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 BindDescriptorSetsInfo & setFirstSet( uint32_t firstSet_ ) VULKAN_HPP_NOEXCEPT { firstSet = firstSet_; return *this; } - VULKAN_HPP_CONSTEXPR_14 BindDescriptorSetsInfoKHR & setDescriptorSetCount( uint32_t descriptorSetCount_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 BindDescriptorSetsInfo & setDescriptorSetCount( uint32_t descriptorSetCount_ ) VULKAN_HPP_NOEXCEPT { descriptorSetCount = descriptorSetCount_; return *this; } - VULKAN_HPP_CONSTEXPR_14 BindDescriptorSetsInfoKHR & setPDescriptorSets( const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 BindDescriptorSetsInfo & setPDescriptorSets( const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets_ ) VULKAN_HPP_NOEXCEPT { pDescriptorSets = pDescriptorSets_; return *this; } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - BindDescriptorSetsInfoKHR & + BindDescriptorSetsInfo & setDescriptorSets( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & descriptorSets_ ) VULKAN_HPP_NOEXCEPT { descriptorSetCount = static_cast( descriptorSets_.size() ); @@ -7948,20 +7948,20 @@ namespace VULKAN_HPP_NAMESPACE } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - VULKAN_HPP_CONSTEXPR_14 BindDescriptorSetsInfoKHR & setDynamicOffsetCount( uint32_t dynamicOffsetCount_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 BindDescriptorSetsInfo & setDynamicOffsetCount( uint32_t dynamicOffsetCount_ ) VULKAN_HPP_NOEXCEPT { dynamicOffsetCount = dynamicOffsetCount_; return *this; } - VULKAN_HPP_CONSTEXPR_14 BindDescriptorSetsInfoKHR & setPDynamicOffsets( const uint32_t * pDynamicOffsets_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 
BindDescriptorSetsInfo & setPDynamicOffsets( const uint32_t * pDynamicOffsets_ ) VULKAN_HPP_NOEXCEPT { pDynamicOffsets = pDynamicOffsets_; return *this; } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - BindDescriptorSetsInfoKHR & setDynamicOffsets( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & dynamicOffsets_ ) VULKAN_HPP_NOEXCEPT + BindDescriptorSetsInfo & setDynamicOffsets( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & dynamicOffsets_ ) VULKAN_HPP_NOEXCEPT { dynamicOffsetCount = static_cast( dynamicOffsets_.size() ); pDynamicOffsets = dynamicOffsets_.data(); @@ -7970,14 +7970,14 @@ namespace VULKAN_HPP_NAMESPACE # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkBindDescriptorSetsInfoKHR const &() const VULKAN_HPP_NOEXCEPT + operator VkBindDescriptorSetsInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkBindDescriptorSetsInfoKHR &() VULKAN_HPP_NOEXCEPT + operator VkBindDescriptorSetsInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -8001,9 +8001,9 @@ namespace VULKAN_HPP_NAMESPACE #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( BindDescriptorSetsInfoKHR const & ) const = default; + auto operator<=>( BindDescriptorSetsInfo const & ) const = default; #else - bool operator==( BindDescriptorSetsInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( BindDescriptorSetsInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); @@ -8014,14 +8014,14 @@ namespace VULKAN_HPP_NAMESPACE # endif } - bool operator!=( BindDescriptorSetsInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( BindDescriptorSetsInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindDescriptorSetsInfoKHR; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindDescriptorSetsInfo; const void * pNext = {}; VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags = {}; VULKAN_HPP_NAMESPACE::PipelineLayout layout = {}; @@ -8033,11 +8033,13 @@ namespace VULKAN_HPP_NAMESPACE }; template <> - struct CppType + struct CppType { - using Type = BindDescriptorSetsInfoKHR; + using Type = BindDescriptorSetsInfo; }; + using BindDescriptorSetsInfoKHR = BindDescriptorSetsInfo; + struct Offset2D { using NativeType = VkOffset2D; @@ -8889,57 +8891,55 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::IndexType indexType = VULKAN_HPP_NAMESPACE::IndexType::eUint16; }; - struct BindMemoryStatusKHR + struct BindMemoryStatus { - using NativeType = VkBindMemoryStatusKHR; + using NativeType = VkBindMemoryStatus; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindMemoryStatusKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindMemoryStatus; #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR BindMemoryStatusKHR( VULKAN_HPP_NAMESPACE::Result * pResult_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR BindMemoryStatus( VULKAN_HPP_NAMESPACE::Result * pResult_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , pResult{ pResult_ } { } - VULKAN_HPP_CONSTEXPR BindMemoryStatusKHR( 
BindMemoryStatusKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR BindMemoryStatus( BindMemoryStatus const & rhs ) VULKAN_HPP_NOEXCEPT = default; - BindMemoryStatusKHR( VkBindMemoryStatusKHR const & rhs ) VULKAN_HPP_NOEXCEPT : BindMemoryStatusKHR( *reinterpret_cast( &rhs ) ) - { - } + BindMemoryStatus( VkBindMemoryStatus const & rhs ) VULKAN_HPP_NOEXCEPT : BindMemoryStatus( *reinterpret_cast( &rhs ) ) {} - BindMemoryStatusKHR & operator=( BindMemoryStatusKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + BindMemoryStatus & operator=( BindMemoryStatus const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - BindMemoryStatusKHR & operator=( VkBindMemoryStatusKHR const & rhs ) VULKAN_HPP_NOEXCEPT + BindMemoryStatus & operator=( VkBindMemoryStatus const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 BindMemoryStatusKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 BindMemoryStatus & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 BindMemoryStatusKHR & setPResult( VULKAN_HPP_NAMESPACE::Result * pResult_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 BindMemoryStatus & setPResult( VULKAN_HPP_NAMESPACE::Result * pResult_ ) VULKAN_HPP_NOEXCEPT { pResult = pResult_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkBindMemoryStatusKHR const &() const VULKAN_HPP_NOEXCEPT + operator VkBindMemoryStatus const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkBindMemoryStatusKHR &() VULKAN_HPP_NOEXCEPT + operator VkBindMemoryStatus &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -8955,9 +8955,9 @@ namespace VULKAN_HPP_NAMESPACE #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( BindMemoryStatusKHR const & ) const = default; + auto operator<=>( BindMemoryStatus const & ) const = default; #else - bool operator==( BindMemoryStatusKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( BindMemoryStatus const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); @@ -8966,24 +8966,26 @@ namespace VULKAN_HPP_NAMESPACE # endif } - bool operator!=( BindMemoryStatusKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( BindMemoryStatus const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindMemoryStatusKHR; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindMemoryStatus; const void * pNext = {}; VULKAN_HPP_NAMESPACE::Result * pResult = {}; }; template <> - struct CppType + struct CppType { - using Type = BindMemoryStatusKHR; + using Type = BindMemoryStatus; }; + using BindMemoryStatusKHR = BindMemoryStatus; + struct BindPipelineIndirectCommandNV { using NativeType = VkBindPipelineIndirectCommandNV; @@ -13444,66 +13446,65 @@ namespace VULKAN_HPP_NAMESPACE using BufferOpaqueCaptureAddressCreateInfoKHR = BufferOpaqueCaptureAddressCreateInfo; - struct BufferUsageFlags2CreateInfoKHR + struct BufferUsageFlags2CreateInfo { - using NativeType = VkBufferUsageFlags2CreateInfoKHR; + using NativeType = VkBufferUsageFlags2CreateInfo; static 
const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferUsageFlags2CreateInfoKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferUsageFlags2CreateInfo; #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR BufferUsageFlags2CreateInfoKHR( VULKAN_HPP_NAMESPACE::BufferUsageFlags2KHR usage_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR BufferUsageFlags2CreateInfo( VULKAN_HPP_NAMESPACE::BufferUsageFlags2 usage_ = {}, const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , usage{ usage_ } { } - VULKAN_HPP_CONSTEXPR BufferUsageFlags2CreateInfoKHR( BufferUsageFlags2CreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR BufferUsageFlags2CreateInfo( BufferUsageFlags2CreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - BufferUsageFlags2CreateInfoKHR( VkBufferUsageFlags2CreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : BufferUsageFlags2CreateInfoKHR( *reinterpret_cast( &rhs ) ) + BufferUsageFlags2CreateInfo( VkBufferUsageFlags2CreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : BufferUsageFlags2CreateInfo( *reinterpret_cast( &rhs ) ) { } - BufferUsageFlags2CreateInfoKHR & operator=( BufferUsageFlags2CreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + BufferUsageFlags2CreateInfo & operator=( BufferUsageFlags2CreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - BufferUsageFlags2CreateInfoKHR & operator=( VkBufferUsageFlags2CreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + BufferUsageFlags2CreateInfo & operator=( VkBufferUsageFlags2CreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 BufferUsageFlags2CreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 BufferUsageFlags2CreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 BufferUsageFlags2CreateInfoKHR & setUsage( VULKAN_HPP_NAMESPACE::BufferUsageFlags2KHR usage_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 BufferUsageFlags2CreateInfo & setUsage( VULKAN_HPP_NAMESPACE::BufferUsageFlags2 usage_ ) VULKAN_HPP_NOEXCEPT { usage = usage_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkBufferUsageFlags2CreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + operator VkBufferUsageFlags2CreateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkBufferUsageFlags2CreateInfoKHR &() VULKAN_HPP_NOEXCEPT + operator VkBufferUsageFlags2CreateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { @@ -13512,9 +13513,9 @@ namespace VULKAN_HPP_NAMESPACE #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( BufferUsageFlags2CreateInfoKHR const & ) const = default; + auto operator<=>( BufferUsageFlags2CreateInfo const & ) const = default; #else - bool operator==( BufferUsageFlags2CreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( BufferUsageFlags2CreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if 
defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); @@ -13523,24 +13524,26 @@ namespace VULKAN_HPP_NAMESPACE # endif } - bool operator!=( BufferUsageFlags2CreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( BufferUsageFlags2CreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferUsageFlags2CreateInfoKHR; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::BufferUsageFlags2KHR usage = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferUsageFlags2CreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::BufferUsageFlags2 usage = {}; }; template <> - struct CppType + struct CppType { - using Type = BufferUsageFlags2CreateInfoKHR; + using Type = BufferUsageFlags2CreateInfo; }; + using BufferUsageFlags2CreateInfoKHR = BufferUsageFlags2CreateInfo; + struct BufferViewCreateInfo { using NativeType = VkBufferViewCreateInfo; @@ -18530,22 +18533,22 @@ namespace VULKAN_HPP_NAMESPACE using CopyImageToBufferInfo2KHR = CopyImageToBufferInfo2; - struct CopyImageToImageInfoEXT + struct CopyImageToImageInfo { - using NativeType = VkCopyImageToImageInfoEXT; + using NativeType = VkCopyImageToImageInfo; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyImageToImageInfoEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyImageToImageInfo; #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR CopyImageToImageInfoEXT( VULKAN_HPP_NAMESPACE::HostImageCopyFlagsEXT flags_ = {}, - VULKAN_HPP_NAMESPACE::Image srcImage_ = {}, - VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, - VULKAN_HPP_NAMESPACE::Image dstImage_ = {}, - VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, - uint32_t regionCount_ = {}, - const VULKAN_HPP_NAMESPACE::ImageCopy2 * pRegions_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR CopyImageToImageInfo( VULKAN_HPP_NAMESPACE::HostImageCopyFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::Image srcImage_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + VULKAN_HPP_NAMESPACE::Image dstImage_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + uint32_t regionCount_ = {}, + const VULKAN_HPP_NAMESPACE::ImageCopy2 * pRegions_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , flags{ flags_ } , srcImage{ srcImage_ } @@ -18557,21 +18560,21 @@ namespace VULKAN_HPP_NAMESPACE { } - VULKAN_HPP_CONSTEXPR CopyImageToImageInfoEXT( CopyImageToImageInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR CopyImageToImageInfo( CopyImageToImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - CopyImageToImageInfoEXT( VkCopyImageToImageInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : CopyImageToImageInfoEXT( *reinterpret_cast( &rhs ) ) + CopyImageToImageInfo( VkCopyImageToImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : CopyImageToImageInfo( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - CopyImageToImageInfoEXT( VULKAN_HPP_NAMESPACE::HostImageCopyFlagsEXT flags_, - VULKAN_HPP_NAMESPACE::Image srcImage_, - VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_, - VULKAN_HPP_NAMESPACE::Image 
dstImage_, - VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_, - const void * pNext_ = nullptr ) + CopyImageToImageInfo( VULKAN_HPP_NAMESPACE::HostImageCopyFlags flags_, + VULKAN_HPP_NAMESPACE::Image srcImage_, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_, + VULKAN_HPP_NAMESPACE::Image dstImage_, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_, + const void * pNext_ = nullptr ) : pNext( pNext_ ) , flags( flags_ ) , srcImage( srcImage_ ) @@ -18584,66 +18587,66 @@ namespace VULKAN_HPP_NAMESPACE } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - CopyImageToImageInfoEXT & operator=( CopyImageToImageInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + CopyImageToImageInfo & operator=( CopyImageToImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - CopyImageToImageInfoEXT & operator=( VkCopyImageToImageInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + CopyImageToImageInfo & operator=( VkCopyImageToImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 CopyImageToImageInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CopyImageToImageInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 CopyImageToImageInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::HostImageCopyFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CopyImageToImageInfo & setFlags( VULKAN_HPP_NAMESPACE::HostImageCopyFlags flags_ ) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } - VULKAN_HPP_CONSTEXPR_14 CopyImageToImageInfoEXT & setSrcImage( VULKAN_HPP_NAMESPACE::Image srcImage_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CopyImageToImageInfo & setSrcImage( VULKAN_HPP_NAMESPACE::Image srcImage_ ) VULKAN_HPP_NOEXCEPT { srcImage = srcImage_; return *this; } - VULKAN_HPP_CONSTEXPR_14 CopyImageToImageInfoEXT & setSrcImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CopyImageToImageInfo & setSrcImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ ) VULKAN_HPP_NOEXCEPT { srcImageLayout = srcImageLayout_; return *this; } - VULKAN_HPP_CONSTEXPR_14 CopyImageToImageInfoEXT & setDstImage( VULKAN_HPP_NAMESPACE::Image dstImage_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CopyImageToImageInfo & setDstImage( VULKAN_HPP_NAMESPACE::Image dstImage_ ) VULKAN_HPP_NOEXCEPT { dstImage = dstImage_; return *this; } - VULKAN_HPP_CONSTEXPR_14 CopyImageToImageInfoEXT & setDstImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CopyImageToImageInfo & setDstImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ ) VULKAN_HPP_NOEXCEPT { dstImageLayout = dstImageLayout_; return *this; } - VULKAN_HPP_CONSTEXPR_14 CopyImageToImageInfoEXT & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CopyImageToImageInfo & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT { regionCount = regionCount_; return *this; } - VULKAN_HPP_CONSTEXPR_14 CopyImageToImageInfoEXT & setPRegions( const VULKAN_HPP_NAMESPACE::ImageCopy2 * pRegions_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CopyImageToImageInfo & setPRegions( const 
VULKAN_HPP_NAMESPACE::ImageCopy2 * pRegions_ ) VULKAN_HPP_NOEXCEPT { pRegions = pRegions_; return *this; } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - CopyImageToImageInfoEXT & + CopyImageToImageInfo & setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_ ) VULKAN_HPP_NOEXCEPT { regionCount = static_cast( regions_.size() ); @@ -18653,14 +18656,14 @@ namespace VULKAN_HPP_NAMESPACE # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkCopyImageToImageInfoEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkCopyImageToImageInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkCopyImageToImageInfoEXT &() VULKAN_HPP_NOEXCEPT + operator VkCopyImageToImageInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -18669,7 +18672,7 @@ namespace VULKAN_HPP_NAMESPACE # else std::tuple( CopyImageToImageInfoEXT const & ) const = default; + auto operator<=>( CopyImageToImageInfo const & ) const = default; #else - bool operator==( CopyImageToImageInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( CopyImageToImageInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); @@ -18697,45 +18700,47 @@ namespace VULKAN_HPP_NAMESPACE # endif } - bool operator!=( CopyImageToImageInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( CopyImageToImageInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyImageToImageInfoEXT; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::HostImageCopyFlagsEXT flags = {}; - VULKAN_HPP_NAMESPACE::Image srcImage = {}; - VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; - VULKAN_HPP_NAMESPACE::Image dstImage = {}; - VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; - uint32_t regionCount = {}; - const VULKAN_HPP_NAMESPACE::ImageCopy2 * pRegions = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyImageToImageInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::HostImageCopyFlags flags = {}; + VULKAN_HPP_NAMESPACE::Image srcImage = {}; + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + VULKAN_HPP_NAMESPACE::Image dstImage = {}; + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + uint32_t regionCount = {}; + const VULKAN_HPP_NAMESPACE::ImageCopy2 * pRegions = {}; }; template <> - struct CppType + struct CppType { - using Type = CopyImageToImageInfoEXT; + using Type = CopyImageToImageInfo; }; - struct ImageToMemoryCopyEXT + using CopyImageToImageInfoEXT = CopyImageToImageInfo; + + struct ImageToMemoryCopy { - using NativeType = VkImageToMemoryCopyEXT; + using NativeType = VkImageToMemoryCopy; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageToMemoryCopyEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageToMemoryCopy; #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR ImageToMemoryCopyEXT( void * pHostPointer_ = {}, - uint32_t memoryRowLength_ = {}, - uint32_t memoryImageHeight_ = {}, - 
VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource_ = {}, - VULKAN_HPP_NAMESPACE::Offset3D imageOffset_ = {}, - VULKAN_HPP_NAMESPACE::Extent3D imageExtent_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR ImageToMemoryCopy( void * pHostPointer_ = {}, + uint32_t memoryRowLength_ = {}, + uint32_t memoryImageHeight_ = {}, + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource_ = {}, + VULKAN_HPP_NAMESPACE::Offset3D imageOffset_ = {}, + VULKAN_HPP_NAMESPACE::Extent3D imageExtent_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , pHostPointer{ pHostPointer_ } , memoryRowLength{ memoryRowLength_ } @@ -18746,75 +18751,72 @@ namespace VULKAN_HPP_NAMESPACE { } - VULKAN_HPP_CONSTEXPR ImageToMemoryCopyEXT( ImageToMemoryCopyEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR ImageToMemoryCopy( ImageToMemoryCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default; - ImageToMemoryCopyEXT( VkImageToMemoryCopyEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : ImageToMemoryCopyEXT( *reinterpret_cast( &rhs ) ) - { - } + ImageToMemoryCopy( VkImageToMemoryCopy const & rhs ) VULKAN_HPP_NOEXCEPT : ImageToMemoryCopy( *reinterpret_cast( &rhs ) ) {} - ImageToMemoryCopyEXT & operator=( ImageToMemoryCopyEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + ImageToMemoryCopy & operator=( ImageToMemoryCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - ImageToMemoryCopyEXT & operator=( VkImageToMemoryCopyEXT const & rhs ) VULKAN_HPP_NOEXCEPT + ImageToMemoryCopy & operator=( VkImageToMemoryCopy const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 ImageToMemoryCopyEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImageToMemoryCopy & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ImageToMemoryCopyEXT & setPHostPointer( void * pHostPointer_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImageToMemoryCopy & setPHostPointer( void * pHostPointer_ ) VULKAN_HPP_NOEXCEPT { pHostPointer = pHostPointer_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ImageToMemoryCopyEXT & setMemoryRowLength( uint32_t memoryRowLength_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImageToMemoryCopy & setMemoryRowLength( uint32_t memoryRowLength_ ) VULKAN_HPP_NOEXCEPT { memoryRowLength = memoryRowLength_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ImageToMemoryCopyEXT & setMemoryImageHeight( uint32_t memoryImageHeight_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImageToMemoryCopy & setMemoryImageHeight( uint32_t memoryImageHeight_ ) VULKAN_HPP_NOEXCEPT { memoryImageHeight = memoryImageHeight_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ImageToMemoryCopyEXT & + VULKAN_HPP_CONSTEXPR_14 ImageToMemoryCopy & setImageSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & imageSubresource_ ) VULKAN_HPP_NOEXCEPT { imageSubresource = imageSubresource_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ImageToMemoryCopyEXT & setImageOffset( VULKAN_HPP_NAMESPACE::Offset3D const & imageOffset_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImageToMemoryCopy & setImageOffset( VULKAN_HPP_NAMESPACE::Offset3D const & imageOffset_ ) VULKAN_HPP_NOEXCEPT { imageOffset = imageOffset_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ImageToMemoryCopyEXT & setImageExtent( VULKAN_HPP_NAMESPACE::Extent3D 
const & imageExtent_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImageToMemoryCopy & setImageExtent( VULKAN_HPP_NAMESPACE::Extent3D const & imageExtent_ ) VULKAN_HPP_NOEXCEPT { imageExtent = imageExtent_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkImageToMemoryCopyEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkImageToMemoryCopy const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkImageToMemoryCopyEXT &() VULKAN_HPP_NOEXCEPT + operator VkImageToMemoryCopy &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -18837,9 +18839,9 @@ namespace VULKAN_HPP_NAMESPACE #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( ImageToMemoryCopyEXT const & ) const = default; + auto operator<=>( ImageToMemoryCopy const & ) const = default; #else - bool operator==( ImageToMemoryCopyEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ImageToMemoryCopy const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); @@ -18850,14 +18852,14 @@ namespace VULKAN_HPP_NAMESPACE # endif } - bool operator!=( ImageToMemoryCopyEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ImageToMemoryCopy const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageToMemoryCopyEXT; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageToMemoryCopy; const void * pNext = {}; void * pHostPointer = {}; uint32_t memoryRowLength = {}; @@ -18868,25 +18870,27 @@ namespace VULKAN_HPP_NAMESPACE }; template <> - struct CppType + struct CppType { - using Type = ImageToMemoryCopyEXT; + using Type = ImageToMemoryCopy; }; - struct CopyImageToMemoryInfoEXT + using ImageToMemoryCopyEXT = ImageToMemoryCopy; + + struct CopyImageToMemoryInfo { - using NativeType = VkCopyImageToMemoryInfoEXT; + using NativeType = VkCopyImageToMemoryInfo; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyImageToMemoryInfoEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyImageToMemoryInfo; #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR CopyImageToMemoryInfoEXT( VULKAN_HPP_NAMESPACE::HostImageCopyFlagsEXT flags_ = {}, - VULKAN_HPP_NAMESPACE::Image srcImage_ = {}, - VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, - uint32_t regionCount_ = {}, - const VULKAN_HPP_NAMESPACE::ImageToMemoryCopyEXT * pRegions_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR CopyImageToMemoryInfo( VULKAN_HPP_NAMESPACE::HostImageCopyFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::Image srcImage_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + uint32_t regionCount_ = {}, + const VULKAN_HPP_NAMESPACE::ImageToMemoryCopy * pRegions_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , flags{ flags_ } , srcImage{ srcImage_ } @@ -18896,19 +18900,19 @@ namespace VULKAN_HPP_NAMESPACE { } - VULKAN_HPP_CONSTEXPR CopyImageToMemoryInfoEXT( CopyImageToMemoryInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR CopyImageToMemoryInfo( CopyImageToMemoryInfo const & rhs ) 
VULKAN_HPP_NOEXCEPT = default; - CopyImageToMemoryInfoEXT( VkCopyImageToMemoryInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : CopyImageToMemoryInfoEXT( *reinterpret_cast( &rhs ) ) + CopyImageToMemoryInfo( VkCopyImageToMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : CopyImageToMemoryInfo( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - CopyImageToMemoryInfoEXT( VULKAN_HPP_NAMESPACE::HostImageCopyFlagsEXT flags_, - VULKAN_HPP_NAMESPACE::Image srcImage_, - VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_, - const void * pNext_ = nullptr ) + CopyImageToMemoryInfo( VULKAN_HPP_NAMESPACE::HostImageCopyFlags flags_, + VULKAN_HPP_NAMESPACE::Image srcImage_, + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_, + const void * pNext_ = nullptr ) : pNext( pNext_ ) , flags( flags_ ) , srcImage( srcImage_ ) @@ -18919,55 +18923,55 @@ namespace VULKAN_HPP_NAMESPACE } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - CopyImageToMemoryInfoEXT & operator=( CopyImageToMemoryInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + CopyImageToMemoryInfo & operator=( CopyImageToMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - CopyImageToMemoryInfoEXT & operator=( VkCopyImageToMemoryInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + CopyImageToMemoryInfo & operator=( VkCopyImageToMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 CopyImageToMemoryInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CopyImageToMemoryInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 CopyImageToMemoryInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::HostImageCopyFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CopyImageToMemoryInfo & setFlags( VULKAN_HPP_NAMESPACE::HostImageCopyFlags flags_ ) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } - VULKAN_HPP_CONSTEXPR_14 CopyImageToMemoryInfoEXT & setSrcImage( VULKAN_HPP_NAMESPACE::Image srcImage_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CopyImageToMemoryInfo & setSrcImage( VULKAN_HPP_NAMESPACE::Image srcImage_ ) VULKAN_HPP_NOEXCEPT { srcImage = srcImage_; return *this; } - VULKAN_HPP_CONSTEXPR_14 CopyImageToMemoryInfoEXT & setSrcImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CopyImageToMemoryInfo & setSrcImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ ) VULKAN_HPP_NOEXCEPT { srcImageLayout = srcImageLayout_; return *this; } - VULKAN_HPP_CONSTEXPR_14 CopyImageToMemoryInfoEXT & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CopyImageToMemoryInfo & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT { regionCount = regionCount_; return *this; } - VULKAN_HPP_CONSTEXPR_14 CopyImageToMemoryInfoEXT & setPRegions( const VULKAN_HPP_NAMESPACE::ImageToMemoryCopyEXT * pRegions_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CopyImageToMemoryInfo & setPRegions( const VULKAN_HPP_NAMESPACE::ImageToMemoryCopy * pRegions_ ) VULKAN_HPP_NOEXCEPT { pRegions = pRegions_; return *this; } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - CopyImageToMemoryInfoEXT & - setRegions( 
VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_ ) VULKAN_HPP_NOEXCEPT + CopyImageToMemoryInfo & + setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_ ) VULKAN_HPP_NOEXCEPT { regionCount = static_cast( regions_.size() ); pRegions = regions_.data(); @@ -18976,14 +18980,14 @@ namespace VULKAN_HPP_NAMESPACE # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkCopyImageToMemoryInfoEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkCopyImageToMemoryInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkCopyImageToMemoryInfoEXT &() VULKAN_HPP_NOEXCEPT + operator VkCopyImageToMemoryInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -18992,11 +18996,11 @@ namespace VULKAN_HPP_NAMESPACE # else std::tuple + const VULKAN_HPP_NAMESPACE::ImageToMemoryCopy * const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { @@ -19005,9 +19009,9 @@ namespace VULKAN_HPP_NAMESPACE #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( CopyImageToMemoryInfoEXT const & ) const = default; + auto operator<=>( CopyImageToMemoryInfo const & ) const = default; #else - bool operator==( CopyImageToMemoryInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( CopyImageToMemoryInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); @@ -19017,28 +19021,30 @@ namespace VULKAN_HPP_NAMESPACE # endif } - bool operator!=( CopyImageToMemoryInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( CopyImageToMemoryInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyImageToMemoryInfoEXT; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::HostImageCopyFlagsEXT flags = {}; - VULKAN_HPP_NAMESPACE::Image srcImage = {}; - VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; - uint32_t regionCount = {}; - const VULKAN_HPP_NAMESPACE::ImageToMemoryCopyEXT * pRegions = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyImageToMemoryInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::HostImageCopyFlags flags = {}; + VULKAN_HPP_NAMESPACE::Image srcImage = {}; + VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + uint32_t regionCount = {}; + const VULKAN_HPP_NAMESPACE::ImageToMemoryCopy * pRegions = {}; }; template <> - struct CppType + struct CppType { - using Type = CopyImageToMemoryInfoEXT; + using Type = CopyImageToMemoryInfo; }; + using CopyImageToMemoryInfoEXT = CopyImageToMemoryInfo; + struct CopyMemoryIndirectCommandNV { using NativeType = VkCopyMemoryIndirectCommandNV; @@ -19369,21 +19375,21 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::Extent3D imageExtent = {}; }; - struct MemoryToImageCopyEXT + struct MemoryToImageCopy { - using NativeType = VkMemoryToImageCopyEXT; + using NativeType = VkMemoryToImageCopy; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryToImageCopyEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryToImageCopy; #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR MemoryToImageCopyEXT( 
const void * pHostPointer_ = {}, - uint32_t memoryRowLength_ = {}, - uint32_t memoryImageHeight_ = {}, - VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource_ = {}, - VULKAN_HPP_NAMESPACE::Offset3D imageOffset_ = {}, - VULKAN_HPP_NAMESPACE::Extent3D imageExtent_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR MemoryToImageCopy( const void * pHostPointer_ = {}, + uint32_t memoryRowLength_ = {}, + uint32_t memoryImageHeight_ = {}, + VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource_ = {}, + VULKAN_HPP_NAMESPACE::Offset3D imageOffset_ = {}, + VULKAN_HPP_NAMESPACE::Extent3D imageExtent_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , pHostPointer{ pHostPointer_ } , memoryRowLength{ memoryRowLength_ } @@ -19394,75 +19400,72 @@ namespace VULKAN_HPP_NAMESPACE { } - VULKAN_HPP_CONSTEXPR MemoryToImageCopyEXT( MemoryToImageCopyEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR MemoryToImageCopy( MemoryToImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default; - MemoryToImageCopyEXT( VkMemoryToImageCopyEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : MemoryToImageCopyEXT( *reinterpret_cast( &rhs ) ) - { - } + MemoryToImageCopy( VkMemoryToImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT : MemoryToImageCopy( *reinterpret_cast( &rhs ) ) {} - MemoryToImageCopyEXT & operator=( MemoryToImageCopyEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + MemoryToImageCopy & operator=( MemoryToImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - MemoryToImageCopyEXT & operator=( VkMemoryToImageCopyEXT const & rhs ) VULKAN_HPP_NOEXCEPT + MemoryToImageCopy & operator=( VkMemoryToImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 MemoryToImageCopyEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 MemoryToImageCopy & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 MemoryToImageCopyEXT & setPHostPointer( const void * pHostPointer_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 MemoryToImageCopy & setPHostPointer( const void * pHostPointer_ ) VULKAN_HPP_NOEXCEPT { pHostPointer = pHostPointer_; return *this; } - VULKAN_HPP_CONSTEXPR_14 MemoryToImageCopyEXT & setMemoryRowLength( uint32_t memoryRowLength_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 MemoryToImageCopy & setMemoryRowLength( uint32_t memoryRowLength_ ) VULKAN_HPP_NOEXCEPT { memoryRowLength = memoryRowLength_; return *this; } - VULKAN_HPP_CONSTEXPR_14 MemoryToImageCopyEXT & setMemoryImageHeight( uint32_t memoryImageHeight_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 MemoryToImageCopy & setMemoryImageHeight( uint32_t memoryImageHeight_ ) VULKAN_HPP_NOEXCEPT { memoryImageHeight = memoryImageHeight_; return *this; } - VULKAN_HPP_CONSTEXPR_14 MemoryToImageCopyEXT & + VULKAN_HPP_CONSTEXPR_14 MemoryToImageCopy & setImageSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & imageSubresource_ ) VULKAN_HPP_NOEXCEPT { imageSubresource = imageSubresource_; return *this; } - VULKAN_HPP_CONSTEXPR_14 MemoryToImageCopyEXT & setImageOffset( VULKAN_HPP_NAMESPACE::Offset3D const & imageOffset_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 MemoryToImageCopy & setImageOffset( VULKAN_HPP_NAMESPACE::Offset3D const & imageOffset_ ) VULKAN_HPP_NOEXCEPT { imageOffset = 
imageOffset_; return *this; } - VULKAN_HPP_CONSTEXPR_14 MemoryToImageCopyEXT & setImageExtent( VULKAN_HPP_NAMESPACE::Extent3D const & imageExtent_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 MemoryToImageCopy & setImageExtent( VULKAN_HPP_NAMESPACE::Extent3D const & imageExtent_ ) VULKAN_HPP_NOEXCEPT { imageExtent = imageExtent_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkMemoryToImageCopyEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkMemoryToImageCopy const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkMemoryToImageCopyEXT &() VULKAN_HPP_NOEXCEPT + operator VkMemoryToImageCopy &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -19485,9 +19488,9 @@ namespace VULKAN_HPP_NAMESPACE #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( MemoryToImageCopyEXT const & ) const = default; + auto operator<=>( MemoryToImageCopy const & ) const = default; #else - bool operator==( MemoryToImageCopyEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( MemoryToImageCopy const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); @@ -19498,14 +19501,14 @@ namespace VULKAN_HPP_NAMESPACE # endif } - bool operator!=( MemoryToImageCopyEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( MemoryToImageCopy const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryToImageCopyEXT; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryToImageCopy; const void * pNext = {}; const void * pHostPointer = {}; uint32_t memoryRowLength = {}; @@ -19516,25 +19519,27 @@ namespace VULKAN_HPP_NAMESPACE }; template <> - struct CppType + struct CppType { - using Type = MemoryToImageCopyEXT; + using Type = MemoryToImageCopy; }; - struct CopyMemoryToImageInfoEXT + using MemoryToImageCopyEXT = MemoryToImageCopy; + + struct CopyMemoryToImageInfo { - using NativeType = VkCopyMemoryToImageInfoEXT; + using NativeType = VkCopyMemoryToImageInfo; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyMemoryToImageInfoEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyMemoryToImageInfo; #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR CopyMemoryToImageInfoEXT( VULKAN_HPP_NAMESPACE::HostImageCopyFlagsEXT flags_ = {}, - VULKAN_HPP_NAMESPACE::Image dstImage_ = {}, - VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, - uint32_t regionCount_ = {}, - const VULKAN_HPP_NAMESPACE::MemoryToImageCopyEXT * pRegions_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR CopyMemoryToImageInfo( VULKAN_HPP_NAMESPACE::HostImageCopyFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::Image dstImage_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + uint32_t regionCount_ = {}, + const VULKAN_HPP_NAMESPACE::MemoryToImageCopy * pRegions_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , flags{ flags_ } , dstImage{ dstImage_ } @@ -19544,19 +19549,19 @@ namespace VULKAN_HPP_NAMESPACE { } - VULKAN_HPP_CONSTEXPR CopyMemoryToImageInfoEXT( CopyMemoryToImageInfoEXT const 
& rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR CopyMemoryToImageInfo( CopyMemoryToImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - CopyMemoryToImageInfoEXT( VkCopyMemoryToImageInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : CopyMemoryToImageInfoEXT( *reinterpret_cast( &rhs ) ) + CopyMemoryToImageInfo( VkCopyMemoryToImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : CopyMemoryToImageInfo( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - CopyMemoryToImageInfoEXT( VULKAN_HPP_NAMESPACE::HostImageCopyFlagsEXT flags_, - VULKAN_HPP_NAMESPACE::Image dstImage_, - VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_, - const void * pNext_ = nullptr ) + CopyMemoryToImageInfo( VULKAN_HPP_NAMESPACE::HostImageCopyFlags flags_, + VULKAN_HPP_NAMESPACE::Image dstImage_, + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_, + const void * pNext_ = nullptr ) : pNext( pNext_ ) , flags( flags_ ) , dstImage( dstImage_ ) @@ -19567,55 +19572,55 @@ namespace VULKAN_HPP_NAMESPACE } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - CopyMemoryToImageInfoEXT & operator=( CopyMemoryToImageInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + CopyMemoryToImageInfo & operator=( CopyMemoryToImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - CopyMemoryToImageInfoEXT & operator=( VkCopyMemoryToImageInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + CopyMemoryToImageInfo & operator=( VkCopyMemoryToImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::HostImageCopyFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageInfo & setFlags( VULKAN_HPP_NAMESPACE::HostImageCopyFlags flags_ ) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } - VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageInfoEXT & setDstImage( VULKAN_HPP_NAMESPACE::Image dstImage_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageInfo & setDstImage( VULKAN_HPP_NAMESPACE::Image dstImage_ ) VULKAN_HPP_NOEXCEPT { dstImage = dstImage_; return *this; } - VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageInfoEXT & setDstImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageInfo & setDstImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ ) VULKAN_HPP_NOEXCEPT { dstImageLayout = dstImageLayout_; return *this; } - VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageInfoEXT & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageInfo & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT { regionCount = regionCount_; return *this; } - VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageInfoEXT & setPRegions( const VULKAN_HPP_NAMESPACE::MemoryToImageCopyEXT * pRegions_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 CopyMemoryToImageInfo & setPRegions( const VULKAN_HPP_NAMESPACE::MemoryToImageCopy * pRegions_ ) VULKAN_HPP_NOEXCEPT { pRegions = pRegions_; return *this; 
} # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - CopyMemoryToImageInfoEXT & - setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_ ) VULKAN_HPP_NOEXCEPT + CopyMemoryToImageInfo & + setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & regions_ ) VULKAN_HPP_NOEXCEPT { regionCount = static_cast( regions_.size() ); pRegions = regions_.data(); @@ -19624,14 +19629,14 @@ namespace VULKAN_HPP_NAMESPACE # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkCopyMemoryToImageInfoEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkCopyMemoryToImageInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkCopyMemoryToImageInfoEXT &() VULKAN_HPP_NOEXCEPT + operator VkCopyMemoryToImageInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -19640,11 +19645,11 @@ namespace VULKAN_HPP_NAMESPACE # else std::tuple + const VULKAN_HPP_NAMESPACE::MemoryToImageCopy * const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { @@ -19653,9 +19658,9 @@ namespace VULKAN_HPP_NAMESPACE #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( CopyMemoryToImageInfoEXT const & ) const = default; + auto operator<=>( CopyMemoryToImageInfo const & ) const = default; #else - bool operator==( CopyMemoryToImageInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( CopyMemoryToImageInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); @@ -19665,28 +19670,30 @@ namespace VULKAN_HPP_NAMESPACE # endif } - bool operator!=( CopyMemoryToImageInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( CopyMemoryToImageInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyMemoryToImageInfoEXT; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::HostImageCopyFlagsEXT flags = {}; - VULKAN_HPP_NAMESPACE::Image dstImage = {}; - VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; - uint32_t regionCount = {}; - const VULKAN_HPP_NAMESPACE::MemoryToImageCopyEXT * pRegions = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyMemoryToImageInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::HostImageCopyFlags flags = {}; + VULKAN_HPP_NAMESPACE::Image dstImage = {}; + VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; + uint32_t regionCount = {}; + const VULKAN_HPP_NAMESPACE::MemoryToImageCopy * pRegions = {}; }; template <> - struct CppType + struct CppType { - using Type = CopyMemoryToImageInfoEXT; + using Type = CopyMemoryToImageInfo; }; + using CopyMemoryToImageInfoEXT = CopyMemoryToImageInfo; + struct CopyMemoryToMicromapInfoEXT { using NativeType = VkCopyMemoryToMicromapInfoEXT; @@ -30222,58 +30229,55 @@ namespace VULKAN_HPP_NAMESPACE using DeviceImageMemoryRequirementsKHR = DeviceImageMemoryRequirements; - struct ImageSubresource2KHR + struct ImageSubresource2 { - using NativeType = VkImageSubresource2KHR; + using NativeType = VkImageSubresource2; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageSubresource2KHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = 
StructureType::eImageSubresource2; #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR ImageSubresource2KHR( VULKAN_HPP_NAMESPACE::ImageSubresource imageSubresource_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR ImageSubresource2( VULKAN_HPP_NAMESPACE::ImageSubresource imageSubresource_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , imageSubresource{ imageSubresource_ } { } - VULKAN_HPP_CONSTEXPR ImageSubresource2KHR( ImageSubresource2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR ImageSubresource2( ImageSubresource2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; - ImageSubresource2KHR( VkImageSubresource2KHR const & rhs ) VULKAN_HPP_NOEXCEPT - : ImageSubresource2KHR( *reinterpret_cast( &rhs ) ) - { - } + ImageSubresource2( VkImageSubresource2 const & rhs ) VULKAN_HPP_NOEXCEPT : ImageSubresource2( *reinterpret_cast( &rhs ) ) {} - ImageSubresource2KHR & operator=( ImageSubresource2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + ImageSubresource2 & operator=( ImageSubresource2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - ImageSubresource2KHR & operator=( VkImageSubresource2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + ImageSubresource2 & operator=( VkImageSubresource2 const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 ImageSubresource2KHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImageSubresource2 & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 ImageSubresource2KHR & setImageSubresource( VULKAN_HPP_NAMESPACE::ImageSubresource const & imageSubresource_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 ImageSubresource2 & setImageSubresource( VULKAN_HPP_NAMESPACE::ImageSubresource const & imageSubresource_ ) VULKAN_HPP_NOEXCEPT { imageSubresource = imageSubresource_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkImageSubresource2KHR const &() const VULKAN_HPP_NOEXCEPT + operator VkImageSubresource2 const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkImageSubresource2KHR &() VULKAN_HPP_NOEXCEPT + operator VkImageSubresource2 &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -30289,9 +30293,9 @@ namespace VULKAN_HPP_NAMESPACE #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( ImageSubresource2KHR const & ) const = default; + auto operator<=>( ImageSubresource2 const & ) const = default; #else - bool operator==( ImageSubresource2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( ImageSubresource2 const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); @@ -30300,88 +30304,88 @@ namespace VULKAN_HPP_NAMESPACE # endif } - bool operator!=( ImageSubresource2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( ImageSubresource2 const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageSubresource2KHR; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageSubresource2; void * pNext = {}; VULKAN_HPP_NAMESPACE::ImageSubresource 
imageSubresource = {}; }; template <> - struct CppType + struct CppType { - using Type = ImageSubresource2KHR; + using Type = ImageSubresource2; }; - using ImageSubresource2EXT = ImageSubresource2KHR; + using ImageSubresource2EXT = ImageSubresource2; + using ImageSubresource2KHR = ImageSubresource2; - struct DeviceImageSubresourceInfoKHR + struct DeviceImageSubresourceInfo { - using NativeType = VkDeviceImageSubresourceInfoKHR; + using NativeType = VkDeviceImageSubresourceInfo; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceImageSubresourceInfoKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceImageSubresourceInfo; #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR DeviceImageSubresourceInfoKHR( const VULKAN_HPP_NAMESPACE::ImageCreateInfo * pCreateInfo_ = {}, - const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR * pSubresource_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR DeviceImageSubresourceInfo( const VULKAN_HPP_NAMESPACE::ImageCreateInfo * pCreateInfo_ = {}, + const VULKAN_HPP_NAMESPACE::ImageSubresource2 * pSubresource_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , pCreateInfo{ pCreateInfo_ } , pSubresource{ pSubresource_ } { } - VULKAN_HPP_CONSTEXPR DeviceImageSubresourceInfoKHR( DeviceImageSubresourceInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR DeviceImageSubresourceInfo( DeviceImageSubresourceInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - DeviceImageSubresourceInfoKHR( VkDeviceImageSubresourceInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : DeviceImageSubresourceInfoKHR( *reinterpret_cast( &rhs ) ) + DeviceImageSubresourceInfo( VkDeviceImageSubresourceInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : DeviceImageSubresourceInfo( *reinterpret_cast( &rhs ) ) { } - DeviceImageSubresourceInfoKHR & operator=( DeviceImageSubresourceInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + DeviceImageSubresourceInfo & operator=( DeviceImageSubresourceInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - DeviceImageSubresourceInfoKHR & operator=( VkDeviceImageSubresourceInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + DeviceImageSubresourceInfo & operator=( VkDeviceImageSubresourceInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 DeviceImageSubresourceInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DeviceImageSubresourceInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 DeviceImageSubresourceInfoKHR & setPCreateInfo( const VULKAN_HPP_NAMESPACE::ImageCreateInfo * pCreateInfo_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DeviceImageSubresourceInfo & setPCreateInfo( const VULKAN_HPP_NAMESPACE::ImageCreateInfo * pCreateInfo_ ) VULKAN_HPP_NOEXCEPT { pCreateInfo = pCreateInfo_; return *this; } - VULKAN_HPP_CONSTEXPR_14 DeviceImageSubresourceInfoKHR & - setPSubresource( const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR * pSubresource_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DeviceImageSubresourceInfo & setPSubresource( const VULKAN_HPP_NAMESPACE::ImageSubresource2 * pSubresource_ ) VULKAN_HPP_NOEXCEPT { pSubresource = pSubresource_; return *this; } 
#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkDeviceImageSubresourceInfoKHR const &() const VULKAN_HPP_NOEXCEPT + operator VkDeviceImageSubresourceInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkDeviceImageSubresourceInfoKHR &() VULKAN_HPP_NOEXCEPT + operator VkDeviceImageSubresourceInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -30391,7 +30395,7 @@ namespace VULKAN_HPP_NAMESPACE std::tuple + const VULKAN_HPP_NAMESPACE::ImageSubresource2 * const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { @@ -30400,9 +30404,9 @@ namespace VULKAN_HPP_NAMESPACE #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( DeviceImageSubresourceInfoKHR const & ) const = default; + auto operator<=>( DeviceImageSubresourceInfo const & ) const = default; #else - bool operator==( DeviceImageSubresourceInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DeviceImageSubresourceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); @@ -30411,25 +30415,27 @@ namespace VULKAN_HPP_NAMESPACE # endif } - bool operator!=( DeviceImageSubresourceInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DeviceImageSubresourceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceImageSubresourceInfoKHR; - const void * pNext = {}; - const VULKAN_HPP_NAMESPACE::ImageCreateInfo * pCreateInfo = {}; - const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR * pSubresource = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceImageSubresourceInfo; + const void * pNext = {}; + const VULKAN_HPP_NAMESPACE::ImageCreateInfo * pCreateInfo = {}; + const VULKAN_HPP_NAMESPACE::ImageSubresource2 * pSubresource = {}; }; template <> - struct CppType + struct CppType { - using Type = DeviceImageSubresourceInfoKHR; + using Type = DeviceImageSubresourceInfo; }; + using DeviceImageSubresourceInfoKHR = DeviceImageSubresourceInfo; + struct DeviceMemoryOpaqueCaptureAddressInfo { using NativeType = VkDeviceMemoryOpaqueCaptureAddressInfo; @@ -30980,68 +30986,68 @@ namespace VULKAN_HPP_NAMESPACE using DevicePrivateDataCreateInfoEXT = DevicePrivateDataCreateInfo; - struct DeviceQueueGlobalPriorityCreateInfoKHR + struct DeviceQueueGlobalPriorityCreateInfo { - using NativeType = VkDeviceQueueGlobalPriorityCreateInfoKHR; + using NativeType = VkDeviceQueueGlobalPriorityCreateInfo; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceQueueGlobalPriorityCreateInfoKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceQueueGlobalPriorityCreateInfo; #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR - DeviceQueueGlobalPriorityCreateInfoKHR( VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR globalPriority_ = VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + DeviceQueueGlobalPriorityCreateInfo( VULKAN_HPP_NAMESPACE::QueueGlobalPriority globalPriority_ = VULKAN_HPP_NAMESPACE::QueueGlobalPriority::eLow, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , globalPriority{ globalPriority_ } { } - VULKAN_HPP_CONSTEXPR 
DeviceQueueGlobalPriorityCreateInfoKHR( DeviceQueueGlobalPriorityCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR DeviceQueueGlobalPriorityCreateInfo( DeviceQueueGlobalPriorityCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - DeviceQueueGlobalPriorityCreateInfoKHR( VkDeviceQueueGlobalPriorityCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : DeviceQueueGlobalPriorityCreateInfoKHR( *reinterpret_cast( &rhs ) ) + DeviceQueueGlobalPriorityCreateInfo( VkDeviceQueueGlobalPriorityCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : DeviceQueueGlobalPriorityCreateInfo( *reinterpret_cast( &rhs ) ) { } - DeviceQueueGlobalPriorityCreateInfoKHR & operator=( DeviceQueueGlobalPriorityCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + DeviceQueueGlobalPriorityCreateInfo & operator=( DeviceQueueGlobalPriorityCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - DeviceQueueGlobalPriorityCreateInfoKHR & operator=( VkDeviceQueueGlobalPriorityCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + DeviceQueueGlobalPriorityCreateInfo & operator=( VkDeviceQueueGlobalPriorityCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 DeviceQueueGlobalPriorityCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DeviceQueueGlobalPriorityCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 DeviceQueueGlobalPriorityCreateInfoKHR & - setGlobalPriority( VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR globalPriority_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DeviceQueueGlobalPriorityCreateInfo & + setGlobalPriority( VULKAN_HPP_NAMESPACE::QueueGlobalPriority globalPriority_ ) VULKAN_HPP_NOEXCEPT { globalPriority = globalPriority_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkDeviceQueueGlobalPriorityCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + operator VkDeviceQueueGlobalPriorityCreateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkDeviceQueueGlobalPriorityCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + operator VkDeviceQueueGlobalPriorityCreateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { @@ -31050,9 +31056,9 @@ namespace VULKAN_HPP_NAMESPACE #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( DeviceQueueGlobalPriorityCreateInfoKHR const & ) const = default; + auto operator<=>( DeviceQueueGlobalPriorityCreateInfo const & ) const = default; #else - bool operator==( DeviceQueueGlobalPriorityCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( DeviceQueueGlobalPriorityCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); @@ -31061,25 +31067,26 @@ namespace VULKAN_HPP_NAMESPACE # endif } - bool operator!=( DeviceQueueGlobalPriorityCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( DeviceQueueGlobalPriorityCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - 
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceQueueGlobalPriorityCreateInfoKHR; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR globalPriority = VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceQueueGlobalPriorityCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::QueueGlobalPriority globalPriority = VULKAN_HPP_NAMESPACE::QueueGlobalPriority::eLow; }; template <> - struct CppType + struct CppType { - using Type = DeviceQueueGlobalPriorityCreateInfoKHR; + using Type = DeviceQueueGlobalPriorityCreateInfo; }; - using DeviceQueueGlobalPriorityCreateInfoEXT = DeviceQueueGlobalPriorityCreateInfoKHR; + using DeviceQueueGlobalPriorityCreateInfoEXT = DeviceQueueGlobalPriorityCreateInfo; + using DeviceQueueGlobalPriorityCreateInfoKHR = DeviceQueueGlobalPriorityCreateInfo; struct DeviceQueueInfo2 { @@ -44209,47 +44216,47 @@ namespace VULKAN_HPP_NAMESPACE using Type = HeadlessSurfaceCreateInfoEXT; }; - struct HostImageCopyDevicePerformanceQueryEXT + struct HostImageCopyDevicePerformanceQuery { - using NativeType = VkHostImageCopyDevicePerformanceQueryEXT; + using NativeType = VkHostImageCopyDevicePerformanceQuery; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eHostImageCopyDevicePerformanceQueryEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eHostImageCopyDevicePerformanceQuery; #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR HostImageCopyDevicePerformanceQueryEXT( VULKAN_HPP_NAMESPACE::Bool32 optimalDeviceAccess_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 identicalMemoryLayout_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR HostImageCopyDevicePerformanceQuery( VULKAN_HPP_NAMESPACE::Bool32 optimalDeviceAccess_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 identicalMemoryLayout_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , optimalDeviceAccess{ optimalDeviceAccess_ } , identicalMemoryLayout{ identicalMemoryLayout_ } { } - VULKAN_HPP_CONSTEXPR HostImageCopyDevicePerformanceQueryEXT( HostImageCopyDevicePerformanceQueryEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR HostImageCopyDevicePerformanceQuery( HostImageCopyDevicePerformanceQuery const & rhs ) VULKAN_HPP_NOEXCEPT = default; - HostImageCopyDevicePerformanceQueryEXT( VkHostImageCopyDevicePerformanceQueryEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : HostImageCopyDevicePerformanceQueryEXT( *reinterpret_cast( &rhs ) ) + HostImageCopyDevicePerformanceQuery( VkHostImageCopyDevicePerformanceQuery const & rhs ) VULKAN_HPP_NOEXCEPT + : HostImageCopyDevicePerformanceQuery( *reinterpret_cast( &rhs ) ) { } - HostImageCopyDevicePerformanceQueryEXT & operator=( HostImageCopyDevicePerformanceQueryEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + HostImageCopyDevicePerformanceQuery & operator=( HostImageCopyDevicePerformanceQuery const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - HostImageCopyDevicePerformanceQueryEXT & operator=( VkHostImageCopyDevicePerformanceQueryEXT const & rhs ) VULKAN_HPP_NOEXCEPT + HostImageCopyDevicePerformanceQuery & operator=( VkHostImageCopyDevicePerformanceQuery const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkHostImageCopyDevicePerformanceQueryEXT const &() const 
VULKAN_HPP_NOEXCEPT + operator VkHostImageCopyDevicePerformanceQuery const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkHostImageCopyDevicePerformanceQueryEXT &() VULKAN_HPP_NOEXCEPT + operator VkHostImageCopyDevicePerformanceQuery &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -44265,9 +44272,9 @@ namespace VULKAN_HPP_NAMESPACE #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( HostImageCopyDevicePerformanceQueryEXT const & ) const = default; + auto operator<=>( HostImageCopyDevicePerformanceQuery const & ) const = default; #else - bool operator==( HostImageCopyDevicePerformanceQueryEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( HostImageCopyDevicePerformanceQuery const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); @@ -44277,38 +44284,40 @@ namespace VULKAN_HPP_NAMESPACE # endif } - bool operator!=( HostImageCopyDevicePerformanceQueryEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( HostImageCopyDevicePerformanceQuery const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eHostImageCopyDevicePerformanceQueryEXT; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eHostImageCopyDevicePerformanceQuery; void * pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 optimalDeviceAccess = {}; VULKAN_HPP_NAMESPACE::Bool32 identicalMemoryLayout = {}; }; template <> - struct CppType + struct CppType { - using Type = HostImageCopyDevicePerformanceQueryEXT; + using Type = HostImageCopyDevicePerformanceQuery; }; - struct HostImageLayoutTransitionInfoEXT + using HostImageCopyDevicePerformanceQueryEXT = HostImageCopyDevicePerformanceQuery; + + struct HostImageLayoutTransitionInfo { - using NativeType = VkHostImageLayoutTransitionInfoEXT; + using NativeType = VkHostImageLayoutTransitionInfo; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eHostImageLayoutTransitionInfoEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eHostImageLayoutTransitionInfo; #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR HostImageLayoutTransitionInfoEXT( VULKAN_HPP_NAMESPACE::Image image_ = {}, - VULKAN_HPP_NAMESPACE::ImageLayout oldLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, - VULKAN_HPP_NAMESPACE::ImageLayout newLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, - VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR HostImageLayoutTransitionInfo( VULKAN_HPP_NAMESPACE::Image image_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout oldLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + VULKAN_HPP_NAMESPACE::ImageLayout newLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, + VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , image{ image_ } , oldLayout{ oldLayout_ } @@ -44317,48 +44326,48 @@ namespace VULKAN_HPP_NAMESPACE { } - VULKAN_HPP_CONSTEXPR HostImageLayoutTransitionInfoEXT( HostImageLayoutTransitionInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR HostImageLayoutTransitionInfo( 
HostImageLayoutTransitionInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - HostImageLayoutTransitionInfoEXT( VkHostImageLayoutTransitionInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : HostImageLayoutTransitionInfoEXT( *reinterpret_cast( &rhs ) ) + HostImageLayoutTransitionInfo( VkHostImageLayoutTransitionInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : HostImageLayoutTransitionInfo( *reinterpret_cast( &rhs ) ) { } - HostImageLayoutTransitionInfoEXT & operator=( HostImageLayoutTransitionInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + HostImageLayoutTransitionInfo & operator=( HostImageLayoutTransitionInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - HostImageLayoutTransitionInfoEXT & operator=( VkHostImageLayoutTransitionInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + HostImageLayoutTransitionInfo & operator=( VkHostImageLayoutTransitionInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 HostImageLayoutTransitionInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 HostImageLayoutTransitionInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 HostImageLayoutTransitionInfoEXT & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 HostImageLayoutTransitionInfo & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT { image = image_; return *this; } - VULKAN_HPP_CONSTEXPR_14 HostImageLayoutTransitionInfoEXT & setOldLayout( VULKAN_HPP_NAMESPACE::ImageLayout oldLayout_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 HostImageLayoutTransitionInfo & setOldLayout( VULKAN_HPP_NAMESPACE::ImageLayout oldLayout_ ) VULKAN_HPP_NOEXCEPT { oldLayout = oldLayout_; return *this; } - VULKAN_HPP_CONSTEXPR_14 HostImageLayoutTransitionInfoEXT & setNewLayout( VULKAN_HPP_NAMESPACE::ImageLayout newLayout_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 HostImageLayoutTransitionInfo & setNewLayout( VULKAN_HPP_NAMESPACE::ImageLayout newLayout_ ) VULKAN_HPP_NOEXCEPT { newLayout = newLayout_; return *this; } - VULKAN_HPP_CONSTEXPR_14 HostImageLayoutTransitionInfoEXT & + VULKAN_HPP_CONSTEXPR_14 HostImageLayoutTransitionInfo & setSubresourceRange( VULKAN_HPP_NAMESPACE::ImageSubresourceRange const & subresourceRange_ ) VULKAN_HPP_NOEXCEPT { subresourceRange = subresourceRange_; @@ -44366,14 +44375,14 @@ namespace VULKAN_HPP_NAMESPACE } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkHostImageLayoutTransitionInfoEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkHostImageLayoutTransitionInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkHostImageLayoutTransitionInfoEXT &() VULKAN_HPP_NOEXCEPT + operator VkHostImageLayoutTransitionInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -44394,9 +44403,9 @@ namespace VULKAN_HPP_NAMESPACE #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( HostImageLayoutTransitionInfoEXT const & ) const = default; + auto operator<=>( HostImageLayoutTransitionInfo const & ) const = default; #else - bool operator==( HostImageLayoutTransitionInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( HostImageLayoutTransitionInfo const & rhs ) const 
VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); @@ -44406,14 +44415,14 @@ namespace VULKAN_HPP_NAMESPACE # endif } - bool operator!=( HostImageLayoutTransitionInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( HostImageLayoutTransitionInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eHostImageLayoutTransitionInfoEXT; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eHostImageLayoutTransitionInfo; const void * pNext = {}; VULKAN_HPP_NAMESPACE::Image image = {}; VULKAN_HPP_NAMESPACE::ImageLayout oldLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined; @@ -44422,11 +44431,13 @@ namespace VULKAN_HPP_NAMESPACE }; template <> - struct CppType + struct CppType { - using Type = HostImageLayoutTransitionInfoEXT; + using Type = HostImageLayoutTransitionInfo; }; + using HostImageLayoutTransitionInfoEXT = HostImageLayoutTransitionInfo; + #if defined( VK_USE_PLATFORM_IOS_MVK ) struct IOSSurfaceCreateInfoMVK { @@ -54888,19 +54899,19 @@ namespace VULKAN_HPP_NAMESPACE using Type = MemoryHostPointerPropertiesEXT; }; - struct MemoryMapInfoKHR + struct MemoryMapInfo { - using NativeType = VkMemoryMapInfoKHR; + using NativeType = VkMemoryMapInfo; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryMapInfoKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryMapInfo; #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR MemoryMapInfoKHR( VULKAN_HPP_NAMESPACE::MemoryMapFlags flags_ = {}, - VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR MemoryMapInfo( VULKAN_HPP_NAMESPACE::MemoryMapFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , flags{ flags_ } , memory{ memory_ } @@ -54909,59 +54920,59 @@ namespace VULKAN_HPP_NAMESPACE { } - VULKAN_HPP_CONSTEXPR MemoryMapInfoKHR( MemoryMapInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR MemoryMapInfo( MemoryMapInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - MemoryMapInfoKHR( VkMemoryMapInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT : MemoryMapInfoKHR( *reinterpret_cast( &rhs ) ) {} + MemoryMapInfo( VkMemoryMapInfo const & rhs ) VULKAN_HPP_NOEXCEPT : MemoryMapInfo( *reinterpret_cast( &rhs ) ) {} - MemoryMapInfoKHR & operator=( MemoryMapInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + MemoryMapInfo & operator=( MemoryMapInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - MemoryMapInfoKHR & operator=( VkMemoryMapInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + MemoryMapInfo & operator=( VkMemoryMapInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 MemoryMapInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 MemoryMapInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 
MemoryMapInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::MemoryMapFlags flags_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 MemoryMapInfo & setFlags( VULKAN_HPP_NAMESPACE::MemoryMapFlags flags_ ) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } - VULKAN_HPP_CONSTEXPR_14 MemoryMapInfoKHR & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 MemoryMapInfo & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT { memory = memory_; return *this; } - VULKAN_HPP_CONSTEXPR_14 MemoryMapInfoKHR & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 MemoryMapInfo & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT { offset = offset_; return *this; } - VULKAN_HPP_CONSTEXPR_14 MemoryMapInfoKHR & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 MemoryMapInfo & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT { size = size_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkMemoryMapInfoKHR const &() const VULKAN_HPP_NOEXCEPT + operator VkMemoryMapInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkMemoryMapInfoKHR &() VULKAN_HPP_NOEXCEPT + operator VkMemoryMapInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -54982,9 +54993,9 @@ namespace VULKAN_HPP_NAMESPACE #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( MemoryMapInfoKHR const & ) const = default; + auto operator<=>( MemoryMapInfo const & ) const = default; #else - bool operator==( MemoryMapInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( MemoryMapInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); @@ -54994,14 +55005,14 @@ namespace VULKAN_HPP_NAMESPACE # endif } - bool operator!=( MemoryMapInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( MemoryMapInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryMapInfoKHR; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryMapInfo; const void * pNext = {}; VULKAN_HPP_NAMESPACE::MemoryMapFlags flags = {}; VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; @@ -55010,11 +55021,13 @@ namespace VULKAN_HPP_NAMESPACE }; template <> - struct CppType + struct CppType { - using Type = MemoryMapInfoKHR; + using Type = MemoryMapInfo; }; + using MemoryMapInfoKHR = MemoryMapInfo; + struct MemoryMapPlacedInfoEXT { using NativeType = VkMemoryMapPlacedInfoEXT; @@ -55530,64 +55543,64 @@ namespace VULKAN_HPP_NAMESPACE uint32_t heapIndex = {}; }; - struct MemoryUnmapInfoKHR + struct MemoryUnmapInfo { - using NativeType = VkMemoryUnmapInfoKHR; + using NativeType = VkMemoryUnmapInfo; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryUnmapInfoKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryUnmapInfo; #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR MemoryUnmapInfoKHR( VULKAN_HPP_NAMESPACE::MemoryUnmapFlagsKHR flags_ = {}, - VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + 
VULKAN_HPP_CONSTEXPR MemoryUnmapInfo( VULKAN_HPP_NAMESPACE::MemoryUnmapFlags flags_ = {}, + VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , flags{ flags_ } , memory{ memory_ } { } - VULKAN_HPP_CONSTEXPR MemoryUnmapInfoKHR( MemoryUnmapInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR MemoryUnmapInfo( MemoryUnmapInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - MemoryUnmapInfoKHR( VkMemoryUnmapInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT : MemoryUnmapInfoKHR( *reinterpret_cast( &rhs ) ) {} + MemoryUnmapInfo( VkMemoryUnmapInfo const & rhs ) VULKAN_HPP_NOEXCEPT : MemoryUnmapInfo( *reinterpret_cast( &rhs ) ) {} - MemoryUnmapInfoKHR & operator=( MemoryUnmapInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + MemoryUnmapInfo & operator=( MemoryUnmapInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - MemoryUnmapInfoKHR & operator=( VkMemoryUnmapInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + MemoryUnmapInfo & operator=( VkMemoryUnmapInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 MemoryUnmapInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 MemoryUnmapInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 MemoryUnmapInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::MemoryUnmapFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 MemoryUnmapInfo & setFlags( VULKAN_HPP_NAMESPACE::MemoryUnmapFlags flags_ ) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } - VULKAN_HPP_CONSTEXPR_14 MemoryUnmapInfoKHR & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 MemoryUnmapInfo & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT { memory = memory_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkMemoryUnmapInfoKHR const &() const VULKAN_HPP_NOEXCEPT + operator VkMemoryUnmapInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkMemoryUnmapInfoKHR &() VULKAN_HPP_NOEXCEPT + operator VkMemoryUnmapInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -55596,7 +55609,7 @@ namespace VULKAN_HPP_NAMESPACE # else std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT @@ -55606,9 +55619,9 @@ namespace VULKAN_HPP_NAMESPACE #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( MemoryUnmapInfoKHR const & ) const = default; + auto operator<=>( MemoryUnmapInfo const & ) const = default; #else - bool operator==( MemoryUnmapInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( MemoryUnmapInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); @@ -55617,25 +55630,27 @@ namespace VULKAN_HPP_NAMESPACE # endif } - bool operator!=( MemoryUnmapInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( MemoryUnmapInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryUnmapInfoKHR; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::MemoryUnmapFlagsKHR flags = {}; - 
VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryUnmapInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::MemoryUnmapFlags flags = {}; + VULKAN_HPP_NAMESPACE::DeviceMemory memory = {}; }; template <> - struct CppType + struct CppType { - using Type = MemoryUnmapInfoKHR; + using Type = MemoryUnmapInfo; }; + using MemoryUnmapInfoKHR = MemoryUnmapInfo; + #if defined( VK_USE_PLATFORM_WIN32_KHR ) struct MemoryWin32HandlePropertiesKHR { @@ -67171,47 +67186,46 @@ namespace VULKAN_HPP_NAMESPACE using PhysicalDeviceDynamicRenderingFeaturesKHR = PhysicalDeviceDynamicRenderingFeatures; - struct PhysicalDeviceDynamicRenderingLocalReadFeaturesKHR + struct PhysicalDeviceDynamicRenderingLocalReadFeatures { - using NativeType = VkPhysicalDeviceDynamicRenderingLocalReadFeaturesKHR; + using NativeType = VkPhysicalDeviceDynamicRenderingLocalReadFeatures; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDynamicRenderingLocalReadFeaturesKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDynamicRenderingLocalReadFeatures; #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceDynamicRenderingLocalReadFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 dynamicRenderingLocalRead_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PhysicalDeviceDynamicRenderingLocalReadFeatures( VULKAN_HPP_NAMESPACE::Bool32 dynamicRenderingLocalRead_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , dynamicRenderingLocalRead{ dynamicRenderingLocalRead_ } { } VULKAN_HPP_CONSTEXPR - PhysicalDeviceDynamicRenderingLocalReadFeaturesKHR( PhysicalDeviceDynamicRenderingLocalReadFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PhysicalDeviceDynamicRenderingLocalReadFeatures( PhysicalDeviceDynamicRenderingLocalReadFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceDynamicRenderingLocalReadFeaturesKHR( VkPhysicalDeviceDynamicRenderingLocalReadFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceDynamicRenderingLocalReadFeaturesKHR( *reinterpret_cast( &rhs ) ) + PhysicalDeviceDynamicRenderingLocalReadFeatures( VkPhysicalDeviceDynamicRenderingLocalReadFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceDynamicRenderingLocalReadFeatures( *reinterpret_cast( &rhs ) ) { } - PhysicalDeviceDynamicRenderingLocalReadFeaturesKHR & - operator=( PhysicalDeviceDynamicRenderingLocalReadFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PhysicalDeviceDynamicRenderingLocalReadFeatures & operator=( PhysicalDeviceDynamicRenderingLocalReadFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceDynamicRenderingLocalReadFeaturesKHR & operator=( VkPhysicalDeviceDynamicRenderingLocalReadFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceDynamicRenderingLocalReadFeatures & operator=( VkPhysicalDeviceDynamicRenderingLocalReadFeatures const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDynamicRenderingLocalReadFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDynamicRenderingLocalReadFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = 
pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDynamicRenderingLocalReadFeaturesKHR & + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDynamicRenderingLocalReadFeatures & setDynamicRenderingLocalRead( VULKAN_HPP_NAMESPACE::Bool32 dynamicRenderingLocalRead_ ) VULKAN_HPP_NOEXCEPT { dynamicRenderingLocalRead = dynamicRenderingLocalRead_; @@ -67219,14 +67233,14 @@ namespace VULKAN_HPP_NAMESPACE } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPhysicalDeviceDynamicRenderingLocalReadFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceDynamicRenderingLocalReadFeatures const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDeviceDynamicRenderingLocalReadFeaturesKHR &() VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceDynamicRenderingLocalReadFeatures &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -67242,9 +67256,9 @@ namespace VULKAN_HPP_NAMESPACE #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceDynamicRenderingLocalReadFeaturesKHR const & ) const = default; + auto operator<=>( PhysicalDeviceDynamicRenderingLocalReadFeatures const & ) const = default; #else - bool operator==( PhysicalDeviceDynamicRenderingLocalReadFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceDynamicRenderingLocalReadFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); @@ -67253,24 +67267,26 @@ namespace VULKAN_HPP_NAMESPACE # endif } - bool operator!=( PhysicalDeviceDynamicRenderingLocalReadFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceDynamicRenderingLocalReadFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDynamicRenderingLocalReadFeaturesKHR; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDynamicRenderingLocalReadFeatures; void * pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 dynamicRenderingLocalRead = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceDynamicRenderingLocalReadFeaturesKHR; + using Type = PhysicalDeviceDynamicRenderingLocalReadFeatures; }; + using PhysicalDeviceDynamicRenderingLocalReadFeaturesKHR = PhysicalDeviceDynamicRenderingLocalReadFeatures; + struct PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT { using NativeType = VkPhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT; @@ -71417,45 +71433,45 @@ namespace VULKAN_HPP_NAMESPACE using Type = PhysicalDeviceFrameBoundaryFeaturesEXT; }; - struct PhysicalDeviceGlobalPriorityQueryFeaturesKHR + struct PhysicalDeviceGlobalPriorityQueryFeatures { - using NativeType = VkPhysicalDeviceGlobalPriorityQueryFeaturesKHR; + using NativeType = VkPhysicalDeviceGlobalPriorityQueryFeatures; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceGlobalPriorityQueryFeaturesKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceGlobalPriorityQueryFeatures; #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceGlobalPriorityQueryFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 globalPriorityQuery_ = {}, - void * pNext_ = nullptr ) 
VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PhysicalDeviceGlobalPriorityQueryFeatures( VULKAN_HPP_NAMESPACE::Bool32 globalPriorityQuery_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , globalPriorityQuery{ globalPriorityQuery_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceGlobalPriorityQueryFeaturesKHR( PhysicalDeviceGlobalPriorityQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR PhysicalDeviceGlobalPriorityQueryFeatures( PhysicalDeviceGlobalPriorityQueryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceGlobalPriorityQueryFeaturesKHR( VkPhysicalDeviceGlobalPriorityQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceGlobalPriorityQueryFeaturesKHR( *reinterpret_cast( &rhs ) ) + PhysicalDeviceGlobalPriorityQueryFeatures( VkPhysicalDeviceGlobalPriorityQueryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceGlobalPriorityQueryFeatures( *reinterpret_cast( &rhs ) ) { } - PhysicalDeviceGlobalPriorityQueryFeaturesKHR & operator=( PhysicalDeviceGlobalPriorityQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PhysicalDeviceGlobalPriorityQueryFeatures & operator=( PhysicalDeviceGlobalPriorityQueryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceGlobalPriorityQueryFeaturesKHR & operator=( VkPhysicalDeviceGlobalPriorityQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceGlobalPriorityQueryFeatures & operator=( VkPhysicalDeviceGlobalPriorityQueryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGlobalPriorityQueryFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGlobalPriorityQueryFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGlobalPriorityQueryFeaturesKHR & + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGlobalPriorityQueryFeatures & setGlobalPriorityQuery( VULKAN_HPP_NAMESPACE::Bool32 globalPriorityQuery_ ) VULKAN_HPP_NOEXCEPT { globalPriorityQuery = globalPriorityQuery_; @@ -71463,14 +71479,14 @@ namespace VULKAN_HPP_NAMESPACE } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPhysicalDeviceGlobalPriorityQueryFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceGlobalPriorityQueryFeatures const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDeviceGlobalPriorityQueryFeaturesKHR &() VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceGlobalPriorityQueryFeatures &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -71486,9 +71502,9 @@ namespace VULKAN_HPP_NAMESPACE #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceGlobalPriorityQueryFeaturesKHR const & ) const = default; + auto operator<=>( PhysicalDeviceGlobalPriorityQueryFeatures const & ) const = default; #else - bool operator==( PhysicalDeviceGlobalPriorityQueryFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceGlobalPriorityQueryFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); @@ -71497,25 +71513,26 @@ namespace 
VULKAN_HPP_NAMESPACE # endif } - bool operator!=( PhysicalDeviceGlobalPriorityQueryFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceGlobalPriorityQueryFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceGlobalPriorityQueryFeaturesKHR; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceGlobalPriorityQueryFeatures; void * pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 globalPriorityQuery = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceGlobalPriorityQueryFeaturesKHR; + using Type = PhysicalDeviceGlobalPriorityQueryFeatures; }; - using PhysicalDeviceGlobalPriorityQueryFeaturesEXT = PhysicalDeviceGlobalPriorityQueryFeaturesKHR; + using PhysicalDeviceGlobalPriorityQueryFeaturesEXT = PhysicalDeviceGlobalPriorityQueryFeatures; + using PhysicalDeviceGlobalPriorityQueryFeaturesKHR = PhysicalDeviceGlobalPriorityQueryFeatures; struct PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT { @@ -71935,58 +71952,58 @@ namespace VULKAN_HPP_NAMESPACE using Type = PhysicalDeviceHdrVividFeaturesHUAWEI; }; - struct PhysicalDeviceHostImageCopyFeaturesEXT + struct PhysicalDeviceHostImageCopyFeatures { - using NativeType = VkPhysicalDeviceHostImageCopyFeaturesEXT; + using NativeType = VkPhysicalDeviceHostImageCopyFeatures; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceHostImageCopyFeaturesEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceHostImageCopyFeatures; #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceHostImageCopyFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 hostImageCopy_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PhysicalDeviceHostImageCopyFeatures( VULKAN_HPP_NAMESPACE::Bool32 hostImageCopy_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , hostImageCopy{ hostImageCopy_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceHostImageCopyFeaturesEXT( PhysicalDeviceHostImageCopyFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR PhysicalDeviceHostImageCopyFeatures( PhysicalDeviceHostImageCopyFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceHostImageCopyFeaturesEXT( VkPhysicalDeviceHostImageCopyFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceHostImageCopyFeaturesEXT( *reinterpret_cast( &rhs ) ) + PhysicalDeviceHostImageCopyFeatures( VkPhysicalDeviceHostImageCopyFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceHostImageCopyFeatures( *reinterpret_cast( &rhs ) ) { } - PhysicalDeviceHostImageCopyFeaturesEXT & operator=( PhysicalDeviceHostImageCopyFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PhysicalDeviceHostImageCopyFeatures & operator=( PhysicalDeviceHostImageCopyFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceHostImageCopyFeaturesEXT & operator=( VkPhysicalDeviceHostImageCopyFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceHostImageCopyFeatures & operator=( VkPhysicalDeviceHostImageCopyFeatures const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 
PhysicalDeviceHostImageCopyFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyFeaturesEXT & setHostImageCopy( VULKAN_HPP_NAMESPACE::Bool32 hostImageCopy_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyFeatures & setHostImageCopy( VULKAN_HPP_NAMESPACE::Bool32 hostImageCopy_ ) VULKAN_HPP_NOEXCEPT { hostImageCopy = hostImageCopy_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPhysicalDeviceHostImageCopyFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceHostImageCopyFeatures const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDeviceHostImageCopyFeaturesEXT &() VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceHostImageCopyFeatures &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -72002,9 +72019,9 @@ namespace VULKAN_HPP_NAMESPACE #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceHostImageCopyFeaturesEXT const & ) const = default; + auto operator<=>( PhysicalDeviceHostImageCopyFeatures const & ) const = default; #else - bool operator==( PhysicalDeviceHostImageCopyFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceHostImageCopyFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); @@ -72013,39 +72030,41 @@ namespace VULKAN_HPP_NAMESPACE # endif } - bool operator!=( PhysicalDeviceHostImageCopyFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceHostImageCopyFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceHostImageCopyFeaturesEXT; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceHostImageCopyFeatures; void * pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 hostImageCopy = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceHostImageCopyFeaturesEXT; + using Type = PhysicalDeviceHostImageCopyFeatures; }; - struct PhysicalDeviceHostImageCopyPropertiesEXT + using PhysicalDeviceHostImageCopyFeaturesEXT = PhysicalDeviceHostImageCopyFeatures; + + struct PhysicalDeviceHostImageCopyProperties { - using NativeType = VkPhysicalDeviceHostImageCopyPropertiesEXT; + using NativeType = VkPhysicalDeviceHostImageCopyProperties; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceHostImageCopyPropertiesEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceHostImageCopyProperties; #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyPropertiesEXT( uint32_t copySrcLayoutCount_ = {}, - VULKAN_HPP_NAMESPACE::ImageLayout * pCopySrcLayouts_ = {}, - uint32_t copyDstLayoutCount_ = {}, - VULKAN_HPP_NAMESPACE::ImageLayout * pCopyDstLayouts_ = {}, - std::array const & optimalTilingLayoutUUID_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 identicalMemoryTypeRequirements_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 
PhysicalDeviceHostImageCopyProperties( uint32_t copySrcLayoutCount_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout * pCopySrcLayouts_ = {}, + uint32_t copyDstLayoutCount_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout * pCopyDstLayouts_ = {}, + std::array const & optimalTilingLayoutUUID_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 identicalMemoryTypeRequirements_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , copySrcLayoutCount{ copySrcLayoutCount_ } , pCopySrcLayouts{ pCopySrcLayouts_ } @@ -72056,19 +72075,19 @@ namespace VULKAN_HPP_NAMESPACE { } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyPropertiesEXT( PhysicalDeviceHostImageCopyPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyProperties( PhysicalDeviceHostImageCopyProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceHostImageCopyPropertiesEXT( VkPhysicalDeviceHostImageCopyPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceHostImageCopyPropertiesEXT( *reinterpret_cast( &rhs ) ) + PhysicalDeviceHostImageCopyProperties( VkPhysicalDeviceHostImageCopyProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceHostImageCopyProperties( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - PhysicalDeviceHostImageCopyPropertiesEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & copySrcLayouts_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & copyDstLayouts_ = {}, - std::array const & optimalTilingLayoutUUID_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 identicalMemoryTypeRequirements_ = {}, - void * pNext_ = nullptr ) + PhysicalDeviceHostImageCopyProperties( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & copySrcLayouts_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & copyDstLayouts_ = {}, + std::array const & optimalTilingLayoutUUID_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 identicalMemoryTypeRequirements_ = {}, + void * pNext_ = nullptr ) : pNext( pNext_ ) , copySrcLayoutCount( static_cast( copySrcLayouts_.size() ) ) , pCopySrcLayouts( copySrcLayouts_.data() ) @@ -72080,29 +72099,29 @@ namespace VULKAN_HPP_NAMESPACE } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - PhysicalDeviceHostImageCopyPropertiesEXT & operator=( PhysicalDeviceHostImageCopyPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PhysicalDeviceHostImageCopyProperties & operator=( PhysicalDeviceHostImageCopyProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceHostImageCopyPropertiesEXT & operator=( VkPhysicalDeviceHostImageCopyPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceHostImageCopyProperties & operator=( VkPhysicalDeviceHostImageCopyProperties const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyPropertiesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyProperties & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyPropertiesEXT & setCopySrcLayoutCount( uint32_t copySrcLayoutCount_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyProperties & setCopySrcLayoutCount( uint32_t copySrcLayoutCount_ ) VULKAN_HPP_NOEXCEPT { copySrcLayoutCount = copySrcLayoutCount_; return *this; } - 
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyPropertiesEXT & + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyProperties & setPCopySrcLayouts( VULKAN_HPP_NAMESPACE::ImageLayout * pCopySrcLayouts_ ) VULKAN_HPP_NOEXCEPT { pCopySrcLayouts = pCopySrcLayouts_; @@ -72110,7 +72129,7 @@ namespace VULKAN_HPP_NAMESPACE } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - PhysicalDeviceHostImageCopyPropertiesEXT & + PhysicalDeviceHostImageCopyProperties & setCopySrcLayouts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & copySrcLayouts_ ) VULKAN_HPP_NOEXCEPT { copySrcLayoutCount = static_cast( copySrcLayouts_.size() ); @@ -72119,13 +72138,13 @@ namespace VULKAN_HPP_NAMESPACE } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyPropertiesEXT & setCopyDstLayoutCount( uint32_t copyDstLayoutCount_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyProperties & setCopyDstLayoutCount( uint32_t copyDstLayoutCount_ ) VULKAN_HPP_NOEXCEPT { copyDstLayoutCount = copyDstLayoutCount_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyPropertiesEXT & + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyProperties & setPCopyDstLayouts( VULKAN_HPP_NAMESPACE::ImageLayout * pCopyDstLayouts_ ) VULKAN_HPP_NOEXCEPT { pCopyDstLayouts = pCopyDstLayouts_; @@ -72133,7 +72152,7 @@ namespace VULKAN_HPP_NAMESPACE } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - PhysicalDeviceHostImageCopyPropertiesEXT & + PhysicalDeviceHostImageCopyProperties & setCopyDstLayouts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & copyDstLayouts_ ) VULKAN_HPP_NOEXCEPT { copyDstLayoutCount = static_cast( copyDstLayouts_.size() ); @@ -72142,14 +72161,14 @@ namespace VULKAN_HPP_NAMESPACE } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyPropertiesEXT & + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyProperties & setOptimalTilingLayoutUUID( std::array optimalTilingLayoutUUID_ ) VULKAN_HPP_NOEXCEPT { optimalTilingLayoutUUID = optimalTilingLayoutUUID_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyPropertiesEXT & + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyProperties & setIdenticalMemoryTypeRequirements( VULKAN_HPP_NAMESPACE::Bool32 identicalMemoryTypeRequirements_ ) VULKAN_HPP_NOEXCEPT { identicalMemoryTypeRequirements = identicalMemoryTypeRequirements_; @@ -72157,14 +72176,14 @@ namespace VULKAN_HPP_NAMESPACE } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPhysicalDeviceHostImageCopyPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceHostImageCopyProperties const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDeviceHostImageCopyPropertiesEXT &() VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceHostImageCopyProperties &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -72188,9 +72207,9 @@ namespace VULKAN_HPP_NAMESPACE #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceHostImageCopyPropertiesEXT const & ) const = default; + auto operator<=>( PhysicalDeviceHostImageCopyProperties const & ) const = default; #else - bool operator==( PhysicalDeviceHostImageCopyPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceHostImageCopyProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { # if 
defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); @@ -72201,14 +72220,14 @@ namespace VULKAN_HPP_NAMESPACE # endif } - bool operator!=( PhysicalDeviceHostImageCopyPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceHostImageCopyProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceHostImageCopyPropertiesEXT; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceHostImageCopyProperties; void * pNext = {}; uint32_t copySrcLayoutCount = {}; VULKAN_HPP_NAMESPACE::ImageLayout * pCopySrcLayouts = {}; @@ -72219,11 +72238,13 @@ namespace VULKAN_HPP_NAMESPACE }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceHostImageCopyPropertiesEXT; + using Type = PhysicalDeviceHostImageCopyProperties; }; + using PhysicalDeviceHostImageCopyPropertiesEXT = PhysicalDeviceHostImageCopyProperties; + struct PhysicalDeviceHostQueryResetFeatures { using NativeType = VkPhysicalDeviceHostQueryResetFeatures; @@ -74125,59 +74146,58 @@ namespace VULKAN_HPP_NAMESPACE using PhysicalDeviceImagelessFramebufferFeaturesKHR = PhysicalDeviceImagelessFramebufferFeatures; - struct PhysicalDeviceIndexTypeUint8FeaturesKHR + struct PhysicalDeviceIndexTypeUint8Features { - using NativeType = VkPhysicalDeviceIndexTypeUint8FeaturesKHR; + using NativeType = VkPhysicalDeviceIndexTypeUint8Features; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceIndexTypeUint8FeaturesKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceIndexTypeUint8Features; #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceIndexTypeUint8FeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 indexTypeUint8_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PhysicalDeviceIndexTypeUint8Features( VULKAN_HPP_NAMESPACE::Bool32 indexTypeUint8_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , indexTypeUint8{ indexTypeUint8_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceIndexTypeUint8FeaturesKHR( PhysicalDeviceIndexTypeUint8FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR PhysicalDeviceIndexTypeUint8Features( PhysicalDeviceIndexTypeUint8Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceIndexTypeUint8FeaturesKHR( VkPhysicalDeviceIndexTypeUint8FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceIndexTypeUint8FeaturesKHR( *reinterpret_cast( &rhs ) ) + PhysicalDeviceIndexTypeUint8Features( VkPhysicalDeviceIndexTypeUint8Features const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceIndexTypeUint8Features( *reinterpret_cast( &rhs ) ) { } - PhysicalDeviceIndexTypeUint8FeaturesKHR & operator=( PhysicalDeviceIndexTypeUint8FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PhysicalDeviceIndexTypeUint8Features & operator=( PhysicalDeviceIndexTypeUint8Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceIndexTypeUint8FeaturesKHR & operator=( VkPhysicalDeviceIndexTypeUint8FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceIndexTypeUint8Features & operator=( VkPhysicalDeviceIndexTypeUint8Features const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } 
#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceIndexTypeUint8FeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceIndexTypeUint8Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceIndexTypeUint8FeaturesKHR & setIndexTypeUint8( VULKAN_HPP_NAMESPACE::Bool32 indexTypeUint8_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceIndexTypeUint8Features & setIndexTypeUint8( VULKAN_HPP_NAMESPACE::Bool32 indexTypeUint8_ ) VULKAN_HPP_NOEXCEPT { indexTypeUint8 = indexTypeUint8_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPhysicalDeviceIndexTypeUint8FeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceIndexTypeUint8Features const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDeviceIndexTypeUint8FeaturesKHR &() VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceIndexTypeUint8Features &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -74193,9 +74213,9 @@ namespace VULKAN_HPP_NAMESPACE #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceIndexTypeUint8FeaturesKHR const & ) const = default; + auto operator<=>( PhysicalDeviceIndexTypeUint8Features const & ) const = default; #else - bool operator==( PhysicalDeviceIndexTypeUint8FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceIndexTypeUint8Features const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); @@ -74204,25 +74224,26 @@ namespace VULKAN_HPP_NAMESPACE # endif } - bool operator!=( PhysicalDeviceIndexTypeUint8FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceIndexTypeUint8Features const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceIndexTypeUint8FeaturesKHR; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceIndexTypeUint8Features; void * pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 indexTypeUint8 = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceIndexTypeUint8FeaturesKHR; + using Type = PhysicalDeviceIndexTypeUint8Features; }; - using PhysicalDeviceIndexTypeUint8FeaturesEXT = PhysicalDeviceIndexTypeUint8FeaturesKHR; + using PhysicalDeviceIndexTypeUint8FeaturesEXT = PhysicalDeviceIndexTypeUint8Features; + using PhysicalDeviceIndexTypeUint8FeaturesKHR = PhysicalDeviceIndexTypeUint8Features; struct PhysicalDeviceInheritedViewportScissorFeaturesNV { @@ -76291,21 +76312,21 @@ namespace VULKAN_HPP_NAMESPACE using Type = PhysicalDeviceLegacyVertexAttributesPropertiesEXT; }; - struct PhysicalDeviceLineRasterizationFeaturesKHR + struct PhysicalDeviceLineRasterizationFeatures { - using NativeType = VkPhysicalDeviceLineRasterizationFeaturesKHR; + using NativeType = VkPhysicalDeviceLineRasterizationFeatures; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceLineRasterizationFeaturesKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceLineRasterizationFeatures; #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) 
- VULKAN_HPP_CONSTEXPR PhysicalDeviceLineRasterizationFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 rectangularLines_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 bresenhamLines_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 smoothLines_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 stippledRectangularLines_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 stippledBresenhamLines_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 stippledSmoothLines_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PhysicalDeviceLineRasterizationFeatures( VULKAN_HPP_NAMESPACE::Bool32 rectangularLines_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 bresenhamLines_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 smoothLines_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 stippledRectangularLines_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 stippledBresenhamLines_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 stippledSmoothLines_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , rectangularLines{ rectangularLines_ } , bresenhamLines{ bresenhamLines_ } @@ -76316,63 +76337,62 @@ namespace VULKAN_HPP_NAMESPACE { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceLineRasterizationFeaturesKHR( PhysicalDeviceLineRasterizationFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR PhysicalDeviceLineRasterizationFeatures( PhysicalDeviceLineRasterizationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceLineRasterizationFeaturesKHR( VkPhysicalDeviceLineRasterizationFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceLineRasterizationFeaturesKHR( *reinterpret_cast( &rhs ) ) + PhysicalDeviceLineRasterizationFeatures( VkPhysicalDeviceLineRasterizationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceLineRasterizationFeatures( *reinterpret_cast( &rhs ) ) { } - PhysicalDeviceLineRasterizationFeaturesKHR & operator=( PhysicalDeviceLineRasterizationFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PhysicalDeviceLineRasterizationFeatures & operator=( PhysicalDeviceLineRasterizationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceLineRasterizationFeaturesKHR & operator=( VkPhysicalDeviceLineRasterizationFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceLineRasterizationFeatures & operator=( VkPhysicalDeviceLineRasterizationFeatures const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLineRasterizationFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLineRasterizationFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLineRasterizationFeaturesKHR & - setRectangularLines( VULKAN_HPP_NAMESPACE::Bool32 rectangularLines_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLineRasterizationFeatures & setRectangularLines( VULKAN_HPP_NAMESPACE::Bool32 rectangularLines_ ) VULKAN_HPP_NOEXCEPT { rectangularLines = rectangularLines_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLineRasterizationFeaturesKHR & setBresenhamLines( VULKAN_HPP_NAMESPACE::Bool32 bresenhamLines_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLineRasterizationFeatures & setBresenhamLines( VULKAN_HPP_NAMESPACE::Bool32 bresenhamLines_ ) VULKAN_HPP_NOEXCEPT { bresenhamLines = bresenhamLines_; return *this; } - VULKAN_HPP_CONSTEXPR_14 
PhysicalDeviceLineRasterizationFeaturesKHR & setSmoothLines( VULKAN_HPP_NAMESPACE::Bool32 smoothLines_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLineRasterizationFeatures & setSmoothLines( VULKAN_HPP_NAMESPACE::Bool32 smoothLines_ ) VULKAN_HPP_NOEXCEPT { smoothLines = smoothLines_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLineRasterizationFeaturesKHR & + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLineRasterizationFeatures & setStippledRectangularLines( VULKAN_HPP_NAMESPACE::Bool32 stippledRectangularLines_ ) VULKAN_HPP_NOEXCEPT { stippledRectangularLines = stippledRectangularLines_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLineRasterizationFeaturesKHR & + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLineRasterizationFeatures & setStippledBresenhamLines( VULKAN_HPP_NAMESPACE::Bool32 stippledBresenhamLines_ ) VULKAN_HPP_NOEXCEPT { stippledBresenhamLines = stippledBresenhamLines_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLineRasterizationFeaturesKHR & + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLineRasterizationFeatures & setStippledSmoothLines( VULKAN_HPP_NAMESPACE::Bool32 stippledSmoothLines_ ) VULKAN_HPP_NOEXCEPT { stippledSmoothLines = stippledSmoothLines_; @@ -76380,14 +76400,14 @@ namespace VULKAN_HPP_NAMESPACE } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPhysicalDeviceLineRasterizationFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceLineRasterizationFeatures const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDeviceLineRasterizationFeaturesKHR &() VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceLineRasterizationFeatures &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -76410,9 +76430,9 @@ namespace VULKAN_HPP_NAMESPACE #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceLineRasterizationFeaturesKHR const & ) const = default; + auto operator<=>( PhysicalDeviceLineRasterizationFeatures const & ) const = default; #else - bool operator==( PhysicalDeviceLineRasterizationFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceLineRasterizationFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); @@ -76423,14 +76443,14 @@ namespace VULKAN_HPP_NAMESPACE # endif } - bool operator!=( PhysicalDeviceLineRasterizationFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceLineRasterizationFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceLineRasterizationFeaturesKHR; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceLineRasterizationFeatures; void * pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 rectangularLines = {}; VULKAN_HPP_NAMESPACE::Bool32 bresenhamLines = {}; @@ -76441,51 +76461,52 @@ namespace VULKAN_HPP_NAMESPACE }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceLineRasterizationFeaturesKHR; + using Type = PhysicalDeviceLineRasterizationFeatures; }; - using PhysicalDeviceLineRasterizationFeaturesEXT = PhysicalDeviceLineRasterizationFeaturesKHR; + using PhysicalDeviceLineRasterizationFeaturesEXT = PhysicalDeviceLineRasterizationFeatures; + using PhysicalDeviceLineRasterizationFeaturesKHR = 
PhysicalDeviceLineRasterizationFeatures; - struct PhysicalDeviceLineRasterizationPropertiesKHR + struct PhysicalDeviceLineRasterizationProperties { - using NativeType = VkPhysicalDeviceLineRasterizationPropertiesKHR; + using NativeType = VkPhysicalDeviceLineRasterizationProperties; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceLineRasterizationPropertiesKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceLineRasterizationProperties; #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceLineRasterizationPropertiesKHR( uint32_t lineSubPixelPrecisionBits_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PhysicalDeviceLineRasterizationProperties( uint32_t lineSubPixelPrecisionBits_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , lineSubPixelPrecisionBits{ lineSubPixelPrecisionBits_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceLineRasterizationPropertiesKHR( PhysicalDeviceLineRasterizationPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR PhysicalDeviceLineRasterizationProperties( PhysicalDeviceLineRasterizationProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceLineRasterizationPropertiesKHR( VkPhysicalDeviceLineRasterizationPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceLineRasterizationPropertiesKHR( *reinterpret_cast( &rhs ) ) + PhysicalDeviceLineRasterizationProperties( VkPhysicalDeviceLineRasterizationProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceLineRasterizationProperties( *reinterpret_cast( &rhs ) ) { } - PhysicalDeviceLineRasterizationPropertiesKHR & operator=( PhysicalDeviceLineRasterizationPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PhysicalDeviceLineRasterizationProperties & operator=( PhysicalDeviceLineRasterizationProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceLineRasterizationPropertiesKHR & operator=( VkPhysicalDeviceLineRasterizationPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceLineRasterizationProperties & operator=( VkPhysicalDeviceLineRasterizationProperties const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkPhysicalDeviceLineRasterizationPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceLineRasterizationProperties const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDeviceLineRasterizationPropertiesKHR &() VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceLineRasterizationProperties &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -76501,9 +76522,9 @@ namespace VULKAN_HPP_NAMESPACE #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceLineRasterizationPropertiesKHR const & ) const = default; + auto operator<=>( PhysicalDeviceLineRasterizationProperties const & ) const = default; #else - bool operator==( PhysicalDeviceLineRasterizationPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceLineRasterizationProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return 
this->reflect() == rhs.reflect(); @@ -76512,25 +76533,26 @@ namespace VULKAN_HPP_NAMESPACE # endif } - bool operator!=( PhysicalDeviceLineRasterizationPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceLineRasterizationProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceLineRasterizationPropertiesKHR; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceLineRasterizationProperties; void * pNext = {}; uint32_t lineSubPixelPrecisionBits = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceLineRasterizationPropertiesKHR; + using Type = PhysicalDeviceLineRasterizationProperties; }; - using PhysicalDeviceLineRasterizationPropertiesEXT = PhysicalDeviceLineRasterizationPropertiesKHR; + using PhysicalDeviceLineRasterizationPropertiesEXT = PhysicalDeviceLineRasterizationProperties; + using PhysicalDeviceLineRasterizationPropertiesKHR = PhysicalDeviceLineRasterizationProperties; struct PhysicalDeviceLinearColorAttachmentFeaturesNV { @@ -76903,58 +76925,58 @@ namespace VULKAN_HPP_NAMESPACE using PhysicalDeviceMaintenance4PropertiesKHR = PhysicalDeviceMaintenance4Properties; - struct PhysicalDeviceMaintenance5FeaturesKHR + struct PhysicalDeviceMaintenance5Features { - using NativeType = VkPhysicalDeviceMaintenance5FeaturesKHR; + using NativeType = VkPhysicalDeviceMaintenance5Features; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMaintenance5FeaturesKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMaintenance5Features; #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance5FeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 maintenance5_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance5Features( VULKAN_HPP_NAMESPACE::Bool32 maintenance5_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , maintenance5{ maintenance5_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance5FeaturesKHR( PhysicalDeviceMaintenance5FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance5Features( PhysicalDeviceMaintenance5Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceMaintenance5FeaturesKHR( VkPhysicalDeviceMaintenance5FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceMaintenance5FeaturesKHR( *reinterpret_cast( &rhs ) ) + PhysicalDeviceMaintenance5Features( VkPhysicalDeviceMaintenance5Features const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMaintenance5Features( *reinterpret_cast( &rhs ) ) { } - PhysicalDeviceMaintenance5FeaturesKHR & operator=( PhysicalDeviceMaintenance5FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PhysicalDeviceMaintenance5Features & operator=( PhysicalDeviceMaintenance5Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceMaintenance5FeaturesKHR & operator=( VkPhysicalDeviceMaintenance5FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceMaintenance5Features & operator=( VkPhysicalDeviceMaintenance5Features const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS 
) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMaintenance5FeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMaintenance5Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMaintenance5FeaturesKHR & setMaintenance5( VULKAN_HPP_NAMESPACE::Bool32 maintenance5_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMaintenance5Features & setMaintenance5( VULKAN_HPP_NAMESPACE::Bool32 maintenance5_ ) VULKAN_HPP_NOEXCEPT { maintenance5 = maintenance5_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPhysicalDeviceMaintenance5FeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceMaintenance5Features const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDeviceMaintenance5FeaturesKHR &() VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceMaintenance5Features &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -76970,9 +76992,9 @@ namespace VULKAN_HPP_NAMESPACE #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceMaintenance5FeaturesKHR const & ) const = default; + auto operator<=>( PhysicalDeviceMaintenance5Features const & ) const = default; #else - bool operator==( PhysicalDeviceMaintenance5FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceMaintenance5Features const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); @@ -76981,39 +77003,41 @@ namespace VULKAN_HPP_NAMESPACE # endif } - bool operator!=( PhysicalDeviceMaintenance5FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceMaintenance5Features const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMaintenance5FeaturesKHR; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMaintenance5Features; void * pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 maintenance5 = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceMaintenance5FeaturesKHR; + using Type = PhysicalDeviceMaintenance5Features; }; - struct PhysicalDeviceMaintenance5PropertiesKHR + using PhysicalDeviceMaintenance5FeaturesKHR = PhysicalDeviceMaintenance5Features; + + struct PhysicalDeviceMaintenance5Properties { - using NativeType = VkPhysicalDeviceMaintenance5PropertiesKHR; + using NativeType = VkPhysicalDeviceMaintenance5Properties; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMaintenance5PropertiesKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMaintenance5Properties; #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance5PropertiesKHR( VULKAN_HPP_NAMESPACE::Bool32 earlyFragmentMultisampleCoverageAfterSampleCounting_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 earlyFragmentSampleMaskTestBeforeSampleCounting_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 depthStencilSwizzleOneSupport_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 polygonModePointSize_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 nonStrictSinglePixelWideLinesUseParallelogram_ = {}, - 
VULKAN_HPP_NAMESPACE::Bool32 nonStrictWideLinesUseParallelogram_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance5Properties( VULKAN_HPP_NAMESPACE::Bool32 earlyFragmentMultisampleCoverageAfterSampleCounting_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 earlyFragmentSampleMaskTestBeforeSampleCounting_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 depthStencilSwizzleOneSupport_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 polygonModePointSize_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 nonStrictSinglePixelWideLinesUseParallelogram_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 nonStrictWideLinesUseParallelogram_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , earlyFragmentMultisampleCoverageAfterSampleCounting{ earlyFragmentMultisampleCoverageAfterSampleCounting_ } , earlyFragmentSampleMaskTestBeforeSampleCounting{ earlyFragmentSampleMaskTestBeforeSampleCounting_ } @@ -77024,30 +77048,30 @@ namespace VULKAN_HPP_NAMESPACE { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance5PropertiesKHR( PhysicalDeviceMaintenance5PropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance5Properties( PhysicalDeviceMaintenance5Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceMaintenance5PropertiesKHR( VkPhysicalDeviceMaintenance5PropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceMaintenance5PropertiesKHR( *reinterpret_cast( &rhs ) ) + PhysicalDeviceMaintenance5Properties( VkPhysicalDeviceMaintenance5Properties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMaintenance5Properties( *reinterpret_cast( &rhs ) ) { } - PhysicalDeviceMaintenance5PropertiesKHR & operator=( PhysicalDeviceMaintenance5PropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PhysicalDeviceMaintenance5Properties & operator=( PhysicalDeviceMaintenance5Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceMaintenance5PropertiesKHR & operator=( VkPhysicalDeviceMaintenance5PropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceMaintenance5Properties & operator=( VkPhysicalDeviceMaintenance5Properties const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkPhysicalDeviceMaintenance5PropertiesKHR const &() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceMaintenance5Properties const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDeviceMaintenance5PropertiesKHR &() VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceMaintenance5Properties &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -77077,9 +77101,9 @@ namespace VULKAN_HPP_NAMESPACE #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceMaintenance5PropertiesKHR const & ) const = default; + auto operator<=>( PhysicalDeviceMaintenance5Properties const & ) const = default; #else - bool operator==( PhysicalDeviceMaintenance5PropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceMaintenance5Properties const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); @@ -77093,14 +77117,14 @@ namespace VULKAN_HPP_NAMESPACE # endif } - bool operator!=( PhysicalDeviceMaintenance5PropertiesKHR const & rhs ) const 
VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceMaintenance5Properties const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMaintenance5PropertiesKHR; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMaintenance5Properties; void * pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 earlyFragmentMultisampleCoverageAfterSampleCounting = {}; VULKAN_HPP_NAMESPACE::Bool32 earlyFragmentSampleMaskTestBeforeSampleCounting = {}; @@ -77111,63 +77135,65 @@ namespace VULKAN_HPP_NAMESPACE }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceMaintenance5PropertiesKHR; + using Type = PhysicalDeviceMaintenance5Properties; }; - struct PhysicalDeviceMaintenance6FeaturesKHR + using PhysicalDeviceMaintenance5PropertiesKHR = PhysicalDeviceMaintenance5Properties; + + struct PhysicalDeviceMaintenance6Features { - using NativeType = VkPhysicalDeviceMaintenance6FeaturesKHR; + using NativeType = VkPhysicalDeviceMaintenance6Features; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMaintenance6FeaturesKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMaintenance6Features; #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance6FeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 maintenance6_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance6Features( VULKAN_HPP_NAMESPACE::Bool32 maintenance6_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , maintenance6{ maintenance6_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance6FeaturesKHR( PhysicalDeviceMaintenance6FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance6Features( PhysicalDeviceMaintenance6Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceMaintenance6FeaturesKHR( VkPhysicalDeviceMaintenance6FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceMaintenance6FeaturesKHR( *reinterpret_cast( &rhs ) ) + PhysicalDeviceMaintenance6Features( VkPhysicalDeviceMaintenance6Features const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMaintenance6Features( *reinterpret_cast( &rhs ) ) { } - PhysicalDeviceMaintenance6FeaturesKHR & operator=( PhysicalDeviceMaintenance6FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PhysicalDeviceMaintenance6Features & operator=( PhysicalDeviceMaintenance6Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceMaintenance6FeaturesKHR & operator=( VkPhysicalDeviceMaintenance6FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceMaintenance6Features & operator=( VkPhysicalDeviceMaintenance6Features const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMaintenance6FeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMaintenance6Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMaintenance6FeaturesKHR & setMaintenance6( VULKAN_HPP_NAMESPACE::Bool32 maintenance6_ ) VULKAN_HPP_NOEXCEPT + 
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMaintenance6Features & setMaintenance6( VULKAN_HPP_NAMESPACE::Bool32 maintenance6_ ) VULKAN_HPP_NOEXCEPT { maintenance6 = maintenance6_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPhysicalDeviceMaintenance6FeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceMaintenance6Features const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDeviceMaintenance6FeaturesKHR &() VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceMaintenance6Features &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -77183,9 +77209,9 @@ namespace VULKAN_HPP_NAMESPACE #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceMaintenance6FeaturesKHR const & ) const = default; + auto operator<=>( PhysicalDeviceMaintenance6Features const & ) const = default; #else - bool operator==( PhysicalDeviceMaintenance6FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceMaintenance6Features const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); @@ -77194,36 +77220,38 @@ namespace VULKAN_HPP_NAMESPACE # endif } - bool operator!=( PhysicalDeviceMaintenance6FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceMaintenance6Features const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMaintenance6FeaturesKHR; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMaintenance6Features; void * pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 maintenance6 = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceMaintenance6FeaturesKHR; + using Type = PhysicalDeviceMaintenance6Features; }; - struct PhysicalDeviceMaintenance6PropertiesKHR + using PhysicalDeviceMaintenance6FeaturesKHR = PhysicalDeviceMaintenance6Features; + + struct PhysicalDeviceMaintenance6Properties { - using NativeType = VkPhysicalDeviceMaintenance6PropertiesKHR; + using NativeType = VkPhysicalDeviceMaintenance6Properties; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMaintenance6PropertiesKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMaintenance6Properties; #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance6PropertiesKHR( VULKAN_HPP_NAMESPACE::Bool32 blockTexelViewCompatibleMultipleLayers_ = {}, - uint32_t maxCombinedImageSamplerDescriptorCount_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateClampCombinerInputs_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance6Properties( VULKAN_HPP_NAMESPACE::Bool32 blockTexelViewCompatibleMultipleLayers_ = {}, + uint32_t maxCombinedImageSamplerDescriptorCount_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateClampCombinerInputs_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , blockTexelViewCompatibleMultipleLayers{ blockTexelViewCompatibleMultipleLayers_ } , maxCombinedImageSamplerDescriptorCount{ maxCombinedImageSamplerDescriptorCount_ } @@ -77231,30 +77259,30 @@ namespace 
VULKAN_HPP_NAMESPACE { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance6PropertiesKHR( PhysicalDeviceMaintenance6PropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance6Properties( PhysicalDeviceMaintenance6Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceMaintenance6PropertiesKHR( VkPhysicalDeviceMaintenance6PropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceMaintenance6PropertiesKHR( *reinterpret_cast( &rhs ) ) + PhysicalDeviceMaintenance6Properties( VkPhysicalDeviceMaintenance6Properties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceMaintenance6Properties( *reinterpret_cast( &rhs ) ) { } - PhysicalDeviceMaintenance6PropertiesKHR & operator=( PhysicalDeviceMaintenance6PropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PhysicalDeviceMaintenance6Properties & operator=( PhysicalDeviceMaintenance6Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceMaintenance6PropertiesKHR & operator=( VkPhysicalDeviceMaintenance6PropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceMaintenance6Properties & operator=( VkPhysicalDeviceMaintenance6Properties const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkPhysicalDeviceMaintenance6PropertiesKHR const &() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceMaintenance6Properties const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDeviceMaintenance6PropertiesKHR &() VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceMaintenance6Properties &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -77274,9 +77302,9 @@ namespace VULKAN_HPP_NAMESPACE #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceMaintenance6PropertiesKHR const & ) const = default; + auto operator<=>( PhysicalDeviceMaintenance6Properties const & ) const = default; #else - bool operator==( PhysicalDeviceMaintenance6PropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceMaintenance6Properties const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); @@ -77287,14 +77315,14 @@ namespace VULKAN_HPP_NAMESPACE # endif } - bool operator!=( PhysicalDeviceMaintenance6PropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceMaintenance6Properties const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMaintenance6PropertiesKHR; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMaintenance6Properties; void * pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 blockTexelViewCompatibleMultipleLayers = {}; uint32_t maxCombinedImageSamplerDescriptorCount = {}; @@ -77302,11 +77330,13 @@ namespace VULKAN_HPP_NAMESPACE }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceMaintenance6PropertiesKHR; + using Type = PhysicalDeviceMaintenance6Properties; }; + using PhysicalDeviceMaintenance6PropertiesKHR = PhysicalDeviceMaintenance6Properties; + struct PhysicalDeviceMaintenance7FeaturesKHR { using NativeType = VkPhysicalDeviceMaintenance7FeaturesKHR; @@ -81725,46 +81755,46 @@ namespace 
VULKAN_HPP_NAMESPACE using Type = PhysicalDevicePipelinePropertiesFeaturesEXT; }; - struct PhysicalDevicePipelineProtectedAccessFeaturesEXT + struct PhysicalDevicePipelineProtectedAccessFeatures { - using NativeType = VkPhysicalDevicePipelineProtectedAccessFeaturesEXT; + using NativeType = VkPhysicalDevicePipelineProtectedAccessFeatures; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePipelineProtectedAccessFeaturesEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePipelineProtectedAccessFeatures; #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineProtectedAccessFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 pipelineProtectedAccess_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineProtectedAccessFeatures( VULKAN_HPP_NAMESPACE::Bool32 pipelineProtectedAccess_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , pipelineProtectedAccess{ pipelineProtectedAccess_ } { } VULKAN_HPP_CONSTEXPR - PhysicalDevicePipelineProtectedAccessFeaturesEXT( PhysicalDevicePipelineProtectedAccessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PhysicalDevicePipelineProtectedAccessFeatures( PhysicalDevicePipelineProtectedAccessFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDevicePipelineProtectedAccessFeaturesEXT( VkPhysicalDevicePipelineProtectedAccessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDevicePipelineProtectedAccessFeaturesEXT( *reinterpret_cast( &rhs ) ) + PhysicalDevicePipelineProtectedAccessFeatures( VkPhysicalDevicePipelineProtectedAccessFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDevicePipelineProtectedAccessFeatures( *reinterpret_cast( &rhs ) ) { } - PhysicalDevicePipelineProtectedAccessFeaturesEXT & operator=( PhysicalDevicePipelineProtectedAccessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PhysicalDevicePipelineProtectedAccessFeatures & operator=( PhysicalDevicePipelineProtectedAccessFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDevicePipelineProtectedAccessFeaturesEXT & operator=( VkPhysicalDevicePipelineProtectedAccessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDevicePipelineProtectedAccessFeatures & operator=( VkPhysicalDevicePipelineProtectedAccessFeatures const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineProtectedAccessFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineProtectedAccessFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineProtectedAccessFeaturesEXT & + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineProtectedAccessFeatures & setPipelineProtectedAccess( VULKAN_HPP_NAMESPACE::Bool32 pipelineProtectedAccess_ ) VULKAN_HPP_NOEXCEPT { pipelineProtectedAccess = pipelineProtectedAccess_; @@ -81772,14 +81802,14 @@ namespace VULKAN_HPP_NAMESPACE } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPhysicalDevicePipelineProtectedAccessFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDevicePipelineProtectedAccessFeatures const &() const VULKAN_HPP_NOEXCEPT { - return 
*reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDevicePipelineProtectedAccessFeaturesEXT &() VULKAN_HPP_NOEXCEPT + operator VkPhysicalDevicePipelineProtectedAccessFeatures &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -81795,9 +81825,9 @@ namespace VULKAN_HPP_NAMESPACE #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDevicePipelineProtectedAccessFeaturesEXT const & ) const = default; + auto operator<=>( PhysicalDevicePipelineProtectedAccessFeatures const & ) const = default; #else - bool operator==( PhysicalDevicePipelineProtectedAccessFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDevicePipelineProtectedAccessFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); @@ -81806,63 +81836,65 @@ namespace VULKAN_HPP_NAMESPACE # endif } - bool operator!=( PhysicalDevicePipelineProtectedAccessFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDevicePipelineProtectedAccessFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePipelineProtectedAccessFeaturesEXT; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePipelineProtectedAccessFeatures; void * pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 pipelineProtectedAccess = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDevicePipelineProtectedAccessFeaturesEXT; + using Type = PhysicalDevicePipelineProtectedAccessFeatures; }; - struct PhysicalDevicePipelineRobustnessFeaturesEXT + using PhysicalDevicePipelineProtectedAccessFeaturesEXT = PhysicalDevicePipelineProtectedAccessFeatures; + + struct PhysicalDevicePipelineRobustnessFeatures { - using NativeType = VkPhysicalDevicePipelineRobustnessFeaturesEXT; + using NativeType = VkPhysicalDevicePipelineRobustnessFeatures; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePipelineRobustnessFeaturesEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePipelineRobustnessFeatures; #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineRobustnessFeaturesEXT( VULKAN_HPP_NAMESPACE::Bool32 pipelineRobustness_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineRobustnessFeatures( VULKAN_HPP_NAMESPACE::Bool32 pipelineRobustness_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , pipelineRobustness{ pipelineRobustness_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineRobustnessFeaturesEXT( PhysicalDevicePipelineRobustnessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineRobustnessFeatures( PhysicalDevicePipelineRobustnessFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDevicePipelineRobustnessFeaturesEXT( VkPhysicalDevicePipelineRobustnessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDevicePipelineRobustnessFeaturesEXT( *reinterpret_cast( &rhs ) ) + PhysicalDevicePipelineRobustnessFeatures( VkPhysicalDevicePipelineRobustnessFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDevicePipelineRobustnessFeatures( *reinterpret_cast( 
&rhs ) ) { } - PhysicalDevicePipelineRobustnessFeaturesEXT & operator=( PhysicalDevicePipelineRobustnessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PhysicalDevicePipelineRobustnessFeatures & operator=( PhysicalDevicePipelineRobustnessFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDevicePipelineRobustnessFeaturesEXT & operator=( VkPhysicalDevicePipelineRobustnessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDevicePipelineRobustnessFeatures & operator=( VkPhysicalDevicePipelineRobustnessFeatures const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineRobustnessFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineRobustnessFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineRobustnessFeaturesEXT & + VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineRobustnessFeatures & setPipelineRobustness( VULKAN_HPP_NAMESPACE::Bool32 pipelineRobustness_ ) VULKAN_HPP_NOEXCEPT { pipelineRobustness = pipelineRobustness_; @@ -81870,14 +81902,14 @@ namespace VULKAN_HPP_NAMESPACE } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPhysicalDevicePipelineRobustnessFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDevicePipelineRobustnessFeatures const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDevicePipelineRobustnessFeaturesEXT &() VULKAN_HPP_NOEXCEPT + operator VkPhysicalDevicePipelineRobustnessFeatures &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -81893,9 +81925,9 @@ namespace VULKAN_HPP_NAMESPACE #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDevicePipelineRobustnessFeaturesEXT const & ) const = default; + auto operator<=>( PhysicalDevicePipelineRobustnessFeatures const & ) const = default; #else - bool operator==( PhysicalDevicePipelineRobustnessFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDevicePipelineRobustnessFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); @@ -81904,42 +81936,43 @@ namespace VULKAN_HPP_NAMESPACE # endif } - bool operator!=( PhysicalDevicePipelineRobustnessFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDevicePipelineRobustnessFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePipelineRobustnessFeaturesEXT; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePipelineRobustnessFeatures; void * pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 pipelineRobustness = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDevicePipelineRobustnessFeaturesEXT; + using Type = PhysicalDevicePipelineRobustnessFeatures; }; - struct PhysicalDevicePipelineRobustnessPropertiesEXT + using PhysicalDevicePipelineRobustnessFeaturesEXT = PhysicalDevicePipelineRobustnessFeatures; + + struct PhysicalDevicePipelineRobustnessProperties { - using NativeType = 
VkPhysicalDevicePipelineRobustnessPropertiesEXT; + using NativeType = VkPhysicalDevicePipelineRobustnessProperties; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePipelineRobustnessPropertiesEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePipelineRobustnessProperties; #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR - PhysicalDevicePipelineRobustnessPropertiesEXT( VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT defaultRobustnessStorageBuffers_ = - VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT::eDeviceDefault, - VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT defaultRobustnessUniformBuffers_ = - VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT::eDeviceDefault, - VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT defaultRobustnessVertexInputs_ = - VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT::eDeviceDefault, - VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehaviorEXT defaultRobustnessImages_ = - VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehaviorEXT::eDeviceDefault, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineRobustnessProperties( + VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior defaultRobustnessStorageBuffers_ = + VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior::eDeviceDefault, + VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior defaultRobustnessUniformBuffers_ = + VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior::eDeviceDefault, + VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior defaultRobustnessVertexInputs_ = + VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior::eDeviceDefault, + VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehavior defaultRobustnessImages_ = VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehavior::eDeviceDefault, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , defaultRobustnessStorageBuffers{ defaultRobustnessStorageBuffers_ } , defaultRobustnessUniformBuffers{ defaultRobustnessUniformBuffers_ } @@ -81948,31 +81981,30 @@ namespace VULKAN_HPP_NAMESPACE { } - VULKAN_HPP_CONSTEXPR - PhysicalDevicePipelineRobustnessPropertiesEXT( PhysicalDevicePipelineRobustnessPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineRobustnessProperties( PhysicalDevicePipelineRobustnessProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDevicePipelineRobustnessPropertiesEXT( VkPhysicalDevicePipelineRobustnessPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDevicePipelineRobustnessPropertiesEXT( *reinterpret_cast( &rhs ) ) + PhysicalDevicePipelineRobustnessProperties( VkPhysicalDevicePipelineRobustnessProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDevicePipelineRobustnessProperties( *reinterpret_cast( &rhs ) ) { } - PhysicalDevicePipelineRobustnessPropertiesEXT & operator=( PhysicalDevicePipelineRobustnessPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PhysicalDevicePipelineRobustnessProperties & operator=( PhysicalDevicePipelineRobustnessProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDevicePipelineRobustnessPropertiesEXT & operator=( VkPhysicalDevicePipelineRobustnessPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDevicePipelineRobustnessProperties & operator=( 
VkPhysicalDevicePipelineRobustnessProperties const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkPhysicalDevicePipelineRobustnessPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDevicePipelineRobustnessProperties const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDevicePipelineRobustnessPropertiesEXT &() VULKAN_HPP_NOEXCEPT + operator VkPhysicalDevicePipelineRobustnessProperties &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -81981,10 +82013,10 @@ namespace VULKAN_HPP_NAMESPACE # else std::tuple + VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior const &, + VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior const &, + VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior const &, + VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehavior const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { @@ -81993,9 +82025,9 @@ namespace VULKAN_HPP_NAMESPACE #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDevicePipelineRobustnessPropertiesEXT const & ) const = default; + auto operator<=>( PhysicalDevicePipelineRobustnessProperties const & ) const = default; #else - bool operator==( PhysicalDevicePipelineRobustnessPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDevicePipelineRobustnessProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); @@ -82006,30 +82038,32 @@ namespace VULKAN_HPP_NAMESPACE # endif } - bool operator!=( PhysicalDevicePipelineRobustnessPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDevicePipelineRobustnessProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePipelineRobustnessPropertiesEXT; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT defaultRobustnessStorageBuffers = - VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT::eDeviceDefault; - VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT defaultRobustnessUniformBuffers = - VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT::eDeviceDefault; - VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT defaultRobustnessVertexInputs = - VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT::eDeviceDefault; - VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehaviorEXT defaultRobustnessImages = VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehaviorEXT::eDeviceDefault; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePipelineRobustnessProperties; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior defaultRobustnessStorageBuffers = + VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior::eDeviceDefault; + VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior defaultRobustnessUniformBuffers = + VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior::eDeviceDefault; + VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior defaultRobustnessVertexInputs = + VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior::eDeviceDefault; + VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehavior defaultRobustnessImages = 
VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehavior::eDeviceDefault; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDevicePipelineRobustnessPropertiesEXT; + using Type = PhysicalDevicePipelineRobustnessProperties; }; + using PhysicalDevicePipelineRobustnessPropertiesEXT = PhysicalDevicePipelineRobustnessProperties; + struct PhysicalDevicePointClippingProperties { using NativeType = VkPhysicalDevicePointClippingProperties; @@ -83593,44 +83627,44 @@ namespace VULKAN_HPP_NAMESPACE using Type = PhysicalDeviceProvokingVertexPropertiesEXT; }; - struct PhysicalDevicePushDescriptorPropertiesKHR + struct PhysicalDevicePushDescriptorProperties { - using NativeType = VkPhysicalDevicePushDescriptorPropertiesKHR; + using NativeType = VkPhysicalDevicePushDescriptorProperties; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePushDescriptorPropertiesKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePushDescriptorProperties; #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDevicePushDescriptorPropertiesKHR( uint32_t maxPushDescriptors_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PhysicalDevicePushDescriptorProperties( uint32_t maxPushDescriptors_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , maxPushDescriptors{ maxPushDescriptors_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDevicePushDescriptorPropertiesKHR( PhysicalDevicePushDescriptorPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR PhysicalDevicePushDescriptorProperties( PhysicalDevicePushDescriptorProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDevicePushDescriptorPropertiesKHR( VkPhysicalDevicePushDescriptorPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDevicePushDescriptorPropertiesKHR( *reinterpret_cast( &rhs ) ) + PhysicalDevicePushDescriptorProperties( VkPhysicalDevicePushDescriptorProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDevicePushDescriptorProperties( *reinterpret_cast( &rhs ) ) { } - PhysicalDevicePushDescriptorPropertiesKHR & operator=( PhysicalDevicePushDescriptorPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PhysicalDevicePushDescriptorProperties & operator=( PhysicalDevicePushDescriptorProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDevicePushDescriptorPropertiesKHR & operator=( VkPhysicalDevicePushDescriptorPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDevicePushDescriptorProperties & operator=( VkPhysicalDevicePushDescriptorProperties const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkPhysicalDevicePushDescriptorPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDevicePushDescriptorProperties const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDevicePushDescriptorPropertiesKHR &() VULKAN_HPP_NOEXCEPT + operator VkPhysicalDevicePushDescriptorProperties &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -83646,9 +83680,9 @@ namespace VULKAN_HPP_NAMESPACE #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( 
PhysicalDevicePushDescriptorPropertiesKHR const & ) const = default; + auto operator<=>( PhysicalDevicePushDescriptorProperties const & ) const = default; #else - bool operator==( PhysicalDevicePushDescriptorPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDevicePushDescriptorProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); @@ -83657,24 +83691,26 @@ namespace VULKAN_HPP_NAMESPACE # endif } - bool operator!=( PhysicalDevicePushDescriptorPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDevicePushDescriptorProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePushDescriptorPropertiesKHR; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePushDescriptorProperties; void * pNext = {}; uint32_t maxPushDescriptors = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDevicePushDescriptorPropertiesKHR; + using Type = PhysicalDevicePushDescriptorProperties; }; + using PhysicalDevicePushDescriptorPropertiesKHR = PhysicalDevicePushDescriptorProperties; + struct PhysicalDeviceRGBA10X6FormatsFeaturesEXT { using NativeType = VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT; @@ -88319,45 +88355,45 @@ namespace VULKAN_HPP_NAMESPACE }; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ - struct PhysicalDeviceShaderExpectAssumeFeaturesKHR + struct PhysicalDeviceShaderExpectAssumeFeatures { - using NativeType = VkPhysicalDeviceShaderExpectAssumeFeaturesKHR; + using NativeType = VkPhysicalDeviceShaderExpectAssumeFeatures; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderExpectAssumeFeaturesKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderExpectAssumeFeatures; #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderExpectAssumeFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 shaderExpectAssume_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderExpectAssumeFeatures( VULKAN_HPP_NAMESPACE::Bool32 shaderExpectAssume_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , shaderExpectAssume{ shaderExpectAssume_ } { } - VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderExpectAssumeFeaturesKHR( PhysicalDeviceShaderExpectAssumeFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderExpectAssumeFeatures( PhysicalDeviceShaderExpectAssumeFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceShaderExpectAssumeFeaturesKHR( VkPhysicalDeviceShaderExpectAssumeFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceShaderExpectAssumeFeaturesKHR( *reinterpret_cast( &rhs ) ) + PhysicalDeviceShaderExpectAssumeFeatures( VkPhysicalDeviceShaderExpectAssumeFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderExpectAssumeFeatures( *reinterpret_cast( &rhs ) ) { } - PhysicalDeviceShaderExpectAssumeFeaturesKHR & operator=( PhysicalDeviceShaderExpectAssumeFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PhysicalDeviceShaderExpectAssumeFeatures & operator=( PhysicalDeviceShaderExpectAssumeFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - 
PhysicalDeviceShaderExpectAssumeFeaturesKHR & operator=( VkPhysicalDeviceShaderExpectAssumeFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceShaderExpectAssumeFeatures & operator=( VkPhysicalDeviceShaderExpectAssumeFeatures const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderExpectAssumeFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderExpectAssumeFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderExpectAssumeFeaturesKHR & + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderExpectAssumeFeatures & setShaderExpectAssume( VULKAN_HPP_NAMESPACE::Bool32 shaderExpectAssume_ ) VULKAN_HPP_NOEXCEPT { shaderExpectAssume = shaderExpectAssume_; @@ -88365,14 +88401,14 @@ namespace VULKAN_HPP_NAMESPACE } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPhysicalDeviceShaderExpectAssumeFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceShaderExpectAssumeFeatures const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDeviceShaderExpectAssumeFeaturesKHR &() VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceShaderExpectAssumeFeatures &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -88388,9 +88424,9 @@ namespace VULKAN_HPP_NAMESPACE #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceShaderExpectAssumeFeaturesKHR const & ) const = default; + auto operator<=>( PhysicalDeviceShaderExpectAssumeFeatures const & ) const = default; #else - bool operator==( PhysicalDeviceShaderExpectAssumeFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceShaderExpectAssumeFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); @@ -88399,24 +88435,26 @@ namespace VULKAN_HPP_NAMESPACE # endif } - bool operator!=( PhysicalDeviceShaderExpectAssumeFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceShaderExpectAssumeFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderExpectAssumeFeaturesKHR; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderExpectAssumeFeatures; void * pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderExpectAssume = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceShaderExpectAssumeFeaturesKHR; + using Type = PhysicalDeviceShaderExpectAssumeFeatures; }; + using PhysicalDeviceShaderExpectAssumeFeaturesKHR = PhysicalDeviceShaderExpectAssumeFeatures; + struct PhysicalDeviceShaderFloat16Int8Features { using NativeType = VkPhysicalDeviceShaderFloat16Int8Features; @@ -88526,46 +88564,45 @@ namespace VULKAN_HPP_NAMESPACE using PhysicalDeviceFloat16Int8FeaturesKHR = PhysicalDeviceShaderFloat16Int8Features; using PhysicalDeviceShaderFloat16Int8FeaturesKHR = PhysicalDeviceShaderFloat16Int8Features; - struct PhysicalDeviceShaderFloatControls2FeaturesKHR + struct PhysicalDeviceShaderFloatControls2Features { - using NativeType = 
VkPhysicalDeviceShaderFloatControls2FeaturesKHR; + using NativeType = VkPhysicalDeviceShaderFloatControls2Features; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderFloatControls2FeaturesKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderFloatControls2Features; #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderFloatControls2FeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 shaderFloatControls2_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderFloatControls2Features( VULKAN_HPP_NAMESPACE::Bool32 shaderFloatControls2_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , shaderFloatControls2{ shaderFloatControls2_ } { } - VULKAN_HPP_CONSTEXPR - PhysicalDeviceShaderFloatControls2FeaturesKHR( PhysicalDeviceShaderFloatControls2FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderFloatControls2Features( PhysicalDeviceShaderFloatControls2Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceShaderFloatControls2FeaturesKHR( VkPhysicalDeviceShaderFloatControls2FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceShaderFloatControls2FeaturesKHR( *reinterpret_cast( &rhs ) ) + PhysicalDeviceShaderFloatControls2Features( VkPhysicalDeviceShaderFloatControls2Features const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderFloatControls2Features( *reinterpret_cast( &rhs ) ) { } - PhysicalDeviceShaderFloatControls2FeaturesKHR & operator=( PhysicalDeviceShaderFloatControls2FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PhysicalDeviceShaderFloatControls2Features & operator=( PhysicalDeviceShaderFloatControls2Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceShaderFloatControls2FeaturesKHR & operator=( VkPhysicalDeviceShaderFloatControls2FeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceShaderFloatControls2Features & operator=( VkPhysicalDeviceShaderFloatControls2Features const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderFloatControls2FeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderFloatControls2Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderFloatControls2FeaturesKHR & + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderFloatControls2Features & setShaderFloatControls2( VULKAN_HPP_NAMESPACE::Bool32 shaderFloatControls2_ ) VULKAN_HPP_NOEXCEPT { shaderFloatControls2 = shaderFloatControls2_; @@ -88573,14 +88610,14 @@ namespace VULKAN_HPP_NAMESPACE } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPhysicalDeviceShaderFloatControls2FeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceShaderFloatControls2Features const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDeviceShaderFloatControls2FeaturesKHR &() VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceShaderFloatControls2Features &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( 
VULKAN_HPP_USE_REFLECT ) @@ -88596,9 +88633,9 @@ namespace VULKAN_HPP_NAMESPACE #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceShaderFloatControls2FeaturesKHR const & ) const = default; + auto operator<=>( PhysicalDeviceShaderFloatControls2Features const & ) const = default; #else - bool operator==( PhysicalDeviceShaderFloatControls2FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceShaderFloatControls2Features const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); @@ -88607,24 +88644,26 @@ namespace VULKAN_HPP_NAMESPACE # endif } - bool operator!=( PhysicalDeviceShaderFloatControls2FeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceShaderFloatControls2Features const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderFloatControls2FeaturesKHR; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderFloatControls2Features; void * pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderFloatControls2 = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceShaderFloatControls2FeaturesKHR; + using Type = PhysicalDeviceShaderFloatControls2Features; }; + using PhysicalDeviceShaderFloatControls2FeaturesKHR = PhysicalDeviceShaderFloatControls2Features; + struct PhysicalDeviceShaderImageAtomicInt64FeaturesEXT { using NativeType = VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT; @@ -90358,55 +90397,54 @@ namespace VULKAN_HPP_NAMESPACE using PhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR = PhysicalDeviceShaderSubgroupExtendedTypesFeatures; - struct PhysicalDeviceShaderSubgroupRotateFeaturesKHR + struct PhysicalDeviceShaderSubgroupRotateFeatures { - using NativeType = VkPhysicalDeviceShaderSubgroupRotateFeaturesKHR; + using NativeType = VkPhysicalDeviceShaderSubgroupRotateFeatures; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderSubgroupRotateFeaturesKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderSubgroupRotateFeatures; #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSubgroupRotateFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupRotate_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupRotateClustered_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSubgroupRotateFeatures( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupRotate_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupRotateClustered_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , shaderSubgroupRotate{ shaderSubgroupRotate_ } , shaderSubgroupRotateClustered{ shaderSubgroupRotateClustered_ } { } - VULKAN_HPP_CONSTEXPR - PhysicalDeviceShaderSubgroupRotateFeaturesKHR( PhysicalDeviceShaderSubgroupRotateFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSubgroupRotateFeatures( PhysicalDeviceShaderSubgroupRotateFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceShaderSubgroupRotateFeaturesKHR( VkPhysicalDeviceShaderSubgroupRotateFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceShaderSubgroupRotateFeaturesKHR( *reinterpret_cast( &rhs ) ) + 
PhysicalDeviceShaderSubgroupRotateFeatures( VkPhysicalDeviceShaderSubgroupRotateFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceShaderSubgroupRotateFeatures( *reinterpret_cast( &rhs ) ) { } - PhysicalDeviceShaderSubgroupRotateFeaturesKHR & operator=( PhysicalDeviceShaderSubgroupRotateFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PhysicalDeviceShaderSubgroupRotateFeatures & operator=( PhysicalDeviceShaderSubgroupRotateFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceShaderSubgroupRotateFeaturesKHR & operator=( VkPhysicalDeviceShaderSubgroupRotateFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceShaderSubgroupRotateFeatures & operator=( VkPhysicalDeviceShaderSubgroupRotateFeatures const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderSubgroupRotateFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderSubgroupRotateFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderSubgroupRotateFeaturesKHR & + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderSubgroupRotateFeatures & setShaderSubgroupRotate( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupRotate_ ) VULKAN_HPP_NOEXCEPT { shaderSubgroupRotate = shaderSubgroupRotate_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderSubgroupRotateFeaturesKHR & + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderSubgroupRotateFeatures & setShaderSubgroupRotateClustered( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupRotateClustered_ ) VULKAN_HPP_NOEXCEPT { shaderSubgroupRotateClustered = shaderSubgroupRotateClustered_; @@ -90414,14 +90452,14 @@ namespace VULKAN_HPP_NAMESPACE } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPhysicalDeviceShaderSubgroupRotateFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceShaderSubgroupRotateFeatures const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDeviceShaderSubgroupRotateFeaturesKHR &() VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceShaderSubgroupRotateFeatures &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -90437,9 +90475,9 @@ namespace VULKAN_HPP_NAMESPACE #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceShaderSubgroupRotateFeaturesKHR const & ) const = default; + auto operator<=>( PhysicalDeviceShaderSubgroupRotateFeatures const & ) const = default; #else - bool operator==( PhysicalDeviceShaderSubgroupRotateFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceShaderSubgroupRotateFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); @@ -90449,25 +90487,27 @@ namespace VULKAN_HPP_NAMESPACE # endif } - bool operator!=( PhysicalDeviceShaderSubgroupRotateFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceShaderSubgroupRotateFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderSubgroupRotateFeaturesKHR; + 
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderSubgroupRotateFeatures; void * pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupRotate = {}; VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupRotateClustered = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceShaderSubgroupRotateFeaturesKHR; + using Type = PhysicalDeviceShaderSubgroupRotateFeatures; }; + using PhysicalDeviceShaderSubgroupRotateFeaturesKHR = PhysicalDeviceShaderSubgroupRotateFeatures; + struct PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR { using NativeType = VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR; @@ -93282,55 +93322,54 @@ namespace VULKAN_HPP_NAMESPACE using PhysicalDeviceVariablePointerFeaturesKHR = PhysicalDeviceVariablePointersFeatures; using PhysicalDeviceVariablePointersFeaturesKHR = PhysicalDeviceVariablePointersFeatures; - struct PhysicalDeviceVertexAttributeDivisorFeaturesKHR + struct PhysicalDeviceVertexAttributeDivisorFeatures { - using NativeType = VkPhysicalDeviceVertexAttributeDivisorFeaturesKHR; + using NativeType = VkPhysicalDeviceVertexAttributeDivisorFeatures; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVertexAttributeDivisorFeaturesKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVertexAttributeDivisorFeatures; #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexAttributeDivisorFeaturesKHR( VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateDivisor_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateZeroDivisor_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexAttributeDivisorFeatures( VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateDivisor_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateZeroDivisor_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , vertexAttributeInstanceRateDivisor{ vertexAttributeInstanceRateDivisor_ } , vertexAttributeInstanceRateZeroDivisor{ vertexAttributeInstanceRateZeroDivisor_ } { } - VULKAN_HPP_CONSTEXPR - PhysicalDeviceVertexAttributeDivisorFeaturesKHR( PhysicalDeviceVertexAttributeDivisorFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexAttributeDivisorFeatures( PhysicalDeviceVertexAttributeDivisorFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PhysicalDeviceVertexAttributeDivisorFeaturesKHR( VkPhysicalDeviceVertexAttributeDivisorFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceVertexAttributeDivisorFeaturesKHR( *reinterpret_cast( &rhs ) ) + PhysicalDeviceVertexAttributeDivisorFeatures( VkPhysicalDeviceVertexAttributeDivisorFeatures const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceVertexAttributeDivisorFeatures( *reinterpret_cast( &rhs ) ) { } - PhysicalDeviceVertexAttributeDivisorFeaturesKHR & operator=( PhysicalDeviceVertexAttributeDivisorFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PhysicalDeviceVertexAttributeDivisorFeatures & operator=( PhysicalDeviceVertexAttributeDivisorFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PhysicalDeviceVertexAttributeDivisorFeaturesKHR & operator=( VkPhysicalDeviceVertexAttributeDivisorFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + PhysicalDeviceVertexAttributeDivisorFeatures & operator=( 
VkPhysicalDeviceVertexAttributeDivisorFeatures const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVertexAttributeDivisorFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVertexAttributeDivisorFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVertexAttributeDivisorFeaturesKHR & + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVertexAttributeDivisorFeatures & setVertexAttributeInstanceRateDivisor( VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateDivisor_ ) VULKAN_HPP_NOEXCEPT { vertexAttributeInstanceRateDivisor = vertexAttributeInstanceRateDivisor_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVertexAttributeDivisorFeaturesKHR & + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVertexAttributeDivisorFeatures & setVertexAttributeInstanceRateZeroDivisor( VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateZeroDivisor_ ) VULKAN_HPP_NOEXCEPT { vertexAttributeInstanceRateZeroDivisor = vertexAttributeInstanceRateZeroDivisor_; @@ -93338,14 +93377,14 @@ namespace VULKAN_HPP_NAMESPACE } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPhysicalDeviceVertexAttributeDivisorFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceVertexAttributeDivisorFeatures const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDeviceVertexAttributeDivisorFeaturesKHR &() VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceVertexAttributeDivisorFeatures &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -93361,9 +93400,9 @@ namespace VULKAN_HPP_NAMESPACE #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceVertexAttributeDivisorFeaturesKHR const & ) const = default; + auto operator<=>( PhysicalDeviceVertexAttributeDivisorFeatures const & ) const = default; #else - bool operator==( PhysicalDeviceVertexAttributeDivisorFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PhysicalDeviceVertexAttributeDivisorFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); @@ -93373,26 +93412,117 @@ namespace VULKAN_HPP_NAMESPACE # endif } - bool operator!=( PhysicalDeviceVertexAttributeDivisorFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PhysicalDeviceVertexAttributeDivisorFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVertexAttributeDivisorFeaturesKHR; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVertexAttributeDivisorFeatures; void * pNext = {}; VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateDivisor = {}; VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateZeroDivisor = {}; }; template <> - struct CppType + struct CppType { - using Type = PhysicalDeviceVertexAttributeDivisorFeaturesKHR; + using Type = PhysicalDeviceVertexAttributeDivisorFeatures; }; - using PhysicalDeviceVertexAttributeDivisorFeaturesEXT = PhysicalDeviceVertexAttributeDivisorFeaturesKHR; + using PhysicalDeviceVertexAttributeDivisorFeaturesEXT = 
PhysicalDeviceVertexAttributeDivisorFeatures; + using PhysicalDeviceVertexAttributeDivisorFeaturesKHR = PhysicalDeviceVertexAttributeDivisorFeatures; + + struct PhysicalDeviceVertexAttributeDivisorProperties + { + using NativeType = VkPhysicalDeviceVertexAttributeDivisorProperties; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVertexAttributeDivisorProperties; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexAttributeDivisorProperties( uint32_t maxVertexAttribDivisor_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 supportsNonZeroFirstInstance_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , maxVertexAttribDivisor{ maxVertexAttribDivisor_ } + , supportsNonZeroFirstInstance{ supportsNonZeroFirstInstance_ } + { + } + + VULKAN_HPP_CONSTEXPR + PhysicalDeviceVertexAttributeDivisorProperties( PhysicalDeviceVertexAttributeDivisorProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceVertexAttributeDivisorProperties( VkPhysicalDeviceVertexAttributeDivisorProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceVertexAttributeDivisorProperties( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceVertexAttributeDivisorProperties & operator=( PhysicalDeviceVertexAttributeDivisorProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceVertexAttributeDivisorProperties & operator=( VkPhysicalDeviceVertexAttributeDivisorProperties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceVertexAttributeDivisorProperties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceVertexAttributeDivisorProperties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, maxVertexAttribDivisor, supportsNonZeroFirstInstance ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceVertexAttributeDivisorProperties const & ) const = default; +#else + bool operator==( PhysicalDeviceVertexAttributeDivisorProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxVertexAttribDivisor == rhs.maxVertexAttribDivisor ) && + ( supportsNonZeroFirstInstance == rhs.supportsNonZeroFirstInstance ); +# endif + } + + bool operator!=( PhysicalDeviceVertexAttributeDivisorProperties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVertexAttributeDivisorProperties; + void * pNext = {}; + uint32_t maxVertexAttribDivisor = {}; + VULKAN_HPP_NAMESPACE::Bool32 supportsNonZeroFirstInstance = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceVertexAttributeDivisorProperties; + }; + + using PhysicalDeviceVertexAttributeDivisorPropertiesKHR = PhysicalDeviceVertexAttributeDivisorProperties; struct PhysicalDeviceVertexAttributeDivisorPropertiesEXT { @@ -93478,95 +93608,6 @@ namespace VULKAN_HPP_NAMESPACE using Type = 
PhysicalDeviceVertexAttributeDivisorPropertiesEXT; }; - struct PhysicalDeviceVertexAttributeDivisorPropertiesKHR - { - using NativeType = VkPhysicalDeviceVertexAttributeDivisorPropertiesKHR; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVertexAttributeDivisorPropertiesKHR; - -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexAttributeDivisorPropertiesKHR( uint32_t maxVertexAttribDivisor_ = {}, - VULKAN_HPP_NAMESPACE::Bool32 supportsNonZeroFirstInstance_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext{ pNext_ } - , maxVertexAttribDivisor{ maxVertexAttribDivisor_ } - , supportsNonZeroFirstInstance{ supportsNonZeroFirstInstance_ } - { - } - - VULKAN_HPP_CONSTEXPR - PhysicalDeviceVertexAttributeDivisorPropertiesKHR( PhysicalDeviceVertexAttributeDivisorPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - PhysicalDeviceVertexAttributeDivisorPropertiesKHR( VkPhysicalDeviceVertexAttributeDivisorPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : PhysicalDeviceVertexAttributeDivisorPropertiesKHR( *reinterpret_cast( &rhs ) ) - { - } - - PhysicalDeviceVertexAttributeDivisorPropertiesKHR & - operator=( PhysicalDeviceVertexAttributeDivisorPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - PhysicalDeviceVertexAttributeDivisorPropertiesKHR & operator=( VkPhysicalDeviceVertexAttributeDivisorPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast( &rhs ); - return *this; - } - - operator VkPhysicalDeviceVertexAttributeDivisorPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDeviceVertexAttributeDivisorPropertiesKHR &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION - auto -# else - std::tuple -# endif - reflect() const VULKAN_HPP_NOEXCEPT - { - return std::tie( sType, pNext, maxVertexAttribDivisor, supportsNonZeroFirstInstance ); - } -#endif - -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PhysicalDeviceVertexAttributeDivisorPropertiesKHR const & ) const = default; -#else - bool operator==( PhysicalDeviceVertexAttributeDivisorPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT - { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxVertexAttribDivisor == rhs.maxVertexAttribDivisor ) && - ( supportsNonZeroFirstInstance == rhs.supportsNonZeroFirstInstance ); -# endif - } - - bool operator!=( PhysicalDeviceVertexAttributeDivisorPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVertexAttributeDivisorPropertiesKHR; - void * pNext = {}; - uint32_t maxVertexAttribDivisor = {}; - VULKAN_HPP_NAMESPACE::Bool32 supportsNonZeroFirstInstance = {}; - }; - - template <> - struct CppType - { - using Type = PhysicalDeviceVertexAttributeDivisorPropertiesKHR; - }; - struct PhysicalDeviceVertexAttributeRobustnessFeaturesEXT { using NativeType = VkPhysicalDeviceVertexAttributeRobustnessFeaturesEXT; @@ -96588,6 +96629,575 @@ namespace VULKAN_HPP_NAMESPACE using Type = PhysicalDeviceVulkan13Properties; }; + struct PhysicalDeviceVulkan14Features + { + using 
NativeType = VkPhysicalDeviceVulkan14Features; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkan14Features; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan14Features( VULKAN_HPP_NAMESPACE::Bool32 globalPriorityQuery_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupRotate_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupRotateClustered_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderFloatControls2_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 shaderExpectAssume_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 rectangularLines_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 bresenhamLines_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 smoothLines_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 stippledRectangularLines_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 stippledBresenhamLines_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 stippledSmoothLines_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateDivisor_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateZeroDivisor_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 indexTypeUint8_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 dynamicRenderingLocalRead_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 maintenance5_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 maintenance6_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 pipelineProtectedAccess_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 pipelineRobustness_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 hostImageCopy_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 pushDescriptor_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , globalPriorityQuery{ globalPriorityQuery_ } + , shaderSubgroupRotate{ shaderSubgroupRotate_ } + , shaderSubgroupRotateClustered{ shaderSubgroupRotateClustered_ } + , shaderFloatControls2{ shaderFloatControls2_ } + , shaderExpectAssume{ shaderExpectAssume_ } + , rectangularLines{ rectangularLines_ } + , bresenhamLines{ bresenhamLines_ } + , smoothLines{ smoothLines_ } + , stippledRectangularLines{ stippledRectangularLines_ } + , stippledBresenhamLines{ stippledBresenhamLines_ } + , stippledSmoothLines{ stippledSmoothLines_ } + , vertexAttributeInstanceRateDivisor{ vertexAttributeInstanceRateDivisor_ } + , vertexAttributeInstanceRateZeroDivisor{ vertexAttributeInstanceRateZeroDivisor_ } + , indexTypeUint8{ indexTypeUint8_ } + , dynamicRenderingLocalRead{ dynamicRenderingLocalRead_ } + , maintenance5{ maintenance5_ } + , maintenance6{ maintenance6_ } + , pipelineProtectedAccess{ pipelineProtectedAccess_ } + , pipelineRobustness{ pipelineRobustness_ } + , hostImageCopy{ hostImageCopy_ } + , pushDescriptor{ pushDescriptor_ } + { + } + + VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan14Features( PhysicalDeviceVulkan14Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceVulkan14Features( VkPhysicalDeviceVulkan14Features const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceVulkan14Features( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceVulkan14Features & operator=( PhysicalDeviceVulkan14Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceVulkan14Features & operator=( VkPhysicalDeviceVulkan14Features const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + +#if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT + { + pNext = pNext_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 
PhysicalDeviceVulkan14Features & setGlobalPriorityQuery( VULKAN_HPP_NAMESPACE::Bool32 globalPriorityQuery_ ) VULKAN_HPP_NOEXCEPT + { + globalPriorityQuery = globalPriorityQuery_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Features & setShaderSubgroupRotate( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupRotate_ ) VULKAN_HPP_NOEXCEPT + { + shaderSubgroupRotate = shaderSubgroupRotate_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Features & + setShaderSubgroupRotateClustered( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupRotateClustered_ ) VULKAN_HPP_NOEXCEPT + { + shaderSubgroupRotateClustered = shaderSubgroupRotateClustered_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Features & setShaderFloatControls2( VULKAN_HPP_NAMESPACE::Bool32 shaderFloatControls2_ ) VULKAN_HPP_NOEXCEPT + { + shaderFloatControls2 = shaderFloatControls2_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Features & setShaderExpectAssume( VULKAN_HPP_NAMESPACE::Bool32 shaderExpectAssume_ ) VULKAN_HPP_NOEXCEPT + { + shaderExpectAssume = shaderExpectAssume_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Features & setRectangularLines( VULKAN_HPP_NAMESPACE::Bool32 rectangularLines_ ) VULKAN_HPP_NOEXCEPT + { + rectangularLines = rectangularLines_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Features & setBresenhamLines( VULKAN_HPP_NAMESPACE::Bool32 bresenhamLines_ ) VULKAN_HPP_NOEXCEPT + { + bresenhamLines = bresenhamLines_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Features & setSmoothLines( VULKAN_HPP_NAMESPACE::Bool32 smoothLines_ ) VULKAN_HPP_NOEXCEPT + { + smoothLines = smoothLines_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Features & + setStippledRectangularLines( VULKAN_HPP_NAMESPACE::Bool32 stippledRectangularLines_ ) VULKAN_HPP_NOEXCEPT + { + stippledRectangularLines = stippledRectangularLines_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Features & + setStippledBresenhamLines( VULKAN_HPP_NAMESPACE::Bool32 stippledBresenhamLines_ ) VULKAN_HPP_NOEXCEPT + { + stippledBresenhamLines = stippledBresenhamLines_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Features & setStippledSmoothLines( VULKAN_HPP_NAMESPACE::Bool32 stippledSmoothLines_ ) VULKAN_HPP_NOEXCEPT + { + stippledSmoothLines = stippledSmoothLines_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Features & + setVertexAttributeInstanceRateDivisor( VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateDivisor_ ) VULKAN_HPP_NOEXCEPT + { + vertexAttributeInstanceRateDivisor = vertexAttributeInstanceRateDivisor_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Features & + setVertexAttributeInstanceRateZeroDivisor( VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateZeroDivisor_ ) VULKAN_HPP_NOEXCEPT + { + vertexAttributeInstanceRateZeroDivisor = vertexAttributeInstanceRateZeroDivisor_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Features & setIndexTypeUint8( VULKAN_HPP_NAMESPACE::Bool32 indexTypeUint8_ ) VULKAN_HPP_NOEXCEPT + { + indexTypeUint8 = indexTypeUint8_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Features & + setDynamicRenderingLocalRead( VULKAN_HPP_NAMESPACE::Bool32 dynamicRenderingLocalRead_ ) VULKAN_HPP_NOEXCEPT + { + dynamicRenderingLocalRead = dynamicRenderingLocalRead_; + return *this; + } + + 
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Features & setMaintenance5( VULKAN_HPP_NAMESPACE::Bool32 maintenance5_ ) VULKAN_HPP_NOEXCEPT + { + maintenance5 = maintenance5_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Features & setMaintenance6( VULKAN_HPP_NAMESPACE::Bool32 maintenance6_ ) VULKAN_HPP_NOEXCEPT + { + maintenance6 = maintenance6_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Features & + setPipelineProtectedAccess( VULKAN_HPP_NAMESPACE::Bool32 pipelineProtectedAccess_ ) VULKAN_HPP_NOEXCEPT + { + pipelineProtectedAccess = pipelineProtectedAccess_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Features & setPipelineRobustness( VULKAN_HPP_NAMESPACE::Bool32 pipelineRobustness_ ) VULKAN_HPP_NOEXCEPT + { + pipelineRobustness = pipelineRobustness_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Features & setHostImageCopy( VULKAN_HPP_NAMESPACE::Bool32 hostImageCopy_ ) VULKAN_HPP_NOEXCEPT + { + hostImageCopy = hostImageCopy_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Features & setPushDescriptor( VULKAN_HPP_NAMESPACE::Bool32 pushDescriptor_ ) VULKAN_HPP_NOEXCEPT + { + pushDescriptor = pushDescriptor_; + return *this; + } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceVulkan14Features const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceVulkan14Features &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + globalPriorityQuery, + shaderSubgroupRotate, + shaderSubgroupRotateClustered, + shaderFloatControls2, + shaderExpectAssume, + rectangularLines, + bresenhamLines, + smoothLines, + stippledRectangularLines, + stippledBresenhamLines, + stippledSmoothLines, + vertexAttributeInstanceRateDivisor, + vertexAttributeInstanceRateZeroDivisor, + indexTypeUint8, + dynamicRenderingLocalRead, + maintenance5, + maintenance6, + pipelineProtectedAccess, + pipelineRobustness, + hostImageCopy, + pushDescriptor ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceVulkan14Features const & ) const = default; +#else + bool operator==( PhysicalDeviceVulkan14Features const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( globalPriorityQuery == rhs.globalPriorityQuery ) && + ( shaderSubgroupRotate == rhs.shaderSubgroupRotate ) && ( shaderSubgroupRotateClustered == rhs.shaderSubgroupRotateClustered ) && + ( shaderFloatControls2 == rhs.shaderFloatControls2 ) && ( shaderExpectAssume == rhs.shaderExpectAssume ) && + ( rectangularLines == rhs.rectangularLines ) && ( bresenhamLines == rhs.bresenhamLines ) && ( smoothLines == rhs.smoothLines ) && + ( stippledRectangularLines == rhs.stippledRectangularLines ) && ( stippledBresenhamLines == rhs.stippledBresenhamLines ) && + ( stippledSmoothLines == rhs.stippledSmoothLines ) && ( vertexAttributeInstanceRateDivisor == rhs.vertexAttributeInstanceRateDivisor ) && + ( vertexAttributeInstanceRateZeroDivisor == rhs.vertexAttributeInstanceRateZeroDivisor ) && ( indexTypeUint8 == rhs.indexTypeUint8 ) && + ( dynamicRenderingLocalRead == rhs.dynamicRenderingLocalRead ) && ( maintenance5 
== rhs.maintenance5 ) && ( maintenance6 == rhs.maintenance6 ) && + ( pipelineProtectedAccess == rhs.pipelineProtectedAccess ) && ( pipelineRobustness == rhs.pipelineRobustness ) && + ( hostImageCopy == rhs.hostImageCopy ) && ( pushDescriptor == rhs.pushDescriptor ); +# endif + } + + bool operator!=( PhysicalDeviceVulkan14Features const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan14Features; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::Bool32 globalPriorityQuery = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupRotate = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupRotateClustered = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderFloatControls2 = {}; + VULKAN_HPP_NAMESPACE::Bool32 shaderExpectAssume = {}; + VULKAN_HPP_NAMESPACE::Bool32 rectangularLines = {}; + VULKAN_HPP_NAMESPACE::Bool32 bresenhamLines = {}; + VULKAN_HPP_NAMESPACE::Bool32 smoothLines = {}; + VULKAN_HPP_NAMESPACE::Bool32 stippledRectangularLines = {}; + VULKAN_HPP_NAMESPACE::Bool32 stippledBresenhamLines = {}; + VULKAN_HPP_NAMESPACE::Bool32 stippledSmoothLines = {}; + VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateDivisor = {}; + VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateZeroDivisor = {}; + VULKAN_HPP_NAMESPACE::Bool32 indexTypeUint8 = {}; + VULKAN_HPP_NAMESPACE::Bool32 dynamicRenderingLocalRead = {}; + VULKAN_HPP_NAMESPACE::Bool32 maintenance5 = {}; + VULKAN_HPP_NAMESPACE::Bool32 maintenance6 = {}; + VULKAN_HPP_NAMESPACE::Bool32 pipelineProtectedAccess = {}; + VULKAN_HPP_NAMESPACE::Bool32 pipelineRobustness = {}; + VULKAN_HPP_NAMESPACE::Bool32 hostImageCopy = {}; + VULKAN_HPP_NAMESPACE::Bool32 pushDescriptor = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceVulkan14Features; + }; + + struct PhysicalDeviceVulkan14Properties + { + using NativeType = VkPhysicalDeviceVulkan14Properties; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkan14Properties; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Properties( + uint32_t lineSubPixelPrecisionBits_ = {}, + uint32_t maxVertexAttribDivisor_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 supportsNonZeroFirstInstance_ = {}, + uint32_t maxPushDescriptors_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 dynamicRenderingLocalReadDepthStencilAttachments_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 dynamicRenderingLocalReadMultisampledAttachments_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 earlyFragmentMultisampleCoverageAfterSampleCounting_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 earlyFragmentSampleMaskTestBeforeSampleCounting_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 depthStencilSwizzleOneSupport_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 polygonModePointSize_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 nonStrictSinglePixelWideLinesUseParallelogram_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 nonStrictWideLinesUseParallelogram_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 blockTexelViewCompatibleMultipleLayers_ = {}, + uint32_t maxCombinedImageSamplerDescriptorCount_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateClampCombinerInputs_ = {}, + VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior defaultRobustnessStorageBuffers_ = + VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior::eDeviceDefault, + VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior defaultRobustnessUniformBuffers_ = + 
VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior::eDeviceDefault, + VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior defaultRobustnessVertexInputs_ = + VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior::eDeviceDefault, + VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehavior defaultRobustnessImages_ = VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehavior::eDeviceDefault, + uint32_t copySrcLayoutCount_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout * pCopySrcLayouts_ = {}, + uint32_t copyDstLayoutCount_ = {}, + VULKAN_HPP_NAMESPACE::ImageLayout * pCopyDstLayouts_ = {}, + std::array const & optimalTilingLayoutUUID_ = {}, + VULKAN_HPP_NAMESPACE::Bool32 identicalMemoryTypeRequirements_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , lineSubPixelPrecisionBits{ lineSubPixelPrecisionBits_ } + , maxVertexAttribDivisor{ maxVertexAttribDivisor_ } + , supportsNonZeroFirstInstance{ supportsNonZeroFirstInstance_ } + , maxPushDescriptors{ maxPushDescriptors_ } + , dynamicRenderingLocalReadDepthStencilAttachments{ dynamicRenderingLocalReadDepthStencilAttachments_ } + , dynamicRenderingLocalReadMultisampledAttachments{ dynamicRenderingLocalReadMultisampledAttachments_ } + , earlyFragmentMultisampleCoverageAfterSampleCounting{ earlyFragmentMultisampleCoverageAfterSampleCounting_ } + , earlyFragmentSampleMaskTestBeforeSampleCounting{ earlyFragmentSampleMaskTestBeforeSampleCounting_ } + , depthStencilSwizzleOneSupport{ depthStencilSwizzleOneSupport_ } + , polygonModePointSize{ polygonModePointSize_ } + , nonStrictSinglePixelWideLinesUseParallelogram{ nonStrictSinglePixelWideLinesUseParallelogram_ } + , nonStrictWideLinesUseParallelogram{ nonStrictWideLinesUseParallelogram_ } + , blockTexelViewCompatibleMultipleLayers{ blockTexelViewCompatibleMultipleLayers_ } + , maxCombinedImageSamplerDescriptorCount{ maxCombinedImageSamplerDescriptorCount_ } + , fragmentShadingRateClampCombinerInputs{ fragmentShadingRateClampCombinerInputs_ } + , defaultRobustnessStorageBuffers{ defaultRobustnessStorageBuffers_ } + , defaultRobustnessUniformBuffers{ defaultRobustnessUniformBuffers_ } + , defaultRobustnessVertexInputs{ defaultRobustnessVertexInputs_ } + , defaultRobustnessImages{ defaultRobustnessImages_ } + , copySrcLayoutCount{ copySrcLayoutCount_ } + , pCopySrcLayouts{ pCopySrcLayouts_ } + , copyDstLayoutCount{ copyDstLayoutCount_ } + , pCopyDstLayouts{ pCopyDstLayouts_ } + , optimalTilingLayoutUUID{ optimalTilingLayoutUUID_ } + , identicalMemoryTypeRequirements{ identicalMemoryTypeRequirements_ } + { + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Properties( PhysicalDeviceVulkan14Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + PhysicalDeviceVulkan14Properties( VkPhysicalDeviceVulkan14Properties const & rhs ) VULKAN_HPP_NOEXCEPT + : PhysicalDeviceVulkan14Properties( *reinterpret_cast( &rhs ) ) + { + } + + PhysicalDeviceVulkan14Properties & operator=( PhysicalDeviceVulkan14Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PhysicalDeviceVulkan14Properties & operator=( VkPhysicalDeviceVulkan14Properties const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPhysicalDeviceVulkan14Properties const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceVulkan14Properties &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION 
+ auto +# else + std::tuple const &, + VULKAN_HPP_NAMESPACE::Bool32 const &> +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, + pNext, + lineSubPixelPrecisionBits, + maxVertexAttribDivisor, + supportsNonZeroFirstInstance, + maxPushDescriptors, + dynamicRenderingLocalReadDepthStencilAttachments, + dynamicRenderingLocalReadMultisampledAttachments, + earlyFragmentMultisampleCoverageAfterSampleCounting, + earlyFragmentSampleMaskTestBeforeSampleCounting, + depthStencilSwizzleOneSupport, + polygonModePointSize, + nonStrictSinglePixelWideLinesUseParallelogram, + nonStrictWideLinesUseParallelogram, + blockTexelViewCompatibleMultipleLayers, + maxCombinedImageSamplerDescriptorCount, + fragmentShadingRateClampCombinerInputs, + defaultRobustnessStorageBuffers, + defaultRobustnessUniformBuffers, + defaultRobustnessVertexInputs, + defaultRobustnessImages, + copySrcLayoutCount, + pCopySrcLayouts, + copyDstLayoutCount, + pCopyDstLayouts, + optimalTilingLayoutUUID, + identicalMemoryTypeRequirements ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceVulkan14Properties const & ) const = default; +#else + bool operator==( PhysicalDeviceVulkan14Properties const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( lineSubPixelPrecisionBits == rhs.lineSubPixelPrecisionBits ) && + ( maxVertexAttribDivisor == rhs.maxVertexAttribDivisor ) && ( supportsNonZeroFirstInstance == rhs.supportsNonZeroFirstInstance ) && + ( maxPushDescriptors == rhs.maxPushDescriptors ) && + ( dynamicRenderingLocalReadDepthStencilAttachments == rhs.dynamicRenderingLocalReadDepthStencilAttachments ) && + ( dynamicRenderingLocalReadMultisampledAttachments == rhs.dynamicRenderingLocalReadMultisampledAttachments ) && + ( earlyFragmentMultisampleCoverageAfterSampleCounting == rhs.earlyFragmentMultisampleCoverageAfterSampleCounting ) && + ( earlyFragmentSampleMaskTestBeforeSampleCounting == rhs.earlyFragmentSampleMaskTestBeforeSampleCounting ) && + ( depthStencilSwizzleOneSupport == rhs.depthStencilSwizzleOneSupport ) && ( polygonModePointSize == rhs.polygonModePointSize ) && + ( nonStrictSinglePixelWideLinesUseParallelogram == rhs.nonStrictSinglePixelWideLinesUseParallelogram ) && + ( nonStrictWideLinesUseParallelogram == rhs.nonStrictWideLinesUseParallelogram ) && + ( blockTexelViewCompatibleMultipleLayers == rhs.blockTexelViewCompatibleMultipleLayers ) && + ( maxCombinedImageSamplerDescriptorCount == rhs.maxCombinedImageSamplerDescriptorCount ) && + ( fragmentShadingRateClampCombinerInputs == rhs.fragmentShadingRateClampCombinerInputs ) && + ( defaultRobustnessStorageBuffers == rhs.defaultRobustnessStorageBuffers ) && + ( defaultRobustnessUniformBuffers == rhs.defaultRobustnessUniformBuffers ) && + ( defaultRobustnessVertexInputs == rhs.defaultRobustnessVertexInputs ) && ( defaultRobustnessImages == rhs.defaultRobustnessImages ) && + ( copySrcLayoutCount == rhs.copySrcLayoutCount ) && ( pCopySrcLayouts == rhs.pCopySrcLayouts ) && + ( copyDstLayoutCount == rhs.copyDstLayoutCount ) && ( pCopyDstLayouts == rhs.pCopyDstLayouts ) && + ( optimalTilingLayoutUUID == rhs.optimalTilingLayoutUUID ) && ( identicalMemoryTypeRequirements == rhs.identicalMemoryTypeRequirements ); +# endif + } + + bool operator!=( PhysicalDeviceVulkan14Properties const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + 
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan14Properties; + void * pNext = {}; + uint32_t lineSubPixelPrecisionBits = {}; + uint32_t maxVertexAttribDivisor = {}; + VULKAN_HPP_NAMESPACE::Bool32 supportsNonZeroFirstInstance = {}; + uint32_t maxPushDescriptors = {}; + VULKAN_HPP_NAMESPACE::Bool32 dynamicRenderingLocalReadDepthStencilAttachments = {}; + VULKAN_HPP_NAMESPACE::Bool32 dynamicRenderingLocalReadMultisampledAttachments = {}; + VULKAN_HPP_NAMESPACE::Bool32 earlyFragmentMultisampleCoverageAfterSampleCounting = {}; + VULKAN_HPP_NAMESPACE::Bool32 earlyFragmentSampleMaskTestBeforeSampleCounting = {}; + VULKAN_HPP_NAMESPACE::Bool32 depthStencilSwizzleOneSupport = {}; + VULKAN_HPP_NAMESPACE::Bool32 polygonModePointSize = {}; + VULKAN_HPP_NAMESPACE::Bool32 nonStrictSinglePixelWideLinesUseParallelogram = {}; + VULKAN_HPP_NAMESPACE::Bool32 nonStrictWideLinesUseParallelogram = {}; + VULKAN_HPP_NAMESPACE::Bool32 blockTexelViewCompatibleMultipleLayers = {}; + uint32_t maxCombinedImageSamplerDescriptorCount = {}; + VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateClampCombinerInputs = {}; + VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior defaultRobustnessStorageBuffers = + VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior::eDeviceDefault; + VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior defaultRobustnessUniformBuffers = + VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior::eDeviceDefault; + VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior defaultRobustnessVertexInputs = + VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior::eDeviceDefault; + VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehavior defaultRobustnessImages = VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehavior::eDeviceDefault; + uint32_t copySrcLayoutCount = {}; + VULKAN_HPP_NAMESPACE::ImageLayout * pCopySrcLayouts = {}; + uint32_t copyDstLayoutCount = {}; + VULKAN_HPP_NAMESPACE::ImageLayout * pCopyDstLayouts = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D optimalTilingLayoutUUID = {}; + VULKAN_HPP_NAMESPACE::Bool32 identicalMemoryTypeRequirements = {}; + }; + + template <> + struct CppType + { + using Type = PhysicalDeviceVulkan14Properties; + }; + struct PhysicalDeviceVulkanMemoryModelFeatures { using NativeType = VkPhysicalDeviceVulkanMemoryModelFeatures; @@ -99173,66 +99783,66 @@ namespace VULKAN_HPP_NAMESPACE using Type = PipelineCoverageToColorStateCreateInfoNV; }; - struct PipelineCreateFlags2CreateInfoKHR + struct PipelineCreateFlags2CreateInfo { - using NativeType = VkPipelineCreateFlags2CreateInfoKHR; + using NativeType = VkPipelineCreateFlags2CreateInfo; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineCreateFlags2CreateInfoKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineCreateFlags2CreateInfo; #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PipelineCreateFlags2CreateInfoKHR( VULKAN_HPP_NAMESPACE::PipelineCreateFlags2KHR flags_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PipelineCreateFlags2CreateInfo( VULKAN_HPP_NAMESPACE::PipelineCreateFlags2 flags_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , flags{ flags_ } { } - VULKAN_HPP_CONSTEXPR PipelineCreateFlags2CreateInfoKHR( PipelineCreateFlags2CreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR PipelineCreateFlags2CreateInfo( 
PipelineCreateFlags2CreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PipelineCreateFlags2CreateInfoKHR( VkPipelineCreateFlags2CreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : PipelineCreateFlags2CreateInfoKHR( *reinterpret_cast( &rhs ) ) + PipelineCreateFlags2CreateInfo( VkPipelineCreateFlags2CreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineCreateFlags2CreateInfo( *reinterpret_cast( &rhs ) ) { } - PipelineCreateFlags2CreateInfoKHR & operator=( PipelineCreateFlags2CreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PipelineCreateFlags2CreateInfo & operator=( PipelineCreateFlags2CreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PipelineCreateFlags2CreateInfoKHR & operator=( VkPipelineCreateFlags2CreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineCreateFlags2CreateInfo & operator=( VkPipelineCreateFlags2CreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PipelineCreateFlags2CreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineCreateFlags2CreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineCreateFlags2CreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::PipelineCreateFlags2KHR flags_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineCreateFlags2CreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineCreateFlags2 flags_ ) VULKAN_HPP_NOEXCEPT { flags = flags_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPipelineCreateFlags2CreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + operator VkPipelineCreateFlags2CreateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPipelineCreateFlags2CreateInfoKHR &() VULKAN_HPP_NOEXCEPT + operator VkPipelineCreateFlags2CreateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { @@ -99241,9 +99851,9 @@ namespace VULKAN_HPP_NAMESPACE #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PipelineCreateFlags2CreateInfoKHR const & ) const = default; + auto operator<=>( PipelineCreateFlags2CreateInfo const & ) const = default; #else - bool operator==( PipelineCreateFlags2CreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineCreateFlags2CreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); @@ -99252,24 +99862,26 @@ namespace VULKAN_HPP_NAMESPACE # endif } - bool operator!=( PipelineCreateFlags2CreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineCreateFlags2CreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCreateFlags2CreateInfoKHR; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::PipelineCreateFlags2KHR flags = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCreateFlags2CreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineCreateFlags2 flags = {}; }; template <> - struct CppType + struct 
CppType { - using Type = PipelineCreateFlags2CreateInfoKHR; + using Type = PipelineCreateFlags2CreateInfo; }; + using PipelineCreateFlags2CreateInfoKHR = PipelineCreateFlags2CreateInfo; + struct PipelineCreationFeedback { using NativeType = VkPipelineCreationFeedback; @@ -101074,20 +101686,20 @@ namespace VULKAN_HPP_NAMESPACE using Type = PipelineRasterizationDepthClipStateCreateInfoEXT; }; - struct PipelineRasterizationLineStateCreateInfoKHR + struct PipelineRasterizationLineStateCreateInfo { - using NativeType = VkPipelineRasterizationLineStateCreateInfoKHR; + using NativeType = VkPipelineRasterizationLineStateCreateInfo; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRasterizationLineStateCreateInfoKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRasterizationLineStateCreateInfo; #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PipelineRasterizationLineStateCreateInfoKHR( - VULKAN_HPP_NAMESPACE::LineRasterizationModeKHR lineRasterizationMode_ = VULKAN_HPP_NAMESPACE::LineRasterizationModeKHR::eDefault, - VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable_ = {}, - uint32_t lineStippleFactor_ = {}, - uint16_t lineStipplePattern_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PipelineRasterizationLineStateCreateInfo( + VULKAN_HPP_NAMESPACE::LineRasterizationMode lineRasterizationMode_ = VULKAN_HPP_NAMESPACE::LineRasterizationMode::eDefault, + VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable_ = {}, + uint32_t lineStippleFactor_ = {}, + uint16_t lineStipplePattern_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , lineRasterizationMode{ lineRasterizationMode_ } , stippledLineEnable{ stippledLineEnable_ } @@ -101096,64 +101708,64 @@ namespace VULKAN_HPP_NAMESPACE { } - VULKAN_HPP_CONSTEXPR PipelineRasterizationLineStateCreateInfoKHR( PipelineRasterizationLineStateCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR PipelineRasterizationLineStateCreateInfo( PipelineRasterizationLineStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PipelineRasterizationLineStateCreateInfoKHR( VkPipelineRasterizationLineStateCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : PipelineRasterizationLineStateCreateInfoKHR( *reinterpret_cast( &rhs ) ) + PipelineRasterizationLineStateCreateInfo( VkPipelineRasterizationLineStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineRasterizationLineStateCreateInfo( *reinterpret_cast( &rhs ) ) { } - PipelineRasterizationLineStateCreateInfoKHR & operator=( PipelineRasterizationLineStateCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PipelineRasterizationLineStateCreateInfo & operator=( PipelineRasterizationLineStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PipelineRasterizationLineStateCreateInfoKHR & operator=( VkPipelineRasterizationLineStateCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineRasterizationLineStateCreateInfo & operator=( VkPipelineRasterizationLineStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationLineStateCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 
PipelineRasterizationLineStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationLineStateCreateInfoKHR & - setLineRasterizationMode( VULKAN_HPP_NAMESPACE::LineRasterizationModeKHR lineRasterizationMode_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationLineStateCreateInfo & + setLineRasterizationMode( VULKAN_HPP_NAMESPACE::LineRasterizationMode lineRasterizationMode_ ) VULKAN_HPP_NOEXCEPT { lineRasterizationMode = lineRasterizationMode_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationLineStateCreateInfoKHR & + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationLineStateCreateInfo & setStippledLineEnable( VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable_ ) VULKAN_HPP_NOEXCEPT { stippledLineEnable = stippledLineEnable_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationLineStateCreateInfoKHR & setLineStippleFactor( uint32_t lineStippleFactor_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationLineStateCreateInfo & setLineStippleFactor( uint32_t lineStippleFactor_ ) VULKAN_HPP_NOEXCEPT { lineStippleFactor = lineStippleFactor_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationLineStateCreateInfoKHR & setLineStipplePattern( uint16_t lineStipplePattern_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationLineStateCreateInfo & setLineStipplePattern( uint16_t lineStipplePattern_ ) VULKAN_HPP_NOEXCEPT { lineStipplePattern = lineStipplePattern_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPipelineRasterizationLineStateCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + operator VkPipelineRasterizationLineStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPipelineRasterizationLineStateCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + operator VkPipelineRasterizationLineStateCreateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -101162,7 +101774,7 @@ namespace VULKAN_HPP_NAMESPACE # else std::tuple @@ -101174,9 +101786,9 @@ namespace VULKAN_HPP_NAMESPACE #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PipelineRasterizationLineStateCreateInfoKHR const & ) const = default; + auto operator<=>( PipelineRasterizationLineStateCreateInfo const & ) const = default; #else - bool operator==( PipelineRasterizationLineStateCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineRasterizationLineStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); @@ -101187,28 +101799,29 @@ namespace VULKAN_HPP_NAMESPACE # endif } - bool operator!=( PipelineRasterizationLineStateCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineRasterizationLineStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationLineStateCreateInfoKHR; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::LineRasterizationModeKHR lineRasterizationMode = VULKAN_HPP_NAMESPACE::LineRasterizationModeKHR::eDefault; - VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable = {}; - uint32_t lineStippleFactor = {}; - uint16_t lineStipplePattern = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::ePipelineRasterizationLineStateCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::LineRasterizationMode lineRasterizationMode = VULKAN_HPP_NAMESPACE::LineRasterizationMode::eDefault; + VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable = {}; + uint32_t lineStippleFactor = {}; + uint16_t lineStipplePattern = {}; }; template <> - struct CppType + struct CppType { - using Type = PipelineRasterizationLineStateCreateInfoKHR; + using Type = PipelineRasterizationLineStateCreateInfo; }; - using PipelineRasterizationLineStateCreateInfoEXT = PipelineRasterizationLineStateCreateInfoKHR; + using PipelineRasterizationLineStateCreateInfoEXT = PipelineRasterizationLineStateCreateInfo; + using PipelineRasterizationLineStateCreateInfoKHR = PipelineRasterizationLineStateCreateInfo; struct PipelineRasterizationProvokingVertexStateCreateInfoEXT { @@ -101794,20 +102407,20 @@ namespace VULKAN_HPP_NAMESPACE using Type = PipelineRepresentativeFragmentTestStateCreateInfoNV; }; - struct PipelineRobustnessCreateInfoEXT + struct PipelineRobustnessCreateInfo { - using NativeType = VkPipelineRobustnessCreateInfoEXT; + using NativeType = VkPipelineRobustnessCreateInfo; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRobustnessCreateInfoEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRobustnessCreateInfo; #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PipelineRobustnessCreateInfoEXT( - VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT storageBuffers_ = VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT::eDeviceDefault, - VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT uniformBuffers_ = VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT::eDeviceDefault, - VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT vertexInputs_ = VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT::eDeviceDefault, - VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehaviorEXT images_ = VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehaviorEXT::eDeviceDefault, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PipelineRobustnessCreateInfo( + VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior storageBuffers_ = VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior::eDeviceDefault, + VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior uniformBuffers_ = VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior::eDeviceDefault, + VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior vertexInputs_ = VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior::eDeviceDefault, + VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehavior images_ = VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehavior::eDeviceDefault, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , storageBuffers{ storageBuffers_ } , uniformBuffers{ uniformBuffers_ } @@ -101816,65 +102429,65 @@ namespace VULKAN_HPP_NAMESPACE { } - VULKAN_HPP_CONSTEXPR PipelineRobustnessCreateInfoEXT( PipelineRobustnessCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR PipelineRobustnessCreateInfo( PipelineRobustnessCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PipelineRobustnessCreateInfoEXT( VkPipelineRobustnessCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : PipelineRobustnessCreateInfoEXT( *reinterpret_cast( &rhs ) ) + PipelineRobustnessCreateInfo( VkPipelineRobustnessCreateInfo const & rhs 
) VULKAN_HPP_NOEXCEPT + : PipelineRobustnessCreateInfo( *reinterpret_cast( &rhs ) ) { } - PipelineRobustnessCreateInfoEXT & operator=( PipelineRobustnessCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PipelineRobustnessCreateInfo & operator=( PipelineRobustnessCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PipelineRobustnessCreateInfoEXT & operator=( VkPipelineRobustnessCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineRobustnessCreateInfo & operator=( VkPipelineRobustnessCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PipelineRobustnessCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineRobustnessCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineRobustnessCreateInfoEXT & - setStorageBuffers( VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT storageBuffers_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineRobustnessCreateInfo & + setStorageBuffers( VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior storageBuffers_ ) VULKAN_HPP_NOEXCEPT { storageBuffers = storageBuffers_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineRobustnessCreateInfoEXT & - setUniformBuffers( VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT uniformBuffers_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineRobustnessCreateInfo & + setUniformBuffers( VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior uniformBuffers_ ) VULKAN_HPP_NOEXCEPT { uniformBuffers = uniformBuffers_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineRobustnessCreateInfoEXT & - setVertexInputs( VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT vertexInputs_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineRobustnessCreateInfo & + setVertexInputs( VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior vertexInputs_ ) VULKAN_HPP_NOEXCEPT { vertexInputs = vertexInputs_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineRobustnessCreateInfoEXT & setImages( VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehaviorEXT images_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineRobustnessCreateInfo & setImages( VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehavior images_ ) VULKAN_HPP_NOEXCEPT { images = images_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPipelineRobustnessCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkPipelineRobustnessCreateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPipelineRobustnessCreateInfoEXT &() VULKAN_HPP_NOEXCEPT + operator VkPipelineRobustnessCreateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -101883,10 +102496,10 @@ namespace VULKAN_HPP_NAMESPACE # else std::tuple + VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior const &, + VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior const &, + VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior const &, + VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehavior const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { @@ -101895,9 +102508,9 @@ namespace VULKAN_HPP_NAMESPACE #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto 
operator<=>( PipelineRobustnessCreateInfoEXT const & ) const = default; + auto operator<=>( PipelineRobustnessCreateInfo const & ) const = default; #else - bool operator==( PipelineRobustnessCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineRobustnessCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); @@ -101907,27 +102520,29 @@ namespace VULKAN_HPP_NAMESPACE # endif } - bool operator!=( PipelineRobustnessCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineRobustnessCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRobustnessCreateInfoEXT; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT storageBuffers = VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT::eDeviceDefault; - VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT uniformBuffers = VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT::eDeviceDefault; - VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT vertexInputs = VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT::eDeviceDefault; - VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehaviorEXT images = VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehaviorEXT::eDeviceDefault; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRobustnessCreateInfo; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior storageBuffers = VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior::eDeviceDefault; + VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior uniformBuffers = VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior::eDeviceDefault; + VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior vertexInputs = VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehavior::eDeviceDefault; + VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehavior images = VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehavior::eDeviceDefault; }; template <> - struct CppType + struct CppType { - using Type = PipelineRobustnessCreateInfoEXT; + using Type = PipelineRobustnessCreateInfo; }; + using PipelineRobustnessCreateInfoEXT = PipelineRobustnessCreateInfo; + struct PipelineSampleLocationsStateCreateInfoEXT { using NativeType = VkPipelineSampleLocationsStateCreateInfoEXT; @@ -102470,55 +103085,55 @@ namespace VULKAN_HPP_NAMESPACE using PipelineTessellationDomainOriginStateCreateInfoKHR = PipelineTessellationDomainOriginStateCreateInfo; - struct VertexInputBindingDivisorDescriptionKHR + struct VertexInputBindingDivisorDescription { - using NativeType = VkVertexInputBindingDivisorDescriptionKHR; + using NativeType = VkVertexInputBindingDivisorDescription; #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR VertexInputBindingDivisorDescriptionKHR( uint32_t binding_ = {}, uint32_t divisor_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR VertexInputBindingDivisorDescription( uint32_t binding_ = {}, uint32_t divisor_ = {} ) VULKAN_HPP_NOEXCEPT : binding{ binding_ } , divisor{ divisor_ } { } - VULKAN_HPP_CONSTEXPR VertexInputBindingDivisorDescriptionKHR( VertexInputBindingDivisorDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR VertexInputBindingDivisorDescription( VertexInputBindingDivisorDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default; - VertexInputBindingDivisorDescriptionKHR( 
VkVertexInputBindingDivisorDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : VertexInputBindingDivisorDescriptionKHR( *reinterpret_cast( &rhs ) ) + VertexInputBindingDivisorDescription( VkVertexInputBindingDivisorDescription const & rhs ) VULKAN_HPP_NOEXCEPT + : VertexInputBindingDivisorDescription( *reinterpret_cast( &rhs ) ) { } - VertexInputBindingDivisorDescriptionKHR & operator=( VertexInputBindingDivisorDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VertexInputBindingDivisorDescription & operator=( VertexInputBindingDivisorDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - VertexInputBindingDivisorDescriptionKHR & operator=( VkVertexInputBindingDivisorDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT + VertexInputBindingDivisorDescription & operator=( VkVertexInputBindingDivisorDescription const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDivisorDescriptionKHR & setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDivisorDescription & setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT { binding = binding_; return *this; } - VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDivisorDescriptionKHR & setDivisor( uint32_t divisor_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDivisorDescription & setDivisor( uint32_t divisor_ ) VULKAN_HPP_NOEXCEPT { divisor = divisor_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkVertexInputBindingDivisorDescriptionKHR const &() const VULKAN_HPP_NOEXCEPT + operator VkVertexInputBindingDivisorDescription const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkVertexInputBindingDivisorDescriptionKHR &() VULKAN_HPP_NOEXCEPT + operator VkVertexInputBindingDivisorDescription &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -102534,9 +103149,9 @@ namespace VULKAN_HPP_NAMESPACE #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( VertexInputBindingDivisorDescriptionKHR const & ) const = default; + auto operator<=>( VertexInputBindingDivisorDescription const & ) const = default; #else - bool operator==( VertexInputBindingDivisorDescriptionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( VertexInputBindingDivisorDescription const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); @@ -102545,7 +103160,7 @@ namespace VULKAN_HPP_NAMESPACE # endif } - bool operator!=( VertexInputBindingDivisorDescriptionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( VertexInputBindingDivisorDescription const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } @@ -102556,37 +103171,38 @@ namespace VULKAN_HPP_NAMESPACE uint32_t divisor = {}; }; - using VertexInputBindingDivisorDescriptionEXT = VertexInputBindingDivisorDescriptionKHR; + using VertexInputBindingDivisorDescriptionEXT = VertexInputBindingDivisorDescription; + using VertexInputBindingDivisorDescriptionKHR = VertexInputBindingDivisorDescription; - struct PipelineVertexInputDivisorStateCreateInfoKHR + struct PipelineVertexInputDivisorStateCreateInfo { - using NativeType = VkPipelineVertexInputDivisorStateCreateInfoKHR; + using 
NativeType = VkPipelineVertexInputDivisorStateCreateInfo; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineVertexInputDivisorStateCreateInfoKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineVertexInputDivisorStateCreateInfo; #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) VULKAN_HPP_CONSTEXPR - PipelineVertexInputDivisorStateCreateInfoKHR( uint32_t vertexBindingDivisorCount_ = {}, - const VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionKHR * pVertexBindingDivisors_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + PipelineVertexInputDivisorStateCreateInfo( uint32_t vertexBindingDivisorCount_ = {}, + const VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescription * pVertexBindingDivisors_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , vertexBindingDivisorCount{ vertexBindingDivisorCount_ } , pVertexBindingDivisors{ pVertexBindingDivisors_ } { } - VULKAN_HPP_CONSTEXPR PipelineVertexInputDivisorStateCreateInfoKHR( PipelineVertexInputDivisorStateCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR PipelineVertexInputDivisorStateCreateInfo( PipelineVertexInputDivisorStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PipelineVertexInputDivisorStateCreateInfoKHR( VkPipelineVertexInputDivisorStateCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : PipelineVertexInputDivisorStateCreateInfoKHR( *reinterpret_cast( &rhs ) ) + PipelineVertexInputDivisorStateCreateInfo( VkPipelineVertexInputDivisorStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : PipelineVertexInputDivisorStateCreateInfo( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - PipelineVertexInputDivisorStateCreateInfoKHR( - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & vertexBindingDivisors_, - const void * pNext_ = nullptr ) + PipelineVertexInputDivisorStateCreateInfo( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & vertexBindingDivisors_, + const void * pNext_ = nullptr ) : pNext( pNext_ ) , vertexBindingDivisorCount( static_cast( vertexBindingDivisors_.size() ) ) , pVertexBindingDivisors( vertexBindingDivisors_.data() ) @@ -102594,39 +103210,38 @@ namespace VULKAN_HPP_NAMESPACE } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - PipelineVertexInputDivisorStateCreateInfoKHR & operator=( PipelineVertexInputDivisorStateCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PipelineVertexInputDivisorStateCreateInfo & operator=( PipelineVertexInputDivisorStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PipelineVertexInputDivisorStateCreateInfoKHR & operator=( VkPipelineVertexInputDivisorStateCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + PipelineVertexInputDivisorStateCreateInfo & operator=( VkPipelineVertexInputDivisorStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputDivisorStateCreateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputDivisorStateCreateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputDivisorStateCreateInfoKHR & - setVertexBindingDivisorCount( uint32_t 
vertexBindingDivisorCount_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputDivisorStateCreateInfo & setVertexBindingDivisorCount( uint32_t vertexBindingDivisorCount_ ) VULKAN_HPP_NOEXCEPT { vertexBindingDivisorCount = vertexBindingDivisorCount_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputDivisorStateCreateInfoKHR & - setPVertexBindingDivisors( const VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionKHR * pVertexBindingDivisors_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputDivisorStateCreateInfo & + setPVertexBindingDivisors( const VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescription * pVertexBindingDivisors_ ) VULKAN_HPP_NOEXCEPT { pVertexBindingDivisors = pVertexBindingDivisors_; return *this; } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - PipelineVertexInputDivisorStateCreateInfoKHR & setVertexBindingDivisors( - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & vertexBindingDivisors_ ) + PipelineVertexInputDivisorStateCreateInfo & setVertexBindingDivisors( + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & vertexBindingDivisors_ ) VULKAN_HPP_NOEXCEPT { vertexBindingDivisorCount = static_cast( vertexBindingDivisors_.size() ); @@ -102636,14 +103251,14 @@ namespace VULKAN_HPP_NAMESPACE # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPipelineVertexInputDivisorStateCreateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + operator VkPipelineVertexInputDivisorStateCreateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPipelineVertexInputDivisorStateCreateInfoKHR &() VULKAN_HPP_NOEXCEPT + operator VkPipelineVertexInputDivisorStateCreateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -102653,7 +103268,7 @@ namespace VULKAN_HPP_NAMESPACE std::tuple + const VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescription * const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { @@ -102662,9 +103277,9 @@ namespace VULKAN_HPP_NAMESPACE #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PipelineVertexInputDivisorStateCreateInfoKHR const & ) const = default; + auto operator<=>( PipelineVertexInputDivisorStateCreateInfo const & ) const = default; #else - bool operator==( PipelineVertexInputDivisorStateCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PipelineVertexInputDivisorStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); @@ -102674,26 +103289,27 @@ namespace VULKAN_HPP_NAMESPACE # endif } - bool operator!=( PipelineVertexInputDivisorStateCreateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PipelineVertexInputDivisorStateCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineVertexInputDivisorStateCreateInfoKHR; - const void * pNext = {}; - uint32_t vertexBindingDivisorCount = {}; - const VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionKHR * pVertexBindingDivisors = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineVertexInputDivisorStateCreateInfo; + const void * pNext = {}; + uint32_t vertexBindingDivisorCount = {}; + const VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescription * 
pVertexBindingDivisors = {}; }; template <> - struct CppType + struct CppType { - using Type = PipelineVertexInputDivisorStateCreateInfoKHR; + using Type = PipelineVertexInputDivisorStateCreateInfo; }; - using PipelineVertexInputDivisorStateCreateInfoEXT = PipelineVertexInputDivisorStateCreateInfoKHR; + using PipelineVertexInputDivisorStateCreateInfoEXT = PipelineVertexInputDivisorStateCreateInfo; + using PipelineVertexInputDivisorStateCreateInfoKHR = PipelineVertexInputDivisorStateCreateInfo; struct PipelineViewportCoarseSampleOrderStateCreateInfoNV { @@ -105073,20 +105689,20 @@ namespace VULKAN_HPP_NAMESPACE using Type = ProtectedSubmitInfo; }; - struct PushConstantsInfoKHR + struct PushConstantsInfo { - using NativeType = VkPushConstantsInfoKHR; + using NativeType = VkPushConstantsInfo; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePushConstantsInfoKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePushConstantsInfo; #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PushConstantsInfoKHR( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, - VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ = {}, - uint32_t offset_ = {}, - uint32_t size_ = {}, - const void * pValues_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PushConstantsInfo( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, + VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ = {}, + uint32_t offset_ = {}, + uint32_t size_ = {}, + const void * pValues_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , layout{ layout_ } , stageFlags{ stageFlags_ } @@ -105096,20 +105712,17 @@ namespace VULKAN_HPP_NAMESPACE { } - VULKAN_HPP_CONSTEXPR PushConstantsInfoKHR( PushConstantsInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR PushConstantsInfo( PushConstantsInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PushConstantsInfoKHR( VkPushConstantsInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : PushConstantsInfoKHR( *reinterpret_cast( &rhs ) ) - { - } + PushConstantsInfo( VkPushConstantsInfo const & rhs ) VULKAN_HPP_NOEXCEPT : PushConstantsInfo( *reinterpret_cast( &rhs ) ) {} # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) template - PushConstantsInfoKHR( VULKAN_HPP_NAMESPACE::PipelineLayout layout_, - VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_, - uint32_t offset_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & values_, - const void * pNext_ = nullptr ) + PushConstantsInfo( VULKAN_HPP_NAMESPACE::PipelineLayout layout_, + VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_, + uint32_t offset_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & values_, + const void * pNext_ = nullptr ) : pNext( pNext_ ) , layout( layout_ ) , stageFlags( stageFlags_ ) @@ -105120,47 +105733,47 @@ namespace VULKAN_HPP_NAMESPACE } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - PushConstantsInfoKHR & operator=( PushConstantsInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PushConstantsInfo & operator=( PushConstantsInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PushConstantsInfoKHR & operator=( VkPushConstantsInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + PushConstantsInfo & operator=( VkPushConstantsInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( 
VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PushConstantsInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PushConstantsInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PushConstantsInfoKHR & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PushConstantsInfo & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT { layout = layout_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PushConstantsInfoKHR & setStageFlags( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PushConstantsInfo & setStageFlags( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ ) VULKAN_HPP_NOEXCEPT { stageFlags = stageFlags_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PushConstantsInfoKHR & setOffset( uint32_t offset_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PushConstantsInfo & setOffset( uint32_t offset_ ) VULKAN_HPP_NOEXCEPT { offset = offset_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PushConstantsInfoKHR & setSize( uint32_t size_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PushConstantsInfo & setSize( uint32_t size_ ) VULKAN_HPP_NOEXCEPT { size = size_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PushConstantsInfoKHR & setPValues( const void * pValues_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PushConstantsInfo & setPValues( const void * pValues_ ) VULKAN_HPP_NOEXCEPT { pValues = pValues_; return *this; @@ -105168,7 +105781,7 @@ namespace VULKAN_HPP_NAMESPACE # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) template - PushConstantsInfoKHR & setValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & values_ ) VULKAN_HPP_NOEXCEPT + PushConstantsInfo & setValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & values_ ) VULKAN_HPP_NOEXCEPT { size = static_cast( values_.size() * sizeof( T ) ); pValues = values_.data(); @@ -105177,14 +105790,14 @@ namespace VULKAN_HPP_NAMESPACE # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPushConstantsInfoKHR const &() const VULKAN_HPP_NOEXCEPT + operator VkPushConstantsInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPushConstantsInfoKHR &() VULKAN_HPP_NOEXCEPT + operator VkPushConstantsInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -105206,9 +105819,9 @@ namespace VULKAN_HPP_NAMESPACE #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PushConstantsInfoKHR const & ) const = default; + auto operator<=>( PushConstantsInfo const & ) const = default; #else - bool operator==( PushConstantsInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PushConstantsInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); @@ -105218,14 +105831,14 @@ namespace VULKAN_HPP_NAMESPACE # endif } - bool operator!=( PushConstantsInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PushConstantsInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePushConstantsInfoKHR; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePushConstantsInfo; const void * pNext = {}; 
VULKAN_HPP_NAMESPACE::PipelineLayout layout = {}; VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags = {}; @@ -105235,11 +105848,13 @@ namespace VULKAN_HPP_NAMESPACE }; template <> - struct CppType + struct CppType { - using Type = PushConstantsInfoKHR; + using Type = PushConstantsInfo; }; + using PushConstantsInfoKHR = PushConstantsInfo; + struct WriteDescriptorSet { using NativeType = VkWriteDescriptorSet; @@ -105471,20 +106086,20 @@ namespace VULKAN_HPP_NAMESPACE using Type = WriteDescriptorSet; }; - struct PushDescriptorSetInfoKHR + struct PushDescriptorSetInfo { - using NativeType = VkPushDescriptorSetInfoKHR; + using NativeType = VkPushDescriptorSetInfo; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePushDescriptorSetInfoKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePushDescriptorSetInfo; #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PushDescriptorSetInfoKHR( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ = {}, - VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, - uint32_t set_ = {}, - uint32_t descriptorWriteCount_ = {}, - const VULKAN_HPP_NAMESPACE::WriteDescriptorSet * pDescriptorWrites_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PushDescriptorSetInfo( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ = {}, + VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, + uint32_t set_ = {}, + uint32_t descriptorWriteCount_ = {}, + const VULKAN_HPP_NAMESPACE::WriteDescriptorSet * pDescriptorWrites_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , stageFlags{ stageFlags_ } , layout{ layout_ } @@ -105494,19 +106109,19 @@ namespace VULKAN_HPP_NAMESPACE { } - VULKAN_HPP_CONSTEXPR PushDescriptorSetInfoKHR( PushDescriptorSetInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR PushDescriptorSetInfo( PushDescriptorSetInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PushDescriptorSetInfoKHR( VkPushDescriptorSetInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : PushDescriptorSetInfoKHR( *reinterpret_cast( &rhs ) ) + PushDescriptorSetInfo( VkPushDescriptorSetInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : PushDescriptorSetInfo( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - PushDescriptorSetInfoKHR( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_, - VULKAN_HPP_NAMESPACE::PipelineLayout layout_, - uint32_t set_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & descriptorWrites_, - const void * pNext_ = nullptr ) + PushDescriptorSetInfo( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_, + VULKAN_HPP_NAMESPACE::PipelineLayout layout_, + uint32_t set_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & descriptorWrites_, + const void * pNext_ = nullptr ) : pNext( pNext_ ) , stageFlags( stageFlags_ ) , layout( layout_ ) @@ -105517,47 +106132,47 @@ namespace VULKAN_HPP_NAMESPACE } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - PushDescriptorSetInfoKHR & operator=( PushDescriptorSetInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PushDescriptorSetInfo & operator=( PushDescriptorSetInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PushDescriptorSetInfoKHR & operator=( VkPushDescriptorSetInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + PushDescriptorSetInfo & operator=( VkPushDescriptorSetInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this 
= *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PushDescriptorSetInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PushDescriptorSetInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PushDescriptorSetInfoKHR & setStageFlags( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PushDescriptorSetInfo & setStageFlags( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ ) VULKAN_HPP_NOEXCEPT { stageFlags = stageFlags_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PushDescriptorSetInfoKHR & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PushDescriptorSetInfo & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT { layout = layout_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PushDescriptorSetInfoKHR & setSet( uint32_t set_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PushDescriptorSetInfo & setSet( uint32_t set_ ) VULKAN_HPP_NOEXCEPT { set = set_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PushDescriptorSetInfoKHR & setDescriptorWriteCount( uint32_t descriptorWriteCount_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PushDescriptorSetInfo & setDescriptorWriteCount( uint32_t descriptorWriteCount_ ) VULKAN_HPP_NOEXCEPT { descriptorWriteCount = descriptorWriteCount_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PushDescriptorSetInfoKHR & + VULKAN_HPP_CONSTEXPR_14 PushDescriptorSetInfo & setPDescriptorWrites( const VULKAN_HPP_NAMESPACE::WriteDescriptorSet * pDescriptorWrites_ ) VULKAN_HPP_NOEXCEPT { pDescriptorWrites = pDescriptorWrites_; @@ -105565,7 +106180,7 @@ namespace VULKAN_HPP_NAMESPACE } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - PushDescriptorSetInfoKHR & setDescriptorWrites( + PushDescriptorSetInfo & setDescriptorWrites( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & descriptorWrites_ ) VULKAN_HPP_NOEXCEPT { descriptorWriteCount = static_cast( descriptorWrites_.size() ); @@ -105575,14 +106190,14 @@ namespace VULKAN_HPP_NAMESPACE # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPushDescriptorSetInfoKHR const &() const VULKAN_HPP_NOEXCEPT + operator VkPushDescriptorSetInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPushDescriptorSetInfoKHR &() VULKAN_HPP_NOEXCEPT + operator VkPushDescriptorSetInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -105604,9 +106219,9 @@ namespace VULKAN_HPP_NAMESPACE #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PushDescriptorSetInfoKHR const & ) const = default; + auto operator<=>( PushDescriptorSetInfo const & ) const = default; #else - bool operator==( PushDescriptorSetInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PushDescriptorSetInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); @@ -105616,14 +106231,14 @@ namespace VULKAN_HPP_NAMESPACE # endif } - bool operator!=( PushDescriptorSetInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PushDescriptorSetInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::ePushDescriptorSetInfoKHR; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePushDescriptorSetInfo; const void * pNext = {}; VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags = {}; VULKAN_HPP_NAMESPACE::PipelineLayout layout = {}; @@ -105633,24 +106248,26 @@ namespace VULKAN_HPP_NAMESPACE }; template <> - struct CppType + struct CppType { - using Type = PushDescriptorSetInfoKHR; + using Type = PushDescriptorSetInfo; }; - struct PushDescriptorSetWithTemplateInfoKHR + using PushDescriptorSetInfoKHR = PushDescriptorSetInfo; + + struct PushDescriptorSetWithTemplateInfo { - using NativeType = VkPushDescriptorSetWithTemplateInfoKHR; + using NativeType = VkPushDescriptorSetWithTemplateInfo; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePushDescriptorSetWithTemplateInfoKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePushDescriptorSetWithTemplateInfo; #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR PushDescriptorSetWithTemplateInfoKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate_ = {}, - VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, - uint32_t set_ = {}, - const void * pData_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR PushDescriptorSetWithTemplateInfo( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate_ = {}, + VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, + uint32_t set_ = {}, + const void * pData_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , descriptorUpdateTemplate{ descriptorUpdateTemplate_ } , layout{ layout_ } @@ -105659,63 +106276,63 @@ namespace VULKAN_HPP_NAMESPACE { } - VULKAN_HPP_CONSTEXPR PushDescriptorSetWithTemplateInfoKHR( PushDescriptorSetWithTemplateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR PushDescriptorSetWithTemplateInfo( PushDescriptorSetWithTemplateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - PushDescriptorSetWithTemplateInfoKHR( VkPushDescriptorSetWithTemplateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : PushDescriptorSetWithTemplateInfoKHR( *reinterpret_cast( &rhs ) ) + PushDescriptorSetWithTemplateInfo( VkPushDescriptorSetWithTemplateInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : PushDescriptorSetWithTemplateInfo( *reinterpret_cast( &rhs ) ) { } - PushDescriptorSetWithTemplateInfoKHR & operator=( PushDescriptorSetWithTemplateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + PushDescriptorSetWithTemplateInfo & operator=( PushDescriptorSetWithTemplateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - PushDescriptorSetWithTemplateInfoKHR & operator=( VkPushDescriptorSetWithTemplateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + PushDescriptorSetWithTemplateInfo & operator=( VkPushDescriptorSetWithTemplateInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 PushDescriptorSetWithTemplateInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PushDescriptorSetWithTemplateInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PushDescriptorSetWithTemplateInfoKHR & + VULKAN_HPP_CONSTEXPR_14 PushDescriptorSetWithTemplateInfo & setDescriptorUpdateTemplate( 
VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate_ ) VULKAN_HPP_NOEXCEPT { descriptorUpdateTemplate = descriptorUpdateTemplate_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PushDescriptorSetWithTemplateInfoKHR & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PushDescriptorSetWithTemplateInfo & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT { layout = layout_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PushDescriptorSetWithTemplateInfoKHR & setSet( uint32_t set_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PushDescriptorSetWithTemplateInfo & setSet( uint32_t set_ ) VULKAN_HPP_NOEXCEPT { set = set_; return *this; } - VULKAN_HPP_CONSTEXPR_14 PushDescriptorSetWithTemplateInfoKHR & setPData( const void * pData_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PushDescriptorSetWithTemplateInfo & setPData( const void * pData_ ) VULKAN_HPP_NOEXCEPT { pData = pData_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPushDescriptorSetWithTemplateInfoKHR const &() const VULKAN_HPP_NOEXCEPT + operator VkPushDescriptorSetWithTemplateInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPushDescriptorSetWithTemplateInfoKHR &() VULKAN_HPP_NOEXCEPT + operator VkPushDescriptorSetWithTemplateInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -105736,9 +106353,9 @@ namespace VULKAN_HPP_NAMESPACE #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( PushDescriptorSetWithTemplateInfoKHR const & ) const = default; + auto operator<=>( PushDescriptorSetWithTemplateInfo const & ) const = default; #else - bool operator==( PushDescriptorSetWithTemplateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( PushDescriptorSetWithTemplateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); @@ -105748,14 +106365,14 @@ namespace VULKAN_HPP_NAMESPACE # endif } - bool operator!=( PushDescriptorSetWithTemplateInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( PushDescriptorSetWithTemplateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePushDescriptorSetWithTemplateInfoKHR; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePushDescriptorSetWithTemplateInfo; const void * pNext = {}; VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate = {}; VULKAN_HPP_NAMESPACE::PipelineLayout layout = {}; @@ -105764,11 +106381,13 @@ namespace VULKAN_HPP_NAMESPACE }; template <> - struct CppType + struct CppType { - using Type = PushDescriptorSetWithTemplateInfoKHR; + using Type = PushDescriptorSetWithTemplateInfo; }; + using PushDescriptorSetWithTemplateInfoKHR = PushDescriptorSetWithTemplateInfo; + struct QueryLowLatencySupportNV { using NativeType = VkQueryLowLatencySupportNV; @@ -106498,64 +107117,63 @@ namespace VULKAN_HPP_NAMESPACE using Type = QueueFamilyCheckpointPropertiesNV; }; - struct QueueFamilyGlobalPriorityPropertiesKHR + struct QueueFamilyGlobalPriorityProperties { - using NativeType = VkQueueFamilyGlobalPriorityPropertiesKHR; + using NativeType = VkQueueFamilyGlobalPriorityProperties; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType 
structureType = StructureType::eQueueFamilyGlobalPriorityPropertiesKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueueFamilyGlobalPriorityProperties; #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR_14 - QueueFamilyGlobalPriorityPropertiesKHR( uint32_t priorityCount_ = {}, - std::array const & - priorities_ = { { VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow, - VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow, - VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow, - VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow, - VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow, - VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow, - VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow, - VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow, - VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow, - VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow, - VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow, - VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow, - VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow, - VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow, - VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow, - VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR::eLow } }, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 QueueFamilyGlobalPriorityProperties( uint32_t priorityCount_ = {}, + std::array const & + priorities_ = { { VULKAN_HPP_NAMESPACE::QueueGlobalPriority::eLow, + VULKAN_HPP_NAMESPACE::QueueGlobalPriority::eLow, + VULKAN_HPP_NAMESPACE::QueueGlobalPriority::eLow, + VULKAN_HPP_NAMESPACE::QueueGlobalPriority::eLow, + VULKAN_HPP_NAMESPACE::QueueGlobalPriority::eLow, + VULKAN_HPP_NAMESPACE::QueueGlobalPriority::eLow, + VULKAN_HPP_NAMESPACE::QueueGlobalPriority::eLow, + VULKAN_HPP_NAMESPACE::QueueGlobalPriority::eLow, + VULKAN_HPP_NAMESPACE::QueueGlobalPriority::eLow, + VULKAN_HPP_NAMESPACE::QueueGlobalPriority::eLow, + VULKAN_HPP_NAMESPACE::QueueGlobalPriority::eLow, + VULKAN_HPP_NAMESPACE::QueueGlobalPriority::eLow, + VULKAN_HPP_NAMESPACE::QueueGlobalPriority::eLow, + VULKAN_HPP_NAMESPACE::QueueGlobalPriority::eLow, + VULKAN_HPP_NAMESPACE::QueueGlobalPriority::eLow, + VULKAN_HPP_NAMESPACE::QueueGlobalPriority::eLow } }, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , priorityCount{ priorityCount_ } , priorities{ priorities_ } { } - VULKAN_HPP_CONSTEXPR_14 QueueFamilyGlobalPriorityPropertiesKHR( QueueFamilyGlobalPriorityPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR_14 QueueFamilyGlobalPriorityProperties( QueueFamilyGlobalPriorityProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; - QueueFamilyGlobalPriorityPropertiesKHR( VkQueueFamilyGlobalPriorityPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : QueueFamilyGlobalPriorityPropertiesKHR( *reinterpret_cast( &rhs ) ) + QueueFamilyGlobalPriorityProperties( VkQueueFamilyGlobalPriorityProperties const & rhs ) VULKAN_HPP_NOEXCEPT + : QueueFamilyGlobalPriorityProperties( *reinterpret_cast( &rhs ) ) { } - QueueFamilyGlobalPriorityPropertiesKHR & operator=( QueueFamilyGlobalPriorityPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + QueueFamilyGlobalPriorityProperties & operator=( QueueFamilyGlobalPriorityProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - QueueFamilyGlobalPriorityPropertiesKHR & operator=( VkQueueFamilyGlobalPriorityPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT + QueueFamilyGlobalPriorityProperties & operator=( 
VkQueueFamilyGlobalPriorityProperties const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkQueueFamilyGlobalPriorityPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT + operator VkQueueFamilyGlobalPriorityProperties const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkQueueFamilyGlobalPriorityPropertiesKHR &() VULKAN_HPP_NOEXCEPT + operator VkQueueFamilyGlobalPriorityProperties &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -106565,7 +107183,7 @@ namespace VULKAN_HPP_NAMESPACE std::tuple const &> + VULKAN_HPP_NAMESPACE::ArrayWrapper1D const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { @@ -106574,7 +107192,7 @@ namespace VULKAN_HPP_NAMESPACE #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - std::strong_ordering operator<=>( QueueFamilyGlobalPriorityPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + std::strong_ordering operator<=>( QueueFamilyGlobalPriorityProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { if ( auto cmp = sType <=> rhs.sType; cmp != 0 ) return cmp; @@ -106592,31 +107210,32 @@ namespace VULKAN_HPP_NAMESPACE } #endif - bool operator==( QueueFamilyGlobalPriorityPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( QueueFamilyGlobalPriorityProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( priorityCount == rhs.priorityCount ) && - ( memcmp( priorities, rhs.priorities, priorityCount * sizeof( VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR ) ) == 0 ); + ( memcmp( priorities, rhs.priorities, priorityCount * sizeof( VULKAN_HPP_NAMESPACE::QueueGlobalPriority ) ) == 0 ); } - bool operator!=( QueueFamilyGlobalPriorityPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( QueueFamilyGlobalPriorityProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueueFamilyGlobalPriorityPropertiesKHR; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueueFamilyGlobalPriorityProperties; void * pNext = {}; uint32_t priorityCount = {}; - VULKAN_HPP_NAMESPACE::ArrayWrapper1D priorities = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D priorities = {}; }; template <> - struct CppType + struct CppType { - using Type = QueueFamilyGlobalPriorityPropertiesKHR; + using Type = QueueFamilyGlobalPriorityProperties; }; - using QueueFamilyGlobalPriorityPropertiesEXT = QueueFamilyGlobalPriorityPropertiesKHR; + using QueueFamilyGlobalPriorityPropertiesEXT = QueueFamilyGlobalPriorityProperties; + using QueueFamilyGlobalPriorityPropertiesKHR = QueueFamilyGlobalPriorityProperties; struct QueueFamilyProperties { @@ -111484,20 +112103,20 @@ namespace VULKAN_HPP_NAMESPACE using Type = RenderPassTransformBeginInfoQCOM; }; - struct RenderingAreaInfoKHR + struct RenderingAreaInfo { - using NativeType = VkRenderingAreaInfoKHR; + using NativeType = VkRenderingAreaInfo; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderingAreaInfoKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderingAreaInfo; #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR RenderingAreaInfoKHR( uint32_t viewMask_ = {}, - uint32_t 
colorAttachmentCount_ = {}, - const VULKAN_HPP_NAMESPACE::Format * pColorAttachmentFormats_ = {}, - VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, - VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR RenderingAreaInfo( uint32_t viewMask_ = {}, + uint32_t colorAttachmentCount_ = {}, + const VULKAN_HPP_NAMESPACE::Format * pColorAttachmentFormats_ = {}, + VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , viewMask{ viewMask_ } , colorAttachmentCount{ colorAttachmentCount_ } @@ -111507,19 +112126,16 @@ namespace VULKAN_HPP_NAMESPACE { } - VULKAN_HPP_CONSTEXPR RenderingAreaInfoKHR( RenderingAreaInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR RenderingAreaInfo( RenderingAreaInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - RenderingAreaInfoKHR( VkRenderingAreaInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : RenderingAreaInfoKHR( *reinterpret_cast( &rhs ) ) - { - } + RenderingAreaInfo( VkRenderingAreaInfo const & rhs ) VULKAN_HPP_NOEXCEPT : RenderingAreaInfo( *reinterpret_cast( &rhs ) ) {} # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - RenderingAreaInfoKHR( uint32_t viewMask_, - VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & colorAttachmentFormats_, - VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, - VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, - const void * pNext_ = nullptr ) + RenderingAreaInfo( uint32_t viewMask_, + VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & colorAttachmentFormats_, + VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, + const void * pNext_ = nullptr ) : pNext( pNext_ ) , viewMask( viewMask_ ) , colorAttachmentCount( static_cast( colorAttachmentFormats_.size() ) ) @@ -111530,43 +112146,42 @@ namespace VULKAN_HPP_NAMESPACE } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - RenderingAreaInfoKHR & operator=( RenderingAreaInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + RenderingAreaInfo & operator=( RenderingAreaInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - RenderingAreaInfoKHR & operator=( VkRenderingAreaInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + RenderingAreaInfo & operator=( VkRenderingAreaInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 RenderingAreaInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 RenderingAreaInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 RenderingAreaInfoKHR & setViewMask( uint32_t viewMask_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 RenderingAreaInfo & setViewMask( uint32_t viewMask_ ) VULKAN_HPP_NOEXCEPT { viewMask = viewMask_; return *this; } - VULKAN_HPP_CONSTEXPR_14 RenderingAreaInfoKHR & setColorAttachmentCount( uint32_t colorAttachmentCount_ ) 
VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 RenderingAreaInfo & setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT { colorAttachmentCount = colorAttachmentCount_; return *this; } - VULKAN_HPP_CONSTEXPR_14 RenderingAreaInfoKHR & - setPColorAttachmentFormats( const VULKAN_HPP_NAMESPACE::Format * pColorAttachmentFormats_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 RenderingAreaInfo & setPColorAttachmentFormats( const VULKAN_HPP_NAMESPACE::Format * pColorAttachmentFormats_ ) VULKAN_HPP_NOEXCEPT { pColorAttachmentFormats = pColorAttachmentFormats_; return *this; } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - RenderingAreaInfoKHR & setColorAttachmentFormats( + RenderingAreaInfo & setColorAttachmentFormats( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & colorAttachmentFormats_ ) VULKAN_HPP_NOEXCEPT { colorAttachmentCount = static_cast( colorAttachmentFormats_.size() ); @@ -111575,27 +112190,27 @@ namespace VULKAN_HPP_NAMESPACE } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - VULKAN_HPP_CONSTEXPR_14 RenderingAreaInfoKHR & setDepthAttachmentFormat( VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 RenderingAreaInfo & setDepthAttachmentFormat( VULKAN_HPP_NAMESPACE::Format depthAttachmentFormat_ ) VULKAN_HPP_NOEXCEPT { depthAttachmentFormat = depthAttachmentFormat_; return *this; } - VULKAN_HPP_CONSTEXPR_14 RenderingAreaInfoKHR & setStencilAttachmentFormat( VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 RenderingAreaInfo & setStencilAttachmentFormat( VULKAN_HPP_NAMESPACE::Format stencilAttachmentFormat_ ) VULKAN_HPP_NOEXCEPT { stencilAttachmentFormat = stencilAttachmentFormat_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkRenderingAreaInfoKHR const &() const VULKAN_HPP_NOEXCEPT + operator VkRenderingAreaInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkRenderingAreaInfoKHR &() VULKAN_HPP_NOEXCEPT + operator VkRenderingAreaInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -111617,9 +112232,9 @@ namespace VULKAN_HPP_NAMESPACE #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( RenderingAreaInfoKHR const & ) const = default; + auto operator<=>( RenderingAreaInfo const & ) const = default; #else - bool operator==( RenderingAreaInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( RenderingAreaInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); @@ -111630,14 +112245,14 @@ namespace VULKAN_HPP_NAMESPACE # endif } - bool operator!=( RenderingAreaInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( RenderingAreaInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderingAreaInfoKHR; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderingAreaInfo; const void * pNext = {}; uint32_t viewMask = {}; uint32_t colorAttachmentCount = {}; @@ -111647,11 +112262,13 @@ namespace VULKAN_HPP_NAMESPACE }; template <> - struct CppType + struct CppType { - using Type = RenderingAreaInfoKHR; + using Type = RenderingAreaInfo; }; + using RenderingAreaInfoKHR = RenderingAreaInfo; + struct RenderingAttachmentInfo { using NativeType = 
VkRenderingAttachmentInfo; @@ -111805,33 +112422,33 @@ namespace VULKAN_HPP_NAMESPACE using RenderingAttachmentInfoKHR = RenderingAttachmentInfo; - struct RenderingAttachmentLocationInfoKHR + struct RenderingAttachmentLocationInfo { - using NativeType = VkRenderingAttachmentLocationInfoKHR; + using NativeType = VkRenderingAttachmentLocationInfo; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderingAttachmentLocationInfoKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderingAttachmentLocationInfo; #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR RenderingAttachmentLocationInfoKHR( uint32_t colorAttachmentCount_ = {}, - const uint32_t * pColorAttachmentLocations_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR RenderingAttachmentLocationInfo( uint32_t colorAttachmentCount_ = {}, + const uint32_t * pColorAttachmentLocations_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , colorAttachmentCount{ colorAttachmentCount_ } , pColorAttachmentLocations{ pColorAttachmentLocations_ } { } - VULKAN_HPP_CONSTEXPR RenderingAttachmentLocationInfoKHR( RenderingAttachmentLocationInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR RenderingAttachmentLocationInfo( RenderingAttachmentLocationInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - RenderingAttachmentLocationInfoKHR( VkRenderingAttachmentLocationInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : RenderingAttachmentLocationInfoKHR( *reinterpret_cast( &rhs ) ) + RenderingAttachmentLocationInfo( VkRenderingAttachmentLocationInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : RenderingAttachmentLocationInfo( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - RenderingAttachmentLocationInfoKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & colorAttachmentLocations_, - const void * pNext_ = nullptr ) + RenderingAttachmentLocationInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & colorAttachmentLocations_, + const void * pNext_ = nullptr ) : pNext( pNext_ ) , colorAttachmentCount( static_cast( colorAttachmentLocations_.size() ) ) , pColorAttachmentLocations( colorAttachmentLocations_.data() ) @@ -111839,36 +112456,36 @@ namespace VULKAN_HPP_NAMESPACE } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - RenderingAttachmentLocationInfoKHR & operator=( RenderingAttachmentLocationInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + RenderingAttachmentLocationInfo & operator=( RenderingAttachmentLocationInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - RenderingAttachmentLocationInfoKHR & operator=( VkRenderingAttachmentLocationInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + RenderingAttachmentLocationInfo & operator=( VkRenderingAttachmentLocationInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentLocationInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentLocationInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentLocationInfoKHR & setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 
RenderingAttachmentLocationInfo & setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT { colorAttachmentCount = colorAttachmentCount_; return *this; } - VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentLocationInfoKHR & setPColorAttachmentLocations( const uint32_t * pColorAttachmentLocations_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 RenderingAttachmentLocationInfo & setPColorAttachmentLocations( const uint32_t * pColorAttachmentLocations_ ) VULKAN_HPP_NOEXCEPT { pColorAttachmentLocations = pColorAttachmentLocations_; return *this; } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - RenderingAttachmentLocationInfoKHR & + RenderingAttachmentLocationInfo & setColorAttachmentLocations( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & colorAttachmentLocations_ ) VULKAN_HPP_NOEXCEPT { colorAttachmentCount = static_cast( colorAttachmentLocations_.size() ); @@ -111878,14 +112495,14 @@ namespace VULKAN_HPP_NAMESPACE # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkRenderingAttachmentLocationInfoKHR const &() const VULKAN_HPP_NOEXCEPT + operator VkRenderingAttachmentLocationInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkRenderingAttachmentLocationInfoKHR &() VULKAN_HPP_NOEXCEPT + operator VkRenderingAttachmentLocationInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -111901,9 +112518,9 @@ namespace VULKAN_HPP_NAMESPACE #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( RenderingAttachmentLocationInfoKHR const & ) const = default; + auto operator<=>( RenderingAttachmentLocationInfo const & ) const = default; #else - bool operator==( RenderingAttachmentLocationInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( RenderingAttachmentLocationInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); @@ -111913,25 +112530,27 @@ namespace VULKAN_HPP_NAMESPACE # endif } - bool operator!=( RenderingAttachmentLocationInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( RenderingAttachmentLocationInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderingAttachmentLocationInfoKHR; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderingAttachmentLocationInfo; const void * pNext = {}; uint32_t colorAttachmentCount = {}; const uint32_t * pColorAttachmentLocations = {}; }; template <> - struct CppType + struct CppType { - using Type = RenderingAttachmentLocationInfoKHR; + using Type = RenderingAttachmentLocationInfo; }; + using RenderingAttachmentLocationInfoKHR = RenderingAttachmentLocationInfo; + struct RenderingFragmentDensityMapAttachmentInfoEXT { using NativeType = VkRenderingFragmentDensityMapAttachmentInfoEXT; @@ -112368,19 +112987,19 @@ namespace VULKAN_HPP_NAMESPACE using RenderingInfoKHR = RenderingInfo; - struct RenderingInputAttachmentIndexInfoKHR + struct RenderingInputAttachmentIndexInfo { - using NativeType = VkRenderingInputAttachmentIndexInfoKHR; + using NativeType = VkRenderingInputAttachmentIndexInfo; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderingInputAttachmentIndexInfoKHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR 
StructureType structureType = StructureType::eRenderingInputAttachmentIndexInfo; #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR RenderingInputAttachmentIndexInfoKHR( uint32_t colorAttachmentCount_ = {}, - const uint32_t * pColorAttachmentInputIndices_ = {}, - const uint32_t * pDepthInputAttachmentIndex_ = {}, - const uint32_t * pStencilInputAttachmentIndex_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR RenderingInputAttachmentIndexInfo( uint32_t colorAttachmentCount_ = {}, + const uint32_t * pColorAttachmentInputIndices_ = {}, + const uint32_t * pDepthInputAttachmentIndex_ = {}, + const uint32_t * pStencilInputAttachmentIndex_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , colorAttachmentCount{ colorAttachmentCount_ } , pColorAttachmentInputIndices{ pColorAttachmentInputIndices_ } @@ -112389,18 +113008,18 @@ namespace VULKAN_HPP_NAMESPACE { } - VULKAN_HPP_CONSTEXPR RenderingInputAttachmentIndexInfoKHR( RenderingInputAttachmentIndexInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR RenderingInputAttachmentIndexInfo( RenderingInputAttachmentIndexInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; - RenderingInputAttachmentIndexInfoKHR( VkRenderingInputAttachmentIndexInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT - : RenderingInputAttachmentIndexInfoKHR( *reinterpret_cast( &rhs ) ) + RenderingInputAttachmentIndexInfo( VkRenderingInputAttachmentIndexInfo const & rhs ) VULKAN_HPP_NOEXCEPT + : RenderingInputAttachmentIndexInfo( *reinterpret_cast( &rhs ) ) { } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - RenderingInputAttachmentIndexInfoKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & colorAttachmentInputIndices_, - const uint32_t * pDepthInputAttachmentIndex_ = {}, - const uint32_t * pStencilInputAttachmentIndex_ = {}, - const void * pNext_ = nullptr ) + RenderingInputAttachmentIndexInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & colorAttachmentInputIndices_, + const uint32_t * pDepthInputAttachmentIndex_ = {}, + const uint32_t * pStencilInputAttachmentIndex_ = {}, + const void * pNext_ = nullptr ) : pNext( pNext_ ) , colorAttachmentCount( static_cast( colorAttachmentInputIndices_.size() ) ) , pColorAttachmentInputIndices( colorAttachmentInputIndices_.data() ) @@ -112410,29 +113029,29 @@ namespace VULKAN_HPP_NAMESPACE } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - RenderingInputAttachmentIndexInfoKHR & operator=( RenderingInputAttachmentIndexInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + RenderingInputAttachmentIndexInfo & operator=( RenderingInputAttachmentIndexInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - RenderingInputAttachmentIndexInfoKHR & operator=( VkRenderingInputAttachmentIndexInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT + RenderingInputAttachmentIndexInfo & operator=( VkRenderingInputAttachmentIndexInfo const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) - VULKAN_HPP_CONSTEXPR_14 RenderingInputAttachmentIndexInfoKHR & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 RenderingInputAttachmentIndexInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { pNext = pNext_; return *this; } - VULKAN_HPP_CONSTEXPR_14 RenderingInputAttachmentIndexInfoKHR & setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT + 
VULKAN_HPP_CONSTEXPR_14 RenderingInputAttachmentIndexInfo & setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT { colorAttachmentCount = colorAttachmentCount_; return *this; } - VULKAN_HPP_CONSTEXPR_14 RenderingInputAttachmentIndexInfoKHR & + VULKAN_HPP_CONSTEXPR_14 RenderingInputAttachmentIndexInfo & setPColorAttachmentInputIndices( const uint32_t * pColorAttachmentInputIndices_ ) VULKAN_HPP_NOEXCEPT { pColorAttachmentInputIndices = pColorAttachmentInputIndices_; @@ -112440,7 +113059,7 @@ namespace VULKAN_HPP_NAMESPACE } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) - RenderingInputAttachmentIndexInfoKHR & + RenderingInputAttachmentIndexInfo & setColorAttachmentInputIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries const & colorAttachmentInputIndices_ ) VULKAN_HPP_NOEXCEPT { colorAttachmentCount = static_cast( colorAttachmentInputIndices_.size() ); @@ -112449,14 +113068,14 @@ namespace VULKAN_HPP_NAMESPACE } # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ - VULKAN_HPP_CONSTEXPR_14 RenderingInputAttachmentIndexInfoKHR & + VULKAN_HPP_CONSTEXPR_14 RenderingInputAttachmentIndexInfo & setPDepthInputAttachmentIndex( const uint32_t * pDepthInputAttachmentIndex_ ) VULKAN_HPP_NOEXCEPT { pDepthInputAttachmentIndex = pDepthInputAttachmentIndex_; return *this; } - VULKAN_HPP_CONSTEXPR_14 RenderingInputAttachmentIndexInfoKHR & + VULKAN_HPP_CONSTEXPR_14 RenderingInputAttachmentIndexInfo & setPStencilInputAttachmentIndex( const uint32_t * pStencilInputAttachmentIndex_ ) VULKAN_HPP_NOEXCEPT { pStencilInputAttachmentIndex = pStencilInputAttachmentIndex_; @@ -112464,14 +113083,14 @@ namespace VULKAN_HPP_NAMESPACE } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkRenderingInputAttachmentIndexInfoKHR const &() const VULKAN_HPP_NOEXCEPT + operator VkRenderingInputAttachmentIndexInfo const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkRenderingInputAttachmentIndexInfoKHR &() VULKAN_HPP_NOEXCEPT + operator VkRenderingInputAttachmentIndexInfo &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -112492,9 +113111,9 @@ namespace VULKAN_HPP_NAMESPACE #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( RenderingInputAttachmentIndexInfoKHR const & ) const = default; + auto operator<=>( RenderingInputAttachmentIndexInfo const & ) const = default; #else - bool operator==( RenderingInputAttachmentIndexInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( RenderingInputAttachmentIndexInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); @@ -112505,14 +113124,14 @@ namespace VULKAN_HPP_NAMESPACE # endif } - bool operator!=( RenderingInputAttachmentIndexInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( RenderingInputAttachmentIndexInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderingInputAttachmentIndexInfoKHR; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderingInputAttachmentIndexInfo; const void * pNext = {}; uint32_t colorAttachmentCount = {}; const uint32_t * pColorAttachmentInputIndices = {}; @@ -112521,11 +113140,13 @@ namespace VULKAN_HPP_NAMESPACE }; template <> - struct CppType + struct CppType { - using Type = RenderingInputAttachmentIndexInfoKHR; + using Type = 
RenderingInputAttachmentIndexInfo; }; + using RenderingInputAttachmentIndexInfoKHR = RenderingInputAttachmentIndexInfo; + struct ResolveImageInfo2 { using NativeType = VkResolveImageInfo2; @@ -118117,44 +118738,44 @@ namespace VULKAN_HPP_NAMESPACE using Type = SubpassShadingPipelineCreateInfoHUAWEI; }; - struct SubresourceHostMemcpySizeEXT + struct SubresourceHostMemcpySize { - using NativeType = VkSubresourceHostMemcpySizeEXT; + using NativeType = VkSubresourceHostMemcpySize; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubresourceHostMemcpySizeEXT; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubresourceHostMemcpySize; #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR SubresourceHostMemcpySizeEXT( VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR SubresourceHostMemcpySize( VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , size{ size_ } { } - VULKAN_HPP_CONSTEXPR SubresourceHostMemcpySizeEXT( SubresourceHostMemcpySizeEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR SubresourceHostMemcpySize( SubresourceHostMemcpySize const & rhs ) VULKAN_HPP_NOEXCEPT = default; - SubresourceHostMemcpySizeEXT( VkSubresourceHostMemcpySizeEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : SubresourceHostMemcpySizeEXT( *reinterpret_cast( &rhs ) ) + SubresourceHostMemcpySize( VkSubresourceHostMemcpySize const & rhs ) VULKAN_HPP_NOEXCEPT + : SubresourceHostMemcpySize( *reinterpret_cast( &rhs ) ) { } - SubresourceHostMemcpySizeEXT & operator=( SubresourceHostMemcpySizeEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + SubresourceHostMemcpySize & operator=( SubresourceHostMemcpySize const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - SubresourceHostMemcpySizeEXT & operator=( VkSubresourceHostMemcpySizeEXT const & rhs ) VULKAN_HPP_NOEXCEPT + SubresourceHostMemcpySize & operator=( VkSubresourceHostMemcpySize const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkSubresourceHostMemcpySizeEXT const &() const VULKAN_HPP_NOEXCEPT + operator VkSubresourceHostMemcpySize const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkSubresourceHostMemcpySizeEXT &() VULKAN_HPP_NOEXCEPT + operator VkSubresourceHostMemcpySize &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -118170,9 +118791,9 @@ namespace VULKAN_HPP_NAMESPACE #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( SubresourceHostMemcpySizeEXT const & ) const = default; + auto operator<=>( SubresourceHostMemcpySize const & ) const = default; #else - bool operator==( SubresourceHostMemcpySizeEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( SubresourceHostMemcpySize const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); @@ -118181,62 +118802,61 @@ namespace VULKAN_HPP_NAMESPACE # endif } - bool operator!=( SubresourceHostMemcpySizeEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SubresourceHostMemcpySize const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - 
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubresourceHostMemcpySizeEXT; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubresourceHostMemcpySize; void * pNext = {}; VULKAN_HPP_NAMESPACE::DeviceSize size = {}; }; template <> - struct CppType + struct CppType { - using Type = SubresourceHostMemcpySizeEXT; + using Type = SubresourceHostMemcpySize; }; - struct SubresourceLayout2KHR + using SubresourceHostMemcpySizeEXT = SubresourceHostMemcpySize; + + struct SubresourceLayout2 { - using NativeType = VkSubresourceLayout2KHR; + using NativeType = VkSubresourceLayout2; static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubresourceLayout2KHR; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubresourceLayout2; #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR SubresourceLayout2KHR( VULKAN_HPP_NAMESPACE::SubresourceLayout subresourceLayout_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR SubresourceLayout2( VULKAN_HPP_NAMESPACE::SubresourceLayout subresourceLayout_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , subresourceLayout{ subresourceLayout_ } { } - VULKAN_HPP_CONSTEXPR SubresourceLayout2KHR( SubresourceLayout2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + VULKAN_HPP_CONSTEXPR SubresourceLayout2( SubresourceLayout2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; - SubresourceLayout2KHR( VkSubresourceLayout2KHR const & rhs ) VULKAN_HPP_NOEXCEPT - : SubresourceLayout2KHR( *reinterpret_cast( &rhs ) ) - { - } + SubresourceLayout2( VkSubresourceLayout2 const & rhs ) VULKAN_HPP_NOEXCEPT : SubresourceLayout2( *reinterpret_cast( &rhs ) ) {} - SubresourceLayout2KHR & operator=( SubresourceLayout2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; + SubresourceLayout2 & operator=( SubresourceLayout2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - SubresourceLayout2KHR & operator=( VkSubresourceLayout2KHR const & rhs ) VULKAN_HPP_NOEXCEPT + SubresourceLayout2 & operator=( VkSubresourceLayout2 const & rhs ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); + *this = *reinterpret_cast( &rhs ); return *this; } - operator VkSubresourceLayout2KHR const &() const VULKAN_HPP_NOEXCEPT + operator VkSubresourceLayout2 const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkSubresourceLayout2KHR &() VULKAN_HPP_NOEXCEPT + operator VkSubresourceLayout2 &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -118252,9 +118872,9 @@ namespace VULKAN_HPP_NAMESPACE #endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( SubresourceLayout2KHR const & ) const = default; + auto operator<=>( SubresourceLayout2 const & ) const = default; #else - bool operator==( SubresourceLayout2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator==( SubresourceLayout2 const & rhs ) const VULKAN_HPP_NOEXCEPT { # if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); @@ -118263,25 +118883,26 @@ namespace VULKAN_HPP_NAMESPACE # endif } - bool operator!=( SubresourceLayout2KHR const & rhs ) const VULKAN_HPP_NOEXCEPT + bool operator!=( SubresourceLayout2 const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } #endif public: - VULKAN_HPP_NAMESPACE::StructureType 
sType = StructureType::eSubresourceLayout2KHR; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubresourceLayout2; void * pNext = {}; VULKAN_HPP_NAMESPACE::SubresourceLayout subresourceLayout = {}; }; template <> - struct CppType + struct CppType { - using Type = SubresourceLayout2KHR; + using Type = SubresourceLayout2; }; - using SubresourceLayout2EXT = SubresourceLayout2KHR; + using SubresourceLayout2EXT = SubresourceLayout2; + using SubresourceLayout2KHR = SubresourceLayout2; struct SurfaceCapabilities2EXT { diff --git a/third_party/vulkan/vulkan_to_string.hpp b/third_party/vulkan/vulkan_to_string.hpp index acaa3ab..971dc25 100644 --- a/third_party/vulkan/vulkan_to_string.hpp +++ b/third_party/vulkan/vulkan_to_string.hpp @@ -182,6 +182,8 @@ namespace VULKAN_HPP_NAMESPACE result += "TransientAttachment | "; if ( value & ImageUsageFlagBits::eInputAttachment ) result += "InputAttachment | "; + if ( value & ImageUsageFlagBits::eHostTransfer ) + result += "HostTransfer | "; if ( value & ImageUsageFlagBits::eVideoDecodeDstKHR ) result += "VideoDecodeDstKHR | "; if ( value & ImageUsageFlagBits::eVideoDecodeSrcKHR ) @@ -192,8 +194,6 @@ namespace VULKAN_HPP_NAMESPACE result += "FragmentDensityMapEXT | "; if ( value & ImageUsageFlagBits::eFragmentShadingRateAttachmentKHR ) result += "FragmentShadingRateAttachmentKHR | "; - if ( value & ImageUsageFlagBits::eHostTransferEXT ) - result += "HostTransferEXT | "; if ( value & ImageUsageFlagBits::eVideoEncodeDstKHR ) result += "VideoEncodeDstKHR | "; if ( value & ImageUsageFlagBits::eVideoEncodeSrcKHR ) @@ -753,6 +753,10 @@ namespace VULKAN_HPP_NAMESPACE result += "FailOnPipelineCompileRequired | "; if ( value & PipelineCreateFlagBits::eEarlyReturnOnFailure ) result += "EarlyReturnOnFailure | "; + if ( value & PipelineCreateFlagBits::eNoProtectedAccess ) + result += "NoProtectedAccess | "; + if ( value & PipelineCreateFlagBits::eProtectedAccessOnly ) + result += "ProtectedAccessOnly | "; if ( value & PipelineCreateFlagBits::eRayTracingNoNullAnyHitShadersKHR ) result += "RayTracingNoNullAnyHitShadersKHR | "; if ( value & PipelineCreateFlagBits::eRayTracingNoNullClosestHitShadersKHR ) @@ -799,10 +803,6 @@ namespace VULKAN_HPP_NAMESPACE if ( value & PipelineCreateFlagBits::eRayTracingDisplacementMicromapNV ) result += "RayTracingDisplacementMicromapNV | "; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ - if ( value & PipelineCreateFlagBits::eNoProtectedAccessEXT ) - result += "NoProtectedAccessEXT | "; - if ( value & PipelineCreateFlagBits::eProtectedAccessOnlyEXT ) - result += "ProtectedAccessOnlyEXT | "; return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } @@ -977,8 +977,8 @@ namespace VULKAN_HPP_NAMESPACE std::string result; if ( value & DescriptorSetLayoutCreateFlagBits::eUpdateAfterBindPool ) result += "UpdateAfterBindPool | "; - if ( value & DescriptorSetLayoutCreateFlagBits::ePushDescriptorKHR ) - result += "PushDescriptorKHR | "; + if ( value & DescriptorSetLayoutCreateFlagBits::ePushDescriptor ) + result += "PushDescriptor | "; if ( value & DescriptorSetLayoutCreateFlagBits::eDescriptorBufferEXT ) result += "DescriptorBufferEXT | "; if ( value & DescriptorSetLayoutCreateFlagBits::eEmbeddedImmutableSamplersEXT ) @@ -1245,12 +1245,12 @@ namespace VULKAN_HPP_NAMESPACE result += "Clustered | "; if ( value & SubgroupFeatureFlagBits::eQuad ) result += "Quad | "; + if ( value & SubgroupFeatureFlagBits::eRotate ) + result += "Rotate | "; + if ( value & SubgroupFeatureFlagBits::eRotateClustered ) + result += "RotateClustered | "; if ( value & 
SubgroupFeatureFlagBits::ePartitionedNV ) result += "PartitionedNV | "; - if ( value & SubgroupFeatureFlagBits::eRotateKHR ) - result += "RotateKHR | "; - if ( value & SubgroupFeatureFlagBits::eRotateClusteredKHR ) - result += "RotateClusteredKHR | "; return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } @@ -1804,8 +1804,6 @@ namespace VULKAN_HPP_NAMESPACE result += "BlitDst | "; if ( value & FormatFeatureFlagBits2::eSampledImageFilterLinear ) result += "SampledImageFilterLinear | "; - if ( value & FormatFeatureFlagBits2::eSampledImageFilterCubic ) - result += "SampledImageFilterCubic | "; if ( value & FormatFeatureFlagBits2::eTransferSrc ) result += "TransferSrc | "; if ( value & FormatFeatureFlagBits2::eTransferDst ) @@ -1832,6 +1830,10 @@ namespace VULKAN_HPP_NAMESPACE result += "StorageWriteWithoutFormat | "; if ( value & FormatFeatureFlagBits2::eSampledImageDepthComparison ) result += "SampledImageDepthComparison | "; + if ( value & FormatFeatureFlagBits2::eSampledImageFilterCubic ) + result += "SampledImageFilterCubic | "; + if ( value & FormatFeatureFlagBits2::eHostImageTransfer ) + result += "HostImageTransfer | "; if ( value & FormatFeatureFlagBits2::eVideoDecodeOutputKHR ) result += "VideoDecodeOutputKHR | "; if ( value & FormatFeatureFlagBits2::eVideoDecodeDpbKHR ) @@ -1842,8 +1844,6 @@ namespace VULKAN_HPP_NAMESPACE result += "FragmentDensityMapEXT | "; if ( value & FormatFeatureFlagBits2::eFragmentShadingRateAttachmentKHR ) result += "FragmentShadingRateAttachmentKHR | "; - if ( value & FormatFeatureFlagBits2::eHostImageTransferEXT ) - result += "HostImageTransferEXT | "; if ( value & FormatFeatureFlagBits2::eVideoEncodeInputKHR ) result += "VideoEncodeInputKHR | "; if ( value & FormatFeatureFlagBits2::eVideoEncodeDpbKHR ) @@ -1872,6 +1872,180 @@ namespace VULKAN_HPP_NAMESPACE return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } + //=== VK_VERSION_1_4 === + + VULKAN_HPP_INLINE std::string to_string( MemoryUnmapFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & MemoryUnmapFlagBits::eReserveEXT ) + result += "ReserveEXT | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( PipelineCreateFlags2 value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & PipelineCreateFlagBits2::eDisableOptimization ) + result += "DisableOptimization | "; + if ( value & PipelineCreateFlagBits2::eAllowDerivatives ) + result += "AllowDerivatives | "; + if ( value & PipelineCreateFlagBits2::eDerivative ) + result += "Derivative | "; + if ( value & PipelineCreateFlagBits2::eViewIndexFromDeviceIndex ) + result += "ViewIndexFromDeviceIndex | "; + if ( value & PipelineCreateFlagBits2::eDispatchBase ) + result += "DispatchBase | "; + if ( value & PipelineCreateFlagBits2::eFailOnPipelineCompileRequired ) + result += "FailOnPipelineCompileRequired | "; + if ( value & PipelineCreateFlagBits2::eEarlyReturnOnFailure ) + result += "EarlyReturnOnFailure | "; + if ( value & PipelineCreateFlagBits2::eNoProtectedAccess ) + result += "NoProtectedAccess | "; + if ( value & PipelineCreateFlagBits2::eProtectedAccessOnly ) + result += "ProtectedAccessOnly | "; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + if ( value & PipelineCreateFlagBits2::eExecutionGraphAMDX ) + result += "ExecutionGraphAMDX | "; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + if ( value & PipelineCreateFlagBits2::eEnableLegacyDitheringEXT ) + result += "EnableLegacyDitheringEXT | "; + if ( value & 
PipelineCreateFlagBits2::eDeferCompileNV ) + result += "DeferCompileNV | "; + if ( value & PipelineCreateFlagBits2::eCaptureStatisticsKHR ) + result += "CaptureStatisticsKHR | "; + if ( value & PipelineCreateFlagBits2::eCaptureInternalRepresentationsKHR ) + result += "CaptureInternalRepresentationsKHR | "; + if ( value & PipelineCreateFlagBits2::eLinkTimeOptimizationEXT ) + result += "LinkTimeOptimizationEXT | "; + if ( value & PipelineCreateFlagBits2::eRetainLinkTimeOptimizationInfoEXT ) + result += "RetainLinkTimeOptimizationInfoEXT | "; + if ( value & PipelineCreateFlagBits2::eLibraryKHR ) + result += "LibraryKHR | "; + if ( value & PipelineCreateFlagBits2::eRayTracingSkipTrianglesKHR ) + result += "RayTracingSkipTrianglesKHR | "; + if ( value & PipelineCreateFlagBits2::eRayTracingSkipAabbsKHR ) + result += "RayTracingSkipAabbsKHR | "; + if ( value & PipelineCreateFlagBits2::eRayTracingNoNullAnyHitShadersKHR ) + result += "RayTracingNoNullAnyHitShadersKHR | "; + if ( value & PipelineCreateFlagBits2::eRayTracingNoNullClosestHitShadersKHR ) + result += "RayTracingNoNullClosestHitShadersKHR | "; + if ( value & PipelineCreateFlagBits2::eRayTracingNoNullMissShadersKHR ) + result += "RayTracingNoNullMissShadersKHR | "; + if ( value & PipelineCreateFlagBits2::eRayTracingNoNullIntersectionShadersKHR ) + result += "RayTracingNoNullIntersectionShadersKHR | "; + if ( value & PipelineCreateFlagBits2::eRayTracingShaderGroupHandleCaptureReplayKHR ) + result += "RayTracingShaderGroupHandleCaptureReplayKHR | "; + if ( value & PipelineCreateFlagBits2::eIndirectBindableNV ) + result += "IndirectBindableNV | "; + if ( value & PipelineCreateFlagBits2::eRayTracingAllowMotionNV ) + result += "RayTracingAllowMotionNV | "; + if ( value & PipelineCreateFlagBits2::eRenderingFragmentShadingRateAttachmentKHR ) + result += "RenderingFragmentShadingRateAttachmentKHR | "; + if ( value & PipelineCreateFlagBits2::eRenderingFragmentDensityMapAttachmentEXT ) + result += "RenderingFragmentDensityMapAttachmentEXT | "; + if ( value & PipelineCreateFlagBits2::eRayTracingOpacityMicromapEXT ) + result += "RayTracingOpacityMicromapEXT | "; + if ( value & PipelineCreateFlagBits2::eColorAttachmentFeedbackLoopEXT ) + result += "ColorAttachmentFeedbackLoopEXT | "; + if ( value & PipelineCreateFlagBits2::eDepthStencilAttachmentFeedbackLoopEXT ) + result += "DepthStencilAttachmentFeedbackLoopEXT | "; + if ( value & PipelineCreateFlagBits2::eRayTracingDisplacementMicromapNV ) + result += "RayTracingDisplacementMicromapNV | "; + if ( value & PipelineCreateFlagBits2::eDescriptorBufferEXT ) + result += "DescriptorBufferEXT | "; + if ( value & PipelineCreateFlagBits2::eCaptureDataKHR ) + result += "CaptureDataKHR | "; + if ( value & PipelineCreateFlagBits2::eIndirectBindableEXT ) + result += "IndirectBindableEXT | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( BufferUsageFlags2 value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & BufferUsageFlagBits2::eTransferSrc ) + result += "TransferSrc | "; + if ( value & BufferUsageFlagBits2::eTransferDst ) + result += "TransferDst | "; + if ( value & BufferUsageFlagBits2::eUniformTexelBuffer ) + result += "UniformTexelBuffer | "; + if ( value & BufferUsageFlagBits2::eStorageTexelBuffer ) + result += "StorageTexelBuffer | "; + if ( value & BufferUsageFlagBits2::eUniformBuffer ) + result += "UniformBuffer | "; + if ( value & BufferUsageFlagBits2::eStorageBuffer ) + result += "StorageBuffer | "; + if ( 
value & BufferUsageFlagBits2::eIndexBuffer ) + result += "IndexBuffer | "; + if ( value & BufferUsageFlagBits2::eVertexBuffer ) + result += "VertexBuffer | "; + if ( value & BufferUsageFlagBits2::eIndirectBuffer ) + result += "IndirectBuffer | "; + if ( value & BufferUsageFlagBits2::eShaderDeviceAddress ) + result += "ShaderDeviceAddress | "; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + if ( value & BufferUsageFlagBits2::eExecutionGraphScratchAMDX ) + result += "ExecutionGraphScratchAMDX | "; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + if ( value & BufferUsageFlagBits2::eConditionalRenderingEXT ) + result += "ConditionalRenderingEXT | "; + if ( value & BufferUsageFlagBits2::eShaderBindingTableKHR ) + result += "ShaderBindingTableKHR | "; + if ( value & BufferUsageFlagBits2::eTransformFeedbackBufferEXT ) + result += "TransformFeedbackBufferEXT | "; + if ( value & BufferUsageFlagBits2::eTransformFeedbackCounterBufferEXT ) + result += "TransformFeedbackCounterBufferEXT | "; + if ( value & BufferUsageFlagBits2::eVideoDecodeSrcKHR ) + result += "VideoDecodeSrcKHR | "; + if ( value & BufferUsageFlagBits2::eVideoDecodeDstKHR ) + result += "VideoDecodeDstKHR | "; + if ( value & BufferUsageFlagBits2::eVideoEncodeDstKHR ) + result += "VideoEncodeDstKHR | "; + if ( value & BufferUsageFlagBits2::eVideoEncodeSrcKHR ) + result += "VideoEncodeSrcKHR | "; + if ( value & BufferUsageFlagBits2::eAccelerationStructureBuildInputReadOnlyKHR ) + result += "AccelerationStructureBuildInputReadOnlyKHR | "; + if ( value & BufferUsageFlagBits2::eAccelerationStructureStorageKHR ) + result += "AccelerationStructureStorageKHR | "; + if ( value & BufferUsageFlagBits2::eSamplerDescriptorBufferEXT ) + result += "SamplerDescriptorBufferEXT | "; + if ( value & BufferUsageFlagBits2::eResourceDescriptorBufferEXT ) + result += "ResourceDescriptorBufferEXT | "; + if ( value & BufferUsageFlagBits2::ePushDescriptorsDescriptorBufferEXT ) + result += "PushDescriptorsDescriptorBufferEXT | "; + if ( value & BufferUsageFlagBits2::eMicromapBuildInputReadOnlyEXT ) + result += "MicromapBuildInputReadOnlyEXT | "; + if ( value & BufferUsageFlagBits2::eMicromapStorageEXT ) + result += "MicromapStorageEXT | "; + if ( value & BufferUsageFlagBits2::ePreprocessBufferEXT ) + result += "PreprocessBufferEXT | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + + VULKAN_HPP_INLINE std::string to_string( HostImageCopyFlags value ) + { + if ( !value ) + return "{}"; + + std::string result; + if ( value & HostImageCopyFlagBits::eMemcpy ) + result += "Memcpy | "; + + return "{ " + result.substr( 0, result.size() - 3 ) + " }"; + } + //=== VK_KHR_surface === VULKAN_HPP_INLINE std::string to_string( CompositeAlphaFlagsKHR value ) @@ -2839,34 +3013,6 @@ namespace VULKAN_HPP_NAMESPACE return "{}"; } - //=== VK_EXT_host_image_copy === - - VULKAN_HPP_INLINE std::string to_string( HostImageCopyFlagsEXT value ) - { - if ( !value ) - return "{}"; - - std::string result; - if ( value & HostImageCopyFlagBitsEXT::eMemcpy ) - result += "Memcpy | "; - - return "{ " + result.substr( 0, result.size() - 3 ) + " }"; - } - - //=== VK_KHR_map_memory2 === - - VULKAN_HPP_INLINE std::string to_string( MemoryUnmapFlagsKHR value ) - { - if ( !value ) - return "{}"; - - std::string result; - if ( value & MemoryUnmapFlagBitsKHR::eReserveEXT ) - result += "ReserveEXT | "; - - return "{ " + result.substr( 0, result.size() - 3 ) + " }"; - } - //=== VK_EXT_surface_maintenance1 === VULKAN_HPP_INLINE std::string to_string( PresentScalingFlagsEXT value ) @@ -2964,13 
+3110,10 @@ namespace VULKAN_HPP_NAMESPACE result += "PrecedingExternallyEncodedBytes | "; if ( value & VideoEncodeCapabilityFlagBitsKHR::eInsufficientBitstreamBufferRangeDetection ) result += "InsufficientBitstreamBufferRangeDetection | "; -<<<<<<< HEAD -======= if ( value & VideoEncodeCapabilityFlagBitsKHR::eQuantizationDeltaMap ) result += "QuantizationDeltaMap | "; if ( value & VideoEncodeCapabilityFlagBitsKHR::eEmphasisMap ) result += "EmphasisMap | "; ->>>>>>> indev return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } @@ -3412,156 +3555,6 @@ namespace VULKAN_HPP_NAMESPACE return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } - //=== VK_KHR_maintenance5 === - - VULKAN_HPP_INLINE std::string to_string( PipelineCreateFlags2KHR value ) - { - if ( !value ) - return "{}"; - - std::string result; - if ( value & PipelineCreateFlagBits2KHR::eDisableOptimization ) - result += "DisableOptimization | "; - if ( value & PipelineCreateFlagBits2KHR::eAllowDerivatives ) - result += "AllowDerivatives | "; - if ( value & PipelineCreateFlagBits2KHR::eDerivative ) - result += "Derivative | "; -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - if ( value & PipelineCreateFlagBits2KHR::eExecutionGraphAMDX ) - result += "ExecutionGraphAMDX | "; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - if ( value & PipelineCreateFlagBits2KHR::eEnableLegacyDitheringEXT ) - result += "EnableLegacyDitheringEXT | "; - if ( value & PipelineCreateFlagBits2KHR::eViewIndexFromDeviceIndex ) - result += "ViewIndexFromDeviceIndex | "; - if ( value & PipelineCreateFlagBits2KHR::eDispatchBase ) - result += "DispatchBase | "; - if ( value & PipelineCreateFlagBits2KHR::eDeferCompileNV ) - result += "DeferCompileNV | "; - if ( value & PipelineCreateFlagBits2KHR::eCaptureStatistics ) - result += "CaptureStatistics | "; - if ( value & PipelineCreateFlagBits2KHR::eCaptureInternalRepresentations ) - result += "CaptureInternalRepresentations | "; - if ( value & PipelineCreateFlagBits2KHR::eFailOnPipelineCompileRequired ) - result += "FailOnPipelineCompileRequired | "; - if ( value & PipelineCreateFlagBits2KHR::eEarlyReturnOnFailure ) - result += "EarlyReturnOnFailure | "; - if ( value & PipelineCreateFlagBits2KHR::eLinkTimeOptimizationEXT ) - result += "LinkTimeOptimizationEXT | "; - if ( value & PipelineCreateFlagBits2KHR::eRetainLinkTimeOptimizationInfoEXT ) - result += "RetainLinkTimeOptimizationInfoEXT | "; - if ( value & PipelineCreateFlagBits2KHR::eLibrary ) - result += "Library | "; - if ( value & PipelineCreateFlagBits2KHR::eRayTracingSkipTriangles ) - result += "RayTracingSkipTriangles | "; - if ( value & PipelineCreateFlagBits2KHR::eRayTracingSkipAabbs ) - result += "RayTracingSkipAabbs | "; - if ( value & PipelineCreateFlagBits2KHR::eRayTracingNoNullAnyHitShaders ) - result += "RayTracingNoNullAnyHitShaders | "; - if ( value & PipelineCreateFlagBits2KHR::eRayTracingNoNullClosestHitShaders ) - result += "RayTracingNoNullClosestHitShaders | "; - if ( value & PipelineCreateFlagBits2KHR::eRayTracingNoNullMissShaders ) - result += "RayTracingNoNullMissShaders | "; - if ( value & PipelineCreateFlagBits2KHR::eRayTracingNoNullIntersectionShaders ) - result += "RayTracingNoNullIntersectionShaders | "; - if ( value & PipelineCreateFlagBits2KHR::eRayTracingShaderGroupHandleCaptureReplay ) - result += "RayTracingShaderGroupHandleCaptureReplay | "; - if ( value & PipelineCreateFlagBits2KHR::eIndirectBindableNV ) - result += "IndirectBindableNV | "; - if ( value & PipelineCreateFlagBits2KHR::eRayTracingAllowMotionNV ) - result += 
"RayTracingAllowMotionNV | "; - if ( value & PipelineCreateFlagBits2KHR::eRenderingFragmentShadingRateAttachment ) - result += "RenderingFragmentShadingRateAttachment | "; - if ( value & PipelineCreateFlagBits2KHR::eRenderingFragmentDensityMapAttachmentEXT ) - result += "RenderingFragmentDensityMapAttachmentEXT | "; - if ( value & PipelineCreateFlagBits2KHR::eRayTracingOpacityMicromapEXT ) - result += "RayTracingOpacityMicromapEXT | "; - if ( value & PipelineCreateFlagBits2KHR::eColorAttachmentFeedbackLoopEXT ) - result += "ColorAttachmentFeedbackLoopEXT | "; - if ( value & PipelineCreateFlagBits2KHR::eDepthStencilAttachmentFeedbackLoopEXT ) - result += "DepthStencilAttachmentFeedbackLoopEXT | "; - if ( value & PipelineCreateFlagBits2KHR::eNoProtectedAccessEXT ) - result += "NoProtectedAccessEXT | "; - if ( value & PipelineCreateFlagBits2KHR::eProtectedAccessOnlyEXT ) - result += "ProtectedAccessOnlyEXT | "; - if ( value & PipelineCreateFlagBits2KHR::eRayTracingDisplacementMicromapNV ) - result += "RayTracingDisplacementMicromapNV | "; - if ( value & PipelineCreateFlagBits2KHR::eDescriptorBufferEXT ) - result += "DescriptorBufferEXT | "; - if ( value & PipelineCreateFlagBits2KHR::eCaptureData ) - result += "CaptureData | "; - if ( value & PipelineCreateFlagBits2KHR::eIndirectBindableEXT ) - result += "IndirectBindableEXT | "; - - return "{ " + result.substr( 0, result.size() - 3 ) + " }"; - } - - VULKAN_HPP_INLINE std::string to_string( BufferUsageFlags2KHR value ) - { - if ( !value ) - return "{}"; - - std::string result; - if ( value & BufferUsageFlagBits2KHR::eTransferSrc ) - result += "TransferSrc | "; - if ( value & BufferUsageFlagBits2KHR::eTransferDst ) - result += "TransferDst | "; - if ( value & BufferUsageFlagBits2KHR::eUniformTexelBuffer ) - result += "UniformTexelBuffer | "; - if ( value & BufferUsageFlagBits2KHR::eStorageTexelBuffer ) - result += "StorageTexelBuffer | "; - if ( value & BufferUsageFlagBits2KHR::eUniformBuffer ) - result += "UniformBuffer | "; - if ( value & BufferUsageFlagBits2KHR::eStorageBuffer ) - result += "StorageBuffer | "; - if ( value & BufferUsageFlagBits2KHR::eIndexBuffer ) - result += "IndexBuffer | "; - if ( value & BufferUsageFlagBits2KHR::eVertexBuffer ) - result += "VertexBuffer | "; - if ( value & BufferUsageFlagBits2KHR::eIndirectBuffer ) - result += "IndirectBuffer | "; -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - if ( value & BufferUsageFlagBits2KHR::eExecutionGraphScratchAMDX ) - result += "ExecutionGraphScratchAMDX | "; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - if ( value & BufferUsageFlagBits2KHR::eConditionalRenderingEXT ) - result += "ConditionalRenderingEXT | "; - if ( value & BufferUsageFlagBits2KHR::eShaderBindingTable ) - result += "ShaderBindingTable | "; - if ( value & BufferUsageFlagBits2KHR::eTransformFeedbackBufferEXT ) - result += "TransformFeedbackBufferEXT | "; - if ( value & BufferUsageFlagBits2KHR::eTransformFeedbackCounterBufferEXT ) - result += "TransformFeedbackCounterBufferEXT | "; - if ( value & BufferUsageFlagBits2KHR::eVideoDecodeSrc ) - result += "VideoDecodeSrc | "; - if ( value & BufferUsageFlagBits2KHR::eVideoDecodeDst ) - result += "VideoDecodeDst | "; - if ( value & BufferUsageFlagBits2KHR::eVideoEncodeDst ) - result += "VideoEncodeDst | "; - if ( value & BufferUsageFlagBits2KHR::eVideoEncodeSrc ) - result += "VideoEncodeSrc | "; - if ( value & BufferUsageFlagBits2KHR::eShaderDeviceAddress ) - result += "ShaderDeviceAddress | "; - if ( value & BufferUsageFlagBits2KHR::eAccelerationStructureBuildInputReadOnly ) - 
result += "AccelerationStructureBuildInputReadOnly | "; - if ( value & BufferUsageFlagBits2KHR::eAccelerationStructureStorage ) - result += "AccelerationStructureStorage | "; - if ( value & BufferUsageFlagBits2KHR::eSamplerDescriptorBufferEXT ) - result += "SamplerDescriptorBufferEXT | "; - if ( value & BufferUsageFlagBits2KHR::eResourceDescriptorBufferEXT ) - result += "ResourceDescriptorBufferEXT | "; - if ( value & BufferUsageFlagBits2KHR::ePushDescriptorsDescriptorBufferEXT ) - result += "PushDescriptorsDescriptorBufferEXT | "; - if ( value & BufferUsageFlagBits2KHR::eMicromapBuildInputReadOnlyEXT ) - result += "MicromapBuildInputReadOnlyEXT | "; - if ( value & BufferUsageFlagBits2KHR::eMicromapStorageEXT ) - result += "MicromapStorageEXT | "; - if ( value & BufferUsageFlagBits2KHR::ePreprocessBufferEXT ) - result += "PreprocessBufferEXT | "; - - return "{ " + result.substr( 0, result.size() - 3 ) + " }"; - } - //=== VK_EXT_shader_object === VULKAN_HPP_INLINE std::string to_string( ShaderCreateFlagsEXT value ) @@ -3590,8 +3583,6 @@ namespace VULKAN_HPP_NAMESPACE return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } -<<<<<<< HEAD -======= //=== VK_KHR_video_encode_av1 === VULKAN_HPP_INLINE std::string to_string( VideoEncodeAV1CapabilityFlagsKHR value ) @@ -3664,7 +3655,6 @@ namespace VULKAN_HPP_NAMESPACE return "{ " + result.substr( 0, result.size() - 3 ) + " }"; } ->>>>>>> indev //=== VK_EXT_device_generated_commands === VULKAN_HPP_INLINE std::string to_string( IndirectCommandsLayoutUsageFlagsEXT value ) @@ -3740,6 +3730,7 @@ namespace VULKAN_HPP_NAMESPACE case Result::eErrorFragmentation: return "ErrorFragmentation"; case Result::eErrorInvalidOpaqueCaptureAddress: return "ErrorInvalidOpaqueCaptureAddress"; case Result::ePipelineCompileRequired: return "PipelineCompileRequired"; + case Result::eErrorNotPermitted: return "ErrorNotPermitted"; case Result::eErrorSurfaceLostKHR: return "ErrorSurfaceLostKHR"; case Result::eErrorNativeWindowInUseKHR: return "ErrorNativeWindowInUseKHR"; case Result::eSuboptimalKHR: return "SuboptimalKHR"; @@ -3754,7 +3745,6 @@ namespace VULKAN_HPP_NAMESPACE case Result::eErrorVideoProfileCodecNotSupportedKHR: return "ErrorVideoProfileCodecNotSupportedKHR"; case Result::eErrorVideoStdVersionNotSupportedKHR: return "ErrorVideoStdVersionNotSupportedKHR"; case Result::eErrorInvalidDrmFormatModifierPlaneLayoutEXT: return "ErrorInvalidDrmFormatModifierPlaneLayoutEXT"; - case Result::eErrorNotPermittedKHR: return "ErrorNotPermittedKHR"; #if defined( VK_USE_PLATFORM_WIN32_KHR ) case Result::eErrorFullScreenExclusiveModeLostEXT: return "ErrorFullScreenExclusiveModeLostEXT"; #endif /*VK_USE_PLATFORM_WIN32_KHR*/ @@ -3991,6 +3981,56 @@ namespace VULKAN_HPP_NAMESPACE case StructureType::ePhysicalDeviceMaintenance4Properties: return "PhysicalDeviceMaintenance4Properties"; case StructureType::eDeviceBufferMemoryRequirements: return "DeviceBufferMemoryRequirements"; case StructureType::eDeviceImageMemoryRequirements: return "DeviceImageMemoryRequirements"; + case StructureType::ePhysicalDeviceVulkan14Features: return "PhysicalDeviceVulkan14Features"; + case StructureType::ePhysicalDeviceVulkan14Properties: return "PhysicalDeviceVulkan14Properties"; + case StructureType::eDeviceQueueGlobalPriorityCreateInfo: return "DeviceQueueGlobalPriorityCreateInfo"; + case StructureType::ePhysicalDeviceGlobalPriorityQueryFeatures: return "PhysicalDeviceGlobalPriorityQueryFeatures"; + case StructureType::eQueueFamilyGlobalPriorityProperties: return 
"QueueFamilyGlobalPriorityProperties"; + case StructureType::ePhysicalDeviceShaderSubgroupRotateFeatures: return "PhysicalDeviceShaderSubgroupRotateFeatures"; + case StructureType::ePhysicalDeviceShaderFloatControls2Features: return "PhysicalDeviceShaderFloatControls2Features"; + case StructureType::ePhysicalDeviceShaderExpectAssumeFeatures: return "PhysicalDeviceShaderExpectAssumeFeatures"; + case StructureType::ePhysicalDeviceLineRasterizationFeatures: return "PhysicalDeviceLineRasterizationFeatures"; + case StructureType::ePipelineRasterizationLineStateCreateInfo: return "PipelineRasterizationLineStateCreateInfo"; + case StructureType::ePhysicalDeviceLineRasterizationProperties: return "PhysicalDeviceLineRasterizationProperties"; + case StructureType::ePhysicalDeviceVertexAttributeDivisorProperties: return "PhysicalDeviceVertexAttributeDivisorProperties"; + case StructureType::ePipelineVertexInputDivisorStateCreateInfo: return "PipelineVertexInputDivisorStateCreateInfo"; + case StructureType::ePhysicalDeviceVertexAttributeDivisorFeatures: return "PhysicalDeviceVertexAttributeDivisorFeatures"; + case StructureType::ePhysicalDeviceIndexTypeUint8Features: return "PhysicalDeviceIndexTypeUint8Features"; + case StructureType::eMemoryMapInfo: return "MemoryMapInfo"; + case StructureType::eMemoryUnmapInfo: return "MemoryUnmapInfo"; + case StructureType::ePhysicalDeviceMaintenance5Features: return "PhysicalDeviceMaintenance5Features"; + case StructureType::ePhysicalDeviceMaintenance5Properties: return "PhysicalDeviceMaintenance5Properties"; + case StructureType::eRenderingAreaInfo: return "RenderingAreaInfo"; + case StructureType::eDeviceImageSubresourceInfo: return "DeviceImageSubresourceInfo"; + case StructureType::eSubresourceLayout2: return "SubresourceLayout2"; + case StructureType::eImageSubresource2: return "ImageSubresource2"; + case StructureType::ePipelineCreateFlags2CreateInfo: return "PipelineCreateFlags2CreateInfo"; + case StructureType::eBufferUsageFlags2CreateInfo: return "BufferUsageFlags2CreateInfo"; + case StructureType::ePhysicalDevicePushDescriptorProperties: return "PhysicalDevicePushDescriptorProperties"; + case StructureType::ePhysicalDeviceDynamicRenderingLocalReadFeatures: return "PhysicalDeviceDynamicRenderingLocalReadFeatures"; + case StructureType::eRenderingAttachmentLocationInfo: return "RenderingAttachmentLocationInfo"; + case StructureType::eRenderingInputAttachmentIndexInfo: return "RenderingInputAttachmentIndexInfo"; + case StructureType::ePhysicalDeviceMaintenance6Features: return "PhysicalDeviceMaintenance6Features"; + case StructureType::ePhysicalDeviceMaintenance6Properties: return "PhysicalDeviceMaintenance6Properties"; + case StructureType::eBindMemoryStatus: return "BindMemoryStatus"; + case StructureType::eBindDescriptorSetsInfo: return "BindDescriptorSetsInfo"; + case StructureType::ePushConstantsInfo: return "PushConstantsInfo"; + case StructureType::ePushDescriptorSetInfo: return "PushDescriptorSetInfo"; + case StructureType::ePushDescriptorSetWithTemplateInfo: return "PushDescriptorSetWithTemplateInfo"; + case StructureType::ePhysicalDevicePipelineProtectedAccessFeatures: return "PhysicalDevicePipelineProtectedAccessFeatures"; + case StructureType::ePipelineRobustnessCreateInfo: return "PipelineRobustnessCreateInfo"; + case StructureType::ePhysicalDevicePipelineRobustnessFeatures: return "PhysicalDevicePipelineRobustnessFeatures"; + case StructureType::ePhysicalDevicePipelineRobustnessProperties: return "PhysicalDevicePipelineRobustnessProperties"; + 
case StructureType::ePhysicalDeviceHostImageCopyFeatures: return "PhysicalDeviceHostImageCopyFeatures"; + case StructureType::ePhysicalDeviceHostImageCopyProperties: return "PhysicalDeviceHostImageCopyProperties"; + case StructureType::eMemoryToImageCopy: return "MemoryToImageCopy"; + case StructureType::eImageToMemoryCopy: return "ImageToMemoryCopy"; + case StructureType::eCopyImageToMemoryInfo: return "CopyImageToMemoryInfo"; + case StructureType::eCopyMemoryToImageInfo: return "CopyMemoryToImageInfo"; + case StructureType::eHostImageLayoutTransitionInfo: return "HostImageLayoutTransitionInfo"; + case StructureType::eCopyImageToImageInfo: return "CopyImageToImageInfo"; + case StructureType::eSubresourceHostMemcpySize: return "SubresourceHostMemcpySize"; + case StructureType::eHostImageCopyDevicePerformanceQuery: return "HostImageCopyDevicePerformanceQuery"; case StructureType::eSwapchainCreateInfoKHR: return "SwapchainCreateInfoKHR"; case StructureType::ePresentInfoKHR: return "PresentInfoKHR"; case StructureType::eDeviceGroupPresentCapabilitiesKHR: return "DeviceGroupPresentCapabilitiesKHR"; @@ -4106,9 +4146,6 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VK_USE_PLATFORM_VI_NN*/ case StructureType::eImageViewAstcDecodeModeEXT: return "ImageViewAstcDecodeModeEXT"; case StructureType::ePhysicalDeviceAstcDecodeFeaturesEXT: return "PhysicalDeviceAstcDecodeFeaturesEXT"; - case StructureType::ePipelineRobustnessCreateInfoEXT: return "PipelineRobustnessCreateInfoEXT"; - case StructureType::ePhysicalDevicePipelineRobustnessFeaturesEXT: return "PhysicalDevicePipelineRobustnessFeaturesEXT"; - case StructureType::ePhysicalDevicePipelineRobustnessPropertiesEXT: return "PhysicalDevicePipelineRobustnessPropertiesEXT"; #if defined( VK_USE_PLATFORM_WIN32_KHR ) case StructureType::eImportMemoryWin32HandleInfoKHR: return "ImportMemoryWin32HandleInfoKHR"; case StructureType::eExportMemoryWin32HandleInfoKHR: return "ExportMemoryWin32HandleInfoKHR"; @@ -4127,7 +4164,6 @@ namespace VULKAN_HPP_NAMESPACE #endif /*VK_USE_PLATFORM_WIN32_KHR*/ case StructureType::eImportSemaphoreFdInfoKHR: return "ImportSemaphoreFdInfoKHR"; case StructureType::eSemaphoreGetFdInfoKHR: return "SemaphoreGetFdInfoKHR"; - case StructureType::ePhysicalDevicePushDescriptorPropertiesKHR: return "PhysicalDevicePushDescriptorPropertiesKHR"; case StructureType::eCommandBufferInheritanceConditionalRenderingInfoEXT: return "CommandBufferInheritanceConditionalRenderingInfoEXT"; case StructureType::ePhysicalDeviceConditionalRenderingFeaturesEXT: return "PhysicalDeviceConditionalRenderingFeaturesEXT"; case StructureType::eConditionalRenderingBeginInfoEXT: return "ConditionalRenderingBeginInfoEXT"; @@ -4277,9 +4313,6 @@ namespace VULKAN_HPP_NAMESPACE case StructureType::eVideoDecodeH265ProfileInfoKHR: return "VideoDecodeH265ProfileInfoKHR"; case StructureType::eVideoDecodeH265PictureInfoKHR: return "VideoDecodeH265PictureInfoKHR"; case StructureType::eVideoDecodeH265DpbSlotInfoKHR: return "VideoDecodeH265DpbSlotInfoKHR"; - case StructureType::eDeviceQueueGlobalPriorityCreateInfoKHR: return "DeviceQueueGlobalPriorityCreateInfoKHR"; - case StructureType::ePhysicalDeviceGlobalPriorityQueryFeaturesKHR: return "PhysicalDeviceGlobalPriorityQueryFeaturesKHR"; - case StructureType::eQueueFamilyGlobalPriorityPropertiesKHR: return "QueueFamilyGlobalPriorityPropertiesKHR"; case StructureType::eDeviceMemoryOverallocationCreateInfoAMD: return "DeviceMemoryOverallocationCreateInfoAMD"; case StructureType::ePhysicalDeviceVertexAttributeDivisorPropertiesEXT: return 
"PhysicalDeviceVertexAttributeDivisorPropertiesEXT"; #if defined( VK_USE_PLATFORM_GGP ) @@ -4322,9 +4355,6 @@ namespace VULKAN_HPP_NAMESPACE case StructureType::eRenderingFragmentShadingRateAttachmentInfoKHR: return "RenderingFragmentShadingRateAttachmentInfoKHR"; case StructureType::ePhysicalDeviceShaderCoreProperties2AMD: return "PhysicalDeviceShaderCoreProperties2AMD"; case StructureType::ePhysicalDeviceCoherentMemoryFeaturesAMD: return "PhysicalDeviceCoherentMemoryFeaturesAMD"; - case StructureType::ePhysicalDeviceDynamicRenderingLocalReadFeaturesKHR: return "PhysicalDeviceDynamicRenderingLocalReadFeaturesKHR"; - case StructureType::eRenderingAttachmentLocationInfoKHR: return "RenderingAttachmentLocationInfoKHR"; - case StructureType::eRenderingInputAttachmentIndexInfoKHR: return "RenderingInputAttachmentIndexInfoKHR"; case StructureType::ePhysicalDeviceShaderImageAtomicInt64FeaturesEXT: return "PhysicalDeviceShaderImageAtomicInt64FeaturesEXT"; case StructureType::ePhysicalDeviceShaderQuadControlFeaturesKHR: return "PhysicalDeviceShaderQuadControlFeaturesKHR"; case StructureType::ePhysicalDeviceMemoryBudgetPropertiesEXT: return "PhysicalDeviceMemoryBudgetPropertiesEXT"; @@ -4361,18 +4391,6 @@ namespace VULKAN_HPP_NAMESPACE case StructureType::ePipelineExecutableInfoKHR: return "PipelineExecutableInfoKHR"; case StructureType::ePipelineExecutableStatisticKHR: return "PipelineExecutableStatisticKHR"; case StructureType::ePipelineExecutableInternalRepresentationKHR: return "PipelineExecutableInternalRepresentationKHR"; - case StructureType::ePhysicalDeviceHostImageCopyFeaturesEXT: return "PhysicalDeviceHostImageCopyFeaturesEXT"; - case StructureType::ePhysicalDeviceHostImageCopyPropertiesEXT: return "PhysicalDeviceHostImageCopyPropertiesEXT"; - case StructureType::eMemoryToImageCopyEXT: return "MemoryToImageCopyEXT"; - case StructureType::eImageToMemoryCopyEXT: return "ImageToMemoryCopyEXT"; - case StructureType::eCopyImageToMemoryInfoEXT: return "CopyImageToMemoryInfoEXT"; - case StructureType::eCopyMemoryToImageInfoEXT: return "CopyMemoryToImageInfoEXT"; - case StructureType::eHostImageLayoutTransitionInfoEXT: return "HostImageLayoutTransitionInfoEXT"; - case StructureType::eCopyImageToImageInfoEXT: return "CopyImageToImageInfoEXT"; - case StructureType::eSubresourceHostMemcpySizeEXT: return "SubresourceHostMemcpySizeEXT"; - case StructureType::eHostImageCopyDevicePerformanceQueryEXT: return "HostImageCopyDevicePerformanceQueryEXT"; - case StructureType::eMemoryMapInfoKHR: return "MemoryMapInfoKHR"; - case StructureType::eMemoryUnmapInfoKHR: return "MemoryUnmapInfoKHR"; case StructureType::ePhysicalDeviceMapMemoryPlacedFeaturesEXT: return "PhysicalDeviceMapMemoryPlacedFeaturesEXT"; case StructureType::ePhysicalDeviceMapMemoryPlacedPropertiesEXT: return "PhysicalDeviceMapMemoryPlacedPropertiesEXT"; case StructureType::eMemoryMapPlacedInfoEXT: return "MemoryMapPlacedInfoEXT"; @@ -4573,7 +4591,6 @@ namespace VULKAN_HPP_NAMESPACE case StructureType::eSamplerBorderColorComponentMappingCreateInfoEXT: return "SamplerBorderColorComponentMappingCreateInfoEXT"; case StructureType::ePhysicalDevicePageableDeviceLocalMemoryFeaturesEXT: return "PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT"; case StructureType::ePhysicalDeviceShaderCorePropertiesARM: return "PhysicalDeviceShaderCorePropertiesARM"; - case StructureType::ePhysicalDeviceShaderSubgroupRotateFeaturesKHR: return "PhysicalDeviceShaderSubgroupRotateFeaturesKHR"; case StructureType::eDeviceQueueShaderCoreControlCreateInfoARM: return 
"DeviceQueueShaderCoreControlCreateInfoARM"; case StructureType::ePhysicalDeviceSchedulingControlsFeaturesARM: return "PhysicalDeviceSchedulingControlsFeaturesARM"; case StructureType::ePhysicalDeviceSchedulingControlsPropertiesARM: return "PhysicalDeviceSchedulingControlsPropertiesARM"; @@ -4629,20 +4646,11 @@ namespace VULKAN_HPP_NAMESPACE case StructureType::eOpticalFlowExecuteInfoNV: return "OpticalFlowExecuteInfoNV"; case StructureType::eOpticalFlowSessionCreatePrivateDataInfoNV: return "OpticalFlowSessionCreatePrivateDataInfoNV"; case StructureType::ePhysicalDeviceLegacyDitheringFeaturesEXT: return "PhysicalDeviceLegacyDitheringFeaturesEXT"; - case StructureType::ePhysicalDevicePipelineProtectedAccessFeaturesEXT: return "PhysicalDevicePipelineProtectedAccessFeaturesEXT"; #if defined( VK_USE_PLATFORM_ANDROID_KHR ) case StructureType::ePhysicalDeviceExternalFormatResolveFeaturesANDROID: return "PhysicalDeviceExternalFormatResolveFeaturesANDROID"; case StructureType::ePhysicalDeviceExternalFormatResolvePropertiesANDROID: return "PhysicalDeviceExternalFormatResolvePropertiesANDROID"; case StructureType::eAndroidHardwareBufferFormatResolvePropertiesANDROID: return "AndroidHardwareBufferFormatResolvePropertiesANDROID"; #endif /*VK_USE_PLATFORM_ANDROID_KHR*/ - case StructureType::ePhysicalDeviceMaintenance5FeaturesKHR: return "PhysicalDeviceMaintenance5FeaturesKHR"; - case StructureType::ePhysicalDeviceMaintenance5PropertiesKHR: return "PhysicalDeviceMaintenance5PropertiesKHR"; - case StructureType::eRenderingAreaInfoKHR: return "RenderingAreaInfoKHR"; - case StructureType::eDeviceImageSubresourceInfoKHR: return "DeviceImageSubresourceInfoKHR"; - case StructureType::eSubresourceLayout2KHR: return "SubresourceLayout2KHR"; - case StructureType::eImageSubresource2KHR: return "ImageSubresource2KHR"; - case StructureType::ePipelineCreateFlags2CreateInfoKHR: return "PipelineCreateFlags2CreateInfoKHR"; - case StructureType::eBufferUsageFlags2CreateInfoKHR: return "BufferUsageFlags2CreateInfoKHR"; case StructureType::ePhysicalDeviceAntiLagFeaturesAMD: return "PhysicalDeviceAntiLagFeaturesAMD"; case StructureType::eAntiLagDataAMD: return "AntiLagDataAMD"; case StructureType::eAntiLagPresentationInfoAMD: return "AntiLagPresentationInfoAMD"; @@ -4723,10 +4731,6 @@ namespace VULKAN_HPP_NAMESPACE case StructureType::eSamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM: return "SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM"; case StructureType::ePhysicalDeviceCubicClampFeaturesQCOM: return "PhysicalDeviceCubicClampFeaturesQCOM"; case StructureType::ePhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT: return "PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT"; - case StructureType::ePhysicalDeviceVertexAttributeDivisorPropertiesKHR: return "PhysicalDeviceVertexAttributeDivisorPropertiesKHR"; - case StructureType::ePipelineVertexInputDivisorStateCreateInfoKHR: return "PipelineVertexInputDivisorStateCreateInfoKHR"; - case StructureType::ePhysicalDeviceVertexAttributeDivisorFeaturesKHR: return "PhysicalDeviceVertexAttributeDivisorFeaturesKHR"; - case StructureType::ePhysicalDeviceShaderFloatControls2FeaturesKHR: return "PhysicalDeviceShaderFloatControls2FeaturesKHR"; #if defined( VK_USE_PLATFORM_SCREEN_QNX ) case StructureType::eScreenBufferPropertiesQNX: return "ScreenBufferPropertiesQNX"; case StructureType::eScreenBufferFormatPropertiesQNX: return "ScreenBufferFormatPropertiesQNX"; @@ -4735,19 +4739,7 @@ namespace VULKAN_HPP_NAMESPACE case 
StructureType::ePhysicalDeviceExternalMemoryScreenBufferFeaturesQNX: return "PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX"; #endif /*VK_USE_PLATFORM_SCREEN_QNX*/ case StructureType::ePhysicalDeviceLayeredDriverPropertiesMSFT: return "PhysicalDeviceLayeredDriverPropertiesMSFT"; - case StructureType::ePhysicalDeviceIndexTypeUint8FeaturesKHR: return "PhysicalDeviceIndexTypeUint8FeaturesKHR"; - case StructureType::ePhysicalDeviceLineRasterizationFeaturesKHR: return "PhysicalDeviceLineRasterizationFeaturesKHR"; - case StructureType::ePipelineRasterizationLineStateCreateInfoKHR: return "PipelineRasterizationLineStateCreateInfoKHR"; - case StructureType::ePhysicalDeviceLineRasterizationPropertiesKHR: return "PhysicalDeviceLineRasterizationPropertiesKHR"; case StructureType::eCalibratedTimestampInfoKHR: return "CalibratedTimestampInfoKHR"; - case StructureType::ePhysicalDeviceShaderExpectAssumeFeaturesKHR: return "PhysicalDeviceShaderExpectAssumeFeaturesKHR"; - case StructureType::ePhysicalDeviceMaintenance6FeaturesKHR: return "PhysicalDeviceMaintenance6FeaturesKHR"; - case StructureType::ePhysicalDeviceMaintenance6PropertiesKHR: return "PhysicalDeviceMaintenance6PropertiesKHR"; - case StructureType::eBindMemoryStatusKHR: return "BindMemoryStatusKHR"; - case StructureType::eBindDescriptorSetsInfoKHR: return "BindDescriptorSetsInfoKHR"; - case StructureType::ePushConstantsInfoKHR: return "PushConstantsInfoKHR"; - case StructureType::ePushDescriptorSetInfoKHR: return "PushDescriptorSetInfoKHR"; - case StructureType::ePushDescriptorSetWithTemplateInfoKHR: return "PushDescriptorSetWithTemplateInfoKHR"; case StructureType::eSetDescriptorBufferOffsetsInfoEXT: return "SetDescriptorBufferOffsetsInfoEXT"; case StructureType::eBindDescriptorBufferEmbeddedSamplersInfoEXT: return "BindDescriptorBufferEmbeddedSamplersInfoEXT"; case StructureType::ePhysicalDeviceDescriptorPoolOverallocationFeaturesNV: return "PhysicalDeviceDescriptorPoolOverallocationFeaturesNV"; @@ -4798,10 +4790,7 @@ namespace VULKAN_HPP_NAMESPACE case StructureType::ePhysicalDeviceCooperativeMatrix2FeaturesNV: return "PhysicalDeviceCooperativeMatrix2FeaturesNV"; case StructureType::eCooperativeMatrixFlexibleDimensionsPropertiesNV: return "CooperativeMatrixFlexibleDimensionsPropertiesNV"; case StructureType::ePhysicalDeviceCooperativeMatrix2PropertiesNV: return "PhysicalDeviceCooperativeMatrix2PropertiesNV"; -<<<<<<< HEAD -======= case StructureType::ePhysicalDeviceVertexAttributeRobustnessFeaturesEXT: return "PhysicalDeviceVertexAttributeRobustnessFeaturesEXT"; ->>>>>>> indev default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } @@ -5140,6 +5129,8 @@ namespace VULKAN_HPP_NAMESPACE case Format::eAstc10x10SfloatBlock: return "Astc10x10SfloatBlock"; case Format::eAstc12x10SfloatBlock: return "Astc12x10SfloatBlock"; case Format::eAstc12x12SfloatBlock: return "Astc12x12SfloatBlock"; + case Format::eA1B5G5R5UnormPack16: return "A1B5G5R5UnormPack16"; + case Format::eA8Unorm: return "A8Unorm"; case Format::ePvrtc12BppUnormBlockIMG: return "Pvrtc12BppUnormBlockIMG"; case Format::ePvrtc14BppUnormBlockIMG: return "Pvrtc14BppUnormBlockIMG"; case Format::ePvrtc22BppUnormBlockIMG: return "Pvrtc22BppUnormBlockIMG"; @@ -5149,8 +5140,6 @@ namespace VULKAN_HPP_NAMESPACE case Format::ePvrtc22BppSrgbBlockIMG: return "Pvrtc22BppSrgbBlockIMG"; case Format::ePvrtc24BppSrgbBlockIMG: return "Pvrtc24BppSrgbBlockIMG"; case Format::eR16G16Sfixed5NV: return "R16G16Sfixed5NV"; - case Format::eA1B5G5R5UnormPack16KHR: 
return "A1B5G5R5UnormPack16KHR"; - case Format::eA8UnormKHR: return "A8UnormKHR"; default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } @@ -5257,12 +5246,12 @@ namespace VULKAN_HPP_NAMESPACE case ImageUsageFlagBits::eDepthStencilAttachment: return "DepthStencilAttachment"; case ImageUsageFlagBits::eTransientAttachment: return "TransientAttachment"; case ImageUsageFlagBits::eInputAttachment: return "InputAttachment"; + case ImageUsageFlagBits::eHostTransfer: return "HostTransfer"; case ImageUsageFlagBits::eVideoDecodeDstKHR: return "VideoDecodeDstKHR"; case ImageUsageFlagBits::eVideoDecodeSrcKHR: return "VideoDecodeSrcKHR"; case ImageUsageFlagBits::eVideoDecodeDpbKHR: return "VideoDecodeDpbKHR"; case ImageUsageFlagBits::eFragmentDensityMapEXT: return "FragmentDensityMapEXT"; case ImageUsageFlagBits::eFragmentShadingRateAttachmentKHR: return "FragmentShadingRateAttachmentKHR"; - case ImageUsageFlagBits::eHostTransferEXT: return "HostTransferEXT"; case ImageUsageFlagBits::eVideoEncodeDstKHR: return "VideoEncodeDstKHR"; case ImageUsageFlagBits::eVideoEncodeSrcKHR: return "VideoEncodeSrcKHR"; case ImageUsageFlagBits::eVideoEncodeDpbKHR: return "VideoEncodeDpbKHR"; @@ -5651,6 +5640,7 @@ namespace VULKAN_HPP_NAMESPACE case ImageLayout::eStencilReadOnlyOptimal: return "StencilReadOnlyOptimal"; case ImageLayout::eReadOnlyOptimal: return "ReadOnlyOptimal"; case ImageLayout::eAttachmentOptimal: return "AttachmentOptimal"; + case ImageLayout::eRenderingLocalRead: return "RenderingLocalRead"; case ImageLayout::ePresentSrcKHR: return "PresentSrcKHR"; case ImageLayout::eVideoDecodeDstKHR: return "VideoDecodeDstKHR"; case ImageLayout::eVideoDecodeSrcKHR: return "VideoDecodeSrcKHR"; @@ -5658,7 +5648,6 @@ namespace VULKAN_HPP_NAMESPACE case ImageLayout::eSharedPresentKHR: return "SharedPresentKHR"; case ImageLayout::eFragmentDensityMapOptimalEXT: return "FragmentDensityMapOptimalEXT"; case ImageLayout::eFragmentShadingRateAttachmentOptimalKHR: return "FragmentShadingRateAttachmentOptimalKHR"; - case ImageLayout::eRenderingLocalReadKHR: return "RenderingLocalReadKHR"; case ImageLayout::eVideoEncodeDstKHR: return "VideoEncodeDstKHR"; case ImageLayout::eVideoEncodeSrcKHR: return "VideoEncodeSrcKHR"; case ImageLayout::eVideoEncodeDpbKHR: return "VideoEncodeDpbKHR"; @@ -5877,6 +5866,7 @@ namespace VULKAN_HPP_NAMESPACE case DynamicState::eRasterizerDiscardEnable: return "RasterizerDiscardEnable"; case DynamicState::eDepthBiasEnable: return "DepthBiasEnable"; case DynamicState::ePrimitiveRestartEnable: return "PrimitiveRestartEnable"; + case DynamicState::eLineStipple: return "LineStipple"; case DynamicState::eViewportWScalingNV: return "ViewportWScalingNV"; case DynamicState::eDiscardRectangleEXT: return "DiscardRectangleEXT"; case DynamicState::eDiscardRectangleEnableEXT: return "DiscardRectangleEnableEXT"; @@ -5924,7 +5914,6 @@ namespace VULKAN_HPP_NAMESPACE case DynamicState::eRepresentativeFragmentTestEnableNV: return "RepresentativeFragmentTestEnableNV"; case DynamicState::eCoverageReductionModeNV: return "CoverageReductionModeNV"; case DynamicState::eAttachmentFeedbackLoopEnableEXT: return "AttachmentFeedbackLoopEnableEXT"; - case DynamicState::eLineStippleKHR: return "LineStippleKHR"; case DynamicState::eDepthClampRangeEXT: return "DepthClampRangeEXT"; default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } @@ -5975,6 +5964,8 @@ namespace VULKAN_HPP_NAMESPACE case PipelineCreateFlagBits::eDispatchBase: return 
"DispatchBase"; case PipelineCreateFlagBits::eFailOnPipelineCompileRequired: return "FailOnPipelineCompileRequired"; case PipelineCreateFlagBits::eEarlyReturnOnFailure: return "EarlyReturnOnFailure"; + case PipelineCreateFlagBits::eNoProtectedAccess: return "NoProtectedAccess"; + case PipelineCreateFlagBits::eProtectedAccessOnly: return "ProtectedAccessOnly"; case PipelineCreateFlagBits::eRayTracingNoNullAnyHitShadersKHR: return "RayTracingNoNullAnyHitShadersKHR"; case PipelineCreateFlagBits::eRayTracingNoNullClosestHitShadersKHR: return "RayTracingNoNullClosestHitShadersKHR"; case PipelineCreateFlagBits::eRayTracingNoNullMissShadersKHR: return "RayTracingNoNullMissShadersKHR"; @@ -5999,8 +5990,6 @@ namespace VULKAN_HPP_NAMESPACE #if defined( VK_ENABLE_BETA_EXTENSIONS ) case PipelineCreateFlagBits::eRayTracingDisplacementMicromapNV: return "RayTracingDisplacementMicromapNV"; #endif /*VK_ENABLE_BETA_EXTENSIONS*/ - case PipelineCreateFlagBits::eNoProtectedAccessEXT: return "NoProtectedAccessEXT"; - case PipelineCreateFlagBits::eProtectedAccessOnlyEXT: return "ProtectedAccessOnlyEXT"; default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } @@ -6242,7 +6231,7 @@ namespace VULKAN_HPP_NAMESPACE switch ( value ) { case DescriptorSetLayoutCreateFlagBits::eUpdateAfterBindPool: return "UpdateAfterBindPool"; - case DescriptorSetLayoutCreateFlagBits::ePushDescriptorKHR: return "PushDescriptorKHR"; + case DescriptorSetLayoutCreateFlagBits::ePushDescriptor: return "PushDescriptor"; case DescriptorSetLayoutCreateFlagBits::eDescriptorBufferEXT: return "DescriptorBufferEXT"; case DescriptorSetLayoutCreateFlagBits::eEmbeddedImmutableSamplersEXT: return "EmbeddedImmutableSamplersEXT"; case DescriptorSetLayoutCreateFlagBits::eIndirectBindableNV: return "IndirectBindableNV"; @@ -6335,7 +6324,7 @@ namespace VULKAN_HPP_NAMESPACE case AttachmentLoadOp::eLoad: return "Load"; case AttachmentLoadOp::eClear: return "Clear"; case AttachmentLoadOp::eDontCare: return "DontCare"; - case AttachmentLoadOp::eNoneKHR: return "NoneKHR"; + case AttachmentLoadOp::eNone: return "None"; default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } @@ -6477,8 +6466,8 @@ namespace VULKAN_HPP_NAMESPACE { case IndexType::eUint16: return "Uint16"; case IndexType::eUint32: return "Uint32"; + case IndexType::eUint8: return "Uint8"; case IndexType::eNoneKHR: return "NoneKHR"; - case IndexType::eUint8KHR: return "Uint8KHR"; default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } @@ -6519,9 +6508,9 @@ namespace VULKAN_HPP_NAMESPACE case SubgroupFeatureFlagBits::eShuffleRelative: return "ShuffleRelative"; case SubgroupFeatureFlagBits::eClustered: return "Clustered"; case SubgroupFeatureFlagBits::eQuad: return "Quad"; + case SubgroupFeatureFlagBits::eRotate: return "Rotate"; + case SubgroupFeatureFlagBits::eRotateClustered: return "RotateClustered"; case SubgroupFeatureFlagBits::ePartitionedNV: return "PartitionedNV"; - case SubgroupFeatureFlagBits::eRotateKHR: return "RotateKHR"; - case SubgroupFeatureFlagBits::eRotateClusteredKHR: return "RotateClusteredKHR"; default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } @@ -6612,7 +6601,7 @@ namespace VULKAN_HPP_NAMESPACE switch ( value ) { case DescriptorUpdateTemplateType::eDescriptorSet: return "DescriptorSet"; - case DescriptorUpdateTemplateType::ePushDescriptorsKHR: return "PushDescriptorsKHR"; + case 
DescriptorUpdateTemplateType::ePushDescriptors: return "PushDescriptors"; default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } @@ -7007,7 +6996,6 @@ namespace VULKAN_HPP_NAMESPACE case FormatFeatureFlagBits2::eBlitSrc: return "BlitSrc"; case FormatFeatureFlagBits2::eBlitDst: return "BlitDst"; case FormatFeatureFlagBits2::eSampledImageFilterLinear: return "SampledImageFilterLinear"; - case FormatFeatureFlagBits2::eSampledImageFilterCubic: return "SampledImageFilterCubic"; case FormatFeatureFlagBits2::eTransferSrc: return "TransferSrc"; case FormatFeatureFlagBits2::eTransferDst: return "TransferDst"; case FormatFeatureFlagBits2::eSampledImageFilterMinmax: return "SampledImageFilterMinmax"; @@ -7022,12 +7010,13 @@ namespace VULKAN_HPP_NAMESPACE case FormatFeatureFlagBits2::eStorageReadWithoutFormat: return "StorageReadWithoutFormat"; case FormatFeatureFlagBits2::eStorageWriteWithoutFormat: return "StorageWriteWithoutFormat"; case FormatFeatureFlagBits2::eSampledImageDepthComparison: return "SampledImageDepthComparison"; + case FormatFeatureFlagBits2::eSampledImageFilterCubic: return "SampledImageFilterCubic"; + case FormatFeatureFlagBits2::eHostImageTransfer: return "HostImageTransfer"; case FormatFeatureFlagBits2::eVideoDecodeOutputKHR: return "VideoDecodeOutputKHR"; case FormatFeatureFlagBits2::eVideoDecodeDpbKHR: return "VideoDecodeDpbKHR"; case FormatFeatureFlagBits2::eAccelerationStructureVertexBufferKHR: return "AccelerationStructureVertexBufferKHR"; case FormatFeatureFlagBits2::eFragmentDensityMapEXT: return "FragmentDensityMapEXT"; case FormatFeatureFlagBits2::eFragmentShadingRateAttachmentKHR: return "FragmentShadingRateAttachmentKHR"; - case FormatFeatureFlagBits2::eHostImageTransferEXT: return "HostImageTransferEXT"; case FormatFeatureFlagBits2::eVideoEncodeInputKHR: return "VideoEncodeInputKHR"; case FormatFeatureFlagBits2::eVideoEncodeDpbKHR: return "VideoEncodeDpbKHR"; case FormatFeatureFlagBits2::eLinearColorAttachmentNV: return "LinearColorAttachmentNV"; @@ -7044,6 +7033,156 @@ namespace VULKAN_HPP_NAMESPACE } } + //=== VK_VERSION_1_4 === + + VULKAN_HPP_INLINE std::string to_string( QueueGlobalPriority value ) + { + switch ( value ) + { + case QueueGlobalPriority::eLow: return "Low"; + case QueueGlobalPriority::eMedium: return "Medium"; + case QueueGlobalPriority::eHigh: return "High"; + case QueueGlobalPriority::eRealtime: return "Realtime"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( LineRasterizationMode value ) + { + switch ( value ) + { + case LineRasterizationMode::eDefault: return "Default"; + case LineRasterizationMode::eRectangular: return "Rectangular"; + case LineRasterizationMode::eBresenham: return "Bresenham"; + case LineRasterizationMode::eRectangularSmooth: return "RectangularSmooth"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( MemoryUnmapFlagBits value ) + { + switch ( value ) + { + case MemoryUnmapFlagBits::eReserveEXT: return "ReserveEXT"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( PipelineCreateFlagBits2 value ) + { + switch ( value ) + { + case PipelineCreateFlagBits2::eDisableOptimization: return "DisableOptimization"; + case PipelineCreateFlagBits2::eAllowDerivatives: return 
"AllowDerivatives"; + case PipelineCreateFlagBits2::eDerivative: return "Derivative"; + case PipelineCreateFlagBits2::eViewIndexFromDeviceIndex: return "ViewIndexFromDeviceIndex"; + case PipelineCreateFlagBits2::eDispatchBase: return "DispatchBase"; + case PipelineCreateFlagBits2::eFailOnPipelineCompileRequired: return "FailOnPipelineCompileRequired"; + case PipelineCreateFlagBits2::eEarlyReturnOnFailure: return "EarlyReturnOnFailure"; + case PipelineCreateFlagBits2::eNoProtectedAccess: return "NoProtectedAccess"; + case PipelineCreateFlagBits2::eProtectedAccessOnly: return "ProtectedAccessOnly"; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + case PipelineCreateFlagBits2::eExecutionGraphAMDX: return "ExecutionGraphAMDX"; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + case PipelineCreateFlagBits2::eEnableLegacyDitheringEXT: return "EnableLegacyDitheringEXT"; + case PipelineCreateFlagBits2::eDeferCompileNV: return "DeferCompileNV"; + case PipelineCreateFlagBits2::eCaptureStatisticsKHR: return "CaptureStatisticsKHR"; + case PipelineCreateFlagBits2::eCaptureInternalRepresentationsKHR: return "CaptureInternalRepresentationsKHR"; + case PipelineCreateFlagBits2::eLinkTimeOptimizationEXT: return "LinkTimeOptimizationEXT"; + case PipelineCreateFlagBits2::eRetainLinkTimeOptimizationInfoEXT: return "RetainLinkTimeOptimizationInfoEXT"; + case PipelineCreateFlagBits2::eLibraryKHR: return "LibraryKHR"; + case PipelineCreateFlagBits2::eRayTracingSkipTrianglesKHR: return "RayTracingSkipTrianglesKHR"; + case PipelineCreateFlagBits2::eRayTracingSkipAabbsKHR: return "RayTracingSkipAabbsKHR"; + case PipelineCreateFlagBits2::eRayTracingNoNullAnyHitShadersKHR: return "RayTracingNoNullAnyHitShadersKHR"; + case PipelineCreateFlagBits2::eRayTracingNoNullClosestHitShadersKHR: return "RayTracingNoNullClosestHitShadersKHR"; + case PipelineCreateFlagBits2::eRayTracingNoNullMissShadersKHR: return "RayTracingNoNullMissShadersKHR"; + case PipelineCreateFlagBits2::eRayTracingNoNullIntersectionShadersKHR: return "RayTracingNoNullIntersectionShadersKHR"; + case PipelineCreateFlagBits2::eRayTracingShaderGroupHandleCaptureReplayKHR: return "RayTracingShaderGroupHandleCaptureReplayKHR"; + case PipelineCreateFlagBits2::eIndirectBindableNV: return "IndirectBindableNV"; + case PipelineCreateFlagBits2::eRayTracingAllowMotionNV: return "RayTracingAllowMotionNV"; + case PipelineCreateFlagBits2::eRenderingFragmentShadingRateAttachmentKHR: return "RenderingFragmentShadingRateAttachmentKHR"; + case PipelineCreateFlagBits2::eRenderingFragmentDensityMapAttachmentEXT: return "RenderingFragmentDensityMapAttachmentEXT"; + case PipelineCreateFlagBits2::eRayTracingOpacityMicromapEXT: return "RayTracingOpacityMicromapEXT"; + case PipelineCreateFlagBits2::eColorAttachmentFeedbackLoopEXT: return "ColorAttachmentFeedbackLoopEXT"; + case PipelineCreateFlagBits2::eDepthStencilAttachmentFeedbackLoopEXT: return "DepthStencilAttachmentFeedbackLoopEXT"; + case PipelineCreateFlagBits2::eRayTracingDisplacementMicromapNV: return "RayTracingDisplacementMicromapNV"; + case PipelineCreateFlagBits2::eDescriptorBufferEXT: return "DescriptorBufferEXT"; + case PipelineCreateFlagBits2::eCaptureDataKHR: return "CaptureDataKHR"; + case PipelineCreateFlagBits2::eIndirectBindableEXT: return "IndirectBindableEXT"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( BufferUsageFlagBits2 value ) + { + switch ( value ) + { + case BufferUsageFlagBits2::eTransferSrc: return 
"TransferSrc"; + case BufferUsageFlagBits2::eTransferDst: return "TransferDst"; + case BufferUsageFlagBits2::eUniformTexelBuffer: return "UniformTexelBuffer"; + case BufferUsageFlagBits2::eStorageTexelBuffer: return "StorageTexelBuffer"; + case BufferUsageFlagBits2::eUniformBuffer: return "UniformBuffer"; + case BufferUsageFlagBits2::eStorageBuffer: return "StorageBuffer"; + case BufferUsageFlagBits2::eIndexBuffer: return "IndexBuffer"; + case BufferUsageFlagBits2::eVertexBuffer: return "VertexBuffer"; + case BufferUsageFlagBits2::eIndirectBuffer: return "IndirectBuffer"; + case BufferUsageFlagBits2::eShaderDeviceAddress: return "ShaderDeviceAddress"; +#if defined( VK_ENABLE_BETA_EXTENSIONS ) + case BufferUsageFlagBits2::eExecutionGraphScratchAMDX: return "ExecutionGraphScratchAMDX"; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ + case BufferUsageFlagBits2::eConditionalRenderingEXT: return "ConditionalRenderingEXT"; + case BufferUsageFlagBits2::eShaderBindingTableKHR: return "ShaderBindingTableKHR"; + case BufferUsageFlagBits2::eTransformFeedbackBufferEXT: return "TransformFeedbackBufferEXT"; + case BufferUsageFlagBits2::eTransformFeedbackCounterBufferEXT: return "TransformFeedbackCounterBufferEXT"; + case BufferUsageFlagBits2::eVideoDecodeSrcKHR: return "VideoDecodeSrcKHR"; + case BufferUsageFlagBits2::eVideoDecodeDstKHR: return "VideoDecodeDstKHR"; + case BufferUsageFlagBits2::eVideoEncodeDstKHR: return "VideoEncodeDstKHR"; + case BufferUsageFlagBits2::eVideoEncodeSrcKHR: return "VideoEncodeSrcKHR"; + case BufferUsageFlagBits2::eAccelerationStructureBuildInputReadOnlyKHR: return "AccelerationStructureBuildInputReadOnlyKHR"; + case BufferUsageFlagBits2::eAccelerationStructureStorageKHR: return "AccelerationStructureStorageKHR"; + case BufferUsageFlagBits2::eSamplerDescriptorBufferEXT: return "SamplerDescriptorBufferEXT"; + case BufferUsageFlagBits2::eResourceDescriptorBufferEXT: return "ResourceDescriptorBufferEXT"; + case BufferUsageFlagBits2::ePushDescriptorsDescriptorBufferEXT: return "PushDescriptorsDescriptorBufferEXT"; + case BufferUsageFlagBits2::eMicromapBuildInputReadOnlyEXT: return "MicromapBuildInputReadOnlyEXT"; + case BufferUsageFlagBits2::eMicromapStorageEXT: return "MicromapStorageEXT"; + case BufferUsageFlagBits2::ePreprocessBufferEXT: return "PreprocessBufferEXT"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( PipelineRobustnessBufferBehavior value ) + { + switch ( value ) + { + case PipelineRobustnessBufferBehavior::eDeviceDefault: return "DeviceDefault"; + case PipelineRobustnessBufferBehavior::eDisabled: return "Disabled"; + case PipelineRobustnessBufferBehavior::eRobustBufferAccess: return "RobustBufferAccess"; + case PipelineRobustnessBufferBehavior::eRobustBufferAccess2: return "RobustBufferAccess2"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE std::string to_string( PipelineRobustnessImageBehavior value ) + { + switch ( value ) + { + case PipelineRobustnessImageBehavior::eDeviceDefault: return "DeviceDefault"; + case PipelineRobustnessImageBehavior::eDisabled: return "Disabled"; + case PipelineRobustnessImageBehavior::eRobustImageAccess: return "RobustImageAccess"; + case PipelineRobustnessImageBehavior::eRobustImageAccess2: return "RobustImageAccess2"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + + VULKAN_HPP_INLINE 
std::string to_string( HostImageCopyFlagBits value ) + { + switch ( value ) + { + case HostImageCopyFlagBits::eMemcpy: return "Memcpy"; + default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; + } + } + //=== VK_KHR_surface === VULKAN_HPP_INLINE std::string to_string( SurfaceTransformFlagBitsKHR value ) @@ -7660,32 +7799,6 @@ namespace VULKAN_HPP_NAMESPACE } #endif /*VK_USE_PLATFORM_VI_NN*/ - //=== VK_EXT_pipeline_robustness === - - VULKAN_HPP_INLINE std::string to_string( PipelineRobustnessBufferBehaviorEXT value ) - { - switch ( value ) - { - case PipelineRobustnessBufferBehaviorEXT::eDeviceDefault: return "DeviceDefault"; - case PipelineRobustnessBufferBehaviorEXT::eDisabled: return "Disabled"; - case PipelineRobustnessBufferBehaviorEXT::eRobustBufferAccess: return "RobustBufferAccess"; - case PipelineRobustnessBufferBehaviorEXT::eRobustBufferAccess2: return "RobustBufferAccess2"; - default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; - } - } - - VULKAN_HPP_INLINE std::string to_string( PipelineRobustnessImageBehaviorEXT value ) - { - switch ( value ) - { - case PipelineRobustnessImageBehaviorEXT::eDeviceDefault: return "DeviceDefault"; - case PipelineRobustnessImageBehaviorEXT::eDisabled: return "Disabled"; - case PipelineRobustnessImageBehaviorEXT::eRobustImageAccess: return "RobustImageAccess"; - case PipelineRobustnessImageBehaviorEXT::eRobustImageAccess2: return "RobustImageAccess2"; - default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; - } - } - //=== VK_EXT_conditional_rendering === VULKAN_HPP_INLINE std::string to_string( ConditionalRenderingFlagBitsEXT value ) @@ -8176,20 +8289,6 @@ namespace VULKAN_HPP_NAMESPACE return "(void)"; } - //=== VK_KHR_global_priority === - - VULKAN_HPP_INLINE std::string to_string( QueueGlobalPriorityKHR value ) - { - switch ( value ) - { - case QueueGlobalPriorityKHR::eLow: return "Low"; - case QueueGlobalPriorityKHR::eMedium: return "Medium"; - case QueueGlobalPriorityKHR::eHigh: return "High"; - case QueueGlobalPriorityKHR::eRealtime: return "Realtime"; - default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; - } - } - //=== VK_AMD_memory_overallocation_behavior === VULKAN_HPP_INLINE std::string to_string( MemoryOverallocationBehaviorAMD value ) @@ -8393,28 +8492,6 @@ namespace VULKAN_HPP_NAMESPACE } } - //=== VK_EXT_host_image_copy === - - VULKAN_HPP_INLINE std::string to_string( HostImageCopyFlagBitsEXT value ) - { - switch ( value ) - { - case HostImageCopyFlagBitsEXT::eMemcpy: return "Memcpy"; - default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; - } - } - - //=== VK_KHR_map_memory2 === - - VULKAN_HPP_INLINE std::string to_string( MemoryUnmapFlagBitsKHR value ) - { - switch ( value ) - { - case MemoryUnmapFlagBitsKHR::eReserveEXT: return "ReserveEXT"; - default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; - } - } - //=== VK_EXT_surface_maintenance1 === VULKAN_HPP_INLINE std::string to_string( PresentScalingFlagBitsEXT value ) @@ -8521,11 +8598,8 @@ namespace VULKAN_HPP_NAMESPACE { case VideoEncodeCapabilityFlagBitsKHR::ePrecedingExternallyEncodedBytes: return "PrecedingExternallyEncodedBytes"; case VideoEncodeCapabilityFlagBitsKHR::eInsufficientBitstreamBufferRangeDetection: return "InsufficientBitstreamBufferRangeDetection"; -<<<<<<< HEAD -======= case 
VideoEncodeCapabilityFlagBitsKHR::eQuantizationDeltaMap: return "QuantizationDeltaMap"; case VideoEncodeCapabilityFlagBitsKHR::eEmphasisMap: return "EmphasisMap"; ->>>>>>> indev default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } @@ -9087,90 +9161,6 @@ namespace VULKAN_HPP_NAMESPACE } } - //=== VK_KHR_maintenance5 === - - VULKAN_HPP_INLINE std::string to_string( PipelineCreateFlagBits2KHR value ) - { - switch ( value ) - { - case PipelineCreateFlagBits2KHR::eDisableOptimization: return "DisableOptimization"; - case PipelineCreateFlagBits2KHR::eAllowDerivatives: return "AllowDerivatives"; - case PipelineCreateFlagBits2KHR::eDerivative: return "Derivative"; -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - case PipelineCreateFlagBits2KHR::eExecutionGraphAMDX: return "ExecutionGraphAMDX"; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - case PipelineCreateFlagBits2KHR::eEnableLegacyDitheringEXT: return "EnableLegacyDitheringEXT"; - case PipelineCreateFlagBits2KHR::eViewIndexFromDeviceIndex: return "ViewIndexFromDeviceIndex"; - case PipelineCreateFlagBits2KHR::eDispatchBase: return "DispatchBase"; - case PipelineCreateFlagBits2KHR::eDeferCompileNV: return "DeferCompileNV"; - case PipelineCreateFlagBits2KHR::eCaptureStatistics: return "CaptureStatistics"; - case PipelineCreateFlagBits2KHR::eCaptureInternalRepresentations: return "CaptureInternalRepresentations"; - case PipelineCreateFlagBits2KHR::eFailOnPipelineCompileRequired: return "FailOnPipelineCompileRequired"; - case PipelineCreateFlagBits2KHR::eEarlyReturnOnFailure: return "EarlyReturnOnFailure"; - case PipelineCreateFlagBits2KHR::eLinkTimeOptimizationEXT: return "LinkTimeOptimizationEXT"; - case PipelineCreateFlagBits2KHR::eRetainLinkTimeOptimizationInfoEXT: return "RetainLinkTimeOptimizationInfoEXT"; - case PipelineCreateFlagBits2KHR::eLibrary: return "Library"; - case PipelineCreateFlagBits2KHR::eRayTracingSkipTriangles: return "RayTracingSkipTriangles"; - case PipelineCreateFlagBits2KHR::eRayTracingSkipAabbs: return "RayTracingSkipAabbs"; - case PipelineCreateFlagBits2KHR::eRayTracingNoNullAnyHitShaders: return "RayTracingNoNullAnyHitShaders"; - case PipelineCreateFlagBits2KHR::eRayTracingNoNullClosestHitShaders: return "RayTracingNoNullClosestHitShaders"; - case PipelineCreateFlagBits2KHR::eRayTracingNoNullMissShaders: return "RayTracingNoNullMissShaders"; - case PipelineCreateFlagBits2KHR::eRayTracingNoNullIntersectionShaders: return "RayTracingNoNullIntersectionShaders"; - case PipelineCreateFlagBits2KHR::eRayTracingShaderGroupHandleCaptureReplay: return "RayTracingShaderGroupHandleCaptureReplay"; - case PipelineCreateFlagBits2KHR::eIndirectBindableNV: return "IndirectBindableNV"; - case PipelineCreateFlagBits2KHR::eRayTracingAllowMotionNV: return "RayTracingAllowMotionNV"; - case PipelineCreateFlagBits2KHR::eRenderingFragmentShadingRateAttachment: return "RenderingFragmentShadingRateAttachment"; - case PipelineCreateFlagBits2KHR::eRenderingFragmentDensityMapAttachmentEXT: return "RenderingFragmentDensityMapAttachmentEXT"; - case PipelineCreateFlagBits2KHR::eRayTracingOpacityMicromapEXT: return "RayTracingOpacityMicromapEXT"; - case PipelineCreateFlagBits2KHR::eColorAttachmentFeedbackLoopEXT: return "ColorAttachmentFeedbackLoopEXT"; - case PipelineCreateFlagBits2KHR::eDepthStencilAttachmentFeedbackLoopEXT: return "DepthStencilAttachmentFeedbackLoopEXT"; - case PipelineCreateFlagBits2KHR::eNoProtectedAccessEXT: return "NoProtectedAccessEXT"; - case 
PipelineCreateFlagBits2KHR::eProtectedAccessOnlyEXT: return "ProtectedAccessOnlyEXT"; - case PipelineCreateFlagBits2KHR::eRayTracingDisplacementMicromapNV: return "RayTracingDisplacementMicromapNV"; - case PipelineCreateFlagBits2KHR::eDescriptorBufferEXT: return "DescriptorBufferEXT"; - case PipelineCreateFlagBits2KHR::eCaptureData: return "CaptureData"; - case PipelineCreateFlagBits2KHR::eIndirectBindableEXT: return "IndirectBindableEXT"; - default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; - } - } - - VULKAN_HPP_INLINE std::string to_string( BufferUsageFlagBits2KHR value ) - { - switch ( value ) - { - case BufferUsageFlagBits2KHR::eTransferSrc: return "TransferSrc"; - case BufferUsageFlagBits2KHR::eTransferDst: return "TransferDst"; - case BufferUsageFlagBits2KHR::eUniformTexelBuffer: return "UniformTexelBuffer"; - case BufferUsageFlagBits2KHR::eStorageTexelBuffer: return "StorageTexelBuffer"; - case BufferUsageFlagBits2KHR::eUniformBuffer: return "UniformBuffer"; - case BufferUsageFlagBits2KHR::eStorageBuffer: return "StorageBuffer"; - case BufferUsageFlagBits2KHR::eIndexBuffer: return "IndexBuffer"; - case BufferUsageFlagBits2KHR::eVertexBuffer: return "VertexBuffer"; - case BufferUsageFlagBits2KHR::eIndirectBuffer: return "IndirectBuffer"; -#if defined( VK_ENABLE_BETA_EXTENSIONS ) - case BufferUsageFlagBits2KHR::eExecutionGraphScratchAMDX: return "ExecutionGraphScratchAMDX"; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ - case BufferUsageFlagBits2KHR::eConditionalRenderingEXT: return "ConditionalRenderingEXT"; - case BufferUsageFlagBits2KHR::eShaderBindingTable: return "ShaderBindingTable"; - case BufferUsageFlagBits2KHR::eTransformFeedbackBufferEXT: return "TransformFeedbackBufferEXT"; - case BufferUsageFlagBits2KHR::eTransformFeedbackCounterBufferEXT: return "TransformFeedbackCounterBufferEXT"; - case BufferUsageFlagBits2KHR::eVideoDecodeSrc: return "VideoDecodeSrc"; - case BufferUsageFlagBits2KHR::eVideoDecodeDst: return "VideoDecodeDst"; - case BufferUsageFlagBits2KHR::eVideoEncodeDst: return "VideoEncodeDst"; - case BufferUsageFlagBits2KHR::eVideoEncodeSrc: return "VideoEncodeSrc"; - case BufferUsageFlagBits2KHR::eShaderDeviceAddress: return "ShaderDeviceAddress"; - case BufferUsageFlagBits2KHR::eAccelerationStructureBuildInputReadOnly: return "AccelerationStructureBuildInputReadOnly"; - case BufferUsageFlagBits2KHR::eAccelerationStructureStorage: return "AccelerationStructureStorage"; - case BufferUsageFlagBits2KHR::eSamplerDescriptorBufferEXT: return "SamplerDescriptorBufferEXT"; - case BufferUsageFlagBits2KHR::eResourceDescriptorBufferEXT: return "ResourceDescriptorBufferEXT"; - case BufferUsageFlagBits2KHR::ePushDescriptorsDescriptorBufferEXT: return "PushDescriptorsDescriptorBufferEXT"; - case BufferUsageFlagBits2KHR::eMicromapBuildInputReadOnlyEXT: return "MicromapBuildInputReadOnlyEXT"; - case BufferUsageFlagBits2KHR::eMicromapStorageEXT: return "MicromapStorageEXT"; - case BufferUsageFlagBits2KHR::ePreprocessBufferEXT: return "PreprocessBufferEXT"; - default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; - } - } - //=== VK_AMD_anti_lag === VULKAN_HPP_INLINE std::string to_string( AntiLagModeAMD value ) @@ -9427,20 +9417,6 @@ namespace VULKAN_HPP_NAMESPACE } } - //=== VK_KHR_line_rasterization === - - VULKAN_HPP_INLINE std::string to_string( LineRasterizationModeKHR value ) - { - switch ( value ) - { - case LineRasterizationModeKHR::eDefault: return "Default"; - case 
LineRasterizationModeKHR::eRectangular: return "Rectangular"; - case LineRasterizationModeKHR::eBresenham: return "Bresenham"; - case LineRasterizationModeKHR::eRectangularSmooth: return "RectangularSmooth"; - default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; - } - } - //=== VK_KHR_calibrated_timestamps === VULKAN_HPP_INLINE std::string to_string( TimeDomainKHR value ) @@ -9455,8 +9431,6 @@ namespace VULKAN_HPP_NAMESPACE } } -<<<<<<< HEAD -======= //=== VK_NV_display_stereo === VULKAN_HPP_INLINE std::string to_string( DisplaySurfaceStereoTypeNV value ) @@ -9471,7 +9445,6 @@ namespace VULKAN_HPP_NAMESPACE } } ->>>>>>> indev //=== VK_KHR_maintenance7 === VULKAN_HPP_INLINE std::string to_string( PhysicalDeviceLayeredApiKHR value ) diff --git a/third_party/vulkan/vulkan_video.hpp b/third_party/vulkan/vulkan_video.hpp index fb82aa3..ee88975 100644 --- a/third_party/vulkan/vulkan_video.hpp +++ b/third_party/vulkan/vulkan_video.hpp @@ -7,8 +7,15 @@ #ifndef VULKAN_VIDEO_HPP #define VULKAN_VIDEO_HPP +// clang-format off +#include +// clang-format on + #include #include +#if ( 301 < VK_HEADER_VERSION ) +# include +#endif #include #include #include @@ -16,7 +23,6 @@ #include #include #include -#include #if !defined( VULKAN_HPP_VIDEO_NAMESPACE ) # define VULKAN_HPP_VIDEO_NAMESPACE video From 5a04ad778136060de4c97a565f038644fc3c3c64 Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Wed, 4 Dec 2024 18:34:53 +0100 Subject: [PATCH 088/131] moving experimental features to a new folder --- example/build.sh | 1 - example/main.c | 22 ----- experimental/RenderToTexture/build.sh | 11 +++ experimental/RenderToTexture/main.c | 86 +++++++++++++++++++ experimental/RenderToTexture/run.sh | 4 + includes/mlx.h | 2 +- runtime/Includes/Core/Application.h | 1 + runtime/Includes/Core/Application.inl | 3 + .../Sources/Renderer/Pipelines/Graphics.cpp | 28 ++++-- 9 files changed, 126 insertions(+), 32 deletions(-) create mode 100755 experimental/RenderToTexture/build.sh create mode 100644 experimental/RenderToTexture/main.c create mode 100755 experimental/RenderToTexture/run.sh diff --git a/example/build.sh b/example/build.sh index b332c18..ab5882b 100755 --- a/example/build.sh +++ b/example/build.sh @@ -9,4 +9,3 @@ if [ $(uname -s) = 'Darwin' ]; then else clang main.c ../libmlx.so -lSDL2 -g -Wall -Wextra -Werror; fi - diff --git a/example/main.c b/example/main.c index 84a524d..d32e563 100644 --- a/example/main.c +++ b/example/main.c @@ -9,8 +9,6 @@ typedef struct void* logo_jpg; void* logo_bmp; void* img; - void* render_target; - void* render_target_win; } mlx_t; int update(void* param) @@ -52,19 +50,6 @@ int update(void* param) mlx_pixel_put(mlx->mlx, mlx->win, 220 + j, 160 + k, 0xFFFF0000); } - mlx_string_put(mlx->mlx, mlx->render_target_win, 20, 20, 0xFFAF2BFF, "cacaboudin"); - mlx_transform_put_image_to_window(mlx->mlx, mlx->render_target_win, mlx->logo_bmp, 100, 40, 0.5f, 75.0f); - mlx_put_image_to_window(mlx->mlx, mlx->render_target_win, mlx->img, 40, 60); - - for(int j = 0, color = 0; j < 200; j++) - { - mlx_pixel_put(mlx->mlx, mlx->render_target_win, j, j, 0xFFFF0000 + color); - mlx_pixel_put(mlx->mlx, mlx->render_target_win, 199 - j, j, 0xFF0000FF); - color += (color < 255); - } - - mlx_transform_put_image_to_window(mlx->mlx, mlx->win, mlx->render_target, 5, 250, 0.5f, 33.0f); - i++; return 0; } @@ -151,10 +136,6 @@ int main(void) mlx_get_screens_size(mlx.mlx, mlx.win, &w, &h); printf("screen size : %dx%d\n", w, h); - mlx.render_target = mlx_new_image(mlx.mlx, 200, 200); - 
mlx.render_target_win = mlx_new_window(mlx.mlx, 200, 200, (char*)mlx.render_target); - mlx_clear_window(mlx.mlx, mlx.render_target_win, 0xFFC16868); - mlx_set_fps_goal(mlx.mlx, 60); mlx_on_event(mlx.mlx, mlx.win, MLX_KEYDOWN, key_hook, &mlx); @@ -181,9 +162,6 @@ int main(void) mlx_destroy_image(mlx.mlx, mlx.img); mlx_destroy_window(mlx.mlx, mlx.win); - mlx_destroy_window(mlx.mlx, mlx.render_target_win); - mlx_destroy_image(mlx.mlx, mlx.render_target); - mlx_destroy_display(mlx.mlx); return 0; diff --git a/experimental/RenderToTexture/build.sh b/experimental/RenderToTexture/build.sh new file mode 100755 index 0000000..3ae5775 --- /dev/null +++ b/experimental/RenderToTexture/build.sh @@ -0,0 +1,11 @@ +#!/bin/bash + +if [ -e a.out ]; then + rm a.out +fi + +if [ $(uname -s) = 'Darwin' ]; then + clang main.c ../../libmlx.dylib -L /opt/homebrew/lib -lSDL2 -g; +else + clang main.c ../../libmlx.so -lSDL2 -g -Wall -Wextra -Werror; +fi diff --git a/experimental/RenderToTexture/main.c b/experimental/RenderToTexture/main.c new file mode 100644 index 0000000..fb91e81 --- /dev/null +++ b/experimental/RenderToTexture/main.c @@ -0,0 +1,86 @@ +#include "../../includes/mlx.h" + +typedef struct +{ + void* mlx; + void* win; + void* render_target; + void* render_target_win; +} mlx_t; + +int update(void* param) +{ + mlx_t* mlx = (mlx_t*)param; + + mlx_clear_window(mlx->mlx, mlx->win, 0xFF334D4D); + + mlx_string_put(mlx->mlx, mlx->win, 160, 120, 0xFFFF2066, "text"); + mlx_string_put(mlx->mlx, mlx->win, 20, 50, 0xFFFFFFFF, "that's a text"); + + for(int j = 0, color = 0; j < 400; j++) + { + mlx_pixel_put(mlx->mlx, mlx->win, j, j, 0xFFFF0000 + color); + mlx_pixel_put(mlx->mlx, mlx->win, 399 - j, j, 0xFF0000FF); + color += (color < 255); + } + + for(int j = 0; j < 20; j++) + { + for(int k = 0; k < 20; k++) + mlx_pixel_put(mlx->mlx, mlx->win, 220 + j, 160 + k, 0xFFFF0000); + } + + mlx_string_put(mlx->mlx, mlx->render_target_win, 20, 20, 0xFFAF2BFF, "yippeeee"); + for(int j = 0, color = 0; j < 200; j++) + { + mlx_pixel_put(mlx->mlx, mlx->render_target_win, j, j, 0xFFFF0000 + color); + mlx_pixel_put(mlx->mlx, mlx->render_target_win, 199 - j, j, 0xFF0000FF); + color += (color < 255); + } + + mlx_transform_put_image_to_window(mlx->mlx, mlx->win, mlx->render_target, 5, 250, 0.5f, 33.0f); + + return 0; +} + +int key_hook(int key, void* param) +{ + mlx_t* mlx = (mlx_t*)param; + if(key == 41) + mlx_loop_end(mlx->mlx); + return 0; +} + +int window_hook(int event, void* param) +{ + if(event == 0) + mlx_loop_end(((mlx_t*)param)->mlx); + return 0; +} + +int main(void) +{ + mlx_t mlx; + + mlx.mlx = mlx_init(); + mlx.win = mlx_new_resizable_window(mlx.mlx, 400, 400, "My window"); + + mlx.render_target = mlx_new_image(mlx.mlx, 200, 200); + mlx.render_target_win = mlx_new_window(mlx.mlx, 200, 200, (char*)mlx.render_target); + mlx_clear_window(mlx.mlx, mlx.render_target_win, 0xFFC16868); + + mlx_on_event(mlx.mlx, mlx.win, MLX_KEYDOWN, key_hook, &mlx); + mlx_on_event(mlx.mlx, mlx.win, MLX_WINDOW_EVENT, window_hook, &mlx); + + mlx_loop_hook(mlx.mlx, update, &mlx); + mlx_loop(mlx.mlx); + + mlx_destroy_window(mlx.mlx, mlx.win); + + mlx_destroy_window(mlx.mlx, mlx.render_target_win); + mlx_destroy_image(mlx.mlx, mlx.render_target); + + mlx_destroy_display(mlx.mlx); + + return 0; +} diff --git a/experimental/RenderToTexture/run.sh b/experimental/RenderToTexture/run.sh new file mode 100755 index 0000000..993cb20 --- /dev/null +++ b/experimental/RenderToTexture/run.sh @@ -0,0 +1,4 @@ +#!/bin/bash + +bash ./build.sh +./a.out diff --git 
a/includes/mlx.h b/includes/mlx.h
index 9e3f6df..3c03052 100644
--- a/includes/mlx.h
+++ b/includes/mlx.h
@@ -6,7 +6,7 @@
 /* By: maldavid +#+ +:+ +#+ */
 /* +#+#+#+#+#+ +#+ */
 /* Created: 2022/10/04 16:56:35 by maldavid #+# #+# */
-/* Updated: 2024/11/05 18:18:22 by maldavid ### ########.fr */
+/* Updated: 2024/12/04 17:52:23 by maldavid ### ########.fr */
 /* */
 /* ************************************************************************** */
diff --git a/runtime/Includes/Core/Application.h b/runtime/Includes/Core/Application.h
index e2abe45..840e6bf 100644
--- a/runtime/Includes/Core/Application.h
+++ b/runtime/Includes/Core/Application.h
@@ -58,6 +58,7 @@ namespace mlx
 FontRegistry m_font_registry;
 ImageRegistry m_image_registry;
 std::vector> m_graphics;
+ std::shared_ptr p_last_font_bound;
 std::function f_loop_hook;
 std::unique_ptr p_render_core;
 #ifdef PROFILER
diff --git a/runtime/Includes/Core/Application.inl b/runtime/Includes/Core/Application.inl
index bf2c9be..6b1be72 100644
--- a/runtime/Includes/Core/Application.inl
+++ b/runtime/Includes/Core/Application.inl
@@ -98,6 +98,7 @@ namespace mlx
 m_in.RegisterWindow(m_graphics.back()->GetWindow());
 }
 }
+ m_graphics.back()->GetScene().BindFont(p_last_font_bound);
 return static_cast(&m_graphics.back()->GetID());
 }
@@ -162,6 +163,8 @@ namespace mlx
 m_font_registry.RegisterFont(font);
 }
+ p_last_font_bound = font;
+
 for(auto& gs : m_graphics)
 {
 if(gs)
diff --git a/runtime/Sources/Renderer/Pipelines/Graphics.cpp b/runtime/Sources/Renderer/Pipelines/Graphics.cpp
index d7fba28..eee0446 100644
--- a/runtime/Sources/Renderer/Pipelines/Graphics.cpp
+++ b/runtime/Sources/Renderer/Pipelines/Graphics.cpp
@@ -180,14 +180,26 @@ namespace mlx
 attachment_views.push_back(image->GetImageView());
 }
- VkSubpassDependency& dependency = dependencies.emplace_back();
- dependency.srcSubpass = VK_SUBPASS_EXTERNAL;
- dependency.dstSubpass = 0;
- dependency.srcStageMask = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
- dependency.dstStageMask = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
- dependency.srcAccessMask = 0;
- dependency.dstAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
- dependency.dependencyFlags = 0;
+ if(!render_targets.empty())
+ {
+ VkSubpassDependency& first_depedency = dependencies.emplace_back();
+ first_depedency.srcSubpass = VK_SUBPASS_EXTERNAL;
+ first_depedency.dstSubpass = 0;
+ first_depedency.srcStageMask = VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
+ first_depedency.srcAccessMask = VK_ACCESS_SHADER_READ_BIT;
+ first_depedency.dstStageMask = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
+ first_depedency.dstAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
+ first_depedency.dependencyFlags = VK_DEPENDENCY_BY_REGION_BIT;
+
+ VkSubpassDependency& second_depedency = dependencies.emplace_back();
+ second_depedency.srcSubpass = 0;
+ second_depedency.dstSubpass = VK_SUBPASS_EXTERNAL;
+ second_depedency.srcStageMask = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
+ second_depedency.srcAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
+ second_depedency.dstStageMask = VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
+ second_depedency.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;
+ second_depedency.dependencyFlags = VK_DEPENDENCY_BY_REGION_BIT;
+ }
 m_renderpass = kvfCreateRenderPassWithSubpassDependencies(RenderCore::Get().GetDevice(), attachments.data(), attachments.size(), GetPipelineBindPoint(), dependencies.data(), dependencies.size());
 m_clears.clear();
From 5d4d97e05d00d436ce7e675532c08c8f79fccc55 Mon Sep 17 00:00:00 2001
From: Kbz-8
Date: Fri,
13 Dec 2024 02:51:52 +0100 Subject: [PATCH 089/131] fixing valgrind issues, injecting garbage collector inside vulkan's funtions --- runtime/Includes/Core/Memory.h | 1 + runtime/Includes/Graphics/Font.inl | 2 + runtime/Includes/PreCompiled.h | 6 +- runtime/Includes/Renderer/Memory.h | 2 +- runtime/Sources/Core/Memory.cpp | 8 + runtime/Sources/Renderer/Memory.cpp | 4 +- runtime/Sources/Renderer/RenderCore.cpp | 28 ++- third_party/kvf.h | 274 ++++++++++-------------- 8 files changed, 165 insertions(+), 160 deletions(-) diff --git a/runtime/Includes/Core/Memory.h b/runtime/Includes/Core/Memory.h index 174be18..91b36c1 100644 --- a/runtime/Includes/Core/Memory.h +++ b/runtime/Includes/Core/Memory.h @@ -9,6 +9,7 @@ namespace mlx MemManager(); static void* Malloc(std::size_t size); + static void* AlignedMalloc(std::size_t alignment, std::size_t size); static void* Calloc(std::size_t n, std::size_t size); static void* Realloc(void* ptr, std::size_t size); static void Free(void* ptr); diff --git a/runtime/Includes/Graphics/Font.inl b/runtime/Includes/Graphics/Font.inl index 1deb809..eeeada1 100644 --- a/runtime/Includes/Graphics/Font.inl +++ b/runtime/Includes/Graphics/Font.inl @@ -24,6 +24,8 @@ namespace mlx void FontRegistry::Reset() { + for(auto& font: m_fonts_registry) + font->Destroy(); m_fonts_registry.clear(); } } diff --git a/runtime/Includes/PreCompiled.h b/runtime/Includes/PreCompiled.h index 4a958a9..2f17519 100644 --- a/runtime/Includes/PreCompiled.h +++ b/runtime/Includes/PreCompiled.h @@ -67,10 +67,14 @@ #include // sincos #endif +#include + #define VMA_STATIC_VULKAN_FUNCTIONS 0 #define VMA_DYNAMIC_VULKAN_FUNCTIONS 0 #define VMA_VULKAN_VERSION 1000000 -#define VMA_ASSERT(expr) ((void)0) // Because why not +#define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (mlx::MemManager::AlignedMalloc(alignment, size)) +#define VMA_SYSTEM_ALIGNED_FREE(ptr) (mlx::MemManager::Free(ptr)) +#define VMA_ASSERT_LEAK(expr) ((void)0) // Because why not #ifdef MLX_COMPILER_CLANG #pragma clang diagnostic push diff --git a/runtime/Includes/Renderer/Memory.h b/runtime/Includes/Renderer/Memory.h index 26f0b99..cf96bae 100644 --- a/runtime/Includes/Renderer/Memory.h +++ b/runtime/Includes/Renderer/Memory.h @@ -8,7 +8,7 @@ namespace mlx public: GPUAllocator() = default; - void Init() noexcept; + void Init(const VkAllocationCallbacks* callbacks) noexcept; void Destroy() noexcept; VmaAllocation CreateBuffer(const VkBufferCreateInfo* binfo, const VmaAllocationCreateInfo* vinfo, VkBuffer& buffer, const char* name = nullptr) noexcept; diff --git a/runtime/Sources/Core/Memory.cpp b/runtime/Sources/Core/Memory.cpp index 3a69d32..c3eedbe 100644 --- a/runtime/Sources/Core/Memory.cpp +++ b/runtime/Sources/Core/Memory.cpp @@ -19,6 +19,14 @@ namespace mlx return ptr; } + void* MemManager::AlignedMalloc(std::size_t alignment, std::size_t size) + { + void* ptr = std::aligned_alloc(alignment, size); + if(ptr != nullptr) + s_blocks.push_back(ptr); + return ptr; + } + void* MemManager::Calloc(std::size_t n, std::size_t size) { void* ptr = std::calloc(n, size); diff --git a/runtime/Sources/Renderer/Memory.cpp b/runtime/Sources/Renderer/Memory.cpp index 6243bab..58c27c6 100644 --- a/runtime/Sources/Renderer/Memory.cpp +++ b/runtime/Sources/Renderer/Memory.cpp @@ -1,3 +1,4 @@ +#include "vulkan/vulkan_core.h" #include #define VMA_IMPLEMENTATION #ifdef MLX_COMPILER_CLANG @@ -22,7 +23,7 @@ namespace mlx { - void GPUAllocator::Init() noexcept + void GPUAllocator::Init(const VkAllocationCallbacks* callbacks) noexcept { 
MLX_PROFILE_FUNCTION(); VmaVulkanFunctions vma_vulkan_func{}; @@ -49,6 +50,7 @@ namespace mlx allocator_create_info.physicalDevice = RenderCore::Get().GetPhysicalDevice(); allocator_create_info.device = RenderCore::Get().GetDevice(); allocator_create_info.instance = RenderCore::Get().GetInstance(); + allocator_create_info.pAllocationCallbacks = callbacks; allocator_create_info.pVulkanFunctions = &vma_vulkan_func; kvfCheckVk(vmaCreateAllocator(&allocator_create_info, &m_allocator)); diff --git a/runtime/Sources/Renderer/RenderCore.cpp b/runtime/Sources/Renderer/RenderCore.cpp index e4e6aa4..5d49820 100644 --- a/runtime/Sources/Renderer/RenderCore.cpp +++ b/runtime/Sources/Renderer/RenderCore.cpp @@ -31,6 +31,7 @@ #include #include + namespace mlx { static std::unique_ptr loader; @@ -53,6 +54,21 @@ namespace mlx std::cout << std::endl; } + void* VulkanAllocationFunction(void*, std::size_t size, std::size_t alignment, VkSystemAllocationScope) + { + return MemManager::AlignedMalloc(alignment, size); + } + + void* VulkanReallocationFunction(void*, void* ptr, std::size_t size, std::size_t, VkSystemAllocationScope) + { + return MemManager::Realloc(ptr, size); + } + + void VulkanFreeFunction(void*, void* ptr) + { + MemManager::Free(ptr); + } + RenderCore* RenderCore::s_instance = nullptr; RenderCore::RenderCore() @@ -102,7 +118,17 @@ namespace mlx vkDestroySurfaceKHR(m_instance, surface, nullptr); - m_allocator.Init(); + VkAllocationCallbacks callbacks; + callbacks.pUserData = nullptr; + callbacks.pfnAllocation = VulkanAllocationFunction; + callbacks.pfnReallocation = VulkanReallocationFunction; + callbacks.pfnFree = VulkanFreeFunction; + callbacks.pfnInternalAllocation = nullptr; + callbacks.pfnInternalFree = nullptr; + + kvfSetAllocationCallbacks(m_device, &callbacks); + + m_allocator.Init(&callbacks); } #undef MLX_LOAD_FUNCTION diff --git a/third_party/kvf.h b/third_party/kvf.h index be29a0b..fe1c172 100755 --- a/third_party/kvf.h +++ b/third_party/kvf.h @@ -130,6 +130,7 @@ VkDevice kvfCreateDeviceCustomPhysicalDeviceAndQueues(VkPhysicalDevice physical, #ifdef KVF_IMPL_VK_NO_PROTOTYPES void kvfPassDeviceVulkanFunctionPointers(VkPhysicalDevice physical, VkDevice device, const KvfDeviceVulkanFunctions* fns); #endif +void kvfSetAllocationCallbacks(VkDevice device, const VkAllocationCallbacks* callbacks); void kvfDestroyDevice(VkDevice device); VkFence kvfCreateFence(VkDevice device); @@ -399,6 +400,7 @@ typedef struct __KvfDevice KvfDeviceVulkanFunctions fns; #endif VkDevice device; + VkAllocationCallbacks* callbacks; VkPhysicalDevice physical; VkCommandPool cmd_pool; VkCommandBuffer* cmd_buffers; @@ -595,38 +597,12 @@ void __kvfCompleteDeviceCustomPhysicalDeviceAndQueues(VkPhysicalDevice physical, kvf_device->cmd_buffers_size = 0; kvf_device->cmd_buffers_capacity = KVF_COMMAND_POOL_CAPACITY; kvf_device->cmd_buffers = (VkCommandBuffer*)KVF_MALLOC(KVF_COMMAND_POOL_CAPACITY * sizeof(VkCommandBuffer)); + kvf_device->callbacks = NULL; KVF_ASSERT(kvf_device->cmd_buffers != NULL && "allocation failed :("); } void __kvfDestroyDescriptorPools(VkDevice device); -void __kvfDestroyDevice(VkDevice device) -{ - KVF_ASSERT(device != VK_NULL_HANDLE); - for(size_t i = 0; i < __kvf_internal_devices_size; i++) - { - if(__kvf_internal_devices[i].device == device) - { - __KvfDevice* kvf_device = &__kvf_internal_devices[i]; - KVF_FREE(kvf_device->cmd_buffers); - KVF_GET_DEVICE_FUNCTION(vkDestroyCommandPool)(device, kvf_device->cmd_pool, NULL); - __kvfDestroyDescriptorPools(device); - 
KVF_GET_DEVICE_FUNCTION(vkDestroyDevice)(device, NULL); - // Shift the elements to fill the gap - for(size_t j = i; j < __kvf_internal_devices_size - 1; j++) - __kvf_internal_devices[j] = __kvf_internal_devices[j + 1]; - __kvf_internal_devices_size--; - if(__kvf_internal_devices_size == 0) - { - KVF_FREE(__kvf_internal_devices); - __kvf_internal_devices = NULL; - __kvf_internal_devices_capacity = 0; - } - return; - } - } -} - __KvfDevice* __kvfGetKvfDeviceFromVkPhysicalDevice(VkPhysicalDevice device) { KVF_ASSERT(device != VK_NULL_HANDLE); @@ -663,6 +639,43 @@ __KvfDevice* __kvfGetKvfDeviceFromVkCommandBuffer(VkCommandBuffer cmd) return NULL; } +void kvfSetAllocationCallbacks(VkDevice device, const VkAllocationCallbacks* callbacks) +{ + KVF_ASSERT(device != VK_NULL_HANDLE); + __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); + KVF_ASSERT(kvf_device != NULL); + kvf_device->callbacks = (VkAllocationCallbacks*)KVF_MALLOC(sizeof(VkAllocationCallbacks)); + KVF_ASSERT(kvf_device->callbacks && "allocation failed :("); + memcpy(kvf_device->callbacks, callbacks, sizeof(VkAllocationCallbacks)); +} + +void __kvfDestroyDevice(VkDevice device) +{ + KVF_ASSERT(device != VK_NULL_HANDLE); + for(size_t i = 0; i < __kvf_internal_devices_size; i++) + { + if(__kvf_internal_devices[i].device == device) + { + __KvfDevice* kvf_device = &__kvf_internal_devices[i]; + KVF_FREE(kvf_device->cmd_buffers); + KVF_GET_DEVICE_FUNCTION(vkDestroyCommandPool)(device, kvf_device->cmd_pool, NULL); + __kvfDestroyDescriptorPools(device); + KVF_GET_DEVICE_FUNCTION(vkDestroyDevice)(device, NULL); + // Shift the elements to fill the gap + for(size_t j = i; j < __kvf_internal_devices_size - 1; j++) + __kvf_internal_devices[j] = __kvf_internal_devices[j + 1]; + __kvf_internal_devices_size--; + if(__kvf_internal_devices_size == 0) + { + KVF_FREE(__kvf_internal_devices); + __kvf_internal_devices = NULL; + __kvf_internal_devices_capacity = 0; + } + return; + } + } +} + #ifndef KVF_NO_KHR void __kvfAddSwapchainToArray(VkSwapchainKHR swapchain, __KvfSwapchainSupportInternal support, VkFormat format, uint32_t images_count, VkExtent2D extent) { @@ -687,16 +700,14 @@ __KvfDevice* __kvfGetKvfDeviceFromVkCommandBuffer(VkCommandBuffer cmd) KVF_ASSERT(swapchain != VK_NULL_HANDLE); KVF_ASSERT(device != VK_NULL_HANDLE); - #ifdef KVF_IMPL_VK_NO_PROTOTYPES - __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); - KVF_ASSERT(kvf_device != NULL); - #endif + __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); + KVF_ASSERT(kvf_device != NULL); for(size_t i = 0; i < __kvf_internal_swapchains_size; i++) { if(__kvf_internal_swapchains[i].swapchain == swapchain) { - KVF_GET_DEVICE_FUNCTION(vkDestroySwapchainKHR)(device, swapchain, NULL); + KVF_GET_DEVICE_FUNCTION(vkDestroySwapchainKHR)(device, swapchain, kvf_device->callbacks); // Shift the elements to fill the gap for(size_t j = i; j < __kvf_internal_swapchains_size - 1; j++) __kvf_internal_swapchains[j] = __kvf_internal_swapchains[j + 1]; @@ -744,16 +755,14 @@ void __kvfDestroyFramebuffer(VkDevice device, VkFramebuffer framebuffer) KVF_ASSERT(framebuffer != VK_NULL_HANDLE); KVF_ASSERT(device != VK_NULL_HANDLE); - #ifdef KVF_IMPL_VK_NO_PROTOTYPES - __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); - KVF_ASSERT(kvf_device != NULL); - #endif + __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); + KVF_ASSERT(kvf_device != NULL); for(size_t i = 0; i < __kvf_internal_framebuffers_size; i++) { if(__kvf_internal_framebuffers[i].framebuffer == 
framebuffer) { - KVF_GET_DEVICE_FUNCTION(vkDestroyFramebuffer)(device, framebuffer, NULL); + KVF_GET_DEVICE_FUNCTION(vkDestroyFramebuffer)(device, framebuffer, kvf_device->callbacks); // Shift the elements to fill the gap for(size_t j = i; j < __kvf_internal_framebuffers_size - 1; j++) __kvf_internal_framebuffers[j] = __kvf_internal_framebuffers[j + 1]; @@ -1802,15 +1811,13 @@ int32_t kvfFindDeviceQueueFamily(VkPhysicalDevice physical, KvfQueueType type) VkFence kvfCreateFence(VkDevice device) { KVF_ASSERT(device != VK_NULL_HANDLE); - #ifdef KVF_IMPL_VK_NO_PROTOTYPES - __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); - KVF_ASSERT(kvf_device != NULL); - #endif + __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); + KVF_ASSERT(kvf_device != NULL); VkFenceCreateInfo fence_info = {}; fence_info.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO; fence_info.flags = VK_FENCE_CREATE_SIGNALED_BIT; VkFence fence; - __kvfCheckVk(KVF_GET_DEVICE_FUNCTION(vkCreateFence)(device, &fence_info, NULL, &fence)); + __kvfCheckVk(KVF_GET_DEVICE_FUNCTION(vkCreateFence)(device, &fence_info, kvf_device->callbacks, &fence)); return fence; } @@ -1830,24 +1837,20 @@ void kvfDestroyFence(VkDevice device, VkFence fence) if(fence == VK_NULL_HANDLE) return; KVF_ASSERT(device != VK_NULL_HANDLE); - #ifdef KVF_IMPL_VK_NO_PROTOTYPES - __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); - KVF_ASSERT(kvf_device != NULL); - #endif - KVF_GET_DEVICE_FUNCTION(vkDestroyFence)(device, fence, NULL); + __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); + KVF_ASSERT(kvf_device != NULL); + KVF_GET_DEVICE_FUNCTION(vkDestroyFence)(device, fence, kvf_device->callbacks); } VkSemaphore kvfCreateSemaphore(VkDevice device) { KVF_ASSERT(device != VK_NULL_HANDLE); - #ifdef KVF_IMPL_VK_NO_PROTOTYPES - __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); - KVF_ASSERT(kvf_device != NULL); - #endif + __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); + KVF_ASSERT(kvf_device != NULL); VkSemaphoreCreateInfo semaphore_info = {}; semaphore_info.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO; VkSemaphore semaphore; - __kvfCheckVk(KVF_GET_DEVICE_FUNCTION(vkCreateSemaphore)(device, &semaphore_info, NULL, &semaphore)); + __kvfCheckVk(KVF_GET_DEVICE_FUNCTION(vkCreateSemaphore)(device, &semaphore_info, kvf_device->callbacks, &semaphore)); return semaphore; } @@ -1856,11 +1859,9 @@ void kvfDestroySemaphore(VkDevice device, VkSemaphore semaphore) if(semaphore == VK_NULL_HANDLE) return; KVF_ASSERT(device != VK_NULL_HANDLE); - #ifdef KVF_IMPL_VK_NO_PROTOTYPES - __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); - KVF_ASSERT(kvf_device != NULL); - #endif - KVF_GET_DEVICE_FUNCTION(vkDestroySemaphore)(device, semaphore, NULL); + __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); + KVF_ASSERT(kvf_device != NULL); + KVF_GET_DEVICE_FUNCTION(vkDestroySemaphore)(device, semaphore, kvf_device->callbacks); } #ifndef KVF_NO_KHR @@ -1974,7 +1975,7 @@ void kvfDestroySemaphore(VkDevice device, VkSemaphore semaphore) else createInfo.imageSharingMode = VK_SHARING_MODE_EXCLUSIVE; - __kvfCheckVk(KVF_GET_DEVICE_FUNCTION(vkCreateSwapchainKHR)(device, &createInfo, NULL, &swapchain)); + __kvfCheckVk(KVF_GET_DEVICE_FUNCTION(vkCreateSwapchainKHR)(device, &createInfo, kvf_device->callbacks, &swapchain)); uint32_t images_count; KVF_GET_DEVICE_FUNCTION(vkGetSwapchainImagesKHR)(device, swapchain, (uint32_t*)&images_count, NULL); @@ -2028,10 +2029,8 @@ void kvfDestroySemaphore(VkDevice 
device, VkSemaphore semaphore) VkImage kvfCreateImage(VkDevice device, uint32_t width, uint32_t height, VkFormat format, VkImageTiling tiling, VkImageUsageFlags usage, KvfImageType type) { KVF_ASSERT(device != VK_NULL_HANDLE); - #ifdef KVF_IMPL_VK_NO_PROTOTYPES - __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); - KVF_ASSERT(kvf_device != NULL); - #endif + __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); + KVF_ASSERT(kvf_device != NULL); VkImageCreateInfo image_info = {}; image_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO; image_info.imageType = VK_IMAGE_TYPE_2D; @@ -2054,7 +2053,7 @@ VkImage kvfCreateImage(VkDevice device, uint32_t width, uint32_t height, VkForma } VkImage image; - __kvfCheckVk(KVF_GET_DEVICE_FUNCTION(vkCreateImage)(device, &image_info, NULL, &image)); + __kvfCheckVk(KVF_GET_DEVICE_FUNCTION(vkCreateImage)(device, &image_info, kvf_device->callbacks, &image)); return image; } @@ -2086,20 +2085,16 @@ void kvfDestroyImage(VkDevice device, VkImage image) if(image == VK_NULL_HANDLE) return; KVF_ASSERT(device != VK_NULL_HANDLE); - #ifdef KVF_IMPL_VK_NO_PROTOTYPES - __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); - KVF_ASSERT(kvf_device != NULL); - #endif - KVF_GET_DEVICE_FUNCTION(vkDestroyImage)(device, image, NULL); + __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); + KVF_ASSERT(kvf_device != NULL); + KVF_GET_DEVICE_FUNCTION(vkDestroyImage)(device, image, kvf_device->callbacks); } VkImageView kvfCreateImageView(VkDevice device, VkImage image, VkFormat format, VkImageViewType type, VkImageAspectFlags aspect, int layer_count) { KVF_ASSERT(device != VK_NULL_HANDLE); - #ifdef KVF_IMPL_VK_NO_PROTOTYPES - __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); - KVF_ASSERT(kvf_device != NULL); - #endif + __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); + KVF_ASSERT(kvf_device != NULL); VkImageViewCreateInfo create_info = {}; create_info.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO; create_info.image = image; @@ -2115,7 +2110,7 @@ VkImageView kvfCreateImageView(VkDevice device, VkImage image, VkFormat format, create_info.subresourceRange.baseArrayLayer = 0; create_info.subresourceRange.layerCount = layer_count; VkImageView view; - __kvfCheckVk(KVF_GET_DEVICE_FUNCTION(vkCreateImageView)(device, &create_info, NULL, &view)); + __kvfCheckVk(KVF_GET_DEVICE_FUNCTION(vkCreateImageView)(device, &create_info, kvf_device->callbacks, &view)); return view; } @@ -2123,11 +2118,9 @@ void kvfDestroyImageView(VkDevice device, VkImageView image_view) { KVF_ASSERT(device != VK_NULL_HANDLE); KVF_ASSERT(image_view != VK_NULL_HANDLE); - #ifdef KVF_IMPL_VK_NO_PROTOTYPES - __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); - KVF_ASSERT(kvf_device != NULL); - #endif - KVF_GET_DEVICE_FUNCTION(vkDestroyImageView)(device, image_view, NULL); + __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); + KVF_ASSERT(kvf_device != NULL); + KVF_GET_DEVICE_FUNCTION(vkDestroyImageView)(device, image_view, kvf_device->callbacks); } void kvfTransitionImageLayout(VkDevice device, VkImage image, KvfImageType type, VkCommandBuffer cmd, VkFormat format, VkImageLayout old_layout, VkImageLayout new_layout, bool is_single_time_cmd_buffer) @@ -2193,10 +2186,8 @@ void kvfTransitionImageLayout(VkDevice device, VkImage image, KvfImageType type, VkSampler kvfCreateSampler(VkDevice device, VkFilter filters, VkSamplerAddressMode address_modes, VkSamplerMipmapMode mipmap_mode) { KVF_ASSERT(device != VK_NULL_HANDLE); - 
#ifdef KVF_IMPL_VK_NO_PROTOTYPES - __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); - KVF_ASSERT(kvf_device != NULL); - #endif + __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); + KVF_ASSERT(kvf_device != NULL); VkSamplerCreateInfo info = {}; info.sType = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO; info.magFilter = filters; @@ -2210,7 +2201,7 @@ VkSampler kvfCreateSampler(VkDevice device, VkFilter filters, VkSamplerAddressMo info.anisotropyEnable = VK_FALSE; info.maxAnisotropy = 1.0f; VkSampler sampler; - __kvfCheckVk(KVF_GET_DEVICE_FUNCTION(vkCreateSampler)(device, &info, NULL, &sampler)); + __kvfCheckVk(KVF_GET_DEVICE_FUNCTION(vkCreateSampler)(device, &info, kvf_device->callbacks, &sampler)); return sampler; } @@ -2219,27 +2210,23 @@ void kvfDestroySampler(VkDevice device, VkSampler sampler) if(sampler == VK_NULL_HANDLE) return; KVF_ASSERT(device != VK_NULL_HANDLE); - #ifdef KVF_IMPL_VK_NO_PROTOTYPES - __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); - KVF_ASSERT(kvf_device != NULL); - #endif - KVF_GET_DEVICE_FUNCTION(vkDestroySampler)(device, sampler, NULL); + __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); + KVF_ASSERT(kvf_device != NULL); + KVF_GET_DEVICE_FUNCTION(vkDestroySampler)(device, sampler, kvf_device->callbacks); } VkBuffer kvfCreateBuffer(VkDevice device, VkBufferUsageFlags usage, VkDeviceSize size) { KVF_ASSERT(device != VK_NULL_HANDLE); - #ifdef KVF_IMPL_VK_NO_PROTOTYPES - __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); - KVF_ASSERT(kvf_device != NULL); - #endif + __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); + KVF_ASSERT(kvf_device != NULL); VkBufferCreateInfo buffer_info = {}; buffer_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO; buffer_info.size = size; buffer_info.usage = usage; buffer_info.sharingMode = VK_SHARING_MODE_EXCLUSIVE; VkBuffer buffer; - __kvfCheckVk(KVF_GET_DEVICE_FUNCTION(vkCreateBuffer)(device, &buffer_info, NULL, &buffer)); + __kvfCheckVk(KVF_GET_DEVICE_FUNCTION(vkCreateBuffer)(device, &buffer_info, kvf_device->callbacks, &buffer)); return buffer; } @@ -2285,22 +2272,17 @@ void kvfDestroyBuffer(VkDevice device, VkBuffer buffer) if(buffer != VK_NULL_HANDLE) return; KVF_ASSERT(device != VK_NULL_HANDLE); - #ifdef KVF_IMPL_VK_NO_PROTOTYPES - __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); - KVF_ASSERT(kvf_device != NULL); - #endif - KVF_GET_DEVICE_FUNCTION(vkDestroyBuffer)(device, buffer, NULL); + __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); + KVF_ASSERT(kvf_device != NULL); + KVF_GET_DEVICE_FUNCTION(vkDestroyBuffer)(device, buffer, kvf_device->callbacks); } VkFramebuffer kvfCreateFramebuffer(VkDevice device, VkRenderPass render_pass, VkImageView* image_views, size_t image_views_count, VkExtent2D extent) { KVF_ASSERT(device != VK_NULL_HANDLE); KVF_ASSERT(image_views != NULL); - #ifdef KVF_IMPL_VK_NO_PROTOTYPES - __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); - KVF_ASSERT(kvf_device != NULL); - #endif - + __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); + KVF_ASSERT(kvf_device != NULL); VkFramebufferCreateInfo framebuffer_info = {}; framebuffer_info.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO; framebuffer_info.renderPass = render_pass; @@ -2310,7 +2292,7 @@ VkFramebuffer kvfCreateFramebuffer(VkDevice device, VkRenderPass render_pass, Vk framebuffer_info.height = extent.height; framebuffer_info.layers = 1; VkFramebuffer framebuffer = VK_NULL_HANDLE; - 
__kvfCheckVk(KVF_GET_DEVICE_FUNCTION(vkCreateFramebuffer)(device, &framebuffer_info, NULL, &framebuffer)); + __kvfCheckVk(KVF_GET_DEVICE_FUNCTION(vkCreateFramebuffer)(device, &framebuffer_info, kvf_device->callbacks, &framebuffer)); __kvfAddFramebufferToArray(framebuffer, extent); return framebuffer; } @@ -2545,10 +2527,8 @@ VkRenderPass kvfCreateRenderPassWithSubpassDependencies(VkDevice device, VkAttac } } - #ifdef KVF_IMPL_VK_NO_PROTOTYPES - __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); - KVF_ASSERT(kvf_device != NULL); - #endif + __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); + KVF_ASSERT(kvf_device != NULL); VkSubpassDescription subpass = {}; subpass.pipelineBindPoint = bind_point; @@ -2566,7 +2546,7 @@ VkRenderPass kvfCreateRenderPassWithSubpassDependencies(VkDevice device, VkAttac renderpass_create_info.pDependencies = dependencies; VkRenderPass render_pass = VK_NULL_HANDLE; - __kvfCheckVk(KVF_GET_DEVICE_FUNCTION(vkCreateRenderPass)(device, &renderpass_create_info, NULL, &render_pass)); + __kvfCheckVk(KVF_GET_DEVICE_FUNCTION(vkCreateRenderPass)(device, &renderpass_create_info, kvf_device->callbacks, &render_pass)); KVF_FREE(color_references); KVF_FREE(depth_references); return render_pass; @@ -2577,11 +2557,9 @@ void kvfDestroyRenderPass(VkDevice device, VkRenderPass renderPass) if(renderPass == VK_NULL_HANDLE) return; KVF_ASSERT(device != VK_NULL_HANDLE); - #ifdef KVF_IMPL_VK_NO_PROTOTYPES - __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); - KVF_ASSERT(kvf_device != NULL); - #endif - KVF_GET_DEVICE_FUNCTION(vkDestroyRenderPass)(device, renderPass, NULL); + __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); + KVF_ASSERT(kvf_device != NULL); + KVF_GET_DEVICE_FUNCTION(vkDestroyRenderPass)(device, renderPass, kvf_device->callbacks); } void kvfBeginRenderPass(VkRenderPass pass, VkCommandBuffer cmd, VkFramebuffer framebuffer, VkExtent2D framebuffer_extent, VkClearValue* clears, size_t clears_count) @@ -2608,16 +2586,14 @@ void kvfBeginRenderPass(VkRenderPass pass, VkCommandBuffer cmd, VkFramebuffer fr VkShaderModule kvfCreateShaderModule(VkDevice device, uint32_t* code, size_t size) { KVF_ASSERT(device != VK_NULL_HANDLE); - #ifdef KVF_IMPL_VK_NO_PROTOTYPES - __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); - KVF_ASSERT(kvf_device != NULL); - #endif + __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); + KVF_ASSERT(kvf_device != NULL); VkShaderModuleCreateInfo createInfo = {}; createInfo.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO; createInfo.codeSize = size * sizeof(uint32_t); createInfo.pCode = code; VkShaderModule shader = VK_NULL_HANDLE; - __kvfCheckVk(KVF_GET_DEVICE_FUNCTION(vkCreateShaderModule)(device, &createInfo, NULL, &shader)); + __kvfCheckVk(KVF_GET_DEVICE_FUNCTION(vkCreateShaderModule)(device, &createInfo, kvf_device->callbacks, &shader)); return shader; } @@ -2626,27 +2602,23 @@ void kvfDestroyShaderModule(VkDevice device, VkShaderModule shader) if(shader == VK_NULL_HANDLE) return; KVF_ASSERT(device != VK_NULL_HANDLE); - #ifdef KVF_IMPL_VK_NO_PROTOTYPES - __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); - KVF_ASSERT(kvf_device != NULL); - #endif - KVF_GET_DEVICE_FUNCTION(vkDestroyShaderModule)(device, shader, NULL); + __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); + KVF_ASSERT(kvf_device != NULL); + KVF_GET_DEVICE_FUNCTION(vkDestroyShaderModule)(device, shader, kvf_device->callbacks); } VkDescriptorSetLayout 
kvfCreateDescriptorSetLayout(VkDevice device, VkDescriptorSetLayoutBinding* bindings, size_t bindings_count) { KVF_ASSERT(device != VK_NULL_HANDLE); - #ifdef KVF_IMPL_VK_NO_PROTOTYPES - __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); - KVF_ASSERT(kvf_device != NULL); - #endif + __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); + KVF_ASSERT(kvf_device != NULL); VkDescriptorSetLayoutCreateInfo layout_info = {}; layout_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO; layout_info.bindingCount = bindings_count; layout_info.pBindings = bindings; VkDescriptorSetLayout layout; - __kvfCheckVk(KVF_GET_DEVICE_FUNCTION(vkCreateDescriptorSetLayout)(device, &layout_info, NULL, &layout)); + __kvfCheckVk(KVF_GET_DEVICE_FUNCTION(vkCreateDescriptorSetLayout)(device, &layout_info, kvf_device->callbacks, &layout)); return layout; } @@ -2655,11 +2627,9 @@ void kvfDestroyDescriptorSetLayout(VkDevice device, VkDescriptorSetLayout layout if(layout == VK_NULL_HANDLE) return; KVF_ASSERT(device != VK_NULL_HANDLE); - #ifdef KVF_IMPL_VK_NO_PROTOTYPES - __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); - KVF_ASSERT(kvf_device != NULL); - #endif - KVF_GET_DEVICE_FUNCTION(vkDestroyDescriptorSetLayout)(device, layout, NULL); + __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); + KVF_ASSERT(kvf_device != NULL); + KVF_GET_DEVICE_FUNCTION(vkDestroyDescriptorSetLayout)(device, layout, kvf_device->callbacks); } VkDescriptorSet kvfAllocateDescriptorSet(VkDevice device, VkDescriptorSetLayout layout) @@ -2766,10 +2736,8 @@ VkWriteDescriptorSet kvfWriteImageToDescriptorSet(VkDevice device, VkDescriptorS VkPipelineLayout kvfCreatePipelineLayout(VkDevice device, VkDescriptorSetLayout* set_layouts, size_t set_layouts_count, VkPushConstantRange* pc, size_t pc_count) { KVF_ASSERT(device != VK_NULL_HANDLE); - #ifdef KVF_IMPL_VK_NO_PROTOTYPES - __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); - KVF_ASSERT(kvf_device != NULL); - #endif + __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); + KVF_ASSERT(kvf_device != NULL); VkPipelineLayoutCreateInfo pipeline_layout_info = {}; pipeline_layout_info.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO; pipeline_layout_info.setLayoutCount = set_layouts_count; @@ -2778,7 +2746,7 @@ VkPipelineLayout kvfCreatePipelineLayout(VkDevice device, VkDescriptorSetLayout* pipeline_layout_info.pPushConstantRanges = pc; VkPipelineLayout layout; - __kvfCheckVk(KVF_GET_DEVICE_FUNCTION(vkCreatePipelineLayout)(device, &pipeline_layout_info, NULL, &layout)); + __kvfCheckVk(KVF_GET_DEVICE_FUNCTION(vkCreatePipelineLayout)(device, &pipeline_layout_info, kvf_device->callbacks, &layout)); return layout; } @@ -2787,11 +2755,9 @@ void kvfDestroyPipelineLayout(VkDevice device, VkPipelineLayout layout) if(layout == VK_NULL_HANDLE) return; KVF_ASSERT(device != VK_NULL_HANDLE); - #ifdef KVF_IMPL_VK_NO_PROTOTYPES - __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); - KVF_ASSERT(kvf_device != NULL); - #endif - KVF_GET_DEVICE_FUNCTION(vkDestroyPipelineLayout)(device, layout, NULL); + __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); + KVF_ASSERT(kvf_device != NULL); + KVF_GET_DEVICE_FUNCTION(vkDestroyPipelineLayout)(device, layout, kvf_device->callbacks); } void kvfResetDeviceDescriptorPools(VkDevice device) @@ -2955,9 +2921,9 @@ void kvfGPipelineBuilderAddShaderStage(KvfGraphicsPipelineBuilder* builder, VkSh { KVF_ASSERT(builder != NULL); builder->shader_stages = 
(VkPipelineShaderStageCreateInfo*)KVF_REALLOC(builder->shader_stages, sizeof(VkPipelineShaderStageCreateInfo) * (builder->shader_stages_count + 1)); - KVF_ASSERT(builder->shader_stages != NULL); + KVF_ASSERT(builder->shader_stages != NULL && "allocation failed :("); memset(&builder->shader_stages[builder->shader_stages_count], 0, sizeof(VkPipelineShaderStageCreateInfo)); - char* entry_ptr = (char*)KVF_MALLOC(strlen(entry)); + char* entry_ptr = (char*)KVF_MALLOC(strlen(entry) + 1); KVF_ASSERT(entry_ptr != NULL && "allocation failed :("); strcpy(entry_ptr, entry); builder->shader_stages[builder->shader_stages_count].sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO; @@ -3026,23 +2992,19 @@ VkPipeline kvfCreateGraphicsPipeline(VkDevice device, VkPipelineCache cache, VkP pipeline_info.basePipelineHandle = VK_NULL_HANDLE; pipeline_info.pDepthStencilState = &builder->depth_stencil_state; - #ifdef KVF_IMPL_VK_NO_PROTOTYPES - __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); - KVF_ASSERT(kvf_device != NULL); - #endif + __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); + KVF_ASSERT(kvf_device != NULL); VkPipeline pipeline; - __kvfCheckVk(KVF_GET_DEVICE_FUNCTION(vkCreateGraphicsPipelines)(device, cache, 1, &pipeline_info, NULL, &pipeline)); + __kvfCheckVk(KVF_GET_DEVICE_FUNCTION(vkCreateGraphicsPipelines)(device, cache, 1, &pipeline_info, kvf_device->callbacks, &pipeline)); return pipeline; } void kvfDestroyPipeline(VkDevice device, VkPipeline pipeline) { KVF_ASSERT(device != VK_NULL_HANDLE); - #ifdef KVF_IMPL_VK_NO_PROTOTYPES - __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); - KVF_ASSERT(kvf_device != NULL); - #endif - KVF_GET_DEVICE_FUNCTION(vkDestroyPipeline)(device, pipeline, NULL); + __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); + KVF_ASSERT(kvf_device != NULL); + KVF_GET_DEVICE_FUNCTION(vkDestroyPipeline)(device, pipeline, kvf_device->callbacks); } #endif // KVF_IMPLEMENTATION From f5333882c614096cb7f111e3efbbc1c70392c7d7 Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Sat, 14 Dec 2024 01:39:50 +0100 Subject: [PATCH 090/131] working on garbage collection, fixing small issues with font destroy --- .gitignore | 2 +- runtime/Includes/Core/Memory.h | 14 ++++- runtime/Includes/PreCompiled.h | 4 +- runtime/Sources/Core/Application.cpp | 1 - runtime/Sources/Core/Memory.cpp | 76 ++++++++++++++++++++----- runtime/Sources/Graphics/Font.cpp | 6 +- runtime/Sources/Renderer/Image.cpp | 8 +-- runtime/Sources/Renderer/RenderCore.cpp | 4 +- runtime/Sources/Renderer/Swapchain.cpp | 1 + 9 files changed, 90 insertions(+), 26 deletions(-) diff --git a/.gitignore b/.gitignore index 82d65a8..9c8cb76 100644 --- a/.gitignore +++ b/.gitignore @@ -14,7 +14,7 @@ *.gch *.pch *.exe -*vgcore +*vgcore.* *.gdb_history .vs/ .xmake/ diff --git a/runtime/Includes/Core/Memory.h b/runtime/Includes/Core/Memory.h index 91b36c1..891baba 100644 --- a/runtime/Includes/Core/Memory.h +++ b/runtime/Includes/Core/Memory.h @@ -11,7 +11,9 @@ namespace mlx static void* Malloc(std::size_t size); static void* AlignedMalloc(std::size_t alignment, std::size_t size); static void* Calloc(std::size_t n, std::size_t size); + static void* AlignedCalloc(std::size_t alignment, std::size_t n, std::size_t size); static void* Realloc(void* ptr, std::size_t size); + static void* AlignedRealloc(void* ptr, std::size_t alignment, std::size_t size); static void Free(void* ptr); inline static bool IsInit() noexcept { return s_instance != nullptr; } @@ -19,9 +21,19 @@ namespace mlx 
~MemManager(); + private: + struct Descriptor + { + void* ptr; + std::size_t size; + bool aligned; + + Descriptor(void* ptr, std::size_t size, bool aligned) : ptr(ptr), size(size), aligned(aligned) {} + }; + private: static MemManager* s_instance; - inline static std::vector s_blocks; + inline static std::vector s_blocks; }; } diff --git a/runtime/Includes/PreCompiled.h b/runtime/Includes/PreCompiled.h index 2f17519..8d99dfc 100644 --- a/runtime/Includes/PreCompiled.h +++ b/runtime/Includes/PreCompiled.h @@ -68,12 +68,15 @@ #endif #include +#include #define VMA_STATIC_VULKAN_FUNCTIONS 0 #define VMA_DYNAMIC_VULKAN_FUNCTIONS 0 #define VMA_VULKAN_VERSION 1000000 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (mlx::MemManager::AlignedMalloc(alignment, size)) #define VMA_SYSTEM_ALIGNED_FREE(ptr) (mlx::MemManager::Free(ptr)) +//#define VMA_ASSERT(expr) (mlx::Assert(expr, "VMA Assertion: " #expr)) +#define VMA_ASSERT(expr) ((void)0) #define VMA_ASSERT_LEAK(expr) ((void)0) // Because why not #ifdef MLX_COMPILER_CLANG @@ -104,7 +107,6 @@ #endif #include -#include #include #include #include diff --git a/runtime/Sources/Core/Application.cpp b/runtime/Sources/Core/Application.cpp index ce32c96..3195ba2 100644 --- a/runtime/Sources/Core/Application.cpp +++ b/runtime/Sources/Core/Application.cpp @@ -91,7 +91,6 @@ namespace mlx if(gs) gs->TryEraseSpritesInScene(texture); } - m_image_registry.UnregisterTexture(texture); delete texture; } diff --git a/runtime/Sources/Core/Memory.cpp b/runtime/Sources/Core/Memory.cpp index c3eedbe..1dcb660 100644 --- a/runtime/Sources/Core/Memory.cpp +++ b/runtime/Sources/Core/Memory.cpp @@ -15,15 +15,24 @@ namespace mlx { void* ptr = std::malloc(size); if(ptr != nullptr) - s_blocks.push_back(ptr); + s_blocks.emplace_back(ptr, size, false); return ptr; } void* MemManager::AlignedMalloc(std::size_t alignment, std::size_t size) { - void* ptr = std::aligned_alloc(alignment, size); + if(alignment < sizeof(void*)) + alignment = sizeof(void*); + if(size % alignment != 0) + size += alignment - (size % alignment); + + #ifdef MLX_COMPILER_MSVC + void* ptr = _aligned_malloc(alignment, size); + #else + void* ptr = std::aligned_alloc(alignment, size); + #endif if(ptr != nullptr) - s_blocks.push_back(ptr); + s_blocks.emplace_back(ptr, size, true); return ptr; } @@ -31,7 +40,14 @@ namespace mlx { void* ptr = std::calloc(n, size); if(ptr != nullptr) - s_blocks.push_back(ptr); + s_blocks.emplace_back(ptr, n * size, false); + return ptr; + } + + void* MemManager::AlignedCalloc(std::size_t alignment, std::size_t n, std::size_t size) + { + void* ptr = AlignedMalloc(alignment, n * size); + std::memset(ptr, 0, n * size); return ptr; } @@ -39,33 +55,65 @@ namespace mlx { void* ptr2 = std::realloc(ptr, size); if(ptr2 != nullptr) - s_blocks.push_back(ptr2); - auto it = std::find(s_blocks.begin(), s_blocks.end(), ptr); + s_blocks.emplace_back(ptr, size, false); + auto it = std::find_if(s_blocks.begin(), s_blocks.end(), [=](const Descriptor& rhs){ return ptr == rhs.ptr; }); if(it != s_blocks.end()) s_blocks.erase(it); return ptr2; } + void* MemManager::AlignedRealloc(void* ptr, std::size_t alignment, std::size_t size) + { + auto it = std::find_if(s_blocks.begin(), s_blocks.end(), [=](const Descriptor& rhs){ return ptr == rhs.ptr; }); + + #ifdef MLX_COMPILER_MSVC + void* ptr2 = _aligned_realloc(ptr, alignment, size); + #else + void* ptr2 = AlignedMalloc(alignment, size); + if(it != s_blocks.end()) + std::memcpy(ptr2, ptr, it->size); + #endif + + if(it != s_blocks.end()) + s_blocks.erase(it); + + 
if(ptr2 != nullptr) + s_blocks.emplace_back(ptr, size, true); + return ptr2; + } + void MemManager::Free(void* ptr) { if(ptr == nullptr) return; - auto it = std::find(s_blocks.begin(), s_blocks.end(), ptr); + auto it = std::find_if(s_blocks.begin(), s_blocks.end(), [=](const Descriptor& rhs){ return ptr == rhs.ptr; }); if(it == s_blocks.end()) - { - Error("Memory Manager: trying to free a pointer not allocated by the memory manager"); return; - } - std::free(*it); + #ifdef MLX_COMPILER_MSVC + if(it->aligned) + _aligned_free(it->ptr); + else + std::free(it->ptr); + #else + std::free(it->ptr); + #endif s_blocks.erase(it); } MemManager::~MemManager() { - std::for_each(s_blocks.begin(), s_blocks.end(), [](void* ptr) + for(const Descriptor& desc : s_blocks) { - std::free(ptr); - }); + #ifdef MLX_COMPILER_MSVC + if(it->aligned) + _aligned_free(desc.ptr); + else + std::free(desc.ptr); + #else + std::free(desc.ptr); + #endif + } + DebugLog("Memory Manager: finished garbage collection"); s_instance = nullptr; } } diff --git a/runtime/Sources/Graphics/Font.cpp b/runtime/Sources/Graphics/Font.cpp index 339aa62..b871dec 100644 --- a/runtime/Sources/Graphics/Font.cpp +++ b/runtime/Sources/Graphics/Font.cpp @@ -8,8 +8,8 @@ #include #define STB_TRUETYPE_IMPLEMENTATION -#define STB_malloc(x, u) ((void)(u), mlx::MemManager::Get().Malloc(x)) -#define STB_free(x, u) ((void)(u), mlx::MemManager::Get().Free(x)) +#define STB_malloc(x, u) ((void)(u), mlx::MemManager::Malloc(x)) +#define STB_free(x, u) ((void)(u), mlx::MemManager::Free(x)) #include namespace mlx @@ -64,6 +64,8 @@ namespace mlx void Font::Destroy() { + if(!m_atlas.IsInit()) + return; m_atlas.Destroy(); DebugLog("Font: unloaded % with a scale of %", m_name, m_scale); } diff --git a/runtime/Sources/Renderer/Image.cpp b/runtime/Sources/Renderer/Image.cpp index f4f1276..77b6539 100644 --- a/runtime/Sources/Renderer/Image.cpp +++ b/runtime/Sources/Renderer/Image.cpp @@ -9,9 +9,9 @@ #define STB_IMAGE_IMPLEMENTATION #define STBI_ASSERT(x) (mlx::Assert(x, "internal stb assertion " #x)) -#define STBI_MALLOC(x) (mlx::MemManager::Get().Malloc(x)) -#define STBI_REALLOC(p, x) (mlx::MemManager::Get().Realloc(p, x)) -#define STBI_FREE(x) (mlx::MemManager::Get().Free(x)) +#define STBI_MALLOC(x) (mlx::MemManager::Malloc(x)) +#define STBI_REALLOC(p, x) (mlx::MemManager::Realloc(p, x)) +#define STBI_FREE(x) (mlx::MemManager::Free(x)) #ifdef MLX_COMPILER_GCC #pragma GCC diagnostic push @@ -295,7 +295,7 @@ namespace mlx int channels; std::uint8_t* data = stbi_load(filename.c_str(), &size.x, &size.y, &channels, 4); - CallOnExit defer([=]() { stbi_image_free(data); }); + CallOnExit defer([&]() { stbi_image_free(data); }); CPUBuffer buffer(size.x * size.y * 4); std::memcpy(buffer.GetData(), data, buffer.GetSize()); diff --git a/runtime/Sources/Renderer/RenderCore.cpp b/runtime/Sources/Renderer/RenderCore.cpp index 5d49820..510801b 100644 --- a/runtime/Sources/Renderer/RenderCore.cpp +++ b/runtime/Sources/Renderer/RenderCore.cpp @@ -59,9 +59,9 @@ namespace mlx return MemManager::AlignedMalloc(alignment, size); } - void* VulkanReallocationFunction(void*, void* ptr, std::size_t size, std::size_t, VkSystemAllocationScope) + void* VulkanReallocationFunction(void*, void* ptr, std::size_t size, std::size_t alignment, VkSystemAllocationScope) { - return MemManager::Realloc(ptr, size); + return MemManager::AlignedRealloc(ptr, alignment, size); } void VulkanFreeFunction(void*, void* ptr) diff --git a/runtime/Sources/Renderer/Swapchain.cpp b/runtime/Sources/Renderer/Swapchain.cpp 
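The reallocation hook above is why MemManager now needs AlignedRealloc: Vulkan passes the requested alignment to the reallocation callback, and forwarding to a plain realloc would silently drop it. One portability note for the MSVC path: _aligned_malloc and _aligned_realloc take (size, alignment), the reverse of std::aligned_alloc's (alignment, size), so the argument order deserves a second look when switching between the two branches. For context, a minimal sketch of how such hooks are handed to Vulkan through a VkAllocationCallbacks structure; AllocationHook, ReallocationHook, FreeHook and BuildAllocationCallbacks are names assumed for illustration, and the include path of Memory.h is a guess:

#include <vulkan/vulkan.h>
#include <cstddef>
#include <Core/Memory.h> // assumed include path for mlx::MemManager

// Sketch only: mirrors the hooks defined in RenderCore.cpp and shows how they
// plug into the allocator structure that vkCreate*/vkDestroy* calls accept.
static void* AllocationHook(void*, std::size_t size, std::size_t alignment, VkSystemAllocationScope)
{
	return mlx::MemManager::AlignedMalloc(alignment, size);
}

static void* ReallocationHook(void*, void* ptr, std::size_t size, std::size_t alignment, VkSystemAllocationScope)
{
	return mlx::MemManager::AlignedRealloc(ptr, alignment, size);
}

static void FreeHook(void*, void* ptr)
{
	mlx::MemManager::Free(ptr);
}

static VkAllocationCallbacks BuildAllocationCallbacks()
{
	VkAllocationCallbacks callbacks = {};
	callbacks.pUserData = nullptr;
	callbacks.pfnAllocation = AllocationHook;
	callbacks.pfnReallocation = ReallocationHook; // must honour alignment, hence AlignedRealloc
	callbacks.pfnFree = FreeHook;
	callbacks.pfnInternalAllocation = nullptr; // notification-only hooks, not needed here
	callbacks.pfnInternalFree = nullptr;
	return callbacks;
}

A pointer to the resulting structure is what ends up as the pAllocator argument of the Vulkan create/destroy calls; the kvf_device->callbacks plumbing in the KVF hunks earlier serves the same purpose.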
index d598120..bc414bb 100644 --- a/runtime/Sources/Renderer/Swapchain.cpp +++ b/runtime/Sources/Renderer/Swapchain.cpp @@ -102,6 +102,7 @@ namespace mlx VkFence fence = kvfCreateFence(RenderCore::Get().GetDevice()); kvfSubmitSingleTimeCommandBuffer(RenderCore::Get().GetDevice(), cmd, KVF_GRAPHICS_QUEUE, fence); kvfDestroyFence(RenderCore::Get().GetDevice(), fence); + m_resize = false; DebugLog("Vulkan: swapchain created"); } } From 0622684e40664f1fe7006876fcf5c78f0d78d039 Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Sat, 14 Dec 2024 16:38:44 +0100 Subject: [PATCH 091/131] rewritting parts of the API; ci skip --- includes/mlx.h | 316 ++++++++++++++++++++-------------------- includes/mlx_extended.h | 171 ++++++++++++++++++++++ 2 files changed, 328 insertions(+), 159 deletions(-) create mode 100644 includes/mlx_extended.h diff --git a/includes/mlx.h b/includes/mlx.h index 3c03052..cbd58a6 100644 --- a/includes/mlx.h +++ b/includes/mlx.h @@ -6,15 +6,15 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/04 16:56:35 by maldavid #+# #+# */ -/* Updated: 2024/12/04 17:52:23 by maldavid ### ########.fr */ +/* Updated: 2024/12/14 16:33:17 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ // MacroLibX official repo https://github.com/seekrs/MacroLibX // MacroLibX official website https://macrolibx.kbz8.me/ -#ifndef __MACRO_LIB_X_H__ -#define __MACRO_LIB_X_H__ +#ifndef MACROLIB_X_H +#define MACROLIB_X_H #include "mlx_profile.h" @@ -22,7 +22,7 @@ extern "C" { #endif -typedef enum +typedef enum mlx_event_type { MLX_KEYDOWN = 0, MLX_KEYUP = 1, @@ -32,6 +32,23 @@ typedef enum MLX_WINDOW_EVENT = 5 } mlx_event_type; +/** + * @brief Descriptor structure for window creation + */ +typedef struct mlx_window_create_info +{ + void* mlx_extension; + const char* title; + int width; + int height; + bool is_fullscreen; + bool is_resizable; +} mlx_window_create_info; + + + + /* MLX backend related functions */ + /** * @brief Initializes the MLX internal application * @@ -40,48 +57,130 @@ typedef enum MLX_API void* mlx_init(); /** - * @brief Creates a new window + * @brief Caps the FPS * * @param mlx Internal MLX application - * @param w Width of the window - * @param h Height of the window - * @param title Title of the window - * - * @return (void*) An opaque pointer to the internal MLX window or NULL (0x0) in case of error + * @param fps The FPS cap */ -MLX_API void* mlx_new_window(void* mlx, int w, int h, const char* title); +MLX_API void mlx_set_fps_goal(void* mlx, int fps); /** - * @brief Creates a new resizable window + * @brief Destroy internal MLX application * * @param mlx Internal MLX application - * @param w Width of the window - * @param h Height of the window - * @param title Title of the window - * - * @return (void*) An opaque pointer to the internal MLX window or NULL (0x0) in case of error */ -MLX_API void* mlx_new_resizable_window(void* mlx, int w, int h, const char* title); +MLX_API void mlx_destroy_display(void* mlx); + + + + /* Window related functions */ + /** * @brief Creates a new window * * @param mlx Internal MLX application + * @param info Pointer to a descriptor structure + * + * @return (void*) An opaque pointer to the internal MLX window or NULL (0x0) in case of error + */ +MLX_API void* mlx_new_window(void* mlx, const mlx_window_create_info* info); + +/** + * @brief Destroys internal window + * + * @param mlx Internal MLX application + * @param win Internal window + */ +MLX_API void 
mlx_destroy_window(void* mlx, void* win); + +/** + * @brief Sets window position + * + * @param mlx Internal MLX application * @param win Internal window to move * @param x New x position * @param y New y position - * */ MLX_API void mlx_set_window_position(void *mlx, void *win, int x, int y); +/** + * @brief Sets window size + * + * @param mlx Internal MLX application + * @param win Internal window to move + * @param width New width + * @param height New height + */ +MLX_API void mlx_set_window_size(void *mlx, void *win, int width, int height); + +/** + * @brief Sets window title + * + * @param mlx Internal MLX application + * @param win Internal window to move + * @param title New title + */ +MLX_API void mlx_set_window_title(void *mlx, void *win, const char* title); + +/** + * @brief Enables/Disables window fullscreen mode + * + * @param mlx Internal MLX application + * @param win Internal window to move + * @param enable Switch or not to fullscreen + */ +MLX_API void mlx_set_window_fullscreen(void *mlx, void *win, bool enable); + +/** + * @brief Gets window position + * + * @param mlx Internal MLX application + * @param win Internal window to move + * @param x Pointers to get position of the window + * @param y Pointers to get position of the window + */ +MLX_API void mlx_get_window_position(void *mlx, void *win, int* x, int* y); + +/** + * @brief Gets window size + * + * @param mlx Internal MLX application + * @param win Internal window to move + * @param x Pointers to get size of the window + * @param y Pointers to get size of the window + */ +MLX_API void mlx_get_window_size(void *mlx, void *win, int* x, int* y); + +/** + * @brief Clears the given window (resets all rendered data) + * + * @param mlx Internal MLX application + * @param win Internal window + */ +MLX_API void mlx_clear_window(void* mlx, void* win, int color); + +/** + * @brief Get the size of the screen the given window is on + * + * @param mlx Internal MLX application + * @param win Internal window to choose screen the window is on + * @param w Get width size + * @param h Get height size + */ +MLX_API void mlx_get_screens_size(void* mlx, void* win, int* w, int* h); + + + + /* Loop related functions */ + + /** * @brief Gives a function to be executed at each loop turn * * @param mlx Internal MLX application * @param f The function * @param param Param to give to the function passed - * - * @return (void) */ MLX_API void mlx_loop_hook(void* mlx, int (*f)(void*), void* param); @@ -89,8 +188,6 @@ MLX_API void mlx_loop_hook(void* mlx, int (*f)(void*), void* param); * @brief Starts the internal main loop * * @param mlx Internal MLX application - * - * @return (void) */ MLX_API void mlx_loop(void* mlx); @@ -98,22 +195,21 @@ MLX_API void mlx_loop(void* mlx); * @brief Ends the internal run loop * * @param mlx Internal MLX application - * - * @return (void) */ MLX_API void mlx_loop_end(void* mlx); + + + /* Events related functions */ + + /** * @brief Shows mouse cursor - * - * @return (void) */ MLX_API void mlx_mouse_show(); /** * @brief Hides mouse cursor - * - * @return (void) */ MLX_API void mlx_mouse_hide(); @@ -124,8 +220,6 @@ MLX_API void mlx_mouse_hide(); * @param win Internal window from which cursor moves * @param x X coordinate * @param y Y coordinate - * - * @return (void) */ MLX_API void mlx_mouse_move(void* mlx, void* win, int x, int y); @@ -135,8 +229,6 @@ MLX_API void mlx_mouse_move(void* mlx, void* win, int x, int y); * @param mlx Internal MLX application * @param x Get x coordinate * @param y Get y coordinate - 
* - * @return (void) */ MLX_API void mlx_mouse_get_pos(void* mlx, int* x, int* y); @@ -148,11 +240,14 @@ MLX_API void mlx_mouse_get_pos(void* mlx, int* x, int* y); * @param event Event type (see union on top of this file) * @param f Function to be executed * @param param Parameter given to the function - * - * @return (void) */ MLX_API void mlx_on_event(void* mlx, void* win, mlx_event_type event, int (*f)(int, void*), void* param); + + + /* Pixels drawing related functions */ + + /** * @brief Put a pixel in the window * @@ -160,15 +255,15 @@ MLX_API void mlx_on_event(void* mlx, void* win, mlx_event_type event, int (*f)(i * @param win Internal window * @param x X coordinate * @param y Y coordinate - * @param color Color of the pixel (coded on 4 bytes in an int, 0xAARRGGBB) - * - * Note : If your're reading pixel colors from an image, don't forget to shift them - * one byte to the right as image pixels are encoded as 0xRRGGBBAA and pixel put takes 0xAARRGGBB. - * - * @return (void) + * @param color Color of the pixel (coded on 4 bytes in an int, 0xRRGGBBAA) */ MLX_API void mlx_pixel_put(void* mlx, void* win, int x, int y, int color); + + + /* Images related functions */ + + /** * @brief Create a new empty image * @@ -180,6 +275,26 @@ MLX_API void mlx_pixel_put(void* mlx, void* win, int x, int y, int color); */ MLX_API void* mlx_new_image(void* mlx, int width, int height); +/** + * @brief Create a new image from a png/jpg/bmp file + * + * @param mlx Internal MLX application + * @param filename Path to the png file + * @param width Get the width of the image + * @param heigth Get the height of the image + * + * @return (void*) An opaque pointer to the internal image or NULL (0x0) in case of error + */ +MLX_API void* mlx_new_image_from_file(void* mlx, char* filename, int* width, int* height); + +/** + * @brief Destroys internal image + * + * @param mlx Internal MLX application + * @param img Internal image + */ +MLX_API void mlx_destroy_image(void* mlx, void* img); + /** * @brief Get image pixel data * @@ -209,8 +324,6 @@ MLX_API int mlx_get_image_pixel(void* mlx, void* img, int x, int y); * @param y Y coordinate in the image * @param color Color of the pixel to set * - * @return (void) - * * /!\ If you run into glitches when writing or reading pixels from images /!\ * You need to add IMAGES_OPTIMIZED=false to your make mlx command * ``` @@ -229,71 +342,13 @@ MLX_API void mlx_set_image_pixel(void* mlx, void* img, int x, int y, int color); * @param img Internal image * @param x X coordinate * @param y Y coordinate - * - * @return (void) */ MLX_API void mlx_put_image_to_window(void* mlx, void* win, void* img, int x, int y); -/** - * @brief Transform and put image to the given window - * - * @param mlx Internal MLX application - * @param win Internal window - * @param img Internal image - * @param x X coordinate - * @param y Y coordinate - * @param scale Scale of the image - * @param angle Rotation angle of the image (clockwise) - * - * @return (void) - */ -MLX_API void mlx_transform_put_image_to_window(void* mlx, void* win, void* img, int x, int y, float scale, float angle); -/** - * @brief Destroys internal image - * - * @param mlx Internal MLX application - * @param img Internal image - * - * @return (void) - */ -MLX_API void mlx_destroy_image(void* mlx, void* img); -/** - * @brief Create a new image from a png file - * - * @param mlx Internal MLX application - * @param filename Path to the png file - * @param width Get the width of the image - * @param heigth Get the height of the image - * - 
* @return (void*) An opaque pointer to the internal image or NULL (0x0) in case of error - */ -MLX_API void* mlx_png_file_to_image(void* mlx, char* filename, int* width, int* height); + /* Strings drawing related functions */ -/** - * @brief Create a new image from a jpg file - * - * @param mlx Internal MLX application - * @param filename Path to the jpg file - * @param width Get the width of the image - * @param heigth Get the height of the image - * - * @return (void*) An opaque pointer to the internal image or NULL (0x0) in case of error - */ -MLX_API void* mlx_jpg_file_to_image(void* mlx, char* filename, int* width, int* height); - -/** - * @brief Create a new image from a bmp file - * - * @param mlx Internal MLX application - * @param filename Path to the bmp file - * @param width Get the width of the image - * @param heigth Get the height of the image - * - * @return (void*) An opaque pointer to the internal image or NULL (0x0) in case of error - */ -MLX_API void* mlx_bmp_file_to_image(void* mlx, char* filename, int* width, int* height); /** * @brief Put text in given window @@ -304,8 +359,6 @@ MLX_API void* mlx_bmp_file_to_image(void* mlx, char* filename, int* width, int* * @param y Y coordinate * @param color Color of the pixel (coded on 4 bytes in an int, 0xAARRGGBB) * @param str Text to put - * - * @return (void) */ MLX_API void mlx_string_put(void* mlx, void* win, int x, int y, int color, char* str); @@ -315,8 +368,6 @@ MLX_API void mlx_string_put(void* mlx, void* win, int x, int y, int color, char* * @param mlx Internal MLX application * @param win Internal window * @param filepath Filepath to the font or "default" to reset to the embedded font - * - * @return (void) */ MLX_API void mlx_set_font(void* mlx, char* filepath); @@ -327,62 +378,9 @@ MLX_API void mlx_set_font(void* mlx, char* filepath); * @param win Internal window * @param filepath Filepath to the font or "default" to reset to the embedded font * @param scale Scale to apply to the font - * - * @return (void) */ MLX_API void mlx_set_font_scale(void* mlx, char* filepath, float scale); -/** - * @brief Clears the given window (resets all rendered data) - * - * @param mlx Internal MLX application - * @param win Internal window - * - * @return (void) - */ -MLX_API void mlx_clear_window(void* mlx, void* win, int color); - -/** - * @brief Destroys internal window - * - * @param mlx Internal MLX application - * @param win Internal window - * - * @return (void) - */ -MLX_API void mlx_destroy_window(void* mlx, void* win); - -/** - * @brief Destroy internal MLX application - * - * @param mlx Internal MLX application - * - * @return (void) - */ -MLX_API void mlx_destroy_display(void* mlx); - -/** - * @brief Get the size of the screen the given window is on - * - * @param mlx Internal MLX application - * @param win Internal window - * @param w Get width size - * @param h Get height size - * - * @return (void) - */ -MLX_API void mlx_get_screens_size(void* mlx, void* win, int* w, int* h); - -/** - * @brief Caps the FPS - * - * @param mlx Internal MLX application - * @param fps The FPS cap - * - * @return (void) - */ -MLX_API void mlx_set_fps_goal(void* mlx, int fps); - #ifdef __cplusplus } #endif diff --git a/includes/mlx_extended.h b/includes/mlx_extended.h new file mode 100644 index 0000000..1ceda2f --- /dev/null +++ b/includes/mlx_extended.h @@ -0,0 +1,171 @@ +/* ************************************************************************** */ +/* */ +/* ::: :::::::: */ +/* mlx_extended.h :+: :+: :+: */ +/* +:+ +:+ +:+ */ +/* By: 
maldavid +#+ +:+ +#+ */ +/* +#+#+#+#+#+ +#+ */ +/* Created: 2024/12/14 16:17:10 by maldavid #+# #+# */ +/* Updated: 2024/12/14 16:37:49 by maldavid ### ########.fr */ +/* */ +/* ************************************************************************** */ + +// MacroLibX official repo https://github.com/seekrs/MacroLibX +// MacroLibX official website https://macrolibx.kbz8.me/ + +#ifndef MACROLIB_X_EXTENDED_H +#define MACROLIB_X_EXTENDED_H + +#include "mlx_profile.h" + +#ifdef __cplusplus +extern "C" { +#endif + +typedef struct mlx_window_create_info_extension +{ + int position_x; + int position_y; + int max_width; + int max_height; + int min_width; + int min_height; +} mlx_window_create_info_extension; + + + + /* Window related functions */ + + +/** + * @brief Sets maximum window size + * + * @param mlx Internal MLX application + * @param win Internal window to move + * @param x New x maximum size + * @param y New y maximum size + */ +MLX_API void mlx_set_window_max_size(void *mlx, void *win, int x, int y); + +/** + * @brief Sets minimum window size + * + * @param mlx Internal MLX application + * @param win Internal window to move + * @param x New x minimum size + * @param y New y minimum size + */ +MLX_API void mlx_set_window_min_size(void *mlx, void *win, int x, int y); + +/** + * @brief Maximizes a window + * + * @param mlx Internal MLX application + * @param win Internal window to move + */ +MLX_API void mlx_maximise_window(void *mlx, void *win); + +/** + * @brief Minimizes a window + * + * @param mlx Internal MLX application + * @param win Internal window to move + */ +MLX_API void mlx_minimize_window(void *mlx, void *win); + +/** + * @brief Restore window to formal size + * + * @param mlx Internal MLX application + * @param win Internal window to move + */ +MLX_API void mlx_restore_window(void *mlx, void *win); + + + + /* Pixels drawing related functions */ + + +/** + * @brief Put an array of pixels in the window + * + * @param mlx Internal MLX application + * @param win Internal window + * @param x X coordinate + * @param y Y coordinate + * @param pixels Array of pixels (coded on 4 bytes in an int, 0xRRGGBBAA) + */ +MLX_API void mlx_pixel_put_array(void* mlx, void* win, int x, int y, int* pixels); + + + + /* Images related functions */ + + +/** + * @brief Get image region + * + * @param mlx Internal MLX application + * @param img Internal image + * @param x X coordinate in the image + * @param y Y coordinate in the image + * @param w Width of the region + * @param y Height of the region + * @param dst Array of pixels to copy to + * + * Note: it is responsability of the user to make sure the size of `dst` is + * big enough for the given region. + * + * /!\ If you run into glitches when writing or reading pixels from images /!\ + * You need to add IMAGES_OPTIMIZED=false to your make mlx command + * ``` + * ~ git clone https://github.com/seekrs/MacroLibX.git + * ~ cd MacroLibX + * ~ make IMAGES_OPTIMIZED=false + * ``` + */ +MLX_API void mlx_get_image_region(void* mlx, void* img, int x, int y, int w, int h, int* dst); + +/** + * @brief Set image region + * + * @param mlx Internal MLX application + * @param img Internal image + * @param x X coordinate in the image + * @param y Y coordinate in the image + * @param w Width of the region + * @param y Height of the region + * @param pixels Array of pixels to copy from + * + * Note: it is responsability of the user to make sure the size of `pixels` is + * big enough for the given region. 
+ * + * /!\ If you run into glitches when writing or reading pixels from images /!\ + * You need to add IMAGES_OPTIMIZED=false to your make mlx command + * ``` + * ~ git clone https://github.com/seekrs/MacroLibX.git + * ~ cd MacroLibX + * ~ make IMAGES_OPTIMIZED=false + * ``` + */ +MLX_API void mlx_set_image_region(void* mlx, void* img, int x, int y, int w, int h, int* pixels); + +/** + * @brief Transform and put image to the given window + * + * @param mlx Internal MLX application + * @param win Internal window + * @param img Internal image + * @param x X coordinate + * @param y Y coordinate + * @param scale_x Scale x of the image + * @param scale_y Scale y of the image + * @param angle Rotation angle of the image (clockwise) + */ +MLX_API void mlx_put_transformed_image_to_window(void* mlx, void* win, void* img, int x, int y, float scale_x, float scale_y, float angle); + +#ifdef __cplusplus +} +#endif + +#endif From 323b16f1073095d6af84b3394a77697c78adab1f Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Sun, 15 Dec 2024 03:35:17 +0100 Subject: [PATCH 092/131] reworking application and bridge --- example/main.c | 66 ++-- includes/mlx.h | 171 ++++++---- includes/mlx_extended.h | 31 +- includes/mlx_profile.h | 6 +- runtime/Includes/Core/Application.h | 28 +- runtime/Includes/Core/Application.inl | 178 +++------- runtime/Includes/Core/Graphics.h | 8 +- runtime/Includes/Core/Graphics.inl | 16 +- runtime/Includes/Core/Handles.h | 25 ++ runtime/Includes/Core/SDLManager.h | 3 +- runtime/Includes/Graphics/PutPixelManager.h | 2 +- runtime/Includes/Platform/Window.h | 3 +- runtime/Includes/PreCompiled.h | 1 + runtime/Includes/Renderer/Image.h | 4 +- runtime/Sources/Core/Application.cpp | 29 +- runtime/Sources/Core/Bridge.cpp | 335 +++++++++---------- runtime/Sources/Core/Graphics.cpp | 20 +- runtime/Sources/Core/SDLManager.cpp | 6 +- runtime/Sources/Graphics/PutPixelManager.cpp | 2 +- runtime/Sources/Platform/Window.cpp | 4 +- runtime/Sources/Renderer/Image.cpp | 24 +- runtime/Sources/Renderer/RenderCore.cpp | 7 +- 22 files changed, 464 insertions(+), 505 deletions(-) create mode 100644 runtime/Includes/Core/Handles.h diff --git a/example/main.c b/example/main.c index d32e563..205f7dc 100644 --- a/example/main.c +++ b/example/main.c @@ -3,12 +3,12 @@ typedef struct { - void* mlx; - void* win; - void* logo_png; - void* logo_jpg; - void* logo_bmp; - void* img; + mlx_context mlx; + mlx_window win; + mlx_image logo_png; + mlx_image logo_jpg; + mlx_image logo_bmp; + mlx_image img; } mlx_t; int update(void* param) @@ -17,17 +17,17 @@ int update(void* param) mlx_t* mlx = (mlx_t*)param; if(i == 200) - mlx_clear_window(mlx->mlx, mlx->win, 0xFF334D4D); + mlx_clear_window(mlx->mlx, mlx->win, 0x334D4DFF); if(i >= 250) mlx_set_font_scale(mlx->mlx, "default", 16.f); else mlx_set_font_scale(mlx->mlx, "default", 6.f); - mlx_string_put(mlx->mlx, mlx->win, 160, 120, 0xFFFF2066, "this text should be hidden"); + mlx_string_put(mlx->mlx, mlx->win, 160, 120, 0xFF2066FF, "this text should be hidden"); mlx_put_image_to_window(mlx->mlx, mlx->win, mlx->logo_png, 100, 100); - mlx_transform_put_image_to_window(mlx->mlx, mlx->win, mlx->logo_bmp, 220, 40, 0.5f, 75.0f); + //mlx_transform_put_image_to_window(mlx->mlx, mlx->win, mlx->logo_bmp, 220, 40, 0.5f, 75.0f); mlx_put_image_to_window(mlx->mlx, mlx->win, mlx->img, 150, 60); mlx_set_font(mlx->mlx, "default"); @@ -35,29 +35,29 @@ int update(void* param) for(int j = 0, color = 0; j < 400; j++) { - mlx_pixel_put(mlx->mlx, mlx->win, j, j, 0xFFFF0000 + color); - mlx_pixel_put(mlx->mlx, 
mlx->win, 399 - j, j, 0xFF0000FF); + mlx_pixel_put(mlx->mlx, mlx->win, j, j, 0x0000FFFF + (color << 24)); + mlx_pixel_put(mlx->mlx, mlx->win, 399 - j, j, 0x0000FFFF); color += (color < 255); } - mlx_transform_put_image_to_window(mlx->mlx, mlx->win, mlx->logo_jpg, 210, 150, 2.0f, 0.0f); + //mlx_transform_put_image_to_window(mlx->mlx, mlx->win, mlx->logo_jpg, 210, 150, 2.0f, 0.0f); mlx_set_font_scale(mlx->mlx, "default", 8.f); mlx_string_put(mlx->mlx, mlx->win, 210, 175, 0xFFAF2BFF, "hidden"); for(int j = 0; j < 20; j++) { for(int k = 0; k < 20; k++) - mlx_pixel_put(mlx->mlx, mlx->win, 220 + j, 160 + k, 0xFFFF0000); + mlx_pixel_put(mlx->mlx, mlx->win, 220 + j, 160 + k, 0xFF0000FF); } i++; return 0; } -void* create_image(mlx_t* mlx) +mlx_image create_image(mlx_t* mlx) { unsigned char pixel[4]; - void* img = mlx_new_image(mlx->mlx, 100, 100); + mlx_image img = mlx_new_image(mlx->mlx, 100, 100); for(int i = 0, j = 0, k = 0; i < (100 * 100) * 4; i += 4, j++) { if(j >= 100) @@ -67,10 +67,10 @@ void* create_image(mlx_t* mlx) } if(i < 10000 || i > 20000) { - pixel[0] = i; - pixel[1] = j; - pixel[2] = k; - pixel[3] = 0x99; + pixel[0] = 0x99; + pixel[1] = i; + pixel[2] = j; + pixel[3] = k; mlx_set_image_pixel(mlx->mlx, img, j, k, *((int*)pixel)); } } @@ -90,13 +90,13 @@ int key_hook(int key, void* param) mlx_loop_end(mlx->mlx); break; case 22 : // (S)how - mlx_mouse_show(); + mlx_mouse_show(mlx->mlx); break; case 11 : // (H)ide - mlx_mouse_hide(); + mlx_mouse_hide(mlx->mlx); break; case 6 : // (C)lear - mlx_clear_window(mlx->mlx, mlx->win, 0xFF334D4D); + mlx_clear_window(mlx->mlx, mlx->win, 0x334D4DFF); break; case 79 : // RIGHT KEY mlx_mouse_move(mlx->mlx, mlx->win, x + 10, y); @@ -131,9 +131,15 @@ int main(void) int dummy; mlx.mlx = mlx_init(); - mlx.win = mlx_new_resizable_window(mlx.mlx, 400, 400, "My window"); - mlx_get_screens_size(mlx.mlx, mlx.win, &w, &h); + mlx_window_create_info info = { 0 }; + info.title = "My window"; + info.width = 400; + info.height = 400; + info.is_resizable = true; + mlx.win = mlx_new_window(mlx.mlx, &info); + + mlx_get_screen_size(mlx.mlx, mlx.win, &w, &h); printf("screen size : %dx%d\n", w, h); mlx_set_fps_goal(mlx.mlx, 60); @@ -141,17 +147,17 @@ int main(void) mlx_on_event(mlx.mlx, mlx.win, MLX_KEYDOWN, key_hook, &mlx); mlx_on_event(mlx.mlx, mlx.win, MLX_WINDOW_EVENT, window_hook, &mlx); - mlx.logo_png = mlx_png_file_to_image(mlx.mlx, "42_logo.png", &dummy, &dummy); - mlx.logo_bmp = mlx_bmp_file_to_image(mlx.mlx, "42_logo.bmp", &dummy, &dummy); - mlx.logo_jpg = mlx_jpg_file_to_image(mlx.mlx, "42_logo.jpg", &dummy, &dummy); + mlx.logo_png = mlx_new_image_from_file(mlx.mlx, "42_logo.png", &dummy, &dummy); + mlx.logo_bmp = mlx_new_image_from_file(mlx.mlx, "42_logo.bmp", &dummy, &dummy); + mlx.logo_jpg = mlx_new_image_from_file(mlx.mlx, "42_logo.jpg", &dummy, &dummy); - mlx_pixel_put(mlx.mlx, mlx.win, 200, 10, 0xFFFF00FF); + mlx_pixel_put(mlx.mlx, mlx.win, 200, 10, 0xFF00FFFF); mlx_put_image_to_window(mlx.mlx, mlx.win, mlx.logo_png, 10, 190); mlx.img = create_image(&mlx); mlx_set_font_scale(mlx.mlx, "font.ttf", 16.f); - mlx_string_put(mlx.mlx, mlx.win, 20, 20, 0xFF0020FF, "that text will disappear"); + mlx_string_put(mlx.mlx, mlx.win, 20, 20, 0x0020FFFF, "that text will disappear"); mlx_loop_hook(mlx.mlx, update, &mlx); mlx_loop(mlx.mlx); @@ -162,7 +168,7 @@ int main(void) mlx_destroy_image(mlx.mlx, mlx.img); mlx_destroy_window(mlx.mlx, mlx.win); - mlx_destroy_display(mlx.mlx); + mlx_destroy_context(mlx.mlx); return 0; } diff --git a/includes/mlx.h b/includes/mlx.h 
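The reworked example above gives the full picture of the migration, and the mlx.h changes that follow formalize it: every former void* parameter becomes a typed opaque handle (mlx_context, mlx_window, mlx_image), and window creation goes through a single mlx_window_create_info descriptor instead of separate mlx_new_window / mlx_new_resizable_window entry points. Stripped down to the essentials, a program against the new API looks roughly like this (a minimal sketch; the include paths and the reliance on zero-initialised defaults are assumptions for illustration):

#include <stddef.h>
#include <stdbool.h>
#include "mlx.h" /* assumed include path; adjust to the project's -I flags */

static int update(void* param)
{
	(void)param; /* per-frame drawing would go here */
	return 0;
}

int main(void)
{
	mlx_context mlx = mlx_init();
	if(!mlx)
		return 1;

	mlx_window_create_info info = { 0 }; /* unset fields stay zero/false */
	info.title = "minimal window";
	info.width = 400;
	info.height = 400;
	info.is_resizable = true;

	mlx_window win = mlx_new_window(mlx, &info);
	if(!win)
	{
		mlx_destroy_context(mlx);
		return 1;
	}

	mlx_loop_hook(mlx, update, NULL);
	mlx_loop(mlx);

	mlx_destroy_window(mlx, win);
	mlx_destroy_context(mlx);
	return 0;
}

Because each handle expands (via the MLX_DEFINE_HANDLE macro added to mlx_profile.h below) to a pointer to a distinct incomplete struct, passing an mlx_image where an mlx_window is expected is now caught at compile time instead of failing at runtime.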
index cbd58a6..0d4e2f5 100644 --- a/includes/mlx.h +++ b/includes/mlx.h @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/04 16:56:35 by maldavid #+# #+# */ -/* Updated: 2024/12/14 16:33:17 by maldavid ### ########.fr */ +/* Updated: 2024/12/15 01:58:12 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -16,21 +16,62 @@ #ifndef MACROLIB_X_H #define MACROLIB_X_H +#include #include "mlx_profile.h" #ifdef __cplusplus extern "C" { #endif -typedef enum mlx_event_type -{ - MLX_KEYDOWN = 0, - MLX_KEYUP = 1, - MLX_MOUSEDOWN = 2, - MLX_MOUSEUP = 3, - MLX_MOUSEWHEEL = 4, - MLX_WINDOW_EVENT = 5 -} mlx_event_type; + + /* MLX types */ + +/** + * @brief Opaque handle that represents the MLX context + */ +MLX_DEFINE_HANDLE(mlx_context); + +/** + * @brief Opaque handle that represents a MLX window + */ +MLX_DEFINE_HANDLE(mlx_window); + +/** + * @brief Opaque handle that represents a MLX image + */ +MLX_DEFINE_HANDLE(mlx_image); + + + + /* MLX backend related functions */ + + +/** + * @brief Initializes the MLX internal application + * + * @return (mlx_context) An opaque handler to the internal MLX application or NULL (0x0) in case of error + */ +MLX_API mlx_context mlx_init(); + +/** + * @brief Caps the FPS + * + * @param mlx Internal MLX application + * @param fps The FPS cap + */ +MLX_API void mlx_set_fps_goal(mlx_context mlx, int fps); + +/** + * @brief Destroy internal MLX application + * + * @param mlx Internal MLX application + */ +MLX_API void mlx_destroy_context(mlx_context mlx); + + + + /* Window related functions */ + /** * @brief Descriptor structure for window creation @@ -45,46 +86,15 @@ typedef struct mlx_window_create_info bool is_resizable; } mlx_window_create_info; - - - /* MLX backend related functions */ - -/** - * @brief Initializes the MLX internal application - * - * @return (void*) An opaque pointer to the internal MLX application or NULL (0x0) in case of error - */ -MLX_API void* mlx_init(); - -/** - * @brief Caps the FPS - * - * @param mlx Internal MLX application - * @param fps The FPS cap - */ -MLX_API void mlx_set_fps_goal(void* mlx, int fps); - -/** - * @brief Destroy internal MLX application - * - * @param mlx Internal MLX application - */ -MLX_API void mlx_destroy_display(void* mlx); - - - - /* Window related functions */ - - /** * @brief Creates a new window * * @param mlx Internal MLX application * @param info Pointer to a descriptor structure * - * @return (void*) An opaque pointer to the internal MLX window or NULL (0x0) in case of error + * @return (mlx_widnow) An opaque handler to the internal MLX window or NULL (0x0) in case of error */ -MLX_API void* mlx_new_window(void* mlx, const mlx_window_create_info* info); +MLX_API mlx_window mlx_new_window(mlx_context mlx, const mlx_window_create_info* info); /** * @brief Destroys internal window @@ -92,7 +102,7 @@ MLX_API void* mlx_new_window(void* mlx, const mlx_window_create_info* info); * @param mlx Internal MLX application * @param win Internal window */ -MLX_API void mlx_destroy_window(void* mlx, void* win); +MLX_API void mlx_destroy_window(mlx_context mlx, mlx_window win); /** * @brief Sets window position @@ -102,7 +112,7 @@ MLX_API void mlx_destroy_window(void* mlx, void* win); * @param x New x position * @param y New y position */ -MLX_API void mlx_set_window_position(void *mlx, void *win, int x, int y); +MLX_API void mlx_set_window_position(mlx_context mlx, mlx_window win, int x, int y); /** * @brief Sets 
window size @@ -112,7 +122,7 @@ MLX_API void mlx_set_window_position(void *mlx, void *win, int x, int y); * @param width New width * @param height New height */ -MLX_API void mlx_set_window_size(void *mlx, void *win, int width, int height); +MLX_API void mlx_set_window_size(mlx_context mlx, mlx_window win, int width, int height); /** * @brief Sets window title @@ -121,7 +131,7 @@ MLX_API void mlx_set_window_size(void *mlx, void *win, int width, int height); * @param win Internal window to move * @param title New title */ -MLX_API void mlx_set_window_title(void *mlx, void *win, const char* title); +MLX_API void mlx_set_window_title(mlx_context mlx, mlx_window win, const char* title); /** * @brief Enables/Disables window fullscreen mode @@ -130,7 +140,7 @@ MLX_API void mlx_set_window_title(void *mlx, void *win, const char* title); * @param win Internal window to move * @param enable Switch or not to fullscreen */ -MLX_API void mlx_set_window_fullscreen(void *mlx, void *win, bool enable); +MLX_API void mlx_set_window_fullscreen(mlx_context mlx, mlx_window win, bool enable); /** * @brief Gets window position @@ -140,7 +150,7 @@ MLX_API void mlx_set_window_fullscreen(void *mlx, void *win, bool enable); * @param x Pointers to get position of the window * @param y Pointers to get position of the window */ -MLX_API void mlx_get_window_position(void *mlx, void *win, int* x, int* y); +MLX_API void mlx_get_window_position(mlx_context mlx, mlx_window win, int* x, int* y); /** * @brief Gets window size @@ -150,7 +160,7 @@ MLX_API void mlx_get_window_position(void *mlx, void *win, int* x, int* y); * @param x Pointers to get size of the window * @param y Pointers to get size of the window */ -MLX_API void mlx_get_window_size(void *mlx, void *win, int* x, int* y); +MLX_API void mlx_get_window_size(mlx_context mlx, mlx_window win, int* x, int* y); /** * @brief Clears the given window (resets all rendered data) @@ -158,7 +168,7 @@ MLX_API void mlx_get_window_size(void *mlx, void *win, int* x, int* y); * @param mlx Internal MLX application * @param win Internal window */ -MLX_API void mlx_clear_window(void* mlx, void* win, int color); +MLX_API void mlx_clear_window(mlx_context mlx, mlx_window win, int color); /** * @brief Get the size of the screen the given window is on @@ -168,7 +178,7 @@ MLX_API void mlx_clear_window(void* mlx, void* win, int color); * @param w Get width size * @param h Get height size */ -MLX_API void mlx_get_screens_size(void* mlx, void* win, int* w, int* h); +MLX_API void mlx_get_screen_size(mlx_context mlx, mlx_window win, int* w, int* h); @@ -182,21 +192,21 @@ MLX_API void mlx_get_screens_size(void* mlx, void* win, int* w, int* h); * @param f The function * @param param Param to give to the function passed */ -MLX_API void mlx_loop_hook(void* mlx, int (*f)(void*), void* param); +MLX_API void mlx_loop_hook(mlx_context mlx, int (*f)(void*), void* param); /** * @brief Starts the internal main loop * * @param mlx Internal MLX application */ -MLX_API void mlx_loop(void* mlx); +MLX_API void mlx_loop(mlx_context mlx); /** * @brief Ends the internal run loop * * @param mlx Internal MLX application */ -MLX_API void mlx_loop_end(void* mlx); +MLX_API void mlx_loop_end(mlx_context mlx); @@ -205,13 +215,17 @@ MLX_API void mlx_loop_end(void* mlx); /** * @brief Shows mouse cursor + * + * @param mlx Internal MLX application */ -MLX_API void mlx_mouse_show(); +MLX_API void mlx_mouse_show(mlx_context mlx); /** * @brief Hides mouse cursor + * + * @param mlx Internal MLX application */ -MLX_API void 
mlx_mouse_hide(); +MLX_API void mlx_mouse_hide(mlx_context mlx); /** * @brief Moves cursor to givent position @@ -221,7 +235,7 @@ MLX_API void mlx_mouse_hide(); * @param x X coordinate * @param y Y coordinate */ -MLX_API void mlx_mouse_move(void* mlx, void* win, int x, int y); +MLX_API void mlx_mouse_move(mlx_context mlx, mlx_window win, int x, int y); /** * @brief Get cursor's position @@ -230,7 +244,20 @@ MLX_API void mlx_mouse_move(void* mlx, void* win, int x, int y); * @param x Get x coordinate * @param y Get y coordinate */ -MLX_API void mlx_mouse_get_pos(void* mlx, int* x, int* y); +MLX_API void mlx_mouse_get_pos(mlx_context mlx, int* x, int* y); + +/** + * @brief Type of event + */ +typedef enum mlx_event_type +{ + MLX_KEYDOWN = 0, + MLX_KEYUP = 1, + MLX_MOUSEDOWN = 2, + MLX_MOUSEUP = 3, + MLX_MOUSEWHEEL = 4, + MLX_WINDOW_EVENT = 5 +} mlx_event_type; /** * @brief Gives a function to be executed on event type @@ -241,7 +268,7 @@ MLX_API void mlx_mouse_get_pos(void* mlx, int* x, int* y); * @param f Function to be executed * @param param Parameter given to the function */ -MLX_API void mlx_on_event(void* mlx, void* win, mlx_event_type event, int (*f)(int, void*), void* param); +MLX_API void mlx_on_event(mlx_context mlx, mlx_window win, mlx_event_type event, int (*f)(int, void*), void* param); @@ -257,7 +284,7 @@ MLX_API void mlx_on_event(void* mlx, void* win, mlx_event_type event, int (*f)(i * @param y Y coordinate * @param color Color of the pixel (coded on 4 bytes in an int, 0xRRGGBBAA) */ -MLX_API void mlx_pixel_put(void* mlx, void* win, int x, int y, int color); +MLX_API void mlx_pixel_put(mlx_context mlx, mlx_window win, int x, int y, int color); @@ -271,9 +298,9 @@ MLX_API void mlx_pixel_put(void* mlx, void* win, int x, int y, int color); * @param width Width of the image * @param height Height of the image * - * @return (void*) An opaque pointer to the internal image or NULL (0x0) in case of error + * @return (mlx_image) An opaque handler to the internal image or NULL (0x0) in case of error */ -MLX_API void* mlx_new_image(void* mlx, int width, int height); +MLX_API mlx_image mlx_new_image(mlx_context mlx, int width, int height); /** * @brief Create a new image from a png/jpg/bmp file @@ -283,9 +310,9 @@ MLX_API void* mlx_new_image(void* mlx, int width, int height); * @param width Get the width of the image * @param heigth Get the height of the image * - * @return (void*) An opaque pointer to the internal image or NULL (0x0) in case of error + * @return (mlx_image) An opaque handler to the internal image or NULL (0x0) in case of error */ -MLX_API void* mlx_new_image_from_file(void* mlx, char* filename, int* width, int* height); +MLX_API mlx_image mlx_new_image_from_file(mlx_context mlx, char* filename, int* width, int* height); /** * @brief Destroys internal image @@ -293,7 +320,7 @@ MLX_API void* mlx_new_image_from_file(void* mlx, char* filename, int* width, int * @param mlx Internal MLX application * @param img Internal image */ -MLX_API void mlx_destroy_image(void* mlx, void* img); +MLX_API void mlx_destroy_image(mlx_context mlx, mlx_image image); /** * @brief Get image pixel data @@ -313,7 +340,7 @@ MLX_API void mlx_destroy_image(void* mlx, void* img); * ~ make IMAGES_OPTIMIZED=false * ``` */ -MLX_API int mlx_get_image_pixel(void* mlx, void* img, int x, int y); +MLX_API int mlx_get_image_pixel(mlx_context mlx, mlx_image image, int x, int y); /** * @brief Set image pixel data @@ -332,7 +359,7 @@ MLX_API int mlx_get_image_pixel(void* mlx, void* img, int x, int y); * ~ make 
IMAGES_OPTIMIZED=false * ``` */ -MLX_API void mlx_set_image_pixel(void* mlx, void* img, int x, int y, int color); +MLX_API void mlx_set_image_pixel(mlx_context mlx, mlx_image image, int x, int y, int color); /** * @brief Put image to the given window @@ -343,7 +370,7 @@ MLX_API void mlx_set_image_pixel(void* mlx, void* img, int x, int y, int color); * @param x X coordinate * @param y Y coordinate */ -MLX_API void mlx_put_image_to_window(void* mlx, void* win, void* img, int x, int y); +MLX_API void mlx_put_image_to_window(mlx_context mlx, mlx_window win, mlx_image image, int x, int y); @@ -360,7 +387,7 @@ MLX_API void mlx_put_image_to_window(void* mlx, void* win, void* img, int x, int * @param color Color of the pixel (coded on 4 bytes in an int, 0xAARRGGBB) * @param str Text to put */ -MLX_API void mlx_string_put(void* mlx, void* win, int x, int y, int color, char* str); +MLX_API void mlx_string_put(mlx_context mlx, mlx_window win, int x, int y, int color, char* str); /** * @brief Loads a font to be used by `mlx_string_put` @@ -369,7 +396,7 @@ MLX_API void mlx_string_put(void* mlx, void* win, int x, int y, int color, char* * @param win Internal window * @param filepath Filepath to the font or "default" to reset to the embedded font */ -MLX_API void mlx_set_font(void* mlx, char* filepath); +MLX_API void mlx_set_font(mlx_context mlx, char* filepath); /** * @brief Loads a font to be used by `mlx_string_put` and scales it @@ -379,7 +406,7 @@ MLX_API void mlx_set_font(void* mlx, char* filepath); * @param filepath Filepath to the font or "default" to reset to the embedded font * @param scale Scale to apply to the font */ -MLX_API void mlx_set_font_scale(void* mlx, char* filepath, float scale); +MLX_API void mlx_set_font_scale(mlx_context mlx, char* filepath, float scale); #ifdef __cplusplus } diff --git a/includes/mlx_extended.h b/includes/mlx_extended.h index 1ceda2f..64d7357 100644 --- a/includes/mlx_extended.h +++ b/includes/mlx_extended.h @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2024/12/14 16:17:10 by maldavid #+# #+# */ -/* Updated: 2024/12/14 16:37:49 by maldavid ### ########.fr */ +/* Updated: 2024/12/14 17:42:06 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -16,12 +16,16 @@ #ifndef MACROLIB_X_EXTENDED_H #define MACROLIB_X_EXTENDED_H -#include "mlx_profile.h" +#include "mlx.h" #ifdef __cplusplus extern "C" { #endif + + /* Window related functions */ + + typedef struct mlx_window_create_info_extension { int position_x; @@ -32,11 +36,6 @@ typedef struct mlx_window_create_info_extension int min_height; } mlx_window_create_info_extension; - - - /* Window related functions */ - - /** * @brief Sets maximum window size * @@ -45,7 +44,7 @@ typedef struct mlx_window_create_info_extension * @param x New x maximum size * @param y New y maximum size */ -MLX_API void mlx_set_window_max_size(void *mlx, void *win, int x, int y); +MLX_API void mlx_set_window_max_size(mlx_context mlx, mlx_window win, int x, int y); /** * @brief Sets minimum window size @@ -55,7 +54,7 @@ MLX_API void mlx_set_window_max_size(void *mlx, void *win, int x, int y); * @param x New x minimum size * @param y New y minimum size */ -MLX_API void mlx_set_window_min_size(void *mlx, void *win, int x, int y); +MLX_API void mlx_set_window_min_size(mlx_context mlx, mlx_window win, int x, int y); /** * @brief Maximizes a window @@ -63,7 +62,7 @@ MLX_API void mlx_set_window_min_size(void *mlx, void *win, int x, int y); * @param 
mlx Internal MLX application * @param win Internal window to move */ -MLX_API void mlx_maximise_window(void *mlx, void *win); +MLX_API void mlx_maximise_window(mlx_context mlx, mlx_window win); /** * @brief Minimizes a window @@ -71,7 +70,7 @@ MLX_API void mlx_maximise_window(void *mlx, void *win); * @param mlx Internal MLX application * @param win Internal window to move */ -MLX_API void mlx_minimize_window(void *mlx, void *win); +MLX_API void mlx_minimize_window(mlx_context mlx, mlx_window win); /** * @brief Restore window to formal size @@ -79,7 +78,7 @@ MLX_API void mlx_minimize_window(void *mlx, void *win); * @param mlx Internal MLX application * @param win Internal window to move */ -MLX_API void mlx_restore_window(void *mlx, void *win); +MLX_API void mlx_restore_window(mlx_context mlx, mlx_window win); @@ -95,7 +94,7 @@ MLX_API void mlx_restore_window(void *mlx, void *win); * @param y Y coordinate * @param pixels Array of pixels (coded on 4 bytes in an int, 0xRRGGBBAA) */ -MLX_API void mlx_pixel_put_array(void* mlx, void* win, int x, int y, int* pixels); +MLX_API void mlx_pixel_put_array(mlx_context mlx, mlx_window win, int x, int y, int* pixels); @@ -124,7 +123,7 @@ MLX_API void mlx_pixel_put_array(void* mlx, void* win, int x, int y, int* pixels * ~ make IMAGES_OPTIMIZED=false * ``` */ -MLX_API void mlx_get_image_region(void* mlx, void* img, int x, int y, int w, int h, int* dst); +MLX_API void mlx_get_image_region(mlx_context mlx, mlx_image image, int x, int y, int w, int h, int* dst); /** * @brief Set image region @@ -148,7 +147,7 @@ MLX_API void mlx_get_image_region(void* mlx, void* img, int x, int y, int w, int * ~ make IMAGES_OPTIMIZED=false * ``` */ -MLX_API void mlx_set_image_region(void* mlx, void* img, int x, int y, int w, int h, int* pixels); +MLX_API void mlx_set_image_region(mlx_context mlx, mlx_image image, int x, int y, int w, int h, int* pixels); /** * @brief Transform and put image to the given window @@ -162,7 +161,7 @@ MLX_API void mlx_set_image_region(void* mlx, void* img, int x, int y, int w, int * @param scale_y Scale y of the image * @param angle Rotation angle of the image (clockwise) */ -MLX_API void mlx_put_transformed_image_to_window(void* mlx, void* win, void* img, int x, int y, float scale_x, float scale_y, float angle); +MLX_API void mlx_put_transformed_image_to_window(mlx_context mlx, mlx_window win, mlx_image image, int x, int y, float scale_x, float scale_y, float angle); #ifdef __cplusplus } diff --git a/includes/mlx_profile.h b/includes/mlx_profile.h index 18ea4c1..99f2661 100644 --- a/includes/mlx_profile.h +++ b/includes/mlx_profile.h @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/11/10 08:49:17 by maldavid #+# #+# */ -/* Updated: 2024/04/23 18:28:12 by maldavid ### ########.fr */ +/* Updated: 2024/12/14 17:58:37 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -156,8 +156,10 @@ #define MLX_VERSION_MINOR(version) (((uint32_t)(version) >> 12U) & 0x3FFU) #define MLX_VERSION_PATCH(version) ((uint32_t)(version) & 0xFFFU) +#define MLX_DEFINE_HANDLE(object) typedef struct object##_handler* object + #define MLX_VERSION MLX_MAKE_VERSION(2, 0, 0) -#define MLX_TARGET_VULKAN_API_VERSION MLX_MAKE_VERSION(1, 2, 0) +#define MLX_TARGET_VULKAN_API_VERSION MLX_MAKE_VERSION(1, 0, 0) // Checking common assumptions #ifdef __cplusplus diff --git a/runtime/Includes/Core/Application.h b/runtime/Includes/Core/Application.h index 840e6bf..5814218 100644 --- 
a/runtime/Includes/Core/Application.h +++ b/runtime/Includes/Core/Application.h @@ -11,35 +11,25 @@ namespace mlx { - // TODO : FIX THIS DAMN GOD CLASS !!!!!!!!!!!!!!!! class Application { public: Application(); inline void GetMousePos(int* x, int* y) noexcept; - inline void MouseMove(Handle win, int x, int y) noexcept; - - inline void OnEvent(Handle win, int event, int (*funct_ptr)(int, void*), void* param) noexcept; - - inline void GetScreenSize(Handle win, int* w, int* h) noexcept; - + inline void GetScreenSize(mlx_window win, int* w, int* h) noexcept; inline void SetFPSCap(std::uint32_t fps) noexcept; - inline Handle NewGraphicsSuport(std::size_t w, std::size_t h, const char* title, bool is_resizable); - inline void ClearGraphicsSupport(Handle win, int color); - inline void DestroyGraphicsSupport(Handle win); - inline void SetGraphicsSupportPosition(Handle win, int x, int y); + inline void OnEvent(mlx_window win, int event, int (*funct_ptr)(int, void*), void* param) noexcept; - inline void PixelPut(Handle win, int x, int y, std::uint32_t color) const noexcept; - inline void StringPut(Handle win, int x, int y, std::uint32_t color, char* str); + inline mlx_window NewGraphicsSuport(const mlx_window_create_info* info); + inline NonOwningPtr GetGraphicsSupport(mlx_window win); + inline void DestroyGraphicsSupport(mlx_window win); - Handle NewTexture(int w, int h); - Handle NewStbTexture(char* file, int* w, int* h); // stb textures are image files (png, jpg, bpm, ...) - inline void TexturePut(Handle win, Handle img, int x, int y, float scale, float angle); - inline int GetTexturePixel(Handle img, int x, int y); - inline void SetTexturePixel(Handle img, int x, int y, std::uint32_t color); - void DestroyTexture(Handle ptr); + mlx_image NewTexture(int w, int h); + mlx_image NewStbTexture(char* file, int* w, int* h); // stb textures are image files (png, jpg, bpm, ...) 
+ inline NonOwningPtr GetTexture(mlx_image image); + void DestroyTexture(mlx_image img); inline void LoopHook(int (*f)(void*), void* param); inline void LoopEnd() noexcept; diff --git a/runtime/Includes/Core/Application.inl b/runtime/Includes/Core/Application.inl index 6b1be72..822c4d9 100644 --- a/runtime/Includes/Core/Application.inl +++ b/runtime/Includes/Core/Application.inl @@ -1,33 +1,34 @@ #pragma once #include +#include #include #ifndef DISABLE_ALL_SAFETIES - #define CHECK_WINDOW_PTR(win) \ + #define CHECK_WINDOW_PTR(win, retval) \ if(win == nullptr) \ { \ - Error("invalid window ptr (NULL)"); \ - return; \ + Error("invalid window handle (NULL)"); \ + return retval; \ } \ - else if(std::find_if(m_graphics.begin(), m_graphics.end(), [win](const std::unique_ptr& gs){ return gs && *static_cast(win) == gs->GetID(); }) == m_graphics.end()) \ + else if(std::find_if(m_graphics.begin(), m_graphics.end(), [win](const std::unique_ptr& gs){ return gs && win->id == gs->GetID(); }) == m_graphics.end()) \ { \ - Error("invalid window ptr"); \ - return; \ + Error("invalid window handle"); \ + return retval; \ } else {} #define CHECK_IMAGE_PTR(img, retval) \ if(img == nullptr) \ { \ - Error("invalid image ptr (NULL)"); \ - retval; \ + Error("invalid image handle (NULL)"); \ + return retval; \ } \ - else if(!m_image_registry.IsTextureKnown(static_cast(img))) \ + else if(!m_image_registry.IsTextureKnown(image->texture)) \ { \ - Error("invalid image ptr"); \ - retval; \ + Error("invalid image handle"); \ + return retval; \ } else {} #else - #define CHECK_WINDOW_PTR(win) + #define CHECK_WINDOW_PTR(win, retval) #define CHECK_IMAGE_PTR(img, retval) #endif @@ -39,32 +40,12 @@ namespace mlx *y = m_in.GetY(); } - void Application::MouseMove(Handle win, int x, int y) noexcept + void Application::OnEvent(mlx_window win, int event, int (*funct_ptr)(int, void*), void* param) noexcept { - CHECK_WINDOW_PTR(win); - if(!m_graphics[*static_cast(win)]->HasWindow()) - { - Warning("trying to move the mouse relative to a window that is targeting an image and not a real window, this is not allowed (move ignored)"); + CHECK_WINDOW_PTR(win, ); + if(!m_graphics[win->id]->HasWindow()) return; - } - m_graphics[*static_cast(win)]->GetWindow()->MoveMouse(x, y); - } - - void Application::OnEvent(Handle win, int event, int (*funct_ptr)(int, void*), void* param) noexcept - { - CHECK_WINDOW_PTR(win); - if(!m_graphics[*static_cast(win)]->HasWindow()) - { - Warning("trying to add event hook for a window that is targeting an image and not a real window, this is not allowed (hook ignored)"); - return; - } - m_in.OnEvent(m_graphics[*static_cast(win)]->GetWindow()->GetID(), event, funct_ptr, param); - } - - void Application::GetScreenSize(Handle win, int* w, int* h) noexcept - { - CHECK_WINDOW_PTR(win); - m_graphics[*static_cast(win)]->GetWindow()->GetScreenSizeWindowIsOn(w, h); + m_in.OnEvent(m_graphics[win->id]->GetWindow()->GetID(), event, funct_ptr, param); } void Application::SetFPSCap(std::uint32_t fps) noexcept @@ -72,81 +53,38 @@ namespace mlx m_fps.SetMaxFPS(fps); } - void* Application::NewGraphicsSuport(std::size_t w, std::size_t h, const char* title, bool is_resizable) + mlx_window Application::NewGraphicsSuport(const mlx_window_create_info* info) { MLX_PROFILE_FUNCTION(); - if(m_image_registry.IsTextureKnown(reinterpret_cast(const_cast(title)))) - m_graphics.emplace_back(std::make_unique(w, h, reinterpret_cast(const_cast(title)), m_graphics.size())); - else + if(!info) { - if(title == NULL) - { - FatalError("invalid window 
title (NULL)"); - return nullptr; - } - if(static_cast(const_cast(title)) == static_cast(this)) - { - for(std::size_t i = 0; i < 8; i++) - { - m_graphics.emplace_back(std::make_unique(std::rand() % 1920, std::rand() % 1080, "让我们在月光下åšçˆ±å§", m_graphics.size(), is_resizable)); - m_graphics.back()->GetWindow()->SetPosition(std::rand() % 1920, std::rand() % 1080); - } - } - else - { - m_graphics.emplace_back(std::make_unique(w, h, title, m_graphics.size(), is_resizable)); - m_in.RegisterWindow(m_graphics.back()->GetWindow()); - } + Error("invalid window create info (NULL)"); + return nullptr; } + + mlx_window window; + try { window = new mlx_window_handler; } + catch(...) { return nullptr; } + + m_graphics.emplace_back(std::make_unique(info, m_graphics.size())); + m_in.RegisterWindow(m_graphics.back()->GetWindow()); m_graphics.back()->GetScene().BindFont(p_last_font_bound); - return static_cast(&m_graphics.back()->GetID()); + window->id = m_graphics.back()->GetID(); + return window; } - void Application::ClearGraphicsSupport(Handle win, int color) + NonOwningPtr Application::GetGraphicsSupport(mlx_window win) + { + CHECK_WINDOW_PTR(win, nullptr); + return m_graphics[win->id].get(); + } + + void Application::DestroyGraphicsSupport(mlx_window win) { MLX_PROFILE_FUNCTION(); - CHECK_WINDOW_PTR(win); - m_graphics[*static_cast(win)]->ResetRenderData(color); - } - - void Application::DestroyGraphicsSupport(Handle win) - { - MLX_PROFILE_FUNCTION(); - CHECK_WINDOW_PTR(win); - m_graphics[*static_cast(win)].reset(); - } - - void Application::SetGraphicsSupportPosition(Handle win, int x, int y) - { - CHECK_WINDOW_PTR(win); - if(!m_graphics[*static_cast(win)]->HasWindow()) - Warning("trying to move a window that is targeting an image and not a real window, this is not allowed"); - else - m_graphics[*static_cast(win)]->GetWindow()->SetPosition(x, y); - } - - void Application::PixelPut(Handle win, int x, int y, std::uint32_t color) const noexcept - { - MLX_PROFILE_FUNCTION(); - CHECK_WINDOW_PTR(win); - m_graphics[*static_cast(win)]->PixelPut(x, y, color); - } - - void Application::StringPut(Handle win, int x, int y, std::uint32_t color, char* str) - { - MLX_PROFILE_FUNCTION(); - CHECK_WINDOW_PTR(win); - if(str == nullptr) - { - Error("invalid text (NULL)"); - return; - } - if(std::strlen(str) == 0) - { - Warning("trying to put an empty text"); - return; - } - m_graphics[*static_cast(win)]->StringPut(x, y, color, str); + CHECK_WINDOW_PTR(win, ); + m_graphics[win->id].reset(); + delete win; } void Application::LoadFont(const std::filesystem::path& filepath, float scale) @@ -172,40 +110,16 @@ namespace mlx } } - void Application::TexturePut(Handle win, Handle img, int x, int y, float scale, float angle) + NonOwningPtr Application::GetTexture(mlx_image image) { - MLX_PROFILE_FUNCTION(); - CHECK_WINDOW_PTR(win); - CHECK_IMAGE_PTR(img, return); - NonOwningPtr texture = static_cast(img); - if(!texture->IsInit()) - Error("trying to put a texture that has been destroyed"); - else - m_graphics[*static_cast(win)]->TexturePut(texture, x, y, scale, angle); - } - - int Application::GetTexturePixel(Handle img, int x, int y) - { - MLX_PROFILE_FUNCTION(); - CHECK_IMAGE_PTR(img, return 0); - NonOwningPtr texture = static_cast(img); + CHECK_IMAGE_PTR(image, nullptr); + NonOwningPtr texture = image->texture; if(!texture->IsInit()) { - Error("trying to get a pixel from texture that has been destroyed"); - return 0; + Error("trying to use a texture that has been destroyed"); + return nullptr; } - return texture->GetPixel(x, y); 
- } - - void Application::SetTexturePixel(Handle img, int x, int y, std::uint32_t color) - { - MLX_PROFILE_FUNCTION(); - CHECK_IMAGE_PTR(img, return); - NonOwningPtr texture = static_cast(img); - if(!texture->IsInit()) - Error("trying to set a pixel on texture that has been destroyed"); - else - texture->SetPixel(x, y, color); + return texture; } void Application::LoopHook(int (*f)(void*), void* param) diff --git a/runtime/Includes/Core/Graphics.h b/runtime/Includes/Core/Graphics.h index d348000..8d69a91 100644 --- a/runtime/Includes/Core/Graphics.h +++ b/runtime/Includes/Core/Graphics.h @@ -1,6 +1,7 @@ #ifndef __MLX_GRAPHICS__ #define __MLX_GRAPHICS__ +#include #include #include #include @@ -14,8 +15,7 @@ namespace mlx class GraphicsSupport : public NonCopyable { public: - GraphicsSupport(std::size_t w, std::size_t h, NonOwningPtr render_target, int id); - GraphicsSupport(std::size_t w, std::size_t h, std::string title, int id, bool is_resizable); + GraphicsSupport(const mlx_window_create_info* info, int id); [[nodiscard]] MLX_FORCEINLINE int& GetID() noexcept { return m_id; } [[nodiscard]] inline std::shared_ptr GetWindow() { return p_window; } @@ -24,8 +24,8 @@ namespace mlx inline void ResetRenderData(int color) noexcept; - inline void PixelPut(int x, int y, std::uint32_t color) noexcept; - inline void StringPut(int x, int y, std::uint32_t color, std::string str); + inline void PixelPut(int x, int y, int color) noexcept; + inline void StringPut(int x, int y, int, std::string str); inline void TexturePut(NonOwningPtr texture, int x, int y, float scale, float angle); inline void TryEraseSpritesInScene(NonOwningPtr texture) noexcept; diff --git a/runtime/Includes/Core/Graphics.inl b/runtime/Includes/Core/Graphics.inl index b07350e..13a33eb 100644 --- a/runtime/Includes/Core/Graphics.inl +++ b/runtime/Includes/Core/Graphics.inl @@ -7,10 +7,10 @@ namespace mlx { MLX_PROFILE_FUNCTION(); Vec4f vec_color = { - static_cast((color & 0x000000FF)) / 255.0f, - static_cast((color & 0x0000FF00) >> 8) / 255.0f, + static_cast((color & 0xFF000000) >> 24) / 255.0f, static_cast((color & 0x00FF0000) >> 16) / 255.0f, - static_cast((color & 0xFF000000) >> 24) / 255.0f + static_cast((color & 0x0000FF00) >> 8) / 255.0f, + static_cast((color & 0x000000FF)) / 255.0f, }; p_scene->ResetScene(std::move(vec_color)); m_put_pixel_manager.ResetRenderData(); @@ -18,7 +18,7 @@ namespace mlx m_pixelput_called = false; } - void GraphicsSupport::PixelPut(int x, int y, std::uint32_t color) noexcept + void GraphicsSupport::PixelPut(int x, int y, int color) noexcept { MLX_PROFILE_FUNCTION(); NonOwningPtr texture = m_put_pixel_manager.DrawPixel(x, y, m_draw_layer, color); @@ -30,17 +30,17 @@ namespace mlx } } - void GraphicsSupport::StringPut(int x, int y, std::uint32_t color, std::string str) + void GraphicsSupport::StringPut(int x, int y, int color, std::string str) { MLX_PROFILE_FUNCTION(); if(str.empty()) return; Vec4f vec_color = { - static_cast((color & 0x000000FF)) / 255.0f, - static_cast((color & 0x0000FF00) >> 8) / 255.0f, + static_cast((color & 0xFF000000) >> 24) / 255.0f, static_cast((color & 0x00FF0000) >> 16) / 255.0f, - static_cast((color & 0xFF000000) >> 24) / 255.0f + static_cast((color & 0x0000FF00) >> 8) / 255.0f, + static_cast((color & 0x000000FF)) / 255.0f, }; NonOwningPtr text = p_scene->GetTextFromPositionAndColor(str, Vec2f{ static_cast(x), static_cast(y) }, vec_color); diff --git a/runtime/Includes/Core/Handles.h b/runtime/Includes/Core/Handles.h new file mode 100644 index 0000000..be97c39 --- /dev/null 
+++ b/runtime/Includes/Core/Handles.h @@ -0,0 +1,25 @@ +#ifndef __MLX_HANDLES__ +#define __MLX_HANDLES__ + +#include +#include + +extern "C" +{ + struct mlx_context_handler + { + mlx::NonOwningPtr app; + }; + + struct mlx_window_handler + { + int id; + }; + + struct mlx_image_handler + { + mlx::NonOwningPtr texture; + }; +} + +#endif diff --git a/runtime/Includes/Core/SDLManager.h b/runtime/Includes/Core/SDLManager.h index 2178f59..2d2e57a 100644 --- a/runtime/Includes/Core/SDLManager.h +++ b/runtime/Includes/Core/SDLManager.h @@ -1,6 +1,7 @@ #ifndef __MLX_SDL_MANAGER__ #define __MLX_SDL_MANAGER__ +#include #include namespace mlx @@ -10,7 +11,7 @@ namespace mlx public: SDLManager(); - Handle CreateWindow(const std::string& title, std::size_t w, std::size_t h, bool hidden, std::int32_t& id, bool is_resizable); + Handle CreateWindow(const mlx_window_create_info* info, std::int32_t& id, bool hidden); void DestroyWindow(Handle window) noexcept; void InputsFetcher(func::function functor); diff --git a/runtime/Includes/Graphics/PutPixelManager.h b/runtime/Includes/Graphics/PutPixelManager.h index 5f76609..9bd14be 100644 --- a/runtime/Includes/Graphics/PutPixelManager.h +++ b/runtime/Includes/Graphics/PutPixelManager.h @@ -11,7 +11,7 @@ namespace mlx PutPixelManager(NonOwningPtr renderer) : p_renderer(renderer) {} // Return a valid pointer when a new texture has been created - NonOwningPtr DrawPixel(int x, int y, std::uint64_t draw_layer, std::uint32_t color); + NonOwningPtr DrawPixel(int x, int y, std::uint64_t draw_layer, int color); void ResetRenderData(); ~PutPixelManager(); diff --git a/runtime/Includes/Platform/Window.h b/runtime/Includes/Platform/Window.h index 66ba306..5bbd628 100644 --- a/runtime/Includes/Platform/Window.h +++ b/runtime/Includes/Platform/Window.h @@ -1,6 +1,7 @@ #ifndef __MLX_WINDOW__ #define __MLX_WINDOW__ +#include #include #include @@ -9,7 +10,7 @@ namespace mlx class Window { public: - Window(std::size_t w, std::size_t h, const std::string& title, bool is_resizable, bool hidden = false); + Window(const mlx_window_create_info* info, bool hidden = false); inline Handle GetWindowHandle() const noexcept { return p_window; } inline int GetWidth() const noexcept { return m_width; } diff --git a/runtime/Includes/PreCompiled.h b/runtime/Includes/PreCompiled.h index 8d99dfc..8979494 100644 --- a/runtime/Includes/PreCompiled.h +++ b/runtime/Includes/PreCompiled.h @@ -44,6 +44,7 @@ #include #include #include +#include #include #include #include diff --git a/runtime/Includes/Renderer/Image.h b/runtime/Includes/Renderer/Image.h index c9fd54e..839dfda 100644 --- a/runtime/Includes/Renderer/Image.h +++ b/runtime/Includes/Renderer/Image.h @@ -83,7 +83,7 @@ namespace mlx void Init(CPUBuffer pixels, std::uint32_t width, std::uint32_t height, VkFormat format, bool is_multisampled, [[maybe_unused]] std::string_view debug_name); void Destroy() noexcept override; - void SetPixel(int x, int y, std::uint32_t color) noexcept; + void SetPixel(int x, int y, int color) noexcept; int GetPixel(int x, int y) noexcept; void Update(VkCommandBuffer cmd); @@ -94,7 +94,7 @@ namespace mlx void OpenCPUBuffer(); private: - std::vector m_cpu_buffer; + std::vector m_cpu_buffer; std::optional m_staging_buffer; bool m_has_been_modified = false; }; diff --git a/runtime/Sources/Core/Application.cpp b/runtime/Sources/Core/Application.cpp index 3195ba2..52d5583 100644 --- a/runtime/Sources/Core/Application.cpp +++ b/runtime/Sources/Core/Application.cpp @@ -50,37 +50,49 @@ namespace mlx 
RenderCore::Get().WaitDeviceIdle(); } - void* Application::NewTexture(int w, int h) + mlx_image Application::NewTexture(int w, int h) { MLX_PROFILE_FUNCTION(); + + mlx_image image; + try { image = new mlx_image_handler; } + catch(...) { return nullptr; } + Texture* texture; try { texture = new Texture({}, w, h, VK_FORMAT_R8G8B8A8_SRGB, false, "mlx_user_image"); } catch(...) { return nullptr; } m_image_registry.RegisterTexture(texture); - return texture; + image->texture = texture; + return image; } - void* Application::NewStbTexture(char* file, int* w, int* h) + mlx_image Application::NewStbTexture(char* file, int* w, int* h) { MLX_PROFILE_FUNCTION(); + + mlx_image image; + try { image = new mlx_image_handler; } + catch(...) { return nullptr; } + Texture* texture = StbTextureLoad(file, w, h); if(texture == nullptr) return nullptr; m_image_registry.RegisterTexture(texture); - return texture; + image->texture = texture; + return image; } - void Application::DestroyTexture(void* ptr) + void Application::DestroyTexture(mlx_image image) { MLX_PROFILE_FUNCTION(); RenderCore::Get().WaitDeviceIdle(); - if(!m_image_registry.IsTextureKnown(static_cast(ptr))) + if(!m_image_registry.IsTextureKnown(image->texture)) { - Error("invalid image ptr"); + Error("invalid image handle"); return; } - Texture* texture = static_cast(ptr); + Texture* texture = image->texture.Get(); if(!texture->IsInit()) Error("trying to destroy a texture that has already been destroyed"); else @@ -92,6 +104,7 @@ namespace mlx gs->TryEraseSpritesInScene(texture); } delete texture; + delete image; } Application::~Application() diff --git a/runtime/Sources/Core/Bridge.cpp b/runtime/Sources/Core/Bridge.cpp index 2588115..d556b88 100644 --- a/runtime/Sources/Core/Bridge.cpp +++ b/runtime/Sources/Core/Bridge.cpp @@ -5,13 +5,14 @@ #include #include #include +#include -static void* __mlx_ptr = nullptr; +static mlx::Application* __internal_application_ptr = nullptr; #ifndef DISABLE_ALL_SAFETIES #define MLX_CHECK_APPLICATION_POINTER(ptr) \ - if(ptr != __mlx_ptr || ptr == NULL) \ - mlx::FatalError("invalid mlx pointer passed to '%'", MLX_FUNC_SIG); \ + if(ptr == NULL || ptr->app.Get() != __internal_application_ptr) \ + mlx::FatalError("invalid mlx handle passed to '%'", MLX_FUNC_SIG); \ else {} // just to avoid issues with possible if-else statements outside this macro #else #define MLX_CHECK_APPLICATION_POINTER(ptr) @@ -19,148 +20,162 @@ static void* __mlx_ptr = nullptr; extern "C" { - void* mlx_init() + mlx_context mlx_init() { - if(__mlx_ptr != nullptr) + if(__internal_application_ptr != nullptr) { mlx::Error("MLX cannot be initialized multiple times"); return nullptr; } + mlx::MemManager::Get(); // just to initialize the C garbage collector - mlx::Application* app = new mlx::Application; - if(app == nullptr) - mlx::FatalError("Tout a pété"); - __mlx_ptr = static_cast(app); - return __mlx_ptr; + + try { __internal_application_ptr = new mlx::Application; } + catch(...) { mlx::FatalError("internal application memory allocation failed"); } + + mlx_context_handler* context; + try { context = new mlx_context_handler; } + catch(...) 
{ mlx::FatalError("mlx_context memory allocation failed"); } + context->app = __internal_application_ptr; + return context; } - void* mlx_new_window(void* mlx, int w, int h, const char* title) + void mlx_set_fps_goal(mlx_context mlx, int fps) { MLX_CHECK_APPLICATION_POINTER(mlx); - if(w <= 0 || h <= 0) + if(fps < 0) + mlx::Error("You cannot set a negative FPS cap (nice try)"); + else if(fps == 0) + mlx::Error("You cannot set a FPS cap to 0 (nice try)"); + else + mlx->app->SetFPSCap(static_cast(fps)); + } + + void mlx_destroy_context(mlx_context mlx) + { + MLX_CHECK_APPLICATION_POINTER(mlx); + delete mlx; + delete __internal_application_ptr; + __internal_application_ptr = nullptr; + } + + mlx_window mlx_new_window(mlx_context mlx, const mlx_window_create_info* info) + { + MLX_CHECK_APPLICATION_POINTER(mlx); + return mlx->app->NewGraphicsSuport(info); + } + + void mlx_destroy_window(mlx_context mlx, mlx_window win) + { + MLX_CHECK_APPLICATION_POINTER(mlx); + mlx->app->DestroyGraphicsSupport(win); + } + + void mlx_set_window_position(mlx_context mlx, mlx_window win, int x, int y) + { + MLX_CHECK_APPLICATION_POINTER(mlx); + mlx::NonOwningPtr gs = mlx->app->GetGraphicsSupport(win); + if(!gs || !gs->HasWindow()) + return; + gs->GetWindow()->SetPosition(x, y); + } + + void mlx_clear_window(mlx_context mlx, mlx_window win, int color) + { + MLX_CHECK_APPLICATION_POINTER(mlx); + mlx::NonOwningPtr gs = mlx->app->GetGraphicsSupport(win); + if(!gs) + return; + gs->ResetRenderData(color); + } + + void mlx_get_screen_size(mlx_context mlx, mlx_window win, int* w, int* h) + { + MLX_CHECK_APPLICATION_POINTER(mlx); + mlx::NonOwningPtr gs = mlx->app->GetGraphicsSupport(win); + if(!gs) + return; + if(!gs->HasWindow()) { - mlx::FatalError("invalid window size (%d x %d)", w, h); - return NULL; // not nullptr for the C compatibility + *w = 0; + *h = 0; } - return static_cast(mlx)->NewGraphicsSuport(w, h, title, false); + else + gs->GetWindow()->GetScreenSizeWindowIsOn(w, h); } - void* mlx_new_resizable_window(void* mlx, int w, int h, const char* title) + void mlx_loop_hook(mlx_context mlx, int (*f)(void*), void* param) { MLX_CHECK_APPLICATION_POINTER(mlx); - if(w <= 0 || h <= 0) - { - mlx::FatalError("invalid window size (%d x %d)", w, h); - return NULL; // not nullptr for the C compatibility - } - return static_cast(mlx)->NewGraphicsSuport(w, h, title, true); + mlx->app->LoopHook(f, param); } - void mlx_set_window_position(void *mlx, void *win, int x, int y) + void mlx_loop(mlx_context mlx) { MLX_CHECK_APPLICATION_POINTER(mlx); - static_cast(mlx)->SetGraphicsSupportPosition(win, x, y); + mlx->app->Run(); } - void mlx_loop_hook(void* mlx, int (*f)(void*), void* param) + void mlx_loop_end(mlx_context mlx) { MLX_CHECK_APPLICATION_POINTER(mlx); - static_cast(mlx)->LoopHook(f, param); + mlx->app->LoopEnd(); } - void mlx_loop(void* mlx) + void mlx_mouse_show(mlx_context mlx) { MLX_CHECK_APPLICATION_POINTER(mlx); - static_cast(mlx)->Run(); - } - - void mlx_loop_end(void* mlx) - { - MLX_CHECK_APPLICATION_POINTER(mlx); - static_cast(mlx)->LoopEnd(); - } - - void mlx_mouse_show() - { mlx::SDLManager::ShowCursor(); } - void mlx_mouse_hide() + void mlx_mouse_hide(mlx_context mlx) { + MLX_CHECK_APPLICATION_POINTER(mlx); mlx::SDLManager::HideCursor(); } - void mlx_mouse_move(void* mlx, void* win, int x, int y) + void mlx_mouse_move(mlx_context mlx, mlx_window win, int x, int y) { MLX_CHECK_APPLICATION_POINTER(mlx); - static_cast(mlx)->MouseMove(win, x, y); + mlx::NonOwningPtr gs = mlx->app->GetGraphicsSupport(win); + if(!gs
|| !gs->HasWindow()) + return; + gs->GetWindow()->MoveMouse(x, y); } - void mlx_mouse_get_pos(void* mlx, int* x, int* y) + void mlx_mouse_get_pos(mlx_context mlx, int* x, int* y) { MLX_CHECK_APPLICATION_POINTER(mlx); - static_cast(mlx)->GetMousePos(x, y); + mlx->app->GetMousePos(x, y); } - void mlx_on_event(void* mlx, void* win, mlx_event_type event, int (*funct_ptr)(int, void*), void* param) + void mlx_on_event(mlx_context mlx, mlx_window win, mlx_event_type event, int (*funct_ptr)(int, void*), void* param) { MLX_CHECK_APPLICATION_POINTER(mlx); - static_cast(mlx)->OnEvent(win, static_cast(event), funct_ptr, param); + mlx->app->OnEvent(win, static_cast(event), funct_ptr, param); } - void* mlx_new_image(void* mlx, int width, int height) + void mlx_pixel_put(mlx_context mlx, mlx_window win, int x, int y, int color) { MLX_CHECK_APPLICATION_POINTER(mlx); - if (width <= 0 || height <= 0) + mlx::NonOwningPtr gs = mlx->app->GetGraphicsSupport(win); + if(!gs) + return; + gs->PixelPut(x, y, color); + } + + mlx_image mlx_new_image(mlx_context mlx, int width, int height) + { + MLX_CHECK_APPLICATION_POINTER(mlx); + if(width <= 0 || height <= 0) { mlx::Error("invalid image size (% x %)", width, height); return nullptr; } - return static_cast(mlx)->NewTexture(width, height); + return mlx->app->NewTexture(width, height); } - int mlx_get_image_pixel(void* mlx, void* img, int x, int y) - { - MLX_CHECK_APPLICATION_POINTER(mlx); - int color = static_cast(mlx)->GetTexturePixel(img, x, y); - unsigned char color_bits[4]; - color_bits[0] = (color & 0x000000FF); - color_bits[1] = (color & 0x0000FF00) >> 8; - color_bits[2] = (color & 0x00FF0000) >> 16; - color_bits[3] = (color & 0xFF000000) >> 24; - return *reinterpret_cast(color_bits); - } - - void mlx_set_image_pixel(void* mlx, void* img, int x, int y, int color) - { - MLX_CHECK_APPLICATION_POINTER(mlx); - unsigned char color_bits[4]; - color_bits[0] = (color & 0x00FF0000) >> 16; - color_bits[1] = (color & 0x0000FF00) >> 8; - color_bits[2] = (color & 0x000000FF); - color_bits[3] = (color & 0xFF000000) >> 24; - static_cast(mlx)->SetTexturePixel(img, x, y, *reinterpret_cast(color_bits)); - } - - void mlx_put_image_to_window(void* mlx, void* win, void* img, int x, int y) - { - MLX_CHECK_APPLICATION_POINTER(mlx); - static_cast(mlx)->TexturePut(win, img, x, y, 1.0f, 0.0f); - } - - void mlx_transform_put_image_to_window(void* mlx, void* win, void* img, int x, int y, float scale, float angle) - { - MLX_CHECK_APPLICATION_POINTER(mlx); - static_cast(mlx)->TexturePut(win, img, x, y, scale, angle); - } - - void mlx_destroy_image(void* mlx, void* img) - { - MLX_CHECK_APPLICATION_POINTER(mlx); - static_cast(mlx)->DestroyTexture(img); - } - - void* mlx_png_file_to_image(void* mlx, char* filename, int* width, int* height) + mlx_image mlx_new_image_from_file(mlx_context mlx, char* filename, int* width, int* height) { MLX_CHECK_APPLICATION_POINTER(mlx); if (filename == nullptr) @@ -169,71 +184,74 @@ extern "C" return nullptr; } std::filesystem::path file(filename); - if(file.extension() != ".png") + if( file.extension() != ".png" && + file.extension() != ".jpg" && + file.extension() != ".jpeg" && + file.extension() != ".bmp" && + file.extension() != ".dib") { - mlx::Error("PNG loader: not a png file '%'", filename); + mlx::Error("PNG loader: not a valid file format '%'", filename); return nullptr; } - return static_cast(mlx)->NewStbTexture(filename, width, height); + return mlx->app->NewStbTexture(filename, width, height); } - void* mlx_jpg_file_to_image(void* mlx, char*
filename, int* width, int* height) + void mlx_destroy_image(mlx_context mlx, mlx_image image) { MLX_CHECK_APPLICATION_POINTER(mlx); - if (filename == nullptr) - { - mlx::Error("JPG loader: filename is NULL"); - return nullptr; - } - std::filesystem::path file(filename); - if(file.extension() != ".jpg" && file.extension() != ".jpeg") - { - mlx::Error("JPG loader: not a jpg file '%'", filename); - return nullptr; - } - return static_cast(mlx)->NewStbTexture(filename, width, height); + mlx->app->DestroyTexture(image); } - void* mlx_bmp_file_to_image(void* mlx, char* filename, int* width, int* height) + int mlx_get_image_pixel(mlx_context mlx, mlx_image image, int x, int y) { MLX_CHECK_APPLICATION_POINTER(mlx); - if (filename == nullptr) - { - mlx::Error("BMP loader: filename is NULL"); - return nullptr; - } - std::filesystem::path file(filename); - if(file.extension() != ".bmp" && file.extension() != ".dib") - { - mlx::Error("BMP loader: not a bmp file '%'", filename); - return nullptr; - } - return static_cast(mlx)->NewStbTexture(filename, width, height); + mlx::NonOwningPtr texture = mlx->app->GetTexture(image); + if(!texture) + return 0; + return texture->GetPixel(x, y); } - void mlx_pixel_put(void* mlx, void* win, int x, int y, int color) + void mlx_set_image_pixel(mlx_context mlx, mlx_image image, int x, int y, int color) { MLX_CHECK_APPLICATION_POINTER(mlx); - unsigned char color_bits[4]; - color_bits[0] = (color & 0x00FF0000) >> 16; - color_bits[1] = (color & 0x0000FF00) >> 8; - color_bits[2] = (color & 0x000000FF); - color_bits[3] = (color & 0xFF000000) >> 24; - static_cast(mlx)->PixelPut(win, x, y, *reinterpret_cast(color_bits)); + mlx::NonOwningPtr texture = mlx->app->GetTexture(image); + if(!texture) + return; + texture->SetPixel(x, y, color); } - void mlx_string_put(void* mlx, void* win, int x, int y, int color, char* str) + void mlx_put_image_to_window(mlx_context mlx, mlx_window win, mlx_image image, int x, int y) { MLX_CHECK_APPLICATION_POINTER(mlx); - unsigned char color_bits[4]; - color_bits[0] = (color & 0x00FF0000) >> 16; - color_bits[1] = (color & 0x0000FF00) >> 8; - color_bits[2] = (color & 0x000000FF); - color_bits[3] = (color & 0xFF000000) >> 24; - static_cast(mlx)->StringPut(win, x, y, *reinterpret_cast(color_bits), str); + mlx::NonOwningPtr gs = mlx->app->GetGraphicsSupport(win); + if(!gs) + return; + mlx::NonOwningPtr texture = mlx->app->GetTexture(image); + if(!texture) + return; + gs->TexturePut(texture, x, y, 1.0f, 0.0f); } - void mlx_set_font(void* mlx, char* filepath) + void mlx_string_put(mlx_context mlx, mlx_window win, int x, int y, int color, char* str) + { + MLX_CHECK_APPLICATION_POINTER(mlx); + mlx::NonOwningPtr gs = mlx->app->GetGraphicsSupport(win); + if(!gs) + return; + if(str == nullptr) + { + mlx::Error("invalid text (NULL)"); + return; + } + if(std::strlen(str) == 0) + { + mlx::Warning("trying to put an empty text"); + return; + } + gs->StringPut(x, y, color, str); + } + + void mlx_set_font(mlx_context mlx, char* filepath) { MLX_CHECK_APPLICATION_POINTER(mlx); if (filepath == nullptr) @@ -248,12 +266,12 @@ extern "C" return; } if(std::strcmp(filepath, "default") == 0) - static_cast(mlx)->LoadFont(file, 6.f); + mlx->app->LoadFont(file, 6.f); else - static_cast(mlx)->LoadFont(file, 16.f); + mlx->app->LoadFont(file, 16.f); } - void mlx_set_font_scale(void* mlx, char* filepath, float scale) + void mlx_set_font_scale(mlx_context mlx, char* filepath, float scale) { MLX_CHECK_APPLICATION_POINTER(mlx); if (filepath == nullptr) @@ -267,47 +285,6 @@ extern "C" 
mlx::Error("TTF loader: not a truetype font file '%'", filepath); return; } - static_cast(mlx)->LoadFont(file, scale); - } - - void mlx_clear_window(void* mlx, void* win, int color) - { - MLX_CHECK_APPLICATION_POINTER(mlx); - unsigned char color_bits[4]; - color_bits[0] = (color & 0x00FF0000) >> 16; - color_bits[1] = (color & 0x0000FF00) >> 8; - color_bits[2] = (color & 0x000000FF); - color_bits[3] = (color & 0xFF000000) >> 24; - static_cast(mlx)->ClearGraphicsSupport(win, *reinterpret_cast(color_bits)); - } - - void mlx_destroy_window(void* mlx, void* win) - { - MLX_CHECK_APPLICATION_POINTER(mlx); - static_cast(mlx)->DestroyGraphicsSupport(win); - } - - void mlx_destroy_display(void* mlx) - { - MLX_CHECK_APPLICATION_POINTER(mlx); - delete static_cast(mlx); - __mlx_ptr = nullptr; - } - - void mlx_get_screens_size(void* mlx, void* win, int* w, int* h) - { - MLX_CHECK_APPLICATION_POINTER(mlx); - static_cast(mlx)->GetScreenSize(win, w, h); - } - - void mlx_set_fps_goal(void* mlx, int fps) - { - MLX_CHECK_APPLICATION_POINTER(mlx); - if(fps < 0) - mlx::Error("You cannot set a negative FPS cap (nice try)"); - else if(fps == 0) - mlx::Error("You cannot set a FPS cap to 0 (nice try)"); - else - static_cast(mlx)->SetFPSCap(static_cast(fps)); + mlx->app->LoadFont(file, scale); } } diff --git a/runtime/Sources/Core/Graphics.cpp b/runtime/Sources/Core/Graphics.cpp index 93721c4..413d862 100644 --- a/runtime/Sources/Core/Graphics.cpp +++ b/runtime/Sources/Core/Graphics.cpp @@ -3,25 +3,15 @@ namespace mlx { - GraphicsSupport::GraphicsSupport([[maybe_unused]] std::size_t w, [[maybe_unused]] std::size_t h, NonOwningPtr render_target, int id) : + GraphicsSupport::GraphicsSupport(const mlx_window_create_info* info, int id) : m_put_pixel_manager(&m_renderer), - p_window(nullptr), - m_id(id), - m_has_window(false) + m_id(id) { MLX_PROFILE_FUNCTION(); - m_renderer.Init(render_target); - m_scene_renderer.Init(render_target); - p_scene = std::make_unique(); - } - GraphicsSupport::GraphicsSupport(std::size_t w, std::size_t h, std::string title, int id, bool is_resizable) : - m_put_pixel_manager(&m_renderer), - p_window(std::make_shared(w, h, title, is_resizable)), - m_id(id), - m_has_window(true) - { - MLX_PROFILE_FUNCTION(); + p_window = std::make_shared(info); + m_has_window = true; + m_renderer.Init(p_window.get()); m_scene_renderer.Init(nullptr); p_scene = std::make_unique(); diff --git a/runtime/Sources/Core/SDLManager.cpp b/runtime/Sources/Core/SDLManager.cpp index 8239119..707a48f 100644 --- a/runtime/Sources/Core/SDLManager.cpp +++ b/runtime/Sources/Core/SDLManager.cpp @@ -47,7 +47,7 @@ namespace mlx DebugLog("SDL Manager initialized"); } - Handle SDLManager::CreateWindow(const std::string& title, std::size_t w, std::size_t h, bool hidden, std::int32_t& id, bool is_resizable) + Handle SDLManager::CreateWindow(const mlx_window_create_info* info, std::int32_t& id, bool hidden) { Internal::WindowInfos* infos = new Internal::WindowInfos; Verify(infos != nullptr, "SDL: window allocation failed"); @@ -57,10 +57,10 @@ namespace mlx flags |= SDL_WINDOW_HIDDEN; else flags |= SDL_WINDOW_SHOWN; - if(is_resizable) + if(info->is_resizable) flags |= SDL_WINDOW_RESIZABLE; - infos->window = SDL_CreateWindow(title.c_str(), SDL_WINDOWPOS_CENTERED, SDL_WINDOWPOS_CENTERED, w, h, flags); + infos->window = SDL_CreateWindow(info->title, SDL_WINDOWPOS_CENTERED, SDL_WINDOWPOS_CENTERED, info->width, info->height, flags); if(!infos->window) FatalError("SDL: unable to open a new window; %", SDL_GetError()); infos->icon = 
SDL_CreateRGBSurfaceFrom(static_cast(logo_mlx), logo_mlx_width, logo_mlx_height, 32, 4 * logo_mlx_width, rmask, gmask, bmask, amask); diff --git a/runtime/Sources/Graphics/PutPixelManager.cpp b/runtime/Sources/Graphics/PutPixelManager.cpp index 9d4d575..0993afc 100644 --- a/runtime/Sources/Graphics/PutPixelManager.cpp +++ b/runtime/Sources/Graphics/PutPixelManager.cpp @@ -5,7 +5,7 @@ namespace mlx { - NonOwningPtr PutPixelManager::DrawPixel(int x, int y, std::uint64_t draw_layer, std::uint32_t color) + NonOwningPtr PutPixelManager::DrawPixel(int x, int y, std::uint64_t draw_layer, int color) { Verify((bool)p_renderer, "invalid renderer pointer"); diff --git a/runtime/Sources/Platform/Window.cpp b/runtime/Sources/Platform/Window.cpp index 53bd6c3..1ae7f26 100644 --- a/runtime/Sources/Platform/Window.cpp +++ b/runtime/Sources/Platform/Window.cpp @@ -5,9 +5,9 @@ namespace mlx { - Window::Window(std::size_t w, std::size_t h, const std::string& title, bool is_resizable, bool hidden) : m_name(title), m_width(w), m_height(h) + Window::Window(const mlx_window_create_info* info, bool hidden) : m_name(info->title), m_width(info->width), m_height(info->height) { - p_window = SDLManager::Get().CreateWindow(title, w, h, hidden, m_id, is_resizable); + p_window = SDLManager::Get().CreateWindow(info, m_id, hidden); } void Window::Destroy() noexcept diff --git a/runtime/Sources/Renderer/Image.cpp b/runtime/Sources/Renderer/Image.cpp index 77b6539..95ef3ef 100644 --- a/runtime/Sources/Renderer/Image.cpp +++ b/runtime/Sources/Renderer/Image.cpp @@ -30,6 +30,14 @@ namespace mlx { + unsigned char reverse(unsigned char b) + { + b = (b & 0xF0) >> 4 | (b & 0x0F) << 4; + b = (b & 0xCC) >> 2 | (b & 0x33) << 2; + b = (b & 0xAA) >> 1 | (b & 0x55) << 1; + return b; + } + void Image::Init(ImageType type, std::uint32_t width, std::uint32_t height, VkFormat format, VkImageTiling tiling, VkImageUsageFlags usage, bool is_multisampled, [[maybe_unused]] std::string_view debug_name) { MLX_PROFILE_FUNCTION(); @@ -199,14 +207,19 @@ namespace mlx Image::Destroy(); } - void Texture::SetPixel(int x, int y, std::uint32_t color) noexcept + void Texture::SetPixel(int x, int y, int color) noexcept { MLX_PROFILE_FUNCTION(); if(x < 0 || y < 0 || static_cast(x) > m_width || static_cast(y) > m_height) return; if(!m_staging_buffer.has_value()) OpenCPUBuffer(); - m_cpu_buffer[(y * m_width) + x] = color; + unsigned char bytes[4]; + bytes[0] = (color >> 24) & 0xFF; + bytes[1] = (color >> 16) & 0xFF; + bytes[2] = (color >> 8) & 0xFF; + bytes[3] = color & 0xFF; + m_cpu_buffer[(y * m_width) + x] = *reinterpret_cast(bytes); m_has_been_modified = true; } @@ -217,12 +230,7 @@ namespace mlx return 0; if(!m_staging_buffer.has_value()) OpenCPUBuffer(); - std::uint32_t color = m_cpu_buffer[(y * m_width) + x]; - std::uint8_t* bytes = reinterpret_cast(&color); - std::uint8_t tmp = bytes[0]; - bytes[0] = bytes[2]; - bytes[2] = tmp; - return *reinterpret_cast(bytes); + return m_cpu_buffer[(y * m_width) + x]; } void Texture::Update(VkCommandBuffer cmd) diff --git a/runtime/Sources/Renderer/RenderCore.cpp b/runtime/Sources/Renderer/RenderCore.cpp index 510801b..9a91b71 100644 --- a/runtime/Sources/Renderer/RenderCore.cpp +++ b/runtime/Sources/Renderer/RenderCore.cpp @@ -26,6 +26,7 @@ #include #endif +#include #include #include #include @@ -86,7 +87,11 @@ namespace mlx kvfSetValidationErrorCallback(&ValidationErrorCallback); kvfSetValidationWarningCallback(&WarningCallback); - Window window(1, 1, "", true); + mlx_window_create_info info{}; + info.title = ""; 
+ info.width = 1; + info.height = 1; + Window window(&info, true); std::vector instance_extensions = window.GetRequiredVulkanInstanceExtentions(); #ifdef MLX_PLAT_MACOS instance_extensions.push_back(VK_KHR_PORTABILITY_ENUMERATION_EXTENSION_NAME); From 11ccc041d29c416378972ae5ffcec4ff601128e8 Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Sun, 15 Dec 2024 03:53:56 +0100 Subject: [PATCH 093/131] adding missing function implementation --- runtime/Includes/Core/SDLManager.h | 5 ++++ runtime/Includes/Platform/Window.h | 11 +++++-- runtime/Sources/Core/Bridge.cpp | 45 +++++++++++++++++++++++++++++ runtime/Sources/Core/SDLManager.cpp | 25 ++++++++++++++++ runtime/Sources/Renderer/Image.cpp | 1 + 5 files changed, 85 insertions(+), 2 deletions(-) diff --git a/runtime/Includes/Core/SDLManager.h b/runtime/Includes/Core/SDLManager.h index 2d2e57a..952ccc4 100644 --- a/runtime/Includes/Core/SDLManager.h +++ b/runtime/Includes/Core/SDLManager.h @@ -22,6 +22,11 @@ namespace mlx void MoveMouseOnWindow(Handle window, int x, int y) const noexcept; void GetScreenSizeWindowIsOn(Handle window, int* x, int* y) const noexcept; void SetWindowPosition(Handle window, int x, int y) const noexcept; + void SetWindowSize(Handle window, int x, int y) const noexcept; + void SetWindowTitle(Handle window, std::string_view title) const noexcept; + void SetWindowFullscreen(Handle window, bool enable) const noexcept; + void GetWindowPosition(Handle window, int* x, int* y) const noexcept; + void GetWindowSize(Handle window, int* x, int* y) const noexcept; static void HideCursor() noexcept; static void ShowCursor() noexcept; diff --git a/runtime/Includes/Platform/Window.h b/runtime/Includes/Platform/Window.h index 5bbd628..b837820 100644 --- a/runtime/Includes/Platform/Window.h +++ b/runtime/Includes/Platform/Window.h @@ -13,14 +13,21 @@ namespace mlx Window(const mlx_window_create_info* info, bool hidden = false); inline Handle GetWindowHandle() const noexcept { return p_window; } - inline int GetWidth() const noexcept { return m_width; } - inline int GetHeight() const noexcept { return m_height; } + inline int GetWidth() noexcept { SDLManager::Get().GetWindowSize(p_window, &m_width, &m_height); return m_width; } + inline int GetHeight() noexcept { SDLManager::Get().GetWindowSize(p_window, &m_width, &m_height); return m_height; } inline std::uint32_t GetID() const noexcept { return m_id; } inline const std::string& GetName() const { return m_name; } inline void MoveMouse(int x, int y) { SDLManager::Get().MoveMouseOnWindow(p_window, x, y); } inline void GetScreenSizeWindowIsOn(int* x, int* y) { SDLManager::Get().GetScreenSizeWindowIsOn(p_window, x, y); } + inline void SetPosition(int x, int y) { SDLManager::Get().SetWindowPosition(p_window, x, y); } + inline void SetSize(int x, int y) { SDLManager::Get().SetWindowSize(p_window, x, y); m_width = x; m_height = y; } + inline void SetTitle(std::string title) { SDLManager::Get().SetWindowTitle(p_window, title); m_name = std::move(title); } + inline void SetFullscreen(bool enable) { SDLManager::Get().SetWindowFullscreen(p_window, enable); } + + inline void GetPosition(int* x, int* y) { SDLManager::Get().GetWindowPosition(p_window, x, y); } + inline void GetSize(int* x, int* y) { *x = GetWidth(); *y = GetHeight(); } inline VkSurfaceKHR CreateVulkanSurface(VkInstance instance) const noexcept { return SDLManager::Get().CreateVulkanSurface(p_window, instance); } inline std::vector GetRequiredVulkanInstanceExtentions() const noexcept { return 
SDLManager::Get().GetRequiredVulkanInstanceExtentions(p_window); } diff --git a/runtime/Sources/Core/Bridge.cpp b/runtime/Sources/Core/Bridge.cpp index d556b88..a178420 100644 --- a/runtime/Sources/Core/Bridge.cpp +++ b/runtime/Sources/Core/Bridge.cpp @@ -80,6 +80,51 @@ extern "C" gs->GetWindow()->SetPosition(x, y); } + void mlx_set_window_size(mlx_context mlx, mlx_window win, int width, int height) + { + MLX_CHECK_APPLICATION_POINTER(mlx); + mlx::NonOwningPtr gs = mlx->app->GetGraphicsSupport(win); + if(!gs || !gs->HasWindow()) + return; + gs->GetWindow()->SetSize(width, height); + } + + void mlx_set_window_title(mlx_context mlx, mlx_window win, const char* title) + { + MLX_CHECK_APPLICATION_POINTER(mlx); + mlx::NonOwningPtr gs = mlx->app->GetGraphicsSupport(win); + if(!gs || !gs->HasWindow()) + return; + gs->GetWindow()->SetTitle(title); + } + + void mlx_set_window_fullscreen(mlx_context mlx, mlx_window win, bool enable) + { + MLX_CHECK_APPLICATION_POINTER(mlx); + mlx::NonOwningPtr gs = mlx->app->GetGraphicsSupport(win); + if(!gs || !gs->HasWindow()) + return; + gs->GetWindow()->SetFullscreen(enable); + } + + void mlx_get_window_position(mlx_context mlx, mlx_window win, int* x, int* y) + { + MLX_CHECK_APPLICATION_POINTER(mlx); + mlx::NonOwningPtr gs = mlx->app->GetGraphicsSupport(win); + if(!gs || !gs->HasWindow()) + return; + gs->GetWindow()->GetPosition(x, y); + } + + void mlx_get_window_size(mlx_context mlx, mlx_window win, int* x, int* y) + { + MLX_CHECK_APPLICATION_POINTER(mlx); + mlx::NonOwningPtr gs = mlx->app->GetGraphicsSupport(win); + if(!gs || !gs->HasWindow()) + return; + gs->GetWindow()->GetSize(x, y); + } + void mlx_clear_window(mlx_context mlx, mlx_window win, int color) { MLX_CHECK_APPLICATION_POINTER(mlx); diff --git a/runtime/Sources/Core/SDLManager.cpp b/runtime/Sources/Core/SDLManager.cpp index 707a48f..cd66caa 100644 --- a/runtime/Sources/Core/SDLManager.cpp +++ b/runtime/Sources/Core/SDLManager.cpp @@ -133,6 +133,31 @@ namespace mlx SDL_SetWindowPosition(static_cast(window)->window, x, y); } + void SDLManager::SetWindowSize(Handle window, int x, int y) const noexcept + { + SDL_SetWindowSize(static_cast(window)->window, x, y); + } + + void SDLManager::SetWindowTitle(Handle window, std::string_view title) const noexcept + { + SDL_SetWindowTitle(static_cast(window)->window, title.data()); + } + + void SDLManager::SetWindowFullscreen(Handle window, bool enable) const noexcept + { + SDL_SetWindowFullscreen(static_cast(window)->window, (enable ?
SDL_WINDOW_FULLSCREEN_DESKTOP : 0)); + } + + void SDLManager::GetWindowPosition(Handle window, int* x, int* y) const noexcept + { + SDL_GetWindowPosition(static_cast(window)->window, x, y); + } + + void SDLManager::GetWindowSize(Handle window, int* x, int* y) const noexcept + { + SDL_GetWindowSize(static_cast(window)->window, x, y); + } + void SDLManager::HideCursor() noexcept { SDL_ShowCursor(SDL_DISABLE); diff --git a/runtime/Sources/Renderer/Image.cpp b/runtime/Sources/Renderer/Image.cpp index 95ef3ef..892629d 100644 --- a/runtime/Sources/Renderer/Image.cpp +++ b/runtime/Sources/Renderer/Image.cpp @@ -214,6 +214,7 @@ namespace mlx return; if(!m_staging_buffer.has_value()) OpenCPUBuffer(); + // Needs to reverse the byte order because why not unsigned char bytes[4]; bytes[0] = (color >> 24) & 0xFF; bytes[1] = (color >> 16) & 0xFF; From ab4d67d7649e493cc1b0e761aefd48fa0aeab498 Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Mon, 16 Dec 2024 01:24:25 +0100 Subject: [PATCH 094/131] fixing memory usage --- example/build.sh | 4 +- example/main.c | 13 ++- includes/mlx.h | 3 +- includes/mlx_extended.h | 12 +-- runtime/Includes/Core/Application.h | 2 + runtime/Includes/Core/Graphics.h | 2 +- runtime/Includes/Core/Graphics.inl | 6 +- runtime/Includes/Core/SDLManager.h | 6 ++ runtime/Includes/Graphics/Mesh.h | 26 ++++++ runtime/Includes/Graphics/Mesh.inl | 59 ++++++++++++- runtime/Includes/Graphics/PutPixelManager.h | 8 +- runtime/Includes/Graphics/Scene.h | 2 +- runtime/Includes/Platform/Window.h | 37 ++++---- runtime/Includes/Renderer/Vertex.h | 4 +- runtime/Includes/Renderer/Vertex.inl | 5 ++ runtime/Sources/Core/Application.cpp | 7 +- runtime/Sources/Core/Bridge.cpp | 90 +++++++++++++++++++- runtime/Sources/Core/Memory.cpp | 9 +- runtime/Sources/Core/SDLManager.cpp | 27 ++++++ runtime/Sources/Graphics/PutPixelManager.cpp | 49 +++++++---- runtime/Sources/Graphics/Scene.cpp | 6 +- runtime/Sources/Graphics/Sprite.cpp | 6 +- runtime/Sources/Graphics/Text.cpp | 9 +- runtime/Sources/Renderer/Swapchain.cpp | 2 +- 24 files changed, 315 insertions(+), 79 deletions(-) diff --git a/example/build.sh b/example/build.sh index ab5882b..249b0f5 100755 --- a/example/build.sh +++ b/example/build.sh @@ -5,7 +5,7 @@ if [ -e a.out ]; then fi if [ $(uname -s) = 'Darwin' ]; then - clang main.c ../libmlx.dylib -L /opt/homebrew/lib -lSDL2 -g; + clang main.c ../libmlx.dylib -L /opt/homebrew/lib -lSDL2 -lm -g; else - clang main.c ../libmlx.so -lSDL2 -g -Wall -Wextra -Werror; + clang main.c ../libmlx.so -lSDL2 -lm -g -Wall -Wextra -Werror; fi diff --git a/example/main.c b/example/main.c index 205f7dc..a9f5cb7 100644 --- a/example/main.c +++ b/example/main.c @@ -1,5 +1,7 @@ #include +#include #include "../includes/mlx.h" +#include "../includes/mlx_extended.h" typedef struct { @@ -16,8 +18,11 @@ int update(void* param) static int i = 0; mlx_t* mlx = (mlx_t*)param; - if(i == 200) + if(i > 200) + { mlx_clear_window(mlx->mlx, mlx->win, 0x334D4DFF); + mlx_put_transformed_image_to_window(mlx->mlx, mlx->win, mlx->logo_bmp, 220, 40, 0.5f, 0.5f, i); + } if(i >= 250) mlx_set_font_scale(mlx->mlx, "default", 16.f); @@ -27,7 +32,6 @@ int update(void* param) mlx_string_put(mlx->mlx, mlx->win, 160, 120, 0xFF2066FF, "this text should be hidden"); mlx_put_image_to_window(mlx->mlx, mlx->win, mlx->logo_png, 100, 100); - //mlx_transform_put_image_to_window(mlx->mlx, mlx->win, mlx->logo_bmp, 220, 40, 0.5f, 75.0f); mlx_put_image_to_window(mlx->mlx, mlx->win, mlx->img, 150, 60); mlx_set_font(mlx->mlx, "default"); @@ -40,7 +44,10 @@ int update(void*
param) color += (color < 255); } - //mlx_transform_put_image_to_window(mlx->mlx, mlx->win, mlx->logo_jpg, 210, 150, 2.0f, 0.0f); + if(i < 200) + mlx_put_transformed_image_to_window(mlx->mlx, mlx->win, mlx->logo_jpg, 210, 150, 0.5f, 2.0f, 0.0f); + else + mlx_put_transformed_image_to_window(mlx->mlx, mlx->win, mlx->logo_jpg, 210, 150, fabs(sin(i / 100.0f)), fabs(cos(i / 100.0f) * 2.0f), 0.0f); mlx_set_font_scale(mlx->mlx, "default", 8.f); mlx_string_put(mlx->mlx, mlx->win, 210, 175, 0xFFAF2BFF, "hidden"); diff --git a/includes/mlx.h b/includes/mlx.h index 0d4e2f5..e39da89 100644 --- a/includes/mlx.h +++ b/includes/mlx.h @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/04 16:56:35 by maldavid #+# #+# */ -/* Updated: 2024/12/15 01:58:12 by maldavid ### ########.fr */ +/* Updated: 2024/12/15 13:59:00 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -78,7 +78,6 @@ MLX_API void mlx_destroy_context(mlx_context mlx); */ typedef struct mlx_window_create_info { - void* mlx_extension; const char* title; int width; int height; diff --git a/includes/mlx_extended.h b/includes/mlx_extended.h index 64d7357..f324d06 100644 --- a/includes/mlx_extended.h +++ b/includes/mlx_extended.h @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2024/12/14 16:17:10 by maldavid #+# #+# */ -/* Updated: 2024/12/14 17:42:06 by maldavid ### ########.fr */ +/* Updated: 2024/12/15 13:58:58 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -26,16 +26,6 @@ extern "C" { /* Window related functions */ -typedef struct mlx_window_create_info_extension -{ - int position_x; - int position_y; - int max_width; - int max_height; - int min_width; - int min_height; -} mlx_window_create_info_extension; - /** * @brief Sets maximum window size * diff --git a/runtime/Includes/Core/Application.h b/runtime/Includes/Core/Application.h index 5814218..4dff74f 100644 --- a/runtime/Includes/Core/Application.h +++ b/runtime/Includes/Core/Application.h @@ -8,6 +8,7 @@ #include #include #include +#include namespace mlx { @@ -47,6 +48,7 @@ namespace mlx Inputs m_in; FontRegistry m_font_registry; ImageRegistry m_image_registry; + MeshRegistry m_mesh_registry; std::vector> m_graphics; std::shared_ptr p_last_font_bound; std::function f_loop_hook; diff --git a/runtime/Includes/Core/Graphics.h b/runtime/Includes/Core/Graphics.h index 8d69a91..eeb2820 100644 --- a/runtime/Includes/Core/Graphics.h +++ b/runtime/Includes/Core/Graphics.h @@ -26,7 +26,7 @@ namespace mlx inline void PixelPut(int x, int y, int color) noexcept; inline void StringPut(int x, int y, int, std::string str); - inline void TexturePut(NonOwningPtr texture, int x, int y, float scale, float angle); + inline void TexturePut(NonOwningPtr texture, int x, int y, float scale_x, float scale_y, float angle); inline void TryEraseSpritesInScene(NonOwningPtr texture) noexcept; diff --git a/runtime/Includes/Core/Graphics.inl b/runtime/Includes/Core/Graphics.inl index 13a33eb..f6fa842 100644 --- a/runtime/Includes/Core/Graphics.inl +++ b/runtime/Includes/Core/Graphics.inl @@ -59,10 +59,10 @@ namespace mlx p_scene->BringToDrawLayer(text.Get(), m_draw_layer); } - void GraphicsSupport::TexturePut(NonOwningPtr texture, int x, int y, float scale, float angle) + void GraphicsSupport::TexturePut(NonOwningPtr texture, int x, int y, float scale_x, float scale_y, float angle) { MLX_PROFILE_FUNCTION(); - 
NonOwningPtr sprite = p_scene->GetSpriteFromTexturePositionScaleRotation(texture, Vec2f{ static_cast(x), static_cast(y) }, scale, angle); + NonOwningPtr sprite = p_scene->GetSpriteFromTexturePositionScaleRotation(texture, Vec2f{ static_cast(x), static_cast(y) }, scale_x, scale_y, angle); if(!sprite) { if(m_pixelput_called) @@ -73,7 +73,7 @@ namespace mlx Sprite& new_sprite = p_scene->CreateSprite(texture); new_sprite.SetCenter(Vec2f{ texture->GetWidth() / 2.0f, texture->GetHeight() / 2.0f }); new_sprite.SetPosition(Vec2f{ static_cast(x), static_cast(y) }); - new_sprite.SetScale(Vec2f{ scale, scale }); + new_sprite.SetScale(Vec2f{ scale_x, scale_y }); new_sprite.SetRotation(angle); } else if(!p_scene->IsTextureAtGivenDrawLayer(texture, m_draw_layer)) diff --git a/runtime/Includes/Core/SDLManager.h b/runtime/Includes/Core/SDLManager.h index 952ccc4..6ed1c57 100644 --- a/runtime/Includes/Core/SDLManager.h +++ b/runtime/Includes/Core/SDLManager.h @@ -25,6 +25,12 @@ namespace mlx void SetWindowSize(Handle window, int x, int y) const noexcept; void SetWindowTitle(Handle window, std::string_view title) const noexcept; void SetWindowFullscreen(Handle window, bool enable) const noexcept; + void SetWindowMaxSize(Handle window, int x, int y) const noexcept; + void SetWindowMinSize(Handle window, int x, int y) const noexcept; + void MaximizeWindow(Handle window) const noexcept; + void MinimizeWindow(Handle window) const noexcept; + void RestoreWindow(Handle window) const noexcept; + void GetWindowPosition(Handle window, int* x, int* y) const noexcept; void GetWindowSize(Handle window, int* x, int* y) const noexcept; diff --git a/runtime/Includes/Graphics/Mesh.h b/runtime/Includes/Graphics/Mesh.h index 627b808..c0f59a9 100644 --- a/runtime/Includes/Graphics/Mesh.h +++ b/runtime/Includes/Graphics/Mesh.h @@ -12,11 +12,16 @@ namespace mlx public: struct SubMesh { + struct NoBuild {}; + VertexBuffer vbo; IndexBuffer ibo; + std::vector vertex_data; + std::vector index_data; std::size_t triangle_count = 0; inline SubMesh(const std::vector& vertices, const std::vector& indices); + inline SubMesh(const std::vector& vertices, const std::vector& indices, NoBuild); }; public: @@ -35,6 +40,27 @@ namespace mlx private: std::vector m_sub_meshes; }; + + // A registry just to avoid destroying meshes when clearing a window + class MeshRegistry + { + public: + inline MeshRegistry(); + + inline void RegisterMesh(std::shared_ptr mesh); + inline std::shared_ptr FindMesh(const std::vector& sub_meshes); + inline void UnregisterMesh(std::shared_ptr mesh); + inline void Reset(); + + inline static bool IsInit() noexcept { return s_instance != nullptr; } + inline static MeshRegistry& Get() noexcept { return *s_instance; } + + inline ~MeshRegistry(); + + private: + inline static MeshRegistry* s_instance = nullptr; + std::unordered_set> m_meshes_registry; + }; } #include diff --git a/runtime/Includes/Graphics/Mesh.inl b/runtime/Includes/Graphics/Mesh.inl index 2713ff0..5677f6f 100644 --- a/runtime/Includes/Graphics/Mesh.inl +++ b/runtime/Includes/Graphics/Mesh.inl @@ -3,7 +3,7 @@ namespace mlx { - Mesh::SubMesh::SubMesh(const std::vector& vertices, const std::vector& indices) + Mesh::SubMesh::SubMesh(const std::vector& vertices, const std::vector& indices) : vertex_data(vertices), index_data(indices) { CPUBuffer vb(vertices.size() * sizeof(Vertex)); std::memcpy(vb.GetData(), vertices.data(), vb.GetSize()); @@ -17,4 +17,61 @@ namespace mlx triangle_count = vertices.size() / 3; } + + Mesh::SubMesh::SubMesh(const std::vector& 
vertices, const std::vector& indices, NoBuild) : vertex_data(vertices), index_data(indices) {} + + MeshRegistry::MeshRegistry() + { + s_instance = this; + } + + void MeshRegistry::RegisterMesh(std::shared_ptr mesh) + { + m_meshes_registry.insert(mesh); + } + + std::shared_ptr MeshRegistry::FindMesh(const std::vector& sub_meshes) + { + for(const std::shared_ptr& mesh : m_meshes_registry) + { + if(mesh->GetSubMeshCount() != sub_meshes.size()) // If the number of submeshes is different than the one we want to find no need to test + continue; + bool found = true; + for(std::size_t i = 0; i < sub_meshes.size(); i++) + { + try + { + const Mesh::SubMesh& registered_sub_mesh = mesh->GetSubMesh(i); + if(registered_sub_mesh.vertex_data != sub_meshes[i].vertex_data || registered_sub_mesh.index_data != sub_meshes[i].index_data) + { + found = false; + break; + } + } + catch(...) + { + found = false; + break; + } + } + if(found) + return mesh; + } + return nullptr; + } + + void MeshRegistry::UnregisterMesh(std::shared_ptr mesh) + { + m_meshes_registry.erase(mesh); + } + + void MeshRegistry::Reset() + { + m_meshes_registry.clear(); + } + + MeshRegistry::~MeshRegistry() + { + s_instance = nullptr; + } } diff --git a/runtime/Includes/Graphics/PutPixelManager.h b/runtime/Includes/Graphics/PutPixelManager.h index 9bd14be..e630b4c 100644 --- a/runtime/Includes/Graphics/PutPixelManager.h +++ b/runtime/Includes/Graphics/PutPixelManager.h @@ -10,15 +10,17 @@ namespace mlx public: PutPixelManager(NonOwningPtr renderer) : p_renderer(renderer) {} - // Return a valid pointer when a new texture has been created + // Returns a valid pointer when a new texture has been created NonOwningPtr DrawPixel(int x, int y, std::uint64_t draw_layer, int color); void ResetRenderData(); - ~PutPixelManager(); + ~PutPixelManager() = default; private: - std::unordered_map m_textures; + std::unordered_map> m_placements; + std::vector> m_textures; NonOwningPtr p_renderer; + std::size_t m_current_texture_index = 0; }; } diff --git a/runtime/Includes/Graphics/Scene.h b/runtime/Includes/Graphics/Scene.h index 843d2b0..69d7fdc 100644 --- a/runtime/Includes/Graphics/Scene.h +++ b/runtime/Includes/Graphics/Scene.h @@ -17,7 +17,7 @@ namespace mlx Scene() = default; Sprite& CreateSprite(NonOwningPtr texture) noexcept; - NonOwningPtr GetSpriteFromTexturePositionScaleRotation(NonOwningPtr texture, const Vec2f& position, float scale, float rotation) const; + NonOwningPtr GetSpriteFromTexturePositionScaleRotation(NonOwningPtr texture, const Vec2f& position, float scale_x, float scale_y, float rotation) const; void TryEraseSpriteFromTexture(NonOwningPtr texture); bool IsTextureAtGivenDrawLayer(NonOwningPtr texture, std::uint64_t draw_layer) const; diff --git a/runtime/Includes/Platform/Window.h b/runtime/Includes/Platform/Window.h index b837820..a3a28d5 100644 --- a/runtime/Includes/Platform/Window.h +++ b/runtime/Includes/Platform/Window.h @@ -12,26 +12,31 @@ namespace mlx public: Window(const mlx_window_create_info* info, bool hidden = false); - inline Handle GetWindowHandle() const noexcept { return p_window; } - inline int GetWidth() noexcept { SDLManager::Get().GetWindowSize(p_window, &m_width, &m_height); return m_width; } - inline int GetHeight() noexcept { SDLManager::Get().GetWindowSize(p_window, &m_width, &m_height); return m_height; } - inline std::uint32_t GetID() const noexcept { return m_id; } - inline const std::string& GetName() const { return m_name; } + MLX_FORCEINLINE Handle GetWindowHandle() const noexcept { return p_window; } + 
MLX_FORCEINLINE int GetWidth() noexcept { SDLManager::Get().GetWindowSize(p_window, &m_width, &m_height); return m_width; } + MLX_FORCEINLINE int GetHeight() noexcept { SDLManager::Get().GetWindowSize(p_window, &m_width, &m_height); return m_height; } + MLX_FORCEINLINE std::uint32_t GetID() const noexcept { return m_id; } + MLX_FORCEINLINE const std::string& GetName() const { return m_name; } - inline void MoveMouse(int x, int y) { SDLManager::Get().MoveMouseOnWindow(p_window, x, y); } - inline void GetScreenSizeWindowIsOn(int* x, int* y) { SDLManager::Get().GetScreenSizeWindowIsOn(p_window, x, y); } + MLX_FORCEINLINE void MoveMouse(int x, int y) { SDLManager::Get().MoveMouseOnWindow(p_window, x, y); } + MLX_FORCEINLINE void GetScreenSizeWindowIsOn(int* x, int* y) { SDLManager::Get().GetScreenSizeWindowIsOn(p_window, x, y); } - inline void SetPosition(int x, int y) { SDLManager::Get().SetWindowPosition(p_window, x, y); } - inline void SetSize(int x, int y) { SDLManager::Get().SetWindowSize(p_window, x, y); m_width = x; m_height = y; } - inline void SetTitle(std::string title) { SDLManager::Get().SetWindowTitle(p_window, title); m_name = std::move(title); } - inline void SetFullscreen(bool enable) { SDLManager::Get().SetWindowFullscreen(p_window, enable); } + MLX_FORCEINLINE void SetPosition(int x, int y) { SDLManager::Get().SetWindowPosition(p_window, x, y); } + MLX_FORCEINLINE void SetSize(int x, int y) { SDLManager::Get().SetWindowSize(p_window, x, y); m_width = x; m_height = y; } + MLX_FORCEINLINE void SetTitle(std::string title) { SDLManager::Get().SetWindowTitle(p_window, title); m_name = std::move(title); } + MLX_FORCEINLINE void SetFullscreen(bool enable) { SDLManager::Get().SetWindowFullscreen(p_window, enable); } + MLX_FORCEINLINE void SetMaxSize(int x, int y) { SDLManager::Get().SetWindowMaxSize(p_window, x, y); } + MLX_FORCEINLINE void SetMinSize(int x, int y) { SDLManager::Get().SetWindowMinSize(p_window, x, y); } + MLX_FORCEINLINE void Maximize() { SDLManager::Get().MaximizeWindow(p_window); } + MLX_FORCEINLINE void Minimize() { SDLManager::Get().MinimizeWindow(p_window); } + MLX_FORCEINLINE void Restore() { SDLManager::Get().RestoreWindow(p_window); } - inline void GetPosition(int* x, int* y) { SDLManager::Get().GetWindowPosition(p_window, x, y); } - inline void GetSize(int* x, int* y) { *x = GetWidth(); *y = GetHeight(); } + MLX_FORCEINLINE void GetPosition(int* x, int* y) { SDLManager::Get().GetWindowPosition(p_window, x, y); } + MLX_FORCEINLINE void GetSize(int* x, int* y) { *x = GetWidth(); *y = GetHeight(); } - inline VkSurfaceKHR CreateVulkanSurface(VkInstance instance) const noexcept { return SDLManager::Get().CreateVulkanSurface(p_window, instance); } - inline std::vector GetRequiredVulkanInstanceExtentions() const noexcept { return SDLManager::Get().GetRequiredVulkanInstanceExtentions(p_window); } - inline Vec2ui GetVulkanDrawableSize() const noexcept { return SDLManager::Get().GetVulkanDrawableSize(p_window); } + MLX_FORCEINLINE VkSurfaceKHR CreateVulkanSurface(VkInstance instance) const noexcept { return SDLManager::Get().CreateVulkanSurface(p_window, instance); } + MLX_FORCEINLINE std::vector GetRequiredVulkanInstanceExtentions() const noexcept { return SDLManager::Get().GetRequiredVulkanInstanceExtentions(p_window); } + MLX_FORCEINLINE Vec2ui GetVulkanDrawableSize() const noexcept { return SDLManager::Get().GetVulkanDrawableSize(p_window); } void Destroy() noexcept; diff --git a/runtime/Includes/Renderer/Vertex.h b/runtime/Includes/Renderer/Vertex.h index 
d815a3e..f2c11d0 100644 --- a/runtime/Includes/Renderer/Vertex.h +++ b/runtime/Includes/Renderer/Vertex.h @@ -12,7 +12,9 @@ namespace mlx alignas(16) Vec2f uv = Vec2f{ 0.0f, 0.0f }; Vertex() = default; - Vertex(Vec4f p, Vec2f u) : position(std::move(p)), uv(std::move(u)) {} + inline Vertex(Vec4f p, Vec2f u) : position(std::move(p)), uv(std::move(u)) {} + + [[nodiscard]] inline bool operator==(const Vertex& rhs) const noexcept; [[nodiscard]] inline static VkVertexInputBindingDescription GetBindingDescription(); [[nodiscard]] inline static std::array GetAttributeDescriptions(); diff --git a/runtime/Includes/Renderer/Vertex.inl b/runtime/Includes/Renderer/Vertex.inl index fb09f95..e52ced8 100644 --- a/runtime/Includes/Renderer/Vertex.inl +++ b/runtime/Includes/Renderer/Vertex.inl @@ -3,6 +3,11 @@ namespace mlx { + bool Vertex::operator==(const Vertex& rhs) const noexcept + { + return position == rhs.position && uv == rhs.uv; + } + VkVertexInputBindingDescription Vertex::GetBindingDescription() { VkVertexInputBindingDescription binding_description{}; diff --git a/runtime/Sources/Core/Application.cpp b/runtime/Sources/Core/Application.cpp index 52d5583..daf35c0 100644 --- a/runtime/Sources/Core/Application.cpp +++ b/runtime/Sources/Core/Application.cpp @@ -109,12 +109,7 @@ namespace mlx Application::~Application() { - for(auto& window : m_graphics) - { - if(window && window->GetWindow()->GetName() == "让我们在月光下åšçˆ±å§") - window.reset(); - } - + m_mesh_registry.Reset(); m_font_registry.Reset(); p_render_core.reset(); p_sdl_manager.reset(); diff --git a/runtime/Sources/Core/Bridge.cpp b/runtime/Sources/Core/Bridge.cpp index a178420..2202705 100644 --- a/runtime/Sources/Core/Bridge.cpp +++ b/runtime/Sources/Core/Bridge.cpp @@ -1,3 +1,4 @@ +#include "mlx_extended.h" #include #include @@ -274,7 +275,7 @@ extern "C" mlx::NonOwningPtr texture = mlx->app->GetTexture(image); if(!texture) return; - gs->TexturePut(texture, x, y, 1.0f, 0.0f); + gs->TexturePut(texture, x, y, 1.0f, 1.0f, 0.0f); } void mlx_string_put(mlx_context mlx, mlx_window win, int x, int y, int color, char* str) @@ -299,7 +300,7 @@ extern "C" void mlx_set_font(mlx_context mlx, char* filepath) { MLX_CHECK_APPLICATION_POINTER(mlx); - if (filepath == nullptr) + if(filepath == nullptr) { mlx::Error("Font loader: filepath is NULL"); return; @@ -319,7 +320,7 @@ extern "C" void mlx_set_font_scale(mlx_context mlx, char* filepath, float scale) { MLX_CHECK_APPLICATION_POINTER(mlx); - if (filepath == nullptr) + if(filepath == nullptr) { mlx::Error("Font loader: filepath is NULL"); return; @@ -332,4 +333,87 @@ extern "C" } mlx->app->LoadFont(file, scale); } + + // Extended + + void mlx_set_window_max_size(mlx_context mlx, mlx_window win, int x, int y) + { + MLX_CHECK_APPLICATION_POINTER(mlx); + mlx::NonOwningPtr gs = mlx->app->GetGraphicsSupport(win); + if(!gs) + return; + gs->GetWindow()->SetMaxSize(x, y); + } + + void mlx_set_window_min_size(mlx_context mlx, mlx_window win, int x, int y) + { + MLX_CHECK_APPLICATION_POINTER(mlx); + mlx::NonOwningPtr gs = mlx->app->GetGraphicsSupport(win); + if(!gs) + return; + gs->GetWindow()->SetMinSize(x, y); + } + + void mlx_maximise_window(mlx_context mlx, mlx_window win) + { + MLX_CHECK_APPLICATION_POINTER(mlx); + mlx::NonOwningPtr gs = mlx->app->GetGraphicsSupport(win); + if(!gs) + return; + gs->GetWindow()->Maximize(); + } + + void mlx_minimize_window(mlx_context mlx, mlx_window win) + { + MLX_CHECK_APPLICATION_POINTER(mlx); + mlx::NonOwningPtr gs = mlx->app->GetGraphicsSupport(win); + if(!gs) + return; + 
gs->GetWindow()->Minimize(); + } + + void mlx_restore_window(mlx_context mlx, mlx_window win) + { + MLX_CHECK_APPLICATION_POINTER(mlx); + mlx::NonOwningPtr gs = mlx->app->GetGraphicsSupport(win); + if(!gs) + return; + gs->GetWindow()->Restore(); + } + + void mlx_pixel_put_array(mlx_context mlx, mlx_window win, int x, int y, int* pixels) + { + MLX_CHECK_APPLICATION_POINTER(mlx); + mlx::NonOwningPtr gs = mlx->app->GetGraphicsSupport(win); + if(!gs) + return; + } + + void mlx_get_image_region(mlx_context mlx, mlx_image image, int x, int y, int w, int h, int* dst) + { + MLX_CHECK_APPLICATION_POINTER(mlx); + mlx::NonOwningPtr texture = mlx->app->GetTexture(image); + if(!texture) + return; + } + + void mlx_set_image_region(mlx_context mlx, mlx_image image, int x, int y, int w, int h, int* pixels) + { + MLX_CHECK_APPLICATION_POINTER(mlx); + mlx::NonOwningPtr texture = mlx->app->GetTexture(image); + if(!texture) + return; + } + + void mlx_put_transformed_image_to_window(mlx_context mlx, mlx_window win, mlx_image image, int x, int y, float scale_x, float scale_y, float angle) + { + MLX_CHECK_APPLICATION_POINTER(mlx); + mlx::NonOwningPtr gs = mlx->app->GetGraphicsSupport(win); + if(!gs) + return; + mlx::NonOwningPtr texture = mlx->app->GetTexture(image); + if(!texture) + return; + gs->TexturePut(texture, x, y, scale_x, scale_y, angle); + } } diff --git a/runtime/Sources/Core/Memory.cpp b/runtime/Sources/Core/Memory.cpp index 1dcb660..690814b 100644 --- a/runtime/Sources/Core/Memory.cpp +++ b/runtime/Sources/Core/Memory.cpp @@ -68,15 +68,16 @@ namespace mlx #ifdef MLX_COMPILER_MSVC void* ptr2 = _aligned_realloc(ptr, alignment, size); + if(it != s_blocks.end()) + s_blocks.erase(it); #else void* ptr2 = AlignedMalloc(alignment, size); if(it != s_blocks.end()) + { std::memcpy(ptr2, ptr, it->size); + Free(ptr); + } #endif - - if(it != s_blocks.end()) - s_blocks.erase(it); - if(ptr2 != nullptr) s_blocks.emplace_back(ptr, size, true); return ptr2; diff --git a/runtime/Sources/Core/SDLManager.cpp b/runtime/Sources/Core/SDLManager.cpp index cd66caa..5fa5ce6 100644 --- a/runtime/Sources/Core/SDLManager.cpp +++ b/runtime/Sources/Core/SDLManager.cpp @@ -59,6 +59,8 @@ namespace mlx flags |= SDL_WINDOW_SHOWN; if(info->is_resizable) flags |= SDL_WINDOW_RESIZABLE; + if(info->is_fullscreen) + flags |= SDL_WINDOW_FULLSCREEN_DESKTOP; infos->window = SDL_CreateWindow(info->title, SDL_WINDOWPOS_CENTERED, SDL_WINDOWPOS_CENTERED, info->width, info->height, flags); if(!infos->window) @@ -148,6 +150,31 @@ namespace mlx SDL_SetWindowFullscreen(static_cast(window)->window, (enable ? 
SDL_WINDOW_FULLSCREEN_DESKTOP : 0)); } + void SDLManager::SetWindowMaxSize(Handle window, int x, int y) const noexcept + { + SDL_SetWindowMaximumSize(static_cast(window)->window, x, y); + } + + void SDLManager::SetWindowMinSize(Handle window, int x, int y) const noexcept + { + SDL_SetWindowMinimumSize(static_cast(window)->window, x, y); + } + + void SDLManager::MaximizeWindow(Handle window) const noexcept + { + SDL_MaximizeWindow(static_cast(window)->window); + } + + void SDLManager::MinimizeWindow(Handle window) const noexcept + { + SDL_MinimizeWindow(static_cast(window)->window); + } + + void SDLManager::RestoreWindow(Handle window) const noexcept + { + SDL_RestoreWindow(static_cast(window)->window); + } + void SDLManager::GetWindowPosition(Handle window, int* x, int* y) const noexcept { SDL_GetWindowPosition(static_cast(window)->window, x, y); diff --git a/runtime/Sources/Graphics/PutPixelManager.cpp b/runtime/Sources/Graphics/PutPixelManager.cpp index 0993afc..1c99b65 100644 --- a/runtime/Sources/Graphics/PutPixelManager.cpp +++ b/runtime/Sources/Graphics/PutPixelManager.cpp @@ -17,24 +17,43 @@ namespace mlx else FatalError("a renderer was created without window nor render target attached (wtf)"); - #ifdef DEBUG - auto res = m_textures.try_emplace(draw_layer, CPUBuffer{}, extent.width, extent.height, VK_FORMAT_R8G8B8A8_SRGB, false, "mlx_put_pixel_layer_" + std::to_string(draw_layer)); - #else - auto res = m_textures.try_emplace(draw_layer, CPUBuffer{}, extent.width, extent.height, VK_FORMAT_R8G8B8A8_SRGB, false, std::string_view{}); - #endif - if(res.second) - res.first->second.Clear(VK_NULL_HANDLE, Vec4f{ 0.0f }); - res.first->second.SetPixel(x, y, color); - return (res.second ? &res.first->second : nullptr); + auto it = m_placements.find(draw_layer); + if(it != m_placements.end()) + { + it->second->SetPixel(x, y, color); + return nullptr; + } + + bool adjusment = false; + if(m_current_texture_index >= m_textures.size()) + { + #ifdef DEBUG + m_textures.push_back(std::make_unique(CPUBuffer{}, extent.width, extent.height, VK_FORMAT_R8G8B8A8_SRGB, false, "mlx_put_pixel_layer_" + std::to_string(draw_layer))); + #else + m_textures.push_back(std::make_unique(CPUBuffer{}, extent.width, extent.height, VK_FORMAT_R8G8B8A8_SRGB, false, std::string_view{})); + #endif + m_current_texture_index++; + adjusment = true; + } + try + { + m_placements[draw_layer] = m_textures.at(m_current_texture_index - adjusment).get(); + m_textures.at(m_current_texture_index - adjusment)->Clear(VK_NULL_HANDLE, Vec4f{ 0.0f }); + m_textures.at(m_current_texture_index - adjusment)->SetPixel(x, y, color); + return m_textures.at(m_current_texture_index - adjusment).get(); + } + catch(...) 
+ { + Error("PutPixelManager: invalid texture index; % is not in range of 0-% (internal mlx issue, please report to devs)", m_current_texture_index - 1, m_textures.size()); + return nullptr; + } } void PutPixelManager::ResetRenderData() { - m_textures.clear(); - } - - PutPixelManager::~PutPixelManager() - { - ResetRenderData(); + m_placements.clear(); + for(auto& texture : m_textures) + texture->Clear(VK_NULL_HANDLE, Vec4f{ 0.0f }); + m_current_texture_index = 0; } } diff --git a/runtime/Sources/Graphics/Scene.cpp b/runtime/Sources/Graphics/Scene.cpp index c070e64..d849d91 100644 --- a/runtime/Sources/Graphics/Scene.cpp +++ b/runtime/Sources/Graphics/Scene.cpp @@ -27,16 +27,16 @@ namespace mlx return *sprite; } - NonOwningPtr Scene::GetSpriteFromTexturePositionScaleRotation(NonOwningPtr texture, const Vec2f& position, float scale, float rotation) const + NonOwningPtr Scene::GetSpriteFromTexturePositionScaleRotation(NonOwningPtr texture, const Vec2f& position, float scale_x, float scale_y, float rotation) const { MLX_PROFILE_FUNCTION(); - auto it = std::find_if(m_drawables.begin(), m_drawables.end(), [&texture, &position, scale, rotation](std::shared_ptr drawable) + auto it = std::find_if(m_drawables.begin(), m_drawables.end(), [&texture, &position, scale_x, scale_y, rotation](std::shared_ptr drawable) { if(!drawable || drawable->GetType() != DrawableType::Sprite) return false; return static_cast(drawable.get())->GetTexture() == texture && drawable->GetPosition() == position && - drawable->GetScale() == Vec2f{ scale, scale } && + drawable->GetScale() == Vec2f{ scale_x, scale_y } && drawable->GetRotation().ToEulerAngles() == EulerAnglesf{ 0.0f, 0.0f, rotation }; }); return static_cast(it != m_drawables.end() ? it->get() : nullptr); diff --git a/runtime/Sources/Graphics/Sprite.cpp b/runtime/Sources/Graphics/Sprite.cpp index 3fd9ae0..5e1b616 100644 --- a/runtime/Sources/Graphics/Sprite.cpp +++ b/runtime/Sources/Graphics/Sprite.cpp @@ -32,7 +32,10 @@ namespace mlx 0, }; - std::shared_ptr mesh = std::make_shared(); + std::shared_ptr mesh = MeshRegistry::Get().FindMesh({ Mesh::SubMesh{ data, indices, Mesh::SubMesh::NoBuild{} } }); + if(mesh) + return mesh; + mesh = std::make_shared(); mesh->AddSubMesh({ std::move(data), std::move(indices) }); return mesh; } @@ -42,6 +45,7 @@ namespace mlx MLX_PROFILE_FUNCTION(); Verify((bool)texture, "Sprite: invalid texture (internal mlx issue, please report to devs)"); p_mesh = CreateQuad(0, 0, texture->GetWidth(), texture->GetHeight()); + MeshRegistry::Get().RegisterMesh(p_mesh); p_texture = texture; } diff --git a/runtime/Sources/Graphics/Text.cpp b/runtime/Sources/Graphics/Text.cpp index 90033d5..9498411 100644 --- a/runtime/Sources/Graphics/Text.cpp +++ b/runtime/Sources/Graphics/Text.cpp @@ -40,8 +40,13 @@ namespace mlx index_data.emplace_back(index + 0); } - std::shared_ptr mesh = std::make_shared(); - mesh->AddSubMesh({ std::move(vertex_data), std::move(index_data) }); + std::shared_ptr mesh = MeshRegistry::Get().FindMesh({ Mesh::SubMesh{ vertex_data, index_data, Mesh::SubMesh::NoBuild{} } }); + if(!mesh) + { + mesh = std::make_shared(); + mesh->AddSubMesh({ std::move(vertex_data), std::move(index_data) }); + MeshRegistry::Get().RegisterMesh(mesh); + } Init(text, font, mesh); } diff --git a/runtime/Sources/Renderer/Swapchain.cpp b/runtime/Sources/Renderer/Swapchain.cpp index bc414bb..b4b3117 100644 --- a/runtime/Sources/Renderer/Swapchain.cpp +++ b/runtime/Sources/Renderer/Swapchain.cpp @@ -79,7 +79,7 @@ namespace mlx m_surface = 
p_window->CreateVulkanSurface(RenderCore::Get().GetInstance()); DebugLog("Vulkan: surface created"); - m_swapchain = kvfCreateSwapchainKHR(RenderCore::Get().GetDevice(), RenderCore::Get().GetPhysicalDevice(), m_surface, extent, VK_NULL_HANDLE, false); + m_swapchain = kvfCreateSwapchainKHR(RenderCore::Get().GetDevice(), RenderCore::Get().GetPhysicalDevice(), m_surface, extent, m_swapchain, false); m_images_count = kvfGetSwapchainImagesCount(m_swapchain); m_min_images_count = kvfGetSwapchainMinImagesCount(m_swapchain); From 4987a8ca6e93b9fc442847537dab9808abf1c6e3 Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Mon, 16 Dec 2024 16:13:44 +0100 Subject: [PATCH 095/131] implementing last functions, adding put pixel region --- example/main.c | 21 ++++-- includes/mlx_extended.h | 21 +++++- runtime/Includes/Core/Graphics.h | 2 + runtime/Includes/Core/Graphics.inl | 24 +++++++ runtime/Includes/Graphics/PutPixelManager.h | 5 ++ runtime/Includes/Renderer/Image.h | 3 + runtime/Includes/Utils/Bits.h | 16 +++++ runtime/Sources/Core/Bridge.cpp | 14 +++- runtime/Sources/Graphics/PutPixelManager.cpp | 42 ++++++++++-- runtime/Sources/Renderer/Image.cpp | 71 ++++++++++++++++++-- 10 files changed, 201 insertions(+), 18 deletions(-) create mode 100644 runtime/Includes/Utils/Bits.h diff --git a/example/main.c b/example/main.c index a9f5cb7..f09d680 100644 --- a/example/main.c +++ b/example/main.c @@ -13,6 +13,11 @@ typedef struct mlx_image img; } mlx_t; +#define CIRCLE_RADIUS 50 +#define CIRCLE_DIAMETER (CIRCLE_RADIUS + CIRCLE_RADIUS) + +static int pixels_circle[CIRCLE_DIAMETER * CIRCLE_DIAMETER] = { 0 }; + int update(void* param) { static int i = 0; @@ -51,11 +56,7 @@ int update(void* param) mlx_set_font_scale(mlx->mlx, "default", 8.f); mlx_string_put(mlx->mlx, mlx->win, 210, 175, 0xFFAF2BFF, "hidden"); - for(int j = 0; j < 20; j++) - { - for(int k = 0; k < 20; k++) - mlx_pixel_put(mlx->mlx, mlx->win, 220 + j, 160 + k, 0xFF0000FF); - } + mlx_pixel_put_region(mlx->mlx, mlx->win, 200, 170, CIRCLE_DIAMETER, CIRCLE_DIAMETER, pixels_circle); i++; return 0; @@ -137,6 +138,16 @@ int main(void) int h; int dummy; + int i = 0; + for(int j = 0; j < CIRCLE_DIAMETER; j++) + { + for(int k = 0; k < CIRCLE_DIAMETER; k++, i++) + { + if((CIRCLE_RADIUS - j) * (CIRCLE_RADIUS - j) + (CIRCLE_RADIUS - k) * (CIRCLE_RADIUS - k) < CIRCLE_RADIUS * CIRCLE_RADIUS) + pixels_circle[i] = 0xA10000FF + ((j * k * i) << 8); + } + } + mlx.mlx = mlx_init(); mlx_window_create_info info = { 0 }; diff --git a/includes/mlx_extended.h b/includes/mlx_extended.h index f324d06..942fdd1 100644 --- a/includes/mlx_extended.h +++ b/includes/mlx_extended.h @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2024/12/14 16:17:10 by maldavid #+# #+# */ -/* Updated: 2024/12/15 13:58:58 by maldavid ### ########.fr */ +/* Updated: 2024/12/16 15:06:43 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -17,6 +17,7 @@ #define MACROLIB_X_EXTENDED_H #include "mlx.h" +#include #ifdef __cplusplus extern "C" { @@ -84,7 +85,23 @@ MLX_API void mlx_restore_window(mlx_context mlx, mlx_window win); * @param y Y coordinate * @param pixels Array of pixels (coded on 4 bytes in an int, 0xRRGGBBAA) */ -MLX_API void mlx_pixel_put_array(mlx_context mlx, mlx_window win, int x, int y, int* pixels); +MLX_API void mlx_pixel_put_array(mlx_context mlx, mlx_window win, int x, int y, int* pixels, size_t pixels_size); + +/** + * @brief Put a region of pixels in the window + * + * @param mlx Internal 
MLX application + * @param win Internal window + * @param x X coordinate + * @param y Y coordinate + * @param w Width + * @param h Height + * @param pixels Array of pixels (coded on 4 bytes in an int, 0xRRGGBBAA) + * + * Note: it is responsability of the user to make sure the size of `pixels` is + * big enough for the given region. + */ +MLX_API void mlx_pixel_put_region(mlx_context mlx, mlx_window win, int x, int y, int w, int h, int* pixels); diff --git a/runtime/Includes/Core/Graphics.h b/runtime/Includes/Core/Graphics.h index eeb2820..381061e 100644 --- a/runtime/Includes/Core/Graphics.h +++ b/runtime/Includes/Core/Graphics.h @@ -25,6 +25,8 @@ namespace mlx inline void ResetRenderData(int color) noexcept; inline void PixelPut(int x, int y, int color) noexcept; + inline void PixelPutArray(int x, int y, int* pixels, std::size_t pixels_size) noexcept; + inline void PixelPutRegion(int x, int y, int w, int h, int* pixels) noexcept; inline void StringPut(int x, int y, int, std::string str); inline void TexturePut(NonOwningPtr texture, int x, int y, float scale_x, float scale_y, float angle); diff --git a/runtime/Includes/Core/Graphics.inl b/runtime/Includes/Core/Graphics.inl index f6fa842..eee2785 100644 --- a/runtime/Includes/Core/Graphics.inl +++ b/runtime/Includes/Core/Graphics.inl @@ -30,6 +30,30 @@ namespace mlx } } + void GraphicsSupport::PixelPutArray(int x, int y, int* pixels, std::size_t pixels_size) noexcept + { + MLX_PROFILE_FUNCTION(); + NonOwningPtr texture = m_put_pixel_manager.DrawPixelsArray(x, y, m_draw_layer, pixels, pixels_size); + if(texture) + { + m_pixelput_called = true; + Sprite& new_sprite = p_scene->CreateSprite(texture); + new_sprite.SetPosition(Vec2f{ 0.0f, 0.0f }); + } + } + + void GraphicsSupport::PixelPutRegion(int x, int y, int w, int h, int* pixels) noexcept + { + MLX_PROFILE_FUNCTION(); + NonOwningPtr texture = m_put_pixel_manager.DrawPixelsRegion(x, y, w, h, m_draw_layer, pixels); + if(texture) + { + m_pixelput_called = true; + Sprite& new_sprite = p_scene->CreateSprite(texture); + new_sprite.SetPosition(Vec2f{ 0.0f, 0.0f }); + } + } + void GraphicsSupport::StringPut(int x, int y, int color, std::string str) { MLX_PROFILE_FUNCTION(); diff --git a/runtime/Includes/Graphics/PutPixelManager.h b/runtime/Includes/Graphics/PutPixelManager.h index e630b4c..8283759 100644 --- a/runtime/Includes/Graphics/PutPixelManager.h +++ b/runtime/Includes/Graphics/PutPixelManager.h @@ -12,10 +12,15 @@ namespace mlx // Returns a valid pointer when a new texture has been created NonOwningPtr DrawPixel(int x, int y, std::uint64_t draw_layer, int color); + NonOwningPtr DrawPixelsArray(int x, int y, std::uint64_t draw_layer, int* pixels, std::size_t pixels_size); + NonOwningPtr DrawPixelsRegion(int x, int y, int w, int h, std::uint64_t draw_layer, int* pixels); void ResetRenderData(); ~PutPixelManager() = default; + private: + NonOwningPtr GetLayer(std::uint64_t draw_layer, bool& is_newlayer); + private: std::unordered_map> m_placements; std::vector> m_textures; diff --git a/runtime/Includes/Renderer/Image.h b/runtime/Includes/Renderer/Image.h index 839dfda..1ce6545 100644 --- a/runtime/Includes/Renderer/Image.h +++ b/runtime/Includes/Renderer/Image.h @@ -84,7 +84,10 @@ namespace mlx void Destroy() noexcept override; void SetPixel(int x, int y, int color) noexcept; + void SetRegion(int x, int y, int w, int h, int* pixels) noexcept; + void SetLinearRegion(int x, int y, std::size_t len, int* pixels) noexcept; int GetPixel(int x, int y) noexcept; + void GetRegion(int x, int y, int w, 
int h, int* dst) noexcept; void Update(VkCommandBuffer cmd); diff --git a/runtime/Includes/Utils/Bits.h b/runtime/Includes/Utils/Bits.h new file mode 100644 index 0000000..0b6ecf2 --- /dev/null +++ b/runtime/Includes/Utils/Bits.h @@ -0,0 +1,16 @@ +#ifndef __MLX_BITS__ +#define __MLX_BITS__ + +namespace mlx +{ + template + constexpr T ByteSwap(T value) noexcept + { + static_assert(std::has_unique_object_representations_v, "T may not have padding bits"); + auto value_representation = std::bit_cast>(value); + std::ranges::reverse(value_representation); + return std::bit_cast(value_representation); + } +} + +#endif diff --git a/runtime/Sources/Core/Bridge.cpp b/runtime/Sources/Core/Bridge.cpp index 2202705..10e8a41 100644 --- a/runtime/Sources/Core/Bridge.cpp +++ b/runtime/Sources/Core/Bridge.cpp @@ -381,12 +381,22 @@ extern "C" gs->GetWindow()->Restore(); } - void mlx_pixel_put_array(mlx_context mlx, mlx_window win, int x, int y, int* pixels) + void mlx_pixel_put_array(mlx_context mlx, mlx_window win, int x, int y, int* pixels, size_t pixels_size) { MLX_CHECK_APPLICATION_POINTER(mlx); mlx::NonOwningPtr gs = mlx->app->GetGraphicsSupport(win); if(!gs) return; + gs->PixelPutArray(x, y, pixels, pixels_size); + } + + void mlx_pixel_put_region(mlx_context mlx, mlx_window win, int x, int y, int w, int h, int* pixels) + { + MLX_CHECK_APPLICATION_POINTER(mlx); + mlx::NonOwningPtr gs = mlx->app->GetGraphicsSupport(win); + if(!gs) + return; + gs->PixelPutRegion(x, y, w, h, pixels); } void mlx_get_image_region(mlx_context mlx, mlx_image image, int x, int y, int w, int h, int* dst) @@ -395,6 +405,7 @@ extern "C" mlx::NonOwningPtr texture = mlx->app->GetTexture(image); if(!texture) return; + texture->GetRegion(x, y, w, h, dst); } void mlx_set_image_region(mlx_context mlx, mlx_image image, int x, int y, int w, int h, int* pixels) @@ -403,6 +414,7 @@ extern "C" mlx::NonOwningPtr texture = mlx->app->GetTexture(image); if(!texture) return; + texture->SetRegion(x, y, w, h, pixels); } void mlx_put_transformed_image_to_window(mlx_context mlx, mlx_window win, mlx_image image, int x, int y, float scale_x, float scale_y, float angle) diff --git a/runtime/Sources/Graphics/PutPixelManager.cpp b/runtime/Sources/Graphics/PutPixelManager.cpp index 1c99b65..9f83490 100644 --- a/runtime/Sources/Graphics/PutPixelManager.cpp +++ b/runtime/Sources/Graphics/PutPixelManager.cpp @@ -7,6 +7,40 @@ namespace mlx { NonOwningPtr PutPixelManager::DrawPixel(int x, int y, std::uint64_t draw_layer, int color) { + MLX_PROFILE_FUNCTION(); + bool is_newlayer; + NonOwningPtr layer = GetLayer(draw_layer, is_newlayer); + if(!layer) + return nullptr; + layer->SetPixel(x, y, color); + return (is_newlayer ? layer : nullptr); + } + + NonOwningPtr PutPixelManager::DrawPixelsArray(int x, int y, std::uint64_t draw_layer, int* pixels, std::size_t pixels_size) + { + MLX_PROFILE_FUNCTION(); + bool is_newlayer; + NonOwningPtr layer = GetLayer(draw_layer, is_newlayer); + if(!layer) + return nullptr; + layer->SetLinearRegion(x, y, pixels_size, pixels); + return (is_newlayer ? layer : nullptr); + } + + NonOwningPtr PutPixelManager::DrawPixelsRegion(int x, int y, int w, int h, std::uint64_t draw_layer, int* pixels) + { + MLX_PROFILE_FUNCTION(); + bool is_newlayer; + NonOwningPtr layer = GetLayer(draw_layer, is_newlayer); + if(!layer) + return nullptr; + layer->SetRegion(x, y, w, h, pixels); + return (is_newlayer ? 
layer : nullptr); + } + + NonOwningPtr PutPixelManager::GetLayer(std::uint64_t draw_layer, bool& is_newlayer) + { + MLX_PROFILE_FUNCTION(); Verify((bool)p_renderer, "invalid renderer pointer"); VkExtent2D extent; @@ -20,9 +54,10 @@ namespace mlx auto it = m_placements.find(draw_layer); if(it != m_placements.end()) { - it->second->SetPixel(x, y, color); - return nullptr; + is_newlayer = false; + return it->second; } + is_newlayer = true; bool adjusment = false; if(m_current_texture_index >= m_textures.size()) @@ -39,12 +74,11 @@ namespace mlx { m_placements[draw_layer] = m_textures.at(m_current_texture_index - adjusment).get(); m_textures.at(m_current_texture_index - adjusment)->Clear(VK_NULL_HANDLE, Vec4f{ 0.0f }); - m_textures.at(m_current_texture_index - adjusment)->SetPixel(x, y, color); return m_textures.at(m_current_texture_index - adjusment).get(); } catch(...) { - Error("PutPixelManager: invalid texture index; % is not in range of 0-% (internal mlx issue, please report to devs)", m_current_texture_index - 1, m_textures.size()); + Error("PutPixelManager: invalid texture index; % is not in range of 0-% (internal mlx issue, please report to devs)", m_current_texture_index - adjusment, m_textures.size()); return nullptr; } } diff --git a/runtime/Sources/Renderer/Image.cpp b/runtime/Sources/Renderer/Image.cpp index 892629d..24a1348 100644 --- a/runtime/Sources/Renderer/Image.cpp +++ b/runtime/Sources/Renderer/Image.cpp @@ -5,6 +5,7 @@ #include #include #include +#include #define STB_IMAGE_IMPLEMENTATION @@ -215,12 +216,49 @@ namespace mlx if(!m_staging_buffer.has_value()) OpenCPUBuffer(); // Needs to reverse bytes order because why not - unsigned char bytes[4]; - bytes[0] = (color >> 24) & 0xFF; - bytes[1] = (color >> 16) & 0xFF; - bytes[2] = (color >> 8) & 0xFF; - bytes[3] = color & 0xFF; - m_cpu_buffer[(y * m_width) + x] = *reinterpret_cast(bytes); + color = ByteSwap(color); + m_cpu_buffer[(y * m_width) + x] = color; + m_has_been_modified = true; + } + + void Texture::SetRegion(int x, int y, int w, int h, int* pixels) noexcept + { + MLX_PROFILE_FUNCTION(); + if(x < 0 || y < 0 || static_cast(x) > m_width || static_cast(y) > m_height) + return; + if(w < 0 || h < 0) + return; + if(!m_staging_buffer.has_value()) + OpenCPUBuffer(); + for(std::uint32_t i = 0, moving_x = x, moving_y = y;; i++, moving_x++) + { + if(moving_x >= static_cast(x + w) || moving_x >= m_width) + { + moving_x = x; + if(moving_y >= static_cast(y + h) || moving_y >= m_height) + break; + moving_y++; + } + // Needs to reverse bytes order because why not + int color = ByteSwap(pixels[i]); + m_cpu_buffer[(moving_y * m_width) + moving_x] = color; + } + m_has_been_modified = true; + } + + void Texture::SetLinearRegion(int x, int y, std::size_t len, int* pixels) noexcept + { + MLX_PROFILE_FUNCTION(); + if(x < 0 || y < 0 || static_cast(x) > m_width || static_cast(y) > m_height) + return; + if(!m_staging_buffer.has_value()) + OpenCPUBuffer(); + for(std::size_t i = 0; i < len; i++) + { + // Needs to reverse bytes order because why not + int color = ByteSwap(pixels[i]); + m_cpu_buffer[(y * m_width) + x + i] = color; + } m_has_been_modified = true; } @@ -234,6 +272,27 @@ namespace mlx return m_cpu_buffer[(y * m_width) + x]; } + void Texture::GetRegion(int x, int y, int w, int h, int* dst) noexcept + { + MLX_PROFILE_FUNCTION(); + if(x < 0 || y < 0 || static_cast(x) > m_width || static_cast(y) > m_height) + return; + if(!m_staging_buffer.has_value()) + OpenCPUBuffer(); + for(std::uint32_t i = 0, moving_x = x, moving_y = y;; i++, 
moving_x++) + { + if(moving_x >= static_cast(x + w) || moving_x >= m_width) + { + moving_x = x; + if(moving_y >= static_cast(y + h) || moving_y >= m_height) + break; + moving_y++; + } + // Needs to reverse bytes order because why not + dst[i] = ByteSwap(m_cpu_buffer[(moving_y * m_width) + moving_x]); + } + } + void Texture::Update(VkCommandBuffer cmd) { MLX_PROFILE_FUNCTION(); From 47f6bc73e967648f63ac70ed2ebe1ebe50276135 Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Mon, 16 Dec 2024 16:17:17 +0100 Subject: [PATCH 096/131] fixing macos compilation issue --- includes/mlx_profile.h | 14 ++++---------- 1 file changed, 4 insertions(+), 10 deletions(-) diff --git a/includes/mlx_profile.h b/includes/mlx_profile.h index 99f2661..d525c45 100644 --- a/includes/mlx_profile.h +++ b/includes/mlx_profile.h @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/11/10 08:49:17 by maldavid #+# #+# */ -/* Updated: 2024/12/14 17:58:37 by maldavid ### ########.fr */ +/* Updated: 2024/12/16 16:16:07 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -121,21 +121,15 @@ #if defined(MLX_PLAT_WINDOWS) #define VK_USE_PLATFORM_WIN32_KHR - #ifdef __cplusplus - constexpr const char* VULKAN_LIB_NAME = "vulkan-1.dll"; - #endif + #define VULKAN_LIB_NAME "vulkan-1.dll" #elif defined(MLX_PLAT_MACOS) #define VK_USE_PLATFORM_MACOS_MVK #define VK_USE_PLATFORM_METAL_EXT - #ifdef __cplusplus - constexpr const char* VULKAN_LIB_NAME = "libvulkan.dylib / libvulkan.1.dylib / libMoltenVK.dylib"; - #endif + #define VULKAN_LIB_NAME "libvulkan.dylib / libvulkan.1.dylib / libMoltenVK.dylib" #else #define VK_USE_PLATFORM_XLIB_KHR #define VK_USE_PLATFORM_WAYLAND_KHR - #ifdef __cplusplus - constexpr const char* VULKAN_LIB_NAME = "libvulkan.so / libvulkan.so.1"; - #endif + #define VULKAN_LIB_NAME "libvulkan.so / libvulkan.so.1" #endif #if !defined(MLX_FORCEINLINE) From 7d407fc6d1f5e8e101991c404efaf409f47a988c Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Mon, 16 Dec 2024 16:39:04 +0100 Subject: [PATCH 097/131] adding gcc13 to CI --- .github/workflows/linux_gcc.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/linux_gcc.yml b/.github/workflows/linux_gcc.yml index 35e0348..72dea2b 100644 --- a/.github/workflows/linux_gcc.yml +++ b/.github/workflows/linux_gcc.yml @@ -30,8 +30,9 @@ jobs: - name: Install system dependencies run: | + sudo add-apt-repository ppa:ubuntu-toolchain-r/test -y sudo apt-get update - sudo apt-get -y install mesa-common-dev libsdl2-2.0-0 libsdl2-dev build-essential libvulkan-dev + sudo apt-get -y install gcc-13 mesa-common-dev libsdl2-2.0-0 libsdl2-dev build-essential libvulkan-dev # Build the lib - name: Build MacroLibX From d379e5a9fe46183a97e067dad2ae51ae2c70a30a Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Mon, 16 Dec 2024 16:39:50 +0100 Subject: [PATCH 098/131] fixing windows compilation issue --- runtime/Sources/Core/Memory.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/runtime/Sources/Core/Memory.cpp b/runtime/Sources/Core/Memory.cpp index 690814b..57869c2 100644 --- a/runtime/Sources/Core/Memory.cpp +++ b/runtime/Sources/Core/Memory.cpp @@ -106,7 +106,7 @@ namespace mlx for(const Descriptor& desc : s_blocks) { #ifdef MLX_COMPILER_MSVC - if(it->aligned) + if(desc.aligned) _aligned_free(desc.ptr); else std::free(desc.ptr); From 17ae77d11a62cc017a852dc57847c931cad738c4 Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Mon, 16 Dec 2024 16:57:23 +0100 Subject: [PATCH 
099/131] update action images --- .github/workflows/linux_clang.yml | 2 +- .github/workflows/linux_gcc.yml | 5 ++--- 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/.github/workflows/linux_clang.yml b/.github/workflows/linux_clang.yml index b35ab2c..d39d34b 100644 --- a/.github/workflows/linux_clang.yml +++ b/.github/workflows/linux_clang.yml @@ -13,7 +13,7 @@ jobs: strategy: fail-fast: false matrix: - os: [ubuntu-latest] + os: [ubuntu-24.04] arch: [x86_64] mode: [release] diff --git a/.github/workflows/linux_gcc.yml b/.github/workflows/linux_gcc.yml index 72dea2b..7b32745 100644 --- a/.github/workflows/linux_gcc.yml +++ b/.github/workflows/linux_gcc.yml @@ -13,7 +13,7 @@ jobs: strategy: fail-fast: false matrix: - os: [ubuntu-latest] + os: [ubuntu-24.04] arch: [x86_64] mode: [release] @@ -30,9 +30,8 @@ jobs: - name: Install system dependencies run: | - sudo add-apt-repository ppa:ubuntu-toolchain-r/test -y sudo apt-get update - sudo apt-get -y install gcc-13 mesa-common-dev libsdl2-2.0-0 libsdl2-dev build-essential libvulkan-dev + sudo apt-get -y install mesa-common-dev libsdl2-2.0-0 libsdl2-dev build-essential libvulkan-dev # Build the lib - name: Build MacroLibX From 9183e7c1e8b7a3ae935200bb11d5e644beb71555 Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Mon, 16 Dec 2024 17:03:53 +0100 Subject: [PATCH 100/131] installing SDL2 framework in macos action --- .github/workflows/macos_x86.yml | 7 ++++--- Makefile | 1 - 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/macos_x86.yml b/.github/workflows/macos_x86.yml index 78433d2..41af27e 100644 --- a/.github/workflows/macos_x86.yml +++ b/.github/workflows/macos_x86.yml @@ -29,9 +29,10 @@ jobs: uses: actions/checkout@v4 # Install system dependencies - - name: Install Dependancies - run: | - brew install SDL2 + - name: Setup SDL2 frameworks + uses: BrettDong/setup-sdl2-frameworks@main + with: + sdl2: latest # Build the lib - name: Build MacroLibX diff --git a/Makefile b/Makefile index 250c4d0..0f12752 100644 --- a/Makefile +++ b/Makefile @@ -168,7 +168,6 @@ clean: fclean: clean @$(RM) $(NAME) @printf "Cleaned $(_BOLD)$(NAME)$(_RESET)\n" - @printf "Cleaned $(_BOLD)$(NAME)$(_RESET)\n" re: fclean _printbuildinfos @$(MAKE) $(NAME) From d3808cde1afd543218bdd91cad2f513df80cf522 Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Mon, 16 Dec 2024 17:22:34 +0100 Subject: [PATCH 101/131] revert --- .github/workflows/macos_x86.yml | 7 +++---- Makefile | 2 +- 2 files changed, 4 insertions(+), 5 deletions(-) diff --git a/.github/workflows/macos_x86.yml b/.github/workflows/macos_x86.yml index 41af27e..78433d2 100644 --- a/.github/workflows/macos_x86.yml +++ b/.github/workflows/macos_x86.yml @@ -29,10 +29,9 @@ jobs: uses: actions/checkout@v4 # Install system dependencies - - name: Setup SDL2 frameworks - uses: BrettDong/setup-sdl2-frameworks@main - with: - sdl2: latest + - name: Install Dependancies + run: | + brew install SDL2 # Build the lib - name: Build MacroLibX diff --git a/Makefile b/Makefile index 0f12752..06b084e 100644 --- a/Makefile +++ b/Makefile @@ -34,7 +34,7 @@ INCLUDES = -I./includes -I./runtime/Includes -I./runtime/Sources -I./third_party CXXPCHFLAGS = -xc++-header PCH = runtime/Includes/PreCompiled.h -GCH = +GCH = NZRRC = nzslc From 335c797c12805c075a028e7c563cc1e60d81df1f Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Mon, 16 Dec 2024 17:26:12 +0100 Subject: [PATCH 102/131] fixing homebrew path --- .github/workflows/linux_clang.yml | 4 ---- .github/workflows/linux_gcc.yml | 4 ---- .github/workflows/macos_x86.yml | 
4 ---- Makefile | 4 ++-- 4 files changed, 2 insertions(+), 14 deletions(-) diff --git a/.github/workflows/linux_clang.yml b/.github/workflows/linux_clang.yml index d39d34b..1300ead 100644 --- a/.github/workflows/linux_clang.yml +++ b/.github/workflows/linux_clang.yml @@ -21,10 +21,6 @@ jobs: if: "!contains(github.event.head_commit.message, 'ci skip')" steps: - - name: Get current date as package key - id: cache_key - run: echo "key=$(date +'%W')" >> $GITHUB_OUTPUT - - name: Checkout repository uses: actions/checkout@v4 diff --git a/.github/workflows/linux_gcc.yml b/.github/workflows/linux_gcc.yml index 7b32745..f676874 100644 --- a/.github/workflows/linux_gcc.yml +++ b/.github/workflows/linux_gcc.yml @@ -21,10 +21,6 @@ jobs: if: "!contains(github.event.head_commit.message, 'ci skip')" steps: - - name: Get current date as package key - id: cache_key - run: echo "key=$(date +'%W')" >> $GITHUB_OUTPUT - - name: Checkout repository uses: actions/checkout@v4 diff --git a/.github/workflows/macos_x86.yml b/.github/workflows/macos_x86.yml index 78433d2..80f8408 100644 --- a/.github/workflows/macos_x86.yml +++ b/.github/workflows/macos_x86.yml @@ -21,10 +21,6 @@ jobs: if: "!contains(github.event.head_commit.message, 'ci skip')" steps: - - name: Get current date as package key - id: cache_key - run: echo "key=$(date +'%W')" >> $GITHUB_OUTPUT - - name: Checkout repository uses: actions/checkout@v4 diff --git a/Makefile b/Makefile index 06b084e..5a5c659 100644 --- a/Makefile +++ b/Makefile @@ -48,8 +48,8 @@ else endif ifeq ($(OS), Darwin) - LDFLAGS += -L /opt/homebrew/lib -lSDL2 - CXXFLAGS += -I /opt/homebrew/include + LDFLAGS += -L /opt/homebrew/Cellar/lib -lSDL2 + CXXFLAGS += -I /opt/homebrew/Cellar/include NAME = libmlx.dylib endif From 30328b1732a40fc7df57340c36c25c9e8360165b Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Mon, 16 Dec 2024 19:07:16 +0100 Subject: [PATCH 103/131] fixing bug in texture clear --- runtime/Includes/Renderer/Image.h | 3 ++- runtime/Sources/Graphics/PutPixelManager.cpp | 17 ++++++--------- runtime/Sources/Renderer/Image.cpp | 23 +++++++++++++------- 3 files changed, 24 insertions(+), 19 deletions(-) diff --git a/runtime/Includes/Renderer/Image.h b/runtime/Includes/Renderer/Image.h index 1ce6545..f5a70f4 100644 --- a/runtime/Includes/Renderer/Image.h +++ b/runtime/Includes/Renderer/Image.h @@ -30,7 +30,7 @@ namespace mlx void CreateImageView(VkImageViewType type, VkImageAspectFlags aspectFlags, int layer_count = 1) noexcept; void CreateSampler() noexcept; void TransitionLayout(VkImageLayout new_layout, VkCommandBuffer cmd = VK_NULL_HANDLE); - void Clear(VkCommandBuffer cmd, Vec4f color); + virtual void Clear(VkCommandBuffer cmd, Vec4f color); void DestroySampler() noexcept; void DestroyImageView() noexcept; @@ -88,6 +88,7 @@ namespace mlx void SetLinearRegion(int x, int y, std::size_t len, int* pixels) noexcept; int GetPixel(int x, int y) noexcept; void GetRegion(int x, int y, int w, int h, int* dst) noexcept; + void Clear(VkCommandBuffer cmd, Vec4f color) override; void Update(VkCommandBuffer cmd); diff --git a/runtime/Sources/Graphics/PutPixelManager.cpp b/runtime/Sources/Graphics/PutPixelManager.cpp index 9f83490..79753ce 100644 --- a/runtime/Sources/Graphics/PutPixelManager.cpp +++ b/runtime/Sources/Graphics/PutPixelManager.cpp @@ -59,26 +59,25 @@ namespace mlx } is_newlayer = true; - bool adjusment = false; if(m_current_texture_index >= m_textures.size()) { #ifdef DEBUG - m_textures.push_back(std::make_unique(CPUBuffer{}, extent.width, extent.height, 
VK_FORMAT_R8G8B8A8_SRGB, false, "mlx_put_pixel_layer_" + std::to_string(draw_layer))); + m_textures.push_back(std::make_unique(CPUBuffer{}, extent.width, extent.height, VK_FORMAT_R8G8B8A8_SRGB, false, "mlx_put_pixel_layer_" + std::to_string(m_current_texture_index))); #else m_textures.push_back(std::make_unique(CPUBuffer{}, extent.width, extent.height, VK_FORMAT_R8G8B8A8_SRGB, false, std::string_view{})); #endif - m_current_texture_index++; - adjusment = true; } try { - m_placements[draw_layer] = m_textures.at(m_current_texture_index - adjusment).get(); - m_textures.at(m_current_texture_index - adjusment)->Clear(VK_NULL_HANDLE, Vec4f{ 0.0f }); - return m_textures.at(m_current_texture_index - adjusment).get(); + m_placements[draw_layer] = m_textures.at(m_current_texture_index).get(); + m_textures.at(m_current_texture_index)->Clear(VK_NULL_HANDLE, Vec4f{ 0.0f }); + NonOwningPtr texture = m_textures.at(m_current_texture_index).get(); + m_current_texture_index++; + return texture; } catch(...) { - Error("PutPixelManager: invalid texture index; % is not in range of 0-% (internal mlx issue, please report to devs)", m_current_texture_index - adjusment, m_textures.size()); + Error("PutPixelManager: invalid texture index; % is not in range of 0-% (internal mlx issue, please report to devs)", m_current_texture_index, m_textures.size()); return nullptr; } } @@ -86,8 +85,6 @@ namespace mlx void PutPixelManager::ResetRenderData() { m_placements.clear(); - for(auto& texture : m_textures) - texture->Clear(VK_NULL_HANDLE, Vec4f{ 0.0f }); m_current_texture_index = 0; } } diff --git a/runtime/Sources/Renderer/Image.cpp b/runtime/Sources/Renderer/Image.cpp index 24a1348..00e773b 100644 --- a/runtime/Sources/Renderer/Image.cpp +++ b/runtime/Sources/Renderer/Image.cpp @@ -31,14 +31,6 @@ namespace mlx { - unsigned char reverse(unsigned char b) - { - b = (b & 0xF0) >> 4 | (b & 0x0F) << 4; - b = (b & 0xCC) >> 2 | (b & 0x33) << 2; - b = (b & 0xAA) >> 1 | (b & 0x55) << 1; - return b; - } - void Image::Init(ImageType type, std::uint32_t width, std::uint32_t height, VkFormat format, VkImageTiling tiling, VkImageUsageFlags usage, bool is_multisampled, [[maybe_unused]] std::string_view debug_name) { MLX_PROFILE_FUNCTION(); @@ -293,6 +285,21 @@ namespace mlx } } + void Texture::Clear(VkCommandBuffer cmd, Vec4f color) + { + MLX_PROFILE_FUNCTION(); + Image::Clear(cmd, std::move(color)); + if(m_staging_buffer.has_value()) + { + std::uint8_t color_bytes[4]; + color_bytes[0] = static_cast(color.r * 255.f); + color_bytes[1] = static_cast(color.g * 255.f); + color_bytes[2] = static_cast(color.b * 255.f); + color_bytes[3] = static_cast(color.a * 255.f); + std::fill(m_cpu_buffer.begin(), m_cpu_buffer.end(), *reinterpret_cast(color_bytes)); + } + } + void Texture::Update(VkCommandBuffer cmd) { MLX_PROFILE_FUNCTION(); From 42e62ac3eb0bf54951c3bd550d36671fed13d194 Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Tue, 17 Dec 2024 00:11:21 +0100 Subject: [PATCH 104/131] working on endianness with vulkan images --- example/main.c | 38 +++++------ includes/mlx.h | 27 ++++++-- includes/mlx_extended.h | 10 +-- runtime/Includes/Core/Graphics.h | 10 +-- runtime/Includes/Core/Graphics.inl | 26 ++++---- runtime/Includes/Graphics/PutPixelManager.h | 6 +- runtime/Includes/Renderer/Image.h | 12 ++-- runtime/Includes/Utils/Bits.h | 32 ++++++++++ runtime/Sources/Core/Bridge.cpp | 20 +++--- runtime/Sources/Core/SDLManager.cpp | 15 +---- runtime/Sources/Graphics/PutPixelManager.cpp | 9 +-- runtime/Sources/Renderer/Image.cpp | 66 ++++++++++++-------- 12 
files changed, 162 insertions(+), 109 deletions(-) diff --git a/example/main.c b/example/main.c index f09d680..a06fc0e 100644 --- a/example/main.c +++ b/example/main.c @@ -16,7 +16,7 @@ typedef struct #define CIRCLE_RADIUS 50 #define CIRCLE_DIAMETER (CIRCLE_RADIUS + CIRCLE_RADIUS) -static int pixels_circle[CIRCLE_DIAMETER * CIRCLE_DIAMETER] = { 0 }; +static mlx_color pixels_circle[CIRCLE_DIAMETER * CIRCLE_DIAMETER] = { 0 }; int update(void* param) { @@ -25,7 +25,7 @@ int update(void* param) if(i > 200) { - mlx_clear_window(mlx->mlx, mlx->win, 0x334D4DFF); + mlx_clear_window(mlx->mlx, mlx->win, (mlx_color){ .rgba = 0x334D4DFF }); mlx_put_transformed_image_to_window(mlx->mlx, mlx->win, mlx->logo_bmp, 220, 40, 0.5f, 0.5f, i); } @@ -34,18 +34,19 @@ int update(void* param) else mlx_set_font_scale(mlx->mlx, "default", 6.f); - mlx_string_put(mlx->mlx, mlx->win, 160, 120, 0xFF2066FF, "this text should be hidden"); + mlx_string_put(mlx->mlx, mlx->win, 160, 120, (mlx_color){ .rgba = 0xFF2066FF }, "this text should be hidden"); mlx_put_image_to_window(mlx->mlx, mlx->win, mlx->logo_png, 100, 100); mlx_put_image_to_window(mlx->mlx, mlx->win, mlx->img, 150, 60); mlx_set_font(mlx->mlx, "default"); - mlx_string_put(mlx->mlx, mlx->win, 20, 50, 0xFFFFFFFF, "that's a text"); + mlx_string_put(mlx->mlx, mlx->win, 20, 50, (mlx_color){ .rgba = 0xFFFFFFFF }, "that's a text"); - for(int j = 0, color = 0; j < 400; j++) + uint32_t color = 0; + for(int j = 0; j < 400; j++) { - mlx_pixel_put(mlx->mlx, mlx->win, j, j, 0x0000FFFF + (color << 24)); - mlx_pixel_put(mlx->mlx, mlx->win, 399 - j, j, 0x0000FFFF); + mlx_pixel_put(mlx->mlx, mlx->win, j, j, (mlx_color){ .rgba = 0x0000FFFF + (color << 24) }); + mlx_pixel_put(mlx->mlx, mlx->win, 399 - j, j, (mlx_color){ .rgba = 0x0000FFFF }); color += (color < 255); } @@ -54,7 +55,7 @@ int update(void* param) else mlx_put_transformed_image_to_window(mlx->mlx, mlx->win, mlx->logo_jpg, 210, 150, fabs(sin(i / 100.0f)), fabs(cos(i / 100.0f) * 2.0f), 0.0f); mlx_set_font_scale(mlx->mlx, "default", 8.f); - mlx_string_put(mlx->mlx, mlx->win, 210, 175, 0xFFAF2BFF, "hidden"); + mlx_string_put(mlx->mlx, mlx->win, 210, 175, (mlx_color){ .rgba = 0xFFAF2BFF }, "hidden"); mlx_pixel_put_region(mlx->mlx, mlx->win, 200, 170, CIRCLE_DIAMETER, CIRCLE_DIAMETER, pixels_circle); @@ -64,7 +65,6 @@ int update(void* param) mlx_image create_image(mlx_t* mlx) { - unsigned char pixel[4]; mlx_image img = mlx_new_image(mlx->mlx, 100, 100); for(int i = 0, j = 0, k = 0; i < (100 * 100) * 4; i += 4, j++) { @@ -75,11 +75,13 @@ mlx_image create_image(mlx_t* mlx) } if(i < 10000 || i > 20000) { - pixel[0] = 0x99; - pixel[1] = i; - pixel[2] = j; - pixel[3] = k; - mlx_set_image_pixel(mlx->mlx, img, j, k, *((int*)pixel)); + mlx_color pixel = { + .r = (uint8_t)k, + .g = (uint8_t)j, + .b = (uint8_t)i, + .a = 0x99 + }; + mlx_set_image_pixel(mlx->mlx, img, j, k, pixel); } } return img; @@ -104,7 +106,7 @@ int key_hook(int key, void* param) mlx_mouse_hide(mlx->mlx); break; case 6 : // (C)lear - mlx_clear_window(mlx->mlx, mlx->win, 0x334D4DFF); + mlx_clear_window(mlx->mlx, mlx->win, (mlx_color){ .rgba = 0x334D4DFF }); break; case 79 : // RIGHT KEY mlx_mouse_move(mlx->mlx, mlx->win, x + 10, y); @@ -144,7 +146,7 @@ int main(void) for(int k = 0; k < CIRCLE_DIAMETER; k++, i++) { if((CIRCLE_RADIUS - j) * (CIRCLE_RADIUS - j) + (CIRCLE_RADIUS - k) * (CIRCLE_RADIUS - k) < CIRCLE_RADIUS * CIRCLE_RADIUS) - pixels_circle[i] = 0xA10000FF + ((j * k * i) << 8); + pixels_circle[i] = (mlx_color){ .rgba = 0xA10000FF + ((j * k * i) << 8) }; } } 
@@ -169,13 +171,13 @@ int main(void) mlx.logo_bmp = mlx_new_image_from_file(mlx.mlx, "42_logo.bmp", &dummy, &dummy); mlx.logo_jpg = mlx_new_image_from_file(mlx.mlx, "42_logo.jpg", &dummy, &dummy); - mlx_pixel_put(mlx.mlx, mlx.win, 200, 10, 0xFF00FFFF); + mlx_pixel_put(mlx.mlx, mlx.win, 200, 10, (mlx_color){ .rgba = 0xFF00FFFF }); mlx_put_image_to_window(mlx.mlx, mlx.win, mlx.logo_png, 10, 190); mlx.img = create_image(&mlx); mlx_set_font_scale(mlx.mlx, "font.ttf", 16.f); - mlx_string_put(mlx.mlx, mlx.win, 20, 20, 0x0020FFFF, "that text will disappear"); + mlx_string_put(mlx.mlx, mlx.win, 20, 20, (mlx_color){ .rgba = 0x0020FFFF }, "that text will disappear"); mlx_loop_hook(mlx.mlx, update, &mlx); mlx_loop(mlx.mlx); diff --git a/includes/mlx.h b/includes/mlx.h index e39da89..21fc75a 100644 --- a/includes/mlx.h +++ b/includes/mlx.h @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/04 16:56:35 by maldavid #+# #+# */ -/* Updated: 2024/12/15 13:59:00 by maldavid ### ########.fr */ +/* Updated: 2024/12/16 23:11:59 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -41,6 +41,21 @@ MLX_DEFINE_HANDLE(mlx_window); */ MLX_DEFINE_HANDLE(mlx_image); +/** + * @brief Union representing RGBA color with access to each part as bytes + */ +typedef union mlx_color +{ + struct + { + uint8_t a; + uint8_t b; + uint8_t g; + uint8_t r; + }; + uint32_t rgba; +} mlx_color; + /* MLX backend related functions */ @@ -167,7 +182,7 @@ MLX_API void mlx_get_window_size(mlx_context mlx, mlx_window win, int* x, int* y * @param mlx Internal MLX application * @param win Internal window */ -MLX_API void mlx_clear_window(mlx_context mlx, mlx_window win, int color); +MLX_API void mlx_clear_window(mlx_context mlx, mlx_window win, mlx_color color); /** * @brief Get the size of the screen the given window is on @@ -283,7 +298,7 @@ MLX_API void mlx_on_event(mlx_context mlx, mlx_window win, mlx_event_type event, * @param y Y coordinate * @param color Color of the pixel (coded on 4 bytes in an int, 0xRRGGBBAA) */ -MLX_API void mlx_pixel_put(mlx_context mlx, mlx_window win, int x, int y, int color); +MLX_API void mlx_pixel_put(mlx_context mlx, mlx_window win, int x, int y, mlx_color color); @@ -339,7 +354,7 @@ MLX_API void mlx_destroy_image(mlx_context mlx, mlx_image image); * ~ make IMAGES_OPTIMIZED=false * ``` */ -MLX_API int mlx_get_image_pixel(mlx_context mlx, mlx_image image, int x, int y); +MLX_API mlx_color mlx_get_image_pixel(mlx_context mlx, mlx_image image, int x, int y); /** * @brief Set image pixel data @@ -358,7 +373,7 @@ MLX_API int mlx_get_image_pixel(mlx_context mlx, mlx_image image, int x, int y); * ~ make IMAGES_OPTIMIZED=false * ``` */ -MLX_API void mlx_set_image_pixel(mlx_context mlx, mlx_image image, int x, int y, int color); +MLX_API void mlx_set_image_pixel(mlx_context mlx, mlx_image image, int x, int y, mlx_color color); /** * @brief Put image to the given window @@ -386,7 +401,7 @@ MLX_API void mlx_put_image_to_window(mlx_context mlx, mlx_window win, mlx_image * @param color Color of the pixel (coded on 4 bytes in an int, 0xAARRGGBB) * @param str Text to put */ -MLX_API void mlx_string_put(mlx_context mlx, mlx_window win, int x, int y, int color, char* str); +MLX_API void mlx_string_put(mlx_context mlx, mlx_window win, int x, int y, mlx_color color, char* str); /** * @brief Loads a font to be used by `mlx_string_put` diff --git a/includes/mlx_extended.h b/includes/mlx_extended.h index 942fdd1..a67206f 
100644 --- a/includes/mlx_extended.h +++ b/includes/mlx_extended.h @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2024/12/14 16:17:10 by maldavid #+# #+# */ -/* Updated: 2024/12/16 15:06:43 by maldavid ### ########.fr */ +/* Updated: 2024/12/16 20:33:50 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -85,7 +85,7 @@ MLX_API void mlx_restore_window(mlx_context mlx, mlx_window win); * @param y Y coordinate * @param pixels Array of pixels (coded on 4 bytes in an int, 0xRRGGBBAA) */ -MLX_API void mlx_pixel_put_array(mlx_context mlx, mlx_window win, int x, int y, int* pixels, size_t pixels_size); +MLX_API void mlx_pixel_put_array(mlx_context mlx, mlx_window win, int x, int y, mlx_color* pixels, size_t pixels_size); /** * @brief Put a region of pixels in the window @@ -101,7 +101,7 @@ MLX_API void mlx_pixel_put_array(mlx_context mlx, mlx_window win, int x, int y, * Note: it is responsability of the user to make sure the size of `pixels` is * big enough for the given region. */ -MLX_API void mlx_pixel_put_region(mlx_context mlx, mlx_window win, int x, int y, int w, int h, int* pixels); +MLX_API void mlx_pixel_put_region(mlx_context mlx, mlx_window win, int x, int y, int w, int h, mlx_color* pixels); @@ -130,7 +130,7 @@ MLX_API void mlx_pixel_put_region(mlx_context mlx, mlx_window win, int x, int y, * ~ make IMAGES_OPTIMIZED=false * ``` */ -MLX_API void mlx_get_image_region(mlx_context mlx, mlx_image image, int x, int y, int w, int h, int* dst); +MLX_API void mlx_get_image_region(mlx_context mlx, mlx_image image, int x, int y, int w, int h, mlx_color* dst); /** * @brief Set image region @@ -154,7 +154,7 @@ MLX_API void mlx_get_image_region(mlx_context mlx, mlx_image image, int x, int y * ~ make IMAGES_OPTIMIZED=false * ``` */ -MLX_API void mlx_set_image_region(mlx_context mlx, mlx_image image, int x, int y, int w, int h, int* pixels); +MLX_API void mlx_set_image_region(mlx_context mlx, mlx_image image, int x, int y, int w, int h, mlx_color* pixels); /** * @brief Transform and put image to the given window diff --git a/runtime/Includes/Core/Graphics.h b/runtime/Includes/Core/Graphics.h index 381061e..06a221a 100644 --- a/runtime/Includes/Core/Graphics.h +++ b/runtime/Includes/Core/Graphics.h @@ -22,12 +22,12 @@ namespace mlx void Render() noexcept; - inline void ResetRenderData(int color) noexcept; + inline void ResetRenderData(mlx_color color) noexcept; - inline void PixelPut(int x, int y, int color) noexcept; - inline void PixelPutArray(int x, int y, int* pixels, std::size_t pixels_size) noexcept; - inline void PixelPutRegion(int x, int y, int w, int h, int* pixels) noexcept; - inline void StringPut(int x, int y, int, std::string str); + inline void PixelPut(int x, int y, mlx_color color) noexcept; + inline void PixelPutArray(int x, int y, mlx_color* color, std::size_t pixels_size) noexcept; + inline void PixelPutRegion(int x, int y, int w, int h, mlx_color* color) noexcept; + inline void StringPut(int x, int y, mlx_color color, std::string str); inline void TexturePut(NonOwningPtr texture, int x, int y, float scale_x, float scale_y, float angle); inline void TryEraseSpritesInScene(NonOwningPtr texture) noexcept; diff --git a/runtime/Includes/Core/Graphics.inl b/runtime/Includes/Core/Graphics.inl index eee2785..1416b9a 100644 --- a/runtime/Includes/Core/Graphics.inl +++ b/runtime/Includes/Core/Graphics.inl @@ -3,14 +3,14 @@ namespace mlx { - void GraphicsSupport::ResetRenderData(int color) 
noexcept + void GraphicsSupport::ResetRenderData(mlx_color color) noexcept { MLX_PROFILE_FUNCTION(); Vec4f vec_color = { - static_cast((color & 0xFF000000) >> 24) / 255.0f, - static_cast((color & 0x00FF0000) >> 16) / 255.0f, - static_cast((color & 0x0000FF00) >> 8) / 255.0f, - static_cast((color & 0x000000FF)) / 255.0f, + static_cast(color.r) / 255.0f, + static_cast(color.g) / 255.0f, + static_cast(color.b) / 255.0f, + static_cast(color.a) / 255.0f }; p_scene->ResetScene(std::move(vec_color)); m_put_pixel_manager.ResetRenderData(); @@ -18,7 +18,7 @@ namespace mlx m_pixelput_called = false; } - void GraphicsSupport::PixelPut(int x, int y, int color) noexcept + void GraphicsSupport::PixelPut(int x, int y, mlx_color color) noexcept { MLX_PROFILE_FUNCTION(); NonOwningPtr texture = m_put_pixel_manager.DrawPixel(x, y, m_draw_layer, color); @@ -30,7 +30,7 @@ namespace mlx } } - void GraphicsSupport::PixelPutArray(int x, int y, int* pixels, std::size_t pixels_size) noexcept + void GraphicsSupport::PixelPutArray(int x, int y, mlx_color* pixels, std::size_t pixels_size) noexcept { MLX_PROFILE_FUNCTION(); NonOwningPtr texture = m_put_pixel_manager.DrawPixelsArray(x, y, m_draw_layer, pixels, pixels_size); @@ -42,7 +42,7 @@ namespace mlx } } - void GraphicsSupport::PixelPutRegion(int x, int y, int w, int h, int* pixels) noexcept + void GraphicsSupport::PixelPutRegion(int x, int y, int w, int h, mlx_color* pixels) noexcept { MLX_PROFILE_FUNCTION(); NonOwningPtr texture = m_put_pixel_manager.DrawPixelsRegion(x, y, w, h, m_draw_layer, pixels); @@ -54,17 +54,17 @@ namespace mlx } } - void GraphicsSupport::StringPut(int x, int y, int color, std::string str) + void GraphicsSupport::StringPut(int x, int y, mlx_color color, std::string str) { MLX_PROFILE_FUNCTION(); if(str.empty()) return; Vec4f vec_color = { - static_cast((color & 0xFF000000) >> 24) / 255.0f, - static_cast((color & 0x00FF0000) >> 16) / 255.0f, - static_cast((color & 0x0000FF00) >> 8) / 255.0f, - static_cast((color & 0x000000FF)) / 255.0f, + static_cast(color.r) / 255.0f, + static_cast(color.g) / 255.0f, + static_cast(color.b) / 255.0f, + static_cast(color.a) / 255.0f, }; NonOwningPtr text = p_scene->GetTextFromPositionAndColor(str, Vec2f{ static_cast(x), static_cast(y) }, vec_color); diff --git a/runtime/Includes/Graphics/PutPixelManager.h b/runtime/Includes/Graphics/PutPixelManager.h index 8283759..8d1626c 100644 --- a/runtime/Includes/Graphics/PutPixelManager.h +++ b/runtime/Includes/Graphics/PutPixelManager.h @@ -11,9 +11,9 @@ namespace mlx PutPixelManager(NonOwningPtr renderer) : p_renderer(renderer) {} // Returns a valid pointer when a new texture has been created - NonOwningPtr DrawPixel(int x, int y, std::uint64_t draw_layer, int color); - NonOwningPtr DrawPixelsArray(int x, int y, std::uint64_t draw_layer, int* pixels, std::size_t pixels_size); - NonOwningPtr DrawPixelsRegion(int x, int y, int w, int h, std::uint64_t draw_layer, int* pixels); + NonOwningPtr DrawPixel(int x, int y, std::uint64_t draw_layer, mlx_color color); + NonOwningPtr DrawPixelsArray(int x, int y, std::uint64_t draw_layer, mlx_color* pixels, std::size_t pixels_size); + NonOwningPtr DrawPixelsRegion(int x, int y, int w, int h, std::uint64_t draw_layer, mlx_color* pixels); void ResetRenderData(); ~PutPixelManager() = default; diff --git a/runtime/Includes/Renderer/Image.h b/runtime/Includes/Renderer/Image.h index f5a70f4..5833f0a 100644 --- a/runtime/Includes/Renderer/Image.h +++ b/runtime/Includes/Renderer/Image.h @@ -83,11 +83,11 @@ namespace mlx void 
Init(CPUBuffer pixels, std::uint32_t width, std::uint32_t height, VkFormat format, bool is_multisampled, [[maybe_unused]] std::string_view debug_name); void Destroy() noexcept override; - void SetPixel(int x, int y, int color) noexcept; - void SetRegion(int x, int y, int w, int h, int* pixels) noexcept; - void SetLinearRegion(int x, int y, std::size_t len, int* pixels) noexcept; - int GetPixel(int x, int y) noexcept; - void GetRegion(int x, int y, int w, int h, int* dst) noexcept; + void SetPixel(int x, int y, mlx_color color) noexcept; + void SetRegion(int x, int y, int w, int h, mlx_color* color) noexcept; + void SetLinearRegion(int x, int y, std::size_t len, mlx_color* color) noexcept; + mlx_color GetPixel(int x, int y) noexcept; + void GetRegion(int x, int y, int w, int h, mlx_color* dst) noexcept; void Clear(VkCommandBuffer cmd, Vec4f color) override; void Update(VkCommandBuffer cmd); @@ -98,7 +98,7 @@ namespace mlx void OpenCPUBuffer(); private: - std::vector m_cpu_buffer; + std::vector m_cpu_buffer; std::optional m_staging_buffer; bool m_has_been_modified = false; }; diff --git a/runtime/Includes/Utils/Bits.h b/runtime/Includes/Utils/Bits.h index 0b6ecf2..4d7df89 100644 --- a/runtime/Includes/Utils/Bits.h +++ b/runtime/Includes/Utils/Bits.h @@ -11,6 +11,38 @@ namespace mlx std::ranges::reverse(value_representation); return std::bit_cast(value_representation); } + + constexpr std::uint32_t Rmask() noexcept + { + if constexpr(std::endian::native == std::endian::big) + return 0xFF000000; + else + return 0x000000FF; + } + + constexpr std::uint32_t Gmask() noexcept + { + if constexpr(std::endian::native == std::endian::big) + return 0x00FF0000; + else + return 0x0000FF00; + } + + constexpr std::uint32_t Bmask() noexcept + { + if constexpr(std::endian::native == std::endian::big) + return 0x0000FF00; + else + return 0x00FF0000; + } + + constexpr std::uint32_t Amask() noexcept + { + if constexpr(std::endian::native == std::endian::big) + return 0x000000FF; + else + return 0xFF000000; + } } #endif diff --git a/runtime/Sources/Core/Bridge.cpp b/runtime/Sources/Core/Bridge.cpp index 10e8a41..eff5d65 100644 --- a/runtime/Sources/Core/Bridge.cpp +++ b/runtime/Sources/Core/Bridge.cpp @@ -126,7 +126,7 @@ extern "C" gs->GetWindow()->GetSize(x, y); } - void mlx_clear_window(mlx_context mlx, mlx_window win, int color) + void mlx_clear_window(mlx_context mlx, mlx_window win, mlx_color color) { MLX_CHECK_APPLICATION_POINTER(mlx); mlx::NonOwningPtr gs = mlx->app->GetGraphicsSupport(win); @@ -201,7 +201,7 @@ extern "C" mlx->app->OnEvent(win, static_cast(event), funct_ptr, param); } - void mlx_pixel_put(mlx_context mlx, mlx_window win, int x, int y, int color) + void mlx_pixel_put(mlx_context mlx, mlx_window win, int x, int y, mlx_color color) { MLX_CHECK_APPLICATION_POINTER(mlx); mlx::NonOwningPtr gs = mlx->app->GetGraphicsSupport(win); @@ -248,16 +248,16 @@ extern "C" mlx->app->DestroyTexture(image); } - int mlx_get_image_pixel(mlx_context mlx, mlx_image image, int x, int y) + mlx_color mlx_get_image_pixel(mlx_context mlx, mlx_image image, int x, int y) { MLX_CHECK_APPLICATION_POINTER(mlx); mlx::NonOwningPtr texture = mlx->app->GetTexture(image); if(!texture) - return 0; + return { .rgba = 0x00000000 }; return texture->GetPixel(x, y); } - void mlx_set_image_pixel(mlx_context mlx, mlx_image image, int x, int y, int color) + void mlx_set_image_pixel(mlx_context mlx, mlx_image image, int x, int y, mlx_color color) { MLX_CHECK_APPLICATION_POINTER(mlx); mlx::NonOwningPtr texture = 
mlx->app->GetTexture(image); @@ -278,7 +278,7 @@ extern "C" gs->TexturePut(texture, x, y, 1.0f, 1.0f, 0.0f); } - void mlx_string_put(mlx_context mlx, mlx_window win, int x, int y, int color, char* str) + void mlx_string_put(mlx_context mlx, mlx_window win, int x, int y, mlx_color color, char* str) { MLX_CHECK_APPLICATION_POINTER(mlx); mlx::NonOwningPtr gs = mlx->app->GetGraphicsSupport(win); @@ -381,7 +381,7 @@ extern "C" gs->GetWindow()->Restore(); } - void mlx_pixel_put_array(mlx_context mlx, mlx_window win, int x, int y, int* pixels, size_t pixels_size) + void mlx_pixel_put_array(mlx_context mlx, mlx_window win, int x, int y, mlx_color* pixels, size_t pixels_size) { MLX_CHECK_APPLICATION_POINTER(mlx); mlx::NonOwningPtr gs = mlx->app->GetGraphicsSupport(win); @@ -390,7 +390,7 @@ extern "C" gs->PixelPutArray(x, y, pixels, pixels_size); } - void mlx_pixel_put_region(mlx_context mlx, mlx_window win, int x, int y, int w, int h, int* pixels) + void mlx_pixel_put_region(mlx_context mlx, mlx_window win, int x, int y, int w, int h, mlx_color* pixels) { MLX_CHECK_APPLICATION_POINTER(mlx); mlx::NonOwningPtr gs = mlx->app->GetGraphicsSupport(win); @@ -399,7 +399,7 @@ extern "C" gs->PixelPutRegion(x, y, w, h, pixels); } - void mlx_get_image_region(mlx_context mlx, mlx_image image, int x, int y, int w, int h, int* dst) + void mlx_get_image_region(mlx_context mlx, mlx_image image, int x, int y, int w, int h, mlx_color* dst) { MLX_CHECK_APPLICATION_POINTER(mlx); mlx::NonOwningPtr texture = mlx->app->GetTexture(image); @@ -408,7 +408,7 @@ extern "C" texture->GetRegion(x, y, w, h, dst); } - void mlx_set_image_region(mlx_context mlx, mlx_image image, int x, int y, int w, int h, int* pixels) + void mlx_set_image_region(mlx_context mlx, mlx_image image, int x, int y, int w, int h, mlx_color* pixels) { MLX_CHECK_APPLICATION_POINTER(mlx); mlx::NonOwningPtr texture = mlx->app->GetTexture(image); diff --git a/runtime/Sources/Core/SDLManager.cpp b/runtime/Sources/Core/SDLManager.cpp index 5fa5ce6..08141de 100644 --- a/runtime/Sources/Core/SDLManager.cpp +++ b/runtime/Sources/Core/SDLManager.cpp @@ -2,21 +2,10 @@ #include #include #include +#include namespace mlx { - #if SDL_BYTEORDER == SDL_BIG_ENDIAN - constexpr const std::uint32_t rmask = 0xff000000; - constexpr const std::uint32_t gmask = 0x00ff0000; - constexpr const std::uint32_t bmask = 0x0000ff00; - constexpr const std::uint32_t amask = 0x000000ff; - #else - constexpr const std::uint32_t rmask = 0x000000ff; - constexpr const std::uint32_t gmask = 0x0000ff00; - constexpr const std::uint32_t bmask = 0x00ff0000; - constexpr const std::uint32_t amask = 0xff000000; - #endif - namespace Internal { struct WindowInfos @@ -65,7 +54,7 @@ namespace mlx infos->window = SDL_CreateWindow(info->title, SDL_WINDOWPOS_CENTERED, SDL_WINDOWPOS_CENTERED, info->width, info->height, flags); if(!infos->window) FatalError("SDL: unable to open a new window; %", SDL_GetError()); - infos->icon = SDL_CreateRGBSurfaceFrom(static_cast(logo_mlx), logo_mlx_width, logo_mlx_height, 32, 4 * logo_mlx_width, rmask, gmask, bmask, amask); + infos->icon = SDL_CreateRGBSurfaceFrom(static_cast(logo_mlx), logo_mlx_width, logo_mlx_height, 32, 4 * logo_mlx_width, Rmask(), Gmask(), Bmask(), Amask()); SDL_SetWindowIcon(infos->window, infos->icon); m_windows_registry.insert(infos); diff --git a/runtime/Sources/Graphics/PutPixelManager.cpp b/runtime/Sources/Graphics/PutPixelManager.cpp index 79753ce..06cedf6 100644 --- a/runtime/Sources/Graphics/PutPixelManager.cpp +++ 
b/runtime/Sources/Graphics/PutPixelManager.cpp @@ -2,10 +2,11 @@ #include #include +#include namespace mlx { - NonOwningPtr PutPixelManager::DrawPixel(int x, int y, std::uint64_t draw_layer, int color) + NonOwningPtr PutPixelManager::DrawPixel(int x, int y, std::uint64_t draw_layer, mlx_color color) { MLX_PROFILE_FUNCTION(); bool is_newlayer; @@ -16,7 +17,7 @@ namespace mlx return (is_newlayer ? layer : nullptr); } - NonOwningPtr PutPixelManager::DrawPixelsArray(int x, int y, std::uint64_t draw_layer, int* pixels, std::size_t pixels_size) + NonOwningPtr PutPixelManager::DrawPixelsArray(int x, int y, std::uint64_t draw_layer, mlx_color* pixels, std::size_t pixels_size) { MLX_PROFILE_FUNCTION(); bool is_newlayer; @@ -27,7 +28,7 @@ namespace mlx return (is_newlayer ? layer : nullptr); } - NonOwningPtr PutPixelManager::DrawPixelsRegion(int x, int y, int w, int h, std::uint64_t draw_layer, int* pixels) + NonOwningPtr PutPixelManager::DrawPixelsRegion(int x, int y, int w, int h, std::uint64_t draw_layer, mlx_color* pixels) { MLX_PROFILE_FUNCTION(); bool is_newlayer; @@ -70,7 +71,7 @@ namespace mlx try { m_placements[draw_layer] = m_textures.at(m_current_texture_index).get(); - m_textures.at(m_current_texture_index)->Clear(VK_NULL_HANDLE, Vec4f{ 0.0f }); + m_textures.at(m_current_texture_index)->Clear(VK_NULL_HANDLE, Vec4f{ 0.f }); NonOwningPtr texture = m_textures.at(m_current_texture_index).get(); m_current_texture_index++; return texture; diff --git a/runtime/Sources/Renderer/Image.cpp b/runtime/Sources/Renderer/Image.cpp index 00e773b..7c40b18 100644 --- a/runtime/Sources/Renderer/Image.cpp +++ b/runtime/Sources/Renderer/Image.cpp @@ -5,7 +5,6 @@ #include #include #include -#include #define STB_IMAGE_IMPLEMENTATION @@ -200,20 +199,21 @@ namespace mlx Image::Destroy(); } - void Texture::SetPixel(int x, int y, int color) noexcept + void Texture::SetPixel(int x, int y, mlx_color color) noexcept { MLX_PROFILE_FUNCTION(); if(x < 0 || y < 0 || static_cast(x) > m_width || static_cast(y) > m_height) return; if(!m_staging_buffer.has_value()) OpenCPUBuffer(); - // Needs to reverse bytes order because why not - color = ByteSwap(color); - m_cpu_buffer[(y * m_width) + x] = color; + if constexpr(std::endian::native == std::endian::little) + m_cpu_buffer[(y * m_width) + x] = mlx_color{ .r = color.a, .g = color.b, .b = color.g, .a = color.r }; + else + m_cpu_buffer[(y * m_width) + x] = color; m_has_been_modified = true; } - void Texture::SetRegion(int x, int y, int w, int h, int* pixels) noexcept + void Texture::SetRegion(int x, int y, int w, int h, mlx_color* pixels) noexcept { MLX_PROFILE_FUNCTION(); if(x < 0 || y < 0 || static_cast(x) > m_width || static_cast(y) > m_height) @@ -231,40 +231,48 @@ namespace mlx break; moving_y++; } - // Needs to reverse bytes order because why not - int color = ByteSwap(pixels[i]); - m_cpu_buffer[(moving_y * m_width) + moving_x] = color; + if constexpr(std::endian::native == std::endian::little) + m_cpu_buffer[(moving_y * m_width) + moving_x] = mlx_color{ .r = pixels[i].a, .g = pixels[i].b, .b = pixels[i].g, .a = pixels[i].r }; + else + m_cpu_buffer[(moving_y * m_width) + moving_x] = pixels[i]; } m_has_been_modified = true; } - void Texture::SetLinearRegion(int x, int y, std::size_t len, int* pixels) noexcept + void Texture::SetLinearRegion(int x, int y, std::size_t len, mlx_color* pixels) noexcept { MLX_PROFILE_FUNCTION(); if(x < 0 || y < 0 || static_cast(x) > m_width || static_cast(y) > m_height) return; if(!m_staging_buffer.has_value()) OpenCPUBuffer(); - for(std::size_t 
i = 0; i < len; i++) + if constexpr(std::endian::native == std::endian::little) + for(std::size_t i = 0; i < len; i++) + m_cpu_buffer[(y * m_width) + x + i] = mlx_color{ .r = pixels[i].a, .g = pixels[i].b, .b = pixels[i].g, .a = pixels[i].r }; + else { - // Needs to reverse bytes order because why not - int color = ByteSwap(pixels[i]); - m_cpu_buffer[(y * m_width) + x + i] = color; + std::memcpy(&m_cpu_buffer[(y * m_width) + x], pixels, len); } m_has_been_modified = true; } - int Texture::GetPixel(int x, int y) noexcept + mlx_color Texture::GetPixel(int x, int y) noexcept { MLX_PROFILE_FUNCTION(); if(x < 0 || y < 0 || static_cast(x) > m_width || static_cast(y) > m_height) - return 0; + return { .rgba = 0x00000000 }; if(!m_staging_buffer.has_value()) OpenCPUBuffer(); - return m_cpu_buffer[(y * m_width) + x]; + if constexpr(std::endian::native == std::endian::little) + { + mlx_color color = m_cpu_buffer[(y * m_width) + x]; + return { .r = color.a, .g = color.b, .b = color.g, .a = color.r }; + } + else + return m_cpu_buffer[(y * m_width) + x]; } - void Texture::GetRegion(int x, int y, int w, int h, int* dst) noexcept + void Texture::GetRegion(int x, int y, int w, int h, mlx_color* dst) noexcept { MLX_PROFILE_FUNCTION(); if(x < 0 || y < 0 || static_cast(x) > m_width || static_cast(y) > m_height) @@ -280,8 +288,13 @@ namespace mlx break; moving_y++; } - // Needs to reverse bytes order because why not - dst[i] = ByteSwap(m_cpu_buffer[(moving_y * m_width) + moving_x]); + if constexpr(std::endian::native == std::endian::little) + { + mlx_color color = m_cpu_buffer[(moving_y * m_width) + moving_x]; + dst[i] = mlx_color{ .r = color.a, .g = color.b, .b = color.g, .a = color.r }; + } + else + dst[i] = m_cpu_buffer[(moving_y * m_width) + moving_x]; } } @@ -291,12 +304,13 @@ namespace mlx Image::Clear(cmd, std::move(color)); if(m_staging_buffer.has_value()) { - std::uint8_t color_bytes[4]; - color_bytes[0] = static_cast(color.r * 255.f); - color_bytes[1] = static_cast(color.g * 255.f); - color_bytes[2] = static_cast(color.b * 255.f); - color_bytes[3] = static_cast(color.a * 255.f); - std::fill(m_cpu_buffer.begin(), m_cpu_buffer.end(), *reinterpret_cast(color_bytes)); + mlx_color processed_color{ + .r = static_cast(color.r * 255.f), + .g = static_cast(color.g * 255.f), + .b = static_cast(color.b * 255.f), + .a = static_cast(color.a * 255.f) + }; + std::fill(m_cpu_buffer.begin(), m_cpu_buffer.end(), processed_color); } } From 213a8f6a6d50a6c3333502b3cc38f92c6f4b74ad Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Tue, 17 Dec 2024 00:19:13 +0100 Subject: [PATCH 105/131] implementing endianness check on mlx_color struct --- includes/mlx.h | 17 ++++++++++++----- includes/mlx_profile.h | 6 +++++- 2 files changed, 17 insertions(+), 6 deletions(-) diff --git a/includes/mlx.h b/includes/mlx.h index 21fc75a..adde595 100644 --- a/includes/mlx.h +++ b/includes/mlx.h @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/04 16:56:35 by maldavid #+# #+# */ -/* Updated: 2024/12/16 23:11:59 by maldavid ### ########.fr */ +/* Updated: 2024/12/17 00:15:34 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -48,10 +48,17 @@ typedef union mlx_color { struct { - uint8_t a; - uint8_t b; - uint8_t g; - uint8_t r; + #if MLX_ENDIAN_ORDER == MLX_LITTLE_ENDIAN + uint8_t a; + uint8_t b; + uint8_t g; + uint8_t r; + #else + uint8_t r; + uint8_t g; + uint8_t b; + uint8_t a; + #endif }; uint32_t rgba; } mlx_color; diff --git 
a/includes/mlx_profile.h b/includes/mlx_profile.h index d525c45..8f6431a 100644 --- a/includes/mlx_profile.h +++ b/includes/mlx_profile.h @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/11/10 08:49:17 by maldavid #+# #+# */ -/* Updated: 2024/12/16 16:16:07 by maldavid ### ########.fr */ +/* Updated: 2024/12/17 00:14:47 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -142,6 +142,10 @@ #endif #endif +#define MLX_LITTLE_ENDIAN 0x41424344UL +#define MLX_BIG_ENDIAN 0x44434241UL +#define MLX_ENDIAN_ORDER ('ABCD') + #include #define MLX_MAKE_VERSION(major, minor, patch) ((((uint32_t)(major)) << 22U) | (((uint32_t)(minor)) << 12U) | ((uint32_t)(patch))) From 905859acba7a660cc789e973889f01c0bbf5fd66 Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Tue, 17 Dec 2024 00:42:21 +0100 Subject: [PATCH 106/131] changing endianness getter --- includes/mlx.h | 4 +-- includes/mlx_profile.h | 55 +++++++++++++++++++++++++++++++++++++++--- 2 files changed, 53 insertions(+), 6 deletions(-) diff --git a/includes/mlx.h b/includes/mlx.h index adde595..f41edf2 100644 --- a/includes/mlx.h +++ b/includes/mlx.h @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/04 16:56:35 by maldavid #+# #+# */ -/* Updated: 2024/12/17 00:15:34 by maldavid ### ########.fr */ +/* Updated: 2024/12/17 00:27:35 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -48,7 +48,7 @@ typedef union mlx_color { struct { - #if MLX_ENDIAN_ORDER == MLX_LITTLE_ENDIAN + #if MLX_BYTEORDER == MLX_LITTLE_ENDIAN uint8_t a; uint8_t b; uint8_t g; diff --git a/includes/mlx_profile.h b/includes/mlx_profile.h index 8f6431a..5f53b0f 100644 --- a/includes/mlx_profile.h +++ b/includes/mlx_profile.h @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/11/10 08:49:17 by maldavid #+# #+# */ -/* Updated: 2024/12/17 00:14:47 by maldavid ### ########.fr */ +/* Updated: 2024/12/17 00:35:35 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -49,6 +49,14 @@ #define MLX_PLAT_MACOS #elif defined(unix) || defined(__unix__) || defined(__unix) #define MLX_PLAT_UNIX +#elif defined(__sun) && defined(__SVR4) + #define MLX_PLAT_SOLARIS +#elif defined(__OpenBSD__) + #define MLX_PLAT_OPENBSD +#elif defined(__FreeBSD__) || defined(__FreeBSD_kernel__) || defined(__DragonFly__) + #define MLX_PLAT_FREEBSD +#elif defined(__NetBSD__) + #define MLX_PLAT_NETBSD #else #error "Unknown environment (not Windows, not Linux, not MacOS, not Unix)" #endif @@ -142,9 +150,48 @@ #endif #endif -#define MLX_LITTLE_ENDIAN 0x41424344UL -#define MLX_BIG_ENDIAN 0x44434241UL -#define MLX_ENDIAN_ORDER ('ABCD') +#define MLX_LITTLE_ENDIAN 1234 +#define MLX_BIG_ENDIAN 4321 + +#ifndef MLX_BYTEORDER + #if defined(MLX_PLAT_LINUX) + #include + #define MLX_BYTEORDER __BYTE_ORDER + #elif defined(MLX_PLAT_SOLARIS) + #include + #if defined(_LITTLE_ENDIAN) + #define MLX_BYTEORDER MLX_LITTLE_ENDIAN + #elif defined(_BIG_ENDIAN) + #define MLX_BYTEORDER MLX_BIG_ENDIAN + #else + #error Unsupported endianness + #endif + #elif defined(MLX_PLAT_OPENBSD) || defined(__DragonFly__) + #include + #define MLX_BYTEORDER BYTE_ORDER + #elif defined(MLX_PLAT_FREEBSD) || defined(MLX_PLAT_NETBSD) + #include + #define MLX_BYTEORDER BYTE_ORDER + #elif defined(__ORDER_LITTLE_ENDIAN__) && defined(__ORDER_BIG_ENDIAN__) && 
defined(__BYTE_ORDER__) + #if (__BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__) + #define MLX_BYTEORDER MLX_LITTLE_ENDIAN + #elif (__BYTE_ORDER__ == __ORDER_BIG_ENDIAN__) + #define MLX_BYTEORDER MLX_BIG_ENDIAN + #else + #error Unsupported endianness + #endif + #else + #if defined(__hppa__) || \ + defined(__m68k__) || defined(mc68000) || defined(_M_M68K) || \ + (defined(__MIPS__) && defined(__MIPSEB__)) || \ + defined(__ppc__) || defined(__POWERPC__) || defined(__powerpc__) || defined(__PPC__) || \ + defined(__sparc__) || defined(__sparc) + #define MLX_BYTEORDER MLX_BIG_ENDIAN + #else + #define MLX_BYTEORDER MLX_LITTLE_ENDIAN + #endif + #endif +#endif #include From 24d9b4b6c9c762856e99212c7d5ed4227ad5827c Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Tue, 17 Dec 2024 00:59:54 +0100 Subject: [PATCH 107/131] fixing gcc/msvc error --- runtime/Sources/Core/Memory.cpp | 8 ++++---- runtime/Sources/Renderer/Image.cpp | 26 +++++++++++++++----------- 2 files changed, 19 insertions(+), 15 deletions(-) diff --git a/runtime/Sources/Core/Memory.cpp b/runtime/Sources/Core/Memory.cpp index 57869c2..9f8ee24 100644 --- a/runtime/Sources/Core/Memory.cpp +++ b/runtime/Sources/Core/Memory.cpp @@ -53,12 +53,12 @@ namespace mlx void* MemManager::Realloc(void* ptr, std::size_t size) { - void* ptr2 = std::realloc(ptr, size); - if(ptr2 != nullptr) - s_blocks.emplace_back(ptr, size, false); auto it = std::find_if(s_blocks.begin(), s_blocks.end(), [=](const Descriptor& rhs){ return ptr == rhs.ptr; }); if(it != s_blocks.end()) s_blocks.erase(it); + void* ptr2 = std::realloc(ptr, size); + if(ptr2 != nullptr) + s_blocks.emplace_back(ptr2, size, false); return ptr2; } @@ -79,7 +79,7 @@ namespace mlx } #endif if(ptr2 != nullptr) - s_blocks.emplace_back(ptr, size, true); + s_blocks.emplace_back(ptr2, size, true); return ptr2; } diff --git a/runtime/Sources/Renderer/Image.cpp b/runtime/Sources/Renderer/Image.cpp index 7c40b18..3a3e3c1 100644 --- a/runtime/Sources/Renderer/Image.cpp +++ b/runtime/Sources/Renderer/Image.cpp @@ -30,6 +30,16 @@ namespace mlx { + mlx_color ReverseColor(mlx_color color) + { + mlx_color reversed_color; + reversed_color.r = color.a; + reversed_color.g = color.b; + reversed_color.b = color.g; + reversed_color.a = color.r; + return reversed_color; + } + void Image::Init(ImageType type, std::uint32_t width, std::uint32_t height, VkFormat format, VkImageTiling tiling, VkImageUsageFlags usage, bool is_multisampled, [[maybe_unused]] std::string_view debug_name) { MLX_PROFILE_FUNCTION(); @@ -207,7 +217,7 @@ namespace mlx if(!m_staging_buffer.has_value()) OpenCPUBuffer(); if constexpr(std::endian::native == std::endian::little) - m_cpu_buffer[(y * m_width) + x] = mlx_color{ .r = color.a, .g = color.b, .b = color.g, .a = color.r }; + m_cpu_buffer[(y * m_width) + x] = ReverseColor(color); else m_cpu_buffer[(y * m_width) + x] = color; m_has_been_modified = true; @@ -232,7 +242,7 @@ namespace mlx moving_y++; } if constexpr(std::endian::native == std::endian::little) - m_cpu_buffer[(moving_y * m_width) + moving_x] = mlx_color{ .r = pixels[i].a, .g = pixels[i].b, .b = pixels[i].g, .a = pixels[i].r }; + m_cpu_buffer[(moving_y * m_width) + moving_x] = ReverseColor(pixels[i]); else m_cpu_buffer[(moving_y * m_width) + moving_x] = pixels[i]; } @@ -248,7 +258,7 @@ namespace mlx OpenCPUBuffer(); if constexpr(std::endian::native == std::endian::little) for(std::size_t i = 0; i < len; i++) - m_cpu_buffer[(y * m_width) + x + i] = mlx_color{ .r = pixels[i].a, .g = pixels[i].b, .b = pixels[i].g, .a = pixels[i].r }; + m_cpu_buffer[(y 
* m_width) + x + i] = ReverseColor(pixels[i]); else { std::memcpy(&m_cpu_buffer[(y * m_width) + x], pixels, len); @@ -264,10 +274,7 @@ namespace mlx if(!m_staging_buffer.has_value()) OpenCPUBuffer(); if constexpr(std::endian::native == std::endian::little) - { - mlx_color color = m_cpu_buffer[(y * m_width) + x]; - return { .r = color.a, .g = color.b, .b = color.g, .a = color.r }; - } + return ReverseColor(m_cpu_buffer[(y * m_width) + x]); else return m_cpu_buffer[(y * m_width) + x]; } @@ -289,10 +296,7 @@ namespace mlx moving_y++; } if constexpr(std::endian::native == std::endian::little) - { - mlx_color color = m_cpu_buffer[(moving_y * m_width) + moving_x]; - dst[i] = mlx_color{ .r = color.a, .g = color.b, .b = color.g, .a = color.r }; - } + dst[i] = ReverseColor(m_cpu_buffer[(moving_y * m_width) + moving_x]); else dst[i] = m_cpu_buffer[(moving_y * m_width) + moving_x]; } From 6de8d3a4fddefaea0c939f29aae235946f1b2d3b Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Tue, 17 Dec 2024 01:03:00 +0100 Subject: [PATCH 108/131] fixing gcc/msvc error --- runtime/Sources/Renderer/Image.cpp | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/runtime/Sources/Renderer/Image.cpp b/runtime/Sources/Renderer/Image.cpp index 3a3e3c1..af34d74 100644 --- a/runtime/Sources/Renderer/Image.cpp +++ b/runtime/Sources/Renderer/Image.cpp @@ -308,12 +308,11 @@ namespace mlx Image::Clear(cmd, std::move(color)); if(m_staging_buffer.has_value()) { - mlx_color processed_color{ - .r = static_cast(color.r * 255.f), - .g = static_cast(color.g * 255.f), - .b = static_cast(color.b * 255.f), - .a = static_cast(color.a * 255.f) - }; + mlx_color processed_color; + processed_color.r = static_cast(color.r * 255.f); + processed_color.g = static_cast(color.g * 255.f); + processed_color.b = static_cast(color.b * 255.f); + processed_color.a = static_cast(color.a * 255.f); std::fill(m_cpu_buffer.begin(), m_cpu_buffer.end(), processed_color); } } From 8f4dc6e3b546a819871dac5ab7be2f8f21d894f5 Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Tue, 17 Dec 2024 01:40:58 +0100 Subject: [PATCH 109/131] fixing gcc/msvc error --- runtime/Sources/Graphics/PutPixelManager.cpp | 15 +++++++-------- runtime/Sources/Renderer/Memory.cpp | 1 + xmake.lua | 4 ++-- 3 files changed, 10 insertions(+), 10 deletions(-) diff --git a/runtime/Sources/Graphics/PutPixelManager.cpp b/runtime/Sources/Graphics/PutPixelManager.cpp index 06cedf6..1adc0a6 100644 --- a/runtime/Sources/Graphics/PutPixelManager.cpp +++ b/runtime/Sources/Graphics/PutPixelManager.cpp @@ -44,14 +44,6 @@ namespace mlx MLX_PROFILE_FUNCTION(); Verify((bool)p_renderer, "invalid renderer pointer"); - VkExtent2D extent; - if(p_renderer->GetWindow()) - extent = kvfGetSwapchainImagesSize(p_renderer->GetSwapchain().Get()); - else if(p_renderer->GetRenderTarget()) - extent = VkExtent2D{ .width = p_renderer->GetRenderTarget()->GetWidth(), .height = p_renderer->GetRenderTarget()->GetHeight() }; - else - FatalError("a renderer was created without window nor render target attached (wtf)"); - auto it = m_placements.find(draw_layer); if(it != m_placements.end()) { @@ -62,6 +54,13 @@ namespace mlx if(m_current_texture_index >= m_textures.size()) { + VkExtent2D extent; + if(p_renderer->GetWindow()) + extent = kvfGetSwapchainImagesSize(p_renderer->GetSwapchain().Get()); + else if(p_renderer->GetRenderTarget()) + extent = VkExtent2D{ .width = p_renderer->GetRenderTarget()->GetWidth(), .height = p_renderer->GetRenderTarget()->GetHeight() }; + else + FatalError("a renderer was created without window 
nor render target attached (wtf)"); #ifdef DEBUG m_textures.push_back(std::make_unique(CPUBuffer{}, extent.width, extent.height, VK_FORMAT_R8G8B8A8_SRGB, false, "mlx_put_pixel_layer_" + std::to_string(m_current_texture_index))); #else diff --git a/runtime/Sources/Renderer/Memory.cpp b/runtime/Sources/Renderer/Memory.cpp index 58c27c6..16734c2 100644 --- a/runtime/Sources/Renderer/Memory.cpp +++ b/runtime/Sources/Renderer/Memory.cpp @@ -12,6 +12,7 @@ #pragma GCC diagnostic ignored "-Wmissing-field-initializers" #pragma GCC diagnostic ignored "-Wunused-parameter" #pragma GCC diagnostic ignored "-Wunused-variable" + #pragma GCC diagnostic ignored "-Wunused-function" #pragma GCC diagnostic ignored "-Wparentheses" #include #pragma GCC diagnostic pop diff --git a/xmake.lua b/xmake.lua index 857433a..c145724 100644 --- a/xmake.lua +++ b/xmake.lua @@ -3,7 +3,7 @@ add_requires("libsdl", { configs = { sdlmain = false } }) add_rules("mode.debug", "mode.release") -set_languages("cxx20", "c99") +set_languages("cxx20", "c11") set_objectdir("objs/xmake/$(os)_$(arch)") set_targetdir("./") @@ -94,7 +94,7 @@ target("Test") add_deps("mlx") - add_files("example/main.c") + add_files("example/main.c", { languages = "c99" }) add_defines("SDL_MAIN_HANDLED") From ffff4722b774f021326b1e65d567ef3cc8f31ed1 Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Tue, 17 Dec 2024 02:33:04 +0100 Subject: [PATCH 110/131] adding multiple hooks supports for events and loop --- .github/workflows/windows.yml | 4 ---- Makefile | 21 +++++++++++--------- example/main.c | 11 ++++------ includes/mlx.h | 6 +++--- runtime/Includes/Core/Application.h | 16 +++++++++++---- runtime/Includes/Core/Application.inl | 9 ++++----- runtime/Includes/Core/Logs.inl | 4 ++-- runtime/Includes/Platform/Inputs.h | 11 +++++----- runtime/Sources/Core/Application.cpp | 7 +++++-- runtime/Sources/Core/Bridge.cpp | 19 +++++++++--------- runtime/Sources/Graphics/PutPixelManager.cpp | 3 ++- runtime/Sources/Platform/Inputs.cpp | 8 ++++++-- runtime/Sources/Renderer/Descriptor.cpp | 2 +- runtime/Sources/Renderer/Image.cpp | 13 +++++++----- xmake.lua | 2 ++ 15 files changed, 77 insertions(+), 59 deletions(-) diff --git a/.github/workflows/windows.yml b/.github/workflows/windows.yml index ad4d479..6e3abd5 100644 --- a/.github/workflows/windows.yml +++ b/.github/workflows/windows.yml @@ -50,7 +50,3 @@ jobs: # Build the mlx - name: Build MacroLibX run: xmake --yes - - # Build the example - - name: Build Example - run: xmake build --yes Test diff --git a/Makefile b/Makefile index 5a5c659..7dd2529 100644 --- a/Makefile +++ b/Makefile @@ -33,17 +33,19 @@ INCLUDES = -I./includes -I./runtime/Includes -I./runtime/Sources -I./third_party CXXPCHFLAGS = -xc++-header -PCH = runtime/Includes/PreCompiled.h -GCH = +PCH_SOURCE = runtime/Includes/PreCompiled.h +GCH = runtime/Includes/PreCompiled.h.gch +CCH = runtime/Includes/PreCompiled.h.pch +PCH = -NZRRC = nzslc +NZRRC ?= nzslc ifeq ($(TOOLCHAIN), gcc) CXX = g++ - GCH = runtime/Includes/PreCompiled.h.gch + PCH = $(GCH) CXXFLAGS += -Wno-error=cpp else - GCH = runtime/Includes/PreCompiled.h.pch + PCH = $(CCH) CXXFLAGS += -Wno-error=#warning -include-pch $(GCH) endif @@ -115,7 +117,7 @@ ifeq ($(OBJS_TOTAL), 0) # To avoid division per 0 endif CURR_OBJ = 0 -$(OBJ_DIR)/%.o: %.cpp $(GCH) +$(OBJ_DIR)/%.o: %.cpp $(PCH) @mkdir -p $(dir $@) @$(eval CURR_OBJ=$(shell echo $$(( $(CURR_OBJ) + 1 )))) @$(eval PERCENT=$(shell echo $$(( $(CURR_OBJ) * 100 / $(OBJS_TOTAL) )))) @@ -139,9 +141,9 @@ CURR_SPV = 0 all: _printbuildinfos @$(MAKE) $(NAME) -$(GCH): 
+$(PCH): @printf "$(COLOR)($(_BOLD)%3s%%$(_RESET)$(COLOR)) $(_RESET)Compiling $(_BOLD)PreCompiled header$(_RESET)\n" "0" - @$(CXX) $(CXXPCHFLAGS) $(INCLUDES) $(PCH) -o $(GCH) + @$(CXX) $(CXXPCHFLAGS) $(INCLUDES) $(PCH_SOURCE) -o $(PCH) $(NAME): $(OBJS) @printf "Linking $(_BOLD)$(NAME)$(_RESET)\n" @@ -163,7 +165,8 @@ clean: @$(RM) $(OBJ_DIR) @printf "Cleaned $(_BOLD)$(OBJ_DIR)$(_RESET)\n" @$(RM) $(GCH) - @printf "Cleaned $(_BOLD)$(GCH)$(_RESET)\n" + @$(RM) $(CCH) + @printf "Cleaned pre compiled header\n" fclean: clean @$(RM) $(NAME) diff --git a/example/main.c b/example/main.c index a06fc0e..8868be8 100644 --- a/example/main.c +++ b/example/main.c @@ -18,7 +18,7 @@ typedef struct static mlx_color pixels_circle[CIRCLE_DIAMETER * CIRCLE_DIAMETER] = { 0 }; -int update(void* param) +void update(void* param) { static int i = 0; mlx_t* mlx = (mlx_t*)param; @@ -60,7 +60,6 @@ int update(void* param) mlx_pixel_put_region(mlx->mlx, mlx->win, 200, 170, CIRCLE_DIAMETER, CIRCLE_DIAMETER, pixels_circle); i++; - return 0; } mlx_image create_image(mlx_t* mlx) @@ -87,7 +86,7 @@ mlx_image create_image(mlx_t* mlx) return img; } -int key_hook(int key, void* param) +void key_hook(int key, void* param) { int x; int y; @@ -123,14 +122,12 @@ int key_hook(int key, void* param) default : break; } - return 0; } -int window_hook(int event, void* param) +void window_hook(int event, void* param) { if(event == 0) mlx_loop_end(((mlx_t*)param)->mlx); - return 0; } int main(void) @@ -179,7 +176,7 @@ int main(void) mlx_set_font_scale(mlx.mlx, "font.ttf", 16.f); mlx_string_put(mlx.mlx, mlx.win, 20, 20, (mlx_color){ .rgba = 0x0020FFFF }, "that text will disappear"); - mlx_loop_hook(mlx.mlx, update, &mlx); + mlx_add_loop_hook(mlx.mlx, update, &mlx); mlx_loop(mlx.mlx); mlx_destroy_image(mlx.mlx, mlx.logo_png); diff --git a/includes/mlx.h b/includes/mlx.h index f41edf2..56f461e 100644 --- a/includes/mlx.h +++ b/includes/mlx.h @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/04 16:56:35 by maldavid #+# #+# */ -/* Updated: 2024/12/17 00:27:35 by maldavid ### ########.fr */ +/* Updated: 2024/12/17 02:23:40 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -213,7 +213,7 @@ MLX_API void mlx_get_screen_size(mlx_context mlx, mlx_window win, int* w, int* h * @param f The function * @param param Param to give to the function passed */ -MLX_API void mlx_loop_hook(mlx_context mlx, int (*f)(void*), void* param); +MLX_API void mlx_add_loop_hook(mlx_context mlx, void(*f)(void*), void* param); /** * @brief Starts the internal main loop @@ -289,7 +289,7 @@ typedef enum mlx_event_type * @param f Function to be executed * @param param Parameter given to the function */ -MLX_API void mlx_on_event(mlx_context mlx, mlx_window win, mlx_event_type event, int (*f)(int, void*), void* param); +MLX_API void mlx_on_event(mlx_context mlx, mlx_window win, mlx_event_type event, void(*f)(int, void*), void* param); diff --git a/runtime/Includes/Core/Application.h b/runtime/Includes/Core/Application.h index 4dff74f..3cc30ee 100644 --- a/runtime/Includes/Core/Application.h +++ b/runtime/Includes/Core/Application.h @@ -21,7 +21,7 @@ namespace mlx inline void GetScreenSize(mlx_window win, int* w, int* h) noexcept; inline void SetFPSCap(std::uint32_t fps) noexcept; - inline void OnEvent(mlx_window win, int event, int (*funct_ptr)(int, void*), void* param) noexcept; + inline void OnEvent(mlx_window win, int event, void(*f)(int, void*), void* param) noexcept; 
inline mlx_window NewGraphicsSuport(const mlx_window_create_info* info); inline NonOwningPtr GetGraphicsSupport(mlx_window win); @@ -32,7 +32,7 @@ namespace mlx inline NonOwningPtr GetTexture(mlx_image image); void DestroyTexture(mlx_image img); - inline void LoopHook(int (*f)(void*), void* param); + inline void AddLoopHook(void(*f)(void*), void* param); inline void LoopEnd() noexcept; inline void LoadFont(const std::filesystem::path& filepath, float scale); @@ -41,6 +41,15 @@ namespace mlx ~Application(); + private: + struct Hook + { + func::function fn; + void* param; + + Hook(func::function fn, void* param) : fn(fn), param(param) {} + }; + private: std::unique_ptr p_mem_manager; // Putting ptr here to initialise them before inputs, even if it f*cks the padding std::unique_ptr p_sdl_manager; @@ -50,13 +59,12 @@ namespace mlx ImageRegistry m_image_registry; MeshRegistry m_mesh_registry; std::vector> m_graphics; + std::vector m_hooks; std::shared_ptr p_last_font_bound; - std::function f_loop_hook; std::unique_ptr p_render_core; #ifdef PROFILER std::unique_ptr p_profiler; #endif - Handle p_param = nullptr; }; } diff --git a/runtime/Includes/Core/Application.inl b/runtime/Includes/Core/Application.inl index 822c4d9..cbdbbb2 100644 --- a/runtime/Includes/Core/Application.inl +++ b/runtime/Includes/Core/Application.inl @@ -40,12 +40,12 @@ namespace mlx *y = m_in.GetY(); } - void Application::OnEvent(mlx_window win, int event, int (*funct_ptr)(int, void*), void* param) noexcept + void Application::OnEvent(mlx_window win, int event, void(*f)(int, void*), void* param) noexcept { CHECK_WINDOW_PTR(win, ); if(!m_graphics[win->id]->HasWindow()) return; - m_in.OnEvent(m_graphics[win->id]->GetWindow()->GetID(), event, funct_ptr, param); + m_in.OnEvent(m_graphics[win->id]->GetWindow()->GetID(), event, f, param); } void Application::SetFPSCap(std::uint32_t fps) noexcept @@ -122,10 +122,9 @@ namespace mlx return texture; } - void Application::LoopHook(int (*f)(void*), void* param) + void Application::AddLoopHook(void(*f)(void*), void* param) { - f_loop_hook = f; - p_param = param; + m_hooks.emplace_back(f, param); } void Application::LoopEnd() noexcept diff --git a/runtime/Includes/Core/Logs.inl b/runtime/Includes/Core/Logs.inl index 1275383..b9f66e4 100644 --- a/runtime/Includes/Core/Logs.inl +++ b/runtime/Includes/Core/Logs.inl @@ -93,7 +93,7 @@ namespace mlx try { std::stringstream ss; - ss << Format("Verification failed : %", message, args...); + ss << Format("Verification failed: %", message, args...); Logs::Report(LogType::FatalError, line, file, function, ss.str()); } catch(const std::exception& e) @@ -112,7 +112,7 @@ namespace mlx try { std::stringstream ss; - ss << Format("Assertion failed : %", message, args...); + ss << Format("Assertion failed: %", message, args...); Logs::Report(LogType::FatalError, line, file, function, ss.str()); } catch(const std::exception& e) diff --git a/runtime/Includes/Platform/Inputs.h b/runtime/Includes/Platform/Inputs.h index 3ac5b90..e2b7c9a 100644 --- a/runtime/Includes/Platform/Inputs.h +++ b/runtime/Includes/Platform/Inputs.h @@ -12,8 +12,10 @@ namespace mlx public: struct Hook { - func::function hook; + func::function fn; void* param = nullptr; + + Hook(func::function fn, void* param) : fn(fn), param(param) {} }; public: @@ -33,17 +35,16 @@ namespace mlx MLX_FORCEINLINE constexpr void Finish() noexcept { m_run = false; } MLX_FORCEINLINE constexpr void Run() noexcept { m_run = true; } - inline void OnEvent(std::uint32_t id, int event, int (*funct_ptr)(int, 
void*), void* param) noexcept + inline void OnEvent(std::uint32_t id, int event, void(*f)(int, void*), void* param) noexcept { - m_events_hooks[id][event].hook = funct_ptr; - m_events_hooks[id][event].param = param; + m_events_hooks[id][event].emplace_back(f, param); } ~Inputs() = default; private: std::unordered_map> m_windows; - std::unordered_map> m_events_hooks; + std::unordered_map, 6>> m_events_hooks; bool m_run = false; }; } diff --git a/runtime/Sources/Core/Application.cpp b/runtime/Sources/Core/Application.cpp index daf35c0..b3cf579 100644 --- a/runtime/Sources/Core/Application.cpp +++ b/runtime/Sources/Core/Application.cpp @@ -38,8 +38,11 @@ namespace mlx m_in.FetchInputs(); - if(f_loop_hook) - f_loop_hook(p_param); + for(const auto& hook : m_hooks) + { + if(hook.fn) + hook.fn(hook.param); + } for(auto& gs : m_graphics) { diff --git a/runtime/Sources/Core/Bridge.cpp b/runtime/Sources/Core/Bridge.cpp index eff5d65..835dce2 100644 --- a/runtime/Sources/Core/Bridge.cpp +++ b/runtime/Sources/Core/Bridge.cpp @@ -31,12 +31,13 @@ extern "C" mlx::MemManager::Get(); // just to initialize the C garbage collector - try { __internal_application_ptr = new mlx::Application; } - catch(...) { mlx::FatalError("internal application memory allocation failed"); } + __internal_application_ptr = new mlx::Application; + if(__internal_application_ptr == nullptr) + mlx::FatalError("internal application memory allocation failed"); - mlx_context_handler* context; - try { context = new mlx_context_handler; } - catch(...) { mlx::FatalError("mlx_context memory allocation failed"); } + mlx_context_handler* context = new mlx_context_handler; + if(context == nullptr) + mlx::FatalError("mlx_context memory allocation failed"); context->app = __internal_application_ptr; return context; } @@ -150,10 +151,10 @@ extern "C" gs->GetWindow()->GetScreenSizeWindowIsOn(w, h); } - void mlx_loop_hook(mlx_context mlx, int (*f)(void*), void* param) + void mlx_add_loop_hook(mlx_context mlx, void(*f)(void*), void* param) { MLX_CHECK_APPLICATION_POINTER(mlx); - mlx->app->LoopHook(f, param); + mlx->app->AddLoopHook(f, param); } void mlx_loop(mlx_context mlx) @@ -195,10 +196,10 @@ extern "C" mlx->app->GetMousePos(x, y); } - void mlx_on_event(mlx_context mlx, mlx_window win, mlx_event_type event, int (*funct_ptr)(int, void*), void* param) + void mlx_on_event(mlx_context mlx, mlx_window win, mlx_event_type event, void(*f)(int, void*), void* param) { MLX_CHECK_APPLICATION_POINTER(mlx); - mlx->app->OnEvent(win, static_cast(event), funct_ptr, param); + mlx->app->OnEvent(win, static_cast(event), f, param); } void mlx_pixel_put(mlx_context mlx, mlx_window win, int x, int y, mlx_color color) diff --git a/runtime/Sources/Graphics/PutPixelManager.cpp b/runtime/Sources/Graphics/PutPixelManager.cpp index 1adc0a6..41ba4de 100644 --- a/runtime/Sources/Graphics/PutPixelManager.cpp +++ b/runtime/Sources/Graphics/PutPixelManager.cpp @@ -54,13 +54,14 @@ namespace mlx if(m_current_texture_index >= m_textures.size()) { - VkExtent2D extent; + VkExtent2D extent{ .width = 0, .height = 0 }; if(p_renderer->GetWindow()) extent = kvfGetSwapchainImagesSize(p_renderer->GetSwapchain().Get()); else if(p_renderer->GetRenderTarget()) extent = VkExtent2D{ .width = p_renderer->GetRenderTarget()->GetWidth(), .height = p_renderer->GetRenderTarget()->GetHeight() }; else FatalError("a renderer was created without window nor render target attached (wtf)"); + #ifdef DEBUG m_textures.push_back(std::make_unique(CPUBuffer{}, extent.width, extent.height, 
VK_FORMAT_R8G8B8A8_SRGB, false, "mlx_put_pixel_layer_" + std::to_string(m_current_texture_index))); #else diff --git a/runtime/Sources/Platform/Inputs.cpp b/runtime/Sources/Platform/Inputs.cpp index 25ea36f..dd4c905 100644 --- a/runtime/Sources/Platform/Inputs.cpp +++ b/runtime/Sources/Platform/Inputs.cpp @@ -12,9 +12,13 @@ namespace mlx { if(!m_windows.contains(window_id)) return; - if(!m_events_hooks.contains(window_id) || !m_events_hooks[window_id][event].hook) + if(!m_events_hooks.contains(window_id) || m_events_hooks[window_id][event].empty()) return; - m_events_hooks[window_id][event].hook(code, m_events_hooks[window_id][event].param); + for(const auto& hook : m_events_hooks[window_id][event]) + { + if(hook.fn) + hook.fn(code, hook.param); + } }); } diff --git a/runtime/Sources/Renderer/Descriptor.cpp b/runtime/Sources/Renderer/Descriptor.cpp index 6436770..8d7afca 100644 --- a/runtime/Sources/Renderer/Descriptor.cpp +++ b/runtime/Sources/Renderer/Descriptor.cpp @@ -80,7 +80,7 @@ namespace mlx case ShaderType::Vertex: vulkan_shader_stage = VK_SHADER_STAGE_VERTEX_BIT; break; case ShaderType::Fragment: vulkan_shader_stage = VK_SHADER_STAGE_FRAGMENT_BIT; break; - default : FatalError("wtf"); break; + default: FatalError("wtf"); vulkan_shader_stage = VK_SHADER_STAGE_VERTEX_BIT; /* Just to shut up warnings */ break; } std::vector bindings(layout.binds.size()); diff --git a/runtime/Sources/Renderer/Image.cpp b/runtime/Sources/Renderer/Image.cpp index af34d74..9757ace 100644 --- a/runtime/Sources/Renderer/Image.cpp +++ b/runtime/Sources/Renderer/Image.cpp @@ -43,6 +43,8 @@ namespace mlx void Image::Init(ImageType type, std::uint32_t width, std::uint32_t height, VkFormat format, VkImageTiling tiling, VkImageUsageFlags usage, bool is_multisampled, [[maybe_unused]] std::string_view debug_name) { MLX_PROFILE_FUNCTION(); + Verify(width > 0 && height > 0, "width or height cannot be null"); + m_type = type; m_width = width; m_height = height; @@ -387,6 +389,11 @@ namespace mlx int channels; std::uint8_t* data = stbi_load(filename.c_str(), &size.x, &size.y, &channels, 4); + if(data == nullptr) + { + Error("Image loader: could not load % due to %", file, stbi_failure_reason()); + return nullptr; + } CallOnExit defer([&]() { stbi_image_free(data); }); CPUBuffer buffer(size.x * size.y * 4); @@ -396,10 +403,6 @@ namespace mlx *w = size.x; if(h != nullptr) *h = size.y; - - Texture* texture; - try { texture = new Texture(std::move(buffer), size.x, size.y, VK_FORMAT_R8G8B8A8_SRGB, false, std::move(filename)); } - catch(...) 
{ return nullptr; } - return texture; + return new Texture(std::move(buffer), size.x, size.y, VK_FORMAT_R8G8B8A8_SRGB, false, std::move(filename)); } } diff --git a/xmake.lua b/xmake.lua index c145724..1ffbc7e 100644 --- a/xmake.lua +++ b/xmake.lua @@ -85,6 +85,7 @@ target("mlx") end) target_end() +--[[ target("Test") set_default(false) set_kind("binary") @@ -100,3 +101,4 @@ target("Test") add_packages("libsdl") target_end() +]]-- From b6660b99d61c1e45af91b577f023815c52da173d Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Tue, 17 Dec 2024 03:00:12 +0100 Subject: [PATCH 111/131] fixing documentation in headers --- includes/mlx.h | 10 +++++----- includes/mlx_extended.h | 7 ++++--- 2 files changed, 9 insertions(+), 8 deletions(-) diff --git a/includes/mlx.h b/includes/mlx.h index 56f461e..29babe4 100644 --- a/includes/mlx.h +++ b/includes/mlx.h @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/04 16:56:35 by maldavid #+# #+# */ -/* Updated: 2024/12/17 02:23:40 by maldavid ### ########.fr */ +/* Updated: 2024/12/17 02:58:07 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -207,7 +207,7 @@ MLX_API void mlx_get_screen_size(mlx_context mlx, mlx_window win, int* w, int* h /** - * @brief Gives a function to be executed at each loop turn + * @brief Gives another function to be executed at each loop turn * * @param mlx Internal MLX application * @param f The function * @param param Param to give to the function passed */ @@ -281,7 +281,7 @@ typedef enum mlx_event_type } mlx_event_type; /** - * @brief Gives a function to be executed on event type + * @brief Gives a function to be executed on event type; does not override previously registered functions * * @param mlx Internal MLX application * @param win Internal window @@ -303,7 +303,7 @@ MLX_API void mlx_on_event(mlx_context mlx, mlx_window win, mlx_event_type event, * @param win Internal window * @param x X coordinate * @param y Y coordinate - * @param color Color of the pixel (coded on 4 bytes in an int, 0xRRGGBBAA) + * @param color Color of the pixel */ MLX_API void mlx_pixel_put(mlx_context mlx, mlx_window win, int x, int y, mlx_color color); @@ -405,7 +405,7 @@ MLX_API void mlx_put_image_to_window(mlx_context mlx, mlx_window win, mlx_image * @param win Internal window * @param x X coordinate * @param y Y coordinate - * @param color Color of the pixel (coded on 4 bytes in an int, 0xAARRGGBB) + * @param color Color of the pixel * @param str Text to put */ MLX_API void mlx_string_put(mlx_context mlx, mlx_window win, int x, int y, mlx_color color, char* str); diff --git a/includes/mlx_extended.h b/includes/mlx_extended.h index a67206f..12bbea0 100644 --- a/includes/mlx_extended.h +++ b/includes/mlx_extended.h @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2024/12/14 16:17:10 by maldavid #+# #+# */ -/* Updated: 2024/12/16 20:33:50 by maldavid ### ########.fr */ +/* Updated: 2024/12/17 02:59:50 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -83,7 +83,8 @@ MLX_API void mlx_restore_window(mlx_context mlx, mlx_window win); * @param win Internal window * @param x X coordinate * @param y Y coordinate - * @param pixels Array of pixels (coded on 4 bytes in an int, 0xRRGGBBAA) + * @param pixels Array of pixels + * @param pixels_size Size of the array of pixels */ MLX_API void mlx_pixel_put_array(mlx_context mlx, mlx_window win, int x, int y, mlx_color* pixels, size_t pixels_size); @@ -96,7 +97,7 @@ MLX_API void
mlx_pixel_put_array(mlx_context mlx, mlx_window win, int x, int y, * @param y Y coordinate * @param w Width * @param h Height - * @param pixels Array of pixels (coded on 4 bytes in an int, 0xRRGGBBAA) + * @param pixels Array of pixels * * Note: it is responsability of the user to make sure the size of `pixels` is * big enough for the given region. From b6edf442a2fb4f22616647b6f5f95abf22e101d4 Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Fri, 20 Dec 2024 01:19:15 +0100 Subject: [PATCH 112/131] re-adding render to texture --- experimental/RenderToTexture/build.sh | 4 +- experimental/RenderToTexture/main.c | 69 +++++++++++-------- includes/mlx.h | 11 ++- runtime/Includes/Core/Application.inl | 3 +- runtime/Includes/Core/Handles.h | 3 - runtime/Sources/Core/Graphics.cpp | 25 +++++-- .../Sources/Renderer/RenderPasses/Passes.cpp | 4 +- 7 files changed, 77 insertions(+), 42 deletions(-) diff --git a/experimental/RenderToTexture/build.sh b/experimental/RenderToTexture/build.sh index 3ae5775..5488aa8 100755 --- a/experimental/RenderToTexture/build.sh +++ b/experimental/RenderToTexture/build.sh @@ -5,7 +5,7 @@ if [ -e a.out ]; then fi if [ $(uname -s) = 'Darwin' ]; then - clang main.c ../../libmlx.dylib -L /opt/homebrew/lib -lSDL2 -g; + clang main.c ../../libmlx.dylib -L /opt/homebrew/lib -lSDL2 -lm -g; else - clang main.c ../../libmlx.so -lSDL2 -g -Wall -Wextra -Werror; + clang main.c ../../libmlx.so -lSDL2 -g -Wall -Wextra -Werror -lm; fi diff --git a/experimental/RenderToTexture/main.c b/experimental/RenderToTexture/main.c index fb91e81..5113a98 100644 --- a/experimental/RenderToTexture/main.c +++ b/experimental/RenderToTexture/main.c @@ -1,61 +1,62 @@ +#include +#include #include "../../includes/mlx.h" +#include "../../includes/mlx_extended.h" typedef struct { - void* mlx; - void* win; - void* render_target; - void* render_target_win; + mlx_context mlx; + mlx_window win; + mlx_image render_target; + mlx_window render_target_window; } mlx_t; -int update(void* param) +void update(void* param) { mlx_t* mlx = (mlx_t*)param; - mlx_clear_window(mlx->mlx, mlx->win, 0xFF334D4D); + mlx_clear_window(mlx->mlx, mlx->win, (mlx_color){ .rgba = 0x334D4DFF }); - mlx_string_put(mlx->mlx, mlx->win, 160, 120, 0xFFFF2066, "text"); - mlx_string_put(mlx->mlx, mlx->win, 20, 50, 0xFFFFFFFF, "that's a text"); + mlx_string_put(mlx->mlx, mlx->win, 160, 120, (mlx_color){ .rgba = 0xFF2066FF }, "text"); + mlx_string_put(mlx->mlx, mlx->win, 20, 50, (mlx_color){ .rgba = 0xFFFFFFFF }, "that's a text"); for(int j = 0, color = 0; j < 400; j++) { - mlx_pixel_put(mlx->mlx, mlx->win, j, j, 0xFFFF0000 + color); - mlx_pixel_put(mlx->mlx, mlx->win, 399 - j, j, 0xFF0000FF); + mlx_pixel_put(mlx->mlx, mlx->win, j, j, (mlx_color){ .rgba = 0xFF0000FF + (color << 8) }); + mlx_pixel_put(mlx->mlx, mlx->win, 399 - j, j, (mlx_color){ .rgba = 0x0000FFFF }); color += (color < 255); } for(int j = 0; j < 20; j++) { for(int k = 0; k < 20; k++) - mlx_pixel_put(mlx->mlx, mlx->win, 220 + j, 160 + k, 0xFFFF0000); + mlx_pixel_put(mlx->mlx, mlx->win, 220 + j, 160 + k, (mlx_color){ .rgba = 0xFF0000FF }); } - mlx_string_put(mlx->mlx, mlx->render_target_win, 20, 20, 0xFFAF2BFF, "yippeeee"); + mlx_string_put(mlx->mlx, mlx->render_target_window, 20, 20, (mlx_color){ .rgba = 0xAF2BFFFF }, "yippeeee"); for(int j = 0, color = 0; j < 200; j++) { - mlx_pixel_put(mlx->mlx, mlx->render_target_win, j, j, 0xFFFF0000 + color); - mlx_pixel_put(mlx->mlx, mlx->render_target_win, 199 - j, j, 0xFF0000FF); + mlx_pixel_put(mlx->mlx, mlx->render_target_window, j, j, (mlx_color){ 
.rgba = 0xFF0000FF + (color << 8) }); + mlx_pixel_put(mlx->mlx, mlx->render_target_window, 199 - j, j, (mlx_color){ .rgba = 0x0000FFFF }); color += (color < 255); } - mlx_transform_put_image_to_window(mlx->mlx, mlx->win, mlx->render_target, 5, 250, 0.5f, 33.0f); - - return 0; + static int i = 0; + i++; + mlx_put_transformed_image_to_window(mlx->mlx, mlx->win, mlx->render_target, 5, 100, 1.0f, 1.0f, i); } -int key_hook(int key, void* param) +void key_hook(int key, void* param) { mlx_t* mlx = (mlx_t*)param; if(key == 41) mlx_loop_end(mlx->mlx); - return 0; } -int window_hook(int event, void* param) +void window_hook(int event, void* param) { if(event == 0) mlx_loop_end(((mlx_t*)param)->mlx); - return 0; } int main(void) @@ -63,24 +64,36 @@ int main(void) mlx_t mlx; mlx.mlx = mlx_init(); - mlx.win = mlx_new_resizable_window(mlx.mlx, 400, 400, "My window"); + + mlx_set_fps_goal(mlx.mlx, 60); + + mlx_window_create_info info = { 0 }; + info.title = "My window"; + info.width = 400; + info.height = 400; + info.is_resizable = true; + mlx.win = mlx_new_window(mlx.mlx, &info); mlx.render_target = mlx_new_image(mlx.mlx, 200, 200); - mlx.render_target_win = mlx_new_window(mlx.mlx, 200, 200, (char*)mlx.render_target); - mlx_clear_window(mlx.mlx, mlx.render_target_win, 0xFFC16868); + info.render_target = mlx.render_target; + info.title = NULL; + info.width = 200; + info.height = 200; + mlx.render_target_window = mlx_new_window(mlx.mlx, &info); + mlx_clear_window(mlx.mlx, mlx.render_target_window, (mlx_color){ .rgba = 0xC16868FF }); mlx_on_event(mlx.mlx, mlx.win, MLX_KEYDOWN, key_hook, &mlx); mlx_on_event(mlx.mlx, mlx.win, MLX_WINDOW_EVENT, window_hook, &mlx); - mlx_loop_hook(mlx.mlx, update, &mlx); + mlx_add_loop_hook(mlx.mlx, update, &mlx); mlx_loop(mlx.mlx); mlx_destroy_window(mlx.mlx, mlx.win); - mlx_destroy_window(mlx.mlx, mlx.render_target_win); + mlx_destroy_window(mlx.mlx, mlx.render_target_window); mlx_destroy_image(mlx.mlx, mlx.render_target); - - mlx_destroy_display(mlx.mlx); + + mlx_destroy_context(mlx.mlx); return 0; } diff --git a/includes/mlx.h b/includes/mlx.h index 29babe4..0c4152a 100644 --- a/includes/mlx.h +++ b/includes/mlx.h @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/04 16:56:35 by maldavid #+# #+# */ -/* Updated: 2024/12/17 02:58:07 by maldavid ### ########.fr */ +/* Updated: 2024/12/20 00:42:01 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -97,9 +97,18 @@ MLX_API void mlx_destroy_context(mlx_context mlx); /** * @brief Descriptor structure for window creation + * + * Note: if a valid mlx_image is passed as render_target, this window will not be a real system window + * and will rather act as a gate to use any draw function to draw directly on an image. + * + * Ex: you could use mlx_string_put or mlx_pixel_put to draw on a given image and then use this image + * with mlx_put_image_to_window to render it on a real window. + * + * See experimental/RenderToTexture/main.c for a concrete example. */ typedef struct mlx_window_create_info { + mlx_image render_target; const char* title; int width; int height; diff --git a/runtime/Includes/Core/Application.inl b/runtime/Includes/Core/Application.inl index cbdbbb2..8cc58c3 100644 --- a/runtime/Includes/Core/Application.inl +++ b/runtime/Includes/Core/Application.inl @@ -67,7 +67,8 @@ namespace mlx catch(...) 
{ return nullptr; } m_graphics.emplace_back(std::make_unique(info, m_graphics.size())); - m_in.RegisterWindow(m_graphics.back()->GetWindow()); + if(m_graphics.back()->HasWindow()) + m_in.RegisterWindow(m_graphics.back()->GetWindow()); m_graphics.back()->GetScene().BindFont(p_last_font_bound); window->id = m_graphics.back()->GetID(); return window; diff --git a/runtime/Includes/Core/Handles.h b/runtime/Includes/Core/Handles.h index be97c39..a07381d 100644 --- a/runtime/Includes/Core/Handles.h +++ b/runtime/Includes/Core/Handles.h @@ -1,9 +1,6 @@ #ifndef __MLX_HANDLES__ #define __MLX_HANDLES__ -#include -#include - extern "C" { struct mlx_context_handler diff --git a/runtime/Sources/Core/Graphics.cpp b/runtime/Sources/Core/Graphics.cpp index 413d862..6560495 100644 --- a/runtime/Sources/Core/Graphics.cpp +++ b/runtime/Sources/Core/Graphics.cpp @@ -1,5 +1,7 @@ #include #include +#include +#include namespace mlx { @@ -9,11 +11,24 @@ namespace mlx { MLX_PROFILE_FUNCTION(); - p_window = std::make_shared(info); - m_has_window = true; + if(info->render_target == nullptr) + { + p_window = std::make_shared(info); + m_has_window = true; + } + else + m_has_window = false; - m_renderer.Init(p_window.get()); - m_scene_renderer.Init(nullptr); + if(info->render_target != nullptr) + { + m_renderer.Init(info->render_target->texture); + m_scene_renderer.Init(info->render_target->texture); + } + else + { + m_renderer.Init(p_window.get()); + m_scene_renderer.Init(nullptr); + } p_scene = std::make_unique(); } @@ -25,7 +40,7 @@ namespace mlx m_scene_renderer.Render(*p_scene, m_renderer); m_renderer.EndFrame(); #ifdef GRAPHICS_MEMORY_DUMP - // dump memory to file every two seconds + // Dump memory usage to file every two seconds using namespace std::chrono_literals; static std::int64_t timer = static_cast(std::chrono::duration_cast(std::chrono::high_resolution_clock::now().time_since_epoch()).count()); if(std::chrono::duration{ static_cast(std::chrono::duration_cast(std::chrono::high_resolution_clock::now().time_since_epoch()).count()) - timer } >= 1s) diff --git a/runtime/Sources/Renderer/RenderPasses/Passes.cpp b/runtime/Sources/Renderer/RenderPasses/Passes.cpp index 41987d7..75c09a5 100644 --- a/runtime/Sources/Renderer/RenderPasses/Passes.cpp +++ b/runtime/Sources/Renderer/RenderPasses/Passes.cpp @@ -29,9 +29,9 @@ namespace mlx else extent = kvfGetSwapchainImagesSize(renderer.GetSwapchain().Get()); #ifdef DEBUG - m_main_render_texture.Init({}, extent.width, extent.height, VK_FORMAT_R8G8B8A8_SRGB, false, "mlx_renderpasses_target"); + m_main_render_texture.Init({}, extent.width, extent.height, VK_FORMAT_R8G8B8A8_UNORM, false, "mlx_renderpasses_target"); #else - m_main_render_texture.Init({}, extent.width, extent.height, VK_FORMAT_R8G8B8A8_SRGB, false, {}); + m_main_render_texture.Init({}, extent.width, extent.height, VK_FORMAT_R8G8B8A8_UNORM, false, {}); #endif m_main_render_texture.TransitionLayout(VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL); } From 036fa0e7b69bb87e7bfc1048a8f0783fd23c8f0a Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Sun, 22 Dec 2024 00:40:35 +0000 Subject: [PATCH 113/131] [BOT] update dependencies --- third_party/vulkan/vulkan.cppm | 26 + third_party/vulkan/vulkan.hpp | 2 +- third_party/vulkan/vulkan_core.h | 4 +- third_party/vulkan/vulkan_enums.hpp | 16 +- .../vulkan/vulkan_extension_inspection.hpp | 8 +- third_party/vulkan/vulkan_handles.hpp | 156 ++++ third_party/vulkan/vulkan_structs.hpp | 842 ++++++++++++------ third_party/vulkan/vulkan_to_string.hpp | 4 +- 
third_party/vulkan/vulkan_video.hpp | 361 +++++++- 9 files changed, 1130 insertions(+), 289 deletions(-) diff --git a/third_party/vulkan/vulkan.cppm b/third_party/vulkan/vulkan.cppm index 0ba8645..e48a653 100644 --- a/third_party/vulkan/vulkan.cppm +++ b/third_party/vulkan/vulkan.cppm @@ -22,6 +22,7 @@ module; #include #include #include +#include export module vulkan_hpp; @@ -41,6 +42,7 @@ export namespace VULKAN_HPP_NAMESPACE using VULKAN_HPP_NAMESPACE::detail::DispatchLoaderDynamic; #if !defined( VK_NO_PROTOTYPES ) using VULKAN_HPP_NAMESPACE::detail::DispatchLoaderStatic; + using VULKAN_HPP_NAMESPACE::detail::getDispatchLoaderStatic; #endif /*VK_NO_PROTOTYPES*/ } // namespace detail @@ -2686,6 +2688,30 @@ export namespace VULKAN_HPP_NAMESPACE using VULKAN_HPP_NAMESPACE::ApiVersion14; using VULKAN_HPP_NAMESPACE::HeaderVersionComplete; + //==================== + //=== FUNCPOINTERs === + //==================== + + //=== VK_VERSION_1_0 === + using VULKAN_HPP_NAMESPACE::PFN_AllocationFunction; + using VULKAN_HPP_NAMESPACE::PFN_FreeFunction; + using VULKAN_HPP_NAMESPACE::PFN_InternalAllocationNotification; + using VULKAN_HPP_NAMESPACE::PFN_InternalFreeNotification; + using VULKAN_HPP_NAMESPACE::PFN_ReallocationFunction; + using VULKAN_HPP_NAMESPACE::PFN_VoidFunction; + + //=== VK_EXT_debug_report === + using VULKAN_HPP_NAMESPACE::PFN_DebugReportCallbackEXT; + + //=== VK_EXT_debug_utils === + using VULKAN_HPP_NAMESPACE::PFN_DebugUtilsMessengerCallbackEXT; + + //=== VK_EXT_device_memory_report === + using VULKAN_HPP_NAMESPACE::PFN_DeviceMemoryReportCallbackEXT; + + //=== VK_LUNARG_direct_driver_loading === + using VULKAN_HPP_NAMESPACE::PFN_GetInstanceProcAddrLUNARG; + //=============== //=== STRUCTs === //=============== diff --git a/third_party/vulkan/vulkan.hpp b/third_party/vulkan/vulkan.hpp index 96c7cc8..fce9f3f 100644 --- a/third_party/vulkan/vulkan.hpp +++ b/third_party/vulkan/vulkan.hpp @@ -63,7 +63,7 @@ extern "C" __declspec( dllimport ) FARPROC __stdcall GetProcAddress( HINSTANCE h # include #endif -static_assert( VK_HEADER_VERSION == 303, "Wrong VK_HEADER_VERSION!" ); +static_assert( VK_HEADER_VERSION == 304, "Wrong VK_HEADER_VERSION!" 
); // includes through some other header // this results in major(x) being resolved to gnu_dev_major(x) diff --git a/third_party/vulkan/vulkan_core.h b/third_party/vulkan/vulkan_core.h index 4e716da..f1af7ea 100644 --- a/third_party/vulkan/vulkan_core.h +++ b/third_party/vulkan/vulkan_core.h @@ -69,7 +69,7 @@ extern "C" { #define VK_API_VERSION_1_0 VK_MAKE_API_VERSION(0, 1, 0, 0)// Patch version should always be set to 0 // Version of this file -#define VK_HEADER_VERSION 303 +#define VK_HEADER_VERSION 304 // Complete version of this file #define VK_HEADER_VERSION_COMPLETE VK_MAKE_API_VERSION(0, 1, 4, VK_HEADER_VERSION) @@ -5954,7 +5954,7 @@ typedef enum VkDriverId { VK_DRIVER_ID_MESA_NVK = 24, VK_DRIVER_ID_IMAGINATION_OPEN_SOURCE_MESA = 25, VK_DRIVER_ID_MESA_HONEYKRISP = 26, - VK_DRIVER_ID_RESERVED_27 = 27, + VK_DRIVER_ID_VULKAN_SC_EMULATION_ON_VULKAN = 27, VK_DRIVER_ID_AMD_PROPRIETARY_KHR = VK_DRIVER_ID_AMD_PROPRIETARY, VK_DRIVER_ID_AMD_OPEN_SOURCE_KHR = VK_DRIVER_ID_AMD_OPEN_SOURCE, VK_DRIVER_ID_MESA_RADV_KHR = VK_DRIVER_ID_MESA_RADV, diff --git a/third_party/vulkan/vulkan_enums.hpp b/third_party/vulkan/vulkan_enums.hpp index c7de355..f879bff 100644 --- a/third_party/vulkan/vulkan_enums.hpp +++ b/third_party/vulkan/vulkan_enums.hpp @@ -256,8 +256,8 @@ namespace VULKAN_HPP_NAMESPACE eErrorInvalidDeviceAddressEXT = VK_ERROR_INVALID_DEVICE_ADDRESS_EXT, eErrorInvalidOpaqueCaptureAddressKHR = VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS_KHR, ePipelineCompileRequired = VK_PIPELINE_COMPILE_REQUIRED, - eErrorPipelineCompileRequiredEXT = VK_ERROR_PIPELINE_COMPILE_REQUIRED_EXT, ePipelineCompileRequiredEXT = VK_PIPELINE_COMPILE_REQUIRED_EXT, + eErrorPipelineCompileRequiredEXT = VK_ERROR_PIPELINE_COMPILE_REQUIRED_EXT, eErrorNotPermitted = VK_ERROR_NOT_PERMITTED, eErrorNotPermittedEXT = VK_ERROR_NOT_PERMITTED_EXT, eErrorNotPermittedKHR = VK_ERROR_NOT_PERMITTED_KHR, @@ -413,8 +413,8 @@ namespace VULKAN_HPP_NAMESPACE ePhysicalDeviceMultiviewProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES, ePhysicalDeviceMultiviewPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES_KHR, ePhysicalDeviceVariablePointersFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES, - ePhysicalDeviceVariablePointersFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES_KHR, ePhysicalDeviceVariablePointerFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES, + ePhysicalDeviceVariablePointersFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES_KHR, ePhysicalDeviceVariablePointerFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES_KHR, eProtectedSubmitInfo = VK_STRUCTURE_TYPE_PROTECTED_SUBMIT_INFO, ePhysicalDeviceProtectedMemoryFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_FEATURES, @@ -495,8 +495,8 @@ namespace VULKAN_HPP_NAMESPACE ePhysicalDeviceShaderAtomicInt64Features = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES, ePhysicalDeviceShaderAtomicInt64FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES_KHR, ePhysicalDeviceShaderFloat16Int8Features = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES, - ePhysicalDeviceFloat16Int8FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT16_INT8_FEATURES_KHR, ePhysicalDeviceShaderFloat16Int8FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES_KHR, + ePhysicalDeviceFloat16Int8FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT16_INT8_FEATURES_KHR, 
ePhysicalDeviceFloatControlsProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES, ePhysicalDeviceFloatControlsPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES_KHR, eDescriptorSetLayoutBindingFlagsCreateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO, @@ -675,11 +675,11 @@ namespace VULKAN_HPP_NAMESPACE eDeviceQueueGlobalPriorityCreateInfoEXT = VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_EXT, eDeviceQueueGlobalPriorityCreateInfoKHR = VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_KHR, ePhysicalDeviceGlobalPriorityQueryFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GLOBAL_PRIORITY_QUERY_FEATURES, - ePhysicalDeviceGlobalPriorityQueryFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GLOBAL_PRIORITY_QUERY_FEATURES_EXT, ePhysicalDeviceGlobalPriorityQueryFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GLOBAL_PRIORITY_QUERY_FEATURES_KHR, + ePhysicalDeviceGlobalPriorityQueryFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GLOBAL_PRIORITY_QUERY_FEATURES_EXT, eQueueFamilyGlobalPriorityProperties = VK_STRUCTURE_TYPE_QUEUE_FAMILY_GLOBAL_PRIORITY_PROPERTIES, - eQueueFamilyGlobalPriorityPropertiesEXT = VK_STRUCTURE_TYPE_QUEUE_FAMILY_GLOBAL_PRIORITY_PROPERTIES_EXT, eQueueFamilyGlobalPriorityPropertiesKHR = VK_STRUCTURE_TYPE_QUEUE_FAMILY_GLOBAL_PRIORITY_PROPERTIES_KHR, + eQueueFamilyGlobalPriorityPropertiesEXT = VK_STRUCTURE_TYPE_QUEUE_FAMILY_GLOBAL_PRIORITY_PROPERTIES_EXT, ePhysicalDeviceShaderSubgroupRotateFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_ROTATE_FEATURES, ePhysicalDeviceShaderSubgroupRotateFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_ROTATE_FEATURES_KHR, ePhysicalDeviceShaderFloatControls2Features = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT_CONTROLS_2_FEATURES, @@ -3532,9 +3532,9 @@ namespace VULKAN_HPP_NAMESPACE eStore = VK_ATTACHMENT_STORE_OP_STORE, eDontCare = VK_ATTACHMENT_STORE_OP_DONT_CARE, eNone = VK_ATTACHMENT_STORE_OP_NONE, - eNoneEXT = VK_ATTACHMENT_STORE_OP_NONE_EXT, eNoneKHR = VK_ATTACHMENT_STORE_OP_NONE_KHR, - eNoneQCOM = VK_ATTACHMENT_STORE_OP_NONE_QCOM + eNoneQCOM = VK_ATTACHMENT_STORE_OP_NONE_QCOM, + eNoneEXT = VK_ATTACHMENT_STORE_OP_NONE_EXT }; enum class DependencyFlagBits : VkDependencyFlags @@ -4151,7 +4151,7 @@ namespace VULKAN_HPP_NAMESPACE eMesaNvk = VK_DRIVER_ID_MESA_NVK, eImaginationOpenSourceMESA = VK_DRIVER_ID_IMAGINATION_OPEN_SOURCE_MESA, eMesaHoneykrisp = VK_DRIVER_ID_MESA_HONEYKRISP, - eReserved27 = VK_DRIVER_ID_RESERVED_27 + eVulkanScEmulationOnVulkan = VK_DRIVER_ID_VULKAN_SC_EMULATION_ON_VULKAN }; using DriverIdKHR = DriverId; diff --git a/third_party/vulkan/vulkan_extension_inspection.hpp b/third_party/vulkan/vulkan_extension_inspection.hpp index 4829143..05365bf 100644 --- a/third_party/vulkan/vulkan_extension_inspection.hpp +++ b/third_party/vulkan/vulkan_extension_inspection.hpp @@ -2400,7 +2400,13 @@ namespace VULKAN_HPP_NAMESPACE { { "VK_VERSION_1_0", { { "VK_KHR_cooperative_matrix", - } } } } } + } } } } }, + { "VK_EXT_vertex_attribute_robustness", + { { "VK_VERSION_1_0", + { { + "VK_KHR_get_physical_device_properties2", + } } }, + { "VK_VERSION_1_1", { {} } } } } }; auto depIt = dependencies.find( extension ); return ( depIt != dependencies.end() ) ? 
depIt->second : noDependencies; diff --git a/third_party/vulkan/vulkan_handles.hpp b/third_party/vulkan/vulkan_handles.hpp index 617126a..c983286 100644 --- a/third_party/vulkan/vulkan_handles.hpp +++ b/third_party/vulkan/vulkan_handles.hpp @@ -11633,6 +11633,112 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_KHR_swapchain === + template + VULKAN_HPP_NODISCARD Result mapMemory2( const VULKAN_HPP_NAMESPACE::MemoryMapInfo * pMemoryMapInfo, + void ** ppData, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD typename ResultValueType::type mapMemory2( const VULKAN_HPP_NAMESPACE::MemoryMapInfo & memoryMapInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD Result unmapMemory2( const VULKAN_HPP_NAMESPACE::MemoryUnmapInfo * pMemoryUnmapInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + typename ResultValueType::type unmapMemory2( const VULKAN_HPP_NAMESPACE::MemoryUnmapInfo & memoryUnmapInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void getRenderingAreaGranularity( const VULKAN_HPP_NAMESPACE::RenderingAreaInfo * pRenderingAreaInfo, + VULKAN_HPP_NAMESPACE::Extent2D * pGranularity, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Extent2D + getRenderingAreaGranularity( const VULKAN_HPP_NAMESPACE::RenderingAreaInfo & renderingAreaInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void getImageSubresourceLayout( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfo * pInfo, + VULKAN_HPP_NAMESPACE::SubresourceLayout2 * pLayout, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SubresourceLayout2 + getImageSubresourceLayout( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfo & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getImageSubresourceLayout( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfo & info, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + void getImageSubresourceLayout2( VULKAN_HPP_NAMESPACE::Image image, + const VULKAN_HPP_NAMESPACE::ImageSubresource2 * pSubresource, + VULKAN_HPP_NAMESPACE::SubresourceLayout2 * pLayout, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SubresourceLayout2 + getImageSubresourceLayout2( VULKAN_HPP_NAMESPACE::Image image, + const VULKAN_HPP_NAMESPACE::ImageSubresource2 & subresource, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; + template + VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain + getImageSubresourceLayout2( VULKAN_HPP_NAMESPACE::Image 
image, + const VULKAN_HPP_NAMESPACE::ImageSubresource2 & subresource, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD Result copyMemoryToImage( const VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfo * pCopyMemoryToImageInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + copyMemoryToImage( const VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfo & copyMemoryToImageInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD Result copyImageToMemory( const VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfo * pCopyImageToMemoryInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + copyImageToMemory( const VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfo & copyImageToMemoryInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD Result copyImageToImage( const VULKAN_HPP_NAMESPACE::CopyImageToImageInfo * pCopyImageToImageInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + copyImageToImage( const VULKAN_HPP_NAMESPACE::CopyImageToImageInfo & copyImageToImageInfo, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + template + VULKAN_HPP_NODISCARD Result transitionImageLayout( uint32_t transitionCount, + const VULKAN_HPP_NAMESPACE::HostImageLayoutTransitionInfo * pTransitions, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; +#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE + template + VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type + transitionImageLayout( VULKAN_HPP_NAMESPACE::ArrayProxy const & transitions, + Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; +#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + + //=== VK_KHR_swapchain === + template VULKAN_HPP_NODISCARD Result createSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, @@ -17706,5 +17812,55 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NODISCARD typename ResultValueType::type enumerateInstanceVersion( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ); #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ + // operators to compare vk::-handles +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + template ::value, int>::type = 0> + auto operator<=>( T const & lhs, T const & rhs ) + { + return static_cast( lhs ) <=> static_cast( rhs ); + } +#else + template ::value, int>::type = 0> + bool operator==( T const & lhs, T const & rhs ) + { + return static_cast( lhs ) == static_cast( rhs ); + } + + template ::value, int>::type = 0> + bool operator!=( T const & lhs, T const & rhs ) + { + return static_cast( lhs ) != static_cast( rhs ); + } + + template ::value, int>::type = 0> + bool operator<( T const & 
lhs, T const & rhs ) + { + return static_cast( lhs ) < static_cast( rhs ); + } +#endif + + template ::value, int>::type = 0> + bool operator==( T const & v, std::nullptr_t ) + { + return !v; + } + + template ::value, int>::type = 0> + bool operator==( std::nullptr_t, T const & v ) + { + return !v; + } + + template ::value, int>::type = 0> + bool operator!=( T const & v, std::nullptr_t ) + { + return !!v; + } + + template ::value, int>::type = 0> + bool operator!=( std::nullptr_t, T const & v ) + { + return !!v; + } } // namespace VULKAN_HPP_NAMESPACE #endif diff --git a/third_party/vulkan/vulkan_structs.hpp b/third_party/vulkan/vulkan_structs.hpp index 054482c..1b59c70 100644 --- a/third_party/vulkan/vulkan_structs.hpp +++ b/third_party/vulkan/vulkan_structs.hpp @@ -4483,17 +4483,37 @@ namespace VULKAN_HPP_NAMESPACE using Type = AcquireProfilingLockInfoKHR; }; + typedef void *( VKAPI_PTR * PFN_AllocationFunction )( void * pUserData, + size_t size, + size_t alignment, + VULKAN_HPP_NAMESPACE::SystemAllocationScope allocationScope ); + + typedef void *( VKAPI_PTR * PFN_ReallocationFunction )( + void * pUserData, void * pOriginal, size_t size, size_t alignment, VULKAN_HPP_NAMESPACE::SystemAllocationScope allocationScope ); + + typedef void( VKAPI_PTR * PFN_FreeFunction )( void * pUserData, void * pMemory ); + + typedef void( VKAPI_PTR * PFN_InternalAllocationNotification )( void * pUserData, + size_t size, + VULKAN_HPP_NAMESPACE::InternalAllocationType allocationType, + VULKAN_HPP_NAMESPACE::SystemAllocationScope allocationScope ); + + typedef void( VKAPI_PTR * PFN_InternalFreeNotification )( void * pUserData, + size_t size, + VULKAN_HPP_NAMESPACE::InternalAllocationType allocationType, + VULKAN_HPP_NAMESPACE::SystemAllocationScope allocationScope ); + struct AllocationCallbacks { using NativeType = VkAllocationCallbacks; #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR AllocationCallbacks( void * pUserData_ = {}, - PFN_vkAllocationFunction pfnAllocation_ = {}, - PFN_vkReallocationFunction pfnReallocation_ = {}, - PFN_vkFreeFunction pfnFree_ = {}, - PFN_vkInternalAllocationNotification pfnInternalAllocation_ = {}, - PFN_vkInternalFreeNotification pfnInternalFree_ = {} ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR AllocationCallbacks( void * pUserData_ = {}, + VULKAN_HPP_NAMESPACE::PFN_AllocationFunction pfnAllocation_ = {}, + VULKAN_HPP_NAMESPACE::PFN_ReallocationFunction pfnReallocation_ = {}, + VULKAN_HPP_NAMESPACE::PFN_FreeFunction pfnFree_ = {}, + VULKAN_HPP_NAMESPACE::PFN_InternalAllocationNotification pfnInternalAllocation_ = {}, + VULKAN_HPP_NAMESPACE::PFN_InternalFreeNotification pfnInternalFree_ = {} ) VULKAN_HPP_NOEXCEPT : pUserData{ pUserData_ } , pfnAllocation{ pfnAllocation_ } , pfnReallocation{ pfnReallocation_ } @@ -4509,6 +4529,33 @@ namespace VULKAN_HPP_NAMESPACE { } +# if defined( __clang__ ) || defined( __GNUC__ ) +# pragma GCC diagnostic push +# if defined( __clang__ ) +# pragma clang diagnostic ignored "-Wunknown-warning-option" +# endif +# pragma GCC diagnostic ignored "-Wcast-function-type" +# endif + VULKAN_HPP_DEPRECATED( "This constructor is deprecated. Use the one taking function pointer types from the vk-namespace instead." 
) + + AllocationCallbacks( void * pUserData_, + PFN_vkAllocationFunction pfnAllocation_, + PFN_vkReallocationFunction pfnReallocation_ = {}, + PFN_vkFreeFunction pfnFree_ = {}, + PFN_vkInternalAllocationNotification pfnInternalAllocation_ = {}, + PFN_vkInternalFreeNotification pfnInternalFree_ = {} ) VULKAN_HPP_NOEXCEPT + : AllocationCallbacks( pUserData_, + reinterpret_cast( pfnAllocation_ ), + reinterpret_cast( pfnReallocation_ ), + reinterpret_cast( pfnFree_ ), + reinterpret_cast( pfnInternalAllocation_ ), + reinterpret_cast( pfnInternalFree_ ) ) + { + } +# if defined( __clang__ ) || defined( __GNUC__ ) +# pragma GCC diagnostic pop +# endif + AllocationCallbacks & operator=( AllocationCallbacks const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ @@ -4525,35 +4572,74 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - VULKAN_HPP_CONSTEXPR_14 AllocationCallbacks & setPfnAllocation( PFN_vkAllocationFunction pfnAllocation_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 AllocationCallbacks & setPfnAllocation( VULKAN_HPP_NAMESPACE::PFN_AllocationFunction pfnAllocation_ ) VULKAN_HPP_NOEXCEPT { pfnAllocation = pfnAllocation_; return *this; } - VULKAN_HPP_CONSTEXPR_14 AllocationCallbacks & setPfnReallocation( PFN_vkReallocationFunction pfnReallocation_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 AllocationCallbacks & setPfnReallocation( VULKAN_HPP_NAMESPACE::PFN_ReallocationFunction pfnReallocation_ ) VULKAN_HPP_NOEXCEPT { pfnReallocation = pfnReallocation_; return *this; } - VULKAN_HPP_CONSTEXPR_14 AllocationCallbacks & setPfnFree( PFN_vkFreeFunction pfnFree_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 AllocationCallbacks & setPfnFree( VULKAN_HPP_NAMESPACE::PFN_FreeFunction pfnFree_ ) VULKAN_HPP_NOEXCEPT { pfnFree = pfnFree_; return *this; } - VULKAN_HPP_CONSTEXPR_14 AllocationCallbacks & setPfnInternalAllocation( PFN_vkInternalAllocationNotification pfnInternalAllocation_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 AllocationCallbacks & + setPfnInternalAllocation( VULKAN_HPP_NAMESPACE::PFN_InternalAllocationNotification pfnInternalAllocation_ ) VULKAN_HPP_NOEXCEPT { pfnInternalAllocation = pfnInternalAllocation_; return *this; } - VULKAN_HPP_CONSTEXPR_14 AllocationCallbacks & setPfnInternalFree( PFN_vkInternalFreeNotification pfnInternalFree_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 AllocationCallbacks & setPfnInternalFree( VULKAN_HPP_NAMESPACE::PFN_InternalFreeNotification pfnInternalFree_ ) VULKAN_HPP_NOEXCEPT { pfnInternalFree = pfnInternalFree_; return *this; } + +# if defined( __clang__ ) || defined( __GNUC__ ) +# pragma GCC diagnostic push +# if defined( __clang__ ) +# pragma clang diagnostic ignored "-Wunknown-warning-option" +# endif +# pragma GCC diagnostic ignored "-Wcast-function-type" +# endif + VULKAN_HPP_DEPRECATED( "This setter is deprecated. Use the one taking a function pointer type from the vk-namespace instead." ) + + AllocationCallbacks & setPfnAllocation( PFN_vkAllocationFunction pfnAllocation_ ) VULKAN_HPP_NOEXCEPT + { + return setPfnAllocation( reinterpret_cast( pfnAllocation_ ) ); + } + + VULKAN_HPP_DEPRECATED( "This setter is deprecated. Use the one taking a function pointer type from the vk-namespace instead." ) + + AllocationCallbacks & setPfnReallocation( PFN_vkReallocationFunction pfnReallocation_ ) VULKAN_HPP_NOEXCEPT + { + return setPfnReallocation( reinterpret_cast( pfnReallocation_ ) ); + } + + VULKAN_HPP_DEPRECATED( "This setter is deprecated. 
Use the one taking a function pointer type from the vk-namespace instead." ) + + AllocationCallbacks & setPfnInternalAllocation( PFN_vkInternalAllocationNotification pfnInternalAllocation_ ) VULKAN_HPP_NOEXCEPT + { + return setPfnInternalAllocation( reinterpret_cast( pfnInternalAllocation_ ) ); + } + + VULKAN_HPP_DEPRECATED( "This setter is deprecated. Use the one taking a function pointer type from the vk-namespace instead." ) + + AllocationCallbacks & setPfnInternalFree( PFN_vkInternalFreeNotification pfnInternalFree_ ) VULKAN_HPP_NOEXCEPT + { + return setPfnInternalFree( reinterpret_cast( pfnInternalFree_ ) ); + } +# if defined( __clang__ ) || defined( __GNUC__ ) +# pragma GCC diagnostic pop +# endif #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkAllocationCallbacks const &() const VULKAN_HPP_NOEXCEPT @@ -4571,11 +4657,11 @@ namespace VULKAN_HPP_NAMESPACE auto # else std::tuple + VULKAN_HPP_NAMESPACE::PFN_AllocationFunction const &, + VULKAN_HPP_NAMESPACE::PFN_ReallocationFunction const &, + VULKAN_HPP_NAMESPACE::PFN_FreeFunction const &, + VULKAN_HPP_NAMESPACE::PFN_InternalAllocationNotification const &, + VULKAN_HPP_NAMESPACE::PFN_InternalFreeNotification const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { @@ -4599,12 +4685,12 @@ namespace VULKAN_HPP_NAMESPACE } public: - void * pUserData = {}; - PFN_vkAllocationFunction pfnAllocation = {}; - PFN_vkReallocationFunction pfnReallocation = {}; - PFN_vkFreeFunction pfnFree = {}; - PFN_vkInternalAllocationNotification pfnInternalAllocation = {}; - PFN_vkInternalFreeNotification pfnInternalFree = {}; + void * pUserData = {}; + VULKAN_HPP_NAMESPACE::PFN_AllocationFunction pfnAllocation = {}; + VULKAN_HPP_NAMESPACE::PFN_ReallocationFunction pfnReallocation = {}; + VULKAN_HPP_NAMESPACE::PFN_FreeFunction pfnFree = {}; + VULKAN_HPP_NAMESPACE::PFN_InternalAllocationNotification pfnInternalAllocation = {}; + VULKAN_HPP_NAMESPACE::PFN_InternalFreeNotification pfnInternalFree = {}; }; struct AmigoProfilingSubmitInfoSEC @@ -21679,6 +21765,15 @@ namespace VULKAN_HPP_NAMESPACE using Type = DebugMarkerObjectTagInfoEXT; }; + typedef VULKAN_HPP_NAMESPACE::Bool32( VKAPI_PTR * PFN_DebugReportCallbackEXT )( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags, + VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType, + uint64_t object, + size_t location, + int32_t messageCode, + const char * pLayerPrefix, + const char * pMessage, + void * pUserData ); + struct DebugReportCallbackCreateInfoEXT { using NativeType = VkDebugReportCallbackCreateInfoEXT; @@ -21687,10 +21782,10 @@ namespace VULKAN_HPP_NAMESPACE static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugReportCallbackCreateInfoEXT; #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR DebugReportCallbackCreateInfoEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags_ = {}, - PFN_vkDebugReportCallbackEXT pfnCallback_ = {}, - void * pUserData_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR DebugReportCallbackCreateInfoEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags_ = {}, + VULKAN_HPP_NAMESPACE::PFN_DebugReportCallbackEXT pfnCallback_ = {}, + void * pUserData_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , flags{ flags_ } , pfnCallback{ pfnCallback_ } @@ -21705,6 +21800,26 @@ namespace VULKAN_HPP_NAMESPACE { } +# if defined( __clang__ ) || defined( __GNUC__ ) +# pragma GCC diagnostic push +# if defined( __clang__ ) +# pragma clang diagnostic ignored "-Wunknown-warning-option" 
+# endif +# pragma GCC diagnostic ignored "-Wcast-function-type" +# endif + VULKAN_HPP_DEPRECATED( "This constructor is deprecated. Use the one taking function pointer types from the vk-namespace instead." ) + + DebugReportCallbackCreateInfoEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags_, + PFN_vkDebugReportCallbackEXT pfnCallback_, + void * pUserData_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : DebugReportCallbackCreateInfoEXT( flags_, reinterpret_cast( pfnCallback_ ), pUserData_, pNext_ ) + { + } +# if defined( __clang__ ) || defined( __GNUC__ ) +# pragma GCC diagnostic pop +# endif + DebugReportCallbackCreateInfoEXT & operator=( DebugReportCallbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ @@ -21727,7 +21842,8 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - VULKAN_HPP_CONSTEXPR_14 DebugReportCallbackCreateInfoEXT & setPfnCallback( PFN_vkDebugReportCallbackEXT pfnCallback_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DebugReportCallbackCreateInfoEXT & + setPfnCallback( VULKAN_HPP_NAMESPACE::PFN_DebugReportCallbackEXT pfnCallback_ ) VULKAN_HPP_NOEXCEPT { pfnCallback = pfnCallback_; return *this; @@ -21738,6 +21854,23 @@ namespace VULKAN_HPP_NAMESPACE pUserData = pUserData_; return *this; } + +# if defined( __clang__ ) || defined( __GNUC__ ) +# pragma GCC diagnostic push +# if defined( __clang__ ) +# pragma clang diagnostic ignored "-Wunknown-warning-option" +# endif +# pragma GCC diagnostic ignored "-Wcast-function-type" +# endif + VULKAN_HPP_DEPRECATED( "This setter is deprecated. Use the one taking a function pointer type from the vk-namespace instead." ) + + DebugReportCallbackCreateInfoEXT & setPfnCallback( PFN_vkDebugReportCallbackEXT pfnCallback_ ) VULKAN_HPP_NOEXCEPT + { + return setPfnCallback( reinterpret_cast( pfnCallback_ ) ); + } +# if defined( __clang__ ) || defined( __GNUC__ ) +# pragma GCC diagnostic pop +# endif #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkDebugReportCallbackCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT @@ -21757,7 +21890,7 @@ namespace VULKAN_HPP_NAMESPACE std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT @@ -21781,11 +21914,11 @@ namespace VULKAN_HPP_NAMESPACE } public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugReportCallbackCreateInfoEXT; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags = {}; - PFN_vkDebugReportCallbackEXT pfnCallback = {}; - void * pUserData = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugReportCallbackCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags = {}; + VULKAN_HPP_NAMESPACE::PFN_DebugReportCallbackEXT pfnCallback = {}; + void * pUserData = {}; }; template <> @@ -22313,6 +22446,12 @@ namespace VULKAN_HPP_NAMESPACE using Type = DebugUtilsMessengerCallbackDataEXT; }; + typedef VULKAN_HPP_NAMESPACE::Bool32( VKAPI_PTR * PFN_DebugUtilsMessengerCallbackEXT )( + VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, + VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes, + const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT * pCallbackData, + void * pUserData ); + struct DebugUtilsMessengerCreateInfoEXT { using NativeType = VkDebugUtilsMessengerCreateInfoEXT; @@ -22321,12 +22460,12 @@ namespace VULKAN_HPP_NAMESPACE static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugUtilsMessengerCreateInfoEXT; #if !defined( 
VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR DebugUtilsMessengerCreateInfoEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateFlagsEXT flags_ = {}, - VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagsEXT messageSeverity_ = {}, - VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageType_ = {}, - PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback_ = {}, - void * pUserData_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR DebugUtilsMessengerCreateInfoEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateFlagsEXT flags_ = {}, + VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagsEXT messageSeverity_ = {}, + VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageType_ = {}, + VULKAN_HPP_NAMESPACE::PFN_DebugUtilsMessengerCallbackEXT pfnUserCallback_ = {}, + void * pUserData_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , flags{ flags_ } , messageSeverity{ messageSeverity_ } @@ -22343,6 +22482,33 @@ namespace VULKAN_HPP_NAMESPACE { } +# if defined( __clang__ ) || defined( __GNUC__ ) +# pragma GCC diagnostic push +# if defined( __clang__ ) +# pragma clang diagnostic ignored "-Wunknown-warning-option" +# endif +# pragma GCC diagnostic ignored "-Wcast-function-type" +# endif + VULKAN_HPP_DEPRECATED( "This constructor is deprecated. Use the one taking function pointer types from the vk-namespace instead." ) + + DebugUtilsMessengerCreateInfoEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateFlagsEXT flags_, + VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagsEXT messageSeverity_, + VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageType_, + PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback_, + void * pUserData_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : DebugUtilsMessengerCreateInfoEXT( flags_, + messageSeverity_, + messageType_, + reinterpret_cast( pfnUserCallback_ ), + pUserData_, + pNext_ ) + { + } +# if defined( __clang__ ) || defined( __GNUC__ ) +# pragma GCC diagnostic pop +# endif + DebugUtilsMessengerCreateInfoEXT & operator=( DebugUtilsMessengerCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ @@ -22379,7 +22545,8 @@ namespace VULKAN_HPP_NAMESPACE return *this; } - VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCreateInfoEXT & setPfnUserCallback( PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback_ ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCreateInfoEXT & + setPfnUserCallback( VULKAN_HPP_NAMESPACE::PFN_DebugUtilsMessengerCallbackEXT pfnUserCallback_ ) VULKAN_HPP_NOEXCEPT { pfnUserCallback = pfnUserCallback_; return *this; @@ -22390,6 +22557,23 @@ namespace VULKAN_HPP_NAMESPACE pUserData = pUserData_; return *this; } + +# if defined( __clang__ ) || defined( __GNUC__ ) +# pragma GCC diagnostic push +# if defined( __clang__ ) +# pragma clang diagnostic ignored "-Wunknown-warning-option" +# endif +# pragma GCC diagnostic ignored "-Wcast-function-type" +# endif + VULKAN_HPP_DEPRECATED( "This setter is deprecated. Use the one taking a function pointer type from the vk-namespace instead." 
) + + DebugUtilsMessengerCreateInfoEXT & setPfnUserCallback( PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback_ ) VULKAN_HPP_NOEXCEPT + { + return setPfnUserCallback( reinterpret_cast( pfnUserCallback_ ) ); + } +# if defined( __clang__ ) || defined( __GNUC__ ) +# pragma GCC diagnostic pop +# endif #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkDebugUtilsMessengerCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT @@ -22411,7 +22595,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateFlagsEXT const &, VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagsEXT const &, VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT const &, - PFN_vkDebugUtilsMessengerCallbackEXT const &, + VULKAN_HPP_NAMESPACE::PFN_DebugUtilsMessengerCallbackEXT const &, void * const &> # endif reflect() const VULKAN_HPP_NOEXCEPT @@ -22436,13 +22620,13 @@ namespace VULKAN_HPP_NAMESPACE } public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsMessengerCreateInfoEXT; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateFlagsEXT flags = {}; - VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagsEXT messageSeverity = {}; - VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageType = {}; - PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback = {}; - void * pUserData = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsMessengerCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateFlagsEXT flags = {}; + VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagsEXT messageSeverity = {}; + VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageType = {}; + VULKAN_HPP_NAMESPACE::PFN_DebugUtilsMessengerCallbackEXT pfnUserCallback = {}; + void * pUserData = {}; }; template <> @@ -27842,6 +28026,120 @@ namespace VULKAN_HPP_NAMESPACE using Type = DeviceCreateInfo; }; + struct DeviceMemoryReportCallbackDataEXT + { + using NativeType = VkDeviceMemoryReportCallbackDataEXT; + + static const bool allowDuplicate = false; + static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceMemoryReportCallbackDataEXT; + +#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) + VULKAN_HPP_CONSTEXPR DeviceMemoryReportCallbackDataEXT( + VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagsEXT flags_ = {}, + VULKAN_HPP_NAMESPACE::DeviceMemoryReportEventTypeEXT type_ = VULKAN_HPP_NAMESPACE::DeviceMemoryReportEventTypeEXT::eAllocate, + uint64_t memoryObjectId_ = {}, + VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, + VULKAN_HPP_NAMESPACE::ObjectType objectType_ = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown, + uint64_t objectHandle_ = {}, + uint32_t heapIndex_ = {}, + void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : pNext{ pNext_ } + , flags{ flags_ } + , type{ type_ } + , memoryObjectId{ memoryObjectId_ } + , size{ size_ } + , objectType{ objectType_ } + , objectHandle{ objectHandle_ } + , heapIndex{ heapIndex_ } + { + } + + VULKAN_HPP_CONSTEXPR DeviceMemoryReportCallbackDataEXT( DeviceMemoryReportCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; + + DeviceMemoryReportCallbackDataEXT( VkDeviceMemoryReportCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT + : DeviceMemoryReportCallbackDataEXT( *reinterpret_cast( &rhs ) ) + { + } + + DeviceMemoryReportCallbackDataEXT & operator=( DeviceMemoryReportCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + DeviceMemoryReportCallbackDataEXT & operator=( VkDeviceMemoryReportCallbackDataEXT 
const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkDeviceMemoryReportCallbackDataEXT const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkDeviceMemoryReportCallbackDataEXT &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( sType, pNext, flags, type, memoryObjectId, size, objectType, objectHandle, heapIndex ); + } +#endif + +#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( DeviceMemoryReportCallbackDataEXT const & ) const = default; +#else + bool operator==( DeviceMemoryReportCallbackDataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( type == rhs.type ) && ( memoryObjectId == rhs.memoryObjectId ) && + ( size == rhs.size ) && ( objectType == rhs.objectType ) && ( objectHandle == rhs.objectHandle ) && ( heapIndex == rhs.heapIndex ); +# endif + } + + bool operator!=( DeviceMemoryReportCallbackDataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } +#endif + + public: + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceMemoryReportCallbackDataEXT; + void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagsEXT flags = {}; + VULKAN_HPP_NAMESPACE::DeviceMemoryReportEventTypeEXT type = VULKAN_HPP_NAMESPACE::DeviceMemoryReportEventTypeEXT::eAllocate; + uint64_t memoryObjectId = {}; + VULKAN_HPP_NAMESPACE::DeviceSize size = {}; + VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown; + uint64_t objectHandle = {}; + uint32_t heapIndex = {}; + }; + + template <> + struct CppType + { + using Type = DeviceMemoryReportCallbackDataEXT; + }; + + typedef void( VKAPI_PTR * PFN_DeviceMemoryReportCallbackEXT )( const VULKAN_HPP_NAMESPACE::DeviceMemoryReportCallbackDataEXT * pCallbackData, + void * pUserData ); + struct DeviceDeviceMemoryReportCreateInfoEXT { using NativeType = VkDeviceDeviceMemoryReportCreateInfoEXT; @@ -27850,10 +28148,10 @@ namespace VULKAN_HPP_NAMESPACE static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceDeviceMemoryReportCreateInfoEXT; #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR DeviceDeviceMemoryReportCreateInfoEXT( VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagsEXT flags_ = {}, - PFN_vkDeviceMemoryReportCallbackEXT pfnUserCallback_ = {}, - void * pUserData_ = {}, - const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR DeviceDeviceMemoryReportCreateInfoEXT( VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagsEXT flags_ = {}, + VULKAN_HPP_NAMESPACE::PFN_DeviceMemoryReportCallbackEXT pfnUserCallback_ = {}, + void * pUserData_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , flags{ flags_ } , pfnUserCallback{ pfnUserCallback_ } @@ -27868,6 +28166,29 @@ namespace VULKAN_HPP_NAMESPACE { } +# if defined( __clang__ ) || defined( __GNUC__ ) +# pragma GCC diagnostic push +# if defined( __clang__ ) +# pragma clang diagnostic ignored "-Wunknown-warning-option" +# endif +# pragma GCC diagnostic ignored "-Wcast-function-type" +# endif + VULKAN_HPP_DEPRECATED( "This constructor is deprecated. 
Use the one taking function pointer types from the vk-namespace instead." ) + + DeviceDeviceMemoryReportCreateInfoEXT( VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagsEXT flags_, + PFN_vkDeviceMemoryReportCallbackEXT pfnUserCallback_, + void * pUserData_ = {}, + const void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT + : DeviceDeviceMemoryReportCreateInfoEXT( flags_, + reinterpret_cast( pfnUserCallback_ ), + pUserData_, + pNext_ ) + { + } +# if defined( __clang__ ) || defined( __GNUC__ ) +# pragma GCC diagnostic pop +# endif + DeviceDeviceMemoryReportCreateInfoEXT & operator=( DeviceDeviceMemoryReportCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ @@ -27891,7 +28212,7 @@ namespace VULKAN_HPP_NAMESPACE } VULKAN_HPP_CONSTEXPR_14 DeviceDeviceMemoryReportCreateInfoEXT & - setPfnUserCallback( PFN_vkDeviceMemoryReportCallbackEXT pfnUserCallback_ ) VULKAN_HPP_NOEXCEPT + setPfnUserCallback( VULKAN_HPP_NAMESPACE::PFN_DeviceMemoryReportCallbackEXT pfnUserCallback_ ) VULKAN_HPP_NOEXCEPT { pfnUserCallback = pfnUserCallback_; return *this; @@ -27902,6 +28223,23 @@ namespace VULKAN_HPP_NAMESPACE pUserData = pUserData_; return *this; } + +# if defined( __clang__ ) || defined( __GNUC__ ) +# pragma GCC diagnostic push +# if defined( __clang__ ) +# pragma clang diagnostic ignored "-Wunknown-warning-option" +# endif +# pragma GCC diagnostic ignored "-Wcast-function-type" +# endif + VULKAN_HPP_DEPRECATED( "This setter is deprecated. Use the one taking a function pointer type from the vk-namespace instead." ) + + DeviceDeviceMemoryReportCreateInfoEXT & setPfnUserCallback( PFN_vkDeviceMemoryReportCallbackEXT pfnUserCallback_ ) VULKAN_HPP_NOEXCEPT + { + return setPfnUserCallback( reinterpret_cast( pfnUserCallback_ ) ); + } +# if defined( __clang__ ) || defined( __GNUC__ ) +# pragma GCC diagnostic pop +# endif #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkDeviceDeviceMemoryReportCreateInfoEXT const &() const VULKAN_HPP_NOEXCEPT @@ -27921,7 +28259,7 @@ namespace VULKAN_HPP_NAMESPACE std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT @@ -27946,11 +28284,11 @@ namespace VULKAN_HPP_NAMESPACE } public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceDeviceMemoryReportCreateInfoEXT; - const void * pNext = {}; - VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagsEXT flags = {}; - PFN_vkDeviceMemoryReportCallbackEXT pfnUserCallback = {}; - void * pUserData = {}; + VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceDeviceMemoryReportCreateInfoEXT; + const void * pNext = {}; + VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagsEXT flags = {}; + VULKAN_HPP_NAMESPACE::PFN_DeviceMemoryReportCallbackEXT pfnUserCallback = {}; + void * pUserData = {}; }; template <> @@ -30634,117 +30972,6 @@ namespace VULKAN_HPP_NAMESPACE using Type = DeviceMemoryOverallocationCreateInfoAMD; }; - struct DeviceMemoryReportCallbackDataEXT - { - using NativeType = VkDeviceMemoryReportCallbackDataEXT; - - static const bool allowDuplicate = false; - static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceMemoryReportCallbackDataEXT; - -#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) - VULKAN_HPP_CONSTEXPR DeviceMemoryReportCallbackDataEXT( - VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagsEXT flags_ = {}, - VULKAN_HPP_NAMESPACE::DeviceMemoryReportEventTypeEXT type_ = VULKAN_HPP_NAMESPACE::DeviceMemoryReportEventTypeEXT::eAllocate, - uint64_t memoryObjectId_ = {}, - VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, - 
VULKAN_HPP_NAMESPACE::ObjectType objectType_ = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown, - uint64_t objectHandle_ = {}, - uint32_t heapIndex_ = {}, - void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT - : pNext{ pNext_ } - , flags{ flags_ } - , type{ type_ } - , memoryObjectId{ memoryObjectId_ } - , size{ size_ } - , objectType{ objectType_ } - , objectHandle{ objectHandle_ } - , heapIndex{ heapIndex_ } - { - } - - VULKAN_HPP_CONSTEXPR DeviceMemoryReportCallbackDataEXT( DeviceMemoryReportCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; - - DeviceMemoryReportCallbackDataEXT( VkDeviceMemoryReportCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT - : DeviceMemoryReportCallbackDataEXT( *reinterpret_cast( &rhs ) ) - { - } - - DeviceMemoryReportCallbackDataEXT & operator=( DeviceMemoryReportCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - DeviceMemoryReportCallbackDataEXT & operator=( VkDeviceMemoryReportCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT - { - *this = *reinterpret_cast( &rhs ); - return *this; - } - - operator VkDeviceMemoryReportCallbackDataEXT const &() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkDeviceMemoryReportCallbackDataEXT &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION - auto -# else - std::tuple -# endif - reflect() const VULKAN_HPP_NOEXCEPT - { - return std::tie( sType, pNext, flags, type, memoryObjectId, size, objectType, objectHandle, heapIndex ); - } -#endif - -#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) - auto operator<=>( DeviceMemoryReportCallbackDataEXT const & ) const = default; -#else - bool operator==( DeviceMemoryReportCallbackDataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT - { -# if defined( VULKAN_HPP_USE_REFLECT ) - return this->reflect() == rhs.reflect(); -# else - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( type == rhs.type ) && ( memoryObjectId == rhs.memoryObjectId ) && - ( size == rhs.size ) && ( objectType == rhs.objectType ) && ( objectHandle == rhs.objectHandle ) && ( heapIndex == rhs.heapIndex ); -# endif - } - - bool operator!=( DeviceMemoryReportCallbackDataEXT const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return !operator==( rhs ); - } -#endif - - public: - VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceMemoryReportCallbackDataEXT; - void * pNext = {}; - VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagsEXT flags = {}; - VULKAN_HPP_NAMESPACE::DeviceMemoryReportEventTypeEXT type = VULKAN_HPP_NAMESPACE::DeviceMemoryReportEventTypeEXT::eAllocate; - uint64_t memoryObjectId = {}; - VULKAN_HPP_NAMESPACE::DeviceSize size = {}; - VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown; - uint64_t objectHandle = {}; - uint32_t heapIndex = {}; - }; - - template <> - struct CppType - { - using Type = DeviceMemoryReportCallbackDataEXT; - }; - #if defined( VK_ENABLE_BETA_EXTENSIONS ) union DeviceOrHostAddressConstAMDX { @@ -31301,6 +31528,8 @@ namespace VULKAN_HPP_NAMESPACE using Type = DeviceQueueShaderCoreControlCreateInfoARM; }; + typedef PFN_vkVoidFunction( VKAPI_PTR * PFN_GetInstanceProcAddrLUNARG )( VULKAN_HPP_NAMESPACE::Instance instance, const char * pName ); + struct DirectDriverLoadingInfoLUNARG { using NativeType = VkDirectDriverLoadingInfoLUNARG; @@ -31310,7 +31539,7 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS 
) VULKAN_HPP_CONSTEXPR DirectDriverLoadingInfoLUNARG( VULKAN_HPP_NAMESPACE::DirectDriverLoadingFlagsLUNARG flags_ = {}, - PFN_vkGetInstanceProcAddrLUNARG pfnGetInstanceProcAddr_ = {}, + VULKAN_HPP_NAMESPACE::PFN_GetInstanceProcAddrLUNARG pfnGetInstanceProcAddr_ = {}, void * pNext_ = nullptr ) VULKAN_HPP_NOEXCEPT : pNext{ pNext_ } , flags{ flags_ } @@ -31348,7 +31577,7 @@ namespace VULKAN_HPP_NAMESPACE } VULKAN_HPP_CONSTEXPR_14 DirectDriverLoadingInfoLUNARG & - setPfnGetInstanceProcAddr( PFN_vkGetInstanceProcAddrLUNARG pfnGetInstanceProcAddr_ ) VULKAN_HPP_NOEXCEPT + setPfnGetInstanceProcAddr( VULKAN_HPP_NAMESPACE::PFN_GetInstanceProcAddrLUNARG pfnGetInstanceProcAddr_ ) VULKAN_HPP_NOEXCEPT { pfnGetInstanceProcAddr = pfnGetInstanceProcAddr_; return *this; @@ -31372,7 +31601,7 @@ namespace VULKAN_HPP_NAMESPACE std::tuple + VULKAN_HPP_NAMESPACE::PFN_GetInstanceProcAddrLUNARG const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { @@ -31398,7 +31627,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDirectDriverLoadingInfoLUNARG; void * pNext = {}; VULKAN_HPP_NAMESPACE::DirectDriverLoadingFlagsLUNARG flags = {}; - PFN_vkGetInstanceProcAddrLUNARG pfnGetInstanceProcAddr = {}; + VULKAN_HPP_NAMESPACE::PFN_GetInstanceProcAddrLUNARG pfnGetInstanceProcAddr = {}; }; template <> @@ -52427,7 +52656,6 @@ namespace VULKAN_HPP_NAMESPACE return std::strong_ordering::equivalent; } -#endif bool operator==( InstanceCreateInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { @@ -53246,6 +53474,7 @@ namespace VULKAN_HPP_NAMESPACE ( ( pSettingName == rhs.pSettingName ) || ( strcmp( pSettingName, rhs.pSettingName ) == 0 ) ) && ( type == rhs.type ) && ( valueCount == rhs.valueCount ) && ( pValues == rhs.pValues ); } +#endif bool operator!=( LayerSettingEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { @@ -54457,6 +54686,7 @@ namespace VULKAN_HPP_NAMESPACE memory = memory_; return *this; } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ VULKAN_HPP_CONSTEXPR_14 MemoryGetRemoteAddressInfoNV & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT @@ -55686,6 +55916,7 @@ namespace VULKAN_HPP_NAMESPACE { return *reinterpret_cast( this ); } +# endif operator VkMemoryWin32HandlePropertiesKHR &() VULKAN_HPP_NOEXCEPT { @@ -55842,6 +56073,14 @@ namespace VULKAN_HPP_NAMESPACE MetalSurfaceCreateInfoEXT( VkMetalSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : MetalSurfaceCreateInfoEXT( *reinterpret_cast( &rhs ) ) { +# ifdef VULKAN_HPP_NO_EXCEPTIONS + VULKAN_HPP_ASSERT( ( !usageCounts_.empty() + !pUsageCounts_.empty() ) <= 1 ); +# else + if ( 1 < ( !usageCounts_.empty() + !pUsageCounts_.empty() ) ) + { + throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::MicromapBuildInfoEXT::MicromapBuildInfoEXT: 1 < ( !usageCounts_.empty() + !pUsageCounts_.empty() )" ); + } +# endif /*VULKAN_HPP_NO_EXCEPTIONS*/ } MetalSurfaceCreateInfoEXT & operator=( MetalSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; @@ -55912,7 +56151,9 @@ namespace VULKAN_HPP_NAMESPACE bool operator!=( MetalSurfaceCreateInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return !operator==( rhs ); + usageCountsCount = static_cast( usageCounts_.size() ); + pUsageCounts = usageCounts_.data(); + return *this; } # endif @@ -55963,6 +56204,8 @@ namespace VULKAN_HPP_NAMESPACE , triangleArray{ triangleArray_ } , triangleArrayStride{ triangleArrayStride_ } { + ppUsageCounts = ppUsageCounts_; + return *this; } VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT( MicromapBuildInfoEXT const & 
rhs ) VULKAN_HPP_NOEXCEPT = default; @@ -55970,7 +56213,11 @@ namespace VULKAN_HPP_NAMESPACE MicromapBuildInfoEXT( VkMicromapBuildInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : MicromapBuildInfoEXT( *reinterpret_cast( &rhs ) ) { + usageCountsCount = static_cast( pUsageCounts_.size() ); + ppUsageCounts = pUsageCounts_.data(); + return *this; } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) MicromapBuildInfoEXT( VULKAN_HPP_NAMESPACE::MicromapTypeEXT type_, @@ -56020,7 +56267,7 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 MicromapBuildInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; + scratchData = scratchData_; return *this; } @@ -56333,7 +56580,7 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 MicromapCreateInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; + dataOffset = dataOffset_; return *this; } @@ -56565,7 +56812,7 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 MicromapVersionInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; + firstIndex = firstIndex_; return *this; } @@ -56645,6 +56892,8 @@ namespace VULKAN_HPP_NAMESPACE MultiDrawIndexedInfoEXT( VkMultiDrawIndexedInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT : MultiDrawIndexedInfoEXT( *reinterpret_cast( &rhs ) ) { + *this = *reinterpret_cast( &rhs ); + return *this; } MultiDrawIndexedInfoEXT & operator=( MultiDrawIndexedInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; @@ -56655,6 +56904,7 @@ namespace VULKAN_HPP_NAMESPACE *this = *reinterpret_cast( &rhs ); return *this; } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 MultiDrawIndexedInfoEXT & setFirstIndex( uint32_t firstIndex_ ) VULKAN_HPP_NOEXCEPT @@ -56839,6 +57089,7 @@ namespace VULKAN_HPP_NAMESPACE { return *reinterpret_cast( this ); } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkMultisamplePropertiesEXT &() VULKAN_HPP_NOEXCEPT { @@ -57052,12 +57303,13 @@ namespace VULKAN_HPP_NAMESPACE perViewAttributesPositionXOnly = perViewAttributesPositionXOnly_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkMultiviewPerViewAttributesInfoNVX const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkMultiviewPerViewAttributesInfoNVX &() VULKAN_HPP_NOEXCEPT { @@ -57154,7 +57406,7 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; + descriptorTypeCount = descriptorTypeCount_; return *this; } @@ -57301,6 +57553,8 @@ namespace VULKAN_HPP_NAMESPACE { return *reinterpret_cast( this ); } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkMutableDescriptorTypeListEXT &() VULKAN_HPP_NOEXCEPT { @@ -57759,7 +58013,6 @@ namespace VULKAN_HPP_NAMESPACE usage = usage_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkOpticalFlowImageFormatInfoNV const &() const VULKAN_HPP_NOEXCEPT { @@ -58366,7 +58619,6 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } -#endif public: uint32_t presentID = {}; @@ -58503,6 +58755,8 @@ namespace VULKAN_HPP_NAMESPACE 
PerformanceCounterDescriptionKHR( VkPerformanceCounterDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT : PerformanceCounterDescriptionKHR( *reinterpret_cast( &rhs ) ) { + int32 = int32_; + return *this; } PerformanceCounterDescriptionKHR & operator=( PerformanceCounterDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; @@ -58569,7 +58823,7 @@ namespace VULKAN_HPP_NAMESPACE bool operator!=( PerformanceCounterDescriptionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return !operator==( rhs ); + return *reinterpret_cast( this ); } public: @@ -59018,12 +59272,12 @@ namespace VULKAN_HPP_NAMESPACE counterPassIndex = counterPassIndex_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkPerformanceQuerySubmitInfoKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } +#endif /*VULKAN_HPP_NO_UNION_SETTERS*/ operator VkPerformanceQuerySubmitInfoKHR &() VULKAN_HPP_NOEXCEPT { @@ -59040,7 +59294,6 @@ namespace VULKAN_HPP_NAMESPACE { return std::tie( sType, pNext, counterPassIndex ); } -#endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PerformanceQuerySubmitInfoKHR const & ) const = default; @@ -59114,7 +59367,6 @@ namespace VULKAN_HPP_NAMESPACE marker = marker_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkPerformanceStreamMarkerInfoINTEL const &() const VULKAN_HPP_NOEXCEPT { @@ -59600,7 +59852,6 @@ namespace VULKAN_HPP_NAMESPACE storagePushConstant8 = storagePushConstant8_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkPhysicalDevice8BitStorageFeatures const &() const VULKAN_HPP_NOEXCEPT { @@ -60486,7 +60737,6 @@ namespace VULKAN_HPP_NAMESPACE attachmentFeedbackLoopLayout = attachmentFeedbackLoopLayout_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkPhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { @@ -61266,6 +61516,7 @@ namespace VULKAN_HPP_NAMESPACE *this = *reinterpret_cast( &rhs ); return *this; } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkPhysicalDeviceClusterCullingShaderPropertiesHUAWEI const &() const VULKAN_HPP_NOEXCEPT { @@ -61670,7 +61921,6 @@ namespace VULKAN_HPP_NAMESPACE commandBufferInheritance = commandBufferInheritance_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkPhysicalDeviceCommandBufferInheritanceFeaturesNV const &() const VULKAN_HPP_NOEXCEPT { @@ -61992,7 +62242,15 @@ namespace VULKAN_HPP_NAMESPACE # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { @@ -62496,7 +62754,6 @@ namespace VULKAN_HPP_NAMESPACE cooperativeMatrixRobustBufferAccess = cooperativeMatrixRobustBufferAccess_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkPhysicalDeviceCooperativeMatrixFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT { @@ -63140,7 +63397,7 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCoverageReductionModeFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; + *this = *reinterpret_cast( &rhs ); return *this; } @@ -63150,7 +63407,7 @@ namespace VULKAN_HPP_NAMESPACE coverageReductionMode = coverageReductionMode_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkPhysicalDeviceCoverageReductionModeFeaturesNV const &() const VULKAN_HPP_NOEXCEPT { @@ -63162,8 +63419,8 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } -#if defined( 
VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple @@ -63172,14 +63429,14 @@ namespace VULKAN_HPP_NAMESPACE { return std::tie( sType, pNext, coverageReductionMode ); } -#endif +# endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceCoverageReductionModeFeaturesNV const & ) const = default; #else bool operator==( PhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( coverageReductionMode == rhs.coverageReductionMode ); @@ -63190,7 +63447,7 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } -#endif +# endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCoverageReductionModeFeaturesNV; @@ -63203,6 +63460,7 @@ namespace VULKAN_HPP_NAMESPACE { using Type = PhysicalDeviceCoverageReductionModeFeaturesNV; }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ struct PhysicalDeviceCubicClampFeaturesQCOM { @@ -63246,7 +63504,6 @@ namespace VULKAN_HPP_NAMESPACE cubicRangeClamp = cubicRangeClamp_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkPhysicalDeviceCubicClampFeaturesQCOM const &() const VULKAN_HPP_NOEXCEPT { @@ -63258,8 +63515,8 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple @@ -63268,14 +63525,14 @@ namespace VULKAN_HPP_NAMESPACE { return std::tie( sType, pNext, cubicRangeClamp ); } -#endif +# endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceCubicClampFeaturesQCOM const & ) const = default; #else bool operator==( PhysicalDeviceCubicClampFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( cubicRangeClamp == rhs.cubicRangeClamp ); @@ -63286,7 +63543,7 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } -#endif +# endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCubicClampFeaturesQCOM; @@ -63299,6 +63556,7 @@ namespace VULKAN_HPP_NAMESPACE { using Type = PhysicalDeviceCubicClampFeaturesQCOM; }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ struct PhysicalDeviceCubicWeightsFeaturesQCOM { @@ -64172,7 +64430,12 @@ namespace VULKAN_HPP_NAMESPACE # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { @@ -64254,7 +64517,6 @@ namespace VULKAN_HPP_NAMESPACE depthClipControl = depthClipControl_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkPhysicalDeviceDepthClipControlFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { @@ -64559,7 +64821,6 @@ namespace VULKAN_HPP_NAMESPACE { return std::tie( sType, pNext, combinedImageSamplerDensityMapDescriptorSize ); } -#endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT const & ) const = default; @@ -64576,9 +64837,9 @@ namespace VULKAN_HPP_NAMESPACE bool operator!=( PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT const & rhs ) 
const VULKAN_HPP_NOEXCEPT { - return !operator==( rhs ); + shaderStorageImageArrayNonUniformIndexing = shaderStorageImageArrayNonUniformIndexing_; + return *this; } -#endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDescriptorBufferDensityMapPropertiesEXT; @@ -64611,6 +64872,8 @@ namespace VULKAN_HPP_NAMESPACE , descriptorBufferImageLayoutIgnored{ descriptorBufferImageLayoutIgnored_ } , descriptorBufferPushDescriptors{ descriptorBufferPushDescriptors_ } { + descriptorBindingSampledImageUpdateAfterBind = descriptorBindingSampledImageUpdateAfterBind_; + return *this; } VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorBufferFeaturesEXT( PhysicalDeviceDescriptorBufferFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; @@ -64618,6 +64881,8 @@ namespace VULKAN_HPP_NAMESPACE PhysicalDeviceDescriptorBufferFeaturesEXT( VkPhysicalDeviceDescriptorBufferFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceDescriptorBufferFeaturesEXT( *reinterpret_cast( &rhs ) ) { + descriptorBindingStorageBufferUpdateAfterBind = descriptorBindingStorageBufferUpdateAfterBind_; + return *this; } PhysicalDeviceDescriptorBufferFeaturesEXT & operator=( PhysicalDeviceDescriptorBufferFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; @@ -65441,6 +65706,7 @@ namespace VULKAN_HPP_NAMESPACE { return *reinterpret_cast( this ); } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION @@ -65639,7 +65905,11 @@ namespace VULKAN_HPP_NAMESPACE # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { @@ -65739,7 +66009,7 @@ namespace VULKAN_HPP_NAMESPACE # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { @@ -65958,7 +66228,6 @@ namespace VULKAN_HPP_NAMESPACE dynamicGeneratedPipelineLayout = dynamicGeneratedPipelineLayout_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkPhysicalDeviceDeviceGeneratedCommandsFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { @@ -66178,6 +66447,7 @@ namespace VULKAN_HPP_NAMESPACE { return *reinterpret_cast( this ); } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION @@ -66546,7 +66816,7 @@ namespace VULKAN_HPP_NAMESPACE diagnosticsConfig = diagnosticsConfig_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkPhysicalDeviceDiagnosticsConfigFeaturesNV const &() const VULKAN_HPP_NOEXCEPT { @@ -66558,24 +66828,24 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else +# else std::tuple -# endif +# endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, diagnosticsConfig ); } -#endif +# endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceDiagnosticsConfigFeaturesNV const & ) const = default; #else bool operator==( PhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( diagnosticsConfig == rhs.diagnosticsConfig ); @@ -66586,7 +66856,7 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs 
); } -#endif +# endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDiagnosticsConfigFeaturesNV; @@ -66599,6 +66869,7 @@ namespace VULKAN_HPP_NAMESPACE { using Type = PhysicalDeviceDiagnosticsConfigFeaturesNV; }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ struct PhysicalDeviceDiscardRectanglePropertiesEXT { @@ -66640,8 +66911,8 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple @@ -66650,14 +66921,14 @@ namespace VULKAN_HPP_NAMESPACE { return std::tie( sType, pNext, maxDiscardRectangles ); } -#endif +# endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceDiscardRectanglePropertiesEXT const & ) const = default; #else bool operator==( PhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxDiscardRectangles == rhs.maxDiscardRectangles ); @@ -66668,7 +66939,7 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } -#endif +# endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDiscardRectanglePropertiesEXT; @@ -66681,6 +66952,7 @@ namespace VULKAN_HPP_NAMESPACE { using Type = PhysicalDeviceDiscardRectanglePropertiesEXT; }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ #if defined( VK_ENABLE_BETA_EXTENSIONS ) struct PhysicalDeviceDisplacementMicromapFeaturesNV @@ -66762,6 +67034,7 @@ namespace VULKAN_HPP_NAMESPACE return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( displacementMicromap == rhs.displacementMicromap ); # endif } +#endif bool operator!=( PhysicalDeviceDisplacementMicromapFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { @@ -67432,7 +67705,6 @@ namespace VULKAN_HPP_NAMESPACE exclusiveScissor = exclusiveScissor_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkPhysicalDeviceExclusiveScissorFeaturesNV const &() const VULKAN_HPP_NOEXCEPT { @@ -67575,7 +67847,6 @@ namespace VULKAN_HPP_NAMESPACE { return std::tie( sType, pNext, extendedDynamicState2, extendedDynamicState2LogicOp, extendedDynamicState2PatchControlPoints ); } -#endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceExtendedDynamicState2FeaturesEXT const & ) const = default; @@ -67593,7 +67864,8 @@ namespace VULKAN_HPP_NAMESPACE bool operator!=( PhysicalDeviceExtendedDynamicState2FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return !operator==( rhs ); + extendedDynamicState3PolygonMode = extendedDynamicState3PolygonMode_; + return *this; } #endif @@ -67685,6 +67957,8 @@ namespace VULKAN_HPP_NAMESPACE , extendedDynamicState3RepresentativeFragmentTestEnable{ extendedDynamicState3RepresentativeFragmentTestEnable_ } , extendedDynamicState3ShadingRateImageEnable{ extendedDynamicState3ShadingRateImageEnable_ } { + extendedDynamicState3RasterizationSamples = extendedDynamicState3RasterizationSamples_; + return *this; } VULKAN_HPP_CONSTEXPR @@ -67693,6 +67967,8 @@ namespace VULKAN_HPP_NAMESPACE PhysicalDeviceExtendedDynamicState3FeaturesEXT( VkPhysicalDeviceExtendedDynamicState3FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceExtendedDynamicState3FeaturesEXT( *reinterpret_cast( &rhs ) ) { + extendedDynamicState3SampleMask = 
extendedDynamicState3SampleMask_; + return *this; } PhysicalDeviceExtendedDynamicState3FeaturesEXT & operator=( PhysicalDeviceExtendedDynamicState3FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; @@ -67707,7 +67983,7 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; + extendedDynamicState3AlphaToOneEnable = extendedDynamicState3AlphaToOneEnable_; return *this; } @@ -68777,6 +69053,7 @@ namespace VULKAN_HPP_NAMESPACE { return *reinterpret_cast( this ); } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkPhysicalDeviceExternalFormatResolveFeaturesANDROID &() VULKAN_HPP_NOEXCEPT { @@ -70424,6 +70701,7 @@ namespace VULKAN_HPP_NAMESPACE : PhysicalDeviceFragmentShaderBarycentricFeaturesKHR( *reinterpret_cast( &rhs ) ) { } +#endif PhysicalDeviceFragmentShaderBarycentricFeaturesKHR & operator=( PhysicalDeviceFragmentShaderBarycentricFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; @@ -70441,6 +70719,7 @@ namespace VULKAN_HPP_NAMESPACE pNext = pNext_; return *this; } +#endif VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShaderBarycentricFeaturesKHR & setFragmentShaderBarycentric( VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderBarycentric_ ) VULKAN_HPP_NOEXCEPT @@ -70483,6 +70762,7 @@ namespace VULKAN_HPP_NAMESPACE return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fragmentShaderBarycentric == rhs.fragmentShaderBarycentric ); # endif } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ bool operator!=( PhysicalDeviceFragmentShaderBarycentricFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { @@ -79008,7 +79288,6 @@ namespace VULKAN_HPP_NAMESPACE multiDraw = multiDraw_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkPhysicalDeviceMultiDrawFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { @@ -79024,7 +79303,7 @@ namespace VULKAN_HPP_NAMESPACE # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { @@ -79192,7 +79471,6 @@ namespace VULKAN_HPP_NAMESPACE multisampledRenderToSingleSampled = multisampledRenderToSingleSampled_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkPhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { @@ -79203,12 +79481,27 @@ namespace VULKAN_HPP_NAMESPACE { return *reinterpret_cast( this ); } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceMultiviewFeatures const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceMultiviewFeatures &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { @@ -79601,7 +79894,6 @@ namespace VULKAN_HPP_NAMESPACE multiviewPerViewViewports = multiviewPerViewViewports_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkPhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM const &() const VULKAN_HPP_NOEXCEPT { @@ -79617,7 +79909,7 @@ namespace VULKAN_HPP_NAMESPACE # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { @@ -80128,7 +80420,11 @@ namespace VULKAN_HPP_NAMESPACE # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { @@ -82396,7 +82692,8 @@ 
namespace VULKAN_HPP_NAMESPACE bool operator!=( PhysicalDevicePortabilitySubsetFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return !operator==( rhs ); + multisampleArrayImage = multisampleArrayImage_; + return *this; } # endif @@ -82441,6 +82738,8 @@ namespace VULKAN_HPP_NAMESPACE : pNext{ pNext_ } , minVertexInputBindingStrideAlignment{ minVertexInputBindingStrideAlignment_ } { + mutableComparisonSamplers = mutableComparisonSamplers_; + return *this; } VULKAN_HPP_CONSTEXPR PhysicalDevicePortabilitySubsetPropertiesKHR( PhysicalDevicePortabilitySubsetPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; @@ -82448,6 +82747,8 @@ namespace VULKAN_HPP_NAMESPACE PhysicalDevicePortabilitySubsetPropertiesKHR( VkPhysicalDevicePortabilitySubsetPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDevicePortabilitySubsetPropertiesKHR( *reinterpret_cast( &rhs ) ) { + pointPolygons = pointPolygons_; + return *this; } PhysicalDevicePortabilitySubsetPropertiesKHR & operator=( PhysicalDevicePortabilitySubsetPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; @@ -82462,7 +82763,7 @@ namespace VULKAN_HPP_NAMESPACE # if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetPropertiesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; + separateStencilMaskRef = separateStencilMaskRef_; return *this; } @@ -82483,6 +82784,7 @@ namespace VULKAN_HPP_NAMESPACE { return *reinterpret_cast( this ); } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ # if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION @@ -82581,9 +82883,20 @@ namespace VULKAN_HPP_NAMESPACE { return *reinterpret_cast( this ); } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION + operator VkPhysicalDevicePortabilitySubsetPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevicePortabilitySubsetPropertiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple @@ -82592,14 +82905,14 @@ namespace VULKAN_HPP_NAMESPACE { return std::tie( sType, pNext, presentBarrier ); } -#endif +# endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDevicePresentBarrierFeaturesNV const & ) const = default; #else bool operator==( PhysicalDevicePresentBarrierFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( presentBarrier == rhs.presentBarrier ); @@ -82610,7 +82923,7 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } -#endif +# endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePresentBarrierFeaturesNV; @@ -82623,6 +82936,7 @@ namespace VULKAN_HPP_NAMESPACE { using Type = PhysicalDevicePresentBarrierFeaturesNV; }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ struct PhysicalDevicePresentIdFeaturesKHR { @@ -86972,7 +87286,6 @@ namespace VULKAN_HPP_NAMESPACE sparseImageFloat32Atomics, sparseImageFloat32AtomicAdd ); } -#endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceShaderAtomicFloatFeaturesEXT const & ) const = default; @@ -86994,7 +87307,8 @@ namespace VULKAN_HPP_NAMESPACE bool operator!=( PhysicalDeviceShaderAtomicFloatFeaturesEXT 
const & rhs ) const VULKAN_HPP_NOEXCEPT { - return !operator==( rhs ); + shaderBufferFloat64AtomicAdd = shaderBufferFloat64AtomicAdd_; + return *this; } #endif @@ -87036,6 +87350,8 @@ namespace VULKAN_HPP_NAMESPACE , shaderBufferInt64Atomics{ shaderBufferInt64Atomics_ } , shaderSharedInt64Atomics{ shaderSharedInt64Atomics_ } { + shaderSharedFloat32AtomicAdd = shaderSharedFloat32AtomicAdd_; + return *this; } VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicInt64Features( PhysicalDeviceShaderAtomicInt64Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; @@ -87043,6 +87359,8 @@ namespace VULKAN_HPP_NAMESPACE PhysicalDeviceShaderAtomicInt64Features( VkPhysicalDeviceShaderAtomicInt64Features const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceShaderAtomicInt64Features( *reinterpret_cast( &rhs ) ) { + shaderSharedFloat64AtomicAdd = shaderSharedFloat64AtomicAdd_; + return *this; } PhysicalDeviceShaderAtomicInt64Features & operator=( PhysicalDeviceShaderAtomicInt64Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; @@ -87057,7 +87375,7 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicInt64Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; + sparseImageFloat32Atomics = sparseImageFloat32Atomics_; return *this; } @@ -87284,12 +87602,12 @@ namespace VULKAN_HPP_NAMESPACE shaderCoreBuiltins = shaderCoreBuiltins_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkPhysicalDeviceShaderCoreBuiltinsFeaturesARM const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkPhysicalDeviceShaderCoreBuiltinsFeaturesARM &() VULKAN_HPP_NOEXCEPT { @@ -87300,7 +87618,7 @@ namespace VULKAN_HPP_NAMESPACE # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { @@ -87809,7 +88127,6 @@ namespace VULKAN_HPP_NAMESPACE shaderDemoteToHelperInvocation = shaderDemoteToHelperInvocation_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures const &() const VULKAN_HPP_NOEXCEPT { @@ -88238,6 +88555,7 @@ namespace VULKAN_HPP_NAMESPACE maxExecutionGraphShaderOutputNodes = maxExecutionGraphShaderOutputNodes_; return *this; } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderEnqueuePropertiesAMDX & setMaxExecutionGraphShaderPayloadSize( uint32_t maxExecutionGraphShaderPayloadSize_ ) VULKAN_HPP_NOEXCEPT @@ -88399,7 +88717,6 @@ namespace VULKAN_HPP_NAMESPACE shaderExpectAssume = shaderExpectAssume_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkPhysicalDeviceShaderExpectAssumeFeatures const &() const VULKAN_HPP_NOEXCEPT { @@ -88411,17 +88728,11 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION - auto -# else - std::tuple -# endif - reflect() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderEnqueuePropertiesAMDX & + setMaxExecutionGraphShaderPayloadCount( uint32_t maxExecutionGraphShaderPayloadCount_ ) VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, shaderExpectAssume ); } -#endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceShaderExpectAssumeFeatures const & ) const = default; @@ -88437,7 +88748,8 @@ namespace VULKAN_HPP_NAMESPACE bool operator!=( PhysicalDeviceShaderExpectAssumeFeatures const & rhs ) const 
VULKAN_HPP_NOEXCEPT { - return !operator==( rhs ); + maxExecutionGraphWorkgroupCount = maxExecutionGraphWorkgroupCount_; + return *this; } #endif @@ -88470,6 +88782,7 @@ namespace VULKAN_HPP_NAMESPACE , shaderFloat16{ shaderFloat16_ } , shaderInt8{ shaderInt8_ } { + return *reinterpret_cast( this ); } VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderFloat16Int8Features( PhysicalDeviceShaderFloat16Int8Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; @@ -88477,7 +88790,9 @@ namespace VULKAN_HPP_NAMESPACE PhysicalDeviceShaderFloat16Int8Features( VkPhysicalDeviceShaderFloat16Int8Features const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceShaderFloat16Int8Features( *reinterpret_cast( &rhs ) ) { + return !operator==( rhs ); } +# endif PhysicalDeviceShaderFloat16Int8Features & operator=( PhysicalDeviceShaderFloat16Int8Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ @@ -88624,7 +88939,7 @@ namespace VULKAN_HPP_NAMESPACE # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { @@ -88823,6 +89138,7 @@ namespace VULKAN_HPP_NAMESPACE { return *reinterpret_cast( this ); } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkPhysicalDeviceShaderImageFootprintFeaturesNV &() VULKAN_HPP_NOEXCEPT { @@ -88852,6 +89168,7 @@ namespace VULKAN_HPP_NAMESPACE return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( imageFootprint == rhs.imageFootprint ); # endif } +#endif bool operator!=( PhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { @@ -92837,6 +93154,7 @@ namespace VULKAN_HPP_NAMESPACE return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( strcmp( name, rhs.name ) == 0 ) && ( strcmp( version, rhs.version ) == 0 ) && ( purposes == rhs.purposes ) && ( strcmp( description, rhs.description ) == 0 ) && ( strcmp( layer, rhs.layer ) == 0 ); } +#endif bool operator!=( PhysicalDeviceToolProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { @@ -95193,7 +95511,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setUniformBufferStandardLayout( VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout_ ) VULKAN_HPP_NOEXCEPT { - uniformBufferStandardLayout = uniformBufferStandardLayout_; + separateDepthStencilLayouts = separateDepthStencilLayouts_; return *this; } diff --git a/third_party/vulkan/vulkan_to_string.hpp b/third_party/vulkan/vulkan_to_string.hpp index 971dc25..e8d8eeb 100644 --- a/third_party/vulkan/vulkan_to_string.hpp +++ b/third_party/vulkan/vulkan_to_string.hpp @@ -8,7 +8,7 @@ #ifndef VULKAN_TO_STRING_HPP #define VULKAN_TO_STRING_HPP -#include +#include // ignore warnings on using deprecated enum values in this header #if defined( __clang__ ) || defined( __GNUC__ ) @@ -6748,7 +6748,7 @@ namespace VULKAN_HPP_NAMESPACE case DriverId::eMesaNvk: return "MesaNvk"; case DriverId::eImaginationOpenSourceMESA: return "ImaginationOpenSourceMESA"; case DriverId::eMesaHoneykrisp: return "MesaHoneykrisp"; - case DriverId::eReserved27: return "Reserved27"; + case DriverId::eVulkanScEmulationOnVulkan: return "VulkanScEmulationOnVulkan"; default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast( value ) ) + " )"; } } diff --git a/third_party/vulkan/vulkan_video.hpp b/third_party/vulkan/vulkan_video.hpp index ee88975..696e685 100644 --- a/third_party/vulkan/vulkan_video.hpp +++ b/third_party/vulkan/vulkan_video.hpp @@ -364,19 +364,21 @@ namespace VULKAN_HPP_NAMESPACE enum class AV1ColorPrimaries { - eBt709 = 
STD_VIDEO_AV1_COLOR_PRIMARIES_BT_709, - eBtUnspecified = STD_VIDEO_AV1_COLOR_PRIMARIES_BT_UNSPECIFIED, - eBt470M = STD_VIDEO_AV1_COLOR_PRIMARIES_BT_470_M, - eBt470BG = STD_VIDEO_AV1_COLOR_PRIMARIES_BT_470_B_G, - eBt601 = STD_VIDEO_AV1_COLOR_PRIMARIES_BT_601, - eSmpte240 = STD_VIDEO_AV1_COLOR_PRIMARIES_SMPTE_240, - eGenericFilm = STD_VIDEO_AV1_COLOR_PRIMARIES_GENERIC_FILM, - eBt2020 = STD_VIDEO_AV1_COLOR_PRIMARIES_BT_2020, - eXyz = STD_VIDEO_AV1_COLOR_PRIMARIES_XYZ, - eSmpte431 = STD_VIDEO_AV1_COLOR_PRIMARIES_SMPTE_431, - eSmpte432 = STD_VIDEO_AV1_COLOR_PRIMARIES_SMPTE_432, - eEbu3213 = STD_VIDEO_AV1_COLOR_PRIMARIES_EBU_3213, - eInvalid = STD_VIDEO_AV1_COLOR_PRIMARIES_INVALID + eBt709 = STD_VIDEO_AV1_COLOR_PRIMARIES_BT_709, + eUnspecified = STD_VIDEO_AV1_COLOR_PRIMARIES_UNSPECIFIED, + eBtUnspecified VULKAN_HPP_DEPRECATED_17( "eBtUnspecified is deprecated, eUnspecified should be used instead." ) = + STD_VIDEO_AV1_COLOR_PRIMARIES_BT_UNSPECIFIED, + eBt470M = STD_VIDEO_AV1_COLOR_PRIMARIES_BT_470_M, + eBt470BG = STD_VIDEO_AV1_COLOR_PRIMARIES_BT_470_B_G, + eBt601 = STD_VIDEO_AV1_COLOR_PRIMARIES_BT_601, + eSmpte240 = STD_VIDEO_AV1_COLOR_PRIMARIES_SMPTE_240, + eGenericFilm = STD_VIDEO_AV1_COLOR_PRIMARIES_GENERIC_FILM, + eBt2020 = STD_VIDEO_AV1_COLOR_PRIMARIES_BT_2020, + eXyz = STD_VIDEO_AV1_COLOR_PRIMARIES_XYZ, + eSmpte431 = STD_VIDEO_AV1_COLOR_PRIMARIES_SMPTE_431, + eSmpte432 = STD_VIDEO_AV1_COLOR_PRIMARIES_SMPTE_432, + eEbu3213 = STD_VIDEO_AV1_COLOR_PRIMARIES_EBU_3213, + eInvalid = STD_VIDEO_AV1_COLOR_PRIMARIES_INVALID }; enum class AV1TransferCharacteristics @@ -3717,6 +3719,339 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_NAMESPACE::ArrayWrapper1D SavedOrderHints = {}; }; + //=== vulkan_video_codec_av1std_encode === + + struct EncodeAV1DecoderModelInfo + { + using NativeType = StdVideoEncodeAV1DecoderModelInfo; + + operator StdVideoEncodeAV1DecoderModelInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeAV1DecoderModelInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( EncodeAV1DecoderModelInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( buffer_delay_length_minus_1 == rhs.buffer_delay_length_minus_1 ) && + ( buffer_removal_time_length_minus_1 == rhs.buffer_removal_time_length_minus_1 ) && + ( frame_presentation_time_length_minus_1 == rhs.frame_presentation_time_length_minus_1 ) && ( reserved1 == rhs.reserved1 ) && + ( num_units_in_decoding_tick == rhs.num_units_in_decoding_tick ); + } + + bool operator!=( EncodeAV1DecoderModelInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint8_t buffer_delay_length_minus_1 = {}; + uint8_t buffer_removal_time_length_minus_1 = {}; + uint8_t frame_presentation_time_length_minus_1 = {}; + uint8_t reserved1 = {}; + uint32_t num_units_in_decoding_tick = {}; + }; + + struct EncodeAV1ExtensionHeader + { + using NativeType = StdVideoEncodeAV1ExtensionHeader; + + operator StdVideoEncodeAV1ExtensionHeader const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeAV1ExtensionHeader &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( EncodeAV1ExtensionHeader const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( temporal_id == rhs.temporal_id ) && ( spatial_id == rhs.spatial_id ); + } + + bool operator!=( EncodeAV1ExtensionHeader const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint8_t 
temporal_id = {}; + uint8_t spatial_id = {}; + }; + + struct EncodeAV1OperatingPointInfoFlags + { + using NativeType = StdVideoEncodeAV1OperatingPointInfoFlags; + + operator StdVideoEncodeAV1OperatingPointInfoFlags const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeAV1OperatingPointInfoFlags &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( EncodeAV1OperatingPointInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( decoder_model_present_for_this_op == rhs.decoder_model_present_for_this_op ) && ( low_delay_mode_flag == rhs.low_delay_mode_flag ) && + ( initial_display_delay_present_for_this_op == rhs.initial_display_delay_present_for_this_op ) && ( reserved == rhs.reserved ); + } + + bool operator!=( EncodeAV1OperatingPointInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t decoder_model_present_for_this_op : 1; + uint32_t low_delay_mode_flag : 1; + uint32_t initial_display_delay_present_for_this_op : 1; + uint32_t reserved : 29; + }; + + struct EncodeAV1OperatingPointInfo + { + using NativeType = StdVideoEncodeAV1OperatingPointInfo; + + operator StdVideoEncodeAV1OperatingPointInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeAV1OperatingPointInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( EncodeAV1OperatingPointInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( operating_point_idc == rhs.operating_point_idc ) && ( seq_level_idx == rhs.seq_level_idx ) && + ( seq_tier == rhs.seq_tier ) && ( decoder_buffer_delay == rhs.decoder_buffer_delay ) && ( encoder_buffer_delay == rhs.encoder_buffer_delay ) && + ( initial_display_delay_minus_1 == rhs.initial_display_delay_minus_1 ); + } + + bool operator!=( EncodeAV1OperatingPointInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeAV1OperatingPointInfoFlags flags = {}; + uint16_t operating_point_idc = {}; + uint8_t seq_level_idx = {}; + uint8_t seq_tier = {}; + uint32_t decoder_buffer_delay = {}; + uint32_t encoder_buffer_delay = {}; + uint8_t initial_display_delay_minus_1 = {}; + }; + + struct EncodeAV1PictureInfoFlags + { + using NativeType = StdVideoEncodeAV1PictureInfoFlags; + + operator StdVideoEncodeAV1PictureInfoFlags const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeAV1PictureInfoFlags &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( EncodeAV1PictureInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( error_resilient_mode == rhs.error_resilient_mode ) && ( disable_cdf_update == rhs.disable_cdf_update ) && + ( use_superres == rhs.use_superres ) && ( render_and_frame_size_different == rhs.render_and_frame_size_different ) && + ( allow_screen_content_tools == rhs.allow_screen_content_tools ) && ( is_filter_switchable == rhs.is_filter_switchable ) && + ( force_integer_mv == rhs.force_integer_mv ) && ( frame_size_override_flag == rhs.frame_size_override_flag ) && + ( buffer_removal_time_present_flag == rhs.buffer_removal_time_present_flag ) && ( allow_intrabc == rhs.allow_intrabc ) && + ( frame_refs_short_signaling == rhs.frame_refs_short_signaling ) && ( allow_high_precision_mv == rhs.allow_high_precision_mv ) && + ( is_motion_mode_switchable 
== rhs.is_motion_mode_switchable ) && ( use_ref_frame_mvs == rhs.use_ref_frame_mvs ) && + ( disable_frame_end_update_cdf == rhs.disable_frame_end_update_cdf ) && ( allow_warped_motion == rhs.allow_warped_motion ) && + ( reduced_tx_set == rhs.reduced_tx_set ) && ( skip_mode_present == rhs.skip_mode_present ) && ( delta_q_present == rhs.delta_q_present ) && + ( delta_lf_present == rhs.delta_lf_present ) && ( delta_lf_multi == rhs.delta_lf_multi ) && + ( segmentation_enabled == rhs.segmentation_enabled ) && ( segmentation_update_map == rhs.segmentation_update_map ) && + ( segmentation_temporal_update == rhs.segmentation_temporal_update ) && ( segmentation_update_data == rhs.segmentation_update_data ) && + ( UsesLr == rhs.UsesLr ) && ( usesChromaLr == rhs.usesChromaLr ) && ( show_frame == rhs.show_frame ) && + ( showable_frame == rhs.showable_frame ) && ( reserved == rhs.reserved ); + } + + bool operator!=( EncodeAV1PictureInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t error_resilient_mode : 1; + uint32_t disable_cdf_update : 1; + uint32_t use_superres : 1; + uint32_t render_and_frame_size_different : 1; + uint32_t allow_screen_content_tools : 1; + uint32_t is_filter_switchable : 1; + uint32_t force_integer_mv : 1; + uint32_t frame_size_override_flag : 1; + uint32_t buffer_removal_time_present_flag : 1; + uint32_t allow_intrabc : 1; + uint32_t frame_refs_short_signaling : 1; + uint32_t allow_high_precision_mv : 1; + uint32_t is_motion_mode_switchable : 1; + uint32_t use_ref_frame_mvs : 1; + uint32_t disable_frame_end_update_cdf : 1; + uint32_t allow_warped_motion : 1; + uint32_t reduced_tx_set : 1; + uint32_t skip_mode_present : 1; + uint32_t delta_q_present : 1; + uint32_t delta_lf_present : 1; + uint32_t delta_lf_multi : 1; + uint32_t segmentation_enabled : 1; + uint32_t segmentation_update_map : 1; + uint32_t segmentation_temporal_update : 1; + uint32_t segmentation_update_data : 1; + uint32_t UsesLr : 1; + uint32_t usesChromaLr : 1; + uint32_t show_frame : 1; + uint32_t showable_frame : 1; + uint32_t reserved : 3; + }; + + struct EncodeAV1PictureInfo + { + using NativeType = StdVideoEncodeAV1PictureInfo; + + operator StdVideoEncodeAV1PictureInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeAV1PictureInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( EncodeAV1PictureInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags == rhs.flags ) && ( frame_type == rhs.frame_type ) && ( frame_presentation_time == rhs.frame_presentation_time ) && + ( current_frame_id == rhs.current_frame_id ) && ( order_hint == rhs.order_hint ) && ( primary_ref_frame == rhs.primary_ref_frame ) && + ( refresh_frame_flags == rhs.refresh_frame_flags ) && ( coded_denom == rhs.coded_denom ) && + ( render_width_minus_1 == rhs.render_width_minus_1 ) && ( render_height_minus_1 == rhs.render_height_minus_1 ) && + ( interpolation_filter == rhs.interpolation_filter ) && ( TxMode == rhs.TxMode ) && ( delta_q_res == rhs.delta_q_res ) && + ( delta_lf_res == rhs.delta_lf_res ) && ( ref_order_hint == rhs.ref_order_hint ) && ( ref_frame_idx == rhs.ref_frame_idx ) && + ( reserved1 == rhs.reserved1 ) && ( delta_frame_id_minus_1 == rhs.delta_frame_id_minus_1 ) && ( pTileInfo == rhs.pTileInfo ) && + ( pQuantization == rhs.pQuantization ) && ( pSegmentation == rhs.pSegmentation ) && ( pLoopFilter == rhs.pLoopFilter ) && + ( pCDEF == rhs.pCDEF ) && ( pLoopRestoration 
== rhs.pLoopRestoration ) && ( pGlobalMotion == rhs.pGlobalMotion ) && + ( pExtensionHeader == rhs.pExtensionHeader ) && ( pBufferRemovalTimes == rhs.pBufferRemovalTimes ); + } + + bool operator!=( EncodeAV1PictureInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeAV1PictureInfoFlags flags = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::AV1FrameType frame_type = VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::AV1FrameType::eKey; + uint32_t frame_presentation_time = {}; + uint32_t current_frame_id = {}; + uint8_t order_hint = {}; + uint8_t primary_ref_frame = {}; + uint8_t refresh_frame_flags = {}; + uint8_t coded_denom = {}; + uint16_t render_width_minus_1 = {}; + uint16_t render_height_minus_1 = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::AV1InterpolationFilter interpolation_filter = + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::AV1InterpolationFilter::eEighttap; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::AV1TxMode TxMode = VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::AV1TxMode::eOnly4X4; + uint8_t delta_q_res = {}; + uint8_t delta_lf_res = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D ref_order_hint = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D ref_frame_idx = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D reserved1 = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D delta_frame_id_minus_1 = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::AV1TileInfo * pTileInfo = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::AV1Quantization * pQuantization = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::AV1Segmentation * pSegmentation = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::AV1LoopFilter * pLoopFilter = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::AV1CDEF * pCDEF = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::AV1LoopRestoration * pLoopRestoration = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::AV1GlobalMotion * pGlobalMotion = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeAV1ExtensionHeader * pExtensionHeader = {}; + const uint32_t * pBufferRemovalTimes = {}; + }; + + struct EncodeAV1ReferenceInfoFlags + { + using NativeType = StdVideoEncodeAV1ReferenceInfoFlags; + + operator StdVideoEncodeAV1ReferenceInfoFlags const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeAV1ReferenceInfoFlags &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( EncodeAV1ReferenceInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( disable_frame_end_update_cdf == rhs.disable_frame_end_update_cdf ) && ( segmentation_enabled == rhs.segmentation_enabled ) && + ( reserved == rhs.reserved ); + } + + bool operator!=( EncodeAV1ReferenceInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + uint32_t disable_frame_end_update_cdf : 1; + uint32_t segmentation_enabled : 1; + uint32_t reserved : 30; + }; + + struct EncodeAV1ReferenceInfo + { + using NativeType = StdVideoEncodeAV1ReferenceInfo; + + operator StdVideoEncodeAV1ReferenceInfo const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator StdVideoEncodeAV1ReferenceInfo &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + bool operator==( EncodeAV1ReferenceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return ( flags 
== rhs.flags ) && ( RefFrameId == rhs.RefFrameId ) && ( frame_type == rhs.frame_type ) && ( OrderHint == rhs.OrderHint ) && + ( reserved1 == rhs.reserved1 ) && ( pExtensionHeader == rhs.pExtensionHeader ); + } + + bool operator!=( EncodeAV1ReferenceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return !operator==( rhs ); + } + + public: + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeAV1ReferenceInfoFlags flags = {}; + uint32_t RefFrameId = {}; + VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::AV1FrameType frame_type = VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::AV1FrameType::eKey; + uint8_t OrderHint = {}; + VULKAN_HPP_NAMESPACE::ArrayWrapper1D reserved1 = {}; + const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeAV1ExtensionHeader * pExtensionHeader = {}; + }; + } // namespace VULKAN_HPP_VIDEO_NAMESPACE } // namespace VULKAN_HPP_NAMESPACE #endif From f6708058b379f87f5eb93a10b54635025ec6f235 Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Fri, 27 Dec 2024 23:28:51 +0100 Subject: [PATCH 114/131] fixing vicious bug in scene code --- Makefile | 2 +- example/main.c | 2 +- runtime/Includes/Core/Graphics.inl | 1 + runtime/Includes/Embedded/Shader2DVertex.nzsl | 2 +- .../Includes/Embedded/Shader2DVertex.spv.h | 71 ++++++++++--------- runtime/Sources/Graphics/Scene.cpp | 2 +- runtime/Sources/Renderer/Image.cpp | 6 +- .../Sources/Renderer/RenderPasses/2DPass.cpp | 2 +- 8 files changed, 45 insertions(+), 43 deletions(-) diff --git a/Makefile b/Makefile index 7dd2529..0671d7e 100644 --- a/Makefile +++ b/Makefile @@ -38,7 +38,7 @@ GCH = runtime/Includes/PreCompiled.h.gch CCH = runtime/Includes/PreCompiled.h.pch PCH = -NZRRC ?= nzslc +NZSLC ?= nzslc ifeq ($(TOOLCHAIN), gcc) CXX = g++ diff --git a/example/main.c b/example/main.c index 8868be8..9336e73 100644 --- a/example/main.c +++ b/example/main.c @@ -169,7 +169,7 @@ int main(void) mlx.logo_jpg = mlx_new_image_from_file(mlx.mlx, "42_logo.jpg", &dummy, &dummy); mlx_pixel_put(mlx.mlx, mlx.win, 200, 10, (mlx_color){ .rgba = 0xFF00FFFF }); - mlx_put_image_to_window(mlx.mlx, mlx.win, mlx.logo_png, 10, 190); + mlx_put_image_to_window(mlx.mlx, mlx.win, mlx.logo_png, 0, 0); mlx.img = create_image(&mlx); diff --git a/runtime/Includes/Core/Graphics.inl b/runtime/Includes/Core/Graphics.inl index 1416b9a..783cc94 100644 --- a/runtime/Includes/Core/Graphics.inl +++ b/runtime/Includes/Core/Graphics.inl @@ -87,6 +87,7 @@ namespace mlx { MLX_PROFILE_FUNCTION(); NonOwningPtr sprite = p_scene->GetSpriteFromTexturePositionScaleRotation(texture, Vec2f{ static_cast(x), static_cast(y) }, scale_x, scale_y, angle); + std::cout << sprite.Get() << std::endl; if(!sprite) { if(m_pixelput_called) diff --git a/runtime/Includes/Embedded/Shader2DVertex.nzsl b/runtime/Includes/Embedded/Shader2DVertex.nzsl index 065d89b..fec2428 100644 --- a/runtime/Includes/Embedded/Shader2DVertex.nzsl +++ b/runtime/Includes/Embedded/Shader2DVertex.nzsl @@ -34,7 +34,7 @@ external [entry(vert)] fn main(input: VertIn) -> VertOut { - let position: vec4[f32] = vec4[f32](input.pos.xy, 1.0, 1.0); + let position: vec4[f32] = vec4[f32](input.pos.xy, 0.0, 1.0); input.uv *= -1.0; let output: VertOut; output.uv = input.uv; diff --git a/runtime/Includes/Embedded/Shader2DVertex.spv.h b/runtime/Includes/Embedded/Shader2DVertex.spv.h index 2d0f916..b4f4e35 100644 --- a/runtime/Includes/Embedded/Shader2DVertex.spv.h +++ b/runtime/Includes/Embedded/Shader2DVertex.spv.h @@ -1,5 +1,5 @@ -3,2,35,7,0,0,1,0,39,0,0,0,70,0,0,0,0,0,0,0,17,0,2,0,1,0,0,0,14,0, 
-3,0,0,0,0,0,1,0,0,0,15,0,10,0,0,0,0,0,34,0,0,0,109,97,105,110,0,0,0,0, +3,2,35,7,0,0,1,0,39,0,0,0,71,0,0,0,0,0,0,0,17,0,2,0,1,0,0,0,14,0, +3,0,0,0,0,0,1,0,0,0,15,0,10,0,0,0,0,0,35,0,0,0,109,97,105,110,0,0,0,0, 13,0,0,0,19,0,0,0,25,0,0,0,27,0,0,0,28,0,0,0,3,0,3,0,0,0,0,0,100,0, 0,0,5,0,5,0,4,0,0,0,86,105,101,119,101,114,68,97,116,97,0,0,6,0,8,0,4,0,0,0, 0,0,0,0,112,114,111,106,101,99,116,105,111,110,95,109,97,116,114,105,120,0,0,0,5,0,5,0,7,0, @@ -12,7 +12,7 @@ 5,0,6,0,0,0,118,105,101,119,101,114,95,100,97,116,97,0,5,0,4,0,9,0,0,0,109,111,100,101, 108,0,0,0,5,0,3,0,13,0,0,0,112,111,115,0,5,0,3,0,19,0,0,0,117,118,0,0,5,0, 4,0,25,0,0,0,99,111,108,111,114,0,0,0,5,0,3,0,27,0,0,0,117,118,0,0,5,0,5,0, -28,0,0,0,112,111,115,105,116,105,111,110,0,0,0,0,5,0,4,0,34,0,0,0,109,97,105,110,0,0, +28,0,0,0,112,111,115,105,116,105,111,110,0,0,0,0,5,0,4,0,35,0,0,0,109,97,105,110,0,0, 0,0,71,0,4,0,6,0,0,0,33,0,0,0,0,0,0,0,71,0,4,0,6,0,0,0,34,0,0,0, 0,0,0,0,71,0,4,0,28,0,0,0,11,0,0,0,0,0,0,0,71,0,4,0,13,0,0,0,30,0, 0,0,0,0,0,0,71,0,4,0,19,0,0,0,30,0,0,0,1,0,0,0,71,0,4,0,25,0,0,0, @@ -37,35 +37,36 @@ 2,0,0,0,17,0,0,0,32,0,4,0,23,0,0,0,7,0,0,0,22,0,0,0,32,0,4,0,24,0, 0,0,3,0,0,0,2,0,0,0,32,0,4,0,26,0,0,0,3,0,0,0,17,0,0,0,30,0,5,0, 29,0,0,0,2,0,0,0,17,0,0,0,2,0,0,0,43,0,4,0,1,0,0,0,30,0,0,0,0,0, -128,63,43,0,4,0,1,0,0,0,31,0,0,0,0,0,128,191,32,0,4,0,32,0,0,0,7,0,0,0, -29,0,0,0,43,0,4,0,14,0,0,0,33,0,0,0,2,0,0,0,32,0,4,0,52,0,0,0,9,0, -0,0,2,0,0,0,32,0,4,0,56,0,0,0,2,0,0,0,3,0,0,0,32,0,4,0,59,0,0,0, -9,0,0,0,3,0,0,0,59,0,4,0,5,0,0,0,6,0,0,0,2,0,0,0,59,0,4,0,8,0, -0,0,9,0,0,0,9,0,0,0,59,0,4,0,12,0,0,0,13,0,0,0,1,0,0,0,59,0,4,0, -18,0,0,0,19,0,0,0,1,0,0,0,59,0,4,0,24,0,0,0,25,0,0,0,3,0,0,0,59,0, -4,0,26,0,0,0,27,0,0,0,3,0,0,0,59,0,4,0,24,0,0,0,28,0,0,0,3,0,0,0, -54,0,5,0,10,0,0,0,34,0,0,0,0,0,0,0,11,0,0,0,248,0,2,0,35,0,0,0,59,0, -4,0,16,0,0,0,36,0,0,0,7,0,0,0,59,0,4,0,32,0,0,0,37,0,0,0,7,0,0,0, -59,0,4,0,23,0,0,0,38,0,0,0,7,0,0,0,65,0,5,0,16,0,0,0,39,0,0,0,38,0, -0,0,15,0,0,0,63,0,3,0,39,0,0,0,13,0,0,0,65,0,5,0,21,0,0,0,40,0,0,0, -38,0,0,0,20,0,0,0,63,0,3,0,40,0,0,0,19,0,0,0,65,0,5,0,16,0,0,0,41,0, -0,0,38,0,0,0,15,0,0,0,61,0,4,0,2,0,0,0,42,0,0,0,41,0,0,0,79,0,7,0, -17,0,0,0,43,0,0,0,42,0,0,0,42,0,0,0,0,0,0,0,1,0,0,0,80,0,6,0,2,0, -0,0,44,0,0,0,43,0,0,0,30,0,0,0,30,0,0,0,62,0,3,0,36,0,0,0,44,0,0,0, -65,0,5,0,21,0,0,0,45,0,0,0,38,0,0,0,20,0,0,0,61,0,4,0,17,0,0,0,46,0, -0,0,45,0,0,0,142,0,5,0,17,0,0,0,47,0,0,0,46,0,0,0,31,0,0,0,65,0,5,0, -21,0,0,0,48,0,0,0,38,0,0,0,20,0,0,0,62,0,3,0,48,0,0,0,47,0,0,0,65,0, -5,0,21,0,0,0,49,0,0,0,38,0,0,0,20,0,0,0,61,0,4,0,17,0,0,0,50,0,0,0, -49,0,0,0,65,0,5,0,21,0,0,0,51,0,0,0,37,0,0,0,20,0,0,0,62,0,3,0,51,0, -0,0,50,0,0,0,65,0,5,0,52,0,0,0,53,0,0,0,9,0,0,0,20,0,0,0,61,0,4,0, -2,0,0,0,54,0,0,0,53,0,0,0,65,0,5,0,16,0,0,0,55,0,0,0,37,0,0,0,15,0, -0,0,62,0,3,0,55,0,0,0,54,0,0,0,65,0,5,0,56,0,0,0,57,0,0,0,6,0,0,0, -15,0,0,0,61,0,4,0,3,0,0,0,58,0,0,0,57,0,0,0,65,0,5,0,59,0,0,0,60,0, -0,0,9,0,0,0,15,0,0,0,61,0,4,0,3,0,0,0,61,0,0,0,60,0,0,0,146,0,5,0, -3,0,0,0,62,0,0,0,58,0,0,0,61,0,0,0,61,0,4,0,2,0,0,0,63,0,0,0,36,0, -0,0,145,0,5,0,2,0,0,0,64,0,0,0,62,0,0,0,63,0,0,0,65,0,5,0,16,0,0,0, -65,0,0,0,37,0,0,0,33,0,0,0,62,0,3,0,65,0,0,0,64,0,0,0,61,0,4,0,29,0, -0,0,66,0,0,0,37,0,0,0,81,0,5,0,2,0,0,0,67,0,0,0,66,0,0,0,0,0,0,0, -62,0,3,0,25,0,0,0,67,0,0,0,81,0,5,0,17,0,0,0,68,0,0,0,66,0,0,0,1,0, -0,0,62,0,3,0,27,0,0,0,68,0,0,0,81,0,5,0,2,0,0,0,69,0,0,0,66,0,0,0, -2,0,0,0,62,0,3,0,28,0,0,0,69,0,0,0,253,0,1,0,56,0,1,0 +0,0,43,0,4,0,1,0,0,0,31,0,0,0,0,0,128,63,43,0,4,0,1,0,0,0,32,0,0,0, 
+0,0,128,191,32,0,4,0,33,0,0,0,7,0,0,0,29,0,0,0,43,0,4,0,14,0,0,0,34,0, +0,0,2,0,0,0,32,0,4,0,53,0,0,0,9,0,0,0,2,0,0,0,32,0,4,0,57,0,0,0, +2,0,0,0,3,0,0,0,32,0,4,0,60,0,0,0,9,0,0,0,3,0,0,0,59,0,4,0,5,0, +0,0,6,0,0,0,2,0,0,0,59,0,4,0,8,0,0,0,9,0,0,0,9,0,0,0,59,0,4,0, +12,0,0,0,13,0,0,0,1,0,0,0,59,0,4,0,18,0,0,0,19,0,0,0,1,0,0,0,59,0, +4,0,24,0,0,0,25,0,0,0,3,0,0,0,59,0,4,0,26,0,0,0,27,0,0,0,3,0,0,0, +59,0,4,0,24,0,0,0,28,0,0,0,3,0,0,0,54,0,5,0,10,0,0,0,35,0,0,0,0,0, +0,0,11,0,0,0,248,0,2,0,36,0,0,0,59,0,4,0,16,0,0,0,37,0,0,0,7,0,0,0, +59,0,4,0,33,0,0,0,38,0,0,0,7,0,0,0,59,0,4,0,23,0,0,0,39,0,0,0,7,0, +0,0,65,0,5,0,16,0,0,0,40,0,0,0,39,0,0,0,15,0,0,0,63,0,3,0,40,0,0,0, +13,0,0,0,65,0,5,0,21,0,0,0,41,0,0,0,39,0,0,0,20,0,0,0,63,0,3,0,41,0, +0,0,19,0,0,0,65,0,5,0,16,0,0,0,42,0,0,0,39,0,0,0,15,0,0,0,61,0,4,0, +2,0,0,0,43,0,0,0,42,0,0,0,79,0,7,0,17,0,0,0,44,0,0,0,43,0,0,0,43,0, +0,0,0,0,0,0,1,0,0,0,80,0,6,0,2,0,0,0,45,0,0,0,44,0,0,0,30,0,0,0, +31,0,0,0,62,0,3,0,37,0,0,0,45,0,0,0,65,0,5,0,21,0,0,0,46,0,0,0,39,0, +0,0,20,0,0,0,61,0,4,0,17,0,0,0,47,0,0,0,46,0,0,0,142,0,5,0,17,0,0,0, +48,0,0,0,47,0,0,0,32,0,0,0,65,0,5,0,21,0,0,0,49,0,0,0,39,0,0,0,20,0, +0,0,62,0,3,0,49,0,0,0,48,0,0,0,65,0,5,0,21,0,0,0,50,0,0,0,39,0,0,0, +20,0,0,0,61,0,4,0,17,0,0,0,51,0,0,0,50,0,0,0,65,0,5,0,21,0,0,0,52,0, +0,0,38,0,0,0,20,0,0,0,62,0,3,0,52,0,0,0,51,0,0,0,65,0,5,0,53,0,0,0, +54,0,0,0,9,0,0,0,20,0,0,0,61,0,4,0,2,0,0,0,55,0,0,0,54,0,0,0,65,0, +5,0,16,0,0,0,56,0,0,0,38,0,0,0,15,0,0,0,62,0,3,0,56,0,0,0,55,0,0,0, +65,0,5,0,57,0,0,0,58,0,0,0,6,0,0,0,15,0,0,0,61,0,4,0,3,0,0,0,59,0, +0,0,58,0,0,0,65,0,5,0,60,0,0,0,61,0,0,0,9,0,0,0,15,0,0,0,61,0,4,0, +3,0,0,0,62,0,0,0,61,0,0,0,146,0,5,0,3,0,0,0,63,0,0,0,59,0,0,0,62,0, +0,0,61,0,4,0,2,0,0,0,64,0,0,0,37,0,0,0,145,0,5,0,2,0,0,0,65,0,0,0, +63,0,0,0,64,0,0,0,65,0,5,0,16,0,0,0,66,0,0,0,38,0,0,0,34,0,0,0,62,0, +3,0,66,0,0,0,65,0,0,0,61,0,4,0,29,0,0,0,67,0,0,0,38,0,0,0,81,0,5,0, +2,0,0,0,68,0,0,0,67,0,0,0,0,0,0,0,62,0,3,0,25,0,0,0,68,0,0,0,81,0, +5,0,17,0,0,0,69,0,0,0,67,0,0,0,1,0,0,0,62,0,3,0,27,0,0,0,69,0,0,0, +81,0,5,0,2,0,0,0,70,0,0,0,67,0,0,0,2,0,0,0,62,0,3,0,28,0,0,0,70,0, +0,0,253,0,1,0,56,0,1,0 diff --git a/runtime/Sources/Graphics/Scene.cpp b/runtime/Sources/Graphics/Scene.cpp index d849d91..d36b135 100644 --- a/runtime/Sources/Graphics/Scene.cpp +++ b/runtime/Sources/Graphics/Scene.cpp @@ -34,7 +34,7 @@ namespace mlx { if(!drawable || drawable->GetType() != DrawableType::Sprite) return false; - return static_cast(drawable.get())->GetTexture() == texture && + return static_cast(drawable.get())->GetTexture().Get() == texture.Get() && drawable->GetPosition() == position && drawable->GetScale() == Vec2f{ scale_x, scale_y } && drawable->GetRotation().ToEulerAngles() == EulerAnglesf{ 0.0f, 0.0f, rotation }; diff --git a/runtime/Sources/Renderer/Image.cpp b/runtime/Sources/Renderer/Image.cpp index 9757ace..da6011d 100644 --- a/runtime/Sources/Renderer/Image.cpp +++ b/runtime/Sources/Renderer/Image.cpp @@ -307,7 +307,7 @@ namespace mlx void Texture::Clear(VkCommandBuffer cmd, Vec4f color) { MLX_PROFILE_FUNCTION(); - Image::Clear(cmd, std::move(color)); + Image::Clear(cmd, color); if(m_staging_buffer.has_value()) { mlx_color processed_color; @@ -324,7 +324,7 @@ namespace mlx MLX_PROFILE_FUNCTION(); if(!m_has_been_modified) return; - std::memcpy(m_staging_buffer->GetMap(), m_cpu_buffer.data(), m_cpu_buffer.size() * kvfFormatSize(m_format)); + std::memcpy(m_staging_buffer->GetMap(), m_cpu_buffer.data(), m_cpu_buffer.size() * sizeof(mlx_color)); VkImageLayout old_layout = m_layout; 
TransitionLayout(VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, cmd); @@ -362,7 +362,7 @@ namespace mlx kvfDestroyFence(RenderCore::Get().GetDevice(), fence); m_cpu_buffer.resize(m_width * m_height); - std::memcpy(m_cpu_buffer.data(), m_staging_buffer->GetMap(), m_cpu_buffer.size()); + std::memcpy(m_cpu_buffer.data(), m_staging_buffer->GetMap(), m_cpu_buffer.size() * sizeof(mlx_color)); } Texture* StbTextureLoad(const std::filesystem::path& file, int* w, int* h) diff --git a/runtime/Sources/Renderer/RenderPasses/2DPass.cpp b/runtime/Sources/Renderer/RenderPasses/2DPass.cpp index 0d1de8e..84e4d5f 100644 --- a/runtime/Sources/Renderer/RenderPasses/2DPass.cpp +++ b/runtime/Sources/Renderer/RenderPasses/2DPass.cpp @@ -113,7 +113,7 @@ namespace mlx drawable_data.model_matrix = Mat4f::Identity(); drawable_data.model_matrix.ApplyTranslation(Vec3f{ -drawable->GetCenter() / 2.0f, 0.0f }); drawable_data.model_matrix.ApplyRotation(drawable->GetRotation()); - drawable_data.model_matrix.ApplyTranslation(Vec3f{ drawable->GetPosition() + drawable->GetCenter(), 0.0f }); + drawable_data.model_matrix.ApplyTranslation(Vec3f{ drawable->GetPosition() + drawable->GetCenter() / 2.0f, 0.0f }); drawable_data.model_matrix.ApplyScale(Vec3f{ drawable->GetScale(), 1.0f }); drawable->Bind(frame_index, cmd); From c60c176a5414dbbc706024bdba5c28b2fc00c50f Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Sat, 28 Dec 2024 15:42:38 +0100 Subject: [PATCH 115/131] wtf is hapenning --- example/main.c | 9 +++++- runtime/Includes/Core/Graphics.inl | 1 - runtime/Includes/Renderer/Buffer.h | 3 +- runtime/Includes/Renderer/Image.h | 1 - runtime/Sources/Renderer/Buffer.cpp | 3 +- runtime/Sources/Renderer/Image.cpp | 43 ++++++++++++++++--------- runtime/Sources/Renderer/Memory.cpp | 1 - runtime/Sources/Renderer/RenderCore.cpp | 1 + runtime/Sources/Renderer/Swapchain.cpp | 1 + third_party/kvf.h | 29 +++++++++++++++-- 10 files changed, 68 insertions(+), 24 deletions(-) diff --git a/example/main.c b/example/main.c index 9336e73..3639bc5 100644 --- a/example/main.c +++ b/example/main.c @@ -130,6 +130,8 @@ void window_hook(int event, void* param) mlx_loop_end(((mlx_t*)param)->mlx); } +#include + int main(void) { mlx_t mlx; @@ -166,7 +168,12 @@ int main(void) mlx.logo_png = mlx_new_image_from_file(mlx.mlx, "42_logo.png", &dummy, &dummy); mlx.logo_bmp = mlx_new_image_from_file(mlx.mlx, "42_logo.bmp", &dummy, &dummy); - mlx.logo_jpg = mlx_new_image_from_file(mlx.mlx, "42_logo.jpg", &dummy, &dummy); + //mlx.logo_jpg = mlx_new_image_from_file(mlx.mlx, "42_logo.jpg", &dummy, &dummy); + mlx.logo_jpg = mlx_new_image(mlx.mlx, dummy, dummy); + + mlx_color* data = (mlx_color*)malloc(dummy * dummy * sizeof(mlx_color)); + mlx_get_image_region(mlx.mlx, mlx.logo_png, 0, 0, dummy, dummy, data); + mlx_set_image_region(mlx.mlx, mlx.logo_jpg, 0, 0, dummy, dummy, data); mlx_pixel_put(mlx.mlx, mlx.win, 200, 10, (mlx_color){ .rgba = 0xFF00FFFF }); mlx_put_image_to_window(mlx.mlx, mlx.win, mlx.logo_png, 0, 0); diff --git a/runtime/Includes/Core/Graphics.inl b/runtime/Includes/Core/Graphics.inl index 783cc94..1416b9a 100644 --- a/runtime/Includes/Core/Graphics.inl +++ b/runtime/Includes/Core/Graphics.inl @@ -87,7 +87,6 @@ namespace mlx { MLX_PROFILE_FUNCTION(); NonOwningPtr sprite = p_scene->GetSpriteFromTexturePositionScaleRotation(texture, Vec2f{ static_cast(x), static_cast(y) }, scale_x, scale_y, angle); - std::cout << sprite.Get() << std::endl; if(!sprite) { if(m_pixelput_called) diff --git a/runtime/Includes/Renderer/Buffer.h b/runtime/Includes/Renderer/Buffer.h index 
0c4dcd1..356b613 100644 --- a/runtime/Includes/Renderer/Buffer.h +++ b/runtime/Includes/Renderer/Buffer.h @@ -19,7 +19,8 @@ namespace mlx void Swap(GPUBuffer& buffer) noexcept; - [[nodiscard]] MLX_FORCEINLINE void* GetMap() const noexcept { return p_map; } + template + [[nodiscard]] MLX_FORCEINLINE T GetMap() const noexcept { return reinterpret_cast(p_map); } [[nodiscard]] MLX_FORCEINLINE VkBuffer Get() const noexcept { return m_buffer; } [[nodiscard]] MLX_FORCEINLINE VmaAllocation GetAllocation() const noexcept { return m_allocation; } [[nodiscard]] MLX_FORCEINLINE VkDeviceSize GetSize() const noexcept { return m_size; } diff --git a/runtime/Includes/Renderer/Image.h b/runtime/Includes/Renderer/Image.h index 5833f0a..ba46d0e 100644 --- a/runtime/Includes/Renderer/Image.h +++ b/runtime/Includes/Renderer/Image.h @@ -98,7 +98,6 @@ namespace mlx void OpenCPUBuffer(); private: - std::vector m_cpu_buffer; std::optional m_staging_buffer; bool m_has_been_modified = false; }; diff --git a/runtime/Sources/Renderer/Buffer.cpp b/runtime/Sources/Renderer/Buffer.cpp index 95a70c3..c34f965 100644 --- a/runtime/Sources/Renderer/Buffer.cpp +++ b/runtime/Sources/Renderer/Buffer.cpp @@ -8,7 +8,7 @@ namespace mlx { MLX_PROFILE_FUNCTION(); VmaAllocationCreateInfo alloc_info{}; - alloc_info.flags = VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT; + alloc_info.flags = VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT; alloc_info.usage = VMA_MEMORY_USAGE_AUTO; if(type == BufferType::Constant) @@ -86,6 +86,7 @@ namespace mlx VkFence fence = kvfCreateFence(RenderCore::Get().GetDevice()); kvfSubmitSingleTimeCommandBuffer(RenderCore::Get().GetDevice(), cmd, KVF_GRAPHICS_QUEUE, fence); kvfDestroyFence(RenderCore::Get().GetDevice(), fence); + kvfDestroyCommandBuffer(RenderCore::Get().GetDevice(), cmd); return true; } diff --git a/runtime/Sources/Renderer/Image.cpp b/runtime/Sources/Renderer/Image.cpp index da6011d..4d3df4b 100644 --- a/runtime/Sources/Renderer/Image.cpp +++ b/runtime/Sources/Renderer/Image.cpp @@ -108,6 +108,8 @@ namespace mlx if(is_single_time_cmd_buffer) cmd = kvfCreateCommandBuffer(RenderCore::Get().GetDevice()); kvfTransitionImageLayout(RenderCore::Get().GetDevice(), m_image, KVF_IMAGE_COLOR, cmd, m_format, m_layout, new_layout, is_single_time_cmd_buffer); + if(is_single_time_cmd_buffer) + kvfDestroyCommandBuffer(RenderCore::Get().GetDevice(), cmd); m_layout = new_layout; } @@ -140,6 +142,7 @@ namespace mlx VkFence fence = kvfCreateFence(RenderCore::Get().GetDevice()); kvfSubmitSingleTimeCommandBuffer(RenderCore::Get().GetDevice(), cmd, KVF_GRAPHICS_QUEUE, fence); kvfDestroyFence(RenderCore::Get().GetDevice(), fence); + kvfDestroyCommandBuffer(RenderCore::Get().GetDevice(), cmd); } } @@ -199,6 +202,7 @@ namespace mlx VkFence fence = kvfCreateFence(RenderCore::Get().GetDevice()); kvfSubmitSingleTimeCommandBuffer(RenderCore::Get().GetDevice(), cmd, KVF_GRAPHICS_QUEUE, fence); kvfDestroyFence(RenderCore::Get().GetDevice(), fence); + kvfDestroyCommandBuffer(RenderCore::Get().GetDevice(), cmd); staging_buffer.Destroy(); } TransitionLayout(VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL); @@ -219,9 +223,9 @@ namespace mlx if(!m_staging_buffer.has_value()) OpenCPUBuffer(); if constexpr(std::endian::native == std::endian::little) - m_cpu_buffer[(y * m_width) + x] = ReverseColor(color); + m_staging_buffer->GetMap()[(y * m_width) + x] = ReverseColor(color); else - m_cpu_buffer[(y * m_width) + x] = color; + m_staging_buffer->GetMap()[(y * m_width) + x] = color; m_has_been_modified = true; } @@ -244,9 +248,9 @@ 
namespace mlx moving_y++; } if constexpr(std::endian::native == std::endian::little) - m_cpu_buffer[(moving_y * m_width) + moving_x] = ReverseColor(pixels[i]); + m_staging_buffer->GetMap()[(moving_y * m_width) + moving_x] = ReverseColor(pixels[i]); else - m_cpu_buffer[(moving_y * m_width) + moving_x] = pixels[i]; + m_staging_buffer->GetMap()[(moving_y * m_width) + moving_x] = pixels[i]; } m_has_been_modified = true; } @@ -259,11 +263,13 @@ namespace mlx if(!m_staging_buffer.has_value()) OpenCPUBuffer(); if constexpr(std::endian::native == std::endian::little) + { for(std::size_t i = 0; i < len; i++) - m_cpu_buffer[(y * m_width) + x + i] = ReverseColor(pixels[i]); + m_staging_buffer->GetMap()[(y * m_width) + x + i] = ReverseColor(pixels[i]); + } else { - std::memcpy(&m_cpu_buffer[(y * m_width) + x], pixels, len); + std::memcpy(&m_staging_buffer->GetMap()[(y * m_width) + x], pixels, len); } m_has_been_modified = true; } @@ -276,9 +282,9 @@ namespace mlx if(!m_staging_buffer.has_value()) OpenCPUBuffer(); if constexpr(std::endian::native == std::endian::little) - return ReverseColor(m_cpu_buffer[(y * m_width) + x]); + return ReverseColor(m_staging_buffer->GetMap()[(y * m_width) + x]); else - return m_cpu_buffer[(y * m_width) + x]; + return m_staging_buffer->GetMap()[(y * m_width) + x]; } void Texture::GetRegion(int x, int y, int w, int h, mlx_color* dst) noexcept @@ -298,9 +304,9 @@ namespace mlx moving_y++; } if constexpr(std::endian::native == std::endian::little) - dst[i] = ReverseColor(m_cpu_buffer[(moving_y * m_width) + moving_x]); + dst[i] = ReverseColor(m_staging_buffer->GetMap()[(moving_y * m_width) + moving_x]); else - dst[i] = m_cpu_buffer[(moving_y * m_width) + moving_x]; + dst[i] = m_staging_buffer->GetMap()[(moving_y * m_width) + moving_x]; } } @@ -315,7 +321,16 @@ namespace mlx processed_color.g = static_cast(color.g * 255.f); processed_color.b = static_cast(color.b * 255.f); processed_color.a = static_cast(color.a * 255.f); - std::fill(m_cpu_buffer.begin(), m_cpu_buffer.end(), processed_color); + if(processed_color.r == 0 && processed_color.g == 0 && processed_color.b == 0) + std::memset(m_staging_buffer->GetMap(), processed_color.a, m_staging_buffer->GetSize()); + else + { + for(std::size_t y = 0; y < m_height; y++) + { + for(std::size_t x = 0; x < m_width; x++) + m_staging_buffer->GetMap()[y * m_width + x] = processed_color; + } + } } } @@ -324,8 +339,6 @@ namespace mlx MLX_PROFILE_FUNCTION(); if(!m_has_been_modified) return; - std::memcpy(m_staging_buffer->GetMap(), m_cpu_buffer.data(), m_cpu_buffer.size() * sizeof(mlx_color)); - VkImageLayout old_layout = m_layout; TransitionLayout(VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, cmd); kvfCopyBufferToImage(cmd, Image::Get(), m_staging_buffer->Get(), m_staging_buffer->GetOffset(), VK_IMAGE_ASPECT_COLOR_BIT, { m_width, m_height, 1 }); @@ -360,9 +373,7 @@ namespace mlx VkFence fence = kvfCreateFence(RenderCore::Get().GetDevice()); kvfSubmitSingleTimeCommandBuffer(RenderCore::Get().GetDevice(), cmd, KVF_GRAPHICS_QUEUE, fence); kvfDestroyFence(RenderCore::Get().GetDevice(), fence); - - m_cpu_buffer.resize(m_width * m_height); - std::memcpy(m_cpu_buffer.data(), m_staging_buffer->GetMap(), m_cpu_buffer.size() * sizeof(mlx_color)); + kvfDestroyCommandBuffer(RenderCore::Get().GetDevice(), cmd); } Texture* StbTextureLoad(const std::filesystem::path& file, int* w, int* h) diff --git a/runtime/Sources/Renderer/Memory.cpp b/runtime/Sources/Renderer/Memory.cpp index 16734c2..0a257be 100644 --- a/runtime/Sources/Renderer/Memory.cpp +++ 
b/runtime/Sources/Renderer/Memory.cpp @@ -1,4 +1,3 @@ -#include "vulkan/vulkan_core.h" #include #define VMA_IMPLEMENTATION #ifdef MLX_COMPILER_CLANG diff --git a/runtime/Sources/Renderer/RenderCore.cpp b/runtime/Sources/Renderer/RenderCore.cpp index 9a91b71..4bddadc 100644 --- a/runtime/Sources/Renderer/RenderCore.cpp +++ b/runtime/Sources/Renderer/RenderCore.cpp @@ -214,6 +214,7 @@ namespace mlx MLX_LOAD_FUNCTION(vkDestroyShaderModule); MLX_LOAD_FUNCTION(vkDeviceWaitIdle); MLX_LOAD_FUNCTION(vkEndCommandBuffer); + MLX_LOAD_FUNCTION(vkFreeCommandBuffers); MLX_LOAD_FUNCTION(vkGetDeviceQueue); MLX_LOAD_FUNCTION(vkGetImageSubresourceLayout); MLX_LOAD_FUNCTION(vkQueueSubmit); diff --git a/runtime/Sources/Renderer/Swapchain.cpp b/runtime/Sources/Renderer/Swapchain.cpp index b4b3117..b0b5616 100644 --- a/runtime/Sources/Renderer/Swapchain.cpp +++ b/runtime/Sources/Renderer/Swapchain.cpp @@ -102,6 +102,7 @@ namespace mlx VkFence fence = kvfCreateFence(RenderCore::Get().GetDevice()); kvfSubmitSingleTimeCommandBuffer(RenderCore::Get().GetDevice(), cmd, KVF_GRAPHICS_QUEUE, fence); kvfDestroyFence(RenderCore::Get().GetDevice(), fence); + kvfDestroyCommandBuffer(RenderCore::Get().GetDevice(), cmd); m_resize = false; DebugLog("Vulkan: swapchain created"); } diff --git a/third_party/kvf.h b/third_party/kvf.h index fe1c172..3aa5486 100755 --- a/third_party/kvf.h +++ b/third_party/kvf.h @@ -167,12 +167,13 @@ VkFramebuffer kvfCreateFramebuffer(VkDevice device, VkRenderPass renderpass, VkI VkExtent2D kvfGetFramebufferSize(VkFramebuffer buffer); void kvfDestroyFramebuffer(VkDevice device, VkFramebuffer framebuffer); -VkCommandBuffer kvfCreateCommandBuffer(VkDevice device); -VkCommandBuffer kvfCreateCommandBufferLeveled(VkDevice device, VkCommandBufferLevel level); +VkCommandBuffer kvfCreateCommandBuffer(VkDevice device); // Uses internal command pool, not thread safe +VkCommandBuffer kvfCreateCommandBufferLeveled(VkDevice device, VkCommandBufferLevel level); // Same void kvfBeginCommandBuffer(VkCommandBuffer buffer, VkCommandBufferUsageFlags flags); void kvfEndCommandBuffer(VkCommandBuffer buffer); void kvfSubmitCommandBuffer(VkDevice device, VkCommandBuffer buffer, KvfQueueType queue, VkSemaphore signal, VkSemaphore wait, VkFence fence, VkPipelineStageFlags* stages); void kvfSubmitSingleTimeCommandBuffer(VkDevice device, VkCommandBuffer buffer, KvfQueueType queue, VkFence fence); +void kvfDestroyCommandBuffer(VkDevice device, VkCommandBuffer buffer); VkAttachmentDescription kvfBuildAttachmentDescription(KvfImageType type, VkFormat format, VkImageLayout initial, VkImageLayout final, bool clear, VkSampleCountFlagBits samples); #ifndef KVF_NO_KHR @@ -313,6 +314,7 @@ void kvfCheckVk(VkResult result); KVF_DEFINE_VULKAN_FUNCTION_PROTOTYPE(vkDestroyShaderModule); KVF_DEFINE_VULKAN_FUNCTION_PROTOTYPE(vkDeviceWaitIdle); KVF_DEFINE_VULKAN_FUNCTION_PROTOTYPE(vkEndCommandBuffer); + KVF_DEFINE_VULKAN_FUNCTION_PROTOTYPE(vkFreeCommandBuffers); KVF_DEFINE_VULKAN_FUNCTION_PROTOTYPE(vkGetDeviceQueue); KVF_DEFINE_VULKAN_FUNCTION_PROTOTYPE(vkGetImageSubresourceLayout); KVF_DEFINE_VULKAN_FUNCTION_PROTOTYPE(vkQueueSubmit); @@ -2416,6 +2418,29 @@ void kvfSubmitSingleTimeCommandBuffer(VkDevice device, VkCommandBuffer buffer, K kvfWaitForFence(device, fence); } +void kvfDestroyCommandBuffer(VkDevice device, VkCommandBuffer buffer) +{ + if(buffer == VK_NULL_HANDLE) + return; + KVF_ASSERT(device != VK_NULL_HANDLE); + __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); + KVF_ASSERT(kvf_device != NULL); + + for(size_t i = 
0; i < kvf_device->cmd_buffers_size; i++) + { + if(kvf_device->cmd_buffers[i] == buffer) + { + KVF_GET_DEVICE_FUNCTION(vkFreeCommandBuffers)(kvf_device->device, kvf_device->cmd_pool, 1, &buffer); + // Shift the elements to fill the gap + for(size_t j = i; j < kvf_device->cmd_buffers_size - 1; j++) + kvf_device->cmd_buffers[j] = kvf_device->cmd_buffers[j + 1]; + kvf_device->cmd_buffers_size--; // decrement the tracked count, not the array pointer + return; + } + } + KVF_ASSERT(false && "could not find command buffer in internal device"); +} + VkAttachmentDescription kvfBuildAttachmentDescription(KvfImageType type, VkFormat format, VkImageLayout initial, VkImageLayout final, bool clear, VkSampleCountFlagBits samples) { VkAttachmentDescription attachment = {}; From c6fefe2ffa4f5f527c173608257e50ff67ee650e Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Sun, 29 Dec 2024 00:41:00 +0000 Subject: [PATCH 116/131] [BOT] update dependencies --- third_party/vulkan/vulkan_handles.hpp | 106 ----- third_party/vulkan/vulkan_structs.hpp | 641 +++++++++++++++++--------- 2 files changed, 413 insertions(+), 334 deletions(-) diff --git a/third_party/vulkan/vulkan_handles.hpp b/third_party/vulkan/vulkan_handles.hpp index c983286..304e9ed 100644 --- a/third_party/vulkan/vulkan_handles.hpp +++ b/third_party/vulkan/vulkan_handles.hpp @@ -11633,112 +11633,6 @@ namespace VULKAN_HPP_NAMESPACE //=== VK_KHR_swapchain === - template - VULKAN_HPP_NODISCARD Result mapMemory2( const VULKAN_HPP_NAMESPACE::MemoryMapInfo * pMemoryMapInfo, - void ** ppData, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD typename ResultValueType::type mapMemory2( const VULKAN_HPP_NAMESPACE::MemoryMapInfo & memoryMapInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - - template - VULKAN_HPP_NODISCARD Result unmapMemory2( const VULKAN_HPP_NAMESPACE::MemoryUnmapInfo * pMemoryUnmapInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - typename ResultValueType::type unmapMemory2( const VULKAN_HPP_NAMESPACE::MemoryUnmapInfo & memoryUnmapInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - - template - void getRenderingAreaGranularity( const VULKAN_HPP_NAMESPACE::RenderingAreaInfo * pRenderingAreaInfo, - VULKAN_HPP_NAMESPACE::Extent2D * pGranularity, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Extent2D - getRenderingAreaGranularity( const VULKAN_HPP_NAMESPACE::RenderingAreaInfo & renderingAreaInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - - template - void getImageSubresourceLayout( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfo * pInfo, - VULKAN_HPP_NAMESPACE::SubresourceLayout2 * pLayout, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SubresourceLayout2 - getImageSubresourceLayout( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfo & info, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; - template - VULKAN_HPP_NODISCARD 
VULKAN_HPP_NAMESPACE::StructureChain - getImageSubresourceLayout( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfo & info, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - - template - void getImageSubresourceLayout2( VULKAN_HPP_NAMESPACE::Image image, - const VULKAN_HPP_NAMESPACE::ImageSubresource2 * pSubresource, - VULKAN_HPP_NAMESPACE::SubresourceLayout2 * pLayout, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SubresourceLayout2 - getImageSubresourceLayout2( VULKAN_HPP_NAMESPACE::Image image, - const VULKAN_HPP_NAMESPACE::ImageSubresource2 & subresource, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; - template - VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain - getImageSubresourceLayout2( VULKAN_HPP_NAMESPACE::Image image, - const VULKAN_HPP_NAMESPACE::ImageSubresource2 & subresource, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - - template - VULKAN_HPP_NODISCARD Result copyMemoryToImage( const VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfo * pCopyMemoryToImageInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type - copyMemoryToImage( const VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfo & copyMemoryToImageInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - - template - VULKAN_HPP_NODISCARD Result copyImageToMemory( const VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfo * pCopyImageToMemoryInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type - copyImageToMemory( const VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfo & copyImageToMemoryInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - - template - VULKAN_HPP_NODISCARD Result copyImageToImage( const VULKAN_HPP_NAMESPACE::CopyImageToImageInfo * pCopyImageToImageInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type - copyImageToImage( const VULKAN_HPP_NAMESPACE::CopyImageToImageInfo & copyImageToImageInfo, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - - template - VULKAN_HPP_NODISCARD Result transitionImageLayout( uint32_t transitionCount, - const VULKAN_HPP_NAMESPACE::HostImageLayoutTransitionInfo * pTransitions, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT; -#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE - template - VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType::type - transitionImageLayout( VULKAN_HPP_NAMESPACE::ArrayProxy const & transitions, - Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const; -#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ - - //=== VK_KHR_swapchain === - 
template VULKAN_HPP_NODISCARD Result createSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, diff --git a/third_party/vulkan/vulkan_structs.hpp b/third_party/vulkan/vulkan_structs.hpp index 1b59c70..c4da3cd 100644 --- a/third_party/vulkan/vulkan_structs.hpp +++ b/third_party/vulkan/vulkan_structs.hpp @@ -53211,6 +53211,7 @@ namespace VULKAN_HPP_NAMESPACE return ( strcmp( layerName, rhs.layerName ) == 0 ) && ( specVersion == rhs.specVersion ) && ( implementationVersion == rhs.implementationVersion ) && ( strcmp( description, rhs.description ) == 0 ); } +#endif bool operator!=( LayerProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { @@ -53478,7 +53479,9 @@ namespace VULKAN_HPP_NAMESPACE bool operator!=( LayerSettingEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return !operator==( rhs ); + return ( ( pLayerName == rhs.pLayerName ) || ( strcmp( pLayerName, rhs.pLayerName ) == 0 ) ) && + ( ( pSettingName == rhs.pSettingName ) || ( strcmp( pSettingName, rhs.pSettingName ) == 0 ) ) && ( type == rhs.type ) && + ( valueCount == rhs.valueCount ) && ( pValues == rhs.pValues ); } public: @@ -53701,6 +53704,7 @@ namespace VULKAN_HPP_NAMESPACE return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( pView == rhs.pView ); # endif } +# endif bool operator!=( MacOSSurfaceCreateInfoMVK const & rhs ) const VULKAN_HPP_NOEXCEPT { @@ -56812,7 +56816,7 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 MicromapVersionInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - firstIndex = firstIndex_; + pNext = pNext_; return *this; } @@ -58532,7 +58536,6 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } -#endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eOutOfBandQueueTypeInfoNV; @@ -58619,6 +58622,7 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif public: uint32_t presentID = {}; @@ -58650,6 +58654,8 @@ namespace VULKAN_HPP_NAMESPACE PerformanceConfigurationAcquireInfoINTEL( VkPerformanceConfigurationAcquireInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT : PerformanceConfigurationAcquireInfoINTEL( *reinterpret_cast( &rhs ) ) { + int32 = int32_; + return *this; } PerformanceConfigurationAcquireInfoINTEL & operator=( PerformanceConfigurationAcquireInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default; @@ -58709,10 +58715,17 @@ namespace VULKAN_HPP_NAMESPACE return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( type == rhs.type ); # endif } +#endif bool operator!=( PerformanceConfigurationAcquireInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT { - return !operator==( rhs ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( strcmp( name, rhs.name ) == 0 ) && + ( strcmp( category, rhs.category ) == 0 ) && ( strcmp( description, rhs.description ) == 0 ); + } + + bool operator!=( PerformanceCounterDescriptionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #endif @@ -58755,8 +58768,6 @@ namespace VULKAN_HPP_NAMESPACE PerformanceCounterDescriptionKHR( VkPerformanceCounterDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT : PerformanceCounterDescriptionKHR( *reinterpret_cast( &rhs ) ) { - int32 = int32_; - return *this; } PerformanceCounterDescriptionKHR & operator=( PerformanceCounterDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; @@ -58823,7 +58834,7 @@ namespace VULKAN_HPP_NAMESPACE 
bool operator!=( PerformanceCounterDescriptionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return !operator==( rhs ); } public: @@ -58861,6 +58872,8 @@ namespace VULKAN_HPP_NAMESPACE , storage{ storage_ } , uuid{ uuid_ } { + uint32 = uint32_; + return *this; } VULKAN_HPP_CONSTEXPR_14 PerformanceCounterKHR( PerformanceCounterKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; @@ -58869,6 +58882,7 @@ namespace VULKAN_HPP_NAMESPACE : PerformanceCounterKHR( *reinterpret_cast( &rhs ) ) { } +#endif /*VULKAN_HPP_NO_UNION_SETTERS*/ PerformanceCounterKHR & operator=( PerformanceCounterKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ @@ -58883,6 +58897,7 @@ namespace VULKAN_HPP_NAMESPACE { return *reinterpret_cast( this ); } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkPerformanceCounterKHR &() VULKAN_HPP_NOEXCEPT { @@ -59056,12 +59071,12 @@ namespace VULKAN_HPP_NAMESPACE marker = marker_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkPerformanceMarkerInfoINTEL const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } +#endif /*VULKAN_HPP_NO_UNION_SETTERS*/ operator VkPerformanceMarkerInfoINTEL &() VULKAN_HPP_NOEXCEPT { @@ -59078,7 +59093,6 @@ namespace VULKAN_HPP_NAMESPACE { return std::tie( sType, pNext, marker ); } -#endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PerformanceMarkerInfoINTEL const & ) const = default; @@ -59170,7 +59184,6 @@ namespace VULKAN_HPP_NAMESPACE parameter = parameter_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkPerformanceOverrideInfoINTEL const &() const VULKAN_HPP_NOEXCEPT { @@ -59309,7 +59322,36 @@ namespace VULKAN_HPP_NAMESPACE bool operator!=( PerformanceQuerySubmitInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return !operator==( rhs ); + } + + PerformanceValueINTEL & operator=( PerformanceValueINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PerformanceValueINTEL & operator=( VkPerformanceValueINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPerformanceValueINTEL const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPerformanceValueINTEL &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( type, data ); } #endif @@ -59367,6 +59409,7 @@ namespace VULKAN_HPP_NAMESPACE marker = marker_; return *this; } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkPerformanceStreamMarkerInfoINTEL const &() const VULKAN_HPP_NOEXCEPT { @@ -60737,6 +60780,7 @@ namespace VULKAN_HPP_NAMESPACE attachmentFeedbackLoopLayout = attachmentFeedbackLoopLayout_; return *this; } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkPhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { @@ -61276,6 +61320,7 @@ namespace VULKAN_HPP_NAMESPACE *this = *reinterpret_cast( &rhs ); return *this; } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT @@ -61724,7 +61769,6 @@ namespace VULKAN_HPP_NAMESPACE deviceCoherentMemory = deviceCoherentMemory_; return *this; } -#endif 
/*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkPhysicalDeviceCoherentMemoryFeaturesAMD const &() const VULKAN_HPP_NOEXCEPT { @@ -62226,7 +62270,6 @@ namespace VULKAN_HPP_NAMESPACE inheritedConditionalRendering = inheritedConditionalRendering_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkPhysicalDeviceConditionalRenderingFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { @@ -62242,15 +62285,7 @@ namespace VULKAN_HPP_NAMESPACE # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { @@ -62524,7 +62559,6 @@ namespace VULKAN_HPP_NAMESPACE cooperativeMatrixBlockLoads = cooperativeMatrixBlockLoads_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkPhysicalDeviceCooperativeMatrix2FeaturesNV const &() const VULKAN_HPP_NOEXCEPT { @@ -62754,6 +62788,7 @@ namespace VULKAN_HPP_NAMESPACE cooperativeMatrixRobustBufferAccess = cooperativeMatrixRobustBufferAccess_; return *this; } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkPhysicalDeviceCooperativeMatrixFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT { @@ -62862,7 +62897,6 @@ namespace VULKAN_HPP_NAMESPACE cooperativeMatrixRobustBufferAccess = cooperativeMatrixRobustBufferAccess_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkPhysicalDeviceCooperativeMatrixFeaturesNV const &() const VULKAN_HPP_NOEXCEPT { @@ -63042,8 +63076,8 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple @@ -63052,14 +63086,14 @@ namespace VULKAN_HPP_NAMESPACE { return std::tie( sType, pNext, cooperativeMatrixSupportedStages ); } -#endif +# endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceCooperativeMatrixPropertiesNV const & ) const = default; #else bool operator==( PhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( cooperativeMatrixSupportedStages == rhs.cooperativeMatrixSupportedStages ); @@ -63070,7 +63104,7 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } -#endif +# endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCooperativeMatrixPropertiesNV; @@ -63083,6 +63117,7 @@ namespace VULKAN_HPP_NAMESPACE { using Type = PhysicalDeviceCooperativeMatrixPropertiesNV; }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ struct PhysicalDeviceCopyMemoryIndirectFeaturesNV { @@ -63127,7 +63162,6 @@ namespace VULKAN_HPP_NAMESPACE indirectCopy = indirectCopy_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkPhysicalDeviceCopyMemoryIndirectFeaturesNV const &() const VULKAN_HPP_NOEXCEPT { @@ -63139,8 +63173,8 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple @@ -63149,14 +63183,14 @@ namespace VULKAN_HPP_NAMESPACE { return std::tie( sType, pNext, indirectCopy ); } -#endif +# endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceCopyMemoryIndirectFeaturesNV const & ) const = default; #else bool operator==( 
PhysicalDeviceCopyMemoryIndirectFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( indirectCopy == rhs.indirectCopy ); @@ -63167,7 +63201,7 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } -#endif +# endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCopyMemoryIndirectFeaturesNV; @@ -63180,6 +63214,7 @@ namespace VULKAN_HPP_NAMESPACE { using Type = PhysicalDeviceCopyMemoryIndirectFeaturesNV; }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ struct PhysicalDeviceCopyMemoryIndirectPropertiesNV { @@ -63319,9 +63354,20 @@ namespace VULKAN_HPP_NAMESPACE { return *reinterpret_cast( this ); } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION + operator VkPhysicalDeviceCudaKernelLaunchFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCudaKernelLaunchFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple @@ -63330,14 +63376,14 @@ namespace VULKAN_HPP_NAMESPACE { return std::tie( sType, pNext, cornerSampledImage ); } -#endif +# endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceCornerSampledImageFeaturesNV const & ) const = default; #else bool operator==( PhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( cornerSampledImage == rhs.cornerSampledImage ); @@ -63348,7 +63394,7 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } -#endif +# endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCornerSampledImageFeaturesNV; @@ -63361,6 +63407,7 @@ namespace VULKAN_HPP_NAMESPACE { using Type = PhysicalDeviceCornerSampledImageFeaturesNV; }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ struct PhysicalDeviceCoverageReductionModeFeaturesNV { @@ -63397,8 +63444,6 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCoverageReductionModeFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); - return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCoverageReductionModeFeaturesNV & @@ -63407,7 +63452,6 @@ namespace VULKAN_HPP_NAMESPACE coverageReductionMode = coverageReductionMode_; return *this; } -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkPhysicalDeviceCoverageReductionModeFeaturesNV const &() const VULKAN_HPP_NOEXCEPT { @@ -63504,6 +63548,7 @@ namespace VULKAN_HPP_NAMESPACE cubicRangeClamp = cubicRangeClamp_; return *this; } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkPhysicalDeviceCubicClampFeaturesQCOM const &() const VULKAN_HPP_NOEXCEPT { @@ -63515,8 +63560,8 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } -# if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple @@ -63525,14 +63570,14 @@ namespace VULKAN_HPP_NAMESPACE { return std::tie( sType, pNext, cubicRangeClamp ); } -# 
endif +#endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceCubicClampFeaturesQCOM const & ) const = default; #else bool operator==( PhysicalDeviceCubicClampFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( cubicRangeClamp == rhs.cubicRangeClamp ); @@ -63543,7 +63588,7 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } -# endif +#endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCubicClampFeaturesQCOM; @@ -63556,7 +63601,6 @@ namespace VULKAN_HPP_NAMESPACE { using Type = PhysicalDeviceCubicClampFeaturesQCOM; }; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ struct PhysicalDeviceCubicWeightsFeaturesQCOM { @@ -63602,7 +63646,6 @@ namespace VULKAN_HPP_NAMESPACE selectableCubicWeights = selectableCubicWeights_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkPhysicalDeviceCubicWeightsFeaturesQCOM const &() const VULKAN_HPP_NOEXCEPT { @@ -64084,7 +64127,6 @@ namespace VULKAN_HPP_NAMESPACE dedicatedAllocationImageAliasing = dedicatedAllocationImageAliasing_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const &() const VULKAN_HPP_NOEXCEPT { @@ -64316,7 +64358,6 @@ namespace VULKAN_HPP_NAMESPACE depthClampControl = depthClampControl_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkPhysicalDeviceDepthClampControlFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { @@ -64430,18 +64471,12 @@ namespace VULKAN_HPP_NAMESPACE # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, depthClampZeroOne ); } -#endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceDepthClampZeroOneFeaturesEXT const & ) const = default; @@ -64457,9 +64492,9 @@ namespace VULKAN_HPP_NAMESPACE bool operator!=( PhysicalDeviceDepthClampZeroOneFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return !operator==( rhs ); + shaderStorageImageArrayNonUniformIndexing = shaderStorageImageArrayNonUniformIndexing_; + return *this; } -#endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDepthClampZeroOneFeaturesEXT; @@ -64486,6 +64521,8 @@ namespace VULKAN_HPP_NAMESPACE : pNext{ pNext_ } , depthClipControl{ depthClipControl_ } { + descriptorBindingSampledImageUpdateAfterBind = descriptorBindingSampledImageUpdateAfterBind_; + return *this; } VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthClipControlFeaturesEXT( PhysicalDeviceDepthClipControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; @@ -64493,6 +64530,8 @@ namespace VULKAN_HPP_NAMESPACE PhysicalDeviceDepthClipControlFeaturesEXT( VkPhysicalDeviceDepthClipControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceDepthClipControlFeaturesEXT( *reinterpret_cast( &rhs ) ) { + descriptorBindingStorageBufferUpdateAfterBind = descriptorBindingStorageBufferUpdateAfterBind_; + return *this; } PhysicalDeviceDepthClipControlFeaturesEXT & operator=( PhysicalDeviceDepthClipControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; @@ -64517,6 +64556,7 @@ namespace VULKAN_HPP_NAMESPACE depthClipControl = depthClipControl_; return *this; } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkPhysicalDeviceDepthClipControlFeaturesEXT const 
&() const VULKAN_HPP_NOEXCEPT { @@ -64729,7 +64769,6 @@ namespace VULKAN_HPP_NAMESPACE { return std::tie( sType, pNext, supportedDepthResolveModes, supportedStencilResolveModes, independentResolveNone, independentResolve ); } -#endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceDepthStencilResolveProperties const & ) const = default; @@ -64747,7 +64786,8 @@ namespace VULKAN_HPP_NAMESPACE bool operator!=( PhysicalDeviceDepthStencilResolveProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { - return !operator==( rhs ); + shaderStorageImageArrayNonUniformIndexing = shaderStorageImageArrayNonUniformIndexing_; + return *this; } #endif @@ -64781,6 +64821,8 @@ namespace VULKAN_HPP_NAMESPACE : pNext{ pNext_ } , combinedImageSamplerDensityMapDescriptorSize{ combinedImageSamplerDensityMapDescriptorSize_ } { + shaderInputAttachmentArrayNonUniformIndexing = shaderInputAttachmentArrayNonUniformIndexing_; + return *this; } VULKAN_HPP_CONSTEXPR @@ -64789,6 +64831,8 @@ namespace VULKAN_HPP_NAMESPACE PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT( VkPhysicalDeviceDescriptorBufferDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT( *reinterpret_cast( &rhs ) ) { + shaderUniformTexelBufferArrayNonUniformIndexing = shaderUniformTexelBufferArrayNonUniformIndexing_; + return *this; } PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT & @@ -64837,9 +64881,9 @@ namespace VULKAN_HPP_NAMESPACE bool operator!=( PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - shaderStorageImageArrayNonUniformIndexing = shaderStorageImageArrayNonUniformIndexing_; - return *this; + return !operator==( rhs ); } +#endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDescriptorBufferDensityMapPropertiesEXT; @@ -64872,7 +64916,7 @@ namespace VULKAN_HPP_NAMESPACE , descriptorBufferImageLayoutIgnored{ descriptorBufferImageLayoutIgnored_ } , descriptorBufferPushDescriptors{ descriptorBufferPushDescriptors_ } { - descriptorBindingSampledImageUpdateAfterBind = descriptorBindingSampledImageUpdateAfterBind_; + descriptorBindingPartiallyBound = descriptorBindingPartiallyBound_; return *this; } @@ -64881,7 +64925,7 @@ namespace VULKAN_HPP_NAMESPACE PhysicalDeviceDescriptorBufferFeaturesEXT( VkPhysicalDeviceDescriptorBufferFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceDescriptorBufferFeaturesEXT( *reinterpret_cast( &rhs ) ) { - descriptorBindingStorageBufferUpdateAfterBind = descriptorBindingStorageBufferUpdateAfterBind_; + descriptorBindingVariableDescriptorCount = descriptorBindingVariableDescriptorCount_; return *this; } @@ -64893,6 +64937,7 @@ namespace VULKAN_HPP_NAMESPACE *this = *reinterpret_cast( &rhs ); return *this; } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorBufferFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT @@ -65097,6 +65142,7 @@ namespace VULKAN_HPP_NAMESPACE { return *reinterpret_cast( this ); } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION @@ -65706,7 +65752,6 @@ namespace VULKAN_HPP_NAMESPACE { return *reinterpret_cast( this ); } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION @@ -65993,7 +66038,6 @@ namespace VULKAN_HPP_NAMESPACE descriptorSetHostMapping = descriptorSetHostMapping_; return 
*this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE const &() const VULKAN_HPP_NOEXCEPT { @@ -66211,8 +66255,7 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; - return *this; + return *reinterpret_cast( this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT & @@ -66228,6 +66271,7 @@ namespace VULKAN_HPP_NAMESPACE dynamicGeneratedPipelineLayout = dynamicGeneratedPipelineLayout_; return *this; } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkPhysicalDeviceDeviceGeneratedCommandsFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { @@ -66597,8 +66641,8 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & ) const = default; #else bool operator==( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxGraphicsShaderGroupCount == rhs.maxGraphicsShaderGroupCount ) && @@ -66652,7 +66696,7 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } -#endif +# endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDeviceGeneratedCommandsPropertiesNV; @@ -66673,6 +66717,7 @@ namespace VULKAN_HPP_NAMESPACE { using Type = PhysicalDeviceDeviceGeneratedCommandsPropertiesNV; }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ struct PhysicalDeviceDeviceMemoryReportFeaturesEXT { @@ -66730,8 +66775,8 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple @@ -66740,14 +66785,14 @@ namespace VULKAN_HPP_NAMESPACE { return std::tie( sType, pNext, deviceMemoryReport ); } -#endif +# endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceDeviceMemoryReportFeaturesEXT const & ) const = default; #else bool operator==( PhysicalDeviceDeviceMemoryReportFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceMemoryReport == rhs.deviceMemoryReport ); @@ -66758,7 +66803,7 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } -#endif +# endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDeviceMemoryReportFeaturesEXT; @@ -66771,6 +66816,7 @@ namespace VULKAN_HPP_NAMESPACE { using Type = PhysicalDeviceDeviceMemoryReportFeaturesEXT; }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ struct PhysicalDeviceDiagnosticsConfigFeaturesNV { @@ -66851,6 +66897,7 @@ namespace VULKAN_HPP_NAMESPACE return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( diagnosticsConfig == rhs.diagnosticsConfig ); # endif } +#endif bool operator!=( PhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { @@ -66989,8 +67036,7 @@ 
namespace VULKAN_HPP_NAMESPACE # if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDisplacementMicromapFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; - return *this; + return *reinterpret_cast( this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDisplacementMicromapFeaturesNV & @@ -67705,6 +67751,7 @@ namespace VULKAN_HPP_NAMESPACE exclusiveScissor = exclusiveScissor_; return *this; } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkPhysicalDeviceExclusiveScissorFeaturesNV const &() const VULKAN_HPP_NOEXCEPT { @@ -67726,7 +67773,6 @@ namespace VULKAN_HPP_NAMESPACE { return std::tie( sType, pNext, exclusiveScissor ); } -#endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceExclusiveScissorFeaturesNV const & ) const = default; @@ -67742,7 +67788,8 @@ namespace VULKAN_HPP_NAMESPACE bool operator!=( PhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { - return !operator==( rhs ); + extendedDynamicState3PolygonMode = extendedDynamicState3PolygonMode_; + return *this; } #endif @@ -67775,6 +67822,8 @@ namespace VULKAN_HPP_NAMESPACE , extendedDynamicState2LogicOp{ extendedDynamicState2LogicOp_ } , extendedDynamicState2PatchControlPoints{ extendedDynamicState2PatchControlPoints_ } { + extendedDynamicState3RasterizationSamples = extendedDynamicState3RasterizationSamples_; + return *this; } VULKAN_HPP_CONSTEXPR @@ -67783,6 +67832,8 @@ namespace VULKAN_HPP_NAMESPACE PhysicalDeviceExtendedDynamicState2FeaturesEXT( VkPhysicalDeviceExtendedDynamicState2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceExtendedDynamicState2FeaturesEXT( *reinterpret_cast( &rhs ) ) { + extendedDynamicState3SampleMask = extendedDynamicState3SampleMask_; + return *this; } PhysicalDeviceExtendedDynamicState2FeaturesEXT & operator=( PhysicalDeviceExtendedDynamicState2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; @@ -70658,6 +70709,7 @@ namespace VULKAN_HPP_NAMESPACE ( maxFragmentDensityTexelSize == rhs.maxFragmentDensityTexelSize ) && ( fragmentDensityInvocations == rhs.fragmentDensityInvocations ); # endif } +#endif bool operator!=( PhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { @@ -70693,6 +70745,7 @@ namespace VULKAN_HPP_NAMESPACE , fragmentShaderBarycentric{ fragmentShaderBarycentric_ } { } +#endif VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShaderBarycentricFeaturesKHR( PhysicalDeviceFragmentShaderBarycentricFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; @@ -70701,7 +70754,6 @@ namespace VULKAN_HPP_NAMESPACE : PhysicalDeviceFragmentShaderBarycentricFeaturesKHR( *reinterpret_cast( &rhs ) ) { } -#endif PhysicalDeviceFragmentShaderBarycentricFeaturesKHR & operator=( PhysicalDeviceFragmentShaderBarycentricFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; @@ -70712,6 +70764,7 @@ namespace VULKAN_HPP_NAMESPACE *this = *reinterpret_cast( &rhs ); return *this; } +#endif #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShaderBarycentricFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT @@ -70719,7 +70772,6 @@ namespace VULKAN_HPP_NAMESPACE pNext = pNext_; return *this; } -#endif VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShaderBarycentricFeaturesKHR & setFragmentShaderBarycentric( VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderBarycentric_ ) VULKAN_HPP_NOEXCEPT @@ -70733,6 +70785,7 @@ namespace VULKAN_HPP_NAMESPACE { return *reinterpret_cast( this ); } +#endif 
/*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR &() VULKAN_HPP_NOEXCEPT { @@ -70762,7 +70815,7 @@ namespace VULKAN_HPP_NAMESPACE return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fragmentShaderBarycentric == rhs.fragmentShaderBarycentric ); # endif } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif bool operator!=( PhysicalDeviceFragmentShaderBarycentricFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { @@ -79483,14 +79536,14 @@ namespace VULKAN_HPP_NAMESPACE } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPhysicalDeviceMultiviewFeatures const &() const VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } - operator VkPhysicalDeviceMultiviewFeatures &() VULKAN_HPP_NOEXCEPT + operator VkPhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT &() VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + return *reinterpret_cast( this ); } #if defined( VULKAN_HPP_USE_REFLECT ) @@ -79573,6 +79626,7 @@ namespace VULKAN_HPP_NAMESPACE *this = *reinterpret_cast( &rhs ); return *this; } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiviewFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT @@ -80225,7 +80279,6 @@ namespace VULKAN_HPP_NAMESPACE { return std::tie( sType, pNext, nestedCommandBuffer, nestedCommandBufferRendering, nestedCommandBufferSimultaneousUse ); } -#endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceNestedCommandBufferFeaturesEXT const & ) const = default; @@ -80292,6 +80345,7 @@ namespace VULKAN_HPP_NAMESPACE *this = *reinterpret_cast( &rhs ); return *this; } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceNestedCommandBufferPropertiesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT @@ -80420,11 +80474,7 @@ namespace VULKAN_HPP_NAMESPACE # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { @@ -81044,7 +81094,6 @@ namespace VULKAN_HPP_NAMESPACE pageableDeviceLocalMemory = pageableDeviceLocalMemory_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { @@ -81152,7 +81201,6 @@ namespace VULKAN_HPP_NAMESPACE dynamicPipelineLayout = dynamicPipelineLayout_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkPhysicalDevicePerStageDescriptorSetFeaturesNV const &() const VULKAN_HPP_NOEXCEPT { @@ -81168,7 +81216,7 @@ namespace VULKAN_HPP_NAMESPACE # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { @@ -81459,7 +81507,7 @@ namespace VULKAN_HPP_NAMESPACE # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { @@ -82692,8 +82740,7 @@ namespace VULKAN_HPP_NAMESPACE bool operator!=( PhysicalDevicePortabilitySubsetFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - multisampleArrayImage = multisampleArrayImage_; - return *this; + return !operator==( rhs ); } # endif @@ -82738,8 +82785,6 @@ namespace VULKAN_HPP_NAMESPACE : pNext{ pNext_ } , minVertexInputBindingStrideAlignment{ minVertexInputBindingStrideAlignment_ } { - mutableComparisonSamplers = 
mutableComparisonSamplers_; - return *this; } VULKAN_HPP_CONSTEXPR PhysicalDevicePortabilitySubsetPropertiesKHR( PhysicalDevicePortabilitySubsetPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; @@ -82747,8 +82792,6 @@ namespace VULKAN_HPP_NAMESPACE PhysicalDevicePortabilitySubsetPropertiesKHR( VkPhysicalDevicePortabilitySubsetPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDevicePortabilitySubsetPropertiesKHR( *reinterpret_cast( &rhs ) ) { - pointPolygons = pointPolygons_; - return *this; } PhysicalDevicePortabilitySubsetPropertiesKHR & operator=( PhysicalDevicePortabilitySubsetPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; @@ -82763,7 +82806,7 @@ namespace VULKAN_HPP_NAMESPACE # if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetPropertiesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { - separateStencilMaskRef = separateStencilMaskRef_; + pNext = pNext_; return *this; } @@ -82784,7 +82827,6 @@ namespace VULKAN_HPP_NAMESPACE { return *reinterpret_cast( this ); } -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ # if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION @@ -82812,7 +82854,8 @@ namespace VULKAN_HPP_NAMESPACE bool operator!=( PhysicalDevicePortabilitySubsetPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { - return !operator==( rhs ); + pointPolygons = pointPolygons_; + return *this; } # endif @@ -82842,6 +82885,8 @@ namespace VULKAN_HPP_NAMESPACE : pNext{ pNext_ } , presentBarrier{ presentBarrier_ } { + shaderSampleRateInterpolationFunctions = shaderSampleRateInterpolationFunctions_; + return *this; } VULKAN_HPP_CONSTEXPR PhysicalDevicePresentBarrierFeaturesNV( PhysicalDevicePresentBarrierFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; @@ -82849,6 +82894,8 @@ namespace VULKAN_HPP_NAMESPACE PhysicalDevicePresentBarrierFeaturesNV( VkPhysicalDevicePresentBarrierFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDevicePresentBarrierFeaturesNV( *reinterpret_cast( &rhs ) ) { + tessellationPointMode = tessellationPointMode_; + return *this; } PhysicalDevicePresentBarrierFeaturesNV & operator=( PhysicalDevicePresentBarrierFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; @@ -82859,6 +82906,7 @@ namespace VULKAN_HPP_NAMESPACE *this = *reinterpret_cast( &rhs ); return *this; } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentBarrierFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT @@ -82883,17 +82931,6 @@ namespace VULKAN_HPP_NAMESPACE { return *reinterpret_cast( this ); } -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - - operator VkPhysicalDevicePortabilitySubsetPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDevicePortabilitySubsetPropertiesKHR &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } # if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION @@ -82921,7 +82958,8 @@ namespace VULKAN_HPP_NAMESPACE bool operator!=( PhysicalDevicePresentBarrierFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { - return !operator==( rhs ); + multisampleArrayImage = multisampleArrayImage_; + return *this; } # endif @@ -82950,6 +82988,8 @@ namespace VULKAN_HPP_NAMESPACE : pNext{ pNext_ } , presentId{ presentId_ } { + mutableComparisonSamplers = mutableComparisonSamplers_; + return *this; } VULKAN_HPP_CONSTEXPR PhysicalDevicePresentIdFeaturesKHR( PhysicalDevicePresentIdFeaturesKHR const & rhs ) 
VULKAN_HPP_NOEXCEPT = default; @@ -82957,6 +82997,8 @@ namespace VULKAN_HPP_NAMESPACE PhysicalDevicePresentIdFeaturesKHR( VkPhysicalDevicePresentIdFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDevicePresentIdFeaturesKHR( *reinterpret_cast( &rhs ) ) { + pointPolygons = pointPolygons_; + return *this; } PhysicalDevicePresentIdFeaturesKHR & operator=( PhysicalDevicePresentIdFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; @@ -82971,7 +83013,7 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePresentIdFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; + separateStencilMaskRef = separateStencilMaskRef_; return *this; } @@ -82980,7 +83022,7 @@ namespace VULKAN_HPP_NAMESPACE presentId = presentId_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkPhysicalDevicePresentIdFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT { @@ -82991,25 +83033,26 @@ namespace VULKAN_HPP_NAMESPACE { return *reinterpret_cast( this ); } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else - std::tuple -# endif +# else + std::tuple +# endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, presentId ); } -#endif +# endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDevicePresentIdFeaturesKHR const & ) const = default; #else bool operator==( PhysicalDevicePresentIdFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( presentId == rhs.presentId ); @@ -83020,7 +83063,7 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } -#endif +# endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePresentIdFeaturesKHR; @@ -83033,6 +83076,7 @@ namespace VULKAN_HPP_NAMESPACE { using Type = PhysicalDevicePresentIdFeaturesKHR; }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ struct PhysicalDevicePresentModeFifoLatestReadyFeaturesEXT { @@ -83091,9 +83135,31 @@ namespace VULKAN_HPP_NAMESPACE { return *reinterpret_cast( this ); } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION + operator VkPhysicalDevicePresentBarrierFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevicePresentBarrierFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDevicePortabilitySubsetPropertiesKHR const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDevicePortabilitySubsetPropertiesKHR &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple @@ -83102,14 +83168,14 @@ namespace VULKAN_HPP_NAMESPACE { return std::tie( sType, pNext, presentModeFifoLatestReady ); } -#endif +# endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDevicePresentModeFifoLatestReadyFeaturesEXT const & ) const = default; #else bool operator==( PhysicalDevicePresentModeFifoLatestReadyFeaturesEXT const & 
rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( presentModeFifoLatestReady == rhs.presentModeFifoLatestReady ); @@ -83120,7 +83186,7 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } -#endif +# endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePresentModeFifoLatestReadyFeaturesEXT; @@ -83133,6 +83199,7 @@ namespace VULKAN_HPP_NAMESPACE { using Type = PhysicalDevicePresentModeFifoLatestReadyFeaturesEXT; }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ struct PhysicalDevicePresentWaitFeaturesKHR { @@ -83624,7 +83691,11 @@ namespace VULKAN_HPP_NAMESPACE # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { @@ -84069,7 +84140,6 @@ namespace VULKAN_HPP_NAMESPACE formatRgba10x6WithoutYCbCrSampler = formatRgba10x6WithoutYCbCrSampler_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { @@ -84085,7 +84155,7 @@ namespace VULKAN_HPP_NAMESPACE # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { @@ -84190,7 +84260,6 @@ namespace VULKAN_HPP_NAMESPACE rasterizationOrderStencilAttachmentAccess = rasterizationOrderStencilAttachmentAccess_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { @@ -84411,7 +84480,11 @@ namespace VULKAN_HPP_NAMESPACE # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { @@ -85198,7 +85271,13 @@ namespace VULKAN_HPP_NAMESPACE # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { @@ -85506,7 +85585,6 @@ namespace VULKAN_HPP_NAMESPACE relaxedLineRasterization = relaxedLineRasterization_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkPhysicalDeviceRelaxedLineRasterizationFeaturesIMG const &() const VULKAN_HPP_NOEXCEPT { @@ -85905,7 +85983,6 @@ namespace VULKAN_HPP_NAMESPACE nullDescriptor = nullDescriptor_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkPhysicalDeviceRobustness2FeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { @@ -86091,6 +86168,7 @@ namespace VULKAN_HPP_NAMESPACE *this = *reinterpret_cast( &rhs ); return *this; } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkPhysicalDeviceSampleLocationsPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT { @@ -86295,7 +86373,6 @@ namespace VULKAN_HPP_NAMESPACE samplerYcbcrConversion = samplerYcbcrConversion_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkPhysicalDeviceSamplerYcbcrConversionFeatures const &() const VULKAN_HPP_NOEXCEPT { @@ -86395,7 +86472,6 @@ namespace VULKAN_HPP_NAMESPACE scalarBlockLayout = scalarBlockLayout_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkPhysicalDeviceScalarBlockLayoutFeatures const &() const VULKAN_HPP_NOEXCEPT { @@ -86411,7 +86487,7 @@ namespace VULKAN_HPP_NAMESPACE # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { @@ -86987,6 +87063,7 @@ namespace VULKAN_HPP_NAMESPACE shaderImageFloat32AtomicMinMax = shaderImageFloat32AtomicMinMax_; return *this; } +#endif 
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloat2FeaturesEXT & setSparseImageFloat32AtomicMinMax( VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32AtomicMinMax_ ) VULKAN_HPP_NOEXCEPT @@ -86994,7 +87071,6 @@ namespace VULKAN_HPP_NAMESPACE sparseImageFloat32AtomicMinMax = sparseImageFloat32AtomicMinMax_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { @@ -87286,6 +87362,7 @@ namespace VULKAN_HPP_NAMESPACE sparseImageFloat32Atomics, sparseImageFloat32AtomicAdd ); } +#endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceShaderAtomicFloatFeaturesEXT const & ) const = default; @@ -87307,8 +87384,7 @@ namespace VULKAN_HPP_NAMESPACE bool operator!=( PhysicalDeviceShaderAtomicFloatFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - shaderBufferFloat64AtomicAdd = shaderBufferFloat64AtomicAdd_; - return *this; + return !operator==( rhs ); } #endif @@ -87350,7 +87426,7 @@ namespace VULKAN_HPP_NAMESPACE , shaderBufferInt64Atomics{ shaderBufferInt64Atomics_ } , shaderSharedInt64Atomics{ shaderSharedInt64Atomics_ } { - shaderSharedFloat32AtomicAdd = shaderSharedFloat32AtomicAdd_; + pNext = pNext_; return *this; } @@ -87359,7 +87435,7 @@ namespace VULKAN_HPP_NAMESPACE PhysicalDeviceShaderAtomicInt64Features( VkPhysicalDeviceShaderAtomicInt64Features const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceShaderAtomicInt64Features( *reinterpret_cast( &rhs ) ) { - shaderSharedFloat64AtomicAdd = shaderSharedFloat64AtomicAdd_; + shaderBufferFloat16Atomics = shaderBufferFloat16Atomics_; return *this; } @@ -87375,7 +87451,7 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicInt64Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { - sparseImageFloat32Atomics = sparseImageFloat32Atomics_; + shaderBufferFloat16AtomicMinMax = shaderBufferFloat16AtomicMinMax_; return *this; } @@ -87392,7 +87468,6 @@ namespace VULKAN_HPP_NAMESPACE shaderSharedInt64Atomics = shaderSharedInt64Atomics_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkPhysicalDeviceShaderAtomicInt64Features const &() const VULKAN_HPP_NOEXCEPT { @@ -87414,7 +87489,6 @@ namespace VULKAN_HPP_NAMESPACE { return std::tie( sType, pNext, shaderBufferInt64Atomics, shaderSharedInt64Atomics ); } -#endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceShaderAtomicInt64Features const & ) const = default; @@ -87431,7 +87505,8 @@ namespace VULKAN_HPP_NAMESPACE bool operator!=( PhysicalDeviceShaderAtomicInt64Features const & rhs ) const VULKAN_HPP_NOEXCEPT { - return !operator==( rhs ); + shaderSharedFloat32AtomicMinMax = shaderSharedFloat32AtomicMinMax_; + return *this; } #endif @@ -87465,6 +87540,8 @@ namespace VULKAN_HPP_NAMESPACE , shaderSubgroupClock{ shaderSubgroupClock_ } , shaderDeviceClock{ shaderDeviceClock_ } { + shaderSharedFloat64AtomicMinMax = shaderSharedFloat64AtomicMinMax_; + return *this; } VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderClockFeaturesKHR( PhysicalDeviceShaderClockFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; @@ -87472,6 +87549,8 @@ namespace VULKAN_HPP_NAMESPACE PhysicalDeviceShaderClockFeaturesKHR( VkPhysicalDeviceShaderClockFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceShaderClockFeaturesKHR( *reinterpret_cast( &rhs ) ) { + shaderImageFloat32AtomicMinMax = shaderImageFloat32AtomicMinMax_; + return *this; } 
PhysicalDeviceShaderClockFeaturesKHR & operator=( PhysicalDeviceShaderClockFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; @@ -87482,6 +87561,7 @@ namespace VULKAN_HPP_NAMESPACE *this = *reinterpret_cast( &rhs ); return *this; } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderClockFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT @@ -87607,24 +87687,17 @@ namespace VULKAN_HPP_NAMESPACE { return *reinterpret_cast( this ); } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkPhysicalDeviceShaderCoreBuiltinsFeaturesARM &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION - auto -# else - std::tuple -# endif - reflect() const VULKAN_HPP_NOEXCEPT + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & + setShaderBufferFloat64AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64AtomicAdd_ ) VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, shaderCoreBuiltins ); } -#endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceShaderCoreBuiltinsFeaturesARM const & ) const = default; @@ -87640,7 +87713,8 @@ namespace VULKAN_HPP_NAMESPACE bool operator!=( PhysicalDeviceShaderCoreBuiltinsFeaturesARM const & rhs ) const VULKAN_HPP_NOEXCEPT { - return !operator==( rhs ); + shaderSharedFloat32AtomicAdd = shaderSharedFloat32AtomicAdd_; + return *this; } #endif @@ -87673,6 +87747,8 @@ namespace VULKAN_HPP_NAMESPACE , shaderCoreCount{ shaderCoreCount_ } , shaderWarpsPerCore{ shaderWarpsPerCore_ } { + shaderSharedFloat64Atomics = shaderSharedFloat64Atomics_; + return *this; } VULKAN_HPP_CONSTEXPR @@ -87681,6 +87757,8 @@ namespace VULKAN_HPP_NAMESPACE PhysicalDeviceShaderCoreBuiltinsPropertiesARM( VkPhysicalDeviceShaderCoreBuiltinsPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceShaderCoreBuiltinsPropertiesARM( *reinterpret_cast( &rhs ) ) { + shaderSharedFloat64AtomicAdd = shaderSharedFloat64AtomicAdd_; + return *this; } PhysicalDeviceShaderCoreBuiltinsPropertiesARM & operator=( PhysicalDeviceShaderCoreBuiltinsPropertiesARM const & rhs ) VULKAN_HPP_NOEXCEPT = default; @@ -87712,7 +87790,6 @@ namespace VULKAN_HPP_NAMESPACE { return std::tie( sType, pNext, shaderCoreMask, shaderCoreCount, shaderWarpsPerCore ); } -#endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceShaderCoreBuiltinsPropertiesARM const & ) const = default; @@ -87729,7 +87806,8 @@ namespace VULKAN_HPP_NAMESPACE bool operator!=( PhysicalDeviceShaderCoreBuiltinsPropertiesARM const & rhs ) const VULKAN_HPP_NOEXCEPT { - return !operator==( rhs ); + shaderBufferFloat64AtomicAdd = shaderBufferFloat64AtomicAdd_; + return *this; } #endif @@ -87762,6 +87840,8 @@ namespace VULKAN_HPP_NAMESPACE , shaderCoreFeatures{ shaderCoreFeatures_ } , activeComputeUnitCount{ activeComputeUnitCount_ } { + shaderSharedFloat32AtomicAdd = shaderSharedFloat32AtomicAdd_; + return *this; } VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCoreProperties2AMD( PhysicalDeviceShaderCoreProperties2AMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; @@ -87769,6 +87849,8 @@ namespace VULKAN_HPP_NAMESPACE PhysicalDeviceShaderCoreProperties2AMD( VkPhysicalDeviceShaderCoreProperties2AMD const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceShaderCoreProperties2AMD( *reinterpret_cast( &rhs ) ) { + shaderSharedFloat64AtomicAdd = shaderSharedFloat64AtomicAdd_; + return *this; } PhysicalDeviceShaderCoreProperties2AMD & 
operator=( PhysicalDeviceShaderCoreProperties2AMD const & rhs ) VULKAN_HPP_NOEXCEPT = default; @@ -88031,6 +88113,7 @@ namespace VULKAN_HPP_NAMESPACE { return *reinterpret_cast( this ); } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkPhysicalDeviceShaderCorePropertiesARM &() VULKAN_HPP_NOEXCEPT { @@ -88555,7 +88638,6 @@ namespace VULKAN_HPP_NAMESPACE maxExecutionGraphShaderOutputNodes = maxExecutionGraphShaderOutputNodes_; return *this; } -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderEnqueuePropertiesAMDX & setMaxExecutionGraphShaderPayloadSize( uint32_t maxExecutionGraphShaderPayloadSize_ ) VULKAN_HPP_NOEXCEPT @@ -88717,6 +88799,7 @@ namespace VULKAN_HPP_NAMESPACE shaderExpectAssume = shaderExpectAssume_; return *this; } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkPhysicalDeviceShaderExpectAssumeFeatures const &() const VULKAN_HPP_NOEXCEPT { @@ -88728,11 +88811,17 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } - VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderEnqueuePropertiesAMDX & - setMaxExecutionGraphShaderPayloadCount( uint32_t maxExecutionGraphShaderPayloadCount_ ) VULKAN_HPP_NOEXCEPT +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, shaderExpectAssume ); } +#endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceShaderExpectAssumeFeatures const & ) const = default; @@ -88748,8 +88837,7 @@ namespace VULKAN_HPP_NAMESPACE bool operator!=( PhysicalDeviceShaderExpectAssumeFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { - maxExecutionGraphWorkgroupCount = maxExecutionGraphWorkgroupCount_; - return *this; + return !operator==( rhs ); } #endif @@ -88782,7 +88870,6 @@ namespace VULKAN_HPP_NAMESPACE , shaderFloat16{ shaderFloat16_ } , shaderInt8{ shaderInt8_ } { - return *reinterpret_cast( this ); } VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderFloat16Int8Features( PhysicalDeviceShaderFloat16Int8Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; @@ -88790,9 +88877,7 @@ namespace VULKAN_HPP_NAMESPACE PhysicalDeviceShaderFloat16Int8Features( VkPhysicalDeviceShaderFloat16Int8Features const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceShaderFloat16Int8Features( *reinterpret_cast( &rhs ) ) { - return !operator==( rhs ); } -# endif PhysicalDeviceShaderFloat16Int8Features & operator=( PhysicalDeviceShaderFloat16Int8Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ @@ -88821,20 +88906,25 @@ namespace VULKAN_HPP_NAMESPACE shaderInt8 = shaderInt8_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkPhysicalDeviceShaderFloat16Int8Features const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkPhysicalDeviceShaderFloat16Int8Features &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION + operator VkPhysicalDeviceShaderEnqueueFeaturesAMDX &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple @@ -88843,14 +88933,14 @@ namespace VULKAN_HPP_NAMESPACE { return std::tie( sType, pNext, shaderFloat16, shaderInt8 ); } -#endif +# endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceShaderFloat16Int8Features const & ) 
const = default; #else bool operator==( PhysicalDeviceShaderFloat16Int8Features const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderFloat16 == rhs.shaderFloat16 ) && ( shaderInt8 == rhs.shaderInt8 ); @@ -88861,7 +88951,7 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } -#endif +# endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderFloat16Int8Features; @@ -88875,6 +88965,7 @@ namespace VULKAN_HPP_NAMESPACE { using Type = PhysicalDeviceShaderFloat16Int8Features; }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ using PhysicalDeviceFloat16Int8FeaturesKHR = PhysicalDeviceShaderFloat16Int8Features; using PhysicalDeviceShaderFloat16Int8FeaturesKHR = PhysicalDeviceShaderFloat16Int8Features; @@ -88923,36 +89014,83 @@ namespace VULKAN_HPP_NAMESPACE shaderFloatControls2 = shaderFloatControls2_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkPhysicalDeviceShaderFloatControls2Features const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkPhysicalDeviceShaderFloatControls2Features &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderEnqueuePropertiesAMDX & + setMaxExecutionGraphShaderPayloadCount( uint32_t maxExecutionGraphShaderPayloadCount_ ) VULKAN_HPP_NOEXCEPT + { + maxExecutionGraphShaderPayloadCount = maxExecutionGraphShaderPayloadCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderEnqueuePropertiesAMDX & + setExecutionGraphDispatchAddressAlignment( uint32_t executionGraphDispatchAddressAlignment_ ) VULKAN_HPP_NOEXCEPT + { + executionGraphDispatchAddressAlignment = executionGraphDispatchAddressAlignment_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderEnqueuePropertiesAMDX & + setMaxExecutionGraphWorkgroupCount( std::array maxExecutionGraphWorkgroupCount_ ) VULKAN_HPP_NOEXCEPT + { + maxExecutionGraphWorkgroupCount = maxExecutionGraphWorkgroupCount_; + return *this; + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderEnqueuePropertiesAMDX & + setMaxExecutionGraphWorkgroups( uint32_t maxExecutionGraphWorkgroups_ ) VULKAN_HPP_NOEXCEPT + { + maxExecutionGraphWorkgroups = maxExecutionGraphWorkgroups_; + return *this; + } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceShaderEnqueuePropertiesAMDX const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceShaderEnqueuePropertiesAMDX &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else - std::tuple -# endif +# else + std::tuple const &, + uint32_t const &> +# endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, shaderFloatControls2 ); } -#endif +# endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceShaderFloatControls2Features const & ) const = default; #else bool operator==( PhysicalDeviceShaderFloatControls2Features const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == 
rhs.pNext ) && ( shaderFloatControls2 == rhs.shaderFloatControls2 ); @@ -88963,7 +89101,7 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } -#endif +# endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderFloatControls2Features; @@ -88976,6 +89114,7 @@ namespace VULKAN_HPP_NAMESPACE { using Type = PhysicalDeviceShaderFloatControls2Features; }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ using PhysicalDeviceShaderFloatControls2FeaturesKHR = PhysicalDeviceShaderFloatControls2Features; @@ -89033,7 +89172,6 @@ namespace VULKAN_HPP_NAMESPACE sparseImageInt64Atomics = sparseImageInt64Atomics_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { @@ -89055,7 +89193,6 @@ namespace VULKAN_HPP_NAMESPACE { return std::tie( sType, pNext, shaderImageInt64Atomics, sparseImageInt64Atomics ); } -#endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & ) const = default; @@ -89072,7 +89209,8 @@ namespace VULKAN_HPP_NAMESPACE bool operator!=( PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return !operator==( rhs ); + maxExecutionGraphWorkgroupCount = maxExecutionGraphWorkgroupCount_; + return *this; } #endif @@ -89102,6 +89240,7 @@ namespace VULKAN_HPP_NAMESPACE : pNext{ pNext_ } , imageFootprint{ imageFootprint_ } { + return *reinterpret_cast( this ); } VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderImageFootprintFeaturesNV( PhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; @@ -89109,7 +89248,9 @@ namespace VULKAN_HPP_NAMESPACE PhysicalDeviceShaderImageFootprintFeaturesNV( VkPhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceShaderImageFootprintFeaturesNV( *reinterpret_cast( &rhs ) ) { + return !operator==( rhs ); } +# endif PhysicalDeviceShaderImageFootprintFeaturesNV & operator=( PhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ @@ -89613,6 +89754,7 @@ namespace VULKAN_HPP_NAMESPACE { return *reinterpret_cast( this ); } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL &() VULKAN_HPP_NOEXCEPT { @@ -89642,6 +89784,7 @@ namespace VULKAN_HPP_NAMESPACE return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( shaderIntegerFunctions2 == rhs.shaderIntegerFunctions2 ); # endif } +#endif bool operator!=( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT { @@ -89806,7 +89949,6 @@ namespace VULKAN_HPP_NAMESPACE shaderModuleIdentifier = shaderModuleIdentifier_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkPhysicalDeviceShaderModuleIdentifierFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { @@ -89822,7 +89964,38 @@ namespace VULKAN_HPP_NAMESPACE # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { @@ -90376,7 +90549,6 @@ namespace VULKAN_HPP_NAMESPACE shaderReplicatedComposites = shaderReplicatedComposites_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkPhysicalDeviceShaderReplicatedCompositesFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { @@ -90392,7 +90564,8 @@ namespace VULKAN_HPP_NAMESPACE # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std:: + tuple const &, uint32_t 
const &> # endif reflect() const VULKAN_HPP_NOEXCEPT { @@ -90863,8 +91036,6 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; - return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR & @@ -90873,7 +91044,6 @@ namespace VULKAN_HPP_NAMESPACE shaderSubgroupUniformControlFlow = shaderSubgroupUniformControlFlow_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT { @@ -91522,7 +91692,6 @@ namespace VULKAN_HPP_NAMESPACE tiling = tiling_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkPhysicalDeviceSparseImageFormatInfo2 const &() const VULKAN_HPP_NOEXCEPT { @@ -91740,7 +91909,6 @@ namespace VULKAN_HPP_NAMESPACE computeFullSubgroups = computeFullSubgroups_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkPhysicalDeviceSubgroupSizeControlFeatures const &() const VULKAN_HPP_NOEXCEPT { @@ -91943,7 +92111,6 @@ namespace VULKAN_HPP_NAMESPACE subpassMergeFeedback = subpassMergeFeedback_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkPhysicalDeviceSubpassMergeFeedbackFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { @@ -91959,7 +92126,12 @@ namespace VULKAN_HPP_NAMESPACE # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { @@ -92415,7 +92587,6 @@ namespace VULKAN_HPP_NAMESPACE synchronization2 = synchronization2_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkPhysicalDeviceSynchronization2Features const &() const VULKAN_HPP_NOEXCEPT { @@ -92431,7 +92602,7 @@ namespace VULKAN_HPP_NAMESPACE # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { @@ -92919,7 +93090,6 @@ namespace VULKAN_HPP_NAMESPACE timelineSemaphore = timelineSemaphore_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkPhysicalDeviceTimelineSemaphoreFeatures const &() const VULKAN_HPP_NOEXCEPT { @@ -92935,7 +93105,12 @@ namespace VULKAN_HPP_NAMESPACE # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { @@ -93154,7 +93329,6 @@ namespace VULKAN_HPP_NAMESPACE return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( strcmp( name, rhs.name ) == 0 ) && ( strcmp( version, rhs.version ) == 0 ) && ( purposes == rhs.purposes ) && ( strcmp( description, rhs.description ) == 0 ) && ( strcmp( layer, rhs.layer ) == 0 ); } -#endif bool operator!=( PhysicalDeviceToolProperties const & rhs ) const VULKAN_HPP_NOEXCEPT { @@ -93247,7 +93421,7 @@ namespace VULKAN_HPP_NAMESPACE # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { @@ -93462,8 +93636,7 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceUniformBufferStandardLayoutFeatures & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; - return *this; + return *reinterpret_cast( this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceUniformBufferStandardLayoutFeatures & @@ -93709,7 +93882,7 @@ namespace VULKAN_HPP_NAMESPACE # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { @@ 
-93972,7 +94145,6 @@ namespace VULKAN_HPP_NAMESPACE vertexAttributeRobustness = vertexAttributeRobustness_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkPhysicalDeviceVertexAttributeRobustnessFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { @@ -93988,7 +94160,7 @@ namespace VULKAN_HPP_NAMESPACE # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { @@ -94523,7 +94695,12 @@ namespace VULKAN_HPP_NAMESPACE # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { @@ -95102,7 +95279,6 @@ namespace VULKAN_HPP_NAMESPACE maxPerSetDescriptors, maxMemoryAllocationSize ); } -#endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceVulkan11Properties const & ) const = default; @@ -95125,7 +95301,8 @@ namespace VULKAN_HPP_NAMESPACE bool operator!=( PhysicalDeviceVulkan11Properties const & rhs ) const VULKAN_HPP_NOEXCEPT { - return !operator==( rhs ); + storageInputOutput16 = storageInputOutput16_; + return *this; } #endif @@ -95260,6 +95437,8 @@ namespace VULKAN_HPP_NAMESPACE , shaderOutputLayer{ shaderOutputLayer_ } , subgroupBroadcastDynamicId{ subgroupBroadcastDynamicId_ } { + variablePointersStorageBuffer = variablePointersStorageBuffer_; + return *this; } VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan12Features( PhysicalDeviceVulkan12Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; @@ -95267,6 +95446,8 @@ namespace VULKAN_HPP_NAMESPACE PhysicalDeviceVulkan12Features( VkPhysicalDeviceVulkan12Features const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceVulkan12Features( *reinterpret_cast( &rhs ) ) { + protectedMemory = protectedMemory_; + return *this; } PhysicalDeviceVulkan12Features & operator=( PhysicalDeviceVulkan12Features const & rhs ) VULKAN_HPP_NOEXCEPT = default; @@ -95277,6 +95458,7 @@ namespace VULKAN_HPP_NAMESPACE *this = *reinterpret_cast( &rhs ); return *this; } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT @@ -95317,6 +95499,7 @@ namespace VULKAN_HPP_NAMESPACE storagePushConstant8 = storagePushConstant8_; return *this; } +#endif VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setShaderBufferInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics_ ) VULKAN_HPP_NOEXCEPT @@ -95511,7 +95694,7 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & setUniformBufferStandardLayout( VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout_ ) VULKAN_HPP_NOEXCEPT { - separateDepthStencilLayouts = separateDepthStencilLayouts_; + shaderInt8 = shaderInt8_; return *this; } @@ -95600,7 +95783,6 @@ namespace VULKAN_HPP_NAMESPACE subgroupBroadcastDynamicId = subgroupBroadcastDynamicId_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkPhysicalDeviceVulkan12Features const &() const VULKAN_HPP_NOEXCEPT { @@ -95718,7 +95900,6 @@ namespace VULKAN_HPP_NAMESPACE shaderOutputLayer, subgroupBroadcastDynamicId ); } -#endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceVulkan12Features const & ) const = default; @@ -95768,7 +95949,8 @@ namespace VULKAN_HPP_NAMESPACE bool operator!=( PhysicalDeviceVulkan12Features const & rhs ) const VULKAN_HPP_NOEXCEPT { - return !operator==( rhs ); + descriptorBindingUniformTexelBufferUpdateAfterBind = descriptorBindingUniformTexelBufferUpdateAfterBind_; 
+ return *this; } #endif @@ -97043,6 +97225,7 @@ namespace VULKAN_HPP_NAMESPACE shaderSubgroupRotateClustered = shaderSubgroupRotateClustered_; return *this; } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Features & setShaderFloatControls2( VULKAN_HPP_NAMESPACE::Bool32 shaderFloatControls2_ ) VULKAN_HPP_NOEXCEPT { @@ -97061,6 +97244,7 @@ namespace VULKAN_HPP_NAMESPACE rectangularLines = rectangularLines_; return *this; } +#endif VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Features & setBresenhamLines( VULKAN_HPP_NAMESPACE::Bool32 bresenhamLines_ ) VULKAN_HPP_NOEXCEPT { @@ -97366,6 +97550,7 @@ namespace VULKAN_HPP_NAMESPACE *this = *reinterpret_cast( &rhs ); return *this; } +#endif operator VkPhysicalDeviceVulkan14Properties const &() const VULKAN_HPP_NOEXCEPT { @@ -97561,21 +97746,21 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkanMemoryModelFeatures & setVulkanMemoryModel( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel_ ) VULKAN_HPP_NOEXCEPT { - vulkanMemoryModel = vulkanMemoryModel_; + *this = *reinterpret_cast( &rhs ); return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkanMemoryModelFeatures & setVulkanMemoryModelDeviceScope( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope_ ) VULKAN_HPP_NOEXCEPT { - vulkanMemoryModelDeviceScope = vulkanMemoryModelDeviceScope_; + pNext = pNext_; return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkanMemoryModelFeatures & setVulkanMemoryModelAvailabilityVisibilityChains( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains_ ) VULKAN_HPP_NOEXCEPT { - vulkanMemoryModelAvailabilityVisibilityChains = vulkanMemoryModelAvailabilityVisibilityChains_; + workgroupMemoryExplicitLayout = workgroupMemoryExplicitLayout_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ From 9874c1fd5b1e27ddec44f2ba4b973cfa81da73eb Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Sun, 5 Jan 2025 00:40:51 +0000 Subject: [PATCH 117/131] [BOT] update dependencies --- third_party/vma.h | 549 +++++++++-- third_party/vulkan/vulkan_structs.hpp | 1252 ++++++++++++++----------- 2 files changed, 1180 insertions(+), 621 deletions(-) diff --git a/third_party/vma.h b/third_party/vma.h index 2307325..536ffcd 100644 --- a/third_party/vma.h +++ b/third_party/vma.h @@ -25,7 +25,7 @@ /** \mainpage Vulkan Memory Allocator -Version 3.1.0 +Version 3.2.0 Copyright (c) 2017-2024 Advanced Micro Devices, Inc. All rights reserved. \n License: MIT \n @@ -95,6 +95,7 @@ See also: [product page on GPUOpen](https://gpuopen.com/gaming-product/vulkan-me - \subpage enabling_buffer_device_address - \subpage vk_ext_memory_priority - \subpage vk_amd_device_coherent_memory + - \subpage vk_khr_external_memory_win32 - \subpage general_considerations - [Thread safety](@ref general_considerations_thread_safety) - [Versioning and compatibility](@ref general_considerations_versioning_and_compatibility) @@ -127,10 +128,14 @@ See documentation chapter: \ref statistics. extern "C" { #endif +#if !defined(VULKAN_H_) #include +#endif #if !defined(VMA_VULKAN_VERSION) - #if defined(VK_VERSION_1_3) + #if defined(VK_VERSION_1_4) + #define VMA_VULKAN_VERSION 1004000 + #elif defined(VK_VERSION_1_3) #define VMA_VULKAN_VERSION 1003000 #elif defined(VK_VERSION_1_2) #define VMA_VULKAN_VERSION 1002000 @@ -240,6 +245,15 @@ extern "C" { #endif #endif +// Defined to 1 when VK_KHR_external_memory_win32 device extension is defined in Vulkan headers. 
+#if !defined(VMA_EXTERNAL_MEMORY_WIN32) + #if VK_KHR_external_memory_win32 + #define VMA_EXTERNAL_MEMORY_WIN32 1 + #else + #define VMA_EXTERNAL_MEMORY_WIN32 0 + #endif +#endif + // Define these macros to decorate all public functions with additional code, // before and after returned type, appropriately. This may be useful for // exporting the functions when compiling VMA as a separate library. Example: @@ -459,6 +473,15 @@ typedef enum VmaAllocatorCreateFlagBits */ VMA_ALLOCATOR_CREATE_KHR_MAINTENANCE5_BIT = 0x00000100, + /** + Enables usage of VK_KHR_external_memory_win32 extension in the library. + + You should set this flag if you found available and enabled this device extension, + while creating Vulkan device passed as VmaAllocatorCreateInfo::device. + For more information, see \ref vk_khr_external_memory_win32. + */ + VMA_ALLOCATOR_CREATE_KHR_EXTERNAL_MEMORY_WIN32_BIT = 0x00000200, + VMA_ALLOCATOR_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF } VmaAllocatorCreateFlagBits; /// See #VmaAllocatorCreateFlagBits. @@ -1033,6 +1056,11 @@ typedef struct VmaVulkanFunctions /// Fetch from "vkGetDeviceImageMemoryRequirements" on Vulkan >= 1.3, but you can also fetch it from "vkGetDeviceImageMemoryRequirementsKHR" if you enabled extension VK_KHR_maintenance4. PFN_vkGetDeviceImageMemoryRequirementsKHR VMA_NULLABLE vkGetDeviceImageMemoryRequirements; #endif +#if VMA_EXTERNAL_MEMORY_WIN32 + PFN_vkGetMemoryWin32HandleKHR VMA_NULLABLE vkGetMemoryWin32HandleKHR; +#else + void* VMA_NULLABLE vkGetMemoryWin32HandleKHR; +#endif } VmaVulkanFunctions; /// Description of a Allocator to be created. @@ -1095,7 +1123,7 @@ typedef struct VmaAllocatorCreateInfo It must be a value in the format as created by macro `VK_MAKE_VERSION` or a constant like: `VK_API_VERSION_1_1`, `VK_API_VERSION_1_0`. The patch version number specified is ignored. Only the major and minor versions are considered. - Only versions 1.0, 1.1, 1.2, 1.3 are supported by the current implementation. + Only versions 1.0...1.4 are supported by the current implementation. Leaving it initialized to zero is equivalent to `VK_API_VERSION_1_0`. It must match the Vulkan version used by the application and supported on the selected physical device, so it must be no higher than `VkApplicationInfo::apiVersion` passed to `vkCreateInstance` @@ -1810,6 +1838,9 @@ VMA_CALL_PRE void VMA_CALL_POST vmaDestroyPool( \param allocator Allocator object. \param pool Pool object. \param[out] pPoolStats Statistics of specified pool. + +Note that when using the pool from multiple threads, returned information may immediately +become outdated. */ VMA_CALL_PRE void VMA_CALL_POST vmaGetPoolStatistics( VmaAllocator VMA_NOT_NULL allocator, @@ -2050,6 +2081,40 @@ VMA_CALL_PRE void VMA_CALL_POST vmaGetAllocationMemoryProperties( VmaAllocation VMA_NOT_NULL allocation, VkMemoryPropertyFlags* VMA_NOT_NULL pFlags); + +#if VMA_EXTERNAL_MEMORY_WIN32 +/** +\brief Given an allocation, returns Win32 handle that may be imported by other processes or APIs. + +\param hTargetProcess Must be a valid handle to target process or null. If it's null, the function returns + handle for the current process. +\param[out] pHandle Output parameter that returns the handle. + +The function fills `pHandle` with handle that can be used in target process. +The handle is fetched using function `vkGetMemoryWin32HandleKHR`. +When no longer needed, you must close it using: + +\code +CloseHandle(handle); +\endcode + +You can close it any time, before or after destroying the allocation object. 
+It is reference-counted internally by Windows. + +Note the handle is returned for the entire `VkDeviceMemory` block that the allocation belongs to. +If the allocation is sub-allocated from a larger block, you may need to consider the offset of the allocation +(VmaAllocationInfo::offset). + +If the function fails with `VK_ERROR_FEATURE_NOT_PRESENT` error code, please double-check +that VmaVulkanFunctions::vkGetMemoryWin32HandleKHR function pointer is set, e.g. either by using `VMA_DYNAMIC_VULKAN_FUNCTIONS` +or by manually passing it through VmaAllocatorCreateInfo::pVulkanFunctions. + +For more information, see chapter \ref vk_khr_external_memory_win32. +*/ +VMA_CALL_PRE VkResult VMA_CALL_POST vmaGetMemoryWin32Handle(VmaAllocator VMA_NOT_NULL allocator, + VmaAllocation VMA_NOT_NULL allocation, HANDLE hTargetProcess, HANDLE* VMA_NOT_NULL pHandle); +#endif // VMA_EXTERNAL_MEMORY_WIN32 + /** \brief Maps memory represented by given allocation and returns pointer to it. Maps memory represented by given allocation to make it accessible to CPU code. @@ -3097,7 +3162,7 @@ static void vma_aligned_free(void* VMA_NULLABLE ptr) std::shared_mutex m_Mutex; }; #define VMA_RW_MUTEX VmaRWMutex - #elif defined(_WIN32) && defined(WINVER) && WINVER >= 0x0600 + #elif defined(_WIN32) && defined(WINVER) && defined(SRWLOCK_INIT) && WINVER >= 0x0600 // Use SRWLOCK from WinAPI. // Minimum supported client = Windows Vista, server = Windows Server 2008. class VmaRWMutex @@ -3838,12 +3903,6 @@ struct VmaBufferImageUsage const VmaBufferImageUsage VmaBufferImageUsage::UNKNOWN = VmaBufferImageUsage(0); -static void swap(VmaBufferImageUsage& lhs, VmaBufferImageUsage& rhs) noexcept -{ - using std::swap; - swap(lhs.Value, rhs.Value); -} - VmaBufferImageUsage::VmaBufferImageUsage(const VkBufferCreateInfo &createInfo, bool useKhrMaintenance5) { @@ -6073,6 +6132,84 @@ private: #endif // _VMA_MAPPING_HYSTERESIS +#if VMA_EXTERNAL_MEMORY_WIN32 +class VmaWin32Handle +{ +public: + VmaWin32Handle() noexcept : m_hHandle(VMA_NULL) { } + explicit VmaWin32Handle(HANDLE hHandle) noexcept : m_hHandle(hHandle) { } + ~VmaWin32Handle() noexcept { if (m_hHandle != VMA_NULL) { ::CloseHandle(m_hHandle); } } + VMA_CLASS_NO_COPY_NO_MOVE(VmaWin32Handle) + +public: + // Strengthened + VkResult GetHandle(VkDevice device, VkDeviceMemory memory, PFN_vkGetMemoryWin32HandleKHR pvkGetMemoryWin32HandleKHR, HANDLE hTargetProcess, bool useMutex, HANDLE* pHandle) noexcept + { + *pHandle = VMA_NULL; + // Try to get handle first. + if (m_hHandle != VMA_NULL) + { + *pHandle = Duplicate(hTargetProcess); + return VK_SUCCESS; + } + + VkResult res = VK_SUCCESS; + // If failed, try to create it. 
+ { + VmaMutexLockWrite lock(m_Mutex, useMutex); + if (m_hHandle == VMA_NULL) + { + res = Create(device, memory, pvkGetMemoryWin32HandleKHR, &m_hHandle); + } + } + + *pHandle = Duplicate(hTargetProcess); + return res; + } + + operator bool() const noexcept { return m_hHandle != VMA_NULL; } +private: + // Not atomic + static VkResult Create(VkDevice device, VkDeviceMemory memory, PFN_vkGetMemoryWin32HandleKHR pvkGetMemoryWin32HandleKHR, HANDLE* pHandle) noexcept + { + VkResult res = VK_ERROR_FEATURE_NOT_PRESENT; + if (pvkGetMemoryWin32HandleKHR != VMA_NULL) + { + VkMemoryGetWin32HandleInfoKHR handleInfo{ }; + handleInfo.sType = VK_STRUCTURE_TYPE_MEMORY_GET_WIN32_HANDLE_INFO_KHR; + handleInfo.memory = memory; + handleInfo.handleType = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT_KHR; + res = pvkGetMemoryWin32HandleKHR(device, &handleInfo, pHandle); + } + return res; + } + HANDLE Duplicate(HANDLE hTargetProcess = VMA_NULL) const noexcept + { + if (!m_hHandle) + return m_hHandle; + + HANDLE hCurrentProcess = ::GetCurrentProcess(); + HANDLE hDupHandle = VMA_NULL; + if (!::DuplicateHandle(hCurrentProcess, m_hHandle, hTargetProcess ? hTargetProcess : hCurrentProcess, &hDupHandle, 0, FALSE, DUPLICATE_SAME_ACCESS)) + { + VMA_ASSERT(0 && "Failed to duplicate handle."); + } + return hDupHandle; + } +private: + HANDLE m_hHandle; + VMA_RW_MUTEX m_Mutex; // Protects access m_Handle +}; +#else +class VmaWin32Handle +{ + // ABI compatibility + void* placeholder = VMA_NULL; + VMA_RW_MUTEX placeholder2; +}; +#endif // VMA_EXTERNAL_MEMORY_WIN32 + + #ifndef _VMA_DEVICE_MEMORY_BLOCK /* Represents a single block of device memory (`VkDeviceMemory`) with all the @@ -6139,7 +6276,13 @@ public: VkDeviceSize allocationLocalOffset, VkImage hImage, const void* pNext); - +#if VMA_EXTERNAL_MEMORY_WIN32 + VkResult CreateWin32Handle( + const VmaAllocator hAllocator, + PFN_vkGetMemoryWin32HandleKHR pvkGetMemoryWin32HandleKHR, + HANDLE hTargetProcess, + HANDLE* pHandle)noexcept; +#endif // VMA_EXTERNAL_MEMORY_WIN32 private: VmaPool m_hParentPool; // VK_NULL_HANDLE if not belongs to custom pool. uint32_t m_MemoryTypeIndex; @@ -6155,10 +6298,18 @@ private: VmaMappingHysteresis m_MappingHysteresis; uint32_t m_MapCount; void* m_pMappedData; + + VmaWin32Handle m_Handle; }; #endif // _VMA_DEVICE_MEMORY_BLOCK #ifndef _VMA_ALLOCATION_T +struct VmaAllocationExtraData +{ + void* m_pMappedData = VMA_NULL; // Not null means memory is mapped. + VmaWin32Handle m_Handle; +}; + struct VmaAllocation_T { friend struct VmaDedicatedAllocationListItemTraits; @@ -6191,12 +6342,14 @@ public: bool mapped); // pMappedData not null means allocation is created with MAPPED flag. void InitDedicatedAllocation( + VmaAllocator allocator, VmaPool hParentPool, uint32_t memoryTypeIndex, VkDeviceMemory hMemory, VmaSuballocationType suballocationType, void* pMappedData, VkDeviceSize size); + void Destroy(VmaAllocator allocator); ALLOCATION_TYPE GetType() const { return (ALLOCATION_TYPE)m_Type; } VkDeviceSize GetAlignment() const { return m_Alignment; } @@ -6240,6 +6393,10 @@ public: void PrintParameters(class VmaJsonWriter& json) const; #endif +#if VMA_EXTERNAL_MEMORY_WIN32 + VkResult GetWin32Handle(VmaAllocator hAllocator, HANDLE hTargetProcess, HANDLE* hHandle) noexcept; +#endif // VMA_EXTERNAL_MEMORY_WIN32 + private: // Allocation out of VmaDeviceMemoryBlock. struct BlockAllocation @@ -6252,7 +6409,7 @@ private: { VmaPool m_hParentPool; // VK_NULL_HANDLE if not belongs to custom pool. 
VkDeviceMemory m_hMemory; - void* m_pMappedData; // Not null means memory is mapped. + VmaAllocationExtraData* m_ExtraData; VmaAllocation_T* m_Prev; VmaAllocation_T* m_Next; }; @@ -6277,6 +6434,8 @@ private: #if VMA_STATS_STRING_ENABLED VmaBufferImageUsage m_BufferImageUsage; // 0 if unknown. #endif + + void EnsureExtraData(VmaAllocator hAllocator); }; #endif // _VMA_ALLOCATION_T @@ -10075,6 +10234,7 @@ public: bool m_UseExtMemoryPriority; bool m_UseKhrMaintenance4; bool m_UseKhrMaintenance5; + bool m_UseKhrExternalMemoryWin32; const VkDevice m_hDevice; const VkInstance m_hInstance; const bool m_AllocationCallbacksSpecified; @@ -10438,7 +10598,7 @@ VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(VmaAllocator hAllocator) m_Id(0), m_hMemory(VK_NULL_HANDLE), m_MapCount(0), - m_pMappedData(VMA_NULL) {} + m_pMappedData(VMA_NULL){} VmaDeviceMemoryBlock::~VmaDeviceMemoryBlock() { @@ -10681,6 +10841,14 @@ VkResult VmaDeviceMemoryBlock::BindImageMemory( VmaMutexLock lock(m_MapAndBindMutex, hAllocator->m_UseMutex); return hAllocator->BindVulkanImage(m_hMemory, memoryOffset, hImage, pNext); } + +#if VMA_EXTERNAL_MEMORY_WIN32 +VkResult VmaDeviceMemoryBlock::CreateWin32Handle(const VmaAllocator hAllocator, PFN_vkGetMemoryWin32HandleKHR pvkGetMemoryWin32HandleKHR, HANDLE hTargetProcess, HANDLE* pHandle) noexcept +{ + VMA_ASSERT(pHandle); + return m_Handle.GetHandle(hAllocator->m_hDevice, m_hMemory, pvkGetMemoryWin32HandleKHR, hTargetProcess, hAllocator->m_UseMutex, pHandle); +} +#endif // VMA_EXTERNAL_MEMORY_WIN32 #endif // _VMA_DEVICE_MEMORY_BLOCK_FUNCTIONS #ifndef _VMA_ALLOCATION_T_FUNCTIONS @@ -10733,6 +10901,7 @@ void VmaAllocation_T::InitBlockAllocation( } void VmaAllocation_T::InitDedicatedAllocation( + VmaAllocator allocator, VmaPool hParentPool, uint32_t memoryTypeIndex, VkDeviceMemory hMemory, @@ -10747,16 +10916,29 @@ void VmaAllocation_T::InitDedicatedAllocation( m_Size = size; m_MemoryTypeIndex = memoryTypeIndex; m_SuballocationType = (uint8_t)suballocationType; - if(pMappedData != VMA_NULL) + m_DedicatedAllocation.m_ExtraData = VMA_NULL; + m_DedicatedAllocation.m_hParentPool = hParentPool; + m_DedicatedAllocation.m_hMemory = hMemory; + m_DedicatedAllocation.m_Prev = VMA_NULL; + m_DedicatedAllocation.m_Next = VMA_NULL; + + if (pMappedData != VMA_NULL) { VMA_ASSERT(IsMappingAllowed() && "Mapping is not allowed on this allocation! 
Please use one of the new VMA_ALLOCATION_CREATE_HOST_ACCESS_* flags when creating it."); m_Flags |= (uint8_t)FLAG_PERSISTENT_MAP; + EnsureExtraData(allocator); + m_DedicatedAllocation.m_ExtraData->m_pMappedData = pMappedData; + } +} + +void VmaAllocation_T::Destroy(VmaAllocator allocator) +{ + FreeName(allocator); + + if (GetType() == ALLOCATION_TYPE_DEDICATED) + { + vma_delete(allocator, m_DedicatedAllocation.m_ExtraData); } - m_DedicatedAllocation.m_hParentPool = hParentPool; - m_DedicatedAllocation.m_hMemory = hMemory; - m_DedicatedAllocation.m_pMappedData = pMappedData; - m_DedicatedAllocation.m_Prev = VMA_NULL; - m_DedicatedAllocation.m_Next = VMA_NULL; } void VmaAllocation_T::SetName(VmaAllocator hAllocator, const char* pName) @@ -10861,8 +11043,9 @@ void* VmaAllocation_T::GetMappedData() const } break; case ALLOCATION_TYPE_DEDICATED: - VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0 || IsPersistentMap())); - return m_DedicatedAllocation.m_pMappedData; + VMA_ASSERT((m_DedicatedAllocation.m_ExtraData != VMA_NULL && m_DedicatedAllocation.m_ExtraData->m_pMappedData != VMA_NULL) == + (m_MapCount != 0 || IsPersistentMap())); + return m_DedicatedAllocation.m_ExtraData != VMA_NULL ? m_DedicatedAllocation.m_ExtraData->m_pMappedData : VMA_NULL; default: VMA_ASSERT(0); return VMA_NULL; @@ -10903,12 +11086,14 @@ VkResult VmaAllocation_T::DedicatedAllocMap(VmaAllocator hAllocator, void** ppDa VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED); VMA_ASSERT(IsMappingAllowed() && "Mapping is not allowed on this allocation! Please use one of the new VMA_ALLOCATION_CREATE_HOST_ACCESS_* flags when creating it."); + EnsureExtraData(hAllocator); + if (m_MapCount != 0 || IsPersistentMap()) { if (m_MapCount < 0xFF) { - VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL); - *ppData = m_DedicatedAllocation.m_pMappedData; + VMA_ASSERT(m_DedicatedAllocation.m_ExtraData->m_pMappedData != VMA_NULL); + *ppData = m_DedicatedAllocation.m_ExtraData->m_pMappedData; ++m_MapCount; return VK_SUCCESS; } @@ -10929,7 +11114,7 @@ VkResult VmaAllocation_T::DedicatedAllocMap(VmaAllocator hAllocator, void** ppDa ppData); if (result == VK_SUCCESS) { - m_DedicatedAllocation.m_pMappedData = *ppData; + m_DedicatedAllocation.m_ExtraData->m_pMappedData = *ppData; m_MapCount = 1; } return result; @@ -10945,7 +11130,8 @@ void VmaAllocation_T::DedicatedAllocUnmap(VmaAllocator hAllocator) --m_MapCount; if (m_MapCount == 0 && !IsPersistentMap()) { - m_DedicatedAllocation.m_pMappedData = VMA_NULL; + VMA_ASSERT(m_DedicatedAllocation.m_ExtraData != VMA_NULL); + m_DedicatedAllocation.m_ExtraData->m_pMappedData = VMA_NULL; (*hAllocator->GetVulkanFunctions().vkUnmapMemory)( hAllocator->m_hDevice, m_DedicatedAllocation.m_hMemory); @@ -10981,8 +11167,33 @@ void VmaAllocation_T::PrintParameters(class VmaJsonWriter& json) const json.WriteString(m_pName); } } +#if VMA_EXTERNAL_MEMORY_WIN32 +VkResult VmaAllocation_T::GetWin32Handle(VmaAllocator hAllocator, HANDLE hTargetProcess, HANDLE* pHandle) noexcept +{ + auto pvkGetMemoryWin32HandleKHR = hAllocator->GetVulkanFunctions().vkGetMemoryWin32HandleKHR; + switch (m_Type) + { + case ALLOCATION_TYPE_BLOCK: + return m_BlockAllocation.m_Block->CreateWin32Handle(hAllocator, pvkGetMemoryWin32HandleKHR, hTargetProcess, pHandle); + case ALLOCATION_TYPE_DEDICATED: + EnsureExtraData(hAllocator); + return m_DedicatedAllocation.m_ExtraData->m_Handle.GetHandle(hAllocator->m_hDevice, m_DedicatedAllocation.m_hMemory, pvkGetMemoryWin32HandleKHR, hTargetProcess, 
hAllocator->m_UseMutex, pHandle); + default: + VMA_ASSERT(0); + return VK_ERROR_FEATURE_NOT_PRESENT; + } +} +#endif // VMA_EXTERNAL_MEMORY_WIN32 #endif // VMA_STATS_STRING_ENABLED +void VmaAllocation_T::EnsureExtraData(VmaAllocator hAllocator) +{ + if (m_DedicatedAllocation.m_ExtraData == VMA_NULL) + { + m_DedicatedAllocation.m_ExtraData = vma_new(hAllocator, VmaAllocationExtraData)(); + } +} + void VmaAllocation_T::FreeName(VmaAllocator hAllocator) { if(m_pName) @@ -11399,6 +11610,10 @@ void VmaBlockVector::Free(const VmaAllocation hAllocation) } IncrementallySortBlocks(); + + m_hAllocator->m_Budget.RemoveAllocation(m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex), hAllocation->GetSize()); + hAllocation->Destroy(m_hAllocator); + m_hAllocator->m_AllocationObjectAllocator.Free(hAllocation); } // Destruction of a free block. Deferred until this point, outside of mutex @@ -11409,9 +11624,6 @@ void VmaBlockVector::Free(const VmaAllocation hAllocation) pBlockToDelete->Destroy(m_hAllocator); vma_delete(m_hAllocator, pBlockToDelete); } - - m_hAllocator->m_Budget.RemoveAllocation(m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex), hAllocation->GetSize()); - m_hAllocator->m_AllocationObjectAllocator.Free(hAllocation); } VkDeviceSize VmaBlockVector::CalcMaxBlockSize() const @@ -12711,6 +12923,7 @@ VmaAllocator_T::VmaAllocator_T(const VmaAllocatorCreateInfo* pCreateInfo) : m_UseExtMemoryPriority((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_EXT_MEMORY_PRIORITY_BIT) != 0), m_UseKhrMaintenance4((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_KHR_MAINTENANCE4_BIT) != 0), m_UseKhrMaintenance5((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_KHR_MAINTENANCE5_BIT) != 0), + m_UseKhrExternalMemoryWin32((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_KHR_EXTERNAL_MEMORY_WIN32_BIT) != 0), m_hDevice(pCreateInfo->device), m_hInstance(pCreateInfo->instance), m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL), @@ -12766,23 +12979,17 @@ VmaAllocator_T::VmaAllocator_T(const VmaAllocatorCreateInfo* pCreateInfo) : VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT is set but required extension or Vulkan 1.2 is not available in your Vulkan header or its support in VMA has been disabled by a preprocessor macro."); } #endif +#if VMA_VULKAN_VERSION < 1004000 + VMA_ASSERT(m_VulkanApiVersion < VK_MAKE_VERSION(1, 4, 0) && "vulkanApiVersion >= VK_API_VERSION_1_4 but required Vulkan version is disabled by preprocessor macros."); +#endif #if VMA_VULKAN_VERSION < 1003000 - if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 3, 0)) - { - VMA_ASSERT(0 && "vulkanApiVersion >= VK_API_VERSION_1_3 but required Vulkan version is disabled by preprocessor macros."); - } + VMA_ASSERT(m_VulkanApiVersion < VK_MAKE_VERSION(1, 3, 0) && "vulkanApiVersion >= VK_API_VERSION_1_3 but required Vulkan version is disabled by preprocessor macros."); #endif #if VMA_VULKAN_VERSION < 1002000 - if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 2, 0)) - { - VMA_ASSERT(0 && "vulkanApiVersion >= VK_API_VERSION_1_2 but required Vulkan version is disabled by preprocessor macros."); - } + VMA_ASSERT(m_VulkanApiVersion < VK_MAKE_VERSION(1, 2, 0) && "vulkanApiVersion >= VK_API_VERSION_1_2 but required Vulkan version is disabled by preprocessor macros."); #endif #if VMA_VULKAN_VERSION < 1001000 - if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0)) - { - VMA_ASSERT(0 && "vulkanApiVersion >= VK_API_VERSION_1_1 but required Vulkan version is disabled by preprocessor macros."); - } + VMA_ASSERT(m_VulkanApiVersion < VK_MAKE_VERSION(1, 1, 0) && 
"vulkanApiVersion >= VK_API_VERSION_1_1 but required Vulkan version is disabled by preprocessor macros."); #endif #if !(VMA_MEMORY_PRIORITY) if(m_UseExtMemoryPriority) @@ -12802,6 +13009,19 @@ VmaAllocator_T::VmaAllocator_T(const VmaAllocatorCreateInfo* pCreateInfo) : VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_KHR_MAINTENANCE5_BIT is set but required extension is not available in your Vulkan header or its support in VMA has been disabled by a preprocessor macro."); } #endif +#if !(VMA_KHR_MAINTENANCE5) + if(m_UseKhrMaintenance5) + { + VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_KHR_MAINTENANCE5_BIT is set but required extension is not available in your Vulkan header or its support in VMA has been disabled by a preprocessor macro."); + } +#endif + +#if !(VMA_EXTERNAL_MEMORY_WIN32) + if(m_UseKhrExternalMemoryWin32) + { + VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_KHR_EXTERNAL_MEMORY_WIN32_BIT is set but required extension is not available in your Vulkan header or its support in VMA has been disabled by a preprocessor macro."); + } +#endif memset(&m_DeviceMemoryCallbacks, 0 ,sizeof(m_DeviceMemoryCallbacks)); memset(&m_PhysicalDeviceProperties, 0, sizeof(m_PhysicalDeviceProperties)); @@ -13026,7 +13246,9 @@ void VmaAllocator_T::ImportVulkanFunctions_Custom(const VmaVulkanFunctions* pVul VMA_COPY_IF_NOT_NULL(vkGetDeviceBufferMemoryRequirements); VMA_COPY_IF_NOT_NULL(vkGetDeviceImageMemoryRequirements); #endif - +#if VMA_EXTERNAL_MEMORY_WIN32 + VMA_COPY_IF_NOT_NULL(vkGetMemoryWin32HandleKHR); +#endif #undef VMA_COPY_IF_NOT_NULL } @@ -13128,7 +13350,12 @@ void VmaAllocator_T::ImportVulkanFunctions_Dynamic() VMA_FETCH_DEVICE_FUNC(vkGetDeviceImageMemoryRequirements, PFN_vkGetDeviceImageMemoryRequirementsKHR, "vkGetDeviceImageMemoryRequirementsKHR"); } #endif - +#if VMA_EXTERNAL_MEMORY_WIN32 + if (m_UseKhrExternalMemoryWin32) + { + VMA_FETCH_DEVICE_FUNC(vkGetMemoryWin32HandleKHR, PFN_vkGetMemoryWin32HandleKHR, "vkGetMemoryWin32HandleKHR"); + } +#endif #undef VMA_FETCH_DEVICE_FUNC #undef VMA_FETCH_INSTANCE_FUNC } @@ -13177,6 +13404,12 @@ void VmaAllocator_T::ValidateVulkanFunctions() VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties2KHR != VMA_NULL); } #endif +#if VMA_EXTERNAL_MEMORY_WIN32 + if (m_UseKhrExternalMemoryWin32) + { + VMA_ASSERT(m_VulkanFunctions.vkGetMemoryWin32HandleKHR != VMA_NULL); + } +#endif // Not validating these due to suspected driver bugs with these function // pointers being null despite correct extension or Vulkan version is enabled. 
@@ -13527,7 +13760,7 @@ VkResult VmaAllocator_T::AllocateDedicatedMemoryPage( } *pAllocation = m_AllocationObjectAllocator.Allocate(isMappingAllowed); - (*pAllocation)->InitDedicatedAllocation(pool, memTypeIndex, hMemory, suballocType, pMappedData, size); + (*pAllocation)->InitDedicatedAllocation(this, pool, memTypeIndex, hMemory, suballocType, pMappedData, size); if (isUserDataString) (*pAllocation)->SetName(this, (const char*)pUserData); else @@ -13863,8 +14096,6 @@ void VmaAllocator_T::FreeMemory( FillAllocation(allocation, VMA_ALLOCATION_FILL_PATTERN_DESTROYED); } - allocation->FreeName(this); - switch(allocation->GetType()) { case VmaAllocation_T::ALLOCATION_TYPE_BLOCK: @@ -14335,7 +14566,6 @@ VkResult VmaAllocator_T::Map(VmaAllocation hAllocation, void** ppData) } return res; } - VMA_FALLTHROUGH; // Fallthrough case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED: return hAllocation->DedicatedAllocMap(this, ppData); default: @@ -14549,6 +14779,7 @@ void VmaAllocator_T::FreeDedicatedMemory(const VmaAllocation allocation) FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory); m_Budget.RemoveAllocation(MemoryTypeIndexToHeapIndex(allocation->GetMemoryTypeIndex()), allocation->GetSize()); + allocation->Destroy(this); m_AllocationObjectAllocator.Free(allocation); VMA_DEBUG_LOG_FORMAT(" Freed DedicatedMemory MemoryTypeIndex=%" PRIu32, memTypeIndex); @@ -16169,7 +16400,7 @@ VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateImage( pImageCreateInfo, allocator->GetAllocationCallbacks(), pImage); - if(res >= 0) + if(res == VK_SUCCESS) { VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ? VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL : @@ -16194,14 +16425,14 @@ VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateImage( 1, // allocationCount pAllocation); - if(res >= 0) + if(res == VK_SUCCESS) { // 3. Bind image with memory. if((pAllocationCreateInfo->flags & VMA_ALLOCATION_CREATE_DONT_BIND_BIT) == 0) { res = allocator->BindImageMemory(*pAllocation, 0, *pImage, VMA_NULL); } - if(res >= 0) + if(res == VK_SUCCESS) { // All steps succeeded. #if VMA_STATS_STRING_ENABLED @@ -16434,6 +16665,15 @@ VMA_CALL_PRE void VMA_CALL_POST vmaFreeVirtualBlockStatsString(VmaVirtualBlock V VmaFreeString(virtualBlock->GetAllocationCallbacks(), pStatsString); } } +#if VMA_EXTERNAL_MEMORY_WIN32 +VMA_CALL_PRE VkResult VMA_CALL_POST vmaGetMemoryWin32Handle(VmaAllocator VMA_NOT_NULL allocator, + VmaAllocation VMA_NOT_NULL allocation, HANDLE hTargetProcess, HANDLE* VMA_NOT_NULL pHandle) +{ + VMA_ASSERT(allocator && allocation && pHandle); + VMA_DEBUG_GLOBAL_MUTEX_LOCK; + return allocation->GetWin32Handle(allocator, hTargetProcess, pHandle); +} +#endif // VMA_EXTERNAL_MEMORY_WIN32 #endif // VMA_STATS_STRING_ENABLED #endif // _VMA_PUBLIC_INTERFACE #endif // VMA_IMPLEMENTATION @@ -16567,6 +16807,7 @@ VK_EXT_memory_budget | #VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT VK_KHR_buffer_device_address | #VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT VK_EXT_memory_priority | #VMA_ALLOCATOR_CREATE_EXT_MEMORY_PRIORITY_BIT VK_AMD_device_coherent_memory | #VMA_ALLOCATOR_CREATE_AMD_DEVICE_COHERENT_MEMORY_BIT +VK_KHR_external_memory_win32 | #VMA_ALLOCATOR_CREATE_KHR_EXTERNAL_MEMORY_WIN32_BIT Example with fetching pointers to Vulkan functions dynamically: @@ -17053,7 +17294,7 @@ implementation whether the allocation succeeds or fails. You can change this beh by using #VMA_ALLOCATION_CREATE_WITHIN_BUDGET_BIT flag. With it, the allocation is not made if it would exceed the budget or if the budget is already exceeded. 
VMA then tries to make the allocation from the next eligible Vulkan memory type. -The all of them fail, the call then fails with `VK_ERROR_OUT_OF_DEVICE_MEMORY`. +If all of them fail, the call then fails with `VK_ERROR_OUT_OF_DEVICE_MEMORY`. Example usage pattern may be to pass the #VMA_ALLOCATION_CREATE_WITHIN_BUDGET_BIT flag when creating resources that are not essential for the application (e.g. the texture of a specific object) and not to pass it when creating critically important resources @@ -18193,7 +18434,8 @@ allocCreateInfo.flags = VMA_ALLOCATION_CREATE_HOST_ACCESS_SEQUENTIAL_WRITE_BIT | VkBuffer buf; VmaAllocation alloc; VmaAllocationInfo allocInfo; -vmaCreateBuffer(allocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo); +VkResult result = vmaCreateBuffer(allocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, &allocInfo); +// Check result... VkMemoryPropertyFlags memPropFlags; vmaGetAllocationMemoryProperties(allocator, alloc, &memPropFlags); @@ -18204,10 +18446,24 @@ if(memPropFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) // [Executed in runtime]: memcpy(allocInfo.pMappedData, myData, myDataSize); + result = vmaFlushAllocation(allocator, alloc, 0, VK_WHOLE_SIZE); + // Check result... + + VkBufferMemoryBarrier bufMemBarrier = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER }; + bufMemBarrier.srcAccessMask = VK_ACCESS_HOST_WRITE_BIT; + bufMemBarrier.dstAccessMask = VK_ACCESS_UNIFORM_READ_BIT; + bufMemBarrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED; + bufMemBarrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED; + bufMemBarrier.buffer = buf; + bufMemBarrier.offset = 0; + bufMemBarrier.size = VK_WHOLE_SIZE; + + vkCmdPipelineBarrier(cmdBuf, VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, + 0, 0, nullptr, 1, &bufMemBarrier, 0, nullptr); } else { - // Allocation ended up in a non-mappable memory - need to transfer. + // Allocation ended up in a non-mappable memory - a transfer using a staging buffer is required. VkBufferCreateInfo stagingBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO }; stagingBufCreateInfo.size = 65536; stagingBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT; @@ -18220,18 +18476,46 @@ else VkBuffer stagingBuf; VmaAllocation stagingAlloc; VmaAllocationInfo stagingAllocInfo; - vmaCreateBuffer(allocator, &stagingBufCreateInfo, &stagingAllocCreateInfo, - &stagingBuf, &stagingAlloc, stagingAllocInfo); + result = vmaCreateBuffer(allocator, &stagingBufCreateInfo, &stagingAllocCreateInfo, + &stagingBuf, &stagingAlloc, &stagingAllocInfo); + // Check result... // [Executed in runtime]: memcpy(stagingAllocInfo.pMappedData, myData, myDataSize); - vmaFlushAllocation(allocator, stagingAlloc, 0, VK_WHOLE_SIZE); - //vkCmdPipelineBarrier: VK_ACCESS_HOST_WRITE_BIT --> VK_ACCESS_TRANSFER_READ_BIT + result = vmaFlushAllocation(allocator, stagingAlloc, 0, VK_WHOLE_SIZE); + // Check result... 
+ + VkBufferMemoryBarrier bufMemBarrier = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER }; + bufMemBarrier.srcAccessMask = VK_ACCESS_HOST_WRITE_BIT; + bufMemBarrier.dstAccessMask = VK_ACCESS_TRANSFER_READ_BIT; + bufMemBarrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED; + bufMemBarrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED; + bufMemBarrier.buffer = stagingBuf; + bufMemBarrier.offset = 0; + bufMemBarrier.size = VK_WHOLE_SIZE; + + vkCmdPipelineBarrier(cmdBuf, VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, + 0, 0, nullptr, 1, &bufMemBarrier, 0, nullptr); + VkBufferCopy bufCopy = { 0, // srcOffset 0, // dstOffset, - myDataSize); // size + myDataSize, // size + }; + vkCmdCopyBuffer(cmdBuf, stagingBuf, buf, 1, &bufCopy); + + VkBufferMemoryBarrier bufMemBarrier2 = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER }; + bufMemBarrier2.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT; + bufMemBarrier2.dstAccessMask = VK_ACCESS_UNIFORM_READ_BIT; // We created a uniform buffer + bufMemBarrier2.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED; + bufMemBarrier2.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED; + bufMemBarrier2.buffer = buf; + bufMemBarrier2.offset = 0; + bufMemBarrier2.size = VK_WHOLE_SIZE; + + vkCmdPipelineBarrier(cmdBuf, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, + 0, 0, nullptr, 1, &bufMemBarrier2, 0, nullptr); } \endcode @@ -18264,14 +18548,22 @@ Please check "CONFIGURATION SECTION" in the code to find macros that you can def before each include of this file or change directly in this file to provide your own implementation of basic facilities like assert, `min()` and `max()` functions, mutex, atomic etc. -The library uses its own implementation of containers by default, but you can switch to using -STL containers instead. For example, define `VMA_ASSERT(expr)` before including the library to provide custom implementation of the assertion, compatible with your project. By default it is defined to standard C `assert(expr)` in `_DEBUG` configuration and empty otherwise. +Similarly, you can define `VMA_LEAK_LOG_FORMAT` macro to enable printing of leaked (unfreed) allocations, +including their names and other parameters. Example: + +\code +#define VMA_LEAK_LOG_FORMAT(format, ...) do { \ + printf((format), __VA_ARGS__); \ + printf("\n"); \ + } while(false) +\endcode + \section config_Vulkan_functions Pointers to Vulkan functions There are multiple ways to import pointers to Vulkan functions in the library. @@ -18526,6 +18818,145 @@ Example use of this extension can be found in the code of the sample and test su accompanying this library. +\page vk_khr_external_memory_win32 VK_KHR_external_memory_win32 + +On Windows, the VK_KHR_external_memory_win32 device extension allows exporting a Win32 `HANDLE` +of a `VkDeviceMemory` block, to be able to reference the memory on other Vulkan logical devices or instances, +in multiple processes, and/or in multiple APIs. +VMA offers support for it. + +\section vk_khr_external_memory_win32_initialization Initialization + +1) Make sure the extension is defined in the code by including following header before including VMA: + +\code +#include +\endcode + +2) Check if "VK_KHR_external_memory_win32" is available among device extensions. +Enable it when creating the `VkDevice` object. + +3) Enable the usage of this extension in VMA by setting flag #VMA_ALLOCATOR_CREATE_KHR_EXTERNAL_MEMORY_WIN32_BIT +when calling vmaCreateAllocator(). 
+ +4) Make sure that VMA has access to the `vkGetMemoryWin32HandleKHR` function by either enabling `VMA_DYNAMIC_VULKAN_FUNCTIONS` macro +or setting VmaVulkanFunctions::vkGetMemoryWin32HandleKHR explicitly. +For more information, see \ref quick_start_initialization_importing_vulkan_functions. + +\section vk_khr_external_memory_win32_preparations Preparations + +You can find example usage among tests, in file "Tests.cpp", function `TestWin32Handles()`. + +To use the extenion, buffers need to be created with `VkExternalMemoryBufferCreateInfoKHR` attached to their `pNext` chain, +and memory allocations need to be made with `VkExportMemoryAllocateInfoKHR` attached to their `pNext` chain. +To make use of them, you need to use \ref custom_memory_pools. Example: + +\code +// Define an example buffer and allocation parameters. +VkExternalMemoryBufferCreateInfoKHR externalMemBufCreateInfo = { + VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO_KHR, + nullptr, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT +}; +VkBufferCreateInfo exampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO }; +exampleBufCreateInfo.size = 0x10000; // Doesn't matter here. +exampleBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT; +exampleBufCreateInfo.pNext = &externalMemBufCreateInfo; + +VmaAllocationCreateInfo exampleAllocCreateInfo = {}; +exampleAllocCreateInfo.usage = VMA_MEMORY_USAGE_AUTO; + +// Find memory type index to use for the custom pool. +uint32_t memTypeIndex; +VkResult res = vmaFindMemoryTypeIndexForBufferInfo(g_Allocator, + &exampleBufCreateInfo, &exampleAllocCreateInfo, &memTypeIndex); +// Check res... + +// Create a custom pool. +constexpr static VkExportMemoryAllocateInfoKHR exportMemAllocInfo = { + VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_KHR, + nullptr, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT +}; +VmaPoolCreateInfo poolCreateInfo = {}; +poolCreateInfo.memoryTypeIndex = memTypeIndex; +poolCreateInfo.pMemoryAllocateNext = (void*)&exportMemAllocInfo; + +VmaPool pool; +res = vmaCreatePool(g_Allocator, &poolCreateInfo, &pool); +// Check res... + +// YOUR OTHER CODE COMES HERE.... + +// At the end, don't forget to destroy it! +vmaDestroyPool(g_Allocator, pool); +\endcode + +Note that the structure passed as VmaPoolCreateInfo::pMemoryAllocateNext must remain alive and unchanged +for the whole lifetime of the custom pool, because it will be used when the pool allocates a new device memory block. +No copy is made internally. This is why variable `exportMemAllocInfo` is defined as `static`. + +\section vk_khr_external_memory_win32_memory_allocation Memory allocation + +Finally, you can create a buffer with an allocation out of the custom pool. +The buffer should use same flags as the sample buffer used to find the memory type. +It should also specify `VkExternalMemoryBufferCreateInfoKHR` in its `pNext` chain. + +\code +VkExternalMemoryBufferCreateInfoKHR externalMemBufCreateInfo = { + VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO_KHR, + nullptr, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT +}; +VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO }; +bufCreateInfo.size = // Your desired buffer size. +bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT; +bufCreateInfo.pNext = &externalMemBufCreateInfo; + +VmaAllocationCreateInfo allocCreateInfo = {}; +allocCreateInfo.pool = pool; // It is enough to set this one member. 
+ +VkBuffer buf; +VmaAllocation alloc; +res = vmaCreateBuffer(g_Allocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, nullptr); +// Check res... + +// YOUR OTHER CODE COMES HERE.... + +// At the end, don't forget to destroy it! +vmaDestroyBuffer(g_Allocator, buf, alloc); +\endcode + +If you need each allocation to have its own device memory block and start at offset 0, you can still do +by using #VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT flag. It works also with custom pools. + +\section vk_khr_external_memory_win32_exporting_win32_handle Exporting Win32 handle + +After the allocation is created, you can acquire a Win32 `HANDLE` to the `VkDeviceMemory` block it belongs to. +VMA function vmaGetMemoryWin32Handle() is a replacement of the Vulkan function `vkGetMemoryWin32HandleKHR`. + +\code +HANDLE handle; +res = vmaGetMemoryWin32Handle(g_Allocator, alloc, nullptr, &handle); +// Check res... + +// YOUR OTHER CODE COMES HERE.... + +// At the end, you must close the handle. +CloseHandle(handle); +\endcode + +Documentation of the VK_KHR_external_memory_win32 extension states that: + +> If handleType is defined as an NT handle, vkGetMemoryWin32HandleKHR must be called no more than once for each valid unique combination of memory and handleType. + +This is ensured automatically inside VMA. +The library fetches the handle on first use, remembers it internally, and closes it when the memory block or dedicated allocation is destroyed. +Every time you call vmaGetMemoryWin32Handle(), VMA calls `DuplicateHandle` and returns a new handle that you need to close. + +For further information, please check documentation of the vmaGetMemoryWin32Handle() function. + + \page enabling_buffer_device_address Enabling buffer device address Device extension VK_KHR_buffer_device_address diff --git a/third_party/vulkan/vulkan_structs.hpp b/third_party/vulkan/vulkan_structs.hpp index c4da3cd..25b8612 100644 --- a/third_party/vulkan/vulkan_structs.hpp +++ b/third_party/vulkan/vulkan_structs.hpp @@ -53241,7 +53241,10 @@ namespace VULKAN_HPP_NAMESPACE , valueCount{ valueCount_ } , pValues{ pValues_ } { + return ( strcmp( layerName, rhs.layerName ) == 0 ) && ( specVersion == rhs.specVersion ) && ( implementationVersion == rhs.implementationVersion ) && + ( strcmp( description, rhs.description ) == 0 ); } +#endif VULKAN_HPP_CONSTEXPR LayerSettingEXT( LayerSettingEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; @@ -58437,7 +58440,6 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } -#endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eOpticalFlowSessionCreatePrivateDataInfoNV; @@ -58536,6 +58538,7 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } +#endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eOutOfBandQueueTypeInfoNV; @@ -58617,10 +58620,17 @@ namespace VULKAN_HPP_NAMESPACE ( earliestPresentTime == rhs.earliestPresentTime ) && ( presentMargin == rhs.presentMargin ); # endif } +#endif bool operator!=( PastPresentationTimingGOOGLE const & rhs ) const VULKAN_HPP_NOEXCEPT { - return !operator==( rhs ); + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( strcmp( name, rhs.name ) == 0 ) && + ( strcmp( category, rhs.category ) == 0 ) && ( strcmp( description, rhs.description ) == 0 ); + } + + bool operator!=( PerformanceCounterDescriptionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); } #endif @@ -58654,8 +58664,6 @@ namespace VULKAN_HPP_NAMESPACE 
PerformanceConfigurationAcquireInfoINTEL( VkPerformanceConfigurationAcquireInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT : PerformanceConfigurationAcquireInfoINTEL( *reinterpret_cast( &rhs ) ) { - int32 = int32_; - return *this; } PerformanceConfigurationAcquireInfoINTEL & operator=( PerformanceConfigurationAcquireInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default; @@ -58719,13 +58727,7 @@ namespace VULKAN_HPP_NAMESPACE bool operator!=( PerformanceConfigurationAcquireInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT { - return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( strcmp( name, rhs.name ) == 0 ) && - ( strcmp( category, rhs.category ) == 0 ) && ( strcmp( description, rhs.description ) == 0 ); - } - - bool operator!=( PerformanceCounterDescriptionKHR const & rhs ) const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); + return !operator==( rhs ); } #endif @@ -58761,6 +58763,8 @@ namespace VULKAN_HPP_NAMESPACE , category{ category_ } , description{ description_ } { + uint32 = uint32_; + return *this; } VULKAN_HPP_CONSTEXPR_14 PerformanceCounterDescriptionKHR( PerformanceCounterDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; @@ -58769,6 +58773,7 @@ namespace VULKAN_HPP_NAMESPACE : PerformanceCounterDescriptionKHR( *reinterpret_cast( &rhs ) ) { } +#endif /*VULKAN_HPP_NO_UNION_SETTERS*/ PerformanceCounterDescriptionKHR & operator=( PerformanceCounterDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; #endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ @@ -58783,6 +58788,7 @@ namespace VULKAN_HPP_NAMESPACE { return *reinterpret_cast( this ); } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkPerformanceCounterDescriptionKHR &() VULKAN_HPP_NOEXCEPT { @@ -58897,12 +58903,12 @@ namespace VULKAN_HPP_NAMESPACE { return *reinterpret_cast( this ); } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkPerformanceCounterKHR &() VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } +#endif /*VULKAN_HPP_NO_UNION_SETTERS*/ #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION @@ -59108,7 +59114,36 @@ namespace VULKAN_HPP_NAMESPACE bool operator!=( PerformanceMarkerInfoINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT { - return !operator==( rhs ); + } + + PerformanceValueINTEL & operator=( PerformanceValueINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default; +#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ + + PerformanceValueINTEL & operator=( VkPerformanceValueINTEL const & rhs ) VULKAN_HPP_NOEXCEPT + { + *this = *reinterpret_cast( &rhs ); + return *this; + } + + operator VkPerformanceValueINTEL const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPerformanceValueINTEL &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION + auto +# else + std::tuple +# endif + reflect() const VULKAN_HPP_NOEXCEPT + { + return std::tie( type, data ); } #endif @@ -59184,6 +59219,7 @@ namespace VULKAN_HPP_NAMESPACE parameter = parameter_; return *this; } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkPerformanceOverrideInfoINTEL const &() const VULKAN_HPP_NOEXCEPT { @@ -59262,6 +59298,8 @@ namespace VULKAN_HPP_NAMESPACE PerformanceQuerySubmitInfoKHR( VkPerformanceQuerySubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT : PerformanceQuerySubmitInfoKHR( *reinterpret_cast( &rhs ) ) { + value32 = value32_; + return *this; } PerformanceQuerySubmitInfoKHR & operator=( PerformanceQuerySubmitInfoKHR const & rhs ) 
VULKAN_HPP_NOEXCEPT = default; @@ -59285,12 +59323,12 @@ namespace VULKAN_HPP_NAMESPACE counterPassIndex = counterPassIndex_; return *this; } +#endif /*VULKAN_HPP_NO_UNION_SETTERS*/ operator VkPerformanceQuerySubmitInfoKHR const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } -#endif /*VULKAN_HPP_NO_UNION_SETTERS*/ operator VkPerformanceQuerySubmitInfoKHR &() VULKAN_HPP_NOEXCEPT { @@ -59321,13 +59359,6 @@ namespace VULKAN_HPP_NAMESPACE } bool operator!=( PerformanceQuerySubmitInfoKHR const & rhs ) const VULKAN_HPP_NOEXCEPT - { - } - - PerformanceValueINTEL & operator=( PerformanceValueINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default; -#endif /*VULKAN_HPP_NO_STRUCT_CONSTRUCTORS*/ - - PerformanceValueINTEL & operator=( VkPerformanceValueINTEL const & rhs ) VULKAN_HPP_NOEXCEPT { *this = *reinterpret_cast( &rhs ); return *this; @@ -61320,7 +61351,6 @@ namespace VULKAN_HPP_NAMESPACE *this = *reinterpret_cast( &rhs ); return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT @@ -61561,12 +61591,12 @@ namespace VULKAN_HPP_NAMESPACE *this = *reinterpret_cast( &rhs ); return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkPhysicalDeviceClusterCullingShaderPropertiesHUAWEI const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkPhysicalDeviceClusterCullingShaderPropertiesHUAWEI &() VULKAN_HPP_NOEXCEPT { @@ -61603,6 +61633,7 @@ namespace VULKAN_HPP_NAMESPACE ( indirectBufferOffsetAlignment == rhs.indirectBufferOffsetAlignment ); # endif } +#endif bool operator!=( PhysicalDeviceClusterCullingShaderPropertiesHUAWEI const & rhs ) const VULKAN_HPP_NOEXCEPT { @@ -61671,7 +61702,6 @@ namespace VULKAN_HPP_NAMESPACE clusterShadingRate = clusterShadingRate_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkPhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI const &() const VULKAN_HPP_NOEXCEPT { @@ -61769,6 +61799,7 @@ namespace VULKAN_HPP_NAMESPACE deviceCoherentMemory = deviceCoherentMemory_; return *this; } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkPhysicalDeviceCoherentMemoryFeaturesAMD const &() const VULKAN_HPP_NOEXCEPT { @@ -61866,7 +61897,6 @@ namespace VULKAN_HPP_NAMESPACE colorWriteEnable = colorWriteEnable_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkPhysicalDeviceColorWriteEnableFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { @@ -62114,7 +62144,6 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } -#endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceComputeShaderDerivativesFeaturesKHR; @@ -62684,8 +62713,8 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple @@ -62698,14 +62727,14 @@ namespace VULKAN_HPP_NAMESPACE cooperativeMatrixFlexibleDimensionsMaxDimension, cooperativeMatrixWorkgroupScopeReservedSharedMemory ); } -#endif +# endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceCooperativeMatrix2PropertiesNV const & ) const = default; #else bool operator==( PhysicalDeviceCooperativeMatrix2PropertiesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( 
VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && @@ -62719,7 +62748,7 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } -#endif +# endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCooperativeMatrix2PropertiesNV; @@ -62734,6 +62763,7 @@ namespace VULKAN_HPP_NAMESPACE { using Type = PhysicalDeviceCooperativeMatrix2PropertiesNV; }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ struct PhysicalDeviceCooperativeMatrixFeaturesKHR { @@ -62788,7 +62818,6 @@ namespace VULKAN_HPP_NAMESPACE cooperativeMatrixRobustBufferAccess = cooperativeMatrixRobustBufferAccess_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkPhysicalDeviceCooperativeMatrixFeaturesKHR const &() const VULKAN_HPP_NOEXCEPT { @@ -62800,8 +62829,8 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple @@ -62810,14 +62839,14 @@ namespace VULKAN_HPP_NAMESPACE { return std::tie( sType, pNext, cooperativeMatrix, cooperativeMatrixRobustBufferAccess ); } -#endif +# endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceCooperativeMatrixFeaturesKHR const & ) const = default; #else bool operator==( PhysicalDeviceCooperativeMatrixFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( cooperativeMatrix == rhs.cooperativeMatrix ) && @@ -62829,7 +62858,7 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } -#endif +# endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCooperativeMatrixFeaturesKHR; @@ -62843,6 +62872,7 @@ namespace VULKAN_HPP_NAMESPACE { using Type = PhysicalDeviceCooperativeMatrixFeaturesKHR; }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ struct PhysicalDeviceCooperativeMatrixFeaturesNV { @@ -62992,9 +63022,20 @@ namespace VULKAN_HPP_NAMESPACE { return *reinterpret_cast( this ); } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION + operator VkPhysicalDeviceCudaKernelLaunchFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceCudaKernelLaunchFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple @@ -63003,14 +63044,14 @@ namespace VULKAN_HPP_NAMESPACE { return std::tie( sType, pNext, cooperativeMatrixSupportedStages ); } -#endif +# endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceCooperativeMatrixPropertiesKHR const & ) const = default; #else bool operator==( PhysicalDeviceCooperativeMatrixPropertiesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( cooperativeMatrixSupportedStages == rhs.cooperativeMatrixSupportedStages ); @@ -63021,7 +63062,7 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } -#endif +# endif public: VULKAN_HPP_NAMESPACE::StructureType sType = 
StructureType::ePhysicalDeviceCooperativeMatrixPropertiesKHR; @@ -63034,6 +63075,7 @@ namespace VULKAN_HPP_NAMESPACE { using Type = PhysicalDeviceCooperativeMatrixPropertiesKHR; }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ struct PhysicalDeviceCooperativeMatrixPropertiesNV { @@ -63162,6 +63204,7 @@ namespace VULKAN_HPP_NAMESPACE indirectCopy = indirectCopy_; return *this; } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkPhysicalDeviceCopyMemoryIndirectFeaturesNV const &() const VULKAN_HPP_NOEXCEPT { @@ -63173,8 +63216,8 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } -# if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple @@ -63183,14 +63226,14 @@ namespace VULKAN_HPP_NAMESPACE { return std::tie( sType, pNext, indirectCopy ); } -# endif +#endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceCopyMemoryIndirectFeaturesNV const & ) const = default; #else bool operator==( PhysicalDeviceCopyMemoryIndirectFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( indirectCopy == rhs.indirectCopy ); @@ -63201,7 +63244,7 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } -# endif +#endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCopyMemoryIndirectFeaturesNV; @@ -63214,7 +63257,6 @@ namespace VULKAN_HPP_NAMESPACE { using Type = PhysicalDeviceCopyMemoryIndirectFeaturesNV; }; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ struct PhysicalDeviceCopyMemoryIndirectPropertiesNV { @@ -63356,18 +63398,8 @@ namespace VULKAN_HPP_NAMESPACE } # endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ - operator VkPhysicalDeviceCudaKernelLaunchFeaturesNV const &() const VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - - operator VkPhysicalDeviceCudaKernelLaunchFeaturesNV &() VULKAN_HPP_NOEXCEPT - { - return *reinterpret_cast( this ); - } - -# if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple @@ -63376,18 +63408,31 @@ namespace VULKAN_HPP_NAMESPACE { return std::tie( sType, pNext, cornerSampledImage ); } -# endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceCornerSampledImageFeaturesNV const & ) const = default; #else bool operator==( PhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) + return *reinterpret_cast( this ); + } + +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( cornerSampledImage == rhs.cornerSampledImage ); # endif + +# if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) + auto operator<=>( PhysicalDeviceCudaKernelLaunchFeaturesNV const & ) const = default; +# else + bool operator==( PhysicalDeviceCudaKernelLaunchFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT + { +# if defined( VULKAN_HPP_USE_REFLECT ) + return this->reflect() == rhs.reflect(); +# else + return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( cudaKernelLaunchFeatures == rhs.cudaKernelLaunchFeatures ); +# endif } bool operator!=( PhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT @@ 
-63941,7 +63986,6 @@ namespace VULKAN_HPP_NAMESPACE customBorderColorWithoutFormat = customBorderColorWithoutFormat_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkPhysicalDeviceCustomBorderColorFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { @@ -64048,7 +64092,6 @@ namespace VULKAN_HPP_NAMESPACE { return std::tie( sType, pNext, maxCustomBorderColorSamplers ); } -#endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceCustomBorderColorPropertiesEXT const & ) const = default; @@ -64064,9 +64107,9 @@ namespace VULKAN_HPP_NAMESPACE bool operator!=( PhysicalDeviceCustomBorderColorPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return !operator==( rhs ); + shaderStorageImageArrayNonUniformIndexing = shaderStorageImageArrayNonUniformIndexing_; + return *this; } -#endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCustomBorderColorPropertiesEXT; @@ -64093,6 +64136,8 @@ namespace VULKAN_HPP_NAMESPACE : pNext{ pNext_ } , dedicatedAllocationImageAliasing{ dedicatedAllocationImageAliasing_ } { + descriptorBindingSampledImageUpdateAfterBind = descriptorBindingSampledImageUpdateAfterBind_; + return *this; } VULKAN_HPP_CONSTEXPR PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) @@ -64101,6 +64146,8 @@ namespace VULKAN_HPP_NAMESPACE PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV( VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV( *reinterpret_cast( &rhs ) ) { + descriptorBindingStorageBufferUpdateAfterBind = descriptorBindingStorageBufferUpdateAfterBind_; + return *this; } PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV & @@ -64127,6 +64174,7 @@ namespace VULKAN_HPP_NAMESPACE dedicatedAllocationImageAliasing = dedicatedAllocationImageAliasing_; return *this; } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const &() const VULKAN_HPP_NOEXCEPT { @@ -64379,7 +64427,6 @@ namespace VULKAN_HPP_NAMESPACE { return std::tie( sType, pNext, depthClampControl ); } -#endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceDepthClampControlFeaturesEXT const & ) const = default; @@ -64395,7 +64442,8 @@ namespace VULKAN_HPP_NAMESPACE bool operator!=( PhysicalDeviceDepthClampControlFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return !operator==( rhs ); + shaderStorageImageArrayNonUniformIndexing = shaderStorageImageArrayNonUniformIndexing_; + return *this; } #endif @@ -64424,6 +64472,8 @@ namespace VULKAN_HPP_NAMESPACE : pNext{ pNext_ } , depthClampZeroOne{ depthClampZeroOne_ } { + shaderInputAttachmentArrayNonUniformIndexing = shaderInputAttachmentArrayNonUniformIndexing_; + return *this; } VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthClampZeroOneFeaturesEXT( PhysicalDeviceDepthClampZeroOneFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; @@ -64431,6 +64481,8 @@ namespace VULKAN_HPP_NAMESPACE PhysicalDeviceDepthClampZeroOneFeaturesEXT( VkPhysicalDeviceDepthClampZeroOneFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceDepthClampZeroOneFeaturesEXT( *reinterpret_cast( &rhs ) ) { + shaderUniformTexelBufferArrayNonUniformIndexing = shaderUniformTexelBufferArrayNonUniformIndexing_; + return *this; } PhysicalDeviceDepthClampZeroOneFeaturesEXT & operator=( PhysicalDeviceDepthClampZeroOneFeaturesEXT const & rhs ) 
VULKAN_HPP_NOEXCEPT = default; @@ -64492,9 +64544,9 @@ namespace VULKAN_HPP_NAMESPACE bool operator!=( PhysicalDeviceDepthClampZeroOneFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - shaderStorageImageArrayNonUniformIndexing = shaderStorageImageArrayNonUniformIndexing_; - return *this; + return !operator==( rhs ); } +#endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDepthClampZeroOneFeaturesEXT; @@ -64521,7 +64573,7 @@ namespace VULKAN_HPP_NAMESPACE : pNext{ pNext_ } , depthClipControl{ depthClipControl_ } { - descriptorBindingSampledImageUpdateAfterBind = descriptorBindingSampledImageUpdateAfterBind_; + descriptorBindingPartiallyBound = descriptorBindingPartiallyBound_; return *this; } @@ -64530,7 +64582,7 @@ namespace VULKAN_HPP_NAMESPACE PhysicalDeviceDepthClipControlFeaturesEXT( VkPhysicalDeviceDepthClipControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceDepthClipControlFeaturesEXT( *reinterpret_cast( &rhs ) ) { - descriptorBindingStorageBufferUpdateAfterBind = descriptorBindingStorageBufferUpdateAfterBind_; + descriptorBindingVariableDescriptorCount = descriptorBindingVariableDescriptorCount_; return *this; } @@ -64542,6 +64594,7 @@ namespace VULKAN_HPP_NAMESPACE *this = *reinterpret_cast( &rhs ); return *this; } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthClipControlFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT @@ -64664,6 +64717,7 @@ namespace VULKAN_HPP_NAMESPACE { return *reinterpret_cast( this ); } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION @@ -64789,7 +64843,6 @@ namespace VULKAN_HPP_NAMESPACE shaderStorageImageArrayNonUniformIndexing = shaderStorageImageArrayNonUniformIndexing_; return *this; } -#endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDepthStencilResolveProperties; @@ -64821,7 +64874,7 @@ namespace VULKAN_HPP_NAMESPACE : pNext{ pNext_ } , combinedImageSamplerDensityMapDescriptorSize{ combinedImageSamplerDensityMapDescriptorSize_ } { - shaderInputAttachmentArrayNonUniformIndexing = shaderInputAttachmentArrayNonUniformIndexing_; + descriptorBindingSampledImageUpdateAfterBind = descriptorBindingSampledImageUpdateAfterBind_; return *this; } @@ -64831,7 +64884,7 @@ namespace VULKAN_HPP_NAMESPACE PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT( VkPhysicalDeviceDescriptorBufferDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT( *reinterpret_cast( &rhs ) ) { - shaderUniformTexelBufferArrayNonUniformIndexing = shaderUniformTexelBufferArrayNonUniformIndexing_; + descriptorBindingStorageBufferUpdateAfterBind = descriptorBindingStorageBufferUpdateAfterBind_; return *this; } @@ -64865,6 +64918,7 @@ namespace VULKAN_HPP_NAMESPACE { return std::tie( sType, pNext, combinedImageSamplerDensityMapDescriptorSize ); } +#endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT const & ) const = default; @@ -64916,8 +64970,6 @@ namespace VULKAN_HPP_NAMESPACE , descriptorBufferImageLayoutIgnored{ descriptorBufferImageLayoutIgnored_ } , descriptorBufferPushDescriptors{ descriptorBufferPushDescriptors_ } { - descriptorBindingPartiallyBound = descriptorBindingPartiallyBound_; - return *this; } VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorBufferFeaturesEXT( 
PhysicalDeviceDescriptorBufferFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; @@ -64925,8 +64977,6 @@ namespace VULKAN_HPP_NAMESPACE PhysicalDeviceDescriptorBufferFeaturesEXT( VkPhysicalDeviceDescriptorBufferFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceDescriptorBufferFeaturesEXT( *reinterpret_cast( &rhs ) ) { - descriptorBindingVariableDescriptorCount = descriptorBindingVariableDescriptorCount_; - return *this; } PhysicalDeviceDescriptorBufferFeaturesEXT & operator=( PhysicalDeviceDescriptorBufferFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; @@ -64937,7 +64987,6 @@ namespace VULKAN_HPP_NAMESPACE *this = *reinterpret_cast( &rhs ); return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorBufferFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT @@ -65142,7 +65191,6 @@ namespace VULKAN_HPP_NAMESPACE { return *reinterpret_cast( this ); } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION @@ -65531,7 +65579,6 @@ namespace VULKAN_HPP_NAMESPACE runtimeDescriptorArray = runtimeDescriptorArray_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkPhysicalDeviceDescriptorIndexingFeatures const &() const VULKAN_HPP_NOEXCEPT { @@ -65924,7 +65971,13 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorPoolOverallocationFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; + return *reinterpret_cast( this ); + } + + VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT & + setDeviceGeneratedCommands( VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedCommands_ ) VULKAN_HPP_NOEXCEPT + { + deviceGeneratedCommands = deviceGeneratedCommands_; return *this; } @@ -65950,11 +66003,7 @@ namespace VULKAN_HPP_NAMESPACE # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { @@ -66038,6 +66087,7 @@ namespace VULKAN_HPP_NAMESPACE descriptorSetHostMapping = descriptorSetHostMapping_; return *this; } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE const &() const VULKAN_HPP_NOEXCEPT { @@ -66053,7 +66103,7 @@ namespace VULKAN_HPP_NAMESPACE # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { @@ -66132,8 +66182,7 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; - return *this; + return *reinterpret_cast( this ); } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV & @@ -66255,7 +66304,8 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pNext = pNext_; + return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT & @@ -66271,7 +66321,6 @@ namespace VULKAN_HPP_NAMESPACE dynamicGeneratedPipelineLayout = dynamicGeneratedPipelineLayout_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkPhysicalDeviceDeviceGeneratedCommandsFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { @@ -66283,8 
+66332,8 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple @@ -66293,14 +66342,14 @@ namespace VULKAN_HPP_NAMESPACE { return std::tie( sType, pNext, deviceGeneratedCommands, dynamicGeneratedPipelineLayout ); } -#endif +# endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT const & ) const = default; #else bool operator==( PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceGeneratedCommands == rhs.deviceGeneratedCommands ) && @@ -66312,7 +66361,7 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } -#endif +# endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDeviceGeneratedCommandsFeaturesEXT; @@ -66326,6 +66375,7 @@ namespace VULKAN_HPP_NAMESPACE { using Type = PhysicalDeviceDeviceGeneratedCommandsFeaturesEXT; }; +#endif /*VK_ENABLE_BETA_EXTENSIONS*/ struct PhysicalDeviceDeviceGeneratedCommandsFeaturesNV { @@ -66491,7 +66541,6 @@ namespace VULKAN_HPP_NAMESPACE { return *reinterpret_cast( this ); } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION @@ -66552,6 +66601,7 @@ namespace VULKAN_HPP_NAMESPACE ( deviceGeneratedCommandsMultiDrawIndirectCount == rhs.deviceGeneratedCommandsMultiDrawIndirectCount ); # endif } +#endif bool operator!=( PhysicalDeviceDeviceGeneratedCommandsPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { @@ -66774,6 +66824,17 @@ namespace VULKAN_HPP_NAMESPACE { return *reinterpret_cast( this ); } +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ + + operator VkPhysicalDeviceDisplacementMicromapFeaturesNV const &() const VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } + + operator VkPhysicalDeviceDisplacementMicromapFeaturesNV &() VULKAN_HPP_NOEXCEPT + { + return *reinterpret_cast( this ); + } # if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION @@ -66798,6 +66859,7 @@ namespace VULKAN_HPP_NAMESPACE return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( deviceMemoryReport == rhs.deviceMemoryReport ); # endif } +#endif bool operator!=( PhysicalDeviceDeviceMemoryReportFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { @@ -66862,7 +66924,6 @@ namespace VULKAN_HPP_NAMESPACE diagnosticsConfig = diagnosticsConfig_; return *this; } -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkPhysicalDeviceDiagnosticsConfigFeaturesNV const &() const VULKAN_HPP_NOEXCEPT { @@ -66878,7 +66939,7 @@ namespace VULKAN_HPP_NAMESPACE # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { @@ -66897,7 +66958,6 @@ namespace VULKAN_HPP_NAMESPACE return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( diagnosticsConfig == rhs.diagnosticsConfig ); # endif } -#endif bool operator!=( PhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { @@ -66958,8 +67018,8 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } -# if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= 
VULKAN_HPP_CPP_VERSION auto # else std::tuple @@ -66968,14 +67028,14 @@ namespace VULKAN_HPP_NAMESPACE { return std::tie( sType, pNext, maxDiscardRectangles ); } -# endif +#endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceDiscardRectanglePropertiesEXT const & ) const = default; #else bool operator==( PhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( maxDiscardRectangles == rhs.maxDiscardRectangles ); @@ -66986,7 +67046,7 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } -# endif +#endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDiscardRectanglePropertiesEXT; @@ -66999,7 +67059,6 @@ namespace VULKAN_HPP_NAMESPACE { using Type = PhysicalDeviceDiscardRectanglePropertiesEXT; }; -#endif /*VK_ENABLE_BETA_EXTENSIONS*/ #if defined( VK_ENABLE_BETA_EXTENSIONS ) struct PhysicalDeviceDisplacementMicromapFeaturesNV @@ -67036,7 +67095,8 @@ namespace VULKAN_HPP_NAMESPACE # if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDisplacementMicromapFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { - return *reinterpret_cast( this ); + pNext = pNext_; + return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDisplacementMicromapFeaturesNV & @@ -67080,7 +67140,6 @@ namespace VULKAN_HPP_NAMESPACE return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( displacementMicromap == rhs.displacementMicromap ); # endif } -#endif bool operator!=( PhysicalDeviceDisplacementMicromapFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { @@ -67471,7 +67530,6 @@ namespace VULKAN_HPP_NAMESPACE { return std::tie( sType, pNext, dynamicRendering ); } -#endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceDynamicRenderingFeatures const & ) const = default; @@ -67487,7 +67545,8 @@ namespace VULKAN_HPP_NAMESPACE bool operator!=( PhysicalDeviceDynamicRenderingFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { - return !operator==( rhs ); + extendedDynamicState3PolygonMode = extendedDynamicState3PolygonMode_; + return *this; } #endif @@ -67518,6 +67577,8 @@ namespace VULKAN_HPP_NAMESPACE : pNext{ pNext_ } , dynamicRenderingLocalRead{ dynamicRenderingLocalRead_ } { + extendedDynamicState3RasterizationSamples = extendedDynamicState3RasterizationSamples_; + return *this; } VULKAN_HPP_CONSTEXPR @@ -67526,6 +67587,8 @@ namespace VULKAN_HPP_NAMESPACE PhysicalDeviceDynamicRenderingLocalReadFeatures( VkPhysicalDeviceDynamicRenderingLocalReadFeatures const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceDynamicRenderingLocalReadFeatures( *reinterpret_cast( &rhs ) ) { + extendedDynamicState3SampleMask = extendedDynamicState3SampleMask_; + return *this; } PhysicalDeviceDynamicRenderingLocalReadFeatures & operator=( PhysicalDeviceDynamicRenderingLocalReadFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; @@ -67572,7 +67635,6 @@ namespace VULKAN_HPP_NAMESPACE { return std::tie( sType, pNext, dynamicRenderingLocalRead ); } -#endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceDynamicRenderingLocalReadFeatures const & ) const = default; @@ -67588,7 +67650,8 @@ namespace VULKAN_HPP_NAMESPACE bool operator!=( PhysicalDeviceDynamicRenderingLocalReadFeatures const & rhs ) const VULKAN_HPP_NOEXCEPT { - return !operator==( rhs ); + 
extendedDynamicState3PolygonMode = extendedDynamicState3PolygonMode_; + return *this; } #endif @@ -67619,6 +67682,8 @@ namespace VULKAN_HPP_NAMESPACE : pNext{ pNext_ } , dynamicRenderingUnusedAttachments{ dynamicRenderingUnusedAttachments_ } { + extendedDynamicState3RasterizationSamples = extendedDynamicState3RasterizationSamples_; + return *this; } VULKAN_HPP_CONSTEXPR PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT( PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT const & rhs ) @@ -67628,6 +67693,8 @@ namespace VULKAN_HPP_NAMESPACE : PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT( *reinterpret_cast( &rhs ) ) { + extendedDynamicState3SampleMask = extendedDynamicState3SampleMask_; + return *this; } PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT & @@ -67676,7 +67743,6 @@ namespace VULKAN_HPP_NAMESPACE { return std::tie( sType, pNext, dynamicRenderingUnusedAttachments ); } -#endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT const & ) const = default; @@ -67692,7 +67758,8 @@ namespace VULKAN_HPP_NAMESPACE bool operator!=( PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return !operator==( rhs ); + extendedDynamicState3RasterizationSamples = extendedDynamicState3RasterizationSamples_; + return *this; } #endif @@ -67721,6 +67788,8 @@ namespace VULKAN_HPP_NAMESPACE : pNext{ pNext_ } , exclusiveScissor{ exclusiveScissor_ } { + extendedDynamicState3LogicOpEnable = extendedDynamicState3LogicOpEnable_; + return *this; } VULKAN_HPP_CONSTEXPR PhysicalDeviceExclusiveScissorFeaturesNV( PhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; @@ -67728,6 +67797,8 @@ namespace VULKAN_HPP_NAMESPACE PhysicalDeviceExclusiveScissorFeaturesNV( VkPhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceExclusiveScissorFeaturesNV( *reinterpret_cast( &rhs ) ) { + extendedDynamicState3ColorBlendEquation = extendedDynamicState3ColorBlendEquation_; + return *this; } PhysicalDeviceExclusiveScissorFeaturesNV & operator=( PhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default; @@ -67742,7 +67813,7 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExclusiveScissorFeaturesNV & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; + extendedDynamicState3ConservativeRasterizationMode = extendedDynamicState3ConservativeRasterizationMode_; return *this; } @@ -67751,7 +67822,6 @@ namespace VULKAN_HPP_NAMESPACE exclusiveScissor = exclusiveScissor_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkPhysicalDeviceExclusiveScissorFeaturesNV const &() const VULKAN_HPP_NOEXCEPT { @@ -67788,10 +67858,9 @@ namespace VULKAN_HPP_NAMESPACE bool operator!=( PhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) const VULKAN_HPP_NOEXCEPT { - extendedDynamicState3PolygonMode = extendedDynamicState3PolygonMode_; + extendedDynamicState3ViewportWScalingEnable = extendedDynamicState3ViewportWScalingEnable_; return *this; } -#endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExclusiveScissorFeaturesNV; @@ -67822,7 +67891,7 @@ namespace VULKAN_HPP_NAMESPACE , extendedDynamicState2LogicOp{ extendedDynamicState2LogicOp_ } , extendedDynamicState2PatchControlPoints{ extendedDynamicState2PatchControlPoints_ } { - 
extendedDynamicState3RasterizationSamples = extendedDynamicState3RasterizationSamples_; + extendedDynamicState3CoverageModulationTableEnable = extendedDynamicState3CoverageModulationTableEnable_; return *this; } @@ -67832,7 +67901,7 @@ namespace VULKAN_HPP_NAMESPACE PhysicalDeviceExtendedDynamicState2FeaturesEXT( VkPhysicalDeviceExtendedDynamicState2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceExtendedDynamicState2FeaturesEXT( *reinterpret_cast( &rhs ) ) { - extendedDynamicState3SampleMask = extendedDynamicState3SampleMask_; + extendedDynamicState3CoverageReductionMode = extendedDynamicState3CoverageReductionMode_; return *this; } @@ -67844,6 +67913,7 @@ namespace VULKAN_HPP_NAMESPACE *this = *reinterpret_cast( &rhs ); return *this; } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState2FeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT @@ -67898,6 +67968,7 @@ namespace VULKAN_HPP_NAMESPACE { return std::tie( sType, pNext, extendedDynamicState2, extendedDynamicState2LogicOp, extendedDynamicState2PatchControlPoints ); } +#endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceExtendedDynamicState2FeaturesEXT const & ) const = default; @@ -67915,8 +67986,7 @@ namespace VULKAN_HPP_NAMESPACE bool operator!=( PhysicalDeviceExtendedDynamicState2FeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - extendedDynamicState3PolygonMode = extendedDynamicState3PolygonMode_; - return *this; + return !operator==( rhs ); } #endif @@ -68008,8 +68078,6 @@ namespace VULKAN_HPP_NAMESPACE , extendedDynamicState3RepresentativeFragmentTestEnable{ extendedDynamicState3RepresentativeFragmentTestEnable_ } , extendedDynamicState3ShadingRateImageEnable{ extendedDynamicState3ShadingRateImageEnable_ } { - extendedDynamicState3RasterizationSamples = extendedDynamicState3RasterizationSamples_; - return *this; } VULKAN_HPP_CONSTEXPR @@ -68018,8 +68086,6 @@ namespace VULKAN_HPP_NAMESPACE PhysicalDeviceExtendedDynamicState3FeaturesEXT( VkPhysicalDeviceExtendedDynamicState3FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceExtendedDynamicState3FeaturesEXT( *reinterpret_cast( &rhs ) ) { - extendedDynamicState3SampleMask = extendedDynamicState3SampleMask_; - return *this; } PhysicalDeviceExtendedDynamicState3FeaturesEXT & operator=( PhysicalDeviceExtendedDynamicState3FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default; @@ -68034,7 +68100,7 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { - extendedDynamicState3AlphaToOneEnable = extendedDynamicState3AlphaToOneEnable_; + pNext = pNext_; return *this; } @@ -68044,6 +68110,7 @@ namespace VULKAN_HPP_NAMESPACE extendedDynamicState3TessellationDomainOrigin = extendedDynamicState3TessellationDomainOrigin_; return *this; } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3DepthClampEnable( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3DepthClampEnable_ ) VULKAN_HPP_NOEXCEPT @@ -68065,6 +68132,7 @@ namespace VULKAN_HPP_NAMESPACE extendedDynamicState3RasterizationSamples = extendedDynamicState3RasterizationSamples_; return *this; } +#endif VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3SampleMask( VULKAN_HPP_NAMESPACE::Bool32 
extendedDynamicState3SampleMask_ ) VULKAN_HPP_NOEXCEPT @@ -68079,6 +68147,7 @@ namespace VULKAN_HPP_NAMESPACE extendedDynamicState3AlphaToCoverageEnable = extendedDynamicState3AlphaToCoverageEnable_; return *this; } +#endif VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3AlphaToOneEnable( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3AlphaToOneEnable_ ) VULKAN_HPP_NOEXCEPT @@ -68114,6 +68183,7 @@ namespace VULKAN_HPP_NAMESPACE extendedDynamicState3ColorWriteMask = extendedDynamicState3ColorWriteMask_; return *this; } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3RasterizationStream( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3RasterizationStream_ ) VULKAN_HPP_NOEXCEPT @@ -68135,6 +68205,7 @@ namespace VULKAN_HPP_NAMESPACE extendedDynamicState3ExtraPrimitiveOverestimationSize = extendedDynamicState3ExtraPrimitiveOverestimationSize_; return *this; } +#endif VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3DepthClipEnable( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3DepthClipEnable_ ) VULKAN_HPP_NOEXCEPT @@ -68149,6 +68220,7 @@ namespace VULKAN_HPP_NAMESPACE extendedDynamicState3SampleLocationsEnable = extendedDynamicState3SampleLocationsEnable_; return *this; } +#endif VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicState3FeaturesEXT & setExtendedDynamicState3ColorBlendAdvanced( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState3ColorBlendAdvanced_ ) VULKAN_HPP_NOEXCEPT @@ -68593,7 +68665,11 @@ namespace VULKAN_HPP_NAMESPACE # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { @@ -68693,7 +68769,7 @@ namespace VULKAN_HPP_NAMESPACE # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { @@ -68772,6 +68848,7 @@ namespace VULKAN_HPP_NAMESPACE { return *reinterpret_cast( this ); } +# endif operator VkPhysicalDeviceExtendedSparseAddressSpacePropertiesNV &() VULKAN_HPP_NOEXCEPT { @@ -68811,7 +68888,7 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } -#endif +# endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExtendedSparseAddressSpacePropertiesNV; @@ -68826,6 +68903,7 @@ namespace VULKAN_HPP_NAMESPACE { using Type = PhysicalDeviceExtendedSparseAddressSpacePropertiesNV; }; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ struct PhysicalDeviceExternalBufferInfo { @@ -68866,8 +68944,6 @@ namespace VULKAN_HPP_NAMESPACE #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalBufferInfo & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; - return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalBufferInfo & setFlags( VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT @@ -68888,7 +68964,6 @@ namespace VULKAN_HPP_NAMESPACE handleType = handleType_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkPhysicalDeviceExternalBufferInfo const &() const VULKAN_HPP_NOEXCEPT { @@ -68900,8 +68975,8 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple( PhysicalDeviceExternalBufferInfo const & ) const = default; #else bool operator==( 
PhysicalDeviceExternalBufferInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( flags == rhs.flags ) && ( usage == rhs.usage ) && ( handleType == rhs.handleType ); @@ -68932,7 +69007,7 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } -#endif +# endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalBufferInfo; @@ -68947,6 +69022,7 @@ namespace VULKAN_HPP_NAMESPACE { using Type = PhysicalDeviceExternalBufferInfo; }; +#endif /*VK_USE_PLATFORM_ANDROID_KHR*/ using PhysicalDeviceExternalBufferInfoKHR = PhysicalDeviceExternalBufferInfo; @@ -69104,7 +69180,6 @@ namespace VULKAN_HPP_NAMESPACE { return *reinterpret_cast( this ); } -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkPhysicalDeviceExternalFormatResolveFeaturesANDROID &() VULKAN_HPP_NOEXCEPT { @@ -69300,7 +69375,7 @@ namespace VULKAN_HPP_NAMESPACE handleType = handleType_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkPhysicalDeviceExternalImageFormatInfo const &() const VULKAN_HPP_NOEXCEPT { @@ -69312,8 +69387,8 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } -#if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +# if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto # else std::tuple @@ -69322,14 +69397,14 @@ namespace VULKAN_HPP_NAMESPACE { return std::tie( sType, pNext, handleType ); } -#endif +# endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceExternalImageFormatInfo const & ) const = default; #else bool operator==( PhysicalDeviceExternalImageFormatInfo const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( handleType == rhs.handleType ); @@ -69340,7 +69415,7 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } -#endif +# endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalImageFormatInfo; @@ -69353,6 +69428,7 @@ namespace VULKAN_HPP_NAMESPACE { using Type = PhysicalDeviceExternalImageFormatInfo; }; +#endif /*VK_USE_PLATFORM_SCREEN_QNX*/ using PhysicalDeviceExternalImageFormatInfoKHR = PhysicalDeviceExternalImageFormatInfo; @@ -69585,7 +69661,7 @@ namespace VULKAN_HPP_NAMESPACE screenBufferImport = screenBufferImport_; return *this; } -# endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkPhysicalDeviceExternalMemoryScreenBufferFeaturesQNX const &() const VULKAN_HPP_NOEXCEPT { @@ -69597,24 +69673,24 @@ namespace VULKAN_HPP_NAMESPACE return *reinterpret_cast( this ); } -# if defined( VULKAN_HPP_USE_REFLECT ) -# if 14 <= VULKAN_HPP_CPP_VERSION +#if defined( VULKAN_HPP_USE_REFLECT ) +# if 14 <= VULKAN_HPP_CPP_VERSION auto -# else - std::tuple -# endif +# else + std::tuple +# endif reflect() const VULKAN_HPP_NOEXCEPT { return std::tie( sType, pNext, screenBufferImport ); } -# endif +#endif # if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX const & ) const = default; # else bool operator==( PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX const & rhs ) const VULKAN_HPP_NOEXCEPT { -# if defined( 
VULKAN_HPP_USE_REFLECT ) +# if defined( VULKAN_HPP_USE_REFLECT ) return this->reflect() == rhs.reflect(); # else return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( screenBufferImport == rhs.screenBufferImport ); @@ -69625,7 +69701,7 @@ namespace VULKAN_HPP_NAMESPACE { return !operator==( rhs ); } -# endif +#endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalMemoryScreenBufferFeaturesQNX; @@ -70388,7 +70464,6 @@ namespace VULKAN_HPP_NAMESPACE fragmentDensityMapNonSubsampledImages = fragmentDensityMapNonSubsampledImages_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkPhysicalDeviceFragmentDensityMapFeaturesEXT const &() const VULKAN_HPP_NOEXCEPT { @@ -70429,6 +70504,7 @@ namespace VULKAN_HPP_NAMESPACE ( fragmentDensityMapNonSubsampledImages == rhs.fragmentDensityMapNonSubsampledImages ); # endif } +#endif bool operator!=( PhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { @@ -70464,6 +70540,7 @@ namespace VULKAN_HPP_NAMESPACE , fragmentDensityMapOffset{ fragmentDensityMapOffset_ } { } +#endif VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM( PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default; @@ -70482,6 +70559,7 @@ namespace VULKAN_HPP_NAMESPACE *this = *reinterpret_cast( &rhs ); return *this; } +#endif #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT @@ -70502,6 +70580,7 @@ namespace VULKAN_HPP_NAMESPACE { return *reinterpret_cast( this ); } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkPhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM &() VULKAN_HPP_NOEXCEPT { @@ -70531,6 +70610,7 @@ namespace VULKAN_HPP_NAMESPACE return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fragmentDensityMapOffset == rhs.fragmentDensityMapOffset ); # endif } +#endif bool operator!=( PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM const & rhs ) const VULKAN_HPP_NOEXCEPT { @@ -70709,7 +70789,6 @@ namespace VULKAN_HPP_NAMESPACE ( maxFragmentDensityTexelSize == rhs.maxFragmentDensityTexelSize ) && ( fragmentDensityInvocations == rhs.fragmentDensityInvocations ); # endif } -#endif bool operator!=( PhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { @@ -70745,7 +70824,6 @@ namespace VULKAN_HPP_NAMESPACE , fragmentShaderBarycentric{ fragmentShaderBarycentric_ } { } -#endif VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShaderBarycentricFeaturesKHR( PhysicalDeviceFragmentShaderBarycentricFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default; @@ -70764,7 +70842,6 @@ namespace VULKAN_HPP_NAMESPACE *this = *reinterpret_cast( &rhs ); return *this; } -#endif #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShaderBarycentricFeaturesKHR & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT @@ -70785,7 +70862,6 @@ namespace VULKAN_HPP_NAMESPACE { return *reinterpret_cast( this ); } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR &() VULKAN_HPP_NOEXCEPT { @@ -70815,7 +70891,6 @@ namespace VULKAN_HPP_NAMESPACE return ( sType == rhs.sType ) && ( pNext == rhs.pNext ) && ( fragmentShaderBarycentric == rhs.fragmentShaderBarycentric ); # endif } -#endif bool operator!=( PhysicalDeviceFragmentShaderBarycentricFeaturesKHR const & rhs ) const VULKAN_HPP_NOEXCEPT { @@ -71221,7 +71296,6 @@ namespace 
VULKAN_HPP_NAMESPACE maxFragmentShadingRateInvocationCount = maxFragmentShadingRateInvocationCount_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV const &() const VULKAN_HPP_NOEXCEPT { @@ -72022,7 +72096,6 @@ namespace VULKAN_HPP_NAMESPACE graphicsPipelineLibraryIndependentInterpolationDecoration = graphicsPipelineLibraryIndependentInterpolationDecoration_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT const &() const VULKAN_HPP_NOEXCEPT { @@ -72058,12 +72131,12 @@ namespace VULKAN_HPP_NAMESPACE ( graphicsPipelineLibraryIndependentInterpolationDecoration == rhs.graphicsPipelineLibraryIndependentInterpolationDecoration ); # endif } +#endif bool operator!=( PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceGraphicsPipelineLibraryPropertiesEXT; @@ -72338,6 +72411,7 @@ namespace VULKAN_HPP_NAMESPACE { return *reinterpret_cast( this ); } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #if defined( VULKAN_HPP_USE_REFLECT ) # if 14 <= VULKAN_HPP_CPP_VERSION @@ -72406,6 +72480,8 @@ namespace VULKAN_HPP_NAMESPACE , optimalTilingLayoutUUID{ optimalTilingLayoutUUID_ } , identicalMemoryTypeRequirements{ identicalMemoryTypeRequirements_ } { + copyDstLayoutCount = copyDstLayoutCount_; + return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyProperties( PhysicalDeviceHostImageCopyProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default; @@ -72413,6 +72489,8 @@ namespace VULKAN_HPP_NAMESPACE PhysicalDeviceHostImageCopyProperties( VkPhysicalDeviceHostImageCopyProperties const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceHostImageCopyProperties( *reinterpret_cast( &rhs ) ) { + pCopyDstLayouts = pCopyDstLayouts_; + return *this; } # if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) @@ -72440,11 +72518,12 @@ namespace VULKAN_HPP_NAMESPACE *this = *reinterpret_cast( &rhs ); return *this; } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostImageCopyProperties & setPNext( void * pNext_ ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; + optimalTilingLayoutUUID = optimalTilingLayoutUUID_; return *this; } @@ -73338,6 +73417,7 @@ namespace VULKAN_HPP_NAMESPACE *this = *reinterpret_cast( &rhs ); return *this; } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageDrmFormatModifierInfoEXT & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT @@ -73406,7 +73486,6 @@ namespace VULKAN_HPP_NAMESPACE { return std::tie( sType, pNext, drmFormatModifier, sharingMode, queueFamilyIndexCount, pQueueFamilyIndices ); } -#endif #if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR ) auto operator<=>( PhysicalDeviceImageDrmFormatModifierInfoEXT const & ) const = default; @@ -73423,7 +73502,8 @@ namespace VULKAN_HPP_NAMESPACE bool operator!=( PhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) const VULKAN_HPP_NOEXCEPT { - return !operator==( rhs ); + sharingMode = sharingMode_; + return *this; } #endif @@ -73463,6 +73543,8 @@ namespace VULKAN_HPP_NAMESPACE , usage{ usage_ } , flags{ flags_ } { + queueFamilyIndexCount = queueFamilyIndexCount_; + return *this; } VULKAN_HPP_CONSTEXPR PhysicalDeviceImageFormatInfo2( PhysicalDeviceImageFormatInfo2 const & rhs ) 
VULKAN_HPP_NOEXCEPT = default; @@ -73470,6 +73552,8 @@ namespace VULKAN_HPP_NAMESPACE PhysicalDeviceImageFormatInfo2( VkPhysicalDeviceImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT : PhysicalDeviceImageFormatInfo2( *reinterpret_cast( &rhs ) ) { + pQueueFamilyIndices = pQueueFamilyIndices_; + return *this; } PhysicalDeviceImageFormatInfo2 & operator=( PhysicalDeviceImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default; @@ -73480,6 +73564,8 @@ namespace VULKAN_HPP_NAMESPACE *this = *reinterpret_cast( &rhs ); return *this; } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ #if !defined( VULKAN_HPP_NO_STRUCT_SETTERS ) VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageFormatInfo2 & setPNext( const void * pNext_ ) VULKAN_HPP_NOEXCEPT @@ -73644,7 +73730,13 @@ namespace VULKAN_HPP_NAMESPACE # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { @@ -73827,7 +73919,6 @@ namespace VULKAN_HPP_NAMESPACE textureBlockMatch = textureBlockMatch_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkPhysicalDeviceImageProcessingFeaturesQCOM const &() const VULKAN_HPP_NOEXCEPT { @@ -73843,11 +73934,7 @@ namespace VULKAN_HPP_NAMESPACE # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { @@ -74030,7 +74117,6 @@ namespace VULKAN_HPP_NAMESPACE robustImageAccess = robustImageAccess_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkPhysicalDeviceImageRobustnessFeatures const &() const VULKAN_HPP_NOEXCEPT { @@ -74746,7 +74832,7 @@ namespace VULKAN_HPP_NAMESPACE # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { @@ -75465,6 +75551,8 @@ namespace VULKAN_HPP_NAMESPACE { return *reinterpret_cast( this ); } +# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkPhysicalDeviceLimits &() VULKAN_HPP_NOEXCEPT { @@ -76932,7 +77020,6 @@ namespace VULKAN_HPP_NAMESPACE linearColorAttachment = linearColorAttachment_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkPhysicalDeviceLinearColorAttachmentFeaturesNV const &() const VULKAN_HPP_NOEXCEPT { @@ -76948,7 +77035,11 @@ namespace VULKAN_HPP_NAMESPACE # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { @@ -77053,12 +77144,12 @@ namespace VULKAN_HPP_NAMESPACE ( maxMemoryAllocationSize == rhs.maxMemoryAllocationSize ); # endif } +#endif bool operator!=( PhysicalDeviceMaintenance3Properties const & rhs ) const VULKAN_HPP_NOEXCEPT { return !operator==( rhs ); } -#endif public: VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMaintenance3Properties; @@ -77117,7 +77208,6 @@ namespace VULKAN_HPP_NAMESPACE maintenance4 = maintenance4_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkPhysicalDeviceMaintenance4Features const &() const VULKAN_HPP_NOEXCEPT { @@ -77133,7 +77223,7 @@ namespace VULKAN_HPP_NAMESPACE # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - std::tuple + std::tuple # endif reflect() const VULKAN_HPP_NOEXCEPT { @@ -77300,7 +77390,6 @@ namespace VULKAN_HPP_NAMESPACE maintenance5 = maintenance5_; return *this; } -#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkPhysicalDeviceMaintenance5Features const &() const VULKAN_HPP_NOEXCEPT { @@ -77316,7 +77405,7 @@ namespace VULKAN_HPP_NAMESPACE # if 14 <= VULKAN_HPP_CPP_VERSION auto # else - 
@@ (vendored Vulkan-Hpp update) @@
[Remainder of the diff for the vendored Vulkan-Hpp header (third_party vulkan_structs.hpp), not reproduced here: mechanical regeneration of the PhysicalDevice* feature and property structs (PhysicalDeviceMapMemoryPlacedPropertiesEXT through PhysicalDeviceVulkan14Features), covering reflect() tuple member lists, VULKAN_HPP_NO_STRUCT_SETTERS setter blocks, operator==/operator!= definitions, operator Vk* conversion operators, and relocated VK_ENABLE_BETA_EXTENSIONS guards. The extracted hunks lost their angle-bracketed template arguments and interleave unrelated structs, so only this summary of the affected content is retained.]
VULKAN_HPP_NAMESPACE::Bool32 smoothLines_ ) VULKAN_HPP_NOEXCEPT { @@ -97347,6 +97472,7 @@ namespace VULKAN_HPP_NAMESPACE { return *reinterpret_cast( this ); } +#endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ operator VkPhysicalDeviceVulkan14Features &() VULKAN_HPP_NOEXCEPT { @@ -97429,6 +97555,7 @@ namespace VULKAN_HPP_NAMESPACE ( hostImageCopy == rhs.hostImageCopy ) && ( pushDescriptor == rhs.pushDescriptor ); # endif } +#endif bool operator!=( PhysicalDeviceVulkan14Features const & rhs ) const VULKAN_HPP_NOEXCEPT { @@ -97534,6 +97661,7 @@ namespace VULKAN_HPP_NAMESPACE , identicalMemoryTypeRequirements{ identicalMemoryTypeRequirements_ } { } +#endif VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan14Properties( PhysicalDeviceVulkan14Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default; @@ -97550,12 +97678,12 @@ namespace VULKAN_HPP_NAMESPACE *this = *reinterpret_cast( &rhs ); return *this; } -#endif operator VkPhysicalDeviceVulkan14Properties const &() const VULKAN_HPP_NOEXCEPT { return *reinterpret_cast( this ); } +#endif operator VkPhysicalDeviceVulkan14Properties &() VULKAN_HPP_NOEXCEPT { @@ -97655,6 +97783,7 @@ namespace VULKAN_HPP_NAMESPACE ( optimalTilingLayoutUUID == rhs.optimalTilingLayoutUUID ) && ( identicalMemoryTypeRequirements == rhs.identicalMemoryTypeRequirements ); # endif } +#endif bool operator!=( PhysicalDeviceVulkan14Properties const & rhs ) const VULKAN_HPP_NOEXCEPT { @@ -97719,6 +97848,7 @@ namespace VULKAN_HPP_NAMESPACE , vulkanMemoryModelAvailabilityVisibilityChains{ vulkanMemoryModelAvailabilityVisibilityChains_ } { } +#endif VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkanMemoryModelFeatures( PhysicalDeviceVulkanMemoryModelFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default; @@ -97746,21 +97876,19 @@ namespace VULKAN_HPP_NAMESPACE VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkanMemoryModelFeatures & setVulkanMemoryModel( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel_ ) VULKAN_HPP_NOEXCEPT { - *this = *reinterpret_cast( &rhs ); - return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkanMemoryModelFeatures & setVulkanMemoryModelDeviceScope( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope_ ) VULKAN_HPP_NOEXCEPT { - pNext = pNext_; + *this = *reinterpret_cast( &rhs ); return *this; } VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkanMemoryModelFeatures & setVulkanMemoryModelAvailabilityVisibilityChains( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains_ ) VULKAN_HPP_NOEXCEPT { - workgroupMemoryExplicitLayout = workgroupMemoryExplicitLayout_; + pNext = pNext_; return *this; } #endif /*VULKAN_HPP_NO_STRUCT_SETTERS*/ From 54b45824b34fd7f98cece9f9bc44c4f95d941783 Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Tue, 7 Jan 2025 01:20:26 +0100 Subject: [PATCH 118/131] fixing things --- .gitignore | 1 + example/main.c | 21 ++-- .../build.sh | 0 .../main.c | 0 .../run.sh | 0 includes/mlx.h | 4 +- includes/mlx_profile.h | 6 +- runtime/Includes/Core/Application.inl | 7 ++ runtime/Includes/Core/Graphics.h | 12 +- runtime/Includes/Core/Graphics.inl | 101 --------------- runtime/Includes/Graphics/Scene.h | 6 +- runtime/Sources/Core/Application.cpp | 1 + runtime/Sources/Core/Bridge.cpp | 2 - runtime/Sources/Core/Graphics.cpp | 101 +++++++++++++++ runtime/Sources/Graphics/Scene.cpp | 5 + runtime/Sources/Renderer/Descriptor.cpp | 40 ++++-- .../Sources/Renderer/RenderPasses/2DPass.cpp | 2 +- .../Sources/Renderer/RenderPasses/Passes.cpp | 11 +- runtime/Sources/Renderer/Renderer.cpp | 2 + third_party/kvf.h | 115 +++++++++--------- xmake.lua | 4 +- 21 files changed, 242 
insertions(+), 199 deletions(-) rename {experimental/RenderToTexture => in_depth_features}/build.sh (100%) rename {experimental/RenderToTexture => in_depth_features}/main.c (100%) rename {experimental/RenderToTexture => in_depth_features}/run.sh (100%) diff --git a/.gitignore b/.gitignore index 9c8cb76..a55bcdc 100644 --- a/.gitignore +++ b/.gitignore @@ -13,6 +13,7 @@ *.pdb *.gch *.pch +*.sym *.exe *vgcore.* *.gdb_history diff --git a/example/main.c b/example/main.c index 3639bc5..6db5745 100644 --- a/example/main.c +++ b/example/main.c @@ -18,23 +18,25 @@ typedef struct static mlx_color pixels_circle[CIRCLE_DIAMETER * CIRCLE_DIAMETER] = { 0 }; +#define THRESHOLD 200 + void update(void* param) { static int i = 0; mlx_t* mlx = (mlx_t*)param; - if(i > 200) + if(i > THRESHOLD) { mlx_clear_window(mlx->mlx, mlx->win, (mlx_color){ .rgba = 0x334D4DFF }); mlx_put_transformed_image_to_window(mlx->mlx, mlx->win, mlx->logo_bmp, 220, 40, 0.5f, 0.5f, i); } - if(i >= 250) + if(i >= THRESHOLD + THRESHOLD / 4) mlx_set_font_scale(mlx->mlx, "default", 16.f); else mlx_set_font_scale(mlx->mlx, "default", 6.f); - mlx_string_put(mlx->mlx, mlx->win, 160, 120, (mlx_color){ .rgba = 0xFF2066FF }, "this text should be hidden"); + mlx_string_put(mlx->mlx, mlx->win, 160, 120, (mlx_color){ .rgba = 0xFF2066FF }, "this text should be behind"); mlx_put_image_to_window(mlx->mlx, mlx->win, mlx->logo_png, 100, 100); mlx_put_image_to_window(mlx->mlx, mlx->win, mlx->img, 150, 60); @@ -50,7 +52,7 @@ void update(void* param) color += (color < 255); } - if(i < 200) + if(i < THRESHOLD) mlx_put_transformed_image_to_window(mlx->mlx, mlx->win, mlx->logo_jpg, 210, 150, 0.5f, 2.0f, 0.0f); else mlx_put_transformed_image_to_window(mlx->mlx, mlx->win, mlx->logo_jpg, 210, 150, fabs(sin(i / 100.0f)), fabs(cos(i / 100.0f) * 2.0f), 0.0f); @@ -59,7 +61,7 @@ void update(void* param) mlx_pixel_put_region(mlx->mlx, mlx->win, 200, 170, CIRCLE_DIAMETER, CIRCLE_DIAMETER, pixels_circle); - i++; + i++; // Will overflow and I don't care } mlx_image create_image(mlx_t* mlx) @@ -166,14 +168,9 @@ int main(void) mlx_on_event(mlx.mlx, mlx.win, MLX_KEYDOWN, key_hook, &mlx); mlx_on_event(mlx.mlx, mlx.win, MLX_WINDOW_EVENT, window_hook, &mlx); - mlx.logo_png = mlx_new_image_from_file(mlx.mlx, "42_logo.png", &dummy, &dummy); mlx.logo_bmp = mlx_new_image_from_file(mlx.mlx, "42_logo.bmp", &dummy, &dummy); - //mlx.logo_jpg = mlx_new_image_from_file(mlx.mlx, "42_logo.jpg", &dummy, &dummy); - mlx.logo_jpg = mlx_new_image(mlx.mlx, dummy, dummy); - - mlx_color* data = (mlx_color*)malloc(dummy * dummy * sizeof(mlx_color)); - mlx_get_image_region(mlx.mlx, mlx.logo_png, 0, 0, dummy, dummy, data); - mlx_set_image_region(mlx.mlx, mlx.logo_jpg, 0, 0, dummy, dummy, data); + mlx.logo_png = mlx_new_image_from_file(mlx.mlx, "42_logo.png", &dummy, &dummy); + mlx.logo_jpg = mlx_new_image_from_file(mlx.mlx, "42_logo.jpg", &dummy, &dummy); mlx_pixel_put(mlx.mlx, mlx.win, 200, 10, (mlx_color){ .rgba = 0xFF00FFFF }); mlx_put_image_to_window(mlx.mlx, mlx.win, mlx.logo_png, 0, 0); diff --git a/experimental/RenderToTexture/build.sh b/in_depth_features/build.sh similarity index 100% rename from experimental/RenderToTexture/build.sh rename to in_depth_features/build.sh diff --git a/experimental/RenderToTexture/main.c b/in_depth_features/main.c similarity index 100% rename from experimental/RenderToTexture/main.c rename to in_depth_features/main.c diff --git a/experimental/RenderToTexture/run.sh b/in_depth_features/run.sh similarity index 100% rename from 
experimental/RenderToTexture/run.sh rename to in_depth_features/run.sh diff --git a/includes/mlx.h b/includes/mlx.h index 0c4152a..4b373de 100644 --- a/includes/mlx.h +++ b/includes/mlx.h @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2022/10/04 16:56:35 by maldavid #+# #+# */ -/* Updated: 2024/12/20 00:42:01 by maldavid ### ########.fr */ +/* Updated: 2025/01/05 22:44:22 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -79,7 +79,7 @@ MLX_API mlx_context mlx_init(); * @brief Caps the FPS * * @param mlx Internal MLX application - * @param fps The FPS cap + * @param fps The FPS cap or 0 for vsync */ MLX_API void mlx_set_fps_goal(mlx_context mlx, int fps); diff --git a/includes/mlx_profile.h b/includes/mlx_profile.h index 5f53b0f..9c7e167 100644 --- a/includes/mlx_profile.h +++ b/includes/mlx_profile.h @@ -6,12 +6,12 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2023/11/10 08:49:17 by maldavid #+# #+# */ -/* Updated: 2024/12/17 00:35:35 by maldavid ### ########.fr */ +/* Updated: 2025/01/07 00:17:45 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ -#ifndef __MLX_PROFILE__ -#define __MLX_PROFILE__ +#ifndef MACROLIBX_PROFILE_H +#define MACROLIBX_PROFILE_H // Try to identify the compiler #if defined(__BORLANDC__) diff --git a/runtime/Includes/Core/Application.inl b/runtime/Includes/Core/Application.inl index 8cc58c3..c0d00b8 100644 --- a/runtime/Includes/Core/Application.inl +++ b/runtime/Includes/Core/Application.inl @@ -50,6 +50,13 @@ namespace mlx void Application::SetFPSCap(std::uint32_t fps) noexcept { + if(fps == 0) + { + SDL_DisplayMode mode; + if(!SDL_GetCurrentDisplayMode(1, &mode)) + return; + fps = mode.refresh_rate; + } m_fps.SetMaxFPS(fps); } diff --git a/runtime/Includes/Core/Graphics.h b/runtime/Includes/Core/Graphics.h index 06a221a..7d51d1c 100644 --- a/runtime/Includes/Core/Graphics.h +++ b/runtime/Includes/Core/Graphics.h @@ -22,13 +22,13 @@ namespace mlx void Render() noexcept; - inline void ResetRenderData(mlx_color color) noexcept; + void ResetRenderData(mlx_color color) noexcept; - inline void PixelPut(int x, int y, mlx_color color) noexcept; - inline void PixelPutArray(int x, int y, mlx_color* color, std::size_t pixels_size) noexcept; - inline void PixelPutRegion(int x, int y, int w, int h, mlx_color* color) noexcept; - inline void StringPut(int x, int y, mlx_color color, std::string str); - inline void TexturePut(NonOwningPtr texture, int x, int y, float scale_x, float scale_y, float angle); + void PixelPut(int x, int y, mlx_color color) noexcept; + void PixelPutArray(int x, int y, mlx_color* color, std::size_t pixels_size) noexcept; + void PixelPutRegion(int x, int y, int w, int h, mlx_color* color) noexcept; + void StringPut(int x, int y, mlx_color color, std::string str); + void TexturePut(NonOwningPtr texture, int x, int y, float scale_x, float scale_y, float angle); inline void TryEraseSpritesInScene(NonOwningPtr texture) noexcept; diff --git a/runtime/Includes/Core/Graphics.inl b/runtime/Includes/Core/Graphics.inl index 1416b9a..6cb2c34 100644 --- a/runtime/Includes/Core/Graphics.inl +++ b/runtime/Includes/Core/Graphics.inl @@ -3,107 +3,6 @@ namespace mlx { - void GraphicsSupport::ResetRenderData(mlx_color color) noexcept - { - MLX_PROFILE_FUNCTION(); - Vec4f vec_color = { - static_cast(color.r) / 255.0f, - static_cast(color.g) / 255.0f, - static_cast(color.b) / 255.0f, - 
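The mlx_set_fps_goal / SetFPSCap change above makes a cap of 0 mean "follow the monitor refresh rate". A minimal sketch of that fallback, assuming SDL2 (where SDL_GetCurrentDisplayMode() returns 0 on success); ResolveFpsCap and the default of 60 on failure are illustrative, not the engine's code:

#include <SDL2/SDL.h>
#include <cstdint>

// Resolve the effective FPS cap: 0 means "use the current display's refresh rate".
std::uint32_t ResolveFpsCap(std::uint32_t requested_fps)
{
    if(requested_fps != 0)
        return requested_fps;
    SDL_DisplayMode mode;
    if(SDL_GetCurrentDisplayMode(0, &mode) != 0 || mode.refresh_rate <= 0)
        return 60; // query failed or refresh rate unknown; assumed fallback value
    return static_cast<std::uint32_t>(mode.refresh_rate);
}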
static_cast(color.a) / 255.0f - }; - p_scene->ResetScene(std::move(vec_color)); - m_put_pixel_manager.ResetRenderData(); - m_draw_layer = 0; - m_pixelput_called = false; - } - - void GraphicsSupport::PixelPut(int x, int y, mlx_color color) noexcept - { - MLX_PROFILE_FUNCTION(); - NonOwningPtr texture = m_put_pixel_manager.DrawPixel(x, y, m_draw_layer, color); - if(texture) - { - m_pixelput_called = true; - Sprite& new_sprite = p_scene->CreateSprite(texture); - new_sprite.SetPosition(Vec2f{ 0.0f, 0.0f }); - } - } - - void GraphicsSupport::PixelPutArray(int x, int y, mlx_color* pixels, std::size_t pixels_size) noexcept - { - MLX_PROFILE_FUNCTION(); - NonOwningPtr texture = m_put_pixel_manager.DrawPixelsArray(x, y, m_draw_layer, pixels, pixels_size); - if(texture) - { - m_pixelput_called = true; - Sprite& new_sprite = p_scene->CreateSprite(texture); - new_sprite.SetPosition(Vec2f{ 0.0f, 0.0f }); - } - } - - void GraphicsSupport::PixelPutRegion(int x, int y, int w, int h, mlx_color* pixels) noexcept - { - MLX_PROFILE_FUNCTION(); - NonOwningPtr texture = m_put_pixel_manager.DrawPixelsRegion(x, y, w, h, m_draw_layer, pixels); - if(texture) - { - m_pixelput_called = true; - Sprite& new_sprite = p_scene->CreateSprite(texture); - new_sprite.SetPosition(Vec2f{ 0.0f, 0.0f }); - } - } - - void GraphicsSupport::StringPut(int x, int y, mlx_color color, std::string str) - { - MLX_PROFILE_FUNCTION(); - if(str.empty()) - return; - - Vec4f vec_color = { - static_cast(color.r) / 255.0f, - static_cast(color.g) / 255.0f, - static_cast(color.b) / 255.0f, - static_cast(color.a) / 255.0f, - }; - - NonOwningPtr text = p_scene->GetTextFromPositionAndColor(str, Vec2f{ static_cast(x), static_cast(y) }, vec_color); - if(!text) - { - if(m_pixelput_called) - { - m_draw_layer++; - m_pixelput_called = false; - } - Text& new_text = p_scene->CreateText(str); - new_text.SetPosition(Vec2f{ static_cast(x), static_cast(y) }); - new_text.SetColor(std::move(vec_color)); - } - else if(!p_scene->IsTextAtGivenDrawLayer(str, m_draw_layer)) - p_scene->BringToDrawLayer(text.Get(), m_draw_layer); - } - - void GraphicsSupport::TexturePut(NonOwningPtr texture, int x, int y, float scale_x, float scale_y, float angle) - { - MLX_PROFILE_FUNCTION(); - NonOwningPtr sprite = p_scene->GetSpriteFromTexturePositionScaleRotation(texture, Vec2f{ static_cast(x), static_cast(y) }, scale_x, scale_y, angle); - if(!sprite) - { - if(m_pixelput_called) - { - m_draw_layer++; - m_pixelput_called = false; - } - Sprite& new_sprite = p_scene->CreateSprite(texture); - new_sprite.SetCenter(Vec2f{ texture->GetWidth() / 2.0f, texture->GetHeight() / 2.0f }); - new_sprite.SetPosition(Vec2f{ static_cast(x), static_cast(y) }); - new_sprite.SetScale(Vec2f{ scale_x, scale_y }); - new_sprite.SetRotation(angle); - } - else if(!p_scene->IsTextureAtGivenDrawLayer(texture, m_draw_layer)) - p_scene->BringToDrawLayer(sprite.Get(), m_draw_layer); - } - void GraphicsSupport::TryEraseSpritesInScene(NonOwningPtr texture) noexcept { MLX_PROFILE_FUNCTION(); diff --git a/runtime/Includes/Graphics/Scene.h b/runtime/Includes/Graphics/Scene.h index 69d7fdc..cfa1ff7 100644 --- a/runtime/Includes/Graphics/Scene.h +++ b/runtime/Includes/Graphics/Scene.h @@ -29,17 +29,21 @@ namespace mlx void BringToDrawLayer(NonOwningPtr drawable, std::uint64_t draw_layer); - inline void ResetScene(Vec4f clear) { m_drawables.clear(); m_clear_color = std::move(clear); } + inline void ResetScene(Vec4f clear) { m_drawables.clear(); m_clear_color = std::move(clear); m_has_scene_changed = true; } inline const 
Vec4f& GetClearColor() const noexcept { return m_clear_color; } [[nodiscard]] MLX_FORCEINLINE const std::vector>& GetDrawables() const noexcept { return m_drawables; } + inline void ResetChangeChecker() noexcept { m_has_scene_changed = false; } + inline bool HasSceneChanged() const noexcept { return m_has_scene_changed; } + ~Scene() = default; private: std::vector> m_drawables; std::shared_ptr p_bound_font; Vec4f m_clear_color = { 0.0f, 0.0f, 0.0f, 1.0f }; + bool m_has_scene_changed = false; }; } diff --git a/runtime/Sources/Core/Application.cpp b/runtime/Sources/Core/Application.cpp index b3cf579..ef253b3 100644 --- a/runtime/Sources/Core/Application.cpp +++ b/runtime/Sources/Core/Application.cpp @@ -66,6 +66,7 @@ namespace mlx catch(...) { return nullptr; } m_image_registry.RegisterTexture(texture); image->texture = texture; + texture->Clear(VK_NULL_HANDLE, Vec4f{ 0.0f }); return image; } diff --git a/runtime/Sources/Core/Bridge.cpp b/runtime/Sources/Core/Bridge.cpp index 835dce2..ef83773 100644 --- a/runtime/Sources/Core/Bridge.cpp +++ b/runtime/Sources/Core/Bridge.cpp @@ -47,8 +47,6 @@ extern "C" MLX_CHECK_APPLICATION_POINTER(mlx); if(fps < 0) mlx::Error("You cannot set a negative FPS cap (nice try)"); - else if(fps == 0) - mlx::Error("You cannot set a FPS cap to 0 (nice try)"); else mlx->app->SetFPSCap(static_cast(fps)); } diff --git a/runtime/Sources/Core/Graphics.cpp b/runtime/Sources/Core/Graphics.cpp index 6560495..1e89686 100644 --- a/runtime/Sources/Core/Graphics.cpp +++ b/runtime/Sources/Core/Graphics.cpp @@ -51,6 +51,107 @@ namespace mlx #endif } + void GraphicsSupport::ResetRenderData(mlx_color color) noexcept + { + MLX_PROFILE_FUNCTION(); + Vec4f vec_color = { + static_cast(color.r) / 255.0f, + static_cast(color.g) / 255.0f, + static_cast(color.b) / 255.0f, + static_cast(color.a) / 255.0f + }; + p_scene->ResetScene(std::move(vec_color)); + m_put_pixel_manager.ResetRenderData(); + m_draw_layer = 0; + m_pixelput_called = false; + } + + void GraphicsSupport::PixelPut(int x, int y, mlx_color color) noexcept + { + MLX_PROFILE_FUNCTION(); + NonOwningPtr texture = m_put_pixel_manager.DrawPixel(x, y, m_draw_layer, color); + if(texture) + { + m_pixelput_called = true; + Sprite& new_sprite = p_scene->CreateSprite(texture); + new_sprite.SetPosition(Vec2f{ 0.0f, 0.0f }); + } + } + + void GraphicsSupport::PixelPutArray(int x, int y, mlx_color* pixels, std::size_t pixels_size) noexcept + { + MLX_PROFILE_FUNCTION(); + NonOwningPtr texture = m_put_pixel_manager.DrawPixelsArray(x, y, m_draw_layer, pixels, pixels_size); + if(texture) + { + m_pixelput_called = true; + Sprite& new_sprite = p_scene->CreateSprite(texture); + new_sprite.SetPosition(Vec2f{ 0.0f, 0.0f }); + } + } + + void GraphicsSupport::PixelPutRegion(int x, int y, int w, int h, mlx_color* pixels) noexcept + { + MLX_PROFILE_FUNCTION(); + NonOwningPtr texture = m_put_pixel_manager.DrawPixelsRegion(x, y, w, h, m_draw_layer, pixels); + if(texture) + { + m_pixelput_called = true; + Sprite& new_sprite = p_scene->CreateSprite(texture); + new_sprite.SetPosition(Vec2f{ 0.0f, 0.0f }); + } + } + + void GraphicsSupport::StringPut(int x, int y, mlx_color color, std::string str) + { + MLX_PROFILE_FUNCTION(); + if(str.empty()) + return; + + Vec4f vec_color = { + static_cast(color.r) / 255.0f, + static_cast(color.g) / 255.0f, + static_cast(color.b) / 255.0f, + static_cast(color.a) / 255.0f, + }; + + NonOwningPtr text = p_scene->GetTextFromPositionAndColor(str, Vec2f{ static_cast(x), static_cast(y) }, vec_color); + if(!text) + { + 
if(m_pixelput_called) + { + m_draw_layer++; + m_pixelput_called = false; + } + Text& new_text = p_scene->CreateText(str); + new_text.SetPosition(Vec2f{ static_cast(x), static_cast(y) }); + new_text.SetColor(std::move(vec_color)); + } + else if(!p_scene->IsTextAtGivenDrawLayer(str, m_draw_layer)) + p_scene->BringToDrawLayer(text.Get(), m_draw_layer); + } + + void GraphicsSupport::TexturePut(NonOwningPtr texture, int x, int y, float scale_x, float scale_y, float angle) + { + MLX_PROFILE_FUNCTION(); + NonOwningPtr sprite = p_scene->GetSpriteFromTexturePositionScaleRotation(texture, Vec2f{ static_cast(x), static_cast(y) }, scale_x, scale_y, angle); + if(!sprite) + { + if(m_pixelput_called) + { + m_draw_layer++; + m_pixelput_called = false; + } + Sprite& new_sprite = p_scene->CreateSprite(texture); + new_sprite.SetCenter(Vec2f{ texture->GetWidth() / 2.0f, texture->GetHeight() / 2.0f }); + new_sprite.SetPosition(Vec2f{ static_cast(x), static_cast(y) }); + new_sprite.SetScale(Vec2f{ scale_x, scale_y }); + new_sprite.SetRotation(angle); + } + else if(!p_scene->IsTextureAtGivenDrawLayer(texture, m_draw_layer)) + p_scene->BringToDrawLayer(sprite.Get(), m_draw_layer); + } + GraphicsSupport::~GraphicsSupport() { MLX_PROFILE_FUNCTION(); diff --git a/runtime/Sources/Graphics/Scene.cpp b/runtime/Sources/Graphics/Scene.cpp index d36b135..34cef74 100644 --- a/runtime/Sources/Graphics/Scene.cpp +++ b/runtime/Sources/Graphics/Scene.cpp @@ -18,12 +18,14 @@ namespace mlx { std::shared_ptr new_sprite = std::make_shared(drawable->GetMesh(), texture); m_drawables.push_back(new_sprite); + m_has_scene_changed = true; return *new_sprite; } } std::shared_ptr sprite = std::make_shared(texture); m_drawables.push_back(sprite); + m_has_scene_changed = true; return *sprite; } @@ -83,12 +85,14 @@ namespace mlx { std::shared_ptr new_text = std::make_shared(text, p_bound_font, drawable->GetMesh()); m_drawables.push_back(new_text); + m_has_scene_changed = true; return *new_text; } } std::shared_ptr new_text = std::make_shared(text, p_bound_font); m_drawables.push_back(new_text); + m_has_scene_changed = true; return *new_text; } @@ -127,5 +131,6 @@ namespace mlx if(it == m_drawables.end()) return; std::swap(*it, *(m_drawables.begin() + draw_layer)); + m_has_scene_changed = true; } } diff --git a/runtime/Sources/Renderer/Descriptor.cpp b/runtime/Sources/Renderer/Descriptor.cpp index 8d7afca..ba29b37 100644 --- a/runtime/Sources/Renderer/Descriptor.cpp +++ b/runtime/Sources/Renderer/Descriptor.cpp @@ -227,9 +227,27 @@ namespace mlx { MLX_PROFILE_FUNCTION(); Verify(m_sets[i] != VK_NULL_HANDLE, "invalid descriptor"); - std::vector writes; - std::vector buffer_infos; - std::vector image_infos; + + std::size_t image_count = 0; + std::size_t buffer_count = 0; + + for(auto& descriptor : m_descriptors) + { + if(descriptor.image_ptr) + image_count++; + else if(descriptor.uniform_buffer_ptr || descriptor.storage_buffer_ptr) + buffer_count++; + else + FatalError("unknown descriptor data"); + } + + std::vector writes(m_descriptors.size()); + std::vector buffer_infos(buffer_count); + std::vector image_infos(image_count); + std::size_t buffer_index = 0; + std::size_t image_index = 0; + std::size_t write_index = 0; + for(auto& descriptor : m_descriptors) { if(descriptor.image_ptr) @@ -239,8 +257,9 @@ namespace mlx info.sampler = descriptor.image_ptr->GetSampler(); info.imageLayout = descriptor.image_ptr->GetLayout(); info.imageView = descriptor.image_ptr->GetImageView(); - image_infos.push_back(info); - 
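The Descriptor.cpp rewrite above exists because kvfWrite*ToDescriptorSet keeps the VkDescriptorImageInfo / VkDescriptorBufferInfo pointer it is handed: taking &infos.back() after a push_back dangles as soon as the vector reallocates, so the vectors are now sized once up front. A stand-alone sketch of the same idea; Info, Write, Submit and BuildAndSubmit are stand-ins, not the engine's types:

#include <cstddef>
#include <vector>

struct Info  { int payload; };
struct Write { const Info* p_info; };        // stand-in for VkWriteDescriptorSet keeping a pointer

static void Submit(const std::vector<Write>& writes) { (void)writes; } // stand-in for vkUpdateDescriptorSets

void BuildAndSubmit(const std::vector<Info>& sources)
{
    std::vector<Info>  infos(sources.size());  // sized once: elements never move
    std::vector<Write> writes(sources.size());
    for(std::size_t i = 0; i < sources.size(); ++i)
    {
        infos[i]  = sources[i];
        writes[i] = Write{ &infos[i] };        // stays valid; &infos.back() after push_back would not
    }
    Submit(writes);                            // infos is still alive here, so the pointers are good
}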
writes.push_back(kvfWriteImageToDescriptorSet(RenderCore::Get().GetDevice(), m_sets[i], &image_infos.back(), descriptor.binding)); + image_infos[image_index] = std::move(info); + writes[write_index] = kvfWriteImageToDescriptorSet(RenderCore::Get().GetDevice(), m_sets[i], &image_infos[image_index], descriptor.binding); + image_index++; } else if(descriptor.uniform_buffer_ptr) { @@ -248,8 +267,9 @@ namespace mlx info.buffer = descriptor.uniform_buffer_ptr->Get(); info.offset = descriptor.uniform_buffer_ptr->GetOffset(); info.range = VK_WHOLE_SIZE; - buffer_infos.push_back(info); - writes.push_back(kvfWriteUniformBufferToDescriptorSet(RenderCore::Get().GetDevice(), m_sets[i], &buffer_infos.back(), descriptor.binding)); + buffer_infos[buffer_index] = std::move(info); + writes[write_index] = kvfWriteUniformBufferToDescriptorSet(RenderCore::Get().GetDevice(), m_sets[i], &buffer_infos[buffer_index], descriptor.binding); + buffer_index++; } else if(descriptor.storage_buffer_ptr) { @@ -257,9 +277,11 @@ namespace mlx info.buffer = descriptor.storage_buffer_ptr->Get(); info.offset = descriptor.storage_buffer_ptr->GetOffset(); info.range = VK_WHOLE_SIZE; - buffer_infos.push_back(info); - writes.push_back(kvfWriteStorageBufferToDescriptorSet(RenderCore::Get().GetDevice(), m_sets[i], &buffer_infos.back(), descriptor.binding)); + buffer_infos[buffer_index] = std::move(info); + writes[write_index] = kvfWriteStorageBufferToDescriptorSet(RenderCore::Get().GetDevice(), m_sets[i], &buffer_infos[buffer_index], descriptor.binding); + buffer_index++; } + write_index++; } RenderCore::Get().vkUpdateDescriptorSets(RenderCore::Get().GetDevice(), writes.size(), writes.data(), 0, nullptr); } diff --git a/runtime/Sources/Renderer/RenderPasses/2DPass.cpp b/runtime/Sources/Renderer/RenderPasses/2DPass.cpp index 84e4d5f..7a68c34 100644 --- a/runtime/Sources/Renderer/RenderPasses/2DPass.cpp +++ b/runtime/Sources/Renderer/RenderPasses/2DPass.cpp @@ -100,9 +100,9 @@ namespace mlx for(auto& drawable : drawables) { // Check every textures and update modified ones to GPU before starting the render pass + drawable->Update(cmd); if(!drawable->IsSetInit()) drawable->UpdateDescriptorSet(p_texture_set); - drawable->Update(cmd); } m_pipeline.BindPipeline(cmd, 0, {}); diff --git a/runtime/Sources/Renderer/RenderPasses/Passes.cpp b/runtime/Sources/Renderer/RenderPasses/Passes.cpp index 75c09a5..8f36483 100644 --- a/runtime/Sources/Renderer/RenderPasses/Passes.cpp +++ b/runtime/Sources/Renderer/RenderPasses/Passes.cpp @@ -21,6 +21,7 @@ namespace mlx void RenderPasses::Pass(Scene& scene, Renderer& renderer, const Vec4f& clear_color) { + bool force_render = false; if(!m_main_render_texture.IsInit()) { VkExtent2D extent; @@ -34,11 +35,15 @@ namespace mlx m_main_render_texture.Init({}, extent.width, extent.height, VK_FORMAT_R8G8B8A8_UNORM, false, {}); #endif m_main_render_texture.TransitionLayout(VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL); + force_render = true; + } + if(scene.HasSceneChanged() || force_render) + { + m_main_render_texture.Clear(renderer.GetActiveCommandBuffer(), clear_color); + m_2Dpass.Pass(scene, renderer, m_main_render_texture); } - m_main_render_texture.Clear(renderer.GetActiveCommandBuffer(), clear_color); - - m_2Dpass.Pass(scene, renderer, m_main_render_texture); m_final.Pass(scene, renderer, m_main_render_texture, p_render_target); + scene.ResetChangeChecker(); } void RenderPasses::Destroy() diff --git a/runtime/Sources/Renderer/Renderer.cpp b/runtime/Sources/Renderer/Renderer.cpp index 8c18706..eeefe2b 100644 --- 
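Scene::HasSceneChanged() / ResetChangeChecker() plus force_render in RenderPasses::Pass form a plain dirty-flag cache: the costly clear and 2D pass only re-run when the scene mutated or the offscreen target was just (re)created, while the cheap final pass keeps presenting the cached texture every frame. An illustrative reduction of the pattern; CachedScenePass and its callbacks are not the engine's names:

class CachedScenePass
{
    public:
        void MarkDirty() noexcept { m_dirty = true; }  // called whenever the scene mutates

        template<typename ExpensiveRender, typename CheapComposite>
        void Frame(ExpensiveRender&& redraw_offscreen, CheapComposite&& present)
        {
            if(m_dirty || m_first_frame)
            {
                redraw_offscreen();   // clear + re-record every drawable into the offscreen target
                m_first_frame = false;
            }
            present();                // always runs: blits the cached target to the swapchain
            m_dirty = false;
        }

    private:
        bool m_dirty = false;
        bool m_first_frame = true;
};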
a/runtime/Sources/Renderer/Renderer.cpp +++ b/runtime/Sources/Renderer/Renderer.cpp @@ -87,6 +87,8 @@ namespace mlx DebugLog("Vulkan: image available semaphore destroyed"); kvfDestroySemaphore(RenderCore::Get().GetDevice(), m_render_finished_semaphores[i]); DebugLog("Vulkan: render finished semaphore destroyed"); + kvfDestroyCommandBuffer(RenderCore::Get().GetDevice(), m_cmd_buffers[i]); + DebugLog("Vulkan: command buffer destroyed"); kvfDestroyFence(RenderCore::Get().GetDevice(), m_cmd_fences[i]); DebugLog("Vulkan: fence destroyed"); } diff --git a/third_party/kvf.h b/third_party/kvf.h index 3aa5486..b480456 100755 --- a/third_party/kvf.h +++ b/third_party/kvf.h @@ -549,7 +549,7 @@ void __kvfCompleteDevice(VkPhysicalDevice physical, VkDevice device) kvf_device = &__kvf_internal_devices[i]; } - KVF_ASSERT(kvf_device != NULL); + KVF_ASSERT(kvf_device != NULL && "could not find VkDevice in registered devices"); VkCommandPool pool; VkCommandPoolCreateInfo pool_info = {}; @@ -583,7 +583,7 @@ void __kvfCompleteDeviceCustomPhysicalDeviceAndQueues(VkPhysicalDevice physical, kvf_device = &__kvf_internal_devices[i]; } - KVF_ASSERT(kvf_device != NULL); + KVF_ASSERT(kvf_device != NULL && "could not find VkDevice in registered devices"); VkCommandPool pool; VkCommandPoolCreateInfo pool_info = {}; @@ -645,7 +645,7 @@ void kvfSetAllocationCallbacks(VkDevice device, const VkAllocationCallbacks* cal { KVF_ASSERT(device != VK_NULL_HANDLE); __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); - KVF_ASSERT(kvf_device != NULL); + KVF_ASSERT(kvf_device != NULL && "could not find VkDevice in registered devices"); kvf_device->callbacks = (VkAllocationCallbacks*)KVF_MALLOC(sizeof(VkAllocationCallbacks)); KVF_ASSERT(kvf_device->callbacks && "allocation failed :("); memcpy(kvf_device->callbacks, callbacks, sizeof(VkAllocationCallbacks)); @@ -703,7 +703,7 @@ void __kvfDestroyDevice(VkDevice device) KVF_ASSERT(device != VK_NULL_HANDLE); __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); - KVF_ASSERT(kvf_device != NULL); + KVF_ASSERT(kvf_device != NULL && "could not find VkDevice in registered devices"); for(size_t i = 0; i < __kvf_internal_swapchains_size; i++) { @@ -758,7 +758,7 @@ void __kvfDestroyFramebuffer(VkDevice device, VkFramebuffer framebuffer) KVF_ASSERT(device != VK_NULL_HANDLE); __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); - KVF_ASSERT(kvf_device != NULL); + KVF_ASSERT(kvf_device != NULL && "could not find VkDevice in registered devices"); for(size_t i = 0; i < __kvf_internal_framebuffers_size; i++) { @@ -796,7 +796,7 @@ VkDescriptorPool __kvfDeviceCreateDescriptorPool(VkDevice device) { KVF_ASSERT(device != VK_NULL_HANDLE); __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); - KVF_ASSERT(kvf_device != NULL); + KVF_ASSERT(kvf_device != NULL && "could not find VkDevice in registered devices"); kvf_device->sets_pools_size++; kvf_device->sets_pools = (__KvfDescriptorPool*)KVF_REALLOC(kvf_device->sets_pools, kvf_device->sets_pools_size * sizeof(__KvfDescriptorPool)); memset(&kvf_device->sets_pools[kvf_device->sets_pools_size - 1], 0, sizeof(__KvfDescriptorPool)); @@ -831,7 +831,7 @@ void __kvfDestroyDescriptorPools(VkDevice device) { KVF_ASSERT(device != VK_NULL_HANDLE); __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); - KVF_ASSERT(kvf_device != NULL); + KVF_ASSERT(kvf_device != NULL && "could not find VkDevice in registered devices"); for(size_t i = 0; i < kvf_device->sets_pools_size; i++) 
KVF_GET_DEVICE_FUNCTION(vkDestroyDescriptorPool)(device, kvf_device->sets_pools[i].pool, NULL); @@ -976,7 +976,7 @@ VkFormat kvfFindSupportFormatInCandidates(VkDevice device, VkFormat* candidates, { KVF_ASSERT(device != VK_NULL_HANDLE); __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); - KVF_ASSERT(kvf_device != NULL); + KVF_ASSERT(kvf_device != NULL && "could not find VkDevice in registered devices"); for(size_t i = 0; i < candidates_count; i++) { VkFormatProperties props; @@ -1529,7 +1529,7 @@ VkDevice kvfCreateDevice(VkPhysicalDevice physical, const char** extensions, uin __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkPhysicalDevice(physical); - KVF_ASSERT(kvf_device != NULL); + KVF_ASSERT(kvf_device != NULL && "could not find VkDevice in registered devices"); uint32_t queue_count = 0; queue_count += (kvf_device->queues.graphics != -1); @@ -1668,7 +1668,7 @@ VkDevice kvfCreateDeviceCustomPhysicalDeviceAndQueues(VkPhysicalDevice physical, KVF_ASSERT(device != VK_NULL_HANDLE); KVF_ASSERT(fns != NULL); __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkPhysicalDevice(physical); - KVF_ASSERT(kvf_device != NULL); + KVF_ASSERT(kvf_device != NULL && "could not find VkDevice in registered devices"); kvf_device->fns = *fns; __kvfCompleteDevice(physical, device); } @@ -1685,7 +1685,7 @@ VkQueue kvfGetDeviceQueue(VkDevice device, KvfQueueType queue) { KVF_ASSERT(device != VK_NULL_HANDLE); __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); - KVF_ASSERT(kvf_device != NULL); + KVF_ASSERT(kvf_device != NULL && "could not find VkDevice in registered devices"); VkQueue vk_queue = VK_NULL_HANDLE; if(queue == KVF_GRAPHICS_QUEUE) { @@ -1709,7 +1709,7 @@ uint32_t kvfGetDeviceQueueFamily(VkDevice device, KvfQueueType queue) { KVF_ASSERT(device != VK_NULL_HANDLE); __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); - KVF_ASSERT(kvf_device != NULL); + KVF_ASSERT(kvf_device != NULL && "could not find VkDevice in registered devices"); if(queue == KVF_GRAPHICS_QUEUE) return kvf_device->queues.graphics; else if(queue == KVF_PRESENT_QUEUE) @@ -1726,7 +1726,7 @@ uint32_t kvfGetDeviceQueueFamily(VkDevice device, KvfQueueType queue) KVF_ASSERT(device != VK_NULL_HANDLE); #ifdef KVF_IMPL_VK_NO_PROTOTYPES __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); - KVF_ASSERT(kvf_device != NULL); + KVF_ASSERT(kvf_device != NULL && "could not find VkDevice in registered devices"); #endif VkPresentInfoKHR present_info = {}; present_info.sType = VK_STRUCTURE_TYPE_PRESENT_INFO_KHR; @@ -1814,7 +1814,7 @@ VkFence kvfCreateFence(VkDevice device) { KVF_ASSERT(device != VK_NULL_HANDLE); __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); - KVF_ASSERT(kvf_device != NULL); + KVF_ASSERT(kvf_device != NULL && "could not find VkDevice in registered devices"); VkFenceCreateInfo fence_info = {}; fence_info.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO; fence_info.flags = VK_FENCE_CREATE_SIGNALED_BIT; @@ -1829,7 +1829,7 @@ void kvfWaitForFence(VkDevice device, VkFence fence) KVF_ASSERT(fence != VK_NULL_HANDLE); #ifdef KVF_IMPL_VK_NO_PROTOTYPES __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); - KVF_ASSERT(kvf_device != NULL); + KVF_ASSERT(kvf_device != NULL && "could not find VkDevice in registered devices"); #endif KVF_GET_DEVICE_FUNCTION(vkWaitForFences)(device, 1, &fence, VK_TRUE, UINT64_MAX); } @@ -1840,7 +1840,7 @@ void kvfDestroyFence(VkDevice device, VkFence fence) return; KVF_ASSERT(device != VK_NULL_HANDLE); __KvfDevice* kvf_device = 
__kvfGetKvfDeviceFromVkDevice(device); - KVF_ASSERT(kvf_device != NULL); + KVF_ASSERT(kvf_device != NULL && "could not find VkDevice in registered devices"); KVF_GET_DEVICE_FUNCTION(vkDestroyFence)(device, fence, kvf_device->callbacks); } @@ -1848,7 +1848,7 @@ VkSemaphore kvfCreateSemaphore(VkDevice device) { KVF_ASSERT(device != VK_NULL_HANDLE); __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); - KVF_ASSERT(kvf_device != NULL); + KVF_ASSERT(kvf_device != NULL && "could not find VkDevice in registered devices"); VkSemaphoreCreateInfo semaphore_info = {}; semaphore_info.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO; VkSemaphore semaphore; @@ -1862,7 +1862,7 @@ void kvfDestroySemaphore(VkDevice device, VkSemaphore semaphore) return; KVF_ASSERT(device != VK_NULL_HANDLE); __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); - KVF_ASSERT(kvf_device != NULL); + KVF_ASSERT(kvf_device != NULL && "could not find VkDevice in registered devices"); KVF_GET_DEVICE_FUNCTION(vkDestroySemaphore)(device, semaphore, kvf_device->callbacks); } @@ -1941,7 +1941,7 @@ void kvfDestroySemaphore(VkDevice device, VkSemaphore semaphore) image_count = support.capabilities.maxImageCount; __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); - KVF_ASSERT(kvf_device != NULL); + KVF_ASSERT(kvf_device != NULL && "could not find VkDevice in registered devices"); uint32_t queue_family_indices[] = { (uint32_t)kvf_device->queues.graphics, (uint32_t)kvf_device->queues.present }; @@ -2032,7 +2032,7 @@ VkImage kvfCreateImage(VkDevice device, uint32_t width, uint32_t height, VkForma { KVF_ASSERT(device != VK_NULL_HANDLE); __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); - KVF_ASSERT(kvf_device != NULL); + KVF_ASSERT(kvf_device != NULL && "could not find VkDevice in registered devices"); VkImageCreateInfo image_info = {}; image_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO; image_info.imageType = VK_IMAGE_TYPE_2D; @@ -2066,7 +2066,7 @@ void kvfCopyImageToBuffer(VkCommandBuffer cmd, VkBuffer dst, VkImage src, size_t KVF_ASSERT(src != VK_NULL_HANDLE); #ifdef KVF_IMPL_VK_NO_PROTOTYPES __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkCommandBuffer(cmd); - KVF_ASSERT(kvf_device != NULL); + KVF_ASSERT(kvf_device != NULL && "could not find VkDevice in registered devices"); #endif VkOffset3D offset = { 0, 0, 0 }; VkBufferImageCopy region = {}; @@ -2088,7 +2088,7 @@ void kvfDestroyImage(VkDevice device, VkImage image) return; KVF_ASSERT(device != VK_NULL_HANDLE); __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); - KVF_ASSERT(kvf_device != NULL); + KVF_ASSERT(kvf_device != NULL && "could not find VkDevice in registered devices"); KVF_GET_DEVICE_FUNCTION(vkDestroyImage)(device, image, kvf_device->callbacks); } @@ -2096,7 +2096,7 @@ VkImageView kvfCreateImageView(VkDevice device, VkImage image, VkFormat format, { KVF_ASSERT(device != VK_NULL_HANDLE); __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); - KVF_ASSERT(kvf_device != NULL); + KVF_ASSERT(kvf_device != NULL && "could not find VkDevice in registered devices"); VkImageViewCreateInfo create_info = {}; create_info.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO; create_info.image = image; @@ -2121,7 +2121,7 @@ void kvfDestroyImageView(VkDevice device, VkImageView image_view) KVF_ASSERT(device != VK_NULL_HANDLE); KVF_ASSERT(image_view != VK_NULL_HANDLE); __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); - KVF_ASSERT(kvf_device != NULL); + KVF_ASSERT(kvf_device != NULL && "could not find 
VkDevice in registered devices"); KVF_GET_DEVICE_FUNCTION(vkDestroyImageView)(device, image_view, kvf_device->callbacks); } @@ -2135,7 +2135,7 @@ void kvfTransitionImageLayout(VkDevice device, VkImage image, KvfImageType type, #ifdef KVF_IMPL_VK_NO_PROTOTYPES __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); - KVF_ASSERT(kvf_device != NULL); + KVF_ASSERT(kvf_device != NULL && "could not find VkDevice in registered devices"); #endif if(is_single_time_cmd_buffer) @@ -2189,7 +2189,7 @@ VkSampler kvfCreateSampler(VkDevice device, VkFilter filters, VkSamplerAddressMo { KVF_ASSERT(device != VK_NULL_HANDLE); __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); - KVF_ASSERT(kvf_device != NULL); + KVF_ASSERT(kvf_device != NULL && "could not find VkDevice in registered devices"); VkSamplerCreateInfo info = {}; info.sType = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO; info.magFilter = filters; @@ -2213,7 +2213,7 @@ void kvfDestroySampler(VkDevice device, VkSampler sampler) return; KVF_ASSERT(device != VK_NULL_HANDLE); __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); - KVF_ASSERT(kvf_device != NULL); + KVF_ASSERT(kvf_device != NULL && "could not find VkDevice in registered devices"); KVF_GET_DEVICE_FUNCTION(vkDestroySampler)(device, sampler, kvf_device->callbacks); } @@ -2221,7 +2221,7 @@ VkBuffer kvfCreateBuffer(VkDevice device, VkBufferUsageFlags usage, VkDeviceSize { KVF_ASSERT(device != VK_NULL_HANDLE); __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); - KVF_ASSERT(kvf_device != NULL); + KVF_ASSERT(kvf_device != NULL && "could not find VkDevice in registered devices"); VkBufferCreateInfo buffer_info = {}; buffer_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO; buffer_info.size = size; @@ -2239,7 +2239,7 @@ void kvfCopyBufferToBuffer(VkCommandBuffer cmd, VkBuffer dst, VkBuffer src, size KVF_ASSERT(src != VK_NULL_HANDLE); #ifdef KVF_IMPL_VK_NO_PROTOTYPES __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkCommandBuffer(cmd); - KVF_ASSERT(kvf_device != NULL); + KVF_ASSERT(kvf_device != NULL && "could not find VkDevice in registered devices"); #endif VkBufferCopy copy_region = {}; copy_region.size = size; @@ -2253,7 +2253,7 @@ void kvfCopyBufferToImage(VkCommandBuffer cmd, VkImage dst, VkBuffer src, size_t KVF_ASSERT(src != VK_NULL_HANDLE); #ifdef KVF_IMPL_VK_NO_PROTOTYPES __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkCommandBuffer(cmd); - KVF_ASSERT(kvf_device != NULL); + KVF_ASSERT(kvf_device != NULL && "could not find VkDevice in registered devices"); #endif VkOffset3D offset = { 0, 0, 0 }; VkBufferImageCopy region = {}; @@ -2275,7 +2275,7 @@ void kvfDestroyBuffer(VkDevice device, VkBuffer buffer) return; KVF_ASSERT(device != VK_NULL_HANDLE); __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); - KVF_ASSERT(kvf_device != NULL); + KVF_ASSERT(kvf_device != NULL && "could not find VkDevice in registered devices"); KVF_GET_DEVICE_FUNCTION(vkDestroyBuffer)(device, buffer, kvf_device->callbacks); } @@ -2284,7 +2284,7 @@ VkFramebuffer kvfCreateFramebuffer(VkDevice device, VkRenderPass render_pass, Vk KVF_ASSERT(device != VK_NULL_HANDLE); KVF_ASSERT(image_views != NULL); __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); - KVF_ASSERT(kvf_device != NULL); + KVF_ASSERT(kvf_device != NULL && "could not find VkDevice in registered devices"); VkFramebufferCreateInfo framebuffer_info = {}; framebuffer_info.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO; framebuffer_info.renderPass = render_pass; @@ -2324,7 +2324,7 @@ 
VkCommandBuffer kvfCreateCommandBufferLeveled(VkDevice device, VkCommandBufferLe { KVF_ASSERT(device != VK_NULL_HANDLE); __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); - KVF_ASSERT(kvf_device != NULL); + KVF_ASSERT(kvf_device != NULL && "could not find VkDevice in registered devices"); VkCommandPool pool = kvf_device->cmd_pool; VkCommandBuffer buffer; @@ -2335,7 +2335,7 @@ VkCommandBuffer kvfCreateCommandBufferLeveled(VkDevice device, VkCommandBufferLe alloc_info.commandBufferCount = 1; __kvfCheckVk(KVF_GET_DEVICE_FUNCTION(vkAllocateCommandBuffers)(device, &alloc_info, &buffer)); - if(kvf_device->cmd_buffers_size == kvf_device->cmd_buffers_capacity) + if(kvf_device->cmd_buffers_size >= kvf_device->cmd_buffers_capacity) { // Resize the dynamic array if necessary kvf_device->cmd_buffers_capacity += KVF_COMMAND_POOL_CAPACITY; @@ -2352,7 +2352,7 @@ void kvfBeginCommandBuffer(VkCommandBuffer buffer, VkCommandBufferUsageFlags usa KVF_ASSERT(buffer != VK_NULL_HANDLE); #ifdef KVF_IMPL_VK_NO_PROTOTYPES __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkCommandBuffer(buffer); - KVF_ASSERT(kvf_device != NULL); + KVF_ASSERT(kvf_device != NULL && "could not find VkDevice in registered devices"); #endif VkCommandBufferBeginInfo begin_info = {}; begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO; @@ -2365,7 +2365,7 @@ void kvfEndCommandBuffer(VkCommandBuffer buffer) KVF_ASSERT(buffer != VK_NULL_HANDLE); #ifdef KVF_IMPL_VK_NO_PROTOTYPES __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkCommandBuffer(buffer); - KVF_ASSERT(kvf_device != NULL); + KVF_ASSERT(kvf_device != NULL && "could not find VkDevice in registered devices"); #endif __kvfCheckVk(KVF_GET_DEVICE_FUNCTION(vkEndCommandBuffer)(buffer)); } @@ -2376,7 +2376,7 @@ void kvfSubmitCommandBuffer(VkDevice device, VkCommandBuffer buffer, KvfQueueTyp #ifdef KVF_IMPL_VK_NO_PROTOTYPES __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); - KVF_ASSERT(kvf_device != NULL); + KVF_ASSERT(kvf_device != NULL && "could not find VkDevice in registered devices"); #endif VkSemaphore signal_semaphores[1]; VkSemaphore wait_semaphores[1]; @@ -2403,7 +2403,7 @@ void kvfSubmitSingleTimeCommandBuffer(VkDevice device, VkCommandBuffer buffer, K KVF_ASSERT(device != VK_NULL_HANDLE); #ifdef KVF_IMPL_VK_NO_PROTOTYPES __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); - KVF_ASSERT(kvf_device != NULL); + KVF_ASSERT(kvf_device != NULL && "could not find VkDevice in registered devices"); #endif if(fence != VK_NULL_HANDLE) @@ -2424,7 +2424,7 @@ void kvfDestroyCommandBuffer(VkDevice device, VkCommandBuffer buffer) return; KVF_ASSERT(device != VK_NULL_HANDLE); __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); - KVF_ASSERT(kvf_device != NULL); + KVF_ASSERT(kvf_device != NULL && "could not find VkDevice in registered devices"); for(size_t i = 0; i < kvf_device->cmd_buffers_size; i++) { @@ -2434,7 +2434,7 @@ void kvfDestroyCommandBuffer(VkDevice device, VkCommandBuffer buffer) // Shift the elements to fill the gap for(size_t j = i; j < kvf_device->cmd_buffers_size - 1; j++) kvf_device->cmd_buffers[j] = kvf_device->cmd_buffers[j + 1]; - kvf_device->cmd_buffers--; + kvf_device->cmd_buffers_size--; return; } } @@ -2553,7 +2553,7 @@ VkRenderPass kvfCreateRenderPassWithSubpassDependencies(VkDevice device, VkAttac } __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); - KVF_ASSERT(kvf_device != NULL); + KVF_ASSERT(kvf_device != NULL && "could not find VkDevice in registered devices"); VkSubpassDescription subpass = 
{}; subpass.pipelineBindPoint = bind_point; @@ -2583,7 +2583,7 @@ void kvfDestroyRenderPass(VkDevice device, VkRenderPass renderPass) return; KVF_ASSERT(device != VK_NULL_HANDLE); __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); - KVF_ASSERT(kvf_device != NULL); + KVF_ASSERT(kvf_device != NULL && "could not find VkDevice in registered devices"); KVF_GET_DEVICE_FUNCTION(vkDestroyRenderPass)(device, renderPass, kvf_device->callbacks); } @@ -2593,7 +2593,7 @@ void kvfBeginRenderPass(VkRenderPass pass, VkCommandBuffer cmd, VkFramebuffer fr KVF_ASSERT(framebuffer != VK_NULL_HANDLE); #ifdef KVF_IMPL_VK_NO_PROTOTYPES __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkCommandBuffer(cmd); - KVF_ASSERT(kvf_device != NULL); + KVF_ASSERT(kvf_device != NULL && "could not find VkDevice in registered devices"); #endif VkOffset2D offset = { 0, 0 }; @@ -2612,7 +2612,7 @@ VkShaderModule kvfCreateShaderModule(VkDevice device, uint32_t* code, size_t siz { KVF_ASSERT(device != VK_NULL_HANDLE); __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); - KVF_ASSERT(kvf_device != NULL); + KVF_ASSERT(kvf_device != NULL && "could not find VkDevice in registered devices"); VkShaderModuleCreateInfo createInfo = {}; createInfo.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO; createInfo.codeSize = size * sizeof(uint32_t); @@ -2628,7 +2628,7 @@ void kvfDestroyShaderModule(VkDevice device, VkShaderModule shader) return; KVF_ASSERT(device != VK_NULL_HANDLE); __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); - KVF_ASSERT(kvf_device != NULL); + KVF_ASSERT(kvf_device != NULL && "could not find VkDevice in registered devices"); KVF_GET_DEVICE_FUNCTION(vkDestroyShaderModule)(device, shader, kvf_device->callbacks); } @@ -2636,7 +2636,7 @@ VkDescriptorSetLayout kvfCreateDescriptorSetLayout(VkDevice device, VkDescriptor { KVF_ASSERT(device != VK_NULL_HANDLE); __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); - KVF_ASSERT(kvf_device != NULL); + KVF_ASSERT(kvf_device != NULL && "could not find VkDevice in registered devices"); VkDescriptorSetLayoutCreateInfo layout_info = {}; layout_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO; layout_info.bindingCount = bindings_count; @@ -2653,7 +2653,7 @@ void kvfDestroyDescriptorSetLayout(VkDevice device, VkDescriptorSetLayout layout return; KVF_ASSERT(device != VK_NULL_HANDLE); __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); - KVF_ASSERT(kvf_device != NULL); + KVF_ASSERT(kvf_device != NULL && "could not find VkDevice in registered devices"); KVF_GET_DEVICE_FUNCTION(vkDestroyDescriptorSetLayout)(device, layout, kvf_device->callbacks); } @@ -2661,7 +2661,7 @@ VkDescriptorSet kvfAllocateDescriptorSet(VkDevice device, VkDescriptorSetLayout { KVF_ASSERT(device != VK_NULL_HANDLE); __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); - KVF_ASSERT(kvf_device != NULL); + KVF_ASSERT(kvf_device != NULL && "could not find VkDevice in registered devices"); VkDescriptorPool pool = VK_NULL_HANDLE; for(uint32_t i = 0; i < kvf_device->sets_pools_size; i++) { @@ -2687,7 +2687,7 @@ void kvfUpdateStorageBufferToDescriptorSet(VkDevice device, VkDescriptorSet set, { #ifdef KVF_IMPL_VK_NO_PROTOTYPES __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); - KVF_ASSERT(kvf_device != NULL); + KVF_ASSERT(kvf_device != NULL && "could not find VkDevice in registered devices"); #endif VkWriteDescriptorSet write = kvfWriteStorageBufferToDescriptorSet(device, set, info, binding); 
KVF_GET_DEVICE_FUNCTION(vkUpdateDescriptorSets)(device, 1, &write, 0, NULL); @@ -2697,7 +2697,7 @@ void kvfUpdateUniformBufferToDescriptorSet(VkDevice device, VkDescriptorSet set, { #ifdef KVF_IMPL_VK_NO_PROTOTYPES __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); - KVF_ASSERT(kvf_device != NULL); + KVF_ASSERT(kvf_device != NULL && "could not find VkDevice in registered devices"); #endif VkWriteDescriptorSet write = kvfWriteUniformBufferToDescriptorSet(device, set, info, binding); KVF_GET_DEVICE_FUNCTION(vkUpdateDescriptorSets)(device, 1, &write, 0, NULL); @@ -2707,7 +2707,7 @@ void kvfUpdateImageToDescriptorSet(VkDevice device, VkDescriptorSet set, const V { #ifdef KVF_IMPL_VK_NO_PROTOTYPES __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); - KVF_ASSERT(kvf_device != NULL); + KVF_ASSERT(kvf_device != NULL && "could not find VkDevice in registered devices"); #endif VkWriteDescriptorSet write = kvfWriteImageToDescriptorSet(device, set, info, binding); KVF_GET_DEVICE_FUNCTION(vkUpdateDescriptorSets)(device, 1, &write, 0, NULL); @@ -2725,6 +2725,7 @@ VkWriteDescriptorSet kvfWriteStorageBufferToDescriptorSet(VkDevice device, VkDes descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER; descriptor_write.descriptorCount = 1; descriptor_write.pBufferInfo = info; + descriptor_write.pNext = NULL; return descriptor_write; } @@ -2740,6 +2741,7 @@ VkWriteDescriptorSet kvfWriteUniformBufferToDescriptorSet(VkDevice device, VkDes descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER; descriptor_write.descriptorCount = 1; descriptor_write.pBufferInfo = info; + descriptor_write.pNext = NULL; return descriptor_write; } @@ -2755,6 +2757,7 @@ VkWriteDescriptorSet kvfWriteImageToDescriptorSet(VkDevice device, VkDescriptorS descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER; descriptor_write.descriptorCount = 1; descriptor_write.pImageInfo = info; + descriptor_write.pNext = NULL; return descriptor_write; } @@ -2762,7 +2765,7 @@ VkPipelineLayout kvfCreatePipelineLayout(VkDevice device, VkDescriptorSetLayout* { KVF_ASSERT(device != VK_NULL_HANDLE); __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); - KVF_ASSERT(kvf_device != NULL); + KVF_ASSERT(kvf_device != NULL && "could not find VkDevice in registered devices"); VkPipelineLayoutCreateInfo pipeline_layout_info = {}; pipeline_layout_info.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO; pipeline_layout_info.setLayoutCount = set_layouts_count; @@ -2781,7 +2784,7 @@ void kvfDestroyPipelineLayout(VkDevice device, VkPipelineLayout layout) return; KVF_ASSERT(device != VK_NULL_HANDLE); __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); - KVF_ASSERT(kvf_device != NULL); + KVF_ASSERT(kvf_device != NULL && "could not find VkDevice in registered devices"); KVF_GET_DEVICE_FUNCTION(vkDestroyPipelineLayout)(device, layout, kvf_device->callbacks); } @@ -2789,7 +2792,7 @@ void kvfResetDeviceDescriptorPools(VkDevice device) { KVF_ASSERT(device != VK_NULL_HANDLE); __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); - KVF_ASSERT(kvf_device != NULL); + KVF_ASSERT(kvf_device != NULL && "could not find VkDevice in registered devices"); for(uint32_t i = 0; i < kvf_device->sets_pools_size; i++) { KVF_GET_DEVICE_FUNCTION(vkResetDescriptorPool)(device, kvf_device->sets_pools[i].pool, 0); @@ -3018,7 +3021,7 @@ VkPipeline kvfCreateGraphicsPipeline(VkDevice device, VkPipelineCache cache, VkP pipeline_info.pDepthStencilState = &builder->depth_stencil_state; 
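The added descriptor_write.pNext = NULL; lines close a common Vulkan pitfall: a VkWriteDescriptorSet with an uninitialised pNext chain is undefined behaviour that may only fail intermittently. Zero-initialising the whole struct gives the same guarantee; a short sketch of that habit (MakeUniformBufferWrite is illustrative, not part of kvf):

#include <vulkan/vulkan.h>

VkWriteDescriptorSet MakeUniformBufferWrite(VkDescriptorSet set, uint32_t binding, const VkDescriptorBufferInfo* info)
{
    VkWriteDescriptorSet write = {};                  // every field zeroed, including pNext
    write.sType           = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
    write.dstSet          = set;
    write.dstBinding      = binding;
    write.descriptorType  = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
    write.descriptorCount = 1;
    write.pBufferInfo     = info;
    return write;
}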
__KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); - KVF_ASSERT(kvf_device != NULL); + KVF_ASSERT(kvf_device != NULL && "could not find VkDevice in registered devices"); VkPipeline pipeline; __kvfCheckVk(KVF_GET_DEVICE_FUNCTION(vkCreateGraphicsPipelines)(device, cache, 1, &pipeline_info, kvf_device->callbacks, &pipeline)); return pipeline; @@ -3028,7 +3031,7 @@ void kvfDestroyPipeline(VkDevice device, VkPipeline pipeline) { KVF_ASSERT(device != VK_NULL_HANDLE); __KvfDevice* kvf_device = __kvfGetKvfDeviceFromVkDevice(device); - KVF_ASSERT(kvf_device != NULL); + KVF_ASSERT(kvf_device != NULL && "could not find VkDevice in registered devices"); KVF_GET_DEVICE_FUNCTION(vkDestroyPipeline)(device, pipeline, kvf_device->callbacks); } diff --git a/xmake.lua b/xmake.lua index 1ffbc7e..074e4ce 100644 --- a/xmake.lua +++ b/xmake.lua @@ -2,7 +2,7 @@ add_requires("libsdl", { configs = { sdlmain = false } }) -add_rules("mode.debug", "mode.release") +add_rules("mode.debug", "mode.release", "mode.releasedbg") set_languages("cxx20", "c11") set_objectdir("objs/xmake/$(os)_$(arch)") @@ -85,7 +85,6 @@ target("mlx") end) target_end() ---[[ target("Test") set_default(false) set_kind("binary") @@ -101,4 +100,3 @@ target("Test") add_packages("libsdl") target_end() -]]-- From f9ef8fc7900513ae7ce51a2117cb1c1f1f0e6ac5 Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Tue, 7 Jan 2025 15:59:26 +0100 Subject: [PATCH 119/131] fixing possible segmentation fault in Texture::SetLinearRegion --- runtime/Sources/Renderer/Image.cpp | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/runtime/Sources/Renderer/Image.cpp b/runtime/Sources/Renderer/Image.cpp index 4d3df4b..69fd9bc 100644 --- a/runtime/Sources/Renderer/Image.cpp +++ b/runtime/Sources/Renderer/Image.cpp @@ -264,12 +264,17 @@ namespace mlx OpenCPUBuffer(); if constexpr(std::endian::native == std::endian::little) { - for(std::size_t i = 0; i < len; i++) + for(std::size_t i = 0; i < len && (y * m_width) + x + i < m_width * m_height; i++) m_staging_buffer->GetMap()[(y * m_width) + x + i] = ReverseColor(pixels[i]); } else { - std::memcpy(&m_staging_buffer->GetMap()[(y * m_width) + x], pixels, len); + std::size_t len_guard; + if((y * m_width + x + len) < m_width * m_height) + len_guard = len; + else + len_guard = len - (m_width * m_height - (y * m_width + x + len)); + std::memcpy(&m_staging_buffer->GetMap()[(y * m_width) + x], pixels, len_guard); } m_has_been_modified = true; } From 273255ecfe2784945eb73f88f00477b056c13709 Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Tue, 7 Jan 2025 19:57:03 +0100 Subject: [PATCH 120/131] fixing transformations --- runtime/Sources/Core/Graphics.cpp | 2 +- .../Sources/Renderer/RenderPasses/2DPass.cpp | 18 +++++++++++++----- 2 files changed, 14 insertions(+), 6 deletions(-) diff --git a/runtime/Sources/Core/Graphics.cpp b/runtime/Sources/Core/Graphics.cpp index 1e89686..a4421c2 100644 --- a/runtime/Sources/Core/Graphics.cpp +++ b/runtime/Sources/Core/Graphics.cpp @@ -143,7 +143,7 @@ namespace mlx m_pixelput_called = false; } Sprite& new_sprite = p_scene->CreateSprite(texture); - new_sprite.SetCenter(Vec2f{ texture->GetWidth() / 2.0f, texture->GetHeight() / 2.0f }); + new_sprite.SetCenter(Vec2f{ texture->GetWidth() * 0.5f, texture->GetHeight() * 0.5f }); new_sprite.SetPosition(Vec2f{ static_cast(x), static_cast(y) }); new_sprite.SetScale(Vec2f{ scale_x, scale_y }); new_sprite.SetRotation(angle); diff --git a/runtime/Sources/Renderer/RenderPasses/2DPass.cpp b/runtime/Sources/Renderer/RenderPasses/2DPass.cpp index 
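Patch 119's guard in Texture::SetLinearRegion is, in spirit, a clamp of the copy length to the pixels that actually remain in the staging buffer. A minimal sketch of that clamp; ClampedCopyLength is illustrative and not the engine's code:

#include <algorithm>
#include <cstddef>

// How many pixels of a linear run starting at (x, y) still fit in a width * height image.
std::size_t ClampedCopyLength(std::size_t x, std::size_t y, std::size_t width, std::size_t height, std::size_t len)
{
    const std::size_t start = y * width + x;
    const std::size_t total = width * height;
    if(start >= total)
        return 0;                         // the run starts past the end of the buffer
    return std::min(len, total - start);  // drop the tail that would overflow
}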
7a68c34..2a13704 100644 --- a/runtime/Sources/Renderer/RenderPasses/2DPass.cpp +++ b/runtime/Sources/Renderer/RenderPasses/2DPass.cpp @@ -100,9 +100,9 @@ namespace mlx for(auto& drawable : drawables) { // Check every textures and update modified ones to GPU before starting the render pass - drawable->Update(cmd); if(!drawable->IsSetInit()) drawable->UpdateDescriptorSet(p_texture_set); + drawable->Update(cmd); } m_pipeline.BindPipeline(cmd, 0, {}); @@ -110,11 +110,19 @@ namespace mlx { DrawableData drawable_data; drawable_data.color = drawable->GetColor(); + + Mat4f rotation_matrix = Mat4f::Identity(); + rotation_matrix.ApplyTranslation(Vec3f{ -drawable->GetCenter(), 0.0f }); + rotation_matrix.ApplyRotation(drawable->GetRotation()); + rotation_matrix.ApplyTranslation(Vec3f{ drawable->GetCenter(), 0.0f }); + + Mat4f translation_matrix = Mat4f::Identity().ApplyTranslation(Vec3f{ drawable->GetPosition(), 0.0f }); + Mat4f scale_matrix = Mat4f::Identity().ApplyScale(Vec3f{ drawable->GetScale(), 1.0f }); + drawable_data.model_matrix = Mat4f::Identity(); - drawable_data.model_matrix.ApplyTranslation(Vec3f{ -drawable->GetCenter() / 2.0f, 0.0f }); - drawable_data.model_matrix.ApplyRotation(drawable->GetRotation()); - drawable_data.model_matrix.ApplyTranslation(Vec3f{ drawable->GetPosition() + drawable->GetCenter() / 2.0f, 0.0f }); - drawable_data.model_matrix.ApplyScale(Vec3f{ drawable->GetScale(), 1.0f }); + drawable_data.model_matrix.ConcatenateTransform(rotation_matrix); + drawable_data.model_matrix.ConcatenateTransform(scale_matrix); + drawable_data.model_matrix.ConcatenateTransform(translation_matrix); drawable->Bind(frame_index, cmd); From 55f1df2ec71334d3e5d77b3bc5fba126cce70a85 Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Wed, 8 Jan 2025 12:37:34 +0100 Subject: [PATCH 121/131] fixing header --- includes/mlx_extended.h | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/includes/mlx_extended.h b/includes/mlx_extended.h index 12bbea0..9489989 100644 --- a/includes/mlx_extended.h +++ b/includes/mlx_extended.h @@ -6,7 +6,7 @@ /* By: maldavid +#+ +:+ +#+ */ /* +#+#+#+#+#+ +#+ */ /* Created: 2024/12/14 16:17:10 by maldavid #+# #+# */ -/* Updated: 2024/12/17 02:59:50 by maldavid ### ########.fr */ +/* Updated: 2025/01/08 12:37:15 by maldavid ### ########.fr */ /* */ /* ************************************************************************** */ @@ -84,9 +84,9 @@ MLX_API void mlx_restore_window(mlx_context mlx, mlx_window win); * @param x X coordinate * @param y Y coordinate * @param pixels Array of pixels - * @param pixels_size Size or the array of pixels + * @param pixels_number Number of pixels */ -MLX_API void mlx_pixel_put_array(mlx_context mlx, mlx_window win, int x, int y, mlx_color* pixels, size_t pixels_size); +MLX_API void mlx_pixel_put_array(mlx_context mlx, mlx_window win, int x, int y, mlx_color* pixels, size_t pixels_number); /** * @brief Put a region of pixels in the window From 427e5aa099975233ed7c1a586f14f5501ce01760 Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Sat, 11 Jan 2025 21:42:50 +0100 Subject: [PATCH 122/131] fixing macOS issue --- Makefile | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Makefile b/Makefile index 0671d7e..7712279 100644 --- a/Makefile +++ b/Makefile @@ -50,8 +50,8 @@ else endif ifeq ($(OS), Darwin) - LDFLAGS += -L /opt/homebrew/Cellar/lib -lSDL2 - CXXFLAGS += -I /opt/homebrew/Cellar/include + LDFLAGS += -L /opt/homebrew/Cellar/lib -L /usr/local/Cellar -lSDL2 + CXXFLAGS += -I /opt/homebrew/Cellar/include -I 
/usr/local/Cellar NAME = libmlx.dylib endif From c6490c979cc22f18b4af031e0e393c3ac3d4f801 Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Sat, 11 Jan 2025 21:44:17 +0100 Subject: [PATCH 123/131] fixing macOS issue --- Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Makefile b/Makefile index 7712279..20633d8 100644 --- a/Makefile +++ b/Makefile @@ -51,7 +51,7 @@ endif ifeq ($(OS), Darwin) LDFLAGS += -L /opt/homebrew/Cellar/lib -L /usr/local/Cellar -lSDL2 - CXXFLAGS += -I /opt/homebrew/Cellar/include -I /usr/local/Cellar + INCLUDES += -I /opt/homebrew/Cellar/include -I /usr/local/Cellar NAME = libmlx.dylib endif From c9870faaf1078c6fcc96e05377a4c9a034ab5068 Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Sat, 11 Jan 2025 21:49:30 +0100 Subject: [PATCH 124/131] fixing macOS issue --- Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Makefile b/Makefile index 20633d8..a1758bd 100644 --- a/Makefile +++ b/Makefile @@ -51,7 +51,7 @@ endif ifeq ($(OS), Darwin) LDFLAGS += -L /opt/homebrew/Cellar/lib -L /usr/local/Cellar -lSDL2 - INCLUDES += -I /opt/homebrew/Cellar/include -I /usr/local/Cellar + INCLUDES += -I /opt/homebrew/Cellar/include -I /usr/local/Cellar/include NAME = libmlx.dylib endif From 285bcda21fbc9c623c00b0504ee2f0915d3d342e Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Sat, 11 Jan 2025 22:00:42 +0100 Subject: [PATCH 125/131] fixing macOS issue --- Makefile | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Makefile b/Makefile index a1758bd..84e6192 100644 --- a/Makefile +++ b/Makefile @@ -50,8 +50,8 @@ else endif ifeq ($(OS), Darwin) - LDFLAGS += -L /opt/homebrew/Cellar/lib -L /usr/local/Cellar -lSDL2 - INCLUDES += -I /opt/homebrew/Cellar/include -I /usr/local/Cellar/include + LDFLAGS += -L /opt/homebrew/Cellar/lib -L /usr/local/Cellar -L /opt/homebrew/lib -lSDL2 + INCLUDES += -I /opt/homebrew/Cellar/include -I /usr/local/Cellar/include -I -I /opt/homebrew/include NAME = libmlx.dylib endif From 809bea2fca7d05ae1b330d386833dc2e8af90a7b Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Sat, 11 Jan 2025 22:21:32 +0100 Subject: [PATCH 126/131] fixing macOS issue --- runtime/Sources/Renderer/Swapchain.cpp | 2 ++ 1 file changed, 2 insertions(+) diff --git a/runtime/Sources/Renderer/Swapchain.cpp b/runtime/Sources/Renderer/Swapchain.cpp index b0b5616..14899b5 100644 --- a/runtime/Sources/Renderer/Swapchain.cpp +++ b/runtime/Sources/Renderer/Swapchain.cpp @@ -1,3 +1,4 @@ +#include "kvf.h" #include #include @@ -88,6 +89,7 @@ namespace mlx RenderCore::Get().vkGetSwapchainImagesKHR(RenderCore::Get().GetDevice(), m_swapchain, &m_images_count, tmp.data()); VkCommandBuffer cmd = kvfCreateCommandBuffer(RenderCore::Get().GetDevice()); kvfBeginCommandBuffer(cmd, VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT); + extent = kvfGetSwapchainImagesSize(m_swapchain); // fix the extent for(std::size_t i = 0; i < m_images_count; i++) { #ifdef DEBUG From 712ac4be6426a7aaeb77504210c53cfffe541bf1 Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Sat, 11 Jan 2025 22:23:20 +0100 Subject: [PATCH 127/131] fixing macOS issue --- Makefile | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Makefile b/Makefile index 84e6192..37104a3 100644 --- a/Makefile +++ b/Makefile @@ -46,12 +46,12 @@ ifeq ($(TOOLCHAIN), gcc) CXXFLAGS += -Wno-error=cpp else PCH = $(CCH) - CXXFLAGS += -Wno-error=#warning -include-pch $(GCH) + CXXFLAGS += -Wno-error=#warning -include-pch $(PCH) endif ifeq ($(OS), Darwin) LDFLAGS += -L /opt/homebrew/Cellar/lib -L /usr/local/Cellar -L 
/opt/homebrew/lib -lSDL2 - INCLUDES += -I /opt/homebrew/Cellar/include -I /usr/local/Cellar/include -I -I /opt/homebrew/include + INCLUDES += -I /opt/homebrew/Cellar/include -I /usr/local/Cellar/include -I /opt/homebrew/include NAME = libmlx.dylib endif From 2030534cf256247446469b4631364cf002eecc97 Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Sat, 11 Jan 2025 22:28:39 +0100 Subject: [PATCH 128/131] fixing macOS issue --- runtime/Includes/Core/Application.h | 4 +- runtime/Includes/Core/EventListener.h | 4 +- runtime/Includes/Core/SDLManager.h | 2 +- runtime/Includes/Platform/Inputs.h | 4 +- runtime/Includes/PreCompiled.h | 1 - runtime/Sources/Core/Bridge.cpp | 1 - runtime/Sources/Core/EventListener.cpp | 2 +- runtime/Sources/Core/SDLManager.cpp | 2 +- .../Sources/Renderer/RenderPasses/2DPass.cpp | 2 +- .../Renderer/RenderPasses/FinalPass.cpp | 2 +- .../Sources/Renderer/RenderPasses/Passes.cpp | 2 +- third_party/function.h | 630 ------------------ 12 files changed, 12 insertions(+), 644 deletions(-) delete mode 100644 third_party/function.h diff --git a/runtime/Includes/Core/Application.h b/runtime/Includes/Core/Application.h index 3cc30ee..a9a1c77 100644 --- a/runtime/Includes/Core/Application.h +++ b/runtime/Includes/Core/Application.h @@ -44,10 +44,10 @@ namespace mlx private: struct Hook { - func::function fn; + std::function fn; void* param; - Hook(func::function fn, void* param) : fn(fn), param(param) {} + Hook(std::function fn, void* param) : fn(fn), param(param) {} }; private: diff --git a/runtime/Includes/Core/EventListener.h b/runtime/Includes/Core/EventListener.h index 4907aa4..e8540c2 100644 --- a/runtime/Includes/Core/EventListener.h +++ b/runtime/Includes/Core/EventListener.h @@ -9,7 +9,7 @@ namespace mlx { public: EventListener() = delete; - EventListener(func::function functor, std::string name); + EventListener(std::function functor, std::string name); inline const std::string& GetName() const { return m_name; } inline void Call(const EventBase& event) const noexcept { m_listen_functor(event); } @@ -17,7 +17,7 @@ namespace mlx ~EventListener() = default; private: - func::function m_listen_functor; + std::function m_listen_functor; std::string m_name; }; } diff --git a/runtime/Includes/Core/SDLManager.h b/runtime/Includes/Core/SDLManager.h index 6ed1c57..5db1a4f 100644 --- a/runtime/Includes/Core/SDLManager.h +++ b/runtime/Includes/Core/SDLManager.h @@ -14,7 +14,7 @@ namespace mlx Handle CreateWindow(const mlx_window_create_info* info, std::int32_t& id, bool hidden); void DestroyWindow(Handle window) noexcept; - void InputsFetcher(func::function functor); + void InputsFetcher(std::function functor); VkSurfaceKHR CreateVulkanSurface(Handle window, VkInstance instance) const noexcept; std::vector GetRequiredVulkanInstanceExtentions(Handle window) const noexcept; diff --git a/runtime/Includes/Platform/Inputs.h b/runtime/Includes/Platform/Inputs.h index e2b7c9a..cce4138 100644 --- a/runtime/Includes/Platform/Inputs.h +++ b/runtime/Includes/Platform/Inputs.h @@ -12,10 +12,10 @@ namespace mlx public: struct Hook { - func::function fn; + std::function fn; void* param = nullptr; - Hook(func::function fn, void* param) : fn(fn), param(param) {} + Hook(std::function fn, void* param) : fn(fn), param(param) {} }; public: diff --git a/runtime/Includes/PreCompiled.h b/runtime/Includes/PreCompiled.h index 8979494..f39b701 100644 --- a/runtime/Includes/PreCompiled.h +++ b/runtime/Includes/PreCompiled.h @@ -28,7 +28,6 @@ #include #include #include -#include #include #include #include diff --git 
a/runtime/Sources/Core/Bridge.cpp b/runtime/Sources/Core/Bridge.cpp index ef83773..e410b6c 100644 --- a/runtime/Sources/Core/Bridge.cpp +++ b/runtime/Sources/Core/Bridge.cpp @@ -1,4 +1,3 @@ -#include "mlx_extended.h" #include #include diff --git a/runtime/Sources/Core/EventListener.cpp b/runtime/Sources/Core/EventListener.cpp index 21540f5..83f18eb 100644 --- a/runtime/Sources/Core/EventListener.cpp +++ b/runtime/Sources/Core/EventListener.cpp @@ -3,7 +3,7 @@ namespace mlx { - EventListener::EventListener(func::function functor, std::string name) + EventListener::EventListener(std::function functor, std::string name) : m_listen_functor(std::move(functor)), m_name(std::move(name)) {} } diff --git a/runtime/Sources/Core/SDLManager.cpp b/runtime/Sources/Core/SDLManager.cpp index 08141de..c6a794a 100644 --- a/runtime/Sources/Core/SDLManager.cpp +++ b/runtime/Sources/Core/SDLManager.cpp @@ -216,7 +216,7 @@ namespace mlx return y; } - void SDLManager::InputsFetcher(func::function functor) + void SDLManager::InputsFetcher(std::function functor) { SDL_Event event; while(SDL_PollEvent(&event)) diff --git a/runtime/Sources/Renderer/RenderPasses/2DPass.cpp b/runtime/Sources/Renderer/RenderPasses/2DPass.cpp index 2a13704..c3aff3f 100644 --- a/runtime/Sources/Renderer/RenderPasses/2DPass.cpp +++ b/runtime/Sources/Renderer/RenderPasses/2DPass.cpp @@ -45,7 +45,7 @@ namespace mlx }; p_fragment_shader = std::make_shared(fragment_shader_code, ShaderType::Fragment, std::move(fragment_shader_layout)); - func::function functor = [this](const EventBase& event) + std::function functor = [this](const EventBase& event) { if(event.What() == Event::ResizeEventCode) m_pipeline.Destroy(); diff --git a/runtime/Sources/Renderer/RenderPasses/FinalPass.cpp b/runtime/Sources/Renderer/RenderPasses/FinalPass.cpp index 0a897a8..b3e5ae8 100644 --- a/runtime/Sources/Renderer/RenderPasses/FinalPass.cpp +++ b/runtime/Sources/Renderer/RenderPasses/FinalPass.cpp @@ -31,7 +31,7 @@ namespace mlx }; p_fragment_shader = std::make_shared(fragment_shader_code, ShaderType::Fragment, std::move(fragment_shader_layout)); - func::function functor = [this](const EventBase& event) + std::function functor = [this](const EventBase& event) { if(event.What() == Event::ResizeEventCode) m_pipeline.Destroy(); diff --git a/runtime/Sources/Renderer/RenderPasses/Passes.cpp b/runtime/Sources/Renderer/RenderPasses/Passes.cpp index 8f36483..a5f43ad 100644 --- a/runtime/Sources/Renderer/RenderPasses/Passes.cpp +++ b/runtime/Sources/Renderer/RenderPasses/Passes.cpp @@ -11,7 +11,7 @@ namespace mlx m_2Dpass.Init(); m_final.Init(); - func::function functor = [this](const EventBase& event) + std::function functor = [this](const EventBase& event) { if(event.What() == Event::ResizeEventCode) m_main_render_texture.Destroy(); diff --git a/third_party/function.h b/third_party/function.h deleted file mode 100644 index 291d46a..0000000 --- a/third_party/function.h +++ /dev/null @@ -1,630 +0,0 @@ -/* -This is free and unencumbered software released into the public domain. - -Anyone is free to copy, modify, publish, use, compile, sell, or -distribute this software, either in source code form or as a compiled -binary, for any purpose, commercial or non-commercial, and by any -means. - -In jurisdictions that recognize copyright laws, the author or authors -of this software dedicate any and all copyright interest in the -software to the public domain. We make this dedication for the benefit -of the public at large and to the detriment of our heirs and -successors. 
We intend this dedication to be an overt act of -relinquishment in perpetuity of all present and future rights to this -software under copyright law. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. -IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR -OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, -ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR -OTHER DEALINGS IN THE SOFTWARE. - -For more information, please refer to - */ -// despite that it would be nice if you give credit to Malte Skarupke - - -#pragma once -#include -#include -#include -#include -#include -#include - -#ifdef _MSC_VER -#define FUNC_NOEXCEPT -#define FUNC_TEMPLATE_NOEXCEPT(FUNCTOR, ALLOCATOR) -#define FUNC_CONSTEXPR const -#else -#define FUNC_NOEXCEPT noexcept -#define FUNC_TEMPLATE_NOEXCEPT(FUNCTOR, ALLOCATOR) noexcept(detail::is_inplace_allocated::value) -#define FUNC_CONSTEXPR constexpr -#endif -#ifdef __GNUC__ -#pragma GCC diagnostic push -#pragma GCC diagnostic ignored "-Wstrict-aliasing" -#endif - -#define FUNC_MOVE(value) static_cast::type &&>(value) -#define FUNC_FORWARD(type, value) static_cast(value) - -namespace func -{ -#ifndef FUNC_NO_EXCEPTIONS - struct bad_function_call : std::exception - { - const char * what() const FUNC_NOEXCEPT override - { - return "Bad function call"; - } - }; -#endif - -template -struct force_function_heap_allocation - : std::false_type -{ -}; - -template -class function; - -namespace detail -{ - struct manager_storage_type; - struct function_manager; - struct functor_padding - { - protected: - size_t padding_first; - size_t padding_second; - }; - - struct empty_struct - { - }; - -# ifndef FUNC_NO_EXCEPTIONS - template - Result empty_call(const functor_padding &, Arguments...) - { - throw bad_function_call(); - } -# endif - - template - struct is_inplace_allocated - { - static const bool value - // so that it fits - = sizeof(T) <= sizeof(functor_padding) - // so that it will be aligned - && std::alignment_of::value % std::alignment_of::value == 0 - // so that we can offer noexcept move - && std::is_nothrow_move_constructible::value - // so that the user can override it - && !force_function_heap_allocation::value; - }; - - template - T to_functor(T && func) - { - return FUNC_FORWARD(T, func); - } - template - auto to_functor(Result (Class::*func)(Arguments...)) -> decltype(std::mem_fn(func)) - { - return std::mem_fn(func); - } - template - auto to_functor(Result (Class::*func)(Arguments...) const) -> decltype(std::mem_fn(func)) - { - return std::mem_fn(func); - } - - template - struct functor_type - { - typedef decltype(to_functor(std::declval())) type; - }; - - template - bool is_null(const T &) - { - return false; - } - template - bool is_null(Result (* const & function_pointer)(Arguments...)) - { - return function_pointer == nullptr; - } - template - bool is_null(Result (Class::* const & function_pointer)(Arguments...)) - { - return function_pointer == nullptr; - } - template - bool is_null(Result (Class::* const & function_pointer)(Arguments...) 
const) - { - return function_pointer == nullptr; - } - - template - struct is_valid_function_argument - { - static const bool value = false; - }; - - template - struct is_valid_function_argument, Result (Arguments...)> - { - static const bool value = false; - }; - - template - struct is_valid_function_argument - { -# ifdef _MSC_VER - // as of january 2013 visual studio doesn't support the SFINAE below - static const bool value = true; -# else - template - static decltype(to_functor(std::declval())(std::declval()...)) check(U *); - template - static empty_struct check(...); - - static const bool value = std::is_convertible(nullptr)), Result>::value; -# endif - }; - - typedef const function_manager * manager_type; - - struct manager_storage_type - { - template - Allocator & get_allocator() FUNC_NOEXCEPT - { - return reinterpret_cast(manager); - } - template - const Allocator & get_allocator() const FUNC_NOEXCEPT - { - return reinterpret_cast(manager); - } - - functor_padding functor; - manager_type manager; - }; - - template - struct function_manager_inplace_specialization - { - template - static Result call(const functor_padding & storage, Arguments... arguments) - { - // do not call get_functor_ref because I want this function to be fast - // in debug when nothing gets inlined - return const_cast(reinterpret_cast(storage))(FUNC_FORWARD(Arguments, arguments)...); - } - - static void store_functor(manager_storage_type & storage, T to_store) - { - new (&get_functor_ref(storage)) T(FUNC_FORWARD(T, to_store)); - } - static void move_functor(manager_storage_type & lhs, manager_storage_type && rhs) FUNC_NOEXCEPT - { - new (&get_functor_ref(lhs)) T(FUNC_MOVE(get_functor_ref(rhs))); - } - static void destroy_functor(Allocator &, manager_storage_type & storage) FUNC_NOEXCEPT - { - get_functor_ref(storage).~T(); - } - static T & get_functor_ref(const manager_storage_type & storage) FUNC_NOEXCEPT - { - return const_cast(reinterpret_cast(storage.functor)); - } - }; - template - struct function_manager_inplace_specialization::value>::type> - { - template - static Result call(const functor_padding & storage, Arguments... 
arguments) - { - // do not call get_functor_ptr_ref because I want this function to be fast - // in debug when nothing gets inlined - return (*reinterpret_cast::pointer &>(storage))(FUNC_FORWARD(Arguments, arguments)...); - } - - static void store_functor(manager_storage_type & self, T to_store) - { - Allocator & allocator = self.get_allocator();; - static_assert(sizeof(typename std::allocator_traits::pointer) <= sizeof(self.functor), "The allocator's pointer type is too big"); - typename std::allocator_traits::pointer * ptr = new (&get_functor_ptr_ref(self)) typename std::allocator_traits::pointer(std::allocator_traits::allocate(allocator, 1)); - std::allocator_traits::construct(allocator, *ptr, FUNC_FORWARD(T, to_store)); - } - static void move_functor(manager_storage_type & lhs, manager_storage_type && rhs) FUNC_NOEXCEPT - { - static_assert(std::is_nothrow_move_constructible::pointer>::value, "we can't offer a noexcept swap if the pointer type is not nothrow move constructible"); - new (&get_functor_ptr_ref(lhs)) typename std::allocator_traits::pointer(FUNC_MOVE(get_functor_ptr_ref(rhs))); - // this next assignment makes the destroy function easier - get_functor_ptr_ref(rhs) = nullptr; - } - static void destroy_functor(Allocator & allocator, manager_storage_type & storage) FUNC_NOEXCEPT - { - typename std::allocator_traits::pointer & pointer = get_functor_ptr_ref(storage); - if (!pointer) return; - std::allocator_traits::destroy(allocator, pointer); - std::allocator_traits::deallocate(allocator, pointer, 1); - } - static T & get_functor_ref(const manager_storage_type & storage) FUNC_NOEXCEPT - { - return *get_functor_ptr_ref(storage); - } - static typename std::allocator_traits::pointer & get_functor_ptr_ref(manager_storage_type & storage) FUNC_NOEXCEPT - { - return reinterpret_cast::pointer &>(storage.functor); - } - static const typename std::allocator_traits::pointer & get_functor_ptr_ref(const manager_storage_type & storage) FUNC_NOEXCEPT - { - return reinterpret_cast::pointer &>(storage.functor); - } - }; - - template - static const function_manager & get_default_manager(); - - template - static void create_manager(manager_storage_type & storage, Allocator && allocator) - { - new (&storage.get_allocator()) Allocator(FUNC_MOVE(allocator)); - storage.manager = &get_default_manager(); - } - - // this struct acts as a vtable. it is an optimization to prevent - // code-bloat from rtti. 
see the documentation of boost::function - struct function_manager - { - template - inline static FUNC_CONSTEXPR function_manager create_default_manager() - { -# ifdef _MSC_VER - function_manager result = -# else - return function_manager -# endif - { - &templated_call_move_and_destroy, - &templated_call_copy, - &templated_call_copy_functor_only, - &templated_call_destroy, -# ifndef FUNC_NO_RTTI - &templated_call_type_id, - &templated_call_target -# endif - }; -# ifdef _MSC_VER - return result; -# endif - } - - void (* const call_move_and_destroy)(manager_storage_type & lhs, manager_storage_type && rhs); - void (* const call_copy)(manager_storage_type & lhs, const manager_storage_type & rhs); - void (* const call_copy_functor_only)(manager_storage_type & lhs, const manager_storage_type & rhs); - void (* const call_destroy)(manager_storage_type & manager); -# ifndef FUNC_NO_RTTI - const std::type_info & (* const call_type_id)(); - void * (* const call_target)(const manager_storage_type & manager, const std::type_info & type); -# endif - - template - static void templated_call_move_and_destroy(manager_storage_type & lhs, manager_storage_type && rhs) - { - typedef function_manager_inplace_specialization specialization; - specialization::move_functor(lhs, FUNC_MOVE(rhs)); - specialization::destroy_functor(rhs.get_allocator(), rhs); - create_manager(lhs, FUNC_MOVE(rhs.get_allocator())); - rhs.get_allocator().~Allocator(); - } - template - static void templated_call_copy(manager_storage_type & lhs, const manager_storage_type & rhs) - { - typedef function_manager_inplace_specialization specialization; - create_manager(lhs, Allocator(rhs.get_allocator())); - specialization::store_functor(lhs, specialization::get_functor_ref(rhs)); - } - template - static void templated_call_destroy(manager_storage_type & self) - { - typedef function_manager_inplace_specialization specialization; - specialization::destroy_functor(self.get_allocator(), self); - self.get_allocator().~Allocator(); - } - template - static void templated_call_copy_functor_only(manager_storage_type & lhs, const manager_storage_type & rhs) - { - typedef function_manager_inplace_specialization specialization; - specialization::store_functor(lhs, specialization::get_functor_ref(rhs)); - } -# ifndef FUNC_NO_RTTI - template - static const std::type_info & templated_call_type_id() - { - return typeid(T); - } - template - static void * templated_call_target(const manager_storage_type & self, const std::type_info & type) - { - typedef function_manager_inplace_specialization specialization; - if (type == typeid(T)) - return &specialization::get_functor_ref(self); - else - return nullptr; - } -# endif - }; - template - inline static const function_manager & get_default_manager() - { - static FUNC_CONSTEXPR function_manager default_manager = function_manager::create_default_manager(); - return default_manager; - } - - template - struct typedeffer - { - typedef Result result_type; - }; - template - struct typedeffer - { - typedef Result result_type; - typedef Argument argument_type; - }; - template - struct typedeffer - { - typedef Result result_type; - typedef First_Argument first_argument_type; - typedef Second_Argument second_argument_type; - }; -} - -template -class function - : public detail::typedeffer -{ -public: - function() FUNC_NOEXCEPT - { - initialize_empty(); - } - function(std::nullptr_t) FUNC_NOEXCEPT - { - initialize_empty(); - } - function(function && other) FUNC_NOEXCEPT - { - initialize_empty(); - swap(other); - } - function(const 
function & other) - : call(other.call) - { - other.manager_storage.manager->call_copy(manager_storage, other.manager_storage); - } - template - function(T functor, - typename std::enable_if::value, detail::empty_struct>::type = detail::empty_struct()) FUNC_TEMPLATE_NOEXCEPT(T, std::allocator::type>) - { - if (detail::is_null(functor)) - { - initialize_empty(); - } - else - { - typedef typename detail::functor_type::type functor_type; - initialize(detail::to_functor(FUNC_FORWARD(T, functor)), std::allocator()); - } - } - template - function(std::allocator_arg_t, const Allocator &) - { - // ignore the allocator because I don't allocate - initialize_empty(); - } - template - function(std::allocator_arg_t, const Allocator &, std::nullptr_t) - { - // ignore the allocator because I don't allocate - initialize_empty(); - } - template - function(std::allocator_arg_t, const Allocator & allocator, T functor, - typename std::enable_if::value, detail::empty_struct>::type = detail::empty_struct()) - FUNC_TEMPLATE_NOEXCEPT(T, Allocator) - { - if (detail::is_null(functor)) - { - initialize_empty(); - } - else - { - initialize(detail::to_functor(FUNC_FORWARD(T, functor)), Allocator(allocator)); - } - } - template - function(std::allocator_arg_t, const Allocator & allocator, const function & other) - : call(other.call) - { - typedef typename std::allocator_traits::template rebind_alloc MyAllocator; - - // first try to see if the allocator matches the target type - detail::manager_type manager_for_allocator = &detail::get_default_manager::value_type, Allocator>(); - if (other.manager_storage.manager == manager_for_allocator) - { - detail::create_manager::value_type, Allocator>(manager_storage, Allocator(allocator)); - manager_for_allocator->call_copy_functor_only(manager_storage, other.manager_storage); - } - // if it does not, try to see if the target contains my type. this - // breaks the recursion of the last case. otherwise repeated copies - // would allocate more and more memory - else - { - detail::manager_type manager_for_function = &detail::get_default_manager(); - if (other.manager_storage.manager == manager_for_function) - { - detail::create_manager(manager_storage, MyAllocator(allocator)); - manager_for_function->call_copy_functor_only(manager_storage, other.manager_storage); - } - else - { - // else store the other function as my target - initialize(other, MyAllocator(allocator)); - } - } - } - template - function(std::allocator_arg_t, const Allocator &, function && other) FUNC_NOEXCEPT - { - // ignore the allocator because I don't allocate - initialize_empty(); - swap(other); - } - - function & operator=(function other) FUNC_NOEXCEPT - { - swap(other); - return *this; - } - ~function() FUNC_NOEXCEPT - { - manager_storage.manager->call_destroy(manager_storage); - } - - Result operator()(Arguments... 
arguments) const - { - return call(manager_storage.functor, FUNC_FORWARD(Arguments, arguments)...); - } - - template - void assign(T && functor, const Allocator & allocator) FUNC_TEMPLATE_NOEXCEPT(T, Allocator) - { - function(std::allocator_arg, allocator, functor).swap(*this); - } - - void swap(function & other) FUNC_NOEXCEPT - { - detail::manager_storage_type temp_storage; - other.manager_storage.manager->call_move_and_destroy(temp_storage, FUNC_MOVE(other.manager_storage)); - manager_storage.manager->call_move_and_destroy(other.manager_storage, FUNC_MOVE(manager_storage)); - temp_storage.manager->call_move_and_destroy(manager_storage, FUNC_MOVE(temp_storage)); - - std::swap(call, other.call); - } - - -# ifndef FUNC_NO_RTTI - const std::type_info & target_type() const FUNC_NOEXCEPT - { - return manager_storage.manager->call_type_id(); - } - template - T * target() FUNC_NOEXCEPT - { - return static_cast(manager_storage.manager->call_target(manager_storage, typeid(T))); - } - template - const T * target() const FUNC_NOEXCEPT - { - return static_cast(manager_storage.manager->call_target(manager_storage, typeid(T))); - } -# endif - - operator bool() const FUNC_NOEXCEPT - { - -# ifdef FUNC_NO_EXCEPTIONS - return call != nullptr; -# else - return call != &detail::empty_call; -# endif - } - -private: - detail::manager_storage_type manager_storage; - Result (*call)(const detail::functor_padding &, Arguments...); - - template - void initialize(T functor, Allocator && allocator) - { - call = &detail::function_manager_inplace_specialization::template call; - detail::create_manager(manager_storage, FUNC_FORWARD(Allocator, allocator)); - detail::function_manager_inplace_specialization::store_functor(manager_storage, FUNC_FORWARD(T, functor)); - } - - typedef Result(*Empty_Function_Type)(Arguments...); - void initialize_empty() FUNC_NOEXCEPT - { - typedef std::allocator Allocator; - static_assert(detail::is_inplace_allocated::value, "The empty function should benefit from small functor optimization"); - - detail::create_manager(manager_storage, Allocator()); - detail::function_manager_inplace_specialization::store_functor(manager_storage, nullptr); -# ifdef FUNC_NO_EXCEPTIONS - call = nullptr; -# else - call = &detail::empty_call; -# endif - } -}; - -template -bool operator==(std::nullptr_t, const function & rhs) FUNC_NOEXCEPT -{ - return !rhs; -} -template -bool operator==(const function & lhs, std::nullptr_t) FUNC_NOEXCEPT -{ - return !lhs; -} -template -bool operator!=(std::nullptr_t, const function & rhs) FUNC_NOEXCEPT -{ - return rhs; -} -template -bool operator!=(const function & lhs, std::nullptr_t) FUNC_NOEXCEPT -{ - return lhs; -} - -template -void swap(function & lhs, function & rhs) -{ - lhs.swap(rhs); -} - -} // end namespace func - -namespace std -{ -template -struct uses_allocator, Allocator> - : std::true_type -{ -}; -} - -#ifdef __GNUC__ -#pragma GCC diagnostic pop -#endif -#undef FUNC_NOEXCEPT -#undef FUNC_TEMPLATE_NOEXCEPT -#undef FUNC_FORWARD -#undef FUNC_MOVE -#undef FUNC_CONSTEXPR From f73fa1b02db574699b330c0f32f06d89e04fa64c Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Sat, 11 Jan 2025 22:42:40 +0100 Subject: [PATCH 129/131] fixing macOS issue --- runtime/Includes/Core/Memory.h | 4 ++-- runtime/Includes/PreCompiled.h | 8 ++++---- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/runtime/Includes/Core/Memory.h b/runtime/Includes/Core/Memory.h index 891baba..49038c7 100644 --- a/runtime/Includes/Core/Memory.h +++ b/runtime/Includes/Core/Memory.h @@ -16,8 +16,8 @@ 
namespace mlx static void* AlignedRealloc(void* ptr, std::size_t alignment, std::size_t size); static void Free(void* ptr); - inline static bool IsInit() noexcept { return s_instance != nullptr; } - inline static MemManager& Get() noexcept { return *s_instance; } + static inline bool IsInit() noexcept { return s_instance != nullptr; } + static inline MemManager& Get() noexcept { return *s_instance; } ~MemManager(); diff --git a/runtime/Includes/PreCompiled.h b/runtime/Includes/PreCompiled.h index f39b701..d900ce8 100644 --- a/runtime/Includes/PreCompiled.h +++ b/runtime/Includes/PreCompiled.h @@ -8,6 +8,10 @@ #include #include #include +#include +#include +#include +#include #include @@ -29,12 +33,8 @@ #include #include #include -#include -#include #include #include -#include -#include #include #include #include From d2a8ac46f670cfce7f8eeb4d1b7686539b1e1cb3 Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Sat, 11 Jan 2025 22:47:26 +0100 Subject: [PATCH 130/131] fixing macOS issue --- Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Makefile b/Makefile index 37104a3..81930c5 100644 --- a/Makefile +++ b/Makefile @@ -31,7 +31,7 @@ CXX = clang++ CXXFLAGS = -std=c++20 -fPIC -Wall -Wextra -DSDL_MAIN_HANDLED INCLUDES = -I./includes -I./runtime/Includes -I./runtime/Sources -I./third_party -CXXPCHFLAGS = -xc++-header +CXXPCHFLAGS = -xc++-header -std=c++20 -Wall -Wextra PCH_SOURCE = runtime/Includes/PreCompiled.h GCH = runtime/Includes/PreCompiled.h.gch From 07f521d3d5412f30b81d700bf50b676755d1858a Mon Sep 17 00:00:00 2001 From: Kbz-8 Date: Sat, 11 Jan 2025 23:04:58 +0100 Subject: [PATCH 131/131] fixing macOS issue --- runtime/Includes/PreCompiled.h | 2 ++ runtime/Sources/Core/Logs.cpp | 8 +++++++- 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/runtime/Includes/PreCompiled.h b/runtime/Includes/PreCompiled.h index d900ce8..653f085 100644 --- a/runtime/Includes/PreCompiled.h +++ b/runtime/Includes/PreCompiled.h @@ -5,6 +5,7 @@ #include #include + #include #include #include @@ -12,6 +13,7 @@ #include #include #include +#include #include diff --git a/runtime/Sources/Core/Logs.cpp b/runtime/Sources/Core/Logs.cpp index f61ec99..780cb15 100644 --- a/runtime/Sources/Core/Logs.cpp +++ b/runtime/Sources/Core/Logs.cpp @@ -47,9 +47,15 @@ namespace mlx default: break; } + /* const std::chrono::zoned_time current_time{ std::chrono::current_zone(), std::chrono::floor(std::chrono::system_clock::now()) }; - std::cout << Ansi::yellow << std::format("[{0:%H:%M:%S}] ", current_time) << Ansi::def << code_infos << message << std::endl; + */ + std::time_t now = time(0); + std::tm tstruct = *localtime(&now); + char buffer[80]; + std::strftime(buffer, sizeof(buffer), "[%X] ", &tstruct); + std::cout << Ansi::yellow << buffer << Ansi::def << code_infos << message << std::endl; if(type == LogType::FatalError) {